diff --git a/go.mod b/go.mod index 17582c835..cdf282e77 100644 --- a/go.mod +++ b/go.mod @@ -18,7 +18,6 @@ require ( github.com/dustin/go-humanize v1.0.1 github.com/fatih/color v1.16.0 github.com/golang/mock v1.6.0 - github.com/golangci/golangci-lint v1.56.2 github.com/gopacket/gopacket v1.2.0 github.com/hashicorp/go-multierror v1.1.1 github.com/jessevdk/go-flags v1.5.0 @@ -37,86 +36,25 @@ require ( ) require ( - 4d63.com/gocheckcompilerdirectives v1.2.1 // indirect - 4d63.com/gochecknoglobals v0.2.1 // indirect cloud.google.com/go v0.112.1 // indirect cloud.google.com/go/compute v1.25.0 // indirect cloud.google.com/go/compute/metadata v0.2.3 // indirect cloud.google.com/go/iam v1.1.6 // indirect cloud.google.com/go/storage v1.39.0 // indirect code.cloudfoundry.org/tlsconfig v0.0.0-20240308143921-4b874b341ed6 // indirect - github.com/4meepo/tagalign v1.3.3 // indirect - github.com/Abirdcfly/dupword v0.0.14 // indirect - github.com/Antonboom/errname v0.1.12 // indirect - github.com/Antonboom/nilnil v0.1.7 // indirect - github.com/Antonboom/testifylint v1.2.0 // indirect - github.com/BurntSushi/toml v1.3.2 // indirect - github.com/Djarvur/go-err113 v0.0.0-20210108212216-aea10b59be24 // indirect - github.com/GaijinEntertainment/go-exhaustruct/v3 v3.2.0 // indirect - github.com/Masterminds/semver v1.5.0 // indirect - github.com/OpenPeeDeeP/depguard/v2 v2.2.0 // indirect github.com/VividCortex/ewma v1.2.0 // indirect - github.com/alecthomas/go-check-sumtype v0.1.4 // indirect - github.com/alexkohler/nakedret/v2 v2.0.2 // indirect - github.com/alexkohler/prealloc v1.0.0 // indirect - github.com/alingse/asasalint v0.0.11 // indirect - github.com/ashanbrown/forbidigo v1.6.0 // indirect - github.com/ashanbrown/makezero v1.1.1 // indirect github.com/aws/aws-sdk-go v1.50.35 // indirect - github.com/beorn7/perks v1.0.1 // indirect - github.com/bkielbasa/cyclop v1.2.1 // indirect - github.com/blizzy78/varnamelen v0.8.0 // indirect github.com/bmatcuk/doublestar v1.3.4 // indirect - github.com/bombsimon/wsl/v4 v4.2.1 // indirect - github.com/breml/bidichk v0.2.7 // indirect - github.com/breml/errchkjson v0.3.6 // indirect - github.com/butuzov/ireturn v0.3.0 // indirect - github.com/butuzov/mirror v1.1.0 // indirect - github.com/catenacyber/perfsprint v0.7.1 // indirect - github.com/ccojocar/zxcvbn-go v1.0.2 // indirect - github.com/cespare/xxhash/v2 v2.2.0 // indirect - github.com/charithe/durationcheck v0.0.10 // indirect github.com/charlievieth/fs v0.0.3 // indirect - github.com/chavacava/garif v0.1.0 // indirect github.com/cloudfoundry/go-socks5 v0.0.0-20180221174514-54f73bdb8a8e // indirect - github.com/curioswitch/go-reassign v0.2.0 // indirect - github.com/daixiang0/gci v0.13.0 // indirect + github.com/creack/pty v1.1.9 // indirect github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc // indirect - github.com/denis-tingaikin/go-header v0.5.0 // indirect - github.com/esimonov/ifshort v1.0.4 // indirect - github.com/ettle/strcase v0.2.0 // indirect - github.com/fatih/structtag v1.2.0 // indirect github.com/felixge/httpsnoop v1.0.4 // indirect - github.com/firefart/nonamedreturns v1.0.4 // indirect - github.com/fsnotify/fsnotify v1.7.0 // indirect - github.com/fzipp/gocyclo v0.6.0 // indirect - github.com/ghostiam/protogetter v0.3.4 // indirect - github.com/go-critic/go-critic v0.11.2 // indirect github.com/go-logr/logr v1.4.1 // indirect github.com/go-logr/stdr v1.2.2 // indirect github.com/go-task/slim-sprig v0.0.0-20230315185526-52ccab3ef572 // indirect - github.com/go-toolsmith/astcast 
v1.1.0 // indirect - github.com/go-toolsmith/astcopy v1.1.0 // indirect - github.com/go-toolsmith/astequal v1.2.0 // indirect - github.com/go-toolsmith/astfmt v1.1.0 // indirect - github.com/go-toolsmith/astp v1.1.0 // indirect - github.com/go-toolsmith/strparse v1.1.0 // indirect - github.com/go-toolsmith/typep v1.1.0 // indirect - github.com/go-viper/mapstructure/v2 v2.0.0-alpha.1 // indirect - github.com/go-xmlfmt/xmlfmt v1.1.2 // indirect - github.com/gobwas/glob v0.2.3 // indirect - github.com/gofrs/flock v0.8.1 // indirect github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da // indirect github.com/golang/protobuf v1.5.4 // indirect - github.com/golangci/check v0.0.0-20180506172741-cfe4005ccda2 // indirect - github.com/golangci/dupl v0.0.0-20180902072040-3e9179ac440a // indirect - github.com/golangci/go-misc v0.0.0-20220329215616-d24fe342adfe // indirect - github.com/golangci/gofmt v0.0.0-20231019111953-be8c47862aaa // indirect - github.com/golangci/lint-1 v0.0.0-20191013205115-297bf364a8e0 // indirect - github.com/golangci/maligned v0.0.0-20180506175553-b1d89398deca // indirect - github.com/golangci/misspell v0.4.1 // indirect - github.com/golangci/revgrep v0.5.2 // indirect - github.com/golangci/unconvert v0.0.0-20240309020433-c5143eacb3ed // indirect github.com/google/btree v1.1.2 // indirect github.com/google/go-cmp v0.6.0 // indirect github.com/google/pprof v0.0.0-20240227163752-401108e1b7e7 // indirect @@ -124,116 +62,25 @@ require ( github.com/google/uuid v1.6.0 // indirect github.com/googleapis/enterprise-certificate-proxy v0.3.2 // indirect github.com/googleapis/gax-go/v2 v2.12.2 // indirect - github.com/gordonklaus/ineffassign v0.1.0 // indirect - github.com/gostaticanalysis/analysisutil v0.7.1 // indirect - github.com/gostaticanalysis/comment v1.4.2 // indirect - github.com/gostaticanalysis/forcetypeassert v0.1.0 // indirect - github.com/gostaticanalysis/nilerr v0.1.1 // indirect github.com/hashicorp/errwrap v1.1.0 // indirect - github.com/hashicorp/go-version v1.6.0 // indirect - github.com/hashicorp/hcl v1.0.0 // indirect - github.com/hexops/gotextdiff v1.0.3 // indirect github.com/inconshreveable/mousetrap v1.1.0 // indirect - github.com/jgautheron/goconst v1.7.0 // indirect - github.com/jingyugao/rowserrcheck v1.1.1 // indirect - github.com/jirfag/go-printf-func-name v0.0.0-20200119135958-7558a9eaa5af // indirect - github.com/jjti/go-spancheck v0.5.3 // indirect github.com/jmespath/go-jmespath v0.4.0 // indirect github.com/jpillora/backoff v1.0.0 // indirect - github.com/julz/importas v0.1.0 // indirect - github.com/kisielk/errcheck v1.7.0 // indirect - github.com/kisielk/gotool v1.0.0 // indirect - github.com/kkHAIKE/contextcheck v1.1.4 // indirect - github.com/kulti/thelper v0.6.3 // indirect - github.com/kunwardeep/paralleltest v1.0.10 // indirect - github.com/kyoh86/exportloopref v0.1.11 // indirect - github.com/ldez/gomoddirectives v0.2.3 // indirect - github.com/ldez/tagliatelle v0.5.0 // indirect - github.com/leonklingele/grouper v1.1.1 // indirect - github.com/lufeee/execinquery v1.2.1 // indirect - github.com/macabu/inamedparam v0.1.3 // indirect - github.com/magiconair/properties v1.8.7 // indirect - github.com/maratori/testableexamples v1.0.0 // indirect - github.com/maratori/testpackage v1.1.1 // indirect - github.com/matoous/godox v0.0.0-20240105082147-c5b5e0e7c0c0 // indirect + github.com/kr/pretty v0.3.1 // indirect github.com/mattn/go-colorable v0.1.13 // indirect github.com/mattn/go-runewidth v0.0.15 // indirect - 
github.com/mbilski/exhaustivestruct v1.2.0 // indirect - github.com/mgechev/revive v1.3.7 // indirect - github.com/mitchellh/go-homedir v1.1.0 // indirect - github.com/mitchellh/mapstructure v1.5.0 // indirect - github.com/moricho/tparallel v0.3.1 // indirect - github.com/nakabonne/nestif v0.3.1 // indirect - github.com/nishanths/exhaustive v0.12.0 // indirect - github.com/nishanths/predeclared v0.2.2 // indirect github.com/nu7hatch/gouuid v0.0.0-20131221200532-179d4d0c4d8d // indirect - github.com/nunnatsa/ginkgolinter v0.15.2 // indirect - github.com/olekukonko/tablewriter v0.0.5 // indirect - github.com/pelletier/go-toml/v2 v2.1.1 // indirect github.com/pivotal-cf/paraphernalia v0.0.0-20180203224945-a64ae2051c20 // indirect github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 // indirect - github.com/polyfloyd/go-errorlint v1.4.8 // indirect - github.com/prometheus/client_golang v1.19.0 // indirect - github.com/prometheus/client_model v0.6.0 // indirect - github.com/prometheus/common v0.50.0 // indirect - github.com/prometheus/procfs v0.13.0 // indirect - github.com/quasilyte/go-ruleguard v0.4.2 // indirect - github.com/quasilyte/gogrep v0.5.0 // indirect - github.com/quasilyte/regex/syntax v0.0.0-20210819130434-b3f0c404a727 // indirect - github.com/quasilyte/stdinfo v0.0.0-20220114132959-f7386bf02567 // indirect github.com/rivo/uniseg v0.4.7 // indirect - github.com/ryancurrah/gomodguard v1.3.0 // indirect - github.com/ryanrolds/sqlclosecheck v0.5.1 // indirect - github.com/sagikazarmark/locafero v0.4.0 // indirect - github.com/sagikazarmark/slog-shim v0.1.0 // indirect - github.com/sanposhiho/wastedassign/v2 v2.0.7 // indirect - github.com/sashamelentyev/interfacebloat v1.1.0 // indirect - github.com/sashamelentyev/usestdlibvars v1.25.0 // indirect - github.com/securego/gosec/v2 v2.19.0 // indirect - github.com/shazow/go-diff v0.0.0-20160112020656-b6b7b6733b8c // indirect - github.com/sirupsen/logrus v1.9.3 // indirect - github.com/sivchari/containedctx v1.0.3 // indirect - github.com/sivchari/nosnakecase v1.7.0 // indirect - github.com/sivchari/tenv v1.7.1 // indirect - github.com/sonatard/noctx v0.0.2 // indirect - github.com/sourcegraph/conc v0.3.0 // indirect - github.com/sourcegraph/go-diff v0.7.0 // indirect - github.com/spf13/afero v1.11.0 // indirect - github.com/spf13/cast v1.6.0 // indirect + github.com/rogpeppe/go-internal v1.12.0 // indirect github.com/spf13/pflag v1.0.5 // indirect - github.com/spf13/viper v1.18.2 // indirect - github.com/ssgreg/nlreturn/v2 v2.2.1 // indirect - github.com/stbenjam/no-sprintf-host-port v0.1.1 // indirect - github.com/stretchr/objx v0.5.2 // indirect - github.com/stretchr/testify v1.9.0 // indirect - github.com/subosito/gotenv v1.6.0 // indirect - github.com/t-yuki/gocover-cobertura v0.0.0-20180217150009-aaee18c8195c // indirect - github.com/tdakkota/asciicheck v0.2.0 // indirect - github.com/tetafro/godot v1.4.16 // indirect - github.com/timakin/bodyclose v0.0.0-20240125160201-f835fa56326a // indirect - github.com/timonwong/loggercheck v0.9.4 // indirect - github.com/tomarrell/wrapcheck/v2 v2.8.3 // indirect - github.com/tommy-muehle/go-mnd/v2 v2.5.1 // indirect - github.com/ultraware/funlen v0.1.0 // indirect - github.com/ultraware/whitespace v0.1.0 // indirect - github.com/uudashr/gocognit v1.1.2 // indirect - github.com/xen0n/gosmopolitan v1.2.2 // indirect - github.com/yagipy/maintidx v1.0.0 // indirect - github.com/yeya24/promlinter v0.2.0 // indirect - github.com/ykadowak/zerologlint v0.1.5 // indirect - 
gitlab.com/bosi/decorder v0.4.1 // indirect - go-simpler.org/musttag v0.9.0 // indirect - go-simpler.org/sloglint v0.4.0 // indirect go.opencensus.io v0.24.0 // indirect go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.49.0 // indirect go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.49.0 // indirect go.opentelemetry.io/otel v1.24.0 // indirect go.opentelemetry.io/otel/metric v1.24.0 // indirect go.opentelemetry.io/otel/trace v1.24.0 // indirect - go.uber.org/multierr v1.11.0 // indirect - go.uber.org/zap v1.27.0 // indirect - golang.org/x/exp v0.0.0-20240222234643-814bf88cf225 // indirect - golang.org/x/exp/typeparams v0.0.0-20240222234643-814bf88cf225 // indirect golang.org/x/mod v0.16.0 // indirect golang.org/x/net v0.22.0 // indirect golang.org/x/oauth2 v0.18.0 // indirect @@ -248,10 +95,4 @@ require ( google.golang.org/genproto/googleapis/rpc v0.0.0-20240308144416-29370a3891b7 // indirect google.golang.org/grpc v1.62.1 // indirect google.golang.org/protobuf v1.33.0 // indirect - gopkg.in/ini.v1 v1.67.0 // indirect - honnef.co/go/tools v0.4.7 // indirect - mvdan.cc/gofumpt v0.6.0 // indirect - mvdan.cc/interfacer v0.0.0-20180901003855-c20040233aed // indirect - mvdan.cc/lint v0.0.0-20170908181259-adc824a0674b // indirect - mvdan.cc/unparam v0.0.0-20240104100049-c549a3470d14 // indirect ) diff --git a/go.sum b/go.sum index ae9ba2126..a2fed0e88 100644 --- a/go.sum +++ b/go.sum @@ -1,47 +1,12 @@ -4d63.com/gocheckcompilerdirectives v1.2.1 h1:AHcMYuw56NPjq/2y615IGg2kYkBdTvOaojYCBcRE7MA= -4d63.com/gocheckcompilerdirectives v1.2.1/go.mod h1:yjDJSxmDTtIHHCqX0ufRYZDL6vQtMG7tJdKVeWwsqvs= -4d63.com/gochecknoglobals v0.2.1 h1:1eiorGsgHOFOuoOiJDy2psSrQbRdIHrlge0IJIkUgDc= -4d63.com/gochecknoglobals v0.2.1/go.mod h1:KRE8wtJB3CXCsb1xy421JfTHIIbmT3U5ruxw2Qu8fSU= cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= -cloud.google.com/go v0.34.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= -cloud.google.com/go v0.38.0/go.mod h1:990N+gfupTy94rShfmMCWGDn0LpTmnzTp2qbd1dvSRU= -cloud.google.com/go v0.44.1/go.mod h1:iSa0KzasP4Uvy3f1mN/7PiObzGgflwredwwASm/v6AU= -cloud.google.com/go v0.44.2/go.mod h1:60680Gw3Yr4ikxnPRS/oxxkBccT6SA1yMk63TGekxKY= -cloud.google.com/go v0.45.1/go.mod h1:RpBamKRgapWJb87xiFSdk4g1CME7QZg3uwTez+TSTjc= -cloud.google.com/go v0.46.3/go.mod h1:a6bKKbmY7er1mI7TEI4lsAkts/mkhTSZK8w33B4RAg0= -cloud.google.com/go v0.50.0/go.mod h1:r9sluTvynVuxRIOHXQEHMFffphuXHOMZMycpNR5e6To= -cloud.google.com/go v0.52.0/go.mod h1:pXajvRH/6o3+F9jDHZWQ5PbGhn+o8w9qiu/CffaVdO4= -cloud.google.com/go v0.53.0/go.mod h1:fp/UouUEsRkN6ryDKNW/Upv/JBKnv6WDthjR6+vze6M= -cloud.google.com/go v0.54.0/go.mod h1:1rq2OEkV3YMf6n/9ZvGWI3GWw0VoqH/1x2nd8Is/bPc= -cloud.google.com/go v0.56.0/go.mod h1:jr7tqZxxKOVYizybht9+26Z/gUq7tiRzu+ACVAMbKVk= -cloud.google.com/go v0.57.0/go.mod h1:oXiQ6Rzq3RAkkY7N6t3TcE6jE+CIBBbA36lwQ1JyzZs= -cloud.google.com/go v0.62.0/go.mod h1:jmCYTdRCQuc1PHIIJ/maLInMho30T/Y0M4hTdTShOYc= -cloud.google.com/go v0.65.0/go.mod h1:O5N8zS7uWy9vkA9vayVHs65eM1ubvY4h553ofrNHObY= cloud.google.com/go v0.112.1 h1:uJSeirPke5UNZHIb4SxfZklVSiWWVqW4oXlETwZziwM= cloud.google.com/go v0.112.1/go.mod h1:+Vbu+Y1UU+I1rjmzeMOb/8RfkKJK2Gyxi1X6jJCZLo4= -cloud.google.com/go/bigquery v1.0.1/go.mod h1:i/xbL2UlR5RvWAURpBYZTtm/cXjCha9lbfbpx4poX+o= -cloud.google.com/go/bigquery v1.3.0/go.mod h1:PjpwJnslEMmckchkHFfq+HTD2DmtT67aNFKH1/VBDHE= -cloud.google.com/go/bigquery v1.4.0/go.mod h1:S8dzgnTigyfTmLBfrtrhyYhwRxG72rYxvftPBK2Dvzc= -cloud.google.com/go/bigquery 
v1.5.0/go.mod h1:snEHRnqQbz117VIFhE8bmtwIDY80NLUZUMb4Nv6dBIg= -cloud.google.com/go/bigquery v1.7.0/go.mod h1://okPTzCYNXSlb24MZs83e2Do+h+VXtc4gLoIoXIAPc= -cloud.google.com/go/bigquery v1.8.0/go.mod h1:J5hqkt3O0uAFnINi6JXValWIb1v0goeZM77hZzJN/fQ= cloud.google.com/go/compute v1.25.0 h1:H1/4SqSUhjPFE7L5ddzHOfY2bCAvjwNRZPNl6Ni5oYU= cloud.google.com/go/compute v1.25.0/go.mod h1:GR7F0ZPZH8EhChlMo9FkLd7eUTwEymjqQagxzilIxIE= cloud.google.com/go/compute/metadata v0.2.3 h1:mg4jlk7mCAj6xXp9UJ4fjI9VUI5rubuGBW5aJ7UnBMY= cloud.google.com/go/compute/metadata v0.2.3/go.mod h1:VAV5nSsACxMJvgaAuX6Pk2AawlZn8kiOGuCv6gTkwuA= -cloud.google.com/go/datastore v1.0.0/go.mod h1:LXYbyblFSglQ5pkeyhO+Qmw7ukd3C+pD7TKLgZqpHYE= -cloud.google.com/go/datastore v1.1.0/go.mod h1:umbIZjpQpHh4hmRpGhH4tLFup+FVzqBi1b3c64qFpCk= cloud.google.com/go/iam v1.1.6 h1:bEa06k05IO4f4uJonbB5iAgKTPpABy1ayxaIZV/GHVc= cloud.google.com/go/iam v1.1.6/go.mod h1:O0zxdPeGBoFdWW3HWmBxJsk0pfvNM/p/qa82rWOGTwI= -cloud.google.com/go/pubsub v1.0.1/go.mod h1:R0Gpsv3s54REJCy4fxDixWD93lHJMoZTyQ2kNxGRt3I= -cloud.google.com/go/pubsub v1.1.0/go.mod h1:EwwdRX2sKPjnvnqCa270oGRyludottCI76h+R3AArQw= -cloud.google.com/go/pubsub v1.2.0/go.mod h1:jhfEVHT8odbXTkndysNHCcx0awwzvfOlguIAii9o8iA= -cloud.google.com/go/pubsub v1.3.1/go.mod h1:i+ucay31+CNRpDW4Lu78I4xXG+O1r/MAHgjpRVR+TSU= -cloud.google.com/go/storage v1.0.0/go.mod h1:IhtSnM/ZTZV8YYJWCY8RULGVqBDmpoyjwiyrjsg+URw= -cloud.google.com/go/storage v1.5.0/go.mod h1:tpKbwo567HUNpVclU5sGELwQWBDZ8gh0ZeosJ0Rtdos= -cloud.google.com/go/storage v1.6.0/go.mod h1:N7U0C8pVQ/+NIKOBQyamJIeKQKkZ+mxpohlUTyfDhBk= -cloud.google.com/go/storage v1.8.0/go.mod h1:Wv1Oy7z6Yz3DshWRJFhqM/UCfaWIRTdp0RXyy7KQOVs= -cloud.google.com/go/storage v1.10.0/go.mod h1:FLPqc6j+Ki4BU591ie1oL6qBQGu2Bl/tZ9ullr3+Kg0= cloud.google.com/go/storage v1.39.0 h1:brbjUa4hbDHhpQf48tjqMaXEV+f1OGoaTmQau9tmCsA= cloud.google.com/go/storage v1.39.0/go.mod h1:OAEj/WZwUYjA3YHQ10/YcN9ttGuEpLwvaoyBXIPikEk= code.cloudfoundry.org/clock v1.1.0 h1:XLzC6W3Ah/Y7ht1rmZ6+QfPdt1iGWEAAtIZXgiaj57c= @@ -50,96 +15,20 @@ code.cloudfoundry.org/tlsconfig v0.0.0-20240308143921-4b874b341ed6 h1:1ST1gDGrIB code.cloudfoundry.org/tlsconfig v0.0.0-20240308143921-4b874b341ed6/go.mod h1:F7K0dpcWQmW1Z0+4at/mDMg1OF89sJJmVMfo/EZR3iI= code.cloudfoundry.org/workpool v0.0.0-20230612151832-b93da105e0e8 h1:Y5PNS8SRggtP2RugVRkT6V7UOV7soi1srzQZ9rB/Zn8= code.cloudfoundry.org/workpool v0.0.0-20230612151832-b93da105e0e8/go.mod h1:O9HdfntfyDvYRH9nh03XdpnGMbjyZVi8nb2Kh+6hDho= -dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU= filippo.io/edwards25519 v1.1.0 h1:FNf4tywRC1HmFuKW5xopWpigGjJKiJSV0Cqo0cJWDaA= filippo.io/edwards25519 v1.1.0/go.mod h1:BxyFTGdWcka3PhytdK4V28tE5sGfRvvvRV7EaN4VDT4= -github.com/4meepo/tagalign v1.3.3 h1:ZsOxcwGD/jP4U/aw7qeWu58i7dwYemfy5Y+IF1ACoNw= -github.com/4meepo/tagalign v1.3.3/go.mod h1:Q9c1rYMZJc9dPRkbQPpcBNCLEmY2njbAsXhQOZFE2dE= -github.com/Abirdcfly/dupword v0.0.14 h1:3U4ulkc8EUo+CaT105/GJ1BQwtgyj6+VaBVbAX11Ba8= -github.com/Abirdcfly/dupword v0.0.14/go.mod h1:VKDAbxdY8YbKUByLGg8EETzYSuC4crm9WwI6Y3S0cLI= -github.com/Antonboom/errname v0.1.12 h1:oh9ak2zUtsLp5oaEd/erjB4GPu9w19NyoIskZClDcQY= -github.com/Antonboom/errname v0.1.12/go.mod h1:bK7todrzvlaZoQagP1orKzWXv59X/x0W0Io2XT1Ssro= -github.com/Antonboom/nilnil v0.1.7 h1:ofgL+BA7vlA1K2wNQOsHzLJ2Pw5B5DpWRLdDAVvvTow= -github.com/Antonboom/nilnil v0.1.7/go.mod h1:TP+ScQWVEq0eSIxqU8CbdT5DFWoHp0MbP+KMUO1BKYQ= -github.com/Antonboom/testifylint v1.2.0 
h1:015bxD8zc5iY8QwTp4+RG9I4kIbqwvGX9TrBbb7jGdM= -github.com/Antonboom/testifylint v1.2.0/go.mod h1:rkmEqjqVnHDRNsinyN6fPSLnoajzFwsCcguJgwADBkw= github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= -github.com/BurntSushi/toml v1.3.2 h1:o7IhLm0Msx3BaB+n3Ag7L8EVlByGnpq14C4YWiu/gL8= -github.com/BurntSushi/toml v1.3.2/go.mod h1:CxXYINrC8qIiEnFrOxCa7Jy5BFHlXnUU2pbicEuybxQ= -github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo= -github.com/Djarvur/go-err113 v0.0.0-20210108212216-aea10b59be24 h1:sHglBQTwgx+rWPdisA5ynNEsoARbiCBOyGcJM4/OzsM= -github.com/Djarvur/go-err113 v0.0.0-20210108212216-aea10b59be24/go.mod h1:4UJr5HIiMZrwgkSPdsjy2uOQExX/WEILpIrO9UPGuXs= -github.com/GaijinEntertainment/go-exhaustruct/v3 v3.2.0 h1:sATXp1x6/axKxz2Gjxv8MALP0bXaNRfQinEwyfMcx8c= -github.com/GaijinEntertainment/go-exhaustruct/v3 v3.2.0/go.mod h1:Nl76DrGNJTA1KJ0LePKBw/vznBX1EHbAZX8mwjR82nI= -github.com/Masterminds/semver v1.5.0 h1:H65muMkzWKEuNDnfl9d70GUjFniHKHRbFPGBuZ3QEww= -github.com/Masterminds/semver v1.5.0/go.mod h1:MB6lktGJrhw8PrUyiEoblNEGEQ+RzHPF078ddwwvV3Y= -github.com/OpenPeeDeeP/depguard/v2 v2.2.0 h1:vDfG60vDtIuf0MEOhmLlLLSzqaRM8EMcgJPdp74zmpA= -github.com/OpenPeeDeeP/depguard/v2 v2.2.0/go.mod h1:CIzddKRvLBC4Au5aYP/i3nyaWQ+ClszLIuVocRiCYFQ= github.com/VividCortex/ewma v1.2.0 h1:f58SaIzcDXrSy3kWaHNvuJgJ3Nmz59Zji6XoJR/q1ow= github.com/VividCortex/ewma v1.2.0/go.mod h1:nz4BbCtbLyFDeC9SUHbtcT5644juEuWfUAUnGx7j5l4= -github.com/alecthomas/assert/v2 v2.2.2 h1:Z/iVC0xZfWTaFNE6bA3z07T86hd45Xe2eLt6WVy2bbk= -github.com/alecthomas/assert/v2 v2.2.2/go.mod h1:pXcQ2Asjp247dahGEmsZ6ru0UVwnkhktn7S0bBDLxvQ= -github.com/alecthomas/go-check-sumtype v0.1.4 h1:WCvlB3l5Vq5dZQTFmodqL2g68uHiSwwlWcT5a2FGK0c= -github.com/alecthomas/go-check-sumtype v0.1.4/go.mod h1:WyYPfhfkdhyrdaligV6svFopZV8Lqdzn5pyVBaV6jhQ= -github.com/alecthomas/repr v0.2.0 h1:HAzS41CIzNW5syS8Mf9UwXhNH1J9aix/BvDRf1Ml2Yk= -github.com/alecthomas/repr v0.2.0/go.mod h1:Fr0507jx4eOXV7AlPV6AVZLYrLIuIeSOWtW57eE/O/4= -github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc= -github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc= -github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0= -github.com/alecthomas/units v0.0.0-20190717042225-c3de453c63f4/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0= -github.com/alecthomas/units v0.0.0-20190924025748-f65c72e2690d/go.mod h1:rBZYJk541a8SKzHPHnH3zbiI+7dagKZ0cgpgrD7Fyho= -github.com/alexkohler/nakedret/v2 v2.0.2 h1:qnXuZNvv3/AxkAb22q/sEsEpcA99YxLFACDtEw9TPxE= -github.com/alexkohler/nakedret/v2 v2.0.2/go.mod h1:2b8Gkk0GsOrqQv/gPWjNLDSKwG8I5moSXG1K4VIBcTQ= -github.com/alexkohler/prealloc v1.0.0 h1:Hbq0/3fJPQhNkN0dR95AVrr6R7tou91y0uHG5pOcUuw= -github.com/alexkohler/prealloc v1.0.0/go.mod h1:VetnK3dIgFBBKmg0YnD9F9x6Icjd+9cvfHR56wJVlKE= -github.com/alingse/asasalint v0.0.11 h1:SFwnQXJ49Kx/1GghOFz1XGqHYKp21Kq1nHad/0WQRnw= -github.com/alingse/asasalint v0.0.11/go.mod h1:nCaoMhw7a9kSJObvQyVzNTPBDbNpdocqrSP7t/cW5+I= -github.com/ashanbrown/forbidigo v1.6.0 h1:D3aewfM37Yb3pxHujIPSpTf6oQk9sc9WZi8gerOIVIY= -github.com/ashanbrown/forbidigo v1.6.0/go.mod h1:Y8j9jy9ZYAEHXdu723cUlraTqbzjKF1MUyfOKL+AjcU= -github.com/ashanbrown/makezero v1.1.1 h1:iCQ87C0V0vSyO+M9E/FZYbu65auqH0lnsOkf5FcB28s= -github.com/ashanbrown/makezero v1.1.1/go.mod 
h1:i1bJLCRSCHOcOa9Y6MyF2FTfMZMFdHvxKHxgO5Z1axI= github.com/aws/aws-sdk-go v1.50.35 h1:llQnNddBI/64pK7pwUFBoWYmg8+XGQUCs214eMbSDZc= github.com/aws/aws-sdk-go v1.50.35/go.mod h1:LF8svs817+Nz+DmiMQKTO3ubZ/6IaTpq3TjupRn3Eqk= -github.com/beorn7/perks v0.0.0-20180321164747-3a771d992973/go.mod h1:Dwedo/Wpr24TaqPxmxbtue+5NUziq4I4S80YR8gNf3Q= -github.com/beorn7/perks v1.0.0/go.mod h1:KWe93zE9D1o94FZ5RNwFwVgaQK1VOXiVxmqh+CedLV8= -github.com/beorn7/perks v1.0.1 h1:VlbKKnNfV8bJzeqoa4cOKqO6bYr3WgKZxO8Z16+hsOM= -github.com/beorn7/perks v1.0.1/go.mod h1:G2ZrVWU2WbWT9wwq4/hrbKbnv/1ERSJQ0ibhJ6rlkpw= -github.com/bkielbasa/cyclop v1.2.1 h1:AeF71HZDob1P2/pRm1so9cd1alZnrpyc4q2uP2l0gJY= -github.com/bkielbasa/cyclop v1.2.1/go.mod h1:K/dT/M0FPAiYjBgQGau7tz+3TMh4FWAEqlMhzFWCrgM= -github.com/blizzy78/varnamelen v0.8.0 h1:oqSblyuQvFsW1hbBHh1zfwrKe3kcSj0rnXkKzsQ089M= -github.com/blizzy78/varnamelen v0.8.0/go.mod h1:V9TzQZ4fLJ1DSrjVDfl89H7aMnTvKkApdHeyESmyR7k= github.com/bmatcuk/doublestar v1.3.4 h1:gPypJ5xD31uhX6Tf54sDPUOBXTqKH4c9aPY66CyQrS0= github.com/bmatcuk/doublestar v1.3.4/go.mod h1:wiQtGV+rzVYxB7WIlirSN++5HPtPlXEo9MEoZQC/PmE= -github.com/bombsimon/wsl/v4 v4.2.1 h1:Cxg6u+XDWff75SIFFmNsqnIOgob+Q9hG6y/ioKbRFiM= -github.com/bombsimon/wsl/v4 v4.2.1/go.mod h1:Xu/kDxGZTofQcDGCtQe9KCzhHphIe0fDuyWTxER9Feo= -github.com/breml/bidichk v0.2.7 h1:dAkKQPLl/Qrk7hnP6P+E0xOodrq8Us7+U0o4UBOAlQY= -github.com/breml/bidichk v0.2.7/go.mod h1:YodjipAGI9fGcYM7II6wFvGhdMYsC5pHDlGzqvEW3tQ= -github.com/breml/errchkjson v0.3.6 h1:VLhVkqSBH96AvXEyclMR37rZslRrY2kcyq+31HCsVrA= -github.com/breml/errchkjson v0.3.6/go.mod h1:jhSDoFheAF2RSDOlCfhHO9KqhZgAYLyvHe7bRCX8f/U= -github.com/butuzov/ireturn v0.3.0 h1:hTjMqWw3y5JC3kpnC5vXmFJAWI/m31jaCYQqzkS6PL0= -github.com/butuzov/ireturn v0.3.0/go.mod h1:A09nIiwiqzN/IoVo9ogpa0Hzi9fex1kd9PSD6edP5ZA= -github.com/butuzov/mirror v1.1.0 h1:ZqX54gBVMXu78QLoiqdwpl2mgmoOJTk7s4p4o+0avZI= -github.com/butuzov/mirror v1.1.0/go.mod h1:8Q0BdQU6rC6WILDiBM60DBfvV78OLJmMmixe7GF45AE= -github.com/catenacyber/perfsprint v0.7.1 h1:PGW5G/Kxn+YrN04cRAZKC+ZuvlVwolYMrIyyTJ/rMmc= -github.com/catenacyber/perfsprint v0.7.1/go.mod h1:/wclWYompEyjUD2FuIIDVKNkqz7IgBIWXIH3V0Zol50= -github.com/ccojocar/zxcvbn-go v1.0.2 h1:na/czXU8RrhXO4EZme6eQJLR4PzcGsahsBOAwU6I3Vg= -github.com/ccojocar/zxcvbn-go v1.0.2/go.mod h1:g1qkXtUSvHP8lhHp5GrSmTz6uWALGRMQdw6Qnz/hi60= github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU= -github.com/cespare/xxhash/v2 v2.1.1/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= -github.com/cespare/xxhash/v2 v2.1.2/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= -github.com/cespare/xxhash/v2 v2.2.0 h1:DC2CZ1Ep5Y4k3ZQ899DldepgrayRUGE6BBZ/cd9Cj44= -github.com/cespare/xxhash/v2 v2.2.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= -github.com/charithe/durationcheck v0.0.10 h1:wgw73BiocdBDQPik+zcEoBG/ob8uyBHf2iyoHGPf5w4= -github.com/charithe/durationcheck v0.0.10/go.mod h1:bCWXb7gYRysD1CU3C+u4ceO49LoGOY1C1L6uouGNreQ= github.com/charlievieth/fs v0.0.3 h1:3lZQXTj4PbE81CVPwALSn+JoyCNXkZgORHN6h2XHGlg= github.com/charlievieth/fs v0.0.3/go.mod h1:hD4sRzto1Hw8zCua76tNVKZxaeZZr1RiKftjAJQRLLo= -github.com/chavacava/garif v0.1.0 h1:2JHa3hbYf5D9dsgseMKAmc/MZ109otzgNFk5s87H9Pc= -github.com/chavacava/garif v0.1.0/go.mod h1:XMyYCkEL58DF0oyW4qDjjnPWONs2HBqYKI+UIPD+Gww= github.com/cheggaaa/pb/v3 v3.1.5 h1:QuuUzeM2WsAqG2gMqtzaWithDJv0i+i6UlnwSCI4QLk= github.com/cheggaaa/pb/v3 v3.1.5/go.mod h1:CrxkeghYTXi1lQBEI7jSn+3svI3cuc19haAj6jM60XI= -github.com/chzyer/logex 
v1.1.10/go.mod h1:+Ywpsq7O8HXn0nuIou7OrIPyXbp3wmkHB+jjWRnGsAI= -github.com/chzyer/readline v0.0.0-20180603132655-2972be24d48e/go.mod h1:nSuG5e5PlCu98SY8svDHJxuZscDgtXS6KTTbou5AhLI= -github.com/chzyer/test v0.0.0-20180213035817-a1ea475d72b1/go.mod h1:Q3SI9o4m/ZMnBNeIyt5eFwwo7qiLfzFZmjNmxjkiQlU= github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw= github.com/cloudfoundry/bosh-agent v0.0.61-0.20230301011448-4cfe06c13ad7 h1:WqAIRLehvJrDhnDaVDS+OTjlbmHInIVU+dhVcvUWqmA= github.com/cloudfoundry/bosh-agent v0.0.61-0.20230301011448-4cfe06c13ad7/go.mod h1:FVqJVDjAlpDNjaI1k79SXdh+ymNMDWPg38jSIzYUF+4= @@ -163,114 +52,43 @@ github.com/cppforlife/go-patch v0.2.0/go.mod h1:67a7aIi94FHDZdoeGSJRRFDp66l9MhaA github.com/cppforlife/go-semi-semantic v0.0.0-20160921010311-576b6af77ae4 h1:J+ghqo7ZubTzelkjo9hntpTtP/9lUCWH9icEmAW+B+Q= github.com/cppforlife/go-semi-semantic v0.0.0-20160921010311-576b6af77ae4/go.mod h1:socxpf5+mELPbosI149vWpNlHK6mbfWFxSWOoSndXR8= github.com/cpuguy83/go-md2man/v2 v2.0.3/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o= -github.com/creack/pty v1.1.7 h1:6pwm8kMQKCmgUg0ZHTm5+/YvRK0s3THD/28+T6/kk4A= github.com/creack/pty v1.1.7/go.mod h1:lj5s0c3V2DBrqTV7llrYr5NG6My20zk30Fl46Y7DoTY= -github.com/curioswitch/go-reassign v0.2.0 h1:G9UZyOcpk/d7Gd6mqYgd8XYWFMw/znxwGDUstnC9DIo= -github.com/curioswitch/go-reassign v0.2.0/go.mod h1:x6OpXuWvgfQaMGks2BZybTngWjT84hqJfKoO8Tt/Roc= -github.com/daixiang0/gci v0.13.0 h1:pSA5Wb05cWbGpKAgs1yGZcc8IEDRLTZcpa4n8FhyWuU= -github.com/daixiang0/gci v0.13.0/go.mod h1:xtHP9N7AHdNvtRNfcx9gwTDfw7FRJx4bZUsiEfiNNAI= +github.com/creack/pty v1.1.9 h1:uDmaGzcdjhF4i/plgjmEsriH11Y0o7RKapEf/LDaM3w= +github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc h1:U9qPSI2PIWSS1VwoXQT9A3Wy9MM3WgvqSxFWenqJduM= github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= -github.com/denis-tingaikin/go-header v0.5.0 h1:SRdnP5ZKvcO9KKRP1KJrhFR3RrlGuD+42t4429eC9k8= -github.com/denis-tingaikin/go-header v0.5.0/go.mod h1:mMenU5bWrok6Wl2UsZjy+1okegmwQ3UgWl4V1D8gjlY= github.com/dustin/go-humanize v1.0.1 h1:GzkhY7T5VNhEkwH0PVJgjz+fX1rhBrR7pRT3mDkpeCY= github.com/dustin/go-humanize v1.0.1/go.mod h1:Mu1zIs6XwVuF/gI1OepvI0qD18qycQx+mFykh5fBlto= github.com/envoyproxy/go-control-plane v0.9.0/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= github.com/envoyproxy/go-control-plane v0.9.4/go.mod h1:6rpuAdCZL397s3pYoYcLgu1mIlRU8Am5FuJP05cCM98= github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c= -github.com/esimonov/ifshort v1.0.4 h1:6SID4yGWfRae/M7hkVDVVyppy8q/v9OuxNdmjLQStBA= -github.com/esimonov/ifshort v1.0.4/go.mod h1:Pe8zjlRrJ80+q2CxHLfEOfTwxCZ4O+MuhcHcfgNWTk0= -github.com/ettle/strcase v0.2.0 h1:fGNiVF21fHXpX1niBgk0aROov1LagYsOwV/xqKDKR/Q= -github.com/ettle/strcase v0.2.0/go.mod h1:DajmHElDSaX76ITe3/VHVyMin4LWSJN5Z909Wp+ED1A= github.com/fatih/color v1.16.0 h1:zmkK9Ngbjj+K0yRhTVONQh1p/HknKYSlNT+vZCzyokM= github.com/fatih/color v1.16.0/go.mod h1:fL2Sau1YI5c0pdGEVCbKQbLXB6edEj1ZgiY4NijnWvE= -github.com/fatih/structtag v1.2.0 
h1:/OdNE99OxoI/PqaW/SuSK9uxxT3f/tcSZgon/ssNSx4= -github.com/fatih/structtag v1.2.0/go.mod h1:mBJUNpUnHmRKrKlQQlmCrh5PuhftFbNv8Ys4/aAZl94= github.com/felixge/httpsnoop v1.0.4 h1:NFTV2Zj1bL4mc9sqWACXbQFVBBg2W3GPvqp8/ESS2Wg= github.com/felixge/httpsnoop v1.0.4/go.mod h1:m8KPJKqk1gH5J9DgRY2ASl2lWCfGKXixSwevea8zH2U= -github.com/firefart/nonamedreturns v1.0.4 h1:abzI1p7mAEPYuR4A+VLKn4eNDOycjYo2phmY9sfv40Y= -github.com/firefart/nonamedreturns v1.0.4/go.mod h1:TDhe/tjI1BXo48CmYbUduTV7BdIga8MAO/xbKdcVsGI= -github.com/frankban/quicktest v1.14.6 h1:7Xjx+VpznH+oBnejlPUj8oUpdxnVs4f8XU8WnHkI4W8= -github.com/frankban/quicktest v1.14.6/go.mod h1:4ptaffx2x8+WTWXmUCuVU6aPUX1/Mz7zb5vbUoiM6w0= github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo= github.com/fsnotify/fsnotify v1.4.9/go.mod h1:znqG4EE+3YCdAaPaxE2ZRY/06pZUdp0tY4IgpuI1SZQ= github.com/fsnotify/fsnotify v1.7.0 h1:8JEhPFa5W2WU7YfeZzPNqzMP6Lwt7L2715Ggo0nosvA= github.com/fsnotify/fsnotify v1.7.0/go.mod h1:40Bi/Hjc2AVfZrqy+aj+yEI+/bRxZnMJyTJwOpGvigM= -github.com/fzipp/gocyclo v0.6.0 h1:lsblElZG7d3ALtGMx9fmxeTKZaLLpU8mET09yN4BBLo= -github.com/fzipp/gocyclo v0.6.0/go.mod h1:rXPyn8fnlpa0R2csP/31uerbiVBugk5whMdlyaLkLoA= -github.com/ghostiam/protogetter v0.3.4 h1:5SZ+lZSNmNkSbGVSF9hUHhv/b7ELF9Rwchoq7btYo6c= -github.com/ghostiam/protogetter v0.3.4/go.mod h1:A0JgIhs0fgVnotGinjQiKaFVG3waItLJNwPmcMzDnvk= -github.com/go-critic/go-critic v0.11.2 h1:81xH/2muBphEgPtcwH1p6QD+KzXl2tMSi3hXjBSxDnM= -github.com/go-critic/go-critic v0.11.2/go.mod h1:OePaicfjsf+KPy33yq4gzv6CO7TEQ9Rom6ns1KsJnl8= -github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9AVAgeJqvqgH9Q5CA+iKCZ2gyEVpxRU= -github.com/go-gl/glfw/v3.3/glfw v0.0.0-20191125211704-12ad95a8df72/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= -github.com/go-gl/glfw/v3.3/glfw v0.0.0-20200222043503-6f7a984d4dc4/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= -github.com/go-kit/kit v0.8.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as= -github.com/go-kit/kit v0.9.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as= -github.com/go-kit/log v0.1.0/go.mod h1:zbhenjAZHb184qTLMA9ZjW7ThYL0H2mk7Q6pNt4vbaY= -github.com/go-logfmt/logfmt v0.3.0/go.mod h1:Qt1PoO58o5twSAckw1HlFXLmHsOX5/0LbT9GBnD5lWE= -github.com/go-logfmt/logfmt v0.4.0/go.mod h1:3RMwSq7FuexP4Kalkev3ejPJsZTpXXBr9+V4qmtdjCk= -github.com/go-logfmt/logfmt v0.5.0/go.mod h1:wCYkCAKZfumFQihp8CzCvQ3paCTfi41vtzG1KdI/P7A= github.com/go-logr/logr v1.2.2/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A= github.com/go-logr/logr v1.4.1 h1:pKouT5E8xu9zeFC39JXRDukb6JFQPXM5p5I91188VAQ= github.com/go-logr/logr v1.4.1/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY= github.com/go-logr/stdr v1.2.2 h1:hSWxHoqTgW2S2qGc0LTAI563KZ5YKYRhT3MFKZMbjag= github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre4VKE= -github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY= github.com/go-task/slim-sprig v0.0.0-20230315185526-52ccab3ef572 h1:tfuBGBXKqDEevZMzYi5KSi8KkcZtzBcTgAUUtapy0OI= github.com/go-task/slim-sprig v0.0.0-20230315185526-52ccab3ef572/go.mod h1:9Pwr4B2jHnOSGXyyzV8ROjYa2ojvAY6HCGYYfMoC3Ls= -github.com/go-toolsmith/astcast v1.1.0 h1:+JN9xZV1A+Re+95pgnMgDboWNVnIMMQXwfBwLRPgSC8= -github.com/go-toolsmith/astcast v1.1.0/go.mod h1:qdcuFWeGGS2xX5bLM/c3U9lewg7+Zu4mr+xPwZIB4ZU= -github.com/go-toolsmith/astcopy v1.1.0 h1:YGwBN0WM+ekI/6SS6+52zLDEf8Yvp3n2seZITCUBt5s= -github.com/go-toolsmith/astcopy v1.1.0/go.mod 
h1:hXM6gan18VA1T/daUEHCFcYiW8Ai1tIwIzHY6srfEAw= -github.com/go-toolsmith/astequal v1.0.3/go.mod h1:9Ai4UglvtR+4up+bAD4+hCj7iTo4m/OXVTSLnCyTAx4= -github.com/go-toolsmith/astequal v1.1.0/go.mod h1:sedf7VIdCL22LD8qIvv7Nn9MuWJruQA/ysswh64lffQ= -github.com/go-toolsmith/astequal v1.2.0 h1:3Fs3CYZ1k9Vo4FzFhwwewC3CHISHDnVUPC4x0bI2+Cw= -github.com/go-toolsmith/astequal v1.2.0/go.mod h1:c8NZ3+kSFtFY/8lPso4v8LuJjdJiUFVnSuU3s0qrrDY= -github.com/go-toolsmith/astfmt v1.1.0 h1:iJVPDPp6/7AaeLJEruMsBUlOYCmvg0MoCfJprsOmcco= -github.com/go-toolsmith/astfmt v1.1.0/go.mod h1:OrcLlRwu0CuiIBp/8b5PYF9ktGVZUjlNMV634mhwuQ4= -github.com/go-toolsmith/astp v1.1.0 h1:dXPuCl6u2llURjdPLLDxJeZInAeZ0/eZwFJmqZMnpQA= -github.com/go-toolsmith/astp v1.1.0/go.mod h1:0T1xFGz9hicKs8Z5MfAqSUitoUYS30pDMsRVIDHs8CA= -github.com/go-toolsmith/pkgload v1.2.2 h1:0CtmHq/02QhxcF7E9N5LIFcYFsMR5rdovfqTtRKkgIk= -github.com/go-toolsmith/pkgload v1.2.2/go.mod h1:R2hxLNRKuAsiXCo2i5J6ZQPhnPMOVtU+f0arbFPWCus= -github.com/go-toolsmith/strparse v1.0.0/go.mod h1:YI2nUKP9YGZnL/L1/DLFBfixrcjslWct4wyljWhSRy8= -github.com/go-toolsmith/strparse v1.1.0 h1:GAioeZUK9TGxnLS+qfdqNbA4z0SSm5zVNtCQiyP2Bvw= -github.com/go-toolsmith/strparse v1.1.0/go.mod h1:7ksGy58fsaQkGQlY8WVoBFNyEPMGuJin1rfoPS4lBSQ= -github.com/go-toolsmith/typep v1.1.0 h1:fIRYDyF+JywLfqzyhdiHzRop/GQDxxNhLGQ6gFUNHus= -github.com/go-toolsmith/typep v1.1.0/go.mod h1:fVIw+7zjdsMxDA3ITWnH1yOiw1rnTQKCsF/sk2H/qig= -github.com/go-viper/mapstructure/v2 v2.0.0-alpha.1 h1:TQcrn6Wq+sKGkpyPvppOz99zsMBaUOKXq6HSv655U1c= -github.com/go-viper/mapstructure/v2 v2.0.0-alpha.1/go.mod h1:oJDH3BJKyqBA2TXFhDsKDGDTlndYOZ6rGS0BRZIxGhM= -github.com/go-xmlfmt/xmlfmt v1.1.2 h1:Nea7b4icn8s57fTx1M5AI4qQT5HEM3rVUO8MuE6g80U= -github.com/go-xmlfmt/xmlfmt v1.1.2/go.mod h1:aUCEOzzezBEjDBbFBoSiya/gduyIiWYRP6CnSFIV8AM= -github.com/gobwas/glob v0.2.3 h1:A4xDbljILXROh+kObIiy5kIaPYD8e96x1tgBhUI5J+Y= -github.com/gobwas/glob v0.2.3/go.mod h1:d3Ez4x06l9bZtSvzIay5+Yzi0fmZzPgnTbPcKjJAkT8= -github.com/gofrs/flock v0.8.1 h1:+gYjHKf32LDeiEEFhQaotPbLuUXjY5ZqxKgXy7n59aw= -github.com/gofrs/flock v0.8.1/go.mod h1:F1TvTiK9OcQqauNUHlbJvyl9Qa1QvF/gOUDKA14jxHU= -github.com/gogo/protobuf v1.1.1/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ= github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q= -github.com/golang/groupcache v0.0.0-20190702054246-869f871628b6/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= -github.com/golang/groupcache v0.0.0-20191227052852-215e87163ea7/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da h1:oI5xCqsCo564l8iNU+DwB5epxmsaqB+rhGL0m5jtYqE= github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= -github.com/golang/mock v1.2.0/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= -github.com/golang/mock v1.3.1/go.mod h1:sBzyDLLjw3U8JLTeZvSv8jJB+tU5PVekmnlKIyFUx0Y= -github.com/golang/mock v1.4.0/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw= -github.com/golang/mock v1.4.1/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw= -github.com/golang/mock v1.4.3/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw= -github.com/golang/mock v1.4.4/go.mod h1:l3mdAwkq5BuhzHwde/uurv3sEJeZMXNpwsxVWU71h+4= github.com/golang/mock 
v1.6.0 h1:ErTB+efbowRARo13NNdxyJji2egdxLGQhRaY+DUumQc= github.com/golang/mock v1.6.0/go.mod h1:p6yTPP+5HYm5mzsMV8JkE6ZKdX+/wYM6Hr+LicevLPs= github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= -github.com/golang/protobuf v1.3.1/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= github.com/golang/protobuf v1.3.2/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= -github.com/golang/protobuf v1.3.3/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw= -github.com/golang/protobuf v1.3.4/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw= -github.com/golang/protobuf v1.3.5/go.mod h1:6O5/vntMXwX2lRkT1hjjk0nAC1IDOTvTlVgjlRvqsdk= github.com/golang/protobuf v1.4.0-rc.1/go.mod h1:ceaxUfeHdC40wWswd/P6IGgMaK3YpKi5j83Wpe3EHw8= github.com/golang/protobuf v1.4.0-rc.1.0.20200221234624-67d41d38c208/go.mod h1:xKAWHe0F5eneWXFV3EuXVDTCmh+JuBKY0li0aMyXATA= github.com/golang/protobuf v1.4.0-rc.2/go.mod h1:LlEzMj4AhA7rCAGe4KMBDvJI+AwstrUpVNzEA03Pprs= @@ -283,61 +101,21 @@ github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaS github.com/golang/protobuf v1.5.2/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY= github.com/golang/protobuf v1.5.4 h1:i7eJL8qZTpSEXOPTxNKhASYpMn+8e5Q6AdndVa1dWek= github.com/golang/protobuf v1.5.4/go.mod h1:lnTiLA8Wa4RWRcIUkrtSVa5nRhsEGBg48fD6rSs7xps= -github.com/golangci/check v0.0.0-20180506172741-cfe4005ccda2 h1:23T5iq8rbUYlhpt5DB4XJkc6BU31uODLD1o1gKvZmD0= -github.com/golangci/check v0.0.0-20180506172741-cfe4005ccda2/go.mod h1:k9Qvh+8juN+UKMCS/3jFtGICgW8O96FVaZsaxdzDkR4= -github.com/golangci/dupl v0.0.0-20180902072040-3e9179ac440a h1:w8hkcTqaFpzKqonE9uMCefW1WDie15eSP/4MssdenaM= -github.com/golangci/dupl v0.0.0-20180902072040-3e9179ac440a/go.mod h1:ryS0uhF+x9jgbj/N71xsEqODy9BN81/GonCZiOzirOk= -github.com/golangci/go-misc v0.0.0-20220329215616-d24fe342adfe h1:6RGUuS7EGotKx6J5HIP8ZtyMdiDscjMLfRBSPuzVVeo= -github.com/golangci/go-misc v0.0.0-20220329215616-d24fe342adfe/go.mod h1:gjqyPShc/m8pEMpk0a3SeagVb0kaqvhscv+i9jI5ZhQ= -github.com/golangci/gofmt v0.0.0-20231019111953-be8c47862aaa h1:L0Zq43Px2HrLroRKEgfCsQLMJUkjskJBB1kd1Zjcvvc= -github.com/golangci/gofmt v0.0.0-20231019111953-be8c47862aaa/go.mod h1:Pm5KhLPA8gSnQwrQ6ukebRcapGb/BG9iUkdaiCcGHJM= -github.com/golangci/golangci-lint v1.56.2 h1:dgQzlWHgNbCqJjuxRJhFEnHDVrrjuTGQHJ3RIZMpp/o= -github.com/golangci/golangci-lint v1.56.2/go.mod h1:7CfNO675+EY7j84jihO4iAqDQ80s3HCjcc5M6B7SlZQ= -github.com/golangci/lint-1 v0.0.0-20191013205115-297bf364a8e0 h1:MfyDlzVjl1hoaPzPD4Gpb/QgoRfSBR0jdhwGyAWwMSA= -github.com/golangci/lint-1 v0.0.0-20191013205115-297bf364a8e0/go.mod h1:66R6K6P6VWk9I95jvqGxkqJxVWGFy9XlDwLwVz1RCFg= -github.com/golangci/maligned v0.0.0-20180506175553-b1d89398deca h1:kNY3/svz5T29MYHubXix4aDDuE3RWHkPvopM/EDv/MA= -github.com/golangci/maligned v0.0.0-20180506175553-b1d89398deca/go.mod h1:tvlJhZqDe4LMs4ZHD0oMUlt9G2LWuDGoisJTBzLMV9o= -github.com/golangci/misspell v0.4.1 h1:+y73iSicVy2PqyX7kmUefHusENlrP9YwuHZHPLGQj/g= -github.com/golangci/misspell v0.4.1/go.mod h1:9mAN1quEo3DlpbaIKKyEvRxK1pwqR9s/Sea1bJCtlNI= -github.com/golangci/revgrep v0.5.2 h1:EndcWoRhcnfj2NHQ+28hyuXpLMF+dQmCN+YaeeIl4FU= -github.com/golangci/revgrep v0.5.2/go.mod h1:bjAMA+Sh/QUfTDcHzxfyHxr4xKvllVr/0sCv2e7jJHA= -github.com/golangci/unconvert v0.0.0-20240309020433-c5143eacb3ed h1:IURFTjxeTfNFP0hTEi1YKjB/ub8zkpaOqFFMApi2EAs= -github.com/golangci/unconvert v0.0.0-20240309020433-c5143eacb3ed/go.mod h1:XLXN8bNw4CGRPaqgl3bv/lhz7bsGPh4/xSaMTbo2vkQ= -github.com/google/btree 
v0.0.0-20180813153112-4030bb1f1f0c/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= -github.com/google/btree v1.0.0/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= github.com/google/btree v1.1.2 h1:xf4v41cLI2Z6FxbKm+8Bu+m8ifhj15JuZ9sa0jZCMUU= github.com/google/btree v1.1.2/go.mod h1:qOPhT0dTNdNzV6Z/lhRX0YXUafgPLFUh+gZMl761Gm4= github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M= github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= github.com/google/go-cmp v0.4.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= -github.com/google/go-cmp v0.4.1/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= -github.com/google/go-cmp v0.5.1/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= -github.com/google/go-cmp v0.5.2/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.3/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= -github.com/google/go-cmp v0.5.4/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= -github.com/google/go-cmp v0.5.6/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= -github.com/google/go-cmp v0.5.8/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI= github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= -github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= -github.com/google/martian v2.1.0+incompatible h1:/CP5g8u/VJHijgedC/Legn3BAbAaWPgecwXBIDzw5no= -github.com/google/martian v2.1.0+incompatible/go.mod h1:9I4somxYTbIHy5NJKHRl3wXiIaQGbYVAs8BPL6v8lEs= -github.com/google/martian/v3 v3.0.0/go.mod h1:y5Zk1BBys9G+gd6Jrk0W3cC1+ELVxBWuIGO+w/tUAp0= github.com/google/martian/v3 v3.3.2 h1:IqNFLAmvJOgVlpdEBiQbDc2EwKW77amAycfTuWKdfvw= github.com/google/martian/v3 v3.3.2/go.mod h1:oBOf6HBosgwRXnUGWUB05QECsc6uvmMiJ3+6W4l/CUk= -github.com/google/pprof v0.0.0-20181206194817-3ea8567a2e57/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc= -github.com/google/pprof v0.0.0-20190515194954-54271f7e092f/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc= -github.com/google/pprof v0.0.0-20191218002539-d4f498aebedc/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= -github.com/google/pprof v0.0.0-20200212024743-f11f1df84d12/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= -github.com/google/pprof v0.0.0-20200229191704-1ebb73c60ed3/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= -github.com/google/pprof v0.0.0-20200430221834-fc25d7d30c6d/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= -github.com/google/pprof v0.0.0-20200708004538-1a94d8640e99/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= github.com/google/pprof v0.0.0-20240227163752-401108e1b7e7 h1:y3N7Bm7Y9/CtpiVkw/ZWj6lSlDF3F74SfKwfTCer72Q= github.com/google/pprof v0.0.0-20240227163752-401108e1b7e7/go.mod h1:czg5+yv1E0ZGTi6S6vVK1mke0fV+FaUhNGcd6VRS9Ik= -github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI= github.com/google/s2a-go v0.1.7 h1:60BLSyTrOV4/haCDW4zb1guZItoSq8foHCXrAnjBo/o= github.com/google/s2a-go v0.1.7/go.mod h1:50CgR4k1jNlWBu4UfS4AcfhVe1r6pdZPygJ3R8F0Qdw= github.com/google/uuid v1.1.2/go.mod 
h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= @@ -345,157 +123,46 @@ github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0= github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/googleapis/enterprise-certificate-proxy v0.3.2 h1:Vie5ybvEvT75RniqhfFxPRy3Bf7vr3h0cechB90XaQs= github.com/googleapis/enterprise-certificate-proxy v0.3.2/go.mod h1:VLSiSSBs/ksPL8kq3OBOQ6WRI2QnaFynd1DCjZ62+V0= -github.com/googleapis/gax-go/v2 v2.0.4/go.mod h1:0Wqv26UfaUD9n4G6kQubkQ+KchISgw+vpHVxEJEs9eg= -github.com/googleapis/gax-go/v2 v2.0.5/go.mod h1:DWXyrwAJ9X0FpwwEdw+IPEYBICEFu5mhpdKc/us6bOk= github.com/googleapis/gax-go/v2 v2.12.2 h1:mhN09QQW1jEWeMF74zGR81R30z4VJzjZsfkUhuHF+DA= github.com/googleapis/gax-go/v2 v2.12.2/go.mod h1:61M8vcyyXR2kqKFxKrfA22jaA8JGF7Dc8App1U3H6jc= github.com/gopacket/gopacket v1.2.0 h1:eXbzFad7f73P1n2EJHQlsKuvIMJjVXK5tXoSca78I3A= github.com/gopacket/gopacket v1.2.0/go.mod h1:BrAKEy5EOGQ76LSqh7DMAr7z0NNPdczWm2GxCG7+I8M= -github.com/gordonklaus/ineffassign v0.1.0 h1:y2Gd/9I7MdY1oEIt+n+rowjBNDcLQq3RsH5hwJd0f9s= -github.com/gordonklaus/ineffassign v0.1.0/go.mod h1:Qcp2HIAYhR7mNUVSIxZww3Guk4it82ghYcEXIAk+QT0= -github.com/gostaticanalysis/analysisutil v0.7.1 h1:ZMCjoue3DtDWQ5WyU16YbjbQEQ3VuzwxALrpYd+HeKk= -github.com/gostaticanalysis/analysisutil v0.7.1/go.mod h1:v21E3hY37WKMGSnbsw2S/ojApNWb6C1//mXO48CXbVc= -github.com/gostaticanalysis/comment v1.4.1/go.mod h1:ih6ZxzTHLdadaiSnF5WY3dxUoXfXAlTaRzuaNDlSado= -github.com/gostaticanalysis/comment v1.4.2 h1:hlnx5+S2fY9Zo9ePo4AhgYsYHbM2+eAv8m/s1JiCd6Q= -github.com/gostaticanalysis/comment v1.4.2/go.mod h1:KLUTGDv6HOCotCH8h2erHKmpci2ZoR8VPu34YA2uzdM= -github.com/gostaticanalysis/forcetypeassert v0.1.0 h1:6eUflI3DiGusXGK6X7cCcIgVCpZ2CiZ1Q7jl6ZxNV70= -github.com/gostaticanalysis/forcetypeassert v0.1.0/go.mod h1:qZEedyP/sY1lTGV1uJ3VhWZ2mqag3IkWsDHVbplHXak= -github.com/gostaticanalysis/nilerr v0.1.1 h1:ThE+hJP0fEp4zWLkWHWcRyI2Od0p7DlgYG3Uqrmrcpk= -github.com/gostaticanalysis/nilerr v0.1.1/go.mod h1:wZYb6YI5YAxxq0i1+VJbY0s2YONW0HU0GPE3+5PWN4A= -github.com/gostaticanalysis/testutil v0.3.1-0.20210208050101-bfb5c8eec0e4/go.mod h1:D+FIZ+7OahH3ePw/izIEeH5I06eKs1IKI4Xr64/Am3M= -github.com/gostaticanalysis/testutil v0.4.0 h1:nhdCmubdmDF6VEatUNjgUZBJKWRqugoISdUv3PPQgHY= -github.com/gostaticanalysis/testutil v0.4.0/go.mod h1:bLIoPefWXrRi/ssLFWX1dx7Repi5x3CuviD3dgAZaBU= github.com/hashicorp/errwrap v1.0.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4= github.com/hashicorp/errwrap v1.1.0 h1:OxrOeh75EUXMY8TBjag2fzXGZ40LB6IKw45YeGUDY2I= github.com/hashicorp/errwrap v1.1.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4= github.com/hashicorp/go-multierror v1.1.1 h1:H5DkEtf6CXdFp0N0Em5UCwQpXMWke8IA0+lD48awMYo= github.com/hashicorp/go-multierror v1.1.1/go.mod h1:iw975J/qwKPdAO1clOe2L8331t/9/fmwbPZ6JB6eMoM= -github.com/hashicorp/go-version v1.2.1/go.mod h1:fltr4n8CU8Ke44wwGCBoEymUuxUHl09ZGVZPK5anwXA= -github.com/hashicorp/go-version v1.6.0 h1:feTTfFNnjP967rlCxM/I9g701jU+RN74YKx2mOkIeek= -github.com/hashicorp/go-version v1.6.0/go.mod h1:fltr4n8CU8Ke44wwGCBoEymUuxUHl09ZGVZPK5anwXA= -github.com/hashicorp/golang-lru v0.5.0/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= -github.com/hashicorp/golang-lru v0.5.1/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= -github.com/hashicorp/hcl v1.0.0 h1:0Anlzjpi4vEasTeNFn2mLJgTSwt0+6sfsiTG8qcWGx4= -github.com/hashicorp/hcl v1.0.0/go.mod h1:E5yfLk+7swimpb2L/Alb/PJmXilQ/rhwaUYs4T20WEQ= -github.com/hexops/gotextdiff v1.0.3 
h1:gitA9+qJrrTCsiCl7+kh75nPqQt1cx4ZkudSTLoUqJM= -github.com/hexops/gotextdiff v1.0.3/go.mod h1:pSWU5MAI3yDq+fZBTazCSJysOMbxWL1BSow5/V2vxeg= github.com/hpcloud/tail v1.0.0/go.mod h1:ab1qPbhIpdTxEkNHXyeSf5vhxWSCs/tWer42PpOxQnU= -github.com/ianlancetaylor/demangle v0.0.0-20181102032728-5e5cf60278f6/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= github.com/inconshreveable/mousetrap v1.1.0 h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2s0bqwp9tc8= github.com/inconshreveable/mousetrap v1.1.0/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw= github.com/jessevdk/go-flags v1.5.0 h1:1jKYvbxEjfUl0fmqTCOfonvskHHXMjBySTLW4y9LFvc= github.com/jessevdk/go-flags v1.5.0/go.mod h1:Fw0T6WPc1dYxT4mKEZRfG5kJhaTDP9pj1c2EWnYs/m4= -github.com/jgautheron/goconst v1.7.0 h1:cEqH+YBKLsECnRSd4F4TK5ri8t/aXtt/qoL0Ft252B0= -github.com/jgautheron/goconst v1.7.0/go.mod h1:aAosetZ5zaeC/2EfMeRswtxUFBpe2Hr7HzkgX4fanO4= -github.com/jingyugao/rowserrcheck v1.1.1 h1:zibz55j/MJtLsjP1OF4bSdgXxwL1b+Vn7Tjzq7gFzUs= -github.com/jingyugao/rowserrcheck v1.1.1/go.mod h1:4yvlZSDb3IyDTUZJUmpZfm2Hwok+Dtp+nu2qOq+er9c= -github.com/jirfag/go-printf-func-name v0.0.0-20200119135958-7558a9eaa5af h1:KA9BjwUk7KlCh6S9EAGWBt1oExIUv9WyNCiRz5amv48= -github.com/jirfag/go-printf-func-name v0.0.0-20200119135958-7558a9eaa5af/go.mod h1:HEWGJkRDzjJY2sqdDwxccsGicWEf9BQOZsq2tV+xzM0= -github.com/jjti/go-spancheck v0.5.3 h1:vfq4s2IB8T3HvbpiwDTYgVPj1Ze/ZSXrTtaZRTc7CuM= -github.com/jjti/go-spancheck v0.5.3/go.mod h1:eQdOX1k3T+nAKvZDyLC3Eby0La4dZ+I19iOl5NzSPFE= github.com/jmespath/go-jmespath v0.4.0 h1:BEgLn5cpjn8UN1mAw4NjwDrS35OdebyEtFe+9YPoQUg= github.com/jmespath/go-jmespath v0.4.0/go.mod h1:T8mJZnbsbmF+m6zOOFylbeCJqk5+pHWvzYPziyZiYoo= github.com/jmespath/go-jmespath/internal/testify v1.5.1 h1:shLQSRRSCCPj3f2gpwzGwWFoC7ycTf1rcQZHOlsJ6N8= github.com/jmespath/go-jmespath/internal/testify v1.5.1/go.mod h1:L3OGu8Wl2/fWfCI6z80xFu9LTZmf1ZRjMHUOPmWr69U= github.com/jpillora/backoff v1.0.0 h1:uvFg412JmmHBHw7iwprIxkPMI+sGQ4kzOWsMeHnm2EA= github.com/jpillora/backoff v1.0.0/go.mod h1:J/6gKK9jxlEcS3zixgDgUAsiuZ7yrSoa/FX5e0EB2j4= -github.com/json-iterator/go v1.1.6/go.mod h1:+SdeFBvtyEkXs7REEP0seUULqWtbJapLOCVDaaPEHmU= -github.com/json-iterator/go v1.1.10/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= -github.com/json-iterator/go v1.1.11/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= -github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHmT4TnhNGBo= -github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1:6v2b51hI/fHJwM22ozAgKL4VKDeJcHhJFhtBdhmNjmU= -github.com/jstemmer/go-junit-report v0.9.1/go.mod h1:Brl9GWCQeLvo8nXZwPNNblvFj/XSXhF0NWZEnDohbsk= -github.com/julienschmidt/httprouter v1.2.0/go.mod h1:SYymIcj16QtmaHHD7aYtjjsJG7VTCxuUUipMqKk8s4w= -github.com/julienschmidt/httprouter v1.3.0/go.mod h1:JR6WtHb+2LUe8TCKY3cZOxFyyO8IZAc4RVcycCCAKdM= -github.com/julz/importas v0.1.0 h1:F78HnrsjY3cR7j0etXy5+TU1Zuy7Xt08X/1aJnH5xXY= -github.com/julz/importas v0.1.0/go.mod h1:oSFU2R4XK/P7kNBrnL/FEQlDGN1/6WoxXEjSSXO0DV0= -github.com/kisielk/errcheck v1.7.0 h1:+SbscKmWJ5mOK/bO1zS60F5I9WwZDWOfRsC4RwfwRV0= -github.com/kisielk/errcheck v1.7.0/go.mod h1:1kLL+jV4e+CFfueBmI1dSK2ADDyQnlrnrY/FqKluHJQ= -github.com/kisielk/gotool v1.0.0 h1:AV2c/EiW3KqPNT9ZKl07ehoAGi4C5/01Cfbblndcapg= -github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= -github.com/kkHAIKE/contextcheck v1.1.4 h1:B6zAaLhOEEcjvUgIYEqystmnFk1Oemn8bvJhbt0GMb8= -github.com/kkHAIKE/contextcheck v1.1.4/go.mod 
h1:1+i/gWqokIa+dm31mqGLZhZJ7Uh44DJGZVmr6QRBNJg= -github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= -github.com/konsorten/go-windows-terminal-sequences v1.0.3/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= -github.com/kr/logfmt v0.0.0-20140226030751-b84e30acd515/go.mod h1:+0opPa2QZZtGFBFZlji/RkVcI2GknAs/DXo4wKdlNEc= -github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE= github.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk= -github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= github.com/kr/pty v1.1.8 h1:AkaSdXYQOWeaO3neb8EM634ahkXXe3jYbVh/F9lq+GI= github.com/kr/pty v1.1.8/go.mod h1:O1sed60cT9XZ5uDucP5qwvh+TE3NnUj51EiZO/lmSfw= -github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= -github.com/kulti/thelper v0.6.3 h1:ElhKf+AlItIu+xGnI990no4cE2+XaSu1ULymV2Yulxs= -github.com/kulti/thelper v0.6.3/go.mod h1:DsqKShOvP40epevkFrvIwkCMNYxMeTNjdWL4dqWHZ6I= -github.com/kunwardeep/paralleltest v1.0.10 h1:wrodoaKYzS2mdNVnc4/w31YaXFtsc21PCTdvWJ/lDDs= -github.com/kunwardeep/paralleltest v1.0.10/go.mod h1:2C7s65hONVqY7Q5Efj5aLzRCNLjw2h4eMc9EcypGjcY= -github.com/kyoh86/exportloopref v0.1.11 h1:1Z0bcmTypkL3Q4k+IDHMWTcnCliEZcaPiIe0/ymEyhQ= -github.com/kyoh86/exportloopref v0.1.11/go.mod h1:qkV4UF1zGl6EkF1ox8L5t9SwyeBAZ3qLMd6up458uqA= -github.com/ldez/gomoddirectives v0.2.3 h1:y7MBaisZVDYmKvt9/l1mjNCiSA1BVn34U0ObUcJwlhA= -github.com/ldez/gomoddirectives v0.2.3/go.mod h1:cpgBogWITnCfRq2qGoDkKMEVSaarhdBr6g8G04uz6d0= -github.com/ldez/tagliatelle v0.5.0 h1:epgfuYt9v0CG3fms0pEgIMNPuFf/LpPIfjk4kyqSioo= -github.com/ldez/tagliatelle v0.5.0/go.mod h1:rj1HmWiL1MiKQuOONhd09iySTEkUuE/8+5jtPYz9xa4= -github.com/leonklingele/grouper v1.1.1 h1:suWXRU57D4/Enn6pXR0QVqqWWrnJ9Osrz+5rjt8ivzU= -github.com/leonklingele/grouper v1.1.1/go.mod h1:uk3I3uDfi9B6PeUjsCKi6ndcf63Uy7snXgR4yDYQVDY= -github.com/lufeee/execinquery v1.2.1 h1:hf0Ems4SHcUGBxpGN7Jz78z1ppVkP/837ZlETPCEtOM= -github.com/lufeee/execinquery v1.2.1/go.mod h1:EC7DrEKView09ocscGHC+apXMIaorh4xqSxS/dy8SbM= -github.com/macabu/inamedparam v0.1.3 h1:2tk/phHkMlEL/1GNe/Yf6kkR/hkcUdAEY3L0hjYV1Mk= -github.com/macabu/inamedparam v0.1.3/go.mod h1:93FLICAIk/quk7eaPPQvbzihUdn/QkGDwIZEoLtpH6I= -github.com/magiconair/properties v1.8.7 h1:IeQXZAiQcpL9mgcAe1Nu6cX9LLw6ExEHKjN0VQdvPDY= -github.com/magiconair/properties v1.8.7/go.mod h1:Dhd985XPs7jluiymwWYZ0G4Z61jb3vdS329zhj2hYo0= -github.com/maratori/testableexamples v1.0.0 h1:dU5alXRrD8WKSjOUnmJZuzdxWOEQ57+7s93SLMxb2vI= -github.com/maratori/testableexamples v1.0.0/go.mod h1:4rhjL1n20TUTT4vdh3RDqSizKLyXp7K2u6HgraZCGzE= -github.com/maratori/testpackage v1.1.1 h1:S58XVV5AD7HADMmD0fNnziNHqKvSdDuEKdPD1rNTU04= -github.com/maratori/testpackage v1.1.1/go.mod h1:s4gRK/ym6AMrqpOa/kEbQTV4Q4jb7WeLZzVhVVVOQMc= -github.com/matoous/godox v0.0.0-20240105082147-c5b5e0e7c0c0 h1:Ny7cm4KSWceJLYyI1sm+aFIVDWSGXLcOJ0O0UaS5wdU= -github.com/matoous/godox v0.0.0-20240105082147-c5b5e0e7c0c0/go.mod h1:jgE/3fUXiTurkdHOLT5WEkThTSuE7yxHv5iWPa80afs= -github.com/matryer/is v1.4.0 h1:sosSmIWwkYITGrxZ25ULNDeKiMNzFSr4V/eqBQP0PeE= -github.com/matryer/is v1.4.0/go.mod h1:8I/i5uYgLzgsgEloJE1U6xx5HkBQpAZvepWuujKwMRU= github.com/mattn/go-colorable v0.1.13 
h1:fFA4WZxdEF4tXPZVKMLwD8oUnCTTo08duU7wxecdEvA= github.com/mattn/go-colorable v0.1.13/go.mod h1:7S9/ev0klgBDR4GtXTXX8a3vIGJpMovkB8vQcUbaXHg= github.com/mattn/go-isatty v0.0.16/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM= github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY= github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y= -github.com/mattn/go-runewidth v0.0.9/go.mod h1:H031xJmbD/WCDINGzjvQ9THkh0rPKHF+m2gUSrubnMI= github.com/mattn/go-runewidth v0.0.15 h1:UNAjwbU9l54TA3KzvqLGxwWjHmMgBUVhBiTjelZgg3U= github.com/mattn/go-runewidth v0.0.15/go.mod h1:Jdepj2loyihRzMpdS35Xk/zdY8IAYHsh153qUoGf23w= -github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5Ld7szi9bcBfOoFv/3dc6xSMkL2PC0= github.com/maxbrunsfeld/counterfeiter/v6 v6.8.1 h1:NicmruxkeqHjDv03SfSxqmaLuisddudfP3h5wdXFbhM= github.com/maxbrunsfeld/counterfeiter/v6 v6.8.1/go.mod h1:eyp4DdUJAKkr9tvxR3jWhw2mDK7CWABMG5r9uyaKC7I= -github.com/mbilski/exhaustivestruct v1.2.0 h1:wCBmUnSYufAHO6J4AVWY6ff+oxWxsVFrwgOdMUQePUo= -github.com/mbilski/exhaustivestruct v1.2.0/go.mod h1:OeTBVxQWoEmB2J2JCHmXWPJ0aksxSUOUy+nvtVEfzXc= -github.com/mgechev/revive v1.3.7 h1:502QY0vQGe9KtYJ9FpxMz9rL+Fc/P13CI5POL4uHCcE= -github.com/mgechev/revive v1.3.7/go.mod h1:RJ16jUbF0OWC3co/+XTxmFNgEpUPwnnA0BRllX2aDNA= -github.com/mitchellh/go-homedir v1.1.0 h1:lukF9ziXFxDFPkA1vsr5zpc1XuPDn/wFntq5mG+4E0Y= -github.com/mitchellh/go-homedir v1.1.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0= -github.com/mitchellh/mapstructure v1.5.0 h1:jeMsZIYE/09sWLaz43PL7Gy6RuMjD2eJVyuac5Z2hdY= -github.com/mitchellh/mapstructure v1.5.0/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo= -github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= -github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= -github.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0= -github.com/modern-go/reflect2 v1.0.1/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0= -github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk= -github.com/moricho/tparallel v0.3.1 h1:fQKD4U1wRMAYNngDonW5XupoB/ZGJHdpzrWqgyg9krA= -github.com/moricho/tparallel v0.3.1/go.mod h1:leENX2cUv7Sv2qDgdi0D0fCftN8fRC67Bcn8pqzeYNI= -github.com/mwitkow/go-conntrack v0.0.0-20161129095857-cc309e4a2223/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U= -github.com/mwitkow/go-conntrack v0.0.0-20190716064945-2f068394615f/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U= -github.com/nakabonne/nestif v0.3.1 h1:wm28nZjhQY5HyYPx+weN3Q65k6ilSBxDb8v5S81B81U= -github.com/nakabonne/nestif v0.3.1/go.mod h1:9EtoZochLn5iUprVDmDjqGKPofoUEBL8U4Ngq6aY7OE= -github.com/nishanths/exhaustive v0.12.0 h1:vIY9sALmw6T/yxiASewa4TQcFsVYZQQRUQJhKRf3Swg= -github.com/nishanths/exhaustive v0.12.0/go.mod h1:mEZ95wPIZW+x8kC4TgC+9YCUgiST7ecevsVDTgc2obs= -github.com/nishanths/predeclared v0.2.2 h1:V2EPdZPliZymNAn79T8RkNApBjMmVKh5XRpLm/w98Vk= -github.com/nishanths/predeclared v0.2.2/go.mod h1:RROzoN6TnGQupbC+lqggsOlcgysk3LMK/HI84Mp280c= github.com/nu7hatch/gouuid v0.0.0-20131221200532-179d4d0c4d8d h1:VhgPp6v9qf9Agr/56bj7Y/xa04UccTW04VP0Qed4vnQ= github.com/nu7hatch/gouuid v0.0.0-20131221200532-179d4d0c4d8d/go.mod h1:YUTz3bUH2ZwIWBy3CJBeOBEugqcmXREj14T+iG/4k4U= -github.com/nunnatsa/ginkgolinter v0.15.2 
h1:N2ORxUxPU56R9gsfLIlVVvCv/V/VVou5qVI1oBKBNHg= -github.com/nunnatsa/ginkgolinter v0.15.2/go.mod h1:oYxE7dt1vZI8cK2rZOs3RgTaBN2vggkqnENmoJ8kVvc= github.com/nxadm/tail v1.4.4/go.mod h1:kenIhsEOeOJmVchQTgglprH7qJGnHDVpk1VPCcaMI8A= github.com/nxadm/tail v1.4.11 h1:8feyoE3OzPrcshW5/MJ4sGESc5cqmGkGCWlco4l0bqY= github.com/nxadm/tail v1.4.11/go.mod h1:OTaG3NK980DZzxbRq6lEuzgU+mug70nY11sMd4JXXHc= -github.com/olekukonko/tablewriter v0.0.5 h1:P2Ga83D34wi1o9J6Wh1mRuqd4mF/x/lgBS7N7AbDhec= -github.com/olekukonko/tablewriter v0.0.5/go.mod h1:hPp6KlRPjbx+hW8ykQs1w3UBbZlj6HuIJcUGPhkA7kY= github.com/onsi/ginkgo v1.6.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE= github.com/onsi/ginkgo v1.12.1/go.mod h1:zj2OWP4+oCPe1qIXoGWkgMRwljMUYCdkwsT2108oapk= github.com/onsi/ginkgo v1.13.0/go.mod h1:+REjRxOmWfHCjfv9TTWB1jD1Frx4XydAD3zm1lskyM0= @@ -507,205 +174,52 @@ github.com/onsi/gomega v1.7.1/go.mod h1:XdKZgCCFLUoM/7CFJVPcG8C1xQ1AJ0vpAezJrB7J github.com/onsi/gomega v1.10.1/go.mod h1:iN09h71vgCQne3DLsj+A5owkum+a2tYe+TOCB1ybHNo= github.com/onsi/gomega v1.31.1 h1:KYppCUK+bUgAZwHOu7EXVBKyQA6ILvOESHkn/tgoqvo= github.com/onsi/gomega v1.31.1/go.mod h1:y40C95dwAD1Nz36SsEnxvfFe8FFfNxzI5eJ0EYGyAy0= -github.com/otiai10/copy v1.2.0/go.mod h1:rrF5dJ5F0t/EWSYODDu4j9/vEeYHMkc8jt0zJChqQWw= -github.com/otiai10/copy v1.14.0 h1:dCI/t1iTdYGtkvCuBG2BgR6KZa83PTclw4U5n2wAllU= -github.com/otiai10/copy v1.14.0/go.mod h1:ECfuL02W+/FkTWZWgQqXPWZgW9oeKCSQ5qVfSc4qc4w= -github.com/otiai10/curr v0.0.0-20150429015615-9b4961190c95/go.mod h1:9qAhocn7zKJG+0mI8eUu6xqkFDYS2kb2saOteoSB3cE= -github.com/otiai10/curr v1.0.0/go.mod h1:LskTG5wDwr8Rs+nNQ+1LlxRjAtTZZjtJW4rMXl6j4vs= -github.com/otiai10/mint v1.3.0/go.mod h1:F5AjcsTsWUqX+Na9fpHb52P8pcRX2CI6A3ctIT91xUo= -github.com/otiai10/mint v1.3.1/go.mod h1:/yxELlJQ0ufhjUwhshSj+wFjZ78CnZ48/1wtmBH1OTc= -github.com/pelletier/go-toml/v2 v2.1.1 h1:LWAJwfNvjQZCFIDKWYQaM62NcYeYViCmWIwmOStowAI= -github.com/pelletier/go-toml/v2 v2.1.1/go.mod h1:tJU2Z3ZkXwnxa4DPO899bsyIoywizdUvyaeZurnPPDc= github.com/peterbourgon/diskv v2.0.1+incompatible h1:UBdAOUP5p4RWqPBg048CAvpKN+vxiaj6gdUUzhl4XmI= github.com/peterbourgon/diskv v2.0.1+incompatible/go.mod h1:uqqh8zWWbv1HBMNONnaR/tNboyR3/BZd58JJSHlUSCU= github.com/pivotal-cf/paraphernalia v0.0.0-20180203224945-a64ae2051c20 h1:DR5eMfe2+6GzLkVyWytdtgUxgbPiOfvKDuqityTV3y8= github.com/pivotal-cf/paraphernalia v0.0.0-20180203224945-a64ae2051c20/go.mod h1:Y3IqE20LKprEpLkXb7gXinJf4vvDdQe/BS8E4kL/dgE= -github.com/pkg/errors v0.8.0/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= -github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= +github.com/pkg/diff v0.0.0-20210226163009-20ebb0f2a09e/go.mod h1:pJLUxLENpZxwdsKMEsNbx1VGcRFpLqf3715MtcvvzbA= github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4= github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 h1:Jamvg5psRIccs7FGNTlIRMkT8wgtp5eCXdBlqhYGL6U= github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= -github.com/polyfloyd/go-errorlint v1.4.8 h1:jiEjKDH33ouFktyez7sckv6pHWif9B7SuS8cutDXFHw= -github.com/polyfloyd/go-errorlint v1.4.8/go.mod h1:NNCxFcFjZcw3xNjVdCchERkEM6Oz7wta2XJVxRftwO4= -github.com/prometheus/client_golang v0.9.1/go.mod h1:7SWBe2y4D6OKWSNQJUaRYU/AaXPKyh/dDVn+NZz0KFw= -github.com/prometheus/client_golang 
v1.0.0/go.mod h1:db9x61etRT2tGnBNRi70OPL5FsnadC4Ky3P0J6CfImo= -github.com/prometheus/client_golang v1.7.1/go.mod h1:PY5Wy2awLA44sXw4AOSfFBetzPP4j5+D6mVACh+pe2M= -github.com/prometheus/client_golang v1.11.0/go.mod h1:Z6t4BnS23TR94PD6BsDNk8yVqroYurpAkEiz0P2BEV0= -github.com/prometheus/client_golang v1.12.1/go.mod h1:3Z9XVyYiZYEO+YQWt3RD2R3jrbd179Rt297l4aS6nDY= -github.com/prometheus/client_golang v1.19.0 h1:ygXvpU1AoN1MhdzckN+PyD9QJOSD4x7kmXYlnfbA6JU= -github.com/prometheus/client_golang v1.19.0/go.mod h1:ZRM9uEAypZakd+q/x7+gmsvXdURP+DABIEIjnmDdp+k= -github.com/prometheus/client_model v0.0.0-20180712105110-5c3871d89910/go.mod h1:MbSGuTsp3dbXC40dX6PRTWyKYBIrTGTE9sqQNg2J8bo= -github.com/prometheus/client_model v0.0.0-20190129233127-fd36f4220a90/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= -github.com/prometheus/client_model v0.2.0/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= -github.com/prometheus/client_model v0.6.0 h1:k1v3CzpSRUTrKMppY35TLwPvxHqBu0bYgxZzqGIgaos= -github.com/prometheus/client_model v0.6.0/go.mod h1:NTQHnmxFpouOD0DpvP4XujX3CdOAGQPoaGhyTchlyt8= -github.com/prometheus/common v0.4.1/go.mod h1:TNfzLD0ON7rHzMJeJkieUDPYmFC7Snx/y86RQel1bk4= -github.com/prometheus/common v0.10.0/go.mod h1:Tlit/dnDKsSWFlCLTWaA1cyBgKHSMdTB80sz/V91rCo= -github.com/prometheus/common v0.26.0/go.mod h1:M7rCNAaPfAosfx8veZJCuw84e35h3Cfd9VFqTh1DIvc= -github.com/prometheus/common v0.32.1/go.mod h1:vu+V0TpY+O6vW9J44gczi3Ap/oXXR10b+M/gUGO4Hls= -github.com/prometheus/common v0.50.0 h1:YSZE6aa9+luNa2da6/Tik0q0A5AbR+U003TItK57CPQ= -github.com/prometheus/common v0.50.0/go.mod h1:wHFBCEVWVmHMUpg7pYcOm2QUR/ocQdYSJVQJKnHc3xQ= -github.com/prometheus/procfs v0.0.0-20181005140218-185b4288413d/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk= -github.com/prometheus/procfs v0.0.2/go.mod h1:TjEm7ze935MbeOT/UhFTIMYKhuLP4wbCsTZCD3I8kEA= -github.com/prometheus/procfs v0.1.3/go.mod h1:lV6e/gmhEcM9IjHGsFOCxxuZ+z1YqCvr4OA4YeYWdaU= -github.com/prometheus/procfs v0.6.0/go.mod h1:cz+aTbrPOrUb4q7XlbU9ygM+/jj0fzG6c1xBZuNvfVA= -github.com/prometheus/procfs v0.7.3/go.mod h1:cz+aTbrPOrUb4q7XlbU9ygM+/jj0fzG6c1xBZuNvfVA= -github.com/prometheus/procfs v0.13.0 h1:GqzLlQyfsPbaEHaQkO7tbDlriv/4o5Hudv6OXHGKX7o= -github.com/prometheus/procfs v0.13.0/go.mod h1:cd4PFCR54QLnGKPaKGA6l+cfuNXtht43ZKY6tow0Y1g= -github.com/quasilyte/go-ruleguard v0.4.2 h1:htXcXDK6/rO12kiTHKfHuqR4kr3Y4M0J0rOL6CH/BYs= -github.com/quasilyte/go-ruleguard v0.4.2/go.mod h1:GJLgqsLeo4qgavUoL8JeGFNS7qcisx3awV/w9eWTmNI= -github.com/quasilyte/gogrep v0.5.0 h1:eTKODPXbI8ffJMN+W2aE0+oL0z/nh8/5eNdiO34SOAo= -github.com/quasilyte/gogrep v0.5.0/go.mod h1:Cm9lpz9NZjEoL1tgZ2OgeUKPIxL1meE7eo60Z6Sk+Ng= -github.com/quasilyte/regex/syntax v0.0.0-20210819130434-b3f0c404a727 h1:TCg2WBOl980XxGFEZSS6KlBGIV0diGdySzxATTWoqaU= -github.com/quasilyte/regex/syntax v0.0.0-20210819130434-b3f0c404a727/go.mod h1:rlzQ04UMyJXu/aOvhd8qT+hvDrFpiwqp8MRXDY9szc0= -github.com/quasilyte/stdinfo v0.0.0-20220114132959-f7386bf02567 h1:M8mH9eK4OUR4lu7Gd+PU1fV2/qnDNfzT635KRSObncs= -github.com/quasilyte/stdinfo v0.0.0-20220114132959-f7386bf02567/go.mod h1:DWNGW8A4Y+GyBgPuaQJuWiy0XYftx4Xm/y5Jqk9I6VQ= github.com/rivo/uniseg v0.2.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc= github.com/rivo/uniseg v0.4.7 h1:WUdvkW8uEhrYfLC4ZzdpI2ztxP1I582+49Oc5Mq64VQ= github.com/rivo/uniseg v0.4.7/go.mod h1:FN3SvrM+Zdj16jyLfmOkMNblXMcoc8DfTHruCPUcx88= -github.com/rogpeppe/go-internal 
v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= +github.com/rogpeppe/go-internal v1.9.0/go.mod h1:WtVeX8xhTBvf0smdhujwtBcq4Qrzq/fJaraNFVN+nFs= github.com/rogpeppe/go-internal v1.12.0 h1:exVL4IDcn6na9z1rAb56Vxr+CgyK3nn3O+epU5NdKM8= github.com/rogpeppe/go-internal v1.12.0/go.mod h1:E+RYuTGaKKdloAfM02xzb0FW3Paa99yedzYV+kq4uf4= github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= -github.com/ryancurrah/gomodguard v1.3.0 h1:q15RT/pd6UggBXVBuLps8BXRvl5GPBcwVA7BJHMLuTw= -github.com/ryancurrah/gomodguard v1.3.0/go.mod h1:ggBxb3luypPEzqVtq33ee7YSN35V28XeGnid8dnni50= -github.com/ryanrolds/sqlclosecheck v0.5.1 h1:dibWW826u0P8jNLsLN+En7+RqWWTYrjCB9fJfSfdyCU= -github.com/ryanrolds/sqlclosecheck v0.5.1/go.mod h1:2g3dUjoS6AL4huFdv6wn55WpLIDjY7ZgUR4J8HOO/XQ= -github.com/sagikazarmark/locafero v0.4.0 h1:HApY1R9zGo4DBgr7dqsTH/JJxLTTsOt7u6keLGt6kNQ= -github.com/sagikazarmark/locafero v0.4.0/go.mod h1:Pe1W6UlPYUk/+wc/6KFhbORCfqzgYEpgQ3O5fPuL3H4= -github.com/sagikazarmark/slog-shim v0.1.0 h1:diDBnUNK9N/354PgrxMywXnAwEr1QZcOr6gto+ugjYE= -github.com/sagikazarmark/slog-shim v0.1.0/go.mod h1:SrcSrq8aKtyuqEI1uvTDTK1arOWRIczQRv+GVI1AkeQ= -github.com/sanposhiho/wastedassign/v2 v2.0.7 h1:J+6nrY4VW+gC9xFzUc+XjPD3g3wF3je/NsJFwFK7Uxc= -github.com/sanposhiho/wastedassign/v2 v2.0.7/go.mod h1:KyZ0MWTwxxBmfwn33zh3k1dmsbF2ud9pAAGfoLfjhtI= -github.com/sashamelentyev/interfacebloat v1.1.0 h1:xdRdJp0irL086OyW1H/RTZTr1h/tMEOsumirXcOJqAw= -github.com/sashamelentyev/interfacebloat v1.1.0/go.mod h1:+Y9yU5YdTkrNvoX0xHc84dxiN1iBi9+G8zZIhPVoNjQ= -github.com/sashamelentyev/usestdlibvars v1.25.0 h1:IK8SI2QyFzy/2OD2PYnhy84dpfNo9qADrRt6LH8vSzU= -github.com/sashamelentyev/usestdlibvars v1.25.0/go.mod h1:9nl0jgOfHKWNFS43Ojw0i7aRoS4j6EBye3YBhmAIRF8= github.com/sclevine/agouti v3.0.0+incompatible/go.mod h1:b4WX9W9L1sfQKXeJf1mUTLZKJ48R1S7H23Ji7oFO5Bw= github.com/sclevine/spec v1.4.0 h1:z/Q9idDcay5m5irkZ28M7PtQM4aOISzOpj4bUPkDee8= github.com/sclevine/spec v1.4.0/go.mod h1:LvpgJaFyvQzRvc1kaDs0bulYwzC70PbiYjC4QnFHkOM= -github.com/securego/gosec/v2 v2.19.0 h1:gl5xMkOI0/E6Hxx0XCY2XujA3V7SNSefA8sC+3f1gnk= -github.com/securego/gosec/v2 v2.19.0/go.mod h1:hOkDcHz9J/XIgIlPDXalxjeVYsHxoWUc5zJSHxcB8YM= -github.com/shazow/go-diff v0.0.0-20160112020656-b6b7b6733b8c h1:W65qqJCIOVP4jpqPQ0YvHYKwcMEMVWIzWC5iNQQfBTU= -github.com/shazow/go-diff v0.0.0-20160112020656-b6b7b6733b8c/go.mod h1:/PevMnwAxekIXwN8qQyfc5gl2NlkB3CQlkizAbOkeBs= -github.com/shurcooL/go v0.0.0-20180423040247-9e1955d9fb6e/go.mod h1:TDJrrUr11Vxrven61rcy3hJMUqaf/CLWYhHNPmT14Lk= -github.com/shurcooL/go-goon v0.0.0-20170922171312-37c2f522c041/go.mod h1:N5mDOmsrJOB+vfqUK+7DmDyjhSLIIBnXo9lvZJj3MWQ= -github.com/sirupsen/logrus v1.2.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPxbbu5VWo= -github.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6MwdIuYE2rE= -github.com/sirupsen/logrus v1.6.0/go.mod h1:7uNnSEd1DgxDLC74fIahvMZmmYsHGZGEOFrfsX/uA88= -github.com/sirupsen/logrus v1.9.3 h1:dueUQJ1C2q9oE3F7wvmSGAaVtTmUizReu6fjN8uqzbQ= -github.com/sirupsen/logrus v1.9.3/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ= -github.com/sivchari/containedctx v1.0.3 h1:x+etemjbsh2fB5ewm5FeLNi5bUjK0V8n0RB+Wwfd0XE= -github.com/sivchari/containedctx v1.0.3/go.mod h1:c1RDvCbnJLtH4lLcYD/GqwiBSSf4F5Qk0xld2rBqzJ4= -github.com/sivchari/nosnakecase v1.7.0 h1:7QkpWIRMe8x25gckkFd2A5Pi6Ymo0qgr4JrhGt95do8= -github.com/sivchari/nosnakecase v1.7.0/go.mod h1:CwDzrzPea40/GB6uynrNLiorAlgFRvRbFSgJx2Gs+QY= -github.com/sivchari/tenv v1.7.1 
h1:PSpuD4bu6fSmtWMxSGWcvqUUgIn7k3yOJhOIzVWn8Ak= -github.com/sivchari/tenv v1.7.1/go.mod h1:64yStXKSOxDfX47NlhVwND4dHwfZDdbp2Lyl018Icvg= -github.com/sonatard/noctx v0.0.2 h1:L7Dz4De2zDQhW8S0t+KUjY0MAQJd6SgVwhzNIc4ok00= -github.com/sonatard/noctx v0.0.2/go.mod h1:kzFz+CzWSjQ2OzIm46uJZoXuBpa2+0y3T36U18dWqIo= -github.com/sourcegraph/conc v0.3.0 h1:OQTbbt6P72L20UqAkXXuLOj79LfEanQ+YQFNpLA9ySo= -github.com/sourcegraph/conc v0.3.0/go.mod h1:Sdozi7LEKbFPqYX2/J+iBAM6HpqSLTASQIKqDmF7Mt0= -github.com/sourcegraph/go-diff v0.7.0 h1:9uLlrd5T46OXs5qpp8L/MTltk0zikUGi0sNNyCpA8G0= -github.com/sourcegraph/go-diff v0.7.0/go.mod h1:iBszgVvyxdc8SFZ7gm69go2KDdt3ag071iBaWPF6cjs= -github.com/spf13/afero v1.11.0 h1:WJQKhtpdm3v2IzqG8VMqrr6Rf3UYpEF239Jy9wNepM8= -github.com/spf13/afero v1.11.0/go.mod h1:GH9Y3pIexgf1MTIWtNGyogA5MwRIDXGUr+hbWNoBjkY= -github.com/spf13/cast v1.6.0 h1:GEiTHELF+vaR5dhz3VqZfFSzZjYbgeKDpBxQVS4GYJ0= -github.com/spf13/cast v1.6.0/go.mod h1:ancEpBxwJDODSW/UG4rDrAqiKolqNNh2DX3mk86cAdo= github.com/spf13/cobra v1.8.0 h1:7aJaZx1B85qltLMc546zn58BxxfZdR/W22ej9CFoEf0= github.com/spf13/cobra v1.8.0/go.mod h1:WXLWApfZ71AjXPya3WOlMsY9yMs7YeiHhFVlvLyhcho= github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA= github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg= -github.com/spf13/viper v1.18.2 h1:LUXCnvUvSM6FXAsj6nnfc8Q2tp1dIgUfY9Kc8GsSOiQ= -github.com/spf13/viper v1.18.2/go.mod h1:EKmWIqdnk5lOcmR72yw6hS+8OPYcwD0jteitLMVB+yk= github.com/square/certstrap v1.3.0 h1:N9P0ZRA+DjT8pq5fGDj0z3FjafRKnBDypP0QHpMlaAk= github.com/square/certstrap v1.3.0/go.mod h1:wGZo9eE1B7WX2GKBn0htJ+B3OuRl2UsdCFySNooy9hU= -github.com/ssgreg/nlreturn/v2 v2.2.1 h1:X4XDI7jstt3ySqGU86YGAURbxw3oTDPK9sPEi6YEwQ0= -github.com/ssgreg/nlreturn/v2 v2.2.1/go.mod h1:E/iiPB78hV7Szg2YfRgyIrk1AD6JVMTRkkxBiELzh2I= -github.com/stbenjam/no-sprintf-host-port v0.1.1 h1:tYugd/yrm1O0dV+ThCbaKZh195Dfm07ysF0U6JQXczc= -github.com/stbenjam/no-sprintf-host-port v0.1.1/go.mod h1:TLhvtIvONRzdmkFiio4O8LHsN9N74I+PhRquPsxpL0I= github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= -github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw= github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo= -github.com/stretchr/objx v0.5.2 h1:xuMeJ0Sdp5ZMRXx/aWO6RZxdr3beISkG5/G/aIRr3pY= -github.com/stretchr/objx v0.5.2/go.mod h1:FRsXN1f5AsAjCGJKqEizvkpNtU+EGNCLh3NxZ/8L+MA= -github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= -github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= -github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4= -github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA= github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= -github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU= github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4= -github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo= github.com/stretchr/testify v1.9.0 h1:HtqpIVDClZ4nwg75+f6Lvsy/wHu+3BoSGCbBAcpTsTg= 
github.com/stretchr/testify v1.9.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= -github.com/subosito/gotenv v1.6.0 h1:9NlTDc1FTs4qu0DDq7AEtTPNw6SVm7uBMsUCUjABIf8= -github.com/subosito/gotenv v1.6.0/go.mod h1:Dk4QP5c2W3ibzajGcXpNraDfq2IrhjMIvMSWPKKo0FU= -github.com/t-yuki/gocover-cobertura v0.0.0-20180217150009-aaee18c8195c h1:+aPplBwWcHBo6q9xrfWdMrT9o4kltkmmvpemgIjep/8= -github.com/t-yuki/gocover-cobertura v0.0.0-20180217150009-aaee18c8195c/go.mod h1:SbErYREK7xXdsRiigaQiQkI9McGRzYMvlKYaP3Nimdk= -github.com/tdakkota/asciicheck v0.2.0 h1:o8jvnUANo0qXtnslk2d3nMKTFNlOnJjRrNcj0j9qkHM= -github.com/tdakkota/asciicheck v0.2.0/go.mod h1:Qb7Y9EgjCLJGup51gDHFzbI08/gbGhL/UVhYIPWG2rg= github.com/tedsuo/ifrit v0.0.0-20230330192023-5cba443a66c4 h1:MGZzzxBuPuK4J0XQo+0uy0NnXQGKzHXhYp5oG1Wy860= github.com/tedsuo/ifrit v0.0.0-20230330192023-5cba443a66c4/go.mod h1:eyZnKCc955uh98WQvzOm0dgAeLnf2O0Rz0LPoC5ze+0= -github.com/tenntenn/modver v1.0.1 h1:2klLppGhDgzJrScMpkj9Ujy3rXPUspSjAcev9tSEBgA= -github.com/tenntenn/modver v1.0.1/go.mod h1:bePIyQPb7UeioSRkw3Q0XeMhYZSMx9B8ePqg6SAMGH0= -github.com/tenntenn/text/transform v0.0.0-20200319021203-7eef512accb3 h1:f+jULpRQGxTSkNYKJ51yaw6ChIqO+Je8UqsTKN/cDag= -github.com/tenntenn/text/transform v0.0.0-20200319021203-7eef512accb3/go.mod h1:ON8b8w4BN/kE1EOhwT0o+d62W65a6aPw1nouo9LMgyY= -github.com/tetafro/godot v1.4.16 h1:4ChfhveiNLk4NveAZ9Pu2AN8QZ2nkUGFuadM9lrr5D0= -github.com/tetafro/godot v1.4.16/go.mod h1:2oVxTBSftRTh4+MVfUaUXR6bn2GDXCaMcOG4Dk3rfio= -github.com/timakin/bodyclose v0.0.0-20240125160201-f835fa56326a h1:A6uKudFIfAEpoPdaal3aSqGxBzLyU8TqyXImLwo6dIo= -github.com/timakin/bodyclose v0.0.0-20240125160201-f835fa56326a/go.mod h1:mkjARE7Yr8qU23YcGMSALbIxTQ9r9QBVahQOBRfU460= -github.com/timonwong/loggercheck v0.9.4 h1:HKKhqrjcVj8sxL7K77beXh0adEm6DLjV/QOGeMXEVi4= -github.com/timonwong/loggercheck v0.9.4/go.mod h1:caz4zlPcgvpEkXgVnAJGowHAMW2NwHaNlpS8xDbVhTg= -github.com/tomarrell/wrapcheck/v2 v2.8.3 h1:5ov+Cbhlgi7s/a42BprYoxsr73CbdMUTzE3bRDFASUs= -github.com/tomarrell/wrapcheck/v2 v2.8.3/go.mod h1:g9vNIyhb5/9TQgumxQyOEqDHsmGYcGsVMOx/xGkqdMo= -github.com/tommy-muehle/go-mnd/v2 v2.5.1 h1:NowYhSdyE/1zwK9QCLeRb6USWdoif80Ie+v+yU8u1Zw= -github.com/tommy-muehle/go-mnd/v2 v2.5.1/go.mod h1:WsUAkMJMYww6l/ufffCD3m+P7LEvr8TnZn9lwVDlgzw= -github.com/ultraware/funlen v0.1.0 h1:BuqclbkY6pO+cvxoq7OsktIXZpgBSkYTQtmwhAK81vI= -github.com/ultraware/funlen v0.1.0/go.mod h1:XJqmOQja6DpxarLj6Jj1U7JuoS8PvL4nEqDaQhy22p4= -github.com/ultraware/whitespace v0.1.0 h1:O1HKYoh0kIeqE8sFqZf1o0qbORXUCOQFrlaQyZsczZw= -github.com/ultraware/whitespace v0.1.0/go.mod h1:/se4r3beMFNmewJ4Xmz0nMQ941GJt+qmSHGP9emHYe0= -github.com/uudashr/gocognit v1.1.2 h1:l6BAEKJqQH2UpKAPKdMfZf5kE4W/2xk8pfU1OVLvniI= -github.com/uudashr/gocognit v1.1.2/go.mod h1:aAVdLURqcanke8h3vg35BC++eseDm66Z7KmchI5et4k= github.com/vishvananda/netlink v1.1.0 h1:1iyaYNBLmP6L0220aDnYQpo1QEV4t4hJ+xEEhhJH8j0= github.com/vishvananda/netlink v1.1.0/go.mod h1:cTgwzPIzzgDAYoQrMm0EdrjRUBkTqKYppBueQtXaqoE= github.com/vishvananda/netns v0.0.0-20211101163701-50045581ed74 h1:gga7acRE695APm9hlsSMoOoE65U4/TcqNj90mc69Rlg= github.com/vishvananda/netns v0.0.0-20211101163701-50045581ed74/go.mod h1:DD4vA1DwXk04H54A1oHXtwZmA0grkVMdPxx/VGLCah0= github.com/vito/go-interact v1.0.1 h1:O8xi8c93bRUv2Tb/v6HdiuGc+WnWt+AQzF74MOOdlBs= github.com/vito/go-interact v1.0.1/go.mod h1:HrdHSJXD2yn1MhlTwSIMeFgQ5WftiIorszVGd3S/DAA= -github.com/xen0n/gosmopolitan v1.2.2 h1:/p2KTnMzwRexIW8GlKawsTWOxn7UHA+jCMF/V8HHtvU= -github.com/xen0n/gosmopolitan v1.2.2/go.mod 
h1:7XX7Mj61uLYrj0qmeN0zi7XDon9JRAEhYQqAPLVNTeg= -github.com/yagipy/maintidx v1.0.0 h1:h5NvIsCz+nRDapQ0exNv4aJ0yXSI0420omVANTv3GJM= -github.com/yagipy/maintidx v1.0.0/go.mod h1:0qNf/I/CCZXSMhsRsrEPDZ+DkekpKLXAJfsTACwgXLk= -github.com/yeya24/promlinter v0.2.0 h1:xFKDQ82orCU5jQujdaD8stOHiv8UN68BSdn2a8u8Y3o= -github.com/yeya24/promlinter v0.2.0/go.mod h1:u54lkmBOZrpEbQQ6gox2zWKKLKu2SGe+2KOiextY+IA= -github.com/ykadowak/zerologlint v0.1.5 h1:Gy/fMz1dFQN9JZTPjv1hxEk+sRWm05row04Yoolgdiw= -github.com/ykadowak/zerologlint v0.1.5/go.mod h1:KaUskqF3e/v59oPmdq1U1DnKcuHokl2/K1U4pmIELKg= -github.com/yuin/goldmark v1.1.25/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= -github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= -github.com/yuin/goldmark v1.1.32/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= -github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.3.5/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k= -github.com/yuin/goldmark v1.4.1/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k= github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= -gitlab.com/bosi/decorder v0.4.1 h1:VdsdfxhstabyhZovHafFw+9eJ6eU0d2CkFNJcZz/NU4= -gitlab.com/bosi/decorder v0.4.1/go.mod h1:jecSqWUew6Yle1pCr2eLWTensJMmsxHsBwt+PVbkAqA= -go-simpler.org/assert v0.7.0 h1:OzWWZqfNxt8cLS+MlUp6Tgk1HjPkmgdKBq9qvy8lZsA= -go-simpler.org/assert v0.7.0/go.mod h1:74Eqh5eI6vCK6Y5l3PI8ZYFXG4Sa+tkr70OIPJAUr28= -go-simpler.org/musttag v0.9.0 h1:Dzt6/tyP9ONr5g9h9P3cnYWCxeBFRkd0uJL/w+1Mxos= -go-simpler.org/musttag v0.9.0/go.mod h1:gA9nThnalvNSKpEoyp3Ko4/vCX2xTpqKoUtNqXOnVR4= -go-simpler.org/sloglint v0.4.0 h1:UVJuUJo63iNQNFEOtZ6o1xAgagVg/giVLLvG9nNLobI= -go-simpler.org/sloglint v0.4.0/go.mod h1:v6zJ++j/thFPhefs2wEXoCKwT10yo5nkBDYRCXyqgNQ= -go.opencensus.io v0.21.0/go.mod h1:mSImk1erAIZhrmZN+AvHh14ztQfjbGwt4TtuofqLduU= -go.opencensus.io v0.22.0/go.mod h1:+kGneAE2xo2IficOXnaByMWTGM9T73dGwxeWcUqIpI8= -go.opencensus.io v0.22.2/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= -go.opencensus.io v0.22.3/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= -go.opencensus.io v0.22.4/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= go.opencensus.io v0.24.0 h1:y73uSU6J157QMP2kn2r30vwW1A2W2WFwSCGnAVxeaD0= go.opencensus.io v0.24.0/go.mod h1:vNK8G9p7aAivkbmorf4v+7Hgx+Zs0yY+0fOtgBfjQKo= go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.49.0 h1:4Pp6oUg3+e/6M4C0A/3kJ2VYa++dsWVTtGgLVj5xtHg= @@ -722,296 +236,87 @@ go.opentelemetry.io/otel/trace v1.24.0 h1:CsKnnL4dUAr/0llH9FKuc698G04IrpWV0MQA/Y go.opentelemetry.io/otel/trace v1.24.0/go.mod h1:HPc3Xr/cOApsBI154IU0OI0HJexz+aw5uPdbs3UCjNU= go.step.sm/crypto v0.43.1 h1:18Z/M49SnFDPXvFbfoN/ugE1i0J7phLWARhSQs/XSDI= go.step.sm/crypto v0.43.1/go.mod h1:9n90D/SWjH1hTyQn1hgviUGyK8YRv743S8UZHYbt4BU= -go.uber.org/goleak v1.3.0 h1:2K3zAYmnTNqV73imy9J1T3WC+gmCePx2hEGkimedGto= -go.uber.org/goleak v1.3.0/go.mod h1:CoHD4mav9JJNrW/WLlf7HGZPjdw8EucARQHekz1X6bE= -go.uber.org/multierr v1.11.0 h1:blXXJkSxSSfBVBlC76pxqeO+LN3aDfLQo+309xJstO0= -go.uber.org/multierr v1.11.0/go.mod h1:20+QtiLqy0Nd6FdQB9TLXag12DsQkrbs3htMFfDN80Y= -go.uber.org/zap v1.27.0 h1:aJMhYGrd5QSmlpLMr2MftRKl7t8J8PTZPA732ud/XR8= -go.uber.org/zap v1.27.0/go.mod h1:GB2qFLM7cTU87MWRP2mPIjqfIDnGu+VIO4V/SdhGo2E= -golang.org/x/crypto v0.0.0-20180904163835-0709b304e793/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod 
h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= -golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= -golang.org/x/crypto v0.0.0-20190605123033-f99c8df09eb5/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= -golang.org/x/crypto v0.13.0/go.mod h1:y6Z2r+Rw4iayiXXAIxJIDAJ1zMW4yaTpebo8fPOliYc= -golang.org/x/crypto v0.14.0/go.mod h1:MVFd36DqK4CsrnJYDkBA3VC4m2GkXAM0PvzMCn4JQf4= golang.org/x/crypto v0.21.0 h1:X31++rzVUdKhX5sWmSOFZxx8UW/ldWx55cbf08iNAMA= golang.org/x/crypto v0.21.0/go.mod h1:0BP7YvVV9gBbVKyeTG0Gyn+gZm94bibOW5BjDEYAOMs= golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= -golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= -golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8= -golang.org/x/exp v0.0.0-20190829153037-c13cbed26979/go.mod h1:86+5VVa7VpoJ4kLfm080zCjGlMRFzhUhsZKEZO7MGek= -golang.org/x/exp v0.0.0-20191030013958-a1ab85dbe136/go.mod h1:JXzH8nQsPlswgeRAPE3MuO9GYsAcnJvJ4vnMwN/5qkY= -golang.org/x/exp v0.0.0-20191129062945-2f5052295587/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4= -golang.org/x/exp v0.0.0-20191227195350-da58074b4299/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4= -golang.org/x/exp v0.0.0-20200119233911-0405dc783f0a/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4= -golang.org/x/exp v0.0.0-20200207192155-f17229e696bd/go.mod h1:J/WKrq2StrnmMY6+EHIKF9dgMWnmCNThgcyBT1FY9mM= -golang.org/x/exp v0.0.0-20200224162631-6cc2880d07d6/go.mod h1:3jZMyOhIsHpP37uCMkUooju7aAi5cS1Q23tOzKc+0MU= -golang.org/x/exp v0.0.0-20240222234643-814bf88cf225 h1:LfspQV/FYTatPTr/3HzIcmiUFH7PGP+OQ6mgDYo3yuQ= -golang.org/x/exp v0.0.0-20240222234643-814bf88cf225/go.mod h1:CxmFvTBINI24O/j8iY7H1xHzx2i4OsyguNBmN/uPtqc= -golang.org/x/exp/typeparams v0.0.0-20220428152302-39d4317da171/go.mod h1:AbB0pIl9nAr9wVwH+Z2ZpaocVmF5I4GyWCDIsVjR0bk= -golang.org/x/exp/typeparams v0.0.0-20230203172020-98cc5a0785f9/go.mod h1:AbB0pIl9nAr9wVwH+Z2ZpaocVmF5I4GyWCDIsVjR0bk= -golang.org/x/exp/typeparams v0.0.0-20240222234643-814bf88cf225 h1:BzKNaIRXh1bD+1557OcFIHlpYBiVbK4zEyn8zBHi1SE= -golang.org/x/exp/typeparams v0.0.0-20240222234643-814bf88cf225/go.mod h1:AbB0pIl9nAr9wVwH+Z2ZpaocVmF5I4GyWCDIsVjR0bk= -golang.org/x/image v0.0.0-20190227222117-0694c2d4d067/go.mod h1:kZ7UVZpmo3dzQBMxlp+ypCbDeSB+sBbTgSJuh5dn5js= -golang.org/x/image v0.0.0-20190802002840-cff245a6509b/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0= golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= golang.org/x/lint v0.0.0-20190227174305-5b3e6a55c961/go.mod h1:wehouNa3lNwaWXcvxsM5YxQ5yQlVC4a0KAMCusXpPoU= -golang.org/x/lint v0.0.0-20190301231843-5614ed5bae6f/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= golang.org/x/lint v0.0.0-20190313153728-d0100b6bd8b3/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= -golang.org/x/lint v0.0.0-20190409202823-959b441ac422/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= -golang.org/x/lint v0.0.0-20190909230951-414d861bb4ac/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= 
-golang.org/x/lint v0.0.0-20190930215403-16217165b5de/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= -golang.org/x/lint v0.0.0-20191125180803-fdd1cda4f05f/go.mod h1:5qLYkcX4OjUUV8bRuDixDT3tpyyb+LUpUlRWLxfhWrs= -golang.org/x/lint v0.0.0-20200130185559-910be7a94367/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= -golang.org/x/lint v0.0.0-20200302205851-738671d3881b/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= -golang.org/x/mobile v0.0.0-20190312151609-d3739f865fa6/go.mod h1:z+o9i4GpDbdi3rU15maQ/Ox0txvL9dWGYEHz965HBQE= -golang.org/x/mobile v0.0.0-20190719004257-d2bd2a29d028/go.mod h1:E/iHnbuqvinMTCcRqshq8CkpyQDoeVncDDYHnLhea+o= -golang.org/x/mod v0.0.0-20190513183733-4bf6d317e70e/go.mod h1:mXi4GBBbnImb6dmsKGUJ2LatrhH/nqhxcFungHvyanc= -golang.org/x/mod v0.1.0/go.mod h1:0QHyrYULN0/3qlju5TqG8bIK38QM8yzMo5ekMj3DlcY= -golang.org/x/mod v0.1.1-0.20191105210325-c90efee705ee/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg= -golang.org/x/mod v0.1.1-0.20191107180719-034126e5016b/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg= -golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= -golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= -golang.org/x/mod v0.4.1/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.4.2/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= -golang.org/x/mod v0.5.1/go.mod h1:5OXOZSfqPIIbmVBIIKWRFfZjPR0E5r58TLhUjH0a2Ro= -golang.org/x/mod v0.6.0-dev.0.20220106191415-9b9b3d81d5e3/go.mod h1:3p9vT2HGsQu2K1YbXdKPJLVgG5VJdoTa1poYQBtP1AY= golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4= -golang.org/x/mod v0.7.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= -golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= -golang.org/x/mod v0.12.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= -golang.org/x/mod v0.13.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c= golang.org/x/mod v0.16.0 h1:QX4fJ0Rr5cPQCF7O9lh9Se4pmwfwskqZfq5moyldzic= golang.org/x/mod v0.16.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c= golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180906233101-161cd47e91fd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= -golang.org/x/net v0.0.0-20181114220301-adae6a3d119a/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= -golang.org/x/net v0.0.0-20190108225652-1e06a53dbb7e/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20190213061140-3a22650c66bd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= -golang.org/x/net v0.0.0-20190501004415-9ce7a6920f09/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= -golang.org/x/net v0.0.0-20190503192946-f4e77d36d62c/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= -golang.org/x/net v0.0.0-20190603091049-60506f45cf65/go.mod h1:HSz+uSET+XFnRR8LxR5pz3Of3rY3CfYBVs4xY44aLks= -golang.org/x/net v0.0.0-20190613194153-d28f0bde5980/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod 
h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20190628185345-da137c7871d7/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20190724013045-ca1201d0de80/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20191209160850-c0dbc17a3553/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20200114155413-6afb5195e5aa/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20200202094626-16171245cfb2/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20200222125558-5a598a2470a0/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20200226121028-0de0cce0169b/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20200301022130-244492dfa37a/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20200324143707-d3edc9973b7e/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= -golang.org/x/net v0.0.0-20200501053045-e0ff5e5a1de5/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= -golang.org/x/net v0.0.0-20200506145744-7e3656a0809f/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= -golang.org/x/net v0.0.0-20200513185701-a91f0712d120/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= golang.org/x/net v0.0.0-20200520004742-59133d7f0dd7/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= -golang.org/x/net v0.0.0-20200520182314-0ba52f642ac2/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= -golang.org/x/net v0.0.0-20200625001655-4c5254603344/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= -golang.org/x/net v0.0.0-20200707034311-ab3426394381/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= -golang.org/x/net v0.0.0-20200822124328-c89045814202/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= -golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= golang.org/x/net v0.0.0-20201110031124-69a78807bb2b/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= golang.org/x/net v0.0.0-20210405180319-a5a99cb37ef4/go.mod h1:p54w0d4576C0XHj96bSt6lcn1PtDYWL6XObtHCRCNQM= -golang.org/x/net v0.0.0-20210525063256-abc453219eb5/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= -golang.org/x/net v0.0.0-20211015210444-4f30a5c0130f/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= -golang.org/x/net v0.2.0/go.mod h1:KqCZLdyyvdV855qA2rE3GC2aiw5xGR5TEjj8smXukLY= -golang.org/x/net v0.5.0/go.mod h1:DivGGAXEgPSlEBzxGzZI+ZLohi+xUj054jfeKui00ws= -golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs= -golang.org/x/net v0.10.0/go.mod h1:0qNGK6F8kojg2nk9dLZ2mShWaEBan6FAoqfSigmmuDg= -golang.org/x/net v0.15.0/go.mod h1:idbUs1IY1+zTqbi8yxTbhexhEEk5ur9LInksu6HrEpk= -golang.org/x/net v0.16.0/go.mod h1:NxSsAGuq816PNPmqtQdLE42eU2Fs7NoRIZrHJAlaCOE= golang.org/x/net v0.22.0 h1:9sGLhx7iRIHEiX0oAJ3MRZMUCElJgy7Br1nO+AMN3Tc= golang.org/x/net v0.22.0/go.mod h1:JKghWKKOSdJwpW2GEx0Ja7fmaKnMsbu+MWVZTokSYmg= golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= -golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= -golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod 
h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= -golang.org/x/oauth2 v0.0.0-20191202225959-858c2ad4c8b6/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= -golang.org/x/oauth2 v0.0.0-20200107190931-bf48bf16ab8d/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= -golang.org/x/oauth2 v0.0.0-20210514164344-f6687ab2804c/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= golang.org/x/oauth2 v0.18.0 h1:09qnuIAgzdx1XplqJvW6CQqMCtGZykZWcXzPMPUusvI= golang.org/x/oauth2 v0.18.0/go.mod h1:Wf7knwG0MPoWIMMBgFlEaSUDaKskp0dCfrlJRJXbBi8= golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20190227155943-e225da77a7e6/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20200317015054-43a5402ce75a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20200625203802-6e8e738ad208/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20201207232520-09787c993a3a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.3.0/go.mod h1:FU7BRWz2tNW+3quACPkgCx/L+uEAv1htQ0V83Z9Rj+Y= -golang.org/x/sync v0.4.0/go.mod h1:FU7BRWz2tNW+3quACPkgCx/L+uEAv1htQ0V83Z9Rj+Y= golang.org/x/sync v0.6.0 h1:5BMeUDZ7vkXGfEr1x9B4bRcTH4lpkTkpdh0T/J+qjbQ= golang.org/x/sync v0.6.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= -golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20180909124046-d0be0721c37e/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= -golang.org/x/sys v0.0.0-20181116152217-5ac8a444bdc5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= -golang.org/x/sys v0.0.0-20190312061237-fead79001313/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190422165155-953cdadca894/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190502145724-3ef323f4f1fd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190507160741-ecd444e8653b/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190606165138-5da285871e9c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190624142023-c5567b49c5d0/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys 
v0.0.0-20190726091711-fc99dfbffb4e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190904154756-749cb33beabd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20191001151750-bb3f8db39f24/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191005200804-aed5e4c7ecf9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191120155948-bd437916bb0e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20191204072324-ce4227a45e2e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20191228213918-04cbcbbfeed8/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200106162015-b016eb3dc98e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200113162924-86b910548bc1/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200122134326-e047566fdf82/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200202164722-d101bd2416d5/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200212091648-12a6c2dcc1e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200223170610-d5e6a3e2c0ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200302150141-5c8b2ff67527/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200331124033-c3d80250170d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200501052902-10377860bb8e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200511232937-7e40ca221e25/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200515095857-1151b9dac4a9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200519105757-fe76b779f299/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200523222454-059865788121/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200615200032-f1bc736245b1/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200625212154-ddb9806d33ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200803210538-64077c9b5642/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210119212857-b64e53b001e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210124154548-22da62e12c0c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210320140829-1e4c9ba3b0c4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210330210617-4fbd30eecc44/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210510120138-977fb7262007/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20210603081109-ebe580a85c40/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys 
v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20211019181941-9d821ace8654/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20211105183446-c75c47738b0c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20220114195835-da31bd327af9/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20220702020025-31831981b65f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.2.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.4.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.12.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.13.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.18.0 h1:DBdB3niSjOA/O0blCZBqDefyWNYveAYMNF1Wum0DYQ4= golang.org/x/sys v0.18.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= golang.org/x/term v0.0.0-20220411215600-e5f449aeb171/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= -golang.org/x/term v0.2.0/go.mod h1:TVmDHMZPmdnySmBfhjOoOdhjzdE1h4u1VwSiw2l1Nuc= -golang.org/x/term v0.4.0/go.mod h1:9P2UbLfCdcvo3p/nzKvsmas4TnlujnuoV9hGgYzW1lQ= -golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k= -golang.org/x/term v0.8.0/go.mod h1:xPskH00ivmX89bAKVGSKKtLOWNx2+17Eiy94tnKShWo= -golang.org/x/term v0.12.0/go.mod h1:owVbMEjm3cBLCHdkQu9b1opXd4ETQWc3BhuQGKgXgvU= -golang.org/x/term v0.13.0/go.mod h1:LTmsnFJwVN6bCy1rVCoS+qHT1HhALEFxKncY3WNNh4U= golang.org/x/term v0.18.0 h1:FcHjZXDMxI8mM3nwhX9HlKop4C0YQvCVCdwYl2wOtE8= golang.org/x/term v0.18.0/go.mod h1:ILwASektA3OnRv7amZ1xhE/KTR+u50pbXfZ03+6Nx58= -golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= -golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk= golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= -golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= golang.org/x/text v0.3.8/go.mod h1:E6s5w1FMmriuDzIBO73fBruAKo1PCIq6d2Q6DHfQ8WQ= -golang.org/x/text v0.4.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= -golang.org/x/text v0.6.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= -golang.org/x/text v0.7.0/go.mod 
h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= -golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8= -golang.org/x/text v0.13.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE= golang.org/x/text v0.14.0 h1:ScX5w1eTa3QqT8oi6+ziP7dTV1S2+ALU0bI+0zXKWiQ= golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU= -golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= -golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= -golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.5.0 h1:o7cqy6amK/52YcAKIPlM3a+Fpj35zvRj2TP+e1xFSfk= golang.org/x/time v0.5.0/go.mod h1:3BpzKBy/shNhVucY/MWOyx10tF3SFh9QdLuxbVysPQM= golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3HoIrodX9oNMXvdceNzlUR8zjMvY= golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= -golang.org/x/tools v0.0.0-20190312151545-0bb0c0a6e846/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= -golang.org/x/tools v0.0.0-20190312170243-e65039ee4138/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= -golang.org/x/tools v0.0.0-20190321232350-e250d351ecad/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= -golang.org/x/tools v0.0.0-20190425150028-36563e24a262/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= -golang.org/x/tools v0.0.0-20190506145303-2d16b83fe98c/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= golang.org/x/tools v0.0.0-20190524140312-2c0ae7006135/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= -golang.org/x/tools v0.0.0-20190606124116-d0a3d012864b/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= -golang.org/x/tools v0.0.0-20190621195816-6e04913cbbac/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= -golang.org/x/tools v0.0.0-20190628153133-6cdbf07be9d0/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= -golang.org/x/tools v0.0.0-20190816200558-6889da9d5479/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20190911174233-4f2ddba30aff/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20191012152004-8de300cfc20a/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20191108193012-7d206e10da11/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20191113191852-77e3bb0ad9e7/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20191115202509-3a792d9c32b2/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20191125144606-a911d9008d1f/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20191130070609-6e064ea0cf2d/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20191216173652-a0e659d51361/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= -golang.org/x/tools v0.0.0-20191227053925-7b8e75db28f4/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= -golang.org/x/tools v0.0.0-20200117161641-43d50277825c/go.mod 
h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= -golang.org/x/tools v0.0.0-20200122220014-bf1340f18c4a/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= -golang.org/x/tools v0.0.0-20200130002326-2f3ba24bd6e7/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= -golang.org/x/tools v0.0.0-20200204074204-1cc6d1ef6c74/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= -golang.org/x/tools v0.0.0-20200207183749-b753a1ba74fa/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= -golang.org/x/tools v0.0.0-20200212150539-ea181f53ac56/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= -golang.org/x/tools v0.0.0-20200224181240-023911ca70b2/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= -golang.org/x/tools v0.0.0-20200227222343-706bc42d1f0d/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= -golang.org/x/tools v0.0.0-20200304193943-95d2e580d8eb/go.mod h1:o4KQGtdN14AW+yjsvvwRTJJuXz8XRtIHtEnmAXLyFUw= -golang.org/x/tools v0.0.0-20200312045724-11d5b4c81c7d/go.mod h1:o4KQGtdN14AW+yjsvvwRTJJuXz8XRtIHtEnmAXLyFUw= -golang.org/x/tools v0.0.0-20200324003944-a576cf524670/go.mod h1:Sl4aGygMT6LrqrWclx+PTx3U+LnKx/seiNR+3G19Ar8= -golang.org/x/tools v0.0.0-20200329025819-fd4102a86c65/go.mod h1:Sl4aGygMT6LrqrWclx+PTx3U+LnKx/seiNR+3G19Ar8= -golang.org/x/tools v0.0.0-20200331025713-a30bf2db82d4/go.mod h1:Sl4aGygMT6LrqrWclx+PTx3U+LnKx/seiNR+3G19Ar8= -golang.org/x/tools v0.0.0-20200501065659-ab2804fb9c9d/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= -golang.org/x/tools v0.0.0-20200512131952-2bc93b1c0c88/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= -golang.org/x/tools v0.0.0-20200515010526-7d3b6ebf133d/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= -golang.org/x/tools v0.0.0-20200618134242-20370b0cb4b2/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= -golang.org/x/tools v0.0.0-20200724022722-7017fd6b1305/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= -golang.org/x/tools v0.0.0-20200729194436-6467de6f59a7/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= -golang.org/x/tools v0.0.0-20200804011535-6c149bb5ef0d/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= -golang.org/x/tools v0.0.0-20200820010801-b793a1359eac/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= -golang.org/x/tools v0.0.0-20200825202427-b303f430e36d/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= -golang.org/x/tools v0.0.0-20201001104356-43ebab892c4c/go.mod h1:z6u4i615ZeAfBE4XtMziQW1fSVJXACjjbWkB/mvPzlU= -golang.org/x/tools v0.0.0-20201023174141-c8cfbd0f21e6/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= -golang.org/x/tools v0.1.0/go.mod h1:xkSsbof2nBLbhDlRMhhhyNLN/zl3eTqcnHD5viDpcZ0= -golang.org/x/tools v0.1.1-0.20210205202024-ef80cdb6ec6d/go.mod h1:9bzcO0MWcOuT0tm1iBGzDVPshzfwoVvREIui8C+MHqU= -golang.org/x/tools v0.1.1-0.20210302220138-2ac05c832e1a/go.mod h1:9bzcO0MWcOuT0tm1iBGzDVPshzfwoVvREIui8C+MHqU= golang.org/x/tools v0.1.1/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= -golang.org/x/tools v0.1.5/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= -golang.org/x/tools v0.1.9/go.mod h1:nABZi5QlRsZVlzPpHl034qft6wpY4eDcsTt5AaioBiU= -golang.org/x/tools v0.1.10/go.mod h1:Uh6Zz+xoGYZom868N8YTex3t7RhtHDBrE8Gzo9bV56E= -golang.org/x/tools v0.1.11/go.mod h1:SgwaegtQh8clINPpECJMqnxLv9I09HLqnW3RMqW0CA4= golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc= -golang.org/x/tools v0.3.0/go.mod h1:/rWhSS2+zyEVwoJf8YAX6L2f0ntZ7Kn/mGgAWcipA5k= -golang.org/x/tools v0.5.0/go.mod h1:N+Kgy78s5I24c24dU8OfWNEotWjutIs8SnJvn5IDq+k= 
-golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU= -golang.org/x/tools v0.13.0/go.mod h1:HvlwmtVNQAhOuCjW7xxvovg8wbNq7LwfXh/k7wXUl58= -golang.org/x/tools v0.14.0/go.mod h1:uYBEerGOWcJyEORxN+Ek8+TT266gXkNlHdJBwexUsBg= golang.org/x/tools v0.19.0 h1:tfGCXNR1OsFG+sVdLAitlpjAvD/I6dHDKnYrpEZUHkw= golang.org/x/tools v0.19.0/go.mod h1:qoJWxmGSIBmAeriMx19ogtrEPrGtDbPK634QFIcLAhc= golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= @@ -1020,61 +325,15 @@ golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8T golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20231012003039-104605ab7028 h1:+cNy6SZtPcJQH3LJVLOSmiC7MMxXNOb3PU/VUEz+EhU= golang.org/x/xerrors v0.0.0-20231012003039-104605ab7028/go.mod h1:NDW/Ps6MPRej6fsCIbMTohpP40sJ/P/vI1MoTEGwX90= -google.golang.org/api v0.4.0/go.mod h1:8k5glujaEP+g9n7WNsDg8QP6cUVNI86fCNMcbazEtwE= -google.golang.org/api v0.7.0/go.mod h1:WtwebWUNSVBH/HAw79HIFXZNqEvBhG+Ra+ax0hx3E3M= -google.golang.org/api v0.8.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg= -google.golang.org/api v0.9.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg= -google.golang.org/api v0.13.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI= -google.golang.org/api v0.14.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI= -google.golang.org/api v0.15.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI= -google.golang.org/api v0.17.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= -google.golang.org/api v0.18.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= -google.golang.org/api v0.19.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= -google.golang.org/api v0.20.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= -google.golang.org/api v0.22.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= -google.golang.org/api v0.24.0/go.mod h1:lIXQywCXRcnZPGlsd8NbLnOjtAoL6em04bJ9+z0MncE= -google.golang.org/api v0.28.0/go.mod h1:lIXQywCXRcnZPGlsd8NbLnOjtAoL6em04bJ9+z0MncE= -google.golang.org/api v0.29.0/go.mod h1:Lcubydp8VUV7KeIHD9z2Bys/sm/vGKnG1UHuDBSrHWM= -google.golang.org/api v0.30.0/go.mod h1:QGmEvQ87FHZNiUVJkT14jQNYJ4ZJjdRF23ZXz5138Fc= google.golang.org/api v0.169.0 h1:QwWPy71FgMWqJN/l6jVlFHUa29a7dcUy02I8o799nPY= google.golang.org/api v0.169.0/go.mod h1:gpNOiMA2tZ4mf5R9Iwf4rK/Dcz0fbdIgWYWVoxmsyLg= google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= -google.golang.org/appengine v1.5.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= -google.golang.org/appengine v1.6.1/go.mod h1:i06prIuMbXzDqacNJfV5OdTW448YApPu5ww/cMBSeb0= -google.golang.org/appengine v1.6.5/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= -google.golang.org/appengine v1.6.6/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= google.golang.org/appengine v1.6.8 h1:IhEN5q69dyKagZPYMSdIjS2HqprW324FRQZJcGqPAsM= google.golang.org/appengine v1.6.8/go.mod h1:1jJ3jBArFh5pcgW8gCtRJnepW8FzD1V44FJffLiz/Ds= google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc= -google.golang.org/genproto v0.0.0-20190307195333-5fe7a883aa19/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= -google.golang.org/genproto v0.0.0-20190418145605-e7d98fc518a7/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= 
-google.golang.org/genproto v0.0.0-20190425155659-357c62f0e4bb/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= -google.golang.org/genproto v0.0.0-20190502173448-54afdca5d873/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= -google.golang.org/genproto v0.0.0-20190801165951-fa694d86fc64/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc= google.golang.org/genproto v0.0.0-20190819201941-24fa4b261c55/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc= -google.golang.org/genproto v0.0.0-20190911173649-1774047e7e51/go.mod h1:IbNlFCBrqXvoKpeg0TB2l7cyZUmoaFKYIwrEpbDKLA8= -google.golang.org/genproto v0.0.0-20191108220845-16a3f7862a1a/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= -google.golang.org/genproto v0.0.0-20191115194625-c23dd37a84c9/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= -google.golang.org/genproto v0.0.0-20191216164720-4f79533eabd1/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= -google.golang.org/genproto v0.0.0-20191230161307-f3c370f40bfb/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= -google.golang.org/genproto v0.0.0-20200115191322-ca5a22157cba/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= -google.golang.org/genproto v0.0.0-20200122232147-0452cf42e150/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= -google.golang.org/genproto v0.0.0-20200204135345-fa8e72b47b90/go.mod h1:GmwEX6Z4W5gMy59cAlVYjN9JhxgbQH6Gn+gFDQe2lzA= -google.golang.org/genproto v0.0.0-20200212174721-66ed5ce911ce/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= -google.golang.org/genproto v0.0.0-20200224152610-e50cd9704f63/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= -google.golang.org/genproto v0.0.0-20200228133532-8c2c7df3a383/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= -google.golang.org/genproto v0.0.0-20200305110556-506484158171/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= -google.golang.org/genproto v0.0.0-20200312145019-da6875a35672/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= -google.golang.org/genproto v0.0.0-20200331122359-1ee6d9798940/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= -google.golang.org/genproto v0.0.0-20200430143042-b979b6f78d84/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= -google.golang.org/genproto v0.0.0-20200511104702-f5ebc3bea380/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= -google.golang.org/genproto v0.0.0-20200515170657-fc4c6c6a6587/go.mod h1:YsZOwe1myG/8QRHRsmBRE1LrgQY60beZKjly0O1fX9U= google.golang.org/genproto v0.0.0-20200526211855-cb27e3aa2013/go.mod h1:NbSheEEYHJ7i3ixzK3sjbqSGDJWnxyFXZblF3eUsNvo= -google.golang.org/genproto v0.0.0-20200618031413-b414f8b61790/go.mod h1:jDfRM7FcilCzHH/e9qn6dsT145K34l5v+OpcnNgKAAA= -google.golang.org/genproto v0.0.0-20200729003335-053ba62fc06f/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto v0.0.0-20200804131852-c06518451d9c/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto v0.0.0-20200825200019-8632dd797987/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= google.golang.org/genproto v0.0.0-20240308144416-29370a3891b7 h1:5cmXPmmYZddhZs05mvqVzGwPsoE/uq+1YBCeRmBDyMo= google.golang.org/genproto v0.0.0-20240308144416-29370a3891b7/go.mod h1:yA7a1bW1kwl459Ol0m0lV4hLTfrL/7Bkk4Mj2Ir1mWI= google.golang.org/genproto/googleapis/api v0.0.0-20240308144416-29370a3891b7 h1:bITUotW/BD35GhBwrwGexWa8/P5CKHXACICrmuFJBa8= @@ -1082,17 +341,9 @@ google.golang.org/genproto/googleapis/api v0.0.0-20240308144416-29370a3891b7/go. 
google.golang.org/genproto/googleapis/rpc v0.0.0-20240308144416-29370a3891b7 h1:em/y72n4XlYRtayY/cVj6pnVzHa//BDA1BdoO+z9mdE= google.golang.org/genproto/googleapis/rpc v0.0.0-20240308144416-29370a3891b7/go.mod h1:UCOku4NytXMJuLQE5VuqA5lX3PcHCBo8pxNyvkf4xBs= google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c= -google.golang.org/grpc v1.20.1/go.mod h1:10oTOabMzJvdu6/UiuZezV6QK5dSlG84ov/aaiqXj38= -google.golang.org/grpc v1.21.1/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM= google.golang.org/grpc v1.23.0/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg= google.golang.org/grpc v1.25.1/go.mod h1:c3i+UQWmh7LiEpx4sFZnkU36qjEYZ0imhYfXVyQciAY= -google.golang.org/grpc v1.26.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= google.golang.org/grpc v1.27.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= -google.golang.org/grpc v1.27.1/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= -google.golang.org/grpc v1.28.0/go.mod h1:rpkK4SK4GF4Ach/+MFLZUBavHOvF2JJB5uozKKal+60= -google.golang.org/grpc v1.29.1/go.mod h1:itym6AZVZYACWQqET3MqgPpjcuV5QH3BxFS3IjizoKk= -google.golang.org/grpc v1.30.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= -google.golang.org/grpc v1.31.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= google.golang.org/grpc v1.33.2/go.mod h1:JMHMWHQWaTccqQQlmk3MJZS+GWXOdAesneDmEnv2fbc= google.golang.org/grpc v1.62.1 h1:B4n+nfKzOICUXMgyrNd19h/I9oH0L1pizfk1d4zSgTk= google.golang.org/grpc v1.62.1/go.mod h1:IWTG0VlJLCh1SkC58F7np9ka9mx/WNkjl4PGJaiq+QE= @@ -1104,28 +355,18 @@ google.golang.org/protobuf v1.21.0/go.mod h1:47Nbq4nVaFHyn7ilMalzfO3qCViNmqZ2kzi google.golang.org/protobuf v1.22.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= google.golang.org/protobuf v1.23.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= google.golang.org/protobuf v1.23.1-0.20200526195155-81db48ad09cc/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= -google.golang.org/protobuf v1.24.0/go.mod h1:r/3tXBNzIEhYS9I1OUVjXDlt8tc493IdKGjtUeSXeh4= google.golang.org/protobuf v1.25.0/go.mod h1:9JNX74DMeImyA3h4bdi1ymwjUzf21/xIlbajtzgsN7c= google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw= google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= google.golang.org/protobuf v1.33.0 h1:uNO2rsAINq/JlFpSdYEKIZ0uKD/R9cpdv0T+yoGwGmI= google.golang.org/protobuf v1.33.0/go.mod h1:c6P6GXX6sHbq/GpV6MGZEdwhWPcYBgnhAHhKbcUYpos= -gopkg.in/alecthomas/kingpin.v2 v2.2.6/go.mod h1:FMv+mEhP44yOT+4EoQTLFTRgOQ1FBLkstjWtayDeSgw= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= -gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= -gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk= gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q= -gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI= gopkg.in/fsnotify.v1 v1.4.7/go.mod h1:Tz8NjZHkW78fSQdbUxIjBTcgA1z1m8ZHf0WmKUhAMys= -gopkg.in/ini.v1 v1.67.0 h1:Dgnx+6+nfE+IfzjUEISNeydPJh9AXNNsWbGP9KzCsOA= -gopkg.in/ini.v1 v1.67.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k= gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7 h1:uRGJdciOHaEIrze2W8Q3AKkepLTh2hOroT7a+7czfdQ= gopkg.in/tomb.v1 
v1.0.0-20141024135613-dd632973f1e7/go.mod h1:dt/ZhP58zS4L8KSrWDmTeBkI65Dw0HsyUHuEVlX15mw= -gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= -gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.2.4/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= -gopkg.in/yaml.v2 v2.2.5/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.3.0/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY= @@ -1134,22 +375,4 @@ gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= -honnef.co/go/tools v0.0.0-20190106161140-3f1c8253044a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= -honnef.co/go/tools v0.0.0-20190418001031-e561f6794a2a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= honnef.co/go/tools v0.0.0-20190523083050-ea95bdfd59fc/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= -honnef.co/go/tools v0.0.1-2019.2.3/go.mod h1:a3bituU0lyd329TUQxRnasdCoJDkEUEAqEt0JzvZhAg= -honnef.co/go/tools v0.0.1-2020.1.3/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k= -honnef.co/go/tools v0.0.1-2020.1.4/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k= -honnef.co/go/tools v0.4.7 h1:9MDAWxMoSnB6QoSqiVr7P5mtkT9pOc1kSxchzPCnqJs= -honnef.co/go/tools v0.4.7/go.mod h1:+rnGS1THNh8zMwnd2oVOTL9QF6vmfyG6ZXBULae2uc0= -mvdan.cc/gofumpt v0.6.0 h1:G3QvahNDmpD+Aek/bNOLrFR2XC6ZAdo62dZu65gmwGo= -mvdan.cc/gofumpt v0.6.0/go.mod h1:4L0wf+kgIPZtcCWXynNS2e6bhmj73umwnuXSZarixzA= -mvdan.cc/interfacer v0.0.0-20180901003855-c20040233aed h1:WX1yoOaKQfddO/mLzdV4wptyWgoH/6hwLs7QHTixo0I= -mvdan.cc/interfacer v0.0.0-20180901003855-c20040233aed/go.mod h1:Xkxe497xwlCKkIaQYRfC7CSLworTXY9RMqwhhCm+8Nc= -mvdan.cc/lint v0.0.0-20170908181259-adc824a0674b h1:DxJ5nJdkhDlLok9K6qO+5290kphDJbHOQO1DFFFTeBo= -mvdan.cc/lint v0.0.0-20170908181259-adc824a0674b/go.mod h1:2odslEg/xrtNQqCYg2/jCoyKnw3vv5biOc3JnIcYfL4= -mvdan.cc/unparam v0.0.0-20240104100049-c549a3470d14 h1:zCr3iRRgdk5eIikZNDphGcM6KGVTx3Yu+/Uu9Es254w= -mvdan.cc/unparam v0.0.0-20240104100049-c549a3470d14/go.mod h1:ZzZjEpJDOmx8TdVU6umamY3Xy0UAQUI2DHbf05USVbI= -rsc.io/binaryregexp v0.2.0/go.mod h1:qTv7/COck+e2FymRvadv62gMdZztPaShugOCi3I+8D8= -rsc.io/quote/v3 v3.1.0/go.mod h1:yEA65RcK8LyAZtP9Kv3t0HmxON59tX3rD+tICJqUlj0= -rsc.io/sampler v1.3.0/go.mod h1:T1hPZKmBbMNahiBKFy5HrXp6adAjACjK9JXDnKaTXpA= diff --git a/tools/tools.go b/tools/tools.go index 579ff6ebb..5115e81b3 100644 --- a/tools/tools.go +++ b/tools/tools.go @@ -5,7 +5,6 @@ package tools import ( _ "github.com/golang/mock/mockgen/model" - _ "github.com/golangci/golangci-lint/cmd/golangci-lint" _ "github.com/maxbrunsfeld/counterfeiter/v6" _ "github.com/onsi/ginkgo/v2/ginkgo" _ "golang.org/x/tools/cmd/goimports" diff --git a/vendor/4d63.com/gocheckcompilerdirectives/LICENSE b/vendor/4d63.com/gocheckcompilerdirectives/LICENSE deleted file mode 100644 index 3f12625b0..000000000 --- a/vendor/4d63.com/gocheckcompilerdirectives/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -MIT License - -Copyright (c) 2023 Leigh McCulloch - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this 
software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. diff --git a/vendor/4d63.com/gocheckcompilerdirectives/checkcompilerdirectives/checkcompilerdirectives.go b/vendor/4d63.com/gocheckcompilerdirectives/checkcompilerdirectives/checkcompilerdirectives.go deleted file mode 100644 index 19948c454..000000000 --- a/vendor/4d63.com/gocheckcompilerdirectives/checkcompilerdirectives/checkcompilerdirectives.go +++ /dev/null @@ -1,105 +0,0 @@ -package checkcompilerdirectives - -import ( - "strings" - - "golang.org/x/tools/go/analysis" -) - -func Analyzer() *analysis.Analyzer { - return &analysis.Analyzer{ - Name: "gocheckcompilerdirectives", - Doc: "Checks that go compiler directive comments (//go:) are valid.", - Run: run, - } -} - -func run(pass *analysis.Pass) (interface{}, error) { - for _, file := range pass.Files { - for _, group := range file.Comments { - for _, comment := range group.List { - text := comment.Text - if !strings.HasPrefix(text, "//") { - continue - } - start := 2 - spaces := 0 - for _, c := range text[start:] { - if c == ' ' { - spaces++ - continue - } - break - } - start += spaces - if !strings.HasPrefix(text[start:], "go:") { - continue - } - start += 3 - end := strings.Index(text[start:], " ") - if end == -1 { - continue - } - directive := text[start : start+end] - if len(directive) == 0 { - continue - } - prefix := text[:start+end] - // Leading whitespace will cause the go directive to be ignored - // by the compiler with no error, causing it not to work. This - // is an easy mistake. - if spaces > 0 { - pass.ReportRangef(comment, "compiler directive contains space: %s", prefix) - } - // If the directive is unknown it will be ignored by the - // compiler with no error. This is an easy mistake to make, - // especially if you typo a directive. - if !isKnown(directive) { - pass.ReportRangef(comment, "compiler directive unrecognized: %s", prefix) - } - } - } - } - return nil, nil -} - -func isKnown(directive string) bool { - for _, k := range known { - if directive == k { - return true - } - } - return false -} - -var known = []string{ - // Found by running the following command on the source of go. 
- // git grep -o -E -h '//go:[a-z_]+' -- ':!**/*_test.go' ':!test/' ':!**/testdata/**' | sort -u - "binary", - "build", - "buildsomethingelse", - "cgo_dynamic_linker", - "cgo_export_dynamic", - "cgo_export_static", - "cgo_import_dynamic", - "cgo_import_static", - "cgo_ldflag", - "cgo_unsafe_args", - "embed", - "generate", - "linkname", - "name", - "nocheckptr", - "noescape", - "noinline", - "nointerface", - "norace", - "nosplit", - "notinheap", - "nowritebarrier", - "nowritebarrierrec", - "systemstack", - "uintptrescapes", - "uintptrkeepalive", - "yeswritebarrierrec", -} diff --git a/vendor/4d63.com/gochecknoglobals/LICENSE b/vendor/4d63.com/gochecknoglobals/LICENSE deleted file mode 100644 index c401e6608..000000000 --- a/vendor/4d63.com/gochecknoglobals/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -MIT License - -Copyright (c) 2018 Leigh McCulloch - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. diff --git a/vendor/4d63.com/gochecknoglobals/checknoglobals/check_no_globals.go b/vendor/4d63.com/gochecknoglobals/checknoglobals/check_no_globals.go deleted file mode 100644 index edf9193ec..000000000 --- a/vendor/4d63.com/gochecknoglobals/checknoglobals/check_no_globals.go +++ /dev/null @@ -1,200 +0,0 @@ -package checknoglobals - -import ( - "flag" - "fmt" - "go/ast" - "go/token" - "go/types" - "strings" - - "golang.org/x/tools/go/analysis" -) - -// allowedExpression is a struct representing packages and methods that will -// be an allowed combination to use as a global variable, f.ex. Name `regexp` -// and SelName `MustCompile`. -type allowedExpression struct { - Name string - SelName string -} - -const Doc = `check that no global variables exist - -This analyzer checks for global variables and errors on any found. - -A global variable is a variable declared in package scope and that can be read -and written to by any function within the package. Global variables can cause -side effects which are difficult to keep track of. A code in one function may -change the variables state while another unrelated chunk of code may be -effected by it.` - -// Analyzer provides an Analyzer that checks that there are no global -// variables, except for errors and variables containing regular -// expressions. 
-func Analyzer() *analysis.Analyzer { - return &analysis.Analyzer{ - Name: "gochecknoglobals", - Doc: Doc, - Run: checkNoGlobals, - Flags: flags(), - RunDespiteErrors: true, - } -} - -func flags() flag.FlagSet { - flags := flag.NewFlagSet("", flag.ExitOnError) - flags.Bool("t", false, "Include tests") - - return *flags -} - -func isAllowed(cm ast.CommentMap, v ast.Node, ti *types.Info) bool { - switch i := v.(type) { - case *ast.GenDecl: - return hasEmbedComment(cm, i) - case *ast.Ident: - return i.Name == "_" || i.Name == "version" || isError(i, ti) || identHasEmbedComment(cm, i) - case *ast.CallExpr: - if expr, ok := i.Fun.(*ast.SelectorExpr); ok { - return isAllowedSelectorExpression(expr) - } - case *ast.CompositeLit: - if expr, ok := i.Type.(*ast.SelectorExpr); ok { - return isAllowedSelectorExpression(expr) - } - } - - return false -} - -func isAllowedSelectorExpression(v *ast.SelectorExpr) bool { - x, ok := v.X.(*ast.Ident) - if !ok { - return false - } - - allowList := []allowedExpression{ - {Name: "regexp", SelName: "MustCompile"}, - } - - for _, i := range allowList { - if x.Name == i.Name && v.Sel.Name == i.SelName { - return true - } - } - - return false -} - -// isError reports whether the AST identifier looks like -// an error and implements the error interface. -func isError(i *ast.Ident, ti *types.Info) bool { - return looksLikeError(i) && implementsError(i, ti) -} - -// looksLikeError returns true if the AST identifier starts -// with 'err' or 'Err', or false otherwise. -func looksLikeError(i *ast.Ident) bool { - prefix := "err" - if i.IsExported() { - prefix = "Err" - } - return strings.HasPrefix(i.Name, prefix) -} - -// implementsError reports whether the AST identifier -// implements the error interface. -func implementsError(i *ast.Ident, ti *types.Info) bool { - t := ti.TypeOf(i) - et := types.Universe.Lookup("error").Type().Underlying().(*types.Interface) - return types.Implements(t, et) -} - -func identHasEmbedComment(cm ast.CommentMap, i *ast.Ident) bool { - if i.Obj == nil { - return false - } - - spec, ok := i.Obj.Decl.(*ast.ValueSpec) - if !ok { - return false - } - - return hasEmbedComment(cm, spec) -} - -// hasEmbedComment returns true if the AST node has -// a '//go:embed ' comment, or false otherwise. 
-func hasEmbedComment(cm ast.CommentMap, n ast.Node) bool { - for _, g := range cm[n] { - for _, c := range g.List { - if strings.HasPrefix(c.Text, "//go:embed ") { - return true - } - } - } - return false -} - -func checkNoGlobals(pass *analysis.Pass) (interface{}, error) { - includeTests := pass.Analyzer.Flags.Lookup("t").Value.(flag.Getter).Get().(bool) - - for _, file := range pass.Files { - filename := pass.Fset.Position(file.Pos()).Filename - if !strings.HasSuffix(filename, ".go") { - continue - } - if !includeTests && strings.HasSuffix(filename, "_test.go") { - continue - } - - fileCommentMap := ast.NewCommentMap(pass.Fset, file, file.Comments) - - for _, decl := range file.Decls { - genDecl, ok := decl.(*ast.GenDecl) - if !ok { - continue - } - if genDecl.Tok != token.VAR { - continue - } - if isAllowed(fileCommentMap, genDecl, pass.TypesInfo) { - continue - } - for _, spec := range genDecl.Specs { - valueSpec := spec.(*ast.ValueSpec) - onlyAllowedValues := false - - for _, vn := range valueSpec.Values { - if isAllowed(fileCommentMap, vn, pass.TypesInfo) { - onlyAllowedValues = true - continue - } - - onlyAllowedValues = false - break - } - - if onlyAllowedValues { - continue - } - - for _, vn := range valueSpec.Names { - if isAllowed(fileCommentMap, vn, pass.TypesInfo) { - continue - } - - message := fmt.Sprintf("%s is a global variable", vn.Name) - pass.Report(analysis.Diagnostic{ - Pos: vn.Pos(), - Category: "global", - Message: message, - }) - } - } - } - } - - return nil, nil -} diff --git a/vendor/github.com/4meepo/tagalign/.gitignore b/vendor/github.com/4meepo/tagalign/.gitignore deleted file mode 100644 index e37bb52e4..000000000 --- a/vendor/github.com/4meepo/tagalign/.gitignore +++ /dev/null @@ -1,75 +0,0 @@ -# File created using '.gitignore Generator' for Visual Studio Code: https://bit.ly/vscode-gig -# Created by https://www.toptal.com/developers/gitignore/api/visualstudiocode,macos,go -# Edit at https://www.toptal.com/developers/gitignore?templates=visualstudiocode,macos,go - -### Go ### -# If you prefer the allow list template instead of the deny list, see community template: -# https://github.com/github/gitignore/blob/main/community/Golang/Go.AllowList.gitignore -# -# Binaries for programs and plugins -*.exe -*.exe~ -*.dll -*.so -*.dylib - -# Test binary, built with `go test -c` -*.test - -.vscode - -# Output of the go coverage tool, specifically when used with LiteIDE -*.out - -# Dependency directories (remove the comment below to include it) -# vendor/ - -# Go workspace file -go.work - -### macOS ### -# General -.DS_Store -.AppleDouble -.LSOverride - -# Icon must end with two \r -Icon - - -# Thumbnails -._* - -# Files that might appear in the root of a volume -.DocumentRevisions-V100 -.fseventsd -.Spotlight-V100 -.TemporaryItems -.Trashes -.VolumeIcon.icns -.com.apple.timemachine.donotpresent - -# Directories potentially created on remote AFP share -.AppleDB -.AppleDesktop -Network Trash Folder -Temporary Items -.apdisk - -### macOS Patch ### -# iCloud generated files -*.icloud - -.history/ - -# Built Visual Studio Code Extensions -*.vsix - -### VisualStudioCode Patch ### -# Ignore all local history of files -.history -.ionide - -# End of https://www.toptal.com/developers/gitignore/api/visualstudiocode,macos,go - -# Custom rules (everything added below won't be overriden by 'Generate .gitignore File' if you use 'Update' option) diff --git a/vendor/github.com/4meepo/tagalign/.golangci.yml b/vendor/github.com/4meepo/tagalign/.golangci.yml deleted file mode 100644 
index e65f604fe..000000000 --- a/vendor/github.com/4meepo/tagalign/.golangci.yml +++ /dev/null @@ -1,106 +0,0 @@ -# See https://golangci-lint.run/usage/configuration/ - -linters-settings: - revive: - # see https://github.com/mgechev/revive#available-rules for details. - ignore-generated-header: true - severity: warning - rules: - - name: atomic - - name: blank-imports - - name: bool-literal-in-expr - - name: call-to-gc - - name: confusing-naming - - name: confusing-results - - name: constant-logical-expr - - name: context-as-argument - - name: context-keys-type - - name: deep-exit - - name: defer - - name: dot-imports - - name: duplicated-imports - - name: early-return - - name: empty-block - - name: empty-lines - - name: error-naming - - name: error-return - - name: error-strings - - name: errorf - - name: exported - - name: get-return - - name: identical-branches - - name: if-return - - name: import-shadowing - - name: increment-decrement - - name: indent-error-flow - - name: modifies-parameter - - name: modifies-value-receiver - - name: package-comments - - name: range - - name: range-val-address - - name: range-val-in-closure - - name: receiver-naming - - name: redefines-builtin-id - - name: string-of-int - - name: superfluous-else - - name: time-naming - - name: unconditional-recursion - - name: unexported-naming - - name: unexported-return - - name: unnecessary-stmt - - name: unreachable-code - - name: unused-parameter - - name: var-declaration - - name: var-naming - - name: waitgroup-by-value - -linters: - disable-all: true - enable: - - asciicheck - - bodyclose - - dogsled - - dupl - - durationcheck - - errcheck - - errorlint - - exhaustive - - exportloopref - - forcetypeassert - - gochecknoinits - - gocognit - - goconst - - gocritic - - gocyclo - - godot - - godox - - goimports - - gomoddirectives - - gomodguard - - goprintffuncname - - gosec - - gosimple - # - govet - - importas - - ineffassign - - makezero - - misspell - - nakedret - - nestif - - nilerr - - noctx - - nolintlint - - prealloc - - predeclared - - revive - - rowserrcheck - - sqlclosecheck - - staticcheck - - stylecheck - - thelper - - tparallel - - typecheck - - unconvert - - unparam - - unused - - whitespace diff --git a/vendor/github.com/4meepo/tagalign/.goreleaser.yml b/vendor/github.com/4meepo/tagalign/.goreleaser.yml deleted file mode 100644 index e7b6f6800..000000000 --- a/vendor/github.com/4meepo/tagalign/.goreleaser.yml +++ /dev/null @@ -1,32 +0,0 @@ ---- -project_name: tagalign - -release: - github: - owner: 4meepo - name: tagalign - -builds: - - binary: tagalign - goos: - - darwin - - windows - - linux - - freebsd - goarch: - - amd64 - - arm64 - - arm - goarm: - - 6 - - 7 - gomips: - - hardfloat - env: - - CGO_ENABLED=0 - ignore: - - goos: darwin - goarch: 386 - - goos: freebsd - goarch: arm64 - main: ./cmd/tagalign/ \ No newline at end of file diff --git a/vendor/github.com/4meepo/tagalign/LICENSE b/vendor/github.com/4meepo/tagalign/LICENSE deleted file mode 100644 index da3ae8270..000000000 --- a/vendor/github.com/4meepo/tagalign/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -MIT License - -Copyright (c) 2023 Yifei Liu - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do 
so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. diff --git a/vendor/github.com/4meepo/tagalign/Makefile b/vendor/github.com/4meepo/tagalign/Makefile deleted file mode 100644 index 614e7773c..000000000 --- a/vendor/github.com/4meepo/tagalign/Makefile +++ /dev/null @@ -1,7 +0,0 @@ -.PHONY: lint -lint: - golangci-lint run ./... - -.PHONY: build -build: - go build -o tagalign cmd/tagalign/tagalign.go \ No newline at end of file diff --git a/vendor/github.com/4meepo/tagalign/README.md b/vendor/github.com/4meepo/tagalign/README.md deleted file mode 100644 index 9d04dccbf..000000000 --- a/vendor/github.com/4meepo/tagalign/README.md +++ /dev/null @@ -1,130 +0,0 @@ -# Go Tag Align - -![GitHub go.mod Go version](https://img.shields.io/github/go-mod/go-version/4meepo/tagalign?style=flat-square) -[![codecov](https://codecov.io/github/4meepo/tagalign/branch/main/graph/badge.svg?token=1R1T61UNBQ)](https://codecov.io/github/4meepo/tagalign) -[![GoDoc](https://godoc.org/github.com/4meepo/tagalign?status.svg)](https://pkg.go.dev/github.com/4meepo/tagalign) -[![Go Report Card](https://goreportcard.com/badge/github.com/4meepo/tagalign)](https://goreportcard.com/report/github.com/4meepo/tagalign) - -TagAlign is used to align and sort tags in Go struct. It can make the struct more readable and easier to maintain. - -For example, this struct - -```go -type FooBar struct { - Foo int `json:"foo" validate:"required"` - Bar string `json:"bar" validate:"required"` - FooFoo int8 `json:"foo_foo" validate:"required"` - BarBar int `json:"bar_bar" validate:"required"` - FooBar struct { - Foo int `json:"foo" yaml:"foo" validate:"required"` - Bar222 string `json:"bar222" validate:"required" yaml:"bar"` - } `json:"foo_bar" validate:"required"` - BarFoo string `json:"bar_foo" validate:"required"` - BarFooBar string `json:"bar_foo_bar" validate:"required"` -} -``` - -can be aligned to: - -```go -type FooBar struct { - Foo int `json:"foo" validate:"required"` - Bar string `json:"bar" validate:"required"` - FooFoo int8 `json:"foo_foo" validate:"required"` - BarBar int `json:"bar_bar" validate:"required"` - FooBar struct { - Foo int `json:"foo" yaml:"foo" validate:"required"` - Bar222 string `json:"bar222" validate:"required" yaml:"bar"` - } `json:"foo_bar" validate:"required"` - BarFoo string `json:"bar_foo" validate:"required"` - BarFooBar string `json:"bar_foo_bar" validate:"required"` -} -``` - -## Usage - -By default tagalign will only align tags, but not sort them. But alignment and [sort feature](https://github.com/4meepo/tagalign#sort-tag) can work together or separately. - -* As a Golangci Linter (Recommended) - - Tagalign is a built-in linter in [Golangci Lint](https://golangci-lint.run/usage/linters/#tagalign) since `v1.53`. - > Note: In order to have the best experience, add the `--fix` flag to `golangci-lint` to enable the autofix feature. 
- -* Standalone Mode - - Install it using `GO` or download it [here](https://github.com/4meepo/tagalign/releases). - - ```bash - go install github.com/4meepo/tagalign/cmd/tagalign@latest - ``` - - Run it in your terminal. - - ```bash - # Only align tags. - tagalign -fix {package path} - # Only sort tags with fixed order. - tagalign -fix -noalign -sort -order "json,xml" {package path} - # Align and sort together. - tagalign -fix -sort -order "json,xml" {package path} - # Align and sort together in strict style. - tagalign -fix -sort -order "json,xml" -strict {package path} - ``` - -## Advanced Features - -### Sort Tag - -In addition to alignment, it can also sort tags with fixed order. If we enable sort with fixed order `json,xml`, the following code - -```go -type SortExample struct { - Foo int `json:"foo,omitempty" yaml:"bar" xml:"baz" binding:"required" gorm:"column:foo" zip:"foo" validate:"required"` - Bar int `validate:"required" yaml:"foo" xml:"bar" binding:"required" json:"bar,omitempty" gorm:"column:bar" zip:"bar" ` - FooBar int `gorm:"column:bar" validate:"required" xml:"bar" binding:"required" json:"bar,omitempty" zip:"bar" yaml:"foo"` -} -``` - -will be sorted and aligned to: - -```go -type SortExample struct { - Foo int `json:"foo,omitempty" xml:"baz" binding:"required" gorm:"column:foo" validate:"required" yaml:"bar" zip:"foo"` - Bar int `json:"bar,omitempty" xml:"bar" binding:"required" gorm:"column:bar" validate:"required" yaml:"foo" zip:"bar"` - FooBar int `json:"bar,omitempty" xml:"bar" binding:"required" gorm:"column:bar" validate:"required" yaml:"foo" zip:"bar"` -} -``` - -The fixed order is `json,xml`, so the tags `json` and `xml` will be sorted and aligned first, and the rest tags will be sorted and aligned in the dictionary order. - -### Strict Style - -Sometimes, you may want to align your tags in strict style. In this style, the tags will be sorted and aligned in the dictionary order, and the tags with the same name will be aligned together. For example, the following code - -```go -type StrictStyleExample struct { - Foo int ` xml:"baz" yaml:"bar" zip:"foo" binding:"required" gorm:"column:foo" validate:"required"` - Bar int `validate:"required" gorm:"column:bar" yaml:"foo" xml:"bar" binding:"required" json:"bar,omitempty" ` -} -``` - -will be aligned to - -```go -type StrictStyleExample struct { - Foo int `binding:"required" gorm:"column:foo" validate:"required" xml:"baz" yaml:"bar" zip:"foo"` - Bar int `binding:"required" gorm:"column:bar" json:"bar,omitempty" validate:"required" xml:"bar" yaml:"foo"` -} -``` - -> ⚠️Note: The strict style can't run without the align or sort feature enabled. - -## References - -[Golang AST Visualizer](http://goast.yuroyoro.net/) - -[Create New Golang CI Linter](https://golangci-lint.run/contributing/new-linters/) - -[Autofix Example](https://github.com/golangci/golangci-lint/pull/2450/files) - -[Integrating](https://disaev.me/p/writing-useful-go-analysis-linter/#integrating) diff --git a/vendor/github.com/4meepo/tagalign/options.go b/vendor/github.com/4meepo/tagalign/options.go deleted file mode 100644 index ddec98da7..000000000 --- a/vendor/github.com/4meepo/tagalign/options.go +++ /dev/null @@ -1,37 +0,0 @@ -package tagalign - -type Option func(*Helper) - -// WithMode specify the mode of tagalign. -func WithMode(mode Mode) Option { - return func(h *Helper) { - h.mode = mode - } -} - -// WithSort enable tags sort. -// fixedOrder specify the order of tags, the other tags will be sorted by name. -// Sory is disabled by default. 
-func WithSort(fixedOrder ...string) Option { - return func(h *Helper) { - h.sort = true - h.fixedTagOrder = fixedOrder - } -} - -// WithAlign configure whether enable tags align. -// Align is enabled by default. -func WithAlign(enabled bool) Option { - return func(h *Helper) { - h.align = enabled - } -} - -// WithStrictStyle configure whether enable strict style. -// StrictStyle is disabled by default. -// Note: StrictStyle must be used with WithAlign(true) and WithSort(...) together, or it will be ignored. -func WithStrictStyle() Option { - return func(h *Helper) { - h.style = StrictStyle - } -} diff --git a/vendor/github.com/4meepo/tagalign/tagalign.go b/vendor/github.com/4meepo/tagalign/tagalign.go deleted file mode 100644 index 4734b5666..000000000 --- a/vendor/github.com/4meepo/tagalign/tagalign.go +++ /dev/null @@ -1,459 +0,0 @@ -package tagalign - -import ( - "fmt" - "go/ast" - "go/token" - "log" - "reflect" - "sort" - "strconv" - "strings" - - "github.com/fatih/structtag" - - "golang.org/x/tools/go/analysis" -) - -type Mode int - -const ( - StandaloneMode Mode = iota - GolangciLintMode -) - -type Style int - -const ( - DefaultStyle Style = iota - StrictStyle -) - -const ( - errTagValueSyntax = "bad syntax for struct tag value" -) - -func NewAnalyzer(options ...Option) *analysis.Analyzer { - return &analysis.Analyzer{ - Name: "tagalign", - Doc: "check that struct tags are well aligned", - Run: func(p *analysis.Pass) (any, error) { - Run(p, options...) - return nil, nil - }, - } -} - -func Run(pass *analysis.Pass, options ...Option) []Issue { - var issues []Issue - for _, f := range pass.Files { - h := &Helper{ - mode: StandaloneMode, - style: DefaultStyle, - align: true, - } - for _, opt := range options { - opt(h) - } - - // StrictStyle must be used with WithAlign(true) and WithSort(...) together, or it will be ignored. - if h.style == StrictStyle && (!h.align || !h.sort) { - h.style = DefaultStyle - } - - if !h.align && !h.sort { - // do nothing - return nil - } - - ast.Inspect(f, func(n ast.Node) bool { - h.find(pass, n) - return true - }) - h.Process(pass) - issues = append(issues, h.issues...) - } - return issues -} - -type Helper struct { - mode Mode - - style Style - - align bool // whether enable tags align. - sort bool // whether enable tags sort. - fixedTagOrder []string // the order of tags, the other tags will be sorted by name. - - singleFields []*ast.Field - consecutiveFieldsGroups [][]*ast.Field // fields in this group, must be consecutive in struct. - issues []Issue -} - -// Issue is used to integrate with golangci-lint's inline auto fix. 
-type Issue struct { - Pos token.Position - Message string - InlineFix InlineFix -} -type InlineFix struct { - StartCol int // zero-based - Length int - NewString string -} - -func (w *Helper) find(pass *analysis.Pass, n ast.Node) { - v, ok := n.(*ast.StructType) - if !ok { - return - } - - fields := v.Fields.List - if len(fields) == 0 { - return - } - - fs := make([]*ast.Field, 0) - split := func() { - n := len(fs) - if n > 1 { - w.consecutiveFieldsGroups = append(w.consecutiveFieldsGroups, fs) - } else if n == 1 { - w.singleFields = append(w.singleFields, fs[0]) - } - - fs = nil - } - - for i, field := range fields { - if field.Tag == nil { - // field without tags - split() - continue - } - - if i > 0 { - if fields[i-1].Tag == nil { - // if previous filed do not have a tag - fs = append(fs, field) - continue - } - preLineNum := pass.Fset.Position(fields[i-1].Tag.Pos()).Line - lineNum := pass.Fset.Position(field.Tag.Pos()).Line - if lineNum-preLineNum > 1 { - // fields with tags are not consecutive, including two case: - // 1. splited by lines - // 2. splited by a struct - split() - - // check if the field is a struct - if _, ok := field.Type.(*ast.StructType); ok { - continue - } - } - } - - fs = append(fs, field) - } - - split() -} - -func (w *Helper) report(pass *analysis.Pass, field *ast.Field, startCol int, msg, replaceStr string) { - if w.mode == GolangciLintMode { - iss := Issue{ - Pos: pass.Fset.Position(field.Tag.Pos()), - Message: msg, - InlineFix: InlineFix{ - StartCol: startCol, - Length: len(field.Tag.Value), - NewString: replaceStr, - }, - } - w.issues = append(w.issues, iss) - } - - if w.mode == StandaloneMode { - pass.Report(analysis.Diagnostic{ - Pos: field.Tag.Pos(), - End: field.Tag.End(), - Message: msg, - SuggestedFixes: []analysis.SuggestedFix{ - { - Message: msg, - TextEdits: []analysis.TextEdit{ - { - Pos: field.Tag.Pos(), - End: field.Tag.End(), - NewText: []byte(replaceStr), - }, - }, - }, - }, - }) - } -} - -func (w *Helper) Process(pass *analysis.Pass) { //nolint:gocognit - // process grouped fields - for _, fields := range w.consecutiveFieldsGroups { - offsets := make([]int, len(fields)) - - var maxTagNum int - var tagsGroup, notSortedTagsGroup [][]*structtag.Tag - - var uniqueKeys []string - addKey := func(k string) { - for _, key := range uniqueKeys { - if key == k { - return - } - } - uniqueKeys = append(uniqueKeys, k) - } - - for i := 0; i < len(fields); { - field := fields[i] - column := pass.Fset.Position(field.Tag.Pos()).Column - 1 - offsets[i] = column - - tag, err := strconv.Unquote(field.Tag.Value) - if err != nil { - // if tag value is not a valid string, report it directly - w.report(pass, field, column, errTagValueSyntax, field.Tag.Value) - fields = removeField(fields, i) - continue - } - - tags, err := structtag.Parse(tag) - if err != nil { - // if tag value is not a valid struct tag, report it directly - w.report(pass, field, column, err.Error(), field.Tag.Value) - fields = removeField(fields, i) - continue - } - - maxTagNum = max(maxTagNum, tags.Len()) - - if w.sort { - cp := make([]*structtag.Tag, tags.Len()) - for i, tag := range tags.Tags() { - cp[i] = tag - } - notSortedTagsGroup = append(notSortedTagsGroup, cp) - sortBy(w.fixedTagOrder, tags) - } - for _, t := range tags.Tags() { - addKey(t.Key) - } - tagsGroup = append(tagsGroup, tags.Tags()) - - i++ - } - - if w.sort && StrictStyle == w.style { - sortAllKeys(w.fixedTagOrder, uniqueKeys) - maxTagNum = len(uniqueKeys) - } - - // record the max length of each column tag - type tagLen struct 
{ - Key string // present only when sort enabled - Len int - } - tagMaxLens := make([]tagLen, maxTagNum) - for j := 0; j < maxTagNum; j++ { - var maxLength int - var key string - for i := 0; i < len(tagsGroup); i++ { - if w.style == StrictStyle { - key = uniqueKeys[j] - // search by key - for _, tag := range tagsGroup[i] { - if tag.Key == key { - maxLength = max(maxLength, len(tag.String())) - break - } - } - } else { - if len(tagsGroup[i]) <= j { - // in case of index out of range - continue - } - maxLength = max(maxLength, len(tagsGroup[i][j].String())) - } - } - tagMaxLens[j] = tagLen{key, maxLength} - } - - for i, field := range fields { - tags := tagsGroup[i] - - var newTagStr string - if w.align { - // if align enabled, align tags. - newTagBuilder := strings.Builder{} - for i, n := 0, 0; i < len(tags) && n < len(tagMaxLens); { - tag := tags[i] - var format string - if w.style == StrictStyle { - if tagMaxLens[n].Key == tag.Key { - // match - format = alignFormat(tagMaxLens[n].Len + 1) // with an extra space - newTagBuilder.WriteString(fmt.Sprintf(format, tag.String())) - i++ - n++ - } else { - // tag missing - format = alignFormat(tagMaxLens[n].Len + 1) - newTagBuilder.WriteString(fmt.Sprintf(format, "")) - n++ - } - } else { - format = alignFormat(tagMaxLens[n].Len + 1) // with an extra space - newTagBuilder.WriteString(fmt.Sprintf(format, tag.String())) - i++ - n++ - } - } - newTagStr = newTagBuilder.String() - } else { - // otherwise check if tags order changed - if w.sort && reflect.DeepEqual(notSortedTagsGroup[i], tags) { - // if tags order not changed, do nothing - continue - } - tagsStr := make([]string, len(tags)) - for i, tag := range tags { - tagsStr[i] = tag.String() - } - newTagStr = strings.Join(tagsStr, " ") - } - - unquoteTag := strings.TrimRight(newTagStr, " ") - // unquoteTag := newTagStr - newTagValue := fmt.Sprintf("`%s`", unquoteTag) - if field.Tag.Value == newTagValue { - // nothing changed - continue - } - - msg := "tag is not aligned, should be: " + unquoteTag - - w.report(pass, field, offsets[i], msg, newTagValue) - } - } - - // process single fields - for _, field := range w.singleFields { - column := pass.Fset.Position(field.Tag.Pos()).Column - 1 - tag, err := strconv.Unquote(field.Tag.Value) - if err != nil { - w.report(pass, field, column, errTagValueSyntax, field.Tag.Value) - continue - } - - tags, err := structtag.Parse(tag) - if err != nil { - w.report(pass, field, column, err.Error(), field.Tag.Value) - continue - } - originalTags := append([]*structtag.Tag(nil), tags.Tags()...) - if w.sort { - sortBy(w.fixedTagOrder, tags) - } - - newTagValue := fmt.Sprintf("`%s`", tags.String()) - if reflect.DeepEqual(originalTags, tags.Tags()) && field.Tag.Value == newTagValue { - // if tags order not changed, do nothing - continue - } - - msg := "tag is not aligned , should be: " + tags.String() - - w.report(pass, field, column, msg, newTagValue) - } -} - -// Issues returns all issues found by the analyzer. -// It is used to integrate with golangci-lint. -func (w *Helper) Issues() []Issue { - log.Println("tagalign 's Issues() should only be called in golangci-lint mode") - return w.issues -} - -// sortBy sorts tags by fixed order. -// If a tag is not in the fixed order, it will be sorted by name. 
-func sortBy(fixedOrder []string, tags *structtag.Tags) { - // sort by fixed order - sort.Slice(tags.Tags(), func(i, j int) bool { - ti := tags.Tags()[i] - tj := tags.Tags()[j] - - oi := findIndex(fixedOrder, ti.Key) - oj := findIndex(fixedOrder, tj.Key) - - if oi == -1 && oj == -1 { - return ti.Key < tj.Key - } - - if oi == -1 { - return false - } - - if oj == -1 { - return true - } - - return oi < oj - }) -} - -func sortAllKeys(fixedOrder []string, keys []string) { - sort.Slice(keys, func(i, j int) bool { - oi := findIndex(fixedOrder, keys[i]) - oj := findIndex(fixedOrder, keys[j]) - - if oi == -1 && oj == -1 { - return keys[i] < keys[j] - } - - if oi == -1 { - return false - } - - if oj == -1 { - return true - } - - return oi < oj - }) -} - -func findIndex(s []string, e string) int { - for i, a := range s { - if a == e { - return i - } - } - return -1 -} - -func alignFormat(length int) string { - return "%" + fmt.Sprintf("-%ds", length) -} - -func max(a, b int) int { - if a > b { - return a - } - return b -} - -func removeField(fields []*ast.Field, index int) []*ast.Field { - if index < 0 || index >= len(fields) { - return fields - } - - return append(fields[:index], fields[index+1:]...) -} diff --git a/vendor/github.com/Abirdcfly/dupword/.gitignore b/vendor/github.com/Abirdcfly/dupword/.gitignore deleted file mode 100644 index b5109d2bb..000000000 --- a/vendor/github.com/Abirdcfly/dupword/.gitignore +++ /dev/null @@ -1,183 +0,0 @@ - -# Godot-specific ignores -.import/ -export.cfg -export_presets.cfg - -# Mono-specific ignores -.mono/ -data_*/ - -### JetBrains template -# Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio, WebStorm and Rider -# Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839 - -# User-specific stuff -.idea/**/workspace.xml -.idea/**/tasks.xml -.idea/**/usage.statistics.xml -.idea/**/dictionaries -.idea/**/shelf - -# Generated files -.idea/**/contentModel.xml - -# Sensitive or high-churn files -.idea/**/dataSources/ -.idea/**/dataSources.ids -.idea/**/dataSources.local.xml -.idea/**/sqlDataSources.xml -.idea/**/dynamic.xml -.idea/**/uiDesigner.xml -.idea/**/dbnavigator.xml - -# Gradle -.idea/**/gradle.xml -.idea/**/libraries - -# Gradle and Maven with auto-import -# When using Gradle or Maven with auto-import, you should exclude module files, -# since they will be recreated, and may cause churn. Uncomment if using -# auto-import. 
-.idea/artifacts -.idea/compiler.xml -.idea/jarRepositories.xml -.idea/modules.xml -.idea/*.iml -.idea/modules -*.iml -*.ipr - -# CMake -cmake-build-*/ - -# Mongo Explorer plugin -.idea/**/mongoSettings.xml - -# File-based project format -*.iws - -# IntelliJ -out/ - -# mpeltonen/sbt-idea plugin -.idea_modules/ - -# JIRA plugin -atlassian-ide-plugin.xml - -# Cursive Clojure plugin -.idea/replstate.xml - -# Crashlytics plugin (for Android Studio and IntelliJ) -com_crashlytics_export_strings.xml -crashlytics.properties -crashlytics-build.properties -fabric.properties - -# Editor-based Rest Client -.idea/httpRequests - -# Android studio 3.1+ serialized cache file -.idea/caches/build_file_checksums.ser - -### Emacs template -# -*- mode: gitignore; -*- -*~ -\#*\# -/.emacs.desktop -/.emacs.desktop.lock -*.elc -auto-save-list -tramp -.\#* - -# Org-mode -.org-id-locations -*_archive - -# flymake-mode -*_flymake.* - -# eshell files -/eshell/history -/eshell/lastdir - -# elpa packages -/elpa/ - -# reftex files -*.rel - -# AUCTeX auto folder -/auto/ - -# cask packages -.cask/ -dist/ - -# Flycheck -flycheck_*.el - -# server auth directory -/server/ - -# projectiles files -.projectile - -# directory configuration -.dir-locals.el - -# network security -/network-security.data - - -### Vim template -# Swap -[._]*.s[a-v][a-z] -!*.svg # comment out if you don't need vector files -[._]*.sw[a-p] -[._]s[a-rt-v][a-z] -[._]ss[a-gi-z] -[._]sw[a-p] - -# Session -Session.vim -Sessionx.vim - -# Temporary -.netrwhist -*~ -# Auto-generated tag files -tags -# Persistent undo -[._]*.un~ - -### macOS template -# General -.DS_Store -.AppleDouble -.LSOverride - -# Icon must end with two \r -Icon - -# Thumbnails -._* - -# Files that might appear in the root of a volume -.DocumentRevisions-V100 -.fseventsd -.Spotlight-V100 -.TemporaryItems -.Trashes -.VolumeIcon.icns -.com.apple.timemachine.donotpresent - -# Directories potentially created on remote AFP share -.AppleDB -.AppleDesktop -Network Trash Folder -Temporary Items -.apdisk - diff --git a/vendor/github.com/Abirdcfly/dupword/.goreleaser.yml b/vendor/github.com/Abirdcfly/dupword/.goreleaser.yml deleted file mode 100644 index c3401787a..000000000 --- a/vendor/github.com/Abirdcfly/dupword/.goreleaser.yml +++ /dev/null @@ -1,72 +0,0 @@ ---- -project_name: dupword - -release: - github: - owner: Abirdcfly - name: dupword - -builds: - - binary: dupword - goos: - - darwin - - windows - - linux - - freebsd - goarch: - - amd64 - - arm64 - - arm - - 386 - - ppc64le - - s390x - - mips64 - - mips64le - - riscv64 - goarm: - - 6 - - 7 - gomips: - - hardfloat - env: - - CGO_ENABLED=0 - ignore: - - goos: darwin - goarch: 386 - - goos: freebsd - goarch: arm64 - main: ./cmd/dupword/ - flags: - - -trimpath - ldflags: -s -w -X main.version={{.Version}} -X main.commit={{.ShortCommit}} -X main.date={{.Date}} - -archives: - - format: tar.gz - wrap_in_directory: true - format_overrides: - - goos: windows - format: zip - name_template: '{{ .ProjectName }}-{{ .Version }}-{{ .Os }}-{{ .Arch }}{{ if .Arm }}v{{ .Arm }}{{ end }}' - files: - - LICENSE - - README.md - -snapshot: - name_template: SNAPSHOT-{{ .Commit }} - -checksum: - name_template: '{{ .ProjectName }}-{{ .Version }}-checksums.txt' - -changelog: - sort: asc - filters: - exclude: - - '(?i)^docs?:' - - '(?i)^docs\([^:]+\):' - - '(?i)^docs\[[^:]+\]:' - - '^tests?:' - - '(?i)^dev:' - - '^build\(deps\): bump .* in /docs \(#\d+\)' - - '^build\(deps\): bump .* in /\.github/peril \(#\d+\)' - - Merge pull request - - Merge branch diff --git 
a/vendor/github.com/Abirdcfly/dupword/LICENSE b/vendor/github.com/Abirdcfly/dupword/LICENSE deleted file mode 100644 index afa64c6e1..000000000 --- a/vendor/github.com/Abirdcfly/dupword/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -MIT License - -Copyright (c) 2022 Abirdcfly - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. diff --git a/vendor/github.com/Abirdcfly/dupword/README.md b/vendor/github.com/Abirdcfly/dupword/README.md deleted file mode 100644 index e6c5b919f..000000000 --- a/vendor/github.com/Abirdcfly/dupword/README.md +++ /dev/null @@ -1,153 +0,0 @@ -# dupword - -![GitHub go.mod Go version](https://img.shields.io/github/go-mod/go-version/Abirdcfly/dupword?style=flat-square) -[![GoDoc](https://godoc.org/github.com/Abirdcfly/dupword?status.svg)](https://pkg.go.dev/github.com/Abirdcfly/dupword) -[![Actions Status](https://github.com/Abirdcfly/dupword/actions/workflows/lint.yml/badge.svg)](https://github.com/Abirdcfly/dupword/actions) -[![Go Report Card](https://goreportcard.com/badge/github.com/Abirdcfly/dupword)](https://goreportcard.com/report/github.com/Abirdcfly/dupword) - -A linter that checks for duplicate words in the source code (usually miswritten) - -Examples in real code and related issues can be viewed in [dupword#3](https://github.com/Abirdcfly/dupword/issues/3) - -## example - -1. Repeated words appear on two adjacent lines [commit](https://github.com/golang/go/commit/d8f90ce0f8119bf593efb6fb91825de5b61fcda7) - -```diff ---- a/src/cmd/compile/internal/ssa/schedule.go -+++ b/src/cmd/compile/internal/ssa/schedule.go -@@ -179,7 +179,7 @@ func schedule(f *Func) { - // scored CarryChainTail (and prove w is not a tail). - score[w.ID] = ScoreFlags - } -- // Verify v has not been scored. If v has not been visited, v may be the -+ // Verify v has not been scored. If v has not been visited, v may be - // the final (tail) operation in a carry chain. If v is not, v will be - // rescored above when v's carry-using op is scored. When scoring is done, - // only tail operations will retain the CarryChainTail score. -``` - -2. Repeated words appearing on the same line [commit](https://github.com/golang/go/commit/48da729e8468b630ee003ac51cbaac595d53bec8) - -```diff ---- a/src/net/http/cookiejar/jar.go -+++ b/src/net/http/cookiejar/jar.go -@@ -465,7 +465,7 @@ func (j *Jar) domainAndType(host, domain string) (string, bool, error) { - // dot in the domain-attribute before processing the cookie. 
- // - // Most browsers don't do that for IP addresses, only curl -- // version 7.54) and and IE (version 11) do not reject a -+ // version 7.54) and IE (version 11) do not reject a - // Set-Cookie: a=1; domain=.127.0.0.1 - // This leading dot is optional and serves only as hint for - // humans to indicate that a cookie with "domain=.bbc.co.uk" -``` - -## Install - -```bash -go install github.com/Abirdcfly/dupword/cmd/dupword@latest -``` - -**Or** install the main branch (including the last commit) with: - -```bash -go install github.com/Abirdcfly/dupword/cmd/dupword@main -``` - -## Usage - -### 1. default - -Run with default settings(include test file): - -**But note that not all repeated words are wrong** see [dupword#4](https://github.com/Abirdcfly/dupword/issues/4) for real code example. - -```bash -$ dupword ./... -/Users/xxx/go/src/dupword/dupword_test.go:88:10: Duplicate words (the) found -exit status 3 -``` - -### 2. skip test file - -Skip detection test file(`*_test.go`): - -```bash -$ dupword -test=false ./... -``` - -### 3. auto-fix - -```bash -$ dupword -fix ./... -``` - -### 4. all options - -All options: - -```bash -$ dupword --help -dupword: checks for duplicate words in the source code (usually miswritten) - -Usage: dupword [-flag] [package] - -This analyzer checks miswritten duplicate words in comments or package doc or string declaration - -Flags: - -V print version and exit - -all - no effect (deprecated) - -c int - display offending line with this many lines of context (default -1) - -cpuprofile string - write CPU profile to this file - -debug string - debug flags, any subset of "fpstv" - -fix - apply all suggested fixes - -flags - print analyzer flags in JSON - -ignore value - ignore words - -json - emit JSON output - -keyword value - keywords for detecting duplicate words - -memprofile string - write memory profile to this file - -source - no effect (deprecated) - -tags string - no effect (deprecated) - -test - indicates whether test files should be analyzed, too (default true) - -trace string - write trace log to this file - -v no effect (deprecated) -``` - -### 5. my advice - -use `--keyword=the,and,a` and `-fix` together. I think that specifying only commonly repeated prepositions can effectively avoid false positives. - -see [dupword#4](https://github.com/Abirdcfly/dupword/issues/4) for real code example. - -```bash -$ dupword --keyword=the,and,a -fix ./... -``` - -## TODO - -- [x] add this linter to golangci-lint -- [ ] rewrite the detection logic to make it more efficient - -## Limitation - -1. Only for `*.go` file.But some miswritten occurs in `*.md` or `*.json` file.(example: kubernetes), In this case, my advice is to use [rg](https://github.com/BurntSushi/ripgrep) to do the lookup and replace manually. -2. When use `-fix`, also running `go fmt` in the dark.([This logic is determined upstream](https://github.com/golang/tools/blob/248c34b88a4148128f89e41923498bd86f805b7d/go/analysis/internal/checker/checker.go#L424-L433), the project does not have this part of the code.) 
- -## License - -MIT diff --git a/vendor/github.com/Abirdcfly/dupword/dupword.go b/vendor/github.com/Abirdcfly/dupword/dupword.go deleted file mode 100644 index 9a78fb6cc..000000000 --- a/vendor/github.com/Abirdcfly/dupword/dupword.go +++ /dev/null @@ -1,331 +0,0 @@ -// MIT License -// -// Copyright (c) 2022 Abirdcfly -// -// Permission is hereby granted, free of charge, to any person obtaining a copy -// of this software and associated documentation files (the "Software"), to deal -// in the Software without restriction, including without limitation the rights -// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -// copies of the Software, and to permit persons to whom the Software is -// furnished to do so, subject to the following conditions: -// -// The above copyright notice and this permission notice shall be included in all -// copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -// SOFTWARE. - -// Package dupword defines an Analyzer that checks that duplicate words -// int the source code. -package dupword - -import ( - "flag" - "fmt" - "go/ast" - "go/token" - "sort" - "strconv" - "strings" - "unicode" - "unicode/utf8" - - "golang.org/x/tools/go/analysis" - "golang.org/x/tools/go/analysis/passes/inspect" - "golang.org/x/tools/go/ast/inspector" -) - -const ( - Name = "dupword" - Doc = `checks for duplicate words in the source code (usually miswritten) - -This analyzer checks miswritten duplicate words in comments or package doc or string declaration` - Message = "Duplicate words (%s) found" - CommentPrefix = `//` -) - -var ( - defaultWord = []string{} - // defaultWord = []string{"the", "and", "a"} - ignoreWord = map[string]bool{} -) - -type analyzer struct { - KeyWord []string -} - -func (a *analyzer) String() string { - return strings.Join(a.KeyWord, ",") -} - -func (a *analyzer) Set(w string) error { - if len(w) != 0 { - a.KeyWord = make([]string, 0) - a.KeyWord = append(a.KeyWord, strings.Split(w, ",")...) 
- } - return nil -} - -type ignore struct { -} - -func (a *ignore) String() string { - t := make([]string, 0, len(ignoreWord)) - for k := range ignoreWord { - t = append(t, k) - } - return strings.Join(t, ",") -} - -func (a *ignore) Set(w string) error { - for _, k := range strings.Split(w, ",") { - ignoreWord[k] = true - } - return nil -} - -// for test only -func ClearIgnoreWord() { - ignoreWord = map[string]bool{} -} - -func NewAnalyzer() *analysis.Analyzer { - ignore := &ignore{} - analyzer := &analyzer{KeyWord: defaultWord} - a := &analysis.Analyzer{ - Name: Name, - Doc: Doc, - Requires: []*analysis.Analyzer{inspect.Analyzer}, - Run: analyzer.run, - RunDespiteErrors: true, - } - a.Flags.Init(Name, flag.ExitOnError) - a.Flags.Var(analyzer, "keyword", "keywords for detecting duplicate words") - a.Flags.Var(ignore, "ignore", "ignore words") - a.Flags.Var(version{}, "V", "print version and exit") - return a -} - -func (a *analyzer) run(pass *analysis.Pass) (interface{}, error) { - for _, file := range pass.Files { - a.fixDuplicateWordInComment(pass, file) - } - inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) - nodeFilter := []ast.Node{ - (*ast.BasicLit)(nil), - } - inspect.Preorder(nodeFilter, func(n ast.Node) { - if lit, ok := n.(*ast.BasicLit); ok { - a.fixDuplicateWordInString(pass, lit) - } - }) - return nil, nil -} - -func (a *analyzer) fixDuplicateWordInComment(pass *analysis.Pass, f *ast.File) { - for _, cg := range f.Comments { - var preLine *ast.Comment - for _, c := range cg.List { - update, keyword, find := a.Check(c.Text) - if find { - pass.Report(analysis.Diagnostic{Pos: c.Slash, End: c.End(), Message: fmt.Sprintf(Message, keyword), SuggestedFixes: []analysis.SuggestedFix{{ - Message: "Update", - TextEdits: []analysis.TextEdit{{ - Pos: c.Slash, - End: c.End(), - NewText: []byte(update), - }}, - }}}) - } - if preLine != nil { - fields := strings.Fields(preLine.Text) - if len(fields) < 1 { - continue - } - preLineContent := fields[len(fields)-1] + "\n" - thisLineContent := c.Text - if find { - thisLineContent = update - } - before, after, _ := strings.Cut(thisLineContent, CommentPrefix) - update, keyword, find := a.Check(preLineContent + after) - if find { - var suggestedFixes []analysis.SuggestedFix - if strings.Contains(update, preLineContent) { - update = before + CommentPrefix + strings.TrimPrefix(update, preLineContent) - suggestedFixes = []analysis.SuggestedFix{{ - Message: "Update", - TextEdits: []analysis.TextEdit{{ - Pos: c.Slash, - End: c.End(), - NewText: []byte(update), - }}, - }} - } - pass.Report(analysis.Diagnostic{Pos: c.Slash, End: c.End(), Message: fmt.Sprintf(Message, keyword), SuggestedFixes: suggestedFixes}) - } - } - preLine = c - } - } -} - -func (a *analyzer) fixDuplicateWordInString(pass *analysis.Pass, lit *ast.BasicLit) { - if lit.Kind != token.STRING { - return - } - value, err := strconv.Unquote(lit.Value) - if err != nil { - fmt.Printf("lit.Value:%v, err: %v\n", lit.Value, err) - // fall back to default - value = lit.Value - } - quote := value != lit.Value - update, keyword, find := a.Check(value) - if quote { - update = strconv.Quote(update) - } - if find { - pass.Report(analysis.Diagnostic{Pos: lit.Pos(), End: lit.End(), Message: fmt.Sprintf(Message, keyword), SuggestedFixes: []analysis.SuggestedFix{{ - Message: "Update", - TextEdits: []analysis.TextEdit{{ - Pos: lit.Pos(), - End: lit.End(), - NewText: []byte(update), - }}, - }}}) - } -} - -// CheckOneKey use to check there is a defined duplicate word in a string. 
-// raw is checked line. key is the keyword to check. empty means just check duplicate word. -func CheckOneKey(raw, key string) (new string, findWord string, find bool) { - if key == "" { - has := false - fields := strings.Fields(raw) - for i := range fields { - if i == len(fields)-1 { - break - } - if fields[i] == fields[i+1] { - has = true - } - } - if !has { - return - } - } else { - if x := strings.Split(raw, key); len(x) < 2 { - return - } - } - - findWordMap := make(map[string]bool, 4) - newLine := strings.Builder{} - wordStart, spaceStart := 0, 0 - curWord, preWord := "", "" - lastSpace := "" - var lastRune int32 - for i, r := range raw { - if !unicode.IsSpace(r) && unicode.IsSpace(lastRune) { - // word start position - /* - i - | - hello[ spaceA ]the[ spaceB ]the[ spaceC ]word - ^ ^ - | curWord: the - preWord: the - */ - symbol := raw[spaceStart:i] - if ((key != "" && curWord == key) || key == "") && curWord == preWord && curWord != "" { - if !ExcludeWords(curWord) { - find = true - findWordMap[curWord] = true - newLine.WriteString(lastSpace) - symbol = "" - } - } else { - newLine.WriteString(lastSpace) - newLine.WriteString(curWord) - } - lastSpace = symbol - preWord = curWord - wordStart = i - } else if unicode.IsSpace(r) && !unicode.IsSpace(lastRune) { - // space start position - spaceStart = i - curWord = raw[wordStart:i] - } else if i == len(raw)-1 { - // last position - word := raw[wordStart:] - if ((key != "" && word == key) || key == "") && word == preWord { - if !ExcludeWords(word) { - find = true - findWordMap[word] = true - } - } else { - newLine.WriteString(lastSpace) - newLine.WriteString(word) - } - } - lastRune = r - } - if find { - new = newLine.String() - findWordSlice := make([]string, len(findWordMap)) - i := 0 - for k := range findWordMap { - findWordSlice[i] = k - i++ - } - sort.Strings(findWordSlice) - findWord = strings.Join(findWordSlice, ",") - } - return -} - -func (a *analyzer) Check(raw string) (update string, keyword string, find bool) { - for _, key := range a.KeyWord { - updateOne, _, findOne := CheckOneKey(raw, key) - if findOne { - raw = updateOne - find = findOne - update = updateOne - if keyword == "" { - keyword = key - } else { - keyword = keyword + "," + key - } - } - } - if len(a.KeyWord) == 0 { - return CheckOneKey(raw, "") - } - return -} - -// ExcludeWords determines whether duplicate words should be reported, -// -// e.g. %s, should not be reported. 
-func ExcludeWords(word string) (exclude bool) { - firstRune, _ := utf8.DecodeRuneInString(word) - if unicode.IsDigit(firstRune) { - return true - } - if unicode.IsPunct(firstRune) { - return true - } - if unicode.IsSymbol(firstRune) { - return true - } - if _, exist := ignoreWord[word]; exist { - return true - } - return false -} diff --git a/vendor/github.com/Abirdcfly/dupword/version.go b/vendor/github.com/Abirdcfly/dupword/version.go deleted file mode 100644 index 9d892d0c9..000000000 --- a/vendor/github.com/Abirdcfly/dupword/version.go +++ /dev/null @@ -1,41 +0,0 @@ -// MIT License -// -// # Copyright (c) 2022 Abirdcfly -// -// Permission is hereby granted, free of charge, to any person obtaining a copy -// of this software and associated documentation files (the "Software"), to deal -// in the Software without restriction, including without limitation the rights -// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -// copies of the Software, and to permit persons to whom the Software is -// furnished to do so, subject to the following conditions: -// -// The above copyright notice and this permission notice shall be included in all -// copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -// SOFTWARE. - -package dupword - -import ( - "fmt" - "os" -) - -var Version = "dev" - -type version struct{} - -func (version) IsBoolFlag() bool { return true } -func (version) Get() interface{} { return nil } -func (version) String() string { return "" } -func (version) Set(_ string) error { - fmt.Println(Version) - os.Exit(0) - return nil -} diff --git a/vendor/github.com/Antonboom/errname/LICENSE b/vendor/github.com/Antonboom/errname/LICENSE deleted file mode 100644 index e2002e4d4..000000000 --- a/vendor/github.com/Antonboom/errname/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -MIT License - -Copyright (c) 2021 Anton Telyshev - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. 
diff --git a/vendor/github.com/Antonboom/errname/pkg/analyzer/analyzer.go b/vendor/github.com/Antonboom/errname/pkg/analyzer/analyzer.go deleted file mode 100644 index aa8522510..000000000 --- a/vendor/github.com/Antonboom/errname/pkg/analyzer/analyzer.go +++ /dev/null @@ -1,135 +0,0 @@ -package analyzer - -import ( - "fmt" - "go/ast" - "go/token" - "strconv" - "strings" - "unicode" - - "golang.org/x/tools/go/analysis" - "golang.org/x/tools/go/analysis/passes/inspect" - "golang.org/x/tools/go/ast/inspector" -) - -// New returns new errname analyzer. -func New() *analysis.Analyzer { - return &analysis.Analyzer{ - Name: "errname", - Doc: "Checks that sentinel errors are prefixed with the `Err` and error types are suffixed with the `Error`.", - Run: run, - Requires: []*analysis.Analyzer{inspect.Analyzer}, - } -} - -type stringSet = map[string]struct{} - -var ( - importNodes = []ast.Node{(*ast.ImportSpec)(nil)} - typeNodes = []ast.Node{(*ast.TypeSpec)(nil)} - funcNodes = []ast.Node{(*ast.FuncDecl)(nil)} -) - -func run(pass *analysis.Pass) (interface{}, error) { - insp := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) - - pkgAliases := map[string]string{} - insp.Preorder(importNodes, func(node ast.Node) { - i := node.(*ast.ImportSpec) - if n := i.Name; n != nil && i.Path != nil { - if path, err := strconv.Unquote(i.Path.Value); err == nil { - pkgAliases[n.Name] = getPkgFromPath(path) - } - } - }) - - allTypes := stringSet{} - typesSpecs := map[string]*ast.TypeSpec{} - insp.Preorder(typeNodes, func(node ast.Node) { - t := node.(*ast.TypeSpec) - allTypes[t.Name.Name] = struct{}{} - typesSpecs[t.Name.Name] = t - }) - - errorTypes := stringSet{} - insp.Preorder(funcNodes, func(node ast.Node) { - f := node.(*ast.FuncDecl) - t, ok := isMethodError(f) - if !ok { - return - } - errorTypes[t] = struct{}{} - - tSpec, ok := typesSpecs[t] - if !ok { - panic(fmt.Sprintf("no specification for type %q", t)) - } - - if _, ok := tSpec.Type.(*ast.ArrayType); ok { - if !isValidErrorArrayTypeName(t) { - reportAboutErrorType(pass, tSpec.Pos(), t, true) - } - } else if !isValidErrorTypeName(t) { - reportAboutErrorType(pass, tSpec.Pos(), t, false) - } - }) - - errorFuncs := stringSet{} - insp.Preorder(funcNodes, func(node ast.Node) { - f := node.(*ast.FuncDecl) - if isFuncReturningErr(f.Type, allTypes, errorTypes) { - errorFuncs[f.Name.Name] = struct{}{} - } - }) - - inspectPkgLevelVarsOnly := func(node ast.Node) bool { - switch v := node.(type) { - case *ast.FuncDecl: - return false - - case *ast.ValueSpec: - if name, ok := isSentinelError(v, pkgAliases, allTypes, errorTypes, errorFuncs); ok && !isValidErrorVarName(name) { - reportAboutErrorVar(pass, v.Pos(), name) - } - } - return true - } - for _, f := range pass.Files { - ast.Inspect(f, inspectPkgLevelVarsOnly) - } - - return nil, nil //nolint:nilnil -} - -func reportAboutErrorType(pass *analysis.Pass, typePos token.Pos, typeName string, isArrayType bool) { - var form string - if unicode.IsLower([]rune(typeName)[0]) { - form = "xxxError" - } else { - form = "XxxError" - } - - if isArrayType { - form += "s" - } - pass.Reportf(typePos, "the type name `%s` should conform to the `%s` format", typeName, form) -} - -func reportAboutErrorVar(pass *analysis.Pass, pos token.Pos, varName string) { - var form string - if unicode.IsLower([]rune(varName)[0]) { - form = "errXxx" - } else { - form = "ErrXxx" - } - pass.Reportf(pos, "the variable name `%s` should conform to the `%s` format", varName, form) -} - -func getPkgFromPath(p string) string { - idx := 
strings.LastIndex(p, "/") - if idx == -1 { - return p - } - return p[idx+1:] -} diff --git a/vendor/github.com/Antonboom/errname/pkg/analyzer/facts.go b/vendor/github.com/Antonboom/errname/pkg/analyzer/facts.go deleted file mode 100644 index 06f8d61d8..000000000 --- a/vendor/github.com/Antonboom/errname/pkg/analyzer/facts.go +++ /dev/null @@ -1,272 +0,0 @@ -package analyzer - -import ( - "fmt" - "go/ast" - "go/token" - "go/types" - "strings" - "unicode" -) - -func isMethodError(f *ast.FuncDecl) (typeName string, ok bool) { - if f.Recv == nil || len(f.Recv.List) != 1 { - return "", false - } - if f.Name == nil || f.Name.Name != "Error" { - return "", false - } - - if f.Type == nil || f.Type.Results == nil || len(f.Type.Results.List) != 1 { - return "", false - } - - returnType, ok := f.Type.Results.List[0].Type.(*ast.Ident) - if !ok { - return "", false - } - - var receiverType string - - unwrapIdentName := func(e ast.Expr) string { - switch v := e.(type) { - case *ast.Ident: - return v.Name - case *ast.IndexExpr: - if i, ok := v.X.(*ast.Ident); ok { - return i.Name - } - case *ast.IndexListExpr: - if i, ok := v.X.(*ast.Ident); ok { - return i.Name - } - } - panic(fmt.Errorf("unsupported Error() receiver type %q", types.ExprString(e))) - } - - switch rt := f.Recv.List[0].Type; v := rt.(type) { - case *ast.Ident, *ast.IndexExpr, *ast.IndexListExpr: // SomeError, SomeError[T], SomeError[T1, T2, ...] - receiverType = unwrapIdentName(rt) - - case *ast.StarExpr: // *SomeError, *SomeError[T], *SomeError[T1, T2, ...] - receiverType = unwrapIdentName(v.X) - } - - return receiverType, returnType.Name == "string" -} - -func isValidErrorTypeName(s string) bool { - if isInitialism(s) { - return true - } - - words := split(s) - wordsCnt := wordsCount(words) - - if wordsCnt["error"] != 1 { - return false - } - return words[len(words)-1] == "error" -} - -func isValidErrorArrayTypeName(s string) bool { - if isInitialism(s) { - return true - } - - words := split(s) - wordsCnt := wordsCount(words) - - if wordsCnt["errors"] != 1 { - return false - } - return words[len(words)-1] == "errors" -} - -func isFuncReturningErr(fType *ast.FuncType, allTypes, errorTypes stringSet) bool { - if fType == nil || fType.Results == nil || len(fType.Results.List) != 1 { - return false - } - - var returnTypeName string - switch rt := fType.Results.List[0].Type.(type) { - case *ast.Ident: - returnTypeName = rt.Name - case *ast.StarExpr: - if i, ok := rt.X.(*ast.Ident); ok { - returnTypeName = i.Name - } - } - - return isErrorType(returnTypeName, allTypes, errorTypes) -} - -func isErrorType(tName string, allTypes, errorTypes stringSet) bool { - _, isUserType := allTypes[tName] - _, isErrType := errorTypes[tName] - return isErrType || (tName == "error" && !isUserType) -} - -var knownErrConstructors = stringSet{ - "fmt.Errorf": {}, - "errors.Errorf": {}, - "errors.New": {}, - "errors.Newf": {}, - "errors.NewWithDepth": {}, - "errors.NewWithDepthf": {}, - "errors.NewAssertionErrorWithWrappedErrf": {}, -} - -func isSentinelError( //nolint:gocognit,gocyclo - v *ast.ValueSpec, - pkgAliases map[string]string, - allTypes, errorTypes, errorFuncs stringSet, -) (varName string, ok bool) { - if len(v.Names) != 1 { - return "", false - } - varName = v.Names[0].Name - - switch vv := v.Type.(type) { - // var ErrEndOfFile error - // var ErrEndOfFile SomeErrType - case *ast.Ident: - if isErrorType(vv.Name, allTypes, errorTypes) { - return varName, true - } - - // var ErrEndOfFile *SomeErrType - case *ast.StarExpr: - if i, ok := vv.X.(*ast.Ident); 
ok && isErrorType(i.Name, allTypes, errorTypes) { - return varName, true - } - } - - if len(v.Values) != 1 { - return "", false - } - - switch vv := v.Values[0].(type) { - case *ast.CallExpr: - switch fun := vv.Fun.(type) { - // var ErrEndOfFile = errors.New("end of file") - case *ast.SelectorExpr: - pkg, ok := fun.X.(*ast.Ident) - if !ok { - return "", false - } - pkgFun := fun.Sel - - pkgName := pkg.Name - if a, ok := pkgAliases[pkgName]; ok { - pkgName = a - } - - _, ok = knownErrConstructors[pkgName+"."+pkgFun.Name] - return varName, ok - - // var ErrEndOfFile = newErrEndOfFile() - // var ErrEndOfFile = new(EndOfFileError) - // const ErrEndOfFile = constError("end of file") - // var statusCodeError = new(SomePtrError[string]) - case *ast.Ident: - if isErrorType(fun.Name, allTypes, errorTypes) { - return varName, true - } - - if _, ok := errorFuncs[fun.Name]; ok { - return varName, true - } - - if fun.Name == "new" && len(vv.Args) == 1 { - switch i := vv.Args[0].(type) { - case *ast.Ident: - return varName, isErrorType(i.Name, allTypes, errorTypes) - case *ast.IndexExpr: - if ii, ok := i.X.(*ast.Ident); ok { - return varName, isErrorType(ii.Name, allTypes, errorTypes) - } - } - } - - // var ErrEndOfFile = func() error { ... } - case *ast.FuncLit: - return varName, isFuncReturningErr(fun.Type, allTypes, errorTypes) - } - - // var ErrEndOfFile = &EndOfFileError{} - // var ErrOK = &SomePtrError[string]{Code: "200 OK"} - case *ast.UnaryExpr: - if vv.Op == token.AND { // & - if lit, ok := vv.X.(*ast.CompositeLit); ok { - switch i := lit.Type.(type) { - case *ast.Ident: - return varName, isErrorType(i.Name, allTypes, errorTypes) - case *ast.IndexExpr: - if ii, ok := i.X.(*ast.Ident); ok { - return varName, isErrorType(ii.Name, allTypes, errorTypes) - } - } - } - } - - // var ErrEndOfFile = EndOfFileError{} - // var ErrNotFound = SomeError[string]{Code: "Not Found"} - case *ast.CompositeLit: - switch i := vv.Type.(type) { - case *ast.Ident: - return varName, isErrorType(i.Name, allTypes, errorTypes) - case *ast.IndexExpr: - if ii, ok := i.X.(*ast.Ident); ok { - return varName, isErrorType(ii.Name, allTypes, errorTypes) - } - } - } - - return "", false -} - -func isValidErrorVarName(s string) bool { - if isInitialism(s) { - return true - } - - words := split(s) - wordsCnt := wordsCount(words) - - if wordsCnt["err"] != 1 { - return false - } - return words[0] == "err" -} - -func isInitialism(s string) bool { - return strings.ToLower(s) == s || strings.ToUpper(s) == s -} - -func split(s string) []string { - var words []string - ss := []rune(s) - - var b strings.Builder - b.WriteRune(ss[0]) - - for _, r := range ss[1:] { - if unicode.IsUpper(r) { - words = append(words, strings.ToLower(b.String())) - b.Reset() - } - b.WriteRune(r) - } - - words = append(words, strings.ToLower(b.String())) - return words -} - -func wordsCount(w []string) map[string]int { - result := make(map[string]int, len(w)) - for _, ww := range w { - result[ww]++ - } - return result -} diff --git a/vendor/github.com/Antonboom/nilnil/LICENSE b/vendor/github.com/Antonboom/nilnil/LICENSE deleted file mode 100644 index e2002e4d4..000000000 --- a/vendor/github.com/Antonboom/nilnil/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -MIT License - -Copyright (c) 2021 Anton Telyshev - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, 
publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. diff --git a/vendor/github.com/Antonboom/nilnil/pkg/analyzer/analyzer.go b/vendor/github.com/Antonboom/nilnil/pkg/analyzer/analyzer.go deleted file mode 100644 index e980db546..000000000 --- a/vendor/github.com/Antonboom/nilnil/pkg/analyzer/analyzer.go +++ /dev/null @@ -1,158 +0,0 @@ -package analyzer - -import ( - "go/ast" - - "golang.org/x/tools/go/analysis" - "golang.org/x/tools/go/analysis/passes/inspect" - "golang.org/x/tools/go/ast/inspector" -) - -const ( - name = "nilnil" - doc = "Checks that there is no simultaneous return of `nil` error and an invalid value." - - reportMsg = "return both the `nil` error and invalid value: use a sentinel error instead" -) - -// New returns new nilnil analyzer. -func New() *analysis.Analyzer { - n := newNilNil() - - a := &analysis.Analyzer{ - Name: name, - Doc: doc, - Run: n.run, - Requires: []*analysis.Analyzer{inspect.Analyzer}, - } - a.Flags.Var(&n.checkedTypes, "checked-types", "coma separated list") - - return a -} - -type nilNil struct { - checkedTypes checkedTypes -} - -func newNilNil() *nilNil { - return &nilNil{ - checkedTypes: newDefaultCheckedTypes(), - } -} - -var ( - types = []ast.Node{(*ast.TypeSpec)(nil)} - - funcAndReturns = []ast.Node{ - (*ast.FuncDecl)(nil), - (*ast.FuncLit)(nil), - (*ast.ReturnStmt)(nil), - } -) - -type typeSpecByName map[string]typer - -func (n *nilNil) run(pass *analysis.Pass) (interface{}, error) { - insp := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) - - typeSpecs := typeSpecByName{ - "any": newTyper(new(ast.InterfaceType)), - } - insp.Preorder(types, func(node ast.Node) { - t := node.(*ast.TypeSpec) - typeSpecs[t.Name.Name] = newTyper(t.Type) - }) - - var fs funcTypeStack - insp.Nodes(funcAndReturns, func(node ast.Node, push bool) (proceed bool) { - switch v := node.(type) { - case *ast.FuncLit: - if push { - fs.Push(v.Type) - } else { - fs.Pop() - } - - case *ast.FuncDecl: - if push { - fs.Push(v.Type) - } else { - fs.Pop() - } - - case *ast.ReturnStmt: - ft := fs.Top() // Current function. 
- - if !push || len(v.Results) != 2 || ft == nil || ft.Results == nil || len(ft.Results.List) != 2 { - return false - } - - fRes1, fRes2 := ft.Results.List[0], ft.Results.List[1] - if !(n.isDangerNilField(fRes1, typeSpecs) && n.isErrorField(fRes2)) { - return false - } - - rRes1, rRes2 := v.Results[0], v.Results[1] - if isNil(rRes1) && isNil(rRes2) { - pass.Reportf(v.Pos(), reportMsg) - } - } - - return true - }) - - return nil, nil //nolint:nilnil -} - -func (n *nilNil) isDangerNilField(f *ast.Field, typeSpecs typeSpecByName) bool { - return n.isDangerNilType(f.Type, typeSpecs) -} - -func (n *nilNil) isDangerNilType(t ast.Expr, typeSpecs typeSpecByName) bool { - switch v := t.(type) { - case *ast.StarExpr: - return n.checkedTypes.Contains(ptrType) - - case *ast.FuncType: - return n.checkedTypes.Contains(funcType) - - case *ast.InterfaceType: - return n.checkedTypes.Contains(ifaceType) - - case *ast.MapType: - return n.checkedTypes.Contains(mapType) - - case *ast.ChanType: - return n.checkedTypes.Contains(chanType) - - case *ast.Ident: - if t, ok := typeSpecs[v.Name]; ok { - return n.isDangerNilType(t.Type(), typeSpecs) - } - } - return false -} - -func (n *nilNil) isErrorField(f *ast.Field) bool { - return isIdent(f.Type, "error") -} - -func isNil(e ast.Expr) bool { - return isIdent(e, "nil") -} - -func isIdent(n ast.Node, name string) bool { - i, ok := n.(*ast.Ident) - if !ok { - return false - } - return i.Name == name -} - -type typer interface { - Type() ast.Expr -} - -func newTyper(t ast.Expr) typer { return typerImpl{t: t} } // -type typerImpl struct{ t ast.Expr } // -func (ti typerImpl) Type() ast.Expr { return ti.t } diff --git a/vendor/github.com/Antonboom/nilnil/pkg/analyzer/config.go b/vendor/github.com/Antonboom/nilnil/pkg/analyzer/config.go deleted file mode 100644 index 520b813a5..000000000 --- a/vendor/github.com/Antonboom/nilnil/pkg/analyzer/config.go +++ /dev/null @@ -1,77 +0,0 @@ -package analyzer - -import ( - "fmt" - "sort" - "strings" -) - -func newDefaultCheckedTypes() checkedTypes { - return checkedTypes{ - ptrType: struct{}{}, - funcType: struct{}{}, - ifaceType: struct{}{}, - mapType: struct{}{}, - chanType: struct{}{}, - } -} - -const separator = ',' - -type typeName string - -func (t typeName) S() string { - return string(t) -} - -const ( - ptrType typeName = "ptr" - funcType typeName = "func" - ifaceType typeName = "iface" - mapType typeName = "map" - chanType typeName = "chan" -) - -var knownTypes = []typeName{ptrType, funcType, ifaceType, mapType, chanType} - -type checkedTypes map[typeName]struct{} - -func (c checkedTypes) Contains(t typeName) bool { - _, ok := c[t] - return ok -} - -func (c checkedTypes) String() string { - result := make([]string, 0, len(c)) - for t := range c { - result = append(result, t.S()) - } - - sort.Strings(result) - return strings.Join(result, string(separator)) -} - -func (c checkedTypes) Set(s string) error { - types := strings.FieldsFunc(s, func(c rune) bool { return c == separator }) - if len(types) == 0 { - return nil - } - - c.disableAll() - for _, t := range types { - switch tt := typeName(t); tt { - case ptrType, funcType, ifaceType, mapType, chanType: - c[tt] = struct{}{} - default: - return fmt.Errorf("unknown checked type name %q (see help)", t) - } - } - - return nil -} - -func (c checkedTypes) disableAll() { - for k := range c { - delete(c, k) - } -} diff --git a/vendor/github.com/Antonboom/nilnil/pkg/analyzer/func_type_stack.go b/vendor/github.com/Antonboom/nilnil/pkg/analyzer/func_type_stack.go deleted file mode 
100644 index 081761547..000000000 --- a/vendor/github.com/Antonboom/nilnil/pkg/analyzer/func_type_stack.go +++ /dev/null @@ -1,29 +0,0 @@ -package analyzer - -import ( - "go/ast" -) - -type funcTypeStack []*ast.FuncType - -func (s *funcTypeStack) Push(f *ast.FuncType) { - *s = append(*s, f) -} - -func (s *funcTypeStack) Pop() *ast.FuncType { - if len(*s) == 0 { - return nil - } - - last := len(*s) - 1 - f := (*s)[last] - *s = (*s)[:last] - return f -} - -func (s *funcTypeStack) Top() *ast.FuncType { - if len(*s) == 0 { - return nil - } - return (*s)[len(*s)-1] -} diff --git a/vendor/github.com/Antonboom/testifylint/LICENSE b/vendor/github.com/Antonboom/testifylint/LICENSE deleted file mode 100644 index 9b1cf3a39..000000000 --- a/vendor/github.com/Antonboom/testifylint/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -MIT License - -Copyright (c) 2022 Anton Telyshev - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. diff --git a/vendor/github.com/Antonboom/testifylint/analyzer/analyzer.go b/vendor/github.com/Antonboom/testifylint/analyzer/analyzer.go deleted file mode 100644 index 84d7e815d..000000000 --- a/vendor/github.com/Antonboom/testifylint/analyzer/analyzer.go +++ /dev/null @@ -1,93 +0,0 @@ -package analyzer - -import ( - "fmt" - "go/ast" - - "golang.org/x/tools/go/analysis" - "golang.org/x/tools/go/ast/inspector" - - "github.com/Antonboom/testifylint/internal/analysisutil" - "github.com/Antonboom/testifylint/internal/checkers" - "github.com/Antonboom/testifylint/internal/config" - "github.com/Antonboom/testifylint/internal/testify" -) - -const ( - name = "testifylint" - doc = "Checks usage of " + testify.ModulePath + "." - url = "https://github.com/antonboom/" + name -) - -// New returns new instance of testifylint analyzer. 
-func New() *analysis.Analyzer { - cfg := config.NewDefault() - - analyzer := &analysis.Analyzer{ - Name: name, - Doc: doc, - URL: url, - Run: func(pass *analysis.Pass) (any, error) { - regularCheckers, advancedCheckers, err := newCheckers(cfg) - if err != nil { - return nil, fmt.Errorf("build checkers: %v", err) - } - - tl := &testifyLint{ - regularCheckers: regularCheckers, - advancedCheckers: advancedCheckers, - } - return tl.run(pass) - }, - } - config.BindToFlags(&cfg, &analyzer.Flags) - - return analyzer -} - -type testifyLint struct { - regularCheckers []checkers.RegularChecker - advancedCheckers []checkers.AdvancedChecker -} - -func (tl *testifyLint) run(pass *analysis.Pass) (any, error) { - filesToAnalysis := make([]*ast.File, 0, len(pass.Files)) - for _, f := range pass.Files { - if !analysisutil.Imports(f, testify.AssertPkgPath, testify.RequirePkgPath, testify.SuitePkgPath) { - continue - } - filesToAnalysis = append(filesToAnalysis, f) - } - - insp := inspector.New(filesToAnalysis) - - // Regular checkers. - insp.Preorder([]ast.Node{(*ast.CallExpr)(nil)}, func(node ast.Node) { - tl.regularCheck(pass, node.(*ast.CallExpr)) - }) - - // Advanced checkers. - for _, ch := range tl.advancedCheckers { - for _, d := range ch.Check(pass, insp) { - pass.Report(d) - } - } - - return nil, nil -} - -func (tl *testifyLint) regularCheck(pass *analysis.Pass, ce *ast.CallExpr) { - call := checkers.NewCallMeta(pass, ce) - if nil == call { - return - } - - for _, ch := range tl.regularCheckers { - if d := ch.Check(pass, call); d != nil { - pass.Report(*d) - // NOTE(a.telyshev): I'm not interested in multiple diagnostics per assertion. - // This simplifies the code and also makes the linter more efficient. - return - } - } -} diff --git a/vendor/github.com/Antonboom/testifylint/analyzer/checkers_factory.go b/vendor/github.com/Antonboom/testifylint/analyzer/checkers_factory.go deleted file mode 100644 index 77573e395..000000000 --- a/vendor/github.com/Antonboom/testifylint/analyzer/checkers_factory.go +++ /dev/null @@ -1,74 +0,0 @@ -package analyzer - -import ( - "fmt" - - "github.com/Antonboom/testifylint/internal/checkers" - "github.com/Antonboom/testifylint/internal/config" -) - -// newCheckers accepts linter config and returns slices of enabled checkers sorted by priority. 
-func newCheckers(cfg config.Config) ([]checkers.RegularChecker, []checkers.AdvancedChecker, error) { - if err := cfg.Validate(); err != nil { - return nil, nil, err - } - - enabledCheckersSet := make(map[string]struct{}) - - if cfg.EnableAll { - for _, checker := range checkers.All() { - enabledCheckersSet[checker] = struct{}{} - } - } else if !cfg.DisableAll { - for _, checker := range checkers.EnabledByDefault() { - enabledCheckersSet[checker] = struct{}{} - } - } - - for _, checker := range cfg.EnabledCheckers { - enabledCheckersSet[checker] = struct{}{} - } - - for _, checker := range cfg.DisabledCheckers { - delete(enabledCheckersSet, checker) - } - - enabledCheckers := make([]string, 0, len(enabledCheckersSet)) - for v := range enabledCheckersSet { - enabledCheckers = append(enabledCheckers, v) - } - checkers.SortByPriority(enabledCheckers) - - regularCheckers := make([]checkers.RegularChecker, 0, len(enabledCheckers)) - advancedCheckers := make([]checkers.AdvancedChecker, 0, len(enabledCheckers)/2) - - for _, name := range enabledCheckers { - ch, ok := checkers.Get(name) - if !ok { - return nil, nil, fmt.Errorf("unknown checker %q", name) - } - - switch c := ch.(type) { - case *checkers.BoolCompare: - c.SetIgnoreCustomTypes(cfg.BoolCompare.IgnoreCustomTypes) - - case *checkers.ExpectedActual: - c.SetExpVarPattern(cfg.ExpectedActual.ExpVarPattern.Regexp) - - case *checkers.RequireError: - c.SetFnPattern(cfg.RequireError.FnPattern.Regexp) - - case *checkers.SuiteExtraAssertCall: - c.SetMode(cfg.SuiteExtraAssertCall.Mode) - } - - switch casted := ch.(type) { - case checkers.RegularChecker: - regularCheckers = append(regularCheckers, casted) - case checkers.AdvancedChecker: - advancedCheckers = append(advancedCheckers, casted) - } - } - - return regularCheckers, advancedCheckers, nil -} diff --git a/vendor/github.com/Antonboom/testifylint/internal/analysisutil/doc.go b/vendor/github.com/Antonboom/testifylint/internal/analysisutil/doc.go deleted file mode 100644 index b57cbd938..000000000 --- a/vendor/github.com/Antonboom/testifylint/internal/analysisutil/doc.go +++ /dev/null @@ -1,9 +0,0 @@ -// Package analysisutil contains functions common for `analyzer` and `internal/checkers` packages. -// In addition, it is intended to "lighten" these packages. -// -// If the function is common to several packages, or it makes sense to test it separately without -// "polluting" the target package with tests of private functionality, then you can put function in this package. -// -// It's important to avoid dependency on `golang.org/x/tools/go/analysis` in the helpers API. -// This makes the API "narrower" and also allows you to test functions without some "abstraction leaks". -package analysisutil diff --git a/vendor/github.com/Antonboom/testifylint/internal/analysisutil/file.go b/vendor/github.com/Antonboom/testifylint/internal/analysisutil/file.go deleted file mode 100644 index 3fc1f42b8..000000000 --- a/vendor/github.com/Antonboom/testifylint/internal/analysisutil/file.go +++ /dev/null @@ -1,28 +0,0 @@ -package analysisutil - -import ( - "go/ast" - "strconv" -) - -// Imports tells if the file imports at least one of the packages. -// If no packages provided then function returns false. -func Imports(file *ast.File, pkgs ...string) bool { - for _, i := range file.Imports { - if i.Path == nil { - continue - } - - path, err := strconv.Unquote(i.Path.Value) - if err != nil { - continue - } - // NOTE(a.telyshev): Don't use `slices.Contains` to keep the minimum module version 1.20. 
- for _, pkg := range pkgs { // Small O(n). - if pkg == path { - return true - } - } - } - return false -} diff --git a/vendor/github.com/Antonboom/testifylint/internal/analysisutil/format.go b/vendor/github.com/Antonboom/testifylint/internal/analysisutil/format.go deleted file mode 100644 index fcb4b847f..000000000 --- a/vendor/github.com/Antonboom/testifylint/internal/analysisutil/format.go +++ /dev/null @@ -1,34 +0,0 @@ -package analysisutil - -import ( - "bytes" - "go/ast" - "go/format" - "go/token" -) - -// NodeString is a more powerful analogue of types.ExprString. -// Return empty string if node AST is invalid. -func NodeString(fset *token.FileSet, node ast.Node) string { - if v := formatNode(fset, node); v != nil { - return v.String() - } - return "" -} - -// NodeBytes works as NodeString but returns a byte slice. -// Return nil if node AST is invalid. -func NodeBytes(fset *token.FileSet, node ast.Node) []byte { - if v := formatNode(fset, node); v != nil { - return v.Bytes() - } - return nil -} - -func formatNode(fset *token.FileSet, node ast.Node) *bytes.Buffer { - buf := new(bytes.Buffer) - if err := format.Node(buf, fset, node); err != nil { - return nil - } - return buf -} diff --git a/vendor/github.com/Antonboom/testifylint/internal/analysisutil/object.go b/vendor/github.com/Antonboom/testifylint/internal/analysisutil/object.go deleted file mode 100644 index 4e0346d2b..000000000 --- a/vendor/github.com/Antonboom/testifylint/internal/analysisutil/object.go +++ /dev/null @@ -1,34 +0,0 @@ -package analysisutil - -import ( - "go/ast" - "go/types" -) - -// ObjectOf works in context of Golang package and returns types.Object for the given object's package and name. -// The search is based on the provided package and its dependencies (imports). -// Returns nil if the object is not found. -func ObjectOf(pkg *types.Package, objPkg, objName string) types.Object { - if pkg.Path() == objPkg { - return pkg.Scope().Lookup(objName) - } - - for _, i := range pkg.Imports() { - if trimVendor(i.Path()) == objPkg { - return i.Scope().Lookup(objName) - } - } - return nil -} - -// IsObj returns true if expression is identifier which notes to given types.Object. -// Useful in combination with types.Universe objects. -func IsObj(typesInfo *types.Info, expr ast.Expr, expected types.Object) bool { - id, ok := expr.(*ast.Ident) - if !ok { - return false - } - - obj := typesInfo.ObjectOf(id) - return obj == expected -} diff --git a/vendor/github.com/Antonboom/testifylint/internal/analysisutil/pkg.go b/vendor/github.com/Antonboom/testifylint/internal/analysisutil/pkg.go deleted file mode 100644 index d34be5d34..000000000 --- a/vendor/github.com/Antonboom/testifylint/internal/analysisutil/pkg.go +++ /dev/null @@ -1,19 +0,0 @@ -package analysisutil - -import ( - "go/types" - "strings" -) - -// IsPkg checks that package has corresponding objName and path. -// Supports vendored packages. 
-func IsPkg(pkg *types.Package, name, path string) bool { - return pkg.Name() == name && trimVendor(pkg.Path()) == path -} - -func trimVendor(path string) string { - if strings.HasPrefix(path, "vendor/") { - return path[len("vendor/"):] - } - return path -} diff --git a/vendor/github.com/Antonboom/testifylint/internal/checkers/blank_import.go b/vendor/github.com/Antonboom/testifylint/internal/checkers/blank_import.go deleted file mode 100644 index 403691e27..000000000 --- a/vendor/github.com/Antonboom/testifylint/internal/checkers/blank_import.go +++ /dev/null @@ -1,69 +0,0 @@ -package checkers - -import ( - "fmt" - "strconv" - - "golang.org/x/tools/go/analysis" - "golang.org/x/tools/go/ast/inspector" - - "github.com/Antonboom/testifylint/internal/testify" -) - -// BlankImport detects useless blank imports of testify's packages. -// These imports are useless since testify doesn't do any magic with init() function. -// -// The checker detects situations like -// -// import ( -// "testing" -// -// _ "github.com/stretchr/testify" -// _ "github.com/stretchr/testify/assert" -// _ "github.com/stretchr/testify/http" -// _ "github.com/stretchr/testify/mock" -// _ "github.com/stretchr/testify/require" -// _ "github.com/stretchr/testify/suite" -// ) -// -// and requires -// -// import ( -// "testing" -// ) -type BlankImport struct{} - -// NewBlankImport constructs BlankImport checker. -func NewBlankImport() BlankImport { return BlankImport{} } -func (BlankImport) Name() string { return "blank-import" } - -func (checker BlankImport) Check(pass *analysis.Pass, _ *inspector.Inspector) (diagnostics []analysis.Diagnostic) { - for _, file := range pass.Files { - for _, imp := range file.Imports { - if imp.Name == nil || imp.Name.Name != "_" { - continue - } - - pkg, err := strconv.Unquote(imp.Path.Value) - if err != nil { - continue - } - if _, ok := packagesNotIntendedForBlankImport[pkg]; !ok { - continue - } - - msg := fmt.Sprintf("avoid blank import of %s as it does nothing", pkg) - diagnostics = append(diagnostics, *newDiagnostic(checker.Name(), imp, msg, nil)) - } - } - return diagnostics -} - -var packagesNotIntendedForBlankImport = map[string]struct{}{ - testify.ModulePath: {}, - testify.AssertPkgPath: {}, - testify.HTTPPkgPath: {}, - testify.MockPkgPath: {}, - testify.RequirePkgPath: {}, - testify.SuitePkgPath: {}, -} diff --git a/vendor/github.com/Antonboom/testifylint/internal/checkers/bool_compare.go b/vendor/github.com/Antonboom/testifylint/internal/checkers/bool_compare.go deleted file mode 100644 index 43907123b..000000000 --- a/vendor/github.com/Antonboom/testifylint/internal/checkers/bool_compare.go +++ /dev/null @@ -1,304 +0,0 @@ -package checkers - -import ( - "go/ast" - "go/token" - "go/types" - - "golang.org/x/tools/go/analysis" - - "github.com/Antonboom/testifylint/internal/analysisutil" -) - -// BoolCompare detects situations like -// -// assert.Equal(t, false, result) -// assert.EqualValues(t, false, result) -// assert.Exactly(t, false, result) -// assert.NotEqual(t, true, result) -// assert.NotEqualValues(t, true, result) -// assert.False(t, !result) -// assert.True(t, result == true) -// ... -// -// and requires -// -// assert.False(t, result) -// assert.True(t, result) -type BoolCompare struct { - ignoreCustomTypes bool -} - -// NewBoolCompare constructs BoolCompare checker. 
-func NewBoolCompare() *BoolCompare { return new(BoolCompare) } -func (BoolCompare) Name() string { return "bool-compare" } - -func (checker *BoolCompare) SetIgnoreCustomTypes(v bool) *BoolCompare { - checker.ignoreCustomTypes = v - return checker -} - -func (checker BoolCompare) Check(pass *analysis.Pass, call *CallMeta) *analysis.Diagnostic { - newBoolCast := func(e ast.Expr) ast.Expr { - return &ast.CallExpr{Fun: &ast.Ident{Name: "bool"}, Args: []ast.Expr{e}} - } - - newUseFnDiagnostic := func(proposed string, survivingArg ast.Expr, replaceStart, replaceEnd token.Pos) *analysis.Diagnostic { - if !isBuiltinBool(pass, survivingArg) { - if checker.ignoreCustomTypes { - return nil - } - survivingArg = newBoolCast(survivingArg) - } - return newUseFunctionDiagnostic(checker.Name(), call, proposed, - newSuggestedFuncReplacement(call, proposed, analysis.TextEdit{ - Pos: replaceStart, - End: replaceEnd, - NewText: analysisutil.NodeBytes(pass.Fset, survivingArg), - }), - ) - } - - newUseTrueDiagnostic := func(survivingArg ast.Expr, replaceStart, replaceEnd token.Pos) *analysis.Diagnostic { - return newUseFnDiagnostic("True", survivingArg, replaceStart, replaceEnd) - } - - newUseFalseDiagnostic := func(survivingArg ast.Expr, replaceStart, replaceEnd token.Pos) *analysis.Diagnostic { - return newUseFnDiagnostic("False", survivingArg, replaceStart, replaceEnd) - } - - newNeedSimplifyDiagnostic := func(survivingArg ast.Expr, replaceStart, replaceEnd token.Pos) *analysis.Diagnostic { - if !isBuiltinBool(pass, survivingArg) { - if checker.ignoreCustomTypes { - return nil - } - survivingArg = newBoolCast(survivingArg) - } - return newDiagnostic(checker.Name(), call, "need to simplify the assertion", - &analysis.SuggestedFix{ - Message: "Simplify the assertion", - TextEdits: []analysis.TextEdit{{ - Pos: replaceStart, - End: replaceEnd, - NewText: analysisutil.NodeBytes(pass.Fset, survivingArg), - }}, - }, - ) - } - - switch call.Fn.NameFTrimmed { - case "Equal", "EqualValues", "Exactly": - if len(call.Args) < 2 { - return nil - } - - arg1, arg2 := call.Args[0], call.Args[1] - if anyCondSatisfaction(pass, isEmptyInterface, arg1, arg2) { - return nil - } - if anyCondSatisfaction(pass, isBoolOverride, arg1, arg2) { - return nil - } - - t1, t2 := isUntypedTrue(pass, arg1), isUntypedTrue(pass, arg2) - f1, f2 := isUntypedFalse(pass, arg1), isUntypedFalse(pass, arg2) - - switch { - case xor(t1, t2): - survivingArg, _ := anyVal([]bool{t1, t2}, arg2, arg1) - if call.Fn.NameFTrimmed == "Exactly" && !isBuiltinBool(pass, survivingArg) { - // NOTE(a.telyshev): `Exactly` assumes no type casting. - return nil - } - return newUseTrueDiagnostic(survivingArg, arg1.Pos(), arg2.End()) - - case xor(f1, f2): - survivingArg, _ := anyVal([]bool{f1, f2}, arg2, arg1) - if call.Fn.NameFTrimmed == "Exactly" && !isBuiltinBool(pass, survivingArg) { - // NOTE(a.telyshev): `Exactly` assumes no type casting. 
- return nil - } - return newUseFalseDiagnostic(survivingArg, arg1.Pos(), arg2.End()) - } - - case "NotEqual", "NotEqualValues": - if len(call.Args) < 2 { - return nil - } - - arg1, arg2 := call.Args[0], call.Args[1] - if anyCondSatisfaction(pass, isEmptyInterface, arg1, arg2) { - return nil - } - if anyCondSatisfaction(pass, isBoolOverride, arg1, arg2) { - return nil - } - - t1, t2 := isUntypedTrue(pass, arg1), isUntypedTrue(pass, arg2) - f1, f2 := isUntypedFalse(pass, arg1), isUntypedFalse(pass, arg2) - - switch { - case xor(t1, t2): - survivingArg, _ := anyVal([]bool{t1, t2}, arg2, arg1) - return newUseFalseDiagnostic(survivingArg, arg1.Pos(), arg2.End()) - - case xor(f1, f2): - survivingArg, _ := anyVal([]bool{f1, f2}, arg2, arg1) - return newUseTrueDiagnostic(survivingArg, arg1.Pos(), arg2.End()) - } - - case "True": - if len(call.Args) < 1 { - return nil - } - expr := call.Args[0] - - { - arg1, ok1 := isComparisonWithTrue(pass, expr, token.EQL) - arg2, ok2 := isComparisonWithFalse(pass, expr, token.NEQ) - - survivingArg, ok := anyVal([]bool{ok1, ok2}, arg1, arg2) - if ok && !isEmptyInterface(pass, survivingArg) { - return newNeedSimplifyDiagnostic(survivingArg, expr.Pos(), expr.End()) - } - } - - { - arg1, ok1 := isComparisonWithTrue(pass, expr, token.NEQ) - arg2, ok2 := isComparisonWithFalse(pass, expr, token.EQL) - arg3, ok3 := isNegation(expr) - - survivingArg, ok := anyVal([]bool{ok1, ok2, ok3}, arg1, arg2, arg3) - if ok && !isEmptyInterface(pass, survivingArg) { - return newUseFalseDiagnostic(survivingArg, expr.Pos(), expr.End()) - } - } - - case "False": - if len(call.Args) < 1 { - return nil - } - expr := call.Args[0] - - { - arg1, ok1 := isComparisonWithTrue(pass, expr, token.EQL) - arg2, ok2 := isComparisonWithFalse(pass, expr, token.NEQ) - - survivingArg, ok := anyVal([]bool{ok1, ok2}, arg1, arg2) - if ok && !isEmptyInterface(pass, survivingArg) { - return newNeedSimplifyDiagnostic(survivingArg, expr.Pos(), expr.End()) - } - } - - { - arg1, ok1 := isComparisonWithTrue(pass, expr, token.NEQ) - arg2, ok2 := isComparisonWithFalse(pass, expr, token.EQL) - arg3, ok3 := isNegation(expr) - - survivingArg, ok := anyVal([]bool{ok1, ok2, ok3}, arg1, arg2, arg3) - if ok && !isEmptyInterface(pass, survivingArg) { - return newUseTrueDiagnostic(survivingArg, expr.Pos(), expr.End()) - } - } - } - return nil -} - -func isEmptyInterface(pass *analysis.Pass, expr ast.Expr) bool { - t, ok := pass.TypesInfo.Types[expr] - if !ok { - return false - } - - iface, ok := t.Type.Underlying().(*types.Interface) - return ok && iface.NumMethods() == 0 -} - -func isBuiltinBool(pass *analysis.Pass, e ast.Expr) bool { - basicType, ok := pass.TypesInfo.TypeOf(e).(*types.Basic) - return ok && basicType.Kind() == types.Bool -} - -func isBoolOverride(pass *analysis.Pass, e ast.Expr) bool { - namedType, ok := pass.TypesInfo.TypeOf(e).(*types.Named) - return ok && namedType.Obj().Name() == "bool" -} - -var ( - falseObj = types.Universe.Lookup("false") - trueObj = types.Universe.Lookup("true") -) - -func isUntypedTrue(pass *analysis.Pass, e ast.Expr) bool { - return analysisutil.IsObj(pass.TypesInfo, e, trueObj) -} - -func isUntypedFalse(pass *analysis.Pass, e ast.Expr) bool { - return analysisutil.IsObj(pass.TypesInfo, e, falseObj) -} - -func isComparisonWithTrue(pass *analysis.Pass, e ast.Expr, op token.Token) (ast.Expr, bool) { - return isComparisonWith(pass, e, isUntypedTrue, op) -} - -func isComparisonWithFalse(pass *analysis.Pass, e ast.Expr, op token.Token) (ast.Expr, bool) { - return 
isComparisonWith(pass, e, isUntypedFalse, op) -} - -type predicate func(pass *analysis.Pass, e ast.Expr) bool - -func isComparisonWith(pass *analysis.Pass, e ast.Expr, predicate predicate, op token.Token) (ast.Expr, bool) { - be, ok := e.(*ast.BinaryExpr) - if !ok { - return nil, false - } - if be.Op != op { - return nil, false - } - - t1, t2 := predicate(pass, be.X), predicate(pass, be.Y) - if xor(t1, t2) { - if t1 { - return be.Y, true - } - return be.X, true - } - return nil, false -} - -func isNegation(e ast.Expr) (ast.Expr, bool) { - ue, ok := e.(*ast.UnaryExpr) - if !ok { - return nil, false - } - return ue.X, ue.Op == token.NOT -} - -func xor(a, b bool) bool { - return a != b -} - -// anyVal returns the first value[i] for which bools[i] is true. -func anyVal[T any](bools []bool, vals ...T) (T, bool) { - if len(bools) != len(vals) { - panic("inconsistent usage of valOr") //nolint:forbidigo // Does not depend on the code being analyzed. - } - - for i, b := range bools { - if b { - return vals[i], true - } - } - - var _default T - return _default, false -} - -func anyCondSatisfaction(pass *analysis.Pass, p predicate, vals ...ast.Expr) bool { - for _, v := range vals { - if p(pass, v) { - return true - } - } - return false -} diff --git a/vendor/github.com/Antonboom/testifylint/internal/checkers/call_meta.go b/vendor/github.com/Antonboom/testifylint/internal/checkers/call_meta.go deleted file mode 100644 index 44eed49a6..000000000 --- a/vendor/github.com/Antonboom/testifylint/internal/checkers/call_meta.go +++ /dev/null @@ -1,136 +0,0 @@ -package checkers - -import ( - "go/ast" - "go/types" - "strings" - - "golang.org/x/tools/go/analysis" - - "github.com/Antonboom/testifylint/internal/analysisutil" - "github.com/Antonboom/testifylint/internal/testify" -) - -// CallMeta stores meta info about assertion function/method call, for example -// -// assert.Equal(t, 42, result, "helpful comment") -type CallMeta struct { - // Range contains start and end position of assertion call. - analysis.Range - // IsPkg true if this is package (not object) call. - IsPkg bool - // IsAssert true if this is "testify/assert" package (or object) call. - IsAssert bool - // Selector is the AST expression of "assert.Equal". - Selector *ast.SelectorExpr - // SelectorXStr is a string representation of Selector's left part – value before point, e.g. "assert". - SelectorXStr string - // Fn stores meta info about assertion function itself. - Fn FnMeta - // Args stores assertion call arguments but without `t *testing.T` argument. - // E.g [42, result, "helpful comment"]. - Args []ast.Expr - // ArgsRaw stores assertion call initial arguments. - // E.g [t, 42, result, "helpful comment"]. - ArgsRaw []ast.Expr -} - -func (c CallMeta) String() string { - return c.SelectorXStr + "." + c.Fn.Name -} - -// FnMeta stores meta info about assertion function itself, for example "Equal". -type FnMeta struct { - // Range contains start and end position of function Name. - analysis.Range - // Name is a function name. - Name string - // NameFTrimmed is a function name without "f" suffix. - NameFTrimmed string - // IsFmt is true if function is formatted, e.g. "Equalf". - IsFmt bool -} - -// NewCallMeta returns meta information about testify assertion call. -// Returns nil if ast.CallExpr is not testify call. 
-func NewCallMeta(pass *analysis.Pass, ce *ast.CallExpr) *CallMeta { - se, ok := ce.Fun.(*ast.SelectorExpr) - if !ok || se.Sel == nil { - return nil - } - fnName := se.Sel.Name - - initiatorPkg, isPkgCall := func() (*types.Package, bool) { - // Examples: - // s.Assert -> method of *suite.Suite -> package suite ("vendor/github.com/stretchr/testify/suite") - // s.Assert().Equal -> method of *assert.Assertions -> package assert ("vendor/github.com/stretchr/testify/assert") - // s.Equal -> method of *assert.Assertions -> package assert ("vendor/github.com/stretchr/testify/assert") - // reqObj.Falsef -> method of *require.Assertions -> package require ("vendor/github.com/stretchr/testify/require") - if sel, ok := pass.TypesInfo.Selections[se]; ok { - return sel.Obj().Pkg(), false - } - - // Examples: - // assert.False -> assert -> package assert ("vendor/github.com/stretchr/testify/assert") - // require.NotEqualf -> require -> package require ("vendor/github.com/stretchr/testify/require") - if id, ok := se.X.(*ast.Ident); ok { - if selObj := pass.TypesInfo.ObjectOf(id); selObj != nil { - if pkg, ok := selObj.(*types.PkgName); ok { - return pkg.Imported(), true - } - } - } - return nil, false - }() - if initiatorPkg == nil { - return nil - } - - isAssert := analysisutil.IsPkg(initiatorPkg, testify.AssertPkgName, testify.AssertPkgPath) - isRequire := analysisutil.IsPkg(initiatorPkg, testify.RequirePkgName, testify.RequirePkgPath) - if !(isAssert || isRequire) { - return nil - } - - return &CallMeta{ - Range: ce, - IsPkg: isPkgCall, - IsAssert: isAssert, - Selector: se, - SelectorXStr: analysisutil.NodeString(pass.Fset, se.X), - Fn: FnMeta{ - Range: se.Sel, - Name: fnName, - NameFTrimmed: strings.TrimSuffix(fnName, "f"), - IsFmt: strings.HasSuffix(fnName, "f"), - }, - Args: trimTArg(pass, ce.Args), - ArgsRaw: ce.Args, - } -} - -func trimTArg(pass *analysis.Pass, args []ast.Expr) []ast.Expr { - if len(args) == 0 { - return args - } - - if isTestingTPtr(pass, args[0]) { - return args[1:] - } - return args -} - -func isTestingTPtr(pass *analysis.Pass, arg ast.Expr) bool { - assertTestingTObj := analysisutil.ObjectOf(pass.Pkg, testify.AssertPkgPath, "TestingT") - requireTestingTObj := analysisutil.ObjectOf(pass.Pkg, testify.RequirePkgPath, "TestingT") - - argType := pass.TypesInfo.TypeOf(arg) - if argType == nil { - return false - } - - return ((assertTestingTObj != nil) && - types.Implements(argType, assertTestingTObj.Type().Underlying().(*types.Interface))) || - ((requireTestingTObj != nil) && - types.Implements(argType, requireTestingTObj.Type().Underlying().(*types.Interface))) -} diff --git a/vendor/github.com/Antonboom/testifylint/internal/checkers/checker.go b/vendor/github.com/Antonboom/testifylint/internal/checkers/checker.go deleted file mode 100644 index ac23af6f6..000000000 --- a/vendor/github.com/Antonboom/testifylint/internal/checkers/checker.go +++ /dev/null @@ -1,23 +0,0 @@ -package checkers - -import ( - "golang.org/x/tools/go/analysis" - "golang.org/x/tools/go/ast/inspector" -) - -// Checker describes named checker. -type Checker interface { - Name() string -} - -// RegularChecker check assertion call presented in CallMeta form. -type RegularChecker interface { - Checker - Check(pass *analysis.Pass, call *CallMeta) *analysis.Diagnostic -} - -// AdvancedChecker implements complex Check logic different from trivial CallMeta check. 
-type AdvancedChecker interface { - Checker - Check(pass *analysis.Pass, inspector *inspector.Inspector) []analysis.Diagnostic -} diff --git a/vendor/github.com/Antonboom/testifylint/internal/checkers/checkers_registry.go b/vendor/github.com/Antonboom/testifylint/internal/checkers/checkers_registry.go deleted file mode 100644 index e34a21bf9..000000000 --- a/vendor/github.com/Antonboom/testifylint/internal/checkers/checkers_registry.go +++ /dev/null @@ -1,105 +0,0 @@ -package checkers - -import ( - "sort" -) - -// registry stores checkers meta information in checkers' priority order. -var registry = checkersRegistry{ - // Regular checkers. - {factory: asCheckerFactory(NewFloatCompare), enabledByDefault: true}, - {factory: asCheckerFactory(NewBoolCompare), enabledByDefault: true}, - {factory: asCheckerFactory(NewEmpty), enabledByDefault: true}, - {factory: asCheckerFactory(NewLen), enabledByDefault: true}, - {factory: asCheckerFactory(NewCompares), enabledByDefault: true}, - {factory: asCheckerFactory(NewErrorNil), enabledByDefault: true}, - {factory: asCheckerFactory(NewNilCompare), enabledByDefault: true}, - {factory: asCheckerFactory(NewErrorIsAs), enabledByDefault: true}, - {factory: asCheckerFactory(NewExpectedActual), enabledByDefault: true}, - {factory: asCheckerFactory(NewSuiteExtraAssertCall), enabledByDefault: true}, - {factory: asCheckerFactory(NewSuiteDontUsePkg), enabledByDefault: true}, - {factory: asCheckerFactory(NewUselessAssert), enabledByDefault: true}, - // Advanced checkers. - {factory: asCheckerFactory(NewBlankImport), enabledByDefault: true}, - {factory: asCheckerFactory(NewGoRequire), enabledByDefault: true}, - {factory: asCheckerFactory(NewRequireError), enabledByDefault: true}, - {factory: asCheckerFactory(NewSuiteTHelper), enabledByDefault: false}, -} - -type checkersRegistry []checkerMeta - -type checkerMeta struct { - factory checkerFactory - enabledByDefault bool -} - -type checkerFactory func() Checker - -func asCheckerFactory[T Checker](fn func() T) checkerFactory { - return func() Checker { - return fn() - } -} - -func (r checkersRegistry) get(name string) (m checkerMeta, priority int, found bool) { - for i, meta := range r { - if meta.factory().Name() == name { - return meta, i, true - } - } - return checkerMeta{}, 0, false -} - -// All returns all checkers names sorted by checker's priority. -func All() []string { - result := make([]string, 0, len(registry)) - for _, meta := range registry { - result = append(result, meta.factory().Name()) - } - return result -} - -// EnabledByDefault returns checkers enabled by default sorted by checker's priority. -func EnabledByDefault() []string { - result := make([]string, 0, len(registry)) - for _, meta := range registry { - if meta.enabledByDefault { - result = append(result, meta.factory().Name()) - } - } - return result -} - -// Get returns new checker instance by checker's name. -func Get(name string) (Checker, bool) { - meta, _, ok := registry.get(name) - if ok { - return meta.factory(), true - } - return nil, false -} - -// IsKnown checks if there is a checker with that name. -func IsKnown(name string) bool { - _, _, ok := registry.get(name) - return ok -} - -// IsEnabledByDefault returns true if a checker is enabled by default. -// Returns false if there is no such checker in the registry. -// For pre-validation use Get or IsKnown. 
-func IsEnabledByDefault(name string) bool { - meta, _, ok := registry.get(name) - return ok && meta.enabledByDefault -} - -// SortByPriority mutates the input checkers names by sorting them in checker priority order. -// Ignores unknown checkers. For pre-validation use Get or IsKnown. -func SortByPriority(checkers []string) { - sort.Slice(checkers, func(i, j int) bool { - lhs, rhs := checkers[i], checkers[j] - _, lhsPriority, _ := registry.get(lhs) - _, rhsPriority, _ := registry.get(rhs) - return lhsPriority < rhsPriority - }) -} diff --git a/vendor/github.com/Antonboom/testifylint/internal/checkers/compares.go b/vendor/github.com/Antonboom/testifylint/internal/checkers/compares.go deleted file mode 100644 index 336a34512..000000000 --- a/vendor/github.com/Antonboom/testifylint/internal/checkers/compares.go +++ /dev/null @@ -1,96 +0,0 @@ -package checkers - -import ( - "bytes" - "go/ast" - "go/token" - - "golang.org/x/tools/go/analysis" - - "github.com/Antonboom/testifylint/internal/analysisutil" -) - -// Compares detects situations like -// -// assert.True(t, a == b) -// assert.True(t, a != b) -// assert.True(t, a > b) -// assert.True(t, a >= b) -// assert.True(t, a < b) -// assert.True(t, a <= b) -// assert.False(t, a == b) -// ... -// -// and requires -// -// assert.Equal(t, a, b) -// assert.NotEqual(t, a, b) -// assert.Greater(t, a, b) -// assert.GreaterOrEqual(t, a, b) -// assert.Less(t, a, b) -// assert.LessOrEqual(t, a, b) -type Compares struct{} - -// NewCompares constructs Compares checker. -func NewCompares() Compares { return Compares{} } -func (Compares) Name() string { return "compares" } - -func (checker Compares) Check(pass *analysis.Pass, call *CallMeta) *analysis.Diagnostic { - if len(call.Args) < 1 { - return nil - } - - be, ok := call.Args[0].(*ast.BinaryExpr) - if !ok { - return nil - } - - var tokenToProposedFn map[token.Token]string - - switch call.Fn.NameFTrimmed { - case "True": - tokenToProposedFn = tokenToProposedFnInsteadOfTrue - case "False": - tokenToProposedFn = tokenToProposedFnInsteadOfFalse - default: - return nil - } - - if proposedFn, ok := tokenToProposedFn[be.Op]; ok { - a, b := be.X, be.Y - return newUseFunctionDiagnostic(checker.Name(), call, proposedFn, - newSuggestedFuncReplacement(call, proposedFn, analysis.TextEdit{ - Pos: be.X.Pos(), - End: be.Y.End(), - NewText: formatAsCallArgs(pass, a, b), - }), - ) - } - return nil -} - -var tokenToProposedFnInsteadOfTrue = map[token.Token]string{ - token.EQL: "Equal", - token.NEQ: "NotEqual", - token.GTR: "Greater", - token.GEQ: "GreaterOrEqual", - token.LSS: "Less", - token.LEQ: "LessOrEqual", -} - -var tokenToProposedFnInsteadOfFalse = map[token.Token]string{ - token.EQL: "NotEqual", - token.NEQ: "Equal", - token.GTR: "LessOrEqual", - token.GEQ: "Less", - token.LSS: "GreaterOrEqual", - token.LEQ: "Greater", -} - -// formatAsCallArgs joins a and b and return bytes like `a, b`. 
-func formatAsCallArgs(pass *analysis.Pass, a, b ast.Node) []byte { - return bytes.Join([][]byte{ - analysisutil.NodeBytes(pass.Fset, a), - analysisutil.NodeBytes(pass.Fset, b), - }, []byte(", ")) -} diff --git a/vendor/github.com/Antonboom/testifylint/internal/checkers/diagnostic.go b/vendor/github.com/Antonboom/testifylint/internal/checkers/diagnostic.go deleted file mode 100644 index 4ab69c69b..000000000 --- a/vendor/github.com/Antonboom/testifylint/internal/checkers/diagnostic.go +++ /dev/null @@ -1,60 +0,0 @@ -package checkers - -import ( - "fmt" - - "golang.org/x/tools/go/analysis" -) - -func newUseFunctionDiagnostic( - checker string, - call *CallMeta, - proposedFn string, - fix *analysis.SuggestedFix, -) *analysis.Diagnostic { - f := proposedFn - if call.Fn.IsFmt { - f += "f" - } - msg := fmt.Sprintf("use %s.%s", call.SelectorXStr, f) - - return newDiagnostic(checker, call, msg, fix) -} - -func newDiagnostic( - checker string, - rng analysis.Range, - msg string, - fix *analysis.SuggestedFix, -) *analysis.Diagnostic { - d := analysis.Diagnostic{ - Pos: rng.Pos(), - End: rng.End(), - Category: checker, - Message: checker + ": " + msg, - } - if fix != nil { - d.SuggestedFixes = []analysis.SuggestedFix{*fix} - } - return &d -} - -func newSuggestedFuncReplacement( - call *CallMeta, - proposedFn string, - additionalEdits ...analysis.TextEdit, -) *analysis.SuggestedFix { - if call.Fn.IsFmt { - proposedFn += "f" - } - return &analysis.SuggestedFix{ - Message: fmt.Sprintf("Replace `%s` with `%s`", call.Fn.Name, proposedFn), - TextEdits: append([]analysis.TextEdit{ - { - Pos: call.Fn.Pos(), - End: call.Fn.End(), - NewText: []byte(proposedFn), - }, - }, additionalEdits...), - } -} diff --git a/vendor/github.com/Antonboom/testifylint/internal/checkers/empty.go b/vendor/github.com/Antonboom/testifylint/internal/checkers/empty.go deleted file mode 100644 index 5ad371bb4..000000000 --- a/vendor/github.com/Antonboom/testifylint/internal/checkers/empty.go +++ /dev/null @@ -1,172 +0,0 @@ -package checkers - -import ( - "fmt" - "go/ast" - "go/token" - "go/types" - - "golang.org/x/tools/go/analysis" - - "github.com/Antonboom/testifylint/internal/analysisutil" -) - -// Empty detects situations like -// -// assert.Len(t, arr, 0) -// assert.Equal(t, 0, len(arr)) -// assert.EqualValues(t, 0, len(arr)) -// assert.Exactly(t, 0, len(arr)) -// assert.LessOrEqual(t, len(arr), 0) -// assert.GreaterOrEqual(t, 0, len(arr)) -// assert.Less(t, len(arr), 0) -// assert.Greater(t, 0, len(arr)) -// assert.Less(t, len(arr), 1) -// assert.Greater(t, 1, len(arr)) -// -// assert.NotEqual(t, 0, len(arr)) -// assert.NotEqualValues(t, 0, len(arr)) -// assert.Less(t, 0, len(arr)) -// assert.Greater(t, len(arr), 0) -// -// and requires -// -// assert.Empty(t, arr) -// assert.NotEmpty(t, arr) -type Empty struct{} - -// NewEmpty constructs Empty checker. 
-func NewEmpty() Empty { return Empty{} } -func (Empty) Name() string { return "empty" } - -func (checker Empty) Check(pass *analysis.Pass, call *CallMeta) *analysis.Diagnostic { - if d := checker.checkEmpty(pass, call); d != nil { - return d - } - return checker.checkNotEmpty(pass, call) -} - -func (checker Empty) checkEmpty(pass *analysis.Pass, call *CallMeta) *analysis.Diagnostic { //nolint:gocognit - newUseEmptyDiagnostic := func(replaceStart, replaceEnd token.Pos, replaceWith ast.Expr) *analysis.Diagnostic { - const proposed = "Empty" - return newUseFunctionDiagnostic(checker.Name(), call, proposed, - newSuggestedFuncReplacement(call, proposed, analysis.TextEdit{ - Pos: replaceStart, - End: replaceEnd, - NewText: analysisutil.NodeBytes(pass.Fset, replaceWith), - }), - ) - } - - if len(call.Args) < 2 { - return nil - } - a, b := call.Args[0], call.Args[1] - - switch call.Fn.NameFTrimmed { - case "Len": - if isZero(b) { - return newUseEmptyDiagnostic(a.Pos(), b.End(), a) - } - - case "Equal", "EqualValues", "Exactly": - arg1, ok1 := isLenCallAndZero(pass, a, b) - arg2, ok2 := isLenCallAndZero(pass, b, a) - - if lenArg, ok := anyVal([]bool{ok1, ok2}, arg1, arg2); ok { - return newUseEmptyDiagnostic(a.Pos(), b.End(), lenArg) - } - - case "LessOrEqual": - if lenArg, ok := isBuiltinLenCall(pass, a); ok && isZero(b) { - return newUseEmptyDiagnostic(a.Pos(), b.End(), lenArg) - } - - case "GreaterOrEqual": - if lenArg, ok := isBuiltinLenCall(pass, b); ok && isZero(a) { - return newUseEmptyDiagnostic(a.Pos(), b.End(), lenArg) - } - - case "Less": - if lenArg, ok := isBuiltinLenCall(pass, a); ok && (isOne(b) || isZero(b)) { - return newUseEmptyDiagnostic(a.Pos(), b.End(), lenArg) - } - - case "Greater": - if lenArg, ok := isBuiltinLenCall(pass, b); ok && (isOne(a) || isZero(a)) { - return newUseEmptyDiagnostic(a.Pos(), b.End(), lenArg) - } - } - return nil -} - -func (checker Empty) checkNotEmpty(pass *analysis.Pass, call *CallMeta) *analysis.Diagnostic { //nolint:gocognit - newUseNotEmptyDiagnostic := func(replaceStart, replaceEnd token.Pos, replaceWith ast.Expr) *analysis.Diagnostic { - const proposed = "NotEmpty" - return newUseFunctionDiagnostic(checker.Name(), call, proposed, - newSuggestedFuncReplacement(call, proposed, analysis.TextEdit{ - Pos: replaceStart, - End: replaceEnd, - NewText: analysisutil.NodeBytes(pass.Fset, replaceWith), - }), - ) - } - - if len(call.Args) < 2 { - return nil - } - a, b := call.Args[0], call.Args[1] - - switch call.Fn.NameFTrimmed { - case "NotEqual", "NotEqualValues": - arg1, ok1 := isLenCallAndZero(pass, a, b) - arg2, ok2 := isLenCallAndZero(pass, b, a) - - if lenArg, ok := anyVal([]bool{ok1, ok2}, arg1, arg2); ok { - return newUseNotEmptyDiagnostic(a.Pos(), b.End(), lenArg) - } - - case "Less": - if lenArg, ok := isBuiltinLenCall(pass, b); ok && isZero(a) { - return newUseNotEmptyDiagnostic(a.Pos(), b.End(), lenArg) - } - - case "Greater": - if lenArg, ok := isBuiltinLenCall(pass, a); ok && isZero(b) { - return newUseNotEmptyDiagnostic(a.Pos(), b.End(), lenArg) - } - } - return nil -} - -var lenObj = types.Universe.Lookup("len") - -func isLenCallAndZero(pass *analysis.Pass, a, b ast.Expr) (ast.Expr, bool) { - lenArg, ok := isBuiltinLenCall(pass, a) - return lenArg, ok && isZero(b) -} - -func isBuiltinLenCall(pass *analysis.Pass, e ast.Expr) (ast.Expr, bool) { - ce, ok := e.(*ast.CallExpr) - if !ok { - return nil, false - } - - if analysisutil.IsObj(pass.TypesInfo, ce.Fun, lenObj) && len(ce.Args) == 1 { - return ce.Args[0], true - } - return nil, false -} - 
-func isZero(e ast.Expr) bool { - return isIntNumber(e, 0) -} - -func isOne(e ast.Expr) bool { - return isIntNumber(e, 1) -} - -func isIntNumber(e ast.Expr, v int) bool { - bl, ok := e.(*ast.BasicLit) - return ok && bl.Kind == token.INT && bl.Value == fmt.Sprintf("%d", v) -} diff --git a/vendor/github.com/Antonboom/testifylint/internal/checkers/error_is_as.go b/vendor/github.com/Antonboom/testifylint/internal/checkers/error_is_as.go deleted file mode 100644 index 0363873a6..000000000 --- a/vendor/github.com/Antonboom/testifylint/internal/checkers/error_is_as.go +++ /dev/null @@ -1,166 +0,0 @@ -package checkers - -import ( - "fmt" - "go/ast" - "go/types" - - "golang.org/x/tools/go/analysis" - - "github.com/Antonboom/testifylint/internal/analysisutil" -) - -// ErrorIsAs detects situations like -// -// assert.Error(t, err, errSentinel) -// assert.NoError(t, err, errSentinel) -// assert.True(t, errors.Is(err, errSentinel)) -// assert.False(t, errors.Is(err, errSentinel)) -// assert.True(t, errors.As(err, &target)) -// -// and requires -// -// assert.ErrorIs(t, err, errSentinel) -// assert.NotErrorIs(t, err, errSentinel) -// assert.ErrorAs(t, err, &target) -// -// Also ErrorIsAs repeats go vet's "errorsas" check logic. -type ErrorIsAs struct{} - -// NewErrorIsAs constructs ErrorIsAs checker. -func NewErrorIsAs() ErrorIsAs { return ErrorIsAs{} } -func (ErrorIsAs) Name() string { return "error-is-as" } - -func (checker ErrorIsAs) Check(pass *analysis.Pass, call *CallMeta) *analysis.Diagnostic { - switch call.Fn.NameFTrimmed { - case "Error": - if len(call.Args) >= 2 && isError(pass, call.Args[1]) { - const proposed = "ErrorIs" - msg := fmt.Sprintf("invalid usage of %[1]s.Error, use %[1]s.%[2]s instead", call.SelectorXStr, proposed) - return newDiagnostic(checker.Name(), call, msg, newSuggestedFuncReplacement(call, proposed)) - } - - case "NoError": - if len(call.Args) >= 2 && isError(pass, call.Args[1]) { - const proposed = "NotErrorIs" - msg := fmt.Sprintf("invalid usage of %[1]s.NoError, use %[1]s.%[2]s instead", call.SelectorXStr, proposed) - return newDiagnostic(checker.Name(), call, msg, newSuggestedFuncReplacement(call, proposed)) - } - - case "True": - if len(call.Args) < 1 { - return nil - } - - ce, ok := call.Args[0].(*ast.CallExpr) - if !ok { - return nil - } - if len(ce.Args) != 2 { - return nil - } - - var proposed string - switch { - case isErrorsIsCall(pass, ce): - proposed = "ErrorIs" - case isErrorsAsCall(pass, ce): - proposed = "ErrorAs" - } - if proposed != "" { - return newUseFunctionDiagnostic(checker.Name(), call, proposed, - newSuggestedFuncReplacement(call, proposed, analysis.TextEdit{ - Pos: ce.Pos(), - End: ce.End(), - NewText: formatAsCallArgs(pass, ce.Args[0], ce.Args[1]), - }), - ) - } - - case "False": - if len(call.Args) < 1 { - return nil - } - - ce, ok := call.Args[0].(*ast.CallExpr) - if !ok { - return nil - } - if len(ce.Args) != 2 { - return nil - } - - if isErrorsIsCall(pass, ce) { - const proposed = "NotErrorIs" - return newUseFunctionDiagnostic(checker.Name(), call, proposed, - newSuggestedFuncReplacement(call, proposed, analysis.TextEdit{ - Pos: ce.Pos(), - End: ce.End(), - NewText: formatAsCallArgs(pass, ce.Args[0], ce.Args[1]), - }), - ) - } - - case "ErrorAs": - if len(call.Args) < 2 { - return nil - } - - // NOTE(a.telyshev): Logic below must be consistent with - // https://cs.opensource.google/go/x/tools/+/master:go/analysis/passes/errorsas/errorsas.go - - var ( - defaultReport = fmt.Sprintf("second argument to %s must be a non-nil pointer to either a 
type that implements error, or to any interface type", call) //nolint:lll - errorPtrReport = fmt.Sprintf("second argument to %s should not be *error", call) - ) - - target := call.Args[1] - - if isEmptyInterface(pass, target) { - // `any` interface case. It is always allowed, since it often indicates - // a value forwarded from another source. - return nil - } - - tv, ok := pass.TypesInfo.Types[target] - if !ok { - return nil - } - - pt, ok := tv.Type.Underlying().(*types.Pointer) - if !ok { - return newDiagnostic(checker.Name(), call, defaultReport, nil) - } - if pt.Elem() == errorType { - return newDiagnostic(checker.Name(), call, errorPtrReport, nil) - } - - _, isInterface := pt.Elem().Underlying().(*types.Interface) - if !isInterface && !types.Implements(pt.Elem(), errorIface) { - return newDiagnostic(checker.Name(), call, defaultReport, nil) - } - } - return nil -} - -func isErrorsIsCall(pass *analysis.Pass, ce *ast.CallExpr) bool { - return isErrorsPkgFnCall(pass, ce, "Is") -} - -func isErrorsAsCall(pass *analysis.Pass, ce *ast.CallExpr) bool { - return isErrorsPkgFnCall(pass, ce, "As") -} - -func isErrorsPkgFnCall(pass *analysis.Pass, ce *ast.CallExpr, fn string) bool { - se, ok := ce.Fun.(*ast.SelectorExpr) - if !ok { - return false - } - - errorsIsObj := analysisutil.ObjectOf(pass.Pkg, "errors", fn) - if errorsIsObj == nil { - return false - } - - return analysisutil.IsObj(pass.TypesInfo, se.Sel, errorsIsObj) -} diff --git a/vendor/github.com/Antonboom/testifylint/internal/checkers/error_nil.go b/vendor/github.com/Antonboom/testifylint/internal/checkers/error_nil.go deleted file mode 100644 index 5b0af7458..000000000 --- a/vendor/github.com/Antonboom/testifylint/internal/checkers/error_nil.go +++ /dev/null @@ -1,113 +0,0 @@ -package checkers - -import ( - "go/ast" - "go/token" - "go/types" - - "golang.org/x/tools/go/analysis" - - "github.com/Antonboom/testifylint/internal/analysisutil" -) - -// ErrorNil detects situations like -// -// assert.Nil(t, err) -// assert.NotNil(t, err) -// assert.Equal(t, nil, err) -// assert.EqualValues(t, nil, err) -// assert.Exactly(t, nil, err) -// assert.ErrorIs(t, err, nil) -// -// assert.NotEqual(t, nil, err) -// assert.NotEqualValues(t, nil, err) -// assert.NotErrorIs(t, err, nil) -// -// and requires -// -// assert.NoError(t, err) -// assert.Error(t, err) -type ErrorNil struct{} - -// NewErrorNil constructs ErrorNil checker. 
-func NewErrorNil() ErrorNil { return ErrorNil{} } -func (ErrorNil) Name() string { return "error-nil" } - -func (checker ErrorNil) Check(pass *analysis.Pass, call *CallMeta) *analysis.Diagnostic { - const ( - errorFn = "Error" - noErrorFn = "NoError" - ) - - proposedFn, survivingArg, replacementEndPos := func() (string, ast.Expr, token.Pos) { - switch call.Fn.NameFTrimmed { - case "Nil": - if len(call.Args) >= 1 && isError(pass, call.Args[0]) { - return noErrorFn, call.Args[0], call.Args[0].End() - } - - case "NotNil": - if len(call.Args) >= 1 && isError(pass, call.Args[0]) { - return errorFn, call.Args[0], call.Args[0].End() - } - - case "Equal", "EqualValues", "Exactly", "ErrorIs": - if len(call.Args) < 2 { - return "", nil, token.NoPos - } - a, b := call.Args[0], call.Args[1] - - switch { - case isError(pass, a) && isNil(b): - return noErrorFn, a, b.End() - case isNil(a) && isError(pass, b): - return noErrorFn, b, b.End() - } - - case "NotEqual", "NotEqualValues", "NotErrorIs": - if len(call.Args) < 2 { - return "", nil, token.NoPos - } - a, b := call.Args[0], call.Args[1] - - switch { - case isError(pass, a) && isNil(b): - return errorFn, a, b.End() - case isNil(a) && isError(pass, b): - return errorFn, b, b.End() - } - } - return "", nil, token.NoPos - }() - - if proposedFn != "" { - return newUseFunctionDiagnostic(checker.Name(), call, proposedFn, - newSuggestedFuncReplacement(call, proposedFn, analysis.TextEdit{ - Pos: call.Args[0].Pos(), - End: replacementEndPos, - NewText: analysisutil.NodeBytes(pass.Fset, survivingArg), - }), - ) - } - return nil -} - -var ( - errorType = types.Universe.Lookup("error").Type() - errorIface = errorType.Underlying().(*types.Interface) -) - -func isError(pass *analysis.Pass, expr ast.Expr) bool { - t := pass.TypesInfo.TypeOf(expr) - if t == nil { - return false - } - - _, ok := t.Underlying().(*types.Interface) - return ok && types.Implements(t, errorIface) -} - -func isNil(expr ast.Expr) bool { - ident, ok := expr.(*ast.Ident) - return ok && ident.Name == "nil" -} diff --git a/vendor/github.com/Antonboom/testifylint/internal/checkers/expected_actual.go b/vendor/github.com/Antonboom/testifylint/internal/checkers/expected_actual.go deleted file mode 100644 index e6825eaa6..000000000 --- a/vendor/github.com/Antonboom/testifylint/internal/checkers/expected_actual.go +++ /dev/null @@ -1,215 +0,0 @@ -package checkers - -import ( - "go/ast" - "go/token" - "go/types" - "regexp" - - "golang.org/x/tools/go/analysis" - - "github.com/Antonboom/testifylint/internal/analysisutil" -) - -// DefaultExpectedVarPattern matches variables with "expected" or "wanted" prefix or suffix in the name. 
-var DefaultExpectedVarPattern = regexp.MustCompile( - `(^(exp(ected)?|want(ed)?)([A-Z]\w*)?$)|(^(\w*[a-z])?(Exp(ected)?|Want(ed)?)$)`) - -// ExpectedActual detects situation like -// -// assert.Equal(t, result, expected) -// assert.EqualExportedValues(t, resultObj, User{Name: "Anton"}) -// assert.EqualValues(t, result, 42) -// assert.Exactly(t, result, int64(42)) -// assert.JSONEq(t, result, `{"version": 3}`) -// assert.InDelta(t, result, 42.42, 1.0) -// assert.InDeltaMapValues(t, result, map[string]float64{"score": 0.99}, 1.0) -// assert.InDeltaSlice(t, result, []float64{0.98, 0.99}, 1.0) -// assert.InEpsilon(t, result, 42.42, 0.0001) -// assert.InEpsilonSlice(t, result, []float64{0.9801, 0.9902}, 0.0001) -// assert.IsType(t, result, (*User)(nil)) -// assert.NotEqual(t, result, "expected") -// assert.NotEqualValues(t, result, "expected") -// assert.NotSame(t, resultPtr, &value) -// assert.Same(t, resultPtr, &value) -// assert.WithinDuration(t, resultTime, time.Date(2023, 01, 12, 11, 46, 33, 0, nil), time.Second) -// assert.YAMLEq(t, result, "version: '3'") -// -// and requires -// -// assert.Equal(t, expected, result) -// assert.EqualExportedValues(t, User{Name: "Anton"}, resultObj) -// assert.EqualValues(t, 42, result) -// ... -type ExpectedActual struct { - expVarPattern *regexp.Regexp -} - -// NewExpectedActual constructs ExpectedActual checker using DefaultExpectedVarPattern. -func NewExpectedActual() *ExpectedActual { - return &ExpectedActual{expVarPattern: DefaultExpectedVarPattern} -} - -func (ExpectedActual) Name() string { return "expected-actual" } - -func (checker *ExpectedActual) SetExpVarPattern(p *regexp.Regexp) *ExpectedActual { - if p != nil { - checker.expVarPattern = p - } - return checker -} - -func (checker ExpectedActual) Check(pass *analysis.Pass, call *CallMeta) *analysis.Diagnostic { - switch call.Fn.NameFTrimmed { - case "Equal", - "EqualExportedValues", - "EqualValues", - "Exactly", - "InDelta", - "InDeltaMapValues", - "InDeltaSlice", - "InEpsilon", - "InEpsilonSlice", - "IsType", - "JSONEq", - "NotEqual", - "NotEqualValues", - "NotSame", - "Same", - "WithinDuration", - "YAMLEq": - default: - return nil - } - - if len(call.Args) < 2 { - return nil - } - first, second := call.Args[0], call.Args[1] - - if checker.isWrongExpectedActualOrder(pass, first, second) { - return newDiagnostic(checker.Name(), call, "need to reverse actual and expected values", &analysis.SuggestedFix{ - Message: "Reverse actual and expected values", - TextEdits: []analysis.TextEdit{ - { - Pos: first.Pos(), - End: second.End(), - NewText: formatAsCallArgs(pass, second, first), - }, - }, - }) - } - return nil -} - -func (checker ExpectedActual) isWrongExpectedActualOrder(pass *analysis.Pass, first, second ast.Expr) bool { - leftIsCandidate := checker.isExpectedValueCandidate(pass, first) - rightIsCandidate := checker.isExpectedValueCandidate(pass, second) - return rightIsCandidate && !leftIsCandidate -} - -func (checker ExpectedActual) isExpectedValueCandidate(pass *analysis.Pass, expr ast.Expr) bool { - switch v := expr.(type) { - case *ast.ParenExpr: - return checker.isExpectedValueCandidate(pass, v.X) - - case *ast.StarExpr: // *value - return checker.isExpectedValueCandidate(pass, v.X) - - case *ast.UnaryExpr: - return (v.Op == token.AND) && checker.isExpectedValueCandidate(pass, v.X) // &value - - case *ast.CompositeLit: - return true - - case *ast.CallExpr: - return isParenExpr(v) || - isCastedBasicLitOrExpectedValue(v, checker.expVarPattern) || - isExpectedValueFactory(pass, v, 
checker.expVarPattern) - } - - return isBasicLit(expr) || - isUntypedConst(pass, expr) || - isTypedConst(pass, expr) || - isIdentNamedAsExpected(checker.expVarPattern, expr) || - isStructVarNamedAsExpected(checker.expVarPattern, expr) || - isStructFieldNamedAsExpected(checker.expVarPattern, expr) -} - -func isParenExpr(ce *ast.CallExpr) bool { - _, ok := ce.Fun.(*ast.ParenExpr) - return ok -} - -func isCastedBasicLitOrExpectedValue(ce *ast.CallExpr, pattern *regexp.Regexp) bool { - if len(ce.Args) != 1 { - return false - } - - fn, ok := ce.Fun.(*ast.Ident) - if !ok { - return false - } - - switch fn.Name { - case "complex64", "complex128": - return true - - case "uint", "uint8", "uint16", "uint32", "uint64", - "int", "int8", "int16", "int32", "int64", - "float32", "float64", - "rune", "string": - return isBasicLit(ce.Args[0]) || isIdentNamedAsExpected(pattern, ce.Args[0]) - } - return false -} - -func isExpectedValueFactory(pass *analysis.Pass, ce *ast.CallExpr, pattern *regexp.Regexp) bool { - switch fn := ce.Fun.(type) { - case *ast.Ident: - return pattern.MatchString(fn.Name) - - case *ast.SelectorExpr: - timeDateFn := analysisutil.ObjectOf(pass.Pkg, "time", "Date") - if timeDateFn != nil && analysisutil.IsObj(pass.TypesInfo, fn.Sel, timeDateFn) { - return true - } - return pattern.MatchString(fn.Sel.Name) - } - return false -} - -func isBasicLit(e ast.Expr) bool { - _, ok := e.(*ast.BasicLit) - return ok -} - -func isUntypedConst(p *analysis.Pass, e ast.Expr) bool { - t := p.TypesInfo.TypeOf(e) - if t == nil { - return false - } - - b, ok := t.(*types.Basic) - return ok && b.Info()&types.IsUntyped > 0 -} - -func isTypedConst(p *analysis.Pass, e ast.Expr) bool { - tt, ok := p.TypesInfo.Types[e] - return ok && tt.IsValue() && tt.Value != nil -} - -func isIdentNamedAsExpected(pattern *regexp.Regexp, e ast.Expr) bool { - id, ok := e.(*ast.Ident) - return ok && pattern.MatchString(id.Name) -} - -func isStructVarNamedAsExpected(pattern *regexp.Regexp, e ast.Expr) bool { - s, ok := e.(*ast.SelectorExpr) - return ok && isIdentNamedAsExpected(pattern, s.X) -} - -func isStructFieldNamedAsExpected(pattern *regexp.Regexp, e ast.Expr) bool { - s, ok := e.(*ast.SelectorExpr) - return ok && isIdentNamedAsExpected(pattern, s.Sel) -} diff --git a/vendor/github.com/Antonboom/testifylint/internal/checkers/float_compare.go b/vendor/github.com/Antonboom/testifylint/internal/checkers/float_compare.go deleted file mode 100644 index 10b1330de..000000000 --- a/vendor/github.com/Antonboom/testifylint/internal/checkers/float_compare.go +++ /dev/null @@ -1,70 +0,0 @@ -package checkers - -import ( - "fmt" - "go/ast" - "go/token" - "go/types" - - "golang.org/x/tools/go/analysis" -) - -// FloatCompare detects situation like -// -// assert.Equal(t, 42.42, result) -// assert.EqualValues(t, 42.42, result) -// assert.Exactly(t, 42.42, result) -// assert.True(t, result == 42.42) -// assert.False(t, result != 42.42) -// -// and requires -// -// assert.InEpsilon(t, 42.42, result, 0.0001) // Or assert.InDelta -type FloatCompare struct{} - -// NewFloatCompare constructs FloatCompare checker. 
-func NewFloatCompare() FloatCompare { return FloatCompare{} } -func (FloatCompare) Name() string { return "float-compare" } - -func (checker FloatCompare) Check(pass *analysis.Pass, call *CallMeta) *analysis.Diagnostic { - invalid := func() bool { - switch call.Fn.NameFTrimmed { - case "Equal", "EqualValues", "Exactly": - return len(call.Args) > 1 && (isFloat(pass, call.Args[0]) || isFloat(pass, call.Args[1])) - - case "True": - return len(call.Args) > 0 && isFloatCompare(pass, call.Args[0], token.EQL) - - case "False": - return len(call.Args) > 0 && isFloatCompare(pass, call.Args[0], token.NEQ) - } - return false - }() - - if invalid { - format := "use %s.InEpsilon (or InDelta)" - if call.Fn.IsFmt { - format = "use %s.InEpsilonf (or InDeltaf)" - } - return newDiagnostic(checker.Name(), call, fmt.Sprintf(format, call.SelectorXStr), nil) - } - return nil -} - -func isFloat(pass *analysis.Pass, expr ast.Expr) bool { - t := pass.TypesInfo.TypeOf(expr) - if t == nil { - return false - } - - bt, ok := t.Underlying().(*types.Basic) - return ok && (bt.Info()&types.IsFloat > 0) -} - -func isFloatCompare(p *analysis.Pass, e ast.Expr, op token.Token) bool { - be, ok := e.(*ast.BinaryExpr) - if !ok { - return false - } - return be.Op == op && (isFloat(p, be.X) || isFloat(p, be.Y)) -} diff --git a/vendor/github.com/Antonboom/testifylint/internal/checkers/go_require.go b/vendor/github.com/Antonboom/testifylint/internal/checkers/go_require.go deleted file mode 100644 index 0844f15a0..000000000 --- a/vendor/github.com/Antonboom/testifylint/internal/checkers/go_require.go +++ /dev/null @@ -1,327 +0,0 @@ -package checkers - -import ( - "fmt" - "go/ast" - "go/types" - - "golang.org/x/tools/go/analysis" - "golang.org/x/tools/go/ast/inspector" - - "github.com/Antonboom/testifylint/internal/analysisutil" -) - -const ( - goRequireFnReportFormat = "%s contains assertions that must only be used in the goroutine running the test function" - goRequireCallReportFormat = "%s must only be used in the goroutine running the test function" -) - -// GoRequire takes idea from go vet's "testinggoroutine" check -// and detects usage of require package's functions or assert.FailNow in the non-test goroutines -// -// go func() { -// conn, err = lis.Accept() -// require.NoError(t, err) -// -// if assert.Error(err) { -// assert.FailNow(t, msg) -// } -// }() -type GoRequire struct{} - -// NewGoRequire constructs GoRequire checker. -func NewGoRequire() GoRequire { return GoRequire{} } -func (GoRequire) Name() string { return "go-require" } - -// Check should be consistent with -// https://cs.opensource.google/go/x/tools/+/master:go/analysis/passes/testinggoroutine/testinggoroutine.go -// -// But due to the fact that the Check covers cases missed by go vet, -// the implementation turned out to be terribly complicated. -// -// In simple words, the algorithm is as follows: -// - we walk along the call tree and store the status, whether we are in the test goroutine or not; -// - if we are in a test goroutine, then require is allowed, otherwise not; -// - when we encounter the launch of a subtest or `go` statement, the status changes; -// - in order to correctly handle the return to the correct status when exiting the current function, -// we have to store a stack of statuses (inGoroutineRunningTestFunc). -// -// Other test functions called in the test function are also analyzed to make a verdict about the current function. -// This leads to recursion, which the cache of processed functions (processedFuncs) helps reduce the impact of. 
-// Also, because of this, we have to pre-collect a list of test function declarations (testsDecls). -func (checker GoRequire) Check(pass *analysis.Pass, inspector *inspector.Inspector) (diagnostics []analysis.Diagnostic) { - testsDecls := make(funcDeclarations) - inspector.Preorder([]ast.Node{(*ast.FuncDecl)(nil)}, func(node ast.Node) { - fd := node.(*ast.FuncDecl) - - if isTestingFuncOrMethod(pass, fd) { - if tf, ok := pass.TypesInfo.ObjectOf(fd.Name).(*types.Func); ok { - testsDecls[tf] = fd - } - } - }) - - var inGoroutineRunningTestFunc boolStack - processedFuncs := make(map[*ast.FuncDecl]goRequireVerdict) - - nodesFilter := []ast.Node{ - (*ast.FuncDecl)(nil), - (*ast.FuncType)(nil), - (*ast.GoStmt)(nil), - (*ast.CallExpr)(nil), - } - inspector.Nodes(nodesFilter, func(node ast.Node, push bool) bool { - if fd, ok := node.(*ast.FuncDecl); ok { - if !isTestingFuncOrMethod(pass, fd) { - return false - } - - if push { - inGoroutineRunningTestFunc.Push(true) - } else { - inGoroutineRunningTestFunc.Pop() - } - return true - } - - if ft, ok := node.(*ast.FuncType); ok { - if !isTestingAnonymousFunc(pass, ft) { - return false - } - - if push { - inGoroutineRunningTestFunc.Push(true) - } else { - inGoroutineRunningTestFunc.Pop() - } - return true - } - - if _, ok := node.(*ast.GoStmt); ok { - if push { - inGoroutineRunningTestFunc.Push(false) - } else { - inGoroutineRunningTestFunc.Pop() - } - return true - } - - ce := node.(*ast.CallExpr) - if isSubTestRun(pass, ce) { - if push { - // t.Run spawns the new testing goroutine and declines - // possible warnings from previous "simple" goroutine. - inGoroutineRunningTestFunc.Push(true) - } else { - inGoroutineRunningTestFunc.Pop() - } - return true - } - - if !push { - return false - } - if inGoroutineRunningTestFunc.Len() == 0 { - // Insufficient info. - return true - } - if inGoroutineRunningTestFunc.Last() { - // We are in testing goroutine and can skip any assertion checks. - return true - } - - testifyCall := NewCallMeta(pass, ce) - if testifyCall != nil { - switch checker.checkCall(testifyCall) { - case goRequireVerdictRequire: - d := newDiagnostic(checker.Name(), testifyCall, fmt.Sprintf(goRequireCallReportFormat, "require"), nil) - diagnostics = append(diagnostics, *d) - - case goRequireVerdictAssertFailNow: - d := newDiagnostic(checker.Name(), testifyCall, fmt.Sprintf(goRequireCallReportFormat, testifyCall), nil) - diagnostics = append(diagnostics, *d) - - case goRequireVerdictNoExit: - } - return false - } - - // Case of nested function call. 
- { - calledFd := testsDecls.Get(pass, ce) - if calledFd == nil { - return true - } - - if v := checker.checkFunc(pass, calledFd, testsDecls, processedFuncs); v != goRequireVerdictNoExit { - caller := analysisutil.NodeString(pass.Fset, ce.Fun) - d := newDiagnostic(checker.Name(), ce, fmt.Sprintf(goRequireFnReportFormat, caller), nil) - diagnostics = append(diagnostics, *d) - } - } - return true - }) - - return diagnostics -} - -func (checker GoRequire) checkFunc( - pass *analysis.Pass, - fd *ast.FuncDecl, - testsDecls funcDeclarations, - processedFuncs map[*ast.FuncDecl]goRequireVerdict, -) (result goRequireVerdict) { - if v, ok := processedFuncs[fd]; ok { - return v - } - - ast.Inspect(fd, func(node ast.Node) bool { - if result != goRequireVerdictNoExit { - return false - } - - if _, ok := node.(*ast.GoStmt); ok { - return false - } - - ce, ok := node.(*ast.CallExpr) - if !ok { - return true - } - - testifyCall := NewCallMeta(pass, ce) - if testifyCall != nil { - if v := checker.checkCall(testifyCall); v != goRequireVerdictNoExit { - result, processedFuncs[fd] = v, v - } - return false - } - - // Case of nested function call. - { - calledFd := testsDecls.Get(pass, ce) - if calledFd == nil { - return true - } - if calledFd == fd { - // Recursion. - return true - } - - if v := checker.checkFunc(pass, calledFd, testsDecls, processedFuncs); v != goRequireVerdictNoExit { - result = v - return false - } - return true - } - }) - - return result -} - -type goRequireVerdict int - -const ( - goRequireVerdictNoExit goRequireVerdict = iota - goRequireVerdictRequire - goRequireVerdictAssertFailNow -) - -func (checker GoRequire) checkCall(call *CallMeta) goRequireVerdict { - if !call.IsAssert { - return goRequireVerdictRequire - } - if call.Fn.NameFTrimmed == "FailNow" { - return goRequireVerdictAssertFailNow - } - return goRequireVerdictNoExit -} - -type funcDeclarations map[*types.Func]*ast.FuncDecl - -// Get returns the declaration of a called function or method. -// Currently, only static calls within the same package are supported, otherwise returns nil. 
-func (fd funcDeclarations) Get(pass *analysis.Pass, ce *ast.CallExpr) *ast.FuncDecl { - var obj types.Object - - switch fun := ce.Fun.(type) { - case *ast.SelectorExpr: - obj = pass.TypesInfo.ObjectOf(fun.Sel) - - case *ast.Ident: - obj = pass.TypesInfo.ObjectOf(fun) - - case *ast.IndexExpr: - if id, ok := fun.X.(*ast.Ident); ok { - obj = pass.TypesInfo.ObjectOf(id) - } - - case *ast.IndexListExpr: - if id, ok := fun.X.(*ast.Ident); ok { - obj = pass.TypesInfo.ObjectOf(id) - } - } - - if tf, ok := obj.(*types.Func); ok { - return fd[tf] - } - return nil -} - -type boolStack []bool - -func (s boolStack) Len() int { - return len(s) -} - -func (s *boolStack) Push(v bool) { - *s = append(*s, v) -} - -func (s *boolStack) Pop() bool { - n := len(*s) - if n == 0 { - return false - } - - last := (*s)[n-1] - *s = (*s)[:n-1] - return last -} - -func (s boolStack) Last() bool { - n := len(s) - if n == 0 { - return false - } - return s[n-1] -} - -func isSubTestRun(pass *analysis.Pass, ce *ast.CallExpr) bool { - se, ok := ce.Fun.(*ast.SelectorExpr) - if !ok || se.Sel == nil { - return false - } - return (isTestingTPtr(pass, se.X) || implementsTestifySuiteIface(pass, se.X)) && se.Sel.Name == "Run" -} - -func isTestingFuncOrMethod(pass *analysis.Pass, fd *ast.FuncDecl) bool { - return hasTestingTParam(pass, fd.Type) || isTestifySuiteMethod(pass, fd) -} - -func isTestingAnonymousFunc(pass *analysis.Pass, ft *ast.FuncType) bool { - return hasTestingTParam(pass, ft) -} - -func hasTestingTParam(pass *analysis.Pass, ft *ast.FuncType) bool { - if ft == nil || ft.Params == nil { - return false - } - - for _, param := range ft.Params.List { - if isTestingTPtr(pass, param.Type) { - return true - } - } - return false -} diff --git a/vendor/github.com/Antonboom/testifylint/internal/checkers/len.go b/vendor/github.com/Antonboom/testifylint/internal/checkers/len.go deleted file mode 100644 index d4e6a48b5..000000000 --- a/vendor/github.com/Antonboom/testifylint/internal/checkers/len.go +++ /dev/null @@ -1,97 +0,0 @@ -package checkers - -import ( - "go/ast" - "go/token" - - "golang.org/x/tools/go/analysis" -) - -// Len detects situations like -// -// assert.Equal(t, 3, len(arr)) -// assert.EqualValues(t, 3, len(arr)) -// assert.Exactly(t, 3, len(arr)) -// assert.True(t, len(arr) == 3) -// -// and requires -// -// assert.Len(t, arr, 3) -type Len struct{} - -// NewLen constructs Len checker. 
-func NewLen() Len { return Len{} } -func (Len) Name() string { return "len" } - -func (checker Len) Check(pass *analysis.Pass, call *CallMeta) *analysis.Diagnostic { - const proposedFn = "Len" - - switch call.Fn.NameFTrimmed { - case "Equal", "EqualValues", "Exactly": - if len(call.Args) < 2 { - return nil - } - a, b := call.Args[0], call.Args[1] - - if lenArg, expectedLen, ok := xorLenCall(pass, a, b); ok { - if expectedLen == b && !isIntBasicLit(expectedLen) { - // https://github.com/Antonboom/testifylint/issues/9 - return nil - } - return newUseFunctionDiagnostic(checker.Name(), call, proposedFn, - newSuggestedFuncReplacement(call, proposedFn, analysis.TextEdit{ - Pos: a.Pos(), - End: b.End(), - NewText: formatAsCallArgs(pass, lenArg, expectedLen), - }), - ) - } - - case "True": - if len(call.Args) < 1 { - return nil - } - expr := call.Args[0] - - if lenArg, expectedLen, ok := isLenEquality(pass, expr); ok && isIntBasicLit(expectedLen) { - return newUseFunctionDiagnostic(checker.Name(), call, proposedFn, - newSuggestedFuncReplacement(call, proposedFn, analysis.TextEdit{ - Pos: expr.Pos(), - End: expr.End(), - NewText: formatAsCallArgs(pass, lenArg, expectedLen), - }), - ) - } - } - return nil -} - -func xorLenCall(pass *analysis.Pass, a, b ast.Expr) (lenArg ast.Expr, expectedLen ast.Expr, ok bool) { - arg1, ok1 := isBuiltinLenCall(pass, a) - arg2, ok2 := isBuiltinLenCall(pass, b) - - if xor(ok1, ok2) { - if ok1 { - return arg1, b, true - } - return arg2, a, true - } - return nil, nil, false -} - -func isLenEquality(pass *analysis.Pass, e ast.Expr) (ast.Expr, ast.Expr, bool) { - be, ok := e.(*ast.BinaryExpr) - if !ok { - return nil, nil, false - } - - if be.Op != token.EQL { - return nil, nil, false - } - return xorLenCall(pass, be.X, be.Y) -} - -func isIntBasicLit(e ast.Expr) bool { - bl, ok := e.(*ast.BasicLit) - return ok && bl.Kind == token.INT -} diff --git a/vendor/github.com/Antonboom/testifylint/internal/checkers/nil_compare.go b/vendor/github.com/Antonboom/testifylint/internal/checkers/nil_compare.go deleted file mode 100644 index 89680a069..000000000 --- a/vendor/github.com/Antonboom/testifylint/internal/checkers/nil_compare.go +++ /dev/null @@ -1,69 +0,0 @@ -package checkers - -import ( - "go/ast" - - "golang.org/x/tools/go/analysis" - - "github.com/Antonboom/testifylint/internal/analysisutil" -) - -// NilCompare detects situations like -// -// assert.Equal(t, nil, value) -// assert.EqualValues(t, nil, value) -// assert.Exactly(t, nil, value) -// -// assert.NotEqual(t, nil, value) -// assert.NotEqualValues(t, nil, value) -// -// and requires -// -// assert.Nil(t, value) -// assert.NotNil(t, value) -type NilCompare struct{} - -// NewNilCompare constructs NilCompare checker. 
-func NewNilCompare() NilCompare { return NilCompare{} } -func (NilCompare) Name() string { return "nil-compare" } - -func (checker NilCompare) Check(pass *analysis.Pass, call *CallMeta) *analysis.Diagnostic { - if len(call.Args) < 2 { - return nil - } - - survivingArg, ok := xorNil(call.Args[0], call.Args[1]) - if !ok { - return nil - } - - var proposedFn string - - switch call.Fn.NameFTrimmed { - case "Equal", "EqualValues", "Exactly": - proposedFn = "Nil" - case "NotEqual", "NotEqualValues": - proposedFn = "NotNil" - default: - return nil - } - - return newUseFunctionDiagnostic(checker.Name(), call, proposedFn, - newSuggestedFuncReplacement(call, proposedFn, analysis.TextEdit{ - Pos: call.Args[0].Pos(), - End: call.Args[1].End(), - NewText: analysisutil.NodeBytes(pass.Fset, survivingArg), - }), - ) -} - -func xorNil(first, second ast.Expr) (ast.Expr, bool) { - a, b := isNil(first), isNil(second) - if xor(a, b) { - if a { - return second, true - } - return first, true - } - return nil, false -} diff --git a/vendor/github.com/Antonboom/testifylint/internal/checkers/require_error.go b/vendor/github.com/Antonboom/testifylint/internal/checkers/require_error.go deleted file mode 100644 index ab09dd447..000000000 --- a/vendor/github.com/Antonboom/testifylint/internal/checkers/require_error.go +++ /dev/null @@ -1,338 +0,0 @@ -package checkers - -import ( - "fmt" - "go/ast" - "go/token" - "regexp" - - "golang.org/x/tools/go/analysis" - "golang.org/x/tools/go/ast/inspector" -) - -const requireErrorReport = "for error assertions use require" - -// RequireError detects error assertions like -// -// assert.Error(t, err) // s.Error(err), s.Assert().Error(err) -// assert.ErrorIs(t, err, io.EOF) -// assert.ErrorAs(t, err, &target) -// assert.EqualError(t, err, "end of file") -// assert.ErrorContains(t, err, "end of file") -// assert.NoError(t, err) -// assert.NotErrorIs(t, err, io.EOF) -// -// and requires -// -// require.Error(t, err) // s.Require().Error(err), s.Require().Error(err) -// require.ErrorIs(t, err, io.EOF) -// require.ErrorAs(t, err, &target) -// ... -// -// RequireError ignores: -// - assertion in the `if` condition; -// - the entire `if-else[-if]` block, if there is an assertion in any `if` condition; -// - the last assertion in the block, if there are no methods/functions calls after it; -// - assertions in an explicit goroutine; -// - assertions in an explicit testing cleanup function or suite teardown methods; -// - sequence of NoError assertions. -type RequireError struct { - fnPattern *regexp.Regexp -} - -// NewRequireError constructs RequireError checker. -func NewRequireError() *RequireError { return new(RequireError) } -func (RequireError) Name() string { return "require-error" } - -func (checker *RequireError) SetFnPattern(p *regexp.Regexp) *RequireError { - if p != nil { - checker.fnPattern = p - } - return checker -} - -func (checker RequireError) Check(pass *analysis.Pass, inspector *inspector.Inspector) []analysis.Diagnostic { - callsByFunc := make(map[funcID][]*callMeta) - - // Stage 1. Collect meta information about any calls inside functions. 
- - inspector.WithStack([]ast.Node{(*ast.CallExpr)(nil)}, func(node ast.Node, push bool, stack []ast.Node) bool { - if !push { - return false - } - if len(stack) < 3 { - return true - } - - fID := findSurroundingFunc(pass, stack) - if fID == nil { - return true - } - - _, prevIsIfStmt := stack[len(stack)-2].(*ast.IfStmt) - _, prevIsAssignStmt := stack[len(stack)-2].(*ast.AssignStmt) - _, prevPrevIsIfStmt := stack[len(stack)-3].(*ast.IfStmt) - inIfCond := prevIsIfStmt || (prevPrevIsIfStmt && prevIsAssignStmt) - - callExpr := node.(*ast.CallExpr) - testifyCall := NewCallMeta(pass, callExpr) - - call := &callMeta{ - call: callExpr, - testifyCall: testifyCall, - rootIf: findRootIf(stack), - parentIf: findNearestNode[*ast.IfStmt](stack), - parentBlock: findNearestNode[*ast.BlockStmt](stack), - inIfCond: inIfCond, - inNoErrorSeq: false, // Will be filled in below. - } - - callsByFunc[*fID] = append(callsByFunc[*fID], call) - return testifyCall == nil // Do not support asserts in asserts. - }) - - // Stage 2. Analyze calls and block context. - - var diagnostics []analysis.Diagnostic - - callsByBlock := map[*ast.BlockStmt][]*callMeta{} - for _, calls := range callsByFunc { - for _, c := range calls { - if b := c.parentBlock; b != nil { - callsByBlock[b] = append(callsByBlock[b], c) - } - } - } - - markCallsInNoErrorSequence(callsByBlock) - - for funcInfo, calls := range callsByFunc { - for i, c := range calls { - if funcInfo.isTestCleanup { - continue - } - if funcInfo.isGoroutine { - continue - } - - if c.testifyCall == nil { - continue - } - if !c.testifyCall.IsAssert { - continue - } - switch c.testifyCall.Fn.NameFTrimmed { - default: - continue - case "Error", "ErrorIs", "ErrorAs", "EqualError", "ErrorContains", "NoError", "NotErrorIs": - } - - if needToSkipBasedOnContext(c, i, calls, callsByBlock) { - continue - } - if p := checker.fnPattern; p != nil && !p.MatchString(c.testifyCall.Fn.Name) { - continue - } - - diagnostics = append(diagnostics, - *newDiagnostic(checker.Name(), c.testifyCall, requireErrorReport, nil)) - } - } - - return diagnostics -} - -func needToSkipBasedOnContext( - currCall *callMeta, - currCallIndex int, - otherCalls []*callMeta, - callsByBlock map[*ast.BlockStmt][]*callMeta, -) bool { - if currCall.inNoErrorSeq { - // Skip `assert.NoError` sequence. - return true - } - - if currCall.inIfCond { - // Skip assertions in the "if condition". - return true - } - - if currCall.rootIf != nil { - for _, rootCall := range otherCalls { - if (rootCall.rootIf == currCall.rootIf) && rootCall.inIfCond { - // Skip assertions in the entire if-else[-if] block, if some of "if condition" contains assertion. - return true - } - } - } - - block := currCall.parentBlock - blockCalls := callsByBlock[block] - isLastCallInBlock := blockCalls[len(blockCalls)-1] == currCall - - noCallsAfter := true - - _, blockEndWithReturn := block.List[len(block.List)-1].(*ast.ReturnStmt) - if !blockEndWithReturn { - for i := currCallIndex + 1; i < len(otherCalls); i++ { - nextCall := otherCalls[i] - nextCallInElseBlock := false - - if pIf := currCall.parentIf; pIf != nil && pIf.Else != nil { - ast.Inspect(pIf.Else, func(n ast.Node) bool { - if n == nextCall.call { - nextCallInElseBlock = true - return false - } - return true - }) - } - - if !nextCallInElseBlock { - noCallsAfter = false - break - } - } - } - - // Skip assertion if this is the last operation in the test. 
- return isLastCallInBlock && noCallsAfter -} - -func findSurroundingFunc(pass *analysis.Pass, stack []ast.Node) *funcID { - for i := len(stack) - 2; i >= 0; i-- { - var fType *ast.FuncType - var fName string - var isTestCleanup bool - var isGoroutine bool - - switch fd := stack[i].(type) { - case *ast.FuncDecl: - fType, fName = fd.Type, fd.Name.Name - - if isTestifySuiteMethod(pass, fd) { - if ident := fd.Name; ident != nil && isAfterTestMethod(ident.Name) { - isTestCleanup = true - } - } - - case *ast.FuncLit: - fType, fName = fd.Type, "anonymous" - - if i >= 2 { //nolint:nestif - if ce, ok := stack[i-1].(*ast.CallExpr); ok { - if se, ok := ce.Fun.(*ast.SelectorExpr); ok { - isTestCleanup = isTestingTPtr(pass, se.X) && se.Sel != nil && (se.Sel.Name == "Cleanup") - } - - if _, ok := stack[i-2].(*ast.GoStmt); ok { - isGoroutine = true - } - } - } - - default: - continue - } - - return &funcID{ - pos: fType.Pos(), - posStr: pass.Fset.Position(fType.Pos()).String(), - name: fName, - isTestCleanup: isTestCleanup, - isGoroutine: isGoroutine, - } - } - return nil -} - -func findRootIf(stack []ast.Node) *ast.IfStmt { - nearestIf, i := findNearestNodeWithIdx[*ast.IfStmt](stack) - for ; i > 0; i-- { - parent, ok := stack[i-1].(*ast.IfStmt) - if ok { - nearestIf = parent - } else { - break - } - } - return nearestIf -} - -func findNearestNode[T ast.Node](stack []ast.Node) (v T) { - v, _ = findNearestNodeWithIdx[T](stack) - return -} - -func findNearestNodeWithIdx[T ast.Node](stack []ast.Node) (v T, index int) { - for i := len(stack) - 2; i >= 0; i-- { - if n, ok := stack[i].(T); ok { - return n, i - } - } - return -} - -func markCallsInNoErrorSequence(callsByBlock map[*ast.BlockStmt][]*callMeta) { - for _, calls := range callsByBlock { - for i, c := range calls { - if c.testifyCall == nil { - continue - } - - var prevIsNoError bool - if i > 0 { - if prev := calls[i-1].testifyCall; prev != nil { - prevIsNoError = isNoErrorAssertion(prev.Fn.Name) - } - } - - var nextIsNoError bool - if i < len(calls)-1 { - if next := calls[i+1].testifyCall; next != nil { - nextIsNoError = isNoErrorAssertion(next.Fn.Name) - } - } - - if isNoErrorAssertion(c.testifyCall.Fn.Name) && (prevIsNoError || nextIsNoError) { - calls[i].inNoErrorSeq = true - } - } - } -} - -type callMeta struct { - call *ast.CallExpr - testifyCall *CallMeta - rootIf *ast.IfStmt // The root `if` in if-else[-if] chain. - parentIf *ast.IfStmt // The nearest `if`, can be equal with rootIf. - parentBlock *ast.BlockStmt - inIfCond bool // True for code like `if assert.ErrorAs(t, err, &target) {`. - inNoErrorSeq bool // True for sequence of `assert.NoError` assertions. 
-} - -type funcID struct { - pos token.Pos - posStr string - name string - isTestCleanup bool - isGoroutine bool -} - -func (id funcID) String() string { - return fmt.Sprintf("%s at %s", id.name, id.posStr) -} - -func isAfterTestMethod(name string) bool { - // https://github.com/stretchr/testify/blob/master/suite/interfaces.go - switch name { - case "TearDownSuite", "TearDownTest", "AfterTest", "HandleStats", "TearDownSubTest": - return true - } - return false -} - -func isNoErrorAssertion(fnName string) bool { - return (fnName == "NoError") || (fnName == "NoErrorf") -} diff --git a/vendor/github.com/Antonboom/testifylint/internal/checkers/suite_dont_use_pkg.go b/vendor/github.com/Antonboom/testifylint/internal/checkers/suite_dont_use_pkg.go deleted file mode 100644 index bf84f6378..000000000 --- a/vendor/github.com/Antonboom/testifylint/internal/checkers/suite_dont_use_pkg.go +++ /dev/null @@ -1,96 +0,0 @@ -package checkers - -import ( - "fmt" - "go/ast" - "go/types" - - "golang.org/x/tools/go/analysis" - - "github.com/Antonboom/testifylint/internal/analysisutil" - "github.com/Antonboom/testifylint/internal/testify" -) - -// SuiteDontUsePkg detects situation like -// -// func (s *MySuite) TestSomething() { -// assert.Equal(s.T(), 42, value) -// } -// -// and requires -// -// func (s *MySuite) TestSomething() { -// s.Equal(42, value) -// } -type SuiteDontUsePkg struct{} - -// NewSuiteDontUsePkg constructs SuiteDontUsePkg checker. -func NewSuiteDontUsePkg() SuiteDontUsePkg { return SuiteDontUsePkg{} } -func (SuiteDontUsePkg) Name() string { return "suite-dont-use-pkg" } - -func (checker SuiteDontUsePkg) Check(pass *analysis.Pass, call *CallMeta) *analysis.Diagnostic { - if !call.IsPkg { - return nil - } - - args := call.ArgsRaw - if len(args) < 2 { - return nil - } - t := args[0] - - ce, ok := t.(*ast.CallExpr) - if !ok { - return nil - } - se, ok := ce.Fun.(*ast.SelectorExpr) - if !ok { - return nil - } - if se.X == nil || !implementsTestifySuiteIface(pass, se.X) { - return nil - } - if se.Sel == nil || se.Sel.Name != "T" { - return nil - } - rcv, ok := se.X.(*ast.Ident) // At this point we ensure that `s.T()` is used as the first argument of assertion. - if !ok { - return nil - } - - newSelector := rcv.Name - if !call.IsAssert { - newSelector += "." + "Require()" - } - - msg := fmt.Sprintf("use %s.%s", newSelector, call.Fn.Name) - return newDiagnostic(checker.Name(), call, msg, &analysis.SuggestedFix{ - Message: fmt.Sprintf("Replace `%s` with `%s`", call.SelectorXStr, newSelector), - TextEdits: []analysis.TextEdit{ - // Replace package function with suite method. - { - Pos: call.Selector.X.Pos(), - End: call.Selector.X.End(), - NewText: []byte(newSelector), - }, - // Remove `s.T()`. 
- { - Pos: t.Pos(), - End: args[1].Pos(), - NewText: []byte(""), - }, - }, - }) -} - -func implementsTestifySuiteIface(pass *analysis.Pass, rcv ast.Expr) bool { - suiteIface := analysisutil.ObjectOf(pass.Pkg, testify.SuitePkgPath, "TestingSuite") - if suiteIface == nil { - return false - } - - return types.Implements( - pass.TypesInfo.TypeOf(rcv), - suiteIface.Type().Underlying().(*types.Interface), - ) -} diff --git a/vendor/github.com/Antonboom/testifylint/internal/checkers/suite_extra_assert_call.go b/vendor/github.com/Antonboom/testifylint/internal/checkers/suite_extra_assert_call.go deleted file mode 100644 index 791488b65..000000000 --- a/vendor/github.com/Antonboom/testifylint/internal/checkers/suite_extra_assert_call.go +++ /dev/null @@ -1,99 +0,0 @@ -package checkers - -import ( - "fmt" - "go/ast" - - "golang.org/x/tools/go/analysis" - - "github.com/Antonboom/testifylint/internal/analysisutil" -) - -// SuiteExtraAssertCallMode reflects different modes of work of SuiteExtraAssertCall checker. -type SuiteExtraAssertCallMode int - -const ( - SuiteExtraAssertCallModeRemove SuiteExtraAssertCallMode = iota - SuiteExtraAssertCallModeRequire -) - -const DefaultSuiteExtraAssertCallMode = SuiteExtraAssertCallModeRemove - -// SuiteExtraAssertCall detects situation like -// -// func (s *MySuite) TestSomething() { -// s.Assert().Equal(42, value) -// } -// -// and requires -// -// func (s *MySuite) TestSomething() { -// s.Equal(42, value) -// } -// -// or vice versa (depending on the configurable mode). -type SuiteExtraAssertCall struct { - mode SuiteExtraAssertCallMode -} - -// NewSuiteExtraAssertCall constructs SuiteExtraAssertCall checker. -func NewSuiteExtraAssertCall() *SuiteExtraAssertCall { - return &SuiteExtraAssertCall{mode: DefaultSuiteExtraAssertCallMode} -} - -func (SuiteExtraAssertCall) Name() string { return "suite-extra-assert-call" } - -func (checker *SuiteExtraAssertCall) SetMode(m SuiteExtraAssertCallMode) *SuiteExtraAssertCall { - checker.mode = m - return checker -} - -func (checker SuiteExtraAssertCall) Check(pass *analysis.Pass, call *CallMeta) *analysis.Diagnostic { - if call.IsPkg { - return nil - } - - switch checker.mode { - case SuiteExtraAssertCallModeRequire: - x, ok := call.Selector.X.(*ast.Ident) // s.True - if !ok || x == nil || !implementsTestifySuiteIface(pass, x) { - return nil - } - - msg := fmt.Sprintf("use an explicit %s.Assert().%s", analysisutil.NodeString(pass.Fset, x), call.Fn.Name) - return newDiagnostic(checker.Name(), call, msg, &analysis.SuggestedFix{ - Message: "Add `Assert()` call", - TextEdits: []analysis.TextEdit{{ - Pos: x.End(), - End: x.End(), // Pure insertion. - NewText: []byte(".Assert()"), - }}, - }) - - case SuiteExtraAssertCallModeRemove: - x, ok := call.Selector.X.(*ast.CallExpr) // s.Assert().True - if !ok { - return nil - } - - se, ok := x.Fun.(*ast.SelectorExpr) - if !ok || se == nil || !implementsTestifySuiteIface(pass, se.X) { - return nil - } - if se.Sel == nil || se.Sel.Name != "Assert" { - return nil - } - - msg := fmt.Sprintf("need to simplify the assertion to %s.%s", analysisutil.NodeString(pass.Fset, se.X), call.Fn.Name) - return newDiagnostic(checker.Name(), call, msg, &analysis.SuggestedFix{ - Message: "Remove `Assert()` call", - TextEdits: []analysis.TextEdit{{ - Pos: se.Sel.Pos(), - End: x.End() + 1, // +1 for dot. 
- NewText: []byte(""), - }}, - }) - } - - return nil -} diff --git a/vendor/github.com/Antonboom/testifylint/internal/checkers/suite_thelper.go b/vendor/github.com/Antonboom/testifylint/internal/checkers/suite_thelper.go deleted file mode 100644 index 5cadc93ad..000000000 --- a/vendor/github.com/Antonboom/testifylint/internal/checkers/suite_thelper.go +++ /dev/null @@ -1,130 +0,0 @@ -package checkers - -import ( - "fmt" - "go/ast" - "strings" - - "golang.org/x/tools/go/analysis" - "golang.org/x/tools/go/ast/inspector" - - "github.com/Antonboom/testifylint/internal/analysisutil" - "github.com/Antonboom/testifylint/internal/testify" -) - -// SuiteTHelper requires t.Helper() call in suite helpers: -// -// func (s *RoomSuite) assertRoomRound(roundID RoundID) { -// s.T().Helper() -// s.Equal(roundID, s.getRoom().CurrentRound.ID) -// } -type SuiteTHelper struct{} - -// NewSuiteTHelper constructs SuiteTHelper checker. -func NewSuiteTHelper() SuiteTHelper { return SuiteTHelper{} } -func (SuiteTHelper) Name() string { return "suite-thelper" } - -func (checker SuiteTHelper) Check(pass *analysis.Pass, inspector *inspector.Inspector) (diagnostics []analysis.Diagnostic) { - inspector.Preorder([]ast.Node{(*ast.FuncDecl)(nil)}, func(node ast.Node) { - fd := node.(*ast.FuncDecl) - if !isTestifySuiteMethod(pass, fd) { - return - } - - if ident := fd.Name; ident == nil || isTestMethod(ident.Name) || isServiceMethod(ident.Name) { - return - } - - if !containsSuiteAssertions(pass, fd) { - return - } - - rcv := fd.Recv.List[0] - if len(rcv.Names) != 1 || rcv.Names[0] == nil { - return - } - rcvName := rcv.Names[0].Name - - helperCallStr := fmt.Sprintf("%s.T().Helper()", rcvName) - - firstStmt := fd.Body.List[0] - if analysisutil.NodeString(pass.Fset, firstStmt) == helperCallStr { - return - } - - msg := fmt.Sprintf("suite helper method must start with " + helperCallStr) - d := newDiagnostic(checker.Name(), fd, msg, &analysis.SuggestedFix{ - Message: fmt.Sprintf("Insert `%s`", helperCallStr), - TextEdits: []analysis.TextEdit{ - { - Pos: firstStmt.Pos(), - End: firstStmt.Pos(), // Pure insertion. 
- NewText: []byte(helperCallStr + "\n\n"), - }, - }, - }) - diagnostics = append(diagnostics, *d) - }) - return diagnostics -} - -func isTestifySuiteMethod(pass *analysis.Pass, fDecl *ast.FuncDecl) bool { - if fDecl.Recv == nil || len(fDecl.Recv.List) != 1 { - return false - } - - rcv := fDecl.Recv.List[0] - return implementsTestifySuiteIface(pass, rcv.Type) -} - -func isTestMethod(name string) bool { - return strings.HasPrefix(name, "Test") -} - -func isServiceMethod(name string) bool { - // https://github.com/stretchr/testify/blob/master/suite/interfaces.go - switch name { - case "T", "SetT", "SetS", "SetupSuite", "SetupTest", "TearDownSuite", "TearDownTest", - "BeforeTest", "AfterTest", "HandleStats", "SetupSubTest", "TearDownSubTest": - return true - } - return false -} - -func containsSuiteAssertions(pass *analysis.Pass, fn *ast.FuncDecl) bool { - if fn.Body == nil { - return false - } - - for _, s := range fn.Body.List { - if isSuiteAssertion(pass, s) { - return true - } - } - return false -} - -func isSuiteAssertion(pass *analysis.Pass, stmt ast.Stmt) bool { - expr, ok := stmt.(*ast.ExprStmt) - if !ok { - return false - } - - ce, ok := expr.X.(*ast.CallExpr) - if !ok { - return false - } - - se, ok := ce.Fun.(*ast.SelectorExpr) - if !ok || se.Sel == nil { - return false - } - - if sel, ok := pass.TypesInfo.Selections[se]; ok { - pkg := sel.Obj().Pkg() - isAssert := analysisutil.IsPkg(pkg, testify.AssertPkgName, testify.AssertPkgPath) - isRequire := analysisutil.IsPkg(pkg, testify.RequirePkgName, testify.RequirePkgPath) - return isAssert || isRequire - } - return false -} diff --git a/vendor/github.com/Antonboom/testifylint/internal/checkers/useless_assert.go b/vendor/github.com/Antonboom/testifylint/internal/checkers/useless_assert.go deleted file mode 100644 index 669f9d187..000000000 --- a/vendor/github.com/Antonboom/testifylint/internal/checkers/useless_assert.go +++ /dev/null @@ -1,71 +0,0 @@ -package checkers - -import ( - "golang.org/x/tools/go/analysis" - - "github.com/Antonboom/testifylint/internal/analysisutil" -) - -// UselessAssert detects useless asserts like -// -// 1) Asserting of the same variable -// -// assert.Equal(t, tt.value, tt.value) -// assert.ElementsMatch(t, users, users) -// ... -// -// 2) Open for contribution... -type UselessAssert struct{} - -// NewUselessAssert constructs UselessAssert checker. 
-func NewUselessAssert() UselessAssert { return UselessAssert{} } -func (UselessAssert) Name() string { return "useless-assert" } - -func (checker UselessAssert) Check(pass *analysis.Pass, call *CallMeta) *analysis.Diagnostic { - switch call.Fn.NameFTrimmed { - case - "Contains", - "ElementsMatch", - "Equal", - "EqualExportedValues", - "EqualValues", - "ErrorAs", - "ErrorIs", - "Exactly", - "Greater", - "GreaterOrEqual", - "Implements", - "InDelta", - "InDeltaMapValues", - "InDeltaSlice", - "InEpsilon", - "InEpsilonSlice", - "IsType", - "JSONEq", - "Less", - "LessOrEqual", - "NotEqual", - "NotEqualValues", - "NotErrorIs", - "NotRegexp", - "NotSame", - "NotSubset", - "Regexp", - "Same", - "Subset", - "WithinDuration", - "YAMLEq": - default: - return nil - } - - if len(call.Args) < 2 { - return nil - } - first, second := call.Args[0], call.Args[1] - - if analysisutil.NodeString(pass.Fset, first) == analysisutil.NodeString(pass.Fset, second) { - return newDiagnostic(checker.Name(), call, "asserting of the same variable", nil) - } - return nil -} diff --git a/vendor/github.com/Antonboom/testifylint/internal/config/config.go b/vendor/github.com/Antonboom/testifylint/internal/config/config.go deleted file mode 100644 index 7eba0ea32..000000000 --- a/vendor/github.com/Antonboom/testifylint/internal/config/config.go +++ /dev/null @@ -1,111 +0,0 @@ -package config - -import ( - "errors" - "flag" - "fmt" - - "github.com/Antonboom/testifylint/internal/checkers" -) - -// NewDefault builds default testifylint config. -func NewDefault() Config { - return Config{ - EnableAll: false, - DisabledCheckers: nil, - DisableAll: false, - EnabledCheckers: nil, - ExpectedActual: ExpectedActualConfig{ - ExpVarPattern: RegexpValue{checkers.DefaultExpectedVarPattern}, - }, - RequireError: RequireErrorConfig{ - FnPattern: RegexpValue{nil}, - }, - SuiteExtraAssertCall: SuiteExtraAssertCallConfig{ - Mode: checkers.DefaultSuiteExtraAssertCallMode, - }, - } -} - -// Config implements testifylint configuration. -type Config struct { - EnableAll bool - DisabledCheckers KnownCheckersValue - DisableAll bool - EnabledCheckers KnownCheckersValue - - BoolCompare BoolCompareConfig - ExpectedActual ExpectedActualConfig - RequireError RequireErrorConfig - SuiteExtraAssertCall SuiteExtraAssertCallConfig -} - -// BoolCompareConfig implements configuration of checkers.BoolCompare. -type BoolCompareConfig struct { - IgnoreCustomTypes bool -} - -// ExpectedActualConfig implements configuration of checkers.ExpectedActual. -type ExpectedActualConfig struct { - ExpVarPattern RegexpValue -} - -// RequireErrorConfig implements configuration of checkers.RequireError. -type RequireErrorConfig struct { - FnPattern RegexpValue -} - -// SuiteExtraAssertCallConfig implements configuration of checkers.SuiteExtraAssertCall. 
-type SuiteExtraAssertCallConfig struct { - Mode checkers.SuiteExtraAssertCallMode -} - -func (cfg Config) Validate() error { - if cfg.EnableAll { - if cfg.DisableAll { - return errors.New("enable-all and disable-all options must not be combined") - } - - if len(cfg.EnabledCheckers) != 0 { - return errors.New("enable-all and enable options must not be combined") - } - } - - if cfg.DisableAll { - if len(cfg.DisabledCheckers) != 0 { - return errors.New("disable-all and disable options must not be combined") - } - - if len(cfg.EnabledCheckers) == 0 { - return errors.New("all checkers were disabled, but no one checker was enabled: at least one must be enabled") - } - } - - for _, checker := range cfg.DisabledCheckers { - if cfg.EnabledCheckers.Contains(checker) { - return fmt.Errorf("checker %q disabled and enabled at one moment", checker) - } - } - - return nil -} - -// BindToFlags binds Config fields to according flags. -func BindToFlags(cfg *Config, fs *flag.FlagSet) { - fs.BoolVar(&cfg.EnableAll, "enable-all", false, "enable all checkers") - fs.Var(&cfg.DisabledCheckers, "disable", "comma separated list of disabled checkers (to exclude from enabled by default)") - fs.BoolVar(&cfg.DisableAll, "disable-all", false, "disable all checkers") - fs.Var(&cfg.EnabledCheckers, "enable", "comma separated list of enabled checkers (in addition to enabled by default)") - - fs.BoolVar(&cfg.BoolCompare.IgnoreCustomTypes, "bool-compare.ignore-custom-types", false, - "ignore user defined types (over builtin bool)") - fs.Var(&cfg.ExpectedActual.ExpVarPattern, "expected-actual.pattern", "regexp for expected variable name") - fs.Var(&cfg.RequireError.FnPattern, "require-error.fn-pattern", "regexp for error assertions that should only be analyzed") - fs.Var(NewEnumValue(suiteExtraAssertCallModeAsString, &cfg.SuiteExtraAssertCall.Mode), - "suite-extra-assert-call.mode", "to require or remove extra Assert() call") -} - -var suiteExtraAssertCallModeAsString = map[string]checkers.SuiteExtraAssertCallMode{ - "remove": checkers.SuiteExtraAssertCallModeRemove, - "require": checkers.SuiteExtraAssertCallModeRequire, -} diff --git a/vendor/github.com/Antonboom/testifylint/internal/config/flag_value_types.go b/vendor/github.com/Antonboom/testifylint/internal/config/flag_value_types.go deleted file mode 100644 index 5b08ec47b..000000000 --- a/vendor/github.com/Antonboom/testifylint/internal/config/flag_value_types.go +++ /dev/null @@ -1,114 +0,0 @@ -package config - -import ( - "flag" - "fmt" - "regexp" - "sort" - "strings" - - "github.com/Antonboom/testifylint/internal/checkers" -) - -var ( - _ flag.Value = (*KnownCheckersValue)(nil) - _ flag.Value = (*RegexpValue)(nil) - _ flag.Value = (*EnumValue[checkers.SuiteExtraAssertCallMode])(nil) -) - -// KnownCheckersValue implements comma separated list of testify checkers. -type KnownCheckersValue []string - -func (kcv KnownCheckersValue) String() string { - return strings.Join(kcv, ",") -} - -func (kcv *KnownCheckersValue) Set(v string) error { - chckrs := strings.Split(v, ",") - for _, checkerName := range chckrs { - if ok := checkers.IsKnown(checkerName); !ok { - return fmt.Errorf("unknown checker %q", checkerName) - } - } - - *kcv = chckrs - return nil -} - -func (kcv KnownCheckersValue) Contains(v string) bool { - for _, checker := range kcv { - if checker == v { - return true - } - } - return false -} - -// RegexpValue is a special wrapper for support of flag.FlagSet over regexp.Regexp. -// Original regexp is available through RegexpValue.Regexp. 
-type RegexpValue struct { - *regexp.Regexp -} - -func (rv RegexpValue) String() string { - if rv.Regexp == nil { - return "" - } - return rv.Regexp.String() -} - -func (rv *RegexpValue) Set(v string) error { - compiled, err := regexp.Compile(v) - if err != nil { - return err - } - - rv.Regexp = compiled - return nil -} - -// EnumValue is a special type for support of flag.FlagSet over user-defined constants. -type EnumValue[EnumT comparable] struct { - mapping map[string]EnumT - keys []string - dst *EnumT -} - -// NewEnumValue takes the "enum-value-name to enum-value" mapping and a destination for the value passed through the CLI. -// Returns an EnumValue instance suitable for flag.FlagSet.Var. -func NewEnumValue[EnumT comparable](mapping map[string]EnumT, dst *EnumT) *EnumValue[EnumT] { - keys := make([]string, 0, len(mapping)) - for k := range mapping { - keys = append(keys, k) - } - sort.Strings(keys) - - return &EnumValue[EnumT]{ - mapping: mapping, - keys: keys, - dst: dst, - } -} - -func (e EnumValue[EnumT]) String() string { - if e.dst == nil { - return "" - } - - for k, v := range e.mapping { - if v == *e.dst { - return k - } - } - return "" -} - -func (e *EnumValue[EnumT]) Set(s string) error { - v, ok := e.mapping[s] - if !ok { - return fmt.Errorf("use one of (%v)", strings.Join(e.keys, " | ")) - } - - *e.dst = v - return nil -} diff --git a/vendor/github.com/Antonboom/testifylint/internal/testify/const.go b/vendor/github.com/Antonboom/testifylint/internal/testify/const.go deleted file mode 100644 index 3476e4040..000000000 --- a/vendor/github.com/Antonboom/testifylint/internal/testify/const.go +++ /dev/null @@ -1,17 +0,0 @@ -package testify - -const ( - ModulePath = "github.com/stretchr/testify" - - AssertPkgName = "assert" - HTTPPkgName = "http" - MockPkgName = "mock" - RequirePkgName = "require" - SuitePkgName = "suite" - - AssertPkgPath = ModulePath + "/" + AssertPkgName - HTTPPkgPath = ModulePath + "/" + HTTPPkgName - MockPkgPath = ModulePath + "/" + MockPkgName - RequirePkgPath = ModulePath + "/" + RequirePkgName - SuitePkgPath = ModulePath + "/" + SuitePkgName -) diff --git a/vendor/github.com/BurntSushi/toml/.gitignore b/vendor/github.com/BurntSushi/toml/.gitignore deleted file mode 100644 index fe79e3add..000000000 --- a/vendor/github.com/BurntSushi/toml/.gitignore +++ /dev/null @@ -1,2 +0,0 @@ -/toml.test -/toml-test diff --git a/vendor/github.com/BurntSushi/toml/COPYING b/vendor/github.com/BurntSushi/toml/COPYING deleted file mode 100644 index 01b574320..000000000 --- a/vendor/github.com/BurntSushi/toml/COPYING +++ /dev/null @@ -1,21 +0,0 @@ -The MIT License (MIT) - -Copyright (c) 2013 TOML authors - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. diff --git a/vendor/github.com/BurntSushi/toml/README.md b/vendor/github.com/BurntSushi/toml/README.md deleted file mode 100644 index 3651cfa96..000000000 --- a/vendor/github.com/BurntSushi/toml/README.md +++ /dev/null @@ -1,120 +0,0 @@ -TOML stands for Tom's Obvious, Minimal Language. This Go package provides a -reflection interface similar to Go's standard library `json` and `xml` packages. - -Compatible with TOML version [v1.0.0](https://toml.io/en/v1.0.0). - -Documentation: https://godocs.io/github.com/BurntSushi/toml - -See the [releases page](https://github.com/BurntSushi/toml/releases) for a -changelog; this information is also in the git tag annotations (e.g. `git show -v0.4.0`). - -This library requires Go 1.13 or newer; add it to your go.mod with: - - % go get github.com/BurntSushi/toml@latest - -It also comes with a TOML validator CLI tool: - - % go install github.com/BurntSushi/toml/cmd/tomlv@latest - % tomlv some-toml-file.toml - -### Examples -For the simplest example, consider some TOML file as just a list of keys and -values: - -```toml -Age = 25 -Cats = [ "Cauchy", "Plato" ] -Pi = 3.14 -Perfection = [ 6, 28, 496, 8128 ] -DOB = 1987-07-05T05:45:00Z -``` - -Which can be decoded with: - -```go -type Config struct { - Age int - Cats []string - Pi float64 - Perfection []int - DOB time.Time -} - -var conf Config -_, err := toml.Decode(tomlData, &conf) -``` - -You can also use struct tags if your struct field name doesn't map to a TOML key -value directly: - -```toml -some_key_NAME = "wat" -``` - -```go -type TOML struct { - ObscureKey string `toml:"some_key_NAME"` -} -``` - -Beware that like other decoders **only exported fields** are considered when -encoding and decoding; private fields are silently ignored. - -### Using the `Marshaler` and `encoding.TextUnmarshaler` interfaces -Here's an example that automatically parses values in a `mail.Address`: - -```toml -contacts = [ - "Donald Duck ", - "Scrooge McDuck ", -] -``` - -Can be decoded with: - -```go -// Create address type which satisfies the encoding.TextUnmarshaler interface. -type address struct { - *mail.Address -} - -func (a *address) UnmarshalText(text []byte) error { - var err error - a.Address, err = mail.ParseAddress(string(text)) - return err -} - -// Decode it. -func decode() { - blob := ` - contacts = [ - "Donald Duck ", - "Scrooge McDuck ", - ] - ` - - var contacts struct { - Contacts []address - } - - _, err := toml.Decode(blob, &contacts) - if err != nil { - log.Fatal(err) - } - - for _, c := range contacts.Contacts { - fmt.Printf("%#v\n", c.Address) - } - - // Output: - // &mail.Address{Name:"Donald Duck", Address:"donald@duckburg.com"} - // &mail.Address{Name:"Scrooge McDuck", Address:"scrooge@duckburg.com"} -} -``` - -To target TOML specifically you can implement `UnmarshalTOML` TOML interface in -a similar way. - -### More complex usage -See the [`_example/`](/_example) directory for a more complex example. 
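Illustration only (not part of the removed files or of this diff): the deleted README mentions implementing the package's `UnmarshalTOML` interface "in a similar way" but does not show it. A minimal sketch, assuming a hypothetical `semver` type decoded from a TOML string value, could look roughly like this:

```go
package main

import (
	"fmt"

	"github.com/BurntSushi/toml"
)

// semver is a hypothetical example type. UnmarshalTOML receives the
// already-parsed TOML value (here a string such as "1.2.3"), not raw text.
type semver struct {
	Major, Minor, Patch int
}

func (s *semver) UnmarshalTOML(v interface{}) error {
	raw, ok := v.(string)
	if !ok {
		return fmt.Errorf("semver: expected string, got %T", v)
	}
	_, err := fmt.Sscanf(raw, "%d.%d.%d", &s.Major, &s.Minor, &s.Patch)
	return err
}

func main() {
	var conf struct {
		Version semver
	}
	if _, err := toml.Decode(`Version = "1.2.3"`, &conf); err != nil {
		panic(err)
	}
	fmt.Printf("%+v\n", conf.Version) // {Major:1 Minor:2 Patch:3}
}
```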
diff --git a/vendor/github.com/BurntSushi/toml/decode.go b/vendor/github.com/BurntSushi/toml/decode.go deleted file mode 100644 index 4d38f3bfc..000000000 --- a/vendor/github.com/BurntSushi/toml/decode.go +++ /dev/null @@ -1,602 +0,0 @@ -package toml - -import ( - "bytes" - "encoding" - "encoding/json" - "fmt" - "io" - "io/ioutil" - "math" - "os" - "reflect" - "strconv" - "strings" - "time" -) - -// Unmarshaler is the interface implemented by objects that can unmarshal a -// TOML description of themselves. -type Unmarshaler interface { - UnmarshalTOML(interface{}) error -} - -// Unmarshal decodes the contents of data in TOML format into a pointer v. -// -// See [Decoder] for a description of the decoding process. -func Unmarshal(data []byte, v interface{}) error { - _, err := NewDecoder(bytes.NewReader(data)).Decode(v) - return err -} - -// Decode the TOML data in to the pointer v. -// -// See [Decoder] for a description of the decoding process. -func Decode(data string, v interface{}) (MetaData, error) { - return NewDecoder(strings.NewReader(data)).Decode(v) -} - -// DecodeFile reads the contents of a file and decodes it with [Decode]. -func DecodeFile(path string, v interface{}) (MetaData, error) { - fp, err := os.Open(path) - if err != nil { - return MetaData{}, err - } - defer fp.Close() - return NewDecoder(fp).Decode(v) -} - -// Primitive is a TOML value that hasn't been decoded into a Go value. -// -// This type can be used for any value, which will cause decoding to be delayed. -// You can use [PrimitiveDecode] to "manually" decode these values. -// -// NOTE: The underlying representation of a `Primitive` value is subject to -// change. Do not rely on it. -// -// NOTE: Primitive values are still parsed, so using them will only avoid the -// overhead of reflection. They can be useful when you don't know the exact type -// of TOML data until runtime. -type Primitive struct { - undecoded interface{} - context Key -} - -// The significand precision for float32 and float64 is 24 and 53 bits; this is -// the range a natural number can be stored in a float without loss of data. -const ( - maxSafeFloat32Int = 16777215 // 2^24-1 - maxSafeFloat64Int = int64(9007199254740991) // 2^53-1 -) - -// Decoder decodes TOML data. -// -// TOML tables correspond to Go structs or maps; they can be used -// interchangeably, but structs offer better type safety. -// -// TOML table arrays correspond to either a slice of structs or a slice of maps. -// -// TOML datetimes correspond to [time.Time]. Local datetimes are parsed in the -// local timezone. -// -// [time.Duration] types are treated as nanoseconds if the TOML value is an -// integer, or they're parsed with time.ParseDuration() if they're strings. -// -// All other TOML types (float, string, int, bool and array) correspond to the -// obvious Go types. -// -// An exception to the above rules is if a type implements the TextUnmarshaler -// interface, in which case any primitive TOML value (floats, strings, integers, -// booleans, datetimes) will be converted to a []byte and given to the value's -// UnmarshalText method. See the Unmarshaler example for a demonstration with -// email addresses. -// -// # Key mapping -// -// TOML keys can map to either keys in a Go map or field names in a Go struct. -// The special `toml` struct tag can be used to map TOML keys to struct fields -// that don't match the key name exactly (see the example). A case insensitive -// match to struct names will be tried if an exact match can't be found. 
-// -// The mapping between TOML values and Go values is loose. That is, there may -// exist TOML values that cannot be placed into your representation, and there -// may be parts of your representation that do not correspond to TOML values. -// This loose mapping can be made stricter by using the IsDefined and/or -// Undecoded methods on the MetaData returned. -// -// This decoder does not handle cyclic types. Decode will not terminate if a -// cyclic type is passed. -type Decoder struct { - r io.Reader -} - -// NewDecoder creates a new Decoder. -func NewDecoder(r io.Reader) *Decoder { - return &Decoder{r: r} -} - -var ( - unmarshalToml = reflect.TypeOf((*Unmarshaler)(nil)).Elem() - unmarshalText = reflect.TypeOf((*encoding.TextUnmarshaler)(nil)).Elem() - primitiveType = reflect.TypeOf((*Primitive)(nil)).Elem() -) - -// Decode TOML data in to the pointer `v`. -func (dec *Decoder) Decode(v interface{}) (MetaData, error) { - rv := reflect.ValueOf(v) - if rv.Kind() != reflect.Ptr { - s := "%q" - if reflect.TypeOf(v) == nil { - s = "%v" - } - - return MetaData{}, fmt.Errorf("toml: cannot decode to non-pointer "+s, reflect.TypeOf(v)) - } - if rv.IsNil() { - return MetaData{}, fmt.Errorf("toml: cannot decode to nil value of %q", reflect.TypeOf(v)) - } - - // Check if this is a supported type: struct, map, interface{}, or something - // that implements UnmarshalTOML or UnmarshalText. - rv = indirect(rv) - rt := rv.Type() - if rv.Kind() != reflect.Struct && rv.Kind() != reflect.Map && - !(rv.Kind() == reflect.Interface && rv.NumMethod() == 0) && - !rt.Implements(unmarshalToml) && !rt.Implements(unmarshalText) { - return MetaData{}, fmt.Errorf("toml: cannot decode to type %s", rt) - } - - // TODO: parser should read from io.Reader? Or at the very least, make it - // read from []byte rather than string - data, err := ioutil.ReadAll(dec.r) - if err != nil { - return MetaData{}, err - } - - p, err := parse(string(data)) - if err != nil { - return MetaData{}, err - } - - md := MetaData{ - mapping: p.mapping, - keyInfo: p.keyInfo, - keys: p.ordered, - decoded: make(map[string]struct{}, len(p.ordered)), - context: nil, - data: data, - } - return md, md.unify(p.mapping, rv) -} - -// PrimitiveDecode is just like the other Decode* functions, except it decodes a -// TOML value that has already been parsed. Valid primitive values can *only* be -// obtained from values filled by the decoder functions, including this method. -// (i.e., v may contain more [Primitive] values.) -// -// Meta data for primitive values is included in the meta data returned by the -// Decode* functions with one exception: keys returned by the Undecoded method -// will only reflect keys that were decoded. Namely, any keys hidden behind a -// Primitive will be considered undecoded. Executing this method will update the -// undecoded keys in the meta data. (See the example.) -func (md *MetaData) PrimitiveDecode(primValue Primitive, v interface{}) error { - md.context = primValue.context - defer func() { md.context = nil }() - return md.unify(primValue.undecoded, rvalue(v)) -} - -// unify performs a sort of type unification based on the structure of `rv`, -// which is the client representation. -// -// Any type mismatch produces an error. Finding a type that we don't know -// how to handle produces an unsupported type error. -func (md *MetaData) unify(data interface{}, rv reflect.Value) error { - // Special case. Look for a `Primitive` value. - // TODO: #76 would make this superfluous after implemented. 
- if rv.Type() == primitiveType { - // Save the undecoded data and the key context into the primitive - // value. - context := make(Key, len(md.context)) - copy(context, md.context) - rv.Set(reflect.ValueOf(Primitive{ - undecoded: data, - context: context, - })) - return nil - } - - rvi := rv.Interface() - if v, ok := rvi.(Unmarshaler); ok { - return v.UnmarshalTOML(data) - } - if v, ok := rvi.(encoding.TextUnmarshaler); ok { - return md.unifyText(data, v) - } - - // TODO: - // The behavior here is incorrect whenever a Go type satisfies the - // encoding.TextUnmarshaler interface but also corresponds to a TOML hash or - // array. In particular, the unmarshaler should only be applied to primitive - // TOML values. But at this point, it will be applied to all kinds of values - // and produce an incorrect error whenever those values are hashes or arrays - // (including arrays of tables). - - k := rv.Kind() - - if k >= reflect.Int && k <= reflect.Uint64 { - return md.unifyInt(data, rv) - } - switch k { - case reflect.Ptr: - elem := reflect.New(rv.Type().Elem()) - err := md.unify(data, reflect.Indirect(elem)) - if err != nil { - return err - } - rv.Set(elem) - return nil - case reflect.Struct: - return md.unifyStruct(data, rv) - case reflect.Map: - return md.unifyMap(data, rv) - case reflect.Array: - return md.unifyArray(data, rv) - case reflect.Slice: - return md.unifySlice(data, rv) - case reflect.String: - return md.unifyString(data, rv) - case reflect.Bool: - return md.unifyBool(data, rv) - case reflect.Interface: - if rv.NumMethod() > 0 { /// Only empty interfaces are supported. - return md.e("unsupported type %s", rv.Type()) - } - return md.unifyAnything(data, rv) - case reflect.Float32, reflect.Float64: - return md.unifyFloat64(data, rv) - } - return md.e("unsupported type %s", rv.Kind()) -} - -func (md *MetaData) unifyStruct(mapping interface{}, rv reflect.Value) error { - tmap, ok := mapping.(map[string]interface{}) - if !ok { - if mapping == nil { - return nil - } - return md.e("type mismatch for %s: expected table but found %T", - rv.Type().String(), mapping) - } - - for key, datum := range tmap { - var f *field - fields := cachedTypeFields(rv.Type()) - for i := range fields { - ff := &fields[i] - if ff.name == key { - f = ff - break - } - if f == nil && strings.EqualFold(ff.name, key) { - f = ff - } - } - if f != nil { - subv := rv - for _, i := range f.index { - subv = indirect(subv.Field(i)) - } - - if isUnifiable(subv) { - md.decoded[md.context.add(key).String()] = struct{}{} - md.context = append(md.context, key) - - err := md.unify(datum, subv) - if err != nil { - return err - } - md.context = md.context[0 : len(md.context)-1] - } else if f.name != "" { - return md.e("cannot write unexported field %s.%s", rv.Type().String(), f.name) - } - } - } - return nil -} - -func (md *MetaData) unifyMap(mapping interface{}, rv reflect.Value) error { - keyType := rv.Type().Key().Kind() - if keyType != reflect.String && keyType != reflect.Interface { - return fmt.Errorf("toml: cannot decode to a map with non-string key type (%s in %q)", - keyType, rv.Type()) - } - - tmap, ok := mapping.(map[string]interface{}) - if !ok { - if tmap == nil { - return nil - } - return md.badtype("map", mapping) - } - if rv.IsNil() { - rv.Set(reflect.MakeMap(rv.Type())) - } - for k, v := range tmap { - md.decoded[md.context.add(k).String()] = struct{}{} - md.context = append(md.context, k) - - rvval := reflect.Indirect(reflect.New(rv.Type().Elem())) - - err := md.unify(v, indirect(rvval)) - if err != nil { - 
return err - } - md.context = md.context[0 : len(md.context)-1] - - rvkey := indirect(reflect.New(rv.Type().Key())) - - switch keyType { - case reflect.Interface: - rvkey.Set(reflect.ValueOf(k)) - case reflect.String: - rvkey.SetString(k) - } - - rv.SetMapIndex(rvkey, rvval) - } - return nil -} - -func (md *MetaData) unifyArray(data interface{}, rv reflect.Value) error { - datav := reflect.ValueOf(data) - if datav.Kind() != reflect.Slice { - if !datav.IsValid() { - return nil - } - return md.badtype("slice", data) - } - if l := datav.Len(); l != rv.Len() { - return md.e("expected array length %d; got TOML array of length %d", rv.Len(), l) - } - return md.unifySliceArray(datav, rv) -} - -func (md *MetaData) unifySlice(data interface{}, rv reflect.Value) error { - datav := reflect.ValueOf(data) - if datav.Kind() != reflect.Slice { - if !datav.IsValid() { - return nil - } - return md.badtype("slice", data) - } - n := datav.Len() - if rv.IsNil() || rv.Cap() < n { - rv.Set(reflect.MakeSlice(rv.Type(), n, n)) - } - rv.SetLen(n) - return md.unifySliceArray(datav, rv) -} - -func (md *MetaData) unifySliceArray(data, rv reflect.Value) error { - l := data.Len() - for i := 0; i < l; i++ { - err := md.unify(data.Index(i).Interface(), indirect(rv.Index(i))) - if err != nil { - return err - } - } - return nil -} - -func (md *MetaData) unifyString(data interface{}, rv reflect.Value) error { - _, ok := rv.Interface().(json.Number) - if ok { - if i, ok := data.(int64); ok { - rv.SetString(strconv.FormatInt(i, 10)) - } else if f, ok := data.(float64); ok { - rv.SetString(strconv.FormatFloat(f, 'f', -1, 64)) - } else { - return md.badtype("string", data) - } - return nil - } - - if s, ok := data.(string); ok { - rv.SetString(s) - return nil - } - return md.badtype("string", data) -} - -func (md *MetaData) unifyFloat64(data interface{}, rv reflect.Value) error { - rvk := rv.Kind() - - if num, ok := data.(float64); ok { - switch rvk { - case reflect.Float32: - if num < -math.MaxFloat32 || num > math.MaxFloat32 { - return md.parseErr(errParseRange{i: num, size: rvk.String()}) - } - fallthrough - case reflect.Float64: - rv.SetFloat(num) - default: - panic("bug") - } - return nil - } - - if num, ok := data.(int64); ok { - if (rvk == reflect.Float32 && (num < -maxSafeFloat32Int || num > maxSafeFloat32Int)) || - (rvk == reflect.Float64 && (num < -maxSafeFloat64Int || num > maxSafeFloat64Int)) { - return md.parseErr(errParseRange{i: num, size: rvk.String()}) - } - rv.SetFloat(float64(num)) - return nil - } - - return md.badtype("float", data) -} - -func (md *MetaData) unifyInt(data interface{}, rv reflect.Value) error { - _, ok := rv.Interface().(time.Duration) - if ok { - // Parse as string duration, and fall back to regular integer parsing - // (as nanosecond) if this is not a string. 
- if s, ok := data.(string); ok { - dur, err := time.ParseDuration(s) - if err != nil { - return md.parseErr(errParseDuration{s}) - } - rv.SetInt(int64(dur)) - return nil - } - } - - num, ok := data.(int64) - if !ok { - return md.badtype("integer", data) - } - - rvk := rv.Kind() - switch { - case rvk >= reflect.Int && rvk <= reflect.Int64: - if (rvk == reflect.Int8 && (num < math.MinInt8 || num > math.MaxInt8)) || - (rvk == reflect.Int16 && (num < math.MinInt16 || num > math.MaxInt16)) || - (rvk == reflect.Int32 && (num < math.MinInt32 || num > math.MaxInt32)) { - return md.parseErr(errParseRange{i: num, size: rvk.String()}) - } - rv.SetInt(num) - case rvk >= reflect.Uint && rvk <= reflect.Uint64: - unum := uint64(num) - if rvk == reflect.Uint8 && (num < 0 || unum > math.MaxUint8) || - rvk == reflect.Uint16 && (num < 0 || unum > math.MaxUint16) || - rvk == reflect.Uint32 && (num < 0 || unum > math.MaxUint32) { - return md.parseErr(errParseRange{i: num, size: rvk.String()}) - } - rv.SetUint(unum) - default: - panic("unreachable") - } - return nil -} - -func (md *MetaData) unifyBool(data interface{}, rv reflect.Value) error { - if b, ok := data.(bool); ok { - rv.SetBool(b) - return nil - } - return md.badtype("boolean", data) -} - -func (md *MetaData) unifyAnything(data interface{}, rv reflect.Value) error { - rv.Set(reflect.ValueOf(data)) - return nil -} - -func (md *MetaData) unifyText(data interface{}, v encoding.TextUnmarshaler) error { - var s string - switch sdata := data.(type) { - case Marshaler: - text, err := sdata.MarshalTOML() - if err != nil { - return err - } - s = string(text) - case encoding.TextMarshaler: - text, err := sdata.MarshalText() - if err != nil { - return err - } - s = string(text) - case fmt.Stringer: - s = sdata.String() - case string: - s = sdata - case bool: - s = fmt.Sprintf("%v", sdata) - case int64: - s = fmt.Sprintf("%d", sdata) - case float64: - s = fmt.Sprintf("%f", sdata) - default: - return md.badtype("primitive (string-like)", data) - } - if err := v.UnmarshalText([]byte(s)); err != nil { - return err - } - return nil -} - -func (md *MetaData) badtype(dst string, data interface{}) error { - return md.e("incompatible types: TOML value has type %T; destination has type %s", data, dst) -} - -func (md *MetaData) parseErr(err error) error { - k := md.context.String() - return ParseError{ - LastKey: k, - Position: md.keyInfo[k].pos, - Line: md.keyInfo[k].pos.Line, - err: err, - input: string(md.data), - } -} - -func (md *MetaData) e(format string, args ...interface{}) error { - f := "toml: " - if len(md.context) > 0 { - f = fmt.Sprintf("toml: (last key %q): ", md.context) - p := md.keyInfo[md.context.String()].pos - if p.Line > 0 { - f = fmt.Sprintf("toml: line %d (last key %q): ", p.Line, md.context) - } - } - return fmt.Errorf(f+format, args...) -} - -// rvalue returns a reflect.Value of `v`. All pointers are resolved. -func rvalue(v interface{}) reflect.Value { - return indirect(reflect.ValueOf(v)) -} - -// indirect returns the value pointed to by a pointer. -// -// Pointers are followed until the value is not a pointer. New values are -// allocated for each nil pointer. -// -// An exception to this rule is if the value satisfies an interface of interest -// to us (like encoding.TextUnmarshaler). 
-func indirect(v reflect.Value) reflect.Value { - if v.Kind() != reflect.Ptr { - if v.CanSet() { - pv := v.Addr() - pvi := pv.Interface() - if _, ok := pvi.(encoding.TextUnmarshaler); ok { - return pv - } - if _, ok := pvi.(Unmarshaler); ok { - return pv - } - } - return v - } - if v.IsNil() { - v.Set(reflect.New(v.Type().Elem())) - } - return indirect(reflect.Indirect(v)) -} - -func isUnifiable(rv reflect.Value) bool { - if rv.CanSet() { - return true - } - rvi := rv.Interface() - if _, ok := rvi.(encoding.TextUnmarshaler); ok { - return true - } - if _, ok := rvi.(Unmarshaler); ok { - return true - } - return false -} diff --git a/vendor/github.com/BurntSushi/toml/decode_go116.go b/vendor/github.com/BurntSushi/toml/decode_go116.go deleted file mode 100644 index 086d0b686..000000000 --- a/vendor/github.com/BurntSushi/toml/decode_go116.go +++ /dev/null @@ -1,19 +0,0 @@ -//go:build go1.16 -// +build go1.16 - -package toml - -import ( - "io/fs" -) - -// DecodeFS reads the contents of a file from [fs.FS] and decodes it with -// [Decode]. -func DecodeFS(fsys fs.FS, path string, v interface{}) (MetaData, error) { - fp, err := fsys.Open(path) - if err != nil { - return MetaData{}, err - } - defer fp.Close() - return NewDecoder(fp).Decode(v) -} diff --git a/vendor/github.com/BurntSushi/toml/deprecated.go b/vendor/github.com/BurntSushi/toml/deprecated.go deleted file mode 100644 index b9e309717..000000000 --- a/vendor/github.com/BurntSushi/toml/deprecated.go +++ /dev/null @@ -1,29 +0,0 @@ -package toml - -import ( - "encoding" - "io" -) - -// TextMarshaler is an alias for encoding.TextMarshaler. -// -// Deprecated: use encoding.TextMarshaler -type TextMarshaler encoding.TextMarshaler - -// TextUnmarshaler is an alias for encoding.TextUnmarshaler. -// -// Deprecated: use encoding.TextUnmarshaler -type TextUnmarshaler encoding.TextUnmarshaler - -// PrimitiveDecode is an alias for MetaData.PrimitiveDecode(). -// -// Deprecated: use MetaData.PrimitiveDecode. -func PrimitiveDecode(primValue Primitive, v interface{}) error { - md := MetaData{decoded: make(map[string]struct{})} - return md.unify(primValue.undecoded, rvalue(v)) -} - -// DecodeReader is an alias for NewDecoder(r).Decode(v). -// -// Deprecated: use NewDecoder(reader).Decode(&value). -func DecodeReader(r io.Reader, v interface{}) (MetaData, error) { return NewDecoder(r).Decode(v) } diff --git a/vendor/github.com/BurntSushi/toml/doc.go b/vendor/github.com/BurntSushi/toml/doc.go deleted file mode 100644 index 81a7c0fe9..000000000 --- a/vendor/github.com/BurntSushi/toml/doc.go +++ /dev/null @@ -1,11 +0,0 @@ -// Package toml implements decoding and encoding of TOML files. -// -// This package supports TOML v1.0.0, as specified at https://toml.io -// -// There is also support for delaying decoding with the Primitive type, and -// querying the set of keys in a TOML document with the MetaData type. -// -// The github.com/BurntSushi/toml/cmd/tomlv package implements a TOML validator, -// and can be used to verify if TOML document is valid. It can also be used to -// print the type of each key. 
-package toml diff --git a/vendor/github.com/BurntSushi/toml/encode.go b/vendor/github.com/BurntSushi/toml/encode.go deleted file mode 100644 index 9cd25d757..000000000 --- a/vendor/github.com/BurntSushi/toml/encode.go +++ /dev/null @@ -1,759 +0,0 @@ -package toml - -import ( - "bufio" - "encoding" - "encoding/json" - "errors" - "fmt" - "io" - "math" - "reflect" - "sort" - "strconv" - "strings" - "time" - - "github.com/BurntSushi/toml/internal" -) - -type tomlEncodeError struct{ error } - -var ( - errArrayNilElement = errors.New("toml: cannot encode array with nil element") - errNonString = errors.New("toml: cannot encode a map with non-string key type") - errNoKey = errors.New("toml: top-level values must be Go maps or structs") - errAnything = errors.New("") // used in testing -) - -var dblQuotedReplacer = strings.NewReplacer( - "\"", "\\\"", - "\\", "\\\\", - "\x00", `\u0000`, - "\x01", `\u0001`, - "\x02", `\u0002`, - "\x03", `\u0003`, - "\x04", `\u0004`, - "\x05", `\u0005`, - "\x06", `\u0006`, - "\x07", `\u0007`, - "\b", `\b`, - "\t", `\t`, - "\n", `\n`, - "\x0b", `\u000b`, - "\f", `\f`, - "\r", `\r`, - "\x0e", `\u000e`, - "\x0f", `\u000f`, - "\x10", `\u0010`, - "\x11", `\u0011`, - "\x12", `\u0012`, - "\x13", `\u0013`, - "\x14", `\u0014`, - "\x15", `\u0015`, - "\x16", `\u0016`, - "\x17", `\u0017`, - "\x18", `\u0018`, - "\x19", `\u0019`, - "\x1a", `\u001a`, - "\x1b", `\u001b`, - "\x1c", `\u001c`, - "\x1d", `\u001d`, - "\x1e", `\u001e`, - "\x1f", `\u001f`, - "\x7f", `\u007f`, -) - -var ( - marshalToml = reflect.TypeOf((*Marshaler)(nil)).Elem() - marshalText = reflect.TypeOf((*encoding.TextMarshaler)(nil)).Elem() - timeType = reflect.TypeOf((*time.Time)(nil)).Elem() -) - -// Marshaler is the interface implemented by types that can marshal themselves -// into valid TOML. -type Marshaler interface { - MarshalTOML() ([]byte, error) -} - -// Encoder encodes a Go to a TOML document. -// -// The mapping between Go values and TOML values should be precisely the same as -// for [Decode]. -// -// time.Time is encoded as a RFC 3339 string, and time.Duration as its string -// representation. -// -// The [Marshaler] and [encoding.TextMarshaler] interfaces are supported to -// encoding the value as custom TOML. -// -// If you want to write arbitrary binary data then you will need to use -// something like base64 since TOML does not have any binary types. -// -// When encoding TOML hashes (Go maps or structs), keys without any sub-hashes -// are encoded first. -// -// Go maps will be sorted alphabetically by key for deterministic output. -// -// The toml struct tag can be used to provide the key name; if omitted the -// struct field name will be used. If the "omitempty" option is present the -// following value will be skipped: -// -// - arrays, slices, maps, and string with len of 0 -// - struct with all zero values -// - bool false -// -// If omitzero is given all int and float types with a value of 0 will be -// skipped. -// -// Encoding Go values without a corresponding TOML representation will return an -// error. Examples of this includes maps with non-string keys, slices with nil -// elements, embedded non-struct types, and nested slices containing maps or -// structs. (e.g. [][]map[string]string is not allowed but []map[string]string -// is okay, as is []map[string][]string). -// -// NOTE: only exported keys are encoded due to the use of reflection. Unexported -// keys are silently discarded. -type Encoder struct { - // String to use for a single indentation level; default is two spaces. 
- Indent string - - w *bufio.Writer - hasWritten bool // written any output to w yet? -} - -// NewEncoder create a new Encoder. -func NewEncoder(w io.Writer) *Encoder { - return &Encoder{ - w: bufio.NewWriter(w), - Indent: " ", - } -} - -// Encode writes a TOML representation of the Go value to the [Encoder]'s writer. -// -// An error is returned if the value given cannot be encoded to a valid TOML -// document. -func (enc *Encoder) Encode(v interface{}) error { - rv := eindirect(reflect.ValueOf(v)) - err := enc.safeEncode(Key([]string{}), rv) - if err != nil { - return err - } - return enc.w.Flush() -} - -func (enc *Encoder) safeEncode(key Key, rv reflect.Value) (err error) { - defer func() { - if r := recover(); r != nil { - if terr, ok := r.(tomlEncodeError); ok { - err = terr.error - return - } - panic(r) - } - }() - enc.encode(key, rv) - return nil -} - -func (enc *Encoder) encode(key Key, rv reflect.Value) { - // If we can marshal the type to text, then we use that. This prevents the - // encoder for handling these types as generic structs (or whatever the - // underlying type of a TextMarshaler is). - switch { - case isMarshaler(rv): - enc.writeKeyValue(key, rv, false) - return - case rv.Type() == primitiveType: // TODO: #76 would make this superfluous after implemented. - enc.encode(key, reflect.ValueOf(rv.Interface().(Primitive).undecoded)) - return - } - - k := rv.Kind() - switch k { - case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, - reflect.Int64, - reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, - reflect.Uint64, - reflect.Float32, reflect.Float64, reflect.String, reflect.Bool: - enc.writeKeyValue(key, rv, false) - case reflect.Array, reflect.Slice: - if typeEqual(tomlArrayHash, tomlTypeOfGo(rv)) { - enc.eArrayOfTables(key, rv) - } else { - enc.writeKeyValue(key, rv, false) - } - case reflect.Interface: - if rv.IsNil() { - return - } - enc.encode(key, rv.Elem()) - case reflect.Map: - if rv.IsNil() { - return - } - enc.eTable(key, rv) - case reflect.Ptr: - if rv.IsNil() { - return - } - enc.encode(key, rv.Elem()) - case reflect.Struct: - enc.eTable(key, rv) - default: - encPanic(fmt.Errorf("unsupported type for key '%s': %s", key, k)) - } -} - -// eElement encodes any value that can be an array element. -func (enc *Encoder) eElement(rv reflect.Value) { - switch v := rv.Interface().(type) { - case time.Time: // Using TextMarshaler adds extra quotes, which we don't want. - format := time.RFC3339Nano - switch v.Location() { - case internal.LocalDatetime: - format = "2006-01-02T15:04:05.999999999" - case internal.LocalDate: - format = "2006-01-02" - case internal.LocalTime: - format = "15:04:05.999999999" - } - switch v.Location() { - default: - enc.wf(v.Format(format)) - case internal.LocalDatetime, internal.LocalDate, internal.LocalTime: - enc.wf(v.In(time.UTC).Format(format)) - } - return - case Marshaler: - s, err := v.MarshalTOML() - if err != nil { - encPanic(err) - } - if s == nil { - encPanic(errors.New("MarshalTOML returned nil and no error")) - } - enc.w.Write(s) - return - case encoding.TextMarshaler: - s, err := v.MarshalText() - if err != nil { - encPanic(err) - } - if s == nil { - encPanic(errors.New("MarshalText returned nil and no error")) - } - enc.writeQuoted(string(s)) - return - case time.Duration: - enc.writeQuoted(v.String()) - return - case json.Number: - n, _ := rv.Interface().(json.Number) - - if n == "" { /// Useful zero value. 
- enc.w.WriteByte('0') - return - } else if v, err := n.Int64(); err == nil { - enc.eElement(reflect.ValueOf(v)) - return - } else if v, err := n.Float64(); err == nil { - enc.eElement(reflect.ValueOf(v)) - return - } - encPanic(fmt.Errorf("unable to convert %q to int64 or float64", n)) - } - - switch rv.Kind() { - case reflect.Ptr: - enc.eElement(rv.Elem()) - return - case reflect.String: - enc.writeQuoted(rv.String()) - case reflect.Bool: - enc.wf(strconv.FormatBool(rv.Bool())) - case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: - enc.wf(strconv.FormatInt(rv.Int(), 10)) - case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64: - enc.wf(strconv.FormatUint(rv.Uint(), 10)) - case reflect.Float32: - f := rv.Float() - if math.IsNaN(f) { - enc.wf("nan") - } else if math.IsInf(f, 0) { - enc.wf("%cinf", map[bool]byte{true: '-', false: '+'}[math.Signbit(f)]) - } else { - enc.wf(floatAddDecimal(strconv.FormatFloat(f, 'f', -1, 32))) - } - case reflect.Float64: - f := rv.Float() - if math.IsNaN(f) { - enc.wf("nan") - } else if math.IsInf(f, 0) { - enc.wf("%cinf", map[bool]byte{true: '-', false: '+'}[math.Signbit(f)]) - } else { - enc.wf(floatAddDecimal(strconv.FormatFloat(f, 'f', -1, 64))) - } - case reflect.Array, reflect.Slice: - enc.eArrayOrSliceElement(rv) - case reflect.Struct: - enc.eStruct(nil, rv, true) - case reflect.Map: - enc.eMap(nil, rv, true) - case reflect.Interface: - enc.eElement(rv.Elem()) - default: - encPanic(fmt.Errorf("unexpected type: %T", rv.Interface())) - } -} - -// By the TOML spec, all floats must have a decimal with at least one number on -// either side. -func floatAddDecimal(fstr string) string { - if !strings.Contains(fstr, ".") { - return fstr + ".0" - } - return fstr -} - -func (enc *Encoder) writeQuoted(s string) { - enc.wf("\"%s\"", dblQuotedReplacer.Replace(s)) -} - -func (enc *Encoder) eArrayOrSliceElement(rv reflect.Value) { - length := rv.Len() - enc.wf("[") - for i := 0; i < length; i++ { - elem := eindirect(rv.Index(i)) - enc.eElement(elem) - if i != length-1 { - enc.wf(", ") - } - } - enc.wf("]") -} - -func (enc *Encoder) eArrayOfTables(key Key, rv reflect.Value) { - if len(key) == 0 { - encPanic(errNoKey) - } - for i := 0; i < rv.Len(); i++ { - trv := eindirect(rv.Index(i)) - if isNil(trv) { - continue - } - enc.newline() - enc.wf("%s[[%s]]", enc.indentStr(key), key) - enc.newline() - enc.eMapOrStruct(key, trv, false) - } -} - -func (enc *Encoder) eTable(key Key, rv reflect.Value) { - if len(key) == 1 { - // Output an extra newline between top-level tables. - // (The newline isn't written if nothing else has been written though.) - enc.newline() - } - if len(key) > 0 { - enc.wf("%s[%s]", enc.indentStr(key), key) - enc.newline() - } - enc.eMapOrStruct(key, rv, false) -} - -func (enc *Encoder) eMapOrStruct(key Key, rv reflect.Value, inline bool) { - switch rv.Kind() { - case reflect.Map: - enc.eMap(key, rv, inline) - case reflect.Struct: - enc.eStruct(key, rv, inline) - default: - // Should never happen? - panic("eTable: unhandled reflect.Value Kind: " + rv.Kind().String()) - } -} - -func (enc *Encoder) eMap(key Key, rv reflect.Value, inline bool) { - rt := rv.Type() - if rt.Key().Kind() != reflect.String { - encPanic(errNonString) - } - - // Sort keys so that we have deterministic output. And write keys directly - // underneath this key first, before writing sub-structs or sub-maps. 
- var mapKeysDirect, mapKeysSub []string - for _, mapKey := range rv.MapKeys() { - k := mapKey.String() - if typeIsTable(tomlTypeOfGo(eindirect(rv.MapIndex(mapKey)))) { - mapKeysSub = append(mapKeysSub, k) - } else { - mapKeysDirect = append(mapKeysDirect, k) - } - } - - var writeMapKeys = func(mapKeys []string, trailC bool) { - sort.Strings(mapKeys) - for i, mapKey := range mapKeys { - val := eindirect(rv.MapIndex(reflect.ValueOf(mapKey))) - if isNil(val) { - continue - } - - if inline { - enc.writeKeyValue(Key{mapKey}, val, true) - if trailC || i != len(mapKeys)-1 { - enc.wf(", ") - } - } else { - enc.encode(key.add(mapKey), val) - } - } - } - - if inline { - enc.wf("{") - } - writeMapKeys(mapKeysDirect, len(mapKeysSub) > 0) - writeMapKeys(mapKeysSub, false) - if inline { - enc.wf("}") - } -} - -const is32Bit = (32 << (^uint(0) >> 63)) == 32 - -func pointerTo(t reflect.Type) reflect.Type { - if t.Kind() == reflect.Ptr { - return pointerTo(t.Elem()) - } - return t -} - -func (enc *Encoder) eStruct(key Key, rv reflect.Value, inline bool) { - // Write keys for fields directly under this key first, because if we write - // a field that creates a new table then all keys under it will be in that - // table (not the one we're writing here). - // - // Fields is a [][]int: for fieldsDirect this always has one entry (the - // struct index). For fieldsSub it contains two entries: the parent field - // index from tv, and the field indexes for the fields of the sub. - var ( - rt = rv.Type() - fieldsDirect, fieldsSub [][]int - addFields func(rt reflect.Type, rv reflect.Value, start []int) - ) - addFields = func(rt reflect.Type, rv reflect.Value, start []int) { - for i := 0; i < rt.NumField(); i++ { - f := rt.Field(i) - isEmbed := f.Anonymous && pointerTo(f.Type).Kind() == reflect.Struct - if f.PkgPath != "" && !isEmbed { /// Skip unexported fields. - continue - } - opts := getOptions(f.Tag) - if opts.skip { - continue - } - - frv := eindirect(rv.Field(i)) - - if is32Bit { - // Copy so it works correct on 32bit archs; not clear why this - // is needed. See #314, and https://www.reddit.com/r/golang/comments/pnx8v4 - // This also works fine on 64bit, but 32bit archs are somewhat - // rare and this is a wee bit faster. - copyStart := make([]int, len(start)) - copy(copyStart, start) - start = copyStart - } - - // Treat anonymous struct fields with tag names as though they are - // not anonymous, like encoding/json does. - // - // Non-struct anonymous fields use the normal encoding logic. - if isEmbed { - if getOptions(f.Tag).name == "" && frv.Kind() == reflect.Struct { - addFields(frv.Type(), frv, append(start, f.Index...)) - continue - } - } - - if typeIsTable(tomlTypeOfGo(frv)) { - fieldsSub = append(fieldsSub, append(start, f.Index...)) - } else { - fieldsDirect = append(fieldsDirect, append(start, f.Index...)) - } - } - } - addFields(rt, rv, nil) - - writeFields := func(fields [][]int) { - for _, fieldIndex := range fields { - fieldType := rt.FieldByIndex(fieldIndex) - fieldVal := rv.FieldByIndex(fieldIndex) - - opts := getOptions(fieldType.Tag) - if opts.skip { - continue - } - if opts.omitempty && isEmpty(fieldVal) { - continue - } - - fieldVal = eindirect(fieldVal) - - if isNil(fieldVal) { /// Don't write anything for nil fields. 
- continue - } - - keyName := fieldType.Name - if opts.name != "" { - keyName = opts.name - } - - if opts.omitzero && isZero(fieldVal) { - continue - } - - if inline { - enc.writeKeyValue(Key{keyName}, fieldVal, true) - if fieldIndex[0] != len(fields)-1 { - enc.wf(", ") - } - } else { - enc.encode(key.add(keyName), fieldVal) - } - } - } - - if inline { - enc.wf("{") - } - writeFields(fieldsDirect) - writeFields(fieldsSub) - if inline { - enc.wf("}") - } -} - -// tomlTypeOfGo returns the TOML type name of the Go value's type. -// -// It is used to determine whether the types of array elements are mixed (which -// is forbidden). If the Go value is nil, then it is illegal for it to be an -// array element, and valueIsNil is returned as true. -// -// The type may be `nil`, which means no concrete TOML type could be found. -func tomlTypeOfGo(rv reflect.Value) tomlType { - if isNil(rv) || !rv.IsValid() { - return nil - } - - if rv.Kind() == reflect.Struct { - if rv.Type() == timeType { - return tomlDatetime - } - if isMarshaler(rv) { - return tomlString - } - return tomlHash - } - - if isMarshaler(rv) { - return tomlString - } - - switch rv.Kind() { - case reflect.Bool: - return tomlBool - case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, - reflect.Int64, - reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, - reflect.Uint64: - return tomlInteger - case reflect.Float32, reflect.Float64: - return tomlFloat - case reflect.Array, reflect.Slice: - if isTableArray(rv) { - return tomlArrayHash - } - return tomlArray - case reflect.Ptr, reflect.Interface: - return tomlTypeOfGo(rv.Elem()) - case reflect.String: - return tomlString - case reflect.Map: - return tomlHash - default: - encPanic(errors.New("unsupported type: " + rv.Kind().String())) - panic("unreachable") - } -} - -func isMarshaler(rv reflect.Value) bool { - return rv.Type().Implements(marshalText) || rv.Type().Implements(marshalToml) -} - -// isTableArray reports if all entries in the array or slice are a table. -func isTableArray(arr reflect.Value) bool { - if isNil(arr) || !arr.IsValid() || arr.Len() == 0 { - return false - } - - ret := true - for i := 0; i < arr.Len(); i++ { - tt := tomlTypeOfGo(eindirect(arr.Index(i))) - // Don't allow nil. 
- if tt == nil { - encPanic(errArrayNilElement) - } - - if ret && !typeEqual(tomlHash, tt) { - ret = false - } - } - return ret -} - -type tagOptions struct { - skip bool // "-" - name string - omitempty bool - omitzero bool -} - -func getOptions(tag reflect.StructTag) tagOptions { - t := tag.Get("toml") - if t == "-" { - return tagOptions{skip: true} - } - var opts tagOptions - parts := strings.Split(t, ",") - opts.name = parts[0] - for _, s := range parts[1:] { - switch s { - case "omitempty": - opts.omitempty = true - case "omitzero": - opts.omitzero = true - } - } - return opts -} - -func isZero(rv reflect.Value) bool { - switch rv.Kind() { - case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: - return rv.Int() == 0 - case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64: - return rv.Uint() == 0 - case reflect.Float32, reflect.Float64: - return rv.Float() == 0.0 - } - return false -} - -func isEmpty(rv reflect.Value) bool { - switch rv.Kind() { - case reflect.Array, reflect.Slice, reflect.Map, reflect.String: - return rv.Len() == 0 - case reflect.Struct: - if rv.Type().Comparable() { - return reflect.Zero(rv.Type()).Interface() == rv.Interface() - } - // Need to also check if all the fields are empty, otherwise something - // like this with uncomparable types will always return true: - // - // type a struct{ field b } - // type b struct{ s []string } - // s := a{field: b{s: []string{"AAA"}}} - for i := 0; i < rv.NumField(); i++ { - if !isEmpty(rv.Field(i)) { - return false - } - } - return true - case reflect.Bool: - return !rv.Bool() - case reflect.Ptr: - return rv.IsNil() - } - return false -} - -func (enc *Encoder) newline() { - if enc.hasWritten { - enc.wf("\n") - } -} - -// Write a key/value pair: -// -// key = -// -// This is also used for "k = v" in inline tables; so something like this will -// be written in three calls: -// -// ┌───────────────────┐ -// │ ┌───┐ ┌────┐│ -// v v v v vv -// key = {k = 1, k2 = 2} -func (enc *Encoder) writeKeyValue(key Key, val reflect.Value, inline bool) { - /// Marshaler used on top-level document; call eElement() to just call - /// Marshal{TOML,Text}. - if len(key) == 0 { - enc.eElement(val) - return - } - enc.wf("%s%s = ", enc.indentStr(key), key.maybeQuoted(len(key)-1)) - enc.eElement(val) - if !inline { - enc.newline() - } -} - -func (enc *Encoder) wf(format string, v ...interface{}) { - _, err := fmt.Fprintf(enc.w, format, v...) - if err != nil { - encPanic(err) - } - enc.hasWritten = true -} - -func (enc *Encoder) indentStr(key Key) string { - return strings.Repeat(enc.Indent, len(key)-1) -} - -func encPanic(err error) { - panic(tomlEncodeError{err}) -} - -// Resolve any level of pointers to the actual value (e.g. **string → string). -func eindirect(v reflect.Value) reflect.Value { - if v.Kind() != reflect.Ptr && v.Kind() != reflect.Interface { - if isMarshaler(v) { - return v - } - if v.CanAddr() { /// Special case for marshalers; see #358. 
- if pv := v.Addr(); isMarshaler(pv) { - return pv - } - } - return v - } - - if v.IsNil() { - return v - } - - return eindirect(v.Elem()) -} - -func isNil(rv reflect.Value) bool { - switch rv.Kind() { - case reflect.Interface, reflect.Map, reflect.Ptr, reflect.Slice: - return rv.IsNil() - default: - return false - } -} diff --git a/vendor/github.com/BurntSushi/toml/error.go b/vendor/github.com/BurntSushi/toml/error.go deleted file mode 100644 index efd68865b..000000000 --- a/vendor/github.com/BurntSushi/toml/error.go +++ /dev/null @@ -1,279 +0,0 @@ -package toml - -import ( - "fmt" - "strings" -) - -// ParseError is returned when there is an error parsing the TOML syntax such as -// invalid syntax, duplicate keys, etc. -// -// In addition to the error message itself, you can also print detailed location -// information with context by using [ErrorWithPosition]: -// -// toml: error: Key 'fruit' was already created and cannot be used as an array. -// -// At line 4, column 2-7: -// -// 2 | fruit = [] -// 3 | -// 4 | [[fruit]] # Not allowed -// ^^^^^ -// -// [ErrorWithUsage] can be used to print the above with some more detailed usage -// guidance: -// -// toml: error: newlines not allowed within inline tables -// -// At line 1, column 18: -// -// 1 | x = [{ key = 42 # -// ^ -// -// Error help: -// -// Inline tables must always be on a single line: -// -// table = {key = 42, second = 43} -// -// It is invalid to split them over multiple lines like so: -// -// # INVALID -// table = { -// key = 42, -// second = 43 -// } -// -// Use regular for this: -// -// [table] -// key = 42 -// second = 43 -type ParseError struct { - Message string // Short technical message. - Usage string // Longer message with usage guidance; may be blank. - Position Position // Position of the error - LastKey string // Last parsed key, may be blank. - - // Line the error occurred. - // - // Deprecated: use [Position]. - Line int - - err error - input string -} - -// Position of an error. -type Position struct { - Line int // Line number, starting at 1. - Start int // Start of error, as byte offset starting at 0. - Len int // Lenght in bytes. -} - -func (pe ParseError) Error() string { - msg := pe.Message - if msg == "" { // Error from errorf() - msg = pe.err.Error() - } - - if pe.LastKey == "" { - return fmt.Sprintf("toml: line %d: %s", pe.Position.Line, msg) - } - return fmt.Sprintf("toml: line %d (last key %q): %s", - pe.Position.Line, pe.LastKey, msg) -} - -// ErrorWithPosition returns the error with detailed location context. -// -// See the documentation on [ParseError]. -func (pe ParseError) ErrorWithPosition() string { - if pe.input == "" { // Should never happen, but just in case. - return pe.Error() - } - - var ( - lines = strings.Split(pe.input, "\n") - col = pe.column(lines) - b = new(strings.Builder) - ) - - msg := pe.Message - if msg == "" { - msg = pe.err.Error() - } - - // TODO: don't show control characters as literals? This may not show up - // well everywhere. 
- - if pe.Position.Len == 1 { - fmt.Fprintf(b, "toml: error: %s\n\nAt line %d, column %d:\n\n", - msg, pe.Position.Line, col+1) - } else { - fmt.Fprintf(b, "toml: error: %s\n\nAt line %d, column %d-%d:\n\n", - msg, pe.Position.Line, col, col+pe.Position.Len) - } - if pe.Position.Line > 2 { - fmt.Fprintf(b, "% 7d | %s\n", pe.Position.Line-2, lines[pe.Position.Line-3]) - } - if pe.Position.Line > 1 { - fmt.Fprintf(b, "% 7d | %s\n", pe.Position.Line-1, lines[pe.Position.Line-2]) - } - fmt.Fprintf(b, "% 7d | %s\n", pe.Position.Line, lines[pe.Position.Line-1]) - fmt.Fprintf(b, "% 10s%s%s\n", "", strings.Repeat(" ", col), strings.Repeat("^", pe.Position.Len)) - return b.String() -} - -// ErrorWithUsage returns the error with detailed location context and usage -// guidance. -// -// See the documentation on [ParseError]. -func (pe ParseError) ErrorWithUsage() string { - m := pe.ErrorWithPosition() - if u, ok := pe.err.(interface{ Usage() string }); ok && u.Usage() != "" { - lines := strings.Split(strings.TrimSpace(u.Usage()), "\n") - for i := range lines { - if lines[i] != "" { - lines[i] = " " + lines[i] - } - } - return m + "Error help:\n\n" + strings.Join(lines, "\n") + "\n" - } - return m -} - -func (pe ParseError) column(lines []string) int { - var pos, col int - for i := range lines { - ll := len(lines[i]) + 1 // +1 for the removed newline - if pos+ll >= pe.Position.Start { - col = pe.Position.Start - pos - if col < 0 { // Should never happen, but just in case. - col = 0 - } - break - } - pos += ll - } - - return col -} - -type ( - errLexControl struct{ r rune } - errLexEscape struct{ r rune } - errLexUTF8 struct{ b byte } - errLexInvalidNum struct{ v string } - errLexInvalidDate struct{ v string } - errLexInlineTableNL struct{} - errLexStringNL struct{} - errParseRange struct { - i interface{} // int or float - size string // "int64", "uint16", etc. - } - errParseDuration struct{ d string } -) - -func (e errLexControl) Error() string { - return fmt.Sprintf("TOML files cannot contain control characters: '0x%02x'", e.r) -} -func (e errLexControl) Usage() string { return "" } - -func (e errLexEscape) Error() string { return fmt.Sprintf(`invalid escape in string '\%c'`, e.r) } -func (e errLexEscape) Usage() string { return usageEscape } -func (e errLexUTF8) Error() string { return fmt.Sprintf("invalid UTF-8 byte: 0x%02x", e.b) } -func (e errLexUTF8) Usage() string { return "" } -func (e errLexInvalidNum) Error() string { return fmt.Sprintf("invalid number: %q", e.v) } -func (e errLexInvalidNum) Usage() string { return "" } -func (e errLexInvalidDate) Error() string { return fmt.Sprintf("invalid date: %q", e.v) } -func (e errLexInvalidDate) Usage() string { return "" } -func (e errLexInlineTableNL) Error() string { return "newlines not allowed within inline tables" } -func (e errLexInlineTableNL) Usage() string { return usageInlineNewline } -func (e errLexStringNL) Error() string { return "strings cannot contain newlines" } -func (e errLexStringNL) Usage() string { return usageStringNewline } -func (e errParseRange) Error() string { return fmt.Sprintf("%v is out of range for %s", e.i, e.size) } -func (e errParseRange) Usage() string { return usageIntOverflow } -func (e errParseDuration) Error() string { return fmt.Sprintf("invalid duration: %q", e.d) } -func (e errParseDuration) Usage() string { return usageDuration } - -const usageEscape = ` -A '\' inside a "-delimited string is interpreted as an escape character. 
- -The following escape sequences are supported: -\b, \t, \n, \f, \r, \", \\, \uXXXX, and \UXXXXXXXX - -To prevent a '\' from being recognized as an escape character, use either: - -- a ' or '''-delimited string; escape characters aren't processed in them; or -- write two backslashes to get a single backslash: '\\'. - -If you're trying to add a Windows path (e.g. "C:\Users\martin") then using '/' -instead of '\' will usually also work: "C:/Users/martin". -` - -const usageInlineNewline = ` -Inline tables must always be on a single line: - - table = {key = 42, second = 43} - -It is invalid to split them over multiple lines like so: - - # INVALID - table = { - key = 42, - second = 43 - } - -Use regular for this: - - [table] - key = 42 - second = 43 -` - -const usageStringNewline = ` -Strings must always be on a single line, and cannot span more than one line: - - # INVALID - string = "Hello, - world!" - -Instead use """ or ''' to split strings over multiple lines: - - string = """Hello, - world!""" -` - -const usageIntOverflow = ` -This number is too large; this may be an error in the TOML, but it can also be a -bug in the program that uses too small of an integer. - -The maximum and minimum values are: - - size │ lowest │ highest - ───────┼────────────────┼────────── - int8 │ -128 │ 127 - int16 │ -32,768 │ 32,767 - int32 │ -2,147,483,648 │ 2,147,483,647 - int64 │ -9.2 × 10¹⁷ │ 9.2 × 10¹⁷ - uint8 │ 0 │ 255 - uint16 │ 0 │ 65535 - uint32 │ 0 │ 4294967295 - uint64 │ 0 │ 1.8 × 10¹⁸ - -int refers to int32 on 32-bit systems and int64 on 64-bit systems. -` - -const usageDuration = ` -A duration must be as "number", without any spaces. Valid units are: - - ns nanoseconds (billionth of a second) - us, µs microseconds (millionth of a second) - ms milliseconds (thousands of a second) - s seconds - m minutes - h hours - -You can combine multiple units; for example "5m10s" for 5 minutes and 10 -seconds. -` diff --git a/vendor/github.com/BurntSushi/toml/internal/tz.go b/vendor/github.com/BurntSushi/toml/internal/tz.go deleted file mode 100644 index 022f15bc2..000000000 --- a/vendor/github.com/BurntSushi/toml/internal/tz.go +++ /dev/null @@ -1,36 +0,0 @@ -package internal - -import "time" - -// Timezones used for local datetime, date, and time TOML types. -// -// The exact way times and dates without a timezone should be interpreted is not -// well-defined in the TOML specification and left to the implementation. These -// defaults to current local timezone offset of the computer, but this can be -// changed by changing these variables before decoding. -// -// TODO: -// Ideally we'd like to offer people the ability to configure the used timezone -// by setting Decoder.Timezone and Encoder.Timezone; however, this is a bit -// tricky: the reason we use three different variables for this is to support -// round-tripping – without these specific TZ names we wouldn't know which -// format to use. -// -// There isn't a good way to encode this right now though, and passing this sort -// of information also ties in to various related issues such as string format -// encoding, encoding of comments, etc. -// -// So, for the time being, just put this in internal until we can write a good -// comprehensive API for doing all of this. -// -// The reason they're exported is because they're referred from in e.g. -// internal/tag. -// -// Note that this behaviour is valid according to the TOML spec as the exact -// behaviour is left up to implementations. 
-var ( - localOffset = func() int { _, o := time.Now().Zone(); return o }() - LocalDatetime = time.FixedZone("datetime-local", localOffset) - LocalDate = time.FixedZone("date-local", localOffset) - LocalTime = time.FixedZone("time-local", localOffset) -) diff --git a/vendor/github.com/BurntSushi/toml/lex.go b/vendor/github.com/BurntSushi/toml/lex.go deleted file mode 100644 index 3545a6ad6..000000000 --- a/vendor/github.com/BurntSushi/toml/lex.go +++ /dev/null @@ -1,1283 +0,0 @@ -package toml - -import ( - "fmt" - "reflect" - "runtime" - "strings" - "unicode" - "unicode/utf8" -) - -type itemType int - -const ( - itemError itemType = iota - itemNIL // used in the parser to indicate no type - itemEOF - itemText - itemString - itemRawString - itemMultilineString - itemRawMultilineString - itemBool - itemInteger - itemFloat - itemDatetime - itemArray // the start of an array - itemArrayEnd - itemTableStart - itemTableEnd - itemArrayTableStart - itemArrayTableEnd - itemKeyStart - itemKeyEnd - itemCommentStart - itemInlineTableStart - itemInlineTableEnd -) - -const eof = 0 - -type stateFn func(lx *lexer) stateFn - -func (p Position) String() string { - return fmt.Sprintf("at line %d; start %d; length %d", p.Line, p.Start, p.Len) -} - -type lexer struct { - input string - start int - pos int - line int - state stateFn - items chan item - tomlNext bool - - // Allow for backing up up to 4 runes. This is necessary because TOML - // contains 3-rune tokens (""" and '''). - prevWidths [4]int - nprev int // how many of prevWidths are in use - atEOF bool // If we emit an eof, we can still back up, but it is not OK to call next again. - - // A stack of state functions used to maintain context. - // - // The idea is to reuse parts of the state machine in various places. For - // example, values can appear at the top level or within arbitrarily nested - // arrays. The last state on the stack is used after a value has been lexed. - // Similarly for comments. - stack []stateFn -} - -type item struct { - typ itemType - val string - err error - pos Position -} - -func (lx *lexer) nextItem() item { - for { - select { - case item := <-lx.items: - return item - default: - lx.state = lx.state(lx) - //fmt.Printf(" STATE %-24s current: %-10s stack: %s\n", lx.state, lx.current(), lx.stack) - } - } -} - -func lex(input string, tomlNext bool) *lexer { - lx := &lexer{ - input: input, - state: lexTop, - items: make(chan item, 10), - stack: make([]stateFn, 0, 10), - line: 1, - tomlNext: tomlNext, - } - return lx -} - -func (lx *lexer) push(state stateFn) { - lx.stack = append(lx.stack, state) -} - -func (lx *lexer) pop() stateFn { - if len(lx.stack) == 0 { - return lx.errorf("BUG in lexer: no states to pop") - } - last := lx.stack[len(lx.stack)-1] - lx.stack = lx.stack[0 : len(lx.stack)-1] - return last -} - -func (lx *lexer) current() string { - return lx.input[lx.start:lx.pos] -} - -func (lx lexer) getPos() Position { - p := Position{ - Line: lx.line, - Start: lx.start, - Len: lx.pos - lx.start, - } - if p.Len <= 0 { - p.Len = 1 - } - return p -} - -func (lx *lexer) emit(typ itemType) { - // Needed for multiline strings ending with an incomplete UTF-8 sequence. 
- if lx.start > lx.pos { - lx.error(errLexUTF8{lx.input[lx.pos]}) - return - } - lx.items <- item{typ: typ, pos: lx.getPos(), val: lx.current()} - lx.start = lx.pos -} - -func (lx *lexer) emitTrim(typ itemType) { - lx.items <- item{typ: typ, pos: lx.getPos(), val: strings.TrimSpace(lx.current())} - lx.start = lx.pos -} - -func (lx *lexer) next() (r rune) { - if lx.atEOF { - panic("BUG in lexer: next called after EOF") - } - if lx.pos >= len(lx.input) { - lx.atEOF = true - return eof - } - - if lx.input[lx.pos] == '\n' { - lx.line++ - } - lx.prevWidths[3] = lx.prevWidths[2] - lx.prevWidths[2] = lx.prevWidths[1] - lx.prevWidths[1] = lx.prevWidths[0] - if lx.nprev < 4 { - lx.nprev++ - } - - r, w := utf8.DecodeRuneInString(lx.input[lx.pos:]) - if r == utf8.RuneError { - lx.error(errLexUTF8{lx.input[lx.pos]}) - return utf8.RuneError - } - - // Note: don't use peek() here, as this calls next(). - if isControl(r) || (r == '\r' && (len(lx.input)-1 == lx.pos || lx.input[lx.pos+1] != '\n')) { - lx.errorControlChar(r) - return utf8.RuneError - } - - lx.prevWidths[0] = w - lx.pos += w - return r -} - -// ignore skips over the pending input before this point. -func (lx *lexer) ignore() { - lx.start = lx.pos -} - -// backup steps back one rune. Can be called 4 times between calls to next. -func (lx *lexer) backup() { - if lx.atEOF { - lx.atEOF = false - return - } - if lx.nprev < 1 { - panic("BUG in lexer: backed up too far") - } - w := lx.prevWidths[0] - lx.prevWidths[0] = lx.prevWidths[1] - lx.prevWidths[1] = lx.prevWidths[2] - lx.prevWidths[2] = lx.prevWidths[3] - lx.nprev-- - - lx.pos -= w - if lx.pos < len(lx.input) && lx.input[lx.pos] == '\n' { - lx.line-- - } -} - -// accept consumes the next rune if it's equal to `valid`. -func (lx *lexer) accept(valid rune) bool { - if lx.next() == valid { - return true - } - lx.backup() - return false -} - -// peek returns but does not consume the next rune in the input. -func (lx *lexer) peek() rune { - r := lx.next() - lx.backup() - return r -} - -// skip ignores all input that matches the given predicate. -func (lx *lexer) skip(pred func(rune) bool) { - for { - r := lx.next() - if pred(r) { - continue - } - lx.backup() - lx.ignore() - return - } -} - -// error stops all lexing by emitting an error and returning `nil`. -// -// Note that any value that is a character is escaped if it's a special -// character (newlines, tabs, etc.). -func (lx *lexer) error(err error) stateFn { - if lx.atEOF { - return lx.errorPrevLine(err) - } - lx.items <- item{typ: itemError, pos: lx.getPos(), err: err} - return nil -} - -// errorfPrevline is like error(), but sets the position to the last column of -// the previous line. -// -// This is so that unexpected EOF or NL errors don't show on a new blank line. -func (lx *lexer) errorPrevLine(err error) stateFn { - pos := lx.getPos() - pos.Line-- - pos.Len = 1 - pos.Start = lx.pos - 1 - lx.items <- item{typ: itemError, pos: pos, err: err} - return nil -} - -// errorPos is like error(), but allows explicitly setting the position. -func (lx *lexer) errorPos(start, length int, err error) stateFn { - pos := lx.getPos() - pos.Start = start - pos.Len = length - lx.items <- item{typ: itemError, pos: pos, err: err} - return nil -} - -// errorf is like error, and creates a new error. 
-func (lx *lexer) errorf(format string, values ...interface{}) stateFn { - if lx.atEOF { - pos := lx.getPos() - pos.Line-- - pos.Len = 1 - pos.Start = lx.pos - 1 - lx.items <- item{typ: itemError, pos: pos, err: fmt.Errorf(format, values...)} - return nil - } - lx.items <- item{typ: itemError, pos: lx.getPos(), err: fmt.Errorf(format, values...)} - return nil -} - -func (lx *lexer) errorControlChar(cc rune) stateFn { - return lx.errorPos(lx.pos-1, 1, errLexControl{cc}) -} - -// lexTop consumes elements at the top level of TOML data. -func lexTop(lx *lexer) stateFn { - r := lx.next() - if isWhitespace(r) || isNL(r) { - return lexSkip(lx, lexTop) - } - switch r { - case '#': - lx.push(lexTop) - return lexCommentStart - case '[': - return lexTableStart - case eof: - if lx.pos > lx.start { - return lx.errorf("unexpected EOF") - } - lx.emit(itemEOF) - return nil - } - - // At this point, the only valid item can be a key, so we back up - // and let the key lexer do the rest. - lx.backup() - lx.push(lexTopEnd) - return lexKeyStart -} - -// lexTopEnd is entered whenever a top-level item has been consumed. (A value -// or a table.) It must see only whitespace, and will turn back to lexTop -// upon a newline. If it sees EOF, it will quit the lexer successfully. -func lexTopEnd(lx *lexer) stateFn { - r := lx.next() - switch { - case r == '#': - // a comment will read to a newline for us. - lx.push(lexTop) - return lexCommentStart - case isWhitespace(r): - return lexTopEnd - case isNL(r): - lx.ignore() - return lexTop - case r == eof: - lx.emit(itemEOF) - return nil - } - return lx.errorf( - "expected a top-level item to end with a newline, comment, or EOF, but got %q instead", - r) -} - -// lexTable lexes the beginning of a table. Namely, it makes sure that -// it starts with a character other than '.' and ']'. -// It assumes that '[' has already been consumed. -// It also handles the case that this is an item in an array of tables. -// e.g., '[[name]]'. -func lexTableStart(lx *lexer) stateFn { - if lx.peek() == '[' { - lx.next() - lx.emit(itemArrayTableStart) - lx.push(lexArrayTableEnd) - } else { - lx.emit(itemTableStart) - lx.push(lexTableEnd) - } - return lexTableNameStart -} - -func lexTableEnd(lx *lexer) stateFn { - lx.emit(itemTableEnd) - return lexTopEnd -} - -func lexArrayTableEnd(lx *lexer) stateFn { - if r := lx.next(); r != ']' { - return lx.errorf("expected end of table array name delimiter ']', but got %q instead", r) - } - lx.emit(itemArrayTableEnd) - return lexTopEnd -} - -func lexTableNameStart(lx *lexer) stateFn { - lx.skip(isWhitespace) - switch r := lx.peek(); { - case r == ']' || r == eof: - return lx.errorf("unexpected end of table name (table names cannot be empty)") - case r == '.': - return lx.errorf("unexpected table separator (table names cannot be empty)") - case r == '"' || r == '\'': - lx.ignore() - lx.push(lexTableNameEnd) - return lexQuotedName - default: - lx.push(lexTableNameEnd) - return lexBareName - } -} - -// lexTableNameEnd reads the end of a piece of a table name, optionally -// consuming whitespace. -func lexTableNameEnd(lx *lexer) stateFn { - lx.skip(isWhitespace) - switch r := lx.next(); { - case isWhitespace(r): - return lexTableNameEnd - case r == '.': - lx.ignore() - return lexTableNameStart - case r == ']': - return lx.pop() - default: - return lx.errorf("expected '.' or ']' to end table name, but got %q instead", r) - } -} - -// lexBareName lexes one part of a key or table. 
-// -// It assumes that at least one valid character for the table has already been -// read. -// -// Lexes only one part, e.g. only 'a' inside 'a.b'. -func lexBareName(lx *lexer) stateFn { - r := lx.next() - if isBareKeyChar(r, lx.tomlNext) { - return lexBareName - } - lx.backup() - lx.emit(itemText) - return lx.pop() -} - -// lexBareName lexes one part of a key or table. -// -// It assumes that at least one valid character for the table has already been -// read. -// -// Lexes only one part, e.g. only '"a"' inside '"a".b'. -func lexQuotedName(lx *lexer) stateFn { - r := lx.next() - switch { - case isWhitespace(r): - return lexSkip(lx, lexValue) - case r == '"': - lx.ignore() // ignore the '"' - return lexString - case r == '\'': - lx.ignore() // ignore the "'" - return lexRawString - case r == eof: - return lx.errorf("unexpected EOF; expected value") - default: - return lx.errorf("expected value but found %q instead", r) - } -} - -// lexKeyStart consumes all key parts until a '='. -func lexKeyStart(lx *lexer) stateFn { - lx.skip(isWhitespace) - switch r := lx.peek(); { - case r == '=' || r == eof: - return lx.errorf("unexpected '=': key name appears blank") - case r == '.': - return lx.errorf("unexpected '.': keys cannot start with a '.'") - case r == '"' || r == '\'': - lx.ignore() - fallthrough - default: // Bare key - lx.emit(itemKeyStart) - return lexKeyNameStart - } -} - -func lexKeyNameStart(lx *lexer) stateFn { - lx.skip(isWhitespace) - switch r := lx.peek(); { - case r == '=' || r == eof: - return lx.errorf("unexpected '='") - case r == '.': - return lx.errorf("unexpected '.'") - case r == '"' || r == '\'': - lx.ignore() - lx.push(lexKeyEnd) - return lexQuotedName - default: - lx.push(lexKeyEnd) - return lexBareName - } -} - -// lexKeyEnd consumes the end of a key and trims whitespace (up to the key -// separator). -func lexKeyEnd(lx *lexer) stateFn { - lx.skip(isWhitespace) - switch r := lx.next(); { - case isWhitespace(r): - return lexSkip(lx, lexKeyEnd) - case r == eof: - return lx.errorf("unexpected EOF; expected key separator '='") - case r == '.': - lx.ignore() - return lexKeyNameStart - case r == '=': - lx.emit(itemKeyEnd) - return lexSkip(lx, lexValue) - default: - return lx.errorf("expected '.' or '=', but got %q instead", r) - } -} - -// lexValue starts the consumption of a value anywhere a value is expected. -// lexValue will ignore whitespace. -// After a value is lexed, the last state on the next is popped and returned. -func lexValue(lx *lexer) stateFn { - // We allow whitespace to precede a value, but NOT newlines. - // In array syntax, the array states are responsible for ignoring newlines. 
- r := lx.next() - switch { - case isWhitespace(r): - return lexSkip(lx, lexValue) - case isDigit(r): - lx.backup() // avoid an extra state and use the same as above - return lexNumberOrDateStart - } - switch r { - case '[': - lx.ignore() - lx.emit(itemArray) - return lexArrayValue - case '{': - lx.ignore() - lx.emit(itemInlineTableStart) - return lexInlineTableValue - case '"': - if lx.accept('"') { - if lx.accept('"') { - lx.ignore() // Ignore """ - return lexMultilineString - } - lx.backup() - } - lx.ignore() // ignore the '"' - return lexString - case '\'': - if lx.accept('\'') { - if lx.accept('\'') { - lx.ignore() // Ignore """ - return lexMultilineRawString - } - lx.backup() - } - lx.ignore() // ignore the "'" - return lexRawString - case '.': // special error case, be kind to users - return lx.errorf("floats must start with a digit, not '.'") - case 'i', 'n': - if (lx.accept('n') && lx.accept('f')) || (lx.accept('a') && lx.accept('n')) { - lx.emit(itemFloat) - return lx.pop() - } - case '-', '+': - return lexDecimalNumberStart - } - if unicode.IsLetter(r) { - // Be permissive here; lexBool will give a nice error if the - // user wrote something like - // x = foo - // (i.e. not 'true' or 'false' but is something else word-like.) - lx.backup() - return lexBool - } - if r == eof { - return lx.errorf("unexpected EOF; expected value") - } - return lx.errorf("expected value but found %q instead", r) -} - -// lexArrayValue consumes one value in an array. It assumes that '[' or ',' -// have already been consumed. All whitespace and newlines are ignored. -func lexArrayValue(lx *lexer) stateFn { - r := lx.next() - switch { - case isWhitespace(r) || isNL(r): - return lexSkip(lx, lexArrayValue) - case r == '#': - lx.push(lexArrayValue) - return lexCommentStart - case r == ',': - return lx.errorf("unexpected comma") - case r == ']': - return lexArrayEnd - } - - lx.backup() - lx.push(lexArrayValueEnd) - return lexValue -} - -// lexArrayValueEnd consumes everything between the end of an array value and -// the next value (or the end of the array): it ignores whitespace and newlines -// and expects either a ',' or a ']'. -func lexArrayValueEnd(lx *lexer) stateFn { - switch r := lx.next(); { - case isWhitespace(r) || isNL(r): - return lexSkip(lx, lexArrayValueEnd) - case r == '#': - lx.push(lexArrayValueEnd) - return lexCommentStart - case r == ',': - lx.ignore() - return lexArrayValue // move on to the next value - case r == ']': - return lexArrayEnd - default: - return lx.errorf("expected a comma (',') or array terminator (']'), but got %s", runeOrEOF(r)) - } -} - -// lexArrayEnd finishes the lexing of an array. -// It assumes that a ']' has just been consumed. -func lexArrayEnd(lx *lexer) stateFn { - lx.ignore() - lx.emit(itemArrayEnd) - return lx.pop() -} - -// lexInlineTableValue consumes one key/value pair in an inline table. -// It assumes that '{' or ',' have already been consumed. Whitespace is ignored. 
-func lexInlineTableValue(lx *lexer) stateFn { - r := lx.next() - switch { - case isWhitespace(r): - return lexSkip(lx, lexInlineTableValue) - case isNL(r): - if lx.tomlNext { - return lexSkip(lx, lexInlineTableValue) - } - return lx.errorPrevLine(errLexInlineTableNL{}) - case r == '#': - lx.push(lexInlineTableValue) - return lexCommentStart - case r == ',': - return lx.errorf("unexpected comma") - case r == '}': - return lexInlineTableEnd - } - lx.backup() - lx.push(lexInlineTableValueEnd) - return lexKeyStart -} - -// lexInlineTableValueEnd consumes everything between the end of an inline table -// key/value pair and the next pair (or the end of the table): -// it ignores whitespace and expects either a ',' or a '}'. -func lexInlineTableValueEnd(lx *lexer) stateFn { - switch r := lx.next(); { - case isWhitespace(r): - return lexSkip(lx, lexInlineTableValueEnd) - case isNL(r): - if lx.tomlNext { - return lexSkip(lx, lexInlineTableValueEnd) - } - return lx.errorPrevLine(errLexInlineTableNL{}) - case r == '#': - lx.push(lexInlineTableValueEnd) - return lexCommentStart - case r == ',': - lx.ignore() - lx.skip(isWhitespace) - if lx.peek() == '}' { - if lx.tomlNext { - return lexInlineTableValueEnd - } - return lx.errorf("trailing comma not allowed in inline tables") - } - return lexInlineTableValue - case r == '}': - return lexInlineTableEnd - default: - return lx.errorf("expected a comma or an inline table terminator '}', but got %s instead", runeOrEOF(r)) - } -} - -func runeOrEOF(r rune) string { - if r == eof { - return "end of file" - } - return "'" + string(r) + "'" -} - -// lexInlineTableEnd finishes the lexing of an inline table. -// It assumes that a '}' has just been consumed. -func lexInlineTableEnd(lx *lexer) stateFn { - lx.ignore() - lx.emit(itemInlineTableEnd) - return lx.pop() -} - -// lexString consumes the inner contents of a string. It assumes that the -// beginning '"' has already been consumed and ignored. -func lexString(lx *lexer) stateFn { - r := lx.next() - switch { - case r == eof: - return lx.errorf(`unexpected EOF; expected '"'`) - case isNL(r): - return lx.errorPrevLine(errLexStringNL{}) - case r == '\\': - lx.push(lexString) - return lexStringEscape - case r == '"': - lx.backup() - lx.emit(itemString) - lx.next() - lx.ignore() - return lx.pop() - } - return lexString -} - -// lexMultilineString consumes the inner contents of a string. It assumes that -// the beginning '"""' has already been consumed and ignored. -func lexMultilineString(lx *lexer) stateFn { - r := lx.next() - switch r { - default: - return lexMultilineString - case eof: - return lx.errorf(`unexpected EOF; expected '"""'`) - case '\\': - return lexMultilineStringEscape - case '"': - /// Found " → try to read two more "". - if lx.accept('"') { - if lx.accept('"') { - /// Peek ahead: the string can contain " and "", including at the - /// end: """str""""" - /// 6 or more at the end, however, is an error. - if lx.peek() == '"' { - /// Check if we already lexed 5 's; if so we have 6 now, and - /// that's just too many man! - /// - /// Second check is for the edge case: - /// - /// two quotes allowed. - /// vv - /// """lol \"""""" - /// ^^ ^^^---- closing three - /// escaped - /// - /// But ugly, but it works - if strings.HasSuffix(lx.current(), `"""""`) && !strings.HasSuffix(lx.current(), `\"""""`) { - return lx.errorf(`unexpected '""""""'`) - } - lx.backup() - lx.backup() - return lexMultilineString - } - - lx.backup() /// backup: don't include the """ in the item. 
- lx.backup() - lx.backup() - lx.emit(itemMultilineString) - lx.next() /// Read over ''' again and discard it. - lx.next() - lx.next() - lx.ignore() - return lx.pop() - } - lx.backup() - } - return lexMultilineString - } -} - -// lexRawString consumes a raw string. Nothing can be escaped in such a string. -// It assumes that the beginning "'" has already been consumed and ignored. -func lexRawString(lx *lexer) stateFn { - r := lx.next() - switch { - default: - return lexRawString - case r == eof: - return lx.errorf(`unexpected EOF; expected "'"`) - case isNL(r): - return lx.errorPrevLine(errLexStringNL{}) - case r == '\'': - lx.backup() - lx.emit(itemRawString) - lx.next() - lx.ignore() - return lx.pop() - } -} - -// lexMultilineRawString consumes a raw string. Nothing can be escaped in such a -// string. It assumes that the beginning triple-' has already been consumed and -// ignored. -func lexMultilineRawString(lx *lexer) stateFn { - r := lx.next() - switch r { - default: - return lexMultilineRawString - case eof: - return lx.errorf(`unexpected EOF; expected "'''"`) - case '\'': - /// Found ' → try to read two more ''. - if lx.accept('\'') { - if lx.accept('\'') { - /// Peek ahead: the string can contain ' and '', including at the - /// end: '''str''''' - /// 6 or more at the end, however, is an error. - if lx.peek() == '\'' { - /// Check if we already lexed 5 's; if so we have 6 now, and - /// that's just too many man! - if strings.HasSuffix(lx.current(), "'''''") { - return lx.errorf(`unexpected "''''''"`) - } - lx.backup() - lx.backup() - return lexMultilineRawString - } - - lx.backup() /// backup: don't include the ''' in the item. - lx.backup() - lx.backup() - lx.emit(itemRawMultilineString) - lx.next() /// Read over ''' again and discard it. - lx.next() - lx.next() - lx.ignore() - return lx.pop() - } - lx.backup() - } - return lexMultilineRawString - } -} - -// lexMultilineStringEscape consumes an escaped character. It assumes that the -// preceding '\\' has already been consumed. -func lexMultilineStringEscape(lx *lexer) stateFn { - if isNL(lx.next()) { /// \ escaping newline. - return lexMultilineString - } - lx.backup() - lx.push(lexMultilineString) - return lexStringEscape(lx) -} - -func lexStringEscape(lx *lexer) stateFn { - r := lx.next() - switch r { - case 'e': - if !lx.tomlNext { - return lx.error(errLexEscape{r}) - } - fallthrough - case 'b': - fallthrough - case 't': - fallthrough - case 'n': - fallthrough - case 'f': - fallthrough - case 'r': - fallthrough - case '"': - fallthrough - case ' ', '\t': - // Inside """ .. """ strings you can use \ to escape newlines, and any - // amount of whitespace can be between the \ and \n. 
- fallthrough - case '\\': - return lx.pop() - case 'x': - if !lx.tomlNext { - return lx.error(errLexEscape{r}) - } - return lexHexEscape - case 'u': - return lexShortUnicodeEscape - case 'U': - return lexLongUnicodeEscape - } - return lx.error(errLexEscape{r}) -} - -func lexHexEscape(lx *lexer) stateFn { - var r rune - for i := 0; i < 2; i++ { - r = lx.next() - if !isHexadecimal(r) { - return lx.errorf( - `expected two hexadecimal digits after '\x', but got %q instead`, - lx.current()) - } - } - return lx.pop() -} - -func lexShortUnicodeEscape(lx *lexer) stateFn { - var r rune - for i := 0; i < 4; i++ { - r = lx.next() - if !isHexadecimal(r) { - return lx.errorf( - `expected four hexadecimal digits after '\u', but got %q instead`, - lx.current()) - } - } - return lx.pop() -} - -func lexLongUnicodeEscape(lx *lexer) stateFn { - var r rune - for i := 0; i < 8; i++ { - r = lx.next() - if !isHexadecimal(r) { - return lx.errorf( - `expected eight hexadecimal digits after '\U', but got %q instead`, - lx.current()) - } - } - return lx.pop() -} - -// lexNumberOrDateStart processes the first character of a value which begins -// with a digit. It exists to catch values starting with '0', so that -// lexBaseNumberOrDate can differentiate base prefixed integers from other -// types. -func lexNumberOrDateStart(lx *lexer) stateFn { - r := lx.next() - switch r { - case '0': - return lexBaseNumberOrDate - } - - if !isDigit(r) { - // The only way to reach this state is if the value starts - // with a digit, so specifically treat anything else as an - // error. - return lx.errorf("expected a digit but got %q", r) - } - - return lexNumberOrDate -} - -// lexNumberOrDate consumes either an integer, float or datetime. -func lexNumberOrDate(lx *lexer) stateFn { - r := lx.next() - if isDigit(r) { - return lexNumberOrDate - } - switch r { - case '-', ':': - return lexDatetime - case '_': - return lexDecimalNumber - case '.', 'e', 'E': - return lexFloat - } - - lx.backup() - lx.emit(itemInteger) - return lx.pop() -} - -// lexDatetime consumes a Datetime, to a first approximation. -// The parser validates that it matches one of the accepted formats. -func lexDatetime(lx *lexer) stateFn { - r := lx.next() - if isDigit(r) { - return lexDatetime - } - switch r { - case '-', ':', 'T', 't', ' ', '.', 'Z', 'z', '+': - return lexDatetime - } - - lx.backup() - lx.emitTrim(itemDatetime) - return lx.pop() -} - -// lexHexInteger consumes a hexadecimal integer after seeing the '0x' prefix. -func lexHexInteger(lx *lexer) stateFn { - r := lx.next() - if isHexadecimal(r) { - return lexHexInteger - } - switch r { - case '_': - return lexHexInteger - } - - lx.backup() - lx.emit(itemInteger) - return lx.pop() -} - -// lexOctalInteger consumes an octal integer after seeing the '0o' prefix. -func lexOctalInteger(lx *lexer) stateFn { - r := lx.next() - if isOctal(r) { - return lexOctalInteger - } - switch r { - case '_': - return lexOctalInteger - } - - lx.backup() - lx.emit(itemInteger) - return lx.pop() -} - -// lexBinaryInteger consumes a binary integer after seeing the '0b' prefix. -func lexBinaryInteger(lx *lexer) stateFn { - r := lx.next() - if isBinary(r) { - return lexBinaryInteger - } - switch r { - case '_': - return lexBinaryInteger - } - - lx.backup() - lx.emit(itemInteger) - return lx.pop() -} - -// lexDecimalNumber consumes a decimal float or integer. 
-func lexDecimalNumber(lx *lexer) stateFn { - r := lx.next() - if isDigit(r) { - return lexDecimalNumber - } - switch r { - case '.', 'e', 'E': - return lexFloat - case '_': - return lexDecimalNumber - } - - lx.backup() - lx.emit(itemInteger) - return lx.pop() -} - -// lexDecimalNumber consumes the first digit of a number beginning with a sign. -// It assumes the sign has already been consumed. Values which start with a sign -// are only allowed to be decimal integers or floats. -// -// The special "nan" and "inf" values are also recognized. -func lexDecimalNumberStart(lx *lexer) stateFn { - r := lx.next() - - // Special error cases to give users better error messages - switch r { - case 'i': - if !lx.accept('n') || !lx.accept('f') { - return lx.errorf("invalid float: '%s'", lx.current()) - } - lx.emit(itemFloat) - return lx.pop() - case 'n': - if !lx.accept('a') || !lx.accept('n') { - return lx.errorf("invalid float: '%s'", lx.current()) - } - lx.emit(itemFloat) - return lx.pop() - case '0': - p := lx.peek() - switch p { - case 'b', 'o', 'x': - return lx.errorf("cannot use sign with non-decimal numbers: '%s%c'", lx.current(), p) - } - case '.': - return lx.errorf("floats must start with a digit, not '.'") - } - - if isDigit(r) { - return lexDecimalNumber - } - - return lx.errorf("expected a digit but got %q", r) -} - -// lexBaseNumberOrDate differentiates between the possible values which -// start with '0'. It assumes that before reaching this state, the initial '0' -// has been consumed. -func lexBaseNumberOrDate(lx *lexer) stateFn { - r := lx.next() - // Note: All datetimes start with at least two digits, so we don't - // handle date characters (':', '-', etc.) here. - if isDigit(r) { - return lexNumberOrDate - } - switch r { - case '_': - // Can only be decimal, because there can't be an underscore - // between the '0' and the base designator, and dates can't - // contain underscores. - return lexDecimalNumber - case '.', 'e', 'E': - return lexFloat - case 'b': - r = lx.peek() - if !isBinary(r) { - lx.errorf("not a binary number: '%s%c'", lx.current(), r) - } - return lexBinaryInteger - case 'o': - r = lx.peek() - if !isOctal(r) { - lx.errorf("not an octal number: '%s%c'", lx.current(), r) - } - return lexOctalInteger - case 'x': - r = lx.peek() - if !isHexadecimal(r) { - lx.errorf("not a hexidecimal number: '%s%c'", lx.current(), r) - } - return lexHexInteger - } - - lx.backup() - lx.emit(itemInteger) - return lx.pop() -} - -// lexFloat consumes the elements of a float. It allows any sequence of -// float-like characters, so floats emitted by the lexer are only a first -// approximation and must be validated by the parser. -func lexFloat(lx *lexer) stateFn { - r := lx.next() - if isDigit(r) { - return lexFloat - } - switch r { - case '_', '.', '-', '+', 'e', 'E': - return lexFloat - } - - lx.backup() - lx.emit(itemFloat) - return lx.pop() -} - -// lexBool consumes a bool string: 'true' or 'false. -func lexBool(lx *lexer) stateFn { - var rs []rune - for { - r := lx.next() - if !unicode.IsLetter(r) { - lx.backup() - break - } - rs = append(rs, r) - } - s := string(rs) - switch s { - case "true", "false": - lx.emit(itemBool) - return lx.pop() - } - return lx.errorf("expected value but found %q instead", s) -} - -// lexCommentStart begins the lexing of a comment. It will emit -// itemCommentStart and consume no characters, passing control to lexComment. 
-func lexCommentStart(lx *lexer) stateFn { - lx.ignore() - lx.emit(itemCommentStart) - return lexComment -} - -// lexComment lexes an entire comment. It assumes that '#' has been consumed. -// It will consume *up to* the first newline character, and pass control -// back to the last state on the stack. -func lexComment(lx *lexer) stateFn { - switch r := lx.next(); { - case isNL(r) || r == eof: - lx.backup() - lx.emit(itemText) - return lx.pop() - default: - return lexComment - } -} - -// lexSkip ignores all slurped input and moves on to the next state. -func lexSkip(lx *lexer, nextState stateFn) stateFn { - lx.ignore() - return nextState -} - -func (s stateFn) String() string { - name := runtime.FuncForPC(reflect.ValueOf(s).Pointer()).Name() - if i := strings.LastIndexByte(name, '.'); i > -1 { - name = name[i+1:] - } - if s == nil { - name = "" - } - return name + "()" -} - -func (itype itemType) String() string { - switch itype { - case itemError: - return "Error" - case itemNIL: - return "NIL" - case itemEOF: - return "EOF" - case itemText: - return "Text" - case itemString, itemRawString, itemMultilineString, itemRawMultilineString: - return "String" - case itemBool: - return "Bool" - case itemInteger: - return "Integer" - case itemFloat: - return "Float" - case itemDatetime: - return "DateTime" - case itemTableStart: - return "TableStart" - case itemTableEnd: - return "TableEnd" - case itemKeyStart: - return "KeyStart" - case itemKeyEnd: - return "KeyEnd" - case itemArray: - return "Array" - case itemArrayEnd: - return "ArrayEnd" - case itemCommentStart: - return "CommentStart" - case itemInlineTableStart: - return "InlineTableStart" - case itemInlineTableEnd: - return "InlineTableEnd" - } - panic(fmt.Sprintf("BUG: Unknown type '%d'.", int(itype))) -} - -func (item item) String() string { - return fmt.Sprintf("(%s, %s)", item.typ.String(), item.val) -} - -func isWhitespace(r rune) bool { return r == '\t' || r == ' ' } -func isNL(r rune) bool { return r == '\n' || r == '\r' } -func isControl(r rune) bool { // Control characters except \t, \r, \n - switch r { - case '\t', '\r', '\n': - return false - default: - return (r >= 0x00 && r <= 0x1f) || r == 0x7f - } -} -func isDigit(r rune) bool { return r >= '0' && r <= '9' } -func isBinary(r rune) bool { return r == '0' || r == '1' } -func isOctal(r rune) bool { return r >= '0' && r <= '7' } -func isHexadecimal(r rune) bool { - return (r >= '0' && r <= '9') || (r >= 'a' && r <= 'f') || (r >= 'A' && r <= 'F') -} - -func isBareKeyChar(r rune, tomlNext bool) bool { - if tomlNext { - return (r >= 'A' && r <= 'Z') || - (r >= 'a' && r <= 'z') || - (r >= '0' && r <= '9') || - r == '_' || r == '-' || - r == 0xb2 || r == 0xb3 || r == 0xb9 || (r >= 0xbc && r <= 0xbe) || - (r >= 0xc0 && r <= 0xd6) || (r >= 0xd8 && r <= 0xf6) || (r >= 0xf8 && r <= 0x037d) || - (r >= 0x037f && r <= 0x1fff) || - (r >= 0x200c && r <= 0x200d) || (r >= 0x203f && r <= 0x2040) || - (r >= 0x2070 && r <= 0x218f) || (r >= 0x2460 && r <= 0x24ff) || - (r >= 0x2c00 && r <= 0x2fef) || (r >= 0x3001 && r <= 0xd7ff) || - (r >= 0xf900 && r <= 0xfdcf) || (r >= 0xfdf0 && r <= 0xfffd) || - (r >= 0x10000 && r <= 0xeffff) - } - - return (r >= 'A' && r <= 'Z') || - (r >= 'a' && r <= 'z') || - (r >= '0' && r <= '9') || - r == '_' || r == '-' -} diff --git a/vendor/github.com/BurntSushi/toml/meta.go b/vendor/github.com/BurntSushi/toml/meta.go deleted file mode 100644 index 2e78b24e9..000000000 --- a/vendor/github.com/BurntSushi/toml/meta.go +++ /dev/null @@ -1,121 +0,0 @@ -package toml - -import 
( - "strings" -) - -// MetaData allows access to meta information about TOML data that's not -// accessible otherwise. -// -// It allows checking if a key is defined in the TOML data, whether any keys -// were undecoded, and the TOML type of a key. -type MetaData struct { - context Key // Used only during decoding. - - keyInfo map[string]keyInfo - mapping map[string]interface{} - keys []Key - decoded map[string]struct{} - data []byte // Input file; for errors. -} - -// IsDefined reports if the key exists in the TOML data. -// -// The key should be specified hierarchically, for example to access the TOML -// key "a.b.c" you would use IsDefined("a", "b", "c"). Keys are case sensitive. -// -// Returns false for an empty key. -func (md *MetaData) IsDefined(key ...string) bool { - if len(key) == 0 { - return false - } - - var ( - hash map[string]interface{} - ok bool - hashOrVal interface{} = md.mapping - ) - for _, k := range key { - if hash, ok = hashOrVal.(map[string]interface{}); !ok { - return false - } - if hashOrVal, ok = hash[k]; !ok { - return false - } - } - return true -} - -// Type returns a string representation of the type of the key specified. -// -// Type will return the empty string if given an empty key or a key that does -// not exist. Keys are case sensitive. -func (md *MetaData) Type(key ...string) string { - if ki, ok := md.keyInfo[Key(key).String()]; ok { - return ki.tomlType.typeString() - } - return "" -} - -// Keys returns a slice of every key in the TOML data, including key groups. -// -// Each key is itself a slice, where the first element is the top of the -// hierarchy and the last is the most specific. The list will have the same -// order as the keys appeared in the TOML data. -// -// All keys returned are non-empty. -func (md *MetaData) Keys() []Key { - return md.keys -} - -// Undecoded returns all keys that have not been decoded in the order in which -// they appear in the original TOML document. -// -// This includes keys that haven't been decoded because of a [Primitive] value. -// Once the Primitive value is decoded, the keys will be considered decoded. -// -// Also note that decoding into an empty interface will result in no decoding, -// and so no keys will be considered decoded. -// -// In this sense, the Undecoded keys correspond to keys in the TOML document -// that do not have a concrete type in your representation. -func (md *MetaData) Undecoded() []Key { - undecoded := make([]Key, 0, len(md.keys)) - for _, key := range md.keys { - if _, ok := md.decoded[key.String()]; !ok { - undecoded = append(undecoded, key) - } - } - return undecoded -} - -// Key represents any TOML key, including key groups. Use [MetaData.Keys] to get -// values of this type. 
-type Key []string - -func (k Key) String() string { - ss := make([]string, len(k)) - for i := range k { - ss[i] = k.maybeQuoted(i) - } - return strings.Join(ss, ".") -} - -func (k Key) maybeQuoted(i int) string { - if k[i] == "" { - return `""` - } - for _, c := range k[i] { - if !isBareKeyChar(c, false) { - return `"` + dblQuotedReplacer.Replace(k[i]) + `"` - } - } - return k[i] -} - -func (k Key) add(piece string) Key { - newKey := make(Key, len(k)+1) - copy(newKey, k) - newKey[len(k)] = piece - return newKey -} diff --git a/vendor/github.com/BurntSushi/toml/parse.go b/vendor/github.com/BurntSushi/toml/parse.go deleted file mode 100644 index 9c1915369..000000000 --- a/vendor/github.com/BurntSushi/toml/parse.go +++ /dev/null @@ -1,811 +0,0 @@ -package toml - -import ( - "fmt" - "os" - "strconv" - "strings" - "time" - "unicode/utf8" - - "github.com/BurntSushi/toml/internal" -) - -type parser struct { - lx *lexer - context Key // Full key for the current hash in scope. - currentKey string // Base key name for everything except hashes. - pos Position // Current position in the TOML file. - tomlNext bool - - ordered []Key // List of keys in the order that they appear in the TOML data. - - keyInfo map[string]keyInfo // Map keyname → info about the TOML key. - mapping map[string]interface{} // Map keyname → key value. - implicits map[string]struct{} // Record implicit keys (e.g. "key.group.names"). -} - -type keyInfo struct { - pos Position - tomlType tomlType -} - -func parse(data string) (p *parser, err error) { - _, tomlNext := os.LookupEnv("BURNTSUSHI_TOML_110") - - defer func() { - if r := recover(); r != nil { - if pErr, ok := r.(ParseError); ok { - pErr.input = data - err = pErr - return - } - panic(r) - } - }() - - // Read over BOM; do this here as the lexer calls utf8.DecodeRuneInString() - // which mangles stuff. UTF-16 BOM isn't strictly valid, but some tools add - // it anyway. - if strings.HasPrefix(data, "\xff\xfe") || strings.HasPrefix(data, "\xfe\xff") { // UTF-16 - data = data[2:] - } else if strings.HasPrefix(data, "\xef\xbb\xbf") { // UTF-8 - data = data[3:] - } - - // Examine first few bytes for NULL bytes; this probably means it's a UTF-16 - // file (second byte in surrogate pair being NULL). Again, do this here to - // avoid having to deal with UTF-8/16 stuff in the lexer. 
- ex := 6 - if len(data) < 6 { - ex = len(data) - } - if i := strings.IndexRune(data[:ex], 0); i > -1 { - return nil, ParseError{ - Message: "files cannot contain NULL bytes; probably using UTF-16; TOML files must be UTF-8", - Position: Position{Line: 1, Start: i, Len: 1}, - Line: 1, - input: data, - } - } - - p = &parser{ - keyInfo: make(map[string]keyInfo), - mapping: make(map[string]interface{}), - lx: lex(data, tomlNext), - ordered: make([]Key, 0), - implicits: make(map[string]struct{}), - tomlNext: tomlNext, - } - for { - item := p.next() - if item.typ == itemEOF { - break - } - p.topLevel(item) - } - - return p, nil -} - -func (p *parser) panicErr(it item, err error) { - panic(ParseError{ - err: err, - Position: it.pos, - Line: it.pos.Len, - LastKey: p.current(), - }) -} - -func (p *parser) panicItemf(it item, format string, v ...interface{}) { - panic(ParseError{ - Message: fmt.Sprintf(format, v...), - Position: it.pos, - Line: it.pos.Len, - LastKey: p.current(), - }) -} - -func (p *parser) panicf(format string, v ...interface{}) { - panic(ParseError{ - Message: fmt.Sprintf(format, v...), - Position: p.pos, - Line: p.pos.Line, - LastKey: p.current(), - }) -} - -func (p *parser) next() item { - it := p.lx.nextItem() - //fmt.Printf("ITEM %-18s line %-3d │ %q\n", it.typ, it.pos.Line, it.val) - if it.typ == itemError { - if it.err != nil { - panic(ParseError{ - Position: it.pos, - Line: it.pos.Line, - LastKey: p.current(), - err: it.err, - }) - } - - p.panicItemf(it, "%s", it.val) - } - return it -} - -func (p *parser) nextPos() item { - it := p.next() - p.pos = it.pos - return it -} - -func (p *parser) bug(format string, v ...interface{}) { - panic(fmt.Sprintf("BUG: "+format+"\n\n", v...)) -} - -func (p *parser) expect(typ itemType) item { - it := p.next() - p.assertEqual(typ, it.typ) - return it -} - -func (p *parser) assertEqual(expected, got itemType) { - if expected != got { - p.bug("Expected '%s' but got '%s'.", expected, got) - } -} - -func (p *parser) topLevel(item item) { - switch item.typ { - case itemCommentStart: // # .. - p.expect(itemText) - case itemTableStart: // [ .. ] - name := p.nextPos() - - var key Key - for ; name.typ != itemTableEnd && name.typ != itemEOF; name = p.next() { - key = append(key, p.keyString(name)) - } - p.assertEqual(itemTableEnd, name.typ) - - p.addContext(key, false) - p.setType("", tomlHash, item.pos) - p.ordered = append(p.ordered, key) - case itemArrayTableStart: // [[ .. ]] - name := p.nextPos() - - var key Key - for ; name.typ != itemArrayTableEnd && name.typ != itemEOF; name = p.next() { - key = append(key, p.keyString(name)) - } - p.assertEqual(itemArrayTableEnd, name.typ) - - p.addContext(key, true) - p.setType("", tomlArrayHash, item.pos) - p.ordered = append(p.ordered, key) - case itemKeyStart: // key = .. - outerContext := p.context - /// Read all the key parts (e.g. 'a' and 'b' in 'a.b') - k := p.nextPos() - var key Key - for ; k.typ != itemKeyEnd && k.typ != itemEOF; k = p.next() { - key = append(key, p.keyString(k)) - } - p.assertEqual(itemKeyEnd, k.typ) - - /// The current key is the last part. - p.currentKey = key[len(key)-1] - - /// All the other parts (if any) are the context; need to set each part - /// as implicit. - context := key[:len(key)-1] - for i := range context { - p.addImplicitContext(append(p.context, context[i:i+1]...)) - } - p.ordered = append(p.ordered, p.context.add(p.currentKey)) - - /// Set value. 
- vItem := p.next() - val, typ := p.value(vItem, false) - p.set(p.currentKey, val, typ, vItem.pos) - - /// Remove the context we added (preserving any context from [tbl] lines). - p.context = outerContext - p.currentKey = "" - default: - p.bug("Unexpected type at top level: %s", item.typ) - } -} - -// Gets a string for a key (or part of a key in a table name). -func (p *parser) keyString(it item) string { - switch it.typ { - case itemText: - return it.val - case itemString, itemMultilineString, - itemRawString, itemRawMultilineString: - s, _ := p.value(it, false) - return s.(string) - default: - p.bug("Unexpected key type: %s", it.typ) - } - panic("unreachable") -} - -var datetimeRepl = strings.NewReplacer( - "z", "Z", - "t", "T", - " ", "T") - -// value translates an expected value from the lexer into a Go value wrapped -// as an empty interface. -func (p *parser) value(it item, parentIsArray bool) (interface{}, tomlType) { - switch it.typ { - case itemString: - return p.replaceEscapes(it, it.val), p.typeOfPrimitive(it) - case itemMultilineString: - return p.replaceEscapes(it, p.stripEscapedNewlines(stripFirstNewline(it.val))), p.typeOfPrimitive(it) - case itemRawString: - return it.val, p.typeOfPrimitive(it) - case itemRawMultilineString: - return stripFirstNewline(it.val), p.typeOfPrimitive(it) - case itemInteger: - return p.valueInteger(it) - case itemFloat: - return p.valueFloat(it) - case itemBool: - switch it.val { - case "true": - return true, p.typeOfPrimitive(it) - case "false": - return false, p.typeOfPrimitive(it) - default: - p.bug("Expected boolean value, but got '%s'.", it.val) - } - case itemDatetime: - return p.valueDatetime(it) - case itemArray: - return p.valueArray(it) - case itemInlineTableStart: - return p.valueInlineTable(it, parentIsArray) - default: - p.bug("Unexpected value type: %s", it.typ) - } - panic("unreachable") -} - -func (p *parser) valueInteger(it item) (interface{}, tomlType) { - if !numUnderscoresOK(it.val) { - p.panicItemf(it, "Invalid integer %q: underscores must be surrounded by digits", it.val) - } - if numHasLeadingZero(it.val) { - p.panicItemf(it, "Invalid integer %q: cannot have leading zeroes", it.val) - } - - num, err := strconv.ParseInt(it.val, 0, 64) - if err != nil { - // Distinguish integer values. Normally, it'd be a bug if the lexer - // provides an invalid integer, but it's possible that the number is - // out of range of valid values (which the lexer cannot determine). - // So mark the former as a bug but the latter as a legitimate user - // error. - if e, ok := err.(*strconv.NumError); ok && e.Err == strconv.ErrRange { - p.panicErr(it, errParseRange{i: it.val, size: "int64"}) - } else { - p.bug("Expected integer value, but got '%s'.", it.val) - } - } - return num, p.typeOfPrimitive(it) -} - -func (p *parser) valueFloat(it item) (interface{}, tomlType) { - parts := strings.FieldsFunc(it.val, func(r rune) bool { - switch r { - case '.', 'e', 'E': - return true - } - return false - }) - for _, part := range parts { - if !numUnderscoresOK(part) { - p.panicItemf(it, "Invalid float %q: underscores must be surrounded by digits", it.val) - } - } - if len(parts) > 0 && numHasLeadingZero(parts[0]) { - p.panicItemf(it, "Invalid float %q: cannot have leading zeroes", it.val) - } - if !numPeriodsOK(it.val) { - // As a special case, numbers like '123.' or '1.e2', - // which are valid as far as Go/strconv are concerned, - // must be rejected because TOML says that a fractional - // part consists of '.' followed by 1+ digits. 
- p.panicItemf(it, "Invalid float %q: '.' must be followed by one or more digits", it.val) - } - val := strings.Replace(it.val, "_", "", -1) - if val == "+nan" || val == "-nan" { // Go doesn't support this, but TOML spec does. - val = "nan" - } - num, err := strconv.ParseFloat(val, 64) - if err != nil { - if e, ok := err.(*strconv.NumError); ok && e.Err == strconv.ErrRange { - p.panicErr(it, errParseRange{i: it.val, size: "float64"}) - } else { - p.panicItemf(it, "Invalid float value: %q", it.val) - } - } - return num, p.typeOfPrimitive(it) -} - -var dtTypes = []struct { - fmt string - zone *time.Location - next bool -}{ - {time.RFC3339Nano, time.Local, false}, - {"2006-01-02T15:04:05.999999999", internal.LocalDatetime, false}, - {"2006-01-02", internal.LocalDate, false}, - {"15:04:05.999999999", internal.LocalTime, false}, - - // tomlNext - {"2006-01-02T15:04Z07:00", time.Local, true}, - {"2006-01-02T15:04", internal.LocalDatetime, true}, - {"15:04", internal.LocalTime, true}, -} - -func (p *parser) valueDatetime(it item) (interface{}, tomlType) { - it.val = datetimeRepl.Replace(it.val) - var ( - t time.Time - ok bool - err error - ) - for _, dt := range dtTypes { - if dt.next && !p.tomlNext { - continue - } - t, err = time.ParseInLocation(dt.fmt, it.val, dt.zone) - if err == nil { - ok = true - break - } - } - if !ok { - p.panicItemf(it, "Invalid TOML Datetime: %q.", it.val) - } - return t, p.typeOfPrimitive(it) -} - -func (p *parser) valueArray(it item) (interface{}, tomlType) { - p.setType(p.currentKey, tomlArray, it.pos) - - var ( - types []tomlType - - // Initialize to a non-nil empty slice. This makes it consistent with - // how S = [] decodes into a non-nil slice inside something like struct - // { S []string }. See #338 - array = []interface{}{} - ) - for it = p.next(); it.typ != itemArrayEnd; it = p.next() { - if it.typ == itemCommentStart { - p.expect(itemText) - continue - } - - val, typ := p.value(it, true) - array = append(array, val) - types = append(types, typ) - - // XXX: types isn't used here, we need it to record the accurate type - // information. - // - // Not entirely sure how to best store this; could use "key[0]", - // "key[1]" notation, or maybe store it on the Array type? - _ = types - } - return array, tomlArray -} - -func (p *parser) valueInlineTable(it item, parentIsArray bool) (interface{}, tomlType) { - var ( - hash = make(map[string]interface{}) - outerContext = p.context - outerKey = p.currentKey - ) - - p.context = append(p.context, p.currentKey) - prevContext := p.context - p.currentKey = "" - - p.addImplicit(p.context) - p.addContext(p.context, parentIsArray) - - /// Loop over all table key/value pairs. - for it := p.next(); it.typ != itemInlineTableEnd; it = p.next() { - if it.typ == itemCommentStart { - p.expect(itemText) - continue - } - - /// Read all key parts. - k := p.nextPos() - var key Key - for ; k.typ != itemKeyEnd && k.typ != itemEOF; k = p.next() { - key = append(key, p.keyString(k)) - } - p.assertEqual(itemKeyEnd, k.typ) - - /// The current key is the last part. - p.currentKey = key[len(key)-1] - - /// All the other parts (if any) are the context; need to set each part - /// as implicit. - context := key[:len(key)-1] - for i := range context { - p.addImplicitContext(append(p.context, context[i:i+1]...)) - } - p.ordered = append(p.ordered, p.context.add(p.currentKey)) - - /// Set the value. - val, typ := p.value(p.next(), false) - p.set(p.currentKey, val, typ, it.pos) - hash[p.currentKey] = val - - /// Restore context. 
- p.context = prevContext - } - p.context = outerContext - p.currentKey = outerKey - return hash, tomlHash -} - -// numHasLeadingZero checks if this number has leading zeroes, allowing for '0', -// +/- signs, and base prefixes. -func numHasLeadingZero(s string) bool { - if len(s) > 1 && s[0] == '0' && !(s[1] == 'b' || s[1] == 'o' || s[1] == 'x') { // Allow 0b, 0o, 0x - return true - } - if len(s) > 2 && (s[0] == '-' || s[0] == '+') && s[1] == '0' { - return true - } - return false -} - -// numUnderscoresOK checks whether each underscore in s is surrounded by -// characters that are not underscores. -func numUnderscoresOK(s string) bool { - switch s { - case "nan", "+nan", "-nan", "inf", "-inf", "+inf": - return true - } - accept := false - for _, r := range s { - if r == '_' { - if !accept { - return false - } - } - - // isHexadecimal is a superset of all the permissable characters - // surrounding an underscore. - accept = isHexadecimal(r) - } - return accept -} - -// numPeriodsOK checks whether every period in s is followed by a digit. -func numPeriodsOK(s string) bool { - period := false - for _, r := range s { - if period && !isDigit(r) { - return false - } - period = r == '.' - } - return !period -} - -// Set the current context of the parser, where the context is either a hash or -// an array of hashes, depending on the value of the `array` parameter. -// -// Establishing the context also makes sure that the key isn't a duplicate, and -// will create implicit hashes automatically. -func (p *parser) addContext(key Key, array bool) { - var ok bool - - // Always start at the top level and drill down for our context. - hashContext := p.mapping - keyContext := make(Key, 0) - - // We only need implicit hashes for key[0:-1] - for _, k := range key[0 : len(key)-1] { - _, ok = hashContext[k] - keyContext = append(keyContext, k) - - // No key? Make an implicit hash and move on. - if !ok { - p.addImplicit(keyContext) - hashContext[k] = make(map[string]interface{}) - } - - // If the hash context is actually an array of tables, then set - // the hash context to the last element in that array. - // - // Otherwise, it better be a table, since this MUST be a key group (by - // virtue of it not being the last element in a key). - switch t := hashContext[k].(type) { - case []map[string]interface{}: - hashContext = t[len(t)-1] - case map[string]interface{}: - hashContext = t - default: - p.panicf("Key '%s' was already created as a hash.", keyContext) - } - } - - p.context = keyContext - if array { - // If this is the first element for this array, then allocate a new - // list of tables for it. - k := key[len(key)-1] - if _, ok := hashContext[k]; !ok { - hashContext[k] = make([]map[string]interface{}, 0, 4) - } - - // Add a new table. But make sure the key hasn't already been used - // for something else. - if hash, ok := hashContext[k].([]map[string]interface{}); ok { - hashContext[k] = append(hash, make(map[string]interface{})) - } else { - p.panicf("Key '%s' was already created and cannot be used as an array.", key) - } - } else { - p.setValue(key[len(key)-1], make(map[string]interface{})) - } - p.context = append(p.context, key[len(key)-1]) -} - -// set calls setValue and setType. -func (p *parser) set(key string, val interface{}, typ tomlType, pos Position) { - p.setValue(key, val) - p.setType(key, typ, pos) -} - -// setValue sets the given key to the given value in the current context. -// It will make sure that the key hasn't already been defined, account for -// implicit key groups. 
-func (p *parser) setValue(key string, value interface{}) { - var ( - tmpHash interface{} - ok bool - hash = p.mapping - keyContext Key - ) - for _, k := range p.context { - keyContext = append(keyContext, k) - if tmpHash, ok = hash[k]; !ok { - p.bug("Context for key '%s' has not been established.", keyContext) - } - switch t := tmpHash.(type) { - case []map[string]interface{}: - // The context is a table of hashes. Pick the most recent table - // defined as the current hash. - hash = t[len(t)-1] - case map[string]interface{}: - hash = t - default: - p.panicf("Key '%s' has already been defined.", keyContext) - } - } - keyContext = append(keyContext, key) - - if _, ok := hash[key]; ok { - // Normally redefining keys isn't allowed, but the key could have been - // defined implicitly and it's allowed to be redefined concretely. (See - // the `valid/implicit-and-explicit-after.toml` in toml-test) - // - // But we have to make sure to stop marking it as an implicit. (So that - // another redefinition provokes an error.) - // - // Note that since it has already been defined (as a hash), we don't - // want to overwrite it. So our business is done. - if p.isArray(keyContext) { - p.removeImplicit(keyContext) - hash[key] = value - return - } - if p.isImplicit(keyContext) { - p.removeImplicit(keyContext) - return - } - - // Otherwise, we have a concrete key trying to override a previous - // key, which is *always* wrong. - p.panicf("Key '%s' has already been defined.", keyContext) - } - - hash[key] = value -} - -// setType sets the type of a particular value at a given key. It should be -// called immediately AFTER setValue. -// -// Note that if `key` is empty, then the type given will be applied to the -// current context (which is either a table or an array of tables). -func (p *parser) setType(key string, typ tomlType, pos Position) { - keyContext := make(Key, 0, len(p.context)+1) - keyContext = append(keyContext, p.context...) - if len(key) > 0 { // allow type setting for hashes - keyContext = append(keyContext, key) - } - // Special case to make empty keys ("" = 1) work. - // Without it it will set "" rather than `""`. - // TODO: why is this needed? And why is this only needed here? - if len(keyContext) == 0 { - keyContext = Key{""} - } - p.keyInfo[keyContext.String()] = keyInfo{tomlType: typ, pos: pos} -} - -// Implicit keys need to be created when tables are implied in "a.b.c.d = 1" and -// "[a.b.c]" (the "a", "b", and "c" hashes are never created explicitly). -func (p *parser) addImplicit(key Key) { p.implicits[key.String()] = struct{}{} } -func (p *parser) removeImplicit(key Key) { delete(p.implicits, key.String()) } -func (p *parser) isImplicit(key Key) bool { _, ok := p.implicits[key.String()]; return ok } -func (p *parser) isArray(key Key) bool { return p.keyInfo[key.String()].tomlType == tomlArray } -func (p *parser) addImplicitContext(key Key) { p.addImplicit(key); p.addContext(key, false) } - -// current returns the full key name of the current context. -func (p *parser) current() string { - if len(p.currentKey) == 0 { - return p.context.String() - } - if len(p.context) == 0 { - return p.currentKey - } - return fmt.Sprintf("%s.%s", p.context, p.currentKey) -} - -func stripFirstNewline(s string) string { - if len(s) > 0 && s[0] == '\n' { - return s[1:] - } - if len(s) > 1 && s[0] == '\r' && s[1] == '\n' { - return s[2:] - } - return s -} - -// stripEscapedNewlines removes whitespace after line-ending backslashes in -// multiline strings. 
-// -// A line-ending backslash is an unescaped \ followed only by whitespace until -// the next newline. After a line-ending backslash, all whitespace is removed -// until the next non-whitespace character. -func (p *parser) stripEscapedNewlines(s string) string { - var b strings.Builder - var i int - for { - ix := strings.Index(s[i:], `\`) - if ix < 0 { - b.WriteString(s) - return b.String() - } - i += ix - - if len(s) > i+1 && s[i+1] == '\\' { - // Escaped backslash. - i += 2 - continue - } - // Scan until the next non-whitespace. - j := i + 1 - whitespaceLoop: - for ; j < len(s); j++ { - switch s[j] { - case ' ', '\t', '\r', '\n': - default: - break whitespaceLoop - } - } - if j == i+1 { - // Not a whitespace escape. - i++ - continue - } - if !strings.Contains(s[i:j], "\n") { - // This is not a line-ending backslash. - // (It's a bad escape sequence, but we can let - // replaceEscapes catch it.) - i++ - continue - } - b.WriteString(s[:i]) - s = s[j:] - i = 0 - } -} - -func (p *parser) replaceEscapes(it item, str string) string { - replaced := make([]rune, 0, len(str)) - s := []byte(str) - r := 0 - for r < len(s) { - if s[r] != '\\' { - c, size := utf8.DecodeRune(s[r:]) - r += size - replaced = append(replaced, c) - continue - } - r += 1 - if r >= len(s) { - p.bug("Escape sequence at end of string.") - return "" - } - switch s[r] { - default: - p.bug("Expected valid escape code after \\, but got %q.", s[r]) - case ' ', '\t': - p.panicItemf(it, "invalid escape: '\\%c'", s[r]) - case 'b': - replaced = append(replaced, rune(0x0008)) - r += 1 - case 't': - replaced = append(replaced, rune(0x0009)) - r += 1 - case 'n': - replaced = append(replaced, rune(0x000A)) - r += 1 - case 'f': - replaced = append(replaced, rune(0x000C)) - r += 1 - case 'r': - replaced = append(replaced, rune(0x000D)) - r += 1 - case 'e': - if p.tomlNext { - replaced = append(replaced, rune(0x001B)) - r += 1 - } - case '"': - replaced = append(replaced, rune(0x0022)) - r += 1 - case '\\': - replaced = append(replaced, rune(0x005C)) - r += 1 - case 'x': - if p.tomlNext { - escaped := p.asciiEscapeToUnicode(it, s[r+1:r+3]) - replaced = append(replaced, escaped) - r += 3 - } - case 'u': - // At this point, we know we have a Unicode escape of the form - // `uXXXX` at [r, r+5). (Because the lexer guarantees this - // for us.) - escaped := p.asciiEscapeToUnicode(it, s[r+1:r+5]) - replaced = append(replaced, escaped) - r += 5 - case 'U': - // At this point, we know we have a Unicode escape of the form - // `uXXXX` at [r, r+9). (Because the lexer guarantees this - // for us.) - escaped := p.asciiEscapeToUnicode(it, s[r+1:r+9]) - replaced = append(replaced, escaped) - r += 9 - } - } - return string(replaced) -} - -func (p *parser) asciiEscapeToUnicode(it item, bs []byte) rune { - s := string(bs) - hex, err := strconv.ParseUint(strings.ToLower(s), 16, 32) - if err != nil { - p.bug("Could not parse '%s' as a hexadecimal number, but the lexer claims it's OK: %s", s, err) - } - if !utf8.ValidRune(rune(hex)) { - p.panicItemf(it, "Escaped character '\\u%s' is not valid UTF-8.", s) - } - return rune(hex) -} diff --git a/vendor/github.com/BurntSushi/toml/type_fields.go b/vendor/github.com/BurntSushi/toml/type_fields.go deleted file mode 100644 index 254ca82e5..000000000 --- a/vendor/github.com/BurntSushi/toml/type_fields.go +++ /dev/null @@ -1,242 +0,0 @@ -package toml - -// Struct field handling is adapted from code in encoding/json: -// -// Copyright 2010 The Go Authors. All rights reserved. 
-// Use of this source code is governed by a BSD-style -// license that can be found in the Go distribution. - -import ( - "reflect" - "sort" - "sync" -) - -// A field represents a single field found in a struct. -type field struct { - name string // the name of the field (`toml` tag included) - tag bool // whether field has a `toml` tag - index []int // represents the depth of an anonymous field - typ reflect.Type // the type of the field -} - -// byName sorts field by name, breaking ties with depth, -// then breaking ties with "name came from toml tag", then -// breaking ties with index sequence. -type byName []field - -func (x byName) Len() int { return len(x) } - -func (x byName) Swap(i, j int) { x[i], x[j] = x[j], x[i] } - -func (x byName) Less(i, j int) bool { - if x[i].name != x[j].name { - return x[i].name < x[j].name - } - if len(x[i].index) != len(x[j].index) { - return len(x[i].index) < len(x[j].index) - } - if x[i].tag != x[j].tag { - return x[i].tag - } - return byIndex(x).Less(i, j) -} - -// byIndex sorts field by index sequence. -type byIndex []field - -func (x byIndex) Len() int { return len(x) } - -func (x byIndex) Swap(i, j int) { x[i], x[j] = x[j], x[i] } - -func (x byIndex) Less(i, j int) bool { - for k, xik := range x[i].index { - if k >= len(x[j].index) { - return false - } - if xik != x[j].index[k] { - return xik < x[j].index[k] - } - } - return len(x[i].index) < len(x[j].index) -} - -// typeFields returns a list of fields that TOML should recognize for the given -// type. The algorithm is breadth-first search over the set of structs to -// include - the top struct and then any reachable anonymous structs. -func typeFields(t reflect.Type) []field { - // Anonymous fields to explore at the current level and the next. - current := []field{} - next := []field{{typ: t}} - - // Count of queued names for current level and the next. - var count map[reflect.Type]int - var nextCount map[reflect.Type]int - - // Types already visited at an earlier level. - visited := map[reflect.Type]bool{} - - // Fields found. - var fields []field - - for len(next) > 0 { - current, next = next, current[:0] - count, nextCount = nextCount, map[reflect.Type]int{} - - for _, f := range current { - if visited[f.typ] { - continue - } - visited[f.typ] = true - - // Scan f.typ for fields to include. - for i := 0; i < f.typ.NumField(); i++ { - sf := f.typ.Field(i) - if sf.PkgPath != "" && !sf.Anonymous { // unexported - continue - } - opts := getOptions(sf.Tag) - if opts.skip { - continue - } - index := make([]int, len(f.index)+1) - copy(index, f.index) - index[len(f.index)] = i - - ft := sf.Type - if ft.Name() == "" && ft.Kind() == reflect.Ptr { - // Follow pointer. - ft = ft.Elem() - } - - // Record found field and index sequence. - if opts.name != "" || !sf.Anonymous || ft.Kind() != reflect.Struct { - tagged := opts.name != "" - name := opts.name - if name == "" { - name = sf.Name - } - fields = append(fields, field{name, tagged, index, ft}) - if count[f.typ] > 1 { - // If there were multiple instances, add a second, - // so that the annihilation code will see a duplicate. - // It only cares about the distinction between 1 or 2, - // so don't bother generating any more copies. - fields = append(fields, fields[len(fields)-1]) - } - continue - } - - // Record new anonymous struct to explore in next round. 
- nextCount[ft]++ - if nextCount[ft] == 1 { - f := field{name: ft.Name(), index: index, typ: ft} - next = append(next, f) - } - } - } - } - - sort.Sort(byName(fields)) - - // Delete all fields that are hidden by the Go rules for embedded fields, - // except that fields with TOML tags are promoted. - - // The fields are sorted in primary order of name, secondary order - // of field index length. Loop over names; for each name, delete - // hidden fields by choosing the one dominant field that survives. - out := fields[:0] - for advance, i := 0, 0; i < len(fields); i += advance { - // One iteration per name. - // Find the sequence of fields with the name of this first field. - fi := fields[i] - name := fi.name - for advance = 1; i+advance < len(fields); advance++ { - fj := fields[i+advance] - if fj.name != name { - break - } - } - if advance == 1 { // Only one field with this name - out = append(out, fi) - continue - } - dominant, ok := dominantField(fields[i : i+advance]) - if ok { - out = append(out, dominant) - } - } - - fields = out - sort.Sort(byIndex(fields)) - - return fields -} - -// dominantField looks through the fields, all of which are known to -// have the same name, to find the single field that dominates the -// others using Go's embedding rules, modified by the presence of -// TOML tags. If there are multiple top-level fields, the boolean -// will be false: This condition is an error in Go and we skip all -// the fields. -func dominantField(fields []field) (field, bool) { - // The fields are sorted in increasing index-length order. The winner - // must therefore be one with the shortest index length. Drop all - // longer entries, which is easy: just truncate the slice. - length := len(fields[0].index) - tagged := -1 // Index of first tagged field. - for i, f := range fields { - if len(f.index) > length { - fields = fields[:i] - break - } - if f.tag { - if tagged >= 0 { - // Multiple tagged fields at the same level: conflict. - // Return no field. - return field{}, false - } - tagged = i - } - } - if tagged >= 0 { - return fields[tagged], true - } - // All remaining fields have the same length. If there's more than one, - // we have a conflict (two fields named "X" at the same level) and we - // return no field. - if len(fields) > 1 { - return field{}, false - } - return fields[0], true -} - -var fieldCache struct { - sync.RWMutex - m map[reflect.Type][]field -} - -// cachedTypeFields is like typeFields but uses a cache to avoid repeated work. -func cachedTypeFields(t reflect.Type) []field { - fieldCache.RLock() - f := fieldCache.m[t] - fieldCache.RUnlock() - if f != nil { - return f - } - - // Compute fields without lock. - // Might duplicate effort but won't hold other computations back. - f = typeFields(t) - if f == nil { - f = []field{} - } - - fieldCache.Lock() - if fieldCache.m == nil { - fieldCache.m = map[reflect.Type][]field{} - } - fieldCache.m[t] = f - fieldCache.Unlock() - return f -} diff --git a/vendor/github.com/BurntSushi/toml/type_toml.go b/vendor/github.com/BurntSushi/toml/type_toml.go deleted file mode 100644 index 4e90d7737..000000000 --- a/vendor/github.com/BurntSushi/toml/type_toml.go +++ /dev/null @@ -1,70 +0,0 @@ -package toml - -// tomlType represents any Go type that corresponds to a TOML type. -// While the first draft of the TOML spec has a simplistic type system that -// probably doesn't need this level of sophistication, we seem to be militating -// toward adding real composite types. 
-type tomlType interface { - typeString() string -} - -// typeEqual accepts any two types and returns true if they are equal. -func typeEqual(t1, t2 tomlType) bool { - if t1 == nil || t2 == nil { - return false - } - return t1.typeString() == t2.typeString() -} - -func typeIsTable(t tomlType) bool { - return typeEqual(t, tomlHash) || typeEqual(t, tomlArrayHash) -} - -type tomlBaseType string - -func (btype tomlBaseType) typeString() string { - return string(btype) -} - -func (btype tomlBaseType) String() string { - return btype.typeString() -} - -var ( - tomlInteger tomlBaseType = "Integer" - tomlFloat tomlBaseType = "Float" - tomlDatetime tomlBaseType = "Datetime" - tomlString tomlBaseType = "String" - tomlBool tomlBaseType = "Bool" - tomlArray tomlBaseType = "Array" - tomlHash tomlBaseType = "Hash" - tomlArrayHash tomlBaseType = "ArrayHash" -) - -// typeOfPrimitive returns a tomlType of any primitive value in TOML. -// Primitive values are: Integer, Float, Datetime, String and Bool. -// -// Passing a lexer item other than the following will cause a BUG message -// to occur: itemString, itemBool, itemInteger, itemFloat, itemDatetime. -func (p *parser) typeOfPrimitive(lexItem item) tomlType { - switch lexItem.typ { - case itemInteger: - return tomlInteger - case itemFloat: - return tomlFloat - case itemDatetime: - return tomlDatetime - case itemString: - return tomlString - case itemMultilineString: - return tomlString - case itemRawString: - return tomlString - case itemRawMultilineString: - return tomlString - case itemBool: - return tomlBool - } - p.bug("Cannot infer primitive type of lex item '%s'.", lexItem) - panic("unreachable") -} diff --git a/vendor/github.com/Djarvur/go-err113/.gitignore b/vendor/github.com/Djarvur/go-err113/.gitignore deleted file mode 100644 index 66fd13c90..000000000 --- a/vendor/github.com/Djarvur/go-err113/.gitignore +++ /dev/null @@ -1,15 +0,0 @@ -# Binaries for programs and plugins -*.exe -*.exe~ -*.dll -*.so -*.dylib - -# Test binary, built with `go test -c` -*.test - -# Output of the go coverage tool, specifically when used with LiteIDE -*.out - -# Dependency directories (remove the comment below to include it) -# vendor/ diff --git a/vendor/github.com/Djarvur/go-err113/.golangci.yml b/vendor/github.com/Djarvur/go-err113/.golangci.yml deleted file mode 100644 index 2abdfc639..000000000 --- a/vendor/github.com/Djarvur/go-err113/.golangci.yml +++ /dev/null @@ -1,150 +0,0 @@ -# This file contains all available configuration options -# with their default values. - -# options for analysis running -run: - # default concurrency is a available CPU number - concurrency: 4 - - # timeout for analysis, e.g. 30s, 5m, default is 1m - deadline: 15m - - # exit code when at least one issue was found, default is 1 - issues-exit-code: 1 - - # include test files or not, default is true - tests: false - - # list of build tags, all linters use it. Default is empty list. - #build-tags: - # - mytag - - # which dirs to skip: they won't be analyzed; - # can use regexp here: generated.*, regexp is applied on full path; - # default value is empty list, but next dirs are always skipped independently - # from this option's value: - # vendor$, third_party$, testdata$, examples$, Godeps$, builtin$ - skip-dirs: - - /gen$ - - # which files to skip: they will be analyzed, but issues from them - # won't be reported. Default value is empty list, but there is - # no need to include all autogenerated files, we confidently recognize - # autogenerated files. If it's not please let us know. 
- skip-files: - - ".*\\.my\\.go$" - - lib/bad.go - - ".*\\.template\\.go$" - -# output configuration options -output: - # colored-line-number|line-number|json|tab|checkstyle, default is "colored-line-number" - format: colored-line-number - - # print lines of code with issue, default is true - print-issued-lines: true - - # print linter name in the end of issue text, default is true - print-linter-name: true - -# all available settings of specific linters -linters-settings: - errcheck: - # report about not checking of errors in type assetions: `a := b.(MyStruct)`; - # default is false: such cases aren't reported by default. - check-type-assertions: false - - # report about assignment of errors to blank identifier: `num, _ := strconv.Atoi(numStr)`; - # default is false: such cases aren't reported by default. - check-blank: false - govet: - # report about shadowed variables - check-shadowing: true - - # Obtain type information from installed (to $GOPATH/pkg) package files: - # golangci-lint will execute `go install -i` and `go test -i` for analyzed packages - # before analyzing them. - # By default this option is disabled and govet gets type information by loader from source code. - # Loading from source code is slow, but it's done only once for all linters. - # Go-installing of packages first time is much slower than loading them from source code, - # therefore this option is disabled by default. - # But repeated installation is fast in go >= 1.10 because of build caching. - # Enable this option only if all conditions are met: - # 1. you use only "fast" linters (--fast e.g.): no program loading occurs - # 2. you use go >= 1.10 - # 3. you do repeated runs (false for CI) or cache $GOPATH/pkg or `go env GOCACHE` dir in CI. - use-installed-packages: false - golint: - # minimal confidence for issues, default is 0.8 - min-confidence: 0.8 - gofmt: - # simplify code: gofmt with `-s` option, true by default - simplify: true - gocyclo: - # minimal code complexity to report, 30 by default (but we recommend 10-20) - min-complexity: 10 - maligned: - # print struct with more effective memory layout or not, false by default - suggest-new: true - dupl: - # tokens count to trigger issue, 150 by default - threshold: 100 - goconst: - # minimal length of string constant, 3 by default - min-len: 3 - # minimal occurrences count to trigger, 3 by default - min-occurrences: 3 - depguard: - list-type: blacklist - include-go-root: false - packages: - - github.com/davecgh/go-spew/spew - -linters: - #enable: - # - staticcheck - # - unused - # - gosimple - enable-all: true - disable: - - lll - disable-all: false - #presets: - # - bugs - # - unused - fast: false - -issues: - # List of regexps of issue texts to exclude, empty list by default. - # But independently from this option we use default exclude patterns, - # it can be disabled by `exclude-use-default: false`. To list all - # excluded by default patterns execute `golangci-lint run --help` - exclude: - - "`parseTained` is unused" - - "`parseState` is unused" - - # Independently from option `exclude` we use default exclude patterns, - # it can be disabled by this option. To list all - # excluded by default patterns execute `golangci-lint run --help`. - # Default value for this option is false. - exclude-use-default: false - - # Maximum issues count per one linter. Set to 0 to disable. Default is 50. - max-per-linter: 0 - - # Maximum count of issues with the same text. Set to 0 to disable. Default is 3. 
- max-same: 0 - - # Show only new issues: if there are unstaged changes or untracked files, - # only those changes are analyzed, else only changes in HEAD~ are analyzed. - # It's a super-useful option for integration of golangci-lint into existing - # large codebase. It's not practical to fix all existing issues at the moment - # of integration: much better don't allow issues in new code. - # Default is false. - new: false - - # Show only new issues created after git revision `REV` - #new-from-rev: REV - - # Show only new issues created in git patch with set file path. - #new-from-patch: path/to/patch/file \ No newline at end of file diff --git a/vendor/github.com/Djarvur/go-err113/.travis.yml b/vendor/github.com/Djarvur/go-err113/.travis.yml deleted file mode 100644 index 44fe77d53..000000000 --- a/vendor/github.com/Djarvur/go-err113/.travis.yml +++ /dev/null @@ -1,24 +0,0 @@ -language: go - -go: - - "1.13" - - "1.14" - - tip - -env: - - GO111MODULE=on - -before_install: - - go get github.com/axw/gocov/gocov - - go get github.com/mattn/goveralls - - go get golang.org/x/tools/cmd/cover - - go get golang.org/x/tools/cmd/goimports - - wget -O - -q https://raw.githubusercontent.com/golangci/golangci-lint/master/install.sh| sh - -script: - - test -z "$(goimports -d ./ 2>&1)" - - ./bin/golangci-lint run - - go test -v -race ./... - -after_success: - - test "$TRAVIS_GO_VERSION" = "1.14" && goveralls -service=travis-ci diff --git a/vendor/github.com/Djarvur/go-err113/LICENSE b/vendor/github.com/Djarvur/go-err113/LICENSE deleted file mode 100644 index a78ad8c77..000000000 --- a/vendor/github.com/Djarvur/go-err113/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -MIT License - -Copyright (c) 2020 Djarvur - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. 
diff --git a/vendor/github.com/Djarvur/go-err113/README.adoc b/vendor/github.com/Djarvur/go-err113/README.adoc deleted file mode 100644 index b26af4038..000000000 --- a/vendor/github.com/Djarvur/go-err113/README.adoc +++ /dev/null @@ -1,75 +0,0 @@ -= err113 image:https://godoc.org/github.com/Djarvur/go-err113?status.svg["GoDoc",link="http://godoc.org/github.com/Djarvur/go-err113"] image:https://travis-ci.org/Djarvur/go-err113.svg["Build Status",link="https://travis-ci.org/Djarvur/go-err113"] image:https://coveralls.io/repos/Djarvur/go-err113/badge.svg?branch=master&service=github["Coverage Status",link="https://coveralls.io/github/Djarvur/go-err113?branch=master"] -Daniel Podolsky -:toc: - -Golang linter to check the errors handling expressions - -== Details - -Starting from Go 1.13 the standard `error` type behaviour was changed: one `error` could be derived from another with `fmt.Errorf()` method using `%w` format specifier. - -So the errors hierarchy could be built for flexible and responsible errors processing. - -And to make this possible at least two simple rules should be followed: - -1. `error` values should not be compared directly but with `errors.Is()` method. -1. `error` should not be created dynamically from scratch but by the wrapping the static (package-level) error. - -This linter is checking the code for these 2 rules compliance. - -=== Reports - -So, `err113` reports every `==` and `!=` comparison for exact `error` type variables except comparison to `nil` and `io.EOF`. - -Also, any call of `errors.New()` and `fmt.Errorf()` methods are reported except the calls used to initialise package-level variables and the `fmt.Errorf()` calls wrapping the other errors. - -Note: non-standard packages, like `github.com/pkg/errors` are ignored completely. - -== Install - -``` -go get -u github.com/Djarvur/go-err113/cmd/err113 -``` - -== Usage - -Defined by link:https://pkg.go.dev/golang.org/x/tools/go/analysis/singlechecker[singlechecker] package. - -``` -err113: checks the error handling rules according to the Go 1.13 new error type - -Usage: err113 [-flag] [package] - - -Flags: - -V print version and exit - -all - no effect (deprecated) - -c int - display offending line with this many lines of context (default -1) - -cpuprofile string - write CPU profile to this file - -debug string - debug flags, any subset of "fpstv" - -fix - apply all suggested fixes - -flags - print analyzer flags in JSON - -json - emit JSON output - -memprofile string - write memory profile to this file - -source - no effect (deprecated) - -tags string - no effect (deprecated) - -trace string - write trace log to this file - -v no effect (deprecated) -``` - -== Thanks - -To link:https://github.com/quasilyte[Iskander (Alex) Sharipov] for the really useful advices. - -To link:https://github.com/jackwhelpton[Jack Whelpton] for the bugfix provided. 
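The README deleted above describes the two rules the err113 linter enforced: compare errors with `errors.Is()` rather than `==`, and wrap static package-level errors rather than creating dynamic ones. As a hedged illustration of those rules (a minimal sketch of my own, not code from the removed vendor tree), the pattern the linter reports and its compliant counterpart look roughly like this:

```go
package main

import (
	"errors"
	"fmt"
)

// A static, package-level sentinel error. err113 permits errors.New here
// because the value is created once and can later be matched with errors.Is.
var ErrNotFound = errors.New("not found")

// findUser wraps the static sentinel with %w, building an error chain
// instead of defining a new dynamic error at the call site.
func findUser(name string) error {
	if name == "" {
		return fmt.Errorf("findUser %q: %w", name, ErrNotFound)
	}
	return nil
}

func main() {
	err := findUser("")

	// Reported by err113: a direct comparison only matches the exact value,
	// so it fails silently once the error has been wrapped.
	if err == ErrNotFound {
		fmt.Println("direct comparison matched")
	}

	// Preferred: errors.Is walks the wrap chain and still matches.
	if errors.Is(err, ErrNotFound) {
		fmt.Println("errors.Is matched the wrapped sentinel")
	}
}
```

Run as written, only the `errors.Is` branch prints, which is exactly the mismatch the linter's suggested fix (rewriting `==`/`!=` into `errors.Is`) is meant to avoid.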
\ No newline at end of file diff --git a/vendor/github.com/Djarvur/go-err113/comparison.go b/vendor/github.com/Djarvur/go-err113/comparison.go deleted file mode 100644 index 8a8555783..000000000 --- a/vendor/github.com/Djarvur/go-err113/comparison.go +++ /dev/null @@ -1,123 +0,0 @@ -package err113 - -import ( - "fmt" - "go/ast" - "go/token" - "go/types" - - "golang.org/x/tools/go/analysis" -) - -func inspectComparision(pass *analysis.Pass, n ast.Node) bool { // nolint: unparam - // check whether the call expression matches time.Now().Sub() - be, ok := n.(*ast.BinaryExpr) - if !ok { - return true - } - - // check if it is a comparison operation - if be.Op != token.EQL && be.Op != token.NEQ { - return true - } - - if !areBothErrors(be.X, be.Y, pass.TypesInfo) { - return true - } - - oldExpr := render(pass.Fset, be) - - negate := "" - if be.Op == token.NEQ { - negate = "!" - } - - newExpr := fmt.Sprintf("%s%s.Is(%s, %s)", negate, "errors", rawString(be.X), rawString(be.Y)) - - pass.Report( - analysis.Diagnostic{ - Pos: be.Pos(), - Message: fmt.Sprintf("do not compare errors directly %q, use %q instead", oldExpr, newExpr), - SuggestedFixes: []analysis.SuggestedFix{ - { - Message: fmt.Sprintf("should replace %q with %q", oldExpr, newExpr), - TextEdits: []analysis.TextEdit{ - { - Pos: be.Pos(), - End: be.End(), - NewText: []byte(newExpr), - }, - }, - }, - }, - }, - ) - - return true -} - -func isError(v ast.Expr, info *types.Info) bool { - if intf, ok := info.TypeOf(v).Underlying().(*types.Interface); ok { - return intf.NumMethods() == 1 && intf.Method(0).FullName() == "(error).Error" - } - - return false -} - -func isEOF(ex ast.Expr, info *types.Info) bool { - se, ok := ex.(*ast.SelectorExpr) - if !ok || se.Sel.Name != "EOF" { - return false - } - - if ep, ok := asImportedName(se.X, info); !ok || ep != "io" { - return false - } - - return true -} - -func asImportedName(ex ast.Expr, info *types.Info) (string, bool) { - ei, ok := ex.(*ast.Ident) - if !ok { - return "", false - } - - ep, ok := info.ObjectOf(ei).(*types.PkgName) - if !ok { - return "", false - } - - return ep.Imported().Path(), true -} - -func areBothErrors(x, y ast.Expr, typesInfo *types.Info) bool { - // check that both left and right hand side are not nil - if typesInfo.Types[x].IsNil() || typesInfo.Types[y].IsNil() { - return false - } - - // check that both left and right hand side are not io.EOF - if isEOF(x, typesInfo) || isEOF(y, typesInfo) { - return false - } - - // check that both left and right hand side are errors - if !isError(x, typesInfo) && !isError(y, typesInfo) { - return false - } - - return true -} - -func rawString(x ast.Expr) string { - switch t := x.(type) { - case *ast.Ident: - return t.Name - case *ast.SelectorExpr: - return fmt.Sprintf("%s.%s", rawString(t.X), t.Sel.Name) - case *ast.CallExpr: - return fmt.Sprintf("%s()", rawString(t.Fun)) - } - return fmt.Sprintf("%s", x) -} diff --git a/vendor/github.com/Djarvur/go-err113/definition.go b/vendor/github.com/Djarvur/go-err113/definition.go deleted file mode 100644 index 689236bac..000000000 --- a/vendor/github.com/Djarvur/go-err113/definition.go +++ /dev/null @@ -1,74 +0,0 @@ -package err113 - -import ( - "go/ast" - "go/types" - "strings" - - "golang.org/x/tools/go/analysis" -) - -var methods2check = map[string]map[string]func(*ast.CallExpr, *types.Info) bool{ // nolint: gochecknoglobals - "errors": {"New": justTrue}, - "fmt": {"Errorf": checkWrap}, -} - -func justTrue(*ast.CallExpr, *types.Info) bool { - return true -} - -func checkWrap(ce *ast.CallExpr, 
info *types.Info) bool { - return !(len(ce.Args) > 0 && strings.Contains(toString(ce.Args[0], info), `%w`)) -} - -func inspectDefinition(pass *analysis.Pass, tlds map[*ast.CallExpr]struct{}, n ast.Node) bool { //nolint: unparam - // check whether the call expression matches time.Now().Sub() - ce, ok := n.(*ast.CallExpr) - if !ok { - return true - } - - if _, ok = tlds[ce]; ok { - return true - } - - fn, ok := ce.Fun.(*ast.SelectorExpr) - if !ok { - return true - } - - fxName, ok := asImportedName(fn.X, pass.TypesInfo) - if !ok { - return true - } - - methods, ok := methods2check[fxName] - if !ok { - return true - } - - checkFunc, ok := methods[fn.Sel.Name] - if !ok { - return true - } - - if !checkFunc(ce, pass.TypesInfo) { - return true - } - - pass.Reportf( - ce.Pos(), - "do not define dynamic errors, use wrapped static errors instead: %q", - render(pass.Fset, ce), - ) - - return true -} - -func toString(ex ast.Expr, info *types.Info) string { - if tv, ok := info.Types[ex]; ok && tv.Value != nil { - return tv.Value.ExactString() - } - - return "" -} diff --git a/vendor/github.com/Djarvur/go-err113/err113.go b/vendor/github.com/Djarvur/go-err113/err113.go deleted file mode 100644 index ec4f52ac7..000000000 --- a/vendor/github.com/Djarvur/go-err113/err113.go +++ /dev/null @@ -1,90 +0,0 @@ -// Package err113 is a Golang linter to check the errors handling expressions -package err113 - -import ( - "bytes" - "go/ast" - "go/printer" - "go/token" - - "golang.org/x/tools/go/analysis" -) - -// NewAnalyzer creates a new analysis.Analyzer instance tuned to run err113 checks. -func NewAnalyzer() *analysis.Analyzer { - return &analysis.Analyzer{ - Name: "err113", - Doc: "checks the error handling rules according to the Go 1.13 new error type", - Run: run, - } -} - -func run(pass *analysis.Pass) (interface{}, error) { - for _, file := range pass.Files { - tlds := enumerateFileDecls(file) - - ast.Inspect( - file, - func(n ast.Node) bool { - return inspectComparision(pass, n) && - inspectDefinition(pass, tlds, n) - }, - ) - } - - return nil, nil -} - -// render returns the pretty-print of the given node. -func render(fset *token.FileSet, x interface{}) string { - var buf bytes.Buffer - if err := printer.Fprint(&buf, fset, x); err != nil { - panic(err) - } - - return buf.String() -} - -func enumerateFileDecls(f *ast.File) map[*ast.CallExpr]struct{} { - res := make(map[*ast.CallExpr]struct{}) - - var ces []*ast.CallExpr // nolint: prealloc - - for _, d := range f.Decls { - ces = append(ces, enumerateDeclVars(d)...) - } - - for _, ce := range ces { - res[ce] = struct{}{} - } - - return res -} - -func enumerateDeclVars(d ast.Decl) (res []*ast.CallExpr) { - td, ok := d.(*ast.GenDecl) - if !ok || td.Tok != token.VAR { - return nil - } - - for _, s := range td.Specs { - res = append(res, enumerateSpecValues(s)...) 
- } - - return res -} - -func enumerateSpecValues(s ast.Spec) (res []*ast.CallExpr) { - vs, ok := s.(*ast.ValueSpec) - if !ok { - return nil - } - - for _, v := range vs.Values { - if ce, ok := v.(*ast.CallExpr); ok { - res = append(res, ce) - } - } - - return res -} diff --git a/vendor/github.com/GaijinEntertainment/go-exhaustruct/v3/LICENSE b/vendor/github.com/GaijinEntertainment/go-exhaustruct/v3/LICENSE deleted file mode 100644 index 6698196c5..000000000 --- a/vendor/github.com/GaijinEntertainment/go-exhaustruct/v3/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -MIT License - -Copyright (c) 2022 Gaijin Entertainment - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. diff --git a/vendor/github.com/GaijinEntertainment/go-exhaustruct/v3/analyzer/analyzer.go b/vendor/github.com/GaijinEntertainment/go-exhaustruct/v3/analyzer/analyzer.go deleted file mode 100644 index b490f1c64..000000000 --- a/vendor/github.com/GaijinEntertainment/go-exhaustruct/v3/analyzer/analyzer.go +++ /dev/null @@ -1,291 +0,0 @@ -package analyzer - -import ( - "flag" - "fmt" - "go/ast" - "go/token" - "go/types" - "sync" - - "golang.org/x/tools/go/analysis" - "golang.org/x/tools/go/analysis/passes/inspect" - "golang.org/x/tools/go/ast/inspector" - - "github.com/GaijinEntertainment/go-exhaustruct/v3/internal/comment" - "github.com/GaijinEntertainment/go-exhaustruct/v3/internal/pattern" - "github.com/GaijinEntertainment/go-exhaustruct/v3/internal/structure" -) - -type analyzer struct { - include pattern.List `exhaustruct:"optional"` - exclude pattern.List `exhaustruct:"optional"` - - structFields structure.FieldsCache `exhaustruct:"optional"` - comments comment.Cache `exhaustruct:"optional"` - - typeProcessingNeed map[string]bool - typeProcessingNeedMu sync.RWMutex `exhaustruct:"optional"` -} - -func NewAnalyzer(include, exclude []string) (*analysis.Analyzer, error) { - a := analyzer{ - typeProcessingNeed: make(map[string]bool), - comments: comment.Cache{}, - } - - var err error - - a.include, err = pattern.NewList(include...) - if err != nil { - return nil, err //nolint:wrapcheck - } - - a.exclude, err = pattern.NewList(exclude...) 
- if err != nil { - return nil, err //nolint:wrapcheck - } - - return &analysis.Analyzer{ //nolint:exhaustruct - Name: "exhaustruct", - Doc: "Checks if all structure fields are initialized", - Run: a.run, - Requires: []*analysis.Analyzer{inspect.Analyzer}, - Flags: a.newFlagSet(), - }, nil -} - -func (a *analyzer) newFlagSet() flag.FlagSet { - fs := flag.NewFlagSet("", flag.PanicOnError) - - fs.Var(&a.include, "i", `Regular expression to match type names, can receive multiple flags. -Anonymous structs can be matched by '' alias. -4ex: - github.com/GaijinEntertainment/go-exhaustruct/v3/analyzer\. - github.com/GaijinEntertainment/go-exhaustruct/v3/analyzer\.TypeInfo`) - fs.Var(&a.exclude, "e", `Regular expression to exclude type names, can receive multiple flags. -Anonymous structs can be matched by '' alias. -4ex: - github.com/GaijinEntertainment/go-exhaustruct/v3/analyzer\. - github.com/GaijinEntertainment/go-exhaustruct/v3/analyzer\.TypeInfo`) - - return *fs -} - -func (a *analyzer) run(pass *analysis.Pass) (any, error) { - insp := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) //nolint:forcetypeassert - - insp.WithStack([]ast.Node{(*ast.CompositeLit)(nil)}, a.newVisitor(pass)) - - return nil, nil //nolint:nilnil -} - -// newVisitor returns visitor that only expects [ast.CompositeLit] nodes. -func (a *analyzer) newVisitor(pass *analysis.Pass) func(n ast.Node, push bool, stack []ast.Node) bool { - return func(n ast.Node, push bool, stack []ast.Node) bool { - if !push { - return true - } - - lit, ok := n.(*ast.CompositeLit) - if !ok { - // this should never happen, but better be prepared - return true - } - - structTyp, typeInfo, ok := getStructType(pass, lit) - if !ok { - return true - } - - if len(lit.Elts) == 0 { - if ret, ok := stackParentIsReturn(stack); ok { - if returnContainsNonNilError(pass, ret) { - // it is okay to return uninitialized structure in case struct's direct parent is - // a return statement containing non-nil error - // - // we're unable to check if returned error is custom, but at least we're able to - // cover str [error] type. - return true - } - } - } - - file := a.comments.Get(pass.Fset, stack[0].(*ast.File)) //nolint:forcetypeassert - rc := getCompositeLitRelatedComments(stack, file) - pos, msg := a.processStruct(pass, lit, structTyp, typeInfo, rc) - - if pos != nil { - pass.Reportf(*pos, msg) - } - - return true - } -} - -// getCompositeLitRelatedComments returns all comments that are related to checked node. We -// have to traverse the stack manually as ast do not associate comments with -// [ast.CompositeLit]. -func getCompositeLitRelatedComments(stack []ast.Node, cm ast.CommentMap) []*ast.CommentGroup { - comments := make([]*ast.CommentGroup, 0) - - for i := len(stack) - 1; i >= 0; i-- { - node := stack[i] - - switch node.(type) { - case *ast.CompositeLit, // stack[len(stack)-1] - *ast.ReturnStmt, // return ... - *ast.IndexExpr, // map[enum]...{...}[key] - *ast.CallExpr, // myfunc(map...) - *ast.UnaryExpr, // &map... - *ast.AssignStmt, // variable assignment (without var keyword) - *ast.DeclStmt, // var declaration, parent of *ast.GenDecl - *ast.GenDecl, // var declaration, parent of *ast.ValueSpec - *ast.ValueSpec: // var declaration - comments = append(comments, cm[node]...) 
- - default: - return comments - } - } - - return comments -} - -func getStructType(pass *analysis.Pass, lit *ast.CompositeLit) (*types.Struct, *TypeInfo, bool) { - switch typ := pass.TypesInfo.TypeOf(lit).(type) { - case *types.Named: // named type - if structTyp, ok := typ.Underlying().(*types.Struct); ok { - pkg := typ.Obj().Pkg() - ti := TypeInfo{ - Name: typ.Obj().Name(), - PackageName: pkg.Name(), - PackagePath: pkg.Path(), - } - - return structTyp, &ti, true - } - - return nil, nil, false - - case *types.Struct: // anonymous struct - ti := TypeInfo{ - Name: "", - PackageName: pass.Pkg.Name(), - PackagePath: pass.Pkg.Path(), - } - - return typ, &ti, true - - default: - return nil, nil, false - } -} - -func stackParentIsReturn(stack []ast.Node) (*ast.ReturnStmt, bool) { - // it is safe to skip boundary check, since stack always has at least one element - // - whole file. - ret, ok := stack[len(stack)-2].(*ast.ReturnStmt) - - return ret, ok -} - -func returnContainsNonNilError(pass *analysis.Pass, ret *ast.ReturnStmt) bool { - // errors are mostly located at the end of return statement, so we're starting - // from the end. - for i := len(ret.Results) - 1; i >= 0; i-- { - if pass.TypesInfo.TypeOf(ret.Results[i]).String() == "error" { - return true - } - } - - return false -} - -func (a *analyzer) processStruct( - pass *analysis.Pass, - lit *ast.CompositeLit, - structTyp *types.Struct, - info *TypeInfo, - comments []*ast.CommentGroup, -) (*token.Pos, string) { - shouldProcess := a.shouldProcessType(info) - - if shouldProcess && comment.HasDirective(comments, comment.DirectiveIgnore) { - return nil, "" - } - - if !shouldProcess && !comment.HasDirective(comments, comment.DirectiveEnforce) { - return nil, "" - } - - // unnamed structures are only defined in same package, along with types that has - // prefix identical to current package name. - isSamePackage := info.PackagePath == pass.Pkg.Path() - - if f := a.litSkippedFields(lit, structTyp, !isSamePackage); len(f) > 0 { - pos := lit.Pos() - - if len(f) == 1 { - return &pos, fmt.Sprintf("%s is missing field %s", info.ShortString(), f.String()) - } - - return &pos, fmt.Sprintf("%s is missing fields %s", info.ShortString(), f.String()) - } - - return nil, "" -} - -// shouldProcessType returns true if type should be processed basing off include -// and exclude patterns, defined though constructor and\or flags. -func (a *analyzer) shouldProcessType(info *TypeInfo) bool { - if len(a.include) == 0 && len(a.exclude) == 0 { - return true - } - - name := info.String() - - a.typeProcessingNeedMu.RLock() - res, ok := a.typeProcessingNeed[name] - a.typeProcessingNeedMu.RUnlock() - - if !ok { - a.typeProcessingNeedMu.Lock() - res = true - - if a.include != nil && !a.include.MatchFullString(name) { - res = false - } - - if res && a.exclude != nil && a.exclude.MatchFullString(name) { - res = false - } - - a.typeProcessingNeed[name] = res - a.typeProcessingNeedMu.Unlock() - } - - return res -} - -func (a *analyzer) litSkippedFields( - lit *ast.CompositeLit, - typ *types.Struct, - onlyExported bool, -) structure.Fields { - return a.structFields.Get(typ).Skipped(lit, onlyExported) -} - -type TypeInfo struct { - Name string - PackageName string - PackagePath string -} - -func (t TypeInfo) String() string { - return t.PackagePath + "." + t.Name -} - -func (t TypeInfo) ShortString() string { - return t.PackageName + "." 
+ t.Name -} diff --git a/vendor/github.com/GaijinEntertainment/go-exhaustruct/v3/internal/comment/cache.go b/vendor/github.com/GaijinEntertainment/go-exhaustruct/v3/internal/comment/cache.go deleted file mode 100644 index 88edef638..000000000 --- a/vendor/github.com/GaijinEntertainment/go-exhaustruct/v3/internal/comment/cache.go +++ /dev/null @@ -1,35 +0,0 @@ -package comment - -import ( - "go/ast" - "go/token" - "sync" -) - -type Cache struct { - comments map[*ast.File]ast.CommentMap - mu sync.RWMutex -} - -// Get returns a comment map for a given file. In case if a comment map is not -// found, it creates a new one. -func (c *Cache) Get(fset *token.FileSet, f *ast.File) ast.CommentMap { - c.mu.RLock() - if cm, ok := c.comments[f]; ok { - c.mu.RUnlock() - return cm - } - c.mu.RUnlock() - - c.mu.Lock() - defer c.mu.Unlock() - - if c.comments == nil { - c.comments = make(map[*ast.File]ast.CommentMap) - } - - cm := ast.NewCommentMap(fset, f, f.Comments) - c.comments[f] = cm - - return cm -} diff --git a/vendor/github.com/GaijinEntertainment/go-exhaustruct/v3/internal/comment/directive.go b/vendor/github.com/GaijinEntertainment/go-exhaustruct/v3/internal/comment/directive.go deleted file mode 100644 index a39a8076f..000000000 --- a/vendor/github.com/GaijinEntertainment/go-exhaustruct/v3/internal/comment/directive.go +++ /dev/null @@ -1,28 +0,0 @@ -package comment - -import ( - "go/ast" - "strings" -) - -type Directive string - -const ( - prefix = `//exhaustruct:` - DirectiveIgnore Directive = prefix + `ignore` - DirectiveEnforce Directive = prefix + `enforce` -) - -// HasDirective parses a directive from a given list of comments. -// If no directive is found, the second return value is `false`. -func HasDirective(comments []*ast.CommentGroup, expected Directive) bool { - for _, cg := range comments { - for _, commentLine := range cg.List { - if strings.HasPrefix(commentLine.Text, string(expected)) { - return true - } - } - } - - return false -} diff --git a/vendor/github.com/GaijinEntertainment/go-exhaustruct/v3/internal/pattern/list.go b/vendor/github.com/GaijinEntertainment/go-exhaustruct/v3/internal/pattern/list.go deleted file mode 100644 index a16e5058d..000000000 --- a/vendor/github.com/GaijinEntertainment/go-exhaustruct/v3/internal/pattern/list.go +++ /dev/null @@ -1,82 +0,0 @@ -package pattern - -import ( - "fmt" - "regexp" - "strings" -) - -var ( - ErrEmptyPattern = fmt.Errorf("pattern can't be empty") - ErrCompilationFailed = fmt.Errorf("pattern compilation failed") -) - -// List is a list of regular expressions. -type List []*regexp.Regexp - -// NewList parses slice of strings to a slice of compiled regular expressions. -func NewList(strs ...string) (List, error) { - if len(strs) == 0 { - return nil, nil - } - - l := make(List, 0, len(strs)) - - for _, str := range strs { - re, err := strToRe(str) - if err != nil { - return nil, err - } - - l = append(l, re) - } - - return l, nil -} - -// MatchFullString matches provided string against all regexps in a slice and returns -// true if any of them matches whole string. 
-func (l List) MatchFullString(str string) bool { - for i := 0; i < len(l); i++ { - if m := l[i].FindStringSubmatch(str); len(m) > 0 && m[0] == str { - return true - } - } - - return false -} - -func (l *List) Set(value string) error { - re, err := strToRe(value) - if err != nil { - return err - } - - *l = append(*l, re) - - return nil -} - -func (l *List) String() string { - res := make([]string, 0, len(*l)) - - for _, re := range *l { - res = append(res, `"`+re.String()+`"`) - } - - return strings.Join(res, ", ") -} - -// strToRe parses string to a compiled regular expression that matches full string. -func strToRe(str string) (*regexp.Regexp, error) { - if str == "" { - return nil, ErrEmptyPattern - } - - re, err := regexp.Compile(str) - if err != nil { - return nil, fmt.Errorf("%w: %s: %w", ErrCompilationFailed, str, err) - } - - return re, nil -} diff --git a/vendor/github.com/GaijinEntertainment/go-exhaustruct/v3/internal/structure/fields-cache.go b/vendor/github.com/GaijinEntertainment/go-exhaustruct/v3/internal/structure/fields-cache.go deleted file mode 100644 index 12a379692..000000000 --- a/vendor/github.com/GaijinEntertainment/go-exhaustruct/v3/internal/structure/fields-cache.go +++ /dev/null @@ -1,35 +0,0 @@ -package structure - -import ( - "go/types" - "sync" -) - -type FieldsCache struct { - fields map[*types.Struct]Fields - mu sync.RWMutex -} - -// Get returns a struct fields for a given type. In case if a struct fields is -// not found, it creates a new one from type definition. -func (c *FieldsCache) Get(typ *types.Struct) Fields { - c.mu.RLock() - fields, ok := c.fields[typ] - c.mu.RUnlock() - - if ok { - return fields - } - - c.mu.Lock() - defer c.mu.Unlock() - - if c.fields == nil { - c.fields = make(map[*types.Struct]Fields) - } - - fields = NewFields(typ) - c.fields[typ] = fields - - return fields -} diff --git a/vendor/github.com/GaijinEntertainment/go-exhaustruct/v3/internal/structure/fields.go b/vendor/github.com/GaijinEntertainment/go-exhaustruct/v3/internal/structure/fields.go deleted file mode 100644 index b6b1a48c8..000000000 --- a/vendor/github.com/GaijinEntertainment/go-exhaustruct/v3/internal/structure/fields.go +++ /dev/null @@ -1,127 +0,0 @@ -package structure - -import ( - "go/ast" - "go/types" - "reflect" - "strings" -) - -const ( - tagName = "exhaustruct" - optionalTagValue = "optional" -) - -type Field struct { - Name string - Exported bool - Optional bool -} - -type Fields []*Field - -// NewFields creates a new [Fields] from a given struct type. -// Fields items are listed in order they appear in the struct. -func NewFields(strct *types.Struct) Fields { - sf := make(Fields, 0, strct.NumFields()) - - for i := 0; i < strct.NumFields(); i++ { - f := strct.Field(i) - - sf = append(sf, &Field{ - Name: f.Name(), - Exported: f.Exported(), - Optional: HasOptionalTag(strct.Tag(i)), - }) - } - - return sf -} - -func HasOptionalTag(tags string) bool { - return reflect.StructTag(tags).Get(tagName) == optionalTagValue -} - -// String returns a comma-separated list of field names. -func (sf Fields) String() string { - b := strings.Builder{} - - for i := 0; i < len(sf); i++ { - if b.Len() != 0 { - b.WriteString(", ") - } - - b.WriteString(sf[i].Name) - } - - return b.String() -} - -// Skipped returns a list of fields that are not present in the given -// literal, but expected to. 
-// -//revive:disable-next-line:cyclomatic -func (sf Fields) Skipped(lit *ast.CompositeLit, onlyExported bool) Fields { - if len(lit.Elts) != 0 && !isNamedLiteral(lit) { - if len(lit.Elts) == len(sf) { - return nil - } - - return sf[len(lit.Elts):] - } - - em := sf.existenceMap() - res := make(Fields, 0, len(sf)) - - for i := 0; i < len(lit.Elts); i++ { - kv, ok := lit.Elts[i].(*ast.KeyValueExpr) - if !ok { - continue - } - - k, ok := kv.Key.(*ast.Ident) - if !ok { - continue - } - - em[k.Name] = true - } - - for i := 0; i < len(sf); i++ { - if em[sf[i].Name] || (!sf[i].Exported && onlyExported) || sf[i].Optional { - continue - } - - res = append(res, sf[i]) - } - - if len(res) == 0 { - return nil - } - - return res -} - -func (sf Fields) existenceMap() map[string]bool { - m := make(map[string]bool, len(sf)) - - for i := 0; i < len(sf); i++ { - m[sf[i].Name] = false - } - - return m -} - -// isNamedLiteral returns true if the given literal is unnamed. -// -// The logic is basing on the principle that literal is named or unnamed, -// therefore is literal's first element is a [ast.KeyValueExpr], it is named. -// -// Method will panic if the given literal is empty. -func isNamedLiteral(lit *ast.CompositeLit) bool { - if _, ok := lit.Elts[0].(*ast.KeyValueExpr); !ok { - return false - } - - return true -} diff --git a/vendor/github.com/Masterminds/semver/.travis.yml b/vendor/github.com/Masterminds/semver/.travis.yml deleted file mode 100644 index 096369d44..000000000 --- a/vendor/github.com/Masterminds/semver/.travis.yml +++ /dev/null @@ -1,29 +0,0 @@ -language: go - -go: - - 1.6.x - - 1.7.x - - 1.8.x - - 1.9.x - - 1.10.x - - 1.11.x - - 1.12.x - - tip - -# Setting sudo access to false will let Travis CI use containers rather than -# VMs to run the tests. 
For more details see: -# - http://docs.travis-ci.com/user/workers/container-based-infrastructure/ -# - http://docs.travis-ci.com/user/workers/standard-infrastructure/ -sudo: false - -script: - - make setup - - make test - -notifications: - webhooks: - urls: - - https://webhooks.gitter.im/e/06e3328629952dabe3e0 - on_success: change # options: [always|never|change] default: always - on_failure: always # options: [always|never|change] default: always - on_start: never # options: [always|never|change] default: always diff --git a/vendor/github.com/Masterminds/semver/CHANGELOG.md b/vendor/github.com/Masterminds/semver/CHANGELOG.md deleted file mode 100644 index e405c9a84..000000000 --- a/vendor/github.com/Masterminds/semver/CHANGELOG.md +++ /dev/null @@ -1,109 +0,0 @@ -# 1.5.0 (2019-09-11) - -## Added - -- #103: Add basic fuzzing for `NewVersion()` (thanks @jesse-c) - -## Changed - -- #82: Clarify wildcard meaning in range constraints and update tests for it (thanks @greysteil) -- #83: Clarify caret operator range for pre-1.0.0 dependencies (thanks @greysteil) -- #72: Adding docs comment pointing to vert for a cli -- #71: Update the docs on pre-release comparator handling -- #89: Test with new go versions (thanks @thedevsaddam) -- #87: Added $ to ValidPrerelease for better validation (thanks @jeremycarroll) - -## Fixed - -- #78: Fix unchecked error in example code (thanks @ravron) -- #70: Fix the handling of pre-releases and the 0.0.0 release edge case -- #97: Fixed copyright file for proper display on GitHub -- #107: Fix handling prerelease when sorting alphanum and num -- #109: Fixed where Validate sometimes returns wrong message on error - -# 1.4.2 (2018-04-10) - -## Changed -- #72: Updated the docs to point to vert for a console appliaction -- #71: Update the docs on pre-release comparator handling - -## Fixed -- #70: Fix the handling of pre-releases and the 0.0.0 release edge case - -# 1.4.1 (2018-04-02) - -## Fixed -- Fixed #64: Fix pre-release precedence issue (thanks @uudashr) - -# 1.4.0 (2017-10-04) - -## Changed -- #61: Update NewVersion to parse ints with a 64bit int size (thanks @zknill) - -# 1.3.1 (2017-07-10) - -## Fixed -- Fixed #57: number comparisons in prerelease sometimes inaccurate - -# 1.3.0 (2017-05-02) - -## Added -- #45: Added json (un)marshaling support (thanks @mh-cbon) -- Stability marker. See https://masterminds.github.io/stability/ - -## Fixed -- #51: Fix handling of single digit tilde constraint (thanks @dgodd) - -## Changed -- #55: The godoc icon moved from png to svg - -# 1.2.3 (2017-04-03) - -## Fixed -- #46: Fixed 0.x.x and 0.0.x in constraints being treated as * - -# Release 1.2.2 (2016-12-13) - -## Fixed -- #34: Fixed issue where hyphen range was not working with pre-release parsing. - -# Release 1.2.1 (2016-11-28) - -## Fixed -- #24: Fixed edge case issue where constraint "> 0" does not handle "0.0.1-alpha" - properly. - -# Release 1.2.0 (2016-11-04) - -## Added -- #20: Added MustParse function for versions (thanks @adamreese) -- #15: Added increment methods on versions (thanks @mh-cbon) - -## Fixed -- Issue #21: Per the SemVer spec (section 9) a pre-release is unstable and - might not satisfy the intended compatibility. The change here ignores pre-releases - on constraint checks (e.g., ~ or ^) when a pre-release is not part of the - constraint. For example, `^1.2.3` will ignore pre-releases while - `^1.2.3-alpha` will include them. 
- -# Release 1.1.1 (2016-06-30) - -## Changed -- Issue #9: Speed up version comparison performance (thanks @sdboyer) -- Issue #8: Added benchmarks (thanks @sdboyer) -- Updated Go Report Card URL to new location -- Updated Readme to add code snippet formatting (thanks @mh-cbon) -- Updating tagging to v[SemVer] structure for compatibility with other tools. - -# Release 1.1.0 (2016-03-11) - -- Issue #2: Implemented validation to provide reasons a versions failed a - constraint. - -# Release 1.0.1 (2015-12-31) - -- Fixed #1: * constraint failing on valid versions. - -# Release 1.0.0 (2015-10-20) - -- Initial release diff --git a/vendor/github.com/Masterminds/semver/LICENSE.txt b/vendor/github.com/Masterminds/semver/LICENSE.txt deleted file mode 100644 index 9ff7da9c4..000000000 --- a/vendor/github.com/Masterminds/semver/LICENSE.txt +++ /dev/null @@ -1,19 +0,0 @@ -Copyright (C) 2014-2019, Matt Butcher and Matt Farina - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. diff --git a/vendor/github.com/Masterminds/semver/Makefile b/vendor/github.com/Masterminds/semver/Makefile deleted file mode 100644 index a7a1b4e36..000000000 --- a/vendor/github.com/Masterminds/semver/Makefile +++ /dev/null @@ -1,36 +0,0 @@ -.PHONY: setup -setup: - go get -u gopkg.in/alecthomas/gometalinter.v1 - gometalinter.v1 --install - -.PHONY: test -test: validate lint - @echo "==> Running tests" - go test -v - -.PHONY: validate -validate: - @echo "==> Running static validations" - @gometalinter.v1 \ - --disable-all \ - --enable deadcode \ - --severity deadcode:error \ - --enable gofmt \ - --enable gosimple \ - --enable ineffassign \ - --enable misspell \ - --enable vet \ - --tests \ - --vendor \ - --deadline 60s \ - ./... || exit_code=1 - -.PHONY: lint -lint: - @echo "==> Running linters" - @gometalinter.v1 \ - --disable-all \ - --enable golint \ - --vendor \ - --deadline 60s \ - ./... || : diff --git a/vendor/github.com/Masterminds/semver/README.md b/vendor/github.com/Masterminds/semver/README.md deleted file mode 100644 index 1b52d2f43..000000000 --- a/vendor/github.com/Masterminds/semver/README.md +++ /dev/null @@ -1,194 +0,0 @@ -# SemVer - -The `semver` package provides the ability to work with [Semantic Versions](http://semver.org) in Go. 
Specifically it provides the ability to: - -* Parse semantic versions -* Sort semantic versions -* Check if a semantic version fits within a set of constraints -* Optionally work with a `v` prefix - -[![Stability: -Active](https://masterminds.github.io/stability/active.svg)](https://masterminds.github.io/stability/active.html) -[![Build Status](https://travis-ci.org/Masterminds/semver.svg)](https://travis-ci.org/Masterminds/semver) [![Build status](https://ci.appveyor.com/api/projects/status/jfk66lib7hb985k8/branch/master?svg=true&passingText=windows%20build%20passing&failingText=windows%20build%20failing)](https://ci.appveyor.com/project/mattfarina/semver/branch/master) [![GoDoc](https://godoc.org/github.com/Masterminds/semver?status.svg)](https://godoc.org/github.com/Masterminds/semver) [![Go Report Card](https://goreportcard.com/badge/github.com/Masterminds/semver)](https://goreportcard.com/report/github.com/Masterminds/semver) - -If you are looking for a command line tool for version comparisons please see -[vert](https://github.com/Masterminds/vert) which uses this library. - -## Parsing Semantic Versions - -To parse a semantic version use the `NewVersion` function. For example, - -```go - v, err := semver.NewVersion("1.2.3-beta.1+build345") -``` - -If there is an error the version wasn't parseable. The version object has methods -to get the parts of the version, compare it to other versions, convert the -version back into a string, and get the original string. For more details -please see the [documentation](https://godoc.org/github.com/Masterminds/semver). - -## Sorting Semantic Versions - -A set of versions can be sorted using the [`sort`](https://golang.org/pkg/sort/) -package from the standard library. For example, - -```go - raw := []string{"1.2.3", "1.0", "1.3", "2", "0.4.2",} - vs := make([]*semver.Version, len(raw)) - for i, r := range raw { - v, err := semver.NewVersion(r) - if err != nil { - t.Errorf("Error parsing version: %s", err) - } - - vs[i] = v - } - - sort.Sort(semver.Collection(vs)) -``` - -## Checking Version Constraints - -Checking a version against version constraints is one of the most featureful -parts of the package. - -```go - c, err := semver.NewConstraint(">= 1.2.3") - if err != nil { - // Handle constraint not being parseable. - } - - v, _ := semver.NewVersion("1.3") - if err != nil { - // Handle version not being parseable. - } - // Check if the version meets the constraints. The a variable will be true. - a := c.Check(v) -``` - -## Basic Comparisons - -There are two elements to the comparisons. First, a comparison string is a list -of comma separated and comparisons. These are then separated by || separated or -comparisons. For example, `">= 1.2, < 3.0.0 || >= 4.2.3"` is looking for a -comparison that's greater than or equal to 1.2 and less than 3.0.0 or is -greater than or equal to 4.2.3. - -The basic comparisons are: - -* `=`: equal (aliased to no operator) -* `!=`: not equal -* `>`: greater than -* `<`: less than -* `>=`: greater than or equal to -* `<=`: less than or equal to - -## Working With Pre-release Versions - -Pre-releases, for those not familiar with them, are used for software releases -prior to stable or generally available releases. Examples of pre-releases include -development, alpha, beta, and release candidate releases. A pre-release may be -a version such as `1.2.3-beta.1` while the stable release would be `1.2.3`. In the -order of precidence, pre-releases come before their associated releases. 
In this -example `1.2.3-beta.1 < 1.2.3`. - -According to the Semantic Version specification pre-releases may not be -API compliant with their release counterpart. It says, - -> A pre-release version indicates that the version is unstable and might not satisfy the intended compatibility requirements as denoted by its associated normal version. - -SemVer comparisons without a pre-release comparator will skip pre-release versions. -For example, `>=1.2.3` will skip pre-releases when looking at a list of releases -while `>=1.2.3-0` will evaluate and find pre-releases. - -The reason for the `0` as a pre-release version in the example comparison is -because pre-releases can only contain ASCII alphanumerics and hyphens (along with -`.` separators), per the spec. Sorting happens in ASCII sort order, again per the spec. The lowest character is a `0` in ASCII sort order (see an [ASCII Table](http://www.asciitable.com/)) - -Understanding ASCII sort ordering is important because A-Z comes before a-z. That -means `>=1.2.3-BETA` will return `1.2.3-alpha`. What you might expect from case -sensitivity doesn't apply here. This is due to ASCII sort ordering which is what -the spec specifies. - -## Hyphen Range Comparisons - -There are multiple methods to handle ranges and the first is hyphens ranges. -These look like: - -* `1.2 - 1.4.5` which is equivalent to `>= 1.2, <= 1.4.5` -* `2.3.4 - 4.5` which is equivalent to `>= 2.3.4, <= 4.5` - -## Wildcards In Comparisons - -The `x`, `X`, and `*` characters can be used as a wildcard character. This works -for all comparison operators. When used on the `=` operator it falls -back to the pack level comparison (see tilde below). For example, - -* `1.2.x` is equivalent to `>= 1.2.0, < 1.3.0` -* `>= 1.2.x` is equivalent to `>= 1.2.0` -* `<= 2.x` is equivalent to `< 3` -* `*` is equivalent to `>= 0.0.0` - -## Tilde Range Comparisons (Patch) - -The tilde (`~`) comparison operator is for patch level ranges when a minor -version is specified and major level changes when the minor number is missing. -For example, - -* `~1.2.3` is equivalent to `>= 1.2.3, < 1.3.0` -* `~1` is equivalent to `>= 1, < 2` -* `~2.3` is equivalent to `>= 2.3, < 2.4` -* `~1.2.x` is equivalent to `>= 1.2.0, < 1.3.0` -* `~1.x` is equivalent to `>= 1, < 2` - -## Caret Range Comparisons (Major) - -The caret (`^`) comparison operator is for major level changes. This is useful -when comparisons of API versions as a major change is API breaking. For example, - -* `^1.2.3` is equivalent to `>= 1.2.3, < 2.0.0` -* `^0.0.1` is equivalent to `>= 0.0.1, < 1.0.0` -* `^1.2.x` is equivalent to `>= 1.2.0, < 2.0.0` -* `^2.3` is equivalent to `>= 2.3, < 3` -* `^2.x` is equivalent to `>= 2.0.0, < 3` - -# Validation - -In addition to testing a version against a constraint, a version can be validated -against a constraint. When validation fails a slice of errors containing why a -version didn't meet the constraint is returned. For example, - -```go - c, err := semver.NewConstraint("<= 1.2.3, >= 1.4") - if err != nil { - // Handle constraint not being parseable. - } - - v, _ := semver.NewVersion("1.3") - if err != nil { - // Handle version not being parseable. - } - - // Validate a version against a constraint. - a, msgs := c.Validate(v) - // a is false - for _, m := range msgs { - fmt.Println(m) - - // Loops over the errors which would read - // "1.3 is greater than 1.2.3" - // "1.3 is less than 1.4" - } -``` - -# Fuzzing - - [dvyukov/go-fuzz](https://github.com/dvyukov/go-fuzz) is used for fuzzing. - -1. 
`go-fuzz-build` -2. `go-fuzz -workdir=fuzz` - -# Contribute - -If you find an issue or want to contribute please file an [issue](https://github.com/Masterminds/semver/issues) -or [create a pull request](https://github.com/Masterminds/semver/pulls). diff --git a/vendor/github.com/Masterminds/semver/appveyor.yml b/vendor/github.com/Masterminds/semver/appveyor.yml deleted file mode 100644 index b2778df15..000000000 --- a/vendor/github.com/Masterminds/semver/appveyor.yml +++ /dev/null @@ -1,44 +0,0 @@ -version: build-{build}.{branch} - -clone_folder: C:\gopath\src\github.com\Masterminds\semver -shallow_clone: true - -environment: - GOPATH: C:\gopath - -platform: - - x64 - -install: - - go version - - go env - - go get -u gopkg.in/alecthomas/gometalinter.v1 - - set PATH=%PATH%;%GOPATH%\bin - - gometalinter.v1.exe --install - -build_script: - - go install -v ./... - -test_script: - - "gometalinter.v1 \ - --disable-all \ - --enable deadcode \ - --severity deadcode:error \ - --enable gofmt \ - --enable gosimple \ - --enable ineffassign \ - --enable misspell \ - --enable vet \ - --tests \ - --vendor \ - --deadline 60s \ - ./... || exit_code=1" - - "gometalinter.v1 \ - --disable-all \ - --enable golint \ - --vendor \ - --deadline 60s \ - ./... || :" - - go test -v - -deploy: off diff --git a/vendor/github.com/Masterminds/semver/collection.go b/vendor/github.com/Masterminds/semver/collection.go deleted file mode 100644 index a78235895..000000000 --- a/vendor/github.com/Masterminds/semver/collection.go +++ /dev/null @@ -1,24 +0,0 @@ -package semver - -// Collection is a collection of Version instances and implements the sort -// interface. See the sort package for more details. -// https://golang.org/pkg/sort/ -type Collection []*Version - -// Len returns the length of a collection. The number of Version instances -// on the slice. -func (c Collection) Len() int { - return len(c) -} - -// Less is needed for the sort interface to compare two Version objects on the -// slice. If checks if one is less than the other. -func (c Collection) Less(i, j int) bool { - return c[i].LessThan(c[j]) -} - -// Swap is needed for the sort interface to replace the Version objects -// at two different positions in the slice. -func (c Collection) Swap(i, j int) { - c[i], c[j] = c[j], c[i] -} diff --git a/vendor/github.com/Masterminds/semver/constraints.go b/vendor/github.com/Masterminds/semver/constraints.go deleted file mode 100644 index b94b93413..000000000 --- a/vendor/github.com/Masterminds/semver/constraints.go +++ /dev/null @@ -1,423 +0,0 @@ -package semver - -import ( - "errors" - "fmt" - "regexp" - "strings" -) - -// Constraints is one or more constraint that a semantic version can be -// checked against. -type Constraints struct { - constraints [][]*constraint -} - -// NewConstraint returns a Constraints instance that a Version instance can -// be checked against. If there is a parse error it will be returned. -func NewConstraint(c string) (*Constraints, error) { - - // Rewrite - ranges into a comparison operation. - c = rewriteRange(c) - - ors := strings.Split(c, "||") - or := make([][]*constraint, len(ors)) - for k, v := range ors { - cs := strings.Split(v, ",") - result := make([]*constraint, len(cs)) - for i, s := range cs { - pc, err := parseConstraint(s) - if err != nil { - return nil, err - } - - result[i] = pc - } - or[k] = result - } - - o := &Constraints{constraints: or} - return o, nil -} - -// Check tests if a version satisfies the constraints. 
-func (cs Constraints) Check(v *Version) bool { - // loop over the ORs and check the inner ANDs - for _, o := range cs.constraints { - joy := true - for _, c := range o { - if !c.check(v) { - joy = false - break - } - } - - if joy { - return true - } - } - - return false -} - -// Validate checks if a version satisfies a constraint. If not a slice of -// reasons for the failure are returned in addition to a bool. -func (cs Constraints) Validate(v *Version) (bool, []error) { - // loop over the ORs and check the inner ANDs - var e []error - - // Capture the prerelease message only once. When it happens the first time - // this var is marked - var prerelesase bool - for _, o := range cs.constraints { - joy := true - for _, c := range o { - // Before running the check handle the case there the version is - // a prerelease and the check is not searching for prereleases. - if c.con.pre == "" && v.pre != "" { - if !prerelesase { - em := fmt.Errorf("%s is a prerelease version and the constraint is only looking for release versions", v) - e = append(e, em) - prerelesase = true - } - joy = false - - } else { - - if !c.check(v) { - em := fmt.Errorf(c.msg, v, c.orig) - e = append(e, em) - joy = false - } - } - } - - if joy { - return true, []error{} - } - } - - return false, e -} - -var constraintOps map[string]cfunc -var constraintMsg map[string]string -var constraintRegex *regexp.Regexp - -func init() { - constraintOps = map[string]cfunc{ - "": constraintTildeOrEqual, - "=": constraintTildeOrEqual, - "!=": constraintNotEqual, - ">": constraintGreaterThan, - "<": constraintLessThan, - ">=": constraintGreaterThanEqual, - "=>": constraintGreaterThanEqual, - "<=": constraintLessThanEqual, - "=<": constraintLessThanEqual, - "~": constraintTilde, - "~>": constraintTilde, - "^": constraintCaret, - } - - constraintMsg = map[string]string{ - "": "%s is not equal to %s", - "=": "%s is not equal to %s", - "!=": "%s is equal to %s", - ">": "%s is less than or equal to %s", - "<": "%s is greater than or equal to %s", - ">=": "%s is less than %s", - "=>": "%s is less than %s", - "<=": "%s is greater than %s", - "=<": "%s is greater than %s", - "~": "%s does not have same major and minor version as %s", - "~>": "%s does not have same major and minor version as %s", - "^": "%s does not have same major version as %s", - } - - ops := make([]string, 0, len(constraintOps)) - for k := range constraintOps { - ops = append(ops, regexp.QuoteMeta(k)) - } - - constraintRegex = regexp.MustCompile(fmt.Sprintf( - `^\s*(%s)\s*(%s)\s*$`, - strings.Join(ops, "|"), - cvRegex)) - - constraintRangeRegex = regexp.MustCompile(fmt.Sprintf( - `\s*(%s)\s+-\s+(%s)\s*`, - cvRegex, cvRegex)) -} - -// An individual constraint -type constraint struct { - // The callback function for the restraint. It performs the logic for - // the constraint. - function cfunc - - msg string - - // The version used in the constraint check. For example, if a constraint - // is '<= 2.0.0' the con a version instance representing 2.0.0. 
- con *Version - - // The original parsed version (e.g., 4.x from != 4.x) - orig string - - // When an x is used as part of the version (e.g., 1.x) - minorDirty bool - dirty bool - patchDirty bool -} - -// Check if a version meets the constraint -func (c *constraint) check(v *Version) bool { - return c.function(v, c) -} - -type cfunc func(v *Version, c *constraint) bool - -func parseConstraint(c string) (*constraint, error) { - m := constraintRegex.FindStringSubmatch(c) - if m == nil { - return nil, fmt.Errorf("improper constraint: %s", c) - } - - ver := m[2] - orig := ver - minorDirty := false - patchDirty := false - dirty := false - if isX(m[3]) { - ver = "0.0.0" - dirty = true - } else if isX(strings.TrimPrefix(m[4], ".")) || m[4] == "" { - minorDirty = true - dirty = true - ver = fmt.Sprintf("%s.0.0%s", m[3], m[6]) - } else if isX(strings.TrimPrefix(m[5], ".")) { - dirty = true - patchDirty = true - ver = fmt.Sprintf("%s%s.0%s", m[3], m[4], m[6]) - } - - con, err := NewVersion(ver) - if err != nil { - - // The constraintRegex should catch any regex parsing errors. So, - // we should never get here. - return nil, errors.New("constraint Parser Error") - } - - cs := &constraint{ - function: constraintOps[m[1]], - msg: constraintMsg[m[1]], - con: con, - orig: orig, - minorDirty: minorDirty, - patchDirty: patchDirty, - dirty: dirty, - } - return cs, nil -} - -// Constraint functions -func constraintNotEqual(v *Version, c *constraint) bool { - if c.dirty { - - // If there is a pre-release on the version but the constraint isn't looking - // for them assume that pre-releases are not compatible. See issue 21 for - // more details. - if v.Prerelease() != "" && c.con.Prerelease() == "" { - return false - } - - if c.con.Major() != v.Major() { - return true - } - if c.con.Minor() != v.Minor() && !c.minorDirty { - return true - } else if c.minorDirty { - return false - } - - return false - } - - return !v.Equal(c.con) -} - -func constraintGreaterThan(v *Version, c *constraint) bool { - - // If there is a pre-release on the version but the constraint isn't looking - // for them assume that pre-releases are not compatible. See issue 21 for - // more details. - if v.Prerelease() != "" && c.con.Prerelease() == "" { - return false - } - - return v.Compare(c.con) == 1 -} - -func constraintLessThan(v *Version, c *constraint) bool { - // If there is a pre-release on the version but the constraint isn't looking - // for them assume that pre-releases are not compatible. See issue 21 for - // more details. - if v.Prerelease() != "" && c.con.Prerelease() == "" { - return false - } - - if !c.dirty { - return v.Compare(c.con) < 0 - } - - if v.Major() > c.con.Major() { - return false - } else if v.Minor() > c.con.Minor() && !c.minorDirty { - return false - } - - return true -} - -func constraintGreaterThanEqual(v *Version, c *constraint) bool { - - // If there is a pre-release on the version but the constraint isn't looking - // for them assume that pre-releases are not compatible. See issue 21 for - // more details. - if v.Prerelease() != "" && c.con.Prerelease() == "" { - return false - } - - return v.Compare(c.con) >= 0 -} - -func constraintLessThanEqual(v *Version, c *constraint) bool { - // If there is a pre-release on the version but the constraint isn't looking - // for them assume that pre-releases are not compatible. See issue 21 for - // more details. 
- if v.Prerelease() != "" && c.con.Prerelease() == "" { - return false - } - - if !c.dirty { - return v.Compare(c.con) <= 0 - } - - if v.Major() > c.con.Major() { - return false - } else if v.Minor() > c.con.Minor() && !c.minorDirty { - return false - } - - return true -} - -// ~*, ~>* --> >= 0.0.0 (any) -// ~2, ~2.x, ~2.x.x, ~>2, ~>2.x ~>2.x.x --> >=2.0.0, <3.0.0 -// ~2.0, ~2.0.x, ~>2.0, ~>2.0.x --> >=2.0.0, <2.1.0 -// ~1.2, ~1.2.x, ~>1.2, ~>1.2.x --> >=1.2.0, <1.3.0 -// ~1.2.3, ~>1.2.3 --> >=1.2.3, <1.3.0 -// ~1.2.0, ~>1.2.0 --> >=1.2.0, <1.3.0 -func constraintTilde(v *Version, c *constraint) bool { - // If there is a pre-release on the version but the constraint isn't looking - // for them assume that pre-releases are not compatible. See issue 21 for - // more details. - if v.Prerelease() != "" && c.con.Prerelease() == "" { - return false - } - - if v.LessThan(c.con) { - return false - } - - // ~0.0.0 is a special case where all constraints are accepted. It's - // equivalent to >= 0.0.0. - if c.con.Major() == 0 && c.con.Minor() == 0 && c.con.Patch() == 0 && - !c.minorDirty && !c.patchDirty { - return true - } - - if v.Major() != c.con.Major() { - return false - } - - if v.Minor() != c.con.Minor() && !c.minorDirty { - return false - } - - return true -} - -// When there is a .x (dirty) status it automatically opts in to ~. Otherwise -// it's a straight = -func constraintTildeOrEqual(v *Version, c *constraint) bool { - // If there is a pre-release on the version but the constraint isn't looking - // for them assume that pre-releases are not compatible. See issue 21 for - // more details. - if v.Prerelease() != "" && c.con.Prerelease() == "" { - return false - } - - if c.dirty { - c.msg = constraintMsg["~"] - return constraintTilde(v, c) - } - - return v.Equal(c.con) -} - -// ^* --> (any) -// ^2, ^2.x, ^2.x.x --> >=2.0.0, <3.0.0 -// ^2.0, ^2.0.x --> >=2.0.0, <3.0.0 -// ^1.2, ^1.2.x --> >=1.2.0, <2.0.0 -// ^1.2.3 --> >=1.2.3, <2.0.0 -// ^1.2.0 --> >=1.2.0, <2.0.0 -func constraintCaret(v *Version, c *constraint) bool { - // If there is a pre-release on the version but the constraint isn't looking - // for them assume that pre-releases are not compatible. See issue 21 for - // more details. - if v.Prerelease() != "" && c.con.Prerelease() == "" { - return false - } - - if v.LessThan(c.con) { - return false - } - - if v.Major() != c.con.Major() { - return false - } - - return true -} - -var constraintRangeRegex *regexp.Regexp - -const cvRegex string = `v?([0-9|x|X|\*]+)(\.[0-9|x|X|\*]+)?(\.[0-9|x|X|\*]+)?` + - `(-([0-9A-Za-z\-]+(\.[0-9A-Za-z\-]+)*))?` + - `(\+([0-9A-Za-z\-]+(\.[0-9A-Za-z\-]+)*))?` - -func isX(x string) bool { - switch x { - case "x", "*", "X": - return true - default: - return false - } -} - -func rewriteRange(i string) string { - m := constraintRangeRegex.FindAllStringSubmatch(i, -1) - if m == nil { - return i - } - o := i - for _, v := range m { - t := fmt.Sprintf(">= %s, <= %s", v[1], v[11]) - o = strings.Replace(o, v[0], t, 1) - } - - return o -} diff --git a/vendor/github.com/Masterminds/semver/doc.go b/vendor/github.com/Masterminds/semver/doc.go deleted file mode 100644 index 6a6c24c6d..000000000 --- a/vendor/github.com/Masterminds/semver/doc.go +++ /dev/null @@ -1,115 +0,0 @@ -/* -Package semver provides the ability to work with Semantic Versions (http://semver.org) in Go. 
- -Specifically it provides the ability to: - - * Parse semantic versions - * Sort semantic versions - * Check if a semantic version fits within a set of constraints - * Optionally work with a `v` prefix - -Parsing Semantic Versions - -To parse a semantic version use the `NewVersion` function. For example, - - v, err := semver.NewVersion("1.2.3-beta.1+build345") - -If there is an error the version wasn't parseable. The version object has methods -to get the parts of the version, compare it to other versions, convert the -version back into a string, and get the original string. For more details -please see the documentation at https://godoc.org/github.com/Masterminds/semver. - -Sorting Semantic Versions - -A set of versions can be sorted using the `sort` package from the standard library. -For example, - - raw := []string{"1.2.3", "1.0", "1.3", "2", "0.4.2",} - vs := make([]*semver.Version, len(raw)) - for i, r := range raw { - v, err := semver.NewVersion(r) - if err != nil { - t.Errorf("Error parsing version: %s", err) - } - - vs[i] = v - } - - sort.Sort(semver.Collection(vs)) - -Checking Version Constraints - -Checking a version against version constraints is one of the most featureful -parts of the package. - - c, err := semver.NewConstraint(">= 1.2.3") - if err != nil { - // Handle constraint not being parseable. - } - - v, err := semver.NewVersion("1.3") - if err != nil { - // Handle version not being parseable. - } - // Check if the version meets the constraints. The a variable will be true. - a := c.Check(v) - -Basic Comparisons - -There are two elements to the comparisons. First, a comparison string is a list -of comma separated and comparisons. These are then separated by || separated or -comparisons. For example, `">= 1.2, < 3.0.0 || >= 4.2.3"` is looking for a -comparison that's greater than or equal to 1.2 and less than 3.0.0 or is -greater than or equal to 4.2.3. - -The basic comparisons are: - - * `=`: equal (aliased to no operator) - * `!=`: not equal - * `>`: greater than - * `<`: less than - * `>=`: greater than or equal to - * `<=`: less than or equal to - -Hyphen Range Comparisons - -There are multiple methods to handle ranges and the first is hyphens ranges. -These look like: - - * `1.2 - 1.4.5` which is equivalent to `>= 1.2, <= 1.4.5` - * `2.3.4 - 4.5` which is equivalent to `>= 2.3.4, <= 4.5` - -Wildcards In Comparisons - -The `x`, `X`, and `*` characters can be used as a wildcard character. This works -for all comparison operators. When used on the `=` operator it falls -back to the pack level comparison (see tilde below). For example, - - * `1.2.x` is equivalent to `>= 1.2.0, < 1.3.0` - * `>= 1.2.x` is equivalent to `>= 1.2.0` - * `<= 2.x` is equivalent to `<= 3` - * `*` is equivalent to `>= 0.0.0` - -Tilde Range Comparisons (Patch) - -The tilde (`~`) comparison operator is for patch level ranges when a minor -version is specified and major level changes when the minor number is missing. -For example, - - * `~1.2.3` is equivalent to `>= 1.2.3, < 1.3.0` - * `~1` is equivalent to `>= 1, < 2` - * `~2.3` is equivalent to `>= 2.3, < 2.4` - * `~1.2.x` is equivalent to `>= 1.2.0, < 1.3.0` - * `~1.x` is equivalent to `>= 1, < 2` - -Caret Range Comparisons (Major) - -The caret (`^`) comparison operator is for major level changes. This is useful -when comparisons of API versions as a major change is API breaking. 
For example, - - * `^1.2.3` is equivalent to `>= 1.2.3, < 2.0.0` - * `^1.2.x` is equivalent to `>= 1.2.0, < 2.0.0` - * `^2.3` is equivalent to `>= 2.3, < 3` - * `^2.x` is equivalent to `>= 2.0.0, < 3` -*/ -package semver diff --git a/vendor/github.com/Masterminds/semver/version.go b/vendor/github.com/Masterminds/semver/version.go deleted file mode 100644 index 400d4f934..000000000 --- a/vendor/github.com/Masterminds/semver/version.go +++ /dev/null @@ -1,425 +0,0 @@ -package semver - -import ( - "bytes" - "encoding/json" - "errors" - "fmt" - "regexp" - "strconv" - "strings" -) - -// The compiled version of the regex created at init() is cached here so it -// only needs to be created once. -var versionRegex *regexp.Regexp -var validPrereleaseRegex *regexp.Regexp - -var ( - // ErrInvalidSemVer is returned a version is found to be invalid when - // being parsed. - ErrInvalidSemVer = errors.New("Invalid Semantic Version") - - // ErrInvalidMetadata is returned when the metadata is an invalid format - ErrInvalidMetadata = errors.New("Invalid Metadata string") - - // ErrInvalidPrerelease is returned when the pre-release is an invalid format - ErrInvalidPrerelease = errors.New("Invalid Prerelease string") -) - -// SemVerRegex is the regular expression used to parse a semantic version. -const SemVerRegex string = `v?([0-9]+)(\.[0-9]+)?(\.[0-9]+)?` + - `(-([0-9A-Za-z\-]+(\.[0-9A-Za-z\-]+)*))?` + - `(\+([0-9A-Za-z\-]+(\.[0-9A-Za-z\-]+)*))?` - -// ValidPrerelease is the regular expression which validates -// both prerelease and metadata values. -const ValidPrerelease string = `^([0-9A-Za-z\-]+(\.[0-9A-Za-z\-]+)*)$` - -// Version represents a single semantic version. -type Version struct { - major, minor, patch int64 - pre string - metadata string - original string -} - -func init() { - versionRegex = regexp.MustCompile("^" + SemVerRegex + "$") - validPrereleaseRegex = regexp.MustCompile(ValidPrerelease) -} - -// NewVersion parses a given version and returns an instance of Version or -// an error if unable to parse the version. -func NewVersion(v string) (*Version, error) { - m := versionRegex.FindStringSubmatch(v) - if m == nil { - return nil, ErrInvalidSemVer - } - - sv := &Version{ - metadata: m[8], - pre: m[5], - original: v, - } - - var temp int64 - temp, err := strconv.ParseInt(m[1], 10, 64) - if err != nil { - return nil, fmt.Errorf("Error parsing version segment: %s", err) - } - sv.major = temp - - if m[2] != "" { - temp, err = strconv.ParseInt(strings.TrimPrefix(m[2], "."), 10, 64) - if err != nil { - return nil, fmt.Errorf("Error parsing version segment: %s", err) - } - sv.minor = temp - } else { - sv.minor = 0 - } - - if m[3] != "" { - temp, err = strconv.ParseInt(strings.TrimPrefix(m[3], "."), 10, 64) - if err != nil { - return nil, fmt.Errorf("Error parsing version segment: %s", err) - } - sv.patch = temp - } else { - sv.patch = 0 - } - - return sv, nil -} - -// MustParse parses a given version and panics on error. -func MustParse(v string) *Version { - sv, err := NewVersion(v) - if err != nil { - panic(err) - } - return sv -} - -// String converts a Version object to a string. -// Note, if the original version contained a leading v this version will not. -// See the Original() method to retrieve the original value. Semantic Versions -// don't contain a leading v per the spec. Instead it's optional on -// implementation. 
-func (v *Version) String() string { - var buf bytes.Buffer - - fmt.Fprintf(&buf, "%d.%d.%d", v.major, v.minor, v.patch) - if v.pre != "" { - fmt.Fprintf(&buf, "-%s", v.pre) - } - if v.metadata != "" { - fmt.Fprintf(&buf, "+%s", v.metadata) - } - - return buf.String() -} - -// Original returns the original value passed in to be parsed. -func (v *Version) Original() string { - return v.original -} - -// Major returns the major version. -func (v *Version) Major() int64 { - return v.major -} - -// Minor returns the minor version. -func (v *Version) Minor() int64 { - return v.minor -} - -// Patch returns the patch version. -func (v *Version) Patch() int64 { - return v.patch -} - -// Prerelease returns the pre-release version. -func (v *Version) Prerelease() string { - return v.pre -} - -// Metadata returns the metadata on the version. -func (v *Version) Metadata() string { - return v.metadata -} - -// originalVPrefix returns the original 'v' prefix if any. -func (v *Version) originalVPrefix() string { - - // Note, only lowercase v is supported as a prefix by the parser. - if v.original != "" && v.original[:1] == "v" { - return v.original[:1] - } - return "" -} - -// IncPatch produces the next patch version. -// If the current version does not have prerelease/metadata information, -// it unsets metadata and prerelease values, increments patch number. -// If the current version has any of prerelease or metadata information, -// it unsets both values and keeps curent patch value -func (v Version) IncPatch() Version { - vNext := v - // according to http://semver.org/#spec-item-9 - // Pre-release versions have a lower precedence than the associated normal version. - // according to http://semver.org/#spec-item-10 - // Build metadata SHOULD be ignored when determining version precedence. - if v.pre != "" { - vNext.metadata = "" - vNext.pre = "" - } else { - vNext.metadata = "" - vNext.pre = "" - vNext.patch = v.patch + 1 - } - vNext.original = v.originalVPrefix() + "" + vNext.String() - return vNext -} - -// IncMinor produces the next minor version. -// Sets patch to 0. -// Increments minor number. -// Unsets metadata. -// Unsets prerelease status. -func (v Version) IncMinor() Version { - vNext := v - vNext.metadata = "" - vNext.pre = "" - vNext.patch = 0 - vNext.minor = v.minor + 1 - vNext.original = v.originalVPrefix() + "" + vNext.String() - return vNext -} - -// IncMajor produces the next major version. -// Sets patch to 0. -// Sets minor to 0. -// Increments major number. -// Unsets metadata. -// Unsets prerelease status. -func (v Version) IncMajor() Version { - vNext := v - vNext.metadata = "" - vNext.pre = "" - vNext.patch = 0 - vNext.minor = 0 - vNext.major = v.major + 1 - vNext.original = v.originalVPrefix() + "" + vNext.String() - return vNext -} - -// SetPrerelease defines the prerelease value. -// Value must not include the required 'hypen' prefix. -func (v Version) SetPrerelease(prerelease string) (Version, error) { - vNext := v - if len(prerelease) > 0 && !validPrereleaseRegex.MatchString(prerelease) { - return vNext, ErrInvalidPrerelease - } - vNext.pre = prerelease - vNext.original = v.originalVPrefix() + "" + vNext.String() - return vNext, nil -} - -// SetMetadata defines metadata value. -// Value must not include the required 'plus' prefix. 
-func (v Version) SetMetadata(metadata string) (Version, error) { - vNext := v - if len(metadata) > 0 && !validPrereleaseRegex.MatchString(metadata) { - return vNext, ErrInvalidMetadata - } - vNext.metadata = metadata - vNext.original = v.originalVPrefix() + "" + vNext.String() - return vNext, nil -} - -// LessThan tests if one version is less than another one. -func (v *Version) LessThan(o *Version) bool { - return v.Compare(o) < 0 -} - -// GreaterThan tests if one version is greater than another one. -func (v *Version) GreaterThan(o *Version) bool { - return v.Compare(o) > 0 -} - -// Equal tests if two versions are equal to each other. -// Note, versions can be equal with different metadata since metadata -// is not considered part of the comparable version. -func (v *Version) Equal(o *Version) bool { - return v.Compare(o) == 0 -} - -// Compare compares this version to another one. It returns -1, 0, or 1 if -// the version smaller, equal, or larger than the other version. -// -// Versions are compared by X.Y.Z. Build metadata is ignored. Prerelease is -// lower than the version without a prerelease. -func (v *Version) Compare(o *Version) int { - // Compare the major, minor, and patch version for differences. If a - // difference is found return the comparison. - if d := compareSegment(v.Major(), o.Major()); d != 0 { - return d - } - if d := compareSegment(v.Minor(), o.Minor()); d != 0 { - return d - } - if d := compareSegment(v.Patch(), o.Patch()); d != 0 { - return d - } - - // At this point the major, minor, and patch versions are the same. - ps := v.pre - po := o.Prerelease() - - if ps == "" && po == "" { - return 0 - } - if ps == "" { - return 1 - } - if po == "" { - return -1 - } - - return comparePrerelease(ps, po) -} - -// UnmarshalJSON implements JSON.Unmarshaler interface. -func (v *Version) UnmarshalJSON(b []byte) error { - var s string - if err := json.Unmarshal(b, &s); err != nil { - return err - } - temp, err := NewVersion(s) - if err != nil { - return err - } - v.major = temp.major - v.minor = temp.minor - v.patch = temp.patch - v.pre = temp.pre - v.metadata = temp.metadata - v.original = temp.original - temp = nil - return nil -} - -// MarshalJSON implements JSON.Marshaler interface. -func (v *Version) MarshalJSON() ([]byte, error) { - return json.Marshal(v.String()) -} - -func compareSegment(v, o int64) int { - if v < o { - return -1 - } - if v > o { - return 1 - } - - return 0 -} - -func comparePrerelease(v, o string) int { - - // split the prelease versions by their part. The separator, per the spec, - // is a . - sparts := strings.Split(v, ".") - oparts := strings.Split(o, ".") - - // Find the longer length of the parts to know how many loop iterations to - // go through. - slen := len(sparts) - olen := len(oparts) - - l := slen - if olen > slen { - l = olen - } - - // Iterate over each part of the prereleases to compare the differences. - for i := 0; i < l; i++ { - // Since the lentgh of the parts can be different we need to create - // a placeholder. This is to avoid out of bounds issues. - stemp := "" - if i < slen { - stemp = sparts[i] - } - - otemp := "" - if i < olen { - otemp = oparts[i] - } - - d := comparePrePart(stemp, otemp) - if d != 0 { - return d - } - } - - // Reaching here means two versions are of equal value but have different - // metadata (the part following a +). They are not identical in string form - // but the version comparison finds them to be equal. 
- return 0 -} - -func comparePrePart(s, o string) int { - // Fastpath if they are equal - if s == o { - return 0 - } - - // When s or o are empty we can use the other in an attempt to determine - // the response. - if s == "" { - if o != "" { - return -1 - } - return 1 - } - - if o == "" { - if s != "" { - return 1 - } - return -1 - } - - // When comparing strings "99" is greater than "103". To handle - // cases like this we need to detect numbers and compare them. According - // to the semver spec, numbers are always positive. If there is a - at the - // start like -99 this is to be evaluated as an alphanum. numbers always - // have precedence over alphanum. Parsing as Uints because negative numbers - // are ignored. - - oi, n1 := strconv.ParseUint(o, 10, 64) - si, n2 := strconv.ParseUint(s, 10, 64) - - // The case where both are strings compare the strings - if n1 != nil && n2 != nil { - if s > o { - return 1 - } - return -1 - } else if n1 != nil { - // o is a string and s is a number - return -1 - } else if n2 != nil { - // s is a string and o is a number - return 1 - } - // Both are numbers - if si > oi { - return 1 - } - return -1 - -} diff --git a/vendor/github.com/Masterminds/semver/version_fuzz.go b/vendor/github.com/Masterminds/semver/version_fuzz.go deleted file mode 100644 index b42bcd62b..000000000 --- a/vendor/github.com/Masterminds/semver/version_fuzz.go +++ /dev/null @@ -1,10 +0,0 @@ -// +build gofuzz - -package semver - -func Fuzz(data []byte) int { - if _, err := NewVersion(string(data)); err != nil { - return 0 - } - return 1 -} diff --git a/vendor/github.com/OpenPeeDeeP/depguard/v2/.gitignore b/vendor/github.com/OpenPeeDeeP/depguard/v2/.gitignore deleted file mode 100644 index e189bdb22..000000000 --- a/vendor/github.com/OpenPeeDeeP/depguard/v2/.gitignore +++ /dev/null @@ -1,15 +0,0 @@ -# Binaries for programs and plugins -*.exe -*.exe~ -*.dll -*.so -*.dylib - -# Test binary, build with `go test -c` -*.test - -# Output of the go coverage tool, specifically when used with LiteIDE -*.out - -.idea -.null-ls*.go diff --git a/vendor/github.com/OpenPeeDeeP/depguard/v2/LICENSE b/vendor/github.com/OpenPeeDeeP/depguard/v2/LICENSE deleted file mode 100644 index 94a9ed024..000000000 --- a/vendor/github.com/OpenPeeDeeP/depguard/v2/LICENSE +++ /dev/null @@ -1,674 +0,0 @@ - GNU GENERAL PUBLIC LICENSE - Version 3, 29 June 2007 - - Copyright (C) 2007 Free Software Foundation, Inc. - Everyone is permitted to copy and distribute verbatim copies - of this license document, but changing it is not allowed. - - Preamble - - The GNU General Public License is a free, copyleft license for -software and other kinds of works. - - The licenses for most software and other practical works are designed -to take away your freedom to share and change the works. By contrast, -the GNU General Public License is intended to guarantee your freedom to -share and change all versions of a program--to make sure it remains free -software for all its users. We, the Free Software Foundation, use the -GNU General Public License for most of our software; it applies also to -any other work released this way by its authors. You can apply it to -your programs, too. - - When we speak of free software, we are referring to freedom, not -price. 
Our General Public Licenses are designed to make sure that you -have the freedom to distribute copies of free software (and charge for -them if you wish), that you receive source code or can get it if you -want it, that you can change the software or use pieces of it in new -free programs, and that you know you can do these things. - - To protect your rights, we need to prevent others from denying you -these rights or asking you to surrender the rights. Therefore, you have -certain responsibilities if you distribute copies of the software, or if -you modify it: responsibilities to respect the freedom of others. - - For example, if you distribute copies of such a program, whether -gratis or for a fee, you must pass on to the recipients the same -freedoms that you received. You must make sure that they, too, receive -or can get the source code. And you must show them these terms so they -know their rights. - - Developers that use the GNU GPL protect your rights with two steps: -(1) assert copyright on the software, and (2) offer you this License -giving you legal permission to copy, distribute and/or modify it. - - For the developers' and authors' protection, the GPL clearly explains -that there is no warranty for this free software. For both users' and -authors' sake, the GPL requires that modified versions be marked as -changed, so that their problems will not be attributed erroneously to -authors of previous versions. - - Some devices are designed to deny users access to install or run -modified versions of the software inside them, although the manufacturer -can do so. This is fundamentally incompatible with the aim of -protecting users' freedom to change the software. The systematic -pattern of such abuse occurs in the area of products for individuals to -use, which is precisely where it is most unacceptable. Therefore, we -have designed this version of the GPL to prohibit the practice for those -products. If such problems arise substantially in other domains, we -stand ready to extend this provision to those domains in future versions -of the GPL, as needed to protect the freedom of users. - - Finally, every program is threatened constantly by software patents. -States should not allow patents to restrict development and use of -software on general-purpose computers, but in those that do, we wish to -avoid the special danger that patents applied to a free program could -make it effectively proprietary. To prevent this, the GPL assures that -patents cannot be used to render the program non-free. - - The precise terms and conditions for copying, distribution and -modification follow. - - TERMS AND CONDITIONS - - 0. Definitions. - - "This License" refers to version 3 of the GNU General Public License. - - "Copyright" also means copyright-like laws that apply to other kinds of -works, such as semiconductor masks. - - "The Program" refers to any copyrightable work licensed under this -License. Each licensee is addressed as "you". "Licensees" and -"recipients" may be individuals or organizations. - - To "modify" a work means to copy from or adapt all or part of the work -in a fashion requiring copyright permission, other than the making of an -exact copy. The resulting work is called a "modified version" of the -earlier work or a work "based on" the earlier work. - - A "covered work" means either the unmodified Program or a work based -on the Program. 
- - To "propagate" a work means to do anything with it that, without -permission, would make you directly or secondarily liable for -infringement under applicable copyright law, except executing it on a -computer or modifying a private copy. Propagation includes copying, -distribution (with or without modification), making available to the -public, and in some countries other activities as well. - - To "convey" a work means any kind of propagation that enables other -parties to make or receive copies. Mere interaction with a user through -a computer network, with no transfer of a copy, is not conveying. - - An interactive user interface displays "Appropriate Legal Notices" -to the extent that it includes a convenient and prominently visible -feature that (1) displays an appropriate copyright notice, and (2) -tells the user that there is no warranty for the work (except to the -extent that warranties are provided), that licensees may convey the -work under this License, and how to view a copy of this License. If -the interface presents a list of user commands or options, such as a -menu, a prominent item in the list meets this criterion. - - 1. Source Code. - - The "source code" for a work means the preferred form of the work -for making modifications to it. "Object code" means any non-source -form of a work. - - A "Standard Interface" means an interface that either is an official -standard defined by a recognized standards body, or, in the case of -interfaces specified for a particular programming language, one that -is widely used among developers working in that language. - - The "System Libraries" of an executable work include anything, other -than the work as a whole, that (a) is included in the normal form of -packaging a Major Component, but which is not part of that Major -Component, and (b) serves only to enable use of the work with that -Major Component, or to implement a Standard Interface for which an -implementation is available to the public in source code form. A -"Major Component", in this context, means a major essential component -(kernel, window system, and so on) of the specific operating system -(if any) on which the executable work runs, or a compiler used to -produce the work, or an object code interpreter used to run it. - - The "Corresponding Source" for a work in object code form means all -the source code needed to generate, install, and (for an executable -work) run the object code and to modify the work, including scripts to -control those activities. However, it does not include the work's -System Libraries, or general-purpose tools or generally available free -programs which are used unmodified in performing those activities but -which are not part of the work. For example, Corresponding Source -includes interface definition files associated with source files for -the work, and the source code for shared libraries and dynamically -linked subprograms that the work is specifically designed to require, -such as by intimate data communication or control flow between those -subprograms and other parts of the work. - - The Corresponding Source need not include anything that users -can regenerate automatically from other parts of the Corresponding -Source. - - The Corresponding Source for a work in source code form is that -same work. - - 2. Basic Permissions. - - All rights granted under this License are granted for the term of -copyright on the Program, and are irrevocable provided the stated -conditions are met. 
This License explicitly affirms your unlimited -permission to run the unmodified Program. The output from running a -covered work is covered by this License only if the output, given its -content, constitutes a covered work. This License acknowledges your -rights of fair use or other equivalent, as provided by copyright law. - - You may make, run and propagate covered works that you do not -convey, without conditions so long as your license otherwise remains -in force. You may convey covered works to others for the sole purpose -of having them make modifications exclusively for you, or provide you -with facilities for running those works, provided that you comply with -the terms of this License in conveying all material for which you do -not control copyright. Those thus making or running the covered works -for you must do so exclusively on your behalf, under your direction -and control, on terms that prohibit them from making any copies of -your copyrighted material outside their relationship with you. - - Conveying under any other circumstances is permitted solely under -the conditions stated below. Sublicensing is not allowed; section 10 -makes it unnecessary. - - 3. Protecting Users' Legal Rights From Anti-Circumvention Law. - - No covered work shall be deemed part of an effective technological -measure under any applicable law fulfilling obligations under article -11 of the WIPO copyright treaty adopted on 20 December 1996, or -similar laws prohibiting or restricting circumvention of such -measures. - - When you convey a covered work, you waive any legal power to forbid -circumvention of technological measures to the extent such circumvention -is effected by exercising rights under this License with respect to -the covered work, and you disclaim any intention to limit operation or -modification of the work as a means of enforcing, against the work's -users, your or third parties' legal rights to forbid circumvention of -technological measures. - - 4. Conveying Verbatim Copies. - - You may convey verbatim copies of the Program's source code as you -receive it, in any medium, provided that you conspicuously and -appropriately publish on each copy an appropriate copyright notice; -keep intact all notices stating that this License and any -non-permissive terms added in accord with section 7 apply to the code; -keep intact all notices of the absence of any warranty; and give all -recipients a copy of this License along with the Program. - - You may charge any price or no price for each copy that you convey, -and you may offer support or warranty protection for a fee. - - 5. Conveying Modified Source Versions. - - You may convey a work based on the Program, or the modifications to -produce it from the Program, in the form of source code under the -terms of section 4, provided that you also meet all of these conditions: - - a) The work must carry prominent notices stating that you modified - it, and giving a relevant date. - - b) The work must carry prominent notices stating that it is - released under this License and any conditions added under section - 7. This requirement modifies the requirement in section 4 to - "keep intact all notices". - - c) You must license the entire work, as a whole, under this - License to anyone who comes into possession of a copy. This - License will therefore apply, along with any applicable section 7 - additional terms, to the whole of the work, and all its parts, - regardless of how they are packaged. 
This License gives no - permission to license the work in any other way, but it does not - invalidate such permission if you have separately received it. - - d) If the work has interactive user interfaces, each must display - Appropriate Legal Notices; however, if the Program has interactive - interfaces that do not display Appropriate Legal Notices, your - work need not make them do so. - - A compilation of a covered work with other separate and independent -works, which are not by their nature extensions of the covered work, -and which are not combined with it such as to form a larger program, -in or on a volume of a storage or distribution medium, is called an -"aggregate" if the compilation and its resulting copyright are not -used to limit the access or legal rights of the compilation's users -beyond what the individual works permit. Inclusion of a covered work -in an aggregate does not cause this License to apply to the other -parts of the aggregate. - - 6. Conveying Non-Source Forms. - - You may convey a covered work in object code form under the terms -of sections 4 and 5, provided that you also convey the -machine-readable Corresponding Source under the terms of this License, -in one of these ways: - - a) Convey the object code in, or embodied in, a physical product - (including a physical distribution medium), accompanied by the - Corresponding Source fixed on a durable physical medium - customarily used for software interchange. - - b) Convey the object code in, or embodied in, a physical product - (including a physical distribution medium), accompanied by a - written offer, valid for at least three years and valid for as - long as you offer spare parts or customer support for that product - model, to give anyone who possesses the object code either (1) a - copy of the Corresponding Source for all the software in the - product that is covered by this License, on a durable physical - medium customarily used for software interchange, for a price no - more than your reasonable cost of physically performing this - conveying of source, or (2) access to copy the - Corresponding Source from a network server at no charge. - - c) Convey individual copies of the object code with a copy of the - written offer to provide the Corresponding Source. This - alternative is allowed only occasionally and noncommercially, and - only if you received the object code with such an offer, in accord - with subsection 6b. - - d) Convey the object code by offering access from a designated - place (gratis or for a charge), and offer equivalent access to the - Corresponding Source in the same way through the same place at no - further charge. You need not require recipients to copy the - Corresponding Source along with the object code. If the place to - copy the object code is a network server, the Corresponding Source - may be on a different server (operated by you or a third party) - that supports equivalent copying facilities, provided you maintain - clear directions next to the object code saying where to find the - Corresponding Source. Regardless of what server hosts the - Corresponding Source, you remain obligated to ensure that it is - available for as long as needed to satisfy these requirements. - - e) Convey the object code using peer-to-peer transmission, provided - you inform other peers where the object code and Corresponding - Source of the work are being offered to the general public at no - charge under subsection 6d. 
- - A separable portion of the object code, whose source code is excluded -from the Corresponding Source as a System Library, need not be -included in conveying the object code work. - - A "User Product" is either (1) a "consumer product", which means any -tangible personal property which is normally used for personal, family, -or household purposes, or (2) anything designed or sold for incorporation -into a dwelling. In determining whether a product is a consumer product, -doubtful cases shall be resolved in favor of coverage. For a particular -product received by a particular user, "normally used" refers to a -typical or common use of that class of product, regardless of the status -of the particular user or of the way in which the particular user -actually uses, or expects or is expected to use, the product. A product -is a consumer product regardless of whether the product has substantial -commercial, industrial or non-consumer uses, unless such uses represent -the only significant mode of use of the product. - - "Installation Information" for a User Product means any methods, -procedures, authorization keys, or other information required to install -and execute modified versions of a covered work in that User Product from -a modified version of its Corresponding Source. The information must -suffice to ensure that the continued functioning of the modified object -code is in no case prevented or interfered with solely because -modification has been made. - - If you convey an object code work under this section in, or with, or -specifically for use in, a User Product, and the conveying occurs as -part of a transaction in which the right of possession and use of the -User Product is transferred to the recipient in perpetuity or for a -fixed term (regardless of how the transaction is characterized), the -Corresponding Source conveyed under this section must be accompanied -by the Installation Information. But this requirement does not apply -if neither you nor any third party retains the ability to install -modified object code on the User Product (for example, the work has -been installed in ROM). - - The requirement to provide Installation Information does not include a -requirement to continue to provide support service, warranty, or updates -for a work that has been modified or installed by the recipient, or for -the User Product in which it has been modified or installed. Access to a -network may be denied when the modification itself materially and -adversely affects the operation of the network or violates the rules and -protocols for communication across the network. - - Corresponding Source conveyed, and Installation Information provided, -in accord with this section must be in a format that is publicly -documented (and with an implementation available to the public in -source code form), and must require no special password or key for -unpacking, reading or copying. - - 7. Additional Terms. - - "Additional permissions" are terms that supplement the terms of this -License by making exceptions from one or more of its conditions. -Additional permissions that are applicable to the entire Program shall -be treated as though they were included in this License, to the extent -that they are valid under applicable law. If additional permissions -apply only to part of the Program, that part may be used separately -under those permissions, but the entire Program remains governed by -this License without regard to the additional permissions. 
- - When you convey a copy of a covered work, you may at your option -remove any additional permissions from that copy, or from any part of -it. (Additional permissions may be written to require their own -removal in certain cases when you modify the work.) You may place -additional permissions on material, added by you to a covered work, -for which you have or can give appropriate copyright permission. - - Notwithstanding any other provision of this License, for material you -add to a covered work, you may (if authorized by the copyright holders of -that material) supplement the terms of this License with terms: - - a) Disclaiming warranty or limiting liability differently from the - terms of sections 15 and 16 of this License; or - - b) Requiring preservation of specified reasonable legal notices or - author attributions in that material or in the Appropriate Legal - Notices displayed by works containing it; or - - c) Prohibiting misrepresentation of the origin of that material, or - requiring that modified versions of such material be marked in - reasonable ways as different from the original version; or - - d) Limiting the use for publicity purposes of names of licensors or - authors of the material; or - - e) Declining to grant rights under trademark law for use of some - trade names, trademarks, or service marks; or - - f) Requiring indemnification of licensors and authors of that - material by anyone who conveys the material (or modified versions of - it) with contractual assumptions of liability to the recipient, for - any liability that these contractual assumptions directly impose on - those licensors and authors. - - All other non-permissive additional terms are considered "further -restrictions" within the meaning of section 10. If the Program as you -received it, or any part of it, contains a notice stating that it is -governed by this License along with a term that is a further -restriction, you may remove that term. If a license document contains -a further restriction but permits relicensing or conveying under this -License, you may add to a covered work material governed by the terms -of that license document, provided that the further restriction does -not survive such relicensing or conveying. - - If you add terms to a covered work in accord with this section, you -must place, in the relevant source files, a statement of the -additional terms that apply to those files, or a notice indicating -where to find the applicable terms. - - Additional terms, permissive or non-permissive, may be stated in the -form of a separately written license, or stated as exceptions; -the above requirements apply either way. - - 8. Termination. - - You may not propagate or modify a covered work except as expressly -provided under this License. Any attempt otherwise to propagate or -modify it is void, and will automatically terminate your rights under -this License (including any patent licenses granted under the third -paragraph of section 11). - - However, if you cease all violation of this License, then your -license from a particular copyright holder is reinstated (a) -provisionally, unless and until the copyright holder explicitly and -finally terminates your license, and (b) permanently, if the copyright -holder fails to notify you of the violation by some reasonable means -prior to 60 days after the cessation. 
- - Moreover, your license from a particular copyright holder is -reinstated permanently if the copyright holder notifies you of the -violation by some reasonable means, this is the first time you have -received notice of violation of this License (for any work) from that -copyright holder, and you cure the violation prior to 30 days after -your receipt of the notice. - - Termination of your rights under this section does not terminate the -licenses of parties who have received copies or rights from you under -this License. If your rights have been terminated and not permanently -reinstated, you do not qualify to receive new licenses for the same -material under section 10. - - 9. Acceptance Not Required for Having Copies. - - You are not required to accept this License in order to receive or -run a copy of the Program. Ancillary propagation of a covered work -occurring solely as a consequence of using peer-to-peer transmission -to receive a copy likewise does not require acceptance. However, -nothing other than this License grants you permission to propagate or -modify any covered work. These actions infringe copyright if you do -not accept this License. Therefore, by modifying or propagating a -covered work, you indicate your acceptance of this License to do so. - - 10. Automatic Licensing of Downstream Recipients. - - Each time you convey a covered work, the recipient automatically -receives a license from the original licensors, to run, modify and -propagate that work, subject to this License. You are not responsible -for enforcing compliance by third parties with this License. - - An "entity transaction" is a transaction transferring control of an -organization, or substantially all assets of one, or subdividing an -organization, or merging organizations. If propagation of a covered -work results from an entity transaction, each party to that -transaction who receives a copy of the work also receives whatever -licenses to the work the party's predecessor in interest had or could -give under the previous paragraph, plus a right to possession of the -Corresponding Source of the work from the predecessor in interest, if -the predecessor has it or can get it with reasonable efforts. - - You may not impose any further restrictions on the exercise of the -rights granted or affirmed under this License. For example, you may -not impose a license fee, royalty, or other charge for exercise of -rights granted under this License, and you may not initiate litigation -(including a cross-claim or counterclaim in a lawsuit) alleging that -any patent claim is infringed by making, using, selling, offering for -sale, or importing the Program or any portion of it. - - 11. Patents. - - A "contributor" is a copyright holder who authorizes use under this -License of the Program or a work on which the Program is based. The -work thus licensed is called the contributor's "contributor version". - - A contributor's "essential patent claims" are all patent claims -owned or controlled by the contributor, whether already acquired or -hereafter acquired, that would be infringed by some manner, permitted -by this License, of making, using, or selling its contributor version, -but do not include claims that would be infringed only as a -consequence of further modification of the contributor version. For -purposes of this definition, "control" includes the right to grant -patent sublicenses in a manner consistent with the requirements of -this License. 
- - Each contributor grants you a non-exclusive, worldwide, royalty-free -patent license under the contributor's essential patent claims, to -make, use, sell, offer for sale, import and otherwise run, modify and -propagate the contents of its contributor version. - - In the following three paragraphs, a "patent license" is any express -agreement or commitment, however denominated, not to enforce a patent -(such as an express permission to practice a patent or covenant not to -sue for patent infringement). To "grant" such a patent license to a -party means to make such an agreement or commitment not to enforce a -patent against the party. - - If you convey a covered work, knowingly relying on a patent license, -and the Corresponding Source of the work is not available for anyone -to copy, free of charge and under the terms of this License, through a -publicly available network server or other readily accessible means, -then you must either (1) cause the Corresponding Source to be so -available, or (2) arrange to deprive yourself of the benefit of the -patent license for this particular work, or (3) arrange, in a manner -consistent with the requirements of this License, to extend the patent -license to downstream recipients. "Knowingly relying" means you have -actual knowledge that, but for the patent license, your conveying the -covered work in a country, or your recipient's use of the covered work -in a country, would infringe one or more identifiable patents in that -country that you have reason to believe are valid. - - If, pursuant to or in connection with a single transaction or -arrangement, you convey, or propagate by procuring conveyance of, a -covered work, and grant a patent license to some of the parties -receiving the covered work authorizing them to use, propagate, modify -or convey a specific copy of the covered work, then the patent license -you grant is automatically extended to all recipients of the covered -work and works based on it. - - A patent license is "discriminatory" if it does not include within -the scope of its coverage, prohibits the exercise of, or is -conditioned on the non-exercise of one or more of the rights that are -specifically granted under this License. You may not convey a covered -work if you are a party to an arrangement with a third party that is -in the business of distributing software, under which you make payment -to the third party based on the extent of your activity of conveying -the work, and under which the third party grants, to any of the -parties who would receive the covered work from you, a discriminatory -patent license (a) in connection with copies of the covered work -conveyed by you (or copies made from those copies), or (b) primarily -for and in connection with specific products or compilations that -contain the covered work, unless you entered into that arrangement, -or that patent license was granted, prior to 28 March 2007. - - Nothing in this License shall be construed as excluding or limiting -any implied license or other defenses to infringement that may -otherwise be available to you under applicable patent law. - - 12. No Surrender of Others' Freedom. - - If conditions are imposed on you (whether by court order, agreement or -otherwise) that contradict the conditions of this License, they do not -excuse you from the conditions of this License. 
If you cannot convey a -covered work so as to satisfy simultaneously your obligations under this -License and any other pertinent obligations, then as a consequence you may -not convey it at all. For example, if you agree to terms that obligate you -to collect a royalty for further conveying from those to whom you convey -the Program, the only way you could satisfy both those terms and this -License would be to refrain entirely from conveying the Program. - - 13. Use with the GNU Affero General Public License. - - Notwithstanding any other provision of this License, you have -permission to link or combine any covered work with a work licensed -under version 3 of the GNU Affero General Public License into a single -combined work, and to convey the resulting work. The terms of this -License will continue to apply to the part which is the covered work, -but the special requirements of the GNU Affero General Public License, -section 13, concerning interaction through a network will apply to the -combination as such. - - 14. Revised Versions of this License. - - The Free Software Foundation may publish revised and/or new versions of -the GNU General Public License from time to time. Such new versions will -be similar in spirit to the present version, but may differ in detail to -address new problems or concerns. - - Each version is given a distinguishing version number. If the -Program specifies that a certain numbered version of the GNU General -Public License "or any later version" applies to it, you have the -option of following the terms and conditions either of that numbered -version or of any later version published by the Free Software -Foundation. If the Program does not specify a version number of the -GNU General Public License, you may choose any version ever published -by the Free Software Foundation. - - If the Program specifies that a proxy can decide which future -versions of the GNU General Public License can be used, that proxy's -public statement of acceptance of a version permanently authorizes you -to choose that version for the Program. - - Later license versions may give you additional or different -permissions. However, no additional obligations are imposed on any -author or copyright holder as a result of your choosing to follow a -later version. - - 15. Disclaimer of Warranty. - - THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY -APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT -HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY -OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, -THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR -PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM -IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF -ALL NECESSARY SERVICING, REPAIR OR CORRECTION. - - 16. Limitation of Liability. - - IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING -WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS -THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY -GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE -USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF -DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD -PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), -EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF -SUCH DAMAGES. - - 17. 
Interpretation of Sections 15 and 16. - - If the disclaimer of warranty and limitation of liability provided -above cannot be given local legal effect according to their terms, -reviewing courts shall apply local law that most closely approximates -an absolute waiver of all civil liability in connection with the -Program, unless a warranty or assumption of liability accompanies a -copy of the Program in return for a fee. - - END OF TERMS AND CONDITIONS - - How to Apply These Terms to Your New Programs - - If you develop a new program, and you want it to be of the greatest -possible use to the public, the best way to achieve this is to make it -free software which everyone can redistribute and change under these terms. - - To do so, attach the following notices to the program. It is safest -to attach them to the start of each source file to most effectively -state the exclusion of warranty; and each file should have at least -the "copyright" line and a pointer to where the full notice is found. - - - Copyright (C) - - This program is free software: you can redistribute it and/or modify - it under the terms of the GNU General Public License as published by - the Free Software Foundation, either version 3 of the License, or - (at your option) any later version. - - This program is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - GNU General Public License for more details. - - You should have received a copy of the GNU General Public License - along with this program. If not, see . - -Also add information on how to contact you by electronic and paper mail. - - If the program does terminal interaction, make it output a short -notice like this when it starts in an interactive mode: - - Copyright (C) - This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'. - This is free software, and you are welcome to redistribute it - under certain conditions; type `show c' for details. - -The hypothetical commands `show w' and `show c' should show the appropriate -parts of the General Public License. Of course, your program's commands -might be different; for a GUI interface, you would use an "about box". - - You should also get your employer (if you work as a programmer) or school, -if any, to sign a "copyright disclaimer" for the program, if necessary. -For more information on this, and how to apply and follow the GNU GPL, see -. - - The GNU General Public License does not permit incorporating your program -into proprietary programs. If your program is a subroutine library, you -may consider it more useful to permit linking proprietary applications with -the library. If this is what you want to do, use the GNU Lesser General -Public License instead of this License. But first, please read -. diff --git a/vendor/github.com/OpenPeeDeeP/depguard/v2/README.md b/vendor/github.com/OpenPeeDeeP/depguard/v2/README.md deleted file mode 100644 index 2ccfa22c5..000000000 --- a/vendor/github.com/OpenPeeDeeP/depguard/v2/README.md +++ /dev/null @@ -1,163 +0,0 @@ -# Depguard - -A Go linter that checks package imports are in a list of acceptable packages. -This allows you to allow imports from a whole organization or only -allow specific packages within a repository. - -## Install - -```bash -go install github.com/OpenPeeDeeP/depguard@latest -``` - -## Config - -The Depguard binary looks for a file named `^\.?depguard\.(yaml|yml|json|toml)$` in the current working directory. 
Examples include (`.depguard.yml` or `depguard.toml`). - -The following is an example configuration file. - -```json -{ - "main": { - "files": [ - "$all", - "!$test" - ], - "listMode": "Strict", - "allow": [ - "$gostd", - "github.com/OpenPeeDeeP" - ], - "deny": { - "reflect": "Who needs reflection", - } - }, - "tests": { - "files": [ - "$test" - ], - "listMode": "Lax", - "deny": { - "github.com/stretchr/testify": "Please use standard library for tests" - } - } -} -``` - -- The top level is a map of lists. The key of the map is a name that shows up in -the linter's output. -- `files` - list of file globs that will match this list of settings to compare against -- `allow` - list of allowed packages -- `deny` - map of packages that are not allowed where the value is a suggestion -= `listMode` - the mode to use for package matching - -Files are matched using [Globs](https://github.com/gobwas/glob). If the files -list is empty, then all files will match that list. Prefixing a file -with an exclamation mark `!` will put that glob in a "don't match" list. A file -will match a list if it is allowed and not denied. - -> Should always prefix a file glob with `**/` as files are matched against absolute paths. - -Allow is a prefix of packages to allow. A dollar sign `$` can be used at the end -of a package to specify it must be exact match only. - -Deny is a map where the key is a prefix of the package to deny, and the value -is a suggestion on what to use instead. A dollar sign `$` can be used at the end -of a package to specify it must be exact match only. - -A Prefix List just means that a package will match a value, if the value is a -prefix of the package. Example `github.com/OpenPeeDeeP/depguard` package will match -a value of `github.com/OpenPeeDeeP` but won't match `github.com/OpenPeeDeeP/depguard/v2`. - -ListMode is used to determine the package matching priority. There are three -different modes; Original, Strict, and Lax. - -Original is the original way that the package was written to use. It is not recommended -to stay with this and is only here for backwards compatibility. - -Strict, at its roots, is everything is denied unless in allowed. - -Lax, at its roots, is everything is allowed unless it is denied. - -There are cases where a package can be matched in both the allow and denied lists. -You may allow a subpackage but deny the root or vice versa. The `settings_tests.go` file -has many scenarios listed out under `TestListImportAllowed`. These tests will stay -up to date as features are added. - -### Variables - -There are variable replacements for each type of list (file or package). This is -to reduce repetition and tedious behaviors. - -#### File Variables - -> you can still use an exclamation mark `!` in front of a variable to say not to -use it. Example `!$test` will match any file that is not a go test file. - -- `$all` - matches all go files -- `$test` - matches all go test files - -#### Package Variables - -- `$gostd` - matches all of go's standard library (Pulled from GOROOT) - -### Example Configs - -Below: - -- non-test go files will match `Main` and test go files will match `Test`. -- both allow all of go standard library except for the `reflect` package which will -tell the user "Please don't use reflect package". -- go test files are also allowed to use https://github.com/stretchr/testify package -and any sub-package of it. 
- -```yaml -Main: - files: - - $all - - "!$test" - allow: - - $gostd - deny: - reflect: Please don't use reflect package -Test: - files: - - $test - allow: - - $gostd - - github.com/stretchr/testify - deny: - reflect: Please don't use reflect package -``` - -Below: - -- All Go files will match `Main` -- Go files in `internal` will match both `Main` and `Internal` - -```yaml -Main: - files: - - $all -Internal: - files: - - "**/internal/**/*.go" -``` - -Below: - -- All packages are allowed except for `github.com/OpenPeeDeeP/depguard`, though -`github.com/OpenPeeDeeP/depguard/v2` and `github.com/OpenPeeDeeP/depguard/somepackage` -would still be allowed. - -```yaml -Main: - deny: - - github.com/OpenPeeDeeP/depguard$ -``` - -## Golangci-lint - -This linter was built with -[Golangci-lint](https://github.com/golangci/golangci-lint) in mind. It is compatible; -read their docs to see how to enable all of their linters, including this one. diff --git a/vendor/github.com/OpenPeeDeeP/depguard/v2/depguard.go b/vendor/github.com/OpenPeeDeeP/depguard/v2/depguard.go deleted file mode 100644 index 2729091e8..000000000 --- a/vendor/github.com/OpenPeeDeeP/depguard/v2/depguard.go +++ /dev/null @@ -1,95 +0,0 @@ -package depguard - -import ( - "fmt" - "go/ast" - "path/filepath" - "strings" - - "golang.org/x/tools/go/analysis" -) - -// NewAnalyzer creates a new analyzer from the settings passed in. -// This can fail if the passed-in LinterSettings does not compile. -// Use NewUncompiledAnalyzer if you need control over when the compile happens. -func NewAnalyzer(settings *LinterSettings) (*analysis.Analyzer, error) { - s, err := settings.compile() - if err != nil { - return nil, err - } - analyzer := newAnalyzer(s.run) - return analyzer, nil -} - -type UncompiledAnalyzer struct { - Analyzer *analysis.Analyzer - settings *LinterSettings -} - -// NewUncompiledAnalyzer creates a new analyzer from the settings passed in. - // This can never error, unlike NewAnalyzer. -// It is advised to call the Compile method on the returned Analyzer before running. -func NewUncompiledAnalyzer(settings *LinterSettings) *UncompiledAnalyzer { - return &UncompiledAnalyzer{ - Analyzer: newAnalyzer(settings.run), - settings: settings, - } -} - -// Compile the settings ahead of time so each subsequent run of the analyzer doesn't -// need to do this work.
-func (ua *UncompiledAnalyzer) Compile() error { - s, err := ua.settings.compile() - if err != nil { - return err - } - ua.Analyzer.Run = s.run - return nil -} - -func (settings LinterSettings) run(pass *analysis.Pass) (interface{}, error) { - s, err := settings.compile() - if err != nil { - return nil, err - } - return s.run(pass) -} - -func newAnalyzer(run func(*analysis.Pass) (interface{}, error)) *analysis.Analyzer { - return &analysis.Analyzer{ - Name: "depguard", - Doc: "Go linter that checks if package imports are in a list of acceptable packages", - URL: "https://github.com/OpenPeeDeeP/depguard", - Run: run, - RunDespiteErrors: false, - } -} - -func (s linterSettings) run(pass *analysis.Pass) (interface{}, error) { - for _, file := range pass.Files { - // For Windows need to replace separator with '/' - fileName := filepath.ToSlash(pass.Fset.Position(file.Pos()).Filename) - lists := s.whichLists(fileName) - for _, imp := range file.Imports { - for _, l := range lists { - if allowed, sugg := l.importAllowed(rawBasicLit(imp.Path)); !allowed { - diag := analysis.Diagnostic{ - Pos: imp.Pos(), - End: imp.End(), - Message: fmt.Sprintf("import '%s' is not allowed from list '%s'", rawBasicLit(imp.Path), l.name), - } - if sugg != "" { - diag.Message = fmt.Sprintf("%s: %s", diag.Message, sugg) - diag.SuggestedFixes = append(diag.SuggestedFixes, analysis.SuggestedFix{Message: sugg}) - } - pass.Report(diag) - } - } - } - } - return nil, nil -} - -func rawBasicLit(lit *ast.BasicLit) string { - return strings.Trim(lit.Value, "\"") -} diff --git a/vendor/github.com/OpenPeeDeeP/depguard/v2/internal/utils/errors.go b/vendor/github.com/OpenPeeDeeP/depguard/v2/internal/utils/errors.go deleted file mode 100644 index 65325f612..000000000 --- a/vendor/github.com/OpenPeeDeeP/depguard/v2/internal/utils/errors.go +++ /dev/null @@ -1,18 +0,0 @@ -package utils - -import ( - "strings" -) - -type MultiError []error - -func (me MultiError) Error() string { - b := strings.Builder{} - for i, e := range me { - b.WriteString(e.Error()) - if i < len(me)-1 { - b.WriteByte('\n') - } - } - return b.String() -} diff --git a/vendor/github.com/OpenPeeDeeP/depguard/v2/internal/utils/variables.go b/vendor/github.com/OpenPeeDeeP/depguard/v2/internal/utils/variables.go deleted file mode 100644 index 3363bd840..000000000 --- a/vendor/github.com/OpenPeeDeeP/depguard/v2/internal/utils/variables.go +++ /dev/null @@ -1,131 +0,0 @@ -package utils - -import ( - "fmt" - "os" - "os/exec" - "path" - "path/filepath" - "runtime" - "strings" -) - -type Expander interface { - Expand() ([]string, error) -} - -type ExpanderMap map[string]Expander - -var ( - PathExpandable = ExpanderMap{ - "$all": &allExpander{}, - "$test": &testExpander{}, - } - PackageExpandable = ExpanderMap{ - "$gostd": &gostdExpander{}, - } -) - -type allExpander struct{} - -func (*allExpander) Expand() ([]string, error) { - return []string{"**/*.go"}, nil -} - -type testExpander struct{} - -func (*testExpander) Expand() ([]string, error) { - return []string{"**/*_test.go"}, nil -} - -type gostdExpander struct { - cache []string -} - -// We can do this as all imports that are not root are either prefixed with a domain -// or prefixed with `./` or `/` to dictate it is a local file reference -func (e *gostdExpander) Expand() ([]string, error) { - if len(e.cache) != 0 { - return e.cache, nil - } - root := path.Join(findGOROOT(), "src") - fs, err := os.ReadDir(root) - if err != nil { - return nil, fmt.Errorf("could not read GOROOT directory: %w", err) - } - var pkgPrefix 
[]string - for _, f := range fs { - if !f.IsDir() { - continue - } - pkgPrefix = append(pkgPrefix, f.Name()) - } - e.cache = pkgPrefix - return pkgPrefix, nil -} - -func findGOROOT() string { - // code borrowed from https://github.com/golang/tools/blob/86c93e8732cce300d0270bce23117456ce92bb17/cmd/godoc/goroot.go#L15-L30 - if env := os.Getenv("GOROOT"); env != "" { - return filepath.Clean(env) - } - def := filepath.Clean(runtime.GOROOT()) - if runtime.Compiler == "gccgo" { - // gccgo has no real GOROOT, and it certainly doesn't - // depend on the executable's location. - return def - } - out, err := exec.Command("go", "env", "GOROOT").Output() - if err != nil { - return def - } - return strings.TrimSpace(string(out)) -} - -func ExpandSlice(sl []string, exp ExpanderMap) ([]string, error) { - for i, s := range sl { - f, found := exp[s] - if !found { - continue - } - e, err := f.Expand() - if err != nil { - return nil, fmt.Errorf("couldn't expand %s: %w", s, err) - } - sl = insertSlice(sl, i, e...) - } - return sl, nil -} - -func ExpandMap(m map[string]string, exp ExpanderMap) error { - for k, v := range m { - f, found := exp[k] - if !found { - continue - } - e, err := f.Expand() - if err != nil { - return fmt.Errorf("couldn't expand %s: %w", k, err) - } - for _, ex := range e { - m[ex] = v - } - delete(m, k) - } - return nil -} - -func insertSlice(a []string, k int, b ...string) []string { - n := len(a) + len(b) - 1 - if n <= cap(a) { - a2 := a[:n] - copy(a2[k+len(b):], a[k+1:]) - copy(a2[k:], b) - return a2 - } - a2 := make([]string, n) - copy(a2, a[:k]) - copy(a2[k:], b) - copy(a2[k+len(b):], a[k+1:]) - return a2 -} diff --git a/vendor/github.com/OpenPeeDeeP/depguard/v2/settings.go b/vendor/github.com/OpenPeeDeeP/depguard/v2/settings.go deleted file mode 100644 index 311cacc88..000000000 --- a/vendor/github.com/OpenPeeDeeP/depguard/v2/settings.go +++ /dev/null @@ -1,240 +0,0 @@ -package depguard - -import ( - "errors" - "fmt" - "sort" - "strings" - - "github.com/OpenPeeDeeP/depguard/v2/internal/utils" - "github.com/gobwas/glob" -) - -type List struct { - ListMode string `json:"listMode" yaml:"listMode" toml:"listMode" mapstructure:"listMode"` - Files []string `json:"files" yaml:"files" toml:"files" mapstructure:"files"` - Allow []string `json:"allow" yaml:"allow" toml:"allow" mapstructure:"allow"` - Deny map[string]string `json:"deny" yaml:"deny" toml:"deny" mapstructure:"deny"` -} - -type listMode int - -const ( - listModeOriginal listMode = iota - listModeStrict - listModeLax -) - -type list struct { - listMode listMode - name string - files []glob.Glob - negFiles []glob.Glob - allow []string - deny []string - suggestions []string -} - -func (l *List) compile() (*list, error) { - if l == nil { - return nil, nil - } - li := &list{} - var errs utils.MultiError - var err error - - // Determine List Mode - switch strings.ToLower(l.ListMode) { - case "": - li.listMode = listModeOriginal - case "original": - li.listMode = listModeOriginal - case "strict": - li.listMode = listModeStrict - case "lax": - li.listMode = listModeLax - default: - errs = append(errs, fmt.Errorf("%s is not a known list mode", l.ListMode)) - } - - // Compile Files - for _, f := range l.Files { - var negate bool - if len(f) > 0 && f[0] == '!' 
{ - negate = true - f = f[1:] - } - // Expand File if needed - fs, err := utils.ExpandSlice([]string{f}, utils.PathExpandable) - if err != nil { - errs = append(errs, err) - } - for _, exp := range fs { - g, err := glob.Compile(exp, '/') - if err != nil { - errs = append(errs, fmt.Errorf("%s could not be compiled: %w", exp, err)) - continue - } - if negate { - li.negFiles = append(li.negFiles, g) - continue - } - li.files = append(li.files, g) - } - } - - if len(l.Allow) > 0 { - // Expand Allow - l.Allow, err = utils.ExpandSlice(l.Allow, utils.PackageExpandable) - if err != nil { - errs = append(errs, err) - } - - // Sort Allow - li.allow = make([]string, len(l.Allow)) - copy(li.allow, l.Allow) - sort.Strings(li.allow) - } - - if l.Deny != nil { - // Expand Deny Map (to keep suggestions) - err = utils.ExpandMap(l.Deny, utils.PackageExpandable) - if err != nil { - errs = append(errs, err) - } - - // Split Deny Into Package Slice - li.deny = make([]string, 0, len(l.Deny)) - for pkg := range l.Deny { - li.deny = append(li.deny, pkg) - } - - // Sort Deny - sort.Strings(li.deny) - - // Populate Suggestions to match the Deny order - li.suggestions = make([]string, 0, len(li.deny)) - for _, dp := range li.deny { - li.suggestions = append(li.suggestions, strings.TrimSpace(l.Deny[dp])) - } - } - - // Populate the type of this list - if len(li.allow) == 0 && len(li.deny) == 0 { - errs = append(errs, errors.New("must have an Allow and/or Deny package list")) - } - - if len(errs) > 0 { - return nil, errs - } - return li, nil -} - -func (l *list) fileMatch(fileName string) bool { - inAllowed := len(l.files) == 0 || strInGlobList(fileName, l.files) - inDenied := strInGlobList(fileName, l.negFiles) - return inAllowed && !inDenied -} - -func (l *list) importAllowed(imp string) (bool, string) { - inAllowed, aIdx := strInPrefixList(imp, l.allow) - inDenied, dIdx := strInPrefixList(imp, l.deny) - var allowed bool - switch l.listMode { - case listModeOriginal: - inAllowed = len(l.allow) == 0 || inAllowed - allowed = inAllowed && !inDenied - case listModeStrict: - allowed = inAllowed && (!inDenied || len(l.allow[aIdx]) > len(l.deny[dIdx])) - case listModeLax: - allowed = !inDenied || (inAllowed && len(l.allow[aIdx]) > len(l.deny[dIdx])) - default: - allowed = false - } - sugg := "" - if !allowed && inDenied && dIdx != -1 { - sugg = l.suggestions[dIdx] - } - return allowed, sugg -} - -type LinterSettings map[string]*List - -type linterSettings []*list - -func (l LinterSettings) compile() (linterSettings, error) { - if len(l) == 0 { - // Only allow $gostd in all files - set := &List{ - Files: []string{"$all"}, - Allow: []string{"$gostd"}, - } - li, err := set.compile() - if err != nil { - return nil, err - } - li.name = "Main" - return linterSettings{li}, nil - } - names := make([]string, 0, len(l)) - for name := range l { - names = append(names, name) - } - sort.Strings(names) - li := make(linterSettings, 0, len(l)) - var errs utils.MultiError - for _, name := range names { - c, err := l[name].compile() - if err != nil { - errs = append(errs, err) - continue - } - if c == nil { - continue - } - c.name = name - li = append(li, c) - } - if len(errs) > 0 { - return nil, errs - } - - return li, nil -} - -func (ls linterSettings) whichLists(fileName string) []*list { - var matches []*list - for _, l := range ls { - if l.fileMatch(fileName) { - matches = append(matches, l) - } - } - return matches -} - -func strInGlobList(str string, globList []glob.Glob) bool { - for _, g := range globList { - if g.Match(str) { - 
return true - } - } - return false -} - -func strInPrefixList(str string, prefixList []string) (bool, int) { - // Idx represents where in the prefix slice the passed in string would go - // when sorted. -1 Just means that it would be at the very front of the slice. - idx := sort.Search(len(prefixList), func(i int) bool { - return strings.TrimRight(prefixList[i], "$") > str - }) - 1 - // This means that the string passed in has no way to be prefixed by anything - // in the prefix list as it is already smaller then everything - if idx == -1 { - return false, idx - } - ioc := prefixList[idx] - if ioc[len(ioc)-1] == '$' { - return str == ioc[:len(ioc)-1], idx - } - return strings.HasPrefix(str, prefixList[idx]), idx -} diff --git a/vendor/github.com/alecthomas/go-check-sumtype/.goreleaser.yml b/vendor/github.com/alecthomas/go-check-sumtype/.goreleaser.yml deleted file mode 100644 index 33bd03d06..000000000 --- a/vendor/github.com/alecthomas/go-check-sumtype/.goreleaser.yml +++ /dev/null @@ -1,32 +0,0 @@ -project_name: go-check-sumtype -release: - github: - owner: alecthomas - name: go-check-sumtype -env: - - CGO_ENABLED=0 -builds: -- goos: - - linux - - darwin - - windows - goarch: - - arm64 - - amd64 - - "386" - goarm: - - "6" - main: ./cmd/go-check-sumtype - binary: go-check-sumtype -archives: - - - format: tar.gz - name_template: '{{ .Binary }}-{{ .Version }}-{{ .Os }}-{{ .Arch }}{{ if .Arm }}v{{ - .Arm }}{{ end }}' - files: - - COPYING - - README* -snapshot: - name_template: SNAPSHOT-{{ .Commit }} -checksum: - name_template: '{{ .ProjectName }}-{{ .Version }}-checksums.txt' diff --git a/vendor/github.com/alecthomas/go-check-sumtype/COPYING b/vendor/github.com/alecthomas/go-check-sumtype/COPYING deleted file mode 100644 index bb9c20a09..000000000 --- a/vendor/github.com/alecthomas/go-check-sumtype/COPYING +++ /dev/null @@ -1,3 +0,0 @@ -This project is dual-licensed under the Unlicense and MIT licenses. - -You may use this code under the terms of either license. diff --git a/vendor/github.com/alecthomas/go-check-sumtype/LICENSE-MIT b/vendor/github.com/alecthomas/go-check-sumtype/LICENSE-MIT deleted file mode 100644 index 3b0a5dc09..000000000 --- a/vendor/github.com/alecthomas/go-check-sumtype/LICENSE-MIT +++ /dev/null @@ -1,21 +0,0 @@ -The MIT License (MIT) - -Copyright (c) 2015 Andrew Gallant - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. 
diff --git a/vendor/github.com/alecthomas/go-check-sumtype/README.md b/vendor/github.com/alecthomas/go-check-sumtype/README.md deleted file mode 100644 index 36614ef40..000000000 --- a/vendor/github.com/alecthomas/go-check-sumtype/README.md +++ /dev/null @@ -1,120 +0,0 @@ -**Note: This is a fork of the great project [go-sumtype](https://github.com/BurntSushi/go-sumtype) by BurntSushi.** -**The original seems largely unmaintained, and the changes in this fork are backwards incompatible.** - -# go-check-sumtype [![CI](https://github.com/alecthomas/go-check-sumtype/actions/workflows/ci.yml/badge.svg)](https://github.com/alecthomas/go-check-sumtype/actions/workflows/ci.yml) -A simple utility for running exhaustiveness checks on type switch statements. -Exhaustiveness checks are only run on interfaces that are declared to be -"sum types." - -Dual-licensed under MIT or the [UNLICENSE](http://unlicense.org). - -This work was inspired by our code at -[Diffeo](https://diffeo.com). - -## Installation - -```go -$ go get github.com/alecthomas/go-check-sumtype -``` - -For usage info, just run the command: - -``` -$ go-check-sumtype -``` - -Typical usage might look like this: - -``` -$ go-check-sumtype $(go list ./... | grep -v vendor) -``` - -## Usage - -`go-check-sumtype` takes a list of Go package paths or files and looks for sum type -declarations in each package/file provided. Exhaustiveness checks are then -performed for each use of a declared sum type in a type switch statement. -Namely, `go-check-sumtype` will report an error for any type switch statement that -either lacks a `default` clause or does not account for all possible variants. - -Declarations are provided in comments like so: - -``` -//sumtype:decl -type MySumType interface { ... } -``` - -`MySumType` must be *sealed*. That is, part of its interface definition -contains an unexported method. - -`go-check-sumtype` will produce an error if any of the above is not true. - -For valid declarations, `go-check-sumtype` will look for all occurrences in which a -value of type `MySumType` participates in a type switch statement. In those -occurrences, it will attempt to detect whether the type switch is exhaustive -or not. If it's not, `go-check-sumtype` will report an error. For example, running -`go-check-sumtype` on this source file: - -```go -package main - -//sumtype:decl -type MySumType interface { - sealed() -} - -type VariantA struct{} - -func (*VariantA) sealed() {} - -type VariantB struct{} - -func (*VariantB) sealed() {} - -func main() { - switch MySumType(nil).(type) { - case *VariantA: - } -} -``` - -produces the following: - -``` -$ sumtype mysumtype.go -mysumtype.go:18:2: exhaustiveness check failed for sum type 'MySumType': missing cases for VariantB -``` - -Adding either a `default` clause or a clause to handle `*VariantB` will cause -exhaustive checks to pass. - -As a special case, if the type switch statement contains a `default` clause -that always panics, then exhaustiveness checks are still performed. - -## Details and motivation - -Sum types are otherwise known as discriminated unions. That is, a sum type is -a finite set of disjoint values. In type systems that support sum types, the -language will guarantee that if one has a sum type `T`, then its value must -be one of its variants. - -Go's type system does not support sum types. A typical proxy for representing -sum types in Go is to use an interface with an unexported method and define -each variant of the sum type in the same package to satisfy said interface. 
-This guarantees that the set of types that satisfy the interface is closed -at compile time. Performing case analysis on these types is then done with -a type switch statement, e.g., `switch x.(type) { ... }`. Each clause of the -type switch corresponds to a *variant* of the sum type. The downside of this -approach is that Go's type system is not aware of the set of variants, so it -cannot tell you whether case analysis over a sum type is complete or not. - -The `go-check-sumtype` command recognizes this pattern, but it needs a small amount -of help to recognize which interfaces should be treated as sum types, which -is why the `//sumtype:decl` annotation is required. `go-check-sumtype` will -figure out all of the variants of a sum type by finding the set of types -defined in the same package that satisfy the interface specified by the -declaration. - -The `go-check-sumtype` command will prove its worth when you need to add a variant -to an existing sum type. Running `go-check-sumtype` will tell you immediately which -case analyses need to be updated to account for the new variant. diff --git a/vendor/github.com/alecthomas/go-check-sumtype/UNLICENSE b/vendor/github.com/alecthomas/go-check-sumtype/UNLICENSE deleted file mode 100644 index 68a49daad..000000000 --- a/vendor/github.com/alecthomas/go-check-sumtype/UNLICENSE +++ /dev/null @@ -1,24 +0,0 @@ -This is free and unencumbered software released into the public domain. - -Anyone is free to copy, modify, publish, use, compile, sell, or -distribute this software, either in source code form or as a compiled -binary, for any purpose, commercial or non-commercial, and by any -means. - -In jurisdictions that recognize copyright laws, the author or authors -of this software dedicate any and all copyright interest in the -software to the public domain. We make this dedication for the benefit -of the public at large and to the detriment of our heirs and -successors. We intend this dedication to be an overt act of -relinquishment in perpetuity of all present and future rights to this -software under copyright law. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. -IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR -OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, -ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR -OTHER DEALINGS IN THE SOFTWARE. - -For more information, please refer to diff --git a/vendor/github.com/alecthomas/go-check-sumtype/check.go b/vendor/github.com/alecthomas/go-check-sumtype/check.go deleted file mode 100644 index 21d751af4..000000000 --- a/vendor/github.com/alecthomas/go-check-sumtype/check.go +++ /dev/null @@ -1,184 +0,0 @@ -package gochecksumtype - -import ( - "fmt" - "go/ast" - "go/token" - "go/types" - "sort" - "strings" - - "golang.org/x/tools/go/packages" -) - -// inexhaustiveError is returned from check for each occurrence of inexhaustive -// case analysis in a Go type switch statement. 
-type inexhaustiveError struct { - Position token.Position - Def sumTypeDef - Missing []types.Object -} - -func (e inexhaustiveError) Pos() token.Position { return e.Position } -func (e inexhaustiveError) Error() string { - return fmt.Sprintf( - "%s: exhaustiveness check failed for sum type %q (from %s): missing cases for %s", - e.Pos(), e.Def.Decl.TypeName, e.Def.Decl.Pos, strings.Join(e.Names(), ", ")) -} - -// Names returns a sorted list of names corresponding to the missing variant -// cases. -func (e inexhaustiveError) Names() []string { - var list []string - for _, o := range e.Missing { - list = append(list, o.Name()) - } - sort.Strings(list) - return list -} - -// check does exhaustiveness checking for the given sum type definitions in the -// given package. Every instance of inexhaustive case analysis is returned. -func check(pkg *packages.Package, defs []sumTypeDef) []error { - var errs []error - for _, astfile := range pkg.Syntax { - ast.Inspect(astfile, func(n ast.Node) bool { - swtch, ok := n.(*ast.TypeSwitchStmt) - if !ok { - return true - } - if err := checkSwitch(pkg, defs, swtch); err != nil { - errs = append(errs, err) - } - return true - }) - } - return errs -} - -// checkSwitch performs an exhaustiveness check on the given type switch -// statement. If the type switch is used on a sum type and does not cover -// all variants of that sum type, then an error is returned indicating which -// variants were missed. -// -// Note that if the type switch contains a non-panicing default case, then -// exhaustiveness checks are disabled. -func checkSwitch( - pkg *packages.Package, - defs []sumTypeDef, - swtch *ast.TypeSwitchStmt, -) error { - def, missing := missingVariantsInSwitch(pkg, defs, swtch) - if len(missing) > 0 { - return inexhaustiveError{ - Position: pkg.Fset.Position(swtch.Pos()), - Def: *def, - Missing: missing, - } - } - return nil -} - -// missingVariantsInSwitch returns a list of missing variants corresponding to -// the given switch statement. The corresponding sum type definition is also -// returned. (If no sum type definition could be found, then no exhaustiveness -// checks are performed, and therefore, no missing variants are returned.) -func missingVariantsInSwitch( - pkg *packages.Package, - defs []sumTypeDef, - swtch *ast.TypeSwitchStmt, -) (*sumTypeDef, []types.Object) { - asserted := findTypeAssertExpr(swtch) - ty := pkg.TypesInfo.TypeOf(asserted) - def := findDef(defs, ty) - if def == nil { - // We couldn't find a corresponding sum type, so there's - // nothing we can do to check it. - return nil, nil - } - variantExprs, hasDefault := switchVariants(swtch) - if hasDefault && !defaultClauseAlwaysPanics(swtch) { - // A catch-all case defeats all exhaustiveness checks. - return def, nil - } - var variantTypes []types.Type - for _, expr := range variantExprs { - variantTypes = append(variantTypes, pkg.TypesInfo.TypeOf(expr)) - } - return def, def.missing(variantTypes) -} - -// switchVariants returns all case expressions found in a type switch. This -// includes expressions from cases that have a list of expressions. -func switchVariants(swtch *ast.TypeSwitchStmt) (exprs []ast.Expr, hasDefault bool) { - for _, stmt := range swtch.Body.List { - clause := stmt.(*ast.CaseClause) - if clause.List == nil { - hasDefault = true - } else { - exprs = append(exprs, clause.List...) - } - } - return -} - -// defaultClauseAlwaysPanics returns true if the given switch statement has a -// default clause that always panics. 
Note that this is done on a best-effort -// basis. While there will never be any false positives, there may be false -// negatives. -// -// If the given switch statement has no default clause, then this function -// panics. -func defaultClauseAlwaysPanics(swtch *ast.TypeSwitchStmt) bool { - var clause *ast.CaseClause - for _, stmt := range swtch.Body.List { - c := stmt.(*ast.CaseClause) - if c.List == nil { - clause = c - break - } - } - if clause == nil { - panic("switch statement has no default clause") - } - if len(clause.Body) != 1 { - return false - } - exprStmt, ok := clause.Body[0].(*ast.ExprStmt) - if !ok { - return false - } - callExpr, ok := exprStmt.X.(*ast.CallExpr) - if !ok { - return false - } - fun, ok := callExpr.Fun.(*ast.Ident) - if !ok { - return false - } - return fun.Name == "panic" -} - -// findTypeAssertExpr extracts the expression that is being type asserted from a -// type swtich statement. -func findTypeAssertExpr(swtch *ast.TypeSwitchStmt) ast.Expr { - var expr ast.Expr - if assign, ok := swtch.Assign.(*ast.AssignStmt); ok { - expr = assign.Rhs[0] - } else { - expr = swtch.Assign.(*ast.ExprStmt).X - } - return expr.(*ast.TypeAssertExpr).X -} - -// findDef returns the sum type definition corresponding to the given type. If -// no such sum type definition exists, then nil is returned. -func findDef(defs []sumTypeDef, needle types.Type) *sumTypeDef { - for i := range defs { - def := &defs[i] - if types.Identical(needle.Underlying(), def.Ty) { - return def - } - } - return nil -} diff --git a/vendor/github.com/alecthomas/go-check-sumtype/decl.go b/vendor/github.com/alecthomas/go-check-sumtype/decl.go deleted file mode 100644 index 9dec9eefd..000000000 --- a/vendor/github.com/alecthomas/go-check-sumtype/decl.go +++ /dev/null @@ -1,69 +0,0 @@ -package gochecksumtype - -import ( - "go/ast" - "go/token" - "strings" - - "golang.org/x/tools/go/packages" -) - -// sumTypeDecl is a declaration of a sum type in a Go source file. -type sumTypeDecl struct { - // The package path that contains this decl. - Package *packages.Package - // The type named by this decl. - TypeName string - // Position where the declaration was found. - Pos token.Position -} - -// Location returns a short string describing where this declaration was found. -func (d sumTypeDecl) Location() string { - return d.Pos.String() -} - -// findSumTypeDecls searches every package given for sum type declarations of -// the form `sumtype:decl`. 
-func findSumTypeDecls(pkgs []*packages.Package) ([]sumTypeDecl, error) { - var decls []sumTypeDecl - var retErr error - for _, pkg := range pkgs { - for _, file := range pkg.Syntax { - ast.Inspect(file, func(node ast.Node) bool { - if node == nil { - return true - } - decl, ok := node.(*ast.GenDecl) - if !ok || decl.Doc == nil { - return true - } - var tspec *ast.TypeSpec - for _, spec := range decl.Specs { - ts, ok := spec.(*ast.TypeSpec) - if !ok { - continue - } - tspec = ts - } - for _, line := range decl.Doc.List { - if !strings.HasPrefix(line.Text, "//sumtype:decl") { - continue - } - pos := pkg.Fset.Position(decl.Pos()) - if tspec == nil { - retErr = notFoundError{Decl: sumTypeDecl{Package: pkg, Pos: pos}} - return false - } - pos = pkg.Fset.Position(tspec.Pos()) - decl := sumTypeDecl{Package: pkg, TypeName: tspec.Name.Name, Pos: pos} - debugf("found sum type decl: %s.%s", decl.Package.PkgPath, decl.TypeName) - decls = append(decls, decl) - break - } - return true - }) - } - } - return decls, retErr -} diff --git a/vendor/github.com/alecthomas/go-check-sumtype/def.go b/vendor/github.com/alecthomas/go-check-sumtype/def.go deleted file mode 100644 index 24729ac01..000000000 --- a/vendor/github.com/alecthomas/go-check-sumtype/def.go +++ /dev/null @@ -1,173 +0,0 @@ -package gochecksumtype - -import ( - "flag" - "fmt" - "go/token" - "go/types" - "log" -) - -var debug = flag.Bool("debug", false, "enable debug logging") - -func debugf(format string, args ...interface{}) { - if *debug { - log.Printf(format, args...) - } -} - -// Error as returned by Run() -type Error interface { - error - Pos() token.Position -} - -// unsealedError corresponds to a declared sum type whose interface is not -// sealed. A sealed interface requires at least one unexported method. -type unsealedError struct { - Decl sumTypeDecl -} - -func (e unsealedError) Pos() token.Position { return e.Decl.Pos } -func (e unsealedError) Error() string { - return fmt.Sprintf( - "%s: interface '%s' is not sealed "+ - "(sealing requires at least one unexported method)", - e.Decl.Location(), e.Decl.TypeName) -} - -// notFoundError corresponds to a declared sum type whose type definition -// could not be found in the same Go package. -type notFoundError struct { - Decl sumTypeDecl -} - -func (e notFoundError) Pos() token.Position { return e.Decl.Pos } -func (e notFoundError) Error() string { - return fmt.Sprintf("%s: type '%s' is not defined", e.Decl.Location(), e.Decl.TypeName) -} - -// notInterfaceError corresponds to a declared sum type that does not -// correspond to an interface. -type notInterfaceError struct { - Decl sumTypeDecl -} - -func (e notInterfaceError) Pos() token.Position { return e.Decl.Pos } -func (e notInterfaceError) Error() string { - return fmt.Sprintf("%s: type '%s' is not an interface", e.Decl.Location(), e.Decl.TypeName) -} - -// sumTypeDef corresponds to the definition of a Go interface that is -// interpreted as a sum type. Its variants are determined by finding all types -// that implement said interface in the same package. -type sumTypeDef struct { - Decl sumTypeDecl - Ty *types.Interface - Variants []types.Object -} - -// findSumTypeDefs attempts to find a Go type definition for each of the given -// sum type declarations. If no such sum type definition could be found for -// any of the given declarations, then an error is returned. 
-func findSumTypeDefs(decls []sumTypeDecl) ([]sumTypeDef, []error) { - var defs []sumTypeDef - var errs []error - for _, decl := range decls { - def, err := newSumTypeDef(decl.Package.Types, decl) - if err != nil { - errs = append(errs, err) - continue - } - if def == nil { - errs = append(errs, notFoundError{decl}) - continue - } - defs = append(defs, *def) - } - return defs, errs -} - -// newSumTypeDef attempts to extract a sum type definition from a single -// package. If no such type corresponds to the given decl, then this function -// returns a nil def and a nil error. -// -// If the decl corresponds to a type that isn't an interface containing at -// least one unexported method, then this returns an error. -func newSumTypeDef(pkg *types.Package, decl sumTypeDecl) (*sumTypeDef, error) { - obj := pkg.Scope().Lookup(decl.TypeName) - if obj == nil { - return nil, nil - } - iface, ok := obj.Type().Underlying().(*types.Interface) - if !ok { - return nil, notInterfaceError{decl} - } - hasUnexported := false - for i := 0; i < iface.NumMethods(); i++ { - if !iface.Method(i).Exported() { - hasUnexported = true - break - } - } - if !hasUnexported { - return nil, unsealedError{decl} - } - def := &sumTypeDef{ - Decl: decl, - Ty: iface, - } - debugf("searching for variants of %s.%s\n", pkg.Path(), decl.TypeName) - for _, name := range pkg.Scope().Names() { - obj, ok := pkg.Scope().Lookup(name).(*types.TypeName) - if !ok { - continue - } - ty := obj.Type() - if types.Identical(ty.Underlying(), iface) { - continue - } - // Skip generic types. - if named, ok := ty.(*types.Named); ok && named.TypeParams() != nil { - continue - } - if types.Implements(ty, iface) || types.Implements(types.NewPointer(ty), iface) { - debugf(" found variant: %s.%s\n", pkg.Path(), obj.Name()) - def.Variants = append(def.Variants, obj) - } - } - return def, nil -} - -func (def *sumTypeDef) String() string { - return def.Decl.TypeName -} - -// missing returns a list of variants in this sum type that are not in the -// given list of types. -func (def *sumTypeDef) missing(tys []types.Type) []types.Object { - // TODO(ag): This is O(n^2). Fix that. /shrug - var missing []types.Object - for _, v := range def.Variants { - found := false - varty := indirect(v.Type()) - for _, ty := range tys { - ty = indirect(ty) - if types.Identical(varty, ty) { - found = true - } - } - if !found { - missing = append(missing, v) - } - } - return missing -} - -// indirect dereferences through an arbitrary number of pointer types. -func indirect(ty types.Type) types.Type { - if ty, ok := ty.(*types.Pointer); ok { - return indirect(ty.Elem()) - } - return ty -} diff --git a/vendor/github.com/alecthomas/go-check-sumtype/doc.go b/vendor/github.com/alecthomas/go-check-sumtype/doc.go deleted file mode 100644 index 2b6e86764..000000000 --- a/vendor/github.com/alecthomas/go-check-sumtype/doc.go +++ /dev/null @@ -1,53 +0,0 @@ -/* -sumtype takes a list of Go package paths or files and looks for sum type -declarations in each package/file provided. Exhaustiveness checks are then -performed for each use of a declared sum type in a type switch statement. -Namely, sumtype will report an error for any type switch statement that -either lacks a default clause or does not account for all possible variants. - -Declarations are provided in comments like so: - - //sumtype:decl - type MySumType interface { ... } - -MySumType must be *sealed*. That is, part of its interface definition contains -an unexported method. 
- -sumtype will produce an error if any of the above is not true. - -For valid declarations, sumtype will look for all occurrences in which a -value of type MySumType participates in a type switch statement. In those -occurrences, it will attempt to detect whether the type switch is exhaustive -or not. If it's not, sumtype will report an error. For example: - - $ cat mysumtype.go - package gochecksumtype - - //sumtype:decl - type MySumType interface { - sealed() - } - - type VariantA struct{} - - func (a *VariantA) sealed() {} - - type VariantB struct{} - - func (b *VariantB) sealed() {} - - func main() { - switch MySumType(nil).(type) { - case *VariantA: - } - } - $ sumtype mysumtype.go - mysumtype.go:18:2: exhaustiveness check failed for sum type 'MySumType': missing cases for VariantB - -Adding either a default clause or a clause to handle *VariantB will cause -exhaustive checks to pass. - -As a special case, if the type switch statement contains a default clause -that always panics, then exhaustiveness checks are still performed. -*/ -package gochecksumtype diff --git a/vendor/github.com/alecthomas/go-check-sumtype/run.go b/vendor/github.com/alecthomas/go-check-sumtype/run.go deleted file mode 100644 index fdcb643c5..000000000 --- a/vendor/github.com/alecthomas/go-check-sumtype/run.go +++ /dev/null @@ -1,26 +0,0 @@ -package gochecksumtype - -import "golang.org/x/tools/go/packages" - -// Run sumtype checking on the given packages. -func Run(pkgs []*packages.Package) []error { - var errs []error - - decls, err := findSumTypeDecls(pkgs) - if err != nil { - return []error{err} - } - - defs, defErrs := findSumTypeDefs(decls) - errs = append(errs, defErrs...) - if len(defs) == 0 { - return errs - } - - for _, pkg := range pkgs { - if pkgErrs := check(pkg, defs); pkgErrs != nil { - errs = append(errs, pkgErrs...) - } - } - return errs -} diff --git a/vendor/github.com/alexkohler/nakedret/v2/.gitignore b/vendor/github.com/alexkohler/nakedret/v2/.gitignore deleted file mode 100644 index b4822913a..000000000 --- a/vendor/github.com/alexkohler/nakedret/v2/.gitignore +++ /dev/null @@ -1,8 +0,0 @@ -# editor specific -.vscode - -# binary -/nakedret - -# usage video for docs -.github/images diff --git a/vendor/github.com/alexkohler/nakedret/v2/LICENSE b/vendor/github.com/alexkohler/nakedret/v2/LICENSE deleted file mode 100644 index 9310fbcff..000000000 --- a/vendor/github.com/alexkohler/nakedret/v2/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -MIT License - -Copyright (c) 2017 Alex Kohler - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. diff --git a/vendor/github.com/alexkohler/nakedret/v2/README.md b/vendor/github.com/alexkohler/nakedret/v2/README.md deleted file mode 100644 index e30a0cde7..000000000 --- a/vendor/github.com/alexkohler/nakedret/v2/README.md +++ /dev/null @@ -1,125 +0,0 @@ -# nakedret - -nakedret is a Go static analysis tool to find naked returns in functions greater than a specified function length. - -## Installation -Install Nakedret via go install: - -```cmd -go install github.com/alexkohler/nakedret/cmd/nakedret@latest -``` - -If you have not already added your `GOPATH/bin` directory to your `PATH` environment variable then you will need to do so. - -Windows (cmd): -```cmd -set PATH=%PATH%;C:\your\GOPATH\bin -``` - -Bash (you can verify a path has been set): -```Bash -# Check if nakedret is on PATH -which nakedret -export PATH=$PATH:/your/GOPATH/bin #to set path if it does not exist -``` - -## Usage - -Similar to other Go static anaylsis tools (such as `golint`, `go vet`), nakedret can be invoked with one or more filenames, directories, or packages named by its import path. Nakedret also supports the `...` wildcard. - - nakedret [flags] files/directories/packages - -Currently, the only flag supported is -l, which is an optional numeric flag to specify the maximum length a function can be (in terms of line length). If not specified, it defaults to 5. - -It can also be run using `go vet`: - -```shell -go vet -vettool=$(which nakedret) ./... -``` - -## Purpose - -As noted in Go's [Code Review comments](https://github.com/golang/go/wiki/CodeReviewComments#named-result-parameters): - -> Naked returns are okay if the function is a handful of lines. Once it's a medium sized function, be explicit with your return -> values. Corollary: it's not worth it to name result parameters just because it enables you to use naked returns. Clarity of docs is always more important than saving a line or two in your function. - -This tool aims to catch naked returns on non-trivial functions. - -## Example - -Let's take the `types` package in the Go source as an example: - -```Bash -$ nakedret -l 25 types/ -types/check.go:245 checkFiles naked returns on 26 line function -types/typexpr.go:443 collectParams naked returns on 53 line function -types/stmt.go:275 caseTypes naked returns on 27 line function -types/lookup.go:275 MissingMethod naked returns on 39 line function -``` - -Below is one of the not so intuitive uses of naked returns in `types/lookup.go` found by nakedret (nakedret will return the line number of the last naked return in the function): - - -```Go -func MissingMethod(V Type, T *Interface, static bool) (method *Func, wrongType bool) { - // fast path for common case - if T.Empty() { - return - } - - // TODO(gri) Consider using method sets here. Might be more efficient. - - if ityp, _ := V.Underlying().(*Interface); ityp != nil { - // TODO(gri) allMethods is sorted - can do this more efficiently - for _, m := range T.allMethods { - _, obj := lookupMethod(ityp.allMethods, m.pkg, m.name) - switch { - case obj == nil: - if static { - return m, false - } - case !Identical(obj.Type(), m.typ): - return m, true - } - } - return - } - - // A concrete type implements T if it implements all methods of T. 
- for _, m := range T.allMethods { - obj, _, _ := lookupFieldOrMethod(V, false, m.pkg, m.name) - - f, _ := obj.(*Func) - if f == nil { - return m, false - } - - if !Identical(f.typ, m.typ) { - return m, true - } - } - - return -} -``` - -## TODO - -- Unit tests (may require some refactoring to do correctly) -- supporting toggling of `build.Context.UseAllFiles` may be useful for some. -- Configuration on whether or not to run on test files -- Vim quickfix format? - - -## Contributing - -Pull requests welcome! - - -## Other static analysis tools - -If you've enjoyed nakedret, take a look at my other static anaylsis tools! - -- [unimport](https://github.com/alexkohler/unimport) - Finds unnecessary import aliases -- [prealloc](https://github.com/alexkohler/prealloc) - Finds slice declarations that could potentially be preallocated. diff --git a/vendor/github.com/alexkohler/nakedret/v2/import.go b/vendor/github.com/alexkohler/nakedret/v2/import.go deleted file mode 100644 index dea842333..000000000 --- a/vendor/github.com/alexkohler/nakedret/v2/import.go +++ /dev/null @@ -1,310 +0,0 @@ -package nakedret - -/* - -This file holds a direct copy of the import path matching code of -https://github.com/golang/go/blob/master/src/cmd/go/main.go. It can be -replaced when https://golang.org/issue/8768 is resolved. - -It has been updated to follow upstream changes in a few ways. - -*/ - -import ( - "fmt" - "go/build" - "log" - "os" - "path" - "path/filepath" - "regexp" - "runtime" - "strings" -) - -var buildContext = build.Default - -var ( - goroot = filepath.Clean(runtime.GOROOT()) - gorootSrc = filepath.Join(goroot, "src") -) - -// importPathsNoDotExpansion returns the import paths to use for the given -// command line, but it does no ... expansion. -func importPathsNoDotExpansion(args []string) []string { - if len(args) == 0 { - return []string{"."} - } - var out []string - for _, a := range args { - // Arguments are supposed to be import paths, but - // as a courtesy to Windows developers, rewrite \ to / - // in command-line arguments. Handles .\... and so on. - if filepath.Separator == '\\' { - a = strings.Replace(a, `\`, `/`, -1) - } - - // Put argument in canonical form, but preserve leading ./. - if strings.HasPrefix(a, "./") { - a = "./" + path.Clean(a) - if a == "./." { - a = "." - } - } else { - a = path.Clean(a) - } - if a == "all" || a == "std" { - out = append(out, allPackages(a)...) - continue - } - out = append(out, a) - } - return out -} - -// importPaths returns the import paths to use for the given command line. -func importPaths(args []string) []string { - args = importPathsNoDotExpansion(args) - var out []string - for _, a := range args { - if strings.Contains(a, "...") { - if build.IsLocalImport(a) { - out = append(out, allPackagesInFS(a)...) - } else { - out = append(out, allPackages(a)...) - } - continue - } - out = append(out, a) - } - return out -} - -// matchPattern(pattern)(name) reports whether -// name matches pattern. Pattern is a limited glob -// pattern in which '...' means 'any string' and there -// is no other special syntax. -func matchPattern(pattern string) func(name string) bool { - re := regexp.QuoteMeta(pattern) - re = strings.Replace(re, `\.\.\.`, `.*`, -1) - // Special case: foo/... matches foo too. 
- if strings.HasSuffix(re, `/.*`) { - re = re[:len(re)-len(`/.*`)] + `(/.*)?` - } - reg := regexp.MustCompile(`^` + re + `$`) - return func(name string) bool { - return reg.MatchString(name) - } -} - -// hasPathPrefix reports whether the path s begins with the -// elements in prefix. -func hasPathPrefix(s, prefix string) bool { - switch { - default: - return false - case len(s) == len(prefix): - return s == prefix - case len(s) > len(prefix): - if prefix != "" && prefix[len(prefix)-1] == '/' { - return strings.HasPrefix(s, prefix) - } - return s[len(prefix)] == '/' && s[:len(prefix)] == prefix - } -} - -// treeCanMatchPattern(pattern)(name) reports whether -// name or children of name can possibly match pattern. -// Pattern is the same limited glob accepted by matchPattern. -func treeCanMatchPattern(pattern string) func(name string) bool { - wildCard := false - if i := strings.Index(pattern, "..."); i >= 0 { - wildCard = true - pattern = pattern[:i] - } - return func(name string) bool { - return len(name) <= len(pattern) && hasPathPrefix(pattern, name) || - wildCard && strings.HasPrefix(name, pattern) - } -} - -// allPackages returns all the packages that can be found -// under the $GOPATH directories and $GOROOT matching pattern. -// The pattern is either "all" (all packages), "std" (standard packages) -// or a path including "...". -func allPackages(pattern string) []string { - pkgs := matchPackages(pattern) - if len(pkgs) == 0 { - fmt.Fprintf(os.Stderr, "warning: %q matched no packages\n", pattern) - } - return pkgs -} - -func matchPackages(pattern string) []string { - match := func(string) bool { return true } - treeCanMatch := func(string) bool { return true } - if pattern != "all" && pattern != "std" { - match = matchPattern(pattern) - treeCanMatch = treeCanMatchPattern(pattern) - } - - have := map[string]bool{ - "builtin": true, // ignore pseudo-package that exists only for documentation - } - if !buildContext.CgoEnabled { - have["runtime/cgo"] = true // ignore during walk - } - var pkgs []string - - // Commands - cmd := filepath.Join(goroot, "src/cmd") + string(filepath.Separator) - filepath.Walk(cmd, func(path string, fi os.FileInfo, err error) error { - if err != nil || !fi.IsDir() || path == cmd { - return nil - } - name := path[len(cmd):] - if !treeCanMatch(name) { - return filepath.SkipDir - } - // Commands are all in cmd/, not in subdirectories. - if strings.Contains(name, string(filepath.Separator)) { - return filepath.SkipDir - } - - // We use, e.g., cmd/gofmt as the pseudo import path for gofmt. - name = "cmd/" + name - if have[name] { - return nil - } - have[name] = true - if !match(name) { - return nil - } - _, err = buildContext.ImportDir(path, 0) - if err != nil { - if _, noGo := err.(*build.NoGoError); !noGo { - log.Print(err) - } - return nil - } - pkgs = append(pkgs, name) - return nil - }) - - for _, src := range buildContext.SrcDirs() { - if (pattern == "std" || pattern == "cmd") && src != gorootSrc { - continue - } - src = filepath.Clean(src) + string(filepath.Separator) - root := src - if pattern == "cmd" { - root += "cmd" + string(filepath.Separator) - } - filepath.Walk(root, func(path string, fi os.FileInfo, err error) error { - if err != nil || !fi.IsDir() || path == src { - return nil - } - - // Avoid .foo, _foo, testdata and vendor directory trees. 
- _, elem := filepath.Split(path) - if strings.HasPrefix(elem, ".") || strings.HasPrefix(elem, "_") || elem == "testdata" || elem == "vendor" { - return filepath.SkipDir - } - - name := filepath.ToSlash(path[len(src):]) - if pattern == "std" && (strings.Contains(name, ".") || name == "cmd") { - // The name "std" is only the standard library. - // If the name is cmd, it's the root of the command tree. - return filepath.SkipDir - } - if !treeCanMatch(name) { - return filepath.SkipDir - } - if have[name] { - return nil - } - have[name] = true - if !match(name) { - return nil - } - _, err = buildContext.ImportDir(path, 0) - if err != nil { - if _, noGo := err.(*build.NoGoError); noGo { - return nil - } - } - pkgs = append(pkgs, name) - return nil - }) - } - return pkgs -} - -// allPackagesInFS is like allPackages but is passed a pattern -// beginning ./ or ../, meaning it should scan the tree rooted -// at the given directory. There are ... in the pattern too. -func allPackagesInFS(pattern string) []string { - pkgs := matchPackagesInFS(pattern) - if len(pkgs) == 0 { - fmt.Fprintf(os.Stderr, "warning: %q matched no packages\n", pattern) - } - return pkgs -} - -func matchPackagesInFS(pattern string) []string { - // Find directory to begin the scan. - // Could be smarter but this one optimization - // is enough for now, since ... is usually at the - // end of a path. - i := strings.Index(pattern, "...") - dir, _ := path.Split(pattern[:i]) - - // pattern begins with ./ or ../. - // path.Clean will discard the ./ but not the ../. - // We need to preserve the ./ for pattern matching - // and in the returned import paths. - prefix := "" - if strings.HasPrefix(pattern, "./") { - prefix = "./" - } - match := matchPattern(pattern) - - var pkgs []string - filepath.Walk(dir, func(path string, fi os.FileInfo, err error) error { - if err != nil || !fi.IsDir() { - return nil - } - if path == dir { - // filepath.Walk starts at dir and recurses. For the recursive case, - // the path is the result of filepath.Join, which calls filepath.Clean. - // The initial case is not Cleaned, though, so we do this explicitly. - // - // This converts a path like "./io/" to "io". Without this step, running - // "cd $GOROOT/src/pkg; go list ./io/..." would incorrectly skip the io - // package, because prepending the prefix "./" to the unclean path would - // result in "././io", and match("././io") returns false. - path = filepath.Clean(path) - } - - // Avoid .foo, _foo, testdata and vendor directory trees, but do not avoid "." or "..". - _, elem := filepath.Split(path) - dot := strings.HasPrefix(elem, ".") && elem != "." && elem != ".." 
- if dot || strings.HasPrefix(elem, "_") || elem == "testdata" || elem == "vendor" { - return filepath.SkipDir - } - - name := prefix + filepath.ToSlash(path) - if !match(name) { - return nil - } - if _, err = build.ImportDir(path, 0); err != nil { - if _, noGo := err.(*build.NoGoError); !noGo { - log.Print(err) - } - return nil - } - pkgs = append(pkgs, name) - return nil - }) - return pkgs -} diff --git a/vendor/github.com/alexkohler/nakedret/v2/nakedret.go b/vendor/github.com/alexkohler/nakedret/v2/nakedret.go deleted file mode 100644 index f78bb8cb6..000000000 --- a/vendor/github.com/alexkohler/nakedret/v2/nakedret.go +++ /dev/null @@ -1,309 +0,0 @@ -package nakedret - -import ( - "bytes" - "errors" - "flag" - "fmt" - "go/ast" - "go/build" - "go/parser" - "go/printer" - "go/token" - "log" - "os" - "path/filepath" - "strings" - - "golang.org/x/tools/go/analysis" - "golang.org/x/tools/go/analysis/passes/inspect" - "golang.org/x/tools/go/ast/inspector" -) - -const pwd = "./" - -func NakedReturnAnalyzer(defaultLines uint) *analysis.Analyzer { - nakedRet := &NakedReturnRunner{} - flags := flag.NewFlagSet("nakedret", flag.ExitOnError) - flags.UintVar(&nakedRet.MaxLength, "l", defaultLines, "maximum number of lines for a naked return function") - var analyzer = &analysis.Analyzer{ - Name: "nakedret", - Doc: "Checks that functions with naked returns are not longer than a maximum size (can be zero).", - Run: nakedRet.run, - Flags: *flags, - Requires: []*analysis.Analyzer{inspect.Analyzer}, - } - return analyzer -} - -type NakedReturnRunner struct { - MaxLength uint -} - -func (n *NakedReturnRunner) run(pass *analysis.Pass) (any, error) { - inspector := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) - - nodeFilter := []ast.Node{ // filter needed nodes: visit only them - (*ast.FuncDecl)(nil), - (*ast.FuncLit)(nil), - (*ast.ReturnStmt)(nil), - } - retVis := &returnsVisitor{ - pass: pass, - f: pass.Fset, - maxLength: n.MaxLength, - } - inspector.Nodes(nodeFilter, retVis.NodesVisit) - return nil, nil -} - -type returnsVisitor struct { - pass *analysis.Pass - f *token.FileSet - maxLength uint - - // functions contains funcInfo for each nested function definition encountered while visiting the AST. 
- functions []funcInfo -} - -type funcInfo struct { - // Details of the function we're currently dealing with - funcType *ast.FuncType - funcName string - funcLength int - reportNaked bool -} - -func checkNakedReturns(args []string, maxLength *uint, setExitStatus bool) error { - - fset := token.NewFileSet() - - files, err := parseInput(args, fset) - if err != nil { - return fmt.Errorf("could not parse input: %v", err) - } - - if maxLength == nil { - return errors.New("max length nil") - } - - analyzer := NakedReturnAnalyzer(*maxLength) - pass := &analysis.Pass{ - Analyzer: analyzer, - Fset: fset, - Files: files, - Report: func(d analysis.Diagnostic) { - log.Printf("%s:%d: %s", fset.Position(d.Pos).Filename, fset.Position(d.Pos).Line, d.Message) - }, - ResultOf: map[*analysis.Analyzer]any{}, - } - result, err := inspect.Analyzer.Run(pass) - if err != nil { - return err - } - pass.ResultOf[inspect.Analyzer] = result - - _, err = analyzer.Run(pass) - if err != nil { - return err - } - - return nil -} - -func parseInput(args []string, fset *token.FileSet) ([]*ast.File, error) { - var directoryList []string - var fileMode bool - files := make([]*ast.File, 0) - - if len(args) == 0 { - directoryList = append(directoryList, pwd) - } else { - for _, arg := range args { - if strings.HasSuffix(arg, "/...") && isDir(arg[:len(arg)-len("/...")]) { - - for _, dirname := range allPackagesInFS(arg) { - directoryList = append(directoryList, dirname) - } - - } else if isDir(arg) { - directoryList = append(directoryList, arg) - - } else if exists(arg) { - if strings.HasSuffix(arg, ".go") { - fileMode = true - f, err := parser.ParseFile(fset, arg, nil, 0) - if err != nil { - return nil, err - } - files = append(files, f) - } else { - return nil, fmt.Errorf("invalid file %v specified", arg) - } - } else { - - // TODO clean this up a bit - imPaths := importPaths([]string{arg}) - for _, importPath := range imPaths { - pkg, err := build.Import(importPath, ".", 0) - if err != nil { - return nil, err - } - var stringFiles []string - stringFiles = append(stringFiles, pkg.GoFiles...) - // files = append(files, pkg.CgoFiles...) - stringFiles = append(stringFiles, pkg.TestGoFiles...) - if pkg.Dir != "." 
{ - for i, f := range stringFiles { - stringFiles[i] = filepath.Join(pkg.Dir, f) - } - } - - fileMode = true - for _, stringFile := range stringFiles { - f, err := parser.ParseFile(fset, stringFile, nil, 0) - if err != nil { - return nil, err - } - files = append(files, f) - } - - } - } - } - } - - // if we're not in file mode, then we need to grab each and every package in each directory - // we can to grab all the files - if !fileMode { - for _, fpath := range directoryList { - pkgs, err := parser.ParseDir(fset, fpath, nil, 0) - if err != nil { - return nil, err - } - - for _, pkg := range pkgs { - for _, f := range pkg.Files { - files = append(files, f) - } - } - } - } - - return files, nil -} - -func isDir(filename string) bool { - fi, err := os.Stat(filename) - return err == nil && fi.IsDir() -} - -func exists(filename string) bool { - _, err := os.Stat(filename) - return err == nil -} - -func hasNamedReturns(funcType *ast.FuncType) bool { - if funcType == nil || funcType.Results == nil { - return false - } - for _, field := range funcType.Results.List { - for _, ident := range field.Names { - if ident != nil { - return true - } - } - } - return false -} - -func nestedFuncName(functions []funcInfo) string { - var names []string - for _, f := range functions { - names = append(names, f.funcName) - } - return strings.Join(names, ".") -} - -func nakedReturnFix(s *ast.ReturnStmt, funcType *ast.FuncType) *ast.ReturnStmt { - var nameExprs []ast.Expr - for _, result := range funcType.Results.List { - for _, ident := range result.Names { - if ident != nil { - nameExprs = append(nameExprs, ident) - } - } - } - var sFix = *s - sFix.Results = nameExprs - return &sFix -} - -func (v *returnsVisitor) NodesVisit(node ast.Node, push bool) bool { - var ( - funcType *ast.FuncType - funcName string - ) - switch s := node.(type) { - case *ast.FuncDecl: - // We've found a function - funcType = s.Type - funcName = s.Name.Name - case *ast.FuncLit: - // We've found a function literal - funcType = s.Type - file := v.f.File(s.Pos()) - funcName = fmt.Sprintf("", file.Position(s.Pos()).Line) - case *ast.ReturnStmt: - // We've found a possibly naked return statement - fun := v.functions[len(v.functions)-1] - funName := nestedFuncName(v.functions) - if fun.reportNaked && len(s.Results) == 0 && push { - sFix := nakedReturnFix(s, fun.funcType) - b := &bytes.Buffer{} - err := printer.Fprint(b, v.f, sFix) - if err != nil { - log.Printf("failed to format named return fix: %s", err) - } - v.pass.Report(analysis.Diagnostic{ - Pos: s.Pos(), - End: s.End(), - Message: fmt.Sprintf("naked return in func `%s` with %d lines of code", funName, fun.funcLength), - SuggestedFixes: []analysis.SuggestedFix{{ - Message: "explicit return statement", - TextEdits: []analysis.TextEdit{{ - Pos: s.Pos(), - End: s.End(), - NewText: b.Bytes()}}, - }}, - }) - } - } - - if !push { - if funcType == nil { - return false - } - // Pop function info - v.functions = v.functions[:len(v.functions)-1] - return false - } - - if push && funcType != nil { - // Push function info to track returns for this function - file := v.f.File(node.Pos()) - length := file.Position(node.End()).Line - file.Position(node.Pos()).Line - if length == 0 { - // consider functions that finish on the same line as they start as single line functions, not zero lines! 
- length = 1 - } - v.functions = append(v.functions, funcInfo{ - funcType: funcType, - funcName: funcName, - funcLength: length, - reportNaked: uint(length) > v.maxLength && hasNamedReturns(funcType), - }) - } - - return true -} diff --git a/vendor/github.com/alexkohler/prealloc/LICENSE b/vendor/github.com/alexkohler/prealloc/LICENSE deleted file mode 100644 index 9310fbcff..000000000 --- a/vendor/github.com/alexkohler/prealloc/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -MIT License - -Copyright (c) 2017 Alex Kohler - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. diff --git a/vendor/github.com/alexkohler/prealloc/pkg/prealloc.go b/vendor/github.com/alexkohler/prealloc/pkg/prealloc.go deleted file mode 100644 index 72d8b95f7..000000000 --- a/vendor/github.com/alexkohler/prealloc/pkg/prealloc.go +++ /dev/null @@ -1,267 +0,0 @@ -package pkg - -import ( - "fmt" - "go/ast" - "go/token" -) - -type sliceDeclaration struct { - name string - // sType string - genD *ast.GenDecl -} - -type returnsVisitor struct { - // flags - simple bool - includeRangeLoops bool - includeForLoops bool - // visitor fields - sliceDeclarations []*sliceDeclaration - preallocHints []Hint - returnsInsideOfLoop bool - arrayTypes []string -} - -func Check(files []*ast.File, simple, includeRangeLoops, includeForLoops bool) []Hint { - hints := []Hint{} - for _, f := range files { - retVis := &returnsVisitor{ - simple: simple, - includeRangeLoops: includeRangeLoops, - includeForLoops: includeForLoops, - } - ast.Walk(retVis, f) - // if simple is true, then we actually have to check if we had returns - // inside of our loop. Otherwise, we can just report all messages. - if !retVis.simple || !retVis.returnsInsideOfLoop { - hints = append(hints, retVis.preallocHints...) 
- } - } - - return hints -} - -func contains(slice []string, item string) bool { - for _, s := range slice { - if s == item { - return true - } - } - - return false -} - -func (v *returnsVisitor) Visit(node ast.Node) ast.Visitor { - - v.sliceDeclarations = nil - v.returnsInsideOfLoop = false - - switch n := node.(type) { - case *ast.TypeSpec: - if _, ok := n.Type.(*ast.ArrayType); ok { - if n.Name != nil { - v.arrayTypes = append(v.arrayTypes, n.Name.Name) - } - } - case *ast.FuncDecl: - if n.Body != nil { - for _, stmt := range n.Body.List { - switch s := stmt.(type) { - // Find non pre-allocated slices - case *ast.DeclStmt: - genD, ok := s.Decl.(*ast.GenDecl) - if !ok { - continue - } - if genD.Tok == token.TYPE { - for _, spec := range genD.Specs { - tSpec, ok := spec.(*ast.TypeSpec) - if !ok { - continue - } - - if _, ok := tSpec.Type.(*ast.ArrayType); ok { - if tSpec.Name != nil { - v.arrayTypes = append(v.arrayTypes, tSpec.Name.Name) - } - } - } - } else if genD.Tok == token.VAR { - for _, spec := range genD.Specs { - vSpec, ok := spec.(*ast.ValueSpec) - if !ok { - continue - } - var isArrType bool - switch val := vSpec.Type.(type) { - case *ast.ArrayType: - isArrType = true - case *ast.Ident: - isArrType = contains(v.arrayTypes, val.Name) - } - if isArrType { - if vSpec.Names != nil { - /*atID, ok := arrayType.Elt.(*ast.Ident) - if !ok { - continue - }*/ - - // We should handle multiple slices declared on same line e.g. var mySlice1, mySlice2 []uint32 - for _, vName := range vSpec.Names { - v.sliceDeclarations = append(v.sliceDeclarations, &sliceDeclaration{name: vName.Name /*sType: atID.Name,*/, genD: genD}) - } - } - } - } - } - - case *ast.RangeStmt: - if v.includeRangeLoops { - if len(v.sliceDeclarations) == 0 { - continue - } - // Check the value being ranged over and ensure it's not a channel (we cannot offer any recommendations on channel ranges). - rangeIdent, ok := s.X.(*ast.Ident) - if ok && rangeIdent.Obj != nil { - valueSpec, ok := rangeIdent.Obj.Decl.(*ast.ValueSpec) - if ok { - if _, rangeTargetIsChannel := valueSpec.Type.(*ast.ChanType); rangeTargetIsChannel { - continue - } - } - } - if s.Body != nil { - v.handleLoops(s.Body) - } - } - - case *ast.ForStmt: - if v.includeForLoops { - if len(v.sliceDeclarations) == 0 { - continue - } - if s.Body != nil { - v.handleLoops(s.Body) - } - } - - default: - } - } - } - } - return v -} - -// handleLoops is a helper function to share the logic required for both *ast.RangeLoops and *ast.ForLoops -func (v *returnsVisitor) handleLoops(blockStmt *ast.BlockStmt) { - - for _, stmt := range blockStmt.List { - switch bodyStmt := stmt.(type) { - case *ast.AssignStmt: - asgnStmt := bodyStmt - for index, expr := range asgnStmt.Rhs { - if index >= len(asgnStmt.Lhs) { - continue - } - - lhsIdent, ok := asgnStmt.Lhs[index].(*ast.Ident) - if !ok { - continue - } - - callExpr, ok := expr.(*ast.CallExpr) - if !ok { - continue - } - - rhsFuncIdent, ok := callExpr.Fun.(*ast.Ident) - if !ok { - continue - } - - if rhsFuncIdent.Name != "append" { - continue - } - - // e.g., `x = append(x)` - // Pointless, but pre-allocation will not help. - if len(callExpr.Args) < 2 { - continue - } - - rhsIdent, ok := callExpr.Args[0].(*ast.Ident) - if !ok { - continue - } - - // e.g., `x = append(y, a)` - // This is weird (and maybe a logic error), - // but we cannot recommend pre-allocation. - if lhsIdent.Name != rhsIdent.Name { - continue - } - - // e.g., `x = append(x, y...)` - // we should ignore this. 
Pre-allocating in this case - // is confusing, and is not possible in general. - if callExpr.Ellipsis.IsValid() { - continue - } - - for _, sliceDecl := range v.sliceDeclarations { - if sliceDecl.name == lhsIdent.Name { - // This is a potential mark, we just need to make sure there are no returns/continues in the - // range loop. - // now we just need to grab whatever we're ranging over - /*sxIdent, ok := s.X.(*ast.Ident) - if !ok { - continue - }*/ - - v.preallocHints = append(v.preallocHints, Hint{ - Pos: sliceDecl.genD.Pos(), - DeclaredSliceName: sliceDecl.name, - }) - } - } - } - case *ast.IfStmt: - ifStmt := bodyStmt - if ifStmt.Body != nil { - for _, ifBodyStmt := range ifStmt.Body.List { - // TODO should probably handle embedded ifs here - switch /*ift :=*/ ifBodyStmt.(type) { - case *ast.BranchStmt, *ast.ReturnStmt: - v.returnsInsideOfLoop = true - default: - } - } - } - - default: - - } - } - -} - -// Hint stores the information about an occurrence of a slice that could be -// preallocated. -type Hint struct { - Pos token.Pos - DeclaredSliceName string -} - -func (h Hint) String() string { - return fmt.Sprintf("%v: Consider preallocating %v", h.Pos, h.DeclaredSliceName) -} - -func (h Hint) StringFromFS(f *token.FileSet) string { - file := f.File(h.Pos) - lineNumber := file.Position(h.Pos).Line - - return fmt.Sprintf("%v:%v Consider preallocating %v", file.Name(), lineNumber, h.DeclaredSliceName) -} diff --git a/vendor/github.com/alingse/asasalint/.gitignore b/vendor/github.com/alingse/asasalint/.gitignore deleted file mode 100644 index d0fc531c8..000000000 --- a/vendor/github.com/alingse/asasalint/.gitignore +++ /dev/null @@ -1,18 +0,0 @@ -# Binaries for programs and plugins -*.exe -*.exe~ -*.dll -*.so -*.dylib - -# Test binary, built with `go test -c` -*.test - -# Output of the go coverage tool, specifically when used with LiteIDE -*.out - -# Dependency directories (remove the comment below to include it) -# vendor/ -.vscode - -asasalint diff --git a/vendor/github.com/alingse/asasalint/.goreleaser.yml b/vendor/github.com/alingse/asasalint/.goreleaser.yml deleted file mode 100644 index e45d5860d..000000000 --- a/vendor/github.com/alingse/asasalint/.goreleaser.yml +++ /dev/null @@ -1,72 +0,0 @@ ---- -project_name: asasalint - -release: - github: - owner: alingse - name: asasalint - -builds: - - binary: asasalint - goos: - - darwin - - windows - - linux - - freebsd - goarch: - - amd64 - - arm64 - - arm - - 386 - - ppc64le - - s390x - - mips64 - - mips64le - - riscv64 - goarm: - - 6 - - 7 - gomips: - - hardfloat - env: - - CGO_ENABLED=0 - ignore: - - goos: darwin - goarch: 386 - - goos: freebsd - goarch: arm64 - main: ./cmd/asasalint/ - flags: - - -trimpath - ldflags: -s -w -X main.version={{.Version}} -X main.commit={{.ShortCommit}} -X main.date={{.Date}} - -archives: - - format: tar.gz - wrap_in_directory: true - format_overrides: - - goos: windows - format: zip - name_template: '{{ .ProjectName }}-{{ .Version }}-{{ .Os }}-{{ .Arch }}{{ if .Arm }}v{{ .Arm }}{{ end }}' - files: - - LICENSE - - README.md - -snapshot: - name_template: SNAPSHOT-{{ .Commit }} - -checksum: - name_template: '{{ .ProjectName }}-{{ .Version }}-checksums.txt' - -changelog: - sort: asc - filters: - exclude: - - '(?i)^docs?:' - - '(?i)^docs\([^:]+\):' - - '(?i)^docs\[[^:]+\]:' - - '^tests?:' - - '(?i)^dev:' - - '^build\(deps\): bump .* in /docs \(#\d+\)' - - '^build\(deps\): bump .* in /\.github/peril \(#\d+\)' - - Merge pull request - - Merge branch diff --git a/vendor/github.com/alingse/asasalint/LICENSE 
b/vendor/github.com/alingse/asasalint/LICENSE deleted file mode 100644 index a7c39f2e3..000000000 --- a/vendor/github.com/alingse/asasalint/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -MIT License - -Copyright (c) 2022 alingse - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. diff --git a/vendor/github.com/alingse/asasalint/Makefile b/vendor/github.com/alingse/asasalint/Makefile deleted file mode 100644 index 12f8ba928..000000000 --- a/vendor/github.com/alingse/asasalint/Makefile +++ /dev/null @@ -1,15 +0,0 @@ -.PHONY: clean check test build - -default: clean check test build - -clean: - rm -rf dist/ cover.out - -test: clean - go test -v -cover ./... - -check: - golangci-lint run - -build: - go build -ldflags "-s -w" -trimpath ./cmd/asasalint/ diff --git a/vendor/github.com/alingse/asasalint/README.md b/vendor/github.com/alingse/asasalint/README.md deleted file mode 100644 index 3fa7e65b3..000000000 --- a/vendor/github.com/alingse/asasalint/README.md +++ /dev/null @@ -1,76 +0,0 @@ -# asasalint -Golang linter, lint that pass any slice as any in variadic function - - -## Install - -```sh -go install github.com/alingse/asasalint/cmd/asasalint@latest -``` - -## Usage - -```sh -asasalint ./... -``` - -ignore some func that was by desgin - -```sh -asasalint -e append,Append ./... -``` - -## Why - -two kind of unexpected usage, and `go build` success - -```Go -package main - -import "fmt" - -func A(args ...any) int { - return len(args) -} - -func B(args ...any) int { - return A(args) -} - -func main() { - - // 1 - fmt.Println(B(1, 2, 3, 4)) -} -``` - - - -```Go -package main - -import "fmt" - -func errMsg(msg string, args ...any) string { - return fmt.Sprintf(msg, args...) -} - -func Err(msg string, args ...any) string { - return errMsg(msg, args) -} - -func main() { - - // p1 [hello world] p2 %!s(MISSING) - fmt.Println(Err("p1 %s p2 %s", "hello", "world")) -} -``` - - - -## TODO - -1. add to golangci-lint -2. given a SuggestEdition -3. add `append` to default exclude ? -4. 
ingore pattern `fn(a, b, []any{1,2,3})` , because `[]any{1,2,3}` is most likely by design diff --git a/vendor/github.com/alingse/asasalint/asasalint.go b/vendor/github.com/alingse/asasalint/asasalint.go deleted file mode 100644 index f34516a46..000000000 --- a/vendor/github.com/alingse/asasalint/asasalint.go +++ /dev/null @@ -1,166 +0,0 @@ -package asasalint - -import ( - "bytes" - "fmt" - "go/ast" - "go/printer" - "go/token" - "go/types" - "log" - "regexp" - "strings" - - "golang.org/x/tools/go/analysis" - "golang.org/x/tools/go/analysis/passes/inspect" - "golang.org/x/tools/go/ast/inspector" -) - -const BuiltinExclusions = `^(fmt|log|logger|t|)\.(Print|Fprint|Sprint|Fatal|Panic|Error|Warn|Warning|Info|Debug|Log)(|f|ln)$` - -type LinterSetting struct { - Exclude []string - NoBuiltinExclusions bool - IgnoreTest bool -} - -func NewAnalyzer(setting LinterSetting) (*analysis.Analyzer, error) { - a, err := newAnalyzer(setting) - if err != nil { - return nil, err - } - - return &analysis.Analyzer{ - Name: "asasalint", - Doc: "check for pass []any as any in variadic func(...any)", - Run: a.run, - Requires: []*analysis.Analyzer{inspect.Analyzer}, - }, nil -} - -type analyzer struct { - excludes []*regexp.Regexp - setting LinterSetting -} - -func newAnalyzer(setting LinterSetting) (*analyzer, error) { - a := &analyzer{ - setting: setting, - } - - if !a.setting.NoBuiltinExclusions { - a.excludes = append(a.excludes, regexp.MustCompile(BuiltinExclusions)) - } - - for _, exclude := range a.setting.Exclude { - if exclude != "" { - exp, err := regexp.Compile(exclude) - if err != nil { - return nil, err - } - - a.excludes = append(a.excludes, exp) - } - } - - return a, nil -} - -func (a *analyzer) run(pass *analysis.Pass) (interface{}, error) { - inspectorInfo := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) - nodeFilter := []ast.Node{(*ast.CallExpr)(nil)} - - inspectorInfo.Preorder(nodeFilter, a.AsCheckVisitor(pass)) - return nil, nil -} - -func (a *analyzer) AsCheckVisitor(pass *analysis.Pass) func(ast.Node) { - return func(n ast.Node) { - if a.setting.IgnoreTest { - pos := pass.Fset.Position(n.Pos()) - if strings.HasSuffix(pos.Filename, "_test.go") { - return - } - } - - caller, ok := n.(*ast.CallExpr) - if !ok { - return - } - if caller.Ellipsis != token.NoPos { - return - } - if len(caller.Args) == 0 { - return - } - - fnName, err := getFuncName(pass.Fset, caller) - if err != nil { - log.Println(err) - return - } - - for _, exclude := range a.excludes { - if exclude.MatchString(fnName) { - return - } - } - - fnType := pass.TypesInfo.TypeOf(caller.Fun) - if !isSliceAnyVariadicFuncType(fnType) { - return - } - - fnSign := fnType.(*types.Signature) - if len(caller.Args) != fnSign.Params().Len() { - return - } - - lastArg := caller.Args[len(caller.Args)-1] - argType := pass.TypesInfo.TypeOf(lastArg) - if !isSliceAnyType(argType) { - return - } - node := lastArg - - d := analysis.Diagnostic{ - Pos: node.Pos(), - End: node.End(), - Message: fmt.Sprintf("pass []any as any to func %s %s", fnName, fnType.String()), - Category: "asasalint", - } - pass.Report(d) - } -} - -func getFuncName(fset *token.FileSet, caller *ast.CallExpr) (string, error) { - buf := new(bytes.Buffer) - if err := printer.Fprint(buf, fset, caller.Fun); err != nil { - return "", fmt.Errorf("unable to print node at %s: %w", fset.Position(caller.Fun.Pos()), err) - } - - return buf.String(), nil -} - -func isSliceAnyVariadicFuncType(typ types.Type) (r bool) { - fnSign, ok := typ.(*types.Signature) - if !ok || !fnSign.Variadic() { - 
return false - } - - params := fnSign.Params() - lastParam := params.At(params.Len() - 1) - return isSliceAnyType(lastParam.Type()) -} - -func isSliceAnyType(typ types.Type) (r bool) { - sliceType, ok := typ.(*types.Slice) - if !ok { - return - } - elemType, ok := sliceType.Elem().(*types.Interface) - if !ok { - return - } - return elemType.NumMethods() == 0 -} diff --git a/vendor/github.com/ashanbrown/forbidigo/LICENSE b/vendor/github.com/ashanbrown/forbidigo/LICENSE deleted file mode 100644 index dc1d47ad5..000000000 --- a/vendor/github.com/ashanbrown/forbidigo/LICENSE +++ /dev/null @@ -1,13 +0,0 @@ -Copyright 2019 Andrew Shannon Brown - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. diff --git a/vendor/github.com/ashanbrown/forbidigo/forbidigo/config_options.go b/vendor/github.com/ashanbrown/forbidigo/forbidigo/config_options.go deleted file mode 100644 index 3f0ed6682..000000000 --- a/vendor/github.com/ashanbrown/forbidigo/forbidigo/config_options.go +++ /dev/null @@ -1,129 +0,0 @@ -package forbidigo - -// Code generated by github.com/launchdarkly/go-options. DO NOT EDIT. - -import "fmt" - -import "github.com/google/go-cmp/cmp" - -type ApplyOptionFunc func(c *config) error - -func (f ApplyOptionFunc) apply(c *config) error { - return f(c) -} - -func newConfig(options ...Option) (config, error) { - var c config - err := applyConfigOptions(&c, options...) 
- return c, err -} - -func applyConfigOptions(c *config, options ...Option) error { - c.ExcludeGodocExamples = true - for _, o := range options { - if err := o.apply(c); err != nil { - return err - } - } - return nil -} - -type Option interface { - apply(*config) error -} - -type optionExcludeGodocExamplesImpl struct { - o bool -} - -func (o optionExcludeGodocExamplesImpl) apply(c *config) error { - c.ExcludeGodocExamples = o.o - return nil -} - -func (o optionExcludeGodocExamplesImpl) Equal(v optionExcludeGodocExamplesImpl) bool { - switch { - case !cmp.Equal(o.o, v.o): - return false - } - return true -} - -func (o optionExcludeGodocExamplesImpl) String() string { - name := "OptionExcludeGodocExamples" - - // hack to avoid go vet error about passing a function to Sprintf - var value interface{} = o.o - return fmt.Sprintf("%s: %+v", name, value) -} - -// OptionExcludeGodocExamples don't check inside Godoc examples (see https://blog.golang.org/examples) -func OptionExcludeGodocExamples(o bool) Option { - return optionExcludeGodocExamplesImpl{ - o: o, - } -} - -type optionIgnorePermitDirectivesImpl struct { - o bool -} - -func (o optionIgnorePermitDirectivesImpl) apply(c *config) error { - c.IgnorePermitDirectives = o.o - return nil -} - -func (o optionIgnorePermitDirectivesImpl) Equal(v optionIgnorePermitDirectivesImpl) bool { - switch { - case !cmp.Equal(o.o, v.o): - return false - } - return true -} - -func (o optionIgnorePermitDirectivesImpl) String() string { - name := "OptionIgnorePermitDirectives" - - // hack to avoid go vet error about passing a function to Sprintf - var value interface{} = o.o - return fmt.Sprintf("%s: %+v", name, value) -} - -// OptionIgnorePermitDirectives don't check for `permit` directives(for example, in favor of `nolint`) -func OptionIgnorePermitDirectives(o bool) Option { - return optionIgnorePermitDirectivesImpl{ - o: o, - } -} - -type optionAnalyzeTypesImpl struct { - o bool -} - -func (o optionAnalyzeTypesImpl) apply(c *config) error { - c.AnalyzeTypes = o.o - return nil -} - -func (o optionAnalyzeTypesImpl) Equal(v optionAnalyzeTypesImpl) bool { - switch { - case !cmp.Equal(o.o, v.o): - return false - } - return true -} - -func (o optionAnalyzeTypesImpl) String() string { - name := "OptionAnalyzeTypes" - - // hack to avoid go vet error about passing a function to Sprintf - var value interface{} = o.o - return fmt.Sprintf("%s: %+v", name, value) -} - -// OptionAnalyzeTypes enable to match canonical names for types and interfaces using type info -func OptionAnalyzeTypes(o bool) Option { - return optionAnalyzeTypesImpl{ - o: o, - } -} diff --git a/vendor/github.com/ashanbrown/forbidigo/forbidigo/forbidigo.go b/vendor/github.com/ashanbrown/forbidigo/forbidigo/forbidigo.go deleted file mode 100644 index a7a3ab591..000000000 --- a/vendor/github.com/ashanbrown/forbidigo/forbidigo/forbidigo.go +++ /dev/null @@ -1,398 +0,0 @@ -// Package forbidigo provides a linter for forbidding the use of specific identifiers -package forbidigo - -import ( - "bytes" - "fmt" - "go/ast" - "go/printer" - "go/token" - "go/types" - "log" - "regexp" - "strings" -) - -type Issue interface { - Details() string - Pos() token.Pos - Position() token.Position - String() string -} - -type UsedIssue struct { - identifier string - pattern string - pos token.Pos - position token.Position - customMsg string -} - -func (a UsedIssue) Details() string { - explanation := fmt.Sprintf(` because %q`, a.customMsg) - if a.customMsg == "" { - explanation = fmt.Sprintf(" by pattern `%s`", a.pattern) - } - 
return fmt.Sprintf("use of `%s` forbidden", a.identifier) + explanation -} - -func (a UsedIssue) Position() token.Position { - return a.position -} - -func (a UsedIssue) Pos() token.Pos { - return a.pos -} - -func (a UsedIssue) String() string { return toString(a) } - -func toString(i UsedIssue) string { - return fmt.Sprintf("%s at %s", i.Details(), i.Position()) -} - -type Linter struct { - cfg config - patterns []*pattern -} - -func DefaultPatterns() []string { - return []string{`^(fmt\.Print(|f|ln)|print|println)$`} -} - -//go:generate go-options config -type config struct { - // don't check inside Godoc examples (see https://blog.golang.org/examples) - ExcludeGodocExamples bool `options:",true"` - IgnorePermitDirectives bool // don't check for `permit` directives(for example, in favor of `nolint`) - AnalyzeTypes bool // enable to match canonical names for types and interfaces using type info -} - -func NewLinter(patterns []string, options ...Option) (*Linter, error) { - cfg, err := newConfig(options...) - if err != nil { - return nil, fmt.Errorf("failed to process options: %w", err) - } - - if len(patterns) == 0 { - patterns = DefaultPatterns() - } - compiledPatterns := make([]*pattern, 0, len(patterns)) - for _, ptrn := range patterns { - p, err := parse(ptrn) - if err != nil { - return nil, err - } - compiledPatterns = append(compiledPatterns, p) - } - return &Linter{ - cfg: cfg, - patterns: compiledPatterns, - }, nil -} - -type visitor struct { - cfg config - isTestFile bool // godoc only runs on test files - - linter *Linter - comments []*ast.CommentGroup - - runConfig RunConfig - issues []Issue -} - -// Deprecated: Run was the original entrypoint before RunWithConfig was introduced to support -// additional match patterns that need additional information. -func (l *Linter) Run(fset *token.FileSet, nodes ...ast.Node) ([]Issue, error) { - return l.RunWithConfig(RunConfig{Fset: fset}, nodes...) -} - -// RunConfig provides information that the linter needs for different kinds -// of match patterns. Ideally, all fields should get set. More fields may get -// added in the future as needed. -type RunConfig struct { - // FSet is required. - Fset *token.FileSet - - // TypesInfo is needed for expanding source code expressions. - // Nil disables that step, i.e. patterns match the literal source code. - TypesInfo *types.Info - - // DebugLog is used to print debug messages. May be nil. - DebugLog func(format string, args ...interface{}) -} - -func (l *Linter) RunWithConfig(config RunConfig, nodes ...ast.Node) ([]Issue, error) { - if config.DebugLog == nil { - config.DebugLog = func(format string, args ...interface{}) {} - } - var issues []Issue - for _, node := range nodes { - var comments []*ast.CommentGroup - isTestFile := false - isWholeFileExample := false - if file, ok := node.(*ast.File); ok { - comments = file.Comments - fileName := config.Fset.Position(file.Pos()).Filename - isTestFile = strings.HasSuffix(fileName, "_test.go") - - // From https://blog.golang.org/examples, a "whole file example" is: - // a file that ends in _test.go and contains exactly one example function, - // no test or benchmark functions, and at least one other package-level declaration. 
- if l.cfg.ExcludeGodocExamples && isTestFile && len(file.Decls) > 1 { - numExamples := 0 - numTestsAndBenchmarks := 0 - for _, decl := range file.Decls { - funcDecl, isFuncDecl := decl.(*ast.FuncDecl) - // consider only functions, not methods - if !isFuncDecl || funcDecl.Recv != nil || funcDecl.Name == nil { - continue - } - funcName := funcDecl.Name.Name - if strings.HasPrefix(funcName, "Test") || strings.HasPrefix(funcName, "Benchmark") { - numTestsAndBenchmarks++ - break // not a whole file example - } - if strings.HasPrefix(funcName, "Example") { - numExamples++ - } - } - - // if this is a whole file example, skip this node - isWholeFileExample = numExamples == 1 && numTestsAndBenchmarks == 0 - } - } - if isWholeFileExample { - continue - } - visitor := visitor{ - cfg: l.cfg, - isTestFile: isTestFile, - linter: l, - runConfig: config, - comments: comments, - } - ast.Walk(&visitor, node) - issues = append(issues, visitor.issues...) - } - return issues, nil -} - -func (v *visitor) Visit(node ast.Node) ast.Visitor { - switch node := node.(type) { - case *ast.FuncDecl: - // don't descend into godoc examples if we are ignoring them - isGodocExample := v.isTestFile && node.Recv == nil && node.Name != nil && strings.HasPrefix(node.Name.Name, "Example") - if isGodocExample && v.cfg.ExcludeGodocExamples { - return nil - } - ast.Walk(v, node.Type) - if node.Body != nil { - ast.Walk(v, node.Body) - } - return nil - // Ignore constant and type names - case *ast.ValueSpec: - // Look at only type and values for const and variable specs, and not names - if node.Type != nil { - ast.Walk(v, node.Type) - } - if node.Values != nil { - for _, x := range node.Values { - ast.Walk(v, x) - } - } - return nil - // Ignore import alias names - case *ast.ImportSpec: - return nil - // Ignore type names - case *ast.TypeSpec: - // Look at only type parameters for type spec - if node.TypeParams != nil { - ast.Walk(v, node.TypeParams) - } - ast.Walk(v, node.Type) - return nil - // Ignore field names - case *ast.Field: - if node.Type != nil { - ast.Walk(v, node.Type) - } - return nil - // The following two are handled below. - case *ast.SelectorExpr: - case *ast.Ident: - // Everything else isn't. - default: - return v - } - - // The text as it appears in the source is always used because issues - // use that. It's used for matching unless usage of type information - // is enabled. - srcText := v.textFor(node) - matchTexts, pkgText := v.expandMatchText(node, srcText) - v.runConfig.DebugLog("%s: match %v, package %q", v.runConfig.Fset.Position(node.Pos()), matchTexts, pkgText) - for _, p := range v.linter.patterns { - if p.matches(matchTexts) && - (p.Package == "" || p.pkgRe.MatchString(pkgText)) && - !v.permit(node) { - v.issues = append(v.issues, UsedIssue{ - identifier: srcText, // Always report the expression as it appears in the source code. - pattern: p.re.String(), - pos: node.Pos(), - position: v.runConfig.Fset.Position(node.Pos()), - customMsg: p.Msg, - }) - } - } - - // descend into the left-side of selectors - if selector, isSelector := node.(*ast.SelectorExpr); isSelector { - if _, leftSideIsIdentifier := selector.X.(*ast.Ident); !leftSideIsIdentifier { - return v - } - } - - return nil -} - -// textFor returns the expression as it appears in the source code (for -// example, .). 
-func (v *visitor) textFor(node ast.Node) string { - buf := new(bytes.Buffer) - if err := printer.Fprint(buf, v.runConfig.Fset, node); err != nil { - log.Fatalf("ERROR: unable to print node at %s: %s", v.runConfig.Fset.Position(node.Pos()), err) - } - return buf.String() -} - -// expandMatchText expands the selector in a selector expression to the full package -// name and (for variables) the type: -// -// - example.com/some/pkg.Function -// - example.com/some/pkg.CustomType.Method -// -// It updates the text to match against and fills the package string if possible, -// otherwise it just returns. -func (v *visitor) expandMatchText(node ast.Node, srcText string) (matchTexts []string, pkgText string) { - // The text to match against is the literal source code if we cannot - // come up with something different. - matchTexts = []string{srcText} - - if !v.cfg.AnalyzeTypes || v.runConfig.TypesInfo == nil { - return matchTexts, pkgText - } - - location := v.runConfig.Fset.Position(node.Pos()) - - switch node := node.(type) { - case *ast.Ident: - if object, ok := v.runConfig.TypesInfo.Uses[node]; !ok { - // No information about the identifier. Should - // not happen, but perhaps there were compile - // errors? - v.runConfig.DebugLog("%s: unknown identifier %q", location, srcText) - } else if pkg := object.Pkg(); pkg != nil { - pkgText = pkg.Path() - // if this is a method, don't include the package name - isMethod := false - if signature, ok := object.Type().(*types.Signature); ok && signature.Recv() != nil { - isMethod = true - } - v.runConfig.DebugLog("%s: identifier: %q -> %q in package %q", location, srcText, matchTexts, pkgText) - // match either with or without package name - if !isMethod { - matchTexts = []string{pkg.Name() + "." + srcText, srcText} - } - } else { - v.runConfig.DebugLog("%s: identifier: %q -> %q without package", location, srcText, matchTexts) - } - case *ast.SelectorExpr: - selector := node.X - field := node.Sel.Name - - // If we are lucky, the entire selector expression has a known - // type. We don't care about the value. - selectorText := v.textFor(node) - if typeAndValue, ok := v.runConfig.TypesInfo.Types[selector]; ok { - m, p, ok := typeNameWithPackage(typeAndValue.Type) - if !ok { - v.runConfig.DebugLog("%s: selector %q with supported type %T", location, selectorText, typeAndValue.Type) - } - matchTexts = []string{m + "." + field} - pkgText = p - v.runConfig.DebugLog("%s: selector %q with supported type %q: %q -> %q, package %q", location, selectorText, typeAndValue.Type.String(), srcText, matchTexts, pkgText) - } - // Some expressions need special treatment. - switch selector := selector.(type) { - case *ast.Ident: - if object, hasUses := v.runConfig.TypesInfo.Uses[selector]; hasUses { - switch object := object.(type) { - case *types.PkgName: - pkgText = object.Imported().Path() - matchTexts = []string{object.Imported().Name() + "." + field} - v.runConfig.DebugLog("%s: selector %q is package: %q -> %q, package %q", location, selectorText, srcText, matchTexts, pkgText) - case *types.Var: - if typeName, packageName, ok := typeNameWithPackage(object.Type()); ok { - matchTexts = []string{typeName + "." + field} - pkgText = packageName - v.runConfig.DebugLog("%s: selector %q is variable of type %q: %q -> %q, package %q", location, selectorText, object.Type().String(), srcText, matchTexts, pkgText) - } else { - v.runConfig.DebugLog("%s: selector %q is variable with unsupported type %T", location, selectorText, object.Type()) - } - default: - // Something else? 
- v.runConfig.DebugLog("%s: selector %q is identifier with unsupported type %T", location, selectorText, object) - } - } else { - // No information about the identifier. Should - // not happen, but perhaps there were compile - // errors? - v.runConfig.DebugLog("%s: unknown selector identifier %q", location, selectorText) - } - default: - v.runConfig.DebugLog("%s: selector %q of unsupported type %T", location, selectorText, selector) - } - default: - v.runConfig.DebugLog("%s: unsupported type %T", location, node) - } - return matchTexts, pkgText -} - -// typeNameWithPackage tries to determine `.` and the full -// package path. This only needs to work for types of a selector in a selector -// expression. -func typeNameWithPackage(t types.Type) (typeName, packagePath string, ok bool) { - if ptr, ok := t.(*types.Pointer); ok { - t = ptr.Elem() - } - - switch t := t.(type) { - case *types.Named: - obj := t.Obj() - pkg := obj.Pkg() - if pkg == nil { - return "", "", false - } - return pkg.Name() + "." + obj.Name(), pkg.Path(), true - default: - return "", "", false - } -} - -func (v *visitor) permit(node ast.Node) bool { - if v.cfg.IgnorePermitDirectives { - return false - } - nodePos := v.runConfig.Fset.Position(node.Pos()) - nolint := regexp.MustCompile(fmt.Sprintf(`^//\s?permit:%s\b`, regexp.QuoteMeta(v.textFor(node)))) - for _, c := range v.comments { - commentPos := v.runConfig.Fset.Position(c.Pos()) - if commentPos.Line == nodePos.Line && len(c.List) > 0 && nolint.MatchString(c.List[0].Text) { - return true - } - } - return false -} diff --git a/vendor/github.com/ashanbrown/forbidigo/forbidigo/patterns.go b/vendor/github.com/ashanbrown/forbidigo/forbidigo/patterns.go deleted file mode 100644 index 2692dcd24..000000000 --- a/vendor/github.com/ashanbrown/forbidigo/forbidigo/patterns.go +++ /dev/null @@ -1,127 +0,0 @@ -package forbidigo - -import ( - "fmt" - "regexp" - "regexp/syntax" - "strings" - - "gopkg.in/yaml.v2" -) - -// pattern matches code that is not supposed to be used. -type pattern struct { - re, pkgRe *regexp.Regexp - - // Pattern is the regular expression string that is used for matching. - // It gets matched against the literal source code text or the expanded - // text, depending on the mode in which the analyzer runs. - Pattern string `yaml:"p"` - - // Package is a regular expression for the full package path of - // an imported item. Ignored unless the analyzer is configured to - // determine that information. - Package string `yaml:"pkg,omitempty"` - - // Msg gets printed in addition to the normal message if a match is - // found. - Msg string `yaml:"msg,omitempty"` -} - -// A yamlPattern pattern in a YAML string may be represented either by a string -// (the traditional regular expression syntax) or a struct (for more complex -// patterns). -type yamlPattern pattern - -func (p *yamlPattern) UnmarshalYAML(unmarshal func(interface{}) error) error { - // Try struct first. It's unlikely that a regular expression string - // is valid YAML for a struct. - var ptrn pattern - if err := unmarshal(&ptrn); err != nil { - errStr := err.Error() - // Didn't work, try plain string. 
- var ptrn string - if err := unmarshal(&ptrn); err != nil { - return fmt.Errorf("pattern is neither a regular expression string (%s) nor a Pattern struct (%s)", err.Error(), errStr) - } - p.Pattern = ptrn - } else { - *p = yamlPattern(ptrn) - } - return ((*pattern)(p)).validate() -} - -var _ yaml.Unmarshaler = &yamlPattern{} - -// parse accepts a regular expression or, if the string starts with { or contains a line break, a -// JSON or YAML representation of a Pattern. -func parse(ptrn string) (*pattern, error) { - pattern := &pattern{} - - if strings.HasPrefix(strings.TrimSpace(ptrn), "{") || - strings.Contains(ptrn, "\n") { - // Embedded JSON or YAML. We can decode both with the YAML decoder. - if err := yaml.UnmarshalStrict([]byte(ptrn), pattern); err != nil { - return nil, fmt.Errorf("parsing as JSON or YAML failed: %v", err) - } - } else { - pattern.Pattern = ptrn - } - - if err := pattern.validate(); err != nil { - return nil, err - } - return pattern, nil -} - -func (p *pattern) validate() error { - ptrnRe, err := regexp.Compile(p.Pattern) - if err != nil { - return fmt.Errorf("unable to compile source code pattern `%s`: %s", p.Pattern, err) - } - re, err := syntax.Parse(p.Pattern, syntax.Perl) - if err != nil { - return fmt.Errorf("unable to parse source code pattern `%s`: %s", p.Pattern, err) - } - msg := extractComment(re) - if msg != "" { - p.Msg = msg - } - p.re = ptrnRe - - if p.Package != "" { - pkgRe, err := regexp.Compile(p.Package) - if err != nil { - return fmt.Errorf("unable to compile package pattern `%s`: %s", p.Package, err) - } - p.pkgRe = pkgRe - } - - return nil -} - -func (p *pattern) matches(matchTexts []string) bool { - for _, text := range matchTexts { - if p.re.MatchString(text) { - return true - } - } - return false -} - -// Traverse the leaf submatches in the regex tree and extract a comment, if any -// is present. -func extractComment(re *syntax.Regexp) string { - for _, sub := range re.Sub { - subStr := sub.String() - if strings.HasPrefix(subStr, "#") { - return strings.TrimSpace(strings.TrimPrefix(sub.String(), "#")) - } - if len(sub.Sub) > 0 { - if comment := extractComment(sub); comment != "" { - return comment - } - } - } - return "" -} diff --git a/vendor/github.com/ashanbrown/makezero/LICENSE b/vendor/github.com/ashanbrown/makezero/LICENSE deleted file mode 100644 index dc1d47ad5..000000000 --- a/vendor/github.com/ashanbrown/makezero/LICENSE +++ /dev/null @@ -1,13 +0,0 @@ -Copyright 2019 Andrew Shannon Brown - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. diff --git a/vendor/github.com/ashanbrown/makezero/makezero/makezero.go b/vendor/github.com/ashanbrown/makezero/makezero/makezero.go deleted file mode 100644 index 18bcad3d0..000000000 --- a/vendor/github.com/ashanbrown/makezero/makezero/makezero.go +++ /dev/null @@ -1,230 +0,0 @@ -// Package makezero provides a linter for appends to slices initialized with non-zero length. 
-package makezero - -import ( - "bytes" - "fmt" - "go/ast" - "go/printer" - "go/token" - "go/types" - "log" - "regexp" -) - -// a decl might include multiple var, -// so var name with decl make final uniq obj. -type uniqDecl struct { - varName string - decl interface{} -} - -type Issue interface { - Details() string - Pos() token.Pos - Position() token.Position - String() string -} - -type AppendIssue struct { - name string - pos token.Pos - position token.Position -} - -func (a AppendIssue) Details() string { - return fmt.Sprintf("append to slice `%s` with non-zero initialized length", a.name) -} - -func (a AppendIssue) Pos() token.Pos { - return a.pos -} - -func (a AppendIssue) Position() token.Position { - return a.position -} - -func (a AppendIssue) String() string { return toString(a) } - -type MustHaveNonZeroInitLenIssue struct { - name string - pos token.Pos - position token.Position -} - -func (i MustHaveNonZeroInitLenIssue) Details() string { - return fmt.Sprintf("slice `%s` does not have non-zero initial length", i.name) -} - -func (i MustHaveNonZeroInitLenIssue) Pos() token.Pos { - return i.pos -} - -func (i MustHaveNonZeroInitLenIssue) Position() token.Position { - return i.position -} - -func (i MustHaveNonZeroInitLenIssue) String() string { return toString(i) } - -func toString(i Issue) string { - return fmt.Sprintf("%s at %s", i.Details(), i.Position()) -} - -type visitor struct { - initLenMustBeZero bool - - comments []*ast.CommentGroup // comments to apply during this visit - info *types.Info - - nonZeroLengthSliceDecls map[uniqDecl]struct{} - fset *token.FileSet - issues []Issue -} - -type Linter struct { - initLenMustBeZero bool -} - -func NewLinter(initialLengthMustBeZero bool) *Linter { - return &Linter{ - initLenMustBeZero: initialLengthMustBeZero, - } -} - -func (l Linter) Run(fset *token.FileSet, info *types.Info, nodes ...ast.Node) ([]Issue, error) { - var issues []Issue - for _, node := range nodes { - var comments []*ast.CommentGroup - if file, ok := node.(*ast.File); ok { - comments = file.Comments - } - visitor := visitor{ - nonZeroLengthSliceDecls: make(map[uniqDecl]struct{}), - initLenMustBeZero: l.initLenMustBeZero, - info: info, - fset: fset, - comments: comments, - } - ast.Walk(&visitor, node) - issues = append(issues, visitor.issues...) 
- } - return issues, nil -} - -func (v *visitor) Visit(node ast.Node) ast.Visitor { - switch node := node.(type) { - case *ast.CallExpr: - fun, ok := node.Fun.(*ast.Ident) - if !ok || fun.Name != "append" { - break - } - if sliceIdent, ok := node.Args[0].(*ast.Ident); ok && - v.hasNonZeroInitialLength(sliceIdent) && - !v.hasNoLintOnSameLine(fun) { - v.issues = append(v.issues, - AppendIssue{ - name: sliceIdent.Name, - pos: fun.Pos(), - position: v.fset.Position(fun.Pos()), - }) - } - case *ast.AssignStmt: - for i, right := range node.Rhs { - if right, ok := right.(*ast.CallExpr); ok { - fun, ok := right.Fun.(*ast.Ident) - if !ok || fun.Name != "make" { - continue - } - left := node.Lhs[i] - if len(right.Args) == 2 { - // ignore if not a slice or it has explicit zero length - if !v.isSlice(right.Args[0]) { - continue - } else if lit, ok := right.Args[1].(*ast.BasicLit); ok && lit.Kind == token.INT && lit.Value == "0" { - continue - } - if v.initLenMustBeZero && !v.hasNoLintOnSameLine(fun) { - v.issues = append(v.issues, MustHaveNonZeroInitLenIssue{ - name: v.textFor(left), - pos: node.Pos(), - position: v.fset.Position(node.Pos()), - }) - } - v.recordNonZeroLengthSlices(left) - } - } - } - } - return v -} - -func (v *visitor) textFor(node ast.Node) string { - typeBuf := new(bytes.Buffer) - if err := printer.Fprint(typeBuf, v.fset, node); err != nil { - log.Fatalf("ERROR: unable to print type: %s", err) - } - return typeBuf.String() -} - -func (v *visitor) hasNonZeroInitialLength(ident *ast.Ident) bool { - if ident.Obj == nil { - log.Printf("WARNING: could not determine with %q at %s is a slice (missing object type)", - ident.Name, v.fset.Position(ident.Pos()).String()) - return false - } - _, exists := v.nonZeroLengthSliceDecls[uniqDecl{ - varName: ident.Obj.Name, - decl: ident.Obj.Decl, - }] - return exists -} - -func (v *visitor) recordNonZeroLengthSlices(node ast.Node) { - ident, ok := node.(*ast.Ident) - if !ok { - return - } - if ident.Obj == nil { - return - } - v.nonZeroLengthSliceDecls[uniqDecl{ - varName: ident.Obj.Name, - decl: ident.Obj.Decl, - }] = struct{}{} -} - -func (v *visitor) isSlice(node ast.Node) bool { - // determine type if this is a user-defined type - if ident, ok := node.(*ast.Ident); ok { - obj := ident.Obj - if obj == nil { - if v.info != nil { - _, ok := v.info.ObjectOf(ident).Type().(*types.Slice) - return ok - } - return false - } - spec, ok := obj.Decl.(*ast.TypeSpec) - if !ok { - return false - } - node = spec.Type - } - - if node, ok := node.(*ast.ArrayType); ok { - return node.Len == nil // only slices have zero length - } - return false -} - -func (v *visitor) hasNoLintOnSameLine(node ast.Node) bool { - nolint := regexp.MustCompile(`^\s*nozero\b`) - nodePos := v.fset.Position(node.Pos()) - for _, c := range v.comments { - commentPos := v.fset.Position(c.Pos()) - if commentPos.Line == nodePos.Line && nolint.MatchString(c.Text()) { - return true - } - } - return false -} diff --git a/vendor/github.com/beorn7/perks/LICENSE b/vendor/github.com/beorn7/perks/LICENSE deleted file mode 100644 index 339177be6..000000000 --- a/vendor/github.com/beorn7/perks/LICENSE +++ /dev/null @@ -1,20 +0,0 @@ -Copyright (C) 2013 Blake Mizerany - -Permission is hereby granted, free of charge, to any person obtaining -a copy of this software and associated documentation files (the -"Software"), to deal in the Software without restriction, including -without limitation the rights to use, copy, modify, merge, publish, -distribute, sublicense, and/or sell copies of the Software, and 
to -permit persons to whom the Software is furnished to do so, subject to -the following conditions: - -The above copyright notice and this permission notice shall be -included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/vendor/github.com/beorn7/perks/quantile/exampledata.txt b/vendor/github.com/beorn7/perks/quantile/exampledata.txt deleted file mode 100644 index 1602287d7..000000000 --- a/vendor/github.com/beorn7/perks/quantile/exampledata.txt +++ /dev/null @@ -1,2388 +0,0 @@ -8 -5 -26 -12 -5 -235 -13 -6 -28 -30 -3 -3 -3 -3 -5 -2 -33 -7 -2 -4 -7 -12 -14 -5 -8 -3 -10 -4 -5 -3 -6 -6 -209 -20 -3 -10 -14 -3 -4 -6 -8 -5 -11 -7 -3 -2 -3 -3 -212 -5 -222 -4 -10 -10 -5 -6 -3 -8 -3 -10 -254 -220 -2 -3 -5 -24 -5 -4 -222 -7 -3 -3 -223 -8 -15 -12 -14 -14 -3 -2 -2 -3 -13 -3 -11 -4 -4 -6 -5 -7 -13 -5 -3 -5 -2 -5 -3 -5 -2 -7 -15 -17 -14 -3 -6 -6 -3 -17 -5 -4 -7 -6 -4 -4 -8 -6 -8 -3 -9 -3 -6 -3 -4 -5 -3 -3 -660 -4 -6 -10 -3 -6 -3 -2 -5 -13 -2 -4 -4 -10 -4 -8 -4 -3 -7 -9 -9 -3 -10 -37 -3 -13 -4 -12 -3 -6 -10 -8 -5 -21 -2 -3 -8 -3 -2 -3 -3 -4 -12 -2 -4 -8 -8 -4 -3 -2 -20 -1 -6 -32 -2 -11 -6 -18 -3 -8 -11 -3 -212 -3 -4 -2 -6 -7 -12 -11 -3 -2 -16 -10 -6 -4 -6 -3 -2 -7 -3 -2 -2 -2 -2 -5 -6 -4 -3 -10 -3 -4 -6 -5 -3 -4 -4 -5 -6 -4 -3 -4 -4 -5 -7 -5 -5 -3 -2 -7 -2 -4 -12 -4 -5 -6 -2 -4 -4 -8 -4 -15 -13 -7 -16 -5 -3 -23 -5 -5 -7 -3 -2 -9 -8 -7 -5 -8 -11 -4 -10 -76 -4 -47 -4 -3 -2 -7 -4 -2 -3 -37 -10 -4 -2 -20 -5 -4 -4 -10 -10 -4 -3 -7 -23 -240 -7 -13 -5 -5 -3 -3 -2 -5 -4 -2 -8 -7 -19 -2 -23 -8 -7 -2 -5 -3 -8 -3 -8 -13 -5 -5 -5 -2 -3 -23 -4 -9 -8 -4 -3 -3 -5 -220 -2 -3 -4 -6 -14 -3 -53 -6 -2 -5 -18 -6 -3 -219 -6 -5 -2 -5 -3 -6 -5 -15 -4 -3 -17 -3 -2 -4 -7 -2 -3 -3 -4 -4 -3 -2 -664 -6 -3 -23 -5 -5 -16 -5 -8 -2 -4 -2 -24 -12 -3 -2 -3 -5 -8 -3 -5 -4 -3 -14 -3 -5 -8 -2 -3 -7 -9 -4 -2 -3 -6 -8 -4 -3 -4 -6 -5 -3 -3 -6 -3 -19 -4 -4 -6 -3 -6 -3 -5 -22 -5 -4 -4 -3 -8 -11 -4 -9 -7 -6 -13 -4 -4 -4 -6 -17 -9 -3 -3 -3 -4 -3 -221 -5 -11 -3 -4 -2 -12 -6 -3 -5 -7 -5 -7 -4 -9 -7 -14 -37 -19 -217 -16 -3 -5 -2 -2 -7 -19 -7 -6 -7 -4 -24 -5 -11 -4 -7 -7 -9 -13 -3 -4 -3 -6 -28 -4 -4 -5 -5 -2 -5 -6 -4 -4 -6 -10 -5 -4 -3 -2 -3 -3 -6 -5 -5 -4 -3 -2 -3 -7 -4 -6 -18 -16 -8 -16 -4 -5 -8 -6 -9 -13 -1545 -6 -215 -6 -5 -6 -3 -45 -31 -5 -2 -2 -4 -3 -3 -2 -5 -4 -3 -5 -7 -7 -4 -5 -8 -5 -4 -749 -2 -31 -9 -11 -2 -11 -5 -4 -4 -7 -9 -11 -4 -5 -4 -7 -3 -4 -6 -2 -15 -3 -4 -3 -4 -3 -5 -2 -13 -5 -5 -3 -3 -23 -4 -4 -5 -7 -4 -13 -2 -4 -3 -4 -2 -6 -2 -7 -3 -5 -5 -3 -29 -5 -4 -4 -3 -10 -2 -3 -79 -16 -6 -6 -7 -7 -3 -5 -5 -7 -4 -3 -7 -9 -5 -6 -5 -9 -6 -3 -6 -4 -17 -2 -10 -9 -3 -6 -2 -3 -21 -22 -5 -11 -4 -2 -17 -2 -224 -2 -14 -3 -4 -4 -2 -4 -4 -4 -4 -5 -3 -4 -4 -10 -2 -6 -3 -3 -5 -7 -2 -7 -5 -6 -3 -218 -2 -2 -5 -2 -6 -3 -5 -222 -14 -6 -33 -3 -2 -5 -3 -3 -3 -9 -5 -3 -3 -2 -7 -4 -3 -4 -3 -5 -6 -5 -26 -4 -13 -9 -7 -3 -221 -3 -3 -4 -4 -4 -4 -2 -18 -5 -3 -7 -9 -6 -8 -3 -10 -3 -11 -9 -5 -4 -17 -5 -5 -6 -6 -3 -2 -4 -12 -17 -6 -7 -218 -4 -2 -4 -10 -3 -5 -15 -3 -9 -4 -3 -3 -6 -29 -3 -3 -4 -5 -5 -3 -8 -5 -6 -6 -7 -5 -3 -5 -3 -29 -2 -31 -5 -15 -24 -16 -5 -207 -4 -3 -3 -2 -15 -4 -4 -13 -5 
-5 -4 -6 -10 -2 -7 -8 -4 -6 -20 -5 -3 -4 -3 -12 -12 -5 -17 -7 -3 -3 -3 -6 -10 -3 -5 -25 -80 -4 -9 -3 -2 -11 -3 -3 -2 -3 -8 -7 -5 -5 -19 -5 -3 -3 -12 -11 -2 -6 -5 -5 -5 -3 -3 -3 -4 -209 -14 -3 -2 -5 -19 -4 -4 -3 -4 -14 -5 -6 -4 -13 -9 -7 -4 -7 -10 -2 -9 -5 -7 -2 -8 -4 -6 -5 -5 -222 -8 -7 -12 -5 -216 -3 -4 -4 -6 -3 -14 -8 -7 -13 -4 -3 -3 -3 -3 -17 -5 -4 -3 -33 -6 -6 -33 -7 -5 -3 -8 -7 -5 -2 -9 -4 -2 -233 -24 -7 -4 -8 -10 -3 -4 -15 -2 -16 -3 -3 -13 -12 -7 -5 -4 -207 -4 -2 -4 -27 -15 -2 -5 -2 -25 -6 -5 -5 -6 -13 -6 -18 -6 -4 -12 -225 -10 -7 -5 -2 -2 -11 -4 -14 -21 -8 -10 -3 -5 -4 -232 -2 -5 -5 -3 -7 -17 -11 -6 -6 -23 -4 -6 -3 -5 -4 -2 -17 -3 -6 -5 -8 -3 -2 -2 -14 -9 -4 -4 -2 -5 -5 -3 -7 -6 -12 -6 -10 -3 -6 -2 -2 -19 -5 -4 -4 -9 -2 -4 -13 -3 -5 -6 -3 -6 -5 -4 -9 -6 -3 -5 -7 -3 -6 -6 -4 -3 -10 -6 -3 -221 -3 -5 -3 -6 -4 -8 -5 -3 -6 -4 -4 -2 -54 -5 -6 -11 -3 -3 -4 -4 -4 -3 -7 -3 -11 -11 -7 -10 -6 -13 -223 -213 -15 -231 -7 -3 -7 -228 -2 -3 -4 -4 -5 -6 -7 -4 -13 -3 -4 -5 -3 -6 -4 -6 -7 -2 -4 -3 -4 -3 -3 -6 -3 -7 -3 -5 -18 -5 -6 -8 -10 -3 -3 -3 -2 -4 -2 -4 -4 -5 -6 -6 -4 -10 -13 -3 -12 -5 -12 -16 -8 -4 -19 -11 -2 -4 -5 -6 -8 -5 -6 -4 -18 -10 -4 -2 -216 -6 -6 -6 -2 -4 -12 -8 -3 -11 -5 -6 -14 -5 -3 -13 -4 -5 -4 -5 -3 -28 -6 -3 -7 -219 -3 -9 -7 -3 -10 -6 -3 -4 -19 -5 -7 -11 -6 -15 -19 -4 -13 -11 -3 -7 -5 -10 -2 -8 -11 -2 -6 -4 -6 -24 -6 -3 -3 -3 -3 -6 -18 -4 -11 -4 -2 -5 -10 -8 -3 -9 -5 -3 -4 -5 -6 -2 -5 -7 -4 -4 -14 -6 -4 -4 -5 -5 -7 -2 -4 -3 -7 -3 -3 -6 -4 -5 -4 -4 -4 -3 -3 -3 -3 -8 -14 -2 -3 -5 -3 -2 -4 -5 -3 -7 -3 -3 -18 -3 -4 -4 -5 -7 -3 -3 -3 -13 -5 -4 -8 -211 -5 -5 -3 -5 -2 -5 -4 -2 -655 -6 -3 -5 -11 -2 -5 -3 -12 -9 -15 -11 -5 -12 -217 -2 -6 -17 -3 -3 -207 -5 -5 -4 -5 -9 -3 -2 -8 -5 -4 -3 -2 -5 -12 -4 -14 -5 -4 -2 -13 -5 -8 -4 -225 -4 -3 -4 -5 -4 -3 -3 -6 -23 -9 -2 -6 -7 -233 -4 -4 -6 -18 -3 -4 -6 -3 -4 -4 -2 -3 -7 -4 -13 -227 -4 -3 -5 -4 -2 -12 -9 -17 -3 -7 -14 -6 -4 -5 -21 -4 -8 -9 -2 -9 -25 -16 -3 -6 -4 -7 -8 -5 -2 -3 -5 -4 -3 -3 -5 -3 -3 -3 -2 -3 -19 -2 -4 -3 -4 -2 -3 -4 -4 -2 -4 -3 -3 -3 -2 -6 -3 -17 -5 -6 -4 -3 -13 -5 -3 -3 -3 -4 -9 -4 -2 -14 -12 -4 -5 -24 -4 -3 -37 -12 -11 -21 -3 -4 -3 -13 -4 -2 -3 -15 -4 -11 -4 -4 -3 -8 -3 -4 -4 -12 -8 -5 -3 -3 -4 -2 -220 -3 -5 -223 -3 -3 -3 -10 -3 -15 -4 -241 -9 -7 -3 -6 -6 -23 -4 -13 -7 -3 -4 -7 -4 -9 -3 -3 -4 -10 -5 -5 -1 -5 -24 -2 -4 -5 -5 -6 -14 -3 -8 -2 -3 -5 -13 -13 -3 -5 -2 -3 -15 -3 -4 -2 -10 -4 -4 -4 -5 -5 -3 -5 -3 -4 -7 -4 -27 -3 -6 -4 -15 -3 -5 -6 -6 -5 -4 -8 -3 -9 -2 -6 -3 -4 -3 -7 -4 -18 -3 -11 -3 -3 -8 -9 -7 -24 -3 -219 -7 -10 -4 -5 -9 -12 -2 -5 -4 -4 -4 -3 -3 -19 -5 -8 -16 -8 -6 -22 -3 -23 -3 -242 -9 -4 -3 -3 -5 -7 -3 -3 -5 -8 -3 -7 -5 -14 -8 -10 -3 -4 -3 -7 -4 -6 -7 -4 -10 -4 -3 -11 -3 -7 -10 -3 -13 -6 -8 -12 -10 -5 -7 -9 -3 -4 -7 -7 -10 -8 -30 -9 -19 -4 -3 -19 -15 -4 -13 -3 -215 -223 -4 -7 -4 -8 -17 -16 -3 -7 -6 -5 -5 -4 -12 -3 -7 -4 -4 -13 -4 -5 -2 -5 -6 -5 -6 -6 -7 -10 -18 -23 -9 -3 -3 -6 -5 -2 -4 -2 -7 -3 -3 -2 -5 -5 -14 -10 -224 -6 -3 -4 -3 -7 -5 -9 -3 -6 -4 -2 -5 -11 -4 -3 -3 -2 -8 -4 -7 -4 -10 -7 -3 -3 -18 -18 -17 -3 -3 -3 -4 -5 -3 -3 -4 -12 -7 -3 -11 -13 -5 -4 -7 -13 -5 -4 -11 -3 -12 -3 -6 -4 -4 -21 -4 -6 -9 -5 -3 -10 -8 -4 -6 -4 -4 -6 -5 -4 -8 -6 -4 -6 -4 -4 -5 -9 -6 -3 -4 -2 -9 -3 -18 -2 -4 -3 -13 -3 -6 -6 -8 -7 -9 -3 -2 -16 -3 -4 -6 -3 -2 -33 -22 -14 -4 -9 -12 -4 -5 -6 -3 -23 -9 -4 -3 -5 -5 -3 -4 -5 -3 -5 -3 -10 -4 -5 -5 -8 -4 -4 -6 -8 -5 -4 -3 -4 -6 -3 -3 -3 -5 -9 -12 -6 -5 -9 -3 -5 -3 -2 -2 -2 -18 -3 -2 -21 -2 -5 -4 -6 -4 -5 -10 -3 -9 -3 -2 -10 -7 -3 -6 -6 -4 -4 -8 -12 -7 -3 -7 -3 -3 -9 -3 -4 -5 -4 -4 -5 -5 -10 -15 -4 -4 -14 
-6 -227 -3 -14 -5 -216 -22 -5 -4 -2 -2 -6 -3 -4 -2 -9 -9 -4 -3 -28 -13 -11 -4 -5 -3 -3 -2 -3 -3 -5 -3 -4 -3 -5 -23 -26 -3 -4 -5 -6 -4 -6 -3 -5 -5 -3 -4 -3 -2 -2 -2 -7 -14 -3 -6 -7 -17 -2 -2 -15 -14 -16 -4 -6 -7 -13 -6 -4 -5 -6 -16 -3 -3 -28 -3 -6 -15 -3 -9 -2 -4 -6 -3 -3 -22 -4 -12 -6 -7 -2 -5 -4 -10 -3 -16 -6 -9 -2 -5 -12 -7 -5 -5 -5 -5 -2 -11 -9 -17 -4 -3 -11 -7 -3 -5 -15 -4 -3 -4 -211 -8 -7 -5 -4 -7 -6 -7 -6 -3 -6 -5 -6 -5 -3 -4 -4 -26 -4 -6 -10 -4 -4 -3 -2 -3 -3 -4 -5 -9 -3 -9 -4 -4 -5 -5 -8 -2 -4 -2 -3 -8 -4 -11 -19 -5 -8 -6 -3 -5 -6 -12 -3 -2 -4 -16 -12 -3 -4 -4 -8 -6 -5 -6 -6 -219 -8 -222 -6 -16 -3 -13 -19 -5 -4 -3 -11 -6 -10 -4 -7 -7 -12 -5 -3 -3 -5 -6 -10 -3 -8 -2 -5 -4 -7 -2 -4 -4 -2 -12 -9 -6 -4 -2 -40 -2 -4 -10 -4 -223 -4 -2 -20 -6 -7 -24 -5 -4 -5 -2 -20 -16 -6 -5 -13 -2 -3 -3 -19 -3 -2 -4 -5 -6 -7 -11 -12 -5 -6 -7 -7 -3 -5 -3 -5 -3 -14 -3 -4 -4 -2 -11 -1 -7 -3 -9 -6 -11 -12 -5 -8 -6 -221 -4 -2 -12 -4 -3 -15 -4 -5 -226 -7 -218 -7 -5 -4 -5 -18 -4 -5 -9 -4 -4 -2 -9 -18 -18 -9 -5 -6 -6 -3 -3 -7 -3 -5 -4 -4 -4 -12 -3 -6 -31 -5 -4 -7 -3 -6 -5 -6 -5 -11 -2 -2 -11 -11 -6 -7 -5 -8 -7 -10 -5 -23 -7 -4 -3 -5 -34 -2 -5 -23 -7 -3 -6 -8 -4 -4 -4 -2 -5 -3 -8 -5 -4 -8 -25 -2 -3 -17 -8 -3 -4 -8 -7 -3 -15 -6 -5 -7 -21 -9 -5 -6 -6 -5 -3 -2 -3 -10 -3 -6 -3 -14 -7 -4 -4 -8 -7 -8 -2 -6 -12 -4 -213 -6 -5 -21 -8 -2 -5 -23 -3 -11 -2 -3 -6 -25 -2 -3 -6 -7 -6 -6 -4 -4 -6 -3 -17 -9 -7 -6 -4 -3 -10 -7 -2 -3 -3 -3 -11 -8 -3 -7 -6 -4 -14 -36 -3 -4 -3 -3 -22 -13 -21 -4 -2 -7 -4 -4 -17 -15 -3 -7 -11 -2 -4 -7 -6 -209 -6 -3 -2 -2 -24 -4 -9 -4 -3 -3 -3 -29 -2 -2 -4 -3 -3 -5 -4 -6 -3 -3 -2 -4 diff --git a/vendor/github.com/beorn7/perks/quantile/stream.go b/vendor/github.com/beorn7/perks/quantile/stream.go deleted file mode 100644 index d7d14f8eb..000000000 --- a/vendor/github.com/beorn7/perks/quantile/stream.go +++ /dev/null @@ -1,316 +0,0 @@ -// Package quantile computes approximate quantiles over an unbounded data -// stream within low memory and CPU bounds. -// -// A small amount of accuracy is traded to achieve the above properties. -// -// Multiple streams can be merged before calling Query to generate a single set -// of results. This is meaningful when the streams represent the same type of -// data. See Merge and Samples. -// -// For more detailed information about the algorithm used, see: -// -// Effective Computation of Biased Quantiles over Data Streams -// -// http://www.cs.rutgers.edu/~muthu/bquant.pdf -package quantile - -import ( - "math" - "sort" -) - -// Sample holds an observed value and meta information for compression. JSON -// tags have been added for convenience. -type Sample struct { - Value float64 `json:",string"` - Width float64 `json:",string"` - Delta float64 `json:",string"` -} - -// Samples represents a slice of samples. It implements sort.Interface. -type Samples []Sample - -func (a Samples) Len() int { return len(a) } -func (a Samples) Less(i, j int) bool { return a[i].Value < a[j].Value } -func (a Samples) Swap(i, j int) { a[i], a[j] = a[j], a[i] } - -type invariant func(s *stream, r float64) float64 - -// NewLowBiased returns an initialized Stream for low-biased quantiles -// (e.g. 0.01, 0.1, 0.5) where the needed quantiles are not known a priori, but -// error guarantees can still be given even for the lower ranks of the data -// distribution. -// -// The provided epsilon is a relative error, i.e. the true quantile of a value -// returned by a query is guaranteed to be within (1±Epsilon)*Quantile. 
-// -// See http://www.cs.rutgers.edu/~muthu/bquant.pdf for time, space, and error -// properties. -func NewLowBiased(epsilon float64) *Stream { - ƒ := func(s *stream, r float64) float64 { - return 2 * epsilon * r - } - return newStream(ƒ) -} - -// NewHighBiased returns an initialized Stream for high-biased quantiles -// (e.g. 0.01, 0.1, 0.5) where the needed quantiles are not known a priori, but -// error guarantees can still be given even for the higher ranks of the data -// distribution. -// -// The provided epsilon is a relative error, i.e. the true quantile of a value -// returned by a query is guaranteed to be within 1-(1±Epsilon)*(1-Quantile). -// -// See http://www.cs.rutgers.edu/~muthu/bquant.pdf for time, space, and error -// properties. -func NewHighBiased(epsilon float64) *Stream { - ƒ := func(s *stream, r float64) float64 { - return 2 * epsilon * (s.n - r) - } - return newStream(ƒ) -} - -// NewTargeted returns an initialized Stream concerned with a particular set of -// quantile values that are supplied a priori. Knowing these a priori reduces -// space and computation time. The targets map maps the desired quantiles to -// their absolute errors, i.e. the true quantile of a value returned by a query -// is guaranteed to be within (Quantile±Epsilon). -// -// See http://www.cs.rutgers.edu/~muthu/bquant.pdf for time, space, and error properties. -func NewTargeted(targetMap map[float64]float64) *Stream { - // Convert map to slice to avoid slow iterations on a map. - // ƒ is called on the hot path, so converting the map to a slice - // beforehand results in significant CPU savings. - targets := targetMapToSlice(targetMap) - - ƒ := func(s *stream, r float64) float64 { - var m = math.MaxFloat64 - var f float64 - for _, t := range targets { - if t.quantile*s.n <= r { - f = (2 * t.epsilon * r) / t.quantile - } else { - f = (2 * t.epsilon * (s.n - r)) / (1 - t.quantile) - } - if f < m { - m = f - } - } - return m - } - return newStream(ƒ) -} - -type target struct { - quantile float64 - epsilon float64 -} - -func targetMapToSlice(targetMap map[float64]float64) []target { - targets := make([]target, 0, len(targetMap)) - - for quantile, epsilon := range targetMap { - t := target{ - quantile: quantile, - epsilon: epsilon, - } - targets = append(targets, t) - } - - return targets -} - -// Stream computes quantiles for a stream of float64s. It is not thread-safe by -// design. Take care when using across multiple goroutines. -type Stream struct { - *stream - b Samples - sorted bool -} - -func newStream(ƒ invariant) *Stream { - x := &stream{ƒ: ƒ} - return &Stream{x, make(Samples, 0, 500), true} -} - -// Insert inserts v into the stream. -func (s *Stream) Insert(v float64) { - s.insert(Sample{Value: v, Width: 1}) -} - -func (s *Stream) insert(sample Sample) { - s.b = append(s.b, sample) - s.sorted = false - if len(s.b) == cap(s.b) { - s.flush() - } -} - -// Query returns the computed qth percentiles value. If s was created with -// NewTargeted, and q is not in the set of quantiles provided a priori, Query -// will return an unspecified result. -func (s *Stream) Query(q float64) float64 { - if !s.flushed() { - // Fast path when there hasn't been enough data for a flush; - // this also yields better accuracy for small sets of data. - l := len(s.b) - if l == 0 { - return 0 - } - i := int(math.Ceil(float64(l) * q)) - if i > 0 { - i -= 1 - } - s.maybeSort() - return s.b[i].Value - } - s.flush() - return s.stream.query(q) -} - -// Merge merges samples into the underlying streams samples. 
This is handy when -// merging multiple streams from separate threads, database shards, etc. -// -// ATTENTION: This method is broken and does not yield correct results. The -// underlying algorithm is not capable of merging streams correctly. -func (s *Stream) Merge(samples Samples) { - sort.Sort(samples) - s.stream.merge(samples) -} - -// Reset reinitializes and clears the list reusing the samples buffer memory. -func (s *Stream) Reset() { - s.stream.reset() - s.b = s.b[:0] -} - -// Samples returns stream samples held by s. -func (s *Stream) Samples() Samples { - if !s.flushed() { - return s.b - } - s.flush() - return s.stream.samples() -} - -// Count returns the total number of samples observed in the stream -// since initialization. -func (s *Stream) Count() int { - return len(s.b) + s.stream.count() -} - -func (s *Stream) flush() { - s.maybeSort() - s.stream.merge(s.b) - s.b = s.b[:0] -} - -func (s *Stream) maybeSort() { - if !s.sorted { - s.sorted = true - sort.Sort(s.b) - } -} - -func (s *Stream) flushed() bool { - return len(s.stream.l) > 0 -} - -type stream struct { - n float64 - l []Sample - ƒ invariant -} - -func (s *stream) reset() { - s.l = s.l[:0] - s.n = 0 -} - -func (s *stream) insert(v float64) { - s.merge(Samples{{v, 1, 0}}) -} - -func (s *stream) merge(samples Samples) { - // TODO(beorn7): This tries to merge not only individual samples, but - // whole summaries. The paper doesn't mention merging summaries at - // all. Unittests show that the merging is inaccurate. Find out how to - // do merges properly. - var r float64 - i := 0 - for _, sample := range samples { - for ; i < len(s.l); i++ { - c := s.l[i] - if c.Value > sample.Value { - // Insert at position i. - s.l = append(s.l, Sample{}) - copy(s.l[i+1:], s.l[i:]) - s.l[i] = Sample{ - sample.Value, - sample.Width, - math.Max(sample.Delta, math.Floor(s.ƒ(s, r))-1), - // TODO(beorn7): How to calculate delta correctly? - } - i++ - goto inserted - } - r += c.Width - } - s.l = append(s.l, Sample{sample.Value, sample.Width, 0}) - i++ - inserted: - s.n += sample.Width - r += sample.Width - } - s.compress() -} - -func (s *stream) count() int { - return int(s.n) -} - -func (s *stream) query(q float64) float64 { - t := math.Ceil(q * s.n) - t += math.Ceil(s.ƒ(s, t) / 2) - p := s.l[0] - var r float64 - for _, c := range s.l[1:] { - r += p.Width - if r+c.Width+c.Delta > t { - return p.Value - } - p = c - } - return p.Value -} - -func (s *stream) compress() { - if len(s.l) < 2 { - return - } - x := s.l[len(s.l)-1] - xi := len(s.l) - 1 - r := s.n - 1 - x.Width - - for i := len(s.l) - 2; i >= 0; i-- { - c := s.l[i] - if c.Width+x.Width+x.Delta <= s.ƒ(s, r) { - x.Width += c.Width - s.l[xi] = x - // Remove element at i. 
- copy(s.l[i:], s.l[i+1:]) - s.l = s.l[:len(s.l)-1] - xi -= 1 - } else { - x = c - xi = i - } - r -= c.Width - } -} - -func (s *stream) samples() Samples { - samples := make(Samples, len(s.l)) - copy(samples, s.l) - return samples -} diff --git a/vendor/github.com/bkielbasa/cyclop/LICENSE b/vendor/github.com/bkielbasa/cyclop/LICENSE deleted file mode 100644 index b4a776a40..000000000 --- a/vendor/github.com/bkielbasa/cyclop/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -MIT License - -Copyright (c) 2020 Bartłomiej Klimczak - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. diff --git a/vendor/github.com/bkielbasa/cyclop/pkg/analyzer/analyzer.go b/vendor/github.com/bkielbasa/cyclop/pkg/analyzer/analyzer.go deleted file mode 100644 index eaf408d6f..000000000 --- a/vendor/github.com/bkielbasa/cyclop/pkg/analyzer/analyzer.go +++ /dev/null @@ -1,107 +0,0 @@ -package analyzer - -import ( - "flag" - "go/ast" - "go/token" - "strings" - - "golang.org/x/tools/go/analysis" -) - -//nolint:gochecknoglobals -var flagSet flag.FlagSet - -//nolint:gochecknoglobals -var ( - maxComplexity int - packageAverage float64 - skipTests bool -) - -//nolint:gochecknoinits -func init() { - flagSet.IntVar(&maxComplexity, "maxComplexity", 10, "max complexity the function can have") - flagSet.Float64Var(&packageAverage, "packageAverage", 0, "max average complexity in package") - flagSet.BoolVar(&skipTests, "skipTests", false, "should the linter execute on test files as well") -} - -func NewAnalyzer() *analysis.Analyzer { - return &analysis.Analyzer{ - Name: "cyclop", - Doc: "calculates cyclomatic complexity", - Run: run, - Flags: flagSet, - } -} - -func run(pass *analysis.Pass) (interface{}, error) { - var sum, count float64 - var pkgName string - var pkgPos token.Pos - - for _, f := range pass.Files { - ast.Inspect(f, func(node ast.Node) bool { - f, ok := node.(*ast.FuncDecl) - if !ok { - if node == nil { - return true - } - if file, ok := node.(*ast.File); ok { - pkgName = file.Name.Name - pkgPos = node.Pos() - } - // we check function by function - return true - } - - if skipTests && testFunc(f) { - return true - } - - count++ - comp := complexity(f) - sum += float64(comp) - if comp > maxComplexity { - pass.Reportf(node.Pos(), "calculated cyclomatic complexity for function %s is %d, max is %d", f.Name.Name, comp, maxComplexity) - } - - return true - }) - } - - if packageAverage > 0 { - avg := sum / count - if avg > packageAverage { - pass.Reportf(pkgPos, "the average complexity for the package %s is %f, max is %f", pkgName, avg, packageAverage) - } - } 
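For a concrete sense of the metric this removed analyzer computes: cyclop's complexityVisitor (just below) starts at 1 for the function declaration and adds 1 for each if, for, range, case or comm clause, and each && or || operator. Under that rule, a sketch like the following hypothetical function would score 5:

```go
package example

// Complexity 5 under cyclop's counting rule:
// 1 (func decl) + 1 (outer if) + 1 (&&) + 1 (range loop) + 1 (inner if).
func anyBelow(values []int, limit int) bool {
	if limit <= 0 && len(values) == 0 {
		return false
	}

	for _, v := range values {
		if v < limit {
			return true
		}
	}

	return false
}
```

With the default maxComplexity of 10 registered in the flags above, such a function would not be reported.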
- - return nil, nil -} - -func testFunc(f *ast.FuncDecl) bool { - return strings.HasPrefix(f.Name.Name, "Test") -} - -func complexity(fn *ast.FuncDecl) int { - v := complexityVisitor{} - ast.Walk(&v, fn) - return v.Complexity -} - -type complexityVisitor struct { - Complexity int -} - -func (v *complexityVisitor) Visit(n ast.Node) ast.Visitor { - switch n := n.(type) { - case *ast.FuncDecl, *ast.IfStmt, *ast.ForStmt, *ast.RangeStmt, *ast.CaseClause, *ast.CommClause: - v.Complexity++ - case *ast.BinaryExpr: - if n.Op == token.LAND || n.Op == token.LOR { - v.Complexity++ - } - } - return v -} diff --git a/vendor/github.com/blizzy78/varnamelen/.editorconfig b/vendor/github.com/blizzy78/varnamelen/.editorconfig deleted file mode 100644 index 7b6461545..000000000 --- a/vendor/github.com/blizzy78/varnamelen/.editorconfig +++ /dev/null @@ -1,13 +0,0 @@ -root = true - -[**] -charset = utf-8 -end_of_line = lf -insert_final_newline = true -indent_style = tab -indent_size = 4 -trim_trailing_whitespace = true - -[**/*.yml] -indent_style = space -indent_size = 2 diff --git a/vendor/github.com/blizzy78/varnamelen/.gitignore b/vendor/github.com/blizzy78/varnamelen/.gitignore deleted file mode 100644 index 1a4a71669..000000000 --- a/vendor/github.com/blizzy78/varnamelen/.gitignore +++ /dev/null @@ -1 +0,0 @@ -/cmd/__debug_bin diff --git a/vendor/github.com/blizzy78/varnamelen/.golangci.yml b/vendor/github.com/blizzy78/varnamelen/.golangci.yml deleted file mode 100644 index 566572363..000000000 --- a/vendor/github.com/blizzy78/varnamelen/.golangci.yml +++ /dev/null @@ -1,70 +0,0 @@ -# https://github.com/golangci/golangci-lint/issues/456#issuecomment-617470264 -issues: - exclude-use-default: false - exclude: - # errcheck: Almost all programs ignore errors on these functions and in most cases it's ok - - Error return value of .((os\.)?std(out|err)\..*|.*print(f|ln)?|os\.(Un)?Setenv). 
is not checked - # golint: False positive when tests are defined in package 'test' - - func name will be used as test\.Test.* by other packages, and that stutters; consider calling this - # gosec: Duplicated errcheck checks - - G104 - # gosec: Too many issues in popular repos - - (Expect directory permissions to be 0750 or less|Expect file permissions to be 0600 or less) - # gosec: False positive is triggered by 'src, err := ioutil.ReadFile(filename)' - - Potential file inclusion via variable - -linters: - enable: - - asciicheck - - bodyclose - - cyclop - - durationcheck - - errname - - errorlint - - exportloopref - - forcetypeassert - - gocognit - - gocritic - - goerr113 - - gofmt - - goprintffuncname - - gosec - - ifshort - - nakedret - - nestif - - nilerr - - noctx - - nolintlint - - prealloc - - predeclared - - promlinter - - revive - - rowserrcheck - - sqlclosecheck - - stylecheck - - thelper - - tparallel - - unconvert - - unparam - - varnamelen - - wastedassign - - wrapcheck - - wsl - -linters-settings: - gocognit: - min-complexity: 15 - nakedret: - max-func-lines: 0 - nolintlint: - allow-unused: false - allow-leading-space: false - require-explanation: true - require-specific: true - unused: - go: 1.16 - varnamelen: - check-return: true - ignore-type-assert-ok: true - ignore-map-index-ok: true - ignore-chan-recv-ok: true diff --git a/vendor/github.com/blizzy78/varnamelen/LICENSE b/vendor/github.com/blizzy78/varnamelen/LICENSE deleted file mode 100644 index c45156a2c..000000000 --- a/vendor/github.com/blizzy78/varnamelen/LICENSE +++ /dev/null @@ -1,18 +0,0 @@ -Copyright 2021-2022 Maik Schreiber - -Permission is hereby granted, free of charge, to any person obtaining a copy of -this software and associated documentation files (the "Software"), to deal in -the Software without restriction, including without limitation the rights to -use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies -of the Software, and to permit persons to whom the Software is furnished to do -so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS -FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR -COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER -IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/vendor/github.com/blizzy78/varnamelen/README.md b/vendor/github.com/blizzy78/varnamelen/README.md deleted file mode 100644 index 02131ff29..000000000 --- a/vendor/github.com/blizzy78/varnamelen/README.md +++ /dev/null @@ -1,155 +0,0 @@ -[![GoDoc](https://pkg.go.dev/badge/github.com/blizzy78/varnamelen)](https://pkg.go.dev/github.com/blizzy78/varnamelen) - - -varnamelen -========== - -A Go Analyzer that checks that the length of a variable's name matches its usage scope: - -Variables with short names can be hard to use if the variable is used over a longer span of lines of code. -A longer variable name may be easier to comprehend. - -The analyzer also checks method receiver names, named return values, and type parameter names. - -Arbitrary declarations such as `f *foo` can be ignored, as well as idiomatic `ok` variables. 
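As a rough illustration of what does get reported under the defaults described further down (max-distance 5, min-name-length 3), consider this hypothetical function; the diagnostic wording mirrors the example output shown below:

```go
package example

import "fmt"

func sum(values []int) int {
	t := 0 // short name, and its last use is well beyond the default max-distance of 5 lines

	for _, v := range values { // `v` is fine: it is only used within this small scope
		t += v
	}

	fmt.Println("some intermediate work happens here")
	fmt.Println("and a few more lines of it")

	return t // varnamelen: variable name 't' is too short for the scope of its usage
}
```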
-Conventional Go parameters such as `ctx context.Context` or `t *testing.T` will always be ignored. - -**Example output** - -``` -test.go:4:2: variable name 'x' is too short for the scope of its usage (varnamelen) - x := 123 - ^ -test.go:6:2: variable name 'i' is too short for the scope of its usage (varnamelen) - i := 10 - ^ -``` - - -golangci-lint Integration -------------------------- - -varnamelen is integrated into [golangci-lint] (though it may not always be the most recent version.) - -Example configuration for golangci-lint: - -```yaml -linters-settings: - varnamelen: - # The longest distance, in source lines, that is being considered a "small scope." (defaults to 5) - # Variables used in at most this many lines will be ignored. - max-distance: 5 - # The minimum length of a variable's name that is considered "long." (defaults to 3) - # Variable names that are at least this long will be ignored. - min-name-length: 3 - # Check method receiver. (defaults to false) - check-receiver: false - # Check named return values. (defaults to false) - check-return: false - # Check type parameters. (defaults to false) - check-type-param: false - # Ignore "ok" variables that hold the bool return value of a type assertion. (defaults to false) - ignore-type-assert-ok: false - # Ignore "ok" variables that hold the bool return value of a map index. (defaults to false) - ignore-map-index-ok: false - # Ignore "ok" variables that hold the bool return value of a channel receive. (defaults to false) - ignore-chan-recv-ok: false - # Optional list of variable names that should be ignored completely. (defaults to empty list) - ignore-names: - - err - # Optional list of variable declarations that should be ignored completely. (defaults to empty list) - # Entries must be in one of the following forms (see below for examples): - # - for variables, parameters, or named return values: - # - - # - * - # - for type parameters: - # - - # - for constants: - # - const - ignore-decls: - - c echo.Context - - t testing.T - - f *foo.Bar - - e error - - i int - - const C - - T any -``` - - -Standalone Usage ----------------- - -The `cmd/` folder provides a standalone command line utility. You can build it like this: - -``` -go build -o varnamelen ./cmd/ -``` - -**Usage** - -``` -varnamelen: checks that the length of a variable's name matches its scope - -Usage: varnamelen [-flag] [package] - -A variable with a short name can be hard to use if the variable is used -over a longer span of lines of code. A longer variable name may be easier -to comprehend. 
- -Flags: - -V print version and exit - -all - no effect (deprecated) - -c int - display offending line with this many lines of context (default -1) - -checkReceiver - check method receiver names - -checkReturn - check named return values - -checkTypeParam - check type parameter names - -cpuprofile string - write CPU profile to this file - -debug string - debug flags, any subset of "fpstv" - -fix - apply all suggested fixes - -flags - print analyzer flags in JSON - -ignoreChanRecvOk - ignore 'ok' variables that hold the bool return value of a channel receive - -ignoreDecls value - comma-separated list of ignored variable declarations - -ignoreMapIndexOk - ignore 'ok' variables that hold the bool return value of a map index - -ignoreNames value - comma-separated list of ignored variable names - -ignoreTypeAssertOk - ignore 'ok' variables that hold the bool return value of a type assertion - -json - emit JSON output - -maxDistance int - maximum number of lines of variable usage scope considered 'short' (default 5) - -memprofile string - write memory profile to this file - -minNameLength int - minimum length of variable name considered 'long' (default 3) - -source - no effect (deprecated) - -tags string - no effect (deprecated) - -trace string - write trace log to this file - -v no effect (deprecated) -``` - - -License -------- - -This package is licensed under the MIT license. - - - -[golangci-lint]: https://github.com/golangci/golangci-lint diff --git a/vendor/github.com/blizzy78/varnamelen/doc.go b/vendor/github.com/blizzy78/varnamelen/doc.go deleted file mode 100644 index d63c71cfb..000000000 --- a/vendor/github.com/blizzy78/varnamelen/doc.go +++ /dev/null @@ -1,3 +0,0 @@ -// Package varnamelen implements an analyzer checking that the length of a variable's name -// matches its usage scope. -package varnamelen diff --git a/vendor/github.com/blizzy78/varnamelen/flags.go b/vendor/github.com/blizzy78/varnamelen/flags.go deleted file mode 100644 index ee80774f9..000000000 --- a/vendor/github.com/blizzy78/varnamelen/flags.go +++ /dev/null @@ -1,109 +0,0 @@ -package varnamelen - -import "strings" - -// stringsValue is the value of a list-of-strings flag. -type stringsValue struct { - Values []string -} - -// declarationsValue is the value of a list-of-declarations flag. -type declarationsValue struct { - Values []declaration -} - -// Set implements Value. -func (sv *stringsValue) Set(values string) error { - if strings.TrimSpace(values) == "" { - sv.Values = nil - return nil - } - - parts := strings.Split(values, ",") - - sv.Values = make([]string, len(parts)) - - for i, part := range parts { - sv.Values[i] = strings.TrimSpace(part) - } - - return nil -} - -// String implements Value. -func (sv *stringsValue) String() string { - return strings.Join(sv.Values, ",") -} - -// contains returns true if sv contains s. -func (sv *stringsValue) contains(s string) bool { - for _, v := range sv.Values { - if v == s { - return true - } - } - - return false -} - -// Set implements Value. -func (dv *declarationsValue) Set(values string) error { - if strings.TrimSpace(values) == "" { - dv.Values = nil - return nil - } - - parts := strings.Split(values, ",") - - dv.Values = make([]declaration, len(parts)) - - for idx, part := range parts { - dv.Values[idx] = parseDeclaration(strings.TrimSpace(part)) - } - - return nil -} - -// String implements Value. 
-func (dv *declarationsValue) String() string { - parts := make([]string, len(dv.Values)) - - for idx, val := range dv.Values { - parts[idx] = val.name + " " + val.typ - } - - return strings.Join(parts, ",") -} - -// matchVariable returns true if vari matches any of the declarations in dv. -func (dv *declarationsValue) matchVariable(vari variable) bool { - for _, decl := range dv.Values { - if vari.match(decl) { - return true - } - } - - return false -} - -// matchParameter returns true if param matches any of the declarations in dv. -func (dv *declarationsValue) matchParameter(param parameter) bool { - for _, decl := range dv.Values { - if param.match(decl) { - return true - } - } - - return false -} - -// matchParameter returns true if param matches any of the declarations in dv. -func (dv *declarationsValue) matchTypeParameter(param typeParam) bool { - for _, decl := range dv.Values { - if param.match(decl) { - return true - } - } - - return false -} diff --git a/vendor/github.com/blizzy78/varnamelen/typeparam.go b/vendor/github.com/blizzy78/varnamelen/typeparam.go deleted file mode 100644 index a1f3de99a..000000000 --- a/vendor/github.com/blizzy78/varnamelen/typeparam.go +++ /dev/null @@ -1,35 +0,0 @@ -//go:build go1.18 -// +build go1.18 - -package varnamelen - -import "go/ast" - -// isTypeParam returns true if field is a type parameter of any of the given funcs. -func isTypeParam(field *ast.Field, funcs []*ast.FuncDecl, funcLits []*ast.FuncLit) bool { //nolint:gocognit // it's not that complicated - for _, f := range funcs { - if f.Type.TypeParams == nil { - continue - } - - for _, p := range f.Type.TypeParams.List { - if p == field { - return true - } - } - } - - for _, f := range funcLits { - if f.Type.TypeParams == nil { - continue - } - - for _, p := range f.Type.TypeParams.List { - if p == field { - return true - } - } - } - - return false -} diff --git a/vendor/github.com/blizzy78/varnamelen/typeparam_go1.16.go b/vendor/github.com/blizzy78/varnamelen/typeparam_go1.16.go deleted file mode 100644 index 7856651b9..000000000 --- a/vendor/github.com/blizzy78/varnamelen/typeparam_go1.16.go +++ /dev/null @@ -1,11 +0,0 @@ -//go:build (go1.16 && !go1.18) || (go1.17 && !go1.18) -// +build go1.16,!go1.18 go1.17,!go1.18 - -package varnamelen - -import "go/ast" - -// isTypeParam returns true if field is a type parameter of any of the given funcs. -func isTypeParam(_ *ast.Field, _ []*ast.FuncDecl, _ []*ast.FuncLit) bool { - return false -} diff --git a/vendor/github.com/blizzy78/varnamelen/varnamelen.code-workspace b/vendor/github.com/blizzy78/varnamelen/varnamelen.code-workspace deleted file mode 100644 index 68c485c96..000000000 --- a/vendor/github.com/blizzy78/varnamelen/varnamelen.code-workspace +++ /dev/null @@ -1,13 +0,0 @@ -{ - "folders": [ - { - "path": "." - } - ], - "extensions": { - "recommendations": [ - "EditorConfig.EditorConfig", - "golang.go" - ] - } -} diff --git a/vendor/github.com/blizzy78/varnamelen/varnamelen.go b/vendor/github.com/blizzy78/varnamelen/varnamelen.go deleted file mode 100644 index a5b960311..000000000 --- a/vendor/github.com/blizzy78/varnamelen/varnamelen.go +++ /dev/null @@ -1,891 +0,0 @@ -package varnamelen - -import ( - "go/ast" - "go/token" - "go/types" - "sort" - "strings" - - "golang.org/x/tools/go/analysis" - "golang.org/x/tools/go/analysis/passes/inspect" - "golang.org/x/tools/go/ast/inspector" -) - -// varNameLen is an analyzer that checks that the length of a variable's name matches its usage scope. 
-// It will create a report for a variable's assignment if that variable has a short name, but its -// usage scope is not considered "small." -type varNameLen struct { - // maxDistance is the longest distance, in source lines, that is being considered a "small scope." - maxDistance int - - // minNameLength is the minimum length of a variable's name that is considered "long." - minNameLength int - - // ignoreNames is an optional list of variable names that should be ignored completely. - ignoreNames stringsValue - - // checkReceiver determines whether method receivers should be checked. - checkReceiver bool - - // checkReturn determines whether named return values should be checked. - checkReturn bool - - // ignoreTypeAssertOk determines whether "ok" variables that hold the bool return value of a type assertion should be ignored. - ignoreTypeAssertOk bool - - // ignoreMapIndexOk determines whether "ok" variables that hold the bool return value of a map index should be ignored. - ignoreMapIndexOk bool - - // ignoreChannelReceiveOk determines whether "ok" variables that hold the bool return value of a channel receive should be ignored. - ignoreChannelReceiveOk bool - - // ignoreDeclarations is an optional list of variable declarations that should be ignored completely. - ignoreDeclarations declarationsValue - - // checkTypeParameters determines whether type parameters should be checked. - checkTypeParameters bool -} - -// variable represents a declared variable. -type variable struct { - // name is the name of the variable. - name string - - // constant is true if the variable is actually a constant. - constant bool - - // typ is the type of the variable. - typ string - - // assign is the assign statement that declares the variable. - assign *ast.AssignStmt - - // valueSpec is the value specification that declares the variable. - valueSpec *ast.ValueSpec -} - -// parameter represents a declared function or method parameter. -type parameter struct { - // name is the name of the parameter. - name string - - // typ is the type of the parameter. - typ string - - // field is the declaration of the parameter. - field *ast.Field -} - -// typeParam represents a declared type parameter. -type typeParam struct { - // name is the name of the type parameter. - name string - - // typ is the type of the type parameter. - typ string - - // field is the field that declares the type parameter. - field *ast.Field -} - -// declaration is a variable declaration. -type declaration struct { - // name is the name of the variable. - name string - - // constant is true if the variable is actually a constant. - constant bool - - // typ is the type of the variable. Not used for constants. - typ string -} - -// importDeclaration is an import declaration. -type importDeclaration struct { - // name is the short name or alias for the imported package. This is either the package's default name, - // or the alias specified in the import statement. - // Not used if self is true. - name string - - // path is the full path to the imported package. - path string - - // self is true when this is an implicit import declaration for the current package. - self bool -} - -const ( - // defaultMaxDistance is the default value for the maximum distance between the declaration of a variable and its usage - // that is considered a "small scope." - defaultMaxDistance = 5 - - // defaultMinNameLength is the default value for the minimum length of a variable's name that is considered "long." 
- defaultMinNameLength = 3 -) - -// conventionalDecls is a list of conventional variable declarations. -var conventionalDecls = []declaration{ - parseDeclaration("ctx context.Context"), - - parseDeclaration("b *testing.B"), - parseDeclaration("f *testing.F"), - parseDeclaration("m *testing.M"), - parseDeclaration("pb *testing.PB"), - parseDeclaration("t *testing.T"), - parseDeclaration("tb testing.TB"), -} - -// NewAnalyzer returns a new analyzer. -func NewAnalyzer() *analysis.Analyzer { - vnl := varNameLen{ - maxDistance: defaultMaxDistance, - minNameLength: defaultMinNameLength, - ignoreNames: stringsValue{}, - ignoreDeclarations: declarationsValue{}, - } - - analyzer := analysis.Analyzer{ - Name: "varnamelen", - Doc: "checks that the length of a variable's name matches its scope\n\n" + - "A variable with a short name can be hard to use if the variable is used\n" + - "over a longer span of lines of code. A longer variable name may be easier\n" + - "to comprehend.", - - Run: func(pass *analysis.Pass) (interface{}, error) { - (&vnl).run(pass) - return nil, nil - }, - - Requires: []*analysis.Analyzer{ - inspect.Analyzer, - }, - } - - analyzer.Flags.IntVar(&vnl.maxDistance, "maxDistance", defaultMaxDistance, "maximum number of lines of variable usage scope considered 'short'") - analyzer.Flags.IntVar(&vnl.minNameLength, "minNameLength", defaultMinNameLength, "minimum length of variable name considered 'long'") - analyzer.Flags.Var(&vnl.ignoreNames, "ignoreNames", "comma-separated list of ignored variable names") - analyzer.Flags.BoolVar(&vnl.checkReceiver, "checkReceiver", false, "check method receivers") - analyzer.Flags.BoolVar(&vnl.checkReturn, "checkReturn", false, "check named return values") - analyzer.Flags.BoolVar(&vnl.ignoreTypeAssertOk, "ignoreTypeAssertOk", false, "ignore 'ok' variables that hold the bool return value of a type assertion") - analyzer.Flags.BoolVar(&vnl.ignoreMapIndexOk, "ignoreMapIndexOk", false, "ignore 'ok' variables that hold the bool return value of a map index") - analyzer.Flags.BoolVar(&vnl.ignoreChannelReceiveOk, "ignoreChanRecvOk", false, "ignore 'ok' variables that hold the bool return value of a channel receive") - analyzer.Flags.Var(&vnl.ignoreDeclarations, "ignoreDecls", "comma-separated list of ignored variable declarations") - analyzer.Flags.BoolVar(&vnl.checkTypeParameters, "checkTypeParam", false, "check type parameters") - - return &analyzer -} - -// Run applies v to a package, according to pass. -func (v *varNameLen) run(pass *analysis.Pass) { - varToDist, paramToDist, returnToDist, typeParamToDist := v.distances(pass) - - v.checkVariables(pass, varToDist) - v.checkParams(pass, paramToDist) - v.checkReturns(pass, returnToDist) - v.checkTypeParams(pass, typeParamToDist) -} - -// checkVariables applies v to variables in varToDist. 
-func (v *varNameLen) checkVariables(pass *analysis.Pass, varToDist map[variable]int) { //nolint:gocognit // it's not that complicated - for variable, dist := range varToDist { - if v.ignoreNames.contains(variable.name) { - continue - } - - if v.ignoreDeclarations.matchVariable(variable) { - continue - } - - if v.checkNameAndDistance(variable.name, dist) { - continue - } - - if v.checkTypeAssertOk(variable) { - continue - } - - if v.checkMapIndexOk(variable) { - continue - } - - if v.checkChannelReceiveOk(variable) { - continue - } - - if variable.isConventional() { - continue - } - - if variable.assign != nil { - pass.Reportf(variable.assign.Pos(), "%s name '%s' is too short for the scope of its usage", variable.kindName(), variable.name) - continue - } - - pass.Reportf(variable.valueSpec.Pos(), "%s name '%s' is too short for the scope of its usage", variable.kindName(), variable.name) - } -} - -// checkParams applies v to parameters in paramToDist. -func (v *varNameLen) checkParams(pass *analysis.Pass, paramToDist map[parameter]int) { - for param, dist := range paramToDist { - if v.ignoreNames.contains(param.name) { - continue - } - - if v.ignoreDeclarations.matchParameter(param) { - continue - } - - if v.checkNameAndDistance(param.name, dist) { - continue - } - - if param.isConventional() { - continue - } - - pass.Reportf(param.field.Pos(), "parameter name '%s' is too short for the scope of its usage", param.name) - } -} - -// checkReturns applies v to named return values in returnToDist. -func (v *varNameLen) checkReturns(pass *analysis.Pass, returnToDist map[parameter]int) { - for returnValue, dist := range returnToDist { - if v.ignoreNames.contains(returnValue.name) { - continue - } - - if v.ignoreDeclarations.matchParameter(returnValue) { - continue - } - - if v.checkNameAndDistance(returnValue.name, dist) { - continue - } - - pass.Reportf(returnValue.field.Pos(), "return value name '%s' is too short for the scope of its usage", returnValue.name) - } -} - -// checkTypeParams applies v to type parameters in paramToDist. -func (v *varNameLen) checkTypeParams(pass *analysis.Pass, paramToDist map[typeParam]int) { - for param, dist := range paramToDist { - if v.ignoreNames.contains(param.name) { - continue - } - - if v.ignoreDeclarations.matchTypeParameter(param) { - continue - } - - if v.checkNameAndDistance(param.name, dist) { - continue - } - - pass.Reportf(param.field.Pos(), "type parameter name '%s' is too short for the scope of its usage", param.name) - } -} - -// checkNameAndDistance returns true if name or dist are considered "short". -func (v *varNameLen) checkNameAndDistance(name string, dist int) bool { - if len(name) >= v.minNameLength { - return true - } - - if dist <= v.maxDistance { - return true - } - - return false -} - -// checkTypeAssertOk returns true if "ok" variables that hold the bool return value of a type assertion -// should be ignored, and if vari is such a variable. -func (v *varNameLen) checkTypeAssertOk(vari variable) bool { - return v.ignoreTypeAssertOk && vari.isTypeAssertOk() -} - -// checkMapIndexOk returns true if "ok" variables that hold the bool return value of a map index -// should be ignored, and if vari is such a variable. -func (v *varNameLen) checkMapIndexOk(vari variable) bool { - return v.ignoreMapIndexOk && vari.isMapIndexOk() -} - -// checkChannelReceiveOk returns true if "ok" variables that hold the bool return value of a channel receive -// should be ignored, and if vari is such a variable. 
-func (v *varNameLen) checkChannelReceiveOk(vari variable) bool { - return v.ignoreChannelReceiveOk && vari.isChannelReceiveOk() -} - -// distances returns maps of variables, parameters, return values, and type parameters mapping to their longest usage distances. -func (v *varNameLen) distances(pass *analysis.Pass) (map[variable]int, map[parameter]int, map[parameter]int, map[typeParam]int) { - assignIdents, valueSpecIdents, paramIdents, returnIdents, typeParamIdents, imports, switches := v.identsAndImports(pass) - - varToDist := map[variable]int{} - - for _, ident := range assignIdents { - assign := ident.Obj.Decl.(*ast.AssignStmt) //nolint:forcetypeassert // check is done in identsAndImports - - var typ string - if isTypeSwitchAssign(assign, switches) { - typ = "" - } else { - typ = shortTypeName(pass.TypesInfo.TypeOf(ident), imports) - } - - variable := variable{ - name: ident.Name, - typ: typ, - assign: assign, - } - - useLine := pass.Fset.Position(ident.NamePos).Line - declLine := pass.Fset.Position(assign.Pos()).Line - varToDist[variable] = useLine - declLine - } - - for _, ident := range valueSpecIdents { - valueSpec := ident.Obj.Decl.(*ast.ValueSpec) //nolint:forcetypeassert // check is done in identsAndImports - - variable := variable{ - name: ident.Name, - constant: ident.Obj.Kind == ast.Con, - typ: shortTypeName(pass.TypesInfo.TypeOf(ident), imports), - valueSpec: valueSpec, - } - - useLine := pass.Fset.Position(ident.NamePos).Line - declLine := pass.Fset.Position(valueSpec.Pos()).Line - varToDist[variable] = useLine - declLine - } - - paramToDist := map[parameter]int{} - - for _, ident := range paramIdents { - field := ident.Obj.Decl.(*ast.Field) //nolint:forcetypeassert // check is done in identsAndImports - - param := parameter{ - name: ident.Name, - typ: shortTypeName(pass.TypesInfo.TypeOf(field.Type), imports), - field: field, - } - - useLine := pass.Fset.Position(ident.NamePos).Line - declLine := pass.Fset.Position(field.Pos()).Line - paramToDist[param] = useLine - declLine - } - - returnToDist := map[parameter]int{} - - for _, ident := range returnIdents { - field := ident.Obj.Decl.(*ast.Field) //nolint:forcetypeassert // check is done in identsAndImports - - param := parameter{ - name: ident.Name, - typ: shortTypeName(pass.TypesInfo.TypeOf(ident), imports), - field: field, - } - - useLine := pass.Fset.Position(ident.NamePos).Line - declLine := pass.Fset.Position(field.Pos()).Line - returnToDist[param] = useLine - declLine - } - - typeParamToDist := map[typeParam]int{} - - for _, ident := range typeParamIdents { - field := ident.Obj.Decl.(*ast.Field) //nolint:forcetypeassert // check is done in identsAndImports - - param := typeParam{ - name: ident.Name, - typ: shortTypeName(pass.TypesInfo.TypeOf(field.Type), imports), - field: field, - } - - useLine := pass.Fset.Position(ident.NamePos).Line - declLine := pass.Fset.Position(field.Pos()).Line - typeParamToDist[param] = useLine - declLine - } - - return varToDist, paramToDist, returnToDist, typeParamToDist -} - -// identsAndImports returns Idents referencing assign statements, value specifications, parameters, -// return values, and type parameters, respectively, as well as import declarations, and type switch statements. 
-func (v *varNameLen) identsAndImports(pass *analysis.Pass) ([]*ast.Ident, []*ast.Ident, []*ast.Ident, []*ast.Ident, //nolint:gocognit,cyclop // this is complex stuff - []*ast.Ident, []importDeclaration, []*ast.TypeSwitchStmt) { - inspector := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) //nolint:forcetypeassert // inspect.Analyzer always returns *inspector.Inspector - - filter := []ast.Node{ - (*ast.ImportSpec)(nil), - (*ast.FuncDecl)(nil), - (*ast.FuncLit)(nil), - (*ast.CompositeLit)(nil), - (*ast.TypeSwitchStmt)(nil), - (*ast.Ident)(nil), - } - - assignIdents := []*ast.Ident{} - valueSpecIdents := []*ast.Ident{} - paramIdents := []*ast.Ident{} - returnIdents := []*ast.Ident{} - typeParamIdents := []*ast.Ident{} - imports := []importDeclaration{} - switches := []*ast.TypeSwitchStmt{} - - funcs := []*ast.FuncDecl{} - methods := []*ast.FuncDecl{} - funcLits := []*ast.FuncLit{} - compositeLits := []*ast.CompositeLit{} - - inspector.Preorder(filter, func(node ast.Node) { - switch node2 := node.(type) { - case *ast.ImportSpec: - decl, ok := importSpecToDecl(node2, pass.Pkg.Imports()) - if !ok { - return - } - - imports = append(imports, decl) - - case *ast.FuncDecl: - funcs = append(funcs, node2) - - if node2.Recv == nil { - return - } - - methods = append(methods, node2) - - case *ast.FuncLit: - funcLits = append(funcLits, node2) - - case *ast.CompositeLit: - compositeLits = append(compositeLits, node2) - - case *ast.TypeSwitchStmt: - switches = append(switches, node2) - - case *ast.Ident: - if node2.Obj == nil { - return - } - - if isCompositeLitKey(node2, compositeLits) { - return - } - - switch objDecl := node2.Obj.Decl.(type) { - case *ast.AssignStmt: - assignIdents = append(assignIdents, node2) - - case *ast.ValueSpec: - valueSpecIdents = append(valueSpecIdents, node2) - - case *ast.Field: - switch { - case isReceiver(objDecl, methods): - if !v.checkReceiver { - return - } - - paramIdents = append(paramIdents, node2) - - case isReturn(objDecl, funcs, funcLits): - if !v.checkReturn { - return - } - - returnIdents = append(returnIdents, node2) - - case isTypeParam(objDecl, funcs, funcLits): - if !v.checkTypeParameters { - return - } - - typeParamIdents = append(typeParamIdents, node2) - - case isParam(objDecl, funcs, funcLits, methods): - paramIdents = append(paramIdents, node2) - } - } - } - }) - - imports = append(imports, importDeclaration{ - path: pass.Pkg.Path(), - self: true, - }) - - sort.Slice(imports, func(a, b int) bool { - // reversed: longest path first - return len(imports[a].path) > len(imports[b].path) - }) - - return assignIdents, valueSpecIdents, paramIdents, returnIdents, typeParamIdents, imports, switches -} - -func importSpecToDecl(spec *ast.ImportSpec, imports []*types.Package) (importDeclaration, bool) { - path := strings.TrimSuffix(strings.TrimPrefix(spec.Path.Value, "\""), "\"") - - if spec.Name != nil { - return importDeclaration{ - name: spec.Name.Name, - path: path, - }, true - } - - for _, imp := range imports { - if imp.Path() == path { - return importDeclaration{ - name: imp.Name(), - path: path, - }, true - } - } - - return importDeclaration{}, false -} - -// isTypeAssertOk returns true if v is an "ok" variable that holds the bool return value of a type assertion. 
-func (v variable) isTypeAssertOk() bool { - if v.name != "ok" { - return false - } - - if v.assign == nil { - return false - } - - if len(v.assign.Lhs) != 2 { - return false - } - - ident, ok := v.assign.Lhs[1].(*ast.Ident) - if !ok { - return false - } - - if ident.Name != "ok" { - return false - } - - if len(v.assign.Rhs) != 1 { - return false - } - - if _, ok := v.assign.Rhs[0].(*ast.TypeAssertExpr); !ok { - return false - } - - return true -} - -// isMapIndexOk returns true if v is an "ok" variable that holds the bool return value of a map index. -func (v variable) isMapIndexOk() bool { - if v.name != "ok" { - return false - } - - if v.assign == nil { - return false - } - - if len(v.assign.Lhs) != 2 { - return false - } - - ident, ok := v.assign.Lhs[1].(*ast.Ident) - if !ok { - return false - } - - if ident.Name != "ok" { - return false - } - - if len(v.assign.Rhs) != 1 { - return false - } - - if _, ok := v.assign.Rhs[0].(*ast.IndexExpr); !ok { - return false - } - - return true -} - -// isChannelReceiveOk returns true if v is an "ok" variable that holds the bool return value of a channel receive. -func (v variable) isChannelReceiveOk() bool { - if v.name != "ok" { - return false - } - - if v.assign == nil { - return false - } - - if len(v.assign.Lhs) != 2 { - return false - } - - ident, ok := v.assign.Lhs[1].(*ast.Ident) - if !ok { - return false - } - - if ident.Name != "ok" { - return false - } - - if len(v.assign.Rhs) != 1 { - return false - } - - unary, ok := v.assign.Rhs[0].(*ast.UnaryExpr) - if !ok { - return false - } - - if unary.Op != token.ARROW { - return false - } - - return true -} - -// isConventional returns true if v matches a conventional Go variable/parameter name and type, -// such as "ctx context.Context" or "t *testing.T". -func (v variable) isConventional() bool { - for _, decl := range conventionalDecls { - if v.match(decl) { - return true - } - } - - return false -} - -// match returns true if v matches decl. -func (v variable) match(decl declaration) bool { - if v.name != decl.name { - return false - } - - if v.constant != decl.constant { - return false - } - - if v.constant { - return true - } - - if v.typ == "" { - return false - } - - return decl.matchType(v.typ) -} - -// kindName returns "constant" if v.constant==true, else "variable". -func (v variable) kindName() string { - if v.constant { - return "constant" - } - - return "variable" -} - -// isReceiver returns true if field is a receiver parameter of any of the given methods. -func isReceiver(field *ast.Field, methods []*ast.FuncDecl) bool { - for _, m := range methods { - for _, recv := range m.Recv.List { - if recv == field { - return true - } - } - } - - return false -} - -// isReturn returns true if field is a return value of any of the given funcs. -func isReturn(field *ast.Field, funcs []*ast.FuncDecl, funcLits []*ast.FuncLit) bool { //nolint:gocognit // it's not that complicated - for _, f := range funcs { - if f.Type.Results == nil { - continue - } - - for _, r := range f.Type.Results.List { - if r == field { - return true - } - } - } - - for _, f := range funcLits { - if f.Type.Results == nil { - continue - } - - for _, r := range f.Type.Results.List { - if r == field { - return true - } - } - } - - return false -} - -// isParam returns true if field is a parameter of any of the given funcs. 
-func isParam(field *ast.Field, funcs []*ast.FuncDecl, funcLits []*ast.FuncLit, methods []*ast.FuncDecl) bool { //nolint:gocognit,cyclop // it's not that complicated - for _, f := range funcs { - if f.Type.Params == nil { - continue - } - - for _, p := range f.Type.Params.List { - if p == field { - return true - } - } - } - - for _, f := range funcLits { - if f.Type.Params == nil { - continue - } - - for _, p := range f.Type.Params.List { - if p == field { - return true - } - } - } - - for _, m := range methods { - if m.Type.Params == nil { - continue - } - - for _, p := range m.Type.Params.List { - if p == field { - return true - } - } - } - - return false -} - -// isCompositeLitKey returns true if ident is a key of any of the given composite literals. -func isCompositeLitKey(ident *ast.Ident, compositeLits []*ast.CompositeLit) bool { - for _, cl := range compositeLits { - if _, ok := cl.Type.(*ast.MapType); ok { - continue - } - - for _, kvExpr := range cl.Elts { - kv, ok := kvExpr.(*ast.KeyValueExpr) - if !ok { - continue - } - - if kv.Key == ident { - return true - } - } - } - - return false -} - -// isTypeSwitchAssign returns true if assign is an assign statement of any of the given type switch statements. -func isTypeSwitchAssign(assign *ast.AssignStmt, switches []*ast.TypeSwitchStmt) bool { - for _, s := range switches { - if s.Assign == assign { - return true - } - } - - return false -} - -// isConventional returns true if v matches a conventional Go variable/parameter name and type, -// such as "ctx context.Context" or "t *testing.T". -func (p parameter) isConventional() bool { - for _, decl := range conventionalDecls { - if p.match(decl) { - return true - } - } - - return false -} - -// match returns whether p matches decl. -func (p parameter) match(decl declaration) bool { - if p.name != decl.name { - return false - } - - return decl.matchType(p.typ) -} - -// match returns whether p matches decl. -func (p typeParam) match(decl declaration) bool { - if p.name != decl.name { - return false - } - - return decl.matchType(p.typ) -} - -// parseDeclaration parses and returns a variable declaration parsed from decl. -func parseDeclaration(decl string) declaration { - if strings.HasPrefix(decl, "const ") { - return declaration{ - name: strings.TrimPrefix(decl, "const "), - constant: true, - } - } - - parts := strings.SplitN(decl, " ", 2) - - return declaration{ - name: parts[0], - typ: parts[1], - } -} - -// matchType returns true if typ matches d.typ. -func (d declaration) matchType(typ string) bool { - return d.typ == typ -} - -// shortTypeName returns the short name of typ, with respect to imports. -// For example, if package github.com/matryer/is is imported with alias "x", -// and typ represents []*github.com/matryer/is.I, shortTypeName will return "[]*x.I". -// For imports without aliases, the package's default name will be used. -func shortTypeName(typ types.Type, imports []importDeclaration) string { - if typ == nil { - return "" - } - - typStr := typ.String() - - for _, imp := range imports { - prefix := imp.path + "." - - replace := "" - if !imp.self { - replace = imp.name + "." 
- } - - typStr = strings.ReplaceAll(typStr, prefix, replace) - } - - return typStr -} diff --git a/vendor/github.com/bombsimon/wsl/v4/.gitignore b/vendor/github.com/bombsimon/wsl/v4/.gitignore deleted file mode 100644 index 1c8eba613..000000000 --- a/vendor/github.com/bombsimon/wsl/v4/.gitignore +++ /dev/null @@ -1,70 +0,0 @@ - -# Created by https://www.gitignore.io/api/go,vim,macos - -### Go ### -# Binaries for programs and plugins -*.exe -*.exe~ -*.dll -*.so -*.dylib - -# Test binary, build with `go test -c` -*.test - -# Output of the go coverage tool, specifically when used with LiteIDE -*.out - -### Go Patch ### -/vendor/ -/Godeps/ - -### macOS ### -# General -.DS_Store -.AppleDouble -.LSOverride - -# Icon must end with two \r -Icon - -# Thumbnails -._* - -# Files that might appear in the root of a volume -.DocumentRevisions-V100 -.fseventsd -.Spotlight-V100 -.TemporaryItems -.Trashes -.VolumeIcon.icns -.com.apple.timemachine.donotpresent - -# Directories potentially created on remote AFP share -.AppleDB -.AppleDesktop -Network Trash Folder -Temporary Items -.apdisk - -### Vim ### -# Swap -[._]*.s[a-v][a-z] -[._]*.sw[a-p] -[._]s[a-rt-v][a-z] -[._]ss[a-gi-z] -[._]sw[a-p] - -# Session -Session.vim - -# Temporary -.netrwhist -*~ -# Auto-generated tag files -tags -# Persistent undo -[._]*.un~ - - -# End of https://www.gitignore.io/api/go,vim,macos diff --git a/vendor/github.com/bombsimon/wsl/v4/.golangci.yml b/vendor/github.com/bombsimon/wsl/v4/.golangci.yml deleted file mode 100644 index 543012008..000000000 --- a/vendor/github.com/bombsimon/wsl/v4/.golangci.yml +++ /dev/null @@ -1,81 +0,0 @@ ---- -run: - deadline: 1m - issues-exit-code: 1 - tests: true - skip-dirs: - - vendor$ - -output: - format: colored-line-number - print-issued-lines: false - -linters-settings: - gocognit: - min-complexity: 10 - - depguard: - list-type: blacklist - include-go-root: false - packages: - - github.com/davecgh/go-spew/spew - - misspell: - locale: US - - gocritic: - # Enable multiple checks by tags, run `GL_DEBUG=gocritic golangci-lint run` - # to see all tags and checks. Empty list by default. See - # https://github.com/go-critic/go-critic#usage -> section "Tags". 
- enabled-tags: - - diagnostic - - experimental - - opinionated - - performance - - style - -linters: - enable-all: true - disable: - - cyclop - - deadcode - - depguard - - dupl - - dupword - - exhaustivestruct - - exhaustruct - - forbidigo - - funlen - - gci - - gocognit - - gocyclo - - godox - - golint - - gomnd - - ifshort - - interfacer - - lll - - maintidx - - maligned - - nakedret - - nestif - - nlreturn - - nosnakecase - - paralleltest - - prealloc - - rowserrcheck - - scopelint - - structcheck - - testpackage - - varcheck - - varnamelen - - wastedassign - fast: false - - -issues: - exclude-use-default: true - max-issues-per-linter: 0 - max-same-issues: 0 - -# vim: set sw=2 ts=2 et: diff --git a/vendor/github.com/bombsimon/wsl/v4/LICENSE b/vendor/github.com/bombsimon/wsl/v4/LICENSE deleted file mode 100644 index 4dade6d1c..000000000 --- a/vendor/github.com/bombsimon/wsl/v4/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -MIT License - -Copyright (c) 2018 Simon Sawert - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. diff --git a/vendor/github.com/bombsimon/wsl/v4/README.md b/vendor/github.com/bombsimon/wsl/v4/README.md deleted file mode 100644 index 0bcf01d96..000000000 --- a/vendor/github.com/bombsimon/wsl/v4/README.md +++ /dev/null @@ -1,98 +0,0 @@ -# wsl - Whitespace Linter - -[![forthebadge](https://forthebadge.com/images/badges/made-with-go.svg)](https://forthebadge.com) -[![forthebadge](https://forthebadge.com/images/badges/built-with-love.svg)](https://forthebadge.com) - -[![GitHub Actions](https://github.com/bombsimon/wsl/actions/workflows/go.yml/badge.svg)](https://github.com/bombsimon/wsl/actions/workflows/go.yml) -[![Coverage Status](https://coveralls.io/repos/github/bombsimon/wsl/badge.svg?branch=master)](https://coveralls.io/github/bombsimon/wsl?branch=master) - -`wsl` is a linter that enforces a very **non scientific** vision of how to make -code more readable by enforcing empty lines at the right places. - -**This linter is aggressive** and a lot of projects I've tested it on have -failed miserably. For this linter to be useful at all I want to be open to new -ideas, configurations and discussions! Also note that some of the warnings might -be bugs or unintentional false positives so I would love an -[issue](https://github.com/bombsimon/wsl/issues/new) to fix, discuss, change or -make something configurable! 
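As a rough illustration of the cuddling rules listed below, here is a hypothetical snippet; the quoted messages mirror the reason strings defined in wsl.go, and which of them actually fire depends on configuration:

```go
package example

import "fmt"

func report(values []string) {
	count := len(values)
	label := "values" // two assignments cuddled directly above the if …
	if count == 0 {   // … so wsl reports: "only one cuddle assignment allowed before if statement"
		fmt.Println("no", label)
	}
	fmt.Println(count) // cuddled under the if block: "expressions should not be cuddled with blocks"
}
```

Adding a blank line after `label := "values"` and another after the closing brace of the if block satisfies both rules.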
- -## Installation - -```sh -# Latest release -go install github.com/bombsimon/wsl/v4/cmd/wsl - -# Main branch -go install github.com/bombsimon/wsl/v4/cmd/wsl@master -``` - -## Usage - -> **Note**: This linter provides a fixer that can fix most issues with the -> `--fix` flag. However, currently `golangci-lint` [does not support suggested -> fixes](https://github.com/golangci/golangci-lint/issues/1779) so the `--fix` -> flag in `golangci-lint` will **not** work. - -`wsl` uses the [analysis](https://pkg.go.dev/golang.org/x/tools/go/analysis) -package meaning it will operate on package level with the default analysis flags -and way of working. - -```sh -wsl --help -wsl [flags] - -wsl --allow-cuddle-declarations --fix ./... -``` - -`wsl` is also integrated in [`golangci-lint`](https://golangci-lint.run) - -```sh -golangci-lint run --no-config --disable-all --enable wsl -``` - -## Issues and configuration - -The linter suppers a few ways to configure it to satisfy more than one kind of -code style. These settings could be set either with flags or with YAML -configuration if used via `golangci-lint`. - -The supported configuration can be found [in the -documentation](doc/configuration.md). - -Below are the available checklist for any hit from `wsl`. If you do not see any, -feel free to raise an [issue](https://github.com/bombsimon/wsl/issues/new). - -> **Note**: this linter doesn't take in consideration the issues that will be -> fixed with `go fmt -s` so ensure that the code is properly formatted before -> use. - -* [Anonymous switch statements should never be cuddled](doc/rules.md#anonymous-switch-statements-should-never-be-cuddled) -* [Append only allowed to cuddle with appended value](doc/rules.md#append-only-allowed-to-cuddle-with-appended-value) -* [Assignments should only be cuddled with other assignments](doc/rules.md#assignments-should-only-be-cuddled-with-other-assignments) -* [Block should not end with a whitespace (or comment)](doc/rules.md#block-should-not-end-with-a-whitespace-or-comment) -* [Block should not start with a whitespace](doc/rules.md#block-should-not-start-with-a-whitespace) -* [Case block should end with newline at this size](doc/rules.md#case-block-should-end-with-newline-at-this-size) -* [Branch statements should not be cuddled if block has more than two lines](doc/rules.md#branch-statements-should-not-be-cuddled-if-block-has-more-than-two-lines) -* [Declarations should never be cuddled](doc/rules.md#declarations-should-never-be-cuddled) -* [Defer statements should only be cuddled with expressions on same variable](doc/rules.md#defer-statements-should-only-be-cuddled-with-expressions-on-same-variable) -* [Expressions should not be cuddled with blocks](doc/rules.md#expressions-should-not-be-cuddled-with-blocks) -* [Expressions should not be cuddled with declarations or returns](doc/rules.md#expressions-should-not-be-cuddled-with-declarations-or-returns) -* [For statement without condition should never be cuddled](doc/rules.md#for-statement-without-condition-should-never-be-cuddled) -* [For statements should only be cuddled with assignments used in the iteration](doc/rules.md#for-statements-should-only-be-cuddled-with-assignments-used-in-the-iteration) -* [Go statements can only invoke functions assigned on line above](doc/rules.md#go-statements-can-only-invoke-functions-assigned-on-line-above) -* [If statements should only be cuddled with assignments](doc/rules.md#if-statements-should-only-be-cuddled-with-assignments) -* [If statements should only be cuddled with 
assignments used in the if statement itself](doc/rules.md#if-statements-should-only-be-cuddled-with-assignments-used-in-the-if-statement-itself) -* [If statements that check an error must be cuddled with the statement that assigned the error](doc/rules.md#if-statements-that-check-an-error-must-be-cuddled-with-the-statement-that-assigned-the-error) -* [Only cuddled expressions if assigning variable or using from line above](doc/rules.md#only-cuddled-expressions-if-assigning-variable-or-using-from-line-above) -* [Only one cuddle assignment allowed before defer statement](doc/rules.md#only-one-cuddle-assignment-allowed-before-defer-statement) -* [Only one cuddle assignment allowed before for statement](doc/rules.md#only-one-cuddle-assignment-allowed-before-for-statement) -* [Only one cuddle assignment allowed before go statement](doc/rules.md#only-one-cuddle-assignment-allowed-before-go-statement) -* [Only one cuddle assignment allowed before if statement](doc/rules.md#only-one-cuddle-assignment-allowed-before-if-statement) -* [Only one cuddle assignment allowed before range statement](doc/rules.md#only-one-cuddle-assignment-allowed-before-range-statement) -* [Only one cuddle assignment allowed before switch statement](doc/rules.md#only-one-cuddle-assignment-allowed-before-switch-statement) -* [Only one cuddle assignment allowed before type switch statement](doc/rules.md#only-one-cuddle-assignment-allowed-before-type-switch-statement) -* [Ranges should only be cuddled with assignments used in the iteration](doc/rules.md#ranges-should-only-be-cuddled-with-assignments-used-in-the-iteration) -* [Return statements should not be cuddled if block has more than two lines](doc/rules.md#return-statements-should-not-be-cuddled-if-block-has-more-than-two-lines) -* [Short declarations should cuddle only with other short declarations](doc/rules.md#short-declaration-should-cuddle-only-with-other-short-declarations) -* [Switch statements should only be cuddled with variables switched](doc/rules.md#switch-statements-should-only-be-cuddled-with-variables-switched) -* [Type switch statements should only be cuddled with variables switched](doc/rules.md#type-switch-statements-should-only-be-cuddled-with-variables-switched) diff --git a/vendor/github.com/bombsimon/wsl/v4/analyzer.go b/vendor/github.com/bombsimon/wsl/v4/analyzer.go deleted file mode 100644 index b8eac1587..000000000 --- a/vendor/github.com/bombsimon/wsl/v4/analyzer.go +++ /dev/null @@ -1,141 +0,0 @@ -package wsl - -import ( - "flag" - "strings" - - "golang.org/x/tools/go/analysis" -) - -func NewAnalyzer(config *Configuration) *analysis.Analyzer { - wa := &wslAnalyzer{config: config} - - return &analysis.Analyzer{ - Name: "wsl", - Doc: "add or remove empty lines", - Flags: wa.flags(), - Run: wa.run, - RunDespiteErrors: true, - } -} - -func defaultConfig() *Configuration { - return &Configuration{ - AllowAssignAndAnythingCuddle: false, - AllowAssignAndCallCuddle: true, - AllowCuddleDeclaration: false, - AllowMultiLineAssignCuddle: true, - AllowSeparatedLeadingComment: false, - AllowTrailingComment: false, - ForceCuddleErrCheckAndAssign: false, - ForceExclusiveShortDeclarations: false, - StrictAppend: true, - AllowCuddleWithCalls: []string{"Lock", "RLock"}, - AllowCuddleWithRHS: []string{"Unlock", "RUnlock"}, - ErrorVariableNames: []string{"err"}, - ForceCaseTrailingWhitespaceLimit: 0, - } -} - -// wslAnalyzer is a wrapper around the configuration which is used to be able to -// set the configuration when creating the analyzer and later be able to 
update -// flags and running method. -type wslAnalyzer struct { - config *Configuration -} - -func (wa *wslAnalyzer) flags() flag.FlagSet { - flags := flag.NewFlagSet("", flag.ExitOnError) - - // If we have a configuration set we're not running from the command line so - // we don't use any flags. - if wa.config != nil { - return *flags - } - - wa.config = defaultConfig() - - flags.BoolVar(&wa.config.AllowAssignAndAnythingCuddle, "allow-assign-and-anything", false, "Allow assignments and anything to be cuddled") - flags.BoolVar(&wa.config.AllowAssignAndCallCuddle, "allow-assign-and-call", true, "Allow assignments and calls to be cuddled (if using same variable/type)") - flags.BoolVar(&wa.config.AllowCuddleDeclaration, "allow-cuddle-declarations", false, "Allow declarations to be cuddled") - flags.BoolVar(&wa.config.AllowMultiLineAssignCuddle, "allow-multi-line-assign", true, "Allow cuddling with multi line assignments") - flags.BoolVar(&wa.config.AllowSeparatedLeadingComment, "allow-separated-leading-comment", false, "Allow empty newlines in leading comments") - flags.BoolVar(&wa.config.AllowTrailingComment, "allow-trailing-comment", false, "Allow blocks to end with a comment") - flags.BoolVar(&wa.config.ForceCuddleErrCheckAndAssign, "force-err-cuddling", false, "Force cuddling of error checks with error var assignment") - flags.BoolVar(&wa.config.ForceExclusiveShortDeclarations, "force-short-decl-cuddling", false, "Force short declarations to cuddle by themselves") - flags.BoolVar(&wa.config.StrictAppend, "strict-append", true, "Strict rules for append") - flags.IntVar(&wa.config.ForceCaseTrailingWhitespaceLimit, "force-case-trailing-whitespace", 0, "Force newlines for case blocks > this number.") - - flags.Var(&multiStringValue{slicePtr: &wa.config.AllowCuddleWithCalls}, "allow-cuddle-with-calls", "Comma separated list of idents that can have cuddles after") - flags.Var(&multiStringValue{slicePtr: &wa.config.AllowCuddleWithRHS}, "allow-cuddle-with-rhs", "Comma separated list of idents that can have cuddles before") - flags.Var(&multiStringValue{slicePtr: &wa.config.ErrorVariableNames}, "error-variable-names", "Comma separated list of error variable names") - - return *flags -} - -func (wa *wslAnalyzer) run(pass *analysis.Pass) (interface{}, error) { - for _, file := range pass.Files { - filename := pass.Fset.PositionFor(file.Pos(), false).Filename - if !strings.HasSuffix(filename, ".go") { - continue - } - - processor := newProcessorWithConfig(file, pass.Fset, wa.config) - processor.parseAST() - - for pos, fix := range processor.result { - textEdits := []analysis.TextEdit{} - for _, f := range fix.fixRanges { - textEdits = append(textEdits, analysis.TextEdit{ - Pos: f.fixRangeStart, - End: f.fixRangeEnd, - NewText: []byte("\n"), - }) - } - - pass.Report(analysis.Diagnostic{ - Pos: pos, - Category: "whitespace", - Message: fix.reason, - SuggestedFixes: []analysis.SuggestedFix{ - { - TextEdits: textEdits, - }, - }, - }) - } - } - - //nolint:nilnil // A pass don't need to return anything. - return nil, nil -} - -// multiStringValue is a flag that supports multiple values. It's implemented to -// contain a pointer to a string slice that will be overwritten when the flag's -// `Set` method is called. -type multiStringValue struct { - slicePtr *[]string -} - -// Set implements the flag.Value interface and will overwrite the pointer to the -// slice with a new pointer after splitting the flag by comma. 
-func (m *multiStringValue) Set(value string) error { - s := []string{} - - for _, v := range strings.Split(value, ",") { - s = append(s, strings.TrimSpace(v)) - } - - *m.slicePtr = s - - return nil -} - -// Set implements the flag.Value interface. -func (m *multiStringValue) String() string { - if m.slicePtr == nil { - return "" - } - - return strings.Join(*m.slicePtr, ", ") -} diff --git a/vendor/github.com/bombsimon/wsl/v4/wsl.go b/vendor/github.com/bombsimon/wsl/v4/wsl.go deleted file mode 100644 index 6fd33335a..000000000 --- a/vendor/github.com/bombsimon/wsl/v4/wsl.go +++ /dev/null @@ -1,1411 +0,0 @@ -package wsl - -import ( - "fmt" - "go/ast" - "go/token" - "reflect" - "sort" - "strings" -) - -// Error reason strings. -const ( - reasonAnonSwitchCuddled = "anonymous switch statements should never be cuddled" - reasonAppendCuddledWithoutUse = "append only allowed to cuddle with appended value" - reasonAssignsCuddleAssign = "assignments should only be cuddled with other assignments" - reasonBlockEndsWithWS = "block should not end with a whitespace (or comment)" - reasonBlockStartsWithWS = "block should not start with a whitespace" - reasonCaseBlockTooCuddly = "case block should end with newline at this size" - reasonDeferCuddledWithOtherVar = "defer statements should only be cuddled with expressions on same variable" - reasonExprCuddlingNonAssignedVar = "only cuddled expressions if assigning variable or using from line above" - reasonExpressionCuddledWithBlock = "expressions should not be cuddled with blocks" - reasonExpressionCuddledWithDeclOrRet = "expressions should not be cuddled with declarations or returns" - reasonForCuddledAssignWithoutUse = "for statements should only be cuddled with assignments used in the iteration" - reasonForWithoutCondition = "for statement without condition should never be cuddled" - reasonGoFuncWithoutAssign = "go statements can only invoke functions assigned on line above" - reasonMultiLineBranchCuddle = "branch statements should not be cuddled if block has more than two lines" - reasonMustCuddleErrCheck = "if statements that check an error must be cuddled with the statement that assigned the error" - reasonNeverCuddleDeclare = "declarations should never be cuddled" - reasonOnlyCuddle2LineReturn = "return statements should not be cuddled if block has more than two lines" - reasonOnlyCuddleIfWithAssign = "if statements should only be cuddled with assignments" - reasonOnlyCuddleWithUsedAssign = "if statements should only be cuddled with assignments used in the if statement itself" - reasonOnlyOneCuddleBeforeDefer = "only one cuddle assignment allowed before defer statement" - reasonOnlyOneCuddleBeforeFor = "only one cuddle assignment allowed before for statement" - reasonOnlyOneCuddleBeforeGo = "only one cuddle assignment allowed before go statement" - reasonOnlyOneCuddleBeforeIf = "only one cuddle assignment allowed before if statement" - reasonOnlyOneCuddleBeforeRange = "only one cuddle assignment allowed before range statement" - reasonOnlyOneCuddleBeforeSwitch = "only one cuddle assignment allowed before switch statement" - reasonOnlyOneCuddleBeforeTypeSwitch = "only one cuddle assignment allowed before type switch statement" - reasonRangeCuddledWithoutUse = "ranges should only be cuddled with assignments used in the iteration" - reasonShortDeclNotExclusive = "short declaration should cuddle only with other short declarations" - reasonSwitchCuddledWithoutUse = "switch statements should only be cuddled with variables switched" - 
reasonTypeSwitchCuddledWithoutUse = "type switch statements should only be cuddled with variables switched" -) - -// Warning strings. -const ( - warnTypeNotImplement = "type not implemented" - warnStmtNotImplemented = "stmt type not implemented" - warnBodyStmtTypeNotImplemented = "body statement type not implemented " - warnWSNodeTypeNotImplemented = "whitespace node type not implemented " - warnUnknownLHS = "UNKNOWN LHS" - warnUnknownRHS = "UNKNOWN RHS" -) - -// Configuration represents configurable settings for the linter. -type Configuration struct { - // StrictAppend will do strict checking when assigning from append (x = - // append(x, y)). If this is set to true the append call must append either - // a variable assigned, called or used on the line above. Example on not - // allowed when this is true: - // - // x := []string{} - // y := "not going in X" - // x = append(x, "not y") // This is not allowed with StrictAppend - // z := "going in X" - // - // x = append(x, z) // This is allowed with StrictAppend - // - // m := transform(z) - // x = append(x, z) // So is this because Z is used above. - StrictAppend bool - - // AllowAssignAndCallCuddle allows assignments to be cuddled with variables - // used in calls on line above and calls to be cuddled with assignments of - // variables used in call on line above. - // Example supported with this set to true: - // - // x.Call() - // x = Assign() - // x.AnotherCall() - // x = AnotherAssign() - AllowAssignAndCallCuddle bool - - // AllowAssignAndAnythingCuddle allows assignments to be cuddled with anything. - // Example supported with this set to true: - // if x == 1 { - // x = 0 - // } - // z := x + 2 - // fmt.Println("x") - // y := "x" - AllowAssignAndAnythingCuddle bool - - // AllowMultiLineAssignCuddle allows cuddling to assignments even if they - // span over multiple lines. This defaults to true which allows the - // following example: - // - // err := function( - // "multiple", "lines", - // ) - // if err != nil { - // // ... - // } - AllowMultiLineAssignCuddle bool - - // If the number of lines in a case block is equal to or lager than this - // number, the case *must* end white a newline. - ForceCaseTrailingWhitespaceLimit int - - // AllowTrailingComment will allow blocks to end with comments. - AllowTrailingComment bool - - // AllowSeparatedLeadingComment will allow multiple comments in the - // beginning of a block separated with newline. Example: - // func () { - // // Comment one - // - // // Comment two - // fmt.Println("x") - // } - AllowSeparatedLeadingComment bool - - // AllowCuddleDeclaration will allow multiple var/declaration statements to - // be cuddled. This defaults to false but setting it to true will enable the - // following example: - // var foo bool - // var err error - AllowCuddleDeclaration bool - - // AllowCuddleWithCalls is a list of call idents that everything can be - // cuddled with. Defaults to calls looking like locks to support a flow like - // this: - // - // mu.Lock() - // allow := thisAssignment - AllowCuddleWithCalls []string - - // AllowCuddleWithRHS is a list of right hand side variables that is allowed - // to be cuddled with anything. Defaults to assignments or calls looking - // like unlocks to support a flow like this: - // - // allow := thisAssignment() - // mu.Unlock() - AllowCuddleWithRHS []string - - // ForceCuddleErrCheckAndAssign will cause an error when an If statement that - // checks an error variable doesn't cuddle with the assignment of that variable. 
- // This defaults to false but setting it to true will cause the following - // to generate an error: - // - // err := ProduceError() - // - // if err != nil { - // return err - // } - ForceCuddleErrCheckAndAssign bool - - // When ForceCuddleErrCheckAndAssign is enabled this is a list of names - // used for error variables to check for in the conditional. - // Defaults to just "err" - ErrorVariableNames []string - - // ForceExclusiveShortDeclarations will cause an error if a short declaration - // (:=) cuddles with anything other than another short declaration. For example - // - // a := 2 - // b := 3 - // - // is allowed, but - // - // a := 2 - // b = 3 - // - // is not allowed. This logic overrides ForceCuddleErrCheckAndAssign among others. - ForceExclusiveShortDeclarations bool -} - -// fix is a range to fixup. -type fix struct { - fixRangeStart token.Pos - fixRangeEnd token.Pos -} - -// result represents the result of one error. -type result struct { - fixRanges []fix - reason string -} - -// processor is the type that keeps track of the file and fileset and holds the -// results from parsing the AST. -type processor struct { - config *Configuration - file *ast.File - fileSet *token.FileSet - result map[token.Pos]result - warnings []string -} - -// newProcessorWithConfig will create a Processor with the passed configuration. -func newProcessorWithConfig(file *ast.File, fileSet *token.FileSet, cfg *Configuration) *processor { - return &processor{ - config: cfg, - file: file, - fileSet: fileSet, - result: make(map[token.Pos]result), - } -} - -// parseAST will parse the AST attached to the Processor instance. -func (p *processor) parseAST() { - for _, d := range p.file.Decls { - switch v := d.(type) { - case *ast.FuncDecl: - p.parseBlockBody(v.Name, v.Body) - case *ast.GenDecl: - // `go fmt` will handle proper spacing for GenDecl such as imports, - // constants etc. - default: - p.addWarning(warnTypeNotImplement, d.Pos(), v) - } - } -} - -// parseBlockBody will parse any kind of block statements such as switch cases -// and if statements. A list of Result is returned. -func (p *processor) parseBlockBody(ident *ast.Ident, block *ast.BlockStmt) { - // Nothing to do if there's no value. - if reflect.ValueOf(block).IsNil() { - return - } - - // Start by finding leading and trailing whitespaces. - p.findLeadingAndTrailingWhitespaces(ident, block, nil) - - // Parse the block body contents. - p.parseBlockStatements(block.List) -} - -// parseBlockStatements will parse all the statements found in the body of a -// node. A list of Result is returned. -func (p *processor) parseBlockStatements(statements []ast.Stmt) { - for i, stmt := range statements { - // Start by checking if this statement is another block (other than if, - // for and range). This could be assignment to a function, defer or go - // call with an inline function or similar. If this is found we start by - // parsing this body block before moving on. - for _, stmtBlocks := range p.findBlockStmt(stmt) { - p.parseBlockBody(nil, stmtBlocks) - } - - firstBodyStatement := p.firstBodyStatement(i, statements) - - // First statement, nothing to do. 
- if i == 0 { - continue - } - - previousStatement := statements[i-1] - previousStatementIsMultiline := p.nodeStart(previousStatement) != p.nodeEnd(previousStatement) - cuddledWithLastStmt := p.nodeEnd(previousStatement) == p.nodeStart(stmt)-1 - - // If we're not cuddled and we don't need to enforce err-check cuddling - // then we can bail out here - if !cuddledWithLastStmt && !p.config.ForceCuddleErrCheckAndAssign { - continue - } - - // We don't force error cuddling for multilines. (#86) - if p.config.ForceCuddleErrCheckAndAssign && previousStatementIsMultiline && !cuddledWithLastStmt { - continue - } - - // Extract assigned variables on the line above - // which is the only thing we allow cuddling with. If the assignment is - // made over multiple lines we should not allow cuddling. - var assignedOnLineAbove []string - - // We want to keep track of what was called on the line above to support - // special handling of things such as mutexes. - var calledOnLineAbove []string - - // Check if the previous statement spans over multiple lines. - cuddledWithMultiLineAssignment := cuddledWithLastStmt && p.nodeStart(previousStatement) != p.nodeStart(stmt)-1 - - // Ensure previous line is not a multi line assignment and if not get - // rightAndLeftHandSide assigned variables. - if !cuddledWithMultiLineAssignment { - assignedOnLineAbove = p.findLHS(previousStatement) - calledOnLineAbove = p.findRHS(previousStatement) - } - - // If previous assignment is multi line and we allow it, fetch - // assignments (but only assignments). - if cuddledWithMultiLineAssignment && p.config.AllowMultiLineAssignCuddle { - if _, ok := previousStatement.(*ast.AssignStmt); ok { - assignedOnLineAbove = p.findLHS(previousStatement) - } - } - - // We could potentially have a block which require us to check the first - // argument before ruling out an allowed cuddle. - var calledOrAssignedFirstInBlock []string - - if firstBodyStatement != nil { - calledOrAssignedFirstInBlock = append(p.findLHS(firstBodyStatement), p.findRHS(firstBodyStatement)...) - } - - var ( - leftHandSide = p.findLHS(stmt) - rightHandSide = p.findRHS(stmt) - rightAndLeftHandSide = append(leftHandSide, rightHandSide...) - calledOrAssignedOnLineAbove = append(calledOnLineAbove, assignedOnLineAbove...) - ) - - // If we called some kind of lock on the line above we allow cuddling - // anything. - if atLeastOneInListsMatch(calledOnLineAbove, p.config.AllowCuddleWithCalls) { - continue - } - - // If we call some kind of unlock on this line we allow cuddling with - // anything. - if atLeastOneInListsMatch(rightHandSide, p.config.AllowCuddleWithRHS) { - continue - } - - nStatementsBefore := func(n int) bool { - if i < n { - return false - } - - for j := 1; j < n; j++ { - s1 := statements[i-j] - s2 := statements[i-(j+1)] - - if p.nodeStart(s1)-1 != p.nodeEnd(s2) { - return false - } - } - - return true - } - - nStatementsAfter := func(n int) bool { - if len(statements)-1 < i+n { - return false - } - - for j := 0; j < n; j++ { - s1 := statements[i+j] - s2 := statements[i+j+1] - - if p.nodeEnd(s1)+1 != p.nodeStart(s2) { - return false - } - } - - return true - } - - isLastStatementInBlockOfOnlyTwoLines := func() bool { - // If we're the last statement, check if there's no more than two - // lines from the starting statement and the end of this statement. 
- // This is to support short return functions such as: - // func (t *Typ) X() { - // t.X = true - // return t - // } - if len(statements) == 2 && i == 1 { - if p.nodeEnd(stmt)-p.nodeStart(previousStatement) <= 2 { - return true - } - } - - return false - } - - // If it's a short declaration we should not cuddle with anything else - // if ForceExclusiveShortDeclarations is set on; either this or the - // previous statement could be the short decl, so we'll find out which - // it was and use *that* statement's position - if p.config.ForceExclusiveShortDeclarations && cuddledWithLastStmt { - if p.isShortDecl(stmt) && !p.isShortDecl(previousStatement) { - var reportNode ast.Node = previousStatement - - cm := ast.NewCommentMap(p.fileSet, stmt, p.file.Comments) - if cg, ok := cm[stmt]; ok && len(cg) > 0 { - for _, c := range cg { - if c.Pos() > previousStatement.End() && c.End() < stmt.Pos() { - reportNode = c - } - } - } - - p.addErrorRange( - stmt.Pos(), - reportNode.End(), - reportNode.End(), - reasonShortDeclNotExclusive, - ) - } else if p.isShortDecl(previousStatement) && !p.isShortDecl(stmt) { - p.addErrorRange( - previousStatement.Pos(), - stmt.Pos(), - stmt.Pos(), - reasonShortDeclNotExclusive, - ) - } - } - - // If it's not an if statement and we're not cuddled move on. The only - // reason we need to keep going for if statements is to check if we - // should be cuddled with an error check. - if _, ok := stmt.(*ast.IfStmt); !ok { - if !cuddledWithLastStmt { - continue - } - } - - reportNewlineTwoLinesAbove := func(n1, n2 ast.Node, reason string) { - if atLeastOneInListsMatch(rightAndLeftHandSide, assignedOnLineAbove) || - atLeastOneInListsMatch(assignedOnLineAbove, calledOrAssignedFirstInBlock) { - // If both the assignment on the line above _and_ the assignment - // two lines above is part of line or first in block, add the - // newline as if non were. - _, isAssignmentTwoLinesAbove := statements[i-2].(*ast.AssignStmt) - assignedTwoLinesAbove := p.findLHS(statements[i-2]) - - if isAssignmentTwoLinesAbove && - (atLeastOneInListsMatch(rightAndLeftHandSide, assignedTwoLinesAbove) || - atLeastOneInListsMatch(assignedTwoLinesAbove, calledOrAssignedFirstInBlock)) { - p.addWhitespaceBeforeError(n1, reason) - } else { - // If the variable on the line above is allowed to be - // cuddled, break two lines above so we keep the proper - // cuddling. - p.addErrorRange(n1.Pos(), n2.Pos(), n2.Pos(), reason) - } - } else { - // If not, break here so we separate the cuddled variable. - p.addWhitespaceBeforeError(n1, reason) - } - } - - switch t := stmt.(type) { - case *ast.IfStmt: - checkingErrInitializedInline := func() bool { - if t.Init == nil { - return false - } - - // Variables were initialized inline in the if statement - // Let's make sure it's the err just to be safe - return atLeastOneInListsMatch(p.findLHS(t.Init), p.config.ErrorVariableNames) - } - - if !cuddledWithLastStmt { - checkingErr := atLeastOneInListsMatch(rightAndLeftHandSide, p.config.ErrorVariableNames) - if checkingErr { - // We only want to enforce cuddling error checks if the - // error was assigned on the line above. See - // https://github.com/bombsimon/wsl/issues/78. - // This is needed since `assignedOnLineAbove` is not - // actually just assignments but everything from LHS in the - // previous statement. This means that if previous line was - // `if err ...`, `err` will now be in the list - // `assignedOnLineAbove`. 
- if _, ok := previousStatement.(*ast.AssignStmt); !ok { - continue - } - - if checkingErrInitializedInline() { - continue - } - - if atLeastOneInListsMatch(assignedOnLineAbove, p.config.ErrorVariableNames) { - p.addErrorRange( - stmt.Pos(), - previousStatement.End(), - stmt.Pos(), - reasonMustCuddleErrCheck, - ) - } - } - - continue - } - - if len(assignedOnLineAbove) == 0 { - p.addWhitespaceBeforeError(t, reasonOnlyCuddleIfWithAssign) - continue - } - - if nStatementsBefore(2) { - reportNewlineTwoLinesAbove(t, statements[i-1], reasonOnlyOneCuddleBeforeIf) - continue - } - - if atLeastOneInListsMatch(rightAndLeftHandSide, assignedOnLineAbove) { - continue - } - - if atLeastOneInListsMatch(assignedOnLineAbove, calledOrAssignedFirstInBlock) { - continue - } - - p.addWhitespaceBeforeError(t, reasonOnlyCuddleWithUsedAssign) - case *ast.ReturnStmt: - if isLastStatementInBlockOfOnlyTwoLines() { - continue - } - - p.addWhitespaceBeforeError(t, reasonOnlyCuddle2LineReturn) - case *ast.BranchStmt: - if isLastStatementInBlockOfOnlyTwoLines() { - continue - } - - p.addWhitespaceBeforeError(t, reasonMultiLineBranchCuddle) - case *ast.AssignStmt: - // append is usually an assignment but should not be allowed to be - // cuddled with anything not appended. - if len(rightHandSide) > 0 && rightHandSide[len(rightHandSide)-1] == "append" { - if p.config.StrictAppend { - if !atLeastOneInListsMatch(calledOrAssignedOnLineAbove, rightHandSide) { - p.addWhitespaceBeforeError(t, reasonAppendCuddledWithoutUse) - } - } - - continue - } - - switch previousStatement.(type) { - case *ast.AssignStmt, *ast.IncDecStmt: - continue - } - - if p.config.AllowAssignAndAnythingCuddle { - continue - } - - if _, ok := previousStatement.(*ast.DeclStmt); ok && p.config.AllowCuddleDeclaration { - continue - } - - // If the assignment is from a type or variable called on the line - // above we can allow it by setting AllowAssignAndCallCuddle to - // true. - // Example (x is used): - // x.function() - // a.Field = x.anotherFunction() - if p.config.AllowAssignAndCallCuddle { - if atLeastOneInListsMatch(calledOrAssignedOnLineAbove, rightAndLeftHandSide) { - continue - } - } - - p.addWhitespaceBeforeError(t, reasonAssignsCuddleAssign) - case *ast.IncDecStmt: - switch previousStatement.(type) { - case *ast.AssignStmt, *ast.IncDecStmt: - continue - } - - p.addWhitespaceBeforeError(t, reasonAssignsCuddleAssign) - - case *ast.DeclStmt: - if !p.config.AllowCuddleDeclaration { - p.addWhitespaceBeforeError(t, reasonNeverCuddleDeclare) - } - case *ast.ExprStmt: - switch previousStatement.(type) { - case *ast.DeclStmt, *ast.ReturnStmt: - if p.config.AllowAssignAndCallCuddle && p.config.AllowCuddleDeclaration { - continue - } - - p.addWhitespaceBeforeError(t, reasonExpressionCuddledWithDeclOrRet) - case *ast.IfStmt, *ast.RangeStmt, *ast.SwitchStmt: - p.addWhitespaceBeforeError(t, reasonExpressionCuddledWithBlock) - } - - // If the expression is called on a type or variable used or - // assigned on the line we can allow it by setting - // AllowAssignAndCallCuddle to true. - // Example of allowed cuddled (x is used): - // a.Field = x.func() - // x.function() - if p.config.AllowAssignAndCallCuddle { - if atLeastOneInListsMatch(calledOrAssignedOnLineAbove, rightAndLeftHandSide) { - continue - } - } - - // If we assigned variables on the line above but didn't use them in - // this expression there should probably be a newline between them. 
- if len(assignedOnLineAbove) > 0 && !atLeastOneInListsMatch(rightAndLeftHandSide, assignedOnLineAbove) { - p.addWhitespaceBeforeError(t, reasonExprCuddlingNonAssignedVar) - } - case *ast.RangeStmt: - if nStatementsBefore(2) { - reportNewlineTwoLinesAbove(t, statements[i-1], reasonOnlyOneCuddleBeforeRange) - continue - } - - if !atLeastOneInListsMatch(rightAndLeftHandSide, assignedOnLineAbove) { - if !atLeastOneInListsMatch(assignedOnLineAbove, calledOrAssignedFirstInBlock) { - p.addWhitespaceBeforeError(t, reasonRangeCuddledWithoutUse) - } - } - case *ast.DeferStmt: - if _, ok := previousStatement.(*ast.DeferStmt); ok { - // We may cuddle multiple defers to group logic. - continue - } - - if nStatementsBefore(2) { - // We allow cuddling defer if the defer references something - // used two lines above. - // There are several reasons to why we do this. - // Originally there was a special use case only for "Close" - // - // https://github.com/bombsimon/wsl/issues/31 which links to - // resp, err := client.Do(req) - // if err != nil { - // return err - // } - // defer resp.Body.Close() - // - // After a discussion in a followup issue it makes sense to not - // only hard code `Close` but for anything that's referenced two - // statements above. - // - // https://github.com/bombsimon/wsl/issues/85 - // db, err := OpenDB() - // require.NoError(t, err) - // defer db.Close() - // - // All of this is only allowed if there's exactly three cuddled - // statements, otherwise the regular rules apply. - if !nStatementsBefore(3) && !nStatementsAfter(1) { - variablesTwoLinesAbove := append(p.findLHS(statements[i-2]), p.findRHS(statements[i-2])...) - if atLeastOneInListsMatch(rightHandSide, variablesTwoLinesAbove) { - continue - } - } - - reportNewlineTwoLinesAbove(t, statements[i-1], reasonOnlyOneCuddleBeforeDefer) - - continue - } - - // Be extra nice with RHS, it's common to use this for locks: - // m.Lock() - // defer m.Unlock() - previousRHS := p.findRHS(previousStatement) - if atLeastOneInListsMatch(rightHandSide, previousRHS) { - continue - } - - // Allow use to cuddled defer func literals with usages on line - // above. Example: - // b := getB() - // defer func() { - // makesSenseToUse(b) - // }() - if c, ok := t.Call.Fun.(*ast.FuncLit); ok { - funcLitFirstStmt := append(p.findLHS(c.Body), p.findRHS(c.Body)...) - - if atLeastOneInListsMatch(assignedOnLineAbove, funcLitFirstStmt) { - continue - } - } - - if atLeastOneInListsMatch(assignedOnLineAbove, calledOrAssignedFirstInBlock) { - continue - } - - if !atLeastOneInListsMatch(rightAndLeftHandSide, assignedOnLineAbove) { - p.addWhitespaceBeforeError(t, reasonDeferCuddledWithOtherVar) - } - case *ast.ForStmt: - if len(rightAndLeftHandSide) == 0 { - p.addWhitespaceBeforeError(t, reasonForWithoutCondition) - continue - } - - if nStatementsBefore(2) { - reportNewlineTwoLinesAbove(t, statements[i-1], reasonOnlyOneCuddleBeforeFor) - continue - } - - // The same rule applies for ranges as for if statements, see - // comments regarding variable usages on the line before or as the - // first line in the block for details. 
- if !atLeastOneInListsMatch(rightAndLeftHandSide, assignedOnLineAbove) { - if !atLeastOneInListsMatch(assignedOnLineAbove, calledOrAssignedFirstInBlock) { - p.addWhitespaceBeforeError(t, reasonForCuddledAssignWithoutUse) - } - } - case *ast.GoStmt: - if _, ok := previousStatement.(*ast.GoStmt); ok { - continue - } - - if nStatementsBefore(2) { - reportNewlineTwoLinesAbove(t, statements[i-1], reasonOnlyOneCuddleBeforeGo) - continue - } - - if c, ok := t.Call.Fun.(*ast.SelectorExpr); ok { - goCallArgs := append(p.findLHS(c.X), p.findRHS(c.X)...) - - if atLeastOneInListsMatch(calledOnLineAbove, goCallArgs) { - continue - } - } - - if c, ok := t.Call.Fun.(*ast.FuncLit); ok { - goCallArgs := append(p.findLHS(c.Body), p.findRHS(c.Body)...) - - if atLeastOneInListsMatch(assignedOnLineAbove, goCallArgs) { - continue - } - } - - if !atLeastOneInListsMatch(rightAndLeftHandSide, assignedOnLineAbove) { - p.addWhitespaceBeforeError(t, reasonGoFuncWithoutAssign) - } - case *ast.SwitchStmt: - if nStatementsBefore(2) { - reportNewlineTwoLinesAbove(t, statements[i-1], reasonOnlyOneCuddleBeforeSwitch) - continue - } - - if !atLeastOneInListsMatch(rightAndLeftHandSide, assignedOnLineAbove) { - if len(rightAndLeftHandSide) == 0 { - p.addWhitespaceBeforeError(t, reasonAnonSwitchCuddled) - } else { - p.addWhitespaceBeforeError(t, reasonSwitchCuddledWithoutUse) - } - } - case *ast.TypeSwitchStmt: - if nStatementsBefore(2) { - reportNewlineTwoLinesAbove(t, statements[i-1], reasonOnlyOneCuddleBeforeTypeSwitch) - continue - } - - // Allowed to type assert on variable assigned on line above. - if !atLeastOneInListsMatch(rightHandSide, assignedOnLineAbove) { - // Allow type assertion on variables used in the first case - // immediately. - if !atLeastOneInListsMatch(assignedOnLineAbove, calledOrAssignedFirstInBlock) { - p.addWhitespaceBeforeError(t, reasonTypeSwitchCuddledWithoutUse) - } - } - case *ast.CaseClause, *ast.CommClause: - // Case clauses will be checked by not allowing leading of trailing - // whitespaces within the block. There's nothing in the case itself - // that may be cuddled. - default: - p.addWarning(warnStmtNotImplemented, t.Pos(), t) - } - } -} - -// firstBodyStatement returns the first statement inside a body block. This is -// because variables may be cuddled with conditions or statements if it's used -// directly as the first argument inside a body. -// The body will then be parsed as a *ast.BlockStmt (regular block) or as a list -// of []ast.Stmt (case block). -func (p *processor) firstBodyStatement(i int, allStmt []ast.Stmt) ast.Node { - stmt := allStmt[i] - - // Start by checking if the statement has a body (probably if-statement, - // a range, switch case or similar. Whenever a body is found we start by - // parsing it before moving on in the AST. - statementBody := reflect.Indirect(reflect.ValueOf(stmt)).FieldByName("Body") - - // Some cases allow cuddling depending on the first statement in a body - // of a block or case. If possible extract the first statement. - var firstBodyStatement ast.Node - - if !statementBody.IsValid() { - return firstBodyStatement - } - - switch statementBodyContent := statementBody.Interface().(type) { - case *ast.BlockStmt: - if len(statementBodyContent.List) > 0 { - firstBodyStatement = statementBodyContent.List[0] - - // If the first body statement is a *ast.CaseClause we're - // actually interested in the **next** body to know what's - // inside the first case. 
- if x, ok := firstBodyStatement.(*ast.CaseClause); ok { - if len(x.Body) > 0 { - firstBodyStatement = x.Body[0] - } - } - } - - // If statement bodies will be parsed already when finding block bodies. - // The reason is because if/else-if/else chains is nested in the AST - // where the else bit is a part of the if statement. Since if statements - // is the only statement that can be chained like this we exclude it - // from parsing it again here. - if _, ok := stmt.(*ast.IfStmt); !ok { - p.parseBlockBody(nil, statementBodyContent) - } - case []ast.Stmt: - // The Body field for an *ast.CaseClause or *ast.CommClause is of type - // []ast.Stmt. We must check leading and trailing whitespaces and then - // pass the statements to parseBlockStatements to parse it's content. - var nextStatement ast.Node - - // Check if there's more statements (potential cases) after the - // current one. - if len(allStmt)-1 > i { - nextStatement = allStmt[i+1] - } - - p.findLeadingAndTrailingWhitespaces(nil, stmt, nextStatement) - p.parseBlockStatements(statementBodyContent) - default: - p.addWarning( - warnBodyStmtTypeNotImplemented, - stmt.Pos(), statementBodyContent, - ) - } - - return firstBodyStatement -} - -func (p *processor) findLHS(node ast.Node) []string { - var lhs []string - - if node == nil { - return lhs - } - - switch t := node.(type) { - case *ast.BasicLit, *ast.FuncLit, *ast.SelectStmt, - *ast.LabeledStmt, *ast.ForStmt, *ast.SwitchStmt, - *ast.ReturnStmt, *ast.GoStmt, *ast.CaseClause, - *ast.CommClause, *ast.CallExpr, *ast.UnaryExpr, - *ast.BranchStmt, *ast.TypeSpec, *ast.ChanType, - *ast.DeferStmt, *ast.TypeAssertExpr, *ast.RangeStmt: - // Nothing to add to LHS - case *ast.IncDecStmt: - return p.findLHS(t.X) - case *ast.Ident: - return []string{t.Name} - case *ast.AssignStmt: - for _, v := range t.Lhs { - lhs = append(lhs, p.findLHS(v)...) - } - case *ast.GenDecl: - for _, v := range t.Specs { - lhs = append(lhs, p.findLHS(v)...) - } - case *ast.ValueSpec: - for _, v := range t.Names { - lhs = append(lhs, p.findLHS(v)...) - } - case *ast.BlockStmt: - for _, v := range t.List { - lhs = append(lhs, p.findLHS(v)...) - } - case *ast.BinaryExpr: - return append( - p.findLHS(t.X), - p.findLHS(t.Y)..., - ) - case *ast.DeclStmt: - return p.findLHS(t.Decl) - case *ast.IfStmt: - return p.findLHS(t.Cond) - case *ast.TypeSwitchStmt: - return p.findLHS(t.Assign) - case *ast.SendStmt: - return p.findLHS(t.Chan) - default: - if x, ok := maybeX(t); ok { - return p.findLHS(x) - } - - p.addWarning(warnUnknownLHS, t.Pos(), t) - } - - return lhs -} - -func (p *processor) findRHS(node ast.Node) []string { - var rhs []string - - if node == nil { - return rhs - } - - switch t := node.(type) { - case *ast.BasicLit, *ast.SelectStmt, *ast.ChanType, - *ast.LabeledStmt, *ast.DeclStmt, *ast.BranchStmt, - *ast.TypeSpec, *ast.ArrayType, *ast.CaseClause, - *ast.CommClause, *ast.MapType, *ast.FuncLit: - // Nothing to add to RHS - case *ast.Ident: - return []string{t.Name} - case *ast.SelectorExpr: - // TODO: Should this be RHS? - // t.X is needed for defer as of now and t.Sel needed for special - // functions such as Lock() - rhs = p.findRHS(t.X) - rhs = append(rhs, p.findRHS(t.Sel)...) - case *ast.AssignStmt: - for _, v := range t.Rhs { - rhs = append(rhs, p.findRHS(v)...) - } - case *ast.CallExpr: - for _, v := range t.Args { - rhs = append(rhs, p.findRHS(v)...) - } - - rhs = append(rhs, p.findRHS(t.Fun)...) - case *ast.CompositeLit: - for _, v := range t.Elts { - rhs = append(rhs, p.findRHS(v)...) 
- } - case *ast.IfStmt: - rhs = append(rhs, p.findRHS(t.Cond)...) - rhs = append(rhs, p.findRHS(t.Init)...) - case *ast.BinaryExpr: - return append( - p.findRHS(t.X), - p.findRHS(t.Y)..., - ) - case *ast.TypeSwitchStmt: - return p.findRHS(t.Assign) - case *ast.ReturnStmt: - for _, v := range t.Results { - rhs = append(rhs, p.findRHS(v)...) - } - case *ast.BlockStmt: - for _, v := range t.List { - rhs = append(rhs, p.findRHS(v)...) - } - case *ast.SwitchStmt: - return p.findRHS(t.Tag) - case *ast.GoStmt: - return p.findRHS(t.Call) - case *ast.ForStmt: - return p.findRHS(t.Cond) - case *ast.DeferStmt: - return p.findRHS(t.Call) - case *ast.SendStmt: - return p.findLHS(t.Value) - case *ast.IndexExpr: - rhs = append(rhs, p.findRHS(t.Index)...) - rhs = append(rhs, p.findRHS(t.X)...) - case *ast.SliceExpr: - rhs = append(rhs, p.findRHS(t.X)...) - rhs = append(rhs, p.findRHS(t.Low)...) - rhs = append(rhs, p.findRHS(t.High)...) - case *ast.KeyValueExpr: - rhs = p.findRHS(t.Key) - rhs = append(rhs, p.findRHS(t.Value)...) - default: - if x, ok := maybeX(t); ok { - return p.findRHS(x) - } - - p.addWarning(warnUnknownRHS, t.Pos(), t) - } - - return rhs -} - -func (p *processor) isShortDecl(node ast.Node) bool { - if t, ok := node.(*ast.AssignStmt); ok { - return t.Tok == token.DEFINE - } - - return false -} - -func (p *processor) findBlockStmt(node ast.Node) []*ast.BlockStmt { - var blocks []*ast.BlockStmt - - switch t := node.(type) { - case *ast.BlockStmt: - return []*ast.BlockStmt{t} - case *ast.AssignStmt: - for _, x := range t.Rhs { - blocks = append(blocks, p.findBlockStmt(x)...) - } - case *ast.CallExpr: - blocks = append(blocks, p.findBlockStmt(t.Fun)...) - - for _, x := range t.Args { - blocks = append(blocks, p.findBlockStmt(x)...) - } - case *ast.FuncLit: - blocks = append(blocks, t.Body) - case *ast.ExprStmt: - blocks = append(blocks, p.findBlockStmt(t.X)...) - case *ast.ReturnStmt: - for _, x := range t.Results { - blocks = append(blocks, p.findBlockStmt(x)...) - } - case *ast.DeferStmt: - blocks = append(blocks, p.findBlockStmt(t.Call)...) - case *ast.GoStmt: - blocks = append(blocks, p.findBlockStmt(t.Call)...) - case *ast.IfStmt: - blocks = append([]*ast.BlockStmt{t.Body}, p.findBlockStmt(t.Else)...) - } - - return blocks -} - -// maybeX extracts the X field from an AST node and returns it with a true value -// if it exists. If the node doesn't have an X field nil and false is returned. -// Known fields with X that are handled: -// IndexExpr, ExprStmt, SelectorExpr, StarExpr, ParentExpr, TypeAssertExpr, -// RangeStmt, UnaryExpr, ParenExpr, SliceExpr, IncDecStmt. -func maybeX(node interface{}) (ast.Node, bool) { - maybeHasX := reflect.Indirect(reflect.ValueOf(node)).FieldByName("X") - if !maybeHasX.IsValid() { - return nil, false - } - - n, ok := maybeHasX.Interface().(ast.Node) - if !ok { - return nil, false - } - - return n, true -} - -func atLeastOneInListsMatch(listOne, listTwo []string) bool { - sliceToMap := func(s []string) map[string]struct{} { - m := map[string]struct{}{} - - for _, v := range s { - m[v] = struct{}{} - } - - return m - } - - m1 := sliceToMap(listOne) - m2 := sliceToMap(listTwo) - - for k1 := range m1 { - if _, ok := m2[k1]; ok { - return true - } - } - - for k2 := range m2 { - if _, ok := m1[k2]; ok { - return true - } - } - - return false -} - -// findLeadingAndTrailingWhitespaces will find leading and trailing whitespaces -// in a node. The method takes comments in consideration which will make the -// parser more gentle. 
-func (p *processor) findLeadingAndTrailingWhitespaces(ident *ast.Ident, stmt, nextStatement ast.Node) { - var ( - commentMap = ast.NewCommentMap(p.fileSet, stmt, p.file.Comments) - blockStatements []ast.Stmt - blockStartLine int - blockEndLine int - blockStartPos token.Pos - blockEndPos token.Pos - isCase bool - ) - - // Depending on the block type, get the statements in the block and where - // the block starts (and ends). - switch t := stmt.(type) { - case *ast.BlockStmt: - blockStatements = t.List - blockStartPos = t.Lbrace - blockEndPos = t.Rbrace - case *ast.CaseClause: - blockStatements = t.Body - blockStartPos = t.Colon - isCase = true - case *ast.CommClause: - blockStatements = t.Body - blockStartPos = t.Colon - isCase = true - default: - p.addWarning(warnWSNodeTypeNotImplemented, stmt.Pos(), stmt) - - return - } - - // Ignore empty blocks even if they have newlines or just comments. - if len(blockStatements) < 1 { - return - } - - blockStartLine = p.fileSet.PositionFor(blockStartPos, false).Line - blockEndLine = p.fileSet.PositionFor(blockEndPos, false).Line - - // No whitespace possible if LBrace and RBrace is on the same line. - if blockStartLine == blockEndLine { - return - } - - var ( - firstStatement = blockStatements[0] - lastStatement = blockStatements[len(blockStatements)-1] - ) - - // Get the comment related to the first statement, we do allow comments in - // the beginning of a block before the first statement. - var ( - openingNodePos = blockStartPos + 1 - lastLeadingComment ast.Node - ) - - var ( - firstStatementCommentGroups []*ast.CommentGroup - lastStatementCommentGroups []*ast.CommentGroup - ) - - if cg, ok := commentMap[firstStatement]; ok && !isCase { - firstStatementCommentGroups = cg - } else { - // TODO: Just like with trailing whitespaces comments in a case block is - // tied to the last token of the first statement. For now we iterate over - // all comments in the stmt and grab those that's after colon and before - // first statement. - for _, cg := range commentMap { - if len(cg) < 1 { - continue - } - - // If we have comments and the last comment ends before the first - // statement and the node is after the colon, this must be the node - // mapped to comments. - for _, c := range cg { - if c.End() < firstStatement.Pos() && c.Pos() > blockStartPos { - firstStatementCommentGroups = append(firstStatementCommentGroups, c) - } - } - - // And same if we have comments where the first comment is after the - // last statement but before the next statement (next case). As with - // the other things, if there is not next statement it's no next - // case and the logic will be handled when parsing the block. - if nextStatement == nil { - continue - } - - for _, c := range cg { - if c.Pos() > lastStatement.End() && c.End() < nextStatement.Pos() { - lastStatementCommentGroups = append(lastStatementCommentGroups, c) - } - } - } - - // Since the comments come from a map they might not be ordered meaning - // that the last and first comment groups can be in the wrong order. We - // fix this by sorting all comments by pos after adding them all to the - // slice. 
- sort.Slice(firstStatementCommentGroups, func(i, j int) bool { - return firstStatementCommentGroups[i].Pos() < firstStatementCommentGroups[j].Pos() - }) - - sort.Slice(lastStatementCommentGroups, func(i, j int) bool { - return lastStatementCommentGroups[i].Pos() < lastStatementCommentGroups[j].Pos() - }) - } - - for _, commentGroup := range firstStatementCommentGroups { - // If the comment group is on the same line as the block start - // (LBrace) we should not consider it. - if p.nodeEnd(commentGroup) == blockStartLine { - openingNodePos = commentGroup.End() - continue - } - - // We only care about comments before our statement from the comment - // map. As soon as we hit comments after our statement let's break - // out! - if commentGroup.Pos() > firstStatement.Pos() { - break - } - - // We never allow leading whitespace for the first comment. - if lastLeadingComment == nil && p.nodeStart(commentGroup)-1 != blockStartLine { - p.addErrorRange( - openingNodePos, - openingNodePos, - commentGroup.Pos(), - reasonBlockStartsWithWS, - ) - } - - // If lastLeadingComment is set this is not the first comment so we - // should remove whitespace between them if we don't explicitly - // allow it. - if lastLeadingComment != nil && !p.config.AllowSeparatedLeadingComment { - if p.nodeStart(commentGroup)+1 != p.nodeEnd(lastLeadingComment) { - p.addErrorRange( - openingNodePos, - lastLeadingComment.End(), - commentGroup.Pos(), - reasonBlockStartsWithWS, - ) - } - } - - lastLeadingComment = commentGroup - } - - lastNodePos := openingNodePos - if lastLeadingComment != nil { - lastNodePos = lastLeadingComment.End() - blockStartLine = p.nodeEnd(lastLeadingComment) - } - - // Check if we have a whitespace between the last node which can be the - // Lbrace, a comment on the same line or the last comment if we have - // comments inside the actual block and the first statement. This is never - // allowed. - if p.nodeStart(firstStatement)-1 != blockStartLine { - p.addErrorRange( - openingNodePos, - lastNodePos, - firstStatement.Pos(), - reasonBlockStartsWithWS, - ) - } - - // If the blockEndLine is not 0 we're a regular block (not case). - if blockEndLine != 0 { - // We don't want to reject example functions since they have to end with - // a comment. - if isExampleFunc(ident) { - return - } - - var ( - lastNode ast.Node = lastStatement - trailingComments []ast.Node - ) - - // Check if we have an comments _after_ the last statement and update - // the last node if so. - if c, ok := commentMap[lastStatement]; ok { - lastComment := c[len(c)-1] - if lastComment.Pos() > lastStatement.End() && lastComment.Pos() < stmt.End() { - lastNode = lastComment - } - } - - // TODO: This should be improved. - // The trailing comments are mapped to the last statement item which can - // be anything depending on what the last statement is. - // In `fmt.Println("hello")`, trailing comments will be mapped to - // `*ast.BasicLit` for the "hello" string. - // A short term improvement can be to cache this but for now we naively - // iterate over all items when we check a block. - for _, commentGroups := range commentMap { - for _, commentGroup := range commentGroups { - if commentGroup.Pos() < lastNode.End() || commentGroup.End() > stmt.End() { - continue - } - - trailingComments = append(trailingComments, commentGroup) - } - } - - // TODO: Should this be relaxed? - // Given the old code we only allowed trailing newline if it was - // directly tied to the last statement so for backwards compatibility - // we'll do the same. 
This means we fail all but the last whitespace - // even when allowing trailing comments. - for _, comment := range trailingComments { - if p.nodeStart(comment)-p.nodeEnd(lastNode) > 1 { - p.addErrorRange( - blockEndPos, - lastNode.End(), - comment.Pos(), - reasonBlockEndsWithWS, - ) - } - - lastNode = comment - } - - if !p.config.AllowTrailingComment && p.nodeEnd(stmt)-1 != p.nodeEnd(lastStatement) { - p.addErrorRange( - blockEndPos, - lastNode.End(), - stmt.End()-1, - reasonBlockEndsWithWS, - ) - } - - return - } - - // Nothing to do if we're not looking for enforced newline. - if p.config.ForceCaseTrailingWhitespaceLimit == 0 { - return - } - - // If we don't have any nextStatement the trailing whitespace will be - // handled when parsing the switch. If we do have a next statement we can - // see where it starts by getting it's colon position. We set the end of the - // current case to the position of the next case. - switch nextStatement.(type) { - case *ast.CaseClause, *ast.CommClause: - default: - // No more cases - return - } - - var closingNode ast.Node = lastStatement - for _, commentGroup := range lastStatementCommentGroups { - // TODO: In future versions we might want to close the gaps between - // comments. However this is not currently reported in v3 so we - // won't add this for now. - // if p.nodeStart(commentGroup)-1 != p.nodeEnd(closingNode) {} - closingNode = commentGroup - } - - totalRowsInCase := p.nodeEnd(closingNode) - blockStartLine - if totalRowsInCase < p.config.ForceCaseTrailingWhitespaceLimit { - return - } - - if p.nodeEnd(closingNode)+1 == p.nodeStart(nextStatement) { - p.addErrorRange( - closingNode.Pos(), - closingNode.End(), - closingNode.End(), - reasonCaseBlockTooCuddly, - ) - } -} - -func isExampleFunc(ident *ast.Ident) bool { - return ident != nil && strings.HasPrefix(ident.Name, "Example") -} - -func (p *processor) nodeStart(node ast.Node) int { - return p.fileSet.PositionFor(node.Pos(), false).Line -} - -func (p *processor) nodeEnd(node ast.Node) int { - line := p.fileSet.PositionFor(node.End(), false).Line - - if isEmptyLabeledStmt(node) { - return p.fileSet.PositionFor(node.Pos(), false).Line - } - - return line -} - -func isEmptyLabeledStmt(node ast.Node) bool { - v, ok := node.(*ast.LabeledStmt) - if !ok { - return false - } - - _, empty := v.Stmt.(*ast.EmptyStmt) - - return empty -} - -func (p *processor) addWhitespaceBeforeError(node ast.Node, reason string) { - p.addErrorRange(node.Pos(), node.Pos(), node.Pos(), reason) -} - -func (p *processor) addErrorRange(reportAt, start, end token.Pos, reason string) { - report, ok := p.result[reportAt] - if !ok { - report = result{ - reason: reason, - fixRanges: []fix{}, - } - } - - report.fixRanges = append(report.fixRanges, fix{ - fixRangeStart: start, - fixRangeEnd: end, - }) - - p.result[reportAt] = report -} - -func (p *processor) addWarning(w string, pos token.Pos, t interface{}) { - position := p.fileSet.PositionFor(pos, false) - - p.warnings = append(p.warnings, - fmt.Sprintf("%s:%d: %s (%T)", position.Filename, position.Line, w, t), - ) -} diff --git a/vendor/github.com/breml/bidichk/LICENSE b/vendor/github.com/breml/bidichk/LICENSE deleted file mode 100644 index 47a8419ce..000000000 --- a/vendor/github.com/breml/bidichk/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -MIT License - -Copyright (c) 2021 Lucas Bremgartner - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without 
restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. diff --git a/vendor/github.com/breml/bidichk/pkg/bidichk/bidichk.go b/vendor/github.com/breml/bidichk/pkg/bidichk/bidichk.go deleted file mode 100644 index f1bf20fab..000000000 --- a/vendor/github.com/breml/bidichk/pkg/bidichk/bidichk.go +++ /dev/null @@ -1,180 +0,0 @@ -package bidichk - -import ( - "bytes" - "flag" - "fmt" - "go/token" - "os" - "sort" - "strings" - "unicode/utf8" - - "golang.org/x/tools/go/analysis" -) - -const ( - doc = "bidichk detects dangerous unicode character sequences" - disallowedDoc = `comma separated list of disallowed runes (full name or short name) - -Supported runes - -LEFT-TO-RIGHT-EMBEDDING, LRE (u+202A) -RIGHT-TO-LEFT-EMBEDDING, RLE (u+202B) -POP-DIRECTIONAL-FORMATTING, PDF (u+202C) -LEFT-TO-RIGHT-OVERRIDE, LRO (u+202D) -RIGHT-TO-LEFT-OVERRIDE, RLO (u+202E) -LEFT-TO-RIGHT-ISOLATE, LRI (u+2066) -RIGHT-TO-LEFT-ISOLATE, RLI (u+2067) -FIRST-STRONG-ISOLATE, FSI (u+2068) -POP-DIRECTIONAL-ISOLATE, PDI (u+2069) -` -) - -type disallowedRunes map[string]rune - -func (m disallowedRunes) String() string { - ss := make([]string, 0, len(m)) - for s := range m { - ss = append(ss, s) - } - sort.Strings(ss) - return strings.Join(ss, ",") -} - -func (m disallowedRunes) Set(s string) error { - ss := strings.FieldsFunc(s, func(c rune) bool { return c == ',' }) - if len(ss) == 0 { - return nil - } - - for k := range m { - delete(m, k) - } - - for _, v := range ss { - switch v { - case runeShortNameLRE, runeShortNameRLE, runeShortNamePDF, - runeShortNameLRO, runeShortNameRLO, runeShortNameLRI, - runeShortNameRLI, runeShortNameFSI, runeShortNamePDI: - v = shortNameLookup[v] - fallthrough - case runeNameLRE, runeNameRLE, runeNamePDF, - runeNameLRO, runeNameRLO, runeNameLRI, - runeNameRLI, runeNameFSI, runeNamePDI: - m[v] = runeLookup[v] - default: - return fmt.Errorf("unknown check name %q (see help for full list)", v) - } - } - return nil -} - -const ( - runeNameLRE = "LEFT-TO-RIGHT-EMBEDDING" - runeNameRLE = "RIGHT-TO-LEFT-EMBEDDING" - runeNamePDF = "POP-DIRECTIONAL-FORMATTING" - runeNameLRO = "LEFT-TO-RIGHT-OVERRIDE" - runeNameRLO = "RIGHT-TO-LEFT-OVERRIDE" - runeNameLRI = "LEFT-TO-RIGHT-ISOLATE" - runeNameRLI = "RIGHT-TO-LEFT-ISOLATE" - runeNameFSI = "FIRST-STRONG-ISOLATE" - runeNamePDI = "POP-DIRECTIONAL-ISOLATE" - - runeShortNameLRE = "LRE" // LEFT-TO-RIGHT-EMBEDDING - runeShortNameRLE = "RLE" // RIGHT-TO-LEFT-EMBEDDING - runeShortNamePDF = "PDF" // POP-DIRECTIONAL-FORMATTING - runeShortNameLRO = "LRO" // LEFT-TO-RIGHT-OVERRIDE - runeShortNameRLO = "RLO" // RIGHT-TO-LEFT-OVERRIDE - runeShortNameLRI = "LRI" // LEFT-TO-RIGHT-ISOLATE - runeShortNameRLI = "RLI" // RIGHT-TO-LEFT-ISOLATE - runeShortNameFSI = "FSI" // FIRST-STRONG-ISOLATE 
- runeShortNamePDI = "PDI" // POP-DIRECTIONAL-ISOLATE -) - -var runeLookup = map[string]rune{ - runeNameLRE: '\u202A', // LEFT-TO-RIGHT-EMBEDDING - runeNameRLE: '\u202B', // RIGHT-TO-LEFT-EMBEDDING - runeNamePDF: '\u202C', // POP-DIRECTIONAL-FORMATTING - runeNameLRO: '\u202D', // LEFT-TO-RIGHT-OVERRIDE - runeNameRLO: '\u202E', // RIGHT-TO-LEFT-OVERRIDE - runeNameLRI: '\u2066', // LEFT-TO-RIGHT-ISOLATE - runeNameRLI: '\u2067', // RIGHT-TO-LEFT-ISOLATE - runeNameFSI: '\u2068', // FIRST-STRONG-ISOLATE - runeNamePDI: '\u2069', // POP-DIRECTIONAL-ISOLATE -} - -var shortNameLookup = map[string]string{ - runeShortNameLRE: runeNameLRE, - runeShortNameRLE: runeNameRLE, - runeShortNamePDF: runeNamePDF, - runeShortNameLRO: runeNameLRO, - runeShortNameRLO: runeNameRLO, - runeShortNameLRI: runeNameLRI, - runeShortNameRLI: runeNameRLI, - runeShortNameFSI: runeNameFSI, - runeShortNamePDI: runeNamePDI, -} - -type bidichk struct { - disallowedRunes disallowedRunes -} - -// NewAnalyzer return a new bidichk analyzer. -func NewAnalyzer() *analysis.Analyzer { - bidichk := bidichk{} - bidichk.disallowedRunes = make(map[string]rune, len(runeLookup)) - for k, v := range runeLookup { - bidichk.disallowedRunes[k] = v - } - - a := &analysis.Analyzer{ - Name: "bidichk", - Doc: doc, - Run: bidichk.run, - } - - a.Flags.Init("bidichk", flag.ExitOnError) - a.Flags.Var(&bidichk.disallowedRunes, "disallowed-runes", disallowedDoc) - a.Flags.Var(versionFlag{}, "V", "print version and exit") - - return a -} - -func (b bidichk) run(pass *analysis.Pass) (interface{}, error) { - var err error - - pass.Fset.Iterate(func(f *token.File) bool { - if strings.HasPrefix(f.Name(), "$GOROOT") { - return true - } - - return b.check(f.Name(), f.Pos(0), pass) == nil - }) - - return nil, err -} - -func (b bidichk) check(filename string, pos token.Pos, pass *analysis.Pass) error { - body, err := os.ReadFile(filename) - if err != nil { - return err - } - - for name, r := range b.disallowedRunes { - start := 0 - for { - idx := bytes.IndexRune(body[start:], r) - if idx == -1 { - break - } - start += idx - - pass.Reportf(pos+token.Pos(start), "found dangerous unicode character sequence %s", name) - - start += utf8.RuneLen(r) - } - } - - return nil -} diff --git a/vendor/github.com/breml/bidichk/pkg/bidichk/version.go b/vendor/github.com/breml/bidichk/pkg/bidichk/version.go deleted file mode 100644 index 4cfc57dd1..000000000 --- a/vendor/github.com/breml/bidichk/pkg/bidichk/version.go +++ /dev/null @@ -1,19 +0,0 @@ -package bidichk - -import ( - "fmt" - "os" -) - -var Version = "bidichk version dev" - -type versionFlag struct{} - -func (versionFlag) IsBoolFlag() bool { return true } -func (versionFlag) Get() interface{} { return nil } -func (versionFlag) String() string { return "" } -func (versionFlag) Set(s string) error { - fmt.Println(Version) - os.Exit(0) - return nil -} diff --git a/vendor/github.com/breml/errchkjson/.gitignore b/vendor/github.com/breml/errchkjson/.gitignore deleted file mode 100644 index 0362de301..000000000 --- a/vendor/github.com/breml/errchkjson/.gitignore +++ /dev/null @@ -1,29 +0,0 @@ -# Binaries for programs and plugins -*.exe -*.exe~ -*.dll -*.so -*.dylib -/errchkjson -/cmd/errchkjson/errchkjson - -# Test binary, build with `go test -c` -*.test - -# Output of the go coverage tool, specifically when used with LiteIDE -*.out -coverage.html - -# Log files -*.log - -# Env files -.env - -# Exclude todo -TODO.md - -# Exclude IDE settings -.idea/ -*.iml -.vscode/ diff --git 
a/vendor/github.com/breml/errchkjson/.goreleaser.yml b/vendor/github.com/breml/errchkjson/.goreleaser.yml deleted file mode 100644 index a05c172cb..000000000 --- a/vendor/github.com/breml/errchkjson/.goreleaser.yml +++ /dev/null @@ -1,34 +0,0 @@ -# This is an example .goreleaser.yml file with some sane defaults. -# Make sure to check the documentation at http://goreleaser.com -before: - hooks: - # You may remove this if you don't use go modules. - - go mod tidy -builds: - - main: ./cmd/errchkjson - binary: errchkjson - env: - - CGO_ENABLED=0 - goos: - - linux - - windows - - darwin -archives: - - name_template: >- - {{- .Binary }}_ - {{- .Version }}_ - {{- title .Os }}_ - {{- if eq .Arch "amd64" }}x86_64 - {{- else if eq .Arch "386" }}i386 - {{- else }}{{ .Arch }}{{ end }} - {{- if .Arm }}v{{ .Arm }}{{ end -}} -snapshot: - name_template: "{{ .Tag }}-next" -changelog: - skip: true -release: - github: - owner: breml - name: errchkjson -gomod: - proxy: true diff --git a/vendor/github.com/breml/errchkjson/LICENSE b/vendor/github.com/breml/errchkjson/LICENSE deleted file mode 100644 index 08db5cb6f..000000000 --- a/vendor/github.com/breml/errchkjson/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -MIT License - -Copyright (c) 2019 Lucas Bremgartner - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. diff --git a/vendor/github.com/breml/errchkjson/README.md b/vendor/github.com/breml/errchkjson/README.md deleted file mode 100644 index 197959738..000000000 --- a/vendor/github.com/breml/errchkjson/README.md +++ /dev/null @@ -1,131 +0,0 @@ -# errchkjson - -[![Test Status](https://github.com/breml/errchkjson/actions/workflows/ci.yml/badge.svg)](https://github.com/breml/errchkjson/actions/workflows/ci.yml) [![Go Report Card](https://goreportcard.com/badge/github.com/breml/errchkjson)](https://goreportcard.com/report/github.com/breml/errchkjson) [![License](https://img.shields.io/badge/license-MIT-blue.svg)](LICENSE) - -Checks types passed to the json encoding functions. Reports unsupported types and reports occurrences where the check for the returned error can be omitted. - -Consider this [http.Handler](https://pkg.go.dev/net/http#Handler): - -```Go -func JSONHelloWorld(w http.ResponseWriter, r *http.Request) { - response := struct { - Message string - Code int - }{ - Message: "Hello World", - Code: 200, - } - - body, err := json.Marshal(response) - if err != nil { - panic(err) // unreachable, because json encoding of a struct with just a string and an int will never return an error. 
-	}
-
-	w.Write(body)
-}
-```
-
-Because the `panic` is not possible to happen, one might refactor the code like this:
-
-```Go
-func JSONHelloWorld(w http.ResponseWriter, r *http.Request) {
-	response := struct {
-		Message string
-		Code    int
-	}{
-		Message: "Hello World",
-		Code:    200,
-	}
-
-	body, _ := json.Marshal(response)
-
-	w.Write(body)
-}
-```
-
-This is ok, as long as the struct is not altered in such a way, that could potentially lead
-to `json.Marshal` returning an error.
-
-`errchkjson` allows you to lint your code such that the above error returned from `json.Marshal`
-can be omitted while still staying safe, because as soon as an unsafe type is added to the
-response type, the linter will warn you.
-
-## Installation
-
-Download `errchkjson` from the [releases](https://github.com/breml/errchkjson/releases) or get the latest version from source with:
-
-```shell
-go get github.com/breml/errchkjson/cmd/errchkjson
-```
-
-## Usage
-
-### Shell
-
-Check everything:
-
-```shell
-errchkjson ./...
-```
-
-`errchkjson` also recognizes the following command-line options:
-
-The `-omit-safe` flag disables checking for safe returns of errors from json.Marshal
-
-## Types
-
-### Safe
-
-The following types are safe to use with [json encoding functions](https://pkg.go.dev/encoding/json), that is, the encoding to JSON can not fail:
-
-Safe basic types:
-
-* `bool`
-* `int`, `int8`, `int16`, `int32`, `int64`, `uint`, `uint8`, `uint16`, `uint32`, `uint64`, `uintptr`
-* `string`
-* Pointer type of the above listed basic types
-
-Composed types (struct, map, slice, array) are safe, if the type of the value is
-safe. For structs, only exported fields are relevant. For maps, the key needs to be either an integer type or a string.
-
-### Unsafe
-
-The following types are unsafe to use with [json encoding functions](https://pkg.go.dev/encoding/json), that is, the encoding to JSON can fail (return an error):
-
-Unsafe basic types:
-
-* `float32`, `float64`
-* `interface{}`
-* Pointer type of the above listed basic types
-
-Any composed types (struct, map, slice, array) containing an unsafe basic type.
-
-If a type implements the `json.Marshaler` or `encoding.TextMarshaler` interface (e.g. `json.Number`).
-
-### Forbidden
-
-Forbidden basic types:
-
-* `complex64`, `complex128`
-* `chan`
-* `func`
-* `unsafe.Pointer`
-
-Any composed types (struct, map, slice, array) containing a forbidden basic type. Any map
-using a key with a forbidden type (`bool`, `float32`, `float64`, `struct`).
-
-## Accepted edge case
-
-For `encoding/json.MarshalIndent`, there is a (pathological) edge case, where this
-function could [return an error](https://cs.opensource.google/go/go/+/refs/tags/go1.18:src/encoding/json/scanner.go;drc=refs%2Ftags%2Fgo1.18;l=181) for an otherwise safe argument, if the argument has
-a nesting depth larger than [`10000`](https://cs.opensource.google/go/go/+/refs/tags/go1.18:src/encoding/json/scanner.go;drc=refs%2Ftags%2Fgo1.18;l=144) (as of Go 1.18).
- -## Bugs found during development - -During the development of `errcheckjson`, the following issues in package `encoding/json` of the Go standard library have been found and PR have been merged: - -* [Issue #34154: encoding/json: string option (struct tag) on string field with SetEscapeHTML(false) escapes anyway](https://github.com/golang/go/issues/34154) -* [PR #34127: encoding/json: fix and optimize marshal for quoted string](https://github.com/golang/go/pull/34127) -* [Issue #34268: encoding/json: wrong encoding for json.Number field with string option (struct tag)](https://github.com/golang/go/issues/34268) -* [PR #34269: encoding/json: make Number with the ,string option marshal with quotes](https://github.com/golang/go/pull/34269) -* [PR #34272: encoding/json: validate strings when decoding into Number](https://github.com/golang/go/pull/34272) diff --git a/vendor/github.com/breml/errchkjson/errchkjson.go b/vendor/github.com/breml/errchkjson/errchkjson.go deleted file mode 100644 index 4a23929cf..000000000 --- a/vendor/github.com/breml/errchkjson/errchkjson.go +++ /dev/null @@ -1,348 +0,0 @@ -// Package errchkjson defines an Analyzer that finds places, where it is -// safe to omit checking the error returned from json.Marshal. -package errchkjson - -import ( - "flag" - "fmt" - "go/ast" - "go/token" - "go/types" - "reflect" - - "golang.org/x/tools/go/analysis" - "golang.org/x/tools/go/types/typeutil" -) - -type errchkjson struct { - omitSafe bool // -omit-safe flag - reportNoExported bool // -report-no-exported flag -} - -// NewAnalyzer returns a new errchkjson analyzer. -func NewAnalyzer() *analysis.Analyzer { - errchkjson := &errchkjson{} - - a := &analysis.Analyzer{ - Name: "errchkjson", - Doc: "Checks types passed to the json encoding functions. Reports unsupported types and reports occations, where the check for the returned error can be omitted.", - Run: errchkjson.run, - } - - a.Flags.Init("errchkjson", flag.ExitOnError) - a.Flags.BoolVar(&errchkjson.omitSafe, "omit-safe", false, "if omit-safe is true, checking of safe returns is omitted") - a.Flags.BoolVar(&errchkjson.reportNoExported, "report-no-exported", false, "if report-no-exported is true, encoding a struct without exported fields is reported as issue") - a.Flags.Var(versionFlag{}, "V", "print version and exit") - - return a -} - -func (e *errchkjson) run(pass *analysis.Pass) (interface{}, error) { - for _, file := range pass.Files { - ast.Inspect(file, func(n ast.Node) bool { - if n == nil { - return true - } - - // if the error is returned, it is the caller's responsibility to check - // the return value. 
- if _, ok := n.(*ast.ReturnStmt); ok { - return false - } - - ce, ok := n.(*ast.CallExpr) - if ok { - fn, _ := typeutil.Callee(pass.TypesInfo, ce).(*types.Func) - if fn == nil { - return true - } - - switch fn.FullName() { - case "encoding/json.Marshal", "encoding/json.MarshalIndent": - e.handleJSONMarshal(pass, ce, fn.FullName(), blankIdentifier, e.omitSafe) - case "(*encoding/json.Encoder).Encode": - e.handleJSONMarshal(pass, ce, fn.FullName(), blankIdentifier, true) - default: - e.inspectArgs(pass, ce.Args) - } - return false - } - - as, ok := n.(*ast.AssignStmt) - if !ok { - return true - } - - ce, ok = as.Rhs[0].(*ast.CallExpr) - if !ok { - return true - } - - fn, _ := typeutil.Callee(pass.TypesInfo, ce).(*types.Func) - if fn == nil { - return true - } - - switch fn.FullName() { - case "encoding/json.Marshal", "encoding/json.MarshalIndent": - e.handleJSONMarshal(pass, ce, fn.FullName(), evaluateMarshalErrorTarget(as.Lhs[1]), e.omitSafe) - case "(*encoding/json.Encoder).Encode": - e.handleJSONMarshal(pass, ce, fn.FullName(), evaluateMarshalErrorTarget(as.Lhs[0]), true) - default: - return true - } - return false - }) - } - - return nil, nil -} - -func evaluateMarshalErrorTarget(n ast.Expr) marshalErrorTarget { - if errIdent, ok := n.(*ast.Ident); ok { - if errIdent.Name == "_" { - return blankIdentifier - } - } - return variableAssignment -} - -type marshalErrorTarget int - -const ( - blankIdentifier = iota // the returned error from the JSON marshal function is assigned to the blank identifier "_". - variableAssignment // the returned error from the JSON marshal function is assigned to a variable. - functionArgument // the returned error from the JSON marshal function is passed to an other function as argument. -) - -func (e *errchkjson) handleJSONMarshal(pass *analysis.Pass, ce *ast.CallExpr, fnName string, errorTarget marshalErrorTarget, omitSafe bool) { - t := pass.TypesInfo.TypeOf(ce.Args[0]) - if t == nil { - // Not sure, if this is at all possible - if errorTarget == blankIdentifier { - pass.Reportf(ce.Pos(), "Type of argument to `%s` could not be evaluated and error return value is not checked", fnName) - } - return - } - - if _, ok := t.(*types.Pointer); ok { - t = t.(*types.Pointer).Elem() - } - - err := e.jsonSafe(t, 0, map[types.Type]struct{}{}) - if err != nil { - if _, ok := err.(unsupported); ok { - pass.Reportf(ce.Pos(), "`%s` for %v", fnName, err) - return - } - if _, ok := err.(noexported); ok { - pass.Reportf(ce.Pos(), "Error argument passed to `%s` does not contain any exported field", fnName) - } - // Only care about unsafe types if they are assigned to the blank identifier. 
- if errorTarget == blankIdentifier { - pass.Reportf(ce.Pos(), "Error return value of `%s` is not checked: %v", fnName, err) - } - } - if err == nil && errorTarget == variableAssignment && !omitSafe { - pass.Reportf(ce.Pos(), "Error return value of `%s` is checked but passed argument is safe", fnName) - } - // Report an error, if err for json.Marshal is not checked and safe types are omitted - if err == nil && errorTarget == blankIdentifier && omitSafe { - pass.Reportf(ce.Pos(), "Error return value of `%s` is not checked", fnName) - } -} - -const ( - allowedBasicTypes = types.IsBoolean | types.IsInteger | types.IsString - allowedMapKeyBasicTypes = types.IsInteger | types.IsString - unsupportedBasicTypes = types.IsComplex -) - -func (e *errchkjson) jsonSafe(t types.Type, level int, seenTypes map[types.Type]struct{}) error { - if _, ok := seenTypes[t]; ok { - return nil - } - - if types.Implements(t, textMarshalerInterface()) || types.Implements(t, jsonMarshalerInterface()) { - return fmt.Errorf("unsafe type `%s` found", t.String()) - } - - switch ut := t.Underlying().(type) { - case *types.Basic: - if ut.Info()&allowedBasicTypes > 0 { // bool, int-family, string - if ut.Info()&types.IsString > 0 && t.String() == "encoding/json.Number" { - return fmt.Errorf("unsafe type `%s` found", t.String()) - } - return nil - } - if ut.Info()&unsupportedBasicTypes > 0 { // complex64, complex128 - return newUnsupportedError(fmt.Errorf("unsupported type `%s` found", ut.String())) - } - switch ut.Kind() { - case types.UntypedNil: - return nil - case types.UnsafePointer: - return newUnsupportedError(fmt.Errorf("unsupported type `%s` found", ut.String())) - default: - // E.g. float32, float64 - return fmt.Errorf("unsafe type `%s` found", ut.String()) - } - - case *types.Array: - err := e.jsonSafe(ut.Elem(), level+1, seenTypes) - if err != nil { - return err - } - return nil - - case *types.Slice: - err := e.jsonSafe(ut.Elem(), level+1, seenTypes) - if err != nil { - return err - } - return nil - - case *types.Struct: - seenTypes[t] = struct{}{} - exported := 0 - for i := 0; i < ut.NumFields(); i++ { - if !ut.Field(i).Exported() { - // Unexported fields can be ignored - continue - } - if tag, ok := reflect.StructTag(ut.Tag(i)).Lookup("json"); ok { - if tag == "-" { - // Fields omitted in json can be ignored - continue - } - } - err := e.jsonSafe(ut.Field(i).Type(), level+1, seenTypes) - if err != nil { - return err - } - exported++ - } - if e.reportNoExported && level == 0 && exported == 0 { - return newNoexportedError(fmt.Errorf("struct does not export any field")) - } - return nil - - case *types.Pointer: - err := e.jsonSafe(ut.Elem(), level+1, seenTypes) - if err != nil { - return err - } - return nil - - case *types.Map: - err := jsonSafeMapKey(ut.Key()) - if err != nil { - return err - } - err = e.jsonSafe(ut.Elem(), level+1, seenTypes) - if err != nil { - return err - } - return nil - - case *types.Chan, *types.Signature: - // Types that are not supported for encoding to json: - return newUnsupportedError(fmt.Errorf("unsupported type `%s` found", ut.String())) - - default: - // Types that are not supported for encoding to json or are not completely safe, like: interfaces - return fmt.Errorf("unsafe type `%s` found", t.String()) - } -} - -func jsonSafeMapKey(t types.Type) error { - if types.Implements(t, textMarshalerInterface()) || types.Implements(t, jsonMarshalerInterface()) { - return fmt.Errorf("unsafe type `%s` as map key found", t.String()) - } - switch ut := t.Underlying().(type) { - case 
*types.Basic: - if ut.Info()&types.IsString > 0 && t.String() == "encoding/json.Number" { - return fmt.Errorf("unsafe type `%s` as map key found", t.String()) - } - if ut.Info()&allowedMapKeyBasicTypes > 0 { // bool, int-family, string - return nil - } - // E.g. bool, float32, float64, complex64, complex128 - return newUnsupportedError(fmt.Errorf("unsupported type `%s` as map key found", t.String())) - case *types.Interface: - return fmt.Errorf("unsafe type `%s` as map key found", t.String()) - default: - // E.g. struct composed solely of basic types, that are comparable - return newUnsupportedError(fmt.Errorf("unsupported type `%s` as map key found", t.String())) - } -} - -func (e *errchkjson) inspectArgs(pass *analysis.Pass, args []ast.Expr) { - for _, a := range args { - ast.Inspect(a, func(n ast.Node) bool { - if n == nil { - return true - } - - ce, ok := n.(*ast.CallExpr) - if !ok { - return false - } - - fn, _ := typeutil.Callee(pass.TypesInfo, ce).(*types.Func) - if fn == nil { - return true - } - - switch fn.FullName() { - case "encoding/json.Marshal", "encoding/json.MarshalIndent": - e.handleJSONMarshal(pass, ce, fn.FullName(), functionArgument, e.omitSafe) - case "(*encoding/json.Encoder).Encode": - e.handleJSONMarshal(pass, ce, fn.FullName(), functionArgument, true) - default: - e.inspectArgs(pass, ce.Args) - } - return false - }) - } -} - -// Construct *types.Interface for interface encoding.TextMarshaler -// -// type TextMarshaler interface { -// MarshalText() (text []byte, err error) -// } -func textMarshalerInterface() *types.Interface { - textMarshalerInterface := types.NewInterfaceType([]*types.Func{ - types.NewFunc(token.NoPos, nil, "MarshalText", types.NewSignatureType( - nil, nil, nil, nil, types.NewTuple( - types.NewVar(token.NoPos, nil, "text", - types.NewSlice( - types.Universe.Lookup("byte").Type())), - types.NewVar(token.NoPos, nil, "err", types.Universe.Lookup("error").Type())), - false)), - }, nil) - textMarshalerInterface.Complete() - - return textMarshalerInterface -} - -// Construct *types.Interface for interface json.Marshaler -// -// type Marshaler interface { -// MarshalJSON() ([]byte, error) -// } -func jsonMarshalerInterface() *types.Interface { - textMarshalerInterface := types.NewInterfaceType([]*types.Func{ - types.NewFunc(token.NoPos, nil, "MarshalJSON", types.NewSignatureType( - nil, nil, nil, nil, types.NewTuple( - types.NewVar(token.NoPos, nil, "", - types.NewSlice( - types.Universe.Lookup("byte").Type())), - types.NewVar(token.NoPos, nil, "", types.Universe.Lookup("error").Type())), - false)), - }, nil) - textMarshalerInterface.Complete() - - return textMarshalerInterface -} diff --git a/vendor/github.com/breml/errchkjson/noexported_error.go b/vendor/github.com/breml/errchkjson/noexported_error.go deleted file mode 100644 index 07b7a07d2..000000000 --- a/vendor/github.com/breml/errchkjson/noexported_error.go +++ /dev/null @@ -1,23 +0,0 @@ -package errchkjson - -type noexported interface { - noexported() -} - -var _ noexported = noexportedError{} - -type noexportedError struct { - err error -} - -func newNoexportedError(err error) error { - return noexportedError{ - err: err, - } -} - -func (u noexportedError) noexported() {} - -func (u noexportedError) Error() string { - return u.err.Error() -} diff --git a/vendor/github.com/breml/errchkjson/unsupported_error.go b/vendor/github.com/breml/errchkjson/unsupported_error.go deleted file mode 100644 index 1a38c3f53..000000000 --- a/vendor/github.com/breml/errchkjson/unsupported_error.go +++ /dev/null @@ 
-1,23 +0,0 @@ -package errchkjson - -type unsupported interface { - unsupported() -} - -var _ unsupported = unsupportedError{} - -type unsupportedError struct { - err error -} - -func newUnsupportedError(err error) error { - return unsupportedError{ - err: err, - } -} - -func (u unsupportedError) unsupported() {} - -func (u unsupportedError) Error() string { - return u.err.Error() -} diff --git a/vendor/github.com/breml/errchkjson/version.go b/vendor/github.com/breml/errchkjson/version.go deleted file mode 100644 index 77d8ef8bb..000000000 --- a/vendor/github.com/breml/errchkjson/version.go +++ /dev/null @@ -1,19 +0,0 @@ -package errchkjson - -import ( - "fmt" - "os" -) - -var Version = "errchkjson version dev" - -type versionFlag struct{} - -func (versionFlag) IsBoolFlag() bool { return true } -func (versionFlag) Get() interface{} { return nil } -func (versionFlag) String() string { return "" } -func (versionFlag) Set(s string) error { - fmt.Println(Version) - os.Exit(0) - return nil -} diff --git a/vendor/github.com/butuzov/ireturn/LICENSE b/vendor/github.com/butuzov/ireturn/LICENSE deleted file mode 100644 index a9752e972..000000000 --- a/vendor/github.com/butuzov/ireturn/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -MIT License - -Copyright (c) 2021 Oleg Butuzov - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. diff --git a/vendor/github.com/butuzov/ireturn/analyzer/analyzer.go b/vendor/github.com/butuzov/ireturn/analyzer/analyzer.go deleted file mode 100644 index f68170fb3..000000000 --- a/vendor/github.com/butuzov/ireturn/analyzer/analyzer.go +++ /dev/null @@ -1,277 +0,0 @@ -package analyzer - -import ( - "flag" - "go/ast" - gotypes "go/types" - "runtime" - "strings" - "sync" - - "github.com/butuzov/ireturn/analyzer/internal/config" - "github.com/butuzov/ireturn/analyzer/internal/types" - - "golang.org/x/tools/go/analysis" - "golang.org/x/tools/go/analysis/passes/inspect" - "golang.org/x/tools/go/ast/inspector" -) - -const name string = "ireturn" // linter name - -type validator interface { - IsValid(types.IFace) bool -} - -type analyzer struct { - once sync.Once - mu sync.RWMutex - handler validator - err error - diabledNolint bool - - found []analysis.Diagnostic -} - -func (a *analyzer) run(pass *analysis.Pass) (interface{}, error) { - // 00. Part 1. Handling Configuration Only Once. - a.once.Do(func() { a.readConfiguration(&pass.Analyzer.Flags) }) - - // 00. Part 2. Handling Errors - if a.err != nil { - return nil, a.err - } - - ins, _ := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) - - // 00. 
does file have dot-imported standard packages? - dotImportedStd := make(map[string]struct{}) - ins.Preorder([]ast.Node{(*ast.ImportSpec)(nil)}, func(node ast.Node) { - i, _ := node.(*ast.ImportSpec) - if i.Name != nil && i.Name.Name == "." { - dotImportedStd[strings.Trim(i.Path.Value, `"`)] = struct{}{} - } - }) - - // 01. Running Inspection. - ins.Preorder([]ast.Node{(*ast.FuncDecl)(nil)}, func(node ast.Node) { - // 001. Casting to funcdecl - f, _ := node.(*ast.FuncDecl) - - // 002. Does it return any results ? - if f.Type == nil || f.Type.Results == nil { - return - } - - // 003. Is it allowed to be checked? - if !a.diabledNolint && hasDisallowDirective(f.Doc) { - return - } - - seen := make(map[string]bool, 4) - - // 004. Filtering Results. - for _, issue := range filterInterfaces(pass, f.Type, dotImportedStd) { - if a.handler.IsValid(issue) { - continue - } - - issue.Enrich(f) - - key := issue.HashString() - - if ok := seen[key]; ok { - continue - } - seen[key] = true - - a.addDiagnostic(issue.ExportDiagnostic()) - } - }) - - // 02. Printing reports. - a.mu.RLock() - defer a.mu.RUnlock() - for i := range a.found { - pass.Report(a.found[i]) - } - - return nil, nil -} - -func (a *analyzer) addDiagnostic(d analysis.Diagnostic) { - a.mu.Lock() - defer a.mu.Unlock() - - a.found = append(a.found, d) -} - -func (a *analyzer) readConfiguration(fs *flag.FlagSet) { - cnf, err := config.New(fs) - if err != nil { - a.err = err - return - } - - // First: checking nonolint directive - val := fs.Lookup("nonolint") - if val != nil { - a.diabledNolint = fs.Lookup("nonolint").Value.String() == "true" - } - - // Second: validators implementation next - if validatorImpl, ok := cnf.(validator); ok { - a.handler = validatorImpl - return - } - - a.handler = config.DefaultValidatorConfig() -} - -func NewAnalyzer() *analysis.Analyzer { - a := analyzer{} //nolint: exhaustivestruct - - return &analysis.Analyzer{ - Name: name, - Doc: "Accept Interfaces, Return Concrete Types", - Run: a.run, - Requires: []*analysis.Analyzer{inspect.Analyzer}, - Flags: flags(), - } -} - -func flags() flag.FlagSet { - set := flag.NewFlagSet("", flag.PanicOnError) - set.String("allow", "", "accept-list of the comma-separated interfaces") - set.String("reject", "", "reject-list of the comma-separated interfaces") - set.Bool("nonolint", false, "disable nolint checks") - return *set -} - -func filterInterfaces(p *analysis.Pass, ft *ast.FuncType, di map[string]struct{}) []types.IFace { - var results []types.IFace - - if ft.Results == nil { // this can't happen, but double checking. - return results - } - - for _, el := range ft.Results.List { - switch v := el.Type.(type) { - // ----- empty or anonymous interfaces - case *ast.InterfaceType: - if len(v.Methods.List) == 0 { - results = append(results, types.NewIssue("interface{}", types.EmptyInterface)) - continue - } - - results = append(results, types.NewIssue("anonymous interface", types.AnonInterface)) - - // ------ Errors and interfaces from same package - case *ast.Ident: - - t1 := p.TypesInfo.TypeOf(el.Type) - val, ok := t1.Underlying().(*gotypes.Interface) - if !ok { - continue - } - - var ( - name = t1.String() - isNamed = strings.Contains(name, ".") - isEmpty = val.Empty() - ) - - // catching any - if isEmpty && name == "any" { - results = append(results, types.NewIssue(name, types.EmptyInterface)) - continue - } - - // NOTE: FIXED! 
- if name == "error" { - results = append(results, types.NewIssue(name, types.ErrorInterface)) - continue - } - - if !isNamed { - - typeParams := val.String() - prefix, suffix := "interface{", "}" - if strings.HasPrefix(typeParams, prefix) { // nolint: gosimple - typeParams = typeParams[len(prefix):] - } - if strings.HasSuffix(typeParams, suffix) { - typeParams = typeParams[:len(typeParams)-1] - } - - goVersion := runtime.Version() - if strings.HasPrefix(goVersion, "go1.18") || strings.HasPrefix(goVersion, "go1.19") { - typeParams = strings.ReplaceAll(typeParams, "|", " | ") - } - - results = append(results, types.IFace{ - Name: name, - Type: types.Generic, - OfType: typeParams, - }) - continue - } - - // is it dot-imported package? - // handling cases when stdlib package imported via "." dot-import - if len(di) > 0 { - pkgName := stdPkgInterface(name) - if _, ok := di[pkgName]; ok { - results = append(results, types.NewIssue(name, types.NamedStdInterface)) - - continue - } - } - - results = append(results, types.NewIssue(name, types.NamedInterface)) - - // ------- standard library and 3rd party interfaces - case *ast.SelectorExpr: - - t1 := p.TypesInfo.TypeOf(el.Type) - if !gotypes.IsInterface(t1.Underlying()) { - continue - } - - word := t1.String() - if isStdPkgInterface(word) { - results = append(results, types.NewIssue(word, types.NamedStdInterface)) - continue - } - - results = append(results, types.NewIssue(word, types.NamedInterface)) - } - } - - return results -} - -// stdPkgInterface will return package name if tis std lib package -// or empty string on fail. -func stdPkgInterface(named string) string { - // find last "." index. - idx := strings.LastIndex(named, ".") - if idx == -1 { - return "" - } - - return stdPkg(named[0:idx]) -} - -// isStdPkgInterface will run small checks against pkg to find out if named -// interface we looking on - comes from a standard library or not. -func isStdPkgInterface(namedInterface string) bool { - return stdPkgInterface(namedInterface) != "" -} - -func stdPkg(pkg string) string { - if _, ok := std[pkg]; ok { - return pkg - } - - return "" -} diff --git a/vendor/github.com/butuzov/ireturn/analyzer/disallow.go b/vendor/github.com/butuzov/ireturn/analyzer/disallow.go deleted file mode 100644 index 36b6fcb4f..000000000 --- a/vendor/github.com/butuzov/ireturn/analyzer/disallow.go +++ /dev/null @@ -1,45 +0,0 @@ -package analyzer - -import ( - "go/ast" - "strings" -) - -const nolintPrefix = "//nolint" - -func hasDisallowDirective(cg *ast.CommentGroup) bool { - if cg == nil { - return false - } - - return directiveFound(cg) -} - -func directiveFound(cg *ast.CommentGroup) bool { - for i := len(cg.List) - 1; i >= 0; i-- { - comment := cg.List[i] - if !strings.HasPrefix(comment.Text, nolintPrefix) { - continue - } - - startingIdx := len(nolintPrefix) - for { - idx := strings.Index(comment.Text[startingIdx:], name) - if idx == -1 { - break - } - - if len(comment.Text[startingIdx+idx:]) == len(name) { - return true - } - - c := comment.Text[startingIdx+idx+len(name)] - if c == '.' 
|| c == ',' || c == ' ' || c == ' ' { - return true - } - startingIdx += idx + 1 - } - } - - return false -} diff --git a/vendor/github.com/butuzov/ireturn/analyzer/internal/config/allow.go b/vendor/github.com/butuzov/ireturn/analyzer/internal/config/allow.go deleted file mode 100644 index 6a294ca35..000000000 --- a/vendor/github.com/butuzov/ireturn/analyzer/internal/config/allow.go +++ /dev/null @@ -1,17 +0,0 @@ -package config - -import "github.com/butuzov/ireturn/analyzer/internal/types" - -// allowConfig specifies a list of interfaces (keywords, patters and regular expressions) -// that are allowed by ireturn as valid to return, any non listed interface are rejected. -type allowConfig struct { - *defaultConfig -} - -func allowAll(patterns []string) *allowConfig { - return &allowConfig{&defaultConfig{List: patterns}} -} - -func (ac *allowConfig) IsValid(i types.IFace) bool { - return ac.Has(i) -} diff --git a/vendor/github.com/butuzov/ireturn/analyzer/internal/config/config.go b/vendor/github.com/butuzov/ireturn/analyzer/internal/config/config.go deleted file mode 100644 index 46c73170a..000000000 --- a/vendor/github.com/butuzov/ireturn/analyzer/internal/config/config.go +++ /dev/null @@ -1,66 +0,0 @@ -package config - -import ( - "regexp" - "sync" - - "github.com/butuzov/ireturn/analyzer/internal/types" -) - -// defaultConfig is core of the validation, ... -// todo(butuzov): write proper intro... - -type defaultConfig struct { - List []string - - // private fields (for search optimization look ups) - once sync.Once - quick uint8 - list []*regexp.Regexp -} - -func (config *defaultConfig) Has(i types.IFace) bool { - config.once.Do(config.compileList) - - if config.quick&uint8(i.Type) > 0 { - return true - } - - // not a named interface (because error, interface{}, anon interface has keywords.) - if i.Type&types.NamedInterface == 0 && i.Type&types.NamedStdInterface == 0 { - return false - } - - for _, re := range config.list { - if re.MatchString(i.Name) { - return true - } - } - - return false -} - -// compileList will transform text list into a bitmask for quick searches and -// slice of regular expressions for quick searches. -func (config *defaultConfig) compileList() { - for _, str := range config.List { - switch str { - case types.NameError: - config.quick |= uint8(types.ErrorInterface) - case types.NameEmpty: - config.quick |= uint8(types.EmptyInterface) - case types.NameAnon: - config.quick |= uint8(types.AnonInterface) - case types.NameStdLib: - config.quick |= uint8(types.NamedStdInterface) - case types.NameGeneric: - config.quick |= uint8(types.Generic) - } - - // allow to parse regular expressions - // todo(butuzov): how can we log error in golangci-lint? 
- if re, err := regexp.Compile(str); err == nil { - config.list = append(config.list, re) - } - } -} diff --git a/vendor/github.com/butuzov/ireturn/analyzer/internal/config/new.go b/vendor/github.com/butuzov/ireturn/analyzer/internal/config/new.go deleted file mode 100644 index 6aa04e52e..000000000 --- a/vendor/github.com/butuzov/ireturn/analyzer/internal/config/new.go +++ /dev/null @@ -1,74 +0,0 @@ -package config - -import ( - "errors" - "flag" - "strings" - - "github.com/butuzov/ireturn/analyzer/internal/types" -) - -var ErrCollisionOfInterests = errors.New("can't have both `-accept` and `-reject` specified at same time") - -// nolint: exhaustivestruct -func DefaultValidatorConfig() *allowConfig { - return allowAll([]string{ - types.NameEmpty, // "empty": empty interfaces (interface{}) - types.NameError, // "error": for all error's - types.NameAnon, // "anon": for all empty interfaces with methods (interface {Method()}) - types.NameStdLib, // "std": for all standard library packages - }) -} - -// New is factory function that return allowConfig or rejectConfig depending -// on provided arguments. -func New(fs *flag.FlagSet) (interface{}, error) { - var ( - allowList = toSlice(getFlagVal(fs, "allow")) - rejectList = toSlice(getFlagVal(fs, "reject")) - ) - - // can't have both at same time. - if len(allowList) != 0 && len(rejectList) != 0 { - return nil, ErrCollisionOfInterests - } - - switch { - case len(allowList) > 0: - return allowAll(allowList), nil - case len(rejectList) > 0: - return rejectAll(rejectList), nil - } - - // can have none (defaults are used) at same time. - return nil, nil -} - -// both constants used to cleanup items provided in comma separated list. -const ( - SepTab string = " " - SepSpace string = " " -) - -func toSlice(s string) []string { - var results []string - - for _, pattern := range strings.Split(s, ",") { - pattern = strings.Trim(pattern, SepTab+SepSpace) - if pattern != "" { - results = append(results, pattern) - } - } - - return results -} - -func getFlagVal(fs *flag.FlagSet, name string) string { - flg := fs.Lookup(name) - - if flg == nil { - return "" - } - - return flg.Value.String() -} diff --git a/vendor/github.com/butuzov/ireturn/analyzer/internal/config/reject.go b/vendor/github.com/butuzov/ireturn/analyzer/internal/config/reject.go deleted file mode 100644 index bef6913bb..000000000 --- a/vendor/github.com/butuzov/ireturn/analyzer/internal/config/reject.go +++ /dev/null @@ -1,17 +0,0 @@ -package config - -import "github.com/butuzov/ireturn/analyzer/internal/types" - -// rejectConfig specifies a list of interfaces (keywords, patters and regular expressions) -// that are rejected by ireturn as valid to return, any non listed interface are allowed. 
-type rejectConfig struct { - *defaultConfig -} - -func rejectAll(patterns []string) *rejectConfig { - return &rejectConfig{&defaultConfig{List: patterns}} -} - -func (rc *rejectConfig) IsValid(i types.IFace) bool { - return !rc.Has(i) -} diff --git a/vendor/github.com/butuzov/ireturn/analyzer/internal/types/iface.go b/vendor/github.com/butuzov/ireturn/analyzer/internal/types/iface.go deleted file mode 100644 index 5e576374d..000000000 --- a/vendor/github.com/butuzov/ireturn/analyzer/internal/types/iface.go +++ /dev/null @@ -1,54 +0,0 @@ -package types - -import ( - "fmt" - "go/ast" - "go/token" - - "golang.org/x/tools/go/analysis" -) - -type IFace struct { - Name string // Interface name - Type IType // Type of the interface - - Pos token.Pos // Token Position - FuncName string // - OfType string -} - -func NewIssue(name string, interfaceType IType) IFace { - return IFace{ - Name: name, - // Pos: pos, - Type: interfaceType, - } -} - -func (i *IFace) Enrich(f *ast.FuncDecl) { - i.FuncName = f.Name.Name - i.Pos = f.Pos() -} - -func (i IFace) String() string { - if i.Type != Generic { - return fmt.Sprintf("%s returns interface (%s)", i.FuncName, i.Name) - } - - if i.OfType != "" { - return fmt.Sprintf("%s returns generic interface (%s) of type param %s", i.FuncName, i.Name, i.OfType) - } - - return fmt.Sprintf("%s returns generic interface (%s)", i.FuncName, i.Name) -} - -func (i IFace) HashString() string { - return fmt.Sprintf("%v-%s", i.Pos, i.String()) -} - -func (i IFace) ExportDiagnostic() analysis.Diagnostic { - return analysis.Diagnostic{ //nolint: exhaustivestruct - Pos: i.Pos, - Message: i.String(), - } -} diff --git a/vendor/github.com/butuzov/ireturn/analyzer/internal/types/names.go b/vendor/github.com/butuzov/ireturn/analyzer/internal/types/names.go deleted file mode 100644 index 1092c9667..000000000 --- a/vendor/github.com/butuzov/ireturn/analyzer/internal/types/names.go +++ /dev/null @@ -1,9 +0,0 @@ -package types - -const ( - NameEmpty = "empty" - NameAnon = "anon" - NameError = "error" - NameStdLib = "stdlib" - NameGeneric = "generic" -) diff --git a/vendor/github.com/butuzov/ireturn/analyzer/internal/types/types.go b/vendor/github.com/butuzov/ireturn/analyzer/internal/types/types.go deleted file mode 100644 index 5c0bd7407..000000000 --- a/vendor/github.com/butuzov/ireturn/analyzer/internal/types/types.go +++ /dev/null @@ -1,12 +0,0 @@ -package types - -type IType uint8 - -const ( - EmptyInterface IType = 1 << iota // ref as empty - AnonInterface // ref as anon - ErrorInterface // ref as error - NamedInterface // ref as named - NamedStdInterface // ref as named stdlib - Generic // ref as generic type parameter -) diff --git a/vendor/github.com/butuzov/ireturn/analyzer/std.go b/vendor/github.com/butuzov/ireturn/analyzer/std.go deleted file mode 100644 index cac464612..000000000 --- a/vendor/github.com/butuzov/ireturn/analyzer/std.go +++ /dev/null @@ -1,203 +0,0 @@ -// Code generated using std.sh; DO NOT EDIT. - -// We will ignore that fact that some of packages -// were removed from stdlib. 
- -package analyzer - -var std = map[string]struct{}{ - // added in Go v1.2 in compare to v1.1 (docker image) - "archive/tar": {}, - "archive/zip": {}, - "bufio": {}, - "bytes": {}, - "cmd/cgo": {}, - "cmd/fix": {}, - "cmd/go": {}, - "cmd/gofmt": {}, - "cmd/yacc": {}, - "compress/bzip2": {}, - "compress/flate": {}, - "compress/gzip": {}, - "compress/lzw": {}, - "compress/zlib": {}, - "container/heap": {}, - "container/list": {}, - "container/ring": {}, - "crypto": {}, - "crypto/aes": {}, - "crypto/cipher": {}, - "crypto/des": {}, - "crypto/dsa": {}, - "crypto/ecdsa": {}, - "crypto/elliptic": {}, - "crypto/hmac": {}, - "crypto/md5": {}, - "crypto/rand": {}, - "crypto/rc4": {}, - "crypto/rsa": {}, - "crypto/sha1": {}, - "crypto/sha256": {}, - "crypto/sha512": {}, - "crypto/subtle": {}, - "crypto/tls": {}, - "crypto/x509": {}, - "crypto/x509/pkix": {}, - "database/sql": {}, - "database/sql/driver": {}, - "debug/dwarf": {}, - "debug/elf": {}, - "debug/gosym": {}, - "debug/macho": {}, - "debug/pe": {}, - "encoding": {}, - "encoding/ascii85": {}, - "encoding/asn1": {}, - "encoding/base32": {}, - "encoding/base64": {}, - "encoding/binary": {}, - "encoding/csv": {}, - "encoding/gob": {}, - "encoding/hex": {}, - "encoding/json": {}, - "encoding/pem": {}, - "encoding/xml": {}, - "errors": {}, - "expvar": {}, - "flag": {}, - "fmt": {}, - "go/ast": {}, - "go/build": {}, - "go/doc": {}, - "go/format": {}, - "go/parser": {}, - "go/printer": {}, - "go/scanner": {}, - "go/token": {}, - "hash": {}, - "hash/adler32": {}, - "hash/crc32": {}, - "hash/crc64": {}, - "hash/fnv": {}, - "html": {}, - "html/template": {}, - "image": {}, - "image/color": {}, - "image/color/palette": {}, - "image/draw": {}, - "image/gif": {}, - "image/jpeg": {}, - "image/png": {}, - "index/suffixarray": {}, - "io": {}, - "io/ioutil": {}, - "log": {}, - "log/syslog": {}, - "math": {}, - "math/big": {}, - "math/cmplx": {}, - "math/rand": {}, - "mime": {}, - "mime/multipart": {}, - "net": {}, - "net/http": {}, - "net/http/cgi": {}, - "net/http/cookiejar": {}, - "net/http/fcgi": {}, - "net/http/httptest": {}, - "net/http/httputil": {}, - "net/http/pprof": {}, - "net/mail": {}, - "net/rpc": {}, - "net/rpc/jsonrpc": {}, - "net/smtp": {}, - "net/textproto": {}, - "net/url": {}, - "os": {}, - "os/exec": {}, - "os/signal": {}, - "os/user": {}, - "path": {}, - "path/filepath": {}, - "reflect": {}, - "regexp": {}, - "regexp/syntax": {}, - "runtime": {}, - "runtime/cgo": {}, - "runtime/debug": {}, - "runtime/pprof": {}, - "runtime/race": {}, - "sort": {}, - "strconv": {}, - "strings": {}, - "sync": {}, - "sync/atomic": {}, - "syscall": {}, - "testing": {}, - "testing/iotest": {}, - "testing/quick": {}, - "text/scanner": {}, - "text/tabwriter": {}, - "text/template": {}, - "text/template/parse": {}, - "time": {}, - "unicode": {}, - "unicode/utf16": {}, - "unicode/utf8": {}, - "unsafe": {}, - // added in Go v1.3 in compare to v1.2 (docker image) - "cmd/addr2line": {}, - "cmd/nm": {}, - "cmd/objdump": {}, - "cmd/pack": {}, - "debug/plan9obj": {}, - // added in Go v1.4 in compare to v1.3 (docker image) - "cmd/pprof": {}, - // added in Go v1.5 in compare to v1.4 (docker image) - "go/constant": {}, - "go/importer": {}, - "go/types": {}, - "mime/quotedprintable": {}, - "runtime/trace": {}, - // added in Go v1.6 in compare to v1.5 (docker image) - // added in Go v1.7 in compare to v1.6 (docker image) - "context": {}, - "net/http/httptrace": {}, - // added in Go v1.8 in compare to v1.7 (docker image) - "plugin": {}, - // added in Go v1.9 in compare to 
v1.8 (docker image) - "math/bits": {}, - // added in Go v1.10 in compare to v1.9 (docker image) - // added in Go v1.11 in compare to v1.10 (docker image) - // added in Go v1.12 in compare to v1.11 (docker image) - // added in Go v1.13 in compare to v1.12 (docker image) - "crypto/ed25519": {}, - // added in Go v1.14 in compare to v1.13 (docker image) - "hash/maphash": {}, - // added in Go v1.15 in compare to v1.14 (docker image) - "time/tzdata": {}, - // added in Go v1.16 in compare to v1.15 (docker image) - "embed": {}, - "go/build/constraint": {}, - "io/fs": {}, - "runtime/metrics": {}, - "testing/fstest": {}, - // added in Go v1.17 in compare to v1.16 (docker image) - // added in Go v1.18 in compare to v1.17 (docker image) - "debug/buildinfo": {}, - "net/netip": {}, - // added in Go v1.19 in compare to v1.18 (docker image) - "go/doc/comment": {}, - // added in Go v1.20 in compare to v1.19 (docker image) - "crypto/ecdh": {}, - "runtime/coverage": {}, - // added in Go v1.21 in compare to v1.20 (docker image) - "cmp": {}, - "log/slog": {}, - "maps": {}, - "slices": {}, - "testing/slogtest": {}, - // added in Go v1.22 in compare to v1.21 (docker image) - "go/version": {}, - "math/rand/v2": {}, -} diff --git a/vendor/github.com/butuzov/mirror/.act b/vendor/github.com/butuzov/mirror/.act deleted file mode 100644 index 8182d703a..000000000 --- a/vendor/github.com/butuzov/mirror/.act +++ /dev/null @@ -1,2 +0,0 @@ ---platform ubuntu-latest=butuzov/act-go:latest ---env DRY_RUN=1 diff --git a/vendor/github.com/butuzov/mirror/.editorconfig b/vendor/github.com/butuzov/mirror/.editorconfig deleted file mode 100644 index 4d9c20d8d..000000000 --- a/vendor/github.com/butuzov/mirror/.editorconfig +++ /dev/null @@ -1,28 +0,0 @@ -# top-most EditorConfig file -root = true - - -[*] -end_of_line = lf # Unix-style newlines -charset = utf-8 - -indent_style = space # default identation - spaces -indent_size = 4 # default identation - size - -insert_final_newline = true # new line at the end of file -trim_trailing_whitespace = true # no extra sapces at the end of lines - -[*.{go,gohtml,gotpl}] # Go -indent_style = tab -indent_size = 2 - -[{Makefile,makefile}] # CMake -indent_style = tab - -[*.md] # Markdown -trim_trailing_whitespace = true -max_line_length = 100 -insert_final_newline = true -indent_size = 2 - - diff --git a/vendor/github.com/butuzov/mirror/.gitignore b/vendor/github.com/butuzov/mirror/.gitignore deleted file mode 100644 index 109f33b98..000000000 --- a/vendor/github.com/butuzov/mirror/.gitignore +++ /dev/null @@ -1,11 +0,0 @@ -# artifacts -coverage.cov -bin/* -dist/* -tmp/* -out* -sandbox* -demo* -.task* -.ipynb* -.jupyter* diff --git a/vendor/github.com/butuzov/mirror/.goreleaser.yaml b/vendor/github.com/butuzov/mirror/.goreleaser.yaml deleted file mode 100644 index 11749ed2b..000000000 --- a/vendor/github.com/butuzov/mirror/.goreleaser.yaml +++ /dev/null @@ -1,61 +0,0 @@ ---- -project_name: mirror - -builds: - - binary: mirror - env: - - CGO_ENABLED=0 - goos: - - darwin - - linux - - windows - goarch: - - amd64 - - 386 - - arm64 - - arm - goarm: - - 6 - ignore: - - goos: windows - goarm: 6 - - goos: windows - goarch: arm64 - - goos: linux - goarm: 6 - - goos: darwin - goarch: 386 - main: ./cmd/mirror/ - flags: - - -trimpath - ldflags: -s -w - -checksum: - name_template: 'checksums.txt' - -changelog: - sort: asc - filters: - exclude: - - '(?i)^docs?:' - - '(?i)^docs\([^:]+\):' - - '(?i)^docs\[[^:]+\]:' - - '^tests?:' - - '(?i)^dev:' - - Merge pull request - - Merge branch - -archives: - - 
name_template: '{{ .ProjectName }}_{{ .Os }}_{{ .Arch }}{{ if .Arm }}v{{ .Arm }}{{ end }}{{ if .Mips }}_{{ .Mips }}{{ end }}' - replacements: - darwin: darwin - linux: linux - windows: windows - 386: i386 - amd64: x86_64 - format_overrides: - - goos: windows - format: zip - files: - LICENSE - readme.md diff --git a/vendor/github.com/butuzov/mirror/LICENSE b/vendor/github.com/butuzov/mirror/LICENSE deleted file mode 100644 index a9752e972..000000000 --- a/vendor/github.com/butuzov/mirror/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -MIT License - -Copyright (c) 2021 Oleg Butuzov - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. diff --git a/vendor/github.com/butuzov/mirror/MIRROR_FUNCS.md b/vendor/github.com/butuzov/mirror/MIRROR_FUNCS.md deleted file mode 100644 index 776816e51..000000000 --- a/vendor/github.com/butuzov/mirror/MIRROR_FUNCS.md +++ /dev/null @@ -1,150 +0,0 @@
-| string function / method | []byte (or rune) mirror |
-| --- | --- |
-| func (b *bufio.Writer) WriteString(s string) (int, error) | func (b *bufio.Writer) Write(p []byte) (int, error), func (b *bufio.Writer) WriteRune(r rune) (int, error) |
-| func (b *bytes.Buffer) WriteString(s string) (int, error) | func (b *bytes.Buffer) Write(p []byte) (int, error), func (b *bytes.Buffer) WriteRune(r rune) (int, error) |
-| func strings.Compare(a, b string) int | func bytes.Compare(a, b []byte) int |
-| func strings.Contains(s, substr string) bool | func bytes.Contains(b, subslice []byte) bool |
-| func strings.ContainsAny(s, chars string) bool | func bytes.ContainsAny(b []byte, chars string) bool |
-| func strings.ContainsRune(s string, r rune) bool | func bytes.ContainsRune(b []byte, r rune) bool |
-| func strings.Count(s, substr string) int | func bytes.Count(s, sep []byte) int |
-| func strings.EqualFold(s, t string) bool | func bytes.EqualFold(s, t []byte) bool |
-| func strings.HasPrefix(s, prefix string) bool | func bytes.HasPrefix(s, prefix []byte) bool |
-| func strings.HasSuffix(s, suffix string) bool | func bytes.HasSuffix(s, suffix []byte) bool |
-| func strings.Index(s, substr string) int | func bytes.Index(s, sep []byte) int |
-| func strings.IndexAny(s, chars string) int | func bytes.IndexAny(s []byte, chars string) int |
-| func strings.IndexByte(s string, c byte) int | func bytes.IndexByte(b []byte, c byte) int |
-| func strings.IndexFunc(s string, f func(rune) bool) int | func bytes.IndexFunc(s []byte, f func(r rune) bool) int |
-| func strings.IndexRune(s string, r rune) int | func bytes.IndexRune(s []byte, r rune) int |
-| func strings.LastIndex(s, sep string) int | func bytes.LastIndex(s, sep []byte) int |
-| func strings.LastIndexAny(s, chars string) int | func bytes.LastIndexAny(s []byte, chars string) int |
-| func strings.LastIndexByte(s string, c byte) int | func bytes.LastIndexByte(s []byte, c byte) int |
-| func strings.LastIndexFunc(s string, f func(rune) bool) int | func bytes.LastIndexFunc(s []byte, f func(r rune) bool) int |
-| func bytes.NewBufferString(s string) *bytes.Buffer | func bytes.NewBuffer(buf []byte) *bytes.Buffer |
-| func (h *hash/maphash.Hash) WriteString(s string) (int, error) | func (h *hash/maphash.Hash) Write(b []byte) (int, error) |
-| func (rw *net/http/httptest.ResponseRecorder) WriteString(str string) (int, error) | func (rw *net/http/httptest.ResponseRecorder) Write(buf []byte) (int, error) |
-| func (f *os.File) WriteString(s string) (n int, err error) | func (f *os.File) Write(b []byte) (n int, err error) |
-| func regexp.MatchString(pattern string, s string) (bool, error) | func regexp.Match(pattern string, b []byte) (bool, error) |
-| func (re *regexp.Regexp) FindAllStringIndex(s string, n int) [][]int | func (re *regexp.Regexp) FindAllIndex(b []byte, n int) [][]int |
-| func (re *regexp.Regexp) FindAllStringSubmatch(s string, n int) [][]string | func (re *regexp.Regexp) FindAllSubmatch(b []byte, n int) [][][]byte |
-| func (re *regexp.Regexp) FindStringIndex(s string) (loc []int) | func (re *regexp.Regexp) FindIndex(b []byte) (loc []int) |
-| func (re *regexp.Regexp) FindStringSubmatchIndex(s string) []int | func (re *regexp.Regexp) FindSubmatchIndex(b []byte) []int |
-| func (re *regexp.Regexp) MatchString(s string) bool | func (re *regexp.Regexp) Match(b []byte) bool |
-| func (b *strings.Builder) WriteString(s string) (int, error) | func (b *strings.Builder) Write(p []byte) (int, error), func (b *strings.Builder) WriteRune(r rune) (int, error) |
-| func utf8.ValidString(s string) bool | func utf8.Valid(p []byte) bool |
-| func utf8.FullRuneInString(s string) bool | func utf8.FullRune(p []byte) bool |
-| func utf8.RuneCountInString(s string) (n int) | func utf8.RuneCount(p []byte) int |
-| func utf8.DecodeLastRuneInString(s string) (rune, int) | func utf8.DecodeLastRune(p []byte) (rune, int) |
-| func utf8.DecodeRuneInString(s string) (rune, int) | func utf8.DecodeRune(p []byte) (rune, int) |
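The table above pairs each string-accepting API with its []byte/rune counterpart; mirror flags calls that convert between string and []byte just to reach the wrong half of such a pair. A minimal illustrative sketch (not part of the vendored sources) of the pattern it reports:

```Go
package main

import (
	"bytes"
	"fmt"
)

func main() {
	name := "gopher"

	var b bytes.Buffer

	// mirror reports this pattern: the string is converted to []byte only to
	// call Write, although the mirrored method WriteString takes the string
	// directly (see the (*bytes.Buffer) row in the table above).
	b.Write([]byte(name))

	// The equivalent call without the conversion and extra allocation.
	b.WriteString(name)

	fmt.Println(b.String())
}
```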
diff --git a/vendor/github.com/butuzov/mirror/Makefile b/vendor/github.com/butuzov/mirror/Makefile deleted file mode 100644 index b4b952b01..000000000 --- a/vendor/github.com/butuzov/mirror/Makefile +++ /dev/null @@ -1,58 +0,0 @@ -# --- Required ---------------------------------------------------------------- -export PATH := $(PWD)/bin:$(PATH) # ./bin to $PATH -export SHELL := bash # Default Shell - -GOPKGS := $(shell go list ./... | grep -vE "(cmd|sandbox|testdata)" | tr -s '\n' ',' | sed 's/.\{1\}$$//' ) - - -build: - @ go build -trimpath -ldflags="-w -s" \ - -o bin/mirror ./cmd/mirror/ - -build-race: - @ go build -race -trimpath -ldflags="-w -s" \ - -o bin/mirror ./cmd/mirror/ - -tests: - go test -v -count=1 -race \ - -failfast \ - -parallel=2 \ - -timeout=1m \ - -covermode=atomic \ - -coverpkg=$(GOPKGS) -coverprofile=coverage.cov ./... - -tests-summary: - go test -v -count=1 -race \ - -failfast \ - -parallel=2 \ - -timeout=1m \ - -covermode=atomic \ - -coverpkg=$(GOPKGS) -coverprofile=coverage.cov --json ./... | tparse -all - -test-generate: - go run ./cmd/internal/generate-tests/ "$(PWD)/testdata" - -lints: - golangci-lint run --no-config ./... -D deadcode --skip-dirs "^(cmd|sandbox|testdata)" - - -cover: - go tool cover -html=coverage.cov - -install: - go install -trimpath -v -ldflags="-w -s" \ - ./cmd/mirror - -funcs: - echo "" > "out/results.txt" - go list std | grep -v "vendor" | grep -v "internal" | \ - xargs -I {} sh -c 'go doc -all {} > out/$(basename {}).txt' - -bin/goreleaser: - @curl -Ls https://github.com/goreleaser/goreleaser/releases/download/v1.17.2/goreleaser_Darwin_all.tar.gz | tar -zOxf - goreleaser > ./bin/goreleaser - chmod 0755 ./bin/goreleaser - -test-release: bin/goreleaser - goreleaser release --help - goreleaser release -f .goreleaser.yaml \ - --skip-validate --skip-publish --clean diff --git a/vendor/github.com/butuzov/mirror/Taskfile.yml b/vendor/github.com/butuzov/mirror/Taskfile.yml deleted file mode 100644 index 26c9ba257..000000000 --- a/vendor/github.com/butuzov/mirror/Taskfile.yml +++ /dev/null @@ -1,28 +0,0 @@ -version: '3' - -tasks: - default: - sources: - - "./**/*.go" - method: timestamp - cmds: - - clear - - make build - - make build-race - - task: lints - # - make test-generate - - task: tests - - cmd: go run ./cmd/mirror/ --with-tests --with-debug ./sandbox - ignore_error: true - - testcase: go test -v -failfast -count=1 -run "TestAll/{{ .Case }}" ./... 
- - tests: - cmds: - - cmd: make tests - ignore_error: true - - lints: - cmds: - - cmd: make lints - ignore_error: true diff --git a/vendor/github.com/butuzov/mirror/analyzer.go b/vendor/github.com/butuzov/mirror/analyzer.go deleted file mode 100644 index 13ded46c6..000000000 --- a/vendor/github.com/butuzov/mirror/analyzer.go +++ /dev/null @@ -1,144 +0,0 @@ -package mirror - -import ( - "flag" - "go/ast" - "strings" - - "github.com/butuzov/mirror/internal/checker" - - "golang.org/x/tools/go/analysis" - "golang.org/x/tools/go/analysis/passes/inspect" - "golang.org/x/tools/go/ast/inspector" -) - -func NewAnalyzer() *analysis.Analyzer { - flags := flags() - - return &analysis.Analyzer{ - Name: "mirror", - Doc: "reports wrong mirror patterns of bytes/strings usage", - Run: run, - Requires: []*analysis.Analyzer{ - inspect.Analyzer, - }, - Flags: flags, - } -} - -func run(pass *analysis.Pass) (interface{}, error) { - withTests := pass.Analyzer.Flags.Lookup("with-tests").Value.String() == "true" - // --- Reporting violations via issues --------------------------------------- - for _, violation := range Run(pass, withTests) { - pass.Report(violation.Diagnostic(pass.Fset)) - } - - return nil, nil -} - -func Run(pass *analysis.Pass, withTests bool) []*checker.Violation { - violations := []*checker.Violation{} - // --- Setup ----------------------------------------------------------------- - - check := checker.New( - BytesFunctions, BytesBufferMethods, - RegexpFunctions, RegexpRegexpMethods, - StringFunctions, StringsBuilderMethods, - BufioMethods, HTTPTestMethods, - OsFileMethods, MaphashMethods, - UTF8Functions, - ) - - check.Type = checker.WrapType(pass.TypesInfo) - check.Print = checker.WrapPrint(pass.Fset) - - ins, _ := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) - imports := checker.Load(pass.Fset, ins) - - // --- Preorder Checker ------------------------------------------------------ - ins.Preorder([]ast.Node{(*ast.CallExpr)(nil)}, func(n ast.Node) { - callExpr := n.(*ast.CallExpr) - fileName := pass.Fset.Position(callExpr.Pos()).Filename - - if !withTests && strings.HasSuffix(fileName, "_test.go") { - return - } - - // ------------------------------------------------------------------------- - switch expr := callExpr.Fun.(type) { - // NOTE(butuzov): Regular calls (`*ast.SelectorExpr`) like strings.HasPrefix - // or re.Match are handled by this check - case *ast.SelectorExpr: - - x, ok := expr.X.(*ast.Ident) - if !ok { - return - } - - // TODO(butuzov): Add check for the ast.ParenExpr in e.Fun so we can - // target the constructions like this (and other calls) - // ----------------------------------------------------------------------- - // Example: - // (&maphash.Hash{}).Write([]byte("foobar")) - // ----------------------------------------------------------------------- - - // Case 1: Is this is a function call? - pkgName, name := x.Name, expr.Sel.Name - if pkg, ok := imports.Lookup(fileName, pkgName); ok { - if v := check.Match(pkg, name); v != nil { - if args, found := check.Handle(v, callExpr); found { - violations = append(violations, v.With(check.Print(expr.X), callExpr, args)) - } - return - } - } - - // Case 2: Is this is a method call? 
- tv := pass.TypesInfo.Types[expr.X] - if !tv.IsValue() || tv.Type == nil { - return - } - - pkgStruct, name := cleanAsterisk(tv.Type.String()), expr.Sel.Name - for _, v := range check.Matches(pkgStruct, name) { - if v == nil { - continue - } - - if args, found := check.Handle(v, callExpr); found { - violations = append(violations, v.With(check.Print(expr.X), callExpr, args)) - return - } - } - - case *ast.Ident: - // NOTE(butuzov): Special case of "." imported packages, only functions. - - if pkg, ok := imports.Lookup(fileName, "."); ok { - if v := check.Match(pkg, expr.Name); v != nil { - if args, found := check.Handle(v, callExpr); found { - violations = append(violations, v.With(nil, callExpr, args)) - } - return - } - } - } - }) - - return violations -} - -func flags() flag.FlagSet { - set := flag.NewFlagSet("", flag.PanicOnError) - set.Bool("with-tests", false, "do not skip tests in reports") - set.Bool("with-debug", false, "debug linter run (development only)") - return *set -} - -func cleanAsterisk(s string) string { - if strings.HasPrefix(s, "*") { - return s[1:] - } - - return s -} diff --git a/vendor/github.com/butuzov/mirror/checkers_bufio.go b/vendor/github.com/butuzov/mirror/checkers_bufio.go deleted file mode 100644 index 292ed269a..000000000 --- a/vendor/github.com/butuzov/mirror/checkers_bufio.go +++ /dev/null @@ -1,56 +0,0 @@ -package mirror - -import "github.com/butuzov/mirror/internal/checker" - -var BufioMethods = []checker.Violation{ - { // (*bufio.Writer).Write - Targets: checker.Bytes, - Type: checker.Method, - Package: "bufio", - Struct: "Writer", - Caller: "Write", - Args: []int{0}, - AltCaller: "WriteString", - - Generate: &checker.Generate{ - PreCondition: `b := bufio.Writer{}`, - Pattern: `Write($0)`, - Returns: 2, - }, - }, - { // (*bufio.Writer).WriteString - Type: checker.Method, - Targets: checker.Strings, - Package: "bufio", - Struct: "Writer", - Caller: "WriteString", - Args: []int{0}, - AltCaller: "Write", - - Generate: &checker.Generate{ - PreCondition: `b := bufio.Writer{}`, - Pattern: `WriteString($0)`, - Returns: 2, - }, - }, - { // (*bufio.Writer).WriteString -> (*bufio.Writer).WriteRune - Targets: checker.Strings, - Type: checker.Method, - Package: "bufio", - Struct: "Writer", - Caller: "WriteString", - Args: []int{0}, - ArgsType: checker.Rune, - AltCaller: "WriteRune", - }, - // { // (*bufio.Writer).WriteString -> (*bufio.Writer).WriteByte - // Targets: checker.Strings, - // Type: checker.Method, - // Package: "strings", - // Struct: "Builder", - // Caller: "WriteString", - // Args: []int{0}, - // ArgsType: checker.Byte, - // AltCaller: "WriteByte", // byte - // }, -} diff --git a/vendor/github.com/butuzov/mirror/checkers_bytes.go b/vendor/github.com/butuzov/mirror/checkers_bytes.go deleted file mode 100644 index c490a3784..000000000 --- a/vendor/github.com/butuzov/mirror/checkers_bytes.go +++ /dev/null @@ -1,326 +0,0 @@ -package mirror - -import "github.com/butuzov/mirror/internal/checker" - -var ( - BytesFunctions = []checker.Violation{ - { // bytes.NewBuffer - Targets: checker.Bytes, - Type: checker.Function, - Package: "bytes", - Caller: "NewBuffer", - Args: []int{0}, - AltCaller: "NewBufferString", - - Generate: &checker.Generate{ - Pattern: `NewBuffer($0)`, - Returns: 1, - }, - }, - { // bytes.NewBufferString - Targets: checker.Strings, - Type: checker.Function, - Package: "bytes", - Caller: "NewBufferString", - Args: []int{0}, - AltCaller: "NewBuffer", - - Generate: &checker.Generate{ - Pattern: `NewBufferString($0)`, - Returns: 1, - }, - 
}, - { // bytes.Compare: - Targets: checker.Bytes, - Type: checker.Function, - Package: "bytes", - Caller: "Compare", - Args: []int{0, 1}, - AltPackage: "strings", - AltCaller: "Compare", - - Generate: &checker.Generate{ - Pattern: `Compare($0, $1)`, - Returns: 1, - }, - }, - { // bytes.Contains: - Targets: checker.Bytes, - Type: checker.Function, - Package: "bytes", - Caller: "Contains", - Args: []int{0, 1}, - AltPackage: "strings", - AltCaller: "Contains", - - Generate: &checker.Generate{ - Pattern: `Contains($0, $1)`, - Returns: 1, - }, - }, - { // bytes.ContainsAny - Targets: checker.Bytes, - Type: checker.Function, - Package: "bytes", - Caller: "ContainsAny", - Args: []int{0}, - AltPackage: "strings", - AltCaller: "ContainsAny", - - Generate: &checker.Generate{ - Pattern: `ContainsAny($0, "f")`, - Returns: 1, - }, - }, - { // bytes.ContainsRune - Targets: checker.Bytes, - Type: checker.Function, - Package: "bytes", - Caller: "ContainsRune", - Args: []int{0}, - AltPackage: "strings", - AltCaller: "ContainsRune", - - Generate: &checker.Generate{ - Pattern: `ContainsRune($0, 'ф')`, - Returns: 1, - }, - }, - { // bytes.Count - Targets: checker.Bytes, - Type: checker.Function, - Package: "bytes", - Caller: "Count", - Args: []int{0, 1}, - AltPackage: "strings", - AltCaller: "Count", - - Generate: &checker.Generate{ - Pattern: `Count($0, $1)`, - Returns: 1, - }, - }, - { // bytes.EqualFold - Targets: checker.Bytes, - Type: checker.Function, - Package: "bytes", - Caller: "EqualFold", - Args: []int{0, 1}, - AltPackage: "strings", - AltCaller: "EqualFold", - - Generate: &checker.Generate{ - Pattern: `EqualFold($0, $1)`, - Returns: 1, - }, - }, - - { // bytes.HasPrefix - Targets: checker.Bytes, - Type: checker.Function, - Package: "bytes", - Caller: "HasPrefix", - Args: []int{0, 1}, - AltPackage: "strings", - AltCaller: "HasPrefix", - - Generate: &checker.Generate{ - Pattern: `HasPrefix($0, $1)`, - Returns: 1, - }, - }, - { // bytes.HasSuffix - Targets: checker.Bytes, - Type: checker.Function, - Package: "bytes", - Caller: "HasSuffix", - Args: []int{0, 1}, - AltPackage: "strings", - AltCaller: "HasSuffix", - - Generate: &checker.Generate{ - Pattern: `HasSuffix($0, $1)`, - Returns: 1, - }, - }, - { // bytes.Index - Targets: checker.Bytes, - Type: checker.Function, - Package: "bytes", - Caller: "Index", - Args: []int{0, 1}, - AltPackage: "strings", - AltCaller: "Index", - - Generate: &checker.Generate{ - Pattern: `Index($0, $1)`, - Returns: 1, - }, - }, - { // bytes.IndexAny - Targets: checker.Bytes, - Type: checker.Function, - Package: "bytes", - Caller: "IndexAny", - Args: []int{0}, - AltPackage: "strings", - AltCaller: "IndexAny", - - Generate: &checker.Generate{ - Pattern: `IndexAny($0, "f")`, - Returns: 1, - }, - }, - { // bytes.IndexByte - Targets: checker.Bytes, - Type: checker.Function, - Package: "bytes", - Caller: "IndexByte", - Args: []int{0}, - AltPackage: "strings", - AltCaller: "IndexByte", - - Generate: &checker.Generate{ - Pattern: `IndexByte($0, 'f')`, - Returns: 1, - }, - }, - { // bytes.IndexFunc - Targets: checker.Bytes, - Type: checker.Function, - Package: "bytes", - Caller: "IndexFunc", - Args: []int{0}, - AltPackage: "strings", - AltCaller: "IndexFunc", - - Generate: &checker.Generate{ - Pattern: `IndexFunc($0, func(rune) bool {return true })`, - Returns: 1, - }, - }, - { // bytes.IndexRune - Targets: checker.Bytes, - Type: checker.Function, - Package: "bytes", - Caller: "IndexRune", - Args: []int{0}, - AltPackage: "strings", - AltCaller: "IndexRune", - - Generate: 
&checker.Generate{ - Pattern: `IndexRune($0, rune('ф'))`, - Returns: 1, - }, - }, - { // bytes.LastIndex - Targets: checker.Bytes, - Type: checker.Function, - Package: "bytes", - Caller: "LastIndex", - Args: []int{0, 1}, - AltPackage: "strings", - AltCaller: "LastIndex", - - Generate: &checker.Generate{ - Pattern: `LastIndex($0, $1)`, - Returns: 1, - }, - }, - { // bytes.LastIndexAny - Targets: checker.Bytes, - Type: checker.Function, - Package: "bytes", - Caller: "LastIndexAny", - Args: []int{0}, - AltPackage: "strings", - AltCaller: "LastIndexAny", - - Generate: &checker.Generate{ - Pattern: `LastIndexAny($0, "ф")`, - Returns: 1, - }, - }, - { // bytes.LastIndexByte - Targets: checker.Bytes, - Type: checker.Function, - Package: "bytes", - Caller: "LastIndexByte", - Args: []int{0}, - AltPackage: "strings", - AltCaller: "LastIndexByte", - - Generate: &checker.Generate{ - Pattern: `LastIndexByte($0, 'f')`, - Returns: 1, - }, - }, - { // bytes.LastIndexFunc - Targets: checker.Bytes, - Type: checker.Function, - Package: "bytes", - Caller: "LastIndexFunc", - Args: []int{0}, - AltPackage: "strings", - AltCaller: "LastIndexFunc", - - Generate: &checker.Generate{ - Pattern: `LastIndexFunc($0, func(rune) bool {return true })`, - Returns: 1, - }, - }, - } - - BytesBufferMethods = []checker.Violation{ - { // (*bytes.Buffer).Write - Targets: checker.Bytes, - Type: checker.Method, - Package: "bytes", - Struct: "Buffer", - Caller: "Write", - Args: []int{0}, - AltCaller: "WriteString", - - Generate: &checker.Generate{ - PreCondition: `bb := bytes.Buffer{}`, - Pattern: `Write($0)`, - Returns: 2, - }, - }, - { // (*bytes.Buffer).WriteString - Targets: checker.Strings, - Type: checker.Method, - Package: "bytes", - Struct: "Buffer", - Caller: "WriteString", - Args: []int{0}, - AltCaller: "Write", - - Generate: &checker.Generate{ - PreCondition: `bb := bytes.Buffer{}`, - Pattern: `WriteString($0)`, - Returns: 2, - }, - }, - { // (*bytes.Buffer).WriteString -> (*bytes.Buffer).WriteRune - Targets: checker.Strings, - Type: checker.Method, - Package: "bytes", - Struct: "Buffer", - Caller: "WriteString", - Args: []int{0}, - ArgsType: checker.Rune, - AltCaller: "WriteRune", - }, - // { // (*bytes.Buffer).WriteString -> (*bytes.Buffer).WriteByte - // Targets: checker.Strings, - // Type: checker.Method, - // Package: "bytes", - // Struct: "Buffer", - // Caller: "WriteString", - // Args: []int{0}, - // ArgsType: checker.Byte, - // AltCaller: "WriteByte", - // }, - } -) diff --git a/vendor/github.com/butuzov/mirror/checkers_httptest.go b/vendor/github.com/butuzov/mirror/checkers_httptest.go deleted file mode 100644 index ae6750930..000000000 --- a/vendor/github.com/butuzov/mirror/checkers_httptest.go +++ /dev/null @@ -1,36 +0,0 @@ -package mirror - -import "github.com/butuzov/mirror/internal/checker" - -var HTTPTestMethods = []checker.Violation{ - { // (*net/http/httptest.ResponseRecorder).Write - Targets: checker.Bytes, - Type: checker.Method, - Package: "net/http/httptest", - Struct: "ResponseRecorder", - Caller: "Write", - Args: []int{0}, - AltCaller: "WriteString", - - Generate: &checker.Generate{ - PreCondition: `h := httptest.ResponseRecorder{}`, - Pattern: `Write($0)`, - Returns: 2, - }, - }, - { // (*net/http/httptest.ResponseRecorder).WriteString - Targets: checker.Strings, - Type: checker.Method, - Package: "net/http/httptest", - Struct: "ResponseRecorder", - Caller: "WriteString", - Args: []int{0}, - AltCaller: "Write", - - Generate: &checker.Generate{ - PreCondition: `h := httptest.ResponseRecorder{}`, - 
Pattern: `WriteString($0)`, - Returns: 2, - }, - }, -} diff --git a/vendor/github.com/butuzov/mirror/checkers_maphash.go b/vendor/github.com/butuzov/mirror/checkers_maphash.go deleted file mode 100644 index 4d184d2a9..000000000 --- a/vendor/github.com/butuzov/mirror/checkers_maphash.go +++ /dev/null @@ -1,36 +0,0 @@ -package mirror - -import "github.com/butuzov/mirror/internal/checker" - -var MaphashMethods = []checker.Violation{ - { // (*hash/maphash).Write - Targets: checker.Bytes, - Type: checker.Method, - Package: "hash/maphash", - Struct: "Hash", - Caller: "Write", - Args: []int{0}, - AltCaller: "WriteString", - - Generate: &checker.Generate{ - PreCondition: `h := maphash.Hash{}`, - Pattern: `Write($0)`, - Returns: 2, - }, - }, - { // (*hash/maphash).WriteString - Targets: checker.Strings, - Type: checker.Method, - Package: "hash/maphash", - Struct: "Hash", - Caller: "WriteString", - Args: []int{0}, - AltCaller: "Write", - - Generate: &checker.Generate{ - PreCondition: `h := maphash.Hash{}`, - Pattern: `WriteString($0)`, - Returns: 2, - }, - }, -} diff --git a/vendor/github.com/butuzov/mirror/checkers_os.go b/vendor/github.com/butuzov/mirror/checkers_os.go deleted file mode 100644 index 09f5a18e5..000000000 --- a/vendor/github.com/butuzov/mirror/checkers_os.go +++ /dev/null @@ -1,36 +0,0 @@ -package mirror - -import "github.com/butuzov/mirror/internal/checker" - -var OsFileMethods = []checker.Violation{ - { // (*os.File).Write - Targets: checker.Bytes, - Type: checker.Method, - Package: "os", - Struct: "File", - Caller: "Write", - Args: []int{0}, - AltCaller: "WriteString", - - Generate: &checker.Generate{ - PreCondition: `f := &os.File{}`, - Pattern: `Write($0)`, - Returns: 2, - }, - }, - { // (*os.File).WriteString - Targets: checker.Strings, - Type: checker.Method, - Package: "os", - Struct: "File", - Caller: "WriteString", - Args: []int{0}, - AltCaller: "Write", - - Generate: &checker.Generate{ - PreCondition: `f := &os.File{}`, - Pattern: `WriteString($0)`, - Returns: 2, - }, - }, -} diff --git a/vendor/github.com/butuzov/mirror/checkers_regexp.go b/vendor/github.com/butuzov/mirror/checkers_regexp.go deleted file mode 100644 index 17175e028..000000000 --- a/vendor/github.com/butuzov/mirror/checkers_regexp.go +++ /dev/null @@ -1,187 +0,0 @@ -package mirror - -import "github.com/butuzov/mirror/internal/checker" - -var ( - RegexpFunctions = []checker.Violation{ - { // regexp.Match - Targets: checker.Bytes, - Type: checker.Function, - Package: "regexp", - Caller: "Match", - Args: []int{1}, - AltCaller: "MatchString", - - Generate: &checker.Generate{ - Pattern: `Match("foo", $0)`, - Returns: 2, - }, - }, - { // regexp.MatchString - Targets: checker.Strings, - Type: checker.Function, - Package: "regexp", - Caller: "MatchString", - Args: []int{1}, - AltCaller: "Match", - - Generate: &checker.Generate{ - Pattern: `MatchString("foo", $0)`, - Returns: 2, - }, - }, - } - - RegexpRegexpMethods = []checker.Violation{ - { // (*regexp.Regexp).Match - Targets: checker.Bytes, - Type: checker.Method, - Package: "regexp", - Struct: "Regexp", - Caller: "Match", - Args: []int{0}, - AltCaller: "MatchString", - - Generate: &checker.Generate{ - PreCondition: `re := regexp.MustCompile(".*")`, - Pattern: `Match($0)`, - Returns: 1, - }, - }, - { // (*regexp.Regexp).MatchString - Targets: checker.Strings, - Type: checker.Method, - Package: "regexp", - Struct: "Regexp", - Caller: "MatchString", - Args: []int{0}, - AltCaller: "Match", - - Generate: &checker.Generate{ - PreCondition: `re := 
regexp.MustCompile(".*")`, - Pattern: `MatchString($0)`, - Returns: 1, - }, - }, - { // (*regexp.Regexp).FindAllIndex - Targets: checker.Bytes, - Type: checker.Method, - Package: "regexp", - Struct: "Regexp", - Caller: "FindAllIndex", - Args: []int{0}, - AltCaller: "FindAllStringIndex", - - Generate: &checker.Generate{ - PreCondition: `re := regexp.MustCompile(".*")`, - Pattern: `FindAllIndex($0, 1)`, - Returns: 1, - }, - }, - { // (*regexp.Regexp).FindAllStringIndex - Targets: checker.Strings, - Type: checker.Method, - Package: "regexp", - Struct: "Regexp", - Caller: "FindAllStringIndex", - Args: []int{0}, - AltCaller: "FindAllIndex", - - Generate: &checker.Generate{ - PreCondition: `re := regexp.MustCompile(".*")`, - Pattern: `FindAllStringIndex($0, 1)`, - Returns: 1, - }, - }, - { // (*regexp.Regexp).FindAllSubmatchIndex - Targets: checker.Bytes, - Type: checker.Method, - Package: "regexp", - Struct: "Regexp", - Caller: "FindAllSubmatchIndex", - Args: []int{0}, - AltCaller: "FindAllStringSubmatchIndex", - - Generate: &checker.Generate{ - PreCondition: `re := regexp.MustCompile(".*")`, - Pattern: `FindAllSubmatchIndex($0, 1)`, - Returns: 1, - }, - }, - { // (*regexp.Regexp).FindAllStringSubmatchIndex - Targets: checker.Strings, - Type: checker.Method, - Package: "regexp", - Struct: "Regexp", - Caller: "FindAllStringSubmatchIndex", - Args: []int{0}, - AltCaller: "FindAllSubmatchIndex", - - Generate: &checker.Generate{ - PreCondition: `re := regexp.MustCompile(".*")`, - Pattern: `FindAllStringSubmatchIndex($0, 1)`, - Returns: 1, - }, - }, - { // (*regexp.Regexp).FindIndex - Targets: checker.Bytes, - Type: checker.Method, - Package: "regexp", - Struct: "Regexp", - Caller: "FindIndex", - Args: []int{0}, - AltCaller: "FindStringIndex", - - Generate: &checker.Generate{ - PreCondition: `re := regexp.MustCompile(".*")`, - Pattern: `FindIndex($0)`, - Returns: 1, - }, - }, - { // (*regexp.Regexp).FindStringIndex - Targets: checker.Strings, - Type: checker.Method, - Package: "regexp", - Struct: "Regexp", - Caller: "FindStringIndex", - Args: []int{0}, - AltCaller: "FindIndex", - - Generate: &checker.Generate{ - PreCondition: `re := regexp.MustCompile(".*")`, - Pattern: `FindStringIndex($0)`, - Returns: 1, - }, - }, - { // (*regexp.Regexp).FindSubmatchIndex - Targets: checker.Bytes, - Type: checker.Method, - Package: "regexp", - Struct: "Regexp", - Caller: "FindSubmatchIndex", - Args: []int{0}, - AltCaller: "FindStringSubmatchIndex", - - Generate: &checker.Generate{ - PreCondition: `re := regexp.MustCompile(".*")`, - Pattern: `FindSubmatchIndex($0)`, - Returns: 1, - }, - }, - { // (*regexp.Regexp).FindStringSubmatchIndex - Targets: checker.Strings, - Type: checker.Method, - Package: "regexp", - Struct: "Regexp", - Caller: "FindStringSubmatchIndex", - Args: []int{0}, - AltCaller: "FindSubmatchIndex", - - Generate: &checker.Generate{ - PreCondition: `re := regexp.MustCompile(".*")`, - Pattern: `FindStringSubmatchIndex($0)`, - Returns: 1, - }, - }, - } -) diff --git a/vendor/github.com/butuzov/mirror/checkers_strings.go b/vendor/github.com/butuzov/mirror/checkers_strings.go deleted file mode 100644 index ead7e9cc7..000000000 --- a/vendor/github.com/butuzov/mirror/checkers_strings.go +++ /dev/null @@ -1,299 +0,0 @@ -package mirror - -import "github.com/butuzov/mirror/internal/checker" - -var ( - StringFunctions = []checker.Violation{ - { // strings.Compare - Targets: checker.Strings, - Type: checker.Function, - Package: "strings", - Caller: "Compare", - Args: []int{0, 1}, - AltPackage: "bytes", - 
AltCaller: "Compare", - - Generate: &checker.Generate{ - Pattern: `Compare($0,$1)`, - Returns: 1, - }, - }, - { // strings.Contains - Targets: checker.Strings, - Type: checker.Function, - Package: "strings", - Caller: "Contains", - Args: []int{0, 1}, - AltPackage: "bytes", - AltCaller: "Contains", - - Generate: &checker.Generate{ - Pattern: `Contains($0,$1)`, - Returns: 1, - }, - }, - { // strings.ContainsAny - Targets: checker.Strings, - Type: checker.Function, - Package: "strings", - Caller: "ContainsAny", - Args: []int{0}, - AltPackage: "bytes", - AltCaller: "ContainsAny", - - Generate: &checker.Generate{ - Pattern: `ContainsAny($0,"foobar")`, - Returns: 1, - }, - }, - { // strings.ContainsRune - Targets: checker.Strings, - Type: checker.Function, - Package: "strings", - Caller: "ContainsRune", - Args: []int{0}, - AltPackage: "bytes", - AltCaller: "ContainsRune", - - Generate: &checker.Generate{ - Pattern: `ContainsRune($0,'ф')`, - Returns: 1, - }, - }, - { // strings.Count - Targets: checker.Strings, - Type: checker.Function, - Package: "strings", - Caller: "Count", - Args: []int{0, 1}, - AltPackage: "bytes", - AltCaller: "Count", - - Generate: &checker.Generate{ - Pattern: `Count($0, $1)`, - Returns: 1, - }, - }, - { // strings.EqualFold - Targets: checker.Strings, - Type: checker.Function, - Package: "strings", - Caller: "EqualFold", - Args: []int{0, 1}, - AltPackage: "bytes", - AltCaller: "EqualFold", - - Generate: &checker.Generate{ - Pattern: `EqualFold($0,$1)`, - Returns: 1, - }, - }, - { // strings.HasPrefix - Targets: checker.Strings, - Type: checker.Function, - Package: "strings", - Caller: "HasPrefix", - Args: []int{0, 1}, - AltPackage: "bytes", - AltCaller: "HasPrefix", - - Generate: &checker.Generate{ - Pattern: `HasPrefix($0,$1)`, - Returns: 1, - }, - }, - { // strings.HasSuffix - Targets: checker.Strings, - Type: checker.Function, - Package: "strings", - Caller: "HasSuffix", - Args: []int{0, 1}, - AltPackage: "bytes", - AltCaller: "HasSuffix", - - Generate: &checker.Generate{ - Pattern: `HasSuffix($0,$1)`, - Returns: 1, - }, - }, - { // strings.Index - Targets: checker.Strings, - Type: checker.Function, - Package: "strings", - Caller: "Index", - Args: []int{0, 1}, - AltPackage: "bytes", - AltCaller: "Index", - - Generate: &checker.Generate{ - Pattern: `Index($0,$1)`, - Returns: 1, - }, - }, - { // strings.IndexAny - Targets: checker.Strings, - Type: checker.Function, - Package: "strings", - Caller: "IndexAny", - Args: []int{0}, - AltPackage: "bytes", - AltCaller: "IndexAny", - - Generate: &checker.Generate{ - Pattern: `IndexAny($0, "f")`, - Returns: 1, - }, - }, - { // strings.IndexByte - Targets: checker.Strings, - Type: checker.Function, - Package: "strings", - Caller: "IndexByte", - Args: []int{0}, - AltPackage: "bytes", - AltCaller: "IndexByte", - - Generate: &checker.Generate{ - Pattern: `IndexByte($0, byte('f'))`, - Returns: 1, - }, - }, - { // strings.IndexFunc - Targets: checker.Strings, - Type: checker.Function, - Package: "strings", - Caller: "IndexFunc", - Args: []int{0}, - AltPackage: "bytes", - AltCaller: "IndexFunc", - - Generate: &checker.Generate{ - Pattern: `IndexFunc($0,func(r rune) bool { return true })`, - Returns: 1, - }, - }, - { // strings.IndexRune - Targets: checker.Strings, - Type: checker.Function, - Package: "strings", - Caller: "IndexRune", - Args: []int{0}, - AltPackage: "bytes", - AltCaller: "IndexRune", - - Generate: &checker.Generate{ - Pattern: `IndexRune($0, rune('ф'))`, - Returns: 1, - }, - }, - { // strings.LastIndex - Targets: 
checker.Strings, - Type: checker.Function, - Package: "strings", - Caller: "LastIndex", - Args: []int{0, 1}, - AltPackage: "bytes", - AltCaller: "LastIndex", - - Generate: &checker.Generate{ - Pattern: `LastIndex($0,$1)`, - Returns: 1, - }, - }, - { // strings.LastIndexAny - Targets: checker.Strings, - Type: checker.Function, - Package: "strings", - Caller: "LastIndexAny", - Args: []int{0}, - AltPackage: "bytes", - AltCaller: "LastIndexAny", - - Generate: &checker.Generate{ - Pattern: `LastIndexAny($0,"f")`, - Returns: 1, - }, - }, - { // strings.LastIndexByte - Targets: checker.Strings, - Type: checker.Function, - Package: "strings", - Caller: "LastIndexByte", - Args: []int{0}, - AltPackage: "bytes", - AltCaller: "LastIndexByte", - - Generate: &checker.Generate{ - Pattern: `LastIndexByte($0, byte('f'))`, - Returns: 1, - }, - }, - { // strings.LastIndexFunc - Targets: checker.Strings, - Type: checker.Function, - Package: "strings", - Caller: "LastIndexFunc", - Args: []int{0}, - AltPackage: "bytes", - AltCaller: "LastIndexFunc", - - Generate: &checker.Generate{ - Pattern: `LastIndexFunc($0, func(r rune) bool { return true })`, - Returns: 1, - }, - }, - } - - StringsBuilderMethods = []checker.Violation{ - { // (*strings.Builder).Write - Targets: checker.Bytes, - Type: checker.Method, - Package: "strings", - Struct: "Builder", - Caller: "Write", - Args: []int{0}, - AltCaller: "WriteString", - - Generate: &checker.Generate{ - PreCondition: `builder := strings.Builder{}`, - Pattern: `Write($0)`, - Returns: 2, - }, - }, - { // (*strings.Builder).WriteString - Targets: checker.Strings, - Type: checker.Method, - Package: "strings", - Struct: "Builder", - Caller: "WriteString", - Args: []int{0}, - AltCaller: "Write", - - Generate: &checker.Generate{ - PreCondition: `builder := strings.Builder{}`, - Pattern: `WriteString($0)`, - Returns: 2, - }, - }, - { // (*strings.Builder).WriteString -> (*strings.Builder).WriteRune - Targets: checker.Strings, - Type: checker.Method, - Package: "strings", - Struct: "Builder", - Caller: "WriteString", - Args: []int{0}, - ArgsType: checker.Rune, - AltCaller: "WriteRune", - }, - // { // (*strings.Builder).WriteString -> (*strings.Builder).WriteByte - // Targets: checker.Strings, - // Type: checker.Method, - // Package: "strings", - // Struct: "Builder", - // Caller: "WriteString", - // Args: []int{0}, - // ArgsType: checker.Byte, - // AltCaller: "WriteByte", // byte - // }, - } -) diff --git a/vendor/github.com/butuzov/mirror/checkers_utf8.go b/vendor/github.com/butuzov/mirror/checkers_utf8.go deleted file mode 100644 index e7c4d5ba4..000000000 --- a/vendor/github.com/butuzov/mirror/checkers_utf8.go +++ /dev/null @@ -1,138 +0,0 @@ -package mirror - -import "github.com/butuzov/mirror/internal/checker" - -var UTF8Functions = []checker.Violation{ - { // utf8.Valid - Type: checker.Function, - Targets: checker.Bytes, - Package: "unicode/utf8", - Caller: "Valid", - Args: []int{0}, - AltCaller: "ValidString", - - Generate: &checker.Generate{ - Pattern: `Valid($0)`, - Returns: 1, - }, - }, - { // utf8.ValidString - Targets: checker.Strings, - Type: checker.Function, - Package: "unicode/utf8", - Caller: "ValidString", - Args: []int{0}, - AltCaller: "Valid", - - Generate: &checker.Generate{ - Pattern: `ValidString($0)`, - Returns: 1, - }, - }, - { // utf8.FullRune - Targets: checker.Bytes, - Type: checker.Function, - Package: "unicode/utf8", - Caller: "FullRune", - Args: []int{0}, - AltCaller: "FullRuneInString", - - Generate: &checker.Generate{ - Pattern: `FullRune($0)`, - 
Returns: 1, - }, - }, - { // utf8.FullRuneInString - Targets: checker.Strings, - Type: checker.Function, - Package: "unicode/utf8", - Caller: "FullRuneInString", - Args: []int{0}, - AltCaller: "FullRune", - - Generate: &checker.Generate{ - Pattern: `FullRuneInString($0)`, - Returns: 1, - }, - }, - - { // bytes.RuneCount - Targets: checker.Bytes, - Type: checker.Function, - Package: "unicode/utf8", - Caller: "RuneCount", - Args: []int{0}, - AltCaller: "RuneCountInString", - - Generate: &checker.Generate{ - Pattern: `RuneCount($0)`, - Returns: 1, - }, - }, - { // bytes.RuneCountInString - Targets: checker.Strings, - Type: checker.Function, - Package: "unicode/utf8", - Caller: "RuneCountInString", - Args: []int{0}, - AltCaller: "RuneCount", - - Generate: &checker.Generate{ - Pattern: `RuneCountInString($0)`, - Returns: 1, - }, - }, - - { // bytes.DecodeLastRune - Targets: checker.Bytes, - Type: checker.Function, - Package: "unicode/utf8", - Caller: "DecodeLastRune", - Args: []int{0}, - AltCaller: "DecodeLastRuneInString", - - Generate: &checker.Generate{ - Pattern: `DecodeLastRune($0)`, - Returns: 2, - }, - }, - { // utf8.DecodeLastRuneInString - Targets: checker.Strings, - Type: checker.Function, - Package: "unicode/utf8", - Caller: "DecodeLastRuneInString", - Args: []int{0}, - AltCaller: "DecodeLastRune", - - Generate: &checker.Generate{ - Pattern: `DecodeLastRuneInString($0)`, - Returns: 2, - }, - }, - { // utf8.DecodeRune - Targets: checker.Bytes, - Type: checker.Function, - Package: "unicode/utf8", - Caller: "DecodeRune", - Args: []int{0}, - AltCaller: "DecodeRuneInString", - - Generate: &checker.Generate{ - Pattern: `DecodeRune($0)`, - Returns: 2, - }, - }, - { // utf8.DecodeRuneInString - Targets: checker.Strings, - Type: checker.Function, - Package: "unicode/utf8", - Args: []int{0}, - Caller: "DecodeRuneInString", - AltCaller: "DecodeRune", - - Generate: &checker.Generate{ - Pattern: `DecodeRuneInString($0)`, - Returns: 2, - }, - }, -} diff --git a/vendor/github.com/butuzov/mirror/internal/checker/checker.go b/vendor/github.com/butuzov/mirror/internal/checker/checker.go deleted file mode 100644 index c1a941631..000000000 --- a/vendor/github.com/butuzov/mirror/internal/checker/checker.go +++ /dev/null @@ -1,147 +0,0 @@ -package checker - -import ( - "bytes" - "go/ast" - "go/printer" - "go/token" - "go/types" - "strings" -) - -// Checker will perform standart check on package and its methods. -type Checker struct { - Violations []Violation // List of available violations - Packages map[string][]int // Storing indexes of Violations per pkg/kg.Struct - Type func(ast.Expr) string // Type Checker closure. - Print func(ast.Node) []byte // String representation of the expresion. -} - -func New(violations ...[]Violation) Checker { - c := Checker{ - Packages: make(map[string][]int), - } - - for i := range violations { - c.register(violations[i]) - } - - return c -} - -// Match will check the available violations we got from checks against -// the `name` caller from package `pkgName`. -func (c *Checker) Match(pkgName, name string) *Violation { - for _, v := range c.Matches(pkgName, name) { - return v - } - - return nil -} - -// Matches do same thing as Match but return a slice of violations -// as only things that require this are bytes.Buffer and strings.Builder -// it only be used in matching methods in analyzer. 
-func (c *Checker) Matches(pkgName, name string) []*Violation { - var matches []*Violation - checkStruct := strings.Contains(pkgName, ".") - - for _, idx := range c.Packages[pkgName] { - if c.Violations[idx].Caller == name { - if checkStruct == (len(c.Violations[idx].Struct) == 0) { - continue - } - - // copy violation - v := c.Violations[idx] - matches = append(matches, &v) - } - } - - return matches -} - -func (c *Checker) Handle(v *Violation, ce *ast.CallExpr) (map[int]ast.Expr, bool) { - m := map[int]ast.Expr{} - - // We going to check each of elements we mark for checking, in order to find, - // a call that violates our rules. - for _, i := range v.Args { - if i >= len(ce.Args) { - continue - } - - call, ok := ce.Args[i].(*ast.CallExpr) - if !ok { - continue - } - - // is it convertsion call - if !c.callConverts(call) { - continue - } - - // somehow no argument of call - if len(call.Args) == 0 { - continue - } - - // wrong argument type - if normalType(c.Type(call.Args[0])) != v.getArgType() { - continue - } - - m[i] = call.Args[0] - } - - return m, len(m) == len(v.Args) -} - -func (c *Checker) callConverts(ce *ast.CallExpr) bool { - switch ce.Fun.(type) { - case *ast.ArrayType, *ast.Ident: - res := c.Type(ce.Fun) - return res == "[]byte" || res == "string" - } - - return false -} - -// register violations. -func (c *Checker) register(violations []Violation) { - for _, v := range violations { // nolint: gocritic - c.Violations = append(c.Violations, v) - if len(v.Struct) > 0 { - c.registerIdxPer(v.Package + "." + v.Struct) - } - c.registerIdxPer(v.Package) - } -} - -// registerIdxPer will register last added violation element -// under pkg string. -func (c *Checker) registerIdxPer(pkg string) { - c.Packages[pkg] = append(c.Packages[pkg], len(c.Violations)-1) -} - -func WrapType(info *types.Info) func(node ast.Expr) string { - return func(node ast.Expr) string { - if t := info.TypeOf(node); t != nil { - return t.String() - } - - if tv, ok := info.Types[node]; ok { - return tv.Type.Underlying().String() - } - - return "" - } -} - -func WrapPrint(fSet *token.FileSet) func(ast.Node) []byte { - return func(node ast.Node) []byte { - var buf bytes.Buffer - printer.Fprint(&buf, fSet, node) - return buf.Bytes() - } -} diff --git a/vendor/github.com/butuzov/mirror/internal/checker/imports.go b/vendor/github.com/butuzov/mirror/internal/checker/imports.go deleted file mode 100644 index 4015de597..000000000 --- a/vendor/github.com/butuzov/mirror/internal/checker/imports.go +++ /dev/null @@ -1,89 +0,0 @@ -package checker - -import ( - "go/ast" - "go/token" - "path" - "sort" - "strings" - "sync" - - "golang.org/x/tools/go/ast/inspector" -) - -// Imports represents an imported package in a nice for lookup way... -// -// examples: -// import . "bytes" -> checker.Import{Pkg:"bytes", Val:"."} -// import name "bytes" -> checker.Import{Pkg:"bytes", Val:"name"} -type Import struct { - Pkg string // package name - Name string // alias -} - -type Imports map[string][]Import - -// we are going to have Imports entries to be sorted, but if it has less then -// `sortLowerLimit` elements we are skipping this step as its not going to -// be worth of effort. 
-const sortLowerLimit int = 13 - -// Package level lock is to prevent import map corruption -var lock sync.RWMutex - -func Load(fs *token.FileSet, ins *inspector.Inspector) Imports { - lock.Lock() - defer lock.Unlock() - - imports := make(Imports) - - // Populate imports map - ins.Preorder([]ast.Node{(*ast.ImportSpec)(nil)}, func(node ast.Node) { - importSpec, _ := node.(*ast.ImportSpec) - - var ( - key = fs.Position(node.Pos()).Filename - pkg = strings.Trim(importSpec.Path.Value, `"`) - name = importSpec.Name.String() - ) - - if importSpec.Name == nil { - name = path.Base(pkg) // note: we need only basename of the package - } - - imports[key] = append(imports[key], Import{ - Pkg: pkg, - Name: name, - }) - }) - - imports.sort() - - return imports -} - -// sort will sort imports for each of the checking files. -func (i *Imports) sort() { - for k := range *i { - if len((*i)[k]) < sortLowerLimit { - continue - } - - k := k - sort.Slice((*i)[k], func(left, right int) bool { - return (*i)[k][left].Name < (*i)[k][right].Name - }) - } -} - -func (i Imports) Lookup(file, pkg string) (string, bool) { - if _, ok := i[file]; ok { - for idx := range i[file] { - if i[file][idx].Name == pkg { - return i[file][idx].Pkg, true - } - } - } - - return "", false -} diff --git a/vendor/github.com/butuzov/mirror/internal/checker/violation.go b/vendor/github.com/butuzov/mirror/internal/checker/violation.go deleted file mode 100644 index 375d3c8e6..000000000 --- a/vendor/github.com/butuzov/mirror/internal/checker/violation.go +++ /dev/null @@ -1,208 +0,0 @@ -package checker - -import ( - "bytes" - "fmt" - "go/ast" - "go/printer" - "go/token" - "path" - "strings" - - "golang.org/x/tools/go/analysis" -) - -// Type of violation: can be method or function -type ViolationType int - -const ( - Function ViolationType = iota + 1 - Method -) - -const ( - Strings string = "string" - Bytes string = "[]byte" - Byte string = "byte" - Rune string = "rune" - UntypedRune string = "untyped rune" -) - -// Violation describs what message we going to give to a particular code violation -type Violation struct { - Type ViolationType // - Args []int // Indexes of the arguments needs to be checked - ArgsType string - - Targets string - Package string - AltPackage string - Struct string - Caller string - AltCaller string - - // --- tests generation information - Generate *Generate - - // --- suggestions related info about violation of rules. - base []byte // receiver of the method or pkg name - callExpr *ast.CallExpr // actual call expression, to extract arguments - arguments map[int]ast.Expr // fixed arguments -} - -// Tests (generation) related struct. 
-type Generate struct { - PreCondition string // Precondition we want to be generated - Pattern string // Generate pattern (for the `want` message) - Returns int // Expected to return n elements -} - -func (v *Violation) With(base []byte, e *ast.CallExpr, args map[int]ast.Expr) *Violation { - v.base = base - v.callExpr = e - v.arguments = args - - return v -} - -func (v *Violation) getArgType() string { - if v.ArgsType != "" { - return v.ArgsType - } - - if v.Targets == Strings { - return Bytes - } - - return Strings -} - -func (v *Violation) Message() string { - if v.Type == Method { - return fmt.Sprintf("avoid allocations with (*%s.%s).%s", - path.Base(v.Package), v.Struct, v.AltCaller) - } - - pkg := v.Package - if len(v.AltPackage) > 0 { - pkg = v.AltPackage - } - - return fmt.Sprintf("avoid allocations with %s.%s", path.Base(pkg), v.AltCaller) -} - -func (v *Violation) suggest(fSet *token.FileSet) []byte { - var buf bytes.Buffer - - if len(v.base) > 0 { - buf.Write(v.base) - buf.WriteString(".") - } - - buf.WriteString(v.AltCaller) - buf.WriteByte('(') - for idx := range v.callExpr.Args { - if arg, ok := v.arguments[idx]; ok { - printer.Fprint(&buf, fSet, arg) - } else { - printer.Fprint(&buf, fSet, v.callExpr.Args[idx]) - } - - if idx != len(v.callExpr.Args)-1 { - buf.WriteString(", ") - } - } - buf.WriteByte(')') - - return buf.Bytes() -} - -func (v *Violation) Diagnostic(fSet *token.FileSet) analysis.Diagnostic { - diagnostic := analysis.Diagnostic{ - Pos: v.callExpr.Pos(), - End: v.callExpr.Pos(), - Message: v.Message(), - } - - var buf bytes.Buffer - printer.Fprint(&buf, fSet, v.callExpr) - noNl := bytes.IndexByte(buf.Bytes(), '\n') < 0 - - // Struct based fix. - if v.Type == Method && noNl { - diagnostic.SuggestedFixes = []analysis.SuggestedFix{{ - Message: "Fix Issue With", - TextEdits: []analysis.TextEdit{{ - Pos: v.callExpr.Pos(), End: v.callExpr.End(), NewText: v.suggest(fSet), - }}, - }} - } - - if v.AltPackage == "" { - v.AltPackage = v.Package - } - - // Hooray! we dont need to change package and redo imports. - if v.Type == Function && v.AltPackage == v.Package && noNl { - diagnostic.SuggestedFixes = []analysis.SuggestedFix{{ - Message: "Fix Issue With", - TextEdits: []analysis.TextEdit{{ - Pos: v.callExpr.Pos(), End: v.callExpr.End(), NewText: v.suggest(fSet), - }}, - }} - } - - // do not change - - return diagnostic -} - -type GolangIssue struct { - Start token.Position - End token.Position - Message string - InlineFix string - Original string -} - -// Issue inteanded to be used only with golangci-lint, bu you can use use it -// alongside Diagnostic if you wish. -func (v *Violation) Issue(fSet *token.FileSet) GolangIssue { - issue := GolangIssue{ - Start: fSet.Position(v.callExpr.Pos()), - End: fSet.Position(v.callExpr.End()), - Message: v.Message(), - } - - // original expression (useful for debug & requied for replace) - var buf bytes.Buffer - printer.Fprint(&buf, fSet, v.callExpr) - issue.Original = buf.String() - - noNl := strings.IndexByte(issue.Original, '\n') < 0 - - if v.Type == Method && noNl { - fix := v.suggest(fSet) - issue.InlineFix = string(fix) - } - - if v.AltPackage == "" { - v.AltPackage = v.Package - } - - // Hooray! we don't need to change package and redo imports. - if v.Type == Function && v.AltPackage == v.Package && noNl { - fix := v.suggest(fSet) - issue.InlineFix = string(fix) - } - - return issue -} - -// ofType normalize input types (mostly typed and untyped runes). 
-func normalType(s string) string { - if s == UntypedRune { - return Rune - } - return s -} diff --git a/vendor/github.com/butuzov/mirror/readme.md b/vendor/github.com/butuzov/mirror/readme.md deleted file mode 100644 index fcfd1de11..000000000 --- a/vendor/github.com/butuzov/mirror/readme.md +++ /dev/null @@ -1,60 +0,0 @@ -# `mirror` [![Code Coverage](https://coveralls.io/repos/github/butuzov/mirror/badge.svg?branch=main)](https://coveralls.io/github/butuzov/mirror?branch=main) [![build status](https://github.com/butuzov/mirror/actions/workflows/main.yaml/badge.svg?branch=main)]() - -`mirror` suggests use of alternative functions/methods in order to gain performance boosts by avoiding unnecessary `[]byte/string` conversion calls. See [MIRROR_FUNCS.md](MIRROR_FUNCS.md) list of mirror functions you can use in go's stdlib. - -## 🇺🇦 PLEASE HELP ME 🇺🇦 -Fundrise for scout drone **DJI Matrice 30T** for my squad (Ukrainian Forces). See more details at [butuzov/README.md](https://github.com/butuzov/butuzov/) - -## Linter Use Cases - -### `github.com/argoproj/argo-cd` - -```go -// Before -func IsValidHostname(hostname string, fqdn bool) bool { - if !fqdn { - return validHostNameRegexp.Match([]byte(hostname)) || validIPv6Regexp.Match([]byte(hostname)) - } else { - return validFQDNRegexp.Match([]byte(hostname)) - } -} - -// After: With alternative method (and lost `else` case) -func IsValidHostname(hostname string, fqdn bool) bool { - if !fqdn { - return validHostNameRegexp.MatchString(hostname) || validIPv6Regexp.MatchString(hostname) - } - - return validFQDNRegexp.MatchString(hostname) -} -``` - -## Install - -``` -go install github.com/butuzov/mirror/cmd/mirror@latest -``` - -## How to use - -You run `mirror` with [`go vet`](https://pkg.go.dev/cmd/vet): - -``` -go vet -vettool=$(which mirror) ./... -# github.com/jcmoraisjr/haproxy-ingress/pkg/common/net/ssl -pkg/common/net/ssl/ssl.go:64:11: avoid allocations with (*os.File).WriteString -pkg/common/net/ssl/ssl.go:161:12: avoid allocations with (*os.File).WriteString -pkg/common/net/ssl/ssl.go:166:3: avoid allocations with (*os.File).WriteString -``` - -Can be called directly: -``` -mirror ./... -# https://github.com/cosmtrek/air -/air/runner/util.go:149:6: avoid allocations with (*regexp.Regexp).MatchString -/air/runner/util.go:173:14: avoid allocations with (*os.File).WriteString -``` - -## Command line - -- You can add checks for `_test.go` files with cli option `--with-tests` diff --git a/vendor/github.com/catenacyber/perfsprint/LICENSE b/vendor/github.com/catenacyber/perfsprint/LICENSE deleted file mode 100644 index 14c2b9e73..000000000 --- a/vendor/github.com/catenacyber/perfsprint/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -MIT License - -Copyright (c) 2023 Catena cyber - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. diff --git a/vendor/github.com/catenacyber/perfsprint/analyzer/analyzer.go b/vendor/github.com/catenacyber/perfsprint/analyzer/analyzer.go deleted file mode 100644 index 543b4bdbc..000000000 --- a/vendor/github.com/catenacyber/perfsprint/analyzer/analyzer.go +++ /dev/null @@ -1,603 +0,0 @@ -package analyzer - -import ( - "bytes" - "go/ast" - "go/format" - "go/token" - "go/types" - "sort" - "strconv" - "strings" - - "golang.org/x/tools/go/analysis/passes/inspect" - "golang.org/x/tools/go/ast/inspector" - - "golang.org/x/tools/go/analysis" -) - -type perfSprint struct { - intConv bool - errError bool - errorf bool - sprintf1 bool - fiximports bool - strconcat bool -} - -func newPerfSprint() *perfSprint { - return &perfSprint{ - intConv: true, - errError: false, - errorf: true, - sprintf1: true, - fiximports: true, - strconcat: true, - } -} - -func New() *analysis.Analyzer { - n := newPerfSprint() - r := &analysis.Analyzer{ - Name: "perfsprint", - Doc: "Checks that fmt.Sprintf can be replaced with a faster alternative.", - Run: n.run, - Requires: []*analysis.Analyzer{inspect.Analyzer}, - } - r.Flags.BoolVar(&n.intConv, "int-conversion", true, "optimizes even if it requires an int or uint type cast") - r.Flags.BoolVar(&n.errError, "err-error", false, "optimizes into err.Error() even if it is only equivalent for non-nil errors") - r.Flags.BoolVar(&n.errorf, "errorf", true, "optimizes fmt.Errorf") - r.Flags.BoolVar(&n.sprintf1, "sprintf1", true, "optimizes fmt.Sprintf with only one argument") - r.Flags.BoolVar(&n.fiximports, "fiximports", true, "fix needed imports from other fixes") - r.Flags.BoolVar(&n.strconcat, "strconcat", true, "optimizes into strings concatenation") - return r -} - -// true if verb is a format string that could be replaced with concatenation. 
-func isConcatable(verb string) bool { - hasPrefix := - (strings.HasPrefix(verb, "%s") && !strings.Contains(verb, "%[1]s")) || - (strings.HasPrefix(verb, "%[1]s") && !strings.Contains(verb, "%s")) - hasSuffix := - (strings.HasSuffix(verb, "%s") && !strings.Contains(verb, "%[1]s")) || - (strings.HasSuffix(verb, "%[1]s") && !strings.Contains(verb, "%s")) - - if strings.Count(verb, "%[1]s") > 1 { - return false - } - return (hasPrefix || hasSuffix) && !(hasPrefix && hasSuffix) -} - -func (n *perfSprint) run(pass *analysis.Pass) (interface{}, error) { - var fmtSprintObj, fmtSprintfObj, fmtErrorfObj types.Object - for _, pkg := range pass.Pkg.Imports() { - if pkg.Path() == "fmt" { - fmtSprintObj = pkg.Scope().Lookup("Sprint") - fmtSprintfObj = pkg.Scope().Lookup("Sprintf") - fmtErrorfObj = pkg.Scope().Lookup("Errorf") - } - } - if fmtSprintfObj == nil && fmtSprintObj == nil && fmtErrorfObj == nil { - return nil, nil - } - removedFmtUsages := make(map[string]int) - neededPackages := make(map[string]map[string]bool) - - insp := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) - nodeFilter := []ast.Node{ - (*ast.CallExpr)(nil), - } - insp.Preorder(nodeFilter, func(node ast.Node) { - call := node.(*ast.CallExpr) - called, ok := call.Fun.(*ast.SelectorExpr) - if !ok { - return - } - calledObj := pass.TypesInfo.ObjectOf(called.Sel) - - var ( - fn string - verb string - value ast.Expr - err error - ) - switch { - case calledObj == fmtErrorfObj && len(call.Args) == 1: - if n.errorf { - fn = "fmt.Errorf" - verb = "%s" - value = call.Args[0] - } else { - return - } - - case calledObj == fmtSprintObj && len(call.Args) == 1: - fn = "fmt.Sprint" - verb = "%v" - value = call.Args[0] - - case calledObj == fmtSprintfObj && len(call.Args) == 1: - if n.sprintf1 { - fn = "fmt.Sprintf" - verb = "%s" - value = call.Args[0] - } else { - return - } - - case calledObj == fmtSprintfObj && len(call.Args) == 2: - verbLit, ok := call.Args[0].(*ast.BasicLit) - if !ok { - return - } - verb, err = strconv.Unquote(verbLit.Value) - if err != nil { - // Probably unreachable. 
- return - } - // one single explicit arg is simplified - if strings.HasPrefix(verb, "%[1]") { - verb = "%" + verb[4:] - } - - fn = "fmt.Sprintf" - value = call.Args[1] - - default: - return - } - - switch verb { - default: - if fn == "fmt.Sprintf" && isConcatable(verb) && n.strconcat { - break - } - return - case "%d", "%v", "%x", "%t", "%s": - } - - valueType := pass.TypesInfo.TypeOf(value) - a, isArray := valueType.(*types.Array) - s, isSlice := valueType.(*types.Slice) - - var d *analysis.Diagnostic - switch { - case isBasicType(valueType, types.String) && oneOf(verb, "%v", "%s"): - fname := pass.Fset.File(call.Pos()).Name() - _, ok := neededPackages[fname] - if !ok { - neededPackages[fname] = make(map[string]bool) - } - removedFmtUsages[fname]++ - if fn == "fmt.Errorf" { - neededPackages[fname]["errors"] = true - d = &analysis.Diagnostic{ - Pos: call.Pos(), - End: call.End(), - Message: fn + " can be replaced with errors.New", - SuggestedFixes: []analysis.SuggestedFix{ - { - Message: "Use errors.New", - TextEdits: []analysis.TextEdit{{ - Pos: call.Pos(), - End: value.Pos(), - NewText: []byte("errors.New("), - }}, - }, - }, - } - } else { - d = &analysis.Diagnostic{ - Pos: call.Pos(), - End: call.End(), - Message: fn + " can be replaced with just using the string", - SuggestedFixes: []analysis.SuggestedFix{ - { - Message: "Just use string value", - TextEdits: []analysis.TextEdit{{ - Pos: call.Pos(), - End: call.End(), - NewText: []byte(formatNode(pass.Fset, value)), - }}, - }, - }, - } - } - case types.Implements(valueType, errIface) && oneOf(verb, "%v", "%s") && n.errError: - // known false positive if this error is nil - // fmt.Sprint(nil) does not panic like nil.Error() does - errMethodCall := formatNode(pass.Fset, value) + ".Error()" - fname := pass.Fset.File(call.Pos()).Name() - removedFmtUsages[fname]++ - d = &analysis.Diagnostic{ - Pos: call.Pos(), - End: call.End(), - Message: fn + " can be replaced with " + errMethodCall, - SuggestedFixes: []analysis.SuggestedFix{ - { - Message: "Use " + errMethodCall, - TextEdits: []analysis.TextEdit{{ - Pos: call.Pos(), - End: call.End(), - NewText: []byte(errMethodCall), - }}, - }, - }, - } - - case isBasicType(valueType, types.Bool) && oneOf(verb, "%v", "%t"): - fname := pass.Fset.File(call.Pos()).Name() - removedFmtUsages[fname]++ - _, ok := neededPackages[fname] - if !ok { - neededPackages[fname] = make(map[string]bool) - } - neededPackages[fname]["strconv"] = true - d = &analysis.Diagnostic{ - Pos: call.Pos(), - End: call.End(), - Message: fn + " can be replaced with faster strconv.FormatBool", - SuggestedFixes: []analysis.SuggestedFix{ - { - Message: "Use strconv.FormatBool", - TextEdits: []analysis.TextEdit{{ - Pos: call.Pos(), - End: value.Pos(), - NewText: []byte("strconv.FormatBool("), - }}, - }, - }, - } - - case isArray && isBasicType(a.Elem(), types.Uint8) && oneOf(verb, "%x"): - if _, ok := value.(*ast.Ident); !ok { - // Doesn't support array literals. 
- return - } - - fname := pass.Fset.File(call.Pos()).Name() - removedFmtUsages[fname]++ - _, ok := neededPackages[fname] - if !ok { - neededPackages[fname] = make(map[string]bool) - } - neededPackages[fname]["encoding/hex"] = true - d = &analysis.Diagnostic{ - Pos: call.Pos(), - End: call.End(), - Message: fn + " can be replaced with faster hex.EncodeToString", - SuggestedFixes: []analysis.SuggestedFix{ - { - Message: "Use hex.EncodeToString", - TextEdits: []analysis.TextEdit{ - { - Pos: call.Pos(), - End: value.Pos(), - NewText: []byte("hex.EncodeToString("), - }, - { - Pos: value.End(), - End: value.End(), - NewText: []byte("[:]"), - }, - }, - }, - }, - } - case isSlice && isBasicType(s.Elem(), types.Uint8) && oneOf(verb, "%x"): - fname := pass.Fset.File(call.Pos()).Name() - removedFmtUsages[fname]++ - _, ok := neededPackages[fname] - if !ok { - neededPackages[fname] = make(map[string]bool) - } - neededPackages[fname]["encoding/hex"] = true - d = &analysis.Diagnostic{ - Pos: call.Pos(), - End: call.End(), - Message: fn + " can be replaced with faster hex.EncodeToString", - SuggestedFixes: []analysis.SuggestedFix{ - { - Message: "Use hex.EncodeToString", - TextEdits: []analysis.TextEdit{{ - Pos: call.Pos(), - End: value.Pos(), - NewText: []byte("hex.EncodeToString("), - }}, - }, - }, - } - - case isBasicType(valueType, types.Int8, types.Int16, types.Int32) && oneOf(verb, "%v", "%d") && n.intConv: - fname := pass.Fset.File(call.Pos()).Name() - removedFmtUsages[fname]++ - _, ok := neededPackages[fname] - if !ok { - neededPackages[fname] = make(map[string]bool) - } - neededPackages[fname]["strconv"] = true - d = &analysis.Diagnostic{ - Pos: call.Pos(), - End: call.End(), - Message: fn + " can be replaced with faster strconv.Itoa", - SuggestedFixes: []analysis.SuggestedFix{ - { - Message: "Use strconv.Itoa", - TextEdits: []analysis.TextEdit{ - { - Pos: call.Pos(), - End: value.Pos(), - NewText: []byte("strconv.Itoa(int("), - }, - { - Pos: value.End(), - End: value.End(), - NewText: []byte(")"), - }, - }, - }, - }, - } - case isBasicType(valueType, types.Int) && oneOf(verb, "%v", "%d"): - fname := pass.Fset.File(call.Pos()).Name() - removedFmtUsages[fname]++ - _, ok := neededPackages[fname] - if !ok { - neededPackages[fname] = make(map[string]bool) - } - neededPackages[fname]["strconv"] = true - d = &analysis.Diagnostic{ - Pos: call.Pos(), - End: call.End(), - Message: fn + " can be replaced with faster strconv.Itoa", - SuggestedFixes: []analysis.SuggestedFix{ - { - Message: "Use strconv.Itoa", - TextEdits: []analysis.TextEdit{{ - Pos: call.Pos(), - End: value.Pos(), - NewText: []byte("strconv.Itoa("), - }}, - }, - }, - } - case isBasicType(valueType, types.Int64) && oneOf(verb, "%v", "%d"): - fname := pass.Fset.File(call.Pos()).Name() - removedFmtUsages[fname]++ - _, ok := neededPackages[fname] - if !ok { - neededPackages[fname] = make(map[string]bool) - } - neededPackages[fname]["strconv"] = true - d = &analysis.Diagnostic{ - Pos: call.Pos(), - End: call.End(), - Message: fn + " can be replaced with faster strconv.FormatInt", - SuggestedFixes: []analysis.SuggestedFix{ - { - Message: "Use strconv.FormatInt", - TextEdits: []analysis.TextEdit{ - { - Pos: call.Pos(), - End: value.Pos(), - NewText: []byte("strconv.FormatInt("), - }, - { - Pos: value.End(), - End: value.End(), - NewText: []byte(", 10"), - }, - }, - }, - }, - } - - case isBasicType(valueType, types.Uint8, types.Uint16, types.Uint32, types.Uint) && oneOf(verb, "%v", "%d", "%x") && n.intConv: - base := []byte("), 10") - if verb == 
"%x" { - base = []byte("), 16") - } - fname := pass.Fset.File(call.Pos()).Name() - removedFmtUsages[fname]++ - _, ok := neededPackages[fname] - if !ok { - neededPackages[fname] = make(map[string]bool) - } - neededPackages[fname]["strconv"] = true - d = &analysis.Diagnostic{ - Pos: call.Pos(), - End: call.End(), - Message: fn + " can be replaced with faster strconv.FormatUint", - SuggestedFixes: []analysis.SuggestedFix{ - { - Message: "Use strconv.FormatUint", - TextEdits: []analysis.TextEdit{ - { - Pos: call.Pos(), - End: value.Pos(), - NewText: []byte("strconv.FormatUint(uint64("), - }, - { - Pos: value.End(), - End: value.End(), - NewText: base, - }, - }, - }, - }, - } - case isBasicType(valueType, types.Uint64) && oneOf(verb, "%v", "%d", "%x"): - base := []byte(", 10") - if verb == "%x" { - base = []byte(", 16") - } - fname := pass.Fset.File(call.Pos()).Name() - removedFmtUsages[fname]++ - _, ok := neededPackages[fname] - if !ok { - neededPackages[fname] = make(map[string]bool) - } - neededPackages[fname]["strconv"] = true - d = &analysis.Diagnostic{ - Pos: call.Pos(), - End: call.End(), - Message: fn + " can be replaced with faster strconv.FormatUint", - SuggestedFixes: []analysis.SuggestedFix{ - { - Message: "Use strconv.FormatUint", - TextEdits: []analysis.TextEdit{ - { - Pos: call.Pos(), - End: value.Pos(), - NewText: []byte("strconv.FormatUint("), - }, - { - Pos: value.End(), - End: value.End(), - NewText: base, - }, - }, - }, - }, - } - case isBasicType(valueType, types.String) && fn == "fmt.Sprintf" && isConcatable(verb): - var fix string - if strings.HasSuffix(verb, "%s") { - fix = strconv.Quote(verb[:len(verb)-2]) + "+" + formatNode(pass.Fset, value) - } else if strings.HasSuffix(verb, "%[1]s") { - fix = strconv.Quote(verb[:len(verb)-5]) + "+" + formatNode(pass.Fset, value) - } else if strings.HasPrefix(verb, "%s") { - fix = formatNode(pass.Fset, value) + "+" + strconv.Quote(verb[2:]) - } else { - fix = formatNode(pass.Fset, value) + "+" + strconv.Quote(verb[5:]) - } - fname := pass.Fset.File(call.Pos()).Name() - removedFmtUsages[fname]++ - d = &analysis.Diagnostic{ - Pos: call.Pos(), - End: call.End(), - Message: fn + " can be replaced with string concatenation", - SuggestedFixes: []analysis.SuggestedFix{ - { - Message: "Use string concatenation", - TextEdits: []analysis.TextEdit{{ - Pos: call.Pos(), - End: call.End(), - NewText: []byte(fix), - }}, - }, - }, - } - } - - if d != nil { - pass.Report(*d) - } - }) - - if len(removedFmtUsages) > 0 && n.fiximports { - for _, pkg := range pass.Pkg.Imports() { - if pkg.Path() == "fmt" { - insp = pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) - nodeFilter = []ast.Node{ - (*ast.SelectorExpr)(nil), - } - insp.Preorder(nodeFilter, func(node ast.Node) { - selec := node.(*ast.SelectorExpr) - selecok, ok := selec.X.(*ast.Ident) - if ok { - pkgname, ok := pass.TypesInfo.ObjectOf(selecok).(*types.PkgName) - if ok && pkgname.Name() == pkg.Name() { - fname := pass.Fset.File(pkgname.Pos()).Name() - removedFmtUsages[fname]-- - } - } - }) - } else if pkg.Path() == "errors" || pkg.Path() == "strconv" || pkg.Path() == "encoding/hex" { - insp = pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) - nodeFilter = []ast.Node{ - (*ast.ImportSpec)(nil), - } - insp.Preorder(nodeFilter, func(node ast.Node) { - gd := node.(*ast.ImportSpec) - if gd.Path.Value == strconv.Quote(pkg.Path()) { - fname := pass.Fset.File(gd.Pos()).Name() - _, ok := neededPackages[fname] - if ok { - delete(neededPackages[fname], pkg.Path()) - } - } - }) - } - } - insp = 
pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) - nodeFilter = []ast.Node{ - (*ast.ImportSpec)(nil), - } - insp.Preorder(nodeFilter, func(node ast.Node) { - gd := node.(*ast.ImportSpec) - if gd.Path.Value == `"fmt"` { - fix := "" - fname := pass.Fset.File(gd.Pos()).Name() - if removedFmtUsages[fname] < 0 { - fix += `"fmt"` - if len(neededPackages[fname]) == 0 { - return - } - } - keys := make([]string, 0, len(neededPackages[fname])) - for k := range neededPackages[fname] { - keys = append(keys, k) - } - sort.Strings(keys) - for _, k := range keys { - fix = fix + "\n\t\"" + k + `"` - } - pass.Report(analysis.Diagnostic{ - Pos: gd.Pos(), - End: gd.End(), - Message: "Fix imports", - SuggestedFixes: []analysis.SuggestedFix{ - { - Message: "Fix imports", - TextEdits: []analysis.TextEdit{{ - Pos: gd.Pos(), - End: gd.End(), - NewText: []byte(fix), - }}, - }, - }}) - } - }) - } - - return nil, nil -} - -var errIface = types.Universe.Lookup("error").Type().Underlying().(*types.Interface) - -func isBasicType(lhs types.Type, expected ...types.BasicKind) bool { - for _, rhs := range expected { - if types.Identical(lhs, types.Typ[rhs]) { - return true - } - } - return false -} - -func formatNode(fset *token.FileSet, node ast.Node) string { - buf := new(bytes.Buffer) - if err := format.Node(buf, fset, node); err != nil { - return "" - } - return buf.String() -} - -func oneOf[T comparable](v T, expected ...T) bool { - for _, rhs := range expected { - if v == rhs { - return true - } - } - return false -} diff --git a/vendor/github.com/ccojocar/zxcvbn-go/.gitignore b/vendor/github.com/ccojocar/zxcvbn-go/.gitignore deleted file mode 100644 index e032cc2fc..000000000 --- a/vendor/github.com/ccojocar/zxcvbn-go/.gitignore +++ /dev/null @@ -1,5 +0,0 @@ -zxcvbn -debug.test - -# SBOMs generated during CI -/bom.json diff --git a/vendor/github.com/ccojocar/zxcvbn-go/.golangci.yml b/vendor/github.com/ccojocar/zxcvbn-go/.golangci.yml deleted file mode 100644 index b54f70092..000000000 --- a/vendor/github.com/ccojocar/zxcvbn-go/.golangci.yml +++ /dev/null @@ -1,39 +0,0 @@ -linters: - enable: - - asciicheck - - bodyclose - - dogsled - - durationcheck - - errcheck - - errorlint - - exportloopref - - gci - - ginkgolinter - - gofmt - - gofumpt - - goimports - - gosimple - - govet - - importas - - ineffassign - - megacheck - - misspell - - nakedret - - nolintlint - - revive - - staticcheck - - typecheck - - unconvert - - unparam - - unused - - wastedassign - -linters-settings: - gci: - sections: - - standard - - default - - prefix(github.com/ccojocar) - -run: - timeout: 5m diff --git a/vendor/github.com/ccojocar/zxcvbn-go/.goreleaser.yml b/vendor/github.com/ccojocar/zxcvbn-go/.goreleaser.yml deleted file mode 100644 index 2386aeee5..000000000 --- a/vendor/github.com/ccojocar/zxcvbn-go/.goreleaser.yml +++ /dev/null @@ -1,27 +0,0 @@ ---- -project_name: zxcvbn-go - -release: - extra_files: - - glob: ./bom.json - github: - owner: ccojocar - name: zxcvbn-go - -builds: - - main: ./testapp/ - binary: zxcvbn-go - goos: - - darwin - - linux - - windows - goarch: - - amd64 - - arm64 - - s390x - ldflags: -X main.Version={{.Version}} -X main.GitTag={{.Tag}} -X main.BuildDate={{.Date}} - env: - - CGO_ENABLED=0 - -gomod: - proxy: true diff --git a/vendor/github.com/ccojocar/zxcvbn-go/LICENSE.txt b/vendor/github.com/ccojocar/zxcvbn-go/LICENSE.txt deleted file mode 100644 index e8f59e06d..000000000 --- a/vendor/github.com/ccojocar/zxcvbn-go/LICENSE.txt +++ /dev/null @@ -1,20 +0,0 @@ -Copyright (c) Nathan Button - -Permission is 
hereby granted, free of charge, to any person obtaining -a copy of this software and associated documentation files (the -"Software"), to deal in the Software without restriction, including -without limitation the rights to use, copy, modify, merge, publish, -distribute, sublicense, and/or sell copies of the Software, and to -permit persons to whom the Software is furnished to do so, subject to -the following conditions: - -The above copyright notice and this permission notice shall be -included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/vendor/github.com/ccojocar/zxcvbn-go/Makefile b/vendor/github.com/ccojocar/zxcvbn-go/Makefile deleted file mode 100644 index 0690f3753..000000000 --- a/vendor/github.com/ccojocar/zxcvbn-go/Makefile +++ /dev/null @@ -1,61 +0,0 @@ -GIT_TAG?= $(shell git describe --always --tags) -BIN = zxcvbn-go -FMT_CMD = $(gofmt -s -l -w $(find . -type f -name '*.go' -not -path './vendor/*') | tee /dev/stderr) -IMAGE_REPO = ccojocar -DATE_FMT=+%Y-%m-%d -ifdef SOURCE_DATE_EPOCH - BUILD_DATE ?= $(shell date -u -d "@$(SOURCE_DATE_EPOCH)" "$(DATE_FMT)" 2>/dev/null || date -u -r "$(SOURCE_DATE_EPOCH)" "$(DATE_FMT)" 2>/dev/null || date -u "$(DATE_FMT)") -else - BUILD_DATE ?= $(shell date "$(DATE_FMT)") -endif -BUILDFLAGS := "-w -s -X 'main.Version=$(GIT_TAG)' -X 'main.GitTag=$(GIT_TAG)' -X 'main.BuildDate=$(BUILD_DATE)'" -CGO_ENABLED = 0 -GO := GO111MODULE=on go -GO_NOMOD :=GO111MODULE=off go -GOPATH ?= $(shell $(GO) env GOPATH) -GOBIN ?= $(GOPATH)/bin -GO_MINOR_VERSION = $(shell $(GO) version | cut -c 14- | cut -d' ' -f1 | cut -d'.' -f2) -GOVULN_MIN_VERSION = 17 -GO_VERSION = 1.20 - -default: - $(MAKE) test - -install-govulncheck: - @if [ $(GO_MINOR_VERSION) -gt $(GOVULN_MIN_VERSION) ]; then \ - go install golang.org/x/vuln/cmd/govulncheck@latest; \ - fi - -test-all: fmt vet lint sec govulncheck test - -test: - go test -v ./... - -fmt: - @echo "FORMATTING" - @FORMATTED=`$(GO) fmt ./...` - @([ ! -z "$(FORMATTED)" ] && printf "Fixed unformatted files:\n$(FORMATTED)") || true - -vet: - @echo "VETTING" - $(GO) vet ./... - -lint: - @echo "LINTING: golangci-lint" - golangci-lint run - -sec: - @echo "SECURITY SCANNING" - gosec ./... - -govulncheck: install-govulncheck - @echo "CHECKING VULNERABILITIES" - @if [ $(GO_MINOR_VERSION) -gt $(GOVULN_MIN_VERSION) ]; then \ - govulncheck ./...; \ - fi - -clean: - rm -rf build vendor dist coverage.txt - rm -f release image $(BIN) - -.PHONY: test test-all fmt vet govulncheck clean diff --git a/vendor/github.com/ccojocar/zxcvbn-go/README.md b/vendor/github.com/ccojocar/zxcvbn-go/README.md deleted file mode 100644 index 3f742a9da..000000000 --- a/vendor/github.com/ccojocar/zxcvbn-go/README.md +++ /dev/null @@ -1,78 +0,0 @@ -This is a goLang port of python-zxcvbn and [zxcvbn](https://github.com/dropbox/zxcvbn), which are python and JavaScript password strength -generators. zxcvbn attempts to give sound password advice through pattern -matching and conservative entropy calculations. 
It finds 10k common passwords, -common American names and surnames, common English words, and common patterns -like dates, repeats (aaa), sequences (abcd), and QWERTY patterns. - -Please refer to https://dropbox.tech/security/zxcvbn-realistic-password-strength-estimation for the full details and -motivation behind zxcbvn. The source code for the original JavaScript (well, -actually CoffeeScript) implementation can be found at: - -https://github.com/lowe/zxcvbn - -Python at: - -https://github.com/dropbox/python-zxcvbn - -For full motivation, see: - -https://dropbox.tech/security/zxcvbn-realistic-password-strength-estimation - ------------------------------------------------------------------------- -Use ------------------------------------------------------------------------- - -The zxcvbn module has the public method PasswordStrength() function. Import zxcvbn, and -call PasswordStrength(password string, userInputs []string). The function will return a -result dictionary with the following keys: - -Entropy # bits - -CrackTime # estimation of actual crack time, in seconds. - -CrackTimeDisplay # same crack time, as a friendlier string: - # "instant", "6 minutes", "centuries", etc. - -Score # [0,1,2,3,4] if crack time is less than - # [10^2, 10^4, 10^6, 10^8, Infinity]. - # (useful for implementing a strength bar.) - -MatchSequence # the list of patterns that zxcvbn based the - # entropy calculation on. - -CalcTime # how long it took to calculate an answer, - # in milliseconds. usually only a few ms. - -The userInputs argument is an splice of strings that zxcvbn -will add to its internal dictionary. This can be whatever list of -strings you like, but is meant for user inputs from other fields of the -form, like name and email. That way a password that includes the user's -personal info can be heavily penalized. This list is also good for -site-specific vocabulary. - -Bug reports and pull requests welcome! - ------------------------------------------------------------------------- -Project Status ------------------------------------------------------------------------- - -Use zxcvbn_test.go to check how close to feature parity the project is. - ------------------------------------------------------------------------- -Acknowledgment ------------------------------------------------------------------------- - -Thanks to Dan Wheeler (https://github.com/lowe) for the CoffeeScript implementation -(see above.) 
To repeat his outside acknowledgements (which remain useful, as always): - -Many thanks to Mark Burnett for releasing his 10k top passwords list: -https://xato.net/passwords/more-top-worst-passwords -and for his 2006 book, -"Perfect Passwords: Selection, Protection, Authentication" - -Huge thanks to Wiktionary contributors for building a frequency list -of English as used in television and movies: -https://en.wiktionary.org/wiki/Wiktionary:Frequency_lists - -Last but not least, big thanks to xkcd :) -https://xkcd.com/936/ diff --git a/vendor/github.com/ccojocar/zxcvbn-go/adjacency/adjcmartix.go b/vendor/github.com/ccojocar/zxcvbn-go/adjacency/adjcmartix.go deleted file mode 100644 index 34526685c..000000000 --- a/vendor/github.com/ccojocar/zxcvbn-go/adjacency/adjcmartix.go +++ /dev/null @@ -1,105 +0,0 @@ -package adjacency - -import ( - "encoding/json" - "log" - - "github.com/ccojocar/zxcvbn-go/data" -) - -// Graph holds information about different graphs -type Graph struct { - Graph map[string][]string - averageDegree float64 - Name string -} - -// GraphMap is a map of all graphs -var GraphMap = make(map[string]Graph) - -func init() { - GraphMap["qwerty"] = BuildQwerty() - GraphMap["dvorak"] = BuildDvorak() - GraphMap["keypad"] = BuildKeypad() - GraphMap["macKeypad"] = BuildMacKeypad() - GraphMap["l33t"] = BuildLeet() -} - -// BuildQwerty builds the Qwerty Graph -func BuildQwerty() Graph { - data, err := data.Asset("data/Qwerty.json") - if err != nil { - panic("Can't find asset") - } - return getAdjancencyGraphFromFile(data, "qwerty") -} - -// BuildDvorak builds the Dvorak Graph -func BuildDvorak() Graph { - data, err := data.Asset("data/Dvorak.json") - if err != nil { - panic("Can't find asset") - } - return getAdjancencyGraphFromFile(data, "dvorak") -} - -// BuildKeypad builds the Keypad Graph -func BuildKeypad() Graph { - data, err := data.Asset("data/Keypad.json") - if err != nil { - panic("Can't find asset") - } - return getAdjancencyGraphFromFile(data, "keypad") -} - -// BuildMacKeypad builds the Mac Keypad Graph -func BuildMacKeypad() Graph { - data, err := data.Asset("data/MacKeypad.json") - if err != nil { - panic("Can't find asset") - } - return getAdjancencyGraphFromFile(data, "mac_keypad") -} - -// BuildLeet builds the L33T Graph -func BuildLeet() Graph { - data, err := data.Asset("data/L33t.json") - if err != nil { - panic("Can't find asset") - } - return getAdjancencyGraphFromFile(data, "keypad") -} - -func getAdjancencyGraphFromFile(data []byte, name string) Graph { - var graph Graph - err := json.Unmarshal(data, &graph) - if err != nil { - log.Fatal(err) - } - graph.Name = name - return graph -} - -// CalculateAvgDegree calclates the average degree between nodes in the graph -// on qwerty, 'g' has degree 6, being adjacent to 'ftyhbv'. '\' has degree 1. -// this calculates the average over all keys. 
-// TODO double check that i ported this correctly scoring.coffee ln 5 -func (adjGrp Graph) CalculateAvgDegree() float64 { - if adjGrp.averageDegree != float64(0) { - return adjGrp.averageDegree - } - var avg float64 - var count float64 - for _, value := range adjGrp.Graph { - for _, char := range value { - if len(char) != 0 || char != " " { - avg += float64(len(char)) - count++ - } - } - } - - adjGrp.averageDegree = avg / count - - return adjGrp.averageDegree -} diff --git a/vendor/github.com/ccojocar/zxcvbn-go/data/bindata.go b/vendor/github.com/ccojocar/zxcvbn-go/data/bindata.go deleted file mode 100644 index 3db0f1b10..000000000 --- a/vendor/github.com/ccojocar/zxcvbn-go/data/bindata.go +++ /dev/null @@ -1,446 +0,0 @@ -// Code generated by go-bindata. -// sources: -// data/Dvorak.json -// data/English.json -// data/FemaleNames.json -// data/Keypad.json -// data/L33t.json -// data/MacKeypad.json -// data/MaleNames.json -// data/Passwords.json -// data/Qwerty.json -// data/Surnames.json -// DO NOT EDIT! - -package data - -import ( - "bytes" - "compress/gzip" - "fmt" - "io" - "io/ioutil" - "os" - "path/filepath" - "strings" - "time" -) - -func bindataRead(data []byte, name string) ([]byte, error) { - gz, err := gzip.NewReader(bytes.NewBuffer(data)) - if err != nil { - return nil, fmt.Errorf("Read %q: %v", name, err) - } - - var buf bytes.Buffer - _, err = io.Copy(&buf, gz) // #nosec - clErr := gz.Close() - - if err != nil { - return nil, fmt.Errorf("Read %q: %v", name, err) - } - if clErr != nil { - return nil, err - } - - return buf.Bytes(), nil -} - -type asset struct { - bytes []byte - info os.FileInfo -} - -type bindataFileInfo struct { - name string - size int64 - mode os.FileMode - modTime time.Time -} - -func (fi bindataFileInfo) Name() string { - return fi.name -} -func (fi bindataFileInfo) Size() int64 { - return fi.size -} -func (fi bindataFileInfo) Mode() os.FileMode { - return fi.mode -} -func (fi bindataFileInfo) ModTime() time.Time { - return fi.modTime -} -func (fi bindataFileInfo) IsDir() bool { - return false -} -func (fi bindataFileInfo) Sys() interface{} { - return nil -} - -var _dataDvorakJson = 
[]byte("\x1f\x8b\x08\x00\x00\x09\x6e\x88\x00\xff\xb4\x98\x57\x57\x1b\x41\x0c\x85\xdf\xf9\x15\xb0\x74\x30\xbd\xf7\xde\x7b\x6f\xa6\x77\x30\xbd\x17\xf3\xdb\x33\x26\x39\x99\xef\x9e\x78\x96\x7d\x88\x5e\x72\xc6\x61\xf9\xa4\x95\xae\x34\xd7\x7c\x14\x14\x16\x46\x63\xf7\xfb\xb7\x67\x51\x67\x61\xee\x83\xfb\x58\xef\x8e\x5b\xdf\x47\xf7\xa1\xa3\x22\x4a\xfd\x39\x5f\x3f\x65\x32\xf9\xce\xd1\xd6\xc7\xdf\x67\xa2\xcc\xb4\x3f\xdf\x2f\x46\xdf\xc7\xed\xdf\xff\x13\x35\x10\xbc\xf7\xf5\x33\xb8\xb1\xdf\xc3\xca\xd3\x91\xfc\x82\x90\x1b\x49\x6e\x28\xfa\x99\xdc\x54\xec\xc9\xa9\x6e\x8d\x22\xe4\x26\x92\x91\x4f\x90\xdc\x5c\xe2\x69\xb5\xbd\x12\x45\xc0\xcd\x04\x23\x9d\x20\xb8\xa5\xd4\xc3\x6e\xe7\x25\x88\x80\x5b\x08\x46\x36\x41\x70\xeb\x8e\x87\xbd\x6d\x48\x10\x01\xb7\x12\x8c\x6c\x82\xe0\xb6\x32\x0f\x3b\x19\x95\x20\x02\x6e\x23\x18\xd9\x04\xc1\xed\x55\x1e\x76\x3a\x26\x41\x04\xdc\x4e\x30\xb2\x09\x82\xa1\xf6\xe8\x70\x48\x82\x08\xb8\x83\x60\x64\x13\x04\xd7\x57\xca\x58\x30\x88\x80\x8b\xcc\x46\xc4\xfd\xcc\xa3\x05\x81\x79\x11\x1c\xe7\x62\x7f\x20\x4c\x2e\xb6\x1a\x91\x12\xab\x11\x29\xb5\x1a\x91\x32\x2b\x25\x97\x5b\x35\xaf\xc2\x4a\xc9\x95\x56\xb7\x48\x95\xd5\x50\x57\x13\x5c\xd7\xe7\x1f\xdc\xce\xe6\x0d\x12\xa5\xd3\x9f\xf9\x7f\x50\xb3\xab\xe4\x14\xc9\x9c\x52\x69\x19\xef\x24\x8e\xc5\xcd\x9c\xb4\x52\xc8\x35\x24\x3f\x2c\xf9\x07\x99\x7f\x4f\xf5\xcf\x45\x7a\xdf\x54\x70\x2d\xc1\x14\x13\xb3\xe4\x20\x73\xde\x8e\x47\x24\x7b\x01\xd7\x11\xcc\x3e\xb3\xff\xa8\x38\xb3\xcf\x15\x36\x85\xb7\x15\x70\x67\x68\x44\x20\x7f\xa9\xe5\xdd\x42\xb0\x2c\x02\xee\xb2\x02\x77\x9b\xc9\xa2\xc7\x4c\xca\xbd\x56\xba\xe8\xb3\xd2\x45\xbf\x99\x37\x1c\x08\x09\x43\x7a\x49\x04\x7b\xd6\xd5\x19\xde\xca\x83\xcc\xf9\x75\xdd\xff\xd2\xd1\xb0\x3f\x9f\x8d\xfb\xf3\xd5\x4c\x32\xc9\x0d\x11\xcc\x0b\x87\x17\x11\x17\x26\x57\xfc\xe3\xb2\x04\x17\xf0\x30\xc1\xe7\x13\xf9\x8d\x1f\x03\x32\xfb\x83\x41\x7f\x76\x6f\x2b\xe0\x11\x82\x59\x3f\xce\x02\x45\xf6\xb4\xe2\xcf\x17\x93\x32\x95\x02\x1e\x25\x98\x97\x3a\x2f\x7b\x5a\x58\x66\xcf\x3e\xb8\xb7\x15\xf0\x18\xc1\x7c\x7d\xc2\x58\x6f\x5e\x4a\x2c\x8b\x0b\x22\xe0\x71\x82\x99\x01\x33\x23\x8c\x0d\x83\x42\x72\xf5\x16\xf0\x04\xc1\xac\x1f\xcb\xc2\x37\x61\x70\xca\xf3\x72\x4a\xc1\x93\x04\x63\x2d\xca\x26\x60\x40\x07\x48\xa4\xe3\x29\x82\xd9\x65\xc2\x28\x43\x64\x19\x0b\x9e\x26\x98\xe2\xa7\xef\xe1\x4a\xe2\x76\xe5\x05\x7c\x3d\xab\xa5\x98\x21\x98\x82\x67\xc7\xd9\xb0\x97\xb5\x64\x19\xcf\x12\x4c\x00\xb3\xe7\x42\x65\x96\xcf\xab\x12\x50\xc0\x73\x04\xf3\xae\xe3\x46\xe3\x14\xb2\xa9\x6c\xb6\xdb\x74\x02\x9e\x27\x98\x00\xde\x1b\xf4\xf2\x94\x21\x1b\xec\x02\x0a\x78\x81\x60\xec\x57\xd9\x1b\xcc\x12\xca\x89\xad\xf1\x22\xc1\x9c\x30\xae\x4a\x2a\x84\xf5\x76\x4a\x60\x83\x05\xbc\x44\x30\x1f\x24\x80\x12\xe3\x4d\xe7\x3c\x1b\x1b\x29\xe0\x65\x82\x29\x31\x66\x4f\x85\x30\x38\xa4\x97\xdb\x1b\x02\x5e\x21\x98\xb5\xe4\x0e\x66\xc3\x38\x85\x18\xef\x5c\xed\x05\xbc\x4a\x30\x33\x60\x66\xd4\x2e\x5e\x3f\xb6\x79\x6b\x04\x73\x0d\x72\x58\x18\x04\x43\x11\x0b\x5e\x27\x98\xaf\xc6\x57\xe6\xaa\xc4\xd8\xc7\x82\x37\x08\x66\x5d\x39\x14\xbc\xa6\xb8\x9b\x19\xdc\x0d\x8b\x80\x37\x09\xe6\xec\xb3\xae\x90\x58\xc8\x23\xfd\x93\xf1\x16\xc1\x18\x84\xa0\xc9\xa2\x93\xa3\xbe\x9d\xee\x05\x9c\x4e\x93\x9c\xe0\x9b\x4c\xe2\x94\xb7\x09\xc6\x46\x4f\x02\x13\xf3\xe9\xd2\x17\xf0\x8e\xd5\xdf\xc9\x76\xad\xbe\xec\xed\x05\x8c\x6c\x10\x10\x63\xa3\x05\xbc\x6f\xe6\x90\x0f\xac\x1c\xf2\xa1\x95\x43\x3e\xb2\x72\xc8\xc7\x56\x0e\xf9\xc4\xca\x21\x9f\x5a\x39\xe4\x33\x2b\x87\x7c\x6e\xe5\x90\x2f\xac\x1c\xf2\xa5\x95\x43\xce\x58\x39\xe4\x2b\x2b\x87\x7c\x6d\xe5\x90\x6f\xac\x1c\xf2\xad\x95\x43\xbe\xb3\x72\xc8\xf7\x56\x0e\xf9\xc1\xca\x21\x3f\x5a\x39\xe4\x27\x2b\x87\xf
c\x6c\xe5\x90\x5f\xac\x1c\xf2\xab\x95\x43\x7e\xb3\x72\xc8\xef\x56\x0e\xf9\xc3\xca\x21\x7f\x5a\x19\xe4\xac\x95\x41\xfe\xfa\xef\x76\xd3\xfd\x9b\x2d\xc8\x16\xfc\x0a\x00\x00\xff\xff\xd5\xc4\xca\x21\xce\x20\x00\x00") - -func dataDvorakJsonBytes() ([]byte, error) { - return bindataRead( - _dataDvorakJson, - "data/Dvorak.json", - ) -} - -func dataDvorakJson() (*asset, error) { - bytes, err := dataDvorakJsonBytes() - if err != nil { - return nil, err - } - - info := bindataFileInfo{name: "data/Dvorak.json", size: 8398, mode: os.FileMode(420), modTime: time.Unix(1452717629, 0)} - a := &asset{bytes: bytes, info: info} - return a, nil -} - -var _dataEnglishJson = []byte("\x1f\x8b\x08\x00\x00\x09\x6e\x88\x00\xff\x5c\xbd\x4b\x9a\xe3\xca\x0e\x34\xb6\x15\x7f\x9a\xf4\xe4\xae\xc0\x6b\xf0\x0e\x3c\x4a\x92\x29\x31\x4b\x24\x93\x87\x0f\xa9\xd4\xde\xbc\x81\x88\x40\x52\xfd\x0f\xee\xed\xea\x3e\x55\x2a\x32\x1f\x78\x04\x02\x81\xff\xef\xf6\xff\x94\xfd\xb8\xfd\xdf\xff\xef\xff\x75\xfb\xd4\xf3\xf6\xbf\x5b\xb1\xff\x1d\xd5\xff\x6f\xcc\xf6\xff\xc9\xff\xb7\x0c\xf8\x7b\x3a\xfc\xbf\xfb\xff\xd5\xbb\xfd\xdf\xec\xff\xfd\xad\x7f\xdd\xfd\xff\x16\x7c\x1b\xbe\x7e\x2e\xf5\xed\xff\xf4\x67\xb6\xff\xbf\xd7\xcd\xfe\x7f\xf1\x4f\x1d\xd3\xcb\x7f\x6c\xfe\xd8\xff\x0d\x75\xf9\xe3\x3f\xfc\x73\xee\x07\xfe\xfb\x81\x7f\xb4\xff\xeb\xfc\x7b\xaa\x7f\x9c\x3d\x94\xff\xec\x3b\xf9\x87\xbe\x33\x1e\xe0\x0f\xbe\x2e\xc7\x68\x7f\xec\xf8\xf6\xd3\x7f\x32\x4d\x13\xbe\x07\x7f\xa4\xcd\xbf\x17\xaf\x50\xfd\xfb\x52\x57\xf1\x4d\x5b\x79\x8c\x07\x3f\xf7\x0f\xbe\xe5\x91\x0f\x7c\x23\xfe\xc2\xef\x79\xd4\xb2\x3c\xec\xcf\xa9\x3c\xfd\x1f\x3f\x39\xf9\x27\x94\x3b\xbf\xcf\x7f\x29\x3e\xb7\x4f\xfe\x80\xe7\x8a\xc7\x5b\x0e\xbe\xfb\xf2\xd4\x52\xe1\x21\xb9\x08\x0f\xbc\x78\xf1\x95\xc0\x62\x8d\xfa\x57\xfe\x88\x7e\xf3\xe2\xff\x3f\x94\x01\x6b\xea\xab\xb3\x67\xfc\x92\x8a\x65\x7e\xd4\xca\x3d\xc8\xfe\x9f\xb6\x6c\xef\xea\x5f\x70\x55\xea\x39\xf9\x7f\x9c\x6a\x7d\xe2\xa7\x33\xf6\xa1\x70\x7f\x0a\x96\xa3\x3e\x93\x7f\x7f\x97\xfa\x27\x9f\x1c\x0b\x3f\x67\xbc\xc2\xc1\x25\x2b\x7f\xf0\xc7\x7d\xab\x33\xde\xf4\x83\xd5\xd4\x3a\xe2\x75\x7a\xfd\x26\x7b\x4c\x7e\xc0\x27\xef\x78\xb5\x1d\x7b\x86\xdf\x8b\xad\xde\xed\xa9\x7d\x31\x1e\x78\x1e\xee\x68\x9f\xce\x3d\xeb\xbf\xe1\x20\xe8\x85\xfc\x87\x76\x3c\x1d\x1e\xff\x48\x58\x74\x3c\xd8\x1b\xff\x7c\xee\xd8\x8b\xe3\x98\x70\x72\xf8\xdf\x97\x9c\x07\xac\xcb\xb2\x24\xfc\xf5\x85\x9d\x79\x67\xee\xea\x51\x2b\xf7\x09\xcf\x5d\xfe\xe0\xd0\xed\x27\xff\xdb\x98\xfd\x05\xe7\xca\x85\xe7\x0f\xf2\x98\xed\x75\xdb\x3e\x5c\xc3\x2d\x4e\x37\x3e\x61\xca\xb1\xbf\xfe\x4a\x09\x3f\x9f\x3e\x38\xa6\x43\x7d\x2f\xf8\xeb\x82\xb7\xf2\xef\x3e\xfd\xbc\xd8\xe7\x62\xc9\x3f\xb1\xcb\xf8\xa0\x7d\xd4\x1a\xa6\xe5\x13\x1f\xb7\x27\x6c\xfb\x7c\xf6\x23\xff\x03\xde\xf7\xee\x1f\x6e\xaf\x85\x45\xbd\xdf\xf1\x9b\xf8\xfd\x76\xbc\x70\xcc\x1e\xe5\xc5\xa3\x33\xf1\x77\xd4\x93\x67\x7b\xcc\x93\x1f\xca\xe3\x8d\x6b\x9c\x26\xff\xde\x35\xd7\x75\xe2\x49\xf2\xdf\xb5\x1f\x3c\x17\xef\x84\xfb\x5c\x16\x5c\xf9\x7b\xc1\x4d\xb7\x8b\x18\x2f\xfa\x48\x71\xa9\x97\x87\xd6\x01\xaf\x31\xd4\xbc\xf3\x08\xf4\xbc\x76\x47\xc5\x5b\x3d\xec\x68\x1e\xd8\xee\x3b\x97\xb7\xcb\xc7\x81\x05\xd6\x06\x2d\xba\x7f\xfe\x0e\xfe\x0b\xb8\xc5\xf7\xb2\xed\xfc\xb1\xa9\x64\xbe\xd4\xc1\xab\x76\xcf\x79\xd2\x4f\xb7\xf5\x7a\xd7\xcd\x5f\xc9\xaf\x30\xbe\xd7\x1e\x1b\x27\x8a\x07\x2b\xdd\xf9\x0b\xa7\xb4\xeb\x82\xe1\xd8\x0d\xf8\x45\xcf\x9c\x57\x3d\xbe\x2f\x0a\xed\xc6\x56\xcf\x85\x8b\x52\x57\x5d\x32\x9e\xa2\xf2\x07\xef\x74\xfa\x4f\x16\xbd\x6f\x9a\xec\x91\x79\x22\xf7\x23\xf3\x94\x2e\x07\x0e\xe3\xbc\xf1\xbb\x71\x27\x70\x06\x6c\x4f\xf0\xcb\xbb\x02\x7b\x82\x1b\x3f\xa6\x75\xcd\x4b\x1e\x62\x
23\x79\x85\xf9\xe1\xc7\xf6\xe1\x2b\x3e\xb9\x13\xef\xad\x6a\xc7\x37\xdf\x5c\x6d\x27\xbf\x65\x4e\x03\x2f\xc2\x1b\x0b\xc7\x7f\x7c\x9c\x79\xe7\x95\xc4\xd6\x70\x0b\x70\xd3\x66\xdc\xea\x2d\xcf\x79\xee\xb0\x3e\x66\xf8\x0e\xad\x67\xfe\xa3\x3d\xb4\x7f\xe3\xb2\x0f\xf8\x99\x29\xd3\x5a\xaf\x53\xea\xfd\x4f\x5b\xa6\x6c\x1b\x45\x7f\x00\xb3\xa3\x35\xe9\x8f\x53\x16\x69\xcc\x69\xc3\xef\xc4\xb9\x5f\x0a\x7e\xee\x9e\xf4\xb1\x79\xa2\x09\x38\x92\xec\x3f\x2e\x59\xd2\xd2\xda\x37\x15\xff\x2e\x33\x33\x1b\xbe\x6f\xd6\x61\x99\xb9\x1a\x59\x57\x2a\x2f\xb1\x18\x1b\x7f\x25\x9e\xbe\x4f\x73\x98\x17\x7e\xae\x9f\x0f\x1a\x3f\x5c\xd0\x7b\x9a\xcb\xc4\xcb\x5d\x27\x9a\x98\x38\x64\x76\x73\x76\xfd\x84\xfd\xe2\xc9\xef\x5a\x19\x32\x5c\xdf\xfe\xc4\xea\xe2\x28\xcd\xf4\x53\x66\x92\xb9\x6e\xb5\x99\xdd\x38\x97\x34\x02\xef\xb1\xe0\x26\x9b\xe3\xd8\x7c\x33\xb6\x8a\xb5\x9f\xf2\xdd\x7f\xfc\xc9\x1d\x3b\x6a\x5c\x05\xb7\xe6\x78\x70\x18\x90\xba\x62\x57\xf5\x32\xfc\xe3\x9d\xe1\x5e\x4e\xd8\xe5\x79\xd6\x21\xf2\x77\x59\x37\xdb\x45\x78\x8b\x04\x9f\x52\x36\x7c\x54\xa1\x97\x82\x9f\xb9\x6f\x25\x63\xf9\xd2\x64\xbf\x69\xe0\x37\xeb\x98\x2d\xf9\x97\x0b\xbf\xc1\xd9\xfc\xd4\x0e\x9f\x59\xbb\x89\x26\xb2\x2c\xe7\x81\x0d\xd4\x0d\xb1\x9b\x37\xf1\xe0\x9a\xa3\xe3\x47\xb8\x43\xe7\xf6\xf9\xd6\x0f\x78\x85\x05\xbe\x63\x4e\xba\xfa\xf3\x47\x8b\xda\x5f\x47\x26\xff\xda\xa1\xe1\x89\x49\x2f\x19\x9b\x51\xbf\x3c\x75\xfa\x0f\x7e\x51\x78\x3e\x71\x0c\xc7\x73\x83\x91\xa8\xb0\xab\x15\xfe\xdf\xd6\x1a\x9b\x69\xdb\x45\x7f\x70\x1c\xd8\xb7\x89\x67\x60\x2f\x0b\x4e\x60\xfe\xed\xcf\x38\x7a\xf8\x90\x27\xed\xdf\xc8\x47\xf6\xa3\x82\xf7\xaa\x34\x13\x3d\x0e\x71\x2c\xd7\xb9\x98\xb5\xbc\x31\xe4\x60\x84\xe0\x57\x1e\xd7\x8c\x47\xc0\xfe\x0a\x2f\xf4\x81\x97\xd8\xe9\x08\x07\x6e\xeb\x41\xaf\x5c\xf9\x1c\x0f\x3e\x96\x1b\x4d\x9a\x8c\xba\x2d\x72\x02\xe7\xba\x9a\xb1\x18\xda\x8e\xed\x5c\x52\x1a\xa6\x13\x0e\x60\xae\x38\x80\x27\xbf\xed\x2d\x67\xb5\xe7\xbe\x62\x77\x56\xbe\xda\x44\xb7\x70\x6c\x27\x23\xa4\x7e\xac\x15\x8e\x9d\x97\xd8\x0c\x33\x63\x1e\xfb\x06\x98\x87\xd3\x16\x89\x56\x23\x27\x9c\xdb\xcb\x53\xe3\x48\x7a\x28\xe7\xff\xf5\xce\x73\x6b\xb7\x4d\xc7\x9d\x2e\xca\xae\x13\x5c\x02\x7f\xa6\x37\x93\x86\x5f\xb3\x9d\xf0\x8a\xf5\xc5\xab\xf6\xe1\x8b\xaf\x66\x3f\xf0\xa1\xdd\x27\x2e\x6b\x57\xb1\xc8\xc3\xa6\x23\x84\x25\xa4\x2b\x9c\x0b\x1e\x6b\x4e\x9b\xad\x08\xde\xd0\x3c\x20\x5e\x31\xe9\x64\xa5\x30\x86\x19\xaf\x64\xbf\x87\x0e\x65\xa6\x17\x5d\x47\xfe\x5a\xdb\x49\xfe\xda\x21\xcd\x0b\xee\x2c\x16\xd2\xaf\x2e\x42\x97\x8d\x9f\xd2\x27\xbc\xc3\x71\x6e\x38\x76\x65\x87\x91\xa9\xb6\x49\x1b\xae\xd2\xb3\x60\x57\x6c\xe1\x68\x08\xc6\xcc\x70\xca\xde\xf9\x81\x53\x86\x6d\xf0\x87\xe3\xb3\x28\xc4\xb8\x73\x43\xcc\x9a\xd2\x23\xd1\x1e\x99\x71\xd6\xbd\x49\xdc\x4c\xbd\xca\x87\x16\xd4\xe3\x1c\xfc\xb2\xeb\x15\xd3\xc2\x87\x0d\xb3\x65\x66\x99\xf7\x84\xbf\x0f\x61\xe6\x4e\xe7\xd4\xe5\x74\x1e\xe5\x7e\xfa\xbe\x3f\x68\xc4\x6d\xfb\xf1\xd0\x5b\xfa\xfb\xd1\x61\xc1\xe9\x61\x40\xcf\x9b\x96\xee\x1b\x63\x11\x73\xdb\x6f\x1e\xe3\x32\xaf\x75\x3b\x18\xdf\x6e\xb4\x83\x77\x6c\xed\xb3\xe8\x10\xf2\x06\x1c\xd8\xb1\xc7\x94\x9a\xa5\x56\x48\xcb\x07\xda\xdd\x5b\xe2\xbd\x60\x53\xf6\xf8\x26\xed\x7e\x57\x68\x59\x4f\x06\x29\x76\xa6\x7c\x51\x72\xd1\x9b\xce\x1b\x0f\x60\xd6\xd1\x1b\x92\x87\x3a\x8c\xf8\x18\x4c\x67\xdc\x47\x33\xd7\x38\x27\x70\x54\x58\xab\x9c\x10\xfd\x58\x4c\x5b\x18\x06\x54\x06\xa8\x38\x2e\xfd\x44\xf7\x3c\xd2\xa3\xd9\xfe\xf8\xb3\xfc\x67\x3e\xf4\x28\x38\x2e\x87\xce\xdd\x3d\x31\x09\x41\x28\xe5\xa1\xe7\xce\x37\x61\x38\x19\xde\xc8\x22\x6e\xc4\x46\xe5\x7e\xb7\x70\x6f\x09\x33\x93\x69\x14\xf7\xb5\x98\xf3\xe6\xc1\xd0\x5b\x9b\x45\x96\x0f\x78\xe7\x61\x90\x1d\x18\x19\x94\x74\x34\x6b\
x45\xb7\xe6\x9e\xb1\x7b\xe1\x7c\x3d\x2d\xa0\x9f\xe7\xbf\x56\x58\x82\x8f\x2f\x1a\xfc\x83\xc7\x75\x5a\xaa\xb2\x2c\x58\x0b\xc4\x72\xb1\x7f\xb8\x1d\xe3\xb9\x77\x49\xae\x41\xa6\x65\xc5\x89\xb2\x83\xf6\x66\xf4\xc5\xb0\xd8\xc2\x4f\x3a\xf2\xcc\x80\xdf\x3c\x18\xd2\x01\xb3\xbd\x30\x4f\xe3\x65\xd7\xc7\x04\x4b\xbf\x97\xa1\x39\xd4\x1b\x82\x0f\xae\xc7\x3e\x31\x00\xdb\x68\xd8\xde\x57\x5c\xec\x76\x99\x26\x91\x19\x81\x7b\x1c\xfc\x87\xca\x87\x5d\x0b\xee\x1a\x32\x0b\x2d\x34\x7c\x54\x97\xf9\x02\x48\x04\x07\x5e\x85\x89\x3b\x63\xf7\x83\x2e\x95\x0f\xf1\xb5\xd9\xf8\xe0\xbc\x95\x7a\xf2\xb2\x8c\x0c\x30\x6c\x7d\xf8\xdc\x63\xc1\x8e\x26\xd9\x5f\xbb\x0a\xc8\xac\xaa\x1f\x2b\x5e\xaa\xbb\x36\x6e\xe5\x9d\xee\xb1\x69\xff\x9d\x05\xbf\x7f\x39\x15\x63\x8d\x8a\xac\x76\x3e\xbc\xa5\xae\x13\x6f\x24\xdc\x3a\x43\xe5\xa5\xea\x34\x3c\x18\x94\xba\xeb\x62\x56\xc0\xa4\x25\xe7\x19\x87\xbf\x2c\x0a\xb1\xdc\xbe\x33\x68\x0c\xb3\x78\xae\x3a\xcf\x69\xc3\x77\xcc\x99\x7e\x12\x9e\xa7\xdb\x22\x2f\xf0\x8f\xc2\x12\xd2\xc6\xa6\x3b\xd7\x4a\xee\x17\xbb\xb9\xc9\xe1\x86\x6d\xf2\x08\x90\xff\xd9\xd2\x42\xee\xe0\x58\x69\x3a\x5f\x5c\xf8\x5d\xe6\xc9\xec\xbd\x3c\x59\x3f\xd1\x82\xd9\x12\x31\xe1\xe2\x4f\xdc\xf3\x84\x9f\x2f\xbf\x58\x36\xbf\x20\xb8\x2d\x1b\x73\xe6\xd4\xed\x75\x32\xcb\x40\xd7\x5f\xdf\x4c\x33\xd2\xa0\x8b\x52\xb4\x18\x0b\x03\xc3\x4c\x83\xc4\x10\x01\xbb\xb9\xe6\xbe\xe0\x7a\x75\x7c\xf9\x89\x19\xb3\x9d\xf9\x3f\xbc\x88\x71\xc1\x26\x5c\x29\xdb\xc5\xe9\x46\xbf\xa1\x7b\xc2\x87\xb6\x35\xd2\x3d\x54\xc8\x73\x4f\xe7\x44\x97\x4f\x7b\xa3\x98\xbf\xf2\x98\x76\xc8\x0d\x66\x5b\x9a\x71\xd7\x51\xe3\xc3\xc0\xec\x4c\x8c\x1f\x7e\x72\xe4\x50\x4f\x5e\x9b\xcd\x8c\xe1\x8c\x4c\x51\x5b\x6f\x46\x54\x59\xe8\x11\x27\x30\xe1\x29\x2c\x24\xd8\x0b\x4d\x81\x6d\xf7\x2e\x44\x60\xa5\x77\x33\x93\xa6\x6c\xb9\x02\xe3\xf0\x93\x8d\x6f\xb5\xdf\x4f\xc8\xa6\x1e\x3a\x33\x8f\xb6\xcb\xf6\xa8\x1b\xc2\x81\x76\xf5\xe0\x5e\xd3\x43\x7e\x72\x2a\x7f\x71\x77\xcc\xd6\xdb\x93\x30\xee\x5c\x26\xc6\x07\x7b\xfe\xe5\x26\x20\x4b\x34\x33\xc8\x2c\xc6\x9e\x01\xbe\x95\x87\x71\xf7\xb4\x63\xe0\x8d\x3d\x94\x71\x9b\xe1\xd4\x69\x61\x6e\xb4\x0c\xfc\x16\x7a\x4e\x19\x34\x33\xfd\xda\xfe\xfc\x6b\x57\x17\x89\xa7\xbb\x38\x3d\x78\x36\x2f\x54\x98\xea\xee\xe5\x81\xc5\xaf\x70\x2d\x5b\x36\x8f\x6b\xf6\x7a\x1f\xcb\xaa\x93\x89\xdf\x92\x10\x05\x4f\xf1\xf3\x63\xa5\x0d\xf3\x18\x41\xe9\xfd\xfd\xd4\x03\xbe\x2d\xf1\x80\xeb\x3e\x69\x64\xe2\xde\x7a\x24\x80\xc7\xf3\x18\x08\x9b\x58\x19\x99\x3c\x19\x99\x98\x33\x85\xa5\x09\x97\xe1\xff\x56\xbb\x97\xdb\x15\xa6\x1a\x55\x8e\x68\xe5\xeb\x78\xdc\x17\xc7\x6b\xce\xf1\x18\x9e\xdd\xb9\x13\xdb\x0f\xf3\xbe\xbc\xaa\x3d\x03\xcb\x3b\xad\x8b\x3b\x59\xd8\x00\x9e\xe9\x45\x9e\x75\xe3\xa5\x90\x5b\x44\xb0\xe0\xa9\x2b\x13\x7b\x8b\x27\x68\x4e\x13\xa3\xc4\x49\x9f\x3c\x6c\x48\x72\x7b\x0b\x59\x2b\x8d\x02\xf6\xb8\x2f\x07\xa3\xb0\x0c\x70\xc3\x1c\xe6\x41\x8f\x73\x78\xf2\x81\x98\x78\xe2\xee\xed\xe7\xb6\x6e\x74\xa6\x05\x37\xd8\x7c\x85\xa7\x8d\x8c\xdc\x56\xfa\x57\x8f\xac\xf4\xeb\xe1\x59\x7f\xfb\xbc\x02\x5a\x43\xe8\xf0\xf9\x23\x78\x70\x50\xb0\x78\x28\xd8\xb1\xeb\xbc\xf9\x3d\x86\x15\xcd\xba\xd3\xda\x72\x4b\x31\xe5\x95\x3b\x1c\x4d\x26\xf0\x8c\xe2\xcb\xac\x0c\x63\x8c\x5c\x92\xc1\x8e\xad\x02\x63\x60\x5b\xe9\x32\x10\x71\x98\xd3\x5f\xc1\x28\x58\x08\x07\x51\x16\x9e\x39\xbe\xf0\x16\xc9\x6a\x59\x5e\x75\xa2\x95\xb3\x57\x84\x75\x58\x4b\xee\x15\x33\x63\x4d\x91\x74\x0d\xf6\x94\xbc\x09\x34\x47\x0a\xd7\xea\xab\xf0\x68\x21\xb3\xe8\x15\x20\x59\x88\xb3\x30\x6d\xd5\x9d\x32\xdf\x6e\x3f\x46\x17\x08\xd7\x78\x17\x8e\xe9\x3f\x20\x30\x80\x6b\x37\x98\x75\x97\xad\xaa\x8c\xbf\xf1\x3e\x85\x19\xec\x82\x75\x67\x7c\x9d\x26\x3d\x00\xdf\x93\xa9\x4e\x1a\x66\x04\x0b\x4a
\xef\x70\xf3\x4f\xd9\x9d\x91\x0f\x99\x5f\xc8\x73\x86\x6c\xce\xa6\xe8\x2e\xfa\xe9\x22\xb8\x6b\x89\xc3\x1c\x5b\xe0\x71\x35\x8e\x66\x46\x1c\x6f\x3e\xe6\xc9\x84\x7d\x67\x6e\xe2\x3e\x8c\x40\x42\x9d\x26\x66\xc1\x47\xe1\x9d\x77\x48\x4b\xa9\xe3\xc9\x7c\x19\xbe\x5f\x5e\x84\xdf\xb2\xf8\x37\xdf\x10\x7f\xe1\xb0\x4e\xa7\x27\xa6\x8b\xa5\x7a\x3c\x81\x48\x95\x78\x9f\xfc\xb4\x96\x39\x3d\xe8\xf2\xef\x5c\x8e\x3e\xc9\xe5\x75\x93\x12\xe9\x63\x63\x86\x60\xe7\x48\xbf\xea\x9e\x5e\x58\xb2\xe4\x91\xb0\xc2\x13\x47\x1e\x70\xe8\xb3\x99\x44\x9a\x5d\xf7\x66\x48\x19\xec\x04\x2c\xba\x58\xb8\x8b\x5b\xda\x4b\x66\x9a\x38\xa5\x5f\xbc\xbc\x5d\x6e\xac\x4e\xea\xfb\xb0\x9d\x6f\x1a\x68\x5b\x74\x3a\xb0\x99\xd7\xc4\x02\x98\x9d\x26\xd7\x97\x4c\x0f\x64\x89\x59\x15\x1a\x97\x3d\xd4\x96\x75\xe5\x73\x2c\xdd\xbe\xe2\xec\x0b\x11\x9e\x2b\xd3\x3b\xbb\x30\x8f\x85\xde\x70\x36\x1f\x87\xef\x6d\xe9\xcc\x96\x19\xad\xec\xa3\x50\x3a\xde\xee\x8d\x3e\x86\x0e\x9a\x01\x4e\x12\x62\x40\x9b\x70\x30\x5e\xb9\xdb\xe6\xe1\x9e\x9a\x95\xc6\x99\xf5\x00\x52\x21\xd1\xc6\x50\xc7\x31\xb3\x95\x9b\xa6\xe0\x63\x13\x74\xbb\xdd\x14\xae\x5e\x6f\x47\x7b\xf5\x93\x98\x4d\xdb\x0f\x5c\x60\x97\x47\xae\xd3\xb9\xf0\x8e\xb8\x7f\x2c\xf8\x98\x2c\xab\xf5\xb0\xed\x33\x2f\xc4\x14\xfa\xa0\x0f\x1e\xeb\xca\x9f\xb7\x2c\x2f\x77\x11\x8a\xef\x8c\x68\xed\x15\xb1\xbc\x6b\x5a\xe9\xb6\x1b\x7e\xb6\x0a\x06\x38\x72\x5c\xc7\x1b\x5c\x49\xc3\x49\x6c\xaf\x4e\x38\xab\x07\x51\x91\xae\x98\x11\x21\x2e\xb0\x0b\x8f\x85\x9d\x25\x26\x51\x22\xf8\xb6\x35\x26\xa0\xec\xd1\x3f\x4e\x37\x0d\xef\x9d\x07\x8f\xae\x20\x62\x8d\xbe\xec\xb2\x65\xa7\x45\x5d\x82\x93\x96\xac\x40\x8f\x89\xe7\x4b\x0e\xcb\xac\x91\xac\x1b\x20\x37\xfe\x5e\x47\x1f\x75\x8e\xf0\x81\x16\xcd\xbe\x08\x18\xfb\xc9\xc3\xcf\xf5\xc4\x6e\xcc\xa4\xef\xfb\xd7\xa9\xd2\xdd\x33\xfb\xee\xbb\x60\x21\x2b\x43\x47\x07\x28\xe4\xec\xb6\x2a\xd4\x21\x7c\x6f\xc7\x48\x15\x86\xbe\x30\xfe\x5a\x64\x08\x2c\x85\x72\x84\xb5\x3d\x37\xd3\x25\x22\x72\xcb\x95\xbf\x21\x29\xb9\x23\xf4\xdb\x60\xd7\xf1\x79\x2b\xef\x88\xbb\x13\xde\x1b\xdf\xd9\x66\x61\x62\x5d\xdc\x69\x32\xeb\xe0\x99\x92\xff\x41\x9a\x8a\x7f\x71\x50\x57\xf0\x67\xb9\x33\x2c\x54\x36\xb2\x9e\x71\xdb\xcd\x78\xc2\xb3\x73\x21\x2b\x2a\x49\xa7\x2e\x5e\x2c\xf3\xa4\x4f\x59\xdc\xd0\xc1\x64\x9e\x42\x1b\xde\x9f\xdc\x4c\xc5\xad\xc5\x22\x37\xa6\x18\x3b\xce\xf4\xef\x8d\xf0\x5b\x3b\x46\xf7\x89\xae\xd0\xbc\x7c\xa0\x9f\x0d\xc4\x46\xdc\xe5\x21\x16\x17\xc0\x7d\xe7\x88\x53\xf8\x8b\xad\x20\x54\xfe\x73\xf2\x08\x5b\x74\x6f\xbf\x0d\x36\x74\xa7\x35\xb1\x74\x0a\x05\x34\x45\x87\x8c\x34\x88\xa0\xfb\xc1\x0e\x23\xa4\x57\x37\x6b\x44\x07\xd2\x4d\x2d\xcc\x20\x5e\xe1\x29\x06\x83\xaf\xc5\x7e\xe5\x1e\x3f\xe9\x2f\x85\x5f\x63\xbe\xf2\x86\x22\x8f\x9d\x1c\x5e\x80\x89\x31\x9e\x9d\x9a\x9e\x01\xae\xbc\xc4\x7e\x30\x72\xda\xf2\xca\xec\x22\x92\x60\x62\x41\x45\xf0\x3a\x2e\xc8\x2e\x6f\xd3\x25\x9c\xad\xc8\x4e\x86\x7a\x76\xc7\x2d\x70\x75\x41\x4c\x0d\xed\xee\xeb\xca\x83\x4e\xfc\xde\x5f\x60\x53\xf6\x76\xde\xef\x78\x84\xa9\x1d\x83\x7e\x9c\x2a\x7e\x2f\x53\x9c\x7d\x1c\xf1\x68\x3d\xb1\x8a\x89\x7f\xcc\xca\x60\x7e\xaa\x42\x33\xfc\xab\xd9\x1e\x16\xdc\x6c\x67\xcc\x4b\x32\xd8\x2f\x17\xe4\xb5\x0c\x58\xc3\x2d\x07\x48\x14\xa1\xed\xc3\x2e\xf2\xc1\x25\x7a\xea\x3e\xd8\x7b\x45\x0e\xee\x09\x34\xf3\x08\xcb\x2b\x26\xfc\xd6\x27\xfd\x95\x39\x21\x5a\x52\x33\x7e\x93\x5c\xc5\xac\xe3\xbd\x9e\xdd\x54\xfa\x1b\xc1\x4e\xc6\x24\xfb\x44\xfb\xa6\xec\x15\x8b\xdf\xf3\xba\x4d\xb4\xcc\x1e\x5a\xe9\xda\xba\x57\xc6\x51\x18\x5e\x3c\xe3\x1e\x75\x12\x15\x5e\x89\xf3\x58\x4e\x2c\x1b\x7a\xc2\xe6\xf8\x25\x60\xc0\xdb\xde\xf9\xdc\x03\xdb\x77\x0b\x4f\x97\xd7\xd5\x9d\x89\xb3\xb9\xdd\xe5\x2a\x96\x32\x87\x5e\x6a\xcf\x63\x3d\x54\x7f\xf
6\xed\xa4\xd7\xee\x11\x6e\xc1\x8d\xaa\xf8\x51\x88\x40\xd9\x65\xc6\x59\x6a\xf5\x08\xa4\x4f\xb2\x89\x40\x1e\x36\x9e\x21\x5f\x42\xee\x39\x22\x00\x9c\x33\x54\x7b\x16\x02\x95\x5d\xc5\x96\xa4\xb5\x4e\xf5\xc1\x34\x66\x57\x8e\x6d\xd6\x91\x3f\x99\xfa\x8d\xcf\xbe\x57\x1d\x95\xb4\xc9\x51\x98\x69\x93\x2f\x88\x2a\x2d\x8e\xe9\x48\x67\x39\x64\x0f\x88\x4b\xd4\x43\xe2\xb5\x17\xd5\x9e\xce\x07\xe3\x20\x7b\x72\x05\xed\x63\x6d\x31\x04\x3d\x7d\x7a\xd3\x19\xaf\x0e\x04\x32\xef\xf0\x10\x3e\x60\xf4\xe5\xa1\x9d\xb0\xab\xc5\xa0\x93\xdb\xea\xa0\x02\xde\x8a\x39\x33\x76\x62\x08\xfc\xbe\x1e\xdc\x58\xbb\xc1\x0f\x0b\x5c\x4f\x65\x42\x78\xb1\x92\x61\xea\x16\x65\x79\x9e\xa3\xda\x9f\xaf\xb2\x07\xd6\x16\xbe\xa4\xff\x33\xd7\x45\x3b\xc0\xa0\x0d\x11\xad\xee\x24\x33\xca\x44\xcf\x48\x57\x62\xbe\xfc\xcd\xeb\x72\x27\xb4\xec\xa5\x06\x3e\xf3\xcc\x87\x76\xa8\x81\xff\xb0\x66\x25\xda\x33\xdd\xa4\xb2\x42\x6c\x81\xec\x11\xf0\x6b\x25\x86\x8c\x33\x6c\x0b\x36\xe2\xbd\xf6\xb0\xf2\x07\x2d\x5c\xd8\xea\x5f\x1c\x50\x8b\x8f\x33\x2b\xaf\x7f\x92\x58\x06\x04\xb8\xb9\x44\xe7\x16\xa7\x89\xb8\x37\x51\x2a\xbe\xb1\xf2\x5c\xa6\x26\x16\x20\x14\x19\xaa\x27\xcd\x1a\x6e\x9b\x3f\x90\xad\x69\x87\xb5\xf3\x7f\x20\x22\x51\xf6\xfe\xdc\x77\x9d\x5f\x45\xf6\xb8\xed\x77\x1e\x66\xf3\x6e\xb6\x01\xda\xf8\x7b\x52\x81\x47\xae\xcc\xce\x70\x65\xdd\x7c\xc5\x29\x1e\xcc\x57\xb3\x90\xff\xd0\x95\x31\x5b\xbb\xb1\xbe\x61\x06\x95\x60\x73\x52\x4e\x4a\x78\x68\xb7\xcb\x45\x4c\x90\xaf\x95\xb6\x99\x86\x51\x7b\x88\x25\x9d\xca\x8b\xd9\x47\x5f\x01\x69\x31\x05\x5d\xf9\x52\xb6\x31\xc5\x7c\x2a\xee\x6a\x6f\xf1\x5f\xe0\x0b\x85\x19\xca\xb0\x9d\x04\x64\x0e\xb3\xd7\xac\xe4\x38\xe6\x2f\x52\x07\x9e\x45\x15\xd0\x87\x80\xa8\x21\x7f\x85\xd0\x0b\x23\xf3\xfd\xf9\x09\x28\x1d\x3f\x69\xb6\x19\x7e\xd2\x16\x9e\x17\xd1\x7e\x0d\xf3\xe0\x4a\x78\xd2\x83\xd5\x9d\xb7\x83\xf9\x66\xfe\xb5\x57\xc7\x2f\x27\x3e\xae\x00\x81\xab\xf1\x88\x9a\xed\x5b\x15\x56\x8b\x86\x51\xf8\x71\xa8\x8e\xfe\x97\xe5\x27\x00\x65\x9e\xcc\xfa\x27\x7d\x56\x6d\x4a\xec\xe6\xa3\xc2\xea\x55\x58\x93\x73\x61\x0e\xec\x88\x77\x43\x5d\x3a\x95\x08\xde\x76\xa9\x85\x8b\x1c\xc2\x3f\x12\x4f\xf8\xcf\x39\xfb\x66\x9a\xa1\xd1\xfd\xdd\x6a\x65\xb1\x8b\x88\x06\x0e\x3a\x1d\xb0\xbf\x23\x03\x89\x4d\x50\xba\x1b\x0c\x12\x6d\x84\xf1\xd2\x99\xdb\xa6\xde\x98\xdd\xd2\x54\xbe\x05\xbf\x46\xca\xf1\xa0\xcf\x1a\xe2\x77\xee\xb4\x11\x9b\x17\x20\x0f\x78\x0c\x5f\x5e\x5a\xdd\xf9\xee\xf8\xbd\xce\x63\xe1\x45\x43\xe5\x82\xa1\x5a\xa1\xb7\x7e\xe9\xf2\xc0\xa4\x63\x3d\x3d\xe8\xe4\xfe\x02\x26\x87\x0d\x3e\x99\x72\x4e\x0d\xf3\xde\x67\x96\xdf\xa6\x14\x15\x59\x1a\xdc\x47\x1d\xfe\x10\xb2\x27\x72\xee\x3c\x0a\xd5\xd2\xf1\xef\xb8\x50\x6f\xa2\x08\xf9\xb7\x2f\x82\x7e\x99\x19\x98\xc5\x0d\xa7\xaf\x98\x86\x05\x5b\xbf\xc8\xa8\x2a\xc3\xea\xf1\xe9\x95\x30\xfe\xba\x0f\xe4\x01\x74\xa8\x10\xe1\x85\xdf\x7f\x6d\x09\x5c\x8f\x39\xd1\x03\x85\xd8\x85\xc7\x81\xd7\xcf\xff\x43\x72\x04\x06\xff\xe9\x1e\x30\x09\x13\x5c\x41\x0f\x4a\x60\x70\x75\x1d\xbb\x25\xea\xa4\xaa\x74\x47\x32\x06\x1d\x89\x98\x49\xaa\xa7\xf9\x72\xeb\x09\xbc\x82\xce\xeb\x9c\x59\x15\x21\x32\x6a\x66\x4e\xe4\x8d\x24\x6c\x38\xc2\x74\xbc\xb7\x2a\xc3\x85\xfe\xd7\xb1\x3b\xe2\x1a\x87\x2a\x75\x66\x14\x56\x7c\xfa\xc9\xbd\xf6\xaa\x27\x39\x3c\x99\xb5\xab\xad\xe8\xa2\xbf\xb4\xc4\x5b\xc5\x6f\xd9\xcc\x22\x55\xbc\x05\x3f\x68\xcc\x48\xbc\x2c\xff\xe3\x96\xcc\xd9\x5e\x66\xe9\x09\x09\x45\x86\x07\x63\x81\x6a\x11\x21\x2e\xfc\x13\x42\x9e\xaa\xd3\x34\x05\xe6\x40\xc3\xfd\x22\x18\xce\xd2\x8c\x67\x53\x74\x22\x27\xd3\xb8\x3a\xb0\x08\xe8\xd6\x3f\xac\xfc\xe3\x94\x5f\x31\xeb\x87\xb3\xf6\x20\x34\xd6\x47\xe0\x0b\x00\x6c\x48\x3c\x5d\xc0\x37\xf1\x14\x69\xe5\xc3\x44\xbe\x22\x0c\x9d\x05\x32\xb7\x4e\x
34\x32\xb6\x1e\x3d\xeb\x41\xb6\x41\x74\xc1\x15\xc1\x21\x2f\x1c\x52\x25\xbe\x2a\x68\x5d\x16\xed\x30\xc2\x64\x21\x8e\x16\x60\xc9\x2a\x44\xa8\xbc\x1a\x54\x27\x12\xbe\xc4\x5c\xd8\x12\xcf\xb6\x65\x47\x3b\x2f\x3e\x16\xca\xcd\x16\xf8\x62\x66\xcc\x19\xe4\x03\xfe\x0e\x5a\x8f\x25\x95\x83\x8c\x82\xfe\xed\xae\xeb\x25\xbb\xcb\x60\x03\xf7\x31\x92\xf0\x70\x72\x55\xe5\x1b\xfd\xfd\x38\x09\x9c\xdb\x6b\x92\x15\x13\x95\x17\xb3\x10\x5c\xbb\x33\x2a\xcf\x53\x7e\x60\x15\x5f\xe5\x59\xb0\x78\x83\x2e\x03\xbc\x3a\x49\x3b\xcf\x3a\x25\xbe\x88\x9d\x8c\xc4\x07\x9b\x6e\x51\x58\x55\x4c\x70\xb0\x92\xf7\x20\xf2\xb3\x90\xc9\x40\x0b\x77\x43\x34\x68\x0e\x05\x26\x5e\xd5\xf3\xc9\xd6\x20\x90\x2a\x2c\xf4\x23\xd3\xa3\xc1\xe2\xdf\xeb\x04\xcb\x4b\xc3\xe2\x67\xbf\x2c\x38\x39\xf7\xda\x9f\xbc\x67\xe4\x22\x58\x80\x89\x73\x9a\x11\x46\x7a\x2e\xa3\xa2\x7f\x3f\x29\xa2\x73\x7e\xc3\x9b\x57\x15\x11\xdd\xb9\x1d\xaa\x19\xc4\xfd\x4c\x02\x3a\xcc\xe1\xb3\x20\x86\x0a\x18\xbe\xc9\xc3\x54\xde\x6f\x40\x4b\xf8\x4e\x5d\x25\x3e\xda\x40\x63\x6c\x21\x0f\x5d\x8a\x6d\xec\x22\x67\x18\xb9\x4d\x9e\x57\x46\x07\xf2\x5a\x24\x4d\xbe\x69\x40\x7c\x99\x71\x43\xde\x34\x0a\x4e\x1c\xd0\x2d\xa2\xb3\xf3\x6a\x47\x20\x66\x01\x94\x09\xec\xde\xcf\xee\x87\xc9\xb4\x33\x7f\x80\x08\xd4\x99\xd9\x25\xf6\x74\x16\xec\x48\x87\xc3\x88\x2c\x91\xdc\x61\x0b\x75\x17\x93\xc0\xae\xc2\x43\xd4\x1a\xa0\xa0\xad\x22\xd2\xe3\x52\x9e\x0e\xfd\x99\x7b\x8c\x64\xcf\x8c\x7b\xd4\xd1\x11\x1e\xad\x49\xb7\xc6\x2d\x2c\xbd\x66\xfe\x54\xb9\xe7\x1c\xd4\xbc\x95\xa1\xa2\x83\x36\x11\xd1\x4d\x42\xfe\x19\xba\x00\x06\x64\x90\xa0\x5c\xdd\x0c\x0e\xfe\xe1\xde\x60\x73\xaf\x07\x64\x56\x1f\x7a\x87\xd0\x18\x2d\xda\xba\xf1\x08\xa4\xa5\xd7\x42\x65\x77\x52\xbe\xf5\xf6\xda\xca\x3e\x68\x42\x3d\xe9\xbf\x29\x84\xc7\x27\x0d\xf8\x8c\x44\x52\xe0\xf1\x76\xa7\xe7\xbf\xdc\xa2\x00\x5a\x66\x95\x3e\x68\x8e\xea\x9d\xd8\x93\x87\x7b\xac\x8f\xdb\x21\xca\x58\xe2\x45\x8e\xa1\xd8\x95\x83\x61\x71\x6c\x15\x36\x01\xc4\x16\xd4\xef\x70\x63\x76\x4b\x04\xf9\x48\x03\xd7\x0a\x9c\xb7\x56\x04\xb1\xd8\x8c\x1b\x75\xce\x64\x6c\x59\x0e\xb3\xf1\x37\xd4\xa8\x18\xe4\xbd\x27\x8e\x85\xdb\xb6\x47\x6d\x3e\x9d\x1b\x91\x40\xcf\x05\x04\xfd\x21\x7d\xf3\xef\x64\xbe\x9a\x86\xc0\x83\x9c\xcd\xc2\x83\x6b\x16\x2c\x18\x5e\x7b\xcf\x9b\x02\xa8\xa2\xd5\x27\x36\x82\x1d\x76\xe7\xf9\x44\x66\xca\xe9\xe4\xeb\x20\x4f\xa7\x52\x14\xa3\xc1\xc3\xdc\x38\x5d\xa4\x62\xe1\x32\x31\xb2\xda\xcf\x80\x7e\x7a\x0f\x2d\x89\x3c\x6f\x28\xdc\x9e\x73\x77\x13\xee\x7a\x47\x00\x73\xb7\xa3\xce\x9c\xf7\x41\x96\xeb\x08\x4a\xa7\xb8\x5b\x48\x27\x68\x34\xea\x9f\x5e\xe6\x91\x70\x25\x0f\x80\xdd\x6c\x9e\x13\x54\x57\x69\xf1\xbc\x4c\x71\x63\x14\xa3\xdc\x64\x62\x6a\x55\x36\x65\x93\x5d\x80\x9a\x79\xe9\x68\xe6\xeb\xc4\xe8\x79\x2a\x81\x90\xbe\x5b\x98\xa7\x9b\xe7\xc8\x81\x40\x38\x0b\xe2\x88\x2b\x3a\xc7\xb4\xc5\x4f\xab\x87\x78\x0a\xcb\xd2\xcc\x64\xd0\x56\xe6\xc9\xca\x91\x79\x24\x86\x19\xa2\xeb\xcd\x69\xc3\x81\x49\xf7\x3b\x81\x8a\x97\xc2\x95\x1c\x0c\x58\x62\x64\x3c\xbc\xa7\xd0\x4c\xde\x0a\xcf\xb5\x6f\x0a\xef\x02\xfc\x27\x4a\xb9\xa9\x50\x1c\xb9\x14\xc3\x40\x82\x7e\xfe\xe2\x13\xb6\xdd\xae\xb8\x1d\x21\x40\x5f\x53\x9f\xfe\xe2\x95\xcd\xf0\xcd\x6b\x7a\x2c\x72\xd5\x8b\xdb\x07\x92\x23\x3c\x51\x25\x0d\xca\x92\xd3\xe0\x10\x79\x95\x00\x56\xe6\x15\xe0\x6e\xe0\xdb\xa3\xce\x75\xd4\x28\xf8\xd6\x67\x1c\x60\xf3\xc9\xab\x2a\x87\x00\x45\x6f\xf0\x4c\xbe\x05\x24\xa7\x82\x8c\x88\x75\xae\xef\xa5\x21\x62\x7c\x88\x1b\x6a\xeb\x84\xcb\x65\x72\x27\x85\x32\x76\x60\x08\x50\x25\x50\xa8\x76\xf8\x96\xe1\x3c\x14\x50\xc1\x57\x10\x8e\xd8\x62\x6b\x87\x7c\x57\x9d\xf9\xd9\xea\x19\xf9\x41\xf4\xc0\x31\x24\xb9\x4d\xa2\xc4\xeb\xe6\x40\x1b\x2f\x8f\xaf\xde\x20\xa2\x75\xc7\xd3\
x69\x81\x45\x23\x14\xe3\x01\xcd\xc8\x47\x85\xa8\xd8\x8f\x72\xbf\x3c\x3c\x90\xc3\x8f\x7a\x65\x37\xd1\x3b\x9c\x0f\x3e\xbf\x45\x7a\xeb\xb5\x8e\xbb\xee\x6d\x64\xf7\xbb\xc2\xd6\x4e\xd0\x7e\x4f\x0c\xc3\x9f\x97\x18\x26\x00\x6f\x06\xf3\x01\xd4\x5b\xc8\x9c\x3b\x9d\x57\xfb\x39\x33\xb1\x49\x25\x8b\x25\x90\xc3\x81\x39\x3e\x9d\x07\xaf\x01\xd2\x38\x73\xc0\x07\x2b\xd7\x0e\xf5\x90\x13\xef\x96\x43\xbb\x17\x41\x08\x31\x98\xa3\x06\x13\x17\xee\x90\xa5\xae\x53\xf5\x11\x07\x05\xb9\x52\x1e\x6c\xc8\x53\x6c\xf5\x6e\x87\xa9\xd4\xb0\x98\x49\xf6\x03\x48\x3c\x76\xe3\x17\xc7\xda\x32\xbc\xe2\x9f\x84\x47\x7d\xd9\x63\xf3\x6e\xc1\x89\x37\xce\xe9\x9e\xe6\x82\xd5\xc2\x91\xaa\xc1\xbd\xc5\x37\x01\xdf\xec\xb2\x80\xa0\xe5\xd1\xd6\x95\xbb\x68\xdf\x83\x0f\x57\x46\x27\xe6\x95\xd6\x9b\x1f\x93\x10\x3e\xce\xd7\x06\xae\x1e\x33\x89\x78\xb5\xb1\x68\xcc\x5f\xd5\x18\xcd\x2a\xd9\xb7\x4a\x21\x7d\x6e\xab\x8d\x35\x2a\xad\xd3\x4d\x09\x57\x38\x19\x75\xbf\x29\x53\x8a\xd4\xc8\xd6\x4a\xdc\x24\xcb\xc1\x68\x9d\xba\x0a\x2b\xaa\x56\x07\xf3\xd6\xf9\x15\x75\x06\xcb\x34\xcc\xcb\xf2\xd0\x4f\x55\x5c\xbb\x17\xed\xce\x98\x07\xda\x0d\x42\xa4\x8c\xf3\x56\xa5\x8c\x3a\xdf\x7a\x26\xbb\x2d\xd3\xd9\xb2\x33\x3b\x17\xb4\x80\x93\x02\xb3\xad\x15\x67\xca\xec\x30\x43\x44\x0c\x66\x1f\x9c\xb9\x17\x24\xa0\x2a\xaa\x06\xcc\x0d\x43\x1e\xd4\x2f\x08\xab\x04\x85\xdc\x19\x1c\xcc\x2e\x16\x7e\x0a\x3c\x2a\x2d\xf7\xc8\xb2\x9e\x19\x7e\x7c\xd0\x4f\x92\x15\x18\xc9\x79\x11\x8c\xd2\xf8\x08\x7c\xf4\x97\x57\x3f\x1e\x0d\x1f\xef\x58\x92\x63\x6e\x74\x53\xf5\x2b\x36\x91\x4c\x4a\x3b\x85\xac\x72\x6c\x65\x96\xff\x7e\x34\x33\x01\x9a\x24\x0c\x74\xb1\x15\x9a\xaf\x83\x5b\x49\x9d\x6a\x81\xe2\xfd\x34\x5f\xc7\x17\xae\x57\xaa\x10\xd0\x8c\xae\x7b\x0b\xc7\xbd\x54\xb9\x3c\x78\xce\x09\x55\x5a\x74\x4c\x72\x6f\xc2\xc9\xdb\x05\x6b\x34\xfe\x1c\x2d\x80\xff\x15\xbf\xc4\xa1\xdc\x96\x1e\x1c\x39\x5c\xfc\xf5\x9d\x23\xdd\xa5\x7f\x5f\x8b\x96\x18\xe2\x56\x3b\x1c\x34\x08\x20\xf2\x2b\xa2\xb4\x54\x66\xd3\x69\xb0\x3d\xb7\x7d\x1c\x2b\xe2\x1a\xb7\x39\xb3\x38\xe6\x9b\x18\x08\x7f\x93\x18\x0f\x9e\x0f\xdb\xc9\x21\x5c\x9c\x45\xce\xb6\xb3\xca\xbc\xd1\x2e\x21\xec\xa2\x65\x6e\xfb\x78\x0b\xda\x28\xdf\x23\x62\x72\x3f\x8d\x8f\x85\x36\x84\xf1\x1c\xa1\x41\xad\xd8\x3d\xab\x56\xfc\x48\x8f\xf0\x2b\x02\x9e\xdd\xbf\xd1\x6c\x97\x25\xb2\xee\x70\x3e\xe7\xa6\xda\xf1\x73\x21\x7f\x15\x14\x3c\x22\x52\x89\xbc\x2b\xb2\xc5\x89\x0c\x30\x3d\x1f\x59\x8e\x76\x16\x07\xd3\xac\x6d\x68\xd5\x3b\x9c\xd5\xec\x2c\x66\xec\x3b\x92\x33\x9a\x51\x78\xd6\x87\xd3\x6d\xf9\xd2\x76\x4f\x11\xd7\x9e\xdb\x4b\xcc\x0d\x07\x5d\x64\x95\xb6\x38\x97\xa8\x1f\xea\xd5\x18\x57\x10\x60\x12\xa3\x30\x18\xfc\x7a\xbf\x7c\xda\x0e\xab\xfb\x48\xd6\x8a\x5d\x05\x8d\xf1\xa3\xe0\x6d\x73\xdb\x42\xa4\x7b\xfc\xec\x02\x00\xd3\xcb\x42\xb5\x76\xd1\xab\x19\x2a\x1e\xe3\xcc\x12\xa6\xb3\xf8\x79\x64\x56\x6e\xb3\x19\x03\xc4\xd9\x65\x77\xa6\x83\xbc\x75\x12\x65\x09\x2e\xa6\x5d\x71\xd2\xf4\x6e\xc8\x5e\x0e\x2e\xa8\x67\xd0\x2c\x67\x20\x27\x94\x1b\x75\x87\x2d\x74\xf4\x50\x79\xee\x60\x6e\x73\x30\xdb\xb7\x23\xc6\xcb\x6d\x1e\x9c\x39\x98\x08\x7d\x43\x5e\xb5\x44\xb3\xaa\x71\x64\xad\xf0\xd3\x7e\x4e\x5c\xfb\x05\xeb\x8a\xb7\x35\x67\xc7\xda\xa9\x20\x7f\xb7\x1a\x7f\x5a\x69\x54\xb7\xcc\xe2\xf4\x87\x70\xd2\xb3\xd9\x1b\x10\x92\x96\x56\x1f\xdb\x6a\x2f\xf2\x46\x89\x38\xab\x4b\x4a\xa5\x8e\x0f\x57\xd3\x99\x22\x00\x0f\x26\x46\x73\xbb\x5f\xdc\x24\x9f\xb7\x05\x3d\x22\x91\xf4\xce\x20\xcd\x89\x89\x2c\xee\x9f\x0b\xb3\x02\x6d\x8d\xf7\x0a\x71\x31\x97\x70\xde\x66\x01\xe1\x46\x82\x69\x4b\x54\x2d\x3d\x87\x14\x11\x05\x83\xe6\xbc\xc9\xa9\x46\x81\xd8\x41\xc5\x30\x72\x85\x79\x38\x00\xe1\xe0\x4f\xf4\x07\xe1\x4e\xe1\x45\x8e\xd6\xe2\xa2\x96\xad\x3f\x67\x7f
\x03\x5d\x53\xf9\x5a\x1e\x05\xff\xa4\x20\x12\xa0\x89\xcb\x19\xd2\x3a\xa1\xbd\x62\x29\xda\xbe\x1b\x13\xbe\xc0\x5a\x0f\xdd\xff\xab\xb3\x44\x46\x70\x22\xf1\x03\xf9\x25\xa8\x78\x3c\x6a\xa2\x53\x88\x21\x3f\xc6\x39\x99\x89\x62\xd9\x6f\x67\x80\x4b\x27\x22\x4f\x3f\x7f\xdc\x62\x7e\x6e\xaa\x26\xd1\xf5\x58\x12\x73\x4e\x64\xf9\x17\x2e\xd0\x2b\x33\xd1\xd0\xa3\xf0\xec\x81\x2e\x97\xc4\x0b\x22\x40\x9a\xf6\xe6\x0d\xf7\xc0\xcc\xef\xe6\xe1\x19\xc5\x23\xfb\x13\xc4\x9a\x88\x21\xdd\xe3\x26\x58\xc8\xf3\x09\x2b\x22\xd7\x9f\x64\x32\x08\xef\x34\xb6\x43\xda\x3a\x2e\xef\x7b\x24\x24\x74\xf0\x3b\xd5\x50\x32\xa0\x9d\x68\x6f\x4c\xfc\xbd\x95\xce\x2c\x66\xcc\x17\xb3\x2e\xc2\xb0\xf8\xca\xbe\x29\xf0\x30\xaf\xa5\x25\x02\x50\x41\x62\xe2\xcd\x4d\xaf\xca\x6e\xb4\x80\xc2\xee\x41\x61\x06\xf7\x4d\x2e\x95\x95\x5c\x36\x95\x45\x5d\x84\x26\xe1\xa6\xde\x0e\x1c\x6b\x95\x0d\xa2\xa2\x35\x12\x66\xdb\xf0\xbb\xc8\x2e\x02\xb0\xf0\x05\xac\x37\x9b\xb5\xab\x9a\xb8\x7b\x4e\xab\x2e\xa2\x06\x48\xbe\x74\xc6\x19\x6f\x02\xed\x6b\x1e\x89\x2e\x8e\xe9\xb1\x45\x3a\x79\x4a\x45\xf7\x93\xbb\xa1\xa8\x53\x20\xde\xb2\xd0\xc2\xdd\xed\x1c\x46\x5a\xa3\xf6\x1e\xc7\x22\xbd\xb5\x4b\x19\xd5\x1e\xf1\x35\x37\x86\xe9\x9b\x1d\x4d\xe2\xe6\x2a\x1f\x32\x82\xc9\x2c\x75\x99\xa7\x7a\x28\x6a\xe8\x82\x48\x08\x5a\x17\x17\xa3\x55\x85\xdc\xea\x91\xf2\x51\xc9\xbf\xda\xc4\x14\x35\x73\x37\x85\x6b\x2b\x44\x3e\x09\xdf\xfa\x15\x72\xdf\x0e\xdb\x64\x51\x5e\x3a\x18\x73\x6c\x39\xff\xe5\x5d\x0b\x68\x9d\x34\x91\xbd\x8f\x0a\xd7\x4a\x0b\x9a\xf5\x16\x29\xf8\x20\x7b\x12\xb1\x21\x2b\x4c\x5c\x71\xc6\xce\xe5\xdf\x02\xc2\x9a\xb9\x32\x8b\x6c\xc0\x76\xc1\x94\x30\x76\xec\x98\x7d\xe7\x08\xbe\x1d\x14\x51\xf9\x31\xd0\xf4\x63\x6b\xc4\x37\x70\x9f\x18\x8c\x6d\x9b\x60\x7b\xe4\x61\xb0\xb2\x28\xc2\x04\x74\x60\x67\x6a\x5e\x55\x91\x51\x37\x86\x6d\x6d\xe6\xae\x32\x64\x71\x58\x9f\x89\x04\xd2\xc5\x62\xe6\x9e\x15\xc7\xcd\xdd\x2b\x3e\x65\xc9\x1e\xfe\xa9\x1d\xa2\x8b\x6a\x32\xa2\xee\x1b\x7b\x03\x58\x62\x73\x16\x24\xdf\xec\x37\xe2\x19\x33\x21\x20\xd6\x3b\x50\xbf\xc9\xaf\x01\x6a\xcf\xc1\x91\x42\xd9\x99\xf4\xf9\x87\xda\xca\x96\xfc\x99\x59\x9b\x7e\x93\x95\xcc\x02\x87\x5d\xc0\x67\x14\x70\xbf\xfd\x56\x25\xca\xf2\xfb\x15\xd7\xcb\x96\x78\x1c\xc3\x33\x6f\xb9\x38\xd9\xaf\x73\x62\xe7\x31\x0e\x0e\x4a\x24\x44\x21\x97\x86\xbc\xa3\x7e\xd0\xd8\xaf\x3c\xff\xf9\xf8\xaa\x0a\xdd\xd4\x44\x24\x72\xc3\x08\xd4\x77\xad\x8a\xa5\x67\x5b\x56\xb4\xee\x4e\x66\x52\x2e\x58\x59\xa7\x9c\xe5\x54\x92\x12\x1e\x8b\x1a\x5d\x9c\x3e\x07\xa4\xca\x2d\x99\x6d\xd4\x48\x2b\x7a\xef\xd0\x55\x9e\x54\xa1\x39\x48\x22\x31\x2b\x17\x3e\x24\xb7\x12\x66\x04\x3c\xbc\x2c\xa8\xf1\xaa\x0a\x5a\x96\x20\x9a\x33\xc3\x55\x5d\x74\xae\x03\x40\x64\x52\x24\xc2\x7c\x0c\x30\x44\x0f\x82\x59\x76\xa6\x93\x90\xae\x3b\x49\x53\x9e\xe1\x92\xe6\xe1\x79\xca\xad\x01\x1b\xa2\x3e\xa8\x48\xc6\xb4\x75\x7f\x06\xab\x83\x16\x4b\x09\xf3\x29\xee\xfd\x78\x8a\x6a\xd1\x8a\x8a\x40\x55\xdc\x6d\xa8\x60\x88\x55\x15\xe1\x0f\xc7\x8d\x9f\xeb\x2e\xf0\xc6\xd8\x63\x51\x75\x05\xc7\x08\xb9\x0d\xac\xac\x2a\x0b\xd8\x8a\x45\xa9\xad\x02\x4e\x7a\xba\x5d\xe7\xb2\xc5\x30\xdb\x29\x96\x2d\x0d\x91\x99\xc5\x53\x4e\xfa\x90\x11\x75\xd4\xb7\xf1\x72\x10\x16\x47\xc4\x36\x56\xb9\xb7\x16\xc3\x79\x6a\x2f\xab\xa1\xb2\xc6\xc3\x39\xc0\x3c\x92\xa9\x2b\xc4\x86\xdd\x7d\xb2\x6d\xaa\xbe\x85\x84\x6d\x16\x98\x25\xa6\x0e\x76\x57\xd9\xa5\x43\xe0\xfb\xf0\x10\x8c\xc0\x4d\x8a\x02\x42\x1c\xfc\x3a\x5c\xf1\xfc\x1c\xc0\xbd\xc5\xc5\x2b\xd9\xb2\x51\xcd\xb5\x53\xd2\x22\xba\x8d\x86\x68\x4e\x0f\x27\xf5\x22\xa6\x33\x0b\x78\xbb\x18\x76\xcc\xe7\x4f\x72\x86\xd8\x33\xf4\x47\x59\x66\xb8\x35\xcf\x46\xf3\xf1\x69\xab\xcc\x28\x8e\x71\x86\x9d\x3e\xf6\xdd\x31\x37\xf5\x92\xb4\x3d\x1
a\xc9\x30\xe0\xa8\x32\x51\x47\x77\x0b\xbf\xc1\xdb\x11\x78\xfb\x80\xbc\x04\x58\xcf\x32\xd0\xb3\x0c\x4b\x52\x31\x6d\xff\xef\x64\x4c\x6d\x3e\xe8\x4f\x00\x33\x4b\x03\xf7\x71\x50\x57\x25\x63\x65\x5e\x5b\x65\x0a\x0c\x0b\x5d\x09\x7d\xd4\xcc\x98\xef\x60\xf3\x41\x12\xd1\x19\xc5\xfa\x88\x05\x36\x99\xb3\x74\x20\x9c\xe1\x13\x5a\x18\x7c\x72\x77\x19\xf3\x89\x3f\x6f\xe6\xeb\xe4\x69\xb3\x44\xc3\x0c\x73\xa0\xfb\x04\x20\xe1\x8e\xec\xea\xf1\x3b\x26\xd2\x88\xec\xc1\xed\x21\x60\x7c\x1b\xbf\xd6\x0e\x9d\xcc\x73\x77\x06\x35\x8d\x98\x5d\x9a\xa2\xbe\x8d\x05\x58\x68\x14\x17\x05\xa8\x8d\x94\xc0\x4a\x38\x8b\x40\x03\x8b\x30\xf6\x11\xc8\x85\x98\x04\x63\xd9\x40\x88\x55\x78\x43\xd6\x2e\x6e\x29\x02\x44\xb1\x96\xf6\xa2\xea\xc0\xbc\x88\xf1\xd5\xe1\x5c\x9c\x9e\x69\xb5\x87\x7d\x9c\xcd\x17\x1e\x0d\x6b\x33\xdb\xa2\xf0\x70\x8b\xa2\xb4\x53\xa4\xe0\x9c\xf3\xa0\x84\xdb\x63\x1f\xe1\x63\x9d\x02\x34\xb4\x83\x82\x6a\x44\x5c\x56\x6c\x53\x44\xa5\xda\x3b\x6c\x79\x02\x6e\x60\xf1\x64\xb9\x7b\x27\x68\xe2\xc5\xc0\x2b\xe7\x07\x62\x9a\x7c\x57\xeb\x97\x39\x81\x9e\xe2\x12\x8c\x6d\x5b\xe1\x8f\x1c\xaf\xe2\x7d\x1d\xc3\x79\x5d\xf2\x70\xad\x6d\x13\x23\x51\xc1\xc7\xea\x48\xa9\xea\xca\xa7\xf2\xce\xc3\x3f\xba\xbf\xe7\xc5\x23\x04\xe7\x82\xe1\xf7\xcc\xb2\xaa\x53\xcc\x26\x85\x10\x62\x31\xd2\x04\xe1\xf7\x9d\x1b\x2b\x7f\x3d\xab\x5c\x5c\x66\x1d\xb7\xb8\xd7\xe0\xf5\x71\x89\xde\xb0\xa2\xde\x0e\xc1\xb6\xfe\x7b\x14\xcd\x7a\xcb\x2a\xfa\x8b\xcd\xea\x06\x0c\x39\x2b\xe8\x8c\x5c\x56\x1c\xd8\x55\x98\xe7\x5e\x59\xba\xf1\x16\x6a\x6f\xef\x21\x43\x52\x6d\x64\x6b\x11\xa5\x91\x17\x6a\x8a\xa2\xe7\xc3\x92\x04\x11\x09\xc9\x11\x97\x05\xc7\x79\x37\x3f\xcd\xef\xaf\xe7\x41\xa2\xc4\xac\xf6\x48\x96\x34\x57\x11\x96\xe7\xf4\x89\xf8\x2b\x3d\x15\x93\x3e\x1e\x0d\xc5\xa0\xa5\xcb\xcb\x4f\x0d\x6c\x17\x3e\x03\xed\xe2\xc1\x04\x8d\x04\x5b\x25\x30\x4b\xdc\x32\x4b\xff\x8f\x45\x67\x6c\x6b\x11\x02\xf0\xe9\x41\xff\x95\x05\x86\xb2\x10\x72\xfc\x7c\xf3\x36\x36\xda\x60\x0b\xd2\x76\x6e\xde\x29\xa3\x2a\x32\x86\x97\x63\x2c\x1f\xe2\xdb\xdf\x27\x46\x80\xad\x9b\xbd\x3b\x8f\xa3\x8a\xca\xad\x7a\x49\xf9\x3d\x18\x1a\x1e\x2d\x8d\xa0\x33\xb0\xdf\x50\xb8\x70\x4c\xc4\x18\xa0\x7d\x5a\x82\x32\xa9\xe0\x90\xa2\x5d\x9b\xed\xa6\x7e\x4e\xdb\x06\xd7\xeb\x36\xde\xc5\xf8\x2d\x2a\x96\xdc\x65\x2f\x10\xef\xd1\x00\x2d\x00\x12\x82\xa6\x85\xfc\x07\xbf\x57\x38\xe6\x9d\xdd\xc8\x4e\xda\x73\xd3\xa7\x80\x7b\x9a\xc8\xfa\x71\x3c\x76\x4d\x37\x61\x1e\x0c\x47\x1d\xec\x2a\x77\xbe\x57\xdc\x15\x0f\xc0\x0e\x06\x91\x6c\x1f\x42\x7e\x08\xc4\xf7\xf1\xc1\xc5\xdd\xce\xcc\xce\x70\x26\x20\x5d\x55\x2c\xe3\xe7\x3c\xce\x97\xf3\x48\x93\x12\x5f\x98\x85\x46\x4a\x46\xdc\x87\xd5\x47\xf5\x16\x5f\x94\x8d\x0f\x09\x5e\x1a\xb1\xa5\x25\xf3\xf0\xdd\xc5\x21\xf1\x36\xe5\x1d\xab\x0a\x6b\x16\xd1\xd2\x61\xd9\x83\x1a\xd0\xbd\x70\xca\xe7\xde\x17\x66\xe4\x33\xb8\xd7\xf7\xb2\x51\x3d\x00\xd8\x24\xb9\xa1\xce\xcb\x98\x1b\x11\xfb\x95\x98\x63\xa9\x5a\xe0\x3c\x47\x50\x83\xf9\x61\x73\x1c\x0e\xb2\x5f\x70\xf6\x40\x3c\xe8\x1b\xc0\x76\xd8\x5a\xe0\x36\xbd\x83\x9a\xd4\x3b\x58\xa2\x3e\x84\xe8\xc3\x66\x88\x7d\xbb\x50\x50\xb9\xe2\xa9\x10\x0e\x6b\x2c\x46\x24\x3a\x57\x40\x42\x8c\x8d\xdc\x03\x17\x93\xa0\xe7\xdc\x48\x5e\x4d\xb3\x85\xf4\x8a\x73\xde\xde\x61\x4f\x6a\xd3\x7e\x61\xe5\x01\x00\xb0\xb2\x0b\xb7\xa3\x56\x26\x10\x61\x5a\x3e\x45\xd1\x95\x55\xf7\x6d\x4a\xde\xb0\xa6\x74\x75\xa6\x2d\x0b\xfa\x88\x97\xde\xf0\xba\xcf\xc2\xe6\x9d\x22\x1b\x15\x4d\xfa\xaa\xc7\xf8\x92\x07\xfe\x56\x57\xe5\xde\x20\xc3\x36\x07\xe4\xbc\xc2\x20\xd5\x17\x9e\x6f\x47\x9d\xc0\x1f\xf8\x6d\x3d\xe6\x5d\x36\xe7\xf1\x11\xc3\x09\x1d\xdf\x47\x03\xa1\x14\x8c\xf1\x7a\x12\x26\xc4\x39\x11\x8d\xc3\x02\x72\xc5\x0b\x8b\x7e\xd1\x5e\xd1\x78\x81\x
f6\x28\x96\xc7\xd4\xc1\xca\x56\x42\x56\x73\x82\x37\x90\x17\xa5\x99\x13\xfb\x03\x50\xf5\xc3\x07\x9b\xd5\x51\xca\x03\xde\x49\xd0\x34\x67\xed\x05\xf0\xb2\x41\x74\x9b\x9e\xed\x34\x7b\xb3\x66\xf6\x55\xdb\x68\xae\xac\x59\xb5\xd8\xf1\xb3\x88\x89\x1a\xc7\xcd\x8e\x95\x1f\x9b\x86\x14\x74\xe9\x90\xa5\xf1\x07\xb3\xe5\x12\x8c\x67\x4b\xcb\xe0\xc6\x16\x6e\x8f\x34\x3b\x1d\xb4\xb6\x95\x3d\x21\xfe\x3b\x49\xf7\x50\x0d\x1e\x01\xcc\x39\xb3\x3c\x62\xdf\x04\xbb\x68\xa1\xd8\xc8\x2b\x1e\x2e\xd7\xfb\x54\x75\x32\x1c\x68\x46\x76\xe8\x40\xdb\x9e\x08\x96\x26\x51\xe2\xbc\x6b\x00\xb0\x44\x65\x43\xbf\xd9\x22\x82\x4f\x53\x40\xeb\xde\x67\xcc\x20\x05\x05\x7d\xf8\xd8\x73\xb8\x72\x75\x61\xbe\xa0\x2d\xd1\xc3\xee\x4c\xe8\x41\xca\x8a\xd4\x8e\x9c\x42\xd2\x3a\x83\x2d\x6b\x1f\xcc\x6a\xd3\x44\x26\x98\xa7\x00\xf6\x29\x64\x09\xda\xe3\xb3\x9c\xeb\xd5\xa6\xab\x79\x85\xec\x90\x65\x78\x53\xbe\xc4\xcf\x80\x32\x51\xca\xb6\x74\x8d\xe3\xd8\xa0\x9e\xb7\x87\x16\x0b\x8d\x93\xcb\xab\xfc\xa1\x95\x0f\xf7\x6b\x21\x3d\xfe\xc1\xf1\xa6\xa9\x65\x1c\x41\x77\x62\x4e\xc6\x02\x70\x60\xfa\x02\x2b\xdf\xea\x38\x29\x4a\x19\xd1\x0a\x48\x62\x40\x3f\x7d\x7f\x93\x3a\xb0\xfe\x3b\xa3\x88\xcc\x7a\xc1\xac\x7e\x27\xd5\x16\xc5\x52\x8c\xc7\xca\xd1\xfe\x73\x8a\x8a\xe2\xaf\xaf\x37\x73\xc2\x54\x00\xc8\xc3\x43\xe7\x10\x4c\x87\x68\xa0\x24\x57\x12\x68\x4c\xf6\xa6\x7e\x98\x81\x73\x8b\x46\x88\x20\xba\xd2\x9b\x22\x10\xe3\xee\xa3\xa2\x83\xe6\x86\x20\x7b\x90\x4d\x09\xaa\x11\x0b\x49\xe7\xb4\xb4\x52\x4b\xa7\x06\xc1\xe1\x02\x23\x9c\x97\x25\x3c\xc0\x8d\xcd\x5f\x72\x27\xcc\x02\x7b\x32\x82\x93\xb5\x46\xc1\x7a\xe1\x0b\x3e\x05\xc1\x1d\x42\x0e\x9f\x4b\x0e\x44\x41\xa2\x39\x9b\x36\xad\xf3\x0c\x1b\x9b\x15\xc4\x3f\x5b\x93\x17\xd9\x78\x43\x56\x2b\xd4\xb9\x04\x9d\xc4\xc5\x3f\xf0\x73\x3f\x35\x6a\x22\x0a\x12\xf1\xbc\x22\xd1\x6d\x8c\x46\x5f\xe1\xd8\x5d\x38\x20\x3a\xb2\xa8\x9b\x83\xff\xce\x84\xf8\x21\x78\xde\x42\xa7\x5b\x04\x3c\x3c\xdf\x71\x64\x40\x8c\xf3\x5f\xea\xb9\x53\xc8\x49\xfc\x0a\xa7\xb6\x5f\xcf\xb6\x2b\x88\x1a\xd0\xd1\xdb\xdd\xe5\x81\x7c\x8a\xaf\x7e\x54\xb2\x2d\x2d\x4a\x62\x89\x77\x53\xdc\x65\x9e\x07\x2f\x6d\x29\x9a\x5e\xab\x35\x9e\x98\x7b\x3d\x4a\x6b\x77\x8a\x04\x98\xcc\x3a\x44\xa1\x70\x6a\x21\xd8\x50\xff\x32\xb4\x51\xc6\xac\x27\x75\x00\x82\x66\x0c\x3d\x82\x84\x3b\xec\x93\x36\x5e\xc8\x88\x93\xc8\x9d\xb9\xa1\xe1\x5d\x70\xce\xf6\xd2\x51\x9b\x5a\xa8\xd6\xb7\xae\x8d\x97\xa5\x43\x65\x13\x1f\xb5\x45\x47\xce\x82\x52\xf7\x21\x71\x99\x57\x62\x81\x56\x1f\x6e\x16\x57\xce\x2b\x51\xae\x43\x95\x0e\x2c\x5f\x11\x74\xa0\x05\xf3\x03\x7e\xa3\xbf\x0e\x25\x32\xe1\xf9\x43\xa8\x21\x2d\x17\xba\xd8\x49\xd7\x81\x67\x3a\x48\x04\x0d\xaa\x54\x33\x26\x2a\xae\x61\xa9\x1e\x1b\xfb\x14\x9c\x49\x1c\xac\x37\x12\x2b\x0f\x09\x35\x74\x96\x6c\xdc\x5a\xa3\x23\x8f\xa6\xad\xd8\xe7\x4d\x70\x89\xdd\x63\xea\x5b\x40\xb6\x86\xd2\xfd\x29\x2c\xe2\x55\x36\x46\xf7\xa3\x84\x06\x2c\x54\xb6\xd5\x10\xa4\xec\x80\x69\xec\xb3\x73\x61\xb0\x68\x1b\xa9\x3f\xa2\x8d\xf2\xf6\x9a\x57\x9a\xeb\x8d\x34\x40\x54\xfe\x0a\x3f\x7e\x8a\x7b\xf0\x56\xc7\x6a\xee\xc7\xa5\x15\x08\x98\x1c\x71\x69\x26\xc9\xbe\x2c\xbf\xb1\x8d\x3c\xee\xf0\xd8\xd1\xf2\x56\x24\x97\x02\x96\x86\x02\x88\x7d\x54\xd7\x2b\xca\x86\x03\x6f\x6d\x94\xa0\xdd\x4d\x73\x91\x08\xb0\xa0\x19\xfe\xca\xfa\xed\xcf\x0f\xd0\x0f\x0b\x39\xb0\x6b\x1e\x2d\xca\xbc\x55\xf5\x29\x6d\xdc\x95\x8e\xa4\x4f\x75\x0a\x39\x1d\xdd\xdb\xe0\x14\x9b\xf7\x16\x73\xca\x54\xba\x49\xe6\x3f\xae\xa4\x82\x2e\x76\x02\x74\x26\xee\x28\xa6\xca\xc0\x39\x4f\x27\xb4\x6e\x2c\x6a\xb2\x45\x61\x60\x38\x15\xe1\x16\x43\x11\x99\xb4\x6b\xb4\x2d\x08\x4c\xa9\x9e\x1d\xd1\xd0\x77\x7f\xff\x17\x8a\xa2\xd3\x05\xe9\x31\x1e\x54\xdc\xbb\xc2\x1e\xe6\xad\xfe\x7e\x64\x95\
x54\x6a\xb1\xdc\xb8\x35\x90\x4f\x61\x51\xdf\x4c\xc4\x6b\xb7\xb7\x72\xc7\x46\x2e\xfd\xe1\x95\x09\x55\x39\xc8\x57\x62\x58\xe5\x94\x03\x7d\x4c\xbe\x0b\x44\x85\x4c\xc5\x9f\x88\x57\x1b\x62\xbc\x65\x31\x1a\x9c\xfb\x13\x4a\x64\xe2\x80\x14\x42\xd3\x54\x7d\x68\xd0\x61\xb0\x1a\x8b\xf8\xde\xe7\x12\x3c\x25\x05\x0c\x68\x40\x65\xf6\xb5\xd1\xee\xce\x6a\xe4\xbc\x23\xc0\xbc\x85\xe1\xc4\x07\xa0\x94\xa5\xde\x5e\xaf\xed\x32\xaf\x18\x53\x84\x74\x0c\x7b\xe2\xd6\x45\x51\x48\x00\x4a\xe3\x20\xab\x8d\x78\x52\xbd\xae\x17\xd6\x14\xdd\xe9\x8e\x6c\x36\x53\x89\xab\xce\x78\x1a\x66\x9f\xc4\x8b\xab\xab\xd3\xad\x85\x3b\x6b\x55\x11\x3b\x69\x82\xa4\x1e\xb4\x3f\xc1\x66\x82\x85\xd5\xfb\x85\x87\xf6\xdc\x59\x26\xa4\x57\x73\x0a\xd8\x05\x30\x36\x08\x46\xf0\x71\x30\xe9\x1f\x5a\x2e\xb6\xe8\xc9\x42\x4b\x87\x6a\x39\x45\xd8\x13\x78\x0b\xbd\xaf\x3f\x2d\xfd\x53\xe9\x03\x0f\x4c\xbc\x73\x0b\x6d\xbf\x57\xa9\x71\xee\x47\x02\xff\xcc\xc2\x1b\xac\xe6\x14\x5f\x2c\xb9\x6a\xd4\xdb\x45\xeb\x74\xe7\x72\xa8\xe2\x3e\xd5\x0f\xfe\x74\xa5\x03\x25\xcb\xde\x06\x45\x6b\x31\xf1\xf2\xb3\xff\x74\x62\x17\xd0\x5f\x87\x33\xd3\xd4\xd7\xb1\x32\xb3\x60\x0c\x1e\x5d\xb0\x08\x68\xf7\x8f\x6d\x46\x3a\xb6\x12\x44\x3a\x7e\x58\x11\x7c\x83\xb0\xc9\x65\xba\x06\x3c\x60\x11\x45\x4f\x64\x4b\x65\x5e\x04\x12\x24\x8b\x64\x31\x3a\xfe\xf9\x2f\x9f\x30\x20\xa7\x9b\x3a\x0f\xb4\xf1\x27\x65\x34\xa3\x6e\x61\x31\x38\x44\x9b\x16\xcb\x0d\x05\xe8\xcb\xc1\xf5\x66\xae\x5b\xbc\x2f\x50\x87\x31\x63\x99\xc2\x13\x1d\x65\xd7\xb3\xb0\x9b\xff\x52\x1a\x90\x92\x84\xd7\xcb\x3c\x47\xbb\x85\x84\x59\x63\x42\x9d\xb6\x59\xe4\x34\x79\xfd\x38\x73\x95\xc4\xab\x40\x23\x02\x76\xcd\x23\x62\x4a\x16\xfc\x77\x96\x55\x81\x5d\x97\x88\x62\xb9\x34\x13\x3b\xce\xd1\x1f\xa7\x68\x85\x6c\x48\xdd\x87\x70\xec\x7e\xc8\x61\x9a\xc4\x2e\xa3\x15\x9d\x52\x27\x35\x01\x4f\x2f\xb6\x12\x38\x9f\x7a\xa9\xb6\x59\xc5\x2d\x36\xa8\xd3\x61\xf7\x59\xf6\x7c\x38\x17\x88\xe3\x7a\xbf\x0b\xe9\xfa\x7e\x9c\xaa\xea\x57\x0e\xbb\x34\x2d\x2e\x80\x76\x38\xcb\x7f\xa9\x02\xf2\x6c\xd9\xdf\x0f\x7f\xd7\x28\xb7\x9a\x3a\x4a\x23\x9d\x8b\x74\x6c\xaf\x1a\xbb\x36\xc7\x2e\x1d\xda\xe6\xc9\x93\xdf\x69\x9f\x10\xdb\x04\xe6\xa8\x56\x96\xa8\xf8\x6f\xaf\xa8\x32\x7b\xc0\xde\x5f\xdc\xcf\x49\xac\x10\xa4\xb0\x8a\x64\x5a\xe8\xa2\x7e\x63\xa6\xa2\x57\x2b\xdc\x02\x25\xb9\x47\x5c\x2a\xb6\xfa\xa4\xd3\x96\x68\x0b\xb6\xb0\x7d\x40\x99\xa6\x56\xb9\x78\x6b\xf9\xf3\xa6\x22\xbe\x87\x16\x1b\xa3\x2c\xea\xaf\x28\x8b\xa0\x2b\x74\x3e\x0f\xe1\xfc\x8e\x54\x9c\x1a\x5c\x79\xef\x22\x26\x2f\x0d\x68\xf2\xbb\x21\xe9\xe8\xf2\x62\xbd\x9d\x06\xc0\x73\x25\x9a\x33\xa7\x84\xe1\x7d\x2b\xbf\x15\xc7\x1d\xb6\xe9\x3a\xda\x1e\x9f\x0b\x61\x9c\xbd\x66\x4a\xbb\x6c\xe1\xd1\x12\x11\x2c\xea\x70\x24\xbb\xb3\xa1\xd9\x09\xc5\x2a\x80\x86\x14\x4e\xf2\xf8\xb5\x06\xe4\x36\xe8\x2a\x21\xb2\x91\xd5\x0e\xf5\x2f\xfd\x50\x54\x1f\x3c\x72\x1f\xc4\x68\xa4\xb0\xef\x18\x19\x92\xb7\x0b\xf3\x9c\xda\x6d\xd8\x5a\x74\x52\xd7\x20\xc9\x7a\xb7\x53\x73\x19\x8b\x5a\x36\xc0\x4f\xa2\x33\xde\x5e\x82\xbf\xf6\x74\x46\x52\x3f\x97\xc0\xa1\x42\xe8\x07\x04\x06\xf8\x09\xb6\xe2\x82\x6a\x26\x13\x3c\x31\xce\x93\xb6\x8f\x52\xa0\x53\xec\x51\xa7\x97\x20\x7e\x26\x97\x4e\xf5\x8d\x65\xbf\x2b\xbf\x03\x91\x5c\xe9\xfc\xc2\x1e\x27\x18\x11\xa9\xd6\x84\xe2\x1a\xdd\xa6\xed\x2a\x43\x3e\x54\x8e\xbc\x43\xf7\x08\xb3\xbe\x05\xcc\x76\x46\x57\x2f\xdc\x49\x57\xd0\x32\x4c\x13\xbd\x5a\x42\x8d\x02\x81\xca\x17\x1e\xfa\xe2\x98\x14\xe1\x87\x4e\x8e\xd9\x5a\x5b\x40\x24\x58\x53\x6e\x6a\x83\x11\xe0\x38\x8d\x41\x98\x41\xbf\xa9\x13\x23\x88\x72\xf0\xdf\xfc\x34\xca\x35\x8e\x39\x6a\x32\x51\x53\x7f\xa8\x4f\xd6\x3e\x98\xc4\xac\xb4\x3f\xc3\x72\x04\x36\xc5\xa8\xa7\xaa\x3b\xbe\x48\xea\xcd\xcc\x89\x50\x18\x34
\x96\xe2\x01\xfd\x32\x6c\x4c\x66\x13\x5b\x43\xf0\x06\xe9\x62\xe7\x7b\xcf\x17\x93\xf9\x9f\xd3\xad\xe1\xc5\x8d\xfa\x22\x51\x3d\x0a\xd9\xc9\xcc\xd9\xc0\x86\xe1\x66\xb5\x1e\x2e\x8b\xf9\xa8\xd9\x1b\xb8\xf1\x76\x52\x56\x54\x8a\x52\x7f\x68\x36\x24\xf9\xe7\x4c\xe2\x99\xc6\x01\x20\xe2\x29\xd9\xe0\x6d\x3e\x71\x96\x59\x1e\xc4\x5a\x2f\x49\xd2\x71\xbd\x52\xfd\x59\x28\x49\x5a\xaa\x84\xb3\xd3\x4d\xd8\x55\xf0\x6b\x8e\xd0\xeb\xb0\x3b\x5e\x42\xbe\x35\xd4\x5e\xd0\xb2\x87\xeb\x0e\xf9\x86\x9d\x09\x02\xef\xba\x88\x8b\x01\x19\x95\xb0\x4a\xad\xce\x2a\x92\x88\x5a\x23\x64\x3e\xb6\x1c\x5a\xb2\xfb\xc8\xf8\xd3\xcb\x96\x6c\xcc\x35\xc7\xaf\x4f\x40\x17\x44\x95\x24\x04\xb7\x07\xd8\xb6\x64\x5c\x9c\x38\x7b\x0b\xcd\x52\x46\x90\x5b\x3c\x88\xa7\x92\x8b\x40\x89\xb3\x45\x5e\xde\x5c\xb2\xc8\x28\x0e\x59\xa0\x96\xed\xaf\xb4\xcb\x8e\x10\x36\x89\xf6\x30\x77\xda\xa4\xc2\x81\xc9\xe9\x2c\x0f\xcf\xe2\x98\x97\xa0\xd6\x4e\xa0\xd0\x7e\x3f\x9c\x3c\x9a\x90\x59\x52\x20\x0a\xc9\xad\x20\xb3\x4a\x3f\x76\x2e\x21\x4a\x7b\x48\x7b\x95\xe5\x7b\x5e\x84\xd5\xd2\x7e\x75\xec\x47\x66\xe9\xf9\x3f\x3e\x4b\x64\x81\x1b\x91\x45\x72\x03\x4e\x95\xbc\xa6\x41\x1c\x02\x67\xdb\xea\x2e\x29\xab\x5d\xcb\xb7\x9a\x18\xe3\xbf\x70\x16\x2e\x74\xde\x7a\x84\x0f\xd2\x68\x67\xa8\xdb\x82\xcd\xea\x4f\xf9\x02\x1d\xa1\x88\x3a\x77\xa8\x5d\x7e\x37\x57\x42\x0e\xcd\x54\xbb\x8e\x30\xc8\xc8\x4b\xf3\x87\xc2\xdd\x0f\x56\x98\xf3\xaf\xbb\x21\xb2\x6c\xa0\xbc\x69\xee\x66\xa6\xf4\x2c\x09\x45\x92\x9e\xba\x27\xb5\x24\x75\x11\xc8\x74\xd4\x26\x17\x67\xc9\xc3\x81\xee\xdc\x70\x5a\x91\x3c\x31\x88\xff\x43\x35\xfe\xee\x8c\x3c\x41\x41\x1b\xf8\x7e\x83\x58\x98\x51\x63\x82\x85\x62\xa2\x30\x47\x0f\x05\xf5\x66\xb2\x8a\xee\xc8\x89\xa2\x94\x39\x52\x8e\xc5\x99\x5f\x2c\x27\x29\xe7\xf3\xb8\x8b\x26\xf0\xa7\x9e\x6a\xfd\xbe\x13\x0f\x90\x6f\xa7\x4f\x3a\xd7\x55\x75\xef\x2d\x51\x49\x6e\x5a\x89\xef\xa3\x56\xc8\x2d\x37\x2b\x3e\x07\x33\xd9\xfb\x97\xa2\xbd\x21\x93\x6d\x76\x15\xb4\xcd\x46\x4e\x2d\x3c\x69\x7b\xb6\xb2\x2d\x48\x35\x7b\x5a\x07\x95\xb1\xb0\x40\x2f\xf6\xcc\xd1\x85\x25\x42\x5f\xc7\xd7\x61\xc2\x43\xd3\xed\xf7\xe1\x1e\xb7\x38\xbd\x66\x1f\xcf\x49\x46\x40\x57\xcc\xfe\x08\x91\xbb\xff\xce\x44\x8f\x75\x09\xb4\xac\x28\xad\xc2\x32\x56\xdd\x0d\x08\x4f\x04\xe1\x21\x0d\x20\xee\x6f\x0c\x15\xf6\x88\xc3\xc1\xea\x21\x1c\x87\xc0\x1a\xfc\x18\x6e\x70\x34\x5f\x28\xf9\x32\x6b\x5b\x61\x1a\x48\x27\x9c\x12\x63\x28\x08\xa2\x30\xf5\x60\xd9\x54\x04\x65\x5e\x11\x6c\x62\x12\x6e\xee\x5c\x9f\x3f\xa8\x89\x91\x39\x19\x9f\x2f\xc5\xb4\x49\x94\x29\xc7\xd1\xe9\x7b\xbd\xf6\x4c\xa8\x07\xc8\x66\x14\x7e\xd5\x08\x06\x5f\x86\x15\x76\xb8\x4e\xa5\x59\x51\x2f\xf0\x53\x9f\x55\x67\xc5\x42\xd6\x4e\xca\x1b\x7b\xd0\xc4\x42\x6f\x8d\xd5\xcd\xd8\xd2\x84\x9a\xdb\x04\xd3\x61\x61\x4d\xb4\x50\xf4\xe7\x26\x9c\xbb\xa3\xb1\xc9\x51\x9e\x38\xd8\x5a\xcd\x0a\xb2\x62\x49\x69\x32\xbb\xa0\x16\x5d\x38\xfa\x2d\x43\x28\xdc\xfc\x57\x94\x18\x7b\xc1\x05\x5d\xc6\xc5\x8e\xb2\xd4\xc9\x03\xdd\xd5\x5f\x85\x33\x13\x53\x42\x33\x96\x83\x50\x71\x66\x2b\xf8\x26\x25\x87\x9e\x91\x0e\x44\x0e\xed\x41\x24\x6d\x6f\x31\x49\x89\xfe\x94\x60\xff\x87\x18\xf5\x43\xb9\xff\xbb\xac\x82\x87\x1f\x59\x4d\x8e\x96\x97\x32\xed\x3b\xb2\xc2\x84\xc5\x1c\x3c\xdb\xaf\x88\x7a\x4f\x82\x9a\x54\x1d\x85\xd1\x34\x4b\x45\x3b\x00\xdf\x41\xe3\xad\x35\x24\x85\xb4\xb0\x81\xae\xb0\x47\x4c\xa8\xba\x28\xef\x2e\xcc\xa4\xe3\xee\x5a\xf9\xcc\x82\x9c\x26\x2e\x72\xb7\xc7\x24\x96\x7c\xb1\x41\xa6\xc7\x7e\x20\xe3\x22\xc2\x70\xf7\xd6\x84\xf0\xcb\x66\x60\xc9\x0e\x3b\xde\xa2\x16\xda\x77\x37\x7e\xae\xfd\x45\x2a\xd3\xa8\xf9\x88\x01\x8c\xc2\x98\xe2\x4f\x8a\x96\xf2\xac\x45\xfb\xe9\x22\x55\xbd\x47\x48\x37\xee\x35\x91\xe5\xf1\xb8\x3a\x44\x1a\xcf\x18\xa6\x2
0\xbd\x49\x83\x5c\x3c\x5e\x64\xaa\x5a\xd9\xcf\xbe\xd3\xe5\xad\x22\x5b\xb1\xc3\xef\x2e\x3a\x31\x53\xc5\x9b\xc4\x86\x64\x55\x14\x4b\x7b\xc9\x2e\x88\x70\x59\x2b\x33\xa6\xab\xde\x89\x72\x9a\xae\xeb\x98\x24\x25\x3b\x4d\x6f\x69\x30\x4f\x77\x7e\x45\x1e\x1a\x33\xaf\x5d\x3d\x98\xae\xe5\x7c\xb2\xf9\x7a\x40\x20\xf7\x08\x7e\xa4\x3a\xf0\xf0\x24\x70\xfa\x6c\xb0\xde\xb9\xd4\xfd\x53\x8a\x08\xfb\x67\x5e\x0f\x62\xff\x22\x57\xa0\x0d\x4d\x8d\x2d\x54\xbf\x1b\xe2\xe3\x28\x35\x51\x1e\xc9\xb5\xe1\x79\x18\xa0\x6f\xc1\x43\xc3\xce\x49\xdc\x5c\x0f\xc5\xab\xae\xda\x07\x0f\xff\xc6\x22\xda\x71\x3f\xe1\xca\x24\x76\x39\x65\x2a\x7c\xdb\x41\x08\xe8\x52\x1d\x23\x20\x0b\xb7\x45\x83\x3c\xb8\x58\x2a\x95\x92\xa4\x10\x72\x69\x35\x23\x11\x77\xfd\x32\x88\x73\x36\x4a\xe6\xfb\xff\xe4\x7f\xcf\x2e\x26\x26\x31\x1a\xdb\x6c\xe6\x30\xe4\xf8\x8f\x04\x69\xdc\x10\x2b\xef\xc5\x92\x22\x2e\xbb\xb3\x61\x95\x3d\xd8\x55\x6d\x28\xab\x52\x8c\x4b\x50\xd1\xf2\xc1\x18\x6c\xb1\xb9\xcf\xf3\x0f\x87\x4a\x3a\xcf\xa5\x27\x1b\xd7\xc5\xc1\xb5\x5b\x8e\x88\xeb\x46\xcd\x88\x80\x7b\x26\xb3\x90\x81\x11\x96\xaa\xf2\x8d\x9c\xb6\x3c\x91\x52\xea\x81\x1e\x0b\x2c\x14\xe4\xa0\x78\xe5\x8d\x14\x11\xe5\x7b\x54\xbe\xa5\x75\x28\x7d\xf0\x64\xf3\x9d\x4e\x61\xaf\x8c\xbd\x00\xb2\xc2\x57\x31\x73\x5c\x1a\x42\xf7\x41\x88\x82\x24\x4d\xa7\xf5\x1d\xfa\x39\x13\xd1\x0f\x74\x84\x32\x0d\x20\xff\x42\x3d\x20\x6a\xea\xb2\xdf\x7b\xb6\x38\xdb\x35\x7d\x18\xa8\x4c\x74\xb2\x66\xd5\xbb\x16\xfa\x8a\xcf\x5c\x24\x15\x94\x86\x1a\x05\xab\xcf\x9f\x10\x19\x8c\x8a\xb1\x4a\x7b\x61\xf8\xb4\x5d\xd9\xd2\x03\x1c\x5c\xf3\x6f\xa1\xc2\x87\xec\x46\x38\xc5\x1a\xa4\x7a\x04\xe5\x28\x5c\x06\x9e\x63\x7e\x65\x52\x23\x22\x5a\xb3\x14\x68\x65\x32\x62\x9d\x5a\x48\xe8\xf0\xa0\x36\x26\x59\x60\x52\x12\x9e\x97\x12\xf5\x8a\x70\x53\x68\x6a\xc0\xe5\x3f\xaf\xec\x85\x74\xf4\xa0\xb7\x22\x70\x3c\x59\x35\x87\xbe\x80\x07\x3b\x8a\x14\xee\x8d\x17\xab\xa4\x96\xbf\x28\x84\x36\xba\x4e\x5d\x07\x80\x1d\xc2\x11\x6b\x00\x92\x97\x64\x9a\xb3\xb4\x45\xa4\xa0\xb6\xc7\x07\x12\x5f\x60\x7b\xbe\x4a\xa5\x48\xa0\xc8\x9f\x0d\x48\x45\x4a\xb5\xf3\x55\x08\xdb\x43\x88\xd7\xc1\x2c\x44\x83\xca\xad\xb0\x5f\x7f\x5d\x02\x00\x07\x3d\x8a\x07\x2b\xf1\x22\x36\x26\xba\xe0\x86\x48\xcb\x93\xb4\x66\x83\xc2\x60\xbf\x96\x81\xbb\xfb\x1d\x26\x41\xaf\xdc\xdb\x5d\xa2\xc1\x75\x58\x82\xa2\x06\xe6\x84\x35\xce\xc5\x4e\x10\x6f\x0e\x3b\xda\x1b\xd8\xe6\x18\x12\x03\xa6\x43\x39\x89\x77\x1f\x52\xa0\x22\xe4\xe4\xf9\x18\x20\x40\x85\xe6\x42\xf0\xa3\xd9\xc0\x52\x33\x05\x54\x49\xad\xc1\x67\x58\xc6\x7d\xc4\xc3\xec\x3c\x43\x3d\x19\x14\x5e\xf0\xbc\x05\xcb\x36\x6c\x64\x3b\xee\x30\x0b\x64\xc2\x67\x6f\x65\x67\x60\xfd\x0a\xaa\xcf\x21\xc9\xa5\x83\x89\xe1\xde\x34\x8a\xc8\xb4\x24\x35\xaf\xd9\x43\x21\xca\x34\x56\xce\x2f\xfe\xa3\x72\x07\x7b\x48\xcf\x46\x60\x8e\x56\x0b\xbb\x09\x5c\xa5\x7b\x13\x5b\xcb\xbf\x3a\xa4\x87\xfa\x6b\xfa\xea\xbd\xe9\xaa\x56\xaf\xd1\x69\xe5\x11\x38\xbd\x28\x6c\x2a\xe3\xb4\x2d\x04\xd8\x1a\xd9\x97\xd1\x34\xe3\x77\xd1\x98\x7c\x3f\x1a\x95\xda\xb3\x77\x3a\xe5\x70\xe0\x5f\xea\x84\x04\xad\xf7\x5b\xab\x62\x33\x98\x60\x40\x63\xa7\x4a\xdc\xd7\x14\x42\xa6\x76\xf0\xe1\xc0\x68\x20\x3b\x17\x2f\x54\xc7\x04\x13\xab\x95\x15\x67\x4f\x42\xe5\xfa\x08\xc5\x90\xd0\x72\x93\xf4\xd0\xff\x01\x6b\xdd\xd4\x8a\xc4\x0e\x63\x8a\x97\x32\x13\xbe\x9b\x93\x02\x0e\x37\x21\x4f\x0a\x0a\xd0\xd2\x9a\xa1\xec\x52\xad\x5f\x12\xaa\xd8\x0b\x73\xa3\x89\x6d\x84\xf1\x34\x99\x9b\x8f\x36\x23\x24\xe3\x0c\xcf\xa7\xe0\x3e\xd8\xcd\xbe\x78\x92\x60\x75\x6b\xa0\x88\xad\x63\xfd\x50\xcb\xc7\x1c\x54\x54\x4a\xf0\xea\x5e\x23\x21\xf9\xc8\x23\xab\xbe\x50\x8a\x63\x25\xb8\xff\xa3\xd8\xc2\xcb\x04\x22\x94\xf7\x81\xc0\x0c\x9b\xbf\x8b\xa3\x93\xa1\x12\xc5\x
23\x6f\x7e\xe8\x3f\x16\x70\xb7\x56\xcf\x6a\xcc\x03\x65\xd8\x99\xb8\x06\x42\x38\x4f\x9a\x50\x99\x4b\xd4\xce\x5f\x52\xf8\xfe\xe5\xbc\xa9\xad\x99\xa7\x42\x2c\x05\x4a\x43\x08\x7b\x2d\x82\xfc\xc8\x7e\xa4\x2d\xdb\x43\xc8\x80\x12\x61\xd1\xcf\xf4\x4a\x60\x52\x88\x0b\x81\x6b\x18\x91\xbb\xd4\x49\x78\x43\xd9\x36\x4c\x93\x7b\x94\xbf\x92\x4e\x9f\xd4\x8d\xd0\x69\xb2\xc5\x46\xce\x53\x28\x4a\x01\xfa\xfa\xcd\xfd\x79\x04\xa1\x7a\x11\x7f\xed\xa8\xe1\x2c\x85\xa5\x25\xb5\x3e\x8e\x69\xeb\xc2\xed\x3b\x5e\x43\xc7\x8b\xd4\x87\x6d\x23\xba\xa3\xa4\x6c\x49\x6e\x5b\x78\x2c\x79\x72\xfe\x3e\x27\xc3\x80\xb9\x86\xe2\xd7\x6c\x29\x52\xe0\x6d\x35\x93\x8a\x18\x85\x21\x28\x2f\xe3\xab\x4f\x28\x02\x89\xab\x12\x6d\xdc\x94\x98\xe7\xdb\x04\xe7\xcd\x35\x90\x83\xdb\x63\x0e\x41\xb7\x1f\x6a\x2d\x2a\x27\x7c\x1a\xd4\x5e\xf6\x56\xe7\x52\x55\x4f\x35\x7a\x2f\x35\x47\xcf\xc9\x40\x4b\x62\x77\xa5\x92\x15\xc6\x29\x7c\x45\x24\x68\x50\x1e\x35\x02\x45\x52\xd1\x5e\x62\x50\x67\x4b\x76\xde\xfb\xa6\x25\x39\x32\x3e\xd0\x36\xc3\xd5\xa9\xfe\x34\x23\x03\xcf\xb2\x89\x80\xba\x30\x82\xe4\xc0\x0e\x42\x2f\xfd\x33\xb2\xe1\x43\xd1\x9d\x46\xf1\x49\x9d\x91\x74\x31\xe6\x1e\x6b\xd9\x29\x7e\x02\x0d\x73\xe1\x2f\x0b\xe9\x20\x76\xb0\x37\xbe\xe0\x7a\x95\x96\x78\xe9\xd8\x00\xca\x68\x5d\x34\x59\x33\x06\x21\xa5\xb4\x65\xd7\x08\x14\x18\xb0\x49\xdd\xd3\x23\xc1\x18\x4c\xb4\x9c\x31\x9a\x60\xca\x6c\xfb\xf0\xaa\x43\x50\x4c\xfb\xcb\x3b\x10\x87\xeb\x32\xb3\x71\x0c\xe5\xc2\x99\xfe\x85\x90\xc0\x28\x29\x67\x1e\x5e\x8b\xa6\x49\x05\x2b\x33\x9f\x5a\x45\x1a\x9a\x8b\x22\xc6\xbc\x8b\xe5\xf3\x58\x17\xa6\xeb\x24\x5e\x5c\x45\x10\xb6\x82\xc4\xaf\x76\xb0\x47\xb0\x4b\xa8\x10\xc3\x09\xf1\x96\x68\xda\x90\x4a\xac\x38\xff\x27\x41\x27\x08\xb5\xed\xd8\x6f\x46\x11\x1e\x7f\x45\x8b\xbd\x1d\x33\xd5\x2e\xe2\x25\x59\x62\x9c\x14\x5f\x76\x1e\x4a\x44\xed\xfa\x14\xca\x2e\xc9\x16\xe1\x38\xd3\x57\x39\x15\xb9\x9d\xb3\xa8\x19\x43\x4e\x93\x04\x40\xc6\x68\x52\xdc\xa2\x72\x94\x3e\xe4\x39\x8c\x41\x02\x83\x05\x46\x25\x31\x34\x96\xa8\x91\xac\x92\xd8\x2d\x4a\x9a\x6e\x0e\x2e\x70\xc2\x4f\x1d\x7e\x2b\x3a\x1b\x2e\x8e\x96\x7c\x59\x08\xf6\x65\x8d\x20\x09\x11\x6e\x3d\xe0\x3b\x82\x65\x92\x9a\xcd\x85\x3a\x72\xc9\xda\x08\xa6\x7b\x6c\x22\xfe\x59\xca\xe9\xec\x1b\x8d\x46\x58\xc4\x00\x71\x82\xba\xc2\xbe\x31\x54\xb1\x9b\xe0\x0b\xa3\xad\xad\x15\x79\x0f\xc5\x34\x2e\x36\xad\x54\xd7\x6b\xad\x8c\xba\xc0\x26\x50\x09\xe3\xc8\xdc\xa4\x05\xe3\x6e\x1e\x31\xf9\xe7\x47\xc5\x98\x3b\xed\xa2\xf9\x20\x09\x7e\xf5\xad\x6e\x1d\x43\x2f\x58\x7d\x20\x3e\xa5\xb1\x77\x22\xa5\x47\x91\xee\xa6\x3a\xbf\xaf\xc9\x7f\x67\x55\x75\xca\xcc\x44\xff\x91\xfa\x08\x55\x50\xbe\x98\x7d\xac\xad\xbd\x5a\x14\x11\x55\x24\x35\x49\x0a\xb6\x1b\x94\x36\x23\xc4\x93\x1b\x82\x32\xb1\x48\xd6\x85\x71\x00\x4a\x6c\x98\x50\x72\x34\xfe\xa4\x39\x1e\x75\xda\x51\x7b\x62\x68\xef\xa0\x1b\xbf\xb4\xd0\xda\xc9\x42\x4d\x4e\x93\x1a\xe2\xa1\x34\x93\x25\x62\xe3\x8d\x81\xbf\xb7\x86\x7e\xde\x22\x09\x67\x08\x57\xa5\x03\x65\x1b\x10\x4a\xdd\xa9\x17\x11\x16\xbf\xeb\xc7\xfd\x7b\x9f\x54\xed\x78\x95\xd6\x4a\x4b\x68\x45\xba\xef\xfe\xe1\x6d\x8a\x50\x6f\x07\x50\x64\x2a\xb3\x98\x78\x43\x9a\xd3\x80\x2b\x1c\xba\x10\xf5\xbd\xe5\xb4\x3f\xa9\x0b\x79\xf7\xa2\x2a\xe1\xe0\xdf\x46\xd7\x9f\x53\x93\x07\xe6\x83\x80\x6b\x0c\xa0\xaf\x0e\xb2\x26\x94\x9d\x56\x5c\x84\xfb\xa5\xb8\x23\x92\x7d\xe2\x06\xed\xdf\xbb\x4b\x91\xde\x89\x87\x0c\xa6\x2d\x76\xd4\xec\x2c\xbd\x20\x24\x0f\x9b\x16\x5c\x62\x92\x83\xa9\x6e\xcc\x03\x34\x70\x68\x43\x1f\x5c\x9a\x54\x0e\x92\x0d\x54\xc7\x09\x74\xb9\xb0\xe0\xb5\x69\x20\x79\x67\x1e\x4b\x1f\x16\xbd\xb0\x3f\xa2\x78\x1d\xb8\x55\xbe\xdc\xbe\xa6\xa6\x0a\xe2\xfd\x06\x3a\x15\x3e\xee\x4c\x79\x4e\x7a\x6d\x95\xb0\x07\x05\
c\xe2\x3c\xda\x4e\x74\xfa\xc4\x07\x7e\xc4\x93\x68\x04\xcf\xd1\x3d\xf9\x1f\xc0\xb5\xdc\xdb\x10\xa1\x15\xa2\x90\x0a\x7b\x53\xb4\x54\xb0\xa9\x25\xfb\x45\x53\xe0\x80\xf2\xb7\x17\xb0\x94\x99\xf2\x54\x4d\x7a\x5f\x29\x6a\xf4\x0e\x26\x17\xee\x97\xad\x55\x75\x14\xe2\x60\x5c\x5a\x7d\x9d\x61\xe7\xb3\x0a\x88\x48\xcb\x21\x7e\x94\x4d\x4e\xb5\x4f\xd0\xd4\x19\x70\x6a\x95\xdb\xa4\xb6\x1d\xab\x7f\x11\x0e\x00\xc9\x8b\xdd\x84\x21\x26\x71\x70\xaa\x34\x15\xc6\x1b\x7c\xe2\x2e\xa2\xb9\x99\x80\x7e\x55\x62\x73\xcd\x59\xc6\x2d\xac\x3e\xfb\x52\x40\x5a\xe1\x00\x5f\x2e\x23\xcf\xc2\x92\x73\x9f\xd1\x74\x55\x83\x2d\x2a\x1d\xe0\x88\x79\x7d\xb8\xb7\xb4\xc5\xa4\x79\x1b\x0d\x74\x51\x65\xa2\x1a\xa8\x24\xa8\x86\xa4\x0a\x59\xaf\xc6\xc4\x7a\x5a\x13\xc0\xb8\x7a\x5d\x27\x6d\x78\x4d\x80\xdc\xef\xd6\xfc\xaa\x4b\x34\x82\x91\x9a\x34\xe3\x86\x61\x46\xf8\x05\xe2\x0a\xd0\x59\xcf\x02\x5b\xbb\x99\x89\x71\x17\x56\xea\xc1\x32\xc7\xb0\xf5\xd4\x04\xd6\xfd\xe8\x69\x6c\xb6\x68\x1b\xff\xf8\xb8\x00\x5a\xb9\x02\xdd\xa1\xa1\x27\xe2\x08\x7f\x34\xcf\x76\xff\x85\xa7\x98\xb4\x29\x1d\x46\xd9\x97\x37\xb2\xa9\x7c\x20\xb3\x7a\x73\x49\x1d\x79\xb5\x9f\x19\x7d\xfa\x40\xe6\x44\xea\x7a\x4f\x7f\x75\xa0\xfe\x3d\x2a\x00\xb6\x64\xed\x15\xb2\x57\xc9\x02\xf2\xd1\xc9\xef\x25\x38\xb8\xdd\x68\xb4\xbe\xfd\xba\xb6\x00\x9f\x43\x79\x3c\x0a\x6e\x34\x09\xe2\x34\xdd\x04\xb5\x4c\xfb\x99\x02\xb4\xd6\x7c\x1e\x39\x1a\x40\xbf\xbf\xd1\xaf\xdd\x02\xa2\xea\x4e\xac\x9c\x5b\xa2\x0d\xaf\x73\xd3\xcd\xe8\x77\xaf\xc0\x70\x61\x12\x38\x19\xb5\x5d\x31\xa9\x99\x1e\x55\x1f\xd1\x65\x86\x5c\xab\xcc\x0c\x21\x02\x15\x13\xde\xa3\x5f\x65\x6e\xae\xd8\xf4\xa5\x3f\xaf\xcb\xad\x3e\x02\x2d\xe2\xcd\x6c\xea\x52\x63\xc6\x20\x38\x5a\x7c\x67\x24\x5c\x59\x57\x0e\xda\x42\x65\xfb\xde\x42\xfa\x47\x5c\x71\x68\xe7\xc9\x01\x7a\xac\xf9\x46\x49\xb8\xfd\xca\x73\xbf\x05\x16\x09\x76\x32\xea\x90\xcc\xb0\x70\x9b\x54\x22\x94\xf4\x15\xcd\xdb\x53\x47\x93\xc3\x0a\xb5\x11\x74\x1a\x95\x5c\x2a\xc4\x83\xfa\xc5\x6d\xc1\xa1\xfe\x09\x8f\xbe\x67\x09\x4e\xb0\xb7\xa6\x73\xc6\x76\xf9\x8b\xf7\x88\xa6\x14\xfd\x1e\xcc\x6c\x2a\xd1\x14\x9a\x41\x24\xba\xa9\xc4\xbe\x49\xe0\x23\xb2\x55\x1e\x42\x02\x1e\x4f\x7d\x53\xfd\xb5\x49\xfe\xd4\x35\x92\x6c\xdb\x17\xa2\xe5\xfe\xea\x3b\xdb\x0d\x96\xf4\xc7\x85\x16\x34\x3d\x2f\x67\xe7\xb4\x3c\x5d\x41\x55\x46\xc7\xf1\xa9\x03\xb8\xb1\x01\xbe\x66\x64\xa4\xa9\x02\x0d\x80\x92\x3a\xc4\xb0\xa9\x51\xe1\xc8\x4b\x50\xb1\x4b\xb0\x35\x2f\xa7\xff\x1b\xce\xd5\x25\x32\x18\x49\xb1\xb7\x97\x26\x64\xa9\xa9\x30\x18\xf5\x64\xee\x6d\x2f\xbf\x12\x52\x50\x3d\x05\x71\x47\xa7\x1e\x29\xb5\xbe\x00\xda\xfa\x51\x02\xb5\xcf\xc4\x7a\x43\x5d\x72\x23\x9f\x84\xfb\xb8\xd4\x51\x75\xc0\x52\xd6\x5f\xf6\x45\x8c\x2c\xc2\x40\x19\x07\xed\x41\x70\xa9\x87\x3d\xb4\x6b\x43\x8c\x71\xea\x3d\x01\x0e\xde\xc1\x94\xaa\x23\xcb\x94\x42\x77\xb2\x1c\x50\x25\x4a\x58\xb9\xf4\x0c\xb6\xd5\x0f\xf7\x87\xcb\xfd\x64\x60\x30\x44\xfe\xcf\xf5\x8a\x41\xd6\x57\xc5\xa1\xf4\x70\x24\xa0\x08\x42\x1b\xa0\x89\xc7\xe4\xe6\xe8\x03\x2f\xa7\x3c\xc7\x20\x62\xbb\x1b\xc4\xdc\xd7\xad\x1a\x1c\xac\x67\xc4\x91\x7d\x0d\xdb\xb3\x1d\xe3\x08\x2b\xe0\x3d\x94\xc0\x0f\x51\x2b\x74\x1a\x66\x97\x3a\xad\xb7\xfa\x57\x49\x0b\xce\x2a\x22\x08\x5b\x38\xf5\xce\x61\xbc\x11\xd8\xe4\x50\x02\xe3\xc5\x57\xb2\xa4\x29\x62\x0c\xfe\xea\xfb\xe4\x28\xd4\x3b\x48\x2b\x2f\xdf\x28\x43\x78\xa3\x7e\xe4\x1e\x3d\x4c\x52\xbf\xaf\x0b\x72\xd2\xcc\x23\x8a\x8b\x5f\x0c\x97\xd4\x0a\xe0\x25\xc8\x93\xa2\x54\xf9\xda\xcf\x07\x1b\xf8\xd0\xd2\xbd\x5a\x40\x21\x93\x0d\x62\x9b\x35\xb5\xd6\xb2\x0d\x2b\xd2\x68\xaf\x72\x0b\xbb\x58\x35\xe5\x3b\xbe\xd2\xc5\x4b\x47\xfb\xd2\x79\xd8\x6b\x7f\x6f\x8e\x5e\x12\xca\xca\x9c\x62\xe4\x17\xec\x9e\xb0\xbf\xf5\xb9\x55\x
51\x90\x4c\x92\xca\x21\x90\xef\x12\xd4\x5c\x95\x10\xbd\xbf\x21\x45\xf2\x96\x73\xbe\x44\x58\xc3\xa6\x16\x72\xee\xd4\x04\x80\xcb\x8f\x0a\x42\x31\x5e\xf7\xdb\xb7\xb7\xfb\x35\x4f\x8e\x91\xf9\x38\x7f\xd1\x2c\x70\xa7\x0d\x81\xbb\xb1\xc5\x7c\xb7\x24\xa8\x36\x59\x1f\xac\x33\xd1\x73\xc6\x02\x3c\x49\xc8\x4d\xc8\xa5\xd9\xc1\x6d\x0b\xf0\xc9\x23\x84\x2e\xa0\xcb\x8f\xf9\x4c\x5c\x7a\x8f\x73\x0f\xd9\x0d\x57\x89\xbd\x26\xdc\xb7\x6f\x8c\xe6\x78\x79\x99\xc7\xa9\xf6\x3b\x86\x33\xe5\x12\xd4\x4c\x39\xbf\xfb\x84\x53\x11\x40\x77\x93\xbe\xea\x64\x96\x38\xa2\x7a\x8f\xa6\x08\xce\xbb\x11\xa8\x5c\xab\xe0\xa4\x1c\xa3\x3a\x9c\x6c\x30\x72\xf1\x5a\x79\x95\x49\xbf\x6f\x59\x4c\x6d\x00\x7e\x46\x07\x23\x87\xd2\xbc\x54\x3b\x28\xf0\x79\x77\x8b\x95\xb9\xc5\xca\x9f\x67\x00\xf1\x95\xd4\xa9\x83\x7d\xfb\x9f\x9d\x4a\xa4\xef\x25\x48\x11\x28\xe7\x25\xf6\x24\x23\xde\x26\x5f\xcc\xe5\x63\x5d\xdd\x54\x39\xc2\x6f\x1c\xc4\xc5\xf0\x05\x01\x2f\x62\xab\x1d\x2c\x5e\xe5\x5b\xb6\x32\xfa\x91\xb3\xfa\xa1\x25\x1a\x59\xa7\x49\x43\x70\xf2\x7a\x14\x6f\xcf\x29\x5c\x92\x09\x5b\xc6\x6d\x8c\x94\x30\x12\xd9\x2a\xf2\xa6\xe4\x37\xad\xc1\xc6\x22\x8d\xb9\x97\x61\xa5\x17\x5e\xe9\x52\x09\x88\x51\x5b\x77\xfb\x9e\x98\xf0\x27\xc0\xd1\x8b\xfa\x37\x7d\xb5\x3c\x17\xe4\x36\xed\x44\x10\x3c\x91\x5c\x92\x4f\x80\xf3\xd8\x10\xc4\xaa\xc0\x6d\x00\x2e\xe9\x90\x4c\x8c\x9d\x99\xe5\xe3\x9e\x00\x62\x70\x7f\x83\x1a\xc8\x2e\x17\x47\xda\x5f\x9d\x84\x83\x0d\xa7\x8d\x9c\x6c\xec\x7d\x1f\x0d\x5d\xb8\x13\x61\xcf\xab\x32\x6b\xda\x61\xae\xb0\xc8\x66\x1e\x8e\x87\xad\xe8\x1c\x96\x17\xba\xc2\xcb\xe5\xac\x76\xf6\x76\x5e\x80\x55\x2b\x32\x34\xb5\xf7\x31\x55\x8c\x1d\xa2\x61\x04\xca\x68\x8c\x0f\x94\xfe\xad\x40\xae\xf8\x71\xdf\x3e\x6d\x2c\x59\x12\xb8\xc1\x92\x68\x38\xcf\x55\x42\x4c\x9a\xc4\x69\xbe\xa2\x4b\x2f\x34\x9c\x61\xa9\x1b\x2d\x87\x34\x29\x6c\x9a\x05\xa9\x0f\xe9\x93\x34\x9d\x33\x86\x0c\xee\x9b\x0a\xa0\xc7\x39\xd6\x06\x99\xf7\xba\xcc\x9c\xb5\xaf\x8f\x5c\x2e\x0f\x8f\x4f\xda\x18\x9c\x96\x65\x76\x3f\x63\xdc\xcc\x53\xdb\x57\xf9\x5d\xd1\xc1\x53\xb1\x16\xfa\x72\x92\x2f\x8c\x6c\x40\xd3\x35\x37\x35\xca\xb7\x13\x57\x1b\x8d\xdc\x74\x05\xf1\x50\xf5\xe1\x7e\x84\xcd\x99\x98\xa5\x96\xf2\xc3\xdb\x5a\x4a\xb9\xeb\x77\xb8\x94\xc3\x3c\x6d\x82\x03\x62\xcb\xdf\xcf\x90\x49\x9a\x47\x7b\x71\xcd\xde\x67\xcf\xcf\x92\x76\x35\xe8\x9e\x8b\xf6\x4f\x39\xcc\xd5\x08\x86\xd1\x69\xb5\x7f\x8e\x6a\x31\x8e\xf5\x52\xd7\x4c\x1e\x7b\x97\xb2\x91\x87\x71\xee\x6a\x2a\x7b\x87\x54\x9c\x4e\x54\x28\xaa\xda\xe4\x55\xbd\xb6\x9b\x9c\x87\x97\x36\xde\xff\x7d\x0c\xb3\xd5\x3b\x00\xcf\x46\xea\x1a\x31\xd7\x7d\xf6\x28\x46\x93\x06\x78\xda\xd3\xd2\xde\x7a\xd4\x1a\x9f\xb2\x50\x56\x2b\xbb\xf7\x15\xb9\x71\x86\x03\x4d\x6b\xfa\x91\xc1\x1c\xbf\xba\x2a\xbf\xf2\x8d\xe8\xbf\xe6\x0a\x3f\xd4\xe1\xd0\x5a\x6e\x99\xbe\x5a\x35\x9f\x23\xc7\x4d\xc7\xfb\x64\x36\x49\xb3\x55\xbc\x0d\x26\x64\xee\xd0\x8e\xdb\x31\x98\x28\xd9\x90\x75\xdd\x04\xa0\xa5\x41\x8b\x42\x90\x60\x36\xf5\x0c\x86\xbf\xe6\x7f\x37\x07\x5f\x5e\xb9\x54\xc7\x0d\x6d\xfd\x81\x43\xe2\x72\x53\x2e\x8c\x97\x9d\xd7\xf0\xd6\x8c\x42\x60\x1f\xa5\x6a\xc3\x97\x4f\xcf\xa9\xbe\x79\xcf\x5a\x52\x83\x22\xcc\x0a\x76\xa8\xfb\xf0\x0a\xc5\xeb\x45\x8b\x2d\x94\xd8\xff\x9f\x23\xf9\x7f\x92\x92\xd3\xe0\x9a\xaa\x9a\xda\x96\x40\xdd\x87\xbe\xe6\x16\xa0\x01\x78\x6a\x54\xb1\x23\x66\x52\xca\x46\xaf\x95\x12\x4f\xaa\xf0\xb4\x82\x08\x95\xd0\x28\xb8\x09\x8b\x96\x85\xd1\x5e\xa5\x4a\xed\x87\xe0\x5e\xda\xda\x90\x3e\x54\xe5\x0e\xcd\xb7\xb2\x35\xb3\x12\xd9\xae\x9b\x62\x04\x0d\x5d\xb2\x5d\xb0\xd7\xfc\x34\xfe\x55\x7d\x36\x95\x75\xd7\x0c\xca\xd0\x43\xd2\x2f\xbc\xe6\x00\x56\x66\xbf\x20\xe0\x5a\x30\x62\x48\x9f\xda\xfc\x78\xc0\x12\x51\xfd\x29\x5a\xc2\xba\x90\
x99\x5e\xa6\x4d\xee\xe5\x80\x69\xbc\x37\x41\xa7\xc2\x09\xe5\x0c\x60\x8d\x0a\xb3\xe3\x97\xeb\xe6\xf0\xb6\xd5\x52\x1c\xc8\x73\x3f\x3a\x35\x51\xd3\xf6\xbd\x5b\x6b\xa4\xad\xa8\xad\x82\x84\xc9\x69\x65\x15\xe4\x6b\xd2\x6e\x69\x48\x9b\xdd\xa8\xca\x39\x04\x46\xd4\x64\x03\x9c\x76\xee\x0c\xb6\xd3\xf0\xf3\x73\xdd\x15\xdf\xce\xe5\xd2\xf0\x38\x46\xe6\xa6\xfb\x1b\x2d\x6b\x56\x34\x6f\x6e\x51\x5e\x4c\x6f\x9d\x9b\x1c\x15\xb8\x73\x12\x79\xe3\x8d\x35\xcd\xb4\xb2\xb9\xbe\x55\x84\xc1\x91\x4b\x25\xe5\x66\xc8\x1a\x67\x26\xdc\x54\x9d\x0e\xc2\x23\x56\x07\xa6\x3b\xd3\x20\x58\xf2\x1a\xe2\x80\x4f\xa3\x02\xd1\xe9\x1b\x8b\xf3\x39\xfd\x1b\x82\xb4\x33\x9f\xd3\x5b\xda\xff\xc6\x22\x5d\x57\xc3\x97\x55\xe9\xe4\x73\x7a\xd2\x1e\x9d\xaa\x25\xcd\x53\xf8\xf9\x67\x8f\xff\xfe\x28\xa2\xcb\x31\xd6\xc5\x34\xcd\x26\x16\x22\xd2\x3c\xae\x9c\x23\xd3\x44\x45\xaa\xe8\x51\x2e\x8c\x6e\x18\x6f\x55\x21\xd6\x60\xb4\x90\x80\x05\x7a\xa1\x9a\xc0\x7d\x9b\xd4\x54\x49\x1f\x27\x8f\x54\x23\xaf\x26\x0d\xf6\xf9\x8e\xc1\xe3\xf6\x31\x91\x87\xc4\x5c\x23\x99\x60\xea\xea\x0b\xc7\x72\xf9\x21\xf2\x55\x63\x58\xe7\x32\xb9\x4e\x43\x1d\xc2\xf8\x0d\x36\x3d\x1c\x51\x5e\x42\xac\x05\x6b\x4f\x2d\x11\x5b\xb6\x71\x6c\xa0\x39\xa5\xba\x63\x36\x86\xe6\xe1\x5a\x5e\x91\x20\x98\xf7\x07\x80\x7a\x5b\x88\x2b\x74\xbf\x72\xc9\xb5\xd1\xed\x62\x07\xa5\x65\x97\xe4\x9c\xa8\xee\x53\x52\xc9\xeb\x60\xd5\x25\x75\x97\x16\xfe\x24\x2a\x3b\xf9\xe3\x01\x60\x5d\x46\x33\x1f\xfa\x47\x35\xd5\x7a\xf5\xd3\x97\xee\x5c\x39\x7a\x0d\xc7\x2a\x5f\xf9\xea\xcb\xed\x16\x44\xbf\x68\x9e\xb0\x64\x8e\x6c\x2d\xd4\x99\xd7\x3c\x7d\x38\x9a\xba\xb1\x13\xc7\xdf\xea\x17\xef\x31\x81\x3f\x0c\x00\x83\x8e\x6c\x95\xc3\x84\xae\xae\xd0\x8d\xbd\x10\x07\xec\x9d\x85\xa3\xe1\xf1\x09\xae\x97\x82\xfd\x34\xda\x61\x6e\xea\xef\x47\xb6\x16\xc3\x4f\xb6\xf3\xc4\xcf\x38\x7b\x02\x3c\x7e\x95\xca\x8c\x4a\x73\x15\xb0\x18\xaa\x82\x51\x09\x4f\x4c\xd9\x06\xff\xfa\x00\xb4\x4c\x5e\x83\xf4\xd9\x7b\xac\x99\x2a\x8b\x79\x3b\x9a\xcd\x9a\xd4\xe5\x38\x47\x77\x85\xd3\xaf\x20\xdf\x6a\x88\x30\x85\xed\xde\x1d\xa8\xc3\x41\x73\xf5\xfb\x18\x0e\x1a\x6b\xed\xf8\x83\xe9\xec\x5e\xf0\x3d\x3c\xba\x5a\x20\x96\x63\xe4\x85\xe6\x4b\x5c\x9c\x55\x2d\x13\x50\x8a\xda\xc8\x09\x3d\x03\x55\x30\x53\xf8\x90\x0c\x62\x61\x35\x25\x3d\x72\xda\x97\x3a\xcf\x12\x42\x46\xa1\x01\xe9\x89\xe3\xbe\xb4\x92\x4a\xba\x94\xf8\x64\xbf\x96\x98\x4c\xa7\x80\xb0\x38\xbd\x7e\x4b\xba\xe9\x1d\x22\x0d\xef\x5e\xe4\xb7\x12\xd8\x9e\xb5\x4b\x54\xc5\x57\xa2\xcd\x97\xab\x06\xab\xfc\x1b\x7c\xc6\xfe\x63\xfc\x74\x78\xa6\x7b\x94\x78\x26\x76\x36\x2a\x04\x71\x45\x89\xb4\x71\x4d\xba\x9f\x0d\x74\x99\x16\xaf\x47\x0f\x2b\x92\x89\xdc\xa1\x50\x42\x61\x8b\x9b\x81\xba\x77\x6b\x71\x2d\x34\x7a\xd9\xab\xc4\xdf\xee\x1e\x77\xab\x0e\x3f\x28\xdf\xfa\xe2\xb9\xa7\x5d\xb7\x8c\x1c\x2e\xdc\xe8\x80\x8f\x67\xff\x77\xff\x3b\xff\x48\xcc\x65\xb5\xff\x1b\xe6\x65\x51\xa2\x57\x60\xec\xca\x3a\xa1\xd3\x1e\x68\x05\x74\xa2\x48\xe8\x47\x43\xb6\x45\x06\x76\x33\x4e\xf3\x08\xb3\xcf\x3e\xbf\x4c\x58\x46\x02\xab\x68\x9b\x03\x9b\x53\x2a\xa5\xcf\xe2\x2a\x12\x34\x31\xb3\x80\x5a\xf4\x63\x71\x81\xef\x41\x0f\x90\x36\xe0\xcd\x0a\xcc\xba\x74\x2d\x43\x7d\x13\x83\xce\xff\xcc\xc3\x46\x3f\x48\xe2\x6c\x86\x60\x7f\x0d\x20\xac\x84\x6f\x06\x5a\xc2\x0d\x17\x57\xe4\x2e\x63\xb6\xa0\xff\xa4\x98\x58\xf9\xca\x03\xfa\xac\x8d\xb6\x49\xd9\x2e\xa9\xfb\x72\x2d\xdb\xd8\x23\x69\x28\x37\xe2\x87\x92\xcd\x10\x9c\xb1\x09\xd1\x33\xf9\x54\x14\xd3\x56\xe8\xd0\x7f\x04\xa5\xa0\x65\xad\xbc\x6c\xea\xc3\x05\x61\xfc\xeb\x46\xe6\x3e\xa8\xca\xd9\xad\x36\x40\xf7\xf3\xc4\xaf\x73\x37\x45\xe4\x20\x7a\xba\x7c\xa9\x2a\x0e\xca\x39\x6c\xa7\x8d\x89\xd9\x6e\xf2\x3c\xc1\x8c\x3b\xe4\x64\x91\xf9\xad\xf1\x85\xe5\xf3
\x90\x6b\x49\x89\x22\x5e\x34\x55\x33\xbb\x7c\x3a\xaa\x39\xdd\x1e\xd4\x89\xf9\x8e\x17\xf9\x21\xfd\xbd\xf6\x5c\x10\x45\x53\xa5\x9f\x1f\x1a\x63\xdc\xbe\xd9\x7a\x0c\x35\x25\x63\xe1\x5c\xe2\x31\x29\xd5\xba\xc6\x7e\x3e\x2c\x7d\x67\xea\xd5\x4e\xc1\x48\xdf\xdf\xd7\x4b\x29\x93\x28\x1b\xfa\x02\xaf\xba\xb3\xbc\xe9\xe8\xec\xb8\x57\x17\x33\xf7\x4b\xdd\x1a\x87\x6b\x05\xc2\x03\xc5\x63\x23\x90\x4c\x11\xd5\x8b\xfe\xaa\x6f\xdd\x69\x09\xf4\x8b\xe3\x6b\x3e\x86\x8b\x1c\xcc\x41\xfb\x8d\x70\xd1\xad\x37\x76\x20\xc9\x2b\x90\x23\xad\xdd\x22\xa0\x89\x3b\xdf\x7a\x98\x72\xca\xe4\x53\x4b\x26\x31\x31\x72\x97\xeb\xf6\x29\xc6\x8c\x54\xa6\x83\x3a\xb6\x55\x22\xa8\x6d\xb3\xae\xcd\x55\x76\x11\x0f\x4a\xfb\x8f\x84\xf8\x85\x40\x38\xae\x48\xa7\xe7\x37\xf0\xab\xb6\xfd\xda\x3e\x44\xc1\x4b\x14\x61\x30\x79\x3a\xa7\xda\x47\x9f\xab\x61\x1d\x81\x19\xba\xae\xb4\xf0\xc1\xb3\xe8\x7e\x34\xfc\x9c\xec\xd3\x7f\x40\x41\x64\x8e\x31\xf1\x8a\x0b\x26\x52\x3c\xb4\x97\x4e\xa7\x76\x5b\xff\x73\xda\xa3\x05\xea\x49\x39\x86\xee\x9c\x9d\x74\x8e\x30\x71\x04\xcf\xda\x03\xf3\x39\x9b\x98\xa4\x6a\x48\x45\xad\xf0\x3f\x63\x9d\x58\xcd\x3e\x01\xe7\x74\xd9\xff\x1f\x76\x2d\xdf\xcc\x66\x72\xee\x0e\x6c\xb6\x22\x92\xd0\x82\x37\x9a\x44\xf4\x31\xf9\xc5\x77\x58\x52\x4d\x09\xe0\x9d\xc6\x89\x80\x9c\xa8\x19\x23\xa5\x40\x70\xe9\x9e\x71\x5e\xe0\x57\x55\x02\x4f\xca\x65\xee\xd1\xcc\x80\x3f\xde\x7e\x0b\x9f\xbc\xa8\x11\xc6\xdf\x54\x25\xff\x89\x58\xd7\x58\x0e\x00\x93\xea\xc9\x32\xbc\x93\x69\x15\x12\x10\xf3\x29\xcb\x7c\x39\xc0\x27\xb5\x5c\xad\xa6\xf7\x3e\x7b\x79\xe5\xc7\x66\x78\x8d\x17\xc0\xc8\xde\xc8\x51\x58\x5b\x22\x32\x76\x4a\x95\x2b\xc2\x1d\x7c\x9b\x7b\x1d\x93\x3d\x9c\xba\x35\xa7\xf4\xb7\x44\x77\x6e\x00\xa2\x7c\x07\xd6\xa6\xb1\x16\x6f\x09\xac\xd2\x1f\x98\x68\xd1\x93\xee\x76\x8e\x7a\xa9\x35\x08\x2e\x97\x61\xe0\x09\xa0\x19\x95\xb7\x62\xcd\xdf\xe4\xb7\x59\x9b\x87\xe7\x22\xd6\xd5\xcd\xec\xa7\x8a\x67\x2a\x1f\xb9\x91\x6c\x30\x4d\xb4\xd5\x86\x6b\xa5\xc0\xb5\x6a\x53\x98\xa4\xb9\x97\x4b\x4f\xe7\x69\x0d\x32\x72\x90\x68\xed\x53\x1e\x03\x8d\x2b\xc1\x17\x1e\x0e\x2a\xef\xda\x18\x9e\xf5\xa9\x5c\xd6\xb9\x9b\x36\x6b\xd1\x56\xfd\x3d\x16\x3e\xb8\x2d\x5f\xb8\xde\x4d\x9a\xfa\xa0\x63\x58\x87\xda\x12\x09\x15\xaa\x2f\x8b\x16\xd7\x3e\x57\xdb\x90\x9a\xd1\x71\x6f\x4a\xd6\x64\x80\xac\xa4\x7f\x13\xa3\x70\x95\xd7\xa0\x65\x79\xdb\x06\xdb\xe6\x66\x77\x2c\x27\xa6\x3a\x83\xe7\x5e\x1a\x9c\xd0\xe1\xeb\x05\xc2\x9b\xdb\xf9\xdc\x04\xc6\xa2\x1a\xb3\x3b\x8e\x5f\xae\x1f\x73\x00\x77\xca\x77\xd5\x9f\x3e\xdf\x25\xe1\x09\x85\x21\x6e\xfe\x72\x30\x7c\x42\x06\x7c\x03\xb4\xfc\x2a\xaf\xa3\x97\xce\xb0\xbf\x89\xfa\x9e\x77\x15\x87\x0f\x0d\xa7\x09\x36\x08\xaf\x6f\xc0\x9d\xa6\x00\x7c\xb7\x97\x6c\x3c\xf8\x5d\x9e\xfd\xf7\xa8\x39\x25\x8c\x75\x59\x17\xf6\x2d\x87\xa9\x6d\x2c\xe5\xf8\x75\x4e\xee\x68\xe6\xcd\x1f\x1b\x27\x42\x97\x34\x74\x07\xd5\x6a\xe1\x5e\x1a\x19\x00\xd6\xce\x57\x7b\xf9\x37\xa2\x51\x1b\xc0\x85\x56\x4e\x35\xed\x64\x7c\xb1\x8c\x21\x3d\x10\x22\x40\x4b\x7b\xe6\x2b\x65\x08\x51\xa2\x26\x06\x23\x65\xaf\x94\x42\xdb\xed\xfa\x3a\x69\x4c\xe4\xb3\xd9\xbd\x3e\xea\x2e\x1d\x1d\x98\x7d\xd8\xd9\x9d\x26\x84\xdc\x68\x9d\x21\xd8\x12\x41\xe4\x43\xc4\x70\xdd\xa8\x70\xab\x9d\x75\x12\x53\x77\xae\x4e\x45\x4b\x12\xe2\x1d\x51\xbe\x6b\xde\xe4\x92\x31\xec\x9c\x19\x96\xf3\xad\x6e\x8e\x63\x10\x8f\x20\x5c\x00\xd8\x05\x85\x79\x0d\x37\x58\xf5\x39\xa4\x64\x8e\x6a\xe3\x92\x9d\xaa\xa5\xbe\x60\xcc\x84\x05\xe8\xfe\x15\x37\x1e\x29\x6f\x2f\x42\x74\x9b\xfd\xd7\xef\x61\x63\x9a\xe4\x6b\x65\xb2\x95\xb4\x24\x6e\xff\xbb\x6c\x2f\xab\x40\x21\x55\x1c\x32\x58\x01\x6e\x3c\x51\xbf\x8a\xff\x24\xa2\xb4\x39\x65\x6a\x85\x6e\x4a\xb0\x67\xa8\x1b\xbb\x83\x2e\x71\x26\x00\x23\x2a\x9f\x1
5\xf3\x4d\x7e\xcc\xcf\xde\x34\xeb\x9f\xfc\xc9\xb6\x53\x7a\xc1\x3a\xb6\xb9\x5c\x61\xb1\x61\x6f\xd6\xd8\x39\xeb\xe4\x59\x92\x3b\x5a\x41\x79\x48\x6a\x10\xf7\x34\xfb\x65\x18\x5d\xf5\xcc\xe9\x62\x9d\x96\x2f\xc5\x43\x4d\x0f\xd1\x4b\x26\xfb\x98\x2f\xa3\x55\x69\x21\xe9\xa0\x00\x39\x86\x7d\xac\x90\x85\xe3\x89\xb0\x19\xac\x6a\xe5\xcd\x9e\x95\xb3\xab\x31\xf7\xe0\xcc\x80\x52\x27\x34\x09\x8f\xd2\xae\x1e\x2f\xad\xee\x38\x95\x5c\x34\xec\x5a\xf3\xf2\x63\x8f\x2b\x1c\x0f\x4e\x92\x39\x66\x8f\x16\x6b\xda\xba\x90\x74\x68\x2a\x91\xa3\x99\x84\xe9\x9a\xc2\x12\x4a\xc2\xf2\x80\x23\xc9\xf3\x3b\x91\xd5\xa0\xb2\xcc\x06\x51\xae\x9b\x4b\x46\x02\xbe\xf4\xe6\x6b\x99\x3e\xb9\xd5\x52\xe5\x8d\xfa\x81\x68\xd6\x1b\x31\xcf\xea\x54\x22\xc1\xb6\xef\x55\x89\x63\xfc\x46\x5e\xc0\x0f\xf1\x22\xc3\x40\x4c\xc7\xc7\xe5\xa6\x54\x2a\xb7\x53\xa3\xf1\x63\xb3\x11\x6c\xb8\x92\x2a\x3b\x3a\x34\x68\xc2\xc6\x2c\xf1\x8e\xd7\x7b\xcd\xfa\x39\x50\x55\x66\x65\x8e\x96\xd3\xf4\xa3\xd4\x8d\xce\xb8\x94\xcb\x3f\x45\x00\xa2\xda\x68\xb4\xfb\xc9\x38\x7e\x79\xdc\xd4\xbf\xa5\xc0\x78\x13\xdc\x11\x6e\x97\x3c\x1c\x72\x18\x4c\x46\xed\x66\x0c\x7e\x00\x80\x25\xda\xe5\x4b\xfe\x5c\x43\x47\x9a\xa4\xaf\x98\x14\xf0\x44\xf3\x5a\x4a\xeb\x90\xe9\xb8\xac\x46\xdf\x3e\xb8\x78\xb6\xf0\x36\x1b\xa7\x54\xb1\x61\x6b\x8f\xba\x25\x49\xbf\x72\x72\x53\xad\xeb\xd4\x52\xdb\x62\x1a\x35\x86\x6e\x3f\x99\x7d\xc1\x40\xa4\x1b\x83\x76\x9b\xba\xff\xe2\x87\x07\x84\xf1\xa7\xb7\xaf\x4b\x10\xfc\x58\xe1\x7d\x1e\xa5\x38\xf0\xc4\x61\x85\x5c\x71\x0c\x5a\xd7\xfe\xbe\x9d\xa3\xc0\x45\xbf\x7d\x23\xcb\x25\x34\x63\x2c\x18\x66\x6c\x10\x1b\x6b\x3e\x01\xca\xc6\xc3\xc5\x60\x6d\x61\xff\x23\xec\x59\x70\x10\x65\xba\x4f\xd6\xa6\x48\x6d\x30\x00\xcf\x2a\x2c\x28\x9c\x63\x72\x6e\x44\x85\x88\x90\x1c\xbc\xb9\x9a\xb8\x7a\x9c\x25\xee\x73\x08\x4b\xfc\x1d\x9c\xbf\x04\x15\xb0\x76\x58\x9b\x20\xfa\xaf\xa9\x7c\x09\xc5\x2f\x49\xc0\xa6\xa9\xfd\x92\x5d\x85\x88\xa6\x60\x71\x99\x23\xba\x06\x6e\xb1\x1b\x79\xd7\xc9\xdf\xea\x2f\x72\x4b\x5e\xef\x0b\xca\x0f\xaa\x84\x18\x54\xf9\x06\x97\x58\xa1\x42\x7f\xb5\xfb\x70\x90\x5b\x8f\xe6\x16\x67\xa9\xaa\x54\x78\xd9\xc7\x75\x75\x80\x6b\xe6\x2c\x06\x0a\xab\xbe\xb8\x73\x40\xe5\x92\x65\xde\x25\x28\x17\x3a\xc4\x10\xe9\xae\x56\x41\x7c\x62\xae\x13\x1d\x8c\xfe\xa2\x13\x88\xa4\x49\xd6\x65\xb2\xc1\xb1\x81\x7e\xd9\x6e\x76\x15\xaa\x82\x80\xeb\xd9\x84\xe4\x16\x81\xf0\x36\xc8\xe9\x75\x20\xa1\x18\x68\x07\xbd\x93\x0c\x05\xc4\x87\x57\x74\x26\xbd\xd5\x9e\x70\xa7\xa5\xf4\xf9\x10\x1b\xe3\x7b\x9f\xf3\x05\xa4\x0c\xa7\xe7\x98\xac\x4d\x01\xd7\xaa\x42\x6a\x8c\xce\x95\xe1\x52\x3e\xf0\x3a\x14\xc4\x68\x07\x45\x62\x95\x41\xc0\x5a\xd1\x64\x38\xe7\x48\x57\xe8\x2e\x68\x6c\x00\x84\xfc\x8d\xc2\xdd\xe6\x40\x94\x57\xe5\x57\xb4\x7d\x55\x65\xab\x87\xa4\xeb\xf5\xe1\x63\x26\xe4\x67\x19\xf6\xa7\x50\x8e\xed\xb6\x52\x8b\xd8\x55\xa7\xcb\xbb\x3e\x0b\x58\xd8\x8a\x4b\x2c\x37\x9a\x48\x71\xaa\xc2\x79\xbf\x45\x77\xa2\xe4\x37\x15\xb5\x5b\x22\x21\x87\x21\x2d\x2d\x38\x3c\x37\xc3\xec\x34\x8e\x00\xe3\xf2\x03\xe3\x8b\x73\xd5\x75\x33\x46\x62\xf3\x17\xf1\xa8\x7c\x5c\xf3\xd2\xeb\x2a\xc6\xee\x54\x4a\x33\x55\x01\xe2\x66\x84\x66\x85\x2d\xcf\xa7\xc7\x68\x55\x04\x8d\x95\xed\x1b\xc1\xb8\x0f\xfc\x40\xe0\x41\xa9\xdd\x77\xfb\x5f\xb8\x30\x45\x4b\x65\xb2\x96\xf1\x05\xa6\x43\xab\x6e\xb6\x40\x87\x8a\x54\xc8\x1a\x31\x9c\xa3\x80\xae\xf5\x42\x8a\xd9\x4a\xc9\x2d\x59\xfd\x42\x62\x2a\x9a\x68\xa8\xbb\x44\x7c\x4b\x9f\x60\x56\x8e\x9e\x99\x8d\x92\x97\x41\x8e\x63\x34\x01\x60\xab\x8a\xeb\x90\x2e\x30\xdc\x1d\x9b\xc0\x54\x18\x61\xae\xb7\xb1\xeb\xaa\xfb\xc9\x3b\x20\x05\xc8\x6a\x34\xdf\x0d\xd8\x67\x25\x5e\xde\x54\x0a\xfc\x4a\xed\x2b\x76\x83\xa2\x75\x74\x74\x74\xc7\x9c\xb9\x52\x00\x3d\x2f\x
b8\x4c\xe2\xb0\x82\x46\xb9\xd7\xb0\xcc\x35\x09\x02\xf6\x9c\x94\xec\x3f\xcb\x76\x56\x79\x86\xb4\x1e\xc8\x5b\x2e\xb7\x34\xb8\x34\x7e\x39\x72\x5b\x9a\xa6\x7e\x24\xe3\x9e\xa7\xaf\x06\xcf\x76\xfa\x4d\x13\x55\x03\xc5\xd4\x80\xaa\x4b\xf3\xca\xc3\xb3\x2c\x00\x8d\xbd\x5c\x19\xd9\xdf\x35\xd5\xf1\x65\x01\xb5\x6c\xba\x87\xf1\xce\xa4\xb6\x22\x94\x60\x79\x9f\x5c\xaf\xc8\xb2\xee\xc1\xed\xbd\xf7\x25\x19\xe6\xb6\x81\x95\x83\xd5\x9d\x08\x66\x5c\x9c\x16\x76\x23\x8a\x6c\x67\x45\x55\xeb\x27\x94\x67\x6e\xb5\x7e\xd6\xb5\x5f\x2f\x28\x61\x6f\x2c\xf9\xbf\xad\x6e\x69\xe7\x50\x30\xcc\x48\x06\xbb\xca\x30\x26\x34\xc8\xb3\xf4\x52\x14\xf3\x22\x87\x5d\xa5\xbe\xc4\xa6\x3a\x20\x43\xb5\x8f\xb8\x59\x6d\x8e\xe4\x1e\x24\xe5\x01\xc2\xf2\xa9\x41\x02\xcc\x8d\x54\x2e\x5c\x37\x84\xd3\x0a\xfc\xee\x47\x12\xcf\x04\xde\xe8\x1f\xfe\x15\xbd\xfe\x66\x83\x40\xb7\x96\xe1\xaa\xb9\x5a\x05\xe7\x35\x56\xce\x97\x1b\xad\x25\xb3\x09\x4c\x0d\x32\x43\x35\x31\x6c\x3d\x6f\x2f\x23\x38\x95\x5a\xf8\xcf\xc2\x11\x9a\xf1\x91\xad\xa0\x95\x12\x2b\xc8\x2f\xb6\x01\x5f\xcc\x1d\xbf\x29\xc0\x1d\x8d\x78\x28\x9f\x66\xb4\xaa\x78\x39\x70\xb7\xf4\xf6\x91\xbb\xf0\xb3\x5b\x0f\x9b\xa6\x0a\xbb\xf1\x61\x4b\x20\x84\xb0\x49\x5b\xcc\xb3\xe5\x64\x1e\xbf\x05\x90\xf4\x59\x4b\x63\x52\xce\xe2\x47\xc4\x8b\x42\xa9\x0c\x1d\xeb\xf1\x9a\x0a\xe9\x81\xfe\x56\x65\xcc\x76\x0f\xb2\x5f\x31\x9e\xec\x6b\xad\x87\x5a\xfb\xb8\x76\x16\xcd\x8b\xb0\x66\xe1\x91\xd5\x54\xce\x7f\x16\x99\x97\x2d\x25\xfc\x78\xb2\xbb\x64\x41\x27\x90\xf2\x5f\x70\x1e\xb2\x22\x5d\x7a\x94\x2d\xeb\xb4\xc9\x6d\xc6\x99\xa3\xe8\x09\x1a\xe4\xfd\x18\xd0\xc9\x6e\x12\x26\xf0\x1b\x79\x7d\xd3\xd0\xc7\xd3\x9f\x12\xf1\x46\x51\xea\x9d\x40\x97\x13\xed\x3a\x98\x20\x35\xa7\x07\x3d\x51\xf3\xd8\xa7\x27\xf8\x65\x48\xd5\xd6\xee\x61\x3e\xf5\x7d\xb3\x35\xdc\x08\x64\x0f\xb7\xe2\x8d\xf5\xb6\x5d\xa0\x4a\xe9\x61\xee\x72\xff\x77\x48\x95\x9b\x5c\x0e\xb4\x21\x0e\x17\x1c\x6e\xeb\x45\x31\x01\x9e\xd0\xc6\x6c\x64\x68\x57\xb7\x1b\x5a\xae\xdb\xd8\xbd\x4b\xf1\x9c\x7a\xde\x44\xcf\xaa\x56\x7c\x49\xad\xf9\x0a\x0d\x7a\x6d\x76\x97\x4c\x04\x3b\x73\x80\x79\xf6\x4b\x8e\x43\xa7\x8b\x43\x47\xe2\xdd\x4d\xaf\x4e\x1f\xc4\x1c\x6b\x1b\xb7\x7b\x54\x83\x98\xc0\x5d\x98\x9a\xd9\x61\x4c\xc9\xad\xf4\x4b\xf5\x61\x1d\x36\xd8\x61\x1c\xa2\xde\x38\x27\x3f\xea\xf4\x0b\x72\xfc\x97\xdb\x02\x1f\x50\x54\x3f\x97\x81\x32\x41\xc1\xf8\x4f\xae\xb7\xe7\x6c\xc3\xbb\x33\x94\xa2\x0e\x9c\x43\x2b\xe2\x01\xee\xf0\xd8\xbe\xd1\x9f\x0c\x88\x55\xa7\x5d\x65\xd3\xa5\x23\x7d\xac\x23\xd6\x5f\x2e\xba\xd4\x77\x7c\x68\x68\xba\x9e\x3a\xa6\x20\x87\x2c\xb5\x74\xd0\x3e\x7c\x9a\x81\xf9\x16\xd2\xa8\x83\x2e\xae\xfd\xab\x40\x04\xf8\x4d\xf9\x94\x8e\xaa\xd8\x4c\x77\x71\x8a\x4a\x2a\xce\x43\x4b\xf2\x88\xfa\x87\x30\x53\x4d\x06\x83\xe4\xcb\xe9\x1d\xca\x01\x74\x94\xab\xdd\x2c\x8b\xba\x52\xa6\x39\x26\x4c\xc1\x33\xc4\xdf\xcd\x9f\x06\x29\xc6\x9b\xee\xb1\x09\x13\x42\x91\x22\x6e\x2c\xbb\x2f\x1a\x0c\x62\x12\xc4\xf1\xd2\x06\x7b\x84\x12\x39\xa0\x73\xa6\x84\x47\x57\x77\xb4\x38\x46\x39\x4d\x1e\xea\xbc\xff\xe4\x20\x6c\xfd\x08\x57\x80\x8c\x4f\xd5\xfa\x3e\x2e\xe7\xc5\xb8\xda\x9d\x81\x83\x92\xdc\x7a\x02\x3b\x8f\x8f\x8a\xd7\x8c\x7a\xc5\xb9\x5f\xde\x66\x5d\x8f\x50\xa7\x49\x81\x83\xdf\xc2\x24\x62\xe9\xd6\xe4\x63\x4f\xac\xd2\x98\xb8\x1e\x43\x25\x37\xdd\x4d\xd1\xbf\x83\x60\xa8\x9e\x17\x07\x06\x87\x8e\xb7\x25\x58\x8b\xe2\xe3\x68\xdf\x75\x31\x71\xe8\xa4\x3d\x13\xfe\x84\x15\x52\x90\xac\x20\xd7\x8d\x8d\xce\x42\x44\xd7\x8f\x97\xab\xa7\x49\xf2\x2f\xec\x6e\x41\x67\x0f\xd1\x36\x00\x76\x03\xf4\xd7\x06\x4b\x9a\xd4\x68\xd7\x84\x80\xff\x99\x17\xd6\x9d\x15\x48\x45\xc9\xda\x3d\x1c\xb3\x19\x76\xae\xbc\xd1\xb2\x49\x16\x95\x83\x34\x47\xb9\x5c\xa4\x92\x60\x06\xf5\x58\
x27\xbd\x56\x59\x64\x03\x72\x0f\x78\xf4\x76\x30\x77\x95\x18\x69\xa7\x55\x8a\xfa\x35\x91\x31\xa5\xee\xd8\xd7\x74\x20\x68\xd3\x49\xd4\xa2\x10\x44\x7f\xe5\xc0\xa1\xa2\x62\x55\x1a\xf9\x18\x11\xff\xd5\x58\x40\xec\x95\xa4\x42\xa7\x2f\xf7\xd8\x3c\x30\xa6\x51\x99\x91\xb9\x28\xcc\xdc\x37\x3a\x44\xc7\x3b\x06\xc9\x15\x97\xd4\x80\xbb\x41\xbf\xcb\x6d\x76\xd3\xa1\x7b\x64\x69\x7e\xd8\xb8\xe3\x0c\xb4\x60\x59\xb4\x29\x78\x92\x79\xd6\xf9\x3b\x5a\x22\x67\xff\x22\x94\xac\x75\x30\xb5\x2f\x55\x4f\x17\xd2\xa9\xba\x9a\xc5\x46\x51\x4c\x48\x73\xec\xf4\xaa\xa2\xea\x3b\xfd\xe5\x92\x8e\x29\x96\x4c\x0c\x27\x3b\xcc\x3c\x2e\x08\xf8\xfd\x74\xe3\xba\xd4\xbc\xe3\x1c\x31\x41\xa5\x5c\xd6\x98\x59\x93\x8f\xf5\xfc\xfd\x9d\xdc\xa3\xd8\x00\x25\x34\x0d\x2a\x40\xb2\xf6\x44\x64\xac\x98\x9c\x0b\xc3\x5c\xc8\x53\x0b\x69\x66\x5f\x4c\x06\x31\xc7\x1b\x05\x0b\x89\x44\xb0\xec\xbb\xdf\xf7\xd5\x4f\x5b\x3e\xbc\x20\xad\x62\x13\xb0\x54\x4a\x12\xad\xe6\x8a\x2f\x16\x24\x2f\x19\x2f\x34\x27\x16\x67\xbf\xad\xf0\x92\x93\x38\xe4\x4f\x98\xd6\x6e\x26\x8e\x3f\x7b\xe9\x4c\x7c\xf3\x13\xb8\xa3\x73\xc2\xc3\x0f\xa8\xb3\x6d\xc2\xfa\x70\x2b\xdd\x61\xa5\x24\x04\x78\x1c\x84\xdc\xa8\x0b\xed\x61\xd4\x81\x68\xa4\x53\xd0\x9d\xe2\x9b\xb0\x7a\x43\x97\x63\xaf\xbb\xae\xd2\x31\xa5\xa1\x6e\x35\x92\xc9\xdc\x17\x56\x89\x2d\xf1\xc2\x44\xff\x48\x99\xa0\x1c\xdd\xe6\x2a\x8c\xc6\xc5\xa1\xa9\x47\x41\x67\xcb\x73\xc7\x6a\x39\xbe\xc6\x3e\x10\xbc\xbc\x19\xa3\x5b\x9b\x83\x12\x05\xe6\x0a\x8a\x59\x7a\xee\x64\x01\xb8\xb6\x52\xd5\xaa\x9a\x8e\x4a\xbd\x4f\x1f\x69\xaf\xc7\x40\x18\xa4\x06\xda\x31\xa0\x9a\x69\x0b\x68\x21\x6c\x22\xa4\x79\x62\x21\x6b\xf2\x8f\x55\xe1\x65\x92\x96\xb8\x0d\x21\x9a\xc4\x8d\x07\xf5\x70\x95\x71\xda\x2a\x98\x03\xed\x92\x86\x11\x05\xf0\x6b\x76\x50\x9b\xd5\x3c\x6f\x71\xd1\xbb\x33\x75\xb0\xea\x69\x2b\x8c\xe8\xd3\x05\x30\x0e\x52\x99\x89\xe7\xa5\x54\x70\xb9\xda\xbe\xe4\x38\x4f\xbb\x07\x49\x11\xc7\x33\x9e\x61\x3e\x85\x3b\x2f\xaf\x48\x2d\x7b\x41\x2f\x6e\x91\xc8\xde\xfe\x67\xc3\x64\x67\xaf\xe8\xce\xd4\x2d\x68\x25\x11\xad\xc7\x50\x7c\x64\x33\xc7\xe7\x6d\xb4\x4b\x46\xfe\xe5\x9c\xb0\x8e\xd0\x5f\x0a\x2d\x69\x2a\x96\xcb\xb5\x15\x7d\x70\x03\x1d\xd6\xe9\xd4\x0b\x94\x45\x9d\xfa\x47\x64\x10\xa5\x42\x3c\x8a\x3b\x3d\x72\x9b\x5a\x14\x95\x93\x36\x1b\xd6\x95\xff\x90\x09\xc9\x3e\x0a\xcb\x62\x41\x01\xb8\x14\x75\xcc\xe8\x3a\x1b\x57\x76\x26\xe3\xcb\x08\x00\x38\xf5\xaa\x06\xb4\x9c\xea\xb7\x4c\x53\x6a\x74\x28\xeb\x1c\x9b\xa0\xe4\xcd\x1a\x33\x7d\x14\xcb\x9e\xc2\xb5\x3c\x47\x8b\x75\x3e\x91\x09\xa8\x03\x04\xf2\xe6\xc3\x5c\x6a\x40\xa1\xa6\x51\x60\x10\x1d\xcd\xc6\x13\x68\x29\x35\xb4\xbb\xcd\xbf\x28\xa4\x6c\xd8\x7b\x1c\xaf\xbd\x09\x43\x50\xfc\xf3\xf1\xb9\x6a\x5e\x54\x72\x54\x43\x32\x6d\x8a\xdd\x8d\xd2\x06\x7c\xc0\x4b\x43\x1b\x90\x22\xb9\x3a\x01\xc3\x46\xbe\x3d\x89\x5d\xd6\x96\xda\x73\xc8\xe0\xb7\xa1\xf9\x80\x48\x6e\x81\xa9\x72\xd0\x10\xbc\x59\xa2\xea\xb2\x5c\xfc\x1f\x03\x47\x74\x11\x00\xf9\x5b\xfd\x8c\x12\x19\x79\x7f\xe2\x24\xf3\x74\x67\xd3\x57\x98\x1a\x2b\x6f\x03\x88\x10\x94\xdd\xb2\x7d\x9e\x90\xfb\xe5\x12\xe9\x6b\x34\xfb\xde\x3a\x82\x4b\x6a\x67\x07\x73\xcc\x58\xff\xb0\x61\xf0\x4e\x3a\xfe\xb2\xfd\x7d\x80\x46\x10\x0e\xa7\x5a\x01\x9a\xd2\x08\xc1\x32\x1f\x51\x7b\xa8\x52\x9d\x15\x1a\x5b\x4e\x26\x4d\x04\xf7\x6a\xb2\x34\x5d\x33\x51\x3a\xea\x0a\x73\x9c\xd7\xcb\xd7\x74\x1f\xfc\x87\xcb\x70\x49\xbf\x56\x5b\x11\x68\x9a\x6f\x81\x35\xb0\x7b\xe4\x7f\xe7\xf8\x3b\x87\x7f\x06\xad\x64\x0d\x97\xb7\x7f\x2e\xf2\xf1\x0a\xf1\x10\x23\x3b\x76\xc9\x45\x60\x5d\x87\x52\x30\x94\xeb\xd3\xcc\xec\x29\xc2\x7e\x9e\xca\xb9\x1c\xae\xe5\x7d\x5c\xc8\x9c\x20\xd8\xad\xfc\x3c\x8e\x5b\x00\xec\xcd\xa9\x91\x56\x5a\xc0\x02\x99\x1f\x5a\x3b\x75\xfc\xfd
\x1d\x43\xf4\x72\x12\xf2\xf8\x2e\xe5\xf0\xbe\x9c\x2f\x33\xbb\xbc\x01\x01\xe4\x6e\x50\x6f\xeb\xc9\x56\x04\x2e\xf7\x8e\xc8\x49\x58\x89\x2b\x99\x9c\xa6\x41\x97\xd8\xf0\x87\x4a\x12\x8f\x72\xf1\xf3\xd2\x47\x03\x5c\xfb\x81\x5d\x47\x5c\xea\x67\x25\xdd\x6e\x63\x24\x83\x40\xe0\x4b\xbf\xd7\x82\x0f\xae\xf0\x8c\x44\xc5\x51\x57\x43\xc2\x65\x0b\xc3\x27\x22\x5f\x72\x46\x5a\xd2\x2f\x7d\x3f\xdd\xfc\xba\x59\x34\x47\x9e\x05\xcb\x68\x27\xad\xf6\xb4\x05\xef\x65\x86\x44\x8c\x4e\xf5\x6e\x25\x9a\x59\x47\x89\x4c\x88\x13\x91\x88\x00\x25\x13\x86\x0a\x9f\x6a\x6e\x90\x7e\x11\x8a\xa6\x49\x17\x92\xb9\x0e\x39\xd0\x95\x00\x2b\x49\x8d\x12\x2b\x2b\xd0\x9f\xd6\x8d\xfd\xf4\x08\xb4\xc3\xed\x7f\x5f\xae\x71\xda\x49\xa7\x5b\xb1\x32\xcd\x77\x33\x65\x74\xd8\x7f\x0f\xf5\x04\x00\xb1\xc3\x6d\xf5\xf2\x2f\xa7\x67\xb5\xf4\xa3\x26\xae\x9b\xec\x21\x54\xa9\x83\x07\x33\xb9\xa4\x42\xaf\x1c\x50\xc0\x64\xa8\xed\x32\x90\xe1\xb5\x68\x57\xfd\x3d\x4c\x72\x51\x5f\xe6\x76\xe1\x79\xfa\x58\xa3\x0a\x0e\xe3\x5b\x70\x43\x04\xdb\x74\x0d\xd8\x07\xbe\xc1\xe7\x58\xd9\xaf\x97\x55\x3c\x34\xa0\xdc\xba\x91\x8c\x82\x9a\x36\xe5\x0b\x2a\x71\xdc\xed\x7b\x3b\xc6\x4c\xee\xb9\x97\x83\xda\xe0\xf5\x30\x30\x52\x69\x78\xd2\x3b\xb7\x94\x11\x39\xc4\xa4\xb6\xfc\xf7\x13\x3b\x74\x0b\x18\x81\x18\x03\x7b\x9c\x77\x57\xe7\xe5\xa7\x76\xa5\xcb\xe6\x85\x6d\xde\x2d\xd8\xf0\x4e\xf3\xb2\xf1\xb5\x6b\xdf\xbb\x9f\xb9\x32\x87\x5e\x53\xe6\xb6\x71\xe3\xdf\x07\x15\xd0\x69\xe4\x0a\xfe\xe1\x31\xca\x20\x36\x07\x04\x60\xce\x29\x0c\xba\xd2\x89\x78\x7c\x4a\x0c\x66\x72\xc3\xdf\xc8\xcd\x67\x2e\x07\x00\xed\xf3\xa7\xf3\x00\xd8\x6f\xdc\xd2\x7f\xb7\x6f\xf2\x60\x9d\x00\x94\x20\xea\x33\x10\x77\x42\x5e\x96\x16\x15\xda\xa4\x70\x43\xaa\xce\xc0\x56\x0b\x56\xef\xf3\xa3\x46\x1c\x75\xac\x7c\x65\x0c\xe4\x50\x99\xd4\xea\x37\x03\x71\x23\xb7\x63\xa0\x6d\x2c\x2d\xfb\x5e\x5e\x05\x8f\xfe\x49\xd9\x83\xfb\xa6\x54\x39\x70\x8e\x5c\x03\xd9\xa2\xc9\xc0\xdd\xde\x16\xc8\x49\xbd\xb5\xef\x81\xd5\x45\xba\x1d\x1e\xd0\x04\x21\x89\x22\x2b\x57\x54\x0d\x1b\x4c\x36\xe8\x4a\x54\xf2\x84\x6c\x97\x6c\x74\xcd\x46\xc3\x30\xee\xf2\x26\xd8\x3e\xbb\x4d\xfc\x49\xc6\xdf\x72\x30\x66\xcb\x53\x2a\x5f\xf1\xdc\x6c\xb3\xed\x23\x91\xcb\xbb\x85\x01\xa0\x9f\x47\x87\xa3\x1b\x1b\x3c\xc6\xd1\xb7\x43\xe8\xa4\xb7\xc9\x9a\x7a\x17\xc1\xac\x29\xa5\x46\x79\xe7\x9e\x3b\xcf\x01\xd0\xb3\xde\x56\xef\x33\x55\xb3\x5d\x82\x83\xc3\xb9\x13\x91\x2c\x31\xd5\xa0\x5a\x66\x85\xe3\x39\x0a\x93\x70\x3b\xc6\xa3\xa3\x4f\xad\x12\x22\x64\xf0\x8a\xc3\xf6\x3a\x61\xc3\xd6\x15\x15\xe5\x50\xd9\x4d\x6f\x83\xc2\x44\xbc\xd5\xe2\x19\x53\xba\x38\xef\x4f\xe1\xdd\xb3\xf8\xf8\x07\x9a\x76\x3e\x61\xa1\xd0\xbc\xab\xe3\x7a\xbf\xe7\xf6\xda\xc6\x96\x2e\x0c\x17\xde\x7b\x8e\x31\x39\x90\x6b\xd6\x88\xec\xd3\x5c\x41\xc3\x31\x96\x47\xf3\xe2\xad\xda\x84\xd0\x0b\x0d\xa9\xd1\x1d\x6a\x55\x3e\x06\x91\x80\x22\xf3\xdc\xd4\xd1\x94\x2e\xab\x90\x23\x2f\x71\x4c\x70\x0d\x23\xff\xea\x54\xba\xbf\xb0\x62\xee\xa7\xfe\x3a\x8d\x65\x4f\x9d\xa7\x73\x64\xad\x1e\x73\xbc\xcc\xfa\x5b\x7b\xf4\x7c\x31\x3d\xd2\x4f\x7a\x27\xaa\xaa\xf1\x30\x36\x9a\xac\xc7\x96\xa5\x8a\xb7\xc8\x61\xb1\x7c\x88\xb9\x44\xa5\x27\x71\x1d\x27\x34\x0a\x6f\x6a\xf9\x3c\xca\x09\x1c\x7a\xa4\xe3\xf1\xfb\x52\x39\x88\x5c\x32\xdc\xbe\xe7\x51\x04\x78\x71\xff\xc9\x71\x9f\xab\xd1\xb6\x1c\xa1\xb4\x6a\x4d\x5f\x2f\x81\x99\x19\xab\x8d\x65\x9f\x27\xc8\x86\xe4\xc9\x2d\x1c\x48\xff\xf4\x74\x2e\x20\xc4\xa9\xb2\x8b\x43\xd6\xfc\x56\xb9\x98\xfc\x65\x70\xda\x6d\x01\x52\xde\xa8\x95\xa2\x6b\xdf\x34\xb8\xed\x5c\x32\xdc\x11\xda\xbc\xd8\xd1\xe5\x91\xd3\x9f\x53\x67\xe1\x63\x54\xc7\xe5\x61\x16\x1f\x10\x70\xe7\xee\x33\x9e\x5d\x03\xcd\x4d\x2a\x8e\xa0\x44\x05\x62\xa3\xc7\xda\x37\xa2\x3a\x87\x68\x79\xf
4\xb2\x44\xf3\x28\xdb\xc2\xbb\xdb\x38\xb1\xc3\x2a\xe7\xcd\x9d\xa4\xf6\xf2\x61\x28\x27\xb3\x82\x4c\x17\xca\x72\x6d\x68\x9b\x70\xd3\x7b\x4f\x87\x42\xe6\x4d\xe5\x24\x67\x8c\x25\x8c\x2f\x96\x6f\xb7\xed\x6b\xb6\xfd\x99\x1d\x00\x30\x74\x60\xc1\x9a\xca\xd3\x42\x3c\x86\x98\x22\x3a\xc7\x31\x14\x86\x6c\x5e\x85\xd7\x6a\x6b\xcf\x16\xe9\x4a\x30\xf5\x37\x7f\x6e\x7a\x00\xaa\xc2\x55\xff\x77\xe4\x96\xae\x42\x33\x43\x6c\x64\xa5\xc1\x52\xc8\x7b\x07\xc6\x18\x4a\x0f\x91\x25\x1e\x24\x9f\x1c\x25\x5f\x75\x17\x36\x2f\xee\x20\x42\xb9\xc9\x90\xc3\xc1\xd0\xbe\xfd\x53\xfe\x56\x99\xa9\x9c\x84\xe5\xa9\xef\xca\x92\x7d\xcf\xf7\x68\xc4\xf4\x53\xa9\xf8\x9f\x82\x41\x41\x6c\xd3\xb9\xef\x64\x7e\x28\xf7\xc0\x76\xea\xee\x36\xfa\x60\x55\x41\xbc\x95\x7c\xf5\x1d\xe5\x24\x38\x8e\x95\x82\x0f\x5a\x73\xa4\xc9\x6f\x95\x6e\xc8\x97\xd5\xce\xa2\x64\xb2\x07\x1c\xfb\x5e\x21\xb9\x82\x4b\x5b\xd1\x75\x2b\x61\x65\x22\x6c\x8a\x6c\x14\xe3\x2a\xec\x8c\xe3\x96\x6c\xe3\xaa\x56\x27\xb0\x3b\x2f\xe5\x15\x25\xcd\x06\x2c\xbc\xe6\xed\xe3\x52\xe1\xb8\x9d\x25\xdc\xca\x45\xcf\x1d\x09\x70\x88\x9a\xde\xa4\x0c\x18\x51\xf8\x18\xab\x4d\x11\x2a\x37\x8f\x04\x61\xae\x73\xba\xe6\xbb\xdc\x74\x42\x2a\x3e\x4e\x67\xe2\x4a\x8d\xb4\x12\xc4\x29\xce\xe1\x50\xdc\xbc\x99\xd1\xaa\x82\x61\x88\xb2\x78\xc8\x4b\xae\xfe\x22\x08\xe1\xdc\x7e\xae\x29\x73\x57\xa2\xec\x9e\x94\x6c\xdd\x3d\x5d\x26\x35\x66\x73\xcb\xb6\x9f\xb3\xd8\x31\x39\x4f\x65\xa3\x73\x2e\x04\xa2\xab\x90\x29\x58\xf0\x80\x96\x89\xa5\xd8\xae\xa8\x18\x0f\x1b\x5f\x8a\x08\xeb\x8e\x6f\xb9\x36\x6c\x6a\x46\x7c\x23\xdb\x17\xc2\x38\x02\xef\xa5\xb7\xd4\xb7\x94\x3b\x91\xb3\xc6\x56\x47\x99\xb9\x73\x23\x3b\x9c\xf4\x63\xc2\xf2\x08\x4f\x1a\x18\xc3\x97\x28\x10\x21\xaa\x3d\xb7\xcd\xde\x63\x44\x81\xfb\x6c\xb6\xdf\x64\xcc\x5e\x5a\x08\x62\x6a\xd8\x39\x87\x7d\x26\xb7\xeb\x19\x27\xf4\xb2\xe6\x67\x16\x2a\x17\xc6\x43\x56\xd7\x82\xb6\xfa\x91\x05\x82\x14\xca\x45\xe7\xf3\x19\x86\xb1\x9d\x6a\xa4\x7a\x1c\xab\xf9\x13\xa2\xd2\xa9\x95\x0b\x78\x05\x34\xee\xcc\x42\x94\xac\x77\xc4\x74\xe0\xb5\x25\xcf\xfa\x5b\x76\xb2\x86\x33\x5b\x30\x97\xdf\x8c\xe8\x37\x14\xb3\x2f\xfe\xb7\xad\x76\x1d\xfd\x72\x1a\x27\xa6\x72\x11\x6d\x04\x66\x86\x80\xcd\x6c\x2e\xd2\xce\xf5\x61\xb5\xe8\x73\xad\x1d\x79\x38\x30\x9d\x4a\xf7\x5b\xd7\x69\xc7\x9b\xd3\xff\xa3\xd4\x05\x77\x59\x4c\x6f\xb0\xab\x0e\xd4\x3e\x9b\x53\x0a\x13\x94\xf5\x62\x58\x81\x47\x98\xd8\x19\x18\x78\xa9\x39\x05\xc8\xbb\x9e\xd7\x07\x24\xa7\x0e\xe6\x0e\x90\x6d\x8d\xc9\x4c\x2a\xbf\x27\x77\x80\xcf\xf2\x31\x35\x2e\x09\x4a\x65\xaa\x24\x1d\xe4\x32\x2e\xa6\x88\x74\xbb\x0f\xe4\x5d\x21\x5f\x78\x05\xeb\xfd\x4b\xf5\x55\x87\x7f\x0a\x71\xac\xf4\x7a\x09\x85\x79\xca\xce\x0c\x6c\x2b\xb1\x3a\x53\xbb\x91\xa9\xac\x10\x73\x36\x8d\xc2\xff\x16\x82\x09\x63\x58\x13\x6e\x6e\x96\x63\xc4\x1b\xa9\x81\x11\x5c\x80\xb1\xe9\xd9\x16\x16\x99\x22\x2b\x40\xf1\xd3\xba\x26\xb8\xf3\x3b\x8f\x18\x75\x8c\x61\x99\xb9\xf2\x14\x87\x7e\xbc\xa6\xb6\xfd\x26\x74\x50\xaf\xde\x03\x5d\x15\x35\x7a\x55\xd0\x62\x26\x05\x08\x28\x07\xb4\xf0\xbb\x52\xb0\xb1\xe3\xdf\x1c\xa4\xbb\x59\xb4\x29\x8c\x09\x4e\xc7\x2c\x7c\x2f\x0a\x56\x0a\x44\x73\xd9\x3f\x65\xcb\x85\xb9\x67\x89\x5c\x31\xc0\x7d\x2c\x66\xa9\x52\x67\x38\x1a\x59\xd5\x4c\xf2\x67\x54\x8c\x7a\x05\x15\x40\xd8\xd1\x2c\x4a\x69\xd9\x2b\x25\x30\x07\xdb\x6b\xd4\x39\x3a\xc0\xb5\xc9\x50\xb8\xcd\x4e\xbd\x40\xe1\x86\x95\xd3\x90\xce\x2a\xe0\x36\x3a\x3f\x38\x17\xca\x4b\xdc\xe4\xbe\xbd\xbc\x55\xd7\x3c\x4c\xba\x85\x92\x36\xfd\x74\xb7\xda\x03\xa3\xac\x9b\xab\x06\x8c\xe4\x95\x29\x30\x01\xb9\x13\x63\x3e\x29\x44\xf1\xbf\x66\x93\xdd\x53\x30\x57\xab\xf2\x0b\x9a\x9b\x12\x13\x2c\x39\x50\x27\x5c\x5a\x77\xb9\x05\x42\x24\x8e\xca\x4d\xf9\x4b\xc2\xbd\x3f\xaa\x
7e\x32\x0d\x64\x24\x9a\x9a\x34\x32\x2a\x1f\xcd\xb7\x5b\x5f\x09\x4a\x50\xb6\xa8\x84\x68\x54\xf5\x5e\x0f\x6a\x7c\xa7\x29\xc2\x43\x28\x3c\xe9\xf2\x6e\x4e\x4c\xa5\x9d\xdd\x0a\x7a\x7d\xa4\x0a\x51\xd5\x0c\xbc\x99\xa2\xfa\x6a\xc2\x68\xb8\x49\xd4\x70\x61\xf0\xd1\xf4\x07\xdd\x83\x14\x73\xb6\x74\x01\x4b\x4a\x5d\x70\x93\xd0\xbb\x9a\xd6\xf2\xb9\xf3\x7c\x49\x5d\x84\x87\x18\x93\xef\x21\x33\xc3\x78\x4b\x81\x04\xc3\x65\x4d\x89\xcd\x03\xc0\x54\xe9\x6c\x42\x6e\xb9\x6c\xbc\x43\xfd\x18\x29\x52\xbc\xad\x73\x04\x79\xf8\xde\x27\x58\x89\x29\x55\x92\x62\x84\x95\xbf\x31\x1e\xe9\xef\x5a\xa1\x4b\x47\x3d\x21\x76\x41\x02\x6a\x71\x10\x7d\xb9\xda\xdc\xa6\x6c\xbc\x5b\xe2\x9c\x3f\x44\x5c\xb8\xc2\x48\x13\x5d\xcf\x52\x84\x19\xb0\x3d\x60\xcf\xe2\x6f\xd4\xbd\x6a\x48\xba\xf7\x36\x26\xc0\x57\xe7\x3d\x07\x1f\x51\xc5\x95\x99\xe4\x2b\xc6\x87\x73\x51\x74\x78\x5c\x0d\x72\xe8\x98\x54\x80\x77\x6b\xc5\x3d\x4c\x1f\x35\x3c\x0e\x5e\x27\x00\xab\x61\xce\x1f\xe6\x40\x31\x3d\xcf\x9f\xf0\x8d\x25\x6a\xe3\x54\x78\xca\x13\x0d\x1f\xa4\xb6\xc2\xcb\xbc\xec\x5b\x76\x4a\xb9\x24\x24\x4f\xa8\xd6\xbc\x83\x4b\x27\x33\x9b\x09\x3e\x35\xf6\xb0\x01\x7f\x72\xb2\x9a\xc2\x4c\xe4\xaf\xf8\xbc\x40\x4f\x0d\xe5\x70\x3b\x5c\x3c\x2c\xa7\x5a\x3d\xbe\x73\xaf\x94\xc4\x72\xcf\xcd\xc2\xe9\x94\xfb\x8e\x88\xcd\xc0\x23\x0b\x63\x95\x93\x3e\x87\x52\xfb\x10\xc9\x30\x5e\x3e\x7b\xc3\xb9\x4d\xde\xfb\x2e\x18\x3c\x2c\x41\xd7\x0f\x90\x38\x6e\xef\x9e\x68\x69\xef\xef\xe8\x76\x2f\xb6\x9f\xa1\xcf\x28\x7f\x98\x42\xf8\x65\x9c\xfc\x4d\x2b\xcb\xf0\x9e\x06\x87\xc4\xa4\x0a\x9a\xa9\x6d\x1e\xdb\x75\x17\x28\xb2\xfb\x93\x00\x30\x10\x63\xa2\x35\x8f\x9f\x06\x29\x4e\x19\x16\xf2\xb8\x83\x3e\x31\x8d\x15\x2b\x9b\x2d\x30\xd0\x4e\x6c\x2d\x4c\xce\x25\xec\x10\x5b\x4e\x62\x37\x04\x77\x55\x02\x9c\xf9\x79\x74\x6d\x79\xd0\x18\x24\xa5\x50\x92\xe1\xf0\xd2\x08\x72\x68\x79\xa1\xa0\x66\xde\x3f\x87\x47\xe5\x51\x91\x1e\x40\x98\x2f\x76\x04\x34\xaa\x30\xc4\x8a\x9a\x3e\x82\xb6\xb3\x09\x26\x16\x79\xea\xd5\x34\x7f\x45\x8f\xef\x50\x22\x60\x7b\xc1\xf2\x55\xbe\xd5\x4d\x3f\xef\xb4\x05\xd5\x61\x72\x64\x95\x69\xd1\xe7\xa3\xda\xbe\x8e\x7c\xeb\xf9\xee\xef\xe0\xfa\xc6\x36\x0d\x35\x2e\xd5\x0c\x93\x7d\x1c\x26\xc3\xff\x08\x41\xd4\x69\x3d\x80\x5b\x7c\x88\x3f\x11\x81\x4b\xb2\x94\x3c\x7f\xf3\xc3\xdb\x84\xde\xcf\xb6\x7a\x1f\x75\x68\x6e\xe2\x53\x83\x14\xdb\xa9\x84\xef\xfa\x79\xad\xea\x1e\xbd\x41\x79\x74\x80\xaf\x30\x55\xaa\x7b\xf8\x4a\xb1\x9d\xb1\x9e\x35\xe0\x42\xcd\xab\x7d\x7f\x52\xeb\xde\x6c\x5e\x91\xa1\xf3\xf8\x95\xa2\xc7\x8d\xfd\x73\x7f\x47\xf3\xc9\x7d\xc6\x97\x74\x97\x4e\x2f\xdf\x69\x3f\x3d\x63\xd1\xea\x58\x4e\xa0\x78\x58\xfc\xb2\x02\x86\xcb\xa6\x61\xe3\x7c\xbc\x53\x49\x0d\x9b\x26\x9b\x28\xf7\xcc\x3f\x23\x37\x51\xe7\x76\xf8\xcf\x29\x64\x9a\xa0\x8b\x6a\xdc\x99\x99\xea\x4a\xc1\x14\x0e\x95\x10\x63\x55\xd7\x41\x57\xcf\x54\x23\xe3\xfd\x8e\xf8\x09\xf9\xd2\xc3\x29\x9c\x95\xe5\xc3\xe9\x60\x61\x44\x41\xd5\x2a\xc7\x6c\x4a\xe2\xd2\x95\x53\xf3\x98\x6c\x08\xfe\xa0\xf0\xd1\xc3\x14\x28\x55\x05\x98\x43\x56\xb5\xf2\xfb\x24\x34\xc7\x1d\x04\x2b\x15\x0d\xae\x24\xca\xc9\x6f\xbc\xa0\x7d\xad\x2a\x16\x0b\x41\x4a\xc9\x3d\x20\xff\x01\x15\xd9\x04\x1d\xd9\xf2\x14\x30\x90\x1d\x7b\xd1\x1e\x8a\xbd\x80\x90\x91\xe6\x5b\xfc\x4e\x7b\xf2\xdc\x02\x76\x75\xae\xd1\x56\x32\x94\xeb\xde\x8f\x1a\x2b\x7f\x19\x73\x77\xb4\x45\xa9\x4a\xc0\xe8\xf8\x05\x9b\xb0\x36\x5e\x41\xef\xd7\x01\x8f\x49\x06\x91\x11\x82\x10\x0b\x48\x5f\x89\x84\x1b\x13\x79\x88\x06\x97\x93\x7f\xe4\xd2\x1e\xc7\x25\x83\xaf\x50\x51\xe2\xbd\xee\xa2\x35\x35\x34\xe9\x69\xea\x47\xdd\x20\x4e\x88\x9e\x55\x36\x8a\xba\x8c\x09\x22\x36\xc6\x1f\x13\x31\xe2\xce\x01\xb0\x66\xf6\xb1\x25\xf7\x82\xd5\x88\xf3\xf4\x8f\x8e\xf4\xbf\x5f\x70\
xb0\x8f\xdb\xf5\x6f\x50\x78\x2b\x20\x19\x12\xc5\xfd\x35\x4b\x73\x22\x73\xae\x21\x61\x7a\x62\x3e\x18\xa1\x04\x90\xc5\x0a\xcf\x94\x21\xca\x51\x4d\x7d\xd5\x4b\x10\xaa\x0b\x7a\x2b\xe5\xe0\xe5\x10\xbf\x7c\x5c\xe3\x52\x12\x9a\xf0\x66\x46\xdf\x8f\x81\x62\x35\x2f\x6d\x27\x64\x01\x5e\xac\xe1\xfb\x35\x55\x44\x66\xae\x4c\xf3\xe8\x67\x94\xed\x36\x59\xa6\x1d\xfd\x55\x93\x50\xa6\x20\x97\x83\x85\x66\xed\xc9\x6f\x7b\x0f\xfd\x74\x1d\xb6\xe4\xa8\xb2\x86\xb6\x04\x68\x9d\x25\x91\xae\x98\xe7\xae\xef\x2b\x50\xc4\x54\x96\x5a\x26\x95\x83\x76\x92\x22\x01\xea\x0f\xb5\x27\x3e\x81\x81\xc6\xe9\xdd\x44\xd3\x62\xac\xb8\xb2\xfd\xcb\xdb\x88\x8e\x88\xed\xc7\x77\x28\x21\xa3\xb5\x7b\xc7\x6b\x55\x65\x33\x89\xd2\xd1\x0c\xc8\x17\xb5\x93\x6a\x04\x33\xb8\x72\x55\xb1\x23\x8f\xa4\xce\x8d\x4a\xb6\xc1\x20\xd6\x82\xd9\x7b\xeb\x06\xa5\xc5\x2c\x43\xa2\x50\x49\xc6\x09\xd0\x8e\x40\xa2\xd4\x5c\xed\x18\x3b\x96\x8c\xf6\x9a\xc1\xa1\xf8\x7e\x1f\xea\xfe\xcd\x9f\x83\xc2\xd1\x02\xaa\xa8\x4b\x66\xbd\x1a\x85\xc0\xf2\x4f\xe6\x40\x66\x57\x02\xd8\x9c\x00\x5e\x1e\xd9\x2f\xba\x99\xc8\x60\xfc\x24\xa2\x21\x00\xbd\x66\x32\x8f\x97\xdc\x3f\x36\x9c\x71\xdc\x73\xa3\xce\x0a\x63\xa6\x36\x4a\xf4\x2d\xb7\xaa\x9d\xb8\xec\x11\x0d\x69\xbd\xed\xd2\x85\xd3\x2d\x0f\x00\x87\xcb\x32\x46\xfa\x6d\xc8\x8e\xca\x11\xc4\x5a\xf8\x21\x42\xf7\xc3\xf8\xbb\xc7\x20\x70\xd0\x43\x55\x95\x0c\xff\x9d\x9e\x94\xeb\xe5\x57\xfd\x94\x8f\xfb\x26\xb9\x33\xd1\x0d\xd0\x7d\x2d\x80\x64\xe3\xaa\x9c\x7c\x55\x32\xe7\x6f\x82\x38\x94\xa7\x17\xf8\x1a\x2a\x4d\xab\x7a\x63\x7b\x96\x5f\xc3\x01\xf2\xec\xc9\xb0\xf0\xc7\x68\xb7\x06\xb7\x39\xba\xfe\x95\x0d\xb5\xb9\x57\x94\x78\x5b\x22\x9c\x9a\x55\xdd\x9f\xdc\x9f\x2f\x7b\x8e\x12\x0a\xd0\xe6\x52\x6a\x54\x63\x10\xea\xdf\xa7\x2a\x6c\xb2\x07\x54\x96\x22\x27\x0e\x86\x53\xcc\xb8\xe0\xd2\x54\x62\x99\x9a\x24\xe8\xf3\x72\x51\x12\x97\x2e\x1e\x8c\xe6\x5a\x73\x56\x9b\xf2\xec\x4a\x1b\x04\x3b\x5b\x0b\x25\xab\x10\xb0\xc8\x6a\x52\xf3\xd9\x47\x4f\x37\x46\x52\x33\x18\xf7\x10\x8f\x63\xf8\x19\x26\xde\xa7\x7d\x87\x56\x8e\x5e\xac\xec\x68\xe4\x7a\x59\x94\xbc\x5e\xdc\x53\xfc\x4f\x75\x8d\x1d\x37\x68\x51\x4c\x56\xdc\x7b\x18\xad\xe0\x38\xee\xcf\xbc\xe2\x1d\xbc\xd3\xa4\xc1\xde\xeb\xf2\xda\x7d\xb1\xab\x8b\x17\x03\x15\xf5\x0a\x88\x15\x49\xbb\x40\x5f\xd1\x21\xd1\x32\x66\x62\x25\xd5\xd9\x07\xe8\xa3\x31\xe6\x6c\x9f\x94\x1a\x56\x89\x03\x4d\x94\xa0\x1d\x2d\x3a\x3f\x19\xda\xf8\x3e\x0c\x2d\xd8\x4e\x13\xf1\x36\x75\xaa\x76\x4b\x11\x93\x99\x57\x55\xaa\x7a\x28\xfa\x64\xf7\x42\x66\x25\x4c\x5d\xd7\xbb\x8d\x91\x27\xf7\x31\xe0\x76\xe3\x28\xf7\x38\xbf\x86\x9b\xe8\xbd\x56\x49\xc8\x10\x92\xfb\xef\xcc\xc7\x77\xa7\x55\xd1\xc4\xa0\x03\x45\x1b\x50\x56\x2b\xe3\x0e\xcd\x1e\x96\xe5\x8c\x24\x46\xf8\xe7\x79\x7c\xb0\x66\xc8\x92\x68\x59\x50\x12\xe7\x35\x22\xc9\x5d\x7c\x8f\x25\xa9\x81\x81\x3c\x9b\x83\x91\x9e\x56\x76\x9e\x62\x70\x7f\xa3\xa5\x20\x29\xf0\xb1\x69\x42\x9e\xf7\x25\xe0\x5b\x0c\xd6\x26\xe8\x48\xee\x8a\x0b\xb9\x5a\x16\x3c\xc3\xa1\x56\x2e\xb7\x5e\x85\x71\x8f\x07\xa1\x4b\x6e\xeb\x00\x04\x49\x0a\xe5\x5c\x91\x34\xed\x55\xd2\x57\xce\x68\xe3\xfc\x8f\xce\xf2\x70\x06\x3a\xa4\xb6\x38\xf8\x70\x32\x7a\x13\xed\x0c\x7e\x5f\x0f\x9b\x37\x3e\x90\x0a\x39\x8f\xea\x89\xe8\x0b\xc2\xec\x22\x3e\x4b\xc9\x99\x0e\xdd\xaa\xfd\x1a\x8a\x12\xf0\x91\xf5\x7c\xe2\x3e\x32\x43\xbc\xf7\xce\xb2\x4a\xd6\xcb\x7c\x09\x2a\x80\x6b\x38\x88\x75\xdc\x4a\xdd\xc7\xc3\x90\x77\x58\xf9\x71\x1e\x32\xdf\xf5\xd7\x40\x94\x61\xb9\xbc\x63\x8f\x1c\x6d\x5b\xc9\xbb\xf9\x70\xef\x7a\xd9\xc1\x80\x01\xca\x05\xd1\x83\x4a\x6f\x49\xae\xdd\xdb\x2b\x72\x70\x44\xea\xee\x47\x39\xf5\x3c\x42\xa9\x1f\x9a\xee\xce\xf9\x6b\x5d\xd1\x14\xe5\x92\x94\x5c\x66\x49\x8e\x77\xed\x60\x69\x9f\x4b\xa2
\x33\x59\xba\x97\x72\x0d\xea\x76\x94\x04\x83\x88\x5b\x32\x46\xa4\xf9\xba\xcd\x56\x4c\x7a\xb3\x46\x85\xf9\x5e\x66\x82\x21\x92\x56\x4c\x50\xf8\x26\xba\xfa\x3d\x7e\xce\x27\xdf\xcf\xc7\x49\xc8\x3b\x84\x95\x06\x05\x27\xb5\x65\xdd\x4d\x2d\xdf\x95\xf2\x2f\xea\x13\xaa\xb3\xa9\x4d\x74\xfb\xd6\x75\x60\x3e\x34\x6c\xee\x83\x00\x00\x23\x24\x12\x2d\x78\x88\xab\x40\x19\x7e\x44\xff\xe7\xc0\x71\xa9\x96\x28\xfd\x6c\x0c\x9e\xfd\xd8\x72\xcc\xde\xdb\xe1\x97\xbe\x29\xd4\x48\x0d\xd9\xc1\xa2\xac\x93\x3c\xb5\xd6\xab\x79\x52\x72\x21\x62\x24\xf7\x3a\x2b\x90\xe5\x75\xd9\x34\x5f\x00\xa6\xa3\x96\x98\xe5\x8e\x84\x56\x77\x87\x67\x8c\x56\xdf\xbc\xc0\xb6\x5c\xc9\x19\x20\xbe\x45\x74\x79\x3a\xa4\x94\x74\xe6\x54\x9c\xd1\x25\x0f\x58\xa0\x73\x2f\x99\x3c\x56\x3e\x0a\x42\x4b\x64\x41\xa6\x74\x85\xf0\xac\x03\x0e\xe6\x20\x61\x50\x41\xa7\x01\x77\x55\x26\x09\x99\xac\x56\x0b\x1a\x6b\xdf\x70\x05\x88\xc3\xf8\x04\x18\x61\x1e\x6e\xb1\x22\xb2\x29\x54\xc9\xe4\xc2\x79\x52\x39\x62\x42\x4f\x92\x1c\x05\x56\xa3\x8b\xb8\x95\x4b\x15\xc3\x5c\xd2\x6b\x34\x29\x69\x29\x51\x06\x56\x1d\x5a\xcf\xbd\xe1\xb9\x7b\xaf\x43\x87\x52\x69\xbd\x82\xd8\x3b\x9d\x4f\xee\x13\x24\x40\xf5\x13\x9b\x71\x3e\xb2\x01\xab\x2c\x68\x0b\x12\x79\xf9\xfb\x0f\x51\xf9\x83\x01\x30\xa5\x4a\xe9\x5c\x57\x5e\xe0\x50\x29\x1a\x23\x8a\x49\xe1\xf1\xe6\x4c\xa7\xc2\x61\x81\xe6\xb8\xdf\xab\x6e\xad\x70\x1c\xc6\x43\x96\xa3\xbd\x1a\xa7\x61\xaa\x0b\x6c\x91\xfa\x30\x8c\xa9\xa5\x10\x14\x09\xb4\xd4\x01\x2f\x77\x6e\x61\xd4\x1b\xf9\x51\x12\xb7\x15\x15\x62\x85\xad\x95\x6b\x86\xb2\xbc\x49\x77\x7c\x71\x63\x10\x6c\x40\x10\x84\xc4\x0f\xed\xae\x07\xd9\x68\x37\x26\x56\x02\x84\x60\x0e\xf5\xc7\x05\x23\x82\xa0\x96\xed\x33\x82\x61\x05\x7b\xb9\xc5\xa5\x69\x93\x39\x73\x01\x1b\x2a\xd5\xe2\x76\xb6\xd5\xc9\x95\x8e\x69\xfe\xeb\xfb\x65\x88\x3e\xa8\x97\xb7\x4b\xd6\x1a\x4b\x18\x85\x8d\xce\xbe\xbb\x60\x4e\x76\x91\xbe\x83\x75\x87\xe2\x25\x09\x3e\x49\xae\x81\x9e\x01\x86\x41\x0d\x8b\x77\x29\x0a\x2a\x78\x36\x9b\x1c\x18\x3d\xc4\x68\xa9\xdf\xca\xc0\x76\xb6\xf1\x0f\xb4\x67\xa3\x51\x73\x39\x0c\x20\x08\x70\x53\xee\xd5\xef\x36\xdd\x08\xed\x61\xc5\x4f\x58\x59\x0c\x42\xc0\x40\xb8\xe3\x8f\xfb\x4e\x81\x2c\x81\x5e\xc7\x1c\x5a\xdb\xa4\x55\x6d\x4e\xce\x6c\x36\x6c\x02\x37\x1c\xb6\xf5\xbb\xe5\x80\xee\x75\xdb\x4d\x7d\xf4\xe3\x09\x53\x8b\xd3\xb6\xd9\xce\xe8\x11\x68\xa9\x6b\x86\x85\x73\x74\x9a\x07\xdb\x12\x41\x13\x21\xd4\x0e\x42\x11\x8c\xeb\x6e\xab\x72\x9d\xa1\xcd\xdd\x1b\x94\x0c\x82\xb2\xbe\xed\x92\x8a\xc4\x0e\x4c\xa5\x80\x6e\xac\xf6\x49\x0b\xe5\x12\x81\x42\x57\xe3\xcd\x12\x3b\xcf\x4e\xd9\x58\xf5\xbc\xfa\xa0\x26\xbe\x38\x6a\xc4\x02\x42\x34\xd6\xd5\x41\x39\xa4\x88\x5a\xa2\xa0\x54\xb7\xbf\x7c\x16\x8f\x1c\x7e\xa5\xe3\x31\x79\x10\x5e\x22\x8f\x43\xb0\xd8\x98\xa8\x49\x15\xab\x71\xb5\x2b\x81\xa6\xb1\x18\x6f\xc3\xcb\xf4\x34\x07\x9f\x91\x59\x8b\x10\xd5\x83\x39\xc6\xd0\x57\xd3\xd3\x71\x6b\x55\x68\xca\x63\x41\xc9\x5f\xb9\xa2\x85\x7c\x61\x97\x60\x47\xe9\xe1\x51\x33\xd7\x7d\xf2\x3a\xc7\xce\x17\xaf\x57\xcb\xf1\xab\xf9\x6d\x14\x2f\x2e\x1c\x34\xde\xfd\x0e\xfa\x49\xa6\x5a\x25\x7c\x9f\xd5\x74\x15\x32\xe1\x5e\xc2\xa1\x41\x9f\x98\x98\x9e\xa6\xb4\xeb\xf8\x13\x0e\x5c\x9f\xe2\x26\xfc\xca\x36\x76\x06\x84\x7f\xcb\xce\x6c\x3d\x8d\xef\xa3\x76\xbe\x6f\x81\x1a\x6f\xf9\xdf\x54\x65\x6f\x36\x29\x63\x73\x49\x74\x84\x45\x51\x8e\xcb\x83\x7b\xcb\x3f\x01\xaa\x00\xe5\x82\x91\xbc\x24\x43\xd9\x87\x6d\xa9\x7b\xff\x63\xd8\x5b\xc7\x29\xb9\x8f\x6e\xb4\x2e\x25\xa5\x9d\x85\x4a\x9e\xf7\x4f\x79\x63\x93\x79\xf4\x59\x7e\x49\x33\xca\xd8\xf6\x63\x67\xc1\x4b\x5e\x3a\x7a\x78\x7c\x25\xb4\xf2\x42\xf1\x92\xbc\xac\x24\x81\xa6\x29\x78\x02\x93\x54\xbf\xa6\x51\xae\x45\xa1\x16\x59\xea\x52\xe5\xa2\xe5\x0f\xc
6\xe2\xe7\xd2\x8c\xf9\x29\x05\xb8\x57\xa5\xc6\xba\x82\xb8\x32\xcc\xfe\x45\xa0\xef\x77\x60\xf6\xb9\x37\x7c\xd6\x30\x02\x1b\x02\xa3\x82\xb8\xdc\x63\xdc\xa2\x12\xb6\xb1\x85\x02\xb4\xbc\xd4\xcf\x67\xac\xbe\xf7\x93\x06\x6d\xdb\x64\x4c\xc0\xb8\x49\x4b\x81\xd5\xec\x46\xf2\x05\x9a\x2b\x82\x9c\x03\x45\x00\xde\xda\x2d\x4a\x5b\x05\xe3\xb0\xe2\x08\x2d\x97\x9b\x66\x6d\x15\x96\x03\x94\x06\x53\x5a\x62\xd4\x95\xa3\xf6\xc7\xa9\x92\xd7\x6d\x33\x29\x06\x9c\x6e\x12\xf4\xff\x55\x05\xd4\x1e\x1e\xa6\xa1\x2a\xcc\xfa\xd8\x02\x47\xfb\x85\xd1\xc2\xfd\x3e\x2a\x4e\x44\xcc\x86\x3b\x05\x2e\x4b\x87\x6e\x1f\x02\xf1\xa9\xcb\xde\x66\x04\x23\x9b\x93\xa2\xbf\x17\x32\x30\xa8\xc3\x8f\xba\x6c\xbf\x44\xb3\xb1\x77\x0c\x24\x90\x35\x66\x2d\x79\x8d\xcd\xb5\x25\xd4\xd5\x1d\xb4\xa0\x76\xa9\x66\x8c\x16\x60\x2b\xd0\x3f\xa7\x8d\x81\x01\x4c\xf6\xae\x35\xd3\x84\xe8\x02\xc7\x60\x9b\xe6\xfd\xb1\x56\x2d\x4c\xa5\xdc\x2e\x0c\x42\x71\x03\xb6\x8d\x7e\x9a\x77\x3b\xe3\xd9\xa1\x77\x2c\x85\x0a\x78\x04\xaa\x9b\x31\xd2\xc9\x9b\x89\x75\x28\x39\x1c\x75\x79\x09\x8a\x6d\x76\x53\xcc\x05\x88\x90\xe9\x30\x1d\xa0\xdd\x31\xec\x8d\x71\x7f\xe7\xcc\x1e\x46\x60\x01\x6f\xe6\x96\xa9\x11\x78\x4e\x8f\x24\x2a\xb6\x8a\xfb\xf3\x30\x6e\xe4\x5c\xce\x9a\x9c\x9f\x0b\x7a\xec\x83\x1b\x83\xe5\x45\xcc\x1f\xe5\x90\x4e\x6f\x83\x1a\x1e\x1a\x1a\x18\x86\xe0\x67\x57\xed\x54\xbe\x3c\xc4\x95\x69\x18\xb3\xf9\x0e\x0b\xeb\xb7\x49\x23\x34\x20\x52\x8e\x6c\x94\x71\x17\x77\x4a\x3a\xdf\x2b\x50\x37\xa7\x98\xc3\x65\xb5\x02\xb3\xe0\x90\x8c\x04\xd7\xc1\x6f\xf9\x54\xd7\x9a\x85\x65\xcc\xfe\x04\xdd\x49\xe7\x3d\xb5\x04\x2b\x02\x11\x69\xb2\x23\x2a\x04\x70\x8c\x36\x84\x03\xf5\x6e\x37\xf3\x9c\xa7\xc7\xc8\xf1\xd6\x96\x67\x72\x71\xfd\xa1\x6c\x71\x1c\x20\x4b\x1e\x45\xab\xa1\xd9\x92\xf3\x65\x4b\xfc\xaa\xec\x86\xf0\xc7\x4d\x7c\xb5\xaa\x08\x7c\x9f\x7c\x1d\x6e\xb5\xa7\x03\x49\x7d\xb7\x3f\x56\x7e\xcd\xab\x69\x79\xa6\xd3\x38\x7f\xd6\xd1\xb2\xe6\x5c\xc5\x34\xfd\xe5\xac\xca\x71\x65\x60\x1c\x7d\x64\x80\x19\xdb\xa0\xad\x94\x03\x9a\x66\x11\x9e\x4a\x6e\x47\x08\x33\x2f\xe7\x56\x59\x6d\x9a\x87\x9b\xaf\xba\xf5\x7f\x18\x3b\xf3\xc2\xe4\x90\xe1\x73\x1e\x08\xee\x27\x5a\xd1\x93\xc5\xed\x69\x33\x41\xe2\xd7\xf4\xbc\x29\xdb\xcb\x87\x93\x10\xe5\x4b\x3f\xb9\xea\xde\xfe\xc8\x0b\x0e\xd0\x92\x7b\xe8\xaa\xde\xc7\x29\xa0\x56\x32\xda\x61\x40\x25\xf6\xf5\x08\xfc\xed\x70\x6a\xe8\x4c\x04\x82\x0d\xc4\x61\xec\x44\x3f\x89\x5e\x02\x64\x8f\xd0\xa7\xda\x82\xa1\x77\xdf\x20\xf9\xc5\xc7\x72\x0c\x07\xee\x79\xa2\xf3\x81\x03\x9d\xdc\x39\xca\x82\x5f\xe7\x9d\x82\x68\x0a\x7d\xb0\x53\xe5\xeb\xf6\x32\x35\x53\xe8\xe3\x57\xd4\xaf\x23\xd4\xe2\x15\xf7\xa8\xd6\xc8\xbc\x74\xa2\xa6\xd1\x70\x33\x4d\xe1\xc6\xac\xb4\x6f\xab\x51\x64\x27\x85\x9f\x71\x2f\x11\x7b\x39\xae\xb9\x87\x65\x87\x2e\xb5\x6d\x59\x87\xab\xe3\x37\x41\xb7\xab\xba\x7e\x8f\x9e\x6d\x95\x2c\xb5\x82\x9f\xc9\xbc\x3b\x1b\x87\xd5\x52\xa2\x84\xcf\xc5\x7a\x0d\x3d\x2a\xdd\xa0\x5d\x34\xf6\x94\xe1\x31\x9d\xfb\x95\x6e\x96\xec\xc0\x57\x14\x54\xb8\x77\x66\x5d\xca\x2d\xd1\x75\x17\xca\xa7\x39\xa7\xa7\xc2\x99\x52\xcb\x14\x9a\x35\x40\x3d\x03\x46\x48\x2a\x11\x6c\x06\x62\xbc\xd0\x48\x10\x22\x59\x36\xe5\xcd\xb9\x4a\x9a\xc5\xaa\x4c\xd3\x39\xd3\xb4\x2a\xe9\x63\xf2\x14\x94\x86\x2d\x41\x7d\x67\xe2\x88\x87\x7f\xfb\xa0\x2f\xc3\x1d\x43\x9b\x9c\x14\x78\xfd\x91\xaf\x72\xa9\x45\x58\xeb\x0f\x14\x74\xbb\xc6\xa2\xac\xd3\x2b\x39\x62\xef\x7b\xf1\x66\xe5\x25\xef\x80\x05\xb6\x73\xee\x38\xa6\x60\x50\x1b\xc2\x53\x04\xb1\x2c\x57\xc6\x44\x15\x03\x65\x1e\xaa\x4c\xb3\x52\x58\x0f\x7b\xdf\xb0\x1b\x90\x63\x4a\x5f\xee\x21\x9e\x5f\xc9\xea\x75\xff\xa6\xf4\x34\xaa\xe2\xa3\xc2\x1f\x8e\x64\x38\xe0\x91\x58\xd7\x7f\x16\xa9\x2d\x68\x5d\x35\xd3\xf6\xb7\x54\xe1\x
61\x69\x3a\xb9\xcd\x40\x09\x3e\x33\x72\x6b\x5a\xea\xfc\x31\xe3\x83\xe1\x43\x39\x08\x15\x14\xc2\xeb\x15\xf4\xa0\xf8\x15\x49\x65\x32\x98\x60\x4f\xe0\xf3\xdd\x94\x15\xf2\x7c\x0f\xc8\x9e\x21\x54\xc6\x4e\x2d\x96\x71\x76\x95\xbb\xc2\xdf\x22\xf2\xc1\x9f\x6a\x1f\x00\x55\x8b\x4a\x84\x5d\xf3\xbb\xab\xaa\x5f\x46\x60\x67\x69\x2c\x2b\x9a\xb5\x21\x31\x8f\xce\xab\x46\x21\x4b\x1f\x71\x62\xbd\xe0\x59\x98\x81\x64\xeb\xf2\xa7\xca\xab\x2e\xd7\x3e\x03\xe1\x62\x63\xb5\x92\x78\x95\xe7\x99\xac\xaf\x22\xa3\xe3\xfa\xdc\xbe\xba\x08\xc2\x6f\x2b\x06\xbe\x6f\x41\xa2\x12\x59\x03\xa1\x5f\x91\x10\x4a\x96\x86\x60\x04\xab\x83\xed\x8d\x50\x56\x07\x48\x42\x83\xbb\x19\x26\xac\x4e\x33\xf3\xa9\x54\x66\xaa\x58\x25\x21\xca\x45\xae\xe2\x95\xfb\x63\x6a\x1b\xd5\xfc\x07\x37\x2f\xbb\xe0\x04\x8e\x5f\x94\x58\x48\x70\x88\x59\xcb\xf7\x5d\x2e\xbd\x24\x91\xa9\x21\x4f\x71\x3c\x4b\xea\xc1\x59\x64\x9a\x67\x7f\xda\x43\x9a\x68\x9a\x73\x23\x4a\xcb\x0b\x08\xdb\xf2\x23\x7c\x4c\x52\x38\x29\x05\x7d\xf5\x42\x2e\xdb\xea\x13\x12\x01\xbb\x84\x0c\xd0\xdd\x6c\x1d\x57\x01\x7d\xd5\xf6\xe2\x74\x38\x30\xa6\x6f\x27\x90\xf8\x90\x84\x4a\x4d\x27\x36\x58\xff\xa6\x6d\x2d\x28\xb0\xce\x56\xca\xd2\x7c\x3b\x4e\xfe\x63\x8c\x4c\xd5\x9c\x9b\xf4\xa5\x2a\xa0\x54\x49\x4f\x97\x5b\x33\x42\x36\x16\x18\xa6\x24\x97\x22\x24\xf4\x68\xcb\xa5\x46\x86\x8a\xa5\x02\xb8\xc2\xf4\xcb\x55\xf9\xa6\x32\x03\x2a\x01\x06\x80\x97\xa3\x1d\x40\x50\xad\xe1\x13\x18\xb2\x69\xc8\x31\x85\x3f\x4e\xe2\x28\xd3\x58\x56\xb9\xe4\x94\x2b\x8d\xcc\x28\x5b\x70\x0e\xb8\x6b\xdb\xaa\xc7\xce\x02\xdf\x95\x52\x3b\xc8\x08\x3c\xec\xb2\x34\xae\x2e\x79\xbd\x41\xbe\x94\x95\xf6\x4b\xaa\xea\xaf\xb2\xd0\x34\x10\x8c\x8a\x9e\xd5\xba\xbd\x6b\xf5\x58\x49\x3a\x23\x26\xe3\x26\x47\x4b\x45\xf6\x2f\xfa\xc5\x31\xb4\x6c\x9b\x74\xfa\xc2\x6b\x00\x2a\x13\xce\x8c\xf0\x31\x96\xf3\x81\x4d\xe9\x90\xdf\xff\xe3\x90\xa5\x76\x5c\x6a\xac\xed\x45\x00\xac\x80\xaf\xf6\xb0\x7c\xab\x34\x7b\x5b\xca\xff\x5d\x95\x4f\x5e\xa3\x9c\xe0\x97\xfe\xd7\xf4\x95\x13\x68\x2d\xcb\x50\x9e\x93\xe2\xcc\x61\x9a\xeb\x81\xd9\x72\x00\x90\x21\xf4\xff\xfc\x12\x88\xd3\x44\x26\x86\x06\xfb\xdb\x02\x9e\xef\xb2\x73\x85\xea\xdc\xdf\xc6\x80\x9d\xa8\xf8\xf6\xe3\x32\x74\xe4\x70\x3a\x96\x01\x0e\xaa\x5a\xd4\x37\xd6\xf3\x23\x67\x82\x3b\xa2\xd2\xae\xed\x51\x52\xca\xac\x9f\x0e\xe7\xfd\xae\x80\x09\xf9\x5d\x9b\x43\xf7\x1a\x26\x02\xbf\xea\x07\xd2\x5d\xb8\x2a\x33\xd6\x04\x34\x7b\x49\x63\x21\x75\xe1\x1f\xbc\x05\xfe\x1b\x00\x55\xdc\x94\x28\x0f\x29\xf9\x64\x7b\xbb\x29\x86\x39\x93\x36\x10\x27\x39\xa1\xbb\x6b\xc5\x82\x5a\x69\x97\xc3\x3a\x74\x47\x59\x73\x3a\x6d\x44\xc3\x5e\x51\xb4\x64\xf0\x81\xd6\x5b\x7b\x73\xa4\x34\x56\xae\x5e\xc0\xa8\x49\x55\x6c\x4f\xf6\x14\x5d\xce\x43\x57\x4c\xb5\x97\xe6\xfa\xb1\x65\x3e\x11\x46\xef\x8a\x9b\xbc\x4a\x60\x17\xa6\xc0\x28\x16\x7a\xf9\xa4\xcb\x55\x91\x0f\x4d\x80\x10\x2b\x2d\x55\x3d\x53\x1a\xf5\x16\x63\x64\xe3\x6b\xb6\x30\x86\x82\x0a\x5b\x28\x13\x05\x5e\x4d\x78\xb4\xc3\xb1\x34\x3c\xa4\x26\x94\x4a\xf6\xbf\x9f\x42\x52\x5e\xd6\x8f\x31\x41\x5a\xac\xae\x02\x25\x84\xe9\x0a\xa9\x3e\x5c\x0c\xd0\x77\x5f\x14\x05\x7a\xb0\xb3\xa8\xaa\xfb\xac\xfa\x5b\x43\x49\xe7\x71\xd2\x2b\xca\xce\xab\x5a\xa5\x13\x6d\x6e\xeb\xd0\x41\xce\x7b\x37\xba\xb5\x97\x60\x62\x1d\x5e\xd4\x12\x0c\x9a\x25\xdd\x66\x72\x20\xf4\x1b\x74\x1d\xa2\xda\xc6\xa7\x45\xff\x8a\x3d\x48\xaf\x04\xc8\x2b\x28\x2e\x8f\x54\xe3\xdf\x9b\xed\x44\x2e\xed\xe8\x03\x56\x8c\x78\xea\x55\x62\xe5\xe5\x32\x7c\xf7\x9c\xf0\x4a\x29\x75\x87\x63\x56\xff\x04\x28\x33\x01\xf8\x89\xb6\xa9\x49\xe7\x2b\xba\x45\x50\x78\xd8\xea\x10\x89\x7c\x44\xd6\xee\x16\x9a\x70\xf2\xea\x89\x8e\xb4\x29\x03\x99\x9f\xe7\x1a\xd7\x38\x30\x12\x23\x59\x4f\x9c\x4b\x32\x55\xf1\xff\x83\
2\xa3\x7f\xda\x4d\xc8\xdc\x5b\xb5\x16\x32\x09\x1c\x1a\x5b\x52\xc4\xee\xa0\x87\x0a\x6f\x07\xfe\x60\xf7\x51\x0b\x66\x30\x43\xd4\x18\x17\x05\xe6\x8e\xf5\xef\xc5\x13\x5b\xd5\x82\x31\xad\x52\x5c\x83\x91\xe5\x1a\x21\x08\xb5\xb5\x50\xed\x7f\x99\xf7\x2f\xde\x48\xae\x86\x95\xe8\x98\xf7\xe1\xfb\x3d\xb2\x61\x07\x1f\x83\x66\xaa\xbe\x3f\xec\x57\xc5\xd2\xd3\x2f\x6c\xee\xf3\xe1\xa5\x58\xfc\xb8\xb4\xef\xe7\xce\x36\xe2\x34\xba\x8a\x46\x7a\xa9\xff\xbb\xa0\x9e\x8c\x8e\xde\x2d\x5b\x99\x6f\x5f\xab\xcf\x82\x8e\x2e\xb7\xee\xa7\x15\x8f\x65\x25\xdc\x6c\x1a\xf1\xb6\x36\x2e\xa3\x65\x26\x2a\x2e\x9e\xbc\x58\x3c\x47\x0b\x1a\x57\x16\x12\x5d\x54\x5e\x2e\x33\x4f\xe2\x40\xe9\xe3\xf5\x28\x70\x8a\x6e\x6c\x1a\x3d\x1b\x4e\x60\xf6\x2a\xaf\x7e\x48\x1d\x49\x0b\xea\xa3\xf3\x14\x0e\xf7\xad\x7b\xe0\xff\x61\xdf\x77\x85\x93\x5f\xee\x14\xe9\x78\x34\x6e\xc2\x27\x8b\x13\x25\xc7\xe3\x7d\xae\x4a\x4e\x7b\xaa\xb5\xee\x64\x81\xdf\x12\xc5\x04\xe4\x1d\xbc\x75\x93\xb0\xdd\x02\xd5\x6e\xcb\xf4\x67\x3e\xcb\x3e\xc3\xfc\xb2\x6c\xf1\xa4\x9d\xdb\x0e\xe0\x3d\x87\xe9\x29\xeb\x47\xec\xbd\x6a\xd8\xe5\x67\x2f\x90\x7c\x5c\x21\x27\x58\x80\xda\xa0\x02\xd7\x28\xe0\x36\xdb\x2b\x37\x9e\x84\x54\x51\xf3\x89\xcb\x69\x34\x2f\x5f\x35\xcf\x78\x15\x3c\x67\xf5\x3d\x07\x71\x26\x02\xd8\xd2\x21\xae\x4f\x9b\x2f\x95\x69\x9b\x1f\x14\x37\xde\x54\xee\xd8\xbd\xe2\x8a\xce\xa5\xef\xc5\x7c\x51\x26\xda\xc7\xb9\xae\x4a\x76\x75\x38\x6a\xb9\x7d\x96\xb9\xa4\x63\x63\x8f\x9d\xa7\x88\x72\x2b\xdb\x00\xb8\x37\x39\x83\xaf\xec\x15\x68\xbd\xb7\x8b\x56\x8c\x6c\xc0\xa1\x3a\x9b\x36\x6a\x24\x97\x52\x27\xc9\x7f\xb9\x81\x88\x7f\x8e\xea\x25\x47\x00\xac\x28\x99\x94\xeb\x55\x33\x0c\xcc\x4c\x15\x23\x2e\x77\x3c\x5d\xfd\x64\x01\x00\x76\x04\xdb\x80\x9c\x6b\xa4\xd7\xe8\x1f\xe9\x35\xba\x7c\x6e\xca\xee\x21\x99\x9d\xb3\xb9\xfa\x59\x9e\x30\x9e\xd2\x50\x15\x47\xd9\x0e\xb3\xdc\xb0\x4d\xaa\xfd\xd9\xb7\xdd\x0a\x24\x9a\xa6\x57\x17\x52\x34\x07\x1d\xbc\xfe\x14\x1e\x29\xab\x76\x1a\x8d\xdd\x27\x6d\x9a\x52\x57\x66\x7d\xf7\xef\x5b\xc1\x51\x2a\x68\xa8\xe5\x74\xff\x07\x56\xa7\x2e\x83\x1a\x7d\xc8\xbf\x48\x07\x2e\x53\x0e\xdf\xd2\x5f\xf0\x50\xfe\xcc\xa9\xf8\x9c\xf6\x8b\xfc\x60\xaf\x16\xce\x43\x9f\xbc\x79\x13\x07\x4d\x13\x6e\x40\x60\xd7\x40\x22\xba\xb7\x3b\x76\xef\xc7\x67\xf9\xc7\x3f\x00\x26\xe0\xbf\x51\xaa\xa0\xa5\x72\x6a\x4c\x28\xd6\xb0\x14\x58\xb0\xe9\xbf\x5d\xa6\x65\x37\x74\xff\x69\xf4\x7f\x63\x65\x03\xd7\xbd\xcb\xa7\xcc\xa6\x14\xae\x69\xc2\x04\x6a\x09\x03\xd4\x55\x94\x70\x40\x9f\x59\xcf\xb7\x8b\xdd\x52\x5f\xa2\x8d\xf8\xf6\xb2\xe9\x10\x6d\xe7\x9c\x73\xd4\x3b\x3c\x75\x3d\x5e\xf6\x0a\x15\x1f\xe8\x9b\x38\x27\x1f\x78\xbf\x12\xe4\xc6\x59\x38\xc9\x31\x72\x71\x2a\xc5\xcf\x36\x88\x0e\xc8\xee\xe1\x2e\x7e\x02\xc6\xaa\x6f\xca\x35\x7e\xaa\xa8\x71\x50\xee\x4d\x42\xbd\xa3\xee\x2e\x60\x80\x06\x8e\xd4\x8d\x88\x42\xb8\xab\x95\xf3\xdd\x7f\x96\x73\xd7\x7e\x2d\xa9\x65\x77\x77\x8f\x94\xb9\x93\x51\xa6\x2e\x56\xce\x80\xa0\x1d\x46\xbd\xd5\x83\xf1\xf8\x89\x94\x54\xc0\x66\xbe\xfd\x26\xee\x54\xe6\x3a\x31\x87\xaf\x8d\x2e\xe0\xae\xe3\xf2\xf1\xd8\x02\x8b\xa8\xb1\xb9\x6b\xdf\x5e\xa6\x80\x7b\xd9\xc9\xb6\x61\x4a\x3f\x4d\xf9\xef\x8f\xa4\xe3\x0c\x20\x4e\x95\x44\x96\xd3\xd9\x02\xd1\x26\xff\xbd\x19\x3e\x5a\xa0\xbe\xf3\x7d\xb8\x17\x55\x7e\x06\x93\x8e\xf8\x5e\xcb\x43\x93\x7d\x22\xb0\x2d\x88\xbc\x5e\xa8\x48\x6a\x82\xbe\xb9\xb8\xc4\x23\x44\x41\x77\x35\xb3\x56\x1b\xb5\x47\x5f\x9e\xa1\x37\xd7\xbf\x22\xa7\x1e\x96\x89\xe6\x29\x4d\x55\xef\x01\x8a\x52\x74\xaf\x41\x8a\x34\x3f\x19\xc1\xfc\x4d\x13\xc3\x9c\x5d\xd5\x6c\xfd\x5b\x65\x20\xf3\xfa\x54\xf4\x80\x8d\xed\xef\xa4\x4d\x76\x0e\xd9\xc1\xd8\xf2\x15\xb1\x13\x45\x53\xac\xff\xd2\xe1\x7a\x70\x05\x54\x0e\x48\x7c\xd9\xcb\x14\xb5\x
cf\x0e\xf5\x0b\x4a\x3d\x09\x4e\xad\xd1\x0d\x48\xe2\xa3\xce\x79\x29\xc1\x44\x54\xa6\x5a\xfc\x84\x37\x2f\xeb\xa0\x64\x47\x92\x52\xa2\xf6\x2e\x87\x18\xac\x15\xd4\x19\x64\x8a\x6e\xd5\xca\x0a\x05\x8f\xe6\x23\x44\xef\xc8\x5c\xce\x8f\xd5\x59\x0c\x45\x11\x7f\x11\x44\x01\xaa\x28\x08\xa1\x8b\x9c\xde\x9a\x64\xe2\x2d\x73\x83\xbf\xdd\x46\xd8\xa2\xa8\xd9\xc1\x53\x1f\xad\x2f\x6c\x20\x22\x08\xde\x05\xd3\xbd\x5a\x44\x48\x44\xc8\x7f\x27\x5f\xdb\x9c\xb5\x02\xd7\xab\xfc\xf1\x95\xee\xe3\xe2\xa7\x4c\xff\xb6\x3d\x99\xbc\x7e\x21\x48\x66\x62\x7b\xca\x99\x3f\xaa\x7a\x7c\x32\xda\x84\x84\xf7\xa5\x1a\x04\xb8\x7b\x2b\xc7\x50\xc0\xca\x01\xc8\xe7\xd1\x8d\x31\xa8\xd8\xc9\x18\x0e\x6e\x5a\xbc\xb5\x08\xe6\x8d\x1b\x16\xe8\x0c\x17\x9f\x8b\xa0\x2d\x6d\xb1\x48\x2e\x68\xaf\x8a\xc2\xe5\xc0\x00\x0e\xcb\xa0\x72\x94\x61\xd3\x1d\x46\xf6\x70\x90\x5b\x24\x06\x3c\x8f\x47\x99\xb7\x55\xb5\x94\x5f\x41\xbc\xa2\x1c\x8b\x31\x60\x16\xc3\x2e\xb0\x24\x17\xc9\xbd\x7e\x27\x73\x88\xf6\x09\x16\x88\xbe\x2e\xdc\xa5\x5e\xfc\xb3\x58\x60\x33\xf3\x13\x6e\x02\x63\x54\x50\x85\x9e\x00\x29\xc6\xa4\x14\x6b\x4c\x74\xda\x4f\x4d\xfb\x74\x55\x5f\xf6\xe2\x35\xe1\x16\x0a\x2d\xbd\x1d\xc4\xa1\xbf\x22\xbf\x40\x63\xbf\xf7\xb2\xa3\x73\xf7\x02\x2c\xa8\x48\x1b\x98\x82\xef\xc1\x8e\x97\xcc\xda\xe0\x8e\x67\x0e\xd4\x3d\x1a\x98\x93\x03\xbb\xc3\x98\x6b\x42\x6a\xd8\x56\xdd\x4f\xfa\x90\xb8\x4d\xe5\x2a\xe2\x6b\xf4\x83\x27\x61\xf0\x5d\xc4\x98\xb7\x4a\xe0\x59\x7e\x8f\x56\xf8\xcf\xb3\x2c\xa4\x24\x9e\x3c\x47\xd4\x7c\x18\xa4\x95\x78\xea\x0f\xf8\x08\xf7\x58\xcc\x7e\x4a\x99\xc1\x09\xac\x7c\xb3\xa7\x8b\xaf\x32\xa5\x89\x04\x31\x06\xbb\xd9\xe3\xae\x5a\x99\x2c\x27\x1a\x44\x1a\x3a\x08\xa1\x14\x58\x63\xf8\x93\x8d\xf2\xfa\xbe\xc8\x54\x1d\x10\xf1\x10\x26\xf6\x6d\x60\x3c\xf4\x34\x71\x19\x37\xd1\x33\xe9\x06\xe6\xd3\x2c\x0f\x93\xe8\x90\x51\xf6\x28\xdc\x67\xd7\x4e\xeb\xcf\x05\x1c\x52\xfe\x1f\x7a\x92\x0c\x24\x1f\x59\x6f\xcb\xee\xc1\x5a\x28\xd0\x12\x52\x82\x52\x99\x28\x1e\x26\x71\x3e\xd2\xe7\x3d\x3a\x4b\x69\x8e\x6d\xc4\xa3\xbc\x76\x63\xf2\xd3\xb8\x31\x1e\xef\x26\x32\xbf\x63\x15\x4f\xee\xe2\x91\x77\xa0\xf9\x63\xd8\x46\x1b\x9d\x40\x54\x6d\xe9\x17\xdd\x67\xcb\x76\x9e\xdf\xd5\x30\x41\x3b\x46\xe6\xd4\x14\xb9\xed\xc3\x48\xcd\x17\x5f\x7c\x40\xb4\xa3\x5e\xa2\xff\x0a\x6e\xa2\x61\xca\x76\x33\x0c\x54\xed\x54\x84\x09\x69\x43\x30\xf4\xbd\x52\x1f\x85\x49\xdf\x4f\x43\x44\x0f\xe5\x6d\x7c\x19\xc4\xdd\x41\x6c\x1e\xe9\x83\x70\xe7\x64\x7f\x63\x8d\x88\x96\xab\x7f\x0a\xa4\x58\xee\xba\x8d\xb2\xd0\xb6\x5f\x40\x8e\xe6\x6a\xc0\x82\x0e\x81\x0b\x40\x07\x48\x33\x2f\x78\x57\x01\x18\x66\x7b\xbd\x75\x95\xdb\x95\x20\x00\x5d\x25\xe1\x9c\x90\x2e\x81\x91\xb9\x49\xfc\xc7\xa3\xf2\x29\x6a\x53\x3c\xf2\x1f\x2b\x5f\xca\x85\x5b\x3f\xf0\x9a\x86\xdf\x70\x50\xfe\x43\xb5\xe3\x96\x1a\x2f\xf0\x6b\x4b\xc0\x78\x69\x0b\x26\x3a\x2b\xcc\x15\x03\x19\xb5\xda\x0e\x3a\xb7\xc7\x53\x4d\xf8\x57\x24\xe4\x9a\x44\xf7\x60\x7c\x83\x2f\xfd\x4e\x9d\x8e\xae\x5c\x8c\xbc\xbc\x48\xfa\x0b\x06\x17\x02\x1b\x09\x89\x97\x82\x2e\xd2\x91\x3a\x7e\x2a\x05\x46\xf4\xed\x18\xe1\x55\x09\x11\x25\x1a\xf1\x77\x21\x80\x84\xdb\xed\x72\x6a\x68\x33\x68\x26\xa3\x41\xa1\xb7\xab\x9a\x57\x1f\xa2\xcb\x37\x13\x16\x75\xf0\xe4\xda\xce\x71\x7e\xba\xe3\x86\x42\xea\x4a\xa1\x91\xf5\x71\x77\x85\x3d\xd4\x28\x2b\xbd\xc4\x74\xce\x86\x8e\x22\xe9\x3c\xc9\x71\xa5\x79\x2b\x7a\xae\x9d\xb2\x03\xbc\x18\x5e\xed\xf7\x38\x55\x0b\x89\x52\x5b\xed\x34\x51\x19\xe1\x5e\xb9\x18\x59\x6c\x71\x65\x06\x07\xbd\xfc\xf6\xd2\x6b\x3e\x1c\x8e\xf9\x9f\x7c\x96\x16\x57\x82\xb9\x53\xb1\x6a\xb6\xd0\x91\xf1\x76\xd2\xb5\x83\xfd\x47\x59\x93\x09\xee\x9c\xd3\xd4\xe5\xa1\x4a\x30\x1f\x61\x8d\x92\xfe\xfa\x8a\x2b\xbd\x2f\x6b\xc0\x0d\x28\x37\x06\xd1\
x0c\x84\x61\x8a\x65\x98\xe5\x1b\x73\xdb\xb9\xf1\xf7\x25\x31\xe6\x19\x9f\xae\x99\xaa\xc6\xa8\x0c\x53\xf0\x8a\x3c\xe6\x13\xfa\xc9\x02\xae\xb2\xef\x5a\x3d\x88\x56\x80\xdc\x9a\x07\x13\xb0\xb0\x9f\xd2\x45\x9c\xde\xa2\x36\x24\xa5\x01\x90\x7c\x65\xe6\xd5\x5e\xab\xc8\x04\xae\x4c\x1a\x94\x7b\x59\x76\x19\xbc\x5d\xe5\x6b\x23\x42\x56\xdf\x76\xca\x87\x82\x7f\x1e\x7f\x37\x32\xae\x2e\x07\x15\xed\x24\xd2\x44\x12\x33\x46\xc3\x80\xc8\xef\x80\x96\xee\x12\xeb\xc2\x69\x81\xb1\xfd\x41\xff\x6a\x05\x24\x56\xf1\x85\xb3\x29\xf8\x4f\x7d\xd2\xd3\xdf\x0d\xa1\x28\x41\xf1\x65\x69\xc2\x0f\x1f\xb9\x13\xe5\xdf\xeb\xe5\x1c\x5d\x2e\x48\x07\x70\x5a\x3e\xfb\x1e\x87\xf0\x36\xcc\x79\x77\x46\xf7\xfe\x34\x71\x0b\x36\x23\x50\x3f\xf2\xe0\xb8\x92\xb8\x60\x04\xa2\x91\x14\x52\xac\xd3\xf2\xe4\x80\x2e\xb9\x69\xc3\x48\xc5\xa8\xf3\x6b\x34\x0d\xbe\xd4\x72\x46\xa6\x94\xe1\xc3\x82\x6c\xd7\x6f\x78\x46\x57\xed\x24\x2d\xe5\xa6\x04\x8b\x5d\x05\x59\xad\xa3\x4a\x79\x07\xaa\xa9\x86\x22\x2d\xfa\x62\x2e\x85\xdd\xb9\x84\xec\xea\x5c\xa0\x9a\x8a\x6e\x3f\x2b\xbc\x97\x7c\x70\xf9\xb3\x1c\xa7\xe2\xe8\xbe\xbc\x9f\xc1\x00\xdb\x1d\x92\x14\x32\x0e\x95\x72\xbf\x63\x1b\x96\x7a\x47\x2e\x97\xa3\x0a\xc3\x52\x35\x64\xbd\x3b\x07\x7b\xfa\xee\x7c\x3c\x92\xe3\x2f\xa2\xc0\xae\x91\x73\xc8\x5d\x1e\x47\x71\xd0\xca\x9e\x42\xfd\x1c\x94\x84\x55\x4a\x82\x9e\x07\xfd\x75\x51\xff\x41\x4f\xa8\x8a\xaf\x1a\xb5\xfc\xfb\x3a\xae\xe6\xbf\x40\xc3\xd0\xd9\xd2\xdf\x3a\x3f\xa6\x7d\x2c\x5c\xc9\xc2\xdc\xc5\x15\x41\x19\x4d\xff\x84\x2f\x80\x87\xa6\x2a\x35\x79\x65\x25\x88\xa8\xba\xe2\x33\x37\xa3\x4e\xe6\xdd\x39\x20\xd0\xa0\x64\xe7\xac\x81\x93\xd6\xff\xef\xfc\xd2\xa3\xac\xce\xe9\xde\xa3\x42\xe6\x38\x3a\x95\x65\x69\x6f\x03\xe6\x22\x51\x0f\xf1\xcf\x81\x8e\xc1\x89\xdb\xf8\xd8\xb8\x54\xc1\xac\x48\x99\xc2\xbc\x22\x20\x95\x16\xe4\xbb\x9a\xcb\x3c\x74\x34\x2c\x9c\x2d\x2d\x68\x59\xf2\x1b\xdd\x55\xbe\xde\x04\x1c\xd5\x56\x44\x3b\xf7\xf9\x8f\xe4\xc5\x7f\x37\x78\x8c\x5d\x96\xca\xc1\x43\x39\xc8\xe6\xa2\xe5\xf7\xe0\x4f\x4d\xfc\xcb\x6f\xe4\x51\x76\xc8\xab\xaa\xc6\xb4\x3d\x11\x74\xa3\xb1\x7a\x19\x3b\x14\xb0\x29\x40\xac\x74\x19\x5f\x7e\x59\xc9\x24\x1d\xab\x84\x71\xe5\x57\x56\xd2\xf8\xd6\xdb\x70\x9b\x91\x8f\xdc\x47\x43\xc4\xcf\xf1\xf3\x26\x27\x76\xef\xe0\xcf\x87\x1d\xef\x66\x44\x67\x33\x0b\xed\x94\xe3\xd1\x96\x8f\xb0\x7f\xf9\x3c\x7f\x4a\x4d\x26\xda\x9a\xd3\x97\xe1\xf1\x8d\xb2\xce\x60\xd8\x75\x19\x61\x37\xa2\x77\xa4\x3f\x7d\x87\x6f\x5f\x16\x34\x2a\x12\xe7\x64\xb7\x9a\x49\x21\xdd\xaa\x17\x7f\xf2\x68\xc8\xec\x21\x67\xad\x8b\x45\x17\xf5\xa3\x0e\x9e\xc1\x70\x44\x0e\x8a\x19\x74\xcd\x39\x4e\x1e\x01\x2b\xe4\xa8\x72\x61\xe3\x28\x28\xb0\x1b\x8b\xee\x7d\xf1\xdb\x08\x9f\x73\xb4\xa6\x49\x02\x29\x95\x26\x2d\x69\xb6\xfc\xd6\x59\x85\xf2\xf1\x26\x19\x8d\xea\xb7\xcd\x3e\x0d\x2b\xfc\x35\xbf\xf2\x60\x53\x35\x15\x2f\xe5\x89\x46\x8a\x1e\x58\x83\x93\xba\x56\x67\xc2\x15\xa0\x84\x98\x75\xa8\xfd\x84\xb9\x31\x5c\x3e\x38\x22\x6b\xaf\x69\x67\xed\x9f\x9c\xfe\xd6\x8a\xf4\xaf\x0c\x6b\xbc\xa9\x92\x74\x58\x3d\xdc\xce\x76\x87\xac\x84\x1e\xa0\x8c\xd2\xbf\x28\x33\x05\xad\x89\xf5\x57\x36\x7e\xe1\x00\xab\x72\x53\xbf\x62\x76\xec\x2e\xe4\x93\xd4\x2f\xa0\x4c\x74\xe2\xca\xfd\x4e\x28\xae\x81\x48\x52\x5f\xfb\x92\xd5\x8c\xe2\x90\xff\x71\xb4\x44\xce\xc5\x4d\x46\x97\xfc\x2c\x0b\xca\x47\x3e\xb2\x17\x32\xd7\x0f\x53\x8d\xdf\x5f\xd2\x89\x4e\x88\x2b\xb2\xd8\x72\x97\xc1\x85\xc6\xa9\xab\x34\x9f\x4b\xf0\xd4\xe7\x6b\xab\x35\xc7\x96\x80\x85\xaa\x40\x9b\x19\x16\x31\xab\xc8\xa7\x40\xef\xca\x94\x76\xaa\x7c\x9d\xed\xc1\x39\xbb\x80\x04\x50\x70\x0a\xb5\xeb\x7b\xd7\x9f\xe9\x77\x9c\x6d\xa3\x5e\xce\x9b\xde\xf1\xf8\xf0\xaf\xae\xe6\x98\x60\x7a\xf1\xbd\xd1\x3b\x71\xbc\xa9\x94\x74\x01\xaa
\x30\x9b\x29\xbf\x51\x2a\xb2\xfe\x83\x01\xc5\xac\xe0\x12\xa4\xd8\x08\xa3\x7b\x29\x39\x6f\x54\xb5\x5b\x36\x63\x60\xcd\xd5\xd2\xb1\x5a\xa1\x97\x22\x83\x95\x62\xd4\xa1\xe5\xd6\x5c\xe6\xa9\xaf\x19\xb7\xb0\x27\xc6\x87\x20\xca\x17\xce\xea\x61\x54\x26\xe7\x74\xdd\xf5\x65\x58\xf9\x04\x09\x93\x83\xa4\x45\xe5\x59\x96\x29\x2a\x72\xf2\x16\x8e\x10\x3f\xb0\x34\xa8\xa0\x60\x60\x82\xb3\xb3\xb7\xc6\x8d\x86\xfe\xb5\x8d\x0a\x8b\xf5\x26\xa0\xbe\x2f\x6f\x61\xe5\xef\x6f\x75\x24\xcc\xb0\x7a\x18\xfe\xe7\xa8\x0d\x60\x50\x56\x0e\x84\x08\x43\x89\x03\x38\x83\x65\x1f\x7e\x7d\x62\x79\x19\x1b\x7f\xfd\x7e\x90\x3d\x6a\x76\x7c\xc0\xcd\xf5\x46\xc6\xdf\x93\x37\x50\x84\x1e\xc1\x02\x2c\xbb\xfe\x80\xbc\x87\x2f\x8b\x5e\x8d\x99\xc5\x58\xef\xc8\xb3\x8b\xc2\x71\x75\x71\x68\xd9\xf1\x00\x59\x73\x8d\xe6\x4f\x72\x7d\xa0\xc1\x8d\xaa\xbb\x49\x5c\x9a\x3b\xed\x5f\x54\x74\x9d\x9e\x67\xee\xdb\x49\xf5\x57\xa9\xf1\x9a\xa8\xb7\xee\x5b\x7f\x76\xda\x7f\xde\x69\xe6\xa6\x99\x9b\x05\xdf\xfe\x70\x0f\xba\x2c\x50\xde\x75\xdd\xc7\x70\x63\xbb\xf7\xb3\xd4\x39\xc0\xe9\xf8\x4a\x97\xa4\x06\xa3\x90\xd2\x94\x0a\x31\xeb\x76\xd4\x85\x20\xf5\x8c\x4d\xaa\xee\xb4\x4a\x74\x6d\x68\x17\x4d\xdd\x3f\x5a\x50\x48\x23\xf8\x18\xb6\x95\xb0\x89\x88\x60\x41\x19\xa5\x80\xc4\x9e\x74\xd0\xef\xcd\x23\x6c\x46\x98\x04\x91\xa6\xb8\x4b\xed\x2a\x4b\x11\xb1\xd7\x72\xdb\xa0\x47\x7a\x88\xcb\x42\x83\x07\x14\xdd\x84\x02\x5d\x68\xa2\x79\x8c\x0b\x6c\xcc\x49\x9d\xfb\x2f\x66\xa8\x82\x93\xde\x9c\x7c\x52\x17\x16\x77\xa5\x72\xb8\x5f\x16\x93\x9c\xec\x54\x6f\x46\x86\x02\x81\x43\x90\x5b\x6e\x36\xe0\x74\x1d\xb9\x56\x07\x24\x98\xce\x27\xc7\x01\xa2\x7f\xa9\xd2\x12\x29\x18\x9e\x7f\x83\x9d\x18\x10\x1f\x20\xc9\xab\x25\x20\x36\xa2\xc5\x0f\x5d\x4f\xd5\x14\x47\xcd\x89\xb6\x8b\x5e\xa0\x94\x16\x61\xc8\x29\x16\x54\x87\x2f\x9d\x58\xde\x29\xfb\x40\x10\xdb\x65\xc0\x09\x0b\x7a\xc1\x99\x48\x5b\x58\x3d\x02\xd9\x45\x65\xe0\xb9\x1d\x51\xc3\x29\xaf\x27\xd4\xf0\x8e\x8e\x12\x6b\xb1\xd6\x8f\x08\x92\x6a\x2f\x42\x23\xb5\xa3\xef\x3c\xa7\xb0\xe4\x1c\xab\x0c\x94\xd9\x82\xbc\x4a\xf7\x38\xea\xfa\x2b\x9e\xb0\xcd\xdf\xde\x9d\xe5\x6b\x0d\x2b\x22\xf4\x01\x96\xce\x90\xdf\x17\xe8\x6f\xff\x7b\xb1\x2f\x31\x39\xf3\x76\x12\x89\xd4\xc5\x6d\xef\x3e\x3d\x44\x43\x21\x1f\x1a\x2f\x9f\xad\xf6\x41\x95\xb1\x6d\x83\xdc\x53\xa2\x59\x50\xd8\x7b\x66\x73\x1e\xcb\xa7\x02\xe1\xe5\xc6\x22\xd5\xd6\x9c\x2d\xce\xb1\x2b\xf4\xa1\xbc\xa8\xb3\xd6\x9b\x0d\xff\x1a\xa1\x36\x78\xe6\x6e\xc9\x65\x67\xd7\xb3\x65\xa3\x12\x93\x83\x84\x68\x6f\xb9\xfd\x4c\xe1\x1b\xdf\x8c\xd2\xc1\x37\xea\x39\x34\xb2\xed\x6b\x30\xc7\x23\xe0\xc7\x32\x1f\x86\x34\x0b\xff\x4b\x16\x1a\xf5\x76\xa9\xa3\x11\x67\x1d\x6c\x66\x81\x20\x1b\x71\xad\xfe\x14\xf0\xe3\x8c\x9f\x02\xa4\x99\x5d\x57\x04\x9c\x14\x3e\xc7\x63\x16\x3e\xe8\xb3\x95\x31\x8d\x7e\x9a\xd5\xfb\xc2\x01\xcb\x16\xec\xf6\xed\xdb\x69\x85\x0e\xa4\x65\xf0\xc9\x67\x61\x5a\x3d\xd6\xc9\x70\x79\xf4\xfa\x3e\x59\xa6\xd6\x8e\x7b\xc2\x02\xa6\x7f\x7f\xe0\x09\xa4\x97\x67\x8a\xa2\xf5\x46\x5d\x5e\x6b\x31\xca\xef\x58\x90\x62\x58\x07\xa0\xa3\xc0\x66\x5f\x4a\x49\x90\xe4\xfb\x9a\x8d\xd1\x37\x25\xfc\x81\xc1\x18\xa2\x7f\x69\x5f\x6e\x12\x12\x99\x2e\x69\xb3\x47\x82\x86\x7e\x6e\xf5\xeb\x78\x83\x23\x3e\xc7\x40\x36\x2a\xef\xf4\xeb\x32\xb3\xa9\x8b\x2e\x6d\x01\x38\x55\xf1\x1a\x30\x28\xce\xaa\xb4\xfa\x0d\x96\x0b\x9e\xbb\xc0\x52\x96\x66\x97\xa7\x63\xe7\x40\x69\xbb\x6a\x94\xa2\x54\xae\xcd\x52\x1e\x55\xb7\x10\x06\xeb\x6b\xb0\xb7\xe1\x05\x8b\x22\xf5\xae\x9a\xb4\x1a\x66\x23\x07\x09\xf3\x68\x64\x94\x92\x6c\x39\x44\x34\x34\xd6\x2e\x3b\x15\xb4\xec\x7c\x82\x76\xe9\x98\xdc\xa3\x76\x3f\xb0\x24\xdf\x2a\x13\x53\x23\x9f\xa5\x48\x44\x98\xc2\x84\xa0\x9c\x82\x29\x52\x1a\x55\xd5\xb2\x36\x45\x5
5\x2b\x5a\x0b\x86\xe5\x6c\x71\x2c\x01\xd0\xca\x52\x4a\x65\xb1\x14\x71\xb5\x66\xde\xce\x8a\x57\x2a\x2b\xa6\xab\x58\x85\x83\xb2\x16\x37\x5e\x49\x52\xb4\x2a\x57\x86\x0a\x70\x5f\x74\xc8\xe7\xd7\x75\xe7\xea\xc4\x6a\xa5\xac\x62\x92\xdc\x9c\x2f\x19\x76\xc5\xa8\x43\x45\xc7\x0c\x8b\xff\xf7\x55\x42\x6a\x6b\x85\x40\x48\xde\x98\xb1\x54\xea\xcf\xea\x22\xb9\x1f\x1f\xa3\xfe\xca\x72\x0a\x62\x66\x28\x98\x8e\xf1\x12\x47\xd5\x0c\x13\x56\xa1\x99\x97\xb2\x1c\x9b\xf8\x4a\xa4\xe4\xb9\xb4\x34\x31\xf3\x2a\x46\xc1\xe8\x74\xd5\xd9\x22\x6a\x49\x7f\x01\x13\x5d\x40\xa5\x89\xfb\xa8\x40\x5d\x89\xaa\xc9\x50\xea\x93\x9a\xc7\x3d\xd3\xfe\x49\xc3\x70\xb3\x66\x99\x2a\x44\xf1\xb1\xc9\x66\x01\x98\x64\x50\x26\x73\x6f\x7c\xcf\xe8\x1f\x62\xeb\x1f\x1e\xf0\x26\x85\x6a\x62\x87\xd9\xbf\x4d\x4e\xfb\xae\x1a\xd0\x33\xf9\xb2\xd2\x52\x5b\xca\x77\x84\x48\x42\x36\xc8\x51\x3a\x3e\x7c\x4f\x29\x7c\x63\xf7\xf4\x60\x54\x4d\x59\xde\x10\x7d\xfe\xb1\x82\xca\xc0\xdd\xa6\xf9\xb2\xda\x99\x62\x8c\x7e\x6e\x23\xe9\x2f\x63\x34\xf9\x80\x72\x7a\xb4\xb5\x92\x04\x97\xd1\x7c\xcc\xb2\xce\x4d\x53\xd8\x9a\x46\x97\x08\xe1\x51\xbb\x74\x4d\xb0\x31\x75\xcb\x6e\xeb\xef\xa7\x04\x9b\x2e\x1f\x3d\xac\x0b\x20\x8a\xc7\x29\xfe\x0d\x50\xe0\x93\xc1\x49\x68\x2d\x38\x71\x96\x33\xfd\x4f\x94\xf5\xff\x9d\x32\x63\x5c\x1c\x65\xf7\x3f\x40\xe0\x4a\xf4\x3e\xbd\x81\x5b\xad\xbc\x11\x40\x69\xbe\x16\x66\xd4\x73\xda\x2b\x20\x69\xcb\xd2\x8c\xfc\x5b\x8d\x1c\xd3\x0b\xfe\xc9\xcc\x7b\xbc\xf1\x42\xfc\xc1\x18\x19\x9c\x5d\x80\x06\xdf\xe1\x9d\xa6\x27\x04\xeb\x72\xe9\x2d\x1c\x1b\x07\xb9\x75\xae\x8f\xcf\x0e\x55\x43\x08\xb6\xf4\x57\xac\x3c\x6e\x95\xac\xcc\xb8\xd9\x22\x97\xf3\xc3\x05\x53\xb5\xe2\x51\x1d\x43\xe0\xbe\x60\xfe\x24\xba\x98\x5d\x94\x9f\x7c\x35\x54\x8e\x29\xa0\xec\xfc\x0a\xdc\x0b\xa1\x38\xdb\x14\xe3\x64\x32\x76\xd9\x4c\x70\xb2\xcd\xb6\x5a\x95\xde\xcb\xf5\xdf\x29\x94\x6b\xe9\xe1\x0a\xc5\x7f\x6d\xc1\x0e\xbe\xc9\x92\x77\x23\x7c\x99\xdb\xe9\x52\x28\xc6\x95\xb8\x24\x9b\x10\xcd\xce\x18\x97\xd8\x4b\x71\xf3\x3c\x6c\xc6\x88\xc2\x52\xcd\x60\x93\x05\xff\x79\x9c\x9e\xb0\x0e\xd1\x1f\xca\xef\x65\x35\x59\x7f\x6c\x5f\xa4\xa9\xb9\xf7\x42\x0d\x43\x05\x15\x7e\x9f\xe4\x0a\x51\x95\x29\xd4\x08\xc7\xea\x0c\xa5\x99\x04\x6f\xdd\x3f\x9c\xac\xd9\xce\x45\x1a\x14\x38\x94\xc9\x06\xa9\x6c\x35\x16\x74\x36\x0d\x25\x22\xd3\x2e\x9b\xc1\x79\x54\x52\xa7\x73\x14\xda\xec\x04\xd2\x52\x10\xea\x1f\x9e\x70\xb6\x4f\x61\xc9\x52\x76\x1e\x7e\x4a\x99\xa3\x9b\x93\x17\xc1\xb3\x9c\x3a\x42\x89\x9e\xe8\x0a\xf2\x6c\x79\xe6\xcc\xa9\x52\x41\xeb\x18\xa8\x5d\x82\x50\xf5\x8f\x93\xd4\xc7\xf0\x42\xd5\x0c\xf2\x53\x26\xf6\xab\xdd\xf6\x23\x5d\xbd\x1c\xef\x82\xe6\xf9\x9c\x62\xf4\x48\x5f\xa1\x59\x0c\x14\x6d\xed\xb1\x70\x9f\xaa\xb9\xa9\xa4\xfd\x26\xca\xd9\x05\xd5\xcf\x6f\x2d\x17\x1b\x97\xe6\xaa\xfa\x78\x6e\xe9\xc7\xa6\x49\x18\x1a\x23\xae\x32\x25\x6d\x83\x44\x4c\x6c\x69\x1c\xfc\xd3\xde\xf1\xf1\x59\x69\x83\x21\xfe\xe7\x69\x44\x99\xf5\xa2\x71\x47\x53\x56\xaf\xd8\x75\x2d\x67\xcc\x8c\x13\xd1\xef\x91\x9f\xbd\xee\x52\xf7\xe6\x81\xa6\xcd\x23\xb8\xf4\x17\x3c\xa9\x52\x33\x75\x0f\xfa\x6c\xe2\x20\xed\x76\xf4\x78\xd8\x28\x10\xe4\x51\x6f\x58\x6e\x02\x26\xdb\xd3\x7f\x40\x86\x0a\x76\xb5\xa5\x1e\x43\x64\x87\xd4\x67\x3d\x1a\x78\xd2\x92\xa5\x3f\x25\xb2\x71\x2c\xa8\x65\xd0\x1f\x96\x3f\x07\x37\xd8\xa5\x1c\x09\xda\xd8\x82\x3a\x86\xdd\x05\xde\x29\x9c\x9e\xe2\x0c\xe7\x32\xe4\xe3\xdd\xab\x2e\xcd\xda\x3a\x43\x98\xae\xd2\xb2\xff\xbd\x02\x87\xf0\xa3\x1d\xa6\xbc\xde\xd1\x44\xe0\x75\xc5\xc7\x45\x82\x43\xb9\x8a\x59\x0b\x62\x8f\xe5\xba\xf1\xa2\x81\x22\xbd\xfd\xac\x05\x19\x2d\x5a\x4d\x2d\x05\x9d\xf4\x46\x12\x69\x64\xe4\xf7\x8b\xc2\x60\x02\x57\xb4\xda\xb4\x94\x1b\x61\x4c\x66\x67\x05\x31\x45\x33\x
bb\xc7\x37\xc2\x89\x0a\x26\xb9\xd1\xae\xd5\xfd\x88\x4e\x06\x47\xe0\xe2\xf3\x16\x7e\x2b\x1f\x0d\xc4\xd1\x56\x96\xea\x3f\x0f\xef\x7a\x54\x56\x8c\x22\x62\xc8\x04\xd3\x23\xd1\xc5\xfe\xab\x91\xc2\xd5\x8d\xbf\xf1\xbc\xd0\xad\x45\x61\xc8\x09\xcf\x7f\x01\x84\x1a\x06\x0d\x8c\x51\xe7\x70\x90\xc2\xb9\xa3\x97\x81\x5e\xa4\x3f\x52\xfb\x98\x6b\x46\x43\x1f\xe1\x09\x1d\x88\x33\xaf\x18\xcf\xb2\x03\x75\xaa\xc3\x74\xc5\xbd\x97\x1a\xb2\xa6\xb9\x76\x97\x6f\x09\x79\xa6\xa7\xc8\xaa\xf1\xc6\xd3\xa7\xfe\x7c\x65\xcc\xfe\x06\x87\xfb\x83\x84\x45\x8e\x41\x28\xe1\x60\xbe\xc8\xb7\xff\x00\xe3\x4e\xd9\x64\xd4\x2a\xec\x95\xd1\x6c\x8c\xc7\x48\x38\x28\xc5\xe7\x5f\x24\xa5\xa1\x0c\xbd\xa9\xf2\x6c\xc7\x29\xaa\xc8\x32\x5f\xbb\x32\x34\xe3\xb4\xb2\x4c\x91\x33\x2e\xbf\x09\x32\x4e\xaf\xca\x8f\xd3\x8d\x46\x94\x4b\xa4\xe3\x2b\xd3\x3e\x58\xa3\x90\xec\x57\x24\x45\x87\x55\x8c\xf7\x48\x49\xa7\x59\x3b\x9d\x55\x63\xd3\x4e\x61\x97\x50\x46\x51\x4e\xce\x86\x49\xed\xb9\x42\x96\xa0\xdd\x26\xb0\x70\x3b\x04\xab\x2c\x92\xff\x3b\x43\x64\x87\xe8\x0b\xb1\xdc\x60\xe3\xa6\x3b\xb1\x37\x8a\x2f\x2a\xb2\x03\x5b\x06\x5d\x9b\xc3\x8a\x3e\xb4\x5d\xdc\xb6\x70\xf3\x03\x4e\xba\x33\xbc\xb2\x4d\xa6\x50\x82\x96\xba\xeb\x43\xa0\x12\x9b\x90\x77\x7a\xab\xc5\x6a\xc0\xab\xb0\x6a\x52\xe5\x8d\xab\x0f\xfd\x21\x1f\x34\x59\x7b\x18\x00\xae\x56\x5f\x20\xe0\x2d\x9d\x48\xec\x10\xce\x5a\x4e\x83\x7b\xda\x9d\x4e\x51\xf3\x25\xcb\xf0\xdf\xc1\x8b\x0a\xa8\xaa\xb9\xab\x27\xce\x49\x45\xd9\x96\x4d\x67\x37\x56\x3d\x9f\xff\x9e\x4c\x56\x6b\x46\x4d\xa8\x1c\x4a\x52\x38\x1a\xe1\x05\x80\x60\x66\x66\x35\xba\x50\x14\x1c\x5b\xd9\xa1\xed\x8b\x02\xd5\x70\x43\x7a\xa9\x5f\x0d\xaf\x8b\x66\xd4\xb5\xc0\x42\xd2\x32\xb1\xb4\x79\xe3\x99\xca\x46\x41\x02\x3e\xd7\xc2\xe5\xb7\x3e\xfa\x1a\x1e\xc0\xe2\x56\x33\x17\x0a\x8b\xf2\xd3\x39\xd6\x5f\xac\x49\xb4\x70\xf7\xde\xbd\xef\x54\xb6\x3b\x9f\x58\x78\x10\x05\xa8\xfd\x54\x9a\xd6\x47\x6a\xfa\xfa\x07\x88\xc0\xc3\xe3\x62\xaa\xe6\x1a\x53\x3f\x3e\x4f\xa5\x63\x74\x2f\xcc\x0c\x9f\x6b\xb8\xc7\xf0\xf2\xc6\x38\xa7\xb0\x89\x67\xa0\xef\xe5\x94\x11\xec\xaf\x3f\xc8\x3f\xcd\xae\x2d\x5b\x17\x56\xb0\x98\xce\x35\x1d\xbe\xe1\x39\x29\x69\xe0\xf1\xb0\x2f\x36\xf5\x80\x86\x66\xcb\x57\x2b\xdb\xf2\xf7\x65\x69\xf9\x06\x9e\x25\x3c\x36\x6c\x57\xdf\xa5\x90\xd2\x45\xaa\xa9\x58\xdf\x0d\xc3\x2d\x8c\xc3\x52\xba\xfa\x4f\xd5\xef\xa0\xb5\xe7\x45\xfa\x0f\x4b\xd4\x5d\xa7\x1d\xd2\x86\x66\x17\xa9\x63\xb9\x4a\x0c\xbc\xfe\x87\x28\x04\xcd\xe0\x32\x5e\x84\xd9\x20\x39\xb8\x93\x83\x99\x25\xc4\x09\xe7\xd2\x6c\xa3\xd5\x1e\x65\xd9\xea\xff\x8c\x35\xbe\xe6\xf2\x16\xf6\x48\x05\x8e\x3c\xde\x03\xa1\x24\xeb\xc3\x49\x76\x5b\xd5\xad\x61\xa6\xee\xa3\xbc\xed\x2a\xe5\x96\x04\x5c\xdf\xef\x9a\x36\x5e\x3f\xc0\x04\x67\xe6\x1e\x65\x5b\x69\x1f\xae\xe3\xa1\x76\xf2\x21\xfe\x43\x29\x96\x89\x78\xec\x67\x18\xe4\x39\x28\x38\xf8\xb3\x6d\xcb\x8b\x45\x07\xcd\x15\x3b\x45\x35\xaa\x6a\xcb\x72\x1a\xbf\xc5\x70\x29\xe3\xe1\x2b\xd5\xe2\x0c\x95\x32\xed\xab\x5c\x88\x2a\xd2\x0e\xbe\xae\xac\xf1\xd6\xc0\xd0\x11\x8d\x59\xad\x3a\xfa\xf0\xc3\xab\x29\x4c\x70\xe8\x18\xeb\x48\xe5\x20\x74\x1a\x9b\xef\x83\x7b\xea\x26\x02\xed\xd4\xcf\x8b\xeb\xd2\x7b\xec\x00\x1c\xa5\xb1\xdb\xcf\x43\x61\x7c\xcb\xb8\xae\xae\x52\xf5\xd9\x59\xa2\xfa\xc3\xc2\xba\x9f\xb7\x65\x5e\x36\x81\x67\x87\x4d\x98\x95\x84\xf2\xff\x00\xdd\x48\x4d\xf2\x7d\xac\xd1\x29\x28\x59\x34\x00\x98\x1f\xb6\x1e\x11\x47\x0d\xf8\xb4\x7f\xc0\x4b\x27\xec\x8b\xcb\x12\x67\x15\x62\xd9\x41\xd6\xd5\x61\xef\xcb\xf6\xdb\xdf\x5a\x1b\xfe\x5b\x20\xd5\xc2\x31\xd8\x2e\x7f\x28\x0a\x67\x7d\x51\x65\x56\x34\x62\x7a\xd8\xa2\x54\xc3\x60\xca\x5e\x41\xc6\xf8\x4a\xd9\x83\xda\xea\x6a\x56\x4a\x5d\x5d\x2f\x5b\x6e\x18\xee\xff\x7b\x53\x89\xbb\x4b\
x47\x5e\x76\x76\xae\x2d\x80\x9d\x2a\xd2\xe2\x26\x06\x2d\x53\x92\xa2\xca\x3d\xf2\x97\x6d\x48\x3c\xb7\xec\x0b\x52\xd4\xdd\x40\x58\xe5\xb0\x1d\x72\xc3\xde\x63\x25\x24\x19\xc4\xec\x45\xd5\xfa\x76\x85\x81\x8b\x60\x17\xab\x12\x50\xd7\xa1\x42\xb1\xb1\xb0\x56\x40\x75\x2c\x2f\x6d\xb0\x5e\x69\x1e\x7c\xb3\x65\x41\x49\x53\x18\xa2\x90\x31\x99\xaa\x07\x61\x2b\x6c\x19\x6e\x31\xe4\xbb\x82\x94\x6b\x95\xdf\x9e\xec\x45\x28\x80\x0b\x2f\xfd\x95\xb5\x65\x52\xae\x35\x8b\xb9\xf9\x67\xaa\x2e\x73\x5f\x55\xfa\x02\x43\x3a\xf5\x1a\x60\xfe\x05\x1a\xdb\x9b\xc9\x7a\xfb\x46\x60\x75\xd0\xf6\x9d\x75\x8d\xa7\x73\x9d\x8f\x10\x9d\x40\xf9\xeb\xfa\x95\xb1\xcc\x16\x07\xac\xbd\xce\xe4\x15\xbd\xbe\x87\x06\xc1\xf5\x85\x52\x4f\xb5\xaf\x76\x8f\xa8\x78\xa7\xf8\xec\x04\x4e\x73\x55\x26\x56\x6a\x5f\x39\x3f\x5d\xfc\x36\xbd\x1b\x9f\xcb\x67\x96\xa7\xfb\x12\x7d\x9c\x32\xe8\xb1\xc2\xea\x4e\x32\xbe\x96\xf1\x99\xa7\x84\xef\x70\xa9\xce\x0a\x18\x09\x53\x3c\x2b\xb6\x80\x76\x39\xd2\x46\xcc\xb3\xc5\x56\xda\xff\x4c\xc4\x39\xab\x94\x1d\x27\xfb\x3a\x8f\xbd\xca\x5c\xb8\xe6\xec\xa6\xd3\x96\x12\x26\x2a\xde\x09\xf2\x28\x8f\xbb\x6a\x7e\x34\xc3\x5f\x4e\x95\x08\xf6\xd9\x62\x3d\xcf\xb0\x8f\x77\x91\x04\xef\x93\x6a\x77\x3f\x97\x22\xc8\x90\xda\xfc\xcf\xff\xee\x74\xfe\x9e\xc1\x25\x9e\x62\x37\x55\x06\x69\xb7\x88\xf1\x10\xac\x69\x6e\xc4\x45\x4b\x9b\xca\xea\x31\x3a\xfb\xf9\x5e\xbe\x25\x9e\x45\x53\xef\xd9\x53\xf0\xa7\xee\x4a\xdb\xe2\xc8\x0f\xf0\xa5\x26\x8e\xf6\xbf\x49\x91\x83\xdb\xcb\xd1\x1f\x3c\x6b\xa0\x72\xc4\x2d\x92\x3d\x5c\x90\xbe\x32\xec\x09\x92\xeb\x51\xf4\x75\x4d\x72\x45\xeb\x77\xbe\x29\x18\xba\xf9\xcf\x66\xb5\x94\xba\x17\x60\x39\x54\x51\x23\x0a\xa6\x74\x39\x07\xae\xb2\x85\x37\x7d\xb6\xec\x35\xc2\xeb\x7a\x11\x15\x76\xac\x62\x63\x54\xab\xc3\xf4\x69\x7b\xf5\xea\x1f\x4c\x77\xe4\x00\x09\x2c\x2a\x3e\x4f\xd0\xaa\x3d\x2e\x9b\x05\x49\xb3\x91\x24\xe1\x76\x53\x19\x46\x79\x09\xd1\xb9\x9d\x00\x55\xa4\xda\x62\xa5\x9f\xec\x94\xad\x13\xcb\xc6\x28\xa0\x0d\xdd\x1c\xf3\x11\xf5\xe9\x32\x24\xdd\x37\xdd\x0d\x6c\xb1\xa2\xe1\xed\x50\xb6\xd2\x8f\x7e\x14\x89\x76\x93\xfb\xb5\x43\x4d\x01\x8c\x56\x0b\x39\x1e\xcc\x22\x6b\x0f\x99\xa5\x2d\xef\xce\x11\x30\x02\x8e\x4c\x10\x68\xf3\xa9\x6a\xb7\x5c\x90\x0f\xf5\x26\x08\x8e\xa9\xf6\x2c\x1b\x00\x27\x98\xd9\x41\x50\xb5\xea\x69\xf6\xc3\xbd\x6c\x8b\xcd\xc4\xc5\xde\x77\x15\xe7\x02\x60\x95\x8a\x5b\x1a\xcb\xeb\xe9\x7d\xfd\x8c\xf7\xe0\x4e\xdc\x23\x07\x1b\x01\x81\xf7\xd3\x05\x71\xb5\xde\xbd\xa7\xb2\xff\xe0\x3c\x56\x2a\xd8\x0b\xaf\xc2\xc1\xad\xba\x6b\xeb\xcf\xda\xac\x22\x49\xf0\xe4\xbc\x4d\xad\x64\x04\x0a\xf6\x1b\x3c\x1e\x84\x54\xc2\x03\x5d\x77\xef\xab\x6e\x20\xcb\x73\x9d\x2c\x08\x54\x70\x27\xf2\x84\xac\xaf\xb4\xdc\xab\x7c\xf5\x5f\xb6\x6a\x20\x70\xbf\x5d\x32\x13\x0d\x0d\x92\x2d\xca\xe6\x00\x42\xcb\xa4\x14\x05\x26\x7e\x04\xd7\xbc\x12\x4a\xbe\x97\xc0\x2d\xcb\x10\x53\x9a\x8b\xc9\x2c\xe1\x12\x34\x95\x6f\x9b\xa6\x64\xda\xb9\x08\xa3\x1c\x5f\xe7\x64\xc3\xc1\x9d\xfb\x82\x1a\x5c\xb2\x47\x01\x0c\x51\x95\xf3\x0f\x41\xbd\x85\xfa\xb5\x9a\xba\x6c\x87\x3c\x29\x19\x50\x38\xe6\x8a\xb6\x5c\xc7\x2a\x62\xb1\x02\x04\x03\x57\x5b\x2a\xab\x28\x97\xc8\x42\x59\x46\x15\xb9\xf7\xfe\x7a\x8d\xb2\xc0\xff\x69\x00\x87\x5c\x43\xad\x76\xe1\x10\x9c\xfa\xe9\x30\x5f\xb1\xe4\xfa\x53\xa8\xe1\xb9\x4d\xaa\x6f\xa7\x55\xda\xb3\xd6\xe5\xdd\xf6\x69\x64\x10\x5d\x5e\x15\x24\x69\xfe\x0d\x32\xa0\xff\xb1\xbb\x02\xfd\xc8\x5a\x2b\x62\xf3\x46\x4a\x71\xb5\x5c\xe9\x20\x6d\xc0\xa9\xe7\xea\x5c\xee\x0c\x7d\x19\x07\x51\xe5\xe5\xe5\x65\x91\xaa\x94\x97\x31\xc4\x67\xaa\x78\xeb\xe1\xfe\x8d\xbc\xc7\x2b\x0e\x4b\x46\x70\x8c\xe6\x5c\x31\x58\x18\xc6\x7c\x15\xca\xa5\xc6\x0b\x25\x78\x3b\x55\xaf\xc4\x71\xf3\xe7\x78\x9c\x11\x6b\xdd
\x3e\xd4\x87\x2c\x17\xca\x2b\x4d\x9b\x90\xd5\x5e\x31\x13\x69\xd5\x61\xa9\xb3\xb0\xa0\xfe\x8b\x62\x17\xc5\x68\x8e\x22\x16\x65\xea\xe0\xaf\x0f\x29\x43\x0f\xbb\x60\x32\xac\xc5\x58\xeb\x96\x7d\x97\x56\x2a\x0e\x81\xad\xaa\xfe\xe3\x85\xad\x52\x32\xaa\x55\x15\x65\x4d\xd9\x78\xc9\xab\x66\xfa\x49\x8b\x00\x98\xe6\x6c\x82\x01\x24\x2b\x0d\x95\xb2\x58\x9e\x45\x20\x1e\xe7\xb0\x13\x21\x99\xb8\xaa\x36\xcb\xa2\x91\x35\x30\x7d\x38\x08\xc5\x9d\x63\x1b\x49\x73\x35\x8a\xdb\x2a\x86\xb9\xe9\xc7\x1f\x3f\x4c\xc4\xc0\x26\x75\xa4\x1b\xf9\xce\xa4\x2b\xbc\x14\xae\xac\x21\x4b\xbd\xba\x96\x35\x93\x55\xde\x11\x7e\xde\xbb\x8e\x84\x21\x94\xdd\xc2\xac\x3b\x1e\x36\x5c\x48\x7b\x2b\x52\xfd\x4d\x60\xae\xcc\x20\x05\xc6\x96\x1d\x09\xb8\x22\xa2\x19\xc3\x88\x19\xb7\x8a\x63\x81\x64\xd5\xc7\xdc\x46\xc0\x0c\x5c\x05\xab\x53\x28\x19\x6e\xd9\xe8\x2e\xf9\x7e\xa4\xa9\x7c\x1c\xfe\x93\xa3\xbe\xed\x34\xba\x20\x45\xfe\x55\x2d\x42\x63\x1a\x2e\xd7\x81\x5c\x1e\x9b\xdd\xe9\xa8\xa5\xf2\x71\x95\x96\x52\x22\xd3\x26\xf1\x3f\x8c\x90\xbd\xc8\xef\xe2\x93\x5d\x7f\x7c\x42\xfa\x86\x0c\x75\xd5\xab\x9f\xa5\x7b\x70\xe3\x40\xff\xf6\xd8\x45\x61\xa2\xdc\xcb\x4b\xe3\xd7\x79\x8f\xb1\x1d\x78\x3f\x04\xfb\x22\xf7\xc6\x8e\xb5\x6f\x4a\xc8\xb8\xe7\x29\xe5\xd3\xf1\xe7\x82\x76\xb2\xc4\x97\xee\xee\xc6\xa2\x5f\x60\x5b\x39\x96\xb1\xa9\x5b\x6c\x86\x3c\xbb\x9e\x1d\x9c\x3c\x63\xd7\x9b\x5b\x05\x72\x75\x50\xca\x5a\x50\xd7\xf6\x60\xad\x9c\x4b\xdd\x01\x62\x23\x56\xce\xa6\xfd\x76\x99\x49\x46\xed\xaa\x99\x29\x68\xc4\x30\x6a\xc2\x08\x95\x96\x57\xc3\xe3\xc3\x4d\xd9\x71\x79\xc9\x94\x2a\x72\x25\xb6\xdc\xeb\x60\xb1\xf5\xf7\x81\x98\x3c\xac\x7e\xba\xdf\x49\xd0\x44\x6e\x86\x6a\xac\x93\x6c\xb6\xd3\xd7\xec\x09\x9e\x75\xb8\xcd\xa3\xe0\xd7\xae\x11\x88\x3b\x86\xb1\x4d\xa9\x67\x75\x63\xa3\x2d\xb2\xa2\x20\x11\x27\x1a\x8b\x33\x5b\x34\x5f\x39\x54\xea\x4c\x04\x89\xd8\x24\xf5\xa3\xef\xe3\xf4\x3f\xc0\x92\xe3\x7d\xe4\x06\xb3\x6e\xfa\x48\x37\xe4\x92\xf7\x03\xbf\x7d\xe3\x8d\x6f\xb3\x51\xe9\xa7\x19\x3d\xc8\xfd\x0d\xa1\x1c\x47\x27\xc8\x28\x35\x85\x58\x51\x3b\x39\x64\x94\x32\xe1\x0f\x34\xb7\xb2\x89\x97\xfe\x3f\x77\xf8\xa0\x45\xf5\x7f\x63\xc5\x5d\x56\x66\x95\xd9\x65\xaa\x08\x84\x17\x06\x01\xc6\x4b\x0f\xe4\x08\x06\xe7\x18\xdd\x01\x39\x4d\x1e\xfd\x3d\x3c\x63\xca\x46\x71\x7f\x86\x54\x55\x85\x23\x84\xb4\x49\xfe\x96\xe8\xde\x84\x70\x96\x2e\x3d\x1e\x67\x27\x98\x97\x2b\x67\x44\x9b\xf5\x88\xe2\x5a\xb5\x3e\xb6\xfe\x82\xb5\x4b\x45\x7d\x11\x86\xdd\xcb\xd9\xe7\x51\x69\x1a\xe0\x1e\x4d\xab\xf0\xdc\x49\x7e\xc1\xbb\x9c\x64\x11\x5c\x1f\x9f\x67\x0c\xf4\x09\x9a\x55\x31\x7f\x47\xee\x85\x19\x71\xb9\xf7\x7d\x3c\x0e\x70\x81\x40\xe0\xd3\x31\x30\x0e\x34\x71\x55\x2d\xdc\x18\xb3\xb6\xe0\x65\x34\x35\x55\x64\xcf\x07\x0d\x63\x77\x46\x86\x8b\xbc\x8c\x1e\x4e\x18\x73\x52\xc2\x1a\x6a\x56\xf5\x60\x09\x23\x2f\xab\x32\x9d\xca\x7e\x40\x81\x43\xb8\x6c\x6e\xc2\x92\x3b\x15\x19\x28\xb9\xcb\x59\x9c\x5d\x74\xdf\xfb\x27\xe1\x6d\xbe\xe1\x90\xc8\x6d\x88\xcc\xbe\x85\x5d\x0f\x47\x39\x7c\x4a\xca\xc8\xa4\x39\xca\x58\xed\xd3\x1f\xd1\x1b\xe5\x07\xeb\xa2\x06\x4f\x2e\x9c\x0f\x47\x50\xa0\x14\x2f\xa5\x75\xa5\x3d\x53\xc9\xfc\xe5\xf7\x53\x33\x96\x15\xce\xaf\x90\x45\xda\xf6\xf3\x76\x3d\x8f\x2f\x6e\xf3\xd3\xb1\xf9\xa0\x33\xeb\xef\xf7\xf2\xb4\x68\xf4\xe3\xab\x28\x57\x65\x3d\x42\x27\x5a\xa5\x36\x64\x0f\xca\xcb\x8c\x28\xcd\xc5\x9f\x19\xf3\xfb\xe2\x2a\x9c\xe9\x00\x29\x44\x18\x5b\xaa\x99\x99\x9e\xcd\x24\x65\x08\x25\x2b\xba\x69\x66\x5c\xac\xa7\x95\xa7\xb0\x0d\x5d\xda\xa8\x9d\xb3\xce\x68\x85\xf0\xc7\x03\xea\xf5\x85\x70\xcd\xb1\x7f\xbb\xe6\x5e\xdd\x5b\x5d\xd1\xfa\xc0\x8f\xb3\xf3\x14\x5e\x29\x03\xd5\x1f\x10\x03\xa0\x57\x99\xe8\x1f\xc0\x20\x80\xb2\x33\xe4\xbf\xd1\x1f\xe7\x14\x1e\xf
f\x07\xa9\xf8\x86\xa4\x37\xaa\xa4\x83\x3a\x4d\x63\xf7\x9a\x26\xe0\xe8\x01\x61\xdd\x7a\x78\xa4\x39\xc9\xd4\x66\x85\x07\xb1\x8d\xff\x71\x41\x5b\x54\xbb\xd9\xbf\x92\x11\x01\xf6\x59\x5d\x6d\x38\xbb\xc6\x36\x1b\xf3\xeb\x5f\x9f\x6c\xa0\x8f\xa3\x32\x15\xb2\xce\x0d\xc1\x58\x46\xb6\x86\x56\xf6\xf2\x55\x71\xc7\x98\x61\xdf\xb8\x54\x21\x6e\xd9\x2c\xc9\xb4\x74\x8d\xce\x30\x62\x64\x28\xe9\x05\xc0\xbd\xbb\x11\x60\x73\x79\xf5\x0a\xc8\x41\xc1\xd5\xfe\xe1\x11\x24\x04\x5b\xed\x18\x74\x2e\x6e\x3d\x94\x70\xb7\xfc\x68\x1e\x70\xc6\x35\x1f\x74\x21\x89\x4b\xc0\xb9\x6c\xe1\x16\x6d\xb3\xe6\x37\x57\xe2\xf9\x74\x70\xe7\x69\x44\xbc\xfa\xa5\x03\x2b\x0f\x98\x9c\xbd\x5a\x33\xa2\x29\xb7\x95\xf8\x37\x37\xc1\x8e\x4e\x35\x7c\x00\xe3\x7e\x31\x6d\xba\xac\xb5\xbd\x74\xe3\x73\x99\x65\xd4\x71\x68\xcb\xd9\x23\xd6\xe1\x1c\xf6\x4c\x88\x03\x0d\xca\xc1\x0c\x23\xc1\xa4\xd7\x4f\x7f\x56\xe6\x9e\xbb\x49\xd3\xd0\x44\x37\xd5\x22\x69\x7f\x98\x69\xbd\x9c\xa8\xc9\x20\xb1\xd1\xf1\x23\x0f\x49\x06\xa8\x9f\x5d\x51\x31\xec\x22\x70\xcf\xb6\x84\xb5\xeb\x34\x3e\x83\x03\x8d\xb9\xef\xdd\xdb\xd7\xd6\xc4\x6a\xfe\xa1\x7f\xb9\x51\x3e\xf5\x87\xbb\x88\x53\x6f\x1f\xd6\x52\x3c\xb9\x89\xf0\x59\x7e\x35\xe8\x92\x14\xbf\x98\x1b\xf8\xcf\x07\x19\xb7\x4c\xc4\x7c\xab\x4f\xe6\x24\x70\x20\x32\xf5\xd8\x2d\x97\xeb\xc3\xb3\x17\x49\xe9\x09\x83\x05\xec\x11\x7f\x54\xd3\x00\x77\x17\xbb\xba\x6f\xcc\xa9\xde\x53\x63\xbe\xe6\x18\x1d\x86\x2d\xfd\x0f\xb3\xc3\x98\xf9\x61\xc7\xb2\x53\x55\x23\x61\x29\xf5\xd8\x52\xb5\xeb\x68\x62\xec\xb5\x89\x01\x57\x63\x05\x6e\x21\xc4\xb1\xad\xac\xec\xb2\x38\x74\xe9\x2b\x3a\x61\x53\x22\x92\xbb\x15\x7b\xe8\x2b\x31\x9c\xbf\x3b\x1f\x58\xe2\xf4\x12\xb5\x9d\xf1\xf3\x85\xe1\x53\x42\x1c\x50\x3f\x1d\x79\xe2\x5f\x57\x80\xae\xb5\xc5\xe1\x06\x55\xdd\xa1\xa6\xaf\x7f\x63\x5e\xcf\x48\x36\x4d\x9a\xea\xc7\x59\xc1\x44\x1c\x4f\x64\x46\xf8\x1f\xff\xc7\xd9\x70\xfc\x37\xd2\xbd\xa9\xd4\x71\x6a\x86\x9c\xfa\x87\x1e\x20\xef\x4a\x14\x43\xd5\x8c\x62\x8b\xd0\x08\x71\x7b\x64\xf2\x72\x4f\x37\xf1\x63\xcb\xee\x7f\xc0\x38\x56\x29\x2c\x0f\xa5\x82\x3e\xd8\xcb\x60\x59\x8f\xe1\x68\xf9\xf6\xf0\x99\x97\x52\xcc\x9f\x56\x14\xbf\xa7\xa0\x82\xc3\x1f\xd6\x12\x5f\x8e\x39\xd8\xa4\xf5\x55\x9f\x6f\x18\x37\x79\x4c\x0c\x68\x4a\x08\xf5\x47\x08\xf6\x43\x7b\xf2\x21\x12\xdc\xcb\xc0\x66\xd4\x64\x35\xf1\x76\x52\x8e\xdb\x1d\x9b\x52\x26\x28\x6c\x16\x6e\xdd\xc1\x47\x37\xde\x4f\xce\xb8\xe1\xfd\x9a\x0c\x0f\x8b\xa9\xe8\x80\x94\x13\x95\x75\x28\x52\xc6\xe4\x23\x7e\xef\xcb\x95\xeb\x5e\xdd\x5d\xc1\x8d\xee\x8d\x1c\x91\xd5\x80\x74\x28\xab\x86\x9f\xf5\xf6\x4b\x2b\x2c\x21\x55\x39\xd0\x59\x02\x87\xd9\x9b\xfa\x1f\xff\x4a\x65\xc5\xbf\x71\x7f\xcc\x36\xc5\xbe\x6b\x55\xd5\xc1\xd3\x0a\x88\xb2\x77\xab\x4b\x69\xef\x1b\x71\x7a\x22\xdd\x10\x12\xea\xc1\xec\x53\x74\x3e\xd0\xe3\xa4\x59\xed\x3c\x8f\xbc\x41\x2f\x53\x0e\xb7\x73\x93\xcd\xaa\xa2\xc9\x97\x5c\x81\x20\x27\xe3\xb2\xd5\x29\x32\x9d\xcd\x04\x20\xd7\xb8\x75\x52\x61\x57\x1b\x91\x5d\x6e\x4f\xca\xe5\x02\x12\xcf\xbe\xd0\x28\x43\x36\xb6\xbf\x87\x3c\x01\x22\xe4\xf1\x19\xe0\x3f\x6d\x26\x0f\x2b\x90\xa5\x90\xbe\xb9\x25\x02\xab\xdc\xe8\x88\xac\x53\xb6\x77\x95\x86\x36\xac\x7d\x8d\x29\x98\x01\x6d\xa4\x53\x4a\xa0\xec\xe1\x7a\xc5\xc2\xf2\x93\x77\x97\xbb\x60\x19\x4e\x02\x19\xe8\x46\xe5\x36\xca\x5d\x5e\x59\xfd\xbd\x0d\x0f\x2a\x37\x46\xc4\x1c\x17\x28\xd3\x6e\x17\x69\xde\xe9\xb1\x1a\x22\x71\x2f\xee\xde\x9c\x1f\x63\xaf\xa8\x4b\xfe\x46\x57\xa8\xa0\x52\xfb\x0e\x12\x81\xb5\x4c\xe0\x88\x18\x77\x45\xec\xdd\x77\xc8\x13\x7a\x16\xc9\x30\x7d\x13\x7e\xb7\x53\xd6\xcd\x68\xe8\xb1\x9d\xc2\x2d\x90\x90\xbc\xae\xbd\x72\x0a\xcc\xc6\xe3\xc9\xfe\x76\x67\x42\xbb\x96\x72\x6b\x56\x35\x8c\xd3\xd5\x1e\xe9\x81\x86\x98\xdc\x81\x
ee\x45\x30\x11\x5b\x85\x8d\xea\x6e\x03\xef\xaf\xc0\xd9\x6b\x32\x5b\x59\x6c\xd0\x60\x51\x0b\x03\x58\x86\x43\x25\xca\x9b\x66\x9f\xa1\x6c\xc4\x84\xa8\xa3\x9d\xe8\xd6\xc0\xa5\x42\xde\xce\x49\x19\x12\xdb\x68\xdc\x7d\xdc\x65\xa5\x0b\x78\x53\xf7\x80\x5f\xea\x10\xda\x1a\xdf\x5a\x46\x9d\xb6\x9d\x7c\xb7\x4d\xd0\x17\x1a\xf7\xb4\x9b\x9e\x20\x87\xe8\xe3\xd3\x29\x7a\x7a\x1f\xf5\x7f\xd3\x3f\xca\xab\x8a\x64\xfe\xf0\xf6\x12\xde\x6b\xd1\xb9\x00\xf5\x51\xe7\x37\x74\xce\x63\xc4\xc1\xa1\xa0\x35\xff\x7c\x12\xb7\x9d\x72\xcf\x58\x61\xa3\xff\x71\x13\x27\xdd\x01\x11\xa5\x88\x8a\x78\x8a\x43\x56\x52\xf0\xc3\x0f\xd6\x39\x4e\xf5\xda\x4c\x2a\x13\xcf\xaa\x26\xc4\x06\xeb\x75\xb6\x15\x96\x0f\xf6\x37\x1c\xe5\x1a\x1e\xab\xc4\x8a\x28\x07\xe1\x5d\xc4\xd3\xce\x5b\xa6\xb2\x7d\x5b\xfe\x6c\xb8\x7b\xc7\xf9\x8a\x8f\xc8\x83\x3b\x98\xe5\xce\xa4\xb8\xc3\x44\x2f\x78\xea\xdd\x26\x3b\x85\xff\xf8\x97\xa7\xb2\xdb\x82\x82\x96\x97\x85\x1b\x32\xc0\xe4\x4d\x0c\xfd\x94\x5f\x11\xff\x3c\x5a\x18\xa4\x9b\x63\xc5\x30\xf6\x4e\x6f\xc8\xf7\x63\x74\x5f\x99\x01\xb8\x96\x19\xbc\x41\x97\xe3\x59\xcf\x38\x8b\x8e\xa7\xf4\x1b\x65\xa3\x50\x69\x40\xef\x01\xc2\x8b\x73\xf6\xca\xb3\x97\xd1\x17\x04\x91\xb3\x2a\x41\xb0\x64\x6f\x8c\xc7\x65\xd3\x02\x0a\xaa\xaf\x70\x8b\x57\x6a\xed\x76\x70\x35\xd0\x4e\x2d\x74\x88\xb0\x53\x13\x82\x9a\xcc\xe8\x48\xc8\x5a\xdf\x63\xd8\x5e\x99\x4e\xfe\x14\xc2\x78\x2e\x3f\x92\x54\x95\x7a\xa2\xce\x2b\x84\xeb\xcb\x92\x11\x47\xa4\x2f\x54\x97\x73\x10\x1d\x35\x65\xf0\x68\x98\x84\x38\x1f\x9f\x40\x24\x8e\xf7\x88\x9d\x14\x1b\x1b\xec\xdf\x1d\x04\x51\x6f\x96\x53\xf7\x12\x56\xa7\xd9\xcd\x52\x0c\xfb\x08\xd6\x68\x05\x3d\x1e\xd2\xf3\x1d\x59\xb2\x5e\x7a\xec\xf2\xb1\x65\x97\x70\x3e\x0f\x0a\xa0\x1f\xef\x20\x10\x70\x33\xdf\xc8\x1d\xf3\x78\x28\x5e\xe3\x11\xb6\x4e\x65\x34\x47\x90\x1e\x26\x2d\x07\xf5\x21\x60\x07\x75\x0a\xc7\xc1\x6a\x2e\xa3\xe4\x87\x3e\xfb\x28\x0d\xf6\x0f\x55\x2c\xf2\xc9\xfd\xcc\x20\x8e\x8a\x8c\x6e\xfb\x5a\x58\x66\x55\x26\x80\xed\xb3\xf8\x3c\x5b\xe1\x9e\xe1\x7b\x75\x29\x1c\xe8\xa7\xe5\x80\xbe\x74\x9c\x66\x4a\xa2\xdd\x30\x79\xfb\x8a\xf1\xf0\x08\x5e\x80\x12\x01\xd0\x85\xe0\xfa\x05\x63\x46\x3e\x19\xfd\x87\xd9\x6f\x63\xcd\x4f\x61\x4e\xe0\x08\xd4\x93\x83\x17\x65\x8d\xcb\x2a\x47\x4a\xc9\x5b\x83\x42\x40\xc0\x5f\xad\x8a\x35\xc5\xbe\x80\x84\x02\xfc\x18\xa4\xd4\xcf\x95\x51\xb2\x4f\x0e\x01\x9c\x92\x0f\xeb\x5f\x9e\x61\xd3\x52\x96\x78\xa7\x7e\xf4\xb2\x07\x7b\x26\x9b\xf2\x62\xab\x57\xce\x45\xb6\x29\x1e\x5b\x9d\x96\x20\xa5\x36\xb9\x1d\x46\x5b\xa7\x9d\xb5\x44\xfb\x80\xf5\xe6\x54\x1f\x58\x4a\xd1\x60\x07\xc8\x58\x5c\x1f\xaa\x9c\x99\x77\xed\x92\xb9\xd7\x90\xec\x3d\xbc\xb8\x5a\xf7\x2e\x36\xba\x14\x55\xff\x2d\x39\x9b\x3b\xfb\xf3\x7e\x99\x93\xc1\x6d\x28\x14\xda\x8f\xde\x52\x70\xf5\x2e\x22\x34\x50\x96\x5b\xee\x5e\x38\x62\x25\x89\x54\x4f\xe9\x9c\xfb\x17\xda\xba\x6c\xb2\x1f\x2b\x53\x58\x57\x6a\xc4\xf6\xb1\xf4\x96\xc1\x6f\xf6\xf0\xdd\x1e\x9f\xb0\xe2\x7a\xf4\x91\xcf\x47\x2b\x2e\x35\x0e\xa8\x47\x31\x66\xff\x32\xe0\x6f\xe6\x9c\xc6\xc6\xca\x30\x9a\xc6\x2f\x62\x81\x48\x07\x1c\xcf\xc2\x6c\x18\xc0\x0c\x17\xcc\xe0\x0a\x84\x11\x68\x29\x66\x4a\xcd\x5a\x05\xd4\x34\x58\xb9\x38\x04\x7e\xc8\x28\x01\x29\x48\xf8\xd1\x02\x19\x2c\x75\x06\x05\xdf\x6f\x8d\x3d\x10\x07\xa8\x44\xb3\xb7\x8c\xaa\x5d\x6d\x19\xf7\x7d\xf5\x46\x46\x42\x8a\x86\x72\xd4\xf0\x98\xae\x61\xee\x89\xac\x96\xf6\xfe\xef\x9c\x53\x68\xb5\x43\x1b\x0d\xba\x41\xb5\x6c\xd0\x91\xd9\xf8\xec\xdf\xc4\xa3\xfa\x9f\x13\xf6\x2c\xee\x6e\xb4\xd5\xf6\x6a\xcd\xef\xb7\x2c\xbe\xf2\x45\xa1\x58\x61\xca\xaa\x57\xe1\x48\x4d\x0e\xb5\x25\x5e\xce\x72\x45\x8f\x4b\x1d\x33\x8c\x3c\xf1\xac\xa0\xb6\x3a\x83\x50\x8c\x7c\x4d\xe1\xeb\x63\xea\x19\xcd\xb5\xf6\xdb\x3d\
x68\x20\xf0\xac\xc1\x87\xe6\xf0\x89\x72\x9b\xcb\x76\xfe\x15\x1d\x04\x14\x7b\xb6\x0c\xce\x43\x6e\x50\x65\x50\x77\x1c\x59\xdc\x23\xb5\x25\xd6\xa4\xfa\x31\xe7\x2c\x1d\x55\x06\x9c\xcc\xb2\x26\x5b\xb4\x92\xff\x1d\x59\x78\xe9\x72\x6e\x26\x30\x2d\xe8\x23\xcd\xf2\x39\x44\x00\x2f\xd9\x7e\x18\x57\xee\xeb\x52\x36\xd0\x6a\x5d\xce\x5c\x6a\xdc\x19\x20\x23\x54\x96\x5b\x75\xb7\x33\xe7\x41\x4a\xec\xb2\xa1\xdc\x47\xf5\x07\x76\x18\xea\x24\x99\xb1\xcf\xf4\x5a\xdd\x9c\x7d\x3d\xb2\xcc\x9e\xc5\x78\xe6\xae\x82\x51\xd0\x35\xb5\xa5\xb3\xb4\xba\x47\x08\x90\xd8\x0c\xe0\x5f\x79\x87\x8f\x26\xc3\xc3\x25\xa6\x1d\x99\x93\xba\x0b\xdb\x80\x94\x83\x68\x25\x40\x8d\x7d\xf1\x28\x50\x1f\x02\x3f\x3e\x74\xd0\xf5\x53\x93\x4e\xb1\x19\x96\xb2\x5d\xf7\xde\x61\x06\x9b\x92\xb7\x53\x88\x48\x4d\x90\xd0\x87\x82\xb0\xda\xeb\x00\xc6\xc8\xd2\x32\xe7\xa1\xec\xda\x62\x8b\x04\xc5\x87\x9a\x05\x7c\x0b\x9f\xc9\x32\x6f\x25\x98\xd0\xe0\xf7\x90\xa0\x7b\xa5\x1b\xb8\x16\x09\xb9\xa8\x90\xb8\x5f\xb6\x4e\xb4\x61\x7e\xe6\x9f\xd1\xdd\x06\x19\xa8\x3e\x7b\x97\x82\xcf\xb2\xb7\xd2\xfc\x23\x93\x30\x6f\x3f\x7f\xfa\x32\xeb\x94\xe5\x8b\x7c\x86\x7d\x4b\xbd\x5c\x87\xa0\x85\xb8\x49\xbf\xcd\xb9\xe4\x6a\x20\x2c\xb5\xa2\xb7\x6d\xa6\xfa\x02\x07\x64\xff\xd3\x1e\xe3\x52\x8a\x92\x9e\x7f\x53\xc4\xe2\x76\x56\x5b\x11\x26\x2e\x56\x2a\x39\x9c\xc7\xc0\x05\xd7\xab\x5f\x1c\x47\xe2\x9b\x67\x3c\x06\x30\xf1\x74\xb5\x2b\x08\xb7\xd8\x65\x0c\x1d\xcb\xdb\x77\x68\xf8\xe5\x15\x7d\x11\xa8\x6b\x70\x38\xc6\xdd\x3f\x36\x02\x10\xc1\x78\x23\x7d\x02\xd9\xe1\xe5\x45\x9f\x3a\x22\x7b\xe2\x54\x90\xa4\x5c\xa4\x55\xd1\x3f\xf2\x1c\xf0\x12\xd8\xba\x9e\xa9\x1f\xf6\x3d\x78\x84\xaa\xf0\xe1\x7e\xe6\xc3\x04\xb8\xc7\xf8\x13\xed\x69\xe8\xcf\xdd\x53\x58\x0d\x11\xc0\xbd\x4c\xbe\x08\x28\x45\xa2\xe1\xb0\x1d\x72\x26\xdb\xf7\xc8\xad\x59\xba\xf6\xe4\xa6\x08\x0a\x01\x9e\x1f\x1c\x77\x2c\x6e\xd5\x6b\xb0\xf1\xc7\x70\x7e\xec\xfe\x39\x9c\x4e\x6e\x2a\xd7\xd9\xaa\x1b\x51\x29\xce\xb0\xa1\x1d\x36\xbb\x76\x0f\x30\xed\xb0\xa5\x19\x7a\xcd\xc7\x65\xc7\x38\x6c\xf2\xb0\x82\xa0\x3d\xd9\xe1\x2c\x67\x87\xe4\x4c\xc1\x12\x1a\xa4\x2d\x18\xe4\xe3\x3c\x84\x2b\xcc\x90\x42\x56\x81\xf3\xe1\x57\x53\xc0\x90\xba\x41\x7d\xc8\x72\xbd\x74\xf6\x9b\xe1\x21\xd6\xbf\x8b\xc3\x7d\xdf\xf2\xe5\x3f\x75\xdf\x54\x86\xdd\xc1\x54\x60\xeb\x62\x83\xb6\x45\xbd\x0d\x34\x87\x75\xb5\x7d\x25\x4d\x40\xcd\x30\xe5\xec\x9c\x75\x1b\x3d\x52\x7b\x22\x4f\xb4\x71\xab\x57\xba\xac\xb4\x2d\x67\xb8\x5c\xd0\xa4\x72\xc0\x08\x53\x11\x9b\x1c\x23\x92\x0a\xf5\x4a\x50\x6c\x3b\x20\xf1\x0e\x1b\xaa\xea\xf3\x0a\xfb\xec\xa0\x92\x5c\x91\x98\xd5\x37\xf5\x5e\x36\xe3\x62\x81\x40\x06\x61\x6d\x3c\x7a\x04\xea\x95\xa0\x1d\xaa\x59\xba\x7f\x05\xeb\x02\x76\x94\x5c\x72\x7a\xd1\x6a\xfa\xe5\xef\xa3\xaa\x96\xfd\x0e\x4b\xed\xb1\x51\x7a\x48\xd1\x04\x23\xed\x8f\x5a\x26\xa4\x7a\x07\x2b\xa4\x9c\x00\x4d\xbf\x89\xfd\x51\x16\x15\x14\x5c\x65\x76\x72\xbb\xa4\xcc\x96\x6c\x95\xa4\x2d\x4c\xf4\xcb\x06\x1e\xde\xf5\xb4\x37\xab\x74\xc8\x6e\x4b\xef\xd0\x03\x76\x9b\xd7\xf2\x2e\x7f\xec\x35\xd3\xe5\x33\x50\xf7\x2e\x07\xf5\x02\x3f\x83\xff\x6a\xc9\x43\xce\x94\xe8\xc8\x91\x3b\x94\x12\xaf\x08\xc3\x51\xff\x64\x8c\xf6\x08\x1a\xbf\x6e\x9b\x48\x3c\xe6\xfb\xd1\x4e\x71\x67\x65\x38\x77\x87\xfc\xbc\xe4\x0b\xd8\x95\x6d\xd1\xef\x4d\x1d\x92\x2f\x2f\xcf\xb2\x9f\xd2\x3f\x29\x87\xb9\x5e\x04\xb9\x8b\x23\x52\x97\x1f\x10\x22\xb7\x0a\x32\x42\x37\xd1\x89\x62\x52\xd6\x00\xc8\x2e\x79\x2f\x12\x7a\x3c\x83\x77\xff\x18\x58\x5c\x7e\xa8\x31\x26\xa7\x96\x96\xa5\x6a\x19\x54\xbc\xa6\xcd\xef\x6a\x67\x6c\x01\x84\x02\x34\x35\xfe\xea\x9c\x0c\x57\x6a\xa6\x4d\xa6\xa2\x89\x42\xfb\x14\xd6\x73\x3e\xd2\x88\x32\xc4\xe8\xab\x4c\x94\xa4\x87\xec\xfe\x7e\xee\x55\x8f\x8f\x93\xb9\xde
\xef\x05\x94\x3a\xfd\xbd\x3e\xa6\x3b\x47\x73\x48\xc2\x9d\x8f\x01\xf6\xe5\x72\x30\xb5\x43\xfe\x72\xa1\xa8\x68\x3c\x28\xab\x58\x35\x12\x88\xce\x11\x56\xb9\x68\xd8\x98\x91\x40\x73\xee\xea\xd5\x8b\x23\x8a\xfa\x6d\x67\x0f\x1e\xd6\xa2\xef\x17\x7e\x64\xcd\x16\x92\x30\x13\x48\xd4\x5b\x21\x14\xee\x6f\x31\x59\x81\x58\x06\xf6\xd5\x6d\x53\xcd\xf7\x93\x7f\xb7\x94\xfb\xeb\xb5\xee\x80\x60\xa2\x3b\xfd\x05\xa4\x11\xf6\xc2\x7e\x76\x17\x74\x94\x2e\xbd\x4c\x42\x09\x07\x2b\x30\x4d\x3e\xd1\xc2\xe9\xf2\x9f\x7d\xeb\x22\xb8\xa6\x39\x23\x0a\xb0\xd9\xce\x45\xd3\x3d\xb5\x14\x41\x3f\x81\x6a\xcb\x46\x72\x5b\x7c\x85\xa4\x9f\xb0\x85\x52\x1d\x76\xc9\xfd\x3c\x82\x81\x02\x2b\x81\x46\xe6\x68\xcd\x34\xd6\xee\x0e\x13\xa8\xde\xaa\x98\x9a\xf1\x72\x93\x00\x41\xc5\xad\x9d\xe3\xb8\x52\x93\x44\x0f\x69\x2a\x6e\x8a\xfc\xa4\x43\xb7\x4c\x41\x7e\x6a\x2c\xfb\xbd\x04\x40\xdc\x7a\xd6\x06\x25\x73\xa8\x01\x1a\xec\x99\xec\x7b\x0e\x1e\x85\x9a\x3d\xed\x53\xf6\x28\xee\xf2\x04\xfb\xe3\x55\x7e\x33\x77\x69\xca\x95\x42\x0d\x9a\x44\x14\xfb\x6e\x05\xc7\xe5\x4a\xce\x27\x6c\x73\x99\xf4\x26\xa2\xd2\x14\x69\xa8\x57\x83\xbd\x2b\xc9\x64\x6a\x14\xad\x97\x09\x6f\xa2\x95\xa6\xda\x40\xe2\x88\xdd\xa8\xc5\x70\xeb\x87\x51\x72\x8c\xbf\xaf\xae\x77\x68\x25\xa8\x17\x9d\x06\x26\xd9\x7a\x73\x0b\x21\x90\xed\x11\x20\x83\xe7\x6a\x53\x3e\xd6\x52\x0a\x87\xbe\x8b\xa6\x15\xb6\xb1\xaf\x2b\xdf\x49\x86\xe7\x0e\x70\xb2\x7b\x9e\xde\x01\x82\x0c\xca\xd9\xd3\xb0\x88\x4d\x44\xf4\x6e\x70\x40\x5f\x5a\xde\x4c\x3c\xd5\x3f\xca\x0a\xf9\x18\x27\xfb\x2d\xdb\x4a\x4c\x59\x6f\xf6\x9e\x4d\x6e\x81\x36\x99\x4e\x07\xf9\x9c\x3a\x76\x96\x46\x2f\xeb\x6f\x24\xd4\xa9\xc3\xa4\x44\xc2\x37\x6c\xde\x82\xde\x01\xe3\x3d\xf7\x9d\xf8\x65\xbc\xcb\xdc\x40\x9b\x7b\xf5\xa0\xfa\x66\xb7\x06\xf2\x0d\x2d\xfd\xb2\x89\x2b\xb3\xf3\x5f\x28\xcb\xb1\xd0\xb5\x17\xb3\x9d\xf8\xf5\xbe\xc6\x21\x18\xa7\xaf\x91\x67\x1c\x49\x8c\xb3\x05\xa0\xe5\x20\x4b\xf8\x73\x92\x8f\xe6\x9e\x0f\x68\x0d\x6d\xed\x2a\x95\xbd\xd2\xc5\x59\x82\x53\x41\x7b\x86\x9d\xfa\xbd\xbf\x3a\x53\xcb\xbd\x1a\x59\x55\xc3\xbd\x2a\x09\xe9\xa5\xdc\x0f\x3e\x8d\xab\x15\x1e\xe2\x1c\x67\x97\xe8\x64\xe7\xcb\x8e\x7c\x3d\x26\x1b\x57\x18\xc7\xf6\xd9\x25\x00\xc9\x3e\x56\xcf\xc9\x16\x06\x97\xe5\x3a\x9d\x84\x44\xb4\x41\xa2\x56\x28\x3e\xe0\xb4\x85\xc1\xb8\x69\x09\x87\xe1\xac\x0b\x46\xd4\x1d\xbc\xc5\x76\xa6\x06\x4c\xdd\x27\xc7\xb6\xd2\x3b\x58\x96\x06\xf8\x28\xaa\xff\xdf\x94\xa1\x68\x74\xb7\xc0\xa1\x9c\x9a\x7f\x49\xa6\x04\xd4\x02\x5c\x2d\xa2\x68\x18\x73\x3c\x46\xb7\x08\xdb\xde\xa3\xa6\xda\xf1\x98\x96\xba\x37\x31\x5c\x70\x03\x5b\xc2\xbc\x2d\x91\x94\x4a\xa8\x39\xd2\x4f\x49\xd0\x8e\x98\x57\x18\x9f\xf9\xa9\xa1\x3c\x89\xce\x93\x5c\xda\xc5\x95\x19\xef\xf7\x68\x42\x55\x67\xbb\xe3\x6a\xea\xd0\xb2\x4d\xbd\x21\x73\x40\x60\xe3\xa3\x1e\x13\x31\x9f\xb2\x4d\x2f\x55\x15\xbb\x3b\x33\x14\x5a\x37\xb2\x56\xd8\x91\xdf\xa7\x20\xb6\x72\x91\xdb\x2c\xfa\x28\xfb\x1e\x1b\xa6\x43\xff\x61\xee\xca\x63\x93\x9f\x23\x60\x2b\x35\xa6\x90\x13\xe5\x5e\xcd\x72\x31\x82\xfa\xe8\xf2\xf4\x41\xc0\xe9\x3b\xb5\x13\x61\x2a\x56\x4e\x1a\xfd\x18\xad\x3c\x5f\xcb\x2d\xfb\xd7\xb2\x4e\xb7\xf8\x59\x95\xa8\xc9\x26\x6a\x9c\xa5\xed\x19\xcd\x9c\xf2\xf6\x48\xcd\x28\x5b\x90\x10\x51\xe4\x5f\xab\xad\xcb\x0c\x23\xa8\x6d\xeb\x7f\x7d\x63\xb1\x87\x3b\x5f\xa5\xbe\x0a\xbb\x3c\x59\xe7\xa9\x0b\xa2\xdf\xde\xd6\x79\xa0\x90\x54\xf9\xc6\x39\x45\xc0\xec\x6a\xd1\x39\xb4\x1c\x97\x97\x1e\xb5\x1c\xb7\xda\x91\x89\x3e\x8c\x5b\x2e\x41\x37\x8f\x47\x0f\xb6\xf1\xbd\x85\xda\x43\x44\x04\xdf\x81\x0d\xa8\xce\x9a\x30\x5d\x17\x1b\xc5\x3d\x1f\x35\xd4\x39\x2a\xb3\xa0\x1d\x11\x39\xb6\x12\x24\xc4\x9f\x32\x44\xd0\xaf\x49\x33\x8c\x73\x35\x05\x65\x6e\x12\x59\x20\x54\xbe\x78\x7e\x81\x2
d\x42\xac\xa2\xd2\x84\x8c\x32\x3f\xc8\x4d\x34\x67\x88\x4c\xe9\xb1\x4e\x82\xda\xe0\x05\xda\xd5\xe6\x0d\xaa\xfc\xba\xbf\x2f\xbb\xeb\x23\x0c\xbb\x31\x0e\xcf\xfd\x55\xd6\xa4\xce\xaa\x0a\x5a\x0c\x5c\x1d\x94\xf3\xb7\xaa\x49\x8a\xc4\xde\xe5\xa1\x3b\xb4\x83\x29\x9b\x8f\x72\x9d\xd8\x40\x69\x35\xc5\xfa\x66\x47\x3e\xbb\xce\x96\x8f\x20\x36\x2f\x28\x28\x63\xd9\x0a\xcb\xc1\x41\x5e\x6d\x30\xde\xfb\x68\x00\x73\x55\x13\x3f\xb0\x6e\x9e\x84\x0f\x3c\x0c\x16\x0a\x34\x5f\x6b\x15\x95\xb4\x52\x3c\x94\x21\x7f\xd4\x72\x91\xe1\x3a\x72\xc3\xc7\xbc\x93\x60\x89\x6c\x5d\x70\x95\xd8\xeb\xc1\xed\xd2\x5e\x3b\x85\x7c\xa9\xce\xac\xf8\x2e\x73\xb4\xd9\x5a\xb9\xd3\x15\x21\x8f\x78\xeb\x4d\x3e\x65\xff\xc2\xcb\x6f\xc9\xa0\xb9\x69\xf2\x5f\x32\x10\x5c\x55\x9d\x0b\x2d\x16\xfa\x18\x01\xb0\x53\x63\xe8\x8d\x9d\x0a\x75\x2c\xe2\x13\x60\x94\xd4\x9b\xd4\xf9\xf8\x56\x17\x6b\x09\xb5\xe8\x22\xbe\xe8\x5c\xa6\x6c\x93\xaa\x67\xc5\xe5\xcf\x26\x3e\xce\xe0\xc6\xb7\x7a\x3b\xe8\x5f\x49\x5e\x92\xb9\x06\xbb\xe1\x42\xc4\xd1\x7a\x97\x5c\x7d\xa8\xe6\xcc\xc9\x0a\xd1\xea\x3c\x6a\xc2\x22\x7c\x26\x23\xdd\xd1\xe3\x98\x2d\xc2\x41\x70\xfc\x32\x80\x78\x18\x55\x93\x4d\x3d\x7a\x76\x49\x16\x10\x00\xad\x07\x2f\x62\x94\xa7\xeb\xb6\xd3\x6d\x97\x42\xad\x4f\x94\x6b\xae\x46\x9b\xb3\x33\x7a\xe6\x4a\x8e\xc3\xa8\x7d\x44\xe3\x69\x3b\xf2\x79\xf7\xa8\x66\x7d\xd1\x7b\x48\x31\xc4\x8b\xfc\x9b\xe6\xb0\x5d\x81\x08\xc7\x2f\xfc\x63\x83\x0c\x94\x79\xb5\xe3\x33\x13\x8e\x33\xbb\x26\x98\x24\xd3\x87\x6c\x7f\xf8\xda\xb3\x0d\x75\x56\xb1\x0d\xd2\x6b\x06\x79\x4b\x20\x9b\xd8\x42\x67\xb8\x54\x94\x6d\x35\x49\x30\xe3\xfe\x27\xee\x0c\x7f\x57\x35\xae\xca\x6a\x1a\x62\x1d\x30\x6a\x64\x52\x01\x5d\xda\x2e\x6e\x4c\xf9\xab\x4c\x0d\xb9\x6d\x73\xfe\xd8\xac\x5a\x79\x42\x7a\x99\xa9\x0d\xd0\x0a\xf9\x77\x7a\x6e\xda\xae\x3e\x45\x93\x78\x0a\x23\x7d\x8e\x41\xb3\xff\xc9\x9f\x4d\xa7\x10\x8c\xa1\x37\x31\x4d\x98\xf7\x68\x2e\xcc\x92\xdb\x1e\xdc\xb6\x5b\x8d\x29\xf3\xaa\x89\xcc\x67\x34\x59\x25\x4d\x86\xd3\x60\x05\xed\xcb\x7c\x79\x5e\xf4\x99\xb6\xcc\x57\xa1\xee\xa9\xb1\x65\x7e\x18\xab\x34\x32\xb2\x74\x87\x42\xf3\xcd\x56\xd9\x10\x43\x51\x73\x90\x55\x4a\xc2\x9e\x42\x1d\x9f\x0f\x35\xc7\xda\xa2\x3c\x3e\x91\xde\xf7\xb8\xd6\x13\x0e\xcd\x5f\x29\x05\x2e\x1f\x97\xdf\xff\x24\xf4\xa1\x4f\x7e\x66\x43\x28\x67\xfb\x52\xc8\x49\xe7\x91\x91\x83\x70\x63\x6e\xb2\xb3\x94\x7f\xd4\x76\xa6\x15\x22\x27\x24\x04\xcd\xcb\x64\xbf\x1f\xe7\xb2\xfa\xaa\xf3\x84\xa4\x90\x48\x54\x4e\xf3\xb9\x94\x7a\x6a\x13\xe9\xa5\xfc\x20\xca\x43\x4b\xe3\xf3\xe4\xad\x7d\x41\x1e\x89\x33\xf9\x70\xca\xd7\x6c\xd8\xce\xe8\x14\x0e\x50\x48\xc4\x50\x1c\x16\x20\xac\x6a\x21\x61\xcd\x8b\xbd\xf9\xb0\x39\x53\x0b\xd1\xbe\xaf\x8f\x07\x6e\x62\xd5\xb0\x6d\x69\x89\x8e\x2a\x49\x1f\x72\x07\xee\xa1\x03\x98\xbd\x91\x18\x22\x98\x61\xd0\x81\x1b\x56\xd8\x70\xf1\x0b\x28\x9b\xfd\xdc\x8e\x66\xcd\xac\xd1\x63\x42\x55\x21\xfb\x8b\x52\x87\x4c\x91\xe9\x82\xa6\x12\x14\xdc\xfe\x4b\x0f\x2b\xf3\x9b\x1a\x4c\xa1\x7b\xbf\x57\xd1\xe9\x3d\x8b\xaf\x75\x77\x8e\xfd\xdd\x2d\xb6\xfb\xe8\x66\x0f\x1d\x19\x35\x48\x9b\xb9\x33\xfd\x3f\x5d\x0d\xb0\x64\xb4\xd3\x7f\x29\x55\xa9\x23\xfa\x98\x95\xdb\xff\x1a\xdd\x84\xd9\xfe\x36\x7e\xd3\x4e\x70\x4f\x27\xe4\xcb\x22\x21\x3f\xc5\x50\x0e\xea\xbd\x7e\x7d\x6c\xba\x0e\x98\xd0\x3c\xb9\x7b\x74\x8c\xf6\x4f\xc4\x3e\xc5\x7a\x21\x9f\xa8\xbd\x97\x06\x75\x91\xe0\x05\xa8\x87\x86\x57\x44\x2f\xe4\x40\x7e\xfd\xfe\x07\x3b\xff\x44\x6c\x61\x96\x85\x5e\x6c\x17\x1e\x05\x79\xa2\x2f\x75\x96\x1b\x38\xe7\x12\xd0\x48\x77\x0e\xda\x2a\xb2\xa1\x14\x4d\xa3\x13\xf2\xcf\xde\x82\xa4\xe8\x29\x8d\xf4\x69\x0e\x6a\x0d\x84\xad\xf5\xfe\x14\xd6\xb9\xf5\x60\xbb\x99\x52\x33\x7d\xa5\xab\x95\xad\x27\xe5\xfe\x49\x1a\xa4\x25\xbc\x
39\x08\xef\x77\x63\x67\x65\x50\xa9\x6e\xbc\xc7\xe8\xc6\xf4\xd7\xaf\x8e\x78\x23\xdf\xc6\x50\x42\xf9\xc9\x73\x8d\xdf\x77\xce\x9b\x29\x36\x97\xa5\x46\x7f\x0f\xa5\x52\xeb\xb6\x29\x7a\x48\x7e\x65\x10\xc1\xf1\x33\x97\x57\x94\xf7\x34\x52\xdd\x1e\x9b\x16\x78\xc5\x05\x58\x6e\x74\x8a\x87\xe3\xee\x92\xee\x23\x71\xcd\x4b\x5d\x4b\xb7\x92\x6b\x57\xc2\x63\xf7\x65\x4a\xe5\xf8\x4f\xbe\x89\x44\xf7\xa2\xb5\x8b\xe2\x75\x8a\xf6\x8e\x75\x37\x35\xd2\xad\x9a\x6f\xa0\xba\x88\x2c\xb6\xda\xc3\x5a\x0c\x9d\x52\xbf\x95\x6a\xa4\x81\x8c\x1b\xfb\x5b\x90\x78\x5a\x7f\x4e\x9b\x38\x5a\x52\x84\x34\x82\xe0\xf3\x94\x95\x38\x95\x05\x6b\xd4\x1f\x52\x2c\xc4\xb6\xe0\xd0\x30\x23\x50\xc3\xdd\xa4\xea\x3e\x8f\x61\x68\x03\x4a\xb9\xb5\x45\xe2\x75\xb9\x30\xfe\x92\x86\x08\xbf\x66\xf3\x86\x61\x70\xed\x18\xb1\x75\x65\x82\x37\xbf\xa7\xac\xb1\x5c\x07\x21\x5c\x62\x3e\x74\x15\x2e\x9d\x4f\xa5\xc5\x49\x71\x93\xe4\x23\xc9\x96\x53\x04\xdd\x80\x1c\xe4\x09\x1e\x6a\x26\x30\x66\x9d\x11\x07\xd5\x93\xdb\x47\x66\x21\x35\xe7\xdf\x9f\x65\x4b\xbb\x23\x4a\x9a\x13\xc1\x24\xea\x9e\x9c\x8a\xd0\x6c\xe0\x46\x53\x05\x4b\xea\xf6\x50\xae\xa4\x76\x52\x9f\xec\xb7\xb1\xb9\x29\x45\x2f\x24\xbb\x64\x94\x59\x27\xb4\x4e\x39\x18\x52\xcd\x04\x34\x97\xff\xc2\xb8\x3d\xbd\x3d\x6f\x7a\x2c\x35\x9d\x28\x40\xce\xa5\xeb\x11\x93\xa1\x0e\xbe\x68\x41\xea\x18\x95\x79\x5b\xf2\x82\xa6\x6f\x9f\xef\x2c\x54\xa2\xe9\x9d\xe6\xd6\x98\x2e\x0d\xe5\xb7\x8b\x84\x26\x0d\xab\x13\xd9\xca\x70\xd0\x5f\x9b\xb2\x43\x8a\xa8\xf4\x74\x85\x07\xa6\xf3\x17\x95\x9c\x0c\x30\x0e\x1b\xe6\xa8\x51\x14\x1a\x27\x14\xa1\xe1\xbc\x0f\xef\x54\xde\xec\x55\x59\x04\x92\xc2\x95\x81\xb0\x2d\xd9\x4c\xec\xb2\x16\x68\x5d\x85\x57\xca\xe1\x2e\x47\x19\x27\xed\x79\xd2\x5a\xca\xa2\xe9\x13\x6e\x1e\x91\xb2\x6e\x2a\x91\x92\x79\xd9\xc2\x01\x53\x49\xba\x29\x01\xe1\x69\x6e\xce\xe9\xab\x8d\x94\x75\x03\x2e\x9e\xd2\x19\xca\x56\x47\xb9\xe2\xd3\xe0\x02\xa2\xcc\xc5\xfb\x53\x29\xe2\xe3\xe6\xa6\xd3\xfd\x8e\xcf\xed\xcf\x50\x13\xc9\xbb\xd7\x05\x9f\xc3\x1e\x1a\x96\x6f\x22\x23\xcd\x96\x21\xa4\x0e\x5b\x04\xf5\xc3\x2e\x7a\x38\x43\x1c\xd6\xb4\x78\x6f\xf7\xd7\xf3\x6b\xfa\xe4\x0f\xf3\xd5\x3f\x99\x2c\xf7\x4f\x6a\x35\x19\xd1\x2e\x53\xcd\xa3\xed\x72\xfc\xb0\x33\x77\xb9\xf5\x1d\xee\xad\xbd\x91\xff\xa3\x1f\x1b\x71\xe3\x1e\x4c\x65\x82\xe1\xa9\x02\xa7\x10\x5e\xb7\x6f\x98\x07\x6b\xe2\x7f\x33\x0b\xd5\x43\xef\x20\x21\x5d\xe8\x9d\xe1\xb0\xc8\x17\xfd\x25\x72\x52\xb4\x93\x2e\xbf\xe0\xf2\xa9\x5a\xc5\x3c\x44\x70\x4e\x99\xa6\x45\x49\x7b\xb1\x73\x27\xd2\xd2\x8a\xc0\x9b\xe8\x10\xd1\x9e\x9b\xa7\xa7\xda\x4a\xd8\x7a\x0e\x95\x75\x64\x24\x21\x6c\x30\xd1\x3e\x3a\xc2\x3a\xb3\xbb\xdc\x38\xd8\x40\xa2\xb3\xd2\xd4\xdb\x9a\x73\x51\xde\xfd\x8d\xba\xac\x71\xf2\x6e\xfd\x78\x2b\xbb\xf5\x70\xdf\x15\x8e\x59\xd5\x47\x73\xfb\xdf\x19\xe9\x0d\x78\x53\x96\x4d\x6d\x67\xf0\xdf\x68\x3c\x62\xfa\x12\xb3\xce\xa3\xe1\xa4\xcc\x11\x53\x9c\x62\x8a\x3b\xa8\x5e\x9e\x39\x72\x6f\xfe\xa0\xa2\xc0\x7f\xcd\x48\xdd\x12\x7d\x29\x93\xd3\x77\x60\x02\xfd\xc4\x80\x8f\x1f\x93\xe8\xa3\x87\xcd\x6f\x39\x14\xc3\xa9\x5c\xf0\x9c\x29\x0f\x68\x4e\xfc\x5b\x7c\x65\x40\xf0\x08\x67\xb7\x4e\x86\x32\xee\x92\x6c\x35\x95\x9e\x7f\x58\x14\xb8\xbf\xdd\x86\x79\x07\x99\x72\x3f\xf7\x2b\x0a\x74\x87\xd5\x69\x50\x68\x38\xb6\xfe\x4b\xf6\x27\x1a\x36\x6e\x7e\x95\x01\x5a\x3d\x4b\x1f\xfd\x2b\xf4\x4d\xfd\xd4\xa6\xb3\x59\x0f\x7a\x56\x0c\xf8\x2f\xf3\xc3\x4a\xda\x9c\xc6\xed\x53\xd6\x1f\x3c\x2a\xef\xe9\x9d\xc2\x21\xa5\x37\x70\x89\x4d\xe3\x10\xfd\xaa\x20\xd6\xa9\x5f\xa5\x06\x52\x59\xa0\x7a\xb9\x42\xa2\x67\x15\x00\x83\xfa\x57\xa2\x3e\x4d\x4a\x0d\x5f\xc7\xff\x50\xa1\x9c\x39\xb1\xa6\x3f\xdd\x40\x41\x87\x11\x42\x44\xe9\x0d\x1a\x32\xb7\xb2\x91\x46\xde\x39\x5f\x75\x96\
4\x40\x64\x5a\x48\x06\xdc\xfe\x02\x1b\x9c\x01\x43\x38\xb5\x30\xdb\x94\x9d\x5e\x76\xf6\xb7\x08\xd3\xd6\x4c\x67\x96\x2f\x82\xb3\xc9\xb5\xc0\xd0\xd8\x7d\x47\x90\x83\x3d\xb7\x49\x35\x55\x98\x73\xb1\x0b\x07\x51\xd1\xf7\x10\x02\xdc\xe4\xac\x85\x6b\xeb\x39\x9a\xb3\x67\xd4\x9e\x54\x6e\x6f\xc4\xa7\x69\x00\x41\x14\xab\xfb\xaf\x94\x17\x89\xdd\xd7\x47\x45\xe4\x26\xbd\x6f\x09\xe8\x5d\x03\xbc\xac\x2a\xab\xf8\xf3\xe6\x00\x17\x72\xc8\xb7\x99\x0b\x7a\x16\xf5\x81\x1d\x08\xdc\x40\x22\x74\xbe\x49\xac\x11\x34\x20\x55\x7b\xc1\xa1\x41\x29\xda\x45\xb7\xd3\x8b\x78\x85\x97\xde\x0b\xea\xb4\x9c\x36\x53\x5b\x91\x09\xed\x6b\x95\xbc\x25\xad\x2f\x43\xc3\x13\x69\xe6\x34\x92\xb0\x7d\x48\x4c\x82\x8d\xad\xb4\x9a\xe5\x20\xbf\x22\xa2\x00\xec\x60\x5f\xc3\xde\x03\xe2\x01\x32\x3a\x79\xfb\x96\xd2\xd7\xa5\x75\x74\x5f\xa3\x64\xa7\xd7\xe1\xfa\x77\xab\x2d\xef\x43\xa0\x37\xec\x09\x22\x04\xc7\xeb\x67\x7c\x72\xd2\x26\x54\x00\x5d\x52\x7d\x00\xfc\x17\xd6\x55\xae\x7f\x3b\x54\xdc\x2d\x9a\x36\x38\xf8\x75\x81\x56\x35\x94\xb3\xf9\xed\xcb\x15\x3a\xd4\xb4\x24\x96\x62\xb5\x5e\xc4\x9a\xb9\x0f\x69\x4a\xbf\x5d\xc1\x3a\x56\xd0\x20\x49\x2d\x04\xbf\x5d\xe0\xed\x8c\xc2\x49\x02\x56\x80\x00\x01\xd7\x26\xd1\x29\x58\x2f\xbc\xdc\x0d\xc7\xa9\x2a\x4c\xfc\x88\x2a\xdc\x91\xc6\x0d\xa5\x55\x9e\xd7\x28\xfa\xbb\x4d\xfb\xe9\x3b\xc0\x3f\x02\x2a\xb7\xa8\xd5\x71\xf6\x9a\x9e\x36\xd3\x5e\x14\x8d\x5e\x9f\x48\x36\x54\x0a\xb1\xf0\x55\x64\xb6\x5f\x19\x89\x3a\x25\xa7\x15\x7b\xd8\x9e\x73\x48\x50\x42\xb0\x1a\x88\xd4\x9a\x5d\xa7\xe6\x82\x4f\xad\x47\xee\x41\x8c\xa3\xfe\x46\x97\x14\xbe\x38\xe7\xb8\x4a\x19\xbc\xb4\x76\xcd\x0e\x70\xba\xc2\xa9\xf5\xb2\x5b\x6e\x1f\x2c\xef\xb6\xf8\x73\x72\xfd\xea\xb3\x8d\xf2\x4c\x07\x59\x9e\x03\x1f\x26\xf7\xb3\x2c\x40\xad\x7b\xe3\x93\xe0\x4c\x79\x59\x33\x01\x60\xb0\x8e\x3e\x3d\x73\x3f\xd0\x7f\x0a\xb8\x06\x03\xad\x6a\xad\xfe\xfb\x7c\xfd\x21\xb3\xd0\xc0\x1e\x8c\xa1\xcd\xee\xf2\xd5\x01\x43\x60\xe6\xf6\x51\x25\x0a\x5b\xbd\x94\x34\xb7\x70\xcc\x01\xe9\xf0\x07\x1d\x77\x3f\xb7\xff\xf6\x27\x91\x3f\x00\x71\x7b\x5f\xa6\xf5\x38\x7c\x1b\x81\x36\x70\x9e\xba\xa5\xb7\xce\x65\xc7\x72\x9b\xff\x1e\xef\x32\xbd\xf6\x4f\xea\xfd\xaf\x7d\x8f\x7f\x0b\xf5\x43\xde\x23\x34\x56\x76\x46\x63\xa0\xf2\xdc\x7d\x54\x13\x59\xa2\x0f\xe0\x4f\xce\xe4\x13\x56\x74\x2a\xeb\xbd\x69\x2a\xa2\x08\x6c\x4b\xc6\xc4\xe4\xbd\xf3\x30\xfa\x5a\x25\xe2\x74\x0d\xa7\xf0\x0b\x17\x37\x1c\x18\xde\xf1\x2d\xc9\x2e\x7a\x61\xd4\x8d\x4c\xea\xcf\xd3\x97\x47\x4f\x18\x5d\xf6\xc4\x32\x08\x6c\x72\x42\xbf\x92\x43\xa9\xa5\xd8\x81\x36\x40\x65\x30\x60\x05\xa3\x63\x29\x26\x21\x17\xc6\x50\xa1\xa9\x7b\x64\x5d\xce\x37\x52\x17\xa8\x36\xdc\xcc\xf4\x94\x48\x37\x99\xf2\x4b\xc4\xad\x9e\x8c\x73\x69\xc3\x2b\xc0\x19\xf2\x9b\xe8\xf2\x96\x2f\xee\x0f\x42\x16\x1c\x14\x70\xee\xac\xae\x40\xd9\x5a\x65\x8c\x03\x00\x7c\xe1\x11\x2e\xae\xc9\x1d\xee\xd9\xc6\xd5\x78\x87\x25\xf0\x55\x16\xf7\x6d\xeb\x43\x9b\xf6\xb5\x5a\x11\xc5\x02\xb6\x3e\x25\x2b\x6f\x26\x68\x41\xb9\xb9\xf5\x62\x76\x87\xc8\x75\xd5\x0d\x55\x29\x28\x48\x0c\x83\xb4\xf9\xbd\x81\x62\x50\xc5\x34\x70\xfa\x72\x58\xa7\x26\xbf\x61\x54\x57\x77\xf8\xff\x3f\x82\x8a\xcf\x1f\xff\xc0\x53\xe4\xa0\x08\xa4\xc6\xab\x86\xfc\xc3\x26\x5c\xdc\x42\xbb\x46\xaa\x01\x70\x0c\xf5\x7e\x0e\xd6\xf8\x1a\xf7\x8e\x71\xb5\xfd\xfa\x9b\x85\x7c\x70\x05\xee\x86\xd3\x95\x83\x86\xdb\x07\xcc\xa8\xfc\x34\x98\x11\x57\x07\xd8\xb1\xbb\xe2\xcd\xc4\x74\x1e\xc3\x39\x0b\x0e\x7f\x0c\xdb\x19\xf0\x3f\x8b\xab\x55\xe3\x81\x89\x5a\x64\x76\xe0\x00\x49\xdc\x94\xa2\xdc\xba\x55\x49\x09\x89\xe9\x0e\x50\x9b\xd6\x9b\xf8\xef\x60\x37\xbe\x74\xba\x97\x14\x17\xa7\x26\x79\xc0\x65\x00\x01\xc1\xbd\xc2\x6d\x84\xe5\xeb\xb6\x09\x53\x07\x15\x1d\x3a\xe1\xee\x2e\x
a5\xd0\x88\x4c\xe5\x72\x3e\x5c\xe8\x46\xde\xb8\x3f\x2a\x54\x62\xe4\xaf\xb6\x7b\x5f\x90\xef\x90\x41\x38\x82\x9e\x7f\x90\x86\x10\xcb\x7f\x0b\xca\xf2\xb9\x62\x1d\x94\x50\x01\xf4\xad\x84\x46\xcc\xeb\x0f\x83\x9e\xca\x01\x2e\x05\xf0\xa8\xa0\x09\x82\x3b\xce\x46\x29\x81\xb7\x28\xf3\xb0\x33\xa4\x5b\xa2\x5b\x4f\xf7\x2a\x89\xec\x94\x5a\xf7\x48\x42\x9c\x1e\xe7\x8f\x54\xd4\x80\x95\x0d\x03\x45\x36\xd3\x8c\x64\xdd\x63\x22\x40\x8e\x10\x24\x75\xf8\x9c\x76\xc9\x60\x6d\x77\x17\xcb\x10\x94\x06\x7e\x5a\x9c\x7d\x94\x31\x85\xed\xb0\xf1\xb5\x4a\x7c\x83\x22\x7f\xd5\x48\x57\x03\xe0\x09\x99\xf0\x52\xed\x8c\xc4\xfc\x6d\x0b\x68\xc4\x06\x3a\x12\x57\xf9\x89\x94\x47\x99\xe8\x13\x2a\x10\x7b\xe0\x94\xb0\x92\x2d\x6d\x05\x00\xa2\x42\x67\x9a\xd5\xb3\x7d\x71\x64\x04\x61\x15\xe5\x43\x71\x1c\x35\x50\xe9\xe1\xb1\x80\x57\xe8\x98\x24\xd0\xa8\x45\xfe\x84\x07\xb4\x42\xb1\xf0\xfc\xfb\x3c\x06\x71\x60\x9f\x73\xf5\xbe\x8d\x1a\xd1\x3e\x65\x62\x3e\x6c\xbb\x16\x19\xca\xa3\xe1\xf0\x82\x29\x05\x72\xda\x86\x0e\xbb\xb9\xa9\x18\x30\x86\x1a\x38\xd6\x46\xf4\x79\xa3\x2a\x6a\xa2\x66\x09\x61\xe1\x86\xb6\x71\x44\x57\xa5\xe3\xdd\xb8\x56\xf0\x08\xa7\xbb\x33\x74\x55\x00\xc5\x70\x08\x4b\x5a\xe0\xb7\xdf\xe1\x60\x37\x65\x11\x4f\x21\x90\x19\xf1\xba\x34\x06\x30\xd5\xf1\x53\xb3\xe4\x08\x93\x3c\x22\x37\xd2\x07\x90\x0c\xc5\xf3\xc9\xed\x0f\x35\xba\xb1\xec\x28\xb8\x17\x44\xc4\xf4\x41\x19\x65\x75\x55\x66\xf7\x2c\xde\x2e\xf4\xc6\x5b\x16\x5c\x76\x32\x7b\x76\x7b\xf2\x57\x76\x87\xd4\x18\xe7\xc2\xec\xde\xd2\x6b\xd5\xdf\xd4\x06\x7a\xdd\xa2\x2d\x57\xa4\x92\x35\x2a\xda\xa5\xf5\x7a\xbd\x1f\x1d\x5e\xcf\xbb\x93\x8d\xe4\x91\xec\x5a\x02\x02\x74\x4c\x55\x61\xc3\x42\xed\x4a\x73\xb0\x3d\xb6\xb3\x17\x70\x9a\x32\x02\x77\xe4\x90\x80\xb3\x2b\xd5\xfa\x15\xac\xfd\xed\x21\x55\x81\x4a\xa7\x47\x43\xbb\xc1\x74\x11\xd8\x63\x17\x2e\xc4\xce\x5a\x6f\xa6\x58\x1a\xb2\x21\xa2\x44\x22\x8b\x05\x89\x3d\x8e\x69\xb8\xb1\x41\x99\xb0\x10\x69\xec\x6c\x55\x35\x12\x33\xca\x25\x09\x14\x11\x2e\x66\x81\xd6\x20\x52\x8d\xaf\x79\x02\x2f\x14\x84\xd0\x82\x08\x82\xb0\xb1\xe5\xea\x2e\xa1\x96\xcd\xfc\x53\x05\x79\xec\x1a\xbe\xa0\x8e\x1e\x46\xbb\x2e\xa5\x7d\xe3\x47\xa6\x00\x7c\xf8\x8a\x0b\x3f\x7f\x8e\x35\xac\xcd\xe6\x4f\x21\x61\xbe\x41\x75\xab\xda\xf5\x82\xf0\xc4\x23\x74\xdf\x5a\xf0\xd4\x8e\x00\x7b\x74\x00\xd0\x84\xcd\xd8\xe6\xaa\x87\xbd\x34\x16\xda\xf0\xf6\x05\x30\x5d\xc4\x22\x6b\xe4\x6a\x28\x0c\xb3\x9e\x7e\x99\x03\x76\x92\xaa\xae\x7a\x5a\x03\x56\x92\x26\x9d\x01\xc0\x89\x7e\x58\xc5\x88\xd4\xc3\x2d\xb1\x8a\xc7\x1d\x55\x77\xb8\xcf\x51\xda\xc0\x26\xce\xbc\xc4\xff\x3b\x9d\x30\x07\x52\xe7\xe4\x5b\x5f\x09\x50\xd1\xcd\x5a\x1c\x60\xd6\x73\xdb\x4f\x51\xef\xcf\x4d\x7b\xfa\x8a\x0d\x09\xb0\x86\x3d\x13\xa4\xef\x9f\x90\x3b\x0b\x48\x05\x1a\xf7\xf2\x13\xc1\x23\x5c\x96\xa8\xed\x5a\xc1\x42\x69\x70\xd4\x23\x01\x99\xf5\x2a\xb2\x5e\x5a\xa2\xe7\x56\x01\xb6\x57\xc7\x65\x87\xd4\x3a\x16\xe9\xc9\xb0\xc7\x14\xf1\x10\xe4\xcb\x46\xb2\x77\x92\x2e\x2a\x46\x35\x2f\x54\xd0\x61\x18\x8e\x96\xf1\xd3\xb8\x79\xf0\xbe\x96\x30\x1c\xce\xff\x29\x86\xac\x31\x4a\x01\x2e\x72\x86\x5b\xf0\x44\x81\x75\xfb\x89\x5d\x55\x40\x92\xb9\xd0\x31\x38\x42\x7c\x00\x03\x5a\x42\x97\xb3\x8e\xff\xc6\xe0\x6a\xaf\xe3\x9b\x6a\xe5\x63\xcc\xdd\x50\x12\x5b\x88\xd9\x5c\xc7\xdc\xea\xa8\x4e\x04\x7b\x4c\x91\xf4\x07\xc4\x60\x4f\x95\x83\x2c\x13\x61\x2c\x83\x0d\x21\x54\xb6\x0e\x52\x9f\x5d\x9f\xcf\x27\x75\xd9\x8f\x1c\x62\x06\xba\xdc\xd9\x9d\x33\x43\xeb\x40\x58\x13\xc6\x85\x07\x6c\xe7\x9b\x09\x1f\x6e\x35\xe7\xfb\x54\x6b\x62\x99\x08\xdb\x0d\xd2\x12\xd0\x66\x53\x09\x79\x15\x63\xa5\xcb\xcb\x8c\x2b\xa9\xdf\x55\x08\x01\xde\x95\x8c\xec\xc7\x79\xd1\x45\xd8\x45\x8c\x09\x0e\x59\x38\x71\x85\x21\x5f\x72\x4f\
xf9\x35\xcd\x12\x4b\x98\x32\x2c\xfb\xbf\x54\x14\x46\x97\x5d\xd4\xab\x10\xd7\xf1\xfd\xe5\xef\x8f\xf2\xeb\x81\x2a\xd9\x1f\xa1\xfc\x1e\xd5\x49\x6f\xc5\xf2\x96\x2b\xc1\x73\xfe\x8f\x70\x4a\x21\x0f\xff\x64\x91\x06\x61\xc7\x97\xcf\xc3\x09\xfa\xc2\xa1\x4c\x5c\x90\xa5\x50\xbc\x7f\x09\x12\xdb\x0e\x24\x36\x9e\x08\x6b\x77\x65\x93\x6d\x0f\xfc\xda\xb8\xed\x64\xf5\x1a\xf3\x23\xdf\xeb\xff\x06\x45\x44\x75\x5a\x1f\xea\x9e\xcb\xfe\x3a\xcc\x69\xc6\xd0\x4e\x38\x84\x75\x91\xbd\x40\x49\xaf\x0f\x6f\xf2\x45\x52\xe3\x55\x3a\xc1\xef\xf0\xc5\x86\x9e\x60\x07\x2c\x05\xc6\xec\x1e\xbc\x0b\x4a\x66\x8c\x8f\x6a\x9a\x5c\xca\x7f\xe5\x13\x1e\x04\x91\xf0\x6a\x2d\x82\x8b\x2d\x70\x0c\xf6\xad\x25\xf2\x5e\xad\xf3\x46\x0b\x61\x7e\x77\xd6\x72\xbc\x89\x66\xc0\x8b\x25\xa5\x9a\x32\x2f\x2e\x7a\xfa\xa0\x5e\x7d\xe8\x39\xa4\x2a\x8a\xef\xbc\x0c\x7e\x01\x78\xac\x61\x95\x2c\xe7\x64\x97\x6c\x00\xf9\x3a\xc7\xa7\x25\x0d\x1b\x8b\x0d\xe7\x84\xaf\xbc\x92\xe5\x0f\x8e\x9f\xb0\x74\xc9\x71\x1a\x96\xbe\x26\x3a\x41\xb8\x50\x9b\x2c\xdd\xe6\x72\xfc\x25\x05\xd6\x35\x3d\x6f\xb7\x20\x78\x28\x50\x9a\x1e\x9a\xd8\x74\x5d\xfe\x52\x6f\xb6\x06\xed\x95\xcb\x16\x0a\x6a\x20\x69\xea\x27\x75\xf0\xea\x83\xd8\x15\xad\x59\xcc\x76\x19\xb4\x4f\xb4\xcb\xb9\xa2\xe0\xe7\x32\x38\xe7\x65\x2e\x9d\x50\x2e\x9d\x65\x11\x0e\x0a\x81\xd4\xba\x66\xc1\x3e\x05\x64\xb6\x20\x51\xb3\xc8\x7d\xad\x09\xb7\xc8\xa4\xb4\xe2\x4b\x0b\xa2\xea\xec\x97\xaa\x7e\x0e\xd1\x07\x45\x59\x44\xd1\x39\x7b\x61\x8e\xfb\x30\x4c\x9c\x84\xb8\xd8\x05\xaf\x23\xd1\x4c\x39\x94\x2e\xfc\x5e\x35\x0c\x49\x07\x22\xf6\xfd\x50\xf0\xe1\x82\x7b\x31\xc1\x66\x92\x82\xa8\x5a\x10\xa8\x12\xa9\x52\xee\xba\xfb\x0a\xfc\x78\x01\x5e\xa0\xe8\x12\xf0\x98\x90\xdb\x03\x87\x2d\xcc\xb0\x51\x37\x2f\x4d\x52\x98\xda\x38\x20\x9f\xef\x1c\xda\xfe\x04\xe1\x24\x57\xa7\x6a\xd5\xb3\xcd\xa9\xab\x35\x95\xe9\x0a\x88\xb9\xf5\xdf\x3e\xc7\x70\x59\x7e\x87\x9c\x08\x1a\xe3\xa1\x80\x33\x85\x1c\x2e\x08\x44\xd4\x4f\x28\xd6\x09\x10\xe2\xf2\x0e\x9d\xfe\xd1\x0b\x45\x94\x80\x10\x38\x66\x72\x57\x0e\x79\x52\x6b\x1f\x62\x46\x2d\x6a\x7c\xfc\xb8\x0c\x89\x37\x56\x33\xfd\x71\x0f\x0d\x09\x8a\x44\x08\x28\x13\xc0\x41\xcb\x30\xdf\x89\x48\x19\x51\x47\xb1\x6e\xa8\x51\xdd\x3d\xf8\x26\x81\x4e\x26\x8d\xd6\x96\x1f\x5e\x55\x52\x29\xc0\x77\xaf\x73\xfb\xf3\x6f\xb4\xe0\xf8\xfb\x50\xb6\xf0\x45\xdb\x99\x97\x25\x1a\x6e\x15\xa0\x2e\xc1\xb2\x06\x21\x83\x60\xfc\xf2\xf3\xe6\xbe\x99\x38\x34\xeb\x0e\x7f\x5d\x8c\xc2\x3a\x18\x01\x6d\x16\xdf\xa6\xbf\x93\x6a\xff\x57\x58\x83\xff\xc2\x9f\xc6\xef\xd3\x5f\x20\xac\xb4\x93\xc3\xc4\xef\x28\x45\xd3\xdf\xb1\x42\x17\x7e\xb3\x25\xb2\x9c\x35\x58\x88\x2c\xc8\x81\x36\xf9\xf9\xdc\x34\x8c\xfd\xa6\xb9\x46\xed\xff\x96\xe2\x13\x61\xac\x45\xec\xbc\x61\x7e\x93\xcf\x3d\xa1\x67\x2d\x03\x80\x9d\x5a\x0c\xdb\x76\x63\x68\x6d\x48\xfb\x9f\xd4\x1c\x3c\x17\xf7\xaa\x23\xdb\x96\x08\xcf\x2c\x29\x0a\x1c\x73\x4b\x5e\xcb\x15\x20\xc8\x1a\x6e\xb9\x2d\x51\x50\x8b\x2b\x58\xc2\x62\x2a\x52\x3c\x27\xf7\x42\x16\xce\x07\xfa\x12\x59\xd8\x99\xee\x9c\x7a\xa1\x6d\xba\x8c\xf4\xa1\xa9\x5f\x27\xc6\xae\x60\x3b\x6d\x19\x42\xe9\x75\x5c\xbe\xf4\x31\x50\x04\x0d\x8c\x8e\x0b\x47\x48\x44\xd8\xa1\x36\x6f\xd5\xb7\xbd\x81\x99\x1c\xdf\x3f\xdf\x9e\x2d\x7c\xd5\xa6\x11\xbe\x8c\x04\x29\x7f\x75\xdb\x8e\xf6\x09\x8d\xb7\x91\x26\x46\xc3\x35\x13\x90\x22\x93\xeb\x73\xd3\xd4\x64\x38\x41\x39\x71\x30\xcb\x09\x37\x60\x0f\xe8\xd2\x30\x58\xe6\x43\x6f\x00\x40\x73\x78\x34\xba\xd6\x2d\x75\xcc\xe2\x06\x0e\x36\xe0\x87\xd3\xf5\x7c\xc0\x92\x1e\x05\x4a\xc0\xbe\x12\xff\x71\x28\xbd\x3a\xef\xa1\x04\x85\x73\xa8\xae\xbe\x00\x0d\xcd\x36\x1c\x9e\x1e\xdb\x87\x6f\x94\x99\x1c\xc6\x4e\x2a\x11\xb7\xc7\x9a\x4d\x0b\x56\x88\xce\x20\x74\xed\x06\x3f\xd0\xc9\x7e
\x7a\x92\x86\x58\x3d\x09\x7b\x6f\xfe\x01\x01\xbc\xe1\x21\x2f\x96\x7b\x86\x44\x8c\x31\xb0\x3a\x3e\x1c\x32\x96\x97\x36\x13\xa8\x88\xfc\x0f\xe9\x37\x5a\x5f\xf4\x30\xfa\x11\x60\x86\xef\xda\xea\x40\xeb\x16\x05\x0e\xca\x7d\x54\x95\x89\x00\xdd\x24\x9a\x26\xf7\x50\x4a\xb6\x4e\xce\x7f\xdf\x7e\xfc\xfb\x63\x4a\xd9\x43\xad\xcf\x75\x75\xfa\xb1\x1a\x75\xf6\x10\x47\x93\x2e\xc4\x5e\x3d\x09\xc4\x03\x73\x89\x88\xea\x58\x30\x25\xc9\xcc\xf7\x69\xdc\x3e\x17\xdf\x23\x8c\xe1\xd3\xd7\xa2\x85\x11\x21\x59\xcc\xb1\x2f\xe7\x0c\xbd\x17\x42\x68\x60\x07\xae\x0a\x9d\xb8\x5f\xd1\xc8\x45\x30\x1a\x77\x66\x0b\xad\x05\x00\x31\xb8\xad\xb7\xe3\x53\xfe\x2a\xc0\xd4\xa8\x72\xca\xd0\x03\x1b\x51\x7a\x02\x66\xce\x7f\xbe\x29\x42\x26\x3e\xbd\x83\xa5\xa4\x44\x66\xf9\xe6\x69\xf3\xb9\x5f\x2d\x0a\x3d\xe1\x68\xc6\x03\x39\xdf\xe6\x4e\x64\x26\x15\xb8\xe3\x7a\x57\x02\xd9\xb4\x82\xbf\xdc\x12\xbd\x4f\xa8\x35\x00\x92\xca\x83\x73\x77\x83\xd7\x9f\xe9\xb8\x25\x7d\x9e\x69\x6b\x74\x91\x68\x17\x2e\xe7\x70\x7e\x39\xd7\x35\x1e\x5f\x32\x13\x16\xe7\xab\x1f\xaa\x49\x78\xdd\xf9\x8f\xac\xb7\xdd\xe3\xa4\x2e\x12\x29\xb2\x88\x10\x90\xe7\x13\x3f\xc2\x26\xec\x36\xe1\x27\xf7\x23\x3b\xe9\x9f\x1f\xb7\x30\x81\x7e\x10\xf4\x42\x3c\x8c\xa3\x63\x63\x3c\x07\xa4\xcf\x9f\x6d\x88\xa5\x6f\xb1\x0f\xa5\xb5\x88\x29\x3c\x9a\x6d\x66\x99\x6e\x74\x01\x6e\xa8\xc0\xbc\x34\x2e\x27\xb4\xdf\x5e\x06\x90\x23\x64\xd5\x25\xd0\x2f\xf7\xfb\xb8\x43\xbe\xe4\x4f\x21\x29\xf8\x92\x3d\xa9\x90\xea\x76\x24\x4f\x6b\xfd\xab\x2f\xf6\x0a\xb6\xc6\x31\xdb\x57\x38\x1e\xc0\xc6\xd8\x38\x48\x98\x48\xe8\x24\x76\x9b\xa5\x5d\xb1\x6e\xdc\x6d\x51\x15\x73\xe2\x9c\xfb\x04\xaa\x21\x7c\xcb\x34\x3d\x93\x6c\x08\xde\xf2\x15\x70\x44\x4b\x48\x4a\x40\x08\xeb\x88\xa8\xfd\xde\x7d\xbb\x1c\xf8\x65\x0e\x9f\x83\xb5\xdc\xfe\xe2\x6e\x50\x55\x5b\x18\x7d\x96\x2f\xb5\x8a\x3d\x34\x8d\xe2\x73\x7c\xdd\x4a\xe7\x50\x75\xab\xa9\x70\x21\x68\xc9\xbb\xdc\x66\x05\xe3\xed\x26\x7e\x10\x8e\xce\x3f\x14\xcb\x7d\x22\x32\xf9\xea\x5f\x7b\x1b\x06\xb8\xa1\x7c\x40\x53\x26\x99\x0a\xf0\xd9\xed\xc2\x9c\x2d\xbc\x06\xf8\x8b\x84\x1f\x3a\x8c\xd9\x02\xe7\x32\x47\x56\x0c\xc1\x40\x00\x7e\xf4\xbd\x4d\x33\xc6\x29\xa4\x63\x90\xc2\x46\x22\x40\xa5\x73\x86\xb6\x60\x2e\xcb\x8b\x0b\x6a\xed\x5f\xb6\x19\xf9\x6e\xf3\x2d\x9b\x1b\xf8\x8e\x0b\x0b\x51\xf4\x4b\x68\x2f\x10\x2d\x56\xfe\x38\xed\x79\x34\x7a\x7c\xed\x71\x74\x9c\xc3\x39\xad\xa1\xbd\xef\x68\x72\xc1\x5f\xd6\x70\x30\x07\x3d\x6f\x67\x28\xb2\x03\x23\x79\x95\x9f\x39\xd0\x31\x62\x54\x5a\x70\x46\x09\xae\x75\x8b\x68\x06\x94\xe7\x18\xbf\x2a\xf9\x2e\xd2\x41\xab\x01\x48\xb9\x05\xce\xe4\x13\xdd\x03\x4a\x7c\x92\xa2\xa8\xb4\xf4\x56\x00\x8f\xb2\xbd\x42\x5a\x03\xe6\x3a\x2e\x16\xe2\xd8\x98\xfd\x46\xc7\x3b\xce\xe5\x16\xc5\xb3\x26\x84\x40\xf4\x2e\x80\x5c\x42\xba\x63\x71\xf3\x43\x09\x1a\x84\x76\xc6\x97\xb9\x39\xe0\xbc\xd1\x37\xb7\x45\x32\x1c\x13\x3d\x60\x5b\xa8\x9f\x09\xa6\x52\x97\x8c\xdc\x64\x40\xe2\x19\xee\x49\xee\x03\x37\xe0\x2b\x0d\xcb\x6c\x50\xb5\x08\xa8\xa0\xbf\x57\x60\xae\x76\x54\x3d\x1e\x40\xe1\x60\x72\xb6\x03\x46\x26\x19\x99\xd7\x11\x0d\xd7\x58\x62\x29\x80\xb8\xa4\xe5\x2f\xf3\x08\xd5\x22\x1c\xb5\xe2\xd6\x66\x6a\x25\x2a\xf5\x43\x0a\x7c\xd9\x99\x62\xb7\xde\x47\x65\x45\xd5\xcd\x03\x86\x86\x47\x38\x86\xd8\x0c\x18\x6c\x68\xe9\xa6\xe4\x0c\xf8\x6b\x9b\xde\x15\xfa\x63\x1d\xf5\x7a\x4e\xc4\x89\xec\x7b\xe4\xd7\x10\xc1\xa8\x16\x0d\x09\x9e\xca\x7d\x4f\x63\xf4\xa4\x4a\x2a\xf0\x2a\x5f\x96\xf9\x29\x94\x7d\xa1\x75\x21\x8a\x3e\xc2\x2a\xc2\x6f\x1d\x16\x12\x9a\x50\xdb\x98\xc6\x86\x27\x8e\x04\xed\x08\xf0\x8a\x0d\x0f\xfe\x2b\xc1\xb3\x40\x30\x95\x63\x0a\x59\x8c\xfd\x6a\x0a\xa1\x1b\x67\xcc\x72\xa8\x8a\xa1\x23\x65\xa6\x66\x41\x27\x21\xff\x73\x12\xa7\xc1\xa2\x3
e\x51\x40\x1f\xf8\x15\xbd\x79\x3b\xb5\x2d\x1f\x48\x72\x33\xb4\x2c\x41\xf2\x15\x88\x17\x29\xb5\x7b\x3b\x4b\x0a\x17\xe8\x95\x57\x30\x90\xed\xda\x9e\x42\x5c\xa0\x8a\xd8\xc4\x55\x6b\xca\xa5\x4a\x5b\x53\x8e\xb4\xa8\xc3\x6e\xca\xa2\x8b\xd3\xc4\x1a\x01\x2c\x11\xc4\x91\x03\x03\xd7\x37\x63\xd5\xda\x70\xe5\x8c\x12\x2f\xa3\x41\xa1\x6d\xd7\xce\xd8\x19\x28\x8d\x75\x93\x04\x49\x6c\x50\xe7\xac\x19\xed\xa3\x6e\x57\xfc\x06\xc2\x40\xd8\x0e\x69\xf6\x3c\xbb\xc9\xd6\x3f\xe5\x21\x53\x39\x43\x57\xbc\x8a\x76\xe4\x29\x48\x88\x0d\xca\x14\x0f\x2a\x65\x84\x4b\xfa\x87\xe0\x94\x2f\x9f\x09\x79\xe1\x37\x60\x57\x8e\x3a\xde\xd2\x45\xd9\x7e\x3a\x00\xc6\x13\xbb\x1b\x0f\xd3\x0a\x41\x08\x3d\x8d\x80\x2a\xc2\x09\xbf\xba\xb2\x8b\x83\xe6\x22\x19\xb4\x7d\x48\xe1\xf9\x60\x6f\x5d\x90\xa8\xe9\x4d\xed\x6b\x20\x60\x34\xfd\x1d\x6e\xfa\x91\xfe\x51\x13\xe2\xdc\xdb\xe1\x33\x1e\x00\x18\x27\x0a\x56\x5c\xbb\x80\x13\x58\xa7\xdd\x5b\x41\x39\xb6\x9e\x64\x9b\x10\x70\xb0\xb6\x56\xa0\x6d\xf6\xc2\xfb\x09\x12\x62\x5b\x3e\x28\xa6\x61\x5d\x5e\x22\xe8\xc2\x45\x31\xb0\xb8\xeb\x69\x93\xfb\xa6\x07\x6b\xcf\x2e\xbc\x8d\xed\xa3\xec\x15\xfe\x55\x44\x19\xb4\x31\xfa\x52\x39\x3a\x69\x29\x50\x85\xa0\xb0\x44\x2c\x3b\xda\x4c\xa7\x50\x01\x64\xc9\x6d\x27\xe1\xa3\xb4\xe8\xaf\xb4\x34\xa7\xb8\x4d\x0a\x60\x93\x25\x4c\x19\x72\x9d\x47\x02\x35\x94\xc3\x8a\xa1\x30\xed\x02\x06\xd4\xad\xdd\xa7\x6a\xa4\x4a\xf5\x74\x7f\x73\x48\x8f\xd8\xdd\xaf\x29\x6a\x7a\x3a\x44\x2c\xcc\xd3\xa5\x7b\x21\xbf\x74\x49\x5f\x14\x17\x97\x49\x1d\x88\x23\x21\xc3\x61\x43\x48\xe3\xbf\xbf\x7b\xa6\xcf\xa5\xab\x78\x99\x16\xbd\x51\x95\x39\x74\x2b\x87\x26\x62\x61\xbe\x2c\xd8\x2a\x22\xa9\xea\x65\xe8\x8f\x4a\x1f\xfb\xf7\xa0\x74\x86\xdd\x40\xf8\xd7\xf7\xbd\x34\x34\xce\x17\x27\xa2\x7f\x45\xb4\x82\x3f\xfc\x2e\xd8\xc2\xd4\xcf\x57\xfe\xaf\x33\x64\x7b\x2f\x98\x34\x62\xfb\x33\xd0\x58\xed\x73\xf9\xb5\xf9\x6c\xb1\x10\xf1\x01\xc8\x49\x68\x13\x76\xad\xb7\x35\x44\x14\xc7\x2c\x7c\xfa\xc6\xbe\xc0\xd9\x58\x1f\x17\xc0\x64\x30\xb8\x37\x8b\x3e\x61\x74\xc8\x66\x2b\x91\x0d\x94\x81\xe6\x1d\x93\x3b\x36\x03\x25\xf6\x41\xba\x7f\x45\x10\x6f\xcd\x14\xce\x18\x28\xae\xc9\x4c\xef\xc3\xf3\x79\xd0\x12\x82\x32\x96\x88\x76\xbb\x2d\xb5\x6a\x8b\xa6\x8d\x16\x33\x0a\x43\x93\x2f\x3d\x7c\x20\x4a\xe0\x7c\x01\x8b\xba\xf0\x91\xa8\x65\x18\x78\x88\xc7\x97\xa6\x7f\xc4\xcb\x84\x05\x8f\xa5\x9a\xf4\xc0\x48\xdb\x50\x08\x9b\xd9\x9e\x69\x93\x66\xc7\x8a\x7b\xd9\x23\xeb\xfc\xc3\x4c\x42\x1a\x1f\x1d\x51\x88\x1f\xbb\x61\x1f\x40\xcd\x6c\x4c\x5a\xdf\xa5\x0f\xb3\xc9\xb7\x65\x22\x44\x66\xd8\xe4\x9c\x88\x17\xeb\xdd\xce\x56\x6e\xf0\xdb\xe8\x2a\x93\x88\x33\x9d\xe3\xf3\x41\x3d\x14\xbf\x15\xdb\xea\x24\xb1\x05\x88\x40\xfa\x1e\x01\x95\x09\x21\x47\x7f\x8b\x9d\x73\xcb\x55\x92\x77\x68\x03\xbe\x41\x87\x9b\xa4\x25\x45\xc3\x89\x1c\x18\x9a\x12\xdb\x78\xcf\xa4\x57\x5a\xf2\x4f\xf8\x16\xae\xe0\x08\xbd\x22\x59\x22\xf7\xa5\x77\x40\x4d\xdc\x38\xe7\xfa\x22\x38\x95\xe2\x73\x01\x40\xf3\x3c\x39\x3c\x25\xf6\x9c\xc2\xae\xc2\x29\xb5\x8a\x87\xb1\xaa\x7d\xa8\x9f\x3b\x17\x58\xdd\x59\xbf\x11\x92\x21\x58\xa6\x7d\x57\x37\xcc\x13\xc6\x97\xc7\xa0\xf7\xf8\x88\x75\xbb\xca\x2f\xe3\x1c\x3d\x91\xb5\x3a\x98\x7c\x49\x92\x44\x58\xaa\xe3\x42\xff\xb2\xe1\x5c\x0f\xbb\x75\x89\xcc\x38\x99\x85\x1e\x27\xfc\x91\x3c\xc0\x93\xec\x01\xd7\x5b\x8e\x33\xe0\x22\x7b\xa2\x03\xfc\x76\x2a\xc7\x97\x79\x45\xb5\xcb\x73\x43\x0c\x17\xad\x80\x7b\x85\x50\x37\x40\xab\x10\x34\x33\x06\x58\x04\x72\xe8\x21\x1f\xf2\x62\x09\xc2\x11\x36\x7b\xfe\xc7\xa4\x91\xe0\x9b\x3a\x67\xf4\xe6\xb3\x0a\xb4\x61\x4d\x4f\xd4\x6e\x07\xd5\xd8\x90\x2f\xdb\xe2\x70\x4a\x3a\xa8\x80\x74\xb8\x30\x1a\x0f\x1c\x8f\x50\x37\xa1\x8f\xcb\x11\xdc\xf1\x63\xa4\x15\xac\x6b\x3a\x13\xf1\x
7d\x7c\xd5\x33\xe5\x84\x51\x91\x31\x1b\x0d\x63\xdd\x12\xa3\xe5\x05\x1a\xc6\xfd\x87\x87\xe6\x10\x1e\x43\xb9\x3d\xc1\x50\x38\x30\x34\x85\x0e\x8e\x87\x3d\xa1\x1d\x51\x9a\x87\x25\xbb\x7e\x68\x5e\x54\x53\x03\xf8\x4a\x2a\x25\x4b\xf8\x52\x00\x2f\x73\x0c\xf2\xc6\x9f\x32\x68\xe5\xd2\xb4\x73\x3f\x8d\xd1\xdd\xb8\x1e\xc4\xd2\xac\x45\xc7\xd1\x1f\x9f\x61\xeb\x3d\xc3\x81\x0c\x85\x7f\x4f\x52\x5f\x76\xb8\x56\xa9\x50\x46\x36\xf0\xbd\xa4\x7b\xe2\x4c\x5a\xdb\x8e\xa5\x0d\x25\x94\x0f\x5d\xa4\x31\xe1\x71\x20\x9d\xbb\x53\x71\xcf\xa4\x9f\x04\x13\x3e\xee\xa1\xdb\x7f\x58\xb0\xdf\x5a\x28\xbe\x7e\xff\x09\x2f\x04\xeb\x0a\x7d\x93\x02\x31\xf1\x49\x2a\x26\xc1\x96\xe3\x96\x8b\xb9\x4d\x3a\xf4\xda\x6d\x69\xbc\x9f\x21\x7e\x61\x41\xda\x02\x91\x33\xbb\x14\x99\x87\xd3\x21\x4b\x8e\xe9\x49\xdd\x08\x4a\x7a\xed\x67\xe3\xf2\xff\x6f\x41\x78\xce\x8d\xd0\x28\xb7\xf5\x48\x4b\xf5\xe5\xbf\xb6\x02\xf9\x4e\xc6\x15\x43\x02\x8f\x7e\x05\x45\x10\xa4\x63\xe3\x80\xe9\x29\xb8\xb4\x4c\xa8\x95\x42\x68\xc8\x51\x6c\xd2\x2f\x80\x8b\x3b\x73\x74\xe1\xf6\xe8\x65\x97\xe8\xa4\x6a\x83\x71\x9a\x04\x15\x82\xdb\x0f\x5f\xb7\x07\x2d\x7c\xe7\x0f\x2f\x29\xd3\xa8\x43\x01\xef\x04\x44\x8d\xb4\x4b\x56\x57\x8a\x12\x1c\x08\xc8\x00\x2c\x34\x30\x9d\xf0\x55\x3e\xff\x24\x40\x7c\x0f\xca\x69\xb8\x70\x8a\x1e\x2d\x37\xf3\xa8\x3a\x19\x6e\xe4\x1f\x78\x1b\x28\xc3\x6e\xa3\x97\x9d\xf7\x12\x36\x11\x65\xbb\x57\x61\x30\x03\xb0\xe9\x89\x9f\x43\x01\xcc\xa0\xfe\x26\xc5\x39\x93\x1e\x3d\xe9\x7d\x61\x5b\xdd\x85\x34\x02\x89\x8f\xc3\x97\x4b\x46\xc5\xc1\x42\xda\xbf\xa4\xaa\xa9\x67\xf1\x76\xef\xe6\x4a\x98\x05\x6d\x45\x00\x5d\xf6\xe0\xf1\x9e\x02\x18\xc8\x83\x29\x6c\x62\x9d\xda\xb8\x4b\x57\xe5\xd6\x33\x77\x68\x50\x05\x06\xa5\xbf\x2c\x19\x91\x49\x52\x0a\x16\x50\x07\x57\x87\xbf\x2e\x4e\x9d\xf7\xd7\xd8\x09\xc0\xf3\xaa\x9e\x22\x6f\x97\x4d\x73\x8c\x75\xa2\xf3\xd6\x3e\x56\x9e\x8b\x85\x2d\xf1\xf5\x3b\x32\x28\x5e\x37\xe8\xaf\xfa\x76\xaa\x6e\x78\x3b\xd7\xae\x19\x76\x59\x5a\x3c\x63\xdb\xea\x4b\xf8\xb9\xc3\x15\x3a\x3f\x83\x16\x2a\xf7\x61\x3b\x25\x57\x0a\xb4\x51\xd6\xbe\x2c\x61\x5a\xe0\x7e\xf9\xa1\x43\x39\xa7\xee\x27\x3d\x84\x44\xca\x4b\x44\x2d\xd3\x62\xaf\x72\xc5\xb0\x06\xa4\x5d\x5c\x5f\x70\x41\x05\x72\xb2\x21\x29\x8c\x5b\x86\x14\x9f\x8b\xc5\x4f\x7e\x95\x73\x2a\x22\x5e\x85\x5f\x1b\x50\xa9\xf6\xd3\xcb\xff\x42\x46\xc8\xad\x0d\xc3\x60\x26\xaf\x9c\x95\xc3\x15\x45\xf2\x49\x79\x42\xd5\x73\x8f\x4b\x93\x89\xcd\xab\x76\xf4\x36\xce\x36\xee\x1d\xb8\x4b\x52\xcb\xad\x4f\x44\x02\x45\x1c\x48\xac\xb6\x6a\x14\xb5\x5a\xcc\x84\x3c\x4d\xfc\x09\x40\x6b\x84\x76\x10\x3a\xd9\xe3\x8f\x9d\x96\xdd\x78\xea\x71\x6c\x40\x0e\xbc\xd2\x15\x1d\x03\xb3\xf7\x1d\x53\xef\x99\x2a\x36\x0b\xef\xf0\xb4\xbc\x12\xed\x87\xa2\xfe\xb0\x27\xe7\x06\xb3\xce\x62\x93\xbb\x30\xe0\xb1\x19\x89\xcc\x57\xa6\x3f\xcd\x23\x21\x60\xf3\x11\x67\x12\x9a\x23\xfc\x9c\x09\x4e\xa4\x1b\xa9\xbb\x7b\x7a\x9d\x53\xe1\x77\x3f\xab\xa3\xed\x9e\x42\xad\x25\x8c\xd9\xad\xdf\x12\x1c\xea\x5c\x04\x96\xfa\xc2\xca\x28\xf6\x70\xfc\x7d\x67\xa2\xa1\xfe\xa9\xe8\x75\xd3\x61\x5d\xdc\x95\x1f\x0c\xea\x0e\x47\x2e\xd7\x51\xfe\xaf\xcb\x8a\xef\xce\xef\xb1\xaa\xd9\x1c\x20\xcc\x56\x0c\x53\xd9\x02\x57\xb4\xe4\x3e\xf6\x7e\x7b\xac\x83\xcd\x50\x75\x62\x26\x27\xc2\x56\xd4\x10\x5d\x45\xf9\xf6\xe9\x16\x73\x80\x56\x8d\x6b\x31\xf3\x15\x00\xf2\xf4\x51\x3d\xa0\x3b\x47\x80\x9d\xfa\xea\x23\x00\xa9\xe5\x49\x3a\x39\x4f\xac\x3b\xb5\xf1\x8e\x90\x0e\x01\xa6\xa9\x0d\x38\xd3\x96\xeb\xfb\x54\x9c\x7a\xdc\x88\xa6\x25\x3c\x60\xda\x54\x55\x69\x54\xd6\x46\xf5\x5f\x7d\xd6\x86\x59\xbb\x5e\xa5\xbc\x95\xdc\xcd\x5d\x9b\xc6\xbb\x12\x40\x90\xf8\xe5\x90\x4a\xf7\xe1\x04\x56\xda\x01\x10\xaa\xa5\x6e\xe2\x81\x76\xc9\xa9\x60\xb9\x05\x73\x26\x41\
x8a\x96\x73\x69\xc7\x2f\xd1\x19\xfa\x32\xc8\x93\x05\x4c\xe0\xc0\x1e\xd9\xd5\x12\x6d\x01\x62\x34\x98\xcc\x3d\x28\x47\xd3\x8e\xab\x70\x2e\x9b\xc0\xdc\x36\x2c\x7f\xfb\xde\xac\x71\xcd\x02\x52\x64\x1f\x15\xc7\x25\xc1\xe9\x57\x71\x44\x28\x4d\x43\x4b\x13\x48\x5c\xb6\xc2\xfa\x54\x5b\x0d\x00\x8c\xae\xc4\x8c\x76\x2d\xd5\x64\xca\xc3\x4a\x39\x06\x2a\xa2\xf5\x1c\x34\xac\x58\xf6\x88\xf5\x87\x6b\x4c\x5b\x0b\x68\x8f\x04\x6f\x15\x4b\x07\x08\xf3\x81\xd2\x97\x16\x12\x56\x14\x0a\x03\xb5\x74\xf6\x35\xcc\x39\x27\xf9\xb3\x20\xf0\xeb\x35\x55\xa5\xb1\x15\xe3\x1b\x91\x49\x21\x55\x05\x6a\x60\xab\x67\x68\x1d\xa0\x58\xba\xf3\x54\x87\x52\x7c\xa2\xe8\xb0\x51\xc9\xd1\xdc\x8b\x2c\xd0\x2d\x08\x73\xf8\x15\x17\x51\xc2\xc8\x2e\x6f\xb3\x56\xe0\x18\x4a\x17\xf2\x6c\x84\xb6\x99\xef\x4b\xc7\x3d\x9f\x82\x4e\xcd\xe8\x62\x7a\x42\x6e\x61\x98\xa9\xc2\x35\x69\xba\x7c\x2e\xb1\x12\xb0\x12\x07\xb4\xc3\x29\x4b\x1e\xbb\x2d\x1d\x14\xef\xb0\xaa\xa4\x1f\x22\xc9\xbc\x69\x4d\x76\xe7\xd3\xa4\xe5\x1f\xca\x3d\xb4\x5e\xf9\xc8\xa3\x88\x88\x22\xbf\xe3\x11\x42\x62\x7e\x51\xdc\x71\x31\x0b\x21\xea\xcf\xbc\x27\xbc\x85\x22\xee\xb2\x7f\x23\x94\xda\x50\xc8\x84\xec\xa8\xcd\xa7\xf9\x67\x01\x7d\x92\xa4\x9b\x43\x47\x96\x7c\x87\x71\x53\x7f\x01\xaf\x5d\xfd\x46\x9f\x03\xb1\xbd\x1c\xce\x3c\xc5\xee\x3f\xf6\x79\x6e\xde\xc7\xc2\x41\x59\xda\x28\x76\xab\x6c\x55\x66\xaf\xd0\xc1\x93\xc8\x9b\x2f\x0d\xf6\x96\x8f\xe2\x21\x78\x48\x3a\x47\x10\xc9\xd2\xec\xb7\xd9\x4b\x71\x9c\xcf\xe9\x37\xf9\x72\xda\x3d\x21\x8f\x17\xfb\x0d\x13\x51\x49\xcb\x90\xdd\xbc\x03\x58\x19\xc9\xd5\xa0\x84\xee\xe9\x32\x88\x6e\xab\x14\x70\x8e\xdb\xe2\xc5\xfe\x32\xc7\x04\x41\xe0\xc6\x86\xdd\x36\x33\x23\x5f\xc6\xd7\x48\x24\x10\xba\xdb\x39\xdc\x71\xbd\x05\x91\x29\x8f\x31\xf5\x73\xb9\x1c\xd7\xc2\xf1\x9f\x54\x0e\x30\xe7\x26\x18\xd1\xfb\x52\xf0\x45\xf5\x58\x00\x61\x3b\x1f\xd4\xc1\xd9\xa1\xda\x45\xac\x92\xd4\x0c\x30\x95\x22\x98\xe6\xb4\xdf\x57\xc4\xcb\x19\x80\x8d\x97\x14\x5a\x20\x82\xb3\x4e\x21\x90\x03\xd7\x18\xcc\x61\x08\xd0\x39\x27\x30\xd6\x09\x54\x9a\x08\x13\x3a\x5b\x49\xde\x00\xb9\xe4\x01\xc6\x9a\x87\xf0\x4b\x02\x21\x51\x7a\x9f\x8e\x69\x73\x68\x20\x02\x93\xe4\x97\xc3\xd2\x36\x70\x47\xc9\x3d\x9b\xdd\x99\x41\x68\x10\x6f\x34\x42\xc9\x80\x22\xeb\xdb\x99\x2c\x86\xb9\xd0\x10\x6c\x1e\xff\x79\x6a\x34\xbb\xfd\xb7\xea\xd9\xf3\xb8\xf7\x3e\x64\xa9\x11\xa6\x25\x4e\xcc\x97\x55\x0b\xe4\x20\xfc\x12\xdb\x5f\x58\xf4\x2a\x64\x8a\x24\x53\x67\x59\xa4\xf5\x91\xf2\x8e\xd9\xe7\x2a\x67\x3d\xc7\x9a\x32\xee\x32\x87\xf7\xd8\x28\x26\xb8\x15\x0c\x76\x14\x25\x42\xa0\x9a\xcf\x49\x44\x11\xba\x8c\xa4\xa4\xa0\xb6\x42\x05\xc8\x39\xf5\x57\x8d\xe6\x50\x89\x63\xb3\x65\x81\x19\xb9\x51\xf9\x0f\xe2\x88\x90\xa7\xb4\x9e\x03\x3f\x75\x39\x89\xe8\xe6\x31\x61\x06\x35\xa7\xc9\x9e\x50\x9e\x85\x25\x50\x4b\x7b\x57\x43\x20\xc3\x73\xcc\x49\x11\x01\x4a\x0e\x7f\xf7\xa8\x5d\xd2\xcc\x09\xcf\x9c\x5c\x5b\x45\xbf\x7d\xf2\xd5\x91\xa3\x02\x9d\x40\x9e\x5d\x74\x18\x8c\x61\x3d\x09\x9b\xae\xbd\x10\xb8\xf4\x2f\xcf\xb8\x69\x1d\x26\x74\x52\x72\x16\xb3\x77\xef\xdd\xdc\x12\x84\x77\xd0\x44\xeb\x38\xc7\x35\x51\x90\x77\xc2\x08\xd3\x09\xed\xd4\x4a\x84\x65\x2a\x4d\xa3\x5c\xd3\xe6\xfb\xb2\xd2\x82\xe1\xb8\xd3\xcb\x30\xdb\x26\xb8\x09\x35\x14\x09\x06\xa2\x21\xe4\xd3\xac\x1d\xd5\x97\xc1\xee\xf4\x16\x35\x02\xbb\xce\x94\xfc\x19\x73\x03\xe4\x90\x87\x1d\x8f\x68\xbc\x14\x07\x1d\x52\x02\x9c\xb2\x9d\x61\x95\x41\x23\x0a\x4a\x3c\xf5\x29\x57\x0a\x2f\x2c\x65\xb9\x88\x32\xc9\x5b\x7d\x8a\x84\xc4\xa6\x3d\xba\x76\x53\x7e\x1e\xb1\xcd\x1a\xe8\x6c\xc0\xda\x42\x3d\x62\x72\xb7\xce\x9d\xe1\x95\x28\x60\xe6\x99\x0b\x05\x81\xa6\xb4\x63\xb1\x4b\x17\xd0\xc6\x0e\x1b\x0c\x59\xe8\x9e\xa0\x9a\xb7\x2a\x62\x95\x67\x4a\x85\xed
\xa5\x73\x3f\x76\x04\x4f\xfa\xc3\x4e\xf4\x29\xd7\x61\x94\x62\x98\x80\x43\x93\xac\x51\x13\xfe\x4e\xaf\xab\xf8\x43\x07\x24\x96\x1f\x05\x18\x16\xe1\x58\x12\x3a\x5a\x5c\xdb\x5c\xd7\x03\xe8\xac\xac\xe0\xb6\xfd\xf1\x3f\x0d\xfd\x8d\xbf\x61\x9c\xfc\x73\x46\xac\xa4\xc8\x3f\xf4\x05\xda\xa9\x90\x79\x98\x72\x1f\xbe\x6d\xf9\x68\xff\x9e\x28\xe7\x73\x9c\xb4\x38\x11\x6a\xe9\xca\x47\xf7\x13\x05\xf5\xa3\xe4\xdb\x97\x90\x5a\x33\x27\xd9\xbf\x18\xef\x6b\xc8\x0f\xcc\x5c\x08\xfd\x85\x55\x9a\xef\x48\x81\xd3\xe2\xc9\xff\x02\x5f\xc3\x1d\xc0\xf4\x6d\x11\xb6\x59\x87\x2d\x3d\x0d\x86\xb0\xe6\x40\xa1\x26\xcb\xe1\xbd\xfe\xf2\x9b\xc6\xc9\xcb\x7b\x0f\x62\xbd\x84\x10\x83\xec\x92\x2a\x07\xbf\x3f\x9e\x29\xda\xaf\x48\xb4\xd5\x3b\x51\x59\xc0\x1b\xce\x4b\xab\xfa\x46\xb8\x65\x81\xfa\x4a\x5b\x34\xff\xd3\xf5\xfb\x1e\x9f\x75\x08\xb8\xe5\xcd\x4e\x9f\xc4\x35\xfc\x07\xc1\x60\x50\x8c\xe3\xdf\xe1\xbe\x44\xe7\x97\xc2\x92\xf3\x5f\x08\x03\x63\xa7\x06\x38\xd8\x78\xab\x3e\x49\xb7\x45\xa7\x44\x59\x16\x7e\x6c\x7e\x3e\xf3\x0d\xf4\xd2\xb2\x97\x07\x0f\x82\xb8\x8e\xf3\x0b\xde\x35\x06\x76\x05\xcb\xab\xbc\xb9\x20\x82\xba\xed\x3c\x3a\xba\x48\x6f\x34\xb9\x95\x27\xc5\x38\xfb\x24\x8a\xd6\x42\xdc\xd3\x61\xb2\xd6\x7a\x54\xa2\x02\x63\x3f\x0d\x4d\x47\xc7\x1f\x61\x00\x21\xcc\xd4\x02\xaa\x05\xcd\x32\xb6\x07\xbf\x81\x87\xab\xdb\x60\x1a\xef\x3b\xed\xae\x0e\x29\x22\x28\x38\x11\xc9\x85\xd1\x51\xfd\xcc\x40\xb0\xbc\x6d\x46\xbe\x44\x34\x3d\x4c\x13\x88\xd5\x3a\xc3\xac\x66\x70\xc7\x63\x85\xf8\xd5\x1e\x04\x92\xe9\x3a\x0e\xd0\x02\x23\x4e\x2c\xf0\x64\x73\xa0\xcd\xe6\x2c\x57\x01\x17\xf3\x06\x56\xfa\x27\x5a\xc8\x00\x39\x89\x1a\x0a\x3b\xa3\x61\xfc\x95\x38\xf8\x90\xbd\x9b\x42\x26\x71\x68\x39\x68\x80\xdd\x9e\xd4\xa0\x36\x20\x63\x3c\x97\xb6\xe4\x77\x7d\x10\x57\x56\x73\xb1\x21\xe7\x8b\x5b\x68\x9b\x11\x82\xa6\xf5\x03\x3c\xca\x0d\xab\xda\x96\xff\xbf\xb9\x01\x82\x46\x95\x4d\x14\x05\x74\xea\x90\xa2\x0f\x25\x9e\x01\xf5\xfb\x73\xd3\x11\x0d\xef\xb6\x01\x99\x01\x67\x05\x43\x9a\xa1\x9d\xca\xab\x91\x6e\x2d\xaa\x14\xff\x79\x03\x33\x7d\x89\x66\x25\xc1\xcf\xce\x85\x14\xe6\x9e\x95\x81\xfe\x2b\xe3\x85\xaa\x53\xe0\x9c\x2d\x7e\x4a\xbf\x29\x0b\xaf\x66\x7f\x84\xa4\xfc\xa2\x29\x40\xdb\x97\x2d\x52\xa0\xd6\x02\x10\x97\xd8\x41\xf7\x98\xe4\x6a\xd0\xec\x31\xaf\xcb\xea\x9e\xfa\x52\xaf\x6b\x5f\xc0\x0e\xd6\xde\xae\x96\x06\xfb\xe9\x1e\x94\x7b\xeb\x6e\x23\x09\xea\xf3\xc5\xc4\xa4\xb7\x3f\xf1\x8a\xe5\xa6\x3e\x7f\x01\x88\xa4\x1b\x95\xe3\x0d\x12\x4c\xec\x51\x04\xe6\xec\xd0\x42\x5f\x74\xef\xd3\x1e\x7e\x4c\xee\xaf\xa4\xbf\x51\x31\xdf\x8e\x7e\x7f\xf2\x13\x4d\x53\x48\x4a\xd9\x13\x4c\xd7\x24\xad\x7b\x3f\xcf\xd7\x68\xef\xf0\x5b\xec\x0b\x66\xfe\xb4\xc9\x8f\xb3\xca\xdd\x5d\x69\xa9\x18\x39\x38\x2d\x09\x6d\xb6\xe5\xc6\x37\x49\x6c\xf9\xa7\x93\x71\x62\xc1\xe3\xb9\x55\xb5\xa9\x7f\x4d\x29\x84\xb8\x61\x29\x91\xd2\x59\x70\xc9\x24\xde\xac\xb8\x21\x4c\xc4\x4d\xb5\x82\x85\x20\x95\xf6\x8e\xe2\x66\x3e\x27\xa8\x61\x28\x24\x6e\xc4\xb6\xb9\x25\xdc\x2e\x1d\x8d\x70\x76\x36\x7f\x64\xa9\x14\xa4\x0b\x41\xea\xe4\xcd\xe4\x31\xea\x20\x4e\xd0\x60\x4b\x47\xd8\xb0\x7f\x4a\x11\xe2\xe9\x05\x0c\x59\x30\x0d\x53\x08\x0a\x03\x60\xc7\x5b\xeb\x89\xe4\x59\x68\xa6\x67\x9e\x79\x2a\xee\x42\xc8\xf7\x58\xf7\xda\x9f\x01\xb6\x9b\xa3\xb2\xf3\xb4\xf9\x1a\x13\x33\x20\x6c\xd3\xfe\x75\xb5\x08\xc4\xa3\x71\x94\x8d\xab\xcf\x2c\x04\xde\x39\xc5\xca\x50\xbe\x50\x7b\x21\xee\xee\xb8\x55\xf9\xa0\x5b\x45\x8b\x37\xc1\xe8\xfe\xad\x10\x06\x26\xf4\xed\x9f\x97\x85\x17\x02\xd5\xfe\xa9\x47\xa5\x9e\x15\x41\x73\xff\xda\x29\x34\x1b\xb2\xd6\x72\x41\x69\x6b\x2b\x7b\xcc\x21\x7c\x5c\xda\x81\x57\xd4\xc3\x5d\xa2\x2c\x3d\x5d\x59\x77\xb6\x1f\x73\x09\x1e\x75\x76\x07\x58\xa9\x5b\xfd\xef\xcc\xd5\x0
5\x03\xc6\x71\xfa\xbe\x75\x8c\x9a\x45\x5e\x36\x4a\x7c\x02\xdf\x00\xc1\x2e\x7f\x1b\xcd\xa5\xfc\x8f\x9c\x1b\x94\x09\x84\x57\x77\x10\x5f\xd2\xe2\x55\x34\x14\x4e\x4a\x44\xf3\x2c\xaf\x9c\x3c\x91\x29\x99\xa7\x7d\xe5\xdd\x26\x54\x9f\xb4\x69\xd9\x0a\x4d\x50\x4b\xb1\x78\xcc\x90\x7d\xd3\xf7\x5e\x1e\x45\xc8\x1e\x32\xfb\xe4\xb7\xf5\x59\x66\x16\x67\x29\x7b\x17\xe8\x37\x5e\x86\xe2\xac\x4f\x64\x81\xfa\x73\x5e\x5d\x07\x7d\xd1\x23\xb4\xb1\xe6\x06\xcc\x50\x85\x31\x09\xe8\x4e\x62\x4b\xba\x73\xdc\x45\x7d\xe9\xb6\x58\x16\xe9\x7c\xe9\x8d\x6f\xc7\x82\x8a\x43\xd6\x60\xf7\x4b\x24\x4c\xb7\xb9\x44\xba\xf0\x83\xdb\x4c\x1b\xac\x29\x96\xba\xbb\x32\xca\x95\xea\xfd\xd6\xb6\x6a\x60\x74\x44\x5e\x00\x28\x78\x26\x69\x56\xed\x5b\xf6\xe5\x83\x1b\x1e\x08\x18\xfe\x5c\xf1\x85\xe1\x08\xe3\x50\x41\x0a\x55\x0b\x18\x38\x43\xce\x25\x1a\xbd\x4b\x55\xf6\xfa\xea\xea\x9f\xa5\x77\xb6\x94\x4f\xaf\x92\x5a\xf1\x65\x0b\x47\xab\x6e\x9c\x3f\xbc\x46\x94\xce\xd2\x17\xa2\xf4\x17\x16\x5b\xdd\x2f\xc5\xb6\xe8\x60\xd5\x8d\xac\xdd\x31\xac\x30\xc4\x40\x88\x5a\x1c\xd8\xb4\x2e\xcb\x89\x1e\xf2\x5a\x98\x06\xc5\x11\xa8\xc3\x26\xe7\x28\x74\x10\xef\x4e\xff\xd1\xdd\x0a\x08\xe3\x26\x63\x86\x2e\xdf\xe8\xc2\xd5\xe5\x04\x5a\x02\x09\x9d\x4f\xa5\x43\x66\x37\x42\xd5\x50\x0c\xe6\x8b\x80\xd3\x50\x70\x57\xc9\x58\x3a\x5d\x1e\xd9\x03\xf5\x62\x60\x49\x8a\xff\x71\x0e\x67\xac\xbe\x5f\xa8\x5f\x7a\xd0\xd8\xc4\xe4\xa9\x2f\xb7\xa1\x59\x62\xa1\x5d\xfa\xe3\xe6\x88\x9e\xa8\xb3\x59\x74\x17\x8e\x60\xf3\x12\x90\xca\x34\xd5\x82\xac\x4d\x26\x2c\xc5\xdd\xd0\x05\x75\x3f\xa9\x2f\x92\x29\x3a\xdf\xea\xec\xda\x0a\xe5\xaa\xa0\xfd\x76\xbb\xfc\x32\x63\x48\x13\x5e\x31\x4b\xcf\xc0\x6b\x50\xa7\x8a\x19\x60\x13\xb3\xb2\xee\xc0\x8a\xec\xaa\x4b\x0f\x21\x10\x7d\x5b\x45\xd8\x1d\x89\x18\x62\x5d\xce\xc8\x14\x71\xc8\x77\x1c\xfb\x27\x24\xac\x6c\xce\x04\x2b\x18\x07\xba\x15\xf7\x3d\x05\x2a\x31\x38\x97\x44\x28\x1e\x31\x3a\xfc\x17\xb0\x38\x4a\x14\x4b\xaa\x5e\x55\xc6\x4b\x9e\x60\xd1\xac\x8e\x00\x94\xf8\x22\x6a\x30\x80\x8e\xb7\x93\x58\xda\x14\xb9\x20\x8c\xde\xfc\x74\xeb\x39\x61\x21\x9f\x72\x1a\x57\xb8\xa0\x8a\x34\xc5\x6b\x0e\xc5\x6e\xd5\x6a\x39\x9f\x23\x30\x52\xb7\xba\xcb\x7c\x55\x6d\x2f\x5d\x8d\xb8\x42\xb3\x97\x33\xf4\x6b\xb8\x0a\x58\x1b\xe7\x3c\xcf\xb1\x13\x63\xbd\x0e\x7f\x86\x1f\xa7\xc5\xf9\x5c\xd7\x54\xbf\xa3\x49\xd5\x5f\x6d\x4e\xc1\xd9\x41\x4d\x0d\x5a\xc0\x33\xe3\xea\x92\x66\x93\xcf\xa6\x7c\xe9\x98\xd1\x24\xab\xd8\x38\xa2\x4b\x9b\xfe\x05\x3c\x73\x7c\x3e\xc3\xfd\x0d\xac\xd2\x70\xe8\x9a\xc2\x6c\xba\x0d\x1f\xba\xe1\xbc\xb8\xd1\x0f\x32\x9c\x22\xc7\x57\xe9\x31\x1d\xa7\x72\xb0\x77\xbd\x2c\xbe\x58\x38\x75\x32\xaa\xf5\x38\x6c\x91\xc1\xc8\x7d\xc5\xac\xc4\xe9\xb9\x4e\xfd\x81\x23\x7e\xf9\x70\xd9\x1b\x95\x2a\x4a\x2e\x40\x5d\xac\x9a\x75\x38\x7c\x3d\x1c\xcc\xe6\x55\xe6\xee\x3a\x0c\xec\x46\xfe\xe5\x24\x46\x5a\x56\x46\xac\x4e\x6b\x73\xe2\xb1\x7a\x93\xb9\x2a\x91\x10\x95\xeb\x5d\x67\xb1\xc6\xce\xd2\x8f\x45\x2b\xc4\x51\x89\xd6\x5c\x47\xba\x40\xb9\x2b\xd9\xa2\xf5\x2d\x34\x46\x8b\x19\x76\x13\x98\x45\x6c\xd0\xbf\x2d\xdd\x98\xcd\x2f\xf6\x1c\x16\xf9\x3e\xc9\x65\x4b\x5a\x1b\x18\xfc\x0f\x4a\xd6\x91\x48\x8c\x0f\x70\x05\x07\xfa\x9d\xff\xe4\x27\x76\x4f\x9b\x6d\x5a\xf1\xe2\x2e\xe2\x30\x89\xd3\xec\x25\x28\x86\xd4\x46\x10\x4b\xe0\x45\xa8\x28\x76\x2e\x81\x45\x85\xce\x98\x33\x3c\x6c\x80\x3e\x7c\xd6\x6e\x01\x5f\xb0\x9b\xdb\x7d\xba\xec\xb7\x24\x74\x04\xf0\x4b\xeb\xf1\xe8\x6f\x12\x2a\x63\xbd\xe0\x99\x96\x24\x93\x0c\xee\xf6\x67\x8e\xec\x64\x43\x25\xa0\xc6\x89\x5e\x0c\xb4\x95\xba\x4e\xe3\x8e\x05\xfa\x5b\x6c\x72\x59\x09\x4d\xcd\x14\x53\x38\x8b\x42\x94\x8c\x5f\x47\x80\x08\x95\xc9\x46\x09\x92\xd9\x73\x16\xa6\x8b\xcd\x94\x54\xed\x6a\x46\x96\x2b\x1b\x96\x
41\x61\xab\x59\x9b\x02\x7e\xf2\x5e\x6f\x46\x00\xc3\xf5\xa2\x08\xcf\x70\x55\x1b\x77\x79\x2b\x59\x23\x7b\x4a\xd2\xdc\x20\xbb\x26\x9e\x64\x00\x3b\x2f\x6e\xf5\x20\x7a\xcd\xe5\x7d\xf2\x45\xf1\x77\x11\x64\xe9\x5d\x34\x39\x56\xbf\x9a\x3c\xbe\x0a\x45\xc7\x32\x2b\xdf\x36\xf5\xdd\xb1\x7a\xc3\x57\x3b\x41\x47\x53\x88\x9d\x7d\xf8\x55\xc0\x23\x04\x92\x17\x71\xdc\x0e\xb8\x73\xff\x88\x0a\x5d\x2e\x8a\xa9\x4d\xf6\xd3\xfe\x25\x21\x40\xe7\xc2\x49\x5b\x93\x5e\x3b\xbd\xe6\x34\xd5\x6a\x52\x57\x6f\x16\xac\x78\xb9\xe9\x8c\x1a\xb0\xed\xb4\x30\x7d\x5e\x48\x8c\x89\xb3\x7c\x6b\xe5\x27\x01\x19\xe5\x63\x6f\x12\x93\x2a\x1d\xc7\xc9\x99\xb8\x8b\xa0\xf1\x28\x4b\xdd\xc4\xbc\x4f\xee\x31\xe3\x70\x46\x88\x9c\x09\x1f\x95\x02\x36\x09\x93\x34\x74\xb2\x3e\xb9\x86\x9c\x5b\xb5\x1d\xdb\x84\xb2\x4f\x5b\x7f\x4d\x81\x24\x9d\xc3\x5b\x03\x7a\x6f\xae\x20\xf0\x20\x70\xf4\xf0\x49\x08\x21\xa4\x58\x9b\x76\xdd\x08\xfa\x39\xf1\xdc\x56\x24\x6b\x39\x60\xa2\xc7\x38\xc0\x55\x4a\x22\xa5\x68\x93\x5e\xec\xab\xc5\xc8\xcb\x6f\x42\x78\x9a\x4f\xb9\x57\xa4\xc9\x52\x48\x2c\x1a\x7a\x0c\x2c\x51\xc0\x0d\xb5\x90\x93\x26\x5a\x4b\x58\x1e\xb0\x89\xf0\x9e\x46\x25\x2f\xa9\xaf\xf2\x61\x2e\xe9\x4b\xd5\x27\x9e\x5b\x57\x89\x24\x76\x69\xed\xa3\xda\xb8\x06\xf0\x46\x0b\xf0\x6c\xf7\xe5\x11\x80\x2e\xa2\xda\xba\x61\x6f\x5c\x6c\x39\x74\x78\xa8\x1f\xac\x09\x8d\xb9\xc6\xd5\xfc\x05\x2d\x6d\x42\x8e\xd4\xa2\x8e\x7b\x9e\x7e\x69\xf8\x8f\x5e\x6a\x3e\xbb\xfd\x2b\x13\xf5\x79\xfe\x0a\x1f\xc8\xbf\xcc\x2e\xfe\x2f\x39\x5e\xcd\xe5\xcd\xd2\x4c\x41\x33\x4e\x79\x3f\x5b\x8b\x84\x04\x06\x69\xc4\x2e\x96\x12\x5b\x9b\x16\x53\x64\x6c\x84\xc2\x86\x23\x28\xed\xa9\x2d\x9f\x7d\xd4\xee\xc5\xa6\x8b\x5a\x36\x45\xe3\x29\x4d\x32\x9b\x88\x8b\x92\x89\x30\x55\xdc\x28\x94\xcc\x04\xd2\x1c\x67\xee\xb1\x07\x36\x50\x9c\x9d\x90\x99\xe8\x5a\xfc\x69\xb3\xfe\x59\xa7\x94\x47\xdd\xda\xd0\xc3\x62\x6a\xfa\xc9\x4d\xad\x09\x7f\x12\x45\x66\xc1\x0c\x64\x5f\x07\x67\xb5\xc2\x2d\x1f\xbe\xf7\x78\x04\x91\xd6\x86\x95\x7c\x25\xba\x7c\xbd\x51\x81\x9b\x35\xc5\x7d\x43\x14\x8b\x40\xc7\xbc\x75\x49\x58\x4b\xa1\x03\xde\x58\x8d\xf0\x2d\x7d\xd7\x1c\x3d\xe9\x3d\x19\x54\xae\xcb\xf9\x8e\xdd\x36\xe8\x5f\xed\x48\x84\xe5\x34\x06\x7e\xb2\xaa\x9a\xbe\x13\x52\x56\x9c\xa3\x8d\xda\xd6\x2d\x2a\xe9\x3f\x57\xb9\x18\x9d\x37\x29\xeb\x5c\x31\x8b\x55\xbc\xfc\x8d\x02\x51\xa6\x3d\x00\x8e\x30\x95\xaa\x10\xca\x73\x3c\x2a\xd6\x71\x3f\x4e\x1f\x22\x60\xce\xe6\x2b\xd5\x81\xae\xd4\x9d\x9c\x34\x99\x3b\x17\x20\x98\x43\xdd\x8c\x0f\x2b\x0d\xda\x18\xe5\x79\x9d\xca\x75\x83\x2d\x6d\xae\x9c\xaa\xc0\xd9\x19\x1a\x68\xd6\x21\xf8\x45\x3f\x17\xcc\x3e\xab\x9c\x19\x66\x74\xae\xac\x18\x7f\xe2\xbd\x47\xef\x6c\x32\x0a\xd9\x0b\xaf\xc6\x92\xbc\xce\xc9\x4f\x6f\x30\x81\x96\x93\xf4\x71\x01\x1e\x94\x65\xc6\x16\xc0\xc6\x73\x5b\xe0\xdc\xca\x50\xd8\x9e\x43\xda\xde\x87\x5d\xe1\x4e\xc5\x66\x8a\xa3\x31\x8a\x95\x33\xe0\x32\x57\xa9\x9f\xe1\xda\x1c\x04\x03\x6e\x79\x7d\x51\x76\xcd\x01\x98\x0c\x72\x1f\x32\x69\xe9\xa8\x1d\xda\xe1\x4e\xa2\x93\x5e\x41\x9c\x43\x8a\x2d\xad\xf9\x4f\x80\x4d\x9b\xe4\x28\xea\x32\xb5\x31\x50\x51\x70\x80\xa0\x74\x21\x0e\x00\xc5\x37\x97\x98\x38\xbc\x0c\xe9\x81\x34\xc1\x9c\x1f\xe3\x48\xd3\xe0\x45\xbb\x36\x99\x5b\xe1\x78\x63\x2b\x29\xf0\x93\x5a\x55\x3f\x28\xf5\xaa\x38\x2d\x91\xbc\xc1\xfa\x8d\x95\x02\xea\x94\x2d\xa1\xbe\x84\xb2\x37\xbd\xc7\x1d\x3d\x29\x13\x4b\x17\x21\x4b\xdb\x2b\x32\x09\xb4\x5f\xa8\x0b\x13\xbb\x88\x7a\x3e\xd1\x93\x59\x45\x81\xa3\xe5\xa7\x05\x12\x0f\xae\x70\xc2\x95\x26\x5f\xa4\x62\xd7\x7d\xa4\x00\xaa\x78\x83\x72\x68\xe9\x65\xbd\xed\x03\x08\xc7\x22\x0b\x10\x6a\x9a\x59\xca\x5a\xd8\xd8\x6e\xcf\xb7\x58\x63\xb6\xc1\x69\x8d\x42\x05\x94\xca\x42\x0c\xc2\x01\x90\x3c\x75\x36\x91\x2a\
x12\xec\x8a\x07\x44\x28\x28\xcc\xbd\xf6\x80\x31\x9e\xee\x96\xee\x77\x12\x44\xcc\xbe\x90\x95\x8d\x04\x4b\x2c\x82\x93\xe3\xd9\x0b\x0f\xe9\x3a\x51\x92\xee\x3a\x9b\xb9\xea\xb6\xec\x00\xfb\xd6\x86\xf5\xb7\x4b\x45\x85\x41\x55\x4a\xf7\x0e\x15\xcf\x58\x46\xc7\x4f\xb2\x1e\x82\x54\x20\xa9\xb5\xfe\x03\x2c\x25\x22\xae\x70\xe3\x04\x7e\x0f\x56\x71\x23\x8f\x92\xb7\xb6\xa3\x26\x19\x8c\x36\xea\x8d\x8c\xfa\x39\x79\xb2\x49\x4b\x39\x0f\x30\xaa\xca\x31\x0e\x4a\x6a\x94\xb7\xb2\x9f\x4a\xbe\x66\xc0\xb9\xd2\x18\x0f\x3f\xcf\x1c\x9a\x6b\xa9\xce\x4f\xf7\xff\x9d\xd9\xee\xe9\x9d\x61\x55\x47\x83\x2e\x90\x90\x6a\xab\x0b\x48\x09\x45\xe9\xcb\x56\x04\x4d\xf2\x0b\xd6\x71\x09\x6d\xb5\x7c\x97\x69\xad\x21\x0f\x46\x07\x52\x3e\xaf\x88\xb0\x98\xab\x3f\x1a\xfc\x3f\x62\x20\xa9\x69\xbc\x63\x74\x9c\xb4\x67\x9a\x55\x02\x00\x33\x1b\x13\xde\x9f\x68\x80\x30\xc5\xb5\x6c\xc1\x29\x03\x32\x69\x77\x1f\xad\x20\xf7\xd2\x54\x84\xa2\xf5\x40\xcf\x49\xa2\xb2\x21\xaf\xa6\x58\x8a\x6d\x22\xcd\xed\xd3\x18\x7c\xbf\xdd\x4d\x7e\xfd\x32\xbc\x2e\x57\xf8\x0a\x61\xb4\xd3\x33\x85\xfd\x35\xbe\xdf\x82\x2c\xbe\x6c\xb0\x94\xa7\x9e\x65\x12\x71\x4d\x3d\xf4\x20\xb4\x99\xf7\xfa\x67\x23\x45\x0b\x2b\xba\xdc\x04\x9e\x91\x5d\xd3\x3e\x9c\x55\x89\x6e\xe0\xa8\xb7\x87\xb7\xf4\x1e\x96\x70\x96\xcd\xff\xbc\xb9\x67\x7c\x73\x23\xd2\xbb\x43\x0d\x03\x5e\xe8\xa0\xc3\x4e\x71\x86\x06\xdf\x0d\x41\xfc\xcf\x5b\xba\x0a\x62\x45\xec\x5f\x81\x47\x40\x08\xcb\x3c\x91\x20\xbe\x7b\xc5\x8f\x69\xa4\x0d\x6d\xf2\x1a\xcc\xd2\xf8\x85\x4a\x1a\xa1\x93\xe0\x5a\x06\xb8\xc8\xa5\x0e\x22\x5f\xda\x41\xee\x11\x60\xb2\xbd\x5d\x12\x5b\x68\xfc\x4c\x53\x4c\xed\xf7\x76\x98\xab\x41\x33\x20\x87\x0b\xaa\xaf\x32\xcb\xcb\xa1\x5e\xd8\x7e\x71\xb4\x76\x50\xf2\x46\x06\xc0\xdf\xfb\x01\xa1\xe3\x66\xd3\xce\x41\xb9\xc7\x4e\xa8\xc4\x8e\xd5\xc8\x54\xc1\x87\xa1\x3b\x97\x96\x20\x00\x03\x46\xc8\x03\x5b\xe7\xde\xb3\x57\xdd\xec\x11\x0f\x9b\x3c\x09\xa2\xed\xb9\xa1\x25\xbc\x85\x09\x17\x99\xa0\xc1\xa5\xe3\xb4\x0a\x72\x68\x51\xb4\x76\xf3\x3c\xe2\xde\x46\x39\xe7\xb5\x05\xfa\xde\x0f\x82\x06\x21\x12\xe0\x37\xc0\x96\x3f\x5b\x11\x50\xef\x4d\x78\x19\xe3\xe4\xab\x4f\x98\xa8\x81\xb6\x14\xa6\x77\xbb\x6c\x9a\xab\xca\x19\x63\x87\x17\x0a\x2d\x48\x67\x16\x59\xc1\x65\xaf\x43\x6a\xc4\x87\x64\x3c\xff\xb6\x5c\xaa\xa3\x21\x74\xd1\x3e\xd4\x56\xd3\xf2\xbf\x5c\xf2\xf8\xd1\x73\x83\x19\x13\x35\xca\xec\x77\x05\x1c\x6e\xb8\x11\x8b\x37\xc2\xf1\xb7\x8c\x15\x4c\x08\xef\x98\x6d\xa9\xd8\xc1\x71\x6e\xce\x6d\xaf\x00\xc4\x8a\xf5\x77\x3c\xdd\xa6\x13\x92\xf3\x03\x4f\x53\xba\x6a\xfc\x7b\x28\xd4\x64\x5b\x7f\xef\xa3\x22\x13\xe3\xdb\xe0\x4c\x1f\x13\x0d\x80\xf2\xf6\x6a\x47\xd8\x7c\xa1\x2b\x29\xb7\xc6\x30\xdc\xc8\xa0\xb5\xb6\xc6\x0d\x8d\x34\x90\x25\xe0\x4d\xd2\xdb\xd6\x7d\x07\x5a\x14\xd4\x51\xee\x3a\x76\xc7\x7d\xda\x30\x48\x84\x08\xf1\xa0\xa8\x5a\xe9\x9b\x52\xf7\x95\xbf\x80\xcd\xe2\x4f\x9d\x05\xac\xd9\xa2\xf3\x7d\x3e\x28\xc3\xf6\xe2\x96\x72\x56\xeb\x85\xfb\xc0\x39\x0e\xd1\x7c\xb8\x08\xdb\xc8\x0c\x60\x95\xfb\x2e\xb0\x91\xf1\xc7\x57\x15\x36\x62\xf0\x5c\x77\x6d\xaf\xe6\xab\x15\x3c\x99\x04\x77\xb4\x53\x69\x18\x1d\x61\x3b\x0e\x1c\xe5\x36\xe6\xc0\xd3\x41\x6e\x63\x0f\xcd\x13\x20\x27\x43\xd5\x7e\x75\xb1\xa4\xf8\x22\x9a\x12\x31\xf9\x85\x7f\x9f\xce\x26\x54\xd6\x6e\xbb\x3f\xfb\x71\x42\x8c\x7b\x75\x10\x69\xf7\x05\x9e\x8c\x77\x81\xa1\x51\xe3\xe5\x55\x0d\xfd\xe4\x66\xb6\x56\x46\x33\x50\x95\x70\xad\xf3\x27\x59\x86\x7f\x8a\xe2\xcc\x5c\x7d\x96\x1e\x7c\x2e\xe6\x16\x6f\x74\xe4\x81\x4d\x7e\x3f\x81\x7c\xac\x97\x10\x9a\xea\x5c\xa8\x02\x7f\x05\x98\x9d\xc2\x78\x97\x11\xe0\x82\x3b\xcc\x3f\x71\x04\x97\xb9\x65\x8d\x86\x0c\xcc\x50\xb8\xf3\x86\xe0\xa3\xae\x1a\xab\xba\x26\xa4\xd6\x76\x81\x1e\xc3\xdc\x8b\x40\xe6
\x58\x48\x91\x73\x92\x10\x4f\x98\xed\xdc\xba\x71\x74\x03\xd4\x51\xa3\xd2\x04\x84\x01\x27\x95\xde\x4b\x1c\xdd\x3a\x7e\x52\x2f\xad\x97\xb6\x6b\x76\xe2\x8b\x37\xec\x69\xf6\x2b\x96\x6f\x60\xa6\x1c\xfe\x6c\xf0\xb6\x8e\x93\x36\x83\xb0\x9d\x15\x38\xf2\x18\x0a\xb5\xa1\x31\x3d\xa3\xf3\xc4\x0a\x41\x2f\x61\x33\x37\xa0\x33\x8b\x46\xcb\xaf\xa6\x5a\xe0\x91\xe3\x6f\x4b\x8f\x0a\xd0\xd4\xd7\x06\x29\x6c\x4d\x37\xdb\x68\x4d\x22\xb0\x58\x10\x72\x00\x58\x70\xc4\x88\xab\x8f\xec\xe3\x9b\xda\xe1\xb2\xf7\xcd\x8c\x2b\xe2\x12\xb9\x69\xc0\x32\xed\xb7\xbc\x14\xe2\x17\x99\xd5\x1d\xf9\x43\x40\xb8\x9a\x4d\x72\xb8\x7c\x82\x84\xe2\x25\x45\x5d\x8b\x7b\xce\xe0\x2d\x6a\xa8\x11\xb7\x8d\x72\xfb\xb3\x9c\x7a\x51\x5d\xb8\xd0\x96\xa5\x14\x8e\x3f\xa5\x50\xa3\xed\x96\x79\xf3\xa9\xbe\x2b\xc1\xfa\xd6\xcb\x35\xa5\xeb\x54\xbc\x2c\x0d\x2d\x2e\xf7\x47\x00\x2c\xf9\x9c\x95\x66\xba\x7d\x49\xd1\xa0\xb8\x96\x4c\x0a\x4b\x94\x10\xd8\xe6\x1f\xc4\xf5\xcf\xf2\xf3\xb2\x8e\xc5\x31\x91\x57\xef\x05\x81\xe5\x22\x30\x12\xb9\x2c\xa1\x8e\xf5\x41\xb2\x28\x26\x58\x4b\x51\x1d\x6d\x59\x68\x08\x68\xb7\x9f\x03\x24\x6f\xec\x97\x85\xbd\x9e\xaf\x45\x85\xd8\x25\x7f\x76\x27\x86\xee\x6c\x3c\x7d\x26\xc7\xe1\xc8\x9a\x4d\x61\xc6\x4f\x50\x26\xe6\xf3\xfe\xbb\x2e\xd9\x7d\x93\x74\x0e\xe8\x9c\x85\xe8\x24\xa6\x4c\xb8\xcd\x2d\x5e\xef\x27\xbb\xee\x52\xb0\x73\x47\x43\x04\x28\x8e\xeb\xa7\x84\x5f\x84\x4c\xef\x16\x28\x57\x14\xc1\x3c\xa1\x37\xa7\xcf\x00\x02\x83\x8b\xa8\xae\x3d\x67\x29\xd1\x4e\xf4\xe7\x4a\x0f\xc4\x89\x9b\x11\xbe\xa4\x0f\xa0\x33\xc9\xb1\xf4\x70\x6f\x27\xa1\x3b\x37\x2e\x55\xdb\xac\xdc\x67\x61\x0e\xf7\x94\xae\xac\x70\xa0\xae\x00\x3f\x9f\x31\xdc\xa0\x6a\xb5\xfb\xf6\x45\xb7\xc5\xb9\xea\xb4\xce\xa7\x27\x9a\x42\x7c\x22\x71\x14\x5c\xb2\xd8\x0c\xff\x41\xa5\xba\x5e\x0f\x31\x3c\x94\x4f\x45\x05\x72\xe2\xf4\x61\xa4\x85\xe2\xc5\xb0\x63\x4e\x33\xa3\xb7\xdb\x5f\x8f\x50\xa4\x13\x2e\x14\x61\xd5\x8d\x2b\x61\x48\x58\x3a\x4c\x73\x6e\x64\x22\x68\x08\x3a\x09\xa1\x4b\xc7\x9d\xdd\x96\x05\x91\x9f\xce\x58\xb9\x0c\x23\xbf\x19\xc4\x7d\xdf\xda\x5d\xf3\x61\xd0\x71\x56\x3c\x8f\x73\x21\xff\x15\x88\xd8\x00\x8f\xcc\xe3\x6b\xa0\x22\xcd\x9c\x31\x6d\x1b\xe5\xbb\xb8\xb9\x63\xd4\x83\x28\x51\xcb\xe8\x13\x0f\xd1\x9a\xed\xec\xe0\x00\x5d\x53\xf7\x66\x1c\x3b\x85\xfa\x59\x2d\xd4\x0a\xbb\x45\xc0\xd6\x13\xfa\x31\xb7\x7d\x0a\x05\x67\x3c\xf9\xc2\x8c\x56\x7d\x0d\xc4\xa9\xe5\x34\x71\x16\x95\xca\x66\x53\xf1\x06\xfa\x32\x7e\xc7\x82\x99\xda\xfd\x7f\xc6\xbb\x26\x68\xf6\x48\x37\xcf\x6e\xa4\x6b\x0f\x51\xbc\xdf\xb8\x85\xe7\xd4\x71\x8e\x0c\x28\x28\xa1\xa7\x2d\xbd\xa8\xf5\x3d\x30\x5c\x11\xc2\xf1\xd2\x24\x72\xba\x5a\x87\x92\x4e\xa7\x33\x09\x3d\x93\xf3\x58\x58\x4e\xeb\x76\xc4\xb2\x98\x2c\x9f\x59\xfe\x84\xd8\x8c\x5c\x1b\x3a\x76\xda\xc3\x0a\xfd\x24\xde\x19\xf1\xa0\x74\x88\xb4\xf1\xb9\x0f\x30\x24\x0a\x33\xc2\xd1\x4d\x50\x45\xe1\x8a\x16\xee\x63\x02\x46\x01\x1e\x5f\x02\x15\x5a\x45\xa0\x3c\x8e\x1b\x6b\x1a\x25\x81\xed\xfa\x78\x7c\xb9\x4f\xd2\x4b\x9e\x2c\xef\x60\xb1\x04\x90\xed\xa6\xa4\xb0\x8e\x9c\xc7\x8f\x10\x59\x53\xee\x7d\x78\x76\x44\x27\xd1\xa5\x28\xa2\xf3\xed\xe9\x73\x85\xfa\x1e\x70\x49\x49\xc0\xd8\xa4\x39\x24\xd4\x4c\x5a\x1a\xc7\x43\xe3\x8a\xfc\xe6\x17\xae\x3f\x65\xee\x96\xa2\x25\x86\xd7\xc2\x04\xc6\xfa\xf9\x57\x05\x56\xfa\x26\x23\xa5\xb2\xa1\x4d\x2e\x2c\x36\x82\x5c\xaa\x3e\x58\x58\x76\x5f\xab\x79\x65\xdc\xd4\x8e\x57\x78\x25\xf7\xd1\x75\x0d\x3c\xdf\x52\x72\x8f\x45\xd8\x5f\x15\x61\x7f\x35\x59\xfb\x2d\x57\xa0\x1f\x69\x39\xa9\x98\xee\x2d\xbf\xfc\xf1\x7e\xc7\x70\x2b\xfa\x0d\x7b\x42\x60\x2d\x5d\x8b\xf3\x37\x2b\xbf\xfe\x4d\x4b\xe0\x30\x21\xc7\x98\xf6\xe4\x7d\xd2\x6f\xa8\xdf\xfe\x12\x6e\x00\xb5\x45\xff\x09\x6c\xba\x76\x94\x55\x02\x79\x00\xa
6\x3a\x56\x11\x65\xa3\x15\x3e\x4c\xb7\x5b\xe5\xa6\x42\xf8\xb8\x78\xe5\x6b\x09\xbd\x3b\x61\x2a\xa5\xa7\xc7\xe5\x66\x37\x5a\x79\x50\x45\xef\x79\xfb\x65\x05\x38\x52\x9f\xe3\x86\xcf\x01\x9b\x1c\x62\x39\xdf\xc3\x50\x85\x02\x98\x88\xa8\x46\x0b\x96\x93\x50\xc8\x50\x5c\x96\xb0\xde\x5f\x88\xec\xe5\x7f\xd5\xc8\xd2\x7d\x32\x3d\x02\xca\x38\x7d\x79\x6e\xe6\xff\x9c\x8f\x75\x10\xf1\x16\xe0\x01\xe6\xd8\xdf\x55\x9e\x8c\x3b\x66\x1e\xb7\xd4\x1f\xea\x39\x9a\x6c\xa1\xf1\x04\xb9\xe8\x96\xec\x73\x6b\x84\x2a\xa0\x24\x7f\xcd\xed\x4b\xd3\xcf\x8d\xa8\xb4\x38\xa3\x91\xd5\x75\xfb\xde\x51\xfc\xb5\x16\xc6\x48\x7e\xc1\x74\x63\xd7\xec\x84\xca\x26\xbb\x4a\x10\xe8\x75\xf9\xb1\x9c\x39\xba\x88\x3e\x80\x97\x85\xea\x3b\xad\x9a\x72\x8c\xb4\xac\x5b\x0a\xa5\x03\xbc\x23\xf4\x68\x02\x74\x39\x8d\xb5\x73\xb7\x66\x52\x95\x64\xa0\x71\xc6\x70\xea\xda\x0f\x67\x17\xeb\x18\x83\x13\x59\x36\x59\x5a\x96\xb7\x70\x98\x8b\x1c\x29\x86\x92\x7a\x2a\xe7\xcd\x9c\xe6\x0c\x63\x70\x84\x06\x54\xb7\x06\x2e\x30\x0f\x2a\x11\x0d\x7e\x39\x85\xd4\x1b\x60\xa3\x5e\x4d\xc4\xa0\xcb\x36\xea\x6b\x32\x6b\xf6\x83\xbb\x31\xca\x82\xd3\xae\x8c\x80\x9e\x79\xdc\x26\x19\x32\xc1\xd5\x43\x03\xbd\xaf\x77\xee\x64\x7c\xba\x0d\x27\xab\xec\x08\x3b\xd9\xbd\x0d\xd5\x12\x01\x4e\x9c\xa3\x44\xfe\xec\x5d\x6e\x7a\x26\x90\xe4\x97\xf8\x2b\x1b\x71\xb6\x96\x0d\x36\xb5\x0c\x0f\x45\x9b\x58\xdc\x1e\xd2\x1a\x57\xd8\x1e\x45\xca\x0c\x52\x46\x90\x7a\x81\x18\xa9\xf8\xaa\xdd\xfd\x1c\xc7\x06\xb0\x33\xb1\xaf\x3f\x0f\x61\x31\x59\xd7\xed\xcf\xd9\xc7\x6f\xf8\x73\x8a\x42\x63\x61\x97\xb1\x2a\x8b\xd8\x06\x6d\x5f\xae\xe8\x37\x69\x88\xf5\x5b\xa0\x38\xc7\x9d\xed\xb0\x88\x87\x17\x27\xf7\xa8\x73\x71\x91\xc1\xf7\x28\x57\xcc\x8d\xe5\xc3\x7e\xab\x34\x07\x70\x48\xec\x21\xf0\xef\x29\x42\xe7\x58\xf0\x54\xa0\x3a\x4a\xbf\x88\x10\xda\x87\x52\xa3\xcd\xc6\x1c\x05\x65\xdb\x79\x0d\xc8\x66\x0a\x50\x45\x3f\x62\xb9\x93\x7b\xb1\xe4\x1b\x54\x8f\xde\x06\x45\x4c\xf4\x10\x66\x4a\x6d\xf9\xee\xec\x12\xca\x81\xd7\xf4\xc5\x1c\x3f\xcf\x74\x75\x5e\x20\xeb\xb9\xac\xdd\xfb\x64\x2e\x11\xba\x09\x90\xa6\x8e\x5a\xfa\xa6\x5a\x79\xf4\x90\x40\xff\x0e\x85\xd9\x94\x7f\xe6\x49\xe8\xd3\x13\x22\x62\x0c\xca\xa7\x82\x31\x0b\x0b\xe4\x16\xfc\x1c\x7e\x49\x9e\x5b\xe0\x09\x37\xfa\xd4\x3d\xc1\x20\xbe\x84\xee\x04\x39\x4d\x30\x1a\x9f\xf1\xf2\x5b\xad\xdf\xea\xb3\x4c\x0c\x80\xbf\xdc\x69\xfd\x09\x50\x88\x12\xb5\x67\x11\xfc\x12\xa8\x9e\x8b\xd1\x5a\xf1\x8d\xd0\x3a\xfc\xd1\xeb\xb2\x6c\x79\x4e\x4a\x47\x9e\xd5\xef\xd3\xe6\xa5\x4f\xfa\x4f\x54\x58\xa6\x50\x97\x0e\xcc\xdc\xc6\x90\x64\xfb\xda\x21\x8c\x26\xda\xf5\xb5\x34\x09\xb8\x49\x79\x22\x30\x1b\x32\x3f\x55\x57\xce\x61\x9c\x0f\x47\x68\x6a\x01\xfb\xe9\x75\x14\x47\x60\x52\x1e\x91\x9f\xeb\xb1\xfe\xc0\xf4\x47\xa8\xe6\x47\x5e\xab\xe9\xa8\x18\xcd\x3d\xb2\x64\x0f\x43\x1b\xfb\x99\xb6\x80\x6b\x12\xc4\x29\xd5\x49\xfc\x82\x0c\xa6\x3d\x72\xe4\x67\xf2\x34\x2d\xa4\x15\xf5\xa7\xc4\xa2\x17\x80\x9d\x59\x2b\xbe\x37\xae\x53\xd0\x48\xfa\x93\x6a\xd6\x17\xc8\x4e\x1e\xd9\xda\xe5\xff\x63\xb4\xc5\x08\x01\x7b\x52\x08\x53\xeb\x83\xdb\xf4\xbe\x11\xa2\x69\xfe\x62\xf3\x64\x09\xa8\xdb\xe4\xd3\xf1\x8f\xc2\x76\xf9\x02\xb6\x1c\x4d\x77\xa6\x6e\xd2\x70\x64\xe1\xcd\xe5\x4d\xb0\x6a\x42\x70\xe9\xff\x54\xf1\x42\x30\xc9\x5b\x0a\x0f\xaf\x75\xc4\x5f\x9e\xe9\xae\xf1\xc8\x33\x42\xfd\x4d\x7a\x8f\x70\x73\xe0\x17\xa1\xf8\x9a\x15\x02\x5a\x4d\x80\x26\x00\xa2\x5b\x75\x30\x0d\xf3\x95\x0c\x21\xb9\x41\x4b\xf1\x20\x1c\x89\x7f\xe0\x30\x50\xbe\xb1\xef\x65\xc1\x92\x3b\x69\xbb\xa1\x00\x52\xe2\x0a\x57\x32\x61\xd8\x83\xe2\x6b\xe4\xfe\x12\xb5\xb4\xee\xda\x31\x6a\x13\xaa\x29\x8f\xcc\x93\x6e\xa6\xd0\x65\x16\x54\x0f\x66\x49\x42\x1b\x5a\x96\x03\x1e\xdc\xc3\xf1\x9c\x0b\x67\x6c\x44\x76\x0a\x
cf\x09\xce\x2a\x51\x9c\xd6\xd3\x07\xfa\xa4\x83\x7c\x95\x0c\xc4\x2c\xee\x29\x4d\x80\x28\xa0\x7d\xe5\xe3\x8f\x1b\x64\xce\xa0\xdc\xaf\x50\x1f\x8a\x1a\x56\x61\xe4\x3f\xed\x36\xfe\xf3\x46\xbc\xac\x4a\x8f\x05\xac\x8b\x38\x24\x54\x32\x7a\x10\x96\x04\x9c\x78\x7f\xdc\xa2\x91\xa1\x04\x89\x25\xd6\xe9\xd0\x51\x3e\x73\x8a\x04\x02\x90\x4e\x64\x8b\x0a\x87\xb4\xdd\x82\x92\x09\x1d\x77\x80\x0f\xc7\x4d\xc3\x42\x67\x87\xa3\xa0\xc7\xef\x59\xfa\x81\x1f\x1b\x95\x38\x22\x40\xe3\xc3\xc1\x29\x5c\x3c\xc9\xb2\x78\xe6\x91\x51\xda\xeb\x46\x27\x83\x22\x71\xda\x46\x02\x6f\x69\xa7\xca\x8f\x71\xda\x6d\x84\x36\xeb\x8d\xab\x6a\x39\x0e\xe0\x5d\x1b\x21\x95\x75\x36\x43\xc5\x4a\xee\x84\x11\x55\xf8\xa6\xee\x79\xbb\x41\xa0\x2b\x37\x04\x70\xae\x3c\xe0\xdb\xc0\x0e\xad\xf4\x0e\xc5\x4a\xe1\x43\xf9\x99\x76\x77\x2f\x81\x4f\xb5\x3e\xbf\xce\xf3\xc2\xca\x55\x2f\x79\x41\x3a\xc7\xa7\x3d\x75\x07\xfb\xec\xad\xd3\x3e\xbf\x43\x74\x46\x79\x6d\x8b\xfe\x12\x87\x8c\x4a\x1c\x13\xd5\xe4\x5d\x21\x14\xb8\xf7\x1a\xa6\x1b\x56\xda\xd6\x0a\x52\xc4\x1e\x36\x02\xb1\xa6\xae\x29\x13\x21\xa4\x76\x1f\x9f\x1a\x7e\xbb\xe4\xcb\x71\xbc\x59\x6d\x40\x15\xfa\x98\x49\x76\xa0\x4a\x19\xf7\x2a\x6d\x75\x96\x2a\x7a\xb5\xa6\xfb\xf1\x9e\xa5\xbd\x00\xd6\xc6\x2b\xd6\x5f\x41\x06\x9f\xe5\x02\x4b\x43\xdf\x11\x20\x95\x23\x72\xd1\x95\x9b\xb9\x77\x1d\x25\x08\x79\x92\x47\xdd\x9e\x30\x5f\x58\x7c\x61\xa9\xdd\x4e\x55\xd4\x2c\x12\x2a\x74\x13\x08\xbf\x85\xa2\x71\x00\xe4\x10\x6b\x8a\xd9\x6e\xe1\x9d\x89\xb4\x19\xde\x60\x94\xd4\x8c\x15\x46\x88\x4d\x54\x20\xeb\xd3\x95\x57\x10\x17\xfa\x3a\x0b\xb0\x9a\xda\x43\x8d\x3d\x07\x6a\xf4\xac\x80\xca\x4d\xb2\x7c\x00\xb0\xe6\xa4\xa0\xd9\xc2\xda\x76\xeb\x12\xb1\xa0\xb0\x10\x96\x90\xaf\xa3\x57\xaf\xc0\x9f\x1e\x55\xb2\xce\x1b\x15\xed\x8f\x56\x3c\x82\x1e\xcf\x6b\x78\xe3\x63\x6a\x7f\x02\x63\x13\xf0\xd4\x6f\xb4\xb3\x37\xc7\xf5\xf6\xa8\xb5\x9b\x6d\x0b\xa7\xdb\x98\x1b\x5b\xf8\x5b\x34\x5f\xb4\x58\xb3\x99\x87\xb0\xae\x31\xe5\x77\xe7\x5a\xbf\x6c\x6c\xb4\x50\x99\xd2\x09\xb4\x45\x97\x65\x69\x25\x21\x6a\x83\x10\x15\x4b\xdd\xc3\x3a\x00\xae\xeb\xd7\xa7\xfd\x87\x59\xce\x6c\x64\xdc\xab\x58\xe8\xfc\x1f\x18\xec\x71\xe8\xf9\x14\x0c\x36\x7f\x35\x51\x9f\x51\x7e\xdd\x86\x22\x76\x7b\xff\x39\x3d\xa4\xf8\x84\x7d\xa5\xcd\x61\x5b\xaa\x3e\xab\x75\x90\xc1\x0e\x69\xa7\xb3\xaa\xe4\x43\x94\xd4\x93\xdf\xd6\xa6\x3b\xcb\x93\x48\x27\x8b\x93\x30\xab\xf0\xc8\xe6\x2f\x3e\x39\x88\x8f\x3b\x09\xf9\x73\xcd\x52\x62\x4b\x5d\xbd\x54\x97\xd8\x85\x4a\x39\xda\xb4\xe3\xd6\x9e\x73\x5b\x5b\x18\x22\x28\x37\x3b\xda\x6c\xf4\xd8\x08\x62\x3d\x8f\xbf\x95\x8f\x8f\x85\x04\x28\x3a\x6a\x36\xe0\xae\xa7\xaa\x02\x14\x6b\xd2\x2c\xad\x1d\xc6\x7a\x5b\x02\x1d\x9b\x14\xa4\xbe\x08\x5f\xfb\xb5\x08\xe9\x86\xbb\x77\x98\x60\x1c\x16\xac\xca\x36\xd4\xc3\x11\xe8\x4f\xc0\x02\xeb\x3f\x06\x59\xcf\x73\xba\x32\x35\x83\x5a\x14\x3b\x88\x44\xdd\xc8\xbf\xe7\xfa\x59\xe8\xb2\x26\x9f\xf6\x48\x23\xd5\x81\xb1\x40\xc6\xf2\xa5\x90\x09\xa5\xa9\x6e\xc7\xa8\x8d\x0b\x67\xd9\x32\x11\xb4\xa8\x0e\x4a\xce\x14\x20\x6f\xf0\x91\x5a\x36\x3b\x0a\x5b\x9e\x7f\x7f\x7b\x72\x03\xdb\x93\x2a\xdb\xf0\xd4\x0d\xd1\xd2\x45\x3f\x79\x73\x4a\xd3\xd2\xe9\x2f\x32\x6b\x05\xa3\x7d\x49\x92\x11\x0d\xae\x3c\x6c\x73\x8b\xf6\xd5\x65\x76\x38\xe7\x6a\xce\x4c\x2d\x37\xce\xec\x1d\xbb\xca\x9d\xc9\xbe\x48\xb2\xa7\xe5\x6a\x6b\xbd\xba\xf1\xe1\x80\x6f\xc4\xce\x56\x29\x45\x53\xa8\x64\xe4\x61\xd9\x8f\xff\x62\x5b\x6b\xab\xa7\xe5\x6e\x38\x79\x5a\x44\x9d\x20\xc0\x5c\x5d\x9c\x36\xb0\xae\xa5\x3b\x6c\xac\xa3\x52\x1c\x56\xb1\x0f\xc2\x24\xdc\x77\x57\x70\x57\x47\xea\x8e\xb0\xe4\x61\x7e\x60\x8f\x31\xbc\x0f\xf9\x55\x28\x78\xb2\x5f\x69\xc6\xbe\x29\xb2\xf5\xcd\xd4\x46\x14\x06\x95\x6e\x98\x9e\x13\x6f\x81\x38\xb5\
x7e\x25\xf9\xec\xa7\xb1\xac\x97\xe0\xde\x3c\x94\x22\x2c\x6b\xe5\x50\x37\x90\x8e\x89\x1f\x45\xb1\x87\xd4\x64\xf7\x90\x40\xd2\xdc\xad\x74\xf1\x6a\x72\x17\xea\xa4\xba\xae\x09\xeb\xda\x15\x9e\x6b\x37\x71\xc5\xb8\x86\x4c\xe9\x26\x9d\x49\x44\x99\x93\xd8\x26\x20\xdf\x80\xb8\xca\xd9\x77\x9c\x84\x43\x4e\x82\x26\x3b\x94\x35\x35\x4d\x88\x99\xb6\xa2\x24\xa7\xc8\xb7\xec\xf7\x16\x5c\x1f\x85\x26\x56\xb6\x13\xf2\x38\x07\x75\x9e\x9c\xe1\xc1\x0e\xf7\x46\x67\xde\xcc\x0e\xd7\x3d\xd5\xd4\xd3\x71\xe9\x47\xb8\x80\xa1\x96\x8b\xd5\xba\x74\x1f\xfa\xe5\x69\xe4\x93\xe0\x31\x1c\x7b\x21\x05\xdd\xea\xb4\x36\xa4\xdf\x4e\x4c\x85\xaf\x00\xef\x45\x44\x29\x7f\x01\x50\x37\x7b\x9c\x08\x80\xb5\x59\x08\x95\x52\xb1\x94\x4f\xc4\xe7\xea\x6b\xed\x84\x7c\xae\x7c\x93\xfe\x52\x38\x8b\x2e\x84\xe8\x2e\xa8\x27\x72\x67\x40\x70\xec\x59\xdd\x24\xa8\x02\x42\x41\x91\x52\x02\x63\x1e\x31\xa3\x6c\xe6\xa9\x70\x28\x9c\x58\xfe\xd1\x09\x14\x3b\xf2\x10\xac\x04\x52\x53\x12\xe5\xd0\x40\x97\x3a\x5d\x2a\xc9\x81\xf5\x6e\xf1\x52\xcd\x96\xa8\xa9\xfb\x47\x82\xbc\x8e\x12\x57\x05\x96\xde\x0f\x18\xa1\x36\x41\x87\xe4\x11\xda\x98\x99\x91\xd4\x25\xd2\x10\xff\xbe\x1b\x5f\xf1\xc3\xc1\xb6\x87\xaa\xf3\xee\xcd\xb0\x94\x00\xcb\x3e\xfb\x81\x7f\xa7\xfb\x02\xd7\x1f\x28\x94\x59\x11\x67\x4b\x94\x7c\x67\x07\xad\x04\xb7\xf9\x6d\xc3\x7e\x16\xa8\xda\x42\xcc\x5a\xa2\x0f\x04\x3c\x80\xfc\xf7\x73\x9c\xd7\x97\x39\x74\xdb\x3a\x01\x4f\x16\xc2\xf6\xf8\xcc\x95\x34\x98\x1a\xf5\x84\x09\xa6\xdd\x55\xa3\x15\xaa\x63\x84\xe2\x76\x4e\xfa\x7e\x10\x8e\x1b\xaa\xac\x7f\x98\x92\xfd\x31\xff\xb6\x3c\x4d\x4e\xf8\x7f\x79\xe2\xb5\xb2\xc0\xab\x0c\x17\x47\x8a\x2b\xbf\x7b\xfa\xf8\x5e\xd9\xd9\x0f\x57\xde\x5f\x61\x45\xcc\x32\xd1\x95\x29\x18\x7e\xe5\xe4\x5f\x71\xa5\xf9\x74\x86\xd1\x45\x45\xbd\xcf\x95\x84\x39\xff\xec\xab\x63\x6c\xb7\xb1\x5a\x63\x7d\x90\x51\x4c\x21\xe4\x7a\x4e\x50\x27\x25\x7f\xdb\xad\x8b\xab\xe8\x1d\x30\xbf\x9a\x89\x7d\xc6\x65\x91\x15\xf1\xc2\x81\x18\xbd\x92\xa0\x9c\x40\x91\x55\x20\xfc\x67\x88\xf5\x0b\x07\xfe\xca\x9e\xe3\x03\xa5\x2b\x87\xec\xda\xd8\x41\x7f\x45\x8b\xb2\xfc\x90\x2d\x9a\xf4\x3d\x08\x1b\x2f\xdb\x78\x2b\x85\x16\xab\x2f\x4a\x85\x58\x2c\xf5\xe9\xf9\xb7\xe4\xfc\xd2\x22\x96\x85\xa1\x34\x9b\x93\x90\x25\xae\x05\x66\x13\x13\xfe\x4e\xb4\x3b\x0d\x94\xf0\x3e\x84\xa8\xf4\xc7\x9e\x72\x29\xbc\xb2\xa8\x68\x33\x58\x75\xde\x88\x74\xfc\xf4\xe2\x43\xfe\x91\xe3\xe2\xc7\xee\x4f\xc1\x89\xed\x22\x7a\x31\x00\x4e\x85\x6b\x10\xf0\xdf\xa3\x4c\x73\x09\xa3\x20\x94\x18\x7d\xbc\xa2\xde\xa7\x1c\x99\x8d\x2e\x97\x23\xad\x81\x4e\xbe\x61\xb1\xc0\x55\xf9\x11\xd9\x26\x96\x44\x1e\xdb\xdf\x43\x74\xb1\x3d\xc0\x61\xa1\xf9\xbe\xeb\xcf\xef\xe4\xc8\xe9\x77\xd2\xd4\xfd\x2d\xfa\xa4\x6d\x6d\x36\x2f\xe1\x0a\xb7\x4a\x3e\xc2\x2d\x79\x2d\x44\x2b\x0b\x2c\x0f\xd0\x31\xa7\xed\xe7\x3a\x94\x69\x8f\xa9\x99\xb7\x2a\x02\xd9\x75\x71\xa3\x71\xee\x01\x42\xa6\x77\xab\x4d\xd1\xaf\xef\x5a\xc1\xb9\x6c\x60\x3f\xd5\x46\xf6\xc9\xe1\x5f\xc5\x12\x3b\xf8\xa3\xfd\x2a\xc9\x9f\xcb\x9c\x76\x82\x96\xa1\x2d\x35\xc4\x17\x7d\x99\x2e\xb3\x8d\xca\xc4\x56\x05\x5f\x7b\x1b\x93\xf9\xfd\x55\x0a\xf6\x21\x54\xb2\xc8\x52\x1e\x6f\xe9\xcb\x98\x59\x75\x31\xf7\x53\x12\x33\xea\x5c\xa0\xbe\xee\xfd\x8b\x85\xa3\xc4\x66\x9b\x31\xc5\x9f\x49\x90\xea\x19\x9d\x16\xf0\xc7\xe1\x90\x7a\x54\x81\x2c\x09\xbe\x0a\x0a\x3c\xa6\x8e\x81\x3d\x2b\xc1\x48\x45\x65\x66\xa0\x0c\x0b\x96\x4f\x46\x96\x5b\x0f\xb2\xfa\x78\x00\x4c\x96\x27\x25\x18\x54\x76\x2d\x61\x07\x81\xe6\x12\x7a\xb0\x61\xb3\x49\x14\x78\xbc\xb9\xab\x91\xfd\x71\x45\xdc\x8a\x03\x8b\x49\x80\x06\x38\x70\xd8\xfe\xfa\x4b\xdb\x21\xc0\xb8\xf6\x5b\x6d\x01\x44\x4e\xd5\xf4\x58\xb0\x5d\x14\x9d\xf9\x55\x72\x44\xf6\xd0\xfd\xdb\x6f\x04\x3b\x14\x5f\x17\x39
\xd0\x49\xde\x95\x7f\x31\x70\xc8\x52\x0d\x0a\x60\xb2\xb0\xc7\xdb\x36\x2a\xa2\xd2\x87\x6d\x9f\xbc\x9b\x80\x4b\x06\x1c\xf5\x21\x60\xf2\x40\x79\x14\xc5\x52\x9e\xad\x28\x65\xe9\xbd\x46\xdd\xf5\x08\x89\x05\x92\xbf\x15\xe4\xa8\x02\x1d\x29\xfc\xd4\x0f\x49\x98\x1d\x72\x65\x39\xbc\x28\xef\xc1\x2b\x4b\xc9\xd5\x8b\xd3\x9e\xb3\x1c\xa9\x83\xc0\x9a\x7f\x03\x60\x3f\x0d\x87\xd2\xfd\x72\xc9\x3f\xec\x76\x88\xb2\x20\xcc\x58\xb7\x25\x6c\xb9\xba\x0f\x9f\x9a\xb5\x3b\xc0\x61\x67\xf0\xfc\x82\x2d\x63\x59\xbd\xb6\x06\xfb\x56\xf6\x3e\xbb\x3d\x6c\x2b\xb7\x42\xb5\x52\xcf\x55\x1f\x31\x87\x19\xbf\xe5\xa5\x27\x37\xeb\x20\x39\xd7\x09\x9e\x64\x81\x8c\x9e\x9e\x14\x71\xa5\x29\x32\x60\xcf\xa1\x2a\xe9\x6b\x73\xc2\x52\x37\xb4\x4f\xad\x2d\xe1\x85\xcf\xc6\xbe\xa7\x8f\xbd\xdd\x49\x90\xf0\x55\xe1\xc9\x28\x01\xf2\x55\x17\x9f\x20\x2e\xd9\x86\xe4\x1e\xa9\xc3\xae\x56\xae\x08\xe8\xe4\xe5\x68\xa9\xbf\xaa\xa5\xc3\xca\x76\x1d\x81\xad\x47\x2b\x3b\x19\x94\x0d\x51\x5f\x43\x30\x36\xbe\x1c\xf8\xe7\xa4\x63\xaa\x25\xf2\x91\x89\xca\x73\x3a\x09\xb1\x9c\xe8\xd6\xec\xde\x5f\xb5\x94\x66\xed\x95\x6a\x3c\x6a\x38\xd5\x2e\x5a\x42\xe5\x5a\xb0\x06\x90\x9a\xf2\x44\xda\x9d\x5e\x55\x4a\xd2\xa6\x2f\x67\x18\x54\x83\x7b\xa7\xc8\x72\x00\xe8\xf1\x6d\xc0\xc4\xbe\x6c\x7a\x77\xcd\x82\x61\x2f\x1d\x9c\x1c\x69\x5e\x6c\xf7\xdc\x87\x6b\x17\xee\x25\xa6\xf9\x85\xfb\xde\xf0\x13\x51\xf3\x39\x2a\x10\x5b\x0a\x5e\x40\x5f\xcb\x91\x78\x3d\x79\x46\xd6\x85\xfb\x7a\x0b\x02\xa2\xa7\x37\xda\x15\xec\xab\x9d\x67\x52\xb4\x45\xa5\xc5\x12\x92\x71\x09\xf7\x6a\xaf\xa5\x85\xaa\xad\x53\x37\xfd\x84\xd6\xdb\x0a\xdb\x7b\x07\x1e\xe0\x52\xf1\x7e\x3e\x28\xc1\x06\x40\x5b\x28\xdf\xb2\x9c\xdf\x6a\xb6\x40\x66\x0b\x7c\xdd\xd6\xe1\x67\x5f\xce\xea\x5b\x5d\x3e\x22\x0d\x39\xf4\xba\xb8\xe6\x32\x61\xd9\xa3\x0c\xc2\x97\xf4\xf4\xdb\x7a\x06\x59\xd5\x25\x4a\xbd\x35\x4a\xfd\x77\x3a\x43\xfd\xd7\xa2\x95\x5e\xd4\xf2\x5e\xdd\x27\xda\x0e\xbb\x66\x2d\x46\x73\xfe\x2d\x76\xf2\xdc\x2b\x74\x3f\x14\xa0\x08\xed\x7f\x5d\x5d\xe8\x71\x82\x59\xa9\x5a\x83\x43\xb4\xbf\x4f\xed\x95\x88\x43\xd8\xc7\x7f\x3c\x80\x54\x6f\x58\x61\xb3\x59\xbb\x28\x86\x1e\x2c\x7d\xd5\xb5\x75\x99\x1e\x41\xbd\x5d\xbb\x96\x08\xef\xfe\x29\x65\x9c\x7d\xa4\xec\x17\x9e\xbd\xb4\x51\x16\xb7\x93\x65\xea\xee\xc6\xfd\x1e\x34\xf5\xc1\x07\xd1\xf7\x22\xf2\xfb\xa4\x8c\xad\xc3\x87\xaa\xbf\xb4\xf5\xda\xc8\xb2\x74\xf5\x07\x1b\xa4\xc2\x4b\xbb\x7c\xbe\x14\x8f\x07\xac\x36\xf1\x1c\x87\xff\x4c\x12\x5d\xed\x16\xa3\x3f\xe3\x75\xa2\x04\x8c\x85\xa5\xaa\xf5\x0e\x63\xa2\xef\x1a\xe0\xa5\xf1\xdb\x0d\x49\x46\xdb\x54\x7b\xd9\xb1\xa0\xdf\x8d\x2b\x07\x7c\x6f\xc9\xe1\xda\xa5\xeb\x73\x84\x82\xcc\xed\x30\x7a\x09\xd6\xd6\x0e\xfa\x7e\x40\xe5\x73\x0e\x4f\xdd\x3d\x77\x51\xc3\xdb\x73\x68\xdf\xc3\x75\x20\x47\x72\xb7\xdf\x3e\xe7\xf0\x5c\xb1\xcb\x72\xba\x85\x04\xfd\xba\x3d\x28\x9f\x1b\x81\x7e\xd6\x83\x0b\xa4\x4a\x65\xd0\x6d\x5d\xf1\xf6\x07\x29\x56\x75\xe5\xc8\x23\x42\xed\xb9\x1d\xe6\x53\x6a\xde\x50\x89\x8a\xbd\x49\xfa\xb8\x69\x5a\x63\x47\x85\xa3\x3f\x9f\xb7\x98\x6e\x3a\xab\xdd\xb7\x4b\xde\x32\x5e\xa4\x33\xec\xda\xb7\xd2\xb6\xbd\x8d\x67\x76\xff\x2c\xed\xb7\x87\x55\x9e\x2b\x16\xab\x53\xb0\x1b\x9a\x3a\xba\xf0\xbc\xc4\x9e\xed\xea\xd3\x44\x8f\xcd\xed\x8c\xc2\x07\xcc\xb2\x34\x4d\xdd\xce\x5b\xdc\x04\xf6\x48\x42\x8a\x9f\x5d\xb2\x4c\x8e\xea\xb8\x5a\x3e\xb3\x7e\xc0\x65\x59\xcb\x71\x54\x27\x92\x4d\xea\x8c\x7c\xbd\xc8\x2e\x30\x26\x80\x8c\xd1\xa5\x37\x53\xf0\xd9\xac\xdd\x11\x2c\xcf\x8a\x38\xb0\xf0\xb8\x01\x11\x12\x1c\xb0\x81\xec\x4b\xcf\x6d\xd5\x5e\xb7\x21\x3b\xc6\x83\x7e\xe0\x15\xcf\xed\x0d\x41\xb2\x81\xe9\x54\x08\xff\xb1\xb5\x0a\x67\x6d\xd9\x67\x91\x8a\x31\xf0\x26\x01\xe7\x7d\x8c\xd9\x19\xae\xa3\x00\xe5\xfb\x6d\x83\x4
1\x31\xde\x3b\xa6\x33\xb9\xde\xa0\x95\x4a\xb8\x89\x4f\xbb\x20\xf3\x2e\xce\xae\xc3\xd7\xd0\x6f\x77\x1d\x5f\x9d\x99\x3a\x1e\x0f\x02\xf2\x4e\x61\xde\x47\x95\xdf\xf5\xf9\xf9\xe6\x0a\xe9\xe1\xee\xfd\x94\xc0\xa3\x45\xd3\xed\xda\xe8\x18\xea\x56\x45\x77\x80\xde\xdb\x72\x7b\x92\xc3\xf4\xea\xa8\xe8\xf9\xbb\x5a\x1d\xc2\xbc\xb1\x23\x0e\x67\xf5\xb7\xb6\xdc\x08\x53\xf1\x28\x77\xf2\xf1\x06\xce\xaf\x71\x96\x08\x53\x1b\x46\xe6\x79\x7c\x57\x43\x72\xcc\x7a\x15\x2e\x63\xcd\xe0\x51\xd1\x0c\xc2\x01\x94\xf8\xbc\x27\x27\x66\xde\x3f\x1b\xc7\xed\x11\x38\xd6\xf9\xf6\x31\x3f\x62\x91\x17\x48\x05\x9b\xd7\x6a\xe5\x04\xfe\x1a\x3e\xc8\x5b\xd0\xfe\x25\x5a\x9a\x6f\x12\x10\x0e\xf7\x29\xe9\x0c\xeb\xb3\xe3\xc9\xc2\xf4\x99\xf7\xa0\x4d\xf6\x38\xe5\x82\xa7\x91\xdb\x53\x3e\xdc\x0e\xfd\x10\x4e\xc8\x86\xe2\x3f\x55\x0c\x2c\xf4\xc7\xda\x15\x78\x22\xd7\x03\x46\x3f\x52\x27\x5f\xa1\x7d\xde\x88\x28\x98\xa4\x07\x08\x7f\x9e\x83\x13\xbe\x9e\x14\x92\xa3\x23\x7a\xa8\xd6\x02\x97\x9f\xb1\x5a\x25\x54\xfe\xbf\x91\x36\xd8\xc0\xe1\x0b\x13\x8e\x90\xb0\x75\xdd\xe6\x81\xc0\xf7\x95\xb0\x40\xc7\x7b\x8b\x91\x25\xc4\x7d\xdd\x8d\xc5\x3b\x21\xf3\xcb\x0d\x83\x82\xda\xb1\x13\x25\x42\x14\x19\xde\x5f\xb7\x4b\xfb\x46\xa0\xfb\x37\xa1\xd4\xa5\x8f\xf5\x62\xfd\xd2\xbc\x37\xd7\x11\x63\x93\x35\xa9\x0d\x7b\x43\xfc\x09\xad\x97\x38\xb2\x64\x60\x95\x0d\xf2\xa3\x90\x2a\x3e\x02\xe8\x4f\x68\x7e\xac\xee\xad\xe5\x00\x82\x67\x67\x68\xfb\x37\x1f\x7a\x39\xaf\xd1\x8a\xb0\x35\x58\x84\x74\x9f\xf6\x2c\x9c\xff\xac\x8d\x83\x4a\x78\x6a\xd6\x00\x3e\x43\x00\x03\x9b\xdb\x5f\xa0\x38\xea\xb5\xe9\x92\x1c\xb4\x63\x36\x1d\x90\x1e\x94\x0d\x78\xbb\x67\x6d\x9b\x20\x12\x4c\xe7\xc8\x7d\x48\x8b\x3c\xc8\xbb\xe3\x4c\xb1\x8e\x2c\x67\x74\x8b\x82\xb4\xe6\x19\x5e\xe8\x27\x2f\x15\xea\x3f\xb9\xa1\xf9\x93\x59\x81\xf5\x46\x20\xf5\x12\xe2\x3e\xda\x98\x50\xb4\x1b\x59\x4a\x2b\x1d\x66\xf7\x85\x7f\xfc\x3f\x99\xc1\xfb\xeb\x03\x91\xfc\x83\x5b\xe4\xaf\xa2\x0d\x0c\xe3\x54\x79\x65\xeb\x33\x63\x9a\xac\x33\xcf\x55\x4a\x24\x2c\xe4\x2a\xd1\xe0\x8c\xee\x1a\xb6\x12\x39\x0a\x64\xd1\x90\x3f\xbf\xfd\xd4\x70\xac\xfe\x6a\x4e\x62\x70\x60\xc9\xc9\x7a\x11\xa2\xf9\x3f\x01\xe0\x3f\xf7\x80\xef\x9f\x81\xfc\xc7\x34\x33\x5e\xfe\x52\x71\xe6\xb4\x0c\xb9\xb3\xfa\x37\x90\x39\x2a\x59\x24\x49\x7f\x05\x22\xcf\x5e\x8b\x41\x10\x2a\xce\x40\x6b\xc6\x5f\x95\x90\x15\x33\xb2\x2f\x44\xc5\x4b\xfb\x75\xc6\xd5\x04\xaa\xbc\x6f\xdd\x8f\xa0\x07\x3c\x6e\xe3\xf8\x88\xd7\xdb\xed\xaf\x36\x2b\x57\xa0\x2a\xd3\xa8\x8d\xc5\x05\xbd\x08\xb3\x08\x4a\x3f\x9f\x7f\xe5\x41\x3e\x01\x20\xec\x3b\x63\x01\x77\x2c\x7a\x0a\x17\x67\x21\xb7\xc7\x21\xd7\xf8\xb0\x93\x07\xc1\x20\xb4\x9d\xdd\xe3\xda\x23\x9b\x99\x9f\x24\xff\x94\x15\x0c\x86\x49\x91\x8f\xc0\xf6\x30\xd8\x53\x72\x56\xfb\xad\x52\x5c\x88\x0c\x7c\x04\x27\x12\x94\xff\xb8\xcf\xab\xcc\x51\x96\xeb\x9f\x6f\xe4\xeb\xf0\x70\x8a\xc2\xee\x66\xe4\xfe\x9d\x31\xe5\x2a\x6d\x00\x4f\x6d\xce\xed\x63\x0a\x43\x30\xf2\xfc\x2a\x97\x9f\x0e\xc8\x5c\x8f\xa0\xb4\x24\x36\xc1\x79\xfc\x96\xc6\x83\xc5\x73\xd6\x05\xb0\xf6\x2b\x58\x07\xc8\x0d\xc9\x11\x28\x8b\x97\x47\x6d\xfb\x4c\x64\x29\x34\x32\x52\xb7\x71\x45\x98\xff\x91\x58\x22\x70\x0e\xa8\xf0\xfc\xd9\x3f\x45\x69\xb8\xa8\x04\xa2\x0f\xec\x12\xd4\x42\xe9\x62\x14\x5f\x00\xa1\x12\x14\xb8\x08\xed\xd5\x20\x6a\xc9\x3f\x14\xad\x06\xc8\x98\x65\x01\x90\x74\x29\x10\x40\x4e\x01\x5f\xde\xc3\x87\x7e\x73\xd3\xa7\x05\x0b\x98\x2a\x8e\xcc\xca\xa8\x67\xeb\xfb\x63\x00\x13\x15\xe0\x0a\x0a\x40\x47\xe4\xff\xf1\x16\xd6\x1f\x5e\xbe\xd2\x1a\x9f\x1d\x5b\xfb\x08\x06\x80\xa0\xe6\xc5\xad\x08\x09\x6b\x2f\xd5\x87\x17\x52\x8c\x09\x1a\x83\x3c\x1e\xd9\xa7\xe4\xa0\x49\x14\xe8\x14\x87\xcc\x71\xb1\x7e\x9e\xea\xaa\x01\xc8\x03\x79\x00\xf5\x37\x47\x
a0\x97\xb0\x33\x98\xa5\x28\xe1\x6c\x00\xbe\xf3\x56\x60\xa0\x3b\xfd\xdf\x4d\x1d\x68\x1a\xe1\xfe\x0f\x40\xbb\xf5\x27\x62\x0d\x86\x2c\x80\x9d\xf3\xa4\x2c\xc8\x99\xed\x18\x5e\xe2\x0f\x3c\xfd\x2e\xe5\x89\x8c\x7b\x37\x6e\xd5\x78\x15\x54\xb3\x61\x84\xfe\x18\x5f\xb4\x94\x54\x86\xef\xe0\xa6\x23\xc9\x78\x04\x9d\xe0\x41\x36\x81\xce\xdc\x79\x05\xc4\xf4\x8f\x03\xb5\xad\x3b\x82\x03\xe6\x11\x40\x06\x39\x72\x58\x43\xc2\xdd\xf9\xb0\xc9\x7e\x93\x68\xc2\x9f\x0f\x5d\x16\x7b\xb8\x7d\xb3\x1c\x15\xb0\x8f\x51\x2b\xc0\x54\x73\x8b\x69\x18\x99\x01\x57\x58\xd8\x9f\xda\xee\xce\x20\xa7\x12\xb4\xe5\xc2\xc3\x2d\x51\x0d\xe4\x0e\x03\x74\xd0\xfc\x43\xbd\xaa\xa5\x77\x6e\xe3\x6f\xfa\x24\xe9\x40\x6f\x1a\xcf\x66\xd7\x13\x0a\xf6\xc1\x25\x37\x7c\x79\x96\x63\xd6\x0d\x9b\x1f\x86\xa1\xaf\xee\x6e\xf9\xdc\xf2\xfb\xe6\x51\xef\x9f\xf0\x75\x3c\x6f\x77\xcc\x0f\x7f\xfe\x9e\x1d\x87\x07\xf7\xb7\x01\xf5\xcb\xcf\xe9\x9c\xc3\x3e\xa7\x5b\xde\xc4\x25\xac\x79\x1b\xff\xcc\xfe\xa5\x3f\x83\x36\x84\x13\xb9\xf1\xfe\x83\xea\xd5\xbe\xa9\xf0\x37\x48\x0a\x39\xef\xe0\xc4\x9a\x68\x18\xf0\xbf\x73\xf5\x79\x99\x4a\xcc\x44\xc0\x59\x70\xca\x03\x20\x0e\xfe\x69\x25\x75\x22\x14\x1c\xbe\x68\xd0\x3f\xc4\x5a\x08\x71\x6b\x38\xd8\x3c\xc8\x5e\x00\x2e\x5a\xec\x85\x9d\x9b\xfe\x1c\x03\xbf\x34\x49\x95\x08\x8a\xd6\x9b\x6c\x84\xbd\xfe\xd7\x9c\x13\xba\x29\xa0\xac\x59\xc8\x9d\x32\x6b\x1c\xd0\x11\xf5\x73\x00\xbf\x2a\x24\xaa\xb1\x5c\xe9\x5d\x0f\x5b\xd5\xe4\x5f\x85\x79\xcc\x08\xde\xf2\xe5\xbf\xd9\x0a\xd5\xf3\xd5\xba\xda\x8d\x95\x40\x79\xfe\x4b\x2d\x9a\x0c\x22\x1b\xa1\x84\x25\x44\x21\xbc\xaa\x58\xef\xb2\x96\x9d\x92\x8d\x77\x51\xf7\x45\x1a\x37\x15\x2d\x1b\xbb\xd0\x98\xce\x84\xb7\xe7\xeb\x9c\x81\x28\xf4\x28\x51\x7f\x7a\xe3\x54\xe5\x65\x9d\x8c\xa3\x5f\x5f\x5b\x26\x43\x22\x51\x44\xe3\xc5\x1f\xff\x4b\x8e\xda\xf7\x3a\xfb\x4b\x59\xe8\x6b\x7c\x53\x80\xfa\xd8\xb9\xea\xf6\xaa\x26\x82\x2f\xb9\x9e\xbd\x46\xe8\x4e\x7e\x3c\x7a\x91\x44\x71\x51\xe7\x1f\xeb\xf0\x7b\xd2\xbe\x26\x35\xbe\x4d\x7a\xc2\x5e\xd6\x03\xbf\xdc\x60\xd0\x1b\xdb\xc4\x93\x85\x86\x89\x16\x4d\xc0\xa3\xf0\xcc\xf7\xf7\x1c\x59\x57\xf9\x3d\x55\x15\xb6\xa0\xcf\x44\x49\xa2\xb5\xfb\x5a\x34\x90\x38\xec\x82\x7e\x8b\x8f\xbe\xbf\xd9\x66\xf8\xf9\x21\x41\x6b\xbf\x58\xbf\xf9\x75\x79\x89\xea\xd7\x72\x82\xb7\x64\x76\x7f\xb3\x26\x69\xbf\x99\x7a\x1f\xb6\xe5\x74\xc4\x82\xfd\xd4\x2b\x5e\x7e\x02\x13\x83\xcf\xdc\xaf\x3d\x6a\x36\x29\xa2\xa8\x35\xa5\x9d\x5c\xc8\x9a\x70\xd5\xdf\x24\x98\xc1\xaf\x5b\x34\x29\x18\x42\x60\xc7\x65\xae\xd9\x29\x8c\xff\x16\xef\xbd\x50\xdc\x0d\xd6\x2b\xb4\x26\x79\x95\xc4\xe4\xf0\x08\x37\x00\x77\x6e\x5b\x95\x2f\x66\xfc\xc5\xe8\xb0\xbc\xb9\xb4\x41\x4e\xd8\x90\x69\x1e\xb5\x35\x27\xbd\xff\x25\xae\xc3\xbb\xa8\x7a\x3e\xba\xdd\x60\xaa\x1c\x07\xf4\x70\x92\x64\xe7\x9a\x4d\xec\x7e\x90\x00\xd2\x7c\x39\x52\x8c\xff\xc9\x44\xd1\x42\x31\xb2\xb6\xd6\x5c\x17\x71\xc7\x65\xaf\xba\x9a\x16\x43\x2f\x40\x27\x42\xa7\x95\x78\x0f\x67\x7e\xe3\x62\x77\x00\xdf\xe6\xac\x12\x1d\xfa\xf4\x12\x7d\x65\x75\x60\x41\x25\xd5\xac\xcd\xd5\xb6\x7d\x0b\x09\xc8\xca\x4a\x01\xf5\x23\xc8\x22\x80\xf0\xed\xf1\x4d\x2d\x14\x3a\xf9\xc6\x16\xe0\x3b\x8f\x6c\x76\x74\x6e\x37\xfb\x05\xbe\xa6\xb1\xc0\x0e\x5e\x47\x88\x16\x7b\xdc\xc7\x45\x50\xa3\x1e\x36\x15\x51\xd1\xa5\xd8\xed\x27\xf2\x72\x68\x9b\x44\xb8\xfd\x33\xba\x6f\x2d\x63\xd4\x08\x27\x6a\xb5\xdb\x1d\xbf\x8a\xe9\x70\xad\x40\xaa\x43\xda\x95\x2f\x61\x87\x3b\x65\xcc\x41\x0a\xe1\xbd\x02\xe0\x93\x6f\x6c\x28\x1e\x3d\x98\xa9\xed\x32\x9c\x0e\xc2\x91\xef\x2e\x28\x20\xe4\x45\xc0\x6d\x5b\xcc\x0c\x54\xbf\x29\x09\x27\xce\xc7\x12\x2f\x34\x36\x3b\x6b\xbe\x1b\x08\x00\x80\x85\x70\x9f\xf7\xb3\xdf\x82\x5e\xc3\xb8\x95\x61\xf4\xee\x07\xf6\x9e\x0e\xb7\xe0\x59\
xda\x85\x57\x4a\xbd\xab\x49\xee\xc4\xe8\xfc\x39\x32\x40\xc6\x17\xfd\x0a\x07\xcd\xfe\xb0\x6e\x72\x04\x36\x79\x00\x89\xde\xfe\x6e\xcb\x7e\xf8\xb5\xa1\xc6\xed\xaa\x75\x40\xe4\x1c\xe5\x49\x88\xa7\x35\x9a\xd0\xb3\x8c\x31\x69\xc8\xcb\x76\x91\x39\x62\xf9\xe7\x53\x1d\xfa\x90\x59\xf4\xb6\x14\x7d\x1b\x25\x11\x0e\xba\x88\x2f\x7c\xa8\xd9\x61\xae\x13\xfc\x92\x0c\x19\x17\x4e\x9e\x07\x77\xac\x3f\x2a\x59\xa4\x42\x75\x06\xaa\xa1\x3d\xc8\x12\xd1\xc4\x85\x61\x93\xd4\xe8\xc7\x43\xf2\xdd\x8d\xcf\x0a\x7b\xfb\x81\xc3\x24\xa7\x3f\x2f\x9a\x2c\x5a\xa0\xad\x33\x6e\x96\x91\x2f\x83\xcd\xe0\xdb\x60\x67\x80\xdd\x3f\x55\x7d\xef\x22\xb3\x33\x8b\xfc\xb2\x3a\x29\x24\x4c\xd7\xfb\x4d\x5a\xa8\x3d\x08\x5f\xdd\xa8\x22\x68\x5f\xd5\xf6\x7a\xe8\x04\x0f\x25\x7a\x79\x58\x2f\xaf\xf5\xa3\x93\x13\x80\x7a\x37\x7b\xba\x65\xc0\xbb\x91\xd4\x3b\xca\x85\xef\x54\x14\x57\x51\xa2\xdf\x94\x0e\xbb\x4c\xb8\x4e\x37\x75\xe4\xc8\x58\xd4\xd8\x65\xd0\x27\x15\x67\xf2\xf5\x48\x5c\x3a\x05\xda\xf2\xcc\x4b\xb7\x4b\xda\xa5\x2f\xba\xd2\x3d\x98\x51\xec\xd4\xfa\x29\x81\xb2\xe0\x4c\xbc\x1e\x02\x1c\x7e\xa6\x40\x4d\xbb\xbe\x5b\x0f\x85\xf9\x2f\xde\x88\x1b\x94\xf8\x49\x83\x8d\xe2\x1c\x93\x3c\x92\xef\xd7\xa7\x3f\x4f\xe9\x6c\x8b\xb5\x2b\x3f\x3c\x6d\xe4\x14\x5a\xd0\xf0\x76\xec\xd3\xd4\xdf\x96\x18\x3d\x46\x03\xa8\xc3\x7a\xfc\x74\x2e\x76\xff\xc3\x79\xe9\xf3\xb2\xd1\x0d\x5b\xac\xdb\xfa\x42\xd1\xf3\x54\x81\xdc\x66\x91\x61\xca\xf5\x0c\xfb\x7a\x0f\x48\x7d\xd8\x80\xd9\xac\xea\xe2\xbe\xae\x36\x4a\x85\x08\xc2\xe2\x9e\x99\x58\x90\x35\xc5\x7a\xba\x23\x23\x0b\xaf\x1e\xdb\xb7\x85\xb2\xf8\xd9\x9d\xe2\xab\x8a\xc9\xe2\xa1\x3d\x2e\x9f\xa4\x6c\x1d\x8d\x76\xe2\x2f\x89\x38\x91\x90\x07\x61\x46\xf2\x12\x9f\x31\xf9\x78\xc6\x93\xef\x34\x17\x91\x5c\x7a\xad\x84\x3f\x65\x93\xf7\x9c\xc2\xa8\xd4\xa2\xe7\x33\x76\x56\x81\x03\x90\x5b\x8e\x5d\x18\x8b\x27\x38\xa1\x64\xb4\xd8\x44\x28\x54\xc2\xc1\xef\x2c\x97\x4e\xd5\x66\x3c\x2e\xa1\x89\xb5\x36\xc8\x5c\x7a\xb8\x89\x5c\x72\xaf\xc8\x80\xe4\x02\xaa\x1e\xd9\x31\x64\xcf\x40\x14\xa2\xf2\x63\x5a\x2f\x3a\xf8\xa0\xfe\x0c\x99\xc8\xa7\x94\xd1\xa5\x55\xf7\x84\x14\x16\xfb\xda\x27\x3d\xc2\x9e\xb7\xc1\xda\x33\x34\x5c\xa5\x58\xde\x31\x1c\x58\x94\x42\x94\x63\x2d\xf6\x29\x3c\xf9\xf3\x3e\x23\x54\xa8\xd4\xd9\x3f\xd3\x28\xf1\x72\x8c\x8f\x64\x53\x5c\x36\x45\x56\xd0\x4f\x01\x33\xb0\x06\x99\x18\xff\x7c\xec\x29\x0c\xed\xc2\xdd\x8a\xb9\xd6\xb6\x39\x2c\x07\x35\x12\x5d\x46\xf2\x56\x80\xf1\x1e\x53\x55\x99\xce\x6f\x09\xed\x6e\x6c\xd9\x45\x8b\xc5\x17\x18\xb6\x5c\x1c\x44\x41\xdc\x3f\x64\x72\x9c\x01\x1b\x21\xc8\x36\x9f\x7d\x48\xf2\xe4\x93\x7a\x8b\x58\xba\x29\xb2\x81\x8d\xf8\xe1\x4c\x18\xef\x22\x82\x81\x82\x75\x46\x97\x92\xca\xe0\x4a\xf0\x4f\xb5\x67\x41\x9f\xb5\x4d\x92\x35\xe7\xd5\xb2\x71\xef\x20\x91\x1b\x11\x7b\x9c\x2f\x5e\xcc\xfe\xf8\x3f\x34\x19\x65\xd0\x58\x75\x72\x59\x43\x18\xcb\x9e\xfb\xf8\xad\x15\x8f\xd4\xd4\x25\x38\xb2\x34\xd3\x8f\xba\xee\x69\x83\x81\xee\x79\x8b\x82\xd4\x32\x7f\x55\x85\xb2\x9b\x29\xef\x8c\x86\x6b\xe7\x0d\x91\x67\x39\x0e\xba\xb2\xba\x57\x41\xed\x85\x9d\x5a\xb3\x7c\x1f\x52\x68\xd8\xd3\xf3\xe2\x00\xeb\x10\x98\x45\xf7\xab\xf6\x4a\x51\xee\x2f\x98\x67\x91\x3e\xf4\xb4\xfb\xd5\xcf\x96\xab\x60\xbe\xaf\x1d\x84\x36\xb3\xc7\x11\x9c\x98\x47\xe5\xd7\xb8\xfa\xf5\xb9\xba\x26\x16\x6f\x67\xb6\x82\xa8\x60\x63\x04\x59\x13\xd0\xb0\xcd\x9d\xb8\x0f\xe7\x53\x87\x56\xac\x6d\x27\x8e\x5c\x77\x2a\x61\xee\x4e\x97\xf6\x7d\x04\x1d\x67\x51\x5a\x82\xaf\xe7\x3a\x6d\x17\x5d\x55\xb7\x91\x5c\xea\xea\xeb\x24\xe5\x8c\x5c\x1d\xec\xb6\xf4\x69\x42\x8d\xc5\xa9\x3a\x73\x65\xe5\x04\x13\xba\x2b\x95\xd8\xe3\x44\x00\xba\x8f\x76\xb0\x0e\xb8\xc2\x3a\xa2\x2b\x35\x37\xee\x28\x43\xcb\x63\x96\x9f\x83\xb3\xb5\xae\x78\x39
\xdd\x59\x3c\x55\xd8\xa4\x2b\x1a\xbd\xbb\xd2\xb7\x44\x8d\x79\xa3\x75\x19\xd3\x38\xa6\xad\xa9\x3f\x63\x7f\x2a\x9d\xf9\x43\x32\xc6\xf8\x56\x41\x1c\x12\xf0\x62\xf4\xec\xb0\xfa\x4a\x9b\x1a\xff\x91\x80\x87\xf1\x5c\x66\x3c\x8d\xaf\xba\x3f\x6c\x65\xea\x8e\x01\xce\xe2\x49\x31\x58\xb6\xc1\x0e\x1a\xea\x21\x79\x89\xe2\x8e\xcb\xc2\xfb\xf2\x67\x30\x79\x76\xad\x63\x82\x35\xf4\xd6\x04\x40\x85\x95\xb7\xce\x96\xb6\xc6\x36\x2a\x6b\x38\x04\x21\x28\x58\x42\xe8\xd8\xea\x17\x85\x82\xfb\xf8\x2b\xfa\x92\xab\xc7\x93\x94\x62\x33\x83\xa2\xaf\x6c\xe3\x78\x67\x07\x55\x12\x10\x11\xc0\x08\xf8\xe7\xa0\x07\x29\x6b\xcd\xca\x3a\xe5\x50\x98\x11\x8c\xe2\x9f\x4a\x36\x3e\x38\x39\xce\x89\x48\xa2\x0f\xb9\x1e\x2d\xa3\xfc\x96\x11\x7e\x97\xa1\x59\x59\x19\x46\x2e\xc2\xc7\xfd\xf0\x10\x64\x04\xd9\xdf\x2d\xc4\xda\xb1\xce\x52\xd1\x77\xde\x8a\x9c\xa6\x23\xec\xb0\x4a\xcd\xf7\x89\x5b\x5f\x7f\x2b\x3c\xd1\xa7\xb8\x5e\xe1\xf5\xf8\x08\x3e\x91\xee\x08\xd7\x97\xe7\x39\xda\xaf\xb2\x52\x7e\x1f\x61\xec\x2c\x5d\x5c\x9d\x76\x74\x90\xb8\xef\x4f\x92\xff\x4f\x9f\x4e\xcc\xa0\x73\x1d\xaa\xfe\x7c\xde\xc9\x4d\xf3\x2e\xc9\xbf\x26\x81\xe0\xc1\x20\xe6\x53\xe4\xff\x04\xb2\x1f\x2b\x25\x51\xac\x6d\xcf\xf6\x8c\xce\x08\x3c\x9f\x4d\x69\x4c\xbb\x5d\x50\xef\xa9\x7d\x60\x8b\x95\xe0\x89\xaf\x94\x4f\x17\x7c\x1f\x02\x4e\x3d\x20\x81\x00\x48\xd3\xd0\x5b\xdf\xfc\x2f\x6c\x51\x3f\x10\xbb\x07\xc8\x7a\x9d\x11\x62\x7d\x0a\x7a\xac\x08\x25\xc0\x6d\xcf\x7f\x9d\x2f\xb8\xba\xbd\xe8\x43\x91\x0e\xfb\x2a\xe7\x21\xca\xc9\x3b\xfe\x30\x30\x85\xae\x6f\xd2\x10\x9e\x9e\xe9\xf6\x37\x68\xcb\x66\xd3\xe4\x20\xa5\x6c\x6b\x65\x1a\x6d\xdd\x2d\x1e\xbf\xee\x84\x56\x81\x1f\x74\x4e\x81\xf2\x45\x0b\xeb\xbd\xc1\xc8\x81\x26\x7e\x0c\x8b\xd2\xc1\xbf\x09\x43\x10\xf2\xaa\xab\xb2\x6d\x30\xa7\xca\xf2\xb4\xd1\x34\xb8\x47\x0e\x65\x4a\x15\xcc\x88\x3d\x5e\xe2\x00\xdb\xa7\x4c\xf5\x84\x97\x6e\xfc\x3e\x86\xad\x20\x0f\x75\x63\x0c\x69\x4e\xf1\xd9\x6f\x4a\x4f\x58\x47\x23\x1e\xa7\x4a\xfc\x19\x45\x5c\xb2\xc8\xb2\x4d\xc5\x28\x48\x4b\xbc\x7e\x2e\x9d\xce\x2f\x24\xf1\xd5\xd2\x83\xd3\x3a\x8a\x87\x41\x33\x86\xf2\xba\xcb\xd9\xeb\x7d\x20\x8b\x54\xfd\xfa\x29\x6d\x95\xee\x53\xa9\x56\x36\xb2\x6a\x86\xd6\x42\x58\x84\x5f\x3d\xc1\xca\x81\xe2\xf2\x53\xda\x83\xe3\x03\x66\xef\xb0\x55\x11\x5d\xdf\xa1\x8a\xb9\xed\xf9\x9f\x6f\xb0\x98\x4e\xb6\x0d\x96\x38\xef\x64\x98\xfa\xf6\x36\xa8\x5e\xdf\x0d\xb5\x6c\xde\x8f\x64\xcf\x05\xf8\x91\x68\xfa\x30\x42\xbd\x7b\x8e\x56\xa0\x19\xe9\xb7\x1e\x9c\x20\x2c\x41\x18\x6b\x61\x8d\x2b\xc2\x56\xba\xf7\x76\x7f\x4a\xa3\x7f\x48\x7f\xdc\x9c\x6f\xf4\x95\xda\x47\xbe\x50\x12\xea\xb1\xfd\xca\xeb\x30\x58\x96\x10\xca\xc7\x83\x38\x53\xa7\xc6\x5a\x2f\xea\x74\xb7\x44\xce\x58\x5f\xb7\xab\xb3\x27\x0b\x4b\x21\x54\x0c\x07\xe6\x22\xd6\x40\x67\xc9\x3f\xdf\xe2\x66\xfb\xe2\x01\xbd\xb3\x57\xf7\x5a\xcc\x35\x71\x77\xf8\xfc\xb4\x05\xd7\xba\xf4\xa7\x7e\x48\xe2\xd3\x04\xf8\x00\xa1\x48\xe4\x26\x9b\xe3\x8b\x62\x24\x0e\x19\xbc\x53\xb3\x98\x48\x6b\x98\x3b\xa4\xb5\xea\xe4\x73\x08\xb1\x6d\xb0\x8a\xde\xa9\xda\x5c\x48\x7a\x3f\x4e\x60\x49\x5d\xa8\xf9\x7b\x99\x5e\xf1\x34\xc9\x0f\x0c\x58\x99\x26\x60\x0c\xd6\x88\x99\x80\x85\x3a\xb4\xc1\x8f\xd0\x5c\x55\x04\x16\x7c\x24\xb2\x90\x0e\x67\x74\x3b\x1f\x91\x0d\xce\xf4\xf7\x07\xa9\x4a\x3a\x1a\xf3\x6d\x51\x55\x40\x2e\x24\x31\x89\xa4\x9c\x13\x62\x35\xce\x00\x3a\x5f\x09\x42\x90\x1e\x8e\x00\xec\x78\xd4\x89\x0c\xe4\x3a\xfc\x22\x2f\x49\x79\xbf\x34\x9c\x96\x36\x5b\xc0\x73\x1a\x49\xdf\x36\x40\x3e\x51\xb1\xa7\x01\x2e\x2f\x08\x4e\x89\xea\x59\x4e\x2d\xe0\xf6\x55\x5f\x12\x67\xad\xd9\xbe\xec\xba\xd0\xe0\xde\x72\x31\x31\x74\xc9\x70\xdd\xc1\x4d\x61\x51\xba\x81\xf1\x74\xcf\xa8\xe8\xa8\x22\x4f\xa7\xa6\x2c\x8d\x98\x56\xbc\x07\xa1\x2
9\xce\xad\x5d\x01\xbf\x6c\xd3\x29\xd9\x95\x06\x0f\x79\x58\xd0\x36\x53\x55\x6e\x43\x38\xdf\x61\xb7\xc7\x02\xb6\xb7\x86\xb0\x22\x85\x36\xd9\x1f\xaf\xed\x34\x8a\xa5\x6d\x13\x4b\x9e\xa2\x3d\xf2\x07\x0f\xb2\x81\x98\x77\x43\xe3\xe5\x7b\x7e\xf3\x28\x83\x81\x91\x76\xb5\x2d\x63\x24\x0c\x2d\x0f\xe5\x2f\x3c\xfe\x92\xa2\x35\xf6\xf2\xb1\x6a\x50\x05\x7b\x38\x71\xca\xd1\x9c\xbe\x2f\x6f\x3d\xdc\xc4\xc4\x9e\xda\x50\x9f\xe7\x7e\x12\xa9\xd4\x70\x6a\x4f\xf3\x25\x21\x65\x71\xe8\x28\x35\xd4\x47\xf5\xef\x12\x7d\x4a\xef\x0a\x2b\xe7\x26\x7d\xfc\x8c\x1c\xb4\xc0\x1d\x47\x5d\x27\x45\xac\x39\x69\x63\x19\x43\x58\x23\x58\xd8\x84\xed\x44\x83\x65\xc3\x2e\x58\x56\xbc\x46\x69\x6b\x94\xf1\x38\xb7\xaa\xd5\x33\x0b\xdb\x38\x79\x41\xdb\xdb\x65\x3a\x30\x56\x26\x5c\xea\x07\x2e\x4e\xe2\x69\x0a\x6f\x74\xc4\x00\x85\xbf\x14\x77\x1b\xd7\xcb\xd2\xe5\xb5\xb0\xf4\xf1\xdb\x05\x65\x4c\x4c\xbd\xe8\x19\xd0\xde\xbc\xaa\xc3\x21\xcf\x24\xcb\x78\x4c\x4a\xcc\x01\x8f\xed\x4b\x91\x1c\xaa\x2c\x19\xcf\xdc\x0e\x48\x30\x5b\x1e\x18\xcc\x9b\xfd\x9a\xf9\x83\x7a\x8f\x65\xf7\xc0\x10\xf2\x93\x58\xe6\xd7\xb6\xda\x11\x60\xb2\xa7\x6f\x0d\x32\x2c\x16\xcc\xda\xea\x50\x9e\xb6\xcd\xa9\x37\xdb\x4c\x5d\x65\x12\x7f\xb6\xd9\x99\x94\x0f\x39\x13\x90\xd6\x05\x3c\x7c\xf0\xc1\xa2\x11\xa7\x81\x19\xd0\x22\x0d\xfe\x95\xf7\x3e\x6a\x58\xd9\xd1\x79\x8f\x70\x2d\xf8\xab\x64\xad\x11\x78\x56\xbe\x15\x12\x8a\x30\x9c\x8e\xd7\xac\xbf\x66\xa6\x05\x62\x2a\x19\x5b\xa1\x5c\xe2\xe2\x66\x62\xdd\x26\x2c\x13\x0f\x22\x8d\x2d\xe1\x62\x6d\xd3\xb8\xb3\xea\x64\xa5\x19\x4b\x3f\xa2\x5e\xe5\xe9\xe4\xa7\x4e\x10\x64\xb1\xab\xe6\x1f\x32\x35\x80\xc0\x93\xc7\xf5\x9b\xc4\xc2\x92\x1a\x3a\x59\x58\x1e\xf0\xe0\x2a\x74\x6f\xf7\x4a\x75\x6e\x90\x95\x42\x5f\x3d\xb3\x53\x75\x82\x4f\x3d\x0a\x7e\x91\x86\x90\x84\xa5\x18\x20\x2a\xcf\xce\x3d\x9a\xe9\xff\x82\x18\x68\x16\x92\xa8\x2a\xb3\xea\x9c\xef\x5f\xab\xe5\x3c\x25\xd2\x09\xb4\x27\x81\x33\x2c\xf6\x42\x03\x2f\x3e\x39\x58\x7e\x6a\x8d\xd8\xea\xa9\xf1\x6e\x86\x91\xdf\x4b\xcd\x82\x42\x7e\xd0\xb4\x9c\xe4\xa1\x2b\x87\x27\x44\xd7\x39\x39\xcc\x81\x46\x09\xce\xcd\x9a\xca\xe1\x00\x99\xbf\xcc\x15\xc7\xbf\xbc\xfd\x9d\xae\xa1\xf6\x97\x56\xa8\xf8\x3a\x41\x8b\xbe\xb7\x7f\xf0\x11\x60\x81\xe2\x3a\xfb\xb2\x4f\xa0\x6c\xa0\x51\xce\xe2\x9b\x0d\x90\xd7\x8b\x45\xd6\x2b\x7b\x19\xcd\x39\x5b\xdc\xda\x38\xc3\x15\x5d\xb0\xb7\xf0\x9d\x17\xdd\x1a\xae\x1f\x08\xdf\xbb\xfb\xc1\xcf\xbc\x03\x23\x68\x3b\xbd\xfd\xa0\x31\x83\x3c\x0e\xb6\x4a\x78\x76\xd6\xd6\x8f\x1b\x24\xd8\x7c\x56\x6f\x80\xfe\x15\xf9\x54\x45\x2a\x48\x1f\xef\xe6\x49\xec\x9a\x9e\xbc\xeb\x2c\xb2\x0f\x79\x79\xc4\x8f\xf5\xeb\xcf\x20\xf7\xf4\xe6\xfe\x60\x0d\x46\x93\xcc\x8f\x32\xf3\x4f\xac\x6c\xc3\xdb\xc1\x09\xb8\x72\x7a\xe8\x7b\xbe\x1c\x7e\x88\x16\xe1\xc6\xce\x74\xb8\xfc\x0c\xae\xdd\xfc\x20\x1f\x4c\x9d\xe1\x67\x08\x32\xd8\xc1\xcd\x96\x55\x78\xff\x78\x4d\xdc\x5f\x94\x85\xfb\x87\xca\x6b\xb0\xe7\x89\x62\xdf\x27\xf3\x8f\x72\x40\x45\x48\xfb\x5b\xab\x17\x65\xeb\xad\xbc\xc9\xf9\x5e\xa3\xbc\x20\x0e\xeb\x5c\xe5\x90\x53\x4d\x43\x3f\xc9\x87\xb8\x8f\xbc\x65\x91\x57\xe7\x4a\xf7\x9a\x14\x50\x14\xf4\x93\xc4\x89\x02\x0b\xd5\x57\x70\xf8\x0d\x2d\xb4\x79\x67\x0f\xe5\xfe\xfd\xde\xae\x97\xdd\xc7\xbe\x28\x0d\x66\x18\xb7\x07\x2f\xde\x9b\x3a\x0f\xb6\xe9\xb9\x7f\x82\x75\x12\x79\x58\x60\x2b\x79\x50\x1d\xd8\xde\xa3\x43\x9e\xff\x82\x17\xb6\xb7\x41\x2d\x63\xc5\x08\x5b\xdf\x94\x38\x6f\x71\xc7\x54\x71\x79\xe7\x8f\x7f\x8c\xfb\xae\x71\xc7\x76\x04\x9f\x8c\xda\x56\x6f\xcb\x6a\x8f\x14\xc6\xe0\x6f\x8d\x0f\xef\x7b\x58\x72\xaa\x58\xf7\x10\x4f\x8c\x42\xb3\x6f\x8c\x37\x2d\x91\xd3\x6f\x5c\x34\xbd\x19\x05\x62\x79\x66\xa4\xea\x0d\xe1\x49\x98\x53\xaf\xe8\x7c\x66\x37\x34\xcb\x29\x27\x8b\xa5\xc0\x
58\x47\xb7\x72\xa2\x17\x74\xda\x55\xa0\x34\x2d\xd1\xcd\xa2\x94\x7d\x6a\xe9\x17\x66\x17\xb9\x72\xb8\x58\x8b\x53\x38\x6b\xa7\x74\xbb\x49\x0b\xa3\x29\x03\x5f\x28\xa1\x73\x0e\xf0\x93\x8b\x12\x80\x55\x66\x3f\xc4\x3b\xf6\x4a\x73\xe5\x9b\x72\x36\xdd\x62\xa2\xe7\x42\x5f\xdc\xb0\xf5\x02\x34\x78\xf7\x12\x20\xdf\xee\x32\x26\xef\x78\x73\xe7\x82\x42\xb2\xd7\xb0\x46\x4f\x55\x1d\xb6\xa9\xd5\x10\x67\xd3\xc0\x9a\xc0\xff\xf6\x17\x96\x3c\x58\x13\x3d\xae\x2d\xc8\x5f\xd7\xca\xf1\xe1\xb8\xb8\x86\x71\x5c\xa0\x53\x23\xf8\x14\x09\x57\x42\x91\xd6\x5f\x3a\x49\x0f\x06\x2f\xad\xa3\x3e\xf7\x81\x6c\x5f\x9d\xe8\x71\x36\x90\xce\x84\x34\x0c\x5a\xbb\x8d\x08\x83\xbe\x0e\x4a\x61\xc9\xcb\x32\x16\xca\xd1\xe2\x64\x81\xe5\xd8\x48\x29\x3f\x20\x86\xbe\xb2\x23\x82\x63\x81\xf6\xf5\x50\xf9\xc9\x94\xe0\x85\xd3\xe3\x14\x07\x40\x64\x98\xb4\x36\x7a\x37\x29\x7c\x45\xd4\x08\x66\xe7\x27\x8e\xc5\xe7\x30\xcf\x40\x42\x78\x44\x24\x64\xaf\xff\xee\xa2\x92\x61\xe6\x1c\xa6\x1b\x80\xa4\x85\x19\x08\x5a\xad\x34\x5c\xac\x01\x3a\x13\xfb\xaa\x03\x55\x42\x59\x6d\x2c\xc2\x13\x1e\xf0\x6e\x03\x7a\x9f\x24\x38\x2e\xa5\x1c\xf8\x25\x3c\xf0\x89\x2a\xff\x12\xac\x14\x3b\xee\xf9\x40\xf5\x75\xe3\x4a\xdb\x31\xd6\x75\xf8\x63\x20\x49\x0c\x4b\xa1\xbc\x98\x43\xb1\x99\x44\x65\xdd\xd9\x24\x49\x14\x38\x4c\x24\xb6\x53\xb1\x4f\x3c\x74\x7c\x16\xf3\x6c\x48\x9f\xa0\xd8\x61\x7e\xc5\x80\xb4\xb6\x7f\x87\x2c\x45\x0f\xd7\x25\x2e\x32\xc0\x39\x5c\xb8\xdc\x03\x39\x93\xf2\xfd\x79\x76\x60\xf2\x91\xfe\x8d\x7e\x2b\xf2\x0e\x12\x10\xe6\x70\x64\xa4\x07\x50\x1e\xe1\x4b\xad\xac\xa6\x31\xad\xbb\x90\xef\xf1\x96\xf9\x49\x2f\x12\x4b\xf7\x6b\x71\x9b\x25\xef\x08\x6c\xfe\x06\x3b\xa2\x28\x11\xed\xd6\x4b\xb7\xf4\xe9\xc8\xf9\x70\xf1\x95\x9d\xad\x4e\x94\xb8\x54\x1d\x31\xd0\x25\x8f\xde\x49\x5a\xc8\xb3\xdc\x4f\x7e\x28\x90\xa0\x4b\xa1\x67\x01\x62\x77\xc8\x13\x31\xcb\x9e\x93\x54\xe9\x1e\xa4\xbe\xf1\xab\xed\xc6\x2d\x15\x5f\xbd\x9f\xf6\xbc\x93\x33\xe1\x72\x17\xad\xec\x39\x8e\x91\x18\x62\x71\xd9\x3a\x86\x5e\xe8\x90\xff\x07\xa5\x92\xf5\xf1\xb1\x6e\x6d\x51\x82\x7a\x0e\x26\xa4\xfb\x71\x62\x6d\x96\x1d\x31\x7c\x3d\x44\xc7\xb2\x87\xaa\x11\x35\xec\x14\xde\x0d\x8b\xc0\xef\xca\x89\xb3\x2b\x72\xd3\x8e\xdc\xb2\xb2\xaf\x26\x0f\xce\xda\xfc\x8a\xb1\x6a\x2d\xbe\x1f\x60\x40\xea\x8a\x2d\x96\xb3\x47\x08\x65\x40\x60\xa6\xce\xa7\x48\x7d\xd3\x79\x39\xa4\x97\x51\xfb\x22\x63\xb0\x92\xea\x72\x9a\xc9\xc0\x50\xab\x93\xf7\x9d\xc7\xcf\x24\x22\x9e\x18\x3e\xa0\xb3\xc9\x5a\x03\x33\x41\x89\xca\x02\x76\xa3\x7d\xad\x32\x0d\xeb\xd6\x09\x13\xdd\xbf\x64\xfe\x11\x47\xc1\xdc\x19\x6b\x22\x8b\xb9\x49\x88\x76\xc3\x8b\x60\x65\xe0\x9a\x8c\xb2\x92\x26\x65\x4d\x87\x97\xc3\x17\xcd\xb9\xbf\x04\xb8\xc0\x42\x71\x4e\x5c\xb3\x81\x7c\xc9\xd5\x95\x2d\x44\x87\xcb\xb9\x6b\x64\xe7\xe2\xea\x6d\x12\x6b\x01\xb1\xcd\x7e\x89\x35\x3c\x32\xd6\x24\x62\xdb\x32\x8a\x02\x0e\xbb\x11\x7d\x89\x8b\x80\x8f\xfd\x72\x88\xdc\x76\x0c\x36\xdc\x2c\x8c\x49\xf9\x43\x8a\xbb\x82\xea\xe2\x0d\x4b\x34\xf7\x44\x74\xeb\x1e\xea\x05\x3b\x16\x62\x15\x90\xae\xc4\x86\xdd\xd7\x72\x3b\x91\x12\x37\x8c\x46\x8e\xa0\xc3\x6d\xaf\xa0\x06\xec\xcb\x18\xba\x94\x58\xc5\xaf\x3b\x03\x6f\x01\x86\x9c\xd8\x79\xe0\xca\x91\x0d\xb7\x78\xc7\x17\xc7\x66\xbd\xcc\xb5\x64\x70\xe6\xfc\x01\x43\x2e\xae\x43\xa6\x4c\xcc\xfc\x3e\x55\xdf\xa0\xbd\xde\x5c\x0e\x51\x8f\xc8\x1e\x46\x3d\x00\x53\x02\x34\x04\x51\xf5\x3e\xf1\x86\x7e\xd2\xd7\x38\xb1\xe4\x60\x53\xaf\x29\xb3\xae\xbf\xbf\x12\xe9\x6e\x47\xa6\xff\x0a\x92\x94\xb1\x12\xe3\x7c\x5b\x25\xe7\xdc\x1a\x56\x77\x81\xdd\x01\xc1\x94\xeb\xf2\x4b\x80\xbc\xdd\x9d\x28\x11\x80\xd8\xb6\x47\xe0\xdb\xf2\xf9\xa2\x89\x0e\xe5\x08\x3a\x9b\xdd\xb5\xaf\x80\x4a\x81\xce\x16\x73\x1d\x87\x2d\xf0\x4c\x44\x6c\x8b\xb7\
xae\xf1\xd6\x1c\x01\xff\x14\x4a\x99\x32\xda\xe2\xf3\xad\x5b\x86\x3a\x8d\xc7\xc0\x7c\x6f\xfa\x70\x51\x0d\xad\x77\x0f\xa6\xdb\x60\x19\x55\x97\x46\x86\xdb\x2a\x0e\x9d\x5e\x0a\xad\xf3\x7d\x08\xc8\x1b\x28\x74\xd9\x49\x99\x59\x5c\xbf\x40\x89\xc1\x8b\xa5\xe3\x63\x0e\xb3\xd0\x99\x81\x38\x74\xc0\x2c\x88\x56\x96\x81\x06\x24\xc5\x66\x47\xbd\x63\x69\x2b\xcf\x2e\x4c\x8c\xc0\xb3\xd3\x8f\x8b\x65\x6c\xfe\x9d\xf9\x8b\x64\xb4\x67\xfb\x02\xf6\x85\xa0\xe6\xab\xef\xc8\x29\xb2\xbd\x3d\x57\x72\x5b\x4e\x03\xd0\x8f\x0c\x1d\x30\x82\x90\xcc\xbb\x8e\xe1\xa4\xb1\x17\xc4\x3b\x6e\xf8\x94\xb7\x65\xd3\xb2\x1b\x43\x0f\xec\x37\x24\x20\xcf\xc2\xa5\xfc\x31\x98\xa5\xa6\x6f\xe1\xe4\xd5\x1e\x00\x83\x1d\x80\x81\xc0\xeb\xac\xbc\x7a\x61\xce\xe7\x34\xbc\x4a\xbf\x9b\xe4\x33\xd5\xba\x1d\x8c\xa2\xca\xc7\xbb\x58\xed\xb6\xc0\xe6\x67\xf2\x8e\x4a\x5c\x34\xc3\xf2\x59\x3c\x5d\x29\x5c\x12\xf6\xaa\xa9\x84\x08\xbd\x77\x18\xc7\xa4\xe9\x13\xc4\xbc\xb9\x24\x6d\x7d\x33\x9d\x37\x61\xce\x5a\xc1\xe5\xa5\x25\xd6\x0e\xa9\x1c\xfd\x01\xa9\x17\x5d\xf7\xc7\x6e\x30\xa7\x26\x05\x9e\x71\xdb\xdf\xab\x23\x7d\x36\xc8\x07\xe6\x87\x8c\x67\xc8\x8d\xb2\x9b\x8a\xa4\x2a\xeb\xa1\xfc\x60\x08\x80\x91\xa9\x07\x79\xdf\x37\xf7\xe1\x87\x68\x49\xf7\x67\xfb\x39\xf2\x73\x62\x3d\xb3\xca\x33\xa1\xb2\xb9\xa8\x72\xb1\x69\x69\x73\x73\xd9\xda\x00\x32\xe3\xb2\x7c\xa8\x15\xba\x0d\x17\x67\x35\xdb\x90\x6d\x2f\x19\xfd\xf0\xad\x19\x0f\x11\xed\xea\xdc\xdb\xc3\x93\x6b\x95\x63\x7d\xf1\x75\x53\xfb\x64\x93\x44\x66\x1f\x5f\x3f\xc0\x59\x22\x2d\xcd\xae\xb5\x82\x91\xe2\x39\x95\xda\xc7\x0f\xaf\xba\x3e\x7a\x4d\x7c\x40\x19\xdf\x38\x97\x2f\x33\xe4\xd5\xca\xd6\xa5\x37\x61\x2a\x03\xba\xce\x17\x8f\xaf\xd2\xe8\xc2\xd3\x7e\x73\x75\xcd\x5d\x21\xd9\x58\x5b\x5d\xf8\x03\xb7\x75\xe6\x2e\x87\x73\xf3\x84\x26\x18\xd6\xea\x33\x5f\x63\xc8\x64\x6d\xf9\xf7\xb4\x67\xe6\xb6\xa8\x51\xb3\x92\x03\x5d\x94\x5f\xaf\xb1\x75\x7b\xe2\x58\x6f\xc9\xef\xf3\x05\xc8\x8e\xd4\x67\x50\xff\xc6\x6f\xfd\x51\x67\xee\x71\xb8\xb4\xb0\xca\x5e\x90\xbb\x17\xab\x4b\x20\xbc\xae\xa1\x2a\xb2\xe5\xa6\x54\xc6\x21\xca\xec\x73\x98\x02\xa5\x60\xf7\xe9\x57\xce\x52\xaf\xc4\x58\xae\x07\xcf\xc2\xdb\xdd\x72\x03\x56\xaf\x3a\xe4\xd0\x10\x29\xf9\x5c\x6f\xf3\x85\x09\xbe\x62\xc3\x2f\x39\x86\x58\xde\xe4\xdb\xd6\xa0\x57\x2a\x21\xda\xa0\x48\xc8\xa3\xa1\x37\xec\x41\x37\xd6\x4b\x83\x98\x9f\x80\x19\x7a\x96\x8f\x4e\xa9\xf5\x34\x58\x0a\xf2\xb1\xf0\x88\xbf\x93\xcf\x56\x90\xdc\x07\x15\x0d\x3b\xc4\xbd\x87\x7c\x1f\x84\xdc\x19\xda\xd3\xf2\xaa\x91\x07\xc9\x46\xad\xd2\xd1\xa5\xc7\x61\x8a\x81\x98\x53\x93\x34\xa3\xcb\x9f\xb9\x91\x64\x30\xeb\x07\x61\x5b\xea\x64\x0e\x79\xf5\xa8\x06\xb9\x9e\xd2\x57\xb0\x80\xbc\xa6\x93\xd8\xeb\x15\x69\xeb\x10\x39\xe4\x7a\xc6\xc4\x9d\x3c\xc2\x07\x49\x84\xf4\xfe\x91\x93\x0d\x6b\x96\xb4\xf7\x91\x14\x87\x5f\x51\xee\xe1\x59\xc1\x9d\xaa\x85\x9b\x62\x10\xf0\xca\xde\x8e\x90\x40\x61\x23\x5c\x79\x2a\x65\x70\xcd\x41\xa7\x10\xad\x50\x61\x5f\xb1\xad\x20\x0c\x86\xfd\x0c\x98\x84\x98\xed\x2f\xdf\x6c\xc2\xaf\x03\x9b\x74\xe3\x13\x7c\x32\x18\xdf\x33\x56\x82\x1c\xfa\x83\x43\xe7\x9a\xa1\x76\x13\xe9\x37\x9a\x42\x4f\x03\xdc\xa4\x49\xbd\xd3\x49\xb6\x77\xde\xef\x38\x05\xbc\x01\x64\x43\x17\xd3\x5b\x9d\x20\xeb\xd2\x4f\xda\x2f\x61\xa8\xd5\xd1\x00\x1b\xa3\xec\x57\x45\x7f\x20\xb2\xa3\x36\x48\xe8\x6b\xf9\xe8\xf1\xb6\xe9\xe9\xa9\x2d\xcf\x11\x10\x94\x95\xc1\x58\xbf\xb7\xec\xe2\x10\x6e\x36\x8f\xa7\x61\xb6\xc7\xaf\x34\x15\x1e\x50\x26\x29\x6c\xac\x65\x3a\xa0\xb0\x11\x0e\x41\xae\x26\x2b\xce\x23\x4d\x23\xd7\xe9\x14\x00\x16\xc4\xc3\x39\x5e\x9d\xaa\x4c\xe1\xca\xc1\x6c\x85\xe0\xb6\x1c\x8a\x6c\x02\x86\x94\xf4\x41\xf2\x61\x9d\x85\xae\xce\x3b\xf8\x89\xb0\x29\x97\x42\xfb\xc9\xc4\x9d\x04\x13\xb1\x9d
\x70\xfb\xf8\x4b\x61\xca\x34\x2e\x6b\x85\xcc\x5b\xe3\x15\x7b\x5f\xdc\x4e\x3d\x7e\x41\xde\x85\x63\xbf\xbf\x64\x89\xf4\x7c\x06\x47\xf1\xeb\x67\x1f\xdb\x96\xea\x37\xc1\x54\xbc\x18\x43\x74\xe7\x41\xfe\x21\x8d\x80\x64\x5c\x2f\x83\x1f\x94\xb1\xf4\x3a\xb4\xf0\x17\x86\xcf\x93\x93\xed\x35\x6b\x4f\xf8\x05\x65\x7a\x25\x31\xe6\xf2\x80\x8d\x02\xaf\xd8\x8a\x74\xf8\x0e\x4a\x22\x54\x7b\x38\xf5\x80\x8c\xe7\x12\x64\xc4\xa8\x4b\x63\xbd\xb1\x3a\x0d\x61\x21\x62\x0c\x47\x4a\x60\x79\xf0\xcb\xb7\x11\xa7\xd0\x27\x41\x8b\x7a\xfd\x1e\x01\x76\xaf\x2b\x00\x8d\xb8\x32\xd5\xb7\xc4\xe7\xae\xa3\x78\x8d\x2b\x7b\x14\x8f\xed\xa2\xc3\x28\x9c\x67\xa6\x5b\x35\xa5\x91\x3d\x6c\xf9\x47\x1b\xa3\x8f\x68\x75\x0e\x94\x90\xf1\x88\xc7\xf6\xc3\xbf\x1e\xb7\xcf\x51\x65\x2b\x6e\x27\x69\x7c\x16\x4e\x95\x26\xfd\x6d\x81\xf4\x08\x56\x63\xb0\x18\xcf\x03\xc9\x00\x7b\x71\xf7\x3e\xca\xf2\x3e\xea\x8a\x3d\x54\x57\xc4\xdc\x86\x9d\x71\xf1\x4c\x74\xa4\x52\x77\xb0\x17\xf5\x65\x9b\x2f\xbb\x87\x83\xd1\xa6\xba\x82\xcd\xca\x05\x9f\x06\x6f\x31\xae\x39\xe2\x25\x8c\x34\xb1\x18\x79\xa3\xf8\x9d\xbb\x48\xf2\xe2\x7c\xae\x9f\x44\x53\xa5\x60\xb3\xdb\xd9\x5f\x5d\x21\xcb\xc9\x9e\xbe\x44\xab\xa7\x21\xd1\x67\xbb\x20\x73\x0e\xfa\xe5\x93\x7e\xa8\xa5\xf3\xc9\x15\xc8\xcb\xdb\x16\x93\x66\x52\x99\x17\x71\x1b\xd1\xc5\x84\xbd\x12\x66\x9a\x41\x1f\x84\x0d\x53\x1f\x64\x9e\x92\xa8\xd7\x51\x7e\xb0\x52\xa1\xc4\xb9\xfc\xf4\x3e\x7c\x2d\x97\xe8\x24\xcb\x79\xd3\xdc\x96\x33\x9c\x96\xce\x5e\x5c\x47\xb7\x02\xd5\x4d\x84\x6c\x91\xb3\x64\x8b\x5a\x4f\xcd\x97\x52\xfc\x96\x5d\x00\x51\x09\x8f\x19\x6b\x60\x62\xaf\xd1\x65\x29\x5f\x35\x22\x6b\x00\xac\xe2\x51\x66\xd1\x7b\xe1\xec\x72\x01\x55\xa7\x7b\x90\x55\x29\xb7\xa3\xb1\x97\x62\x2a\x22\x1d\x8d\x62\x5e\xe2\xe9\xe4\x8f\x8d\x66\x64\x62\xda\x05\x09\x1c\x15\x44\xfc\x79\x68\x1e\x7c\x8d\x64\x71\xbc\x84\x8f\xde\x4b\x46\xdd\x84\xf3\x5a\x30\x29\xfb\x6a\xb9\xb4\x77\xe9\x7f\x1e\x6c\x10\x82\xf3\xc8\xfa\x39\xcf\x1d\x97\x9f\xe5\xc7\xf9\x7d\x97\x5d\xe9\x94\x3d\x4a\xcb\xec\x97\x71\x3e\xc9\xf8\x9f\xcf\xd3\xfe\xf7\x00\xce\xf5\xc1\x4d\x24\x9a\xd1\x23\x9b\x76\x0e\x34\xaf\x16\xfb\xf2\x26\xe8\x87\x33\x13\xdf\xe2\x8a\x5e\x0c\xed\xe6\xf0\x2f\x2e\x7f\x61\x84\xee\x4c\x73\x71\x2d\xcb\xd6\x90\xc2\x24\x3a\x66\xf5\x48\xf2\x96\xf8\x94\x47\x6a\x82\x8d\x19\x96\x4a\x5b\x27\xee\x22\xfd\xc0\x66\xfc\x84\x32\x61\x02\x91\x55\x91\x8b\x8e\x01\x25\x53\xb4\xd5\xd2\x07\x9c\x9a\x78\x02\xb4\x6a\x06\x9c\xc4\x1e\x78\x96\xe8\xe6\xa2\xc5\xcf\x59\x0e\xb1\xf3\xfc\x12\xdb\xd1\x0e\x12\x01\x93\xba\x88\x02\xfc\xfc\x28\xee\x32\x13\x56\xbb\x57\x4e\x24\x6a\x99\x31\x79\xa5\x12\xf9\xb5\x2d\xb0\x28\x89\x64\xcb\xe9\xa8\xef\x9e\xb4\x67\x02\xe6\x8e\x0c\xcc\x97\x78\x95\x3d\x2f\x65\x68\x33\xcd\x63\x7e\x89\xc7\x38\x76\x32\xfc\x7e\x38\xd3\x12\xfe\xa2\x72\x7a\x02\x8c\x17\x2e\x79\x7e\x98\xf3\x74\x69\xaa\x95\x43\x03\xdd\x05\xa9\x78\x35\xf3\x52\xc0\x2b\x4f\xb7\x19\xd4\x46\x52\x25\x97\x8d\x66\xd4\x0b\x94\x80\xcf\xad\xfb\x4d\xb4\x82\xb0\xce\xe9\xa2\xcb\x82\xbf\x96\xfe\x8d\xa4\x20\xcc\x18\x08\xf0\x4b\xf0\x23\xed\x67\xdd\xe9\x6f\x85\xb5\x6a\xbf\xd5\xa8\xad\x27\x86\x10\x5a\x45\xfe\x4f\x7b\x22\xf1\x72\xfb\xc3\xd8\xc0\x90\x55\xcf\x19\x8e\x20\xca\x08\xed\xee\xff\xd3\x3c\x12\xc4\x4d\x00\xba\xab\x51\x14\x34\x53\xca\x16\x44\xcf\xc5\x85\xed\x16\x35\x34\x5d\x46\xe4\x78\x22\xc6\x76\xa3\x31\xb2\x7c\xb6\xba\x58\x4d\x4f\x3a\x4e\xea\xad\xe3\x12\x26\x5a\x58\xf3\xd5\x31\xdd\xa7\xcc\x3a\x49\x57\x47\x8f\x2f\xb5\x2c\xda\x1d\xae\xc9\xd9\x5c\x78\xb3\x59\x50\x6d\xa5\xae\x58\x98\x9c\x4e\x50\xde\xfd\x66\x9a\xce\x5f\x8e\x56\xd3\xe9\xc6\x0b\x34\x5c\x2a\xd2\xa5\x9d\x4a\x65\x6f\xce\xdc\x3c\xbd\x93\x86\x30\x45\x96\xaa\xbb\xc5\x96\x9c\x91\x4e\xf3\xa0\x27\xd
5\xb8\xf2\x01\x99\x8a\xcf\x99\xa7\x89\xf8\xd4\xf0\xa1\x0a\x47\x29\x9b\x53\xc7\x9f\xe2\x1c\x4e\x75\xb4\x88\x81\xfe\xf0\x2f\xc2\xc8\x3b\x68\xb7\x18\x90\xf0\x98\x22\xbb\x12\x22\xd3\x7c\xb7\x1d\x7f\xc4\xbb\x7d\x26\x32\xb9\x4b\x84\x07\x24\x47\xb9\x63\x78\x70\x3d\x77\xe2\x0b\xa6\xbc\x2e\x3a\xa8\x54\xa5\x5e\x47\xb8\xf9\x1f\x96\xb9\x66\x3a\xe5\xf4\x8c\xad\xb4\x17\x27\x64\x4d\x64\x8a\xa6\xbf\xcc\x45\x02\xf7\xb3\xf2\x6f\x22\x24\xd2\x23\xe7\xa9\xc6\xf5\xb7\xd6\xf3\x26\x76\xda\x1f\x3c\x8d\x62\x87\x92\xf1\xc9\xfd\x7d\x71\x32\xee\x94\x3a\x5d\x1a\x15\x1a\x5e\xe7\x87\x9c\xc3\x97\x4f\x29\x9d\xae\x09\xb0\x1c\x5f\xac\xca\xed\x2f\x24\x94\xac\xb6\xbe\x90\x68\x7a\xb6\xf1\x2a\x9f\x22\x25\xa1\x57\xd9\xcb\xdb\x69\xa3\xb0\xcd\xd2\xb2\xd3\x0b\x06\x68\x9d\x07\xb9\x6e\x5b\x87\xd9\xbf\xc6\x4e\x38\xcb\x57\x7e\x13\x8a\x8a\x07\xd3\x1f\xd5\x57\xd6\x7a\xdd\x2b\x41\x36\x95\xfb\xd2\xf9\x0c\xd8\xcc\x2b\x01\x4a\x34\x17\x56\x59\x5f\x18\x2a\x82\x05\xaa\xd7\x37\xf9\x6b\xd9\x24\x5d\xa7\x07\x36\x0b\x95\x13\x7e\xcf\x59\xeb\xa1\xbf\xe7\x44\xe6\xd8\xef\x89\xf5\x42\xe7\x62\x16\x08\x11\x93\x09\xf0\xeb\xba\x02\x7e\xa0\xf5\x5a\x3e\x94\xfd\x66\x58\x87\x23\x48\x34\x5c\xfe\x4d\x21\xe5\xf2\xeb\xea\xc5\x22\x27\xee\x96\x39\x4c\xe3\x4d\xb1\xec\xc5\xf2\xab\xf4\x4a\x91\x2d\x85\xff\x54\x33\x96\x73\xf4\x9c\x8e\x36\x82\x53\x94\xdc\x22\xea\x5e\x72\x37\xd6\xdb\xce\x60\x5a\x56\x6f\x01\x5f\xb9\x44\x95\x44\x4c\x46\xb4\xc2\xa8\xd7\x5b\x0e\x40\xcc\xd5\x78\xea\xb8\x0d\xb5\xa4\xaa\x59\x3f\x86\x90\xd9\x78\x75\xaf\x6b\x0c\x4e\xce\x3c\xce\x0a\x48\xf5\xf6\x54\xa3\x18\xbc\xa3\x51\xdf\xe3\x75\xb9\x71\x09\xd9\x09\x7e\xff\x72\x62\x21\x45\x7f\x86\x5b\x7d\xc5\xb9\x3e\xa7\x60\x86\x3e\x6d\x24\xf9\xa2\x52\x3e\xe5\x39\x61\x11\x4d\x0c\xdd\xa2\x2b\xde\x76\x83\xdd\xf7\xaf\x76\x3e\xd4\x28\x8f\x9b\xca\x19\x3c\x55\xc0\x74\x1f\x37\xab\x53\x5f\xd3\x86\x8e\x1c\xc2\xe6\xfe\x7a\x68\x24\x59\xaa\x72\x86\xcd\x99\xcd\x92\xdd\x02\xd7\xce\x96\x97\x6a\x46\x7e\x7e\x44\x28\xe5\x0f\x7a\x79\x05\xe5\xb3\x1c\x82\xf6\x8e\xb3\xe8\x16\x7a\x01\x73\xa8\xb1\xd2\x3f\xe5\xce\x2a\x06\x28\x24\x73\xea\xdb\xe6\x9b\x91\x3b\x4b\x6b\xc1\x1b\x40\xde\x7e\x5d\x2c\x6b\xce\x77\xce\x3d\xda\xdf\x2f\xc2\xb0\x93\x44\x99\x05\x89\x30\xea\x67\xd9\xba\xd0\x97\xcf\x04\xc7\x34\x37\xe2\x86\x62\x3c\xae\x0b\x7c\xc3\x85\x07\x67\xa5\x5a\xd6\x70\xf5\x76\x4e\x22\x49\xc2\x58\xcc\x86\x40\x12\x2b\xe9\x32\x86\xcf\x1f\xce\xbf\xbf\x44\xdf\xb0\x9d\xec\x48\x87\x9e\x47\x41\x1b\xa6\xe5\x11\x68\x08\x19\x42\x0e\x70\xc0\xb8\xc1\xd7\x9a\xc6\x0b\xfa\xee\x33\xd6\x0a\xaa\x06\x5c\x61\x4f\xd6\xa7\x5d\x51\x3e\xf6\x83\x56\xe1\x2d\xcb\x25\x17\x92\x3b\x89\x45\x1d\x50\xb5\xb0\xa7\x59\xd8\x08\xf0\xd3\xab\x9a\x31\x68\x14\x67\xe4\x5c\xce\xa9\xd0\x13\x8a\x05\xa2\x6b\x3f\xb5\x5a\x3d\x94\x39\x57\x07\x18\x34\xfa\x71\xa3\xd7\xd9\x14\x0e\x63\x98\x7a\xe9\x23\x5d\x5c\x42\x15\x16\x87\x18\xfb\x7d\x01\x88\x31\xdd\xd0\xc6\x57\x50\x59\x83\x97\xea\x2d\x1b\x5f\xfc\x07\x03\x2a\x66\x64\x60\xd7\xb5\xac\x47\xe5\xa1\xc6\xd9\x88\x88\xe9\x23\x64\x93\xe5\xbd\x65\x2d\x65\x6e\x03\x4a\x71\xc3\x18\xd6\x40\x4e\x44\x4d\xfb\xfd\x31\xa9\x73\xfe\x1b\xe2\xf4\x61\x65\x77\xb0\x09\x78\x65\x89\x0f\xd0\xf0\xf3\x2f\x02\xb9\x5e\x84\x55\xbb\x66\x9b\x38\xaa\x8b\x24\x88\x06\x22\xf6\x87\xf4\x35\x01\xb4\x09\x3b\x94\x47\x48\xe9\x4d\xae\x8a\xcf\xc9\xf4\xa0\x62\xcd\x50\x59\xb9\x29\xf5\x32\x44\xc3\xbf\x07\x98\xac\x5e\x1b\x01\x67\x55\xb4\xd1\xd3\xaf\x72\x7f\xce\x58\x80\x61\x34\xbb\x29\xd8\x39\xf1\x05\x94\x01\xc8\x82\x3d\x0e\xf7\x26\xdb\x2a\x25\xc0\xc2\x73\xad\xcc\xd6\x1b\x1d\xeb\xe4\xd6\xd8\x3f\xee\x87\x1c\xcf\x44\xa6\xec\x61\xe3\x0d\x3e\x28\xf7\x0a\x36\x6e\xc1\x70\xd3\x52\xb1\x3a\x8c\x08\x5f\xb5\x54\xd6\x4a\x
5f\x84\xdb\xec\x4b\x2f\xca\x27\x68\x47\x9e\x15\xf4\x94\xa4\xd0\xb7\x4e\x4c\xd7\x7b\xac\x01\x6a\x4f\x4e\x73\x84\xf1\x95\xf2\x4a\xed\x3d\x05\x7b\x54\xaf\x34\x9e\xc4\xd8\x84\xd8\x86\xc3\x53\xc8\xcb\xcd\x34\x3f\xcb\x37\x56\xd6\x62\xa7\xde\x2a\xfd\x03\x8f\x25\x09\xfc\xd8\xd7\x4a\x5e\x7f\x5b\xcb\xf6\x2e\xcd\x5d\xa3\x87\xd3\x56\xb1\x6e\xc5\xa8\x87\xa4\x08\x99\xc1\x69\x7b\x82\xe2\x35\x31\xee\x62\xe1\xd5\xe2\x36\x68\xb5\xc9\x6d\xc3\xe1\xba\x16\x56\x6c\x96\xb8\xc8\x50\xbf\xb7\x34\xd6\x4f\x16\x25\xb0\x41\x4c\x59\xea\xd4\xc6\x6f\x8f\x3c\x59\xab\x01\xbd\x4d\x5b\x43\xb7\xb7\xe7\xe4\xb2\x07\x54\xd3\xe7\x0e\x4f\x67\x1d\x4c\x14\x62\x43\x83\xf3\xd5\x27\x1d\x6e\xc8\x62\x75\xb2\xac\x07\x3d\x12\xc3\xe7\x66\x39\xb5\xb7\x29\x54\x40\x24\x2f\x04\x30\xfd\xcc\x9f\x40\xa9\x88\xaa\x89\x52\xa1\x26\xee\x16\x7b\xf2\xca\xfd\x89\x82\xab\x4f\x74\x78\x1b\xc7\x44\xf8\xb6\x00\x9b\x38\x50\x11\xdd\x99\xe2\x1f\x82\x22\x61\xf6\x56\x1e\xd5\xf4\x4d\x1d\x0e\xb0\x6f\xbb\x38\xe1\x1e\xfb\x85\x86\x2e\x35\x5c\x45\xfd\xe3\x2d\xd9\x8b\xaf\x2c\xb1\x50\xc0\xc8\x83\xe9\xe5\x37\xcb\x93\x46\x2a\xc1\x8f\x3d\x39\x71\x72\xfa\x6c\xec\xa3\xf1\x5c\x6d\x6d\x32\x8c\x2b\xae\xbe\x26\xd6\xac\xdd\x72\xa9\x52\x5c\x41\x15\xd4\xd1\xae\xcc\x15\x24\xdb\xea\x09\xf7\xf7\xe7\x9b\x7f\xd5\x5a\x6d\x7c\x87\x15\xdc\x3e\x2c\x72\x87\x23\x51\x75\xa4\xc0\xa2\xf3\x62\x6d\x30\xa5\x7b\x9e\xcd\x64\xb6\x1c\xd7\x6a\x7c\x92\xd7\xf8\xc4\x2f\x21\xae\x33\x14\x1f\x34\xc4\x3d\x73\x98\x92\x3d\x2b\x16\x18\xfe\x72\x89\x6f\xca\x59\x34\xb1\x27\xe6\x8f\x62\xe6\x86\x69\x88\xbb\xca\x85\x4f\x9c\xbc\xe4\x3c\xb6\x1f\xab\x39\xa5\x52\xf6\x74\xe3\x38\x1e\x83\xe5\x40\x22\x73\x60\x21\x37\x54\x4e\xee\xa8\xa4\x0a\x91\x48\xb9\xf2\x69\x79\xa6\x51\x3a\x6e\x4f\x16\xfc\xb8\xb3\xf5\x89\x25\x16\xa2\xe4\x64\xd7\xe6\xb8\xbc\xf9\xca\x6b\x22\x4c\x3f\xff\xfb\x27\xaf\x38\xaf\x7f\xc1\x6c\x15\xf5\x47\xef\x8e\x7c\x5f\x47\x3d\x4a\x8f\x6f\x06\x97\x5e\x1e\x99\x10\x78\xc3\x57\x05\x3a\x86\x85\xc9\x13\x38\xbd\xa0\x0e\xd1\x06\x06\xad\xa0\xfb\xe7\x7f\x28\x5a\x54\x5f\xbb\xb1\x0f\x2f\xbb\x61\x74\x0f\x2b\x7d\x50\xb5\xe6\x62\xa8\xb7\xba\x17\x92\x76\xdb\x29\x05\x5d\xc8\x1a\x61\xd6\x0f\x4f\xbc\xfd\xcb\x43\x4f\x0e\x79\x8a\x59\x60\x60\x54\x59\xc4\x5b\x53\x3a\xb2\x8b\x81\x38\x8c\x33\x38\x31\x9c\xa4\x5d\x44\x62\xca\x36\x62\xbb\x66\xb1\x5f\xc1\x6b\xec\xcb\xa1\x90\xe2\xbf\x96\xcd\xec\xc1\x09\xf6\x44\xa2\x55\xe3\xc0\xa0\xaa\x6b\xbb\x0e\xe4\xc5\xde\xbc\xdf\xc7\x4d\xf1\xbd\x19\xc3\x80\xfb\x8a\xf1\xeb\xa1\x47\x3d\x4c\xae\xb4\xd7\xa6\x52\x6f\x9a\x6d\x50\xed\x63\xaf\x91\xbf\x4e\x4e\x70\xf5\xd4\x03\x2f\x31\xcc\xfe\xe6\x75\x48\x61\xaa\x07\x35\x7b\x5e\x15\xa8\xe2\x84\xad\xde\xec\x54\x0f\xed\xa6\xd3\xdf\x0c\x72\x42\xb8\x99\xa1\x80\xd6\x56\xb3\x29\x61\x8e\xed\x4f\x94\x0a\x99\x3d\x0d\x51\x7f\xca\x96\x96\x57\x3f\x3f\x79\x13\xe5\xa7\x33\xd6\xb5\x93\x7e\x7d\x31\xe5\xcb\xee\x86\x01\x45\x47\xcd\x99\x73\xda\xaf\xfe\x26\x15\xcb\x43\xb5\x3b\xe9\xd8\x76\xb2\xf8\xdf\x49\xb4\xab\xb3\xee\x3a\xe8\xbf\x27\x7b\x64\x6c\x49\x67\x84\xe2\xac\x2a\x28\xdd\x16\x38\x0d\x67\x00\xab\x2e\xd2\x6d\xd9\x1f\xa1\x0e\x5d\x2e\xb7\x60\xf4\x7b\xb5\x09\x46\x7d\x74\x95\x41\xaa\xb9\x07\x94\xaa\xa3\x73\x4d\x07\x4f\x52\x72\x76\x97\xf2\xa1\x16\xb6\x53\x7c\x99\x6e\xd0\xb3\x4f\x04\x4d\x61\x3a\x64\xd0\x27\xcb\xb6\x51\x22\x34\x5f\xcd\x40\x2a\x74\xee\xe6\xdb\x05\xbd\x76\x2f\xd3\xbb\x12\x6f\x77\xff\x0d\x18\x7b\x06\xc8\x90\xfa\x9f\x7a\x73\xa8\x33\x90\xca\xfb\xc5\xda\x6d\x4f\xad\x89\xb9\x88\xeb\x22\xbb\xe4\x6f\x78\xa9\x2c\x00\x03\x2b\x86\x26\x78\xf0\xd1\xb0\x74\x04\xeb\x31\xfc\x88\x25\x08\xc0\xc1\x59\xb4\xc4\xe7\xb6\x12\x6c\x2b\xc1\x38\x35\x5c\x7e\xea\xf2\xe9\x0e\x86\xd9\xe6\x70\x95\x6d\x9c\xeb\xb2\
x1d\x0f\xb1\xec\xa7\xf2\x77\x01\xcd\x09\x27\x41\xf7\xee\x63\x04\x34\x4d\x4b\x62\x2b\xe5\x06\x48\x8d\x5d\x8b\x5c\xf2\x56\x2e\xd3\x77\xd5\x79\x6f\x76\xe1\xd9\xaf\x2f\xf1\xaa\xcb\x57\xfb\x29\x7b\x89\xae\x6a\xdd\x7e\x35\x82\x2e\xbb\x5d\xd5\x44\xd1\xee\xe0\x6b\x57\x70\xae\x22\x55\x43\xcf\x36\x56\x11\x90\xeb\x35\x99\x1c\x63\x70\x0b\x82\xa9\x9b\xa6\xfa\xf7\xa4\x67\x64\xce\xb0\xe7\x13\x89\xa2\xab\x66\x44\x9d\x80\xd2\x1d\x56\xf3\xf5\x73\xda\x9c\x47\x24\x65\xb1\xdf\x3a\x1a\xdc\x6a\xdf\xac\x99\x5a\x87\x07\x2b\x3e\xa5\xe7\x19\xff\xe4\x83\xdc\xbc\x3f\x10\xf6\xf7\xa9\xbc\xd3\x4b\x8b\x8e\xb0\xb8\x0f\x8b\xb1\xb3\x4a\x7c\x59\x67\x9b\xda\xe4\xdc\xc3\x73\x75\x69\x6f\xee\x75\xdc\x94\xe8\xad\x36\xa6\x1d\x87\xde\x38\x7d\x65\x82\xb5\x45\x36\xde\x4e\xc4\x36\xd4\x81\x09\x7c\x70\xe3\xbe\x41\xe2\x12\xd6\x58\x2e\xed\x75\x77\x32\x22\x09\xa6\x2a\x92\x86\x1d\x44\x23\x51\x2e\x56\x3d\xbd\x13\x79\xf5\x0d\xb7\xe6\x6e\xeb\x4e\xc2\x08\x44\x3c\x17\x4b\xd7\x3e\x61\x57\xc5\xb3\x2d\xdb\x4e\x79\xc3\x16\xb3\x13\x25\x8a\x28\x81\x06\xaf\xb7\xc8\x7d\x0d\x9c\xdb\xea\x9c\x17\x8c\xdb\xca\xc0\x1d\x97\x93\x83\x3d\x7d\xf8\xa2\x66\x31\xdd\xbb\xaa\x0c\x2d\x9a\xbc\x29\x3d\x9a\xc2\xd2\x3e\x9c\xbb\xf5\x15\x0b\xef\x3d\x9d\xc5\xf2\xbc\xd9\xaf\x14\xf9\x94\x17\xa2\xb5\xe2\xae\xf7\xb8\xab\x9c\x71\xb7\xf1\x73\xe3\xbd\xf8\x5a\x0c\x93\xf5\x43\xa0\x95\xfc\x9f\x86\x72\x50\x6f\xa5\xe9\xf6\xdd\x83\xbc\x74\xbd\xce\xa0\xd0\x24\xa9\x26\xb3\xd1\xde\x07\xc6\xaa\xa3\x5b\xfe\x91\xad\x49\xb3\xf1\xd8\x6b\x37\xa5\xbf\x23\x5a\x5d\x44\x5f\x4b\x01\x6a\x72\x99\xd2\x1b\xf5\xdd\xc0\x8a\x6d\x71\xb1\xa6\x73\x5e\xb4\x60\x0d\x42\xf0\xe2\x29\xf8\x15\xad\xfb\x95\x49\x09\x9f\x87\x4d\xb9\xdf\x0f\xbb\xbc\xf0\x35\xac\xca\x52\x76\x89\x1c\x4a\x68\x5b\xc1\x6a\xdd\x19\x30\x5e\xb4\x5e\x45\xcc\x71\x94\x8b\x19\xc0\x7e\x5e\xa7\x8e\xae\xb2\x8d\xf0\xf9\xf4\xfa\x17\xe9\xc6\x20\x96\x7d\x29\x32\xb7\x28\xa2\x62\x3b\x1e\xac\xba\xda\x2c\x85\x54\x61\x58\x06\x8e\xc4\xb5\x57\x86\xf1\xcd\x28\x2e\xab\x6b\x09\x82\xd0\xb6\x07\x9f\x37\x6e\x50\xb7\x5d\xe7\x15\x45\xfe\x9f\x3f\xf2\x04\x14\xeb\x7d\xb0\x13\xe2\x77\x0f\x22\x1f\x21\x48\x0e\x4d\x65\x63\x5b\x4a\x73\x91\xca\x1c\x1e\x93\x48\xbb\xab\xcd\x60\x9a\x57\xf8\xd6\xea\xe3\x12\xad\x98\xc1\x16\xf6\x2f\x42\x2d\x90\x09\x1f\x38\xc2\x81\x75\x45\x1c\xb0\x0a\x70\x82\x37\xb1\x83\x8f\xb0\xed\x4c\x87\x9b\x13\xb6\x8c\xab\x61\x52\x8b\xc5\xee\xc5\x46\xdd\x68\x65\x08\x2d\x3e\xc8\x12\x76\x7d\x14\xbd\xb0\x95\x90\x81\x01\x6b\x78\x7c\x17\x41\x45\xad\xf5\x1b\x84\x5e\x9b\xe2\xa9\x48\xc4\x57\x56\x12\x93\xd7\xf1\x96\x63\x11\x65\x98\x11\x88\x3f\x54\x14\x48\x4b\x15\x41\x6f\x7d\xcd\x23\x66\x10\x6c\x4d\xe4\xbc\xa7\x8a\x20\x6b\xd3\xcc\x9f\x0e\x74\x65\x9d\xd5\xf4\x17\xb7\x0a\x12\x02\xa7\xf4\xb6\x62\x4e\xb6\xa9\x91\xfe\x77\x13\xa8\x6b\xf2\x86\x1b\x1a\x53\x40\x00\x8a\x0b\xf2\xcd\x69\xbd\xd6\xce\x03\xb0\x5c\xce\x00\xf6\xc4\xa2\x65\x5a\x7f\x96\x4e\xb9\xc5\x35\x40\xa7\x8b\x67\x7b\x66\xa2\xd3\x9a\xed\x0a\x12\x28\x90\x85\xf1\x6d\x76\x87\xfd\x05\xa5\x58\xb3\x3f\x8b\xbc\x72\xdf\xc8\xbf\xdd\xb6\x58\x1f\x64\x94\x48\x2c\x06\x73\x35\xdc\x03\xb7\xcc\xa3\xa9\x2c\x0f\x36\xf1\x3e\x4a\x1b\x07\x44\xe2\x49\x47\x25\xd2\x06\xe1\x9b\xd8\x55\xe7\x45\x1b\x39\xf3\x02\xf6\x9c\xf3\x48\xdd\x11\x42\x2f\x7c\xe2\x0c\x81\xdc\xfe\xdf\x29\x96\x70\xf8\x29\xea\x20\x12\xcf\x9a\x12\x85\x6c\x30\x8d\x55\x97\x70\xd2\x71\xd0\x8f\xfb\x5c\xed\x14\x11\x7b\x30\x37\xfa\x90\x89\x9e\xb7\x0d\x45\x18\x6d\xc3\xeb\x25\xd3\x8d\x30\x5a\x69\xa6\xf3\x14\x79\xf8\x2b\xa7\x6d\x26\xb9\x49\xc3\x9e\x91\x14\x63\x0d\x35\x0d\x86\xc3\xa2\x80\xb9\xa7\x18\xcd\xf1\x32\x15\x98\x9c\xb5\x0c\x79\xe0\x88\x47\xbd\x3e\xd2\x2e\x92\xdf\x22\x90\x4b\x33\xa5\xef\xbd\x09
\xce\x8e\x33\xef\x77\x6b\xc5\x24\xab\x19\x29\x80\x05\x01\xbd\x88\x98\x1c\x36\x61\x60\x85\xf8\x49\x6b\xbe\x46\x10\x7c\x68\x72\xe4\x4d\x44\x69\x0b\xdc\x49\xd9\xdf\x94\x5d\x68\xde\xa3\xe5\x17\xc3\x2c\xf7\x2e\xc0\x4f\x6a\x32\xd3\x54\xb2\x60\x03\xf0\x20\xef\x92\x3c\xd9\x4d\xf2\x27\x34\x29\xd2\x87\xab\xfa\x5c\x59\x0b\x7d\xb1\x22\x1d\x30\x84\x12\x1c\x8c\x20\xfd\x82\xd9\x2d\xfd\x5b\x6c\xbc\xe1\xce\x90\x64\x6f\xd2\xbf\x10\x38\x05\xe3\x59\x84\xe7\x3d\x07\x11\x3d\x8a\x05\x16\x3c\x6b\xd4\xd0\x25\x72\x91\xa4\x99\x45\xaf\xea\x1b\x64\x8d\x4c\x6e\xf2\x34\xed\x97\x02\x9f\xdb\x32\x7e\xc9\x31\x34\x4d\x6d\xbd\xd3\x92\xca\xa9\x0d\x6a\x74\x64\x43\x3f\x9f\x41\xad\x0f\x63\x1f\x04\x7c\x4d\x9b\xf6\x08\xf6\x75\xfb\xea\x13\xc6\x43\x6f\xa4\x97\x73\x4a\xff\x8e\x0d\x0b\x78\x33\xe3\x4c\xf6\x32\x6f\x27\xcb\xf5\x9c\x5c\x7a\x1e\xa5\x19\xff\x53\x5e\xb3\x5d\x81\x83\x85\x9d\x33\x08\xba\x89\x74\x6a\xac\x1d\x28\xf1\x27\xb7\xfa\x87\x61\x17\x2b\x1f\x08\x65\xc4\x48\xbe\x75\x25\x59\xdb\x04\x46\x2a\x8d\x22\x57\x8f\xe2\x5c\x83\x56\xad\x17\x9c\xf4\x5c\x28\x83\x46\xb3\x05\x9e\x29\x96\xa3\x1f\xff\xef\xcb\xbf\x92\xe7\xb6\xd1\x08\x72\xd3\x4a\x92\xd3\x9e\x8f\x22\xde\xb3\x38\xd0\x08\x2d\x79\x91\xc3\x24\xa1\x80\x8a\x48\xa8\x5d\xd7\x4d\xca\x04\x08\x41\xfa\x14\x95\xc6\xc6\x31\xa4\x13\x69\xb9\x64\xfa\x93\xdc\x71\x83\x13\x15\x5f\xd5\xf6\xf1\x93\x0d\x2c\xc8\xfb\x9b\x48\x98\x6e\x19\xf2\xe7\xb6\x3b\xc4\xd3\x73\xdc\x29\xc3\x58\xc9\xc3\xa0\xe1\xb1\x43\xd7\x5a\xa8\x76\xf7\x63\x71\xe8\xda\x15\xad\x87\x13\xa9\x49\x0b\x9e\xf1\x03\x4c\x14\xa1\x4a\x20\xb1\x51\x77\xd2\x3f\x65\x16\xd1\x5d\xc6\x98\xfc\xe8\xb9\x19\x7b\xcd\x23\x11\xcb\xfb\xd2\x59\x47\xf4\xfe\x9c\xae\x5d\x39\x76\x9a\xbc\x02\x28\xf7\xc7\xa9\xe5\x42\x77\x82\x8a\x05\xcb\xc8\x90\x56\x9e\x75\x68\x6a\x02\x68\xed\x9e\x99\x12\x36\x4c\xc3\x41\xfe\xf5\x66\xaf\xfb\x2d\xf3\xb4\x99\x25\x5c\xce\xfd\x5d\x1d\x56\x0e\xdd\xfb\xdf\x42\x29\x0f\x5b\x84\x6e\x82\x7f\x5e\x07\x90\x42\x5c\x09\x35\x3c\x8c\xbc\x26\x85\xfd\xb2\x5b\xc1\x74\xbc\xae\x2d\xe8\xc7\x53\xee\xaa\xdc\x1a\x68\xda\x62\x59\x8b\xa2\xcd\xd0\x9e\x7b\xbe\x01\xd2\x22\x5f\xf4\x6a\x2f\xe1\x26\x18\xfd\xca\xcd\xb4\xb1\x59\x61\xa7\xab\xd6\xec\x01\xef\x4e\x80\xac\x89\xab\x0d\x10\x31\x0f\x05\x50\x8e\x57\xaa\xa1\x11\x1f\x3d\x34\xab\xb5\x66\x57\x0d\xb5\x53\xd3\xc8\x39\x55\x38\x38\xdb\x36\x3e\xa7\x4a\xd4\x2b\xa1\xe3\xa6\xd3\xba\x55\xd8\xfc\x83\x1d\x8e\xf7\x7d\x7f\x58\x31\x84\xe8\x86\xc7\x33\xde\x99\x3d\xac\x2e\x5b\x7f\x79\x1d\x2e\xef\x41\xfe\xd0\x5f\xb6\x8c\x08\x0e\xbc\xca\xe9\x50\xa2\x0b\x58\x1f\xee\x78\x71\x3e\x7d\x51\xc0\xe2\x42\x06\xa6\x99\xc8\x15\x22\xbd\xd7\x0f\x9d\x07\x0f\x9e\xde\xe7\x5a\x00\x15\x40\xb4\x51\x5d\xe4\xe3\xc2\x1c\x4e\x49\xf6\x01\x1c\x1f\xf8\xd1\x70\x15\xce\x9c\x62\x79\xa3\x2a\xad\xfd\xc1\xbb\x47\x14\xbb\x5e\x54\x8a\xfc\x10\x1e\xf1\x29\x85\x89\xce\x87\x12\xfe\xee\xe4\x79\x45\x10\x1c\xf2\xd2\xb9\x95\xb5\xc7\xbf\xa9\x3b\xad\x07\x8d\xc5\xf2\xcf\xd7\x58\xf7\x01\xf3\x28\x42\x30\x3e\xfc\x1b\x6c\x28\x3a\x12\xbf\xec\xe6\x5b\x7d\x22\x25\xfa\xe0\xf1\x68\xc8\x0b\x87\x9e\xbc\xf6\x0e\xa7\x00\x65\x64\x85\x2b\xf0\xe9\xfd\x07\x58\x96\x3d\x4d\x72\x04\x1d\x6b\xa6\xeb\xd6\xa1\xdc\x89\xd9\xa8\x73\x10\xc9\x12\xe7\xab\xfe\x9c\x7c\xb2\x03\x11\x7d\x8f\x8d\x8c\x1f\xf9\x7a\xe6\xe5\xe4\x2e\x1b\x77\xc5\x76\xcf\x3a\xa8\xe1\x26\xbd\xab\x91\xa8\x0c\x62\x3e\xb9\x9e\x6e\x3a\x4e\xd7\x61\x71\xf6\xb8\x1f\x94\x5c\x4e\xe1\x47\xf1\x73\xcc\x53\x10\xc9\x57\xba\x1e\x03\x49\x11\x29\xbf\xc5\xfb\xd1\xe8\xb7\x4f\xfc\x99\xed\x7e\xf2\x0e\xf4\x4d\x2a\xaa\xb3\xb7\xcb\xe1\xdd\xc3\xbb\x60\x09\xe0\x87\xaf\xca\x81\xef\x3d\xbe\x99\x0b\x5b\x90\x23\xe0\x43\x86\x20\x06\x93\xf7\xc8\x7c\x53\x4e\x48\x8c\x58\xf
7\x00\x45\x7c\x64\x3f\x09\x34\x20\xa7\x0a\x96\x66\x2e\xe4\x97\x2f\xad\xf8\xda\x58\x3f\x63\xd0\xeb\xa3\xba\x5c\xf4\x52\xcb\xfa\xe8\xdb\x55\x78\xf8\x6a\xd6\x28\x8f\xce\xc7\x67\x94\x3b\x1b\x4b\x18\x9d\x86\x43\xf3\xdb\x8b\x87\x53\x01\x39\x62\x66\x5b\x93\xa1\xb7\xab\xa9\x71\xb4\x7b\x5b\xa6\x40\xe4\xcc\xdb\xb9\xe8\x47\x8d\x5a\xbe\x4c\xea\x79\xc7\x78\xe4\x76\x71\x1e\x24\xc3\x4e\xd4\xf9\xca\x39\x78\x43\x41\xa2\x96\x3c\xec\xe9\x75\xa8\xdb\x59\x57\x12\xce\xbd\xde\xe0\xe7\xde\x91\x97\x1e\x76\xa4\xdb\x49\x7a\xfa\x6d\xe4\x2c\x8e\x3a\x3f\x0b\x31\x89\xe5\xab\x3d\xda\x5e\x37\x38\xc1\xfe\x0b\x3e\x7a\xf0\x4c\xd5\x5a\x63\x1a\x0f\x69\x21\xcf\x06\x3c\xb0\x8b\xa0\xfc\xde\x9b\xf4\x29\x45\x6d\x60\x0f\xc7\x8b\x13\xcf\x95\x38\xe6\xd3\xf8\xaa\x66\xa9\x53\xc8\x04\x5b\x1e\xef\x25\x4b\x01\x94\xd4\xcc\xd1\x40\x6e\x5e\x39\xed\x20\xbf\x44\xc3\x2d\x44\x65\xb3\x5a\x39\xd9\xe7\x32\x04\xac\xd7\x3a\x27\xbf\x58\xcb\xf3\xe4\x7d\x62\xd1\x96\xf3\x5f\x9c\x96\x56\x82\x6a\xd3\x86\x30\x3d\xac\xe7\x22\x8d\x51\x7d\x4e\x5d\xa4\x41\x48\x99\xb9\x78\x25\x71\xbe\x48\xfa\x7c\x79\xd7\x17\x6a\x89\xb3\xf2\xe9\xdd\x79\x65\x89\x86\x04\xdf\x19\x83\x74\x17\x8d\x58\x6f\x89\x46\x8e\x98\x98\x5d\x1e\xe7\x95\xa2\x98\x65\xc2\x1b\xe9\x3d\x06\x14\x02\x08\x84\xff\x63\xc5\x0f\x09\xcc\x8c\x65\x01\xc4\xbd\x94\x6a\x4f\xfa\x17\x1f\xb2\x63\x39\xdc\xe9\x3e\xc2\x46\x26\xa9\xf8\xed\x26\xed\xa4\xd9\xc4\xe1\x66\xa9\x99\x91\x58\xd9\x1b\x8c\x39\x6e\x32\x38\xf8\x8b\x5c\x64\x85\x75\xac\xdc\x48\x37\xa9\x31\xba\xa0\x74\x70\xe6\x63\x5e\xeb\x44\xfa\xf0\x60\xc5\xe3\xd2\x4b\xd4\xaf\xda\xcc\x8a\x12\x1f\xad\x07\x9d\x66\x33\xdd\xac\x2c\x3c\xe7\xc4\x23\xec\x27\x95\x4c\xab\x2b\xb7\x06\xc5\x7e\xd9\x9d\xd3\x51\xf9\xf9\x09\x34\xa7\xe9\x21\xcf\xd9\x7a\x1a\x90\x46\x8d\xb7\xa8\x0a\x7a\xac\x3f\x64\xb3\x97\x7f\x3e\x23\x9b\xe5\xbe\x7d\x00\xb8\xe1\xdf\x0e\x27\x35\x75\x3b\x16\xeb\x37\xb2\xc8\xc6\x88\xa2\xb7\xd2\x8e\x06\x24\x4e\xfe\xe9\x65\x5d\xa4\x6a\x75\x88\x93\x79\xa0\x48\x47\x5a\x3e\xaa\x8c\xdc\x17\xa7\x0b\xac\xcc\xc0\x60\x84\xba\x2c\xaf\xf3\x38\xe9\x27\x41\x87\x5b\x83\x70\xae\xbd\xed\x47\x8f\xb1\x6b\x24\xa8\x30\x56\x02\xc6\x01\x3e\x9b\x6f\x37\x55\x11\x2c\x82\xaa\xa1\xb3\xef\xab\x0f\x2e\xcd\x65\x21\x69\x3c\x04\x50\x18\x2d\x2e\x95\x43\xec\x77\xe6\xf1\x18\x2c\x26\xee\xfb\xd6\x20\xc2\xfa\x14\x37\xd4\x14\xc8\x54\x45\x85\x1e\x00\x5d\x18\xbf\x8d\x74\xfd\x95\x7c\x0c\xb5\x4a\xe1\xda\x00\xaf\xf2\xf6\x70\xdb\xd2\x97\x67\xb0\xcb\x03\xf0\x32\x64\x76\xa4\x58\xcb\x28\x8b\x8c\xa1\xad\xd1\x10\x92\x09\x43\xdd\x7f\xda\xaa\x64\xef\x21\xd8\xf7\xee\xa2\x8b\xc5\xd6\x26\x56\x6d\x20\x30\xd0\xba\x7a\xa1\x45\xbc\x4e\x92\x91\xe0\xa2\x71\xa7\x30\xf5\x82\x51\x1e\xe0\x33\x68\xf7\x22\x98\xc2\x81\xda\x96\xd7\xbe\xf0\xc4\x87\x70\x05\x00\x23\xab\x64\x0a\x5a\xbe\x95\xca\x86\xbe\x7c\x5e\x03\xa7\xa7\x58\x63\x07\x62\x49\xf4\xd4\x8b\x37\xf4\x7e\x85\xf1\xa6\xb4\x0b\x76\x8f\x6d\x3e\x5f\xf8\x9c\xef\x1f\x17\xa9\xa1\x2e\xc1\xf8\xf7\x47\x1e\xff\x67\xbc\xf7\xc5\x9a\xfb\xfe\x19\x63\xe1\x74\xff\x54\x9d\x0f\xb7\xfa\x05\x4b\xd8\x3f\xf8\x93\xae\xbf\x24\x7e\xfe\x27\xd5\x42\xe6\xfe\xf9\x36\x70\xd8\x6d\x08\x7d\xd1\xb1\x62\x3f\xff\x24\x09\x65\x1d\xf6\x9a\xe4\xc2\xbb\xc6\xc2\xa2\x4b\x20\x50\xc2\xcb\x43\x1d\x39\x57\xa3\xf1\x9d\x1a\x33\xc0\x25\xea\xb5\x9e\xe3\x93\x07\x75\x97\x93\x49\x4f\x79\x0e\x37\x6f\xa2\x44\x2c\x3a\x1d\x9f\xe1\x5e\xbf\x48\xcb\x44\xbb\x76\x81\x04\x1d\x0b\x71\x37\xa5\xa5\x3b\xac\xe9\xe6\x74\x2b\x24\xfc\x82\x96\x20\xd1\x85\xc3\xed\xa1\x6c\x33\x8f\xa1\x89\xb0\x1c\x62\xac\x5a\xfa\xda\x85\x28\xc2\x85\xd4\x52\x8a\x06\x9b\x2b\xdd\x52\xc2\x01\x16\xc1\xf1\x37\xb9\x5e\xc2\xcd\xf9\x47\x9f\x04\xad\xce\x68\xa0\x2b\x1c\xc8\x25\x95\x54\xb4\x1e\x
5f\x27\x47\x74\xb7\x13\x71\x92\x23\x9a\xcd\x15\x3e\x90\x54\xe0\x31\xea\x43\x7c\x02\xe1\xba\x2b\xb2\xf7\xad\x1d\x2b\x10\x8b\xc2\xda\x22\xec\x82\x93\x0b\x22\xa1\xfc\x61\xf1\x00\xf8\x75\x3f\xdc\x97\x85\xd1\x71\xea\xd2\x1f\x10\xe2\xd2\xb9\xa7\xe5\x4f\x07\x2e\xc1\xf9\xb0\xb8\x5f\xc2\xa5\xd8\x3d\xa1\xeb\x55\x70\x1d\x05\x04\x96\x77\x3b\x23\x7a\x3d\xf9\x53\xf9\xb2\x3e\xaf\xf4\xea\x62\xee\x3c\x7e\x55\x9d\x8f\x7e\xbe\x98\xc7\xb2\x01\xb1\x51\xfa\xfc\x4e\xc1\x74\xf4\x70\x7f\xab\x02\x88\x29\x01\x59\xbb\x54\x36\xb0\xbb\x5d\xdb\xee\x5b\x3f\x41\xfa\x1a\x6b\x6e\xef\xb5\x25\xb4\x46\x94\x6a\xd9\x48\x62\x4b\x62\x90\xd1\x13\xb5\x26\x7e\x40\x92\x4b\xd9\x5e\xae\x46\xa4\xf5\x72\xbe\x27\x71\x9b\x91\x4e\x91\xcf\x8d\x28\x05\x53\x0d\x0e\xc2\xeb\xc9\xdf\xb3\x9c\xab\x74\x17\xc4\xc1\x77\x1e\xea\xa1\x1f\xa1\xac\x43\x3c\x3b\x25\xe8\x69\xa8\xe8\x01\x42\xe0\x94\x9d\xdd\xa5\x2e\xa5\x5c\x50\x9e\x61\x33\x5d\x6c\xea\x5c\xbb\x32\xca\xb7\x69\x1a\xa0\x06\x29\xf4\xcb\x15\x4e\xc8\x96\x22\x53\xbf\x2f\x5a\xe8\x6a\xa5\xd3\x90\xaa\x34\xc3\x7c\xf6\x71\x1f\xce\x3c\x1f\xff\xb6\x39\x06\x54\xb7\x25\xd6\xd6\x37\x45\xd9\xd8\x3e\x65\x01\x80\xf6\xd7\xe5\x94\x6b\xc6\x7a\x90\x5e\x59\x94\x73\x3b\x49\xcd\x03\xf7\x11\x90\x0e\x5a\xe3\x8e\x6b\xb1\x3c\xcb\xe9\xf0\xb8\x8d\x42\x24\x61\xfe\x52\xd6\x09\x49\x05\xc5\xc0\x76\xf0\x77\x1d\x2d\x3b\xdf\xee\x28\x34\xb6\x20\x3a\x76\xd0\x77\x38\x63\xdd\xd4\xa3\x2e\x7f\xc2\x76\x63\x1f\xae\xf8\x4b\x87\xf3\x9d\x88\xf9\x87\xe2\x82\xfa\x34\x98\x04\xdf\xae\x69\xe8\x1b\xe9\x0e\xc2\x46\xb8\xde\x43\x3f\xa1\x1a\x67\x63\x32\x17\x26\xe4\x96\xed\xda\x54\x68\xd5\xfe\x10\x36\xb6\xf0\x29\x57\x61\x36\xdd\x84\xe1\x47\xf1\x24\x4b\xe2\x3c\x35\x57\xbd\xc7\x07\x58\x08\xbd\xa4\xe9\x40\x21\x90\xc1\x17\x2c\xf8\xa6\xf4\xbf\xb3\x1e\xe9\x6c\x23\x88\x2d\xd8\x5f\xa4\x68\x0c\x47\x62\xf5\xac\xec\xf1\x15\xae\x23\xb5\x6e\x9c\x91\xc1\x3f\xd3\x79\x6d\x60\x16\x84\x7d\xb1\x5c\x5a\xf6\x2c\x50\x97\x05\x14\x69\xf0\xb5\x40\xbe\x07\xa5\xa0\xd0\x28\x0f\x45\x05\x8f\x7d\xe1\x54\xe2\x04\x36\xdb\x6a\xa6\x2a\x87\xb0\xc6\xaf\x1a\x20\x57\xe8\x21\xa4\xf3\x2f\xfc\x88\xe7\x50\x3f\xf8\xa3\xbe\xfa\xde\x42\x12\x5a\x8e\xd4\x2d\xe5\x37\xdc\x61\x95\xf7\x00\xca\x8e\xfe\x29\x76\xbb\xf0\xa7\x06\x28\x8a\x7b\x96\xfd\x45\x15\x67\xac\xfe\xc0\xb6\x50\xbb\xab\x42\x02\x72\x2f\x0f\xa6\x50\x90\xb0\x2c\xbe\xf2\x2f\x76\x47\x69\xf8\x91\x3f\x2d\x7f\xe8\xad\x02\x3b\x2d\x72\x58\x15\x29\xf8\xe7\xc2\x7b\x67\xc3\xe4\xd9\xc7\x72\x10\x12\x49\x2f\x07\x82\x8d\xbd\xbf\x4d\x92\x29\x0d\x80\x99\x16\x96\xd7\xa4\x4d\x10\x7e\x46\x8c\x3a\x8f\xa0\x47\xc4\x59\xb8\xc5\xe8\xe7\x9c\x66\x5e\xc5\x4f\xb7\x60\xba\x57\x3b\x62\xed\x1e\xae\x95\x76\x03\x5b\x51\x71\x34\x24\x0f\x74\xbc\xd4\x4a\xe1\x53\xac\x8f\x1a\xa3\x90\xb0\x01\xe2\xe9\xfa\x01\x63\xf5\x3d\xf1\x50\xc7\xa9\x38\xb0\x29\xcf\xc6\x16\x0c\x92\xe2\x31\x32\x90\x8e\xa1\x16\x3b\x6f\x4d\x84\x9d\xad\xe9\xff\x73\xf5\x6e\x59\xb2\xea\x3e\x13\xe7\x7b\x0f\xa3\x5e\x6a\x54\xfd\x60\xc0\x09\xae\x04\xcc\x9f\x4b\xe5\xce\x5a\xab\xe7\xde\x0a\x45\xc8\xe4\xf9\xea\x9c\xbd\x90\x49\xee\x17\x63\xcb\xd2\x2f\x5a\x7a\xfe\xaf\xa7\xf8\xab\x40\x30\x96\xa8\x00\x68\x84\x47\x70\x21\x44\x0a\xc3\xcb\x04\x9f\xcd\x5c\x1a\x4a\xc0\x95\x71\x94\xbe\xef\xe8\x11\x5a\xad\x59\x03\x7b\x48\x32\x9c\x92\x19\x0b\xf7\x97\x40\x0c\x3b\xe3\xf6\xf6\x2c\xfd\xa5\x38\xcc\x2d\x58\x0a\xd2\x97\x95\xed\x22\x7f\x1f\x18\x02\x7c\xb2\x74\x02\x76\x81\xe4\xae\x34\xdb\xa5\x47\xa5\x4b\x2c\x1d\xd2\x4b\x8b\xdd\x8a\xe0\x3b\x84\x57\x52\x83\x6d\xa2\xff\x20\x41\x61\xeb\x8b\x68\xab\x18\xd1\x0e\xac\x02\x18\x6e\x11\x0a\xea\x41\x2a\x5c\x44\xa9\x15\x0d\xa8\xf0\x1b\x64\x65\x8f\x4a\xe4\x35\x99\x28\x90\x15\x30\x08\x78\x05\x76\x22\x07\x02\x0a\x1f\
[binary data: several kilobytes of escaped hex bytes (\x..), not human-readable; elided]
b\x82\xbe\x1e\x88\x81\x0e\x4d\x3b\x9e\x69\x8c\xc0\x7b\xe7\x23\xfb\xb4\x5e\xff\x7c\xda\x08\xef\x65\xfd\xe1\x21\x14\x6f\x18\x0b\x44\x6d\xaf\xe8\x4b\x06\xbc\xa7\xda\x2d\x6a\x8e\x3b\xff\xf5\x28\x70\xbb\xcd\x4c\x89\x40\x21\x40\xca\xe3\x45\xde\xb7\x63\x52\x04\xf0\x36\x1b\x0a\x3b\x39\x16\x59\x6f\x6c\x75\xa1\x62\xaf\x3d\x9b\xfc\x6d\x40\x80\xb0\x9b\xd3\x3b\xea\x9b\x29\x6a\xca\xe9\x66\xd3\x4f\x27\x69\xbf\x4d\xa6\xc7\xac\x92\x49\x6e\x9e\x5a\x6c\x83\x9b\x3e\x20\xcc\xc2\xf6\x6c\x12\x8a\xc7\xa4\x21\x00\x17\x96\xe5\x77\xc6\xbd\x66\x27\xad\x33\x44\xb6\x61\x7b\x18\xbe\xec\x2e\x71\xcb\x65\x8b\xce\xa5\xa7\xd9\xc4\x2d\x99\xa0\xb4\x42\x23\xa4\xe7\x61\x86\x68\x92\xd9\x85\x7b\xcd\x03\x85\xd5\x0e\xa4\xda\x71\xf7\x99\x78\x06\x80\x9c\x15\x8b\x73\x30\xb2\x44\xa6\xb5\xe2\xb8\x69\xab\x32\xe9\x7a\x26\xca\xf9\x94\xf9\x88\x0b\x96\xbe\xed\xe1\x26\x68\xf9\x9f\xe4\xd2\x88\x6b\xe6\x96\x72\xd4\x4e\x99\x0f\x08\xe3\x6f\xd5\x67\x38\xe0\x1b\xd7\x97\x32\xef\x1a\x1b\x39\x32\xe4\x6c\x90\x00\xc0\x47\x24\x47\x2b\x06\xc1\xf0\x59\x96\xea\x35\x84\xe7\x73\x6a\x15\x11\xdf\xd9\xbb\x85\x08\x82\x46\x8b\xd2\x38\x3c\x42\xfd\x68\x56\x44\x5e\x1d\xb9\xac\x0f\xf9\x3b\x8f\x1c\x07\xdd\xb6\x40\xa7\xde\xa1\x38\xf9\xc3\x23\x35\xa3\x1a\xca\x14\xfa\xa6\x63\x3d\x4a\x6e\x22\x40\x4c\x2b\xf4\x76\x71\xc4\xb0\x4e\x71\x25\x52\x40\x18\xcc\x9c\xe3\x92\x21\x8c\x4b\x9f\xc2\xfe\x6a\xc3\xbb\x66\x7b\x90\xde\x81\x7c\x87\x78\xd2\xa4\x31\x3f\xd0\x2e\xcb\xc2\xae\x31\xe2\x8e\xf3\xca\x1a\xd1\x8b\x76\xb3\x54\x2b\xf4\xb6\x3f\xce\xb3\x8b\x13\x10\x67\xb2\xa6\x27\xe5\x60\x1c\xfd\xb4\x8b\x61\x0b\xf6\x34\xb8\x63\xe4\x67\xf7\xf7\x43\x8b\x28\x26\x1d\x42\x6a\xd5\x88\x87\xaa\xac\x89\x51\x06\x07\xf4\x86\xb9\x39\x3c\x2d\xfa\x48\x27\x09\x51\x99\x61\x87\xca\xeb\xd5\x46\x22\x8f\xb4\x6b\xa3\x70\x59\xf9\x46\x3c\xfb\x7d\x24\x62\xde\xda\xd4\x55\x46\xc7\x1f\x67\xe1\xab\x9d\x2c\x4e\xa3\x88\x0c\xfd\xc8\x3d\x55\x69\xbc\x34\x70\x40\xcc\x0c\x09\x04\x03\x2d\x5d\xc0\xe2\x50\xc5\x92\x3a\xe5\xeb\x98\xa5\xf7\x01\xc4\x47\x36\x75\x76\x04\xc2\xe4\xc2\xf6\xba\x75\xc2\x0e\x45\x9f\x01\x32\x40\xf6\xb1\xa3\xa5\x71\x32\xf0\x11\x04\x00\xb7\x3a\x53\x7c\x75\xf3\xa0\xaf\x01\xc8\x52\x2e\x76\x04\xff\x59\x42\xbc\x60\x6e\x39\x06\xb6\x81\xa2\x8f\x25\x4a\xcc\x27\xc3\xa8\xf7\xe9\x0d\x5f\xa0\xa2\xf9\x8c\xef\x35\x40\xd1\xf6\x35\xe7\x17\x75\x77\xd0\xa3\x58\xb5\xc1\xf4\x47\x8a\xc0\x1e\xe3\xf4\x76\xa6\x5b\xa5\x6a\xf5\xee\x7d\x56\x35\x43\xec\xf9\xf1\x95\x4a\xb4\x72\xed\xcd\xd7\x68\x46\x53\x61\xdc\x4b\x27\xd9\x5b\x9c\x9a\xce\x61\x7a\x9f\x13\x47\xb7\xed\x19\x22\xe4\xf7\x25\x8a\x1e\x48\xd1\xd5\x07\xcd\xb2\x4a\xec\x39\x82\xa5\xc0\x2d\x13\xa5\xe4\xae\x4d\x40\xa3\x0f\x97\x46\xd7\xba\xd6\x7a\x8f\xa5\x08\x8d\xbe\x4b\x8a\x52\x3d\xa2\x48\x78\x6f\x46\xee\x9c\x02\x37\x01\x56\xdd\xf1\xb8\x69\x99\xe6\x6a\x41\x77\x94\xd8\xec\xcc\xc4\xa5\xc8\xb2\xbf\x09\xd1\x77\xd1\x5e\xf3\xd8\xe9\x07\x03\xcf\x0a\x73\xe9\x03\x80\x0c\x38\xd6\x6d\xcf\x73\xb3\x99\x94\x01\xa3\xdc\x58\xe4\xba\x8f\xe9\x43\x34\x7f\x57\xf0\x3a\xf7\x08\xd2\xd9\xf9\x41\xd1\xb6\x66\xe6\x1f\x8d\x14\x07\xe5\xe2\x94\x7d\x50\x9a\x17\xb2\x88\xb3\xdc\xa6\x80\x4e\x0b\x8a\x8c\x18\x0f\x9c\x9f\xb8\xc4\xd9\x3d\xd0\xb4\x20\xa5\xf8\x75\x33\xa9\xdd\x7c\xde\x19\x30\x7b\x06\x00\x20\xf0\xd4\xe3\x1d\x0a\x49\xe0\x74\x40\xac\x47\xf5\x56\xad\xab\x77\xe5\xb8\xb4\x0f\x7b\x69\xee\xf8\x58\x94\xb3\x37\xc5\x09\x44\xce\x48\x16\x64\x17\x1c\x1d\xc4\x35\xcb\xb8\x41\xd1\xf9\x5f\x5a\x62\x7e\x3e\x34\x1c\x6f\xcf\xd2\x1a\x44\xee\xc1\x9e\xac\x0f\x66\xb5\x2d\xd1\xe9\x66\x0e\x7a\x8c\xf3\xd0\xd7\x80\x93\xf7\x77\x87\x1b\xbc\x5c\x1f\x2c\x72\x70\x2e\x94\xae\x1a\x18\xbc\x2f\xe0\xf9\xa6\x8f\x62\x69\x67\xd9\x07\xfe\x97\x40\x6b\xce\xec\x
5a\xd8\x85\xf3\xac\x5b\x38\x20\xb2\x2e\xdf\x9a\x16\x31\x68\xc8\xb8\xbe\xaf\x88\x82\x82\x77\x4a\x54\xbb\xf5\xfd\x64\x57\x7b\x4f\xaf\x39\x20\xe8\x5e\x41\xfa\xfa\xa9\x7d\x43\xf7\xe4\x9f\x80\x9d\x3d\x77\xb8\x96\x31\x04\xc1\x03\xb5\xda\x71\xf7\x78\x80\x0f\xc2\x79\xb2\xa7\x82\x17\xd0\x1e\xc0\x61\xaf\x5a\x6d\x62\xbc\x0c\xa2\xa9\xf9\x6e\x0a\x34\x0e\x5c\x35\x4f\xb0\x75\xd2\x61\x71\x03\x9d\x35\x40\xe1\xfa\x04\xb7\xda\x7b\x07\xce\xab\xbe\x64\xc9\xe1\x6b\xdf\x6b\x4d\xd4\xcf\xa5\x1e\x1d\x8d\xf9\xab\x71\xab\x8f\xdb\xe4\xa1\x5a\xa1\x68\x7c\xc4\xcc\x71\xa1\xef\xc0\x4c\xe4\x2b\x11\xe5\x0a\x89\x63\x4c\xaf\xf3\x5a\x38\xb6\x6c\xe6\xb7\x3f\xfe\x80\x5c\x3f\x84\xfc\xa5\x39\xd0\xcc\x32\x56\x85\xff\x6d\x17\x02\x24\x49\xb5\x16\x2c\xf8\x1a\xbc\xeb\x65\xd3\x86\x01\xf6\xae\x10\x8f\xcb\xec\x9e\x1f\x06\x00\xa1\xeb\x60\x97\xeb\xed\xbc\x5f\x87\x5d\x0f\xb8\xc2\x5e\xca\xd7\x00\xc7\x8a\xdb\x49\x38\x67\xd7\x21\x26\xb3\xb9\x62\x44\x6c\xd7\xec\xf3\x13\x57\x26\xbc\xb5\x7b\xf8\x88\xb7\x66\xc5\x09\x3b\x35\x5e\x75\xfd\xa8\xa5\x36\xc8\xcd\x3f\xe2\xa7\x71\xff\x00\x60\x82\x56\x7d\x7b\x30\xc4\xae\xbe\x77\x34\x94\xf8\xde\x35\x7c\xb5\x96\xeb\x62\xcb\x1d\x89\xf5\x41\xb5\x16\x84\x15\x4a\xa8\x6b\x98\x18\x37\x3a\xc3\xb6\x57\x25\xc0\xbe\xf6\xe2\xfb\x44\xbe\x38\xd9\x29\x54\xd8\x36\xaf\xce\x37\x7d\x09\xb6\x9b\x4e\x40\x02\x76\x18\xeb\xe7\xa5\xc9\x1f\x42\x54\x9b\xc3\xf8\xa3\x26\xda\xf0\xe4\xc0\x69\xb2\xc7\xa1\x5a\x4d\xb0\x9d\x61\x15\xed\x03\x09\x49\x08\x61\xd2\xe9\x91\x98\xbd\x7d\xec\x00\xaf\xfa\x19\x48\x6f\x6b\xdc\xf8\xdb\xe2\x28\xec\x8d\xd8\x4f\xb7\xa3\xb7\x43\x30\xf6\x21\x2b\x48\xd4\xe7\x26\x10\xf5\x41\xc1\x02\xbc\x92\xcd\xd1\xe2\xc9\xb0\x3e\xaa\x0e\x0b\x61\x06\x32\x1f\x76\x17\x49\xda\xad\x7c\x2f\x31\x8d\x75\x88\xaa\xad\x9e\xc9\xbf\x55\xd5\xed\x5b\x9d\xdf\x5a\x72\x7e\x2f\x2d\x4e\x7a\xf3\x3c\xa7\x3e\xac\xc8\xa8\x44\x41\x00\x07\xe4\x97\x08\xa4\x1d\x14\x61\x6f\x24\x3c\x81\xce\xf3\x92\x5d\x73\x7f\x8e\x67\x6b\xf7\xb1\xc9\x02\x16\x36\x52\x85\x37\xda\x2e\xb9\x17\x77\x1a\x45\x3e\x0d\xf3\x4d\x09\x0a\x62\x36\xcd\xa3\x09\x95\xa1\xb0\x38\xdf\x5b\x11\x4e\x36\x43\xd5\xc4\x06\x1c\x91\x8c\x1e\xd4\xc1\x66\x73\xeb\xd6\x31\x4a\x1e\x46\x00\x4c\x36\x1f\xe9\xb2\xe5\x06\x32\x2f\x9b\x16\x5b\x01\x98\xac\x34\x5d\x93\x2b\xc0\xd9\xae\xf7\x2d\xfb\x29\x24\x71\x11\xd9\x74\xf3\x2f\x19\xd7\x9a\x67\x0d\xc9\x99\x49\x85\xd5\xad\x84\xc3\x6a\x2b\xa3\x5a\xf3\xb4\xdc\xc8\x42\x54\x97\x9c\xf9\xe6\x96\xde\xf3\x9a\x19\x7b\xbc\xd9\x25\x0c\x7a\xf6\x7e\x23\xae\xcd\xe4\x4e\x00\xd7\xfe\x01\x06\x2d\x92\x80\x7d\x0e\xb4\xf6\x84\xd6\xb6\x6a\xc0\xfa\x05\x97\x0a\x02\xde\xb2\x90\xbb\x58\xc6\xf7\x44\x22\x6c\x5c\x11\x94\xd0\xbf\x8f\x9f\xdc\x4b\xc5\x23\xb5\xeb\x5f\x17\x3d\x57\x53\x8a\x57\x0f\xfa\x99\x95\xba\x81\xdb\x83\xcf\xfb\xc3\x41\x5d\xeb\x85\xf7\x13\x65\x34\xe0\x7e\x69\xb4\x74\xba\xcd\x5f\x2b\x7d\xfe\xed\x6a\xfb\x55\xb4\xc6\x16\x8b\x3b\x74\xb4\xb2\x4c\x7b\x6e\x9c\xe8\x07\x1b\xe3\xf8\xf1\x03\x72\x60\x78\x35\x9c\xcb\xbd\x7f\x70\xb9\x75\x5b\x1d\x21\xa6\x67\x39\xef\xc8\xd3\x21\x23\x7b\x2f\x75\xa8\xa1\xe8\xed\x54\xee\x35\xf4\x4f\x51\x42\x02\x26\x4d\xe9\x7b\xb9\xbf\x98\x9f\xc1\x0d\xd1\x03\x6c\x1d\x7a\x9b\x6d\x89\xfb\x88\x42\x84\x35\x6e\xf7\x20\xc4\xd6\x88\x48\x01\xf4\xa6\xb9\xbe\x3b\x25\x82\xa1\xd0\xed\x95\x80\x35\x2b\x4c\x4c\xdd\xdc\xb2\x64\x56\x90\x61\x47\x87\x88\x5b\x7b\x3c\x54\x0c\xe2\xb6\x49\x43\x7f\x67\xed\x27\x6f\x51\x49\x78\x4b\x89\x1f\x64\x78\xeb\xb4\xbd\xf4\x7c\x6b\x7a\x73\xf9\xed\x73\xc8\x8f\x5a\xfa\xd7\x7b\x47\x68\x4b\x72\xab\x81\x03\x1e\xd1\xa4\x00\x81\x6b\xe6\x21\x6d\x15\x87\x7b\xfb\xd4\x91\xa8\x69\xa6\x1d\x7d\x09\x77\xfb\x59\x17\x95\x4f\x86\x47\xe8\x72\x43\xfb\xd0\x00\xec\xf6\x04\x6d\xe8\
x48\xf5\x51\xf8\x78\xe4\x51\x64\x8d\x80\x94\x94\x58\xb9\x7d\xe2\xec\x2d\x81\x02\x27\xd7\xd4\xb7\x37\xad\xb9\xa1\xb9\x15\x32\xe3\x46\x0b\x85\xdc\xec\xae\xcd\x49\x27\x14\x81\x8d\x66\x09\xcd\x6e\x86\x82\x2f\x37\x6f\x76\xd1\xc7\x0e\x12\x38\x5e\x13\xd5\x6d\x56\x42\x55\x39\x69\xd7\xf6\xaa\x07\xfd\x3d\x31\xf2\x42\x2c\xf4\x31\x47\xe3\xc3\xea\xcf\xa8\x22\xac\x45\xc4\x27\x1a\x06\x5b\x1a\xf5\x4d\xb4\x75\x44\x03\x3b\x47\x1c\xfa\x9f\x6b\x14\xbe\x82\x21\xce\x87\x1e\xf6\x5e\x14\xb2\xa1\x02\x37\xf1\x0b\x4f\xce\xfb\xfe\x01\xc9\xdb\xb1\x15\x04\x19\x66\xc8\xb2\xe6\x7b\x46\x5b\x0b\x19\x40\x3c\x42\x7b\x16\x4e\xe5\xe1\x99\x69\xaf\x69\xbe\x1a\x7d\x7c\x8e\x4d\x3b\x5a\xbc\xd9\x0f\x85\xdc\x79\xb2\x61\x18\x9c\xf8\xbe\x2e\x31\xc6\x1b\x73\x57\xb4\x59\xe5\x77\x55\x07\x35\xa4\x88\xee\xa8\xf0\x99\xf9\xb4\x47\xd5\xaa\x0f\xb8\x7f\xfe\x00\x22\xe0\x96\x3f\x7a\x41\x69\xfe\x3f\x33\xfc\xee\xd6\xdd\xbb\xbd\x15\x51\xc3\x2c\x7f\x0f\x59\x99\x3b\x35\x3c\x20\x95\xdc\xe9\xe5\x76\xda\x12\x4f\x4e\x3a\x38\x4c\x60\x72\x60\x57\x86\x5c\x4b\x98\xdd\x0f\x7e\x55\xb0\xab\x23\xe9\xfc\x3c\x16\xde\x79\xbb\xac\x1a\xe6\xab\x4f\xa4\x79\xee\x0f\xf6\xf3\x91\x54\x37\xf0\x5d\xaf\x0f\x9d\xfa\x83\xdf\xd8\xda\x61\x0c\x63\x67\xb8\x76\x2b\xf0\xcc\x3a\xe6\xfb\xdb\xd4\x7b\xc7\xf5\xdb\xbe\xab\x3b\xb7\xf2\xbd\xe7\x32\x37\x40\x79\x5e\xbd\x1a\x5b\xdf\x50\x7e\x71\x23\xb5\xff\xb1\xe6\x7a\x49\xa9\xd0\x0c\x4f\x69\xb7\xe9\x1e\x5d\x15\xb3\x95\x38\xbe\x5e\x3b\x18\xd6\xc5\xcd\xa5\x3b\x9e\x97\x66\xcf\x68\x38\xe6\xe3\xb6\x5b\x40\xef\xda\xf2\xf9\xd6\x0b\xda\xe6\x30\x4e\xff\xea\x7d\x52\x72\xd7\xea\xa3\x75\xde\x6a\x74\xac\xb9\xea\x09\x20\x89\xad\x8a\x52\xab\xd4\x4a\x8f\x96\xb1\xbd\x2a\x4d\x05\x53\xef\x08\xac\x75\xaa\xa4\x7d\xd7\x8e\x3d\x21\x6b\x9c\xbf\x78\x8b\xcd\x22\x70\x63\x65\x48\xd9\x5a\x3a\x4d\x34\x78\xbe\xe6\x57\x24\xcb\x99\x49\x4d\xc8\xb0\x13\xb5\x48\x3f\x8b\x6d\xc1\x3e\xc9\xad\xbb\xe6\xdf\x36\xfb\x8a\x59\xd6\x78\x40\x6c\xb9\x9f\x95\x0f\x75\xbf\xea\xee\x0f\xfc\x2a\x14\x9d\x35\x1d\x1c\x01\xcf\x8f\xbd\xa3\xd2\xf9\x40\xac\x72\x9e\xae\xd6\x5b\x23\x20\xc2\x7d\xce\xe9\x63\xdc\xc8\x66\x70\x7f\x48\x08\x3b\x42\x98\x62\x0d\x45\xef\x35\x11\xe6\xb9\x92\x74\xb4\xa6\x87\xbb\xcd\x97\xb7\xf5\x47\xf7\x81\x35\xf2\xf2\x9e\x95\x5b\xb2\xbc\x9d\x93\xfa\xe5\x28\xf4\x32\xdf\x30\xf4\x00\x8c\x8b\x61\x7e\x3a\x52\x81\xb8\x74\x61\x91\xaf\x53\xdf\x1e\x5a\x6e\x1c\x02\x68\x5f\xc7\x33\x68\xe9\x7b\x90\xb5\xcc\x24\x99\x75\xb9\xe6\xd7\x3d\x73\xb6\x56\x40\x3a\x02\x6b\x6c\xf5\x42\x0d\x4e\xf3\xf5\x93\x3d\x93\x74\x89\x81\xd3\xe5\x1a\xe0\xf6\x1a\x63\x59\x87\xac\xab\xb0\x7b\x47\x76\x61\x0d\xb9\x20\x8b\x3b\xcb\xc0\x47\x72\xa3\xbd\x8b\x5a\x6e\x06\xc7\x7e\x96\xea\xd1\xd7\xda\x06\x11\xeb\x7c\x02\x85\x58\x77\x0b\x83\xa3\x34\xd8\x81\x72\xca\x7a\xac\x63\xa6\xce\xbe\x42\x5f\x99\x3b\xd8\xef\x7c\xbc\xa5\x2a\xd2\xc8\xc7\x61\xde\x61\xc4\xea\x78\x03\x56\xd6\x72\x28\xec\x49\x8c\xf5\xb5\xa2\xbd\xfd\x97\x55\x18\x29\xbb\x03\x93\x2d\x09\x8c\xe4\x4c\x14\x46\x35\x33\xed\x9a\xea\xd8\x1f\xfe\x91\x04\x9e\x5d\x79\x35\xa0\xb3\x5f\xbc\x0a\x3d\x2f\x94\x8f\x45\xea\x28\xca\xf9\xf7\x9b\x88\x7e\x2f\xcc\xc7\x27\xaf\xbd\x5d\x19\x7b\xce\x4e\xc7\xb6\x2e\x40\x5b\x7c\xf6\xab\x38\xef\xe7\x22\xbf\x62\xf9\xbf\xc8\x0a\x8c\xe5\xef\x11\xa9\x62\x85\x0f\x67\x8b\xe8\xee\x45\x5e\x09\x10\xde\x35\x82\xee\x0e\xa9\x67\xd9\x4f\xd9\xcb\x7d\xa0\xeb\xf0\x42\x2a\x25\xd7\x97\x93\x6b\x29\x33\xc4\xb8\x2a\xe5\x2c\x1c\xfb\xde\x44\x56\x96\x32\x5e\xc8\x69\xaa\xb4\xf7\xfb\x8c\x00\x0b\xe3\xa5\xb1\x2e\xc3\x25\xa1\x4f\xd8\xae\xaf\x4b\x7b\xe7\x28\xff\x82\xf1\x1c\x6d\x7c\x48\xdc\xac\x53\xde\x61\xa1\x39\xeb\x5d\x23\x16\x26\x7b\xe0\x98\xd1\x63\x5f\xd2\x41\x11\xab\x0b\x50\x5d\xa4\xb7
\xef\xfd\xa5\x24\x1e\x6b\xfa\x5f\xfe\xa6\xc2\x88\x39\x0a\x6c\x35\x8b\x41\x99\x0b\xc2\x65\xed\xa5\xd5\xef\xcb\x42\x7e\x3b\xc6\x64\x38\x67\x6e\x3c\xdb\x25\x8f\x89\xae\x1e\x58\x7b\x92\x31\xbb\xc3\xd7\xe3\x18\x16\x75\x2e\xac\x65\x73\x31\x1a\x0a\x5c\xd9\x5f\xfd\x16\xc0\xd9\xc5\x19\xd2\xf0\x71\x1d\x6a\x2d\x2f\xfd\x8a\x98\x21\xbe\xd0\x3d\x12\x5b\xa7\x37\xbd\xc9\x4b\x7a\x6f\x6c\x5c\x9a\xc5\xe0\xa5\x25\x5d\x9a\xb6\x78\xda\x25\xb5\x20\x75\x0c\x36\xbd\xc3\xd0\x9c\xbb\x6b\xb7\x24\x75\x30\xc9\x82\xe7\x1e\xa0\x15\xbd\x04\x24\x7d\x77\x61\x4f\x99\x59\xcd\x55\x04\x8d\xea\x09\x43\x93\x32\xc8\xf2\x30\xd5\x0c\x5b\x3c\xa6\x6d\x0f\xd3\xbe\xfa\xf4\x52\x7a\x81\x53\x57\x7f\x89\xc3\x58\xfb\x00\xae\xaf\x4c\xbc\x5c\xd2\x22\xce\xfa\xec\x9f\xc8\x78\xbc\x31\x50\xb0\x24\xb1\xea\x67\x40\xa9\xfb\x49\x6b\xce\x1e\x79\xa9\xdd\xcc\xa9\x68\x47\x4f\xeb\x0f\x6a\x89\x9f\x1c\x51\x60\x0b\x94\x1f\xf4\xea\xdb\x64\x11\x69\x1f\xc7\xc4\x1b\x09\x98\xca\x4c\xe2\x92\x00\xf1\x3a\xc9\xd1\x29\xcf\x4b\xca\x33\x6a\x29\x5e\xb1\xa1\x67\x85\x17\x6a\x3e\x80\xc5\x5f\x9c\x3e\xd2\xee\xe2\x53\x9a\x9d\xf8\x00\x26\x4f\xc0\x58\xbe\x39\xfa\xbe\x7c\xe3\x5b\xc2\x4b\xeb\xcd\x3b\x5f\x1a\x1c\xef\x5d\xc8\x73\xf8\xc1\x56\x37\x14\x65\x39\x5f\x8b\xc0\xee\xae\x3a\x13\x20\xf1\x10\xc8\x98\x2f\x0a\x77\xcf\x8a\x30\x17\x28\x5e\x2c\x70\xa4\xfb\x53\x6e\x64\x8e\xc1\x7b\x33\x14\x75\x3e\x57\x0e\x4c\xcc\x80\x45\xf8\x94\xa4\x33\x9b\x72\x68\x67\x6e\xb0\xc5\x19\x48\xa0\xf0\x21\xcf\x55\x6d\xfa\xb9\x16\xad\x88\x71\x60\x0e\xef\xb3\x6d\xa7\xd6\xd4\x6c\x2d\x2f\x2e\xa8\x64\x6d\x37\x76\x5a\x9d\x42\x71\xe6\x9a\xd8\xdd\x77\x19\x1a\x1e\xa3\xb5\xf7\x2f\x49\xc9\x3b\x68\xfe\xe2\xef\xeb\x93\xe3\x19\x4d\x4c\xdd\xf1\xf2\x52\x79\xd4\x8c\xd1\x61\x9d\x5f\xc2\xcc\x07\x8a\x1d\xba\x03\x49\xa3\xad\x28\x34\xdd\x52\x14\x3c\x47\xf6\x8b\xbc\xf9\x90\xa0\x71\xbe\x8c\xb0\xf7\x09\x2e\x35\xce\xfc\x74\xb9\x59\xb3\x93\x2e\x4d\x7b\x41\x30\x0a\xca\xb7\x6a\xce\x91\x40\x0d\x8b\xdc\xf3\xe0\xe8\xcd\xd9\x23\x84\xf9\xbc\x5b\x47\xec\xef\xab\x41\xe6\xf9\x3d\x9a\x51\xab\x04\x94\x9f\x76\x03\xdf\xbb\xf6\x19\x0c\x40\x51\x8f\x30\xfc\xf3\x39\x5b\xa7\x92\x18\x96\x39\xbd\xde\xf1\xca\x9b\x1d\xae\x40\x33\x3d\xcf\x4d\x05\xeb\x10\x88\x1b\x7f\x59\x67\xdb\xbb\x6a\x33\x2a\x12\xfd\x7c\xee\x8d\x42\x1f\x4d\x12\x8f\xd0\xa6\xa1\xcf\xde\x6c\x8d\x36\xea\x18\x24\x17\x2c\xd5\xba\xb1\xbf\x67\x8e\x21\xac\x99\xe9\x0c\x33\x34\xfc\x7d\x0a\xb4\xd8\x65\x4d\x2d\xd2\xf2\xfb\x69\x7f\xdb\xe7\xdf\xcf\xde\xd1\xf5\x7e\x60\xdf\x9e\x77\xcf\x37\xe1\xdb\x5f\xde\xe7\x2b\xd1\xb9\xfb\xbc\xfe\x20\xff\xe1\x56\x38\xb7\x9e\xfb\xd5\xff\xbd\x35\x98\x6c\x4d\x17\x86\x4c\xd3\x7e\xda\x42\xe4\xd7\x5b\x7b\xd4\x19\xf5\x15\x55\x3c\x81\xef\xf5\x7a\xa4\x7f\x6e\x58\x9d\x27\xcc\xba\x75\x45\x9c\xfa\xbe\xd6\x7f\xd1\x6b\x07\xdd\x3e\xd9\x06\xb4\x83\xb5\x48\x82\xde\x19\xf7\x7e\x00\xb3\x47\x4e\x58\x5d\x29\x60\x83\x7d\x63\xc5\x98\xb7\x35\x39\x63\x05\x81\x93\x74\xfa\x96\xd8\xf8\x04\x9c\xba\x0c\xb2\xe7\x3b\xe0\x12\xb4\xfa\xd4\xf4\xad\x41\xa0\x67\xcf\x9e\x58\x7a\x52\xeb\x27\x5d\x6d\xeb\xe9\x78\x52\x15\xcd\x25\xda\xcf\xcf\xfc\x2b\x26\xfd\xaf\xa2\xeb\x9f\x79\x7d\xf3\x7e\x3e\xf3\xa8\x17\x11\x01\x0f\x87\xba\xa0\x4f\x08\x01\x69\xdd\x44\x72\x3f\x38\x10\x4f\xa2\xe8\xd1\x3d\x1d\x2e\x9a\xb1\xe7\x04\xe1\x4d\x3f\x5e\x5c\x73\x62\xf1\xad\x3e\x9f\xf5\xeb\xe8\x0d\xe5\x9f\xeb\xb7\xf8\x64\x47\x2b\xa5\x6f\xdd\xc8\x9f\xeb\xe7\xed\x18\x4f\xb7\x07\xc7\xfa\xfe\xd4\x48\x37\xfa\xa9\x52\x27\xf9\x71\xbd\x75\x36\x3f\x7f\xc4\x4a\xf9\xa9\x6a\x9c\xff\xc0\x6d\xa8\xc5\xa6\x35\x3e\xd8\x3f\xd6\x26\xf3\xad\xd9\xf7\x8d\xd5\xf9\x8f\x23\x55\x65\x71\x19\x75\x18\x6c\xea\xfd\xb3\x9f\xbc\xfe\xf9\x03\xf7\x83\x54\x06\xb1\xef\xf3\xca\xed\x5
8\x9d\xad\xd7\xf6\x87\x5a\xf6\x3f\x79\xc0\x67\x9d\x30\x7d\x0c\x20\x69\x7c\xfe\x27\xbd\x85\xe7\x8f\x34\xe1\x9f\xb4\x2f\x59\xa0\x7c\x06\x27\xfe\xa4\x51\xa0\x7d\xeb\xf1\xda\x4b\xe4\x0f\xd5\x8f\xc0\xbf\x98\xba\x02\x29\x0f\x2e\x45\x35\xf9\xe3\x99\xf3\x3f\xae\x33\x02\x67\xab\x53\xea\xaf\xc5\x79\xee\xe0\xa2\x07\xc1\x7d\x87\x04\xf0\x2e\xde\xbd\x28\x40\xe0\x9b\xbf\x55\xe6\xc8\xa8\x78\xe7\xfb\x7f\xd4\x6b\x51\xfc\x18\xa5\x03\xa5\xdf\xaa\xf2\xb1\x09\x88\x59\x33\xf2\xd7\x63\x53\x30\x98\x54\x3e\xf0\xfb\x57\x97\x6e\x9a\x3f\x65\x3f\x1b\x7d\xdf\xf5\x8c\xc3\x1d\xed\x64\x7f\x4e\x5b\xe6\x85\xc7\xc7\x51\x28\xc3\xcd\xe3\xd6\x13\x25\xd9\x3f\xb6\x9c\x15\x27\x4a\x6c\x7f\x59\xdb\x61\x5a\x11\x41\x0b\x7b\x14\x7a\x7b\x86\xee\xed\x01\xab\x32\xc6\x82\x63\x2c\x05\xd2\x4c\xac\x7f\x9c\xef\x66\xed\xd7\xf2\xd1\xfe\x76\xc0\xff\x18\xe0\xf8\x0f\xc0\xbf\xce\xc6\xea\x4e\xb6\xc1\x59\xe2\x31\xff\x38\x0b\x45\xe7\xa6\xbb\xc2\xfc\x17\xce\x1a\x35\x91\xcf\xc9\x1b\x84\xd2\x96\xf7\xd4\x8a\xb6\xef\x9b\x16\x07\xac\xff\xd1\x78\xfd\x21\xa2\x05\xbe\x7f\xeb\xb3\xa9\xc0\x23\xf1\xa1\xdb\x9b\xef\x6f\x7d\x75\xaf\xe4\xcd\x4e\x92\x37\x20\xee\x53\x0a\x68\x56\x84\xce\x95\x80\xf4\xf0\xea\xdd\x17\x67\x40\xe3\xe4\xad\xfb\x0f\x0c\x62\x58\x76\x3e\x6d\x75\x80\xf8\x3e\xe4\x0c\xd8\x15\xa1\x49\xa1\xd4\x7b\x49\x8c\xc0\x0f\x0d\xf3\x3f\xfc\xe7\x62\x13\xbb\xd6\x08\xff\xd7\xde\xce\xd9\x11\x52\x94\xfc\x6b\x3f\xdb\x33\xd2\x82\x8e\x34\xc7\xb5\xfc\x75\x84\x7d\x9d\xfc\x3d\xd0\x8e\x7b\xc2\xfb\xbf\xa4\x1d\x10\x17\xad\x2f\x7e\x08\x33\xd7\x49\xe4\xd2\xc7\x0d\x4f\x83\xed\x20\xf5\xba\x2e\xcb\x86\x20\x12\x6e\x62\xf1\x71\xc2\x36\x52\xec\xe5\x6d\x6f\xb9\xed\x28\x97\xa3\xfd\x54\x6e\xd1\x80\xcf\x21\x40\x2b\xd6\x23\xe4\x10\x16\x0c\x34\x49\x16\x1c\x52\x03\x42\x74\x22\x57\x2a\xf2\xed\xa0\x2c\x20\x9d\x02\x64\xfa\xb5\x96\x02\x8a\x8a\x6e\x81\xd9\x15\x2d\xd4\x5d\xfa\xb9\xc3\x78\x7b\x98\x88\xc2\x76\x49\x8e\xd9\xdb\x9d\xf1\x16\xce\xde\xde\xa0\x59\x3c\x1c\xce\xbe\x40\xee\x72\x80\x64\x81\x3f\x3d\xbd\x88\xe9\x1e\x77\xe7\xdb\x90\x14\xc1\x25\x85\x02\xab\xcd\x56\xcd\xa7\xfb\x0c\x2a\x04\xd0\x2f\xe6\x55\x99\xae\x95\x44\x7d\x6b\x81\xa6\x90\xcb\x84\x2e\x81\x20\x26\x66\xaa\x43\x0f\x91\x82\x30\x3c\x41\x5c\xeb\x5b\x27\x31\x06\x8f\x27\x29\xc3\x79\xce\x05\xd1\xe9\x68\x02\xf3\xd3\x37\x9d\x0b\x45\x0a\x3c\xfe\xd3\x26\xb3\xe0\xfe\x27\x89\xfe\x27\x07\x51\xa7\xaa\x37\xc9\x8c\x50\x02\xb0\xfa\x07\xac\xa5\x2f\x4a\x08\xc4\x4b\x0a\x1b\x14\x83\x30\x19\xd6\x08\x01\x98\x5b\xe0\x70\xaa\x4d\x08\x40\xf9\xaf\x53\x2d\xf1\x06\xbb\x8a\xc0\x4c\xb6\xd6\x54\xa3\x29\x3d\x49\xd9\x0d\xd2\x01\x45\x12\x03\xc2\x55\x4c\xcb\x22\xa1\x86\x22\x70\xcf\x54\x10\x84\x2f\x23\x24\x04\xd0\x8b\x61\xe3\x12\xb6\xcb\x43\x14\x72\xeb\xe0\xc2\x70\x3a\x8a\x56\x6e\xd5\x3c\xb4\x06\xd4\x84\x00\xda\xe0\x60\xa4\xdb\x94\xdf\x83\xc7\x24\x4c\xd6\xda\x26\xf5\xdf\x7a\x84\x0a\x4a\xa4\xcc\x00\x1f\xe2\x09\xae\xbb\x6d\xc2\x10\xb5\x3f\xac\x56\x76\x42\xbd\xb5\x76\xad\x2d\x4d\x90\xff\x12\x03\x7f\x32\x69\x31\xec\x6a\xca\xf3\xf6\xd1\x03\xb6\xe2\x72\xcc\x59\xbf\xcc\x13\xa2\x35\xbe\x3e\xb4\x08\xdc\xd6\xcf\x31\x1d\x06\xee\x51\x9d\x30\x57\x17\xe8\x24\x72\x6f\x85\x4d\xc7\x99\x42\xde\x01\x81\x9a\x27\x9b\xe2\x6e\xe7\x58\x72\x50\x64\x9c\x64\x08\x74\xab\xd2\x2b\x06\x55\xa6\xf4\xeb\xcd\x54\x9b\x46\x04\xd5\x94\x24\x1f\x48\x89\x01\xee\x2a\x1d\x9c\xb3\xff\x66\x31\x19\x5d\x61\x80\xd3\x81\xda\x41\x93\x34\x36\xd2\xaa\x4d\xae\x47\xa5\x28\xc0\x3a\xae\xec\xa0\x7a\x54\xdf\x19\xbb\xc6\xad\x6d\xb3\xd9\x60\x98\x62\x9c\x25\x9a\x7e\x13\xc8\xb1\xa7\xcc\x19\x72\x61\xd0\xa3\x5c\x55\xac\x5c\xf4\x6e\x9d\xb8\xec\xa3\x4f\x27\xc9\x0b\x50\xe9\x02\x39\x2a\x83\x7b\x08\x70\x81\xc7\xb7\x1c\x9e\xe3\xa5\xb3\x19\x
2f\x6f\x7a\x8f\xe8\x82\xb2\x3c\x7b\x0b\x7c\xe4\xe0\xf0\x88\x18\x92\x99\x81\xe8\xd6\xb3\xfa\xa5\x34\x81\x3d\x2f\xe8\x9d\xaf\x2c\xec\x71\x36\x66\x5b\x57\xc4\x9a\x8d\xa4\xda\xef\x17\xd2\x5f\x76\x99\xcc\x82\x87\x85\x70\xa2\x10\x13\x88\x0c\xf8\xd1\x75\xc3\xbf\x24\x41\xa0\x95\x2a\xbb\x45\x23\xf9\xa4\x63\x0c\x29\x9b\xc1\x10\x49\x33\xec\x30\x86\xa2\x6d\x95\x47\x50\xfb\x19\x32\x27\x73\x98\xab\x76\x9d\xc9\x2e\x30\xc3\x1a\x52\x6f\x5a\xa9\x89\x20\xa4\x5f\x47\x37\x71\x2d\xbb\x1b\x4f\xcf\x26\x18\x11\xb4\xc3\x65\x15\x08\x3c\xba\xec\xac\xcc\x2b\x14\x16\x5c\x99\x89\x07\x52\x01\x81\xa2\x31\x86\x66\xc1\x43\x47\x56\xe7\x61\x61\x48\x03\xcc\xbc\xee\x95\xdb\x29\xc4\xd2\x8d\x4d\xe5\xc0\x61\x9f\xef\x91\x21\xdd\x2a\xe6\x56\x4c\xdf\x97\xba\xc5\xe3\xac\x1b\x05\x72\x7c\x34\x5f\xa0\x55\x30\x4a\x04\x72\x04\x9c\x4f\xe8\x77\xb3\x17\x7f\x72\xcc\xe8\x45\x7b\x1b\x5d\xc1\xc8\xcf\x01\x4d\x6a\xd6\x5a\x66\x32\x88\x60\x2c\x4a\xd5\x1d\x43\xeb\x7a\x54\x23\x7b\x2c\xe0\xd8\x71\x54\x75\x9c\xa0\xbc\x49\x39\x87\xc9\xe6\x79\x5d\x36\xe2\x91\xe6\xaf\x8c\xab\x1a\x73\x3c\x0e\x7a\xae\xc7\x08\xc1\x71\x1b\xec\x19\x8d\xdc\x8c\x08\x3e\x9e\x65\x45\x20\xc2\x98\x5d\x5e\x07\xa8\x3f\x7e\x34\x47\x04\xf1\x15\xaa\x21\x54\xc5\x5f\x24\x7f\x0e\xf1\xdd\xe2\xd4\xeb\x31\x20\x25\xcf\xf8\xc5\x75\xe4\xa8\x55\x81\xf4\x9a\x7b\xa4\x4b\xc5\x2c\x7b\x40\xe3\xde\xcd\xb9\x63\xff\x60\x4c\x7f\xa8\x74\xad\xbd\xe2\x27\x9a\xde\x92\xbc\x48\xed\x21\x48\x72\x7a\x87\xb0\x02\x4d\x46\x8d\x43\xcc\x4c\x57\xdc\x15\x16\x69\x24\xcd\x90\x3f\xd7\xbe\x1e\x1e\xde\x35\x2a\x51\x0d\xd3\xb9\x19\xac\x3e\x46\xa5\xac\x8d\x2d\x92\x1e\xde\x84\xd9\x9d\x94\x66\x39\x6a\x87\x15\xda\x98\x34\x4a\x34\x26\xd6\xab\x23\xe3\x37\x1e\x97\xeb\x93\xd2\xd5\xfe\x60\x18\xd1\xe3\xda\x83\xba\xfa\x68\xa0\x38\x58\xd2\x2a\xb8\xd6\xe7\x5b\x3d\x35\xb3\x07\xc2\x81\xf4\xcb\x90\x3e\x59\xea\x0f\xab\x41\x76\x6f\x94\x3e\xec\xc2\x2c\xf5\x74\x3f\xf2\xe3\xd2\xcd\x40\x94\x22\x0f\xfb\xd1\xf4\x49\x30\x66\xe2\x8e\x76\x1f\x3c\xf1\x7a\xec\x81\x70\x5f\x1e\xc4\xfe\xd1\x98\x79\xec\xd6\xaf\xb7\x7f\x34\x2b\x35\x20\x10\x87\x10\x12\x0f\x15\xae\x34\x6f\x2b\x9b\x3d\x6a\x03\x88\xed\xd7\xea\x25\x22\x86\xd8\xa6\x96\x65\xf7\x2a\xa2\x8b\x50\x92\xc2\x03\x06\xf5\x86\x50\x5c\x84\xbc\xc4\x5c\x93\x46\x8b\xbd\x94\x42\xc2\x0a\x7a\x13\x22\xbf\x3c\x20\xf4\x94\xfe\x84\xf2\xc7\x61\xef\x1f\x61\xa6\x90\xa0\x78\xda\xa5\x96\x40\xc2\xee\x5b\x90\x2e\x01\x0a\xd6\x22\xd0\x21\x25\xc1\xa5\x1e\xb5\x25\xf3\xd3\xbb\xa2\xa5\xeb\xbe\xde\xe3\x86\xad\xc4\xa3\x91\x4a\xc5\x5d\x6a\xce\x1e\x2f\xac\xb1\x85\x31\xe2\x03\x21\x54\x71\x4e\x3e\xb0\xfc\x25\xb5\x0a\xbf\x00\xd0\xaa\x50\x9a\xed\xe3\x6e\xf0\x50\xb7\xa2\xd9\xcf\xb0\x12\xf5\x22\xa4\xac\xf2\xf0\x6c\x98\x30\x05\x0c\x7c\xcc\x1f\xe3\x98\x2e\x76\x21\x54\x9a\xd9\xbe\x47\xe4\x54\xb9\xc2\x03\x67\xe6\xc2\x70\x77\x2d\x93\x87\x90\xc0\x81\x8d\xd6\xca\xc5\x1f\xd2\x5b\x5b\xf4\x76\x8e\x4d\x5e\x2d\xd1\xe0\x31\xb7\x34\x0a\x17\xb8\xe8\x92\x36\x90\x9a\xbc\x46\xc3\x94\x3e\x66\xbd\x5b\x8f\x72\xa8\x83\x02\x58\x8a\xce\xa2\xec\xf1\xa3\xc7\x24\x3b\x68\x02\x76\x3b\x3e\xb3\xbb\x6b\x0c\xab\x50\xa3\x03\x4f\xd9\xfe\x11\x7a\xfc\xf0\x70\x9a\xac\xdf\x24\x45\x51\x80\x0c\x56\xd8\x27\x0a\xca\xd1\x46\x48\x2e\x5b\x34\x8f\x0f\x97\x27\x9a\xc9\xbd\x1e\xca\x92\x1f\x83\xb7\x35\x1e\xa5\x8b\x44\x82\x07\x1c\x9c\x5e\xed\xb3\x20\x37\x03\xc4\x31\x78\x8b\x20\xab\xe6\xbf\x21\x32\x7b\x97\xc1\xee\xcf\xc3\x83\xa8\xdf\x5c\xcc\xba\x8b\xbc\x06\x94\x9a\xfa\x92\x66\x46\x3f\x79\x3e\xf6\xc3\x73\xd7\xb8\x93\xfc\xa9\x93\x01\xf5\x99\xea\x75\xe7\x23\x11\x8c\x63\xd3\x5e\xe7\x61\xb5\xaa\xea\x31\x33\x73\xa4\xf9\x41\x40\x21\x1d\xef\xf6\xcb\x9a\x5a\xec\xfd\xc3\xf9\x59\xbc\x5e\x56\xe3\xa9\x06\x43\x8a\
x20\xef\x75\x62\x67\xe1\x91\xe2\xbd\xcf\xff\x90\xd0\xcf\x95\xa5\x91\xe1\xc7\x45\xdb\x37\x64\x26\xfd\x0c\xed\xa7\xe9\xba\xe5\x23\xfe\x79\x8b\xfa\xa3\x98\xa4\xe7\x9a\xff\x21\x5c\x50\x2a\x16\xff\x10\xe9\xa8\xd7\x2a\xff\xeb\x43\xa6\x19\xe6\x4c\x03\x32\x3b\xfb\xbd\x8b\x14\x81\xf3\xf9\x57\xf9\xa3\x66\xf4\x21\x71\x02\xd7\x8e\xfc\x50\x6e\x36\xb0\x5a\xfe\x4d\x11\x3c\x0a\x7d\x8c\xd1\x45\xf7\x5c\xa1\x22\xc4\x2d\x22\x16\x3c\x1f\xde\x07\x95\xf2\x45\x8d\x9e\x1c\xd2\x31\xbd\x79\x96\xa5\x01\xe5\x49\x3e\xcd\x71\x93\xbd\x99\x35\xca\xf2\x43\xd9\xd7\xe3\x55\x18\x15\x90\x37\x30\xf3\xb7\x24\xc5\xdc\xbc\x79\x63\x9f\xfb\xdf\xfa\xda\xb4\x33\x22\xff\x05\x83\x71\x4b\xa0\xe6\x5a\xa1\x89\x6c\x4c\x7b\x70\x23\xf3\x7a\x5c\xda\xed\xea\xf9\xea\xc9\x4d\x3b\xe4\x90\x06\x59\x55\x0b\xe5\x75\x6e\x55\x0f\x42\xbb\x14\x39\x28\xb3\x9d\xfc\x0a\x21\x84\x58\x0a\xa9\x64\x6e\xf4\x7b\xc4\xd2\xb8\xde\xa0\x6b\x85\xac\x49\x83\xa9\xd4\xdc\xd0\x3a\xe8\x8d\x4b\xf0\x62\x41\x1c\x02\xaf\xdd\x62\xf7\xa1\x68\xe1\x2e\x4b\xff\x06\xca\x1b\xa1\x63\x81\xc1\x2e\x56\x78\xb9\x11\x84\x33\x52\x1c\x35\x30\x60\x76\xf1\xc9\x22\x47\x10\xd2\x78\xb7\x68\xa9\x60\xc0\x68\x95\x0f\x2e\xd3\x8d\xa1\xf9\x90\xef\x68\xf2\xcf\xcc\x90\xe2\x71\x5a\xbd\x25\x31\x10\x55\x5a\xd6\x2a\xe4\x60\x25\xd7\x1c\xbd\x21\x99\x43\xb4\x87\x12\x1f\x5c\xe1\xd1\x64\x2f\x73\xf6\x08\x29\x48\xbf\x1c\x9a\xc6\x51\xc4\x37\xd9\xf5\x3d\x10\x35\xea\x36\xc9\x89\xdc\x03\xc9\x62\x19\x92\x5f\x43\xa3\xb2\x63\xac\xb7\x53\x0f\xcf\x2e\xfe\xcb\xc3\x90\xf3\xb7\x64\x3b\xc2\x93\x36\x5c\x6b\x27\x34\xe0\x70\x2d\xcb\xb3\x6e\x0f\x37\x47\x06\xcd\x0f\x40\xe1\x94\x83\x1c\xfb\x26\x0b\x33\xd8\x3d\x44\x1f\x84\xab\x59\x07\xe2\x29\x0d\x8f\x5d\x03\xe8\x03\x20\xf2\xf7\x02\xd5\xbb\xe2\x36\x65\x58\x2b\xd4\x38\x62\xca\x63\x35\x2b\x42\x59\x86\x5d\xb8\xfa\x41\x34\x61\xda\xe9\x2f\x4b\x88\xe3\xe5\x8d\x7f\x4c\x85\x43\x87\xb9\x85\x1a\xc7\x2b\xd1\xed\x08\x31\x0e\x9f\x44\x56\xa9\x3d\x8c\x35\xc0\xc8\x43\x85\x6a\xdf\x2e\x2b\xcf\x63\xc0\xa1\xac\xa8\x85\xef\xcf\xb2\xe4\x3a\xb8\x53\xff\xc4\x20\xcc\xb2\x89\x73\xd4\xdf\x26\xd0\xb1\xe6\x71\xe2\x62\x4b\xe1\x44\x1b\xfb\x71\xf1\x89\xda\xd3\xdf\xa9\x6f\x30\xa4\x38\x56\x5d\x4f\x37\xfd\x26\x43\xa7\x86\x42\x27\xe5\xf2\x1c\xa0\xe3\xc3\xfe\xa2\x58\x07\xde\x20\x06\x7e\xa0\x44\xc9\x21\x2e\x76\x1c\x45\x5e\x33\x88\x77\x5c\xdb\x79\xdc\xe6\xbd\x90\x47\xce\xfb\x5a\x29\x54\x38\x3c\x7e\x8e\x92\x2f\x05\xa9\xcc\x71\x5c\xc7\x82\xb7\x6c\x5f\xda\x46\x59\xfe\x0a\x59\x0f\xba\x1e\x61\xc3\xc3\x55\xea\x7a\x2f\xe9\x9e\x82\xdc\x4e\xeb\x08\x09\x52\xde\x70\x17\xfe\xb0\xdf\xd9\x71\xf0\xa2\x7d\xb0\x18\x27\xec\xa5\x59\xee\x29\xc8\x80\x1c\x77\x92\x12\x8a\x4d\x80\xa1\xec\x67\x89\xab\x85\x80\x37\xf4\x58\xbe\x28\x10\xc2\xad\xc2\x47\xc9\xb5\x46\xae\xf0\x68\xbe\x5d\xd8\x52\x97\x29\xc4\x9f\x0d\x76\x97\x5f\x59\xcb\x87\x1c\x1a\xac\x18\xd9\x1b\xbc\x3f\xd4\xe5\xb0\x27\x45\x6e\x9b\xdd\xb5\x5b\x95\x5f\xa9\xe9\x83\x28\x4a\x12\xea\x20\xa5\xd5\x24\x90\x05\x69\x42\x2e\xf0\xa0\x9e\xb7\x15\x2f\x65\x3e\x53\x7b\x20\xa8\x2d\xa0\xcb\x2a\xc9\x90\x50\xbb\xc0\x45\x8d\xd4\x01\x24\xde\xc7\xb9\xe5\xbd\xe5\x17\x0c\x79\xbb\x4e\x39\x2f\x07\x8e\x48\x48\xe5\x24\xbb\xd3\x44\x32\x22\xb1\x6c\x91\x8a\x09\x3c\xc7\x49\xfb\x5c\x43\x9b\x0a\x82\x22\x7e\x0f\x63\x0b\x0b\xdf\x3a\x0c\xb5\x7a\x8f\xa0\xcd\xb7\xf7\x42\x9f\x9c\x21\xcf\x3a\x41\x10\x16\xa5\xc6\xe1\x29\x58\x4a\xc7\x80\x8a\x48\xaf\xd7\x97\xa2\x20\x3c\x54\x77\xa5\x87\xfb\x16\xc5\xee\x2b\x44\x41\x62\x09\xf5\xff\x87\x9c\xa5\x51\x63\xd5\xe2\xc7\x51\x74\xd4\x9f\xc9\xe9\x7f\xbc\x2a\x29\x44\x70\x12\xa3\x05\x87\x74\xe7\x4a\x0f\xa8\x21\x15\x07\x3b\xb8\x4c\xe5\x18\xf3\x9f\x1f\x62\xa1\x83\x2b\xe7\xc7\xab\x14\x05\xda\xfd\x7f\x05\x44
\x06\x9a\x21\x34\xb2\x78\x74\x58\x8d\x82\x4f\xe6\xdd\x9f\x50\xb7\xc7\xda\xb9\x9b\x68\xa0\x7f\x76\xf8\xb6\x9a\xc8\xc3\x1d\x86\x6f\x04\x1a\xef\x6f\xdf\xcb\xb7\x13\x77\x5c\xc9\xe0\x1d\xa4\xf4\xf7\xca\x47\xcf\xc5\x42\xd6\xd0\x8b\x70\xb5\x10\x1a\xd9\xc7\xdc\xcc\x48\x6c\xa1\x59\x67\xe1\xf4\xb4\x4a\x29\x81\xec\x67\x43\xdf\x63\x18\xc0\x9f\x26\xb3\x16\x54\xe6\x04\xde\x5f\xfb\x20\xbd\x91\x35\x97\x07\x89\x8b\x90\x13\x71\xe6\xe7\xc0\xc2\xc6\xf1\x7a\x6e\xd2\xe5\x76\x7a\x64\x11\x6a\x58\xbe\xbf\x22\x67\xd2\xf5\x5b\x8e\x30\xbc\x57\x66\x16\x3c\x69\x3c\xf0\xdd\x47\x77\x3f\x32\x78\xd1\x77\xed\xe6\x50\x05\xb9\x06\x09\x42\x40\xff\xa9\x87\x5f\x92\x05\x1e\xe8\x0e\x19\x73\xf7\x17\xa3\xba\x11\xcf\x1d\x1f\x17\xb6\x5c\x7a\x64\xde\x30\xb4\x1b\x42\x24\x6a\x00\x20\x2b\xd8\xdf\x05\x08\x91\xb0\xd1\x48\x4b\x3f\x32\xb4\x1c\x32\x24\xcf\x43\x06\x27\x8f\x53\xe8\x09\xb7\xe3\xf4\x52\x67\x1f\x4f\x4a\x2f\x40\xcb\x37\x44\x3e\x7c\x60\x78\xa6\x7d\xd9\x5d\xff\xa6\xb4\x08\xb0\x65\xa2\x1d\xf6\xe8\xef\xd1\x38\x9b\xf6\x41\x3d\x5a\xeb\xcd\xec\xd9\xc7\x24\x6f\xc9\x11\x1e\x62\xdd\xb7\x10\x36\x84\xea\x48\xa0\x4c\x21\x3d\x72\x21\x7d\x42\x22\x26\x6b\x6e\x72\x1d\xd4\xff\xb2\xe9\x10\x82\x08\xb5\x86\xf8\x49\x9d\xa5\xb0\x52\x23\x4f\x1e\x66\x48\x64\xd4\x27\xa8\x06\x9c\x0b\xa5\xa1\x10\x91\xc0\xa0\x91\x52\x2c\x69\xfb\xab\x4e\xb3\xbb\xce\x38\xc3\x10\x2b\x09\xd1\x0f\x0d\x52\xc2\x90\x58\x92\xe2\x8d\xda\x96\x5a\x57\xc7\x0b\xcb\x16\x35\x84\x6b\xff\xdc\x42\x27\x4c\x84\x95\xc8\xc7\xea\xe9\x5f\x4d\xc3\xa1\x7e\x0c\xf5\x41\xaa\x04\xfe\x89\x4f\xe9\x93\x87\x9a\x49\x6e\xee\x92\x2c\x79\x94\x33\x8c\xa3\xf5\xe4\xa5\x7a\x12\x07\x70\x5f\x3d\x28\xf8\x95\x5b\xfa\x65\xf5\x0e\x79\xdb\x81\x8b\x44\x48\xc7\x05\xe1\x7e\xb1\x31\x49\xd6\xf8\x08\x96\x36\xb4\x04\x16\xc8\x4d\x49\xda\xc3\x8e\x61\x13\x57\xf2\x8b\x4b\x10\xd2\x27\xa1\x91\xc2\x01\xd5\xa6\x92\xf2\xd1\xdc\xf0\xe2\x52\xf7\xfb\x4c\x96\xc5\x3d\xdc\xba\x44\x4b\xdb\xba\x1d\x1e\x8f\x14\x24\xad\x18\x03\x54\x49\xbb\x45\x7c\x54\x6d\x56\x88\x8e\x54\xf8\x12\xd8\x30\x81\xbd\xaa\xfb\xdc\x13\x4b\xde\xde\x36\x44\x19\x66\x09\xc3\xcc\x91\xc0\xda\xd7\x1f\xf9\x18\xfa\x5a\xd8\x19\xc6\xa0\x62\xd4\x46\x6e\xea\xfe\x85\xf0\x6d\x6f\x15\x5c\x7b\x40\x07\x08\xa2\xb9\xd5\x5b\x97\x26\x16\x68\xfa\x91\x2e\xdc\x72\x28\x03\xa2\x9f\xaf\xb3\x05\xeb\x49\xc8\x25\x2c\x4e\x97\x23\x52\x6c\x50\xd8\x68\x04\xbd\x9e\xf2\x2d\x87\xc6\x5a\x7a\x97\xfc\xe4\x91\xd3\xe4\xb6\xb2\xfb\xa4\xb5\x0d\x25\x7b\xf6\x33\x05\x8b\x21\xe1\xc2\xf3\x9a\xc1\x87\xd1\xca\xe9\xbd\x08\xdf\xd0\x63\xfc\x48\xd5\x14\xcd\x26\xeb\xa2\xb1\xba\xbe\xb8\xfa\x58\x53\x24\x29\x17\x55\x7c\x89\x40\x94\xeb\x20\xf4\x5a\x72\xfb\xa5\xe8\x75\x81\xf6\xfd\x12\x35\x4c\xe1\xb3\x3f\x21\xb0\x99\x73\x30\xce\xa7\xab\xe3\x3a\x2e\xd1\x33\xb5\x12\x42\x41\x42\xcc\x08\x25\xad\x50\xd9\x72\x33\x63\xd9\x42\xf3\xa5\xf6\xd4\x75\xb1\xbb\x3d\xc7\xd6\xca\x2e\xf5\x97\xe2\xf8\x4e\x74\x2d\x59\x97\xa0\x8f\x99\xfa\x49\xfe\x74\x2b\x0a\x46\xef\x52\x2f\xf4\x7c\xbb\x99\xad\xa9\xff\x2f\x0a\x69\xa0\x92\xca\x24\xfd\x09\xdc\x87\x23\xb4\x58\x71\x27\xae\x8e\xc6\x22\x15\x9f\x29\xc1\xef\x75\xed\xad\xd4\x6b\x27\xe0\xac\x9e\x9a\xb9\x8f\xda\x58\x82\x90\x12\xf2\x09\xe3\xe8\x31\x64\xcc\xf1\xd7\x7e\xe2\xb7\x3f\xce\x35\x2d\x7c\x4f\xa7\x34\x5f\x4e\x1c\x31\x0b\xa3\x22\xbc\xf4\x53\xe2\xa7\x38\x33\x27\x19\x53\xe6\xca\xd0\xba\xdd\x86\x28\xbb\x63\x2a\x6b\x39\x64\xc1\xd3\xca\xdd\x2e\xbd\x19\x98\x9c\x89\x86\x99\x5e\x00\xd4\x98\x6b\xe8\x34\xa1\x2b\x1f\x9a\x34\x40\xec\xb1\xae\x45\x3f\xd7\x1b\xcb\x7d\x6e\xb9\x4f\x56\x17\xa5\x90\x85\xf1\xdc\x61\xd7\x8c\x79\xf9\xb8\xdb\x18\x45\x2a\xb1\x1c\x2a\x8e\xf5\x56\xa2\x39\x83\x6f\x6f\x66\xb2\x77\xec\x90\xca\x0f\x24\x66\x86\x10\xa
1\x39\x0a\x13\xa3\x61\x29\xff\xaf\x8f\x8f\x2b\x46\xf1\x42\x66\x86\xc3\xbb\xb1\xbd\x5b\x11\xc7\x1a\x8d\xb3\xe6\xb5\x30\x6b\xd8\xf4\x28\xc0\xd2\x36\x6f\xfd\x03\xb7\xad\xe9\xd1\xe6\xb7\xad\xae\x92\x86\xd9\x25\x8b\xb3\x0f\xa5\x5a\x93\x00\xef\xc9\xde\x66\x28\x4c\xbd\x07\x4c\x78\x95\xba\x52\xda\x85\xd7\xe8\xd3\x76\xc9\x07\x68\x66\x08\x27\xa5\x88\xf6\x72\x6e\xa3\x8e\xd1\x9a\xd0\xd7\x1c\x76\xda\xb2\x8c\xa1\x64\x2d\xba\x6c\xa9\x8c\x7a\x48\x92\x7b\x36\x42\xc7\xc6\x9a\x72\x4d\xd9\x67\x46\xbb\xf6\x3c\x69\xa7\xa5\x9c\xa1\x98\xc3\xa7\xca\xb6\x07\x2f\x1c\x67\xd9\x0d\xe1\x82\x5d\xfe\xe3\xd1\x76\x2e\x84\xe9\xc2\x0e\xef\xbf\x46\x71\x87\x9c\xcd\x93\x81\xf7\x30\x43\xd5\xc3\xcc\x6f\x1a\xef\x26\x6b\x63\x4f\xa4\x4f\x8f\xa6\xa8\x71\xb1\x7a\x74\xec\xb7\x97\x91\x8a\x43\x43\x81\x5f\x9d\xd5\x9a\x4f\x0e\xf8\x75\x1e\x95\x4e\x0b\xce\xd3\x98\x3b\xcf\x4d\x59\x67\x9e\x87\xea\x5d\x79\x33\x3b\x9e\x5c\x27\x99\x4f\x28\xdb\xbc\x3b\x77\x6b\x75\x68\x8f\x4a\x40\xe4\xe2\xf0\x49\x77\x75\x17\x7f\xea\x3a\xbe\x33\x1d\x86\x33\x29\x4a\x23\xe7\x1a\x14\x6f\x56\xe6\x01\x76\x90\x0b\x4e\x13\x25\x68\xea\xaa\xf7\x5a\x42\x38\xfe\xea\x98\x2d\x3f\x1b\x44\x6f\x86\xe8\xbe\x76\x56\xd3\x3d\xa5\x8a\x53\xf2\xa3\x57\x2b\x11\xe1\xbf\x52\x41\x81\xd4\x75\x2c\x9b\xf3\xa4\x4d\x78\x7a\xa2\xcb\x27\xb1\x98\x9e\xa1\x94\xee\x85\xd0\x5b\xd9\x7d\xd0\x9c\xe3\xb1\x1d\x86\x3d\x29\x57\xef\x26\xb7\x6e\x2f\x14\x2f\x07\xc6\x41\xe3\x4c\x51\x05\x95\x30\x9b\x40\x8f\xd5\x1f\xaf\xf4\x1b\x8b\x94\x55\x3d\x64\xb3\xc7\x91\x6d\x71\x33\x07\x8e\x4e\x41\x2f\x85\x62\xdf\x6e\xf1\x71\x80\x35\xd0\xe0\x88\x66\xe7\x9a\x7f\xab\x22\xcf\xed\x66\x9c\x8a\xa8\x6c\x32\xb8\x2e\xcf\xf3\xa6\xa1\x4e\x14\x34\xd7\xa4\xc0\x53\x0f\x49\xfd\x40\x80\xe7\x1f\xad\xa7\x23\x99\xb2\xe6\x3f\x11\xe6\x2e\x93\x95\x3d\x2c\xe9\x61\x76\x68\xf7\x59\xdb\x58\xbb\x5c\xc7\x2a\x23\xd0\x5c\x30\xe3\xc7\x38\x62\x38\x04\xd5\xcc\x52\x41\x07\x4a\x25\xd0\xae\x16\x49\xd1\xba\xc5\xa9\x6a\x89\xae\xe2\x3a\x69\x3b\x5d\x57\x35\xa5\xfe\x09\x06\x77\x4f\x19\x9d\x7c\xe5\xd0\x06\xfa\xa9\x12\xdd\x81\x04\xd0\xc4\xe7\x15\xfa\x3f\xa5\x09\x00\xf5\x4d\xca\x09\xb6\x37\xa6\xa4\x1a\x04\xbc\x15\x1f\xae\x99\x1a\x38\x73\x96\x46\x90\x86\x67\x3b\x26\x8e\x73\x69\x44\x36\x72\xe1\x34\x0c\x6d\x26\x95\x83\x5a\xa1\xcb\xd2\x3c\xfa\xa9\x8e\x34\x71\x95\x62\x9f\x52\x7e\xad\x63\x8e\xbd\x4d\x26\x44\x36\x0f\xb4\x35\x18\xab\x5f\x7a\xd7\x76\x26\x75\xff\x2e\x7a\x01\x15\xc1\xc5\x17\xa6\xcc\x7c\xb8\xcb\xb8\x88\xda\x6c\xe6\x14\x7b\x45\x20\xe8\x11\x01\xb3\x5d\xb1\x56\xf1\x56\xa4\xbf\x73\x1d\xd2\xbb\x02\x58\x15\xc2\x6c\x6e\xdb\x17\x64\xef\xa9\xca\x73\x94\x2c\xd5\x1a\xb4\xa4\x28\x85\x64\xfd\x8d\x68\xd5\x9b\x9d\xa9\x96\xe6\xc7\x04\xd7\xf6\x1a\x31\xc4\x28\xd1\x5f\x6c\xd6\x4f\xc9\x61\x68\xc5\x35\xbb\xb8\xf7\xae\x52\x3f\x31\x26\xc6\x4d\x3d\x33\x70\x73\xf9\xd4\x5a\x3e\xe9\x9e\x8b\xa0\x19\x2a\x7a\xe1\xe3\xcf\xa9\x8b\x0d\xee\x3a\xd2\xf0\x1b\x74\x79\xda\x75\x28\xa3\x23\xde\xc2\x8c\x2d\xe5\x55\xeb\xe7\x07\x97\x07\xad\x94\x2d\xe4\x2e\x87\x62\xd2\xc0\xd1\xab\x0e\x90\x2c\x9f\x42\x3c\x07\xa3\x1f\xac\x77\x73\x0a\xe5\xa9\x9c\xfa\x50\x4d\x8a\x31\x71\xb7\xaf\xa1\x5e\x12\xee\xd1\x13\x4d\x54\x6b\x2b\xec\x31\x86\x0c\x9b\x11\xd1\xfa\x45\x4d\x3a\x58\xaa\x8e\xf0\xd1\x7c\xeb\xd7\x2e\xa6\x3a\x80\x04\x31\x71\xd6\xdc\x69\x3b\x15\x30\xec\xa2\x44\x97\xd4\x5a\x51\x68\xf2\x45\x0b\x77\x69\x7d\x07\xcd\xb0\xd7\x32\x8e\xdf\xe3\x99\xb8\xeb\x67\x8c\x87\x76\xf0\x67\x31\xec\x1e\x01\x6c\xd6\xf8\x1e\xb8\x44\xc7\x31\x99\x2e\x25\xbe\x5b\xdf\x0f\x82\xc3\xa0\x40\xe4\x9a\x32\x52\x80\x76\x21\x22\x37\xd0\x0f\x1f\xc3\x5c\x15\xcf\x96\x7e\xd3\xc9\x26\x8e\x44\x89\x24\xe0\x73\x61\xbc\x44\x0e\x36\x2f\x28\x37\x49\xb6\x54\x88\x20\xb3\x
b6\x7c\x35\x11\xa3\x33\xdd\x6b\x4c\x88\x99\x8c\x1c\x91\xcf\xb2\x17\x39\xbc\x0b\x01\xa9\x16\xf4\x90\xae\xce\x5d\x3d\xd0\x38\xe2\x43\x05\x2d\x53\x65\xeb\x26\x64\x69\x4d\x45\xb3\xed\xbe\xe8\xe9\xf7\x8a\x22\x5c\x06\xe9\x78\x1f\x27\xd3\x99\x3c\xfc\xc1\x1a\x89\x0f\xb7\x8f\xb2\x94\xe8\x0e\xdc\xa5\xaf\x26\x86\xd4\x54\xc7\xe5\x70\x95\x76\xd0\x01\x07\x6c\x09\x6d\x23\xdc\xdb\x2c\xab\x0f\xc9\x20\xd8\x53\xd5\x05\x46\x54\x9f\x76\xef\x8d\x34\xce\x44\xea\xee\xac\xd1\x13\x6f\x86\x69\x4d\x45\x65\xb8\x54\x92\x9d\xcf\x3b\xec\x81\x46\x40\x17\x13\x30\xa3\x14\x95\xd9\xfc\x05\x0c\x3d\x24\x9a\xc8\xae\xe0\x2a\x9b\x7b\x2e\x75\x83\xad\xa0\x96\x66\xda\x3c\xd8\x9d\x96\x3d\x6c\xdc\xe8\x7a\x82\x68\x42\x6b\x96\x5c\xd1\x59\x10\x9f\xfb\x96\xbd\x6a\x2a\xd7\xa9\x74\x6a\x6c\x86\x22\x14\x13\x86\xd0\x10\xeb\xe2\x63\x6a\xda\xe8\xd4\xa4\x9d\x10\x6f\x45\x3d\x24\x07\x29\xe8\x31\x51\x14\x3d\x60\xe6\x89\x09\xe3\x50\xa3\x54\x27\x2b\x11\xdf\x9d\xec\x3b\x37\xd3\x90\xe6\x12\x9f\xb5\x55\xfa\x4a\x7d\xe4\x37\xe3\xd3\x5f\xd5\x99\x45\x2b\x73\x7a\xcf\xb7\xb8\x53\x92\xac\x5a\x44\xd4\xa5\xe5\xdc\x13\xb5\x96\xa0\x5f\xe0\xfb\x5b\xb6\x2b\xd0\x62\xc4\x33\xcb\xf4\xbc\x0e\xae\x94\x43\x4c\xcb\x5e\x58\xe8\x34\x70\x24\x0d\xc9\x07\x45\xd2\x4b\x54\x5a\x3a\x19\x65\x01\xc7\x66\x89\x1b\x7b\x6b\x2b\xf1\x27\xbb\x3f\x14\x57\xaa\x57\xe6\x47\xc9\x31\x75\xa1\x66\x85\x48\x3a\xfe\x0e\x68\xcb\xfe\xa1\xac\x94\x57\xd5\x8f\xae\xa6\xa4\x13\x76\xf3\x96\x5c\x2a\xe3\x1a\x3f\x78\xfc\x12\x1f\xb7\x59\x1d\xaf\x34\xdf\x1f\x31\xb3\x33\x33\x7a\xd2\xec\xe2\x98\x5f\x2e\xb5\xf4\x52\x46\x2f\x24\x40\xa9\x72\xa4\x06\x63\x9a\x3c\xbb\xcc\xde\xc4\xa6\x13\x4e\x5b\xc7\xf7\xdf\x88\x8d\xf4\xa0\xb8\xd2\x7f\x6e\x1d\xc0\x82\x1b\x35\xfd\x60\x0f\xef\x15\x94\x07\xff\x69\xf0\x70\x6b\xc9\x8c\x0d\x22\x23\x98\xe1\xcf\x8d\x06\x49\x12\x64\xff\x23\x65\xcf\x0a\x48\x2f\x3d\x3f\xc2\xa5\xac\x1f\x60\x55\xf5\xa0\x2c\x19\x34\x5e\x35\xde\x96\xe0\x0a\x3f\x58\xd3\xa6\xfe\xda\x00\x06\xf1\x70\x23\x96\xf9\x94\xd9\xac\x24\x08\x5d\x13\x76\x3a\x54\x68\x72\x10\xa9\x97\x4f\xd5\x76\xb2\xd6\x17\xc2\x3f\x78\xc8\xfd\x24\x59\xa7\xbe\x41\xcd\x20\x5d\xac\xea\xa5\xef\x2f\x39\xaf\x60\x2e\x37\xf5\x1f\x02\x51\x6a\x99\x41\x1c\xca\xf3\x14\xfe\x53\x80\xdd\x1d\x70\x3f\xbf\x69\x42\x40\x59\x5a\x51\x47\x9d\x7f\x6b\x58\x8a\x71\x31\x3b\xc7\xc3\x60\xcd\xdc\x49\xba\xd2\x48\x76\x2a\xa3\x58\xd2\xc9\xc5\x17\xdd\xc0\x10\xdd\x6a\x4f\xa6\xdf\xfc\xf8\xfb\xaf\xe0\x14\x0a\xdf\xac\xa5\xfe\xdf\xff\xef\xff\xf9\xff\x03\x00\x00\xff\xff\x71\x1d\x08\x34\x38\x36\x05\x00")
-
-func dataEnglishJsonBytes() ([]byte, error) {
-	return bindataRead(
-		_dataEnglishJson,
-		"data/English.json",
-	)
-}
-
-func dataEnglishJson() (*asset, error) {
-	bytes, err := dataEnglishJsonBytes()
-	if err != nil {
-		return nil, err
-	}
-
-	info := bindataFileInfo{name: "data/English.json", size: 341560, mode: os.FileMode(420), modTime: time.Unix(1452717629, 0)}
-	a := &asset{bytes: bytes, info: info}
-	return a, nil
-}
-
-var _dataFemalenamesJson = 
[]byte("\x1f\x8b\x08\x00\x00\x09\x6e\x88\x00\xff\x6c\xbd\x4b\xb2\xab\xbc\xd3\xe5\xdd\x7f\x47\xf1\xc6\xbf\xfd\x8d\xe0\x1b\x43\xcd\xa0\xa2\x1a\x32\xc8\x20\x5b\x80\x0f\x17\xef\x83\x2b\x6a\xee\xa5\x14\xde\xe4\x6f\x71\x2a\xe2\x69\x1c\x1e\x6f\x63\x90\x52\x79\x5d\xb9\xf2\x7f\xff\xd7\x7f\xff\xf7\x7f\xfe\x47\x5a\xd6\xff\xfc\xff\xff\xfd\x3f\xcb\xbf\xcb\xd5\x10\xe6\xfd\x3f\xff\xdf\xf1\xef\x57\x58\xe7\xd4\xa4\xf0\x7b\x9d\xd3\xd8\x9e\x17\xb7\x30\x97\xff\xce\xcb\x98\xd3\x27\xdc\xe2\xda\xff\xfe\x8f\x47\x1c\xc7\x74\x8f\xf3\xef\x75\xb9\xb1\xdf\x69\xd9\x96\x30\xe2\x93\x2e\xcc\x71\xfd\xbd\x6e\xa7\x79\x5a\xfb\xdd\x7f\x75\x39\xbf\x37\x86\xb1\x39\x3f\x78\x96\x2f\x9d\x37\x29\x3f\xbd\x9e\x9f\xf4\x31\xfb\x27\xe5\x97\x5a\x7f\xce\x76\x1a\xc7\xf3\xa2\x09\xf3\x94\x7f\x2f\xe6\xcd\x1f\x7e\xe9\xcb\x27\xfe\x80\xa9\x29\x77\xcc\xf1\x7c\xa2\xb0\xf9\x0d\x97\xb2\x08\xe7\xf7\x9e\x69\xb8\xc5\x39\x9f\x0f\xd2\xc6\xdb\x84\x8f\x1f\x71\x59\x52\xe3\x5f\xed\xd3\x9c\xe3\xf9\xc7\xcd\x3e\xae\xbd\xaf\x51\x18\xbb\x98\xcf\xab\xa1\x2c\xf0\xe2\x0b\x71\x2b\xaf\xee\x7b\x11\x86\xdd\xbf\xe5\xaf\x37\xc7\x5b\x6c\xfc\xe7\xde\x69\xee\xd2\xe8\x3f\xf0\x0c\x6b\x9f\xa3\x2f\xd4\x2b\x0c\xfc\xc1\x30\xaf\x7d\xc0\x8b\xcc\xf8\xb9\x80\x1f\x5f\xd6\xf8\xea\xc3\x98\xa2\xac\xea\x7e\xde\xb6\xe9\xe7\x22\x61\x69\x8c\x94\x84\xf3\xe2\x11\x46\xdf\xf9\xa6\x3c\x51\x9c\xf1\xa7\xf7\xb9\x6c\x78\x5c\xf0\x72\xe7\xf7\xa6\xbd\x39\xff\xac\x4d\xc1\xbf\x13\x72\xf2\x4f\x1e\x5b\xf6\xdf\xea\x63\xbd\xff\xef\xe5\x1a\xe7\xb8\x40\x30\xca\x63\xfe\x5e\x74\x79\x82\xb4\xc6\x77\xc4\xfb\x3c\x62\xc0\xbb\xc5\x79\xcf\x2e\x27\xb9\x9d\x63\xcb\xf5\x95\xb7\x79\x4c\xfe\xcd\xb0\xf4\xd8\xfa\xc7\xd6\x26\x97\xbe\x79\x5a\xb8\x3e\x78\x9d\x67\x11\xc3\xf3\x4b\xe5\x83\x29\xe3\x4d\xdb\xfd\xba\xe4\xb2\xd5\xe7\xa7\xf6\x54\x78\xf1\x5b\x79\x3d\x11\xd9\x31\xf9\xcf\xaf\x61\x70\xd9\x4a\x45\xe8\x64\xe3\xce\x03\x51\x56\xeb\x7c\xf8\x60\x87\x85\x7b\x8d\xb5\xb3\xa3\x18\xe5\xa9\x66\xff\x30\x4f\x1b\x7e\x79\x81\x72\x29\xfb\x8e\xdf\x6d\xfe\x6c\xe5\x30\xf8\xff\xf9\xa1\x34\xde\xca\xf9\x4e\xb2\xfd\x7e\x1c\xb6\x9b\x2b\x95\xc9\x37\x9b\x0b\xf5\xea\xf7\x9c\xfd\xa3\x71\x9a\x07\xff\x2c\x6c\x7e\x3c\x4c\xe4\xf8\x7c\xfe\x9b\x39\x95\x3b\xf8\x46\xc7\x22\x15\xbb\xef\xec\x2d\xf9\x79\x8b\x5d\xe7\x5b\x36\xef\xcb\x1a\xb2\x0b\x60\x68\xf7\xf3\x31\xe6\xb4\xfa\x0f\x87\x1f\x17\x3f\x79\xd9\x7b\xd9\x86\x38\xba\xb0\xac\x65\x2b\xce\xfb\xc7\xd6\x1f\x77\x4d\xf7\x7b\x18\x77\x9c\xd7\xc1\xb5\x40\x11\x3e\xd7\x8f\x45\xe1\x9f\x7f\xd6\x95\xdb\xf9\x9a\x47\x7c\xf2\x4e\xcd\x2a\xe7\x85\xc2\x5c\x54\xa2\xeb\xbb\x38\xbb\x75\x59\xf6\xfc\xf6\xef\x3c\x8a\xcc\xbf\x7a\xec\x6a\x91\xd2\x3c\x40\x55\x96\x35\x76\x8d\x5c\xee\x94\x7c\x2f\xe2\x0a\x89\x33\x45\xed\x8b\x9f\x83\x2a\x9e\xc7\x34\x8b\xa2\xe2\x55\xd1\xf9\x79\x5a\x57\xff\xf3\x69\x84\xb2\x8e\x0b\x75\x87\xc9\x02\xee\x1c\x07\x7f\xd4\xc7\x56\x8e\xec\x0a\xd9\xc0\xc5\xdc\x4f\x90\xd5\x3e\x7c\xfc\xb9\x83\x59\x0e\xd7\x38\xd4\xbc\x37\x7f\xc8\xf0\x2a\xe7\xe9\x94\xb4\xb8\x40\xbb\x35\x19\x27\x26\x6f\x4d\x82\xc5\x7a\x84\x21\x89\x1a\xc2\xa3\xe7\xa2\xcf\xa6\xf3\x97\xdf\x21\x47\x2c\x4a\x6b\x8a\x1d\x37\x1a\x62\x07\x1d\x96\xe9\x1d\x2c\xdb\x87\xf7\x3d\x8c\xe6\x79\xd9\x05\x7f\xee\xf2\xa2\x34\x2d\x65\xd9\xa1\x57\x8a\x32\x92\x75\x7f\x94\xf7\x38\x9f\x75\xf6\xd3\xd3\xc5\x39\xe4\x16\x7b\x60\x56\xd9\x77\xbe\xa1\xc6\xab\x6f\x0c\x65\x35\x53\x2a\x8a\x7a\x82\xaf\x00\xfd\x3a\xc7\x0e\x9a\xa1\xfc\x76\x03\x9d\x59\x5d\x23\x5f\xa6\xc9\xce\xde\x82\x17\xa4\xd6\x0e\x5b\xd1\x7a\xe7\x6d\xf7\xf7\x84\x65\xb2\x25\x83\xcc\x3d\x36\xb5\x93\x2e\xf2\x2d\xf4\x4d\xd1\x14\x0d\x2c\x3e\x0c\xfe\x18\xcf\xaf\xa7\x16\x96\xff\x0d\x8d\x54\x94\x50\xd9\x00\x97\xc2\x09\xef\x7c\x2b\xaa\x66\x8
5\x6e\x30\x8f\x80\xba\xad\xbc\xe5\xea\x5f\xdd\xa7\x4c\xdd\x6b\x96\x91\x2f\x63\x3b\xe2\xdf\x2d\xda\x1e\x17\x66\xa0\x7c\xb7\xde\xe5\x8b\xf0\x6e\x02\x4f\xfe\x06\x51\x5d\xfc\x0e\xea\x6b\x52\xf2\x8a\x3a\x7a\x26\x9c\x71\x57\x14\x2b\x4e\x88\x19\x59\x3a\xbb\x45\xa1\xc0\x17\x2a\xbe\x81\x9b\xb4\x22\x6a\xeb\xbc\xb5\x11\x87\xcb\x8d\xe9\x34\xee\x70\x82\xb3\xec\x91\x6f\xfa\x4f\xc2\x2b\x51\xac\x9e\x87\xa5\xf6\x37\x59\xf0\x8e\x63\x28\x26\x01\x87\xbf\x1b\x5d\xc6\xca\x39\x09\xa2\xba\x70\x86\x6e\x72\x97\x36\x8a\x70\x9a\x27\x89\x4d\x7b\xc5\xf2\xdd\xf3\x17\xe4\x36\x83\x9d\x28\x1c\xa9\x22\x28\xb8\x0c\x66\x28\x5d\x3e\x8b\xa7\x80\xd5\x2d\xee\x99\xaf\xde\x54\xfc\xce\x00\x9b\x55\x96\x06\x66\xaa\xda\x4b\x6a\xb1\xe2\x0b\xf9\x7d\x42\xd9\x4b\x9e\x08\x28\x1c\x93\x53\x8f\x0c\x22\xef\x32\x24\x1a\xc6\x41\x5f\xab\xd8\x8e\xd6\xf7\x36\xd3\x91\xce\x3b\x7e\xfb\x9d\xa6\x8c\xc7\xde\xe6\x75\xf4\x0d\xad\xe1\x8c\xeb\x8d\x95\x5b\x5f\x1d\x60\x68\x98\xe2\x5a\x62\x61\x44\x4c\xf1\x8c\xeb\xca\xab\xbf\xf8\x7e\x82\x13\x32\x94\x20\x8b\xde\xd5\xe2\x8a\x74\xd8\xe7\x15\x41\x4a\x44\x9c\x53\xfd\x41\xb8\x78\x8c\x12\x4a\x94\xb7\x78\xe4\x54\x5c\x58\x97\x53\xb3\xf2\x93\xb8\xbe\x5c\xd6\x11\xaf\x65\x31\x1c\x3e\x49\x8c\xb1\x16\x8d\xc0\x8a\xbd\xe9\x5d\xf8\xc6\x11\xf1\x9c\x47\x9e\xe5\x79\xcd\x80\x21\xf0\x9b\x86\x24\xeb\x7b\xfe\xd8\x94\x3b\xd7\xcb\x22\x4b\xe6\xa3\x8d\xe2\x0e\xf9\xfe\xe5\x38\x8d\x81\xef\x75\x7e\x72\x8f\x6a\xd6\x26\x84\x4c\x43\x2a\xbb\x3e\xf8\x11\xc4\xa1\x2e\xa1\xd6\xd3\xd5\xe8\x44\xa3\x6d\x82\xe4\x41\xce\xf3\xe2\x9b\x17\x0d\x92\xfc\xfe\xd8\xa6\x21\xb8\x33\xdc\xd3\xf1\x6f\x43\xf2\x1d\x9b\xc3\x80\x37\xa9\x2e\x56\x82\xcd\x7a\xd2\xdb\x9e\xb9\x2f\xc5\x35\x9a\x71\x08\xc7\x76\xc1\xea\xd8\x65\xc0\x11\x1e\xe1\x91\x74\x5b\x68\x43\xde\x5e\x50\x37\xa2\x51\x8e\x78\x1e\x5e\xcf\x22\x51\x52\x03\xb1\xb9\x87\xdd\xb5\x5b\x1b\xa8\xf7\x20\xd8\x4b\xb8\xf1\xc9\xd3\x72\x39\x03\xdd\x56\x5e\xd4\xcd\x4e\x2f\x47\xa9\xaf\x4e\xde\xb9\xfa\x03\xad\x5d\x13\x8b\x90\xa5\x8b\x2e\x3e\xdf\xaa\xc4\x9e\xae\x27\x6e\xc5\xd2\x15\x87\xc6\x1f\x09\xfe\x6f\x31\x25\x17\xef\xca\x9f\x15\x41\x65\xdc\xc4\x29\x18\x71\x60\xca\x61\xff\xb8\x0e\x92\xb5\x6c\x25\xd6\xb1\x10\x1d\x29\x82\x2c\x96\xbc\x6e\x53\x2a\xe1\x9c\xbf\xec\x98\xfe\x6c\x78\x34\x7f\x9f\x07\xcd\x7a\x39\x79\x99\x02\xbf\x43\xff\x8e\x17\xb9\x85\xf7\x5c\x9e\xd3\xdf\xe7\x1e\x2e\x2a\x77\x87\xcf\x35\xbd\x3c\xb4\x09\x62\x36\xf6\x37\x3d\x86\xf2\x76\x93\xaf\x6b\xde\xce\x45\x59\x36\x2c\x71\x51\xae\x88\x1c\x2c\xec\x09\x72\x50\x5c\x8e\x9f\xb3\x1e\x28\x78\xc1\x19\xc1\x5c\x86\xa2\x2f\x76\x75\xc4\xa1\x2e\x7f\xd5\xfb\x96\xad\x53\x12\x0f\xc7\xd6\x40\xb2\x0d\x61\x14\x6f\x48\xfe\xf8\xe5\x9a\xc3\xe2\xf8\x73\x2b\x7e\xfa\x44\xcb\x52\x64\xbd\xed\xa0\x2e\xe8\xc0\x34\xdc\xfd\x1c\xd6\x69\xa7\x1a\xf7\x37\xb5\x65\xf0\x3f\xec\xca\xf6\xd2\x1b\xc0\xcb\x26\x28\x2d\x73\x84\xa9\x71\x5c\x8f\x1c\xc6\x6e\x84\xbc\xcc\xb4\xd0\x16\x8f\xbb\x91\x88\xb0\x56\xf5\xd4\xd0\x40\xc1\x0a\xac\xd4\xb2\xe5\xe9\x65\xcf\xe0\xa6\xfd\x0d\xe2\x2e\x77\xa6\x9e\xf1\xf1\x0d\xd9\x85\xb6\x9c\x74\xfc\xb1\x85\xb2\x74\x9f\x72\xfc\x2b\x96\xe8\x36\x4f\xd3\x53\x1c\x4a\x78\xbe\x4b\x68\xe9\x6c\x16\xa1\x68\xb9\x99\x66\x4d\x12\x0e\x96\x8b\x5c\xe4\x4f\x94\x13\xdf\xc3\x56\x59\x72\x24\x50\x77\xdd\xfc\xb3\xf2\x13\x51\xf3\x64\xe6\x3d\x53\xcf\x35\xdc\xba\x48\x03\xff\xd4\xec\xc1\xb1\x4a\x23\xf4\x29\x03\xa5\xb2\x7b\x6e\x21\xaa\xe7\xa2\x16\x53\x12\x1e\xe2\xcf\xd3\xe0\xc3\xe8\x14\xff\x26\x4a\x02\x21\x4a\x8a\xaf\x8d\x4b\xb1\x34\x91\xa7\x68\xa4\x87\x37\x26\x04\x4e\xf0\x45\xbe\x59\x2c\x24\x03\x23\x14\x63\x9c\x25\xcd\x50\x04\x50\xb6\x07\xd2\xdb\x5c\x0c\x6e\xe4\xc9\x8f\xa9\xeb\xfd\x9e\x5d\x0f\x9f\x6e\x7a\x21\x1b\x1b\x2d\x
63\x04\xc9\xd2\x8c\x70\x57\xc2\xa2\xf2\x7f\x46\x18\x16\x1e\xc0\x39\xd8\x11\x39\x2d\x52\x1c\xcd\x22\x21\x45\x90\x77\x04\x3f\x0f\xe8\x10\xf3\xc0\x5c\x9f\x60\x6b\x2e\x89\xf9\x60\x5a\x4b\xf4\x23\x32\x78\xb4\xa4\x89\x39\x97\x58\x2e\x90\x19\xe7\x13\x17\x67\x21\x52\xb3\x24\xc8\x00\xe5\x4e\xbc\xee\xa2\xd6\x1f\xfe\x4c\x48\xd0\x64\xbc\x52\x4b\x7d\x36\x30\xc1\x24\x41\xcb\x40\x03\x5b\x7f\x94\x61\xa0\xa4\x8d\xee\xe9\xa2\x74\x19\x07\xcc\x0c\x50\xdb\xf4\x97\x06\x00\x2b\x68\xd9\x6e\x78\x25\x14\x2c\xe4\xc4\x1f\x53\xcf\xf3\x61\x55\x02\x38\xa5\x03\x33\x2e\x25\x1c\x4a\xf8\xbb\x1f\x88\x1f\x0b\x1a\xf1\x36\x49\xb0\x7d\x3b\x3f\xc1\x59\x33\xc9\x4e\xb8\xf3\x2d\x21\x7d\x32\x71\x07\x7e\xd2\x98\xee\x4c\x41\xeb\x09\xb2\x63\x89\xac\x65\x79\x10\xbf\x4f\x48\x83\x9f\x51\xee\x71\xd1\xda\x23\xd7\x89\xe1\xf4\x40\x83\xaa\x4e\x5b\x79\x31\xb8\x80\x2f\xcd\x97\xcc\x4c\x83\x1d\x3e\x2b\xfc\xc0\x96\x67\xbe\x18\x10\x86\x52\x16\xcb\xba\xbe\xb0\x20\x9b\xdf\x9c\x99\x95\x6d\x68\xc2\xca\xa2\x31\x05\x55\x8e\xa7\xa9\x3e\x17\x9b\x1f\x3f\x9f\x0d\x33\xcf\x2d\x72\x58\x56\x4f\x79\xba\x3b\x60\x02\x44\xe1\x9a\x66\xa4\x9c\xe2\xfd\x8e\x2d\xe3\xb2\x97\xbb\xc0\xb3\xe5\xee\x95\x70\x7b\xa2\x85\x64\x80\x45\x7d\x61\x59\x4a\x38\x6f\xf4\xf8\xcb\x2f\x61\x0f\xc6\xf4\x7c\x5e\x54\x7d\xd0\x95\x5d\x1a\x1e\xa5\xa4\xe6\x4d\xe4\x06\x1a\xca\x0c\x88\x17\xde\x2c\x63\x04\xf7\xc5\xea\x02\xac\x95\x95\xb8\xb9\xf5\xe4\x43\x4d\xd2\x41\x13\x32\x2e\xbf\x17\x1d\x06\xd5\x84\xd0\xbb\xfc\x1c\xcc\x57\x39\xff\xfe\x73\x89\xce\x1a\x97\xa5\x1e\x33\x26\x89\x02\x12\x62\x4d\x39\xe1\xd9\xdf\x21\x8d\x5d\x71\xba\xce\xab\x37\x4c\x45\xf1\xa9\xd3\x25\x8e\x76\xed\x3c\xe5\x96\x8b\x84\x14\x51\x51\xd6\x2c\x7e\x1e\x25\x17\x48\xf1\x88\xcd\xc4\xf1\x7a\x5c\x13\x68\x92\x5e\x6a\xa8\x13\x2d\x8f\x52\x8c\xca\x44\x85\xf3\x84\x5f\xb4\x50\x62\x8e\x3a\xa4\x1c\x0c\x57\xef\x8c\x88\x5a\xd1\xa5\xe5\x50\x64\x57\x8a\xbe\xc5\x5a\xe1\x38\xea\xb5\x11\xea\xca\xbf\x65\x79\x33\x28\xf8\x57\x2c\x52\xf7\xb9\x68\x05\x97\xab\xa3\x7a\x1a\xe1\xc6\x52\xdd\xf6\xe2\xf2\x34\x25\x82\x77\x35\x10\xef\x8c\x67\xcc\xf1\x9b\xd3\xe4\x2a\x52\xbc\x0d\x78\x58\xa2\x3c\x19\xd2\x06\x2a\xe9\x22\xc3\x3c\x09\x08\x3e\x6b\x1d\x9b\x79\x37\x6c\x5e\xfd\x4c\xb4\xbe\x87\x0d\x16\x77\x7a\xf0\x57\xe3\x59\x66\x6e\x16\x96\x06\xa7\x86\xde\xf5\x2b\xa4\x0e\x01\xdd\x78\xa9\xbb\xf1\x2e\x78\xed\x36\xbc\x7a\x56\x48\x5d\x32\x4c\x2b\x78\x55\x23\xae\x2e\x25\x5d\x55\x01\x7e\x8f\x34\x4c\x1d\xeb\x7f\x13\xcf\xee\x33\x52\xef\x64\x66\x5a\x2c\xc3\x06\x8f\xc6\x62\x79\xdc\xf5\x09\x1d\xbd\xcd\x0b\x5d\xb3\xf4\xf9\x40\xcb\xa5\x99\x09\x63\x93\x0c\x51\xd2\x52\xe6\xd9\x19\x9e\xec\x52\x3b\xc4\x52\x76\xa2\xe3\x8a\xf3\x02\xbf\xbd\xd6\xc4\xa4\x18\x0e\x79\x38\xe2\x8a\xb1\xf5\xf3\x64\xe9\x0e\xa6\x84\x13\x94\x92\xc9\xf8\x47\x6e\xe4\x98\x03\xab\xd9\x30\x7e\x8c\x22\x9e\xc5\x30\x46\x16\x19\x3b\x79\xcd\x7b\x59\xc7\x96\xee\x16\x8f\xcd\x06\x93\xfd\x6f\x7e\x55\x43\x58\x45\x01\x1c\x3f\x73\xdd\xaf\x0c\x23\xb5\x6e\x83\x27\x05\xca\xb7\x83\xd6\xb8\xa1\x12\xab\x79\x76\x81\xbc\xe8\x51\x0f\x9f\x36\x89\x7b\x98\x55\xcb\x1b\x6d\x96\xe5\x7a\x04\x28\x52\xfc\x47\x26\x38\x96\xa9\xd8\x8a\x79\xc2\x9f\xd3\xef\xb7\x6f\xe3\xb3\xb7\xbb\x11\x8d\x98\xe1\x9a\xbc\xe3\x86\xad\x1f\x3c\x0f\x4f\x6e\x96\xba\xc3\xe1\x7b\xd2\x57\xa1\x8c\x4a\x79\x21\xb3\x0c\x99\x83\x80\x52\x7e\x92\xbc\xf1\xb8\x41\x0e\xbb\xe0\x3f\xbe\x70\x9d\x5a\x2a\xf1\x65\xbf\xb9\xa7\x13\x6e\x5e\xf5\x4e\x6f\xe4\x80\xe9\x34\x6a\xe6\xa6\x41\x06\x06\x62\xff\x90\x8c\x40\xd7\x06\xaa\xe3\xe9\x4e\xa9\x19\x62\x60\x9d\x70\x6b\x99\x29\x5e\x99\x63\xce\x2c\xe4\x96\xdd\x1b\x79\x9b\x5b\x62\xfa\x60\x29\x1a\x48\x75\xed\x8e\xc0\x1a\xb9\xba\x0a\x74\xa1\x11\xa5\x6e\x32\xdc\x12\x94\x5c\
x2d\x4e\x88\x9a\xc6\x66\x47\xa4\xb0\xbe\x69\x19\x3f\x38\x73\x3f\xb1\x94\x0b\x95\x6d\xfa\xc8\xad\x21\xd4\xf0\x91\x46\xe0\x19\xce\x13\x83\x1e\x2c\x85\xc9\xb9\x2b\x9c\x6d\xed\x91\x5d\x8f\x58\xbf\x72\x02\xe2\xfc\xa6\x1a\x70\x39\x58\xc5\xaf\xce\x11\x51\x43\x5f\x7e\x0a\xb5\x2f\x73\x0d\xcf\xcf\xde\x21\x33\x7c\xbf\xcd\x92\x14\xa8\xca\x0f\xd9\x9e\x12\x10\x48\x52\xd0\x2a\x56\x10\xfb\x37\x9e\x34\x88\xda\x83\x43\x99\x53\xcb\x02\xd1\x4c\x85\xf2\x9b\xb5\x91\x0a\x52\x13\x5f\x0d\x6a\xa3\x6b\x92\x98\x0c\xd0\xad\xd9\x73\xe8\xed\xa4\xc5\xdc\x27\x6c\x84\xc7\x6a\x31\x8f\xe2\x29\x5d\x4a\x17\xe5\x43\xec\xc3\x9b\xba\x65\xa7\x9b\x20\x41\x5e\x89\xd8\x02\xd6\x7e\x89\x0c\xaf\xa8\x87\x82\x46\xd3\xf7\x24\x71\x2d\x12\xa0\x2d\xab\xa5\xa3\x84\xbc\xdf\x7c\xf7\x0a\x55\xc3\x6a\xf7\xab\x18\xed\x3c\xbd\x10\x21\xe7\xd0\x95\x3d\x3d\xdf\x7f\x60\x46\xfc\x16\x05\xca\x50\x64\x0b\xa9\xdb\x0a\x61\x42\x58\x0a\xab\x17\x97\xa1\x96\xe7\x35\x64\xf0\x8f\xdf\x78\x1e\x13\xa0\xf3\xf2\x43\xa5\x36\x04\xa9\x2b\x8c\x92\x66\x2a\xfe\x6e\x7c\x22\x90\xda\xb8\x51\xbd\xe9\x50\x97\x6f\xe6\x7d\x8b\xf3\x03\x5f\xf5\xd7\x94\xf3\x00\x31\xbf\x1e\xa4\x60\x32\x53\x2a\x1f\x22\x1a\x66\x9a\x1d\x45\x61\x18\x03\x94\x04\x18\xa1\xb5\xd4\xaf\xe5\xe5\x2f\x49\xbf\xc3\xf1\xa0\xfa\x88\x03\x8d\x51\x43\xa0\xd1\x07\xce\xc1\x78\x71\x22\x27\xaa\xee\x78\x2d\xec\xe6\xf2\x84\x04\x00\xf1\x53\x6e\xf9\x22\x3e\xb7\xed\x31\x54\x69\x31\xfe\x40\xb0\x65\x49\x11\xe7\xf0\xe4\x9f\xc6\xf6\x47\x2d\x71\xf1\x7a\xd4\xae\xc7\x99\x29\xd4\x5d\x2c\x1b\x3d\xfb\xbf\xe2\xe6\x43\x13\x55\x34\x19\x56\x72\xd4\x58\xca\x9c\xb4\x41\x70\x1f\x41\x80\x18\x89\x4b\x50\x33\x1b\xe7\x1a\x6c\x1f\xb9\xd1\x34\x4b\x32\x75\xc7\x4f\x68\x1a\x96\x8a\xb0\x15\x57\xa1\x4a\xd9\xa8\x6b\x07\xa9\xe3\x53\x6e\xb0\x41\xcf\xb0\x48\x05\x7a\x17\x20\xcf\xfb\xea\x21\x1a\x3c\x50\x8b\x2b\x30\x14\x41\x60\x73\x7b\x89\x4e\x71\x7e\xcd\x4a\x61\x77\xda\x2e\x72\x95\xa1\x88\x5e\x52\x68\x38\xc0\x43\x90\x5d\x7a\xa1\xeb\x47\x9c\xaa\xd3\xd0\x99\x05\x96\xf5\xad\x08\x00\xc5\x80\x62\x69\x16\xf1\xc3\x69\xa1\x8b\xc8\x49\x14\x72\x04\x67\xfe\xd7\x02\xaa\x4a\xed\x4c\xd1\x58\x92\x14\x4f\x90\xae\x8e\xcc\x98\x16\xcb\x07\xd0\x4f\x4c\x33\xee\xd2\xe1\xa0\xdc\x24\x11\x15\xac\xf2\xec\xf5\x1e\xcb\xad\x42\x6e\x4d\x05\x61\x1b\xad\xe6\xee\xe9\x1c\xea\x79\xa9\xf5\x95\x25\x7c\x23\x34\x13\x97\x20\x68\x7a\xb9\x6a\x0d\x37\x61\x13\xf4\x94\x1a\xef\x85\xf9\xbb\x5a\x79\x44\xe9\x9f\x4b\x59\x51\xb5\xd8\xb2\x56\xe2\x14\xcb\xf0\x28\xb0\xef\x8e\x0f\xbb\x0d\x19\x94\xe3\x1c\xc0\xbe\xd2\x1b\x0a\xa6\xff\xa1\xe1\xdf\xf6\x08\x28\xc6\x85\x72\xa3\x85\x70\x0d\x98\x8e\x03\x39\x23\x92\x02\xa5\x5a\x2b\x5d\xd7\x23\x8d\x4d\x96\x4a\x43\x16\xb4\x44\x18\x5e\xc1\x23\x0c\x3b\x61\x62\x92\x90\x39\xa4\x28\xfe\x24\xb5\x85\xbe\xae\x25\x7a\x89\x40\xc2\x6f\x74\x94\xd3\x3f\x47\x3b\xa8\xd0\xd0\xfd\x84\x92\x64\x05\xb3\x9c\xea\x96\x82\xc1\xd5\x06\x88\xec\xd8\x51\xdf\xec\xe6\x89\xbd\x2f\xcb\x91\x2e\x25\xe9\xa8\xb0\x4a\xc4\xcd\x0a\xe9\xa5\x24\x34\x61\xb9\x94\xdf\x0e\x8f\xda\x77\xd4\xde\x71\x0d\x14\x39\x82\x39\x2e\xb0\x39\xa9\x31\x19\xb8\x8c\x99\x40\x45\x65\x1b\x22\x7c\x5c\xa9\xc2\x22\x1e\xb2\x16\xbd\x60\x0b\xa9\x62\x4c\xc9\x0a\xa2\x34\x7b\x2c\x5f\xe1\x6d\xe7\x0e\x44\xc0\x50\x3a\x6e\xf7\x0c\x77\xa3\xdc\xee\xcf\x96\x14\x7b\x87\xa4\x2b\x45\xc6\xfc\x1b\x22\xa0\x6d\x6d\x10\xea\x7e\x91\xf1\x48\x9e\x69\x4a\x33\x4f\x5c\x4a\xab\x91\x27\x16\x46\x7f\x22\x15\xe3\x0e\x13\xff\x83\x20\xc4\x6a\xfa\x97\xc4\xee\xa9\x72\x00\x45\xab\x0e\x39\x14\x8e\x29\x2a\x3a\x5c\x37\x6e\x79\x97\x70\xf0\x82\x38\x94\x6f\x56\x05\x5e\x61\xf4\x52\x20\x0b\x23\xc5\x23\x92\x34\xd7\x9a\xe0\xfe\xb7\x1a\xb2\x59\x54\x48\x97\x48\x94\x00\x02\xac\x12\xbc\xc6\x46\x83\x08
\xba\x03\xf9\x1f\xb8\x3c\x12\xd0\x47\xf6\xce\xb7\x34\xce\x03\xbb\x2c\x2c\x08\xf3\x23\x20\x87\x78\x05\x6c\x7e\x6a\xd6\x40\x38\x74\x68\x99\x2d\x46\x8e\xa8\x8b\xea\xc6\x7c\x9b\x53\xa0\xca\x98\x8d\x1f\x05\x37\x64\xe8\xcd\x5e\x8d\xa0\x8b\xe6\x34\x22\x7f\x9c\x8b\x29\xe0\x6e\x58\x83\xc2\xf9\x13\xbd\x64\xe0\xc3\xb8\x96\xa3\x29\x8e\x86\x25\x5a\xfc\x58\x5b\x34\x9c\x5a\xfa\x65\xbe\xae\xdc\xae\x28\xf5\xa8\x56\x6c\x41\x4e\x48\x63\x54\x1f\xe4\x5c\x0f\x62\x57\xac\x06\x05\xdf\x71\xa0\xcf\x69\xa5\x4a\xd4\x30\xbe\xb9\x66\xe4\x06\x97\xc1\x8f\x8f\x45\x68\x0c\x4a\x5a\x38\xfa\x5c\x15\x29\x2c\x17\xd3\x86\x47\xae\x91\x14\xfc\xfa\x81\x79\x9d\x40\x7c\xd8\x3c\x35\x9e\xac\xae\xe7\x18\xf1\x75\xbb\xaf\x0e\x3b\xaa\x21\x17\x2a\xd6\x8b\xc8\x93\x39\x40\x58\xc1\x8e\xe5\xaf\x4c\x79\x7e\x99\x69\x0d\x94\xa0\x10\x99\x2c\xde\xa5\x45\x4a\x5d\x15\x9a\xe4\x0d\xee\x41\xf3\x47\x10\xf1\xb5\xf7\x48\xd4\x28\x6b\x31\x15\xaf\x80\x0c\x38\x53\xd5\x52\xdc\xba\x69\x95\x3d\x2d\xb2\x15\xdf\xe0\x89\x41\x18\x9a\xa2\x5a\x05\xad\xb4\x71\x88\xeb\x4c\x55\x01\x7f\xe1\x11\x2e\x4d\x25\xa6\x00\x80\x4f\x4e\x92\x25\x92\x63\x74\x2f\xa6\xc7\x6f\xb4\x4a\xb6\xf5\x0b\xae\x67\x74\x87\x52\x1d\x1c\xd2\xa2\x75\xad\xdf\xe1\x62\xe1\x21\x9e\xe6\x1a\x48\xf0\xb2\x6a\xe2\x98\x41\x1a\x76\x8c\xf6\xd7\x3a\x09\x10\x3d\x6c\x6d\xd2\xd4\x25\x57\x26\x01\xf6\xd4\x28\x70\x50\x15\x43\x5f\xf4\x4d\x1c\x89\xca\xed\xe3\x44\xc0\x6b\xed\xb2\x62\x0b\x84\x65\x57\x89\x6a\x57\x47\x32\x4b\x13\xc8\x4d\xb7\x70\xa4\x5d\x6d\xf9\xc8\x56\x89\x75\xac\x5e\x51\x1d\x2e\x5a\xcc\xac\x32\x22\x79\x43\x56\x0c\xc1\x27\x0e\xb3\xa6\xbc\xac\xb4\x82\xde\x9c\xcd\x0a\xfd\xf3\x20\x82\x37\x41\xb7\x1c\xc5\x65\x0d\xea\xe9\xe4\x49\x10\x93\x27\x56\xac\x8b\x32\x45\x75\xa9\x57\x50\x73\xa8\xf2\x0f\x03\x86\x65\x0e\x9d\xa4\x98\x8a\x5f\xda\x06\x77\xfc\x92\x65\x6b\x7d\x8f\xd2\x5b\x23\x63\x05\xe3\x57\x43\x7f\xae\x52\xa4\x49\x16\x1c\x82\x28\xe8\x35\x0a\x18\x38\xb1\xab\x22\x6f\x4d\x39\x31\x04\x97\x30\x3b\x62\x25\xc7\x1e\x9b\xca\xea\x50\x10\xb3\x7c\xd4\x79\x7c\x03\xb5\xcb\x62\xe9\xf9\xcd\x57\x91\x4b\xe6\xa3\xd1\x78\x64\xb6\x0d\x4d\x54\x2d\xdc\xd7\xd5\xa2\xe2\xf3\x6a\x1a\xf8\x5b\x2d\x9f\x6a\xa4\x71\x00\x76\x92\x69\xe8\x39\xdc\x83\x24\x39\x76\x58\x1f\xba\xcc\xe5\x3b\xbc\xf9\x9b\x87\xcd\x9a\x6a\xfc\x0d\xc5\xd6\xd1\xcd\xe6\xc6\x2c\x45\xbb\x13\x20\x32\x73\xeb\x8f\xf4\x3b\xf4\x88\xb8\xbb\xef\x48\xe8\x9d\x24\xff\xec\x80\x51\x43\xa0\x81\xce\x8a\x71\x90\x12\x4b\xbe\xf9\x87\x35\x15\xe6\x1a\x5b\x92\x6d\xcd\x25\x65\x63\xb9\xf9\x73\x5d\x27\x37\x0f\x87\x8b\x85\x23\xed\x26\x41\x4e\xc4\x01\x16\xe4\x8a\x0f\x1e\x21\x48\x3a\x4a\xce\x55\xf5\x0c\xa4\x13\x14\x10\x13\x39\x38\xe2\xe9\x1d\xf0\xce\xe0\x0f\x0c\xcf\x59\xb2\xf7\x51\x6c\xf4\x26\x2d\x34\xcf\x9d\x4e\xc2\x81\x2f\x38\xdf\xf4\x65\x59\x7a\x28\x87\x99\xf0\x3f\x3e\xf2\xd1\xd1\xe7\xa6\x25\x28\x88\x8f\xbf\x1f\x27\x38\xea\xb3\xe8\xfd\x07\xd7\xeb\x48\x44\xf2\xac\xb1\xac\x5f\x8e\x06\x83\x24\x48\x7d\x2d\xca\x75\x3c\x11\x2b\x7d\x30\x96\x07\x33\xa2\xdd\x9a\xe6\xf7\xe7\x97\xe2\x6b\x9a\x89\x38\xa3\xee\x78\xc7\x71\x3b\x77\xea\x00\xee\xfb\xaf\x35\x02\xfe\x3d\xde\x27\x49\x94\x52\x74\x0d\x01\xd9\xa3\xe2\x16\x8b\xfe\xd0\x76\x21\x6d\x6f\x5b\x80\x54\x64\xff\xab\xc9\xa0\xaf\xbe\xe1\xf4\x20\x33\x4f\xdd\xe0\xc7\x36\x76\xbe\xa3\xfb\x82\x2c\xfe\x8e\x0a\x5d\x90\xda\x4d\xc5\xb9\x68\x59\x13\xe7\x41\xa0\x19\xf7\x94\xa7\x01\xbe\xc1\xa7\xfc\x3b\xb1\x6a\x75\xd4\x0a\xc4\x3f\x81\xdd\x78\x03\x93\xbf\x31\x9f\xc5\xc7\x79\x5d\x4a\x40\x47\x97\x25\xd3\x94\x4c\xf5\x5a\x24\x7a\xbe\xb1\x98\xa1\xa8\xfd\xbf\x52\x46\x6a\x43\x35\x7e\xd4\x1b\x52\x75\xbe\x31\xb5\x39\x86\xa7\x00\xfb\x98\xc1\xae\xe9\x54\x44\x84\x9a\xca\x45\xe1\xa8\x15\xc8\x61\x2b\x5
d\x9c\xd6\x7c\x4f\x7f\x6b\x95\xac\xca\xc4\x8c\xed\x37\x2d\x0e\x9d\xc1\x4a\x96\xa9\x4a\xd9\x9f\x5a\x5e\xe6\x59\x65\x31\xaa\x9d\xde\x8c\x19\x06\xc9\xae\x0e\x40\xf6\x7f\x1b\xa4\x4f\x99\x40\x8a\x7a\x25\x44\x73\x60\x11\xde\x92\xa0\x2b\xb3\xa0\x56\xbb\xbf\x68\x56\xff\x62\x43\xcb\x56\x02\x47\x7a\x4f\xd1\x0b\xb1\x97\xb4\x64\xf6\x96\xe6\xc7\xd4\x8f\x92\x33\x84\x73\xc2\x44\x8f\x99\xbc\xc8\x96\xf7\x27\xb2\xc8\xd2\xcd\xdb\x5f\x8c\xcb\x1e\x5a\x58\xd9\x57\x9c\xa5\xb8\xda\xb1\x61\xb9\x42\x92\x05\xbb\xd3\x13\xa0\x3f\x4c\x1f\xe9\x81\x35\xf7\xd0\x7b\x0f\x25\x1d\x51\x34\x41\x7b\xcd\x7e\x8a\xab\x95\x24\xff\xb3\x0a\x9e\x2c\x07\xf9\xf0\xdb\x38\x37\xaa\xa7\xe0\x8f\x18\xd0\x10\x6d\x7d\x5b\xb1\x0b\x33\x8e\x6e\x11\x7b\xaa\x89\x2f\x4d\x03\xce\x65\x27\x29\x49\x07\xa5\xfc\x42\x3e\x60\x5d\xd8\x63\x0a\xaf\x68\xbe\x10\x63\x4c\xcc\xa9\x1c\x72\x78\xee\xaa\xf4\x1d\x04\xeb\xcb\x56\x27\x07\x66\xa8\xc8\x39\x96\x68\x22\xa0\x31\x5e\x52\x91\x33\x0b\x40\x70\x8e\xca\xaf\x11\x16\xbc\xc6\xcb\x42\xc4\x91\xfe\xd2\x4d\xb0\xf2\x45\xb7\xa8\xd7\xdd\x48\x10\x2a\xc9\xad\x56\xe3\x44\x96\x16\x0f\x0e\x11\xc2\x5c\xc5\xb9\xbf\x91\xa3\xa4\xd8\xb1\xb8\xfc\x03\x30\x72\x17\x0e\x39\x86\x38\x4e\x6f\x04\x8e\x4d\x2e\x61\xed\xb8\xa6\x4b\xd7\xc3\x8d\x5e\x1c\xb0\xdc\xf5\xea\x1f\x09\x1b\xa9\x9e\x99\x2e\x6a\x82\xd7\xa6\x25\x77\xbc\x02\x62\x55\x5e\x5a\x8b\x0f\xde\xe7\x62\xea\xef\xfc\x77\x24\xf7\xc9\x9b\xd0\xd4\x37\x0f\x67\xcd\xd5\xfb\xae\x5a\xd5\x2a\x09\x42\x52\x4e\x6a\x09\xcb\x47\xd2\xad\x80\x6d\xa5\x91\x22\xac\x96\x0a\x44\x52\xad\x71\x12\x0e\xeb\x85\xf5\xa6\xb6\xbd\x5e\x0a\x89\x0a\x98\x12\x0f\xba\xb6\x5a\xfb\xda\xfc\x40\x28\xc7\x48\x2c\x41\x10\x9c\x58\x76\xa3\xdd\x4b\xc2\xf9\x5e\xa4\x73\x12\xa7\x33\x88\x97\x62\x41\x51\xa6\xcc\x91\x79\xc5\x9c\xaf\x4c\x35\xed\xdb\xb4\x9d\xaa\x22\x27\xa9\x6f\x66\x62\xf2\x61\x49\x25\xb7\xf4\x88\x02\xe9\x17\xc8\x6f\x5d\x20\x46\xbf\xbd\x04\x94\x41\xc1\x4f\x89\x7e\x37\xdc\x8a\x46\x3d\x25\xb3\x85\xc8\xc7\xa6\x0c\x77\xb7\x93\xfa\x7a\x74\xf0\x96\x94\x1b\xec\xf9\x11\x02\xbb\x16\xb3\x7e\x10\x3c\x46\x0c\x84\x44\xb6\x5b\x6e\x90\xba\x5c\xd3\x9b\x0e\x52\x16\xe8\xd4\x13\x38\x92\xe3\x68\xc1\xb1\x92\xfa\x84\x35\x1e\xb9\x2e\xf4\xd3\x53\xa1\x58\x04\xf0\x2f\xe2\xf4\x1b\x0e\x88\x66\x38\x06\x36\xac\xfc\x62\xb2\xb0\xb6\x8b\x58\x9f\x6f\x26\x16\xd1\xde\x28\xb6\x88\x4a\xb2\x52\x13\x11\x8a\x28\x70\x0b\x83\x49\x45\x26\xf4\xda\x24\x61\xea\xb8\x8a\x9c\x43\x70\x9a\x83\x64\x83\x6f\xac\x8d\x94\x8a\x12\x60\x79\x37\x12\x8b\x26\xcc\x27\x2d\xfb\x2b\x1a\xa9\x50\x97\xcd\x04\xb4\x3b\x6b\x47\xbb\x65\x3a\xfc\xaa\x16\x63\xfc\xd8\xfd\x20\xfb\xd7\x3c\x23\x51\x55\xd4\xd5\x87\x5a\xf7\x1d\xba\xed\x17\xd3\x91\x34\xbf\xd3\x06\xb9\x91\xaf\x70\x45\x61\x78\x35\xac\x7c\x79\x94\x6e\xc5\x0d\xdb\x2a\x94\x11\x06\x35\x07\xd6\x3c\x56\x94\x12\x0e\x95\x36\x29\x70\xcb\x2d\x26\x0a\xa0\xd0\x9a\x15\xed\xb3\x86\x0a\x91\x65\x8c\x86\x8f\x5b\x4d\x92\x7d\x0c\x67\x04\xb0\x8e\x91\x13\xb9\x10\xec\xd2\x5e\x5c\x51\xd8\x7e\x27\x40\x2d\x56\x8a\xbb\x91\xf1\xb8\x5e\x7e\xf5\xdb\xe4\xd1\xd2\x73\x57\xfa\x03\x13\xf5\xf3\x45\x6a\xaf\xee\x79\xcf\x30\x5e\x28\x59\xe6\x89\xe6\x7e\x6c\x04\x97\x20\x39\x57\xe9\x1e\xb2\xe2\x95\x38\x89\xc7\x8d\x45\x5e\x11\xd8\x5f\xfb\xde\x2b\x63\x15\xab\xaa\x45\x34\x5e\x78\x07\x65\xb7\x98\x60\xd6\x4a\x00\x1c\x77\x04\x18\xe5\xa0\x3a\xf2\xe7\xb9\x63\x55\x52\x8e\x17\x7f\x31\x66\xe1\x5d\x13\xe4\xbf\x45\x1b\xd0\xa9\x9a\x52\x3e\x8a\x09\x00\x02\x33\x03\x16\xd9\x1b\x19\x06\x81\xcb\xee\xac\x42\x1a\xc6\xc6\xcf\x97\xa5\x8a\x7d\x9b\x3a\x03\x3f\x9d\xf2\x1e\xdf\xec\x3e\xb9\x9c\xb0\x9d\x09\x14\xba\x03\xcf\x1d\x11\xf4\x97\x7f\x44\xde\x76\x4e\x6a\x1d\x2f\xc2\xe5\xac\x18\x89\x29\xb4\x30\xb2\xc5\xbc\xc8\x
fd\xc2\x04\xf7\xe6\xba\x40\x51\x34\xd6\xfb\xcd\xfc\xc1\x5d\xba\x12\x6a\xde\x71\x54\x4c\x1c\x5e\xd3\xba\x02\x8b\xc3\x4c\xa6\x27\x56\x2b\x0e\x22\x1e\x78\x25\xd6\x14\x43\x87\x9a\x57\xb7\x5b\x84\x52\xfd\x1b\xc4\x3b\x8c\x9a\x23\xeb\xc6\x89\x4d\xc5\x89\xaf\xa3\x64\x4a\x33\x7f\x3d\xb0\x5f\x4a\x7c\x60\x92\x8b\x19\xc7\x15\x19\x23\xde\x82\x6d\x4b\x92\x1d\x95\x5a\xcf\x23\xaa\x4f\x21\xf8\xc1\x72\x1f\x84\x9b\x59\x6d\xc6\x1f\x65\x03\xf8\x65\x17\x84\xc2\xfc\x52\x4d\xf9\xca\x4b\xc4\xd9\x24\xa5\xe1\xcb\x37\x29\xd9\xfa\x41\x2c\x9b\x8f\x5a\x63\x3d\x5a\xe2\x53\xc2\x0f\xbe\x92\x23\xb2\x19\xe4\x19\xa3\x97\x68\x2b\xc3\xc6\x79\x40\x2c\xb9\x49\xbf\xca\x60\x0d\xa8\x50\xb1\xff\xd5\xae\x16\x0a\x6c\x7f\xd1\xd4\x34\xe5\xcc\x07\x5a\x6b\x95\xc6\x81\xaa\xa2\x58\x92\x9d\x14\xc6\xe8\x7e\x96\xd4\xfb\xe7\x01\x32\xf4\x81\x15\xf8\x9b\x26\x62\x45\x8a\xd7\x0f\xd9\x30\x0f\x03\x05\xe8\x31\x8a\x8e\xbe\xd4\x97\xd9\xc6\xc9\xac\x48\xd5\x00\x6e\x65\xd2\x40\x8a\x05\xcf\x92\xdd\xc3\x8c\x38\x21\x4a\x30\xda\x68\xfd\xf5\x37\xa9\xb8\x70\x17\x32\xb3\x8a\x8e\x34\xd9\xa7\x3c\x49\x02\x82\x15\xe5\x95\x18\xa9\x97\xd1\x09\x81\x03\xe7\xa5\xc4\x2a\xbb\xa0\xef\x8a\xf3\x31\xa2\x13\x66\x9a\x5f\x44\x69\xcd\xef\x08\xc0\xe1\x47\xa4\xec\xc8\x35\x63\x75\x66\xcd\x97\x5c\xa8\x80\x0e\x5e\x15\xe8\x2f\x42\x17\x2a\xf5\x09\x5c\x4f\xcd\xea\x82\xc5\x05\x42\x62\xd2\x2c\x0f\x60\x45\x70\x28\x3c\x44\x35\xbd\x2f\x62\xb1\x3a\x4f\x6f\xed\x3a\x40\x7c\xb0\xe6\x02\x83\xea\x93\xf0\xc2\x3d\x04\x8f\xd5\x5c\x18\x2b\x8a\xfe\x5d\x01\x5f\xae\xa8\x3c\x74\xed\x08\x9e\xb8\x0f\xb2\x97\xb5\x95\xc4\xd7\xb5\xa1\xd3\xcf\xee\x32\xff\x46\xca\xcc\xe6\xa6\x1c\xaf\x1c\x5b\xc9\x8f\x08\xfc\xee\x1d\x8c\x00\x07\x5f\x1e\xe2\xb7\x56\xf3\x9a\xc0\x0c\x08\x0d\xc5\x53\xf2\xf1\xf1\xe6\x6a\x61\x97\x94\xde\x48\x8d\xca\x74\xe7\x10\x81\x51\xb6\x83\x73\x2b\xc1\x8c\x6b\xa5\xe9\x4d\x0b\xd2\x91\xfc\xee\x5b\x4b\x74\xd3\xb9\xb5\x9e\x0a\xfe\x20\x99\xf9\xbe\x64\xc1\x7c\x83\x6b\x9e\x4a\xba\xa0\x12\x71\x1a\x66\x43\xa1\x27\xaa\xf0\x31\x5b\x83\x93\xf2\x08\x9f\x41\x9a\x3c\xcf\x6f\xf5\x93\xff\x53\x6a\x67\x95\x55\x0e\xa7\x9f\x32\x10\x4b\x28\x21\x49\xac\x56\x4b\x7f\x54\xff\xb4\x56\x86\x03\x51\x48\x59\xe2\x29\xb5\x2e\x52\x31\x45\xe7\xbf\x8d\xd6\x10\xa5\xfc\x31\xb6\xe9\xd2\xdd\xbc\x48\x90\x0d\xcd\x8a\x74\x78\xf5\x51\x20\x61\x40\x16\x95\x33\xbd\x8a\x3b\x82\xa5\x33\x33\x88\x78\x8f\xd1\x3e\x1d\xb0\x46\x94\x84\xe1\x53\x04\xf9\x25\x4c\x7f\xab\xb1\xbc\x31\x99\xc7\xa4\x2a\x0b\xf8\x59\x83\x96\xe2\xc2\x02\xf8\xa6\x64\x00\x07\xaa\xc3\x1f\x8e\x29\x96\x86\x05\x9c\x6f\x6d\x3d\x53\x1b\xa0\x65\x6a\xfb\x30\x0a\x0d\x2c\x27\x4c\x42\xba\xb2\x0b\x32\x14\x6f\x57\x31\x2d\x70\x54\x8f\x52\x0f\xb0\xdb\xda\x04\x20\x04\x61\x82\xeb\x97\xae\xf2\xe6\xd2\x1c\x2f\x48\xd2\x90\x87\x48\x10\x83\xb0\xf1\x65\x85\x56\x58\xf7\x16\x9b\x1f\xcd\xb7\xf0\xda\xcb\x9b\x52\xdf\xec\xd8\x8b\x86\x41\x5f\xc3\xce\xef\x4f\xd2\xc2\x12\xa4\x4f\x48\xda\x70\x5a\xbf\x91\x1a\x3e\x95\xd2\xf4\xe1\xa9\xc1\x61\x16\x44\xa0\xb1\x14\x45\x28\x39\xeb\x5b\x64\xe3\xa1\xd0\x2e\xe1\x79\x12\x21\x16\x49\xea\xb9\x35\xa3\xbe\xf0\x8a\xcc\x74\x15\x3f\x48\xe2\x92\xb4\xb2\x60\x09\x2f\x67\x49\x6c\x20\x3f\x88\x79\x15\x10\xb7\x3b\xf1\x5b\x56\x0e\xa1\xde\x7a\x23\x3a\x55\xa0\xda\x0e\x10\xff\x62\x3d\x67\xa5\xc9\xb0\x56\x20\x24\x5b\xb6\x57\x22\xba\x1a\xa9\xde\x1c\xf0\xa6\x51\xf6\xc8\x4e\xf4\x55\xe2\xce\x2b\x6b\x55\xde\x71\x66\x60\x32\xbf\x4d\x27\xe7\x65\xa7\x70\xb8\x1f\x09\xba\xab\x1a\x97\x33\x86\x9a\x4f\xbf\x51\x07\x3a\xac\x6c\x67\x38\x57\x99\x41\x11\x86\x59\x0d\x9a\x55\x84\x74\xdb\x19\xea\x8d\x23\xdd\x95\x4b\x73\xf4\xc6\xb8\x57\x5a\xbc\x60\x98\x9f\x2a\x6f\x48\x54\x3d\x26\xe2\x5b\xee\x24\x5f\x31\x46\x0f\x5f\x3d\xd1\
x55\x86\x76\xf5\x55\xff\x6c\xa4\xb7\x83\xab\x31\xa1\x6c\x55\x13\x27\x81\x44\x28\x82\x1e\x78\x08\x03\xc7\x37\xc0\x16\xe2\x27\x56\xed\x03\x6b\x8f\x54\xe2\xef\x8a\x8f\xe6\x99\x25\xf5\x78\xb5\xc8\x6c\xac\x91\xaa\xe9\x28\x98\xda\x0b\x9a\x9f\xeb\x57\x01\x97\x90\x1e\x85\xfe\x77\x6c\x48\xba\x0b\xc7\x53\xd4\xc6\xf3\x98\x6b\xe7\xb3\x5f\xb7\x91\xdd\x88\xab\xe4\x1a\xc9\xcf\x77\xfe\x73\x92\xb4\x87\x64\x70\x93\x32\x5b\x89\x56\xac\x99\x4b\x3f\x96\xda\x16\x3d\xad\xc2\x24\x67\xdd\xea\x09\xba\xbf\x61\x43\x7f\x2f\x59\x8a\x12\x65\xf3\xea\x4a\x6b\xfb\x08\x1b\x8f\x51\x14\x2e\x1d\xcb\x98\xa3\xad\x1a\x99\xb4\x30\x84\x56\x3c\x95\xc4\x73\x24\x79\x5f\xab\x27\xf8\x21\x9a\x6e\x48\x29\x9b\xf4\xba\x7d\x29\xa7\x1f\x7a\x62\xda\x24\xdd\x07\x52\x9e\x81\xf0\xfb\x83\x82\x4d\x0a\xe2\x92\xe3\x20\xd7\xef\x81\x47\xf1\xf3\xb2\x81\x2c\xbc\xc8\x64\x66\x71\x2d\xb2\xcd\x73\x6a\x55\xdd\xd2\x60\x97\x23\x98\x23\xfa\x5f\x90\x9d\x78\xb2\x8e\x5f\x0d\x0a\x5c\xbf\xa4\xc0\xbc\x39\x2b\xb9\xc3\x47\x42\xd7\xea\x5a\x9c\xd7\x01\x1d\xc6\xf3\x51\x8e\x3f\x9f\x8d\xe9\xba\x21\xcd\x1a\x61\x8c\x9a\x9b\x93\x7e\x3a\x82\x31\xbe\xb0\x02\x17\x0a\xb2\xc1\x1e\x20\xca\xf0\x8f\x45\x91\x02\x8d\x1a\xa8\x86\xce\x09\xbb\x80\xaa\x26\x40\x46\x36\x4c\x4c\xdf\x8c\x77\x1c\x76\x7a\x1e\x0e\xf8\x92\x39\x01\xc4\x28\x2d\xbd\xf4\x13\xbc\x82\x34\x7e\x5e\xd4\xb5\xb0\xcf\x10\x12\xf8\xa5\x90\x87\x96\x96\xb2\x85\xad\x13\xa9\xb5\xc4\xd7\x3f\x26\x54\x20\x73\xde\x6f\x54\xdc\x42\x96\x19\xaa\x73\x85\x3e\xb4\xa8\x48\x5d\x28\xd6\x5a\x4d\x6c\xc4\xf6\x43\x0d\x66\x6e\x44\x6d\x74\xe4\xe1\x17\x4c\x6e\x00\x8e\xdf\xb0\xc1\x6c\x36\x25\xc2\xb6\xa5\x14\x37\xc2\xac\xd0\x10\x29\xff\x91\xda\x8f\x14\x50\xde\xe0\xbc\xa9\xea\x81\xf9\xc2\x90\xaf\xc3\x1b\xa0\xf0\x8d\x44\x4b\x55\x3e\xfa\xdc\xd0\xb9\x57\x55\x3a\xbc\x76\x16\x5e\x02\xe3\x9a\x9f\x9d\x20\xdd\x55\x18\x6e\xed\xd4\x4f\x52\x67\x12\x07\xe3\x22\x5d\x2d\x0f\x21\x01\x8a\x69\x7c\x4a\x37\xbd\x22\x0a\x12\xfa\xb8\xb3\xf4\x16\x6c\x34\xe6\x59\xf8\xff\xc8\xab\x19\x85\xf9\x77\x12\xaa\x73\x4b\xe2\x46\x5e\x8a\x84\xed\xd2\x87\x04\x65\x64\x96\x53\xc8\x43\x77\x49\xf0\x8d\xf6\x12\x91\xd8\xa9\x48\xbb\x60\xed\x4e\xf4\xb9\x07\xe4\x57\xe6\x8d\x38\x09\x63\x8e\x83\x23\xf2\xc1\x01\xad\x72\xbc\x5f\xd8\x40\x83\x74\xdf\xee\x33\x3b\x55\xb5\xf8\xaa\x64\xa1\xc6\x71\x87\x45\x1a\x22\xad\x80\xe1\xd4\x67\xae\x99\x52\xd0\x1f\x45\x35\x7f\xf9\x07\x83\x90\xe9\xa6\x5d\x3c\xc2\x18\x83\xae\xce\x5a\x45\x80\x1a\xd9\x9c\x9a\x93\xc9\x1d\x6b\x84\x93\xb6\x00\xa9\x41\xdd\x95\xca\x4d\xa8\x59\xbf\xc8\x7f\x3f\x97\x3b\x93\xca\x8d\xb2\x7e\x57\x3e\x04\x7a\xd2\xb5\xb7\xb1\x11\x5f\xd1\xb5\xe3\x47\x56\x76\x9f\x7c\x33\xd7\x38\xfe\xdb\x00\xcb\xc3\xc3\x08\xd0\xca\x2a\xf8\xb7\x14\x32\x0c\xe4\xc3\xa4\xfa\x2a\xbc\xd0\x65\x41\x50\xcc\x7b\x46\x88\x93\x55\x40\x3c\xd6\x22\x36\xb7\x13\xc7\xf5\xb7\xeb\x17\x4e\x7f\x47\x4c\x4e\xe5\x8c\x84\x88\x44\x72\x4f\x5a\xf5\xe1\x83\x44\x5e\x25\x94\x64\x32\x11\x11\xa7\xb1\xa9\xc4\x0b\x09\x74\xc4\x51\x60\x04\x3f\x5e\xe0\x4c\xc3\x85\xa1\x9b\xb3\x0f\xf2\x26\x9d\x94\x54\x44\x4f\xa9\xe4\xc5\x56\x75\xf7\x8d\x54\x98\x77\xe4\x23\xd7\x4b\xdf\x04\x49\x0b\xca\xde\xb1\x93\xa9\xc3\x4e\xf4\xf8\xd9\x44\x21\xc6\x01\xda\xf5\xfc\x88\xe9\x3f\x92\x34\xcc\x92\xb6\xfc\x6b\x2b\x62\x25\xb0\xa2\x26\xc2\x0e\x8f\x22\xef\x22\x0b\x38\x21\x5f\x89\xf8\x46\xe0\xd7\x03\x6b\xac\x8c\x76\x84\xe1\xe1\x99\x3e\x9e\x94\xb9\x1b\x65\x01\x90\x2d\x47\xcb\x82\x87\xea\xd3\x0d\x85\xf7\x72\xb6\x1b\xd6\x48\x4b\x40\xe3\x5a\x62\xde\xa4\xf5\xca\x55\x70\x6d\xc5\x4d\xff\x16\x8e\xfc\x79\x6a\xcb\xd8\xf9\xf7\x69\x1e\x88\xb8\x4b\x04\xe2\xf4\x6c\x46\xae\xd1\x36\xca\x4d\x08\xd8\xad\x49\x2c\xd2\x2c\x72\xf8\x4e\x4b\x4a\x8d\x86\xe9\xb9\x1f\xf7\xb0
\x84\x98\x8a\xa0\xaa\x97\x39\x94\xa4\xb8\xdc\x50\xa5\x5d\x82\x9f\xd9\xbc\xbd\xd9\xfb\x5f\x37\xc1\x2f\x7c\x7d\x8a\x89\x41\xfa\xa8\x73\xa8\xc4\x31\x77\x86\xdd\x01\x82\x96\x35\x4a\x1a\x97\xa7\x89\x55\x9e\xca\xa0\xb9\xd2\x4c\x43\x96\x9e\x20\xce\x1d\xe3\x44\xf7\xf0\x20\x18\xc7\x56\xb5\x8c\xd4\x2a\x31\x23\xbd\x51\xe0\xaf\x03\x01\x69\x8a\x24\x3b\x60\x8f\x91\xbb\xb6\xcb\x9c\x14\xd1\xe7\xbf\xb8\x3b\x57\x3b\x5f\x4a\x10\x66\xc5\xb6\xf2\x57\xee\x58\x25\x75\xba\x2d\x8b\xce\x3a\x77\xf1\x64\x11\x9b\x88\xba\x7c\x09\x53\x72\x62\xe9\xa8\xb6\xc3\xf2\x04\x42\x65\x3d\x26\xba\xd8\xc5\xed\x27\x97\xa0\x98\xab\x9b\xcc\xd0\x61\x0f\xe5\x01\xce\xf4\x0d\x73\x8e\x1b\x22\x95\x89\x71\x3f\xd8\x68\xa1\x0e\xa3\x72\x86\x48\xba\xdd\xcc\xc0\x18\xf5\xa4\x44\x9c\x14\x3c\x7f\xbc\x94\xb7\x0c\x6a\xc7\x10\xe9\x2d\x4d\xf0\xc6\x3c\x48\x26\xb2\x8a\x7e\x85\xc4\x74\x21\x03\x0b\xdb\x26\x71\xca\xa5\x43\xa3\xc4\x46\x0a\x19\xc2\x87\x47\xb7\xaf\x9f\xd8\x79\x13\xb8\x7f\x1d\xde\x73\x2e\xf9\xb7\x2d\x0c\x7b\xc0\xf6\x64\x2b\x0b\x8d\xe8\x61\xe3\x46\x06\x36\x9b\x7d\x70\x62\x76\xe4\xca\x6a\xfa\x37\xea\xea\x9c\x57\x96\xa2\x5a\x59\x72\x41\x43\xef\x32\x0d\x03\x49\x00\xcd\x42\x40\xdc\x28\xd1\x42\x57\x55\xd5\x3f\x29\x67\x40\x8e\x91\x39\x8f\x2f\x0b\x7d\xde\x2f\x67\x9c\x87\x3d\x8a\xf8\x3c\xa4\x96\x61\x1a\x9b\x7f\x28\x4c\x36\xc9\x43\x0a\x03\xcc\x9c\x1f\x6a\x89\xeb\x7d\xf7\x8a\x59\x6d\x44\xf1\x6d\x92\x8c\xd6\x6d\xde\xa4\xd1\xb0\x36\xad\x9f\x6a\xbe\x96\x38\xce\x8b\x59\x6a\x40\x2b\xbb\xc7\x56\x3f\x2a\x2b\x67\x04\x96\xe7\x47\x91\x67\x89\xc2\x0d\x4b\xa4\x4b\x9d\xec\x80\xbd\x46\xfd\xaa\x0e\x3d\xa0\x06\xac\x2d\x76\x8c\x8b\x2f\xe3\x4a\x8c\x1f\xc1\x17\x54\xb8\x5e\x1a\xc1\xa0\x17\x15\x41\x95\x06\xb7\x9c\x49\x95\xb7\xe0\x1e\x8d\x87\x34\x93\x00\x7c\x50\xf2\x6b\x69\x50\x2f\x51\xce\xf0\xe2\x44\x8d\x86\xe5\xde\x2a\x1b\xac\xe1\x3e\xa6\xcc\x88\xbd\x88\x1f\x4f\x22\xb4\x67\x2f\x64\x9b\x31\x2c\x4c\x6e\x64\xa1\x27\x30\x93\x03\x97\xa6\x67\xfd\x35\x58\x43\x86\xbf\xc9\x62\x7b\xd0\xba\x4e\x26\xc9\xcb\x4c\xd8\xc0\xd8\x4d\x8d\x1b\x2b\xc9\x3f\x5d\xcc\x2b\xb1\x44\x59\x4f\x97\x51\xd5\x09\xad\xb7\xce\x0b\x7b\x08\x0e\x1a\x9d\x10\x11\x43\xfa\x5a\xe9\xa9\x6f\x74\x60\x53\xb8\x68\x81\x5e\xda\xb9\x7b\x99\x35\x38\xb2\xc4\x4f\xc7\x62\x60\x13\x46\x66\xd9\x45\xd0\x67\x06\x77\xda\x58\xbe\x49\xc2\x6a\xba\x33\xc8\xfa\xb3\xe1\xfc\xa7\x6e\x64\xec\xd3\x43\xc7\xf5\x69\x46\xb8\xf3\x4b\xa4\x0e\x1b\x4d\xb6\xcf\x56\xd1\x0d\x6d\xe8\xb3\xb0\xcc\xc9\x68\xa3\x4a\xa6\x8b\x4f\xc7\x22\x96\xa8\x7b\x89\x59\xba\x05\x21\x7a\xb5\x23\xc3\xe8\xe3\xff\xe5\x5d\x73\x91\x7f\x46\xad\x66\x03\x76\xf0\x67\x4b\xd2\xba\x27\x07\x09\xb5\xec\x81\x25\x9a\x21\x5e\xc8\xbd\xe6\x2e\xc1\x69\x15\xeb\x9b\x22\x63\xf2\x5d\x0b\x68\xa8\x75\x3d\x64\x0a\xa4\xfe\x40\x64\xcd\xc1\xca\x69\xa4\x0e\x48\x88\x0c\x9b\x20\xf3\xba\xe8\x6e\x84\x9b\x02\x81\xf0\x87\x75\x16\x18\xd6\x8b\xdc\x1f\x75\xf5\x98\x75\x7e\x32\xc6\x50\x17\xf1\xd2\x21\x07\xde\xd0\xea\x2e\x4a\x33\x3d\xf5\xf6\x33\x09\x6a\x73\x93\x72\xcb\x67\x90\xd4\xfc\x88\xf0\xa0\xaf\x69\x18\x34\xfb\xd7\xd1\x52\x13\xb9\x26\xeb\x18\xb1\xf3\xf2\x38\x8c\xa7\x71\x2a\xf2\x08\xe3\x35\x97\xb8\x91\xdc\xc8\x75\xca\x09\xdc\xbe\x7b\x26\x5d\x48\xa3\xf5\xe4\x2f\x47\x89\x4b\xe1\x78\x21\x96\x7e\x13\x74\xbe\x0a\x1b\x84\xa9\x43\x11\x57\x57\x8d\xe2\x08\x8c\xd2\x88\xa3\xa9\x85\x41\x11\x81\x79\x93\xa7\xcb\xd7\xac\x3b\xc7\xfa\x3d\x38\x0b\xcd\xdd\xa1\xde\x73\x01\x8a\x49\xae\x83\x32\xcf\x45\xa1\x9d\x8d\x64\xde\x8c\xa4\x4a\x2d\x7b\x88\xce\xa4\xd6\x30\x17\xf0\xd8\x84\xf3\x2f\xb2\x1f\xaf\xb1\xe9\x8b\x8d\x94\xf3\x7a\xba\xff\xcc\xf0\xbc\xc9\xc8\x50\xd9\xbf\x59\x13\xff\xa1\xf9\x09\xec\x06\x5f\x64\x8c\xda\x12\xc4\x3a\x1e\xa6\x34\x8a\x7e\x1
0\x16\xbd\x51\xa0\xcc\x32\x3c\xe4\x42\x40\x00\xd7\xbd\xa3\x71\xd2\x06\x95\x59\x8c\x51\xb8\x14\xf3\x74\x0f\x89\x74\x88\x02\x68\x8c\xc8\xb1\x57\x41\xec\x7d\x5b\x19\x18\x9b\xd0\x4b\x8f\x39\x1e\xd3\x5a\x7b\x26\xf4\xf6\xa4\x7f\xc8\x91\x9e\xb8\x6e\x99\x64\xc9\x48\xf1\xec\xe4\xf4\x78\x07\x24\x5f\x66\x06\x5f\xab\x20\x66\xb6\xa7\xff\x93\x31\xc5\xe4\x28\xb5\x62\xb3\xfc\xff\x5f\xaa\x03\x40\x74\xce\x25\x1e\xe0\x48\xe6\x5a\x2b\x90\xba\x15\x90\x4d\x4c\xd7\xe7\x89\xac\x39\x45\x5e\xe7\x0b\x2e\x02\xa4\x30\x65\x7d\x77\x8e\x43\x05\xd3\x15\x07\x01\x35\x52\x3b\x3e\x68\xfc\x3d\xfc\x48\x34\x22\x41\x13\x18\x61\xd9\x46\xd2\xaf\x5e\xe8\x32\x8a\x3c\xbb\xc8\xbe\x25\x6f\xbd\x4a\xde\x7a\x9d\x04\x2d\x2d\x1f\x69\xc2\xc8\x6a\x47\x82\xa5\x5e\xd8\x36\x72\xcc\x6d\x81\x7c\x0b\x70\x8f\x58\xbd\xc8\x54\x84\x5d\xc1\xe2\x09\x99\xa4\x81\xde\x66\xbf\x60\x27\x69\x27\x5e\x45\x65\x37\x4f\xe8\x41\x2f\x76\x87\x41\x48\x50\x4c\x44\x14\xb5\xa9\x93\xe6\x5a\xe7\x4c\x6a\xcd\x07\xc2\x41\x90\x01\x8d\xd2\x3d\x93\x05\x6b\x33\x0c\x42\x66\x71\x63\x9f\x76\x2d\x99\x4a\x95\x72\xbe\xc8\x25\x1c\x8f\x4d\x7e\x66\xd4\x1e\x1e\xcb\xe6\x91\x2d\x85\x72\xf4\x8c\x17\x8a\xd0\xcf\x46\xef\x5e\x08\x92\xec\x65\x30\x46\x20\x08\xca\xa0\x38\x68\xc4\xe7\x37\x79\x12\xe2\x8a\x6f\xbd\x04\x26\x4f\x5d\xdb\x83\x68\x90\x42\xbb\xb5\x52\x4d\xc6\xa6\xfd\x92\xa7\x9e\x4a\x82\x7a\xa1\xfc\xe1\x65\xac\x0b\x18\xa9\x65\x7e\x45\xe5\xe4\xf3\x73\x1f\x65\x74\xc4\x9f\xa4\x9d\x22\xba\x82\x60\xdc\x8e\x4a\x14\xaa\xb4\x3f\xcf\xdd\x5b\x2b\x4b\xf0\x13\x2f\x73\x94\xae\x9d\x9d\xe0\xc9\x60\x66\x33\xb2\x00\x64\xed\x95\xb0\x85\x3a\x2c\x3f\x74\xc0\x06\x34\x32\x3b\x79\x62\xc8\x77\x80\x11\x31\xc0\x5c\x28\xca\x5e\x13\x8b\x78\xaf\x2b\x19\x6d\xcd\xba\x27\x41\x9f\xaa\xef\xc6\xdf\x3d\xb3\xb2\x50\x8d\xb3\xb8\xad\x28\xd6\x28\x49\xcf\xaf\x4f\xc5\x36\xb6\x27\x53\x37\x8b\xa4\x6e\x8a\xbf\x85\x79\x2e\x2d\x59\x47\x1a\x61\xc5\x3d\xf2\x03\x6a\x8d\x22\x39\x9d\x63\x27\x69\xfd\x08\xa7\x75\xd1\x99\xff\x0b\xfa\x02\x8b\x3a\x7a\x0b\x83\x25\x54\xdc\x2c\xc8\xc3\x39\x48\xcf\x99\x35\x91\x08\x99\x51\x59\x30\xa7\x83\x26\x85\x4c\x11\xa7\x1b\xd1\xec\x4f\x0a\xe9\x01\xb7\x63\x81\x4f\xc8\x76\x39\x33\xfc\x21\x39\xc8\x24\x54\xcf\x83\x92\xa7\xc1\xa7\x6b\x09\xdc\xb4\x7c\xea\x40\x26\x14\xc3\x90\x30\x75\x29\x08\x0b\xab\x6b\x83\x81\x4e\xfb\xd6\x66\x76\xcb\x76\x28\x7c\x1c\xb3\xe3\x7c\x63\xc0\xb7\xf1\x21\xd9\xf6\x2f\xb4\x2c\x8a\x16\x60\x31\x66\xce\xc2\x16\xab\x23\x4b\x00\x7e\x99\xcb\xeb\x17\xc9\xf1\x1e\xb6\x28\xf1\xbe\x65\x6b\x91\xe4\x1e\xd8\xcc\x54\x5c\x03\x7c\x32\x09\xcd\xd0\xc1\x31\xe1\x4a\x42\x67\x6f\x90\x69\xa9\x12\x12\x9f\xdb\x64\xd8\x9b\xcb\xc0\x02\x99\xe8\x5f\xe2\x70\x09\x8c\x92\x80\xf4\xea\x38\xd8\x20\xda\x1b\x1a\xa0\x78\x25\x96\x2b\xd4\x90\x24\x42\xfb\xb2\xbb\xaa\x5a\x7e\x99\x80\x86\x68\xd3\xb8\x96\x94\x6b\x86\x25\xce\x3e\x60\xe4\x8d\xbc\x79\xd9\x09\xa9\x2d\x84\xb7\xe2\x9e\x25\x17\x68\xad\xa6\x38\x40\xcc\x99\x4c\x2d\xa1\x5c\x23\xf5\x76\xd2\xce\x29\xf8\x64\xbb\xe4\x2c\x9f\x44\x9e\x95\xd3\x81\xbb\x9b\xd7\xe9\x47\x45\x52\x9b\x8c\x3c\xfa\x1d\x6a\xcc\xd5\x78\x31\x62\x9e\x0d\x31\x83\x06\xe8\xfd\x2c\x94\x34\xd2\x96\xfa\x25\xbf\x81\x35\x9c\xdf\x29\x5f\xbb\xca\xe1\x32\x13\x4f\x54\xdc\xd9\xc4\xae\x96\x11\xce\xd0\x5b\x1a\x5a\xc5\x0d\x5c\xbd\xfd\xe9\x0b\xfd\xc1\x4e\x25\x19\x91\x88\x9a\xa1\xb5\x63\x42\xec\xc7\xc5\xe0\xe5\xc2\x61\x2d\xe3\x51\x2d\x5a\x03\x3a\x52\xda\xcb\x2a\x60\xc3\x77\x04\x64\xb7\xbf\x23\x2b\x3d\xaf\xb4\xbb\x37\xdb\x09\x66\x3d\x66\x1d\x29\x76\xd4\x1d\xaf\xff\x83\x96\x30\x08\x2c\x3f\xac\xf1\x02\x24\xca\x4c\x90\x14\x2f\x16\x01\xa5\x50\xd5\x88\x5b\x55\xcb\xd8\x5a\x3b\x22\x84\x6f\xdb\x3d\xbb\xb1\xec\x25\xf8\x41\xe9\xc8\x4e\xbc\x13\xa8\xec\x
12\x2f\xee\xbe\x47\xf5\x0a\xb2\xcd\xf0\x5d\x48\x54\x31\x9c\x85\x63\x58\xb3\xc0\xb9\x7e\x6b\xa5\x11\xba\x07\x75\xce\x49\x7c\xb5\x72\xfb\x56\xe0\x4c\xca\x03\x1a\x08\x94\xff\x81\x15\xce\xc2\x01\xd4\x68\xb7\x4c\x09\x17\xc7\x1f\xcc\x06\xcd\x32\xe0\x02\x6c\xf3\xab\x90\x74\x6c\x3c\xab\x4b\xf1\x01\xd8\xe1\x37\xc9\xd8\x21\x4b\x4d\x05\xea\x7d\xea\xc3\x2f\xbc\x0d\xd9\x54\xe5\x15\x2e\x97\x6c\xdf\x1b\x2e\x4c\x7f\x34\xe8\xaf\x2c\x65\x2b\x24\xb2\x8c\x3e\xfb\xfc\x77\x89\x0a\x2f\xdc\x41\x44\xb9\x4a\xd3\x67\x05\xd6\x5f\xc6\x47\xc2\x66\x48\x07\xcf\x53\x4c\x6d\xbd\xf2\x73\x04\xb5\x06\x2b\x6f\x60\x11\x70\x18\x9c\xdc\x34\xe2\x8d\xba\x5b\xb6\x89\x3b\xfa\x26\x75\xd1\x3f\x2d\x6a\x42\x67\x0b\x6b\xd9\x4e\x32\x17\xb0\x92\x45\xc3\x8d\x05\xda\xa8\x06\x0a\x32\x87\x59\x80\x55\x39\xfd\x85\x1d\x92\xae\x86\xb9\x15\xc8\xe7\x8f\xa6\xc9\x7e\x44\x32\xc2\x26\x23\x53\xc4\x7a\x6b\x57\xc1\xcc\x87\xfb\x4e\xe3\x50\xab\xe2\xe7\x10\x8a\x25\x4f\xcc\x0e\xa4\x45\xd8\x55\x55\x14\x1f\x02\x20\x7e\xb0\x19\x62\xf1\x25\xeb\xf7\xda\xf2\xe3\xa9\x99\xe8\x01\xb6\x59\x7e\x6d\x4b\x62\x3a\x58\x5b\x59\x26\x99\xdd\x70\x99\x40\xdd\x5e\x22\xde\x4b\x63\x67\x93\xd8\x91\x77\xd3\xee\xfb\x4c\x22\xdc\x63\x58\xaf\x9f\xee\x85\xd0\x5d\x13\x75\x66\x3f\x33\x1f\x70\xdf\x98\x9c\x60\xf1\xae\xfc\x5c\xe7\x4a\xe1\x47\xa9\x1a\x2e\x28\x44\xfc\xda\x68\xcd\x3b\x38\xd9\xa9\xdb\xb4\x25\x59\xc6\x26\xed\x37\xc5\xd4\x65\x0d\x0b\x8f\x51\xe6\xf8\x3f\x2d\x71\x0d\x80\xc8\x9b\xb8\x0b\x39\xc6\x44\x9e\xd9\x9a\x0f\x92\xd1\x08\x9b\xf6\xf6\xcb\x0b\x65\x25\x2a\x7a\x2a\x11\xc0\x93\x59\xfc\x27\xe7\x1c\x3d\x82\x75\xae\xb8\x7f\xa8\x58\xfa\xce\x3c\xc4\xeb\x20\x07\x02\x33\xe2\x2b\x89\x57\xd0\xee\x32\x53\x90\x0c\xa1\x11\x73\x16\x0c\x3b\xca\x51\x8a\x95\xd5\x97\xd0\x52\x99\x12\xa8\x03\x50\x1b\xe1\x4c\xb5\x61\x45\x17\xe3\x8c\x8d\x34\x32\x4a\xf1\x82\xd8\xf3\xba\xce\xe8\xde\x30\x22\x0f\xa0\xa0\x16\x2b\xc4\x77\x0c\x17\xc8\xa6\x7b\x94\x7f\x99\x2d\x24\xac\xe4\x3b\x58\xd8\xf7\xbf\x22\xef\xfe\x89\x7e\x21\x20\x6c\x4d\x23\x2c\x2f\x6b\xdb\x5f\xe6\x48\xb8\xa7\x8c\xf0\x8d\x92\x8d\x67\x6a\x9d\xae\x56\xa7\xd5\x22\x65\x65\x32\x37\x0c\x9c\x23\x61\xdc\x14\xe2\xff\xd4\x0d\x61\xe1\x8d\x5a\xf5\x68\x9b\x42\x4c\x28\xe8\x20\x73\x62\xdd\x4d\xbb\xcd\xf0\xdf\xc2\x67\x6b\x64\x04\xc5\xa8\xc9\x9e\x0e\x59\xf3\xb7\xf8\x25\x6f\xad\x6d\x59\x78\x7e\x99\x97\x4c\xbd\x1e\x6f\xc2\x84\x1c\x08\xec\xe3\xcc\x88\x59\x17\xeb\xc5\xae\x7e\xdb\xee\xde\x2f\x50\xb3\x4a\x73\x64\xae\xa9\x0e\x91\x5a\x92\xfa\x6b\x59\x8e\xb1\x8c\x76\x3a\x7c\x2e\x98\x6a\x76\x61\x04\x65\x3a\xf4\x34\x92\x36\x0d\xd6\x09\xc7\x7e\x15\x0d\x42\xea\xc7\x5d\x9a\x27\x4e\x2f\xda\xc3\xc5\xbc\x59\xbe\x45\x1c\x01\x22\xe3\x7b\xb4\x79\x77\xa2\x2b\x0e\xd5\x80\xe2\x96\x96\xad\xe3\x5f\xe2\x29\x51\xd2\x9d\x59\xd7\xb1\xb6\x07\x3f\xbd\xf1\xcd\x83\xec\x59\xac\xb0\x2c\xdb\xc8\x6c\x81\xa4\x57\xcd\x72\x40\x07\xd8\x85\x3f\x7f\x31\x1d\xf0\x54\x0e\xbc\xcd\x79\xf9\x43\x15\xfa\xe3\x0d\x27\x3f\x21\x17\xcb\xe2\x65\xbf\x37\xb5\x48\x64\xcf\xb9\xc9\x9e\x60\xe3\xd2\xc5\xa3\x14\x58\xdd\x2b\x91\xe1\x59\x3d\x0c\xeb\xeb\x15\x8e\x80\xe4\xec\x41\x45\x4a\x88\xd7\xdd\xa4\x21\xe1\x21\xd9\x7a\x43\x69\x6b\xd0\x94\x04\x0b\x4d\xcd\x74\x0f\x50\x0d\xe0\x06\x8e\x4d\x3f\xf9\x0e\xbd\x29\xfb\x25\x54\xda\xfc\x34\xd3\xcb\xb3\x31\xf0\x4c\xef\x0c\xfc\xf0\x3b\x4d\xc7\x13\x6c\x2f\xc8\xf9\xe1\x50\x70\xfb\xa4\x26\x87\x8a\xe0\x26\x85\x08\xe1\x1c\x30\x0a\x73\x4e\xb9\xe6\x3d\x2c\xe7\x70\x49\xaa\x7b\x53\x63\x94\xb0\x66\x8e\x52\xba\xb3\x69\x9e\xe8\x9e\xba\xb5\x9b\xd4\xf6\xa5\xa5\x5d\x06\x3c\xc4\xae\x0b\x5a\x5a\xe5\x55\xc7\xba\xf6\xdc\x5c\x26\xa1\x4a\xba\xa2\x0d\x32\xc8\xa4\xf8\x57\xec\x65\xfe\x70\x14\x6e\xd0\xb7\xce\x57\x38\x3f\x43\x9b\x72\
x44\xf0\x11\x4e\x4f\x1a\xa7\x46\x80\xa1\xdd\x0f\x87\xca\x94\x23\x3f\x09\xc8\x40\xe6\x66\x58\xd8\xec\xee\x40\x1a\x10\x05\x1d\x72\x44\x8b\xc2\x1c\x63\xe5\xb4\xe2\x25\xa9\x49\x28\xec\x87\x59\x10\xb4\x13\x06\xaa\xb5\xe5\x7b\x04\x45\x4b\x62\xec\xef\x64\xf0\x10\x4f\x51\x89\xa9\xad\x10\x57\x99\xcb\x2c\x85\xad\x0b\x42\x1f\xd0\xea\xf3\x34\x54\x9e\xb7\xf3\x82\xf8\x24\xeb\xbd\xf4\x7b\x8f\x97\x6d\x1a\xa4\x57\xbc\x4e\x7b\xd7\xd6\x96\xe7\x24\x92\x33\x73\x4a\x20\x18\x10\xd3\x47\x19\x01\xa3\x96\x74\x12\x27\xe2\xcb\x90\x9e\xca\x30\x82\x91\xe2\x92\x5d\x1c\x47\x4e\x18\x1a\x39\x7a\x43\x5a\xde\x7b\xce\xb9\x8a\x3a\x73\xb1\x55\xc3\xd0\xea\x51\xf9\x92\x24\x9d\x92\x91\x25\xed\x78\xf0\x6e\xb8\xda\x30\xb6\x16\xf4\x91\x68\x2a\x3c\x08\x84\x6c\xc7\xa4\x9f\xdd\xcb\xc1\x3f\x7e\x9c\x7e\x02\xf1\x52\x9b\x2f\x9e\x49\x83\x6b\x0b\xf6\xd9\x2d\x02\xff\x3c\x46\x54\xf9\xa5\x0d\xe5\xe1\x14\xa1\xd0\xa1\x8b\xb0\x92\x57\x35\x02\x0a\x86\x9d\x9d\x72\x22\xc4\x6e\x6a\x15\x45\xb4\xa2\x75\x51\x34\x6b\x09\x5d\xc0\xa5\xc2\x42\xd7\x10\x59\x6e\x2d\x82\xf4\x26\xb6\x80\x13\x71\x77\x51\x81\x96\x59\x90\x79\xe6\x1b\x87\x43\x05\x9d\x56\x6f\x23\x03\xaf\x60\x25\xfa\x8a\x4f\x4c\x74\x7c\x4c\x8b\x54\x0b\x1f\xa4\x2d\x7a\x68\x8b\x34\x91\x4c\x87\x15\x73\xd5\xf5\xae\x3e\x96\xc7\x2c\xe5\x2e\x7e\x0e\x94\x1a\xf5\xdb\x4c\x4d\x61\xdb\x21\x6c\x26\x7b\xf2\xa9\x50\x88\xce\x0c\x48\x2d\x53\x75\xfe\x9b\xa6\x26\xe8\xb8\x8b\x4a\xe3\x44\xf7\xc3\xe3\xd3\x5a\xf8\x8d\x48\xd9\x5a\x5d\xa1\x77\x03\x26\x8d\x24\x2e\x71\x65\x37\xb2\x66\xb0\x66\xf1\x38\xe2\x85\x03\x67\x93\x9e\xa0\x45\x8c\xcb\x2c\x5c\xcc\xca\xf4\x2c\x63\xd5\x6b\xbc\x83\x5d\x5e\x05\xf1\xf8\x67\x53\xd2\x0a\xe6\x6f\xb5\x1d\xe7\x99\x83\x36\x94\x25\x61\x93\x86\xd6\x09\x3a\x1c\xec\x81\x74\x5a\x57\x5c\x73\x19\xa6\xb4\x62\x6a\x7f\x1c\x24\xfa\xae\xad\xfc\x88\x1d\x15\x5f\x5c\x2c\xc0\xd8\x71\x4b\x49\x0a\xc5\xa1\xbf\xb3\xcc\xa4\xb2\x89\xbf\xd3\xe9\x7e\xd7\x89\xc0\x73\x2b\xd8\x2d\x2c\x6b\x2d\xf7\x03\x63\x34\xcb\x6c\xee\x20\x6c\x14\x04\x85\x8b\x4f\x8f\xdc\xf6\x7e\x29\x1a\xaf\xd3\x0d\x43\x03\x16\xd6\x25\x59\x16\xae\xae\x28\x2f\xa2\xd4\xbf\xb2\xa4\x41\x95\x65\xc5\x10\x4c\xf0\x88\xec\xed\x5c\x6b\xcd\xd3\x3b\xb5\xe2\x21\x54\xe4\x81\xb0\xec\xf7\xf4\x91\x08\x6f\xba\x76\xa5\xba\xb6\xb2\x8d\x52\x02\xd2\x5d\x5c\x19\xa6\xa8\x9e\x51\x3b\xf5\x59\xe2\x7a\x46\xba\x7a\xdf\xf9\xa2\x10\x2a\xe4\xe7\xbb\x3a\xd0\x14\xd1\xb2\x72\x7c\x44\x1d\xc9\xc3\xc0\x59\xc7\xb8\x1f\x4a\x85\x52\x45\xb5\xa1\x29\xe5\x9b\x8c\x51\x29\xaa\x15\x76\xf2\x97\x26\x18\x02\x22\x9c\xc5\x32\xed\x94\xd3\xb9\x0e\xd2\x32\xd4\x19\x96\x7e\xe5\xc8\xa2\x70\x1d\x3a\x11\xa5\x12\xba\x2b\xdb\xa2\xf6\xc8\x07\x48\x85\xfb\x3a\x01\x1e\xe3\x48\x26\xe9\xb4\x2e\xc8\xd5\x59\x74\x2c\x21\x8e\x52\xcb\xef\x4a\x6a\x58\xcc\xa5\x00\x3e\x6f\x17\xf6\x0f\xc5\x0f\xc8\x7c\x8c\xf5\x52\x4d\x95\x0e\xe4\xac\x5b\x90\x83\x30\x80\x3e\xa5\x04\x67\x69\x58\x5e\x44\x09\xac\x17\x28\xde\x47\x54\xaf\x49\x7f\xe4\x21\x21\xe9\x23\x08\x8b\x68\x22\xc1\x5e\x44\x8e\x47\x26\xbd\x15\x1f\x64\x25\x4c\xb7\x0e\xce\x47\xc7\xad\x10\xe0\xcd\x34\x49\xb5\x8c\xeb\x17\xff\x20\xc6\x48\x0d\x63\x15\x56\x17\xc0\xa4\xe0\x48\xb6\x7b\x7b\x0c\x58\xc4\x8d\xf9\xd8\x90\x39\x7f\x83\x7d\xe0\x49\xee\x87\xae\x76\xab\xaf\x09\x70\x8f\x16\xcc\xba\x15\x85\xf9\x50\x14\xe3\x22\xda\x5e\x89\x75\xe6\x78\xdf\x3a\xba\x54\x17\x84\xf5\x85\xed\x51\x5c\x39\xeb\x69\xa4\x47\x04\x50\xa3\xb5\xa6\x41\xf0\x24\x1e\xa9\x49\x0f\x08\xe2\xa5\x44\x33\x29\xb4\x5e\x80\xd0\x99\x23\x2f\x9e\x6c\x09\xae\x08\x00\x12\xa6\xda\x75\xa4\xbe\x23\xc3\x77\x46\x7b\x45\x94\x6e\x80\xb4\x4c\x68\x50\x3e\x89\xe3\xfd\x7f\x80\xaf\xf6\xe0\x4c\x44\x76\x22\x8a\xba\x39\x86\x07\x21\xe9\x67\xe3\xdc\x57\xe0\x09\x1b
\x69\xf6\xbd\xb4\x2d\x37\x75\x0f\xe7\x4b\xb8\xa7\xed\xbf\x2e\x0f\xcc\xd2\x12\x32\x7a\x40\x72\x91\xb3\x3d\xfa\xc3\x12\x45\x07\x46\x6d\x57\x7c\xb4\x52\xcc\xd2\xf8\xc9\x00\x87\x2a\x6f\xa7\x40\x4d\x0b\xe1\x61\xe3\xd4\x63\xb0\xf7\x08\xef\x61\x98\x38\x5e\x42\x67\xd6\x0c\x4a\x56\x10\xc9\x56\xf4\x6d\xd0\x67\x6e\x40\xac\x67\xd0\x5e\x89\x41\x79\xd8\x8f\xa9\xae\xe4\x1f\x69\x13\x33\xbc\x59\xb0\xc0\x93\xf0\x4a\x66\x75\x6f\xaf\x83\xc0\xb5\xfd\xa5\x02\xda\x5a\x11\xe5\x09\x2d\xbf\xec\xb6\xf5\x76\xe6\x28\x2d\x9c\xab\xd4\xee\x9f\x01\x3c\x0c\x8c\x08\x84\x0a\x74\x67\xeb\x8c\x70\x16\xf5\x5e\x9e\xea\xa5\xf4\x7f\x8c\x58\x65\xca\x3b\x49\x28\x1a\x97\x97\x0c\xf8\x66\xd7\x77\x1b\x85\x88\xae\x36\x8b\x36\x82\xd7\x9f\x50\xe6\xbc\x6d\x28\x10\xfc\xd2\x43\xba\xa2\x75\x4c\xd8\xcd\x48\x29\x11\x74\xdc\xd2\x0d\x1d\x83\x41\x6e\x13\x31\xe8\xc1\x82\x74\x5c\x15\x6f\xe5\x25\xf4\x4f\x33\xca\xfd\x6f\xe1\x90\x5f\xb5\x0e\x36\x0d\x03\x7b\x79\xe1\x3c\xce\x8c\x38\x30\xe5\x30\x0c\x83\x1e\x11\xa2\xdf\x42\xeb\x77\xfb\x92\xdb\x23\x20\x6e\xd0\x92\x5a\x42\x4e\x38\x8e\x40\x25\x8d\x92\x15\x35\x86\x7e\x69\x34\x9f\x95\xd1\xdb\xfa\xfe\x84\xe0\xfb\x8d\xb4\x87\xf6\x6e\x66\x16\x46\x6a\x20\x8b\x2b\x75\x30\xc9\x20\x15\x81\xb4\x4f\x34\x7b\x47\xaf\xeb\x45\x67\x42\xd8\x88\x0b\xfe\x81\x49\xbf\x17\x3d\xe1\x97\x03\x5c\x6f\xc0\x0f\xea\x98\x50\x17\x3e\xa9\x7e\xc7\x20\x90\x4e\x01\x99\x34\xca\xc2\xd3\x28\x92\xbf\x09\x25\x56\xa1\xb2\xb5\xff\x51\x8b\x2b\x1e\xdd\x1c\x01\x8c\x5e\xbb\x6e\x96\xe2\x7d\xb8\xf8\xa4\x6f\x19\xa5\x72\x0c\x8f\x77\xd9\x9a\x29\x44\x2b\xdb\x12\x75\xee\xf7\xa2\x44\x98\x07\xcd\x05\x50\xce\x0c\x61\x37\x94\x90\x8b\x42\x06\x1d\x1d\xfa\x74\x67\xe6\xa6\xfe\x6c\x68\x19\xa8\xd0\xd5\xf3\x4b\x46\x59\x84\xfa\x8f\xf6\x6a\x52\x0a\x47\x49\x6b\x8c\xa4\xcd\x02\xfe\x46\x8a\xca\x43\xd8\x27\x45\xea\xe8\x74\x2c\x63\x8b\xea\x71\xd1\x89\x7b\x72\x49\x8b\x09\x93\xad\xf9\x15\x42\x34\xc5\xce\xfa\x1c\x55\x53\x8b\x5a\x97\x42\x65\x16\x0c\xf8\x33\x89\xcf\x11\xd7\x0d\x79\xfe\xaa\xb0\x93\xb4\x87\x10\x3a\xca\xf4\xe1\x41\x8d\xc6\x72\x04\x5b\xd2\xa4\xdd\xaa\x0e\x2d\x71\x25\x5e\x82\x28\xcc\xe2\x4e\x8e\xeb\xed\x05\x73\xde\xa5\x6d\x89\x2f\x2a\xbf\x7a\x2c\x71\x2a\xef\xc6\xe3\xe0\xc7\xf2\xc0\xf3\xc3\x06\x88\x3e\x34\x3b\x32\xb0\xf2\xe1\xd3\xc1\xdb\x0d\x3c\x4c\x36\xd4\xd6\xd3\x03\x3b\x92\x69\x35\x35\x84\x24\x43\xed\x11\xc7\x01\xea\xae\x24\x03\x6f\x66\x47\xff\xd2\xb4\xfe\x04\x90\x38\x69\x9b\xfa\x2a\xa3\x70\x6a\xd2\x98\x40\x3d\x3c\x4f\xd1\xd7\x37\x41\x2b\x8b\x8b\x24\x2d\x18\x7d\x92\xb8\x7f\x13\xb7\x88\xdc\x4f\xb5\xb1\xf4\xcf\xc6\x4f\xfd\x2e\x51\xa2\xbc\xe2\x92\x87\xbb\x94\x58\x05\x2b\x33\x2d\xaa\x41\x5e\xe5\xe4\xb3\x43\x6b\x12\x7d\x33\x86\x96\xc7\x62\x9b\xd8\xc2\xda\x5f\x41\x1a\x4d\x6c\xe3\x87\x27\xce\x12\x5e\xb8\x6e\xe4\x88\x21\x91\xde\x81\x53\x62\x08\x77\x66\x6d\x73\xd0\xf9\x30\x3b\x61\x15\xd5\x05\xc0\xf5\x40\x98\xc0\x53\x9a\xf2\x6c\xd0\xb4\x5f\xc8\xf7\x2a\x8b\xd9\x05\xc4\xcb\xad\x4f\x32\x96\xe3\x34\x42\xea\xe5\xf4\x9c\x24\x5f\x89\x8c\xe8\xf6\xc0\x91\x8a\x3d\xa0\x61\x51\x20\xbe\x36\x29\x47\xfa\x42\xa3\x40\xf5\x78\x0a\x6b\x5e\x03\x26\x48\x46\xfb\xcd\xd3\x9e\x49\x83\x35\x5c\x10\x1a\x9c\xe0\x68\x1e\x03\x52\x20\x3b\x58\x4d\x24\x23\x64\x58\x6f\x06\xa3\x97\xd1\x1c\x45\x97\x61\xe9\x8f\xd2\xcf\xef\x95\xa8\x99\x7d\xba\xa4\xd6\xa4\x80\xb4\x87\x9e\x6c\x36\x06\x97\x66\x5a\x7a\x4d\x92\x2c\x50\x60\xe8\xaa\x2d\x82\xc6\x2f\x48\x8b\xa6\x33\x9e\xde\x97\xb9\x62\xdb\xc8\xd1\x75\x34\xc7\xd5\xdf\xc2\xb9\x64\x63\xda\xce\x86\x82\x0d\xf6\x0f\x1c\x60\x73\xec\x58\x94\xb3\x06\x44\x8c\x27\x35\x23\x44\x0d\xf4\x0a\x03\x02\xae\x41\x28\xe6\x86\x09\xe5\x9f\xef\xc8\x71\xfa\x65\xc4\x39\x64\xe9\x82\x63\xfa\x25\xe9\xc6\xe5\x2
8\x6d\x62\x6c\x27\xb2\xf9\x25\x52\xb2\x3c\x02\xe1\xf3\x52\xe7\xe4\x1b\xd7\x10\x69\x40\x49\x52\xf2\x24\x38\xd2\xf2\x83\x8e\xaa\x88\x97\xfe\x09\xcd\x7a\x3c\xb6\x0b\x17\x21\x0d\x1d\x5d\xdd\x47\x00\x23\x52\x6f\xa9\x3a\x87\xde\xa5\x16\x15\xde\xee\x9f\x7e\x49\xe9\x67\xb9\x93\xab\xf9\x98\x4b\xee\x5d\x81\xc5\xf0\xdd\xc8\x4f\xf6\xc1\xbf\x15\x3d\x76\xd0\x8a\x72\x0e\x1c\x9b\xd4\x24\x78\x6a\x93\xc0\x59\x2a\x4f\xd7\xc2\x63\x2f\xa3\x42\xbf\x54\x94\x24\x9f\xe8\x95\xe7\x0e\xec\x6a\xb7\x98\x9f\xa8\xf2\x73\x7f\xcc\x28\xe0\xf4\xa2\xac\x69\x1a\x1f\xa7\x73\xf6\xa3\xf9\x90\xa3\x19\x10\xee\xe0\x71\xb6\x3c\xc3\x75\xa8\xb0\x00\x3f\xa1\x86\x56\xc3\x78\xcc\x28\x97\x7d\xfa\x6c\xf4\x41\x47\x65\x39\x52\x0a\xf6\x5e\xc7\x6c\xd4\x4c\xa9\x5e\xff\xd9\x14\x4e\x5a\x6c\x2b\x9c\xd5\x8f\xc6\x49\xd6\x86\x86\xc4\x15\x19\xba\x74\xc0\xef\x1c\x24\xc6\x7f\xf5\xa8\x28\x59\x6b\x8a\x20\x88\xad\xb8\xdc\xb0\xe9\x6f\x04\x89\xad\x75\xa7\xf8\x61\xa6\xf3\x37\x88\xb3\x5b\x71\xce\x51\x5c\xd5\xf7\xa5\x67\xd9\x52\x26\xd2\xae\xd4\xcb\xd5\x95\x09\xf4\x23\x63\x54\x6e\x37\x16\x9a\x7a\xba\xb2\xa1\xc5\x08\xaf\x67\xba\xb4\x9c\x5f\x5a\x9b\x64\x6e\xde\xf3\x42\x57\x23\xdd\xb5\x86\xa6\xe5\x99\x4e\x0c\xe9\xcc\x81\x5d\x94\xc2\x33\xa0\x62\x90\x06\x29\x7a\x3f\x8f\x39\xad\x7e\xf2\x13\x69\xb9\xac\x7b\xcd\xfd\x5b\xf5\xe8\x0d\x48\xc1\x11\x02\x2c\x5a\x15\x57\x81\x03\x7d\xe2\x70\x29\x38\x00\x5d\xdd\x12\x32\x3d\x29\x16\xf7\x7b\x8d\x93\x1e\x67\x45\x61\x82\x9e\xaf\x02\x4f\x78\xea\x39\x0d\x3f\xd9\x69\x95\xd9\x45\xfd\x45\x01\xc4\x45\x60\x7f\x4a\x0a\x56\xee\xbc\x48\x00\x4a\x7e\x89\x85\x04\x9a\x38\x05\xc6\xe1\xe3\x7e\xc2\x91\x51\x89\x8a\x2c\xf6\xbf\x4d\x98\xfb\x76\x73\xcf\xee\x77\x2a\x2c\xe2\xd7\x4e\x5a\xcb\xa2\x60\x8e\x2f\xc0\x31\x38\x4d\xc5\x11\xc0\x91\x34\xb7\x40\xf0\x87\x0a\x91\x3e\x58\xc1\xe0\xb9\x43\xc2\xd7\x38\xbc\x48\xbb\x33\xc8\xa0\x14\x72\x8d\x2d\xc5\x36\x3f\x51\x85\x9b\x40\xf0\x54\xb4\x93\x30\x6a\xcc\x32\x44\x9f\xb3\x1a\x8f\x29\x28\x50\x34\x69\x12\xad\x03\xc7\x20\x02\x2a\x46\x54\xde\xcc\x2c\xca\x24\x63\x6c\xe2\x65\x48\xbd\xef\xde\x04\x1f\x1e\xd8\xa7\x31\xc8\x0c\xca\x21\x65\x52\x78\xcd\x3a\x40\xca\x82\xe2\x59\xe7\xe9\x69\xf3\xc4\x20\xb3\xb9\xb6\x59\x27\x27\x66\x99\x89\x55\x04\xd4\xa6\x16\x4a\x52\x53\xca\x40\x97\x5a\xb9\x39\x1f\xca\x7a\xb6\x37\xa4\x92\x17\xb6\xab\x1d\x9e\xfd\x25\xfa\x0d\x54\x25\x1c\x56\xce\x44\x55\x57\x01\x50\xe7\x55\x0e\x7b\xeb\x10\x86\xf2\x52\x28\xe4\x76\x88\x29\xcc\x7b\x68\xaf\x3e\x80\x1f\xef\x9b\x30\x4a\x07\x01\xd4\x1e\xec\xc8\xd2\xab\xdd\x12\xed\x50\xdc\x51\xe4\xd6\x9b\x49\x43\x81\x78\xe9\xe4\x56\xeb\x56\x29\x37\x50\xe4\xac\x63\xe9\xe0\x06\x08\xb2\x5f\x1a\xc8\xcd\xef\xc5\x40\xac\xf1\x21\x2c\xf2\xa4\x80\x0c\x59\xa9\x26\x82\x22\x9c\x3e\xc0\xc5\xed\x1b\x2b\x48\x06\xbb\x98\x5a\xfc\xe6\x5b\x60\xac\xd2\x70\xb8\xee\x52\xb1\xc2\xc8\xb2\x75\xd2\x2a\x97\xfc\x1d\xeb\x9a\x2b\xe2\x90\x25\xe9\x7c\xce\x9a\xd9\xe4\xb1\xf5\x6c\xc3\xa2\x53\xde\x17\xa5\x09\xb1\x5c\xd7\x8e\x4e\x62\xe6\xc7\x22\x01\xa6\x45\x4d\xb8\xab\x10\x36\xd6\x21\xc6\xe9\x86\xa0\x7d\xec\xd8\x7f\x66\x54\x8d\x08\xb4\x9f\x9c\x14\x7e\x54\x2c\x60\xde\xff\xa6\xe1\x32\xca\xde\xff\x9d\x60\x49\x2d\xe1\x1f\xe4\x74\x5e\xce\x1e\x4f\x1e\xca\xfb\x39\xa0\x93\xa9\x92\xf2\xe3\x14\xae\xbd\x06\xe0\x72\x60\x67\xed\x76\x4e\xec\xe7\x04\x71\x88\x71\xd7\xdd\x2f\xe1\x38\x82\xb5\x03\x6f\x84\xab\x27\x8d\x46\xbf\xbb\xb5\x27\xb9\xe6\x1c\x3e\xa8\x57\xdc\xb3\x64\x6f\xee\x52\x8a\x89\xd7\x21\xf1\x3f\x3a\x1f\x40\x46\x62\x93\x25\xec\x81\x0f\x5a\x58\x72\x70\x87\x49\xf3\x70\x39\xb7\xa4\xcf\xca\x12\xb4\x97\xa7\x77\x71\x68\xb4\x3f\xa6\x39\xc2\x57\x89\xf7\x8d\x1e\x80\xd7\xb0\xef\xa0\x32\xbb\xf1\xa4\x2b\xad\xd6\x2c\x24\xb5\x96\x65\x01\x
fd\x4b\x71\x41\xfd\xa3\x41\x1a\x81\x18\x1d\xbc\x27\xe9\x0a\x72\x80\x9f\xb9\xfd\xfe\x49\x14\x08\xdb\x0e\x81\xfa\x36\x65\x9e\x77\x2c\x37\x6c\x12\x1b\x0d\x34\x07\xc7\x5a\xa4\x8e\x9b\x5f\x13\xf1\xbf\x6b\xd4\x2b\x30\xc9\xae\x51\xb1\xa4\xe4\xb7\x5e\xc9\x6c\x5c\xeb\xc3\x88\xfb\xc3\xba\xcd\xe4\x48\xb6\xe4\x08\xd2\x04\x2f\x2f\x31\xbd\xc2\xe2\xa9\x4a\x26\x67\xc7\xed\x26\xbd\x08\xe9\x3a\x13\xff\xe2\xd7\x5b\x69\x5b\xfe\x47\x2b\xbd\x6c\xd2\x9c\xc2\x7a\xb0\xb9\x51\x3a\x2b\x71\x6c\xdd\xe7\x3b\x2c\xf3\x45\x17\x60\xd0\x62\x44\x22\xe7\xe8\x82\xf2\x2b\x49\x60\x3f\x2f\x8d\xbb\xcf\xbe\x32\x99\x81\x4e\x39\x66\xf6\x3d\x2a\x29\xd8\x28\x94\xd7\xe5\x88\x13\xdb\xa1\x23\x08\x50\xa2\x7d\xc8\x88\xc3\x1e\x88\xdf\x4a\x55\x01\x93\x5e\xcd\xf6\xdb\xd3\x6e\x68\xb4\xad\x4c\xd9\x70\xef\x87\xa0\xd9\xba\x3b\x1b\x51\x24\xde\x6b\x55\xea\xca\x7e\x34\x52\xb1\x0c\xd2\x33\x39\x23\xdc\x6b\xb6\x46\x4e\x3e\x61\x70\x49\x66\x6c\x31\x93\xad\xb3\x05\xad\xb2\xc4\x32\xfe\x92\x58\x3d\xf9\x76\xa8\x53\x11\x44\xfa\x60\x15\x45\xe7\x57\x93\x30\xf0\x9d\x28\x3a\xff\x03\xd8\xae\x5b\xf9\x29\x41\xcd\xb1\x67\xc5\xd2\xac\xae\x1f\x58\x26\xfe\x10\x7f\xf1\x61\x9a\xcb\xd0\xc0\xe8\x03\x09\xd0\x7d\x3f\xd2\xb1\x79\x90\x8f\xbb\x7e\xb0\xc9\x0a\xf4\x02\x70\xae\x67\xc9\x00\xac\x02\x08\x32\xa2\x6b\x39\xf6\x3f\x4c\xde\x6b\x52\x91\x48\x9e\x0a\x6a\x60\xc6\x9e\x44\x97\xc2\x49\x28\x45\xbe\x9a\xcb\x40\x60\xd1\xc7\xcb\x28\xf0\x78\x89\x18\x88\x21\x9d\x85\x21\xd6\xa2\x04\x2a\x1d\xc4\x05\x88\x1d\x18\x8f\xcc\x2c\x8a\x5a\x0a\x22\xef\xb8\x1c\x9b\x4b\x2d\x17\x00\x54\xc2\xc1\xb4\xfb\xe5\x79\xe1\x43\x49\xcd\xbc\x31\xd3\xdf\x44\x45\xf7\x48\x6b\x5b\xde\x6e\xd0\x53\x8a\x1c\x23\x2f\x42\x8e\xcd\xa5\x76\x06\x9f\xa4\xaa\x63\x7c\x4a\xb7\xa3\x0f\x6d\x7a\x68\xb5\x4c\x7b\xb5\x16\xd0\xba\xe7\x7c\x49\x3e\xc8\xe8\xa0\x65\x5b\x92\x5c\xaf\xe2\x99\xb0\x99\xec\x11\xef\x77\x49\x54\xec\xc4\xfa\x04\xe5\x2d\xfc\x82\x7d\xdc\x05\x49\x02\x29\x8f\x6f\xc9\x3f\x60\x4e\x09\x01\x09\xf1\x48\x99\x41\x35\x2d\xac\x64\x0b\x1d\x6f\xcb\xe6\x4f\x38\x2a\x23\xc1\xda\xc7\x50\x80\x05\x97\xc2\x44\xb9\x23\x37\x60\x9e\x0c\x3c\x10\x63\xf7\x14\x57\xe6\x9a\x7e\xc8\x52\xac\xb8\x94\xcc\x6b\x31\xd6\x53\x96\x46\x74\x2c\xad\xc8\x30\x1b\xd6\x97\x1c\xe9\x98\x20\x6b\xd9\x6d\x10\xd9\xd0\x8d\xdb\xe2\xfa\x45\xa8\x1e\x12\x0b\xcb\xef\xd4\xac\x02\x58\x5c\x77\x8d\x1f\x24\xe0\x5e\xa7\xa6\x21\x23\x18\x38\x9a\x0e\x92\x1b\xfe\x2d\xc7\x1b\x2c\x93\x14\xf7\xe2\x25\x7b\x39\xe8\x80\x4e\x0b\xe0\xe4\x12\xdc\xd2\xf1\xb5\x46\x92\x05\x2e\x52\x19\xb4\x0e\x46\xbf\x10\x7a\x16\xae\xc1\xac\xdc\xd6\xb5\x2a\xe1\xd9\x82\x95\xc2\x58\x82\x4d\x18\xcb\x63\x22\x01\x69\x18\x1a\xb2\xca\x33\x4c\x32\x2e\x3e\x5c\x68\xa6\x61\x5d\x3c\x43\x33\x70\x32\xe6\x10\x95\xc1\x6f\xee\x90\x92\xe8\x75\xbc\xa1\xd6\xc4\x8a\x32\xd1\xc9\x07\x64\xf8\x93\xa2\xc6\x34\x8a\x9e\x19\x05\x7b\x42\x2f\x36\xd7\xf9\x7c\x59\xf4\x8e\x0e\xb7\xb3\x7e\x7c\x37\x2c\xd6\x31\x4a\x48\xfd\x41\xc5\xc9\x20\x88\xed\xa4\x62\x84\x1e\x13\xe7\xae\x17\x6f\xc5\x3f\x09\x47\x5b\xe8\xef\x75\xef\x60\x96\x8e\x53\x11\xba\x3e\x2d\x9a\xd4\x3c\xf8\x51\x41\xa6\x57\x24\x65\x49\xa8\x4c\xde\x95\xd0\x4b\xa6\x39\x69\x1d\xd9\x7c\x51\x22\xae\xa1\x45\x84\x6b\xa4\x66\xa1\xa2\x5c\xd3\x78\x14\x8d\xb3\x53\xe1\x48\x42\x94\xcf\xde\xec\x25\xb6\xc4\x48\xf4\x9a\x20\x71\x8d\xb1\x5f\x00\xb5\x6c\x6a\xa6\xdf\x77\xdb\x38\xb1\x6e\xe3\x07\x24\x1c\x2b\xa7\x86\xfc\xb9\xe1\x8d\x54\x40\x90\x35\xfd\xe0\x4c\x15\x6f\xc4\x30\x87\xf8\x1f\x0a\x12\x38\x52\x91\x4f\x5e\x67\x85\x82\x17\x37\xea\x09\x84\x8e\x50\xd0\x10\x84\x1b\xa7\x7b\x62\x03\xe5\x4e\x07\x65\x1b\x39\x42\x9d\xe2\xb9\x86\x67\x60\x42\x13\x4d\xd3\x8b\x10\x5e\x2d\xe9\xaf\x62\x0d\x58\x1e\x8d\xcc\x3e\x9a\xff\xc1\x16\
x96\x9b\xf0\x77\xef\x46\xee\xed\x39\x8e\x88\xd9\x19\xdc\x15\x99\x83\xa4\xbc\xb7\xc2\x2e\x50\xab\x9a\x50\x0c\x3a\xf5\x77\x53\xc0\xf0\x8c\x2a\xb7\x5d\x5d\x7c\x94\x46\x58\x00\x68\x7f\x86\xf0\xe4\xac\xb5\xe9\xad\x60\xf6\x6d\x26\x59\xcf\x3c\x0b\x0e\x48\xc0\x70\xc7\x40\xab\x20\xda\x82\x94\xfd\xd2\x5c\x9e\x75\x7a\x52\x33\x4b\x7a\xd3\x62\x29\x34\xea\x18\xf0\x94\x0c\x03\x6f\xfc\x3b\x51\xe1\xcc\x13\x6b\xa2\xf0\x47\x12\xe0\xc4\x8f\x18\xb4\x14\xf1\x08\x2c\x8c\x24\xf0\x94\xa4\x4a\xb7\x0d\x8d\xe2\x7f\x78\x0f\x36\x7f\x03\x3e\x8b\x80\x48\xa3\x82\x7a\x12\xfe\x8d\x25\x52\x48\x6c\x5b\x8c\x36\x70\x3c\x32\x67\xa8\x81\x6b\x6a\x05\x10\x71\x27\x8a\x1f\xd6\x92\xa8\x4b\x2e\x95\x7c\xa3\xb9\xea\x64\xab\x79\x44\xed\x34\xca\x54\x2d\xe2\xa8\x28\x74\x2f\x93\xe7\xff\xa6\x1d\x13\xb5\x44\x12\x47\xa4\x5e\xa8\x7c\x80\xe7\x34\x1a\x4d\xa1\x75\x63\x8f\xc9\x2c\x6e\x61\xe5\xc3\x16\xfa\x3d\x9c\xa9\xc0\xa1\xbc\xa1\x92\xd4\x96\x7f\xff\xaf\xff\xfa\x3f\xff\xf5\x7f\x03\x00\x00\xff\xff\x1c\xf7\x13\x7e\x95\xd2\x00\x00")
-
-func dataFemalenamesJsonBytes() ([]byte, error) {
-	return bindataRead(
-		_dataFemalenamesJson,
-		"data/FemaleNames.json",
-	)
-}
-
-func dataFemalenamesJson() (*asset, error) {
-	bytes, err := dataFemalenamesJsonBytes()
-	if err != nil {
-		return nil, err
-	}
-
-	info := bindataFileInfo{name: "data/FemaleNames.json", size: 53909, mode: os.FileMode(420), modTime: time.Unix(1452717629, 0)}
-	a := &asset{bytes: bytes, info: info}
-	return a, nil
-}
-
-var _dataKeypadJson = []byte("\x1f\x8b\x08\x00\x00\x09\x6e\x88\x00\xff\x9c\x94\xcd\x6a\x85\x30\x14\x84\xf7\x3e\x45\xc8\x52\xeb\xff\x5f\xec\x0b\xf4\x21\x4a\x17\xdd\x75\x21\xa5\x14\xba\x2a\xbe\xfb\xcd\xd5\x8b\x99\x91\xe3\x3d\xea\x46\x26\x42\xe6\x3b\x33\x1c\xf2\x1f\x19\x63\xdf\x7e\x3f\x7f\xbe\xec\xab\xb9\x1f\xfc\xb1\xf0\xf2\x7d\x96\xc6\x7c\xff\x8d\xe3\xcb\x43\xdb\xd2\xae\xb2\x0a\xb2\x0e\x32\x5b\x25\xde\xdb\xea\x59\x7e\x2c\x7f\xbc\xa7\x0c\x23\x70\x13\x10\xad\x38\x43\xb1\x0b\x26\x58\x05\x30\x4c\x23\xfb\x77\x4a\xc6\x0d\x95\x48\x35\x92\x2a\xc5\xfe\x59\x59\x32\x97\x58\xcd\x91\x0a\xfb\x70\xdd\x69\x6d\x96\xfb\xb9\x5a\xcc\x05\xb5\xc9\xf6\x83\x56\x26\x41\x09\xd4\x21\xa8\xd5\xdc\x13\xb5\x4b\xa6\x12\xaa\x3f\xd0\x1f\x59\xe5\x5a\x97\xcd\x7e\x81\x0e\x73\xf5\xe2\xd8\xe8\x1f\x6b\x65\x12\x95\x48\x03\x92\x60\x4e\xd9\x3d\xd5\xca\x64\x28\x91\x62\x24\xe5\xe7\xd6\x5a\xe4\x62\x56\xc7\xac\x04\x59\x83\x12\xe5\xc8\x04\x9b\x8c\x04\x4b\x11\x16\x5f\xb6\x25\x04\xa5\x24\x5a\x86\xb4\x42\x79\x6b\xaf\xcc\x40\xb4\xfc\xe4\xd6\x53\x08\x79\x2d\x1d\xbe\x07\x0b\xcc\x7f\xa7\x68\x8a\x6e\x01\x00\x00\xff\xff\x2d\x9a\xa0\x40\x67\x06\x00\x00")
-
-func dataKeypadJsonBytes() ([]byte, error) {
-	return bindataRead(
-		_dataKeypadJson,
-		"data/Keypad.json",
-	)
-}
-
-func dataKeypadJson() (*asset, error) {
-	bytes, err := dataKeypadJsonBytes()
-	if err != nil {
-		return nil, err
-	}
-
-	info := bindataFileInfo{name: "data/Keypad.json", size: 1639, mode: os.FileMode(420), modTime: time.Unix(1452717629, 0)}
-	a := &asset{bytes: bytes, info: info}
-	return a, nil
-}
-
-var _dataL33tJson = 
[]byte("\x1f\x8b\x08\x00\x00\x09\x6e\x88\x00\xff\xaa\xe6\x52\x50\x50\x4a\x2f\x4a\x2c\xc8\x50\xb2\x52\x00\x71\x80\xdc\x44\x20\x33\x1a\xcc\x04\x72\x4c\x94\x74\x60\x4c\x07\x25\x30\x2b\x16\x22\xa0\x94\x84\xac\xcc\x02\x55\x2e\x19\x59\x4e\x03\x61\x44\x35\x82\x19\x8d\x60\xda\xa0\x6a\x4e\x45\xd6\x6c\x8c\x2a\x97\x8e\x2c\x67\x86\x30\xc2\x12\x55\x59\x26\xb2\x32\x43\x84\x32\x45\x04\xb3\x06\x55\x47\x0e\x0e\x1d\x35\x08\xa6\x39\xaa\x8e\x7c\x64\x1d\x06\xa8\x72\xc5\xc8\x72\x2a\x08\x23\x4c\x51\x95\x95\x20\x2b\xd3\xc6\x69\x53\x05\xb2\x32\x55\x54\xb9\x2a\x64\x39\x23\xa8\x1c\x90\xac\xe5\xaa\xe5\x02\x04\x00\x00\xff\xff\xd5\xd6\x71\x46\xdd\x01\x00\x00") - -func dataL33tJsonBytes() ([]byte, error) { - return bindataRead( - _dataL33tJson, - "data/L33t.json", - ) -} - -func dataL33tJson() (*asset, error) { - bytes, err := dataL33tJsonBytes() - if err != nil { - return nil, err - } - - info := bindataFileInfo{name: "data/L33t.json", size: 477, mode: os.FileMode(420), modTime: time.Unix(1452717629, 0)} - a := &asset{bytes: bytes, info: info} - return a, nil -} - -var _dataMackeypadJson = []byte("\x1f\x8b\x08\x00\x00\x09\x6e\x88\x00\xff\x9c\xd4\xcb\x4a\xc6\x30\x10\x05\xe0\x7d\x9f\x22\x64\xd9\xda\xfb\x35\x42\xd7\x3e\x84\xb8\x70\xe7\xa2\x88\x08\xae\xa4\xef\x6e\x6c\xa5\x39\xa7\x4c\x99\xd4\xcd\xcf\xf4\x87\xcc\x97\x39\x24\xf9\x4e\x8c\xb1\x4f\x9f\xaf\x1f\x6f\xf6\xd1\xfc\x7e\xf8\xcf\xca\x97\xcf\x5b\x69\xcc\xfb\xd7\xb2\x3c\xfc\xd5\xb6\xb6\x47\xd9\x84\xb2\x0d\x65\x71\x94\xb8\xee\x5c\x6f\xe5\xcb\xfe\x8f\xef\x29\x63\x04\x77\x81\xe8\xc5\x3d\x54\x97\x30\x61\x0d\x60\x38\x8d\xdc\x7f\x50\x66\x3c\xa9\x24\xb5\x28\x35\x5a\xfb\x4c\xcd\x8d\x59\xa2\xba\x98\x04\xc7\xb0\x7c\xd2\xc2\xac\xaf\xc7\xea\x71\x2c\x48\x4d\x6e\xef\xb4\x2c\x09\x25\x68\x40\xa8\xd7\xba\xe7\x5a\x94\x8c\x92\x34\x46\xc4\x47\xad\x66\x2d\xca\xee\x3a\xbf\x09\xc7\x1a\xe5\xad\x42\xff\x52\xcb\x92\x54\x92\x1c\x4a\xb0\x4f\xb9\x7b\xaa\x64\xc9\x26\x41\x29\x42\x65\xd4\x03\xa0\x46\x9c\xe3\xdc\xa4\x65\xa8\x0d\xca\x61\xb8\xed\xb6\x8c\xe5\x88\x39\x25\xb8\x28\x80\x12\x25\xab\x40\xab\x52\x1e\xda\xff\x84\x4b\x5a\x89\xda\x7c\x6f\x06\xf9\xac\x38\xbc\x15\x64\xcd\x37\x2f\x18\x61\xf2\x0d\x98\xf0\xe5\xd9\x31\xff\xbb\x26\x6b\xf2\x13\x00\x00\xff\xff\xa3\x67\xe0\x02\xd0\x06\x00\x00") - -func dataMackeypadJsonBytes() ([]byte, error) { - return bindataRead( - _dataMackeypadJson, - "data/MacKeypad.json", - ) -} - -func dataMackeypadJson() (*asset, error) { - bytes, err := dataMackeypadJsonBytes() - if err != nil { - return nil, err - } - - info := bindataFileInfo{name: "data/MacKeypad.json", size: 1744, mode: os.FileMode(420), modTime: time.Unix(1452717629, 0)} - a := &asset{bytes: bytes, info: info} - return a, nil -} - -var _dataMalenamesJson = 
[]byte("\x1f\x8b\x08\x00\x00\x09\x6e\x88\x00\xff\x5c\x5a\x4b\xb6\xeb\x38\x6c\x9c\xf7\x2a\xfa\xbc\x71\x56\x90\x35\x64\x07\x39\x19\x50\x22\x2d\xd1\xa6\x48\x3f\x52\xb2\x5b\x37\x27\x7b\x0f\x28\xb1\x0a\x78\x3d\xd3\xf5\x95\xf8\x01\x0a\x85\x02\xc8\xff\xfd\xf5\x5f\xb1\xed\xbf\xfe\xf3\xbf\xff\xfa\xfb\xef\x5f\x4f\xb7\x85\xf6\xeb\x3f\xae\xc7\xb2\xe6\xfb\xa9\x96\x29\xd4\xfd\x7e\xde\xe2\xbc\xba\x90\xee\x3f\xbe\x31\xa5\xe8\xb6\xfb\x0f\xef\x3e\xd1\x8f\x0f\xfa\x4b\x75\xfc\xd1\x1f\x93\x0e\xda\xc2\x7b\xbd\x9f\xf7\xb5\x6c\xae\xe1\xa5\x2a\x8b\x28\xef\x35\x54\x8c\x96\x23\xa6\x79\xbb\x63\x3c\x6d\xae\xbe\xc6\xff\x4b\x76\x69\xcc\xb0\x84\x52\x97\x70\x3f\xbf\x42\xce\x61\x1f\x33\xb4\x3d\x7c\xc2\xd8\x44\xf0\x5f\x2e\x69\xaa\xd1\x71\x6f\x3a\x8e\xcb\xb2\xa4\x7c\x62\xa0\x4f\x1c\xef\x3c\x5d\x2b\x19\x0b\xd8\xf7\x35\x7c\xc7\xbc\xae\x8e\x97\xf7\xb8\x95\x7d\x3d\x75\x8f\xf7\x53\x72\x15\x6f\x3c\xc3\xe3\x51\xc3\xf8\xe3\x51\x5d\x1e\xfb\x68\x73\xd9\x87\x69\x83\x98\x8d\xeb\x16\x4b\x64\xac\xca\x57\xcc\x58\xdd\xb9\x95\x8c\x6d\xd7\xb0\x94\xaa\x93\xae\x87\xc3\x5c\x9c\xd6\x8b\x39\xe2\xb0\xf1\xd7\xa5\x1d\xe6\x7d\xbb\x5d\x66\x1b\x6b\x78\x07\xfe\x2e\xbe\x2a\x30\x87\x2f\xc7\x92\xe0\x20\x59\x0e\xc6\x9c\xc5\x9f\x63\x69\x75\x5f\x8f\xf1\x65\x3d\xd5\xa4\x0b\x86\x7b\x96\x61\x89\xe7\xe1\x68\x4c\x4c\xeb\x92\xc2\xea\x29\x6e\xd8\x57\xbe\x73\xb4\x1d\xc6\xdf\x75\x33\x32\x2c\x7d\xf5\x0a\x91\x5e\x76\xdb\xf1\x07\x22\x03\x8c\x95\x00\xb5\xe4\xbe\x35\xe4\x79\xfc\x23\x0b\x3c\x0b\x77\x56\xcb\x18\x7e\x0a\x59\xe0\x8f\x79\xa7\x7a\xe0\xfd\x49\xdc\xe5\x81\x00\xe7\x01\xf8\x55\xbd\x2b\xae\xf5\x30\xf2\x99\xf1\x99\xac\xe5\x34\x40\x1c\x4b\x29\x07\x1c\x22\x7e\x0a\xdb\x78\xc3\x89\xe1\x61\x3f\x99\x6d\xfc\x1a\x8e\x25\x60\xb8\x6e\xf6\x82\x35\x1f\xad\x85\x34\xf6\x3c\x95\x69\x1a\xef\x7f\xe2\xbc\x97\x0a\x38\xe5\xd0\x86\x79\xdf\x6b\xb7\xcb\x7b\x58\xb4\x78\x8f\x05\x34\x40\x75\xae\x2e\x2e\xf0\x0b\xfc\xd0\x56\xf7\x1d\x8f\xb3\x60\x59\x2d\xd8\x02\x5e\xe9\x23\x63\xe0\x2b\x88\x95\x3c\x10\x49\x81\x78\x79\xc6\x8d\xfb\xcd\x7b\xc9\xb1\x30\xd8\xf1\xf2\xa4\x38\xda\x19\x8b\x89\x26\xdb\xe2\x0b\x4b\xd8\x5d\x4e\x08\xa8\x14\x04\x3f\x88\x6e\x8b\x24\xef\x52\x40\xe4\x66\xa2\xa4\x16\x9f\xf1\xe9\x7c\xd4\x1d\xa3\xe7\x52\x37\xc7\x48\xaf\x24\x00\x79\x98\x43\x1e\xb6\x5c\x92\xc4\x94\x06\x75\x20\x07\x54\x21\xc0\xa6\xff\x20\xfb\x79\x02\xbf\x4c\x63\xec\x90\x38\xb6\x4b\x0a\x9e\xd7\x89\xd5\x76\x86\x98\x31\x9a\xe0\xcf\x73\xab\xe2\xb3\x83\x31\x59\x35\x82\xfa\x28\x41\x63\x5a\x98\x82\x21\x98\xc8\x80\x98\x94\x68\x0e\xde\x33\x5c\xe4\x5b\x6e\x05\x31\xd1\xb1\xe8\x08\x34\x05\xbc\xcc\xab\x06\x17\x53\xc1\x93\x29\xf0\xd3\xb1\x85\xb9\xf0\xa5\xf9\xa0\x17\xe7\x35\xb8\x31\xaa\xf0\x69\xf1\xa5\x02\x87\x29\x3e\x1e\x85\x23\xc7\x85\x4e\x2b\x4d\x22\x00\xa6\x3c\x09\x28\x60\x65\x43\x94\x58\xd3\x86\x7f\x48\x2f\xf0\xbc\xf0\xa1\x89\xcf\xb1\xea\x54\x4e\xcf\x81\x36\x03\x2a\xb0\x68\x0d\xc8\x3b\x62\x03\x46\x9e\x04\x70\xd9\xe0\x31\x1d\x43\x3e\xc4\x02\xb8\x96\x1d\x2b\xfd\x86\x46\x5f\x7a\x86\x51\xe7\x72\x2c\x95\xb9\xcc\x1b\x1a\x7c\x07\x5f\x0b\x57\xa3\x7e\xf6\x18\xe1\xc7\xf5\x44\x0b\x50\x17\xe6\x1a\x01\x89\xc1\xf4\x21\x5f\x8e\xd1\x3f\xe2\xc2\xf2\x47\x86\x2f\x70\xc1\xe9\x83\x62\x1d\xc3\x28\xb3\x08\x2d\x64\x05\x8d\xc0\xa0\x90\x88\x15\x4a\x30\x45\x63\x6e\x99\x2a\x63\xa8\xba\x0d\x53\xef\x82\xf9\xf1\xc2\x12\x4d\x46\x50\xe2\xeb\xc8\x19\x9f\x85\x25\x6a\xba\xae\xc7\x14\x48\xd6\x01\x59\xd4\xe5\x05\x88\xb9\x89\x80\x32\xa2\xba\x87\xd3\x58\x58\x00\xa6\x2d\xa6\x5d\xa9\x17\x4a\x83\x43\xcf\x61\x8e\x09\xfe\xe0\xbe\xaf\xa4\x3c\x86\x4a\x9b\xee\x10\xc1\xbe\x38\xd1\x18\x4a\x37\x34\xb3\x50\x2b\xe9\x80\x61\xef\xbc\xea\x91\x17\xc9\x72\x66\x6e\x17\xee\x3d\xe0\x11\x09\x7
1\x83\xc4\x13\x3b\x0d\x58\xe3\x2d\xa4\xa2\x66\xdb\x4f\xc4\xea\x1e\x3d\x66\xb3\xa7\x97\x63\xe6\xbe\xf7\xc0\x50\x56\x79\xb3\x9f\xb2\xf0\x60\x60\x3f\x5e\x4e\x26\x86\x64\x3f\xc0\xd3\x5c\x90\xb5\x9e\x47\x02\x1f\xbc\x0e\xf8\x53\x58\xc4\x69\x2e\x39\x39\xf5\x7a\x2c\x2b\xe6\x1e\xd1\xea\x4d\x12\x15\xce\x5a\x56\x0c\xd1\x91\x8c\xf5\x3f\x42\x8a\xff\x90\x05\x36\x2c\x48\xf2\xac\x62\x41\xad\x4a\xa0\x80\x82\x9f\x2e\x6e\x4c\xac\x0d\xc1\x72\x93\x31\xa3\xd7\x04\x85\xa8\xdb\xf1\x7a\xfc\xd0\xb8\x3d\xc9\x61\xe2\x16\x35\xa7\x4c\x27\x5d\xde\x6d\x81\xf7\x63\x73\x6e\x56\x9a\xa3\x09\xba\x68\x21\x99\x8a\xa5\xe1\xd5\x4f\xac\x0b\xfc\xaa\x9a\xa0\x09\xab\x38\x8f\x58\x7c\x45\x28\xe2\x16\xe4\x6d\x44\x22\x25\xf0\x8b\x31\xd7\x85\x94\x7a\xab\x32\xbc\x82\xc0\x9a\x21\xdc\x5f\xd2\x34\x2f\x9a\x2f\xfe\x3e\x98\x92\x4c\xb2\x68\xbb\x7c\x64\x32\x8f\x46\xbc\xfc\x7c\x54\x5a\x30\x3c\x7b\xa4\x14\xd8\x0b\x06\xca\x4a\x10\xc7\x4e\xcd\xff\x0d\xd9\x5b\x6e\x0d\x5b\x74\x2b\xdc\xea\x8c\x41\x91\x47\xbc\x41\x62\x61\x16\xdf\x6b\xf8\xc0\x3c\xe9\x80\x5e\x90\x62\x83\x3c\x23\x3a\xb2\x1a\x91\x4b\x4b\x41\x6f\xac\x87\xf2\xd0\xc5\x27\x85\xc4\x77\x20\x08\x4e\x15\x16\xa4\x9e\xf4\x28\xb9\x8d\x77\x4b\x4d\x0a\xd6\x8a\x44\xd4\x45\x1c\xfd\x7e\x2b\xb4\xf1\xca\xdb\x4d\x98\x26\x09\x7b\xe7\x1f\x8c\xb3\x81\xa8\x04\x26\x13\xd4\xf6\x94\x9c\x6e\xac\x3a\x7a\xb5\x58\x0d\xf0\x52\xe9\x2c\x74\xe4\x56\x4b\xcf\xf0\x44\xac\xce\x30\x1a\xaa\xb1\xd0\x98\x6a\x05\xe3\x46\x4f\x6d\x2e\xcd\x25\x61\x9c\xc3\x17\x4a\x6c\xaf\x94\xfe\x32\xf2\x46\xde\x19\x33\xbd\x6b\xdf\x2c\x53\xb3\x3e\xd6\x79\x8d\x86\xea\xc7\xbe\xbf\x63\x92\x5e\x9c\x80\x9c\xb3\x99\x6f\x71\xa6\x2e\x28\xa6\xb8\xba\x4b\x09\x8c\xde\x69\xe2\x8d\x84\x1f\x14\x2e\x37\x06\x0a\x42\x00\x60\x10\xf9\x8f\x22\xec\xf2\xc2\xf8\x3d\x98\xc4\x24\xfb\xc7\xe6\x96\x43\x22\x57\xf2\x2b\x86\x71\x46\x72\x4f\x0a\xa7\x5c\x8c\xf0\x2c\x02\x92\xb1\x59\x65\x89\xe0\xb7\x03\x89\xe1\x8a\xef\x5d\x05\xd1\x15\x5c\x58\xd2\x97\xf1\xd1\x15\xc3\x02\x58\x9a\x1c\xef\xb5\x6e\x6d\x67\xfa\x98\xfc\x5b\x8b\x14\xa6\x9f\x90\x40\x8e\xad\x32\x1d\x3e\xfb\x00\x11\x8c\x20\xfa\xab\x72\x1f\x5f\x9b\x94\x53\xd0\x04\xd6\x22\x1d\x1e\xbb\x54\x1e\x8b\x91\x25\xd2\xf5\x0f\x04\xea\xb7\x14\x61\x81\x91\x57\x5e\xff\x2a\xf5\x93\xac\xd8\xb8\xbf\xc1\xb6\xa2\xf0\x3f\x88\x88\x28\x99\x48\x7b\x00\x3b\x1c\x1f\xd3\x1f\xba\x25\x82\xeb\xc4\x89\x5a\xe0\x49\xf6\x34\x9c\x16\xdb\xc6\x6d\x17\x59\x77\x42\x1c\x6b\x95\x1d\x54\xf9\xfb\xf0\xd5\xdc\x40\x83\xa8\x62\xef\x65\x37\x38\x63\x19\xeb\x88\x4b\x76\x33\x98\x58\xf4\x68\x18\x29\x67\xd7\x76\x87\x78\x2c\xa9\x06\xd7\x88\xbd\x1a\x03\xd1\xe6\x5c\x02\x47\x58\x28\x2a\x49\xeb\x7e\x9c\xa4\x63\x70\x85\xbc\x82\x19\xd4\x67\x16\xbb\xa2\xbb\x22\xd8\x78\x2e\x99\x72\x25\x17\x10\xed\x22\xbf\x21\x60\xd7\xa8\x52\x44\xcc\x4f\xda\x4d\x8e\x9c\x74\x2b\x1a\x3a\x00\xba\xfc\x1d\xea\x4c\x8d\xfb\xcf\x6e\xf5\x91\x11\xf7\xa1\x26\xdd\x2a\x45\xfb\x06\x04\x5c\xb9\x08\x70\x97\x5f\xc1\x4e\x51\xdd\xd3\xa8\xd7\x44\x88\x6b\xcd\xd7\x57\x9b\x94\x34\x58\x69\x14\xd5\x09\xbb\x96\xa2\xdd\xce\x5a\xf5\x4b\x0e\xd2\x02\x94\x75\x35\x7e\x72\x13\x3c\x32\xb8\x62\x38\x4e\x52\x15\xdd\x23\x96\x7a\xc2\x9c\x33\x25\xf9\xf5\x3e\x20\xe2\x8e\x89\x7c\x15\xb6\x8d\x82\x65\xeb\x35\x3e\x60\x61\x2b\xd6\xa7\x69\x7d\x08\x59\xb0\x07\xd4\xe7\x25\x52\x55\x35\x95\x7d\x27\x56\x3c\xfc\x29\x33\x76\xf1\x5c\xb8\x53\xf4\x5e\x42\xd3\xcc\x89\x08\x5a\x8f\xcd\x84\x96\xac\xd1\x2c\xe6\x6a\x4f\x39\xaa\xc1\x03\x36\xfd\xc4\x5e\x25\x53\x19\xca\x2a\x77\x72\xb5\xf2\x86\x14\x7f\xcc\xea\x82\xf7\xc8\x02\xc3\x41\xb3\x08\x4b\xb2\x83\xe4\x83\x58\xa7\x12\x7a\x41\x53\x91\x89\xa0\x5a\x58\xba\xbe\x63\x47\x0d\x1d\x81\xbd\x9e\x0c\x17\x11\xbd\x24\x3a\x29\x7b\xa6\xa8\x
8d\x43\xad\x74\xa5\x88\x65\xab\x43\x60\x98\xc8\x6e\x4b\x2d\x1f\x2c\x2d\x68\x0e\x9b\x53\x27\x56\x6e\xd1\x9f\xaa\x75\xd7\x43\x4b\xb5\x4d\xdb\x8e\x41\x8b\x97\xde\xc9\x00\x13\x5e\x6d\x0c\x88\x86\xab\xc9\x03\x8e\x60\xed\x2e\xdf\x45\xf8\x45\x8c\x81\x92\xba\xd0\xf2\xa2\x0d\xb8\x43\x91\x18\xd8\xe0\x4c\x29\x2e\x92\x2e\xb3\xb0\x74\x8b\xa9\x32\x9f\x12\xd9\xc4\xd3\x47\x5b\x5e\x9a\xb4\xf6\xb3\x19\xd9\x6a\xd4\xf2\x44\x3d\x28\x95\x8d\x78\x90\x81\x03\x43\xfa\xc0\x4e\x64\xb7\x44\xa0\x88\x76\xbf\x0f\x82\xe0\x02\x0a\xde\x6f\x06\xe7\x49\xc2\x6b\x67\xbc\xd2\x18\x77\x0f\x94\xd5\x86\xa4\xb1\x08\xaa\x2b\x6d\x46\xf7\xb1\x27\xc1\x89\x21\xac\x65\x4f\x2b\xa9\x30\x89\xe5\x52\x2d\x69\x92\x63\x72\x51\x67\x3a\x46\x2d\x77\xd8\x17\xaf\xc5\x0c\xac\x37\x29\xb0\x2b\x4a\x8e\x90\x7a\x1a\x54\xb4\xd9\x22\x5e\x11\xbe\x95\x48\xa6\xeb\x74\x80\x61\x1e\xd1\x07\x76\x4c\x9c\xf7\x01\x1f\x5c\x1d\x13\xbe\x3f\xe3\x25\x07\xf0\x57\xf7\xd6\x8e\xbd\x44\x82\xea\xae\x8d\x80\xed\x92\x83\x2a\xbd\x77\xaa\x49\x8d\x00\x92\x68\xf7\xa6\x8d\xfb\x43\xf5\x46\x0e\xd4\x2d\xca\xae\x02\x5d\xe4\xe8\xab\xd7\x93\x14\xa4\xb4\x1f\x7b\xa7\x6e\x32\x61\xb1\x2c\x0c\x48\x36\x13\xa5\x6e\x62\x21\xb0\x60\xdb\x52\x8f\x61\x04\xed\xbc\x48\x7d\x42\x8a\x35\x5a\x71\xb3\x05\x96\x90\x05\x34\xe6\x22\xa9\xd3\x25\x6d\xab\x68\xf7\x4c\xe4\x4a\xc4\x48\x8b\xca\xe8\xfe\xa3\xa1\xc5\xde\x60\x56\xc5\xcf\xad\xc9\x30\xd8\x44\x2e\xa6\xdb\xa2\xd0\xd2\x24\x6d\xe2\xfd\xee\xce\x00\xcb\xab\x28\x73\x9c\xac\x94\x5c\x98\x81\x26\xd7\x40\xf9\x3d\xfd\xa8\xcc\xe9\x27\x09\xe0\x61\x06\x5d\x47\x27\x32\xb1\x66\x82\x26\xd1\x60\x9a\x04\x8f\xbb\x73\x38\x26\x3e\xd2\xd9\x88\x40\x11\x69\xd1\x56\xc1\x52\x70\x31\xb5\x7a\x55\xb8\x8b\x63\xb8\x08\x5f\xd3\x5a\x52\x72\x01\x54\x4e\x79\xc3\x47\x3e\x39\x92\x96\x30\xe8\xc6\xe4\x33\xa3\x87\x58\x4c\x3d\xd8\xfb\xb1\x6c\x88\x1e\x19\x62\xab\x35\xd4\xd4\xfe\xd0\x2e\x69\x73\x59\x85\x86\x68\xc1\x49\xcf\x2e\x36\x96\x73\x57\xf5\x85\x83\x8f\x22\x0c\x03\x4b\xee\xe1\xe1\xb4\x53\x8a\x4d\x92\xe0\x24\x26\x35\x92\xa7\xe0\x8e\xc1\x87\x26\x23\x2d\x8e\x9d\xb3\xd8\x22\x2b\xe0\xbd\x1e\x46\xa8\xa7\xed\x5f\x55\x16\x79\x29\xd2\xab\x68\xeb\x8e\xb1\x94\xf9\xb6\xde\x05\x4c\xec\xfa\xf4\x33\xb6\xd3\xb2\x40\xcc\x54\xa0\x35\x71\xfa\xa0\x5c\xf5\x32\x8d\xef\xae\x90\xa9\x5f\x64\x0a\x15\xb8\x1f\xe6\x9a\xb9\x5b\x88\x6b\xcf\x1f\x95\xef\xaa\xcd\xc3\xc6\xb0\x7e\xbb\xf6\xfb\xd0\xee\x7b\x91\xea\x73\x73\xa6\x61\xec\xb2\x36\xff\x23\x68\x33\x99\x83\x16\xcf\xce\xc6\x75\xfe\x12\xf4\xfc\x05\xcc\x73\x49\x14\xb2\xa0\x8b\x50\x99\x87\xba\x7e\x31\x95\xd1\x64\x92\xa1\xe4\x2e\xb6\x03\x3a\x62\x09\xce\xde\x46\x25\x80\x9f\x6e\x96\x5c\x09\xe9\x21\x59\x0d\x58\x2c\xed\x60\xf7\xe4\xab\x11\xdc\x33\x22\xa7\xee\x7d\x0d\xed\x2b\x9e\xbd\xc0\xe3\x8e\x59\x59\xee\xbd\x16\xd2\x83\x33\x1e\xe4\x88\xa5\xc5\xde\x9a\x95\x59\xb5\x66\xf2\x74\x99\xa5\x3e\x62\x99\x51\x8c\xe6\x5d\x0f\xc8\x95\x9e\x89\x6d\xda\xd3\x9e\x70\x53\x56\xee\xe4\x3f\x86\x8f\x9d\x67\x72\x04\x86\x2b\x14\xd9\x7c\x56\xa6\x9b\xb3\x46\xed\x6b\x66\x73\xc0\xc5\x9e\xa0\xf6\x01\x64\xac\x5d\x95\xf0\x46\x59\xe2\x8e\x5b\x75\x68\x53\x73\x5e\x89\x64\xff\x25\x0f\xdf\x2d\x0f\xd6\x93\xd0\xa9\xdb\x69\x0e\x94\xb3\xe3\xa9\xc2\x6e\x52\x8d\x29\xe2\x7e\x34\x8b\x89\x25\x09\xb5\x1f\x57\xff\xe4\x71\xd3\x67\xd0\xba\xc3\x3d\x8b\x09\xd2\xc8\x81\xaa\x69\x39\x6b\xab\xd7\x4d\xfe\x60\xb7\x98\x39\x30\x1d\xb3\xe2\xbd\x85\x73\x2b\xe8\xe3\x5c\xc7\xbf\xec\x5d\xf6\xd3\x3e\x78\xf4\x8e\x18\x86\x61\xe7\x13\xd5\x6c\x35\x1f\x54\x64\x7d\xf0\x40\x26\xaf\x3e\x66\xa6\xf5\x9d\x87\x4f\xe1\x87\x7d\x9e\xa4\xfa\x72\x52\x8d\x72\x22\x04\x2f\x2b\x8e\x8f\x1c\x0b\x34\xa9\x7b\x18\xba\xbf\x79\x1a\x97\xf4\xdc\xee\x25\x98\x0e\x1b\xad\x0c\x62\x9b\
x8a\xc9\xe2\xda\xcd\x93\x84\xac\xdd\x73\x38\x81\x50\x15\x46\x79\x17\x2e\x73\x74\x74\xa9\x43\x5a\x78\xe0\xd3\xc2\xda\xca\x1e\xe8\x1b\x61\x55\x43\x34\x25\x4f\xb7\x14\x45\x8e\x6a\xd4\xaf\xe9\x4b\xf5\xc8\x20\x86\x24\x25\x1d\x0a\x17\xcd\xe0\x13\x95\xf4\xf3\x20\x10\xd9\xa9\x15\x22\x6d\x3c\x00\x5a\xc9\x79\x39\xf2\x5c\x42\xca\x1e\x3d\xdc\xdf\xbf\x14\xa6\x89\x24\x27\xf6\x93\xe8\x23\xf5\xd6\xf0\x38\xd8\xdb\xf5\x64\xf6\xd2\x3e\x66\x73\x58\x48\x3f\x54\xe2\xe1\xc4\x7d\xa2\x09\xd1\x19\xd9\x16\x8e\x9f\xa2\xe7\x96\xec\xca\xde\x0a\x18\xd9\xd1\x89\x13\x78\x04\xd6\x8f\xba\x50\x80\x3e\x64\x65\x3f\xb0\x76\x0b\xe4\xb5\xcd\xd2\x9f\x64\xe0\x05\x0b\x62\x99\x34\x6e\x79\x80\x79\xaf\x86\x29\x9a\x17\xe9\xdc\xb4\xb1\xae\x58\xe9\xe4\xaf\xad\x28\x3d\xa9\xa1\x5e\xe9\x68\xd3\xa2\xe5\x1b\x11\x95\x93\x4d\x76\xa2\x7a\xd1\x9f\x52\x45\xed\x2e\x0e\xd2\xe6\xc5\xe1\x09\x8f\xc3\x33\x23\x2c\x7c\xe5\x3a\x34\xd2\xd3\x81\x30\x1b\x2e\x60\x07\x31\x2b\xb3\xe7\xe8\xe1\xfd\x1d\x0f\xef\x52\x1f\x91\x05\x4b\xf1\x0c\xa5\x90\xed\x8d\x99\x43\x14\x1c\xfc\x53\xd8\x2f\xd1\x0e\xd8\x4c\x11\xf3\xa2\x23\x6f\x52\x05\x4f\xbf\xf4\xea\xcb\x2a\x72\x82\x53\x4e\x87\xf6\x19\xc7\x22\xea\x87\xc1\xef\xd8\xbb\xef\x67\xf2\x20\xa6\xfb\x6c\x91\x2e\x29\x49\xe1\x1b\x27\x25\x8e\xc4\xe0\x52\xfe\xc9\xe3\x1b\x71\x92\x22\x95\xac\xb1\x17\x55\xd6\xda\x05\xbe\x3a\xda\x5c\xaf\x9e\x58\x5e\x94\x48\x4b\x9b\x73\x37\xaf\xdb\xdb\xa0\x06\x7b\x62\x58\x4d\xb0\xf3\xd4\xb0\x4c\x68\x71\xe8\x25\x1c\x26\xcc\x7a\xbc\xb5\x59\x56\x92\x96\xf4\x2f\x03\xa6\x5e\xbe\x2a\xa4\x23\x23\x99\xa7\x65\x5f\x0d\xcb\x71\x38\x42\x2c\x7b\x5c\x09\x79\xe1\xd6\x84\xb0\x09\xea\xba\x53\x38\x14\x4b\xee\x37\x0b\xd8\x7a\x5c\xb9\xed\xaf\xbd\x54\x62\x45\xae\x76\xab\xb7\x22\x14\x06\x9f\xbc\x48\x3a\xb9\xcc\xab\x46\x06\x65\xd6\x55\xbf\x9a\xfa\x3f\x12\xf8\x8f\xab\x03\x3e\xeb\x25\x0d\x73\xec\xbc\x97\x29\x6a\xde\x85\x93\x57\x27\x6a\x9c\x61\x73\xb4\xf0\x7e\xb3\xf8\xb7\xe7\xff\x22\xf6\x89\xd3\xbe\x69\x9e\xc3\x9f\xba\xae\x57\x90\x88\x40\xa7\xa1\x9f\x94\x54\x60\x2a\x68\x5d\xa6\x76\x16\xb5\x24\x72\x90\x13\xac\x81\x6d\xf8\xba\xb3\x90\xea\xd7\xc7\x78\x79\xe2\xe0\x09\xb9\xa1\x15\x51\x73\xe6\x8e\x04\x92\xce\xae\x9d\x7a\x3d\x45\xba\xaa\x52\x0c\x16\x85\x2b\x49\xd7\x8f\xde\x86\xd0\xfe\x0f\xc5\x98\xff\xe3\x44\xb1\x6b\x11\x84\xc1\x75\x03\x02\x08\x96\xea\x31\x1b\x31\xff\x4f\x44\x86\x6c\x85\x4d\xcd\x14\xe2\xa2\x77\x9b\xfe\xe8\x07\x4f\xdc\x6e\x75\x4d\x05\x80\x14\xe7\xbb\x36\x50\x14\xbe\x24\xc6\x7e\x8d\x8a\x94\x20\x91\x0b\xe7\xfe\x66\x0e\x0c\xe2\xd1\x29\x6a\x29\x08\x7e\x17\x9b\xf0\xd4\xc4\x1b\xc9\x79\x1f\x1d\x8c\xb5\x88\x37\xd9\xb5\x2a\x27\x43\xf0\xd4\x4a\x2a\xfc\x84\x17\x85\x94\x10\x4e\xb3\x30\x54\xa2\x0e\x89\x57\x4c\x46\xd1\x88\x9d\x57\xe6\xcf\x1c\x4c\x63\x3f\x78\xc5\xb4\x0f\xf6\x4e\x93\x53\x49\x2f\x3f\xb3\x7b\x3a\xee\x0d\x38\xaa\xfd\xea\x4d\xc5\x4a\x9a\xbb\x32\xc6\xfd\x48\x02\xed\x47\x31\xba\xa3\xfb\xba\x1e\x9f\x4d\x45\x7a\xe5\x41\x52\xa7\xdb\xa6\xca\x4b\x83\x7b\x64\x6c\xac\x9a\x12\x1f\xa6\xdb\x28\x86\x92\xa8\xd5\xd6\xa0\x6b\x1b\x4f\x0e\x49\xad\x5a\x7f\xbe\x48\x06\x6c\x64\x69\x75\x28\xc9\x0b\xab\xed\xd7\x89\xc0\x65\x22\x20\x78\x3f\x6e\xa5\xd0\x7d\xf2\x1a\xc2\xb8\x1b\x88\x70\x93\x8d\x31\x88\x4b\x33\xf4\x27\xbf\xb6\x6f\x61\x1d\x71\x9f\x68\xf3\x1c\xa3\xf0\xc4\xfc\x36\x39\xdb\x54\x6c\x75\x4d\x21\x8b\xfb\xe6\x9d\xcc\xc6\x14\x01\xf4\xc5\x56\x79\x95\x68\xb9\x54\xf9\x30\x58\xcf\xee\xc8\xd5\x0d\xe2\xb7\xd2\x2c\x3f\xbc\x87\xb8\x15\xaf\xc7\xb0\xd7\xad\xa5\x60\x6e\x2d\x65\x06\xc5\xd3\xa1\x0a\x73\x67\xd2\x0a\xd3\xa9\x7a\x7c\xc8\xde\x6c\xb7\x24\xd8\xc3\xe3\xeb\xe6\x2c\x30\xb9\xc8\x34\x06\xa2\x5f\xed\xf8\x25\xdc\xef\xe8\x0b\xd4\x0b\xa6\xbc\x3f\x99\xf5\x9a\xd6
\xfc\xaf\x83\x34\x49\x41\xb1\xe8\xad\x99\x34\xe3\x52\xd4\xab\x98\xd3\xd1\xf6\x0e\x2b\x39\x53\xef\x20\xf4\x3d\x8d\x67\x36\x7e\x57\xb1\xb7\x63\x7c\xa8\xb0\xa3\x2e\xed\x07\xc0\x43\x8d\x57\x7d\x62\x8f\x34\x18\xf1\x7d\x8b\x96\xc4\x33\x24\xd1\xe2\x94\x2a\xd7\x3e\x1e\xe6\x0e\xaa\xe6\x5d\x29\x75\x8a\xa9\x58\x02\xbb\xe6\x29\x98\x9b\x05\x2f\xbd\xbd\xf8\x0c\xff\xbe\xa0\x05\x9a\x37\xb7\x98\xe7\x3f\x6f\x2c\x24\x6d\x0b\x98\x8c\x2a\x30\x4e\x7a\x01\x71\x19\x79\x5e\x84\x00\x55\xf4\x53\x64\x0a\x8d\xb1\xe8\x1a\xae\x63\x3c\x49\x66\xe0\x1f\x53\xfb\xb7\xc8\x4b\x62\xf1\xc1\x45\x3a\x2c\xf2\xbe\x51\x0d\x3f\x5c\xd7\x0e\x7f\x78\xf9\x80\x97\x90\x2f\x24\x8d\x61\x4e\xd3\x8b\xea\x77\x26\x60\xc3\x88\x00\x24\x29\xf6\xb6\x03\xaf\xf2\xfd\x79\x53\xa0\x1f\x22\xb6\xf2\xeb\xaf\xff\xf9\xbf\xbf\xfe\x3f\x00\x00\xff\xff\x45\xf8\xc0\x95\x0f\x2e\x00\x00")
-
-func dataMalenamesJsonBytes() ([]byte, error) {
-	return bindataRead(
-		_dataMalenamesJson,
-		"data/MaleNames.json",
-	)
-}
-
-func dataMalenamesJson() (*asset, error) {
-	bytes, err := dataMalenamesJsonBytes()
-	if err != nil {
-		return nil, err
-	}
-
-	info := bindataFileInfo{name: "data/MaleNames.json", size: 11791, mode: os.FileMode(420), modTime: time.Unix(1452717629, 0)}
-	a := &asset{bytes: bytes, info: info}
-	return a, nil
-}
-
-var _dataPasswordsJson = []byte("\x1f\x8b\x08\x00\x00\x09\x6e\x88\x00\xff\x4c\xfd\xeb\x72\xf3\x3a\xd0\x2c\x8c\xdd\x0b\x7f\x24\x55\x7b\xbf\x3b\x65\xc9\xe7\xe4\x16\x72\x07\xf9\x91\x02\x49\x88\x84\x44\x12\x7c\x78\xd0\xe9\xbb\xf9\x6f\xa6\xbb\x21\x2f\xfb\x59\x65\x2d\xd9\x92\x48\x60\x30\xc7\x9e\x9e\xff\xab\xfa\xff\xa6\x75\xab\xfe\xdf\xff\xbf\x6a\x0e\xeb\x7a\xcb\x4b\x5b\xfd\x4f\x75\x38\xbe\x7f\x7c\x7e\xbd\x1e\x7c\xff\xe8\xa1\xfd\xf8\x77\x8b\xcb\xf6\x28\xbf\xb2\x9f\xed\x12\xba\x3c\xd9\x83\x79\x5f\x57\xff\x45\x1d\xd6\x58\x87\x61\xb0\x87\xa7\x9c\x37\x3d\x1c\xe2\x36\xc6\xe4\x7f\x37\xe6\xe9\x12\xfd\x0f\xbf\x7e\xfd\xdb\x1e\x84\xba\xb1\x77\xf3\x5f\xed\xeb\x16\xa6\xce\x1e\xad\x7d\x68\xf3\xcd\x9f\x0a\xeb\x16\x17\xff\x3c\x7c\xd9\x83\xe3\xdb\xdb\x9b\xfd\x38\xdb\xa5\x06\x7f\xbf\x75\x9f\xe3\x32\xe2\x61\x1f\x96\x21\x3e\xfe\xae\xdb\x2f\x61\x6f\x2e\x63\xf4\xdf\xed\x13\xdf\xc8\x9f\x79\xe4\xdd\x1e\x6d\x8b\x7d\xde\x94\xfd\x4d\x17\xfb\x58\xfc\xb6\xde\xf5\x79\x5b\xea\xf8\xcc\x9a\x9b\xe6\xf5\x42\xdc\xdf\xc6\x4f\xdb\xa2\x2d\xdc\xff\x60\xdd\xec\xc7\x25\x0d\x03\xfe\xac\xcf\x0d\xef\xaf\xf1\xcb\x49\xfe\xd9\x43\xbe\x46\x5c\xe9\xb4\xf6\x69\xf2\x87\xf6\x9a\x3e\x0f\x51\xab\xe0\xef\x12\xe7\x19\x2f\x0f\xf6\x69\x78\xc3\xb2\xf6\xfe\xdb\xaf\xcf\x8f\xf7\xe3\x01\xcb\xd1\x75\x78\x4b\x5b\xa8\xe5\x16\x16\xff\xc3\x35\x0d\x57\xbc\xb4\xb5\xa5\x0e\xfe\xcc\x23\xd8\x12\x47\xbd\x09\x97\xf6\x0b\x5f\x7e\x79\x71\x18\xb2\xfd\xcc\xb8\x65\xbf\x9f\xb4\x45\xac\xd0\x69\x89\xb1\xcd\xa3\x5f\x79\x1e\xe7\x9d\xcb\xb0\xc6\xbb\xdf\xcb\x66\xcb\xd7\xe2\x89\x2e\x69\xa5\xfa\x30\x8e\xfc\x93\x5d\x0f\x9a\xbc\x5c\xe3\xb6\x45\xad\x15\x6f\xc7\xd6\x13\xdb\xae\xdd\xb3\xbf\x1c\xf8\xff\x47\xff\xf6\x37\xcc\xc3\x89\x2f\xef\xed\x9a\xfd\xc5\xf3\x92\x26\x2d\x82\x3d\x37\xac\x31\xf8\xcd\xa5\x60\x92\xe3\xd2\xf9\xf0\x3b\xb8\xe1\xca\xbb\x36\x43\x5a\x62\xb3\xc4\x0d\xcb\xda\x9e\xba\xde\x9f\x99\xc3\x72\xc1\x26\xe4\x5b\x9d\xf1\x20\x8c\x61\xc9\x58\xc2\x6d\x49\x77\xbf\xc6\x30\x34\x90\xdc\xe4\xfb\x43\x91\xe8\xf6\x64\x0b\x0b\x69\x5e\x66\xec\xcf\xda\x98\x10\xe3\xfa\xe6\x3e\xc7\x09\xaf\x0c\xf8\xa2\x90\x44\xec\xc1\x9c\x97\xd5\xae\xd5\xdf\x3e\x69\xfb\xc7\x60\xbb\x92\x20\x32\xf6\x16\x17\xec\xda\x14\xd6\x86\x6f\x1f\xc3\xb4\xfb\x15\x1f\xde\xfd\x9b\xc7\x02\x2f\xeb\xf3\x32\xf9\xcf\xd5\xae\x77\xda\xfa\x00\x09\x9b\xb6\x84\xed\x34\xd9\x8c\x03\x3f\x71\x9d\x72\x
(hex-escaped binary payload omitted — the remainder of this hunk is an opaque, line-wrapped byte sequence with no recoverable text)
\xa2\x5e\xca\xc1\xce\x2c\x47\x86\x2c\xa9\x40\xb0\x3f\xb0\xb2\x30\x94\xa1\xcd\x73\x3c\x81\xc8\x28\xb6\xbb\x90\x08\x85\x55\xdb\x8c\x7d\xaf\x46\x7c\x73\x4e\x38\xa2\x8d\x93\x12\x28\xf9\x53\x49\x90\xb1\x3f\xbf\x26\x82\x7f\x6a\xc5\xd1\x7a\x7d\xa1\xd2\xae\xa6\x0e\x90\x23\x99\x30\x88\x8b\xf2\x8f\xb2\xc0\xcf\xb7\xfd\xe3\xff\xb3\x5c\x04\xbf\x73\xdd\x57\xfa\x84\x1c\x9f\xc4\xfe\x7e\x2f\xf5\x95\x56\x21\xf6\x04\xf8\x70\x7c\xc6\x19\xe6\xab\x98\xb5\xb9\xe8\x48\x68\x92\xa1\x3f\x0c\xfa\x29\x85\xbf\xe5\x22\x24\xe7\x28\x5f\xd0\x55\xa0\x62\x6c\xf9\xb0\x23\x26\x0e\x2b\x9f\xaa\x0c\x60\x0e\x6d\x19\x56\x89\x14\x17\x1f\x10\x00\xe0\xc3\xbe\x59\xc5\xc0\x4c\xed\x8a\x58\x5a\x69\xa3\xef\x9f\x5f\x92\x73\xfc\x8d\x7b\x6a\x2c\xac\x0c\xfc\x00\x33\xb8\x9d\x46\x26\xaf\x1a\x26\xf3\x97\x6a\xe4\xa6\x5a\xa8\x4d\xb1\xca\x6a\x60\xfe\x63\x2c\x45\x25\x6c\xc3\x6d\xef\xfb\x0b\x03\xf0\xf7\xc8\xdc\x15\xfa\x99\x0e\x0d\x92\xc1\xcb\x52\x97\x6b\x43\xd1\x9c\xcc\xfc\xe3\x0a\xbc\xc6\x83\x49\xec\x41\x49\x21\x87\x63\x31\xa1\xe5\x09\x1c\xf1\x69\x4a\x03\x79\xe8\x83\xfa\xe6\x25\xdd\x84\x5a\x9b\xfe\x66\xa4\xcd\xf4\x14\x50\x3b\xf9\x40\x0c\xff\x0d\x96\x1b\xcf\x87\x31\x21\x6e\x27\xf3\xc5\xc6\xb7\xb4\xa8\xff\x39\xa5\x71\xc5\xa2\x13\x69\xb7\xc0\xfe\x09\xe3\xee\x95\x6d\x01\xf7\x5f\xe8\x1b\xdd\x88\x6d\x63\x24\x01\x46\x28\xed\x6b\x2a\x2f\xf9\x1e\xd0\x42\x15\xf8\x7a\xd8\x6e\xb9\x70\x44\x4e\x6a\x08\x39\x85\x65\x64\xf0\xea\x24\x9a\x82\x8f\xf8\x40\xfb\x95\xde\xe1\xea\xc9\x11\xad\xff\x2a\x59\x38\xc2\x0f\x77\xe3\x75\x49\xa2\x22\xf4\x12\x4b\x05\x7f\xba\xc1\x79\xc5\xe0\xa5\xb5\x3c\xf8\x2b\xfd\xe3\xdc\xbc\x40\x00\x69\x75\x42\x37\x8a\xa3\x5a\x0b\x79\x4a\xe8\x70\xf8\x68\x81\x32\x5e\x80\x1c\x17\x35\x26\xc4\x32\xd9\xeb\x9c\xd7\x78\xea\xf2\x22\x9b\x61\xf0\xbd\x13\x50\xe0\xc9\x52\x55\x53\x00\xe4\x54\x5b\x80\x39\x79\x9c\x28\x65\xb1\x16\x8a\x44\x83\x27\x60\xe9\x53\xed\xec\x99\x07\x4d\xa0\x58\xda\x6a\x21\xfd\xc6\x70\xbf\x59\x34\x86\xe3\xa1\x49\x49\x8e\x95\x53\x5b\xe5\x32\xae\x5b\x69\x60\x34\x4f\x93\x93\x37\x2c\xc0\x23\x0a\xa3\x2e\x94\xee\x41\x3c\x59\xc1\x6b\xbd\x28\xea\x7c\xbe\x33\x00\xfe\x10\x01\x72\x99\xfa\xe4\xad\x97\x87\x9f\x77\x34\x3b\x0e\x8f\x32\xef\x98\x9d\x9c\x08\xa9\xbc\x6f\xa1\x30\x79\xfb\xa8\x73\x6c\x74\x66\x93\x58\xee\xcb\x8c\x84\xd0\xd4\x51\x33\x8b\xdb\xbf\x09\x74\x8f\x92\xec\xd2\x78\x4f\x8b\x50\xa7\xd0\xb1\xa1\x3b\xf3\x14\xfa\xc9\xe1\x64\xb8\xc5\xb5\x09\xb3\x37\xa1\xa0\x60\x27\x0b\x8b\x37\xed\x6e\x4c\x85\x62\xcd\xdd\x12\x02\x8d\xe6\x35\x0d\x62\x13\xf4\x1e\x57\x26\x8a\x27\x5f\x54\xc6\xb9\x20\xf2\x84\x00\xfc\xb9\xfb\xc5\xe7\x83\x29\x63\x77\xbe\xb9\xa4\xf4\x4a\xd7\xd2\x5d\xed\x98\x99\x64\xbe\x11\xb6\xc6\xb7\xb4\x26\x04\xcd\x49\x4a\x61\x80\x89\xf0\xac\x48\xcd\x92\xef\xac\xb3\xa1\x7b\xe0\xa0\xc1\xca\x8b\xcf\x02\xe6\x31\xe9\x3a\xec\x86\x29\xa5\x82\x47\x2a\x45\xf1\x7a\xf9\x7d\x7b\x5b\x08\x48\xaa\x4d\x3d\xd2\x4b\xf7\xc9\xc6\x37\x0e\xf9\x21\xbd\x1d\x8b\x8e\xe4\x28\x00\xa2\x42\x0b\x5c\xfa\x11\x9b\x51\x33\x99\x9c\x2d\xa3\x02\xeb\x4f\x27\x1b\xb3\xf9\x0c\x7a\x5a\x7c\x8b\xa3\x44\xfa\x1d\x42\xe0\x94\x00\x8b\x5a\x03\xb7\xb4\xd1\xdb\xb7\x0e\xa8\x40\x84\xba\xa8\x27\xb2\x2e\xce\x63\x1c\x7c\x0a\x37\xac\xe3\x25\x15\xee\x81\x5b\x7a\x0a\xca\xc8\xa9\x9c\xd8\x37\xcf\xd6\x2b\x0c\x33\x11\xef\x55\x25\x3a\x7e\xfe\xbc\xf9\x7f\x54\x4b\x65\x54\x8d\xf7\x02\x91\x6d\xd1\x2f\x6e\x56\xbe\x71\x09\x37\x56\x7d\x5e\x1d\x18\xb1\x26\xe1\x5f\xba\xcf\x4c\x10\x7b\x77\x09\xcf\x53\xff\x90\xbf\x43\x80\x93\x2f\xff\x46\xee\x1d\xc7\x82\xc8\xd4\x30\xd4\x15\x9a\xbe\x74\xfd\xba\x0a\xc1\x0c\x16\xff\x9b\xaf\x1f\xff\x86\x1e\x63\x63\xb3\xd9\x74\xb5\x69\x64\x44\x50\x07\x3d\x3e\x83\xb9\xd9\x8c\x39\xa9\xea\xd6\x18\x3a\x52\x1
f\x65\xce\x24\x1c\x34\x56\xcc\x51\x63\x70\xa8\x6c\x3f\xcc\x83\x46\xf7\xc1\x35\xec\xf7\x9e\x07\x2a\x34\xa9\xd5\xd8\x1b\x93\x25\x10\x70\x3c\x50\x54\x83\x8d\xcb\xf9\xa9\x5e\x25\x07\x68\x36\x1c\x06\x07\xfc\x8c\x63\x74\xd4\x71\x37\x8a\x77\xf0\xc0\xbc\x06\x56\xd6\x8c\x17\xae\xa2\xb6\x33\x51\xef\x82\xa6\x7b\xbc\x06\x61\x74\x80\x26\x51\x68\xe6\xb1\xdd\x4a\x82\x72\xbc\xbd\xc3\x3b\xc5\x8c\xdf\x9a\x3b\x40\x37\xc4\xc7\x56\xac\x65\x34\x1b\x51\xe5\x6e\x0d\xe9\xe8\xfe\x87\x86\xef\x52\xe6\xe8\xed\x1b\xec\x15\xc6\x51\x95\xa1\x7a\xde\x7e\x48\xa0\xc9\xe0\xd8\x9a\x49\x55\xeb\x8b\x5c\x05\x81\x4d\x9d\x4c\x8c\xf8\xcd\xf3\xee\xd4\xd6\x38\x72\x49\x30\xe6\xd1\xd9\x90\xee\x74\x1f\x3c\x67\x88\xc4\xb6\x0f\x29\x45\xf9\x4f\x59\xf4\x8d\x07\xb2\x0e\x8b\xda\x72\xd5\xd4\xe9\x9a\xcb\x8c\x3c\xb0\x8e\x4e\xc6\x8b\x85\x72\xd0\x08\xbd\x89\x56\x31\x3f\xf3\xcc\x42\x65\x71\x8c\x9d\x1b\xb7\xb6\xc4\x66\x76\x05\xd0\x03\x5f\xbf\xc8\xce\x7f\x3b\x41\x05\x10\xfc\xb7\xe5\x98\xbe\x7b\x40\x0e\xdf\x3f\x8f\x1f\x44\x51\x48\xdb\xbb\xf6\xe4\x9f\x9d\x92\x32\xa4\xe6\x20\x6b\xe2\x29\x58\x7e\x57\x54\xd9\x9d\x1a\x16\x8e\xf0\xc9\xce\xe9\xc0\x62\xd2\xc9\x01\x06\xc8\x87\x05\xed\xc1\x49\x70\x99\x35\xf8\x25\xe2\x81\x50\x03\x66\x2c\x80\x04\xe7\x43\x4d\xb5\x6f\xd3\xb5\xac\x19\xc7\xc4\x38\x40\x46\xfd\x45\x0e\xfb\x67\x10\x18\x27\xe0\x61\xcd\x2f\xa0\x6b\xa0\x41\xea\xd5\xff\x14\x9a\x16\x62\x67\xa4\xb3\x4a\x70\x3b\x05\x6f\xf1\x40\x48\xf1\x9f\xbc\xd6\xb2\x13\x80\x53\xef\x6d\x98\xe3\x8b\xa0\x9f\x70\x2d\x53\x0f\x79\xc7\x09\x27\xbd\x1d\xb5\xb4\xa9\x95\xa2\x3f\x0a\xed\x5b\x6f\x2e\x44\xaf\x0c\x98\x1b\x6e\x3c\xeb\x50\xb1\xed\x32\x91\x38\x0d\xf5\x69\xfb\x41\x6b\xdd\x69\x2c\x93\x4b\x3a\x93\x35\xcf\xf0\x8f\x2c\x49\x98\xef\x0d\x17\xf7\xed\xed\x0d\x49\x4f\x0c\x4b\x81\xd0\xc6\x65\xa6\xd0\xac\x23\x3d\x69\x53\xae\xb7\x42\xd5\x9c\xcc\x33\x21\x57\x7b\xee\xe5\xa4\xd6\x59\x63\x03\x4c\xc8\xd8\x29\x83\x70\xde\x4f\xc5\xe1\x8d\x85\x0c\x0b\x59\x70\x6e\x30\xd2\x1d\xea\x33\xc8\x59\x9a\x9e\x41\x2e\xeb\x95\xeb\x82\x90\xcf\xd7\xfb\xf3\x1d\x85\xb9\x53\x20\x2b\x1d\x99\xae\x58\x23\xf4\x34\xe3\x11\xfb\x3e\x09\xdb\xcd\xa8\xb3\x5d\x5e\x2c\xd9\xb6\xdb\x34\xb9\xd9\xe2\x14\x46\x4b\x79\xe5\x87\xce\x4b\x7e\xd2\x32\x2c\xe9\x6a\x07\x94\x51\xe4\x28\x1e\x06\xc5\xf7\x15\xdc\xd6\xc8\x9a\xa2\x89\x86\x2a\x44\x16\x7c\x88\x5e\xaf\x49\x4d\x93\x48\x30\xa2\x16\x41\xfb\x33\x33\xcc\xc0\x89\xb4\x21\xd2\x6d\xab\x5f\xac\x1c\x35\xe8\x78\xfd\x56\xd9\xf4\x1e\xbb\xee\x35\xde\x69\x11\x15\x98\x7b\x61\x88\x2a\x9d\x06\x2e\x4f\x48\x18\xf8\xa8\x5f\xca\xd8\x33\xef\x4f\xf8\x29\xc0\x32\xfe\x62\x79\x31\xc6\x0c\x39\x7a\xf8\x31\x05\x05\x73\x87\xdd\xf6\xae\x65\x66\xe0\x23\x81\xee\x68\xf3\x67\xa2\x6a\x20\xff\xbb\xcf\x18\xea\x59\x1c\x01\x90\x0a\x92\xb9\x3c\x44\xab\x34\x38\x20\x76\xd1\x66\x2b\x3c\x61\xf3\x56\x55\x1c\x30\x6c\xea\x9c\xd4\xb3\xa9\x9a\xbe\xd7\xfd\x74\xce\xe3\xc4\xd4\x8d\xeb\x54\x26\xde\x92\x50\x7b\x7e\x68\xf3\x09\x14\x35\xde\xe7\x4b\xea\xd1\x9c\x47\x91\xe7\xfb\x79\x2c\x6d\x11\x8e\x3d\x8f\xcc\x8a\xfa\xe0\x4f\x46\xf8\x4e\x3f\x8a\xbc\x9a\x7f\xd1\x38\x3c\x38\xee\x2d\x5f\x4c\x5c\x81\x0a\xce\x1a\x33\xb8\x7b\xbe\x90\x66\xbb\x61\x37\x8b\x17\x52\xcc\xcc\xb0\x4c\x38\x31\x16\x8b\xa1\x0c\x84\xb4\xa0\x18\x83\xae\xb1\x3b\x04\x0c\x8f\xa8\xf1\x2d\x44\x44\x28\x3a\x77\x8b\x2e\x3a\x20\x8c\x81\xdf\x68\x61\x82\x48\xe5\x39\x1c\x09\x15\x84\xdc\x25\xf1\x52\xa0\xb0\xe2\xcf\xbd\xfd\xb0\xaf\x72\x1f\x7c\x4f\x3f\x8e\xa5\xe0\xe2\x55\x2c\x22\x49\xa4\x48\xdd\x03\x64\xd7\x34\x28\xa9\xd4\x67\xde\x4d\x00\x9e\x38\x33\xba\x28\x27\xc8\xfc\xb3\x78\x47\x2d\xac\x84\x45\x8a\xfc\xa7\xc7\x95\xfa\x2e\xa9\xde\x95\x46\xb1\xd3\x45\x1d\xe8\x98\xa7\x96\xd5\x85\x1d\xa7\x1f\xa8\x
55\x5a\x7c\x53\x08\xda\x58\x04\xfe\x40\xd0\xe7\xfb\xc3\x87\x64\x55\x42\x8d\x93\x3e\xe7\xb4\x6f\x75\x16\xdd\xeb\xaa\xca\x6f\x7a\x30\xc9\x69\x67\x13\x1a\x33\x0b\x98\xe1\x7e\x3c\x75\x74\x49\x32\xec\x2d\xc4\x7b\xda\x37\xf3\x50\x2a\xb4\x29\xd6\x60\x59\xbc\x9a\x36\x8c\x64\xce\xdb\xb2\x9a\x6f\x37\xcf\x04\x54\x6c\x6e\x13\xd6\x07\xf7\xe1\xe4\x2a\x33\x73\x3e\xb5\x23\x9a\xfc\x57\x7d\xde\x0a\x8f\xe8\xe6\x94\xb7\xd8\xec\xf5\xc2\xd1\xd1\x76\x10\x35\x9b\x21\x5f\xe8\xc5\xc5\x45\xb3\x1a\xb6\x45\xf8\xd2\xa5\x55\xf4\x71\xc9\x1a\x0d\x34\xe6\xe2\x6a\x30\x48\xbd\xfe\xa5\x1b\xc4\xc9\xe9\x9b\xfb\x37\xd7\xd0\xab\x18\x17\xc2\x87\xc1\x51\x82\x03\xed\xdc\x1d\xa4\xed\x76\x39\x11\x0e\x4b\xb8\xaf\xf7\x6f\xff\xae\x50\x51\x7e\xbc\x1a\x8c\x38\x89\xcb\x8d\xb4\x5a\x03\x1a\xa6\xfc\x06\x50\x83\x33\x8f\x6e\xb2\x79\xae\x44\xca\x86\xea\xbf\x79\xb5\xbe\x8f\xa5\x8d\xbc\xf7\xe2\x05\xb7\x3c\x10\x88\xdd\x84\xa1\x63\xa7\xad\x9d\xdc\xd2\x9d\x70\x1a\x5e\x61\x9b\x45\xa1\x33\xb3\xe2\xcb\xa6\x2d\x63\x06\xdc\x11\x6e\x1a\x8d\xb2\x5f\x8f\x91\x7a\xd6\xb1\xd7\x75\xbe\xb9\x0d\x22\x6c\x51\x90\xdf\x51\x6a\xc2\x93\xa2\x60\x58\xbe\xd6\x13\x3b\x4e\x1a\x4d\x2a\xf5\xb1\x1f\xd8\x49\x37\x6e\xe4\x7b\x00\x07\x17\xf2\xaa\x51\x33\x85\x1b\x26\xee\xba\x7e\xbb\x69\x6e\x52\x21\x7b\xae\x3d\xf6\xc4\xee\x68\x9f\xfd\x3c\x01\x62\xc3\x62\x92\x4f\x7e\xe5\x91\xba\x58\x80\x47\x0f\x78\xaa\x99\x21\x8a\x37\xb5\x51\x5f\x3d\x65\x88\xd6\x04\xf8\xf0\xac\x96\xfe\xb2\xd1\x7d\xb9\xc4\x42\x7f\x95\x5b\x67\x77\xac\x90\xbd\x23\x21\x82\xab\xb5\x5c\x97\x19\x95\x65\xbc\x0d\xed\xb5\x77\x3d\xb2\x89\x08\x85\x0f\x76\xea\xf4\xda\xe9\x73\x8e\x9a\x7a\x92\xda\x32\x8d\xe4\xc4\x53\x7d\x38\xbe\x7d\x21\x77\xe1\x59\x1f\x9e\xdf\xc8\xe6\x45\xef\x0f\x58\x90\xaf\x73\xba\xf9\xb5\x52\x55\x10\x56\x16\x58\x26\x3e\xf0\xbe\x04\x24\x7e\xbf\x3e\x3f\x35\xae\x1c\x5c\x39\xd8\x6a\xe7\xc1\xd6\x70\x53\xd3\x06\x2c\x43\x3a\xcd\x83\x6c\x1d\xd8\xc4\xb8\x65\x16\x05\xab\xa6\x52\x86\x78\x74\x66\x72\x35\xa2\xc5\x79\xd6\x98\xac\x18\x25\xc3\x8d\xda\x59\xca\x90\x8b\xd9\xe7\x34\x96\x30\x6d\x64\x7f\x2e\xa8\xba\xde\x21\x2e\xca\x0e\xd9\x7e\x0a\x30\xb7\x14\x0d\x6a\xfe\x76\x14\x9f\x86\x3b\x9c\x50\xf7\xed\xc8\xe4\xd5\xef\xd7\xbb\x46\xde\x34\x91\x32\xd8\xa4\x85\x30\x4f\xaf\x95\x90\xca\x15\xac\xf0\xd7\x44\x31\xd9\x56\x72\xc7\x08\xc8\x1b\x07\x33\x4d\xdf\x90\x8b\x42\xe2\xea\x4d\xc7\xb4\x4e\xce\xf9\xa5\x82\xc9\x83\xc6\x15\xad\x2a\xd8\xaf\xc0\x96\x98\x73\xb8\xc4\x33\xdb\x85\x4d\x42\x3e\xf8\x03\xf5\x74\x02\x55\xdd\x74\x9f\x03\x69\xea\xe3\x20\x12\xb1\xd5\x7c\x3e\x06\xb1\xea\x45\x61\xd0\x56\x9a\x27\xdc\x7f\x25\xd1\x4a\x57\xf4\xdd\x18\x54\x51\x77\xe6\x21\x82\xc1\x2d\x0a\x68\xd7\x7d\x6b\x2b\x4e\x85\x2f\x39\xc5\x34\x9e\xd1\x75\xcc\xfa\x32\x6f\xc3\x79\x58\x88\x78\xd8\xcc\x79\x23\x41\x80\x79\x6c\x0d\x16\xf5\xc0\x66\x8f\xc3\x1b\x3a\x6a\x1c\x1c\xac\x83\x1e\x9e\xeb\xbd\x65\x0b\xcb\xc2\x03\x15\xc4\x1f\xe6\x52\xc6\x8a\xa3\x9a\x0f\xbf\x7f\x70\xeb\xde\x75\x12\x58\xac\xb7\x73\x42\xa8\x83\x63\x18\x45\x34\xee\xc5\x4c\xa2\xfc\xcb\x6e\x7b\xee\x31\x11\x56\xec\x98\x44\xa0\xae\x93\x85\x8a\x78\x44\x02\xd7\x40\xb7\xdd\x2d\x2f\xd9\xa2\xc3\x8d\xf3\x60\xb6\xe5\x35\x7f\xab\xb1\xc0\x85\x40\x17\x67\x0e\x25\xf0\xce\xb3\x41\xa3\xb2\xf9\x6a\x43\xce\xb9\x64\xe7\x45\xae\x6e\xce\x48\x1e\x68\x7a\x23\x66\x65\x16\x1c\xb2\xf8\x14\x31\x73\x53\xcd\x69\xde\xe2\x7f\x5b\xe9\x96\x9f\x4e\x1a\xd4\x1b\x06\x71\xe3\x79\xcf\xd8\x46\xb9\xa8\x6b\x65\xd3\x3c\xcf\xc0\x74\xa4\x8f\x9a\x44\x25\xc6\x79\x0e\x68\x67\x92\x0f\x67\xc3\xf8\xb6\xd4\xa9\x49\xca\x45\x05\xbe\x7b\x0d\xa5\xdb\xea\xa9\x77\xa6\xd8\x7c\xdc\x2a\x6b\xdc\x00\xf4\xf9\x1b\x3f\x3c\xc8\x83\x6e\x7e\x0c\xee\x38\x23\xca\xdc\x27\x1d\xd7\x4d\x95\
x6b\xf7\x57\xa8\xc2\xdf\xd3\xfc\xfd\x75\x81\xb3\xe8\xa0\x25\x16\x0f\x86\x6e\x81\xbc\x6c\xde\x89\xe8\x32\xb9\x25\x55\x2d\x21\x60\x15\xcc\x4a\x2f\xe8\x78\xd2\xa4\x23\x4f\x58\x9f\xf7\x50\x1e\xd2\x96\x3b\xf9\x93\x90\x75\xed\xc4\x05\xf4\xca\x8f\x05\xa4\xee\xa3\x1c\x0e\xf0\xe4\xed\xb8\x73\x2a\x60\x71\x4c\x2a\x32\x8e\x51\x53\x7a\xc7\x3b\xe1\xab\x40\x17\xe2\xc1\x2c\x7a\xbc\xa5\x2d\xa5\xda\xec\xcc\xd7\xd8\xf8\xef\x6f\xf4\x0d\x7c\xfc\x52\x65\x9a\x67\x32\x15\x5a\x25\x31\x87\xf8\xfc\x1d\xa6\xc4\x4e\xa5\xc1\xff\xe4\x2a\xf2\xc5\x42\x4a\x45\x64\x9a\x6c\x54\xf3\xf5\x48\xc2\x4c\x32\x45\x16\x27\x45\x29\x9b\xf6\xac\x17\x12\xc9\x43\x68\x7b\x1c\xb9\x3a\xbf\xbf\x85\x25\x3c\xd4\x8d\x3a\x7b\x1c\x93\x26\x9e\xc1\x17\xe1\xb8\x85\xa2\x9b\x32\xd9\x37\x91\x7b\x0f\xb9\x9b\x42\xd1\x46\x15\x33\xb5\x67\x08\xed\xc5\xf9\xa8\x58\x7a\x06\x15\xf1\x42\x6d\xc4\xa8\x6f\xb4\x0f\x7f\xaa\xa3\x27\x8f\x54\xb7\x8e\xe3\xd1\x24\x58\x65\x94\x8e\x1c\x06\xc9\x12\xdf\xab\x2e\x0c\x60\x0b\xcc\x28\xa2\x0b\x09\x20\xb1\x1e\x5b\x2c\x8c\xbd\xa7\xb2\xb4\xab\xf7\xe0\x90\x13\xcd\xe4\x81\x7d\xa7\x28\x0b\x12\xf1\x38\xce\x7e\xf4\x20\x3d\xa3\x5a\xf9\x36\xf3\xeb\x09\x53\x8e\x27\x96\x7c\xcd\xd8\xb5\x98\x05\x84\x47\x51\xb4\x07\x9d\xda\xc1\x0a\x35\xaa\xa4\xcf\x14\x94\x48\xb3\x0e\xb8\xf4\x03\x41\x6a\xa6\xa6\x50\xf8\x38\x32\x19\x1d\xea\xd7\xac\xff\xb0\xd4\xa1\xcd\x32\x82\xdc\xcd\xaf\x0f\xff\xae\xe4\xbd\x6a\xa2\x97\xcf\xbe\xa7\x3c\xbc\xba\x2d\xfc\xc4\xf8\x90\xe3\x8a\x60\x8e\x95\xdb\x5f\x7a\x02\xc0\x47\x0b\xed\xa2\x0c\x83\x3b\xad\x1a\xe9\xa3\x99\x6a\xb9\x5e\xcb\xa4\x34\x27\x1c\xc1\xdd\x7b\x64\xcf\x2c\x58\x48\xea\xbe\xf7\xa1\xa8\x13\x1b\xcd\x1b\x1f\x06\x0b\x5f\xb5\x31\x97\x98\xc4\x2c\x8d\x29\x2a\xc0\x5c\xea\x5c\x12\xb5\xbd\xf9\xc3\x24\x40\xf7\x86\x74\xfc\x15\xc6\x55\xa1\xee\x11\x2f\x3e\x59\xa6\xa2\x9d\x62\xcf\x53\x94\x8c\xdf\xc2\xa6\x1e\xf2\x9b\x85\x9e\x2c\x45\x1f\xa8\xab\x0f\xcc\x64\xf8\x98\x62\x8d\x2a\xae\x53\x60\xf0\x5a\x97\xf1\xa1\xde\xa1\xbc\x33\x46\x77\x16\x07\x9e\x3b\xb4\x57\xc2\xf1\x79\x64\x51\x26\x3e\x5c\x51\x45\x70\xe1\xaa\x83\x99\x19\xfe\x7d\x4e\x48\x7c\xae\xae\x21\x99\x2d\xf3\x2c\xd4\x85\xe9\xa2\xd5\x39\xc8\xd8\x4f\x36\x62\x04\x11\x6a\xcd\x0e\x9f\x15\x52\x72\x16\x60\x1d\x54\xa9\xb4\x89\x03\xdb\x2a\x9c\x4c\x93\xce\x94\x7d\x1a\xb3\x99\x79\xdc\x59\x75\x56\x2f\xc0\xd9\xc7\xb1\x31\x2f\x76\x4f\x63\xf9\x89\x27\xbc\x7d\x95\xb0\x1e\xbf\x7e\x22\xae\x0a\xe4\xd7\xa4\xec\xed\x1d\xa2\xf5\xc6\xd9\xd7\x47\xba\xcd\x47\x98\x45\x14\xab\xd9\x9e\xf8\x1a\xd8\xee\x61\x10\xe7\x05\x96\x81\x79\x85\xf3\x8c\x59\x0d\x2f\x06\x21\xa4\xb7\x90\xb8\xcb\x54\x37\x99\x3a\xd6\x7c\x16\xe2\x75\x3f\xbe\x99\x6c\x7e\xcd\x25\x01\x8b\x2b\xb3\xa9\x4b\x21\x0b\x75\x49\x2d\xf5\x07\xb8\x63\x1d\xd9\x16\xb5\x59\x8b\x73\x6b\x91\x3e\xd3\xbc\xaf\x9d\x18\x0a\x0b\xb9\xba\x02\x97\xcb\x24\x73\x6d\x85\x3f\x6d\xc9\x11\xa5\x94\x11\x3a\xec\xc1\x8d\x22\x4c\xe4\x89\x5e\x37\x66\x68\xa3\x90\x3b\x21\x13\x81\x44\xc1\x2e\x4e\xd3\xb1\xb0\xdc\xd5\x3e\x6b\x9a\xf0\xcf\xe1\xd1\x08\x72\xa4\x21\x49\x71\xb2\x7b\x68\x8a\x38\x96\x7e\x25\x73\x8f\x7c\xdf\x8e\xef\x2f\x76\xca\x5a\x73\xa1\x51\x4f\x82\x1e\x32\xc3\x45\xab\xfb\xb4\xd8\xfa\xcc\x73\x05\xc2\x21\x38\x96\x2c\x3b\xe0\x91\x43\x6d\x5a\xa2\xb3\x1d\x01\x4a\x8a\x48\x0d\x30\xc4\x7a\x0c\x65\x50\xb7\x89\xd7\xdf\x00\x48\xe6\xc6\x4d\xe1\x68\x16\xb9\xa9\xf9\x53\x6c\x36\xa6\x5c\x3d\xce\x45\x8f\xb3\xbd\x89\x80\xe7\x18\xa0\xb2\x52\xb6\x54\x36\x31\x07\xd6\xd6\x7a\x62\x88\xcd\xc8\x6a\x5e\x18\x54\x3a\xc7\x35\x24\xf2\xfb\xeb\x9b\x8e\xbb\xa0\x8c\xbe\xc5\x0a\xbf\x1c\xab\x67\x2f\x07\x3e\xc1\xb7\xf1\x20\xb7\xdc\x82\xa2\x6b\xe0\x0c\xc2\x39\x36\x99\xf3\x6f\x54\x9f\x73\xc2\x5a\x21\x0e\x9c\x04\x0c\x92\x61
\x87\x74\xdb\x01\x21\xef\x9c\x8a\x06\x1b\x6e\x7e\x73\x26\x92\xc0\xd6\x03\x0b\x82\x24\x2f\x64\xa1\x77\x40\x3f\x9e\xb2\x78\x81\x01\x60\x9b\x56\xf5\xf2\xb9\x7e\x13\x96\xc9\xd4\x14\x9d\xda\x19\xdd\x62\x22\xbe\x1e\x92\x26\x60\x5e\x35\xec\x96\x38\xd6\x32\xdc\xaa\x4c\xfc\x89\x05\x6f\xd2\xdc\xf2\x8e\x7e\xe4\xe0\xc3\x86\x88\xf0\xcc\x85\xe1\x38\xfa\x78\x40\xfe\x9d\x37\xe1\xad\x0c\xff\x62\xeb\x63\x73\x2b\x06\xe2\x0c\xc5\xf3\xa5\xd0\x08\xe4\x55\x84\xbb\x2a\x83\x0b\x23\x46\xa9\x82\x13\xfb\xfb\xfd\xa3\x09\x93\x47\x22\x48\x39\xbd\x39\x15\x08\x7f\x61\x66\xbb\xa4\x9a\x5c\xf0\x0e\xea\xd7\x0c\xb4\x78\xa7\xe1\x9f\x87\xc4\x49\x32\x2f\x22\x6e\x74\x21\xe2\x75\x73\xe9\x28\x01\xcf\x07\x1d\xac\xcd\x1b\x5b\x39\xc9\xd6\xd1\xc5\x5a\x8e\xe8\xe4\x46\x6b\x94\x20\x11\x03\x77\x76\x86\x56\x9a\xbb\x67\x1b\x96\xfb\x37\x74\x51\xa3\x39\xb1\x57\xd0\xbb\x43\x03\x11\x0f\xc3\x89\x69\xa6\x54\xb4\x3f\x6e\xed\xb4\x2d\xde\x9b\x08\xb5\xda\x79\x98\x44\x02\x63\xcf\xa6\xdf\xb0\xe1\xe0\xd6\xf6\x57\xf8\xe4\x07\x52\xce\x04\x60\xc0\x9c\x4c\x34\xd4\x82\xe5\x3b\x88\xc6\x65\xa8\xc5\xd9\xee\xcc\x9d\x61\x2e\xde\xa4\x00\x0b\x81\x94\x8e\x2a\xd0\xe2\x03\xd9\x47\x21\xe3\x2c\xb0\x92\x8f\xdc\x66\x44\x2f\xde\x9a\x2a\x67\xa9\x2d\x90\x3f\xc0\x06\xe0\x41\x16\x38\xa4\xcf\x9e\xce\x1a\x6d\xde\xa5\x06\xae\xad\x3a\x3c\xb7\x1b\x29\x15\xf2\xb5\xb4\xff\x35\x5e\x9c\x8b\xc4\x0e\x5c\x4d\x40\x08\xea\x65\x0b\x97\x8a\xa6\xa5\x03\x14\xc8\xac\x7f\x48\xb8\xfb\xbc\x3b\xe6\x01\xc4\x74\x61\xfe\xd6\x8b\x0f\x72\xbb\x30\x34\xdc\x55\x3a\xe9\x73\x67\xea\x4b\x94\x99\x48\x40\x56\x68\x14\x68\x81\x07\x8a\x5d\x07\xae\xcc\xea\x7f\xca\xa8\xfb\xaf\xaa\x00\x4d\x60\x77\x6f\x61\xc6\xb7\x3f\x8c\x0a\x69\xc8\x8e\x8d\xf2\xff\x35\xd5\x0b\x6b\x3f\xb5\x08\x42\x4c\x62\xb6\xbe\x0d\x0f\x3f\x09\xaf\xbc\x20\xe6\x61\xa9\x87\x60\xaf\x9d\x38\x6d\x92\x64\xd6\x6a\x33\x59\x44\x5c\x80\x01\xc0\x6f\x7a\xce\x9b\xce\xb6\xbf\xa7\xb1\x94\x98\x3d\x0e\xe5\x36\x80\x7f\x00\x2e\x7e\xf4\x11\xb7\x84\x0a\x39\xd4\xb7\x4e\x6c\x94\xdd\xd6\x3d\x4b\x7a\xa7\x2c\x89\x55\xc0\xed\x69\xe0\x8d\xae\xdb\xa8\xc4\x9a\x0b\xb1\x9c\xe0\xf3\xfe\x4a\x5a\xc7\x61\xe1\xfc\x74\x54\x13\xe5\xf1\xd3\xa7\x42\x96\x02\x7a\xc3\x33\xc2\x2f\x12\xc7\xe9\x99\xf9\x9c\xed\xc9\xc4\x47\xdb\x23\x33\x31\x87\x61\x7c\x2e\xf9\x98\xa7\x01\x8d\xe9\x4a\xb5\xd1\x04\x48\x75\x2a\x9b\x6b\xaf\xf9\x78\x26\xb2\x85\xb6\xb5\x2b\x61\xd7\xb2\x4f\xc2\x09\xba\x8a\xe3\x38\x09\xdb\x6d\xaa\x46\xe7\x4b\x11\x67\x4a\x16\x78\x2a\x94\xea\xa0\x9d\xec\xd2\x33\x1b\x7c\x3e\x2e\x74\xa3\x05\xc2\x93\x26\x50\xe6\xf6\xa1\x31\x8b\x18\x72\x36\x88\xa5\x34\x4d\x85\xdf\xc7\xb5\xbe\x10\xfc\x26\x2c\x70\x38\xc4\xa1\xe5\xbd\xd4\xe2\x54\x64\x15\xea\x12\xd8\xab\xee\x6e\xbe\xba\xc8\x46\x76\x33\x11\xc4\x02\x39\x4a\x77\xc8\x3e\x32\x0e\x34\x9d\x30\xc3\xd0\xc0\xd7\xfc\x08\x82\x05\x39\xac\xa6\x62\xbe\xf1\xcc\xea\xaf\x73\xb9\xd2\xc3\x67\xdf\x0d\x9d\x39\x48\xa1\x66\x99\xba\xa5\xfd\x55\x4b\x10\x24\xeb\x61\x1f\x2a\xfa\xfd\x57\x81\xeb\xfd\xdd\xb9\x9e\x2b\xd4\x95\xb7\x07\x1e\xf8\x1c\x05\x56\x9a\x47\x95\x9c\x9d\xa0\x5c\x3e\x9b\x37\xb5\xd4\x64\x9b\xa6\x61\x5b\x7d\x49\xe8\x4c\x0f\x9a\x74\x36\xf7\x09\xdd\x62\xd5\x1f\x9e\xdc\xdf\x18\x88\x79\xda\xe4\xcc\x88\x82\xfd\x30\x6c\xdc\x8d\x3e\x47\x00\x6e\xf6\x10\x6e\xa2\x0d\xcb\x32\x40\x68\x4e\x62\x06\x29\x5b\xe0\x71\xa0\x04\xfa\x46\xc2\x43\xf3\x96\x19\x61\x2e\x9d\x7d\x47\x92\x79\x4d\xc5\x40\x81\x7b\x59\x58\x79\x87\xb2\x92\x94\xad\x7e\xc4\xfa\xa1\x4c\x77\xa1\x28\x6f\xc2\xc8\x5e\x24\x9f\xff\x47\x78\x88\xc5\xbb\x9c\x86\x71\x0a\x57\xef\x23\x8a\xa0\xae\x72\xb4\x1c\x98\x32\x3a\x13\xd7\x86\x3c\x66\xa7\x65\xa7\x8e\xf6\x11\x22\xe6\xaa\xc1\xd8\x7a\xa1\x58\xd3\xdf\x7d\x4a\x18\x51\x85\x6e\x6e\x13\x7
1\xc6\x3a\x47\x85\xc2\x0e\x9c\xc2\xfb\xc8\x71\x5a\xca\xae\x62\xc9\xec\x22\x68\xa5\x4c\x39\x2f\x91\x31\x85\xc9\x97\xe7\x16\x7e\x3f\x0f\xdf\xe8\xf0\x0d\x4d\xbf\x89\x2a\x87\xa1\x29\x30\x60\xae\x34\x1b\x2a\xdd\x45\x5d\xc2\x1e\x95\xf5\x4c\xfd\x3b\x69\x98\xd8\xdf\x5b\x8e\x23\x9c\xd2\xc8\x6e\xa9\xfa\x95\xae\xeb\xe3\x56\x78\xac\x9d\x6c\x89\xe7\xb2\xff\x8b\x32\xfb\xdd\x0e\x29\x8b\x35\x6e\xdf\xf6\x0d\x99\x9d\x4b\x54\x92\xd4\x84\xbb\x0c\xa9\xbb\x75\x3f\xf1\xfd\x76\x76\xc3\x9b\x5c\xd7\xd6\x00\x04\xa6\xe9\xaa\x5e\x95\x5f\x95\x33\xd5\x36\x8d\xb8\x44\x3d\xd1\xa5\x29\xbf\x8e\xd3\x39\x55\xaf\x9c\x5b\x85\x36\xf1\xa7\xc4\xfa\xa2\x5a\xf5\x16\x12\x7d\x70\x53\xbb\x83\xda\x75\xfb\x50\x68\x0a\xc7\x62\xf0\x87\x7d\xe1\xec\xc4\x7f\x7b\x0c\xc2\x54\xd0\x3c\x00\xca\x57\x40\xee\xf9\xa2\xee\x7a\x35\xd1\x9a\x60\xca\xc4\x33\xb5\xfb\x09\x7d\xe1\x51\xc1\xc0\xb4\xc1\xdd\x3f\xda\x96\xd8\xc3\x55\x6c\xe4\xc6\xb3\x6d\x3b\x47\x5c\x86\x19\x7f\x56\xf5\xde\x89\xd2\x64\x84\x56\x97\x91\x74\x01\x53\xdb\xe0\x02\x2c\x0f\xa1\x3d\x3f\xbf\xd1\x95\x50\x79\x22\xe4\x83\xa0\x5e\x9f\x1d\xb0\xbc\x4a\x6d\xff\xed\x4d\x87\x27\x09\x0b\xca\xc2\xed\x7a\xe3\x2a\xba\x10\xb2\x61\xbd\x0c\x81\x6e\xd9\x77\x66\x72\xa8\x54\xcf\xf4\x18\xe7\x9e\x41\xba\x98\xd6\x90\x83\xa3\xce\x0b\xa3\xb8\x7b\x8b\x17\xe8\x6f\xb3\xa9\x52\xce\x99\x9a\x76\x80\x34\x52\xb4\xd9\xe3\x72\x65\x9e\x4e\x9c\xde\x93\xb7\x37\x20\xf5\x33\xc5\x5d\x40\xae\xda\x19\xc0\x68\xcb\x4f\x18\x4b\xeb\xe3\xd1\x05\xfe\xf3\x05\x75\x0b\xd1\x3f\xe8\xda\xfa\x8c\xe2\x0b\x11\x70\x68\x12\x11\x3a\x74\x8b\x6c\x54\xf3\x3e\x07\xc4\x82\xf6\x27\xfe\xaf\x42\x33\xda\xe1\xc8\x2a\x9c\xe7\xe0\x14\x1c\x99\x0c\x9d\x88\x97\x7f\x04\xf3\x55\x69\x9f\x97\x44\xb1\xd8\x42\x1d\x56\xb5\x3f\xcd\xc0\x18\xbc\xff\xf8\x77\x05\x8a\x5b\xf1\x08\xc6\x29\x36\xa4\x71\xda\xc5\x99\x60\x46\xc4\x27\xc5\xe0\xb4\x86\xad\x3f\x11\x8c\x0e\xa6\x64\x19\x63\x3b\x46\x3d\x61\x54\x1b\x8b\xda\xa3\x58\xc9\x46\x69\xdc\xd1\xbc\x33\x35\x08\xed\xe6\x4c\x70\xd2\x92\xaa\x77\x4e\x33\x12\x45\x1e\x2b\x15\xe7\xd0\x2b\x66\xea\x9c\x72\xbc\xfa\x9f\x32\xe6\x1e\x11\xec\xb0\x65\xce\xc0\xa8\x31\xca\xbd\x02\x55\xda\x2d\xcd\xcc\xa2\xec\x7d\xa0\x8f\x84\x2f\x5a\xe8\x9e\x30\x50\x27\xe4\x21\x89\x9e\x93\x1a\xcd\x22\xc9\x70\x43\x8e\x77\xf9\x71\xe2\x47\x7f\xc1\xd7\x57\xa1\x97\x92\xf7\xf9\xb9\xdc\x1f\x33\x94\x27\xda\xe3\x0a\x97\x46\x71\xe3\x9c\x8f\x63\x79\x41\xea\x58\xb7\x3c\x05\xd5\xad\x4f\xed\xf8\x0d\x0f\xcd\x5c\x88\x70\x42\x77\x4b\xf7\xe2\x4f\x3b\xb9\x85\xc1\x31\x7d\x75\x41\x2d\x4b\x81\x81\x79\x0a\xe7\x46\x50\x5f\x8b\x8c\x03\x0d\x7e\x43\x77\xc7\x53\xfb\x4b\x2a\xf5\x08\x81\x78\xbd\xd5\x9b\x59\x66\x27\x47\xa5\x45\xb2\x28\xf3\x2a\x32\x78\x1f\x9e\xa8\x7c\xf0\x5c\xfc\x5d\x56\xb7\xbe\x71\x06\x12\xa9\x46\xbd\x25\x19\x1e\xbc\xd8\xc3\x38\xd9\x7b\x18\x0a\x53\x79\xf6\xf9\x65\x95\x86\x91\xc0\x4f\x08\x65\xa6\x08\x24\x7d\xa0\xd0\xfb\x54\x0f\x94\x8b\xea\xfd\x72\x09\x9a\xec\xb5\xa8\xa1\xdd\xab\x91\x61\xe7\x6f\x39\x83\xda\xd4\x36\xb3\x8f\x28\x76\x51\xfe\x55\x34\xb8\xca\xfd\x32\xa5\x48\x23\x76\x10\x5e\xfd\x17\x60\x8f\xa3\xb7\x00\xbc\xf3\x1c\xb0\x78\xe0\x07\xa2\x72\xb0\x01\x8b\x60\xf6\x61\x40\x2e\x61\x48\xcf\xa1\x9c\x8d\xcc\x5f\xc9\xa1\x7f\x38\x17\x1b\x0a\xde\x64\x20\x26\xd5\x26\x9d\xda\x45\x0c\x58\x7e\x92\x4e\xca\x52\x17\x06\x82\xad\x4c\x88\x58\x2f\xff\x71\x1b\xa8\xd0\x67\xce\x04\xc0\x3e\x38\x25\x06\x45\x63\x66\x66\xd7\x93\x8e\x9a\x52\xd6\x47\x64\x1a\x2a\xc1\x72\x59\x32\x29\x0c\x72\x43\x50\x0f\xb8\x57\xce\x7d\x11\xf6\x39\x15\xcc\xb8\x60\x25\x76\x5c\xe4\xc7\x1e\xd8\x5a\xea\x4a\x18\xc7\xda\xe1\x9e\xe4\x39\x22\x27\xf9\xbb\x1c\x5a\x02\x1b\x50\x9d\xf5\x85\xf3\xfa\x87\x4f\xb3\xe5\x43\x26\xd8\x5e\xa9\x47\x27\xac\x
e3\x14\x9a\x02\xfc\x6e\x3c\xab\xcd\xae\x5d\x2f\x4e\xab\x4c\x82\x01\xe7\xa5\x64\xb2\xaa\xd4\xce\x11\xa9\xc4\xdb\x41\xe2\xdd\x68\xd1\x8a\x9e\xde\xde\xfe\xba\x97\x30\x44\xf2\x58\x89\x42\x68\xaf\x19\xae\x75\xcc\xfc\x78\x6f\x69\x62\x30\xda\xda\x6a\xb7\x54\xf3\x77\x62\xce\xf3\xe9\xb4\xf6\x64\x29\xf0\xe6\xed\x95\x0b\xd3\x84\xa5\x30\x4a\x4f\x1d\x01\x33\xc0\x9b\xad\x92\x7d\xc8\x7b\x36\x5d\x84\xf7\xd0\xb0\x01\x5f\x71\x61\xd8\x7c\x58\xc6\xc4\x19\x2b\xf6\x50\xfe\xc4\xd6\x5f\xc9\xe0\x0f\xde\xbc\x3b\xe3\x35\x07\x36\x94\xfe\x6d\x5a\x87\x3e\xc9\xda\x61\xfa\x1c\x7c\x87\xa1\x71\xba\x5f\xa6\x03\xf2\xc0\x7e\xbd\x0b\xea\xbf\xb8\xdc\x7d\x99\xfb\x12\x7d\x35\xec\x90\x48\xe6\xf6\xe3\xc1\x91\x44\xda\x2e\xe3\x55\xf1\x1b\x16\x25\x39\x99\xe3\x7c\x04\x2f\xaf\x61\x3f\x9e\x25\xe7\x8f\x01\xe3\x40\x68\x90\xcc\xee\xfd\xed\x8d\x1d\x00\xf7\x15\x65\x1b\x5b\xb2\xd7\xb8\xf1\x3b\xbb\x45\x0a\xf5\x27\xc9\x00\x62\x14\x51\xdf\x2c\x97\x56\x2d\x00\x48\x5a\x92\xd9\xc7\xe7\xca\x89\x01\xcb\x42\x95\xba\xa5\xdb\x10\xcb\xa4\x97\x7d\x92\xd9\x31\xeb\x59\x26\xd0\x38\xb9\x09\xf9\xd3\xe2\x76\x24\xf0\x67\xb1\x30\x35\x60\xbb\x3d\x19\x8d\x8c\xf9\xf1\x9d\xe5\xe2\xa3\xf2\x99\x24\x3d\x1a\x3e\x04\x53\xf1\xe2\x1c\x47\xf7\x87\xb5\xe3\xf5\x7e\xfc\xf8\x77\xe5\x7d\xcc\x68\x75\xf7\x39\x2f\x90\xbb\x8d\x55\x7a\xef\xa8\x4b\x6c\xe1\xb2\x98\x6c\x55\x47\x4b\xe3\x24\x34\x65\x8c\xa9\x22\xe5\x13\x28\x2d\xe1\x71\xf8\x64\x37\x56\x85\x5d\xff\x30\xf1\x45\xa5\x5c\xc1\xdf\x30\xe1\x81\x77\x99\xca\xc0\xcb\x2d\x7e\xec\xee\x0c\x10\xb2\x79\x82\x69\xf4\xcc\x3c\x4e\xb6\x73\x84\x11\x0c\xb9\xe4\x59\x94\x39\xf6\x66\xa5\x89\xc0\xa7\x42\x08\xa6\xeb\xbd\x02\x78\x40\xe6\x2e\xf3\x5e\xdb\x13\x87\xca\xa8\x7b\x86\xf8\x3c\x55\x63\xa6\xd2\x6d\x3a\xb0\x8c\xe3\xae\x05\x34\x58\x5c\x31\x83\x86\xde\x05\xfd\x6a\xe7\xda\x19\x33\x7b\xdb\x3f\xba\x1f\x64\x2c\xd2\x60\x31\x08\x40\x48\x18\x97\xab\x7e\x77\x74\x54\x39\x9c\xe7\x28\x1e\x0e\x45\xfe\x2a\xe1\x6d\xc3\xab\xe4\x12\xff\xe3\x81\x30\x9b\x6c\xce\x80\x6a\x1a\x0e\x49\x1d\x30\x10\x17\xcc\x2a\x1c\xe4\x5e\x0a\xd5\x1e\x8e\xdf\x98\x7d\x31\xe1\xf4\xed\xd2\x8c\xfd\x4d\x37\x2e\xd3\x63\xa2\xda\x2a\xab\x77\x1d\x80\x79\x77\xca\xba\xd0\xc8\x37\xb1\xc0\x20\x91\x82\x80\x19\x76\x8b\x88\x92\x62\x7f\x13\xcf\x96\x19\xac\xa1\x64\x54\x07\x8b\xf1\x6e\x24\x6a\xf6\x67\xfd\x1b\x0f\x0b\xdd\xc2\xb9\x54\xb1\xcf\xf9\xc1\x0a\xd6\xe8\x10\xdd\xd2\xe7\x32\x6d\x82\xac\x4e\x2f\xf8\xd8\xb4\x40\xdc\x17\x48\x41\x52\x69\x90\xe3\xf1\x0f\x8e\x18\x27\x74\xc6\x39\xaa\x90\x66\x35\xd5\x0c\x19\x27\x95\x80\xfd\xfe\x8d\xc2\xcd\x6c\x42\x28\x34\x1f\x80\x31\xc3\x18\x7b\xd2\xf2\xa0\xbf\x79\x10\x0e\xe8\x0f\x3b\x02\xbf\x2d\x66\xca\xc4\xf3\x77\x2b\x7b\x46\x3c\x5d\xc1\x0a\x7b\xef\xe3\xc3\x52\x19\x29\xa7\x3c\x15\x18\x34\x7d\x59\x7f\xd2\xa4\x81\x35\x1f\xb7\xdb\x95\xb5\x25\xf3\x24\xd4\x79\x00\x2e\x99\xa4\xe0\xd3\xd4\xf3\x8a\x04\xa4\x77\x75\x2c\xa4\x85\x34\xb3\x82\x23\x48\x96\x06\x14\x65\xc3\x28\xcc\xb4\x12\x92\x5e\x14\x18\x99\xb7\xbd\x87\xcb\x15\x4f\x99\x3d\x3a\x51\x7d\x4b\xfd\x98\xf5\xe5\x3f\x2c\x6b\xcb\x06\x1a\x2f\x1a\xd2\xcd\xfa\x57\x77\xc7\x2f\xf0\x35\xb9\x8e\x56\x6e\xf2\x39\x5f\x31\x8d\xe5\x4a\x2c\x92\x07\x82\xf8\x71\x93\x9f\xe0\xfd\xf2\xae\xae\x70\x78\x6e\x08\xe1\xa7\xed\xb3\x45\x25\xc4\x9c\x92\xe4\x1d\x2b\x78\xe8\xf4\xbd\x48\x43\x9a\xee\x56\xaf\xfd\xf8\xb8\xc8\xbb\x5c\x1e\x65\x76\xb1\x73\x50\xc2\x6b\x4f\xfe\x5d\xa9\x46\xc0\xe4\x74\x02\x53\x12\x6b\x5d\x5b\x99\xc7\xea\x25\x78\x9c\x3b\x4d\xbe\xbc\x84\x7e\x00\x85\xf0\x28\x36\xbe\x31\x3d\x09\x83\xbc\x21\xa3\xc9\x6a\x7c\x6b\x97\xc0\x11\xd9\xb6\x73\x70\x58\x50\x77\x38\x05\xd1\xe4\x0c\xe5\xf4\x89\xdb\xda\xce\x74\xad\x11\x8a\xbb\x2a\x21\x64\xc6\xa1\x25\xf0\x2d\xc4\
xc3\xb7\x3f\x37\xee\xed\xfb\xed\x03\x99\xa9\xe7\xbe\x10\xf3\x55\x06\x16\xb2\x20\x91\x06\xcd\x69\x03\x8f\x85\xcb\xc6\xf1\xfb\xf7\x0f\xd2\xa7\xc8\xd3\x55\x0c\x94\xa1\x8f\x04\x24\x5a\x66\x4c\xaa\x88\xe5\xb9\xa7\x39\xb4\xf7\x7c\x08\xfb\xe7\xd8\x26\x32\xb2\xf9\xae\x94\x9e\x5a\xd3\x87\xa7\x21\x3f\x90\xb2\xea\x35\x64\x70\x2e\x33\xca\x7c\x56\x27\x0b\xcf\xde\x16\x78\x52\x57\xa0\xc7\xe2\x10\xf6\xcb\x7a\xab\xdb\x1d\xe7\x95\xa3\x31\xcd\x9e\x2c\x0c\x30\x18\xa6\xf3\xc8\xd1\x8c\xbc\xd1\xa8\xf0\xc0\xe9\x38\x1e\x80\x5b\x74\x02\x85\x40\x0f\xbc\x2e\xc4\x06\xee\x00\xc8\x2d\x41\xfd\x43\xa4\x97\x37\xa7\xd7\x80\xbc\x2f\xaf\x3a\xbe\x88\x46\x9c\x48\x4e\xd4\x36\xa5\x91\xde\xe1\x47\x70\x1a\x00\xfc\xf8\x47\x1f\x5e\x8d\xd7\x6d\x9f\x66\xf4\x8d\x22\x09\x5d\xa8\x3c\xcc\x66\x5c\xa4\x5a\x48\xb0\x35\xfb\x3c\xcb\x38\x48\xf4\x57\x62\x8a\x92\x85\x01\x40\x6c\x84\x2e\xa8\xd5\xa9\x31\x9d\x88\x1c\x43\x0b\x5c\x80\x3f\xb3\x9e\xea\xe5\x13\x04\x49\x0e\x24\x5c\x49\x23\x36\x79\xbb\x2d\xb2\x2f\x53\xd3\x7c\x7f\x90\xa9\xbc\x7e\x0d\x34\xa9\x67\xd9\x63\x13\x77\x31\x55\xd4\x25\x5d\xde\xc7\x07\xe3\x10\xcf\xc3\x96\xca\x6e\x1c\x65\x45\x7b\xb5\x55\x9b\x13\x36\x45\x4d\xfb\xb4\xed\xe6\x4a\xc4\x7b\x23\x36\xc5\xbb\x23\xc5\xcb\x19\x60\x34\x79\x4d\x4b\x97\xa8\xad\x2d\x0a\x69\xd9\xc8\xff\x9f\xba\x9e\x27\x04\xb1\x77\x28\xdd\x56\x18\xe3\x31\xdd\xcf\x23\x7a\x1c\x3c\xb3\xc9\xac\x5c\x3d\xde\xec\x5f\xa5\x09\xba\x2e\x8e\xb7\x7b\x73\xad\xa7\x22\xdd\x13\x8c\xc6\x66\x3e\x3f\xfb\x18\xc2\x65\x97\xe3\xf3\x10\x43\x57\xbc\xa6\x07\x24\x7b\x2c\xb9\x6a\xf7\x7f\x20\x90\xb1\xa0\xbe\x4c\x2e\xb6\x3c\x2a\x39\xfd\x70\xaf\x13\xc2\x29\xa9\xd9\x62\x89\x2d\x41\xa0\xef\x4f\x89\x2b\xd9\x59\xe4\x52\xc9\xa8\x74\x14\x8b\x73\x9c\xce\x1c\x3f\x6b\xb7\xbb\xd0\x4d\x5d\x9d\x02\x8e\x0f\xb7\x2d\x92\x42\xc0\x9c\x46\x64\x39\xd2\xe9\xe4\xc1\x63\x55\xe6\x91\x51\xa0\x59\xe7\x7d\x3b\x0a\x5b\x47\xb7\xdd\x4c\xa8\x8a\x2f\x48\x9c\x59\x18\x4e\x62\x65\x9f\x7f\xca\x71\xdf\xe0\x58\xa6\xc9\xc8\x9b\xcf\x3f\xa8\x10\xd8\xd6\x4b\x6a\x89\xa3\x93\xbb\x2f\xf8\x9e\x39\x05\x51\xca\xd4\x6b\x2c\x2c\xd6\x99\x61\x57\x24\x3d\x64\x8d\x62\x74\xe4\x01\x33\x30\x5d\x49\xd2\x2f\xaf\xea\x25\x78\xcb\xc1\x09\xe7\xd0\x59\xc4\x4b\x3e\x91\x95\xce\x5b\x7e\x28\xae\x98\xc3\x30\x96\x20\x29\x9c\x45\x39\xf9\x79\xbb\xfc\x03\x86\x39\x98\xf5\xd9\x5e\x54\xe7\xdc\xb9\x0d\x11\x37\x09\x02\x43\xe9\xfd\xdf\x57\x82\x7a\xfc\xf7\x8f\xeb\xde\xfd\xf3\xe7\xe6\xad\xcc\x91\xb1\xd8\x00\xd3\xab\x5e\x98\x6b\x90\xb6\xe2\x64\xa2\x3a\x4e\x00\x0e\x87\xa4\xc2\x74\x84\x6b\x38\x33\x0f\x5e\xd7\x81\x02\x96\xcc\xac\x00\x89\xd4\x2f\xa7\x27\xba\x04\xc0\x6b\x80\xaa\xcb\xb4\x10\xde\x49\x16\x6e\x7f\x80\x39\xdd\x6c\x90\xdd\x37\x15\x64\xd2\x73\x87\x2f\xa7\xca\xcc\xb1\xd2\x34\x0e\x9e\x8b\xc8\xb6\xba\x9b\x49\x21\x47\x93\x9a\x49\x18\x44\x22\xed\xb8\x2c\x08\xc0\x2f\xc8\x80\x0f\x9f\xbf\x07\x0c\x31\x3d\x14\x24\x89\xb7\xd8\xc9\x29\x43\x82\xab\x12\xb4\x9b\x8c\xca\x03\x73\xa0\xf7\x7f\x1d\x21\xf0\xce\x5d\xa7\x41\xd4\xcf\x08\x15\xe4\x28\xb9\xcc\x29\x13\xa9\x54\x54\xdf\x7f\x58\x8a\xc4\x90\x7a\x9c\x22\x66\xd7\x56\x4e\xdb\x61\x56\xdd\xc4\x7b\x66\xe9\x96\xb8\x40\xe0\x22\xf2\x44\x87\xd3\x01\x6a\x08\x2d\x2c\x56\x68\x15\x2a\x0f\x05\xed\x37\xbb\x40\xd1\x12\x64\xc2\xf6\xfd\x19\xe6\xe3\x91\xb9\xe4\x19\xf3\xc9\x7d\x15\x8c\x83\x7b\x5e\x78\xce\x09\x3a\xb8\x27\x9e\xbe\x0d\x1a\x66\x58\x86\xb5\xc4\xb9\x8c\xa3\x8d\x35\xc7\x24\x7b\xc6\x9d\x09\xf7\xd8\x32\xb7\x86\xb9\x2d\xc0\x14\x9e\x6f\xdf\x77\x7a\x65\xd0\x84\x3e\xbe\xe0\xeb\xe3\x9b\xe8\xe3\x23\xe9\xac\xba\xdb\x79\x7f\xd7\xb8\xe1\x87\x7a\xee\xbe\xeb\x2e\xfd\xf3\xfb\x11\x39\x39\x06\x69\xf9\x47\x7c\xac\xd3\xf3\x97\x67\x6a\x6a\x1f\xa2\x5e\xc0\x17\x53\xf2\xdb
\xa9\x4c\x6f\x21\xe5\xf9\xa9\x10\x70\x9d\x80\xf4\x23\x76\x62\x52\x0f\x9d\x77\x14\x89\x29\xcc\x22\x93\xe7\x48\xf4\xc7\xc3\x14\x0a\x59\x25\x3a\x1a\x6c\x35\xb9\xe7\x38\xb6\x83\x7f\xb4\xed\xc4\xfb\x9d\x0d\x30\x8c\xc5\xbd\x26\x44\xf5\x8a\x74\x10\x9c\xac\xf0\xbc\xad\xf7\xd8\x6a\xe0\x10\x87\xf2\xfb\x74\xf7\x3b\x99\x5a\xa6\x8f\xe3\xbf\x33\xce\x47\xe9\xf6\x02\x03\x35\x73\x64\xd3\xde\xb2\x38\x30\xa5\xb1\x0d\x47\x48\xdb\x9e\xc4\x52\x89\x39\x71\xb5\x93\xc8\x70\x60\x93\x1f\x0e\x92\x35\xe6\x21\x75\xb4\x10\xa6\xc4\x07\xa2\xd8\x2e\x61\x91\x8b\x01\x36\x50\xe5\x40\x85\xec\x30\xb9\x68\x38\x6c\xe2\x56\x26\x04\xb9\x4f\xc4\x15\x49\x93\x83\x6e\x48\x12\x21\x7d\x24\x4e\x3c\x10\x18\xd0\x38\xdf\x1f\xdb\x69\x77\x1d\xf9\xb0\xff\x27\xee\xda\x2d\xc6\x7d\xbd\xf9\x6e\x3f\x1e\x9f\x4b\x7d\x42\xc7\x96\xcf\x98\x84\xd3\xf3\xf6\xf5\x85\xd9\x74\xa1\x57\xc4\x6b\x36\xba\x27\xec\xe0\xfd\x5d\x12\xf1\x9a\x9a\x26\xb6\x5e\xf3\x7f\x25\x17\x9a\x1d\xac\x84\x3c\x07\x79\xad\x2a\x17\xac\xf3\x99\x35\x35\xf5\xa3\x1e\x74\x1c\x3c\xbd\x81\x53\x03\xa6\x04\xd6\xec\x25\xea\xb6\x6f\xac\x6f\xce\x40\xdb\xb0\xd4\x69\xce\xd6\xb2\xc8\x61\xda\xc4\xd4\xb6\xf9\xe8\x43\x74\xea\xa6\x71\x16\xc4\xdf\x3e\xf5\xe3\xed\xfb\x1b\x0f\xed\xd5\xc4\x71\x93\xee\x61\xc6\xa3\x3b\x9d\xf6\x25\x79\x73\x4a\xa5\xce\x18\xa5\x67\x09\x59\x32\x9f\x09\x18\x85\x8f\xef\x9f\xdf\xaf\x77\x1e\x90\x77\x45\x26\x1a\x2c\xeb\x39\x7a\x28\x59\x6f\xe8\xc7\x33\xe6\x97\xf5\x0f\xec\x47\x6d\x16\xae\xb4\x5a\x86\x4d\x29\x59\xcc\x55\x3f\x54\xaa\x64\xf1\xd4\xfa\x73\x2f\x46\x9c\x9f\xd3\x63\xe6\x73\x2d\x3f\xe3\xfb\xfb\xe7\x07\x69\x42\xb7\x1e\x05\x03\x5e\x78\xa3\x4c\x7c\x82\x80\x94\x2d\x41\x10\x8d\xa3\xb6\xc9\x0f\x8e\x51\x95\x54\xf8\xe1\xac\x49\xc5\x05\x52\xb9\x14\x1e\xec\x52\x0b\x3b\xe2\x14\x92\xd6\xf2\xe4\x4c\x45\x64\xde\x3b\xc5\x38\x74\x2c\x23\x77\xa7\xfb\xbf\xfb\x17\x78\xe1\xd6\xb0\x72\x02\x9f\xd9\xb9\x33\x13\x71\xab\x7d\x26\xd1\x29\x43\x2d\xae\xb4\x76\xbf\x44\xff\x0f\x87\xf4\x36\xf5\xac\x30\x39\x77\x52\x4f\x1e\x24\xf3\xa7\x76\x72\xc7\xb9\x67\xa5\x91\x48\xa3\x2d\xaa\xce\x44\x30\x07\x3c\xd1\x02\xda\xe9\xe1\x18\x40\xaf\xa4\xc9\x65\x56\x37\x3d\x24\x64\x17\xc4\x88\xd5\x35\x14\xc9\x30\x52\xed\x50\xbd\x8a\x6f\xfe\x39\xff\x76\x3a\x9d\x6c\x25\xa6\x23\xe3\x53\x7b\x94\xdc\xb3\x0f\x2b\x48\x18\x27\x21\xa1\x7e\x89\x27\x51\x81\xb7\x26\x8f\xa4\xbb\xfd\x75\x3c\x42\x25\xf8\x32\x4d\xe6\xae\xa9\x9d\x93\x77\x6d\x71\x06\xd5\xa5\x04\xde\x35\xe6\xb3\xd0\x44\xe5\x8e\x43\x0a\x6a\x0e\xe4\xe8\xf7\x81\x2e\xe5\xa6\x9b\x8e\x02\x0f\xc5\xfb\x2c\x3e\xfe\x24\xc2\xe0\x4b\x83\xee\x26\x54\x4c\x38\x37\x09\x4a\x83\xbe\x96\x97\x4e\x4a\x69\x5f\xfd\x53\xb7\xf0\xef\xf6\x8e\xa0\x2a\x95\x39\xd2\xb7\xbe\xad\x37\x3c\xd5\x5f\x9e\x0f\x9c\xfc\xcf\x8f\xbd\x8b\xa0\x7e\x3a\x9c\x8a\xe9\x64\x7f\x9e\x29\x92\x85\x3e\x61\x3d\xec\xaa\xd6\xd7\x51\x29\x2b\x07\xc9\x2a\x51\x47\x72\x6b\xb0\x2c\xde\x17\x8d\x59\x56\xb7\xd4\xe6\x0d\x7a\xa4\x81\x58\xf7\xe5\xca\x4a\xb1\x45\xcd\x4c\x72\x6e\x25\x25\xff\x3e\xce\xcf\x0f\x3c\x60\xee\xe5\x5d\xdc\x24\x4e\x5e\x4e\x28\x93\x03\x81\x66\xf6\xb5\xdb\xe9\x42\xb6\xca\x67\xf3\x69\x78\x96\x3b\x14\x42\x95\x81\xeb\x53\x94\xf0\x7e\xa2\xa3\x7a\xb1\x26\xf1\x2f\xce\x85\xbb\x77\x03\xcc\x1d\x4a\x03\xcc\xc7\x88\xa8\x87\x92\xa0\x84\x4f\x4b\xb3\x70\x8e\x6d\xf2\xcc\x65\xa5\x69\x12\xcc\x8b\x3c\xdb\x40\x7a\x85\x11\x68\x0d\x8e\x76\x5a\x3a\x66\xf8\xc1\xa3\x03\xc9\x73\x4b\xab\x60\x4c\x60\xea\x0f\x36\x74\x98\xa3\xcb\xa8\x87\x80\x09\xd8\x9f\xb5\x3d\x75\x3d\x23\xb2\xb4\xbc\x14\x82\x4e\xac\xa3\xbe\x89\x25\xf8\xbe\x8f\x9f\x8b\xbb\x78\x5f\x24\x09\xf1\xa2\xb7\xe0\x4e\x76\xf8\xd9\x06\xe3\x89\xbe\x13\xed\xbf\x0f\x5c\xcf\x04\x9f\xf9\x14\xb3\xa4\x0e\xdd\x11\xc2\xcd\x3
c\x1f\x8e\xef\x49\x84\xa6\xe8\x0e\x62\xc7\x6d\xd6\x94\x3c\x33\xa9\x0e\xac\x23\xaa\x02\xfa\x2e\xcf\x89\x41\xed\xe0\x23\xee\x78\xac\xaf\x24\x97\x30\xe9\x1f\x44\x61\x66\x2a\x9d\xdc\xd2\x76\x8f\x23\x46\x12\x1d\x79\x58\xbb\x95\x75\xac\xe5\xf8\xdb\x83\x95\xe4\x8a\x64\xcf\x0d\x8e\x7b\x12\x25\x5c\xa8\x1b\x3b\x7b\x5d\xf1\x53\xa9\xd7\x9b\x42\x43\x3e\xb9\xa4\x91\x54\xc2\x19\x15\xb4\x5c\x26\x89\x8f\xe1\x93\xbf\x36\x93\x81\x11\x69\xf1\xc6\x97\x4e\x7e\xd3\x04\x5f\x38\xdd\x93\xaa\xdc\x60\x1e\x15\xe4\x60\xd7\x69\x76\x5d\xad\x12\x0d\x39\xa4\xed\xcf\xe5\x94\xf4\xcb\xfb\x63\x43\xd6\xd1\x44\x45\xe4\x6d\xbd\x80\xc1\x65\xb2\x10\x4e\xaf\x48\xfa\xa2\x93\xc7\xa9\x91\x29\x8d\x17\xf8\x7e\x97\xa3\xf7\x7f\x57\xe8\x21\x9e\x0a\xb5\x61\xd4\x45\x78\x37\x0b\xef\xdf\x33\x0c\x4c\xee\xef\xa7\xc0\x40\xd2\x01\xe5\x00\xf4\x62\xa6\x29\xce\xf8\x02\xa6\xc1\x8a\xb1\xa3\x0a\x69\xc7\xe3\x37\x11\xc3\xaf\x69\x6c\xe6\x0b\x12\x51\x57\x0f\xaf\xd3\xad\x04\x21\x9c\x85\x82\x44\x71\xa6\x5b\x1e\x78\xfa\x8c\xe2\x31\xba\x39\x11\x30\x40\x9d\x3f\x9f\xdb\xf3\xf9\xcf\xd5\xd1\xf3\xee\xdf\xfe\x60\x1d\xcf\x47\x24\xbb\xf6\x88\xec\xe3\x76\xfc\xea\x26\x94\x56\x0b\xd3\x96\xc7\x00\xf4\x89\x6f\x39\xc3\x1d\xf8\x20\x1e\xfc\xe3\x08\xba\x02\x3b\x13\xbc\xfb\xf7\x9f\xf7\xb9\x3d\x5f\x07\x1e\x79\xb6\xf0\x78\xf6\x45\x65\x11\x73\xe9\x98\x94\x66\xcf\xb5\x0e\xbc\x8a\x66\x3e\xd0\x4c\xa0\x15\x73\x9d\xe8\x40\x88\x18\xce\x3c\x6a\x2a\x9f\xd9\xc7\x90\x32\xff\xbc\xb1\x47\xdb\xb7\xc1\xcc\x2a\xce\xe2\x60\xb6\x8b\x2a\x21\x4d\x3a\xc8\x43\x98\x0a\xd3\xcf\x52\x33\x20\xc1\x94\x08\xba\xb1\x76\xac\xc9\xea\x23\x20\x32\xfe\x2f\x0c\x3e\x2f\x0e\xce\x40\x1e\x56\x95\x81\x2c\x98\xb9\x07\xc6\x97\xde\xbc\xd9\xb0\x1d\xcc\x3d\x49\xb8\x0a\xe6\xba\xc5\xe7\x13\x28\x61\x33\xb6\xea\x40\xe8\x8a\xd6\xfd\xf9\x7a\x0f\x75\xb7\xa2\x81\x82\xdd\x9d\xdf\xbf\xfe\xed\x0b\x08\xc8\x7c\xe5\x44\x0f\xef\x04\xdc\x7f\x3c\xff\x05\x4c\x23\x2a\xc8\x82\xfe\x73\x1a\x19\xf2\x02\x30\x53\x31\xbb\x13\xe5\x40\x94\xac\x13\xca\xbb\x54\x0a\xf6\xd0\x7c\x0f\x25\x45\x69\x95\x98\x28\xc7\xda\xd8\x43\xc9\x3d\xa9\x26\xe1\xcc\xe8\x77\x16\x9a\x28\xc4\xb5\xb8\xdf\xc4\x17\x05\xe7\xf7\xfd\x7c\xe3\x50\xdc\xe9\x0c\x2c\xcc\x7e\x62\x3b\xab\x8f\x47\x29\xf4\xdb\x69\xe5\x92\x38\xda\x4b\x13\x33\x86\xc7\x58\x5a\xc0\x1b\x8a\x51\xf6\x04\x3f\x11\x5d\x2d\xb1\xcb\xee\x41\xac\xe4\x68\x79\x88\x51\xc2\x8e\x03\xe7\x40\x98\xfe\x67\x64\xc5\xfe\x63\x08\xc4\x76\x8a\xef\xf7\x3b\xda\x31\x6f\xf7\xf6\x13\x64\x36\xfb\x22\x70\xbb\x3b\x11\xa1\xcc\xea\x9e\x6d\x3d\xfc\xa3\xf6\xc7\x7d\x42\xca\x6e\x7b\xa8\x97\xda\x14\x54\x81\xf2\x91\x5d\xa0\xb4\x90\xd8\xbf\x8a\xd5\x33\xe8\xe2\x74\x4d\x03\xc9\x1d\x9a\x01\x8c\xfa\x70\x31\x7c\x12\x3c\xb2\x31\x1c\xe7\x88\xe7\x1e\x33\x85\xb9\x0d\xf3\x73\xff\x00\xdf\x13\x06\xc3\xd3\x43\x9d\x72\x99\xaf\x52\xd0\x93\xbd\xc6\xe5\xc5\xc1\xde\xb5\x1e\x58\xf0\x98\x3a\x25\x38\xc7\xbd\xbd\x7a\xfb\x5b\xc5\x18\x82\x4e\xd5\xfc\x93\xbf\x80\x12\xf7\x12\x55\xc7\x69\xe2\x5e\xc8\x0b\x2a\xb9\x7d\xb2\xe4\x46\x38\xf7\x90\x2f\xaa\x66\xb4\x91\xd5\xe3\x91\x68\xfe\x67\xce\x9a\xe3\xf2\xf8\x37\x74\xcb\x17\x53\xac\x3e\xc7\x54\xc3\x66\xbd\x39\x14\xc9\xd8\xdb\xd7\xfa\x38\x57\x04\xf2\x68\x40\x0a\xc6\x91\xe3\x18\xdf\x02\x27\x74\x7f\x7c\x38\x8d\x4c\x05\xf2\x0d\xc6\x56\xef\x3f\x6f\xcf\x81\xee\xce\xea\x1e\x10\x9b\xef\x5e\x4d\xcb\x40\xb6\x4d\x9c\x55\x9a\x1c\x1f\x46\x9b\x5f\x26\x46\x81\xce\xff\x22\xad\xa0\xa9\x34\xb3\x33\x24\x72\x12\x42\xbf\x10\x19\xb4\x95\x26\x05\xb3\x48\x1b\x7b\xa9\x5c\x9b\x43\x03\x98\x55\x61\xaf\x9e\x8f\xe8\xa4\x2e\xf0\xfe\x6d\xfc\x6a\x6a\x85\x86\x19\x5e\x0e\xd5\xe8\x67\x4a\xad\x7a\x4b\xd0\x6c\x76\x33\x75\x8c\x6f\x4c\x90\xdd\x2b\xfa\x1b\x76\x6f\x5a\x8e\x88\x0e\x52\x
6c\x05\xb9\x98\x70\x06\xaa\x57\xc4\x00\x21\x7b\x3a\x4a\x01\x60\xec\xf0\x5c\x88\x99\xee\x0b\x64\x02\x23\x45\x35\x65\x73\x38\x75\x1b\xb9\x18\x6e\x1e\xe2\xc1\x46\x1d\xd7\xa1\x51\xab\x30\x4b\x29\x5f\x26\x38\xad\xeb\x26\x8f\x64\x44\x6c\x35\x89\x2c\x27\xac\x24\x44\x08\x4f\x70\x15\x93\xc7\xa7\x62\xcb\xa9\x7e\x33\x3c\xe6\x95\x8f\xe6\x28\x81\x3a\x15\x62\xd6\xd3\x10\xca\x9c\x77\x8c\x73\x26\x0c\xe6\x64\xee\xba\x43\x78\x2a\x40\xde\x35\x36\xf8\xf7\x39\xad\x1f\x55\x81\x83\x54\x2f\xa2\x20\x84\x21\x9e\x91\x62\x66\x3d\x9c\x4e\x9a\x14\x99\xbb\x91\x63\x29\x9d\x2d\x6d\x64\xa4\x83\x6e\x66\x61\xdf\xf2\xce\xde\x1c\x9f\x95\x4b\x9c\x84\xd7\xe4\x84\x1b\x6b\x31\xd8\x9c\x4e\x9f\xe2\xa5\x1c\x2e\xca\xb6\x0d\xfb\x69\xfb\xc0\x91\xdf\x08\xcf\x53\xd6\x70\x5b\x9c\x21\x08\xba\xe2\x6a\x87\xbc\x75\xdf\x04\x4d\x14\x15\x01\xf3\x0f\x98\xab\xed\xd1\xb0\xb6\x14\xfe\x93\x53\xcb\x53\x19\xd0\xeb\x89\x47\xae\x52\xb3\xbc\x12\x32\x0e\xd9\xeb\xd9\xde\xda\x16\x1e\x6b\xb1\xc0\xda\xa5\xaf\x9a\x72\xf8\x1b\xeb\x0b\xa9\x28\x16\xe8\x30\x0b\x38\xbc\xed\x9b\xd5\x76\x99\xab\x7a\x89\x1a\xa6\x50\x3b\x15\x29\xfa\xc9\x7c\x5c\xfe\x0c\xb5\xd2\xef\xa7\xf1\xdf\xad\x7a\xcd\x26\xa8\xc8\x9e\xce\x86\xf3\xe8\xa3\xdc\xc8\xfc\x1f\xb7\xeb\xed\xe6\x4b\x10\x6f\x8f\x1d\xef\xe1\x85\x2e\x7f\x57\x0b\xfc\x18\xfb\x5d\xd2\xca\x91\x9a\x97\x21\x84\x8d\xd3\xaa\x86\x32\x75\xee\x26\x34\xba\xbd\x61\xb3\x1e\xa0\x3e\x12\x49\xb6\x0f\x4e\x56\x26\xfe\x4a\x3b\x47\xc2\x23\xdf\xdf\x3f\xaf\x3f\x1c\x10\xd8\xf9\xeb\x1e\xb7\xeb\x7d\x46\x5e\x6e\xea\x6e\xd9\x55\x85\xb3\x95\xb2\x7c\x83\x46\xa7\x8a\x1c\xe3\x2e\x11\x0e\xa0\xbb\x05\x38\x05\x05\x6b\x6e\x4a\x81\xff\x2a\x64\xdc\x5c\x37\xb3\x59\x24\x2d\x13\x53\xb6\x5e\xe2\x81\x43\xba\xe6\x41\x94\x66\x62\x5e\x9f\x2f\xf7\x88\x9a\xd0\x9c\x86\x2c\xd0\x90\xf9\x50\x44\x1b\xed\x83\xff\x07\xe5\xa0\x29\x2f\xa9\xd9\x40\x3c\x02\xa3\x4f\x7e\xcf\x21\x8a\x8d\xe7\xb2\xd7\x2a\x38\xea\x4e\x51\x6c\xc1\xe3\x95\xa4\x3f\x8f\xf5\xeb\xe6\xb7\x89\x19\x8f\x28\xb8\xbe\xa2\x88\xb3\x68\xda\x1d\xfc\xb5\x91\xfb\x62\x29\x3c\xf1\x07\x72\xbd\xa8\x64\x2a\x3c\x40\x21\xcc\xae\x43\x2b\x6e\xc2\xb0\x6e\x3d\x0e\x5c\xd8\x95\x8b\x06\x8d\xa0\x02\x76\xd1\x7a\x7e\x1d\xbe\x7e\x91\xc9\xf8\xfc\x9e\x66\xf0\xa3\x7d\x7e\xfd\xeb\xef\xa8\x84\x8e\xd3\x35\x92\x27\x6d\x1b\xf6\xeb\x82\xaa\xf4\xe9\xdf\xe5\xf6\x39\x56\xa5\xc9\x57\x44\x2e\x7e\xcc\x99\xa1\xb3\xd3\x36\x11\x4a\x26\xc4\x61\x67\xae\x03\x5b\x81\x6b\x5d\x61\x97\xbb\x32\x18\x0a\x25\x54\xe5\x7b\x0a\x05\xf7\x92\x6f\x65\x24\xba\x1a\x75\x56\x8b\x81\x36\x3d\x20\x22\x1e\xa3\x53\x99\x63\x18\x73\x4f\x2d\xe0\x28\x03\x5c\x62\xab\x86\xe0\xc2\x81\x4d\x44\xe4\x92\x4a\xf7\xd9\xfb\xed\x1f\x92\xb1\xde\xc3\x3e\x90\x24\x29\x4f\xa2\xb2\xf6\x7c\x20\x67\x82\xf8\xe0\xbe\x24\xf7\x61\xea\x34\x66\xd7\x87\xa2\x8b\x60\x76\x5d\x81\xfe\x58\x1c\x02\x2d\x62\xd9\x3b\x7b\x8d\xc8\xa2\xc8\xf8\xca\xdf\x8f\xc3\xaf\x9c\x6f\xa2\x25\x3a\xb3\xd9\xbd\xc5\x6b\x61\x49\x6a\xe0\x14\xee\xf1\xa1\xa9\xac\xd3\xff\xcd\xd4\x95\x6c\xa9\xae\x04\xc7\xbd\xff\xc2\x2c\xbd\x6a\x66\x58\xf9\x4b\xbc\xd0\x50\x12\x42\x13\x68\x44\x9c\xe3\x7f\x77\x46\x44\x16\xd7\xef\xbe\x3e\x57\x07\xba\x6f\x83\xa8\xca\xca\x8c\x8c\x8c\xb0\x7d\xeb\x17\x11\x16\x88\x23\x12\x69\xff\xf3\xf5\xb7\x85\xcc\x6e\x22\x38\x87\x8d\xd2\xfd\x47\xf5\x78\x06\xcb\x06\x79\xe9\x85\x61\x50\x51\x11\x90\xeb\xab\x0a\xb1\xc5\xc7\x9b\x14\x6c\x29\x33\x6e\xdb\x49\x3a\x38\x49\x5c\x62\xb4\xfb\xf8\x7d\xdc\xef\x98\x5e\x57\x2b\x4e\xd0\x3c\x8b\xfa\x00\xb2\x22\x35\x86\xb0\xd3\xc9\xc5\x64\x73\x56\x88\xc6\x1a\x2c\xdd\xe1\xd4\x3a\x04\x30\x99\x4d\xc8\x00\x8b\x1d\x62\x97\xa5\xfb\x4f\x3e\x5f\xbb\xfb\x46\x6a\xb9\x74\x74\x53\xcd\x25\xe3\x18\xdc\x46\x65\xeb\x25\x3f\xb8\x59\xbc\
x96\x45\xc7\xe7\x70\xaa\x6a\xbc\xcd\xbf\xcb\x31\xdf\x9e\x1c\x87\xa4\x06\x0b\x85\x7a\xbe\x53\x5b\x64\x6f\x45\x86\x97\xcc\x0d\x27\x28\x40\x72\xfd\x6c\x6d\xbf\xde\x76\xde\xd0\x65\x72\x63\x9f\xeb\x89\x86\x5b\x2e\xbd\x48\xfa\xb0\xb7\x73\xb9\xd2\xba\xc4\x95\xa8\xfa\x62\xf2\x64\x01\xd0\x64\xc8\x75\x22\x91\xed\xee\xba\x61\x40\xdf\x33\x57\x87\xe8\x5c\x6f\x27\xf1\x59\x99\xca\x15\x06\xe6\x27\x1b\xb5\x90\x5a\xe2\xcf\x3f\x31\x57\xa4\x0d\x1f\x19\x59\xb4\x5f\xfa\x8d\x63\x0b\xa5\xea\xed\x54\xcf\x23\x14\xe9\x69\xb4\x85\x86\x35\x3a\xd2\xbd\xe7\x2a\x28\x46\x2c\x80\x45\x1c\xa1\xf4\x16\x98\x5a\xbd\x4c\xc3\x12\x0d\x31\x22\xa9\x60\x64\x68\x5e\xe2\xa0\x27\x76\xf6\x55\xaa\x9f\x68\xf1\xe2\xfa\xa3\x18\xb1\x12\x8d\xa2\x71\xaf\x3a\xc0\x8b\x95\x5a\x41\x57\x3b\x27\x37\xc4\x58\x94\x17\x3b\x21\xfd\x3b\xb9\x60\xee\xe4\x91\xb8\xa3\xaa\x4c\x54\x9c\x28\x96\xfc\xa9\x0a\x05\x5d\x34\xa7\xb4\x96\x89\x73\x4d\x64\x06\xcd\xf8\x60\xeb\xdf\x9b\x99\x65\xf3\x9b\xf3\x8e\x2a\x2a\x1a\x81\xd4\xb0\xa1\xc5\x8c\xde\x69\xd3\x10\xa6\x90\x8f\x2b\xfc\x31\x24\xd5\xd4\x53\x04\xde\xe7\x41\x74\x2a\x42\xf2\xf4\x23\xb4\x37\xb3\xe4\x4f\x95\xdd\xec\xb2\x01\xc0\x2b\xb6\x88\x24\x3a\xb6\x6f\xe7\xb6\xc8\x28\xa4\x86\x7e\x99\x74\xcd\xaf\xae\x68\xa9\x14\x02\x0e\x12\x3f\xef\xc1\x03\xd3\x7d\xb4\xf0\x48\x6a\x7e\xf4\x8c\x4a\xb2\x66\x3c\x90\x9e\x95\x58\xa9\xc1\x5e\x9b\xa5\x4a\x73\xc3\xae\x58\xe2\x81\x39\xeb\x91\xc1\x30\x3f\x80\x1b\x2f\xcb\xaf\xf7\xa1\x7e\xe1\xe7\xf2\x4b\x7f\x7b\x11\xb7\xd9\x40\x87\x91\x77\xa0\x95\xaa\x59\x4c\x18\xdc\x89\x31\xc3\x40\xd7\xe8\x4f\xe2\x8b\x97\xa3\x6c\xb9\xdb\x6f\x78\xb1\xd1\xd7\x6e\x2e\x8b\xd6\x61\x3c\x91\x7b\xaf\x8f\xa2\xb4\x68\x4d\x44\x51\x33\xbd\xae\x74\xea\x9e\x2c\xaa\x88\x68\x44\x6d\x51\xdb\x33\xa1\xa1\x41\x8d\xfd\x46\x4a\x33\x86\xd5\x6e\x25\x71\x48\xd8\xf6\x08\x76\xad\x7b\x32\x39\xea\xee\x67\x02\x09\x66\xd5\x43\x51\x05\x74\xdd\x2e\xf6\xbf\x15\x5d\x62\xaf\xc3\x52\x72\x2d\x3f\x1c\x03\x70\x3d\xe7\xa5\x67\x85\x20\x83\x88\x5d\xbd\x60\x3a\x9a\xb9\x58\x4f\x40\x7b\xa9\x06\x69\x6e\xda\xc7\xfa\x3c\xb2\x79\x0e\x79\x06\x99\x72\xa1\x52\xa5\x50\x43\x03\x11\x73\xc2\x5c\xfb\x38\xdf\x7f\x90\xeb\x8d\x15\xf0\xfe\xf6\x52\x2a\x75\xf8\x30\x70\xe7\xe3\x37\x94\x47\xa5\x0a\xc0\x37\x79\x7f\x46\x04\xf7\xcf\xa0\xb1\x84\x7f\xa2\x50\xb4\xa2\x14\x09\x51\x29\xf8\x04\xe5\x22\x39\x41\x93\x77\x8e\xc7\x5c\x04\xc8\xb5\xd9\x8e\x57\xf2\xfa\x4e\xa7\xe8\x87\x72\xbc\xdc\xc2\xd3\x62\xf9\x4e\xc9\x89\x1a\x88\x9d\x3a\xf3\x5d\xe5\xa0\xa8\x3d\x92\x4c\xb1\x8f\x38\x08\x4f\x73\x9d\x62\x25\x57\xaf\x7a\x6a\x3f\x62\xdd\x5a\x81\xec\x66\x6a\xe1\x23\x8d\x00\x78\x96\x6b\xe5\xdf\x96\xf3\x91\x1a\x36\xfd\xbd\x3e\x3c\xd9\xf9\x81\x7c\xbf\x86\xbc\xed\x8a\xbe\x01\xa2\xd8\xf2\xb9\x79\xa0\xe6\xf6\x2e\x2a\x45\xb8\x31\x75\x1c\x0e\xd8\x9f\xae\x87\xf3\xcd\xf9\x8a\x64\xd9\x08\xd8\xdc\x3b\xbe\xf9\x77\xd7\x30\x36\xc9\x9b\x68\xe1\x8e\x02\xce\xa0\xbc\xa1\xb8\x24\x0f\x0f\x8e\x15\xc5\x51\x1f\x97\xfc\x4f\x13\xe8\x60\x05\x35\x4f\x9a\xb4\xe7\xd2\x8d\xac\x76\x4b\x7a\x92\x54\x59\x43\xd9\xbb\x6e\x26\x62\x96\xe6\x70\xe0\x7c\xe8\xa4\x6f\xb9\xbd\x30\x90\x5c\x2f\x97\xa1\x6c\xde\x24\xbb\x9f\xd9\x56\xbe\x9e\x8f\x6a\x1e\xdf\xf2\xea\x91\xe1\x5d\x5c\xee\x9a\x0c\xda\x91\x44\x79\xbd\x46\xcf\x57\xce\xf2\xd1\xee\xca\x1f\xa8\x7d\x8a\xc8\x0b\x16\x5b\x7a\x3e\xdb\x59\xfc\xe0\x71\x20\x22\xb2\xc8\xef\xf8\x9e\xf8\xf6\x60\x66\x2e\xea\x14\x6d\x99\xf7\x7e\x91\x46\x29\xda\x25\xec\x8f\x13\x45\xc0\x2c\xe1\x70\xc5\x2a\xb2\xd2\x84\x7c\x5b\x78\x9b\x34\xfa\x9a\xd9\x56\x94\xc9\xa6\xd5\x4d\x02\x6d\x5b\xaf\x84\xe0\x41\xe3\x4e\x16\xcc\x88\x98\xf8\xe2\xd2\xfb\xab\xfd\xe0\x53\x23\x30\x11\x71\x28\xb3\x75\xb9\xd4\x77\x77\x39\x1e\xd8\x77\xee\x81\x7a\xf0
\x68\xfe\x49\x69\x20\x92\x9c\x1c\xef\xb1\x07\xe7\x4e\x0c\x9d\x25\x34\xfd\x3f\x41\x50\x7c\xc8\x6a\xa5\xcc\xc1\x8d\xe5\xc0\x54\xb3\x33\x81\x39\x6a\x9a\x08\xdb\xa1\x6f\x11\x4b\x4b\xbb\x23\x6a\x60\x11\xd3\xc0\x4b\xbd\xac\xdd\xe0\x64\xd0\x59\x5c\xab\x3c\x3c\x5e\x9b\xc8\xf6\xcb\x36\xca\x80\xa3\x7b\xbe\x33\x16\x2d\x9d\xd5\x57\x3e\x86\x12\x7d\xa0\x49\x2e\xee\x57\x7e\x9b\x25\x7a\x9d\x83\xbe\x78\x54\xd2\x6c\xe9\x1c\x0b\x28\xe0\xbc\x62\xfe\x54\x51\x6a\xce\x1b\xe8\xe0\x0c\xb8\x77\xea\x06\x7e\xc8\x4e\xc3\x51\x3e\x56\x8e\x4b\x09\x1b\x81\xfa\x16\xc8\xb3\x7b\x7c\xbf\xe1\x7e\xb8\x23\xee\xd2\x31\x4e\x5d\x5f\xcb\x39\x1f\xbc\x51\x18\x1a\xd3\x2b\x81\x2d\x2b\xb4\x1d\x70\x79\x7e\x15\xac\xa7\x42\xce\x6e\x49\xb0\xcf\xb5\x69\xbd\xed\xc3\xe9\x8a\x59\xf0\x4e\x28\x74\xf6\x58\x3d\xf6\x95\x65\x04\x38\x70\xb8\x61\x35\xda\xab\x3c\x27\x08\x0a\xab\x61\x3c\x5b\x5c\xdd\x4b\x07\xc9\x93\x6f\x3b\xda\x85\xb4\x23\x06\xa3\x37\xbb\x93\xb0\xba\x0f\xec\xac\x7f\x7f\x13\xcd\xee\x99\xb2\x49\x21\x32\x19\x9e\xb2\x8a\x82\x9c\x27\xf3\xcf\x2e\xdf\x44\x1c\xd8\x4b\x5f\x96\x29\xc7\x85\x64\xf9\xfd\xf5\x6f\x8f\xaf\x1d\x87\x12\xb4\x79\x0e\x17\x26\xa8\xb6\xe4\x1b\x17\xa9\xe8\xa4\x9c\xb6\x55\x93\x62\xd2\x3f\xce\xde\x6f\xe4\x60\x72\x05\xef\x89\xba\xcf\x0c\xe4\x53\xf2\xb5\xff\xf9\x24\x88\xe6\x2a\x0a\x08\xff\x6b\xec\x1b\x24\x9a\x56\x0f\xa2\xb5\xd4\x3b\xa1\x6f\xf0\x31\xc4\xe3\x7b\x79\xb3\x0c\x3a\xaa\xaf\x61\xa1\x4a\xad\x97\xd1\xf2\x34\xc5\xeb\xaa\x7d\xb0\x91\xc2\xae\x11\xcb\xc7\xf1\x35\x7d\x4e\x92\x93\x07\x26\x27\xec\x78\x18\x75\xfa\xbe\xa2\xa4\xf2\xab\x3b\x3f\x97\x55\x3b\xc6\xe1\x53\xfa\xfc\x7b\x8c\xf6\x3d\x87\xb6\x3c\xd3\xdb\x49\x9d\xd9\xe9\xf1\xab\x4f\xd4\x87\xe6\x26\x1d\xa2\x09\xf5\xfe\xe0\x7a\x3b\xba\xbf\xa7\xab\x43\xca\x04\x17\xe1\xb4\x9a\xf0\xf4\x85\xaa\xe2\x20\x82\x13\xe8\x41\x8e\x23\xa5\x0a\x53\xc9\x37\xd1\xfe\x84\xa4\x81\x4e\xba\xcb\xdc\xe4\x0b\xfe\xa9\xdb\x71\xbb\xbc\x00\xd3\xdf\xf4\x1f\x6e\xc9\xd4\x2c\x5c\x80\x18\x3b\x3f\xab\x51\x3c\xd8\x2b\x23\xc0\x03\xf1\xb0\x91\xdf\x26\xd7\x3b\x05\xbd\x21\x38\xda\x8e\x4b\x05\x1f\x5a\xb6\x78\x27\xfb\x97\xdf\xf5\x39\x19\x65\x3b\x22\xc1\xee\x94\x57\x61\x89\x91\xe6\x69\x99\x1e\x4a\x21\x21\x58\x0f\xfd\x68\xb5\xc8\xc9\xd4\x3e\xfe\x8d\xe1\x46\xc4\x74\xef\x4e\x79\x5d\x30\xcc\xb9\xcf\xd6\x22\x42\xaa\xcd\x8d\x41\xb8\x44\xf5\x5f\x52\xfe\x31\x51\x2c\x93\xbb\xcb\x0a\x8e\xb5\x36\x6c\xdf\x35\xce\xa0\xea\xa7\xc8\xbd\x72\x5f\xc7\x36\x29\xaa\x1f\xcb\x4a\xcd\x68\xe8\x3f\x15\x4e\x4e\x74\x75\xb7\xf7\xfb\x71\x3f\x70\x7e\x28\x6b\x8b\x5c\xea\xf0\x83\x1d\x49\x83\x23\xc7\xca\xc1\x5e\x53\x9a\x3f\xb0\x32\xa6\x79\x98\x94\x11\x45\xc1\x7d\xad\x09\xbb\xa5\x05\x3e\xdf\xb9\x0c\xcf\x85\x12\x69\x29\x0f\x9a\xfb\x7e\x3f\x81\x86\x88\x47\x32\xf2\x52\xd0\xfe\xf2\x49\x57\x3b\x91\x1f\xc2\x98\x1a\xc1\x1c\x56\x52\xba\xdf\x6a\x02\x29\x2b\x4a\x36\xf4\x98\x3a\x96\x08\x3f\xb6\x37\x27\x8d\xfa\x21\x49\x45\x46\x4a\x7b\xd7\x6c\x4d\x71\x1a\x69\xf4\x1b\x96\x64\x3e\x4b\xdf\xcb\x7a\xd4\x87\xea\xb1\xe9\x2f\x87\x32\x7d\xef\x9c\xda\xc8\xa0\xf6\xaa\x0f\x6f\xbc\xb0\xf0\x5a\x9e\x29\x5b\x9f\x5d\xba\x48\x00\xd7\x3b\xba\xed\xdc\xf9\xda\x5f\xab\x4c\x3a\xfc\x07\x79\x54\xa6\x96\xc9\x6d\xf2\x64\x4b\xc1\x75\x56\x07\x6a\x74\x3c\xd5\xf6\x50\xa7\x93\x2d\x85\x56\xa6\x46\x42\xed\x84\x97\x7a\xc5\xb6\xd8\x27\x76\xe6\x54\xdc\xd6\xb1\x44\xdf\xc4\xb0\xfc\x36\xdf\x62\xe0\x28\x6a\xdf\x2c\x5c\x27\x56\x1e\x3a\x49\xf4\xbf\xa2\x3f\x1d\xea\xb9\x52\xbd\x4a\xcc\x2b\xea\x4c\xfd\x3c\xde\x9c\x5b\xa7\x34\x89\x84\x1b\x2d\x97\x39\x71\x43\x1e\xeb\xea\x74\x20\xac\x84\xd8\xad\x40\xf1\x88\x5e\x1c\x63\x14\x11\x45\xef\xd8\xfb\xc7\x23\x65\xfa\xc5\xfb\xe5\x70\x02\x7f\x05\xf2\xba\x4a\xbd\x2a\x14\xc0\x6a\x2b\x7
0\x6c\x89\x17\x56\xe9\xac\x7c\xfa\x15\x29\xc1\x13\x4b\x1e\x1f\x8f\x9a\x7c\xde\x83\x48\xd2\xfe\x57\x27\x12\x23\x6f\x5d\xc3\x08\x91\x43\xf8\x44\x58\xdf\xfc\xab\x8a\x22\x4d\xfb\xbd\x8b\x2e\xc9\x47\x7b\x2f\x26\xb0\x4a\x38\xca\x9e\x54\xaa\x0c\x21\xe3\xa5\x31\x36\x8c\xc8\xab\x32\x8c\x52\x49\xe0\x07\x8b\x5e\x58\xce\x03\x0e\xab\xeb\x21\x3f\x4f\x78\x29\x97\x3f\xfc\x41\xfc\x58\xb3\xf7\xd3\x19\x29\x3e\x4b\x0f\xfd\x49\xfe\x50\xf1\xb3\xe7\x2e\x03\x2b\xea\x32\x7d\x64\x05\x59\x26\x96\xe6\x78\xf2\x53\xcc\xc9\xfb\x4b\x48\x38\x89\x5e\xb6\x80\x8b\xec\xc3\x5c\xf8\x06\xa7\xc4\xc1\xbe\xa8\xa6\x37\x26\xac\x70\x46\xe2\xb5\x52\x26\x0d\x4b\x43\x28\x37\x77\xc7\xc9\x7c\x70\x45\x6f\x28\xfc\xfb\x60\x6c\x98\x33\xe7\xe0\x93\xe1\x87\x28\x90\x4d\xae\x28\x60\xf1\x20\x59\xc5\x31\x19\xa9\x81\xe0\x11\x60\x88\x2e\x51\xe2\x5b\xbf\xef\x73\x4b\x8e\xa0\x65\x41\xee\xd8\x43\x80\x48\x9e\x48\x22\x2a\x63\xd5\xcc\xe1\x56\xbc\x3c\x02\xe8\xb4\xb3\xa2\xf0\x48\x85\x5f\x2b\x74\x40\x4a\xd5\x1c\x4c\xf2\x8d\xc2\xf9\xf3\x54\xb9\x4d\xe4\xaf\x23\x94\x87\x67\xb2\xd0\xdc\x60\xfe\x58\xf8\xc6\x9e\x4e\x97\x2a\x9d\xb8\xb9\x6b\x97\xd4\xac\x35\x19\x83\xe6\x98\x0a\x3a\x8b\x24\xca\x6a\xe2\xa1\xa4\xb2\x4f\x5d\xee\xa6\x59\x5d\xd2\x62\x06\x7b\x69\x2d\xe3\xde\x96\xd8\x2a\x4a\x88\x46\xc3\x23\x74\x7e\x67\xfe\x48\x61\x3d\x6e\xf9\x62\x1b\x38\xb1\xda\x42\xee\x60\x94\x81\x6f\x5f\xae\x8c\x28\xed\xd8\x7d\x38\x1c\xde\xae\xef\xcb\xbb\x61\x17\x01\xec\x66\xde\x7a\xbb\x8f\x9b\xf3\x87\x16\xfb\x4b\x03\xba\x32\x30\x60\x35\x71\xbe\xcb\xcc\x61\x7f\x91\x68\x98\xc5\xc1\x44\xd9\x84\x2c\x8e\x30\xdb\x23\x75\xc1\x34\xe4\x0d\x6b\x16\x7b\x4c\xb8\x02\x70\x31\x7c\xef\xc7\xca\x23\xd2\xf0\xfe\x9b\xff\x61\x7d\xcc\xb6\x51\x79\xb2\xcf\x9d\xd3\x8a\xc6\xb9\x0e\x52\x1b\x5a\x2b\xc9\x9b\x1d\x20\x25\x27\x60\x28\x7c\xb4\xcd\x07\xcd\x4f\x82\xd1\xa6\xc3\x6b\xac\x43\x98\xa2\x8e\x6b\x9c\xb3\x66\x57\xdd\x1b\xd2\x8a\xb8\x56\xca\xd8\xd6\x76\x77\x9f\x46\x6c\x0b\xdb\xec\xb9\xb7\x99\x1e\xb4\x1d\x60\x86\x0c\xfc\x88\xb3\xa7\xed\xf3\xd3\x09\x45\xa1\xa9\xc0\x44\x52\x74\x73\x28\xaf\xf5\x71\xf7\x43\x93\xc5\xe9\x2c\x0a\x7c\xed\x04\x29\xb9\xf3\x4f\x1b\x3b\xea\xa0\x77\x32\xc4\x8a\x93\x82\x05\x5a\xbd\x26\xe5\xab\x54\xf9\x1e\x15\x01\x14\x08\x34\xa3\x7e\x3a\xdc\xce\x1e\x18\xc0\xfb\xdb\xeb\x29\x6a\x22\x0d\x68\xc8\xaa\x5e\x6b\x3a\xef\x72\xa3\xfb\x37\xfb\x3a\x02\xd7\x04\xef\x98\xa6\x4f\xac\x9a\xed\x84\x11\x79\x1e\xe1\x44\x85\xda\x38\xea\xa1\x47\x66\x59\x2a\x33\x63\x34\x60\x3c\x1a\x0c\x89\xc3\x84\x76\x15\x09\xa5\xa8\xe3\x94\x36\xb4\x22\xd4\xdf\x96\xe7\x97\xb7\xf3\x7a\x23\xe7\xd7\xfe\xd2\x59\x78\x9b\xab\xe4\x8b\x73\xd3\xf5\xc6\x4f\xd7\xd3\x33\x5f\x78\x77\xce\xe7\xfd\x98\x29\x49\x3a\xff\xc5\x21\x33\x96\x76\x3c\x3b\x1f\x91\xb9\x8e\xee\xbc\x50\x5f\xe4\x30\xb2\xf3\x42\x35\x37\x49\x34\x81\xd6\xbe\xd5\xcf\xbd\xc2\x92\x0c\x25\x23\x7d\xe9\xbc\x6b\x08\xa7\x88\xd2\x98\x55\x16\x83\x35\xc3\x86\x98\x13\x3d\xa7\xfb\x5c\x27\x7f\x6f\xcb\x54\xa1\x85\xbd\x6c\xee\xa4\x7e\xb0\xbc\x52\x71\xe7\xd1\x6e\xa5\xfb\xcc\x57\xce\x87\x79\x25\x00\x2f\x5c\x3a\xa2\xfb\xc7\x5a\xec\x00\x89\xa8\xbb\xfd\x48\x5c\x98\xa0\x03\x2a\xc4\xd1\xe7\x15\x7f\x70\x51\xce\xcb\xc6\xa1\x93\xcf\xe7\x18\x8a\x49\x20\x57\x54\x7c\x85\x6d\x82\x08\x6f\xf3\x50\xef\x6f\xe4\x5f\xe7\xcd\x67\x26\xc8\x35\x97\x10\x58\xdd\x91\x88\x53\xa9\xe6\x98\xd7\xa1\xb9\xb2\xa1\x0d\xa7\x1a\xcf\x44\xda\xa2\x7b\x71\xb6\x23\x64\x95\x68\x6e\x96\x8f\xa5\x95\x6b\x79\xea\x5c\xcf\x6d\x2f\x24\xd2\xbb\x05\x82\xe1\xd3\xc2\x5d\x35\xba\x3f\x93\x14\x7f\xf8\xef\xc1\x14\x05\x0a\xe6\x7c\xd4\xde\xbc\x4f\x26\x01\x27\x8b\x42\xb0\xa5\x0e\xd9\x2a\xfd\x74\x23\x79\x37\xff\xaa\xac\xbe\x11\xf0\x1a\xf2\x36\x62\xb1\x00\x49\x3b\xde\xb4\x
-[... remainder of gzip-compressed data/Passwords.json payload elided ...]")
-
-func dataPasswordsJsonBytes() ([]byte, error) {
-	return bindataRead(
-		_dataPasswordsJson,
-		"data/Passwords.json",
-	)
-}
-
-func dataPasswordsJson() (*asset, error) {
-	bytes, err := dataPasswordsJsonBytes()
-	if err != nil {
-		return nil, err
-	}
-
-	info := bindataFileInfo{name: "data/Passwords.json", size: 67534, mode: os.FileMode(420), modTime: time.Unix(1452717629, 0)}
-	a := &asset{bytes: bytes, info: info}
-	return a, nil
-}
-
-var _dataQwertyJson = []byte("[... gzip-compressed data/Qwerty.json payload elided ...]")
[]byte("\x1f\x8b\x08\x00\x00\x09\x6e\x88\x00\xff\xb4\x98\xd7\x52\x23\x3d\x10\x85\xef\x79\x0a\x18\x72\xce\x39\xe7\x9c\x33\x98\x9c\xc1\xe4\x8c\x31\xcf\xfe\x6b\xf8\xb7\x56\xdf\xa9\xb2\xc6\xbe\xd8\xbe\xd9\x6a\x2f\x33\x5f\xb7\x5a\xad\xa3\x63\x67\x8a\x8a\x8b\xa3\xc9\xa7\xa3\x87\xcb\xa8\xa7\x38\xfe\xe0\x3e\x96\xb8\x70\xf7\x37\x74\x1f\x0e\x7f\xa2\xba\x3f\xf1\xdd\x6b\x3a\x9d\x2b\x8e\x5a\x86\xfe\x3e\x13\x3d\x2e\xcb\xf3\xbf\xe1\xde\xff\xff\x13\xa5\x22\x92\x7b\x7b\xfc\x5b\xbb\x19\x1f\xef\x65\xf3\x67\x6c\x1c\x8c\x84\x5c\x4a\x30\xca\x09\x02\xda\xca\x7c\xc2\xb3\x71\x1f\xbf\x6f\x2a\xb8\x8c\xe0\xd6\xd2\xfc\xe0\xf6\x72\x0f\x7b\x5a\x91\x24\x02\x2e\x27\x18\xd5\x04\xc1\x1d\xfb\x1e\xf6\xb2\x26\x49\x04\x5c\x41\x30\x5e\x0a\x82\xbb\x6a\x3c\xec\x75\xdd\xc7\x9f\xdb\x0a\xae\xb4\xda\xbc\x2a\x82\x51\x4d\x10\xd0\x54\xed\x13\xde\x2f\xfa\xf8\x6a\x5a\xc1\xd5\x04\x77\x57\xe5\x07\xd7\x1f\x78\xd8\xc3\x92\x24\x11\x70\x0d\xc1\x9d\x15\xf9\xc1\x48\x1e\x57\xc9\x7e\x0b\xb8\x96\x60\x54\x13\x02\x4b\x12\xf4\x3e\xde\x13\x01\xd7\x11\x7c\x3b\xef\x1f\xbc\x99\xf5\x71\x7a\xce\xc7\x0d\x03\xc1\xe4\x02\xae\x27\x18\x1b\x13\xac\xb2\xbf\x36\xf7\xe4\xb8\x7e\x0b\xb8\x81\xe0\xba\xbe\xdc\x55\x72\x0c\xdd\x58\x15\x54\x71\x23\xc1\x58\xa6\xc0\x2a\x9d\x56\x15\xd2\x7c\x21\x37\x59\xcd\x5b\xb3\x95\x22\xb7\x10\xdc\x5c\x92\x1f\x0c\x09\x8c\xb5\x92\x49\x04\xdc\x6a\x25\xc8\x6d\x56\x82\xdc\x6e\x25\xc8\x1d\x04\xa3\x9a\x20\x18\x82\x12\x8b\x30\x93\x08\xb8\xd3\x4a\xe9\xbb\xac\xe4\xad\xdb\x4a\xe9\x7b\x08\xa6\x40\xf0\x54\x51\x6d\x78\xbe\xa9\x1c\xb1\x20\x08\xb9\xd7\x8c\xdc\x67\x25\xc9\xfd\x56\x97\xc8\x80\x95\x24\x0f\x9a\x49\xf2\x90\x95\xc0\x0d\x03\x2c\x00\xc8\xad\x00\x9e\x57\x7d\xfc\xb5\x13\x96\xe4\x11\x56\xfc\xb6\xe1\x5f\xba\x98\xf4\xf1\xe5\x94\x8f\xef\x16\x0a\x6b\xf2\x28\xc1\x1f\x5b\x1e\x70\x3a\xe6\xe3\xf3\x09\x1f\x23\x79\x22\x78\x8c\x60\x2e\x93\x3a\x4e\xe9\x65\x92\x93\x51\x1f\xbb\xa2\x04\x3c\x4e\x30\x7b\xc9\x4d\xe2\xc5\xc1\x24\x5c\x95\x2b\x4a\xc0\x13\x04\xf3\x41\x02\xa8\xe9\xec\x3d\xf7\xc4\x55\x2f\xe0\x49\x82\xb9\x4c\xc2\xa8\xe9\xdc\xc8\xe3\x11\x49\x22\xe0\x29\x82\x59\x0d\x61\xd4\xf4\xeb\x99\x9c\x13\x12\x27\x11\xf0\x34\xc1\x04\xf0\xb2\xa0\xbe\x53\x86\xa9\x54\x2e\xa1\x80\x67\x08\xe6\x32\x99\x84\x97\x05\x61\x94\x43\x57\xbd\x80\x67\x09\xe6\x32\x09\x63\x95\x54\x27\xaa\x96\x4b\x22\xe0\x39\x82\x59\x0d\x61\x94\x7d\xaa\x13\x55\xcb\x25\x11\xf0\x3c\xc1\xdc\x0c\x56\xcf\x84\xa8\x32\xf1\xe4\x2d\x10\xcc\xf1\x61\xbf\x99\x04\x7d\x4d\x04\x2f\x12\xcc\xbe\x72\x12\x78\x39\xb3\x2d\xec\xb7\x5b\x95\x80\x97\x08\x66\x5f\x09\xa3\x2d\xe6\xd5\xca\x7e\xbb\x24\x02\x5e\x0e\xe9\x31\x44\x5f\x6c\x31\xf5\xe4\x68\x38\xac\xc7\x2b\xac\x98\x8a\x46\xe1\xa1\xc9\xe4\x51\xa7\x04\x38\x9d\x11\xf0\x2a\xc1\xa8\x40\x2a\x63\x42\x0a\x15\xf5\xdb\xdd\x26\x02\x5e\x23\x98\x8a\xc6\x2a\xe9\x5e\xa9\x21\xd4\x16\x57\xbd\x80\xd7\x09\xe6\x4b\x74\xac\xd4\x0d\x4e\x0e\xc7\xd0\x8d\xa7\x80\x37\x08\xe6\x8d\xc0\xfe\xb1\x32\xcc\x7a\xe2\x1c\x6f\x12\xcc\x9b\x99\x93\xc0\xdb\x84\xfd\xe6\x55\xe6\xf6\x47\xc0\x5b\x04\xe3\x3a\x97\x97\xb8\x61\x58\x55\x62\xc5\xdb\x04\x73\x94\xb8\x61\xec\x37\xe5\x94\xc7\xde\xb5\x4b\xc0\x3b\xa1\x03\xc2\xd1\x63\xf5\x18\xb1\xc4\x8a\x77\x59\x31\x75\x80\xc7\x98\xdf\xfa\x69\x31\xc5\x2d\xbb\xf3\x2d\xe4\x54\x8a\xe8\x02\x7e\x5a\x2a\xd8\x16\xee\x11\x4c\x89\x41\x9d\xd2\xa4\x54\xea\x3b\xf7\x1f\xe2\x05\x08\x7a\xdf\xea\xeb\xde\x81\xd5\x2f\x2d\x87\x81\xc9\x08\x82\x13\x7c\xb4\x80\x8f\xac\x3c\xf2\xb1\x95\x47\x3e\xb1\xf2\xc8\xa7\x56\x1e\xf9\xcc\xca\x23\x9f\x5b\x79\xe4\x0b\x2b\x8f\x7c\x69\xe5\x91\xaf\xac\x3c\xf2\xb5\x95\x47\xbe\xb1\xf2\xc8\x69\x2b\x8f\x7c\x6b\xe5\x91\xef\xac\x3c\xf2\xbd\x95\x47\x7e\xb0\xf2\xc8\x8f\x56\x1e\xf9\xc9\xc
a\x23\x3f\x5b\x79\xe4\x17\x2b\x8f\xfc\x6a\xe5\x91\xdf\xac\x3c\xf2\xbb\x95\x47\xfe\xb0\xf2\xc8\x9f\x56\x1e\xf9\xcb\xca\x23\x67\xcc\x3c\xf2\xb7\x95\x45\xce\xda\x59\xe4\x9f\x7f\x6e\x38\xdd\xbf\xd9\xa2\x6c\xd1\x7f\x01\x00\x00\xff\xff\x4c\xae\x50\xc0\xce\x20\x00\x00") - -func dataQwertyJsonBytes() ([]byte, error) { - return bindataRead( - _dataQwertyJson, - "data/Qwerty.json", - ) -} - -func dataQwertyJson() (*asset, error) { - bytes, err := dataQwertyJsonBytes() - if err != nil { - return nil, err - } - - info := bindataFileInfo{name: "data/Qwerty.json", size: 8398, mode: os.FileMode(420), modTime: time.Unix(1452717629, 0)} - a := &asset{bytes: bytes, info: info} - return a, nil -} - -var _dataSurnamesJson = []byte("\x1f\x8b\x08\x00\x00\x09\x6e\x88\x00\xff\x54\x7d\x5b\x62\xf3\x3a\xcc\xdc\x5e\xfc\xdc\x15\x74\x0d\xdd\x41\x9f\x28\x89\x96\x18\x51\xa2\x0e\x29\xda\x9f\xd3\xcd\x17\xc0\x0c\xe8\xfc\x4f\x27\x27\x5f\x12\x4b\xbc\xe0\x32\x18\x0c\xfe\xdf\xe3\xff\xa4\x76\x3f\xfe\xf7\xff\x7d\xb4\x23\xdd\xdb\xe3\x7f\x3d\x7e\xca\x76\xb6\x72\xca\x57\xef\x94\x73\x0a\x47\xb3\x6f\x9e\x51\xff\x3b\xd5\xf2\xd6\x7f\x5a\xc2\x2b\xe9\xff\x1f\xf2\x23\xb1\xe2\x67\xf1\x4b\x47\x29\x35\xca\x7f\xef\xf0\xc9\x45\xff\x25\x9c\x4b\xac\xf8\xb7\x9f\x30\xef\xfc\xd3\x5b\xba\xf5\xa7\xb6\x50\x2b\xfe\x50\xa8\x77\xd2\x7f\xb9\xb7\x72\x5c\xf8\xa1\x35\xd4\x39\x85\xf1\x8f\xf1\x57\xbe\xac\x65\x4a\x7c\xbc\x39\x87\xba\xdb\xb7\x96\x9a\xd6\x6e\xff\x9c\xe3\xdb\xfe\x5c\x8e\xfa\xd7\xdf\x21\xef\xf6\x74\x5b\xc8\x59\x1f\x45\x1e\x56\x7f\xf3\x53\xfa\xb9\xea\xb7\x63\x3d\xf5\xf1\xf4\x37\xf7\x64\xdf\x7a\xcb\x9f\xda\x6e\xfd\x0b\xe5\xb2\xef\x6f\xc9\x7e\x75\xad\xd1\x7e\x35\x2c\x58\x90\x29\xe0\x2f\xaf\xe5\xfc\x0d\xd9\x7e\xf2\x8c\x5c\x82\x59\x1e\xd7\xfe\x51\x56\x74\xde\xa2\xfd\xfe\x15\xab\x3f\x7f\xac\xb7\xfe\x85\xbb\xd7\xd3\x7e\xea\xd2\x8f\x48\x57\xb3\xdf\x3c\xae\x89\xbf\x20\x2f\x67\xff\x1c\x5f\xe1\xd4\x7f\x8b\xcb\x3b\xd4\xc5\x7e\xaa\xc8\xcf\xdb\xf7\xda\x1d\xe5\x9b\xfa\xb8\x2d\x9c\xf2\x51\xbf\xb6\x01\x5c\xd2\x5a\x56\x59\x78\xfd\x22\xc6\xc5\x7e\xad\xec\xf8\xf7\x35\xe8\x63\xf2\x83\x8e\x5e\xaf\xed\x63\xaf\x94\x72\xd4\x2f\x6a\x7a\xc5\x1a\xf0\x1b\x97\x3d\x43\x4d\xf3\xa6\x1f\x8e\xd7\x2b\xff\x74\x5d\x8a\x3e\x8e\x2d\xb2\xfd\xe7\x96\x8f\xb5\x23\x72\xc5\xdb\xf7\x7b\xad\xc1\xfe\x5e\x38\x12\xde\xfe\x1d\x6e\xfc\x8b\x9c\xa3\xb2\x37\x3c\xf7\x82\xa7\xbc\xe4\x43\xa2\x3d\xd6\x79\xc6\x5b\xdf\xe9\x5d\xca\x62\xcf\x55\x71\xf8\x6a\x69\xcd\x76\xed\x7b\xa2\x64\x29\xe2\x61\x6f\xf3\x13\xcf\x1d\x8b\x22\x8f\x5c\xf5\x63\xaf\xf2\xc6\x1b\xe6\x62\x5b\x7b\x85\x7b\x3c\xd9\xd6\xd7\xcd\xfe\xe6\x53\x0e\xa9\x7d\xf1\x0e\x6d\x93\x23\x70\xe3\xf1\xfa\x8d\x63\xdd\xd2\x71\x14\xfb\xab\xcf\xd2\xee\xbf\x3b\x8e\xdb\xf0\x09\xe7\x6d\x07\x2b\xfe\xb3\x17\xb1\x97\x4e\xcf\xa7\x1d\xe5\x25\x05\x3b\x41\xe1\x63\x3f\x7d\x7c\xf0\xa2\xcf\x62\x0b\xb6\xc9\xaa\xe4\xdb\xd7\x49\xfe\x4f\x3f\xae\xcb\xce\xbe\xec\x7d\xe4\xec\xe6\x60\x0b\xa2\xcb\xc0\x6d\xb7\xff\x8d\x4d\x3f\x52\x5e\xee\xc4\xe6\x7e\xce\x92\xed\x07\x9e\xa9\x6d\x38\x32\xf2\x57\xda\xb8\x5d\xf8\x88\x34\xf1\x7e\xce\x4b\x39\x43\xb6\x03\x51\xfb\x2f\x6e\x57\xe3\x05\x29\x72\xcf\x7e\xed\x1d\x0f\x9c\xa5\x5e\xb1\x85\x4f\x39\x43\x58\x67\x5d\x53\x5b\xaf\x38\x4d\x58\x20\xde\x56\x59\x9e\x17\x1e\xe9\xee\x33\x8e\xee\x55\x78\x11\xb6\x34\xdb\x6e\xcf\x35\xbc\xf9\xfa\x53\xf9\x2c\xf6\xe1\x6e\x36\x2a\x17\x75\xd7\xfd\x5f\x3e\x38\x59\x35\x62\x21\xff\xd9\x0f\xc9\x41\x2a\x7c\x67\xac\xbf\x5c\xa1\x66\x8f\x5b\x17\x3c\xc3\x16\xde\x76\x36\xf3\x81\x13\x83\x23\xc5\x4b\xc7\x9d\xb7\x1d\x5b\xc2\x99\xa2\xbd\xc8\x15\xe4\x87\x71\x5b\xf1\x66\xa7\x1c\xf6\x62\x5f\xc9\xbe\xd8\x0f\x3f\x63\x5d\xbb\xbf\xa5\x58\x42\x5b\
bd\x33\x1a\xb9\xad\xe6\x4a\x5c\x93\x48\xc5\xc3\x1c\xa7\xca\x8b\x48\xba\xe2\x3e\xaa\x6e\x9b\x1e\x2b\x80\xa5\x9b\x0f\x40\x5b\xa9\xbf\xd1\x89\x29\x85\xbc\xb2\xb6\x54\x00\xbc\x58\x4b\x00\x60\x6f\x1f\x05\x71\x72\xce\xca\x81\x6e\x3d\x6d\x72\x1c\x42\xdb\x43\x9c\x5c\x7d\xb4\x7d\xbc\xd2\x3a\x98\x21\x7c\xb9\x17\x9d\xc7\x68\x2d\x71\xb2\x34\xef\x99\xa1\x21\x3c\x77\xcf\xec\xe7\x9c\x98\xf0\x7a\x33\xf3\x83\x68\xbd\xdd\x6e\x4b\xbc\x7d\x22\xab\xc2\x5c\x08\x7d\x13\x48\x1b\xd5\x15\xeb\x4e\xa0\x96\xc7\x6c\x8c\x6d\x0c\xa4\xb1\xe9\xb9\x18\x09\x17\xa8\xf7\x89\x60\x7a\x0f\x8d\x89\xb8\x5b\xf0\xa7\xf6\xc2\x5a\xc3\x59\xa1\xf8\x4a\x57\xb1\x28\x2a\xe7\x4c\x75\x94\x2b\x46\x8b\xe1\x5b\x87\xbf\xde\xc4\x60\xd9\xf1\x83\x7e\x46\x5b\xd5\x78\xd1\x84\x36\x1d\x48\xce\xce\xdb\x9e\x7f\x71\x22\x54\x99\xea\x41\x0d\x42\xf3\x85\x53\xfd\x90\xfb\x61\xbe\x4a\xa3\x4c\xc8\xdd\x7b\x37\xbf\x4e\xe7\xc7\x69\x4e\xec\x25\xda\x5c\x22\x7b\xea\x56\x56\xd5\x19\x65\x18\x8b\xe3\x44\xf3\x49\x62\x15\x4f\xbc\x46\x2a\x64\xd2\x9f\x0f\xab\x46\x93\xd9\x7b\x43\x3d\x59\xe2\x8c\xe7\x93\x8a\x25\x15\x65\x67\xea\x32\x98\x1c\xc9\xce\xd9\x25\x3e\x92\x32\x62\xc6\xce\x0e\xa7\xbb\x21\x48\xf4\x29\x69\xb5\x8d\xa9\x22\x4e\x92\xef\x11\xf1\x9f\x0e\x23\x43\x6a\x91\x7e\x7f\x07\x97\x1b\xee\x4a\xbd\x0b\x55\x95\xd2\x93\x71\xb2\xd7\x45\xb5\x14\x15\x7e\x02\x82\xa9\x5f\x57\xe4\x2e\xa9\x31\x2d\x51\x62\x63\xa6\x64\x1c\xb5\x56\x59\x4d\xb4\x5e\x59\xd8\xd4\xa3\x05\x60\x44\x68\xbb\x28\x90\xae\x3e\x0a\x4d\xea\xae\x23\xdc\x71\xbe\xd7\x88\xa9\x2d\x72\xa9\x4c\x48\xdf\xec\x0b\x61\xa3\x5d\x47\xf1\x83\x45\xa9\xfe\x91\x64\xb7\x5a\x30\x8c\x08\x39\xd4\xb3\xc3\x22\xd8\xf0\x0c\x16\xbb\x7d\x10\xfe\x0d\x38\xcc\xa4\x83\xad\x01\x21\x91\x71\x21\x6e\xc1\xac\x0c\x49\xe8\xed\x62\x30\x2e\x67\xc8\x49\x8b\xe7\x87\x6d\x55\x79\x88\x48\x4f\x78\x25\xb6\xa1\xfb\xd9\x97\x97\x29\x6f\x6c\x19\xfb\x03\x33\x5b\xdb\x54\x87\x3b\x8d\xee\xbd\xf3\xdb\x3f\xe1\x56\x51\x7c\xdb\x9f\x11\x23\xfa\x7f\xd1\xb5\xec\x31\x6f\xc2\x6e\x2d\x10\x66\x1d\x7e\x3d\x76\xe8\x98\xdc\xac\xd6\x7d\x8c\xca\x53\xdf\x49\x47\x2e\x3f\xe8\x04\x79\x82\x89\x16\xb5\xd3\xb9\x98\x4c\x08\xa1\x1c\x09\x7f\x6f\xea\x98\xe2\x70\x6a\x25\x76\x1b\x9d\xb5\x1b\x21\xf2\x00\x51\x9f\x8a\xd3\x52\xd1\x0b\xa8\xd6\x95\xbc\xff\xa4\xe1\xce\xb7\x1c\x01\x2e\xac\x8b\xb7\x9d\xe4\xa8\x28\xaa\x0c\xe8\x79\xa4\x0a\x3f\xe2\x17\x3a\x8a\x3d\x30\x85\x6b\xe4\xe5\x73\x3d\xaa\x65\x60\xc5\x8b\x9c\x22\x8a\x4f\x6b\x3c\x0b\x98\xdd\x9b\xfb\x4b\xf8\x8e\x34\xb1\x8e\xe3\x42\x4a\xdb\x42\x56\x94\xf6\xb2\x83\x02\x33\xb9\xc4\xbf\xd9\x6e\xe2\x4c\xd9\xe8\xb6\x96\xbb\x29\xbf\xc7\x3a\xc0\xe4\x93\xe1\x77\xe5\xf5\x9c\x84\x6a\x96\xc4\xab\xc0\xda\x4b\xc9\x17\xf3\xd8\x39\x90\xb6\xba\x8a\x75\xf3\x42\xd8\x53\x4f\x9d\x11\xeb\xac\xd9\x1e\x72\x32\x16\xb6\xa3\x90\xe1\xad\x8a\x3e\x3c\x60\x62\x05\x74\x72\xcd\xe9\xc9\x45\xc0\xb4\x6f\x81\x70\x14\xcf\x95\xf6\x21\xc0\xd1\x70\x14\xe9\xc3\x62\x30\x0e\xd7\xbb\x7c\xba\x55\x0b\x98\xd3\xa9\xc5\xf4\x4a\x94\x27\xbb\x02\x3f\x2d\xba\x26\x8e\x4e\x8c\xbb\x3d\x19\x90\x07\xa7\x2e\xa3\x2a\xa0\x31\xfa\xdb\x3e\x74\xe2\xba\x6b\x56\xfa\x56\x2e\x18\x5e\x10\xb8\xc4\xe2\x0d\x8e\xd6\x39\x4a\x9d\xb0\xd4\xd8\xc8\x11\xfe\xd1\xdf\x6f\x1f\xcf\xc5\x56\x02\xbe\x27\xcb\xb7\xf2\x95\x9c\x56\x1e\xdd\xe4\x76\xdd\xaa\xbc\xac\xf8\x79\x48\x20\xaf\x68\x17\xf3\x52\xca\x87\x61\x05\x97\x58\x06\xa7\xa4\xca\x82\x81\x28\x1d\xce\x5f\x90\x49\x4c\x7b\x91\xb5\x90\x65\xc5\x47\x28\x9b\x43\x91\x69\x33\x2f\x5a\xca\xe5\x05\x78\xc2\xdd\x68\xea\x28\xc1\x4f\x63\x09\xd9\xb6\xed\x23\x56\x45\xfc\x12\x94\xe0\xc0\xaa\x50\xa1\x71\xab\xb8\x6e\x1c\x29\x43\x32\xff\xcd\x45\x46\x01\x58\xf9\xad\x3f\xc4\x68\x31\x0b\x40\xa1\xfb\x05\x0f\x49\x55\x36\
x6f\xf0\xb6\xae\x14\x2c\xb5\xc4\xce\x17\x8d\x22\x4b\x89\xaa\xde\x42\xef\xaf\x31\x82\x17\x19\x46\x7d\xd8\x87\xee\xcc\x92\x8a\xa2\xd8\x3d\x69\x1a\xb9\x22\x4e\x76\x4e\xf9\xc4\x34\xcb\xed\x41\x26\xae\x7d\x79\xa0\x76\x05\x4b\x64\xcf\xee\x93\x54\xa8\x5b\xa4\x97\x8d\x21\x92\x6b\x3c\x6f\x63\x24\xa6\xd6\x0f\x57\x3b\x96\x51\x67\x06\x02\xbe\xd3\x59\x10\x1c\x3a\x14\x39\xea\x69\x72\x70\xf3\xfd\xe5\x62\xdf\xce\x62\x36\xe6\x97\x3b\x85\x8b\x65\x40\xa2\xb5\x0a\x02\x61\x8d\xe4\x35\x54\xce\xe3\xe0\xe7\xe3\x41\x46\x07\x60\x0e\xbf\xb8\x64\xb7\x0f\xc6\x57\x46\x10\xd7\xe5\xe4\xee\x2a\x9d\xcc\xba\xfe\xa6\xd3\x2d\x29\x91\xe8\x37\x87\x0a\xd8\xa8\x2e\x42\x80\x56\x97\x6a\xfc\x6a\x51\xbc\x19\x06\x94\x49\xdd\x35\x6a\x33\x62\xc2\x07\x8e\x7d\x45\xbe\x3a\x2a\x57\xd6\x35\x05\x36\xcd\xd9\xef\x6f\xac\xc7\xb0\xf4\xf7\x17\x09\xe7\x7a\xf6\x41\x7c\x22\xf3\x63\x2f\xbf\xd0\x47\x09\xe7\xee\x05\x04\xc9\x3e\x7c\xee\xb1\x44\x04\xce\xd9\xaa\x6f\xd7\xaa\xd6\xe1\xf7\x2c\xaf\x7c\x48\x5b\x60\xb3\x79\x27\xf3\xc9\x5c\x02\x68\xe7\x61\x60\x73\x13\xe4\x9f\x78\x50\x32\x14\x6d\x72\x8a\x43\x31\x49\x51\x6b\xc8\x34\x66\x4b\x04\x01\x78\xa0\x03\x66\x03\xa7\x4c\x62\x4a\x0c\x64\x29\xab\xcf\x81\x1d\x18\xa2\x58\x43\x4e\x46\xd1\xa9\xfe\x9c\x12\xa2\xcd\x84\xe0\x70\xa1\x39\xa3\x01\xf8\xf4\x84\xe7\x95\x58\xea\x56\xdd\x10\x2f\x31\x47\xe7\x8c\xc8\x26\x38\xfc\xb1\x0d\x6c\xf5\x28\xde\x2f\x9f\x93\x8f\x46\xc9\x66\xc0\x73\x78\xb3\x05\x56\x5b\xfa\xf5\x2c\x55\x04\xa5\x62\xe0\xc6\x1c\xda\xac\x58\x1d\x39\x58\x84\xd6\x36\x2d\x99\x5a\xd4\xb2\xca\x49\xa1\xe0\x87\x78\xdf\x21\x93\x01\x1f\x56\x32\xe7\xaa\xf9\xd4\x16\x9b\x23\xeb\xc3\xc7\x27\x96\x84\x13\x02\x5f\x1f\x52\x98\xb2\x37\x82\xbd\x3f\x38\x59\xf2\x92\x2b\x5a\xb9\x6c\xc4\x29\x4e\xdb\x3b\x06\x9b\x53\x80\x13\x29\x87\xca\x22\x30\x09\xe3\x26\xd7\xf4\x48\x43\x0f\xe1\x76\xa6\xe1\x19\x38\x97\x50\x33\x33\x84\xeb\xff\x86\x9e\x09\x00\x2e\x7f\x6b\x74\x3f\x66\x6d\x5b\x28\x48\x4f\xe5\xde\x10\x11\xa8\xc5\xa2\x55\xd6\xea\x16\x57\xe7\x91\x58\xd6\x04\x49\x92\x2b\x94\x7e\xa6\x00\x6d\x42\x09\x6f\x59\x04\x31\x0e\x27\x2a\x32\x56\x0e\x7e\x7c\x8b\x49\x4c\xb6\xf3\xa8\xcf\x7c\x29\x7e\xc8\x64\xb0\xc2\xf8\x4a\x9b\xa4\x6d\x6d\x65\xfd\x9d\xd6\xac\xf2\xa7\xa8\x8b\x0c\x05\xa7\x51\xa1\x98\x86\xda\xba\xcd\x53\x7b\xd8\x70\xe7\x89\x82\xc2\x35\xf6\xdb\x13\x32\x0b\x57\x95\xb6\x73\x70\x4e\x81\xfc\x51\x5e\x9e\x63\x28\xfa\x3a\xfd\x8b\x80\x7a\xf6\x27\xae\x94\x9f\x43\x1f\xfa\x4f\x5f\x7c\x5a\x4f\x52\xcd\x3f\x70\xe3\x15\xac\x82\x6f\x51\x59\x30\xaa\x8f\x8a\x2f\xb6\xa0\xcf\x7a\x6c\x71\x23\x96\x05\x97\x20\x62\xe0\x9e\xfa\x16\x02\x9c\x2a\x22\xc5\x40\xc7\xf2\x7a\xcb\x87\x87\xdc\xa2\x56\xe1\x69\x48\x9b\xc6\x31\x10\x0a\xd7\xb0\x05\x00\x50\x59\xcc\x79\x9c\x5a\xf5\x24\x0f\x49\x2c\x3f\x20\x03\xf3\x48\x4a\xd6\x41\x99\x7c\x4c\xea\x07\xa9\x52\xa7\x73\x11\xaf\x75\x94\x63\x21\xaa\x64\x93\x55\x79\xf8\xbf\x2c\xfd\x27\x78\xcc\xf2\x81\x68\x34\x58\x06\x3d\xed\x0b\x4e\x4e\xfd\xdf\x98\xa9\xce\x51\xaa\x0a\x3e\x9f\xd6\x9a\x3b\x45\x46\x21\x72\xfc\xbb\xf9\x4c\x6b\x5c\x64\xe5\x46\xb3\x26\xfe\xf3\xda\xc1\x30\x0a\xae\x4a\xf9\x21\xe9\x5e\x2c\x5d\xfb\x83\xee\xd2\xb0\x19\xf5\xc2\xbe\x9a\x39\xfb\x7a\x02\x6c\xbf\x11\xb7\xbf\xbc\x00\x50\xd0\xbe\x60\xaa\x0a\xdc\xe4\x85\x4a\x01\x84\xde\x4c\x88\x40\x7f\xe6\x67\x28\x77\x2b\x61\x8d\xda\x74\x36\xb7\x2c\x60\x78\x0e\x3d\xac\x0a\x4a\xa3\x4b\x75\xb6\x6e\x32\xaf\x34\x7a\x93\x4a\x91\xad\xf2\xa5\x88\x75\x34\x03\x9a\xde\x69\x64\x27\xdd\x2f\x9a\xac\x35\xf0\xd8\x5c\x63\xcd\x07\x51\xd8\x20\x98\x07\xab\x04\xf4\x3e\x43\x7a\x40\x23\xfa\xe2\x31\x96\x36\x3f\xa0\xa0\xea\x66\xf1\x4a\x8e\xe0\x5c\x1b\x41\x0a\xf9\xe1\x72\x9f\x78\xf5\x31\xc8\x58\xc3\x6a\x1c\x01\xc8\x55\xad\xde
\x79\x25\x57\x79\x76\xe5\x8e\xac\xb2\x1c\x48\x8f\x5f\xa3\x03\xfa\x20\x7b\xc9\xc1\xca\xdb\x59\x48\xe0\xb0\xa0\x09\x14\x62\x2a\xed\xa4\xd6\x3e\xa9\x8b\xd7\x70\x02\x9a\x6e\x78\x38\xae\xb3\xd7\xaa\x43\x8c\x6f\x67\x83\x1e\xe0\x33\x69\x61\xd4\xd5\xac\x95\x22\x8f\xb6\xb6\x3e\xe2\x3e\x2d\x92\xc2\x5a\x49\x30\x25\x3e\x1a\x24\xa8\xf4\x8f\x35\xae\x18\xbc\x59\xfa\xa9\xca\x30\x80\x5a\x3d\xca\x5b\xbc\xfc\xb3\x44\x1b\x22\x62\x5b\x5b\x4f\x74\xa1\xcc\x3e\xae\xc6\xec\xa6\x9c\x3d\xa8\xbb\x97\x8f\xb7\x8a\x68\x59\x00\xf1\x82\x92\x81\x39\x1b\x45\xa3\x27\xd4\x64\x51\x5c\x97\x0b\x4c\xba\x8b\xe6\xcb\x0f\x83\x0b\xa0\x45\x2a\xfe\xb6\x78\x6b\x86\x55\x34\x57\x9f\x98\xa4\x24\x37\xea\x87\x84\xf7\x0c\x94\x21\x42\x5a\x44\x29\x1c\x2b\xee\x80\xd8\xca\x3c\xe2\x73\x72\xc4\x25\xb6\x77\x7a\xa3\x0f\x4c\x99\xc2\x97\x62\x3e\xfd\x99\xc2\xf6\x2a\xa6\xb4\xae\xe9\x80\x73\xb8\xef\x82\x89\x6a\xda\xe2\x44\xf2\x7a\x55\xe3\x03\x71\x13\xc4\xcc\x97\xa4\xbb\x85\xa1\xcd\x0d\xc0\xe8\x27\x1c\xc9\x73\x27\xe5\x77\xfa\x1e\x18\x07\x04\x0f\x2f\x01\x8d\xf7\xfd\xed\xfe\x00\x72\x4d\x09\x6a\x6d\xa6\x40\x39\x7b\x5b\xab\xc2\x38\xd4\x4d\x1b\x62\x77\x9d\x3a\x8f\x3a\x4c\x70\x88\x7b\xfb\x78\x0c\x56\x5b\xb4\x99\x51\xff\x90\xb3\x58\x1b\x67\xf4\x54\xe5\x5d\x42\x91\x3a\x91\x9b\x59\x29\xb6\x70\x25\x8f\xd3\x5c\xc7\xee\xe0\x54\x6c\xe5\xcb\xfa\xc4\x38\x05\x57\x3b\x80\x9c\xc5\xc0\x17\x1d\xbb\x80\x14\xb1\x42\xf5\xf9\x29\x79\x1f\xd4\x86\x9e\x98\x7c\x6c\x83\xb8\x9c\x1f\xc5\x4c\x64\x72\x04\xcb\x00\x61\x9d\xfd\x62\x1b\x60\x1a\xb8\x3e\x0e\x0f\xc9\x99\xb5\xdb\x56\x98\x35\x30\x75\xc4\x9a\x41\x89\xc5\x28\x77\xd9\x73\xbc\x3f\x93\xe5\x73\x14\xb3\xd3\x21\x41\x5d\x49\x18\xd1\x44\x22\xa0\xaf\x17\x0c\x10\xd5\x63\x77\x29\xee\x3c\x86\x47\xa0\x61\x56\xb7\xa4\x3b\x3b\x5f\x45\x83\x5c\x7d\xd9\x27\x94\xc6\x1f\x94\xdf\x39\xe7\xc5\x57\x3f\xe2\xd0\xd8\xf8\x46\x72\xce\x5e\x25\xad\x28\x39\x96\xf7\xe4\xfb\xdd\x8a\xc4\x47\x68\x8f\x67\xa7\x68\x2d\xdb\x82\x33\x25\x77\x26\x50\xe4\x01\x9a\xf6\x64\x97\x9f\x3e\x65\x04\x1d\xce\x18\x1c\xe5\xcd\x5e\xaa\xfc\xb7\x20\xd9\x5a\x86\xea\x82\xb5\x16\x2b\xe7\xac\x43\x89\xd1\x89\x32\xcf\x78\x32\xc7\x43\x20\xb8\x18\x79\x01\x12\x99\x63\xec\xdb\xec\x43\xaa\x59\x8d\x9a\xc3\x2f\xa9\x76\x4a\x2f\x3d\x31\x84\x79\x72\x39\x12\x39\x28\x2e\xd4\x34\xa1\x57\x9c\x97\x8c\x94\x83\x3b\x9c\xcd\xbd\xa5\x09\x70\x1d\x18\x3b\xc8\x5c\xa1\x5a\x1b\x8b\xbe\xab\x98\xf0\x93\x62\x9c\xb3\x4e\x34\xb5\x17\xee\x83\xcd\x7f\x47\x1b\x1f\xf2\x30\xb4\x82\x3a\x1f\xde\xeb\x7c\x70\x3c\x84\x1c\x05\x6b\xe9\x47\x48\x8f\x06\x25\x94\xde\x94\xa8\xfe\x00\xce\x08\x2e\xed\x3d\xda\x11\x5c\x0c\x6e\x95\xc0\xc3\xab\xb7\xae\x17\xe1\x52\xd4\x24\x33\x2d\xb1\xfd\x98\xae\x0d\x91\x01\x90\x0c\x4d\x06\x15\xd5\xe6\x34\xe0\x5f\xb7\x5d\x93\x0b\x1b\x5a\x75\x3c\x43\x57\xec\x8f\xca\x1b\x7a\xa2\xf5\x94\x94\xc6\x21\x84\xd6\xd6\x5b\x99\x76\xdb\x4e\x5c\x60\x95\xa3\x02\xcd\x15\x51\x5e\x30\xa8\x94\x3a\x52\xcc\x9c\x98\x92\x82\x20\xd3\x9f\xc1\xe2\xf6\xe6\x2f\x14\x3b\xe2\x50\xb0\x51\x85\x21\x32\x39\x2a\x1b\x6b\x34\xf0\x78\x80\xea\x64\xa5\x37\xbd\x47\xe6\x07\xe5\x8a\x36\x56\xbf\xc5\x12\x21\x73\x37\x4d\x6e\xfa\xc1\x8e\x99\x68\xb7\x62\x3e\x0f\xeb\x99\x2e\x9c\xa6\xa4\x2b\xec\x99\xa6\x93\x3a\x1a\x4b\x64\x72\xa0\xd9\x58\xd9\xd2\x7a\xb0\x75\x40\xab\x4e\x76\xef\x17\x17\x6e\x8c\x72\xa4\x41\x28\xa7\xae\x4c\xe6\x64\x02\x54\x45\x72\xd8\xc8\x00\x58\xbd\x3a\xf9\xe3\xac\x7c\x65\x1d\x3d\x90\xf7\x24\x1f\xbd\xf5\x1c\x5a\x3f\xde\xb8\xb1\x99\x58\xb1\x43\x5e\x12\x48\xb3\xfc\xc9\x0a\x43\x5c\x0e\x12\xe4\x97\xe0\x73\xd5\xe7\xaf\x8e\xbc\x36\xe3\x0c\x31\xe5\x8b\xad\x83\x43\xfe\x78\xc2\x9c\x93\xb7\x43\xb1\xaa\xf2\x6f\x86\x0f\xf8\x16\x66\x41\x7e\xd5\x38\x39\x6a\xbc\x95\x4
c\x1d\x95\x5a\xe2\xd4\x39\x48\xfe\xed\x2c\x4d\x34\x1a\xde\xb0\xc4\x57\xa0\x56\xf6\xca\x01\x8c\x88\xa6\x72\xdf\x59\xf1\x19\x3a\xe5\x3f\x62\xa8\x92\x83\xb6\x04\xd1\x55\x5d\x81\x8f\xba\x48\xe2\x00\x7f\x3c\xbb\xc2\xff\xcc\x2c\x77\x0e\x57\xbf\x01\x7b\xc5\xf7\x57\x09\x22\x2e\xae\x04\x96\x91\x82\x5a\x40\xfb\x66\x59\x43\x92\xf8\x93\x1a\xa9\xc1\x1c\xac\x22\x60\x20\x80\x52\xe3\x05\x50\xdd\x95\x2c\xb0\xbe\xc2\xf7\x54\xef\x43\xac\x35\x4a\xbe\x42\x3c\x61\x0c\x1b\x46\x5b\x9f\x97\x91\x50\x26\xc9\x54\x2a\xcf\xe1\xe5\xc5\xf9\xbd\x86\x08\xf7\xb7\x33\xe9\xdf\x13\x3b\x7e\xb6\x82\x63\x21\xf1\xf0\x1f\x3e\xd0\x40\x0c\x4d\x32\x39\x71\x58\x64\x98\x1c\x98\x30\x45\x1d\xd6\xca\x03\x0b\xda\xcf\xe0\x7d\x4b\x2a\x22\xd3\x60\x1f\x82\x55\x56\xaa\xa7\x11\x06\xd7\x2e\x92\xe1\xea\x22\x28\x1a\xd3\x88\x17\x6a\xe3\x2a\x3d\xe2\x5b\xac\x91\xe9\xc8\xeb\x3a\x69\x85\x86\xf7\xc9\xbe\x71\x47\x9c\xba\x7b\x94\x63\x9a\xac\xe1\x48\x54\x13\x10\x1b\x89\xd3\x39\x87\x8e\x54\x6b\x24\x05\x3a\x83\x02\x02\xfb\xf9\x09\xe8\xc9\xb9\xa2\x87\x85\xa0\x18\xa9\xc8\xc6\x0b\x50\x0a\xed\x72\xbd\x90\x7e\x6e\xe1\x97\x0d\x1a\xf9\xe2\xab\x13\x47\x7e\x62\xd8\xbc\xca\xd4\xe1\xbf\x19\x47\x56\x92\x0b\x40\x9c\x9c\x9f\x81\x80\x1f\xe3\x7c\x27\x09\x62\x3a\x63\x7e\x03\xca\x82\xb6\x95\x60\xde\x43\xf0\xf1\xc8\xaf\x70\x2a\x9e\xfb\x00\x1d\x35\x8c\x40\x6c\x60\x21\x9c\xc0\xdf\xd8\xed\x0e\xa1\xab\x87\xc1\x04\x07\xd1\x53\x0a\x73\x9e\x81\x0d\x31\xc7\xbc\x17\x72\x26\x51\xdf\xd8\x1d\xd8\xd8\x8a\xac\xc2\x9e\xc8\x85\x68\x1c\xda\xef\xfa\x7a\x2b\x07\x0b\x4a\x92\xce\xe9\x14\xd1\xe7\xf8\x1a\x17\x9e\x36\x20\x90\x21\xab\x56\x7d\x05\xee\x69\xfd\x0f\x56\xa7\x99\x81\x16\x7c\xd8\x07\xa1\x27\x60\x54\x88\xbd\x03\x3c\xfc\x1d\x37\x66\x9d\x8c\x05\xdb\x3b\xba\x13\x0c\x7f\xa3\xb5\xc4\x68\x4c\x76\x03\xea\x90\x1f\xc8\xed\xd6\x72\x81\xc3\xa7\xfd\x54\xf6\xfa\xd1\xeb\xf4\x18\x33\x89\x6a\x66\x47\x51\x46\xef\xac\x0e\xfe\xb3\xd5\xe8\x07\x55\x82\xbc\x68\x8d\xba\xf0\xe9\x55\x8f\xdd\xe7\x06\xec\x71\x38\x90\x93\xec\x9c\x6d\x4c\x13\x05\x72\x8b\x12\x5e\x3e\xd8\xe0\xa7\xf7\xca\xac\x88\x7c\x95\x26\x7a\x18\x3b\x31\x6c\xd8\xd4\x88\x82\x65\x54\x57\x0e\x9e\x8a\x4f\x1b\x0f\x1f\xb6\xf9\x05\x42\x93\xc6\x05\x45\xe7\x8a\x77\x36\x6b\x8f\x0a\xc8\x5c\x31\x21\x49\x6b\xbf\xfa\x4f\xb8\xb6\x88\x3e\x3a\x4b\x62\x6b\x79\x8d\x29\xee\xef\xe0\x1d\x5b\x3e\x64\x3d\xd5\x9b\xf8\x2c\xe7\x47\x89\x35\x9d\x19\x68\x88\x51\xfa\xd3\x37\x89\xd2\x8f\xac\x33\xa8\xf6\xbb\x26\x4b\x98\x53\xb5\x67\xa0\x6f\x1a\x04\xb5\x42\x66\x01\x46\x24\xac\xea\x8e\xed\x9d\xe3\xc8\x5d\x3e\xbb\xde\x61\x3b\x59\x2e\xd9\xbc\xc4\xd9\x05\x14\x16\x9f\x2e\xb7\x00\x22\xd3\x45\x1a\xe2\xa3\x86\xf5\xb2\xc4\x5c\x86\xae\xf6\x06\xa6\xd8\x8c\x3e\x40\x33\xc9\x13\x48\x63\x07\xb3\xac\x4f\xf0\x29\x86\x9c\xe6\xf0\x1e\xd3\x27\x22\xdb\x28\x5e\xd6\xb6\x7e\x81\x0d\xad\x2b\x88\xe6\x67\xad\xc1\x90\x0c\x55\x15\xe6\xb4\x2f\xe2\x31\x5c\xc2\xf5\x44\x97\xf7\x15\x3d\x97\x47\xeb\x0c\x86\x64\xd6\x00\xed\x5a\xc8\x2f\x07\xad\xf5\x5c\x38\x74\xa8\x06\xcb\x3f\x38\x52\xa9\x32\xe3\x75\x54\xa1\xc8\x64\xd9\x86\xe9\x5a\x6c\x3c\xd9\x01\xbb\xb3\x71\xc2\x80\x6a\x3e\xbb\xaf\x13\x47\x05\xea\x4c\xf9\x0a\x21\x83\xe0\x3a\x79\x87\x82\x17\x83\x95\x57\x04\xdb\x84\xd2\x41\x22\x2e\x46\x1a\xa8\x42\x7c\x9b\x33\x68\x0d\x12\xec\xd3\x30\xbd\xef\xd3\x2f\x29\x69\x11\xa9\x3a\xe5\x3c\x0f\x5a\x6f\x8a\xf0\x51\x15\x12\x90\xf6\x15\x97\xf9\xfa\x8c\xce\x0e\x98\x29\x05\x65\x69\x0d\x74\x8d\xe2\x6d\x52\x2b\x76\x14\xb4\xa8\x87\x96\x84\x85\xfa\xd7\x1c\x4e\xb2\xf9\xbc\xcd\x8d\x48\x9a\xfc\xf2\xc4\x10\xf7\x3b\xb0\x7a\x0b\x9f\xd1\x02\x78\x24\x3a\x7f\xb2\x2b\x2a\xb9\x70\x98\x21\xab\x8b\x4a\xe5\xa7\x79\x34\xed\xc8\x7a\x3f\x8c\xf9\xb2\x91\x12\x36\x6f\x1b\x
f2\xe6\x09\x72\x3b\xde\x63\x43\x56\x5d\x18\x3a\xa1\x81\xfa\xb2\x4a\xf3\xf9\x32\x63\x2e\x8c\x7a\x79\x69\xc7\x1e\x04\xeb\xb4\x55\x9f\x82\x80\xf7\x2f\x6d\xde\x56\x89\x8a\xd7\x0f\x28\x5a\x97\x5a\x7b\xbb\xa9\xaa\xb4\xb1\x7a\x66\x40\x94\x5f\x73\x00\xfc\xf7\x5c\x31\x73\x43\x12\xa2\xc5\xfe\xba\x1e\x21\x92\x61\x02\xa3\x81\xbe\x81\xee\x5d\xbd\x0d\xfb\x38\xfc\x9c\x1d\xcc\xbb\x9f\x5e\x7a\x5e\x0a\x78\x8e\x4b\x1c\x23\x5d\x30\x7a\xca\xa0\x69\x06\xc6\x83\x86\x3e\x51\x68\x6f\x4a\xa6\x15\x6c\xc2\xd1\xca\x2b\xb2\x0b\xa9\xd2\x3d\x00\x4d\xdf\x1f\xf1\x4a\x9c\x5c\x53\x8d\x2e\xe1\xa4\xbe\xa6\x61\x33\xb8\x21\x62\x32\x01\xb5\x49\xa4\x32\x7a\x16\xac\x91\x89\xd4\x2f\x09\x66\x0e\xb0\x2a\x74\x78\x74\x42\x26\x55\xbd\x8d\xac\x86\x99\x3a\x2b\x72\x4c\x2e\x8c\x50\x0c\x2a\xe7\x0e\x84\x3a\xe6\x2d\x9c\xc3\xaa\x79\x63\x7a\x5c\x59\xd9\xcd\x61\x88\x18\xec\x18\xd1\xb5\x57\x06\x55\x3f\xd4\xd7\xdd\xd2\xf0\x95\x1c\x9a\x63\x93\x46\x32\x58\x2e\x36\x2f\xcd\x8e\xd6\x3a\xd4\x73\x5b\xb5\x3e\x12\x44\xd4\xb9\x7e\x38\x36\x66\xf6\xea\xfe\x1c\x06\xc9\x47\x87\x31\x01\x78\x92\x34\x94\x25\xbe\x32\xc4\x1b\x95\x06\x0b\x9b\x02\x1d\xe8\xb7\x8f\x7e\xbc\xad\xb8\x41\x3a\x87\x16\xf3\x9b\xe3\x3a\x96\x8f\xe0\xea\x89\x47\x80\x54\xc5\xa5\x85\x01\xa4\xdf\xdf\xe6\x7c\xad\xd0\x41\xd7\x42\x63\x23\x0a\x48\xc6\x17\xe9\xdb\xa7\xf7\x18\xca\x21\x02\x8f\xa9\xd0\x03\xb2\xed\x62\xd5\x4b\x6a\xff\xa2\xcb\xe5\xfa\x0e\xc8\x5f\xc4\x94\x6b\x6c\xc0\xc9\x6d\xed\x00\xa4\x60\xa0\x0d\x88\x38\x55\xfd\x29\xca\x99\x6e\xd5\x23\x06\xbd\x29\x79\xd5\x9a\xc3\x94\xb7\x42\xc1\x2e\xb9\x66\x54\xeb\xbd\x5b\x81\xc4\x93\xce\x0b\x32\x52\x8f\x1c\x0e\xd4\x52\x34\xd2\xc0\x77\x34\x15\x4d\xa8\x9b\x56\xd8\x83\x5a\x0c\xe5\xab\xdb\x57\xcc\x5d\xdc\x9a\x51\x87\xd5\x74\x72\xa4\xe8\xc1\x89\x48\xdd\x3e\xd9\x6e\x5d\x1d\xe1\xeb\x31\x5b\x1f\x07\x20\x08\xc7\xe0\x72\xa1\x74\xbf\x6a\x2a\xe1\x10\xee\xdd\x4d\xbd\xff\xd0\x8a\x92\xd1\xfa\x2d\xb6\x3a\x59\x70\xf5\x2c\xce\x44\x3f\xa9\x31\x1e\x9d\x4b\xa6\xcd\x89\x38\x41\xa4\xa6\x29\xd3\x18\x23\x62\xd8\xcc\x68\x49\x98\x93\x42\x25\x8a\xf1\x01\x6b\x3a\x36\x8d\x3e\xf3\x43\xb0\x30\xfd\xe3\xd0\x8d\x91\xbb\xc7\xd1\xd0\xa1\xb7\x9c\x3e\x56\x2c\x85\x19\x95\x5f\x7a\x00\xb9\xbf\x5b\xa4\x3d\x23\x00\xfa\x4a\xcc\xaa\xb5\x1f\xf3\xfe\x7d\x5a\xb0\x6b\xec\x88\x05\xfb\xd1\x51\x82\xa5\x70\x56\xed\x93\x77\x01\xc9\xd9\xf7\xd5\x54\xb8\xfd\x2b\x4e\x8f\xa4\x40\x02\x38\x4d\x63\x39\x9c\x75\x00\x1c\xe5\x90\x83\xf4\x30\xe5\x22\xc3\x38\xf4\x9c\x90\x4e\x97\x5d\x72\xd4\xcb\xc8\x7f\x86\xa4\xca\x6a\xc2\xbb\x99\xcf\x25\x6c\x6f\x35\x2c\x56\xdb\xc9\x18\x28\xb8\x17\x16\x56\xb1\x0e\xb3\xa4\xa1\xa7\xa5\x3e\xb8\x7a\xea\x3b\xe8\x63\x12\x9e\x03\xa2\xd4\x56\x51\x28\x10\x2a\xeb\xd5\x87\x12\xce\xd8\x83\x3e\xc8\x94\xa4\xae\x8b\x2b\x42\xab\xda\x87\x90\x1d\x35\x38\x48\xe8\x0b\xd5\xa7\xa1\x18\x4a\xf9\xeb\x58\xe1\xdf\xf0\xd9\xce\xec\x0b\xb0\xa3\x6b\xca\x59\x4c\xa8\xff\xd5\x4e\x46\xb0\x2d\x4d\xea\xe7\x41\xb0\x81\x23\x40\x15\x89\xf2\xc4\xb5\x76\x0e\x4f\x91\x8f\x64\xcd\xcb\xa4\x13\x16\x48\x9d\xe8\xe4\x2a\x1e\x7f\x30\x4d\x4e\x9d\x28\x8a\x73\xfe\xd2\x1a\x9d\x5d\xd0\xdd\x8f\xb1\xce\x75\xf2\xcc\xce\x74\xe0\xb8\x27\x18\x79\xeb\xf0\xe9\xf6\x0d\xe5\x4c\x92\xd6\xfb\x52\x70\xe9\x56\xd4\xe0\x56\x76\xcc\xac\xda\x54\x6b\xd1\xf8\x1a\x07\x51\x53\xb3\x12\x5c\x8c\x4c\x55\xa7\xe8\x6d\x15\xa8\x63\x3c\x20\x28\x12\x79\x69\xec\xa6\xd4\x21\x31\xe7\xba\x65\xbc\x08\x7d\xd4\x1a\x21\x00\x87\xbd\x63\x7d\x7d\x02\x14\x31\xc5\xbf\x78\xba\x09\xb3\xf1\x96\xc0\xdf\x6b\x96\x83\x86\xe5\xe4\x16\x4a\x71\x10\xfb\xcf\xea\xdc\x4c\x38\x91\x5b\x89\xdc\x0f\x4b\x72\xbc\x4b\xf9\x18\xa2\xb2\xea\x43\x01\x5f\xcb\xd3\xb3\xbf\xde\xc6\xe4\xe9\xc1\x2f\x63\xa2\x45\x99\x2a\x9e\xe0\
x00\xe6\x72\x30\xe1\xd3\x7e\x31\xe0\xa4\xe2\x1d\x21\x4b\x75\x2e\xce\xf5\xc8\x3a\x0f\xca\xbe\xb2\xf2\xea\x03\xc5\x08\x0b\xe3\x87\x3f\xde\x21\x87\xfa\x13\xd9\x2a\xa6\xa3\xca\xfc\x1e\x95\x0e\x00\xd1\x67\x5e\xaf\xbd\xdd\x54\xaa\xf6\x06\x37\x4a\x53\x89\x37\x39\xc9\x26\x22\xd1\xbb\x9f\x0b\x8a\xc6\xb2\x5f\x99\x3d\x85\xaa\xd7\x06\x8e\xbb\xcf\xd9\x56\x6e\x10\x81\xd7\x8b\xd1\x45\xf7\x32\xb0\xab\x89\x82\xbb\x39\x06\x8f\xa7\x7a\x7c\x49\xa8\xec\xc9\x9b\x02\x88\x45\xbf\xe1\x63\x4f\x21\xd7\xc4\x2c\xd1\xed\xb9\xcf\xed\x0d\xd3\xda\x48\xf0\x00\x0f\x21\xfc\xb2\x2f\xcb\x87\x66\x2a\x8f\x1c\xa5\x37\xa5\x47\x82\xbb\xd0\x06\xc1\x2e\x9e\x88\x38\x0e\xe4\xec\xa7\x57\x64\x21\x43\xe5\x4f\xb4\x33\xe6\xdd\x06\x70\xa6\x58\xb5\xeb\xb9\x47\x62\xd4\x3c\xd7\xe2\x44\xff\x0e\x65\x60\x55\xb1\xbb\xe8\xcf\x64\x58\x91\x1e\x36\x85\xed\x2c\x12\x90\x55\x57\xc1\x9c\x29\x39\xaf\x09\xb3\x2a\x63\x5a\xfc\xc8\x91\x27\xa9\x7c\xec\xfe\x6b\x26\x41\xd5\x8d\xd9\x76\xa8\xef\xba\xb8\x79\xa8\xc5\x27\x04\x87\x09\xfd\x29\xa0\x65\x3b\x47\xfc\x52\x22\x31\x08\x51\x72\x6a\xd0\x48\x3c\xae\xc4\xa9\x90\x79\x72\x8a\xc2\x20\x09\x1e\xb3\x69\x3b\xdb\x57\x4c\x1e\x25\x23\x32\x9b\x61\x89\x65\x25\x0b\xef\xa2\x02\xa6\x3c\x26\xee\xdf\xce\xd1\x23\x03\xb6\x1c\x57\x5e\x07\xb2\x62\x50\x48\x69\x2e\x74\xac\xb6\x2b\x7e\xbf\x7a\x50\xd2\x07\x09\xe0\x13\x3b\x15\xb7\x9a\x51\xd9\x45\x5f\x7f\x41\x0d\x05\x06\x01\xb7\x01\x5a\xbc\x19\xa6\x6c\x0e\xde\x64\xd1\x4f\x12\x64\x26\x4b\xbc\x27\x9d\x44\x3b\x84\x4b\x83\x73\xe1\xc5\xb7\xbc\x50\x6e\xd2\xb9\x31\x9c\x4d\x15\xa8\x20\x26\xef\xfe\xb9\x31\xbd\x53\x1b\x8d\x91\x71\xfd\x9e\x29\x00\x31\x94\xec\xdc\x89\x01\x86\x85\xa3\x98\x0a\xa3\xe0\x13\x6c\x8e\xee\x38\x61\xed\x28\xbd\x68\x08\xee\x33\x84\x64\xa3\x9e\x5f\x7d\x5b\xe3\x6b\xbc\xa9\xf4\x55\x24\x15\xb6\x3c\xa1\x9f\xa8\x26\x14\x9f\x9c\x4c\xb8\x94\x1e\x31\x8f\x59\x09\xbb\x6b\x68\x5b\x0b\x81\x7d\x27\xfd\x92\x1f\xa1\xc1\xa0\x35\x88\xfb\x0c\xbd\x1f\xdd\x6d\xdd\xa1\x3e\x8d\x91\x1a\x6e\xe6\x3b\x55\x8f\x0a\xb1\xd3\xe8\xc1\xec\x22\x09\x6f\xc0\x80\x39\x24\xa2\x73\xaf\xd1\x5b\x8d\x9f\xb4\xca\x63\xfc\x70\xf2\x55\xd0\xaa\x3a\x4b\xea\xa0\x84\x86\x69\xe9\x1a\x2f\xd9\x2a\x56\x56\x7a\x34\x89\xb5\x12\x5b\x1f\x1d\x1f\xd6\x85\x85\xa0\x18\x10\x4f\x63\x67\x4e\x0b\xa0\x09\x48\xde\xd8\xd8\xfd\xe2\xe2\xb1\x98\xea\x02\x87\xd8\xe6\x1e\xf2\x63\x64\x13\xf8\xb2\xee\xae\xc4\x57\x7e\x21\x96\x57\x9a\x0f\xe2\x32\xfa\xad\x9d\xe9\x4d\x5c\x24\x72\x3e\xc2\xb5\xb0\xb7\x24\xa9\xfc\xc8\x09\x29\xc4\x6e\x64\xbd\x80\xbe\xb9\x58\x8d\x0d\xd1\x71\x6d\x7d\xce\x54\x01\x4f\x52\xdf\x8b\x79\x18\x85\x16\xc4\x4c\x8c\x86\x0d\x2f\x71\x28\x98\xee\x24\xf0\x02\x6f\xa6\x8a\xe3\x08\xa4\x2b\xae\x05\x86\xe4\x93\x73\xd9\xa9\x68\xd0\x7e\xc1\x20\x6b\x61\xb4\x47\x9a\xbd\xa9\xfd\xa6\xe9\xaa\x28\x91\x5e\xe4\x8a\x5d\x91\xdc\xc2\x22\xae\xc0\x4c\x83\x4d\xa9\x27\xef\xb0\x71\x02\x14\x67\xc2\x1e\x9e\xf1\xab\x4a\x2b\x75\x07\xda\xe8\x63\x4d\x3e\x7a\x6b\x8f\x23\x6e\x30\x99\xce\xcd\xf2\xb9\x15\xa1\xe7\x5a\x96\x02\x98\xcd\xa7\x50\x8f\x61\x3d\x72\x35\x59\x4d\x91\x5f\xda\x7d\x54\x96\x13\x10\x16\xd2\xd7\x97\x31\x35\xd4\x9a\x8c\x33\xc7\x75\x54\x5c\x1e\x03\xf9\x75\x33\xed\xe6\x7c\x5d\xa0\xde\x29\x12\xe8\x17\xa2\x97\xe4\x02\x24\xe2\x65\x6d\xf3\x5a\x96\xc2\x22\x11\x62\xa1\x95\x64\x0d\x14\x9f\xa3\xb7\x7b\x66\x17\xd7\xa9\x63\xf0\xa0\x4a\x31\xe0\x92\x7e\xd5\x7c\x0e\x3d\x72\xe6\xad\x69\x1b\x0b\xdf\x65\x1f\xca\x73\xaa\xcd\xd8\xb0\x77\x5b\x27\xce\xb5\x75\xcf\x55\x37\x39\x07\x81\x1a\x65\xff\xec\xa0\xbd\x5c\x5c\xf4\xa6\x6f\xb1\x81\x40\xcc\x40\x5c\xb2\x42\x47\x62\x51\xcd\x10\xc5\xc4\xb5\xd0\xa4\x3c\x95\x9f\x82\x23\xf3\xfc\x2a\x5c\x5d\x11\xa9\x89\x9a\x08\xc3\xf8\x20\x62\xa9\x9c\x2e\x5c\x6c
\x6d\x66\xea\x0e\x49\xf5\x8a\xee\x6d\x8d\x92\x0d\xb1\xab\x14\xf3\x52\x97\x3c\x06\x4c\xdd\x43\xbe\x52\xce\x95\xcd\x1c\xb2\xd4\x15\x7d\x86\x7a\x3d\x43\xa7\xda\x97\xf6\x05\x1b\x74\x74\x29\xc1\xe2\xc1\x01\x57\xfc\xc2\xbd\xf6\x19\x94\x14\x79\xb3\x78\xd4\x25\xa5\xde\x00\x07\x64\x30\x7d\x76\x88\xc1\x6c\x85\x74\x80\xed\xcf\x84\x87\xcd\x0e\xe6\x49\x9e\x50\x80\xeb\xb2\xc1\x06\x62\x47\x35\xb2\x59\x59\x43\x7b\xf6\x8d\xf6\xea\xe9\x7a\x52\xa6\x2e\xa3\x4b\x32\xc4\x3a\x16\x50\xd2\xd8\xf2\x9c\x6e\x8a\xdf\x01\x9a\x37\xd1\x20\x06\xab\x63\x34\x87\x36\xff\xd0\x33\x35\xd4\xa7\xfa\x0a\x8c\x80\x73\xe0\x19\xb6\x20\xb3\x34\x48\xd3\xc7\xf8\x66\x82\x61\x26\x87\xc1\x93\x2c\xdf\x61\xb7\x4b\x70\x4d\x46\x8d\x8a\xc0\xe8\x71\x16\xed\x6d\xc3\xa1\x40\x76\x39\xfe\x50\x7a\xe5\x6a\xf8\xcc\xf3\x56\x76\xe0\xa6\xd1\x07\xf2\x69\x17\xcf\x98\x69\xc1\xac\x45\xd3\x0f\xf2\x61\x8e\x91\xfd\xd6\x70\x79\x64\xd1\xf3\x3f\x9c\x7f\x67\x7f\xfb\x14\x5d\x53\x4d\x87\xb5\x80\xc2\x92\x49\x6d\x0d\x95\xe7\xcd\xa5\x17\x55\x6f\xc5\x76\x35\x77\xff\x85\x9f\x88\x9a\xed\x26\x0e\xfc\xc3\xc4\xde\x5a\x38\x12\x4e\x75\x3a\x78\xed\x9e\x20\x1f\x98\x4f\xf7\xde\x1a\x37\x21\xe7\x68\x9c\x8c\x99\xae\x6f\x51\xef\xe8\x43\x7f\xad\x26\x07\xc6\xa2\x67\x6f\xb5\x3b\x1b\xc7\x7d\x9a\x95\xc7\xf0\x5f\x07\xdb\x24\xee\x76\x52\x66\xe2\x6c\x12\x71\x6a\x3e\xcb\x57\x05\x64\x08\xf6\x64\xad\xb7\x60\xee\x9b\xea\x54\x44\xff\x8a\x22\xe1\x6f\x47\x91\x55\x71\xee\xe3\xeb\xba\x01\xd5\x71\x85\xdc\xc4\x02\x28\xc7\x26\xa3\xf8\xb5\x47\x97\xd2\x6d\x63\x36\x2c\x6a\x59\x4a\xb3\xf2\x01\x14\x79\xc8\xf9\xe7\xe0\x83\x69\x76\xea\x26\xee\x85\x92\xcd\xc9\xe7\xb2\xfd\xa4\x83\xf4\xce\x4d\x19\x36\x6e\x5a\x2a\x53\xee\xf0\x4b\x18\x67\xb5\xf9\x1e\xc8\xe5\x4e\x56\x1f\xac\x76\x02\x0b\x2e\x36\x89\xf3\xd8\x95\x87\x00\x24\x4d\xa7\xed\xc3\x13\x1e\x18\xaf\xa9\x53\x92\x4e\xd4\xa7\x81\x55\x83\x9e\xc7\x1d\xa0\x56\x8d\x0e\xf3\x46\xfa\xa6\xad\x2c\x5e\x87\xb2\xb7\xb4\x3e\x85\x46\xd1\x77\xa5\xde\xb1\x13\x5b\x47\xb4\x3c\x1f\x44\xba\xc7\xc8\x17\x8c\xfb\x11\x23\x74\x03\x7d\x1c\x7c\xc5\xcb\x37\xe0\x4c\x72\xb4\x61\x6e\xe8\x24\x25\x6e\xa4\x74\x03\xe7\xfb\xb1\xfd\x54\xa2\x2f\xab\x4e\x80\xcc\x20\xcb\x3b\x06\x95\xe5\xfe\x95\x86\xf2\x99\xe3\xb9\xc7\x17\x8f\xb6\x4e\x59\xb4\xff\x26\xe4\x68\xcf\xa7\xce\x59\x7f\x68\xfd\xe5\xc9\x68\x77\x4b\xa7\x03\x49\x18\x43\x2f\x81\xc3\xc3\xe0\x38\x14\xf4\x9a\x1e\x2c\x0e\xeb\xf0\x02\xff\x12\xa8\x9c\x34\xa3\x56\xac\x53\xae\x77\x45\xd3\xc1\xe0\xd1\x5a\x02\x43\x2b\xd5\xa9\xf4\x23\xbb\xa5\xc7\x90\x76\x7e\xa0\x69\x2e\xa1\xba\xeb\x94\x0d\xb1\xf7\xe6\x2d\x6f\x56\x6d\x25\x33\xff\x7c\xcd\x82\x7c\x25\x56\xd9\x15\x7d\x0f\x3a\x22\x6a\xad\x25\xb5\x7f\xf6\xf6\x24\xdf\xaa\x81\xae\x2b\xc5\xed\xb3\x49\x79\x44\x1e\xbd\x46\xe5\x20\xb6\xca\x1b\x06\x01\x8e\x20\xc6\x0b\x79\x4e\x69\x53\x0e\x88\xc3\x89\xd5\xf6\x3e\xdf\xa0\xda\x61\x0f\x6a\x18\xf0\xd3\x74\xd0\x9f\x3d\xa1\xc2\x99\x5e\x93\x12\xa3\x6c\x57\x72\x66\x4a\xa3\x41\xd8\x48\x3d\xd5\x37\x21\xe6\x6a\x9b\xf3\x7d\xa1\x2c\x32\x79\x55\xcb\xc0\x3b\x18\xe2\xb7\xd7\xb7\x5e\xe2\xe7\xb7\x48\x59\xc7\x6e\xd4\xd9\xb6\x55\x50\x06\x6d\x54\xbb\x57\x58\x02\x2c\xaa\x52\xe7\xa9\x4f\x23\x9e\x94\x43\xe2\x7e\x13\x49\xa1\x6d\xd0\x74\xaf\xc4\x9e\x93\x8b\xf3\x5e\x4e\x2f\x65\x9e\xf0\x74\x12\x7f\x5e\x1b\x8f\x9a\x58\x04\x2f\xfb\x8b\x0d\xf8\x33\x43\xd9\xe4\x9b\x01\x65\xb2\x10\xa6\xeb\x4d\x72\x09\xf8\x05\x3a\xa2\xda\x9b\x57\x2c\xb8\x90\xb8\xfa\x85\x03\x26\x5f\x2d\x94\x21\xb0\x59\xfb\x76\xb3\xfb\x8d\x00\xe2\xfe\xf5\x82\xe4\x82\x54\x0f\xcb\xb8\xc4\xc3\xcb\x82\xd4\x92\x9f\xf5\x05\x38\xad\x80\x3a\x1f\x62\xf3\x87\x36\x3f\x27\x5c\xea\x30\xc7\x93\xe4\x5c\x5d\xe3\x2f\xd5\x20\x79\x88\xf3\x56\x48\xcc\xb6\x5f\x6d\x26\x4
c\x41\x42\x7c\x76\xab\x50\x01\xff\x26\x3b\x44\x5b\x67\x5c\xd3\x4c\xaf\xc0\x75\xdd\x6e\x6f\x2e\x6e\x91\xd8\x8d\x64\xbd\x3f\x16\xd1\x5c\x9c\xe7\x6d\x5f\xae\x28\x02\xa2\x3e\x24\x77\x9c\x2a\xc6\xc6\xdf\x5f\x88\xf9\x1d\x81\xb1\x7b\xee\x36\x9b\x0b\xe3\x05\x51\x8a\x90\xab\x8e\x76\xdc\x38\x8b\x01\x54\x95\xde\x9d\x5d\xb7\x0a\xd0\x00\xe2\x44\x4f\xb3\x24\x1a\x1e\xe6\x45\x6a\x08\x4a\x22\x71\x39\xdc\x4c\x3b\xb8\x8e\xae\x78\xe0\x58\xcd\xfb\xf2\xb0\x03\x92\x50\xd2\xa6\xbe\x1d\x35\x8b\x43\x81\x40\xe9\x3f\x4e\x39\x95\x1d\x4d\x15\x4c\xfd\x53\x49\xd9\x08\xf0\x00\x2b\xc1\x03\x82\x4a\x93\xaa\x2b\xc3\x1f\x83\x85\x05\x8a\x44\xf6\xe1\xc0\xe1\x4d\x71\x02\x6c\x8a\x31\x71\x41\x93\x8c\x95\x7d\x22\xda\x80\xe6\x3a\xb7\x1b\x6a\x5c\x4d\xeb\xa8\xce\xca\x6d\xf1\xc5\x81\x36\x12\x4b\x42\x5e\x68\x09\x95\xba\x6e\x0b\x95\xb3\x4f\x5c\x13\xad\x4c\xe0\x8b\xf9\x20\xff\x23\x50\x23\xfb\x40\x17\x6c\x0e\x3f\x18\x39\xb4\x17\xdf\xe5\x3d\x0d\x51\xbe\x9d\x14\x91\x8f\xcf\xe3\x55\x55\x49\xa2\x00\x56\x8a\x07\xaf\xe6\xf7\x7f\x98\x18\x08\xf9\x3e\x61\xc8\x11\x40\x80\x04\x5b\xa3\xa7\x23\x72\xcb\x5c\xab\x1d\xe3\xe3\x19\x00\x7e\x58\x8f\x9e\x94\xec\x43\xbe\x85\xab\x85\xd8\x5c\x72\x80\x08\x55\x65\x8d\xed\xab\x9f\x39\x81\x59\x7e\x7e\x25\x8c\x95\x8e\x81\x03\xff\x1d\x9b\xf9\x32\x2d\x20\x8c\x37\xb0\x36\x03\x8b\x4c\xad\x11\x05\x18\xf2\x98\xdc\xaf\xe8\x10\xb1\x88\x3e\x68\xe8\x9d\xf2\xbf\x65\x30\x35\xb8\x41\x99\x92\x6e\x1b\x35\x22\xda\x57\xbd\x94\x3a\x30\x92\x61\xbb\x9d\xa9\x7a\x66\x20\x02\x58\x32\x6b\x77\x0d\x24\x5d\xd5\x6e\x06\xf9\x55\x87\xc3\x93\x1e\x9e\xa9\x81\x20\x51\x21\xc2\xc3\x9c\x89\x78\xb2\x66\x9a\x3d\x9c\xca\xa3\x41\x21\x8b\xdd\x48\xa0\x77\x14\xc0\x19\x11\x9e\x7d\xeb\x3e\x73\x35\x3b\xd8\xc2\x72\x03\x37\xf1\x73\xb2\xb3\xbc\xee\x2e\x1c\xa2\xa2\x05\xe6\x25\x94\x17\x9b\xa2\xe9\x64\x55\x60\x09\x2a\xba\x0f\x36\x94\x38\xf9\xdd\x51\x68\x4f\x2f\xb5\x32\x76\x78\x3f\x7a\xa8\x4e\xb1\xd7\xd4\x17\xc6\xec\x83\x83\x6d\x52\xcb\x1c\x23\x8a\xce\xcf\xe9\x6f\x86\x4e\xaf\x51\xff\x6c\xe3\xb9\x90\xa5\xaf\xb3\xbc\xf2\x77\x7c\xa8\x2a\x4b\x90\x3b\xe3\x53\x3c\xdb\x97\x47\x21\x81\x5d\xc5\x84\x80\x8a\x33\x7b\xe9\x08\x9c\x87\x12\x2d\x19\x23\x9f\x72\xcb\x3d\xe8\x95\x58\x8b\xe3\x5d\x94\x91\xc9\x59\xc9\xc7\x2c\x79\x7f\x42\x40\xb9\xe8\xb0\x3b\x58\xb6\x1a\x38\xa4\x57\x7b\x34\x70\xaf\x4e\x5e\x3e\x55\xdc\x81\xe0\x4e\x56\x02\x26\x65\xb6\x20\x91\xb8\x93\x7d\xeb\xb2\xb4\x12\xae\xb7\x41\xb6\x41\x2b\xc6\x50\xd6\x26\x71\xb3\xfa\x98\xe2\xf4\x13\x60\xa4\x56\x15\x43\xa6\x81\xe3\x96\x46\xde\xbe\xd9\xf1\x87\x79\x04\xe6\xb3\xab\xcb\xcf\xe2\xee\x67\x8e\xc2\xd3\xe4\x74\xd4\xb4\xfb\x9f\x0b\x33\xb1\x2f\x4e\x25\x53\x4f\xfc\xf7\x2b\x36\xab\x24\x28\x0c\xcf\x4d\x36\x18\x0f\xe3\xe5\x7d\xf4\xb1\x46\x40\x50\xfa\xb1\x06\x7b\xce\x83\x90\x7d\xf1\x86\x96\xcd\x95\x7a\x55\x00\x88\xf3\xc2\x4c\x11\xc9\xa2\xcd\x7e\x20\xb6\xc1\x8c\x1b\x33\x6b\x2a\x8a\xe3\x2c\xbc\xcb\xb9\x55\x47\xa1\x4f\x38\x6c\x38\xdb\xe4\x6c\xa9\xbd\x13\xc6\xe3\x87\xac\x15\x85\xea\xd5\xda\xe9\x2c\x43\x95\x90\x1c\xf4\x57\xe4\xe8\xe2\xd5\x9d\x9d\x1f\xb5\x29\xc3\x4b\x5f\x87\x93\x3d\xe3\xf2\xa5\x00\x54\x94\xdf\x95\xaf\xcf\xbe\xb2\xae\x51\x31\xb2\x7a\x54\xa4\xb5\xf6\x45\x35\x6b\x50\xaa\xe4\x40\xee\xae\xb4\x24\xbe\x20\x68\x30\x0c\x40\x2a\x65\x6a\x7d\xbf\x55\x68\x9f\x72\xdb\x4c\x8c\x25\x36\x8a\x63\x92\x95\xb6\x61\xca\x4e\x18\x4d\x80\xed\x3f\x77\x5c\x9c\x22\x18\x9d\x26\xdf\xc4\x51\x25\xe6\xa3\x8c\xc8\x74\x62\xad\xbc\x8c\x41\x06\x5a\x77\x64\x57\xbd\xde\x00\x57\xbc\xf9\x0d\xe0\x03\x2b\x15\xc0\xa2\x55\x95\xfc\xb2\x28\xb6\xb0\x26\x28\x61\xc6\x4c\xfa\x0a\x15\x7b\x21\x9c\x7b\x90\xb2\x92\xdd\x9b\xc0\xf6\xa8\x54\x3e\xce\xf2\xfb\x4f\x91\xf2\xcb\x4c\x76\x99\x4d\xf2\x
01\xb6\xb0\x8e\xd2\xd6\xca\x79\xd9\x4a\xb5\xcf\x4e\x2e\xd4\x2b\xcd\xa6\xb9\x43\x9b\xcb\x38\x2a\x27\x0d\x2b\xb3\x84\x0f\x2d\x0c\x5d\xa7\xdc\x9c\xce\x4e\x09\x0c\xf2\xed\x37\x27\x8b\x46\x12\x4a\xef\xcf\xe8\xe0\xb9\xb5\xa3\x8a\x0d\x99\x80\x51\x55\xce\x40\xfd\x87\x7e\xa9\x9e\x19\x3c\xc4\xf2\xd5\xc0\xad\x11\x91\x92\xfc\xd3\xc6\x71\xcd\x92\xc8\x2b\x63\xca\x6c\x37\x18\xae\xb6\x40\x1f\xba\x5b\xbf\x66\x47\xe0\xe4\xbe\xa8\xb2\x1a\x66\xc6\xbf\x0e\x37\x5b\xef\x11\x39\x1a\x38\x19\x3a\xf0\x11\x56\x24\xfe\xc4\xce\x64\xe0\x17\xa9\x91\xcb\x5c\x7b\x05\x58\xf3\x01\x26\xa0\x45\x3e\x9f\x57\x66\x1b\x21\xd2\xea\xa0\xb2\xb6\x8d\xb3\xdb\x1f\x0b\x18\x3b\x26\x20\x9f\xdc\x09\x28\xb6\x3b\x45\x68\xe1\x9a\xca\xc7\xa3\x9c\xbe\x7c\x6b\xbb\xf8\xd6\xfd\x71\x1e\x91\x6a\x44\xa1\x04\xfe\xf6\xa6\x43\x65\x65\x4c\x63\x3e\x86\x18\x9c\xa1\xa8\x9f\xda\x49\xbc\xb1\xf9\xec\xc3\x16\x5c\xf6\x5b\x35\xef\xb5\xf1\xd4\x0e\x2e\xd5\x8f\x90\x40\x17\xb6\x2b\x89\xa5\xfe\xaf\x83\xfd\x0a\x84\xc5\x8e\x26\xa7\xdd\xe9\xf4\xbc\x03\xbb\x94\x7d\x26\x51\xf6\xb6\x8d\x1c\xc9\x2d\x7f\x51\xf4\x16\x34\xff\xad\x70\xf9\x38\xdd\x6a\x53\x66\x80\x2d\x53\xf7\x2d\x8a\xff\xcc\x75\xc5\xd7\x40\xb2\x3a\x57\xdf\xa4\x48\x7c\xe2\x04\xdb\x59\x51\x53\xb5\x07\xb3\x1a\x0a\xa2\x3f\x1d\x46\x4d\xe5\x2d\x9f\x30\xa3\xa9\x84\x1b\x5f\x0d\xe6\x69\x6b\xc5\xf9\x77\x9f\xcd\xe1\x74\xf2\xd6\x8f\xea\x27\xf7\x4b\x38\x6c\xe4\x36\xb5\x21\x2e\x1e\x28\x9f\xa9\x41\x15\x96\x4d\xb1\x11\x24\x46\xa8\x8a\xcc\xab\xb2\x78\x38\x43\x24\x51\x77\x50\x4c\x55\x40\x2b\xc1\xa8\x58\x27\x79\x8d\x2d\x71\x88\xfa\xcb\x55\x34\xcf\xca\xce\x29\x45\xda\xf0\x40\x5a\xa5\x46\x03\x9a\x97\x97\x9f\xe8\x35\x93\x1b\x1f\x51\x0b\xa3\xb0\xb6\xf1\x23\x2d\x56\x28\x95\x63\x4c\x75\x01\x08\x14\xea\x35\x7b\x52\xf2\xa7\xc8\xee\xd9\x28\x05\x93\x40\xf7\xa8\xda\xf3\x4d\xf1\x17\xbe\xe8\x53\x59\x48\x4c\xd0\x84\x90\x9a\x7f\x41\xf3\x0b\x54\xab\x01\x2a\x71\xae\xee\x18\x96\xa7\x14\x04\x42\x23\xcb\xe0\x40\x28\xc4\xed\xfd\x89\x9e\x14\x29\x53\xeb\x4b\xcf\xca\xa5\xb3\xab\x3e\x79\x15\x5e\x75\x59\xfc\x94\xab\x6d\xeb\xac\x2d\xd6\xc3\x3a\x21\x1f\xd0\x7b\x27\x41\x50\x13\x27\xb3\x23\x27\x4d\xb3\xa6\x7e\xc8\xdd\xb5\x8d\xc2\x92\x47\xfd\x54\xa5\x9f\x91\x41\xe8\x74\x19\x33\x34\x0c\xef\x95\x8b\x8d\x3c\xea\x5d\x87\x4c\x44\xf4\x7e\xf7\x77\x66\x52\x4b\xfd\x82\x11\xee\x99\x40\xc4\x8c\x7e\xca\xad\x80\x5d\x2c\x26\xfa\x4f\x70\x6f\x0d\xfc\x34\xd3\xef\xdd\x7b\xe4\xef\xf1\x03\xde\x63\x01\x25\x2a\x09\x6f\xc4\x47\x83\xbb\x30\xa0\xd5\x67\x47\xe0\xff\xe4\x90\x5d\x8b\x19\x2d\xca\x93\xff\xda\x2d\xa9\xbc\x66\xb3\xea\xa7\xbb\x44\xa2\x4b\xc6\x2b\x0b\xbc\x23\x59\x72\x88\x52\x5b\x52\xf8\xa5\x75\xac\xe8\x2f\xfc\x92\x1a\x96\x08\x0e\x8a\xdb\xf4\xb3\x77\x43\xdc\x9d\x44\x63\x09\x44\xfe\x79\x45\xfb\x8a\xbc\x2b\x01\x2a\xd0\x36\xe9\xcc\x35\x53\x19\xca\xeb\x00\x2c\xd6\x21\xcf\xdb\x44\x1c\x51\x1f\xa6\xfa\x91\xfb\xd5\x7f\x66\x9e\xb8\x56\x55\xc7\xc7\x99\x53\xad\xd1\xf5\x56\x7d\x96\xcb\x59\xa8\x9d\xf7\xcf\xd5\x1a\xfd\x92\x9b\x80\x9b\x43\x94\x62\x80\xe8\x31\xbe\x53\x79\x73\x84\x38\x42\x76\x82\x86\x49\x0d\xa1\xd8\x3e\xd2\xb0\xc8\x29\xf3\x26\x61\x5a\x39\xd9\xac\x78\xc3\xeb\xef\x2f\x06\x32\x8c\x2e\xdb\xaf\x6e\xb5\x69\x48\xd9\x46\xe9\x8c\x18\xbb\x9c\x2c\x30\x99\xac\xa0\x43\x41\xec\x85\x96\x2c\x06\x93\xf0\x38\x49\x40\x8b\x1a\xe4\xf3\xa8\xd2\x04\xb7\x8e\xd3\x2e\xcb\xbc\xff\x09\x29\xb3\x03\x6a\xa4\x94\xa4\x3f\x83\x56\xa6\xf4\x37\xfa\xe4\xf4\xb2\x29\x6c\xa0\x07\xca\x36\x4d\xd5\xc3\x46\x87\xc2\x3f\x65\x63\x10\xc5\xbb\x72\x3b\x8b\xe1\x86\x4a\x33\xea\x5a\x5a\x5b\x09\x0f\x0a\xb3\x80\x3c\xde\xef\x6b\x73\x75\x0c\x3a\xfc\xb4\x17\x1c\x0d\x26\xe2\x26\xcb\x35\xc4\xf5\x87\xaf\xb6\xd1\x0e\xe4\x66\x51\x9c\x44\xb6\x7a\
x4e\x9c\x01\x2f\x07\x9d\x02\x9c\xce\xef\xcc\xdd\xa8\xbd\x85\xd0\x46\xfa\x65\x75\x96\xca\x08\xa6\x2b\x3a\x5a\xb8\x00\x71\x9a\xbe\x76\x19\xa5\x98\x85\xd8\x72\xbd\xdb\xf7\xf6\x71\xd6\x7b\xa9\x08\x85\x9e\x92\xaa\xd9\x07\x68\x58\xe2\x51\xd3\xd2\xe5\x82\xdf\x63\xee\x24\xac\x69\x2d\x5e\x65\x91\x10\x1e\x49\x80\x91\x9b\x89\xe8\x99\x8a\x45\x44\x2c\x55\x6f\x09\x18\x80\x36\x87\xad\xb0\xed\xdd\xc5\xb9\x8c\xfc\x45\x54\x23\xbe\xb9\x6d\x9a\x24\x0c\x77\xf7\x1d\x7e\xed\x8a\xda\xdf\xd2\x85\xe6\x1d\xa4\x6d\xdc\x2e\x0d\x14\xd1\x0e\xf8\x0a\x33\x65\x8f\x6f\xda\xfd\xd6\x97\xa5\xdf\x23\x05\x77\xb9\x80\xbb\x94\x8b\x14\xc8\xb7\x23\x2c\xf6\xe1\x08\x1e\x4e\x70\x7b\x4c\xaa\x7f\xf0\x05\x26\x40\xad\xd5\x24\xcd\x1e\x9e\x64\xa8\x95\x50\x31\x05\x63\x0e\x98\xe4\x1a\x50\xd8\xa7\x42\xc0\x00\x62\x2d\x7d\x84\xf9\x2c\x20\x07\xb4\xdb\xe5\x13\xb2\xcf\x99\xda\x23\x8b\xd8\xb2\x17\xbc\x5b\x3a\x1c\xed\x2b\x5c\x0b\x5f\xa8\x9a\x67\xe4\x10\x95\x17\xef\xe2\x9d\x7d\xa2\x7e\x1a\xf5\x19\x17\xe6\xe8\x3e\x97\xf9\x99\x03\x7b\x8c\xc5\x2b\x20\x62\x95\x68\x36\x11\xe1\x93\xcb\x9a\xb8\xd0\xca\x47\xf7\xc4\x6b\xe9\x4f\x79\xb3\x93\x08\x4c\x23\xa3\x64\x19\xba\xec\x33\x04\xcd\x8c\x10\xf6\x60\x49\x8d\xc2\x64\x95\x65\x35\xb6\x75\x7d\x4a\xdb\xc0\x91\x33\x9f\x0a\x34\x25\x22\x9c\x35\x9d\xd2\x7e\x33\x35\x9f\x37\xc2\x8a\xd9\xf1\x45\x5e\x25\xd5\xb3\xfc\x78\xcd\x81\x79\x4e\x63\x74\xd2\x36\xa4\xe2\xd5\x26\x36\xd8\x26\x6e\x8c\xac\xa3\x2c\x99\x6d\xce\x89\x64\x84\xa9\x76\x27\xb7\xe3\x14\x47\x12\x4d\x9c\x43\xee\xa9\x9b\xd4\x6b\x63\xbb\x24\x0a\x11\xf6\x65\x34\xaf\x0a\x3b\x3b\x67\x24\x42\x4a\x4d\x29\x64\xe7\xd0\x39\xc6\xf4\x03\xaa\x88\x04\x90\x17\x6e\x6b\x78\xaa\x34\x7a\xa0\x15\x9e\x20\x40\xf1\x13\x9c\x7a\x1f\x03\xef\x65\xf3\x76\x70\x58\xdb\x30\x04\xb3\x13\xaa\x0d\x5a\x14\x45\x89\x22\x0d\x1b\xbb\x7a\xd1\x72\xf9\xa0\xe1\x4a\x2d\x2a\x22\xa2\xbe\x31\xc4\x84\xc3\x9f\xcb\x84\xd1\xc4\x8a\x37\x53\x8e\x69\xfa\x20\xae\x32\x63\xeb\xa2\x0c\x23\x47\x9a\x12\xef\x81\xce\x54\x43\xa5\x74\xed\xde\x79\x73\xe0\x9e\x85\x19\x1f\xff\xe9\x40\xd5\x3c\x6c\x7f\xa7\x95\xa3\x4e\xde\x31\xb2\xf2\xf3\x4a\x4b\x85\x9f\x79\xe9\x38\x76\xd6\x4b\x49\x73\x93\x9b\x68\xfd\x1e\x6d\x4b\xae\xd7\xa4\xca\x57\x0c\x83\x7c\x6b\x54\x80\x5c\x47\x28\x06\x12\xab\xd4\x00\xda\xba\x96\x86\xda\xd6\x99\xc8\xc3\x3b\xc7\xb0\x7f\x4f\x70\xb4\x32\x7a\x8e\xaf\x48\xb1\xd0\x38\x75\xe8\xc2\xcc\x3a\x2b\xe2\x74\xb0\x79\x4c\x39\x52\x84\x86\x15\xf0\xc0\x53\xa5\xa4\x65\x40\x66\x9a\xd8\xd8\x76\x9a\xe5\x57\x3e\x3f\x3d\x6a\xad\xac\x88\x93\x86\x27\xa1\x10\x02\x21\x6b\x00\x5a\xdf\xbc\xc4\x34\xc6\xfa\xd2\xec\xc0\x56\x6d\x3c\x58\xde\xa7\x05\xa0\x0f\x0a\xbb\x47\x0a\xbb\x9b\x2c\x83\xdd\xc3\x72\x96\x67\x25\x1c\xd1\x2c\xc3\xe4\xc4\x07\xaf\x70\xcc\x5e\xa4\x33\x45\x00\x90\x7a\x6d\x13\x41\x06\x52\x9e\xe4\x9c\x10\x08\x1d\x7d\x49\x20\x8c\x8b\x1d\x78\xb1\x54\xda\xc6\x00\x51\xef\xc6\xbe\x7d\x2c\x89\x7a\x44\x90\x33\x39\x89\xa2\x79\x3e\xa4\x60\x33\x89\x71\x2c\x0a\x57\x6c\xad\x24\x15\x07\xb3\x0b\x50\x13\x74\xdf\x98\xc6\x1b\x7f\x03\x88\x4c\x86\xd0\xf0\x21\xee\x7f\x9a\x86\x92\xbb\xe9\x7f\xda\xd7\x14\x52\x97\x2f\x96\x82\x4d\xf9\x14\xa0\x46\x2a\x2a\xe6\x5d\xef\x3a\x40\x16\x8a\x93\x62\x82\x90\xd2\x2d\x85\xb9\xb0\x29\x73\x3e\x4c\xf9\x95\xc8\x9f\x26\xbf\xb6\x50\x3f\xe9\x0f\x7d\x53\xfb\x3f\xbf\x0d\xd1\x53\x60\x3f\xf8\x77\x40\x28\x8a\x3d\x0a\xb0\x2d\x70\xe2\x9c\x76\xf2\x80\x4a\xc5\x37\xbf\x41\x84\x94\x5c\x7a\x3c\xba\x18\xca\x13\xc0\x8e\xca\x43\xa1\xeb\xad\x5f\x95\x1d\xe5\x57\x8a\xa0\xbd\x2e\x3e\x18\x71\xce\x3e\xbf\x4c\x8b\x01\x2e\x2b\xe0\x75\x55\xe5\x40\xc3\x2b\xda\xc9\x75\xba\x33\x1d\xc1\x4b\xbb\x44\x2d\xae\x1d\xfa\xe1\x37\xe9\x2a\xb7\x37\xaf\xdf\x61\x0f\x6a\x85\xcc\x6f
\xea\x9f\x21\xbf\x0e\x1b\xd3\x4e\x6a\x5f\x6e\xde\x75\x53\xa8\x40\x65\x38\x86\xdd\x82\xab\x38\xd1\x1b\xda\xec\xd1\xbf\x7a\x18\x15\x0c\x74\x8c\x4b\x7b\xb3\xf8\xc5\x0e\xf0\x83\x1a\xea\x59\x92\x1d\xfb\xd9\xf3\x6f\xa5\xf0\x48\xcb\x4e\xfd\xa4\xe8\x12\xe5\x3a\x20\x1f\x2a\xac\x12\xfe\x5f\xf8\x20\x94\x83\x16\x24\x32\x4e\x99\xc8\xac\x62\xef\x27\x07\xfa\x25\x96\xe5\x3c\x3f\xc1\xff\xaf\xfd\xb2\x2b\xa1\xc2\x0f\x43\x8c\x40\x2e\xe5\x90\x32\xea\xd9\x00\x42\x55\x6b\x86\x7d\xb1\x59\x88\xf8\x8a\xd3\x5d\x6d\x30\x9b\xf7\xc8\x13\xcf\x14\xaf\xf9\xe1\x5d\x05\xd6\x34\x40\x96\x25\x7a\xcb\x91\x76\xd2\xbb\x82\x3f\xca\xa4\x5d\x05\x8b\x51\x29\xb2\x9b\x04\x9e\x25\xda\x16\x4e\x2c\xa2\xa2\xbb\x0c\x9c\xd2\x60\x63\x87\x4a\xa6\x64\xe1\xdb\xab\x64\x22\x44\x73\x6c\xac\xd3\xa8\x40\x90\x66\xd5\xd4\x5c\x8d\xce\x59\x34\x6c\x55\x88\xec\x7f\x5b\xd8\x55\x65\x99\x5c\xab\x00\x63\x71\x41\x4d\xe9\x74\xf9\x2a\xcc\x5c\x60\x47\x64\xce\x2d\x5c\xf6\x95\x37\xad\x81\x60\xaa\xd7\x99\xbc\x56\x67\x16\xf0\x96\xc2\xda\xa2\xa5\x55\xec\x2a\x65\x8a\xa1\xb7\x46\x95\x54\xd5\xd7\xc1\x7f\x41\xe8\x0a\xdd\x1b\xf5\x77\x1c\x4a\xed\x28\xa5\xdd\x45\xde\x68\x7d\x81\xe8\x0a\x58\xb3\x1f\x4a\x31\xad\x63\xcc\xcf\x53\xb5\x1f\xe0\x4d\xd9\xc1\xb1\xa4\x2f\x70\xb0\x04\x17\x8d\xa9\xe2\xbe\xfe\x94\x23\x28\x24\x69\xe3\x7f\x3d\xd4\x1d\xd9\x27\x6a\xfd\x5f\x92\xaf\x62\xb7\x43\xfd\x03\x3c\x6b\x10\xc3\x26\xf8\x7a\x08\x1d\xe9\x4f\xfe\x2a\xb7\x8d\x43\x40\x16\x96\xba\x65\xf3\x86\xec\x1d\x94\x81\x1f\x94\xda\xc5\x2e\x27\x2f\x7e\x1a\x57\xd9\xd2\x89\x5e\xff\xeb\x6c\xd2\xb9\xd1\x68\xae\xb2\x29\x64\x69\xb3\xc6\xf1\x0e\xb6\x0f\xad\x8f\x0d\x66\x41\x4a\x1c\xe5\x49\x14\x2c\xdc\x10\xaa\xf5\x51\x55\xb9\xc0\xdb\x25\x4a\xbd\x67\xe7\x78\xed\xf6\xaa\x66\x0c\xbb\xfb\xe8\x2d\x99\x31\xb6\x89\x9e\x63\x12\x52\xf0\xde\x39\x4a\xf7\xea\x05\xb5\xd6\x9e\x6f\xca\xa1\x81\x27\x9e\x08\x2d\xa9\x0b\x7a\x3e\xb4\xb0\x6d\x5e\xad\x57\xe8\xac\x9c\x4a\x72\xe3\x7c\x70\xd5\x50\xe3\x10\xc4\x1c\xd0\xe0\x13\x5a\x1a\x7a\xcf\x15\xa1\xbf\x7c\x95\x01\xa5\x4a\xa0\x73\x6d\x08\x73\x34\xa9\x01\x9d\x88\x2a\x1d\x71\x10\x4e\xa6\xc8\x82\x92\x82\x43\xd8\x49\x53\xd8\x7d\x18\x24\x44\x01\x3e\xa3\xed\xc1\x16\xe8\x04\xf6\x07\xa7\x50\xa3\x22\xcb\x72\x2c\xeb\x94\x36\x38\x09\xb3\x61\x9b\xa3\x9b\xe6\x21\xb5\xc1\x87\x56\xb6\x16\x26\x29\x3a\xd9\xc6\x54\x54\x1f\x26\x57\xb5\x22\x2c\xbb\xba\x1c\x63\x84\xae\x97\xb6\xff\x1a\x8b\x76\x83\xe1\x3c\xb5\x33\x13\x3a\xbc\x87\x16\xc5\x92\xdf\x45\x70\x2f\xbf\xc3\xff\x24\xb7\x47\xae\xc8\xc5\x27\xa3\xe9\xc7\xa7\x1e\x6c\xa4\x0c\x6d\x92\x5f\xd9\x76\x95\x6f\x0f\x87\x09\xbe\xa3\x5d\x59\xff\xb0\x8b\xa1\x35\x6f\xc3\x3d\x41\x12\x1a\xf5\x6a\xb1\xac\xd0\x48\x48\xa6\xee\xe2\x72\x33\x49\xad\x8f\x5d\xbf\x95\xda\xdd\xd0\x23\x71\x47\xab\x37\x9e\x12\x9a\x9e\x64\x68\x97\x33\xf6\x1f\xa1\xde\x5c\xc3\xd3\xa9\xcf\x63\xf6\x2d\x3f\x55\x4e\xc1\x10\x59\xfb\x16\xce\xbd\x31\x58\xf6\xe0\x43\xf4\xe0\xa4\x4a\x97\xcb\xd5\xaa\x3a\xe7\x97\xae\x67\xf3\xe1\x8d\xc9\x7a\x12\x26\x57\x9b\xfa\xb4\x5b\x07\xda\xb9\xd6\xca\x9c\x0e\xe8\xf3\xf6\x3a\xdd\x2a\xff\x1b\x51\xc0\x11\xaf\xe7\x23\x92\xe4\xe0\xba\xa8\x2b\xe7\xf2\x7d\x67\x00\x8f\x3a\x91\x21\x46\x73\xed\x66\x8c\x63\x82\xf5\xbc\x74\x8a\x0f\xe8\x28\xf4\x90\x54\x0a\x3b\xca\xdb\x8d\x70\xc8\x6c\xa9\x70\x89\xa6\xe4\x92\xd2\xc7\x57\x62\x5c\xfb\xfb\x9c\xbc\xf6\x15\x23\xcc\xe0\x5b\xa7\x3f\x03\xc7\x69\x86\xbd\x71\x2f\xc7\x0b\x7d\x1b\x39\xa8\x04\x91\x59\x65\x49\xda\x2a\x32\x56\x89\xe7\x03\xb2\x58\xba\xa0\xf4\x42\x41\xb8\xc7\xc9\x73\xd9\x65\x65\xc1\x79\x0b\x43\x2c\x42\xc2\x4f\xb3\xc0\xa6\x93\x89\x22\x4d\x67\xbf\xbb\x84\xf3\x34\xcb\x3e\xd0\x6c\x19\xf0\xd3\x32\xba\x6f\x96\xe8\x5b\xa6\x2d\xf0\xec\x23\x92\x6b\xe8\xea\x8
8\x4e\x7c\x51\x9f\xca\x48\x58\x85\x4c\x31\x8a\xa4\x74\xad\x00\x80\x5c\x08\x61\xa7\x09\xad\x54\x3a\xb5\x79\x1e\xd1\xf0\xa8\x0e\xbe\x03\x86\xb2\xbe\xd0\xd0\x75\x7f\x50\x5d\x23\x74\xe8\x7d\xd6\xed\x3d\xae\x74\xf7\xa1\x39\xe1\x7c\x52\xb5\x53\xb5\x5b\x48\xbc\x6c\xd9\x25\x49\x5b\xba\xd0\xe5\xda\x60\x9a\x95\x48\xc2\x6a\x27\xd4\x12\x15\x45\xfa\x77\x86\x8f\x4b\x5b\xd8\x46\x2b\x77\x03\x85\x0d\xf6\x9b\xf4\xfc\x62\x81\x1a\x44\x8e\x2b\x9e\x3e\xaa\xef\x4c\x2a\x1c\x68\x47\x41\xdb\x8b\x11\x39\x3d\x43\x1d\xc0\x61\x77\xc9\x14\x94\xe8\x02\xd9\x02\x61\xad\xdd\x5d\xc0\xe6\xc8\x30\x78\x46\x39\xf8\xb4\x97\x1f\xf1\x28\x6c\xda\x01\x19\x38\xff\x3a\xe5\x83\x8d\xe6\x09\xa6\xc1\xbb\x6b\xe5\x8f\x02\x07\x5d\x13\x69\xf0\xf2\x28\x6c\x66\x95\x50\x98\x02\x10\x0e\xe6\xca\x17\x63\x06\xc8\x02\x2d\x84\x37\xd2\x1c\x37\xfd\x9a\xa4\xbf\x10\xed\x8b\x7b\x70\x45\xcc\x62\x9a\x30\x8f\x01\x4c\x01\x67\xf4\x0e\x65\x2d\xb4\xb2\xe3\xb3\x57\xf2\xea\x6d\xc6\xe7\x90\xad\x19\xbd\x00\xda\x8e\x49\xaa\x54\xe8\x9e\x9b\xe9\x25\x20\x2e\xe1\xf9\xd8\xad\xcf\x13\x90\x14\x49\xe8\xb1\xa2\xde\x92\x39\x73\xa3\xf5\xe4\x53\x53\x83\x47\xa7\xcd\xb8\x48\xcc\x75\x73\xe7\x64\x96\x4b\x03\x40\x83\xed\x93\x49\x79\xeb\xe3\x88\x95\xa2\x3a\xbb\xa1\x7e\xd6\x31\x60\xb3\x1d\xe0\xb9\x9f\x4f\x44\x46\x47\xbc\x25\x20\x7e\x22\xf0\x8a\xfb\x98\xf6\x11\x07\x8b\xd5\x87\xd8\x9a\xd6\x89\x13\x5a\x55\x10\x17\xed\xb9\x95\xa3\x2f\xb3\x26\x44\x68\xc2\xd0\xa6\x62\x34\x64\xec\x20\x92\xee\xcc\x2e\xe4\xb2\x6b\xec\xf5\xa7\xfd\x73\x2b\xef\x5f\x94\x05\x94\x0f\xe9\x71\xb4\x2a\x0f\xb8\x54\x88\xf6\x2f\x02\x31\xc1\x24\x35\x8b\x6f\xd7\xde\x46\x69\x31\x7a\x2b\xc7\x00\xfc\x57\x5b\xf4\x07\x5a\x7d\x31\xfe\x4a\x32\x28\xc4\x8b\x18\x7e\xab\x5f\x7c\x2b\x60\xca\xcb\x67\x33\x9f\x0e\x72\xa5\x1e\xe7\x41\xe0\x42\xe2\xee\xd0\x90\x58\x4d\x63\x5e\x84\x26\xf1\x0b\xec\xdb\x34\xd2\x1a\x7d\x4e\x16\x0e\xe6\xbd\x71\x02\xb6\xbe\x28\x63\x98\xc9\xe7\x22\x4e\x61\x8c\xc4\x27\x81\x53\x75\x35\x18\xc4\x95\x7a\xbb\x9f\xb8\xc7\x4c\x60\x63\x61\x65\xb9\x96\x96\x2a\xaa\xe9\x28\xb7\xcf\xc9\x5e\x8d\xf8\xd7\xbe\xb3\x82\x8c\x43\x37\xbe\x0e\x07\xdb\x26\xa0\xf3\xd2\xa2\x73\x57\x66\x6b\x2c\x80\x90\x89\xcd\x97\x83\xa9\x40\x27\x71\xb9\x37\xc7\xf4\x34\x7c\xb0\xe5\xbc\x86\xe0\xb9\x29\xbd\x3b\xc2\x12\xcf\xa1\x86\x5d\x09\x5a\x5d\xda\x24\x66\x87\x8f\x1d\xa0\xaa\x60\xe0\x32\xb7\xab\x33\xca\x0b\x59\xd1\x12\xf4\x92\x15\xcd\xb1\xe6\x8a\xa5\xdb\x79\xa9\xb0\x2e\x7a\x2c\x99\x75\x57\x76\x46\xfe\x48\x06\x74\x71\xfa\x61\x3a\xa8\x8a\x60\x11\x24\xc2\x0c\x17\x54\x52\x28\x2c\xb1\x2e\x64\x03\x05\xa0\x33\xeb\xd9\xf0\xb3\x62\x64\xa1\x16\xa6\xb9\x23\x51\xfc\xba\x89\x73\x6b\xf9\x0f\xc4\xe8\x48\xe9\xa5\xa5\x7c\x70\x60\x34\x94\x23\x77\x77\xf6\x3f\x65\xb3\x32\x39\x1b\x42\xfb\x92\x11\xc5\xbb\xbc\xcf\x24\x2e\xc2\x92\x6e\x76\xe1\xbc\x3f\x4d\xcb\x15\x0f\x42\xd1\xc8\xb8\x73\x82\x70\xa1\xd3\x4f\x9a\x41\xf3\xa8\x53\x73\x41\x15\x23\x71\xea\x90\x0e\x72\x61\x21\x88\x38\x95\x32\xbb\xbc\xd5\xe8\x7a\x7a\x9f\x8f\x76\x71\x78\x54\x76\x11\x99\x2b\x7d\x42\x28\x58\x94\x78\x6c\x9c\x23\x9d\xc7\x36\xa8\xab\x0b\x88\x41\x90\x05\x34\xd6\xac\xd6\x8d\x28\xf3\xaa\x50\x8e\x15\x1a\xc4\x88\x9e\xec\x7c\xd8\x29\x80\xab\x85\xad\xdd\xf6\xec\xa0\x32\x6e\xce\xa4\xfa\x99\xd7\x27\x78\xad\x8d\xf7\xd8\x3b\x3a\x02\x00\x24\xc9\x3b\x5f\x37\x94\x53\xd6\x5e\x21\x78\x2a\x1e\x60\x62\x71\x09\xd7\x72\xa5\xed\x5b\x03\x1a\xb5\x56\x27\xc3\xad\x26\xfc\xeb\x02\x8c\xde\xb3\x35\x08\x43\xe2\x1c\x78\x70\xf5\x62\x57\x30\x6d\x0a\xfb\x14\x1d\x39\x36\x65\x7e\x0c\x54\xcb\x71\xa5\x0e\xb6\x76\x31\x72\xbe\xc5\xfc\x9d\xb8\xa7\x7f\x23\x71\x4a\x5e\xb6\xe1\x0e\xd3\x87\x30\xa0\x4e\x6c\x59\xbc\xbc\xa8\x23\xe2\x46\x09\x83\x85\x5d\x89\x22\x0a\x
4d\x04\x07\x9c\x6d\xc8\x1d\xb4\x9e\x4f\x99\x13\xea\x1e\xe9\x35\xe6\x48\xb6\x51\x1d\xba\x3d\xd8\xc3\x0c\xc9\x87\x57\x16\xed\x5b\xf3\x98\xba\xd3\x4c\xaa\x07\xb4\x07\x1f\x58\xdd\x42\x00\x45\x67\x89\xe2\xd6\x1e\x06\xc9\x8c\xe6\x9f\x1a\x50\x18\xf6\x79\xf7\x12\xc1\xcd\x1a\xdf\x04\xde\xfc\x31\xa8\x3c\xb7\x5f\x1f\x6c\x26\xa9\x20\x2e\xbc\x8f\xcf\x3d\x53\x6e\xde\x1d\xf4\x22\x3d\xc5\x06\x86\xd9\x59\x93\x5c\x1a\x9d\x10\x07\xa5\x51\x29\x0a\xea\x3a\x6d\x63\x30\xf7\xac\x2c\x18\x4a\x8a\xcc\x1e\x07\x1a\xd4\xfa\xf6\x21\x0c\x1a\x6f\x1c\xc9\x52\x3f\x39\x7b\x83\xa0\xad\x7d\x15\xde\xea\x2a\x16\xca\xc6\x99\x32\x16\xff\x29\x0c\x33\x6a\xfc\xd3\x9d\x15\xc7\x98\xce\x0b\xb0\xbd\xae\x60\x5a\x98\xb7\x3c\x79\xb8\x3a\x25\x6e\xd8\x0d\x1b\xd3\x6d\xca\x98\x2a\x2e\x4f\x93\xab\xcd\x04\x26\xc8\x01\xea\x7f\x19\x3a\x4c\xc9\xcf\x9b\x24\x4d\x6c\x16\xc8\x97\xfd\x67\xed\x94\xa8\xd1\x84\x90\x1a\x08\x3a\xdb\x0f\xbd\x4a\x85\x83\x82\x24\xe8\x22\x17\x75\x0e\x90\xcf\x0b\x8b\xbf\xed\xe4\xba\x96\x56\x52\x21\x3b\xfe\xdf\x04\xd6\x07\xe9\x43\x93\x86\xcd\xde\x72\x10\x07\xab\x26\x54\xd7\xb0\x9c\x02\x41\xcd\x49\x89\x63\xbf\xa8\xac\x05\xd8\xe4\xf0\x61\x2d\x51\x49\x58\x48\xe6\xac\x54\x63\x49\xcf\xc4\xea\xe4\x77\xf6\xf2\xad\x31\x58\x70\x8e\x0e\x53\x2c\xf9\x0a\x33\x38\x4e\xea\x11\xda\x60\x00\x10\x1b\xb6\x94\x99\x00\x4a\x8c\xbb\x60\x68\x51\xe0\xc4\xc2\xa6\x21\x0a\x4e\x32\xa5\x79\x2a\xd4\x76\xea\xa0\x7c\x69\x81\x9c\xa5\xa4\xeb\xe9\xce\x55\x29\x12\xce\x62\x70\x56\xfd\xd9\x4f\x87\x93\xe4\x29\x48\x2f\x39\xbd\x1b\xec\x28\xcb\xb7\x7b\x67\x70\x7c\x7c\x98\xd9\x11\xd4\x69\xf2\xa5\x73\x4c\x93\xf3\xec\x54\xbf\xf6\x17\x13\x9e\xe5\xc1\x58\x60\x2d\x93\x36\xc7\x1b\x38\xba\xf9\xd0\x44\x89\x70\x0f\x6f\xfc\xd9\xc6\x6c\x2f\x2b\xb7\xb2\x59\x62\x4c\xce\x34\x63\x6c\xcf\xf9\x1c\x05\x07\x39\x55\x2e\xe0\x55\x02\x18\x97\x4a\x1d\x87\x65\xd3\xd3\x35\xda\xa1\x74\x9b\xbd\x33\xf5\xa7\x54\x96\x8e\x26\x25\xcd\x62\x30\x2e\xcb\x67\x92\xa6\x7a\xc2\xaa\x2d\x4f\xae\xa8\x44\x05\x93\xfb\xed\xfd\x6a\x3e\x2f\x55\x63\x57\x57\xbf\xf9\xe3\x05\x4d\x58\xf7\x03\xd6\x09\x05\x02\x53\x1c\x04\xe4\xc8\x65\xbe\x18\x53\x03\xac\x45\x76\x9a\xa0\xc0\x7a\x26\x27\x36\x1e\xdd\x85\xd1\x74\x72\x14\xcb\x6a\x26\xd8\x67\x5f\x59\xf9\x73\xec\x51\x3a\x7f\xa1\xce\xee\x23\xcf\x54\x98\xc9\x1f\x2a\xa7\x8b\x9d\x2b\x67\x72\xf2\xda\x19\xb9\xd3\x3b\x45\xaa\x76\xdc\xe4\x1f\xce\xf2\xda\x94\xaf\x5a\xd8\x55\xcd\xa1\x05\x6a\x48\x57\x34\xbe\x52\x8b\x38\xce\x6c\x85\x54\x90\xdc\x02\xc0\x87\xf3\xfe\x01\xde\xca\x7e\x5b\x05\x43\x12\x90\x08\x02\x83\xce\x3a\xfe\x78\x11\x34\x74\x54\x3e\xb4\xeb\xc4\x29\xc9\x1e\xf6\x9b\x9e\xfd\x20\x2a\xe8\xdb\x7a\xbd\x5b\x1c\xed\x50\x1f\x4b\xd6\x90\x35\xa9\x30\x1b\x75\x19\x26\x42\xe1\xa1\x4f\x6c\xd8\x38\xd4\x36\x21\xf5\xd0\xfa\x95\x5d\x54\x26\x90\x2a\xdf\x6b\xb7\xf9\x17\xe3\x92\x6d\xaa\x07\xc6\x23\x71\x2a\xce\x3b\xb0\x0c\x75\x73\xac\xa4\xa9\x91\x3f\xc0\x70\x61\x21\xa7\xde\x8e\x10\x7a\xef\x77\x85\x30\xa3\x05\xbf\xac\x62\xa8\x19\x70\x6d\x3f\x65\x3a\x92\xef\xc0\x22\x5e\x08\xf6\xd0\xd5\x37\xc8\x48\x77\x68\x85\x0e\x3b\xa9\x48\xc3\xe0\x5c\xde\x81\xa4\xc4\x75\x97\x2b\xcd\x61\xe1\x24\x55\x9d\x0f\xc9\xb9\x04\xb7\x8e\x6a\xa4\xce\x6b\x3e\xbd\x74\xb7\x51\xec\x78\x0d\xde\x2b\x42\x42\xb7\x7e\x35\x0a\xeb\xe0\x57\xdf\x37\xba\x75\xb4\xeb\x91\x34\x4f\x09\x7f\x87\x92\x5a\x7a\x21\xa5\x55\xe7\xc3\x17\xd5\xb2\xad\x55\x6d\x0d\xeb\x50\x9a\xc0\xfc\xfb\xd9\x71\xfc\xc6\x84\xe8\xe2\xf8\xcd\x8f\x0a\xc6\xc2\x77\xa6\x05\xd0\x07\xf2\x9d\x4e\x4a\xce\x36\x7a\xb7\xb6\x91\x59\xae\x9d\xac\x96\x55\xdc\x16\xf3\x61\x15\x8e\xd3\xa3\xb6\x4a\xf0\x80\x98\x29\x4e\x84\x74\xd7\x6f\xf6\xf9\x2c\xff\xfc\xec\x28\x6c\x69\x73\x89\x0c\x8f\xc4\x69\x45\xd0\x30\xa7\
xc3\x65\x79\x5d\xf5\x5e\xa9\xad\x8e\x8a\xb9\x02\xba\xc3\x21\x9a\xe6\x30\xd8\x9c\x54\x59\x07\x9e\x83\xc7\x26\x7c\x48\x9f\x59\x48\xd8\x79\x7b\x90\xfa\xe2\x74\x00\xed\x3e\x91\x33\x6b\x67\xea\xcd\xca\x52\x43\x37\xdb\x8b\x0a\x8e\xfa\x6e\x1b\x25\x4a\xc5\xdc\xe2\x34\x61\x92\x86\x23\xcd\x98\x3a\x8d\x33\x85\x92\xbe\x86\x3e\xf5\xc9\x2f\x6c\x69\x6d\x42\xff\x7f\x9d\x33\xe2\x47\x2c\x5c\x87\xce\x8a\x7c\x0a\xfd\x84\xb1\x17\xa8\xf7\x8f\x0c\x59\xbb\x0e\x03\xe3\x18\xa6\xc7\x73\xca\x3e\x89\x59\xce\xd2\x86\x43\x35\x53\x36\xd2\x4a\x4d\x93\x9d\x1d\x1b\x32\x1c\x9c\x60\x05\xed\x0a\xb9\x3b\xce\xaa\x0a\xe8\xee\xf9\xb1\x61\xb6\x1d\x7e\xa2\xa1\x11\x61\xeb\xcb\x19\xbc\x6f\xc2\x49\x28\x2a\x8b\x47\xcc\xb4\x82\x7b\xa8\x34\xf3\x38\x06\xa6\x85\x0e\x92\xda\x33\x34\x27\x39\xda\xcd\xd7\x3d\x40\x8d\x62\xcb\x14\xf7\x97\x88\xb7\xa3\x74\xe4\xe0\x38\x5b\x8e\x48\x42\x2f\x59\xbc\x14\x9a\xf7\x8b\xd6\x96\xec\x5f\xb3\xaa\x67\xda\xab\xcd\x81\xc3\x27\x27\xa7\xa3\x4e\x94\xc9\x9a\xca\x1c\x54\x55\xdb\xd0\x91\x21\xa9\xaf\x43\x00\xf5\x24\x5a\xaa\x5b\x43\xc2\xdc\xf8\x93\x17\x37\x64\x58\xac\x5f\x57\xf5\xe1\xd4\xf8\xe8\x82\xa8\x6f\x2f\xcd\xbf\x82\x0f\xf7\x96\x8b\x60\xc5\x10\x33\x4c\x6f\xd8\x25\x67\xb8\x89\x6d\x76\x85\xb6\x4e\x8d\x75\x2d\xd4\x3c\x39\x61\xfb\x9e\x3f\x76\x5e\xb4\x4f\x03\xd1\xb3\x96\x33\xed\xc7\x13\x3a\x85\x08\xa5\x81\x4a\x60\x02\xe3\x66\xaa\x1a\xce\x4f\x74\xf6\xdd\x62\x1a\x5b\xf8\x5a\x4f\xfd\x8d\xaf\x86\x72\xbc\x76\x4c\xe0\x95\x2a\x44\x69\x6b\x08\x13\xcc\xd8\xe2\x72\x04\x56\x2d\x7d\xd8\x18\x68\xd8\xb8\xa3\x7c\xce\xc4\xa1\x2a\x8a\x40\xdc\x5e\xda\x1e\x0c\x04\x39\x72\x7f\xd4\x75\x31\x1c\x31\x74\x97\xdd\xab\xb5\x3f\x71\xce\x7c\xbc\x56\x66\x24\x27\xf6\xc9\x7b\xf0\x51\x69\xdb\x03\xba\x17\x20\x40\x8e\xa3\xb8\x21\xa3\xf8\x09\xca\xfe\xb0\xe3\x37\xe2\x63\x85\x41\x60\x99\x82\xc7\x16\x6b\x92\x13\x58\x68\x7c\xd8\xd8\x5d\x7c\xc4\x89\x31\xc2\xec\xbf\xe6\xf7\xd1\x12\x9b\x38\x6a\xc6\xa8\x4d\x88\x2c\x17\xce\x0c\xc0\x61\x85\xe8\x2f\x96\x1c\x9f\x38\x44\x71\x95\x57\x46\x51\x60\x53\x76\x65\x83\xc9\x4c\x32\xd5\x64\x1c\x33\x78\x47\xf6\x48\x44\x65\x59\x9c\x5e\x40\xb3\xab\xae\x9c\x03\x92\xfc\x64\xab\x2b\xb2\x1f\x6b\xd8\x32\x8b\x13\x66\xf6\x32\x6a\xb2\xe6\x3d\x8e\xfa\x8f\x24\x1b\x9d\x24\x85\x72\x44\xd4\x46\x4a\x7f\x78\x37\xd2\x8a\x8a\x2b\x91\x35\x8d\xb6\x59\xf0\x97\xe0\xce\x85\xa8\x6c\xc8\xc8\x77\x12\xf5\x56\xfc\x94\xd1\xae\x59\xa1\xf0\x64\xbd\x3c\xfa\xdc\x7a\x1d\x4f\x01\x0c\xd7\x73\xb4\x8d\x72\xa9\x61\x54\x9f\xeb\xb7\x41\xa9\x3a\xb1\xeb\xbf\x0e\x0b\xa8\xbd\x48\x3c\x72\xfa\xca\x70\xa6\x92\xe4\x1b\xd1\xc7\x39\xa2\xe5\xcb\xde\x38\xbf\xf3\x7c\xb4\x8d\x15\xc8\xcd\xa1\x41\x0c\xf0\xc1\x0a\x6a\xd6\x81\x32\xab\x0d\xf9\xa3\x4a\x40\x63\x63\x92\x55\x6a\x99\xed\x49\x3e\x04\x24\xb8\xdf\x94\x5c\x5f\xd8\xee\x9c\xcb\x18\x0b\x29\x67\x99\x3d\xd8\x8d\x89\x32\xfc\x26\x68\xe2\x7a\xfc\x69\xb6\xf7\x74\x78\x43\xf2\xf7\x9b\x4a\xb3\x01\x9f\x06\xdd\x97\x54\xf6\x30\x5c\x11\x3d\x21\xac\xab\x6f\xcc\xaf\x56\x1d\x42\x9b\x51\xc2\xc7\x0c\x1a\xf9\xe0\xe6\xa1\x5f\x1a\x8d\xcc\x21\x53\x68\x02\x3d\x15\x94\x5e\x33\x41\x5a\x27\x3e\xd6\x81\x3a\x13\x1d\xa2\x85\x17\x8f\x30\x51\xd8\x71\x76\x62\xa0\x61\xa8\x96\xd5\xb1\x01\x11\x75\x62\xad\xc2\xbd\x87\x34\xea\x54\xd0\x2f\xa5\x8a\x03\x48\x01\xb9\x22\x73\x18\x6d\x55\xa4\x85\x4e\x9f\x2f\x8f\xb5\x33\x06\x9e\x38\xa9\x47\xa9\x1e\x34\x89\xe1\x76\xf4\xc1\xda\xde\x0f\x57\x01\x0d\x8c\xb5\x43\x6e\x06\x94\x4a\x12\x20\x3e\x8b\xd2\xc8\x64\xbe\xbd\x0a\x35\x6a\x94\x50\x14\x59\x2a\xb6\xbb\x7e\x0f\xe9\x88\xf6\x8e\x43\x6a\x4d\x05\x8f\xc8\xeb\x51\xb4\xc7\x35\x9c\x25\x0b\x30\x13\x50\xe3\x81\x73\xf8\x3f\x84\x44\x29\x7b\xf5\x82\x55\xf4\xda\xd5\x85\x5b\x0a\x08\xf1\xc5\x32\x52\x79
\x3e\xd9\x46\xa7\xc7\x23\xb9\x87\x55\x55\x00\xce\x74\xde\x49\xbe\xfa\x10\xe3\xee\x1c\x78\x43\xb3\x6a\x1a\x0d\x85\x26\x34\xd4\xa1\x0d\x20\x7f\x07\x7e\xfb\x9f\xab\x5f\xd4\x3f\x67\x98\xf5\x2d\x67\x3d\xca\xf9\xb0\x00\x4a\xcf\x61\x60\x5b\x87\xdc\x0e\x6f\xdd\xe6\x50\x08\xad\xb4\x40\xad\xba\x8f\x3a\x25\x1b\x57\xed\xbb\x35\x70\x1c\xe5\xea\x2d\xfd\x2a\x01\x83\x9e\x6e\x13\x71\x04\x87\xc4\x87\x9f\x12\xd4\xf2\x87\x1a\xa3\xd6\x0c\xd5\xce\x43\x48\x4c\x03\x3a\xd0\x31\x7d\x5c\x8f\xd5\x0a\x4d\x3c\x46\x52\xaa\x8c\xb6\x79\xad\xf5\x31\xa4\x53\x3d\xa4\xc4\xb6\x86\xba\xff\xa1\xf3\x0d\x0d\x38\xf3\xe4\x77\x18\xe2\xb3\xb9\x73\x16\x05\xfd\xbf\x43\xee\x92\x87\x74\x92\x91\xf5\x88\x22\x13\xc9\x24\x49\x58\x07\xb2\x4f\x00\xbe\x1d\xe6\xd6\xa6\x6a\xce\x47\x7b\x59\xb8\x6d\xca\x97\x01\x67\xf8\xd6\x1f\x44\x6e\xc1\x9b\xfd\x60\xea\xc9\x2e\xfd\xc6\xe5\x12\xcb\x0b\x7c\x5b\xb6\x11\xdc\xde\x16\xc6\x7c\xc4\x32\xa4\xa7\x1a\x9e\x84\x58\xc6\x59\x14\xdb\xb2\xfc\xd3\x74\x6b\x1e\x46\x43\xa9\x30\x71\xbb\xd3\x51\x16\x4e\xd9\x10\x7f\x4c\x60\xf6\x88\x3f\x9c\xe9\x3c\x1f\xfd\xdb\x78\x8e\xea\x91\xe6\xa2\x0e\x0c\x1c\x6c\x23\x3e\xa0\x50\xca\x48\x50\xeb\x68\x9c\xb7\xc4\x9c\x43\x72\xbb\x6b\x10\x25\x20\x01\xe2\xa3\xdb\x0c\xfc\xc6\x5f\x96\x2f\x47\xc7\xd5\x14\xd8\x92\x5e\xc9\x06\x75\x2e\x84\x72\x60\xbe\xd3\x74\x9b\x6b\x10\xe7\x81\x74\xe1\x10\xba\xca\xbd\x99\x3d\x62\x5b\x1c\x93\xb6\x06\xce\xd3\x59\x6d\x1a\x36\x07\xbe\xa8\x88\x03\xe8\x86\x09\x75\x6a\xa5\xa2\x6d\x78\xfc\xa8\xf5\x0e\x54\x0b\xac\x7b\xce\xc0\x2d\x37\x79\x4b\x94\x5f\x65\x21\xbe\xfb\xf0\xec\x59\x65\x9d\x0c\x36\x57\xa9\xf3\xe1\xcc\xc5\xe3\x26\xbe\xa2\x9a\xa9\xaf\xf9\xf3\x49\x18\x28\xa4\xfc\x1d\xae\xa7\x33\x31\x50\x87\x09\xd0\x53\x55\x58\x6c\xe4\xcf\x5a\xcb\x27\x4b\x95\x7a\xdd\xf5\x0b\x7c\xfd\xb2\x5c\x07\x40\x67\x82\xab\x1c\x95\xb8\x60\x1a\x44\x0f\x74\x4b\xe9\x0f\xfc\x6a\x03\xa9\x99\xcd\x77\xaf\x3e\x75\x1b\x1d\xe1\x8b\x8b\xc5\x84\xd5\x6f\x89\xca\x2f\xdd\xe8\xfc\x56\x83\xa8\x42\x63\x2c\xfa\x79\x0f\xb1\x0b\x89\xc8\x6f\x40\xb7\xab\x65\x88\x8d\x68\x71\x12\xb1\xe6\xba\x92\x39\x4e\x08\xb5\x46\xcc\x6d\x79\x98\x01\x75\xdf\x76\xa1\xfc\x31\xd4\x2e\x2f\x12\x01\x2f\xa2\x5b\x9a\xf8\x55\xb3\xef\x27\x47\x28\x1f\xca\x39\x02\xee\x1a\x31\x8a\xfb\xf0\x41\x0c\x7a\x54\x69\xfe\x56\xae\x7f\x76\xc2\x5f\x1e\xfd\xea\xec\xd0\x16\xc7\xb0\x23\xda\x9c\x5c\x56\x5c\xcb\x26\xf6\x4b\x9a\x0e\x5b\x65\x20\xb9\xa8\xd3\xae\xcf\x65\x69\x70\xaf\x20\x3d\x6a\x76\xf3\xff\xb9\x7a\xb3\x05\xd5\x75\xdf\x69\xf4\x5d\xb8\xfe\x5e\xca\x49\x4c\xe2\xce\xe0\xfc\x32\xc0\x86\xa7\x3f\x2a\x55\xc9\xac\xff\xb9\xd8\x1b\x77\xaf\x9e\x80\xc4\x96\x4a\x35\xa8\x25\x99\x9a\xd7\xd6\xf4\xc3\xcd\xf0\xa8\x24\xd9\xe9\x27\x78\x1b\xdd\x25\x9b\xc7\xb3\xae\xc9\x9a\x27\x99\x0f\x3e\x7f\x69\xa9\x6e\x5e\xfa\x70\x46\x00\xeb\xf4\xe1\xa0\x13\xc8\x50\x23\x86\x27\x13\x80\x5d\x12\x1b\x24\xbe\xee\x03\xb5\xd7\x3d\x88\xcd\x62\x03\x08\x89\xdd\x04\xe8\xf7\x61\x4b\xea\x61\x66\x72\x95\x76\xca\x6b\x43\x68\x6e\x97\x67\xd1\xa1\xab\x03\xa3\x43\x69\x5a\x0b\x27\x2b\x56\xa3\x7e\x7c\x88\x82\x1b\x99\x71\xaf\xb3\x9c\xd1\x9c\x8b\x28\x1c\x15\x83\x06\xdf\x57\x21\xcd\xa2\xb4\xcc\xbf\x2a\xb9\x1a\xe5\xeb\xd8\xb2\x57\xd9\x9f\xb0\x09\x7b\xd7\xe5\xa9\xa1\x61\x95\xdc\xef\x55\x5b\xe6\xf5\x41\x4f\x0b\x7b\x1f\xfe\x0b\x05\xe5\x41\x5f\x15\x4f\xca\xe4\xc6\xb9\x97\x9f\xc4\x17\xac\x04\x72\x8f\xec\xfc\x1e\x3d\xec\xce\xd5\x2a\x2d\x6b\x38\xb9\x35\x97\x75\x1d\xad\x6a\xe5\x71\x0b\x09\x84\xcc\x13\x4b\xdf\x83\xab\xec\xa4\xce\x7c\x04\xd7\x60\x64\x91\x0f\xee\xf0\x75\x7b\xc3\x5e\x9b\x64\xac\x0e\xb0\x95\xf6\x8b\xd7\x5f\x24\x9c\xd8\x41\xc5\xb6\x4d\x58\x17\xea\x51\xa3\xeb\xb1\xbf\xa8\x57\x5c\x53\x5a\x6e\x47\x11\x30\x9c\x62\xd2\xb
a\x55\x5a\x8d\xa4\x92\xce\x76\xbc\xcd\x6f\xeb\xf0\x7f\x1f\xd9\xe5\x47\x84\x96\x15\x11\x72\x63\xc3\xe0\xfe\x13\xfb\xe9\xab\xa9\x2b\xe2\x68\x99\x12\xcf\x4d\xdb\x56\x9b\xed\x71\xf6\x8b\xce\x37\x56\xb5\x49\x8e\xc8\x78\xb5\xf8\xb4\x83\x36\xa4\xb2\x4f\xee\x66\x31\xf5\xc8\xc0\x99\x49\x5c\xda\x26\xfa\x82\xd0\x37\x00\x0a\xce\x5f\x22\x5b\xf8\xe0\x77\xe1\xd1\xe1\x6e\x2c\x24\x6d\xea\x2c\x84\xfd\x8a\xb6\x23\x8e\x97\xed\xef\x16\xcb\xf8\x62\xe9\x74\xb5\xfc\xee\xf3\x2d\xc7\xf6\xf3\x65\x1d\xdf\xf0\xcb\x59\xd2\x8c\x09\xfc\x74\xa5\x36\x34\xed\xe3\xd9\x6c\xcf\x4f\x72\x34\x74\xb6\xb4\x00\xb5\x33\x48\x76\x3e\xc1\x54\xf2\xda\x5a\x20\x06\xf0\x2b\xe3\xa3\x22\xca\xae\x15\x62\x56\x47\xab\xd3\xad\xc1\x73\xea\xc0\x7e\x2f\x99\x2a\x27\x70\x8c\xf8\xf3\x37\x5d\x44\x80\x88\xc3\x67\xbe\x0c\x43\x0a\xdc\x6e\x71\xac\xd5\xbe\x8f\xfb\xcf\xc5\x7a\x6f\xbe\xff\xd2\x9b\xc7\x27\xf4\xdc\x7c\xaf\xc1\xc3\xf7\x31\xaf\x9d\x38\x8b\xfa\xed\x09\x51\xe6\x82\xdc\x32\x1d\xb5\xa6\xbc\x47\x9e\xcf\xba\xc5\x62\x0d\xe2\x5a\x33\xb7\x60\xa2\xb9\x37\x68\xa3\x77\x24\xdc\x8a\x10\x87\x54\x42\x98\xca\xe4\x78\x7f\xdf\xfd\xe5\xf5\xad\x2a\xb0\x9b\xb4\xcc\xfc\xf1\x56\x6d\xa8\x51\x1b\x6e\x9e\x79\x3e\x7e\xd6\x41\x9a\xc2\x5a\x11\x4f\x44\xad\xf2\x94\x86\x5f\xa4\x2b\x8c\xf7\x5a\x2f\x40\x34\x99\x2a\x6e\x28\xa8\xe8\x81\x76\x04\x27\xb3\xab\xef\x58\x2c\x83\x34\x38\x3a\x52\x6d\x2f\x57\x2e\xa9\x2d\x8b\x7c\x18\xbe\x31\x59\xee\x9a\xe8\xd2\xd3\x07\xa2\xb2\xa3\x49\x15\xcd\x7c\xf8\xfc\x61\x93\xcb\x0b\x02\xfa\x0d\x9e\x25\x57\x64\x67\x5b\x3b\x55\x3b\xd6\x2e\x9e\x3f\xf0\x1b\x6a\x8e\xc1\x7a\x51\x6f\x87\xfd\xc8\x8f\xc7\x3d\xf7\x24\x33\x9f\xb5\xc5\x41\xae\x59\x58\x8a\x03\x81\x7e\xa5\xf6\xb5\xb9\xe8\x25\xc2\xdb\x87\x33\xaf\xdc\xb4\xac\x8d\x8d\x4e\x5e\x7f\x4b\x98\xa4\x54\x7e\x62\x94\xdf\xab\x58\x3f\xf0\x25\x7b\x38\x94\xe3\x9b\x56\xb4\x58\x8c\x6d\x88\x21\x53\xd8\xbd\xee\x59\x50\x33\x8d\x55\x1e\x9c\x3b\x11\x34\xc4\x36\xd7\x93\xc5\x6b\x4d\xdf\x6f\x32\xaa\x91\xc7\x40\x61\xd8\x1a\xd5\xcf\x8a\x19\x06\xbb\x17\xf7\x05\xa1\xc8\x3c\x2f\xa3\x46\x52\xd0\x67\xd3\xc7\x0f\x54\x2c\x81\x02\xab\xac\x79\x96\x0f\x4f\x29\x1c\xd7\x38\x61\xfd\xa6\xf8\x74\x61\x9c\x9c\x5f\x52\x16\x8d\xcd\xa4\xfa\x99\x22\x7d\xd3\x2e\x9b\x90\xf7\xcb\x76\x6d\xf6\xb3\x71\x06\x67\xf5\x9c\xf1\xe4\xff\x52\xe0\x7a\xb0\xc2\xfc\x39\x60\xfa\xdd\xf4\x16\x06\x33\x31\xe7\xd5\xbf\xaa\x08\x5f\x03\x69\x5e\xcd\x77\x91\x41\x32\xa6\x5c\x34\x91\x18\xb1\x3d\xd0\xf1\xd2\xba\xcc\xb8\x31\xc2\xc5\xed\x19\x3c\x9f\x67\x8a\x30\x5c\x94\x42\x1a\xcd\x87\x64\x26\x93\x60\x34\x88\x1a\xd8\xc6\x31\x40\x48\x02\x26\xc1\x9c\x97\x69\x11\x0c\x0c\xeb\xad\xf2\xde\xa6\xc6\x0c\xf3\x31\x4c\xb3\xa4\xaf\xfe\x7a\x91\x24\xb0\xd4\x0f\x19\x01\xa9\x00\xd6\xe3\xc8\xff\x5e\xfe\x89\xcf\x91\x19\x9d\x1d\x41\x8a\x2a\xe8\x23\x4f\xdf\x09\x00\x72\xe3\xc4\xed\x1c\xa3\x39\xc5\x6f\xd6\x61\x93\x6a\x5f\xdb\x1a\x16\xbd\xdf\x7e\x8b\xec\xec\xa7\xc8\xa4\xb7\xfa\x74\xaa\x92\xc8\xc9\x68\xe0\x1d\x29\x3a\x90\x9a\x57\x9e\x04\x52\x71\xa0\x96\x50\xe4\xe5\x75\xb4\xb1\x5e\x26\xd1\xfd\xb2\x56\x29\x45\x3e\xdf\xf9\x19\xe2\x44\x08\x57\x08\x87\xc2\x25\xad\x5a\xaf\xc2\xad\x1f\x1b\x03\xff\x15\x4b\xd1\xdc\x62\xfd\x60\xff\x2f\x01\x48\x96\x37\xb9\x7b\x53\xd2\xa6\xf5\xd4\x20\xe4\xa0\x17\xd2\x71\x2b\x66\xba\xec\x3b\x41\x02\x69\xd8\x6c\xe1\x35\xed\x01\x2e\x5d\x74\x53\xff\xbb\xa9\xff\x03\xe5\x88\x88\x15\x47\x25\x5b\x24\x90\x6d\x1b\x37\x1e\xe4\x73\xbd\x5b\x83\xcf\xb7\x16\x3a\x82\x81\x7b\x3e\x66\x43\xa1\x1a\xb8\x83\xb3\x76\x29\xac\xdd\xae\xbe\x4d\x63\x89\xf9\x90\x06\x44\xdb\xdd\x9c\x78\x31\xfe\xe5\x3d\x62\x11\xc8\x5a\xb5\x22\x42\x8f\xfc\x58\x41\x39\x1c\x28\xea\x66\x74\x30\x24\x1c\x32\xbf\x55\x59\x89\x73\x18\xc1\xae\x30\x
bc\x7d\xb8\xe9\xb6\x2e\x6a\x6b\xd1\xd4\x9f\x8f\xcb\x1d\xb0\x14\x3d\x3b\x1d\x9f\x6a\x81\x53\xa3\xbd\x54\x7e\x47\x3f\x23\xa6\xef\x29\xe0\x30\x7f\xe8\xb7\x71\xb6\xee\x30\xa7\xc8\x5b\xb7\x7b\xac\x84\x2b\xfa\x02\xc9\xbe\x84\x61\x88\xff\x0a\x68\xf5\x12\xfa\xd5\xb7\x88\x7d\xbb\xb6\x9b\xe3\x66\xf8\x23\x20\x5d\xcb\xcb\x51\x14\x2a\x5b\x14\x2a\x57\x0e\x4b\x04\xe9\x0b\x61\x90\xe6\xbf\x5a\xe0\x27\xb3\xfa\xed\x56\xeb\x63\x2b\x7d\x37\xfb\x4d\x51\xa2\x70\x61\xc7\x5f\x6e\x4d\xd6\xcf\x9b\x53\xaa\x0a\xa2\x4e\x2e\x1a\x04\x81\x83\xf2\xc1\xa4\xcd\xe9\x8e\x3e\xf8\xba\x35\xf3\xbd\x1a\xd2\x7f\x95\x65\xd4\xc8\xe7\x8a\x67\x64\xd7\xb0\x48\x96\x84\x64\x4e\xca\x49\x44\xa1\xa1\x07\x5a\xb4\x3f\x07\x19\x24\x47\xe5\xa8\xbb\x93\x67\x1b\xae\x56\x0d\xbd\x77\x58\xc3\x3d\x5c\xa5\x2d\x7d\x13\xfb\xbe\x1a\xec\x2d\x09\xf9\xb6\x4f\x7c\x62\xb3\xd2\x1c\xb9\xf1\x58\xa6\x36\xf6\x05\xbe\x80\x07\xb7\x31\xa4\x5b\x31\xc8\x33\x7a\xd4\xef\x5d\x7f\xcc\x99\xd2\x34\x8b\x7d\x63\x70\x06\x25\x17\x15\x33\x2d\x74\xd6\x34\xff\x0c\x09\xff\x72\x5b\xb6\x69\xd2\x72\xcb\x1a\x53\x30\xb0\x95\xad\xcd\xa3\x01\xcd\x9e\x7f\x4d\x92\x5b\x69\x14\x49\x2e\x51\xe5\xb9\x80\x3b\x51\x86\xb0\x9e\x00\x1c\xe8\x03\x70\x0e\xff\x0a\x79\xfb\xc0\x48\x40\x1c\xef\xa5\x58\xd5\xa2\xf3\x01\xb4\x06\xc9\xe3\xc4\x27\x47\x39\xbd\x0b\xa7\xf8\xb9\x0a\xd8\xee\x46\x24\x0d\x05\x96\x9f\x1d\xc0\x5c\x45\xf3\xa1\xc1\xcf\x15\x57\x3d\x2e\x6e\x04\x8f\xfa\xd1\x50\xd5\x80\xda\x13\xfe\x1d\x0c\x32\x30\xb0\x3f\xfc\x94\x07\x39\x06\x11\x11\x37\x0c\x2f\xcf\xed\x2b\x9e\xd8\xa2\x01\xb1\x9d\x0a\xed\x75\xe9\x3d\x68\xde\x57\x53\x49\xba\x5d\x80\x5e\x2a\x2e\xce\x7a\xc5\xdd\x7a\x6a\xf6\x8a\x2c\xaa\x8e\x3a\x27\x85\xd8\x23\x5a\x4a\x07\x04\xb7\x7c\x64\xca\xa9\x67\x2c\x2d\x3f\xc6\xd3\xf2\x89\xd8\xca\xdc\xc3\x4a\x1c\xa5\xf7\x47\x38\xd2\x37\xfd\xa5\xfe\x21\x09\x42\x15\x42\x21\xd9\x64\xcb\x23\xb0\x97\x2d\x0a\x31\xcc\xdc\xbd\x15\x0c\xc8\xe9\x72\xb7\x66\x3a\x13\xdc\xc1\x41\x8e\xf9\x2f\xaa\x30\x1a\xe9\xd7\xff\x63\x58\x8c\x39\x21\x8b\x7f\xc4\x43\x7b\x6d\x56\xc6\xe8\x22\x43\xb4\xe0\xe7\xac\x08\xea\x6d\xbe\xb6\xd2\xbc\xb4\xb1\x47\x4f\xf0\x7e\x1e\x4d\x44\x9f\x44\xf0\x21\xef\x0f\x16\x37\x85\x1e\x26\x4e\x81\x80\x5f\x57\xe1\x94\x65\x6f\x11\x10\xdb\x27\xac\xa4\x1c\xfa\x70\x2a\xc6\x86\xee\x8d\xfc\x00\x2b\xb7\xaa\x14\x2f\x56\xee\x84\x73\x2f\xd5\x0d\xd4\xf5\x76\x61\xd1\x0d\x0c\x18\x90\xa2\xdf\x0b\x78\xc3\x97\x90\xca\x92\xec\xf1\x64\xb4\x24\x7c\xc8\xe5\xbe\x76\x11\x6a\xb0\xea\xea\x16\x51\x28\xf5\xfc\xd4\x2c\xd4\xc8\xdb\x0f\x62\x7c\x24\xf8\xcc\x29\xc8\x93\x78\x0f\x1f\x60\x48\xab\x0d\x81\x78\x9f\xa6\xe3\xb2\x0c\xb4\xb2\x4a\x06\x94\x42\x42\x56\xa2\x73\x56\x71\x17\x11\x59\x2f\xfb\xbd\x9c\x03\xcb\x93\x02\x6c\x14\x41\xeb\xc3\x1d\xde\x7d\x43\xa5\x66\xaa\x8c\x05\xbe\x00\x9b\x7b\x5c\xe5\x68\xab\x06\x14\x7f\xf4\x1e\x3b\x8a\xbc\x7f\x8f\xdd\x25\x26\xb6\xb0\xa3\x9e\x95\x93\x2c\xcd\x40\xd1\xde\x74\xfc\xf7\x22\x40\xb8\xdb\x5e\xc0\xc7\x41\xe8\xf5\x45\xa3\x0c\x1c\x63\x50\xa1\x3a\xbb\x62\xf8\xb9\x1f\x63\xa1\x0b\x6b\xfc\xce\x67\x1d\x74\x7a\x05\x46\xe2\x67\x4e\xd2\x88\xef\x37\xfe\x4a\x11\xb3\xf0\xbd\x55\x0d\xbe\x9b\xfe\xc3\xb9\x4d\x67\x9c\x0f\x11\xc4\x17\x21\x4b\xf6\xc3\x5d\x15\xe6\xf7\xc2\xbd\xee\xba\xdc\x05\x8b\xe2\x58\x20\xc0\x5c\xe2\x82\x5d\x1a\x8b\xdc\xf1\x0f\x6f\x3a\xec\xee\x20\xea\x71\x51\x87\x73\xaa\x15\x38\x42\x95\x0f\x76\x34\x09\xd0\xa2\x52\x85\xcc\xbc\x92\x62\x88\x58\x60\x4d\x80\x7d\xeb\x70\x23\xe9\x5e\x83\x60\x4d\x54\x56\x98\xf6\x08\x64\xde\xe2\xc2\x4d\xbe\x0d\xb2\xf3\xff\x2d\x7c\x23\xcf\x9c\x54\xec\x52\x2f\x07\x03\x63\xc6\x64\xc7\xc7\xbc\x39\x6d\xe1\x2d\xd4\x86\x9a\x2e\x5d\xce\x82\x8d\xdf\x01\x24\xcb\x16\xa4\x53\x13\x1c\x68\xcf\xf8\xdb\x2b\xc7\
x5b\x36\x30\x76\x35\x3f\x5c\xcf\xcc\xc9\x19\xd8\x97\xa4\xb1\xe1\x30\x6f\x38\xf1\x27\xfc\xbf\x97\xaa\x8b\x34\x54\x95\xde\x47\x0f\xcd\x81\xa8\xbf\xb5\x15\xd7\xa8\x4b\xec\x8e\x6e\x04\x7a\xf8\x3c\x96\x2b\xb6\xd7\xd2\xa6\x63\xea\xe9\xbb\x5b\x9b\x69\xb6\xfe\x35\x29\x64\xe4\xc6\x50\x89\x99\x75\x50\x5c\xaa\x62\x21\x1e\x8d\x47\x22\x6f\xb6\xb5\x7d\x71\x45\xbb\xe9\x6a\x96\x86\x63\xa0\x04\x14\x8d\x2f\xef\xc0\xe4\x5d\xf3\xe5\x1a\xc1\x77\xa8\xca\xde\xda\x14\xad\x8d\x71\x3a\x87\x73\xa7\x84\xca\xb9\x43\x8c\x9e\xc9\x25\x12\xf3\xf5\x8f\x70\xfe\x74\xb3\x76\x5f\xa4\xf0\xac\x80\x8b\x99\xae\xbe\x7a\xcf\xcc\x60\x03\x3f\x91\x7d\xaf\x06\x64\xf0\x12\x12\x47\x3c\x38\xdd\x27\x70\x44\x0e\x54\xf0\xea\x49\x4f\xe2\xc8\x1c\xdb\xaf\x23\x7b\x81\xbb\x6b\x98\x82\x9c\x37\x46\xb5\xfd\x93\x9a\x6c\xc5\x88\x04\x0c\xd0\x8c\x75\xec\xa8\xed\xfb\xbf\xff\x0a\x78\x4f\x2d\x25\xb9\xb7\x8a\x92\x3a\x68\x90\xa9\x08\xe8\xac\xa0\x08\x56\x7d\x72\x88\x09\x5b\xbf\x51\xeb\x03\xbd\x67\x11\xfb\xd4\xae\x48\x32\x07\x96\xe6\x6c\x0b\x63\x6e\x31\xf8\xdc\xfe\xd9\xaf\xe2\xc0\x2b\xe7\x72\xf0\x52\xf3\x4c\x25\xd2\x17\xd4\xc1\xfc\xd1\xaf\xc9\xce\x43\x31\x20\x6d\x67\x55\x88\x43\x73\x0a\x4c\xa2\xf8\xda\x5f\x1f\x23\xe2\x3d\x90\xe8\xdc\x54\x8a\x87\x22\xce\x80\xef\x04\xbc\x93\x35\x25\xe1\x56\xf4\x94\xf6\xea\x59\x82\x81\xfb\x94\x92\xfd\x9f\xe1\x31\xae\x78\x3d\x13\x4c\x4a\x24\x62\xcb\xd2\x81\x0e\x08\x36\xe3\x5d\x40\xc8\x62\x28\xe7\x7a\x47\x30\xb2\x4f\x53\x7d\x09\x3b\x02\x96\x22\x83\xb5\xe5\x61\x5d\xb6\x3d\x5b\x0f\x4b\x72\xcf\xa2\x43\xa3\xe7\xec\xaf\x87\x2a\x5a\x2d\xaa\x24\xe8\x7d\xe4\x8a\xf5\xe9\x7d\xc9\xfe\x23\x29\xe7\xc2\xfd\x27\x49\x9e\x58\x6e\xc6\x9f\x3a\xa0\xad\xa0\x98\xe6\x27\xc0\xb0\x6e\xec\x18\x4a\xea\xba\x69\x1b\xb1\x4a\x07\xb9\x3c\xed\x14\x4f\x1a\x4d\xbe\x43\xc0\x85\x4b\x54\x43\x44\x0c\x55\x04\x13\xb9\xcc\xa5\x0b\x31\xec\x41\xec\xd2\x4e\x6b\x3b\xe2\x1d\x9f\xa1\xa7\x8d\x6d\x08\x7a\x0b\xae\x8f\xd7\x4c\xd7\x3b\xb2\x7b\x55\x98\xd3\x26\xc4\x17\x9b\x72\xe8\x81\x6a\x86\xc8\xc6\x37\xa1\x8e\xcd\x24\x7c\x41\x55\x4a\x9d\xe9\x4c\xba\x6f\xf6\xbd\xcd\xd5\x4f\xf7\x81\x9d\xa3\x3d\x9d\x78\x1d\xb0\x53\xc5\x77\xd9\x21\xf6\xc7\xb7\x60\xf7\xf7\x6c\xb7\xd7\xb6\xd1\xe5\x6d\xcf\xd3\xb8\x66\x0a\xbf\x28\x4f\xe1\x63\x19\x0e\xa2\x79\x8e\xb0\x1b\xb5\x49\xf5\x3f\x4c\x1c\xfd\x38\x84\x23\x04\xef\xc7\x7a\x34\xf6\x32\xef\xbe\x83\x36\xb1\xf0\x92\x3c\x64\x81\xdd\x9c\x6a\xfa\x59\xaf\x3d\x49\x69\x4a\x2e\x95\xc8\x06\x3d\x8e\x46\x92\x42\x8b\x97\x9b\x9b\xfd\x22\xe2\x28\xec\x2e\x54\xe7\x37\xd3\x48\x34\x87\xac\x65\x3a\x3f\x03\x1c\x4e\x85\x18\x27\x1c\xe1\xc9\xc8\xc0\x79\xd2\xa5\xfd\xc2\x6d\x8b\x9b\xed\x6b\x45\x50\x61\xe0\x27\x82\x96\xea\x29\xf1\xc6\x84\xd1\xac\x22\xba\x30\xa4\x54\xc7\x6b\xc7\x8a\x4e\x95\xa2\x3d\x0d\x18\x98\x9f\x1d\x20\xb7\xd0\x33\xe1\x54\x4d\x83\x3e\x9d\xa5\x3c\xad\x51\xd1\xb8\xfa\xc7\x0e\x1b\x06\x2a\x03\xce\x9b\x6a\x41\x2b\xeb\x27\x7f\xb5\x78\x99\x2b\x09\xb1\x77\x80\xdb\xf9\x16\x56\x57\x46\x27\x0a\xd5\x41\x34\xc1\x77\xe8\x7b\xee\x16\x44\x11\xd6\x7e\x38\x72\xc8\x47\xd2\x6b\xd9\x05\x4f\xba\x93\x9d\x14\x8c\x72\x26\x1f\xf3\xd4\x31\xfd\x42\xc6\xfe\x99\xa5\x47\x67\x9d\x0e\x1d\xed\xee\xd3\xa0\x09\x7f\x1a\xe6\x00\x63\xbe\xde\x8c\x12\x63\xa5\xb2\xb8\x1e\xff\x68\x8d\x83\xf4\xec\xdd\x31\x1d\x75\xfc\xc9\xc4\x0b\xe1\x74\x54\xef\xb8\x91\x2a\x98\x34\x0c\xca\x9a\x06\xdc\xd9\xe9\xc5\xbe\x4e\xef\x08\x2b\x79\x87\x98\xf9\xaa\x7e\x3f\xf8\xaa\xe3\xf7\xe6\x18\x28\xdb\x2f\x79\xe5\x16\xed\x90\x9c\xc7\x79\xc6\xdf\x70\x4a\xc5\x9a\xc3\x93\x98\xe9\x7b\x4a\xd2\x3a\xa1\x1e\xa4\xd1\xdd\xfd\x9b\xc4\x40\xe9\xb3\x45\x99\x60\xbd\x1c\xe1\x13\x18\xf9\x48\x10\xb1\x43\xc4\x2d\xdc\x96\xce\xb2\x41\x24\xe9\x04\xd4\xec\x59\x25
\xd7\xb5\x91\x90\x5c\xff\xa1\x8d\x57\x57\x2b\xf0\x6e\x3a\x7f\xba\x94\x07\xdb\x01\x2e\x64\x45\x81\xb0\xdb\x68\x04\xe0\xf5\x33\x8a\x40\x62\xbd\x19\x8f\x0a\x59\x1e\x46\x00\xe3\x22\xce\x32\x82\x49\xdd\xd5\xd2\xaa\x75\x15\xf6\x5e\x56\xfe\x55\xb9\xfc\x3a\xbf\xce\x27\x9c\x3e\x42\x60\x85\xdf\x6a\x23\x08\x20\x64\x84\x10\x16\x5a\xb8\x4d\xc8\x05\x5e\xa4\x0e\xc8\x0c\x59\xb1\xe7\xc5\x79\x7e\x1d\x48\xe9\xa4\x41\x82\xb7\xcc\xb9\xe8\x26\x1a\x22\x26\x80\x7e\x99\x48\x3e\xe2\xdd\x87\x13\x8a\x6e\x7c\x8d\x93\x88\xbb\x41\xb3\x88\xe6\x77\x6a\xdb\x19\x26\x48\x3d\xcb\x2b\x54\xbf\x2a\xde\x46\xee\xea\x79\x40\xf5\xaa\x31\x56\xba\x56\x49\x14\xdc\x6b\x12\xa6\x43\x4c\xbd\xc0\xd4\x75\x69\x7c\x94\xdf\x11\x66\x2d\x08\xa4\xc1\x5a\x47\x7f\x92\x5b\xa7\x35\xe4\x3f\x96\xc4\x76\x6b\xeb\xa2\xeb\x01\x18\x8a\x50\xb5\x4b\x68\xde\xd7\xf1\x8c\x49\x08\xd4\x5b\x7e\xd3\x16\xbb\xbd\xbd\x4f\x27\x03\x93\xae\xb2\xec\x2f\x50\xa3\x8b\x5d\x0c\x30\xab\x53\x71\x58\x9b\x40\x23\x74\x9c\x6c\x4d\x22\x2f\xbf\x13\x8f\xa9\xcb\x2f\x9d\x84\xd7\xa4\x19\x4b\x96\x69\x83\xd5\x90\x2c\x25\xcf\x5f\xa0\xe5\x42\xa2\x7e\xf8\x60\xe8\x18\x85\xa3\x25\x6d\xb1\x86\x39\x60\xb7\x4f\x4b\xbc\xb1\xc3\xd2\x73\x5e\xc9\xca\x9a\x6e\xd1\xb2\x4a\x98\x37\x67\x45\x4f\xbc\xaa\x10\x8f\x57\xf8\xd2\x7a\x6a\x56\x11\xe8\x15\x5c\xb6\xf3\xa6\x95\xef\x99\x2f\xb5\x36\xbd\xc8\x2b\xe8\xc6\x27\x3e\x1e\xf9\x47\x48\x14\xf6\xb5\xd2\xac\xf0\x50\x92\x5c\x1b\x20\x31\x77\xdc\xc7\x26\xe9\x37\xa4\xdb\xeb\x6e\xc7\x35\x71\x5a\x9f\x7e\x3c\x5c\x79\x9c\x74\x5f\x05\x5e\x80\x9b\xe8\x33\x70\x6a\x31\x82\xfa\x49\xf2\x5f\x95\x55\x6e\x0d\xca\xfb\xd2\x4c\x29\x86\xbb\x7f\xc8\xe7\x85\xf9\xa8\x89\x74\xc1\x35\x3a\xef\x3e\x1c\xa1\x0f\x36\x24\x73\x55\x84\x89\x2c\x20\xff\xd2\x2f\xaf\x6b\x8c\xc9\x04\xfd\xcc\xa6\x50\xbe\x4f\x39\x45\x7e\x37\xf5\xc7\x23\x1b\x42\x90\xb6\x94\xa2\x6a\x77\x1a\x5b\x96\xf1\x97\x39\x39\xfe\xc0\xa9\x71\x89\x59\xf7\x13\xfb\xcc\x49\x46\xb6\x75\xfe\xe2\xc7\xc2\xcf\x8d\x96\xe0\xf0\xc0\xf1\xbb\x67\x39\xb5\xe5\xe6\x85\x42\xc0\x1c\xa5\x51\x16\x7d\x65\x38\xea\x2d\x04\x2b\xec\xdb\xca\x57\x86\x06\xf6\x4f\x1a\x0f\x12\x79\x67\xe0\x0c\xdb\x7d\xb4\x44\xc1\x45\x54\x51\x78\x87\x39\x54\x7d\x2b\x1d\x03\x6c\x6e\xfe\x9d\xe4\x75\xa7\x70\x3e\x19\x58\xf5\x38\x38\xd5\x71\xff\xee\x8e\x08\xf7\xc7\x5e\xab\xf2\x10\x52\xc1\x9e\xdc\x9b\xba\x28\x22\xa9\xab\xcd\xc8\x66\xfb\x51\x14\x73\x1a\x24\x87\xbd\x75\x5b\xd0\x69\xd8\x9b\xab\xef\xed\x6f\x24\xa8\x89\xba\x3b\x0e\xdb\x80\xe1\x55\xce\x3b\xa5\xf0\x78\xec\x35\x35\xfb\xe4\xdd\x4b\xfa\x8f\xcb\x1f\xde\x6d\x03\x84\xa7\xfa\xcf\xdd\x18\x80\xa8\x8a\x4b\xec\x28\x0f\x72\x20\xfc\xfe\xf4\x44\x86\x07\x01\x30\x21\x60\xe9\x08\x73\xb2\xab\x04\x52\x75\xe9\x8e\xd8\xc1\xe7\x0e\xc1\xb4\x7d\x10\x3b\x15\x52\x66\x97\x36\xa8\xe4\xf7\x54\x2f\xd4\xad\x23\x73\x15\x97\x00\xaf\xf2\x7c\x8a\x10\x71\x0f\x81\x0e\x14\x9a\xf0\x82\x21\xe1\x0b\xbc\x5a\x78\x11\x30\xea\x80\x22\xfe\xf6\x89\xe1\x7e\xbb\xb8\x6c\x5f\x1a\x51\x92\xd6\xe9\x76\x83\x89\xd8\xc6\xf8\xc0\xb5\x89\x5b\xad\xd6\xfa\x91\xd0\x72\x52\x61\xb8\x35\xe1\x0b\x62\x90\x78\xa3\x7d\xab\x8f\x36\x31\x13\x09\xef\xef\x2a\x27\x12\xb0\x25\xd8\x73\xb9\xa8\xcd\x21\x2b\xf0\xdf\xfd\x9e\xb2\x6b\x98\x37\x19\x52\x77\x83\x44\x51\xb7\x23\x91\xf3\x23\x98\xf1\x2f\xc2\xfa\xfe\xea\xef\xe4\xf3\x3b\x90\x6c\xdd\x63\x8c\xd1\xbb\xdf\x86\x25\xe8\xde\xd6\x83\x69\xde\x2e\xa7\xce\x49\x54\x77\x18\x04\x47\x49\x04\x5f\xe0\x16\x73\xdc\x35\xbb\xf1\xbb\x44\x6d\xa2\xb9\x4a\x79\xd0\x81\x56\xb7\x2a\xee\x50\xbf\x70\x9f\x07\x0f\xcf\x67\x8d\x2d\xd2\xca\xcb\xfe\xe6\xbc\xd1\xeb\x71\x67\x59\xca\x91\x84\x61\xe1\x6c\x16\xb3\x60\x96\x01\xe9\x48\x0f\x5a\x08\x1c\xe4\x1a\x65\xd8\x92\xd1\x75\x82\xaa\xf2\x87\x33\x39\x4
8\xb9\x3c\x40\x17\x21\x8c\xcf\x90\xd4\x60\x11\x85\xf3\x5d\xdf\xa6\xf9\x04\xd0\xb4\xb4\x3f\x65\x8b\xbb\xae\x59\x16\xa7\x6f\xcb\x54\x14\x43\x00\xe7\x18\x07\x8a\x2e\x4f\xea\x60\x5d\xfe\x10\xec\x76\xb2\x4c\x42\x30\xf6\xe6\x77\x4c\x97\x36\x99\x16\xc9\x7e\x03\x4d\x2a\x91\x34\x76\xa3\xf0\x49\xd6\x63\x14\x78\x08\x83\x38\x1c\x38\xff\x64\xf6\x62\x6f\x8c\xf6\x38\x7f\x43\xc1\x19\x2d\x9d\x67\x80\x39\x69\xb8\x2e\x73\x98\x5b\x43\xae\xa0\x3d\xc7\x7e\xd5\xa2\x44\x7d\xf2\x97\xce\x4f\x88\x94\xdf\x0d\xed\x3e\xaf\xdb\x76\xf0\x97\xf2\xf1\xec\x36\x9d\x49\x4f\x29\x41\x10\x18\x23\x2a\x68\x14\xb4\x6b\x77\x5c\x0b\x7a\xb5\x9a\x52\xb3\xfc\x66\x9b\x2c\x77\xc1\x12\x36\x74\x6b\x98\x77\xd8\xed\x06\xec\x5b\xe3\x97\xfd\x3e\xca\xd3\x07\x6d\x7b\xb3\x81\xb1\xed\xbd\x51\x91\x77\x7b\xc6\x4a\x7e\xdb\x53\x4b\x96\xa4\x3f\x51\x3d\x23\x3f\x3a\xc4\xb4\xdb\x67\x4c\xdc\x13\xb7\xfc\xee\x65\x40\x64\x77\x2a\x33\x63\x63\x20\x93\xe1\x36\xc8\x46\xce\xeb\x0f\xf1\x93\xc5\x82\x5a\xfb\xf5\x27\xfa\x48\xc1\xd7\xe3\x63\x58\xfc\x8e\xe2\x74\x2f\xb9\x45\x7d\xda\x55\xc7\x56\xed\xe0\x86\xe4\x86\x0a\xfc\xa7\x5a\x95\x43\xf1\x97\xd9\x51\xd9\xbd\x08\x16\x0e\x4b\x51\xa2\x7f\x2e\x45\xe5\x4d\x78\x36\x8f\xe0\x30\x1d\x95\x97\x3d\x44\xef\x07\x41\x85\xf1\x10\xd4\x89\x05\x6f\xff\x71\x49\x43\x54\x50\xf6\x1e\xd5\x67\x5a\x78\xe3\xb5\xb1\x8a\xbd\xd5\x9d\x5f\x65\x79\xf8\xd7\x91\x43\x02\x61\x7c\xea\x66\x39\xf8\x4e\xac\x05\x4f\xc9\xec\xad\x76\xe9\x25\x16\xb1\x3b\x53\x54\xd2\x2b\x2b\x30\xc9\x2e\x52\x87\x11\xfa\xda\xe4\xbe\xfd\x72\x33\x1f\x3b\x9d\xfa\xc6\xb4\xdf\xe1\xfc\xb5\x09\x05\xea\x13\x0c\x57\x65\xf6\x71\xea\x28\x3b\x82\x19\xdb\xd5\xe3\x4f\xca\xa6\xf0\x50\xdc\xf8\xe4\xe4\x74\xca\xfb\xa6\xe7\x2f\xfa\x26\x77\x4a\xd9\x65\xd3\x16\x3e\xa7\x91\x1c\x5d\x42\x71\xee\x37\x06\x1b\xb1\x49\x26\x78\x57\x8e\x3c\x10\x78\x6f\xe8\x50\xf9\x88\x1c\xe7\x59\x07\x7c\xe4\xc9\xe4\xb2\x6b\x9e\x55\x35\x6e\x89\x2b\x0f\x0d\x4f\x94\x31\xa3\xed\xdc\xee\x82\xf1\xf0\x21\x7d\x9c\x59\x82\x57\xc1\x96\xa6\xcb\x50\x84\xc7\xc2\x03\x24\xd2\xf9\xa8\x70\x55\xcb\x7c\xd0\xec\xd6\x9a\x03\xe5\x31\xef\x20\xac\x8a\x89\x7a\x2f\xa5\x72\xc5\x40\xb2\x3d\x32\xb5\x41\x4a\x55\xfb\xef\x36\xac\xb8\x48\xc8\x08\x4c\xda\x9a\x80\x90\x13\xa9\xaa\x34\xcb\x80\x56\x3b\x87\x5b\x55\xec\x32\xec\xd6\x38\xf5\xbc\xb7\x60\x6b\xff\x94\x20\xd0\x4a\xf1\xb4\xeb\xef\xae\xd3\x08\x14\x58\x96\x68\xfb\x87\x8e\xb6\x97\x4c\x20\x31\x91\xba\xc8\x05\x38\xe0\x42\xa5\x62\x72\xd5\x6c\xf3\xd6\xa6\x61\xb7\x10\x3b\x3a\x85\x1b\xce\x59\xd8\x2d\xd2\x1c\x48\x05\xb4\x67\xea\x0e\x38\x53\xd6\x50\x7a\x8a\x50\x84\x29\x8d\x3c\xbb\x06\x3d\x89\x91\xee\x6d\xc8\x3a\x72\xa7\x86\x48\x71\x3e\x5b\x64\xde\xa9\xc1\xa6\xef\x6c\x3c\xcc\x52\x0b\x2b\x7e\x3a\xd0\xf4\x2c\x9c\x27\x3d\x19\xdf\xcb\xdb\x09\xf3\xd4\x80\xd8\x6d\xa3\xdc\x28\x57\xc1\xd6\x27\x02\xc0\x7a\x44\xad\xb9\x84\x87\x05\x68\x11\x9c\x09\x15\x69\x59\x40\xce\xa4\x5c\x20\x0b\x8f\x94\x7d\xf7\xd1\xca\xa9\xbe\x2a\xf5\xbf\xce\x41\x92\x25\x6b\x76\x89\xb4\x0f\x4f\x63\xe2\xf9\x37\x25\x19\x56\x61\xa5\x00\x18\xd1\xd5\x7c\x75\xec\x25\xac\x33\xfc\xd6\x0b\x6b\x40\x0a\xd9\x7f\xce\x19\xf8\x88\x8a\xc2\x23\x3a\xa9\xae\x46\x4c\x4e\x07\x97\x78\x2a\x0a\x69\x16\x82\x0c\x45\x39\xd7\x76\xb0\xe4\xe5\x02\x3f\xf7\xe3\xbe\x59\x10\xbf\x07\x60\xbf\xaa\xd4\x02\x74\x8f\x69\x91\x96\x4c\xa5\x7c\xc7\xfd\x8e\x72\x52\x70\x12\x18\x07\xb4\x95\x78\x15\x1e\x61\x6e\x4b\xed\x37\xca\xbd\xcd\x1b\xed\xfb\x2a\xd1\xaf\x2b\xa9\xde\xba\x30\x2b\x73\xc6\x8c\x1d\x67\xbd\xb4\xad\x8b\xfc\x73\x4f\xf7\x9d\x52\x30\xc0\xba\x87\x91\xc7\xfd\x69\x64\x5b\x44\xaa\x86\x37\xcc\x27\xa8\x09\xdf\xa8\x2a\xd1\x65\xa6\x17\x29\x09\x22\xb8\xe5\x2d\x34\xf8\x2e\x11\x79\x38\x95\x2d\x30\xa1\x43\x41\x
be\xbb\x84\x32\x7b\xdb\xb7\xf7\x2a\xa4\x65\x21\xdc\xbf\xff\x3c\x01\x77\x2b\x0e\xfd\x4b\xe0\x5d\xcf\x13\x07\x76\xe6\x1f\x3f\x68\x9c\xdd\xa0\x62\x10\x0e\x20\x74\x56\x2e\x79\x95\xfa\xca\x8e\x3d\x22\x85\x6b\xa3\x52\xc0\x51\x3b\x29\x55\xa5\xcf\xd4\x60\x64\x99\x63\xe3\xd8\x3b\xb5\xfd\xc1\x91\x21\x4b\xc1\x93\xae\xf3\x16\x2d\x7c\x8d\x3b\x1b\xc0\x26\xe7\x5d\x1e\x31\x44\x5f\x1f\x1f\xd2\x02\x6e\xd2\x18\x21\xbf\x23\x2b\x65\x69\xa2\x34\xb7\x98\xf4\x53\x12\x61\xec\x0f\x2a\x2e\x82\x03\xb7\x0b\x4b\x99\x33\x09\x45\x33\x55\xa0\xb3\x93\x5f\xd8\x55\xe6\xa7\x35\x7a\x3e\xf2\x9a\x3e\x67\x84\x9d\xdd\x67\x06\x50\xcc\xbb\xdf\x6a\x82\x00\x71\x06\x99\x54\xa0\x74\xd5\x00\x2d\x98\x77\xae\x0b\x92\x31\xfb\x92\xbe\x01\x84\xf2\x67\x3c\x8f\xea\x93\xde\xe7\x72\xc7\x5c\xef\x19\xac\xaf\xa7\x70\x9b\xc0\x8a\x20\xf9\xa5\x47\xa2\x95\xce\x32\xc9\x23\xc7\x7e\x80\xea\x86\x75\xa6\xbb\xa5\x52\xe0\x7b\xe6\x9f\x00\x78\xd4\xbd\x2d\x46\x4f\x2f\x8b\x66\xec\x9f\xf6\xb2\x8a\x9f\x6c\x87\xf7\x2d\x4f\x3a\x3b\x2b\x23\x0f\xc5\x27\x49\x9d\xdd\x00\x9a\x10\x2c\x6a\x59\xad\xc3\x3e\x97\x36\x63\x93\x2d\x4a\x07\xdd\x0d\x9d\x0b\xe6\x59\xca\xf1\x60\x3c\x7a\xf0\x3d\xcb\x4d\x8c\x5f\x56\x8a\x31\xac\x05\x1a\xc2\xf0\x54\x3d\x26\xe7\x6d\xbc\x41\x8f\x1c\x34\xa1\x26\x0d\x7d\xc7\x04\x11\x23\x5f\xea\x1f\x5e\xc1\xd8\x45\x51\x99\x5a\x4e\xad\x86\x75\xd5\xee\x3e\xc2\x9d\xaa\xd2\xe0\x20\x5f\x62\x54\x97\x1d\x67\xb7\x73\x15\xdd\x11\x2d\x8f\xe3\x4d\xb4\x13\x98\xc1\x0e\x57\x0c\xe5\xec\x88\x9c\x82\xfa\x06\xf7\x62\x21\xdc\xa0\x12\x78\x9d\x0a\x99\x4f\x58\x67\xc4\x93\x38\x93\xfc\x1c\xe1\x5a\xec\xdd\x20\xf2\x46\x39\x86\x3f\xe4\xbe\xf0\x33\xd5\x3f\xe8\x10\xab\x94\xf9\xa3\xa9\x9f\x8f\xb4\x33\x7f\x05\x93\x67\x19\xa8\xd8\x81\xfb\x24\x6d\xc4\xfe\x0c\xab\x33\xe9\xa4\x4d\xa0\x63\x53\x63\xb6\x95\xb8\xc3\xf5\x79\x14\xfa\x2c\x31\x3f\x93\x22\xa3\xf5\x7a\x5a\x21\xfc\x57\x5e\x62\xa2\x8e\x8b\xbe\xca\x0e\x59\xd9\x10\xad\xe9\xbf\xb8\x25\xfb\x12\x46\xe8\x9b\xd7\x71\x88\x8e\xd1\x30\x9f\x08\x20\x6f\x46\x78\x22\xea\x76\x8c\xf1\xf8\x52\x22\x11\x11\x34\xd6\x34\x2d\xba\x57\x45\xa9\x70\x22\x45\x84\x74\x58\x5d\xfb\x6e\x66\x0b\x1c\x43\xcf\x9b\x68\x2a\x08\x54\xcd\x91\xbe\x35\x67\xb2\x89\x6c\x2b\x79\x09\x98\x95\x3f\xf2\x1f\x36\x66\xd6\xc1\x1f\x3e\x80\x67\xba\x09\x99\x0d\x53\xef\xe5\x27\x38\xc5\x69\x4f\x7b\xef\x85\x79\xd7\xb6\x18\x73\x34\xa6\xa1\xc0\x9f\x42\x28\xe2\x9e\xb6\xfc\x37\x59\x5e\xfd\x54\x4a\x63\x25\x08\x0b\x33\x69\xc4\xbd\x3f\xdc\xba\x61\x54\xde\x51\xba\x11\xf7\x7e\xfb\x92\xd9\x5a\x5e\x09\xc8\xdf\x8c\x5d\xf1\xb3\x7a\xab\xfa\x6c\xee\x02\x3e\x7a\xf4\xaf\x08\xda\xa1\xd5\x00\x14\x5c\xe0\x5d\xd5\xa1\x6e\x7f\xee\xfd\x65\xaf\x6a\x15\x09\x55\xa7\xf7\x15\x0d\x14\xf4\x2f\x9c\xbc\xab\xae\xef\x2b\x77\x07\x58\x7c\xe1\x4a\xd6\x29\x9f\xdf\x41\x0e\xc4\x5c\x47\x83\x91\xab\x9f\xa4\xd6\x73\xd7\x23\x7a\xe1\xdd\x8b\xe0\xd0\x3b\x6f\x47\x0d\x57\x3d\x82\xaf\x8c\x81\x38\x92\x8c\x57\xef\xe5\x4d\xb3\x5a\xec\x28\x64\x71\x75\x25\x9a\xe0\x72\xd0\xae\x08\xe2\xee\x10\x2b\xba\x13\xa3\x6f\x1a\xb7\xb5\x0c\x97\x94\x87\x0b\xaf\x48\xa8\x14\xfc\x33\x9f\x9a\x66\xa2\x44\x65\x81\x5f\x9f\x6f\x0f\x53\xd1\x3d\xf5\x96\x76\x88\x1f\x84\x8e\xf7\x95\x4e\xf1\xda\xa8\x4f\x1c\xc4\x1a\x51\x1c\x1d\xb4\x34\xe2\xd5\x56\xbe\xb5\x57\x1c\xa3\x57\xf1\xf2\x01\x14\x24\xb7\xca\x68\x36\x48\x32\x7a\xdd\x73\xb0\xab\x5c\x00\xad\x3e\x37\xbc\xb1\xad\x72\x27\xd6\x44\x18\xe7\x0c\x9f\xd6\x73\x0b\x67\xa0\x53\x9b\x4d\x78\xec\x63\x90\x76\x35\x4f\x1e\x38\xf0\xe2\x31\xbf\x4a\x10\x04\xc2\xab\xe6\xb4\xf7\xf4\xb5\x45\x01\x71\x45\x05\x51\xcf\x9f\x09\xa3\x27\x3c\x70\x26\x84\x29\xa3\x2c\x0d\x78\x05\x1c\xe1\x59\x77\x20\x5e\x50\x69\x69\xa4\x86\xd8\xe6\x13\x41\x60\x3b\xf9\x24\x56\xda\
xfd\x8a\x82\x3d\x38\x6d\xfe\xd4\x59\x63\xef\xc8\xde\xf3\xaf\x04\xe9\x57\x1d\x76\xe2\x1e\x24\x58\x64\xbd\xc9\x07\x5c\x1b\x98\xbd\xc2\x7a\x87\x75\x41\x38\xe7\xdb\x76\xa3\xf7\x09\xb6\x94\x94\x3b\xd8\x13\x73\x2a\xae\xc4\x98\x55\x93\x4f\x2f\x06\xca\xbe\x8b\xda\x38\x6b\x17\x21\x9b\xca\x6a\x37\x2f\x6d\xbd\x0e\xf0\x32\x6e\xce\xad\xea\x87\xe7\x11\xbb\x64\x30\x53\xed\x81\x05\x52\x51\x55\x8d\x6e\x87\xfc\x82\x03\x72\x36\x72\x0c\x28\xf0\x9a\x6a\x9e\x4f\x9a\xd5\x4e\xf9\x56\x5f\xbd\x48\x9c\x3c\xe5\x5f\x7c\x04\xc2\x20\x63\x1f\x21\xdf\x7f\x62\x35\xe3\x15\x02\x6b\x4e\xbb\x97\x7d\x47\xa8\x43\x27\xfb\x4b\xdc\xde\xe4\xd1\xc3\x56\x85\xab\x8e\x5b\x89\x5c\x9a\xc7\xb0\x1d\x1a\x3d\x28\xfb\x11\x9e\xe6\xfe\xe2\xc0\x57\xb9\xd0\xe4\x48\x4c\xca\x24\xad\x6c\x8e\x14\x9e\x2c\x28\x2f\xb7\x14\x65\xa6\x98\x73\xc4\x36\xdc\x3d\x4d\x48\x07\xbd\x8c\x83\x1d\x28\x7c\x51\x3c\x10\xa8\x17\xb1\xac\x05\x62\x82\xc8\xa7\x71\xcd\x7d\x8a\x8e\x70\x34\x8b\x9d\xfe\x28\x1a\x2e\xf5\x07\x3b\xf3\x6a\x07\x95\x3c\x17\x91\xd1\x25\x9b\xb4\x25\xfe\x16\xb4\x51\xca\x01\xee\xe3\x64\xef\x93\x20\xcd\x3e\xb9\x7f\x0d\x3b\x07\x3b\xdd\x7f\xfa\xd0\xf2\x4f\x70\x00\x44\x32\x67\x18\x68\x89\x5b\x4f\x98\x4d\x56\x38\x2d\x8e\x28\x85\xb1\x9f\xed\x53\x21\x53\x61\x5a\x4a\x57\x83\xdb\x8f\x12\x4f\x13\xa2\x2c\x8f\x26\xc9\x67\x7c\x40\x8e\x5d\x89\xc6\x1c\x48\x2a\x0a\x36\xd1\x81\x58\x1d\x02\x84\x80\x11\xa9\x50\x85\x4f\x38\x6f\x9f\x2f\x5a\x36\xa6\x61\xbf\x6d\xab\xd2\xfe\x85\x3e\xa5\xa8\x51\xb1\xba\xad\x70\x4c\x74\x5e\x71\x2b\xdb\xed\x11\x34\x69\x8c\xf9\x14\x8c\x5e\x44\x70\x7b\x01\xbd\x77\xb4\xcb\x56\x4a\xeb\x77\xad\x7d\x63\x30\x79\x4d\x44\x5a\x98\xe0\x6f\xfb\xec\x16\x38\x78\xfc\xc0\x73\x97\x4d\x31\xd8\xc8\xda\x8e\x68\x4f\x9c\x89\x75\x3e\xfe\x9f\x6c\x09\x7d\x91\x0a\x8d\x2c\x6e\x9f\x18\x03\x6c\x38\x78\x4a\x1c\x35\x24\x28\x47\xe1\x18\xd5\x76\x34\x9d\x25\x47\xb4\xac\x18\x39\x04\x17\x3b\x0d\xb4\xbc\x2e\x4a\x1a\xdc\xc3\x6a\x74\xff\xff\x65\xbf\x50\x18\xb8\x67\x6b\x5d\x19\x46\xb3\x63\x97\x20\xf4\xb0\xb9\x76\x0c\xa1\x43\xf4\x23\x73\xb6\x68\x48\xb6\xef\x63\xbc\x0b\xad\xa5\x6a\x78\xdf\xab\x0d\x94\x71\xa1\x32\x7f\xec\x28\x91\x29\xd6\xda\xfb\x8b\xa4\xd9\x71\xf8\x70\xa0\x48\x22\xad\xf4\x66\xc4\xc8\x61\xf5\x11\xa9\xd7\x47\x1f\xf1\x23\xdb\xcf\xa4\x7a\x4d\x1c\xc0\x02\x56\xf5\x9a\xe9\x1e\xb8\x23\x2d\x95\xfa\xdb\x42\x47\x58\x17\xe6\xaa\x3a\x5b\xec\xfe\xd4\xb7\xdb\xd6\xa6\x6c\x7f\xbd\xb6\x0b\x94\x63\x51\x18\xdd\x0b\x03\x2e\x62\xca\x3f\x33\xc8\x4c\x9c\xee\xf3\xfa\xe5\x9a\x7c\x2a\x39\xdb\xcb\x53\xbc\x1b\xf8\x4c\xb2\x67\x51\xd4\xd0\x58\x5b\x1b\x53\x6f\xfb\xfd\xbe\x5d\x2d\x81\x50\x8f\x0a\xcd\xc8\x92\xca\xc2\x1e\x91\xe4\x58\x7b\x73\x7e\x0c\x6e\xb0\x2e\x7c\xf5\x17\xa2\xdf\xe7\xf2\xf1\x3b\x1a\xde\x3d\x92\xcc\x05\x0b\xe2\xc9\x49\x61\x3e\x9b\xeb\x5a\xf6\x9a\xe4\xd0\xc0\xba\x9c\x94\xfe\x4a\x4d\xd7\xfc\x06\xc0\xad\x51\x33\xb4\x84\xfe\xe1\xa5\x46\x76\x48\x41\xef\x8e\xda\x65\x51\x02\x0d\xb9\x54\x42\x35\x5e\x6d\x75\x34\xef\x3d\x37\xf2\x51\x7e\xd9\x19\x29\x80\xae\x0e\x1e\x89\x26\x8a\xc1\x7a\x47\x56\xa7\xbd\x60\x82\x2f\x22\x86\xc7\x97\xd2\x89\x5a\x89\xb3\x09\x8d\x86\x64\xfd\x4c\xb2\x74\x0c\x46\x6b\x67\xad\xe2\x25\x7a\xec\x36\x6b\xa4\x90\xe8\xa5\xe9\xbd\xa9\x26\x68\x18\x2b\x0b\x77\x44\xd5\x47\xac\x23\x66\xab\x88\xbb\xe6\x03\xdf\x4d\xbf\xea\xb6\x7c\x3b\x15\xf6\x85\xa9\x11\x77\x85\x13\xbf\x9e\x5c\xac\x2d\xaa\x9e\x57\x1a\xb6\x44\x4b\xc0\x77\xe4\xcd\xc0\x89\xe9\xe0\x22\x70\x8c\x0b\x73\x7a\x61\x9b\x99\xf4\x8d\x2b\x2d\xbf\x9c\x3e\x50\xcd\xc1\x0c\x24\x7c\xf9\x96\x36\xfe\xdd\xa4\x7f\x17\x1c\x36\x7d\x11\x3e\x0f\x57\xea\x62\xda\x76\xb7\x04\xcc\x88\xd4\x25\xa6\x77\x96\x10\xa1\xc3\xf5\x78\x67\x8d\x54\x9e\x31\x06\x87\xae\x5f\x44
\xf3\x60\xb7\x27\xaa\x16\x09\xa3\x70\xf8\x75\x84\x2b\xda\x71\xc7\xeb\x75\x54\x46\x71\x1d\x31\x20\xa3\xdb\x93\x0f\x1b\xf2\x44\x9d\xa0\x02\x7f\xed\x84\xf0\x9f\x86\x82\xe4\x57\x53\xed\xcd\x5e\x6a\x5f\x04\xea\x60\xa6\xa7\x32\x48\x67\x40\xfd\xbf\xbe\x29\xf5\x9f\x64\x12\x2b\x4c\xa8\x4d\x23\x47\x33\x6b\x28\xb0\x05\x5c\x82\xdd\xa9\x85\x53\x55\x34\x78\x8a\xbf\xa9\x03\x8d\xf3\xd7\xf2\x49\x01\x9c\x04\x30\xbb\xe6\x2e\xcc\x15\x41\xc1\x94\x9f\x22\x86\xb1\xdc\xa2\xec\x56\xa2\xfd\xc4\x72\x33\xf6\x6f\xa9\xaf\x48\x82\x6f\xa4\x1a\x05\xc0\x63\x1f\x3a\xc3\x29\x65\x18\xb4\x09\xbd\xb4\x2f\xfd\xb3\x45\xf5\xe1\xf6\xe9\xf6\x13\x41\xf2\xdc\x6b\x51\x72\x39\x50\xde\xd0\x66\x69\x3c\xe7\xa3\xfc\xbb\x2b\x89\xf6\xc2\x48\x3d\xf3\x5d\x2b\x6c\x8e\xe7\xd4\x7f\x23\xbb\x09\x00\x4c\x58\xb1\x48\xaa\x88\x6c\xa8\xb0\xcc\x08\x71\xef\xd8\x58\x35\x57\x88\xae\x50\x62\x4d\x51\x62\x3d\xc3\x93\xe5\xe5\xe6\x42\x4f\x84\x4a\xf9\x23\xcd\x24\xc0\x4a\x73\xfb\x03\x6b\xbd\x65\x5d\xfd\xd4\x3b\xf5\x84\x51\x5f\x6c\x4c\x4f\x7b\xae\xcd\xc2\x93\x43\x6e\x87\x61\xdd\x3a\x63\x95\xa0\x23\x0f\x63\xbb\x2b\x72\xe3\x46\xfe\x42\x7f\x6e\x04\xf8\x90\x09\xfa\xe1\xa0\x71\x79\xe5\xde\xba\x2c\x16\x53\x7e\x3c\x80\xcf\x11\x29\x34\x8d\x0b\x53\xaf\xeb\x88\x41\xff\x99\x5e\x51\x22\xb5\x64\x1a\xdb\xa1\xc2\xc0\x78\x8f\xaa\x69\x0d\x7a\xe8\xf2\x54\x60\x1f\xbd\xea\x90\xbf\x79\xcb\x17\x9f\xd1\xd9\x00\x80\xec\x1d\xf4\xa3\x0b\xf8\xec\xbb\x95\x48\xe9\xd2\x46\x77\xcc\x0d\x0a\xf3\x58\xe2\xc6\xaf\xf9\x63\x5a\x11\xc5\x62\x44\x7e\x76\xdf\xbf\x90\x8c\xe3\xcf\xe3\xa3\xc8\xa0\x8f\x6f\xcc\x4e\xe3\x8f\x17\xd2\xae\x41\xc1\x40\xf6\x94\xac\x5e\xf1\x69\x39\xf3\x74\xdf\xd0\x8c\xcb\xde\x00\x45\xe8\xc6\x6d\x47\x2a\x30\x46\xa6\xf0\xc6\xb0\xdd\xe4\x08\x2a\xff\x1e\x5e\xb4\x4b\x93\xb9\x03\x10\x61\x88\xe7\x14\x57\xd9\x99\xed\x96\x2e\x22\xc4\x9c\xc9\xdb\xee\x63\xfa\xac\x82\x5b\xbd\x9b\xc6\x6c\x90\x33\x90\x85\xc2\x5d\x6b\x6c\x0e\x82\xa6\xe9\x8a\x19\x08\x08\x99\x3a\x60\xf7\x44\x3b\x49\x20\xa6\x21\x0c\x16\x6f\xc6\xae\x2b\x6a\x74\x37\x36\x2d\x6b\x30\xeb\x3c\x88\x2e\x29\xac\xa3\xf7\xa1\xce\xea\x94\x54\x61\xa5\xe1\x4f\x88\xab\x44\xa4\xec\x2e\x07\x25\x5b\x9d\xf2\x2a\x9b\xc5\x55\xa7\xf9\x9a\xac\x6d\x91\xd0\x40\x57\xe5\x72\xef\xd1\x1b\x09\x40\xf1\x27\x2a\x78\x79\xba\xb7\x33\xac\x02\xbc\x3e\x0c\xef\x1f\x40\x26\xdf\x9e\xad\xca\xc4\x6e\x1d\xfc\x57\x5e\x7b\xc8\x35\x8f\x69\x22\xf7\xef\x09\xb6\xfb\x4a\x88\xc9\x63\xf8\x2e\x64\x65\x83\x30\x22\xa6\x0d\xf7\x2f\x8c\xe6\x1d\x06\xb1\xa2\x91\x45\x88\xd5\x50\x72\x05\x07\xe8\xc5\x9b\x6e\x0c\x6b\x1f\xbb\xba\x07\x71\x73\x14\xb1\x37\x36\x44\x62\x94\x73\xe9\xf3\xf8\x24\xc2\xac\xb9\xf5\x77\x4f\x18\x15\xea\xa6\xfe\xef\x3f\x7f\x38\x27\x91\xe5\x90\x3f\xfe\x20\x57\xce\xef\x5d\x18\x2b\x33\x17\xf3\x45\xcc\x28\xab\xb8\xce\xcb\xdc\x6c\x71\x04\x25\x0f\xf7\xa1\xd2\x44\xcc\x64\xfb\x8d\xe7\xcc\x3b\x74\x88\x81\xc4\xe0\x48\xb5\x2f\x14\x54\x31\xe4\x75\xe7\x24\xd4\xfa\x57\x6d\x00\xb6\x47\xf2\x6b\xe8\x95\x30\xb0\xd1\x18\x00\x7f\xbb\x7c\xd9\xe5\xd9\x5d\xe2\xae\x05\x3b\xb2\xb8\xe2\xac\xa7\xda\x22\x09\x47\x7e\xa3\x56\xf5\x08\xfc\x9d\x02\xaf\xc5\x30\x21\x0a\x19\xd6\x34\xdb\x6f\x4d\x56\x9d\x13\x8e\x84\xf2\x6c\x41\x8f\x3d\x22\x44\xbb\xab\xb0\x76\x52\x7b\xb4\xdc\x61\x64\xd9\x2d\xce\x8c\x13\x57\x21\x2b\x11\x32\x7d\xa4\xbb\x00\x51\xfb\xe1\x54\xb9\xfb\x4f\x69\xda\x33\x0f\x8b\xa4\x77\x52\x49\x2f\xdf\xbb\x91\x46\x29\xfa\xe1\x35\xf2\x65\x65\xf8\x3e\x6a\xd7\x98\x29\x60\x05\x34\xb2\xec\x70\xd2\x72\xf5\xed\x6c\xc3\x1f\x5b\x36\x0c\xa9\xce\x6b\xa4\x3b\xee\x5b\xe4\x6a\x40\x41\x8a\x45\x3b\x54\x80\xdb\xdb\x23\x8d\x6a\x2a\xc2\x6f\xaf\x36\xde\x53\x2a\x93\x76\x2a\xab\x5d\xa2\xa1\x97\xd3\x01\x19\x46\xc0\x80\x7d\xe6\xa4\x4a\xc
6\x57\x25\x98\x20\xe7\xde\xd4\xa7\x7b\xe2\x9d\x7b\xda\xc9\xed\xe1\x85\xbe\xf6\x5f\xb4\x48\xda\x79\x4e\xc7\xa7\x45\xb2\xc3\xa9\x49\xbf\xcf\x4e\x36\xfd\x3a\x68\x78\xe5\x4c\x08\xc3\xcf\xf3\xd2\xb1\x7c\x04\x60\x73\x4c\x6c\xe4\x31\xb3\x23\x17\xa9\x6c\xf2\x1a\x62\x60\x9f\x1f\x90\x7b\x18\x49\x38\x35\xc2\x6f\xa7\xbd\x31\xdb\x27\x39\xe5\xc2\xa8\xb7\xca\x09\x01\x90\x87\xd7\xc5\x56\x35\x70\x5c\x5c\x23\xcd\x76\x63\x25\x42\x81\xc9\xc3\x85\x79\x8d\xbe\x44\xd6\x8f\xb7\x59\xb4\x55\xb5\x3d\xb7\x4b\xac\x57\x2e\xc1\x5d\xb6\x9a\xd8\x87\xae\xde\xe1\xf2\x73\x8b\x78\x60\x60\xef\xc2\x58\xc6\x83\x00\x6d\x1f\xfc\x64\x1a\xb5\x42\xe7\xa7\xe6\x0a\x83\x12\x6f\x9f\xee\x75\x9f\xa3\xa6\x79\x65\x52\x01\x96\xfa\x94\x35\x11\xe9\x7e\x4b\x89\x7e\xd0\x36\xbf\xbd\x79\x3b\x2e\xf9\x39\xff\xe3\x03\x2e\x89\x6c\xba\x49\xcf\x5b\x22\xc5\x15\x83\xcf\x48\xa7\xbb\x0f\x4f\xb5\xf0\xd2\x87\xcd\x56\xcb\xf4\x0d\x5a\xc4\x8c\x9e\x12\x8b\x3f\xa6\xcc\x72\xb7\x75\x84\x76\x3a\x8b\x4b\xcd\x26\xbd\x0a\xb6\xf1\xea\xd7\x4f\x9c\xc1\x4c\x35\x3f\xb9\xcf\x82\x1e\xf5\x70\x38\x7a\x56\x7a\x68\x0b\x33\x1a\xeb\x02\x98\x77\x21\x78\x64\x87\xb8\x9c\x94\xee\x45\x47\xa3\xb5\x7a\x97\x8c\xbe\x8e\xac\x19\x54\x20\xa2\x4f\x7b\xef\xc9\xc6\xf7\x8b\x65\xe3\x34\xba\x1e\x6d\x90\xf4\xb4\x77\xda\xb7\xbf\xe8\x2d\x7d\x02\xad\x0d\x30\x8b\xf1\x2c\x19\xf1\x70\x6f\x8c\x58\x1b\xea\x96\xc4\x14\x2e\xda\xd0\xca\xbf\xce\xb1\x03\xe0\x4b\xdd\xcf\xbd\xf0\xb6\xde\xaa\x5c\x56\x73\x7d\x8d\x44\x86\xfa\x0f\x93\xa4\x87\x14\xd9\x1f\x1d\xe1\xc0\xbb\xe0\x58\x72\x0f\x4f\xaa\xd8\xc1\x8a\xe6\xd1\xcb\x8f\x0e\xac\x3c\x37\xbc\x5e\x02\x7a\x5a\xe7\x15\x2a\xd9\x8e\x25\x7f\x97\x74\x0b\x76\x1c\xa2\x76\xbf\x3d\x31\xdd\x76\x20\x4e\x9a\x64\x7d\x93\x06\x25\x40\x73\x64\xde\xd4\x71\x17\xf2\x57\x05\x5d\x54\xef\xa3\x61\x41\x37\x3e\x64\x06\x36\x49\xd5\xe2\x47\x50\xd6\xeb\xa8\x21\x96\xd9\x7c\x93\x75\xac\xe6\xcf\x3b\x74\x10\xf9\x03\x16\x71\x6f\x53\xb2\xfb\xc5\x31\x72\x07\xbd\x96\x91\x2c\xa4\x1a\x43\x0e\x6e\x28\x20\x60\xf9\x75\x76\xc2\x0e\x90\xbd\x56\xf0\xdc\x6d\x47\xcc\xf5\x20\xac\x0c\xcd\xb2\x6f\x40\xc7\xef\xfd\x3e\x11\x20\xc9\xef\x69\x1c\x6f\xa4\x52\x65\xec\x84\xbe\x9e\xc3\xed\x1a\x3e\xac\xaf\x98\x67\xdf\xff\x3a\xa1\x1e\xb1\xa8\x6d\xcb\x52\x5f\xe8\xfe\xa2\x04\xa4\xce\xd4\x6a\x3b\x80\x86\x5e\x75\x45\x4a\x6c\x3d\x75\x2e\x21\x0d\xe0\x4a\x02\x41\x8e\x48\xaf\x3c\xf2\x28\x07\x4a\x8d\x60\xa0\x1d\xce\x9b\x26\x99\xb4\x74\x11\x99\x0b\xb9\x4e\xba\xbf\x3d\xf1\x82\x0c\x2f\x28\x9c\x15\xdc\x2d\x39\x4e\x7d\xff\x84\xf4\x5b\xea\xb9\x65\x05\xc3\x7c\x6d\x15\xea\xda\x03\x78\xf1\xc5\xf4\x9b\x96\x41\x52\xb6\x68\x1f\x0a\xc7\x35\xe9\x92\x45\xfb\x45\x13\xa0\xf6\xea\xcb\x71\x36\x3c\x19\x7c\xa3\xa9\x5b\x40\x05\x60\x76\x3c\xe8\x1c\xf8\xe6\xc9\x37\x97\x28\x6a\x67\x64\xc9\x3c\x98\xbc\x04\xfb\x16\xfc\x9d\x1e\xf9\xae\xba\x76\x02\xc9\x89\x44\x2e\x77\x9a\x4f\x6a\xa8\xa7\x7a\xfe\x58\x96\xb5\xa9\xec\x6d\x19\xfc\xcc\xda\xc9\xf0\x6a\x2a\x0b\xcb\xbb\xac\xfe\x6a\x64\x12\xde\x58\x47\xb6\xc0\x90\xc9\x09\xd0\x1b\xd3\xd0\xc9\xed\x05\x9b\x39\x6f\x2d\x1f\x5a\xad\xea\x9a\x7e\x8c\x4e\x34\x4d\xa2\xac\xf8\xae\x96\x8b\x80\x9f\xe1\x43\x8a\xa6\xed\x1b\xba\x25\x90\xf8\x16\x7a\x1f\x58\xfb\xb0\x80\x82\x99\x9e\x3f\xf9\xc1\x36\x1e\xcd\xbb\x72\xa0\x39\xcd\x9a\x99\xd0\xa4\xa7\x30\x93\x02\x3d\x45\x11\xfe\x4b\x1e\x81\x4d\xff\x3f\x36\x2f\x42\x9e\x53\x23\x4a\x87\xce\xdd\x89\xd0\x2d\x02\xb8\xab\x11\x72\x7e\xb4\x68\x8e\xa0\x63\x7a\xb7\x24\x08\x9b\xd9\x81\x3e\xe4\xb6\x3f\xc5\x6d\x51\xbe\x38\x2e\x0e\xca\xf0\xbe\x89\xdb\xe4\x27\xc9\x78\x9a\xb2\x7e\x0e\xc0\x1b\xd5\xe8\x9d\xc3\x23\xcc\xca\xc5\x2b\x52\xd2\x87\x7c\xf3\x7d\xbe\xee\x27\xa7\xd7\xee\xfb\xc6\x05\xf9\x59\x57\x84\xa2\xb9\xdc\x87\xff\x
56\xbd\x2a\x01\x34\x63\xbb\xac\xdf\xde\x0d\xe2\x39\xaf\x1c\x42\x35\x0c\xaa\xc2\xa0\xc9\x7e\xbb\xd8\x66\x30\x00\xe2\x2e\x82\x2c\x3d\x3e\x0e\xf9\x9f\x15\x77\x0b\x09\x5b\x3b\xfd\xcb\x56\x79\xc1\x7b\xea\x17\xbf\xa6\x44\xa2\xdd\xc5\x61\x56\x4d\x4c\xfd\x0e\x43\x78\xdb\x3c\x68\x94\x60\xdb\x04\x77\x6b\xaf\x6b\x36\xde\xff\x67\xcc\xab\x16\xe5\xf1\x77\x65\x10\x0f\x14\x09\x69\xdc\x06\x94\x4e\x5b\xfe\x0b\x0a\xe7\x7b\x69\x73\x2a\x5b\xa7\xb0\x72\x72\x5e\xe7\x0b\xdb\x35\x47\xe8\xd9\xdf\xc6\x0a\x87\xb9\x83\xe1\x37\x43\x6b\x3e\xb7\xdb\x07\x0a\x0f\x9a\x78\x28\x72\x91\xdb\x41\x4b\xc8\x05\x4e\xe3\xfb\x43\x58\x75\x94\x05\xc2\xee\x07\xd1\xe4\xe6\xe5\x01\x93\xa7\x1c\x54\xb6\x5f\x18\x89\xcc\x98\x81\x75\xf2\xbb\xf1\xbd\xe2\x82\x36\x52\x4d\x0a\x69\xcb\x9a\xc2\x43\x3c\x61\xef\xcf\x5a\x22\xed\xc0\xd1\x95\xf0\x80\xc5\x60\xbe\x70\x2c\x7f\x37\x16\x5c\x69\x9b\x86\x03\xcc\x82\x9b\x16\xcf\xd0\xe0\xd6\x53\xf7\xaa\xf4\xc7\x74\xde\x02\x74\x5c\xec\xf9\x20\x58\xa3\xfc\x83\x53\x46\x07\x1b\xef\x65\xeb\xd5\x1b\x8c\x85\x24\x6d\xb1\x26\x66\x78\xfb\x71\xb7\x72\x8e\x79\x12\xdf\x76\x8a\x42\xe7\xc6\x50\x99\xdb\x53\xe4\xa9\x84\x3e\xaa\xaa\x3b\xc1\xa2\x71\xc8\xcb\x15\x2b\x9f\x84\x4d\x8d\xb3\x3f\x42\x73\xba\xc9\x94\x2a\x4a\xa3\x65\x10\xfb\xa6\x8a\x49\x64\x57\x77\xf2\x23\x96\x87\x1f\xf7\xad\x4b\x68\x2d\x7c\x50\x45\xb9\x13\xd6\xe7\x76\x93\x21\x9d\xda\xc9\x56\x81\x41\x86\xff\x50\xbb\x52\xc4\x93\xcb\x2e\xe6\x91\xba\x26\x8c\x7a\x90\x14\x46\x8d\xd3\xad\x30\x28\xf8\xa7\x86\x8d\xd5\x1b\xe1\x76\xc4\x7e\x16\xb4\x5f\x25\xa2\xe9\x86\x28\x83\x8e\x86\x73\xf4\xf5\x15\xc0\x8f\x55\xec\xaa\x11\xad\xcb\x2b\xa4\xc3\x4e\x32\xf4\xb9\xe4\x37\x04\xee\x1d\x8e\x71\x61\x41\x5b\x0a\x86\x8f\xb3\x74\x34\xdb\xeb\xbc\xf2\xa3\x6d\x16\x9a\x37\x4d\xe1\xa1\xf8\x5d\x42\x45\xef\x0f\xe1\x63\x95\x0e\x71\x53\x15\x1d\xe5\xbb\xdb\x72\x29\xa7\x0d\x81\x54\x43\x7d\xa9\xb7\x53\x35\x54\x64\xb8\xfb\xc5\xc9\x40\xa2\x0f\x78\xe7\x57\x43\xaf\xe9\x9c\xc5\x31\x3d\xde\x04\x69\xa4\x70\x68\xd3\xe7\xe2\x6d\x1b\x64\x7c\xc1\x3f\x25\xe0\x8b\xe2\xb1\x4b\x89\x37\x90\x08\x87\x52\xe8\xfa\xd1\x69\xf0\x13\x79\x4b\x5c\x22\x44\x83\x66\x1b\x25\xeb\x09\x25\x2e\x91\x20\x3b\x92\x03\xc0\x72\x61\xbe\xe6\x59\xb6\x9d\x7a\x55\x24\xc8\xc0\x8b\x9f\x23\x42\xf8\x30\x02\x26\xf4\x5c\x56\x9a\x0a\xb8\x4b\x96\xec\x86\xa6\xd4\xc0\x25\xbb\xfb\xdd\xdc\xc4\x1d\xb3\xb8\xbb\x25\x58\xe0\x88\x33\xa4\x46\x0d\x42\xdd\x26\x90\xac\xdf\xe0\xe8\xaa\x0a\x9a\xe0\xde\xc7\x46\xee\x9f\xac\x0b\x57\x18\x78\xd1\x13\x4a\x20\x1f\x93\x11\xd9\xdd\xd3\x27\x11\x88\xf2\xdb\xd3\x0a\x42\x96\x1d\xb6\x70\xdb\x85\x87\x3b\x02\xd4\x30\xbd\x81\xcb\x96\xb6\xf0\x2d\xbf\x3b\xee\x00\xb0\xdd\x1e\x69\xb4\xb8\x1e\x49\xc1\xb2\x97\x06\x50\x2b\x72\x4a\x64\x5f\x84\xf9\x74\xf3\xf2\x0c\xc3\x00\x28\x8f\x88\x16\xdf\xde\xbb\xd8\xd6\x73\x25\x46\x7a\xf9\x9f\x84\x49\x96\x88\xbb\xd6\x4d\x2b\x97\x1f\xa4\x34\xa5\x80\x21\x63\x94\xfe\xb3\x1d\xee\xde\x07\xd9\x7c\x78\x5d\xca\xd7\xde\xae\x4c\xd1\x02\x70\x3a\xf2\xf3\xd4\x10\x39\xb5\xe7\xc1\x96\x49\x34\x9d\x4f\x98\xbf\xd9\x2d\x7a\x36\x23\x65\x21\xa8\x58\xb5\x08\x0e\x2f\x76\x94\xfa\x01\xfa\x8c\xf2\xe7\x8e\xa6\xff\x77\x30\x19\x63\xd1\x07\x9d\x8c\xf4\xef\x69\x6b\xb1\x5d\x23\x8a\x68\x59\x48\xfa\x21\xa2\xe0\xe3\x3a\x52\xd3\xe5\x46\x5f\x9a\x31\x8d\x9c\xd6\x3c\x6f\x6d\x3b\xe4\x29\x41\x4a\xab\x7d\x19\x28\x14\xad\x97\x3f\xf4\x5a\x1c\x3e\x49\x0d\x56\xb3\x15\xb0\x43\x5e\x06\x48\x8b\x32\xb3\x7d\x43\x0b\x05\xe5\xa9\x7a\x27\xe8\x51\x7d\xc4\x8c\xf7\x61\x90\x87\xa9\xbd\xea\x9f\xbd\x4d\xde\xfa\x14\x8c\xc9\xee\x73\x28\x15\x33\xba\xa2\x23\x47\x80\x8c\x8f\xea\x1e\x31\x48\x8f\x9a\x48\xd3\x8f\x9f\x83\x8c\xad\x08\x76\x3f\x34\x45\xd7\x3c\x9d\x24\xdb\xae\x6a\x6c\x16\
xf6\xcf\x56\xe5\x69\x25\xa7\xa0\xae\x41\x2c\x70\x27\x8f\x9c\xd7\x2e\x97\xd0\x6f\x4a\x35\x43\xd3\xea\x44\x73\xcb\x7e\xd2\x8b\x98\xf4\xf7\xd8\x6e\xa5\x5b\xd9\x7d\x3e\x19\x78\xb6\xc4\xd4\x22\x2d\x14\x09\xa0\x23\x25\xe4\xb4\x28\x4f\xfe\x13\xe5\xdf\x27\xff\xbb\x7d\x2d\x57\xb3\x6b\x2d\x8a\x63\x9f\x8a\x54\xbe\xe0\x4c\xad\x61\x74\x68\x55\x9b\x5d\x80\xa2\x1f\x82\xe0\xa4\x0e\xeb\x4a\xdb\xdc\x52\x4d\x56\xa1\x45\xf0\x4d\x17\x95\xe8\x48\x72\x36\xa0\xb9\xbb\x5d\x75\x94\xa0\x81\x42\xc8\xc6\xa9\x8c\xe2\x1e\x4e\x4a\xbd\x38\x73\x52\x6c\xa0\x15\xc1\xbe\x25\x9c\x50\x8e\x34\xfd\x27\xc9\x66\xf6\x46\x0e\xe4\x03\xad\x85\x93\x7a\x0d\xe5\x29\xec\x13\x17\x51\x47\xe7\x4e\x51\x8a\x15\x05\x2c\x9e\xea\xd4\x02\xd8\x20\x05\xf7\xc3\x05\x24\x37\x15\xd3\xb6\x54\xff\x87\xf7\x99\x07\x78\xb5\x6b\x9e\x4a\x7e\xdb\x7d\x9a\x8d\x53\x0c\x80\xb6\xa2\x6d\xe7\x96\x86\xdc\x95\xf2\x8e\x79\xdf\x11\xd8\x09\x7b\xd8\x32\xf1\x86\x45\x80\x65\x58\xc5\x7a\x2c\x54\x56\xa9\x94\xb7\x01\xc4\x63\x5f\xb6\x3a\xac\xe7\x97\x73\x47\x5a\x5a\xa6\x8a\xec\xd0\x15\xe8\x53\xa2\xc2\x02\x58\xec\xb5\xd1\x9d\x87\x6b\x56\xed\xb4\x9d\x72\xfb\x5a\x60\x7a\x95\x14\xef\x62\xa5\x9a\xc6\xef\xfb\x14\x0a\x83\x67\x8d\x32\xc9\xfd\x35\x2b\xfe\xff\x07\xc0\xc1\xff\x00\x64\xdf\x38\x9f\x3e\xa4\x30\x56\xd9\x8e\xf2\x30\x99\x49\x0a\x98\xee\x90\xe7\xd4\x41\x7c\x14\x28\x77\x02\x52\x4f\x7d\x78\xbf\x9f\x4c\x6f\xb7\x3d\x2b\x2d\xbf\xf8\xf5\x08\xda\xdc\x68\x9b\x8e\x83\x4c\x5f\x16\x74\xa1\x43\x49\x04\xe0\x26\x70\xb1\xae\xb2\x8f\x87\xb3\xed\xd6\xe8\x42\xf4\xb0\xbd\x7b\x89\xe8\x92\x17\x76\xd6\x8b\xb6\xa4\xfe\x71\xe4\xc8\xdd\xda\x11\x27\x04\xdd\x87\x6e\xf0\xe1\xee\x92\xb0\x1f\x0a\xea\xc4\x12\xda\xc1\xb9\x4b\x0a\xea\x46\x30\x6c\xb0\x14\x69\x9b\x80\xec\x4e\xd7\xe5\x0e\x80\xa8\xbd\xdf\xec\x5b\x82\x5a\x5f\xc3\x42\x7e\x09\xcb\x5d\xd8\xb4\xf9\xd5\xdc\xff\x13\xf1\xe3\xaa\x04\x4e\xbc\x60\x68\xa4\x6f\x16\x97\xa3\x47\x09\xc3\xfe\xee\x73\x88\x1d\x74\x8f\x2d\x54\xef\x57\x38\x74\x00\x47\x1d\x04\xe6\x07\xe7\xbf\xde\xf3\x02\x4d\xbb\x5a\xd5\x0a\x6a\xd3\x63\x35\xd7\x2d\x55\x63\x82\x8e\xb7\x46\x47\xd4\xd3\x5d\x2a\xe8\x12\x0e\x98\x8a\x6a\x62\x5f\x52\xc4\x6e\x17\xa1\x5c\x2c\xba\x1f\x49\xc5\x19\x08\x94\xca\x2e\x83\xb6\x2b\xde\xf9\xe0\x3e\x4d\x69\x0d\x75\x79\xe0\xf4\xbe\x29\x71\x47\x92\x79\x68\x26\x1e\x05\x45\x9e\xed\x1f\x7d\x98\x45\xf8\x5f\x78\x8b\xcd\x77\xd1\x52\xf2\xd2\x78\xd8\xf6\x9d\x46\x7e\x3c\x88\x24\x00\xa3\xfe\x05\x83\x5e\x18\x29\x6f\x5c\xf0\xeb\x9a\x87\xfc\x45\x73\xa4\xd3\x7e\x64\xb8\x47\xc4\x76\x01\x4b\x7c\x8f\xc3\x99\x4b\xc7\x3d\x8b\xee\xa3\x53\x46\x6c\x8d\xef\x5d\x65\xa0\xb3\x44\x5f\x03\x4b\xe2\xeb\x70\x7a\x76\xba\x55\x0d\x95\x9b\x97\xa0\xa1\x8d\x37\x08\x7a\x37\x79\x44\x58\x07\x2c\x39\x1f\x7a\xe1\x74\xce\x2d\xdf\x34\xee\x71\xf4\x7e\x3e\x69\xcf\x4d\xca\x20\x2d\xd9\x7a\x6b\x97\x5f\x9d\xcf\xc1\x80\xdc\xaa\xac\x21\xc8\x17\x38\x85\xfb\xd7\xc7\x11\x49\x11\x97\xe4\xb7\xab\xcc\x91\x7a\x0f\x77\x88\x4d\x66\xca\xbf\x5d\x46\xdd\xe4\x0a\x86\x09\x9b\xbc\x03\x88\x2d\xbf\x40\xc6\x8e\x6e\xfc\x15\x3e\x4a\xb3\x70\xa2\x57\x60\xd4\xc0\x73\x0e\x51\xaa\x35\x66\x83\xc6\x17\x8c\x0a\x6f\x02\x17\x40\xec\x41\x13\x8a\xc8\xa9\x85\x39\x1c\xf3\xbd\x7d\xb9\x35\x49\x48\x9f\x77\xdf\xdf\xe6\x30\x22\x9b\xf1\x37\x34\xeb\x74\x8d\x63\xf0\x4e\x9f\x7c\xcc\xc1\x9b\xfe\xb6\x56\x6c\x8b\xf6\xd1\x36\x85\xc0\x7b\xa6\x54\x24\x91\x48\xbc\xdc\x46\x0e\x97\x47\xbc\x5f\x6c\xcb\x40\xc9\x53\x14\x0f\xaa\xa9\xa4\x04\x06\xa6\x63\x3e\xbc\x6f\x63\x12\x1e\x64\xf9\xfa\x42\x3b\x56\x42\x97\x5f\x65\xb0\x0a\x71\x88\x33\x8f\x60\x6d\x58\x48\xa2\x0e\x91\xd1\x08\x49\x7e\x74\xdd\xcf\xfb\xe0\x80\x07\xf8\x37\xe1\x6f\x64\xed\xf9\xa1\xfa\xac\xe2\x2f\x23\xa9\x19\xb3\x28\x2c\x61\x85
\xe1\x7b\x5c\x3f\xab\xbd\xa1\x91\xbb\x26\xf9\x29\xdc\xbc\x3b\x25\xb2\x08\x82\x72\x15\x41\x8a\x91\xde\x92\x76\x92\x87\x6c\x63\x2b\xbd\x84\xc4\x22\xef\x0f\xa9\x09\xf7\x0b\x6b\xa1\xfe\x70\x3b\xc7\x21\x46\x79\xd2\x3c\x9e\xaa\xde\xaa\x5d\xfb\x01\x8e\x5f\xdc\x10\xe5\x6c\xd5\xcb\x48\xd6\x23\x80\x72\x90\x97\x60\x16\x2e\x06\x51\x77\xaf\x3b\x5d\x28\x6f\x89\x93\xba\x20\xfd\x77\x1a\xd6\x3b\x40\xdf\x15\x1f\xea\xd3\x9a\xf8\x68\x3b\x14\x83\xfe\xbb\xac\x14\xb1\x2e\x12\x22\xd5\xec\x22\xc3\x18\xb9\x72\xec\xee\x2e\x5d\x95\xc0\xbf\xbf\xfe\x88\x6a\x82\x50\x78\xd6\x89\x4c\xe3\xbe\x32\xc8\xd6\xfe\xa3\x64\x22\xdb\x68\xce\x70\xef\xb3\x96\xef\x5b\xc8\x48\x8a\x9d\xed\xba\xf5\x52\x5c\x53\xbc\x9b\x17\x50\x09\x81\xda\x45\xf0\xb3\x1c\x6a\xc1\x01\x60\xc3\x77\x41\xc0\xb5\xc7\xd2\xf7\xb7\x33\xa8\xe1\x27\xdc\xc9\xa4\xa3\xec\x4b\x28\xc8\x6d\x43\x12\x1c\x95\x97\xdd\x77\xa8\x9c\x22\x5e\xfd\x8e\x21\xf3\x89\x69\x6b\xf3\x92\xba\x34\x1b\xc5\xb2\x99\x4b\x83\x84\x7d\x86\x8c\x4b\x1e\xe5\xde\xf7\x1c\x48\x6c\xf5\x7d\xcb\x8a\x10\x2f\xbf\xf2\xa6\xd6\xda\xda\x3b\x16\x66\x59\x37\x11\x34\x20\x3a\x7b\xec\xec\xf4\x16\xef\xc8\x2b\x29\x8f\x0c\x70\x47\x54\xc5\x45\x1e\x42\x8d\x19\x2d\xe4\xea\xd1\xd0\xa2\xf0\xc4\xc3\x14\xa9\xed\x5e\x7c\x79\xb7\x1d\x6e\xb4\x57\x78\xbf\xc0\x83\x8a\xd2\x65\x64\x44\x64\xc9\x98\x63\x0f\x58\xbd\xb8\x85\xd4\xe4\xe1\xc5\xd6\x57\x54\x23\xfb\xd4\x99\xb4\xfa\x99\xd6\x92\xe9\xb3\xf6\x40\x4d\x44\x4d\x00\x65\xb2\x10\x3b\xdf\x14\xe3\x94\xd0\xc5\x90\xa8\xb7\x46\x4d\xa7\x57\x1b\x3e\xb5\x9c\xde\x21\xc1\xf1\x22\x62\x75\xe8\x4d\x42\x2f\x29\xf1\xa7\x8e\x4d\x48\xa4\x6f\x0a\xc4\xac\x15\xd9\x3a\xca\x3f\x6f\x62\x24\x40\x3d\xd5\x61\xce\x6f\x9f\xb2\xce\x6f\x72\x8e\x8e\x5b\x76\xd0\xf7\x4e\x55\xb5\xdf\x98\xb3\x5d\x32\x92\x8d\xf5\x93\x7c\x3d\xc5\x48\x0a\xf3\xa0\x39\x9c\xf5\xe7\x9f\xa0\x68\xce\x1a\x48\xcf\xf4\x4b\x9c\x1b\x13\xe5\x0f\xf2\x05\x7c\xea\xaf\x2a\xe1\xfe\x2f\xb7\xc9\xcd\x9f\xb8\x1a\xd3\x1d\x76\xca\xb8\x0f\x83\xeb\xb4\xd5\x3f\x51\x35\x9f\x91\x1d\x7a\xc5\x88\x65\x92\x3b\xfc\xf4\x6b\x68\xdc\x4d\x28\xcc\x1e\xb1\x83\xbe\x3f\xbe\x6b\xc2\x6b\x97\x01\x93\x72\x5f\x78\xda\xe6\x23\x5a\xc6\x33\x62\x6c\x9f\x34\xd3\x7e\x46\x50\x2b\xf8\x12\xf4\x6f\x4f\x32\x00\x87\xb7\xa3\x47\xe9\x6f\x2c\xff\xec\x59\x7c\x39\x0d\x8c\xb0\xdf\x9c\x8e\x58\xb0\x73\xbd\x23\x13\x79\x38\x9a\x75\xda\x60\x57\x78\xa0\x63\x47\xa0\xe0\x83\x0b\x09\x38\x24\xe4\x38\xd1\x36\x09\x07\xb5\xb5\x46\xe7\x94\x46\x29\xc5\x39\x3b\xf2\x40\x0d\x51\x1b\x7c\x77\x97\x2f\x5e\x95\x3f\x2f\xd2\xf3\x9a\xf8\xa5\xb6\x99\x42\xaf\xea\x1d\x98\xfc\xbf\x58\x59\xe8\xa3\xd0\x09\x4b\x18\xb7\x5d\x8a\xc5\x6c\x0b\x19\xcc\x28\x76\x41\x27\x84\xcb\x5c\xd4\xf6\xde\x6d\x1b\xae\xde\x57\x59\xa3\x3b\x48\x40\x97\xce\x7f\x69\x12\x98\x09\x70\xe0\xd8\x2c\xc3\x5c\xe9\xe2\x25\x1e\xac\xcd\x13\x6b\x94\x14\x9d\x6c\x68\x5f\x30\x1d\xc4\xe3\x10\x56\xda\xa9\xd3\xe9\xf4\xa5\x81\xcb\xa7\x49\x5e\x34\x49\xc3\x5b\xdd\xc8\xe3\x25\x9c\xa1\x7c\x33\xd6\xc5\x04\xd8\xad\x59\x8b\x45\xf2\x42\xd3\xc3\xb9\xe0\xce\xa3\x75\xdc\x91\x68\xe3\x98\x72\x03\xf1\x88\x35\xbd\x97\x4f\x1d\xcf\xfc\x7b\x1f\xca\xcc\x51\x84\xb2\x87\xaf\xba\xd3\x6f\x13\x61\x11\xac\x19\x25\x6b\x87\x79\x26\xa9\x16\x77\x17\x2d\x32\xa9\x52\x97\x2a\x33\x47\xed\xc4\x51\x4f\x57\x54\x93\x56\x06\x76\x31\x69\x84\xa8\x2f\x26\x18\xe9\x37\xcc\x48\x91\xad\x85\xc0\x62\xf9\xd6\x03\x93\x08\x4c\x86\xa3\x09\xff\x74\x04\xaa\x9f\x2d\xb4\xda\xea\xd3\x4d\x5b\x93\x9b\x51\x68\x1c\xa1\xa3\x20\x2d\x91\xea\x7e\xc8\x66\xfe\x28\xdf\x2f\x73\x8f\x65\xa9\xd4\x8b\x68\x81\x81\x69\x66\xe9\x9a\x78\x59\x1f\x56\x8e\x9f\x62\x95\x21\x9e\xe2\xe1\x7a\x5b\xee\xea\xbe\x3f\xef\xe9\xb5\x14\x8e\x2a\xec\xea\x7c\xb2\x82\xaa\x76\x7e\x9
0\x8c\x56\x67\xb2\x3b\x37\x4e\x0b\x36\x4e\x0f\x6e\x2a\x29\xd6\xfa\x95\xbf\x92\x27\x42\x91\x0d\xb1\xf6\xaf\x20\x75\x79\x56\xb2\xe2\xb8\xfb\xa7\xfe\x22\xf7\xa0\x68\xbd\xb1\x12\x20\xa5\xcb\xc6\xbb\x4d\xa7\x40\x9c\xb2\x01\xd4\x95\x88\x91\xd2\x69\x89\x9c\xf2\x07\x41\xbd\x4d\x7c\x89\xcf\x2a\x20\xef\xad\x5a\xd5\x5d\x93\x4f\xed\xc2\x1b\x3d\x67\x34\x04\x99\xed\x49\xe8\xa6\x99\x9d\x27\x30\xb7\xb7\x14\x06\x6e\x5a\x41\xe0\xeb\xbb\x68\x92\xca\x70\x8a\xa2\x7d\xa2\x6f\xe8\x74\xff\xa6\x9d\xdc\x48\x27\xf6\xd0\xcf\xb5\x19\x8f\x4e\xf9\x1d\x7e\xe5\xaa\x5d\xff\x68\x60\xd8\x79\x1c\xf1\x48\x12\xe4\x78\x13\x75\x19\xc1\x92\x63\xe1\x79\x64\x5e\xcb\x63\x8c\xd0\x9d\xa5\x16\x2b\x45\xbc\xd7\xb7\x52\x9c\xfd\x96\xe6\xa4\xa1\x8d\xb7\x41\x4e\x0d\x9f\xf3\x63\x23\x7f\x77\xd4\xeb\xee\x0a\x40\xbd\xba\x4f\xea\x63\x9e\x0e\x3c\x91\x19\x8f\xdb\x39\xc2\x02\xa3\x9c\x85\x1f\x35\xeb\xa8\xac\x79\x57\x76\xeb\x3e\xff\xcc\xa6\x11\x1d\x0c\xc5\x1a\x53\xd5\xa5\xd7\xda\xf5\x07\xec\xf2\xaa\x97\x87\xf2\x7f\xba\xf3\xb3\xf2\xfa\xb4\x15\x04\x3d\x03\xf7\xdf\x1c\x3c\xb6\x41\x6e\x70\x50\xb2\xd2\x91\x11\x3d\x11\x79\xe2\xb0\x88\x67\x81\x0a\xa3\x97\xd5\xf5\xfa\xfa\xf0\x57\xcb\x8a\x9d\x26\x6d\x5d\x1f\xfa\xd0\xbe\x6a\xca\x31\xaa\xc9\xb6\x1e\x3f\xac\xda\xa7\xfa\xbf\x5b\x7d\x50\xef\x3d\x86\xb7\xfe\xa0\x22\xc1\x4a\xce\xd7\xcb\xa4\x5e\xfc\x3e\x64\x04\x0a\x6a\xbe\xb2\xb9\xee\x7e\x8a\x3e\xff\x9e\xb8\x4d\x93\xc6\xd1\x45\x60\x5f\x57\x24\x5a\x56\x3a\xba\x55\x97\xb6\xa7\xe2\x85\xfe\x0a\x22\xf8\x7a\x12\x36\xc9\xab\x87\x76\xd0\x0f\xf4\xab\x93\x0a\xe0\x7b\xf1\xb3\xc0\x1d\x51\x05\x12\x96\xb0\x1b\x70\x8b\xc8\x08\x75\xe2\xc5\x62\xbb\xef\x2e\xeb\x54\xb0\x8c\x02\xa5\xb6\xad\x76\x0d\x1b\xc8\xb4\xd0\x1b\xf8\x75\xb4\xfb\x03\x99\x9d\x65\x09\x95\xb3\xd3\x56\x41\xb6\xa2\x6e\x51\x91\xb3\xef\x88\x77\x3b\x01\xa1\x71\xca\x0b\x6b\x2c\x16\xc2\xf0\x84\xb9\x64\xe3\xe8\x29\xca\x1a\x03\x3f\x93\x46\x83\xb6\x5f\x86\x97\xc8\xc5\xf9\xc1\xb9\x23\x4d\x92\x64\x5a\xec\xe0\xfe\x55\x75\xf8\x67\x1e\xa2\x48\xb1\xa9\x2a\x38\xde\x6e\xb3\xc6\x04\xb1\x5e\x91\x17\x11\x2d\xb3\x62\x30\x92\xb6\xc0\x25\x95\x1c\xaf\x1d\x07\x67\x3e\xf7\xd6\xd0\x24\xb6\x5c\xbc\x43\x8e\xcf\x07\x2c\x9f\x1c\x85\xd4\x38\x78\x5f\x48\xa3\x40\xb8\x94\x78\x6a\xf9\xe8\x0b\x07\x06\x7b\xd2\x17\x51\x73\x0c\xbb\x73\xdf\x47\xd1\x30\x11\x96\x94\xab\x0e\x34\x42\x0e\x46\x46\x51\x7a\x37\xdd\xc8\x5a\xe5\xee\x63\xe5\xb6\xcc\x61\x6d\x2d\xba\xa6\x15\xc9\x8a\x72\x76\x8a\x7c\x68\x32\xd7\x9c\x49\x20\xf1\x7c\xd2\x22\xd4\xa0\x0f\xc1\xc8\x9a\xd8\xbb\x81\xbe\x1b\x64\x3c\x1f\xe4\x6a\xa4\x3b\xfd\x4a\x62\xa1\x99\x49\x09\x10\x56\xe9\x8b\x91\x8f\x1d\xc2\xff\x04\xe0\x23\x56\xae\xb2\xa1\xc1\x36\x4c\x18\x80\xc2\x6b\x3b\xe3\xfd\xd1\xed\x6b\x38\x4a\xf1\x5d\xee\x0f\xac\x6c\x2f\x39\xff\x72\xc4\x1a\xfd\xa9\x00\xb7\x46\x2b\x48\x68\xd0\x04\x68\x38\xfb\x0f\x9f\xe4\x08\x7a\xa5\xed\xb2\xb1\x08\xe6\x70\x91\x9c\x75\x92\x22\x73\x22\x20\x35\x35\x8c\x6d\x72\x6a\x67\x58\x10\xed\x6d\x42\x43\xed\x3b\xf6\x4b\x99\xc9\xe6\x41\xbc\x79\x44\xab\x2c\x9c\x82\x20\xa2\x4d\xf4\x93\x1b\xb5\x9b\xb8\xc3\x50\x2a\xc8\x84\xd5\xea\x5d\x09\xab\x0b\x0d\xf0\x1e\x3e\xfb\x9d\x05\x83\x0e\xa2\x4b\xbb\xb1\x33\x05\x03\x02\x9f\x6d\x77\x25\x78\x8a\x19\x27\x47\xa7\x43\x94\xc3\x43\x58\xb9\xd9\x26\xb7\x38\xa1\xd5\xea\xd0\xab\x0d\xe4\x6c\x8f\xbb\x28\x36\x42\x52\x32\xa7\xae\xf5\xe8\x38\x7e\x29\xa9\xb9\xcf\x1e\xa2\xe6\x1f\x9d\xa8\xb8\x5d\x33\xe9\xe8\x14\xe2\xde\x29\x4a\xc4\x0a\x46\x2f\x08\xba\xa2\x69\x2c\x4c\x07\x35\x0b\xb4\xf2\x51\x6c\xb5\x4b\x69\xd9\xe9\xdc\x54\xea\x7a\x66\xbf\xf3\xd5\xfe\x8b\xd0\x16\x1e\x6b\x4e\xbb\x6a\xfe\xfa\xb0\xfe\x92\x2f\x98\xff\xa3\x5b\xf8\x30\x87\x08\xfb\xde\xa7\x84\x0d\x9f\xe0\xc5\xfa\xd7\x
17\xeb\xb3\x02\x25\x82\xad\x17\xa9\x0b\xae\x5c\x94\xd9\x0a\xfd\x61\xde\xad\xdf\x4f\xbf\xcd\x6c\xce\xe1\xdb\xa7\xe6\x09\xec\x36\x1f\xd0\x36\x61\xc6\x55\xc3\x41\x29\x3c\xa8\xac\xbc\xa2\x23\x34\x3a\x26\x55\x88\xb8\x3a\xbc\x32\xfb\xb2\xec\xc0\xee\xa6\xcd\x4d\xad\xc3\x59\xb9\x2f\x95\x55\x97\xb5\xed\x50\xda\x98\xd2\x2d\x03\xdb\x97\x7c\x59\x88\xc6\xdb\x63\x7a\x4a\x44\xb4\x84\x89\xe7\x99\x1c\xac\x3c\x3e\x2d\x01\x1e\xd9\xa4\x1c\xd9\xa6\x83\xda\xc6\x66\xae\x34\xb5\x38\x4b\xae\x7b\x2d\x45\xca\x6d\x27\xb8\x6d\x6b\xfa\x96\x42\xa5\x24\x6c\x9e\x09\x5b\x12\x95\xda\x8f\x3a\x7b\xc2\x05\xec\x5b\x48\x6b\x13\x92\x6b\xbd\x30\x1c\x99\x1f\x4e\xd1\x9d\x44\x73\xf4\x70\xd0\x4d\xcc\xb6\x83\x0e\x4c\x5b\x78\x9c\xd9\xe5\x16\xf3\x9a\xa3\xcc\xf8\x8b\x90\x25\x41\x53\x96\x1b\xfe\x6b\xab\xf2\xf7\xac\xe2\x24\x08\xc9\xf9\xca\x83\x2c\x17\xee\x79\x07\xda\x13\xd4\xd6\xfe\xd1\xc6\xc0\xd1\xfe\xa5\x84\x61\xe4\xd1\x28\xb5\xbe\x77\x97\x83\x66\x55\x1d\x49\xcc\xe9\x0c\x45\x64\x50\x58\x24\xfe\x5b\x93\xdf\xbb\xcb\x4d\xcd\xa1\x73\x52\xfc\x51\xe1\xa4\x91\x55\x53\xfa\xf0\x7a\x12\xc9\x05\x94\x79\xcd\x61\xc6\xc0\x47\x13\xc5\x45\xe7\xac\x20\x28\x28\x59\xf9\x0f\xbb\xae\x97\x59\xdd\x07\xa2\x78\x0f\x85\x37\xdb\xb3\x0e\xd9\x91\xb2\x10\xac\xce\xa4\xbb\xd3\x5f\x96\x34\xc3\x36\x7c\xaf\xc7\x81\x41\xcb\x3f\xc7\x1a\xf3\x9b\xb6\x12\x50\x4c\xb4\xcc\xf2\x77\x6c\x94\xd2\xcd\x4d\x6d\x1c\x0c\xec\x74\x62\xba\xfa\x24\xe0\xbc\x11\xd3\x26\x17\x34\xb8\x03\xd2\xc3\xc7\x3b\x31\xa4\xbe\xbe\xcd\x2c\x78\x2b\xe1\xd3\x46\x1e\x0a\x06\x3e\x2d\x01\xb8\xf9\x48\x21\x0c\x78\x90\xf0\xc9\x5a\x7f\x7f\xbc\x07\x0e\x76\x38\xbc\x16\xcb\x00\x1f\x24\xd5\xae\x3c\x4c\x80\xbb\xca\x95\x82\xb3\x0d\xc0\x07\xcd\xbd\xfb\x14\x7e\x0d\x40\x8f\x56\xf9\xba\x87\x9f\x9f\x27\xf1\x84\xbf\x42\x8b\x54\xc9\xa6\x60\x54\xc4\x37\xe6\x59\x17\xba\x16\x7a\x54\xa6\xf8\xc6\x4c\xcd\x54\x63\xfb\xcc\xe2\x7a\x3b\x93\x26\x72\xad\x7e\xf2\x8a\xfd\x0e\x28\xe1\xdf\x4c\xec\x96\x7e\x36\x38\x01\x21\x50\x84\x45\xe8\x87\x0b\x2e\xbc\x3e\xcd\xab\x26\xe3\x51\xee\x92\x5d\xd2\x1f\x4a\x36\x91\x55\xb1\xd3\x03\x6f\xc9\xcd\x79\x41\xf7\x28\x02\x79\x0d\xf6\x2d\x23\xb1\x87\xd8\x50\x4c\x1b\x6b\x63\x45\xbf\xd9\xc0\xa0\xe0\x60\x9d\x7b\xb9\x6d\x88\xec\x72\xec\x95\x53\xed\x79\xd7\xb0\xd8\x79\x0b\x48\x81\x13\x2b\x2b\xd4\x23\x02\x7d\x5c\xa9\x3e\x29\x09\xa1\xab\x92\xfa\x43\x97\xde\x2c\xeb\x36\x1d\x16\x3f\x9c\xe1\xd0\x79\x71\xc7\x16\xd4\x69\xde\x8f\x97\x4b\xa4\x77\x30\xaf\x5f\x42\x5e\x93\x17\x59\xbe\xc8\xe4\x08\x03\xca\xed\x6f\x4e\x9b\x80\xe5\x49\x49\xf4\xfd\x79\x81\xbe\x5b\x34\x57\xd9\xe6\x06\x15\x50\x9f\x8e\x3f\xe8\x45\x0b\xf5\x17\x0c\xfa\xb7\x81\x42\xd3\xcd\xde\xe8\x2c\x77\x8d\xac\x27\xe8\x34\xc6\xc3\x8e\x1a\xa5\x2a\x77\x51\x77\xda\x36\x26\x80\xcf\x8e\x39\x44\x97\xde\xe1\xab\x75\x13\xe7\xb5\x5d\x6e\x0e\xbf\x7f\xed\x83\xbe\xe3\xff\x93\x22\x4e\x46\xf3\xfe\xcb\x73\xd9\x73\x56\x2d\x5c\x9d\xca\x02\x31\x7b\x04\xdb\x5b\x6f\x1a\xcc\xe6\xb5\x71\xa5\xd1\x54\x2e\xad\x1a\xb7\x9d\x79\xa7\x0f\x65\xf4\xfe\x2d\x02\xf3\x44\xc2\x7f\x64\x5b\x02\x7a\x3a\xa3\xc6\x95\xf9\x2c\x38\x3f\xfd\x14\x82\xd4\xf0\xfc\x62\xca\x11\x4e\x96\xc6\x6d\xdc\x74\x44\x0c\x31\x10\x66\x4e\x74\x0b\xdb\x9d\x78\x0c\xd8\x2d\xeb\x08\x32\xd8\xce\x3c\x2f\xee\xcd\x8b\x97\xbd\x2a\x9c\x63\x5f\x58\x87\xef\x61\xc6\x4f\x0f\x75\x96\xa3\x98\x84\x15\xd9\xcd\x92\x2b\x0f\xcd\x03\x43\x4e\xa1\x9b\xe7\x75\xbb\xa7\xe1\x5e\x58\x1e\xd7\x0f\xcb\xe3\x22\x09\x14\x8a\xdc\x4a\xc0\xe0\x3e\x1c\x1c\x87\xe9\xd7\xa5\x47\xd6\xb4\x88\x32\xfb\x23\xe6\xd0\xa2\xd2\x71\x3e\x04\x47\xfa\x28\xcd\x52\x68\xfb\xf2\x54\xd8\x46\xf2\x8d\x30\x06\x53\x41\x3c\x69\x44\x36\x0c\x52\xb9\x8b\xc7\xb2\xc4\x4d\x12\x84\x23\x24\x1a\xf8\x03\x3d\x0d\xdc\
xa9\x8e\x71\x67\xf6\x34\x7c\x7b\x3f\x48\x9b\x98\x17\x3a\xc3\xfe\xdd\xf2\xba\x76\x37\x60\xff\x59\x65\x5d\xa2\xe2\x55\xbe\x59\xa1\x8d\x03\x8c\x00\xa5\x30\xad\xef\xc6\x3e\x73\x62\xe2\x1a\xcb\x41\x59\xd7\xd5\x99\x59\xfa\x8a\x30\xb8\x2d\x61\xfa\xce\xdf\x8d\xfd\x4c\xfb\x34\x0a\xd7\x26\x7a\x43\xda\x1f\xc1\x06\x48\x55\x8b\x98\x46\x11\x5b\x83\x17\x8f\xcd\x0d\xcc\x05\xc3\xef\x71\x54\xdc\x9a\x1d\xb8\x57\x08\x6b\x9f\x32\xff\xb6\x47\x6a\xe0\x3c\xba\x55\x1b\xf7\x73\x09\x83\x75\xc4\xbc\xc3\x49\x84\x5d\x3c\x65\x72\xc3\x11\x19\x4b\x70\x77\x1f\x27\x7f\xed\x6c\x2f\xa5\x57\x75\x96\x3e\x1e\x11\x83\xb9\x65\x0c\x9e\xfb\x11\x75\xb1\xf6\xd7\x26\x29\xe8\x6f\x16\x66\xbd\xf3\xa7\x65\x26\x78\xb7\x68\xb1\x1e\xe8\xbe\xef\xaf\x25\x1c\x24\xd5\xf2\xf5\x53\xb8\x34\x23\x9d\x21\xfe\x4d\x34\x20\x68\xbb\x99\xb5\xb6\x68\xec\xd5\xb9\x92\xfb\x0c\x33\xc1\x74\xb7\xf3\x0d\x80\x2c\x39\x38\x0f\xa9\x3e\x1a\x17\xa9\xf9\xc5\xd7\x2e\x0d\x52\x42\x77\xd5\x09\x6e\x6e\x10\xbf\xa8\x32\xe8\xf2\x8f\x22\x69\x37\x8c\x94\x53\x7e\xfe\xc4\xac\x5e\x7b\x2c\xcc\xc0\xe9\x28\x92\x96\x93\xec\xc7\xce\x07\x0c\xe9\xeb\x93\xc9\xf4\xa6\x9a\xff\xbe\x5c\x17\xc9\x62\xfb\xa2\x7d\xfc\x82\x0e\xc6\x93\x21\x3a\xfa\xeb\x7e\xa3\xb9\xf9\x7a\xcc\x04\x82\x78\x67\xf1\x8f\x50\x1c\x9f\x0f\x8a\x68\x05\xe3\xbc\xa1\x5c\x66\xa5\x8d\x23\xd0\x6b\xef\xa9\x5c\x9a\xb0\xbe\x6d\xff\x69\x4e\x48\xce\x0b\x7b\x7b\x95\xf4\xc6\xda\x21\x0a\x9c\x6a\x51\xa9\x6f\x29\x5a\x76\x3b\x39\x31\x8e\xf5\xdd\x5a\xec\x26\xdf\xb5\xd7\x24\x33\x91\x4d\x6c\x0d\xf0\x77\x07\x62\x0f\x45\x20\x84\x9d\x8d\x3e\xee\xb4\xbf\x6a\x97\x3b\xce\x15\x94\x9e\x4b\x3a\x7d\xf2\xa0\xc8\x82\x9a\x53\xcf\xcd\xdb\x4a\xeb\xc0\x6b\x8f\x3b\x48\xb9\x0e\x5c\xc8\x2f\xe0\xa7\x9e\xbb\x88\x6b\x3a\x04\xd1\xb8\x9b\x91\xc7\x7f\xa2\x04\x88\xec\x25\xdc\xd4\xda\x24\xce\x60\x60\xc6\x1c\x18\x91\xa4\x0f\xf7\x4f\xe4\xbd\x00\x2f\xdd\x25\x29\xa5\x54\x00\xcf\x4e\xc9\x8d\x3d\x48\x11\x57\xe2\xf2\xd9\x23\x06\x0c\x45\x77\x54\xd8\x7e\xe3\xd7\xd3\x8d\xa8\x7c\xef\xfc\x84\xae\xe4\xf8\xc7\xc3\x7c\xab\xdd\xc2\x9d\x18\x55\x5d\x41\x21\x8e\x75\xe6\x91\xb5\x09\x95\xdd\x12\x48\x94\x4c\x7e\x5b\x6f\xb2\x51\xd7\x2a\xdb\x89\x55\x5a\x03\x47\x16\x7c\x97\x0d\x9f\x7b\x58\x0c\xa3\x16\xe1\xd6\x3b\xb7\xf9\x5d\x8f\xad\x4f\x1a\x62\xe8\x2b\x58\x99\xd3\x4c\x89\xcb\xad\xc4\xce\x3c\xde\x89\x94\xa8\xe5\x15\xa3\xbc\x24\x35\xdd\xbd\xaa\x79\x5e\x6a\x94\x6f\xb6\xb3\xb0\x5c\x07\x44\xc4\x7d\x3a\xff\x6c\x1e\x3d\xf3\xe9\xe1\xf5\xb9\x86\x76\xc9\x31\xac\x5f\xbc\xde\x11\x0e\xad\x74\xab\x9b\x89\x68\x58\x0b\x84\x77\x62\xde\x34\x6b\x9e\x65\xf4\xc6\xfe\x6d\x4e\x54\x64\xcd\x9a\xb8\xff\xdd\x27\x8b\xb2\x12\xdc\xa8\xfb\x57\x14\x4e\xf7\x28\xfe\x82\x75\xf7\x84\x2f\x04\x2d\xd9\xfd\x48\xc2\x10\x48\xfa\xe2\x81\xfe\x9c\x67\xed\x96\x6f\x76\xef\x1e\x13\xce\xc5\x12\x01\x34\xb6\x8b\x87\x88\x79\xed\x88\x0d\xdf\x8d\x13\x3a\x88\xc5\xe0\x41\xe2\x27\x97\x3d\x5e\x50\x5f\x2f\x59\x66\xd2\xe3\xcf\xdb\x3c\xcb\xc3\xdc\xf6\x1d\xe6\x0b\x3e\x83\xf5\xf2\xd4\xfc\xf1\x99\x1a\x08\x7c\xb0\x1f\x00\x84\x4f\x3d\xf2\x5b\xac\x05\x7b\xdd\x29\xaf\x39\x7b\x70\x65\xdc\xf0\x69\x3b\xd7\xe6\xa9\xe2\x01\xcf\xa4\x32\x2c\x0e\x4e\xba\x2a\xd5\xdb\xcb\x4c\x15\xed\xf0\x39\x55\x79\x23\x40\xd1\x4f\x89\xe2\xf8\x90\x0f\xec\x8a\xc4\x3a\xf9\x3a\xc2\x0c\x2a\xa4\x27\x43\xde\x79\x70\x60\xa3\x10\xcd\x11\xb2\x9d\xd3\xce\x23\x1e\x15\x50\xf5\x3f\xa8\x5c\x86\x45\xb2\xab\x98\x51\x0d\x8b\x6a\x0b\x37\x5a\xd9\xa6\x1f\xf4\xe5\xec\xeb\xe1\xcd\xa3\x5d\xfe\x23\x5f\x30\x17\x51\xe8\x88\xc0\x08\xca\xcb\xdd\xfe\x1f\x7c\xa5\xc6\xa9\x31\x40\x71\x2d\x27\x0d\xe0\x6b\xe2\x90\xc2\x66\x9a\xf6\x5b\xee\xc8\x1e\x96\xd0\x35\x0d\xc1\x99\x10\x87\xb8\x83\xbb\x0d\xb7\xff\x9f\x4b\x0b\x26\x16\x64\x98
be\x44\x48\xe5\x34\x48\xcd\x11\x61\x63\xa5\x11\x9e\x59\xe1\xd6\x7c\x7c\x04\xbc\x1c\xa7\xac\xb3\xd0\x6e\x65\x0f\x00\xa3\x08\xb2\x63\x59\x62\x0b\x02\xbf\xe7\x83\x79\xe8\xf0\xd4\x4a\xea\x11\x7c\xc5\xa7\x19\x4e\x12\x79\x8e\xf4\x33\x3e\x98\x05\xbc\x1c\x49\xf5\xb6\x9d\x44\x6e\xac\x75\x76\x2a\xa8\xa8\x96\xf3\x27\x20\xe5\x8b\x26\x59\x36\xd5\xb7\x82\xc5\xdd\xd9\x3f\x3e\x6a\x6b\xef\x9e\xde\x60\x5c\x79\x7d\x20\x9a\xed\x56\x37\x06\xac\x96\xf3\x98\x58\x12\xb8\xca\x3a\x52\xbf\xb7\xf9\x93\x62\x63\xb9\xe1\x6c\xa6\x99\x80\x5b\xd2\x06\xce\x8f\xb0\x69\x92\x52\xe4\xf7\xb3\x29\x36\x0b\xd8\x7e\x4e\xf2\x02\xdd\x23\x78\xc6\xca\x8a\x39\xd2\x65\xd6\xbc\x36\x07\x51\xb7\xd9\x3c\x03\xfc\xcf\xf9\x15\x05\xc5\x1e\x49\x33\xf7\x1d\x22\xb8\xe6\x5c\x20\xa5\x07\xea\x01\xf9\x37\x42\xcc\xa9\xf2\x29\xa1\x56\x92\x52\xcf\x16\xab\x70\x6e\xde\x1a\x53\x27\x31\x5c\x7b\xfd\xec\xe4\x21\x7f\xd8\xa5\x74\xda\x0c\xcb\x87\x65\x11\x0b\x46\x47\xdb\xc3\x7b\x5d\x30\xaf\xc1\x8c\x06\xf8\x0a\xa3\x95\x87\xa3\xfd\x22\xc6\x8c\x51\x65\xe4\x81\x39\x38\x20\x46\xb3\x73\xb3\x7a\xd9\x23\x02\x8c\x6c\xd5\xbd\xf6\xe0\x72\x03\xdf\xc1\x3d\x58\x31\xb6\x3e\x3c\x89\x23\xc0\x64\x54\x7b\x3a\x30\x68\x95\x39\x8f\xbd\x98\x5a\x23\x56\xa9\xa4\x60\x53\x0f\x36\x73\xe5\xca\xd7\xb0\x9d\xc2\x4e\xb9\x5e\xc2\x1e\xa0\xe7\x48\xba\xea\xe5\xe3\xaf\xb3\x9c\x64\x23\x2d\xa7\x0e\x3a\xed\x97\x52\x9d\x6b\x66\x45\x0c\x69\x20\x70\x4c\x50\xd9\xb2\x05\x3b\x3b\x3e\xeb\x92\xc9\xf9\x59\x52\x44\x20\x2c\x11\x3d\xb9\x24\x01\x97\x7f\xde\x93\x54\xcc\x4e\xab\xbd\x6d\xb5\x67\x51\x62\xe5\xc9\x4b\xbf\xec\xc4\x96\x8e\x1d\xd4\x69\xb8\x44\xba\xde\x20\x12\xe6\xf0\xb9\xa7\x51\x41\x3c\x3b\xf9\xd3\xb6\xff\x61\xf9\x52\x82\xc6\x34\x51\x84\x3c\x15\xba\xed\x4f\x0e\x76\xad\x9a\x11\x11\x03\x91\x3b\x6d\x44\xa6\xbc\x89\x5a\xb3\x86\x13\xd8\x8e\x85\xa9\x39\xd6\xb7\xde\x41\x56\x10\xe4\x94\xec\x6e\x19\x9a\x56\x76\x15\x2e\x9c\x64\xa1\xf9\x8b\x34\x1f\x09\xef\x30\x83\x5f\xc1\xef\x8e\x44\xb0\x09\xb6\xb6\x61\xa6\xea\xcb\xf5\x58\xef\xa9\x45\xe2\xd7\xb5\x3b\x83\x70\x32\x0a\xdc\xa0\x2b\x2a\x4d\xc0\x4e\x36\x5d\x50\xcb\xda\x2e\xc1\xb1\x82\x30\x39\x1c\xcf\xc8\x06\x64\xcd\x88\xbe\x44\x94\xb5\x63\x91\xb3\xcd\x38\xdb\xd9\x91\x1e\xcd\xc5\x41\xf1\xfa\xab\xba\x17\x88\x34\xf1\xb5\x90\x85\xd8\x83\xb2\x41\xde\x09\x22\xa0\x3a\x16\xf4\x7a\xf8\x75\x23\x54\x1e\x55\x75\x78\x40\xe0\xf1\xb6\x80\xad\x05\x4a\xe4\xff\x4f\x0d\xed\xe7\x3f\xe2\x01\x28\x06\xc3\x3e\x35\xd4\x36\xcf\xf6\x51\x9f\xaa\x80\x9f\xc0\xe5\x6f\x95\x5b\xf6\xe6\xed\xe4\x73\xe8\x22\x8b\x00\x9f\x2f\x6f\x8f\xd0\x8b\xd2\xdd\xe7\xa2\xc0\x52\x25\x96\xae\x9b\x60\xe4\x50\x9d\x00\xed\x95\x4b\xf5\x95\xc3\x7e\x61\xd5\x0e\x43\x79\xb3\xcb\x10\x05\xd8\x3f\x61\xb3\xc3\xec\xde\x34\x3e\x7a\x06\xaa\x0d\x75\xb0\xce\x0f\x7b\xe7\x3c\xed\x59\x42\xb1\x34\x12\x29\x12\xfd\x85\x93\x09\xad\x18\xee\xac\xb9\xfa\x6c\x0b\x25\xef\x6e\x4e\x07\x03\x08\xfb\x72\x75\x85\x16\x81\x44\xcd\x21\xf1\x7d\x25\x2d\x1a\x43\x3a\x43\x79\xb8\x6a\x13\xe5\x55\x18\x2b\x2d\xda\xf1\x80\x23\xef\x75\x54\xb5\x0b\xce\x5f\xa5\xa7\xfb\x8e\x8b\xd6\x55\xbc\x9c\x11\x38\xec\x49\xe4\xe1\x25\x51\x69\x3a\x0d\xf3\x4b\x16\x5c\x9b\xf3\xf1\xfb\xf5\x13\x9d\x06\xc5\x9a\xf5\x39\xde\x88\x0b\x13\x67\xc9\x0f\xe9\xd7\xe5\x48\x0d\x49\x31\xb6\x43\x8d\xbc\xe2\xf0\xc2\xb1\x65\xea\x90\xea\x70\x15\x38\xd5\xd5\xf9\x3e\xc5\x23\x1a\x68\x0f\xdb\xad\x9c\x3d\x3b\x99\x61\x80\x73\x1e\xc5\x92\xf7\x8f\x83\x77\x5e\x14\x62\x54\x4a\x10\xc4\xc5\x90\xb5\x12\xea\xc7\x3f\x3f\xa1\x21\xe6\xbd\x60\xd9\x6c\x1a\x0d\x72\xb2\x88\x67\xda\xb2\xdf\xc8\xeb\x9f\xa8\xac\x7e\x2e\x0b\x1d\xdc\x25\xfd\xe8\x62\xfb\x87\x97\x59\x55\xfe\x5b\x5e\x71\x45\xf5\x35\x7b\x9d\xcd\xf2\x2b\xaf\xca\xe8\xec\x24\xc1\x75\
xa3\x33\x02\x1e\x36\x6c\xbc\xf6\x95\x65\x2f\xce\x88\xb2\x89\x05\xcf\xb5\xb2\x93\x6f\x73\x87\x28\x33\xd6\x67\x39\x28\x58\x9e\x24\x61\x05\xdc\x35\x45\xe1\x76\xb2\x9b\x22\x6f\x8f\x54\xff\x23\x0a\x02\x9d\x20\x7f\xbc\x24\xde\x51\xa1\xc1\x76\x02\xda\x4a\xe6\xb8\x4d\x3d\x07\x67\x75\x80\x28\x44\x31\x51\x92\xd1\x13\xcd\xe5\x2f\xfe\x54\x7e\x1d\x29\x84\x9e\xa9\x1b\xd0\x5f\xc5\xf3\xbe\xa2\x95\x7c\x23\x38\xf8\xeb\xb8\xde\xdb\x47\x7f\x73\x9f\xd9\xd5\x58\xfc\x27\xf8\x04\xf3\xec\x0d\xc9\xae\xbc\x32\x6c\x57\xd8\xdc\xea\xdf\x10\xf6\x68\x20\x03\x0e\xab\xce\xc2\x8a\x23\xd4\xdc\x1e\xba\x24\xee\xbc\x0d\xc3\xec\x36\x47\xe0\x21\x7e\x52\xc5\x9a\xbe\x73\x0b\xec\x79\x4b\xd2\x6d\xe5\x59\x78\xac\x59\xd9\x66\x67\x17\x26\xf8\xd7\x29\xdf\xd6\x17\x34\x01\x5e\x8a\x15\xbd\x15\x08\x44\x75\x15\xbd\x50\x5a\x53\xdb\xec\x74\xfc\x9e\x5c\xab\xd7\x1c\x62\x2b\xe4\x3f\x4a\x3a\xb9\xc7\x6e\x19\x32\xca\x7f\x54\x94\x36\x78\xb1\xf7\xe7\x7f\xa0\xd3\xac\x3f\x92\x15\xf7\x9f\x9f\xe9\x6e\x2f\x74\xaa\x5e\xbc\xa2\xec\x3e\xf2\x97\x70\x3d\x71\x20\xac\xd3\x26\x26\xed\x83\xef\xf2\xde\x9b\xe2\x52\x71\x55\x50\x26\xdb\x47\xf1\xd3\xcf\x4b\x3a\x9e\xf0\xb0\x71\x63\x94\x54\x8a\x4e\x1f\x4a\x3a\x36\x67\x4e\x7a\xe6\xbe\x4b\x47\xa6\xf6\xf5\x9e\xa5\xf6\xb9\x60\x39\xc4\x5a\xfc\x12\x13\xc0\x8a\x38\x15\x0d\xb7\x3a\xa4\x97\x67\x63\xb3\xb7\xd2\x74\xe9\xd0\xe8\x04\x19\x6a\xde\xa3\x93\x76\x15\xc2\x64\xb6\xb1\x66\x12\x14\x24\x68\x0f\xef\xab\xb0\xe8\x00\x13\xd8\x9f\x6e\x5b\x83\x44\x62\xd2\x14\x14\xa1\x6b\x2a\x6f\x15\x99\x53\xd6\xa3\x72\x64\x75\x5c\xf4\xaa\x45\x80\x94\x5c\x81\x5b\x7c\xab\x73\xf5\xef\x89\x10\x39\x43\xb2\x95\x91\x1d\xb9\x44\x50\x81\xea\x8a\xbb\x40\x45\xa7\x7b\x6f\x0d\x99\x02\xa9\xfb\x52\x13\x83\x07\x35\x37\x87\x5f\xee\x34\xaf\x14\xce\x21\xa8\x8b\x54\xff\x28\x89\xdb\x4a\x22\xef\x82\x7c\xe2\x72\x82\x0b\xb0\xf7\x4c\xb8\xb3\x38\x29\x27\xd1\x52\x8e\x37\xef\xbb\x5d\x26\x72\xcb\xe3\x6d\x97\x0d\xf0\x47\xa5\x4e\x25\x6a\x88\x85\x58\xb5\x4f\x13\xd7\xda\xf8\xdf\x3f\x3a\x9d\xcb\x1e\x4f\xcf\xda\xe7\x79\x73\xc7\x65\xd7\x00\xbd\x90\xce\x94\x88\xd4\x82\x2f\x85\x54\x2b\xaf\x64\xd0\x31\xf2\x8a\xa8\xfe\x32\x51\x0f\x04\xdd\xed\x7d\x52\x53\x26\x5c\x0d\x50\x2a\x85\xbb\xdf\xd1\x64\x53\x47\xa1\x1c\xb5\xa1\x58\x68\xdc\xab\xec\x82\x87\xda\xaf\x19\x76\xcc\x0a\x77\x3e\xb0\xec\xd2\xce\xc5\xd5\x08\xfe\xd8\xbc\xa7\xe7\x2a\xcf\x91\x95\xfe\x91\x18\xc8\x8e\x24\x49\x92\x6c\x03\xee\x91\x61\x3c\xc7\xc2\xcb\x63\xb9\xc3\x61\xa4\x5c\x61\x5e\x54\xce\x6f\x46\x03\x9d\x1b\xf4\x66\x74\xec\x6f\x00\x5e\x81\xf4\x9d\xf3\xb0\x5e\xee\x36\x19\xd7\xa9\xbc\xce\x33\x42\xa1\x11\x9c\xb5\x84\x8e\x7f\xe7\x7c\xbf\xcf\x5c\x59\xf7\x39\x98\x57\x7b\xae\x08\x45\xf2\x91\xae\xda\x3d\x89\x94\xb6\x7d\xe8\x75\xb8\x55\x5a\x32\x6d\xee\xec\x7e\x8a\x8d\x45\x28\x0e\x80\x25\x82\x24\x63\x38\xf2\xd6\xf9\xab\x6d\x2d\xba\x45\xfe\x5a\xce\xab\x61\x57\x06\xe7\xaa\x6e\x53\xd1\xb7\xc6\x67\xdf\xe6\x85\xf3\xdd\x06\x63\x57\x0e\x82\xd5\x89\xad\x3c\x83\x11\xb6\x1c\x6d\xa1\x3c\x8f\xea\x2b\x85\xca\xf5\x4c\x11\xd5\xd5\x2e\x75\xd4\x7a\xbd\x34\x75\x88\x2d\xcf\x11\x3b\xea\xdb\x3d\x0e\x87\xa2\xb4\x09\x10\x12\xe2\xb5\xac\x50\x60\x00\xf8\x96\xbe\x73\x3a\x59\xd8\x55\x49\x2a\x22\xe8\x1c\xff\xe4\x8a\xaa\x30\xd8\x60\x67\x78\xfe\x63\x19\x66\x07\x89\xe0\x14\x0a\x43\x3d\x66\xdc\x9b\x6a\x38\xfd\xfb\x0e\x13\x57\xa9\xb5\xf2\xdb\x5f\xab\x17\x51\x04\x62\xd7\xe2\xfd\x45\xaf\xb5\xca\x2e\x7a\xd8\x25\x7d\x33\xc2\x48\x22\x62\x62\xa7\x79\x0a\x1c\x84\x59\xc4\x01\xb2\xfc\xaa\xf3\x74\x93\xb7\xb8\xfe\x43\x21\xc0\x52\xdf\x52\x25\xdc\x75\x40\xc3\x9d\x7e\x04\x6e\x91\x73\x4a\x72\x31\x46\x93\x73\x11\x22\xee\x16\xcd\xf2\x6a\x5b\x38\xbd\x59\xc9\x76\xa8\xfd\xe4\x00\xdd\x32\xab\x6c\x3b
\xa5\x9b\x84\x64\x36\x85\xaf\x8a\x7d\xe5\x4e\x7f\x41\x33\x49\x35\x1c\x55\xb3\x7f\xf9\x08\x1a\xd8\x19\xdd\x9f\x58\x5e\x66\x49\x47\x27\xf7\xd5\x98\x00\x40\xb2\xb9\x84\xdc\x1b\x3a\xa8\xd8\xef\x37\xf2\x27\x06\x41\x4c\x75\xe6\x74\xd2\xd2\x03\xab\x8f\x6a\xac\x22\x68\x7b\x0a\xb3\x21\x54\x70\xa2\x86\x9d\x2c\x35\x3c\x75\x95\xcd\x2a\xb1\xa9\xa6\xac\x84\xe9\xf0\xd0\x63\x01\x96\x23\xc2\x12\xa5\xdd\x5b\x4a\xdb\x88\x29\x4b\x57\x96\x14\x69\x92\x4e\x6d\xfa\xa7\xaa\xb6\xba\xfe\x29\x61\xcc\x64\x93\x0a\x1b\x4f\x75\x8e\x7a\x6c\x97\x6e\xa2\xee\x3f\x86\x18\x89\x53\xe3\x29\x9b\x95\x33\x54\x24\xce\x0b\x63\xd1\x96\x44\xc2\x1c\x11\xdb\x4c\x4b\xbe\x12\xd0\xd1\x58\x5a\x3f\x2c\x5c\x78\x6d\xe4\x1b\x33\x18\xbb\xc8\xa1\xa5\x74\xf2\x72\xd9\xd5\x9f\x1b\xb1\x33\xba\x38\x18\x64\xe8\x12\x2a\x60\xd8\x49\x91\x47\x96\xc3\xb7\x1b\x96\xd2\xd1\x0c\x0b\xe1\xc0\xa8\xb2\xd6\x4a\xb8\xaa\x7f\x88\x5d\x5c\xb4\xd5\xd6\xc1\xce\x00\x6a\x34\xa2\x2a\x7c\x42\xdc\x13\x66\xd1\xef\x55\xb5\x9d\x26\x27\x9c\x55\x51\xd8\x31\x34\x55\x36\xd0\xe9\x11\x75\x5d\x69\x23\xda\xc1\xec\x73\xbc\x70\x56\x3b\xc1\x06\xb2\xf9\xa3\xff\xd3\x33\x35\x27\xe9\x14\x7a\x97\x27\x5d\xff\x9f\x20\x3c\x68\x34\x9f\x22\x98\x3f\x94\x48\xaf\xec\xb4\x5f\x7c\xf4\xf0\x81\x03\x09\x06\xa8\x8e\xee\x34\xb0\x0c\xdc\xf7\x28\x0d\x97\x30\xa0\x66\xbb\xad\x46\xec\xcf\x50\x3b\xbf\xfe\x87\xf2\x9f\x02\xd5\xf6\x4d\xa8\xe9\x50\x86\x51\xbd\xb6\x4b\x22\xdb\xa1\x39\xa5\x0f\x33\xf0\x2b\x9a\x0d\xda\x4f\xc4\x5c\x8f\xf9\x1f\xdb\xbf\xd9\xf1\x48\xba\xce\xfc\xd4\x1c\x43\xf6\xde\x32\x4a\xc9\x8b\x24\x36\xb7\xd6\xf0\x8c\x61\x52\xd7\x52\xb4\xfa\x32\x25\xc9\x43\x7a\x29\xe3\x63\x1e\xd5\xa8\x1b\x86\xc4\xbd\x5c\xff\x45\xc4\x85\x90\x26\x77\x0f\xa5\x9a\xb9\x76\x1d\x1b\xdd\x36\x5b\x47\x76\x01\x38\x50\x2c\x5e\x91\x58\xcb\xc9\x13\x52\xe8\x39\xe2\x6a\x3f\x3e\x55\xf9\xf3\xdd\xc3\x1a\x4b\x34\x81\xc8\x1e\x34\x5c\xca\x9f\xa5\x5d\x45\x99\x52\x98\xa0\xd4\xc3\x3f\xeb\x0e\x62\xd9\x27\x78\x65\x25\x22\xd7\x6c\x03\x3a\xb2\x01\x98\x90\xa3\x1d\xcd\xb5\x30\xd7\x44\xb4\xa3\xba\x7e\x14\x42\x76\x1f\x76\x9b\x90\x21\xe0\xb5\x9d\x0b\x02\x23\x09\x7f\xc9\xea\xc2\x11\x50\xe9\x20\x1a\x18\xa3\x1b\x37\x45\xe0\x9a\x9c\x4b\x70\x1c\xa5\x68\xf3\x94\x5c\x49\x4d\xd0\xae\xcb\x54\x3b\x75\x44\xa3\x7d\x54\xd2\x10\xa6\x9c\x98\x30\xb4\xa0\x78\x1d\xc9\xc7\x96\xba\xec\x9a\x48\xba\x9f\xd8\xd2\x87\x5b\x84\xbf\x5d\x49\x16\x87\xa5\x15\x95\x9b\x5b\xd3\x44\x85\xa9\x94\xdc\x35\xd2\xf1\xd7\xcc\x32\xad\x9b\xc5\xac\xb4\xc1\xd2\x0a\xc3\x1b\xef\x42\x32\x97\xa8\x23\xa3\x8c\xf4\x44\x68\x1f\xa6\x08\xd8\x17\x75\xcf\x21\xde\x87\x68\x70\xe4\xc8\x85\x88\x65\x9f\xe4\xb3\x6e\x63\xba\xee\x59\x25\xb9\xbe\x33\xad\xb5\xe7\x2b\xda\x7e\x3c\x09\xbb\xc4\xd6\x17\x08\x6c\xe4\x47\x41\x09\x48\xfe\x5d\x6a\x25\x2a\xba\x68\x4a\x90\xab\x5f\x54\xe2\x0f\x6f\x11\x7a\xe9\x78\x51\x06\x97\x04\x83\x00\xd5\xb4\xb9\x15\xa3\x41\x55\x26\xe6\x21\x96\x94\x65\x6d\x92\xc0\xaf\x1c\x2a\xbe\xe9\x5b\x69\xf7\xf1\x4d\xee\xf3\x86\x8f\x81\x28\x15\x99\xe2\x80\x9a\xc1\x34\x3a\x4d\x4e\xb8\x1c\x78\xa9\xbd\x5d\xca\xec\x2a\x95\xb9\xb9\xe9\x58\x79\xa9\x62\x90\xc2\x3c\xd6\x8f\x2e\xdc\x66\x51\x29\x7e\x1e\x06\xe2\x79\xa0\xfb\xa7\x44\xdf\x51\x48\xc2\xab\xd0\xde\xe4\x05\xa3\x24\xaa\x68\x5e\x2d\x22\xec\x25\x0e\x93\x12\x56\xd4\x07\x74\x97\x36\x4a\x54\xaa\xb6\x7f\x6e\xdd\xc8\xde\x5d\x5d\x59\x0b\xd8\x28\x64\x2b\x5a\x43\xd1\x7b\xed\x56\x6a\x5c\xea\x8f\xaf\x87\xc8\x44\xef\x99\xde\xf6\x31\xc8\x6e\xbc\xe7\x9f\x88\x7b\x24\x76\x78\x43\xc1\x38\x71\x90\x25\x7d\xc6\xee\x9c\x3e\xe0\xf9\xed\xba\x18\xc4\xff\x8b\x9b\x47\x8f\x9f\x14\xf6\xd4\x9f\x57\xac\x39\xd0\xc6\xe0\xab\x21\x3b\x0f\x5b\x88\x08\xb3\x43\xe7\xbb\xf9\x3f\x2a\xdd\x0f\xdf\x61\x44\x0b\x9f\x2
9\x40\x13\xe8\xc0\x1b\x03\x6f\xde\xf5\xed\xca\xdc\x3b\x58\x71\xc7\x1c\x12\x1a\x4f\x98\xf5\x42\x6a\x07\xac\xe5\xcf\x73\x58\x88\x8d\xb8\x3d\xca\x3e\x0f\xb4\xa6\xe2\x7b\x2b\x2c\xd0\x82\x14\x74\xcd\x6a\x1d\xca\x37\x14\x1a\x99\x25\x64\xe0\x4d\x4d\xde\x4c\xc7\x00\x27\xdd\x6a\xc8\x89\x75\x07\x6b\x3f\x8a\xd8\x6d\x83\x91\xd4\x40\x3b\x25\xe9\x07\x29\x7a\x0b\xed\x0c\x04\x8c\x91\x2d\x33\x51\x94\x60\x95\x64\x11\x1b\xcf\x2b\x49\xe9\xc2\x63\xe0\x67\xd0\x09\x29\xbe\x1f\x37\xf1\x07\xcf\x3a\x05\x55\xaf\x7c\x7d\x9b\x73\xa2\x74\x89\x66\x87\xad\xe9\x82\xd9\x4f\x45\x7f\xda\x91\x5f\xe5\x59\xd8\xfe\x08\x19\xee\x39\x2b\xc2\x19\x9d\x34\xaf\xf3\x26\x91\xfa\x00\xad\x88\x74\x97\xc3\x88\xea\x74\xcb\x3b\x1f\xd9\x2c\xee\xef\x8d\xee\x1c\x27\x98\xe9\xbe\x53\xb0\xe2\xe4\x9a\x5b\xa5\x7a\xb8\xed\x96\x55\x84\x7d\xa1\x98\xc8\x0b\x42\x0d\x28\x62\x3d\xce\x30\x87\x3d\x4e\x32\xd3\x8e\x93\xb1\x80\x07\x64\x8a\xfe\x2a\x2b\x63\x1c\x8f\x95\x6c\xbd\x03\x84\xdf\x3b\x7b\x25\x39\xef\xa4\xfe\x80\xc8\x35\x2b\xae\x0f\x8a\x43\x7f\x1e\xec\x8c\x5e\x5c\x60\xe1\x4e\xe8\x87\xe9\x0c\xd7\x4a\x0a\x00\xdd\xa3\xdc\x8f\xf4\x57\xb7\xc2\xb0\x6f\xbe\x8c\x47\x52\xef\x2e\x81\x8d\x47\xfb\xa3\xf0\x20\xd0\x0e\xc0\xdb\x17\x82\x22\xe1\xdd\xc9\xca\xb0\xea\x78\x02\x71\xc6\x60\x40\x72\x0f\x19\x1a\x85\xae\x35\x0f\x54\x84\x57\xa2\x6c\x87\x4e\xa2\x3b\x9c\xd1\x9a\xe8\xfd\x24\xd5\xf0\x9e\xa2\x1b\xb0\x55\xc2\x04\x5b\xf9\xb2\xd7\x66\x6b\x1e\x45\xec\xe5\xfe\x2c\xad\x24\x64\xad\x67\x85\x60\x93\xe7\x38\x2e\xf5\x88\x3e\xdf\xc3\x85\x3a\x6a\xa2\x29\xcd\x7f\x9b\x7f\x11\x9f\x1b\x3a\x11\xd2\xf8\xb8\x2b\x2f\x1b\x77\xee\x81\x19\x3c\x42\x39\x98\xcf\xc3\xcf\xff\x28\xed\xd1\xf4\x53\x48\x0e\x62\xc2\xfd\x2e\xab\x02\x97\xf0\x3c\xd2\x44\x80\x1a\x8f\x25\x59\xfd\x97\x0e\x38\xac\xe1\x7e\xf9\x95\xd5\xa5\xf6\x4f\x36\xb0\x39\x85\x7e\x3d\xb6\x2d\x22\x44\xec\x9e\x47\xac\x54\x93\x4e\x11\x1b\xcc\x5f\x3d\x63\xda\x94\xdd\x93\x5a\x98\xfe\x06\x23\x75\x14\x2a\x48\x7e\x9b\xb2\x6f\xaf\xe0\x7a\xa4\xee\x1b\xfc\x20\x58\x39\xd9\xce\x83\x9d\x3b\xa4\x37\xb7\x56\x1f\xfa\x8c\xec\xd8\x65\x7d\x6f\xab\x2c\xd6\x91\xbb\xa3\x1b\xdc\x65\xd5\xbb\x62\xf4\x5c\x70\x19\xbb\x1f\xc3\xb8\xf2\xa4\x2f\xff\x52\xc2\x9f\x88\xe0\xdf\x52\xa6\x75\x93\x90\x7b\x29\x21\xb3\x5b\x5a\x7c\x84\x6d\x2c\x9c\x06\x6c\xcb\x89\x5e\x79\x0d\xfa\x86\x8b\xda\x59\xe1\xc9\xcb\x68\x26\xef\x74\x99\x57\x87\x6f\x96\x38\xe5\x96\xdc\x08\xba\x0b\x1d\x57\x96\xe4\xcd\x9c\xbf\x22\x5f\x90\x3f\x44\x7a\xfb\xc8\x6a\x1d\xab\x39\x7c\x37\x35\x2b\x67\x61\xaa\x57\x78\x22\x39\x63\x52\xba\x9b\x4a\xfb\xdb\x49\xbd\x7a\xdb\x3f\x50\xa0\x8e\x7d\xc4\x02\xa9\x86\xdf\x6b\xd5\x01\xef\x1d\xb5\xfc\x4e\xd8\xd6\xf9\x4d\xde\x77\x7b\x38\xe7\xf0\x9a\xa3\x84\x13\xbd\x64\x82\x21\x25\xc5\xf4\xf6\x6b\xab\x86\x83\xda\x9f\x0f\x03\x4d\x4e\xd0\x93\x87\x8d\xad\xf1\xd8\xd4\xfa\x6d\xdf\x36\xc8\xab\xc4\x32\x53\xc4\xd6\x21\xda\x90\x37\xbc\x17\x1e\xaf\xd6\xc4\x7b\xeb\x43\x87\x11\x88\x47\xb5\x84\xe9\x67\xcc\x03\x53\x1a\x76\x2f\xa6\xbd\x38\xa7\x7e\x88\x09\x0a\xae\xa4\xf7\x02\xad\x34\x57\x9e\xb1\x28\x4f\x6b\x8c\x5c\x43\x20\xc5\x7c\xd4\xa4\xec\x1f\x30\x06\x58\xe1\xb5\x16\xc2\xc8\x84\xfe\x31\x5c\x38\xe3\x53\x7a\x03\x78\xd7\xda\x35\xc2\x44\x42\x75\xdd\x18\x64\xc7\x73\x54\x3f\xf5\x59\xd1\x76\xf4\x13\xe7\x59\xff\xe6\x37\x2d\x01\x30\x64\x60\xcf\x29\x06\xf6\x53\x11\x60\x30\x61\x97\x2d\x54\x51\xa1\xf9\x8c\x55\xf3\x09\x97\xe8\x55\x05\x5e\xa7\x9b\xec\x1d\x11\xe1\x7a\x12\x9f\x91\xcd\x07\x95\x40\x33\x94\xbb\xcc\x47\xd4\x2e\x11\x11\x82\xde\xb6\xbb\xa6\x70\x76\xcf\xe4\x91\xc2\x46\x98\x7c\xc9\x33\xcc\x26\xf0\x5f\x1e\x0c\xc9\xf6\xe7\x12\x69\xfb\x25\x0b\x01\x7f\x89\x27\x4d\xfa\xce\x50\x3f\xc6\xf3\xad\xe4\x03\x54\xc9\x36\x5b\x5d\x
dd\x80\x71\xa8\x30\x04\xc5\xe0\x94\x51\xd6\x50\xae\x50\x15\xf5\xa4\x4d\xbe\x66\xd5\x74\x98\xa8\x28\x20\x3a\x9b\x96\x1c\x56\xa1\xbc\x8d\x08\x14\x8b\x35\x65\xb6\x0f\xe1\x75\xeb\x46\xef\x7e\xf4\xce\x21\x96\x01\xfe\xc5\x64\xa5\xc1\x76\x5a\x61\x4a\xda\xb1\x14\xed\x3f\x92\xb2\xf6\x08\x81\xf2\x0a\xa8\x5e\x08\xa3\x67\x6b\xef\xe2\xc7\xe8\xcf\xc2\x0d\x94\x9b\x5f\x15\xde\x86\x3d\xdf\x2e\x72\x8f\xdb\x5d\x91\xf8\x58\x4e\x01\x43\xb6\x0e\x4f\x7a\xd2\x1e\x06\x57\x5e\xf7\xd2\x1f\x6b\x94\x56\xb7\x0f\x9c\x05\x46\x55\xe8\x5a\x4a\x3b\x84\x25\xe2\x8f\xd6\xa5\x90\xc2\xcb\xf7\xbb\x47\x2d\xa1\x4a\x0e\x64\xac\x4b\x71\x44\x87\x2a\x50\x6f\x1f\xb1\xbc\x5b\x95\xa0\xd5\x7d\x42\x2d\x61\x67\xac\xfc\x01\x6c\x1b\xaa\x3b\xc1\x9e\x8f\x9e\xde\xbb\xb9\x07\x74\x51\x76\xc5\x2d\xf3\x10\x65\xd9\x1c\xe9\x99\x4e\x9f\x94\x5a\x29\x69\x8d\xeb\xc4\x47\xea\x3c\x16\x38\x42\x8e\x9a\x43\x29\xfe\x83\x1f\xb9\xbd\xb6\x6d\x54\x25\x68\x63\x23\x61\x2f\x60\x81\xee\x4f\x42\xdd\x5d\xd1\x4d\xcd\x10\xbf\x34\xa8\xbf\x13\xd6\x8d\x3e\x20\x4a\xf4\xc9\x99\x02\x9d\x32\x0a\xed\xa8\x2a\xee\x96\x5e\xbc\x9b\x2f\x17\xb4\xb2\x8f\x29\x9f\x8f\x6e\x8e\x78\x4b\xd8\x49\x0d\x51\xd9\x31\x6b\x12\x33\x6e\xf4\x08\xcf\x06\xf7\x74\xed\x43\xa3\x83\xc8\xed\x3f\xda\x83\x11\x94\x94\x68\xc4\x6c\x83\x41\x9e\x5a\xdc\x48\x79\x64\x92\xcf\x6c\xc9\xea\x25\xac\x02\xe8\x8e\x7b\x0d\x56\xb7\xb8\xd6\x5d\x44\x42\xfa\x26\xe5\x3e\x76\x59\xbd\x79\x3e\xa4\x73\x13\xa2\x0f\x1f\xc2\x9b\x47\x5d\x86\xce\xd6\xf7\xbb\x76\x95\x77\xee\x94\xc1\x53\x33\x79\xd4\x9d\x0f\x9c\x35\x6a\x97\x73\xd4\x9c\x29\x9e\x3f\x52\xdf\xf1\x9d\x73\xc4\xc5\xda\xec\x9d\x49\x68\xfd\xa6\xe5\xfc\x86\xee\xe1\x53\x27\x3f\x25\x3f\x9e\xfe\x8b\x01\x5a\x1d\xb4\x36\x28\x9b\x2e\x81\xf7\x1c\xa9\xf5\xef\x86\xcf\xd0\xf8\x20\x47\xd1\x07\xe8\xd8\x75\x55\xb6\x34\x3c\x68\xd4\xcf\xce\x64\xb2\x2f\x7f\x27\x8c\xfd\xe2\x16\xee\x55\xb0\xf1\x60\x0c\xdd\xab\xd8\x05\xee\x9f\xf0\x35\xbf\x44\x0d\xb0\x9f\xe8\x96\xfa\x6a\x75\xe2\x1d\xef\x0e\x83\x55\x16\x83\xbb\x1a\x7b\xbb\x15\x8d\x65\x8f\x66\xa0\xfd\x91\xf7\x30\xe8\x5a\x73\x7d\xc5\x38\x55\xf2\x91\xec\x6b\x12\x09\xbb\x12\x55\xf5\x1a\x51\xdc\xad\x5b\x17\x0e\x0c\xf8\x07\x5d\x6b\x77\x04\x8c\xdf\xe5\x60\x61\xb8\xf5\xec\x9f\xdc\x65\x6b\x7b\x51\x1b\xa7\x6b\x96\x75\xc2\xaa\xce\xc4\x1d\x8e\xbe\x56\x42\x36\x13\xc3\x7b\x0e\x61\xcc\x2d\x52\xd7\x1d\xc9\x0c\x48\x5e\x6a\x56\x0c\x36\x5f\x55\x5a\x15\xb8\xdb\x03\x37\x3d\x17\xac\x82\xd8\x2a\x0c\x17\xda\x0b\xf1\xa1\x57\x0c\xfc\xc8\x8d\x8e\xc7\xa0\xab\xd0\xb1\x79\x65\xfb\xc7\x2e\x4c\xd8\x20\x54\x7d\x8a\x09\xbd\x7f\x54\x17\x1b\x4f\xaa\x14\x77\x31\x40\xa3\x71\x77\xc4\x72\x65\x23\xb2\xab\xed\x0a\x0a\x2f\xb1\xbd\x55\x79\x5b\x63\x26\x5f\x76\x7d\xcc\x4c\x1a\x2d\x30\xe7\xf4\x11\xd9\x1c\xb6\xbd\x20\x63\x73\x65\xa6\xd7\xc5\x66\xf2\x35\x33\x99\xd2\x8e\xa1\x8a\xba\x40\x41\x92\xa9\xac\xcd\x09\x61\xc0\xf0\x0f\x9d\x74\x0a\x8b\x50\xd4\x95\x7a\x87\x53\x6c\x56\x9d\xe8\x29\x7f\x86\x57\xe8\x98\x20\xc0\x6a\x5a\xa8\x4b\x61\x00\xb6\x3a\x86\x6d\x84\xeb\xca\xd5\xcc\x04\xdd\x49\xdd\x4c\x60\xeb\x2a\x4c\x73\x84\x9f\x82\xce\xf9\x93\x90\x31\xca\x81\x99\x5d\xde\xd5\x0c\xdb\x10\x11\x14\xed\xf0\x57\x59\xc5\x02\x16\xe0\xbb\x4e\x6a\x23\x5d\xe8\x99\x53\x24\x97\x00\x30\xb2\xf8\xfc\x24\x39\x40\xdc\x42\x60\x3c\x3a\xa1\x63\xb0\xff\x19\x7b\x4b\xcc\xe2\x59\x81\x3d\x67\x04\xe0\xdb\x49\xd2\xa2\xdf\xe7\x95\x8c\x4e\x85\x21\x79\xbb\xb2\x65\x57\xd1\xfa\x61\x4f\xdc\x66\xb1\x4b\xe9\x15\x1f\xe9\xe8\x87\xa8\x82\x56\x8b\x32\xb0\xca\xe6\x22\x68\xc2\xbc\x7b\xc8\xd0\x6d\xba\xa4\xb1\xc7\x08\x1a\x29\x5b\x9c\x00\xb9\xbd\x98\x2c\x68\x5c\x3f\xd8\x88\x64\xe2\xbb\x36\x32\x68\x48\x72\xdf\x70\xb8\xba\xc2\xdf\xf7\xc1\x8a\x0b\x30\xfc\x29\x8b\x88\
x9b\x97\xe3\x0f\xe1\x3c\x94\x39\x6b\x47\x45\xd2\xc3\xc1\xf4\x08\x6c\x05\x7f\xd9\xdc\x17\x7f\x04\x17\xed\x48\x9c\x8a\x8e\x5f\x10\x37\x12\xe7\x07\x99\xf3\x8a\x8d\x9a\x02\x6f\x3d\x12\xbd\xaf\xed\xa8\x58\xfa\x1b\x8e\x6c\x7e\x31\x16\x69\x22\xc1\x2c\xf5\x4f\x6b\x15\x88\xb8\xa6\xc8\x05\x18\x29\x53\x2c\xbd\xbb\x5a\x78\x19\x1b\x97\xd8\x5e\x82\x46\x64\x0f\xd3\x9d\x7b\x7e\xff\x43\x45\x75\x2c\xd9\x87\xb3\x60\x9c\xbd\xa5\x0b\xed\x99\x32\x3a\xdb\xae\xf3\xd3\x6d\x95\x1e\x4d\x9b\xe3\xd3\x94\xb8\xb9\x3f\xf0\x2f\xd4\xe1\x0e\x03\x0a\x15\x8b\x76\x65\xc5\x16\x62\xc3\x62\x12\x37\x87\xc5\xdb\x4c\x34\x1e\xc5\xac\x5a\x89\x73\x2f\xc3\x5d\x58\x2c\x2b\xef\x61\x78\x39\x8c\xb0\xd9\x62\xa9\x0b\x1e\x6d\x4c\x16\xb2\x57\xb4\x28\xff\x57\x93\x03\x88\xae\x8e\xa3\x76\xce\x76\xef\x03\x7b\x95\xe0\xc8\xb0\x3f\x69\x43\x45\x6a\xf5\x03\x3d\x77\xb7\xf4\xf9\x35\x25\xa8\x6e\xf3\xee\xb5\xfc\x29\x1a\x8f\x55\x9d\x03\xfb\xe9\xe6\xcb\x77\x4b\xfe\xc7\xfc\x65\xc9\x3a\x6d\x91\xc7\x35\x0c\x2d\x4d\x9f\xdd\x04\xd0\x5a\x59\xec\xfa\x0c\xb6\xd6\xa0\xae\x7a\x07\x61\x0d\xef\x6b\xf7\x6f\x22\x75\x95\x55\xad\xcb\x1e\x34\xb3\xc2\xc8\xa2\xd1\xbd\xad\xee\xf5\x34\x87\x1c\xc0\x27\xed\x05\x58\xf3\x6e\xea\x27\xaf\x0c\x86\x59\x73\x5f\x3f\x7a\x50\xd7\xaa\xe1\x70\x7e\x3b\x75\xba\xfa\xb6\x7b\xac\xec\x5f\xee\xc8\xf8\x24\xaa\xb5\x2a\x73\xdd\x96\xc7\x1c\xd9\xb5\x5d\x92\xdc\x4e\x5a\x68\xab\x94\xc7\x48\xf7\x9a\x58\x33\xef\xcc\xd8\x87\xf0\x8e\x76\x19\xab\x4c\x2f\xe0\xcf\x11\x61\x5f\x30\x95\x50\xea\xe0\x42\x46\x27\x47\x4d\x27\xb4\xcc\xd1\x9d\x04\xa7\x7f\xe4\x2c\x0d\xbf\xe0\x10\xe3\x29\xd5\x67\x49\x84\x75\x97\x74\x70\xb7\xfe\x57\xa6\x66\x85\xf1\x97\x7f\x91\xaf\x7f\xd1\xaf\x9f\x61\x57\xe6\xe7\x6a\x6b\x04\x4f\x1f\x5b\x3b\x9c\x32\x8a\xf2\x98\xc6\xb8\x6e\x85\x71\x8a\xae\x9a\xfd\x23\x4d\xd0\x0a\xf2\x68\xeb\xff\x92\x7c\x6d\x99\xc2\xca\x6a\x9a\xc7\xb0\xcc\x08\x9b\x2d\xfb\x81\x0a\xbb\xae\x59\x90\xa5\x4d\x07\x4f\x35\x43\xe7\x65\x69\x15\x32\xfe\x6b\x52\x9a\x45\x50\x61\xa6\xfc\x4f\xb0\xed\xe7\x97\x75\x1b\xd5\x6d\x0d\x9f\x63\xd4\xbe\xe4\xbb\xda\xd2\x4e\x07\x0e\xd7\xef\x28\x82\x4c\x4e\x62\x48\xb9\x90\x09\x08\x02\xad\x6b\x23\xc2\xe6\x67\xfc\x93\x41\xbe\x04\xb0\x37\xce\x8d\xdf\xfa\xd9\xd5\x57\xfd\x8a\xb1\xaa\x38\x7f\x9b\x9e\xdd\x39\x43\xce\x27\xa3\xcc\x75\x47\x45\x9f\x8f\xf2\x96\xc6\xb1\xa7\x34\xf0\x25\xb2\xaa\xc8\x19\xd8\x1c\xb3\x14\x96\xb8\x60\x84\x30\x81\x53\xf0\x98\xba\xa2\xea\x58\x3e\x21\xb0\x9e\xf3\xea\xf4\x84\x96\xc2\x9d\x2d\x4e\x72\xe4\x9e\x45\x41\x15\x4f\x60\x61\xfb\x8f\xa3\x0a\x00\xfd\xc1\x36\x25\xeb\xeb\xf4\x52\x82\x06\xea\x5f\xe6\xea\x82\x68\xf0\x80\x69\x32\xbd\xea\x22\xbe\x09\x57\x97\x40\x18\x86\x49\x91\xbe\x7a\xbf\x67\xb5\x42\x68\x04\xdb\x76\x5c\x5e\xde\xc6\x2e\x38\x0f\x71\xa2\x67\xad\x91\xd8\x0c\xc9\xc4\x0f\xed\xcc\x2c\x9d\xd9\x80\x75\xd7\x8f\x1e\x30\x1b\xa4\x56\x76\x0a\xe7\x6e\x66\x2c\x30\x72\x31\xa6\x1c\x91\xb9\xe7\x16\x76\x75\x23\x7c\x5f\x59\xc0\x0d\x50\x82\x89\x68\x4a\xee\x2c\xf8\xf6\xfa\xb0\xc8\xb3\x96\x74\x11\x36\x59\xf4\xdb\x48\xd4\xa1\xd8\x36\xf3\xdc\xc5\x7c\xb4\xb9\x62\x8d\x5a\xf6\xe6\xe9\x63\xfb\xa9\xad\xd9\xa2\xf4\xd0\x3b\x66\x0d\xa8\xf9\xab\xd2\x7f\xd2\x73\xe3\xe1\x94\x55\x6d\xdd\xfb\x33\x87\x43\x46\xfd\x59\x35\x9f\xa1\x1a\x3c\xef\xe8\xe6\xf7\xb0\x1d\x96\x5f\x07\xbe\x07\xe5\x6f\xec\x10\x7d\xb2\xbd\xb9\x0f\x72\x5d\x2e\x6b\xb9\x28\x38\xb3\x21\x82\xbf\x7d\xf7\xd2\x17\x82\x20\x7d\xb0\x49\xb0\x8a\xa8\xbf\xd9\x33\x47\x38\x7b\x36\x3e\xab\x62\x99\x96\x62\xb0\x47\x9d\x2c\x42\x0e\xca\xe4\xa0\xd9\xe2\xaa\xfa\x7e\x45\x99\x3d\x09\xc5\xa0\x60\xe6\x71\x53\x3c\x48\xa4\x65\xf7\x29\xdc\x47\xa0\xa7\xe1\xba\xde\x7d\x76\x4a\xfa\x6a\x70\x77\xc1\xaa\x25\xe3\xf6\x24\x8e\xe0\x91\x40\x6c
\x8a\x36\xfa\x6a\x8d\xa6\x66\xcd\x72\xc3\xb0\x72\x37\x3a\x9e\x5b\xab\xba\x61\x59\xa0\xe2\x79\xce\xb2\xed\xcb\xb3\xaa\xe8\x25\x8b\xcc\xd0\x85\x1f\x51\x57\xb4\x85\xef\xf0\xad\x45\xd1\x6c\x9f\x51\x83\xf0\xca\x73\xa3\x7b\x5a\xf9\xc5\xa3\x56\x6d\x70\x6c\x60\x25\x39\x87\xff\x98\x4a\xcf\x61\xbf\xdd\x4d\xaa\xe2\xed\xc8\x14\x5f\xbb\x1e\xd5\xd3\xb4\x4f\x37\xf1\x98\x9b\x8f\x60\xc0\x10\x1d\x35\x07\x0d\x6e\xf0\x2b\xe1\x2e\x4a\x15\x9e\xe2\x15\x7e\xa3\x40\x0b\x51\x85\x37\xef\x99\x4e\x56\x36\x1d\xe6\xf9\x30\x2d\x81\x45\x31\xab\xe9\xb5\xb0\x37\xea\xaf\x4d\xc2\x66\xfa\x2f\x2e\xd3\x74\x90\x8a\x92\x8a\x2c\xfe\xec\x92\x17\x9b\x36\xff\xd1\x55\x04\x92\x47\x16\xdb\xab\xa8\xab\x09\x25\x0b\xeb\xe6\x5e\x2d\xca\x2f\x16\x0c\x86\x64\x7d\x03\x5c\xfa\x12\x54\xfb\x26\x30\xaf\x59\x05\x97\x0a\x1a\x90\xaf\xe6\x9f\x42\xd8\xe0\x63\x35\x85\x6f\x3a\xde\x1f\x5e\x48\x3e\xb1\x44\x30\x49\x58\xda\xa3\x51\xba\xa9\x30\xc6\xfe\xe0\x17\x1e\x02\x95\xe3\xf9\xeb\x97\x46\x17\xd5\x7e\x4a\x2b\x7d\x2e\x8d\xf5\x4c\x8a\x05\x1f\xb2\x27\x69\x97\xb7\x4b\x2a\x99\xcf\xfb\xb6\x05\x32\x2c\x06\xb3\xf4\x6d\x0c\xd5\xd3\xdb\x7e\xbb\xe8\x92\x6d\xd7\xb1\x45\x9c\xfc\x0c\x36\x10\x73\x12\x0a\xcc\x74\xd2\x4f\xeb\x0d\x8e\xd7\xa6\x73\x09\x81\xc7\x22\xfb\x32\xe8\xca\xea\x75\x2f\xdb\xe7\xaf\x5f\x79\xdc\x6c\xbe\xe0\xca\x4d\xb2\xee\xba\x72\x03\xfe\x92\x08\xc8\x96\xc6\x4a\x4e\x6f\xe6\x14\x54\x57\x32\x6e\xeb\x44\xdb\xe7\xbb\x9e\x36\x21\x78\x6f\xd4\x56\x8c\x43\xe5\xb5\x62\x6a\x6e\xc0\x6d\xac\x9f\x27\x38\x9a\x2d\x73\xa3\xd7\x2a\xc1\xff\x86\x40\x6f\x8d\x60\x64\xd9\x0e\xd9\x74\xcf\x02\xfd\x42\x6a\x05\xc5\x74\x77\xbd\x55\xde\xba\xd9\x36\x87\x27\xed\x6f\x30\xa0\x55\x00\x58\xb6\x39\x24\x90\x67\x6e\xd5\xee\x5d\xe4\x13\x79\xb9\x0b\x8b\x9a\xc0\x94\x4e\x72\x70\xa8\xd0\x6e\x95\xef\x9d\x2a\x35\x7e\x28\xe5\xe3\xa6\x4d\x71\x75\x77\x04\xf4\xf5\x8b\xb2\x0a\x90\xc1\x17\xfd\xd2\x23\xf7\x73\xd2\x50\x05\x78\x79\x87\xf7\x09\xc3\xa1\xaf\x02\x46\x9d\x0f\x86\x96\xa1\x08\x49\x78\xc4\x38\xff\x15\x51\xc5\xae\x19\x5f\x26\x05\x8b\xb3\xad\x9f\x8a\x58\xee\x45\x4d\xbc\x58\x5e\x5d\x93\x9b\xd5\xf8\x08\x19\xbd\xac\xa5\xe7\x97\x3a\xb9\x92\x8a\x8a\x6b\x4a\x65\x25\x8b\xec\xac\xcd\xd7\x25\x34\xcd\xe6\x1b\x16\xd5\xa3\x2a\x11\x6f\xe6\x92\x04\x78\x41\x3b\xc1\x77\xdd\x4f\xf5\xd7\xc1\xad\xb2\xbf\x44\x55\x2d\x37\x5a\x28\x0d\x53\x0b\x71\x0e\x8e\x1c\x86\xf0\x55\xec\x72\x50\x8d\x69\x2e\x84\x82\x9a\x75\x74\x47\x8b\x8b\xb3\x32\x90\xc4\x8e\x76\x8b\x0f\xb2\xb7\x57\xcb\xbb\x85\x01\x16\x56\xd2\x3e\x3b\x88\x04\x7c\x51\xa1\x0c\x3d\x07\x1f\x65\x57\x87\xa0\xf8\x73\xd6\x6f\x7d\xfe\x5a\x75\xa7\x73\x19\x5a\xf0\xcc\xe6\xa7\xc0\xa9\x84\x49\x52\x7f\x1f\x4a\x99\xe1\x0c\xe4\xc5\xb4\xd7\x92\x6a\x91\xd6\x93\x12\xb7\x43\xd1\x30\xb0\xae\x53\x64\x20\x2b\x0f\x74\x77\xfb\xf9\x48\x2a\xb5\x95\x98\x86\xbc\x46\xb1\x7d\xd1\x48\x63\x55\xcd\xc2\x19\x7c\x48\x1c\xc1\x8d\x3d\xd5\xd8\xdd\xa9\x06\x38\x9e\xdc\xf2\xdb\x11\x1b\x63\x38\xb0\xb2\x93\x6b\x67\x47\x14\xbb\x75\x0d\x4b\x17\x77\x52\x7e\xb0\x8d\x3b\xc9\x59\xf1\x5c\x94\x64\xc3\x9f\xb6\x54\x3b\x31\xbc\xfc\x0d\x38\xc2\x6a\x62\x9d\x8b\xe5\xdc\x66\xf5\x7f\x4a\x0f\xaf\xf6\x99\xdc\xfe\xbd\x84\xb6\xdc\x6d\x61\x9c\xc7\x5b\xb8\xcd\xb2\x9a\x4d\x39\x9b\xfb\xac\xc8\x02\xd8\x30\x22\x50\x8c\x45\xae\x7f\xb2\xbd\xb1\x63\x3f\x11\xf5\xbb\x55\x92\x83\xe0\xb7\xc8\x96\x65\xb9\xdb\xc4\xe6\x96\x8b\xa2\xf4\x46\xdc\x20\xda\xfa\xba\x13\x6a\x9c\xca\x32\x79\xf8\xe6\x7f\x2c\x60\xd4\x22\xb5\x92\x8d\x74\xe1\x19\xc2\x37\xd5\xc6\xb3\x2b\xcb\xfc\xf2\xa6\x82\xd3\x81\xad\x0d\x04\xf1\x97\x0a\x63\x74\x3c\xd8\xda\x5d\xb9\x29\x41\x3f\xf7\x62\xc7\x36\xaf\x53\xd4\x4f\xee\xff\x22\xad\xe6\x49\x0e\x1a\xac\x5d\x9a\x6f\x79\x5d\xa3\x7b\x2b\xaa\x32\x8
a\x60\x71\x70\xef\x24\x41\x67\x2f\x76\x18\x98\xba\xfb\xcf\x11\x7d\xac\x52\x6f\xee\xef\x14\x01\xdb\x29\x9a\x3b\x68\xf9\x46\xf6\xe1\xe8\x08\xf2\xaa\x8c\xba\xb5\xf2\xbc\x5d\x09\x23\x5b\x29\x6c\xab\x04\x39\xbc\xb3\xd7\xbc\x75\x60\xc0\xfe\x5a\x85\x7e\xba\x43\x7a\xf4\x76\x1b\xfb\x0e\x51\x6c\xd1\x00\xc5\xd8\xbf\x7a\xc0\xd5\xcd\xc9\x11\x32\x43\x66\x5d\xd3\xfb\x61\xe5\x17\x84\xe0\xc4\x45\x14\x60\x56\xbe\xb6\x16\xde\xf4\x64\x27\x6c\xe7\x05\x76\x13\x0c\xaf\x59\xd6\xce\x36\xf0\x2a\xc1\xc1\x6d\xda\xd1\x00\x34\x60\x8a\x4f\xaa\x4f\x15\xc7\xbb\xfb\xf4\x3c\x54\x46\x47\x11\xbd\xfd\x6e\xf5\xc6\x33\xbf\x8b\xf4\xd4\x31\xb2\x76\xc0\x2e\x7c\x40\xfc\xf9\x8d\x16\xb4\xb7\xa4\xf5\xa6\xc2\x24\x69\x41\xd8\x14\x2f\x2f\x38\x23\xd9\xb5\xc9\xb3\x61\x39\xa5\xf3\x2c\x84\x89\x16\xbb\x60\x16\xf9\xe0\x0c\x11\xb1\xbd\x27\x5e\x0f\x0b\x5d\xe3\xbd\x5a\x5f\xd3\xa4\x8a\x5c\xfd\x19\x2b\xbc\xbf\xb2\xb9\x09\x80\xc0\xb3\x1d\x23\xe0\x3b\xe7\x96\xcf\x23\x5c\x1a\xe6\xbe\xe8\x22\xdb\xd0\x2e\x5b\xbc\x9b\xb9\xcf\x60\xa8\x79\x55\xf9\x89\x30\xc7\xa1\x88\x44\x5c\x0f\x76\x96\x57\x65\x1f\xd8\x28\x72\x1a\x0b\x1a\x30\x1c\x0d\xe9\x50\x79\x2d\x47\x4a\x64\x5b\xab\x87\x6c\xef\x85\xcc\xe2\x5d\xe5\x9a\x4d\xaf\xaf\xe8\x3a\xb3\xcf\x0d\xf3\xc9\x10\x89\x0e\xcd\xde\x32\x0f\xaa\xcb\xbb\x10\x8b\x7e\x5a\xb6\x8f\x3a\xca\x03\xa9\x42\x13\x84\x3d\x99\x25\xf4\xa4\x7e\xf4\x18\x26\x0b\x63\xfd\xce\x0c\x00\xa2\x89\xce\x59\xf2\xa5\xad\xd3\x08\x78\x9f\x95\xb3\x62\x39\xc6\x13\xb4\xf8\x5d\x2e\x94\xd7\x1d\xb4\x63\xe5\x7a\x57\x59\xb9\x5f\x57\x92\x87\x7b\x98\xe2\x8e\x45\xfa\x18\x8f\xfa\xf6\xda\x5a\xa7\xcb\x38\x97\x73\xa4\x2a\xd4\x7b\x36\xb4\x72\xc7\xef\xc5\x57\xc7\xc8\xcf\xf8\x51\x76\x99\xf9\xd7\x8d\xae\x5c\x12\xc7\x96\x5b\x32\xa6\x96\x14\x9e\x24\xc1\x1d\xed\x57\x18\x55\x92\x8f\x9a\xd1\x9e\xcd\x1f\xd1\x2a\x70\xac\xb6\xbd\x3c\x6c\xd0\x33\xb5\x5f\xf2\xe1\x5d\x68\xda\x34\xb1\x25\xed\x5e\xf0\xfe\xd4\xb5\x2a\xb2\x08\xb2\x2e\x94\xd8\xfe\x00\x78\xf9\xf8\xd7\xf2\x9c\xf7\xa6\x59\xb4\x1d\x64\xbf\xf0\xee\x3e\x5a\xda\xbd\x62\xe4\x9e\xbe\x3d\x7b\x44\xdf\xda\x07\x81\x96\x64\x61\x08\x19\xdc\xa0\x9e\x69\x45\x7e\x02\x7b\x75\x7f\xea\x41\xe4\x07\xe6\xb0\x21\x77\x57\x8a\xe8\x45\xaf\x17\xb3\x10\x9c\x7e\x21\x6a\x32\x3e\xb1\x1e\x2a\xc0\x6b\xa8\xa2\xde\xc3\x21\xc0\x2b\x6d\xcc\x91\x2c\x44\x31\x14\xdf\x14\x21\xe4\x2a\xe7\x21\x11\x93\x7c\xf5\x97\xad\x34\xdf\xee\xca\xce\xe1\x4d\x16\xce\x30\x4b\xb8\x31\xcc\x48\xcb\xd4\xbd\x0b\x21\x80\x57\x78\x20\x23\xb2\x9c\x5d\x69\xec\x52\x36\xf2\x90\xcf\x1f\xb1\x1e\x6d\x8c\xad\xc8\xc7\x7e\xbe\xc5\x5d\x06\x9b\x4d\xb8\x40\x66\x66\xd3\xb4\x85\x1e\xb5\x67\xf1\x3f\xfc\x15\x92\x8d\xc3\x31\xb2\x07\x7c\x1f\x99\xe6\x3a\x5f\xb1\x96\xa8\xaa\x7f\xb7\x9e\x74\xb3\x71\xee\xdd\xb6\x82\xa2\xd4\x93\xbe\x8e\x18\xf8\xe1\x08\xbf\xcd\xb2\xbf\xa2\xc6\x9f\x18\xa5\xbe\xd6\x10\xae\x46\xa4\x8d\xed\x4c\xce\x08\x93\x82\xa3\x94\x28\x69\xb6\xa3\x63\x1a\x27\x0a\x17\x5f\x2a\xb0\x97\x10\x54\x31\x01\x09\x6c\x80\x0c\xff\xec\xd4\x74\xc7\x17\x15\xe6\x9a\x69\xa3\xea\xb7\x47\xc8\xc2\x47\x65\x7f\xcb\xe9\x87\xdd\x5d\xf3\x0e\x42\xe2\x1f\x47\xfb\x3f\x69\xec\xf6\x05\xa8\xa7\xda\x7d\xa2\x96\xad\xd8\x15\xb1\x90\x17\x25\x03\x2d\xf2\x20\x35\xc3\x2a\x86\xaf\xd9\x9d\xdf\xf2\x8d\xfe\x5b\x77\x7e\xa2\x51\x5e\xb7\xb9\x29\x64\x05\xd8\xe1\xba\x6e\x9d\xf5\x66\xa1\x7f\xce\xff\xa9\x8d\x3e\xef\xaa\xfe\x47\x25\xb7\x5a\xfd\xbf\xb5\xc0\xcf\xb7\xfa\xea\xbb\x2d\xe2\x5b\x30\xa0\x43\x1a\x02\x5f\x7f\x11\x49\xbb\xf2\xcd\x6c\xaa\xd7\x55\x4e\xff\x4d\x14\x0b\xd0\xbe\xd0\x34\xb3\x73\xa9\xb5\x0f\xa2\x0c\x2f\x91\xcf\x59\xdc\x99\x22\x88\xd0\x8a\x10\x98\x5f\x85\x26\x44\xa8\x41\x5d\xc4\xdc\xcd\x5d\x14\xf0\xfb\x33\x06\x36\x93\x88\xce\x9d\xd7\xca\x0e\x
f8\xba\xa6\x96\x04\xb0\x36\xac\x40\x2e\xff\x3f\xbe\x08\x6a\x7b\x89\xab\xbb\xd6\x0b\xec\x90\x23\xca\x36\x12\xc8\xcf\x12\x07\x3b\x47\x8a\x0d\x7b\x1b\x8a\x5c\xcd\x99\xd4\xf6\x3f\xd1\x1f\xbf\xb8\x1b\x4c\x17\x59\xd6\xe0\x8b\x24\xea\x6d\x91\x66\xa6\x4a\x10\xaa\x41\xfa\x21\xf1\x9a\x6d\x05\x2d\x1a\xe4\x95\x09\xf7\x56\x7f\xea\x25\x57\x96\x56\x0a\x66\x4b\x02\x19\x70\xce\x2a\x8a\x25\x41\xd6\x1c\x1e\x14\x68\xc7\xf9\xc3\xd3\xa5\xbc\xd2\x49\x7c\xbf\x0f\xf7\xec\x1f\xa1\xfb\x1f\x10\xb5\xe9\x88\xf4\xd1\x67\xc4\x87\x70\x8f\x8d\xac\x3f\xa4\xb4\x2d\x32\x4e\xda\xc3\x87\x16\x38\xc0\x25\x39\x2f\x02\xe2\xf8\xfd\xbe\x83\xa8\xe1\xf6\x88\x81\x0d\xa0\x96\x92\x8f\x69\x92\x70\xd7\xca\x84\x7f\xac\x95\xf6\x26\xd2\x05\x43\xff\x6e\x99\xfa\x21\xe9\x4d\x08\xc7\x3f\x1e\x24\x5d\xf3\x96\xe0\x64\x59\x69\xef\xa1\xf4\x4c\xd1\x77\x33\xd4\x17\x53\x41\xfc\x0f\xcc\x24\x2c\xfc\x47\xa9\x75\xe1\x13\xf9\xe1\x88\xab\x06\x68\x7f\xf3\x16\x0c\x17\xe7\x69\xdb\xa7\x64\x57\xe0\x65\xaf\x56\x78\x4c\x5f\x41\x5b\xf5\xe6\xe7\xf6\x84\x02\xc6\x66\xd7\x3d\x78\xe9\x4e\xd0\x0e\x6a\xf7\x69\xb3\xed\x97\x94\xee\x1c\x26\x64\x18\x8a\x63\x73\x9f\x6e\x02\xe0\x0f\x28\x74\x7d\xbc\x35\xdb\xdf\x73\xe3\xee\x59\x11\x74\xff\xb1\xfb\x0f\x0f\x0e\x82\xe6\x37\x50\x7b\x3f\x6a\xf7\x78\x83\xc2\xc6\x9f\xf0\x46\x5b\x99\x9d\xe1\x0f\xa5\x0a\xd7\x7b\x16\xec\xa0\x49\x17\x28\x82\x68\xde\x34\x49\x6d\x1e\x40\xce\xc5\x16\xc3\x00\x7f\x84\xd7\x92\xfd\x54\x12\x41\x79\x23\xfe\xd7\xeb\xf2\x7e\x83\x0a\x65\x5b\xbd\xd5\x71\x87\x03\x6f\x74\xe4\xad\x12\x7c\xca\x17\x6a\x9a\x9f\x94\xaa\xda\xa8\x63\xf2\x85\x4d\x1f\x2f\xbd\x93\xa9\x6d\x14\x2f\xdb\x3a\x57\xbe\x65\x2b\x58\xb8\x01\xbe\x9a\x4f\x96\x9d\x9c\xcd\x0e\x29\x2b\x22\xc1\x76\xc6\x4f\x6e\x7c\x61\x9d\x1a\xd2\x15\xf0\x70\xb3\x3a\xe3\x6f\x69\x9d\xdc\x08\x69\xe3\x60\x83\x99\xff\xbf\x4d\x72\xff\xb8\x89\x76\x28\x57\x00\x51\x3e\x3e\xb5\x75\xb2\x4a\x3e\x3c\xb7\x5d\xfa\xcb\x81\x4b\xf7\x88\x53\xd0\xfe\xc8\x47\xdd\xcc\x52\xff\xba\xc5\xea\x16\x79\xea\xac\xe1\x7d\xa4\x44\xc8\x33\xf8\xa1\xee\xb6\xfa\x45\xf0\xaf\xff\x41\xef\xd4\x12\xf1\xff\x7a\xcc\xcc\x3a\x13\x36\xdf\x7e\x18\x74\x3f\xca\x64\x39\x1d\xb9\x11\x0b\x47\xba\x0f\xb1\xde\x0f\xb6\xd6\xc3\x7e\x75\x88\x86\xba\x2d\x34\xfe\x4a\xa9\x09\x2e\x20\x62\xc3\x3b\xb5\x53\x5e\xad\xa3\x03\xfd\x3e\x0a\x7d\x23\x38\xcf\x4d\x92\x58\xed\xc7\x5e\x1e\x29\xb4\x3d\xab\x7d\x89\x1a\x31\xe0\x44\x78\x34\x87\x6b\x9b\x06\xc9\xb5\x8e\x66\xe5\xe1\x60\xe3\x87\x23\xc9\xf9\x8e\xe8\x87\x1d\x53\x78\x93\x53\x04\xcc\x16\xfb\x89\x3e\x38\x6f\xc4\xe4\x36\x9e\x91\x44\x7b\x27\xf6\xc2\x3d\x88\x83\xda\x61\x58\x5d\x87\x4f\x97\xfd\xb1\xf0\x01\xe7\xa5\xcc\x83\x43\x21\xa6\x76\x6c\x64\x70\x2b\xfd\x3a\xf5\xd4\xd5\x6f\xb6\x25\xde\x49\x01\x58\x55\x57\x96\x20\x9e\x70\xcb\xed\xba\xff\x80\x6a\xa8\x07\x67\x86\x6a\x7b\xff\x80\xe5\x50\xb0\xc3\x22\x9a\x64\xe9\x04\x51\xec\x65\xa4\x2b\x15\x76\x74\xd7\xcc\xee\xfa\xed\x45\xe7\x0e\x7f\x00\xbc\xe2\x6e\x1f\xca\xdf\x3c\x6c\x68\xf5\x36\x76\x9b\x09\xfd\x41\x0a\xb7\xb0\x45\x67\x4e\x1c\x9c\x72\x47\xc7\x28\xd1\x92\x56\x3e\xd2\x5b\xb8\x28\x9d\x5a\xad\x21\x6e\x23\xad\xfc\x7d\x0a\x6f\x38\xe5\x13\x75\x26\x0d\xca\x19\x84\x2d\x44\xe4\xf6\xc2\x27\x06\x51\xcf\x7b\xba\xf6\x6e\x73\xcc\x22\x7c\x19\xfb\xc9\x3c\xa8\xd8\x06\x61\x2a\x05\x45\x32\x39\xdf\x33\x8c\xdc\xf9\x2c\x3b\x49\x92\x14\xc8\x54\x25\x6c\x50\xb5\x5f\xa1\x3b\x9e\xfb\x35\xe0\x0a\xd9\xca\xfa\xfc\xc4\x95\x7b\x6b\xf1\x3d\xb0\x9a\x0a\x94\xc2\xce\x04\x0d\xe0\x1b\xc4\xaf\x04\xf4\x62\x72\x71\xb7\x44\x4a\x25\x61\x8a\x70\x9e\xc2\x46\xb7\xf0\xe6\x06\x54\xf4\x44\x47\x30\xd0\x8b\x90\x59\x83\x4d\x8d\xb8\x3c\x9b\xad\x4b\x7a\xd5\x15\x12\x72\x1f\xcd\xa1\x4b\xda\x58\x11\xc3\xc0\x96\x6f\xd2\xc9\
xe9\x3e\x68\x06\xb4\x98\x1a\xd8\xce\xcf\x2a\x4f\x1d\x7f\x50\xfe\x44\x81\x45\xd4\x2f\xa1\xe2\x19\x6d\xfc\x62\x1b\x4d\x06\x8d\xac\xb1\x9b\xf7\x81\xd0\x06\xf4\xc3\x28\x4b\x7e\xe9\x85\xf2\x4b\x09\x14\xbb\xca\xe2\x48\xc9\x5d\xf3\x44\x7c\x03\xcc\x3b\x01\x09\x7b\x7f\x17\x36\xe7\x37\xf2\xdd\xa5\xf7\x80\x46\xa4\xe8\x31\x92\xb1\x3d\xc0\x75\xff\x4b\x2d\xb8\x4d\x9e\x9c\xb6\xc2\xd6\x30\x87\xc2\xd9\x13\xa9\x15\x3c\xe3\x97\x36\x0f\x2c\x65\xd2\x71\x8c\x96\xbf\x6f\x9d\x96\xf9\xe5\xaf\xe7\x9d\xfe\x87\x62\x2d\xd8\xfb\xb7\x19\x77\xd4\x40\x77\x29\xc3\x68\x91\xdd\xca\xa2\x2d\xc6\x92\xc3\x82\x86\xe1\xc0\x1a\x1e\xad\xed\x1f\x46\x46\x08\x02\x24\x7f\xc0\x7e\xb6\x81\x14\x00\xff\xe9\xff\x40\x36\xc4\x07\xff\x2b\x92\xad\xfc\x15\x38\x7f\xd2\x57\xe0\x8f\x82\x51\x89\x03\xec\x84\xf0\x84\x08\x67\x03\x6c\x89\x1e\x57\xb6\xef\x09\xb9\xa5\x9d\xa6\x51\x0b\x4e\x35\xa8\x60\x53\x55\xbb\x60\xaa\xf3\x17\xd1\xe8\x3e\x1c\x43\x11\x7d\xee\x9f\x44\xa0\x43\x13\xbc\xa3\xcc\x8f\x80\x32\x3a\x35\xab\x27\xb1\x40\xa7\x79\xac\x56\xcf\x3c\xfe\x1f\xe3\xe3\xa2\xf7\x7f\x0e\xc2\x3a\x5a\x16\x1d\x35\xa7\x53\x76\x8d\x1c\xef\x43\xd6\x0e\x3b\xfa\xf6\x3c\xed\x63\xa7\xf4\xe1\xda\x69\x83\x4c\xcf\xab\xf7\x22\xab\xab\xfa\xf3\x03\x8e\x58\xba\x14\xd6\xab\xcc\xbb\x7d\x50\x69\xbd\x7e\x62\x90\x83\x26\x10\xaa\xb9\xb1\x7e\xbf\x4c\x3c\x6e\xa2\x1a\x68\xae\x43\xf1\x6c\xe7\x92\xe3\x0c\xe7\xfc\xcb\xe4\x10\xf4\x40\x5a\xd7\x58\xae\x80\x3e\xd6\x38\x0e\x5d\x50\xed\xd7\x3c\x48\xcb\x3a\xce\x67\xc4\xd6\x45\xea\x88\x53\x88\x88\x59\x84\xd3\x8c\x0d\x9e\xff\xb8\x60\x71\x47\x62\x57\x5f\x3c\xc3\x9b\x86\xb7\x46\x0f\x87\x3d\x6e\xc5\x72\x48\xa8\x09\x20\x84\x91\x79\xe9\x0c\xc9\xb5\x40\x62\x44\x77\xd0\x2c\xe6\x59\x6d\xd3\xe7\x80\x03\x2d\x30\x3d\xab\x43\xdf\xe5\xb3\x79\x81\xf9\x88\x98\x88\x55\x53\x6e\x7f\x08\x48\x84\x4b\xf9\xb3\xc4\xe3\x57\x77\x03\xb2\x03\x1b\x1b\xcf\xf9\x94\x58\x18\x9f\x64\x16\x0a\x02\xca\x16\x91\x4d\x67\x32\xd0\x97\x8b\x56\xc7\x1c\x49\x0a\x00\x67\x23\xbf\xc5\x36\x52\x04\x2a\x30\x37\x39\x06\x91\x0f\xd9\x54\x06\xcc\x11\x43\x87\x14\xb2\x70\x8c\xbc\xba\x77\x1e\x44\xf8\x3c\x81\xbc\xd1\x44\x0c\x27\x93\x24\x91\x27\xba\x15\xb5\x78\xf2\x21\x56\xa8\xa1\x1e\xa7\x32\xf8\xba\xfa\xbf\x2a\xaa\xbe\x95\x3f\x0f\x47\x3e\x56\xe1\x1e\xab\xb4\x6d\xc3\x1c\xdc\xc3\xa1\x61\x33\x43\xde\x96\x46\xd1\x7f\x38\x95\xe1\xd7\xd7\xec\xbf\xe9\x60\xd9\x8d\xad\x61\xf0\x0a\x48\x5a\x04\x26\x51\x4e\x0d\x74\x57\x68\xf8\x7b\x49\xbf\xfa\xb0\x9a\xb1\xc1\x44\x62\x39\x7c\x92\xb8\xb4\x79\x02\xb5\x4a\xfd\x72\x45\x38\xf5\x45\xf4\xbb\xf7\x1d\x9d\xa0\x8c\x60\xf1\xa3\xcc\x8c\xf8\xe9\x00\x09\xd6\x16\x6a\xd7\xd3\x8c\x2c\xfe\x48\x6a\x70\xda\x76\x95\x90\x7f\x0f\x0b\x11\x49\xb1\x53\x1d\xa6\x33\xe8\xfb\x91\x06\x22\xa2\x4c\xcf\x2e\xe4\xc3\x39\x0a\xa2\x9e\xc0\x6f\x63\x09\xbe\x02\x5f\x36\x6d\x47\xb4\x5b\x9d\x8f\x10\x8d\x7b\xe7\x24\x88\xb1\x91\x86\x12\x1c\xde\xae\xde\x4b\x48\xed\x10\xdc\x1d\x18\x85\x24\xcd\xf5\x6c\x36\x5f\x7b\x0b\x9c\x46\x2f\x8f\xe9\x1c\xd0\x65\x07\xbb\xa0\x84\x11\x57\xb6\xd5\xcc\x6b\xfc\x33\xac\x89\xcb\xa7\x59\x1d\x93\xfa\x50\xf6\xb2\x11\x3f\xa0\x79\x3f\x58\x05\xca\x72\xed\xd6\x1f\xaa\x61\x97\x46\x33\xe5\x02\xbe\x12\x3e\xcc\x4e\xa9\x22\xbd\x7e\xbf\x57\xf6\xfc\x27\x08\x08\x78\x9b\xe2\x80\xbb\x3c\x10\xc8\x01\xa3\xfe\x2e\xa4\xd9\x5b\xa5\x15\x41\x80\x1f\x2a\x0b\x51\xc2\xab\x17\x89\x21\x22\x39\x84\x0f\x84\x1f\x6a\xe7\x73\x81\xd8\x04\xa1\x3e\x48\x10\xa7\x8b\xb4\x0a\x7c\x81\x2e\x9a\x9d\x8a\x41\xbb\x6f\x24\xc3\x40\x0a\x0a\x9b\x7e\x25\x4a\xb2\x0d\xba\x84\xe8\x20\xe8\x0f\x61\xeb\x75\xe3\x56\x1f\xa1\x22\xa0\xb7\x17\x4b\x5b\x1c\xc7\x2a\x06\x7f\x38\x31\xab\xc7\x9b\xce\xf9\xeb\xdd\x1f\x8c\x77\x61\x0e\x1b\xd5\xd8\xb8\x1d\xc7\xf5\x98
\x64\x06\xf6\x0b\xbd\x4e\xda\xbc\x7d\xeb\x91\x78\xfc\x99\xe6\x7c\xb3\x38\x5b\x5f\xf7\x3b\xfc\xfa\x68\x3a\xd3\xb0\xcf\xa4\x2c\x08\xea\x01\x61\xc1\x9f\x6c\xb5\x08\xbf\x32\xb7\x4b\xc7\x33\x3f\xff\x5e\xad\x9f\xd0\xef\xd8\x4c\x22\x9a\x42\x0b\xff\xb1\xd1\xb3\x21\x07\x25\xeb\x53\xbd\xad\x36\x22\xf7\x5f\xd0\xe7\x1b\x3c\x2a\xdd\x85\x47\x0b\xdd\x70\x2f\x39\x0e\x02\xd9\xf8\xc7\xf3\xf8\x1d\x72\x47\x04\x19\x2e\x91\xcf\xfd\xf9\x05\x7f\xdb\x5b\xd0\xde\xcc\x8a\x61\x5d\x46\x28\x8b\x19\x92\xf8\x2a\x5e\x97\xbd\x8a\x9f\xdf\xaf\x39\xd2\xb7\x6d\x0b\x7c\xce\xc1\x3e\x70\x37\x97\x87\x67\xb3\x10\x98\x48\x7a\xc7\xaf\x14\x6e\x3f\xb0\x2d\x4c\x91\xed\x43\xdd\x38\x99\x9b\xed\xcf\x7f\x7c\xc3\x7a\x49\x03\xdc\x27\xda\x5f\xd1\xe3\x52\x06\x0a\x6b\xef\x8f\x4e\xd4\xdb\xbe\x06\xa5\x7c\xbf\x43\xc3\x72\x57\x47\xf3\x65\xff\x05\x1c\xd9\x77\x5f\x77\xed\x42\x74\x1e\x95\x1e\x48\x07\xd5\x8f\xb6\x30\x55\x2a\x0b\xea\xa5\xf0\x16\xb7\x36\xe0\x72\x75\xc7\xe4\x7a\x13\x2b\x84\xd2\xc0\x0f\x2c\x3b\xee\x19\x79\x47\xfe\x3f\x62\x47\x77\xcf\x61\x5c\x1d\xc1\x59\x7e\xa2\xdf\xf3\x20\x42\xfd\x9d\x1d\xd6\xbb\x1b\x29\xfa\x4e\x7d\x64\xc5\x34\xd4\xa2\xca\x77\xab\xf1\x9a\xa8\x4a\x0f\xa9\xb9\xfd\x41\x6a\xc4\x19\xa1\x53\x1e\x1f\x37\x34\xd0\xc3\x6a\x54\xc5\xc7\x74\x0d\xf9\x90\x33\x75\xfa\x34\xb6\x44\xdc\x97\xc2\xca\xec\x28\x3b\xb6\x42\xe4\x41\x94\x96\x58\x0e\xf4\x23\xb0\x8f\x71\x17\x2d\xc3\xd3\xcb\x15\x23\xb9\x27\xea\xd7\x05\xf7\x5b\x41\xfa\x86\xd5\xb9\xa2\xcb\x59\x50\x82\xd3\xb0\x54\x29\x06\xa8\xa3\xb7\x85\x87\xf7\xa4\xb0\x32\xb0\xe1\x34\x33\x91\xdc\xfd\x74\xaf\xdc\xe4\xef\xe8\x5b\xed\x31\x08\x9d\xc0\xb9\x0a\x21\x89\x33\xda\x71\x91\x55\x68\x48\x95\x0b\xf4\xdc\xc8\x0e\x73\xc3\x46\xf8\x9e\xd2\xfb\x16\x74\xc8\x9c\x19\x8d\x88\xb5\xd8\x17\x1a\xa4\x70\x37\x42\x13\x32\x22\x71\x45\x4a\xe1\xa0\x70\x7e\x22\x5a\x06\xf4\x40\x07\x1e\x6a\xec\xed\xce\x3a\xec\x2d\x90\x86\x5a\x1a\xb0\xe7\x98\x4a\xf3\x56\xa8\xe4\x05\xbd\xe8\x83\xc0\x08\x09\x0f\xe2\xf2\x9f\xc1\x9c\x05\x93\xed\xd2\xd1\x9f\x33\x15\xee\xd6\x90\x97\xae\xbe\x10\x62\x07\x57\x0d\x1a\x25\xdc\xb6\x4a\x91\x49\xd3\x31\x5c\xf0\x4c\x61\xb7\x06\x8b\x7a\xce\x9b\x20\xfe\x3e\x18\x51\xf3\x70\x8c\x84\xd5\xdc\xff\xea\x4f\x1f\x0e\xbe\x84\xcc\xc4\xa1\x89\x9f\x76\xdf\x1a\x1e\xa7\x78\x12\x25\xd2\x15\x6c\x34\x9c\x61\x90\x06\xed\xb6\x72\x51\xec\x8f\x49\xb4\x02\x70\x28\xd8\x15\x3d\xca\x7c\xe9\xaa\x3b\xa0\x20\xe6\xac\x75\xac\xe5\x0e\xbf\xb4\x9b\x93\xf4\x41\xa1\x27\x60\x15\x09\xee\xe7\xbc\x44\xc6\x20\x24\x32\xfc\xdf\x4f\xda\xb5\x81\xa6\xce\x78\xf8\x7c\x37\x19\xea\x91\x01\x10\xea\x19\xc1\x19\x02\xef\x67\x2f\x2f\x11\x3a\x60\xb7\x49\xda\xc5\xdd\x74\x0e\x56\x4b\x93\xb7\x76\xa0\x2f\xc8\x27\xb9\x62\x4c\x9a\x85\x88\x00\x3c\xf8\xe3\x1e\xf4\x1d\x28\x81\xf9\x96\x30\xbd\x16\x48\xb5\x97\xb4\x88\x67\xe1\x97\x8a\xa7\x5e\x16\xef\xc5\xfb\xd0\x2f\xa4\x3d\xbf\xc9\x82\x74\x7d\x1d\x5e\x66\x67\x07\xce\xc3\xe1\x14\xe3\xb3\x10\x6e\xdc\x7f\x96\xe4\x36\xc1\xa8\x0a\x00\xd8\x31\x86\xbd\x9a\xa4\x1e\x50\x1a\x44\x4e\xcf\x3a\xdf\x0a\xea\x99\x04\x19\x30\xc6\x9b\x50\x07\x00\x0b\x9d\xda\x9b\x82\xf7\x40\xa5\xe0\x0d\xfd\x86\xb4\x78\x99\x9d\x8d\xb2\x15\xc3\xc8\x63\x7d\x42\x4c\x70\x36\x61\x41\x1f\x57\x26\xac\xb1\xcf\x57\x8b\xdd\x41\xdf\x88\x43\xd1\x13\x9d\x84\x41\xcb\xb3\xbb\x8a\x8b\x81\x73\x27\xa8\x54\x5b\x72\x84\x3d\x89\x9e\xd1\xe8\x15\x0d\x52\xa1\xfa\x6f\x6b\x3c\x8f\x24\xad\xcb\xd6\xfc\xe8\x20\xb1\xe1\xbf\x8a\x83\x1c\xd3\xec\xf4\xe1\x03\x86\x53\x20\x49\x6f\x53\x18\xa1\xf0\x4d\x54\xd9\x0d\x00\xf0\xad\x30\xfa\x1a\x70\x47\xff\xfd\x84\xae\xbe\xf6\x0a\xb1\xef\x1b\x36\x87\x9e\x8f\xdf\xe6\xde\x06\x1c\xc1\x36\x7a\x27\x31\x63\x22\xdd\x43\x70\xe8\x3a\x1f\x8c\x96\x58\xff\xf5\x10\xf5\xa
c\x4e\xa9\x28\xd6\xb9\xb9\x91\xe0\xb2\x74\x16\x85\xed\x43\x9d\x4c\xb3\x66\x27\xe7\xc3\xc2\x9b\xcf\xcb\x49\x09\xf5\x8a\x47\x5e\xd3\x5b\x72\x7f\xdb\x3a\x0f\x0a\x0d\x58\x53\x73\xa6\x5f\x93\xbc\x61\x16\x5b\x80\xc5\xb0\x20\xf1\x02\x2d\x73\x6e\x73\x96\xd3\xe5\x42\x91\xe8\x99\xbb\xab\x0b\x76\x85\xb2\x5c\x96\x5d\x11\x46\x80\x3e\x3c\x46\x48\xcd\xf4\xc5\xed\x08\x7c\x90\xc0\xd6\x14\xfc\x71\xcb\xdb\x1c\x34\x0b\x8f\x86\x16\x3c\xb2\x04\x4e\x32\x9e\x22\x5f\x34\x8f\x3f\x30\xf0\x5b\x1e\x91\x13\x30\x7c\xff\xbb\xc0\xc7\xe3\x01\x94\x03\xe8\xb4\xfa\x27\x7f\xfa\x2c\x7f\xe1\xa8\xf3\x57\x09\x1f\xc0\x32\x80\x3b\x0a\x44\x98\x33\x78\x88\x9a\x6f\x0c\xe7\x77\x5c\x53\x8d\x20\xe5\xaa\x0b\x3f\xd2\x43\x8e\xd6\x23\xd3\x87\x66\x6b\x7c\x67\x13\x73\xa7\xe0\x1a\x17\x4e\x03\xcf\x27\x18\xb3\xc1\xf4\x90\x6a\x6d\xaa\x91\x0c\x6a\xfb\x11\x62\x23\x4a\x04\x06\x1b\xef\xe2\x31\x52\xca\x6d\xa8\xe0\xd1\x71\xe4\x66\xdc\xd6\xb3\x44\xd5\xfc\x44\x9f\xfe\x49\x7a\x4c\x78\x0c\x34\x42\x07\x8c\x33\x9a\xa5\xdc\x6c\x55\xf1\xdc\xa0\x91\x5b\xc6\xf5\xc4\x49\x98\x97\x04\x79\x2d\xc9\x1c\xf5\xb8\x9b\x92\x62\x9c\xe4\x3e\x70\x87\xa0\x42\x0e\x05\xe9\x6c\x16\x02\xdb\x26\xd0\xc7\xb6\xdb\xbc\x65\x0d\x75\xd8\x44\xf1\xd1\x94\xc6\x86\xc1\x8c\xf1\x0e\xc7\xea\xad\xe3\x8b\x43\x30\x16\x9d\x97\x51\xd1\x98\x50\x4e\x12\x62\x97\x4a\x0c\x22\x59\xa9\xf0\x42\x02\x38\x22\x37\x02\x9d\x71\x23\x29\x4d\x0c\x2c\xbd\x62\x24\x02\x88\x14\xe8\x23\x5c\x27\x08\x77\xd4\x16\xc0\xe4\x66\x8b\x1c\xc2\x35\x4a\x8c\x8f\x96\x5d\xfe\xb4\x3d\x3e\x1d\xdf\xea\x3f\xe5\xc3\xb3\xca\x12\x1c\xbe\x88\x72\x16\x80\x09\x51\x34\xad\x9e\x72\x84\x79\xb6\xc0\x0a\x1b\x31\xcd\xea\x69\x57\x3e\xd9\x1f\x79\x8b\x52\xeb\x39\x13\xd0\xb6\x6d\x03\xe5\x17\x11\xde\xb4\x08\xd6\xb0\x4f\xab\xcc\xba\x67\x3a\x27\x17\x1b\x3d\x13\x82\x0a\xe5\x21\xf8\x4c\x23\x03\x4c\x79\xf9\xe7\xff\x56\x96\x61\xf6\x2f\x7c\xc2\x40\xfd\xe4\xd4\x0f\x59\x45\x35\x35\x0d\x1c\xc6\x15\x12\x31\xd4\x5d\x9b\x54\xa0\x16\xb3\xb4\x42\x36\x9e\x45\x79\x1c\xce\x14\xde\x71\x68\x58\x1d\xe2\x60\x94\xda\x45\x26\xf0\xa0\x7d\xc9\x10\xfa\x3c\xbb\xa4\x83\xe4\x31\xc5\x52\x3c\xcc\x17\xb1\x8e\x19\x69\xb4\x64\x73\xc0\xa6\x86\xb7\x49\xcf\x31\x5b\xed\x2b\x0f\x39\x94\x48\x3e\xb8\xd4\xd0\x47\x36\x6a\x9e\x48\xfa\x08\x83\x8d\x21\xcb\xe1\x78\xc8\xca\xd9\x82\xb5\x9c\x40\x15\x9f\xe6\x42\x82\xeb\xd1\xd7\xdc\xf5\xc1\x90\x9c\xe7\xd5\x90\x18\x95\x00\x96\x93\x64\x5e\x7d\x5d\xc3\x49\xfc\xac\xdf\x16\x9c\x3a\x78\x37\x1b\xb4\xb4\x66\x3c\x8e\x19\x95\x0b\xd3\x2f\x12\x00\xf9\x4f\xc4\x4b\xa2\x7b\x49\xaa\x87\x0f\xca\x1a\x13\xb9\x2d\x06\x81\xc8\xec\xa2\x5f\x00\xe6\x5f\x67\xf6\x98\xfa\xe2\x79\xd4\x64\x85\x74\x81\x79\xb8\xdf\x8d\xca\x9f\x1e\x93\x81\x84\x3f\xbd\x15\xf6\x76\x62\x8e\x14\x74\x4c\xe5\x6f\x17\xf3\x13\x7b\xe0\x30\x32\x00\xe5\x43\x9c\x15\xef\x23\x89\xe8\x91\x42\xd3\x71\xe9\x93\xda\x2a\xf6\xe6\xb1\x0b\x87\x84\xd4\xa5\xbf\x44\x11\xc8\xda\x83\x00\xe5\xc3\x99\x0a\x0e\xd1\x9b\x20\xa0\x22\xdf\xe3\x6c\xa2\xe7\xae\xee\x71\x54\x76\x49\x57\x83\x2d\xe1\x31\x53\xe1\x6b\x17\x1e\xe9\xb2\xa6\x9b\xe5\x92\x8e\x5e\x77\xcf\x91\xbe\x02\x7f\xe9\xe6\x7b\x2e\xf3\x70\xdb\xbf\x92\xdd\xd5\x95\x4f\xab\x03\xbb\x72\xd2\xf6\xa1\x2b\x41\xde\x98\x0a\x43\x7f\x9c\xb2\xc1\xdb\x60\x46\x42\xdf\xb7\x02\x6c\x47\xaf\xac\xb5\xcb\xf5\x1d\xa2\x48\x75\xb3\x14\xe9\x1d\x2d\x9b\xba\x68\x58\x38\x83\x51\x4e\x4b\x18\x87\xd3\x81\x48\x1c\xa4\x5d\x76\x9c\xd0\xe0\x6e\xa7\xf8\x0b\x1b\x86\x19\x7b\x68\xa2\x6c\xe0\xc8\x26\x86\xe9\x0b\xe2\x06\x9d\xe9\x3e\x52\x6c\x04\x0a\x65\x33\x97\xac\xea\xa4\xd3\x45\xcb\x43\x47\x3e\x63\x95\x60\xc6\x66\x91\x10\xda\x80\x72\x21\x92\x8d\xed\xf6\x57\x40\x24\x7c\x87\xf6\x11\xe5\xe4\x8b\xcb\xc6\x31\x8f\x54\xc9\xa1\x04\x17\x
36\x3a\x3c\x49\xf8\xbb\x5d\x2e\xf1\xce\x52\x30\x60\xd3\x26\xf0\x5c\x7e\x09\xb4\x4b\xb8\x9b\x3d\x82\xd0\x63\x3a\xb1\xcf\x31\x12\xfc\x33\xff\x84\x65\x0c\xcd\xf2\x0d\x2c\x8c\xd8\x39\x69\x20\x52\x16\xc6\x23\x17\x45\x24\xce\xab\xfe\xb2\x8c\xfb\xa6\x53\x4b\xd6\x97\xee\x35\x5f\xa4\x1e\xda\xa7\x74\x4e\x48\x99\x88\xd0\xe0\xcb\x98\xe8\x66\x07\xb8\x46\xc0\x4c\xc8\xc3\x3e\x89\x0e\xdd\x36\xd7\x28\x48\x6b\xfe\x4f\xc0\x4b\x33\x58\x87\x09\xb9\x23\x24\xd3\x7c\x5f\xac\xcb\xdf\x50\xbf\xbf\x1b\x9e\x92\x3f\x22\x29\xbf\x43\x1b\x82\x8d\x8a\xc3\x3c\x24\x18\xc4\xb9\xfe\x76\x87\x8f\x9f\x53\x7c\x5a\x4f\xd2\xc8\x6d\x24\x7d\x82\xfb\xb9\x3b\x49\x04\x96\x0d\x2e\xed\x98\xed\x4b\xdf\x89\xc5\x90\x54\xf5\xca\xa7\x7c\x19\xf6\xfc\x0d\xce\x48\x8e\x44\x8c\x17\xca\xdb\xf3\x21\xcf\x3e\x5f\x13\xdd\xa5\x21\xfc\xda\x0b\x51\x03\xff\xc3\x73\x61\x32\x87\x83\xa4\x24\x75\x3a\xa5\x18\xd9\x83\xbb\x51\xd1\x68\x70\x27\x05\xb0\xfc\x68\xd6\x87\x95\x4e\x4c\x0d\x5b\x1f\xe4\xa0\x7f\x97\x8f\x00\x13\x3d\xb3\x30\x07\xa7\xb4\xb1\x3f\x0a\x00\x0a\xf1\x91\xdd\x56\x5c\xbf\xa2\x6e\x42\x41\xb7\xbd\x21\xdf\x79\x61\xb7\xf2\x43\x75\x26\x38\xf3\x73\x20\xaf\x91\x7b\x4a\xcb\xe2\x08\xf9\x1d\x67\xf9\x1d\x62\x52\x0f\xee\xba\x82\x22\x92\xee\xe6\xd5\x87\xac\x84\x16\xaa\x4b\x74\xe0\x9e\x7f\x7e\x80\x77\x6e\x26\x0a\x07\x65\x62\x82\x3e\x28\x00\xa1\x07\xa6\x9d\xec\x52\x60\xac\x3f\x37\x83\x59\x9f\xf7\x9a\x18\xb3\x49\x71\xc7\x97\xa3\x3b\x54\x1e\x27\x71\x24\x20\x21\x4d\xd6\x21\xa3\xa6\xcb\x79\x1b\xf2\x53\x78\x7b\x69\x84\x70\x04\x17\xc4\xf9\xfb\x0d\x4f\xee\x2b\x8f\x61\xfe\x03\xda\xc8\x4c\xb0\x63\xf6\xd4\x3f\xb7\x5e\x00\x14\x72\xde\xdf\xa5\x1c\xcf\xf8\xb3\x6b\xa2\x8f\x60\x91\x04\xf7\xdc\x83\xa6\xfa\xc0\x50\x9c\x69\x34\xc7\x58\x3e\xb1\x8e\x9f\xfc\x02\xb5\xde\x7a\xad\xdc\xc7\x2b\xa4\xf3\x88\xf0\xb0\xfe\x67\x5c\x76\xa5\x7a\xff\x8c\xe6\xe3\x01\x88\x8d\x7a\x50\x3f\xe2\xcb\x8d\x82\x32\xce\x4f\x80\x58\xe7\xa7\x13\xd9\x64\x0a\xbd\x47\x75\xc1\x98\xad\xc0\x11\x80\x85\x5d\x33\x9f\xcd\xb5\xcd\xa1\x94\xc0\xfe\xc1\x3d\x16\xf6\xe1\x7a\x08\x7f\xdc\xac\x36\xae\x0b\x4e\xa8\x3c\xd9\x84\xb9\x88\x2d\x03\xe2\x74\xc4\x00\xcf\x5d\x27\x44\xe5\x23\x07\x74\x6c\xf1\xfc\xe0\x93\x0a\xd0\x71\xe2\x27\x61\x3e\x7f\xed\x74\xf5\x3f\xd3\x71\xe8\x19\xe2\x45\x40\x09\x01\xc9\x0e\x45\x23\x75\x65\x2b\xea\xf0\x40\x61\x79\x08\xc6\xbf\x3b\xca\x87\x0d\x9c\x63\x85\x0a\x49\x00\xc8\x37\x06\xf7\x15\xd4\x12\x7d\xaf\x87\x07\x9f\xbe\x35\x52\x34\xca\x81\x74\xab\x9d\xd6\x0c\xe4\x00\x20\x3a\x8e\x20\x05\x61\xd9\x83\x6c\xa8\x03\xdc\x7d\x62\x17\x6f\x38\x86\xf4\xb4\x12\xbc\x99\x5e\x79\x44\x5f\xf0\x08\x56\x3b\xaa\x54\x45\x56\x01\x09\x19\x32\x1f\x4f\x40\xc7\xce\x9a\x27\xdb\x57\x3a\xd9\x4b\xe4\x67\x94\x0b\x76\xd9\xb4\x71\x68\x8a\x15\xe7\x8a\x15\xed\x19\xe1\x37\xcf\x23\x39\x4a\x85\x45\xe7\x2f\x96\x19\x16\x0f\x59\x83\xa8\x00\xcb\x60\x5f\xd1\x03\xe8\x89\xed\x74\x3d\xd6\x6b\xcf\x21\x00\xdf\xf3\xd6\xc9\x48\x68\xcf\xba\x54\x76\xce\xc3\x0e\xb3\x20\x03\x59\x64\x8e\x1f\x47\xa3\x12\x4b\xa9\x22\x47\x54\x72\x05\xb6\xca\x2d\x03\x8e\x7e\xf8\x93\x2d\x61\x65\x3b\x77\xb3\xad\x8b\x4c\xee\x4f\xd5\xcd\x5b\x09\x34\x06\x18\x3e\x29\x23\x21\x82\xc9\xf7\x97\xa4\xa0\x8d\x73\xde\x16\xa6\x7c\x5b\x26\xc0\xb7\xf5\x98\xba\x88\xad\x20\x8d\x3d\xc7\x48\xc9\xc6\x87\xad\x3b\x62\x8f\x6d\xfd\x12\x70\xd6\xd6\xcf\xb1\x8e\x80\x2e\x22\x6c\x06\xfb\x60\x3e\xb0\xaf\x7a\x49\xec\x20\x6f\xc1\x24\xff\x71\xf7\xe4\x36\x85\x4a\x29\x0b\x9f\xaa\x0d\x54\x61\xd9\x29\x02\x28\xe7\x6d\xbb\x98\x70\x8e\x9f\xd0\xdd\xe1\x8f\x16\x8e\x49\x55\xcc\x96\x26\x49\x7c\xd2\x70\x36\x64\x85\x64\x93\xb0\xbb\xc2\xc8\x21\x88\x8f\xae\xe6\xd5\xa7\x09\x51\x3e\xea\x21\x03\x1c\x9c\x2a\x2c\xd4\x57\x04\xab\x8a\x40\xba\x16\
xd5\xf1\xa0\x94\xa8\x66\x59\x21\x54\x23\xf4\xf1\xca\x6c\xb9\xae\x4e\x7a\xd7\x3a\xb1\x46\x63\x77\xf5\xf4\x5a\x55\xa4\x6b\x7a\x87\x85\xc3\x5b\x86\xf9\x38\xf5\xe9\xf9\x40\x66\xd5\x9a\x84\xf1\x2d\x75\x3d\x88\x90\x34\x5e\x99\x0d\x19\xae\x5c\xbb\x72\x6b\x40\x41\xdc\x62\x1f\x58\xa0\x89\x7f\x72\x3b\x2c\xa1\xb2\x5c\x30\x71\xdf\xbc\x95\x4c\x75\x7b\x41\x2b\x20\xf4\x9a\x7b\xe9\xb4\xca\x2e\x6b\xb9\xda\x97\xa2\x3f\x7c\x04\xf5\x23\x11\x94\x53\xb0\xca\xec\x22\xd2\x36\x64\x22\x32\xf8\x18\x89\x5e\x12\x9e\x88\x4c\x22\xc9\x15\x9c\x91\xdc\x52\xd1\x04\xe9\x2d\x42\xd4\x17\x24\xbc\x11\xd0\x49\x41\x19\x58\x68\x60\xfd\x67\x9b\x81\xe2\xfb\xc4\x3f\x77\xe1\xf9\x10\x3b\x41\x76\x9e\xbb\x2d\x12\xdc\x06\x6e\xe2\xc7\x93\x7d\xfb\xf9\x29\x90\xa0\x48\x24\x37\x45\x1e\xf7\x84\x84\x0a\x62\x1e\x87\xe8\x18\x65\x09\xe2\xc0\xc4\xc0\x96\xa9\xf4\x55\xae\x8b\x58\x57\xfc\x9e\x99\xea\x52\xf8\xeb\x3b\x47\x64\x56\xf6\xec\xa4\xe0\x3e\x9b\x09\x1d\x82\xc4\xfa\xe5\x07\x51\x4e\x7a\xf9\x4d\x04\xe9\x5a\x5a\x19\x7f\xad\xbc\x21\xf2\x2c\x11\x11\x59\x45\x39\x04\x94\x12\x28\x09\x69\xa7\x53\x23\xf2\xda\x0a\x1d\x2e\x37\x36\xd1\xa0\xa2\x66\x94\xa3\xfd\x41\xcc\x60\x0a\xf0\x11\x83\xf4\xc3\x4c\x66\x21\x25\x37\xd1\x1a\xdb\x83\xea\x3e\x8f\x4d\x9b\x29\x4d\x76\xaa\x83\x98\x24\x08\x90\x72\xbc\x02\x6b\x5c\x2f\x32\x39\xf4\x35\xbb\x1f\x67\x39\x74\x3b\x54\x12\x71\x6c\xae\x83\xa4\x9b\x63\x15\x9a\x84\xeb\xc7\x6a\x10\x6e\x56\x88\xa1\x34\xef\xdd\xb1\x9d\x5f\x23\x75\xe6\x1c\xa9\x3f\x8b\x91\x2a\x95\x91\x44\xfe\xd1\xc5\x98\xfe\x46\x5d\xd1\x22\x8a\x89\x42\x83\x46\x78\x1d\xf9\xc3\x84\x19\x8c\xe9\xa5\xa3\xcd\xad\xc2\x5b\xee\x40\x75\x30\xe7\xf8\x86\x1f\xb6\xfe\x95\x9f\xf9\xa9\x28\x2c\x87\x58\x78\x43\x56\x26\xf9\x13\x32\x1f\xd9\xe7\xaf\x0a\xee\x79\x3a\x04\x1d\x1b\xb1\xa7\x00\x25\x17\xd3\xf8\x94\xfe\x54\xfe\x01\x8e\x61\xdc\x3f\x23\x1d\x3a\x18\x26\xb2\xff\x47\x33\xac\x38\xb7\xa5\xd9\x95\x3c\x73\x8b\xd0\x6e\xee\x02\x4f\x2f\x72\x1e\x61\x10\x39\x3b\x82\x22\xc5\x2a\xa6\x9d\x95\x34\xff\x2c\x52\xb9\xfb\x60\xc4\x97\xe4\xad\x7a\xed\x73\xb2\xd5\x00\xab\xf8\x21\x3b\xe1\x14\x85\xb8\x85\x2f\x34\x01\x10\xbf\x2b\x87\xec\x13\x62\x97\x41\x31\xa3\x2e\x78\x79\x08\x15\xe1\x9d\x3b\x93\xdd\x80\x8e\x3b\x98\x62\xef\x82\xd9\xae\xfe\x07\xb2\x30\x74\x5e\x0d\x39\xeb\x46\xe1\x42\x43\x72\xdb\xec\x21\x55\xf4\xcc\x09\x84\x0c\x34\x2c\x19\x92\x9c\xe4\x06\x57\xb3\xea\xfd\x5b\x11\xd9\xd1\xe9\xe2\x0c\x90\xbb\xaf\x6b\x40\x23\x55\x19\x9b\x80\x4b\x42\x3c\x53\xae\x8b\x26\x8f\x67\xf8\xca\x23\xd3\x88\x62\x9b\x33\x49\x1b\xf3\x09\xcc\x64\x97\x7f\x46\x59\x0f\xb1\x73\x01\xef\x50\x1e\x33\xfb\xe7\xec\x7d\x1f\x1e\xe9\xd9\xf9\x07\x86\x20\x86\x97\x8f\xb0\x69\x48\x3a\x99\x59\xec\x1a\xcf\xcb\x0e\xaa\x4a\x3e\x68\xfa\xde\x5b\xe5\x2d\xbf\x0b\xaf\x88\x4e\x72\x4d\x02\x06\xed\x93\x6d\xef\xbd\x80\xb4\x55\xe5\x67\x87\xb1\xca\xdf\x02\x89\xdb\x42\x6d\x12\x26\x78\x09\x64\xee\x48\x9a\x5b\xd7\x66\x40\x39\x06\x00\x63\x97\x88\x2f\x94\x48\x07\xd8\xb9\x6f\x80\x50\x26\x78\x26\x67\xb0\x4c\x58\x90\x75\x75\xf8\xb6\xf2\xbd\x62\x26\x93\xc9\x7f\xa8\x38\xce\x10\x61\x01\x4b\x19\x9b\x2c\x0f\x39\x73\xdf\x96\x2f\xc7\xff\x02\xae\xf3\xac\x9b\xe6\x4d\x82\x9c\x53\x3e\x7f\x6e\x4b\xd9\xd8\x2c\x41\xc4\x74\x1f\x8d\xdf\x59\xec\x84\xbd\x30\xac\x8c\x08\xf7\xae\x7c\x86\xcb\x0a\x46\x1f\x12\x3d\xeb\x90\x97\x4e\x41\x8c\xf0\x09\xbb\x7a\x9b\x94\xe6\x1c\xf4\xde\x6d\xdb\xf7\x4c\x64\x81\x76\x21\x4a\x77\xab\xca\x48\x14\x08\xca\xac\x2d\x8e\x65\xb8\xfc\x44\xe9\xfe\xc2\x5d\x72\x86\xe9\xb0\x0f\x7a\xb5\x08\x7d\x44\xe9\x51\xfe\x39\x4b\x9e\x12\xe9\x3b\xb3\x97\xb8\xcd\xfe\x0d\x00\x66\x8d\xd0\xf9\x2e\xcf\xa7\x32\xeb\xf0\x22\xfe\x72\xb9\xa3\x54\xc1\x29\x32\x22\xcb\xd8\x45\xe4\xaf\x61\xd3\x57
[binary data omitted: unreadable hex-escaped byte sequence; content not recoverable]
a1\x14\xc3\x65\x38\x28\xb9\x22\x0c\x9e\xbb\x6b\x3c\x45\xdb\x7b\xd0\x07\xe2\xca\x87\x26\xfb\x57\x16\xf8\x1a\x81\x7e\x5c\x4e\x8d\xba\x67\xc3\x65\xf1\x31\xad\x49\x71\x2a\x21\x4b\xa9\x3f\xdf\x2a\xaa\x7e\x3c\x3e\x09\x00\xe8\x7a\x8c\x51\x0d\x2d\x50\xcb\x0f\x99\x46\xe4\x0e\xd4\xae\x82\xc8\xd7\xd4\x22\xfb\xf6\x62\xb8\x24\xe3\xac\xa3\x20\xb5\x32\x86\x52\x62\xd5\xab\x61\x91\xf8\x06\x57\x9b\x8b\x5f\x40\xea\xdb\x88\x5a\x64\x3e\xbc\xbc\x0f\x5e\x2a\x60\x7c\x19\x70\xde\x77\x25\xe5\x30\xaf\x44\x52\xc4\x95\xbe\x44\xc5\x44\x9d\xe6\xf3\xed\xed\x96\x35\x41\x89\x54\x3f\x4d\x74\xa1\x72\x20\x22\x1f\xe6\x83\x51\x05\xd5\x15\x38\x69\x2b\x75\x16\xa9\x97\x82\xdf\xd7\x58\x7f\x5c\x6e\x7a\xed\xce\xf2\x2b\x66\x5c\xe9\x27\x6d\xdc\x79\x69\xf0\x47\xba\x8b\x34\xed\x29\xcc\x1f\x41\x30\x81\x5c\xbe\x9d\x7e\x4a\x08\x34\xed\x05\x64\x83\x15\xd8\xe8\xbb\xce\xb9\x4b\x2c\xae\x3e\xc3\x3b\xc8\xcd\xe2\x7e\x58\x5d\x95\x9c\xc3\x99\xc3\x0f\x28\x13\xb5\x0a\x87\x50\x99\x01\xdd\x4c\xfe\x41\xbe\xe3\xc3\x7a\x6a\x28\x7a\x9d\xcd\x30\xe0\x4c\xa4\xa1\x9c\x32\x65\x12\xdb\xe6\x4c\x54\x93\x4c\xfd\xc2\x01\xf9\x29\xc7\xa8\x13\xe2\xe4\xe1\xa6\x94\x3a\x07\xe1\xfd\x57\x9d\xc6\xef\x35\x3d\xe0\x3f\xbc\xca\x57\xf7\x41\x8c\xbf\x5b\x37\xf7\x80\xf2\x35\xe1\xcd\x70\x16\xaa\x77\x33\x19\x1a\x51\xf2\x65\xe9\xf4\x3d\x30\x8b\xe9\x9a\x8c\x2a\x95\xa2\xb2\x9a\x6e\x99\x08\xe9\xd9\x3c\xe0\xc1\xc1\x52\x28\xa0\x91\x94\x56\x64\x75\x1a\x96\x42\x49\x93\x37\x6b\x9c\xe7\x10\x65\x94\x1f\xbb\x3d\x95\x7d\x39\x58\x5f\x45\x77\xc2\x28\x7e\x13\x2a\xac\x89\x81\x3c\x57\xa0\xdd\xc8\xfd\xfa\x9e\xf3\x05\x94\x53\x43\xb2\xf1\xce\x8d\x11\x48\x12\xc9\x01\x2f\x32\x7e\x79\x42\x7b\x49\xc5\xd4\x18\x0c\x1d\xe8\x1a\x28\x4e\x99\x99\x03\x80\xb6\x6e\x33\x55\xca\x5f\x41\x9c\xa3\xbd\x15\x87\x3b\x47\xb3\xda\xba\x6b\x00\x77\x04\x3a\xcd\x02\x32\x20\xd9\xa2\x1e\x61\x24\x77\xa4\x41\x8f\x3b\xdc\x0b\x59\x84\xd3\xd0\xa0\x9c\xf1\xf2\xba\xda\x80\x73\xfe\xd0\x3d\x73\x29\x26\x1f\xb7\x5c\x60\xde\x8e\x60\x4a\x3e\xfd\x0f\xda\x26\x96\x34\xe6\xf0\xf2\x69\x79\x7d\x75\x19\x7d\x08\xe1\xdc\x7a\x91\xff\x9c\x6a\xba\x97\x51\x36\x47\x50\x0b\xf1\xd2\xe8\xdc\xb3\x7d\xb3\xa0\x57\xd1\xf4\xeb\x92\xb8\x43\x1e\x23\x92\xb4\xf6\x61\xcc\xd1\x78\xef\x9e\xf9\x8f\xff\x97\x45\x48\x01\x41\xf0\xd2\x92\x0b\x55\x76\x33\x67\x65\x40\xb1\x52\xb2\xb0\xba\x0d\x1d\x23\x5d\x60\x8b\x84\xc5\xdd\xc2\xa1\xd4\x02\x89\x3c\xd6\xfd\xad\x35\xd0\x47\x22\x89\xf0\x8e\x42\xab\x3d\x4a\x62\x07\xa2\x99\x6b\xd4\x43\xd7\x2d\xc2\xe5\x5f\x45\x24\x04\x16\xc1\x03\x91\xf6\xfc\x31\x4e\x7b\xab\xcd\x6e\xf5\x6c\x85\xda\x14\xdc\x43\x25\x1a\xb7\xd2\xea\xd4\xdb\x9a\xfb\x0f\x8b\x7b\xf3\x9b\xfd\xc5\x36\xdf\x91\xe5\x6f\x9a\x67\x5b\x7b\x9c\x11\x91\xd8\x88\x02\x60\xe8\x48\x4e\x59\x0c\x5a\x67\x66\x6f\x2c\xe6\x92\x51\x97\x75\x4b\x95\xc7\x72\x79\x49\x11\x02\x33\x69\xf5\x5b\xfe\x52\xd3\x32\xec\x68\x83\x8f\x98\x59\xe2\xdb\x90\xf5\xe6\x21\x42\x6e\x65\x8b\x77\x75\xcb\x83\x4d\xfe\x9d\xac\x99\x99\x1e\x01\x23\x51\x1b\x61\xf0\x10\x25\x62\x88\x51\x8a\xc3\xb7\x85\x90\x8a\x85\x59\xd5\xdb\x2d\x11\x25\xb4\xd9\x23\x24\x41\xcb\xb1\xbc\x9a\xe0\xa4\x3d\x1d\x8a\xe1\xfc\x4c\x9e\x65\x0f\xde\xa9\x8a\xc6\x48\xf4\x30\x08\xb3\xc5\xad\xef\xc4\x45\x14\x58\x6d\x4b\x6f\xbb\x69\x64\xea\xbd\x73\x15\x01\xf0\xaa\x2c\xac\x5e\x95\x95\xd3\xf6\x00\xa5\xf3\x13\x5c\xd3\xe4\xc8\xe3\xd0\xac\x54\x41\x04\xf5\x59\x6b\xe5\x1e\x3f\xdf\x0a\x6d\x23\x10\xda\x60\x18\x73\x52\x56\x50\xad\x2d\x50\x51\x96\x86\x25\x9b\x63\xc2\x3c\x08\x33\x65\x2f\x9f\xa9\xd0\x0c\xfd\x68\x56\x66\xa5\x69\x8d\x80\x2b\x6c\xa2\x16\x46\x50\xf6\xe1\x7f\x08\x97\xa9\x1f\x4a\xe3\x21\x72\x36\x03\x22\x60\xd0\xf7\xca\xf3\x29\x1a\x5c\x79\x4a\x87\x6d\x75\x70\xc1\x8f\x97\x4e\x3f\
x3f\xaa\xa0\xb2\x3a\x69\x4f\xd8\xb7\xee\xea\x68\xe5\xe1\xfe\x09\x3e\x60\x29\x54\xcb\xdc\x9a\xaf\x54\xee\x58\x05\xb5\xbb\x7a\xe7\x08\xa7\xaf\x79\xe9\xea\x8a\xeb\x08\xf2\xeb\x5b\x6b\x95\x08\xbb\x47\xbe\x14\xa3\x0d\x0a\x9a\x2a\xbf\x7e\xfd\x6c\x90\x58\x3b\xf9\xed\xac\x3a\x6f\x34\x07\x00\x3d\x84\xa3\x54\x58\x01\xad\x10\xd4\xad\x1e\xa4\x8b\x75\xd8\xd4\xb4\x35\x85\xd2\x5a\x61\x4c\xab\x92\xa8\xea\xbd\x09\x4e\x75\x51\x1a\xfd\x51\x61\x94\xe2\xa0\x10\x6f\x65\x89\xd7\x27\xf6\xb4\xec\x4a\x22\x14\xad\x5f\x47\xab\x72\xaa\x20\x3b\x9f\x97\x34\x37\x67\xa9\xbe\xac\x09\x25\x60\x0f\x6c\xe6\xa2\x62\x68\x9f\x3e\xdc\xbe\xab\x03\xff\x73\x47\x9c\x03\x5c\xb1\xf8\x4a\x42\x1c\xdb\xeb\x8e\x6f\x55\x47\xdf\xa2\xfe\xd5\x06\xc1\x45\xe8\x35\xc5\x3a\x49\x4b\x63\xf1\x8a\x83\x17\x31\x6b\x37\x13\xeb\xb9\x20\xeb\x14\x58\x47\x7c\x98\x88\xc1\x43\x3d\x55\x45\xd4\x5b\x5e\x59\xe7\x2e\xdd\xc8\xa5\x9c\x92\xf7\x2c\x92\xbe\x5b\x98\x0a\x5a\x58\x2d\x5b\x8a\x88\xab\xd0\x03\x65\xcd\xf5\x51\x32\xf7\x6c\x86\x38\x8b\x50\x03\x8b\x9c\x01\x97\xb5\x1c\x98\x0d\xa8\x54\xb8\x90\xfd\xf3\x13\x6c\x46\x0f\xf8\x92\xd9\x52\x7b\xb0\x13\x58\xe4\x70\xb1\x34\xf8\x23\x68\xc9\x11\x8c\x57\x8c\x17\xe1\xbd\x55\xb8\x76\x5d\x8f\xa4\xf2\x2e\xa0\x39\x7e\xf4\xf9\xb7\x95\x88\xc9\x95\x59\x20\xb0\x73\x53\xc1\x7a\x71\x6c\x7e\x14\x69\x81\xc4\xd3\x7f\xca\xee\xe1\xc1\x7e\xc2\x3e\x04\xc1\x02\xa6\x5d\x2a\x0c\x5b\xe3\xc0\xe5\x2a\x8a\xe5\xa5\x65\x92\xf0\xff\x22\x08\xf1\x92\xa2\xe8\x4b\x54\xf5\x6f\x25\x81\xee\xb7\xee\x10\xff\x67\x28\x83\x3c\xa8\x89\x2a\x60\x8e\xee\xb7\x78\x17\xf4\x3b\xbb\xea\x24\xcb\xc2\x02\xdb\x58\x60\xcf\x8d\xf3\x33\x71\xb6\x7c\xd9\x7e\x93\x77\x68\xbf\xc9\xdf\x73\x1b\x34\xf9\xa5\x85\x86\x32\x34\xa4\x70\x15\xa0\x1e\x2a\x20\xfa\xf4\x26\x53\x67\xaa\x42\xfb\x21\x33\xe5\x55\x4e\xd8\x7a\xed\x1e\x9c\xcf\xc4\x35\xe7\x75\xb7\x5a\xee\x41\x71\x3e\x0c\x21\x16\xb1\xfb\xca\x7a\xdd\xda\x65\xca\x7f\x2a\xc3\xf9\xa1\x4d\xe5\x1c\x3b\x05\x55\x71\x76\x02\xa3\x93\x10\xb3\xac\x18\x41\x90\x54\x05\xf7\x4b\x3d\xb4\x66\x21\x48\x91\xe7\xf0\xad\x08\xc7\x76\x9d\xf6\x5c\x1b\xdb\x72\x8d\x55\x73\x3b\xb9\x3c\xbb\xef\xc4\xa5\x58\xee\x84\x13\xb8\x66\x71\x44\x58\x1d\x91\x42\x99\x9e\xa7\x38\x9a\xa9\xd9\x77\xbd\xbf\x0e\x5d\xf5\xcf\xaf\x73\x56\xbd\x78\x8a\x54\x05\x71\x87\xaf\xfd\x6b\xe2\xb5\x2f\xda\x72\xd3\x2c\x13\xaa\xa1\xfb\xa5\x80\xa5\xd9\xa6\x35\x3d\xba\xa3\x67\x04\x5e\xfc\x3c\x83\xd5\x3b\x9e\xe2\x80\x8c\x85\x88\x8b\xb1\xe8\x22\xa0\x14\xab\x1a\x6d\x70\x3e\x47\x37\x66\x3e\xc8\x37\x5c\x81\x71\x62\xe9\x95\xff\x70\x0a\xd2\x9f\xab\x72\xaa\x8c\xfa\x07\xa2\x35\x52\x6f\x5a\xf1\x90\xb5\x6d\x48\x92\xa6\x7f\xf2\x19\xb7\x29\x6b\xf3\xdb\x1e\xad\xdf\x0d\x57\x69\x8b\xfb\xa9\x29\x7b\x8e\x33\x91\x85\x08\x7f\x68\xe1\xa5\x62\xee\xa0\x67\xf6\xf1\x96\xa9\x55\xb5\xc1\x35\xb9\x87\x95\x22\x5d\x8f\x1a\xfd\xd4\xe3\x9c\x3f\x11\xc1\x92\x4f\x51\x0e\xeb\x2d\xeb\x16\x16\x07\x5e\x9c\xfa\xc4\x14\xd5\xa3\xd4\x75\x54\xb5\xf5\xdc\x78\xcc\xa2\x59\xfb\x43\xcc\xef\x47\xb8\x74\x82\xe9\xa8\x61\xc0\x03\xfd\x04\xff\x9d\x7b\x7c\xfb\xd7\x33\x2d\xc0\xfc\xb8\xe0\x21\x78\xe7\xcc\x88\x75\x57\x3a\xcd\xf9\xb1\x81\x05\x92\xc7\x57\x9e\xa4\xf1\xf9\xf9\x88\x15\x69\xfd\x40\x63\x28\x3f\xa2\xce\xfd\x40\x59\xe6\x97\x41\xfc\x68\x0c\x40\xb8\xd1\xa9\x8e\xfe\x61\xf3\x85\x68\xcf\x1e\x89\x43\xb6\x66\x7a\x67\x43\xeb\xd6\x68\xe5\x3f\x8c\xae\xa8\x57\x66\x29\xfe\xe5\xf3\x1f\xb2\xd9\xbe\xd9\xee\x5d\x7f\xde\xc2\xe0\x6b\x6d\x75\x5f\x8c\x38\x83\xcc\xb8\x3e\x50\x16\x41\xb4\x4c\x44\x20\xa3\x60\xef\xcf\x43\x53\x5d\xcf\xae\x75\xbe\xd2\x4b\x31\x8f\xcc\xd7\x21\x71\x60\xbd\x1c\xc9\x9d\x6e\x23\xe6\xcb\x60\x4e\x92\xa0\x21\xf5\x5e\x7b\xdf\x82\x03\xf9\x62\xfd\x97\x93\x25\x5b\x9e\x59
\x0e\xdd\x03\xe0\xd2\xa2\xcd\x0e\x55\xb3\x07\xaf\x1e\x13\x85\x3f\x54\x61\xfd\x87\xba\xe8\xdd\x1e\xea\x43\x95\xcd\xaa\xad\x75\x28\x7b\x7c\x58\xfa\x2c\x9f\xa4\x1a\x72\x00\xed\x86\xf9\xd6\xf4\x0b\x1c\x67\x19\x95\x6d\xf5\x23\x1d\xd1\x55\xe2\x27\xc3\x0c\xe7\x31\x0e\xc6\x87\xf9\x5b\x5b\x9d\x07\x15\x1a\x87\x29\x34\xba\x6c\x3c\x48\x80\xa3\xe7\x72\xf3\x9f\x92\xb2\x07\xd4\x7a\xb5\x65\x8a\xff\x9b\x4f\x91\xc1\xa1\x03\xb1\xa9\xb2\x7b\x8a\x59\xb9\x5f\xab\x58\x97\xaa\x19\x33\xef\xc2\xb0\xd3\x57\x14\xc1\x02\xfb\x21\xea\xd6\x2b\x2b\x80\x10\xaf\x7c\x0d\x3a\xe4\x9a\xda\xba\x09\xf0\x42\x8f\x7f\x0b\xfb\xe6\xe1\x0f\x5a\x73\xc8\xc3\x2c\xfe\x26\x4a\x81\xe4\x93\xa2\x0e\x2b\x1e\x69\x66\xaa\xd8\x96\x67\x7d\xf3\xc4\x3b\xb4\x51\x1e\xc9\x35\xad\xa2\x52\x5c\x55\x67\xd6\xa8\x69\xc0\xa8\xd9\xb7\x4a\x61\xc8\x96\x24\x4e\xdb\x98\x83\x9f\xe4\x98\xe3\xfe\xdd\x31\xdb\xd8\xd7\x5b\x29\x1b\xd4\x99\xff\x48\xa4\xa2\xea\xc1\x0a\x71\xc3\x61\xf4\x78\x74\x39\x2c\xeb\x4f\x49\x15\xf4\xb0\x67\x66\x71\x39\x35\x8f\x06\x20\xd8\x55\xc7\xeb\xcb\x4b\x3b\xf3\x99\x98\x55\x1f\xae\xc8\xde\xeb\x0b\xf5\x55\x7d\xf9\x8a\xa1\xaa\x1e\x04\x7a\x68\x94\xb8\x30\x59\x71\xdc\x85\x8e\xb0\x13\x9b\x86\xba\x73\x21\x95\x34\xc4\x84\xfb\x36\x2c\xee\x5d\x32\x43\x97\xbd\x97\x13\x30\x96\xbc\x2c\x3d\x74\x15\x58\xbf\xb5\x6e\x42\xac\xd0\x9e\xf2\xca\xc0\x75\xb0\x55\x60\x84\x60\x7a\xdb\x0c\x9f\x78\xa8\x7e\x3a\x3f\x5f\x30\x19\x99\xa3\x73\x50\x47\x6d\xc8\x4e\x3e\xe9\x7c\xe8\xb2\xdb\xd3\x2b\xbd\x9f\x3e\x84\x0d\x20\xe1\x2a\xb7\x72\x97\xde\x94\x97\x6c\xef\x9a\x65\x8a\xc6\x22\xae\xea\x94\xba\x08\x91\xc3\x88\x5a\xf7\x2d\xc1\x54\x19\xc4\x55\xb2\x46\x21\x17\x96\x7f\x82\x7f\xaa\x80\x6f\x28\x44\x5d\x45\x5e\x85\x34\xd8\x8f\x0a\xdd\xb9\x15\xba\x07\x28\x44\x29\x16\x1a\x00\xf6\x5f\xfb\xce\x64\x1e\x3f\x68\x3f\x1f\x6c\xb3\x7a\x7d\x94\xcf\xa7\xe8\x78\x47\x73\xb8\xdb\xf7\x24\xd5\xd8\xed\x00\x31\x47\x52\xb2\xdb\x11\x07\xc3\xb4\x59\x41\x66\xda\xa8\x4f\x61\x57\x6c\x11\x3b\x6c\x0b\x36\x29\xce\xae\xc1\x7f\x5d\xd7\xb6\xd1\xac\x1f\xd6\xf7\x73\x89\xeb\x61\x71\xba\xa5\xf3\xe9\x89\xe7\x26\xee\x69\xbd\x5c\x54\xca\xd3\xa6\x82\xba\x04\x5b\x2b\xe7\x23\x5d\xdd\xba\x14\x3a\xd8\x5d\xd4\xda\xf3\x5f\x1b\xbc\x24\xa2\x32\x68\xb0\x5c\x16\x79\xdf\x95\x14\x12\x5f\x4e\x80\x55\xb1\x3b\x32\x97\x16\x0d\xaa\x74\xab\x0e\x6c\x13\xd1\xae\x90\x1d\x9b\xae\x28\x5a\xbf\x69\x33\x57\xfe\xb9\x4f\x1e\x13\x2b\xb6\x60\xc9\xb9\x9c\x54\x64\x47\x8b\xc0\xcd\x60\xf6\xf0\x94\xb4\x0b\x2a\xdd\xe7\x2f\x07\x88\x5d\x69\xd2\x65\xdd\x4a\xae\xa8\x4d\x25\x62\xc8\x85\x38\xc7\x84\xa5\xc3\x3c\xd6\x97\x8d\x91\xe5\xec\x59\xed\xcc\x9f\xf1\x0b\xa0\x22\x7f\xb7\x0d\x71\x30\xf7\x66\x79\x3c\xb4\x22\xba\xd9\x1a\x4f\x2a\xc8\x42\x82\x4f\x22\xaf\x6e\xe1\xf6\xc3\x1a\xb9\x96\x8d\xb3\xd6\xfd\x49\xf9\x75\xe8\xf4\xbf\x2c\x58\xd4\x79\xc9\xbc\x5d\xbf\x14\x5a\x59\xed\xd9\xe8\x48\xe7\x98\xb3\x63\x5a\x3c\x4c\x75\x38\x25\xae\x9b\xde\xbb\x2e\x78\xaa\x9c\x9c\x59\xa0\xb2\x7f\x8a\x25\x1e\x7b\x1d\x1a\x3a\x1d\x19\xe6\x59\xfc\x20\x4e\x6c\x30\x93\x3a\x57\x4c\x3a\x09\x11\xe9\xd2\x1c\x42\xb3\x76\x05\x64\xa3\xe7\x82\x85\xf6\x1f\x6e\x95\xce\x47\x0b\xbd\xe1\x4e\x8e\x5b\x65\x8d\x1a\x0a\x68\xd0\x78\xeb\x3a\xa7\xae\x9e\xbf\xaa\x6e\x9f\xb9\x23\xcf\xdb\x69\xdf\xba\x32\xae\x11\x7b\x2b\x58\x05\xa4\xc3\x24\xfa\xcd\xa5\x8d\x37\x79\xee\xe8\xe5\x64\xbf\x07\x2f\xdd\x9e\x45\x72\x55\x82\xf1\x2e\xf1\xd0\x16\xb0\x0c\xbe\xd6\xa6\x83\x08\x9f\xed\x18\x5f\xdb\x2f\x19\x45\xd9\x73\x34\x80\xbf\x09\xc9\x85\xd0\x3d\x5a\xe6\x37\xf5\x3d\xd3\xb4\x06\xcc\x35\x0d\x65\x3d\x26\x9d\xc6\x30\x28\x35\x91\x7a\x0e\x6b\x52\xbb\x73\x09\xea\xbb\x64\xc7\x02\x72\xfd\xe3\x35\x75\x20\x88\x11\x65\xb6\x1a\x1f\x8c\x9b\x7a\x4f\xe0\x7e\xd
2\x6b\xbe\xc8\xa5\xf9\x40\x3c\x9a\xeb\x22\xd5\xf6\xf6\x27\x43\xbd\x14\x34\x65\xd9\xd1\xbd\xf5\xfc\xbf\x4e\xde\x0c\x3b\x84\x40\xf4\x2f\xd4\x43\xc3\x5d\xe0\x35\x13\xc5\x0d\x15\xd8\xfd\x87\xba\xb1\xa5\x36\xe1\xd8\x39\x0a\xe8\x68\x5e\x6e\x45\xa9\x85\x2c\xc2\xdb\x93\x72\x44\x7d\xdc\x21\x01\x94\x7e\x05\x4b\x30\x0c\xff\x5c\x37\x5e\xe5\x76\x96\xca\xc3\xbe\x2f\x2d\xa1\x75\x0b\xe8\x6c\x51\xa4\xc9\x06\x39\x1c\x62\xcd\x6e\x71\xb0\x10\xef\x44\xa1\x83\x65\xfd\x94\x5c\x94\xb5\x9c\x07\x6b\x26\xcf\xb2\x2e\xfa\x77\xcf\x59\xb8\x30\x7b\x16\xe6\x1b\xc3\x5a\xae\xb5\x91\x71\x8c\xc8\x9f\xa0\xb4\x85\xab\x1f\x3b\xb3\xe7\x3c\x48\x4d\x96\xbf\xf5\x89\x32\x23\x2b\xee\xe7\x40\xe6\x09\x5c\xa5\x78\x2c\x8c\x73\x1c\xed\x8e\x64\x94\xca\xed\xa7\x2a\xf8\xe4\xd5\x7e\xed\x01\xf7\xc1\x13\x41\x4f\x60\xad\x83\x79\x2b\xf8\x7b\xfd\xba\x23\x03\xba\x68\xcd\xd2\xe8\xbe\x7d\xf5\x68\xaf\xcc\xfd\xe2\x24\xe6\xae\x5b\x93\xd7\x42\x92\x97\x53\x72\x18\xfc\x69\x78\x0f\x8b\x3f\xbf\xd9\x08\xac\x0d\x8b\x66\xc4\x3e\xfe\x8a\xb2\x0b\x87\x84\x42\x5b\x3e\x1b\xa8\x70\x14\x78\x63\x64\x23\xd7\x3e\x58\x34\xa9\x37\x03\x6d\xcb\xaf\xc6\x6d\x2d\x39\x29\xb5\xc5\x7b\xf9\xbb\x84\xf1\x06\xac\x00\x7d\xa4\xe5\x15\xf3\x2b\xc4\x66\xd9\x4d\xdd\x50\xa5\xf4\x03\xe5\x7f\x2c\x64\xdf\x00\xcf\xfa\xf3\x79\x03\x04\xaa\x8a\x39\x6a\x21\x0c\x56\x49\xda\xa0\x8a\xae\x75\x1b\x92\x90\x2c\xd4\x07\x90\xf7\xb6\x17\xc3\xbf\x54\x13\x76\x27\xfd\xbf\xb4\x6a\x43\x98\x26\xfa\x70\xc6\xe5\x6a\x1b\x29\xf6\x95\x73\x68\xd6\x5d\xaf\xe0\x75\xd6\x28\x24\x7a\x71\xdc\xeb\x8c\x95\x3e\x87\x60\x04\x8f\x62\xab\xda\xa5\x3f\xe4\x2f\x58\x31\xb1\xb7\x66\x96\x6a\xb4\xf5\xeb\x46\x38\x35\x0a\xa9\x5d\xe4\x4e\x24\x5f\x2f\x8d\x47\x49\xdd\xfa\xa3\x2e\x6c\x03\x1b\x39\x05\x72\x0e\x1f\x2e\xd7\x28\x88\xdb\x46\x51\x4e\x45\xac\xde\x09\xe4\x61\x95\xf5\xed\x32\x86\x87\x1e\xae\xa8\xb5\x13\x21\x59\x7b\x47\x6d\xc0\xa6\x68\x9f\x59\x15\xe9\x42\x67\xb7\x2b\xdc\x05\xbb\x72\x7b\x1b\x73\x6d\x44\x95\x5e\x84\x95\x63\x36\x27\x00\xd7\xf5\x1b\xc6\xe9\xb0\x8d\x98\x19\x60\x48\xc4\x3a\x02\x62\x5f\x60\x94\x32\xff\x09\x13\x43\x92\xb5\x51\x4a\x0d\x9e\xb1\xcd\xb1\x5b\xfe\xf9\xb2\x47\xa4\x70\xa9\xfa\x77\x09\x6a\x31\xd2\x4a\xbe\xcc\x32\x89\xb0\x48\x96\x29\x57\xfe\x27\x08\x40\x0e\xcb\x45\xa0\xa1\x7d\xb9\xba\x68\xe6\xe5\x69\x5d\x16\xc2\xf3\xd8\x88\x7d\xd0\x63\xe5\xef\xcb\xc9\x86\x39\x2a\x80\x97\x51\x04\x70\x1b\x42\xbf\x9b\x59\xa0\xb5\x2e\x1d\xa3\xda\x48\xc4\x6b\x4b\x46\xfb\x27\x49\xde\x9e\x02\x34\x80\xaa\xaf\x25\x0f\x37\xed\x55\x15\xef\x3d\xf0\xb6\x4e\x31\x1e\xee\x45\x3b\xb4\x21\x85\x3b\x16\x46\xe1\x1a\xa3\xdf\xa5\xf9\x17\xe6\xb6\x67\xb2\xc9\xab\x88\x08\x97\x3b\x2a\x7f\x14\xe1\x61\xd0\xea\xf4\xf0\x43\xa7\x77\xe6\x33\x65\x7d\xe0\x20\x0b\x43\xd5\xcb\x4f\x49\x01\xbb\x76\xee\x57\x3c\x77\x91\x71\xe1\xd9\x87\x50\xee\x58\x39\xdc\xf7\x8a\xba\xbc\x11\x63\xea\x70\x25\x3d\x80\xe7\x9b\x4f\xcc\x59\x9f\xf2\x23\x8c\x5e\xff\xac\x4c\xe5\x9d\x35\x3c\x35\x4e\x3a\xfa\x63\xbc\xc6\x2d\xc9\x50\xb5\x65\x50\x54\xcf\xd0\x61\xb3\x80\x2c\xe3\xd0\xc7\x80\x17\x62\x57\x64\x8b\xf8\x96\xa9\xaf\x45\x81\x41\x02\x2b\x17\x93\x0a\x39\x21\x92\x46\x5c\xae\x8c\x59\x98\x17\xcb\x8b\x37\xd1\x60\x37\x47\xb1\xbf\x74\x22\x9e\x53\x73\xd6\xbf\x9e\x65\x72\x7e\x36\xfd\xdb\xb9\x39\x1a\xce\xbd\x88\x11\x67\x54\x80\xcf\xfc\x71\x08\x0b\x50\x89\x82\x02\xe4\xf9\x6e\xc6\x86\xfb\xc0\x55\x2e\x09\x06\x1b\xc3\xb9\x57\x10\x1b\x87\xa8\x6f\x4e\xaa\x9b\x5f\x2a\xc7\xdf\x93\x17\xd0\xc1\x6b\xd6\x4f\x4d\x9b\xd0\x03\x09\x63\x0c\x0f\x46\x55\xe2\xf1\x18\x40\xc6\xe3\xd6\x94\x41\xb1\x22\x76\xa5\xae\xdf\xe9\xf7\x03\x95\x78\x49\x10\x1f\x00\x8f\xa9\xa8\x5c\xcf\x81\xaa\xbc\x75\xdb\x58\x31\x3a\x6a\x54\x8c\xab\x28\xbd\x77\x08\xd1\x
9e\x9f\x77\x47\xb6\xa0\xd3\xa4\xfc\xb0\x80\x82\xbe\xa4\xbf\x6b\x27\xe5\x69\x99\xa3\x54\x32\x3a\x8f\x36\x67\x3e\x7c\x76\xeb\xbb\x16\xaf\x3c\xb0\x94\xcf\x77\xc8\x35\x7a\xfd\xcc\x66\xcd\xaf\x0e\xf8\x16\x73\x14\x72\xcc\x1a\x3f\x1d\x73\xa4\x64\x8f\x39\x0c\xef\x2d\x62\x71\x9e\x08\xbb\x63\x1e\xa9\xa7\x64\xbd\x92\x18\x94\xa8\xdb\x13\x40\x2e\xd6\x73\x54\xec\x27\x78\x60\x23\x7e\xac\x55\xb6\x9d\xc7\xc3\x06\x19\x2a\xcb\xb9\xba\x6f\x27\x9a\x75\xd0\xa1\x51\xd3\x17\x42\xc0\x9e\x0f\x9d\x8c\x5b\xdb\x69\x5d\x69\x6c\x69\xe1\x37\x0e\x0e\x97\x6d\x41\x52\x22\xc8\x55\xc1\x99\xde\xf7\xf7\xab\x71\xaa\x77\xda\x32\xa2\xc5\x90\x1e\xca\xd1\x20\x02\x83\x34\x67\x9d\x53\x7d\x29\x10\x85\xe5\x48\x02\x40\x48\x78\xf4\xb0\x71\xfb\xc6\x8d\x8b\x80\xa7\x30\x65\xb4\x4e\xa2\x13\x78\x60\x67\xe5\xee\xb0\x31\xb0\xdf\xb6\xf2\x49\xde\x75\x94\xd7\x4e\xfc\xbd\x8b\xff\x12\xf5\x69\x37\x92\xcf\x75\x51\x26\xc1\x75\xaa\x1c\x21\xa0\x9e\xb4\x2c\xac\x3d\xc1\xff\xc9\x7f\x42\xc9\xd4\xee\xdf\x09\x26\xe0\xfd\x44\xfd\xdf\xbb\x04\x04\x1c\xad\xed\x25\x5a\xb5\xbd\x74\x9c\xd2\x58\x47\xf6\x2a\x6a\x6a\xdc\xa6\x91\x7c\x67\xbd\xaf\x7b\x56\xf2\x66\xcf\x71\xd4\xa4\x67\x0a\x22\x1d\xd2\x0a\x4e\x2c\x97\xd7\x4b\x15\xed\x2a\x9d\x0f\x57\xf8\x3c\x19\xec\xc1\x90\xb6\x7e\x44\x25\x7a\x87\xb5\x4f\x94\x53\x2d\x52\xef\xb0\xa0\x0b\x2a\x35\x73\x2f\x5e\xc2\xcf\xdf\x7a\x7c\x55\x8d\x3e\xbf\x8f\xf0\x32\x2c\x54\xc0\xd9\x8a\x44\xb7\xb7\xa2\x1b\xb4\xd1\xc5\x66\x43\xeb\x1b\xe9\x3e\xfb\xf0\x2d\xca\xce\x67\xab\xda\x41\x29\x58\x72\xc0\x3b\x0d\x0d\xec\xe5\xf4\xf7\x74\x03\x0a\x99\x95\xfc\x25\x26\x16\x20\x50\x8b\x77\xb6\xe1\x2e\x90\x38\x6d\x4f\xa5\x34\x8e\x6d\x60\x55\xb4\x4e\xc5\xef\xbc\xee\x6f\xd6\xee\x07\x11\xa0\xa5\xc8\xbe\xf5\x4f\x52\xc5\x31\x5b\x3d\xa9\xc3\xef\x71\x68\xfa\xd6\x3e\x45\xa1\xbd\x95\xca\x97\x39\xe8\x57\xce\xb5\xde\xc5\xa3\x76\x27\x48\x8f\xc6\xb3\x10\x3d\xd0\xbb\x5d\x43\x93\xff\x5d\x53\xc0\x0a\xfa\x71\x7e\x46\xa9\x7f\xb4\x91\x44\x28\x01\xf7\x35\x0a\x65\x1e\x2b\x38\xdf\x00\xfb\x31\x2e\x5f\x45\x5f\xeb\x31\x1b\x43\xdb\x7e\x56\xec\x96\x36\x6d\x60\x17\x4e\xf7\x04\xae\x89\x73\x96\xa3\xa6\xcd\x92\x55\x5a\xb7\x97\x97\xd3\xb0\x2d\x51\xfa\x45\x9e\x9c\xe9\x0a\x17\x49\x2f\xeb\x33\x6a\x0c\x70\x8c\xfa\x57\x56\x02\x11\x0b\xb5\xbc\x01\xa3\x24\x0d\x72\xc0\x13\x78\xad\xd2\x2c\xbc\xce\x06\xe7\x0a\x21\x0b\xb2\xd8\xdc\x83\x1a\x35\x28\xf5\xca\x41\x06\xda\x4d\xc4\xee\xa3\x80\x1f\xec\x70\x66\xda\xd7\x37\xe7\xf4\x28\xd5\x3b\x09\xcf\x63\xa7\x63\xaf\x75\x91\x69\x65\x15\x95\xd8\xda\xf2\x25\xc7\x00\x75\xad\x6e\xab\xa1\x61\xe5\x5a\x9d\x9e\x63\x0b\x15\xa4\xad\x77\x22\x95\x98\x72\xbf\xe4\x5a\xbf\x72\xd4\xfd\x43\xe0\x57\xa6\x8e\x6b\x71\xc7\x5b\x8f\x68\xe7\x0c\x14\x26\x39\xd6\x4f\x95\xb1\x67\xf9\xc2\xac\x52\xf1\x41\x02\x40\x45\x6c\xf8\x65\xbe\x7e\x1a\x3e\x40\xf0\x00\xd1\x95\x67\x9c\xa7\x28\xd4\xfe\x73\xf2\xb5\xcf\xe1\x22\x02\x99\x71\x12\xa8\xf1\xd2\x91\x04\x4d\x02\xdc\x1a\x45\xd5\x15\x9c\xa6\x9d\xc1\xb6\xd4\x89\xf4\xe9\x38\x56\x5e\xb8\x53\xe0\x84\x6d\x58\x7f\xfd\xf1\xd0\x64\x21\x1d\x52\x33\xd2\x81\xe1\xe7\x71\x8a\x82\xc6\xea\x88\x45\x95\x21\xfd\xc3\x7e\x85\x5e\xcc\xea\xd9\x47\x15\xf6\xcf\x22\x75\x65\xb4\xcd\x39\xa2\xfe\x2b\x20\x7c\xe4\xf7\x29\x92\x2f\xa4\xfa\xce\xa0\x4c\x8b\xdb\x2d\x54\x9a\x1d\x5b\x87\x79\x00\x73\x58\xe6\x20\x4a\x27\x51\xb2\x21\x2b\x48\xe8\x81\xeb\x47\x79\xb3\xb2\xbc\xa5\xff\x0b\x15\x61\x31\x8f\xc1\xf7\xf6\x5a\x73\xcd\x7c\x87\x9d\x24\xdd\x74\x80\x91\xc3\x0d\xc8\x0a\x3f\x10\x00\x20\xec\xc7\xe2\xb5\x44\xdf\x9f\x12\x70\xb6\xe8\x3a\x7e\x93\xe5\xd9\xb4\x9c\xe2\x11\x2c\x25\x30\x31\x16\xa5\xe5\x4b\xae\xbe\xe8\x9d\x59\x0e\x0e\x64\x96\xb2\x2f\x85\xd0\x80\x7c\x08\x60\xb9\x34\x89\x1e\x44\x3f\xce\xbf\
x76\x1c\x07\x14\x8c\x1d\x88\xeb\x44\x6c\xdf\x1d\x19\x64\xd5\xfe\x77\x7e\x03\x39\x3e\x0f\x12\x7d\x42\xe7\x56\x43\x5c\xc8\xa1\xc0\x38\x38\x4a\xfa\xe7\x12\x35\x4d\x8b\xfb\x49\xf3\x92\x25\x06\x54\x78\x31\xf9\x8b\x71\x0d\xf4\xe2\x2f\x21\x16\x60\x81\xd0\x0d\xa8\xcd\xc8\xea\x34\x2c\x5f\x17\x48\xc8\x24\x05\x3e\xb2\x70\x88\x00\x4b\xfd\x67\xe6\xb8\x65\xc9\x8d\xf0\x1d\x2f\x06\x1a\x52\xcc\x5b\x5e\x3f\xc2\x0b\x08\x2e\xb0\x46\x4a\x76\xc9\x1a\x34\x81\x0b\x2e\x11\xe6\xca\xe1\xcd\x22\x09\x80\x25\xd8\x45\x8b\x94\x1d\x17\x66\xea\xac\x71\xf8\x94\x3f\x3a\xcd\xfd\x67\x0b\x19\x3e\x00\x07\xfc\xc6\xfd\x96\x33\xca\xa9\x36\x15\xf4\x44\xd7\xaf\x4d\x28\x73\x6c\x97\x01\x9f\x3e\x29\xdd\x7c\x4a\x4d\xee\xd7\xc6\xa6\xc3\x27\x70\x25\xb0\x06\x72\x77\x51\xf9\x7f\xd9\x3c\xd6\x7d\xa4\x9c\x5a\xbe\x39\x59\xcb\x09\xe7\x12\x39\x95\x46\xf0\xbb\x09\xfa\xbe\xd5\xd0\x4c\xef\xe0\xd8\x4f\x50\xd0\xf5\xe5\x1e\x92\xce\x91\xfa\x98\xaa\x2b\x74\x6b\x8f\x02\x00\x9f\x9c\x4b\x6f\xc1\x05\xc4\xb2\x26\xfc\x61\x2a\x02\x1e\x4c\x2b\xc1\x08\x83\xe4\x9b\x6d\x1c\xc1\x3c\xd8\x34\xff\xfb\x8a\x85\x4e\x36\xc3\x76\xd7\x16\x0b\xbc\xee\x03\x59\x37\x02\x8d\x26\xd8\x15\x84\x2d\x01\xca\x1b\x61\x62\x3a\x46\x59\x3e\x9f\x5f\xde\xf9\x99\x04\x50\x80\x6e\x80\xa6\xb3\xfc\xb0\x70\x5b\xc9\x01\xca\xb1\x54\x0e\xaa\xf9\xaf\x7b\xe9\x18\x0e\xa5\xf3\x25\x7c\xc1\xdc\x38\xf1\x74\xa9\x9d\x32\xf5\x14\x26\x9f\x5a\xfa\x9a\xf4\xb2\x19\x1b\xcd\x57\xd1\xb4\x49\x64\xb9\x86\x02\xb3\xcd\xff\xc9\x2f\x0f\x9b\xb1\x89\xb6\x8e\x13\xdf\x1d\x64\x9a\x88\x7c\xb7\x48\xe7\x94\xc8\x1b\xb7\xa5\x68\xe8\xdb\xe9\xf3\x1c\xa8\x32\x37\x68\x03\x21\xa9\x8e\x50\xe8\xaa\x58\xee\x1d\x29\x30\x76\xc7\x3f\x03\x3d\x4b\x09\x59\x1b\x6b\x02\x29\x94\xe9\xd5\xf1\x6c\x62\x65\xa3\xcd\xce\xfc\x30\x6e\x53\xaa\x4c\x9d\x13\xd1\x7d\x19\x12\xdc\x22\xa4\x4b\x9a\x39\x7d\x98\xe8\xb4\x48\xb5\x60\x8b\x82\xa2\x3c\x86\x71\x61\xe8\x2e\x0b\xf5\x60\xcd\xb5\x9f\x59\x99\xf8\xf9\xf1\xd8\x25\xcf\x2c\x3b\xac\x71\xbe\x45\x5e\x0f\xf7\x09\xe0\x22\xde\xe1\x5c\x6a\xaf\x66\x09\x5f\xd3\xc6\x91\x03\x5c\xc2\xff\xe9\x0c\x90\x2a\x4f\x6f\xce\x5d\xec\x2e\xe8\x02\x0a\xe9\xbe\x3c\xed\xb7\xfa\xf4\x1b\x79\xef\x3d\xd0\x13\x9a\xf7\x8c\x59\xed\xf5\xc8\xe2\xd5\x88\x11\x4b\x90\xd5\x6f\x4a\x6e\x00\x37\x11\x66\xa9\x1c\x60\x80\xda\xae\x23\x21\xfb\x28\xcc\x05\x06\x9b\x52\x1a\x1a\xd3\xa6\x63\x6c\x9a\x4a\x8e\x2e\x44\xe4\x01\xe4\x7b\x44\xa4\x4f\x0e\x21\xa8\x85\x4c\x75\xcf\x97\xbd\x04\xfe\x7a\xd8\x24\x80\x1c\x71\xeb\x2f\x58\x2f\x0b\xe5\xdd\x49\x00\x87\x72\x6a\x68\x4c\x5d\x0f\x79\x2e\x3e\x30\xa2\x78\x47\xe0\xcb\xcc\x09\xeb\x23\x54\x66\x1e\x71\xb1\x1f\x48\x61\x2b\x1d\xfc\x98\xed\x66\xdf\xfc\x7a\xa4\x08\xe7\x63\x6e\x10\x89\x39\xf0\x6b\x0f\x55\x5b\x1e\xe9\x25\x17\xd6\xf4\xea\x28\x19\x02\x02\xbb\xd4\xa6\xc3\x23\x0c\xa8\x88\x91\x80\x0a\xeb\x05\x9a\x07\x6c\x9a\x66\x22\x25\x3a\x39\x90\xe5\x67\x78\xe1\xe6\x7a\xf3\x2d\xcc\xd7\x14\x0f\x6c\x96\x15\x61\x3e\x94\x1f\xcf\x10\x51\x0b\x49\x62\xa4\x52\x56\xc9\x43\xe5\xed\x7a\xa9\x8c\xd2\xe0\x46\x8e\x96\xe5\x30\x36\xab\xa3\xc9\x0f\x6f\x73\x72\xf3\xd5\x42\x99\x92\xb7\x2c\xf7\x02\x67\xf4\xf0\x46\xa6\x04\x58\xee\x02\x37\x93\xd3\x39\x65\xf1\xa8\x05\x03\x1b\xde\x2a\xa8\x0c\x6f\x14\x5e\x11\x40\xb4\x91\xb5\xf9\x3a\x11\xde\x60\xef\x30\x39\xf0\xb5\x3b\x03\xe1\xc0\x11\x39\xdc\x58\x45\x98\x7f\x73\xf1\x12\xc1\xfd\xcc\xcd\x71\xf5\x99\x08\x7e\x38\x75\x01\xa1\x21\x4d\x85\xe5\xb2\xf3\xfd\x71\x2e\xbd\x52\x89\x43\xa1\x34\x74\xc9\x37\x9b\xe8\xa1\x10\x84\x66\xdd\x4f\x27\x29\xe8\x66\x8f\x0f\x54\x6b\x42\xf3\xff\xfe\xfb\xc1\x63\xb1\xe6\x75\x16\x73\x8e\x52\x00\x1e\xf2\x31\x8b\x7b\x2f\x2c\xfe\x80\xe7\x32\x09\x72\xe1\x3a\xd3\x24\xcf\xef\x41\x78\xdf\xde\xe2\xb4\x37\xe6\x3c
\xdc\x6d\x08\x65\x58\xe7\x60\xf2\xdb\x10\xb2\x12\x19\x31\x33\xdf\x3b\x64\xb9\x62\x0e\xd9\xa5\x76\x15\x52\x90\xf6\xc7\xf1\x0a\xa8\xf5\x1d\x6c\x02\x07\x3c\x77\x42\x2e\xc8\x00\xd7\x1a\x5a\xc9\xe2\xc3\xe3\x95\x6c\x34\x9b\x01\x84\xed\xbc\xbd\x64\xd0\xd6\x66\xd4\xb0\x0d\x60\x28\xb0\x7e\x5c\x9d\xb0\xce\x70\x3b\xae\xaf\xcd\x2b\x89\xf3\xa2\xc1\xf6\x67\xfd\x48\xa3\xda\x46\x5d\xf2\x7a\xe9\xcf\x39\xee\x88\x85\x1b\x97\x59\x48\x25\x8b\xb4\x4c\xef\x90\xe5\x23\xd0\x41\x06\xad\x6f\xca\x53\xd7\xd3\x0d\x59\x33\x3f\x3c\x12\xbf\x3c\x9d\x45\x52\x18\x1f\x1c\xd2\xd8\xbc\x2d\xa0\x31\x7d\xa1\x58\x6c\xcf\xd9\x03\x64\xab\xfd\x39\xe8\x51\x09\xdb\x03\xb2\xf0\x54\xd9\xc6\xc2\xf0\xcb\x06\x6c\x01\xb9\xaa\x3d\x90\x0b\x30\xc5\xfd\x83\x43\xe0\xa4\x15\x67\x44\x06\x73\x3f\x85\x8d\x54\xaf\x17\xd0\xf1\xde\x3b\x8b\xd1\xee\xfe\xba\x33\x4b\xe3\x40\x01\xe1\x03\xce\xca\x9a\x50\xef\x77\x4b\x65\xfe\xa3\xa9\x59\x7b\x96\x86\xe5\x79\xdb\x99\xb5\xf8\xbd\xd7\x76\x4c\x41\x73\xc3\x35\x76\x66\xb9\xda\x86\x3f\xe1\x5d\x5a\xcf\x4e\x33\x8a\xae\xae\x0f\x55\xd7\xe9\x81\x5a\x87\xef\x7c\xc7\xc5\xa6\x59\x23\x57\x51\xde\xc5\xc6\x55\x51\x2f\x83\x35\xde\x8d\x94\xde\x2f\x4d\xa1\x7a\x1e\x9b\x9b\xab\x3c\x4a\xec\xa5\x6e\x52\xd5\x7c\x12\xbb\xb3\xd9\xb1\xd6\x7b\x1e\x49\xb9\x3f\xa3\x44\x7c\xd2\x03\xb1\x49\x22\x43\xec\xdf\x8f\x5d\x84\xa0\xeb\x6c\x28\x29\x8e\x7d\xa1\x88\x76\xd1\xe0\xae\x53\xeb\x8e\x1c\x6e\x24\xdc\x1c\x45\x11\x37\xdf\x3e\xf0\xbf\x95\x66\x9e\xaa\x79\x4d\x57\x68\xfb\xd4\xc1\x62\x5c\xff\xb9\xf8\x58\xca\x23\x6b\x3f\x68\xdd\x0a\x49\x2f\xd6\xec\xa3\xd3\xb7\x48\x06\x37\xac\xd5\x33\x61\x05\xa9\xeb\xa8\xda\xfb\xb5\x9d\x9b\x02\x34\x87\xa0\xd7\x4a\x7a\x7b\x1a\xe7\x22\x02\xba\x06\x8f\x18\x59\xaa\xf6\xec\x0a\xd8\xca\xab\x39\xc9\x5d\xb5\x7c\x49\x65\x50\x04\xfb\x87\x25\xfd\x28\xce\xef\x03\xc6\x92\x1e\xae\xb7\x0d\x70\x78\x9c\x20\x2b\x76\x5f\xdb\xd8\x3f\x30\xc9\x2e\x8f\x84\x06\xa4\x37\xeb\xe3\x9d\x75\xf0\xce\x86\x0f\x30\x0b\x04\xaf\xf5\x4e\xa8\x81\x11\xe3\xbe\xb5\x36\x5e\xcf\x2f\x52\x2f\x4f\xe7\x92\x54\xe4\x97\x79\x6d\x0a\xae\xbe\x8c\x50\x3b\x97\x69\xf0\x6b\xea\xb8\x92\x60\xd4\x87\xce\x36\x2d\x9e\xba\x06\x8c\x00\x83\x86\x47\xb4\x3e\x9e\x5d\x75\x87\x2e\xf2\x74\xd3\xd6\x37\x2d\x50\x43\xf2\x0e\xda\xd7\x58\xbc\x44\x8b\x87\x0c\xf2\x8f\x83\x05\x1a\x99\x25\xb9\x17\x9f\x98\xe9\x55\x7a\x30\x90\x5c\xf1\x05\xc6\x11\xaa\xa3\x87\xef\xa9\x5d\x00\x8e\x85\x5d\x48\x3e\xe6\x0d\x18\x43\xfa\x7f\x3e\x77\xb9\xe9\x41\x67\x77\x24\x32\x25\xf1\x71\x44\xb9\x83\xdb\xf4\x60\xf1\x72\x76\x9e\x1c\x3d\x4d\xfa\x7d\x58\x42\x03\x6a\xc0\xa3\x6d\xef\x7f\xbe\x98\x03\x6f\xc0\xd7\x19\xe8\x08\x19\xd6\x1e\x0d\x36\xc0\xe5\x4c\xea\x69\x9a\xf5\x6c\xa4\xf1\xb6\x3e\xe4\xf0\xc8\x6e\x2b\x87\x12\xc8\xef\xf8\x12\x8e\x89\x7e\x60\xf8\x5a\xc9\xa2\x56\x4b\x20\x72\xfc\xda\x27\x3e\x5b\x42\x6e\x42\x64\x3b\x88\xf8\x23\xd9\xfd\x7a\x01\x3e\xae\xd2\xb4\x2a\x22\x70\x60\xfb\xf8\x73\xf0\x61\xf2\xf5\x5d\x4f\xa7\x0d\xd9\x92\xd7\xd9\x9e\xb2\x85\x3d\xe8\x3b\xab\x9c\xfd\xce\x29\x40\x05\xe7\x17\x5d\xe0\x8b\x89\x0b\x76\x5a\xb6\x74\x5b\x09\xf4\x51\x4d\xfd\xd0\x55\x71\x14\x7c\xeb\x67\xaf\xf2\x7b\xf7\xd4\x5a\x7f\xed\x2e\xcf\x46\xe6\xfe\x2c\xed\xee\x5d\xb4\x65\x8b\xe4\x1b\x3b\x87\xbe\xb6\x17\x5f\x9c\x45\xef\x0f\x1b\x01\x0a\x28\x93\xf9\x95\x79\x21\x23\x41\x2c\x02\x26\x66\x12\xe7\xde\x6f\x3d\x01\x2f\xa7\x65\xff\xd0\xb0\x96\xae\x2d\xfc\x3f\x6d\x68\xfd\xca\xb1\x26\xf9\x4c\xec\xe5\xaf\x19\xc5\xbc\x1b\xd2\xe3\x95\x54\xa7\x7a\xa5\xc6\x14\x02\x0a\xc1\x2b\xfa\xd6\x2a\x90\xa8\xbf\xb2\x5a\x42\x79\xee\x70\xa7\x85\xcf\x79\xd8\xd6\xf2\x3f\x41\x2f\x50\xd6\xb4\x73\x62\xfe\xea\x89\x5f\xb1\x08\x80\x00\xa2\xf3\xc8\x90\xc0\x83\x73\x2a\xd2\x16\x7c\xa2\xf0\xa4\xa
3\x39\x0c\xc1\x37\x88\xa2\xe2\x13\x5a\x52\x00\x3a\x30\xeb\x0b\xe3\x5b\x5f\xd8\xcc\x4d\x4f\xa2\xc5\xcf\x52\xce\x49\xfa\xde\xfb\x24\xfc\xba\x6b\x04\xbc\x03\xb2\x60\x73\x91\x52\xa4\xf7\xed\x48\x1b\xc9\x41\x3d\xc3\x54\x13\x58\x53\x29\x8b\x43\xb1\xc4\x96\x35\x14\x8d\xea\x79\x39\xda\xa9\xa2\x46\xe6\xc1\xa1\x9c\xca\x5d\x05\x7f\xbd\x83\x5a\x67\xc1\xde\x69\xa8\x68\xb1\xdf\xf7\x1b\xf2\x35\x8e\x1d\xe0\xb4\xf1\x46\xd5\x90\xdd\xa5\x8d\x81\x8f\x4c\x91\x80\x12\x39\x68\xd8\xe8\x36\xf3\xdc\x46\xa0\x77\xbc\x19\x64\x46\x7f\xa4\x3b\xee\x4f\x08\x64\xc7\x6b\xfe\x09\xfd\x71\x04\xe5\x22\xac\x21\x39\xac\x20\x6a\x67\xb7\x8b\x1f\x13\xe6\xb0\x95\xdb\x1b\xd7\x7b\x16\x0d\xc7\x66\x5b\xfc\x6a\x0a\x87\xe6\xdb\x39\x06\x42\x12\xdf\x53\x16\x80\x21\xd2\x5f\x77\x5e\xea\xea\x1d\xda\x9d\x7f\x39\x0a\xb9\x95\x36\x93\x40\x80\xaf\x4a\xe7\xc1\x7f\xee\xe3\x3b\xbe\x4b\x30\xe8\x95\x5e\xc6\xf5\x71\x50\xdd\xf5\xb6\x43\x5d\x54\x02\x40\x02\x55\x86\xbc\x39\xb5\xfe\xe3\x72\xd6\xf6\xa2\x28\x56\x59\x13\xaa\xca\x79\xf5\x9f\x7c\x55\xaf\xf7\x7b\xe1\xb7\x52\xfd\x84\x58\x88\x61\xc8\x12\x21\xaf\x40\x34\x29\x54\x5e\x18\xf8\x06\x91\xf9\xad\x71\xfc\xf2\xfd\x0b\x07\xfe\x16\x08\xd0\x60\x8f\xc9\x24\x9c\xc4\x1c\x76\xb2\xb7\x3a\xfe\x0b\x4a\xee\xea\x05\xae\xdb\x26\x7b\x55\xd0\x07\x29\x86\xc3\x96\x42\x58\x87\x3e\xc6\xb7\x97\x74\x8b\xae\x5b\x19\x06\x0b\x94\xbb\xbd\x5c\xe7\x9d\x05\x7d\xf4\x5c\xf6\x12\xb0\xc4\xfd\x9f\x1b\x1c\xd0\x43\xf8\x38\x9f\xd2\x53\x87\x09\x69\x44\x3e\xf2\xb9\x8e\xa2\x5a\xfa\xc1\x79\xe8\x75\xb8\x42\xa8\x90\x14\xb9\xa9\x07\x90\x49\x86\x25\x53\x77\xd0\x11\x18\xb9\x1c\xf7\x1a\xda\x01\xd7\xd7\x4e\xf8\xe4\x8d\x2b\x9a\xd4\x21\x2b\xec\xb8\xa5\x0b\x0d\x23\x61\x07\x85\x33\xe6\x6b\xd7\xc1\xb7\x33\x2f\x64\xb5\x6f\x85\xed\x81\x8d\x4d\x42\x53\x00\x1c\x3c\x26\xe5\xed\x85\x24\xc5\x1f\x72\x9e\x0b\x83\x00\x16\x2c\xb3\x04\xe0\x97\xec\xa9\x1d\x94\xb4\x88\xcd\xb0\xe6\xf1\x62\x10\x2c\xfd\x39\x7e\xa7\x84\x62\xaf\x79\x13\x26\x66\xce\xe3\x1f\x11\x78\x39\x0d\xdb\x00\xbe\x51\xa5\xe0\xf2\x71\x84\x00\x7c\x49\x42\x73\xa0\x04\x1e\x70\x8e\x8d\xf8\xe8\x4b\xe2\xe2\xd7\x17\xe4\x30\xe5\x2d\xbc\x91\xa7\xac\xde\xef\xca\x1a\xa1\x5f\x98\x79\xf1\x44\x51\xe6\x22\x8c\xc3\xde\x20\x5d\x55\x81\xa4\xae\xdc\xf2\x2c\x2e\x60\x70\x46\xa0\x2c\x81\xc3\x38\x7e\x84\xbf\xc8\x57\xd3\x7b\x5f\x9b\x0a\x7c\x13\x44\x02\xe6\xa2\xfb\xca\x16\xa4\x80\x5b\x1c\x94\xe8\x00\x8e\x42\xcb\xd8\x21\x55\x21\x40\xfc\xa2\xd3\xbe\xd9\x3a\xf8\x59\x9a\xed\xc8\x37\x49\xd1\xfd\xf0\xd6\x9c\xd7\x21\xed\x5b\x58\x71\x59\xbc\xd6\x39\xa4\x0d\x6e\x2d\xc3\x84\x00\xee\x66\x5c\x47\x5d\xfd\xb4\x72\xf5\xc2\x6c\x3f\x54\x33\xfd\x12\xa4\xbe\x51\xf0\x60\x7c\x58\x37\x05\xb3\x96\x9e\x3b\x38\x4b\x50\x82\x4e\x6a\x72\xc1\x47\x40\x6f\x3e\x30\x14\x4d\xc0\x20\xf4\xeb\xe5\x62\xdc\xcb\xc5\x38\xe9\x71\xf6\x70\x6e\x2b\xb9\x6a\x0e\x3f\x64\xd9\x18\xd3\xea\x00\xca\x58\x7f\xcd\x92\xc1\x07\xf8\xa1\x34\x82\x30\xf1\xee\x32\xf4\x62\x20\x17\x74\x69\xc2\xa1\xc2\x10\x81\x6c\x98\x21\xbb\x42\x14\x46\x97\x2a\x2d\x90\x69\x64\x04\x55\x2d\xbb\x25\x0c\x25\xa5\x7b\x5a\x37\xaf\x94\x2c\x42\xb9\x1f\x4b\x79\xc5\xda\x2c\xf9\x20\xa7\x4b\x82\x08\x3b\xaf\x9a\x80\x64\x2e\xc5\x25\xf4\x85\xf6\xf8\x8d\x49\x18\x10\xfd\x42\xd5\x9d\x54\x5c\x75\xf3\x63\xff\x07\x89\xf2\xb9\x48\x86\xef\x9e\x31\x3a\x44\xb6\x3c\xea\xad\xde\xef\x60\x42\xe3\xc0\x45\xf2\x7a\xbf\xfd\x9c\x37\x83\xdc\xf4\x0f\xee\x00\x56\x74\x04\x49\xbc\x85\xa0\x78\x45\xc1\xd7\xc1\x14\x34\x5f\x3a\x50\x5a\xba\x25\x77\x20\x3e\xff\x51\x34\x25\x3d\xa0\xa1\xec\x4b\x02\xf1\xc1\x50\x65\x95\xe3\x98\x05\xa9\xd8\x35\xd0\x3a\xe0\x2c\x2d\x0c\xc5\x4a\xdc\xfd\xa1\x96\xda\x96\x49\xfa\xf3\x23\xd5\xb1\x80\xae\x68\xe5\x03\x48\xcf\x6f\x21\x53\x07\x
b4\x85\xee\x2e\xf4\x11\x84\xe0\xb0\x77\xde\x2d\x53\x8f\x4c\x2d\x9f\xe3\xef\xee\x70\x49\xc6\x50\x85\x40\x0b\x81\x18\x60\x02\x16\x50\x8a\x43\x46\x66\x88\x74\xc0\xbc\x3f\xc2\x8a\xd9\xc6\x09\xf7\xac\xef\x63\x22\x75\x64\x2f\x7f\xfd\x10\x3b\xd1\xac\x8f\x6b\x08\xce\x23\x0b\x4a\x08\xc4\x45\x29\x87\x44\x3d\x2f\x17\x51\x60\x4d\x0e\x32\x2a\x0e\x44\xb1\x9e\x86\x57\x21\xf5\xed\x40\xb0\x73\x70\xcc\x83\x20\xfa\x58\xf2\xce\x94\xaa\xb6\xb4\x5c\x5f\x91\xf9\x2f\x92\x13\x4a\xb5\xbb\x38\x84\x65\x62\x6e\xbe\x50\x1d\x1a\xd1\x50\x5d\xea\x05\xf2\x09\xd2\x4a\x78\xb1\xb8\x08\xd4\xc4\x5a\xa5\xa8\xd0\x69\xfa\x63\x91\x57\x6a\xf6\x39\x66\x4d\x3b\xf2\x9e\x7a\x46\xf6\xb9\x13\xab\x62\xcf\xaf\x2e\x52\x71\x7b\xde\x88\x36\xdc\x33\x4a\xff\x81\x99\x78\x12\x7e\x81\x0c\x88\xb7\x1b\x16\xc5\xbf\x73\x38\x89\xf7\x88\x7b\x5a\x88\x7f\xb2\x20\x39\x3c\xc3\xe3\x66\x7e\x6c\xd3\xb7\x93\xa9\xd5\xad\x52\x43\xa0\x2a\xe7\xb1\x41\x32\xbf\xb1\x62\x70\x38\xd1\xe4\x8b\xf4\x9b\xb6\xa2\x0a\x52\xb3\x58\x26\x86\x62\xbf\xac\x35\x9d\x85\xb7\x50\xfb\x62\xd1\x6f\xe6\x52\x84\xed\xcd\xde\x2d\x61\x2e\x24\xcb\xb7\x15\x8e\xac\x80\xaf\xf0\xd9\xc6\x26\x19\x52\x2c\x57\x29\xd3\x53\xe0\x70\x0b\xc8\xd1\x36\xaf\xc2\x2b\x00\x4c\x44\xa1\x7d\xbf\x9e\x3c\x31\x4c\x3a\xbe\x2e\xbf\xf9\x82\x8c\x3f\x41\x15\x6e\x6e\x46\xa5\x83\xb0\xea\x71\x18\x06\x71\x15\x02\x98\x84\xb1\x2d\x3c\x9b\x2f\x2d\xfd\x57\xe4\x6d\x2b\x01\xca\xa0\xfe\xe4\x96\x03\x20\xb1\xff\x91\x3a\x40\x16\x4d\x18\x8c\x2f\xc4\x02\x05\x6f\x1a\x1f\xaf\xca\x66\x4a\xed\xfe\xad\x50\xba\xf4\x0b\xb8\x02\xda\x7d\xbe\x92\xa3\xd7\x2c\xe2\x0e\x63\x9d\x25\x7f\xe0\xa2\x00\x84\x6c\x3c\x52\xd3\x6b\xa5\x82\x42\x88\x32\xe4\xf5\x4c\x4f\x1e\x1e\x59\x5c\x3a\xcc\xcb\x83\x99\x91\xf8\x49\xef\xff\xf9\x14\x18\x8d\xaf\xb5\xdc\xd6\x77\xb9\x69\x2b\x10\x1c\x71\xbf\x09\x2e\xb1\x0e\x75\x9a\x73\x68\x26\xdc\x57\x65\x82\x7f\x83\xe8\x4b\x40\x35\xa6\xb7\x36\xb5\x07\x94\x2e\x2c\x5b\x12\x1b\xd1\x02\x69\x75\x24\x79\x20\x79\x97\x2b\x80\x06\x40\xcd\xfc\xd2\x22\x65\x75\x80\xdb\xc8\x94\xe7\xdd\xe0\x1a\x3d\x72\x27\x4c\x47\x19\xb4\x23\xef\xa5\x8c\xf2\x67\x08\x6b\x2a\xf4\x83\xb1\xc3\x1c\x9b\x49\xe9\x65\xb3\x01\x6e\xc0\x3b\x56\xaa\x01\x6c\x89\xe9\xe3\x0d\x09\x95\x4f\x6e\xb2\x0f\x33\x94\x51\x18\x82\xef\xfd\xeb\xd1\x28\x5c\xfd\x26\x99\xa3\x0d\xa0\x4c\x21\x50\xfa\xad\xa9\x55\x58\x2b\xd1\x84\x1c\x9a\x10\xc3\x3b\xa4\xe1\x2b\x51\xd5\x6b\x60\xb9\x56\xd0\x64\x7c\xb9\x7f\x88\xdb\x40\xf3\x24\x48\xf3\x5a\x1e\x77\x08\x8e\xaf\x25\x6b\xfa\xb4\x12\x47\xb9\x12\xff\x81\x68\xfe\x70\x5c\xb2\xba\xde\x88\xb0\x1c\x57\xbc\x20\xf0\x09\x6f\x01\x4f\xd8\x22\x8e\x80\x21\x19\x21\x34\x48\x08\x8a\xaf\xd0\xc9\x15\x52\x65\x26\x85\x7c\x9d\x25\xcd\xdf\x54\x1c\x6c\x28\xc0\x46\x7f\x9d\x9b\x19\xb3\xb3\xc5\x42\x14\x22\x4b\x46\xff\xaa\x9c\x4c\x00\xcf\x2d\x60\x87\x2c\xc0\xd6\x2c\x21\xeb\x95\xc6\x90\x74\x4d\x58\xf3\x10\xd2\xfa\x36\x13\xeb\xa1\xba\xea\x71\x55\x39\x91\x51\x13\x54\xb0\x2e\x73\x94\x72\xfd\x4e\x68\xba\x63\x36\x44\x49\x75\x2f\xe9\x5d\xb9\xe7\x35\x35\xfb\x69\x64\x5c\xf5\x3d\x99\x4e\xae\x84\x1f\xb8\x10\xbb\x73\xed\xdf\x86\x6b\x80\xdd\x72\x5a\x1a\xa4\xbe\x09\x43\xc8\x6e\x61\x79\x7b\xf6\xd0\x6b\xfc\x6f\xfe\xb4\xa5\xba\xe2\x8c\x47\x42\xe7\x2f\x62\x72\x2d\xb5\xff\x6a\x3d\x00\xae\xe2\x91\x0d\x41\x24\xbe\x80\x53\xc7\xb2\x7c\xc8\xe7\x86\x1c\x7e\x3f\x6b\x1d\xb4\x6e\x67\x45\x5c\x10\x0d\x02\xa6\xc2\xca\xa1\xa0\xc5\xd6\x7f\xf3\xa8\x7b\x61\x27\xbb\xec\x89\xf8\x8f\x0d\xb0\x13\x22\x32\xac\x09\x11\x14\x04\x9a\xb7\xc8\xf5\x71\x53\x18\x1a\xc4\x17\x59\xab\xd2\xdc\x09\x52\x71\xdf\x81\xda\x68\x06\xc8\xcb\x97\x5a\xbe\x34\x24\xd1\x02\x97\xfe\x29\xa0\x1d\xc2\x93\x83\xc2\xcd\xd3\x8f\xc7\xc8\xda\xc4\x99\x2b\xe6\x93\x9f\xb3\
x4c\x00\x52\x75\x9c\x48\x88\x3c\xc3\xe3\x9a\xeb\x4f\x79\x96\x2c\xe9\xa8\x5c\x13\x53\xee\x25\xc9\xf7\x79\x49\x13\x87\x09\xbf\xf5\x0a\x75\x25\x10\x9b\x02\x40\x31\xed\xd7\x59\x07\x0f\x9b\x21\xcd\x2f\x44\xc2\xf0\xca\xfd\x3a\x9d\x5a\x3b\x65\x26\x48\x7f\xc9\xfd\xf6\x68\x08\x4a\xd0\x6f\x5a\x0a\x6e\xff\x7c\x69\x60\x0a\x79\x24\x02\x5e\xdd\xc8\x7b\xa6\x6c\x3f\xea\xe8\xec\x76\xa6\x37\xd1\x04\xf5\x9f\x4f\x8f\x3d\x7c\x71\x71\x4a\xe6\x72\xaa\x9b\x8d\x75\x3c\x18\xa7\xe6\x94\x1d\xb5\x09\x8b\xc6\x90\xf0\x67\x0b\x34\x35\xa7\x05\x1b\x63\x0d\xef\xe8\xf1\xe0\x95\x00\x00\x3b\xc1\x15\xd6\x7c\x34\x40\x07\x9e\x0c\x22\x30\xd6\xab\x11\xea\xdd\x11\x9b\x08\x8c\x06\x99\x01\x18\x43\x5f\xe6\x47\x44\xc3\x35\x08\xd2\xd1\xec\xb2\x7f\x8b\x84\xfd\x01\x49\x6d\x91\x2f\xc3\x60\xdd\x49\xd2\x52\x87\x08\xcd\xfd\xfc\xd6\xa2\x7d\x96\x5b\xf6\x18\x06\xd7\x3b\x50\x18\x01\xc9\x00\xf2\xa8\x79\x67\x0b\x1d\x92\xd7\x51\xf6\xd8\xae\x59\xe4\xca\x10\xd2\x88\x00\x1e\x24\x50\x1e\xb2\xca\x9a\x98\x24\x99\x32\x4d\xcc\x27\x32\xa0\x1a\x69\x0b\x73\x4c\xff\x3e\x52\xe4\x5e\x9d\xb2\x3b\xd0\x37\x98\xc6\x4a\x84\xc5\x1d\x86\xda\x17\xec\x96\x76\x85\x3f\x94\x8b\xf8\x08\xc5\x37\x25\x0d\x39\x26\x64\x26\x14\x8c\x9c\x6e\xa0\x3f\xe1\x9a\xc0\xcd\x00\xa4\xc1\x63\x6e\x49\xa9\xa5\x29\x32\xd8\x13\x14\x01\x7c\xd9\xe3\x65\x28\x42\x90\x74\x70\x80\xf4\x88\x42\x53\x70\x11\x28\x0a\x16\x59\x6a\xaf\xec\xc0\x31\xa2\x20\x72\xa2\xce\x6b\xc7\xe1\x95\x85\x52\x97\xa8\xa8\xc9\x7b\x55\xd5\xad\x06\xb8\x27\xaa\x12\x9e\xb5\x1b\x1d\xc6\xbf\x47\xa4\x89\xa9\xc5\x8e\x44\xb1\x25\x9d\xe0\x47\xa0\xf7\x7d\x49\xa4\x8c\x30\x1e\x39\xbc\x34\xe0\x45\xf0\xb5\x13\x40\xa6\xaf\x89\x6c\x00\xfe\x71\x12\xfe\x61\x57\x5b\xc2\xe9\x16\x0b\x31\x52\x30\x6f\xa5\xb2\x3f\xfa\xc7\x30\xef\x5e\xe7\x41\xe6\xdd\x51\x15\xb2\x48\x28\x12\x21\x7c\xc7\xf5\xbd\xb9\x6d\xd4\xb8\x6a\x44\xed\xde\x05\xfc\x59\x6b\x1a\x1e\x41\xf0\x19\x21\xe2\x23\x20\xc9\xad\x97\x66\x9c\xcf\x71\x66\x20\xf2\x93\x0b\x6d\x08\xee\xf2\xbd\x8c\xa8\xef\x72\xcf\xaf\xec\xca\x68\x6f\x51\x22\x54\xc4\xf1\x23\x8c\x54\x4e\x1f\xdd\xa0\xd5\x2f\x77\x3e\x02\x85\x60\x8d\x05\x4f\x8b\x73\xf5\x11\x2c\x05\x02\x3f\x28\x05\x92\x7c\xfe\xa5\x48\xf3\xcb\x31\x1d\x73\x13\xe7\xd8\x03\x88\xa2\x2e\x16\xd6\x08\xfa\x66\xcc\x1b\x23\x54\xd5\x08\x49\x59\x07\xae\x99\x75\x35\x53\x28\x90\x8f\x69\x68\x77\xc0\x99\xcb\xde\x85\x3f\xbc\x2f\xa3\x6f\xee\xa3\x9e\xf7\x1c\x6a\x1e\x0f\xb9\x92\xd7\x28\x7e\x3f\xa0\x63\x79\x31\x00\xa4\xd5\x61\x22\xa0\xf6\xf8\x61\xce\xfc\xbe\x13\xd5\x3c\x32\xd0\x99\xd6\xb2\x4b\xe6\x83\x8e\x26\x72\x30\x8f\x70\x91\x3f\x03\x68\xee\xff\x8b\x60\xf9\x71\xfc\x8a\x8f\x38\x21\x85\xcf\x93\x28\xab\x2a\x26\x8f\xb5\xfa\x3f\x59\x67\x65\xdf\x1e\x0e\x49\x25\x12\x45\xfa\x69\x8f\xf9\x74\x8d\xc1\x87\x8d\x0b\x29\xfa\x00\xf5\x0f\x65\x8b\x81\x5c\xf9\x21\x6c\x45\x9d\xd9\xc3\x65\x84\x28\x95\xf7\x70\x82\x81\x44\x47\xf2\x4c\x1d\x0f\x72\xe7\x1f\x9a\x7b\x3d\x92\xee\x5f\xe6\x50\x38\x7f\xe4\x00\x68\x8d\x1e\xf1\x21\x92\x3d\x03\x5e\x45\x80\x93\xca\x86\x1b\xde\xd8\x4f\x16\x9a\x32\x95\xe0\x33\x9a\x41\xc1\x5b\xac\x49\xb4\xf7\x15\x43\xee\xbc\x1d\x5a\x07\xee\x45\x68\x82\xb8\x27\x79\xa4\x8d\xf3\x3c\x7c\x65\x3b\xda\xcb\x67\x4f\x58\xb8\x96\x0f\x73\x98\x36\x58\xd3\xf6\x92\x94\x48\xea\xcf\x4c\xf1\x8d\x8f\xd4\x35\xfe\x28\x25\x0d\xb5\x17\x63\x72\xa8\x1d\x47\x27\xc3\x59\xf8\x72\x0d\xa7\xb0\xb6\x83\xa8\x58\xd6\x53\x30\xdd\xec\xf0\x12\x5f\xb6\x99\xf7\x50\x34\x79\x1d\x4a\x3e\x64\xde\x10\xa3\xbe\xa1\x60\x00\x2b\x48\x89\x4f\x81\x29\xca\xc1\x6c\x9c\x87\xbb\xd7\x5e\x87\x39\x1e\xf3\x61\x1e\xe6\x50\xe1\xf8\x48\x7e\x60\xc8\x77\xa8\x6a\x5c\x93\xbe\x6c\x26\x07\x4d\xe8\x1c\x90\x92\x2a\x81\x8c\x4d\x70\x2e\x44\x82\xe6\x03\x68\xf2\x2e
\x0a\xce\x25\x02\x21\x49\x56\x25\xf1\x2c\x12\x16\x61\x70\x25\xda\xeb\x62\xfc\xdb\x00\x7c\x43\x76\x63\x66\xa1\x66\xdc\x6a\x94\x58\x92\x6b\xda\x3d\x23\x08\x9e\x32\xd7\x50\x6a\x0e\xcb\x00\x9e\x28\xe9\x3b\xa0\x74\x19\xd2\x28\xe9\xe1\xaf\xcc\xc0\x31\x69\x5f\x79\xd3\x7a\x87\x5d\x36\xdd\x84\xba\x2e\x61\xb5\xf0\x12\x72\xe3\x74\x39\x0a\x8f\xd2\x4b\x06\xdc\x16\x56\x6e\x96\xf6\xef\x2a\x19\x07\xd8\xd5\x61\x93\xd2\xfb\x10\x93\x12\x0a\xd0\x14\xd3\x80\xa2\x47\x26\x5d\xb0\x93\x82\x71\x93\xea\xbe\x36\xd3\xfc\x58\xcb\x23\x0b\x87\x70\x17\xf5\x48\x02\x19\x30\xa2\xd6\xba\x21\x14\x1b\xca\x16\xfe\x0c\x53\x91\xa0\x06\xdb\xf9\xde\x5e\xea\x00\xd0\xac\xf8\x35\xc2\xa6\x44\x0e\x0a\x30\x15\xb7\xdb\xf9\x7e\xa4\x38\xc3\xda\xd2\x50\x36\x29\x3f\xf8\x40\xf6\xce\x57\xe3\x11\x52\xd4\xba\x30\xc4\x7e\x73\x89\xd4\x9b\xff\xf8\xa9\x4a\x30\xe3\x56\xa3\x07\x35\x5d\xca\x7a\x00\x83\x1a\x22\x1a\x10\x23\x0a\x45\x8c\x0c\x44\x81\xef\x8c\xa7\x4d\x7a\x21\x39\xf8\x7c\xbd\x44\x32\x92\x10\x44\x7d\x7a\x6a\x5c\x09\xbb\xf6\x1d\x3c\x08\xc2\x6a\xd2\x95\x04\x83\x39\x25\x54\x64\x91\x27\x40\x92\x3e\xc4\xca\x81\xea\xb1\x7d\x78\x4f\xf5\x6e\x59\xc8\x80\x08\x1e\x1b\x5d\xf0\xc2\xa1\x0b\xd0\x1c\xd5\xe3\x31\x84\x2c\xf4\x98\x24\x8c\xd3\xaa\xbc\xd8\x3d\x43\xed\x90\x03\x3a\xed\x76\xd5\x8b\x45\x92\xa9\x20\xcc\xce\x2b\xb7\x52\x2b\xa8\x3e\x22\xe8\x6a\x08\x23\xa8\x48\xdb\x01\xdd\xce\x6d\xa1\x8d\x19\xe0\x99\x0a\xee\x2d\x0f\xcb\x9c\x40\xe7\x04\xc4\x70\x5f\x70\x85\x4e\x57\x01\x08\x54\xce\xc9\x81\x38\xc1\x34\x22\x4e\x74\x67\xb0\x62\x5d\xd6\x22\xab\xbd\xa1\xf3\x03\xe1\x11\xc8\x9a\x6c\x52\xc5\x68\x7b\xe5\x41\xb8\x9b\xb4\x7c\x6d\x21\xec\x83\xe0\x3c\xf6\x36\x04\xb2\x26\xe6\x96\x04\xdd\xbc\xe6\xac\x98\xea\x98\x5d\x34\x79\x1d\xc4\xbb\x08\x89\x79\x06\x06\x07\x49\x66\x3f\xf3\x12\x83\x48\x8b\x84\x96\xb9\xc2\x8e\xde\x75\x49\x09\xab\xb1\xd7\xe9\x25\xc5\x0a\x78\x80\xc4\x06\xa7\x8e\x7b\x52\xc2\x02\xe3\xad\x13\x25\x1d\xa1\x75\xa8\xae\x81\xe1\x23\x67\x1b\x36\x27\x8f\x3d\x91\x65\x26\xc0\xc8\xae\x45\x60\x78\x26\xb2\xc1\xba\x42\xcf\x12\xc0\xe2\x26\x2d\x69\xc4\xbe\xda\x65\xb8\x9a\xd6\x46\xbf\x68\xe0\xce\x58\x5e\x14\xf1\x36\x00\xb3\x23\xec\x0e\x15\xca\x3a\xa0\xf5\x79\xc2\x10\xe8\xa3\x10\x87\x36\xa5\x99\xbd\xe7\x07\x02\xb9\xd3\x70\x3b\x77\x58\x4c\x84\x77\x07\x73\xb1\x1d\x20\x54\x76\x24\x81\x7e\x46\x54\x3a\xaf\x2b\x00\x3d\x82\xeb\x40\xa2\x86\x84\x97\x2e\x4b\x93\xc0\x81\x3d\x01\xe6\x21\xb7\xd9\xad\xee\x4f\xa6\xb5\x3b\xeb\xb5\x79\x15\x6c\x4a\x10\x9e\xf6\x7a\x1a\x73\x8a\x43\x48\x48\x12\x79\x98\x40\xe8\xbc\xf5\x24\x7d\xfd\xef\xed\x95\x18\xe4\x42\xcf\x76\xa1\x13\xf8\x5f\x42\x1e\x8c\x4a\x98\x61\x14\x3d\xeb\xde\x2f\x30\xb0\x96\x8a\x4a\x21\x5f\x17\xc8\x2e\xc1\xd2\xe6\xe3\xb9\x0c\xc7\xfe\x7c\x84\xf8\x59\x7a\xfd\x0f\xa5\x4f\x81\xf9\xe1\x23\x99\x88\xaf\x4a\xe3\x58\x04\x22\xa2\xbe\x1f\x0a\x6f\x7a\x98\x53\xd7\xba\xf1\x2e\xb1\x8a\x93\x2e\x4e\x3b\xbc\x8d\x71\x6c\x8b\x4d\x5a\x46\xca\x7f\x00\x29\x60\x0d\x73\xd1\x87\x14\xab\x51\x8c\xa1\x0c\xc7\xd9\x4a\x1d\xf8\x85\x92\xd7\x50\xb1\x1f\x09\xbc\xfd\x96\x7f\x4d\x42\xe5\x7f\x6d\x08\x1f\xa5\x8d\x6c\xf6\xce\xd1\x91\x0d\x5e\x05\x42\x41\x9f\xb7\x53\x4a\x64\xd4\x88\xc9\x23\x3e\x1d\x48\x59\xef\x5f\xb8\x8f\x2f\x12\x8a\x15\x44\x04\xad\x77\x08\x85\xdc\x84\x94\xfc\xc8\x82\xbf\xa7\xaa\x08\x93\xd8\x76\x39\xaf\xaf\xba\x48\xfe\x57\x8e\x22\x12\x67\x5a\x59\x9a\x75\x79\x11\xca\x94\x38\x85\x81\x90\xa6\x25\x1e\xcd\xa4\xde\x38\xe4\xdf\x21\x31\xe2\x65\x82\xd4\x17\xc7\xf6\x24\xef\x0c\xfc\xdc\xbb\xc3\x3a\x1a\x9b\xc7\xfd\x50\x77\x44\x86\x1e\x7c\x29\x3f\xaf\xd2\x24\x3f\x3f\x30\x29\xea\x8b\x98\xe7\x1f\xa2\xa6\x3f\x14\xa3\xff\x04\x0a\xf6\x13\xa5\xd1\x4f\x5e\x55\x25\xb2\x08\xf5\x0b\x8f\x04\x1
f\xfe\xa4\x4a\xec\x97\x05\x2c\x31\x7f\x40\x37\xf5\xfb\xf0\x49\x63\x39\x9b\xcf\xbf\x90\xd2\xef\xc4\xd1\x0a\x60\xf3\x99\xcb\xe8\x11\xad\x8f\x5a\x65\x4c\x81\xee\x0a\x82\xfa\xeb\x4f\x78\xff\xaf\xf4\xfe\xa7\x16\xe3\x2b\xc8\x13\xf0\x00\x79\x09\x33\x74\x02\xbf\xc9\x95\x7b\x7c\x8b\xe1\x9f\xb4\x4c\x96\x60\x13\x59\x6c\x53\x77\xbf\x86\xf0\xff\xff\x74\x32\x31\x01\x43\x6a\x0a\x85\x92\xb7\xf2\x91\x2f\x17\xf1\x14\x9e\xa8\xad\xdb\xe3\x50\x99\xee\x75\x2f\x09\x42\x32\x94\xb4\x49\x16\x36\x19\xad\xa2\x36\xc7\x8b\xe6\x4b\x01\x86\x5f\x7f\x74\x64\x9e\x04\xfa\x3d\x9d\x40\xeb\x7a\x26\xf3\x2d\x24\xd1\x7e\x71\xc2\xec\x02\x26\xbb\xe4\x4b\x38\xe5\x7b\xce\xda\xb6\x27\xe0\xee\x89\xb4\x9a\x14\x4e\x90\xf3\xf2\xa5\xee\xe8\x33\x03\x22\x89\x20\xbd\xbd\xb3\x7f\x26\x0d\xbf\x9e\xe9\x94\x77\xc8\x19\x5e\x8a\x72\x09\xa1\x45\x37\x2c\x40\xd0\x11\xb2\xc7\x7f\x42\xfc\x3d\xbc\x3a\x2a\xd8\x3f\x14\x27\xae\x24\x8c\xc2\x08\xc4\x2f\x45\x65\x6e\xaa\x86\x61\x4c\xdd\x48\x78\xb7\x2e\x9b\x60\x9f\x3b\xe0\xef\x8e\x01\x4a\x81\x01\xda\x28\x58\x4d\xfc\x0f\xc7\x27\x77\x0d\x2d\x85\xfb\xb4\xf7\x87\x40\x9a\xb3\x4c\x04\x02\xcd\x72\xc3\xb1\xe8\x1a\xa8\x79\x41\x48\xd0\x4d\xc4\xd0\xdc\x75\x11\xe6\x83\x35\x67\x00\x82\x64\x1e\xe2\x06\xe3\x21\xe0\x0a\xc5\x8e\x5b\x56\x08\x1e\x6b\xf2\x72\xf3\xb7\xd8\x42\x48\xb1\xbb\x89\x61\x20\x89\xe0\xd5\x46\x0b\xf8\x0c\x58\x90\xae\x2a\x15\x15\x0b\x75\x76\x45\x2a\x2a\x03\x1e\x0c\x1d\x04\x50\xd9\x1f\xd9\x95\x08\x6c\x74\x1e\xde\x38\x59\xd0\x57\x37\x1a\xb7\xde\x8a\xd3\xb8\x7b\x16\x4f\xfc\xd6\x68\xf3\x9e\x37\x42\x88\x22\xcd\x75\xcf\x1c\x6b\xdc\x93\x5d\xc5\x1f\x0a\xb3\x8c\x10\x16\x0e\x6d\x96\xd3\x06\x6b\x4b\x8e\xd8\x61\xc8\xfa\x20\x7f\x93\x12\x3f\x78\xca\x61\xea\x69\x9d\x43\xe5\xc2\xfa\xfd\x27\xc7\x45\x77\x0e\xd2\x2f\x30\x4e\xb3\x54\xc6\x2c\x16\x94\xf8\xce\xe1\xa2\x6f\x17\x56\x25\xe1\xdb\xd5\xa7\xb8\x52\xa2\x7d\xf6\xca\x32\x51\x0c\x4d\x7f\xef\x38\x81\xed\xa9\xf2\x83\x49\xba\xf9\x09\x6c\x85\x9f\xff\xa3\x84\x0b\x01\x4c\x5f\x4d\x0c\x77\xab\xb0\xb7\x22\x1c\x4b\x5e\x01\xab\xbf\x9a\x2b\xe9\x55\x9b\xf2\xd9\x55\x69\x22\x7f\xd9\xcc\x85\x10\x96\xca\x61\xf8\x65\x8f\x63\x83\x25\xdb\x07\x02\x32\xc0\xb8\x10\xc0\xe8\x64\xe6\x09\x63\x82\x5d\x80\xa7\x3d\xd3\x06\x85\x9f\x62\xbc\x72\xdd\x2c\x1c\x7a\xa8\x1a\x25\x46\xc1\xa8\x99\x52\x58\x26\x47\x3a\x38\x0c\x4f\xf4\xcf\xec\x83\xe8\xa4\x17\xaf\x9f\xff\x54\x98\xc6\xe8\xac\x92\xea\x99\x16\x9d\xb4\xc1\x02\x26\x6a\x14\x28\xca\xfd\x31\x02\x0e\xe5\x14\x50\x6e\x7c\x38\x75\x53\x17\xee\x38\x93\x58\x4c\x08\x71\xce\x8c\x0b\x28\x8e\x01\x78\x42\x6b\xa9\x08\x3e\x36\xcd\x3a\x05\x50\xaa\x37\xc3\x59\x18\xa1\xc3\xda\x14\x22\x41\x2c\x0c\xe9\x99\x14\xca\x95\x57\x61\x59\xed\x2a\x07\x7f\x7b\x51\x89\xf1\x6a\xb8\x50\xee\xe2\xf0\x49\xee\xb1\xc2\x84\xd5\xa3\x05\x28\x52\xff\x7f\x4a\xe9\x5c\xdb\x5b\xff\x76\x9b\xa5\x16\x75\xad\x85\xf6\x20\x3e\x21\x23\xe6\x89\x36\x5c\xd6\xf3\x10\x90\xb4\xb0\x07\xbc\x80\x89\x1a\x14\xac\x04\x41\xb1\xd0\x6b\x33\xfe\x73\x90\x76\x0d\x0a\x35\x02\x0a\xcd\x5b\xe0\xa1\xb8\xbb\x64\xa1\xaf\x70\x02\x03\x3e\x2a\x80\x52\x1a\x64\x5c\x53\x5d\x08\xb4\x9a\x6c\x10\x42\x00\xd3\x54\x7c\xb6\x6b\x4b\x31\x83\x90\x1e\x58\xa2\xe9\x03\x6e\x8a\x27\x03\xb0\x94\xa0\x51\x73\x5b\xfa\x33\x0b\x59\x62\x1b\x1b\x49\x0f\x67\x8f\x6b\x3b\x09\x53\x84\x54\x72\x89\x88\x4b\x42\x51\x60\x88\x38\xc9\x60\xd5\xe2\xc7\x19\xdf\x0a\xa8\x95\x42\x2a\x12\x4e\x32\x04\x54\xbd\xd8\xe7\x3a\x10\x8b\x9a\x3a\xf7\xc4\x6f\xa8\x47\x7d\xe5\xff\xaa\x10\x5a\x47\x5d\x9f\xb4\x4f\xbc\xe4\x5d\x71\x35\x1b\x30\xc7\xe4\xfc\x32\xb2\x5f\x45\x61\xa5\x3c\xfb\x94\xed\x6a\xa8\xb2\x9c\xc7\xd0\xeb\x19\x34\x17\xb1\x48\x00\xbc\xc6\xcb\xbe\xb2\x8f\x7b\x75\xcd\x7b\x09\x59\x5f\x7d\xd8\x2e\x
c0\xbf\x23\x74\x73\x5e\x74\x8a\x93\x89\x4d\x03\x7c\xd5\xaf\xe0\x8e\xe4\x36\x5d\xaf\xe7\x52\xb0\x36\x83\x9b\x3d\x0c\x0f\x42\xbe\x87\xe1\x56\x43\x75\x87\xea\xf4\x01\x13\x0b\xd6\x16\x90\x62\xdb\x15\x81\xff\xc8\x3e\x89\x0a\x70\xf5\x49\x78\x9c\xcb\x65\x1a\x67\x46\x8f\xc7\x3c\x10\xc0\xf5\x8f\x62\x49\xe9\x45\x70\xfa\x95\xea\xd3\x46\xd3\x84\x25\xa6\x70\x66\x4a\x4d\x99\x29\xb1\x6a\x8b\x21\xc1\xdc\x22\x0d\xf6\x10\x36\x01\xa8\xf0\x0c\xf0\x80\x42\x3e\x9a\x98\x5f\xb0\xcf\xe1\x9d\x4b\xab\x40\x6c\x23\x1f\xd2\xf4\xe0\x49\xc5\x31\x7a\xcf\xc4\x72\x9d\x14\x10\xaf\x94\xbc\x8f\x81\xf0\xd8\x2c\x89\x1f\x95\x00\x4e\x71\x8d\xce\x2a\x7b\x41\x0b\x3c\xb7\x09\xf5\x78\xe9\x02\xd9\xf5\x23\x36\xad\xe3\x63\x01\xaa\xa7\x30\x67\x9f\x77\x2f\x98\x59\x3d\x08\x4d\xab\x0f\xdf\x04\x35\x01\xa2\xd7\xae\x4d\x5e\xe4\x98\x5b\x9d\x01\x55\x6b\xf6\x37\x65\xa7\x2d\x8d\x4d\xc2\xb8\xe3\x2a\xd4\x59\x00\xbd\x11\x49\xec\xe2\x2c\x01\x89\xcb\x13\x15\x86\xec\xb7\xaa\xbd\x46\xec\x8b\x0e\x15\x50\xff\x16\x3c\x93\xe8\x2f\xcf\x59\x8f\x9c\x9b\xf9\x72\xca\xec\xc6\xe2\x7e\x66\x33\x8d\x7a\x26\x0a\x2b\xd8\x5d\xdd\xc3\xb1\x47\x8f\xe1\x89\x8a\xdf\x25\xc8\xda\x41\x7a\x8e\x45\x74\x03\xb2\xde\x65\xa6\x52\x92\xbd\xde\x8b\xce\xda\x7b\x03\xcf\xc4\x59\x7f\x43\x38\x1b\x34\x7a\x89\x63\x43\xe3\xb3\xf2\x7e\xda\x88\x85\x58\xb4\x3b\xfd\x6a\x92\x73\x26\xf1\x10\xa1\xbd\x11\x4e\x3d\x4c\x19\x9d\x89\x59\xc7\xd3\x26\x17\x9c\x4f\x9d\xf2\x16\x05\x7f\x9f\xdf\xfd\x57\x13\x95\x84\x38\x04\x3d\xea\x05\xd5\x15\x07\x42\x55\x80\xa1\x5b\x13\x7e\x58\x8f\x7e\x4b\x1c\xc8\x2e\x15\x67\x9e\x16\xf3\xe6\x1e\x18\xa4\x85\x04\xd0\x19\x29\xe3\x03\x1d\xf4\x8b\x3e\x8f\x36\x31\xba\x02\xdf\x16\x90\xae\x72\xea\x2b\x6b\x39\x99\x17\x38\x94\x06\x74\x0c\x1c\xf5\x71\x10\x11\x00\x77\xaa\x88\x7e\x94\x10\xe3\x3c\xbe\x93\x1e\x0b\x1f\x81\x8f\x4b\xdf\x75\x90\xb4\xf2\x2d\xd7\xca\xb3\x5f\xe5\x5e\x6d\x73\xa9\xca\xb4\xcb\x31\x2b\x2d\x0f\xd8\xdc\xbd\x7c\x91\x6f\xee\xd0\xff\x43\x0c\x5c\xe0\xd5\x38\x1f\x3d\xe6\x14\x23\xf5\x03\x0c\x58\x3a\x7f\xc2\xc5\x04\x8d\xc3\xf1\x20\x1d\xe5\x20\xf6\x4b\xf0\x35\xd8\xfe\xfd\x08\x11\x47\x41\x25\x29\xf5\x00\x7b\x2f\xe0\xdc\x79\x25\x22\xdc\xce\x35\xea\x59\x16\x2f\xda\x8c\x18\xba\x5d\x22\x68\x87\x2b\x69\x7b\xc0\x73\x0a\x12\xec\xc1\x1c\x0c\xb0\xcc\x1c\x6d\x40\x34\x68\xe0\x37\x74\x8f\x07\xa8\x19\xbe\x95\x21\x19\x34\xd8\xb5\xe3\x21\x84\xfd\x3f\x20\x46\x7b\xde\xd4\x55\x3b\xe4\x12\x05\x47\x95\xb7\x96\x78\x65\x0a\x06\x4d\xbc\x56\x7f\xcc\xdd\xcb\xb9\x49\x19\x03\x62\x42\x4c\x1f\xe0\xb5\xf5\xe7\xae\xac\x3e\x30\xf7\xe7\xab\x84\x56\xb0\x0c\xd8\x8a\x30\x67\x85\x32\x16\x65\x28\x82\xba\x14\x22\x91\x7e\xdc\x87\xc8\x27\x97\xb6\x9c\x59\xcd\x68\x12\xf6\x7b\x55\x26\x67\x2f\xcf\xc2\x85\xe6\x7d\xfb\xd7\x32\x07\xbe\x0c\xec\x74\xf6\x79\x61\x93\xb6\x4b\x39\x1f\xd8\x9e\x46\x64\xd9\x95\xc5\xb3\xe5\xf0\xc1\xbc\x51\x07\x02\xed\x53\x81\x76\x8e\x39\xd3\x9e\xdf\xca\x2c\xee\xf9\x75\xc9\xdf\xe8\x29\x97\x86\xdd\xda\xfb\xc2\x49\x9b\x85\xee\x37\xa5\x1e\x67\x83\xc5\x71\x52\x20\x61\xec\xcd\xfd\xdb\x57\xaf\x67\x6f\x75\x3b\xc4\xab\xdb\xd0\xd4\x3a\xd4\x28\xda\x5a\x0b\x26\x01\x50\xb6\x52\x57\xcc\x05\x0e\x8f\x6f\xc9\x1b\x2d\x61\x47\x14\xba\x88\x80\x99\x09\xe9\x65\xb7\x89\x89\x30\x57\x56\x65\x8d\x18\xe1\x42\x5f\xb9\xcd\x3d\xdd\x19\x80\xf6\x4a\x79\xa4\x1d\x36\x8d\x8a\xb8\x90\x59\x50\xdc\x2a\xf8\x14\x5d\x84\x93\x2a\xcb\x65\x73\xbd\x4b\x42\x45\x9a\xcc\x6c\xf3\x1a\xa0\xbb\xa6\x45\xba\xcd\x13\xe5\x8f\x06\xc0\x16\xc9\xe0\xdb\x34\x24\xd8\xf2\x25\xf9\x21\xf7\x81\xfa\x21\xba\xce\x1e\x2e\x21\xef\xa8\xf9\x93\xd7\x50\xa8\xdc\x5a\xb7\xbd\x65\xbc\xfd\x7e\xe0\xfe\x9e\x47\xa2\xbd\xfa\x3d\x07\x0a\x0e\xf6\x4f\x67\x0e\x30\x9d\x5a\x27\x87\xd8\xfd\x11\x3c\
x5a\x9f\x02\xd2\xfd\xd6\x3d\x14\x7c\xfa\xb1\x29\x1e\xcd\x8e\xe9\xb7\x20\x2b\xb5\xb1\xf5\x0d\x09\xd9\xe7\x75\xc2\x94\x4e\xe0\xb8\x56\xfa\x06\x6e\x4e\x5d\x35\x04\x8c\x02\xef\x97\xd0\x3f\x7a\xf0\x92\xdc\x53\xaa\x72\xed\xb9\xab\xdc\x86\x1c\x39\xf7\x43\xec\xdb\xc4\x79\x27\x64\x8b\x02\xf2\x86\xc4\x27\x37\xf4\x0a\xaf\xc2\x6d\xd6\x3f\x38\x97\x10\x33\xf5\x6e\x36\x7d\xe1\x71\xe5\x8f\xc4\x51\x0e\x21\x22\x4a\x04\x63\x29\x93\x21\x6b\x4f\x56\x41\xe6\xde\xa8\x8b\x73\x8f\xbd\xa1\xe7\xb8\xf9\x2e\xc4\xf4\x46\x5e\x91\x0e\xc6\x61\xaa\x2d\xb7\xc8\x88\x58\x9c\xfa\x24\xec\xdf\x6f\x33\x54\x1a\x03\xc0\x07\x52\x15\x6d\xe8\x60\x73\xb4\x2b\x88\x1d\x7a\x4a\xd9\x6e\x3c\x07\xba\x49\x31\xc4\x80\xcc\xdf\xc0\xf5\x4d\x85\x9e\x37\x6b\x3e\xd6\x03\x04\x0a\xad\x3a\x8b\x71\x85\x7d\x1d\xbf\x81\x44\x37\xa9\x80\x6b\x9d\xce\x58\xfe\xb8\x34\x12\xde\x0f\xc2\xbc\xca\x4b\x50\x74\x74\x22\xaa\xb1\x43\x47\x80\xa2\x3f\x90\x40\x70\x8c\x57\xd9\x3d\xb5\x4c\x1d\xa2\x42\xdc\x58\x79\xc4\xcf\x5e\x4b\x7e\x7d\xe1\x0a\xab\x0d\x41\x02\x71\xc7\x72\x1e\xc4\xe3\x5b\xaf\xbb\xda\xff\x5e\xb9\x76\xfd\xcd\x7b\x3b\xc6\x3c\xd6\x42\xdf\x24\x34\x4a\xbe\xec\xd8\xaf\xad\xb9\x2e\x31\xe5\x5c\xb3\x8d\x21\x89\x3b\xcb\x8d\xb3\xb2\xc2\xa9\x91\x8a\x4e\x79\xff\x30\x17\xba\x7a\xa9\x57\x6d\x83\x7d\x38\x04\xc5\x13\xeb\xdf\x3b\x42\xc2\xfc\xe0\x49\x48\x1c\x1e\xaa\x5f\xc2\xbc\x7d\x84\x93\xfb\x30\xad\x0b\x9c\x1d\x31\x6e\xc2\xbf\x61\x94\x6b\x6f\xbb\x84\x94\x24\xa8\x64\x13\x0e\x0e\xc0\xd7\x14\x32\xcf\x70\x47\x8d\x48\xe0\x45\x98\xe2\xf0\x58\x68\x15\x2f\x46\x23\xdd\xc2\xd1\xaa\x29\xf7\x80\x0a\x1c\xdb\x26\x47\xe8\x29\xf8\x6a\xc6\xac\xd1\xb3\xb9\xfb\x92\x56\x35\x33\xd2\x35\x8d\x67\x96\xbb\x53\xcf\x27\x0d\x66\x4b\x51\x97\x81\xcb\xd2\x4d\xa8\x1d\x50\x38\x4a\x7f\x2d\x75\x3e\x78\x46\x4b\xf5\xc9\x8c\x47\x3e\x3e\x86\x98\x12\x67\xb6\xcb\x99\x84\x89\x5a\x5a\x6a\x69\x09\x21\xc5\x05\x0c\xeb\x39\x24\x93\x3e\x14\x72\x2a\xb0\x9e\x22\xee\xee\x0e\x57\xa5\x5b\x3d\x90\xa0\x78\x6f\x86\x42\xf3\x39\x66\x6c\x81\x8b\x66\x83\x01\x96\xf9\x53\x37\x46\x53\x38\x14\x95\x86\xc2\x83\xbf\xbe\x2f\xf9\x54\x2f\x3b\xe7\x8b\x8e\xe5\xfb\x71\x74\x9e\xe7\x96\x96\xc6\x90\x5a\xe6\x06\xc2\x43\x01\xc5\xff\xfd\x7c\x04\x2c\x6f\x68\xea\x48\x8d\xfa\xb2\x7c\x4d\x64\xac\x6d\x88\x92\xc1\xd2\x04\x35\x97\x3c\xf8\x78\x64\xf1\xaa\x48\x56\x18\xc3\x20\x87\xe4\x49\x10\xc9\xde\x76\xb6\xd6\x0b\x58\xf9\x0a\x64\x7a\xb6\x34\x91\x04\x44\x3e\x1e\xb5\x60\x26\x60\x8f\x73\x80\x5f\xb8\xc7\x87\x04\xdf\x2f\x34\xa2\xd2\x52\x3d\x5c\x55\x57\xfc\xcd\xd5\x9f\x0b\xe0\xdc\x89\xa3\xf9\x45\x51\xc1\x93\x88\xbf\x36\xa2\x76\x89\x23\xa9\xb4\x23\x85\xe1\xcb\x2b\xc7\x93\x63\x4d\x70\x1f\x83\x61\x8b\x5d\xf1\x68\x9b\x0a\xae\x27\x3c\x17\xbc\x81\x06\x4c\x8f\x58\x3c\xa7\x39\x8f\xdf\xd0\xa3\x35\xf4\x83\x2c\x9a\x39\x3b\x9d\x6c\x7e\x15\x58\x3d\xf4\x88\x82\xad\x9d\x54\xce\xb0\x93\xf3\xc7\xdf\xe1\x76\x0a\xf6\x2f\x52\x4f\xbd\xe5\x44\xdf\x47\x46\x0b\x37\x3b\x59\xe7\x9b\x4a\x69\x5f\xd9\xd5\x7f\xc5\x9e\x16\x83\x72\xe9\x03\x00\x1a\x34\x31\x98\x03\xcb\xe7\x95\x0e\x62\x00\xed\x69\x52\xfd\x73\x2a\x1c\x41\x41\xe5\x97\x9e\x46\xf6\x86\xf5\x53\x00\xd7\x3c\x75\xe9\xf5\x4e\xff\x34\xea\xb7\x80\x36\x4d\x25\xa5\x2e\x60\x7e\x9e\xc5\xf5\xa3\xe7\xd7\x2a\x4b\xa5\x2b\x5c\x96\xac\x3f\x5b\x85\xe8\xdb\x04\xb0\x3b\xa4\x43\xd4\x6c\x67\x80\xd8\x5b\xbf\x90\xbd\x86\xd8\x9b\x87\xbb\x93\x1f\xcd\x04\x2c\x03\x61\x70\x6c\xa3\x26\xc0\xf6\x42\xf4\xc8\x5a\x0b\xa4\x4d\x05\xbe\xe3\x14\x70\x4a\x2a\x06\x4c\x3e\xcf\xde\x42\x0f\x29\x39\x26\x3f\x3e\x10\xa3\x67\xef\xe0\xc7\x83\x79\x8c\x53\x02\xc6\x5b\xfe\x4d\xfd\xa2\x74\xfc\xa8\x4a\x95\xcd\xc9\x07\xe6\xcf\x02\x49\xb7\x12\x37\x67\x3d\x5f\x65\xfa\xdd\x63\xfb\xaf\x0e\xa1\x42\x33
ec\x84\x90\x49\x22\xf2\x20\x9b\x95\xb2\xcd\x33\x94\x98\x5f\xfd\x8c\x3a\xf4\x89\x29\xc4\xec\x99\xb9\xc8\x03\x4d\x17\x0d\xba\x62\xc2\xe3\x19\x96\xf2\x4f\x70\xad\xbc\x84\x95\x20\x0f\x21\x36\xd3\x13\xb1\xdb\xff\xc8\x69\xdc\x45\x6a\x3c\xc9\x4d\xbc\xde\x9c\xb8\x7f\xa6\xab\x5b\x6d\x49\xb7\x05\xe2\x95\xe2\x34\xb6\x46\x59\xb9\x27\x2c\xd9\x58\x96\xa0\x3d\x2a\xa4\xf5\x84\x6a\x03\x5f\x32\xfb\xb0\x99\x3d\xe5\xc8\x4b\x90\xc9\x49\xd6\x4c\x13\x87\x0e\x06\x2a\xd3\x6a\x1e\xb7\x52\x30\x1e\x91\xc5\x85\x29\x0b\xd6\xd2\x8f\x7b\xb9\x49\x4b\xfc\xb8\x4b\x18\x26\x91\xc3\x02\xcb\xbd\xb9\x4a\x47\x01\xc2\x3f\xaa\x61\x74\x27\x7d\x13\x5b\x90\x25\x16\xd2\xd7\x68\xbb\xd5\x92\xa7\x71\x3f\x7a\x1e\xd8\x83\xef\xee\xa3\x8a\xca\xd8\x94\x54\x0d\xa6\xa3\x7f\x08\x0f\xa7\xae\xd0\x91\xab\xca\x52\xca\xc0\xb3\x93\x1f\x45\xc9\xac\xa1\x7c\xf3\xa8\x24\xae\xf9\xf5\xac\xf2\xfc\x7b\x90\x57\xc2\x53\xda\xe3\x0c\x50\x68\xc8\x75\xf1\x15\x70\x7c\x86\x71\x3f\x2c\xbd\xc4\x72\xdc\x95\xda\xf7\x88\x2c\x4b\xe2\x7e\x4a\x3e\xe0\x87\xdb\x2e\x28\x8b\xc6\xc9\x8c\xe1\xd8\x45\x9f\xa7\x07\xfa\xbb\xe7\xe9\xb7\x3f\xaf\xf2\x2f\x33\x40\xfe\xa2\x8d\x19\x75\x60\xf0\x40\x58\xf2\x51\xe1\x4d\xe0\xed\x4b\x1c\xb9\x3c\x5c\x32\x33\x85\x91\x17\x7f\x61\x8f\x23\x88\xd9\xc8\x5d\x0b\x18\x7d\x8a\x96\x3d\x42\xa2\xde\xc0\x93\x49\x19\x8f\x34\x8a\x06\x23\xd1\x8f\xfc\x1e\x48\x42\xc8\x2f\xb9\x7b\xbd\x34\x9b\x9e\x31\x7d\x2c\x6e\xe4\x3b\x18\x91\xa2\x3a\x52\x91\x2c\xbb\xe0\x6f\x78\x82\xdd\x57\x11\xea\x04\x45\x98\xaf\x2a\x2e\x04\x9f\x8a\xae\x4d\xd9\x7b\xc0\x99\xc9\xac\x3e\xd2\x9e\x83\x21\x39\x50\xa1\x3f\x6b\x06\x3f\xff\xf4\x09\x5c\xf5\x57\xdf\x92\x14\x80\x49\x54\x04\xe1\x57\x9e\x64\xe0\x82\xd2\x2a\xec\xb7\x1b\x26\x5d\x91\x2a\xff\xe5\xc2\x48\xd1\xf1\xa0\xb3\x83\xf6\x26\x60\x7d\x24\x19\x10\xe7\x99\x99\x52\x19\x2f\x97\x7c\xca\x9e\xe1\x2f\x66\xdf\x50\x18\x8e\x85\x20\x6d\x8e\x09\x82\xcc\xe1\x51\xfe\x71\xa3\x9d\x59\x29\xda\xdb\xbb\xc4\x93\x98\xde\x6c\x6e\x27\xd2\xd0\x4e\xc1\x24\xa1\xe9\xe9\xd6\x10\x68\x92\xb1\xca\x74\x33\x3a\x39\x59\x5b\xba\xf0\x67\x4f\xe8\x97\xd2\x72\xcc\x7e\x95\xb8\xe5\xd6\xdf\xa7\x81\x3c\x40\xf4\x16\x0d\x8b\xf4\x67\x2f\xd4\xb3\x92\x45\xf9\x26\x6b\x4f\xce\x4f\x53\xfd\x91\xf3\xea\x3d\x91\x7a\x89\x04\x7c\x17\xc4\x81\x85\x19\x8b\x33\xee\x10\xb8\x9a\xac\xb9\xa7\xda\x19\x97\x7e\xfd\x8e\x10\x4d\x5e\x02\x3d\xbd\x35\x9b\xa2\x67\x3c\xc9\x99\xab\x0e\x54\xbf\x9f\x8a\xc4\x3b\xa6\x88\x7f\x4d\x45\x29\x78\x53\x81\xf2\x27\x57\xc5\xbc\xe1\x64\xf5\x1c\x7f\x77\x59\xc5\xef\x5c\x11\x95\xd0\x6f\x2d\x41\xf5\x5c\x74\xf0\xe7\x33\x40\x06\xd1\x88\xb4\xc6\xd2\x3d\xd5\xf4\xd9\x41\x0f\x8a\x7d\x86\xa9\x88\xf8\xb0\x65\x2e\x26\xb1\xb2\x60\xb6\x46\xea\xa8\x7d\x2d\x22\x51\x5a\x7b\xa3\xa8\x97\xfd\xb6\x3e\x7f\x80\x89\x74\x9f\xc6\x10\x1d\xb2\x91\xa0\x79\x24\x37\xf5\x07\x10\x0b\x14\x2e\xd3\xba\x29\x39\x42\x65\x40\x34\x79\x84\x95\x85\xec\x95\xad\x46\xa5\x0b\xc0\x7f\xe2\x74\x66\x81\xb3\x95\xaa\x0d\xd0\xeb\xe1\x6e\x1e\x28\x12\x52\x18\x6b\xca\x7f\x56\x49\xfa\x1d\xca\x9d\xf9\x9a\x9f\xc5\xcd\x21\x85\x27\x21\x18\x2f\x8a\xd3\xf9\x90\x5b\xdb\x08\x83\x34\xf2\x50\x9d\xab\xa2\x59\x11\x30\x3d\xb3\x12\x04\x27\x5a\xdd\x4e\xe9\xfe\xfa\x84\x86\xf3\x3d\x7f\x43\x16\x79\xab\x4f\x89\x6e\x73\x36\x54\xe6\x74\xc3\x64\x37\xf7\xae\x7c\xec\x21\xcb\x36\xb9\xe7\x34\xef\x06\x3c\x47\x78\xdc\x95\xcf\x83\xb2\x16\x53\x1a\xe5\x99\x46\x97\xba\xf1\x26\xe1\x11\x53\x78\x74\xad\xe2\xcc\x50\xd0\x28\xef\xd1\x19\x0d\xce\x5c\xbb\x07\x9f\xe3\x86\x18\xdc\xc6\x72\x22\x85\xb3\xbe\xbb\x8b\x1a\xb3\xe6\x47\xd7\x02\x26\x71\x10\x73\xb9\x31\x30\x1b\x9b\xe4\xff\x47\x06\x22\x46\xf5\x57\x41\x09\xbd\xc2\xce\x0a\xf7\x45\x4d\xb2\x61\xcd\x08\x8c\xd6\x4c\x66\x71\xef\x5a\xb8\
x98\x41\xac\x96\x36\x25\x63\x45\x54\xa2\x33\x3d\xaf\x8b\x9c\x54\xa8\xfa\x91\x15\x2a\xe9\x38\xb8\xb1\x05\x3a\xe4\xe2\x55\x8f\x24\xaa\x27\xdf\x4e\x2b\x27\xd1\x3b\x2f\x8a\x5f\xc1\x91\x4d\xac\xd1\xdf\x4c\x27\x30\x7b\x02\x86\x7a\x5d\x05\x4e\x68\x13\x13\xf4\xd2\x76\xd0\x33\x13\x63\xb2\xae\xa1\x37\x62\xf0\xa1\xab\x05\x67\xa7\xf1\x56\xba\x14\x15\x79\xab\xf1\xc3\xd7\x5f\x87\x19\x73\x16\x27\x4b\x0d\x85\xc7\xb5\x7c\x5d\x61\x0c\xba\x84\xec\xe0\x8f\xab\xeb\x0d\x76\x58\x3b\xbe\xcf\x20\x8d\x26\xf2\xf1\xe0\xeb\x26\x3b\x3d\x28\x4d\x86\x59\x5b\x01\xab\x33\xc6\x19\xf6\xfc\x49\xf3\x1e\x3b\xb5\x65\x2c\x1b\x88\xb4\x7e\x43\x4a\x0a\xa3\x02\x91\x48\x49\x3c\xe2\x42\x30\x5b\x67\x0c\xe9\x48\x34\x2d\x5f\x86\x81\x91\x54\xb6\x97\x91\xdb\x96\x4e\x77\xb2\x9f\xff\x92\xd3\x04\x60\xa7\x99\xca\xfe\x0f\xe3\x57\x92\x50\x23\x3b\x70\x74\xb7\x5d\x82\x8d\xc5\x5a\x9e\xf5\x0f\x30\xaf\x39\xb8\x39\x63\xfa\xc8\x35\x2d\xc8\xa3\xaf\x14\x49\x0d\x23\xbe\x34\xf9\xda\xa5\x6b\x13\x6f\x38\x79\xce\x32\xf7\x95\xea\xe6\x98\xda\xdd\xd9\xa5\x37\x74\x0b\x09\xe1\x34\x21\xf4\x23\x9c\xb6\x7b\x10\x5d\xb5\x31\xad\xc9\x50\xa5\xaf\xcb\xe8\x59\x0d\xfa\x02\x12\x74\xbf\xb5\xe5\x8f\xbb\x3a\x24\xd1\x5e\x8f\x10\x5e\x1a\xa9\x4f\xf2\x9f\x98\xa9\x41\x26\xdd\x82\x54\xed\x39\x6a\x55\x3f\x68\x2b\x3a\xd0\x96\xff\x04\x92\x72\x63\xc6\x54\x4e\xd9\xd2\xd9\x23\x3e\x0e\xfe\x7b\x18\xa0\x59\x66\x9f\xe0\xc2\xc5\xe4\x11\x08\xe7\xad\x92\x1e\x26\x57\x9e\xe1\xfe\x9f\xa2\xca\xc3\x1d\x72\x45\xfe\x02\x04\x0b\x18\x6f\xde\x99\xb5\x7a\xef\x02\x13\xe0\xa3\xfe\xbb\xf0\x24\xcb\xb4\x5c\x8d\x1e\xe0\xc3\x9d\x49\x6d\x83\x28\x48\x27\xa3\x26\x71\x51\x17\xfd\x72\x50\x51\x87\x12\x3c\xcb\x7b\x14\x67\xf4\x3e\x3b\x3b\x75\x3f\xaf\x4e\x46\x2d\x9a\x06\x76\x87\xca\x7a\x93\x3b\x5a\xeb\x26\x16\xa9\x12\xd1\x00\x26\x1e\xa7\x8e\xdd\x48\x2e\xaa\x7a\xa0\xa0\x2d\xe2\xbd\x0c\x9a\x6a\xfe\xb2\x33\x6b\xe8\x0c\xcf\xb8\x1f\x27\xd0\x30\xc5\xe6\x86\x16\x33\xab\x30\x90\xd3\x61\x32\xb7\x8f\x38\x08\x4c\xe4\xee\xe0\xbc\x42\x40\x9e\x04\x72\x60\x5d\x60\x8a\x90\xb3\xc1\x60\x29\x3a\x31\x41\x80\xd3\x14\xe2\xc6\x0e\xe2\x7d\xca\xde\x7e\xa8\xff\x0b\x8a\xeb\x9b\x41\xe1\xa1\x86\x21\x1a\xc8\xdd\x62\x44\xd6\x2e\xf0\x45\xfb\x39\x12\x61\x2f\x49\x27\x0c\xd2\x41\xb6\x92\xd9\x58\x00\xa1\xff\x09\xe9\x17\xde\x2b\xa8\x4f\x88\x6a\x6b\xd5\xd9\x47\x20\xac\xee\xd0\x35\xe1\xf3\xa8\x0a\x75\x0e\x35\xec\xf8\xea\x1a\x6e\x76\x85\x3d\x7f\x03\x6b\x5c\xe5\xbc\xeb\xe4\x73\xf8\xd9\x75\xbe\x72\xb0\xe3\x01\x82\x30\x49\xcf\x98\x61\xfd\x90\x3e\x5c\x5e\x52\x50\x03\xa1\x8e\x25\xc7\xba\x70\x8a\xbe\x44\xcb\x55\x70\x1f\x14\x58\x8d\x46\x86\x22\x1d\x78\x03\xb3\xec\x3d\xc1\x7d\x4d\xc1\xc6\xfd\x35\x52\xe4\xb8\x0a\x90\x9c\xc6\x0b\xce\x8a\x7d\x1b\x08\x2f\x38\x4d\xd0\x83\xd6\x5e\xc9\x7b\x6d\xfb\x15\x5b\x85\x81\x1b\xec\x85\xd4\xa0\x0e\x68\x8a\xdf\x04\x4f\x5a\xbf\x58\x3f\xe2\x23\xbe\xeb\x27\x9c\xed\x38\x4b\x35\x60\x16\x4e\xc7\xda\xe9\xf4\x97\xe5\xd2\x01\xfa\x6b\x8a\x04\x3a\x58\x83\x51\x90\xc2\x4d\xc2\xc8\x1c\xed\xa9\xd1\xc3\xef\x0b\xcc\x4f\x99\xaa\x19\xe2\xa9\x1f\xec\xd1\x0e\xb4\x00\x02\x83\x53\xb7\x60\x9a\xe2\x5a\x7e\x5f\x2b\xcc\x35\x4e\xf1\xbd\x81\x83\x9d\xab\xa1\xff\x90\x3e\x2b\x99\xb3\xb4\x02\x1e\xe4\xb2\x63\x65\x50\xb6\x53\xf4\xe7\x07\x6a\x04\xcb\x84\x0e\xb7\xb9\x3b\xea\x59\xe3\x1d\x37\x2b\xb1\x66\x1b\x10\x38\xd4\x85\x59\x45\x1d\x46\x79\x56\x4d\xaf\xb4\xab\x0b\xa2\x75\x6a\x32\xab\x83\x8e\x15\x59\xb0\xa8\xa3\xf8\x13\x13\x85\x74\x87\xb4\x9e\xac\xb5\x9c\x6c\xab\xb7\x03\x7e\xde\xa2\xf4\xae\x52\x35\xb0\x1a\x54\x84\xac\xc1\xdd\x3e\x89\x46\xd6\x3b\x70\x83\x61\x39\xfa\xc1\x92\x52\x96\x90\x78\x12\xb6\x6f\x9c\x56\x01\xc9\x5b\x8e\x77\x77\x37\xb9\x43\x2e\x84\xd3\x42\xaf\x51\x39\x16\x98
\x62\x41\x61\x75\x76\x63\x52\x00\x1e\xe9\xca\xfe\x27\x6c\x7a\x78\x30\x1b\x13\x87\xdf\x80\x4f\x82\x0e\x91\x31\x8f\x9c\x99\x91\xf4\xdb\xb4\x7b\x1b\xe1\x4e\x78\x7a\xa3\xe0\x6b\x02\xa1\x39\xde\xfc\x84\x04\x43\xb2\x5d\x3d\x37\x47\x74\xdb\x5d\x1c\xd6\x9d\xaf\x2d\x14\x49\xe5\x9f\x37\x35\xfd\x67\x6a\x1c\xaa\x00\x65\xa9\x78\x28\x07\x0e\x71\x43\xfd\xcb\xda\xc7\x27\xad\xf4\x3e\x3e\x27\x02\xfa\x6e\x7d\x39\xc8\x2b\x55\x27\x3c\x34\xeb\xbb\x6d\x49\x77\xce\x3a\xf6\x34\xb8\x73\x0d\xc0\xda\x1e\x8e\xed\x3c\x8c\x8f\xdb\x80\xbf\xbc\xb4\x01\xd3\x04\xad\x3c\x3c\x81\x2a\x95\x5f\x7b\x95\x58\x8f\xa7\x42\x6b\xfa\x34\x33\xaa\x0c\xa2\xaf\xf7\x46\xd3\xb3\x69\x4a\x17\x6e\x57\x9a\x09\x48\xd9\x7d\x48\x82\x0b\xac\x18\x27\xba\xf4\x8a\x95\x25\xeb\x66\x7b\xda\x50\x1a\x17\xa6\xf2\xcb\x69\x3f\x0d\x13\xc6\xec\x9b\xeb\x17\xdb\x4b\xe7\x47\xe3\x2c\xd8\xb7\x36\xb5\xea\xdf\x48\x94\xfa\x76\x6d\xaa\x2f\x67\x3a\xbf\xe5\x78\xd0\xb0\x0f\xb3\x3c\xe4\x7c\x7e\x0b\xfd\xfa\x38\x34\xfb\xb2\x8f\x6c\xcf\x8e\x93\x64\xdf\x14\xdc\x5e\xa9\xe4\x7c\xed\x49\x36\xfe\x67\x24\xc1\xe6\x8b\x39\xea\xe8\xfc\xd9\x42\x84\x2d\x3f\xdf\xff\xbb\xad\xcd\xf2\xa7\xe5\x69\x50\x5f\xaa\x49\x7e\xa8\xf0\xf7\x29\x3d\x4c\xf1\x89\x7a\xed\xc3\x59\x00\xe7\x02\x53\x6a\xc6\xe0\x12\x8e\x7f\x5b\x22\x81\xe3\xa3\x58\xfa\x27\xc1\x44\x54\x3c\xe0\x0f\x93\x3f\xde\x6c\xb6\xde\x61\x77\xd7\xbe\xf9\x8c\x0b\x7a\x37\x1f\xbe\xbd\x5b\x67\xe1\xda\x10\xfa\x26\x63\x37\x06\xd3\x4e\x21\xe6\x07\x49\x36\xb1\x03\xe5\x3d\xc1\x1a\xb0\x5b\xe0\xd9\xbd\xf5\xfc\xc7\x67\x5f\xd4\x01\x56\x65\xe8\xbf\xa3\x87\xfe\x56\x8f\xe9\x5d\x82\xab\x5b\x9a\x12\xc2\xdf\x1e\xf4\xda\xb5\x83\x2d\x74\x04\x41\xaf\x22\x18\x5a\xc2\x6f\xa5\x88\xbd\xd1\xde\x64\x91\x8a\x83\x7c\x09\x16\xb2\xbe\x77\x30\x8d\xf5\x25\x02\xe6\xed\x94\x87\xe0\xba\xd4\x93\x3c\x66\xe8\x90\x2c\xdc\x76\x91\x47\x61\x90\xa8\x8b\xc8\x27\x00\x39\xd0\xef\xdd\x7c\x97\xf0\xf8\x7e\x43\xcd\x99\x74\x65\x64\x15\x74\xed\x7f\xb0\x92\x65\xc8\x0f\x56\xf2\x1e\x2b\x25\xaf\xfe\xee\xa4\x7e\x43\x13\x09\x54\x86\x12\x33\xb7\xdf\xca\xc9\x78\x5b\xeb\xd7\x6d\x0a\x7d\x4c\xef\xcf\x29\xff\x04\xf4\xc1\xf4\x6d\x0c\x6c\x3a\xe9\xd7\xcb\x6e\xf1\x98\x8b\xc6\xc7\x20\x29\x2f\x34\x1a\x05\x0d\x39\xa8\xc9\x59\xbe\x88\x9e\xe4\x26\x1a\x17\x2c\x08\xc7\xb0\x20\x9c\x28\x9a\xec\x8c\x63\xf6\xb5\xe1\x3d\x18\xef\xb9\xe1\x91\xd3\x9b\xef\xa4\x2a\xf1\x9d\x94\x65\x61\x40\xc3\xa2\x77\x9a\x74\x57\x5f\xf5\xbd\xca\x73\xb0\x6d\x9a\x6d\x79\x61\xac\x47\xa2\x70\x9d\xc9\x1d\xf6\xbd\x5e\x48\xc3\x71\xca\xaf\x98\xd8\x2f\x99\x05\xb9\x00\x85\xd7\x37\x1e\x94\xd8\xf3\xfd\xea\x0b\xae\xb5\xfc\x5b\x62\x75\x04\xb1\x36\xff\x8c\x5e\xc5\x46\xae\xce\xb9\x2d\x49\x07\x90\xe9\x60\x0b\xf5\xaa\x97\x8c\x80\x5e\x56\x73\xfb\x3d\xc0\x54\xb3\xc0\x2a\xca\x32\x59\x81\xaf\xac\xe4\x9a\x57\xd2\x06\x68\x3b\x76\xb1\x7b\xe1\x7d\x2f\xc6\x6f\x1b\x82\x69\xfc\x66\xa2\x10\xc2\x6c\xe1\x57\x68\xbb\xd4\xe0\x30\xef\x08\xbd\x68\xd3\x2d\xa5\x3d\x78\xc5\x73\x37\x30\xb4\xe7\xa4\x83\xee\xa4\xc8\xbf\xd8\x3a\x04\xed\xb8\xb6\x43\x28\x87\x2e\x0e\x59\xca\xaf\xac\x6c\xb0\x4e\x5a\x5e\xaf\xbe\x34\x57\x17\xa4\xe4\xc2\xd0\x1d\x14\x07\xa4\x5b\xe8\x10\x78\xfc\x25\x08\xd0\x49\xba\x8c\x70\x4e\x64\x1c\xd6\x17\x8a\x38\xcf\x9a\x9b\x73\x25\x75\xf5\x8b\x40\x53\xf0\xde\xf0\x2d\x61\xee\x1b\x59\x45\x2c\x33\xf3\xa8\x7c\x52\x9c\x79\x82\x12\xe7\xbf\xc3\x4c\xf7\x92\x1a\x0a\x5c\x43\xfc\x82\x2e\x7b\xe7\xb8\x06\x13\xe5\xce\xf0\xbd\xd3\x97\x6b\xc0\x66\x16\x63\xd9\x0d\x6f\x2e\x62\xf2\x8c\xbd\xb9\xd6\xaa\x3e\xe9\x7a\x49\x0e\xf3\x6a\x91\x66\x77\xd5\xaf\xca\xff\x05\x13\x19\xe4\xd1\x51\x5c\x64\x0e\xa3\xaf\x7a\x54\xf9\x29\x1b\x2c\xe4\x19\x33\x55\xdf\x4a\x38\xa9\xf8\xf5\x54\xba\x8e\x21\x07\x97\x26\x87\x55\x2
c\xe7\xcc\xc4\x2e\x6b\xfe\x55\x75\x5d\x4e\xa3\xcf\xb4\x66\x74\x9a\x71\x95\x9e\xb9\x2d\x6d\x4a\xd3\xbb\x66\x48\xbe\xf9\x8f\x99\xb3\x4c\x39\xf0\xed\x16\x19\x33\x26\x89\x12\x80\x8c\xcc\x2a\x1a\xce\x8c\x02\xed\x4b\x22\xf2\x0e\xef\x19\xd2\x94\x6d\xe4\xcb\x86\xf9\xca\x59\xee\x8c\xc9\xe7\x14\xae\xae\x58\x72\x61\x2e\xa9\x90\x6c\x1c\xf7\x3d\x29\xb3\xe7\xf2\x14\x45\xef\x0f\x5f\x49\x29\xaa\x20\x2c\xeb\x61\xa4\xee\x33\x74\x61\xe0\xfd\x21\x90\xa8\xce\x15\x19\x62\x34\x67\xe4\x56\x83\x68\xa8\x5f\xfb\x66\x9f\x1f\x27\x7d\x7d\x90\x07\x4b\x6e\x33\xe2\x5b\x0b\x11\x7c\x1e\xc2\x92\x0f\x72\x0b\xe1\xb1\x98\x3e\x7c\x76\x86\x68\xf6\xf7\xb2\xd6\x94\xac\xe7\x4b\x8e\x9f\x27\x0c\xd9\xbd\xa4\x61\xe0\x4d\x53\x51\x2b\xc9\x22\x75\xd1\xf2\x08\x01\xfa\x92\x76\x23\x3d\xe2\x44\xa0\xc3\x83\x39\x27\x0c\x08\x50\x5e\x37\xa7\xcd\x0c\x1c\xf2\x3c\x14\xe9\xb7\x1b\x13\xa0\xfa\x14\x9b\xb8\xd5\xb9\xaf\x63\xa6\x23\x4d\x19\x83\xe9\x8c\xfb\xcd\x73\x5f\x6e\x8d\xce\x53\x1c\xe9\x16\x55\x99\x31\x47\x67\x36\xb3\xdc\x36\x59\x40\x6e\x72\x56\xbb\xfe\x6e\x65\x30\xda\x67\x4b\xfe\xdf\x85\x50\x34\x6f\xe3\xd5\x2d\x18\x73\x30\x6c\xfb\xfc\xaf\xa1\x43\x53\x60\x06\xb7\xa0\x49\xc7\x44\x14\x51\xec\x83\x26\x85\x9c\x53\x77\x7e\x94\xd9\xdd\x35\xda\x37\xf8\x3f\xa2\x19\xe4\x10\x51\x99\xef\x6e\x31\xee\x8d\xbd\xa8\xd0\xd6\xa2\x75\x63\x3d\x2c\x91\xd6\xa7\x86\xf3\x3c\x10\xda\xe4\x3e\x07\x19\x53\x7e\x15\x56\xe7\xdc\xec\x07\x1b\x0c\xab\xcc\x23\x7f\x44\x11\x3d\x7a\x35\x66\xd0\x86\xfd\x8c\x15\x1a\xee\x0f\x03\x9e\x47\x37\xb7\xf5\xb7\x30\x68\x96\xd6\xb5\x25\xcb\xf9\xcd\x67\x55\x6f\x25\xb8\x9d\xd5\xd5\x88\xe8\x39\x07\x57\x7c\xfe\xbb\x89\xe6\xbc\xc7\x56\x5b\xd5\x5b\x0b\x5f\x4a\xf2\xa7\x29\x2a\x74\x56\x76\x1e\xfc\xb5\xd8\xad\xd2\x3f\x05\x16\x01\x52\x5d\xf7\xda\x0d\xa9\x4f\xba\x7c\xf8\xc8\x43\xa3\x9b\x13\x69\xb2\xe4\xd8\x6f\xf4\x2a\xdc\x52\x74\x6e\xad\x9f\xf0\xa3\x78\xae\x9a\xdd\x32\x40\x93\xc4\xe8\xa1\x9d\x4b\x15\x77\x7d\xf1\x04\x90\x13\x44\xf1\x44\x00\x91\x56\x32\xa3\xdf\x7c\x01\xca\x6f\xb8\x78\x22\x2a\xc9\x52\x2f\xa4\x55\xfb\x07\x47\x6a\x67\xb1\xcb\x0a\xd2\xf4\x46\x65\xd4\x13\x49\x5f\xb1\xab\x3b\x58\x2a\x73\xc0\x96\xc8\x6d\xef\xb3\x51\x27\xdc\xe8\xfd\xda\xca\x40\x8b\x8a\xb3\x68\xd8\x08\x2a\x75\xb8\x44\xde\x9c\x7f\x30\x90\xc7\x96\x1e\xa4\x3c\xb7\x90\xc8\x3b\x95\xa2\xd4\x9d\x0f\xe7\x2a\xff\x49\x90\x57\xf8\x04\xe0\x70\x79\x0a\x68\x27\xeb\x52\xc8\x79\x32\xb7\x6e\xa1\x38\x33\xd1\xc0\xfd\x2a\xd9\x80\x38\xfc\x51\xae\xe3\x2d\xb2\xe6\x8a\x9f\xb9\x35\xb0\xe2\x1a\x7b\x53\xbb\x92\x44\xdd\xe8\xf2\x71\x4a\x75\xfc\xec\x9f\x13\x72\x25\x65\x59\x29\xc3\xb2\x33\x26\x8c\x4f\xf5\x32\x40\xb2\x16\xb3\x58\xc2\x07\x79\x7e\x26\xbf\xd4\x9c\x43\xb5\xff\xcc\x53\x79\x71\xeb\x41\x1e\x88\xd6\x2b\xf5\x22\x6e\x60\x4e\x1e\x2a\x3f\x03\x6b\xab\x34\x30\x54\x4b\xc2\xf5\xd5\x39\xcc\x6f\x51\xa3\x9c\x72\xdd\xbe\xb1\xba\xdd\xbb\x08\xcd\x30\xf0\xec\x6b\x7f\xce\x92\x52\x93\x03\x21\x9a\xb5\x2e\x6c\x28\x1f\x3f\x4b\xca\x6c\x7d\x9f\xeb\x8a\xad\xe3\xa6\x03\xfd\x93\x89\xab\x65\xa2\xed\xee\x0e\x9e\x20\x54\xff\xd8\xdf\x36\xea\x7b\xf6\xfd\xd7\x5b\x67\x5b\x23\x75\x97\xde\x96\xc1\xa6\xee\xe2\x14\xc0\x0f\xb1\xb9\x73\xbc\x29\x5c\x4c\xc1\x37\x4f\x79\xea\x67\x94\xa9\xa3\xb5\xc8\x0a\x8e\x9d\xc9\x87\x4d\x67\xba\x29\x7d\x75\xba\xcc\xe0\x4e\xb4\xeb\x35\x01\xbf\x99\xa5\xdc\x24\xe5\x92\x01\x7f\xcb\xfa\x11\x80\xf5\xbf\x38\xd3\x56\xbf\x8c\x5a\x3d\x05\x8d\x5a\xf5\x25\x44\x64\x5c\x82\x43\x09\x56\x60\x4c\x8b\x88\xbd\x96\x6e\x62\xe9\x61\x10\xe4\x1d\x4a\x50\xc1\x9a\x7f\x35\x1d\x29\x5f\x59\x46\x95\xd3\x2d\x27\xd8\xd4\xeb\x3f\xc4\xa6\xb5\x1d\xfd\x22\xef\x6b\xa6\x8e\x79\xbb\xcf\x1f\x5b\xa6\x81\xbb\x9b\xff\x23\x53\xda\x49\xb6\xf7\xcc\xfd\x0c\xe4\xa8\x
fd\xdb\xfd\x70\x8e\xeb\x2d\x42\xf4\x3d\x71\xc4\xd8\x50\x8b\x0a\x30\xc8\xdd\x7e\xb3\x30\x4e\x8d\xf6\x40\x45\x83\x71\xd1\x26\x1f\xca\x8b\x24\xda\x2a\x13\x20\x04\x9d\x83\x22\x5d\xa5\x69\x61\x68\xe2\x51\xf7\xa7\x72\x25\xc0\xef\x2f\x5c\x27\x8b\xd3\x56\x99\x42\xd8\xaa\x3d\x1a\x91\x89\xab\x06\x6a\x08\x51\x8b\x46\x6d\x0f\xda\x2f\xa1\xbc\x38\x09\x0c\x8f\x61\x44\x58\xe5\xc5\x19\x43\x64\x88\x30\x92\x1c\x8d\xae\x41\xe7\xeb\xb7\x42\xea\xaf\xdb\x5d\xca\x10\x33\xcb\xf9\xb3\x64\x0e\xcf\x91\xb1\x42\x3a\xb5\xab\x1e\x64\xc1\x4d\xc7\xb5\xb7\x8b\x36\x8a\x80\xea\x2d\x23\xd1\x36\xb4\x94\xdb\xec\x6e\x3d\x2d\xbf\x82\xec\xbc\x9d\x61\x0e\x9a\xb7\x7f\xfc\x2f\x6c\x49\x03\xd2\x98\x05\x02\x79\x9a\x89\x2f\x86\x34\xcb\xd0\x60\x55\x4f\x1a\xb5\xcf\x3e\xfb\xaf\xc8\xb3\x0c\x36\x95\xb9\x63\x60\xa7\xea\x94\xa1\x22\x37\xcc\x3c\x49\x4e\x88\xfa\x50\x9a\x57\xb5\x05\xcd\x92\x59\xbf\x7e\x92\xad\xe6\x67\xb8\xc5\x7f\xbe\x67\x26\x9f\x36\x31\x49\x9b\x44\x00\xc1\xf4\xfc\x23\x38\xa4\x8a\x82\xf8\x3c\x77\xdf\x42\xab\xc2\xe1\x1d\x68\x4a\x22\x50\x6f\x24\x54\xc3\x68\x8b\x5c\xec\x22\x5b\xce\xe0\xb2\xc3\xb1\x50\x1c\xeb\x99\x7b\xdb\x57\xc1\xd3\x3f\x92\xba\xec\xcd\xae\x76\xa7\x41\x67\x16\x1b\xcb\xd0\x5f\x62\x99\x45\xaf\xfe\xf8\x17\x74\x58\x55\x24\xd6\x33\xdc\x12\xc8\xa8\xfe\x67\xb6\xe6\xa0\xb4\xf7\x71\x93\x3e\x7c\x6b\xa6\xfc\x68\x9c\xfd\x3d\x14\x08\x86\x9d\x27\x23\xd8\xb0\xf1\xd4\x44\x81\xc1\x3b\x87\xa3\x27\xa4\x75\x7d\x25\x8c\xa9\xc8\x3f\x36\x94\x84\xae\x4b\xe9\x3e\x07\x3e\x18\x99\x76\x92\x31\xab\x21\xe4\x61\x9f\xcd\x4c\x5f\xf4\x43\x93\x0d\x47\x6d\x32\x6d\x3d\xea\xc1\x77\xfc\xa8\x55\x12\x81\x68\xbf\x27\xd2\x59\x0d\x8e\x1f\xae\x5b\x18\xd9\x3a\x2a\x8f\xb9\xde\xaf\x42\x8d\x9e\x43\x21\x21\x7b\x62\x3e\xe2\x3e\x56\xe5\x1a\x1e\x45\xcc\xdf\xf2\x9b\xc2\x3d\x90\xc8\x2b\x11\x06\x78\x7b\x86\x4f\x26\x08\xd5\x62\x62\xff\xd3\x85\x77\x57\x70\x92\x8e\xe7\x42\xf1\xbe\x63\x4e\x5c\x56\xa0\xfb\x78\x50\xcd\xe8\xd0\xdb\x71\x78\x83\xa2\x5f\x19\x8d\x8b\xd3\xb0\xc7\x1f\x0b\xbb\x31\x79\xef\xd4\x52\x12\xf0\xee\xe9\x11\x71\x68\xa7\x58\xb7\x30\xdd\x0c\xa3\x20\x43\x0c\xd7\x3a\xdf\x9a\xa5\x4c\x3d\xa5\x1a\x62\xfd\xc9\x59\xdc\xee\xe4\x8f\x28\x7d\xb7\x30\xcb\x04\x31\xc1\xff\x95\xac\x7b\xd3\x86\xff\x48\x9e\xe6\x29\x52\x84\x28\xa1\xe2\x42\xa2\x75\x1b\x15\xda\x39\x92\xbf\xad\x42\x93\x88\xd5\x87\x8e\xc5\xf9\x08\x74\x59\x6d\x88\x55\x02\x23\xdf\x56\x47\xde\x27\x71\xb9\xf7\x51\xc1\xf3\x43\x73\x8f\xe0\x5d\x6b\x56\x06\x6a\x3f\x62\x7b\x62\x9c\xba\x2f\xb1\x81\xd8\xdf\x3e\x6e\xe7\xc1\xc7\x85\x93\x8d\x70\xff\xe4\x03\xac\xfa\x14\x20\x23\x11\xb5\x80\x55\x16\x41\xbc\xbe\xcb\xf6\x33\xfe\xd4\xcc\xb5\xf5\x75\x6f\xd2\xb1\x35\xc7\x5b\x4f\x37\x9f\x24\x6e\xf5\xeb\xc5\x93\x83\x12\xce\x70\x51\xa8\xb4\x36\xf2\x1f\xac\x4f\xe1\xc4\xed\x43\xea\x79\xd5\x86\x90\xce\xdb\x5e\x41\xd9\xf2\x4e\x64\x0d\x45\x3c\xeb\xa5\x3f\x74\x64\x58\xe7\xeb\x62\xd7\xfc\x67\x8f\x55\x2e\xa5\x4b\x30\xb9\x6f\x0f\x02\xd5\xd9\xab\xc1\x3a\xab\xd5\xae\x70\x0f\x21\xc3\x5c\xe9\xa1\x61\x0a\xea\x47\x0d\x62\x6f\xb5\xa7\x7f\xd2\xc3\x74\xaa\xa1\x29\x6d\xbd\x77\xbf\x49\x48\xcb\x11\x39\x1d\x81\xa5\x0a\xeb\x11\x6f\x41\xea\xd0\x3e\x22\x84\xa3\x2a\x27\x77\xfa\x23\x01\xb5\xfd\x23\xc5\x2e\x03\x5a\xa1\x11\xc3\x7e\x93\x9b\x7d\x4b\x00\x6c\xc7\x67\xed\xaf\x29\xbc\xdf\xb5\x46\x03\x21\x43\xd2\x33\xdb\x91\x8b\xd1\x1c\x88\xbd\xbe\x73\x0c\xba\x87\x40\xda\x6e\x37\x2b\x46\x65\x7b\x1d\xa9\x1f\x05\x13\x53\x68\xa0\x12\xa2\xb5\x91\x01\x6a\xf1\x2c\x0c\xbf\xe4\x2c\x41\xec\x3d\xdf\xc3\xad\x3e\x37\x9c\x4a\xad\xdd\x65\x67\x03\x0b\xcc\x46\x07\x67\x5b\x61\x0d\x7b\x15\xaf\x56\x04\xbc\x78\x56\x45\x75\x61\x56\x8a\x02\x22\x0a\xe4\x5d\x5f\x67\x50\xb7\x90\xe4\x29\x60\x1d\xee\x4c\xeb\
x46\x78\x50\x25\x5a\x2b\xde\x68\x14\x48\x94\x36\x38\xa9\xa4\x37\xe3\xed\xaf\x87\x15\xe4\x36\xde\xfb\x88\x2e\xac\x43\xfb\x88\xc8\x35\x0e\x67\xc1\x4d\x71\x9f\xad\x7e\x44\x7d\x56\xb8\xd2\x3d\x4e\x49\x44\xad\x9a\x55\x33\x30\x52\xed\x7a\x83\xae\x18\x77\x67\xe2\xd3\x06\x4b\x0a\x57\x9b\x02\x22\x3f\x7b\xd7\x71\xf6\x91\xb4\x52\xe7\x5b\x3b\xdf\xb5\xae\x5f\x0a\xcb\xda\xd0\x91\xe9\x20\xe0\x4f\xf3\xe2\x99\x42\x63\x45\x09\xe2\x69\xb9\x54\x3d\x61\x54\x28\x5d\xaa\xad\x78\x6d\xc3\x4b\xb7\x2e\x0b\xe9\xc1\x88\x9a\xca\x05\x60\xb3\xee\xcc\x50\xc4\x9d\xde\x5d\xe5\x5d\x98\x94\xe9\xbd\x33\xad\xf7\xae\x40\x01\x0c\x72\x95\xd8\xe0\x3d\xc0\xbe\x21\x0a\xcb\xd8\x16\x46\x7d\x59\x3b\xae\xeb\x9b\xf4\x62\xb7\x71\xfa\xf9\xa7\x32\x0a\xb1\x95\xa5\xdd\xbc\xfe\x39\xad\xbc\xbc\x18\xfe\xb9\x81\xaa\x77\xec\xb6\xec\xf1\x29\xab\x40\x82\x24\x9e\x4f\xd0\xa1\xfd\x27\x20\xc5\xd5\xc6\x10\x3f\x2e\x36\x0b\x8d\x31\x36\x88\x67\x68\x95\x9b\x51\xd2\x92\x16\x86\x85\x2c\x27\xeb\x33\x53\x32\x7c\x0b\x69\x4c\x37\x63\x25\x79\x39\xaf\x61\xcd\xee\xba\x2e\x64\xcb\x6b\x54\x8a\x72\x67\xbe\xf5\x96\x17\xca\x57\x6f\xb9\xd0\x05\x6a\xcb\xfe\x33\xc4\x17\x17\xf9\x19\xa1\x7d\xf1\x75\xc7\xb7\xb2\xac\xb6\x11\x31\x07\xd1\x79\x47\x06\x41\xb7\x71\xa7\x74\x0d\x40\x12\x5d\x7a\xf3\x9e\x85\xc3\x25\x27\x72\xaf\x67\xe4\xb2\xf9\xe9\xc6\xe7\x5d\xa4\xbb\x61\x38\x46\xdd\xb0\x6a\x6d\x99\xea\x19\xc0\xb3\x08\xc1\x80\x59\x71\x37\xb7\x73\x0d\xc3\x4b\x10\xbb\xdb\x95\xc2\xab\xf5\xe6\xa7\x47\x62\x77\x13\x6c\x41\xf6\x6e\x9b\xfb\x77\x92\xe1\xad\x0d\x57\xe9\x31\xb8\x7b\x2b\x72\x3b\xb4\x71\x6a\xfd\x04\xe0\x8d\x38\x4a\x61\xef\x9b\xbe\x5f\x59\xb2\x7e\x45\xb2\xfe\xca\xc6\x6c\xb3\xc1\x95\x5c\x84\x35\x7d\xbd\x91\x9e\x0a\xe6\x36\x3d\x74\x9d\x90\xdd\xb9\xd9\x20\x66\x10\xbb\xe9\x92\x60\xf0\xb0\x8b\x76\xc1\x5c\x6e\x15\x0a\xbb\x5e\x83\xe9\x19\x3c\xed\x2f\xe7\xb2\x0d\xc9\x06\x0b\xdc\x6d\xdd\x96\xd4\xce\xaf\x9a\x44\xe4\x04\xc5\x86\x9b\x8e\xdd\x38\xb7\x00\xef\x57\x65\x05\xbb\xf3\xab\x14\x10\x92\xd2\xaf\x00\xc2\x23\x76\xac\x9b\x40\x89\xf7\x13\x82\xcc\xc1\x2d\x6f\xb2\x0d\xda\x90\xb7\xa4\x7d\xf6\x6f\xa7\x76\x5f\x62\xbb\x63\xe6\x47\xfc\x70\x7c\x36\x64\x67\xef\xa9\xca\xe2\x76\xdd\x9e\xc1\x19\x77\x49\xbf\x2c\x58\x62\x9d\xc4\x03\xd2\xda\x6f\xf8\xd2\x5f\xe0\x30\xb2\x74\x76\xf8\x8f\x90\x5f\xc0\x44\x0f\x8a\x7d\x61\x52\x2e\x18\xe1\xbc\xda\x67\xbd\x3f\xfc\x1a\x23\x0d\x6e\x4b\x39\x18\xee\xd3\x47\x8f\x71\xba\xb3\x2e\x3b\x58\xe9\x93\x06\x59\x08\x27\xc6\x69\xc5\xcf\x73\xd3\x59\xd5\x98\x49\x26\xce\x69\x7c\x62\x14\xf9\x1f\x28\xe5\xcc\x6e\x01\x29\x6c\x25\x48\xec\x3c\xad\xb7\xba\x81\xeb\xad\xb7\x7e\xbd\x17\xef\xf9\xaf\xf7\x94\xbb\x41\x6c\xf1\x36\x7a\xd5\x34\xc3\x5a\xdf\x93\xdc\x5a\xd5\x5b\x70\x0a\x7a\xf7\x60\xad\x67\x10\xd0\x79\x97\xc8\x44\x17\xa1\xbb\xb6\xd8\x57\x23\xfd\xb5\x1e\x1a\xc5\x81\xa7\x1e\xc7\xab\x9b\x80\x8d\x58\x35\xc9\x0e\x8c\x6c\x3b\x8f\x48\xd9\xc2\x48\x5e\x77\x0d\x16\xfb\x53\x6f\xd5\x5a\x21\x39\xb9\xca\x9e\x08\x3d\xf0\x1f\x55\xbe\xbc\x2a\x19\xe6\x65\xd7\x04\x1b\xbe\xd0\x5f\x10\x0c\x29\x93\x2b\xc7\x0c\x74\x83\x55\xd6\x19\xc8\x1a\x4f\xf9\xc5\x8a\xf1\x8d\xd9\x3b\x85\x85\x1d\x77\xd5\x0a\xab\xc1\x4f\x11\xcd\xc9\xa1\xcf\x3e\xd1\x24\x12\x7c\xe6\x48\x6f\xcd\x07\xd3\xf3\x60\x15\xab\x61\xe5\x2a\xaa\x14\xfc\x9e\xb8\xa3\xc2\x5f\x2b\xd3\xe3\x89\x30\x52\xe3\x56\x33\x26\x98\x04\xf9\xa4\x72\xbe\xe4\x3f\x1b\x64\xba\xd5\xc6\xd2\xe1\x40\x3b\xbd\x99\x91\xb1\xe6\x7f\x1c\x67\xf3\xb8\xc6\x85\xe8\xc2\x82\x9c\x9f\x93\x52\x4a\xd7\xe4\x32\x4d\x8e\x3e\x24\xad\x7b\x64\x67\x4d\xff\x13\x5f\x3d\x74\x45\x41\x38\x6a\xca\xf6\x59\xd3\xab\x72\x6f\xc4\x32\xef\x40\x13\xff\x77\xd5\x9b\x71\x12\x83\xe1\x64\x7b\xcd\x3a\x9c\xf2\xcb\x41\xaa\x8f\x0e\x33
\x32\x87\xd8\xc1\x72\xcf\x5a\xa6\x86\xae\x29\xdc\x84\xad\xe7\x5f\xf8\x4f\x1b\x7d\x6d\xfc\xdf\xfe\x2d\x6b\x0d\xae\xbc\x75\x66\x68\xb0\x8b\xd9\xbf\xa7\xfc\x72\xf7\x89\x8a\x3e\xce\x28\xf5\x26\x62\x4d\x5b\xb7\xa6\x5d\x75\xba\x95\x67\x58\xe2\x73\xb0\x4f\xfd\xaa\xdc\xc4\xe3\x8c\x6b\x9a\xc5\xfc\x77\xfa\x1a\x51\x6e\xd1\x3e\x82\xa7\xcf\x7f\x3f\xec\x16\xeb\x37\x87\xc8\xc0\xd0\xc4\x29\x86\xd8\x66\xa7\x1f\xdf\xba\x25\x60\xe7\x2b\xf8\xb3\xdc\xeb\x3f\xbe\xb3\xf7\x9f\xf7\xcb\x97\x3b\xcf\x3b\xb9\xe0\xf7\x60\xaf\x08\x57\xb6\xef\x4f\xed\x18\x14\x7e\xa7\xaa\x37\x56\x77\x8b\x0d\x6d\xcb\x52\x05\x69\xde\x8a\x99\x30\x9e\xa3\x85\x27\xe4\x02\xf7\x3e\xa7\x86\xb4\x58\x62\x79\x86\x51\x6e\xb0\xb6\x1b\x19\x1a\xc2\x22\xa2\x37\x66\x0d\x59\x19\xdc\xe9\x96\x90\x5a\x4e\xab\xc1\xa5\xb2\x87\xb9\x54\x65\xa0\x2d\x55\x8a\x83\x4b\x3d\x10\x4e\x72\xb4\x33\xbd\x60\x89\xdc\x17\x03\x64\xe3\xcb\x60\x70\xa9\x73\x90\xff\x31\x18\xe5\x71\xf2\x5e\x9e\x3c\xb4\xa8\xfb\x48\xd3\x0d\x96\xff\x40\x92\x38\xc4\xa0\x74\x55\xbb\x98\xbe\x06\xb2\x42\xb7\xe4\xf4\x93\xc8\x5f\x69\xfb\xe0\x3f\x7d\x75\x5d\x3d\xd1\xf5\xd5\x42\xb9\xcf\x2e\x5b\xd5\xa5\x9c\x29\x88\xf1\x45\xd4\xfb\xc6\x7e\xdb\xa2\x18\x7a\x6f\x34\xe0\xa7\x9b\xc3\x73\x37\x22\x22\x0b\x3b\x0a\x20\xf3\x6b\x76\x7a\x91\x2c\xc4\x52\x86\x3a\x69\x78\x87\xb1\xe5\x0b\x55\xa2\xdf\xa3\xfc\x61\x26\xf9\x12\x4e\xc0\xe2\x5e\x2e\xb2\x27\x5f\x5c\xb6\xff\x95\xfc\x68\x48\x7a\xf0\x41\x3c\x10\x0f\x9c\xb7\xe3\x52\xa9\x15\x99\x44\x74\xeb\x46\x49\xa1\x6e\xc1\x00\x3d\xb7\xa0\xc5\x7f\x45\x80\xff\xfe\x98\xf4\xef\x04\x43\x61\xae\x0e\xef\xe5\x74\xde\xfc\xe7\xa9\x24\x0e\xa0\x1c\x2c\xfc\xdf\xe1\xfa\x40\xca\xa0\x6c\xb1\x16\x37\xe8\x74\xb0\x6b\xe2\x77\x61\xe6\x01\xf8\xf7\xdc\x6d\x8d\xb0\xce\xa2\x6f\x14\x4c\xfc\x9d\xff\x9b\x29\xc7\x90\x66\x4d\xc9\x2d\x31\xa9\x0b\x07\x5e\x8a\xf3\xfd\xdd\x08\x00\x9c\x8e\x48\x3a\xf9\xbb\x33\xd5\x24\xff\x6e\xf9\x8c\xff\x31\xfa\xf6\x57\x31\x77\xe3\x51\x9e\xbf\xba\x2b\xad\xd2\x10\x69\xf2\x55\x9c\xfa\x3a\x04\x33\xff\x49\xd3\x42\xf8\xf3\xfe\xa5\xd1\x2a\x29\xff\x7f\x7a\xff\xf2\x9c\xfe\x78\x03\x1c\xed\x32\x3d\xff\x4b\x92\x45\xfa\x43\xb8\xa2\x93\xf0\x77\x15\xb1\x3c\x2e\xbf\x83\xc4\xbc\xde\x5f\x1a\x06\x06\x1e\x0b\x63\x3c\xe5\xa5\x10\x42\xe4\xcf\xd8\xa0\x44\x12\x98\x05\xd6\xdd\x6a\x92\x0d\xcb\xc6\xd7\xde\xb7\xfa\xa4\xd5\x2f\xdf\xb7\xb2\x64\x99\xa3\xfc\x63\x4d\x39\x7f\x82\x5c\xff\x3f\xb5\x9b\x90\xb1\xa5\x35\xeb\x2d\x42\x74\x58\x17\xfa\x0c\x14\xd7\x74\xb2\xd4\x7c\x6f\xdd\x4b\x61\x16\x3b\x19\x2a\x6e\x85\xa4\x38\xef\xae\x8b\xf4\xff\x64\x2b\x30\xdf\x53\xc8\x04\x0c\xbc\x03\x30\xd0\xf3\x68\xce\x8c\x48\x8a\xc2\xdd\x33\xc6\x99\x24\xcf\x9f\x1c\xdb\x22\x6b\x56\xde\xbf\x9d\x99\x6d\x97\x4c\xf2\x7a\xdd\xd5\x92\xfd\x04\x6a\xc1\xe6\xd7\xa1\xd6\xa0\xf7\x5b\x9f\x44\xde\x35\xa4\xf4\xf7\x09\x57\x5f\xfc\xb1\xfa\x3b\x39\xbd\xce\xfd\x54\xf6\x52\x28\xf1\x63\xae\x53\x9c\x6c\x92\xaf\xaf\x0e\x5a\x1a\x7f\x48\xd9\xbf\xb1\x5f\xd9\x51\xab\xf0\x85\x98\x7f\x16\xbf\xd2\x75\xb0\x8e\xc6\x97\x85\x94\x3d\x66\xeb\x72\x84\xe5\xef\x33\x51\x2b\xcc\x9a\x0a\x72\x52\xe9\x02\x0c\x90\x43\xbb\x79\xce\xdd\xda\x57\x16\x97\x48\x11\x98\x97\x70\xc2\x06\xc3\x3f\x33\xda\xa7\x39\x20\xc1\x27\xf9\xe8\xd9\x2e\xef\x79\x8b\xe1\x2f\xc6\xc7\x1c\x9a\x3b\x60\xec\x77\xd1\x63\x2c\x14\xf1\xf8\xa7\x90\x1c\x90\x69\x37\x0c\x82\xe3\x7c\xe5\x11\x3a\x02\x88\xa8\x72\xb3\xa7\x78\xf0\xd6\x81\xd3\xc3\xcf\x43\x6c\xd5\xd9\xb8\x06\xcf\xb5\x90\x4a\xff\x0d\xf0\xe1\x84\xbe\x01\x0e\x0b\xe7\xf4\x66\xa5\x38\xa3\xd3\xb1\x25\x91\xe8\x5f\xfc\x85\x9e\xe2\xea\xe0\xa4\xfb\x38\x38\xff\x7b\x18\x3d\x83\xe1\xb9\x12\x5c\x7b\x37\x13\x8e\x55\xf4\xf7\x33\x70\x94\x10\x00\x58\x9a\xe6\x50\xe6\xf0\x8d\x81\x16\x80\x5
6\xa0\x7f\xc5\xe9\x17\xab\x93\xbe\x59\x9e\xc1\x3b\x75\x2f\x52\x37\x66\x9c\x9d\x52\x41\xb0\xb9\x11\xf3\x56\xa8\xd3\x90\xd8\x0a\x23\x0b\x9d\x49\x87\x73\x8c\x4e\x30\x11\xb7\xc6\x55\x14\x2a\x0e\x3c\x9f\xec\x4c\xc2\x47\x98\xdb\xe8\x42\xa6\x22\x86\xb2\xc1\xc4\x6d\xc7\x25\x3e\xe6\x34\xe4\xec\xd6\xb4\xf7\x87\x81\xee\xa7\x0f\x0c\x9c\x95\x7c\xef\x9a\xac\x7b\x92\xda\x10\x56\xc2\x61\x11\x0e\x38\xa8\x81\x32\x2c\xdf\xd9\x1b\xf6\x84\x8d\xda\x03\x4a\x2f\x7c\xde\xb4\x74\xbd\x47\xd1\xaa\x6f\x5c\x94\x23\xeb\xb3\x0c\x3f\xf6\x3e\xb2\x48\x9d\xed\xee\x12\xc4\x91\x64\x6b\x4b\x9b\xbf\x8c\xae\x3c\x10\x8a\xf2\xcf\x7f\xf5\x87\x3d\x68\xa8\x2f\xca\x30\xf7\xd2\x4c\x14\x80\x5c\x4f\x0d\x3e\x1e\x85\x56\xe4\x2e\x59\xa0\x5e\xfe\x33\x26\x82\x20\x56\xa0\x89\x2c\xef\xd3\xe9\x26\xbb\xea\x80\x22\x20\x54\x20\x88\xa4\x64\x5b\x12\xf7\x5f\xa6\x6a\x36\xf4\x92\xd7\x6e\xbd\x99\xe4\xf3\x8c\x38\xd8\x93\xb9\xec\x88\xe3\xc5\x71\x6b\x8b\x59\x16\xc8\x0d\x44\x34\x08\x98\x32\x2e\x40\x6b\x5f\x07\x51\x8b\x90\x26\x98\x03\xe2\xd5\x92\x0a\xc1\xfe\x13\x3a\x58\xa7\x9f\xc0\x23\x96\x68\xd9\xf8\xc4\x14\x5f\x08\x0c\xb4\x55\xda\x0d\xd4\xf9\xe6\xd1\xb2\x3a\x57\xcf\x3a\x48\x99\x60\xc8\x6a\x1f\xec\x1b\xf5\xa6\xfc\xb9\x4a\x68\xcc\x4d\x91\x79\xaf\x3c\xbf\x9f\x1b\x75\x1f\xa2\x67\xb9\x4f\x89\x03\xb4\xae\x30\x70\xb0\xc7\xf1\x8c\x81\x19\xb4\x06\x74\xbf\x8a\xdd\xc9\x4a\x4a\x3e\x6a\x70\x09\x16\x64\x4d\xc0\x3c\x73\x37\xab\x7a\xa2\x72\x12\xf3\x1f\x63\x49\x5d\x4d\x3e\x74\x12\xb6\xce\x56\xe8\x66\x64\xaa\xb7\x3c\xff\xa1\x28\x43\x32\x40\x0a\x01\xc3\x20\x6f\xe3\x8f\x76\x4f\xca\xaa\x7c\x26\x37\x9b\xfd\x8f\x32\x00\x65\xe7\x84\xd4\x13\xb3\x14\xf1\xc2\xe2\x57\x17\xca\x4f\x43\x18\xa0\xb2\x94\x6d\xf1\x2e\xff\x6c\xf1\xbf\x9f\x69\x3d\xf4\x9f\x75\x95\x51\xf2\x5a\xbe\x3a\xf9\xdf\xc4\xd2\x2d\x19\x9e\xf1\x8a\x27\x7b\x5b\x9f\xe1\x6d\x2c\x39\x80\xc6\xbc\x65\x98\x1d\x2b\x6f\xf2\xd1\x6c\x4c\xcc\x80\xfd\xa3\x75\x0b\x22\x97\x07\x38\x05\xda\xc4\x9b\xe4\x8c\x51\x89\x51\x3c\x42\xbe\xd9\x69\xff\x21\x8e\x0a\x5b\x63\x16\x2d\x77\x80\xfb\x82\x0c\x51\x66\x87\x3c\x38\xb9\xf3\x08\x0d\x95\x87\x3c\x0a\xad\x64\xde\xc5\xa3\x87\x49\x9d\x99\xbf\x77\x0a\x95\x2d\xe6\x87\x12\x96\x1e\x65\x75\x2e\x98\x95\xe4\x90\x3c\x4a\xee\xcc\xfe\xde\x82\x80\xaf\x9f\x3f\x8d\x3c\x7e\xeb\x67\x48\x6c\x06\x9e\xc3\x39\x2c\x87\xf5\x5f\xf9\xdb\x3e\xa4\xbd\xff\xc8\x4a\xb3\x7d\x64\xa7\xbe\x96\x80\xbc\x12\xcc\x4b\x4b\x3c\x21\xf8\x21\x8f\x74\x9f\xbb\x9a\xf8\x87\xb7\x14\xbe\x3b\x69\x6c\x8f\x20\x71\x3c\x3c\x76\xb5\xca\xd4\xd8\xee\xa0\xd2\xab\x0c\xb3\xf7\x84\x89\x53\x49\x00\x40\xbf\xd0\xdf\x9a\x47\x82\x67\x2b\x37\x1b\xfc\x3d\xc9\xff\x53\x48\x3a\xbf\xc5\x5b\x27\x51\x10\x0d\x77\x66\x0d\x65\x3d\x78\xe6\xaf\x79\x2c\xb7\x8f\x18\xdc\x04\x59\x29\x53\xf9\x47\xfe\x4f\x3f\x8f\x60\xe5\x20\xe6\xf3\x60\xdd\x01\x11\x29\xe8\x88\xd3\x74\xc0\x6e\xe3\x97\xcc\xfa\x83\xb9\x3d\x56\xb2\xc2\xce\xfb\xf5\x2e\x9c\xa8\xc9\x91\x45\x49\xb3\x63\x07\xe8\x06\x28\x32\x9c\x43\xd2\x1d\x09\x36\xe2\xeb\xcb\x43\x59\x4a\xe7\xa8\x23\x82\xa4\xdf\xed\xa7\x34\xd1\x9f\x9f\x32\x73\x44\x48\x38\x54\x08\x28\x8c\x92\x7f\xd3\x41\x18\x33\xaf\x3f\x31\x02\x4e\x75\x66\xe8\x20\x00\x58\xdd\x4f\x3b\xdd\x0f\x87\xd6\x13\xf9\x96\x93\x24\xd4\xa7\x5b\x92\xce\x53\xa4\x45\x4d\x90\xa4\xe5\x26\xbf\x6e\xdd\x74\x53\x3f\x60\x52\xc2\xe2\xc4\x91\xea\x04\xd7\x96\xdd\xc1\x43\x2d\xe5\x74\xf7\x33\xde\x63\xcb\x87\xfe\x3d\xd0\x7b\xda\x01\xcb\xa2\xd2\x3b\x1c\x93\xa7\x67\x38\x28\xd4\xcd\x9c\x6c\xac\xad\x9e\xfd\x54\xff\x77\x25\x4a\x83\xc3\x63\x99\x47\xac\xef\xdd\x9b\x24\xb0\xfa\x33\x09\xfc\x72\xb4\x9d\x22\x48\x36\x49\x22\x76\xfa\x51\xfc\xcf\x1c\x5c\xff\x21\x6e\x00\x1a\x06\x96\x14\x23\x9b\x6a\xb7\xca\x00\x7c\x32\xa8\x32\x
d5\x59\xfa\x02\x21\xef\x04\xa4\x4e\xd8\x54\x07\x9d\x79\x08\x4f\xe3\x42\xf7\x83\xa9\x58\xed\xbe\x8a\xf0\xff\x8f\xf3\x70\x89\x29\xbf\xa9\x28\xb3\x65\x0a\x37\x24\xfb\x70\x93\xf7\xf6\xc0\xca\xff\xa8\xe4\x23\xca\x2f\xcd\xf2\x4e\xfe\xf6\x3b\xb8\x38\x58\x9a\xf2\x09\x9f\x2b\xb6\xad\x13\xec\x1b\xb9\xeb\xc9\x18\xa9\x2b\xe1\xf3\x5f\x47\x9e\x44\xc6\x0f\x52\xc4\xe4\xa4\x2c\xde\xa2\xee\x26\x0d\xc0\x3d\xb7\xd2\xfe\x8a\x8c\x94\x31\x17\x32\x87\x7a\x00\x22\xe9\xd4\x79\x77\x9c\xc5\xd5\x7f\x2a\x5a\x69\xf0\x11\xfa\xbd\xc8\xa9\xbe\x5d\x70\xf8\xd4\xd2\x16\xd4\xff\x34\xf2\x1a\xca\xa5\x06\xca\xe0\x29\xff\xec\x0c\x79\x22\xb2\xe5\x01\xd3\xe3\x3f\xd2\xf9\xa5\xb3\x90\x51\x27\x1e\x87\x8e\x39\x62\xce\x58\x3f\x68\x68\x37\x5f\x13\xb7\x73\x2e\x44\x3a\x63\xd8\x21\x19\xf2\xc6\x71\x82\x24\xac\x0f\x97\x27\x68\xdd\x90\x93\x2f\x1f\x98\xc9\x4d\x10\x9d\x64\x6c\x70\xf8\xb8\xe5\x8a\x2f\x40\xd2\x87\x9b\x1e\x8d\xf3\x60\x53\xda\xc5\xff\x4f\x33\x8f\xbb\x7a\x92\x16\x21\x58\x7e\x92\x4f\x48\x52\x30\xb0\x6e\x26\xc5\x1b\x74\x0b\xd2\x78\xd3\x18\x63\xb2\xda\x83\x7d\xa2\xf1\xfb\x71\x1a\xfe\x7d\xf5\x26\x79\x8c\x61\xe4\x78\x23\xc7\x20\xb8\xc7\xf7\x1e\x12\x67\xa3\x0d\x25\xf9\x7f\x0c\xd3\x04\x30\x9d\xcf\x7f\xca\x10\x74\xbc\xa7\xe0\xf2\x73\x28\x3f\xde\xe3\x2d\x1b\xe7\x5b\x86\xcf\x71\xa0\x6e\x3e\xe1\xb6\xd1\xe4\x54\x3b\xff\x56\x84\xfb\x96\xd5\x21\x01\x0a\xd7\x68\x79\xed\x36\x69\x0b\x8f\x4d\xd4\x46\x03\x72\x9b\xb6\xa1\xb9\x9f\x1f\x0e\xd6\xab\xe8\xe0\x91\x0b\x30\x56\x75\x33\x20\x20\xe0\x93\x7e\x5a\xf8\xe9\x4a\xc1\x49\xfa\x9a\x83\xe9\x7f\x83\xb4\xe0\x8c\x79\xab\x07\xc8\xe0\xae\x4c\x4e\xb7\x72\xbf\xe5\x06\xdd\x96\xa6\x7d\x99\x40\x39\x22\xbd\x9e\x34\xf5\xce\xdc\xaf\xfb\x29\x23\xe2\xba\x33\x59\x1e\x20\xfe\xb7\x07\xa5\x0a\x6e\xd3\x3f\x63\x6e\xcf\xc5\xe7\xea\x6d\xa5\xba\x80\xb5\x0c\x8b\x74\x06\xc8\x3d\x0f\xf9\x6b\xd0\x01\xc3\x14\x7b\x55\x56\x31\xa6\x55\xdd\x58\xc4\x71\x89\xdf\x1e\xf4\x77\x64\xfa\xf2\x5e\x0d\x72\x9d\xe6\x04\xdb\xb8\xba\xd7\xb6\x23\xbc\x20\xfb\xc1\xd9\x44\x57\x1d\x08\xf7\xe9\x7d\x72\x5b\x10\x2e\xf4\x1c\x5a\xe8\x29\xeb\x18\x39\x74\xb1\x5d\x97\x40\x2d\x3b\xc4\x08\xb8\x61\xe9\xec\xfa\x02\x0b\x01\xa2\xca\x86\xc0\xbe\xce\xed\xf8\x09\x0b\x54\x4f\xac\xf7\xa3\xce\x50\xd5\xf7\xa9\x38\x64\x70\x92\x49\x3f\x7b\x6e\x3b\x1a\x58\x4a\x02\x58\xa5\x13\x8f\x66\xce\xbf\x8c\x71\x0a\x0b\xd0\xe7\xdc\x07\xdb\x94\x28\xc8\xeb\xcf\xc6\x1a\x3a\xd4\xd4\xc9\xf0\x81\x66\x12\xaa\x2a\x9f\xd2\x87\xc0\xa0\x31\x8e\x49\x3e\x22\x7f\xe7\x0c\xe3\x19\xdf\xc2\xfa\x4d\xb7\x7f\xa3\xd6\xcd\xa0\x42\x80\x35\xfa\xe1\x34\x6d\x9f\xac\x7a\x2d\x23\x34\x6e\xa5\x26\x70\x53\x85\x22\x75\x09\x83\xee\x56\x0d\xe1\xe6\x1f\x8a\x7f\xb7\x16\xff\x6f\x61\x94\x9e\xf4\x62\x25\xcc\x84\x32\x90\x0b\xf1\x81\x4f\xe8\x15\xac\x59\xff\x9e\x58\x7f\x8f\xe9\x60\x4e\xdf\x18\x96\x5f\x06\xa0\xc2\xec\x68\x3f\x29\x94\x32\xd2\x30\x30\x04\x08\xf4\x9e\x1a\xaa\x67\x5c\x01\x86\x13\x1e\xdd\x76\xf8\x97\x02\xc9\x21\x71\x44\xc6\xb6\x56\x62\x7a\x51\x48\x76\xde\xbf\xef\x30\xd1\x9f\x65\x4c\x2e\xf9\xe2\xa3\x65\xc7\x72\xf3\x1e\xd8\xeb\x1a\x6e\x7a\xd3\x0f\x1a\xe0\x58\x19\x83\xcf\x01\x5c\x6c\x78\xec\x3a\x6e\x5d\x91\x76\xf0\xbc\x9f\x8f\x50\xa8\x0b\x90\x50\x69\xe5\x42\xcd\x94\x81\xc6\x24\x5a\x29\x01\x3b\xf8\x67\xcb\x31\xf9\x2e\x98\x8c\x70\xe4\x0f\x73\x97\x20\xae\x2d\x3e\xb9\x53\x16\x53\x67\xb0\x7a\x43\x1c\xf0\x7b\x9a\x65\x49\x0c\x61\x03\x22\x12\x1b\xa1\x64\x30\x6b\xf2\x6c\x40\xad\x98\x42\x45\xe0\x0e\xdf\x0c\x6f\x70\x42\x6a\x01\x2e\x3f\xec\x3e\xd3\xd6\x2f\x60\x77\xcf\x35\xac\x0b\x69\x3e\x5a\xd5\x3a\xbc\x09\x61\xc8\x5d\xd5\x02\x0c\xad\x7c\x6f\x32\x92\x06\x89\xb2\x0c\x6e\xbb\x23\xd1\xbd\x21\x32\xbf\xc0\x20\x7e\x06\xcf\x1f\x43\x99\xb8\x18\xcc\x3a\
x06\x11\xdb\xc6\x08\x2e\x30\x46\xfc\xa2\x09\x71\xcb\x77\xe7\xfe\x0d\x31\x85\x01\xf0\x93\x4d\x58\xcf\xdf\x11\xca\xa5\xc9\x64\x48\x25\x74\xfd\x03\x66\x19\x85\x50\x82\xa0\x86\xb3\x83\x2c\xf4\xa0\x82\x50\xb5\xfd\x30\x48\xe0\xe0\x1b\xd2\x04\x1f\xc9\x1c\x84\xeb\x7a\xfd\x2c\xd2\x17\xe8\x66\xe0\xef\x30\x0c\xbf\xa4\x53\xc0\x74\x48\xaa\x24\x04\xf0\xf2\xea\xea\x10\xf5\x54\x78\x17\xdc\xa7\x38\x50\x93\x3a\x81\x54\xef\x0c\x74\x89\x89\xba\x87\x50\xc3\x1e\x3a\x0a\xa2\xff\x5b\x69\x5f\x2b\x57\x45\x6e\x18\x90\xce\x23\xe9\xde\x21\x26\x60\x06\xb1\xda\xc3\x03\xbe\x5a\x37\x81\x6f\x5a\x9d\xd3\xe8\xe5\x53\x06\x24\x43\xd5\x14\xfb\x10\x8e\x33\x43\xb5\x6e\x74\xdd\x29\xb9\x40\x21\x80\xda\x15\x1b\x94\x03\xe9\x52\x0a\xdc\x7a\x05\xb5\x94\x28\x85\x02\xc6\xca\x40\x9f\x95\x5b\xfc\x0b\xef\x35\xef\x00\x30\x9c\x84\x80\xf5\x56\x89\x13\x39\xfc\xac\x57\x6d\x3c\xca\xff\x28\x29\x04\x32\x0c\xf1\xbe\x3b\x19\xf2\xce\x32\x38\x2f\xcf\x78\x3f\x3c\x49\x99\xc7\x11\xd7\x6f\x98\x91\x78\x4e\xda\x3d\x12\x2d\x63\xba\x7a\xc8\xaf\x50\xb9\x70\x7d\x86\xbe\x1a\x75\xa7\x34\x16\x2e\x25\x8b\x43\x83\x41\x65\x8f\xa3\xba\x1c\x83\xbc\xc9\x35\x99\x01\xc1\x85\x10\x6d\x30\x24\xb5\x87\x5d\xd3\xa5\x86\xf8\xf6\x59\xc9\xa9\x47\x43\xfa\xf1\x19\x93\xe7\x7c\x29\x40\xa0\x90\x78\x02\x75\x8c\x07\x28\xa3\xb3\xb6\x88\x69\x3d\x07\x5c\x23\x62\x3a\xda\x71\x6e\x0c\x56\x92\xae\x81\x5d\x1f\x94\x3c\x5e\x92\xc6\x49\x86\xbc\xd7\x25\xc4\x03\xa6\xaf\x12\x00\x06\x3c\x40\x59\x90\x5b\x97\x4c\x8e\xe7\x4d\x9a\x07\x67\x7c\x0b\xe9\xe4\x1c\xeb\x90\xba\x30\x09\x94\x14\xa8\xd7\xd0\xe2\xed\xa4\xbb\x4e\x98\xb4\x93\x7c\x52\x03\x52\x1f\xc5\x9a\xa2\x5d\x7b\x2d\xf3\xcd\x91\x92\xbb\xa0\xb3\x9c\x52\x38\x99\x87\xb5\x00\x90\x76\x48\xf2\x55\xdf\x1f\x45\x57\x85\x04\x11\x6e\xb4\x6e\xe1\x80\x4e\xc5\x0b\x1c\xe7\x0a\x39\x55\xa8\x33\xe8\xeb\x02\xe3\x73\x95\x3c\x83\x3e\xf8\x54\xa4\x3b\xa1\x20\xdb\x90\x74\xcc\x89\x53\x39\x06\xc2\xdb\x7d\xe2\x33\xe3\xdb\x4c\xa4\xaf\x0f\x8a\x0e\xff\xc1\x37\xfd\x1c\xbd\xad\x4a\xaa\x57\xd2\x7d\x46\x0a\x5a\xba\x7b\xac\x2c\x85\xfe\x6b\x02\x13\x4c\xc6\x15\xe9\x5c\xde\xa1\xd1\x00\x73\x2f\x07\xad\xae\x01\xa9\x48\xa0\x4e\x0b\xa5\x92\x74\x0e\x1b\xb6\xb0\xe1\x4e\xfb\xa5\x14\x7d\xc8\x82\x69\x15\x38\x3a\x54\x5c\x60\x7a\x82\x12\x6e\x6b\x87\x04\x48\x11\xe2\x0e\x53\x4b\x7e\x08\xa4\xed\x51\x6c\x21\xcb\x0f\x3d\x0e\x0e\x3e\x14\xff\xb5\x52\x6b\xcd\xca\x3a\xc4\xef\x84\xe5\xaa\x87\xd2\x12\xb2\xd2\x46\x66\xc9\xd8\x6f\x3c\x98\x51\x8c\x0e\xc0\xc6\x6c\x7e\x0c\x54\x5c\x07\x03\x66\x02\x3c\xe0\x72\xf0\xf7\xf5\x80\x43\x2a\x7c\x63\xd2\xf3\xa3\xf6\x06\x2c\x20\xf9\x74\x62\x50\xe3\x69\x73\x00\x7a\x63\xd2\x48\x52\x9b\xdd\x94\x97\xf4\x2a\xac\x1d\x51\x99\xe6\x24\x1a\x3d\x12\x0a\x71\x0d\x5f\xa7\xbd\x37\x47\xd9\x9b\xa8\xef\xad\xa8\xdb\xf7\xde\x24\xfe\xf4\xbd\xd7\x9b\xb6\x16\xdf\x7b\xf0\xd7\xe3\x5b\xe5\x86\xfb\xb5\x16\x4e\xdd\x8c\x6f\xe9\x33\xfd\x5f\x97\x81\x97\xe8\x02\x66\x3f\xa5\xca\xe0\x71\x85\x6f\xfe\x79\xad\xdf\xa1\xc7\xb0\x85\x34\xf2\x17\xf1\x45\x3f\xeb\xe7\x3e\x9d\xf0\xfe\xb9\xee\xc4\xaa\xef\xc3\x2e\xc7\x07\x06\x5c\xba\xc8\x4f\x96\x30\xe0\x27\x9d\x1c\x5b\x7d\x48\x1c\xfd\x28\x53\xe9\xe3\x9a\xc4\xd4\x5e\x18\xbb\x5e\xcf\xfb\xc3\xdc\x94\x37\xd3\x56\xdf\x61\x33\xe8\x31\x18\x11\xf6\xc1\x71\x67\x3f\x04\x30\x49\x4e\x21\xd2\x74\xdf\x1e\x0f\xf7\xa8\xfe\xdb\x55\x79\x9f\x5d\x8e\x61\x9a\x53\xe8\x31\x4c\x7d\xeb\x55\xea\x3e\xef\xfa\xf7\xcb\x9b\xb7\x85\x51\xc6\x0e\xef\xca\x80\xdf\x7b\xad\x53\x57\x6d\x7f\x97\xff\x5d\x99\x2a\x05\x7e\x59\xe1\xbb\x6e\x4d\x91\x73\xf9\x23\xc0\xf5\xee\x3a\x01\xf0\x89\x69\x5d\xad\xbb\x33\x0a\xdf\x65\x97\x39\xfb\x9e\x7b\x02\xd2\xdb\x93\xbd\x1d\xac\xd7\xa3\x4a\xcb\x01\x73\x8a\x2d\xcc\xda\xd7\x55\x66\xed
\x2b\xf3\x34\xdc\xe8\x9d\xd7\xb0\x30\x4a\xf4\xee\xc4\x54\x20\x2f\x7e\x96\xe8\x85\x26\x79\x0e\x35\x3b\x09\x5d\x85\x30\xac\x9f\xdd\xb1\x5e\x3f\xca\x16\x42\x2a\x01\x50\xb1\x3c\x60\x0c\x73\x8e\xda\xb7\xd2\xd9\x80\x7c\xfc\xd9\xd7\x0f\x85\xaa\x09\x73\x0e\xf5\x05\x8f\x51\xf2\x01\x64\xa7\xd7\x2e\x82\x49\xce\xf2\xed\xfa\x27\xdf\xea\x6d\xd7\x12\x36\xf5\x98\x38\x95\x60\x42\x34\xf7\x5d\x15\xf6\x2d\x15\xab\x37\x9a\xdf\xa2\x88\xed\x3b\x4f\x59\xfa\xa1\x6f\x24\x1e\xf9\x35\x06\xfd\xf7\x6d\x15\x96\xd4\xd0\xdf\x64\xf0\x4b\xaa\x61\xbd\x64\xa0\xef\x4f\xec\xc4\xf4\x23\x97\x86\x48\x60\x7b\x5b\x5d\xe0\x83\x5c\x00\xa6\x49\xbd\x15\x99\x7f\xa7\x42\xc7\x71\x03\x30\xb0\xe5\x1b\x1d\xea\x58\x06\x7e\x6f\x4c\x7a\x8a\x2c\x01\x27\x56\xe7\xf9\xdf\x9f\xf8\x84\x5e\x37\x87\xe0\xaf\x7b\x8d\x15\xfd\xf3\x7a\x61\x1a\xba\x11\xac\x32\x78\x03\xec\x52\xcc\xaf\x6a\x75\xe8\x33\xdb\x98\xe7\xe9\x4b\x21\x97\xf3\x5a\xe5\x4d\x2f\xfd\x99\x57\xb9\x34\xb0\x85\x30\x04\xe9\x93\xaf\xa2\x7c\x55\x03\x89\xcd\xd2\xab\x50\x8d\xe8\x55\xc2\x4b\xe5\xe5\xd2\x21\xae\x34\x81\x6e\xd0\x21\xaa\x9d\x2f\x84\x41\xbd\x0e\xa2\x13\xc9\xbc\x1d\xb3\xf2\x0d\x34\x3a\x2e\xd8\x5b\xef\xdc\x0a\x64\x7d\x51\x84\x21\xef\x2c\x34\xe3\xfe\x12\x3f\x04\xde\x75\xaf\x10\x5b\xa8\x2d\x44\x22\xf6\xd5\x5b\x36\xe8\x39\xe4\x2e\xd8\x90\xdb\xbb\x72\x1e\x51\x52\x0c\x88\x34\xc7\x92\x6d\x78\xbf\x7e\x9b\xee\x43\x3d\x03\xeb\x5b\xa0\xd5\x3c\xc1\xb0\x46\x8a\x29\x24\x1a\xba\xf3\x7d\x52\xed\x8c\x80\x96\xd4\x2e\xd2\x1a\x52\x0e\x2b\x3d\x3b\x5e\x29\x56\x48\xda\x22\x69\x2c\x80\x69\x53\x5a\xcf\x43\xe8\xd3\xef\xc4\xdd\xac\x07\xe1\x6d\xc6\xbd\xeb\x79\x42\x3a\x56\x0e\xf5\x4e\x8f\xab\x8a\xb4\xde\x91\x7a\x71\x89\x98\x7d\x41\x52\x31\x8c\xeb\x93\x62\xa9\x36\xda\xdb\x95\x62\x60\x6d\xae\xdf\x41\x7f\xd2\x61\x53\xef\x95\x31\x64\x1d\xe4\x60\x1f\xa2\x72\x86\x24\xa5\x60\x63\x29\xfd\x6b\x70\x0d\x69\x00\xd6\xd8\x17\xdc\x77\xf3\x3b\xb0\x07\x55\x31\xff\xc5\xdd\x72\xa8\xde\xc3\xc9\x3e\x2b\x5f\xef\xf2\x71\x52\x23\xfa\x1f\x8b\x9b\x8a\x11\x6a\x86\xa0\xb6\x17\x1b\xa8\x9a\xbd\xac\x3e\x5e\x43\x2a\xc2\xde\x6c\xbc\x5b\x27\x97\xf6\x70\x8f\xb3\x82\x9b\xc2\xa3\xb5\x0a\xa9\xcd\xbf\x20\xbc\xc3\x1d\x38\xad\x07\xb2\xb9\xa7\x45\x5d\x52\xe4\xb9\x68\xd9\x7b\x39\x25\xbb\x7f\x96\x57\x39\x10\xf5\x77\xc1\x07\x9f\x9e\xea\xb6\xf0\x98\xc0\xfb\x10\x70\x96\xe2\x9a\x3f\xcf\xee\x8f\x3f\x93\xb3\x27\x8f\xab\xcb\xe7\x34\xd2\xf4\x5f\xe8\x4e\xe8\x18\x33\x02\xcd\xd2\x95\x80\xcf\xbe\xce\x39\xc7\x25\x79\x62\x86\x03\x48\xa2\xf0\x48\x79\x08\x33\x7c\xf5\x63\x61\x1d\xc9\xff\xe5\x37\x27\x8b\xae\x7c\x53\x7e\xc9\xc6\x85\xba\x69\x98\x0f\x1b\x42\x29\x23\x47\x08\xe6\x82\x88\xb3\xbe\xe2\x4b\x0f\x8d\x5d\x8c\xeb\x37\xfb\x72\xd9\x18\x2c\x32\xa8\xae\x4c\x83\xe5\x2b\x5d\xfe\x29\x5f\xd6\xe5\x55\x34\xfc\x52\x0f\xe0\x0a\x95\x57\x98\xb0\x8b\x34\x72\x59\x27\xc8\xbb\x78\x97\x5c\x89\xaf\xa4\xae\xc7\x95\xfc\xeb\x3a\xbf\x9f\x69\xed\x6a\x13\xdf\xda\x55\x28\xaa\x34\x22\xac\xba\x9e\x18\x65\x30\x18\x9e\xd2\x56\xa1\x3b\x5f\xf7\x9d\x64\x63\xfd\x0e\xab\x8f\xf3\x96\xb1\xcb\x79\x37\xa5\x1d\x9c\xe4\x10\x42\xc2\x94\x9c\x5e\xb8\x14\xef\x0c\xde\xbb\x04\x05\xc1\xd6\xdd\xe9\x4e\xda\x83\x42\x94\xa2\x88\xd4\x7b\x3f\x1e\x51\xe9\x9f\xf7\x40\x7d\x85\xcf\x28\xf7\xfd\x89\x26\x34\x70\xdf\x27\x43\xdb\x6d\xf8\xbb\x0e\x05\x39\x31\xee\xb7\x1f\x4d\x98\xcf\xc8\x8d\xe4\x1c\xe3\x63\xa1\x64\x44\x50\x75\x4f\x11\x9a\x2f\x1b\x1b\xd9\xfd\xa3\x1e\xc2\x05\x36\xd7\xbd\x08\x4e\x29\xcc\xf5\x8f\x30\xea\xaf\x21\xf0\x60\xe8\x51\x08\x72\x88\x42\x54\xb0\xcd\xc2\x4f\xdc\x25\x2c\x74\x75\x05\x42\x39\x55\x38\xec\xd9\x2f\x10\x0c\x3b\x67\xff\x2a\xcf\xd0\xaa\xc0\xa0\xb6\xc8\xfc\x1f\x33\xed\xaa\xdc\xac\xfa\xba\xb5\x45\xbe\xbb\x9d\xff\x2d\x6
1\x85\x2b\x1f\x57\xac\x3b\xf6\xfe\xac\x5d\x0d\x23\x2e\x23\x44\xbb\x69\xfb\x1f\xbe\xe6\x57\x24\xf5\xc0\xef\x47\x02\x1b\x99\x29\xa3\xf6\x9e\x7c\xe2\x7f\xe9\xa6\x12\xad\x21\x5d\x5b\x3a\xe9\x63\x7f\x71\x06\x09\x65\x1c\x1f\xc9\x65\xbc\x75\xc8\xfb\x95\x24\xca\xf5\xcf\x84\xb6\x2d\x6c\x1c\x5e\x19\xa2\x3e\xc0\x85\x86\x39\x9e\xa7\x1a\x69\x03\xe3\xcc\x68\xda\xf9\x7f\x77\x9a\x1a\x09\x5e\xa7\x53\x52\xc3\x9f\x08\xca\x1a\x4f\xaa\x5f\xc0\x72\x60\xd3\x3b\x7d\x74\x59\x86\x23\xf1\x4c\xf5\x55\xc7\x4a\xc1\x93\x7a\x23\x4a\x90\x08\xc9\x5a\x3a\x91\x7e\xda\xf4\xc6\x56\x19\x91\xd7\x9d\xa3\xf9\xd3\xfa\xb4\x94\xc5\x60\x24\xe9\xdc\x31\xff\xe1\xff\xd9\x14\x17\x82\xce\x05\xe7\x04\x5d\xf1\x42\xda\x17\x59\x9c\x6d\x48\x81\xfe\xd4\x2d\xea\x4b\x1e\x45\xa7\x7b\xf3\x6a\xb5\xf5\x8a\x9b\x46\xb4\x86\x17\xde\x8a\x45\xa3\x3c\xb7\xfb\xbf\x42\x91\x22\x44\x1f\x4f\xb0\x1f\x59\xc6\x0a\x58\x63\xee\x1d\x25\xc1\xcd\x9e\x02\x0f\xb3\x4a\x41\xe8\x2c\x91\x2f\x67\x43\xd4\x07\xdb\xdb\xb3\x4c\x22\xe2\xcf\x37\x7e\xcc\x4f\xda\x64\x8e\xc7\x6f\x40\xd2\x24\x33\x0d\x4a\x50\x26\x99\xfd\x17\x59\x32\xbb\x30\x05\x4b\x79\xfb\x43\x9f\xec\x24\x9a\xe2\xb9\xcc\xd1\xf5\x35\xb4\xbe\x24\x93\x81\xd7\x92\x0f\x79\x06\x6b\x80\xf1\x7d\x48\x5a\x8c\xad\x4a\x34\x23\x5d\xda\xc0\x9d\x48\x42\x5f\x65\x86\x00\xaa\x84\x02\xe6\xf0\xdd\x3d\xb3\xf4\x55\x0d\x3c\x92\x7e\x8b\x55\xdb\x92\x67\x41\x55\xcd\xc8\x06\xa0\x17\x87\x0d\xf5\x54\xf1\x59\xfd\x45\x71\x8a\xf0\xb4\xc7\x00\x94\x1b\xaf\x8c\x66\x9e\x90\xcc\x6f\x8b\x10\x8b\xf2\x3c\x43\x12\x83\xda\xb2\x67\x2e\xcc\xaf\x39\xd1\x6d\x56\xf5\x90\xbb\x0b\xf9\xe9\x93\xf3\xe7\x15\xc3\xd0\xae\x89\x7e\xe2\x73\x94\xca\x45\xe5\x2c\x27\x08\x41\x92\x46\x33\xf8\xe4\x80\xc9\x10\xc9\xfb\x90\x65\x95\x67\x15\xe0\x3a\xd5\x30\xda\x7f\xbb\x50\x60\xe0\xa8\xb0\x20\x97\xd1\x11\x33\xab\x0d\xdd\x76\x43\xbb\x5f\x18\x96\x31\x7b\x4c\xd8\x6a\xb8\xfe\x5b\x7d\xfb\x8a\x0d\x5a\x19\x3a\xa4\xcb\x9f\x23\xe8\x61\xd7\x33\xb6\x4f\x3f\xad\x8d\xfa\x8f\x76\x45\xa5\x08\xbd\xa3\x55\x67\x41\x3a\xd4\x0f\x7b\x72\xf6\xfe\x5b\x22\x82\xc9\x6f\x1c\x6f\x63\x7a\x80\xe7\xe8\xf1\x01\x43\x4e\x03\xfd\x8c\x78\x53\x31\x77\x88\xac\xf4\x90\x1d\xc1\xb2\x0e\x14\x13\xe1\x80\x43\x21\xc8\x21\xa8\xa1\xce\xac\xa3\xe1\x87\x08\xd2\xbd\xfd\xa3\xc1\x61\xdd\xe4\x2e\xc3\x81\xd6\xd7\xa1\x55\x26\x12\xfc\x71\x98\x45\x6b\x3d\xd1\x43\xf3\xd2\x6a\x0e\xe9\x66\xdc\xd2\x07\xf1\xe4\x2d\xfe\xb3\x61\x3a\x9a\x8d\x2b\x72\xdc\xbd\xe4\x43\x42\x82\x8b\xe6\xa1\x1c\x6b\x23\xa0\xe8\x91\x9d\xae\x61\x4a\x30\xbd\x58\x51\xbb\xc0\x5a\x5a\x03\x0e\x1d\x82\x68\x39\x6a\x13\xef\x5f\x14\x1e\xc2\xde\x72\x2a\x47\x1b\x3c\x43\x96\xf0\x84\xa6\xfc\x49\x90\xf4\xed\xc1\x14\x9e\x25\x7c\x23\x75\xfd\x6a\x02\x92\x64\x5f\x30\x12\x61\x43\x91\xb2\xc6\xda\x67\x9a\xca\xff\x79\x39\x50\x4d\x12\xba\x1e\x2b\xe9\xee\xed\xf3\xe4\x34\x22\xa6\x43\xd8\x37\x69\xf7\x3e\x52\xa8\x83\x4d\x17\x4a\x3f\x7f\xb3\x9a\x7b\x64\xd6\x6a\xbb\xd9\x4b\xb6\x72\xb8\x3d\x60\xd8\xee\x69\xe5\xe9\xda\xcd\x0f\xd2\x85\x3e\xe8\xca\x6f\xdd\x8a\x93\xe3\x63\x9f\x46\x61\xf3\xd3\x14\x28\x6f\x77\x27\x81\xb4\x5b\x8d\x64\x43\x3d\x28\x69\x0b\xa4\x42\xef\xd4\xf6\xf8\xf6\x26\xd7\x70\x64\x74\x20\xcd\x9c\x5b\x56\x1a\x86\x34\x70\x2d\xd2\x4b\xa9\x7b\xb6\x34\xa5\xb8\xc8\x6a\x63\x07\x8a\x67\xc0\x81\x64\x95\x98\x88\x75\x10\xaa\x34\x43\x4e\x85\x0c\x0d\x49\xee\xa2\x9e\x4a\x77\x04\xda\x31\xd7\x4c\x4d\x90\xa6\x73\xad\xeb\x27\x4e\x2f\xc1\xc8\x56\xff\x32\x77\x2d\xf2\x0a\x6d\x95\x29\xfa\xad\x3e\x25\x60\x42\xa3\x28\xfb\xd2\x6d\x54\xa2\x13\x4e\x55\x35\x92\xc1\xde\x07\x37\x9c\xa2\x94\x28\x09\x69\x40\x10\x25\x11\xdb\xab\xd5\x01\xfd\x5c\xef\x50\x37\x97\xb2\x8f\x7d\x93\x34\x45\xec\x17\x2a\xc6\xef\x72\x77\x11\x20\xc5\x42\xbe\x
2a\xd7\xbb\x4a\x0a\x65\x38\xca\x3f\x5a\x57\x2d\x9a\xbc\x16\x5d\x27\x9f\x1f\x57\x57\xae\x05\xc3\x12\x8a\x26\x5e\x4c\xfa\x39\x76\x45\x7a\x94\x86\xfa\xb1\x34\xcb\x61\xdd\x59\xbf\x0d\xb3\xfa\x3d\x36\xa0\xe5\x5e\x9a\xa8\x6a\x99\x3c\xec\xe6\xb4\x0d\xdd\x43\xb9\xa3\x34\xf7\x4a\x51\xda\x32\xc4\x4b\x7e\x59\xc3\x58\x9a\xc9\x6e\x02\xd4\x75\xe7\xf2\x38\xf7\x8f\xf4\x5e\xf2\x2c\x2a\xb0\xfc\xf1\x60\xd0\xa5\x90\x85\x8d\xee\x1e\xcc\x7e\x6c\xa1\xf0\x03\x61\x13\xd2\xda\x9a\x4b\x2d\x37\xa1\x50\x44\xc1\x04\x51\xa5\xe4\x8b\xe3\xc4\x0d\x30\xc9\x5e\x5f\x39\x16\xf6\x8e\x02\xe8\xc7\x51\xd5\x18\xfe\x25\x7e\x37\x5c\x76\x59\x3f\x36\x5d\x4a\x7e\x6c\x52\xe0\x69\xe9\x74\x11\x1d\xf4\x53\x78\xaf\x9c\xc9\x45\xd9\x94\xad\xf0\x00\xdb\x44\x99\x9e\x14\x75\x66\x4b\xe0\x52\xe4\x8d\x5a\x26\x4f\xda\x68\x34\xd5\x19\x90\x90\xe0\x97\xca\x0c\x59\xd0\xb2\xf4\x06\x52\xae\x9e\x7b\x0d\x10\x08\x66\x7f\x12\x81\x66\xce\x6c\xff\x9f\x4f\x13\xb5\x8d\x52\xef\xb6\xe4\x3f\xe1\xff\xd0\x4f\x16\x61\xd2\xb0\xc6\x31\x86\x28\x4c\xa1\x68\x3d\x84\x52\xfc\x02\x0e\x5a\x4c\x5b\x5f\xa0\x70\x46\xfe\xb8\x5d\x54\x82\x5b\x59\x8d\xe4\xaf\xbc\x01\xf1\x12\x0e\x17\xfe\xa9\x44\x24\x18\x41\x2d\x45\xea\x21\x4d\xfe\xb8\x47\x88\x08\x1f\x78\x55\x62\xcf\xf4\x5e\x59\x6a\x30\x79\x54\xce\x63\x1f\x4e\xde\xf7\x43\xd5\x97\xfe\x63\x1d\x9f\x26\x05\x95\xfc\x1f\xf5\x52\x62\xa7\x26\x3d\x8d\x1a\x59\xf2\x47\xa5\xdf\xf0\x01\x03\x8b\x26\xdf\x74\x5f\x50\xc9\x62\x84\xdf\x54\xee\x72\x2a\x5b\xde\xb9\x13\xf4\x89\x6f\x9e\x9d\x04\x91\xc3\xda\x65\x3e\x84\x03\x41\x2e\xcd\x65\x1d\x56\x25\x44\x4c\xe4\x40\xab\xeb\xbb\xac\x32\xc0\x3e\xdc\x9e\x97\x3b\xad\xe0\x00\xc7\xc7\x6b\x4b\x45\x6b\x23\x56\x7a\xe0\x56\x8b\x64\x74\x94\x9f\x9d\xbc\x61\xe4\x46\x50\x58\x06\xf4\xb6\x90\x6c\xb1\xc3\x51\x9c\x65\xe7\x68\xd9\x00\x95\x2d\x01\x3c\xe0\x6f\x40\xb7\xa7\x88\x1b\x71\x78\x96\x28\xff\x07\xc3\x57\x9e\x2c\xfa\x54\x47\xd1\x0f\x2c\x5d\x6b\xa5\x30\x6d\xc5\xf5\x5d\xf8\x5c\x91\x50\xcb\xad\x1e\x1c\x1f\x1e\xf9\xfb\x65\xbe\x04\xd5\x5b\xfe\x95\x6f\x89\x93\x65\xc8\x53\x3a\xb0\xae\x80\x02\x2a\xd6\x55\x56\xbe\x07\x10\xeb\x17\x43\x22\x49\x02\xed\x02\x4c\xa2\x02\x45\x75\x0d\x95\x97\x36\x96\xc7\xcd\xb3\x1f\x52\x57\x81\xf2\xcb\xff\x02\xf0\x65\xb4\xea\x48\x71\x98\x23\x6f\x1c\xb9\x1c\xd9\xab\x66\x2b\xd4\x71\x36\x04\xe5\xd2\xcf\x7f\x52\x8a\xd1\x46\x34\xf0\x05\xc8\x50\x00\xe1\x31\xd6\xb2\xe8\xfc\x7a\xfa\x79\x88\x11\xc2\xa1\x98\xf3\x61\xef\xb3\x22\xfa\x87\xe7\xea\xfb\xf7\x79\x84\x67\x9b\x01\x7c\x8b\x39\x04\x64\xe4\xc4\x6e\x40\xdf\x48\x3a\x95\x2c\x71\x58\xef\x97\xed\x3e\x84\x07\x02\xf4\x35\xca\xbe\x70\xc4\x1d\x1b\x5f\x39\x65\x92\x1c\xe9\x40\x8e\x83\x24\x64\x90\x1a\x7a\x49\x6f\x06\x59\x7a\xbb\xb6\xd9\xa5\x5f\xb3\x4f\xba\x8c\x0d\x4f\x3a\x13\xda\x2e\xb7\x74\x69\x36\x9d\x63\xbd\x4f\xee\xc8\xd4\x90\x43\xd5\xd2\x11\xd4\xf1\x23\xfd\xe3\xd5\x5f\xbf\xd2\x96\xb2\x5a\x2a\x22\x5e\xa8\x7d\x43\x9a\xb3\xc2\xf0\xc1\x5f\x0b\xc4\x26\x9b\xb4\x53\xe0\x0e\xee\x5b\x9e\x19\x5c\x0d\x57\x74\x39\x39\xc0\x82\x9e\xaa\x57\x29\xb5\x6d\x12\xec\xaa\xcd\x9e\x49\x3f\x61\xb3\xaa\xb9\xb0\x8e\xb1\x77\x8c\xf5\xb4\x81\xae\x73\x63\x75\xa6\x78\xec\x15\xc9\xc7\x34\x34\xb1\x0f\x09\x63\x4b\xca\xd8\x48\x38\x66\xab\xf3\xbd\x33\x75\x13\x2d\xc9\xf6\xdf\x4f\x8e\xc6\xdf\x5d\xab\x29\x5e\xc8\xed\xf6\x43\xf1\x1b\xb3\x2f\x37\x12\x91\xeb\xc2\xf7\x0f\xba\x33\xbf\xd9\x85\xea\x33\x10\x14\xa1\x51\x87\xa2\x32\x4d\xb2\xda\x6d\x3c\x25\x10\xf3\xbd\xa9\x17\xc3\xf3\xec\xbf\x51\xb3\xd5\xf5\x95\x93\x09\x7b\xed\x74\x5e\xc4\xa7\xa9\x9c\xb6\xd7\xda\x18\x9f\xdb\xab\x33\x3a\x95\x92\xb1\x87\x71\xd6\xfe\x57\x17\x74\x44\xf6\xf2\x65\xfc\x77\x2f\x6f\x4a\xc4\x78\x25\xbb\xff\x78\xa0\x7b\xd9\xfc\xed\x46\xfb\xa4\x00\xe9\
x5e\x34\x7e\x71\x0d\x99\x4a\x30\xb3\xa7\xba\x17\x4f\x70\x00\x42\xda\xf8\x7b\x27\x62\xb7\xd2\x00\x83\xda\x3b\x06\x4c\xfa\x91\xf9\x16\x51\xc4\x90\xe7\x80\xbf\x65\xdd\x6a\xcd\x90\xa6\x86\x76\x75\x34\xf6\xfc\xa7\x8b\xca\x88\x38\xf0\x5f\x1c\x3b\x6a\x41\xa2\x2b\xfb\x8f\x06\xba\xa7\x97\xcf\x4c\xed\xfc\x22\x91\x01\xff\xd1\x3f\x76\xa6\xe3\x40\x78\x66\xe3\x5e\xcc\x6a\xb0\x92\x52\x37\x53\x68\x02\x5b\xe5\x3f\xa6\xd0\xa4\x89\x0a\xc2\xa0\xe2\x19\x86\x64\x5d\x0d\xa9\x9a\x4b\xd4\x4f\xc8\xd4\x50\xc8\xc1\xde\x9e\x77\x4c\xb3\x41\xb4\x46\xbb\x93\x8c\x0e\x77\x9d\x3e\x1a\xb3\x05\xcc\x57\xed\x84\x63\x92\x16\xc7\xed\xcc\x2b\x6a\xe1\x8c\x74\xbf\xd8\x90\x90\xa9\x0e\xf1\x86\x1c\x4c\x8a\xcb\x30\x85\x7b\xab\xed\x95\xb8\x02\xd2\x42\xfe\x1e\x6e\x54\x98\x85\xbb\xf8\xc9\x32\xb4\x36\xaa\xbc\xe0\x91\x7c\x21\xe7\x86\x2d\xb4\x10\x00\x42\xab\x9d\x52\x38\xfa\xb7\x4c\x4e\x37\xbc\x69\xa2\x4a\xb9\xe7\xca\x5f\x0d\xf4\xa4\x48\x4c\xdd\xaf\xf8\x6f\x24\xe5\x1a\xec\x57\xbe\xd3\x0e\x62\x43\x1e\x6a\x12\xd2\x39\x66\x6e\x91\x67\x26\xa1\x39\x92\x72\x45\x9d\xc0\x29\x8a\xcb\x9a\x12\xde\x06\x62\x26\x8d\x6f\x95\xae\xe5\x5b\xf9\xa4\x59\xd2\x14\x98\x69\xf2\x6d\x4a\xd4\xaa\x6e\x28\x77\x12\xa8\x12\xde\xca\xa6\x35\x9a\xec\x34\x30\xb4\xd0\xeb\x89\x2c\x83\xad\xc0\x90\xfd\x4f\xdf\x83\x2d\x25\xef\xbb\x6d\x59\xbd\xaa\x2d\x5f\xa1\x37\x13\x52\x8c\x86\xa8\x27\x64\xdd\x18\x65\xb9\x00\xfe\x4b\xa7\xb3\x65\x0c\x12\x78\xb7\xb2\xab\x8f\x52\xd0\xa6\x9d\xec\xb8\x6d\x9a\x33\xdf\xa4\xc6\xb0\x41\x5c\x8b\x2b\x56\xc6\xb1\xb6\xb0\xe2\x32\xf0\xe0\x95\x94\xd0\x70\xd8\xf2\x13\x55\x1b\x77\x7c\x86\xc2\x4c\x8e\x8e\xf5\x96\xbb\x46\x8e\x2a\x4b\x7b\xe3\x18\x8a\xdf\xa2\xa7\xbd\xe5\x98\x3e\xdf\xfa\xfe\x4a\xc6\xd8\x6c\xc4\xc3\x29\xdf\x6d\x3c\x22\xe2\xb4\x45\xb6\x2c\xf4\x6f\xac\xca\x3c\x4e\xe1\xa2\x09\x9a\x6d\x7c\xba\xb8\x82\xf4\x69\x9e\x5d\x29\x64\x7c\x32\x8b\x79\x1b\x11\xb8\xf1\x1f\x3f\x7a\x5a\xf5\x46\xd8\x34\x29\xe6\x70\x5c\x42\xfc\xc6\x7a\x09\x64\x94\x41\xcb\xa6\x66\x01\x77\xb3\x8a\xb5\xe9\xb1\xc6\xd6\x3f\x0d\x9f\x91\x86\x9e\xdb\x98\xee\x95\x1f\xce\x88\x77\xca\x0f\x90\xac\x47\x33\x52\xb6\xe4\x73\xdd\xc3\x40\xc9\x15\x8d\x89\x0c\xf0\xd1\xdb\xb0\xa0\xeb\x2b\x19\x9e\x25\xe1\x72\x5d\x1a\x34\x39\x4c\xda\xd4\xba\x10\xa1\xd2\x82\x89\x65\xeb\xf0\xbf\xba\xee\xcd\x15\x93\xa6\x1b\x4c\xb6\x46\x49\xd6\xc4\x8b\x05\x3f\xc1\x96\xf6\x90\xb7\xb1\xce\xd0\x57\x50\xfb\xb8\xe1\x56\x0a\xe1\x9a\x7f\x8f\xd6\x9a\xae\x09\xea\xb7\x3a\x40\x0d\xf1\x1c\x10\xf0\x84\xe8\x5d\x08\xb1\x9b\x8b\x97\x64\x3d\x75\xa9\xde\x64\x0e\x55\xa5\x6f\x23\x35\x9b\x36\xf0\x43\x87\x6d\x4a\xc8\xdf\x24\xf2\xb6\xb7\xc4\xfe\xbf\xd5\x34\x76\x76\x65\x03\xba\xec\xcd\x19\x80\xe5\xb3\xeb\xe0\x3c\x7b\x30\xde\xbd\xba\x75\xa1\x9b\xee\xd6\x5a\x25\xa6\x63\x77\x9a\xdd\xc5\x2d\xf5\x1f\xf8\x37\xce\x8d\x69\x2f\x1b\x06\x54\xbc\x80\xdf\x97\x9d\x9c\xd2\xea\xc8\x67\xfa\x79\x09\x93\x8e\x3c\x7a\x04\x8b\xb0\x54\xd6\xcc\x88\xb7\x3f\x93\x50\xa8\xee\x20\x9d\x99\xeb\x68\x63\xb8\x7e\x86\x90\x25\xb9\x5f\x01\xd8\xf5\x47\xd9\x68\x9e\xb6\xde\x87\xff\xf8\xf5\x5e\xc2\x83\x1e\x32\x36\x6f\x7f\xed\x56\xab\xd4\x3b\xcb\x00\x02\xd7\xa3\xe4\x83\xd6\x6a\x4d\x3b\x14\x61\xaf\xff\xa4\x64\x23\x25\x9c\xea\xc2\xe3\x8e\x34\x85\x04\x5e\xb8\x54\x40\xa0\x5a\xc3\xbd\x77\x89\xd7\x6c\x4a\x0b\x5e\xad\x33\x92\x39\x52\x58\x6b\x59\xf8\xcf\xb9\xd1\xd1\xdf\x90\xf6\x7f\x5c\xa4\x79\x20\x32\xbc\x4a\xca\xc6\x86\xd3\x03\x4f\xdd\x7b\x8c\x48\xba\xf2\x12\xcc\x7e\x29\xcf\x5c\xaa\x4d\x56\x8d\xc8\xdd\xf1\x99\xff\xda\x25\x41\xb3\x4f\x0a\x45\x5a\x77\xf8\x56\x2e\xad\x55\x2a\xb7\x5f\x20\xa4\x51\xff\xa3\xf4\x8c\x5e\x03\x6b\x1c\x7d\x08\x88\x92\xff\xda\x79\x73\xad\x12\x49\xfc\x47\x99\x22\x4c\x6c\x58\xc2\xa9\x2b\x78
ab\x68\x7f\x79\xb6\xa0\xb8\xb1\x72\x8e\xa5\x7c\x30\x84\x44\x84\xbc\x8d\xe1\x3a\xce\x67\xe4\x1b\x94\x81\x50\x82\x3b\x59\xb9\xd9\xc1\xa3\x6c\x7d\x2e\x52\xfd\x1d\x29\x64\x61\xb3\x91\xed\xde\x83\x11\x58\xeb\x25\x06\x66\x11\x06\x0f\xc1\x7a\x84\x69\x9d\x04\xf0\x39\xc3\xfb\xc4\xb1\xd6\x05\xfa\xa0\xe6\x48\xa5\xbe\xa8\xad\xc9\x52\x01\x55\x1f\x16\xb6\xe9\xe3\xed\x6b\xcb\x6f\x25\x3d\x18\x0a\xea\xe2\xfc\xca\x4e\x7e\xb9\xe5\xfb\x4b\xa2\xdf\x7c\x83\x12\xd3\x4f\x91\x63\x25\x7d\xcb\xb2\x46\xb6\xc6\x80\x12\x5d\xe6\xa6\x84\xe7\x0d\x9c\x77\xcc\xdf\xd8\xc0\x72\xe7\xff\xb3\x49\x2f\x49\x7c\x0d\x49\x56\x17\xc8\xcf\x14\x6c\x05\x1b\xc9\x05\xb7\x14\x9b\xd4\x2e\xde\xd2\xf1\x4a\x85\xbf\x38\x86\xb8\x58\x72\x37\x3c\x28\x5c\xe2\x49\xa7\x42\xa6\xe3\xe4\x5e\xd6\x2d\x4d\x3a\xcb\xc4\x59\xea\x96\xc6\x6f\x89\x7d\xe3\x1f\x3c\x44\xf2\xe3\x8a\x0e\xff\x0f\x97\xb0\x4a\x91\x00\x1f\x4c\x99\x5e\x21\x9f\x47\x40\x72\xca\x5b\x8e\xd2\xf5\x5e\x6c\x20\x24\x57\xa9\xf7\x70\xc5\x5d\x70\x86\x15\x15\x60\xc8\x07\xde\xf5\x9e\xb9\x09\xc7\x19\xd9\x82\x3f\x9b\x12\xb9\x56\x84\xf3\xf0\x74\xf5\xba\xe2\xdf\x24\x5e\x5e\xeb\x29\x7b\x0e\xa8\xe3\xbf\xfe\xcc\xa0\xd7\x26\x92\xb4\xe2\xa3\x3e\x0f\x8f\x14\x06\x16\xb1\xae\x7d\xd7\x22\xa6\x6d\xa2\xb7\x4e\x4b\x7c\x04\x00\xdf\xfc\x12\x9b\xbc\x13\xe7\x2b\xd5\x79\xd2\xef\x42\x62\x7a\x06\xf2\x8f\xdf\x01\x4f\x58\xb9\x0d\x71\xe1\x8f\x90\x38\x52\x81\xb2\x80\xa8\x3b\x81\xa6\x80\x27\x8b\x1a\x8f\x22\x18\x15\x0c\x3d\x83\xf3\xd0\x70\x2f\x2a\xd1\xba\xee\x73\x23\x2f\xfe\x6c\xc1\xdd\x5a\xd5\x47\x3a\xd8\xc4\x47\xb2\xd6\x88\x63\x05\xbd\x31\x8b\x35\x6a\xac\x30\xb4\xf5\x27\xd5\x0b\x9e\x63\x1e\xd5\x73\x85\xc0\x00\xa3\x35\xb1\x16\x27\xbe\xe2\xef\x37\x1e\x4d\x09\x9f\x95\x21\x36\x1b\x04\x6c\xdf\x1a\xd2\x7c\x43\x74\xc5\xf0\x49\x5c\x42\xf1\x93\xe0\x29\x06\x79\x35\x1b\x9f\x19\x4b\x62\x4a\x6e\xde\x57\xa7\x47\x2e\x3a\x72\x8b\xa8\xa7\xb5\xfc\x5c\x32\x8e\x05\xa2\x35\x95\x60\x02\x5d\x23\x7a\x13\x60\x6e\x16\xe7\xea\x4c\x46\xed\x1f\xee\x7e\x6e\xff\xa0\xef\xc9\x10\x24\x44\xae\xc0\xd2\xca\x33\x38\xd7\xb8\xc8\xf0\xc7\x09\xbb\x36\xe6\xb9\x15\x5d\x5e\x7a\x88\xc2\x0b\x9c\xc5\x64\xaf\x2d\xcf\x27\x6d\xda\xf5\x8f\x40\xeb\x5a\xc6\xf3\xe6\x95\x33\x89\x2c\x0d\xc4\xea\x23\x98\x9f\xc3\x87\xbe\x96\x81\x27\x07\xf3\xb3\x40\xa9\xcf\x24\x48\xd7\x99\xd3\x39\x37\xa2\x67\x8e\xc9\xae\xd6\xc6\x52\xca\xe7\x6b\xe3\x99\x59\x65\x9e\xad\x5a\x2b\x41\xa9\xe6\x9b\xff\xdf\xef\x22\x73\x3d\x78\xcd\xdb\x1d\xb4\xcf\x21\x25\xb2\x36\x36\x96\x95\x14\x8c\x56\x4c\x62\x83\x05\xd3\x25\x7b\x8d\x2c\xa7\xeb\x8a\x70\x62\x96\x88\xd3\x20\x11\x9b\x93\x65\x88\x86\xba\x81\xf2\x4f\x8d\xd2\x85\x85\x5b\xae\xf2\x8a\xd8\x0c\x36\xdc\x5c\x62\xb5\x65\x75\x1d\x61\xa1\x85\x3c\xe6\x59\xc9\x76\x6b\x1e\xce\x60\xa8\x56\x6a\xb7\x03\x66\xd6\xac\x39\x75\x22\x67\x7e\x07\x19\xdc\xda\xff\x41\x39\xd8\x9f\x0f\x0e\xfc\x2b\x88\xd7\x0e\xf1\x24\xf7\xd0\xa3\x6f\xbb\xe3\x80\x7d\x16\x5f\xf3\x06\xee\x33\x9e\x7c\xcb\x8d\x3b\x7a\xcb\xfa\x6f\x2a\xfa\x01\x28\x86\xd7\x80\x41\x3b\xfd\x14\x57\x2e\x88\xab\x5f\xba\x9c\x7d\x1f\x8b\xc8\xb0\x9f\x39\x89\xed\xbd\x2f\xc7\x2b\xd0\x76\x1d\x1f\x9e\x1d\xea\x2d\x42\xe0\x18\x26\x9c\x0e\xb3\x05\x44\x56\x3d\x89\x4b\xb9\x9f\x70\x7a\xfe\x3e\x6f\x51\xa5\xbc\xcc\x25\x18\xb4\xed\x15\x1e\x5c\x74\x02\xd9\x35\x1c\xf1\xa7\x70\xd7\xa9\x8f\xef\xed\x91\x35\x92\x6b\x58\x6f\x41\x88\x1d\x1a\x80\xe0\xbe\x66\xf2\x8a\xf3\x5d\xcb\x67\xe5\x38\x07\x23\xf6\x12\x0f\xab\x53\x96\xc3\x0a\x66\xec\x99\xa4\xce\x9f\x2a\xd6\xe3\x0f\xf9\x45\xd7\xf4\x2f\x91\x1c\xf9\x3e\x92\x68\x9e\xcd\x1a\xe7\xbf\x2e\x0a\x8f\x62\xfe\xc4\xff\x5c\xfa\xcd\x35\x8b\x8f\xf9\xa2\x27\x1a\x4c\xd6\x59\xff\x4a\x2f\xee\xe1\x84\x13\x25\x7b\x3c\x00\x4e\xde\x56\x4f\
x04\x4a\xa2\x92\x76\x39\x31\x47\xc7\xef\x43\x81\x43\x81\x97\x3c\x74\x9a\xe3\x9c\x1b\x57\x75\x78\x82\x57\x27\x20\xd1\xff\xe9\xc2\x75\x40\xbe\xe9\x48\xd8\x32\x34\xdd\xed\x07\x43\x81\xab\x32\xce\x04\x4f\x7a\x9c\x40\x82\x31\x4e\x91\x2f\xfe\x0f\xc7\x04\xdb\xad\x0a\xa9\x33\x43\xa4\x57\x50\xcd\x62\x43\xe5\xa6\x33\x37\xa1\x0f\xe4\xbc\x07\xa7\xb5\x22\x34\x30\x74\x14\x71\x61\x7b\x52\x9d\xa3\x95\x3b\x96\x70\x3c\x1b\xac\x37\x69\xba\xed\x85\x8e\xc1\x37\x8e\xee\x92\x7c\x30\x6b\x72\x62\x5c\x44\x3a\x2f\xb1\x2d\x12\x6d\xac\x5f\xa9\x22\x4f\x0d\xda\xc9\xa3\x3b\x5f\xac\x48\xd9\xaa\x48\xb9\xdb\x7f\x1b\x4d\x77\x8b\xc4\xb3\x6f\x68\xbd\x05\x06\xde\xde\x6c\xd3\x9d\x4b\x94\xe5\x78\x57\x1f\x21\xf1\xd4\xdb\x6c\xd0\x3e\x6a\xd6\x01\xfc\x6e\x1c\x3c\xcc\x84\xba\x54\x7a\x61\x9f\x11\xc1\x60\x33\xac\x1a\x28\xb8\xba\xe9\x86\x61\x60\xfe\x15\x48\xa1\x09\xc2\x44\xb3\xfe\xdb\x47\x13\xec\xff\x34\xf3\xe4\xc3\xf4\xf2\xa1\x78\xc0\x72\x33\xa1\x1c\x25\x17\xf8\x97\x5b\x33\x83\x25\x2c\x26\x03\xa4\x01\xe6\xda\xc2\x72\xab\xf5\x2c\xf7\x2c\x1b\x77\x91\x9c\xe8\x72\x0f\xaf\x22\x69\x5c\xc3\xf4\x47\x81\xdd\xa3\x1d\xf7\x87\xc5\x3b\xb3\xb9\x2c\x77\xa7\x5b\x5c\x44\xe1\xb1\xdc\x9d\xae\x2b\x5f\xbd\xd9\xe2\x22\x90\xae\x2f\xb1\x7b\xbb\x2b\xce\x0a\x91\x75\x33\xee\x6e\x31\x43\xed\x64\x29\x1b\x04\xb4\x46\xdb\x57\x13\x7e\xb0\x8b\x28\xcf\x10\x50\x94\xc6\x86\xac\x02\xa4\xa5\xae\x4f\xbb\xa3\xab\xbe\xfe\x23\x9d\xb7\x17\x39\x2e\x19\xf4\xd7\x95\x6c\xb1\x56\x36\x71\x22\x1b\xc9\x7d\xa0\x04\x89\x8e\xca\xba\x31\xbb\x7b\x71\xd1\xfa\x80\xc1\x1a\x5d\x0e\xf1\x65\x40\x05\x9c\x9e\xa9\xa5\xec\xb2\x61\x60\x0b\x44\x95\xed\x49\x37\x96\xf0\xad\xa9\x67\x2d\x6e\xbf\x04\xa1\xb8\xbd\x26\xd0\xe8\x12\x6b\xa1\x16\x88\x34\xce\x25\xd2\xef\x91\xe3\xe6\x96\x33\x2c\x2d\x25\x23\x3a\xf4\x32\xe2\x1e\x81\x8e\x60\x08\x8f\x50\x3b\x30\xce\xf3\x97\x36\x6a\xcd\x71\xba\x78\x96\xe0\x0c\xe1\xf3\xc3\x42\xe6\x7f\x41\x32\xce\xf0\xe7\xc5\xdd\x49\x1f\xa2\x39\x48\xc8\xd3\x4e\xb2\xec\xcc\x87\x00\x3d\x16\x2f\x45\x07\x67\x20\x9f\x4c\x5e\x04\xd4\x92\xc7\x92\x5f\xad\xde\xf9\xfe\x65\x93\x98\x05\xda\x05\x73\xa5\x59\x5d\x77\x80\xb5\x49\xb3\xd9\x86\xfb\x29\x16\x79\xa4\x96\xbc\xef\x52\x35\x33\x18\x3e\x86\xa5\x89\x5a\x82\x32\x2e\x1e\x02\xd2\x69\x32\x51\xf0\x11\x2e\xca\x68\x40\xee\x61\x94\x53\x12\xc5\x79\xf9\x99\x6c\xbe\xa1\x27\x98\xcb\x93\x5f\x03\xdc\xe2\x3c\x72\xbe\x6a\xd4\x21\x56\xac\x17\xf1\xf0\x2e\x79\x12\x97\x77\x1e\x21\x6e\xe9\xa8\xe7\x2d\xf4\x8a\x5d\x6c\x6c\x5b\x4b\xee\xc2\x13\xbe\xe4\x44\xe7\xd2\x92\xde\x57\x80\x83\xa1\x40\x4b\x7a\x91\x06\xfc\xe6\xfe\x1b\x82\xfc\xf2\x2f\xf9\xd6\x26\x20\x36\xf8\x14\x09\x04\x86\xb8\x9e\xb7\x84\xbc\xe3\x02\x67\x92\xd8\xbf\x0d\x15\xce\x34\x16\xe4\x8a\x93\xf0\x3e\x9d\x3c\xfe\x24\xb1\x27\x7c\xae\xf5\xad\x77\x9b\x48\xa9\x08\xc9\xfb\xad\x63\x8d\xce\x3e\xd8\xca\x61\x34\x09\xf5\x8c\xa5\x5b\xd2\x2e\x12\xf8\xe4\x7e\x3a\x31\xaa\x6f\xf7\x97\xc7\x6d\xad\x96\x6d\xca\x63\xf0\x1c\x38\x88\x1a\xfc\xd1\xa2\x83\x62\xbd\x88\x58\x7d\x92\x87\xc8\x79\xe9\x74\xa4\x8d\x18\xfe\x3a\xd3\xba\x33\x5c\xda\x99\xd4\x09\xca\xf7\xd6\x0d\x97\x4b\x64\x01\x06\x27\x5d\x64\x26\x05\xbb\x35\x59\x9f\x2b\x19\x0a\x65\x01\x43\x18\x96\x8b\x08\xdf\x27\x4d\xf0\x80\xf4\xa8\x46\x46\x7c\x1b\x90\x96\x41\x1a\x31\xa9\x17\x92\x1c\x41\x1a\xee\x5e\x77\x31\x6c\x71\x3f\xe0\xb8\xe1\x39\xfa\xea\x7e\x5d\xe2\x67\xa3\x8f\xef\xf3\xa1\x47\xd7\x31\x7f\xde\x40\x3d\xc4\x45\x2f\x36\x07\x8c\x88\xb7\x0f\x7e\x4f\xd1\x04\x3d\xef\xe6\xba\x35\x18\x44\xde\x37\xd7\x48\x9e\x77\xf0\xb4\x1f\x8c\x20\x32\x30\x7c\x48\x5f\x2e\x06\x86\xe7\xbd\x89\x43\x08\xac\xee\xf1\x73\x6f\x38\x90\x4a\xf9\xb0\x74\x5a\xf0\x5b\xa2\x66\x00\x13\x7f\xbb\x24\x81\x22\x26\xea\x9b\x16\x95
\x95\xd4\xa8\x7b\xde\x6e\xea\x2f\xc2\x4f\x5d\x69\x50\x1d\x40\x0f\x47\xd4\x42\x8f\x00\xfb\x40\x89\xac\xef\xdf\xcf\xf7\x47\x1a\x03\xaa\xf8\x42\x6e\x64\x83\xeb\x5b\x4b\x81\xc0\x24\xb0\x3f\xea\x5e\x86\x1f\x63\xfb\x51\xd9\x1f\x3e\x91\x08\xcd\xf3\x95\x46\x13\x5f\x3a\xfe\x2b\x17\xad\x8c\x3c\x7d\x95\x4c\xff\x4d\x9f\x4d\xec\xf4\xe9\xe4\x71\xa9\x3d\x2d\xe8\x27\xa2\xac\xdf\x8f\x08\xdd\xbf\x51\x6f\xb8\x28\xc5\x1c\x8f\x98\xf3\xb9\xe1\x5e\xfe\xd0\x67\xbd\x6e\x91\xc8\x5f\x47\x27\x20\xdf\xb3\x55\xf6\x0f\xd9\xfc\x33\x07\xd9\x7c\xd1\x71\x67\xdf\x9e\x54\xd5\xdb\x85\x80\xa9\x97\x58\x6b\x11\x60\x25\x01\x96\x60\x94\x07\x76\xb0\xbd\x7f\xa7\x58\x1a\x8d\x7d\x38\xa9\x9f\xbe\x40\x1d\x3b\xff\x1c\xa9\x01\x1e\xf3\x56\x51\xdd\x67\x77\x2b\x3c\x49\x46\xfd\x84\xbf\xeb\x95\x8e\x80\x24\x76\xaf\x7c\xc0\x1b\xf3\xb5\xac\xcc\x51\x0d\x32\x39\x90\x92\x9e\x8c\xea\xfa\x56\x9e\x3e\x3b\x22\xc2\x8a\x1e\x4f\xa4\x80\x21\x10\xd1\x5f\x8d\xec\x1d\xe3\xec\x8f\x77\x7c\x91\x18\xa9\x53\xd2\x23\x3b\x99\xd7\x5c\xb2\xc7\x06\x58\xa9\xb5\xd6\xe7\xc3\x2e\x10\x3f\xc2\x9a\x69\x84\xa4\x3f\x0b\x57\x35\xad\xa4\x86\xd6\x93\x5c\x43\xcf\x42\x59\x75\x30\xd3\x8b\x3b\x7e\x9b\x4e\x0d\xa9\xc0\x22\x99\x5f\x77\x11\x94\x97\x55\x3f\x5f\x96\x55\x7b\x82\x40\xd4\xd0\x14\x9e\x5d\x90\xd5\xab\xad\x95\x99\x3b\x82\x9e\xee\x59\x5a\x6d\x75\x04\x0c\x55\x9e\x35\x7f\x4b\x22\xed\x7c\xb0\x89\x90\x96\xde\xcf\x90\xcf\x19\xde\x56\x11\xc3\x1f\xe7\x15\xfc\xf3\x87\x5a\x0b\xd6\xda\x4f\x82\xb8\x78\xde\xf2\xa8\xda\xe5\x75\x9f\x55\xfa\x4b\xcd\xcb\xd9\x26\xdb\x30\x5a\x37\x95\x7f\x09\xda\xf3\x33\xcd\xf7\xc6\xc3\xcb\xd9\x98\xf2\xcb\x21\x92\x77\xfb\x7d\xe1\x2e\xde\x64\xe6\x67\x9d\x18\x58\x6c\xe5\x38\x73\xb2\x03\x38\x06\x03\x3e\xa6\x6d\x3f\x4a\x79\x7b\xb0\x7e\x81\x74\x7e\x3f\xd1\x24\x0d\xdf\x9d\xf6\x9e\x60\xaa\x24\x4b\x3c\x1b\x27\x66\xb6\x59\xa0\x69\x5d\x24\x24\xa3\xf1\xee\x9d\xea\x5e\xcd\xbc\xad\x8d\x3c\xd3\x21\x9e\x7a\x2c\x2f\xfb\x03\x49\xfb\xa1\xea\xea\x9b\x82\x50\x10\x3b\x14\x1b\x14\x66\xae\xcb\x23\xc9\xb9\xd5\x7b\x15\xf9\xfd\xc2\xbb\x5b\xb8\x4a\xef\xac\xf6\x2f\x3e\x87\x98\xdb\x3e\x31\x01\x6c\xea\xca\x4f\x28\x2d\xce\x34\x3a\x9f\xe8\x37\xda\x3c\xf2\xe9\x31\xee\x02\xd6\x9d\xf0\x68\xc9\x13\xd8\xbc\xe5\x9d\x88\x48\xf0\xfa\x00\x51\x88\xf7\x57\x8f\x3f\xe9\x21\x86\x91\x79\x1d\x6b\x04\x0f\xeb\xb2\x5d\x8e\xf8\x51\x77\xbd\xb1\x07\x98\x23\x7d\x0d\xe2\x51\xb9\xf6\xf1\xa8\xe4\xc4\x62\x34\x76\x38\xbd\x1f\x2d\x76\xfa\x91\x5b\xe5\x01\x49\x98\x7f\x8a\x4d\xf8\x01\x36\x49\xaf\x8f\x7d\xf2\xfa\x25\x9c\x43\x64\xfb\x70\x36\x7d\x52\xde\x43\x7a\x46\x68\x64\x95\xcc\xea\x51\x0e\xd3\xc3\x66\xa0\x6c\xb7\x40\xa4\xe4\x17\x3d\xac\x95\xc9\x6c\x00\x3c\xad\xf2\x4e\xe1\x19\xfb\xb3\xea\x50\xce\x3f\x8d\xb4\x9c\x5d\x18\x98\x05\x7a\x22\x3e\xa7\x2b\xa2\x43\x28\xdb\x87\xae\x44\xb7\xe0\x7d\x04\x2b\xdb\x56\x7b\xb1\x0b\xd9\xe3\xd1\xd0\x57\x7c\x02\x1b\xaa\xaa\x08\x56\x79\xd1\x88\x2a\x10\xe8\x43\x89\x85\x19\xff\xcd\x6a\x1d\x90\x0f\x81\x25\x54\x93\x42\x1e\xa0\x80\x11\xcd\x63\x9e\xc0\x04\xac\xf0\xa7\xf9\xa6\x57\x67\xbe\xaf\xe0\xc4\x57\xf4\xde\x7c\x47\xf2\xdf\x7c\x7b\x7f\x6b\xdf\xda\x75\xfe\xb8\xce\x6f\x7b\x90\xc1\x99\x7d\x5b\x63\x73\x3e\x76\x2a\xe2\x5a\x41\x46\x7b\x33\x1c\x83\x5d\x5f\x74\xfc\x4e\x20\x2f\xd2\xed\x3b\xc7\x95\x06\xb1\x48\xce\xf5\x15\x9c\xf9\x0a\x14\xf6\x18\xda\xab\xd1\xca\x9f\x67\xf0\xdb\x2b\xd6\x01\x0e\x26\x75\x88\xa0\xcd\x27\xe3\xfd\xb1\xb5\x38\xa6\xd9\x35\x50\xec\xa2\x1e\x54\x09\x16\xfd\xd2\xd8\xec\xc9\xfd\x03\xd6\x27\xde\x66\x5d\xbe\xbf\xe9\x80\x6d\x7d\x7a\xd1\x43\x1a\xfe\x43\xe1\xbf\x5c\xdf\x25\xe6\x18\xb6\x25\x26\x13\xf0\xec\x2f\x8c\x9b\x41\xaf\x92\x83\xd2\x95\x1b\x8d\xf9\xbe\x2e\xaa\xff\x63\x13\x59\xc9\x5c\xe7\x65\x8c\xf
f\xce\x7f\x9a\xce\x8c\xc4\xa0\xb8\xa8\xc1\x5e\x8c\xec\x88\x18\x26\x21\x7c\xcd\x4a\x63\x98\x6b\x3c\xbf\x2c\xc1\xf9\xb9\x0e\x7c\x23\xc1\xbb\x5e\x5d\xdd\xfa\xd6\x13\xa2\x7b\xd9\x7a\xec\x33\x1c\xdb\x33\x7a\x50\x3f\x3f\x32\x78\xfd\xf4\x45\x8f\x8e\xca\x00\x45\x62\x32\x90\xd9\xff\xd1\xf3\x33\x4d\xd7\x40\x3f\x07\x07\x05\xd4\xf7\xf5\x13\x1f\xee\x78\xc9\xb2\x0c\x0a\xb6\x72\xa2\x7e\x56\xaa\xe4\x91\x0e\xfd\x19\xb1\xd4\x7c\x8b\x45\x9a\xe2\x73\xfe\x84\xfc\x29\x20\x0b\x11\xe1\xbf\xdc\x67\x31\x3b\xa9\x89\x48\xf2\x79\xd7\x90\xda\xe5\xbf\x30\x08\x0c\xa4\xea\x57\x7e\xda\x9c\x95\x35\x3b\xbb\x1c\x38\xd3\x4b\x67\x4f\x38\xfa\x10\x2d\x2a\x73\xe3\xfb\xcf\x54\x73\x9b\xa1\x74\xea\xe5\x2a\xf5\x2b\x43\x2b\x8b\x50\x09\x68\x3c\xfb\x8b\xf2\x33\x40\xf9\xef\x04\xeb\xdc\x78\x6c\x3c\x53\xd9\x16\x95\x14\x72\xf8\x13\x02\x35\xdb\xc1\x41\xd1\x8f\xe5\xc7\xc6\xe7\xcf\xf5\xc7\x39\x77\x4c\x21\x06\x9f\x6b\xc8\x04\x24\x89\x17\xa4\x89\xa7\x4f\x83\xc8\xc6\x01\xfd\x0e\xc1\xf1\xdf\x49\x1e\x71\xf6\x48\x00\x32\xe7\xc3\x48\xb7\xb7\x72\x71\x83\xf7\xe9\x59\x69\xa3\x82\x79\x66\x9f\x7d\x05\x19\xbf\x99\xf6\x5a\x79\x75\xae\xff\xd0\x8e\x30\xac\x9f\x1e\x57\x7c\xc5\x18\xee\x82\xcc\x7f\xaf\x55\xfb\x56\xf1\xf4\x1f\x43\xfb\xc6\xb0\xa4\xf8\x09\xe0\xde\x21\xd0\xff\xd3\x15\x09\xd4\xbe\x55\xeb\x59\x43\x35\x80\xb9\x19\x36\x30\xb4\xee\x29\x29\x29\x72\x4e\x5b\xc8\x12\x6c\x67\x6b\x78\x49\x39\x1e\x70\x36\xea\xbd\x62\x28\x8d\x5b\xe1\xa8\x09\xf5\x1d\x36\x7a\xcf\x40\xf9\x09\x09\xb4\x59\xc4\x2c\x9d\xaa\xd9\x83\x6f\xb5\xc2\x8f\x50\xbe\x25\x6e\xf5\x8f\x0e\x44\x6a\xc1\xfa\x36\x94\xee\x87\x9e\xd4\x12\x22\x04\x66\x93\xcd\x01\xa5\xff\x33\xa7\xa2\x05\x49\x68\x14\xb0\x26\x13\xb2\x65\x79\xd8\xf0\xd6\x39\xba\xc2\x49\xbc\xf3\x4e\xaf\x14\xb6\x9c\x44\xd7\x3c\xdd\x21\x36\x64\x28\x63\x5e\x75\xfd\x27\xa1\x82\x10\x15\x60\x78\x69\xfe\x8f\xda\x04\x0c\xf6\x70\x95\x02\xee\xda\x1a\xe5\xc6\x74\xaf\x1c\x61\x26\x2c\xa7\x72\x8c\x30\xe8\x6c\x52\x71\xb2\x82\x19\xa0\x13\xc0\xdf\x30\xee\x2a\x11\x35\xa9\xa7\xbb\x7f\xf3\xe7\x9e\xe9\x7b\xd0\x8e\xb0\x2d\xa4\x13\xeb\xd0\x04\xe6\x4c\x87\xc7\xcd\x55\xc0\xe9\xa8\xd4\x00\x01\xf0\xa5\xa6\x49\x42\x2e\x56\x66\x8c\x37\x13\x31\xa2\xdd\x1d\x49\x81\xcd\x40\x53\x39\x81\xaa\x01\x23\x40\x79\x70\xb6\x16\xcd\xfe\xd5\xb1\x1a\xd2\x74\xd0\x0f\x35\x1d\xc9\x66\xb4\x24\xaf\x3f\x12\x4f\x9a\x34\xba\x4e\xce\xbe\xa5\xa3\xb6\xaf\x2e\x69\x4f\xe0\x91\x9b\xe4\x41\xc7\x93\x81\x01\xc5\x51\xfd\x9a\xf1\x44\xb9\x02\x5a\x3c\x53\x65\xa0\xcc\x54\x4f\x71\xda\xdb\x4b\xe5\xa1\x87\x12\xb9\x21\x5e\x30\x31\xd6\x63\xaa\x9b\xe7\xbb\xba\x5e\x41\xbc\x0b\x70\x00\x1f\x3c\x50\x06\x01\xc0\xd4\xa4\x15\x9c\x13\x8b\x47\x16\x29\x17\x20\x7c\xd4\xc1\xa0\xa5\xd9\x09\xa2\x77\x2c\x15\xc9\x08\xe4\xc9\x8f\x53\x1b\xce\xa6\x82\x44\x16\x6a\x5d\x94\x9f\x3c\x00\x3c\x95\x6e\x5d\x18\x5a\x1b\x3c\x5e\xfa\xef\x71\x49\x7e\xe0\x10\x07\xb8\xdd\x0b\xe8\x7a\x79\x1a\x7d\x7e\x13\xe4\x39\x74\x11\x76\xb8\x53\x69\x37\x08\x5f\x20\x05\x1d\x8a\xab\x82\xf0\x02\xf6\xc1\xb2\x79\x30\x9e\x61\xca\x52\x16\x56\x82\xe3\x14\x52\x60\xd3\x4f\xa0\x23\x1f\x8a\x50\xc1\xba\x21\xaf\x92\xc9\x09\x32\xd1\xf0\xcb\x84\xcc\x13\x02\x38\x59\x0e\xa1\x4c\xc0\xb4\x89\x49\xde\x3b\x28\x15\x48\xd0\x60\xfa\xb3\xfc\x37\x79\xf7\xcc\xfd\x59\x33\x9d\x49\xa9\x09\x53\x7a\xc5\x75\x0c\xe9\x81\xd8\x1c\x41\xf5\x4a\xf7\x50\x18\x7d\x3a\x25\xb9\x44\x01\x78\xd4\x75\x9d\xf2\x9c\x4e\x89\xdd\xca\xe4\xbc\xf2\xdc\x83\x6c\xb2\x42\xf4\xe4\xf1\x67\xa9\x8d\x15\x6c\xc2\x7a\x56\x3c\x11\x60\x66\x04\x4e\x29\xb2\xf9\x1d\x51\x4c\x82\x0f\x39\xf6\x62\x49\x44\x87\xb2\x0d\x43\x7d\x8b\x15\xc3\x4a\xce\xc8\xb3\xfb\x78\x37\xa5\x55\x95\x59\xe3\x6e\x63\xaa\xe9\x2a\x0b\xa2\xcb\x70\x99\x85\x8b\xbf\x2b\x5b\x
55\x45\x8a\x4b\x1f\x7b\xf3\x4d\xb3\x8c\x81\x09\xe9\xb1\xbc\x68\xee\x42\xae\x61\xf8\xc6\xf4\xdf\xb0\x6c\x96\x29\x75\xba\x79\x37\x78\xc7\x0f\xf9\xfd\x41\xef\xff\x26\xe0\x4d\x18\xe8\x19\x95\x37\xde\x9b\x0a\x4a\x1c\xdc\xcb\x9b\x64\x3a\x86\x5e\xee\xc4\x32\xb0\xa8\xc6\xe3\xfd\xbc\x15\x37\x35\xde\x0c\x32\xb7\xd2\xaf\x3c\x1e\x1f\xc5\xca\x8e\x07\x43\x6f\x47\x44\xc7\xd3\x6c\x82\xc4\x03\x53\x8e\x0c\x05\xa0\x8e\x9d\xdf\x8c\x0b\xfd\x08\xc1\xd4\x72\xe7\x3a\x64\x1f\x36\x95\x14\x72\x00\x3d\x3c\xd7\xf1\xc6\x43\xfc\xcc\xc8\xd1\xf3\x10\x48\x57\x77\xb0\xd9\xc8\xcd\x9f\xda\xdc\x0d\xdf\xf3\x58\xdf\x4c\x63\x1f\x3d\xdf\xa0\xc5\xd4\x8f\x15\xfe\xd4\x8d\x88\x4b\xc0\x23\x39\x7c\xb8\xef\x60\xa7\x6f\x40\xd9\xa7\xa3\x59\xc8\xea\x92\x47\xc6\x86\x30\x92\x6d\x44\x30\x48\xa6\x0a\x41\x95\xd4\xc8\x58\x23\xee\x73\x5c\x6e\x5f\xc1\xb5\x52\xdc\xfe\x86\xfc\x5e\x16\x2e\x55\xc0\x51\x59\xf9\xfc\x16\x8f\x98\x10\xcc\x7c\x02\x4b\xfa\x27\xd1\x0c\xef\x72\xfc\x2a\xf6\xd8\x18\x3c\xe3\x1b\x36\x65\xeb\x6f\x2a\x43\x6c\xcf\x50\x7b\xd8\x64\x14\x21\x91\x29\x94\x12\x6c\x3e\xea\x1c\x00\x63\x79\x24\x57\x4a\x80\xad\xcd\xef\x60\x2c\x41\x9e\x31\xba\xcd\xa9\x4b\xc2\x34\x68\x50\x4f\xc1\xc6\x6e\x8d\x92\x63\xa4\xa7\x8f\xe8\x32\x0e\xee\x52\xe0\x00\x06\x92\x88\x3b\x1d\xb3\xe6\x2f\x23\x46\x15\x1e\x36\x64\x19\xa4\x06\x13\xcb\xce\x5d\x38\x63\x86\x19\xe6\x67\x45\x64\x28\x55\x2c\x12\x75\x1a\x47\x2c\x52\x5c\x04\x07\xc7\x3f\x48\x4b\xa8\xd9\xa4\xbb\xe3\xd0\x30\xa6\x3f\xcd\x2a\x49\x28\xce\xe6\x85\x93\x13\x75\x8f\xce\xcc\x25\xd1\x8c\xb4\xf0\x29\x73\x5a\x3d\xa6\x54\xfc\x95\xe6\x7f\x7c\x79\xf9\x2d\xd9\x03\xe4\x41\xfb\x62\x68\xd6\xaa\x05\x4a\x01\x9b\x08\xbf\x7c\x20\xcb\xd6\xef\x3c\x78\xd0\x5a\xdc\x98\xf3\x75\x17\x3e\x57\x71\xec\x66\x12\x14\x99\xe5\xb4\x2b\xeb\xce\x1e\xdc\xc9\x45\x68\x90\xa8\x4b\x14\xa1\x09\x25\x1c\x1a\x95\xf2\xd1\x27\xc8\x5f\x51\xaa\xe0\xa0\x58\x67\x96\x3f\x11\x5d\xf8\x1a\x77\x8c\x01\xbd\xd0\xd6\xb6\xca\xd3\x69\xda\xe6\xf2\x79\x9b\x22\xaf\x11\x5a\x11\x59\x33\xdf\xbc\xfd\x86\x36\xc4\x8a\x6b\x66\xeb\x61\xe3\xfa\xbf\xec\xed\x4c\x96\x56\xf0\x97\x4b\xdc\x61\x55\x8a\x5c\x5e\xf9\x9f\x45\xdd\x3c\x7c\x51\x3c\x4b\x38\xa3\x7e\x58\x5f\x7a\x66\xc8\x87\x6b\x49\xf8\x65\x16\xb3\x39\xfc\x1f\x50\x74\x90\xe8\x43\x48\x6d\xf8\x88\x7a\x11\x49\x09\x2c\xb6\x54\x35\x38\x22\xf4\xbe\xe6\x63\xa2\xcb\x22\x2b\x80\x33\xc7\x9d\x20\xfb\x64\x0d\xdd\x88\x76\x13\x0e\x75\x64\xff\x8c\xca\xc2\x5f\x2a\xeb\xd8\x9f\x94\x9f\x1a\x8b\xbf\xba\x20\x14\x23\xf8\x6d\x61\x76\xc1\x5d\x3d\x03\x23\x87\x8f\x34\x0e\x6e\xb7\x98\x5c\x08\x42\x72\x0c\x66\xe6\x6d\x0e\x36\x79\x71\x0c\x85\xb8\x91\xcd\x45\x59\xc8\x01\xe5\x20\x11\x50\x6a\xe0\x8e\x14\x8f\xe1\xee\xf8\xe3\x4e\x1c\xfd\x87\xa8\xe4\x41\x99\x2a\xe1\x85\x99\xfa\x0e\x47\x12\x13\xd9\x50\xdf\xcc\x8d\x02\x60\x37\x68\x88\x52\x01\xf5\x3d\x2b\x79\x7a\x08\xfa\xe9\xc1\x1d\xe5\x6e\xe1\x0d\x75\xe7\x22\xfa\x10\x39\x74\x4e\x3a\x4a\xb0\xc6\x28\x34\x78\x72\xff\x4c\x66\x98\xa1\x46\x14\x9a\x21\x25\xac\x0d\xf0\x52\xf3\x9f\xcc\x94\x1c\x22\x76\x6d\xa8\xdd\x53\x55\x03\xe7\x07\x40\x79\x31\x20\x14\xd2\x0d\x5c\xe9\x18\x42\x17\x6a\x08\xea\xa2\xc1\x69\xb6\xa4\x47\xe1\x9f\x8f\x3f\xe4\x72\x70\xb9\x7c\x00\x43\x08\x9f\x24\x8d\x25\x4c\xfa\xde\x49\xa7\x75\x5d\xea\x4a\xb8\x86\x20\x44\x4b\xcb\x00\xad\x5a\xa0\xd2\x24\x1d\xa6\x59\xf2\x04\x25\x02\x23\x87\x92\x6f\x57\xa7\x20\x0e\x3d\x89\x7c\xfc\xea\x65\x5f\xef\x93\xb7\x94\xa3\xc5\xc0\xfb\x74\xa4\x76\xc0\x08\x91\xe1\x83\x5a\x10\xcc\x98\xb2\x32\xad\x0a\xf4\x1c\xc0\xcc\xad\x9a\x76\x74\xb4\x0e\xf9\x0b\x9a\x1f\x47\xef\x50\xa6\x73\xf5\x87\x33\x80\x44\x03\xee\xfd\xe0\x23\x47\x68\x99\x64\x6a\x06\x57\x9f\xa9\x44\xab\xd4\x45\xf2\xf9\x48\xf6\xdc\xa4\x1c\x11\
x62\x45\x83\xf2\x4f\xac\xb4\xe1\x35\x74\x25\x8e\x23\x84\x27\xb0\x8a\x18\xda\x0f\xd5\xd9\x02\x75\x04\x92\xa1\x79\x02\x1b\xb5\x79\x29\x25\x16\x0d\x19\x4b\x82\xbb\xb4\x35\xb0\x31\x4a\x8a\xc1\x46\x58\x9d\xd6\xf3\x0d\x1c\x6d\x17\xef\x63\xab\x1b\x74\x18\x78\x9f\x1b\x16\xe6\xa4\xf6\x20\x89\x8d\xbc\x42\xa4\x56\x22\x08\xd9\x23\xee\xf8\x2a\xf2\xca\x88\x71\x00\x89\x69\xac\x49\x5c\xb8\x43\x46\x64\x86\x40\x68\x58\xa0\x31\xa4\x45\x30\x9d\x97\x2a\x02\x4c\x80\xf5\xb5\x4b\x7a\x1b\x92\x1b\xb1\xbe\xeb\xec\x7e\x32\x11\xc7\x1a\x7b\x8f\xe9\x23\x84\x59\x50\xa8\x50\xf8\x42\x65\x88\xae\xe4\xa2\xb7\x47\x5b\x7a\x40\x2e\x83\x6e\x63\x0e\x9d\x0e\x9f\x7e\xf1\x2e\xa6\x90\xf1\xf8\xa3\xbf\x31\x22\x80\x56\x67\xa1\xe5\x00\x6d\x09\x1f\x3a\xa0\x2d\x81\xa4\x46\xee\xec\xee\x99\x75\x54\x27\x37\x20\x17\xef\x08\xc0\x5b\x82\x7c\xd8\x26\x94\x55\x5a\xbf\xc5\x9a\xa4\x8f\x68\x2c\x06\x85\x5d\x0f\x58\x60\xe5\x7b\x4d\xce\xaa\x15\x90\xef\xc7\xc6\xee\x7c\x53\xe6\x05\x10\x19\x89\xc4\xdc\x75\x6c\xfa\x8a\x40\xc4\xfa\x20\xda\xe9\xd5\x1e\xe0\x7d\x95\x28\xdb\x00\x3f\xb3\xce\xbc\x45\x08\xea\x00\x52\x70\xfd\x7b\x51\xee\xd2\x10\xbc\x33\x58\xb3\xe6\xe2\x38\x50\x0c\x40\x03\x63\xb4\xad\xe0\x77\x82\x57\x40\x30\x69\xe9\x7a\x20\x87\x8c\x15\xa4\x18\xff\x7e\x9a\x0f\x1d\xd2\x14\x54\x3c\x80\x46\xc5\x9f\xbd\x0f\x92\xe5\x7f\x26\xd4\x4d\xf9\x6a\x90\xae\xe0\x27\x66\xe8\xe0\x47\xdb\x7b\x76\xef\x49\x94\x22\x24\xbd\xbf\xad\xb1\x93\x9a\xfe\xde\xa8\x6b\x70\xaf\xe5\x94\x2a\xc0\x1d\x12\x07\x77\xf9\x4a\xda\x42\x91\x4d\x00\x59\x25\x5f\x1d\xa4\x2e\xc4\xb9\x0e\x26\x6f\x94\xc7\x87\x5f\x61\x0f\x57\xbc\xff\xfc\x70\xfd\x07\xa2\x55\x3a\x0a\x47\x53\xad\xd0\xc1\x66\xa0\xdd\x42\x7e\xa9\x0a\x7e\x4b\x02\x31\xca\xd7\xb7\x04\x17\xde\x03\xcd\x7f\xd7\x9a\x38\x08\x8e\x8b\xce\x03\xc0\x16\x5c\xd3\xd7\x2b\x44\x27\x4e\xad\xe6\x02\x85\xe8\x44\x28\x09\x40\x6d\x42\x62\x0b\xae\x36\x51\x02\x4a\xee\x82\x5c\xf7\xda\x1b\x3f\xc9\x2a\x3b\xed\xa7\xb5\xd6\xd7\x9d\xcf\xd5\x9d\xdb\x93\x6a\x57\xed\x71\x6b\x86\x63\x1b\x9b\x1b\x64\x00\x94\x17\x71\x21\x8a\x10\x9f\xd8\x24\xd4\x0c\xc5\x09\x55\x73\xbd\x98\x0e\xdc\xbb\xb3\x81\x3f\x59\x7f\x32\x14\xab\xbc\x8a\x06\xf3\xf4\x21\x5a\x3e\xa4\xa5\x97\x04\x03\x24\x94\x34\xc5\x77\xac\xfb\xb2\x8e\x44\xb4\xfd\x22\x22\xb0\xb2\xa3\x5f\xab\xaf\x73\x28\x55\x4c\x5d\x68\x4d\x28\x0f\xa5\xaf\xbd\xd2\xc9\x7b\xc4\x97\xfb\x15\x97\x1a\xe7\x87\xf2\x3e\x4b\x84\x07\x4e\xd1\xd7\xf7\x10\x08\xd7\x0d\x9b\x61\xa1\xd7\xb2\x44\x7e\x53\xbf\x48\x88\x42\xe1\x9e\xfd\x22\xf6\xf3\xbe\x68\xcd\xbf\x47\x0c\x39\x5b\xb7\x99\xcc\x91\xcd\xd4\x3b\xa9\xa5\x03\xa7\x1d\xe5\xbb\x2b\x48\xcd\xa7\x92\x02\x95\x9a\xfb\x42\x5e\x59\x62\xb8\xa8\x42\xb0\xc2\x03\x4c\x25\x3a\x80\x90\x68\x3f\xe9\x2c\x75\x97\xf9\xee\xba\x5b\x4a\x0f\x87\x22\xbc\x29\x4c\x81\x58\x29\xdf\x28\x5c\xe4\xea\xc1\x48\x19\xce\x73\xdb\x30\x9b\x97\x17\xa3\x22\x85\xe0\x32\x70\x60\xf4\xe0\x4c\x89\xb3\x98\xc5\xd7\x14\x03\xe6\x4c\x59\x8c\xd9\x95\xb8\x29\x83\x81\x18\x4f\xab\xd6\xc8\x8d\x7d\x95\xf8\x05\xa4\x10\x3e\x44\xcc\xbf\x42\x18\x6f\x66\xcb\xca\x52\x99\xb1\x6f\x1c\x41\x9d\x27\x71\xf0\xad\xf5\x39\x62\x4b\x7b\xd0\x10\xe6\x21\x0e\xa0\xe2\x4e\x8f\xd8\xa5\x45\x8f\x27\x79\x6a\x95\x34\x0a\x98\x05\xe8\x0f\x06\x21\xaf\x21\x35\x41\x0f\x78\x1f\xf4\xcc\x00\x11\xba\x88\xe5\x38\xbe\x7d\xf8\xa2\x9b\x2e\x82\xbc\x52\xe8\x38\x7e\x1a\x12\xdb\x41\x07\x19\x60\x68\x3c\x20\x57\x5f\xaa\x09\xc7\x22\x35\x0c\x03\x5c\x1f\x33\xc8\x17\x62\x25\x83\x4e\x7a\x77\x9e\xe4\x40\xca\xea\x00\x0e\xb9\x8c\x03\xb6\x8c\x84\x16\xf6\x5b\xe5\x1e\x3a\x15\x88\x43\x68\x4a\x1d\x48\x58\xd5\xb5\x37\x2e\x4b\xbb\xfc\x44\x5e\x84\xd4\xec\xf0\x80\x8f\xf0\x68\x4b\x84\xa2\x0a\xaa\xc6\x58\x1d\x16\x60\xf3\x40\x94\x6c\xa8
\x58\x0c\x6c\x10\x69\xdd\x3b\x9d\xda\x53\x1a\xf8\x83\x55\x32\x3c\x69\xa1\xc8\x25\x42\xa0\x4b\x88\xe5\x90\xce\xb0\x4f\x6a\x68\x11\xc4\xdd\x8b\xe1\xcd\x4a\x1b\x11\xa9\x64\x41\x39\x12\xcc\x14\x54\x56\xbd\xc6\xee\xc8\x0f\x21\x51\x27\x1b\x0a\xe9\x10\x58\xa5\x38\x75\xf7\x61\x57\xd0\x31\x53\xad\xbb\x3f\xd2\x78\x88\xe0\x22\x20\x29\x3c\x84\x0d\xd7\xdd\xb1\xd0\xd0\x61\x22\x72\x06\x0c\xa6\xf4\x1b\x42\xff\x9a\xb9\x77\xe2\xd7\xb6\xd2\x4d\x3f\x67\x21\xbf\xd7\x9d\x11\x36\xdd\xfd\xd3\x7f\x03\xe6\x62\x84\xa1\x89\x2e\xc7\xee\x9e\x48\xde\x7e\x73\x5a\x6f\x1f\x6c\x8b\x07\xc0\xc7\xdb\xcc\x5c\xa9\x4e\x70\x7f\x47\x62\x85\xee\xf8\x6c\x91\x6b\xd9\x1d\xb7\x07\xda\xa1\x0c\x47\x02\xf0\x53\x0a\x14\xf7\x26\xaa\x4e\x87\x1c\x48\xa1\x50\xa1\x95\x56\x83\x93\xec\x24\x83\x52\x3b\x40\x0c\xf3\xbd\xe8\xa2\x87\x0d\x69\x8a\x00\x35\xac\x98\x58\x43\x57\xfc\xaa\xf6\x21\x12\x51\xd3\x00\x6e\x34\x77\x3d\x74\xe4\x74\xed\x9a\x0b\xd1\x10\xfd\x66\x06\xd6\x95\xe6\x35\x22\xe2\xa9\x00\x68\x68\xc0\xda\xa0\xa4\x36\x8e\x0c\x85\x26\xd5\x06\x16\x3c\xc1\x8b\x7c\xf8\x87\x18\x6d\x75\x59\x6c\x49\x7c\x02\x84\x61\x11\xfd\xd8\x1d\xb1\xbc\xef\x91\x8a\x5e\xa6\x6f\x93\xc1\xb8\x63\x86\x02\x1c\xff\x6e\x22\x1c\xd0\xb5\x6d\x72\x17\x73\x0a\x14\x8a\x21\x10\x4a\x3d\x04\x9a\xc2\x06\xd9\xe7\x8f\x84\x15\xba\x10\x17\x80\xa8\x9f\x2b\x9b\xd4\xcf\xc5\x22\x74\x27\xde\x1c\x8b\x3b\x7b\xb8\xd2\x9f\xb8\x65\xcd\xba\xfa\x85\x80\x2e\x53\xcf\xff\x89\xc2\x1e\x39\x9f\x89\x80\x0f\x4e\x63\x3b\x46\x40\xac\x5d\xf3\x2a\x47\xa3\x79\xec\x6a\x0c\x3a\x1d\x42\x38\xd7\xbd\x12\x6e\x25\x5a\x40\xdd\x42\x85\x63\x53\x5f\x8b\xee\x86\xdf\x90\x8d\xb0\xac\xed\x5c\x79\xec\x9c\x7c\x96\xd1\xd9\xa8\xca\x73\x7a\xc0\x0e\xeb\xa8\xa4\x47\x03\xf2\xb7\x50\x1f\x43\xff\x6c\x34\x54\x1d\xc2\x2e\xbc\x6d\x54\x77\xfc\x77\x35\xf9\xb9\x17\x33\x35\xce\x88\x88\xf2\xad\x89\xc6\x47\xb7\x90\xfb\xc2\xc9\xb3\x39\xa9\xef\x96\xf2\x8f\x25\x32\x79\x63\x9f\x3d\xda\x98\xe1\xf9\x06\x49\xc2\xba\x07\xd6\xef\x69\xd6\x76\x11\x31\xe7\xaa\x1a\x5b\xd7\xf4\x51\xca\x15\x02\x18\xc8\x3b\xe0\x83\x71\x8f\xa1\xea\x0c\x6a\x72\xa6\x20\xc1\xae\xc8\x4d\x67\x43\x5d\x68\x57\x26\x29\x30\x94\x50\x21\xb0\xe1\xa8\x13\x98\x42\xc0\xe2\xeb\xe6\x42\xa7\x00\x49\x94\x58\xb0\x5a\x42\x10\xe3\x20\xdf\x1b\xd2\xb6\x34\xfb\x45\xb5\xa9\x17\x91\xff\x28\xbc\xd8\xfc\xf0\xc3\xd2\x86\xbd\xcc\x7e\xc4\x3f\x02\x01\x9b\x3a\x4c\x14\x3a\xf1\x64\x27\xfe\x1e\x11\x21\x92\xd1\xc9\x1b\x95\x73\xf2\xb6\x85\xac\xc7\xf6\xf0\x1e\x9c\x38\x7f\x03\xf0\x3e\xb3\x52\x93\xf0\xb8\x18\x90\x0a\x24\xb1\x8d\x6d\x68\xe7\x48\x77\x91\x7a\x49\x6e\x8a\x1b\x8a\x1a\x02\x90\x3a\x09\x66\x82\xae\xf7\xab\x0d\x3d\x93\xe5\x11\xbf\x98\xe8\x55\xe9\xa4\xe6\x89\x7c\x68\x39\xc7\x3b\x74\x70\xba\xcd\x99\x6d\x22\x67\x71\x2d\x19\xfa\x49\x7a\x64\x84\xfc\x53\x16\x64\x78\xc7\xe9\x86\x96\x69\x62\x58\x1f\x96\x35\x4b\xbd\xe1\xac\x65\x11\x03\x34\x6b\xa1\xfe\x81\x4e\x93\xf0\x18\xe4\xf9\x80\x0c\xc8\x9a\x25\x3f\xf1\x65\x1f\x91\xd4\xa8\xd2\xbd\x22\x86\x42\x70\x4a\xf2\x05\xdb\x46\x63\x81\x86\x1c\x88\x5e\x23\x7a\x1b\xce\x45\x3b\x50\x84\xb0\x97\x6e\x5a\x47\xa9\x7d\xf2\xe9\x54\x84\x3f\x50\x28\x3e\x75\xa2\x78\xee\xc0\x00\xf6\x11\x60\xc1\x8c\x3b\x07\xd4\x30\x03\xcc\x52\x12\xd1\xc2\x96\xa3\xf8\xe7\x56\x9b\x40\x08\x9d\x85\x06\x9e\x4d\x51\x24\x44\xc0\x0d\xfe\xd4\x43\x54\xb3\xc3\x49\x89\x63\x5f\x04\x06\x41\x34\x44\x95\x84\x91\xa1\x53\x77\x48\x9e\x4f\xba\x91\x43\xaa\x1d\xb1\x38\x01\xf4\xab\xc6\x7e\xd1\xc5\x00\xa6\xf6\x78\x58\xdb\x53\xb7\x19\xec\x9d\x86\x24\x8f\x82\x00\x1a\xfd\x53\xfe\xc0\x0e\x64\xcd\xda\x23\x19\x1d\x64\xea\xb4\xd1\x3d\x3d\x19\xb7\x6a\x20\x2d\x7a\x97\xe3\x78\xab\x2a\x99\x1a\xc4\x1d\x04\x83\x58\xa1\xfe\x59\x75\x16\xa4\x13\x2c\x42\x4
7\x48\x9c\xc0\x1c\xe7\x81\x5d\x3c\x0d\x28\xf3\x6c\x85\xa7\xf1\x0b\xa4\xaf\x66\x24\xe9\xeb\xab\xca\xe9\xd3\xd7\x7f\x5e\x26\x09\x51\xbc\x2a\xfd\xbc\xe9\x3e\xff\x58\x67\x3e\x50\x09\x8d\xa0\x90\x16\xa1\xb4\x06\x2d\x0c\x54\x9b\xf4\x48\x93\x7d\xec\x24\x98\x4e\xd1\x2b\xfb\xda\x2c\x35\x2a\xce\x10\x26\xa1\xcb\xcf\xda\x8f\x26\x77\x86\x0e\xb7\xd4\x0d\xb4\x38\xa7\x74\x76\xc1\x7d\x87\xd0\x03\x4e\x2d\xac\x89\x49\x66\xc4\xec\x72\x2e\xff\xa2\x1d\x7d\x55\xfe\xef\x96\x72\x89\xbd\x4b\x77\x02\xc3\x14\xe6\x28\xed\xd1\x1a\x97\x24\xc4\x5c\xda\xa4\x34\x69\x13\x0f\xf3\xc7\x84\x61\x25\x45\x18\x20\x69\xab\x21\x48\xa3\x18\xb5\xa4\x44\x6d\x04\x71\x50\xd1\xe2\x48\x2f\x1e\xc4\x78\x46\xd0\x7e\x2f\xfc\xfd\x76\xdd\x2c\xcc\x38\x67\x8d\xdc\xf0\xbd\x7c\xe8\xf2\xd8\xfd\x7a\xe8\xc0\xf9\xa7\x82\xa2\x00\x36\x08\x61\x71\xdd\xdf\x43\x05\xfd\x01\x6c\xcf\xd0\x32\x81\x1c\xd0\xa9\xdf\x0c\x9a\x3d\x43\xf3\xc4\x79\x16\x1d\xdb\x07\xcc\x9b\xdc\x24\xf1\x90\xd6\x73\xaf\x21\x6b\x72\xb0\x7d\x18\x90\x72\xcc\xaa\xd0\x78\xad\x83\x4a\x0e\x85\x4a\x1f\xcb\xd9\xc9\x59\x9d\x96\xed\xa6\x5c\x0d\x9c\x6b\xfc\x27\x08\x5e\xf8\xda\xb1\x36\x9a\xb3\xbf\x41\x7b\x02\x94\x52\x41\x6b\x27\x4f\x3c\x42\x42\xfc\x0d\x99\x15\xeb\x97\xd0\xf7\x93\x24\x01\x0a\x31\x7c\xff\x89\x07\x0d\x72\xc1\x27\x15\x7e\x3e\x2e\x45\xe2\xb2\x5d\x60\xb7\x47\xe1\x51\x16\x0e\xc8\xa5\x94\x86\x66\xec\xa4\x01\x8c\xc4\x5e\xa3\x41\x41\x11\x09\xae\xa7\x42\x57\x27\xc2\xd3\xf4\xe8\xba\x5b\x9a\x23\xf6\x0c\xa9\xa6\x9d\xba\xc1\x07\x04\x2b\x19\xdf\x63\x76\x3c\xe5\x05\xbe\x9f\x77\x71\x13\xea\xfb\x86\xfb\x77\x99\x2b\x79\x14\xbe\x20\xdc\xf7\x8f\xfd\x7b\x6b\x5d\xdf\xa5\x47\x50\xae\xb7\xd7\xf1\xbb\xca\x7e\xf9\x16\x92\xb3\x7c\x45\x8e\x83\xe5\x60\x9e\x04\xc9\xd9\x59\xe6\x6d\x5b\x0c\x73\x01\x92\xd0\x84\xfc\x16\x24\xf5\x70\x37\xd6\xec\x47\x72\x19\x7d\x41\xd4\xe5\x17\x73\x8e\xc3\x55\xc1\x19\xdf\x2c\xb5\x2e\x48\x94\x78\x3b\x80\x32\x49\xf6\x72\xed\xb4\x70\xf0\x75\x5a\x96\x98\xcd\x7f\xb3\x67\xaa\xfc\x47\xd1\x12\x1e\xca\x5a\x33\xde\xfb\x0b\xd9\x26\xaa\x99\x19\x94\xb6\x08\xc5\x07\xbf\x69\xde\xbc\x9b\x06\xe0\x55\x40\xa0\xe0\x8f\xfb\x9b\x7a\x2e\xba\x7f\x93\xdb\xe8\x3c\xde\xcc\xc1\x84\x87\xf9\xb9\x23\xbf\xf2\x73\x63\x55\x8b\xa3\x25\xf4\x47\x7c\x4a\xf4\x21\x73\xc1\x49\x48\x43\xf1\xc3\xf1\xfb\x83\x55\x60\x7f\xdc\x9f\x7a\x3c\x79\x04\xed\x11\xe8\x93\x64\xe9\x93\x90\x85\xe4\x93\xa5\x28\xf5\xc9\x52\xc3\xf8\xd8\xc8\xc9\x16\x0a\x44\xcb\xe1\x13\x4c\xd1\x9f\x4e\xb1\x22\x1f\x1b\x0f\x2f\xa6\xc7\x7d\x12\x39\x77\x3e\x69\xd3\xf6\xc6\x88\x95\x8f\xba\x7f\xf0\x64\x7f\x19\x1b\x05\xe8\x55\x84\xd7\xda\xca\x7f\xf0\x62\x78\xef\xf4\xfe\x7c\xd9\xe8\xdf\x1f\x59\x7c\xef\x8f\x9b\x69\xef\x8f\x34\x2e\x6e\xc5\x4e\xbc\x95\x19\xf7\xbe\xb5\x04\x69\xa0\x93\x52\x87\xe7\x4f\x33\xd6\xc5\xec\x54\x51\xcc\x19\x62\xbe\x91\x01\x4d\x0f\xde\xa2\x86\x81\x86\xca\x19\x6a\x2a\xc3\x41\x2b\x16\x90\x97\x06\x90\xfc\x44\xdd\xc8\x4f\xf5\x0e\x29\x87\x77\x5d\xcc\x58\x67\x68\xa0\xe1\x85\x2d\xf0\xfd\xcb\x63\x30\xa8\xd5\xc0\x77\x7d\x5c\x7a\x16\x0e\x09\xe6\x65\x0c\x6d\x93\x0a\x0f\x8f\x83\xce\x7a\xa4\x4b\x2a\x29\xbc\x39\x28\x92\x28\x92\xfa\xed\xcc\x06\x5e\x33\xfb\x4e\xf1\x38\x84\xb7\xf0\xe0\xbe\x63\xdd\xf5\x5d\x0e\xc9\xaa\x44\x5a\xc3\xbb\x6c\x3f\x1d\x94\x75\xf7\x09\xdb\xbb\x98\xed\xc7\xab\x2c\xd7\xa9\xb3\x2d\xcc\x63\x77\x90\x0e\x21\x29\xcf\x60\x4d\x69\x38\x25\xf6\xb2\x0c\xb3\x08\x10\x81\x13\x7f\xad\x98\x04\xeb\x09\xde\x7c\xf8\x25\x37\xc1\x99\x7c\x79\x3b\xff\x2f\x54\x5c\x02\xcc\xaa\x32\x98\x5f\x3d\xfa\xe0\x5d\x62\x95\xf3\x5d\x86\x4d\xf5\x1a\x56\x12\xb1\x82\x48\x47\xef\xac\x38\xb3\x20\x6b\x16\x32\xab\xef\xf9\xe2\xcb\x86\xe7\xac\xa1\x36\x19\xc5\x86\x38\x51\x5d\xd7\xc5\xcc\x20\x1f\x4a\x20\x
e0\xb2\x37\xdd\x16\x69\x35\x1b\xca\x12\x0a\x71\x74\x36\xc4\xff\xda\x20\x2e\xad\x9d\xfc\x09\xe9\x13\x91\x9f\xba\xe6\x8b\xa7\xa3\x02\xed\x88\xaf\xdf\xb4\xc1\xb6\x05\x10\x3f\x09\x5e\x04\x43\x55\x5e\xd4\x77\xfe\xed\x93\x7c\x89\xcc\x8d\x37\xed\x43\x08\xc8\x88\x92\xd0\xe0\x73\x56\x0e\x2c\xe6\x8c\xe0\x97\x7e\x0a\xc7\xf3\xc9\x4b\x34\xfd\xdc\x16\xc8\xc1\x11\x7f\x4e\xf9\xa7\x2c\x83\x04\x0c\xde\x8e\x84\x55\xb2\x9e\x94\xe7\xf2\xf1\xa0\x26\xcb\xe7\xaa\x32\x7c\x38\x30\xf5\xa5\xc3\x13\xca\x32\x6a\x30\x8d\x49\xf7\x9d\xae\x23\x94\x5c\xce\xd3\xc7\x6a\x89\xcd\x08\xf9\xc2\xca\x3b\x39\x95\x30\x87\x38\xdb\x90\xa9\x13\x8c\xcc\x6f\x0f\x10\x95\x13\xfe\x8d\xf8\x68\x2f\xd7\x35\xa4\x68\x56\xa8\x45\xfa\x1d\xfc\xd1\x12\x83\x2e\xc8\x9f\x5f\x99\xdd\xd9\xe6\xe1\xef\x1f\x95\xd4\x3b\x3d\x9b\x26\x0c\xab\x29\x72\xb7\xd7\xdd\xb7\x0c\xdb\x57\xad\x93\x6b\xae\x54\xe5\xf1\xbc\xea\x53\x0a\x2f\xcc\xf9\xb3\x32\x2f\x41\xe3\xf9\xaa\x83\xd2\xc7\x5e\x0c\xbb\xa6\x90\x4c\xd9\x19\x18\x86\x80\xb9\x42\xb5\x16\xfb\xac\x16\x2e\xac\xbc\xca\x4f\x17\xe3\x15\x43\x95\x01\x2e\xef\xbe\x10\x99\xd1\xfe\x2b\x59\x91\xec\xd3\x88\x42\x78\xba\xd9\xfe\xca\x4c\xf6\x7a\x79\x0c\xc5\x49\x94\x7b\xc9\xba\x80\xf5\x51\x48\x01\x47\xaf\x6c\x1f\xf3\x5d\xa8\x1c\xf9\xb2\x17\xeb\x3f\xc1\x7b\x90\x02\x8a\xbd\x9e\xba\xd7\x9b\xe9\x28\x2f\x64\x66\xf6\xb7\xa3\xe3\x25\x95\x93\x83\xea\x38\xf0\xa3\x9e\x01\x78\x9b\x5a\xd9\x02\x0f\xd1\x49\x65\xae\x57\xda\xde\x1f\x6f\xd4\x40\xf6\x3d\x0d\x3f\xfd\x97\xd2\xd4\x5f\x36\x2d\x5f\x43\x08\xe6\x7e\xc5\x11\x4b\xad\x02\x08\x16\x54\x77\xf3\x02\x8d\xee\xfa\x53\x8a\x71\x36\xbd\x97\x6b\xf3\xee\x4b\x9c\x6f\xaa\xc7\xde\x50\xbb\xa2\x7c\x2f\x2f\xb7\x1c\x4f\xc9\xd8\x40\x96\x4a\x20\x7f\x94\x39\x4f\x8d\x19\x84\x96\xc7\x31\xf9\x75\xc6\x59\x06\xe7\xc4\x5f\xda\x86\xd4\xf0\xb9\x11\xcd\x21\x31\xd2\x9f\x3f\x59\x9e\xbc\x8f\x05\x71\x37\x3e\x04\x02\x87\xa2\xec\x2b\x85\xf6\x0c\xc8\xa2\x93\x10\xca\x9b\xbe\xb6\xbb\x09\x9f\xdf\x10\x55\x3e\x08\x2a\x77\x30\x9c\xfe\xde\xaf\x39\x87\xf2\x3b\x96\x1f\xff\x28\xca\x78\x55\x6f\x39\x17\xee\xb5\x05\xc7\x63\x8d\xce\x0b\x04\xbf\xfa\xc1\x74\x38\xdd\x8b\xb2\x5e\xee\x79\x71\x89\xa5\x7b\xa0\x8c\xdb\xf5\x91\x5f\xe6\xfa\xac\x21\xe3\xf1\x61\x57\x7b\xd9\x68\x34\xd0\xd7\x7d\xbd\xb3\xbe\x26\x20\x92\xa9\x5f\xc1\xf5\x74\xc9\x4f\x8b\x52\xda\x30\x9e\x11\x76\xdd\x92\x7c\x41\xe2\x7f\x48\xc3\xdb\x44\x8b\x3d\x11\x64\x3a\x77\x99\x6b\xc0\x1d\x27\xff\x88\x01\x96\x6e\xc9\x51\x25\xe2\x52\x9b\xc2\x4b\x79\xc1\xc0\xa5\x0e\x08\xa8\xff\x1a\xd0\x2f\xca\x15\x3a\x31\xc8\x04\xff\x10\x82\x79\xa3\x38\x44\xdc\x01\xcd\x66\xc3\xbc\xd7\x23\x3f\xfc\x9b\xb8\x10\x1a\xa0\xc8\x98\xeb\x68\xc1\xa2\xd7\x61\xb3\x48\x84\xf6\x13\xbf\xe2\x4c\x8a\x03\x03\x80\xf6\x12\x5b\x18\x22\x85\x4e\x17\x51\x00\xca\x2a\xfd\x51\xd8\x4b\xf4\xab\x54\xfa\x19\x2e\x36\x65\x48\x59\x44\x14\xd4\xd5\x44\x5d\x2a\x48\x3a\xfc\xce\xaa\xbf\xc2\x4b\xec\xf4\xd6\x01\xaa\x5c\xa1\x6c\xab\x97\x55\x57\x2e\x4e\x41\xce\xa2\x48\x39\xef\x0a\x52\xa7\xab\x3e\xd9\xf1\x5d\x55\x62\x3e\x55\xbc\xb8\xae\x49\xe1\xe5\x2a\x59\xbe\xcb\x75\xb1\xfc\x54\x05\x16\x35\x8f\xea\x16\x0e\x0c\xd7\xfc\xd1\x67\x70\xc1\x03\x48\x15\x9e\xf9\x0e\x3d\x31\x40\xd7\xbc\x99\x8f\x3b\x74\x68\xd0\x47\xe7\x41\x90\x7d\xf3\x05\x92\x5d\x9e\xae\x4a\x9c\xa6\x84\xaa\x4e\x0b\xd5\xc5\x1c\x9b\x7b\xd2\xb6\xa7\xcc\x57\xe2\x41\xfc\xaf\x6a\xa3\x61\xe2\x66\xc8\xdd\xe4\x7f\x85\xde\x24\xa0\x81\x0f\x2c\xbf\x7e\x39\xf0\x36\xf5\xa2\x78\x92\xbd\x27\x7d\x0c\x57\xde\x45\xae\x68\xc8\x6f\x9d\x54\x8f\x17\x06\x96\x85\x63\xeb\xe5\xac\xad\x7c\x76\x08\xf2\xe2\x4c\xcf\x2c\x46\xde\x79\x7e\xe4\xb8\x56\xb9\x9a\x4f\xf5\x02\x11\xae\x86\xd7\x4b\x66\xd1\x95\xa7\x66\x00\x5e\x59\xea\x38\x6e\xc4\x5f\
x94\x5a\xb9\xd2\xad\xe0\x37\x43\xf2\xfb\x5c\xe9\x0c\x66\x4e\x17\xbf\x59\xd5\xcb\xd9\x86\xf3\x3a\x56\x62\x68\x80\xb3\x15\x60\x8d\x2a\x16\x34\x6c\xa3\x3c\x78\xf4\xc6\x61\x19\x5a\x39\x69\xa6\x2d\x70\x21\x6f\xc2\xe6\x0c\x54\xb1\x59\xd2\xfc\xb9\xf2\x9b\x47\x3f\xd3\x29\x75\xfc\x2b\x85\x2f\xf9\xf2\x09\x0d\x81\x64\x75\x3a\x55\xf7\xfc\x7e\xd6\x1f\x1f\xf9\x17\x6b\xb7\x6d\x0b\x1a\x60\x92\x01\xf8\x1c\x92\x5c\xf8\x6c\x9b\x74\x30\x3e\x83\xb8\xd2\xcd\x92\x75\xf2\xf2\x77\x0e\x25\x1b\x04\x7a\x4c\xa1\x74\x63\xb7\x2e\xe1\x94\xf7\x4f\x3d\xfb\x7c\x65\x76\x5d\xe7\x7d\x7c\x24\x68\xb3\xe3\x85\x66\xc2\x4d\x6a\x36\xb5\x63\xb6\xb1\xcb\xdd\xf8\xc9\xc1\xcf\x97\x04\xba\xc4\xa3\x16\x8e\x99\xe7\x8d\xac\x6b\x0a\x3c\xdc\x4f\x5f\x0f\x3b\xef\xa9\xd1\xb7\xdf\xd6\x85\xe8\xce\xee\xe1\xad\xb8\x16\x83\xa1\x45\xe1\xbc\xff\x3a\xe0\xba\x2f\x99\x61\x50\xbf\x19\xab\xc4\x60\x9c\xbe\xce\xd1\x71\xbb\xfc\x1d\x00\x09\xff\xbd\x2b\xa4\xb6\xcd\x51\xdf\xcd\x2b\xe4\xe4\x67\x8a\xf0\x03\x66\x3a\x12\xd0\xec\xfd\x7d\xec\x1f\xc7\x70\xe1\x9c\x2e\x90\x15\x97\xc9\x2f\x89\xce\x1c\x9a\x20\x03\x79\x58\x80\xf0\x1c\x47\x26\x26\xaf\x03\xad\x2a\xfb\x50\xc4\xb1\x89\xbe\x62\xa9\xa1\xb6\xc3\x1b\xbf\xaa\x44\x7d\x20\xa8\x63\x06\x78\x28\xbc\x54\xa9\xca\x54\x7e\x5d\x06\xc6\xf8\x4f\xff\x9c\x75\x6e\x83\xa3\x32\x35\xcf\x0b\x19\xa8\x3c\x77\x09\x21\x72\x83\x71\xcf\xe4\xd3\x3c\x03\xa7\x98\xde\xdb\x96\xbc\xd4\xae\x8e\x93\xfa\x80\xbc\x0b\x64\x91\x12\x6c\x3f\x45\xa1\xbc\x35\x69\x9d\x65\x89\x0b\x64\x05\xdd\x38\x8a\x1e\x43\x1b\x7d\xa8\x06\x99\x11\x1d\xcf\x0c\x9e\x41\x3e\xd3\x2c\x05\x2f\x08\xe7\x14\x70\xf0\x09\xeb\x37\xe9\xbc\xf4\xb6\xa0\x32\xce\x6a\x35\xf6\x2a\xc0\x45\x74\xb1\xc4\xac\x8d\x19\x44\x32\xf2\x81\xe3\xdf\xb0\x6b\x02\x85\x08\x03\x70\xfa\x61\x09\x2a\xba\x2a\x49\x04\x56\x9c\xd7\x2f\x71\x07\x7a\x3b\xf1\xb2\x5b\xd0\xbe\xc1\x41\x55\x1c\xae\x96\xd8\x6e\xc6\xcc\x87\x7d\xd0\xb9\xc3\x10\x61\x85\xf7\x83\xf2\xa9\xae\xaf\x13\x92\x37\xd5\x2c\x5e\xb9\xec\x4f\x50\x29\xfb\x4d\xec\x85\xa4\x9e\x66\xe7\x32\x5e\xca\xa3\xeb\x4e\x01\x18\x66\xda\xd9\x6b\x90\xc3\x64\x47\xda\x28\xbb\xe2\xb8\x5c\x43\x87\xd1\x9e\x06\xed\x46\xd5\xba\xcd\x8a\xf7\x53\x83\x01\x81\xfa\x4f\x08\xd5\x39\x04\xe2\x20\x5d\xcd\x4a\xea\x2a\xd4\xa3\x6b\x87\x47\x1b\x40\xd6\x8b\xdf\xbb\x02\x5f\x6d\xda\x56\x25\x68\x54\xa5\x4b\x54\x5b\xd8\xb0\x2b\xee\x60\x89\xcc\x31\x7b\xc1\x73\x2b\x4a\x63\xb6\xc9\xbb\x04\x25\x56\x3a\x3c\xa0\xc1\xb3\x85\xf6\x4e\xad\xee\xa6\x35\xc0\x79\x0d\xde\xce\x57\xa5\x3d\x66\xf6\x18\x6b\x38\xd5\x0d\x71\x79\xe7\x04\x39\x53\x48\xf2\xdc\x3e\x7e\x21\x80\x7a\x63\xf9\xd2\xd9\x97\x2a\x12\xa1\xf3\xa7\xec\xb1\x14\x8f\xc4\xb2\xc9\x67\xa8\x78\x2c\x29\xc4\x7d\x9e\xf7\xc8\xef\x12\xcc\x19\x7e\x99\x67\xf8\xad\xce\xa7\xd3\x23\x9d\xcf\x47\xe6\x66\x51\x92\x9c\x0d\x9e\x94\xc2\x35\x40\x06\x0e\x00\x76\x17\x4f\xaa\xec\x9d\xe5\xdf\xb3\x35\x1f\x99\x15\x67\xd1\xd5\x8b\x54\x61\x4a\x74\xa5\xe5\xa0\xfa\x49\x09\xb6\x58\x20\xfe\x67\x93\x1c\x2e\x50\x6e\x5f\xa3\x0d\x11\x75\xe5\x4f\x40\xa8\x28\x19\xa1\xe5\xc5\x55\x87\xb3\x55\x14\x4b\xab\x7a\x87\xe5\xf9\xa4\x25\x73\xc2\x36\x08\x31\xa1\x2c\x01\xa5\xa2\xb0\x6b\x00\x55\x32\x8f\xcf\x40\x3f\x45\xa1\x4e\x2d\x41\xf4\x47\x67\xe9\xef\x58\xe9\x3d\x9b\x72\x88\xf3\x20\x01\xcc\xf7\xaa\xda\x1b\xb2\xd9\x16\x1f\xcd\x7c\xff\x15\x0b\xba\xc1\xbf\xcf\x0b\xcd\x37\xd7\x1f\xcf\xf9\x88\x43\x0f\xac\xb4\x39\x82\x95\xc8\x53\x55\xd1\xc2\x01\xf1\x77\xe5\x15\x6a\x3e\x65\xb3\xa3\x6e\x41\x4a\x7f\xd8\xf0\x5d\x25\x22\x69\xb8\xb8\x8f\x0e\x1c\x25\x54\xb4\x42\x3a\xed\x4d\x51\xac\x39\xfa\xc8\xd9\x19\x1d\x98\x6c\x07\x81\xa1\x97\x64\x8a\x6c\x16\x7c\xa5\xd8\xb9\xb3\x2c\x71\x94\x26\x76\xd0\x21\x92\x42\xc9\xec\xeb\x5e\x42\x7b\xec\x92
\xca\xdb\x6c\xd6\xc9\x47\x40\xe2\x41\x20\x2f\xd0\xe1\x0c\x34\x3c\xf3\x27\x9a\x50\x16\xad\xe0\x09\x5f\x37\x85\x7e\x14\x11\x7f\x62\x1e\xa1\x1e\xc4\x03\x7f\x09\xb8\x2d\x46\x0d\x00\x9d\xa7\xd9\x1b\x19\xa2\x8c\x7a\x3b\x79\xe3\xe3\xce\x5a\x7a\x38\xf3\xba\x8b\xac\xff\x44\xc3\xd0\x3e\x04\x28\x38\x5a\x9c\xdd\xef\x8c\x17\x9f\x97\xce\xdd\x6b\x67\x48\x7d\x42\xab\x88\x05\x3f\x70\x9b\x7c\xb3\x8c\xf4\x62\x43\xf6\x1b\xb9\x6c\xce\x3c\x30\x7c\xfe\x8c\x8c\x87\x33\x27\x24\xa2\x13\xf5\xea\xfe\x73\x12\x6f\xd7\xd9\xdf\x1b\xe3\x83\x5d\xbf\x68\x62\x07\xd8\x1f\xf0\x24\xb6\xd7\x06\x0e\xc5\x5d\x54\x34\xb6\x91\xa2\xd7\xb6\xb3\xf1\x7b\xef\x6b\xc7\xe7\x67\xb3\xf3\x44\x85\x4b\x48\x1b\xdd\x54\x42\x05\xa4\xd8\x86\x23\xcd\x02\x5d\x06\xe9\x88\x43\xc5\x89\x4e\x41\x24\xa7\xdb\xb8\x79\x35\xb3\xfb\x25\x5a\x74\xd3\x94\xc3\x7c\xd7\xff\x3d\x69\x2b\x64\x8b\x6e\xa7\xe6\xd6\xc1\x87\x7a\x7a\x2c\xf1\x35\x4d\xa2\x2a\xe2\x11\xc0\xed\x8f\xaa\x52\x65\x3a\x68\x3b\x2c\x6e\xcf\x50\x47\xb0\x95\xeb\xdb\x64\x90\xa2\xf3\x35\x94\xee\x90\x54\x22\xaf\x08\x40\xfe\x49\x2e\xad\x2e\x5c\xb1\x70\xc3\xba\xdc\xfc\x93\x47\x6a\x5a\x2e\xbe\xd1\xd5\x59\x37\xb7\x48\x80\x0f\x28\x78\xd4\x7c\xa3\xfc\x95\x53\x0a\xd1\x0c\x6e\xc4\x13\x58\x9a\xda\x1e\x16\xcf\x55\xc7\x12\x8f\xe4\x27\x0c\xe6\x12\x4b\x85\x28\xdf\xb1\x0f\x8d\x53\x52\x55\x73\xfe\xa9\x2b\x2d\x7a\x04\x0c\x63\x76\x60\x0f\xcb\x1d\x15\xda\x6e\x62\x51\xbf\x27\x9a\x8e\x5f\xd5\x53\x17\x2a\xa0\x67\x4f\x93\xa6\xc7\x44\x4b\x48\x52\x6a\x06\xc6\xb6\x6f\xd1\x97\x93\x3e\xd2\x4c\xfa\xf8\x34\x0f\xea\x4c\x7c\xdc\xce\x50\x2d\x6a\x05\x6c\x24\xac\x9e\x10\xb3\x27\x42\x28\x3b\x7f\xca\x5e\x2d\x1d\xc8\xcf\xbd\x08\x25\xa7\xe4\xdc\xd5\x7e\xbf\x58\x5c\xfc\x34\x24\x01\x28\x04\xad\x13\xfc\xd1\x44\xda\x46\x9e\xc3\xba\xc3\x77\x80\x2c\x20\x29\xa0\xf3\x8f\x19\x15\xf2\x1e\x0e\x24\xda\x84\x5c\x71\xf7\xcb\x19\x94\xf6\xd3\xca\x81\xde\x7a\x3d\x1d\x9c\x24\xea\x64\x7d\x83\xd4\x91\x0d\x5f\xa2\x8f\x30\x78\x4e\xf2\x82\x9d\xa1\x5b\x0f\x6e\x7e\x49\x3a\x85\x08\x94\x0d\xa3\x2a\xbd\xb0\xde\x7c\xa9\xb7\xf4\x9e\x28\x13\x72\x5a\xcf\x40\x6d\xb5\x34\x4d\xf1\xcc\xa7\x84\x69\x05\x71\x08\x4d\x85\x12\x94\x35\x07\x5e\x24\x2d\xae\x17\x28\xcc\x62\x70\x3d\xc3\xe3\x43\x59\x9d\x8f\xb4\x87\x3e\x54\x8b\xf9\xb8\xcf\xf4\xc0\xfa\x90\x8c\xc9\xe3\xfe\x4a\x10\xe9\xfe\x48\x72\xe5\x7e\x55\x2f\x48\xcb\x74\xdc\x9c\x05\x98\x69\x38\x28\x5e\x07\x30\xab\x4c\x92\x6e\x62\x48\xda\x61\x46\x8c\x52\xb4\x11\x0f\x27\xbb\xf4\xb8\x3d\x53\xf5\xb8\x7d\x69\xf5\xf0\xe8\x3b\xa9\x40\x31\x07\x0f\xd3\x21\xc4\x6a\x51\x7a\x48\xb1\x5a\x2d\x32\x0e\xd3\x23\xb8\xb8\x63\xef\x6c\x83\xd1\x4f\x73\x0a\x29\x09\x1e\x56\x41\xb4\x09\x25\x96\x3c\xbb\x8d\xed\x92\x7a\x62\x60\x2a\xd4\x9c\x36\x70\x77\x9d\xb1\x11\xe7\xca\x5b\x8f\x35\xf7\xef\xdf\xad\x38\x88\xd6\xe5\x51\x15\x4e\xe2\x6a\x82\x17\x17\x59\x91\x5f\xcd\x7f\x82\x5f\x80\x79\xcd\xc0\x52\x5e\x7a\x4a\x09\x6a\x86\x7f\x8d\x57\xca\xd7\x1c\x37\x64\xf3\xdc\xb8\x97\xa1\x69\x4c\x0d\x91\x26\x7c\xd4\xa1\x71\x3d\x00\x8f\x71\x68\x92\xe4\x33\x6c\xbb\x78\x50\x36\xfd\x92\xe2\x57\x24\xe2\x03\xa8\xba\xdd\x22\x99\xaa\x6e\x91\xfa\x13\xe7\x90\x9e\x45\xc3\x43\x7c\x30\x3c\xb4\xc8\x6b\xa5\x77\x4f\x56\xfa\xc5\xcb\x3f\x0a\xfb\x58\xa3\x2f\x3e\x18\x1f\xb4\xcd\x10\x99\x18\x82\x78\x86\x0f\xcc\xb0\xf9\x6f\x4a\x67\x61\xbd\xc2\x4b\x84\xda\x3a\xa0\x83\x0b\x2e\x3b\x35\x4f\x8f\x0a\x8b\xd8\x32\xcf\x63\x2a\x52\xf7\xc1\x24\x91\x8f\xb6\x84\x8e\x56\xa1\x2b\xef\x28\xf9\x19\xa5\x8e\xcd\xfd\x1c\x72\x50\xf0\x5e\xf2\x68\x0d\xb6\x98\x94\xfd\xc4\xae\x4a\xdf\xd6\x42\x80\xb3\x34\xd0\xf2\xc7\x0c\x78\x3f\x69\xfe\xc7\x9e\x13\xc1\x18\x59\x32\x4c\xfb\x1e\xd2\x64\x79\x27\xd1\x21\xc8\x19\x6f\x2e\x6f\x1f\x59\x1a\x36\x47\xf6\x4
c\x7c\x4a\x8a\x21\xf4\x4a\x7b\xed\x69\x48\x60\x69\xb4\x69\xc5\x10\x7b\xc7\x00\xdd\x29\x65\xaa\x5c\x64\x33\xd8\xd4\x5d\x9a\x4a\xe1\x6d\x32\xdb\x71\x90\x60\xd3\xa0\x50\xd9\x23\xf7\x9c\xa1\x18\xb8\x65\x77\x19\xd4\x0f\x7b\xc5\x28\x1c\x41\x37\x69\xd3\xf8\xa5\x63\x7c\xce\xa1\x68\xab\x23\xe2\xf1\xe1\x04\x3d\x92\xf7\xad\x06\x7d\x7d\xe0\x48\x6f\xb6\x91\x04\xbd\xfb\xc2\x5e\xf9\xf0\x44\xfa\x2a\xb4\x85\x92\x9b\x9c\xa5\x56\x52\x69\xe9\xba\xc4\x3b\xe3\x50\x02\x4b\xd7\x3c\x4a\x73\xe9\x4a\x0f\x2f\xed\x61\xf4\x0e\x28\xdb\x94\x88\x11\x80\xcd\x8a\x6c\x41\x23\x74\xe8\x19\x78\x2f\x7e\x26\xee\x5a\x29\x95\x96\xc0\x63\x7d\xe8\x2e\xca\x36\x27\x81\xc8\x6b\x3f\x22\xcc\x02\x40\x72\x6b\x50\x30\xe5\xed\x20\xc5\x91\xd5\x1b\xee\x5e\xbf\x1d\x48\x05\x76\x84\x78\x26\xf4\x8b\xd8\x2f\xa4\xfe\xfb\x47\x93\xca\xa6\x0f\xe4\x2a\xfb\x9f\x13\x4d\xbb\x63\x16\xce\x3b\x85\x2f\x82\x01\x9c\x0b\x3a\x20\x00\x6f\xde\xad\x3d\xc8\x9c\xf6\x7b\xf7\x48\xd2\x3d\xb4\xf3\x76\xd9\xda\xfb\x3d\x49\x3c\x8a\xfa\x30\x07\x09\xd5\x76\xeb\x2b\x39\x84\x42\xfa\x49\x7e\x0d\x83\x3e\x0a\xc0\xc2\x7d\xc6\xca\x34\x36\xd4\x3d\xef\xf8\x8c\x78\xa0\x07\x61\xd0\x2a\xdb\xdd\x03\x25\x19\x29\x10\xe5\x52\xdb\xe5\xc8\x14\xd8\x39\x22\x2b\x17\x68\x8b\x13\x21\xb3\x9a\x2a\x36\x58\x86\x39\x03\x78\x19\x52\x30\x1e\xfe\x7b\x0a\x95\x10\xb1\x12\x37\x82\x01\xaf\xaa\xd2\x40\x77\x4f\xe7\xa1\x0e\x0d\xb2\x74\xf2\x3b\xb9\x53\x8d\x1b\x04\xbd\x14\xa4\xa0\x5a\x4e\x24\x09\x9b\xba\x37\x21\xa9\x9d\xd3\x2b\x03\x08\x02\xf1\x1b\x80\x1d\x75\x85\xd0\x4e\x0d\x57\x03\x10\xcf\xbb\xc8\x55\xba\xbb\x0a\x82\x8e\x6a\x3f\x7e\xc2\x51\xcf\x6a\xd9\x98\x3d\xe4\x7f\xa1\x50\x85\x90\x17\xe9\x2a\x99\xf9\xf8\xf5\x56\xbc\x2f\x12\x28\x5a\xb2\xf4\xa4\xf2\xa5\x6a\x23\xf1\x65\x21\xb0\xc9\x32\xff\x99\x3e\x6a\xd2\x06\xff\xb9\x15\x02\x49\x29\xaa\xe0\x2c\x29\xa4\x6f\x77\x68\x0d\x78\x53\x35\xd4\x17\xef\x05\xf7\x72\x86\x07\x72\x2f\xdb\x9d\xb9\x8a\xb7\x97\x46\xa1\xb1\x17\xd9\xc5\x7b\x51\x06\xd1\x8e\xf5\xcb\x10\x8d\xda\xf8\x86\x68\x8d\x5a\x71\xa9\x94\xa0\x0d\xe4\xa7\x20\x4e\x45\xfc\xbc\x79\x30\x48\x5b\x64\xe9\xdb\x86\x6c\x5e\x43\x21\x2a\x55\x32\x52\x7f\x99\xc9\xbe\xa3\x13\x96\xb7\x01\xac\x9c\xad\x12\x08\x21\x3f\xe5\xfa\xdf\xa5\x1e\x04\x12\x80\xc8\xad\xdb\xcd\xcc\x77\x0f\x1a\x34\xaa\x96\xb2\x4b\xf8\x68\xf6\xd8\x60\x7f\xb6\x20\x53\x98\x78\xa8\x58\x42\x77\xcc\x21\xa5\x7f\x94\xaf\x43\x52\x63\x40\x2b\x17\xb4\x1c\xc3\x3f\xe6\xd8\xa7\x39\xbb\xe7\x60\x73\xe4\xda\x99\x56\x7a\x85\x56\xd4\xa1\x06\x91\x0f\xc9\x3b\xa3\x3f\xbd\x58\x86\xc2\x93\x9d\xaf\x48\xd3\x6a\x8b\xd0\x00\x83\xdb\xa7\xe7\x14\x68\xa7\xde\x01\xef\xd3\x70\x92\x03\x0c\x8e\xfb\xd6\x88\x6c\x44\x91\x52\xbc\x19\x59\xf4\x7d\xef\x31\x61\x31\x40\x49\x36\xe6\x8d\xec\x59\xca\x5b\x45\xf4\xe8\x7b\xd6\xd0\xb6\x5b\x87\xcf\x8a\x88\xa2\x6e\x87\xe4\x2e\x4f\x91\x8a\x8f\xe8\x7b\x52\x86\x96\x01\xda\x62\x7b\xb2\x4f\x86\x72\x4b\x2f\x49\xaf\x52\x84\x8a\x72\x4e\xaf\x06\xa4\xc9\x74\x6b\xdd\x10\xfa\x54\x14\x48\x4b\x97\xbc\x81\xbb\x33\x68\x27\x21\xed\x09\xc3\x17\xe9\x1b\x4d\x99\xea\xa4\xd5\x4d\x8d\x2a\xbf\x59\xa7\x41\x91\xd2\x94\x3d\x5a\x5e\xc1\xf5\xff\x1c\xed\x2d\x6a\x0c\x92\x53\xe4\xf4\xdb\xc5\x50\xef\xe2\x53\xda\x31\xdc\x87\x50\x08\x76\x91\xce\x68\x87\xf6\x2a\xcb\x45\xfa\x4f\x2e\x47\x35\x11\x24\xe5\x06\xed\x52\x6f\x4b\x8f\x8b\xdf\x91\x59\xe4\x58\xc5\xa2\x30\x58\x8a\xf6\x8d\x6c\x76\x96\xe8\x43\xde\x29\x36\xa4\xc8\x35\x51\x62\x2c\xe9\xd3\x4f\xda\x8c\xb7\x94\xfa\xc5\xe7\xaf\xd0\xb7\xd2\x2b\x45\x0a\x53\x88\x64\x71\xca\x5e\x15\x78\x5b\xbf\x85\xd3\x90\xfa\x4d\xbe\x5c\x53\xdf\xec\x85\xea\x2d\xaf\xbd\x4d\x1e\x96\x50\x6d\xa9\xb7\x47\x5d\xfb\xf1\x18\x59\x38\x64\x79\x2e\x5e\xf3\xf4\x56\xd1\x6b\x6c\x81\x97\x3a\x07\x
14\xfb\x52\x75\x6e\x31\x8e\xb7\x66\x32\x1f\xfe\xb9\x40\x23\xcb\xdf\x55\x0d\xfe\x63\x9f\xb7\x67\x27\xfc\x74\x32\x10\x49\x67\x55\x3a\x0e\x99\x1e\xa1\x03\xf3\xe3\x66\xec\x82\x75\x73\x95\xd3\xc7\xba\x9f\xd2\xb3\xda\xf5\xd5\xd4\xad\x8e\xde\x90\xea\xc6\x60\x58\x33\x98\xe7\xea\xfe\xf4\xba\x9c\xdf\xfa\xf6\x6f\xdc\x20\x57\xd8\xaa\x22\x9e\xaa\xac\x8a\x2a\x27\xa1\xf5\xde\xff\xf8\x9b\x21\x96\x54\x0c\x8e\x49\x86\x97\x75\xe9\x74\x55\x9b\x15\xbe\xb1\x08\x19\xae\x27\x1c\xf9\xbd\x23\x8a\x6f\x31\x14\xbe\x62\xce\x1f\xeb\x02\x20\xba\xcc\x8a\x76\xaf\xc3\x46\xa6\x52\xf0\xc9\xf8\xf7\x05\x97\xf1\x93\xca\x00\x75\xa0\x6b\xbc\xf6\xcc\x77\xa9\x1d\xa2\x5a\xf5\x3d\xd5\xc4\x20\xb0\xed\x96\x8e\x9b\xe7\x19\xd3\x4c\x36\x58\xbf\x2c\x37\x9b\x95\x90\x4b\xd0\x36\xa8\xac\xb3\xc1\x1b\xce\x1e\x72\x73\xc2\x60\x4a\x6a\x41\x79\x2b\xd4\xb6\xcc\xc2\xf1\x40\x9d\xad\x6a\x2d\x6e\xa3\x90\x91\xd7\x1f\x22\x5c\x54\xce\xaa\xc7\x94\x68\x0c\x6e\x48\x5f\x4d\x02\xdc\xd1\x41\xd3\x13\xa8\x7c\x8b\x62\x3d\x37\x0c\x2a\x21\x83\x55\xf2\xcb\xbf\x10\x70\x13\x41\x92\x93\xd0\xbf\xa7\x0d\x7c\xcb\x4d\x6a\x4c\x5b\xa9\x68\x0b\x69\xa6\xfe\xc3\xfc\x6e\xda\x5a\x3d\x0d\x5d\x43\x6d\x1a\xb7\x65\x44\x0e\xf0\xc0\x17\x07\xbd\x4d\x0b\x5b\x10\xdc\x8a\xc6\x1c\xcc\xfa\x5b\x8b\xc5\x42\xce\xa7\x57\xdc\x86\xd9\xcb\x4b\x32\xe9\x62\xdc\x69\x40\x47\x96\xd0\xe8\x2a\x74\x8e\x6d\x61\x7d\x6f\x8c\xaa\x40\x82\x1e\xfb\x08\x43\x21\xad\xb4\x31\x20\xd8\x8a\x21\xce\x83\x5a\x73\x7c\x40\x7b\x70\x33\x66\x03\x8f\x94\x37\x4b\x43\x63\x0b\x80\xdd\x12\xe3\x9d\xb7\x14\x32\x5f\x20\x5c\x63\xf7\x89\x4c\x44\xbd\xc2\x44\x2f\xed\x96\x1a\xa9\xe3\x06\xc3\x96\xcb\x75\x1e\x5f\xe3\x5f\x05\x8c\xb0\xc2\x1f\x3c\xb9\xce\xb3\x29\xfd\x6f\x6b\x91\xf3\x5b\xea\xd0\xd9\xac\x9f\x23\x31\x77\x72\x85\x9a\x9b\xab\x5d\xbc\xd3\xb3\x5c\x6f\xfa\x1c\x20\xea\xf5\xf1\x12\xec\xed\x12\xcb\xb8\x19\xa9\x84\x52\xf2\x0e\x80\x14\xb9\x00\xa0\x40\xc6\xfd\x4b\xf0\x5c\x29\xe5\xef\xb2\x60\x3e\xf6\x1a\x1a\x29\xa5\x62\xc6\x30\xcf\xbf\xe1\x05\x08\x4a\x58\xe3\xde\x44\x60\x03\x39\x5b\x69\x88\xdc\x41\x35\x6b\x28\x49\x64\x0c\xcb\x25\x5b\xfe\xd7\xb0\x9c\x30\xd0\x0e\x9b\x75\x26\x68\x29\xe8\x80\xd0\xfa\xb9\x75\x1a\x30\x43\x52\xb6\xe8\x6e\x6b\x3c\xeb\x3d\xb0\xb7\x76\xc0\x3d\xc8\x11\x54\x2c\xb2\x6d\xcc\xa1\xf4\xf5\xe5\x27\xb2\x56\xad\x40\xac\xf5\x5e\xb8\x20\x8d\xee\x83\x2a\x2a\x35\x46\xb6\x55\x1e\x87\xd5\xdd\x04\x5e\x49\x52\x2c\xad\xf8\x26\xff\xa8\x8f\xcd\x44\xa1\xc5\x75\x14\x1d\x3b\x29\x59\x1e\x6d\x43\x6f\xa1\x1e\xd4\xc0\x01\x0b\x14\xff\x55\xcf\x10\x3a\xb3\x31\xb1\xea\xc2\xb0\xd6\x6a\xa0\x09\x71\x17\x1f\x6d\x51\x19\xc6\x85\xc6\x4a\x6d\x90\x00\x4e\x2d\x5e\x7a\xfb\xfb\x9b\x29\xe9\xff\x43\x88\x6e\xd5\x0d\xce\xe4\x38\x55\x08\x8b\xb9\x7c\x9c\x23\x8a\xd3\xb8\xa8\xd8\x9c\x6e\x9d\x1e\x89\xce\xab\x50\x0d\x55\x97\xaa\xd1\x61\xad\xcf\x78\xc6\xe5\x94\xfa\x4b\x05\x5d\xbc\xa4\xd5\xe6\x50\xee\xaa\x60\x6b\xe4\xf5\x42\x8d\x8d\x53\xa2\x35\xf2\x86\x1d\xa8\xec\x59\xfa\xec\x6d\x45\x62\x3a\xe5\xb5\xdc\xa1\x08\x29\x32\x32\xd4\x03\x49\x9f\xcb\xe9\xdb\x2e\x66\x64\xdb\xc7\x86\xb1\xf0\x22\x94\x3b\xc8\xe9\x3d\xb0\x02\x4b\x2d\x9c\x72\x84\xce\xe4\x5a\x2a\x15\xf8\x0d\x70\xc8\x35\x7b\xe2\xca\x02\x3c\x62\x83\xfd\x13\x0a\x64\x92\x46\x5b\x43\x23\xac\x49\x8b\xd5\x8f\x40\x69\x72\x63\xfc\xa0\x0d\x68\x40\x86\x72\x98\x8d\x24\x84\x60\xda\x97\xda\xd9\x33\x64\xc3\x16\x49\x99\xd9\xdd\xf4\x7c\x0a\x05\xd2\x56\xf2\xbe\xc4\x86\xc3\xd0\xf3\x2a\x03\x17\x8f\x3c\xbf\x5a\x02\x70\xee\x22\xd7\x4e\x49\x18\xe5\xaf\xc4\xbc\xde\x27\x0b\x89\x7c\xc1\xed\xeb\xf5\xc9\x4d\x39\xeb\x54\xb5\x6c\x7a\xa8\x81\x73\x85\x9a\x2a\x9b\xb7\x7d\x98\x7a\xa5\x88\x16\x39\x24\x46\xe6\xaa\xae\x14\xcc\x70\xcc\x45\x9c\xb5\x31\xbd\x80\x3b\
x85\x24\x7d\xab\x13\x93\xeb\x04\xc8\xe7\xae\x84\x24\xb2\x43\x3e\x0c\x4d\x68\x20\x3e\x62\x74\x5f\xda\x53\xaf\x38\x8a\xbf\xda\x26\xdd\x1c\x18\xf4\x24\x3e\x16\x9c\x08\xae\x3a\xa6\x6a\xe0\x41\xf3\xbd\x21\x55\x82\x02\x58\x88\x28\x1e\x42\xea\x0c\x3c\xfe\xca\x1d\x5d\x73\x5b\x98\x58\xf3\x7c\xa9\xc1\x60\x89\x28\x54\xc7\x42\xfe\xc7\x06\xc7\x22\xf9\x20\x84\xde\x38\xe8\x2f\x26\x86\x3b\xf0\x72\x6f\x51\x99\x6b\xef\xe4\x23\x44\x39\x94\x89\x5c\x11\xec\x2d\xa5\xaf\x25\x4b\x58\x2b\xa4\xbf\x96\xf4\x0a\xe9\xaf\xa7\x7d\xe5\x4d\x25\x6c\xd3\xeb\xec\x9f\xb1\x2b\xc7\x45\x9e\x7a\xe3\x7d\x59\xde\x36\x2a\x49\xd2\xac\x80\x42\xd9\xd1\x74\xd0\xf0\x81\x2c\xd8\xad\x4f\xbf\x87\xef\xa0\xb4\x8d\x26\xd4\x35\xfa\x92\x34\xef\xcc\x1e\x1d\xe3\xf3\xd6\x7e\x50\x3a\x85\xab\x7e\xfd\x50\xa7\x5b\xef\xb9\x38\xe6\x60\x69\xaa\x61\x55\x9a\xb8\x84\x29\x8e\xa0\x6b\xce\x35\xc3\x42\x49\x0c\x43\x45\x08\x8c\xb9\xe9\x17\xd8\x5a\x61\x53\x10\x13\xfb\x16\x25\xc6\xd8\x40\x00\xd5\xeb\xf5\xbd\x8b\x83\xeb\xd2\x18\x80\x75\x15\xad\xa5\x01\x51\xfc\xaf\x47\x34\x4c\x08\xa5\xa1\xe3\x0a\xf4\xd3\x28\x43\x36\x5c\x08\xa4\x21\x84\x83\x07\x4b\x7b\xd8\x4a\x6d\xde\x8d\x25\xd2\x36\x0a\x19\x74\x21\x66\x16\x12\x53\x1f\x89\x17\xa6\x7f\x91\xc6\xb6\xa6\x7b\x09\x31\x31\x9b\x19\x95\xd4\x94\xcc\x42\xa8\x2c\x6e\xd8\xec\xb4\x10\x0a\x83\x9c\x59\xfb\x3f\xf5\xfe\x92\xbc\x29\x50\x36\xe3\x6c\x7a\x45\xf2\xa4\x14\xb7\x6c\x38\xe3\x39\xce\x10\x0f\xc3\xc2\xa4\xbb\x06\xa1\x61\x56\x34\x2c\x3b\x97\x83\x1e\x2a\x74\x52\x34\xd5\x06\xd6\xd4\x10\xba\x65\xf7\xa0\xbb\x39\x14\x0b\xb0\x46\x42\x27\x72\x05\x42\x0f\x4d\x12\x6a\x47\x0e\xdd\xb2\xdc\xc4\xcc\x3c\xb9\x48\x34\x37\x2e\x68\xc6\x77\x07\x4e\xfe\x68\x80\x69\xfb\xde\xf9\x4a\x82\x09\x19\xd8\xda\x40\x44\xa0\x1a\x4c\xda\xa2\x6f\x49\x1b\x1c\x73\xbc\xd0\x56\x43\xec\x2c\xe4\xcc\x7e\xe2\xa1\x69\x7b\xa0\xa9\xb0\x8a\xdb\xa4\x47\xb8\x85\xf2\xd7\xf6\x1b\xde\x52\x48\x19\x82\x13\x52\xf7\x1b\x7e\xfb\x35\xad\x0c\x57\x59\x11\xef\xce\xbb\x58\xe2\x51\x35\x0d\xb0\xa8\xb1\x9a\x50\xfa\x29\xf2\x00\x4b\xbb\xd2\xd0\xa4\x9e\x1d\x3a\xd0\xf3\xaf\xa6\x5e\x69\xbe\xb4\xa7\x34\x6e\x10\x86\xb0\x67\xc6\x4d\xad\xe9\xf1\xe7\xb6\x1e\xd4\xeb\x4b\x0f\x78\x27\x79\x68\x39\xd5\x6c\x8a\x88\x0b\x5d\x32\xad\x34\xf9\xb4\x29\x94\xd7\xe6\x25\x04\xeb\x38\xd8\xa6\x09\x6b\x67\xdc\x33\xed\x66\x12\xf2\x79\x4c\x5b\x68\x9f\x4d\x21\xa8\x36\x65\x1b\x4c\x79\xfb\xbf\xbc\x30\x60\x44\x29\xf3\xd8\x88\xdf\x5a\xa1\x94\xa3\x87\x37\x0c\xea\x5b\x10\x24\x24\xe6\x21\x9b\xa0\x6d\x3f\xea\x01\xdb\x5a\x83\xc2\x0e\xab\x2e\x71\x2b\xfd\x73\x0b\xa9\x46\xc5\x87\x58\x69\x56\x87\xa7\x6e\x01\xe6\x87\xd6\xc1\xb0\x91\xd8\x34\x30\xd6\xe8\x53\xeb\x67\x91\x26\x1a\x9c\xee\x22\x71\xc0\x9e\x94\x50\xc4\x3d\xe4\xca\x54\x51\xeb\x20\x3a\x1e\xd2\x75\x1a\xce\x68\x8e\x9b\x3d\xc2\xed\xe5\xd3\x4b\x8d\xed\x22\x71\x34\x58\x3a\xc5\xc5\xb7\xc8\xfe\xb6\x12\xfc\x25\x8e\x38\xb9\x59\xee\xe0\xfe\xc4\x6a\x6b\xf8\xc3\xa1\xe8\x76\x7a\xc9\x06\xb1\xdc\x90\xd8\xa2\xf4\xd1\xed\x02\xe4\x22\xf3\x5a\xcc\xd8\x4d\x92\x6b\x1b\xf8\xcb\x7e\x0e\xc7\xd9\x72\x77\xce\xeb\xb7\xd4\x2f\x5b\x32\x74\x18\xa5\x9b\x04\xe2\xa6\xe8\x77\x20\xd7\x36\x1f\xb9\xc1\xa4\xbd\x0a\x4d\x5d\xea\x19\x0a\x6b\x48\x7e\x95\x30\x95\x99\xc3\xd2\x66\xc3\x7a\x32\xe5\xc2\x2a\xd3\xc1\x16\xb0\x22\xd1\xc6\x5e\x14\x3e\x65\xe5\x49\x49\xa6\x5a\xd9\xc4\xa0\xef\x76\x5e\x8c\x3f\x02\xae\x5d\x3c\x19\x08\xbf\xc5\x92\xe1\x52\xb9\x64\x8b\xf2\xdc\x54\xc5\x96\x00\x63\xb0\xcd\x1f\x97\x6a\x13\xb7\x8e\xd5\xc8\xaa\x06\x42\xfc\x16\x47\x83\xc4\xbf\x17\xae\xbe\xb1\x26\xfd\x93\xb6\x2c\xce\xe0\x95\x05\x47\x11\xab\x6d\xb6\x24\x47\x11\xa0\xb8\x5a\x89\xe7\x80\xd8\x74\x96\x9b\x76\xfc\xe6\xd3\x70\x45\x85\xbe\x5b\x5e\xb9\x6e\xe2\x98
\x1a\x52\x91\xdc\xb5\x94\x10\xeb\x2b\xf9\x06\xd9\x6c\x26\x3e\xdb\x69\xb2\x2c\x2b\xf8\x42\xf8\xf6\x4a\x08\x0c\xc2\x80\x59\x4a\x48\xa8\x7d\x38\x7f\x37\x10\x22\x6a\x6f\x7f\x4e\xf9\xee\x54\x3d\x4c\xa9\xa9\xf7\x95\xcf\x2d\xe4\xd3\x90\xb1\x59\xc8\xf9\x66\xf7\x4f\x19\x2c\xa4\x27\x53\x4d\x0b\xa4\x79\x89\x80\xdd\xc4\x02\xb2\x11\xfe\x6b\xa3\x99\xb7\xe4\x95\xba\x52\x0b\x98\x5a\x37\x01\x16\x12\x21\xcb\x25\x58\x1a\x16\x04\x6a\x44\x75\x4a\x13\x48\x2b\x9d\x6e\x32\xcf\xab\xd6\x08\x97\x1c\xba\x38\xe0\x14\x1d\x78\x01\x2c\x46\x89\xea\xc2\x36\x34\x59\x01\xbb\xb6\x62\xaf\x16\x1c\x70\x85\x12\xd8\x2b\x49\xb9\xeb\x0e\x9d\x39\x9b\xc3\xfb\xf2\x13\x18\x92\xdd\xd1\x0c\x25\x34\xe9\x72\x9d\x73\xfc\xee\x14\xb5\x24\x84\xcf\xb0\xe4\xe3\x35\xc3\x98\xa8\x16\x83\x01\x8f\xa9\x60\x4b\x62\x38\x8d\x95\x76\x6e\x5d\x6d\x57\x1c\x2b\x44\xd0\xa8\x3b\xb6\x9d\x4a\xba\x70\xb9\x33\x55\x65\xab\xd2\x5e\x8b\x2c\x7d\xe4\xd1\xe8\x03\x45\x66\x8c\x7a\x72\x60\x59\xa7\x80\x11\x2b\xe4\x58\x13\x9a\x45\xc4\xa5\x74\x09\xe9\x49\x7a\xaa\x35\x09\x36\x21\x98\x16\xa5\xb4\xdd\xd6\x90\xa1\xd3\xcc\xd0\xd7\x24\x04\x10\x8d\xc0\x4b\x78\x1e\x81\xb4\xca\xca\xd9\x04\xd6\xf4\xe4\x26\x29\xf9\xa5\xe9\x77\xd1\x20\x75\x5d\xbc\xfb\xf7\xb7\x97\xfa\x7a\x4a\xc1\x0d\xfc\x56\xbb\xa6\xcf\x2e\x75\xb6\x09\x70\x5c\x86\xc0\x59\xf2\xd2\x7b\x52\x2b\x3c\xe8\xfd\x79\x1f\x1f\x37\x5a\x9e\x3f\x5b\xca\xa0\x96\xd4\x9f\xf7\x6e\x63\xa4\x83\x45\x12\x5f\xf7\xc2\xf5\x2a\x07\x5e\xce\x2a\x78\x99\x50\x46\x23\x43\x1e\xc4\xc5\x24\x2d\x56\x9b\xf8\xd0\x51\xd7\xd8\xc7\x78\x8d\xe7\x21\x22\x30\x03\x48\x56\x12\xdc\x38\x5f\x75\x09\x31\x9e\x2c\x5d\x75\x64\xb5\x0f\x4a\xb5\x5b\xd9\x06\xee\x67\xc4\x63\x3f\xeb\x57\xe2\x63\x55\xda\x39\xcf\xca\xe5\x02\x17\x10\xcb\xa1\xc0\x75\x99\x71\xc6\x06\xfa\xac\xe2\x09\x86\x44\x98\x46\x7f\x68\x84\x29\x19\xf7\x59\x19\x6e\x63\x65\x27\x41\x2e\xc4\xd2\xf3\xa8\x06\x28\x62\x54\xb7\x0f\x37\x37\x69\x54\x3f\xab\xbb\x24\xad\xf0\x07\x09\x46\x55\x9e\x62\xe1\x82\x0b\xc0\xf3\x27\x51\xe6\x21\x54\xcf\x3a\xa9\x0e\xf9\x54\x3c\x87\x41\x91\x55\x43\x1f\x8c\xea\x45\xb5\x97\x14\x5a\x3f\xcf\x91\xd9\xfc\xac\x5d\x1c\xd6\xd1\xe5\xf4\x04\x91\x8a\x3c\xa9\x4f\x38\xed\xfc\x01\x6e\xc9\x5e\x2d\xe9\xd9\x9e\xf2\x7a\x3d\x97\x5b\x1c\x4c\x50\xd5\xd6\x65\x97\xf6\x1a\x96\x62\xf3\x53\xc9\x84\xf9\x37\xa8\x54\xc2\xe7\xa2\xb8\x37\x03\xeb\x53\xa0\x6c\xa1\xdf\xc4\x0e\x8a\x22\x51\xa1\x10\x49\x3c\x8e\x71\x8d\xdc\xb7\x73\x25\x29\x84\x9d\x3e\x91\x7f\x82\x2f\x56\x3a\x5f\xa0\x90\xd0\x3a\xda\xb3\xc0\x99\xae\xba\x16\xc6\xdb\xa3\xd4\xf9\x0c\xf1\x6c\x70\x41\xc4\x51\x5b\x24\x57\x19\xdc\x6e\xbd\x63\x48\xba\x7c\x02\x48\x56\xaf\x80\x38\x8c\x3b\x97\x2b\x4a\x5e\x01\x09\xd0\x1f\x5d\xc3\x06\x19\x72\xfd\x02\x26\x1e\x47\x1a\x95\x27\x08\x64\x79\xaa\x1c\x72\x6b\xa5\xe9\xc6\xcd\x69\xf1\x1e\xe0\x99\xad\x71\x7a\x79\x86\x06\x5e\x16\x9f\xdf\x33\x1c\x02\xcf\xc6\x7c\x88\x75\x0a\xc5\xe1\x3c\x3d\x11\x96\x0d\x32\x4b\x48\x0d\x41\xe3\xd2\x2b\xab\xeb\x6d\xdd\x17\xe3\xbe\x9e\x11\x55\xf1\xcc\x5b\xd3\xc8\x7e\x86\x0e\x35\xd8\xbd\xbe\xc8\x47\x66\xd3\xb3\xa1\x46\xcf\x0f\xf9\x0f\x27\x41\x3e\x9a\xfe\xd8\xc2\x49\xcd\x13\x53\xf7\x38\x90\x6a\x7a\x08\xd3\xa0\xde\x57\x2e\x6c\x09\x79\xe2\x6f\x74\x58\xe2\x9c\xea\x99\x3e\x71\x3b\x89\x92\x37\x4f\x9b\x87\x4b\x5e\xea\x69\xf3\xa7\xae\x36\x29\xb2\x0b\x8c\x3e\x52\xc8\xf3\xc5\x30\x82\xfd\xa0\xc1\xf9\x84\x2b\x58\x82\x6e\xc8\xca\x1c\xe3\x87\xea\x2d\x9f\x29\xd4\xd4\xa4\x22\xf9\x6c\xac\x87\x4f\x10\xd9\x35\x8d\xb0\x7d\x9f\x19\x76\xf7\xc4\x04\xc6\x5f\x88\x4d\x37\x98\xf1\xfe\x24\x9d\x1c\x51\x6a\xea\x66\xcb\xcd\xd3\x70\xf9\xd7\xca\xdd\x3e\x60\xbe\x0f\x9b\x0b\x82\x08\x2c\x7e\xe4\x81\xc7\xcf\x54\xfc\xeb\x4a\xb1\xe2\xfb\x4c\xad\xb9\x2b\xb
5\x1c\x09\x1e\xd5\x4b\xce\xf2\x51\x52\xa5\x8b\x2a\x63\xde\x2a\x0c\xfb\xe4\xe7\x71\x33\x2a\xf8\x71\xcb\x8c\x7e\x20\x7c\x4c\xe7\x99\x78\x1a\x2d\xfa\x3e\xaa\x56\xf8\x1f\x15\xb6\x2d\xd5\x12\x1f\x35\x7a\xbb\x87\x75\x52\x6e\xd7\x3c\xaa\x77\x28\x8f\x3a\x6f\xbe\x58\x48\x81\x32\xfe\x67\xa8\x85\xc7\x3a\xe9\xb6\x23\x7e\xcd\x8f\xb2\x35\x51\xf7\x07\x3a\x4d\xd6\xae\x44\x86\xe4\xc3\x99\xa4\x2e\x22\x2d\xc5\x3f\x82\x53\xd8\x41\xfe\x8f\x6a\x66\xc1\xeb\xfd\x80\x7f\x88\x15\x32\x03\xc4\xf5\x01\x48\xde\xf9\x48\x5f\x5f\xce\x7b\x24\x90\xe5\x39\x90\xd5\x61\xf3\xcb\x2f\x26\x00\x54\x30\xb3\x47\xcf\x75\x9f\x87\xdd\x2d\x43\x51\x1e\xe9\xe9\x6a\x8e\x8f\x04\x0d\x4b\x52\x78\x3c\x6c\xd8\xec\x28\x1f\xfa\xe0\xdc\xb6\x7c\x13\xcc\x32\x4a\x95\xbd\x13\xa5\xff\x6d\x2c\x62\xd0\x45\xb9\x90\xe4\x01\x70\x1e\x29\x8b\x4b\xa1\x9c\x92\x1f\x43\x44\x21\xbd\x03\x98\x1a\xa1\x22\x25\xfc\x4f\x2e\x88\xa6\x10\x0e\xe4\x22\x4e\x12\x37\x9b\xf2\x24\xb6\x4d\xcf\xbb\x3c\x79\x72\xd4\xdd\xbf\xb5\x02\x07\x31\xe5\xd7\x7a\x9b\xcf\xd3\xe5\x36\x7f\xf8\x30\xad\xa4\xa2\x90\x98\x04\xe7\x9b\x2e\x3a\x94\x14\xef\xb9\x2f\x2e\x7d\x40\xf2\x8c\x2a\x54\xf7\x21\xb1\x9f\x3b\xb8\x21\xe6\x9b\x44\x31\xf3\x2d\x6d\xcb\xf9\xde\xb8\x8c\x07\x90\x75\xb4\xa4\x9c\xee\x26\x98\x36\x5f\xc9\xee\xf1\xe0\xf5\xa4\x00\x75\x8f\xd2\x40\x0b\x49\xa8\xc3\x1e\xae\x7f\x2a\xb3\xd4\x84\xaa\x84\x7c\xe6\xfa\x3a\xf3\x4e\xbf\xae\xa7\xf2\xbb\x53\x79\x46\xc4\x5b\x88\x93\x61\x49\xd6\x01\x33\x24\xa0\x8c\x26\x55\x35\x43\x4d\x4e\x0c\xba\xbb\x3e\xfd\xf2\xd0\xff\x4d\x80\x05\x0f\x56\x06\xf2\x5c\xa3\x0a\x75\x2f\xfc\x81\x84\xc7\xea\xa4\x77\x34\x8b\x52\x76\x46\xca\xac\xce\xbf\x66\xf5\x53\x73\x5d\xa4\xcf\x54\x97\x6b\x3a\xf2\x3b\x0b\xeb\x1e\x9a\x2c\xbb\x43\xd2\x02\x18\xfc\xfd\xc8\xa9\x09\xe6\xac\x73\x2d\xce\x0a\xf8\xd4\x46\x92\xbc\x9b\x87\x68\xa8\x5a\x0a\x49\x9a\xe1\xab\x8f\xfa\x8d\x4a\x32\x98\x6d\xd2\x15\x12\x6a\xba\xab\x34\x85\x36\x5b\x39\x68\xca\x20\xe1\x01\xeb\x89\x4d\x8b\x0a\xf3\xa6\x8d\x20\xb4\xa8\x1c\xc5\x7f\xcd\xd8\xf6\x5a\x43\x63\x73\xb4\xae\x91\x58\x8a\x62\x45\x66\x9f\x07\x95\x48\xe5\x2c\x78\x32\x1c\x51\xf8\x0f\xe4\xac\x4d\xec\xab\x4c\xe1\x35\x30\xa8\xe1\x17\x32\xc4\x88\x6b\xe7\x11\x99\xec\x50\x60\xa0\x90\x48\x1b\x5c\xe1\xd2\x1f\x3b\xbe\xf1\x9a\x0d\xea\xf8\x03\x19\x18\x92\xcc\xc2\x86\x6c\x08\xa9\xa1\x55\x42\x90\xac\x4c\x65\x89\xcd\x76\x3e\x29\x3a\x00\x0d\x02\x9d\x74\xc7\x0e\x26\x33\x02\xf0\x5d\xe4\x9d\x47\xb8\x90\xda\x42\x88\xae\x58\xa2\x7d\x59\x1c\x85\x0e\x7e\xb2\x58\x1e\xd4\x23\xd3\x04\x1b\x2a\x87\x43\x35\xde\x86\x26\x91\x32\xe7\x95\xa4\x84\x00\x2a\x9d\x83\x38\xd4\xd6\xd4\xda\x91\x63\xc2\x7f\x2f\xab\xe4\xdb\x96\xb5\x49\x5d\xcf\x39\xde\x46\x0e\x09\xb5\x65\xa4\x77\x01\xb1\xee\xba\x80\xcd\x26\x0e\xa2\xf2\x0d\xf9\xb5\x98\x5f\x23\x33\xf2\xa3\x32\xfc\x0d\x4e\xe9\xd5\xe4\xde\x1c\x13\xfd\xb9\xd3\x50\x7d\xcb\x8c\x92\x37\xc0\x6a\x64\xd9\x72\x73\xfe\x31\x3b\x38\x8e\x7c\xdb\x39\xa7\x97\xaa\x9a\x86\x10\x0c\x43\x78\x4d\xa4\x05\xdb\x96\x22\x6a\x0c\xa5\x27\xe3\x37\xe6\xf4\x7e\x9e\xa1\x1f\xf6\x72\xf9\xd5\x50\x79\xbb\x58\xe8\xc1\xa5\x9b\x5d\x55\xba\x3b\x7d\xc4\x49\x4b\x55\x0e\xda\x2d\x9a\x29\x11\x9a\x89\x66\x7e\xbb\x6b\xc2\x25\xe0\x82\x2b\x03\x29\x3a\x25\xd0\x1e\x00\x61\xad\xa1\xf2\xb6\x96\xa6\xdd\xa6\xfc\x29\xa7\x3a\x8a\x7d\x85\x9d\x63\x3a\xa8\x6b\xe3\x60\x15\xc0\xbf\x59\xfb\xed\x1b\xda\x6d\x2f\xdd\x9c\x3a\x21\x97\x7a\xcb\x42\x7f\x84\xd7\x36\xb5\xaa\xa4\x9c\xf3\x39\xb5\xdc\x0f\x83\x23\x0b\xad\x20\x52\xb2\x6d\x09\x75\x36\x11\x0f\x1a\x3a\x93\x74\xec\x16\x56\x6d\x59\x22\xf3\x73\x6e\x72\x47\x33\x99\x71\xad\x50\xf2\x83\xa1\xc4\x43\x1e\x74\x44\xcf\x88\x00\xfd\xbd\x35\xdd\xd1\xcc\x9b\x9c\xa2\xc2\x13\x97\x05\xe6\x24\x9d\xc6\x34\x
e8\x8e\x87\x21\x5e\xa1\x07\x5e\xf3\x9a\x29\x04\xee\x9c\x00\x6e\xba\xbf\x5f\x79\xec\x0c\x7e\xbc\xb8\xfe\x48\xb8\x85\xb7\xc8\x50\x9b\x39\x41\x0e\x8e\xcb\x9a\xd3\x7d\x70\xf8\x71\x01\xb8\xc3\x81\x75\x1e\xb2\x7e\xa7\xbb\x89\x8e\xdd\xb4\x07\xa7\xbb\x2c\x2b\x1d\x11\x80\x85\xeb\x4f\x06\xf5\x5b\xf6\x76\x28\x25\xc6\x75\x87\xe7\xcd\x90\xe6\x58\xa0\x23\x2b\x49\xe2\x6a\x77\xa8\x8e\x21\xda\x9e\x86\xa1\xc1\x53\xfd\x39\xe0\xcc\xd2\x67\x8c\x13\x32\x95\x55\xc6\x7d\x1d\x35\x6b\x58\x9e\x0e\xb2\x93\x4e\x1e\x56\x3e\x8e\xc2\x3f\x15\x0d\x97\x8a\xd3\x5e\xe6\x15\x18\x50\xda\xed\xe4\x5c\x0e\x3c\x2a\x6f\xbf\x5f\x64\x7a\x19\x26\xb0\x39\xc8\xfa\x07\x8e\xff\x4e\xbc\x8d\xfc\x47\xb3\x13\x72\x06\xdc\x1b\x9f\x95\x6b\xc8\xf1\x22\xd2\xbd\x83\x82\xf7\x87\x60\xd0\xfa\xc1\x54\xf9\xcd\x4f\x88\xbf\xca\x2b\x19\x4b\x6c\x83\x8b\xae\x93\x8d\x58\xfe\x83\x1a\x22\xc7\x56\x1d\xbe\x24\x33\x52\xf4\xea\x6a\x15\xf5\x01\x5c\x53\xef\x24\xa9\x35\x68\x9c\xb2\x9a\x36\xdf\xae\x2c\x25\x1e\x86\x65\xf8\x85\x44\x12\x53\x5d\x5b\x6c\xa6\xe1\x8e\xcf\xbb\x2e\x75\xd3\xa1\x91\xe3\x0c\xa9\xb9\xeb\x48\xbb\x60\x70\x2b\x4f\x35\x2a\xba\xb8\x0d\x39\x55\xdd\xd5\xa4\x5f\xb9\xba\xb2\x23\x4a\x61\xe8\x57\xc3\x53\x77\x3d\x84\x62\xda\x22\xfd\xb0\x85\xa9\xa8\x13\x1c\x5a\x3c\xd8\x65\xeb\x3e\x42\xda\x57\x5e\x45\x92\x7a\xc5\x8c\xab\xa3\x48\x7c\xee\x64\x80\x30\x54\xe8\x9a\x18\xc1\x54\xb6\xca\x37\x53\x36\x29\xcd\xf9\x2a\xdc\xde\xde\x5c\x41\x52\x0f\x1b\xbc\x73\x57\x10\xc4\x3f\x27\x6a\x1c\x4f\x25\x77\x71\xf1\xae\x93\xd8\x5a\x31\x33\xd9\x9d\x16\x8e\x16\x86\xdc\x01\xf7\x7f\xe0\xdc\xb4\xec\x78\x87\x1c\x4b\x25\x75\xa7\x0d\xc0\x31\x02\x89\xb0\xee\xe9\x15\xb5\xb9\x6d\x91\x28\xdd\xa1\x66\x82\x0e\x9e\x2a\x8e\x59\x91\x30\x06\x32\x33\x8c\x26\x2c\x5e\xbb\xca\x9b\x02\x0c\xac\xec\xdb\x32\x08\x04\xed\x24\x9a\xd7\x5c\xd7\x54\xb3\xe3\xb5\xe8\x89\x9b\x72\x34\xa0\xf4\x66\x3c\xa6\xeb\xd6\xf1\x0e\xb0\x02\x97\x08\x0e\xad\x17\x39\x04\xb9\xf2\xc5\x8d\x3a\x52\x13\xc0\x60\xf9\x09\x3e\xa6\x83\x52\x5e\xc4\x79\xf9\x89\xd1\x29\x92\x17\xc2\x73\xa3\xda\x4e\xa2\x5f\xdb\x5a\x90\x1c\x3b\x40\x73\xa0\xc9\x89\x05\xbd\xc7\x80\x58\x06\xfb\x96\x14\x59\xa6\x93\x96\x52\x51\xaa\xf6\xcb\x12\x72\x77\x8b\x19\x6d\x3a\x9c\xd7\x44\xcb\x22\x2a\x13\xeb\x36\x90\x91\x64\x4a\x1d\x0d\x20\x68\xce\xb9\xd8\xd6\x87\x03\x03\xc4\xe6\x28\x48\x76\x73\x32\x07\x6d\x39\xe5\x5a\x18\x94\x8c\xd8\xfd\xbc\x15\x71\x38\xde\x8f\xa2\x81\x7a\x04\x0f\x44\xfe\x8f\xea\x72\xcc\xc6\x1d\xc9\x7e\x38\x1e\x1f\xfd\x10\x04\x0e\x5e\x5a\xb7\xf8\xd3\x75\x3b\x82\x50\x06\xa8\xd7\x64\x89\x4a\x74\xb1\x5b\xe6\xde\x88\x0c\x1b\x3f\xb3\x67\x76\x4a\xe9\xec\x28\x21\xd4\x76\x20\x92\xba\x19\x6b\xbe\x19\xf2\x70\xae\x6e\xc3\x7b\x3c\x42\x9a\xee\x94\xd0\x5d\x0b\x80\x18\xb5\x78\x6a\x25\xfd\x87\xd0\xb1\xd3\xf2\x82\x4b\xda\xc5\x51\x22\x2d\x01\xf2\x07\xea\x80\x39\xc3\x06\x21\x30\x24\x64\x66\xdf\x33\x6a\x93\xb5\x3c\x31\x1e\x4d\x31\xed\x48\x1f\x29\xad\x1d\xa1\x99\x38\x92\xfc\xbb\xfc\x27\xa1\x3c\xed\xdb\x42\x95\x0f\xab\x18\x67\xaf\xe7\xa9\xd5\x35\x03\x53\x3c\x0c\xce\x2e\xc6\xfa\xcf\x7f\x58\xdf\x8b\xa8\x66\xc7\x96\xaa\x89\xc0\x18\x1f\xa8\x0c\x6c\xe9\x92\x3a\x5f\x70\x45\x8d\x5a\x4c\x1a\xa5\x53\x3a\xd6\xe9\x62\xc1\x05\xd7\x11\xc1\x44\x97\xfe\x13\x53\xe1\x71\xf9\x4c\x94\x9e\x19\xed\xc1\xb1\x2c\x72\x82\x8c\xec\x72\xc6\x85\x2e\x15\x94\xda\x9f\x60\xd1\xf1\xe7\xde\xba\xc7\x07\xed\xfe\xb1\x20\x04\x84\x29\xfe\x23\xd4\x22\x78\x7c\xf9\x2d\x8c\x8d\xee\xfe\x53\xf3\x28\xb2\xa5\x46\x84\xee\x27\x21\x1a\x1f\x63\x59\x98\x68\x04\x2d\xbd\x5e\xc1\x8f\x86\xf9\xd2\xe0\xf9\xba\x04\x24\xd4\x07\xbf\xcc\x46\xc4\x4f\x12\xfd\x47\x13\xd2\x43\x22\x1e\x1b\x08\x30\x73\x99\x46\xda\x04\x23\x9c\x87\x0c\xb7\x31\xf8\xd4\x2e\xef\
xcf\x38\x3f\x1c\x95\x6a\x3a\x4a\x1f\x09\x25\x8b\xad\x84\x54\x63\xe6\x4a\xc3\x98\x49\x1f\x69\x3d\xa6\x0b\x02\x8d\xe1\x3a\xc4\xbc\x48\xf9\x99\x23\xe6\x77\xb3\xc8\x5a\x6d\xe3\x51\x79\xc9\xe2\x5c\xde\x0e\xe7\xa6\x0a\x48\xb1\xbc\x9c\xa3\xd9\x59\x63\x4d\x21\xe6\xa7\x00\x00\x43\xfa\xcc\x73\x38\xf7\x46\x31\x36\x8c\x29\xbe\x5f\x18\xe8\xfe\x1f\xd7\x08\xfa\x47\x74\x46\x6b\x4d\x5c\xc2\xb0\xb2\xd7\x36\xe7\x90\x23\x1a\xb4\x57\xc9\xfa\xb3\x5b\x3a\x4e\xc0\xf4\x08\x19\x4a\xfa\xc1\xd6\x4e\xb5\x91\xff\xde\x00\x9f\x77\xf2\xa4\x00\x1e\x2e\xa7\xd7\x88\x05\xff\x37\x41\x28\x0a\x3a\xe9\x5d\xe2\xcc\x7f\x4c\x0f\x95\x73\x48\x21\xa6\x4e\xbf\x6c\xb4\x25\xf9\x83\x30\x36\x89\xca\x7d\x34\x63\xce\xff\xe4\x23\xc9\xff\xfc\xf8\xfc\xd2\x88\xe5\xa1\xb5\xec\x89\xb3\x73\xb1\x01\x80\xd6\xcb\xdb\x55\xd6\xfd\xe6\xab\x2f\xa4\xf8\xcd\xd1\x76\xf3\xa9\x46\x94\x7f\x76\x9b\x0d\xd1\x0b\x8d\x4e\x0c\xd6\xdb\x09\x03\x99\x1b\x88\x1b\xf0\x06\x90\x91\xb3\xde\x71\x90\xc9\x9e\xa7\xee\x9f\xe0\xdf\xac\x8b\xe6\x62\x73\xa9\x58\xfe\x77\x09\xf2\x11\x98\xa9\xf2\x23\x1a\x14\x9d\x64\x10\x21\x3a\x63\x76\x48\x10\x36\x73\x3e\x07\x6b\x5a\xe6\x93\xcf\x72\x1e\x90\x9b\x95\x40\x24\x1f\x40\x4d\x90\xcf\x3a\xee\x8d\x61\x1b\x79\xe3\xe2\x84\xf2\x76\xf2\xf2\xf5\x79\x2c\xf8\x02\x19\x2c\x18\xad\x19\xf6\x00\x14\x12\x7d\x42\x8b\x8c\x14\xee\x1c\xe3\x8b\x01\xd1\x8d\xce\xbe\x64\xe5\xda\x01\xf1\x7f\x58\x62\xe6\xd9\x44\x87\x01\x32\xc7\x3d\xb9\x58\x56\xa6\x16\x8a\x75\xd8\x48\xa8\xf7\x33\x94\x48\xf2\x64\x2e\x7b\x0e\x84\x72\xa6\x46\x73\x9e\x6e\xf2\x82\xf9\xda\xa8\x9f\x72\xa8\xb7\x74\x1d\x07\x33\x2b\x98\xdd\x12\x71\x67\x59\xf9\xa0\xb9\xfb\x49\x0d\xbe\xc2\x49\x61\x9f\x0e\x2d\x1a\x10\xb6\x4b\xa7\x50\x69\x10\x59\x13\xa2\xe1\x7b\x73\xf0\x1d\x3e\xb4\x95\x87\x4f\xd4\x71\x78\x73\xde\x3e\x40\x88\xea\x4d\xc0\xb8\xe3\xe1\x3e\xdd\xd8\xb3\x32\x04\xf5\xee\xb3\x09\x15\x7e\xa8\x1b\x77\x1f\xf7\xae\xff\x1d\x97\xb8\x08\x05\xf5\x7f\xbe\xe2\xe1\x66\x5c\x34\x4a\x29\xc3\x0f\xb7\xa6\x97\x03\x79\x0a\x20\x63\xe8\x75\xbc\xf9\x89\xa1\x5c\x54\x25\x05\x58\x1a\x28\x8b\xaf\x11\xba\xd6\x21\xaf\x10\x92\x87\xf4\xdf\x0f\xf7\x4f\xf8\x76\x40\x34\x53\xe1\xde\xfe\x19\xd2\x7e\x58\xf1\x25\x27\xdf\x70\x77\x71\xcf\x64\x5f\x41\x29\x31\xc2\xdb\xed\x9b\xe1\xb8\x29\x9c\x06\x9d\x10\x3d\x84\x23\x9c\x5c\x03\x5c\x74\xbc\xf8\x21\x6e\xd6\xe1\xf8\x85\x17\x0e\x48\xe9\x8c\xdf\x24\xa6\x0e\x0e\x9e\xc0\xa8\x5d\x1d\x4d\xbd\xa1\x7e\xb8\xa7\xbe\x07\x3d\xde\x7a\x77\x5e\x1c\x21\xa7\x88\x21\x48\x92\x7b\xf5\xcf\x17\x39\x44\xe2\xdb\x50\x5b\x1c\xff\xa0\xf5\x43\x2b\x7b\xfe\x6b\xa3\x16\xfa\x50\x23\xbc\xdb\x91\x97\x88\x8f\xf2\xa5\xf0\xa1\x2e\x3c\xc6\xda\x07\x48\x5c\x89\xa5\x7a\x56\xe9\xe7\x1a\x6a\x91\x1e\x5b\x0b\xfc\x35\xa4\x82\x81\x75\x66\x84\x32\x25\x71\xa8\x14\xfd\x47\xf9\x53\x80\xec\x24\x8a\x08\x9e\xce\x2c\x4d\xb6\xc2\xd1\x95\x75\x2f\xf6\xbc\x4f\xee\x3e\x6a\xdf\x13\x85\xb2\xee\x50\xea\xee\xc5\x26\x9e\x41\x84\x16\x37\xc9\xc4\x35\x98\x11\x5d\x4e\x88\x3d\x91\x41\x24\x39\xf9\xe4\xde\xa7\x89\x2f\xe7\x02\x34\x18\x2a\x71\x65\x54\x24\x0f\x34\x1b\x55\x3b\xbc\xa1\x01\x19\x37\x47\x6c\xf2\xb4\x63\xd6\x5a\xa4\xc1\xd4\x49\xf3\x31\x87\x5c\xbc\xcb\x3b\x36\x88\x34\x3c\x31\xf3\xfa\xca\x92\x6c\x43\x17\x6e\xdc\xdd\x66\x1e\x4a\x68\xea\x95\xf4\x35\x6b\x2f\x4b\x3a\x32\xa9\x66\x59\x13\x16\x00\x69\x15\x86\xe6\x25\x00\x4b\x7f\x8e\x97\x30\xeb\x8b\x81\x23\xe4\x22\xf3\x8b\x0e\x96\x21\xe8\x77\x87\x3f\x49\x76\xd0\x7c\xac\x21\x5b\x78\x85\xab\x70\x08\x96\x5c\x17\xf8\x31\xa3\x2d\x24\x1f\x53\x68\x2d\xb6\x17\x03\x2f\x28\x8f\x3c\xa4\x2a\x18\x42\x94\xd0\xa8\x5d\xdb\x3f\x53\xa8\x4d\xfe\xef\xb6\xe1\x39\x84\x1d\x93\xf4\x0c\xf7\x34\xdc\xac\xce\xf6\x91\x60\xe1\x26\xf1\x40\x1f\x36\x78\xf8\xd6
\xf8\x94\x86\xac\xcc\x9e\x21\x2b\x70\x1f\xa0\x31\xa9\x62\xc3\x7e\x36\x6d\x9f\xb6\xc1\xdb\x5a\x33\x7b\x33\xd8\x91\xfc\xf9\x02\xf2\x01\x9e\x7e\xd9\xcb\x25\x90\x76\x1d\x56\x7f\x9a\x8f\x52\x48\xcd\x7a\x33\x0b\x52\x20\x02\x4a\x1b\x14\xeb\x7d\x7b\xaa\x8a\x3c\xb2\x4d\x4e\x29\x1d\x34\xb9\x4a\xbc\x64\xcc\x51\x4f\x6d\x29\x76\xc7\x20\xd4\xb2\x6a\x40\x06\x79\x08\x12\xa5\x77\x3c\x13\x04\xad\xf4\xba\x46\x5a\xee\xaf\xd0\x7c\xab\x1d\x20\x9c\x03\xd7\xe0\x21\xcf\x2c\xab\xdf\x20\x45\x62\xf3\x23\xe5\x78\x92\x0f\x88\x3c\x48\x8d\x72\xd6\xbf\xe7\x78\x31\x13\xfb\x2a\xcc\x6c\xb8\xfd\xd3\x27\x8d\x1e\xdc\x85\x48\x08\x86\x23\xee\x05\xd9\xa3\x90\x02\xe1\xc6\x5c\xd4\x58\xfa\x1c\xcd\x06\xba\x63\xbc\x44\xfb\x44\x72\x53\x2e\x85\xf0\x86\xc4\x7c\x29\x3f\xa9\xb7\x6a\x66\x14\x0f\x80\x00\x65\x28\x4f\x62\x0a\x4b\x85\x58\x17\x28\xa1\xef\x62\x48\x5a\x4f\x19\xd2\x4b\x2d\xcf\xe6\xdd\x7e\x42\x30\x26\x7b\x79\x4e\xf7\xce\x5e\x21\x35\xb5\x52\xcc\x80\x45\x3e\x3d\x84\x47\x75\x48\xf5\xe6\xa9\xb6\x53\x1a\xea\x83\x2f\xdd\xf2\x9f\x5b\xd3\x5f\x33\x9c\x67\xfd\xbb\xd7\x45\x57\x49\x7e\x42\xdf\x80\xd7\x5a\xfa\xd8\x35\x2f\x8d\x5c\x73\x48\x43\x30\xd5\x0c\xa9\xbf\x99\xda\x6d\x28\x49\xd2\x7b\x60\xa0\x62\xff\x39\x94\xd6\xd9\x7f\x10\x7a\x84\x7a\xf6\x6f\xee\xb8\xe5\xb6\xea\x6f\xa5\xab\x18\xf0\x06\x6d\xa5\xcf\x3f\x50\x56\x2f\x29\xf0\x8e\xd2\x8d\x0d\x00\xa9\x9f\xd9\x84\x59\x2a\x8b\xb7\x35\x3c\x9f\xcd\xf7\x66\x5e\x6f\x4a\xca\x84\x7e\xa4\x9f\xe2\x94\x2d\xd1\x43\x31\xae\xf0\x2c\x3f\x71\x2d\xc0\x95\x19\xd6\x80\xa1\x21\x59\xef\x36\x83\x80\xb2\x3c\xd5\x05\x1b\xed\x86\x99\x9e\x67\x72\x77\x23\x88\x93\xa8\x52\x07\x53\xb7\x0a\xc4\x60\xd1\x3b\x61\x83\xe4\xf8\x40\xb0\x20\xd5\x4a\x48\xa6\x90\x62\xa1\xaf\xdf\x6f\x96\xde\xe2\xcb\xbb\x09\x2a\x4e\xc6\xda\x31\xb6\x7a\x05\x2e\x19\x8e\xfc\xc6\x1e\x22\x00\xb3\x64\x14\xef\x58\x2d\xb3\xd6\x70\x84\xb4\xe3\x39\x81\x40\xc1\xa1\x8d\xb9\x14\x2d\xe9\xab\xd2\x84\xc0\x5b\x51\x29\x52\x07\x99\x49\x5e\xc8\x97\x9a\x43\x53\xb1\xd6\x2d\x02\xbe\x6c\x42\x1d\xd1\x21\x14\x94\xd4\x15\xa0\xc7\xc1\x3b\xae\x5b\x88\x65\x6e\x87\xa4\x28\xb1\x5c\xa8\x5f\x04\xff\x30\x1c\x63\x12\xd7\xc4\x4a\x36\x91\x52\x95\x0d\x34\x3d\xcd\xc5\x66\xc5\xaa\xe5\x22\x82\x0d\x50\xa5\xbd\xa4\x46\x57\x17\x09\x50\x16\x5d\x60\x9a\xf4\xc3\xbc\x49\x44\x32\x28\xfb\x7b\xd2\x77\xc2\x08\x56\x80\xb9\x43\xbe\x55\x5f\xb2\x0f\xfd\x49\x6a\xf0\x80\xe4\x4a\x34\xb3\xfd\x82\xd5\xd6\x8d\xc8\x5f\xcf\xa2\x47\xb7\x80\x62\x44\x69\x2c\xfd\xd2\x84\x19\x97\x18\x40\x30\xc1\x49\x43\xd3\x93\x0c\x41\x44\xb3\x5a\xd9\x44\x8b\x34\x5d\x9b\x40\x24\xf7\x42\x23\xac\x49\x47\xaa\x73\x05\x18\x14\x37\x0f\x71\x48\xc5\xc3\xf6\x33\x69\x73\xac\xaf\x3a\xa4\xea\x58\xec\x49\xf9\xb4\x08\xa2\x8f\x89\xa9\x5f\x98\xbc\xd1\x42\x75\x62\x7c\x21\x3e\xf7\xd9\x17\x95\x1f\x92\x8c\x4c\x1a\x6d\x81\xd2\xb4\x71\x60\x76\xa9\xc8\xa2\xdd\xfb\x7e\x4b\xa2\x12\xbd\x89\x4e\xb1\xc6\x24\x0d\xe4\x0c\xf7\xce\x75\x05\xe0\x31\xf6\x8e\x63\x9c\x6a\x90\x96\x25\xf4\x47\x42\x1c\x35\x9f\x21\x75\xe8\x4f\xb1\x0a\x49\xfe\x35\xab\x2b\xed\x7d\xf1\x92\xff\xdc\x94\x1d\xd7\x43\x27\x4a\x2a\x89\xd0\xbe\x08\xf9\xc7\x97\x74\x11\x41\xb2\x1c\x4a\x87\x27\x73\x2d\x7b\x10\x46\x95\x3e\x7e\x05\x26\xd2\x1c\xaa\x92\x21\x1a\x09\x22\xcc\x90\x28\x3c\x29\xfb\xa5\x8d\xf3\x8a\xd4\x51\x17\xa8\x44\xba\x2c\xf1\xae\xda\x26\x91\xa7\x01\x24\xa9\x28\xd2\x50\xf3\xd1\xe3\xd4\xe1\x89\xaa\x89\x8d\xcb\x1a\x39\x24\xf1\x49\x23\xe1\x61\xe2\xda\x09\xd2\x49\xf8\x94\x93\xf2\x6e\x7b\xef\xf7\x6b\x20\xc9\x3e\x1e\xbb\x27\x87\xe9\x4e\x8f\x75\x89\x1f\x85\x67\xcb\xe1\x19\xb0\x0c\x43\x11\xea\xeb\xa8\xeb\x4b\x1b\x92\xf2\x20\x28\xef\x26\x82\xd9\x69\x97\x8f\x5b\xbc\xc6\xee\xc9\x54\x25\x70\x12\x10\xc1\xbb\xc1\xa6\x4
8\x79\xf3\xbc\xdb\x25\x31\xcc\x2d\x64\x72\x95\x71\x6a\xe5\x64\xc6\x63\xe5\x57\x61\xc3\xd1\xae\xe3\x86\xf6\x40\xcc\xae\xd0\xd9\xd6\x9d\x47\x21\xe7\x91\x20\x1e\xc6\xda\xba\x29\x74\xd7\xda\x97\x8e\xa6\x73\x99\xf2\x4c\xb4\xbc\x42\x7f\x93\x92\xb5\x84\xc3\x4f\xef\xd5\x46\xbb\x3b\xf4\x2f\xfb\x38\xb6\x48\x7f\xb2\x17\xc7\x5c\x8f\x40\x7c\x0a\xbf\xa6\xae\x5e\x4c\xda\x31\xc8\x65\xe5\xee\x13\x22\x94\x5f\xea\xca\xfd\x59\x83\x83\x94\xa5\xf4\xeb\xee\x93\x46\x06\x44\x2d\xeb\xab\xf0\xc3\xb1\x8d\x4e\xb2\x93\xa7\xc4\xb3\x6e\x70\x7d\xf5\x9c\xa4\xda\x68\x7e\x35\x85\x4b\xc9\xb3\x00\x29\x2d\x1d\x50\x27\x67\x28\x75\x77\x87\x88\xe5\x31\x85\x24\xd8\xed\x4e\x77\xa1\x12\x87\x83\x57\x57\xd7\xdb\xf8\x25\x18\x08\xe5\xcb\x6d\x90\x0a\xe4\xbd\xea\x8e\x5c\xf3\x84\xa8\x34\x31\xcc\x45\x35\xc3\xa9\xb8\x07\x9c\x38\xfa\xa7\x99\xdb\x7c\x3a\xb7\xce\x4e\xa7\x5b\x67\x33\xdf\xbf\x72\x9a\x12\xc1\x3b\x3c\x84\xcd\x8a\x21\xb4\x33\x3f\x52\x53\x3b\xee\xed\x7c\x71\x01\x1b\x38\x94\xef\x40\x2c\xcb\x7b\x31\x14\xad\x01\xf2\x99\x7a\x27\xc7\xcd\x02\x6a\xcf\x89\xa8\xc9\x8b\x82\x7b\xeb\x43\x50\x9f\xfc\xc0\x0d\x7a\x52\x21\x61\x98\x89\x6e\xf9\xfd\xb4\x3b\x9d\x0d\x8b\x1d\x0b\x54\xb2\xf8\x02\x9d\x21\x27\x50\x9f\x7f\x55\x29\x10\x51\xf0\x8f\xa6\x73\x6a\x8a\x4b\xbf\x63\x07\x04\x30\x68\x8f\x2b\x63\xd2\x8f\xdb\xe1\xca\x71\x8b\x52\xff\x46\xf8\x9c\x6a\x80\x14\xe4\x9f\xa2\xe6\x65\xa3\x1a\x63\x8b\x7d\xab\x93\x32\xe1\xd1\x22\x5d\x00\xb7\xd0\x05\x85\x26\x7b\x3b\x8f\x3c\xf7\x1d\x18\xa1\x36\x01\x4e\x8e\x3d\x13\x6d\x10\x78\x86\x16\x69\xfa\x96\x40\x9f\x96\xfc\x00\x7d\xce\x55\x25\x8b\x50\x5b\x75\x42\x7b\xfd\x6b\x6b\x4a\x61\xd8\xe8\xd3\x15\x50\x29\xbf\xdd\xc1\x74\x8c\xce\x55\xb1\x59\xd7\xd4\x94\x4b\x81\x36\x19\xb5\xee\x1b\x8b\xfd\x5a\x1f\xef\x10\x49\xb5\x11\x78\x27\x0e\x21\xcf\x2d\x54\x3a\xdf\xd2\x38\x84\xd3\xe2\xd2\xf1\xf7\xe5\x0c\x2c\x00\xf3\x8f\xf9\x02\xca\xf7\x67\xf9\xdf\x1d\xff\x3b\x1e\x95\x6e\x08\xe0\xac\x9d\xd0\x69\xe8\x75\x34\xeb\x81\xc8\xfc\x22\x62\x00\x6e\xf0\xdf\x21\x57\x5a\xc1\x02\xed\x7d\x11\x02\x39\x55\x86\xd0\xe8\xf1\x61\xe5\x0e\x15\x12\x03\xb5\x4b\xf3\xb7\xc7\xc4\x4e\xa0\x1e\x89\x55\xaf\xfa\x52\xab\x04\x0f\xeb\xe6\x33\xab\x08\x68\xef\x6a\x48\x43\x75\x10\x55\xe3\xd9\xb6\xa4\x29\xba\x6b\x87\x52\x35\x4f\x64\xfa\x5d\x0d\x51\xbe\x16\x7a\xe5\x88\x65\x61\x6d\x4b\xd0\xe1\x19\x94\x1a\xa9\xe2\x01\xa0\x3d\x7a\x48\xc3\xa2\xab\x13\x62\xd0\x42\x30\x35\x9f\xfb\xb8\xdc\x93\xb0\x2e\xa6\x0e\xa0\x86\xea\xe6\xef\x33\xf3\xe5\x61\x9e\xdb\xb5\x66\xb3\x1e\xde\x10\x42\xa8\x7d\x84\xab\x1b\xec\x79\x6d\x26\x19\x41\xa2\x54\xe5\xcc\xdf\xa4\x81\x91\x66\x1d\xb2\x99\x58\x86\x0a\x29\x65\x11\xe0\xac\x55\xeb\x5a\x62\x7c\x87\xfa\x12\x13\x82\x0d\x35\x39\xd4\x05\xd1\x38\x3c\x57\xd2\xa7\x88\x60\x6c\x8e\x84\x2e\x70\x4a\x85\x4c\x28\x9a\xea\xcb\x7a\xd4\xe3\x19\x11\x24\xdd\x83\x4c\x2a\x5d\xf9\xba\xa7\xac\x43\x48\x4c\x25\xa0\xb3\xcb\x40\x6a\xad\xa7\x78\x9c\x9e\x15\x14\xfa\xea\x8a\xa4\x1c\x6d\xf2\x72\x25\x0f\xf1\x07\x94\x8a\xa1\x3a\x35\xfc\x98\x8f\xb0\xfc\x51\x35\x8e\x6c\x1f\x0a\xa2\x3a\x98\x34\xdb\x34\x44\x67\x0b\x54\x50\xe5\x45\xec\x4a\x04\xa0\x74\x66\x9d\xf2\x8b\x2e\x69\x54\x1b\x9b\x13\x73\x13\x3b\x7a\x26\xba\xfc\x61\x63\x0c\x57\x4e\x07\x56\x0d\xaf\x9d\xf5\x45\xfe\x8b\x1c\x4a\xae\xf6\xa4\x3c\x1c\x49\x1b\xb3\xf4\x50\xaf\x18\x79\x9a\x38\x33\x32\xbb\x6d\x06\x20\xc8\x79\x2b\xd2\xc4\x62\x98\x43\xa2\xd0\x25\x90\x9a\x12\xab\xcd\x38\x0b\xe1\xb3\xc9\xaf\x42\xb1\x87\xad\x1d\x3c\x2a\x92\x26\x85\xe6\x6e\xd6\x01\x64\x6b\x80\x68\xeb\x97\xdd\x5d\x3e\x98\xf2\xdf\x65\x24\xa1\xa9\xae\x1b\x96\x63\x89\xe6\x54\x24\xd3\x6a\x56\x1a\xdf\xbc\xc1\x90\x44\x5d\x99\x87\xd1\x99\x11\x92\x74\xf7\xcb\x22\x5b\x4a\xca\xaa\x01\x43\x
f6\xd4\x90\xbb\x65\x5c\x61\xb5\x1c\xf1\x23\x9b\x59\x86\x8c\x69\x7c\x0c\x59\x51\xa7\x5d\xe6\x2c\xa4\xcb\xb3\x9c\xd1\x90\x54\x65\xef\x94\x07\x9d\x0c\xc3\x46\x00\x2f\x43\x1c\x34\x43\x8f\x32\x85\x84\xac\x6d\x68\x6c\xcf\x69\x68\x75\x6a\xfd\x6a\xfa\x4a\x73\xf3\x73\xba\x03\xe3\xff\xff\x3f\xff\xdf\xff\x0d\x00\x00\xff\xff\x9b\xc6\xef\xe7\x7d\x0c\x06\x00") - -func dataSurnamesJsonBytes() ([]byte, error) { - return bindataRead( - _dataSurnamesJson, - "data/Surnames.json", - ) -} - -func dataSurnamesJson() (*asset, error) { - bytes, err := dataSurnamesJsonBytes() - if err != nil { - return nil, err - } - - info := bindataFileInfo{name: "data/Surnames.json", size: 396413, mode: os.FileMode(420), modTime: time.Unix(1452717629, 0)} - a := &asset{bytes: bytes, info: info} - return a, nil -} - -// Asset loads and returns the asset for the given name. -// It returns an error if the asset could not be found or -// could not be loaded. -func Asset(name string) ([]byte, error) { - cannonicalName := strings.Replace(name, "\\", "/", -1) - if f, ok := _bindata[cannonicalName]; ok { - a, err := f() - if err != nil { - return nil, fmt.Errorf("Asset %s can't read by error: %v", name, err) - } - return a.bytes, nil - } - return nil, fmt.Errorf("Asset %s not found", name) -} - -// MustAsset is like Asset but panics when Asset would return an error. -// It simplifies safe initialization of global variables. -func MustAsset(name string) []byte { - a, err := Asset(name) - if err != nil { - panic("asset: Asset(" + name + "): " + err.Error()) - } - - return a -} - -// AssetInfo loads and returns the asset info for the given name. -// It returns an error if the asset could not be found or -// could not be loaded. -func AssetInfo(name string) (os.FileInfo, error) { - cannonicalName := strings.Replace(name, "\\", "/", -1) - if f, ok := _bindata[cannonicalName]; ok { - a, err := f() - if err != nil { - return nil, fmt.Errorf("AssetInfo %s can't read by error: %v", name, err) - } - return a.info, nil - } - return nil, fmt.Errorf("AssetInfo %s not found", name) -} - -// AssetNames returns the names of the assets. -func AssetNames() []string { - names := make([]string, 0, len(_bindata)) - for name := range _bindata { - names = append(names, name) - } - return names -} - -// _bindata is a table, holding each asset generator, mapped to its name. -var _bindata = map[string]func() (*asset, error){ - "data/Dvorak.json": dataDvorakJson, - "data/English.json": dataEnglishJson, - "data/FemaleNames.json": dataFemalenamesJson, - "data/Keypad.json": dataKeypadJson, - "data/L33t.json": dataL33tJson, - "data/MacKeypad.json": dataMackeypadJson, - "data/MaleNames.json": dataMalenamesJson, - "data/Passwords.json": dataPasswordsJson, - "data/Qwerty.json": dataQwertyJson, - "data/Surnames.json": dataSurnamesJson, -} - -// AssetDir returns the file names below a certain -// directory embedded in the file by go-bindata. -// For example if you run go-bindata on data/... and data contains the -// following hierarchy: -// -// data/ -// foo.txt -// img/ -// a.png -// b.png -// -// then AssetDir("data") would return []string{"foo.txt", "img"} -// AssetDir("data/img") would return []string{"a.png", "b.png"} -// AssetDir("foo.txt") and AssetDir("notexist") would return an error -// AssetDir("") will return []string{"data"}. 
-func AssetDir(name string) ([]string, error) { - node := _bintree - if len(name) != 0 { - cannonicalName := strings.Replace(name, "\\", "/", -1) - pathList := strings.Split(cannonicalName, "/") - for _, p := range pathList { - node = node.Children[p] - if node == nil { - return nil, fmt.Errorf("Asset %s not found", name) - } - } - } - if node.Func != nil { - return nil, fmt.Errorf("Asset %s not found", name) - } - rv := make([]string, 0, len(node.Children)) - for childName := range node.Children { - rv = append(rv, childName) - } - return rv, nil -} - -type bintree struct { - Func func() (*asset, error) - Children map[string]*bintree -} - -var _bintree = &bintree{nil, map[string]*bintree{ - "data": &bintree{nil, map[string]*bintree{ - "Dvorak.json": &bintree{dataDvorakJson, map[string]*bintree{}}, - "English.json": &bintree{dataEnglishJson, map[string]*bintree{}}, - "FemaleNames.json": &bintree{dataFemalenamesJson, map[string]*bintree{}}, - "Keypad.json": &bintree{dataKeypadJson, map[string]*bintree{}}, - "L33t.json": &bintree{dataL33tJson, map[string]*bintree{}}, - "MacKeypad.json": &bintree{dataMackeypadJson, map[string]*bintree{}}, - "MaleNames.json": &bintree{dataMalenamesJson, map[string]*bintree{}}, - "Passwords.json": &bintree{dataPasswordsJson, map[string]*bintree{}}, - "Qwerty.json": &bintree{dataQwertyJson, map[string]*bintree{}}, - "Surnames.json": &bintree{dataSurnamesJson, map[string]*bintree{}}, - }}, -}} - -// RestoreAsset restores an asset under the given directory -func RestoreAsset(dir, name string) error { - data, err := Asset(name) - if err != nil { - return err - } - info, err := AssetInfo(name) - if err != nil { - return err - } - err = os.MkdirAll(_filePath(dir, filepath.Dir(name)), os.FileMode(0755)) - if err != nil { - return err - } - err = ioutil.WriteFile(_filePath(dir, name), data, info.Mode()) - if err != nil { - return err - } - err = os.Chtimes(_filePath(dir, name), info.ModTime(), info.ModTime()) - if err != nil { - return err - } - return nil -} - -// RestoreAssets restores an asset under the given directory recursively -func RestoreAssets(dir, name string) error { - children, err := AssetDir(name) - // File - if err != nil { - return RestoreAsset(dir, name) - } - // Dir - for _, child := range children { - err = RestoreAssets(dir, filepath.Join(name, child)) - if err != nil { - return err - } - } - return nil -} - -func _filePath(dir, name string) string { - cannonicalName := strings.Replace(name, "\\", "/", -1) - return filepath.Join(append([]string{dir}, strings.Split(cannonicalName, "/")...)...) 
-} diff --git a/vendor/github.com/ccojocar/zxcvbn-go/entropy/entropyCalculator.go b/vendor/github.com/ccojocar/zxcvbn-go/entropy/entropyCalculator.go deleted file mode 100644 index 80432572b..000000000 --- a/vendor/github.com/ccojocar/zxcvbn-go/entropy/entropyCalculator.go +++ /dev/null @@ -1,217 +0,0 @@ -package entropy - -import ( - "math" - "regexp" - "unicode" - - "github.com/ccojocar/zxcvbn-go/adjacency" - "github.com/ccojocar/zxcvbn-go/match" - zxcvbnmath "github.com/ccojocar/zxcvbn-go/utils/math" -) - -const ( - numYears = float64(119) // years match against 1900 - 2019 - numMonths = float64(12) - numDays = float64(31) -) - -var ( - startUpperRx = regexp.MustCompile(`^[A-Z][^A-Z]+$`) - endUpperRx = regexp.MustCompile(`^[^A-Z]+[A-Z]$'`) - allUpperRx = regexp.MustCompile(`^[A-Z]+$`) - keyPadStartingPositions = len(adjacency.GraphMap["keypad"].Graph) - keyPadAvgDegree = adjacency.GraphMap["keypad"].CalculateAvgDegree() -) - -// DictionaryEntropy calculates the entropy of a dictionary match -func DictionaryEntropy(match match.Match, rank float64) float64 { - baseEntropy := math.Log2(rank) - upperCaseEntropy := extraUpperCaseEntropy(match) - // TODO: L33t - return baseEntropy + upperCaseEntropy -} - -func extraUpperCaseEntropy(match match.Match) float64 { - word := match.Token - - allLower := true - - for _, char := range word { - if unicode.IsUpper(char) { - allLower = false - break - } - } - if allLower { - return float64(0) - } - - // a capitalized word is the most common capitalization scheme, - // so it only doubles the search space (uncapitalized + capitalized): 1 extra bit of entropy. - // allcaps and end-capitalized are common enough too, underestimate as 1 extra bit to be safe. - - for _, matcher := range []*regexp.Regexp{startUpperRx, endUpperRx, allUpperRx} { - if matcher.MatchString(word) { - return float64(1) - } - } - // Otherwise calculate the number of ways to capitalize U+L uppercase+lowercase letters with U uppercase letters or - // less. Or, if there's more uppercase than lower (for e.g. PASSwORD), the number of ways to lowercase U+L letters - // with L lowercase letters or less. - - countUpper, countLower := float64(0), float64(0) - for _, char := range word { - if unicode.IsUpper(char) { - countUpper++ - } else if unicode.IsLower(char) { - countLower++ - } - } - totalLenght := countLower + countUpper - var possibililities float64 - - for i := float64(0); i <= math.Min(countUpper, countLower); i++ { - possibililities += zxcvbnmath.NChoseK(totalLenght, i) - } - - if possibililities < 1 { - return float64(1) - } - - return (math.Log2(possibililities)) -} - -// SpatialEntropy calculates the entropy for spatial matches -func SpatialEntropy(match match.Match, turns int, shiftCount int) float64 { - var s, d float64 - if match.DictionaryName == "qwerty" || match.DictionaryName == "dvorak" { - // todo: verify qwerty and dvorak have the same length and degree - s = float64(len(adjacency.BuildQwerty().Graph)) - d = adjacency.BuildQwerty().CalculateAvgDegree() - } else { - s = float64(keyPadStartingPositions) - d = keyPadAvgDegree - } - - possibilities := float64(0) - - length := float64(len(match.Token)) - - // TODO: Should this be <= or just < ? 
- // Estimate the number of possible patterns w/ length L or less with t turns or less - for i := float64(2); i <= length+1; i++ { - possibleTurns := math.Min(float64(turns), i-1) - for j := float64(1); j <= possibleTurns+1; j++ { - x := zxcvbnmath.NChoseK(i-1, j-1) * s * math.Pow(d, j) - possibilities += x - } - } - - entropy := math.Log2(possibilities) - // add extra entropu for shifted keys. ( % instead of 5 A instead of a) - // Math is similar to extra entropy for uppercase letters in dictionary matches. - - if S := float64(shiftCount); S > float64(0) { - possibilities = float64(0) - U := length - S - - for i := float64(0); i < math.Min(S, U)+1; i++ { - possibilities += zxcvbnmath.NChoseK(S+U, i) - } - - entropy += math.Log2(possibilities) - } - - return entropy -} - -// RepeatEntropy calculates the entropy for repeating entropy -func RepeatEntropy(match match.Match) float64 { - cardinality := CalcBruteForceCardinality(match.Token) - entropy := math.Log2(cardinality * float64(len(match.Token))) - - return entropy -} - -// CalcBruteForceCardinality calculates the brute force cardinality -// TODO: Validate against python -func CalcBruteForceCardinality(password string) float64 { - lower, upper, digits, symbols := float64(0), float64(0), float64(0), float64(0) - - for _, char := range password { - if unicode.IsLower(char) { - lower = float64(26) - } else if unicode.IsDigit(char) { - digits = float64(10) - } else if unicode.IsUpper(char) { - upper = float64(26) - } else { - symbols = float64(33) - } - } - - cardinality := lower + upper + digits + symbols - return cardinality -} - -// SequenceEntropy calculates the entropy for sequences such as 4567 or cdef -func SequenceEntropy(match match.Match, dictionaryLength int, ascending bool) float64 { - firstChar := match.Token[0] - var baseEntropy float64 - if string(firstChar) == "a" || string(firstChar) == "1" { - baseEntropy = float64(0) - } else { - baseEntropy = math.Log2(float64(dictionaryLength)) - // TODO: should this be just the first or any char? - if unicode.IsUpper(rune(firstChar)) { - baseEntropy++ - } - } - - if !ascending { - baseEntropy++ - } - return baseEntropy + math.Log2(float64(len(match.Token))) -} - -// ExtraLeetEntropy calulates the added entropy provied by l33t substitustions -func ExtraLeetEntropy(match match.Match, password string) float64 { - var subsitutions float64 - var unsub float64 - subPassword := password[match.I:match.J] - for index, char := range subPassword { - if string(char) != string(match.Token[index]) { - subsitutions++ - } else { - // TODO: Make this only true for 1337 chars that are not subs? 
- unsub++ - } - } - - var possibilities float64 - - for i := float64(0); i <= math.Min(subsitutions, unsub)+1; i++ { - possibilities += zxcvbnmath.NChoseK(subsitutions+unsub, i) - } - - if possibilities <= 1 { - return float64(1) - } - return math.Log2(possibilities) -} - -// DateEntropy calculates the entropy provided by a date -func DateEntropy(dateMatch match.DateMatch) float64 { - var entropy float64 - if dateMatch.Year < 100 { - entropy = math.Log2(numDays * numMonths * 100) - } else { - entropy = math.Log2(numDays * numMonths * numYears) - } - - if dateMatch.Separator != "" { - entropy += 2 // add two bits for separator selection [/,-,.,etc] - } - return entropy -} diff --git a/vendor/github.com/ccojocar/zxcvbn-go/frequency/frequency.go b/vendor/github.com/ccojocar/zxcvbn-go/frequency/frequency.go deleted file mode 100644 index 4f51369e1..000000000 --- a/vendor/github.com/ccojocar/zxcvbn-go/frequency/frequency.go +++ /dev/null @@ -1,50 +0,0 @@ -package frequency - -import ( - "encoding/json" - "log" - - "github.com/ccojocar/zxcvbn-go/data" -) - -// List holds a frequency list -type List struct { - Name string - List []string -} - -// Lists holds all the frequency list in a map -var Lists = make(map[string]List) - -func init() { - maleFilePath := getAsset("data/MaleNames.json") - femaleFilePath := getAsset("data/FemaleNames.json") - surnameFilePath := getAsset("data/Surnames.json") - englishFilePath := getAsset("data/English.json") - passwordsFilePath := getAsset("data/Passwords.json") - - Lists["MaleNames"] = getStringListFromAsset(maleFilePath, "MaleNames") - Lists["FemaleNames"] = getStringListFromAsset(femaleFilePath, "FemaleNames") - Lists["Surname"] = getStringListFromAsset(surnameFilePath, "Surname") - Lists["English"] = getStringListFromAsset(englishFilePath, "English") - Lists["Passwords"] = getStringListFromAsset(passwordsFilePath, "Passwords") -} - -func getAsset(name string) []byte { - data, err := data.Asset(name) - if err != nil { - panic("Error getting asset " + name) - } - - return data -} - -func getStringListFromAsset(data []byte, name string) List { - var tempList List - err := json.Unmarshal(data, &tempList) - if err != nil { - log.Fatal(err) - } - tempList.Name = name - return tempList -} diff --git a/vendor/github.com/ccojocar/zxcvbn-go/match/match.go b/vendor/github.com/ccojocar/zxcvbn-go/match/match.go deleted file mode 100644 index da3e894ec..000000000 --- a/vendor/github.com/ccojocar/zxcvbn-go/match/match.go +++ /dev/null @@ -1,45 +0,0 @@ -package match - -// Matches is an alies for []Match used for sorting -type Matches []Match - -func (s Matches) Len() int { - return len(s) -} - -func (s Matches) Swap(i, j int) { - s[i], s[j] = s[j], s[i] -} - -func (s Matches) Less(i, j int) bool { - if s[i].I < s[j].I { - return true - } else if s[i].I == s[j].I { - return s[i].J < s[j].J - } - return false -} - -// Match represents different matches -type Match struct { - Pattern string - I, J int - Token string - DictionaryName string - Entropy float64 -} - -// DateMatch is specifilly a match for type date -type DateMatch struct { - Pattern string - I, J int - Token string - Separator string - Day, Month, Year int64 -} - -// Matcher are a func and ID that can be used to match different passwords -type Matcher struct { - MatchingFunc func(password string) []Match - ID string -} diff --git a/vendor/github.com/ccojocar/zxcvbn-go/matching/dateMatchers.go b/vendor/github.com/ccojocar/zxcvbn-go/matching/dateMatchers.go deleted file mode 100644 index fd7f38332..000000000 --- 
a/vendor/github.com/ccojocar/zxcvbn-go/matching/dateMatchers.go +++ /dev/null @@ -1,206 +0,0 @@ -package matching - -import ( - "regexp" - "strconv" - "strings" - - "github.com/ccojocar/zxcvbn-go/entropy" - "github.com/ccojocar/zxcvbn-go/match" -) - -const ( - dateSepMatcherName = "DATESEP" - dateWithOutSepMatcherName = "DATEWITHOUT" -) - -var ( - dateRxYearSuffix = regexp.MustCompile(`((\d{1,2})(\s|-|\/|\\|_|\.)(\d{1,2})(\s|-|\/|\\|_|\.)(19\d{2}|200\d|201\d|\d{2}))`) - dateRxYearPrefix = regexp.MustCompile(`((19\d{2}|200\d|201\d|\d{2})(\s|-|/|\\|_|\.)(\d{1,2})(\s|-|/|\\|_|\.)(\d{1,2}))`) - dateWithOutSepMatch = regexp.MustCompile(`\d{4,8}`) -) - -// FilterDateSepMatcher can be pass to zxcvbn-go.PasswordStrength to skip that matcher -func FilterDateSepMatcher(m match.Matcher) bool { - return m.ID == dateSepMatcherName -} - -// FilterDateWithoutSepMatcher can be pass to zxcvbn-go.PasswordStrength to skip that matcher -func FilterDateWithoutSepMatcher(m match.Matcher) bool { - return m.ID == dateWithOutSepMatcherName -} - -func checkDate(day, month, year int64) (bool, int64, int64, int64) { - if (12 <= month && month <= 31) && day <= 12 { - day, month = month, day - } - - if day > 31 || month > 12 { - return false, 0, 0, 0 - } - - if !((1900 <= year && year <= 2019) || (0 <= year && year <= 99)) { - return false, 0, 0, 0 - } - - return true, day, month, year -} - -func dateSepMatcher(password string) []match.Match { - dateMatches := dateSepMatchHelper(password) - - var matches []match.Match - for _, dateMatch := range dateMatches { - match := match.Match{ - I: dateMatch.I, - J: dateMatch.J, - Entropy: entropy.DateEntropy(dateMatch), - DictionaryName: "date_match", - Token: dateMatch.Token, - } - - matches = append(matches, match) - } - - return matches -} - -func dateSepMatchHelper(password string) []match.DateMatch { - var matches []match.DateMatch - - for _, v := range dateRxYearSuffix.FindAllString(password, len(password)) { - splitV := dateRxYearSuffix.FindAllStringSubmatch(v, len(v)) - i := strings.Index(password, v) - j := i + len(v) - day, _ := strconv.ParseInt(splitV[0][4], 10, 16) - month, _ := strconv.ParseInt(splitV[0][2], 10, 16) - year, _ := strconv.ParseInt(splitV[0][6], 10, 16) - match := match.DateMatch{Day: day, Month: month, Year: year, Separator: splitV[0][5], I: i, J: j, Token: password[i:j]} - matches = append(matches, match) - } - - for _, v := range dateRxYearPrefix.FindAllString(password, len(password)) { - splitV := dateRxYearPrefix.FindAllStringSubmatch(v, len(v)) - i := strings.Index(password, v) - j := i + len(v) - day, _ := strconv.ParseInt(splitV[0][4], 10, 16) - month, _ := strconv.ParseInt(splitV[0][6], 10, 16) - year, _ := strconv.ParseInt(splitV[0][2], 10, 16) - match := match.DateMatch{Day: day, Month: month, Year: year, Separator: splitV[0][5], I: i, J: j, Token: password[i:j]} - matches = append(matches, match) - } - - var out []match.DateMatch - for _, match := range matches { - if valid, day, month, year := checkDate(match.Day, match.Month, match.Year); valid { - match.Pattern = "date" - match.Day = day - match.Month = month - match.Year = year - out = append(out, match) - } - } - return out -} - -type dateMatchCandidate struct { - DayMonth string - Year string - I, J int -} - -type dateMatchCandidateTwo struct { - Day string - Month string - Year string - I, J int -} - -func dateWithoutSepMatch(password string) []match.Match { - dateMatches := dateWithoutSepMatchHelper(password) - - var matches []match.Match - for _, dateMatch := range dateMatches { - 
match := match.Match{ - I: dateMatch.I, - J: dateMatch.J, - Entropy: entropy.DateEntropy(dateMatch), - DictionaryName: "date_match", - Token: dateMatch.Token, - } - - matches = append(matches, match) - } - - return matches -} - -// TODO Has issues with 6 digit dates -func dateWithoutSepMatchHelper(password string) (matches []match.DateMatch) { - for _, v := range dateWithOutSepMatch.FindAllString(password, len(password)) { - i := strings.Index(password, v) - j := i + len(v) - length := len(v) - lastIndex := length - 1 - var candidatesRoundOne []dateMatchCandidate - - if length <= 6 { - // 2-digit year prefix - candidatesRoundOne = append(candidatesRoundOne, buildDateMatchCandidate(v[2:], v[0:2], i, j)) - - // 2-digityear suffix - candidatesRoundOne = append(candidatesRoundOne, buildDateMatchCandidate(v[0:lastIndex-2], v[lastIndex-2:], i, j)) - } - if length >= 6 { - // 4-digit year prefix - candidatesRoundOne = append(candidatesRoundOne, buildDateMatchCandidate(v[4:], v[0:4], i, j)) - - // 4-digit year sufix - candidatesRoundOne = append(candidatesRoundOne, buildDateMatchCandidate(v[0:lastIndex-3], v[lastIndex-3:], i, j)) - } - - var candidatesRoundTwo []dateMatchCandidateTwo - for _, c := range candidatesRoundOne { - if len(c.DayMonth) == 2 { - candidatesRoundTwo = append(candidatesRoundTwo, buildDateMatchCandidateTwo(c.DayMonth[0:0], c.DayMonth[1:1], c.Year, c.I, c.J)) - } else if len(c.DayMonth) == 3 { - candidatesRoundTwo = append(candidatesRoundTwo, buildDateMatchCandidateTwo(c.DayMonth[0:2], c.DayMonth[2:2], c.Year, c.I, c.J)) - candidatesRoundTwo = append(candidatesRoundTwo, buildDateMatchCandidateTwo(c.DayMonth[0:0], c.DayMonth[1:3], c.Year, c.I, c.J)) - } else if len(c.DayMonth) == 4 { - candidatesRoundTwo = append(candidatesRoundTwo, buildDateMatchCandidateTwo(c.DayMonth[0:2], c.DayMonth[2:4], c.Year, c.I, c.J)) - } - } - - for _, candidate := range candidatesRoundTwo { - intDay, err := strconv.ParseInt(candidate.Day, 10, 16) - if err != nil { - continue - } - - intMonth, err := strconv.ParseInt(candidate.Month, 10, 16) - if err != nil { - continue - } - - intYear, err := strconv.ParseInt(candidate.Year, 10, 16) - if err != nil { - continue - } - - if ok, _, _, _ := checkDate(intDay, intMonth, intYear); ok { - matches = append(matches, match.DateMatch{Token: password, Pattern: "date", Day: intDay, Month: intMonth, Year: intYear, I: i, J: j}) - } - - } - } - - return matches -} - -func buildDateMatchCandidate(dayMonth, year string, i, j int) dateMatchCandidate { - return dateMatchCandidate{DayMonth: dayMonth, Year: year, I: i, J: j} -} - -func buildDateMatchCandidateTwo(day, month string, year string, i, j int) dateMatchCandidateTwo { - return dateMatchCandidateTwo{Day: day, Month: month, Year: year, I: i, J: j} -} diff --git a/vendor/github.com/ccojocar/zxcvbn-go/matching/dictionaryMatch.go b/vendor/github.com/ccojocar/zxcvbn-go/matching/dictionaryMatch.go deleted file mode 100644 index d0d450188..000000000 --- a/vendor/github.com/ccojocar/zxcvbn-go/matching/dictionaryMatch.go +++ /dev/null @@ -1,56 +0,0 @@ -package matching - -import ( - "strings" - - "github.com/ccojocar/zxcvbn-go/entropy" - "github.com/ccojocar/zxcvbn-go/match" -) - -func buildDictMatcher(dictName string, rankedDict map[string]int) func(password string) []match.Match { - return func(password string) []match.Match { - matches := dictionaryMatch(password, dictName, rankedDict) - for _, v := range matches { - v.DictionaryName = dictName - } - return matches - } -} - -func dictionaryMatch(password string, 
dictionaryName string, rankedDict map[string]int) []match.Match { - var results []match.Match - pwLower := strings.ToLower(password) - - pwLowerRunes := []rune(pwLower) - length := len(pwLowerRunes) - - for i := 0; i < length; i++ { - for j := i; j < length; j++ { - word := pwLowerRunes[i : j+1] - if val, ok := rankedDict[string(word)]; ok { - matchDic := match.Match{ - Pattern: "dictionary", - DictionaryName: dictionaryName, - I: i, - J: j, - Token: string([]rune(password)[i : j+1]), - } - matchDic.Entropy = entropy.DictionaryEntropy(matchDic, float64(val)) - - results = append(results, matchDic) - } - } - } - - return results -} - -func buildRankedDict(unrankedList []string) map[string]int { - result := make(map[string]int) - - for i, v := range unrankedList { - result[strings.ToLower(v)] = i + 1 - } - - return result -} diff --git a/vendor/github.com/ccojocar/zxcvbn-go/matching/leet.go b/vendor/github.com/ccojocar/zxcvbn-go/matching/leet.go deleted file mode 100644 index 1f303aa6e..000000000 --- a/vendor/github.com/ccojocar/zxcvbn-go/matching/leet.go +++ /dev/null @@ -1,234 +0,0 @@ -package matching - -import ( - "strings" - - "github.com/ccojocar/zxcvbn-go/entropy" - "github.com/ccojocar/zxcvbn-go/match" -) - -// L33TMatcherName id -const L33TMatcherName = "l33t" - -// FilterL33tMatcher can be pass to zxcvbn-go.PasswordStrength to skip that matcher -func FilterL33tMatcher(m match.Matcher) bool { - return m.ID == L33TMatcherName -} - -func l33tMatch(password string) []match.Match { - permutations := getPermutations(password) - - var matches []match.Match - - for _, permutation := range permutations { - for _, mather := range dictionaryMatchers { - matches = append(matches, mather.MatchingFunc(permutation)...) - } - } - - for _, match := range matches { - match.Entropy += entropy.ExtraLeetEntropy(match, password) - match.DictionaryName = match.DictionaryName + "_3117" - } - - return matches -} - -// This function creates a list of permutations based on a fixed table stored on data. The table -// will be reduced in order to proceed in the function using only relevant values (see -// relevantL33tSubtable). -func getPermutations(password string) []string { - substitutions := relevantL33tSubtable(password) - permutations := getAllPermutationsOfLeetSubstitutions(password, substitutions) - return permutations -} - -// This function loads the table from data but only keep in memory the values that are present -// inside the provided password. -func relevantL33tSubtable(password string) map[string][]string { - relevantSubs := make(map[string][]string) - for key, values := range l33tTable.Graph { - for _, value := range values { - if strings.Contains(password, value) { - relevantSubs[key] = append(relevantSubs[key], value) - } - } - } - - return relevantSubs -} - -// This function creates the list of permutations of a given password using the provided table as -// reference for its operation. 
-func getAllPermutationsOfLeetSubstitutions(password string, table map[string][]string) []string { - result := []string{} - - // create a list of tables without conflicting keys/values (this happens for "|", "7" and "1") - noConflictsTables := createListOfMapsWithoutConflicts(table) - for _, noConflictsTable := range noConflictsTables { - substitutionsMaps := createSubstitutionsMapsFromTable(noConflictsTable) - for _, substitutionsMap := range substitutionsMaps { - newValue := createWordForSubstitutionMap(password, substitutionsMap) - if !stringSliceContainsValue(result, newValue) { - result = append(result, newValue) - } - } - } - - return result -} - -// Create the possible list of maps removing the conflicts from it. As an example, the value "|" -// may represent "i" and "l". For each representation of the conflicting value, a new map is -// created. This may grow exponencialy according to the number of conflicts. The number of maps -// returned by this function may be reduced if the relevantL33tSubtable function was called to -// identify only relevant items. -func createListOfMapsWithoutConflicts(table map[string][]string) []map[string][]string { - // the resulting list starts with the provided table - result := []map[string][]string{} - result = append(result, table) - - // iterate over the list of conflicts in order to expand the maps for each one - conflicts := retrieveConflictsListFromTable(table) - for _, value := range conflicts { - newMapList := []map[string][]string{} - - // for each conflict a new list of maps will be created for every already known map - for _, currentMap := range result { - newMaps := createDifferentMapsForLeetChar(currentMap, value) - newMapList = append(newMapList, newMaps...) - } - - result = newMapList - } - - return result -} - -// This function retrieves the list of values that appear for one or more keys. This is useful to -// know which l33t chars can represent more than one letter. -func retrieveConflictsListFromTable(table map[string][]string) []string { - result := []string{} - foundValues := []string{} - - for _, values := range table { - for _, value := range values { - if stringSliceContainsValue(foundValues, value) { - // only add on results if it was not identified as conflict before - if !stringSliceContainsValue(result, value) { - result = append(result, value) - } - } else { - foundValues = append(foundValues, value) - } - } - } - - return result -} - -// This function aims to create different maps for a given char if this char represents a conflict. -// If the specified char is not a conflict one, the same map will be returned. In scenarios which -// the provided char can not be found on map, an empty list will be returned. This function was -// designed to be used on conflicts situations. -func createDifferentMapsForLeetChar(table map[string][]string, leetChar string) []map[string][]string { - result := []map[string][]string{} - - keysWithSameValue := retrieveListOfKeysWithSpecificValueFromTable(table, leetChar) - for _, key := range keysWithSameValue { - newMap := copyMapRemovingSameValueFromOtherKeys(table, key, leetChar) - result = append(result, newMap) - } - - return result -} - -// This function retrieves the list of keys that can be represented using the given value. 
-func retrieveListOfKeysWithSpecificValueFromTable(table map[string][]string, valueToFind string) []string { - result := []string{} - - for key, values := range table { - for _, value := range values { - if value == valueToFind && !stringSliceContainsValue(result, key) { - result = append(result, key) - } - } - } - - return result -} - -// This function returns a list of substitution map from a given table. Each map in the result will -// provide only one representation for each value. As an example, if the provided map contains the -// values "@" and "4" in the possibilities to represent "a", two maps will be created where one -// will contain "a" mapping to "@" and the other one will provide "a" mapping to "4". -func createSubstitutionsMapsFromTable(table map[string][]string) []map[string]string { - result := []map[string]string{{"": ""}} - - for key, values := range table { - newResult := []map[string]string{} - - for _, mapInCurrentResult := range result { - for _, value := range values { - newMapForValue := copyMap(mapInCurrentResult) - newMapForValue[key] = value - newResult = append(newResult, newMapForValue) - } - } - - result = newResult - } - - // verification to make sure that the slice was filled - if len(result) == 1 && len(result[0]) == 1 && result[0][""] == "" { - return []map[string]string{} - } - - return result -} - -// This function replaces the values provided on substitution map over the provided word. -func createWordForSubstitutionMap(word string, substitutionMap map[string]string) string { - result := word - for key, value := range substitutionMap { - result = strings.Replace(result, value, key, -1) - } - - return result -} - -func stringSliceContainsValue(slice []string, value string) bool { - for _, valueInSlice := range slice { - if valueInSlice == value { - return true - } - } - - return false -} - -func copyMap(table map[string]string) map[string]string { - result := make(map[string]string) - - for key, value := range table { - result[key] = value - } - - return result -} - -// This function creates a new map based on the one provided but excluding possible representations -// of the same value on other keys. -func copyMapRemovingSameValueFromOtherKeys(table map[string][]string, keyToFix string, valueToFix string) map[string][]string { - result := make(map[string][]string) - - for key, values := range table { - for _, value := range values { - if !(value == valueToFix && key != keyToFix) { - result[key] = append(result[key], value) - } - } - } - - return result -} diff --git a/vendor/github.com/ccojocar/zxcvbn-go/matching/matching.go b/vendor/github.com/ccojocar/zxcvbn-go/matching/matching.go deleted file mode 100644 index c6948067b..000000000 --- a/vendor/github.com/ccojocar/zxcvbn-go/matching/matching.go +++ /dev/null @@ -1,79 +0,0 @@ -package matching - -import ( - "sort" - - "github.com/ccojocar/zxcvbn-go/adjacency" - "github.com/ccojocar/zxcvbn-go/frequency" - "github.com/ccojocar/zxcvbn-go/match" -) - -var ( - dictionaryMatchers []match.Matcher - matchers []match.Matcher - adjacencyGraphs []adjacency.Graph - l33tTable adjacency.Graph - - sequences map[string]string -) - -func init() { - loadFrequencyList() -} - -// Omnimatch runs all matchers against the password -func Omnimatch(password string, userInputs []string, filters ...func(match.Matcher) bool) (matches []match.Match) { - // Can I run into the issue where nil is not equal to nil? 
- if dictionaryMatchers == nil || adjacencyGraphs == nil { - loadFrequencyList() - } - - if userInputs != nil { - userInputMatcher := buildDictMatcher("user_inputs", buildRankedDict(userInputs)) - matches = userInputMatcher(password) - } - - for _, matcher := range matchers { - shouldBeFiltered := false - for i := range filters { - if filters[i](matcher) { - shouldBeFiltered = true - break - } - } - if !shouldBeFiltered { - matches = append(matches, matcher.MatchingFunc(password)...) - } - } - sort.Sort(match.Matches(matches)) - return matches -} - -func loadFrequencyList() { - for n, list := range frequency.Lists { - dictionaryMatchers = append(dictionaryMatchers, match.Matcher{MatchingFunc: buildDictMatcher(n, buildRankedDict(list.List)), ID: n}) - } - - l33tTable = adjacency.GraphMap["l33t"] - - adjacencyGraphs = append(adjacencyGraphs, adjacency.GraphMap["qwerty"]) - adjacencyGraphs = append(adjacencyGraphs, adjacency.GraphMap["dvorak"]) - adjacencyGraphs = append(adjacencyGraphs, adjacency.GraphMap["keypad"]) - adjacencyGraphs = append(adjacencyGraphs, adjacency.GraphMap["macKeypad"]) - - // l33tFilePath, _ := filepath.Abs("adjacency/L33t.json") - // L33T_TABLE = adjacency.GetAdjancencyGraphFromFile(l33tFilePath, "l33t") - - sequences = make(map[string]string) - sequences["lower"] = "abcdefghijklmnopqrstuvwxyz" - sequences["upper"] = "ABCDEFGHIJKLMNOPQRSTUVWXYZ" - sequences["digits"] = "0123456789" - - matchers = append(matchers, dictionaryMatchers...) - matchers = append(matchers, match.Matcher{MatchingFunc: spatialMatch, ID: spatialMatcherName}) - matchers = append(matchers, match.Matcher{MatchingFunc: repeatMatch, ID: repeatMatcherName}) - matchers = append(matchers, match.Matcher{MatchingFunc: sequenceMatch, ID: sequenceMatcherName}) - matchers = append(matchers, match.Matcher{MatchingFunc: l33tMatch, ID: L33TMatcherName}) - matchers = append(matchers, match.Matcher{MatchingFunc: dateSepMatcher, ID: dateSepMatcherName}) - matchers = append(matchers, match.Matcher{MatchingFunc: dateWithoutSepMatch, ID: dateWithOutSepMatcherName}) -} diff --git a/vendor/github.com/ccojocar/zxcvbn-go/matching/repeatMatch.go b/vendor/github.com/ccojocar/zxcvbn-go/matching/repeatMatch.go deleted file mode 100644 index d52ba4254..000000000 --- a/vendor/github.com/ccojocar/zxcvbn-go/matching/repeatMatch.go +++ /dev/null @@ -1,68 +0,0 @@ -package matching - -import ( - "strings" - - "github.com/ccojocar/zxcvbn-go/entropy" - "github.com/ccojocar/zxcvbn-go/match" -) - -const repeatMatcherName = "REPEAT" - -// FilterRepeatMatcher can be pass to zxcvbn-go.PasswordStrength to skip that matcher -func FilterRepeatMatcher(m match.Matcher) bool { - return m.ID == repeatMatcherName -} - -func repeatMatch(password string) []match.Match { - var matches []match.Match - - // Loop through password. 
if current == prev currentStreak++ else if currentStreak > 2 {buildMatch; currentStreak = 1} prev = current - var current, prev string - currentStreak := 1 - var i int - var char rune - for i, char = range password { - current = string(char) - if i == 0 { - prev = current - continue - } - - if strings.EqualFold(current, prev) { - currentStreak++ - } else if currentStreak > 2 { - iPos := i - currentStreak - jPos := i - 1 - matchRepeat := match.Match{ - Pattern: "repeat", - I: iPos, - J: jPos, - Token: password[iPos : jPos+1], - DictionaryName: prev, - } - matchRepeat.Entropy = entropy.RepeatEntropy(matchRepeat) - matches = append(matches, matchRepeat) - currentStreak = 1 - } else { - currentStreak = 1 - } - - prev = current - } - - if currentStreak > 2 { - iPos := i - currentStreak + 1 - jPos := i - matchRepeat := match.Match{ - Pattern: "repeat", - I: iPos, - J: jPos, - Token: password[iPos : jPos+1], - DictionaryName: prev, - } - matchRepeat.Entropy = entropy.RepeatEntropy(matchRepeat) - matches = append(matches, matchRepeat) - } - return matches -} diff --git a/vendor/github.com/ccojocar/zxcvbn-go/matching/sequenceMatch.go b/vendor/github.com/ccojocar/zxcvbn-go/matching/sequenceMatch.go deleted file mode 100644 index 697194583..000000000 --- a/vendor/github.com/ccojocar/zxcvbn-go/matching/sequenceMatch.go +++ /dev/null @@ -1,74 +0,0 @@ -package matching - -import ( - "strings" - - "github.com/ccojocar/zxcvbn-go/entropy" - "github.com/ccojocar/zxcvbn-go/match" -) - -const sequenceMatcherName = "SEQ" - -// FilterSequenceMatcher can be pass to zxcvbn-go.PasswordStrength to skip that matcher -func FilterSequenceMatcher(m match.Matcher) bool { - return m.ID == sequenceMatcherName -} - -func sequenceMatch(password string) []match.Match { - var matches []match.Match - for i := 0; i < len(password); { - j := i + 1 - var seq string - var seqName string - seqDirection := 0 - for seqCandidateName, seqCandidate := range sequences { - iN := strings.Index(seqCandidate, string(password[i])) - var jN int - if j < len(password) { - jN = strings.Index(seqCandidate, string(password[j])) - } else { - jN = -1 - } - - if iN > -1 && jN > -1 { - direction := jN - iN - if direction == 1 || direction == -1 { - seq = seqCandidate - seqName = seqCandidateName - seqDirection = direction - break - } - } - - } - - if seq != "" { - for { - var prevN, curN int - if j < len(password) { - prevChar, curChar := password[j-1], password[j] - prevN, curN = strings.Index(seq, string(prevChar)), strings.Index(seq, string(curChar)) - } - - if j == len(password) || curN-prevN != seqDirection { - if j-i > 2 { - matchSequence := match.Match{ - Pattern: "sequence", - I: i, - J: j - 1, - Token: password[i:j], - DictionaryName: seqName, - } - - matchSequence.Entropy = entropy.SequenceEntropy(matchSequence, len(seq), (seqDirection == 1)) - matches = append(matches, matchSequence) - } - break - } - j++ - } - } - i = j - } - return matches -} diff --git a/vendor/github.com/ccojocar/zxcvbn-go/matching/spatialMatch.go b/vendor/github.com/ccojocar/zxcvbn-go/matching/spatialMatch.go deleted file mode 100644 index 101ccea5e..000000000 --- a/vendor/github.com/ccojocar/zxcvbn-go/matching/spatialMatch.go +++ /dev/null @@ -1,87 +0,0 @@ -package matching - -import ( - "strings" - - "github.com/ccojocar/zxcvbn-go/adjacency" - "github.com/ccojocar/zxcvbn-go/entropy" - "github.com/ccojocar/zxcvbn-go/match" -) - -const spatialMatcherName = "SPATIAL" - -// FilterSpatialMatcher can be pass to zxcvbn-go.PasswordStrength to skip that matcher -func 
FilterSpatialMatcher(m match.Matcher) bool { - return m.ID == spatialMatcherName -} - -func spatialMatch(password string) (matches []match.Match) { - for _, graph := range adjacencyGraphs { - if graph.Graph != nil { - matches = append(matches, spatialMatchHelper(password, graph)...) - } - } - return matches -} - -func spatialMatchHelper(password string, graph adjacency.Graph) (matches []match.Match) { - for i := 0; i < len(password)-1; { - j := i + 1 - lastDirection := -99 // an int that it should never be! - turns := 0 - shiftedCount := 0 - - for { - prevChar := password[j-1] - found := false - var foundDirection int - curDirection := -1 - // My graphs seem to be wrong. . . and where the hell is qwerty - adjacents := graph.Graph[string(prevChar)] - // Consider growing pattern by one character if j hasn't gone over the edge - if j < len(password) { - curChar := password[j] - for _, adj := range adjacents { - curDirection++ - - if strings.Contains(adj, string(curChar)) { - found = true - foundDirection = curDirection - - if strings.Index(adj, string(curChar)) == 1 { - // index 1 in the adjacency means the key is shifted, 0 means unshifted: A vs a, % vs 5, etc. - // for example, 'q' is adjacent to the entry '2@'. @ is shifted w/ index 1, 2 is unshifted. - shiftedCount++ - } - - if lastDirection != foundDirection { - // adding a turn is correct even in the initial case when last_direction is null: - // every spatial pattern starts with a turn. - turns++ - lastDirection = foundDirection - } - break - } - } - } - - // if the current pattern continued, extend j and try to grow again - if found { - j++ - } else { - // otherwise push the pattern discovered so far, if any... - // don't consider length 1 or 2 chains. - if j-i > 2 { - matchSpc := match.Match{Pattern: "spatial", I: i, J: j - 1, Token: password[i:j], DictionaryName: graph.Name} - matchSpc.Entropy = entropy.SpatialEntropy(matchSpc, turns, shiftedCount) - matches = append(matches, matchSpc) - } - //. . . and then start a new search from the rest of the password - i = j - break - } - } - - } - return matches -} diff --git a/vendor/github.com/ccojocar/zxcvbn-go/renovate.json b/vendor/github.com/ccojocar/zxcvbn-go/renovate.json deleted file mode 100644 index 58ee1e0ea..000000000 --- a/vendor/github.com/ccojocar/zxcvbn-go/renovate.json +++ /dev/null @@ -1,25 +0,0 @@ -{ - "dependencyDashboard": true, - "dependencyDashboardTitle" : "Renovate(bot) : dependency dashboard", - "vulnerabilityAlerts": { - "enabled": true - }, - "extends": [ - ":preserveSemverRanges", - "group:all", - "schedule:weekly" - ], - "lockFileMaintenance": { - "commitMessageAction": "Update", - "enabled": true, - "extends": [ - "group:all", - "schedule:weekly" - ] - }, - "postUpdateOptions": [ - "gomodTidy", - "gomodUpdateImportPaths" - ], - "separateMajorMinor": false -} diff --git a/vendor/github.com/ccojocar/zxcvbn-go/scoring/scoring.go b/vendor/github.com/ccojocar/zxcvbn-go/scoring/scoring.go deleted file mode 100644 index f25606a8d..000000000 --- a/vendor/github.com/ccojocar/zxcvbn-go/scoring/scoring.go +++ /dev/null @@ -1,180 +0,0 @@ -package scoring - -import ( - "fmt" - "math" - "sort" - - "github.com/ccojocar/zxcvbn-go/entropy" - "github.com/ccojocar/zxcvbn-go/match" - zxcvbnmath "github.com/ccojocar/zxcvbn-go/utils/math" -) - -const ( - //for a hash function like bcrypt/scrypt/PBKDF2, 10ms per guess is a safe lower bound. - //(usually a guess would take longer -- this assumes fast hardware and a small work factor.) 
- //adjust for your site accordingly if you use another hash function, possibly by - //several orders of magnitude! - singleGuess float64 = 0.010 - numAttackers float64 = 100 // Cores used to make guesses - secondsPerGuess float64 = singleGuess / numAttackers -) - -// MinEntropyMatch is the lowest entropy match found -type MinEntropyMatch struct { - Password string - Entropy float64 - MatchSequence []match.Match - CrackTime float64 - CrackTimeDisplay string - Score int - CalcTime float64 -} - -/* -MinimumEntropyMatchSequence returns the minimum entropy - - Takes a list of overlapping matches, returns the non-overlapping sublist with - minimum entropy. O(nm) dp alg for length-n password with m candidate matches. -*/ -func MinimumEntropyMatchSequence(password string, matches []match.Match) MinEntropyMatch { - bruteforceCardinality := entropy.CalcBruteForceCardinality(password) - upToK := make([]float64, len(password)) - backPointers := make([]match.Match, len(password)) - - for k := 0; k < len(password); k++ { - upToK[k] = get(upToK, k-1) + math.Log2(bruteforceCardinality) - - for _, match := range matches { - if match.J != k { - continue - } - - i, j := match.I, match.J - // see if best entropy up to i-1 + entropy of match is less that current min at j - upTo := get(upToK, i-1) - candidateEntropy := upTo + match.Entropy - - if candidateEntropy < upToK[j] { - upToK[j] = candidateEntropy - match.Entropy = candidateEntropy - backPointers[j] = match - } - } - } - - // walk backwards and decode the best sequence - var matchSequence []match.Match - passwordLen := len(password) - passwordLen-- - for k := passwordLen; k >= 0; { - match := backPointers[k] - if match.Pattern != "" { - matchSequence = append(matchSequence, match) - k = match.I - 1 - - } else { - k-- - } - - } - sort.Sort(match.Matches(matchSequence)) - - makeBruteForceMatch := func(i, j int) match.Match { - return match.Match{ - Pattern: "bruteforce", - I: i, - J: j, - Token: password[i : j+1], - Entropy: math.Log2(math.Pow(bruteforceCardinality, float64(j-i))), - } - } - - k := 0 - var matchSequenceCopy []match.Match - for _, match := range matchSequence { - i, j := match.I, match.J - if i-k > 0 { - matchSequenceCopy = append(matchSequenceCopy, makeBruteForceMatch(k, i-1)) - } - k = j + 1 - matchSequenceCopy = append(matchSequenceCopy, match) - } - - if k < len(password) { - matchSequenceCopy = append(matchSequenceCopy, makeBruteForceMatch(k, len(password)-1)) - } - var minEntropy float64 - if len(password) == 0 { - minEntropy = float64(0) - } else { - minEntropy = upToK[len(password)-1] - } - - crackTime := roundToXDigits(entropyToCrackTime(minEntropy), 3) - return MinEntropyMatch{ - Password: password, - Entropy: roundToXDigits(minEntropy, 3), - MatchSequence: matchSequenceCopy, - CrackTime: crackTime, - CrackTimeDisplay: displayTime(crackTime), - Score: crackTimeToScore(crackTime), - } -} - -func get(a []float64, i int) float64 { - if i < 0 || i >= len(a) { - return float64(0) - } - - return a[i] -} - -func entropyToCrackTime(entropy float64) float64 { - crackTime := (0.5 * math.Pow(float64(2), entropy)) * secondsPerGuess - - return crackTime -} - -func roundToXDigits(number float64, digits int) float64 { - return zxcvbnmath.Round(number, .5, digits) -} - -func displayTime(seconds float64) string { - formater := "%.1f %s" - minute := float64(60) - hour := minute * float64(60) - day := hour * float64(24) - month := day * float64(31) - year := month * float64(12) - century := year * float64(100) - - if seconds < minute { - return 
"instant" - } else if seconds < hour { - return fmt.Sprintf(formater, (1 + math.Ceil(seconds/minute)), "minutes") - } else if seconds < day { - return fmt.Sprintf(formater, (1 + math.Ceil(seconds/hour)), "hours") - } else if seconds < month { - return fmt.Sprintf(formater, (1 + math.Ceil(seconds/day)), "days") - } else if seconds < year { - return fmt.Sprintf(formater, (1 + math.Ceil(seconds/month)), "months") - } else if seconds < century { - return fmt.Sprintf(formater, (1 + math.Ceil(seconds/century)), "years") - } - return "centuries" -} - -func crackTimeToScore(seconds float64) int { - if seconds < math.Pow(10, 2) { - return 0 - } else if seconds < math.Pow(10, 4) { - return 1 - } else if seconds < math.Pow(10, 6) { - return 2 - } else if seconds < math.Pow(10, 8) { - return 3 - } - - return 4 -} diff --git a/vendor/github.com/ccojocar/zxcvbn-go/utils/math/mathutils.go b/vendor/github.com/ccojocar/zxcvbn-go/utils/math/mathutils.go deleted file mode 100644 index 1b989d194..000000000 --- a/vendor/github.com/ccojocar/zxcvbn-go/utils/math/mathutils.go +++ /dev/null @@ -1,40 +0,0 @@ -package zxcvbnmath - -import "math" - -/* -NChoseK http://blog.plover.com/math/choose.html -I am surprised that I have to define these. . . Maybe i just didn't look hard enough for a lib. -*/ -func NChoseK(n, k float64) float64 { - if k > n { - return 0 - } else if k == 0 { - return 1 - } - - var r float64 = 1 - - for d := float64(1); d <= k; d++ { - r *= n - r /= d - n-- - } - - return r -} - -// Round a number -func Round(val float64, roundOn float64, places int) (newVal float64) { - var round float64 - pow := math.Pow(10, float64(places)) - digit := pow * val - _, div := math.Modf(digit) - if div >= roundOn { - round = math.Ceil(digit) - } else { - round = math.Floor(digit) - } - newVal = round / pow - return -} diff --git a/vendor/github.com/ccojocar/zxcvbn-go/zxcvbn.go b/vendor/github.com/ccojocar/zxcvbn-go/zxcvbn.go deleted file mode 100644 index f3dc19e4c..000000000 --- a/vendor/github.com/ccojocar/zxcvbn-go/zxcvbn.go +++ /dev/null @@ -1,22 +0,0 @@ -package zxcvbn - -import ( - "time" - - "github.com/ccojocar/zxcvbn-go/match" - "github.com/ccojocar/zxcvbn-go/matching" - "github.com/ccojocar/zxcvbn-go/scoring" - zxcvbnmath "github.com/ccojocar/zxcvbn-go/utils/math" -) - -// PasswordStrength takes a password, userInputs and optional filters and returns a MinEntropyMatch -func PasswordStrength(password string, userInputs []string, filters ...func(match.Matcher) bool) scoring.MinEntropyMatch { - start := time.Now() - matches := matching.Omnimatch(password, userInputs, filters...) 
- result := scoring.MinimumEntropyMatchSequence(password, matches) - end := time.Now() - - calcTime := end.Nanosecond() - start.Nanosecond() - result.CalcTime = zxcvbnmath.Round(float64(calcTime)*time.Nanosecond.Seconds(), .5, 3) - return result -} diff --git a/vendor/github.com/cespare/xxhash/v2/LICENSE.txt b/vendor/github.com/cespare/xxhash/v2/LICENSE.txt deleted file mode 100644 index 24b53065f..000000000 --- a/vendor/github.com/cespare/xxhash/v2/LICENSE.txt +++ /dev/null @@ -1,22 +0,0 @@ -Copyright (c) 2016 Caleb Spare - -MIT License - -Permission is hereby granted, free of charge, to any person obtaining -a copy of this software and associated documentation files (the -"Software"), to deal in the Software without restriction, including -without limitation the rights to use, copy, modify, merge, publish, -distribute, sublicense, and/or sell copies of the Software, and to -permit persons to whom the Software is furnished to do so, subject to -the following conditions: - -The above copyright notice and this permission notice shall be -included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/vendor/github.com/cespare/xxhash/v2/README.md b/vendor/github.com/cespare/xxhash/v2/README.md deleted file mode 100644 index 8bf0e5b78..000000000 --- a/vendor/github.com/cespare/xxhash/v2/README.md +++ /dev/null @@ -1,72 +0,0 @@ -# xxhash - -[![Go Reference](https://pkg.go.dev/badge/github.com/cespare/xxhash/v2.svg)](https://pkg.go.dev/github.com/cespare/xxhash/v2) -[![Test](https://github.com/cespare/xxhash/actions/workflows/test.yml/badge.svg)](https://github.com/cespare/xxhash/actions/workflows/test.yml) - -xxhash is a Go implementation of the 64-bit [xxHash] algorithm, XXH64. This is a -high-quality hashing algorithm that is much faster than anything in the Go -standard library. - -This package provides a straightforward API: - -``` -func Sum64(b []byte) uint64 -func Sum64String(s string) uint64 -type Digest struct{ ... } - func New() *Digest -``` - -The `Digest` type implements hash.Hash64. Its key methods are: - -``` -func (*Digest) Write([]byte) (int, error) -func (*Digest) WriteString(string) (int, error) -func (*Digest) Sum64() uint64 -``` - -The package is written with optimized pure Go and also contains even faster -assembly implementations for amd64 and arm64. If desired, the `purego` build tag -opts into using the Go code even on those architectures. - -[xxHash]: http://cyan4973.github.io/xxHash/ - -## Compatibility - -This package is in a module and the latest code is in version 2 of the module. -You need a version of Go with at least "minimal module compatibility" to use -github.com/cespare/xxhash/v2: - -* 1.9.7+ for Go 1.9 -* 1.10.3+ for Go 1.10 -* Go 1.11 or later - -I recommend using the latest release of Go. - -## Benchmarks - -Here are some quick benchmarks comparing the pure-Go and assembly -implementations of Sum64. 
- -| input size | purego | asm | -| ---------- | --------- | --------- | -| 4 B | 1.3 GB/s | 1.2 GB/s | -| 16 B | 2.9 GB/s | 3.5 GB/s | -| 100 B | 6.9 GB/s | 8.1 GB/s | -| 4 KB | 11.7 GB/s | 16.7 GB/s | -| 10 MB | 12.0 GB/s | 17.3 GB/s | - -These numbers were generated on Ubuntu 20.04 with an Intel Xeon Platinum 8252C -CPU using the following commands under Go 1.19.2: - -``` -benchstat <(go test -tags purego -benchtime 500ms -count 15 -bench 'Sum64$') -benchstat <(go test -benchtime 500ms -count 15 -bench 'Sum64$') -``` - -## Projects using this package - -- [InfluxDB](https://github.com/influxdata/influxdb) -- [Prometheus](https://github.com/prometheus/prometheus) -- [VictoriaMetrics](https://github.com/VictoriaMetrics/VictoriaMetrics) -- [FreeCache](https://github.com/coocood/freecache) -- [FastCache](https://github.com/VictoriaMetrics/fastcache) diff --git a/vendor/github.com/cespare/xxhash/v2/testall.sh b/vendor/github.com/cespare/xxhash/v2/testall.sh deleted file mode 100644 index 94b9c4439..000000000 --- a/vendor/github.com/cespare/xxhash/v2/testall.sh +++ /dev/null @@ -1,10 +0,0 @@ -#!/bin/bash -set -eu -o pipefail - -# Small convenience script for running the tests with various combinations of -# arch/tags. This assumes we're running on amd64 and have qemu available. - -go test ./... -go test -tags purego ./... -GOARCH=arm64 go test -GOARCH=arm64 go test -tags purego diff --git a/vendor/github.com/cespare/xxhash/v2/xxhash.go b/vendor/github.com/cespare/xxhash/v2/xxhash.go deleted file mode 100644 index a9e0d45c9..000000000 --- a/vendor/github.com/cespare/xxhash/v2/xxhash.go +++ /dev/null @@ -1,228 +0,0 @@ -// Package xxhash implements the 64-bit variant of xxHash (XXH64) as described -// at http://cyan4973.github.io/xxHash/. -package xxhash - -import ( - "encoding/binary" - "errors" - "math/bits" -) - -const ( - prime1 uint64 = 11400714785074694791 - prime2 uint64 = 14029467366897019727 - prime3 uint64 = 1609587929392839161 - prime4 uint64 = 9650029242287828579 - prime5 uint64 = 2870177450012600261 -) - -// Store the primes in an array as well. -// -// The consts are used when possible in Go code to avoid MOVs but we need a -// contiguous array of the assembly code. -var primes = [...]uint64{prime1, prime2, prime3, prime4, prime5} - -// Digest implements hash.Hash64. -type Digest struct { - v1 uint64 - v2 uint64 - v3 uint64 - v4 uint64 - total uint64 - mem [32]byte - n int // how much of mem is used -} - -// New creates a new Digest that computes the 64-bit xxHash algorithm. -func New() *Digest { - var d Digest - d.Reset() - return &d -} - -// Reset clears the Digest's state so that it can be reused. -func (d *Digest) Reset() { - d.v1 = primes[0] + prime2 - d.v2 = prime2 - d.v3 = 0 - d.v4 = -primes[0] - d.total = 0 - d.n = 0 -} - -// Size always returns 8 bytes. -func (d *Digest) Size() int { return 8 } - -// BlockSize always returns 32 bytes. -func (d *Digest) BlockSize() int { return 32 } - -// Write adds more data to d. It always returns len(b), nil. -func (d *Digest) Write(b []byte) (n int, err error) { - n = len(b) - d.total += uint64(n) - - memleft := d.mem[d.n&(len(d.mem)-1):] - - if d.n+n < 32 { - // This new data doesn't even fill the current block. - copy(memleft, b) - d.n += n - return - } - - if d.n > 0 { - // Finish off the partial block. 
- c := copy(memleft, b) - d.v1 = round(d.v1, u64(d.mem[0:8])) - d.v2 = round(d.v2, u64(d.mem[8:16])) - d.v3 = round(d.v3, u64(d.mem[16:24])) - d.v4 = round(d.v4, u64(d.mem[24:32])) - b = b[c:] - d.n = 0 - } - - if len(b) >= 32 { - // One or more full blocks left. - nw := writeBlocks(d, b) - b = b[nw:] - } - - // Store any remaining partial block. - copy(d.mem[:], b) - d.n = len(b) - - return -} - -// Sum appends the current hash to b and returns the resulting slice. -func (d *Digest) Sum(b []byte) []byte { - s := d.Sum64() - return append( - b, - byte(s>>56), - byte(s>>48), - byte(s>>40), - byte(s>>32), - byte(s>>24), - byte(s>>16), - byte(s>>8), - byte(s), - ) -} - -// Sum64 returns the current hash. -func (d *Digest) Sum64() uint64 { - var h uint64 - - if d.total >= 32 { - v1, v2, v3, v4 := d.v1, d.v2, d.v3, d.v4 - h = rol1(v1) + rol7(v2) + rol12(v3) + rol18(v4) - h = mergeRound(h, v1) - h = mergeRound(h, v2) - h = mergeRound(h, v3) - h = mergeRound(h, v4) - } else { - h = d.v3 + prime5 - } - - h += d.total - - b := d.mem[:d.n&(len(d.mem)-1)] - for ; len(b) >= 8; b = b[8:] { - k1 := round(0, u64(b[:8])) - h ^= k1 - h = rol27(h)*prime1 + prime4 - } - if len(b) >= 4 { - h ^= uint64(u32(b[:4])) * prime1 - h = rol23(h)*prime2 + prime3 - b = b[4:] - } - for ; len(b) > 0; b = b[1:] { - h ^= uint64(b[0]) * prime5 - h = rol11(h) * prime1 - } - - h ^= h >> 33 - h *= prime2 - h ^= h >> 29 - h *= prime3 - h ^= h >> 32 - - return h -} - -const ( - magic = "xxh\x06" - marshaledSize = len(magic) + 8*5 + 32 -) - -// MarshalBinary implements the encoding.BinaryMarshaler interface. -func (d *Digest) MarshalBinary() ([]byte, error) { - b := make([]byte, 0, marshaledSize) - b = append(b, magic...) - b = appendUint64(b, d.v1) - b = appendUint64(b, d.v2) - b = appendUint64(b, d.v3) - b = appendUint64(b, d.v4) - b = appendUint64(b, d.total) - b = append(b, d.mem[:d.n]...) - b = b[:len(b)+len(d.mem)-d.n] - return b, nil -} - -// UnmarshalBinary implements the encoding.BinaryUnmarshaler interface. -func (d *Digest) UnmarshalBinary(b []byte) error { - if len(b) < len(magic) || string(b[:len(magic)]) != magic { - return errors.New("xxhash: invalid hash state identifier") - } - if len(b) != marshaledSize { - return errors.New("xxhash: invalid hash state size") - } - b = b[len(magic):] - b, d.v1 = consumeUint64(b) - b, d.v2 = consumeUint64(b) - b, d.v3 = consumeUint64(b) - b, d.v4 = consumeUint64(b) - b, d.total = consumeUint64(b) - copy(d.mem[:], b) - d.n = int(d.total % uint64(len(d.mem))) - return nil -} - -func appendUint64(b []byte, x uint64) []byte { - var a [8]byte - binary.LittleEndian.PutUint64(a[:], x) - return append(b, a[:]...) 
-} - -func consumeUint64(b []byte) ([]byte, uint64) { - x := u64(b) - return b[8:], x -} - -func u64(b []byte) uint64 { return binary.LittleEndian.Uint64(b) } -func u32(b []byte) uint32 { return binary.LittleEndian.Uint32(b) } - -func round(acc, input uint64) uint64 { - acc += input * prime2 - acc = rol31(acc) - acc *= prime1 - return acc -} - -func mergeRound(acc, val uint64) uint64 { - val = round(0, val) - acc ^= val - acc = acc*prime1 + prime4 - return acc -} - -func rol1(x uint64) uint64 { return bits.RotateLeft64(x, 1) } -func rol7(x uint64) uint64 { return bits.RotateLeft64(x, 7) } -func rol11(x uint64) uint64 { return bits.RotateLeft64(x, 11) } -func rol12(x uint64) uint64 { return bits.RotateLeft64(x, 12) } -func rol18(x uint64) uint64 { return bits.RotateLeft64(x, 18) } -func rol23(x uint64) uint64 { return bits.RotateLeft64(x, 23) } -func rol27(x uint64) uint64 { return bits.RotateLeft64(x, 27) } -func rol31(x uint64) uint64 { return bits.RotateLeft64(x, 31) } diff --git a/vendor/github.com/cespare/xxhash/v2/xxhash_amd64.s b/vendor/github.com/cespare/xxhash/v2/xxhash_amd64.s deleted file mode 100644 index 3e8b13257..000000000 --- a/vendor/github.com/cespare/xxhash/v2/xxhash_amd64.s +++ /dev/null @@ -1,209 +0,0 @@ -//go:build !appengine && gc && !purego -// +build !appengine -// +build gc -// +build !purego - -#include "textflag.h" - -// Registers: -#define h AX -#define d AX -#define p SI // pointer to advance through b -#define n DX -#define end BX // loop end -#define v1 R8 -#define v2 R9 -#define v3 R10 -#define v4 R11 -#define x R12 -#define prime1 R13 -#define prime2 R14 -#define prime4 DI - -#define round(acc, x) \ - IMULQ prime2, x \ - ADDQ x, acc \ - ROLQ $31, acc \ - IMULQ prime1, acc - -// round0 performs the operation x = round(0, x). -#define round0(x) \ - IMULQ prime2, x \ - ROLQ $31, x \ - IMULQ prime1, x - -// mergeRound applies a merge round on the two registers acc and x. -// It assumes that prime1, prime2, and prime4 have been loaded. -#define mergeRound(acc, x) \ - round0(x) \ - XORQ x, acc \ - IMULQ prime1, acc \ - ADDQ prime4, acc - -// blockLoop processes as many 32-byte blocks as possible, -// updating v1, v2, v3, and v4. It assumes that there is at least one block -// to process. -#define blockLoop() \ -loop: \ - MOVQ +0(p), x \ - round(v1, x) \ - MOVQ +8(p), x \ - round(v2, x) \ - MOVQ +16(p), x \ - round(v3, x) \ - MOVQ +24(p), x \ - round(v4, x) \ - ADDQ $32, p \ - CMPQ p, end \ - JLE loop - -// func Sum64(b []byte) uint64 -TEXT ·Sum64(SB), NOSPLIT|NOFRAME, $0-32 - // Load fixed primes. - MOVQ ·primes+0(SB), prime1 - MOVQ ·primes+8(SB), prime2 - MOVQ ·primes+24(SB), prime4 - - // Load slice. - MOVQ b_base+0(FP), p - MOVQ b_len+8(FP), n - LEAQ (p)(n*1), end - - // The first loop limit will be len(b)-32. - SUBQ $32, end - - // Check whether we have at least one block. - CMPQ n, $32 - JLT noBlocks - - // Set up initial state (v1, v2, v3, v4). 
- MOVQ prime1, v1 - ADDQ prime2, v1 - MOVQ prime2, v2 - XORQ v3, v3 - XORQ v4, v4 - SUBQ prime1, v4 - - blockLoop() - - MOVQ v1, h - ROLQ $1, h - MOVQ v2, x - ROLQ $7, x - ADDQ x, h - MOVQ v3, x - ROLQ $12, x - ADDQ x, h - MOVQ v4, x - ROLQ $18, x - ADDQ x, h - - mergeRound(h, v1) - mergeRound(h, v2) - mergeRound(h, v3) - mergeRound(h, v4) - - JMP afterBlocks - -noBlocks: - MOVQ ·primes+32(SB), h - -afterBlocks: - ADDQ n, h - - ADDQ $24, end - CMPQ p, end - JG try4 - -loop8: - MOVQ (p), x - ADDQ $8, p - round0(x) - XORQ x, h - ROLQ $27, h - IMULQ prime1, h - ADDQ prime4, h - - CMPQ p, end - JLE loop8 - -try4: - ADDQ $4, end - CMPQ p, end - JG try1 - - MOVL (p), x - ADDQ $4, p - IMULQ prime1, x - XORQ x, h - - ROLQ $23, h - IMULQ prime2, h - ADDQ ·primes+16(SB), h - -try1: - ADDQ $4, end - CMPQ p, end - JGE finalize - -loop1: - MOVBQZX (p), x - ADDQ $1, p - IMULQ ·primes+32(SB), x - XORQ x, h - ROLQ $11, h - IMULQ prime1, h - - CMPQ p, end - JL loop1 - -finalize: - MOVQ h, x - SHRQ $33, x - XORQ x, h - IMULQ prime2, h - MOVQ h, x - SHRQ $29, x - XORQ x, h - IMULQ ·primes+16(SB), h - MOVQ h, x - SHRQ $32, x - XORQ x, h - - MOVQ h, ret+24(FP) - RET - -// func writeBlocks(d *Digest, b []byte) int -TEXT ·writeBlocks(SB), NOSPLIT|NOFRAME, $0-40 - // Load fixed primes needed for round. - MOVQ ·primes+0(SB), prime1 - MOVQ ·primes+8(SB), prime2 - - // Load slice. - MOVQ b_base+8(FP), p - MOVQ b_len+16(FP), n - LEAQ (p)(n*1), end - SUBQ $32, end - - // Load vN from d. - MOVQ s+0(FP), d - MOVQ 0(d), v1 - MOVQ 8(d), v2 - MOVQ 16(d), v3 - MOVQ 24(d), v4 - - // We don't need to check the loop condition here; this function is - // always called with at least one block of data to process. - blockLoop() - - // Copy vN back to d. - MOVQ v1, 0(d) - MOVQ v2, 8(d) - MOVQ v3, 16(d) - MOVQ v4, 24(d) - - // The number of bytes written is p minus the old base pointer. - SUBQ b_base+8(FP), p - MOVQ p, ret+32(FP) - - RET diff --git a/vendor/github.com/cespare/xxhash/v2/xxhash_arm64.s b/vendor/github.com/cespare/xxhash/v2/xxhash_arm64.s deleted file mode 100644 index 7e3145a22..000000000 --- a/vendor/github.com/cespare/xxhash/v2/xxhash_arm64.s +++ /dev/null @@ -1,183 +0,0 @@ -//go:build !appengine && gc && !purego -// +build !appengine -// +build gc -// +build !purego - -#include "textflag.h" - -// Registers: -#define digest R1 -#define h R2 // return value -#define p R3 // input pointer -#define n R4 // input length -#define nblocks R5 // n / 32 -#define prime1 R7 -#define prime2 R8 -#define prime3 R9 -#define prime4 R10 -#define prime5 R11 -#define v1 R12 -#define v2 R13 -#define v3 R14 -#define v4 R15 -#define x1 R20 -#define x2 R21 -#define x3 R22 -#define x4 R23 - -#define round(acc, x) \ - MADD prime2, acc, x, acc \ - ROR $64-31, acc \ - MUL prime1, acc - -// round0 performs the operation x = round(0, x). -#define round0(x) \ - MUL prime2, x \ - ROR $64-31, x \ - MUL prime1, x - -#define mergeRound(acc, x) \ - round0(x) \ - EOR x, acc \ - MADD acc, prime4, prime1, acc - -// blockLoop processes as many 32-byte blocks as possible, -// updating v1, v2, v3, and v4. It assumes that n >= 32. 
-#define blockLoop() \ - LSR $5, n, nblocks \ - PCALIGN $16 \ - loop: \ - LDP.P 16(p), (x1, x2) \ - LDP.P 16(p), (x3, x4) \ - round(v1, x1) \ - round(v2, x2) \ - round(v3, x3) \ - round(v4, x4) \ - SUB $1, nblocks \ - CBNZ nblocks, loop - -// func Sum64(b []byte) uint64 -TEXT ·Sum64(SB), NOSPLIT|NOFRAME, $0-32 - LDP b_base+0(FP), (p, n) - - LDP ·primes+0(SB), (prime1, prime2) - LDP ·primes+16(SB), (prime3, prime4) - MOVD ·primes+32(SB), prime5 - - CMP $32, n - CSEL LT, prime5, ZR, h // if n < 32 { h = prime5 } else { h = 0 } - BLT afterLoop - - ADD prime1, prime2, v1 - MOVD prime2, v2 - MOVD $0, v3 - NEG prime1, v4 - - blockLoop() - - ROR $64-1, v1, x1 - ROR $64-7, v2, x2 - ADD x1, x2 - ROR $64-12, v3, x3 - ROR $64-18, v4, x4 - ADD x3, x4 - ADD x2, x4, h - - mergeRound(h, v1) - mergeRound(h, v2) - mergeRound(h, v3) - mergeRound(h, v4) - -afterLoop: - ADD n, h - - TBZ $4, n, try8 - LDP.P 16(p), (x1, x2) - - round0(x1) - - // NOTE: here and below, sequencing the EOR after the ROR (using a - // rotated register) is worth a small but measurable speedup for small - // inputs. - ROR $64-27, h - EOR x1 @> 64-27, h, h - MADD h, prime4, prime1, h - - round0(x2) - ROR $64-27, h - EOR x2 @> 64-27, h, h - MADD h, prime4, prime1, h - -try8: - TBZ $3, n, try4 - MOVD.P 8(p), x1 - - round0(x1) - ROR $64-27, h - EOR x1 @> 64-27, h, h - MADD h, prime4, prime1, h - -try4: - TBZ $2, n, try2 - MOVWU.P 4(p), x2 - - MUL prime1, x2 - ROR $64-23, h - EOR x2 @> 64-23, h, h - MADD h, prime3, prime2, h - -try2: - TBZ $1, n, try1 - MOVHU.P 2(p), x3 - AND $255, x3, x1 - LSR $8, x3, x2 - - MUL prime5, x1 - ROR $64-11, h - EOR x1 @> 64-11, h, h - MUL prime1, h - - MUL prime5, x2 - ROR $64-11, h - EOR x2 @> 64-11, h, h - MUL prime1, h - -try1: - TBZ $0, n, finalize - MOVBU (p), x4 - - MUL prime5, x4 - ROR $64-11, h - EOR x4 @> 64-11, h, h - MUL prime1, h - -finalize: - EOR h >> 33, h - MUL prime2, h - EOR h >> 29, h - MUL prime3, h - EOR h >> 32, h - - MOVD h, ret+24(FP) - RET - -// func writeBlocks(d *Digest, b []byte) int -TEXT ·writeBlocks(SB), NOSPLIT|NOFRAME, $0-40 - LDP ·primes+0(SB), (prime1, prime2) - - // Load state. Assume v[1-4] are stored contiguously. - MOVD d+0(FP), digest - LDP 0(digest), (v1, v2) - LDP 16(digest), (v3, v4) - - LDP b_base+8(FP), (p, n) - - blockLoop() - - // Store updated state. - STP (v1, v2), 0(digest) - STP (v3, v4), 16(digest) - - BIC $31, n - MOVD n, ret+32(FP) - RET diff --git a/vendor/github.com/cespare/xxhash/v2/xxhash_asm.go b/vendor/github.com/cespare/xxhash/v2/xxhash_asm.go deleted file mode 100644 index 9216e0a40..000000000 --- a/vendor/github.com/cespare/xxhash/v2/xxhash_asm.go +++ /dev/null @@ -1,15 +0,0 @@ -//go:build (amd64 || arm64) && !appengine && gc && !purego -// +build amd64 arm64 -// +build !appengine -// +build gc -// +build !purego - -package xxhash - -// Sum64 computes the 64-bit xxHash digest of b. -// -//go:noescape -func Sum64(b []byte) uint64 - -//go:noescape -func writeBlocks(d *Digest, b []byte) int diff --git a/vendor/github.com/cespare/xxhash/v2/xxhash_other.go b/vendor/github.com/cespare/xxhash/v2/xxhash_other.go deleted file mode 100644 index 26df13bba..000000000 --- a/vendor/github.com/cespare/xxhash/v2/xxhash_other.go +++ /dev/null @@ -1,76 +0,0 @@ -//go:build (!amd64 && !arm64) || appengine || !gc || purego -// +build !amd64,!arm64 appengine !gc purego - -package xxhash - -// Sum64 computes the 64-bit xxHash digest of b. 
-func Sum64(b []byte) uint64 { - // A simpler version would be - // d := New() - // d.Write(b) - // return d.Sum64() - // but this is faster, particularly for small inputs. - - n := len(b) - var h uint64 - - if n >= 32 { - v1 := primes[0] + prime2 - v2 := prime2 - v3 := uint64(0) - v4 := -primes[0] - for len(b) >= 32 { - v1 = round(v1, u64(b[0:8:len(b)])) - v2 = round(v2, u64(b[8:16:len(b)])) - v3 = round(v3, u64(b[16:24:len(b)])) - v4 = round(v4, u64(b[24:32:len(b)])) - b = b[32:len(b):len(b)] - } - h = rol1(v1) + rol7(v2) + rol12(v3) + rol18(v4) - h = mergeRound(h, v1) - h = mergeRound(h, v2) - h = mergeRound(h, v3) - h = mergeRound(h, v4) - } else { - h = prime5 - } - - h += uint64(n) - - for ; len(b) >= 8; b = b[8:] { - k1 := round(0, u64(b[:8])) - h ^= k1 - h = rol27(h)*prime1 + prime4 - } - if len(b) >= 4 { - h ^= uint64(u32(b[:4])) * prime1 - h = rol23(h)*prime2 + prime3 - b = b[4:] - } - for ; len(b) > 0; b = b[1:] { - h ^= uint64(b[0]) * prime5 - h = rol11(h) * prime1 - } - - h ^= h >> 33 - h *= prime2 - h ^= h >> 29 - h *= prime3 - h ^= h >> 32 - - return h -} - -func writeBlocks(d *Digest, b []byte) int { - v1, v2, v3, v4 := d.v1, d.v2, d.v3, d.v4 - n := len(b) - for len(b) >= 32 { - v1 = round(v1, u64(b[0:8:len(b)])) - v2 = round(v2, u64(b[8:16:len(b)])) - v3 = round(v3, u64(b[16:24:len(b)])) - v4 = round(v4, u64(b[24:32:len(b)])) - b = b[32:len(b):len(b)] - } - d.v1, d.v2, d.v3, d.v4 = v1, v2, v3, v4 - return n - len(b) -} diff --git a/vendor/github.com/cespare/xxhash/v2/xxhash_safe.go b/vendor/github.com/cespare/xxhash/v2/xxhash_safe.go deleted file mode 100644 index e86f1b5fd..000000000 --- a/vendor/github.com/cespare/xxhash/v2/xxhash_safe.go +++ /dev/null @@ -1,16 +0,0 @@ -//go:build appengine -// +build appengine - -// This file contains the safe implementations of otherwise unsafe-using code. - -package xxhash - -// Sum64String computes the 64-bit xxHash digest of s. -func Sum64String(s string) uint64 { - return Sum64([]byte(s)) -} - -// WriteString adds more data to d. It always returns len(s), nil. -func (d *Digest) WriteString(s string) (n int, err error) { - return d.Write([]byte(s)) -} diff --git a/vendor/github.com/cespare/xxhash/v2/xxhash_unsafe.go b/vendor/github.com/cespare/xxhash/v2/xxhash_unsafe.go deleted file mode 100644 index 1c1638fd8..000000000 --- a/vendor/github.com/cespare/xxhash/v2/xxhash_unsafe.go +++ /dev/null @@ -1,58 +0,0 @@ -//go:build !appengine -// +build !appengine - -// This file encapsulates usage of unsafe. -// xxhash_safe.go contains the safe implementations. - -package xxhash - -import ( - "unsafe" -) - -// In the future it's possible that compiler optimizations will make these -// XxxString functions unnecessary by realizing that calls such as -// Sum64([]byte(s)) don't need to copy s. See https://go.dev/issue/2205. -// If that happens, even if we keep these functions they can be replaced with -// the trivial safe code. - -// NOTE: The usual way of doing an unsafe string-to-[]byte conversion is: -// -// var b []byte -// bh := (*reflect.SliceHeader)(unsafe.Pointer(&b)) -// bh.Data = (*reflect.StringHeader)(unsafe.Pointer(&s)).Data -// bh.Len = len(s) -// bh.Cap = len(s) -// -// Unfortunately, as of Go 1.15.3 the inliner's cost model assigns a high enough -// weight to this sequence of expressions that any function that uses it will -// not be inlined. Instead, the functions below use a different unsafe -// conversion designed to minimize the inliner weight and allow both to be -// inlined. 
There is also a test (TestInlining) which verifies that these are -// inlined. -// -// See https://github.com/golang/go/issues/42739 for discussion. - -// Sum64String computes the 64-bit xxHash digest of s. -// It may be faster than Sum64([]byte(s)) by avoiding a copy. -func Sum64String(s string) uint64 { - b := *(*[]byte)(unsafe.Pointer(&sliceHeader{s, len(s)})) - return Sum64(b) -} - -// WriteString adds more data to d. It always returns len(s), nil. -// It may be faster than Write([]byte(s)) by avoiding a copy. -func (d *Digest) WriteString(s string) (n int, err error) { - d.Write(*(*[]byte)(unsafe.Pointer(&sliceHeader{s, len(s)}))) - // d.Write always returns len(s), nil. - // Ignoring the return output and returning these fixed values buys a - // savings of 6 in the inliner's cost model. - return len(s), nil -} - -// sliceHeader is similar to reflect.SliceHeader, but it assumes that the layout -// of the first two words is the same as the layout of a string. -type sliceHeader struct { - s string - cap int -} diff --git a/vendor/github.com/charithe/durationcheck/.gitignore b/vendor/github.com/charithe/durationcheck/.gitignore deleted file mode 100644 index c2b126a84..000000000 --- a/vendor/github.com/charithe/durationcheck/.gitignore +++ /dev/null @@ -1 +0,0 @@ -/durationcheck diff --git a/vendor/github.com/charithe/durationcheck/LICENSE b/vendor/github.com/charithe/durationcheck/LICENSE deleted file mode 100644 index 261eeb9e9..000000000 --- a/vendor/github.com/charithe/durationcheck/LICENSE +++ /dev/null @@ -1,201 +0,0 @@ - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. 
For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. 
You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. 
In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright [yyyy] [name of copyright owner] - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. diff --git a/vendor/github.com/charithe/durationcheck/Makefile b/vendor/github.com/charithe/durationcheck/Makefile deleted file mode 100644 index 8e2f81ae8..000000000 --- a/vendor/github.com/charithe/durationcheck/Makefile +++ /dev/null @@ -1,5 +0,0 @@ -build: - @GO111MODULE=on go build -ldflags '-s -w' -o durationcheck ./cmd/durationcheck/main.go - -install: - @GO111MODULE=on go install -ldflags '-s -w' ./cmd/durationcheck diff --git a/vendor/github.com/charithe/durationcheck/README.md b/vendor/github.com/charithe/durationcheck/README.md deleted file mode 100644 index 6f4279bd3..000000000 --- a/vendor/github.com/charithe/durationcheck/README.md +++ /dev/null @@ -1,51 +0,0 @@ -[![CircleCI](https://circleci.com/gh/charithe/durationcheck.svg?style=svg)](https://circleci.com/gh/charithe/durationcheck) - - - -Duration Check -=============== - -A Go linter to detect cases where two `time.Duration` values are being multiplied in possibly erroneous ways. 
- -Consider the following (highly contrived) code: - -```go -func waitForSeconds(someDuration time.Duration) { - timeToWait := someDuration * time.Second - fmt.Printf("Waiting for %s\n", timeToWait) -} - -func main() { - waitForSeconds(5) // waits for 5 seconds - waitForSeconds(5 * time.Second) // waits for 1388888h 53m 20s -} -``` - -Both invocations of the function are syntactically correct but the second one is probably not what most people want. -In this contrived example it is quite easy to spot the mistake. However, if the incorrect `waitForSeconds` invocation is -nested deep within a complex piece of code that runs in the background, the mistake could go unnoticed for months (which -is exactly what happened in a production backend system of fairly well-known software service). - - -See the [test cases](testdata/src/a/a.go) for more examples of the types of errors detected by the linter. - - -Installation -------------- - -Requires Go 1.11 or above. - -``` -go get -u github.com/charithe/durationcheck/cmd/durationcheck -``` - -Usage ------ - -Invoke `durationcheck` with your package name - -``` -durationcheck ./... -# or -durationcheck github.com/you/yourproject/... -``` diff --git a/vendor/github.com/charithe/durationcheck/durationcheck.go b/vendor/github.com/charithe/durationcheck/durationcheck.go deleted file mode 100644 index c47b3a761..000000000 --- a/vendor/github.com/charithe/durationcheck/durationcheck.go +++ /dev/null @@ -1,191 +0,0 @@ -package durationcheck - -import ( - "bytes" - "fmt" - "go/ast" - "go/format" - "go/token" - "go/types" - "log" - "os" - - "golang.org/x/tools/go/analysis" - "golang.org/x/tools/go/analysis/passes/inspect" - "golang.org/x/tools/go/ast/inspector" -) - -var Analyzer = &analysis.Analyzer{ - Name: "durationcheck", - Doc: "check for two durations multiplied together", - Run: run, - Requires: []*analysis.Analyzer{inspect.Analyzer}, -} - -func run(pass *analysis.Pass) (interface{}, error) { - // if the package does not import time, it can be skipped from analysis - if !hasImport(pass.Pkg, "time") { - return nil, nil - } - - inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) - - nodeTypes := []ast.Node{ - (*ast.BinaryExpr)(nil), - } - - inspect.Preorder(nodeTypes, check(pass)) - - return nil, nil -} - -func hasImport(pkg *types.Package, importPath string) bool { - for _, imp := range pkg.Imports() { - if imp.Path() == importPath { - return true - } - } - - return false -} - -// check contains the logic for checking that time.Duration is used correctly in the code being analysed -func check(pass *analysis.Pass) func(ast.Node) { - return func(node ast.Node) { - expr := node.(*ast.BinaryExpr) - // we are only interested in multiplication - if expr.Op != token.MUL { - return - } - - // get the types of the two operands - x, xOK := pass.TypesInfo.Types[expr.X] - y, yOK := pass.TypesInfo.Types[expr.Y] - - if !xOK || !yOK { - return - } - - if isDuration(x.Type) && isDuration(y.Type) { - // check that both sides are acceptable expressions - if isUnacceptableExpr(pass, expr.X) && isUnacceptableExpr(pass, expr.Y) { - pass.Reportf(expr.Pos(), "Multiplication of durations: `%s`", formatNode(expr)) - } - } - } -} - -func isDuration(x types.Type) bool { - return x.String() == "time.Duration" || x.String() == "*time.Duration" -} - -// isUnacceptableExpr returns true if the argument is not an acceptable time.Duration expression -func isUnacceptableExpr(pass *analysis.Pass, expr ast.Expr) bool { - switch e := expr.(type) { - case *ast.BasicLit: - return 
false - case *ast.Ident: - return !isAcceptableNestedExpr(pass, e) - case *ast.CallExpr: - return !isAcceptableCast(pass, e) - case *ast.BinaryExpr: - return !isAcceptableNestedExpr(pass, e) - case *ast.UnaryExpr: - return !isAcceptableNestedExpr(pass, e) - case *ast.SelectorExpr: - return !isAcceptableNestedExpr(pass, e) - case *ast.StarExpr: - return !isAcceptableNestedExpr(pass, e) - case *ast.ParenExpr: - return !isAcceptableNestedExpr(pass, e) - case *ast.IndexExpr: - return !isAcceptableNestedExpr(pass, e) - default: - return true - } -} - -// isAcceptableCast returns true if the argument is an acceptable expression cast to time.Duration -func isAcceptableCast(pass *analysis.Pass, e *ast.CallExpr) bool { - // check that there's a single argument - if len(e.Args) != 1 { - return false - } - - // check that the argument is acceptable - if !isAcceptableNestedExpr(pass, e.Args[0]) { - return false - } - - // check for time.Duration cast - selector, ok := e.Fun.(*ast.SelectorExpr) - if !ok { - return false - } - - return isDurationCast(selector) -} - -func isDurationCast(selector *ast.SelectorExpr) bool { - pkg, ok := selector.X.(*ast.Ident) - if !ok { - return false - } - - if pkg.Name != "time" { - return false - } - - return selector.Sel.Name == "Duration" -} - -func isAcceptableNestedExpr(pass *analysis.Pass, n ast.Expr) bool { - switch e := n.(type) { - case *ast.BasicLit: - return true - case *ast.BinaryExpr: - return isAcceptableNestedExpr(pass, e.X) && isAcceptableNestedExpr(pass, e.Y) - case *ast.UnaryExpr: - return isAcceptableNestedExpr(pass, e.X) - case *ast.Ident: - return isAcceptableIdent(pass, e) - case *ast.CallExpr: - if isAcceptableCast(pass, e) { - return true - } - t := pass.TypesInfo.TypeOf(e) - return !isDuration(t) - case *ast.SelectorExpr: - return isAcceptableNestedExpr(pass, e.X) && isAcceptableIdent(pass, e.Sel) - case *ast.StarExpr: - return isAcceptableNestedExpr(pass, e.X) - case *ast.ParenExpr: - return isAcceptableNestedExpr(pass, e.X) - case *ast.IndexExpr: - t := pass.TypesInfo.TypeOf(e) - return !isDuration(t) - default: - return false - } -} - -func isAcceptableIdent(pass *analysis.Pass, ident *ast.Ident) bool { - obj := pass.TypesInfo.ObjectOf(ident) - return !isDuration(obj.Type()) -} - -func formatNode(node ast.Node) string { - buf := new(bytes.Buffer) - if err := format.Node(buf, token.NewFileSet(), node); err != nil { - log.Printf("Error formatting expression: %v", err) - return "" - } - - return buf.String() -} - -func printAST(msg string, node ast.Node) { - fmt.Printf(">>> %s:\n%s\n\n\n", msg, formatNode(node)) - ast.Fprint(os.Stdout, nil, node, nil) - fmt.Println("--------------") -} diff --git a/vendor/github.com/chavacava/garif/.gitignore b/vendor/github.com/chavacava/garif/.gitignore deleted file mode 100644 index 5dee1052c..000000000 --- a/vendor/github.com/chavacava/garif/.gitignore +++ /dev/null @@ -1,3 +0,0 @@ -*.test -*.out -.devcontainer/ \ No newline at end of file diff --git a/vendor/github.com/chavacava/garif/LICENSE b/vendor/github.com/chavacava/garif/LICENSE deleted file mode 100644 index 2bba73fb7..000000000 --- a/vendor/github.com/chavacava/garif/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -MIT License - -Copyright (c) 2021 Salvador Cavadini - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, 
sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. diff --git a/vendor/github.com/chavacava/garif/README.md b/vendor/github.com/chavacava/garif/README.md deleted file mode 100644 index 6a19c6147..000000000 --- a/vendor/github.com/chavacava/garif/README.md +++ /dev/null @@ -1,52 +0,0 @@ -# garif - -A GO package to create and manipulate SARIF logs. - -SARIF, from _Static Analysis Results Interchange Format_, is a standard JSON-based format for the output of static analysis tools defined and promoted by [OASIS](https://www.oasis-open.org/). - -Current supported version of the standard is [SARIF-v2.1.0](https://docs.oasis-open.org/sarif/sarif/v2.1.0/csprd01/sarif-v2.1.0-csprd01.html -). - -## Usage - -The package provides access to every element of the SARIF model, therefore you are free to manipulate it at every detail. - -The package also provides constructors functions (`New...`) and decorators methods (`With...`) that simplify the creation of SARIF files for common use cases. - -Using these constructors and decorators we can easily create the example SARIF file of the [Microsoft SARIF pages](https://github.com/microsoft/sarif-tutorials/blob/master/docs/1-Introduction.md) - - -```go -import to `github.com/chavacava/garif` - -// ... - -rule := garif.NewRule("no-unused-vars"). - WithHelpUri("https://eslint.org/docs/rules/no-unused-vars"). - WithShortDescription("disallow unused variables"). - WithProperties("category", "Variables") - -driver := garif.NewDriver("ESLint"). - WithInformationUri("https://eslint.org"). - WithRules(rule) - -run := garif.NewRun(NewTool(driver)). - WithArtifactsURIs("file:///C:/dev/sarif/sarif-tutorials/samples/Introduction/simple-example.js") - -run.WithResult(rule.Id, "'x' is assigned a value but never used.", "file:///C:/dev/sarif/sarif-tutorials/samples/Introduction/simple-example.js", 1, 5) - -logFile := garif.NewLogFile([]*Run{run}, Version210) - -logFile.Write(os.Stdout) -``` - -## Why this package? -This package was initiated during my works on adding to [`revive`](https://github.com/mgechev/revive) a SARIF output formatter. -I've tried to use [go-sarif](https://github.com/owenrumney/go-sarif) by [Owen Rumney](https://github.com/owenrumney) but it is too focused in the use case of the static analyzer [tfsec](https://tfsec.dev) so I've decided to create a package flexible enough to generate SARIF files in broader cases. - -## More information about SARIF -For more information about SARIF, you can visit the [Oasis Open](https://www.oasis-open.org/committees/tc_home.php?wg_abbrev=sarif) site. - - -## Contributing -Of course, contributions are welcome! 
\ No newline at end of file diff --git a/vendor/github.com/chavacava/garif/constructors.go b/vendor/github.com/chavacava/garif/constructors.go deleted file mode 100644 index 8910e396e..000000000 --- a/vendor/github.com/chavacava/garif/constructors.go +++ /dev/null @@ -1,338 +0,0 @@ -package garif - -// NewAddress creates a valid Address -func NewAddress() *Address { - return &Address{} -} - -// NewArtifact creates a valid Artifact -func NewArtifact() *Artifact { - return &Artifact{} -} - -// NewArtifactChange creates a valid ArtifactChange -func NewArtifactChange(location *ArtifactLocation, replacements ...*Replacement) *ArtifactChange { - return &ArtifactChange{ - ArtifactLocation: location, - Replacements: replacements, - } -} - -// NewArtifactContent creates a valid ArtifactContent -func NewArtifactContent() *ArtifactContent { - return &ArtifactContent{} -} - -// NewArtifactLocation creates a valid ArtifactLocation -func NewArtifactLocation() *ArtifactLocation { - return &ArtifactLocation{} -} - -// NewAttachment creates a valid Attachment -func NewAttachment(location *ArtifactLocation) *Attachment { - return &Attachment{ArtifactLocation: location} -} - -// NewCodeFlow creates a valid CodeFlow -func NewCodeFlow(threadFlows ...*ThreadFlow) *CodeFlow { - return &CodeFlow{ThreadFlows: threadFlows} -} - -// NewConfigurationOverride creates a valid ConfigurationOverride -func NewConfigurationOverride(configuration *ReportingConfiguration, descriptor *ReportingDescriptorReference) *ConfigurationOverride { - return &ConfigurationOverride{ - Configuration: configuration, - Descriptor: descriptor, - } -} - -// NewConversion creates a valid Conversion -func NewConversion(tool *Tool) *Conversion { - return &Conversion{Tool: tool} -} - -// NewEdge creates a valid Edge -func NewEdge(id, sourceNodeId, targetNodeId string) *Edge { - return &Edge{ - Id: id, - SourceNodeId: sourceNodeId, - TargetNodeId: targetNodeId, - } -} - -// NewEdgeTraversal creates a valid EdgeTraversal -func NewEdgeTraversal(edgeId string) *EdgeTraversal { - return &EdgeTraversal{ - EdgeId: edgeId, - } -} - -// NewException creates a valid Exception -func NewException() *Exception { - return &Exception{} -} - -// NewExternalProperties creates a valid ExternalProperties -func NewExternalProperties() *ExternalProperties { - return &ExternalProperties{} -} - -// NewExternalPropertyFileReference creates a valid ExternalPropertyFileReference -func NewExternalPropertyFileReference() *ExternalPropertyFileReference { - return &ExternalPropertyFileReference{} -} - -// NewExternalPropertyFileReferences creates a valid ExternalPropertyFileReferences -func NewExternalPropertyFileReferences() *ExternalPropertyFileReferences { - return &ExternalPropertyFileReferences{} -} - -// NewFix creates a valid Fix -func NewFix(artifactChanges ...*ArtifactChange) *Fix { - return &Fix{ - ArtifactChanges: artifactChanges, - } -} - -// NewGraph creates a valid Graph -func NewGraph() *Graph { - return &Graph{} -} - -// NewGraphTraversal creates a valid GraphTraversal -func NewGraphTraversal() *GraphTraversal { - return &GraphTraversal{} -} - -// NewInvocation creates a valid Invocation -func NewInvocation(executionSuccessful bool) *Invocation { - return &Invocation{ - ExecutionSuccessful: executionSuccessful, - } -} - -// NewLocation creates a valid Location -func NewLocation() *Location { - return &Location{} -} - -// NewLocationRelationship creates a valid LocationRelationship -func NewLocationRelationship(target int) *LocationRelationship { - return 
&LocationRelationship{ - Target: target, - } -} - -type LogFileVersion string - -const Version210 LogFileVersion = "2.1.0" - -// NewLogFile creates a valid LogFile -func NewLogFile(runs []*Run, version LogFileVersion) *LogFile { - return &LogFile{ - Runs: runs, - Version: version, - } -} - -// NewLogicalLocation creates a valid LogicalLocation -func NewLogicalLocation() *LogicalLocation { - return &LogicalLocation{} -} - -// NewMessage creates a valid Message -func NewMessage() *Message { - return &Message{} -} - -// NewMessageFromText creates a valid Message with the given text -func NewMessageFromText(text string) *Message { - return &Message{ - Text: text, - } -} - -// NewMultiformatMessageString creates a valid MultiformatMessageString -func NewMultiformatMessageString(text string) *MultiformatMessageString { - return &MultiformatMessageString{ - Text: text, - } -} - -// NewNode creates a valid Node -func NewNode(id string) *Node { - return &Node{ - Id: id, - } -} - -// NewNotification creates a valid Notification -func NewNotification(message *Message) *Notification { - return &Notification{ - Message: message, - } -} - -// NewPhysicalLocation creates a valid PhysicalLocation -func NewPhysicalLocation() *PhysicalLocation { - return &PhysicalLocation{} -} - -// NewPropertyBag creates a valid PropertyBag -func NewPropertyBag() *PropertyBag { - return &PropertyBag{} -} - -// NewRectangle creates a valid Rectangle -func NewRectangle() *Rectangle { - return &Rectangle{} -} - -// NewRegion creates a valid Region -func NewRegion() *Region { - return &Region{} -} - -// NewReplacement creates a valid Replacement -func NewReplacement(deletedRegion *Region) *Replacement { - return &Replacement{ - DeletedRegion: deletedRegion, - } -} - -// NewReportingConfiguration creates a valid ReportingConfiguration -func NewReportingConfiguration() *ReportingConfiguration { - return &ReportingConfiguration{} -} - -// NewReportingDescriptor creates a valid ReportingDescriptor -func NewReportingDescriptor(id string) *ReportingDescriptor { - return &ReportingDescriptor{ - Id: id, - } -} - -// NewRule is an alias for NewReportingDescriptor -func NewRule(id string) *ReportingDescriptor { - return NewReportingDescriptor(id) -} - -// NewReportingDescriptorReference creates a valid ReportingDescriptorReference -func NewReportingDescriptorReference() *ReportingDescriptorReference { - return &ReportingDescriptorReference{} -} - -// NewReportingDescriptorRelationship creates a valid ReportingDescriptorRelationship -func NewReportingDescriptorRelationship(target *ReportingDescriptorReference) *ReportingDescriptorRelationship { - return &ReportingDescriptorRelationship{ - Target: target, - } -} - -// NewResult creates a valid Result -func NewResult(message *Message) *Result { - return &Result{ - Message: message, - } -} - -// NewResultProvenance creates a valid ResultProvenance -func NewResultProvenance() *ResultProvenance { - return &ResultProvenance{} -} - -// NewRun creates a valid Run -func NewRun(tool *Tool) *Run { - return &Run{ - Tool: tool, - } -} - -// NewRunAutomationDetails creates a valid RunAutomationDetails -func NewRunAutomationDetails() *RunAutomationDetails { - return &RunAutomationDetails{} -} - -// New creates a valid -func NewSpecialLocations() *SpecialLocations { - return &SpecialLocations{} -} - -// NewStack creates a valid Stack -func NewStack(frames ...*StackFrame) *Stack { - return &Stack{ - Frames: frames, - } -} - -// NewStackFrame creates a valid StackFrame -func NewStackFrame() *StackFrame { 
- return &StackFrame{} -} - -// NewSuppression creates a valid Suppression -func NewSuppression(kind string) *Suppression { - return &Suppression{ - Kind: kind, - } -} - -// NewThreadFlow creates a valid ThreadFlow -func NewThreadFlow(locations []*ThreadFlowLocation) *ThreadFlow { - return &ThreadFlow{ - Locations: locations, - } -} - -// NewThreadFlowLocation creates a valid ThreadFlowLocation -func NewThreadFlowLocation() *ThreadFlowLocation { - return &ThreadFlowLocation{} -} - -// NewTool creates a valid Tool -func NewTool(driver *ToolComponent) *Tool { - return &Tool{ - Driver: driver, - } -} - -// NewToolComponent creates a valid ToolComponent -func NewToolComponent(name string) *ToolComponent { - return &ToolComponent{ - Name: name, - } -} - -// NewDriver is an alias for NewToolComponent -func NewDriver(name string) *ToolComponent { - return NewToolComponent(name) -} - -// NewToolComponentReference creates a valid ToolComponentReference -func NewToolComponentReference() *ToolComponentReference { - return &ToolComponentReference{} -} - -// NewTranslationMetadata creates a valid TranslationMetadata -func NewTranslationMetadata(name string) *TranslationMetadata { - return &TranslationMetadata{ - Name: name, - } -} - -// NewVersionControlDetails creates a valid VersionControlDetails -func NewVersionControlDetails(repositoryUri string) *VersionControlDetails { - return &VersionControlDetails{ - RepositoryUri: repositoryUri, - } -} - -// NewWebRequest creates a valid WebRequest -func NewWebRequest() *WebRequest { - return &WebRequest{} -} - -// NewWebResponse creates a valid WebResponse -func NewWebResponse() *WebResponse { - return &WebResponse{} -} diff --git a/vendor/github.com/chavacava/garif/decorators.go b/vendor/github.com/chavacava/garif/decorators.go deleted file mode 100644 index 00b599fb8..000000000 --- a/vendor/github.com/chavacava/garif/decorators.go +++ /dev/null @@ -1,94 +0,0 @@ -package garif - -// WithLineColumn sets a physical location with the given line and column -func (l *Location) WithLineColumn(line, column int) *Location { - if l.PhysicalLocation == nil { - l.PhysicalLocation = NewPhysicalLocation() - } - - l.PhysicalLocation.Region = NewRegion() - l.PhysicalLocation.Region.StartLine = line - l.PhysicalLocation.Region.StartColumn = column - - return l -} - -// WithURI sets a physical location with the given URI -func (l *Location) WithURI(uri string) *Location { - if l.PhysicalLocation == nil { - l.PhysicalLocation = NewPhysicalLocation() - } - - l.PhysicalLocation.ArtifactLocation = NewArtifactLocation() - l.PhysicalLocation.ArtifactLocation.Uri = uri - - return l -} - -// WithKeyValue sets (overwrites) the value of the given key -func (b PropertyBag) WithKeyValue(key string, value interface{}) PropertyBag { - b[key] = value - return b -} - -// WithHelpUri sets the help URI for this ReportingDescriptor -func (r *ReportingDescriptor) WithHelpUri(uri string) *ReportingDescriptor { - r.HelpUri = uri - return r -} - -// WithProperties adds the key & value to the properties of this ReportingDescriptor -func (r *ReportingDescriptor) WithProperties(key string, value interface{}) *ReportingDescriptor { - if r.Properties == nil { - r.Properties = NewPropertyBag() - } - - r.Properties.WithKeyValue(key, value) - - return r -} - -// WithArtifactsURIs adds the given URI as artifacts of this Run -func (r *Run) WithArtifactsURIs(uris ...string) *Run { - if r.Artifacts == nil { - r.Artifacts = []*Artifact{} - } - - for _, uri := range uris { - a := NewArtifact() - a.Location = 
NewArtifactLocation() - a.Location.Uri = uri - r.Artifacts = append(r.Artifacts, a) - } - - return r -} - -// WithResult adds a result to this Run -func (r *Run) WithResult(ruleId string, message string, uri string, line int, column int) *Run { - if r.Results == nil { - r.Results = []*Result{} - } - - msg := NewMessage() - msg.Text = message - result := NewResult(msg) - location := NewLocation().WithURI(uri).WithLineColumn(line, column) - - result.Locations = append(result.Locations, location) - result.RuleId = ruleId - r.Results = append(r.Results, result) - return r -} - -// WithInformationUri sets the information URI -func (t *ToolComponent) WithInformationUri(uri string) *ToolComponent { - t.InformationUri = uri - return t -} - -// WithRules sets (overwrites) the rules -func (t *ToolComponent) WithRules(rules ...*ReportingDescriptor) *ToolComponent { - t.Rules = rules - return t -} diff --git a/vendor/github.com/chavacava/garif/doc.go b/vendor/github.com/chavacava/garif/doc.go deleted file mode 100644 index 50fa6dfe5..000000000 --- a/vendor/github.com/chavacava/garif/doc.go +++ /dev/null @@ -1,11 +0,0 @@ -// Package garif defines all the GO structures required to model a SARIF log file. -// These structures were created using the JSON-schema sarif-schema-2.1.0.json of SARIF logfiles -// available at https://github.com/oasis-tcs/sarif-spec/tree/master/Schemata. -// -// The package provides constructors for all structures (see constructors.go) These constructors -// ensure that the returned structure instantiation is valid with respect to the JSON schema and -// should be used in place of plain structure instantiation. -// The root structure is LogFile. -// -// The package provides utility decorators for the most commonly used structures (see decorators.go) -package garif diff --git a/vendor/github.com/chavacava/garif/enums.go b/vendor/github.com/chavacava/garif/enums.go deleted file mode 100644 index dea2daf13..000000000 --- a/vendor/github.com/chavacava/garif/enums.go +++ /dev/null @@ -1,41 +0,0 @@ -package garif - -type ResultKind string - -// declare JSON values -const ( - _pass ResultKind = "pass" - _open ResultKind = "open" - _informational ResultKind = "informational" - _notApplicable ResultKind = "notApplicable" - _review ResultKind = "review" - _fail ResultKind = "fail" -) - -// create public visible constants with a namespace as enums -const ( - ResultKind_Pass ResultKind = _pass - ResultKind_Open ResultKind = _open - ResultKind_Informational ResultKind = _informational - ResultKind_NotApplicable ResultKind = _notApplicable - ResultKind_Review ResultKind = _review - ResultKind_Fail ResultKind = _fail -) - -type ResultLevel string - -// declare JSON values -const ( - _warning ResultLevel = "warning" - _error ResultLevel = "error" - _note ResultLevel = "note" - _none ResultLevel = "none" -) - -// create public visible constants with a namespace as enums -const ( - ResultLevel_Warning ResultLevel = _warning - ResultLevel_Error ResultLevel = _error - ResultLevel_Note ResultLevel = _note - ResultLevel_None ResultLevel = _none -) diff --git a/vendor/github.com/chavacava/garif/io.go b/vendor/github.com/chavacava/garif/io.go deleted file mode 100644 index ce5719c96..000000000 --- a/vendor/github.com/chavacava/garif/io.go +++ /dev/null @@ -1,26 +0,0 @@ -package garif - -import ( - "encoding/json" - "io" -) - -// Write writes the JSON -func (l *LogFile) Write(w io.Writer) error { - marshal, err := json.Marshal(l) - if err != nil { - return err - } - _, err = w.Write(marshal) - return err 
-} - -// PrettyWrite writes indented JSON -func (l *LogFile) PrettyWrite(w io.Writer) error { - marshal, err := json.MarshalIndent(l, "", " ") - if err != nil { - return err - } - _, err = w.Write(marshal) - return err -} diff --git a/vendor/github.com/chavacava/garif/models.go b/vendor/github.com/chavacava/garif/models.go deleted file mode 100644 index f16a86136..000000000 --- a/vendor/github.com/chavacava/garif/models.go +++ /dev/null @@ -1,1486 +0,0 @@ -package garif - -// Address A physical or virtual address, or a range of addresses, in an 'addressable region' (memory or a binary file). -type Address struct { - - // The address expressed as a byte offset from the start of the addressable region. - AbsoluteAddress int `json:"absoluteAddress,omitempty"` - - // A human-readable fully qualified name that is associated with the address. - FullyQualifiedName string `json:"fullyQualifiedName,omitempty"` - - // The index within run.addresses of the cached object for this address. - Index int `json:"index,omitempty"` - - // An open-ended string that identifies the address kind. - // 'data', 'function', 'header','instruction', 'module', 'page', 'section', - // 'segment', 'stack', 'stackFrame', 'table' are well-known values. - Kind string `json:"kind,omitempty"` - - // The number of bytes in this range of addresses. - Length int `json:"length,omitempty"` - - // A name that is associated with the address, e.g., '.text'. - Name string `json:"name,omitempty"` - - // The byte offset of this address from the absolute or relative address of the parent object. - OffsetFromParent int `json:"offsetFromParent,omitempty"` - - // The index within run.addresses of the parent object. - ParentIndex int `json:"parentIndex,omitempty"` - - // Key/value pairs that provide additional information about the address. - Properties *PropertyBag `json:"properties,omitempty"` - - // The address expressed as a byte offset from the absolute address of the top-most parent object. - RelativeAddress int `json:"relativeAddress,omitempty"` -} - -// Artifact A single artifact. In some cases, this artifact might be nested within another artifact. -type Artifact struct { - - // The contents of the artifact. - Contents *ArtifactContent `json:"contents,omitempty"` - - // A short description of the artifact. - Description *Message `json:"description,omitempty"` - - // Specifies the encoding for an artifact object that refers to a text file. - Encoding string `json:"encoding,omitempty"` - - // A dictionary, each of whose keys is the name of a hash function and each of whose values is - // the hashed value of the artifact produced by the specified hash function. - Hashes map[string]string `json:"hashes,omitempty"` - - // The Coordinated Universal Time (UTC) date and time at which the artifact was most recently modified. - // See "Date/time properties" in the SARIF spec for the required format. - LastModifiedTimeUtc string `json:"lastModifiedTimeUtc,omitempty"` - - // The length of the artifact in bytes. - Length int `json:"length,omitempty"` - - // The location of the artifact. - Location *ArtifactLocation `json:"location,omitempty"` - - // The MIME type (RFC 2045) of the artifact. - MimeType string `json:"mimeType,omitempty"` - - // The offset in bytes of the artifact within its containing artifact. - Offset int `json:"offset,omitempty"` - - // Identifies the index of the immediate parent of the artifact, if this artifact is nested. 
- ParentIndex int `json:"parentIndex,omitempty"` - - // Key/value pairs that provide additional information about the artifact. - Properties *PropertyBag `json:"properties,omitempty"` - - // The role or roles played by the artifact in the analysis. - Roles []interface{} `json:"roles,omitempty"` - - // Specifies the source language for any artifact object that refers to a text file that contains source code. - SourceLanguage string `json:"sourceLanguage,omitempty"` -} - -// ArtifactChange A change to a single artifact. -type ArtifactChange struct { - - // The location of the artifact to change. - ArtifactLocation *ArtifactLocation `json:"artifactLocation"` - - // Key/value pairs that provide additional information about the change. - Properties *PropertyBag `json:"properties,omitempty"` - - // An array of replacement objects, each of which represents the replacement of a single region in a - // single artifact specified by 'artifactLocation'. - Replacements []*Replacement `json:"replacements"` -} - -// ArtifactContent Represents the contents of an artifact. -type ArtifactContent struct { - - // MIME Base64-encoded content from a binary artifact, or from a text artifact in its original encoding. - Binary string `json:"binary,omitempty"` - - // Key/value pairs that provide additional information about the artifact content. - Properties *PropertyBag `json:"properties,omitempty"` - - // An alternate rendered representation of the artifact (e.g., a decompiled representation of a binary region). - Rendered *MultiformatMessageString `json:"rendered,omitempty"` - - // UTF-8-encoded content from a text artifact. - Text string `json:"text,omitempty"` -} - -// ArtifactLocation Specifies the location of an artifact. -type ArtifactLocation struct { - - // A short description of the artifact location. - Description *Message `json:"description,omitempty"` - - // The index within the run artifacts array of the artifact object associated with the artifact location. - Index int `json:"index,omitempty"` - - // Key/value pairs that provide additional information about the artifact location. - Properties *PropertyBag `json:"properties,omitempty"` - - // A string containing a valid relative or absolute URI. - Uri string `json:"uri,omitempty"` - - // A string which indirectly specifies the absolute URI with respect to which a relative URI in the "uri" property is interpreted. - UriBaseId string `json:"uriBaseId,omitempty"` -} - -// Attachment An artifact relevant to a result. -type Attachment struct { - - // The location of the attachment. - ArtifactLocation *ArtifactLocation `json:"artifactLocation"` - - // A message describing the role played by the attachment. - Description *Message `json:"description,omitempty"` - - // Key/value pairs that provide additional information about the attachment. - Properties *PropertyBag `json:"properties,omitempty"` - - // An array of rectangles specifying areas of interest within the image. - Rectangles []*Rectangle `json:"rectangles,omitempty"` - - // An array of regions of interest within the attachment. - Regions []*Region `json:"regions,omitempty"` -} - -// CodeFlow A set of threadFlows which together describe a pattern of code execution relevant to detecting a result. -type CodeFlow struct { - - // A message relevant to the code flow. - Message *Message `json:"message,omitempty"` - - // Key/value pairs that provide additional information about the code flow. 
- Properties *PropertyBag `json:"properties,omitempty"` - - // An array of one or more unique threadFlow objects, each of which describes the progress of a program - // through a thread of execution. - ThreadFlows []*ThreadFlow `json:"threadFlows"` -} - -// ConfigurationOverride Information about how a specific rule or notification was reconfigured at runtime. -type ConfigurationOverride struct { - - // Specifies how the rule or notification was configured during the scan. - Configuration *ReportingConfiguration `json:"configuration"` - - // A reference used to locate the descriptor whose configuration was overridden. - Descriptor *ReportingDescriptorReference `json:"descriptor"` - - // Key/value pairs that provide additional information about the configuration override. - Properties *PropertyBag `json:"properties,omitempty"` -} - -// Conversion Describes how a converter transformed the output of a static analysis tool from the analysis tool's native output format into the SARIF format. -type Conversion struct { - - // The locations of the analysis tool's per-run log files. - AnalysisToolLogFiles []*ArtifactLocation `json:"analysisToolLogFiles,omitempty"` - - // An invocation object that describes the invocation of the converter. - Invocation *Invocation `json:"invocation,omitempty"` - - // Key/value pairs that provide additional information about the conversion. - Properties *PropertyBag `json:"properties,omitempty"` - - // A tool object that describes the converter. - Tool *Tool `json:"tool"` -} - -// Edge Represents a directed edge in a graph. -type Edge struct { - - // A string that uniquely identifies the edge within its graph. - Id string `json:"id"` - - // A short description of the edge. - Label *Message `json:"label,omitempty"` - - // Key/value pairs that provide additional information about the edge. - Properties *PropertyBag `json:"properties,omitempty"` - - // Identifies the source node (the node at which the edge starts). - SourceNodeId string `json:"sourceNodeId"` - - // Identifies the target node (the node at which the edge ends). - TargetNodeId string `json:"targetNodeId"` -} - -// EdgeTraversal Represents the traversal of a single edge during a graph traversal. -type EdgeTraversal struct { - - // Identifies the edge being traversed. - EdgeId string `json:"edgeId"` - - // The values of relevant expressions after the edge has been traversed. - FinalState map[string]*MultiformatMessageString `json:"finalState,omitempty"` - - // A message to display to the user as the edge is traversed. - Message *Message `json:"message,omitempty"` - - // Key/value pairs that provide additional information about the edge traversal. - Properties *PropertyBag `json:"properties,omitempty"` - - // The number of edge traversals necessary to return from a nested graph. - StepOverEdgeCount int `json:"stepOverEdgeCount,omitempty"` -} - -// Exception Describes a runtime exception encountered during the execution of an analysis tool. -type Exception struct { - - // An array of exception objects each of which is considered a cause of this exception. - InnerExceptions []*Exception `json:"innerExceptions,omitempty"` - - // A string that identifies the kind of exception, for example, the fully qualified type name of an object that was thrown, or the symbolic name of a signal. - Kind string `json:"kind,omitempty"` - - // A message that describes the exception. - Message string `json:"message,omitempty"` - - // Key/value pairs that provide additional information about the exception. 
- Properties *PropertyBag `json:"properties,omitempty"` - - // The sequence of function calls leading to the exception. - Stack *Stack `json:"stack,omitempty"` -} - -// ExternalProperties The top-level element of an external property file. -type ExternalProperties struct { - - // Addresses that will be merged with a separate run. - Addresses []*Address `json:"addresses,omitempty"` - - // An array of artifact objects that will be merged with a separate run. - Artifacts []*Artifact `json:"artifacts,omitempty"` - - // A conversion object that will be merged with a separate run. - Conversion *Conversion `json:"conversion,omitempty"` - - // The analysis tool object that will be merged with a separate run. - Driver *ToolComponent `json:"driver,omitempty"` - - // Tool extensions that will be merged with a separate run. - Extensions []*ToolComponent `json:"extensions,omitempty"` - - // Key/value pairs that provide additional information that will be merged with a separate run. - ExternalizedProperties *PropertyBag `json:"externalizedProperties,omitempty"` - - // An array of graph objects that will be merged with a separate run. - Graphs []*Graph `json:"graphs,omitempty"` - - // A stable, unique identifer for this external properties object, in the form of a GUID. - Guid string `json:"guid,omitempty"` - - // Describes the invocation of the analysis tool that will be merged with a separate run. - Invocations []*Invocation `json:"invocations,omitempty"` - - // An array of logical locations such as namespaces, types or functions that will be merged with a separate run. - LogicalLocations []*LogicalLocation `json:"logicalLocations,omitempty"` - - // Tool policies that will be merged with a separate run. - Policies []*ToolComponent `json:"policies,omitempty"` - - // Key/value pairs that provide additional information about the external properties. - Properties *PropertyBag `json:"properties,omitempty"` - - // An array of result objects that will be merged with a separate run. - Results []*Result `json:"results,omitempty"` - - // A stable, unique identifer for the run associated with this external properties object, in the form of a GUID. - RunGuid string `json:"runGuid,omitempty"` - - // The URI of the JSON schema corresponding to the version of the external property file format. - Schema string `json:"schema,omitempty"` - - // Tool taxonomies that will be merged with a separate run. - Taxonomies []*ToolComponent `json:"taxonomies,omitempty"` - - // An array of threadFlowLocation objects that will be merged with a separate run. - ThreadFlowLocations []*ThreadFlowLocation `json:"threadFlowLocations,omitempty"` - - // Tool translations that will be merged with a separate run. - Translations []*ToolComponent `json:"translations,omitempty"` - - // The SARIF format version of this external properties object. - Version interface{} `json:"version,omitempty"` - - // Requests that will be merged with a separate run. - WebRequests []*WebRequest `json:"webRequests,omitempty"` - - // Responses that will be merged with a separate run. - WebResponses []*WebResponse `json:"webResponses,omitempty"` -} - -// ExternalPropertyFileReference Contains information that enables a SARIF consumer to locate the external property file that contains the value of an externalized property associated with the run. -type ExternalPropertyFileReference struct { - - // A stable, unique identifer for the external property file in the form of a GUID. 
- Guid string `json:"guid,omitempty"` - - // A non-negative integer specifying the number of items contained in the external property file. - ItemCount int `json:"itemCount,omitempty"` - - // The location of the external property file. - Location *ArtifactLocation `json:"location,omitempty"` - - // Key/value pairs that provide additional information about the external property file. - Properties *PropertyBag `json:"properties,omitempty"` -} - -// ExternalPropertyFileReferences References to external property files that should be inlined with the content of a root log file. -type ExternalPropertyFileReferences struct { - - // An array of external property files containing run.addresses arrays to be merged with the root log file. - Addresses []*ExternalPropertyFileReference `json:"addresses,omitempty"` - - // An array of external property files containing run.artifacts arrays to be merged with the root log file. - Artifacts []*ExternalPropertyFileReference `json:"artifacts,omitempty"` - - // An external property file containing a run.conversion object to be merged with the root log file. - Conversion *ExternalPropertyFileReference `json:"conversion,omitempty"` - - // An external property file containing a run.driver object to be merged with the root log file. - Driver *ExternalPropertyFileReference `json:"driver,omitempty"` - - // An array of external property files containing run.extensions arrays to be merged with the root log file. - Extensions []*ExternalPropertyFileReference `json:"extensions,omitempty"` - - // An external property file containing a run.properties object to be merged with the root log file. - ExternalizedProperties *ExternalPropertyFileReference `json:"externalizedProperties,omitempty"` - - // An array of external property files containing a run.graphs object to be merged with the root log file. - Graphs []*ExternalPropertyFileReference `json:"graphs,omitempty"` - - // An array of external property files containing run.invocations arrays to be merged with the root log file. - Invocations []*ExternalPropertyFileReference `json:"invocations,omitempty"` - - // An array of external property files containing run.logicalLocations arrays to be merged with the root log file. - LogicalLocations []*ExternalPropertyFileReference `json:"logicalLocations,omitempty"` - - // An array of external property files containing run.policies arrays to be merged with the root log file. - Policies []*ExternalPropertyFileReference `json:"policies,omitempty"` - - // Key/value pairs that provide additional information about the external property files. - Properties *PropertyBag `json:"properties,omitempty"` - - // An array of external property files containing run.results arrays to be merged with the root log file. - Results []*ExternalPropertyFileReference `json:"results,omitempty"` - - // An array of external property files containing run.taxonomies arrays to be merged with the root log file. - Taxonomies []*ExternalPropertyFileReference `json:"taxonomies,omitempty"` - - // An array of external property files containing run.threadFlowLocations arrays to be merged with the root log file. - ThreadFlowLocations []*ExternalPropertyFileReference `json:"threadFlowLocations,omitempty"` - - // An array of external property files containing run.translations arrays to be merged with the root log file. - Translations []*ExternalPropertyFileReference `json:"translations,omitempty"` - - // An array of external property files containing run.requests arrays to be merged with the root log file. 
- WebRequests []*ExternalPropertyFileReference `json:"webRequests,omitempty"` - - // An array of external property files containing run.responses arrays to be merged with the root log file. - WebResponses []*ExternalPropertyFileReference `json:"webResponses,omitempty"` -} - -// Fix A proposed fix for the problem represented by a result object. -// A fix specifies a set of artifacts to modify. For each artifact, -// it specifies a set of bytes to remove, and provides a set of new bytes to replace them. -type Fix struct { - - // One or more artifact changes that comprise a fix for a result. - ArtifactChanges []*ArtifactChange `json:"artifactChanges"` - - // A message that describes the proposed fix, enabling viewers to present the proposed change to an end user. - Description *Message `json:"description,omitempty"` - - // Key/value pairs that provide additional information about the fix. - Properties *PropertyBag `json:"properties,omitempty"` -} - -// Graph A network of nodes and directed edges that describes some aspect of the -// structure of the code (for example, a call graph). -type Graph struct { - - // A description of the graph. - Description *Message `json:"description,omitempty"` - - // An array of edge objects representing the edges of the graph. - Edges []*Edge `json:"edges,omitempty"` - - // An array of node objects representing the nodes of the graph. - Nodes []*Node `json:"nodes,omitempty"` - - // Key/value pairs that provide additional information about the graph. - Properties *PropertyBag `json:"properties,omitempty"` -} - -// GraphTraversal Represents a path through a graph. -type GraphTraversal struct { - - // A description of this graph traversal. - Description *Message `json:"description,omitempty"` - - // The sequences of edges traversed by this graph traversal. - EdgeTraversals []*EdgeTraversal `json:"edgeTraversals,omitempty"` - - // Values of relevant expressions at the start of the graph traversal that remain constant for the graph traversal. - ImmutableState map[string]*MultiformatMessageString `json:"immutableState,omitempty"` - - // Values of relevant expressions at the start of the graph traversal that may change during graph traversal. - InitialState map[string]*MultiformatMessageString `json:"initialState,omitempty"` - - // Key/value pairs that provide additional information about the graph traversal. - Properties *PropertyBag `json:"properties,omitempty"` - - // The index within the result.graphs to be associated with the result. - ResultGraphIndex int `json:"resultGraphIndex,omitempty"` - - // The index within the run.graphs to be associated with the result. - RunGraphIndex int `json:"runGraphIndex,omitempty"` -} - -// Invocation The runtime environment of the analysis tool run. -type Invocation struct { - - // The account under which the invocation occurred. - Account string `json:"account,omitempty"` - - // An array of strings, containing in order the command line arguments passed to the tool from the operating system. - Arguments []string `json:"arguments,omitempty"` - - // The command line used to invoke the tool. - CommandLine string `json:"commandLine,omitempty"` - - // The Coordinated Universal Time (UTC) date and time at which the invocation ended. See "Date/time properties" in the SARIF spec for the required format. - EndTimeUtc string `json:"endTimeUtc,omitempty"` - - // The environment variables associated with the analysis tool process, expressed as key/value pairs. 
- EnvironmentVariables map[string]string `json:"environmentVariables,omitempty"` - - // An absolute URI specifying the location of the executable that was invoked. - ExecutableLocation *ArtifactLocation `json:"executableLocation,omitempty"` - - // Specifies whether the tool's execution completed successfully. - ExecutionSuccessful bool `json:"executionSuccessful"` - - // The process exit code. - ExitCode int `json:"exitCode,omitempty"` - - // The reason for the process exit. - ExitCodeDescription string `json:"exitCodeDescription,omitempty"` - - // The name of the signal that caused the process to exit. - ExitSignalName string `json:"exitSignalName,omitempty"` - - // The numeric value of the signal that caused the process to exit. - ExitSignalNumber int `json:"exitSignalNumber,omitempty"` - - // The machine on which the invocation occurred. - Machine string `json:"machine,omitempty"` - - // An array of configurationOverride objects that describe notifications related runtime overrides. - NotificationConfigurationOverrides []*ConfigurationOverride `json:"notificationConfigurationOverrides,omitempty"` - - // The id of the process in which the invocation occurred. - ProcessId int `json:"processId,omitempty"` - - // The reason given by the operating system that the process failed to start. - ProcessStartFailureMessage string `json:"processStartFailureMessage,omitempty"` - - // Key/value pairs that provide additional information about the invocation. - Properties *PropertyBag `json:"properties,omitempty"` - - // The locations of any response files specified on the tool's command line. - ResponseFiles []*ArtifactLocation `json:"responseFiles,omitempty"` - - // An array of configurationOverride objects that describe rules related runtime overrides. - RuleConfigurationOverrides []*ConfigurationOverride `json:"ruleConfigurationOverrides,omitempty"` - - // The Coordinated Universal Time (UTC) date and time at which the invocation started. See "Date/time properties" in the SARIF spec for the required format. - StartTimeUtc string `json:"startTimeUtc,omitempty"` - - // A file containing the standard error stream from the process that was invoked. - Stderr *ArtifactLocation `json:"stderr,omitempty"` - - // A file containing the standard input stream to the process that was invoked. - Stdin *ArtifactLocation `json:"stdin,omitempty"` - - // A file containing the standard output stream from the process that was invoked. - Stdout *ArtifactLocation `json:"stdout,omitempty"` - - // A file containing the interleaved standard output and standard error stream from the process that was invoked. - StdoutStderr *ArtifactLocation `json:"stdoutStderr,omitempty"` - - // A list of conditions detected by the tool that are relevant to the tool's configuration. - ToolConfigurationNotifications []*Notification `json:"toolConfigurationNotifications,omitempty"` - - // A list of runtime conditions detected by the tool during the analysis. - ToolExecutionNotifications []*Notification `json:"toolExecutionNotifications,omitempty"` - - // The working directory for the invocation. - WorkingDirectory *ArtifactLocation `json:"workingDirectory,omitempty"` -} - -// Location A location within a programming artifact. -type Location struct { - - // A set of regions relevant to the location. - Annotations []*Region `json:"annotations,omitempty"` - - // Value that distinguishes this location from all other locations within a single result object. - Id int `json:"id,omitempty"` - - // The logical locations associated with the result. 
- LogicalLocations []*LogicalLocation `json:"logicalLocations,omitempty"` - - // A message relevant to the location. - Message *Message `json:"message,omitempty"` - - // Identifies the artifact and region. - PhysicalLocation *PhysicalLocation `json:"physicalLocation,omitempty"` - - // Key/value pairs that provide additional information about the location. - Properties *PropertyBag `json:"properties,omitempty"` - - // An array of objects that describe relationships between this location and others. - Relationships []*LocationRelationship `json:"relationships,omitempty"` -} - -// LocationRelationship Information about the relation of one location to another. -type LocationRelationship struct { - - // A description of the location relationship. - Description *Message `json:"description,omitempty"` - - // A set of distinct strings that categorize the relationship. Well-known kinds include 'includes', 'isIncludedBy' and 'relevant'. - Kinds []string `json:"kinds,omitempty"` - - // Key/value pairs that provide additional information about the location relationship. - Properties *PropertyBag `json:"properties,omitempty"` - - // A reference to the related location. - Target int `json:"target"` -} - -// LogFile Static Analysis Results Format (SARIF) Version 2.1.0 JSON Schema. -type LogFile struct { - - // References to external property files that share data between runs. - InlineExternalProperties []*ExternalProperties `json:"inlineExternalProperties,omitempty"` - - // Key/value pairs that provide additional information about the log file. - Properties *PropertyBag `json:"properties,omitempty"` - - // The set of runs contained in this log file. - Runs []*Run `json:"runs"` - - // The URI of the JSON schema corresponding to the version. - Schema string `json:"$schema,omitempty"` - - // The SARIF format version of this log file. - Version interface{} `json:"version"` -} - -// LogicalLocation A logical location of a construct that produced a result. -type LogicalLocation struct { - - // The machine-readable name for the logical location, such as a mangled function name provided by a C++ compiler that encodes calling convention, return type and other details along with the function name. - DecoratedName string `json:"decoratedName,omitempty"` - - // The human-readable fully qualified name of the logical location. - FullyQualifiedName string `json:"fullyQualifiedName,omitempty"` - - // The index within the logical locations array. - Index int `json:"index,omitempty"` - - // The type of construct this logical location component refers to. Should be one of 'function', 'member', 'module', 'namespace', 'parameter', 'resource', 'returnType', 'type', 'variable', 'object', 'array', 'property', 'value', 'element', 'text', 'attribute', 'comment', 'declaration', 'dtd' or 'processingInstruction', if any of those accurately describe the construct. - Kind string `json:"kind,omitempty"` - - // Identifies the construct in which the result occurred. For example, this property might contain the name of a class or a method. - Name string `json:"name,omitempty"` - - // Identifies the index of the immediate parent of the construct in which the result was detected. For example, this property might point to a logical location that represents the namespace that holds a type. - ParentIndex int `json:"parentIndex,omitempty"` - - // Key/value pairs that provide additional information about the logical location. 
- Properties *PropertyBag `json:"properties,omitempty"` -} - -// Message Encapsulates a message intended to be read by the end user. -type Message struct { - - // An array of strings to substitute into the message string. - Arguments []string `json:"arguments,omitempty"` - - // The identifier for this message. - Id string `json:"id,omitempty"` - - // A Markdown message string. - Markdown string `json:"markdown,omitempty"` - - // Key/value pairs that provide additional information about the message. - Properties *PropertyBag `json:"properties,omitempty"` - - // A plain text message string. - Text string `json:"text,omitempty"` -} - -// MultiformatMessageString A message string or message format string rendered in multiple formats. -type MultiformatMessageString struct { - - // A Markdown message string or format string. - Markdown string `json:"markdown,omitempty"` - - // Key/value pairs that provide additional information about the message. - Properties *PropertyBag `json:"properties,omitempty"` - - // A plain text message string or format string. - Text string `json:"text"` -} - -// Node Represents a node in a graph. -type Node struct { - - // Array of child nodes. - Children []*Node `json:"children,omitempty"` - - // A string that uniquely identifies the node within its graph. - Id string `json:"id"` - - // A short description of the node. - Label *Message `json:"label,omitempty"` - - // A code location associated with the node. - Location *Location `json:"location,omitempty"` - - // Key/value pairs that provide additional information about the node. - Properties *PropertyBag `json:"properties,omitempty"` -} - -// Notification Describes a condition relevant to the tool itself, as opposed to being relevant to a target being analyzed by the tool. -type Notification struct { - - // A reference used to locate the rule descriptor associated with this notification. - AssociatedRule *ReportingDescriptorReference `json:"associatedRule,omitempty"` - - // A reference used to locate the descriptor relevant to this notification. - Descriptor *ReportingDescriptorReference `json:"descriptor,omitempty"` - - // The runtime exception, if any, relevant to this notification. - Exception *Exception `json:"exception,omitempty"` - - // A value specifying the severity level of the notification. - Level interface{} `json:"level,omitempty"` - - // The locations relevant to this notification. - Locations []*Location `json:"locations,omitempty"` - - // A message that describes the condition that was encountered. - Message *Message `json:"message"` - - // Key/value pairs that provide additional information about the notification. - Properties *PropertyBag `json:"properties,omitempty"` - - // The thread identifier of the code that generated the notification. - ThreadId int `json:"threadId,omitempty"` - - // The Coordinated Universal Time (UTC) date and time at which the analysis tool generated the notification. - TimeUtc string `json:"timeUtc,omitempty"` -} - -// PhysicalLocation A physical location relevant to a result. Specifies a reference to a programming artifact together with a range of bytes or characters within that artifact. -type PhysicalLocation struct { - - // The address of the location. - Address *Address `json:"address,omitempty"` - - // The location of the artifact. - ArtifactLocation *ArtifactLocation `json:"artifactLocation,omitempty"` - - // Specifies a portion of the artifact that encloses the region. Allows a viewer to display additional context around the region. 
- ContextRegion *Region `json:"contextRegion,omitempty"` - - // Key/value pairs that provide additional information about the physical location. - Properties *PropertyBag `json:"properties,omitempty"` - - // Specifies a portion of the artifact. - Region *Region `json:"region,omitempty"` -} - -type PropertyBag map[string]interface{} - -/* -// PropertyBag Key/value pairs that provide additional information about the object. -type PropertyBag struct { - AdditionalProperties map[string]interface{} `json:"-,omitempty"` - - // A set of distinct strings that provide additional information. - Tags []string `json:"tags,omitempty"` -} -*/ -// Rectangle An area within an image. -type Rectangle struct { - - // The Y coordinate of the bottom edge of the rectangle, measured in the image's natural units. - Bottom float64 `json:"bottom,omitempty"` - - // The X coordinate of the left edge of the rectangle, measured in the image's natural units. - Left float64 `json:"left,omitempty"` - - // A message relevant to the rectangle. - Message *Message `json:"message,omitempty"` - - // Key/value pairs that provide additional information about the rectangle. - Properties *PropertyBag `json:"properties,omitempty"` - - // The X coordinate of the right edge of the rectangle, measured in the image's natural units. - Right float64 `json:"right,omitempty"` - - // The Y coordinate of the top edge of the rectangle, measured in the image's natural units. - Top float64 `json:"top,omitempty"` -} - -// Region A region within an artifact where a result was detected. -type Region struct { - - // The length of the region in bytes. - ByteLength int `json:"byteLength,omitempty"` - - // The zero-based offset from the beginning of the artifact of the first byte in the region. - ByteOffset int `json:"byteOffset,omitempty"` - - // The length of the region in characters. - CharLength int `json:"charLength,omitempty"` - - // The zero-based offset from the beginning of the artifact of the first character in the region. - CharOffset int `json:"charOffset,omitempty"` - - // The column number of the character following the end of the region. - EndColumn int `json:"endColumn,omitempty"` - - // The line number of the last character in the region. - EndLine int `json:"endLine,omitempty"` - - // A message relevant to the region. - Message *Message `json:"message,omitempty"` - - // Key/value pairs that provide additional information about the region. - Properties *PropertyBag `json:"properties,omitempty"` - - // The portion of the artifact contents within the specified region. - Snippet *ArtifactContent `json:"snippet,omitempty"` - - // Specifies the source language, if any, of the portion of the artifact specified by the region object. - SourceLanguage string `json:"sourceLanguage,omitempty"` - - // The column number of the first character in the region. - StartColumn int `json:"startColumn,omitempty"` - - // The line number of the first character in the region. - StartLine int `json:"startLine,omitempty"` -} - -// Replacement The replacement of a single region of an artifact. -type Replacement struct { - - // The region of the artifact to delete. - DeletedRegion *Region `json:"deletedRegion"` - - // The content to insert at the location specified by the 'deletedRegion' property. - InsertedContent *ArtifactContent `json:"insertedContent,omitempty"` - - // Key/value pairs that provide additional information about the replacement. 
- Properties *PropertyBag `json:"properties,omitempty"` -} - -// ReportingConfiguration Information about a rule or notification that can be configured at runtime. -type ReportingConfiguration struct { - - // Specifies whether the report may be produced during the scan. - Enabled bool `json:"enabled,omitempty"` - - // Specifies the failure level for the report. - Level interface{} `json:"level,omitempty"` - - // Contains configuration information specific to a report. - Parameters *PropertyBag `json:"parameters,omitempty"` - - // Key/value pairs that provide additional information about the reporting configuration. - Properties *PropertyBag `json:"properties,omitempty"` - - // Specifies the relative priority of the report. Used for analysis output only. - Rank float64 `json:"rank,omitempty"` -} - -// ReportingDescriptor Metadata that describes a specific report produced by the tool, as part of the analysis it provides or its runtime reporting. -type ReportingDescriptor struct { - - // Default reporting configuration information. - DefaultConfiguration *ReportingConfiguration `json:"defaultConfiguration,omitempty"` - - // An array of unique identifies in the form of a GUID by which this report was known in some previous version of the analysis tool. - DeprecatedGuids []string `json:"deprecatedGuids,omitempty"` - - // An array of stable, opaque identifiers by which this report was known in some previous version of the analysis tool. - DeprecatedIds []string `json:"deprecatedIds,omitempty"` - - // An array of readable identifiers by which this report was known in some previous version of the analysis tool. - DeprecatedNames []string `json:"deprecatedNames,omitempty"` - - // A description of the report. Should, as far as possible, provide details sufficient to enable resolution of any problem indicated by the result. - FullDescription *MultiformatMessageString `json:"fullDescription,omitempty"` - - // A unique identifer for the reporting descriptor in the form of a GUID. - Guid string `json:"guid,omitempty"` - - // Provides the primary documentation for the report, useful when there is no online documentation. - Help *MultiformatMessageString `json:"help,omitempty"` - - // A URI where the primary documentation for the report can be found. - HelpUri string `json:"helpUri,omitempty"` - - // A stable, opaque identifier for the report. - Id string `json:"id"` - - // A set of name/value pairs with arbitrary names. Each value is a multiformatMessageString object, which holds message strings in plain text and (optionally) Markdown format. The strings can include placeholders, which can be used to construct a message in combination with an arbitrary number of additional string arguments. - MessageStrings map[string]*MultiformatMessageString `json:"messageStrings,omitempty"` - - // A report identifier that is understandable to an end user. - Name string `json:"name,omitempty"` - - // Key/value pairs that provide additional information about the report. - Properties *PropertyBag `json:"properties,omitempty"` - - // An array of objects that describe relationships between this reporting descriptor and others. - Relationships []*ReportingDescriptorRelationship `json:"relationships,omitempty"` - - // A concise description of the report. Should be a single sentence that is understandable when visible space is limited to a single line of text. 
- ShortDescription *MultiformatMessageString `json:"shortDescription,omitempty"` -} - -// ReportingDescriptorReference Information about how to locate a relevant reporting descriptor. -type ReportingDescriptorReference struct { - - // A guid that uniquely identifies the descriptor. - Guid string `json:"guid,omitempty"` - - // The id of the descriptor. - Id string `json:"id,omitempty"` - - // The index into an array of descriptors in toolComponent.ruleDescriptors, toolComponent.notificationDescriptors, or toolComponent.taxonomyDescriptors, depending on context. - Index int `json:"index,omitempty"` - - // Key/value pairs that provide additional information about the reporting descriptor reference. - Properties *PropertyBag `json:"properties,omitempty"` - - // A reference used to locate the toolComponent associated with the descriptor. - ToolComponent *ToolComponentReference `json:"toolComponent,omitempty"` -} - -// ReportingDescriptorRelationship Information about the relation of one reporting descriptor to another. -type ReportingDescriptorRelationship struct { - - // A description of the reporting descriptor relationship. - Description *Message `json:"description,omitempty"` - - // A set of distinct strings that categorize the relationship. Well-known kinds include 'canPrecede', 'canFollow', 'willPrecede', 'willFollow', 'superset', 'subset', 'equal', 'disjoint', 'relevant', and 'incomparable'. - Kinds []string `json:"kinds,omitempty"` - - // Key/value pairs that provide additional information about the reporting descriptor reference. - Properties *PropertyBag `json:"properties,omitempty"` - - // A reference to the related reporting descriptor. - Target *ReportingDescriptorReference `json:"target"` -} - -// Result A result produced by an analysis tool. -type Result struct { - - // Identifies the artifact that the analysis tool was instructed to scan. This need not be the same as the artifact where the result actually occurred. - AnalysisTarget *ArtifactLocation `json:"analysisTarget,omitempty"` - - // A set of artifacts relevant to the result. - Attachments []*Attachment `json:"attachments,omitempty"` - - // The state of a result relative to a baseline of a previous run. - BaselineState interface{} `json:"baselineState,omitempty"` - - // An array of 'codeFlow' objects relevant to the result. - CodeFlows []*CodeFlow `json:"codeFlows,omitempty"` - - // A stable, unique identifier for the equivalence class of logically identical results to which this result belongs, in the form of a GUID. - CorrelationGuid string `json:"correlationGuid,omitempty"` - - // A set of strings each of which individually defines a stable, unique identity for the result. - Fingerprints map[string]string `json:"fingerprints,omitempty"` - - // An array of 'fix' objects, each of which represents a proposed fix to the problem indicated by the result. - Fixes []*Fix `json:"fixes,omitempty"` - - // An array of one or more unique 'graphTraversal' objects. - GraphTraversals []*GraphTraversal `json:"graphTraversals,omitempty"` - - // An array of zero or more unique graph objects associated with the result. - Graphs []*Graph `json:"graphs,omitempty"` - - // A stable, unique identifer for the result in the form of a GUID. - Guid string `json:"guid,omitempty"` - - // An absolute URI at which the result can be viewed. - HostedViewerUri string `json:"hostedViewerUri,omitempty"` - - // A value that categorizes results by evaluation state. 
- Kind ResultKind `json:"kind,omitempty"` - - // A value specifying the severity level of the result. - Level ResultLevel `json:"level,omitempty"` - - // The set of locations where the result was detected. Specify only one location unless the problem indicated by the result can only be corrected by making a change at every specified location. - Locations []*Location `json:"locations,omitempty"` - - // A message that describes the result. The first sentence of the message only will be displayed when visible space is limited. - Message *Message `json:"message"` - - // A positive integer specifying the number of times this logically unique result was observed in this run. - OccurrenceCount int `json:"occurrenceCount,omitempty"` - - // A set of strings that contribute to the stable, unique identity of the result. - PartialFingerprints map[string]string `json:"partialFingerprints,omitempty"` - - // Key/value pairs that provide additional information about the result. - Properties *PropertyBag `json:"properties,omitempty"` - - // Information about how and when the result was detected. - Provenance *ResultProvenance `json:"provenance,omitempty"` - - // A number representing the priority or importance of the result. - Rank float64 `json:"rank,omitempty"` - - // A set of locations relevant to this result. - RelatedLocations []*Location `json:"relatedLocations,omitempty"` - - // A reference used to locate the rule descriptor relevant to this result. - Rule *ReportingDescriptorReference `json:"rule,omitempty"` - - // The stable, unique identifier of the rule, if any, to which this result is relevant. - RuleId string `json:"ruleId,omitempty"` - - // The index within the tool component rules array of the rule object associated with this result. - RuleIndex int `json:"ruleIndex,omitempty"` - - // An array of 'stack' objects relevant to the result. - Stacks []*Stack `json:"stacks,omitempty"` - - // A set of suppressions relevant to this result. - Suppressions []*Suppression `json:"suppressions,omitempty"` - - // An array of references to taxonomy reporting descriptors that are applicable to the result. - Taxa []*ReportingDescriptorReference `json:"taxa,omitempty"` - - // A web request associated with this result. - WebRequest *WebRequest `json:"webRequest,omitempty"` - - // A web response associated with this result. - WebResponse *WebResponse `json:"webResponse,omitempty"` - - // The URIs of the work items associated with this result. - WorkItemUris []string `json:"workItemUris,omitempty"` -} - -// ResultProvenance Contains information about how and when a result was detected. -type ResultProvenance struct { - - // An array of physicalLocation objects which specify the portions of an analysis tool's output that a converter transformed into the result. - ConversionSources []*PhysicalLocation `json:"conversionSources,omitempty"` - - // A GUID-valued string equal to the automationDetails.guid property of the run in which the result was first detected. - FirstDetectionRunGuid string `json:"firstDetectionRunGuid,omitempty"` - - // The Coordinated Universal Time (UTC) date and time at which the result was first detected. See "Date/time properties" in the SARIF spec for the required format. - FirstDetectionTimeUtc string `json:"firstDetectionTimeUtc,omitempty"` - - // The index within the run.invocations array of the invocation object which describes the tool invocation that detected the result. 
- InvocationIndex int `json:"invocationIndex,omitempty"` - - // A GUID-valued string equal to the automationDetails.guid property of the run in which the result was most recently detected. - LastDetectionRunGuid string `json:"lastDetectionRunGuid,omitempty"` - - // The Coordinated Universal Time (UTC) date and time at which the result was most recently detected. See "Date/time properties" in the SARIF spec for the required format. - LastDetectionTimeUtc string `json:"lastDetectionTimeUtc,omitempty"` - - // Key/value pairs that provide additional information about the result. - Properties *PropertyBag `json:"properties,omitempty"` -} - -// Run Describes a single run of an analysis tool, and contains the reported output of that run. -type Run struct { - - // Addresses associated with this run instance, if any. - Addresses []*Address `json:"addresses,omitempty"` - - // An array of artifact objects relevant to the run. - Artifacts []*Artifact `json:"artifacts,omitempty"` - - // Automation details that describe this run. - AutomationDetails *RunAutomationDetails `json:"automationDetails,omitempty"` - - // The 'guid' property of a previous SARIF 'run' that comprises the baseline that was used to compute result 'baselineState' properties for the run. - BaselineGuid string `json:"baselineGuid,omitempty"` - - // Specifies the unit in which the tool measures columns. - ColumnKind interface{} `json:"columnKind,omitempty"` - - // A conversion object that describes how a converter transformed an analysis tool's native reporting format into the SARIF format. - Conversion *Conversion `json:"conversion,omitempty"` - - // Specifies the default encoding for any artifact object that refers to a text file. - DefaultEncoding string `json:"defaultEncoding,omitempty"` - - // Specifies the default source language for any artifact object that refers to a text file that contains source code. - DefaultSourceLanguage string `json:"defaultSourceLanguage,omitempty"` - - // References to external property files that should be inlined with the content of a root log file. - ExternalPropertyFileReferences *ExternalPropertyFileReferences `json:"externalPropertyFileReferences,omitempty"` - - // An array of zero or more unique graph objects associated with the run. - Graphs []*Graph `json:"graphs,omitempty"` - - // Describes the invocation of the analysis tool. - Invocations []*Invocation `json:"invocations,omitempty"` - - // The language of the messages emitted into the log file during this run (expressed as an ISO 639-1 two-letter lowercase culture code) and an optional region (expressed as an ISO 3166-1 two-letter uppercase subculture code associated with a country or region). The casing is recommended but not required (in order for this data to conform to RFC5646). - Language string `json:"language,omitempty"` - - // An array of logical locations such as namespaces, types or functions. - LogicalLocations []*LogicalLocation `json:"logicalLocations,omitempty"` - - // An ordered list of character sequences that were treated as line breaks when computing region information for the run. - NewlineSequences []string `json:"newlineSequences,omitempty"` - - // The artifact location specified by each uriBaseId symbol on the machine where the tool originally ran. 
- OriginalUriBaseIds map[string]*ArtifactLocation `json:"originalUriBaseIds,omitempty"` - - // Contains configurations that may potentially override both reportingDescriptor.defaultConfiguration (the tool's default severities) and invocation.configurationOverrides (severities established at run-time from the command line). - Policies []*ToolComponent `json:"policies,omitempty"` - - // Key/value pairs that provide additional information about the run. - Properties *PropertyBag `json:"properties,omitempty"` - - // An array of strings used to replace sensitive information in a redaction-aware property. - RedactionTokens []string `json:"redactionTokens,omitempty"` - - // The set of results contained in an SARIF log. The results array can be omitted when a run is solely exporting rules metadata. It must be present (but may be empty) if a log file represents an actual scan. - Results []*Result `json:"results,omitempty"` - - // Automation details that describe the aggregate of runs to which this run belongs. - RunAggregates []*RunAutomationDetails `json:"runAggregates,omitempty"` - - // A specialLocations object that defines locations of special significance to SARIF consumers. - SpecialLocations *SpecialLocations `json:"specialLocations,omitempty"` - - // An array of toolComponent objects relevant to a taxonomy in which results are categorized. - Taxonomies []*ToolComponent `json:"taxonomies,omitempty"` - - // An array of threadFlowLocation objects cached at run level. - ThreadFlowLocations []*ThreadFlowLocation `json:"threadFlowLocations,omitempty"` - - // Information about the tool or tool pipeline that generated the results in this run. A run can only contain results produced by a single tool or tool pipeline. A run can aggregate results from multiple log files, as long as context around the tool run (tool command-line arguments and the like) is identical for all aggregated files. - Tool *Tool `json:"tool"` - - // The set of available translations of the localized data provided by the tool. - Translations []*ToolComponent `json:"translations,omitempty"` - - // Specifies the revision in version control of the artifacts that were scanned. - VersionControlProvenance []*VersionControlDetails `json:"versionControlProvenance,omitempty"` - - // An array of request objects cached at run level. - WebRequests []*WebRequest `json:"webRequests,omitempty"` - - // An array of response objects cached at run level. - WebResponses []*WebResponse `json:"webResponses,omitempty"` -} - -// RunAutomationDetails Information that describes a run's identity and role within an engineering system process. -type RunAutomationDetails struct { - - // A stable, unique identifier for the equivalence class of runs to which this object's containing run object belongs in the form of a GUID. - CorrelationGuid string `json:"correlationGuid,omitempty"` - - // A description of the identity and role played within the engineering system by this object's containing run object. - Description *Message `json:"description,omitempty"` - - // A stable, unique identifer for this object's containing run object in the form of a GUID. - Guid string `json:"guid,omitempty"` - - // A hierarchical string that uniquely identifies this object's containing run object. - Id string `json:"id,omitempty"` - - // Key/value pairs that provide additional information about the run automation details. - Properties *PropertyBag `json:"properties,omitempty"` -} - -// SpecialLocations Defines locations of special significance to SARIF consumers. 
-type SpecialLocations struct { - - // Provides a suggestion to SARIF consumers to display file paths relative to the specified location. - DisplayBase *ArtifactLocation `json:"displayBase,omitempty"` - - // Key/value pairs that provide additional information about the special locations. - Properties *PropertyBag `json:"properties,omitempty"` -} - -// Stack A call stack that is relevant to a result. -type Stack struct { - - // An array of stack frames that represents a sequence of calls, rendered in reverse chronological order, that comprise the call stack. - Frames []*StackFrame `json:"frames"` - - // A message relevant to this call stack. - Message *Message `json:"message,omitempty"` - - // Key/value pairs that provide additional information about the stack. - Properties *PropertyBag `json:"properties,omitempty"` -} - -// StackFrame A function call within a stack trace. -type StackFrame struct { - - // The location to which this stack frame refers. - Location *Location `json:"location,omitempty"` - - // The name of the module that contains the code of this stack frame. - Module string `json:"module,omitempty"` - - // The parameters of the call that is executing. - Parameters []string `json:"parameters,omitempty"` - - // Key/value pairs that provide additional information about the stack frame. - Properties *PropertyBag `json:"properties,omitempty"` - - // The thread identifier of the stack frame. - ThreadId int `json:"threadId,omitempty"` -} - -// Suppression A suppression that is relevant to a result. -type Suppression struct { - - // A stable, unique identifer for the supression in the form of a GUID. - Guid string `json:"guid,omitempty"` - - // A string representing the justification for the suppression. - Justification string `json:"justification,omitempty"` - - // A string that indicates where the suppression is persisted. - Kind string `json:"kind"` - - // Identifies the location associated with the suppression. - Location *Location `json:"location,omitempty"` - - // Key/value pairs that provide additional information about the suppression. - Properties *PropertyBag `json:"properties,omitempty"` - - // A string that indicates the review status of the suppression. - Status interface{} `json:"status,omitempty"` -} - -// ThreadFlow Describes a sequence of code locations that specify a path through a single thread of execution such as an operating system or fiber. -type ThreadFlow struct { - - // An string that uniquely identifies the threadFlow within the codeFlow in which it occurs. - Id string `json:"id,omitempty"` - - // Values of relevant expressions at the start of the thread flow that remain constant. - ImmutableState map[string]*MultiformatMessageString `json:"immutableState,omitempty"` - - // Values of relevant expressions at the start of the thread flow that may change during thread flow execution. - InitialState map[string]*MultiformatMessageString `json:"initialState,omitempty"` - - // A temporally ordered array of 'threadFlowLocation' objects, each of which describes a location visited by the tool while producing the result. - Locations []*ThreadFlowLocation `json:"locations"` - - // A message relevant to the thread flow. - Message *Message `json:"message,omitempty"` - - // Key/value pairs that provide additional information about the thread flow. - Properties *PropertyBag `json:"properties,omitempty"` -} - -// ThreadFlowLocation A location visited by an analysis tool while simulating or monitoring the execution of a program. 
-type ThreadFlowLocation struct { - - // An integer representing the temporal order in which execution reached this location. - ExecutionOrder int `json:"executionOrder,omitempty"` - - // The Coordinated Universal Time (UTC) date and time at which this location was executed. - ExecutionTimeUtc string `json:"executionTimeUtc,omitempty"` - - // Specifies the importance of this location in understanding the code flow in which it occurs. The order from most to least important is "essential", "important", "unimportant". Default: "important". - Importance interface{} `json:"importance,omitempty"` - - // The index within the run threadFlowLocations array. - Index int `json:"index,omitempty"` - - // A set of distinct strings that categorize the thread flow location. Well-known kinds include 'acquire', 'release', 'enter', 'exit', 'call', 'return', 'branch', 'implicit', 'false', 'true', 'caution', 'danger', 'unknown', 'unreachable', 'taint', 'function', 'handler', 'lock', 'memory', 'resource', 'scope' and 'value'. - Kinds []string `json:"kinds,omitempty"` - - // The code location. - Location *Location `json:"location,omitempty"` - - // The name of the module that contains the code that is executing. - Module string `json:"module,omitempty"` - - // An integer representing a containment hierarchy within the thread flow. - NestingLevel int `json:"nestingLevel,omitempty"` - - // Key/value pairs that provide additional information about the threadflow location. - Properties *PropertyBag `json:"properties,omitempty"` - - // The call stack leading to this location. - Stack *Stack `json:"stack,omitempty"` - - // A dictionary, each of whose keys specifies a variable or expression, the associated value of which represents the variable or expression value. For an annotation of kind 'continuation', for example, this dictionary might hold the current assumed values of a set of global variables. - State map[string]*MultiformatMessageString `json:"state,omitempty"` - - // An array of references to rule or taxonomy reporting descriptors that are applicable to the thread flow location. - Taxa []*ReportingDescriptorReference `json:"taxa,omitempty"` - - // A web request associated with this thread flow location. - WebRequest *WebRequest `json:"webRequest,omitempty"` - - // A web response associated with this thread flow location. - WebResponse *WebResponse `json:"webResponse,omitempty"` -} - -// Tool The analysis tool that was run. -type Tool struct { - - // The analysis tool that was run. - Driver *ToolComponent `json:"driver"` - - // Tool extensions that contributed to or reconfigured the analysis tool that was run. - Extensions []*ToolComponent `json:"extensions,omitempty"` - - // Key/value pairs that provide additional information about the tool. - Properties *PropertyBag `json:"properties,omitempty"` -} - -// ToolComponent A component, such as a plug-in or the driver, of the analysis tool that was run. -type ToolComponent struct { - - // The component which is strongly associated with this component. For a translation, this refers to the component which has been translated. For an extension, this is the driver that provides the extension's plugin model. - AssociatedComponent *ToolComponentReference `json:"associatedComponent,omitempty"` - - // The kinds of data contained in this object. 
- Contents []interface{} `json:"contents,omitempty"` - - // The binary version of the tool component's primary executable file expressed as four non-negative integers separated by a period (for operating systems that express file versions in this way). - DottedQuadFileVersion string `json:"dottedQuadFileVersion,omitempty"` - - // The absolute URI from which the tool component can be downloaded. - DownloadUri string `json:"downloadUri,omitempty"` - - // A comprehensive description of the tool component. - FullDescription *MultiformatMessageString `json:"fullDescription,omitempty"` - - // The name of the tool component along with its version and any other useful identifying information, such as its locale. - FullName string `json:"fullName,omitempty"` - - // A dictionary, each of whose keys is a resource identifier and each of whose values is a multiformatMessageString object, which holds message strings in plain text and (optionally) Markdown format. The strings can include placeholders, which can be used to construct a message in combination with an arbitrary number of additional string arguments. - GlobalMessageStrings map[string]*MultiformatMessageString `json:"globalMessageStrings,omitempty"` - - // A unique identifer for the tool component in the form of a GUID. - Guid string `json:"guid,omitempty"` - - // The absolute URI at which information about this version of the tool component can be found. - InformationUri string `json:"informationUri,omitempty"` - - // Specifies whether this object contains a complete definition of the localizable and/or non-localizable data for this component, as opposed to including only data that is relevant to the results persisted to this log file. - IsComprehensive bool `json:"isComprehensive,omitempty"` - - // The language of the messages emitted into the log file during this run (expressed as an ISO 639-1 two-letter lowercase language code) and an optional region (expressed as an ISO 3166-1 two-letter uppercase subculture code associated with a country or region). The casing is recommended but not required (in order for this data to conform to RFC5646). - Language string `json:"language,omitempty"` - - // The semantic version of the localized strings defined in this component; maintained by components that provide translations. - LocalizedDataSemanticVersion string `json:"localizedDataSemanticVersion,omitempty"` - - // An array of the artifactLocation objects associated with the tool component. - Locations []*ArtifactLocation `json:"locations,omitempty"` - - // The minimum value of localizedDataSemanticVersion required in translations consumed by this component; used by components that consume translations. - MinimumRequiredLocalizedDataSemanticVersion string `json:"minimumRequiredLocalizedDataSemanticVersion,omitempty"` - - // The name of the tool component. - Name string `json:"name"` - - // An array of reportingDescriptor objects relevant to the notifications related to the configuration and runtime execution of the tool component. - Notifications []*ReportingDescriptor `json:"notifications,omitempty"` - - // The organization or company that produced the tool component. - Organization string `json:"organization,omitempty"` - - // A product suite to which the tool component belongs. - Product string `json:"product,omitempty"` - - // A localizable string containing the name of the suite of products to which the tool component belongs. 
- ProductSuite string `json:"productSuite,omitempty"` - - // Key/value pairs that provide additional information about the tool component. - Properties *PropertyBag `json:"properties,omitempty"` - - // A string specifying the UTC date (and optionally, the time) of the component's release. - ReleaseDateUtc string `json:"releaseDateUtc,omitempty"` - - // An array of reportingDescriptor objects relevant to the analysis performed by the tool component. - Rules []*ReportingDescriptor `json:"rules,omitempty"` - - // The tool component version in the format specified by Semantic Versioning 2.0. - SemanticVersion string `json:"semanticVersion,omitempty"` - - // A brief description of the tool component. - ShortDescription *MultiformatMessageString `json:"shortDescription,omitempty"` - - // An array of toolComponentReference objects to declare the taxonomies supported by the tool component. - SupportedTaxonomies []*ToolComponentReference `json:"supportedTaxonomies,omitempty"` - - // An array of reportingDescriptor objects relevant to the definitions of both standalone and tool-defined taxonomies. - Taxa []*ReportingDescriptor `json:"taxa,omitempty"` - - // Translation metadata, required for a translation, not populated by other component types. - TranslationMetadata *TranslationMetadata `json:"translationMetadata,omitempty"` - - // The tool component version, in whatever format the component natively provides. - Version string `json:"version,omitempty"` -} - -// ToolComponentReference Identifies a particular toolComponent object, either the driver or an extension. -type ToolComponentReference struct { - - // The 'guid' property of the referenced toolComponent. - Guid string `json:"guid,omitempty"` - - // An index into the referenced toolComponent in tool.extensions. - Index int `json:"index,omitempty"` - - // The 'name' property of the referenced toolComponent. - Name string `json:"name,omitempty"` - - // Key/value pairs that provide additional information about the toolComponentReference. - Properties *PropertyBag `json:"properties,omitempty"` -} - -// TranslationMetadata Provides additional metadata related to translation. -type TranslationMetadata struct { - - // The absolute URI from which the translation metadata can be downloaded. - DownloadUri string `json:"downloadUri,omitempty"` - - // A comprehensive description of the translation metadata. - FullDescription *MultiformatMessageString `json:"fullDescription,omitempty"` - - // The full name associated with the translation metadata. - FullName string `json:"fullName,omitempty"` - - // The absolute URI from which information related to the translation metadata can be downloaded. - InformationUri string `json:"informationUri,omitempty"` - - // The name associated with the translation metadata. - Name string `json:"name"` - - // Key/value pairs that provide additional information about the translation metadata. - Properties *PropertyBag `json:"properties,omitempty"` - - // A brief description of the translation metadata. - ShortDescription *MultiformatMessageString `json:"shortDescription,omitempty"` -} - -// VersionControlDetails Specifies the information necessary to retrieve a desired revision from a version control system. -type VersionControlDetails struct { - - // A Coordinated Universal Time (UTC) date and time that can be used to synchronize an enlistment to the state of the repository at that time. - AsOfTimeUtc string `json:"asOfTimeUtc,omitempty"` - - // The name of a branch containing the revision. 
- Branch string `json:"branch,omitempty"` - - // The location in the local file system to which the root of the repository was mapped at the time of the analysis. - MappedTo *ArtifactLocation `json:"mappedTo,omitempty"` - - // Key/value pairs that provide additional information about the version control details. - Properties *PropertyBag `json:"properties,omitempty"` - - // The absolute URI of the repository. - RepositoryUri string `json:"repositoryUri"` - - // A string that uniquely and permanently identifies the revision within the repository. - RevisionId string `json:"revisionId,omitempty"` - - // A tag that has been applied to the revision. - RevisionTag string `json:"revisionTag,omitempty"` -} - -// WebRequest Describes an HTTP request. -type WebRequest struct { - - // The body of the request. - Body *ArtifactContent `json:"body,omitempty"` - - // The request headers. - Headers map[string]string `json:"headers,omitempty"` - - // The index within the run.webRequests array of the request object associated with this result. - Index int `json:"index,omitempty"` - - // The HTTP method. Well-known values are 'GET', 'PUT', 'POST', 'DELETE', 'PATCH', 'HEAD', 'OPTIONS', 'TRACE', 'CONNECT'. - Method string `json:"method,omitempty"` - - // The request parameters. - Parameters map[string]string `json:"parameters,omitempty"` - - // Key/value pairs that provide additional information about the request. - Properties *PropertyBag `json:"properties,omitempty"` - - // The request protocol. Example: 'http'. - Protocol string `json:"protocol,omitempty"` - - // The target of the request. - Target string `json:"target,omitempty"` - - // The request version. Example: '1.1'. - Version string `json:"version,omitempty"` -} - -// WebResponse Describes the response to an HTTP request. -type WebResponse struct { - - // The body of the response. - Body *ArtifactContent `json:"body,omitempty"` - - // The response headers. - Headers map[string]string `json:"headers,omitempty"` - - // The index within the run.webResponses array of the response object associated with this result. - Index int `json:"index,omitempty"` - - // Specifies whether a response was received from the server. - NoResponseReceived bool `json:"noResponseReceived,omitempty"` - - // Key/value pairs that provide additional information about the response. - Properties *PropertyBag `json:"properties,omitempty"` - - // The response protocol. Example: 'http'. - Protocol string `json:"protocol,omitempty"` - - // The response reason. Example: 'Not found'. - ReasonPhrase string `json:"reasonPhrase,omitempty"` - - // The response status code. Example: 451. - StatusCode int `json:"statusCode,omitempty"` - - // The response version. Example: '1.1'. 
- Version string `json:"version,omitempty"` -} diff --git a/vendor/github.com/curioswitch/go-reassign/.gitattributes b/vendor/github.com/curioswitch/go-reassign/.gitattributes deleted file mode 100644 index d020be8ea..000000000 --- a/vendor/github.com/curioswitch/go-reassign/.gitattributes +++ /dev/null @@ -1,2 +0,0 @@ -*.go text eol=lf - diff --git a/vendor/github.com/curioswitch/go-reassign/.gitignore b/vendor/github.com/curioswitch/go-reassign/.gitignore deleted file mode 100644 index 59fa33613..000000000 --- a/vendor/github.com/curioswitch/go-reassign/.gitignore +++ /dev/null @@ -1,6 +0,0 @@ -.idea -.VSCode -.envrc - -build -dist diff --git a/vendor/github.com/curioswitch/go-reassign/.golangci.yml b/vendor/github.com/curioswitch/go-reassign/.golangci.yml deleted file mode 100644 index e3bf79ae7..000000000 --- a/vendor/github.com/curioswitch/go-reassign/.golangci.yml +++ /dev/null @@ -1,38 +0,0 @@ -linters: - enable: - - asasalint - - bidichk - - bodyclose - - decorder - - durationcheck - - errchkjson - - errname - - errorlint - - execinquery - - exhaustive - - exportloopref - - gocritic - - goerr113 - - gofmt - - goimports - - goprintffuncname - - gosec - - importas - - misspell - - nolintlint - - nosnakecase - - prealloc - - predeclared - - promlinter - - revive - - stylecheck - - tagliatelle - - tenv - - thelper - - unconvert - - usestdlibvars -issues: - exclude-rules: - - path: magefile\.go - linters: - - deadcode diff --git a/vendor/github.com/curioswitch/go-reassign/.goreleaser.yaml b/vendor/github.com/curioswitch/go-reassign/.goreleaser.yaml deleted file mode 100644 index 25f2dc0c1..000000000 --- a/vendor/github.com/curioswitch/go-reassign/.goreleaser.yaml +++ /dev/null @@ -1,27 +0,0 @@ -builds: - - main: ./cmd - env: - - CGO_ENABLED=0 - targets: - - linux_amd64 - - linux_arm64 - - darwin_amd64 - - darwin_arm64 - - windows_amd64 - - windows_arm64 -archives: - - format_overrides: - - goos: windows - format: zip -release: - mode: append -checksum: - name_template: 'checksums.txt' -snapshot: - name_template: "{{ incpatch .Version }}-next" -changelog: - sort: asc - filters: - exclude: - - '^docs:' - - '^test:' diff --git a/vendor/github.com/curioswitch/go-reassign/LICENSE b/vendor/github.com/curioswitch/go-reassign/LICENSE deleted file mode 100644 index 9f18bde00..000000000 --- a/vendor/github.com/curioswitch/go-reassign/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -MIT License - -Copyright (c) Choko (choko@curioswitch.org) - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. 
diff --git a/vendor/github.com/curioswitch/go-reassign/README.md b/vendor/github.com/curioswitch/go-reassign/README.md deleted file mode 100644 index ac9c131df..000000000 --- a/vendor/github.com/curioswitch/go-reassign/README.md +++ /dev/null @@ -1,53 +0,0 @@ -# reassign - -A linter that detects when reassigning a top-level variable in another package. - -## Install - -```bash -go install github.com/curioswitch/go-reassign -``` - -## Usage - -```bash -reassign ./... -``` - -Change the pattern to match against variables being reassigned. By default, only `EOF` and `Err*` variables are checked. - -```bash -reassign -pattern ".*" ./... -``` - -## Background - -Package variables are commonly used to define sentinel errors which callers can use with `errors.Is` to determine the -type of a returned `error`. Some examples exist in the standard [os](https://pkg.go.dev/os#pkg-variables) library. - -Unfortunately, as with any variable, these are mutable, and it is possible to write this very dangerous code. - -```go -package main -import "io" -func bad() { - // breaks file reading - io.EOF = nil -} -``` - -This caused a new pattern for [constant errors](https://dave.cheney.net/2016/04/07/constant-errors) -to gain popularity, but they don't work well with improvements to the `errors` package in recent versions of Go and may -be considered to be non-idiomatic compared to normal `errors.New`. If we can catch reassignment of sentinel errors, we -gain much of the safety of constant errors. - -This linter will catch reassignment of variables in other packages. By default it intends to apply to as many codebases -as possible and only checks a restricted set of variable names, `EOF` and `ErrFoo`, to restrict to sentinel errors. -Package variable reassignment is generally confusing, though, and we recommend avoiding it for all variables, not just errors. -The `pattern` flag can be set to a regular expression to define what variables cannot be reassigned, and `.*` is -recommended if it works with your code. - -## Limitations - -If a variable shadows the name of an import, an assignment of a field in the variable will trigger the linter. Shadowing -can be confusing, so it's recommended to rename the variable. diff --git a/vendor/github.com/curioswitch/go-reassign/analyzer.go b/vendor/github.com/curioswitch/go-reassign/analyzer.go deleted file mode 100644 index 48707adeb..000000000 --- a/vendor/github.com/curioswitch/go-reassign/analyzer.go +++ /dev/null @@ -1,13 +0,0 @@ -package reassign - -import ( - "github.com/curioswitch/go-reassign/internal/analyzer" - "golang.org/x/tools/go/analysis" -) - -const FlagPattern = analyzer.FlagPattern - -// NewAnalyzer returns an analyzer for checking that package variables are not reassigned. 
-func NewAnalyzer() *analysis.Analyzer { - return analyzer.New() -} diff --git a/vendor/github.com/curioswitch/go-reassign/internal/analyzer/analyzer.go b/vendor/github.com/curioswitch/go-reassign/internal/analyzer/analyzer.go deleted file mode 100644 index e1b47d5b9..000000000 --- a/vendor/github.com/curioswitch/go-reassign/internal/analyzer/analyzer.go +++ /dev/null @@ -1,84 +0,0 @@ -package analyzer - -import ( - "fmt" - "go/ast" - "go/types" - "regexp" - - "golang.org/x/tools/go/analysis" - "golang.org/x/tools/go/analysis/passes/inspect" - "golang.org/x/tools/go/ast/inspector" -) - -const FlagPattern = "pattern" - -func New() *analysis.Analyzer { - a := &analysis.Analyzer{ - Name: "reassign", - Doc: "Checks that package variables are not reassigned", - Requires: []*analysis.Analyzer{inspect.Analyzer}, - Run: run, - } - a.Flags.String(FlagPattern, `^(Err.*|EOF)$`, "Pattern to match package variables against to prevent reassignment") - return a -} - -func run(pass *analysis.Pass) (interface{}, error) { - checkRE, err := regexp.Compile(pass.Analyzer.Flags.Lookup(FlagPattern).Value.String()) - if err != nil { - return nil, fmt.Errorf("invalid pattern: %w", err) - } - - inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) - inspect.Preorder([]ast.Node{(*ast.AssignStmt)(nil), (*ast.UnaryExpr)(nil)}, func(node ast.Node) { - switch node := node.(type) { - case *ast.AssignStmt: - for _, lhs := range node.Lhs { - reportImported(pass, lhs, checkRE, "reassigning") - } - default: - // TODO(chokoswitch): Consider handling operations other than assignment on globals, for example - // taking their address. - } - }) - return nil, nil -} - -func reportImported(pass *analysis.Pass, expr ast.Expr, checkRE *regexp.Regexp, prefix string) { - switch x := expr.(type) { - case *ast.SelectorExpr: - if !checkRE.MatchString(x.Sel.Name) { - return - } - - selectIdent, ok := x.X.(*ast.Ident) - if !ok { - return - } - - if selectObj, ok := pass.TypesInfo.Uses[selectIdent]; ok { - if pkg, ok := selectObj.(*types.PkgName); !ok || pkg.Imported() == pass.Pkg { - return - } - } - - pass.Reportf(expr.Pos(), "%s variable %s in other package %s", prefix, x.Sel.Name, selectIdent.Name) - - case *ast.Ident: - use, ok := pass.TypesInfo.Uses[x].(*types.Var) - if !ok { - return - } - - if use.Pkg() == pass.Pkg { - return - } - - if !checkRE.MatchString(x.Name) { - return - } - - pass.Reportf(expr.Pos(), "%s variable %s from other package %s", prefix, x.Name, use.Pkg().Path()) - } -} diff --git a/vendor/github.com/daixiang0/gci/LICENSE b/vendor/github.com/daixiang0/gci/LICENSE deleted file mode 100644 index e1292f738..000000000 --- a/vendor/github.com/daixiang0/gci/LICENSE +++ /dev/null @@ -1,29 +0,0 @@ -BSD 3-Clause License - -Copyright (c) 2020, Xiang Dai -All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are met: - -1. Redistributions of source code must retain the above copyright notice, this - list of conditions and the following disclaimer. - -2. Redistributions in binary form must reproduce the above copyright notice, - this list of conditions and the following disclaimer in the documentation - and/or other materials provided with the distribution. - -3. Neither the name of the copyright holder nor the names of its - contributors may be used to endorse or promote products derived from - this software without specific prior written permission. 
- -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" -AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE -IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE -FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL -DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR -SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, -OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/vendor/github.com/daixiang0/gci/pkg/config/config.go b/vendor/github.com/daixiang0/gci/pkg/config/config.go deleted file mode 100644 index 18b9c74b6..000000000 --- a/vendor/github.com/daixiang0/gci/pkg/config/config.go +++ /dev/null @@ -1,106 +0,0 @@ -package config - -import ( - "sort" - "strings" - - "gopkg.in/yaml.v3" - - "github.com/daixiang0/gci/pkg/section" -) - -var defaultOrder = map[string]int{ - section.StandardType: 0, - section.DefaultType: 1, - section.CustomType: 2, - section.BlankType: 3, - section.DotType: 4, - section.AliasType: 5, - section.LocalModuleType: 6, -} - -type BoolConfig struct { - NoInlineComments bool `yaml:"no-inlineComments"` - NoPrefixComments bool `yaml:"no-prefixComments"` - Debug bool `yaml:"-"` - SkipGenerated bool `yaml:"skipGenerated"` - SkipVendor bool `yaml:"skipVendor"` - CustomOrder bool `yaml:"customOrder"` -} - -type Config struct { - BoolConfig - Sections section.SectionList - SectionSeparators section.SectionList -} - -type YamlConfig struct { - Cfg BoolConfig `yaml:",inline"` - SectionStrings []string `yaml:"sections"` - SectionSeparatorStrings []string `yaml:"sectionseparators"` -} - -func (g YamlConfig) Parse() (*Config, error) { - var err error - - sections, err := section.Parse(g.SectionStrings) - if err != nil { - return nil, err - } - if sections == nil { - sections = section.DefaultSections() - } - if err := configureSections(sections); err != nil { - return nil, err - } - - // if default order sorted sections - if !g.Cfg.CustomOrder { - sort.Slice(sections, func(i, j int) bool { - sectionI, sectionJ := sections[i].Type(), sections[j].Type() - - if strings.Compare(sectionI, sectionJ) == 0 { - return strings.Compare(sections[i].String(), sections[j].String()) < 0 - } - return defaultOrder[sectionI] < defaultOrder[sectionJ] - }) - } - - sectionSeparators, err := section.Parse(g.SectionSeparatorStrings) - if err != nil { - return nil, err - } - if sectionSeparators == nil { - sectionSeparators = section.DefaultSectionSeparators() - } - - return &Config{g.Cfg, sections, sectionSeparators}, nil -} - -func ParseConfig(in string) (*Config, error) { - config := YamlConfig{} - - err := yaml.Unmarshal([]byte(in), &config) - if err != nil { - return nil, err - } - - gciCfg, err := config.Parse() - if err != nil { - return nil, err - } - - return gciCfg, nil -} - -func configureSections(sections section.SectionList) error { - for _, sec := range sections { - switch s := sec.(type) { - case *section.LocalModule: - if err := s.Configure(); err != nil { - return err - } - } - } - return nil -} diff --git a/vendor/github.com/daixiang0/gci/pkg/format/format.go b/vendor/github.com/daixiang0/gci/pkg/format/format.go deleted file mode 100644 index 062701d2e..000000000 --- 
a/vendor/github.com/daixiang0/gci/pkg/format/format.go +++ /dev/null @@ -1,46 +0,0 @@ -package format - -import ( - "fmt" - - "github.com/daixiang0/gci/pkg/config" - "github.com/daixiang0/gci/pkg/log" - "github.com/daixiang0/gci/pkg/parse" - "github.com/daixiang0/gci/pkg/section" - "github.com/daixiang0/gci/pkg/specificity" -) - -type Block struct { - Start, End int -} - -type resultMap map[string][]*Block - -func Format(data []*parse.GciImports, cfg *config.Config) (resultMap, error) { - result := make(resultMap, len(cfg.Sections)) - for _, d := range data { - // determine match specificity for every available section - var bestSection section.Section - var bestSectionSpecificity specificity.MatchSpecificity = specificity.MisMatch{} - for _, section := range cfg.Sections { - sectionSpecificity := section.MatchSpecificity(d) - if sectionSpecificity.IsMoreSpecific(specificity.MisMatch{}) && sectionSpecificity.Equal(bestSectionSpecificity) { - // specificity is identical - // return nil, section.EqualSpecificityMatchError{} - return nil, nil - } - if sectionSpecificity.IsMoreSpecific(bestSectionSpecificity) { - // better match found - bestSectionSpecificity = sectionSpecificity - bestSection = section - } - } - if bestSection == nil { - return nil, section.NoMatchingSectionForImportError{Imports: d} - } - log.L().Debug(fmt.Sprintf("Matched import %v to section %s", d, bestSection)) - result[bestSection.String()] = append(result[bestSection.String()], &Block{d.Start, d.End}) - } - - return result, nil -} diff --git a/vendor/github.com/daixiang0/gci/pkg/gci/gci.go b/vendor/github.com/daixiang0/gci/pkg/gci/gci.go deleted file mode 100644 index 163e95a86..000000000 --- a/vendor/github.com/daixiang0/gci/pkg/gci/gci.go +++ /dev/null @@ -1,229 +0,0 @@ -package gci - -import ( - "bytes" - "errors" - "fmt" - goFormat "go/format" - "os" - "sync" - - "github.com/hexops/gotextdiff" - "github.com/hexops/gotextdiff/myers" - "github.com/hexops/gotextdiff/span" - "golang.org/x/sync/errgroup" - - "github.com/daixiang0/gci/pkg/config" - "github.com/daixiang0/gci/pkg/format" - "github.com/daixiang0/gci/pkg/io" - "github.com/daixiang0/gci/pkg/log" - "github.com/daixiang0/gci/pkg/parse" - "github.com/daixiang0/gci/pkg/section" - "github.com/daixiang0/gci/pkg/utils" -) - -func LocalFlagsToSections(localFlags []string) section.SectionList { - sections := section.DefaultSections() - // Add all local arguments as ImportPrefix sections - // for _, l := range localFlags { - // sections = append(sections, section.Section{l, nil, nil}) - // } - return sections -} - -func PrintFormattedFiles(paths []string, cfg config.Config) error { - return processStdInAndGoFilesInPaths(paths, cfg, func(filePath string, unmodifiedFile, formattedFile []byte) error { - fmt.Print(string(formattedFile)) - return nil - }) -} - -func WriteFormattedFiles(paths []string, cfg config.Config) error { - return processGoFilesInPaths(paths, cfg, func(filePath string, unmodifiedFile, formattedFile []byte) error { - if bytes.Equal(unmodifiedFile, formattedFile) { - log.L().Debug(fmt.Sprintf("Skipping correctly formatted File: %s", filePath)) - return nil - } - log.L().Info(fmt.Sprintf("Writing formatted File: %s", filePath)) - return os.WriteFile(filePath, formattedFile, 0o644) - }) -} - -func ListUnFormattedFiles(paths []string, cfg config.Config) error { - return processGoFilesInPaths(paths, cfg, func(filePath string, unmodifiedFile, formattedFile []byte) error { - if bytes.Equal(unmodifiedFile, formattedFile) { - return nil - } - 
fmt.Println(filePath) - return nil - }) -} - -func DiffFormattedFiles(paths []string, cfg config.Config) error { - return processStdInAndGoFilesInPaths(paths, cfg, func(filePath string, unmodifiedFile, formattedFile []byte) error { - fileURI := span.URIFromPath(filePath) - edits := myers.ComputeEdits(fileURI, string(unmodifiedFile), string(formattedFile)) - unifiedEdits := gotextdiff.ToUnified(filePath, filePath, string(unmodifiedFile), edits) - fmt.Printf("%v", unifiedEdits) - return nil - }) -} - -func DiffFormattedFilesToArray(paths []string, cfg config.Config, diffs *[]string, lock *sync.Mutex) error { - log.InitLogger() - defer log.L().Sync() - return processStdInAndGoFilesInPaths(paths, cfg, func(filePath string, unmodifiedFile, formattedFile []byte) error { - fileURI := span.URIFromPath(filePath) - edits := myers.ComputeEdits(fileURI, string(unmodifiedFile), string(formattedFile)) - unifiedEdits := gotextdiff.ToUnified(filePath, filePath, string(unmodifiedFile), edits) - lock.Lock() - *diffs = append(*diffs, fmt.Sprint(unifiedEdits)) - lock.Unlock() - return nil - }) -} - -type fileFormattingFunc func(filePath string, unmodifiedFile, formattedFile []byte) error - -func processStdInAndGoFilesInPaths(paths []string, cfg config.Config, fileFunc fileFormattingFunc) error { - return ProcessFiles(io.StdInGenerator.Combine(io.GoFilesInPathsGenerator(paths, cfg.SkipVendor)), cfg, fileFunc) -} - -func processGoFilesInPaths(paths []string, cfg config.Config, fileFunc fileFormattingFunc) error { - return ProcessFiles(io.GoFilesInPathsGenerator(paths, cfg.SkipVendor), cfg, fileFunc) -} - -func ProcessFiles(fileGenerator io.FileGeneratorFunc, cfg config.Config, fileFunc fileFormattingFunc) error { - var taskGroup errgroup.Group - files, err := fileGenerator() - if err != nil { - return err - } - for _, file := range files { - // run file processing in parallel - taskGroup.Go(processingFunc(file, cfg, fileFunc)) - } - return taskGroup.Wait() -} - -func processingFunc(file io.FileObj, cfg config.Config, formattingFunc fileFormattingFunc) func() error { - return func() error { - unmodifiedFile, formattedFile, err := LoadFormatGoFile(file, cfg) - if err != nil { - // if errors.Is(err, FileParsingError{}) { - // // do not process files that are improperly formatted - // return nil - // } - return err - } - return formattingFunc(file.Path(), unmodifiedFile, formattedFile) - } -} - -func LoadFormatGoFile(file io.FileObj, cfg config.Config) (src, dist []byte, err error) { - src, err = file.Load() - log.L().Debug(fmt.Sprintf("Loaded File: %s", file.Path())) - if err != nil { - return nil, nil, err - } - - return LoadFormat(src, file.Path(), cfg) -} - -func LoadFormat(in []byte, path string, cfg config.Config) (src, dist []byte, err error) { - src = in - - if cfg.SkipGenerated && parse.IsGeneratedFileByComment(string(src)) { - return src, src, nil - } - - imports, headEnd, tailStart, cStart, cEnd, err := parse.ParseFile(src, path) - if err != nil { - if errors.Is(err, parse.NoImportError{}) { - return src, src, nil - } - return nil, nil, err - } - - // do not do format if only one import - if len(imports) <= 1 { - return src, src, nil - } - - result, err := format.Format(imports, &cfg) - if err != nil { - return nil, nil, err - } - - firstWithIndex := true - - var body []byte - - // order by section list - for _, s := range cfg.Sections { - if len(result[s.String()]) > 0 { - if len(body) > 0 { - body = append(body, utils.Linebreak) - } - for _, d := range result[s.String()] { - AddIndent(&body, 
&firstWithIndex) - body = append(body, src[d.Start:d.End]...) - } - } - } - - head := make([]byte, headEnd) - copy(head, src[:headEnd]) - tail := make([]byte, len(src)-tailStart) - copy(tail, src[tailStart:]) - - // ensure C - if cStart != 0 { - head = append(head, src[cStart:cEnd]...) - head = append(head, utils.Linebreak) - } - - // add beginning of import block - head = append(head, `import (`...) - head = append(head, utils.Linebreak) - // add end of import block - body = append(body, []byte{utils.RightParenthesis, utils.Linebreak}...) - - log.L().Debug(fmt.Sprintf("head:\n%s", head)) - log.L().Debug(fmt.Sprintf("body:\n%s", body)) - if len(tail) > 20 { - log.L().Debug(fmt.Sprintf("tail:\n%s", tail[:20])) - } else { - log.L().Debug(fmt.Sprintf("tail:\n%s", tail)) - } - - var totalLen int - slices := [][]byte{head, body, tail} - for _, s := range slices { - totalLen += len(s) - } - dist = make([]byte, totalLen) - var i int - for _, s := range slices { - i += copy(dist[i:], s) - } - - // remove ^M(\r\n) from Win to Unix - dist = bytes.ReplaceAll(dist, []byte{utils.WinLinebreak}, []byte{utils.Linebreak}) - - log.L().Debug(fmt.Sprintf("raw:\n%s", dist)) - dist, err = goFormat.Source(dist) - if err != nil { - return nil, nil, err - } - - return src, dist, nil -} - -func AddIndent(in *[]byte, first *bool) { - if *first { - *first = false - return - } - *in = append(*in, utils.Indent) -} diff --git a/vendor/github.com/daixiang0/gci/pkg/gci/testdata.go b/vendor/github.com/daixiang0/gci/pkg/gci/testdata.go deleted file mode 100644 index 972398196..000000000 --- a/vendor/github.com/daixiang0/gci/pkg/gci/testdata.go +++ /dev/null @@ -1,1321 +0,0 @@ -package gci - -type Cases struct { - name, config, in, out string -} - -var commonConfig = `sections: - - Standard - - Default - - Prefix(github.com/daixiang0) -` - -var testCases = []Cases{ - { - "already-good", - - commonConfig, - - `package main - -import ( - "fmt" - - g "github.com/golang" - - "github.com/daixiang0/gci" -) -`, - `package main - -import ( - "fmt" - - g "github.com/golang" - - "github.com/daixiang0/gci" -) -`, - }, - { - "blank-format", - - commonConfig, - - `package main -import ( - "fmt" - - // comment - g "github.com/golang" // comment - - "github.com/daixiang0/gci" -) -`, - `package main - -import ( - "fmt" - - // comment - g "github.com/golang" // comment - - "github.com/daixiang0/gci" -) -`, - }, - { - "cgo-block", - - commonConfig, - - `package main - -import ( - /* - #include "types.h" - */ - "C" -) -`, - `package main - -import ( - /* - #include "types.h" - */ - "C" -) -`, - }, - { - "cgo-block-after-import", - - commonConfig, - - `package main - -import ( - "fmt" - - "github.com/daixiang0/gci" - g "github.com/golang" -) - -// #cgo CFLAGS: -DPNG_DEBUG=1 -// #cgo amd64 386 CFLAGS: -DX86=1 -// #cgo LDFLAGS: -lpng -// #include -import "C" -`, - `package main - -// #cgo CFLAGS: -DPNG_DEBUG=1 -// #cgo amd64 386 CFLAGS: -DX86=1 -// #cgo LDFLAGS: -lpng -// #include -import "C" - -import ( - "fmt" - - g "github.com/golang" - - "github.com/daixiang0/gci" -) -`, - }, - { - "cgo-block-before-import", - - commonConfig, - - `package main - -// #cgo CFLAGS: -DPNG_DEBUG=1 -// #cgo amd64 386 CFLAGS: -DX86=1 -// #cgo LDFLAGS: -lpng -// #include -import "C" - -import ( - "fmt" - - "github.com/daixiang0/gci" - - g "github.com/golang" -) -`, - `package main - -// #cgo CFLAGS: -DPNG_DEBUG=1 -// #cgo amd64 386 CFLAGS: -DX86=1 -// #cgo LDFLAGS: -lpng -// #include -import "C" - -import ( - "fmt" - - g "github.com/golang" - - 
"github.com/daixiang0/gci" -) -`, - }, - { - "cgo-block-mixed", - - commonConfig, - - `package main - -import ( - /* #include "types.h" - */"C" -) -`, - `package main - -import ( - /* #include "types.h" - */"C" -) -`, - }, - { - "cgo-block-mixed-with-content", - - commonConfig, - - `package main - -import ( - /* #include "types.h" - #include "other.h" */"C" -) -`, - `package main - -import ( - /* #include "types.h" - #include "other.h" */"C" -) -`, - }, - { - "cgo-block-prefix", - - commonConfig, - - `package main - -import ( - /* #include "types.h" */ "C" -) -`, - `package main - -import ( - /* #include "types.h" */ "C" -) -`, - }, - { - "cgo-block-single-line", - - commonConfig, - - `package main - -import ( - /* #include "types.h" */ - "C" -) -`, - `package main - -import ( - /* #include "types.h" */ - "C" -) -`, - }, - { - "cgo-line", - - commonConfig, - - `package main - -import ( - // #include "types.h" - "C" -) -`, - `package main - -import ( - // #include "types.h" - "C" -) -`, - }, - { - "cgo-multiline", - - commonConfig, - - `package main - -import ( - // #include "types.h" - // #include "other.h" - "C" -) -`, - `package main - -import ( - // #include "types.h" - // #include "other.h" - "C" -) -`, - }, - { - "cgo-single", - - commonConfig, - - `package main - -import ( - "fmt" - - "github.com/daixiang0/gci" -) - -import "C" - -import "github.com/golang" - -import ( - "github.com/daixiang0/gci" -) -`, - `package main - -import "C" - -import ( - "fmt" - - "github.com/golang" - - "github.com/daixiang0/gci" -) -`, - }, - { - "comment", - - commonConfig, - - `package main -import ( - //Do not forget to run Gci - "fmt" -) -`, - `package main -import ( - //Do not forget to run Gci - "fmt" -) -`, - }, - { - "comment-before-import", - - commonConfig, - - `package main - -// comment -import ( - "fmt" - "os" - - "github.com/daixiang0/gci" -) -`, - `package main - -// comment -import ( - "fmt" - "os" - - "github.com/daixiang0/gci" -) -`, - }, - { - "comment-in-the-tail", - - `sections: - - Standard - - Default - - Prefix(github.com/daixiang0) -`, - `package main - -import ( - "fmt" - - g "github.com/golang" - - "github.com/daixiang0/gci" -) - -type test int - -// test -`, - `package main - -import ( - "fmt" - - g "github.com/golang" - - "github.com/daixiang0/gci" -) - -type test int - -// test -`, - }, - { - "comment-top", - - commonConfig, - - `package main - -import ( - "os" // https://pkg.go.dev/os - // https://pkg.go.dev/fmt - "fmt" -) -`, - `package main - -import ( - // https://pkg.go.dev/fmt - "fmt" - "os" // https://pkg.go.dev/os -) -`, - }, - { - "comment-whithout-whitespace", - - commonConfig, - - `package proc - -import ( - "context"// no separating whitespace here //nolint:confusion -) -`, - `package proc - -import ( - "context"// no separating whitespace here //nolint:confusion -) -`, - }, - { - "comment-with-slashslash", - - commonConfig, - - `package main - -import ( - "fmt" // https://pkg.go.dev/fmt -) -`, - `package main - -import ( - "fmt" // https://pkg.go.dev/fmt -) -`, - }, - { - "custom-order", - - `customOrder: true -sections: - - Prefix(github.com/daixiang0) - - Default - - Standard -`, - `package main - -import ( - "fmt" - - g "github.com/golang" - - "github.com/daixiang0/a" -) -`, - `package main - -import ( - "github.com/daixiang0/a" - - g "github.com/golang" - - "fmt" -) -`, - }, - { - "default-order", - - `sections: - - Standard - - Prefix(github.com/daixiang0) - - Default -`, - `package main - -import ( - "fmt" - - g "github.com/golang" - - 
"github.com/daixiang0/a" -) -`, - `package main - -import ( - "fmt" - - g "github.com/golang" - - "github.com/daixiang0/a" -) -`, - }, - { - "dot-and-blank", - - `sections: - - Standard - - Default - - Prefix(github.com/daixiang0) - - Blank - - Dot -`, - `package main - -import ( - "fmt" - - g "github.com/golang" - . "github.com/golang/dot" - _ "github.com/golang/blank" - - "github.com/daixiang0/a" - "github.com/daixiang0/gci" - "github.com/daixiang0/gci/subtest" - . "github.com/daixiang0/gci/dot" - _ "github.com/daixiang0/gci/blank" -) -`, - `package main - -import ( - "fmt" - - g "github.com/golang" - - "github.com/daixiang0/a" - "github.com/daixiang0/gci" - "github.com/daixiang0/gci/subtest" - - _ "github.com/daixiang0/gci/blank" - _ "github.com/golang/blank" - - . "github.com/daixiang0/gci/dot" - . "github.com/golang/dot" -) -`, - }, - { - "duplicate-imports", - - `sections: - - Standard - - Default - - Prefix(github.com/daixiang0) -`, - `package main - -import ( - "fmt" - - g "github.com/golang" - - a "github.com/daixiang0/gci" - "github.com/daixiang0/gci" -) -`, - `package main - -import ( - "fmt" - - g "github.com/golang" - - "github.com/daixiang0/gci" - a "github.com/daixiang0/gci" -) -`, - }, - { - "grouped-multiple-custom", - - `sections: - - Standard - - Default - - Prefix(github.com/daixiang0,gitlab.com/daixiang0,daixiang0) -`, - `package main - -import ( - "daixiang0/lib1" - "fmt" - "github.com/daixiang0/gci" - "gitlab.com/daixiang0/gci" - g "github.com/golang" - "github.com/daixiang0/gci/subtest" -) -`, - `package main - -import ( - "fmt" - - g "github.com/golang" - - "daixiang0/lib1" - "github.com/daixiang0/gci" - "github.com/daixiang0/gci/subtest" - "gitlab.com/daixiang0/gci" -) -`, - }, - { - "leading-comment", - - commonConfig, - - `package main - -import ( - // foo - "fmt" -) -`, - `package main - -import ( - // foo - "fmt" -) -`, - }, - { - "linebreak", - - `sections: - - Standard - - Default - - Prefix(github.com/daixiang0) -`, - `package main - -import ( - g "github.com/golang" - - "fmt" - - "github.com/daixiang0/gci" - -) -`, - `package main - -import ( - "fmt" - - g "github.com/golang" - - "github.com/daixiang0/gci" -) -`, - }, - { - "linebreak-no-custom", - - `sections: - - Standard - - Default - - Prefix(github.com/daixiang0) -`, - `package main - -import ( - g "github.com/golang" - - "fmt" - -) -`, - `package main - -import ( - "fmt" - - g "github.com/golang" -) -`, - }, - { - "mismatch-section", - - `sections: - - Standard - - Default - - Prefix(github.com/daixiang0) - - Prefix(github.com/daixiang0/gci) -`, - `package main - -import ( - "fmt" - - g "github.com/golang" - - "github.com/daixiang0/gci" -) -`, - `package main - -import ( - "fmt" - - g "github.com/golang" - - "github.com/daixiang0/gci" -) -`, - }, - { - "multiple-custom", - - `sections: - - Standard - - Default - - Prefix(github.com/daixiang0) - - Prefix(github.com/daixiang0/gci) - - Prefix(github.com/daixiang0/gci/subtest) -`, - `package main - -import ( - "fmt" - - g "github.com/golang" - - "github.com/daixiang0/a" - "github.com/daixiang0/gci" - "github.com/daixiang0/gci/subtest" -) -`, - `package main - -import ( - "fmt" - - g "github.com/golang" - - "github.com/daixiang0/a" - - "github.com/daixiang0/gci" - - "github.com/daixiang0/gci/subtest" -) -`, - }, - { - "multiple-imports", - - commonConfig, - - `package main - -import "fmt" - -import "context" - -import ( - "os" - - "github.com/daixiang0/test" -) - -import "math" - - -// main -func main() { -} -`, - `package main - -import ( - "context" - 
"fmt" - "math" - "os" - - "github.com/daixiang0/test" -) - -// main -func main() { -} -`, - }, - { - "multiple-line-comment", - - commonConfig, - - `package proc - -import ( - "context" // in-line comment - "fmt" - "os" - - //nolint:depguard // A multi-line comment explaining why in - // this one case it's OK to use os/exec even though depguard - // is configured to force us to use dlib/exec instead. - "os/exec" - - "golang.org/x/sys/unix" - "github.com/local/dlib/dexec" -) -`, - `package proc - -import ( - "context" // in-line comment - "fmt" - "os" - //nolint:depguard // A multi-line comment explaining why in - // this one case it's OK to use os/exec even though depguard - // is configured to force us to use dlib/exec instead. - "os/exec" - - "github.com/local/dlib/dexec" - "golang.org/x/sys/unix" -) -`, - }, - { - "nochar-after-import", - - commonConfig, - - `package main - -import ( - "fmt" -) -`, - `package main - -import ( - "fmt" -) -`, - }, - { - "no-format", - - commonConfig, - - `package main - -import( -"fmt" - -g "github.com/golang" - -"github.com/daixiang0/gci" -) -`, - `package main - -import ( - "fmt" - - g "github.com/golang" - - "github.com/daixiang0/gci" -) -`, - }, - { - "nolint", - - commonConfig, - - `package main - -import ( - "fmt" - - "github.com/forbidden/pkg" //nolint:depguard - - _ "github.com/daixiang0/gci" //nolint:depguard -) -`, - `package main - -import ( - "fmt" - - "github.com/forbidden/pkg" //nolint:depguard - - _ "github.com/daixiang0/gci" //nolint:depguard -) -`, - }, - { - "number-in-alias", - - commonConfig, - - `package main - -import ( - "fmt" - - go_V1 "github.com/golang" - - "github.com/daixiang0/gci" -) -`, - `package main - -import ( - "fmt" - - go_V1 "github.com/golang" - - "github.com/daixiang0/gci" -) -`, - }, - { - "one-import", - - commonConfig, - - `package main -import ( - "fmt" -) - -func main() { -} -`, - `package main -import ( - "fmt" -) - -func main() { -} -`, - }, - { - "one-import-one-line", - - commonConfig, - - `package main - -import "fmt" - -func main() { -} -`, - `package main - -import "fmt" - -func main() { -} -`, - }, - { - "one-line-import-after-import", - - `sections: - - Standard - - Default - - Prefix(github.com/daixiang0) -`, - `package main - -import ( - "fmt" - "os" - - "github.com/daixiang0/test" -) - -import "context" -`, - `package main - -import ( - "context" - "fmt" - "os" - - "github.com/daixiang0/test" -) -`, - }, - { - "same-prefix-custom", - - `sections: - - Standard - - Default - - Prefix(github.com/daixiang0/gci) - - Prefix(github.com/daixiang0/gci/subtest) -`, - `package main - -import ( - "fmt" - - g "github.com/golang" - - "github.com/daixiang0/gci" - "github.com/daixiang0/gci/subtest" -) -`, - `package main - -import ( - "fmt" - - g "github.com/golang" - - "github.com/daixiang0/gci" - - "github.com/daixiang0/gci/subtest" -) -`, - }, - { - "simple-case", - - commonConfig, - - `package main - -import ( - "golang.org/x/tools" - - "fmt" - - "github.com/daixiang0/gci" -) -`, - `package main - -import ( - "fmt" - - "golang.org/x/tools" - - "github.com/daixiang0/gci" -) -`, - }, - { - "whitespace-test", - - commonConfig, - - `package main - -import ( - "fmt" - "github.com/golang" // golang - alias "github.com/daixiang0/gci" -) -`, - `package main - -import ( - "fmt" - - "github.com/golang" // golang - - alias "github.com/daixiang0/gci" -) -`, - }, - { - "with-above-comment-and-alias", - - commonConfig, - - `package main - -import ( - "fmt" - // golang - _ "github.com/golang" - "github.com/daixiang0/gci" -) 
-`, - `package main - -import ( - "fmt" - - // golang - _ "github.com/golang" - - "github.com/daixiang0/gci" -) -`, - }, - { - "with-comment-and-alias", - - commonConfig, - - `package main - -import ( - "fmt" - _ "github.com/golang" // golang - "github.com/daixiang0/gci" -) -`, - `package main - -import ( - "fmt" - - _ "github.com/golang" // golang - - "github.com/daixiang0/gci" -) -`, - }, - { - "same-prefix-custom", - - `sections: - - Standard - - Default - - Prefix(github.com/daixiang0/gci) - - Prefix(github.com/daixiang0/gci/subtest) -`, - `package main - -import ( - "fmt" - - g "github.com/golang" - - "github.com/daixiang0/gci" - "github.com/daixiang0/gci/subtest" -) -`, - `package main - -import ( - "fmt" - - g "github.com/golang" - - "github.com/daixiang0/gci" - - "github.com/daixiang0/gci/subtest" -) -`, - }, - { - "same-prefix-custom", - - `sections: - - Standard - - Default - - Prefix(github.com/daixiang0/gci) - - Prefix(github.com/daixiang0/gci/subtest) -`, - `package main - -import ( - "fmt" - - g "github.com/golang" - - "github.com/daixiang0/gci" - "github.com/daixiang0/gci/subtest" -) -`, - `package main - -import ( - "fmt" - - g "github.com/golang" - - "github.com/daixiang0/gci" - - "github.com/daixiang0/gci/subtest" -) -`, - }, - { - "blank-in-config", - - `sections: - - Standard - - Default - - Prefix( github.com/daixiang0/gci, github.com/daixiang0/gci/subtest ) -`, - `package main - -import ( - "fmt" - - g "github.com/golang" - - "github.com/daixiang0/gci" - "github.com/daixiang0/gci/subtest" -) -`, - `package main - -import ( - "fmt" - - g "github.com/golang" - - "github.com/daixiang0/gci" - "github.com/daixiang0/gci/subtest" -) -`, - }, - { - "alias", - - `sections: - - Standard - - Default - - Alias -`, - `package main - -import ( - testing "github.com/daixiang0/test" - "fmt" - - g "github.com/golang" - - "github.com/daixiang0/gci" - "github.com/daixiang0/gci/subtest" -) -`, - `package main - -import ( - "fmt" - - "github.com/daixiang0/gci" - "github.com/daixiang0/gci/subtest" - - testing "github.com/daixiang0/test" - g "github.com/golang" -) -`, - }, - { - "no-trailing-newline", - - `sections: - - Standard -`, - `package main - -import ( - "net" - "fmt" -)`, - `package main - -import ( - "fmt" - "net" -) -`, - }, - { - "basic module", - // implicitly relies on the local module name being: github.com/daixiang0/gci - `sections: - - Standard - - LocalModule -`, - `package main - -import ( - "os" - "github.com/daixiang0/gci/cmd/gci" -) -`, - `package main - -import ( - "os" - - "github.com/daixiang0/gci/cmd/gci" -) -`, - }, -} diff --git a/vendor/github.com/daixiang0/gci/pkg/io/file.go b/vendor/github.com/daixiang0/gci/pkg/io/file.go deleted file mode 100644 index 79950792c..000000000 --- a/vendor/github.com/daixiang0/gci/pkg/io/file.go +++ /dev/null @@ -1,64 +0,0 @@ -package io - -import "io/ioutil" - -// FileObj allows mocking the access to files -type FileObj interface { - Load() ([]byte, error) - Path() string -} - -// File represents a file that can be loaded from the file system -type File struct { - FilePath string -} - -func (f File) Path() string { - return f.FilePath -} - -func (f File) Load() ([]byte, error) { - return ioutil.ReadFile(f.FilePath) -} - -// FileGeneratorFunc returns a list of files that can be loaded and processed -type FileGeneratorFunc func() ([]FileObj, error) - -func (a FileGeneratorFunc) Combine(b FileGeneratorFunc) FileGeneratorFunc { - return func() ([]FileObj, error) { - files, err := a() - if err != nil { - return nil, err - } - 
additionalFiles, err := b() - if err != nil { - return nil, err - } - files = append(files, additionalFiles...) - return files, err - } -} - -func GoFilesInPathsGenerator(paths []string, skipVendor bool) FileGeneratorFunc { - checkFunc := isGoFile - if skipVendor { - checkFunc = checkChains(isGoFile, isOutsideVendorDir) - } - - return FilesInPathsGenerator(paths, checkFunc) -} - -func FilesInPathsGenerator(paths []string, fileCheckFun fileCheckFunction) FileGeneratorFunc { - return func() (foundFiles []FileObj, err error) { - for _, path := range paths { - files, err := FindFilesForPath(path, fileCheckFun) - if err != nil { - return nil, err - } - for _, filePath := range files { - foundFiles = append(foundFiles, File{filePath}) - } - } - return foundFiles, nil - } -} diff --git a/vendor/github.com/daixiang0/gci/pkg/io/search.go b/vendor/github.com/daixiang0/gci/pkg/io/search.go deleted file mode 100644 index cd821582e..000000000 --- a/vendor/github.com/daixiang0/gci/pkg/io/search.go +++ /dev/null @@ -1,77 +0,0 @@ -package io - -import ( - "io/fs" - "os" - "path/filepath" -) - -type fileCheckFunction func(path string, file os.FileInfo) bool - -func FindFilesForPath(path string, fileCheckFun fileCheckFunction) ([]string, error) { - switch entry, err := os.Stat(path); { - case err != nil: - return nil, err - case entry.IsDir(): - return findFilesForDirectory(path, fileCheckFun) - case fileCheckFun(path, entry): - return []string{filepath.Clean(path)}, nil - default: - return []string{}, nil - } -} - -func findFilesForDirectory(dirPath string, fileCheckFun fileCheckFunction) ([]string, error) { - var filePaths []string - err := filepath.WalkDir(dirPath, func(path string, entry fs.DirEntry, err error) error { - if err != nil { - return err - } - file, err := entry.Info() - if err != nil { - return err - } - if !entry.IsDir() && fileCheckFun(path, file) { - filePaths = append(filePaths, filepath.Clean(path)) - } - return nil - }) - if err != nil { - return nil, err - } - return filePaths, nil -} - -func isGoFile(_ string, file os.FileInfo) bool { - return !file.IsDir() && filepath.Ext(file.Name()) == ".go" -} - -func isOutsideVendorDir(path string, _ os.FileInfo) bool { - for { - base := filepath.Base(path) - if base == "vendor" { - return false - } - - prevPath := path - path = filepath.Dir(path) - - if prevPath == path { - break - } - } - - return true -} - -func checkChains(funcs ...fileCheckFunction) fileCheckFunction { - return func(path string, file os.FileInfo) bool { - for _, checkFunc := range funcs { - if !checkFunc(path, file) { - return false - } - } - - return true - } -} diff --git a/vendor/github.com/daixiang0/gci/pkg/io/stdin.go b/vendor/github.com/daixiang0/gci/pkg/io/stdin.go deleted file mode 100644 index ccab2844f..000000000 --- a/vendor/github.com/daixiang0/gci/pkg/io/stdin.go +++ /dev/null @@ -1,27 +0,0 @@ -package io - -import ( - "io/ioutil" - "os" -) - -type stdInFile struct{} - -func (s stdInFile) Load() ([]byte, error) { - return ioutil.ReadAll(os.Stdin) -} - -func (s stdInFile) Path() string { - return "StdIn" -} - -var StdInGenerator FileGeneratorFunc = func() ([]FileObj, error) { - stat, err := os.Stdin.Stat() - if err != nil { - return nil, err - } - if (stat.Mode() & os.ModeCharDevice) == 0 { - return []FileObj{stdInFile{}}, nil - } - return []FileObj{}, nil -} diff --git a/vendor/github.com/daixiang0/gci/pkg/log/log.go b/vendor/github.com/daixiang0/gci/pkg/log/log.go deleted file mode 100644 index ab33739ca..000000000 --- 
a/vendor/github.com/daixiang0/gci/pkg/log/log.go +++ /dev/null @@ -1,50 +0,0 @@ -package log - -import ( - "sync" - - "go.uber.org/zap" - "go.uber.org/zap/zapcore" -) - -// Use L to log with Zap -var logger *zap.Logger - -// Keep the config to reference the atomicLevel for changing levels -var logConfig zap.Config - -var doOnce sync.Once - -// InitLogger sets up the logger -func InitLogger() { - doOnce.Do(func() { - logConfig = zap.NewDevelopmentConfig() - - logConfig.EncoderConfig.TimeKey = "timestamp" - logConfig.EncoderConfig.EncodeTime = zapcore.ISO8601TimeEncoder - logConfig.Level.SetLevel(zapcore.InfoLevel) - logConfig.OutputPaths = []string{"stderr"} - - var err error - logger, err = logConfig.Build() - if err != nil { - panic(err) - } - }) -} - -// SetLevel allows you to set the level of the default gci logger. -// This will not work if you replace the logger -func SetLevel(level zapcore.Level) { - logConfig.Level.SetLevel(level) -} - -// L returns the logger -func L() *zap.Logger { - return logger -} - -// SetLogger allows you to set the logger to whatever you want -func SetLogger(l *zap.Logger) { - logger = l -} diff --git a/vendor/github.com/daixiang0/gci/pkg/parse/parse.go b/vendor/github.com/daixiang0/gci/pkg/parse/parse.go deleted file mode 100644 index e8532f850..000000000 --- a/vendor/github.com/daixiang0/gci/pkg/parse/parse.go +++ /dev/null @@ -1,200 +0,0 @@ -package parse - -import ( - "go/ast" - "go/parser" - "go/token" - "sort" - "strings" -) - -const C = "\"C\"" - -type GciImports struct { - // original index of import group, include doc, name, path and comment - Start, End int - Name, Path string -} -type ImportList []*GciImports - -func (l ImportList) Len() int { - return len(l) -} - -func (l ImportList) Less(i, j int) bool { - if strings.Compare(l[i].Path, l[j].Path) == 0 { - return strings.Compare(l[i].Name, l[j].Name) < 0 - } - - return strings.Compare(l[i].Path, l[j].Path) < 0 -} - -func (l ImportList) Swap(i, j int) { l[i], l[j] = l[j], l[i] } - -/* - * AST considers a import block as below: - * ``` - * Doc - * Name Path Comment - * ``` - * An example is like below: - * ``` - * // test - * test "fmt" // test - * ``` - * getImports return a import block with name, start and end index - */ -func getImports(imp *ast.ImportSpec) (start, end int, name string) { - if imp.Doc != nil { - // doc poc need minus one to get the first index of comment - start = int(imp.Doc.Pos()) - 1 - } else { - if imp.Name != nil { - // name pos need minus one too - start = int(imp.Name.Pos()) - 1 - } else { - // path pos start without quote, need minus one for it - start = int(imp.Path.Pos()) - 1 - } - } - - if imp.Name != nil { - name = imp.Name.Name - } - - if imp.Comment != nil { - end = int(imp.Comment.End()) - } else { - end = int(imp.Path.End()) - } - return -} - -func ParseFile(src []byte, filename string) (ImportList, int, int, int, int, error) { - fileSet := token.NewFileSet() - f, err := parser.ParseFile(fileSet, filename, src, parser.ParseComments) - if err != nil { - return nil, 0, 0, 0, 0, err - } - - if len(f.Imports) == 0 { - return nil, 0, 0, 0, 0, NoImportError{} - } - - var ( - // headEnd means the start of import block - headEnd int - // tailStart means the end + 1 of import block - tailStart int - // cStart means the start of C import block - cStart int - // cEnd means the end of C import block - cEnd int - data ImportList - ) - - for index, decl := range f.Decls { - switch decl.(type) { - // skip BadDecl and FuncDecl - case *ast.GenDecl: - genDecl := 
decl.(*ast.GenDecl) - - if genDecl.Tok == token.IMPORT { - // there are two cases, both end with linebreak: - // 1. - // import ( - // "xxxx" - // ) - // 2. - // import "xxx" - if headEnd == 0 { - headEnd = int(decl.Pos()) - 1 - } - tailStart = int(decl.End()) - if tailStart > len(src) { - tailStart = len(src) - } - - for _, spec := range genDecl.Specs { - imp := spec.(*ast.ImportSpec) - // there are only one C import block - // ensure C import block is the first import block - if imp.Path.Value == C { - /* - common case: - - // #include - import "C" - - notice that decl.Pos() == genDecl.Pos() > genDecl.Doc.Pos() - */ - if genDecl.Doc != nil { - cStart = int(genDecl.Doc.Pos()) - 1 - // if C import block is the first, update headEnd - if index == 0 { - headEnd = cStart - } - } else { - /* - special case: - - import "C" - */ - cStart = int(decl.Pos()) - 1 - } - - cEnd = int(decl.End()) - - continue - } - - start, end, name := getImports(imp) - - data = append(data, &GciImports{ - Start: start, - End: end, - Name: name, - Path: strings.Trim(imp.Path.Value, `"`), - }) - } - } - } - } - - sort.Sort(data) - return data, headEnd, tailStart, cStart, cEnd, nil -} - -// IsGeneratedFileByComment reports whether the source file is generated code. -// Using a bit laxer rules than https://golang.org/s/generatedcode to -// match more generated code. -// Taken from https://github.com/golangci/golangci-lint. -func IsGeneratedFileByComment(in string) bool { - const ( - genCodeGenerated = "code generated" - genDoNotEdit = "do not edit" - genAutoFile = "autogenerated file" // easyjson - genAutoGenerated = "automatically generated" // genny - ) - - markers := []string{genCodeGenerated, genDoNotEdit, genAutoFile, genAutoGenerated} - in = strings.ToLower(in) - for _, marker := range markers { - if strings.Contains(in, marker) { - return true - } - } - - return false -} - -type NoImportError struct{} - -func (n NoImportError) Error() string { - return "No imports" -} - -func (i NoImportError) Is(err error) bool { - _, ok := err.(NoImportError) - return ok -} diff --git a/vendor/github.com/daixiang0/gci/pkg/section/alias.go b/vendor/github.com/daixiang0/gci/pkg/section/alias.go deleted file mode 100644 index 423e96acf..000000000 --- a/vendor/github.com/daixiang0/gci/pkg/section/alias.go +++ /dev/null @@ -1,25 +0,0 @@ -package section - -import ( - "github.com/daixiang0/gci/pkg/parse" - "github.com/daixiang0/gci/pkg/specificity" -) - -type Alias struct{} - -const AliasType = "alias" - -func (b Alias) MatchSpecificity(spec *parse.GciImports) specificity.MatchSpecificity { - if spec.Name != "." 
&& spec.Name != "_" && spec.Name != "" { - return specificity.NameMatch{} - } - return specificity.MisMatch{} -} - -func (b Alias) String() string { - return AliasType -} - -func (b Alias) Type() string { - return AliasType -} diff --git a/vendor/github.com/daixiang0/gci/pkg/section/blank.go b/vendor/github.com/daixiang0/gci/pkg/section/blank.go deleted file mode 100644 index 4a2741773..000000000 --- a/vendor/github.com/daixiang0/gci/pkg/section/blank.go +++ /dev/null @@ -1,25 +0,0 @@ -package section - -import ( - "github.com/daixiang0/gci/pkg/parse" - "github.com/daixiang0/gci/pkg/specificity" -) - -type Blank struct{} - -const BlankType = "blank" - -func (b Blank) MatchSpecificity(spec *parse.GciImports) specificity.MatchSpecificity { - if spec.Name == "_" { - return specificity.NameMatch{} - } - return specificity.MisMatch{} -} - -func (b Blank) String() string { - return BlankType -} - -func (b Blank) Type() string { - return BlankType -} diff --git a/vendor/github.com/daixiang0/gci/pkg/section/commentline.go b/vendor/github.com/daixiang0/gci/pkg/section/commentline.go deleted file mode 100644 index c3ddd0824..000000000 --- a/vendor/github.com/daixiang0/gci/pkg/section/commentline.go +++ /dev/null @@ -1,24 +0,0 @@ -package section - -import ( - "fmt" - - "github.com/daixiang0/gci/pkg/parse" - "github.com/daixiang0/gci/pkg/specificity" -) - -type CommentLine struct { - Comment string -} - -func (c CommentLine) MatchSpecificity(spec *parse.GciImports) specificity.MatchSpecificity { - return specificity.MisMatch{} -} - -func (c CommentLine) String() string { - return fmt.Sprintf("commentline(%s)", c.Comment) -} - -func (c CommentLine) Type() string { - return "commentline" -} diff --git a/vendor/github.com/daixiang0/gci/pkg/section/default.go b/vendor/github.com/daixiang0/gci/pkg/section/default.go deleted file mode 100644 index 3af07a092..000000000 --- a/vendor/github.com/daixiang0/gci/pkg/section/default.go +++ /dev/null @@ -1,22 +0,0 @@ -package section - -import ( - "github.com/daixiang0/gci/pkg/parse" - "github.com/daixiang0/gci/pkg/specificity" -) - -const DefaultType = "default" - -type Default struct{} - -func (d Default) MatchSpecificity(spec *parse.GciImports) specificity.MatchSpecificity { - return specificity.Default{} -} - -func (d Default) String() string { - return DefaultType -} - -func (d Default) Type() string { - return DefaultType -} diff --git a/vendor/github.com/daixiang0/gci/pkg/section/dot.go b/vendor/github.com/daixiang0/gci/pkg/section/dot.go deleted file mode 100644 index 8112eeb1d..000000000 --- a/vendor/github.com/daixiang0/gci/pkg/section/dot.go +++ /dev/null @@ -1,25 +0,0 @@ -package section - -import ( - "github.com/daixiang0/gci/pkg/parse" - "github.com/daixiang0/gci/pkg/specificity" -) - -type Dot struct{} - -const DotType = "dot" - -func (d Dot) MatchSpecificity(spec *parse.GciImports) specificity.MatchSpecificity { - if spec.Name == "." 
{ - return specificity.NameMatch{} - } - return specificity.MisMatch{} -} - -func (d Dot) String() string { - return DotType -} - -func (d Dot) Type() string { - return DotType -} diff --git a/vendor/github.com/daixiang0/gci/pkg/section/errors.go b/vendor/github.com/daixiang0/gci/pkg/section/errors.go deleted file mode 100644 index 0a1209135..000000000 --- a/vendor/github.com/daixiang0/gci/pkg/section/errors.go +++ /dev/null @@ -1,107 +0,0 @@ -package section - -import ( - "errors" - "fmt" - - "github.com/daixiang0/gci/pkg/parse" - "github.com/daixiang0/gci/pkg/utils" -) - -type SectionParsingError struct { - error -} - -func (s SectionParsingError) Unwrap() error { - return s.error -} - -func (s SectionParsingError) Wrap(sectionStr string) error { - return fmt.Errorf("failed to parse section %q: %w", sectionStr, s) -} - -func (s SectionParsingError) Is(err error) bool { - _, ok := err.(SectionParsingError) - return ok -} - -var MissingParameterClosingBracketsError = fmt.Errorf("section parameter is missing closing %q", utils.RightParenthesis) - -var MoreThanOneOpeningQuotesError = fmt.Errorf("found more than one %q parameter start sequences", utils.RightParenthesis) - -var SectionTypeDoesNotAcceptParametersError = errors.New("section type does not accept a parameter") - -var SectionTypeDoesNotAcceptPrefixError = errors.New("section may not contain a Prefix") - -var SectionTypeDoesNotAcceptSuffixError = errors.New("section may not contain a Suffix") - -type EqualSpecificityMatchError struct { - Imports *parse.GciImports - SectionA, SectionB Section -} - -func (e EqualSpecificityMatchError) Error() string { - return fmt.Sprintf("Import %v matched section %s and %s equally", e.Imports, e.SectionA, e.SectionB) -} - -func (e EqualSpecificityMatchError) Is(err error) bool { - _, ok := err.(EqualSpecificityMatchError) - return ok -} - -type NoMatchingSectionForImportError struct { - Imports *parse.GciImports -} - -func (n NoMatchingSectionForImportError) Error() string { - return fmt.Sprintf("No section found for Import: %v", n.Imports) -} - -func (n NoMatchingSectionForImportError) Is(err error) bool { - _, ok := err.(NoMatchingSectionForImportError) - return ok -} - -type InvalidImportSplitError struct { - segments []string -} - -func (i InvalidImportSplitError) Error() string { - return fmt.Sprintf("separating the inline comment from the import yielded an invalid number of segments: %v", i.segments) -} - -func (i InvalidImportSplitError) Is(err error) bool { - _, ok := err.(InvalidImportSplitError) - return ok -} - -type InvalidAliasSplitError struct { - segments []string -} - -func (i InvalidAliasSplitError) Error() string { - return fmt.Sprintf("separating the alias from the path yielded an invalid number of segments: %v", i.segments) -} - -func (i InvalidAliasSplitError) Is(err error) bool { - _, ok := err.(InvalidAliasSplitError) - return ok -} - -var ( - MissingImportStatementError = FileParsingError{errors.New("no import statement present in File")} - ImportStatementNotClosedError = FileParsingError{errors.New("import statement not closed")} -) - -type FileParsingError struct { - error -} - -func (f FileParsingError) Unwrap() error { - return f.error -} - -func (f FileParsingError) Is(err error) bool { - _, ok := err.(FileParsingError) - return ok -} diff --git a/vendor/github.com/daixiang0/gci/pkg/section/local_module.go b/vendor/github.com/daixiang0/gci/pkg/section/local_module.go deleted file mode 100644 index 4e1edcba3..000000000 --- 
a/vendor/github.com/daixiang0/gci/pkg/section/local_module.go +++ /dev/null @@ -1,77 +0,0 @@ -package section - -import ( - "fmt" - "strings" - - "golang.org/x/tools/go/packages" - - "github.com/daixiang0/gci/pkg/parse" - "github.com/daixiang0/gci/pkg/specificity" -) - -const LocalModuleType = "localmodule" - -type LocalModule struct { - Paths []string -} - -func (m *LocalModule) MatchSpecificity(spec *parse.GciImports) specificity.MatchSpecificity { - for _, modPath := range m.Paths { - // also check path etc. - if strings.HasPrefix(spec.Path, modPath) { - return specificity.LocalModule{} - } - } - - return specificity.MisMatch{} -} - -func (m *LocalModule) String() string { - return LocalModuleType -} - -func (m *LocalModule) Type() string { - return LocalModuleType -} - -// Configure configures the module section by finding the module -// for the current path -func (m *LocalModule) Configure() error { - modPaths, err := findLocalModules() - if err != nil { - return err - } - m.Paths = modPaths - return nil -} - -func findLocalModules() ([]string, error) { - packages, err := packages.Load( - // find the package in the current dir and load its module - // NeedFiles so there is some more info in package errors - &packages.Config{Mode: packages.NeedModule | packages.NeedFiles}, - ".", - ) - if err != nil { - return nil, fmt.Errorf("failed to load local modules: %v", err) - } - - uniqueModules := make(map[string]struct{}) - - for _, pkg := range packages { - if len(pkg.Errors) != 0 { - return nil, fmt.Errorf("error reading local packages: %v", pkg.Errors) - } - if pkg.Module != nil { - uniqueModules[pkg.Module.Path] = struct{}{} - } - } - - modPaths := make([]string, 0, len(uniqueModules)) - for mod := range uniqueModules { - modPaths = append(modPaths, mod) - } - - return modPaths, nil -} diff --git a/vendor/github.com/daixiang0/gci/pkg/section/newline.go b/vendor/github.com/daixiang0/gci/pkg/section/newline.go deleted file mode 100644 index 4bff91b9d..000000000 --- a/vendor/github.com/daixiang0/gci/pkg/section/newline.go +++ /dev/null @@ -1,22 +0,0 @@ -package section - -import ( - "github.com/daixiang0/gci/pkg/parse" - "github.com/daixiang0/gci/pkg/specificity" -) - -const newLineName = "newline" - -type NewLine struct{} - -func (n NewLine) MatchSpecificity(spec *parse.GciImports) specificity.MatchSpecificity { - return specificity.MisMatch{} -} - -func (n NewLine) String() string { - return newLineName -} - -func (n NewLine) Type() string { - return newLineName -} diff --git a/vendor/github.com/daixiang0/gci/pkg/section/parser.go b/vendor/github.com/daixiang0/gci/pkg/section/parser.go deleted file mode 100644 index 62ed1582a..000000000 --- a/vendor/github.com/daixiang0/gci/pkg/section/parser.go +++ /dev/null @@ -1,49 +0,0 @@ -package section - -import ( - "errors" - "fmt" - "strings" -) - -func Parse(data []string) (SectionList, error) { - if len(data) == 0 { - return nil, nil - } - - var list SectionList - var errString string - for _, d := range data { - s := strings.ToLower(d) - if len(s) == 0 { - return nil, nil - } - - if s == "default" { - list = append(list, Default{}) - } else if s == "standard" { - list = append(list, Standard{}) - } else if s == "newline" { - list = append(list, NewLine{}) - } else if strings.HasPrefix(s, "prefix(") && len(d) > 8 { - list = append(list, Custom{d[7 : len(d)-1]}) - } else if strings.HasPrefix(s, "commentline(") && len(d) > 13 { - list = append(list, Custom{d[12 : len(d)-1]}) - } else if s == "dot" { - list = append(list, Dot{}) - } else if s == 
"blank" { - list = append(list, Blank{}) - } else if s == "alias" { - list = append(list, Alias{}) - } else if s == "localmodule" { - // pointer because we need to mutate the section at configuration time - list = append(list, &LocalModule{}) - } else { - errString += fmt.Sprintf(" %s", s) - } - } - if errString != "" { - return nil, errors.New(fmt.Sprintf("invalid params:%s", errString)) - } - return list, nil -} diff --git a/vendor/github.com/daixiang0/gci/pkg/section/prefix.go b/vendor/github.com/daixiang0/gci/pkg/section/prefix.go deleted file mode 100644 index 30bdd8f4e..000000000 --- a/vendor/github.com/daixiang0/gci/pkg/section/prefix.go +++ /dev/null @@ -1,38 +0,0 @@ -package section - -import ( - "fmt" - "strings" - - "github.com/daixiang0/gci/pkg/parse" - "github.com/daixiang0/gci/pkg/specificity" -) - -type Custom struct { - Prefix string -} - -// CustomSeparator allows you to group multiple custom prefix together in the same section -// gci diff -s standard -s default -s prefix(github.com/company,gitlab.com/company,companysuffix) -const CustomSeparator = "," - -const CustomType = "custom" - -func (c Custom) MatchSpecificity(spec *parse.GciImports) specificity.MatchSpecificity { - for _, prefix := range strings.Split(c.Prefix, CustomSeparator) { - prefix = strings.TrimSpace(prefix) - if strings.HasPrefix(spec.Path, prefix) { - return specificity.Match{Length: len(prefix)} - } - } - - return specificity.MisMatch{} -} - -func (c Custom) String() string { - return fmt.Sprintf("prefix(%s)", c.Prefix) -} - -func (c Custom) Type() string { - return CustomType -} diff --git a/vendor/github.com/daixiang0/gci/pkg/section/section.go b/vendor/github.com/daixiang0/gci/pkg/section/section.go deleted file mode 100644 index cc0a43f2f..000000000 --- a/vendor/github.com/daixiang0/gci/pkg/section/section.go +++ /dev/null @@ -1,36 +0,0 @@ -package section - -import ( - "github.com/daixiang0/gci/pkg/parse" - "github.com/daixiang0/gci/pkg/specificity" -) - -// Section defines a part of the formatted output. 
-type Section interface { - // MatchSpecificity returns how well an Import matches to this Section - MatchSpecificity(spec *parse.GciImports) specificity.MatchSpecificity - - // String Implements the stringer interface - String() string - - // return section type - Type() string -} - -type SectionList []Section - -func (list SectionList) String() []string { - var output []string - for _, section := range list { - output = append(output, section.String()) - } - return output -} - -func DefaultSections() SectionList { - return SectionList{Standard{}, Default{}} -} - -func DefaultSectionSeparators() SectionList { - return SectionList{NewLine{}} -} diff --git a/vendor/github.com/daixiang0/gci/pkg/section/standard.go b/vendor/github.com/daixiang0/gci/pkg/section/standard.go deleted file mode 100644 index 26c7e9dc7..000000000 --- a/vendor/github.com/daixiang0/gci/pkg/section/standard.go +++ /dev/null @@ -1,30 +0,0 @@ -package section - -import ( - "github.com/daixiang0/gci/pkg/parse" - "github.com/daixiang0/gci/pkg/specificity" -) - -const StandardType = "standard" - -type Standard struct{} - -func (s Standard) MatchSpecificity(spec *parse.GciImports) specificity.MatchSpecificity { - if isStandard(spec.Path) { - return specificity.StandardMatch{} - } - return specificity.MisMatch{} -} - -func (s Standard) String() string { - return StandardType -} - -func (s Standard) Type() string { - return StandardType -} - -func isStandard(pkg string) bool { - _, ok := standardPackages[pkg] - return ok -} diff --git a/vendor/github.com/daixiang0/gci/pkg/section/standard_list.go b/vendor/github.com/daixiang0/gci/pkg/section/standard_list.go deleted file mode 100644 index 551bba428..000000000 --- a/vendor/github.com/daixiang0/gci/pkg/section/standard_list.go +++ /dev/null @@ -1,170 +0,0 @@ -package section - -// Code generated based on go1.22.0 X:arenas. DO NOT EDIT. 
- -var standardPackages = map[string]struct{}{ - "archive/tar": {}, - "archive/zip": {}, - "arena": {}, - "bufio": {}, - "bytes": {}, - "cmp": {}, - "compress/bzip2": {}, - "compress/flate": {}, - "compress/gzip": {}, - "compress/lzw": {}, - "compress/zlib": {}, - "container/heap": {}, - "container/list": {}, - "container/ring": {}, - "context": {}, - "crypto": {}, - "crypto/aes": {}, - "crypto/cipher": {}, - "crypto/des": {}, - "crypto/dsa": {}, - "crypto/ecdh": {}, - "crypto/ecdsa": {}, - "crypto/ed25519": {}, - "crypto/elliptic": {}, - "crypto/hmac": {}, - "crypto/md5": {}, - "crypto/rand": {}, - "crypto/rc4": {}, - "crypto/rsa": {}, - "crypto/sha1": {}, - "crypto/sha256": {}, - "crypto/sha512": {}, - "crypto/subtle": {}, - "crypto/tls": {}, - "crypto/x509": {}, - "crypto/x509/pkix": {}, - "database/sql": {}, - "database/sql/driver": {}, - "debug/buildinfo": {}, - "debug/dwarf": {}, - "debug/elf": {}, - "debug/gosym": {}, - "debug/macho": {}, - "debug/pe": {}, - "debug/plan9obj": {}, - "embed": {}, - "encoding": {}, - "encoding/ascii85": {}, - "encoding/asn1": {}, - "encoding/base32": {}, - "encoding/base64": {}, - "encoding/binary": {}, - "encoding/csv": {}, - "encoding/gob": {}, - "encoding/hex": {}, - "encoding/json": {}, - "encoding/pem": {}, - "encoding/xml": {}, - "errors": {}, - "expvar": {}, - "flag": {}, - "fmt": {}, - "go/ast": {}, - "go/build": {}, - "go/build/constraint": {}, - "go/constant": {}, - "go/doc": {}, - "go/doc/comment": {}, - "go/format": {}, - "go/importer": {}, - "go/parser": {}, - "go/printer": {}, - "go/scanner": {}, - "go/token": {}, - "go/types": {}, - "go/version": {}, - "hash": {}, - "hash/adler32": {}, - "hash/crc32": {}, - "hash/crc64": {}, - "hash/fnv": {}, - "hash/maphash": {}, - "html": {}, - "html/template": {}, - "image": {}, - "image/color": {}, - "image/color/palette": {}, - "image/draw": {}, - "image/gif": {}, - "image/jpeg": {}, - "image/png": {}, - "index/suffixarray": {}, - "io": {}, - "io/fs": {}, - "io/ioutil": {}, - "log": {}, - "log/slog": {}, - "log/syslog": {}, - "maps": {}, - "math": {}, - "math/big": {}, - "math/bits": {}, - "math/cmplx": {}, - "math/rand": {}, - "math/rand/v2": {}, - "mime": {}, - "mime/multipart": {}, - "mime/quotedprintable": {}, - "net": {}, - "net/http": {}, - "net/http/cgi": {}, - "net/http/cookiejar": {}, - "net/http/fcgi": {}, - "net/http/httptest": {}, - "net/http/httptrace": {}, - "net/http/httputil": {}, - "net/http/pprof": {}, - "net/mail": {}, - "net/netip": {}, - "net/rpc": {}, - "net/rpc/jsonrpc": {}, - "net/smtp": {}, - "net/textproto": {}, - "net/url": {}, - "os": {}, - "os/exec": {}, - "os/signal": {}, - "os/user": {}, - "path": {}, - "path/filepath": {}, - "plugin": {}, - "reflect": {}, - "regexp": {}, - "regexp/syntax": {}, - "runtime": {}, - "runtime/cgo": {}, - "runtime/coverage": {}, - "runtime/debug": {}, - "runtime/metrics": {}, - "runtime/pprof": {}, - "runtime/race": {}, - "runtime/trace": {}, - "slices": {}, - "sort": {}, - "strconv": {}, - "strings": {}, - "sync": {}, - "sync/atomic": {}, - "syscall": {}, - "testing": {}, - "testing/fstest": {}, - "testing/iotest": {}, - "testing/quick": {}, - "testing/slogtest": {}, - "text/scanner": {}, - "text/tabwriter": {}, - "text/template": {}, - "text/template/parse": {}, - "time": {}, - "time/tzdata": {}, - "unicode": {}, - "unicode/utf16": {}, - "unicode/utf8": {}, - "unsafe": {}, -} diff --git a/vendor/github.com/daixiang0/gci/pkg/specificity/default.go b/vendor/github.com/daixiang0/gci/pkg/specificity/default.go deleted file mode 100644 index 
f7ae4b87b..000000000 --- a/vendor/github.com/daixiang0/gci/pkg/specificity/default.go +++ /dev/null @@ -1,19 +0,0 @@ -package specificity - -type Default struct{} - -func (d Default) IsMoreSpecific(than MatchSpecificity) bool { - return isMoreSpecific(d, than) -} - -func (d Default) Equal(to MatchSpecificity) bool { - return equalSpecificity(d, to) -} - -func (d Default) class() specificityClass { - return DefaultClass -} - -func (d Default) String() string { - return "Default" -} diff --git a/vendor/github.com/daixiang0/gci/pkg/specificity/local_module.go b/vendor/github.com/daixiang0/gci/pkg/specificity/local_module.go deleted file mode 100644 index ae482fec4..000000000 --- a/vendor/github.com/daixiang0/gci/pkg/specificity/local_module.go +++ /dev/null @@ -1,15 +0,0 @@ -package specificity - -type LocalModule struct{} - -func (m LocalModule) IsMoreSpecific(than MatchSpecificity) bool { - return isMoreSpecific(m, than) -} - -func (m LocalModule) Equal(to MatchSpecificity) bool { - return equalSpecificity(m, to) -} - -func (LocalModule) class() specificityClass { - return LocalModuleClass -} diff --git a/vendor/github.com/daixiang0/gci/pkg/specificity/match.go b/vendor/github.com/daixiang0/gci/pkg/specificity/match.go deleted file mode 100644 index f08d2b66b..000000000 --- a/vendor/github.com/daixiang0/gci/pkg/specificity/match.go +++ /dev/null @@ -1,24 +0,0 @@ -package specificity - -import "fmt" - -type Match struct { - Length int -} - -func (m Match) IsMoreSpecific(than MatchSpecificity) bool { - otherMatch, isMatch := than.(Match) - return isMoreSpecific(m, than) || (isMatch && m.Length > otherMatch.Length) -} - -func (m Match) Equal(to MatchSpecificity) bool { - return equalSpecificity(m, to) -} - -func (m Match) class() specificityClass { - return MatchClass -} - -func (m Match) String() string { - return fmt.Sprintf("Match(length: %d)", m.Length) -} diff --git a/vendor/github.com/daixiang0/gci/pkg/specificity/mismatch.go b/vendor/github.com/daixiang0/gci/pkg/specificity/mismatch.go deleted file mode 100644 index 8e8711146..000000000 --- a/vendor/github.com/daixiang0/gci/pkg/specificity/mismatch.go +++ /dev/null @@ -1,19 +0,0 @@ -package specificity - -type MisMatch struct{} - -func (m MisMatch) IsMoreSpecific(than MatchSpecificity) bool { - return isMoreSpecific(m, than) -} - -func (m MisMatch) Equal(to MatchSpecificity) bool { - return equalSpecificity(m, to) -} - -func (m MisMatch) class() specificityClass { - return MisMatchClass -} - -func (m MisMatch) String() string { - return "Mismatch" -} diff --git a/vendor/github.com/daixiang0/gci/pkg/specificity/name.go b/vendor/github.com/daixiang0/gci/pkg/specificity/name.go deleted file mode 100644 index 1900a0ac5..000000000 --- a/vendor/github.com/daixiang0/gci/pkg/specificity/name.go +++ /dev/null @@ -1,19 +0,0 @@ -package specificity - -type NameMatch struct{} - -func (n NameMatch) IsMoreSpecific(than MatchSpecificity) bool { - return isMoreSpecific(n, than) -} - -func (n NameMatch) Equal(to MatchSpecificity) bool { - return equalSpecificity(n, to) -} - -func (n NameMatch) class() specificityClass { - return NameClass -} - -func (n NameMatch) String() string { - return "Name" -} diff --git a/vendor/github.com/daixiang0/gci/pkg/specificity/specificity.go b/vendor/github.com/daixiang0/gci/pkg/specificity/specificity.go deleted file mode 100644 index 4a188b3bb..000000000 --- a/vendor/github.com/daixiang0/gci/pkg/specificity/specificity.go +++ /dev/null @@ -1,28 +0,0 @@ -package specificity - -type specificityClass int - -const ( - 
MisMatchClass = 0 - DefaultClass = 10 - StandardClass = 20 - MatchClass = 30 - NameClass = 40 - LocalModuleClass = 50 -) - -// MatchSpecificity is used to determine which section matches an import best -type MatchSpecificity interface { - IsMoreSpecific(than MatchSpecificity) bool - Equal(to MatchSpecificity) bool - class() specificityClass -} - -func isMoreSpecific(this, than MatchSpecificity) bool { - return this.class() > than.class() -} - -func equalSpecificity(base, to MatchSpecificity) bool { - // m.class() == to.class() would not work for Match - return !base.IsMoreSpecific(to) && !to.IsMoreSpecific(base) -} diff --git a/vendor/github.com/daixiang0/gci/pkg/specificity/standard.go b/vendor/github.com/daixiang0/gci/pkg/specificity/standard.go deleted file mode 100644 index 72ccaf7e1..000000000 --- a/vendor/github.com/daixiang0/gci/pkg/specificity/standard.go +++ /dev/null @@ -1,19 +0,0 @@ -package specificity - -type StandardMatch struct{} - -func (s StandardMatch) IsMoreSpecific(than MatchSpecificity) bool { - return isMoreSpecific(s, than) -} - -func (s StandardMatch) Equal(to MatchSpecificity) bool { - return equalSpecificity(s, to) -} - -func (s StandardMatch) class() specificityClass { - return StandardClass -} - -func (s StandardMatch) String() string { - return "Standard" -} diff --git a/vendor/github.com/daixiang0/gci/pkg/utils/constants.go b/vendor/github.com/daixiang0/gci/pkg/utils/constants.go deleted file mode 100644 index 2fafbc32c..000000000 --- a/vendor/github.com/daixiang0/gci/pkg/utils/constants.go +++ /dev/null @@ -1,12 +0,0 @@ -package utils - -const ( - Indent = '\t' - Linebreak = '\n' - WinLinebreak = '\r' - - Colon = ":" - - LeftParenthesis = '(' - RightParenthesis = ')' -) diff --git a/vendor/github.com/davecgh/go-spew/LICENSE b/vendor/github.com/davecgh/go-spew/LICENSE deleted file mode 100644 index bc52e96f2..000000000 --- a/vendor/github.com/davecgh/go-spew/LICENSE +++ /dev/null @@ -1,15 +0,0 @@ -ISC License - -Copyright (c) 2012-2016 Dave Collins - -Permission to use, copy, modify, and/or distribute this software for any -purpose with or without fee is hereby granted, provided that the above -copyright notice and this permission notice appear in all copies. - -THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES -WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR -ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF -OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/vendor/github.com/davecgh/go-spew/spew/bypass.go b/vendor/github.com/davecgh/go-spew/spew/bypass.go deleted file mode 100644 index 792994785..000000000 --- a/vendor/github.com/davecgh/go-spew/spew/bypass.go +++ /dev/null @@ -1,145 +0,0 @@ -// Copyright (c) 2015-2016 Dave Collins -// -// Permission to use, copy, modify, and distribute this software for any -// purpose with or without fee is hereby granted, provided that the above -// copyright notice and this permission notice appear in all copies. -// -// THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES -// WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -// MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR -// ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -// WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -// ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF -// OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -// NOTE: Due to the following build constraints, this file will only be compiled -// when the code is not running on Google App Engine, compiled by GopherJS, and -// "-tags safe" is not added to the go build command line. The "disableunsafe" -// tag is deprecated and thus should not be used. -// Go versions prior to 1.4 are disabled because they use a different layout -// for interfaces which make the implementation of unsafeReflectValue more complex. -// +build !js,!appengine,!safe,!disableunsafe,go1.4 - -package spew - -import ( - "reflect" - "unsafe" -) - -const ( - // UnsafeDisabled is a build-time constant which specifies whether or - // not access to the unsafe package is available. - UnsafeDisabled = false - - // ptrSize is the size of a pointer on the current arch. - ptrSize = unsafe.Sizeof((*byte)(nil)) -) - -type flag uintptr - -var ( - // flagRO indicates whether the value field of a reflect.Value - // is read-only. - flagRO flag - - // flagAddr indicates whether the address of the reflect.Value's - // value may be taken. - flagAddr flag -) - -// flagKindMask holds the bits that make up the kind -// part of the flags field. In all the supported versions, -// it is in the lower 5 bits. -const flagKindMask = flag(0x1f) - -// Different versions of Go have used different -// bit layouts for the flags type. This table -// records the known combinations. -var okFlags = []struct { - ro, addr flag -}{{ - // From Go 1.4 to 1.5 - ro: 1 << 5, - addr: 1 << 7, -}, { - // Up to Go tip. - ro: 1<<5 | 1<<6, - addr: 1 << 8, -}} - -var flagValOffset = func() uintptr { - field, ok := reflect.TypeOf(reflect.Value{}).FieldByName("flag") - if !ok { - panic("reflect.Value has no flag field") - } - return field.Offset -}() - -// flagField returns a pointer to the flag field of a reflect.Value. -func flagField(v *reflect.Value) *flag { - return (*flag)(unsafe.Pointer(uintptr(unsafe.Pointer(v)) + flagValOffset)) -} - -// unsafeReflectValue converts the passed reflect.Value into a one that bypasses -// the typical safety restrictions preventing access to unaddressable and -// unexported data. It works by digging the raw pointer to the underlying -// value out of the protected value and generating a new unprotected (unsafe) -// reflect.Value to it. -// -// This allows us to check for implementations of the Stringer and error -// interfaces to be used for pretty printing ordinarily unaddressable and -// inaccessible values such as unexported struct fields. -func unsafeReflectValue(v reflect.Value) reflect.Value { - if !v.IsValid() || (v.CanInterface() && v.CanAddr()) { - return v - } - flagFieldPtr := flagField(&v) - *flagFieldPtr &^= flagRO - *flagFieldPtr |= flagAddr - return v -} - -// Sanity checks against future reflect package changes -// to the type or semantics of the Value.flag field. -func init() { - field, ok := reflect.TypeOf(reflect.Value{}).FieldByName("flag") - if !ok { - panic("reflect.Value has no flag field") - } - if field.Type.Kind() != reflect.TypeOf(flag(0)).Kind() { - panic("reflect.Value flag field has changed kind") - } - type t0 int - var t struct { - A t0 - // t0 will have flagEmbedRO set. 
- t0 - // a will have flagStickyRO set - a t0 - } - vA := reflect.ValueOf(t).FieldByName("A") - va := reflect.ValueOf(t).FieldByName("a") - vt0 := reflect.ValueOf(t).FieldByName("t0") - - // Infer flagRO from the difference between the flags - // for the (otherwise identical) fields in t. - flagPublic := *flagField(&vA) - flagWithRO := *flagField(&va) | *flagField(&vt0) - flagRO = flagPublic ^ flagWithRO - - // Infer flagAddr from the difference between a value - // taken from a pointer and not. - vPtrA := reflect.ValueOf(&t).Elem().FieldByName("A") - flagNoPtr := *flagField(&vA) - flagPtr := *flagField(&vPtrA) - flagAddr = flagNoPtr ^ flagPtr - - // Check that the inferred flags tally with one of the known versions. - for _, f := range okFlags { - if flagRO == f.ro && flagAddr == f.addr { - return - } - } - panic("reflect.Value read-only flag has changed semantics") -} diff --git a/vendor/github.com/davecgh/go-spew/spew/bypasssafe.go b/vendor/github.com/davecgh/go-spew/spew/bypasssafe.go deleted file mode 100644 index 205c28d68..000000000 --- a/vendor/github.com/davecgh/go-spew/spew/bypasssafe.go +++ /dev/null @@ -1,38 +0,0 @@ -// Copyright (c) 2015-2016 Dave Collins -// -// Permission to use, copy, modify, and distribute this software for any -// purpose with or without fee is hereby granted, provided that the above -// copyright notice and this permission notice appear in all copies. -// -// THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES -// WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -// MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR -// ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -// WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -// ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF -// OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -// NOTE: Due to the following build constraints, this file will only be compiled -// when the code is running on Google App Engine, compiled by GopherJS, or -// "-tags safe" is added to the go build command line. The "disableunsafe" -// tag is deprecated and thus should not be used. -// +build js appengine safe disableunsafe !go1.4 - -package spew - -import "reflect" - -const ( - // UnsafeDisabled is a build-time constant which specifies whether or - // not access to the unsafe package is available. - UnsafeDisabled = true -) - -// unsafeReflectValue typically converts the passed reflect.Value into a one -// that bypasses the typical safety restrictions preventing access to -// unaddressable and unexported data. However, doing this relies on access to -// the unsafe package. This is a stub version which simply returns the passed -// reflect.Value when the unsafe package is not available. -func unsafeReflectValue(v reflect.Value) reflect.Value { - return v -} diff --git a/vendor/github.com/davecgh/go-spew/spew/common.go b/vendor/github.com/davecgh/go-spew/spew/common.go deleted file mode 100644 index 1be8ce945..000000000 --- a/vendor/github.com/davecgh/go-spew/spew/common.go +++ /dev/null @@ -1,341 +0,0 @@ -/* - * Copyright (c) 2013-2016 Dave Collins - * - * Permission to use, copy, modify, and distribute this software for any - * purpose with or without fee is hereby granted, provided that the above - * copyright notice and this permission notice appear in all copies. 
- * - * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES - * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF - * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR - * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES - * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN - * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF - * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - */ - -package spew - -import ( - "bytes" - "fmt" - "io" - "reflect" - "sort" - "strconv" -) - -// Some constants in the form of bytes to avoid string overhead. This mirrors -// the technique used in the fmt package. -var ( - panicBytes = []byte("(PANIC=") - plusBytes = []byte("+") - iBytes = []byte("i") - trueBytes = []byte("true") - falseBytes = []byte("false") - interfaceBytes = []byte("(interface {})") - commaNewlineBytes = []byte(",\n") - newlineBytes = []byte("\n") - openBraceBytes = []byte("{") - openBraceNewlineBytes = []byte("{\n") - closeBraceBytes = []byte("}") - asteriskBytes = []byte("*") - colonBytes = []byte(":") - colonSpaceBytes = []byte(": ") - openParenBytes = []byte("(") - closeParenBytes = []byte(")") - spaceBytes = []byte(" ") - pointerChainBytes = []byte("->") - nilAngleBytes = []byte("<nil>") - maxNewlineBytes = []byte("<max depth reached>\n") - maxShortBytes = []byte("<max>") - circularBytes = []byte("<shown>") - circularShortBytes = []byte("<shown>") - invalidAngleBytes = []byte("<invalid>") - openBracketBytes = []byte("[") - closeBracketBytes = []byte("]") - percentBytes = []byte("%") - precisionBytes = []byte(".") - openAngleBytes = []byte("<") - closeAngleBytes = []byte(">") - openMapBytes = []byte("map[") - closeMapBytes = []byte("]") - lenEqualsBytes = []byte("len=") - capEqualsBytes = []byte("cap=") -) - -// hexDigits is used to map a decimal value to a hex digit. -var hexDigits = "0123456789abcdef" - -// catchPanic handles any panics that might occur during the handleMethods -// calls. -func catchPanic(w io.Writer, v reflect.Value) { - if err := recover(); err != nil { - w.Write(panicBytes) - fmt.Fprintf(w, "%v", err) - w.Write(closeParenBytes) - } -} - -// handleMethods attempts to call the Error and String methods on the underlying -// type the passed reflect.Value represents and outputes the result to Writer w. -// -// It handles panics in any called methods by catching and displaying the error -// as the formatted value. -func handleMethods(cs *ConfigState, w io.Writer, v reflect.Value) (handled bool) { - // We need an interface to check if the type implements the error or - // Stringer interface. However, the reflect package won't give us an - // interface on certain things like unexported struct fields in order - // to enforce visibility rules. We use unsafe, when it's available, - // to bypass these restrictions since this package does not mutate the - // values. - if !v.CanInterface() { - if UnsafeDisabled { - return false - } - - v = unsafeReflectValue(v) - } - - // Choose whether or not to do error and Stringer interface lookups against - // the base type or a pointer to the base type depending on settings. - // Technically calling one of these methods with a pointer receiver can - // mutate the value, however, types which choose to satisify an error or - // Stringer interface with a pointer receiver should not be mutating their - // state inside these interface methods. 
- if !cs.DisablePointerMethods && !UnsafeDisabled && !v.CanAddr() { - v = unsafeReflectValue(v) - } - if v.CanAddr() { - v = v.Addr() - } - - // Is it an error or Stringer? - switch iface := v.Interface().(type) { - case error: - defer catchPanic(w, v) - if cs.ContinueOnMethod { - w.Write(openParenBytes) - w.Write([]byte(iface.Error())) - w.Write(closeParenBytes) - w.Write(spaceBytes) - return false - } - - w.Write([]byte(iface.Error())) - return true - - case fmt.Stringer: - defer catchPanic(w, v) - if cs.ContinueOnMethod { - w.Write(openParenBytes) - w.Write([]byte(iface.String())) - w.Write(closeParenBytes) - w.Write(spaceBytes) - return false - } - w.Write([]byte(iface.String())) - return true - } - return false -} - -// printBool outputs a boolean value as true or false to Writer w. -func printBool(w io.Writer, val bool) { - if val { - w.Write(trueBytes) - } else { - w.Write(falseBytes) - } -} - -// printInt outputs a signed integer value to Writer w. -func printInt(w io.Writer, val int64, base int) { - w.Write([]byte(strconv.FormatInt(val, base))) -} - -// printUint outputs an unsigned integer value to Writer w. -func printUint(w io.Writer, val uint64, base int) { - w.Write([]byte(strconv.FormatUint(val, base))) -} - -// printFloat outputs a floating point value using the specified precision, -// which is expected to be 32 or 64bit, to Writer w. -func printFloat(w io.Writer, val float64, precision int) { - w.Write([]byte(strconv.FormatFloat(val, 'g', -1, precision))) -} - -// printComplex outputs a complex value using the specified float precision -// for the real and imaginary parts to Writer w. -func printComplex(w io.Writer, c complex128, floatPrecision int) { - r := real(c) - w.Write(openParenBytes) - w.Write([]byte(strconv.FormatFloat(r, 'g', -1, floatPrecision))) - i := imag(c) - if i >= 0 { - w.Write(plusBytes) - } - w.Write([]byte(strconv.FormatFloat(i, 'g', -1, floatPrecision))) - w.Write(iBytes) - w.Write(closeParenBytes) -} - -// printHexPtr outputs a uintptr formatted as hexadecimal with a leading '0x' -// prefix to Writer w. -func printHexPtr(w io.Writer, p uintptr) { - // Null pointer. - num := uint64(p) - if num == 0 { - w.Write(nilAngleBytes) - return - } - - // Max uint64 is 16 bytes in hex + 2 bytes for '0x' prefix - buf := make([]byte, 18) - - // It's simpler to construct the hex string right to left. - base := uint64(16) - i := len(buf) - 1 - for num >= base { - buf[i] = hexDigits[num%base] - num /= base - i-- - } - buf[i] = hexDigits[num] - - // Add '0x' prefix. - i-- - buf[i] = 'x' - i-- - buf[i] = '0' - - // Strip unused leading bytes. - buf = buf[i:] - w.Write(buf) -} - -// valuesSorter implements sort.Interface to allow a slice of reflect.Value -// elements to be sorted. -type valuesSorter struct { - values []reflect.Value - strings []string // either nil or same len and values - cs *ConfigState -} - -// newValuesSorter initializes a valuesSorter instance, which holds a set of -// surrogate keys on which the data should be sorted. It uses flags in -// ConfigState to decide if and how to populate those surrogate keys. 
-func newValuesSorter(values []reflect.Value, cs *ConfigState) sort.Interface { - vs := &valuesSorter{values: values, cs: cs} - if canSortSimply(vs.values[0].Kind()) { - return vs - } - if !cs.DisableMethods { - vs.strings = make([]string, len(values)) - for i := range vs.values { - b := bytes.Buffer{} - if !handleMethods(cs, &b, vs.values[i]) { - vs.strings = nil - break - } - vs.strings[i] = b.String() - } - } - if vs.strings == nil && cs.SpewKeys { - vs.strings = make([]string, len(values)) - for i := range vs.values { - vs.strings[i] = Sprintf("%#v", vs.values[i].Interface()) - } - } - return vs -} - -// canSortSimply tests whether a reflect.Kind is a primitive that can be sorted -// directly, or whether it should be considered for sorting by surrogate keys -// (if the ConfigState allows it). -func canSortSimply(kind reflect.Kind) bool { - // This switch parallels valueSortLess, except for the default case. - switch kind { - case reflect.Bool: - return true - case reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64, reflect.Int: - return true - case reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uint: - return true - case reflect.Float32, reflect.Float64: - return true - case reflect.String: - return true - case reflect.Uintptr: - return true - case reflect.Array: - return true - } - return false -} - -// Len returns the number of values in the slice. It is part of the -// sort.Interface implementation. -func (s *valuesSorter) Len() int { - return len(s.values) -} - -// Swap swaps the values at the passed indices. It is part of the -// sort.Interface implementation. -func (s *valuesSorter) Swap(i, j int) { - s.values[i], s.values[j] = s.values[j], s.values[i] - if s.strings != nil { - s.strings[i], s.strings[j] = s.strings[j], s.strings[i] - } -} - -// valueSortLess returns whether the first value should sort before the second -// value. It is used by valueSorter.Less as part of the sort.Interface -// implementation. -func valueSortLess(a, b reflect.Value) bool { - switch a.Kind() { - case reflect.Bool: - return !a.Bool() && b.Bool() - case reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64, reflect.Int: - return a.Int() < b.Int() - case reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uint: - return a.Uint() < b.Uint() - case reflect.Float32, reflect.Float64: - return a.Float() < b.Float() - case reflect.String: - return a.String() < b.String() - case reflect.Uintptr: - return a.Uint() < b.Uint() - case reflect.Array: - // Compare the contents of both arrays. - l := a.Len() - for i := 0; i < l; i++ { - av := a.Index(i) - bv := b.Index(i) - if av.Interface() == bv.Interface() { - continue - } - return valueSortLess(av, bv) - } - } - return a.String() < b.String() -} - -// Less returns whether the value at index i should sort before the -// value at index j. It is part of the sort.Interface implementation. -func (s *valuesSorter) Less(i, j int) bool { - if s.strings == nil { - return valueSortLess(s.values[i], s.values[j]) - } - return s.strings[i] < s.strings[j] -} - -// sortValues is a sort function that handles both native types and any type that -// can be converted to error or Stringer. Other inputs are sorted according to -// their Value.String() value to ensure display stability. 
-func sortValues(values []reflect.Value, cs *ConfigState) { - if len(values) == 0 { - return - } - sort.Sort(newValuesSorter(values, cs)) -} diff --git a/vendor/github.com/davecgh/go-spew/spew/config.go b/vendor/github.com/davecgh/go-spew/spew/config.go deleted file mode 100644 index 2e3d22f31..000000000 --- a/vendor/github.com/davecgh/go-spew/spew/config.go +++ /dev/null @@ -1,306 +0,0 @@ -/* - * Copyright (c) 2013-2016 Dave Collins - * - * Permission to use, copy, modify, and distribute this software for any - * purpose with or without fee is hereby granted, provided that the above - * copyright notice and this permission notice appear in all copies. - * - * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES - * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF - * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR - * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES - * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN - * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF - * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - */ - -package spew - -import ( - "bytes" - "fmt" - "io" - "os" -) - -// ConfigState houses the configuration options used by spew to format and -// display values. There is a global instance, Config, that is used to control -// all top-level Formatter and Dump functionality. Each ConfigState instance -// provides methods equivalent to the top-level functions. -// -// The zero value for ConfigState provides no indentation. You would typically -// want to set it to a space or a tab. -// -// Alternatively, you can use NewDefaultConfig to get a ConfigState instance -// with default settings. See the documentation of NewDefaultConfig for default -// values. -type ConfigState struct { - // Indent specifies the string to use for each indentation level. The - // global config instance that all top-level functions use set this to a - // single space by default. If you would like more indentation, you might - // set this to a tab with "\t" or perhaps two spaces with " ". - Indent string - - // MaxDepth controls the maximum number of levels to descend into nested - // data structures. The default, 0, means there is no limit. - // - // NOTE: Circular data structures are properly detected, so it is not - // necessary to set this value unless you specifically want to limit deeply - // nested data structures. - MaxDepth int - - // DisableMethods specifies whether or not error and Stringer interfaces are - // invoked for types that implement them. - DisableMethods bool - - // DisablePointerMethods specifies whether or not to check for and invoke - // error and Stringer interfaces on types which only accept a pointer - // receiver when the current type is not a pointer. - // - // NOTE: This might be an unsafe action since calling one of these methods - // with a pointer receiver could technically mutate the value, however, - // in practice, types which choose to satisify an error or Stringer - // interface with a pointer receiver should not be mutating their state - // inside these interface methods. As a result, this option relies on - // access to the unsafe package, so it will not have any effect when - // running in environments without access to the unsafe package such as - // Google App Engine or with the "safe" build tag specified. 
- DisablePointerMethods bool - - // DisablePointerAddresses specifies whether to disable the printing of - // pointer addresses. This is useful when diffing data structures in tests. - DisablePointerAddresses bool - - // DisableCapacities specifies whether to disable the printing of capacities - // for arrays, slices, maps and channels. This is useful when diffing - // data structures in tests. - DisableCapacities bool - - // ContinueOnMethod specifies whether or not recursion should continue once - // a custom error or Stringer interface is invoked. The default, false, - // means it will print the results of invoking the custom error or Stringer - // interface and return immediately instead of continuing to recurse into - // the internals of the data type. - // - // NOTE: This flag does not have any effect if method invocation is disabled - // via the DisableMethods or DisablePointerMethods options. - ContinueOnMethod bool - - // SortKeys specifies map keys should be sorted before being printed. Use - // this to have a more deterministic, diffable output. Note that only - // native types (bool, int, uint, floats, uintptr and string) and types - // that support the error or Stringer interfaces (if methods are - // enabled) are supported, with other types sorted according to the - // reflect.Value.String() output which guarantees display stability. - SortKeys bool - - // SpewKeys specifies that, as a last resort attempt, map keys should - // be spewed to strings and sorted by those strings. This is only - // considered if SortKeys is true. - SpewKeys bool -} - -// Config is the active configuration of the top-level functions. -// The configuration can be changed by modifying the contents of spew.Config. -var Config = ConfigState{Indent: " "} - -// Errorf is a wrapper for fmt.Errorf that treats each argument as if it were -// passed with a Formatter interface returned by c.NewFormatter. It returns -// the formatted string as a value that satisfies error. See NewFormatter -// for formatting details. -// -// This function is shorthand for the following syntax: -// -// fmt.Errorf(format, c.NewFormatter(a), c.NewFormatter(b)) -func (c *ConfigState) Errorf(format string, a ...interface{}) (err error) { - return fmt.Errorf(format, c.convertArgs(a)...) -} - -// Fprint is a wrapper for fmt.Fprint that treats each argument as if it were -// passed with a Formatter interface returned by c.NewFormatter. It returns -// the number of bytes written and any write error encountered. See -// NewFormatter for formatting details. -// -// This function is shorthand for the following syntax: -// -// fmt.Fprint(w, c.NewFormatter(a), c.NewFormatter(b)) -func (c *ConfigState) Fprint(w io.Writer, a ...interface{}) (n int, err error) { - return fmt.Fprint(w, c.convertArgs(a)...) -} - -// Fprintf is a wrapper for fmt.Fprintf that treats each argument as if it were -// passed with a Formatter interface returned by c.NewFormatter. It returns -// the number of bytes written and any write error encountered. See -// NewFormatter for formatting details. -// -// This function is shorthand for the following syntax: -// -// fmt.Fprintf(w, format, c.NewFormatter(a), c.NewFormatter(b)) -func (c *ConfigState) Fprintf(w io.Writer, format string, a ...interface{}) (n int, err error) { - return fmt.Fprintf(w, format, c.convertArgs(a)...) -} - -// Fprintln is a wrapper for fmt.Fprintln that treats each argument as if it -// passed with a Formatter interface returned by c.NewFormatter. See -// NewFormatter for formatting details. 
-// -// This function is shorthand for the following syntax: -// -// fmt.Fprintln(w, c.NewFormatter(a), c.NewFormatter(b)) -func (c *ConfigState) Fprintln(w io.Writer, a ...interface{}) (n int, err error) { - return fmt.Fprintln(w, c.convertArgs(a)...) -} - -// Print is a wrapper for fmt.Print that treats each argument as if it were -// passed with a Formatter interface returned by c.NewFormatter. It returns -// the number of bytes written and any write error encountered. See -// NewFormatter for formatting details. -// -// This function is shorthand for the following syntax: -// -// fmt.Print(c.NewFormatter(a), c.NewFormatter(b)) -func (c *ConfigState) Print(a ...interface{}) (n int, err error) { - return fmt.Print(c.convertArgs(a)...) -} - -// Printf is a wrapper for fmt.Printf that treats each argument as if it were -// passed with a Formatter interface returned by c.NewFormatter. It returns -// the number of bytes written and any write error encountered. See -// NewFormatter for formatting details. -// -// This function is shorthand for the following syntax: -// -// fmt.Printf(format, c.NewFormatter(a), c.NewFormatter(b)) -func (c *ConfigState) Printf(format string, a ...interface{}) (n int, err error) { - return fmt.Printf(format, c.convertArgs(a)...) -} - -// Println is a wrapper for fmt.Println that treats each argument as if it were -// passed with a Formatter interface returned by c.NewFormatter. It returns -// the number of bytes written and any write error encountered. See -// NewFormatter for formatting details. -// -// This function is shorthand for the following syntax: -// -// fmt.Println(c.NewFormatter(a), c.NewFormatter(b)) -func (c *ConfigState) Println(a ...interface{}) (n int, err error) { - return fmt.Println(c.convertArgs(a)...) -} - -// Sprint is a wrapper for fmt.Sprint that treats each argument as if it were -// passed with a Formatter interface returned by c.NewFormatter. It returns -// the resulting string. See NewFormatter for formatting details. -// -// This function is shorthand for the following syntax: -// -// fmt.Sprint(c.NewFormatter(a), c.NewFormatter(b)) -func (c *ConfigState) Sprint(a ...interface{}) string { - return fmt.Sprint(c.convertArgs(a)...) -} - -// Sprintf is a wrapper for fmt.Sprintf that treats each argument as if it were -// passed with a Formatter interface returned by c.NewFormatter. It returns -// the resulting string. See NewFormatter for formatting details. -// -// This function is shorthand for the following syntax: -// -// fmt.Sprintf(format, c.NewFormatter(a), c.NewFormatter(b)) -func (c *ConfigState) Sprintf(format string, a ...interface{}) string { - return fmt.Sprintf(format, c.convertArgs(a)...) -} - -// Sprintln is a wrapper for fmt.Sprintln that treats each argument as if it -// were passed with a Formatter interface returned by c.NewFormatter. It -// returns the resulting string. See NewFormatter for formatting details. -// -// This function is shorthand for the following syntax: -// -// fmt.Sprintln(c.NewFormatter(a), c.NewFormatter(b)) -func (c *ConfigState) Sprintln(a ...interface{}) string { - return fmt.Sprintln(c.convertArgs(a)...) -} - -/* -NewFormatter returns a custom formatter that satisfies the fmt.Formatter -interface. As a result, it integrates cleanly with standard fmt package -printing functions. The formatter is useful for inline printing of smaller data -types similar to the standard %v format specifier. 
- -The custom formatter only responds to the %v (most compact), %+v (adds pointer -addresses), %#v (adds types), and %#+v (adds types and pointer addresses) verb -combinations. Any other verbs such as %x and %q will be sent to the the -standard fmt package for formatting. In addition, the custom formatter ignores -the width and precision arguments (however they will still work on the format -specifiers not handled by the custom formatter). - -Typically this function shouldn't be called directly. It is much easier to make -use of the custom formatter by calling one of the convenience functions such as -c.Printf, c.Println, or c.Printf. -*/ -func (c *ConfigState) NewFormatter(v interface{}) fmt.Formatter { - return newFormatter(c, v) -} - -// Fdump formats and displays the passed arguments to io.Writer w. It formats -// exactly the same as Dump. -func (c *ConfigState) Fdump(w io.Writer, a ...interface{}) { - fdump(c, w, a...) -} - -/* -Dump displays the passed parameters to standard out with newlines, customizable -indentation, and additional debug information such as complete types and all -pointer addresses used to indirect to the final value. It provides the -following features over the built-in printing facilities provided by the fmt -package: - - * Pointers are dereferenced and followed - * Circular data structures are detected and handled properly - * Custom Stringer/error interfaces are optionally invoked, including - on unexported types - * Custom types which only implement the Stringer/error interfaces via - a pointer receiver are optionally invoked when passing non-pointer - variables - * Byte arrays and slices are dumped like the hexdump -C command which - includes offsets, byte values in hex, and ASCII output - -The configuration options are controlled by modifying the public members -of c. See ConfigState for options documentation. - -See Fdump if you would prefer dumping to an arbitrary io.Writer or Sdump to -get the formatted result as a string. -*/ -func (c *ConfigState) Dump(a ...interface{}) { - fdump(c, os.Stdout, a...) -} - -// Sdump returns a string with the passed arguments formatted exactly the same -// as Dump. -func (c *ConfigState) Sdump(a ...interface{}) string { - var buf bytes.Buffer - fdump(c, &buf, a...) - return buf.String() -} - -// convertArgs accepts a slice of arguments and returns a slice of the same -// length with each argument converted to a spew Formatter interface using -// the ConfigState associated with s. -func (c *ConfigState) convertArgs(args []interface{}) (formatters []interface{}) { - formatters = make([]interface{}, len(args)) - for index, arg := range args { - formatters[index] = newFormatter(c, arg) - } - return formatters -} - -// NewDefaultConfig returns a ConfigState with the following default settings. -// -// Indent: " " -// MaxDepth: 0 -// DisableMethods: false -// DisablePointerMethods: false -// ContinueOnMethod: false -// SortKeys: false -func NewDefaultConfig() *ConfigState { - return &ConfigState{Indent: " "} -} diff --git a/vendor/github.com/davecgh/go-spew/spew/doc.go b/vendor/github.com/davecgh/go-spew/spew/doc.go deleted file mode 100644 index aacaac6f1..000000000 --- a/vendor/github.com/davecgh/go-spew/spew/doc.go +++ /dev/null @@ -1,211 +0,0 @@ -/* - * Copyright (c) 2013-2016 Dave Collins - * - * Permission to use, copy, modify, and distribute this software for any - * purpose with or without fee is hereby granted, provided that the above - * copyright notice and this permission notice appear in all copies. 
- * - * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES - * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF - * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR - * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES - * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN - * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF - * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - */ - -/* -Package spew implements a deep pretty printer for Go data structures to aid in -debugging. - -A quick overview of the additional features spew provides over the built-in -printing facilities for Go data types are as follows: - - * Pointers are dereferenced and followed - * Circular data structures are detected and handled properly - * Custom Stringer/error interfaces are optionally invoked, including - on unexported types - * Custom types which only implement the Stringer/error interfaces via - a pointer receiver are optionally invoked when passing non-pointer - variables - * Byte arrays and slices are dumped like the hexdump -C command which - includes offsets, byte values in hex, and ASCII output (only when using - Dump style) - -There are two different approaches spew allows for dumping Go data structures: - - * Dump style which prints with newlines, customizable indentation, - and additional debug information such as types and all pointer addresses - used to indirect to the final value - * A custom Formatter interface that integrates cleanly with the standard fmt - package and replaces %v, %+v, %#v, and %#+v to provide inline printing - similar to the default %v while providing the additional functionality - outlined above and passing unsupported format verbs such as %x and %q - along to fmt - -Quick Start - -This section demonstrates how to quickly get started with spew. See the -sections below for further details on formatting and configuration options. - -To dump a variable with full newlines, indentation, type, and pointer -information use Dump, Fdump, or Sdump: - spew.Dump(myVar1, myVar2, ...) - spew.Fdump(someWriter, myVar1, myVar2, ...) - str := spew.Sdump(myVar1, myVar2, ...) - -Alternatively, if you would prefer to use format strings with a compacted inline -printing style, use the convenience wrappers Printf, Fprintf, etc with -%v (most compact), %+v (adds pointer addresses), %#v (adds types), or -%#+v (adds types and pointer addresses): - spew.Printf("myVar1: %v -- myVar2: %+v", myVar1, myVar2) - spew.Printf("myVar3: %#v -- myVar4: %#+v", myVar3, myVar4) - spew.Fprintf(someWriter, "myVar1: %v -- myVar2: %+v", myVar1, myVar2) - spew.Fprintf(someWriter, "myVar3: %#v -- myVar4: %#+v", myVar3, myVar4) - -Configuration Options - -Configuration of spew is handled by fields in the ConfigState type. For -convenience, all of the top-level functions use a global state available -via the spew.Config global. - -It is also possible to create a ConfigState instance that provides methods -equivalent to the top-level functions. This allows concurrent configuration -options. See the ConfigState documentation for more details. - -The following configuration options are available: - * Indent - String to use for each indentation level for Dump functions. - It is a single space by default. A popular alternative is "\t". - - * MaxDepth - Maximum number of levels to descend into nested data structures. - There is no limit by default. 
- - * DisableMethods - Disables invocation of error and Stringer interface methods. - Method invocation is enabled by default. - - * DisablePointerMethods - Disables invocation of error and Stringer interface methods on types - which only accept pointer receivers from non-pointer variables. - Pointer method invocation is enabled by default. - - * DisablePointerAddresses - DisablePointerAddresses specifies whether to disable the printing of - pointer addresses. This is useful when diffing data structures in tests. - - * DisableCapacities - DisableCapacities specifies whether to disable the printing of - capacities for arrays, slices, maps and channels. This is useful when - diffing data structures in tests. - - * ContinueOnMethod - Enables recursion into types after invoking error and Stringer interface - methods. Recursion after method invocation is disabled by default. - - * SortKeys - Specifies map keys should be sorted before being printed. Use - this to have a more deterministic, diffable output. Note that - only native types (bool, int, uint, floats, uintptr and string) - and types which implement error or Stringer interfaces are - supported with other types sorted according to the - reflect.Value.String() output which guarantees display - stability. Natural map order is used by default. - - * SpewKeys - Specifies that, as a last resort attempt, map keys should be - spewed to strings and sorted by those strings. This is only - considered if SortKeys is true. - -Dump Usage - -Simply call spew.Dump with a list of variables you want to dump: - - spew.Dump(myVar1, myVar2, ...) - -You may also call spew.Fdump if you would prefer to output to an arbitrary -io.Writer. For example, to dump to standard error: - - spew.Fdump(os.Stderr, myVar1, myVar2, ...) - -A third option is to call spew.Sdump to get the formatted output as a string: - - str := spew.Sdump(myVar1, myVar2, ...) - -Sample Dump Output - -See the Dump example for details on the setup of the types and variables being -shown here. - - (main.Foo) { - unexportedField: (*main.Bar)(0xf84002e210)({ - flag: (main.Flag) flagTwo, - data: (uintptr) - }), - ExportedField: (map[interface {}]interface {}) (len=1) { - (string) (len=3) "one": (bool) true - } - } - -Byte (and uint8) arrays and slices are displayed uniquely like the hexdump -C -command as shown. - ([]uint8) (len=32 cap=32) { - 00000000 11 12 13 14 15 16 17 18 19 1a 1b 1c 1d 1e 1f 20 |............... | - 00000010 21 22 23 24 25 26 27 28 29 2a 2b 2c 2d 2e 2f 30 |!"#$%&'()*+,-./0| - 00000020 31 32 |12| - } - -Custom Formatter - -Spew provides a custom formatter that implements the fmt.Formatter interface -so that it integrates cleanly with standard fmt package printing functions. The -formatter is useful for inline printing of smaller data types similar to the -standard %v format specifier. - -The custom formatter only responds to the %v (most compact), %+v (adds pointer -addresses), %#v (adds types), or %#+v (adds types and pointer addresses) verb -combinations. Any other verbs such as %x and %q will be sent to the the -standard fmt package for formatting. In addition, the custom formatter ignores -the width and precision arguments (however they will still work on the format -specifiers not handled by the custom formatter). - -Custom Formatter Usage - -The simplest way to make use of the spew custom formatter is to call one of the -convenience functions such as spew.Printf, spew.Println, or spew.Printf. 
The -functions have syntax you are most likely already familiar with: - - spew.Printf("myVar1: %v -- myVar2: %+v", myVar1, myVar2) - spew.Printf("myVar3: %#v -- myVar4: %#+v", myVar3, myVar4) - spew.Println(myVar, myVar2) - spew.Fprintf(os.Stderr, "myVar1: %v -- myVar2: %+v", myVar1, myVar2) - spew.Fprintf(os.Stderr, "myVar3: %#v -- myVar4: %#+v", myVar3, myVar4) - -See the Index for the full list convenience functions. - -Sample Formatter Output - -Double pointer to a uint8: - %v: <**>5 - %+v: <**>(0xf8400420d0->0xf8400420c8)5 - %#v: (**uint8)5 - %#+v: (**uint8)(0xf8400420d0->0xf8400420c8)5 - -Pointer to circular struct with a uint8 field and a pointer to itself: - %v: <*>{1 <*>} - %+v: <*>(0xf84003e260){ui8:1 c:<*>(0xf84003e260)} - %#v: (*main.circular){ui8:(uint8)1 c:(*main.circular)} - %#+v: (*main.circular)(0xf84003e260){ui8:(uint8)1 c:(*main.circular)(0xf84003e260)} - -See the Printf example for details on the setup of variables being shown -here. - -Errors - -Since it is possible for custom Stringer/error interfaces to panic, spew -detects them and handles them internally by printing the panic information -inline with the output. Since spew is intended to provide deep pretty printing -capabilities on structures, it intentionally does not return any errors. -*/ -package spew diff --git a/vendor/github.com/davecgh/go-spew/spew/dump.go b/vendor/github.com/davecgh/go-spew/spew/dump.go deleted file mode 100644 index f78d89fc1..000000000 --- a/vendor/github.com/davecgh/go-spew/spew/dump.go +++ /dev/null @@ -1,509 +0,0 @@ -/* - * Copyright (c) 2013-2016 Dave Collins - * - * Permission to use, copy, modify, and distribute this software for any - * purpose with or without fee is hereby granted, provided that the above - * copyright notice and this permission notice appear in all copies. - * - * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES - * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF - * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR - * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES - * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN - * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF - * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - */ - -package spew - -import ( - "bytes" - "encoding/hex" - "fmt" - "io" - "os" - "reflect" - "regexp" - "strconv" - "strings" -) - -var ( - // uint8Type is a reflect.Type representing a uint8. It is used to - // convert cgo types to uint8 slices for hexdumping. - uint8Type = reflect.TypeOf(uint8(0)) - - // cCharRE is a regular expression that matches a cgo char. - // It is used to detect character arrays to hexdump them. - cCharRE = regexp.MustCompile(`^.*\._Ctype_char$`) - - // cUnsignedCharRE is a regular expression that matches a cgo unsigned - // char. It is used to detect unsigned character arrays to hexdump - // them. - cUnsignedCharRE = regexp.MustCompile(`^.*\._Ctype_unsignedchar$`) - - // cUint8tCharRE is a regular expression that matches a cgo uint8_t. - // It is used to detect uint8_t arrays to hexdump them. - cUint8tCharRE = regexp.MustCompile(`^.*\._Ctype_uint8_t$`) -) - -// dumpState contains information about the state of a dump operation. -type dumpState struct { - w io.Writer - depth int - pointers map[uintptr]int - ignoreNextType bool - ignoreNextIndent bool - cs *ConfigState -} - -// indent performs indentation according to the depth level and cs.Indent -// option. 
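For orientation, a minimal sketch of using a dedicated ConfigState instead of the global spew.Config, based on the ConfigState fields and methods documented above; it assumes the package is importable as github.com/davecgh/go-spew/spew, and the payload value is purely illustrative:

    package main

    import "github.com/davecgh/go-spew/spew"

    func main() {
        // A local, deterministic configuration: tab indentation, sorted map
        // keys, and no pointer addresses (handy when diffing output in tests).
        cs := spew.ConfigState{
            Indent:                  "\t",
            SortKeys:                true,
            DisablePointerAddresses: true,
        }

        payload := map[string]interface{}{"a": 1, "b": []byte{0x01, 0x02}}

        cs.Dump(payload)                    // dump style, to stdout
        text := cs.Sdump(payload)           // same output, returned as a string
        cs.Printf("inline: %+v\n", payload) // formatter style, inline
        _ = text
    }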
-func (d *dumpState) indent() { - if d.ignoreNextIndent { - d.ignoreNextIndent = false - return - } - d.w.Write(bytes.Repeat([]byte(d.cs.Indent), d.depth)) -} - -// unpackValue returns values inside of non-nil interfaces when possible. -// This is useful for data types like structs, arrays, slices, and maps which -// can contain varying types packed inside an interface. -func (d *dumpState) unpackValue(v reflect.Value) reflect.Value { - if v.Kind() == reflect.Interface && !v.IsNil() { - v = v.Elem() - } - return v -} - -// dumpPtr handles formatting of pointers by indirecting them as necessary. -func (d *dumpState) dumpPtr(v reflect.Value) { - // Remove pointers at or below the current depth from map used to detect - // circular refs. - for k, depth := range d.pointers { - if depth >= d.depth { - delete(d.pointers, k) - } - } - - // Keep list of all dereferenced pointers to show later. - pointerChain := make([]uintptr, 0) - - // Figure out how many levels of indirection there are by dereferencing - // pointers and unpacking interfaces down the chain while detecting circular - // references. - nilFound := false - cycleFound := false - indirects := 0 - ve := v - for ve.Kind() == reflect.Ptr { - if ve.IsNil() { - nilFound = true - break - } - indirects++ - addr := ve.Pointer() - pointerChain = append(pointerChain, addr) - if pd, ok := d.pointers[addr]; ok && pd < d.depth { - cycleFound = true - indirects-- - break - } - d.pointers[addr] = d.depth - - ve = ve.Elem() - if ve.Kind() == reflect.Interface { - if ve.IsNil() { - nilFound = true - break - } - ve = ve.Elem() - } - } - - // Display type information. - d.w.Write(openParenBytes) - d.w.Write(bytes.Repeat(asteriskBytes, indirects)) - d.w.Write([]byte(ve.Type().String())) - d.w.Write(closeParenBytes) - - // Display pointer information. - if !d.cs.DisablePointerAddresses && len(pointerChain) > 0 { - d.w.Write(openParenBytes) - for i, addr := range pointerChain { - if i > 0 { - d.w.Write(pointerChainBytes) - } - printHexPtr(d.w, addr) - } - d.w.Write(closeParenBytes) - } - - // Display dereferenced value. - d.w.Write(openParenBytes) - switch { - case nilFound: - d.w.Write(nilAngleBytes) - - case cycleFound: - d.w.Write(circularBytes) - - default: - d.ignoreNextType = true - d.dump(ve) - } - d.w.Write(closeParenBytes) -} - -// dumpSlice handles formatting of arrays and slices. Byte (uint8 under -// reflection) arrays and slices are dumped in hexdump -C fashion. -func (d *dumpState) dumpSlice(v reflect.Value) { - // Determine whether this type should be hex dumped or not. Also, - // for types which should be hexdumped, try to use the underlying data - // first, then fall back to trying to convert them to a uint8 slice. - var buf []uint8 - doConvert := false - doHexDump := false - numEntries := v.Len() - if numEntries > 0 { - vt := v.Index(0).Type() - vts := vt.String() - switch { - // C types that need to be converted. - case cCharRE.MatchString(vts): - fallthrough - case cUnsignedCharRE.MatchString(vts): - fallthrough - case cUint8tCharRE.MatchString(vts): - doConvert = true - - // Try to use existing uint8 slices and fall back to converting - // and copying if that fails. - case vt.Kind() == reflect.Uint8: - // We need an addressable interface to convert the type - // to a byte slice. However, the reflect package won't - // give us an interface on certain things like - // unexported struct fields in order to enforce - // visibility rules. 
We use unsafe, when available, to - // bypass these restrictions since this package does not - // mutate the values. - vs := v - if !vs.CanInterface() || !vs.CanAddr() { - vs = unsafeReflectValue(vs) - } - if !UnsafeDisabled { - vs = vs.Slice(0, numEntries) - - // Use the existing uint8 slice if it can be - // type asserted. - iface := vs.Interface() - if slice, ok := iface.([]uint8); ok { - buf = slice - doHexDump = true - break - } - } - - // The underlying data needs to be converted if it can't - // be type asserted to a uint8 slice. - doConvert = true - } - - // Copy and convert the underlying type if needed. - if doConvert && vt.ConvertibleTo(uint8Type) { - // Convert and copy each element into a uint8 byte - // slice. - buf = make([]uint8, numEntries) - for i := 0; i < numEntries; i++ { - vv := v.Index(i) - buf[i] = uint8(vv.Convert(uint8Type).Uint()) - } - doHexDump = true - } - } - - // Hexdump the entire slice as needed. - if doHexDump { - indent := strings.Repeat(d.cs.Indent, d.depth) - str := indent + hex.Dump(buf) - str = strings.Replace(str, "\n", "\n"+indent, -1) - str = strings.TrimRight(str, d.cs.Indent) - d.w.Write([]byte(str)) - return - } - - // Recursively call dump for each item. - for i := 0; i < numEntries; i++ { - d.dump(d.unpackValue(v.Index(i))) - if i < (numEntries - 1) { - d.w.Write(commaNewlineBytes) - } else { - d.w.Write(newlineBytes) - } - } -} - -// dump is the main workhorse for dumping a value. It uses the passed reflect -// value to figure out what kind of object we are dealing with and formats it -// appropriately. It is a recursive function, however circular data structures -// are detected and handled properly. -func (d *dumpState) dump(v reflect.Value) { - // Handle invalid reflect values immediately. - kind := v.Kind() - if kind == reflect.Invalid { - d.w.Write(invalidAngleBytes) - return - } - - // Handle pointers specially. - if kind == reflect.Ptr { - d.indent() - d.dumpPtr(v) - return - } - - // Print type information unless already handled elsewhere. - if !d.ignoreNextType { - d.indent() - d.w.Write(openParenBytes) - d.w.Write([]byte(v.Type().String())) - d.w.Write(closeParenBytes) - d.w.Write(spaceBytes) - } - d.ignoreNextType = false - - // Display length and capacity if the built-in len and cap functions - // work with the value's kind and the len/cap itself is non-zero. - valueLen, valueCap := 0, 0 - switch v.Kind() { - case reflect.Array, reflect.Slice, reflect.Chan: - valueLen, valueCap = v.Len(), v.Cap() - case reflect.Map, reflect.String: - valueLen = v.Len() - } - if valueLen != 0 || !d.cs.DisableCapacities && valueCap != 0 { - d.w.Write(openParenBytes) - if valueLen != 0 { - d.w.Write(lenEqualsBytes) - printInt(d.w, int64(valueLen), 10) - } - if !d.cs.DisableCapacities && valueCap != 0 { - if valueLen != 0 { - d.w.Write(spaceBytes) - } - d.w.Write(capEqualsBytes) - printInt(d.w, int64(valueCap), 10) - } - d.w.Write(closeParenBytes) - d.w.Write(spaceBytes) - } - - // Call Stringer/error interfaces if they exist and the handle methods flag - // is enabled - if !d.cs.DisableMethods { - if (kind != reflect.Invalid) && (kind != reflect.Interface) { - if handled := handleMethods(d.cs, d.w, v); handled { - return - } - } - } - - switch kind { - case reflect.Invalid: - // Do nothing. We should never get here since invalid has already - // been handled above. 
- - case reflect.Bool: - printBool(d.w, v.Bool()) - - case reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64, reflect.Int: - printInt(d.w, v.Int(), 10) - - case reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uint: - printUint(d.w, v.Uint(), 10) - - case reflect.Float32: - printFloat(d.w, v.Float(), 32) - - case reflect.Float64: - printFloat(d.w, v.Float(), 64) - - case reflect.Complex64: - printComplex(d.w, v.Complex(), 32) - - case reflect.Complex128: - printComplex(d.w, v.Complex(), 64) - - case reflect.Slice: - if v.IsNil() { - d.w.Write(nilAngleBytes) - break - } - fallthrough - - case reflect.Array: - d.w.Write(openBraceNewlineBytes) - d.depth++ - if (d.cs.MaxDepth != 0) && (d.depth > d.cs.MaxDepth) { - d.indent() - d.w.Write(maxNewlineBytes) - } else { - d.dumpSlice(v) - } - d.depth-- - d.indent() - d.w.Write(closeBraceBytes) - - case reflect.String: - d.w.Write([]byte(strconv.Quote(v.String()))) - - case reflect.Interface: - // The only time we should get here is for nil interfaces due to - // unpackValue calls. - if v.IsNil() { - d.w.Write(nilAngleBytes) - } - - case reflect.Ptr: - // Do nothing. We should never get here since pointers have already - // been handled above. - - case reflect.Map: - // nil maps should be indicated as different than empty maps - if v.IsNil() { - d.w.Write(nilAngleBytes) - break - } - - d.w.Write(openBraceNewlineBytes) - d.depth++ - if (d.cs.MaxDepth != 0) && (d.depth > d.cs.MaxDepth) { - d.indent() - d.w.Write(maxNewlineBytes) - } else { - numEntries := v.Len() - keys := v.MapKeys() - if d.cs.SortKeys { - sortValues(keys, d.cs) - } - for i, key := range keys { - d.dump(d.unpackValue(key)) - d.w.Write(colonSpaceBytes) - d.ignoreNextIndent = true - d.dump(d.unpackValue(v.MapIndex(key))) - if i < (numEntries - 1) { - d.w.Write(commaNewlineBytes) - } else { - d.w.Write(newlineBytes) - } - } - } - d.depth-- - d.indent() - d.w.Write(closeBraceBytes) - - case reflect.Struct: - d.w.Write(openBraceNewlineBytes) - d.depth++ - if (d.cs.MaxDepth != 0) && (d.depth > d.cs.MaxDepth) { - d.indent() - d.w.Write(maxNewlineBytes) - } else { - vt := v.Type() - numFields := v.NumField() - for i := 0; i < numFields; i++ { - d.indent() - vtf := vt.Field(i) - d.w.Write([]byte(vtf.Name)) - d.w.Write(colonSpaceBytes) - d.ignoreNextIndent = true - d.dump(d.unpackValue(v.Field(i))) - if i < (numFields - 1) { - d.w.Write(commaNewlineBytes) - } else { - d.w.Write(newlineBytes) - } - } - } - d.depth-- - d.indent() - d.w.Write(closeBraceBytes) - - case reflect.Uintptr: - printHexPtr(d.w, uintptr(v.Uint())) - - case reflect.UnsafePointer, reflect.Chan, reflect.Func: - printHexPtr(d.w, v.Pointer()) - - // There were not any other types at the time this code was written, but - // fall back to letting the default fmt package handle it in case any new - // types are added. - default: - if v.CanInterface() { - fmt.Fprintf(d.w, "%v", v.Interface()) - } else { - fmt.Fprintf(d.w, "%v", v.String()) - } - } -} - -// fdump is a helper function to consolidate the logic from the various public -// methods which take varying writers and config states. -func fdump(cs *ConfigState, w io.Writer, a ...interface{}) { - for _, arg := range a { - if arg == nil { - w.Write(interfaceBytes) - w.Write(spaceBytes) - w.Write(nilAngleBytes) - w.Write(newlineBytes) - continue - } - - d := dumpState{w: w, cs: cs} - d.pointers = make(map[uintptr]int) - d.dump(reflect.ValueOf(arg)) - d.w.Write(newlineBytes) - } -} - -// Fdump formats and displays the passed arguments to io.Writer w. 
It formats -// exactly the same as Dump. -func Fdump(w io.Writer, a ...interface{}) { - fdump(&Config, w, a...) -} - -// Sdump returns a string with the passed arguments formatted exactly the same -// as Dump. -func Sdump(a ...interface{}) string { - var buf bytes.Buffer - fdump(&Config, &buf, a...) - return buf.String() -} - -/* -Dump displays the passed parameters to standard out with newlines, customizable -indentation, and additional debug information such as complete types and all -pointer addresses used to indirect to the final value. It provides the -following features over the built-in printing facilities provided by the fmt -package: - - * Pointers are dereferenced and followed - * Circular data structures are detected and handled properly - * Custom Stringer/error interfaces are optionally invoked, including - on unexported types - * Custom types which only implement the Stringer/error interfaces via - a pointer receiver are optionally invoked when passing non-pointer - variables - * Byte arrays and slices are dumped like the hexdump -C command which - includes offsets, byte values in hex, and ASCII output - -The configuration options are controlled by an exported package global, -spew.Config. See ConfigState for options documentation. - -See Fdump if you would prefer dumping to an arbitrary io.Writer or Sdump to -get the formatted result as a string. -*/ -func Dump(a ...interface{}) { - fdump(&Config, os.Stdout, a...) -} diff --git a/vendor/github.com/davecgh/go-spew/spew/format.go b/vendor/github.com/davecgh/go-spew/spew/format.go deleted file mode 100644 index b04edb7d7..000000000 --- a/vendor/github.com/davecgh/go-spew/spew/format.go +++ /dev/null @@ -1,419 +0,0 @@ -/* - * Copyright (c) 2013-2016 Dave Collins - * - * Permission to use, copy, modify, and distribute this software for any - * purpose with or without fee is hereby granted, provided that the above - * copyright notice and this permission notice appear in all copies. - * - * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES - * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF - * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR - * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES - * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN - * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF - * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - */ - -package spew - -import ( - "bytes" - "fmt" - "reflect" - "strconv" - "strings" -) - -// supportedFlags is a list of all the character flags supported by fmt package. -const supportedFlags = "0-+# " - -// formatState implements the fmt.Formatter interface and contains information -// about the state of a formatting operation. The NewFormatter function can -// be used to get a new Formatter which can be used directly as arguments -// in standard fmt package printing calls. -type formatState struct { - value interface{} - fs fmt.State - depth int - pointers map[uintptr]int - ignoreNextType bool - cs *ConfigState -} - -// buildDefaultFormat recreates the original format string without precision -// and width information to pass in to fmt.Sprintf in the case of an -// unrecognized type. Unless new types are added to the language, this -// function won't ever be called. 
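As a quick illustration of the Dump, Fdump, and Sdump functions defined above, the following hedged sketch (the item type is made up for the example) writes the same value to stdout, to an arbitrary io.Writer, and into a string:

    package main

    import (
        "bytes"
        "fmt"
        "os"

        "github.com/davecgh/go-spew/spew"
    )

    type item struct {
        Name string
        Tags []byte
    }

    func main() {
        v := item{Name: "x", Tags: []byte{0x01, 0x02, 0x03}}

        spew.Dump(v)             // stdout; []byte fields are shown hexdump -C style
        spew.Fdump(os.Stderr, v) // any io.Writer

        var buf bytes.Buffer
        spew.Fdump(&buf, v)
        fmt.Println(buf.String() == spew.Sdump(v)) // Sdump returns the identical text
    }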
-func (f *formatState) buildDefaultFormat() (format string) { - buf := bytes.NewBuffer(percentBytes) - - for _, flag := range supportedFlags { - if f.fs.Flag(int(flag)) { - buf.WriteRune(flag) - } - } - - buf.WriteRune('v') - - format = buf.String() - return format -} - -// constructOrigFormat recreates the original format string including precision -// and width information to pass along to the standard fmt package. This allows -// automatic deferral of all format strings this package doesn't support. -func (f *formatState) constructOrigFormat(verb rune) (format string) { - buf := bytes.NewBuffer(percentBytes) - - for _, flag := range supportedFlags { - if f.fs.Flag(int(flag)) { - buf.WriteRune(flag) - } - } - - if width, ok := f.fs.Width(); ok { - buf.WriteString(strconv.Itoa(width)) - } - - if precision, ok := f.fs.Precision(); ok { - buf.Write(precisionBytes) - buf.WriteString(strconv.Itoa(precision)) - } - - buf.WriteRune(verb) - - format = buf.String() - return format -} - -// unpackValue returns values inside of non-nil interfaces when possible and -// ensures that types for values which have been unpacked from an interface -// are displayed when the show types flag is also set. -// This is useful for data types like structs, arrays, slices, and maps which -// can contain varying types packed inside an interface. -func (f *formatState) unpackValue(v reflect.Value) reflect.Value { - if v.Kind() == reflect.Interface { - f.ignoreNextType = false - if !v.IsNil() { - v = v.Elem() - } - } - return v -} - -// formatPtr handles formatting of pointers by indirecting them as necessary. -func (f *formatState) formatPtr(v reflect.Value) { - // Display nil if top level pointer is nil. - showTypes := f.fs.Flag('#') - if v.IsNil() && (!showTypes || f.ignoreNextType) { - f.fs.Write(nilAngleBytes) - return - } - - // Remove pointers at or below the current depth from map used to detect - // circular refs. - for k, depth := range f.pointers { - if depth >= f.depth { - delete(f.pointers, k) - } - } - - // Keep list of all dereferenced pointers to possibly show later. - pointerChain := make([]uintptr, 0) - - // Figure out how many levels of indirection there are by derferencing - // pointers and unpacking interfaces down the chain while detecting circular - // references. - nilFound := false - cycleFound := false - indirects := 0 - ve := v - for ve.Kind() == reflect.Ptr { - if ve.IsNil() { - nilFound = true - break - } - indirects++ - addr := ve.Pointer() - pointerChain = append(pointerChain, addr) - if pd, ok := f.pointers[addr]; ok && pd < f.depth { - cycleFound = true - indirects-- - break - } - f.pointers[addr] = f.depth - - ve = ve.Elem() - if ve.Kind() == reflect.Interface { - if ve.IsNil() { - nilFound = true - break - } - ve = ve.Elem() - } - } - - // Display type or indirection level depending on flags. - if showTypes && !f.ignoreNextType { - f.fs.Write(openParenBytes) - f.fs.Write(bytes.Repeat(asteriskBytes, indirects)) - f.fs.Write([]byte(ve.Type().String())) - f.fs.Write(closeParenBytes) - } else { - if nilFound || cycleFound { - indirects += strings.Count(ve.Type().String(), "*") - } - f.fs.Write(openAngleBytes) - f.fs.Write([]byte(strings.Repeat("*", indirects))) - f.fs.Write(closeAngleBytes) - } - - // Display pointer information depending on flags. 
- if f.fs.Flag('+') && (len(pointerChain) > 0) { - f.fs.Write(openParenBytes) - for i, addr := range pointerChain { - if i > 0 { - f.fs.Write(pointerChainBytes) - } - printHexPtr(f.fs, addr) - } - f.fs.Write(closeParenBytes) - } - - // Display dereferenced value. - switch { - case nilFound: - f.fs.Write(nilAngleBytes) - - case cycleFound: - f.fs.Write(circularShortBytes) - - default: - f.ignoreNextType = true - f.format(ve) - } -} - -// format is the main workhorse for providing the Formatter interface. It -// uses the passed reflect value to figure out what kind of object we are -// dealing with and formats it appropriately. It is a recursive function, -// however circular data structures are detected and handled properly. -func (f *formatState) format(v reflect.Value) { - // Handle invalid reflect values immediately. - kind := v.Kind() - if kind == reflect.Invalid { - f.fs.Write(invalidAngleBytes) - return - } - - // Handle pointers specially. - if kind == reflect.Ptr { - f.formatPtr(v) - return - } - - // Print type information unless already handled elsewhere. - if !f.ignoreNextType && f.fs.Flag('#') { - f.fs.Write(openParenBytes) - f.fs.Write([]byte(v.Type().String())) - f.fs.Write(closeParenBytes) - } - f.ignoreNextType = false - - // Call Stringer/error interfaces if they exist and the handle methods - // flag is enabled. - if !f.cs.DisableMethods { - if (kind != reflect.Invalid) && (kind != reflect.Interface) { - if handled := handleMethods(f.cs, f.fs, v); handled { - return - } - } - } - - switch kind { - case reflect.Invalid: - // Do nothing. We should never get here since invalid has already - // been handled above. - - case reflect.Bool: - printBool(f.fs, v.Bool()) - - case reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64, reflect.Int: - printInt(f.fs, v.Int(), 10) - - case reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uint: - printUint(f.fs, v.Uint(), 10) - - case reflect.Float32: - printFloat(f.fs, v.Float(), 32) - - case reflect.Float64: - printFloat(f.fs, v.Float(), 64) - - case reflect.Complex64: - printComplex(f.fs, v.Complex(), 32) - - case reflect.Complex128: - printComplex(f.fs, v.Complex(), 64) - - case reflect.Slice: - if v.IsNil() { - f.fs.Write(nilAngleBytes) - break - } - fallthrough - - case reflect.Array: - f.fs.Write(openBracketBytes) - f.depth++ - if (f.cs.MaxDepth != 0) && (f.depth > f.cs.MaxDepth) { - f.fs.Write(maxShortBytes) - } else { - numEntries := v.Len() - for i := 0; i < numEntries; i++ { - if i > 0 { - f.fs.Write(spaceBytes) - } - f.ignoreNextType = true - f.format(f.unpackValue(v.Index(i))) - } - } - f.depth-- - f.fs.Write(closeBracketBytes) - - case reflect.String: - f.fs.Write([]byte(v.String())) - - case reflect.Interface: - // The only time we should get here is for nil interfaces due to - // unpackValue calls. - if v.IsNil() { - f.fs.Write(nilAngleBytes) - } - - case reflect.Ptr: - // Do nothing. We should never get here since pointers have already - // been handled above. 
- - case reflect.Map: - // nil maps should be indicated as different than empty maps - if v.IsNil() { - f.fs.Write(nilAngleBytes) - break - } - - f.fs.Write(openMapBytes) - f.depth++ - if (f.cs.MaxDepth != 0) && (f.depth > f.cs.MaxDepth) { - f.fs.Write(maxShortBytes) - } else { - keys := v.MapKeys() - if f.cs.SortKeys { - sortValues(keys, f.cs) - } - for i, key := range keys { - if i > 0 { - f.fs.Write(spaceBytes) - } - f.ignoreNextType = true - f.format(f.unpackValue(key)) - f.fs.Write(colonBytes) - f.ignoreNextType = true - f.format(f.unpackValue(v.MapIndex(key))) - } - } - f.depth-- - f.fs.Write(closeMapBytes) - - case reflect.Struct: - numFields := v.NumField() - f.fs.Write(openBraceBytes) - f.depth++ - if (f.cs.MaxDepth != 0) && (f.depth > f.cs.MaxDepth) { - f.fs.Write(maxShortBytes) - } else { - vt := v.Type() - for i := 0; i < numFields; i++ { - if i > 0 { - f.fs.Write(spaceBytes) - } - vtf := vt.Field(i) - if f.fs.Flag('+') || f.fs.Flag('#') { - f.fs.Write([]byte(vtf.Name)) - f.fs.Write(colonBytes) - } - f.format(f.unpackValue(v.Field(i))) - } - } - f.depth-- - f.fs.Write(closeBraceBytes) - - case reflect.Uintptr: - printHexPtr(f.fs, uintptr(v.Uint())) - - case reflect.UnsafePointer, reflect.Chan, reflect.Func: - printHexPtr(f.fs, v.Pointer()) - - // There were not any other types at the time this code was written, but - // fall back to letting the default fmt package handle it if any get added. - default: - format := f.buildDefaultFormat() - if v.CanInterface() { - fmt.Fprintf(f.fs, format, v.Interface()) - } else { - fmt.Fprintf(f.fs, format, v.String()) - } - } -} - -// Format satisfies the fmt.Formatter interface. See NewFormatter for usage -// details. -func (f *formatState) Format(fs fmt.State, verb rune) { - f.fs = fs - - // Use standard formatting for verbs that are not v. - if verb != 'v' { - format := f.constructOrigFormat(verb) - fmt.Fprintf(fs, format, f.value) - return - } - - if f.value == nil { - if fs.Flag('#') { - fs.Write(interfaceBytes) - } - fs.Write(nilAngleBytes) - return - } - - f.format(reflect.ValueOf(f.value)) -} - -// newFormatter is a helper function to consolidate the logic from the various -// public methods which take varying config states. -func newFormatter(cs *ConfigState, v interface{}) fmt.Formatter { - fs := &formatState{value: v, cs: cs} - fs.pointers = make(map[uintptr]int) - return fs -} - -/* -NewFormatter returns a custom formatter that satisfies the fmt.Formatter -interface. As a result, it integrates cleanly with standard fmt package -printing functions. The formatter is useful for inline printing of smaller data -types similar to the standard %v format specifier. - -The custom formatter only responds to the %v (most compact), %+v (adds pointer -addresses), %#v (adds types), or %#+v (adds types and pointer addresses) verb -combinations. Any other verbs such as %x and %q will be sent to the the -standard fmt package for formatting. In addition, the custom formatter ignores -the width and precision arguments (however they will still work on the format -specifiers not handled by the custom formatter). - -Typically this function shouldn't be called directly. It is much easier to make -use of the custom formatter by calling one of the convenience functions such as -Printf, Println, or Fprintf. 
-*/ -func NewFormatter(v interface{}) fmt.Formatter { - return newFormatter(&Config, v) -} diff --git a/vendor/github.com/davecgh/go-spew/spew/spew.go b/vendor/github.com/davecgh/go-spew/spew/spew.go deleted file mode 100644 index 32c0e3388..000000000 --- a/vendor/github.com/davecgh/go-spew/spew/spew.go +++ /dev/null @@ -1,148 +0,0 @@ -/* - * Copyright (c) 2013-2016 Dave Collins - * - * Permission to use, copy, modify, and distribute this software for any - * purpose with or without fee is hereby granted, provided that the above - * copyright notice and this permission notice appear in all copies. - * - * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES - * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF - * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR - * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES - * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN - * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF - * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - */ - -package spew - -import ( - "fmt" - "io" -) - -// Errorf is a wrapper for fmt.Errorf that treats each argument as if it were -// passed with a default Formatter interface returned by NewFormatter. It -// returns the formatted string as a value that satisfies error. See -// NewFormatter for formatting details. -// -// This function is shorthand for the following syntax: -// -// fmt.Errorf(format, spew.NewFormatter(a), spew.NewFormatter(b)) -func Errorf(format string, a ...interface{}) (err error) { - return fmt.Errorf(format, convertArgs(a)...) -} - -// Fprint is a wrapper for fmt.Fprint that treats each argument as if it were -// passed with a default Formatter interface returned by NewFormatter. It -// returns the number of bytes written and any write error encountered. See -// NewFormatter for formatting details. -// -// This function is shorthand for the following syntax: -// -// fmt.Fprint(w, spew.NewFormatter(a), spew.NewFormatter(b)) -func Fprint(w io.Writer, a ...interface{}) (n int, err error) { - return fmt.Fprint(w, convertArgs(a)...) -} - -// Fprintf is a wrapper for fmt.Fprintf that treats each argument as if it were -// passed with a default Formatter interface returned by NewFormatter. It -// returns the number of bytes written and any write error encountered. See -// NewFormatter for formatting details. -// -// This function is shorthand for the following syntax: -// -// fmt.Fprintf(w, format, spew.NewFormatter(a), spew.NewFormatter(b)) -func Fprintf(w io.Writer, format string, a ...interface{}) (n int, err error) { - return fmt.Fprintf(w, format, convertArgs(a)...) -} - -// Fprintln is a wrapper for fmt.Fprintln that treats each argument as if it -// passed with a default Formatter interface returned by NewFormatter. See -// NewFormatter for formatting details. -// -// This function is shorthand for the following syntax: -// -// fmt.Fprintln(w, spew.NewFormatter(a), spew.NewFormatter(b)) -func Fprintln(w io.Writer, a ...interface{}) (n int, err error) { - return fmt.Fprintln(w, convertArgs(a)...) -} - -// Print is a wrapper for fmt.Print that treats each argument as if it were -// passed with a default Formatter interface returned by NewFormatter. It -// returns the number of bytes written and any write error encountered. See -// NewFormatter for formatting details. 
-// -// This function is shorthand for the following syntax: -// -// fmt.Print(spew.NewFormatter(a), spew.NewFormatter(b)) -func Print(a ...interface{}) (n int, err error) { - return fmt.Print(convertArgs(a)...) -} - -// Printf is a wrapper for fmt.Printf that treats each argument as if it were -// passed with a default Formatter interface returned by NewFormatter. It -// returns the number of bytes written and any write error encountered. See -// NewFormatter for formatting details. -// -// This function is shorthand for the following syntax: -// -// fmt.Printf(format, spew.NewFormatter(a), spew.NewFormatter(b)) -func Printf(format string, a ...interface{}) (n int, err error) { - return fmt.Printf(format, convertArgs(a)...) -} - -// Println is a wrapper for fmt.Println that treats each argument as if it were -// passed with a default Formatter interface returned by NewFormatter. It -// returns the number of bytes written and any write error encountered. See -// NewFormatter for formatting details. -// -// This function is shorthand for the following syntax: -// -// fmt.Println(spew.NewFormatter(a), spew.NewFormatter(b)) -func Println(a ...interface{}) (n int, err error) { - return fmt.Println(convertArgs(a)...) -} - -// Sprint is a wrapper for fmt.Sprint that treats each argument as if it were -// passed with a default Formatter interface returned by NewFormatter. It -// returns the resulting string. See NewFormatter for formatting details. -// -// This function is shorthand for the following syntax: -// -// fmt.Sprint(spew.NewFormatter(a), spew.NewFormatter(b)) -func Sprint(a ...interface{}) string { - return fmt.Sprint(convertArgs(a)...) -} - -// Sprintf is a wrapper for fmt.Sprintf that treats each argument as if it were -// passed with a default Formatter interface returned by NewFormatter. It -// returns the resulting string. See NewFormatter for formatting details. -// -// This function is shorthand for the following syntax: -// -// fmt.Sprintf(format, spew.NewFormatter(a), spew.NewFormatter(b)) -func Sprintf(format string, a ...interface{}) string { - return fmt.Sprintf(format, convertArgs(a)...) -} - -// Sprintln is a wrapper for fmt.Sprintln that treats each argument as if it -// were passed with a default Formatter interface returned by NewFormatter. It -// returns the resulting string. See NewFormatter for formatting details. -// -// This function is shorthand for the following syntax: -// -// fmt.Sprintln(spew.NewFormatter(a), spew.NewFormatter(b)) -func Sprintln(a ...interface{}) string { - return fmt.Sprintln(convertArgs(a)...) -} - -// convertArgs accepts a slice of arguments and returns a slice of the same -// length with each argument converted to a default spew Formatter interface. 
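To show the fmt-style wrappers listed above in use, here is a small hedged sketch (the circular node type is illustrative) exercising the %v, %+v, and %#v verb combinations and the fall-through for unsupported verbs:

    package main

    import (
        "fmt"

        "github.com/davecgh/go-spew/spew"
    )

    type node struct {
        Value int
        Next  *node
    }

    func main() {
        n := &node{Value: 1}
        n.Next = n // circular reference; detected instead of recursing forever

        spew.Printf("compact:        %v\n", n)
        spew.Printf("with addresses: %+v\n", n)
        fmt.Println(spew.Sprintf("with types: %#v", n))

        // Verbs spew does not handle (e.g. %x) are passed to the standard fmt package.
        spew.Printf("hex: %x\n", 255)
    }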
-func convertArgs(args []interface{}) (formatters []interface{}) { - formatters = make([]interface{}, len(args)) - for index, arg := range args { - formatters[index] = NewFormatter(arg) - } - return formatters -} diff --git a/vendor/github.com/denis-tingaikin/go-header/.gitignore b/vendor/github.com/denis-tingaikin/go-header/.gitignore deleted file mode 100644 index 62c893550..000000000 --- a/vendor/github.com/denis-tingaikin/go-header/.gitignore +++ /dev/null @@ -1 +0,0 @@ -.idea/ \ No newline at end of file diff --git a/vendor/github.com/denis-tingaikin/go-header/.go-header.yml b/vendor/github.com/denis-tingaikin/go-header/.go-header.yml deleted file mode 100644 index 3aa6d060d..000000000 --- a/vendor/github.com/denis-tingaikin/go-header/.go-header.yml +++ /dev/null @@ -1,19 +0,0 @@ -values: - regexp: - copyright-holder: Copyright \(c\) {{mod-year-range}} Denis Tingaikin -template: | - {{copyright-holder}} - - SPDX-License-Identifier: Apache-2.0 - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at: - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. \ No newline at end of file diff --git a/vendor/github.com/denis-tingaikin/go-header/LICENSE b/vendor/github.com/denis-tingaikin/go-header/LICENSE deleted file mode 100644 index a2c9fda21..000000000 --- a/vendor/github.com/denis-tingaikin/go-header/LICENSE +++ /dev/null @@ -1,674 +0,0 @@ - GNU GENERAL PUBLIC LICENSE - Version 3, 29 June 2007 - - Copyright (C) 2007 Free Software Foundation, Inc. - Everyone is permitted to copy and distribute verbatim copies - of this license document, but changing it is not allowed. - - Preamble - - The GNU General Public License is a free, copyleft license for -software and other kinds of works. - - The licenses for most software and other practical works are designed -to take away your freedom to share and change the works. By contrast, -the GNU General Public License is intended to guarantee your freedom to -share and change all versions of a program--to make sure it remains free -software for all its users. We, the Free Software Foundation, use the -GNU General Public License for most of our software; it applies also to -any other work released this way by its authors. You can apply it to -your programs, too. - - When we speak of free software, we are referring to freedom, not -price. Our General Public Licenses are designed to make sure that you -have the freedom to distribute copies of free software (and charge for -them if you wish), that you receive source code or can get it if you -want it, that you can change the software or use pieces of it in new -free programs, and that you know you can do these things. - - To protect your rights, we need to prevent others from denying you -these rights or asking you to surrender the rights. Therefore, you have -certain responsibilities if you distribute copies of the software, or if -you modify it: responsibilities to respect the freedom of others. - - For example, if you distribute copies of such a program, whether -gratis or for a fee, you must pass on to the recipients the same -freedoms that you received. 
You must make sure that they, too, receive -or can get the source code. And you must show them these terms so they -know their rights. - - Developers that use the GNU GPL protect your rights with two steps: -(1) assert copyright on the software, and (2) offer you this License -giving you legal permission to copy, distribute and/or modify it. - - For the developers' and authors' protection, the GPL clearly explains -that there is no warranty for this free software. For both users' and -authors' sake, the GPL requires that modified versions be marked as -changed, so that their problems will not be attributed erroneously to -authors of previous versions. - - Some devices are designed to deny users access to install or run -modified versions of the software inside them, although the manufacturer -can do so. This is fundamentally incompatible with the aim of -protecting users' freedom to change the software. The systematic -pattern of such abuse occurs in the area of products for individuals to -use, which is precisely where it is most unacceptable. Therefore, we -have designed this version of the GPL to prohibit the practice for those -products. If such problems arise substantially in other domains, we -stand ready to extend this provision to those domains in future versions -of the GPL, as needed to protect the freedom of users. - - Finally, every program is threatened constantly by software patents. -States should not allow patents to restrict development and use of -software on general-purpose computers, but in those that do, we wish to -avoid the special danger that patents applied to a free program could -make it effectively proprietary. To prevent this, the GPL assures that -patents cannot be used to render the program non-free. - - The precise terms and conditions for copying, distribution and -modification follow. - - TERMS AND CONDITIONS - - 0. Definitions. - - "This License" refers to version 3 of the GNU General Public License. - - "Copyright" also means copyright-like laws that apply to other kinds of -works, such as semiconductor masks. - - "The Program" refers to any copyrightable work licensed under this -License. Each licensee is addressed as "you". "Licensees" and -"recipients" may be individuals or organizations. - - To "modify" a work means to copy from or adapt all or part of the work -in a fashion requiring copyright permission, other than the making of an -exact copy. The resulting work is called a "modified version" of the -earlier work or a work "based on" the earlier work. - - A "covered work" means either the unmodified Program or a work based -on the Program. - - To "propagate" a work means to do anything with it that, without -permission, would make you directly or secondarily liable for -infringement under applicable copyright law, except executing it on a -computer or modifying a private copy. Propagation includes copying, -distribution (with or without modification), making available to the -public, and in some countries other activities as well. - - To "convey" a work means any kind of propagation that enables other -parties to make or receive copies. Mere interaction with a user through -a computer network, with no transfer of a copy, is not conveying. 
- - An interactive user interface displays "Appropriate Legal Notices" -to the extent that it includes a convenient and prominently visible -feature that (1) displays an appropriate copyright notice, and (2) -tells the user that there is no warranty for the work (except to the -extent that warranties are provided), that licensees may convey the -work under this License, and how to view a copy of this License. If -the interface presents a list of user commands or options, such as a -menu, a prominent item in the list meets this criterion. - - 1. Source Code. - - The "source code" for a work means the preferred form of the work -for making modifications to it. "Object code" means any non-source -form of a work. - - A "Standard Interface" means an interface that either is an official -standard defined by a recognized standards body, or, in the case of -interfaces specified for a particular programming language, one that -is widely used among developers working in that language. - - The "System Libraries" of an executable work include anything, other -than the work as a whole, that (a) is included in the normal form of -packaging a Major Component, but which is not part of that Major -Component, and (b) serves only to enable use of the work with that -Major Component, or to implement a Standard Interface for which an -implementation is available to the public in source code form. A -"Major Component", in this context, means a major essential component -(kernel, window system, and so on) of the specific operating system -(if any) on which the executable work runs, or a compiler used to -produce the work, or an object code interpreter used to run it. - - The "Corresponding Source" for a work in object code form means all -the source code needed to generate, install, and (for an executable -work) run the object code and to modify the work, including scripts to -control those activities. However, it does not include the work's -System Libraries, or general-purpose tools or generally available free -programs which are used unmodified in performing those activities but -which are not part of the work. For example, Corresponding Source -includes interface definition files associated with source files for -the work, and the source code for shared libraries and dynamically -linked subprograms that the work is specifically designed to require, -such as by intimate data communication or control flow between those -subprograms and other parts of the work. - - The Corresponding Source need not include anything that users -can regenerate automatically from other parts of the Corresponding -Source. - - The Corresponding Source for a work in source code form is that -same work. - - 2. Basic Permissions. - - All rights granted under this License are granted for the term of -copyright on the Program, and are irrevocable provided the stated -conditions are met. This License explicitly affirms your unlimited -permission to run the unmodified Program. The output from running a -covered work is covered by this License only if the output, given its -content, constitutes a covered work. This License acknowledges your -rights of fair use or other equivalent, as provided by copyright law. - - You may make, run and propagate covered works that you do not -convey, without conditions so long as your license otherwise remains -in force. 
You may convey covered works to others for the sole purpose -of having them make modifications exclusively for you, or provide you -with facilities for running those works, provided that you comply with -the terms of this License in conveying all material for which you do -not control copyright. Those thus making or running the covered works -for you must do so exclusively on your behalf, under your direction -and control, on terms that prohibit them from making any copies of -your copyrighted material outside their relationship with you. - - Conveying under any other circumstances is permitted solely under -the conditions stated below. Sublicensing is not allowed; section 10 -makes it unnecessary. - - 3. Protecting Users' Legal Rights From Anti-Circumvention Law. - - No covered work shall be deemed part of an effective technological -measure under any applicable law fulfilling obligations under article -11 of the WIPO copyright treaty adopted on 20 December 1996, or -similar laws prohibiting or restricting circumvention of such -measures. - - When you convey a covered work, you waive any legal power to forbid -circumvention of technological measures to the extent such circumvention -is effected by exercising rights under this License with respect to -the covered work, and you disclaim any intention to limit operation or -modification of the work as a means of enforcing, against the work's -users, your or third parties' legal rights to forbid circumvention of -technological measures. - - 4. Conveying Verbatim Copies. - - You may convey verbatim copies of the Program's source code as you -receive it, in any medium, provided that you conspicuously and -appropriately publish on each copy an appropriate copyright notice; -keep intact all notices stating that this License and any -non-permissive terms added in accord with section 7 apply to the code; -keep intact all notices of the absence of any warranty; and give all -recipients a copy of this License along with the Program. - - You may charge any price or no price for each copy that you convey, -and you may offer support or warranty protection for a fee. - - 5. Conveying Modified Source Versions. - - You may convey a work based on the Program, or the modifications to -produce it from the Program, in the form of source code under the -terms of section 4, provided that you also meet all of these conditions: - - a) The work must carry prominent notices stating that you modified - it, and giving a relevant date. - - b) The work must carry prominent notices stating that it is - released under this License and any conditions added under section - 7. This requirement modifies the requirement in section 4 to - "keep intact all notices". - - c) You must license the entire work, as a whole, under this - License to anyone who comes into possession of a copy. This - License will therefore apply, along with any applicable section 7 - additional terms, to the whole of the work, and all its parts, - regardless of how they are packaged. This License gives no - permission to license the work in any other way, but it does not - invalidate such permission if you have separately received it. - - d) If the work has interactive user interfaces, each must display - Appropriate Legal Notices; however, if the Program has interactive - interfaces that do not display Appropriate Legal Notices, your - work need not make them do so. 
- - A compilation of a covered work with other separate and independent -works, which are not by their nature extensions of the covered work, -and which are not combined with it such as to form a larger program, -in or on a volume of a storage or distribution medium, is called an -"aggregate" if the compilation and its resulting copyright are not -used to limit the access or legal rights of the compilation's users -beyond what the individual works permit. Inclusion of a covered work -in an aggregate does not cause this License to apply to the other -parts of the aggregate. - - 6. Conveying Non-Source Forms. - - You may convey a covered work in object code form under the terms -of sections 4 and 5, provided that you also convey the -machine-readable Corresponding Source under the terms of this License, -in one of these ways: - - a) Convey the object code in, or embodied in, a physical product - (including a physical distribution medium), accompanied by the - Corresponding Source fixed on a durable physical medium - customarily used for software interchange. - - b) Convey the object code in, or embodied in, a physical product - (including a physical distribution medium), accompanied by a - written offer, valid for at least three years and valid for as - long as you offer spare parts or customer support for that product - model, to give anyone who possesses the object code either (1) a - copy of the Corresponding Source for all the software in the - product that is covered by this License, on a durable physical - medium customarily used for software interchange, for a price no - more than your reasonable cost of physically performing this - conveying of source, or (2) access to copy the - Corresponding Source from a network server at no charge. - - c) Convey individual copies of the object code with a copy of the - written offer to provide the Corresponding Source. This - alternative is allowed only occasionally and noncommercially, and - only if you received the object code with such an offer, in accord - with subsection 6b. - - d) Convey the object code by offering access from a designated - place (gratis or for a charge), and offer equivalent access to the - Corresponding Source in the same way through the same place at no - further charge. You need not require recipients to copy the - Corresponding Source along with the object code. If the place to - copy the object code is a network server, the Corresponding Source - may be on a different server (operated by you or a third party) - that supports equivalent copying facilities, provided you maintain - clear directions next to the object code saying where to find the - Corresponding Source. Regardless of what server hosts the - Corresponding Source, you remain obligated to ensure that it is - available for as long as needed to satisfy these requirements. - - e) Convey the object code using peer-to-peer transmission, provided - you inform other peers where the object code and Corresponding - Source of the work are being offered to the general public at no - charge under subsection 6d. - - A separable portion of the object code, whose source code is excluded -from the Corresponding Source as a System Library, need not be -included in conveying the object code work. - - A "User Product" is either (1) a "consumer product", which means any -tangible personal property which is normally used for personal, family, -or household purposes, or (2) anything designed or sold for incorporation -into a dwelling. 
In determining whether a product is a consumer product, -doubtful cases shall be resolved in favor of coverage. For a particular -product received by a particular user, "normally used" refers to a -typical or common use of that class of product, regardless of the status -of the particular user or of the way in which the particular user -actually uses, or expects or is expected to use, the product. A product -is a consumer product regardless of whether the product has substantial -commercial, industrial or non-consumer uses, unless such uses represent -the only significant mode of use of the product. - - "Installation Information" for a User Product means any methods, -procedures, authorization keys, or other information required to install -and execute modified versions of a covered work in that User Product from -a modified version of its Corresponding Source. The information must -suffice to ensure that the continued functioning of the modified object -code is in no case prevented or interfered with solely because -modification has been made. - - If you convey an object code work under this section in, or with, or -specifically for use in, a User Product, and the conveying occurs as -part of a transaction in which the right of possession and use of the -User Product is transferred to the recipient in perpetuity or for a -fixed term (regardless of how the transaction is characterized), the -Corresponding Source conveyed under this section must be accompanied -by the Installation Information. But this requirement does not apply -if neither you nor any third party retains the ability to install -modified object code on the User Product (for example, the work has -been installed in ROM). - - The requirement to provide Installation Information does not include a -requirement to continue to provide support service, warranty, or updates -for a work that has been modified or installed by the recipient, or for -the User Product in which it has been modified or installed. Access to a -network may be denied when the modification itself materially and -adversely affects the operation of the network or violates the rules and -protocols for communication across the network. - - Corresponding Source conveyed, and Installation Information provided, -in accord with this section must be in a format that is publicly -documented (and with an implementation available to the public in -source code form), and must require no special password or key for -unpacking, reading or copying. - - 7. Additional Terms. - - "Additional permissions" are terms that supplement the terms of this -License by making exceptions from one or more of its conditions. -Additional permissions that are applicable to the entire Program shall -be treated as though they were included in this License, to the extent -that they are valid under applicable law. If additional permissions -apply only to part of the Program, that part may be used separately -under those permissions, but the entire Program remains governed by -this License without regard to the additional permissions. - - When you convey a copy of a covered work, you may at your option -remove any additional permissions from that copy, or from any part of -it. (Additional permissions may be written to require their own -removal in certain cases when you modify the work.) You may place -additional permissions on material, added by you to a covered work, -for which you have or can give appropriate copyright permission. 
- - Notwithstanding any other provision of this License, for material you -add to a covered work, you may (if authorized by the copyright holders of -that material) supplement the terms of this License with terms: - - a) Disclaiming warranty or limiting liability differently from the - terms of sections 15 and 16 of this License; or - - b) Requiring preservation of specified reasonable legal notices or - author attributions in that material or in the Appropriate Legal - Notices displayed by works containing it; or - - c) Prohibiting misrepresentation of the origin of that material, or - requiring that modified versions of such material be marked in - reasonable ways as different from the original version; or - - d) Limiting the use for publicity purposes of names of licensors or - authors of the material; or - - e) Declining to grant rights under trademark law for use of some - trade names, trademarks, or service marks; or - - f) Requiring indemnification of licensors and authors of that - material by anyone who conveys the material (or modified versions of - it) with contractual assumptions of liability to the recipient, for - any liability that these contractual assumptions directly impose on - those licensors and authors. - - All other non-permissive additional terms are considered "further -restrictions" within the meaning of section 10. If the Program as you -received it, or any part of it, contains a notice stating that it is -governed by this License along with a term that is a further -restriction, you may remove that term. If a license document contains -a further restriction but permits relicensing or conveying under this -License, you may add to a covered work material governed by the terms -of that license document, provided that the further restriction does -not survive such relicensing or conveying. - - If you add terms to a covered work in accord with this section, you -must place, in the relevant source files, a statement of the -additional terms that apply to those files, or a notice indicating -where to find the applicable terms. - - Additional terms, permissive or non-permissive, may be stated in the -form of a separately written license, or stated as exceptions; -the above requirements apply either way. - - 8. Termination. - - You may not propagate or modify a covered work except as expressly -provided under this License. Any attempt otherwise to propagate or -modify it is void, and will automatically terminate your rights under -this License (including any patent licenses granted under the third -paragraph of section 11). - - However, if you cease all violation of this License, then your -license from a particular copyright holder is reinstated (a) -provisionally, unless and until the copyright holder explicitly and -finally terminates your license, and (b) permanently, if the copyright -holder fails to notify you of the violation by some reasonable means -prior to 60 days after the cessation. - - Moreover, your license from a particular copyright holder is -reinstated permanently if the copyright holder notifies you of the -violation by some reasonable means, this is the first time you have -received notice of violation of this License (for any work) from that -copyright holder, and you cure the violation prior to 30 days after -your receipt of the notice. - - Termination of your rights under this section does not terminate the -licenses of parties who have received copies or rights from you under -this License. 
If your rights have been terminated and not permanently -reinstated, you do not qualify to receive new licenses for the same -material under section 10. - - 9. Acceptance Not Required for Having Copies. - - You are not required to accept this License in order to receive or -run a copy of the Program. Ancillary propagation of a covered work -occurring solely as a consequence of using peer-to-peer transmission -to receive a copy likewise does not require acceptance. However, -nothing other than this License grants you permission to propagate or -modify any covered work. These actions infringe copyright if you do -not accept this License. Therefore, by modifying or propagating a -covered work, you indicate your acceptance of this License to do so. - - 10. Automatic Licensing of Downstream Recipients. - - Each time you convey a covered work, the recipient automatically -receives a license from the original licensors, to run, modify and -propagate that work, subject to this License. You are not responsible -for enforcing compliance by third parties with this License. - - An "entity transaction" is a transaction transferring control of an -organization, or substantially all assets of one, or subdividing an -organization, or merging organizations. If propagation of a covered -work results from an entity transaction, each party to that -transaction who receives a copy of the work also receives whatever -licenses to the work the party's predecessor in interest had or could -give under the previous paragraph, plus a right to possession of the -Corresponding Source of the work from the predecessor in interest, if -the predecessor has it or can get it with reasonable efforts. - - You may not impose any further restrictions on the exercise of the -rights granted or affirmed under this License. For example, you may -not impose a license fee, royalty, or other charge for exercise of -rights granted under this License, and you may not initiate litigation -(including a cross-claim or counterclaim in a lawsuit) alleging that -any patent claim is infringed by making, using, selling, offering for -sale, or importing the Program or any portion of it. - - 11. Patents. - - A "contributor" is a copyright holder who authorizes use under this -License of the Program or a work on which the Program is based. The -work thus licensed is called the contributor's "contributor version". - - A contributor's "essential patent claims" are all patent claims -owned or controlled by the contributor, whether already acquired or -hereafter acquired, that would be infringed by some manner, permitted -by this License, of making, using, or selling its contributor version, -but do not include claims that would be infringed only as a -consequence of further modification of the contributor version. For -purposes of this definition, "control" includes the right to grant -patent sublicenses in a manner consistent with the requirements of -this License. - - Each contributor grants you a non-exclusive, worldwide, royalty-free -patent license under the contributor's essential patent claims, to -make, use, sell, offer for sale, import and otherwise run, modify and -propagate the contents of its contributor version. - - In the following three paragraphs, a "patent license" is any express -agreement or commitment, however denominated, not to enforce a patent -(such as an express permission to practice a patent or covenant not to -sue for patent infringement). 
To "grant" such a patent license to a -party means to make such an agreement or commitment not to enforce a -patent against the party. - - If you convey a covered work, knowingly relying on a patent license, -and the Corresponding Source of the work is not available for anyone -to copy, free of charge and under the terms of this License, through a -publicly available network server or other readily accessible means, -then you must either (1) cause the Corresponding Source to be so -available, or (2) arrange to deprive yourself of the benefit of the -patent license for this particular work, or (3) arrange, in a manner -consistent with the requirements of this License, to extend the patent -license to downstream recipients. "Knowingly relying" means you have -actual knowledge that, but for the patent license, your conveying the -covered work in a country, or your recipient's use of the covered work -in a country, would infringe one or more identifiable patents in that -country that you have reason to believe are valid. - - If, pursuant to or in connection with a single transaction or -arrangement, you convey, or propagate by procuring conveyance of, a -covered work, and grant a patent license to some of the parties -receiving the covered work authorizing them to use, propagate, modify -or convey a specific copy of the covered work, then the patent license -you grant is automatically extended to all recipients of the covered -work and works based on it. - - A patent license is "discriminatory" if it does not include within -the scope of its coverage, prohibits the exercise of, or is -conditioned on the non-exercise of one or more of the rights that are -specifically granted under this License. You may not convey a covered -work if you are a party to an arrangement with a third party that is -in the business of distributing software, under which you make payment -to the third party based on the extent of your activity of conveying -the work, and under which the third party grants, to any of the -parties who would receive the covered work from you, a discriminatory -patent license (a) in connection with copies of the covered work -conveyed by you (or copies made from those copies), or (b) primarily -for and in connection with specific products or compilations that -contain the covered work, unless you entered into that arrangement, -or that patent license was granted, prior to 28 March 2007. - - Nothing in this License shall be construed as excluding or limiting -any implied license or other defenses to infringement that may -otherwise be available to you under applicable patent law. - - 12. No Surrender of Others' Freedom. - - If conditions are imposed on you (whether by court order, agreement or -otherwise) that contradict the conditions of this License, they do not -excuse you from the conditions of this License. If you cannot convey a -covered work so as to satisfy simultaneously your obligations under this -License and any other pertinent obligations, then as a consequence you may -not convey it at all. For example, if you agree to terms that obligate you -to collect a royalty for further conveying from those to whom you convey -the Program, the only way you could satisfy both those terms and this -License would be to refrain entirely from conveying the Program. - - 13. Use with the GNU Affero General Public License. 
- - Notwithstanding any other provision of this License, you have -permission to link or combine any covered work with a work licensed -under version 3 of the GNU Affero General Public License into a single -combined work, and to convey the resulting work. The terms of this -License will continue to apply to the part which is the covered work, -but the special requirements of the GNU Affero General Public License, -section 13, concerning interaction through a network will apply to the -combination as such. - - 14. Revised Versions of this License. - - The Free Software Foundation may publish revised and/or new versions of -the GNU General Public License from time to time. Such new versions will -be similar in spirit to the present version, but may differ in detail to -address new problems or concerns. - - Each version is given a distinguishing version number. If the -Program specifies that a certain numbered version of the GNU General -Public License "or any later version" applies to it, you have the -option of following the terms and conditions either of that numbered -version or of any later version published by the Free Software -Foundation. If the Program does not specify a version number of the -GNU General Public License, you may choose any version ever published -by the Free Software Foundation. - - If the Program specifies that a proxy can decide which future -versions of the GNU General Public License can be used, that proxy's -public statement of acceptance of a version permanently authorizes you -to choose that version for the Program. - - Later license versions may give you additional or different -permissions. However, no additional obligations are imposed on any -author or copyright holder as a result of your choosing to follow a -later version. - - 15. Disclaimer of Warranty. - - THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY -APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT -HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY -OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, -THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR -PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM -IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF -ALL NECESSARY SERVICING, REPAIR OR CORRECTION. - - 16. Limitation of Liability. - - IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING -WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS -THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY -GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE -USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF -DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD -PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), -EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF -SUCH DAMAGES. - - 17. Interpretation of Sections 15 and 16. - - If the disclaimer of warranty and limitation of liability provided -above cannot be given local legal effect according to their terms, -reviewing courts shall apply local law that most closely approximates -an absolute waiver of all civil liability in connection with the -Program, unless a warranty or assumption of liability accompanies a -copy of the Program in return for a fee. 
-
-                     END OF TERMS AND CONDITIONS
-
-            How to Apply These Terms to Your New Programs
-
-  If you develop a new program, and you want it to be of the greatest
-possible use to the public, the best way to achieve this is to make it
-free software which everyone can redistribute and change under these terms.
-
-  To do so, attach the following notices to the program. It is safest
-to attach them to the start of each source file to most effectively
-state the exclusion of warranty; and each file should have at least
-the "copyright" line and a pointer to where the full notice is found.
-
-    <one line to give the program's name and a brief idea of what it does.>
-    Copyright (C) <year>  <name of author>
-
-    This program is free software: you can redistribute it and/or modify
-    it under the terms of the GNU General Public License as published by
-    the Free Software Foundation, either version 3 of the License, or
-    (at your option) any later version.
-
-    This program is distributed in the hope that it will be useful,
-    but WITHOUT ANY WARRANTY; without even the implied warranty of
-    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-    GNU General Public License for more details.
-
-    You should have received a copy of the GNU General Public License
-    along with this program. If not, see <https://www.gnu.org/licenses/>.
-
-Also add information on how to contact you by electronic and paper mail.
-
-  If the program does terminal interaction, make it output a short
-notice like this when it starts in an interactive mode:
-
-    <program>  Copyright (C) <year>  <name of author>
-    This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
-    This is free software, and you are welcome to redistribute it
-    under certain conditions; type `show c' for details.
-
-The hypothetical commands `show w' and `show c' should show the appropriate
-parts of the General Public License. Of course, your program's commands
-might be different; for a GUI interface, you would use an "about box".
-
-  You should also get your employer (if you work as a programmer) or school,
-if any, to sign a "copyright disclaimer" for the program, if necessary.
-For more information on this, and how to apply and follow the GNU GPL, see
-<https://www.gnu.org/licenses/>.
-
-  The GNU General Public License does not permit incorporating your program
-into proprietary programs. If your program is a subroutine library, you
-may consider it more useful to permit linking proprietary applications with
-the library. If this is what you want to do, use the GNU Lesser General
-Public License instead of this License. But first, please read
-<https://www.gnu.org/licenses/why-not-lgpl.html>.
diff --git a/vendor/github.com/denis-tingaikin/go-header/README.md b/vendor/github.com/denis-tingaikin/go-header/README.md
deleted file mode 100644
index fcddad1fa..000000000
--- a/vendor/github.com/denis-tingaikin/go-header/README.md
+++ /dev/null
@@ -1,83 +0,0 @@
-# go-header
-[![ci](https://github.com/denis-tingaikin/go-header/actions/workflows/ci.yml/badge.svg?branch=main)](https://github.com/denis-tingaikin/go-header/actions/workflows/ci.yml)
-
-Go source code linter providing checks for license headers.
-
-## Installation
-
-For installation you can simply use `go get`.
-
-```bash
-go install github.com/denis-tingaikin/go-header/cmd/go-header
-```
-
-## Configuration
-
-To configuring `.go-header.yml` linter you simply need to fill the next fields:
-
-```yaml
----
-template: # expects header template string.
-template-path: # expects path to file with license header string.
-values: # expects `const` or `regexp` node with values where values is a map string to string.
-  const:
-    key1: value1 # const value just checks equality.
Note `key1` should be used in template string as {{ key1 }} or {{ KEY1 }}. - regexp: - key2: value2 # regexp value just checks regex match. The value should be a valid regexp pattern. Note `key2` should be used in template string as {{ key2 }} or {{ KEY2 }}. -``` - -Where `values` also can be used recursively. Example: - -```yaml -values: - const: - key1: "value" - regexp: - key2: "{{key1}} value1" # Reads as regex pattern "value value1" -``` - -## Bult-in values - -- **MOD-YEAR** - Returns the year when the file was modified. -- **MOD-YEAR-RANGE** - Returns a year-range where the range starts from the year when the file was modified. -- **YEAR** - Expects current year. Example header value: `2020`. Example of template using: `{{YEAR}}` or `{{year}}`. -- **YEAR-RANGE** - Expects any valid year interval or current year. Example header value: `2020` or `2000-2020`. Example of template using: `{{year-range}}` or `{{YEAR-RANGE}}`. - -## Execution - -`go-header` linter expects file paths on input. If you want to run `go-header` only on diff files, then you can use this command: - -```bash -go-header $(git diff --name-only | grep -E '.*\.go') -``` - -## Setup example - -### Step 1 - -Create configuration file `.go-header.yml` in the root of project. - -```yaml ---- -values: - const: - MY COMPANY: mycompany.com -template: | - {{ MY COMPANY }} - SPDX-License-Identifier: Apache-2.0 - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at: - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. -``` - -### Step 2 -You are ready! Execute `go-header ${PATH_TO_FILES}` from the root of the project. diff --git a/vendor/github.com/denis-tingaikin/go-header/analyzer.go b/vendor/github.com/denis-tingaikin/go-header/analyzer.go deleted file mode 100644 index c6b361f01..000000000 --- a/vendor/github.com/denis-tingaikin/go-header/analyzer.go +++ /dev/null @@ -1,256 +0,0 @@ -// Copyright (c) 2020-2024 Denis Tingaikin -// -// SPDX-License-Identifier: Apache-2.0 -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at: -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package goheader - -import ( - "fmt" - "go/ast" - "os" - "os/exec" - "strings" - "time" -) - -type Target struct { - Path string - File *ast.File -} - -const iso = "2006-01-02 15:04:05 -0700" - -func (t *Target) ModTime() (time.Time, error) { - diff, err := exec.Command("git", "diff", t.Path).CombinedOutput() - if err == nil && len(diff) == 0 { - line, err := exec.Command("git", "log", "-1", "--pretty=format:%cd", "--date=iso", "--", t.Path).CombinedOutput() - if err == nil { - return time.Parse(iso, string(line)) - } - } - info, err := os.Stat(t.Path) - if err != nil { - return time.Time{}, err - } - return info.ModTime(), nil -} - -type Analyzer struct { - values map[string]Value - template string -} - -func (a *Analyzer) processPerTargetValues(target *Target) error { - a.values["mod-year"] = a.values["year"] - a.values["mod-year-range"] = a.values["year-range"] - if t, err := target.ModTime(); err == nil { - a.values["mod-year"] = &ConstValue{RawValue: fmt.Sprint(t.Year())} - a.values["mod-year-range"] = &RegexpValue{RawValue: `((20\d\d\-{{mod-year}})|({{mod-year}}))`} - } - - for _, v := range a.values { - if err := v.Calculate(a.values); err != nil { - return err - } - } - return nil -} - -func (a *Analyzer) Analyze(target *Target) (i Issue) { - if a.template == "" { - return NewIssue("Missed template for check") - } - - if err := a.processPerTargetValues(target); err != nil { - return &issue{msg: err.Error()} - } - - file := target.File - var header string - var offset = Location{ - Position: 1, - } - if len(file.Comments) > 0 && file.Comments[0].Pos() < file.Package { - if strings.HasPrefix(file.Comments[0].List[0].Text, "/*") { - header = (&ast.CommentGroup{List: []*ast.Comment{file.Comments[0].List[0]}}).Text() - } else { - header = file.Comments[0].Text() - offset.Position += 3 - } - } - defer func() { - if i == nil { - return - } - fix, ok := a.generateFix(i, file, header) - if !ok { - return - } - i = NewIssueWithFix(i.Message(), i.Location(), fix) - }() - header = strings.TrimSpace(header) - if header == "" { - return NewIssue("Missed header for check") - } - s := NewReader(header) - s.SetOffset(offset) - t := NewReader(a.template) - for !s.Done() && !t.Done() { - templateCh := t.Peek() - if templateCh == '{' { - name := a.readField(t) - if a.values[name] == nil { - return NewIssue(fmt.Sprintf("Template has unknown value: %v", name)) - } - if i := a.values[name].Read(s); i != nil { - return i - } - continue - } - sourceCh := s.Peek() - if sourceCh != templateCh { - l := s.Location() - notNextLine := func(r rune) bool { - return r != '\n' - } - actual := s.ReadWhile(notNextLine) - expected := t.ReadWhile(notNextLine) - return NewIssueWithLocation(fmt.Sprintf("Actual: %v\nExpected:%v", actual, expected), l) - } - s.Next() - t.Next() - } - if !s.Done() { - l := s.Location() - return NewIssueWithLocation(fmt.Sprintf("Unexpected string: %v", s.Finish()), l) - } - if !t.Done() { - l := s.Location() - return NewIssueWithLocation(fmt.Sprintf("Missed string: %v", t.Finish()), l) - } - return nil -} - -func (a *Analyzer) readField(reader *Reader) string { - _ = reader.Next() - _ = reader.Next() - - r := reader.ReadWhile(func(r rune) bool { - return r != '}' - }) - - _ = reader.Next() - _ = reader.Next() - - return strings.ToLower(strings.TrimSpace(r)) -} - -func New(options ...Option) *Analyzer { - a := &Analyzer{values: make(map[string]Value)} - for _, o := range options { - o.apply(a) - } - return a -} - -func (a *Analyzer) generateFix(i Issue, file *ast.File, header string) (Fix, 
bool) { - var expect string - t := NewReader(a.template) - for !t.Done() { - ch := t.Peek() - if ch == '{' { - f := a.values[a.readField(t)] - if f == nil { - return Fix{}, false - } - if f.Calculate(a.values) != nil { - return Fix{}, false - } - expect += f.Get() - continue - } - - expect += string(ch) - t.Next() - } - - fix := Fix{Expected: strings.Split(expect, "\n")} - if !(len(file.Comments) > 0 && file.Comments[0].Pos() < file.Package) { - for i := range fix.Expected { - fix.Expected[i] = "// " + fix.Expected[i] - } - return fix, true - } - - actual := file.Comments[0].List[0].Text - if !strings.HasPrefix(actual, "/*") { - for i := range fix.Expected { - fix.Expected[i] = "// " + fix.Expected[i] - } - for _, c := range file.Comments[0].List { - fix.Actual = append(fix.Actual, c.Text) - } - i = NewIssueWithFix(i.Message(), i.Location(), fix) - return fix, true - } - - gets := func(i int, end bool) string { - if i < 0 { - return header - } - if end { - return header[i+1:] - } - return header[:i] - } - start := strings.Index(actual, gets(strings.IndexByte(header, '\n'), false)) - if start < 0 { - return Fix{}, false // Should be impossible - } - nl := strings.LastIndexByte(actual[:start], '\n') - if nl >= 0 { - fix.Actual = strings.Split(actual[:nl], "\n") - fix.Expected = append(fix.Actual, fix.Expected...) - actual = actual[nl+1:] - start -= nl + 1 - } - - prefix := actual[:start] - if nl < 0 { - fix.Expected[0] = prefix + fix.Expected[0] - } else { - n := len(fix.Actual) - for i := range fix.Expected[n:] { - fix.Expected[n+i] = prefix + fix.Expected[n+i] - } - } - - last := gets(strings.LastIndexByte(header, '\n'), true) - end := strings.Index(actual, last) - if end < 0 { - return Fix{}, false // Should be impossible - } - - trailing := actual[end+len(last):] - if i := strings.IndexRune(trailing, '\n'); i < 0 { - fix.Expected[len(fix.Expected)-1] += trailing - } else { - fix.Expected[len(fix.Expected)-1] += trailing[:i] - fix.Expected = append(fix.Expected, strings.Split(trailing[i+1:], "\n")...) - } - - fix.Actual = append(fix.Actual, strings.Split(actual, "\n")...) - return fix, true -} diff --git a/vendor/github.com/denis-tingaikin/go-header/config.go b/vendor/github.com/denis-tingaikin/go-header/config.go deleted file mode 100644 index c881b63ac..000000000 --- a/vendor/github.com/denis-tingaikin/go-header/config.go +++ /dev/null @@ -1,99 +0,0 @@ -// Copyright (c) 2020-2024 Denis Tingaikin -// -// SPDX-License-Identifier: Apache-2.0 -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at: -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package goheader - -import ( - "errors" - "fmt" - "os" - "strings" - "time" - - "gopkg.in/yaml.v3" -) - -// Configuration represents go-header linter setup parameters -type Configuration struct { - // Values is map of values. Supports two types 'const` and `regexp`. Values can be used recursively. - Values map[string]map[string]string `yaml:"values"'` - // Template is template for checking. Uses values. - Template string `yaml:"template"` - // TemplatePath path to the template file. 
Useful if need to load the template from a specific file. - TemplatePath string `yaml:"template-path"` -} - -func (c *Configuration) builtInValues() map[string]Value { - var result = make(map[string]Value) - year := fmt.Sprint(time.Now().Year()) - result["year-range"] = &RegexpValue{ - RawValue: `((20\d\d\-{{YEAR}})|({{YEAR}}))`, - } - result["year"] = &ConstValue{ - RawValue: year, - } - return result -} - -func (c *Configuration) GetValues() (map[string]Value, error) { - var result = c.builtInValues() - createConst := func(raw string) Value { - return &ConstValue{RawValue: raw} - } - createRegexp := func(raw string) Value { - return &RegexpValue{RawValue: raw} - } - appendValues := func(m map[string]string, create func(string) Value) { - for k, v := range m { - key := strings.ToLower(k) - result[key] = create(v) - } - } - for k, v := range c.Values { - switch k { - case "const": - appendValues(v, createConst) - case "regexp": - appendValues(v, createRegexp) - default: - return nil, fmt.Errorf("unknown value type %v", k) - } - } - return result, nil -} - -func (c *Configuration) GetTemplate() (string, error) { - if c.Template != "" { - return c.Template, nil - } - if c.TemplatePath == "" { - return "", errors.New("template has not passed") - } - if b, err := os.ReadFile(c.TemplatePath); err != nil { - return "", err - } else { - c.Template = strings.TrimSpace(string(b)) - return c.Template, nil - } -} - -func (c *Configuration) Parse(p string) error { - b, err := os.ReadFile(p) - if err != nil { - return err - } - return yaml.Unmarshal(b, c) -} diff --git a/vendor/github.com/denis-tingaikin/go-header/issue.go b/vendor/github.com/denis-tingaikin/go-header/issue.go deleted file mode 100644 index e92279793..000000000 --- a/vendor/github.com/denis-tingaikin/go-header/issue.go +++ /dev/null @@ -1,67 +0,0 @@ -// Copyright (c) 2020-2024 Denis Tingaikin -// -// SPDX-License-Identifier: Apache-2.0 -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at: -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package goheader - -type Issue interface { - Location() Location - Message() string - Fix() *Fix -} - -type issue struct { - msg string - location Location - fix *Fix -} - -type Fix struct { - Actual []string - Expected []string -} - -func (i *issue) Location() Location { - return i.location -} - -func (i *issue) Message() string { - return i.msg -} - -func (i *issue) Fix() *Fix { - return i.fix -} - -func NewIssueWithLocation(msg string, location Location) Issue { - return &issue{ - msg: msg, - location: location, - } -} - -func NewIssueWithFix(msg string, location Location, fix Fix) Issue { - return &issue{ - msg: msg, - location: location, - fix: &fix, - } -} - -func NewIssue(msg string) Issue { - return &issue{ - msg: msg, - } -} diff --git a/vendor/github.com/denis-tingaikin/go-header/location.go b/vendor/github.com/denis-tingaikin/go-header/location.go deleted file mode 100644 index 9f1839485..000000000 --- a/vendor/github.com/denis-tingaikin/go-header/location.go +++ /dev/null @@ -1,35 +0,0 @@ -// Copyright (c) 2020-2022 Denis Tingaikin -// -// SPDX-License-Identifier: Apache-2.0 -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at: -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package goheader - -import "fmt" - -type Location struct { - Line int - Position int -} - -func (l Location) String() string { - return fmt.Sprintf("%v:%v", l.Line+1, l.Position) -} - -func (l Location) Add(other Location) Location { - return Location{ - Line: l.Line + other.Line, - Position: l.Position + other.Position, - } -} diff --git a/vendor/github.com/denis-tingaikin/go-header/option.go b/vendor/github.com/denis-tingaikin/go-header/option.go deleted file mode 100644 index a9689e811..000000000 --- a/vendor/github.com/denis-tingaikin/go-header/option.go +++ /dev/null @@ -1,44 +0,0 @@ -// Copyright (c) 2020-2022 Denis Tingaikin -// -// SPDX-License-Identifier: Apache-2.0 -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at: -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package goheader - -import "strings" - -type Option interface { - apply(*Analyzer) -} - -type applyAnalyzerOptionFunc func(*Analyzer) - -func (f applyAnalyzerOptionFunc) apply(a *Analyzer) { - f(a) -} - -func WithValues(values map[string]Value) Option { - return applyAnalyzerOptionFunc(func(a *Analyzer) { - a.values = make(map[string]Value) - for k, v := range values { - a.values[strings.ToLower(k)] = v - } - }) -} - -func WithTemplate(template string) Option { - return applyAnalyzerOptionFunc(func(a *Analyzer) { - a.template = template - }) -} diff --git a/vendor/github.com/denis-tingaikin/go-header/reader.go b/vendor/github.com/denis-tingaikin/go-header/reader.go deleted file mode 100644 index 9c9e88a17..000000000 --- a/vendor/github.com/denis-tingaikin/go-header/reader.go +++ /dev/null @@ -1,116 +0,0 @@ -/* -Copyright (c) 2020-2022 Denis Tingaikin - -SPDX-License-Identifier: Apache-2.0 - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at: - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ -package goheader - -func NewReader(text string) *Reader { - return &Reader{source: text} -} - -type Reader struct { - source string - position int - location Location - offset Location -} - -func (r *Reader) SetOffset(offset Location) { - r.offset = offset -} - -func (r *Reader) Position() int { - return r.position -} - -func (r *Reader) Location() Location { - return r.location.Add(r.offset) -} - -func (r *Reader) Peek() rune { - if r.Done() { - return rune(0) - } - return rune(r.source[r.position]) -} - -func (r *Reader) Done() bool { - return r.position >= len(r.source) -} - -func (r *Reader) Next() rune { - if r.Done() { - return rune(0) - } - reuslt := r.Peek() - if reuslt == '\n' { - r.location.Line++ - r.location.Position = 0 - } else { - r.location.Position++ - } - r.position++ - return reuslt -} - -func (r *Reader) Finish() string { - if r.position >= len(r.source) { - return "" - } - defer r.till() - return r.source[r.position:] -} - -func (r *Reader) SetPosition(pos int) { - if pos < 0 { - r.position = 0 - } - r.position = pos - r.location = r.calculateLocation() -} - -func (r *Reader) ReadWhile(match func(rune) bool) string { - if match == nil { - return "" - } - start := r.position - for !r.Done() && match(r.Peek()) { - r.Next() - } - return r.source[start:r.position] -} - -func (r *Reader) till() { - r.position = len(r.source) - r.location = r.calculateLocation() -} - -func (r *Reader) calculateLocation() Location { - min := len(r.source) - if min > r.position { - min = r.position - } - x, y := 0, 0 - for i := 0; i < min; i++ { - if r.source[i] == '\n' { - y++ - x = 0 - } else { - x++ - } - } - return Location{Line: y, Position: x} -} diff --git a/vendor/github.com/denis-tingaikin/go-header/value.go b/vendor/github.com/denis-tingaikin/go-header/value.go deleted file mode 100644 index 706a84f18..000000000 --- a/vendor/github.com/denis-tingaikin/go-header/value.go +++ /dev/null @@ -1,150 +0,0 @@ -// Copyright (c) 2020-2024 Denis Tingaikin -// -// SPDX-License-Identifier: Apache-2.0 -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you 
may not use this file except in compliance with the License. -// You may obtain a copy of the License at: -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package goheader - -import ( - "errors" - "fmt" - "regexp" - "strings" -) - -type Calculable interface { - Calculate(map[string]Value) error - Get() string - Raw() string -} - -type Value interface { - Calculable - Read(*Reader) Issue -} - -func calculateValue(calculable Calculable, values map[string]Value) (string, error) { - sb := strings.Builder{} - r := calculable.Raw() - var endIndex int - var startIndex int - for startIndex = strings.Index(r, "{{"); startIndex >= 0; startIndex = strings.Index(r, "{{") { - _, _ = sb.WriteString(r[:startIndex]) - endIndex = strings.Index(r, "}}") - if endIndex < 0 { - return "", errors.New("missed value ending") - } - subVal := strings.ToLower(strings.TrimSpace(r[startIndex+2 : endIndex])) - if val := values[subVal]; val != nil { - if err := val.Calculate(values); err != nil { - return "", err - } - sb.WriteString(val.Get()) - } else { - return "", fmt.Errorf("unknown value name %v", subVal) - } - endIndex += 2 - r = r[endIndex:] - } - _, _ = sb.WriteString(r) - return sb.String(), nil -} - -type ConstValue struct { - RawValue, Value string -} - -func (c *ConstValue) Calculate(values map[string]Value) error { - v, err := calculateValue(c, values) - if err != nil { - return err - } - c.Value = v - return nil -} - -func (c *ConstValue) Raw() string { - return c.RawValue -} - -func (c *ConstValue) Get() string { - if c.Value != "" { - return c.Value - } - return c.RawValue -} - -func (c *ConstValue) String() string { - return c.Get() -} - -func (c *ConstValue) Read(s *Reader) Issue { - l := s.Location() - p := s.Position() - for _, ch := range c.Get() { - if ch != s.Peek() { - s.SetPosition(p) - f := s.ReadWhile(func(r rune) bool { - return r != '\n' - }) - return NewIssueWithLocation(fmt.Sprintf("Expected:%v, Actual: %v", c.Get(), f), l) - } - s.Next() - } - return nil -} - -type RegexpValue struct { - RawValue, Value string -} - -func (r *RegexpValue) Calculate(values map[string]Value) error { - v, err := calculateValue(r, values) - if err != nil { - return err - } - r.Value = v - return nil -} - -func (r *RegexpValue) Raw() string { - return r.RawValue -} -func (r *RegexpValue) Get() string { - if r.Value != "" { - return r.Value - } - return r.RawValue -} - -func (r *RegexpValue) String() string { - return r.Get() -} - -func (r *RegexpValue) Read(s *Reader) Issue { - l := s.Location() - p := regexp.MustCompile(r.Get()) - pos := s.Position() - str := s.Finish() - s.SetPosition(pos) - indexes := p.FindAllIndex([]byte(str), -1) - if len(indexes) == 0 { - return NewIssueWithLocation(fmt.Sprintf("Pattern %v doesn't match.", p.String()), l) - } - s.SetPosition(pos + indexes[0][1]) - return nil -} - -var _ Value = &ConstValue{} -var _ Value = &RegexpValue{} diff --git a/vendor/github.com/esimonov/ifshort/LICENSE b/vendor/github.com/esimonov/ifshort/LICENSE deleted file mode 100644 index a04e339c0..000000000 --- a/vendor/github.com/esimonov/ifshort/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -MIT License - -Copyright (c) 2020 Eugene Simonov - -Permission is hereby granted, free of 
charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. diff --git a/vendor/github.com/esimonov/ifshort/pkg/analyzer/analyzer.go b/vendor/github.com/esimonov/ifshort/pkg/analyzer/analyzer.go deleted file mode 100644 index b2d06881d..000000000 --- a/vendor/github.com/esimonov/ifshort/pkg/analyzer/analyzer.go +++ /dev/null @@ -1,280 +0,0 @@ -package analyzer - -import ( - "go/ast" - "go/token" - - "golang.org/x/tools/go/analysis" - "golang.org/x/tools/go/analysis/passes/inspect" - "golang.org/x/tools/go/ast/inspector" -) - -var maxDeclChars, maxDeclLines int - -const ( - maxDeclLinesUsage = `maximum length of variable declaration measured in number of lines, after which the linter won't suggest using short syntax. -Has precedence over max-decl-chars.` - maxDeclCharsUsage = `maximum length of variable declaration measured in number of characters, after which the linter won't suggest using short syntax.` -) - -func init() { - Analyzer.Flags.IntVar(&maxDeclLines, "max-decl-lines", 1, maxDeclLinesUsage) - Analyzer.Flags.IntVar(&maxDeclChars, "max-decl-chars", 30, maxDeclCharsUsage) -} - -// Analyzer is an analysis.Analyzer instance for ifshort linter. -var Analyzer = &analysis.Analyzer{ - Name: "ifshort", - Doc: "Checks that your code uses short syntax for if-statements whenever possible.", - Run: run, - Requires: []*analysis.Analyzer{inspect.Analyzer}, -} - -func run(pass *analysis.Pass) (interface{}, error) { - inspector := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) - nodeFilter := []ast.Node{ - (*ast.FuncDecl)(nil), - } - - inspector.Preorder(nodeFilter, func(node ast.Node) { - fdecl := node.(*ast.FuncDecl) - - /*if fdecl.Name.Name != "notUsed_BinaryExpressionInIndex_OK" { - return - }*/ - - if fdecl == nil || fdecl.Body == nil { - return - } - - candidates := getNamedOccurrenceMap(fdecl, pass) - - for _, stmt := range fdecl.Body.List { - candidates.checkStatement(stmt, token.NoPos) - } - - for varName := range candidates { - for marker, occ := range candidates[varName] { - // If two or more vars with the same scope marker - skip them. 
- if candidates.isFoundByScopeMarker(marker) { - continue - } - - pass.Reportf(occ.declarationPos, - "variable '%s' is only used in the if-statement (%s); consider using short syntax", - varName, pass.Fset.Position(occ.ifStmtPos)) - } - } - }) - return nil, nil -} - -func (nom namedOccurrenceMap) checkStatement(stmt ast.Stmt, ifPos token.Pos) { - switch v := stmt.(type) { - case *ast.AssignStmt: - for _, el := range v.Rhs { - nom.checkExpression(el, ifPos) - } - if isAssign(v.Tok) { - for _, el := range v.Lhs { - nom.checkExpression(el, ifPos) - } - } - case *ast.DeferStmt: - for _, a := range v.Call.Args { - nom.checkExpression(a, ifPos) - } - case *ast.ExprStmt: - switch v.X.(type) { - case *ast.CallExpr, *ast.UnaryExpr: - nom.checkExpression(v.X, ifPos) - } - case *ast.ForStmt: - for _, el := range v.Body.List { - nom.checkStatement(el, ifPos) - } - - if bexpr, ok := v.Cond.(*ast.BinaryExpr); ok { - nom.checkExpression(bexpr.X, ifPos) - nom.checkExpression(bexpr.Y, ifPos) - } - - nom.checkStatement(v.Post, ifPos) - case *ast.GoStmt: - for _, a := range v.Call.Args { - nom.checkExpression(a, ifPos) - } - case *ast.IfStmt: - for _, el := range v.Body.List { - nom.checkStatement(el, v.If) - } - if elseBlock, ok := v.Else.(*ast.BlockStmt); ok { - for _, el := range elseBlock.List { - nom.checkStatement(el, v.If) - } - } - - switch cond := v.Cond.(type) { - case *ast.UnaryExpr: - nom.checkExpression(cond.X, v.If) - case *ast.BinaryExpr: - nom.checkExpression(cond.X, v.If) - nom.checkExpression(cond.Y, v.If) - case *ast.CallExpr: - nom.checkExpression(cond, v.If) - } - - if init, ok := v.Init.(*ast.AssignStmt); ok { - for _, e := range init.Rhs { - nom.checkExpression(e, v.If) - } - } - case *ast.IncDecStmt: - nom.checkExpression(v.X, ifPos) - case *ast.RangeStmt: - nom.checkExpression(v.X, ifPos) - if v.Body != nil { - for _, e := range v.Body.List { - nom.checkStatement(e, ifPos) - } - } - case *ast.ReturnStmt: - for _, r := range v.Results { - nom.checkExpression(r, ifPos) - } - case *ast.SendStmt: - nom.checkExpression(v.Chan, ifPos) - nom.checkExpression(v.Value, ifPos) - case *ast.SwitchStmt: - nom.checkExpression(v.Tag, ifPos) - - for _, el := range v.Body.List { - clauses, ok := el.(*ast.CaseClause) - if !ok { - continue - } - - for _, c := range clauses.List { - switch v := c.(type) { - case *ast.BinaryExpr: - nom.checkExpression(v.X, ifPos) - nom.checkExpression(v.Y, ifPos) - case *ast.Ident: - nom.checkExpression(v, ifPos) - } - } - - for _, c := range clauses.Body { - switch v := c.(type) { - case *ast.AssignStmt: - for _, el := range v.Lhs { - nom.checkExpression(el, ifPos) - } - for _, el := range v.Rhs { - nom.checkExpression(el, ifPos) - } - case *ast.ExprStmt: - nom.checkExpression(v.X, ifPos) - } - } - } - case *ast.SelectStmt: - for _, el := range v.Body.List { - clause := el.(*ast.CommClause) - - nom.checkStatement(clause.Comm, ifPos) - - for _, c := range clause.Body { - switch v := c.(type) { - case *ast.AssignStmt: - for _, el := range v.Lhs { - nom.checkExpression(el, ifPos) - } - for _, el := range v.Rhs { - nom.checkExpression(el, ifPos) - } - case *ast.ExprStmt: - nom.checkExpression(v.X, ifPos) - } - } - } - case *ast.LabeledStmt: - nom.checkStatement(v.Stmt, ifPos) - } -} - -func (nom namedOccurrenceMap) checkExpression(candidate ast.Expr, ifPos token.Pos) { - switch v := candidate.(type) { - case *ast.BinaryExpr: - nom.checkExpression(v.X, ifPos) - nom.checkExpression(v.Y, ifPos) - case *ast.CallExpr: - for _, arg := range v.Args { - nom.checkExpression(arg, 
ifPos) - } - nom.checkExpression(v.Fun, ifPos) - if fun, ok := v.Fun.(*ast.SelectorExpr); ok { - nom.checkExpression(fun.X, ifPos) - } - case *ast.CompositeLit: - for _, el := range v.Elts { - switch v := el.(type) { - case *ast.Ident, *ast.CompositeLit: - nom.checkExpression(v, ifPos) - case *ast.KeyValueExpr: - nom.checkExpression(v.Key, ifPos) - nom.checkExpression(v.Value, ifPos) - case *ast.SelectorExpr: - nom.checkExpression(v.X, ifPos) - } - } - case *ast.FuncLit: - for _, el := range v.Body.List { - nom.checkStatement(el, ifPos) - } - case *ast.Ident: - if _, ok := nom[v.Name]; !ok || nom[v.Name].isEmponymousKey(ifPos) { - return - } - - scopeMarker1 := nom[v.Name].getScopeMarkerForPosition(v.Pos()) - - delete(nom[v.Name], scopeMarker1) - - for k := range nom { - for scopeMarker2 := range nom[k] { - if scopeMarker1 == scopeMarker2 { - delete(nom[k], scopeMarker2) - } - } - } - case *ast.StarExpr: - nom.checkExpression(v.X, ifPos) - case *ast.IndexExpr: - nom.checkExpression(v.X, ifPos) - switch index := v.Index.(type) { - case *ast.BinaryExpr: - nom.checkExpression(index.X, ifPos) - case *ast.Ident: - nom.checkExpression(index, ifPos) - } - case *ast.SelectorExpr: - nom.checkExpression(v.X, ifPos) - case *ast.SliceExpr: - nom.checkExpression(v.High, ifPos) - nom.checkExpression(v.Low, ifPos) - nom.checkExpression(v.X, ifPos) - case *ast.TypeAssertExpr: - nom.checkExpression(v.X, ifPos) - case *ast.UnaryExpr: - nom.checkExpression(v.X, ifPos) - } -} - -func isAssign(tok token.Token) bool { - return (tok == token.ASSIGN || - tok == token.ADD_ASSIGN || tok == token.SUB_ASSIGN || - tok == token.MUL_ASSIGN || tok == token.QUO_ASSIGN || tok == token.REM_ASSIGN || - tok == token.AND_ASSIGN || tok == token.OR_ASSIGN || tok == token.XOR_ASSIGN || tok == token.AND_NOT_ASSIGN || - tok == token.SHL_ASSIGN || tok == token.SHR_ASSIGN) -} diff --git a/vendor/github.com/esimonov/ifshort/pkg/analyzer/occurrences.go b/vendor/github.com/esimonov/ifshort/pkg/analyzer/occurrences.go deleted file mode 100644 index 0d3793a57..000000000 --- a/vendor/github.com/esimonov/ifshort/pkg/analyzer/occurrences.go +++ /dev/null @@ -1,268 +0,0 @@ -package analyzer - -import ( - "go/ast" - "go/token" - "time" - - "golang.org/x/tools/go/analysis" -) - -// occurrence is a variable occurrence. -type occurrence struct { - declarationPos token.Pos - ifStmtPos token.Pos -} - -func (occ *occurrence) isComplete() bool { - return occ.ifStmtPos != token.NoPos && occ.declarationPos != token.NoPos -} - -// scopeMarkeredOccurences is a map of scope markers to variable occurrences. -type scopeMarkeredOccurences map[int64]occurrence - -func (smo scopeMarkeredOccurences) getGreatestMarker() int64 { - var maxScopeMarker int64 - - for marker := range smo { - if marker > maxScopeMarker { - maxScopeMarker = marker - } - } - return maxScopeMarker -} - -// find scope marker of the greatest token.Pos that is smaller than provided. -func (smo scopeMarkeredOccurences) getScopeMarkerForPosition(pos token.Pos) int64 { - var m int64 - var foundPos token.Pos - - for marker, occ := range smo { - if occ.declarationPos < pos && occ.declarationPos >= foundPos { - m = marker - foundPos = occ.declarationPos - } - } - return m -} - -func (smo scopeMarkeredOccurences) isEmponymousKey(pos token.Pos) bool { - if pos == token.NoPos { - return false - } - - for _, occ := range smo { - if occ.ifStmtPos == pos { - return true - } - } - return false -} - -// namedOccurrenceMap is a map of variable names to scopeMarkeredOccurences. 
-type namedOccurrenceMap map[string]scopeMarkeredOccurences - -func getNamedOccurrenceMap(fdecl *ast.FuncDecl, pass *analysis.Pass) namedOccurrenceMap { - nom := namedOccurrenceMap(map[string]scopeMarkeredOccurences{}) - - if fdecl == nil || fdecl.Body == nil { - return nom - } - - for _, stmt := range fdecl.Body.List { - switch v := stmt.(type) { - case *ast.AssignStmt: - nom.addFromAssignment(pass, v) - case *ast.IfStmt: - nom.addFromCondition(v) - nom.addFromIfClause(v) - nom.addFromElseClause(v) - } - } - - candidates := namedOccurrenceMap(map[string]scopeMarkeredOccurences{}) - - for varName, markeredOccs := range nom { - for marker, occ := range markeredOccs { - if !occ.isComplete() && !nom.isFoundByScopeMarker(marker) { - continue - } - if _, ok := candidates[varName]; !ok { - candidates[varName] = scopeMarkeredOccurences{ - marker: occ, - } - } else { - candidates[varName][marker] = occ - } - } - } - return candidates -} - -func (nom namedOccurrenceMap) isFoundByScopeMarker(scopeMarker int64) bool { - var i int - - for _, markeredOccs := range nom { - for marker := range markeredOccs { - if marker == scopeMarker { - i++ - } - } - } - return i >= 2 -} - -func (nom namedOccurrenceMap) addFromAssignment(pass *analysis.Pass, assignment *ast.AssignStmt) { - if assignment.Tok != token.DEFINE { - return - } - - scopeMarker := time.Now().UnixNano() - - for i, el := range assignment.Lhs { - ident, ok := el.(*ast.Ident) - if !ok { - continue - } - - if ident.Name == "_" || ident.Obj == nil || isUnshortenableAssignment(ident.Obj.Decl) { - continue - } - - if markeredOccs, ok := nom[ident.Name]; ok { - markeredOccs[scopeMarker] = occurrence{ - declarationPos: ident.Pos(), - } - nom[ident.Name] = markeredOccs - } else { - newOcc := occurrence{} - if areFlagSettingsSatisfied(pass, assignment, i) { - newOcc.declarationPos = ident.Pos() - } - nom[ident.Name] = scopeMarkeredOccurences{scopeMarker: newOcc} - } - } -} - -func isUnshortenableAssignment(decl interface{}) bool { - assign, ok := decl.(*ast.AssignStmt) - if !ok { - return false - } - - for _, el := range assign.Rhs { - u, ok := el.(*ast.UnaryExpr) - if !ok { - continue - } - - if u.Op == token.AND { - if _, ok := u.X.(*ast.CompositeLit); ok { - return true - } - } - } - return false -} - -func areFlagSettingsSatisfied(pass *analysis.Pass, assignment *ast.AssignStmt, i int) bool { - lh := assignment.Lhs[i] - rh := assignment.Rhs[len(assignment.Rhs)-1] - - if len(assignment.Rhs) == len(assignment.Lhs) { - rh = assignment.Rhs[i] - } - - if pass.Fset.Position(rh.End()).Line-pass.Fset.Position(rh.Pos()).Line > maxDeclLines { - return false - } - if int(rh.End()-lh.Pos()) > maxDeclChars { - return false - } - return true -} - -func (nom namedOccurrenceMap) addFromCondition(stmt *ast.IfStmt) { - switch v := stmt.Cond.(type) { - case *ast.BinaryExpr: - for _, v := range [2]ast.Expr{v.X, v.Y} { - switch e := v.(type) { - case *ast.CallExpr: - nom.addFromCallExpr(stmt.If, e) - case *ast.Ident: - nom.addFromIdent(stmt.If, e) - case *ast.SelectorExpr: - nom.addFromIdent(stmt.If, e.X) - } - } - case *ast.CallExpr: - for _, a := range v.Args { - switch e := a.(type) { - case *ast.Ident: - nom.addFromIdent(stmt.If, e) - case *ast.CallExpr: - nom.addFromCallExpr(stmt.If, e) - } - } - case *ast.Ident: - nom.addFromIdent(stmt.If, v) - case *ast.UnaryExpr: - switch e := v.X.(type) { - case *ast.Ident: - nom.addFromIdent(stmt.If, e) - case *ast.SelectorExpr: - nom.addFromIdent(stmt.If, e.X) - } - } -} - -func (nom namedOccurrenceMap) addFromIfClause(stmt 
*ast.IfStmt) { - nom.addFromBlockStmt(stmt.Body, stmt.If) -} - -func (nom namedOccurrenceMap) addFromElseClause(stmt *ast.IfStmt) { - nom.addFromBlockStmt(stmt.Else, stmt.If) -} - -func (nom namedOccurrenceMap) addFromBlockStmt(stmt ast.Stmt, ifPos token.Pos) { - blockStmt, ok := stmt.(*ast.BlockStmt) - if !ok { - return - } - - for _, el := range blockStmt.List { - exptStmt, ok := el.(*ast.ExprStmt) - if !ok { - continue - } - - if callExpr, ok := exptStmt.X.(*ast.CallExpr); ok { - nom.addFromCallExpr(ifPos, callExpr) - } - } -} - -func (nom namedOccurrenceMap) addFromCallExpr(ifPos token.Pos, callExpr *ast.CallExpr) { - for _, arg := range callExpr.Args { - nom.addFromIdent(ifPos, arg) - } -} - -func (nom namedOccurrenceMap) addFromIdent(ifPos token.Pos, v ast.Expr) { - ident, ok := v.(*ast.Ident) - if !ok { - return - } - - if markeredOccs, ok := nom[ident.Name]; ok { - marker := nom[ident.Name].getGreatestMarker() - - occ := markeredOccs[marker] - if occ.isComplete() { - return - } - - occ.ifStmtPos = ifPos - nom[ident.Name][marker] = occ - } -} diff --git a/vendor/github.com/ettle/strcase/.gitignore b/vendor/github.com/ettle/strcase/.gitignore deleted file mode 100644 index 54bc1fbff..000000000 --- a/vendor/github.com/ettle/strcase/.gitignore +++ /dev/null @@ -1,18 +0,0 @@ -# Binaries for programs and plugins -*.exe -*.exe~ -*.dll -*.so -*.dylib - -# Test binary, built with `go test -c` -*.test - -# Output of the go coverage tool, specifically when used with LiteIDE -*.out - -# CPU and memory profiles -*.prof - -# Dependency directories -vendor/ diff --git a/vendor/github.com/ettle/strcase/.golangci.yml b/vendor/github.com/ettle/strcase/.golangci.yml deleted file mode 100644 index b7ce85d42..000000000 --- a/vendor/github.com/ettle/strcase/.golangci.yml +++ /dev/null @@ -1,82 +0,0 @@ -linters-settings: - dupl: - threshold: 100 - gocyclo: - min-complexity: 15 - gocritic: - enabled-tags: - - diagnostic - - experimental - - opinionated - - performance - - style - disabled-checks: - - ifElseChain - - whyNoLint - - wrapperFunc - govet: - check-shadowing: true - lll: - line-length: 140 - maligned: - suggest-new: true - misspell: - locale: US - nolintlint: - allow-leading-space: false - allow-unused: false - require-specific: true - - require-explanation: true - allow-no-explanation: - - gocyclo - -linters: - disable-all: true - enable: - - bodyclose - - depguard - - dogsled - - dupl - - errcheck - - gochecknoinits - - gocritic - - gocyclo - - gofmt - - goimports - - goprintffuncname - - gosec - - gosimple - - govet - - ineffassign - - lll - - misspell - - nakedret - - nolintlint - - revive - - rowserrcheck - - staticcheck - - stylecheck - - typecheck - - unconvert - - unparam - - unused - - whitespace - - # don't enable: - # - asciicheck - # - gochecknoglobals - # - gocognit - # - godot - # - godox - # - goerr113 - # - maligned - # - nestif - # - prealloc - # - testpackage - # - wsl - -issues: - exclude-use-default: false - max-issues-per-linter: 0 - max-same-issues: 0 diff --git a/vendor/github.com/ettle/strcase/.readme.tmpl b/vendor/github.com/ettle/strcase/.readme.tmpl deleted file mode 100644 index 4d7a894f0..000000000 --- a/vendor/github.com/ettle/strcase/.readme.tmpl +++ /dev/null @@ -1,80 +0,0 @@ -{{with .PDoc}} -# Go Strcase - -[![Go Report Card](https://goreportcard.com/badge/github.com/ettle/strcase)](https://goreportcard.com/report/github.com/ettle/strcase) -[![Coverage](http://gocover.io/_badge/github.com/ettle/strcase?0)](http://gocover.io/github.com/ettle/strcase) 
-[![GoDoc](https://godoc.org/github.com/ettle/strcase?status.svg)](https://pkg.go.dev/github.com/ettle/strcase) - -Convert strings to `snake_case`, `camelCase`, `PascalCase`, `kebab-case` and more! Supports Go initialisms, customization, and Unicode. - -`import "{{.ImportPath}}"` - -## Overview -{{comment_md .Doc}} -{{example_html $ ""}} - -## Index{{if .Consts}} -* [Constants](#pkg-constants){{end}}{{if .Vars}} -* [Variables](#pkg-variables){{end}}{{- range .Funcs -}}{{$name_html := html .Name}} -* [{{node_html $ .Decl false | sanitize}}](#func-{{$name_html}}){{- end}}{{- range .Types}}{{$tname_html := html .Name}} -* [type {{$tname_html}}](#type-{{$tname_html}}){{- range .Funcs}}{{$name_html := html .Name}} - * [{{node_html $ .Decl false | sanitize}}](#func-{{$name_html}}){{- end}}{{- range .Methods}}{{$name_html := html .Name}} - * [{{node_html $ .Decl false | sanitize}}](#type-{{$tname_html}}.{{$name_html}}){{- end}}{{- end}}{{- if $.Notes}}{{- range $marker, $item := $.Notes}} -* [{{noteTitle $marker | html}}s](#pkg-note-{{$marker}}){{end}}{{end}} -{{if $.Examples}} -#### Examples{{- range $.Examples}} -* [{{example_name .Name}}](#example_{{.Name}}){{- end}}{{- end}} - -{{with .Consts}}## Constants -{{range .}}{{node $ .Decl | pre}} -{{comment_md .Doc}}{{end}}{{end}} -{{with .Vars}}## Variables -{{range .}}{{node $ .Decl | pre}} -{{comment_md .Doc}}{{end}}{{end}} - -{{range .Funcs}}{{$name_html := html .Name}}## func [{{$name_html}}]({{gh_url $ .Decl}}) -{{node $ .Decl | pre}} -{{comment_md .Doc}} -{{example_html $ .Name}} -{{callgraph_html $ "" .Name}}{{end}} -{{range .Types}}{{$tname := .Name}}{{$tname_html := html .Name}}## type [{{$tname_html}}]({{gh_url $ .Decl}}) -{{node $ .Decl | pre}} -{{comment_md .Doc}}{{range .Consts}} -{{node $ .Decl | pre }} -{{comment_md .Doc}}{{end}}{{range .Vars}} -{{node $ .Decl | pre }} -{{comment_md .Doc}}{{end}} - -{{example_html $ $tname}} -{{implements_html $ $tname}} -{{methodset_html $ $tname}} - -{{range .Funcs}}{{$name_html := html .Name}}### func [{{$name_html}}]({{gh_url $ .Decl}}) -{{node $ .Decl | pre}} -{{comment_md .Doc}} -{{example_html $ .Name}}{{end}} -{{callgraph_html $ "" .Name}} - -{{range .Methods}}{{$name_html := html .Name}}### func ({{md .Recv}}) [{{$name_html}}]({{gh_url $ .Decl}}) -{{node $ .Decl | pre}} -{{comment_md .Doc}} -{{$name := printf "%s_%s" $tname .Name}}{{example_html $ $name}} -{{callgraph_html $ .Recv .Name}} -{{end}}{{end}}{{end}} - -{{with $.Notes}} -{{range $marker, $content := .}} -## {{noteTitle $marker | html}}s - -{{end}} -{{end}} -{{if .Dirs}} -## Subdirectories -{{range $.Dirs.List}} -{{indent .Depth}}* [{{.Name | html}}]({{print "./" .Path}}){{if .Synopsis}} {{ .Synopsis}}{{end -}} -{{end}} -{{end}} diff --git a/vendor/github.com/ettle/strcase/LICENSE b/vendor/github.com/ettle/strcase/LICENSE deleted file mode 100644 index 4f0116be2..000000000 --- a/vendor/github.com/ettle/strcase/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -MIT License - -Copyright (c) 2020 Liyan David Chang - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or 
substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. diff --git a/vendor/github.com/ettle/strcase/Makefile b/vendor/github.com/ettle/strcase/Makefile deleted file mode 100644 index ac98b4aa5..000000000 --- a/vendor/github.com/ettle/strcase/Makefile +++ /dev/null @@ -1,19 +0,0 @@ -.PHONY: benchmark docs lint test - -docs: - which godoc2ghmd || go get github.com/DevotedHealth/godoc2ghmd - godoc2ghmd -template .readme.tmpl github.com/ettle/strcase > README.md - go mod tidy - -test: - go test -cover ./... - -lint: - which golangci-lint || go get github.com/golangci/golangci-lint/cmd/golangci-lint@v1.50.1 - golangci-lint run - golangci-lint run benchmark/*.go - go mod tidy - -benchmark: - cd benchmark && go test -bench=. -test.benchmem - go mod tidy diff --git a/vendor/github.com/ettle/strcase/README.md b/vendor/github.com/ettle/strcase/README.md deleted file mode 100644 index a984da80d..000000000 --- a/vendor/github.com/ettle/strcase/README.md +++ /dev/null @@ -1,553 +0,0 @@ - -# Go Strcase - -[![Go Report Card](https://goreportcard.com/badge/github.com/ettle/strcase)](https://goreportcard.com/report/github.com/ettle/strcase) -[![Coverage](http://gocover.io/_badge/github.com/ettle/strcase?0)](http://gocover.io/github.com/ettle/strcase) -[![GoDoc](https://godoc.org/github.com/ettle/strcase?status.svg)](https://pkg.go.dev/github.com/ettle/strcase) - -Convert strings to `snake_case`, `camelCase`, `PascalCase`, `kebab-case` and more! Supports Go initialisms, customization, and Unicode. - -`import "github.com/ettle/strcase"` - -## Overview -Package strcase is a package for converting strings into various word cases -(e.g. snake_case, camelCase) - - - go get -u github.com/ettle/strcase - -Example usage - - - strcase.ToSnake("Hello World") // hello_world - strcase.ToSNAKE("Hello World") // HELLO_WORLD - - strcase.ToKebab("helloWorld") // hello-world - strcase.ToKEBAB("helloWorld") // HELLO-WORLD - - strcase.ToPascal("hello-world") // HelloWorld - strcase.ToCamel("hello-world") // helloWorld - - // Handle odd cases - strcase.ToSnake("FOOBar") // foo_bar - - // Support Go initialisms - strcase.ToGoPascal("http_response") // HTTPResponse - - // Specify case and delimiter - strcase.ToCase("HelloWorld", strcase.UpperCase, '.') // HELLO.WORLD - -## Why this package - -String strcase is pretty straight forward and there are a number of methods to -do it. This package is fully featured, more customizable, better tested, and -faster than other packages and what you would probably whip up yourself. - -### Unicode support - -We work for with unicode strings and pay very little performance penalty for it -as we optimized for the common use case of ASCII only strings. - -### Customization - -You can create a custom caser that changes the behavior to what you want. This -customization also reduces the pressure for us to change the default behavior -which means that things are more stable for everyone involved. The goal is to -make the common path easy and fast, while making the uncommon path possible. - - - c := NewCaser( - // Use Go's default initialisms e.g. 
ID, HTML - true, - // Override initialisms (e.g. don't initialize HTML but initialize SSL - map[string]bool{"SSL": true, "HTML": false}, - // Write your own custom SplitFn - // - NewSplitFn( - []rune{'*', '.', ','}, - SplitCase, - SplitAcronym, - PreserveNumberFormatting, - SplitBeforeNumber, - SplitAfterNumber, - )) - assert.Equal(t, "http_200", c.ToSnake("http200")) - -### Initialism support - -By default, we use the golint intialisms list. You can customize and override -the initialisms if you wish to add additional ones, such as "SSL" or "CMS" or -domain specific ones to your industry. - - - ToGoPascal("http_response") // HTTPResponse - ToGoSnake("http_response") // HTTP_response - -### Test coverage - -We have a wide ranging test suite to make sure that we understand our behavior. -Test coverage isn't everything, but we aim for 100% coverage. - -### Fast - -Optimized to reduce memory allocations with Builder. Benchmarked and optimized -around common cases. - -We're on par with the fastest packages (that have less features) and much -faster than others. We also benchmarked against code snippets. Using string -builders to reduce memory allocation and reordering boolean checks for the -common cases have a large performance impact. - -Hopefully I was fair to each library and happy to rerun benchmarks differently -or reword my commentary based on suggestions or updates. - - - // This package - faster then almost all libraries - // Initialisms are more complicated and slightly slower, but still fast - BenchmarkToTitle-96 9617142 125.7 ns/op 16 B/op 1 allocs/op - BenchmarkToSnake-96 10659919 120.7 ns/op 16 B/op 1 allocs/op - BenchmarkToSNAKE-96 9018282 126.4 ns/op 16 B/op 1 allocs/op - BenchmarkToGoSnake-96 4903687 254.5 ns/op 26 B/op 4 allocs/op - BenchmarkToCustomCaser-96 4434489 265.0 ns/op 28 B/op 4 allocs/op - - // Segment has very fast snake case and camel case libraries - // No features or customization, but very very fast - BenchmarkSegment-96 33625734 35.54 ns/op 16 B/op 1 allocs/op - - // Iancoleman has gotten some performance improvements, but remains - // without unicode support and lacks fine-grained customization - BenchmarkToSnakeIan-96 13141522 92.99 ns/op 16 B/op 1 allocs/op - - // Stdlib strings.Title is deprecated; using golang.org/x.text - BenchmarkGolangOrgXTextCases-96 4665676 262.5 ns/op 272 B/op 2 allocs/op - - // Other libraries or code snippets - // - Most are slower, by up to an order of magnitude - // - No support for initialisms or customization - // - Some generate only camelCase or snake_case - // - Many lack unicode support - BenchmarkToSnakeStoewer-96 8095468 148.9 ns/op 64 B/op 2 allocs/op - // Copying small rune arrays is slow - BenchmarkToSnakeSiongui-96 2912593 401.7 ns/op 112 B/op 19 allocs/op - BenchmarkGoValidator-96 3493800 342.6 ns/op 184 B/op 9 allocs/op - // String alloction is slow - BenchmarkToSnakeFatih-96 1282648 945.1 ns/op 616 B/op 26 allocs/op - // Regexp is slow - BenchmarkToSnakeGolangPrograms-96 778674 1495 ns/op 227 B/op 11 allocs/op - - // These results aren't a surprise - my initial version of this library was - // painfully slow. I think most of us, without spending some time with - // profilers and benchmarks, would write also something on the slower side. - -### Zero dependencies - -That's right - zero. We only import the Go standard library. No hassles with -dependencies, licensing, security alerts. 
- -## Why not this package - -If every nanosecond matters and this is used in a tight loop, use segment.io's -libraries (https://github.com/segmentio/go-snakecase and -https://github.com/segmentio/go-camelcase). They lack features, but make up for -it by being blazing fast. - -## Migrating from other packages - -If you are migrating from from another package, you may find slight differences -in output. To reduce the delta, you may find it helpful to use the following -custom casers to mimic the behavior of the other package. - - - // From https://github.com/iancoleman/strcase - var c = NewCaser(false, nil, NewSplitFn([]rune{'_', '-', '.'}, SplitCase, SplitAcronym, SplitBeforeNumber)) - - // From https://github.com/stoewer/go-strcase - var c = NewCaser(false, nil, NewSplitFn([]rune{'_', '-'}, SplitCase), SplitAcronym) - - - - -## Index -* [func ToCamel(s string) string](#func-ToCamel) -* [func ToCase(s string, wordCase WordCase, delimiter rune) string](#func-ToCase) -* [func ToGoCamel(s string) string](#func-ToGoCamel) -* [func ToGoCase(s string, wordCase WordCase, delimiter rune) string](#func-ToGoCase) -* [func ToGoKebab(s string) string](#func-ToGoKebab) -* [func ToGoPascal(s string) string](#func-ToGoPascal) -* [func ToGoSnake(s string) string](#func-ToGoSnake) -* [func ToKEBAB(s string) string](#func-ToKEBAB) -* [func ToKebab(s string) string](#func-ToKebab) -* [func ToPascal(s string) string](#func-ToPascal) -* [func ToSNAKE(s string) string](#func-ToSNAKE) -* [func ToSnake(s string) string](#func-ToSnake) -* [type Caser](#type-Caser) - * [func NewCaser(goInitialisms bool, initialismOverrides map[string]bool, splitFn SplitFn) *Caser](#func-NewCaser) - * [func (c *Caser) ToCamel(s string) string](#type-Caser.ToCamel) - * [func (c *Caser) ToCase(s string, wordCase WordCase, delimiter rune) string](#type-Caser.ToCase) - * [func (c *Caser) ToKEBAB(s string) string](#type-Caser.ToKEBAB) - * [func (c *Caser) ToKebab(s string) string](#type-Caser.ToKebab) - * [func (c *Caser) ToPascal(s string) string](#type-Caser.ToPascal) - * [func (c *Caser) ToSNAKE(s string) string](#type-Caser.ToSNAKE) - * [func (c *Caser) ToSnake(s string) string](#type-Caser.ToSnake) -* [type SplitAction](#type-SplitAction) -* [type SplitFn](#type-SplitFn) - * [func NewSplitFn(delimiters []rune, splitOptions ...SplitOption) SplitFn](#func-NewSplitFn) -* [type SplitOption](#type-SplitOption) -* [type WordCase](#type-WordCase) - - - - - -## func [ToCamel](./strcase.go#L57) -``` go -func ToCamel(s string) string -``` -ToCamel returns words in camelCase (capitalized words concatenated together, with first word lower case). -Also known as lowerCamelCase or mixedCase. - - - -## func [ToCase](./strcase.go#L72) -``` go -func ToCase(s string, wordCase WordCase, delimiter rune) string -``` -ToCase returns words in given case and delimiter. - - - -## func [ToGoCamel](./strcase.go#L67) -``` go -func ToGoCamel(s string) string -``` -ToGoCamel returns words in camelCase (capitalized words concatenated together, with first word lower case). -Also known as lowerCamelCase or mixedCase. - -Respects Go's common initialisms, but first word remains lowercased which is -important for code generator use cases (e.g. toJson -> toJSON, httpResponse --> httpResponse). - - - -## func [ToGoCase](./strcase.go#L79) -``` go -func ToGoCase(s string, wordCase WordCase, delimiter rune) string -``` -ToGoCase returns words in given case and delimiter. - -Respects Go's common initialisms (e.g. httpResponse -> HTTPResponse). 
- - - -## func [ToGoKebab](./strcase.go#L31) -``` go -func ToGoKebab(s string) string -``` -ToGoKebab returns words in kebab-case (lower case words with dashes). -Also known as dash-case. - -Respects Go's common initialisms (e.g. http-response -> HTTP-response). - - - -## func [ToGoPascal](./strcase.go#L51) -``` go -func ToGoPascal(s string) string -``` -ToGoPascal returns words in PascalCase (capitalized words concatenated together). -Also known as UpperPascalCase. - -Respects Go's common initialisms (e.g. HttpResponse -> HTTPResponse). - - - -## func [ToGoSnake](./strcase.go#L11) -``` go -func ToGoSnake(s string) string -``` -ToGoSnake returns words in snake_case (lower case words with underscores). - -Respects Go's common initialisms (e.g. http_response -> HTTP_response). - - - -## func [ToKEBAB](./strcase.go#L37) -``` go -func ToKEBAB(s string) string -``` -ToKEBAB returns words in KEBAB-CASE (upper case words with dashes). -Also known as SCREAMING-KEBAB-CASE or SCREAMING-DASH-CASE. - - - -## func [ToKebab](./strcase.go#L23) -``` go -func ToKebab(s string) string -``` -ToKebab returns words in kebab-case (lower case words with dashes). -Also known as dash-case. - - - -## func [ToPascal](./strcase.go#L43) -``` go -func ToPascal(s string) string -``` -ToPascal returns words in PascalCase (capitalized words concatenated together). -Also known as UpperPascalCase. - - - -## func [ToSNAKE](./strcase.go#L17) -``` go -func ToSNAKE(s string) string -``` -ToSNAKE returns words in SNAKE_CASE (upper case words with underscores). -Also known as SCREAMING_SNAKE_CASE or UPPER_CASE. - - - -## func [ToSnake](./strcase.go#L4) -``` go -func ToSnake(s string) string -``` -ToSnake returns words in snake_case (lower case words with underscores). - - - - -## type [Caser](./caser.go#L4-L7) -``` go -type Caser struct { - // contains filtered or unexported fields -} - -``` -Caser allows for customization of parsing and intialisms - - - - - - - -### func [NewCaser](./caser.go#L24) -``` go -func NewCaser(goInitialisms bool, initialismOverrides map[string]bool, splitFn SplitFn) *Caser -``` -NewCaser returns a configured Caser. - -A Caser should be created when you want fine grained control over how the words are split. - - - Notes on function arguments - - goInitialisms: Whether to use Golint's intialisms - - initialismOverrides: A mapping of extra initialisms - Keys must be in ALL CAPS. Merged with Golint's if goInitialisms is set. - Setting a key to false will override Golint's. - - splitFn: How to separate words - Override the default split function. Consider using NewSplitFn to - configure one instead of writing your own. - - - - - -### func (\*Caser) [ToCamel](./caser.go#L80) -``` go -func (c *Caser) ToCamel(s string) string -``` -ToCamel returns words in camelCase (capitalized words concatenated together, with first word lower case). -Also known as lowerCamelCase or mixedCase. - - - - -### func (\*Caser) [ToCase](./caser.go#L85) -``` go -func (c *Caser) ToCase(s string, wordCase WordCase, delimiter rune) string -``` -ToCase returns words with a given case and delimiter. - - - - -### func (\*Caser) [ToKEBAB](./caser.go#L68) -``` go -func (c *Caser) ToKEBAB(s string) string -``` -ToKEBAB returns words in KEBAB-CASE (upper case words with dashes). -Also known as SCREAMING-KEBAB-CASE or SCREAMING-DASH-CASE. - - - - -### func (\*Caser) [ToKebab](./caser.go#L62) -``` go -func (c *Caser) ToKebab(s string) string -``` -ToKebab returns words in kebab-case (lower case words with dashes). -Also known as dash-case. 
- - - - -### func (\*Caser) [ToPascal](./caser.go#L74) -``` go -func (c *Caser) ToPascal(s string) string -``` -ToPascal returns words in PascalCase (capitalized words concatenated together). -Also known as UpperPascalCase. - - - - -### func (\*Caser) [ToSNAKE](./caser.go#L56) -``` go -func (c *Caser) ToSNAKE(s string) string -``` -ToSNAKE returns words in SNAKE_CASE (upper case words with underscores). -Also known as SCREAMING_SNAKE_CASE or UPPER_CASE. - - - - -### func (\*Caser) [ToSnake](./caser.go#L50) -``` go -func (c *Caser) ToSnake(s string) string -``` -ToSnake returns words in snake_case (lower case words with underscores). - - - - -## type [SplitAction](./split.go#L111) -``` go -type SplitAction int -``` -SplitAction defines if and how to split a string - - -``` go -const ( - // Noop - Continue to next character - Noop SplitAction = iota - // Split - Split between words - // e.g. to split between wordsWithoutDelimiters - Split - // SkipSplit - Split the word and drop the character - // e.g. to split words with delimiters - SkipSplit - // Skip - Remove the character completely - Skip -) -``` - - - - - - - - - -## type [SplitFn](./split.go#L6) -``` go -type SplitFn func(prev, curr, next rune) SplitAction -``` -SplitFn defines how to split a string into words - - - - - - - -### func [NewSplitFn](./split.go#L15-L18) -``` go -func NewSplitFn( - delimiters []rune, - splitOptions ...SplitOption, -) SplitFn -``` -NewSplitFn returns a SplitFn based on the options provided. - -NewSplitFn covers the majority of common options that other strcase -libraries provide and should allow you to simply create a custom caser. -For more complicated use cases, feel free to write your own SplitFn - - - - - -## type [SplitOption](./split.go#L94) -``` go -type SplitOption int -``` -SplitOption are options that allow for configuring NewSplitFn - - -``` go -const ( - // SplitCase - FooBar -> Foo_Bar - SplitCase SplitOption = iota - // SplitAcronym - FOOBar -> Foo_Bar - // It won't preserve FOO's case. If you want, you can set the Caser's initialisms so FOO will be in all caps - SplitAcronym - // SplitBeforeNumber - port80 -> port_80 - SplitBeforeNumber - // SplitAfterNumber - 200status -> 200_status - SplitAfterNumber - // PreserveNumberFormatting - a.b.2,000.3.c -> a_b_2,000.3_c - PreserveNumberFormatting -) -``` - - - - - - - - - -## type [WordCase](./convert.go#L6) -``` go -type WordCase int -``` -WordCase is an enumeration of the ways to format a word. - - -``` go -const ( - // Original - Preserve the original input strcase - Original WordCase = iota - // LowerCase - All letters lower cased (example) - LowerCase - // UpperCase - All letters upper cased (EXAMPLE) - UpperCase - // TitleCase - Only first letter upper cased (Example) - TitleCase - // CamelCase - TitleCase except lower case first word (exampleText) - // Notably, even if the first word is an initialism, it will be lower - // cased. This is important for code generators where capital letters - // mean exported functions. i.e. 
jsonString(), not JSONString() - CamelCase -) -``` - - - - - - - - - - - - - diff --git a/vendor/github.com/ettle/strcase/assert.go b/vendor/github.com/ettle/strcase/assert.go deleted file mode 100644 index 09344e40f..000000000 --- a/vendor/github.com/ettle/strcase/assert.go +++ /dev/null @@ -1,24 +0,0 @@ -package strcase - -// We use a lightweight replacement for testify/assert to reduce dependencies - -// testingT interface allows us to test our assert functions -type testingT interface { - Logf(format string, args ...interface{}) - Fail() -} - -// assertTrue will fail if the value is not true -func assertTrue(t testingT, value bool) { - if !value { - t.Fail() - } -} - -// assertEqual will fail if the two strings are not equal -func assertEqual(t testingT, expected, actual string) { - if expected != actual { - t.Logf("Expected: %s Actual: %s", expected, actual) - t.Fail() - } -} diff --git a/vendor/github.com/ettle/strcase/caser.go b/vendor/github.com/ettle/strcase/caser.go deleted file mode 100644 index 2e7eb955b..000000000 --- a/vendor/github.com/ettle/strcase/caser.go +++ /dev/null @@ -1,87 +0,0 @@ -package strcase - -// Caser allows for customization of parsing and intialisms -type Caser struct { - initialisms map[string]bool - splitFn SplitFn -} - -// NewCaser returns a configured Caser. -// -// A Caser should be created when you want fine grained control over how the words are split. -// -// Notes on function arguments -// -// goInitialisms: Whether to use Golint's intialisms -// -// initialismOverrides: A mapping of extra initialisms -// Keys must be in ALL CAPS. Merged with Golint's if goInitialisms is set. -// Setting a key to false will override Golint's. -// -// splitFn: How to separate words -// Override the default split function. Consider using NewSplitFn to -// configure one instead of writing your own. -func NewCaser(goInitialisms bool, initialismOverrides map[string]bool, splitFn SplitFn) *Caser { - c := &Caser{ - initialisms: golintInitialisms, - splitFn: splitFn, - } - - if c.splitFn == nil { - c.splitFn = defaultSplitFn - } - - if goInitialisms && initialismOverrides != nil { - c.initialisms = map[string]bool{} - for k, v := range golintInitialisms { - c.initialisms[k] = v - } - for k, v := range initialismOverrides { - c.initialisms[k] = v - } - } else if !goInitialisms { - c.initialisms = initialismOverrides - } - - return c -} - -// ToSnake returns words in snake_case (lower case words with underscores). -func (c *Caser) ToSnake(s string) string { - return convert(s, c.splitFn, '_', LowerCase, c.initialisms) -} - -// ToSNAKE returns words in SNAKE_CASE (upper case words with underscores). -// Also known as SCREAMING_SNAKE_CASE or UPPER_CASE. -func (c *Caser) ToSNAKE(s string) string { - return convert(s, c.splitFn, '_', UpperCase, c.initialisms) -} - -// ToKebab returns words in kebab-case (lower case words with dashes). -// Also known as dash-case. -func (c *Caser) ToKebab(s string) string { - return convert(s, c.splitFn, '-', LowerCase, c.initialisms) -} - -// ToKEBAB returns words in KEBAB-CASE (upper case words with dashes). -// Also known as SCREAMING-KEBAB-CASE or SCREAMING-DASH-CASE. -func (c *Caser) ToKEBAB(s string) string { - return convert(s, c.splitFn, '-', UpperCase, c.initialisms) -} - -// ToPascal returns words in PascalCase (capitalized words concatenated together). -// Also known as UpperPascalCase. 
-func (c *Caser) ToPascal(s string) string { - return convert(s, c.splitFn, '\x00', TitleCase, c.initialisms) -} - -// ToCamel returns words in camelCase (capitalized words concatenated together, with first word lower case). -// Also known as lowerCamelCase or mixedCase. -func (c *Caser) ToCamel(s string) string { - return convert(s, c.splitFn, '\x00', CamelCase, c.initialisms) -} - -// ToCase returns words with a given case and delimiter. -func (c *Caser) ToCase(s string, wordCase WordCase, delimiter rune) string { - return convert(s, c.splitFn, delimiter, wordCase, c.initialisms) -} diff --git a/vendor/github.com/ettle/strcase/convert.go b/vendor/github.com/ettle/strcase/convert.go deleted file mode 100644 index cb901d079..000000000 --- a/vendor/github.com/ettle/strcase/convert.go +++ /dev/null @@ -1,306 +0,0 @@ -package strcase - -import "strings" - -// WordCase is an enumeration of the ways to format a word. -type WordCase int - -const ( - // Original - Preserve the original input strcase - Original WordCase = iota - // LowerCase - All letters lower cased (example) - LowerCase - // UpperCase - All letters upper cased (EXAMPLE) - UpperCase - // TitleCase - Only first letter upper cased (Example) - TitleCase - // CamelCase - TitleCase except lower case first word (exampleText) - // Notably, even if the first word is an initialism, it will be lower - // cased. This is important for code generators where capital letters - // mean exported functions. i.e. jsonString(), not JSONString() - CamelCase -) - -// We have 3 convert functions for performance reasons -// The general convert could handle everything, but is not optimized -// -// The other two functions are optimized for the general use cases - that is the non-custom caser functions -// Case 1: Any Case and supports Go Initialisms -// Case 2: UpperCase words, which don't need to support initialisms since everything is in upper case - -// convertWithoutInitialims only works for to UpperCase and LowerCase -// -//nolint:gocyclo -func convertWithoutInitialisms(input string, delimiter rune, wordCase WordCase) string { - input = strings.TrimSpace(input) - runes := []rune(input) - if len(runes) == 0 { - return "" - } - - var b strings.Builder - b.Grow(len(input) + 4) // In case we need to write delimiters where they weren't before - - var prev, curr rune - next := runes[0] // 0 length will have already returned so safe to index - inWord := false - firstWord := true - for i := 0; i < len(runes); i++ { - prev = curr - curr = next - if i+1 == len(runes) { - next = 0 - } else { - next = runes[i+1] - } - - switch defaultSplitFn(prev, curr, next) { - case SkipSplit: - if inWord && delimiter != 0 { - b.WriteRune(delimiter) - } - inWord = false - continue - case Split: - if inWord && delimiter != 0 { - b.WriteRune(delimiter) - } - inWord = false - } - switch wordCase { - case UpperCase: - b.WriteRune(toUpper(curr)) - case LowerCase: - b.WriteRune(toLower(curr)) - case TitleCase: - if inWord { - b.WriteRune(toLower(curr)) - } else { - b.WriteRune(toUpper(curr)) - } - case CamelCase: - if inWord { - b.WriteRune(toLower(curr)) - } else if firstWord { - b.WriteRune(toLower(curr)) - firstWord = false - } else { - b.WriteRune(toUpper(curr)) - } - default: - // Must be original case - b.WriteRune(curr) - } - inWord = true - } - return b.String() -} - -// convertWithGoInitialisms changes a input string to a certain case with a -// delimiter, respecting go initialisms but not skip runes -// -//nolint:gocyclo -func convertWithGoInitialisms(input string, 
delimiter rune, wordCase WordCase) string { - input = strings.TrimSpace(input) - runes := []rune(input) - if len(runes) == 0 { - return "" - } - - var b strings.Builder - b.Grow(len(input) + 4) // In case we need to write delimiters where they weren't before - - firstWord := true - - addWord := func(start, end int) { - if start == end { - return - } - - if !firstWord && delimiter != 0 { - b.WriteRune(delimiter) - } - - // Don't bother with initialisms if the word is longer than 5 - // A quick proxy to avoid the extra memory allocations - if end-start <= 5 { - var word strings.Builder - word.Grow(end - start) - for i := start; i < end; i++ { - word.WriteRune(toUpper(runes[i])) - } - w := word.String() - if golintInitialisms[w] { - if !firstWord || wordCase != CamelCase { - b.WriteString(w) - firstWord = false - return - } - } - } - - for i := start; i < end; i++ { - r := runes[i] - switch wordCase { - case UpperCase: - panic("use convertWithoutInitialisms instead") - case LowerCase: - b.WriteRune(toLower(r)) - case TitleCase: - if i == start { - b.WriteRune(toUpper(r)) - } else { - b.WriteRune(toLower(r)) - } - case CamelCase: - if !firstWord && i == start { - b.WriteRune(toUpper(r)) - } else { - b.WriteRune(toLower(r)) - } - default: - b.WriteRune(r) - } - } - firstWord = false - } - - var prev, curr rune - next := runes[0] // 0 length will have already returned so safe to index - wordStart := 0 - for i := 0; i < len(runes); i++ { - prev = curr - curr = next - if i+1 == len(runes) { - next = 0 - } else { - next = runes[i+1] - } - - switch defaultSplitFn(prev, curr, next) { - case Split: - addWord(wordStart, i) - wordStart = i - case SkipSplit: - addWord(wordStart, i) - wordStart = i + 1 - } - } - - if wordStart != len(runes) { - addWord(wordStart, len(runes)) - } - return b.String() -} - -// convert changes a input string to a certain case with a delimiter, -// respecting arbitrary initialisms and skip characters -// -//nolint:gocyclo -func convert(input string, fn SplitFn, delimiter rune, wordCase WordCase, - initialisms map[string]bool) string { - input = strings.TrimSpace(input) - runes := []rune(input) - if len(runes) == 0 { - return "" - } - - var b strings.Builder - b.Grow(len(input) + 4) // In case we need to write delimiters where they weren't before - - firstWord := true - var skipIndexes []int - - addWord := func(start, end int) { - // If you have nothing good to say, say nothing at all - if start == end || len(skipIndexes) == end-start { - skipIndexes = nil - return - } - - // If you have something to say, start with a delimiter - if !firstWord && delimiter != 0 { - b.WriteRune(delimiter) - } - - // Check if you're an initialism - // Note - we don't check skip characters here since initialisms - // will probably never have junk characters in between - // I'm open to it if there is a use case - if initialisms != nil { - var word strings.Builder - word.Grow(end - start) - for i := start; i < end; i++ { - word.WriteRune(toUpper(runes[i])) - } - w := word.String() - if initialisms[w] { - if !firstWord || wordCase != CamelCase { - b.WriteString(w) - firstWord = false - return - } - } - } - - skipIdx := 0 - for i := start; i < end; i++ { - if len(skipIndexes) > 0 && skipIdx < len(skipIndexes) && i == skipIndexes[skipIdx] { - skipIdx++ - continue - } - r := runes[i] - switch wordCase { - case UpperCase: - b.WriteRune(toUpper(r)) - case LowerCase: - b.WriteRune(toLower(r)) - case TitleCase: - if i == start { - b.WriteRune(toUpper(r)) - } else { - b.WriteRune(toLower(r)) - } - case 
CamelCase: - if !firstWord && i == start { - b.WriteRune(toUpper(r)) - } else { - b.WriteRune(toLower(r)) - } - default: - b.WriteRune(r) - } - } - firstWord = false - skipIndexes = nil - } - - var prev, curr rune - next := runes[0] // 0 length will have already returned so safe to index - wordStart := 0 - for i := 0; i < len(runes); i++ { - prev = curr - curr = next - if i+1 == len(runes) { - next = 0 - } else { - next = runes[i+1] - } - - switch fn(prev, curr, next) { - case Skip: - skipIndexes = append(skipIndexes, i) - case Split: - addWord(wordStart, i) - wordStart = i - case SkipSplit: - addWord(wordStart, i) - wordStart = i + 1 - } - } - - if wordStart != len(runes) { - addWord(wordStart, len(runes)) - } - return b.String() -} diff --git a/vendor/github.com/ettle/strcase/doc.go b/vendor/github.com/ettle/strcase/doc.go deleted file mode 100644 index c3bf14a8f..000000000 --- a/vendor/github.com/ettle/strcase/doc.go +++ /dev/null @@ -1,150 +0,0 @@ -/* -Package strcase is a package for converting strings into various word cases -(e.g. snake_case, camelCase) - - go get -u github.com/ettle/strcase - -Example usage - - strcase.ToSnake("Hello World") // hello_world - strcase.ToSNAKE("Hello World") // HELLO_WORLD - - strcase.ToKebab("helloWorld") // hello-world - strcase.ToKEBAB("helloWorld") // HELLO-WORLD - - strcase.ToPascal("hello-world") // HelloWorld - strcase.ToCamel("hello-world") // helloWorld - - // Handle odd cases - strcase.ToSnake("FOOBar") // foo_bar - - // Support Go initialisms - strcase.ToGoPascal("http_response") // HTTPResponse - - // Specify case and delimiter - strcase.ToCase("HelloWorld", strcase.UpperCase, '.') // HELLO.WORLD - -## Why this package - -String strcase is pretty straight forward and there are a number of methods to -do it. This package is fully featured, more customizable, better tested, and -faster than other packages and what you would probably whip up yourself. - -### Unicode support - -We work for with unicode strings and pay very little performance penalty for it -as we optimized for the common use case of ASCII only strings. - -### Customization - -You can create a custom caser that changes the behavior to what you want. This -customization also reduces the pressure for us to change the default behavior -which means that things are more stable for everyone involved. The goal is to -make the common path easy and fast, while making the uncommon path possible. - - c := NewCaser( - // Use Go's default initialisms e.g. ID, HTML - true, - // Override initialisms (e.g. don't initialize HTML but initialize SSL - map[string]bool{"SSL": true, "HTML": false}, - // Write your own custom SplitFn - // - NewSplitFn( - []rune{'*', '.', ','}, - SplitCase, - SplitAcronym, - PreserveNumberFormatting, - SplitBeforeNumber, - SplitAfterNumber, - )) - assert.Equal(t, "http_200", c.ToSnake("http200")) - -### Initialism support - -By default, we use the golint intialisms list. You can customize and override -the initialisms if you wish to add additional ones, such as "SSL" or "CMS" or -domain specific ones to your industry. - - ToGoPascal("http_response") // HTTPResponse - ToGoSnake("http_response") // HTTP_response - -### Test coverage - -We have a wide ranging test suite to make sure that we understand our behavior. -Test coverage isn't everything, but we aim for 100% coverage. - -### Fast - -Optimized to reduce memory allocations with Builder. Benchmarked and optimized -around common cases. 
- -We're on par with the fastest packages (that have less features) and much -faster than others. We also benchmarked against code snippets. Using string -builders to reduce memory allocation and reordering boolean checks for the -common cases have a large performance impact. - -Hopefully I was fair to each library and happy to rerun benchmarks differently -or reword my commentary based on suggestions or updates. - - // This package - faster then almost all libraries - // Initialisms are more complicated and slightly slower, but still fast - BenchmarkToTitle-96 9617142 125.7 ns/op 16 B/op 1 allocs/op - BenchmarkToSnake-96 10659919 120.7 ns/op 16 B/op 1 allocs/op - BenchmarkToSNAKE-96 9018282 126.4 ns/op 16 B/op 1 allocs/op - BenchmarkToGoSnake-96 4903687 254.5 ns/op 26 B/op 4 allocs/op - BenchmarkToCustomCaser-96 4434489 265.0 ns/op 28 B/op 4 allocs/op - - // Segment has very fast snake case and camel case libraries - // No features or customization, but very very fast - BenchmarkSegment-96 33625734 35.54 ns/op 16 B/op 1 allocs/op - - // Iancoleman has gotten some performance improvements, but remains - // without unicode support and lacks fine-grained customization - BenchmarkToSnakeIan-96 13141522 92.99 ns/op 16 B/op 1 allocs/op - - // Stdlib strings.Title is deprecated; using golang.org/x.text - BenchmarkGolangOrgXTextCases-96 4665676 262.5 ns/op 272 B/op 2 allocs/op - - // Other libraries or code snippets - // - Most are slower, by up to an order of magnitude - // - No support for initialisms or customization - // - Some generate only camelCase or snake_case - // - Many lack unicode support - BenchmarkToSnakeStoewer-96 8095468 148.9 ns/op 64 B/op 2 allocs/op - // Copying small rune arrays is slow - BenchmarkToSnakeSiongui-96 2912593 401.7 ns/op 112 B/op 19 allocs/op - BenchmarkGoValidator-96 3493800 342.6 ns/op 184 B/op 9 allocs/op - // String alloction is slow - BenchmarkToSnakeFatih-96 1282648 945.1 ns/op 616 B/op 26 allocs/op - // Regexp is slow - BenchmarkToSnakeGolangPrograms-96 778674 1495 ns/op 227 B/op 11 allocs/op - - // These results aren't a surprise - my initial version of this library was - // painfully slow. I think most of us, without spending some time with - // profilers and benchmarks, would write also something on the slower side. - -### Zero dependencies - -That's right - zero. We only import the Go standard library. No hassles with -dependencies, licensing, security alerts. - -## Why not this package - -If every nanosecond matters and this is used in a tight loop, use segment.io's -libraries (https://github.com/segmentio/go-snakecase and -https://github.com/segmentio/go-camelcase). They lack features, but make up for -it by being blazing fast. - -## Migrating from other packages - -If you are migrating from from another package, you may find slight differences -in output. To reduce the delta, you may find it helpful to use the following -custom casers to mimic the behavior of the other package. 
- - // From https://github.com/iancoleman/strcase - var c = NewCaser(false, nil, NewSplitFn([]rune{'_', '-', '.'}, SplitCase, SplitAcronym, SplitBeforeNumber)) - - // From https://github.com/stoewer/go-strcase - var c = NewCaser(false, nil, NewSplitFn([]rune{'_', '-'}, SplitCase), SplitAcronym) -*/ -package strcase diff --git a/vendor/github.com/ettle/strcase/initialism.go b/vendor/github.com/ettle/strcase/initialism.go deleted file mode 100644 index 3c313d3e9..000000000 --- a/vendor/github.com/ettle/strcase/initialism.go +++ /dev/null @@ -1,43 +0,0 @@ -package strcase - -// golintInitialisms are the golint initialisms -var golintInitialisms = map[string]bool{ - "ACL": true, - "API": true, - "ASCII": true, - "CPU": true, - "CSS": true, - "DNS": true, - "EOF": true, - "GUID": true, - "HTML": true, - "HTTP": true, - "HTTPS": true, - "ID": true, - "IP": true, - "JSON": true, - "LHS": true, - "QPS": true, - "RAM": true, - "RHS": true, - "RPC": true, - "SLA": true, - "SMTP": true, - "SQL": true, - "SSH": true, - "TCP": true, - "TLS": true, - "TTL": true, - "UDP": true, - "UI": true, - "UID": true, - "UUID": true, - "URI": true, - "URL": true, - "UTF8": true, - "VM": true, - "XML": true, - "XMPP": true, - "XSRF": true, - "XSS": true, -} diff --git a/vendor/github.com/ettle/strcase/split.go b/vendor/github.com/ettle/strcase/split.go deleted file mode 100644 index 32bc29759..000000000 --- a/vendor/github.com/ettle/strcase/split.go +++ /dev/null @@ -1,165 +0,0 @@ -package strcase - -import "unicode" - -// SplitFn defines how to split a string into words -type SplitFn func(prev, curr, next rune) SplitAction - -// NewSplitFn returns a SplitFn based on the options provided. -// -// NewSplitFn covers the majority of common options that other strcase -// libraries provide and should allow you to simply create a custom caser. -// For more complicated use cases, feel free to write your own SplitFn -// -//nolint:gocyclo -func NewSplitFn( - delimiters []rune, - splitOptions ...SplitOption, -) SplitFn { - var splitCase, splitAcronym, splitBeforeNumber, splitAfterNumber, preserveNumberFormatting bool - - for _, option := range splitOptions { - switch option { - case SplitCase: - splitCase = true - case SplitAcronym: - splitAcronym = true - case SplitBeforeNumber: - splitBeforeNumber = true - case SplitAfterNumber: - splitAfterNumber = true - case PreserveNumberFormatting: - preserveNumberFormatting = true - } - } - - return func(prev, curr, next rune) SplitAction { - // The most common case will be that it's just a letter - // There are safe cases to process - if isLower(curr) && !isNumber(prev) { - return Noop - } - if isUpper(prev) && isUpper(curr) && isUpper(next) { - return Noop - } - - if preserveNumberFormatting { - if (curr == '.' || curr == ',') && - isNumber(prev) && isNumber(next) { - return Noop - } - } - - if unicode.IsSpace(curr) { - return SkipSplit - } - for _, d := range delimiters { - if curr == d { - return SkipSplit - } - } - - if splitBeforeNumber { - if isNumber(curr) && !isNumber(prev) { - if preserveNumberFormatting && (prev == '.' 
|| prev == ',') { - return Noop - } - return Split - } - } - - if splitAfterNumber { - if isNumber(prev) && !isNumber(curr) { - return Split - } - } - - if splitCase { - if !isUpper(prev) && isUpper(curr) { - return Split - } - } - - if splitAcronym { - if isUpper(prev) && isUpper(curr) && isLower(next) { - return Split - } - } - - return Noop - } -} - -// SplitOption are options that allow for configuring NewSplitFn -type SplitOption int - -const ( - // SplitCase - FooBar -> Foo_Bar - SplitCase SplitOption = iota - // SplitAcronym - FOOBar -> Foo_Bar - // It won't preserve FOO's case. If you want, you can set the Caser's initialisms so FOO will be in all caps - SplitAcronym - // SplitBeforeNumber - port80 -> port_80 - SplitBeforeNumber - // SplitAfterNumber - 200status -> 200_status - SplitAfterNumber - // PreserveNumberFormatting - a.b.2,000.3.c -> a_b_2,000.3_c - PreserveNumberFormatting -) - -// SplitAction defines if and how to split a string -type SplitAction int - -const ( - // Noop - Continue to next character - Noop SplitAction = iota - // Split - Split between words - // e.g. to split between wordsWithoutDelimiters - Split - // SkipSplit - Split the word and drop the character - // e.g. to split words with delimiters - SkipSplit - // Skip - Remove the character completely - Skip -) - -//nolint:gocyclo -func defaultSplitFn(prev, curr, next rune) SplitAction { - // The most common case will be that it's just a letter so let lowercase letters return early since we know what they should do - if isLower(curr) { - return Noop - } - // Delimiters are _, -, ., and unicode spaces - // Handle . lower down as it needs to happen after number exceptions - if curr == '_' || curr == '-' || isSpace(curr) { - return SkipSplit - } - - if isUpper(curr) { - if isLower(prev) { - // fooBar - return Split - } else if isUpper(prev) && isLower(next) { - // FOOBar - return Split - } - } - - // Do numeric exceptions last to avoid perf penalty - if unicode.IsNumber(prev) { - // v4.3 is not split - if (curr == '.' || curr == ',') && unicode.IsNumber(next) { - return Noop - } - if !unicode.IsNumber(curr) && curr != '.' { - return Split - } - } - // While period is a default delimiter, keep it down here to avoid - // penalty for other delimiters - if curr == '.' { - return SkipSplit - } - - return Noop -} diff --git a/vendor/github.com/ettle/strcase/strcase.go b/vendor/github.com/ettle/strcase/strcase.go deleted file mode 100644 index 46b4f7a68..000000000 --- a/vendor/github.com/ettle/strcase/strcase.go +++ /dev/null @@ -1,81 +0,0 @@ -package strcase - -// ToSnake returns words in snake_case (lower case words with underscores). -func ToSnake(s string) string { - return convertWithoutInitialisms(s, '_', LowerCase) -} - -// ToGoSnake returns words in snake_case (lower case words with underscores). -// -// Respects Go's common initialisms (e.g. http_response -> HTTP_response). -func ToGoSnake(s string) string { - return convertWithGoInitialisms(s, '_', LowerCase) -} - -// ToSNAKE returns words in SNAKE_CASE (upper case words with underscores). -// Also known as SCREAMING_SNAKE_CASE or UPPER_CASE. -func ToSNAKE(s string) string { - return convertWithoutInitialisms(s, '_', UpperCase) -} - -// ToKebab returns words in kebab-case (lower case words with dashes). -// Also known as dash-case. -func ToKebab(s string) string { - return convertWithoutInitialisms(s, '-', LowerCase) -} - -// ToGoKebab returns words in kebab-case (lower case words with dashes). -// Also known as dash-case. 
-// -// Respects Go's common initialisms (e.g. http-response -> HTTP-response). -func ToGoKebab(s string) string { - return convertWithGoInitialisms(s, '-', LowerCase) -} - -// ToKEBAB returns words in KEBAB-CASE (upper case words with dashes). -// Also known as SCREAMING-KEBAB-CASE or SCREAMING-DASH-CASE. -func ToKEBAB(s string) string { - return convertWithoutInitialisms(s, '-', UpperCase) -} - -// ToPascal returns words in PascalCase (capitalized words concatenated together). -// Also known as UpperPascalCase. -func ToPascal(s string) string { - return convertWithoutInitialisms(s, 0, TitleCase) -} - -// ToGoPascal returns words in PascalCase (capitalized words concatenated together). -// Also known as UpperPascalCase. -// -// Respects Go's common initialisms (e.g. HttpResponse -> HTTPResponse). -func ToGoPascal(s string) string { - return convertWithGoInitialisms(s, 0, TitleCase) -} - -// ToCamel returns words in camelCase (capitalized words concatenated together, with first word lower case). -// Also known as lowerCamelCase or mixedCase. -func ToCamel(s string) string { - return convertWithoutInitialisms(s, 0, CamelCase) -} - -// ToGoCamel returns words in camelCase (capitalized words concatenated together, with first word lower case). -// Also known as lowerCamelCase or mixedCase. -// -// Respects Go's common initialisms, but first word remains lowercased which is -// important for code generator use cases (e.g. toJson -> toJSON, httpResponse -// -> httpResponse). -func ToGoCamel(s string) string { - return convertWithGoInitialisms(s, 0, CamelCase) -} - -// ToCase returns words in given case and delimiter. -func ToCase(s string, wordCase WordCase, delimiter rune) string { - return convertWithoutInitialisms(s, delimiter, wordCase) -} - -// ToGoCase returns words in given case and delimiter. -// -// Respects Go's common initialisms (e.g. httpResponse -> HTTPResponse). -func ToGoCase(s string, wordCase WordCase, delimiter rune) string { - return convertWithGoInitialisms(s, delimiter, wordCase) -} diff --git a/vendor/github.com/ettle/strcase/unicode.go b/vendor/github.com/ettle/strcase/unicode.go deleted file mode 100644 index b75e25a51..000000000 --- a/vendor/github.com/ettle/strcase/unicode.go +++ /dev/null @@ -1,48 +0,0 @@ -package strcase - -import "unicode" - -// Unicode functions, optimized for the common case of ascii -// No performance lost by wrapping since these functions get inlined by the compiler - -func isUpper(r rune) bool { - return unicode.IsUpper(r) -} - -func isLower(r rune) bool { - return unicode.IsLower(r) -} - -func isNumber(r rune) bool { - if r >= '0' && r <= '9' { - return true - } - return unicode.IsNumber(r) -} - -func isSpace(r rune) bool { - if r == ' ' || r == '\t' || r == '\n' || r == '\r' { - return true - } else if r < 128 { - return false - } - return unicode.IsSpace(r) -} - -func toUpper(r rune) rune { - if r >= 'a' && r <= 'z' { - return r - 32 - } else if r < 128 { - return r - } - return unicode.ToUpper(r) -} - -func toLower(r rune) rune { - if r >= 'A' && r <= 'Z' { - return r + 32 - } else if r < 128 { - return r - } - return unicode.ToLower(r) -} diff --git a/vendor/github.com/fatih/structtag/LICENSE b/vendor/github.com/fatih/structtag/LICENSE deleted file mode 100644 index 4fd15f9f8..000000000 --- a/vendor/github.com/fatih/structtag/LICENSE +++ /dev/null @@ -1,60 +0,0 @@ -Copyright (c) 2017, Fatih Arslan -All rights reserved. 
- -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are met: - -* Redistributions of source code must retain the above copyright notice, this - list of conditions and the following disclaimer. - -* Redistributions in binary form must reproduce the above copyright notice, - this list of conditions and the following disclaimer in the documentation - and/or other materials provided with the distribution. - -* Neither the name of structtag nor the names of its - contributors may be used to endorse or promote products derived from - this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" -AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE -IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE -FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL -DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR -SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, -OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -This software includes some portions from Go. Go is used under the terms of the -BSD like license. - -Copyright (c) 2012 The Go Authors. All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are -met: - - * Redistributions of source code must retain the above copyright -notice, this list of conditions and the following disclaimer. - * Redistributions in binary form must reproduce the above -copyright notice, this list of conditions and the following disclaimer -in the documentation and/or other materials provided with the -distribution. - * Neither the name of Google Inc. nor the names of its -contributors may be used to endorse or promote products derived from -this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -The Go gopher was designed by Renee French. http://reneefrench.blogspot.com/ The design is licensed under the Creative Commons 3.0 Attributions license. 
Read this article for more details: https://blog.golang.org/gopher diff --git a/vendor/github.com/fatih/structtag/README.md b/vendor/github.com/fatih/structtag/README.md deleted file mode 100644 index c4e8b1e86..000000000 --- a/vendor/github.com/fatih/structtag/README.md +++ /dev/null @@ -1,73 +0,0 @@ -# structtag [![GoDoc](http://img.shields.io/badge/go-documentation-blue.svg?style=flat-square)](http://godoc.org/github.com/fatih/structtag) - -structtag provides an easy way of parsing and manipulating struct tag fields. -Please vendor the library as it might change in future versions. - -# Install - -```bash -go get github.com/fatih/structtag -``` - -# Example - -```go -package main - -import ( - "fmt" - "reflect" - "sort" - - "github.com/fatih/structtag" -) - -func main() { - type t struct { - t string `json:"foo,omitempty,string" xml:"foo"` - } - - // get field tag - tag := reflect.TypeOf(t{}).Field(0).Tag - - // ... and start using structtag by parsing the tag - tags, err := structtag.Parse(string(tag)) - if err != nil { - panic(err) - } - - // iterate over all tags - for _, t := range tags.Tags() { - fmt.Printf("tag: %+v\n", t) - } - - // get a single tag - jsonTag, err := tags.Get("json") - if err != nil { - panic(err) - } - fmt.Println(jsonTag) // Output: json:"foo,omitempty,string" - fmt.Println(jsonTag.Key) // Output: json - fmt.Println(jsonTag.Name) // Output: foo - fmt.Println(jsonTag.Options) // Output: [omitempty string] - - // change existing tag - jsonTag.Name = "foo_bar" - jsonTag.Options = nil - tags.Set(jsonTag) - - // add new tag - tags.Set(&structtag.Tag{ - Key: "hcl", - Name: "foo", - Options: []string{"squash"}, - }) - - // print the tags - fmt.Println(tags) // Output: json:"foo_bar" xml:"foo" hcl:"foo,squash" - - // sort tags according to keys - sort.Sort(tags) - fmt.Println(tags) // Output: hcl:"foo,squash" json:"foo_bar" xml:"foo" -} -``` diff --git a/vendor/github.com/fatih/structtag/tags.go b/vendor/github.com/fatih/structtag/tags.go deleted file mode 100644 index c168fb21c..000000000 --- a/vendor/github.com/fatih/structtag/tags.go +++ /dev/null @@ -1,315 +0,0 @@ -package structtag - -import ( - "bytes" - "errors" - "fmt" - "strconv" - "strings" -) - -var ( - errTagSyntax = errors.New("bad syntax for struct tag pair") - errTagKeySyntax = errors.New("bad syntax for struct tag key") - errTagValueSyntax = errors.New("bad syntax for struct tag value") - - errKeyNotSet = errors.New("tag key does not exist") - errTagNotExist = errors.New("tag does not exist") - errTagKeyMismatch = errors.New("mismatch between key and tag.key") -) - -// Tags represent a set of tags from a single struct field -type Tags struct { - tags []*Tag -} - -// Tag defines a single struct's string literal tag -type Tag struct { - // Key is the tag key, such as json, xml, etc.. - // i.e: `json:"foo,omitempty". Here key is: "json" - Key string - - // Name is a part of the value - // i.e: `json:"foo,omitempty". Here name is: "foo" - Name string - - // Options is a part of the value. It contains a slice of tag options i.e: - // `json:"foo,omitempty". Here options is: ["omitempty"] - Options []string -} - -// Parse parses a single struct field tag and returns the set of tags. -func Parse(tag string) (*Tags, error) { - var tags []*Tag - - hasTag := tag != "" - - // NOTE(arslan) following code is from reflect and vet package with some - // modifications to collect all necessary information and extend it with - // usable methods - for tag != "" { - // Skip leading space. 
- i := 0 - for i < len(tag) && tag[i] == ' ' { - i++ - } - tag = tag[i:] - if tag == "" { - break - } - - // Scan to colon. A space, a quote or a control character is a syntax - // error. Strictly speaking, control chars include the range [0x7f, - // 0x9f], not just [0x00, 0x1f], but in practice, we ignore the - // multi-byte control characters as it is simpler to inspect the tag's - // bytes than the tag's runes. - i = 0 - for i < len(tag) && tag[i] > ' ' && tag[i] != ':' && tag[i] != '"' && tag[i] != 0x7f { - i++ - } - - if i == 0 { - return nil, errTagKeySyntax - } - if i+1 >= len(tag) || tag[i] != ':' { - return nil, errTagSyntax - } - if tag[i+1] != '"' { - return nil, errTagValueSyntax - } - - key := string(tag[:i]) - tag = tag[i+1:] - - // Scan quoted string to find value. - i = 1 - for i < len(tag) && tag[i] != '"' { - if tag[i] == '\\' { - i++ - } - i++ - } - if i >= len(tag) { - return nil, errTagValueSyntax - } - - qvalue := string(tag[:i+1]) - tag = tag[i+1:] - - value, err := strconv.Unquote(qvalue) - if err != nil { - return nil, errTagValueSyntax - } - - res := strings.Split(value, ",") - name := res[0] - options := res[1:] - if len(options) == 0 { - options = nil - } - - tags = append(tags, &Tag{ - Key: key, - Name: name, - Options: options, - }) - } - - if hasTag && len(tags) == 0 { - return nil, nil - } - - return &Tags{ - tags: tags, - }, nil -} - -// Get returns the tag associated with the given key. If the key is present -// in the tag the value (which may be empty) is returned. Otherwise the -// returned value will be the empty string. The ok return value reports whether -// the tag exists or not (which the return value is nil). -func (t *Tags) Get(key string) (*Tag, error) { - for _, tag := range t.tags { - if tag.Key == key { - return tag, nil - } - } - - return nil, errTagNotExist -} - -// Set sets the given tag. If the tag key already exists it'll override it -func (t *Tags) Set(tag *Tag) error { - if tag.Key == "" { - return errKeyNotSet - } - - added := false - for i, tg := range t.tags { - if tg.Key == tag.Key { - added = true - t.tags[i] = tag - } - } - - if !added { - // this means this is a new tag, add it - t.tags = append(t.tags, tag) - } - - return nil -} - -// AddOptions adds the given option for the given key. If the option already -// exists it doesn't add it again. -func (t *Tags) AddOptions(key string, options ...string) { - for i, tag := range t.tags { - if tag.Key != key { - continue - } - - for _, opt := range options { - if !tag.HasOption(opt) { - tag.Options = append(tag.Options, opt) - } - } - - t.tags[i] = tag - } -} - -// DeleteOptions deletes the given options for the given key -func (t *Tags) DeleteOptions(key string, options ...string) { - hasOption := func(option string) bool { - for _, opt := range options { - if opt == option { - return true - } - } - return false - } - - for i, tag := range t.tags { - if tag.Key != key { - continue - } - - var updated []string - for _, opt := range tag.Options { - if !hasOption(opt) { - updated = append(updated, opt) - } - } - - tag.Options = updated - t.tags[i] = tag - } -} - -// Delete deletes the tag for the given keys -func (t *Tags) Delete(keys ...string) { - hasKey := func(key string) bool { - for _, k := range keys { - if k == key { - return true - } - } - return false - } - - var updated []*Tag - for _, tag := range t.tags { - if !hasKey(tag.Key) { - updated = append(updated, tag) - } - } - - t.tags = updated -} - -// Tags returns a slice of tags. 
The order is the original tag order unless it -// was changed. -func (t *Tags) Tags() []*Tag { - return t.tags -} - -// Tags returns a slice of tags. The order is the original tag order unless it -// was changed. -func (t *Tags) Keys() []string { - var keys []string - for _, tag := range t.tags { - keys = append(keys, tag.Key) - } - return keys -} - -// String reassembles the tags into a valid literal tag field representation -func (t *Tags) String() string { - tags := t.Tags() - if len(tags) == 0 { - return "" - } - - var buf bytes.Buffer - for i, tag := range t.Tags() { - buf.WriteString(tag.String()) - if i != len(tags)-1 { - buf.WriteString(" ") - } - } - return buf.String() -} - -// HasOption returns true if the given option is available in options -func (t *Tag) HasOption(opt string) bool { - for _, tagOpt := range t.Options { - if tagOpt == opt { - return true - } - } - - return false -} - -// Value returns the raw value of the tag, i.e. if the tag is -// `json:"foo,omitempty", the Value is "foo,omitempty" -func (t *Tag) Value() string { - options := strings.Join(t.Options, ",") - if options != "" { - return fmt.Sprintf(`%s,%s`, t.Name, options) - } - return t.Name -} - -// String reassembles the tag into a valid tag field representation -func (t *Tag) String() string { - return fmt.Sprintf(`%s:%q`, t.Key, t.Value()) -} - -// GoString implements the fmt.GoStringer interface -func (t *Tag) GoString() string { - template := `{ - Key: '%s', - Name: '%s', - Option: '%s', - }` - - if t.Options == nil { - return fmt.Sprintf(template, t.Key, t.Name, "nil") - } - - options := strings.Join(t.Options, ",") - return fmt.Sprintf(template, t.Key, t.Name, options) -} - -func (t *Tags) Len() int { - return len(t.tags) -} - -func (t *Tags) Less(i int, j int) bool { - return t.tags[i].Key < t.tags[j].Key -} - -func (t *Tags) Swap(i int, j int) { - t.tags[i], t.tags[j] = t.tags[j], t.tags[i] -} diff --git a/vendor/github.com/firefart/nonamedreturns/LICENSE b/vendor/github.com/firefart/nonamedreturns/LICENSE deleted file mode 100644 index f288702d2..000000000 --- a/vendor/github.com/firefart/nonamedreturns/LICENSE +++ /dev/null @@ -1,674 +0,0 @@ - GNU GENERAL PUBLIC LICENSE - Version 3, 29 June 2007 - - Copyright (C) 2007 Free Software Foundation, Inc. - Everyone is permitted to copy and distribute verbatim copies - of this license document, but changing it is not allowed. - - Preamble - - The GNU General Public License is a free, copyleft license for -software and other kinds of works. - - The licenses for most software and other practical works are designed -to take away your freedom to share and change the works. By contrast, -the GNU General Public License is intended to guarantee your freedom to -share and change all versions of a program--to make sure it remains free -software for all its users. We, the Free Software Foundation, use the -GNU General Public License for most of our software; it applies also to -any other work released this way by its authors. You can apply it to -your programs, too. - - When we speak of free software, we are referring to freedom, not -price. Our General Public Licenses are designed to make sure that you -have the freedom to distribute copies of free software (and charge for -them if you wish), that you receive source code or can get it if you -want it, that you can change the software or use pieces of it in new -free programs, and that you know you can do these things. 
- - To protect your rights, we need to prevent others from denying you -these rights or asking you to surrender the rights. Therefore, you have -certain responsibilities if you distribute copies of the software, or if -you modify it: responsibilities to respect the freedom of others. - - For example, if you distribute copies of such a program, whether -gratis or for a fee, you must pass on to the recipients the same -freedoms that you received. You must make sure that they, too, receive -or can get the source code. And you must show them these terms so they -know their rights. - - Developers that use the GNU GPL protect your rights with two steps: -(1) assert copyright on the software, and (2) offer you this License -giving you legal permission to copy, distribute and/or modify it. - - For the developers' and authors' protection, the GPL clearly explains -that there is no warranty for this free software. For both users' and -authors' sake, the GPL requires that modified versions be marked as -changed, so that their problems will not be attributed erroneously to -authors of previous versions. - - Some devices are designed to deny users access to install or run -modified versions of the software inside them, although the manufacturer -can do so. This is fundamentally incompatible with the aim of -protecting users' freedom to change the software. The systematic -pattern of such abuse occurs in the area of products for individuals to -use, which is precisely where it is most unacceptable. Therefore, we -have designed this version of the GPL to prohibit the practice for those -products. If such problems arise substantially in other domains, we -stand ready to extend this provision to those domains in future versions -of the GPL, as needed to protect the freedom of users. - - Finally, every program is threatened constantly by software patents. -States should not allow patents to restrict development and use of -software on general-purpose computers, but in those that do, we wish to -avoid the special danger that patents applied to a free program could -make it effectively proprietary. To prevent this, the GPL assures that -patents cannot be used to render the program non-free. - - The precise terms and conditions for copying, distribution and -modification follow. - - TERMS AND CONDITIONS - - 0. Definitions. - - "This License" refers to version 3 of the GNU General Public License. - - "Copyright" also means copyright-like laws that apply to other kinds of -works, such as semiconductor masks. - - "The Program" refers to any copyrightable work licensed under this -License. Each licensee is addressed as "you". "Licensees" and -"recipients" may be individuals or organizations. - - To "modify" a work means to copy from or adapt all or part of the work -in a fashion requiring copyright permission, other than the making of an -exact copy. The resulting work is called a "modified version" of the -earlier work or a work "based on" the earlier work. - - A "covered work" means either the unmodified Program or a work based -on the Program. - - To "propagate" a work means to do anything with it that, without -permission, would make you directly or secondarily liable for -infringement under applicable copyright law, except executing it on a -computer or modifying a private copy. Propagation includes copying, -distribution (with or without modification), making available to the -public, and in some countries other activities as well. 
- - To "convey" a work means any kind of propagation that enables other -parties to make or receive copies. Mere interaction with a user through -a computer network, with no transfer of a copy, is not conveying. - - An interactive user interface displays "Appropriate Legal Notices" -to the extent that it includes a convenient and prominently visible -feature that (1) displays an appropriate copyright notice, and (2) -tells the user that there is no warranty for the work (except to the -extent that warranties are provided), that licensees may convey the -work under this License, and how to view a copy of this License. If -the interface presents a list of user commands or options, such as a -menu, a prominent item in the list meets this criterion. - - 1. Source Code. - - The "source code" for a work means the preferred form of the work -for making modifications to it. "Object code" means any non-source -form of a work. - - A "Standard Interface" means an interface that either is an official -standard defined by a recognized standards body, or, in the case of -interfaces specified for a particular programming language, one that -is widely used among developers working in that language. - - The "System Libraries" of an executable work include anything, other -than the work as a whole, that (a) is included in the normal form of -packaging a Major Component, but which is not part of that Major -Component, and (b) serves only to enable use of the work with that -Major Component, or to implement a Standard Interface for which an -implementation is available to the public in source code form. A -"Major Component", in this context, means a major essential component -(kernel, window system, and so on) of the specific operating system -(if any) on which the executable work runs, or a compiler used to -produce the work, or an object code interpreter used to run it. - - The "Corresponding Source" for a work in object code form means all -the source code needed to generate, install, and (for an executable -work) run the object code and to modify the work, including scripts to -control those activities. However, it does not include the work's -System Libraries, or general-purpose tools or generally available free -programs which are used unmodified in performing those activities but -which are not part of the work. For example, Corresponding Source -includes interface definition files associated with source files for -the work, and the source code for shared libraries and dynamically -linked subprograms that the work is specifically designed to require, -such as by intimate data communication or control flow between those -subprograms and other parts of the work. - - The Corresponding Source need not include anything that users -can regenerate automatically from other parts of the Corresponding -Source. - - The Corresponding Source for a work in source code form is that -same work. - - 2. Basic Permissions. - - All rights granted under this License are granted for the term of -copyright on the Program, and are irrevocable provided the stated -conditions are met. This License explicitly affirms your unlimited -permission to run the unmodified Program. The output from running a -covered work is covered by this License only if the output, given its -content, constitutes a covered work. This License acknowledges your -rights of fair use or other equivalent, as provided by copyright law. 
- - You may make, run and propagate covered works that you do not -convey, without conditions so long as your license otherwise remains -in force. You may convey covered works to others for the sole purpose -of having them make modifications exclusively for you, or provide you -with facilities for running those works, provided that you comply with -the terms of this License in conveying all material for which you do -not control copyright. Those thus making or running the covered works -for you must do so exclusively on your behalf, under your direction -and control, on terms that prohibit them from making any copies of -your copyrighted material outside their relationship with you. - - Conveying under any other circumstances is permitted solely under -the conditions stated below. Sublicensing is not allowed; section 10 -makes it unnecessary. - - 3. Protecting Users' Legal Rights From Anti-Circumvention Law. - - No covered work shall be deemed part of an effective technological -measure under any applicable law fulfilling obligations under article -11 of the WIPO copyright treaty adopted on 20 December 1996, or -similar laws prohibiting or restricting circumvention of such -measures. - - When you convey a covered work, you waive any legal power to forbid -circumvention of technological measures to the extent such circumvention -is effected by exercising rights under this License with respect to -the covered work, and you disclaim any intention to limit operation or -modification of the work as a means of enforcing, against the work's -users, your or third parties' legal rights to forbid circumvention of -technological measures. - - 4. Conveying Verbatim Copies. - - You may convey verbatim copies of the Program's source code as you -receive it, in any medium, provided that you conspicuously and -appropriately publish on each copy an appropriate copyright notice; -keep intact all notices stating that this License and any -non-permissive terms added in accord with section 7 apply to the code; -keep intact all notices of the absence of any warranty; and give all -recipients a copy of this License along with the Program. - - You may charge any price or no price for each copy that you convey, -and you may offer support or warranty protection for a fee. - - 5. Conveying Modified Source Versions. - - You may convey a work based on the Program, or the modifications to -produce it from the Program, in the form of source code under the -terms of section 4, provided that you also meet all of these conditions: - - a) The work must carry prominent notices stating that you modified - it, and giving a relevant date. - - b) The work must carry prominent notices stating that it is - released under this License and any conditions added under section - 7. This requirement modifies the requirement in section 4 to - "keep intact all notices". - - c) You must license the entire work, as a whole, under this - License to anyone who comes into possession of a copy. This - License will therefore apply, along with any applicable section 7 - additional terms, to the whole of the work, and all its parts, - regardless of how they are packaged. This License gives no - permission to license the work in any other way, but it does not - invalidate such permission if you have separately received it. 
- - d) If the work has interactive user interfaces, each must display - Appropriate Legal Notices; however, if the Program has interactive - interfaces that do not display Appropriate Legal Notices, your - work need not make them do so. - - A compilation of a covered work with other separate and independent -works, which are not by their nature extensions of the covered work, -and which are not combined with it such as to form a larger program, -in or on a volume of a storage or distribution medium, is called an -"aggregate" if the compilation and its resulting copyright are not -used to limit the access or legal rights of the compilation's users -beyond what the individual works permit. Inclusion of a covered work -in an aggregate does not cause this License to apply to the other -parts of the aggregate. - - 6. Conveying Non-Source Forms. - - You may convey a covered work in object code form under the terms -of sections 4 and 5, provided that you also convey the -machine-readable Corresponding Source under the terms of this License, -in one of these ways: - - a) Convey the object code in, or embodied in, a physical product - (including a physical distribution medium), accompanied by the - Corresponding Source fixed on a durable physical medium - customarily used for software interchange. - - b) Convey the object code in, or embodied in, a physical product - (including a physical distribution medium), accompanied by a - written offer, valid for at least three years and valid for as - long as you offer spare parts or customer support for that product - model, to give anyone who possesses the object code either (1) a - copy of the Corresponding Source for all the software in the - product that is covered by this License, on a durable physical - medium customarily used for software interchange, for a price no - more than your reasonable cost of physically performing this - conveying of source, or (2) access to copy the - Corresponding Source from a network server at no charge. - - c) Convey individual copies of the object code with a copy of the - written offer to provide the Corresponding Source. This - alternative is allowed only occasionally and noncommercially, and - only if you received the object code with such an offer, in accord - with subsection 6b. - - d) Convey the object code by offering access from a designated - place (gratis or for a charge), and offer equivalent access to the - Corresponding Source in the same way through the same place at no - further charge. You need not require recipients to copy the - Corresponding Source along with the object code. If the place to - copy the object code is a network server, the Corresponding Source - may be on a different server (operated by you or a third party) - that supports equivalent copying facilities, provided you maintain - clear directions next to the object code saying where to find the - Corresponding Source. Regardless of what server hosts the - Corresponding Source, you remain obligated to ensure that it is - available for as long as needed to satisfy these requirements. - - e) Convey the object code using peer-to-peer transmission, provided - you inform other peers where the object code and Corresponding - Source of the work are being offered to the general public at no - charge under subsection 6d. - - A separable portion of the object code, whose source code is excluded -from the Corresponding Source as a System Library, need not be -included in conveying the object code work. 
- - A "User Product" is either (1) a "consumer product", which means any -tangible personal property which is normally used for personal, family, -or household purposes, or (2) anything designed or sold for incorporation -into a dwelling. In determining whether a product is a consumer product, -doubtful cases shall be resolved in favor of coverage. For a particular -product received by a particular user, "normally used" refers to a -typical or common use of that class of product, regardless of the status -of the particular user or of the way in which the particular user -actually uses, or expects or is expected to use, the product. A product -is a consumer product regardless of whether the product has substantial -commercial, industrial or non-consumer uses, unless such uses represent -the only significant mode of use of the product. - - "Installation Information" for a User Product means any methods, -procedures, authorization keys, or other information required to install -and execute modified versions of a covered work in that User Product from -a modified version of its Corresponding Source. The information must -suffice to ensure that the continued functioning of the modified object -code is in no case prevented or interfered with solely because -modification has been made. - - If you convey an object code work under this section in, or with, or -specifically for use in, a User Product, and the conveying occurs as -part of a transaction in which the right of possession and use of the -User Product is transferred to the recipient in perpetuity or for a -fixed term (regardless of how the transaction is characterized), the -Corresponding Source conveyed under this section must be accompanied -by the Installation Information. But this requirement does not apply -if neither you nor any third party retains the ability to install -modified object code on the User Product (for example, the work has -been installed in ROM). - - The requirement to provide Installation Information does not include a -requirement to continue to provide support service, warranty, or updates -for a work that has been modified or installed by the recipient, or for -the User Product in which it has been modified or installed. Access to a -network may be denied when the modification itself materially and -adversely affects the operation of the network or violates the rules and -protocols for communication across the network. - - Corresponding Source conveyed, and Installation Information provided, -in accord with this section must be in a format that is publicly -documented (and with an implementation available to the public in -source code form), and must require no special password or key for -unpacking, reading or copying. - - 7. Additional Terms. - - "Additional permissions" are terms that supplement the terms of this -License by making exceptions from one or more of its conditions. -Additional permissions that are applicable to the entire Program shall -be treated as though they were included in this License, to the extent -that they are valid under applicable law. If additional permissions -apply only to part of the Program, that part may be used separately -under those permissions, but the entire Program remains governed by -this License without regard to the additional permissions. - - When you convey a copy of a covered work, you may at your option -remove any additional permissions from that copy, or from any part of -it. 
(Additional permissions may be written to require their own -removal in certain cases when you modify the work.) You may place -additional permissions on material, added by you to a covered work, -for which you have or can give appropriate copyright permission. - - Notwithstanding any other provision of this License, for material you -add to a covered work, you may (if authorized by the copyright holders of -that material) supplement the terms of this License with terms: - - a) Disclaiming warranty or limiting liability differently from the - terms of sections 15 and 16 of this License; or - - b) Requiring preservation of specified reasonable legal notices or - author attributions in that material or in the Appropriate Legal - Notices displayed by works containing it; or - - c) Prohibiting misrepresentation of the origin of that material, or - requiring that modified versions of such material be marked in - reasonable ways as different from the original version; or - - d) Limiting the use for publicity purposes of names of licensors or - authors of the material; or - - e) Declining to grant rights under trademark law for use of some - trade names, trademarks, or service marks; or - - f) Requiring indemnification of licensors and authors of that - material by anyone who conveys the material (or modified versions of - it) with contractual assumptions of liability to the recipient, for - any liability that these contractual assumptions directly impose on - those licensors and authors. - - All other non-permissive additional terms are considered "further -restrictions" within the meaning of section 10. If the Program as you -received it, or any part of it, contains a notice stating that it is -governed by this License along with a term that is a further -restriction, you may remove that term. If a license document contains -a further restriction but permits relicensing or conveying under this -License, you may add to a covered work material governed by the terms -of that license document, provided that the further restriction does -not survive such relicensing or conveying. - - If you add terms to a covered work in accord with this section, you -must place, in the relevant source files, a statement of the -additional terms that apply to those files, or a notice indicating -where to find the applicable terms. - - Additional terms, permissive or non-permissive, may be stated in the -form of a separately written license, or stated as exceptions; -the above requirements apply either way. - - 8. Termination. - - You may not propagate or modify a covered work except as expressly -provided under this License. Any attempt otherwise to propagate or -modify it is void, and will automatically terminate your rights under -this License (including any patent licenses granted under the third -paragraph of section 11). - - However, if you cease all violation of this License, then your -license from a particular copyright holder is reinstated (a) -provisionally, unless and until the copyright holder explicitly and -finally terminates your license, and (b) permanently, if the copyright -holder fails to notify you of the violation by some reasonable means -prior to 60 days after the cessation. 
- - Moreover, your license from a particular copyright holder is -reinstated permanently if the copyright holder notifies you of the -violation by some reasonable means, this is the first time you have -received notice of violation of this License (for any work) from that -copyright holder, and you cure the violation prior to 30 days after -your receipt of the notice. - - Termination of your rights under this section does not terminate the -licenses of parties who have received copies or rights from you under -this License. If your rights have been terminated and not permanently -reinstated, you do not qualify to receive new licenses for the same -material under section 10. - - 9. Acceptance Not Required for Having Copies. - - You are not required to accept this License in order to receive or -run a copy of the Program. Ancillary propagation of a covered work -occurring solely as a consequence of using peer-to-peer transmission -to receive a copy likewise does not require acceptance. However, -nothing other than this License grants you permission to propagate or -modify any covered work. These actions infringe copyright if you do -not accept this License. Therefore, by modifying or propagating a -covered work, you indicate your acceptance of this License to do so. - - 10. Automatic Licensing of Downstream Recipients. - - Each time you convey a covered work, the recipient automatically -receives a license from the original licensors, to run, modify and -propagate that work, subject to this License. You are not responsible -for enforcing compliance by third parties with this License. - - An "entity transaction" is a transaction transferring control of an -organization, or substantially all assets of one, or subdividing an -organization, or merging organizations. If propagation of a covered -work results from an entity transaction, each party to that -transaction who receives a copy of the work also receives whatever -licenses to the work the party's predecessor in interest had or could -give under the previous paragraph, plus a right to possession of the -Corresponding Source of the work from the predecessor in interest, if -the predecessor has it or can get it with reasonable efforts. - - You may not impose any further restrictions on the exercise of the -rights granted or affirmed under this License. For example, you may -not impose a license fee, royalty, or other charge for exercise of -rights granted under this License, and you may not initiate litigation -(including a cross-claim or counterclaim in a lawsuit) alleging that -any patent claim is infringed by making, using, selling, offering for -sale, or importing the Program or any portion of it. - - 11. Patents. - - A "contributor" is a copyright holder who authorizes use under this -License of the Program or a work on which the Program is based. The -work thus licensed is called the contributor's "contributor version". - - A contributor's "essential patent claims" are all patent claims -owned or controlled by the contributor, whether already acquired or -hereafter acquired, that would be infringed by some manner, permitted -by this License, of making, using, or selling its contributor version, -but do not include claims that would be infringed only as a -consequence of further modification of the contributor version. For -purposes of this definition, "control" includes the right to grant -patent sublicenses in a manner consistent with the requirements of -this License. 
- - Each contributor grants you a non-exclusive, worldwide, royalty-free -patent license under the contributor's essential patent claims, to -make, use, sell, offer for sale, import and otherwise run, modify and -propagate the contents of its contributor version. - - In the following three paragraphs, a "patent license" is any express -agreement or commitment, however denominated, not to enforce a patent -(such as an express permission to practice a patent or covenant not to -sue for patent infringement). To "grant" such a patent license to a -party means to make such an agreement or commitment not to enforce a -patent against the party. - - If you convey a covered work, knowingly relying on a patent license, -and the Corresponding Source of the work is not available for anyone -to copy, free of charge and under the terms of this License, through a -publicly available network server or other readily accessible means, -then you must either (1) cause the Corresponding Source to be so -available, or (2) arrange to deprive yourself of the benefit of the -patent license for this particular work, or (3) arrange, in a manner -consistent with the requirements of this License, to extend the patent -license to downstream recipients. "Knowingly relying" means you have -actual knowledge that, but for the patent license, your conveying the -covered work in a country, or your recipient's use of the covered work -in a country, would infringe one or more identifiable patents in that -country that you have reason to believe are valid. - - If, pursuant to or in connection with a single transaction or -arrangement, you convey, or propagate by procuring conveyance of, a -covered work, and grant a patent license to some of the parties -receiving the covered work authorizing them to use, propagate, modify -or convey a specific copy of the covered work, then the patent license -you grant is automatically extended to all recipients of the covered -work and works based on it. - - A patent license is "discriminatory" if it does not include within -the scope of its coverage, prohibits the exercise of, or is -conditioned on the non-exercise of one or more of the rights that are -specifically granted under this License. You may not convey a covered -work if you are a party to an arrangement with a third party that is -in the business of distributing software, under which you make payment -to the third party based on the extent of your activity of conveying -the work, and under which the third party grants, to any of the -parties who would receive the covered work from you, a discriminatory -patent license (a) in connection with copies of the covered work -conveyed by you (or copies made from those copies), or (b) primarily -for and in connection with specific products or compilations that -contain the covered work, unless you entered into that arrangement, -or that patent license was granted, prior to 28 March 2007. - - Nothing in this License shall be construed as excluding or limiting -any implied license or other defenses to infringement that may -otherwise be available to you under applicable patent law. - - 12. No Surrender of Others' Freedom. - - If conditions are imposed on you (whether by court order, agreement or -otherwise) that contradict the conditions of this License, they do not -excuse you from the conditions of this License. 
If you cannot convey a -covered work so as to satisfy simultaneously your obligations under this -License and any other pertinent obligations, then as a consequence you may -not convey it at all. For example, if you agree to terms that obligate you -to collect a royalty for further conveying from those to whom you convey -the Program, the only way you could satisfy both those terms and this -License would be to refrain entirely from conveying the Program. - - 13. Use with the GNU Affero General Public License. - - Notwithstanding any other provision of this License, you have -permission to link or combine any covered work with a work licensed -under version 3 of the GNU Affero General Public License into a single -combined work, and to convey the resulting work. The terms of this -License will continue to apply to the part which is the covered work, -but the special requirements of the GNU Affero General Public License, -section 13, concerning interaction through a network will apply to the -combination as such. - - 14. Revised Versions of this License. - - The Free Software Foundation may publish revised and/or new versions of -the GNU General Public License from time to time. Such new versions will -be similar in spirit to the present version, but may differ in detail to -address new problems or concerns. - - Each version is given a distinguishing version number. If the -Program specifies that a certain numbered version of the GNU General -Public License "or any later version" applies to it, you have the -option of following the terms and conditions either of that numbered -version or of any later version published by the Free Software -Foundation. If the Program does not specify a version number of the -GNU General Public License, you may choose any version ever published -by the Free Software Foundation. - - If the Program specifies that a proxy can decide which future -versions of the GNU General Public License can be used, that proxy's -public statement of acceptance of a version permanently authorizes you -to choose that version for the Program. - - Later license versions may give you additional or different -permissions. However, no additional obligations are imposed on any -author or copyright holder as a result of your choosing to follow a -later version. - - 15. Disclaimer of Warranty. - - THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY -APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT -HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY -OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, -THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR -PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM -IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF -ALL NECESSARY SERVICING, REPAIR OR CORRECTION. - - 16. Limitation of Liability. - - IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING -WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS -THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY -GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE -USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF -DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD -PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), -EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF -SUCH DAMAGES. - - 17. 
Interpretation of Sections 15 and 16. - - If the disclaimer of warranty and limitation of liability provided -above cannot be given local legal effect according to their terms, -reviewing courts shall apply local law that most closely approximates -an absolute waiver of all civil liability in connection with the -Program, unless a warranty or assumption of liability accompanies a -copy of the Program in return for a fee. - - END OF TERMS AND CONDITIONS - - How to Apply These Terms to Your New Programs - - If you develop a new program, and you want it to be of the greatest -possible use to the public, the best way to achieve this is to make it -free software which everyone can redistribute and change under these terms. - - To do so, attach the following notices to the program. It is safest -to attach them to the start of each source file to most effectively -state the exclusion of warranty; and each file should have at least -the "copyright" line and a pointer to where the full notice is found. - - - Copyright (C) - - This program is free software: you can redistribute it and/or modify - it under the terms of the GNU General Public License as published by - the Free Software Foundation, either version 3 of the License, or - (at your option) any later version. - - This program is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - GNU General Public License for more details. - - You should have received a copy of the GNU General Public License - along with this program. If not, see . - -Also add information on how to contact you by electronic and paper mail. - - If the program does terminal interaction, make it output a short -notice like this when it starts in an interactive mode: - - Copyright (C) - This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'. - This is free software, and you are welcome to redistribute it - under certain conditions; type `show c' for details. - -The hypothetical commands `show w' and `show c' should show the appropriate -parts of the General Public License. Of course, your program's commands -might be different; for a GUI interface, you would use an "about box". - - You should also get your employer (if you work as a programmer) or school, -if any, to sign a "copyright disclaimer" for the program, if necessary. -For more information on this, and how to apply and follow the GNU GPL, see -. - - The GNU General Public License does not permit incorporating your program -into proprietary programs. If your program is a subroutine library, you -may consider it more useful to permit linking proprietary applications with -the library. If this is what you want to do, use the GNU Lesser General -Public License instead of this License. But first, please read -. 
diff --git a/vendor/github.com/firefart/nonamedreturns/analyzer/analyzer.go b/vendor/github.com/firefart/nonamedreturns/analyzer/analyzer.go deleted file mode 100644 index 6ad97ab49..000000000 --- a/vendor/github.com/firefart/nonamedreturns/analyzer/analyzer.go +++ /dev/null @@ -1,134 +0,0 @@ -package analyzer - -import ( - "flag" - "go/ast" - "go/types" - - "golang.org/x/tools/go/analysis" - "golang.org/x/tools/go/analysis/passes/inspect" - "golang.org/x/tools/go/ast/inspector" -) - -const FlagReportErrorInDefer = "report-error-in-defer" - -var Analyzer = &analysis.Analyzer{ - Name: "nonamedreturns", - Doc: "Reports all named returns", - Flags: flags(), - Run: run, - Requires: []*analysis.Analyzer{inspect.Analyzer}, -} - -func flags() flag.FlagSet { - fs := flag.FlagSet{} - fs.Bool(FlagReportErrorInDefer, false, "report named error if it is assigned inside defer") - return fs -} - -func run(pass *analysis.Pass) (interface{}, error) { - reportErrorInDefer := pass.Analyzer.Flags.Lookup(FlagReportErrorInDefer).Value.String() == "true" - errorType := types.Universe.Lookup("error").Type() - - inspector := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) - - // only filter function defintions - nodeFilter := []ast.Node{ - (*ast.FuncDecl)(nil), - (*ast.FuncLit)(nil), - } - - inspector.Preorder(nodeFilter, func(node ast.Node) { - var funcResults *ast.FieldList - var funcBody *ast.BlockStmt - - switch n := node.(type) { - case *ast.FuncLit: - funcResults = n.Type.Results - funcBody = n.Body - case *ast.FuncDecl: - funcResults = n.Type.Results - funcBody = n.Body - default: - return - } - - // no return values - if funcResults == nil { - return - } - - resultsList := funcResults.List - - for _, p := range resultsList { - if len(p.Names) == 0 { - // all good, the parameter is not named - continue - } - - for _, n := range p.Names { - if n.Name == "_" { - continue - } - - if !reportErrorInDefer && - types.Identical(pass.TypesInfo.TypeOf(p.Type), errorType) && - findDeferWithVariableAssignment(funcBody, pass.TypesInfo, pass.TypesInfo.ObjectOf(n)) { - continue - } - - pass.Reportf(node.Pos(), "named return %q with type %q found", n.Name, types.ExprString(p.Type)) - } - } - }) - - return nil, nil -} - -func findDeferWithVariableAssignment(body *ast.BlockStmt, info *types.Info, variable types.Object) bool { - found := false - - ast.Inspect(body, func(node ast.Node) bool { - if found { - return false // stop inspection - } - - if d, ok := node.(*ast.DeferStmt); ok { - if fn, ok2 := d.Call.Fun.(*ast.FuncLit); ok2 { - if findVariableAssignment(fn.Body, info, variable) { - found = true - return false - } - } - } - - return true - }) - - return found -} - -func findVariableAssignment(body *ast.BlockStmt, info *types.Info, variable types.Object) bool { - found := false - - ast.Inspect(body, func(node ast.Node) bool { - if found { - return false // stop inspection - } - - if a, ok := node.(*ast.AssignStmt); ok { - for _, lh := range a.Lhs { - if i, ok2 := lh.(*ast.Ident); ok2 { - if info.ObjectOf(i) == variable { - found = true - return false - } - } - } - } - - return true - }) - - return found -} diff --git a/vendor/github.com/fsnotify/fsnotify/.cirrus.yml b/vendor/github.com/fsnotify/fsnotify/.cirrus.yml deleted file mode 100644 index ffc7b992b..000000000 --- a/vendor/github.com/fsnotify/fsnotify/.cirrus.yml +++ /dev/null @@ -1,13 +0,0 @@ -freebsd_task: - name: 'FreeBSD' - freebsd_instance: - image_family: freebsd-13-2 - install_script: - - pkg update -f - - pkg install -y go - test_script: - # run 
tests as user "cirrus" instead of root - - pw useradd cirrus -m - - chown -R cirrus:cirrus . - - FSNOTIFY_BUFFER=4096 sudo --preserve-env=FSNOTIFY_BUFFER -u cirrus go test -parallel 1 -race ./... - - sudo --preserve-env=FSNOTIFY_BUFFER -u cirrus go test -parallel 1 -race ./... diff --git a/vendor/github.com/fsnotify/fsnotify/.editorconfig b/vendor/github.com/fsnotify/fsnotify/.editorconfig deleted file mode 100644 index fad895851..000000000 --- a/vendor/github.com/fsnotify/fsnotify/.editorconfig +++ /dev/null @@ -1,12 +0,0 @@ -root = true - -[*.go] -indent_style = tab -indent_size = 4 -insert_final_newline = true - -[*.{yml,yaml}] -indent_style = space -indent_size = 2 -insert_final_newline = true -trim_trailing_whitespace = true diff --git a/vendor/github.com/fsnotify/fsnotify/.gitattributes b/vendor/github.com/fsnotify/fsnotify/.gitattributes deleted file mode 100644 index 32f1001be..000000000 --- a/vendor/github.com/fsnotify/fsnotify/.gitattributes +++ /dev/null @@ -1 +0,0 @@ -go.sum linguist-generated diff --git a/vendor/github.com/fsnotify/fsnotify/.gitignore b/vendor/github.com/fsnotify/fsnotify/.gitignore deleted file mode 100644 index 391cc076b..000000000 --- a/vendor/github.com/fsnotify/fsnotify/.gitignore +++ /dev/null @@ -1,7 +0,0 @@ -# go test -c output -*.test -*.test.exe - -# Output of go build ./cmd/fsnotify -/fsnotify -/fsnotify.exe diff --git a/vendor/github.com/fsnotify/fsnotify/.mailmap b/vendor/github.com/fsnotify/fsnotify/.mailmap deleted file mode 100644 index a04f2907f..000000000 --- a/vendor/github.com/fsnotify/fsnotify/.mailmap +++ /dev/null @@ -1,2 +0,0 @@ -Chris Howey -Nathan Youngman <4566+nathany@users.noreply.github.com> diff --git a/vendor/github.com/fsnotify/fsnotify/CHANGELOG.md b/vendor/github.com/fsnotify/fsnotify/CHANGELOG.md deleted file mode 100644 index e0e575754..000000000 --- a/vendor/github.com/fsnotify/fsnotify/CHANGELOG.md +++ /dev/null @@ -1,541 +0,0 @@ -# Changelog - -Unreleased ----------- -Nothing yet. - -1.7.0 - 2023-10-22 ------------------- -This version of fsnotify needs Go 1.17. - -### Additions - -- illumos: add FEN backend to support illumos and Solaris. ([#371]) - -- all: add `NewBufferedWatcher()` to use a buffered channel, which can be useful - in cases where you can't control the kernel buffer and receive a large number - of events in bursts. ([#550], [#572]) - -- all: add `AddWith()`, which is identical to `Add()` but allows passing - options. ([#521]) - -- windows: allow setting the ReadDirectoryChangesW() buffer size with - `fsnotify.WithBufferSize()`; the default of 64K is the highest value that - works on all platforms and is enough for most purposes, but in some cases a - highest buffer is needed. ([#521]) - -### Changes and fixes - -- inotify: remove watcher if a watched path is renamed ([#518]) - - After a rename the reported name wasn't updated, or even an empty string. - Inotify doesn't provide any good facilities to update it, so just remove the - watcher. This is already how it worked on kqueue and FEN. - - On Windows this does work, and remains working. - -- windows: don't listen for file attribute changes ([#520]) - - File attribute changes are sent as `FILE_ACTION_MODIFIED` by the Windows API, - with no way to see if they're a file write or attribute change, so would show - up as a fsnotify.Write event. This is never useful, and could result in many - spurious Write events. 
- -- windows: return `ErrEventOverflow` if the buffer is full ([#525]) - - Before it would merely return "short read", making it hard to detect this - error. - -- kqueue: make sure events for all files are delivered properly when removing a - watched directory ([#526]) - - Previously they would get sent with `""` (empty string) or `"."` as the path - name. - -- kqueue: don't emit spurious Create events for symbolic links ([#524]) - - The link would get resolved but kqueue would "forget" it already saw the link - itself, resulting on a Create for every Write event for the directory. - -- all: return `ErrClosed` on `Add()` when the watcher is closed ([#516]) - -- other: add `Watcher.Errors` and `Watcher.Events` to the no-op `Watcher` in - `backend_other.go`, making it easier to use on unsupported platforms such as - WASM, AIX, etc. ([#528]) - -- other: use the `backend_other.go` no-op if the `appengine` build tag is set; - Google AppEngine forbids usage of the unsafe package so the inotify backend - won't compile there. - -[#371]: https://github.com/fsnotify/fsnotify/pull/371 -[#516]: https://github.com/fsnotify/fsnotify/pull/516 -[#518]: https://github.com/fsnotify/fsnotify/pull/518 -[#520]: https://github.com/fsnotify/fsnotify/pull/520 -[#521]: https://github.com/fsnotify/fsnotify/pull/521 -[#524]: https://github.com/fsnotify/fsnotify/pull/524 -[#525]: https://github.com/fsnotify/fsnotify/pull/525 -[#526]: https://github.com/fsnotify/fsnotify/pull/526 -[#528]: https://github.com/fsnotify/fsnotify/pull/528 -[#537]: https://github.com/fsnotify/fsnotify/pull/537 -[#550]: https://github.com/fsnotify/fsnotify/pull/550 -[#572]: https://github.com/fsnotify/fsnotify/pull/572 - -1.6.0 - 2022-10-13 ------------------- -This version of fsnotify needs Go 1.16 (this was already the case since 1.5.1, -but not documented). It also increases the minimum Linux version to 2.6.32. - -### Additions - -- all: add `Event.Has()` and `Op.Has()` ([#477]) - - This makes checking events a lot easier; for example: - - if event.Op&Write == Write && !(event.Op&Remove == Remove) { - } - - Becomes: - - if event.Has(Write) && !event.Has(Remove) { - } - -- all: add cmd/fsnotify ([#463]) - - A command-line utility for testing and some examples. - -### Changes and fixes - -- inotify: don't ignore events for files that don't exist ([#260], [#470]) - - Previously the inotify watcher would call `os.Lstat()` to check if a file - still exists before emitting events. - - This was inconsistent with other platforms and resulted in inconsistent event - reporting (e.g. when a file is quickly removed and re-created), and generally - a source of confusion. It was added in 2013 to fix a memory leak that no - longer exists. - -- all: return `ErrNonExistentWatch` when `Remove()` is called on a path that's - not watched ([#460]) - -- inotify: replace epoll() with non-blocking inotify ([#434]) - - Non-blocking inotify was not generally available at the time this library was - written in 2014, but now it is. As a result, the minimum Linux version is - bumped from 2.6.27 to 2.6.32. This hugely simplifies the code and is faster. - -- kqueue: don't check for events every 100ms ([#480]) - - The watcher would wake up every 100ms, even when there was nothing to do. Now - it waits until there is something to do. - -- macos: retry opening files on EINTR ([#475]) - -- kqueue: skip unreadable files ([#479]) - - kqueue requires a file descriptor for every file in a directory; this would - fail if a file was unreadable by the current user. 
Now these files are simply - skipped. - -- windows: fix renaming a watched directory if the parent is also watched ([#370]) - -- windows: increase buffer size from 4K to 64K ([#485]) - -- windows: close file handle on Remove() ([#288]) - -- kqueue: put pathname in the error if watching a file fails ([#471]) - -- inotify, windows: calling Close() more than once could race ([#465]) - -- kqueue: improve Close() performance ([#233]) - -- all: various documentation additions and clarifications. - -[#233]: https://github.com/fsnotify/fsnotify/pull/233 -[#260]: https://github.com/fsnotify/fsnotify/pull/260 -[#288]: https://github.com/fsnotify/fsnotify/pull/288 -[#370]: https://github.com/fsnotify/fsnotify/pull/370 -[#434]: https://github.com/fsnotify/fsnotify/pull/434 -[#460]: https://github.com/fsnotify/fsnotify/pull/460 -[#463]: https://github.com/fsnotify/fsnotify/pull/463 -[#465]: https://github.com/fsnotify/fsnotify/pull/465 -[#470]: https://github.com/fsnotify/fsnotify/pull/470 -[#471]: https://github.com/fsnotify/fsnotify/pull/471 -[#475]: https://github.com/fsnotify/fsnotify/pull/475 -[#477]: https://github.com/fsnotify/fsnotify/pull/477 -[#479]: https://github.com/fsnotify/fsnotify/pull/479 -[#480]: https://github.com/fsnotify/fsnotify/pull/480 -[#485]: https://github.com/fsnotify/fsnotify/pull/485 - -## [1.5.4] - 2022-04-25 - -* Windows: add missing defer to `Watcher.WatchList` [#447](https://github.com/fsnotify/fsnotify/pull/447) -* go.mod: use latest x/sys [#444](https://github.com/fsnotify/fsnotify/pull/444) -* Fix compilation for OpenBSD [#443](https://github.com/fsnotify/fsnotify/pull/443) - -## [1.5.3] - 2022-04-22 - -* This version is retracted. An incorrect branch is published accidentally [#445](https://github.com/fsnotify/fsnotify/issues/445) - -## [1.5.2] - 2022-04-21 - -* Add a feature to return the directories and files that are being monitored [#374](https://github.com/fsnotify/fsnotify/pull/374) -* Fix potential crash on windows if `raw.FileNameLength` exceeds `syscall.MAX_PATH` [#361](https://github.com/fsnotify/fsnotify/pull/361) -* Allow build on unsupported GOOS [#424](https://github.com/fsnotify/fsnotify/pull/424) -* Don't set `poller.fd` twice in `newFdPoller` [#406](https://github.com/fsnotify/fsnotify/pull/406) -* fix go vet warnings: call to `(*T).Fatalf` from a non-test goroutine [#416](https://github.com/fsnotify/fsnotify/pull/416) - -## [1.5.1] - 2021-08-24 - -* Revert Add AddRaw to not follow symlinks [#394](https://github.com/fsnotify/fsnotify/pull/394) - -## [1.5.0] - 2021-08-20 - -* Go: Increase minimum required version to Go 1.12 [#381](https://github.com/fsnotify/fsnotify/pull/381) -* Feature: Add AddRaw method which does not follow symlinks when adding a watch [#289](https://github.com/fsnotify/fsnotify/pull/298) -* Windows: Follow symlinks by default like on all other systems [#289](https://github.com/fsnotify/fsnotify/pull/289) -* CI: Use GitHub Actions for CI and cover go 1.12-1.17 - [#378](https://github.com/fsnotify/fsnotify/pull/378) - [#381](https://github.com/fsnotify/fsnotify/pull/381) - [#385](https://github.com/fsnotify/fsnotify/pull/385) -* Go 1.14+: Fix unsafe pointer conversion [#325](https://github.com/fsnotify/fsnotify/pull/325) - -## [1.4.9] - 2020-03-11 - -* Move example usage to the readme #329. This may resolve #328. 
- -## [1.4.8] - 2020-03-10 - -* CI: test more go versions (@nathany 1d13583d846ea9d66dcabbfefbfb9d8e6fb05216) -* Tests: Queued inotify events could have been read by the test before max_queued_events was hit (@matthias-stone #265) -* Tests: t.Fatalf -> t.Errorf in go routines (@gdey #266) -* CI: Less verbosity (@nathany #267) -* Tests: Darwin: Exchangedata is deprecated on 10.13 (@nathany #267) -* Tests: Check if channels are closed in the example (@alexeykazakov #244) -* CI: Only run golint on latest version of go and fix issues (@cpuguy83 #284) -* CI: Add windows to travis matrix (@cpuguy83 #284) -* Docs: Remover appveyor badge (@nathany 11844c0959f6fff69ba325d097fce35bd85a8e93) -* Linux: create epoll and pipe fds with close-on-exec (@JohannesEbke #219) -* Linux: open files with close-on-exec (@linxiulei #273) -* Docs: Plan to support fanotify (@nathany ab058b44498e8b7566a799372a39d150d9ea0119 ) -* Project: Add go.mod (@nathany #309) -* Project: Revise editor config (@nathany #309) -* Project: Update copyright for 2019 (@nathany #309) -* CI: Drop go1.8 from CI matrix (@nathany #309) -* Docs: Updating the FAQ section for supportability with NFS & FUSE filesystems (@Pratik32 4bf2d1fec78374803a39307bfb8d340688f4f28e ) - -## [1.4.7] - 2018-01-09 - -* BSD/macOS: Fix possible deadlock on closing the watcher on kqueue (thanks @nhooyr and @glycerine) -* Tests: Fix missing verb on format string (thanks @rchiossi) -* Linux: Fix deadlock in Remove (thanks @aarondl) -* Linux: Watch.Add improvements (avoid race, fix consistency, reduce garbage) (thanks @twpayne) -* Docs: Moved FAQ into the README (thanks @vahe) -* Linux: Properly handle inotify's IN_Q_OVERFLOW event (thanks @zeldovich) -* Docs: replace references to OS X with macOS - -## [1.4.2] - 2016-10-10 - -* Linux: use InotifyInit1 with IN_CLOEXEC to stop leaking a file descriptor to a child process when using fork/exec [#178](https://github.com/fsnotify/fsnotify/pull/178) (thanks @pattyshack) - -## [1.4.1] - 2016-10-04 - -* Fix flaky inotify stress test on Linux [#177](https://github.com/fsnotify/fsnotify/pull/177) (thanks @pattyshack) - -## [1.4.0] - 2016-10-01 - -* add a String() method to Event.Op [#165](https://github.com/fsnotify/fsnotify/pull/165) (thanks @oozie) - -## [1.3.1] - 2016-06-28 - -* Windows: fix for double backslash when watching the root of a drive [#151](https://github.com/fsnotify/fsnotify/issues/151) (thanks @brunoqc) - -## [1.3.0] - 2016-04-19 - -* Support linux/arm64 by [patching](https://go-review.googlesource.com/#/c/21971/) x/sys/unix and switching to to it from syscall (thanks @suihkulokki) [#135](https://github.com/fsnotify/fsnotify/pull/135) - -## [1.2.10] - 2016-03-02 - -* Fix golint errors in windows.go [#121](https://github.com/fsnotify/fsnotify/pull/121) (thanks @tiffanyfj) - -## [1.2.9] - 2016-01-13 - -kqueue: Fix logic for CREATE after REMOVE [#111](https://github.com/fsnotify/fsnotify/pull/111) (thanks @bep) - -## [1.2.8] - 2015-12-17 - -* kqueue: fix race condition in Close [#105](https://github.com/fsnotify/fsnotify/pull/105) (thanks @djui for reporting the issue and @ppknap for writing a failing test) -* inotify: fix race in test -* enable race detection for continuous integration (Linux, Mac, Windows) - -## [1.2.5] - 2015-10-17 - -* inotify: use epoll_create1 for arm64 support (requires Linux 2.6.27 or later) [#100](https://github.com/fsnotify/fsnotify/pull/100) (thanks @suihkulokki) -* inotify: fix path leaks [#73](https://github.com/fsnotify/fsnotify/pull/73) (thanks @chamaken) -* kqueue: watch for 
rename events on subdirectories [#83](https://github.com/fsnotify/fsnotify/pull/83) (thanks @guotie) -* kqueue: avoid infinite loops from symlinks cycles [#101](https://github.com/fsnotify/fsnotify/pull/101) (thanks @illicitonion) - -## [1.2.1] - 2015-10-14 - -* kqueue: don't watch named pipes [#98](https://github.com/fsnotify/fsnotify/pull/98) (thanks @evanphx) - -## [1.2.0] - 2015-02-08 - -* inotify: use epoll to wake up readEvents [#66](https://github.com/fsnotify/fsnotify/pull/66) (thanks @PieterD) -* inotify: closing watcher should now always shut down goroutine [#63](https://github.com/fsnotify/fsnotify/pull/63) (thanks @PieterD) -* kqueue: close kqueue after removing watches, fixes [#59](https://github.com/fsnotify/fsnotify/issues/59) - -## [1.1.1] - 2015-02-05 - -* inotify: Retry read on EINTR [#61](https://github.com/fsnotify/fsnotify/issues/61) (thanks @PieterD) - -## [1.1.0] - 2014-12-12 - -* kqueue: rework internals [#43](https://github.com/fsnotify/fsnotify/pull/43) - * add low-level functions - * only need to store flags on directories - * less mutexes [#13](https://github.com/fsnotify/fsnotify/issues/13) - * done can be an unbuffered channel - * remove calls to os.NewSyscallError -* More efficient string concatenation for Event.String() [#52](https://github.com/fsnotify/fsnotify/pull/52) (thanks @mdlayher) -* kqueue: fix regression in rework causing subdirectories to be watched [#48](https://github.com/fsnotify/fsnotify/issues/48) -* kqueue: cleanup internal watch before sending remove event [#51](https://github.com/fsnotify/fsnotify/issues/51) - -## [1.0.4] - 2014-09-07 - -* kqueue: add dragonfly to the build tags. -* Rename source code files, rearrange code so exported APIs are at the top. -* Add done channel to example code. [#37](https://github.com/fsnotify/fsnotify/pull/37) (thanks @chenyukang) - -## [1.0.3] - 2014-08-19 - -* [Fix] Windows MOVED_TO now translates to Create like on BSD and Linux. [#36](https://github.com/fsnotify/fsnotify/issues/36) - -## [1.0.2] - 2014-08-17 - -* [Fix] Missing create events on macOS. [#14](https://github.com/fsnotify/fsnotify/issues/14) (thanks @zhsso) -* [Fix] Make ./path and path equivalent. (thanks @zhsso) - -## [1.0.0] - 2014-08-15 - -* [API] Remove AddWatch on Windows, use Add. -* Improve documentation for exported identifiers. [#30](https://github.com/fsnotify/fsnotify/issues/30) -* Minor updates based on feedback from golint. - -## dev / 2014-07-09 - -* Moved to [github.com/fsnotify/fsnotify](https://github.com/fsnotify/fsnotify). -* Use os.NewSyscallError instead of returning errno (thanks @hariharan-uno) - -## dev / 2014-07-04 - -* kqueue: fix incorrect mutex used in Close() -* Update example to demonstrate usage of Op. - -## dev / 2014-06-28 - -* [API] Don't set the Write Op for attribute notifications [#4](https://github.com/fsnotify/fsnotify/issues/4) -* Fix for String() method on Event (thanks Alex Brainman) -* Don't build on Plan 9 or Solaris (thanks @4ad) - -## dev / 2014-06-21 - -* Events channel of type Event rather than *Event. -* [internal] use syscall constants directly for inotify and kqueue. -* [internal] kqueue: rename events to kevents and fileEvent to event. - -## dev / 2014-06-19 - -* Go 1.3+ required on Windows (uses syscall.ERROR_MORE_DATA internally). -* [internal] remove cookie from Event struct (unused). -* [internal] Event struct has the same definition across every OS. -* [internal] remove internal watch and removeWatch methods. 
- -## dev / 2014-06-12 - -* [API] Renamed Watch() to Add() and RemoveWatch() to Remove(). -* [API] Pluralized channel names: Events and Errors. -* [API] Renamed FileEvent struct to Event. -* [API] Op constants replace methods like IsCreate(). - -## dev / 2014-06-12 - -* Fix data race on kevent buffer (thanks @tilaks) [#98](https://github.com/howeyc/fsnotify/pull/98) - -## dev / 2014-05-23 - -* [API] Remove current implementation of WatchFlags. - * current implementation doesn't take advantage of OS for efficiency - * provides little benefit over filtering events as they are received, but has extra bookkeeping and mutexes - * no tests for the current implementation - * not fully implemented on Windows [#93](https://github.com/howeyc/fsnotify/issues/93#issuecomment-39285195) - -## [0.9.3] - 2014-12-31 - -* kqueue: cleanup internal watch before sending remove event [#51](https://github.com/fsnotify/fsnotify/issues/51) - -## [0.9.2] - 2014-08-17 - -* [Backport] Fix missing create events on macOS. [#14](https://github.com/fsnotify/fsnotify/issues/14) (thanks @zhsso) - -## [0.9.1] - 2014-06-12 - -* Fix data race on kevent buffer (thanks @tilaks) [#98](https://github.com/howeyc/fsnotify/pull/98) - -## [0.9.0] - 2014-01-17 - -* IsAttrib() for events that only concern a file's metadata [#79][] (thanks @abustany) -* [Fix] kqueue: fix deadlock [#77][] (thanks @cespare) -* [NOTICE] Development has moved to `code.google.com/p/go.exp/fsnotify` in preparation for inclusion in the Go standard library. - -## [0.8.12] - 2013-11-13 - -* [API] Remove FD_SET and friends from Linux adapter - -## [0.8.11] - 2013-11-02 - -* [Doc] Add Changelog [#72][] (thanks @nathany) -* [Doc] Spotlight and double modify events on macOS [#62][] (reported by @paulhammond) - -## [0.8.10] - 2013-10-19 - -* [Fix] kqueue: remove file watches when parent directory is removed [#71][] (reported by @mdwhatcott) -* [Fix] kqueue: race between Close and readEvents [#70][] (reported by @bernerdschaefer) -* [Doc] specify OS-specific limits in README (thanks @debrando) - -## [0.8.9] - 2013-09-08 - -* [Doc] Contributing (thanks @nathany) -* [Doc] update package path in example code [#63][] (thanks @paulhammond) -* [Doc] GoCI badge in README (Linux only) [#60][] -* [Doc] Cross-platform testing with Vagrant [#59][] (thanks @nathany) - -## [0.8.8] - 2013-06-17 - -* [Fix] Windows: handle `ERROR_MORE_DATA` on Windows [#49][] (thanks @jbowtie) - -## [0.8.7] - 2013-06-03 - -* [API] Make syscall flags internal -* [Fix] inotify: ignore event changes -* [Fix] race in symlink test [#45][] (reported by @srid) -* [Fix] tests on Windows -* lower case error messages - -## [0.8.6] - 2013-05-23 - -* kqueue: Use EVT_ONLY flag on Darwin -* [Doc] Update README with full example - -## [0.8.5] - 2013-05-09 - -* [Fix] inotify: allow monitoring of "broken" symlinks (thanks @tsg) - -## [0.8.4] - 2013-04-07 - -* [Fix] kqueue: watch all file events [#40][] (thanks @ChrisBuchholz) - -## [0.8.3] - 2013-03-13 - -* [Fix] inoitfy/kqueue memory leak [#36][] (reported by @nbkolchin) -* [Fix] kqueue: use fsnFlags for watching a directory [#33][] (reported by @nbkolchin) - -## [0.8.2] - 2013-02-07 - -* [Doc] add Authors -* [Fix] fix data races for map access [#29][] (thanks @fsouza) - -## [0.8.1] - 2013-01-09 - -* [Fix] Windows path separators -* [Doc] BSD License - -## [0.8.0] - 2012-11-09 - -* kqueue: directory watching improvements (thanks @vmirage) -* inotify: add `IN_MOVED_TO` [#25][] (requested by @cpisto) -* [Fix] kqueue: deleting watched directory [#24][] (reported by 
@jakerr) - -## [0.7.4] - 2012-10-09 - -* [Fix] inotify: fixes from https://codereview.appspot.com/5418045/ (ugorji) -* [Fix] kqueue: preserve watch flags when watching for delete [#21][] (reported by @robfig) -* [Fix] kqueue: watch the directory even if it isn't a new watch (thanks @robfig) -* [Fix] kqueue: modify after recreation of file - -## [0.7.3] - 2012-09-27 - -* [Fix] kqueue: watch with an existing folder inside the watched folder (thanks @vmirage) -* [Fix] kqueue: no longer get duplicate CREATE events - -## [0.7.2] - 2012-09-01 - -* kqueue: events for created directories - -## [0.7.1] - 2012-07-14 - -* [Fix] for renaming files - -## [0.7.0] - 2012-07-02 - -* [Feature] FSNotify flags -* [Fix] inotify: Added file name back to event path - -## [0.6.0] - 2012-06-06 - -* kqueue: watch files after directory created (thanks @tmc) - -## [0.5.1] - 2012-05-22 - -* [Fix] inotify: remove all watches before Close() - -## [0.5.0] - 2012-05-03 - -* [API] kqueue: return errors during watch instead of sending over channel -* kqueue: match symlink behavior on Linux -* inotify: add `DELETE_SELF` (requested by @taralx) -* [Fix] kqueue: handle EINTR (reported by @robfig) -* [Doc] Godoc example [#1][] (thanks @davecheney) - -## [0.4.0] - 2012-03-30 - -* Go 1 released: build with go tool -* [Feature] Windows support using winfsnotify -* Windows does not have attribute change notifications -* Roll attribute notifications into IsModify - -## [0.3.0] - 2012-02-19 - -* kqueue: add files when watch directory - -## [0.2.0] - 2011-12-30 - -* update to latest Go weekly code - -## [0.1.0] - 2011-10-19 - -* kqueue: add watch on file creation to match inotify -* kqueue: create file event -* inotify: ignore `IN_IGNORED` events -* event String() -* linux: common FileEvent functions -* initial commit - -[#79]: https://github.com/howeyc/fsnotify/pull/79 -[#77]: https://github.com/howeyc/fsnotify/pull/77 -[#72]: https://github.com/howeyc/fsnotify/issues/72 -[#71]: https://github.com/howeyc/fsnotify/issues/71 -[#70]: https://github.com/howeyc/fsnotify/issues/70 -[#63]: https://github.com/howeyc/fsnotify/issues/63 -[#62]: https://github.com/howeyc/fsnotify/issues/62 -[#60]: https://github.com/howeyc/fsnotify/issues/60 -[#59]: https://github.com/howeyc/fsnotify/issues/59 -[#49]: https://github.com/howeyc/fsnotify/issues/49 -[#45]: https://github.com/howeyc/fsnotify/issues/45 -[#40]: https://github.com/howeyc/fsnotify/issues/40 -[#36]: https://github.com/howeyc/fsnotify/issues/36 -[#33]: https://github.com/howeyc/fsnotify/issues/33 -[#29]: https://github.com/howeyc/fsnotify/issues/29 -[#25]: https://github.com/howeyc/fsnotify/issues/25 -[#24]: https://github.com/howeyc/fsnotify/issues/24 -[#21]: https://github.com/howeyc/fsnotify/issues/21 diff --git a/vendor/github.com/fsnotify/fsnotify/CONTRIBUTING.md b/vendor/github.com/fsnotify/fsnotify/CONTRIBUTING.md deleted file mode 100644 index ea379759d..000000000 --- a/vendor/github.com/fsnotify/fsnotify/CONTRIBUTING.md +++ /dev/null @@ -1,26 +0,0 @@ -Thank you for your interest in contributing to fsnotify! We try to review and -merge PRs in a reasonable timeframe, but please be aware that: - -- To avoid "wasted" work, please discus changes on the issue tracker first. You - can just send PRs, but they may end up being rejected for one reason or the - other. - -- fsnotify is a cross-platform library, and changes must work reasonably well on - all supported platforms. 
- -- Changes will need to be compatible; old code should still compile, and the - runtime behaviour can't change in ways that are likely to lead to problems for - users. - -Testing -------- -Just `go test ./...` runs all the tests; the CI runs this on all supported -platforms. Testing different platforms locally can be done with something like -[goon] or [Vagrant], but this isn't super-easy to set up at the moment. - -Use the `-short` flag to make the "stress test" run faster. - - -[goon]: https://github.com/arp242/goon -[Vagrant]: https://www.vagrantup.com/ -[integration_test.go]: /integration_test.go diff --git a/vendor/github.com/fsnotify/fsnotify/LICENSE b/vendor/github.com/fsnotify/fsnotify/LICENSE deleted file mode 100644 index fb03ade75..000000000 --- a/vendor/github.com/fsnotify/fsnotify/LICENSE +++ /dev/null @@ -1,25 +0,0 @@ -Copyright © 2012 The Go Authors. All rights reserved. -Copyright © fsnotify Authors. All rights reserved. - -Redistribution and use in source and binary forms, with or without modification, -are permitted provided that the following conditions are met: - -* Redistributions of source code must retain the above copyright notice, this - list of conditions and the following disclaimer. -* Redistributions in binary form must reproduce the above copyright notice, this - list of conditions and the following disclaimer in the documentation and/or - other materials provided with the distribution. -* Neither the name of Google Inc. nor the names of its contributors may be used - to endorse or promote products derived from this software without specific - prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND -ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED -WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR -ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES -(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON -ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS -SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/vendor/github.com/fsnotify/fsnotify/README.md b/vendor/github.com/fsnotify/fsnotify/README.md deleted file mode 100644 index e480733d1..000000000 --- a/vendor/github.com/fsnotify/fsnotify/README.md +++ /dev/null @@ -1,184 +0,0 @@ -fsnotify is a Go library to provide cross-platform filesystem notifications on -Windows, Linux, macOS, BSD, and illumos. 
- -Go 1.17 or newer is required; the full documentation is at -https://pkg.go.dev/github.com/fsnotify/fsnotify - ---- - -Platform support: - -| Backend | OS | Status | -| :-------------------- | :--------- | :------------------------------------------------------------------------ | -| inotify | Linux | Supported | -| kqueue | BSD, macOS | Supported | -| ReadDirectoryChangesW | Windows | Supported | -| FEN | illumos | Supported | -| fanotify | Linux 5.9+ | [Not yet](https://github.com/fsnotify/fsnotify/issues/114) | -| AHAFS | AIX | [aix branch]; experimental due to lack of maintainer and test environment | -| FSEvents | macOS | [Needs support in x/sys/unix][fsevents] | -| USN Journals | Windows | [Needs support in x/sys/windows][usn] | -| Polling | *All* | [Not yet](https://github.com/fsnotify/fsnotify/issues/9) | - -Linux and illumos should include Android and Solaris, but these are currently -untested. - -[fsevents]: https://github.com/fsnotify/fsnotify/issues/11#issuecomment-1279133120 -[usn]: https://github.com/fsnotify/fsnotify/issues/53#issuecomment-1279829847 -[aix branch]: https://github.com/fsnotify/fsnotify/issues/353#issuecomment-1284590129 - -Usage ------ -A basic example: - -```go -package main - -import ( - "log" - - "github.com/fsnotify/fsnotify" -) - -func main() { - // Create new watcher. - watcher, err := fsnotify.NewWatcher() - if err != nil { - log.Fatal(err) - } - defer watcher.Close() - - // Start listening for events. - go func() { - for { - select { - case event, ok := <-watcher.Events: - if !ok { - return - } - log.Println("event:", event) - if event.Has(fsnotify.Write) { - log.Println("modified file:", event.Name) - } - case err, ok := <-watcher.Errors: - if !ok { - return - } - log.Println("error:", err) - } - } - }() - - // Add a path. - err = watcher.Add("/tmp") - if err != nil { - log.Fatal(err) - } - - // Block main goroutine forever. - <-make(chan struct{}) -} -``` - -Some more examples can be found in [cmd/fsnotify](cmd/fsnotify), which can be -run with: - - % go run ./cmd/fsnotify - -Further detailed documentation can be found in godoc: -https://pkg.go.dev/github.com/fsnotify/fsnotify - -FAQ ---- -### Will a file still be watched when it's moved to another directory? -No, not unless you are watching the location it was moved to. - -### Are subdirectories watched? -No, you must add watches for any directory you want to watch (a recursive -watcher is on the roadmap: [#18]). - -[#18]: https://github.com/fsnotify/fsnotify/issues/18 - -### Do I have to watch the Error and Event channels in a goroutine? -Yes. You can read both channels in the same goroutine using `select` (you don't -need a separate goroutine for both channels; see the example). - -### Why don't notifications work with NFS, SMB, FUSE, /proc, or /sys? -fsnotify requires support from underlying OS to work. The current NFS and SMB -protocols does not provide network level support for file notifications, and -neither do the /proc and /sys virtual filesystems. - -This could be fixed with a polling watcher ([#9]), but it's not yet implemented. - -[#9]: https://github.com/fsnotify/fsnotify/issues/9 - -### Why do I get many Chmod events? -Some programs may generate a lot of attribute changes; for example Spotlight on -macOS, anti-virus programs, backup applications, and some others are known to do -this. As a rule, it's typically best to ignore Chmod events. They're often not -useful, and tend to cause problems. - -Spotlight indexing on macOS can result in multiple events (see [#15]). 
A -temporary workaround is to add your folder(s) to the *Spotlight Privacy -settings* until we have a native FSEvents implementation (see [#11]). - -[#11]: https://github.com/fsnotify/fsnotify/issues/11 -[#15]: https://github.com/fsnotify/fsnotify/issues/15 - -### Watching a file doesn't work well -Watching individual files (rather than directories) is generally not recommended -as many programs (especially editors) update files atomically: it will write to -a temporary file which is then moved to to destination, overwriting the original -(or some variant thereof). The watcher on the original file is now lost, as that -no longer exists. - -The upshot of this is that a power failure or crash won't leave a half-written -file. - -Watch the parent directory and use `Event.Name` to filter out files you're not -interested in. There is an example of this in `cmd/fsnotify/file.go`. - -Platform-specific notes ------------------------ -### Linux -When a file is removed a REMOVE event won't be emitted until all file -descriptors are closed; it will emit a CHMOD instead: - - fp := os.Open("file") - os.Remove("file") // CHMOD - fp.Close() // REMOVE - -This is the event that inotify sends, so not much can be changed about this. - -The `fs.inotify.max_user_watches` sysctl variable specifies the upper limit for -the number of watches per user, and `fs.inotify.max_user_instances` specifies -the maximum number of inotify instances per user. Every Watcher you create is an -"instance", and every path you add is a "watch". - -These are also exposed in `/proc` as `/proc/sys/fs/inotify/max_user_watches` and -`/proc/sys/fs/inotify/max_user_instances` - -To increase them you can use `sysctl` or write the value to proc file: - - # The default values on Linux 5.18 - sysctl fs.inotify.max_user_watches=124983 - sysctl fs.inotify.max_user_instances=128 - -To make the changes persist on reboot edit `/etc/sysctl.conf` or -`/usr/lib/sysctl.d/50-default.conf` (details differ per Linux distro; check your -distro's documentation): - - fs.inotify.max_user_watches=124983 - fs.inotify.max_user_instances=128 - -Reaching the limit will result in a "no space left on device" or "too many open -files" error. - -### kqueue (macOS, all BSD systems) -kqueue requires opening a file descriptor for every file that's being watched; -so if you're watching a directory with five files then that's six file -descriptors. You will run in to your system's "max open files" limit faster on -these platforms. - -The sysctl variables `kern.maxfiles` and `kern.maxfilesperproc` can be used to -control the maximum number of open files. diff --git a/vendor/github.com/fsnotify/fsnotify/backend_fen.go b/vendor/github.com/fsnotify/fsnotify/backend_fen.go deleted file mode 100644 index 28497f1dd..000000000 --- a/vendor/github.com/fsnotify/fsnotify/backend_fen.go +++ /dev/null @@ -1,640 +0,0 @@ -//go:build solaris -// +build solaris - -// Note: the documentation on the Watcher type and methods is generated from -// mkdoc.zsh - -package fsnotify - -import ( - "errors" - "fmt" - "os" - "path/filepath" - "sync" - - "golang.org/x/sys/unix" -) - -// Watcher watches a set of paths, delivering events on a channel. -// -// A watcher should not be copied (e.g. pass it by pointer, rather than by -// value). -// -// # Linux notes -// -// When a file is removed a Remove event won't be emitted until all file -// descriptors are closed, and deletes will always emit a Chmod. 
For example: -// -// fp := os.Open("file") -// os.Remove("file") // Triggers Chmod -// fp.Close() // Triggers Remove -// -// This is the event that inotify sends, so not much can be changed about this. -// -// The fs.inotify.max_user_watches sysctl variable specifies the upper limit -// for the number of watches per user, and fs.inotify.max_user_instances -// specifies the maximum number of inotify instances per user. Every Watcher you -// create is an "instance", and every path you add is a "watch". -// -// These are also exposed in /proc as /proc/sys/fs/inotify/max_user_watches and -// /proc/sys/fs/inotify/max_user_instances -// -// To increase them you can use sysctl or write the value to the /proc file: -// -// # Default values on Linux 5.18 -// sysctl fs.inotify.max_user_watches=124983 -// sysctl fs.inotify.max_user_instances=128 -// -// To make the changes persist on reboot edit /etc/sysctl.conf or -// /usr/lib/sysctl.d/50-default.conf (details differ per Linux distro; check -// your distro's documentation): -// -// fs.inotify.max_user_watches=124983 -// fs.inotify.max_user_instances=128 -// -// Reaching the limit will result in a "no space left on device" or "too many open -// files" error. -// -// # kqueue notes (macOS, BSD) -// -// kqueue requires opening a file descriptor for every file that's being watched; -// so if you're watching a directory with five files then that's six file -// descriptors. You will run in to your system's "max open files" limit faster on -// these platforms. -// -// The sysctl variables kern.maxfiles and kern.maxfilesperproc can be used to -// control the maximum number of open files, as well as /etc/login.conf on BSD -// systems. -// -// # Windows notes -// -// Paths can be added as "C:\path\to\dir", but forward slashes -// ("C:/path/to/dir") will also work. -// -// When a watched directory is removed it will always send an event for the -// directory itself, but may not send events for all files in that directory. -// Sometimes it will send events for all times, sometimes it will send no -// events, and often only for some files. -// -// The default ReadDirectoryChangesW() buffer size is 64K, which is the largest -// value that is guaranteed to work with SMB filesystems. If you have many -// events in quick succession this may not be enough, and you will have to use -// [WithBufferSize] to increase the value. -type Watcher struct { - // Events sends the filesystem change events. - // - // fsnotify can send the following events; a "path" here can refer to a - // file, directory, symbolic link, or special file like a FIFO. - // - // fsnotify.Create A new path was created; this may be followed by one - // or more Write events if data also gets written to a - // file. - // - // fsnotify.Remove A path was removed. - // - // fsnotify.Rename A path was renamed. A rename is always sent with the - // old path as Event.Name, and a Create event will be - // sent with the new name. Renames are only sent for - // paths that are currently watched; e.g. moving an - // unmonitored file into a monitored directory will - // show up as just a Create. Similarly, renaming a file - // to outside a monitored directory will show up as - // only a Rename. - // - // fsnotify.Write A file or named pipe was written to. A Truncate will - // also trigger a Write. A single "write action" - // initiated by the user may show up as one or multiple - // writes, depending on when the system syncs things to - // disk. 
For example when compiling a large Go program - // you may get hundreds of Write events, and you may - // want to wait until you've stopped receiving them - // (see the dedup example in cmd/fsnotify). - // - // Some systems may send Write event for directories - // when the directory content changes. - // - // fsnotify.Chmod Attributes were changed. On Linux this is also sent - // when a file is removed (or more accurately, when a - // link to an inode is removed). On kqueue it's sent - // when a file is truncated. On Windows it's never - // sent. - Events chan Event - - // Errors sends any errors. - // - // ErrEventOverflow is used to indicate there are too many events: - // - // - inotify: There are too many queued events (fs.inotify.max_queued_events sysctl) - // - windows: The buffer size is too small; WithBufferSize() can be used to increase it. - // - kqueue, fen: Not used. - Errors chan error - - mu sync.Mutex - port *unix.EventPort - done chan struct{} // Channel for sending a "quit message" to the reader goroutine - dirs map[string]struct{} // Explicitly watched directories - watches map[string]struct{} // Explicitly watched non-directories -} - -// NewWatcher creates a new Watcher. -func NewWatcher() (*Watcher, error) { - return NewBufferedWatcher(0) -} - -// NewBufferedWatcher creates a new Watcher with a buffered Watcher.Events -// channel. -// -// The main use case for this is situations with a very large number of events -// where the kernel buffer size can't be increased (e.g. due to lack of -// permissions). An unbuffered Watcher will perform better for almost all use -// cases, and whenever possible you will be better off increasing the kernel -// buffers instead of adding a large userspace buffer. -func NewBufferedWatcher(sz uint) (*Watcher, error) { - w := &Watcher{ - Events: make(chan Event, sz), - Errors: make(chan error), - dirs: make(map[string]struct{}), - watches: make(map[string]struct{}), - done: make(chan struct{}), - } - - var err error - w.port, err = unix.NewEventPort() - if err != nil { - return nil, fmt.Errorf("fsnotify.NewWatcher: %w", err) - } - - go w.readEvents() - return w, nil -} - -// sendEvent attempts to send an event to the user, returning true if the event -// was put in the channel successfully and false if the watcher has been closed. -func (w *Watcher) sendEvent(name string, op Op) (sent bool) { - select { - case w.Events <- Event{Name: name, Op: op}: - return true - case <-w.done: - return false - } -} - -// sendError attempts to send an error to the user, returning true if the error -// was put in the channel successfully and false if the watcher has been closed. -func (w *Watcher) sendError(err error) (sent bool) { - select { - case w.Errors <- err: - return true - case <-w.done: - return false - } -} - -func (w *Watcher) isClosed() bool { - select { - case <-w.done: - return true - default: - return false - } -} - -// Close removes all watches and closes the Events channel. -func (w *Watcher) Close() error { - // Take the lock used by associateFile to prevent lingering events from - // being processed after the close - w.mu.Lock() - defer w.mu.Unlock() - if w.isClosed() { - return nil - } - close(w.done) - return w.port.Close() -} - -// Add starts monitoring the path for changes. -// -// A path can only be watched once; watching it more than once is a no-op and will -// not return an error. Paths that do not yet exist on the filesystem cannot be -// watched. 
-// -// A watch will be automatically removed if the watched path is deleted or -// renamed. The exception is the Windows backend, which doesn't remove the -// watcher on renames. -// -// Notifications on network filesystems (NFS, SMB, FUSE, etc.) or special -// filesystems (/proc, /sys, etc.) generally don't work. -// -// Returns [ErrClosed] if [Watcher.Close] was called. -// -// See [Watcher.AddWith] for a version that allows adding options. -// -// # Watching directories -// -// All files in a directory are monitored, including new files that are created -// after the watcher is started. Subdirectories are not watched (i.e. it's -// non-recursive). -// -// # Watching files -// -// Watching individual files (rather than directories) is generally not -// recommended as many programs (especially editors) update files atomically: it -// will write to a temporary file which is then moved to to destination, -// overwriting the original (or some variant thereof). The watcher on the -// original file is now lost, as that no longer exists. -// -// The upshot of this is that a power failure or crash won't leave a -// half-written file. -// -// Watch the parent directory and use Event.Name to filter out files you're not -// interested in. There is an example of this in cmd/fsnotify/file.go. -func (w *Watcher) Add(name string) error { return w.AddWith(name) } - -// AddWith is like [Watcher.Add], but allows adding options. When using Add() -// the defaults described below are used. -// -// Possible options are: -// -// - [WithBufferSize] sets the buffer size for the Windows backend; no-op on -// other platforms. The default is 64K (65536 bytes). -func (w *Watcher) AddWith(name string, opts ...addOpt) error { - if w.isClosed() { - return ErrClosed - } - if w.port.PathIsWatched(name) { - return nil - } - - _ = getOptions(opts...) - - // Currently we resolve symlinks that were explicitly requested to be - // watched. Otherwise we would use LStat here. - stat, err := os.Stat(name) - if err != nil { - return err - } - - // Associate all files in the directory. - if stat.IsDir() { - err := w.handleDirectory(name, stat, true, w.associateFile) - if err != nil { - return err - } - - w.mu.Lock() - w.dirs[name] = struct{}{} - w.mu.Unlock() - return nil - } - - err = w.associateFile(name, stat, true) - if err != nil { - return err - } - - w.mu.Lock() - w.watches[name] = struct{}{} - w.mu.Unlock() - return nil -} - -// Remove stops monitoring the path for changes. -// -// Directories are always removed non-recursively. For example, if you added -// /tmp/dir and /tmp/dir/subdir then you will need to remove both. -// -// Removing a path that has not yet been added returns [ErrNonExistentWatch]. -// -// Returns nil if [Watcher.Close] was called. -func (w *Watcher) Remove(name string) error { - if w.isClosed() { - return nil - } - if !w.port.PathIsWatched(name) { - return fmt.Errorf("%w: %s", ErrNonExistentWatch, name) - } - - // The user has expressed an intent. Immediately remove this name from - // whichever watch list it might be in. If it's not in there the delete - // doesn't cause harm. - w.mu.Lock() - delete(w.watches, name) - delete(w.dirs, name) - w.mu.Unlock() - - stat, err := os.Stat(name) - if err != nil { - return err - } - - // Remove associations for every file in the directory. 
- if stat.IsDir() { - err := w.handleDirectory(name, stat, false, w.dissociateFile) - if err != nil { - return err - } - return nil - } - - err = w.port.DissociatePath(name) - if err != nil { - return err - } - - return nil -} - -// readEvents contains the main loop that runs in a goroutine watching for events. -func (w *Watcher) readEvents() { - // If this function returns, the watcher has been closed and we can close - // these channels - defer func() { - close(w.Errors) - close(w.Events) - }() - - pevents := make([]unix.PortEvent, 8) - for { - count, err := w.port.Get(pevents, 1, nil) - if err != nil && err != unix.ETIME { - // Interrupted system call (count should be 0) ignore and continue - if errors.Is(err, unix.EINTR) && count == 0 { - continue - } - // Get failed because we called w.Close() - if errors.Is(err, unix.EBADF) && w.isClosed() { - return - } - // There was an error not caused by calling w.Close() - if !w.sendError(err) { - return - } - } - - p := pevents[:count] - for _, pevent := range p { - if pevent.Source != unix.PORT_SOURCE_FILE { - // Event from unexpected source received; should never happen. - if !w.sendError(errors.New("Event from unexpected source received")) { - return - } - continue - } - - err = w.handleEvent(&pevent) - if err != nil { - if !w.sendError(err) { - return - } - } - } - } -} - -func (w *Watcher) handleDirectory(path string, stat os.FileInfo, follow bool, handler func(string, os.FileInfo, bool) error) error { - files, err := os.ReadDir(path) - if err != nil { - return err - } - - // Handle all children of the directory. - for _, entry := range files { - finfo, err := entry.Info() - if err != nil { - return err - } - err = handler(filepath.Join(path, finfo.Name()), finfo, false) - if err != nil { - return err - } - } - - // And finally handle the directory itself. - return handler(path, stat, follow) -} - -// handleEvent might need to emit more than one fsnotify event if the events -// bitmap matches more than one event type (e.g. the file was both modified and -// had the attributes changed between when the association was created and the -// when event was returned) -func (w *Watcher) handleEvent(event *unix.PortEvent) error { - var ( - events = event.Events - path = event.Path - fmode = event.Cookie.(os.FileMode) - reRegister = true - ) - - w.mu.Lock() - _, watchedDir := w.dirs[path] - _, watchedPath := w.watches[path] - w.mu.Unlock() - isWatched := watchedDir || watchedPath - - if events&unix.FILE_DELETE != 0 { - if !w.sendEvent(path, Remove) { - return nil - } - reRegister = false - } - if events&unix.FILE_RENAME_FROM != 0 { - if !w.sendEvent(path, Rename) { - return nil - } - // Don't keep watching the new file name - reRegister = false - } - if events&unix.FILE_RENAME_TO != 0 { - // We don't report a Rename event for this case, because Rename events - // are interpreted as referring to the _old_ name of the file, and in - // this case the event would refer to the new name of the file. This - // type of rename event is not supported by fsnotify. - - // inotify reports a Remove event in this case, so we simulate this - // here. - if !w.sendEvent(path, Remove) { - return nil - } - // Don't keep watching the file that was removed - reRegister = false - } - - // The file is gone, nothing left to do. 
- if !reRegister { - if watchedDir { - w.mu.Lock() - delete(w.dirs, path) - w.mu.Unlock() - } - if watchedPath { - w.mu.Lock() - delete(w.watches, path) - w.mu.Unlock() - } - return nil - } - - // If we didn't get a deletion the file still exists and we're going to have - // to watch it again. Let's Stat it now so that we can compare permissions - // and have what we need to continue watching the file - - stat, err := os.Lstat(path) - if err != nil { - // This is unexpected, but we should still emit an event. This happens - // most often on "rm -r" of a subdirectory inside a watched directory We - // get a modify event of something happening inside, but by the time we - // get here, the sudirectory is already gone. Clearly we were watching - // this path but now it is gone. Let's tell the user that it was - // removed. - if !w.sendEvent(path, Remove) { - return nil - } - // Suppress extra write events on removed directories; they are not - // informative and can be confusing. - return nil - } - - // resolve symlinks that were explicitly watched as we would have at Add() - // time. this helps suppress spurious Chmod events on watched symlinks - if isWatched { - stat, err = os.Stat(path) - if err != nil { - // The symlink still exists, but the target is gone. Report the - // Remove similar to above. - if !w.sendEvent(path, Remove) { - return nil - } - // Don't return the error - } - } - - if events&unix.FILE_MODIFIED != 0 { - if fmode.IsDir() { - if watchedDir { - if err := w.updateDirectory(path); err != nil { - return err - } - } else { - if !w.sendEvent(path, Write) { - return nil - } - } - } else { - if !w.sendEvent(path, Write) { - return nil - } - } - } - if events&unix.FILE_ATTRIB != 0 && stat != nil { - // Only send Chmod if perms changed - if stat.Mode().Perm() != fmode.Perm() { - if !w.sendEvent(path, Chmod) { - return nil - } - } - } - - if stat != nil { - // If we get here, it means we've hit an event above that requires us to - // continue watching the file or directory - return w.associateFile(path, stat, isWatched) - } - return nil -} - -func (w *Watcher) updateDirectory(path string) error { - // The directory was modified, so we must find unwatched entities and watch - // them. If something was removed from the directory, nothing will happen, - // as everything else should still be watched. - files, err := os.ReadDir(path) - if err != nil { - return err - } - - for _, entry := range files { - path := filepath.Join(path, entry.Name()) - if w.port.PathIsWatched(path) { - continue - } - - finfo, err := entry.Info() - if err != nil { - return err - } - err = w.associateFile(path, finfo, false) - if err != nil { - if !w.sendError(err) { - return nil - } - } - if !w.sendEvent(path, Create) { - return nil - } - } - return nil -} - -func (w *Watcher) associateFile(path string, stat os.FileInfo, follow bool) error { - if w.isClosed() { - return ErrClosed - } - // This is primarily protecting the call to AssociatePath but it is - // important and intentional that the call to PathIsWatched is also - // protected by this mutex. Without this mutex, AssociatePath has been seen - // to error out that the path is already associated. - w.mu.Lock() - defer w.mu.Unlock() - - if w.port.PathIsWatched(path) { - // Remove the old association in favor of this one If we get ENOENT, - // then while the x/sys/unix wrapper still thought that this path was - // associated, the underlying event port did not. This call will have - // cleared up that discrepancy. 
The most likely cause is that the event - // has fired but we haven't processed it yet. - err := w.port.DissociatePath(path) - if err != nil && err != unix.ENOENT { - return err - } - } - // FILE_NOFOLLOW means we watch symlinks themselves rather than their - // targets. - events := unix.FILE_MODIFIED | unix.FILE_ATTRIB | unix.FILE_NOFOLLOW - if follow { - // We *DO* follow symlinks for explicitly watched entries. - events = unix.FILE_MODIFIED | unix.FILE_ATTRIB - } - return w.port.AssociatePath(path, stat, - events, - stat.Mode()) -} - -func (w *Watcher) dissociateFile(path string, stat os.FileInfo, unused bool) error { - if !w.port.PathIsWatched(path) { - return nil - } - return w.port.DissociatePath(path) -} - -// WatchList returns all paths explicitly added with [Watcher.Add] (and are not -// yet removed). -// -// Returns nil if [Watcher.Close] was called. -func (w *Watcher) WatchList() []string { - if w.isClosed() { - return nil - } - - w.mu.Lock() - defer w.mu.Unlock() - - entries := make([]string, 0, len(w.watches)+len(w.dirs)) - for pathname := range w.dirs { - entries = append(entries, pathname) - } - for pathname := range w.watches { - entries = append(entries, pathname) - } - - return entries -} diff --git a/vendor/github.com/fsnotify/fsnotify/backend_inotify.go b/vendor/github.com/fsnotify/fsnotify/backend_inotify.go deleted file mode 100644 index 921c1c1e4..000000000 --- a/vendor/github.com/fsnotify/fsnotify/backend_inotify.go +++ /dev/null @@ -1,594 +0,0 @@ -//go:build linux && !appengine -// +build linux,!appengine - -// Note: the documentation on the Watcher type and methods is generated from -// mkdoc.zsh - -package fsnotify - -import ( - "errors" - "fmt" - "io" - "os" - "path/filepath" - "strings" - "sync" - "unsafe" - - "golang.org/x/sys/unix" -) - -// Watcher watches a set of paths, delivering events on a channel. -// -// A watcher should not be copied (e.g. pass it by pointer, rather than by -// value). -// -// # Linux notes -// -// When a file is removed a Remove event won't be emitted until all file -// descriptors are closed, and deletes will always emit a Chmod. For example: -// -// fp := os.Open("file") -// os.Remove("file") // Triggers Chmod -// fp.Close() // Triggers Remove -// -// This is the event that inotify sends, so not much can be changed about this. -// -// The fs.inotify.max_user_watches sysctl variable specifies the upper limit -// for the number of watches per user, and fs.inotify.max_user_instances -// specifies the maximum number of inotify instances per user. Every Watcher you -// create is an "instance", and every path you add is a "watch". -// -// These are also exposed in /proc as /proc/sys/fs/inotify/max_user_watches and -// /proc/sys/fs/inotify/max_user_instances -// -// To increase them you can use sysctl or write the value to the /proc file: -// -// # Default values on Linux 5.18 -// sysctl fs.inotify.max_user_watches=124983 -// sysctl fs.inotify.max_user_instances=128 -// -// To make the changes persist on reboot edit /etc/sysctl.conf or -// /usr/lib/sysctl.d/50-default.conf (details differ per Linux distro; check -// your distro's documentation): -// -// fs.inotify.max_user_watches=124983 -// fs.inotify.max_user_instances=128 -// -// Reaching the limit will result in a "no space left on device" or "too many open -// files" error. 
-// -// # kqueue notes (macOS, BSD) -// -// kqueue requires opening a file descriptor for every file that's being watched; -// so if you're watching a directory with five files then that's six file -// descriptors. You will run in to your system's "max open files" limit faster on -// these platforms. -// -// The sysctl variables kern.maxfiles and kern.maxfilesperproc can be used to -// control the maximum number of open files, as well as /etc/login.conf on BSD -// systems. -// -// # Windows notes -// -// Paths can be added as "C:\path\to\dir", but forward slashes -// ("C:/path/to/dir") will also work. -// -// When a watched directory is removed it will always send an event for the -// directory itself, but may not send events for all files in that directory. -// Sometimes it will send events for all times, sometimes it will send no -// events, and often only for some files. -// -// The default ReadDirectoryChangesW() buffer size is 64K, which is the largest -// value that is guaranteed to work with SMB filesystems. If you have many -// events in quick succession this may not be enough, and you will have to use -// [WithBufferSize] to increase the value. -type Watcher struct { - // Events sends the filesystem change events. - // - // fsnotify can send the following events; a "path" here can refer to a - // file, directory, symbolic link, or special file like a FIFO. - // - // fsnotify.Create A new path was created; this may be followed by one - // or more Write events if data also gets written to a - // file. - // - // fsnotify.Remove A path was removed. - // - // fsnotify.Rename A path was renamed. A rename is always sent with the - // old path as Event.Name, and a Create event will be - // sent with the new name. Renames are only sent for - // paths that are currently watched; e.g. moving an - // unmonitored file into a monitored directory will - // show up as just a Create. Similarly, renaming a file - // to outside a monitored directory will show up as - // only a Rename. - // - // fsnotify.Write A file or named pipe was written to. A Truncate will - // also trigger a Write. A single "write action" - // initiated by the user may show up as one or multiple - // writes, depending on when the system syncs things to - // disk. For example when compiling a large Go program - // you may get hundreds of Write events, and you may - // want to wait until you've stopped receiving them - // (see the dedup example in cmd/fsnotify). - // - // Some systems may send Write event for directories - // when the directory content changes. - // - // fsnotify.Chmod Attributes were changed. On Linux this is also sent - // when a file is removed (or more accurately, when a - // link to an inode is removed). On kqueue it's sent - // when a file is truncated. On Windows it's never - // sent. - Events chan Event - - // Errors sends any errors. - // - // ErrEventOverflow is used to indicate there are too many events: - // - // - inotify: There are too many queued events (fs.inotify.max_queued_events sysctl) - // - windows: The buffer size is too small; WithBufferSize() can be used to increase it. - // - kqueue, fen: Not used. - Errors chan error - - // Store fd here as os.File.Read() will no longer return on close after - // calling Fd(). 
See: https://github.com/golang/go/issues/26439 - fd int - inotifyFile *os.File - watches *watches - done chan struct{} // Channel for sending a "quit message" to the reader goroutine - closeMu sync.Mutex - doneResp chan struct{} // Channel to respond to Close -} - -type ( - watches struct { - mu sync.RWMutex - wd map[uint32]*watch // wd → watch - path map[string]uint32 // pathname → wd - } - watch struct { - wd uint32 // Watch descriptor (as returned by the inotify_add_watch() syscall) - flags uint32 // inotify flags of this watch (see inotify(7) for the list of valid flags) - path string // Watch path. - } -) - -func newWatches() *watches { - return &watches{ - wd: make(map[uint32]*watch), - path: make(map[string]uint32), - } -} - -func (w *watches) len() int { - w.mu.RLock() - defer w.mu.RUnlock() - return len(w.wd) -} - -func (w *watches) add(ww *watch) { - w.mu.Lock() - defer w.mu.Unlock() - w.wd[ww.wd] = ww - w.path[ww.path] = ww.wd -} - -func (w *watches) remove(wd uint32) { - w.mu.Lock() - defer w.mu.Unlock() - delete(w.path, w.wd[wd].path) - delete(w.wd, wd) -} - -func (w *watches) removePath(path string) (uint32, bool) { - w.mu.Lock() - defer w.mu.Unlock() - - wd, ok := w.path[path] - if !ok { - return 0, false - } - - delete(w.path, path) - delete(w.wd, wd) - - return wd, true -} - -func (w *watches) byPath(path string) *watch { - w.mu.RLock() - defer w.mu.RUnlock() - return w.wd[w.path[path]] -} - -func (w *watches) byWd(wd uint32) *watch { - w.mu.RLock() - defer w.mu.RUnlock() - return w.wd[wd] -} - -func (w *watches) updatePath(path string, f func(*watch) (*watch, error)) error { - w.mu.Lock() - defer w.mu.Unlock() - - var existing *watch - wd, ok := w.path[path] - if ok { - existing = w.wd[wd] - } - - upd, err := f(existing) - if err != nil { - return err - } - if upd != nil { - w.wd[upd.wd] = upd - w.path[upd.path] = upd.wd - - if upd.wd != wd { - delete(w.wd, wd) - } - } - - return nil -} - -// NewWatcher creates a new Watcher. -func NewWatcher() (*Watcher, error) { - return NewBufferedWatcher(0) -} - -// NewBufferedWatcher creates a new Watcher with a buffered Watcher.Events -// channel. -// -// The main use case for this is situations with a very large number of events -// where the kernel buffer size can't be increased (e.g. due to lack of -// permissions). An unbuffered Watcher will perform better for almost all use -// cases, and whenever possible you will be better off increasing the kernel -// buffers instead of adding a large userspace buffer. -func NewBufferedWatcher(sz uint) (*Watcher, error) { - // Need to set nonblocking mode for SetDeadline to work, otherwise blocking - // I/O operations won't terminate on close. - fd, errno := unix.InotifyInit1(unix.IN_CLOEXEC | unix.IN_NONBLOCK) - if fd == -1 { - return nil, errno - } - - w := &Watcher{ - fd: fd, - inotifyFile: os.NewFile(uintptr(fd), ""), - watches: newWatches(), - Events: make(chan Event, sz), - Errors: make(chan error), - done: make(chan struct{}), - doneResp: make(chan struct{}), - } - - go w.readEvents() - return w, nil -} - -// Returns true if the event was sent, or false if watcher is closed. -func (w *Watcher) sendEvent(e Event) bool { - select { - case w.Events <- e: - return true - case <-w.done: - return false - } -} - -// Returns true if the error was sent, or false if watcher is closed. 
-func (w *Watcher) sendError(err error) bool { - select { - case w.Errors <- err: - return true - case <-w.done: - return false - } -} - -func (w *Watcher) isClosed() bool { - select { - case <-w.done: - return true - default: - return false - } -} - -// Close removes all watches and closes the Events channel. -func (w *Watcher) Close() error { - w.closeMu.Lock() - if w.isClosed() { - w.closeMu.Unlock() - return nil - } - close(w.done) - w.closeMu.Unlock() - - // Causes any blocking reads to return with an error, provided the file - // still supports deadline operations. - err := w.inotifyFile.Close() - if err != nil { - return err - } - - // Wait for goroutine to close - <-w.doneResp - - return nil -} - -// Add starts monitoring the path for changes. -// -// A path can only be watched once; watching it more than once is a no-op and will -// not return an error. Paths that do not yet exist on the filesystem cannot be -// watched. -// -// A watch will be automatically removed if the watched path is deleted or -// renamed. The exception is the Windows backend, which doesn't remove the -// watcher on renames. -// -// Notifications on network filesystems (NFS, SMB, FUSE, etc.) or special -// filesystems (/proc, /sys, etc.) generally don't work. -// -// Returns [ErrClosed] if [Watcher.Close] was called. -// -// See [Watcher.AddWith] for a version that allows adding options. -// -// # Watching directories -// -// All files in a directory are monitored, including new files that are created -// after the watcher is started. Subdirectories are not watched (i.e. it's -// non-recursive). -// -// # Watching files -// -// Watching individual files (rather than directories) is generally not -// recommended as many programs (especially editors) update files atomically: it -// will write to a temporary file which is then moved to to destination, -// overwriting the original (or some variant thereof). The watcher on the -// original file is now lost, as that no longer exists. -// -// The upshot of this is that a power failure or crash won't leave a -// half-written file. -// -// Watch the parent directory and use Event.Name to filter out files you're not -// interested in. There is an example of this in cmd/fsnotify/file.go. -func (w *Watcher) Add(name string) error { return w.AddWith(name) } - -// AddWith is like [Watcher.Add], but allows adding options. When using Add() -// the defaults described below are used. -// -// Possible options are: -// -// - [WithBufferSize] sets the buffer size for the Windows backend; no-op on -// other platforms. The default is 64K (65536 bytes). -func (w *Watcher) AddWith(name string, opts ...addOpt) error { - if w.isClosed() { - return ErrClosed - } - - name = filepath.Clean(name) - _ = getOptions(opts...) - - var flags uint32 = unix.IN_MOVED_TO | unix.IN_MOVED_FROM | - unix.IN_CREATE | unix.IN_ATTRIB | unix.IN_MODIFY | - unix.IN_MOVE_SELF | unix.IN_DELETE | unix.IN_DELETE_SELF - - return w.watches.updatePath(name, func(existing *watch) (*watch, error) { - if existing != nil { - flags |= existing.flags | unix.IN_MASK_ADD - } - - wd, err := unix.InotifyAddWatch(w.fd, name, flags) - if wd == -1 { - return nil, err - } - - if existing == nil { - return &watch{ - wd: uint32(wd), - path: name, - flags: flags, - }, nil - } - - existing.wd = uint32(wd) - existing.flags = flags - return existing, nil - }) -} - -// Remove stops monitoring the path for changes. -// -// Directories are always removed non-recursively. 
For example, if you added -// /tmp/dir and /tmp/dir/subdir then you will need to remove both. -// -// Removing a path that has not yet been added returns [ErrNonExistentWatch]. -// -// Returns nil if [Watcher.Close] was called. -func (w *Watcher) Remove(name string) error { - if w.isClosed() { - return nil - } - return w.remove(filepath.Clean(name)) -} - -func (w *Watcher) remove(name string) error { - wd, ok := w.watches.removePath(name) - if !ok { - return fmt.Errorf("%w: %s", ErrNonExistentWatch, name) - } - - success, errno := unix.InotifyRmWatch(w.fd, wd) - if success == -1 { - // TODO: Perhaps it's not helpful to return an error here in every case; - // The only two possible errors are: - // - // - EBADF, which happens when w.fd is not a valid file descriptor - // of any kind. - // - EINVAL, which is when fd is not an inotify descriptor or wd - // is not a valid watch descriptor. Watch descriptors are - // invalidated when they are removed explicitly or implicitly; - // explicitly by inotify_rm_watch, implicitly when the file they - // are watching is deleted. - return errno - } - return nil -} - -// WatchList returns all paths explicitly added with [Watcher.Add] (and are not -// yet removed). -// -// Returns nil if [Watcher.Close] was called. -func (w *Watcher) WatchList() []string { - if w.isClosed() { - return nil - } - - entries := make([]string, 0, w.watches.len()) - w.watches.mu.RLock() - for pathname := range w.watches.path { - entries = append(entries, pathname) - } - w.watches.mu.RUnlock() - - return entries -} - -// readEvents reads from the inotify file descriptor, converts the -// received events into Event objects and sends them via the Events channel -func (w *Watcher) readEvents() { - defer func() { - close(w.doneResp) - close(w.Errors) - close(w.Events) - }() - - var ( - buf [unix.SizeofInotifyEvent * 4096]byte // Buffer for a maximum of 4096 raw events - errno error // Syscall errno - ) - for { - // See if we have been closed. - if w.isClosed() { - return - } - - n, err := w.inotifyFile.Read(buf[:]) - switch { - case errors.Unwrap(err) == os.ErrClosed: - return - case err != nil: - if !w.sendError(err) { - return - } - continue - } - - if n < unix.SizeofInotifyEvent { - var err error - if n == 0 { - err = io.EOF // If EOF is received. This should really never happen. - } else if n < 0 { - err = errno // If an error occurred while reading. - } else { - err = errors.New("notify: short read in readEvents()") // Read was too short. - } - if !w.sendError(err) { - return - } - continue - } - - var offset uint32 - // We don't know how many events we just read into the buffer - // While the offset points to at least one whole event... - for offset <= uint32(n-unix.SizeofInotifyEvent) { - var ( - // Point "raw" to the event in the buffer - raw = (*unix.InotifyEvent)(unsafe.Pointer(&buf[offset])) - mask = uint32(raw.Mask) - nameLen = uint32(raw.Len) - ) - - if mask&unix.IN_Q_OVERFLOW != 0 { - if !w.sendError(ErrEventOverflow) { - return - } - } - - // If the event happened to the watched directory or the watched file, the kernel - // doesn't append the filename to the event, but we would like to always fill the - // the "Name" field with a valid filename. We retrieve the path of the watch from - // the "paths" map. - watch := w.watches.byWd(uint32(raw.Wd)) - - // inotify will automatically remove the watch on deletes; just need - // to clean our state here. 
- if watch != nil && mask&unix.IN_DELETE_SELF == unix.IN_DELETE_SELF { - w.watches.remove(watch.wd) - } - // We can't really update the state when a watched path is moved; - // only IN_MOVE_SELF is sent and not IN_MOVED_{FROM,TO}. So remove - // the watch. - if watch != nil && mask&unix.IN_MOVE_SELF == unix.IN_MOVE_SELF { - err := w.remove(watch.path) - if err != nil && !errors.Is(err, ErrNonExistentWatch) { - if !w.sendError(err) { - return - } - } - } - - var name string - if watch != nil { - name = watch.path - } - if nameLen > 0 { - // Point "bytes" at the first byte of the filename - bytes := (*[unix.PathMax]byte)(unsafe.Pointer(&buf[offset+unix.SizeofInotifyEvent]))[:nameLen:nameLen] - // The filename is padded with NULL bytes. TrimRight() gets rid of those. - name += "/" + strings.TrimRight(string(bytes[0:nameLen]), "\000") - } - - event := w.newEvent(name, mask) - - // Send the events that are not ignored on the events channel - if mask&unix.IN_IGNORED == 0 { - if !w.sendEvent(event) { - return - } - } - - // Move to the next event in the buffer - offset += unix.SizeofInotifyEvent + nameLen - } - } -} - -// newEvent returns an platform-independent Event based on an inotify mask. -func (w *Watcher) newEvent(name string, mask uint32) Event { - e := Event{Name: name} - if mask&unix.IN_CREATE == unix.IN_CREATE || mask&unix.IN_MOVED_TO == unix.IN_MOVED_TO { - e.Op |= Create - } - if mask&unix.IN_DELETE_SELF == unix.IN_DELETE_SELF || mask&unix.IN_DELETE == unix.IN_DELETE { - e.Op |= Remove - } - if mask&unix.IN_MODIFY == unix.IN_MODIFY { - e.Op |= Write - } - if mask&unix.IN_MOVE_SELF == unix.IN_MOVE_SELF || mask&unix.IN_MOVED_FROM == unix.IN_MOVED_FROM { - e.Op |= Rename - } - if mask&unix.IN_ATTRIB == unix.IN_ATTRIB { - e.Op |= Chmod - } - return e -} diff --git a/vendor/github.com/fsnotify/fsnotify/backend_kqueue.go b/vendor/github.com/fsnotify/fsnotify/backend_kqueue.go deleted file mode 100644 index 063a0915a..000000000 --- a/vendor/github.com/fsnotify/fsnotify/backend_kqueue.go +++ /dev/null @@ -1,782 +0,0 @@ -//go:build freebsd || openbsd || netbsd || dragonfly || darwin -// +build freebsd openbsd netbsd dragonfly darwin - -// Note: the documentation on the Watcher type and methods is generated from -// mkdoc.zsh - -package fsnotify - -import ( - "errors" - "fmt" - "os" - "path/filepath" - "sync" - - "golang.org/x/sys/unix" -) - -// Watcher watches a set of paths, delivering events on a channel. -// -// A watcher should not be copied (e.g. pass it by pointer, rather than by -// value). -// -// # Linux notes -// -// When a file is removed a Remove event won't be emitted until all file -// descriptors are closed, and deletes will always emit a Chmod. For example: -// -// fp := os.Open("file") -// os.Remove("file") // Triggers Chmod -// fp.Close() // Triggers Remove -// -// This is the event that inotify sends, so not much can be changed about this. -// -// The fs.inotify.max_user_watches sysctl variable specifies the upper limit -// for the number of watches per user, and fs.inotify.max_user_instances -// specifies the maximum number of inotify instances per user. Every Watcher you -// create is an "instance", and every path you add is a "watch". 
-// -// These are also exposed in /proc as /proc/sys/fs/inotify/max_user_watches and -// /proc/sys/fs/inotify/max_user_instances -// -// To increase them you can use sysctl or write the value to the /proc file: -// -// # Default values on Linux 5.18 -// sysctl fs.inotify.max_user_watches=124983 -// sysctl fs.inotify.max_user_instances=128 -// -// To make the changes persist on reboot edit /etc/sysctl.conf or -// /usr/lib/sysctl.d/50-default.conf (details differ per Linux distro; check -// your distro's documentation): -// -// fs.inotify.max_user_watches=124983 -// fs.inotify.max_user_instances=128 -// -// Reaching the limit will result in a "no space left on device" or "too many open -// files" error. -// -// # kqueue notes (macOS, BSD) -// -// kqueue requires opening a file descriptor for every file that's being watched; -// so if you're watching a directory with five files then that's six file -// descriptors. You will run in to your system's "max open files" limit faster on -// these platforms. -// -// The sysctl variables kern.maxfiles and kern.maxfilesperproc can be used to -// control the maximum number of open files, as well as /etc/login.conf on BSD -// systems. -// -// # Windows notes -// -// Paths can be added as "C:\path\to\dir", but forward slashes -// ("C:/path/to/dir") will also work. -// -// When a watched directory is removed it will always send an event for the -// directory itself, but may not send events for all files in that directory. -// Sometimes it will send events for all times, sometimes it will send no -// events, and often only for some files. -// -// The default ReadDirectoryChangesW() buffer size is 64K, which is the largest -// value that is guaranteed to work with SMB filesystems. If you have many -// events in quick succession this may not be enough, and you will have to use -// [WithBufferSize] to increase the value. -type Watcher struct { - // Events sends the filesystem change events. - // - // fsnotify can send the following events; a "path" here can refer to a - // file, directory, symbolic link, or special file like a FIFO. - // - // fsnotify.Create A new path was created; this may be followed by one - // or more Write events if data also gets written to a - // file. - // - // fsnotify.Remove A path was removed. - // - // fsnotify.Rename A path was renamed. A rename is always sent with the - // old path as Event.Name, and a Create event will be - // sent with the new name. Renames are only sent for - // paths that are currently watched; e.g. moving an - // unmonitored file into a monitored directory will - // show up as just a Create. Similarly, renaming a file - // to outside a monitored directory will show up as - // only a Rename. - // - // fsnotify.Write A file or named pipe was written to. A Truncate will - // also trigger a Write. A single "write action" - // initiated by the user may show up as one or multiple - // writes, depending on when the system syncs things to - // disk. For example when compiling a large Go program - // you may get hundreds of Write events, and you may - // want to wait until you've stopped receiving them - // (see the dedup example in cmd/fsnotify). - // - // Some systems may send Write event for directories - // when the directory content changes. - // - // fsnotify.Chmod Attributes were changed. On Linux this is also sent - // when a file is removed (or more accurately, when a - // link to an inode is removed). On kqueue it's sent - // when a file is truncated. On Windows it's never - // sent. 
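A minimal, self-contained sketch of consuming the Events and Errors channels documented above, illustrating Event.Has and the "watch the parent directory, filter on Event.Name" advice from the Add documentation; the watched directory and file name are placeholders, not part of the removed sources:

	package main

	import (
		"log"
		"path/filepath"

		"github.com/fsnotify/fsnotify"
	)

	func main() {
		w, err := fsnotify.NewWatcher()
		if err != nil {
			log.Fatal(err)
		}
		defer w.Close()

		// Watch the parent directory rather than the file itself, so that
		// atomic saves (write temp file, rename over target) keep working.
		if err := w.Add("/tmp/dir"); err != nil {
			log.Fatal(err)
		}

		for {
			select {
			case ev, ok := <-w.Events:
				if !ok {
					return // channel closed by Close()
				}
				if filepath.Base(ev.Name) != "file.txt" {
					continue // filter out paths we don't care about
				}
				if ev.Has(fsnotify.Write) || ev.Has(fsnotify.Create) {
					log.Println("changed:", ev)
				}
			case err, ok := <-w.Errors:
				if !ok {
					return
				}
				log.Println("error:", err)
			}
		}
	}
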
- Events chan Event - - // Errors sends any errors. - // - // ErrEventOverflow is used to indicate there are too many events: - // - // - inotify: There are too many queued events (fs.inotify.max_queued_events sysctl) - // - windows: The buffer size is too small; WithBufferSize() can be used to increase it. - // - kqueue, fen: Not used. - Errors chan error - - done chan struct{} - kq int // File descriptor (as returned by the kqueue() syscall). - closepipe [2]int // Pipe used for closing. - mu sync.Mutex // Protects access to watcher data - watches map[string]int // Watched file descriptors (key: path). - watchesByDir map[string]map[int]struct{} // Watched file descriptors indexed by the parent directory (key: dirname(path)). - userWatches map[string]struct{} // Watches added with Watcher.Add() - dirFlags map[string]uint32 // Watched directories to fflags used in kqueue. - paths map[int]pathInfo // File descriptors to path names for processing kqueue events. - fileExists map[string]struct{} // Keep track of if we know this file exists (to stop duplicate create events). - isClosed bool // Set to true when Close() is first called -} - -type pathInfo struct { - name string - isDir bool -} - -// NewWatcher creates a new Watcher. -func NewWatcher() (*Watcher, error) { - return NewBufferedWatcher(0) -} - -// NewBufferedWatcher creates a new Watcher with a buffered Watcher.Events -// channel. -// -// The main use case for this is situations with a very large number of events -// where the kernel buffer size can't be increased (e.g. due to lack of -// permissions). An unbuffered Watcher will perform better for almost all use -// cases, and whenever possible you will be better off increasing the kernel -// buffers instead of adding a large userspace buffer. -func NewBufferedWatcher(sz uint) (*Watcher, error) { - kq, closepipe, err := newKqueue() - if err != nil { - return nil, err - } - - w := &Watcher{ - kq: kq, - closepipe: closepipe, - watches: make(map[string]int), - watchesByDir: make(map[string]map[int]struct{}), - dirFlags: make(map[string]uint32), - paths: make(map[int]pathInfo), - fileExists: make(map[string]struct{}), - userWatches: make(map[string]struct{}), - Events: make(chan Event, sz), - Errors: make(chan error), - done: make(chan struct{}), - } - - go w.readEvents() - return w, nil -} - -// newKqueue creates a new kernel event queue and returns a descriptor. -// -// This registers a new event on closepipe, which will trigger an event when -// it's closed. This way we can use kevent() without timeout/polling; without -// the closepipe, it would block forever and we wouldn't be able to stop it at -// all. -func newKqueue() (kq int, closepipe [2]int, err error) { - kq, err = unix.Kqueue() - if kq == -1 { - return kq, closepipe, err - } - - // Register the close pipe. - err = unix.Pipe(closepipe[:]) - if err != nil { - unix.Close(kq) - return kq, closepipe, err - } - - // Register changes to listen on the closepipe. - changes := make([]unix.Kevent_t, 1) - // SetKevent converts int to the platform-specific types. - unix.SetKevent(&changes[0], closepipe[0], unix.EVFILT_READ, - unix.EV_ADD|unix.EV_ENABLE|unix.EV_ONESHOT) - - ok, err := unix.Kevent(kq, changes, nil, nil) - if ok == -1 { - unix.Close(kq) - unix.Close(closepipe[0]) - unix.Close(closepipe[1]) - return kq, closepipe, err - } - return kq, closepipe, nil -} - -// Returns true if the event was sent, or false if watcher is closed. 
-func (w *Watcher) sendEvent(e Event) bool { - select { - case w.Events <- e: - return true - case <-w.done: - return false - } -} - -// Returns true if the error was sent, or false if watcher is closed. -func (w *Watcher) sendError(err error) bool { - select { - case w.Errors <- err: - return true - case <-w.done: - return false - } -} - -// Close removes all watches and closes the Events channel. -func (w *Watcher) Close() error { - w.mu.Lock() - if w.isClosed { - w.mu.Unlock() - return nil - } - w.isClosed = true - - // copy paths to remove while locked - pathsToRemove := make([]string, 0, len(w.watches)) - for name := range w.watches { - pathsToRemove = append(pathsToRemove, name) - } - w.mu.Unlock() // Unlock before calling Remove, which also locks - for _, name := range pathsToRemove { - w.Remove(name) - } - - // Send "quit" message to the reader goroutine. - unix.Close(w.closepipe[1]) - close(w.done) - - return nil -} - -// Add starts monitoring the path for changes. -// -// A path can only be watched once; watching it more than once is a no-op and will -// not return an error. Paths that do not yet exist on the filesystem cannot be -// watched. -// -// A watch will be automatically removed if the watched path is deleted or -// renamed. The exception is the Windows backend, which doesn't remove the -// watcher on renames. -// -// Notifications on network filesystems (NFS, SMB, FUSE, etc.) or special -// filesystems (/proc, /sys, etc.) generally don't work. -// -// Returns [ErrClosed] if [Watcher.Close] was called. -// -// See [Watcher.AddWith] for a version that allows adding options. -// -// # Watching directories -// -// All files in a directory are monitored, including new files that are created -// after the watcher is started. Subdirectories are not watched (i.e. it's -// non-recursive). -// -// # Watching files -// -// Watching individual files (rather than directories) is generally not -// recommended as many programs (especially editors) update files atomically: it -// will write to a temporary file which is then moved to to destination, -// overwriting the original (or some variant thereof). The watcher on the -// original file is now lost, as that no longer exists. -// -// The upshot of this is that a power failure or crash won't leave a -// half-written file. -// -// Watch the parent directory and use Event.Name to filter out files you're not -// interested in. There is an example of this in cmd/fsnotify/file.go. -func (w *Watcher) Add(name string) error { return w.AddWith(name) } - -// AddWith is like [Watcher.Add], but allows adding options. When using Add() -// the defaults described below are used. -// -// Possible options are: -// -// - [WithBufferSize] sets the buffer size for the Windows backend; no-op on -// other platforms. The default is 64K (65536 bytes). -func (w *Watcher) AddWith(name string, opts ...addOpt) error { - _ = getOptions(opts...) - - w.mu.Lock() - w.userWatches[name] = struct{}{} - w.mu.Unlock() - _, err := w.addWatch(name, noteAllEvents) - return err -} - -// Remove stops monitoring the path for changes. -// -// Directories are always removed non-recursively. For example, if you added -// /tmp/dir and /tmp/dir/subdir then you will need to remove both. -// -// Removing a path that has not yet been added returns [ErrNonExistentWatch]. -// -// Returns nil if [Watcher.Close] was called. 
-func (w *Watcher) Remove(name string) error { - return w.remove(name, true) -} - -func (w *Watcher) remove(name string, unwatchFiles bool) error { - name = filepath.Clean(name) - w.mu.Lock() - if w.isClosed { - w.mu.Unlock() - return nil - } - watchfd, ok := w.watches[name] - w.mu.Unlock() - if !ok { - return fmt.Errorf("%w: %s", ErrNonExistentWatch, name) - } - - err := w.register([]int{watchfd}, unix.EV_DELETE, 0) - if err != nil { - return err - } - - unix.Close(watchfd) - - w.mu.Lock() - isDir := w.paths[watchfd].isDir - delete(w.watches, name) - delete(w.userWatches, name) - - parentName := filepath.Dir(name) - delete(w.watchesByDir[parentName], watchfd) - - if len(w.watchesByDir[parentName]) == 0 { - delete(w.watchesByDir, parentName) - } - - delete(w.paths, watchfd) - delete(w.dirFlags, name) - delete(w.fileExists, name) - w.mu.Unlock() - - // Find all watched paths that are in this directory that are not external. - if unwatchFiles && isDir { - var pathsToRemove []string - w.mu.Lock() - for fd := range w.watchesByDir[name] { - path := w.paths[fd] - if _, ok := w.userWatches[path.name]; !ok { - pathsToRemove = append(pathsToRemove, path.name) - } - } - w.mu.Unlock() - for _, name := range pathsToRemove { - // Since these are internal, not much sense in propagating error to - // the user, as that will just confuse them with an error about a - // path they did not explicitly watch themselves. - w.Remove(name) - } - } - return nil -} - -// WatchList returns all paths explicitly added with [Watcher.Add] (and are not -// yet removed). -// -// Returns nil if [Watcher.Close] was called. -func (w *Watcher) WatchList() []string { - w.mu.Lock() - defer w.mu.Unlock() - if w.isClosed { - return nil - } - - entries := make([]string, 0, len(w.userWatches)) - for pathname := range w.userWatches { - entries = append(entries, pathname) - } - - return entries -} - -// Watch all events (except NOTE_EXTEND, NOTE_LINK, NOTE_REVOKE) -const noteAllEvents = unix.NOTE_DELETE | unix.NOTE_WRITE | unix.NOTE_ATTRIB | unix.NOTE_RENAME - -// addWatch adds name to the watched file set; the flags are interpreted as -// described in kevent(2). -// -// Returns the real path to the file which was added, with symlinks resolved. -func (w *Watcher) addWatch(name string, flags uint32) (string, error) { - var isDir bool - name = filepath.Clean(name) - - w.mu.Lock() - if w.isClosed { - w.mu.Unlock() - return "", ErrClosed - } - watchfd, alreadyWatching := w.watches[name] - // We already have a watch, but we can still override flags. - if alreadyWatching { - isDir = w.paths[watchfd].isDir - } - w.mu.Unlock() - - if !alreadyWatching { - fi, err := os.Lstat(name) - if err != nil { - return "", err - } - - // Don't watch sockets or named pipes - if (fi.Mode()&os.ModeSocket == os.ModeSocket) || (fi.Mode()&os.ModeNamedPipe == os.ModeNamedPipe) { - return "", nil - } - - // Follow Symlinks. - if fi.Mode()&os.ModeSymlink == os.ModeSymlink { - link, err := os.Readlink(name) - if err != nil { - // Return nil because Linux can add unresolvable symlinks to the - // watch list without problems, so maintain consistency with - // that. There will be no file events for broken symlinks. - // TODO: more specific check; returns os.PathError; ENOENT? - return "", nil - } - - w.mu.Lock() - _, alreadyWatching = w.watches[link] - w.mu.Unlock() - - if alreadyWatching { - // Add to watches so we don't get spurious Create events later - // on when we diff the directories. 
- w.watches[name] = 0 - w.fileExists[name] = struct{}{} - return link, nil - } - - name = link - fi, err = os.Lstat(name) - if err != nil { - return "", nil - } - } - - // Retry on EINTR; open() can return EINTR in practice on macOS. - // See #354, and Go issues 11180 and 39237. - for { - watchfd, err = unix.Open(name, openMode, 0) - if err == nil { - break - } - if errors.Is(err, unix.EINTR) { - continue - } - - return "", err - } - - isDir = fi.IsDir() - } - - err := w.register([]int{watchfd}, unix.EV_ADD|unix.EV_CLEAR|unix.EV_ENABLE, flags) - if err != nil { - unix.Close(watchfd) - return "", err - } - - if !alreadyWatching { - w.mu.Lock() - parentName := filepath.Dir(name) - w.watches[name] = watchfd - - watchesByDir, ok := w.watchesByDir[parentName] - if !ok { - watchesByDir = make(map[int]struct{}, 1) - w.watchesByDir[parentName] = watchesByDir - } - watchesByDir[watchfd] = struct{}{} - w.paths[watchfd] = pathInfo{name: name, isDir: isDir} - w.mu.Unlock() - } - - if isDir { - // Watch the directory if it has not been watched before, or if it was - // watched before, but perhaps only a NOTE_DELETE (watchDirectoryFiles) - w.mu.Lock() - - watchDir := (flags&unix.NOTE_WRITE) == unix.NOTE_WRITE && - (!alreadyWatching || (w.dirFlags[name]&unix.NOTE_WRITE) != unix.NOTE_WRITE) - // Store flags so this watch can be updated later - w.dirFlags[name] = flags - w.mu.Unlock() - - if watchDir { - if err := w.watchDirectoryFiles(name); err != nil { - return "", err - } - } - } - return name, nil -} - -// readEvents reads from kqueue and converts the received kevents into -// Event values that it sends down the Events channel. -func (w *Watcher) readEvents() { - defer func() { - close(w.Events) - close(w.Errors) - _ = unix.Close(w.kq) - unix.Close(w.closepipe[0]) - }() - - eventBuffer := make([]unix.Kevent_t, 10) - for closed := false; !closed; { - kevents, err := w.read(eventBuffer) - // EINTR is okay, the syscall was interrupted before timeout expired. - if err != nil && err != unix.EINTR { - if !w.sendError(fmt.Errorf("fsnotify.readEvents: %w", err)) { - closed = true - } - continue - } - - // Flush the events we received to the Events channel - for _, kevent := range kevents { - var ( - watchfd = int(kevent.Ident) - mask = uint32(kevent.Fflags) - ) - - // Shut down the loop when the pipe is closed, but only after all - // other events have been processed. - if watchfd == w.closepipe[0] { - closed = true - continue - } - - w.mu.Lock() - path := w.paths[watchfd] - w.mu.Unlock() - - event := w.newEvent(path.name, mask) - - if event.Has(Rename) || event.Has(Remove) { - w.remove(event.Name, false) - w.mu.Lock() - delete(w.fileExists, event.Name) - w.mu.Unlock() - } - - if path.isDir && event.Has(Write) && !event.Has(Remove) { - w.sendDirectoryChangeEvents(event.Name) - } else { - if !w.sendEvent(event) { - closed = true - continue - } - } - - if event.Has(Remove) { - // Look for a file that may have overwritten this; for example, - // mv f1 f2 will delete f2, then create f2. 
- if path.isDir { - fileDir := filepath.Clean(event.Name) - w.mu.Lock() - _, found := w.watches[fileDir] - w.mu.Unlock() - if found { - err := w.sendDirectoryChangeEvents(fileDir) - if err != nil { - if !w.sendError(err) { - closed = true - } - } - } - } else { - filePath := filepath.Clean(event.Name) - if fi, err := os.Lstat(filePath); err == nil { - err := w.sendFileCreatedEventIfNew(filePath, fi) - if err != nil { - if !w.sendError(err) { - closed = true - } - } - } - } - } - } - } -} - -// newEvent returns an platform-independent Event based on kqueue Fflags. -func (w *Watcher) newEvent(name string, mask uint32) Event { - e := Event{Name: name} - if mask&unix.NOTE_DELETE == unix.NOTE_DELETE { - e.Op |= Remove - } - if mask&unix.NOTE_WRITE == unix.NOTE_WRITE { - e.Op |= Write - } - if mask&unix.NOTE_RENAME == unix.NOTE_RENAME { - e.Op |= Rename - } - if mask&unix.NOTE_ATTRIB == unix.NOTE_ATTRIB { - e.Op |= Chmod - } - // No point sending a write and delete event at the same time: if it's gone, - // then it's gone. - if e.Op.Has(Write) && e.Op.Has(Remove) { - e.Op &^= Write - } - return e -} - -// watchDirectoryFiles to mimic inotify when adding a watch on a directory -func (w *Watcher) watchDirectoryFiles(dirPath string) error { - // Get all files - files, err := os.ReadDir(dirPath) - if err != nil { - return err - } - - for _, f := range files { - path := filepath.Join(dirPath, f.Name()) - - fi, err := f.Info() - if err != nil { - return fmt.Errorf("%q: %w", path, err) - } - - cleanPath, err := w.internalWatch(path, fi) - if err != nil { - // No permission to read the file; that's not a problem: just skip. - // But do add it to w.fileExists to prevent it from being picked up - // as a "new" file later (it still shows up in the directory - // listing). - switch { - case errors.Is(err, unix.EACCES) || errors.Is(err, unix.EPERM): - cleanPath = filepath.Clean(path) - default: - return fmt.Errorf("%q: %w", path, err) - } - } - - w.mu.Lock() - w.fileExists[cleanPath] = struct{}{} - w.mu.Unlock() - } - - return nil -} - -// Search the directory for new files and send an event for them. -// -// This functionality is to have the BSD watcher match the inotify, which sends -// a create event for files created in a watched directory. -func (w *Watcher) sendDirectoryChangeEvents(dir string) error { - files, err := os.ReadDir(dir) - if err != nil { - // Directory no longer exists: we can ignore this safely. kqueue will - // still give us the correct events. - if errors.Is(err, os.ErrNotExist) { - return nil - } - return fmt.Errorf("fsnotify.sendDirectoryChangeEvents: %w", err) - } - - for _, f := range files { - fi, err := f.Info() - if err != nil { - return fmt.Errorf("fsnotify.sendDirectoryChangeEvents: %w", err) - } - - err = w.sendFileCreatedEventIfNew(filepath.Join(dir, fi.Name()), fi) - if err != nil { - // Don't need to send an error if this file isn't readable. - if errors.Is(err, unix.EACCES) || errors.Is(err, unix.EPERM) { - return nil - } - return fmt.Errorf("fsnotify.sendDirectoryChangeEvents: %w", err) - } - } - return nil -} - -// sendFileCreatedEvent sends a create event if the file isn't already being tracked. 
-func (w *Watcher) sendFileCreatedEventIfNew(filePath string, fi os.FileInfo) (err error) { - w.mu.Lock() - _, doesExist := w.fileExists[filePath] - w.mu.Unlock() - if !doesExist { - if !w.sendEvent(Event{Name: filePath, Op: Create}) { - return - } - } - - // like watchDirectoryFiles (but without doing another ReadDir) - filePath, err = w.internalWatch(filePath, fi) - if err != nil { - return err - } - - w.mu.Lock() - w.fileExists[filePath] = struct{}{} - w.mu.Unlock() - - return nil -} - -func (w *Watcher) internalWatch(name string, fi os.FileInfo) (string, error) { - if fi.IsDir() { - // mimic Linux providing delete events for subdirectories, but preserve - // the flags used if currently watching subdirectory - w.mu.Lock() - flags := w.dirFlags[name] - w.mu.Unlock() - - flags |= unix.NOTE_DELETE | unix.NOTE_RENAME - return w.addWatch(name, flags) - } - - // watch file to mimic Linux inotify - return w.addWatch(name, noteAllEvents) -} - -// Register events with the queue. -func (w *Watcher) register(fds []int, flags int, fflags uint32) error { - changes := make([]unix.Kevent_t, len(fds)) - for i, fd := range fds { - // SetKevent converts int to the platform-specific types. - unix.SetKevent(&changes[i], fd, unix.EVFILT_VNODE, flags) - changes[i].Fflags = fflags - } - - // Register the events. - success, err := unix.Kevent(w.kq, changes, nil, nil) - if success == -1 { - return err - } - return nil -} - -// read retrieves pending events, or waits until an event occurs. -func (w *Watcher) read(events []unix.Kevent_t) ([]unix.Kevent_t, error) { - n, err := unix.Kevent(w.kq, nil, events, nil) - if err != nil { - return nil, err - } - return events[0:n], nil -} diff --git a/vendor/github.com/fsnotify/fsnotify/backend_other.go b/vendor/github.com/fsnotify/fsnotify/backend_other.go deleted file mode 100644 index d34a23c01..000000000 --- a/vendor/github.com/fsnotify/fsnotify/backend_other.go +++ /dev/null @@ -1,205 +0,0 @@ -//go:build appengine || (!darwin && !dragonfly && !freebsd && !openbsd && !linux && !netbsd && !solaris && !windows) -// +build appengine !darwin,!dragonfly,!freebsd,!openbsd,!linux,!netbsd,!solaris,!windows - -// Note: the documentation on the Watcher type and methods is generated from -// mkdoc.zsh - -package fsnotify - -import "errors" - -// Watcher watches a set of paths, delivering events on a channel. -// -// A watcher should not be copied (e.g. pass it by pointer, rather than by -// value). -// -// # Linux notes -// -// When a file is removed a Remove event won't be emitted until all file -// descriptors are closed, and deletes will always emit a Chmod. For example: -// -// fp := os.Open("file") -// os.Remove("file") // Triggers Chmod -// fp.Close() // Triggers Remove -// -// This is the event that inotify sends, so not much can be changed about this. -// -// The fs.inotify.max_user_watches sysctl variable specifies the upper limit -// for the number of watches per user, and fs.inotify.max_user_instances -// specifies the maximum number of inotify instances per user. Every Watcher you -// create is an "instance", and every path you add is a "watch". 
-// -// These are also exposed in /proc as /proc/sys/fs/inotify/max_user_watches and -// /proc/sys/fs/inotify/max_user_instances -// -// To increase them you can use sysctl or write the value to the /proc file: -// -// # Default values on Linux 5.18 -// sysctl fs.inotify.max_user_watches=124983 -// sysctl fs.inotify.max_user_instances=128 -// -// To make the changes persist on reboot edit /etc/sysctl.conf or -// /usr/lib/sysctl.d/50-default.conf (details differ per Linux distro; check -// your distro's documentation): -// -// fs.inotify.max_user_watches=124983 -// fs.inotify.max_user_instances=128 -// -// Reaching the limit will result in a "no space left on device" or "too many open -// files" error. -// -// # kqueue notes (macOS, BSD) -// -// kqueue requires opening a file descriptor for every file that's being watched; -// so if you're watching a directory with five files then that's six file -// descriptors. You will run in to your system's "max open files" limit faster on -// these platforms. -// -// The sysctl variables kern.maxfiles and kern.maxfilesperproc can be used to -// control the maximum number of open files, as well as /etc/login.conf on BSD -// systems. -// -// # Windows notes -// -// Paths can be added as "C:\path\to\dir", but forward slashes -// ("C:/path/to/dir") will also work. -// -// When a watched directory is removed it will always send an event for the -// directory itself, but may not send events for all files in that directory. -// Sometimes it will send events for all times, sometimes it will send no -// events, and often only for some files. -// -// The default ReadDirectoryChangesW() buffer size is 64K, which is the largest -// value that is guaranteed to work with SMB filesystems. If you have many -// events in quick succession this may not be enough, and you will have to use -// [WithBufferSize] to increase the value. -type Watcher struct { - // Events sends the filesystem change events. - // - // fsnotify can send the following events; a "path" here can refer to a - // file, directory, symbolic link, or special file like a FIFO. - // - // fsnotify.Create A new path was created; this may be followed by one - // or more Write events if data also gets written to a - // file. - // - // fsnotify.Remove A path was removed. - // - // fsnotify.Rename A path was renamed. A rename is always sent with the - // old path as Event.Name, and a Create event will be - // sent with the new name. Renames are only sent for - // paths that are currently watched; e.g. moving an - // unmonitored file into a monitored directory will - // show up as just a Create. Similarly, renaming a file - // to outside a monitored directory will show up as - // only a Rename. - // - // fsnotify.Write A file or named pipe was written to. A Truncate will - // also trigger a Write. A single "write action" - // initiated by the user may show up as one or multiple - // writes, depending on when the system syncs things to - // disk. For example when compiling a large Go program - // you may get hundreds of Write events, and you may - // want to wait until you've stopped receiving them - // (see the dedup example in cmd/fsnotify). - // - // Some systems may send Write event for directories - // when the directory content changes. - // - // fsnotify.Chmod Attributes were changed. On Linux this is also sent - // when a file is removed (or more accurately, when a - // link to an inode is removed). On kqueue it's sent - // when a file is truncated. On Windows it's never - // sent. 
- Events chan Event - - // Errors sends any errors. - // - // ErrEventOverflow is used to indicate there are too many events: - // - // - inotify: There are too many queued events (fs.inotify.max_queued_events sysctl) - // - windows: The buffer size is too small; WithBufferSize() can be used to increase it. - // - kqueue, fen: Not used. - Errors chan error -} - -// NewWatcher creates a new Watcher. -func NewWatcher() (*Watcher, error) { - return nil, errors.New("fsnotify not supported on the current platform") -} - -// NewBufferedWatcher creates a new Watcher with a buffered Watcher.Events -// channel. -// -// The main use case for this is situations with a very large number of events -// where the kernel buffer size can't be increased (e.g. due to lack of -// permissions). An unbuffered Watcher will perform better for almost all use -// cases, and whenever possible you will be better off increasing the kernel -// buffers instead of adding a large userspace buffer. -func NewBufferedWatcher(sz uint) (*Watcher, error) { return NewWatcher() } - -// Close removes all watches and closes the Events channel. -func (w *Watcher) Close() error { return nil } - -// WatchList returns all paths explicitly added with [Watcher.Add] (and are not -// yet removed). -// -// Returns nil if [Watcher.Close] was called. -func (w *Watcher) WatchList() []string { return nil } - -// Add starts monitoring the path for changes. -// -// A path can only be watched once; watching it more than once is a no-op and will -// not return an error. Paths that do not yet exist on the filesystem cannot be -// watched. -// -// A watch will be automatically removed if the watched path is deleted or -// renamed. The exception is the Windows backend, which doesn't remove the -// watcher on renames. -// -// Notifications on network filesystems (NFS, SMB, FUSE, etc.) or special -// filesystems (/proc, /sys, etc.) generally don't work. -// -// Returns [ErrClosed] if [Watcher.Close] was called. -// -// See [Watcher.AddWith] for a version that allows adding options. -// -// # Watching directories -// -// All files in a directory are monitored, including new files that are created -// after the watcher is started. Subdirectories are not watched (i.e. it's -// non-recursive). -// -// # Watching files -// -// Watching individual files (rather than directories) is generally not -// recommended as many programs (especially editors) update files atomically: it -// will write to a temporary file which is then moved to to destination, -// overwriting the original (or some variant thereof). The watcher on the -// original file is now lost, as that no longer exists. -// -// The upshot of this is that a power failure or crash won't leave a -// half-written file. -// -// Watch the parent directory and use Event.Name to filter out files you're not -// interested in. There is an example of this in cmd/fsnotify/file.go. -func (w *Watcher) Add(name string) error { return nil } - -// AddWith is like [Watcher.Add], but allows adding options. When using Add() -// the defaults described below are used. -// -// Possible options are: -// -// - [WithBufferSize] sets the buffer size for the Windows backend; no-op on -// other platforms. The default is 64K (65536 bytes). -func (w *Watcher) AddWith(name string, opts ...addOpt) error { return nil } - -// Remove stops monitoring the path for changes. -// -// Directories are always removed non-recursively. For example, if you added -// /tmp/dir and /tmp/dir/subdir then you will need to remove both. 
-// -// Removing a path that has not yet been added returns [ErrNonExistentWatch]. -// -// Returns nil if [Watcher.Close] was called. -func (w *Watcher) Remove(name string) error { return nil } diff --git a/vendor/github.com/fsnotify/fsnotify/backend_windows.go b/vendor/github.com/fsnotify/fsnotify/backend_windows.go deleted file mode 100644 index 9bc91e5d6..000000000 --- a/vendor/github.com/fsnotify/fsnotify/backend_windows.go +++ /dev/null @@ -1,827 +0,0 @@ -//go:build windows -// +build windows - -// Windows backend based on ReadDirectoryChangesW() -// -// https://learn.microsoft.com/en-us/windows/win32/api/winbase/nf-winbase-readdirectorychangesw -// -// Note: the documentation on the Watcher type and methods is generated from -// mkdoc.zsh - -package fsnotify - -import ( - "errors" - "fmt" - "os" - "path/filepath" - "reflect" - "runtime" - "strings" - "sync" - "unsafe" - - "golang.org/x/sys/windows" -) - -// Watcher watches a set of paths, delivering events on a channel. -// -// A watcher should not be copied (e.g. pass it by pointer, rather than by -// value). -// -// # Linux notes -// -// When a file is removed a Remove event won't be emitted until all file -// descriptors are closed, and deletes will always emit a Chmod. For example: -// -// fp := os.Open("file") -// os.Remove("file") // Triggers Chmod -// fp.Close() // Triggers Remove -// -// This is the event that inotify sends, so not much can be changed about this. -// -// The fs.inotify.max_user_watches sysctl variable specifies the upper limit -// for the number of watches per user, and fs.inotify.max_user_instances -// specifies the maximum number of inotify instances per user. Every Watcher you -// create is an "instance", and every path you add is a "watch". -// -// These are also exposed in /proc as /proc/sys/fs/inotify/max_user_watches and -// /proc/sys/fs/inotify/max_user_instances -// -// To increase them you can use sysctl or write the value to the /proc file: -// -// # Default values on Linux 5.18 -// sysctl fs.inotify.max_user_watches=124983 -// sysctl fs.inotify.max_user_instances=128 -// -// To make the changes persist on reboot edit /etc/sysctl.conf or -// /usr/lib/sysctl.d/50-default.conf (details differ per Linux distro; check -// your distro's documentation): -// -// fs.inotify.max_user_watches=124983 -// fs.inotify.max_user_instances=128 -// -// Reaching the limit will result in a "no space left on device" or "too many open -// files" error. -// -// # kqueue notes (macOS, BSD) -// -// kqueue requires opening a file descriptor for every file that's being watched; -// so if you're watching a directory with five files then that's six file -// descriptors. You will run in to your system's "max open files" limit faster on -// these platforms. -// -// The sysctl variables kern.maxfiles and kern.maxfilesperproc can be used to -// control the maximum number of open files, as well as /etc/login.conf on BSD -// systems. -// -// # Windows notes -// -// Paths can be added as "C:\path\to\dir", but forward slashes -// ("C:/path/to/dir") will also work. -// -// When a watched directory is removed it will always send an event for the -// directory itself, but may not send events for all files in that directory. -// Sometimes it will send events for all times, sometimes it will send no -// events, and often only for some files. -// -// The default ReadDirectoryChangesW() buffer size is 64K, which is the largest -// value that is guaranteed to work with SMB filesystems. 
If you have many -// events in quick succession this may not be enough, and you will have to use -// [WithBufferSize] to increase the value. -type Watcher struct { - // Events sends the filesystem change events. - // - // fsnotify can send the following events; a "path" here can refer to a - // file, directory, symbolic link, or special file like a FIFO. - // - // fsnotify.Create A new path was created; this may be followed by one - // or more Write events if data also gets written to a - // file. - // - // fsnotify.Remove A path was removed. - // - // fsnotify.Rename A path was renamed. A rename is always sent with the - // old path as Event.Name, and a Create event will be - // sent with the new name. Renames are only sent for - // paths that are currently watched; e.g. moving an - // unmonitored file into a monitored directory will - // show up as just a Create. Similarly, renaming a file - // to outside a monitored directory will show up as - // only a Rename. - // - // fsnotify.Write A file or named pipe was written to. A Truncate will - // also trigger a Write. A single "write action" - // initiated by the user may show up as one or multiple - // writes, depending on when the system syncs things to - // disk. For example when compiling a large Go program - // you may get hundreds of Write events, and you may - // want to wait until you've stopped receiving them - // (see the dedup example in cmd/fsnotify). - // - // Some systems may send Write event for directories - // when the directory content changes. - // - // fsnotify.Chmod Attributes were changed. On Linux this is also sent - // when a file is removed (or more accurately, when a - // link to an inode is removed). On kqueue it's sent - // when a file is truncated. On Windows it's never - // sent. - Events chan Event - - // Errors sends any errors. - // - // ErrEventOverflow is used to indicate there are too many events: - // - // - inotify: There are too many queued events (fs.inotify.max_queued_events sysctl) - // - windows: The buffer size is too small; WithBufferSize() can be used to increase it. - // - kqueue, fen: Not used. - Errors chan error - - port windows.Handle // Handle to completion port - input chan *input // Inputs to the reader are sent on this channel - quit chan chan<- error - - mu sync.Mutex // Protects access to watches, closed - watches watchMap // Map of watches (key: i-number) - closed bool // Set to true when Close() is first called -} - -// NewWatcher creates a new Watcher. -func NewWatcher() (*Watcher, error) { - return NewBufferedWatcher(50) -} - -// NewBufferedWatcher creates a new Watcher with a buffered Watcher.Events -// channel. -// -// The main use case for this is situations with a very large number of events -// where the kernel buffer size can't be increased (e.g. due to lack of -// permissions). An unbuffered Watcher will perform better for almost all use -// cases, and whenever possible you will be better off increasing the kernel -// buffers instead of adding a large userspace buffer. 
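A short sketch, assuming only the API shown in the removed sources, of combining a buffered Events channel with a larger Windows ReadDirectoryChangesW buffer; the path and the 256 KiB size are arbitrary illustrations, and WithBufferSize is a no-op on non-Windows platforms:

	package main

	import (
		"log"

		"github.com/fsnotify/fsnotify"
	)

	func main() {
		// Buffer up to 100 events in the Events channel.
		w, err := fsnotify.NewBufferedWatcher(100)
		if err != nil {
			log.Fatal(err)
		}
		defer w.Close()

		// Ask for a 256 KiB ReadDirectoryChangesW buffer; this option only
		// has an effect on Windows and is ignored elsewhere.
		err = w.AddWith(`C:\path\to\dir`, fsnotify.WithBufferSize(256*1024))
		if err != nil {
			log.Fatal(err)
		}
	}
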
-func NewBufferedWatcher(sz uint) (*Watcher, error) { - port, err := windows.CreateIoCompletionPort(windows.InvalidHandle, 0, 0, 0) - if err != nil { - return nil, os.NewSyscallError("CreateIoCompletionPort", err) - } - w := &Watcher{ - port: port, - watches: make(watchMap), - input: make(chan *input, 1), - Events: make(chan Event, sz), - Errors: make(chan error), - quit: make(chan chan<- error, 1), - } - go w.readEvents() - return w, nil -} - -func (w *Watcher) isClosed() bool { - w.mu.Lock() - defer w.mu.Unlock() - return w.closed -} - -func (w *Watcher) sendEvent(name string, mask uint64) bool { - if mask == 0 { - return false - } - - event := w.newEvent(name, uint32(mask)) - select { - case ch := <-w.quit: - w.quit <- ch - case w.Events <- event: - } - return true -} - -// Returns true if the error was sent, or false if watcher is closed. -func (w *Watcher) sendError(err error) bool { - select { - case w.Errors <- err: - return true - case <-w.quit: - } - return false -} - -// Close removes all watches and closes the Events channel. -func (w *Watcher) Close() error { - if w.isClosed() { - return nil - } - - w.mu.Lock() - w.closed = true - w.mu.Unlock() - - // Send "quit" message to the reader goroutine - ch := make(chan error) - w.quit <- ch - if err := w.wakeupReader(); err != nil { - return err - } - return <-ch -} - -// Add starts monitoring the path for changes. -// -// A path can only be watched once; watching it more than once is a no-op and will -// not return an error. Paths that do not yet exist on the filesystem cannot be -// watched. -// -// A watch will be automatically removed if the watched path is deleted or -// renamed. The exception is the Windows backend, which doesn't remove the -// watcher on renames. -// -// Notifications on network filesystems (NFS, SMB, FUSE, etc.) or special -// filesystems (/proc, /sys, etc.) generally don't work. -// -// Returns [ErrClosed] if [Watcher.Close] was called. -// -// See [Watcher.AddWith] for a version that allows adding options. -// -// # Watching directories -// -// All files in a directory are monitored, including new files that are created -// after the watcher is started. Subdirectories are not watched (i.e. it's -// non-recursive). -// -// # Watching files -// -// Watching individual files (rather than directories) is generally not -// recommended as many programs (especially editors) update files atomically: it -// will write to a temporary file which is then moved to to destination, -// overwriting the original (or some variant thereof). The watcher on the -// original file is now lost, as that no longer exists. -// -// The upshot of this is that a power failure or crash won't leave a -// half-written file. -// -// Watch the parent directory and use Event.Name to filter out files you're not -// interested in. There is an example of this in cmd/fsnotify/file.go. -func (w *Watcher) Add(name string) error { return w.AddWith(name) } - -// AddWith is like [Watcher.Add], but allows adding options. When using Add() -// the defaults described below are used. -// -// Possible options are: -// -// - [WithBufferSize] sets the buffer size for the Windows backend; no-op on -// other platforms. The default is 64K (65536 bytes). -func (w *Watcher) AddWith(name string, opts ...addOpt) error { - if w.isClosed() { - return ErrClosed - } - - with := getOptions(opts...) 
- if with.bufsize < 4096 { - return fmt.Errorf("fsnotify.WithBufferSize: buffer size cannot be smaller than 4096 bytes") - } - - in := &input{ - op: opAddWatch, - path: filepath.Clean(name), - flags: sysFSALLEVENTS, - reply: make(chan error), - bufsize: with.bufsize, - } - w.input <- in - if err := w.wakeupReader(); err != nil { - return err - } - return <-in.reply -} - -// Remove stops monitoring the path for changes. -// -// Directories are always removed non-recursively. For example, if you added -// /tmp/dir and /tmp/dir/subdir then you will need to remove both. -// -// Removing a path that has not yet been added returns [ErrNonExistentWatch]. -// -// Returns nil if [Watcher.Close] was called. -func (w *Watcher) Remove(name string) error { - if w.isClosed() { - return nil - } - - in := &input{ - op: opRemoveWatch, - path: filepath.Clean(name), - reply: make(chan error), - } - w.input <- in - if err := w.wakeupReader(); err != nil { - return err - } - return <-in.reply -} - -// WatchList returns all paths explicitly added with [Watcher.Add] (and are not -// yet removed). -// -// Returns nil if [Watcher.Close] was called. -func (w *Watcher) WatchList() []string { - if w.isClosed() { - return nil - } - - w.mu.Lock() - defer w.mu.Unlock() - - entries := make([]string, 0, len(w.watches)) - for _, entry := range w.watches { - for _, watchEntry := range entry { - entries = append(entries, watchEntry.path) - } - } - - return entries -} - -// These options are from the old golang.org/x/exp/winfsnotify, where you could -// add various options to the watch. This has long since been removed. -// -// The "sys" in the name is misleading as they're not part of any "system". -// -// This should all be removed at some point, and just use windows.FILE_NOTIFY_* -const ( - sysFSALLEVENTS = 0xfff - sysFSCREATE = 0x100 - sysFSDELETE = 0x200 - sysFSDELETESELF = 0x400 - sysFSMODIFY = 0x2 - sysFSMOVE = 0xc0 - sysFSMOVEDFROM = 0x40 - sysFSMOVEDTO = 0x80 - sysFSMOVESELF = 0x800 - sysFSIGNORED = 0x8000 -) - -func (w *Watcher) newEvent(name string, mask uint32) Event { - e := Event{Name: name} - if mask&sysFSCREATE == sysFSCREATE || mask&sysFSMOVEDTO == sysFSMOVEDTO { - e.Op |= Create - } - if mask&sysFSDELETE == sysFSDELETE || mask&sysFSDELETESELF == sysFSDELETESELF { - e.Op |= Remove - } - if mask&sysFSMODIFY == sysFSMODIFY { - e.Op |= Write - } - if mask&sysFSMOVE == sysFSMOVE || mask&sysFSMOVESELF == sysFSMOVESELF || mask&sysFSMOVEDFROM == sysFSMOVEDFROM { - e.Op |= Rename - } - return e -} - -const ( - opAddWatch = iota - opRemoveWatch -) - -const ( - provisional uint64 = 1 << (32 + iota) -) - -type input struct { - op int - path string - flags uint32 - bufsize int - reply chan error -} - -type inode struct { - handle windows.Handle - volume uint32 - index uint64 -} - -type watch struct { - ov windows.Overlapped - ino *inode // i-number - recurse bool // Recursive watch? 
- path string // Directory path - mask uint64 // Directory itself is being watched with these notify flags - names map[string]uint64 // Map of names being watched and their notify flags - rename string // Remembers the old name while renaming a file - buf []byte // buffer, allocated later -} - -type ( - indexMap map[uint64]*watch - watchMap map[uint32]indexMap -) - -func (w *Watcher) wakeupReader() error { - err := windows.PostQueuedCompletionStatus(w.port, 0, 0, nil) - if err != nil { - return os.NewSyscallError("PostQueuedCompletionStatus", err) - } - return nil -} - -func (w *Watcher) getDir(pathname string) (dir string, err error) { - attr, err := windows.GetFileAttributes(windows.StringToUTF16Ptr(pathname)) - if err != nil { - return "", os.NewSyscallError("GetFileAttributes", err) - } - if attr&windows.FILE_ATTRIBUTE_DIRECTORY != 0 { - dir = pathname - } else { - dir, _ = filepath.Split(pathname) - dir = filepath.Clean(dir) - } - return -} - -func (w *Watcher) getIno(path string) (ino *inode, err error) { - h, err := windows.CreateFile(windows.StringToUTF16Ptr(path), - windows.FILE_LIST_DIRECTORY, - windows.FILE_SHARE_READ|windows.FILE_SHARE_WRITE|windows.FILE_SHARE_DELETE, - nil, windows.OPEN_EXISTING, - windows.FILE_FLAG_BACKUP_SEMANTICS|windows.FILE_FLAG_OVERLAPPED, 0) - if err != nil { - return nil, os.NewSyscallError("CreateFile", err) - } - - var fi windows.ByHandleFileInformation - err = windows.GetFileInformationByHandle(h, &fi) - if err != nil { - windows.CloseHandle(h) - return nil, os.NewSyscallError("GetFileInformationByHandle", err) - } - ino = &inode{ - handle: h, - volume: fi.VolumeSerialNumber, - index: uint64(fi.FileIndexHigh)<<32 | uint64(fi.FileIndexLow), - } - return ino, nil -} - -// Must run within the I/O thread. -func (m watchMap) get(ino *inode) *watch { - if i := m[ino.volume]; i != nil { - return i[ino.index] - } - return nil -} - -// Must run within the I/O thread. -func (m watchMap) set(ino *inode, watch *watch) { - i := m[ino.volume] - if i == nil { - i = make(indexMap) - m[ino.volume] = i - } - i[ino.index] = watch -} - -// Must run within the I/O thread. -func (w *Watcher) addWatch(pathname string, flags uint64, bufsize int) error { - //pathname, recurse := recursivePath(pathname) - recurse := false - - dir, err := w.getDir(pathname) - if err != nil { - return err - } - - ino, err := w.getIno(dir) - if err != nil { - return err - } - w.mu.Lock() - watchEntry := w.watches.get(ino) - w.mu.Unlock() - if watchEntry == nil { - _, err := windows.CreateIoCompletionPort(ino.handle, w.port, 0, 0) - if err != nil { - windows.CloseHandle(ino.handle) - return os.NewSyscallError("CreateIoCompletionPort", err) - } - watchEntry = &watch{ - ino: ino, - path: dir, - names: make(map[string]uint64), - recurse: recurse, - buf: make([]byte, bufsize), - } - w.mu.Lock() - w.watches.set(ino, watchEntry) - w.mu.Unlock() - flags |= provisional - } else { - windows.CloseHandle(ino.handle) - } - if pathname == dir { - watchEntry.mask |= flags - } else { - watchEntry.names[filepath.Base(pathname)] |= flags - } - - err = w.startRead(watchEntry) - if err != nil { - return err - } - - if pathname == dir { - watchEntry.mask &= ^provisional - } else { - watchEntry.names[filepath.Base(pathname)] &= ^provisional - } - return nil -} - -// Must run within the I/O thread. 
-func (w *Watcher) remWatch(pathname string) error { - pathname, recurse := recursivePath(pathname) - - dir, err := w.getDir(pathname) - if err != nil { - return err - } - ino, err := w.getIno(dir) - if err != nil { - return err - } - - w.mu.Lock() - watch := w.watches.get(ino) - w.mu.Unlock() - - if recurse && !watch.recurse { - return fmt.Errorf("can't use \\... with non-recursive watch %q", pathname) - } - - err = windows.CloseHandle(ino.handle) - if err != nil { - w.sendError(os.NewSyscallError("CloseHandle", err)) - } - if watch == nil { - return fmt.Errorf("%w: %s", ErrNonExistentWatch, pathname) - } - if pathname == dir { - w.sendEvent(watch.path, watch.mask&sysFSIGNORED) - watch.mask = 0 - } else { - name := filepath.Base(pathname) - w.sendEvent(filepath.Join(watch.path, name), watch.names[name]&sysFSIGNORED) - delete(watch.names, name) - } - - return w.startRead(watch) -} - -// Must run within the I/O thread. -func (w *Watcher) deleteWatch(watch *watch) { - for name, mask := range watch.names { - if mask&provisional == 0 { - w.sendEvent(filepath.Join(watch.path, name), mask&sysFSIGNORED) - } - delete(watch.names, name) - } - if watch.mask != 0 { - if watch.mask&provisional == 0 { - w.sendEvent(watch.path, watch.mask&sysFSIGNORED) - } - watch.mask = 0 - } -} - -// Must run within the I/O thread. -func (w *Watcher) startRead(watch *watch) error { - err := windows.CancelIo(watch.ino.handle) - if err != nil { - w.sendError(os.NewSyscallError("CancelIo", err)) - w.deleteWatch(watch) - } - mask := w.toWindowsFlags(watch.mask) - for _, m := range watch.names { - mask |= w.toWindowsFlags(m) - } - if mask == 0 { - err := windows.CloseHandle(watch.ino.handle) - if err != nil { - w.sendError(os.NewSyscallError("CloseHandle", err)) - } - w.mu.Lock() - delete(w.watches[watch.ino.volume], watch.ino.index) - w.mu.Unlock() - return nil - } - - // We need to pass the array, rather than the slice. - hdr := (*reflect.SliceHeader)(unsafe.Pointer(&watch.buf)) - rdErr := windows.ReadDirectoryChanges(watch.ino.handle, - (*byte)(unsafe.Pointer(hdr.Data)), uint32(hdr.Len), - watch.recurse, mask, nil, &watch.ov, 0) - if rdErr != nil { - err := os.NewSyscallError("ReadDirectoryChanges", rdErr) - if rdErr == windows.ERROR_ACCESS_DENIED && watch.mask&provisional == 0 { - // Watched directory was probably removed - w.sendEvent(watch.path, watch.mask&sysFSDELETESELF) - err = nil - } - w.deleteWatch(watch) - w.startRead(watch) - return err - } - return nil -} - -// readEvents reads from the I/O completion port, converts the -// received events into Event objects and sends them via the Events channel. -// Entry point to the I/O thread. -func (w *Watcher) readEvents() { - var ( - n uint32 - key uintptr - ov *windows.Overlapped - ) - runtime.LockOSThread() - - for { - // This error is handled after the watch == nil check below. 
- qErr := windows.GetQueuedCompletionStatus(w.port, &n, &key, &ov, windows.INFINITE) - - watch := (*watch)(unsafe.Pointer(ov)) - if watch == nil { - select { - case ch := <-w.quit: - w.mu.Lock() - var indexes []indexMap - for _, index := range w.watches { - indexes = append(indexes, index) - } - w.mu.Unlock() - for _, index := range indexes { - for _, watch := range index { - w.deleteWatch(watch) - w.startRead(watch) - } - } - - err := windows.CloseHandle(w.port) - if err != nil { - err = os.NewSyscallError("CloseHandle", err) - } - close(w.Events) - close(w.Errors) - ch <- err - return - case in := <-w.input: - switch in.op { - case opAddWatch: - in.reply <- w.addWatch(in.path, uint64(in.flags), in.bufsize) - case opRemoveWatch: - in.reply <- w.remWatch(in.path) - } - default: - } - continue - } - - switch qErr { - case nil: - // No error - case windows.ERROR_MORE_DATA: - if watch == nil { - w.sendError(errors.New("ERROR_MORE_DATA has unexpectedly null lpOverlapped buffer")) - } else { - // The i/o succeeded but the buffer is full. - // In theory we should be building up a full packet. - // In practice we can get away with just carrying on. - n = uint32(unsafe.Sizeof(watch.buf)) - } - case windows.ERROR_ACCESS_DENIED: - // Watched directory was probably removed - w.sendEvent(watch.path, watch.mask&sysFSDELETESELF) - w.deleteWatch(watch) - w.startRead(watch) - continue - case windows.ERROR_OPERATION_ABORTED: - // CancelIo was called on this handle - continue - default: - w.sendError(os.NewSyscallError("GetQueuedCompletionPort", qErr)) - continue - } - - var offset uint32 - for { - if n == 0 { - w.sendError(ErrEventOverflow) - break - } - - // Point "raw" to the event in the buffer - raw := (*windows.FileNotifyInformation)(unsafe.Pointer(&watch.buf[offset])) - - // Create a buf that is the size of the path name - size := int(raw.FileNameLength / 2) - var buf []uint16 - // TODO: Use unsafe.Slice in Go 1.17; https://stackoverflow.com/questions/51187973 - sh := (*reflect.SliceHeader)(unsafe.Pointer(&buf)) - sh.Data = uintptr(unsafe.Pointer(&raw.FileName)) - sh.Len = size - sh.Cap = size - name := windows.UTF16ToString(buf) - fullname := filepath.Join(watch.path, name) - - var mask uint64 - switch raw.Action { - case windows.FILE_ACTION_REMOVED: - mask = sysFSDELETESELF - case windows.FILE_ACTION_MODIFIED: - mask = sysFSMODIFY - case windows.FILE_ACTION_RENAMED_OLD_NAME: - watch.rename = name - case windows.FILE_ACTION_RENAMED_NEW_NAME: - // Update saved path of all sub-watches. 
- old := filepath.Join(watch.path, watch.rename) - w.mu.Lock() - for _, watchMap := range w.watches { - for _, ww := range watchMap { - if strings.HasPrefix(ww.path, old) { - ww.path = filepath.Join(fullname, strings.TrimPrefix(ww.path, old)) - } - } - } - w.mu.Unlock() - - if watch.names[watch.rename] != 0 { - watch.names[name] |= watch.names[watch.rename] - delete(watch.names, watch.rename) - mask = sysFSMOVESELF - } - } - - sendNameEvent := func() { - w.sendEvent(fullname, watch.names[name]&mask) - } - if raw.Action != windows.FILE_ACTION_RENAMED_NEW_NAME { - sendNameEvent() - } - if raw.Action == windows.FILE_ACTION_REMOVED { - w.sendEvent(fullname, watch.names[name]&sysFSIGNORED) - delete(watch.names, name) - } - - w.sendEvent(fullname, watch.mask&w.toFSnotifyFlags(raw.Action)) - if raw.Action == windows.FILE_ACTION_RENAMED_NEW_NAME { - fullname = filepath.Join(watch.path, watch.rename) - sendNameEvent() - } - - // Move to the next event in the buffer - if raw.NextEntryOffset == 0 { - break - } - offset += raw.NextEntryOffset - - // Error! - if offset >= n { - //lint:ignore ST1005 Windows should be capitalized - w.sendError(errors.New( - "Windows system assumed buffer larger than it is, events have likely been missed")) - break - } - } - - if err := w.startRead(watch); err != nil { - w.sendError(err) - } - } -} - -func (w *Watcher) toWindowsFlags(mask uint64) uint32 { - var m uint32 - if mask&sysFSMODIFY != 0 { - m |= windows.FILE_NOTIFY_CHANGE_LAST_WRITE - } - if mask&(sysFSMOVE|sysFSCREATE|sysFSDELETE) != 0 { - m |= windows.FILE_NOTIFY_CHANGE_FILE_NAME | windows.FILE_NOTIFY_CHANGE_DIR_NAME - } - return m -} - -func (w *Watcher) toFSnotifyFlags(action uint32) uint64 { - switch action { - case windows.FILE_ACTION_ADDED: - return sysFSCREATE - case windows.FILE_ACTION_REMOVED: - return sysFSDELETE - case windows.FILE_ACTION_MODIFIED: - return sysFSMODIFY - case windows.FILE_ACTION_RENAMED_OLD_NAME: - return sysFSMOVEDFROM - case windows.FILE_ACTION_RENAMED_NEW_NAME: - return sysFSMOVEDTO - } - return 0 -} diff --git a/vendor/github.com/fsnotify/fsnotify/fsnotify.go b/vendor/github.com/fsnotify/fsnotify/fsnotify.go deleted file mode 100644 index 24c99cc49..000000000 --- a/vendor/github.com/fsnotify/fsnotify/fsnotify.go +++ /dev/null @@ -1,146 +0,0 @@ -// Package fsnotify provides a cross-platform interface for file system -// notifications. -// -// Currently supported systems: -// -// Linux 2.6.32+ via inotify -// BSD, macOS via kqueue -// Windows via ReadDirectoryChangesW -// illumos via FEN -package fsnotify - -import ( - "errors" - "fmt" - "path/filepath" - "strings" -) - -// Event represents a file system notification. -type Event struct { - // Path to the file or directory. - // - // Paths are relative to the input; for example with Add("dir") the Name - // will be set to "dir/file" if you create that file, but if you use - // Add("/path/to/dir") it will be "/path/to/dir/file". - Name string - - // File operation that triggered the event. - // - // This is a bitmask and some systems may send multiple operations at once. - // Use the Event.Has() method instead of comparing with ==. - Op Op -} - -// Op describes a set of file operations. -type Op uint32 - -// The operations fsnotify can trigger; see the documentation on [Watcher] for a -// full description, and check them with [Event.Has]. -const ( - // A new pathname was created. - Create Op = 1 << iota - - // The pathname was written to; this does *not* mean the write has finished, - // and a write can be followed by more writes. 
- Write - - // The path was removed; any watches on it will be removed. Some "remove" - // operations may trigger a Rename if the file is actually moved (for - // example "remove to trash" is often a rename). - Remove - - // The path was renamed to something else; any watched on it will be - // removed. - Rename - - // File attributes were changed. - // - // It's generally not recommended to take action on this event, as it may - // get triggered very frequently by some software. For example, Spotlight - // indexing on macOS, anti-virus software, backup software, etc. - Chmod -) - -// Common errors that can be reported. -var ( - ErrNonExistentWatch = errors.New("fsnotify: can't remove non-existent watch") - ErrEventOverflow = errors.New("fsnotify: queue or buffer overflow") - ErrClosed = errors.New("fsnotify: watcher already closed") -) - -func (o Op) String() string { - var b strings.Builder - if o.Has(Create) { - b.WriteString("|CREATE") - } - if o.Has(Remove) { - b.WriteString("|REMOVE") - } - if o.Has(Write) { - b.WriteString("|WRITE") - } - if o.Has(Rename) { - b.WriteString("|RENAME") - } - if o.Has(Chmod) { - b.WriteString("|CHMOD") - } - if b.Len() == 0 { - return "[no events]" - } - return b.String()[1:] -} - -// Has reports if this operation has the given operation. -func (o Op) Has(h Op) bool { return o&h != 0 } - -// Has reports if this event has the given operation. -func (e Event) Has(op Op) bool { return e.Op.Has(op) } - -// String returns a string representation of the event with their path. -func (e Event) String() string { - return fmt.Sprintf("%-13s %q", e.Op.String(), e.Name) -} - -type ( - addOpt func(opt *withOpts) - withOpts struct { - bufsize int - } -) - -var defaultOpts = withOpts{ - bufsize: 65536, // 64K -} - -func getOptions(opts ...addOpt) withOpts { - with := defaultOpts - for _, o := range opts { - o(&with) - } - return with -} - -// WithBufferSize sets the [ReadDirectoryChangesW] buffer size. -// -// This only has effect on Windows systems, and is a no-op for other backends. -// -// The default value is 64K (65536 bytes) which is the highest value that works -// on all filesystems and should be enough for most applications, but if you -// have a large burst of events it may not be enough. You can increase it if -// you're hitting "queue or buffer overflow" errors ([ErrEventOverflow]). -// -// [ReadDirectoryChangesW]: https://learn.microsoft.com/en-gb/windows/win32/api/winbase/nf-winbase-readdirectorychangesw -func WithBufferSize(bytes int) addOpt { - return func(opt *withOpts) { opt.bufsize = bytes } -} - -// Check if this path is recursive (ends with "/..." or "\..."), and return the -// path with the /... stripped. -func recursivePath(path string) (string, bool) { - if filepath.Base(path) == "..." { - return filepath.Dir(path), true - } - return path, false -} diff --git a/vendor/github.com/fsnotify/fsnotify/mkdoc.zsh b/vendor/github.com/fsnotify/fsnotify/mkdoc.zsh deleted file mode 100644 index 99012ae65..000000000 --- a/vendor/github.com/fsnotify/fsnotify/mkdoc.zsh +++ /dev/null @@ -1,259 +0,0 @@ -#!/usr/bin/env zsh -[ "${ZSH_VERSION:-}" = "" ] && echo >&2 "Only works with zsh" && exit 1 -setopt err_exit no_unset pipefail extended_glob - -# Simple script to update the godoc comments on all watchers so you don't need -# to update the same comment 5 times. 
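Taken together, the Event/Op API removed above is used roughly as follows; a minimal sketch with the standard fsnotify entry points ("/some/dir" is a placeholder path):

    package main

    import (
        "log"

        "github.com/fsnotify/fsnotify"
    )

    func main() {
        w, err := fsnotify.NewWatcher()
        if err != nil {
            log.Fatal(err)
        }
        defer w.Close()

        if err := w.Add("/some/dir"); err != nil { // watch a single directory
            log.Fatal(err)
        }

        for {
            select {
            case ev, ok := <-w.Events:
                if !ok {
                    return
                }
                if ev.Has(fsnotify.Create) || ev.Has(fsnotify.Write) {
                    log.Println("changed:", ev.Name)
                }
            case err, ok := <-w.Errors:
                if !ok {
                    return
                }
                log.Println("watch error:", err)
            }
        }
    }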
- -watcher=$(</tmp/x - print -r -- $cmt >>/tmp/x - tail -n+$(( end + 1 )) $file >>/tmp/x - mv /tmp/x $file - done -} - -set-cmt '^type Watcher struct ' $watcher -set-cmt '^func NewWatcher(' $new -set-cmt '^func NewBufferedWatcher(' $newbuffered -set-cmt '^func (w \*Watcher) Add(' $add -set-cmt '^func (w \*Watcher) AddWith(' $addwith -set-cmt '^func (w \*Watcher) Remove(' $remove -set-cmt '^func (w \*Watcher) Close(' $close -set-cmt '^func (w \*Watcher) WatchList(' $watchlist -set-cmt '^[[:space:]]*Events *chan Event$' $events -set-cmt '^[[:space:]]*Errors *chan error$' $errors diff --git a/vendor/github.com/fsnotify/fsnotify/system_bsd.go b/vendor/github.com/fsnotify/fsnotify/system_bsd.go deleted file mode 100644 index 4322b0b88..000000000 --- a/vendor/github.com/fsnotify/fsnotify/system_bsd.go +++ /dev/null @@ -1,8 +0,0 @@ -//go:build freebsd || openbsd || netbsd || dragonfly -// +build freebsd openbsd netbsd dragonfly - -package fsnotify - -import "golang.org/x/sys/unix" - -const openMode = unix.O_NONBLOCK | unix.O_RDONLY | unix.O_CLOEXEC diff --git a/vendor/github.com/fsnotify/fsnotify/system_darwin.go b/vendor/github.com/fsnotify/fsnotify/system_darwin.go deleted file mode 100644 index 5da5ffa78..000000000 --- a/vendor/github.com/fsnotify/fsnotify/system_darwin.go +++ /dev/null @@ -1,9 +0,0 @@ -//go:build darwin -// +build darwin - -package fsnotify - -import "golang.org/x/sys/unix" - -// note: this constant is not defined on BSD -const openMode = unix.O_EVTONLY | unix.O_CLOEXEC diff --git a/vendor/github.com/fzipp/gocyclo/CHANGELOG.md b/vendor/github.com/fzipp/gocyclo/CHANGELOG.md deleted file mode 100644 index c9bedfb3b..000000000 --- a/vendor/github.com/fzipp/gocyclo/CHANGELOG.md +++ /dev/null @@ -1,58 +0,0 @@ -# Changelog -All notable changes to this project will be documented in this file. - -The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), -and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). 
- -## [0.6.0] - 2022-06-15 -### Changed -- Breaking: remove meaningless `-total` and `-total-short` options - -## [0.5.1] - 2022-04-06 -### Fixed -- Don't skip directories `.` and `..` - -## [0.5.0] - 2022-03-22 -### Changed -- Ignore `vendor` and `testdata` directories and directories with names - that begin with `.` or `_` - -## [0.4.0] - 2021-12-19 -### Added -- Support method receivers with type parameters introduced in Go 1.18 - -### Changed -- Use more efficient filepath.WalkDir instead of filepath.Walk - -## [0.3.1] - 2020-10-20 -### Added -- Test coverage - -### Fixed -- Fix cyclomatic complexity for function literals (base complexity of 1 was missing) - -## [0.3.0] - 2020-10-17 -### Added -- New `-avg-short` and `-total-short` options for printing average and total cyclomatic complexities without label -- Export the `AnalyzeASTFile` function in package API -- Doc comments for exported functions and types - -### Fixed -- Ignore `default` cases - -## [0.2.0] - 2020-10-17 -### Added -- Support for gocyclo as a package -- Support for ignoring of individual functions via a new `gocyclo:ignore` directive -- New `-total` option to compute total cyclomatic complexity -- New `-ignore` option to ignore files matching a regular expression -- Analysis of function literals at declaration level - -### Changed -- Breaking: installation changed to `go get github.com/fzipp/gocyclo/cmd/gocyclo` - -## [0.1.0] - 2020-10-17 - -### Added -- `go.mod` file; beginning of versioning - diff --git a/vendor/github.com/fzipp/gocyclo/CONTRIBUTORS b/vendor/github.com/fzipp/gocyclo/CONTRIBUTORS deleted file mode 100644 index 1c09f1a06..000000000 --- a/vendor/github.com/fzipp/gocyclo/CONTRIBUTORS +++ /dev/null @@ -1,7 +0,0 @@ -# Names should be added to this file like so: -# Name - -# Please keep the list sorted. - -Frederik Zipp -Harshavardhana diff --git a/vendor/github.com/fzipp/gocyclo/LICENSE b/vendor/github.com/fzipp/gocyclo/LICENSE deleted file mode 100644 index 45f88d6cb..000000000 --- a/vendor/github.com/fzipp/gocyclo/LICENSE +++ /dev/null @@ -1,27 +0,0 @@ -Copyright (c) 2013 Frederik Zipp. All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are -met: - - * Redistributions of source code must retain the above copyright -notice, this list of conditions and the following disclaimer. - * Redistributions in binary form must reproduce the above -copyright notice, this list of conditions and the following disclaimer -in the documentation and/or other materials provided with the -distribution. - * Neither the name of the copyright owner nor the names of its -contributors may be used to endorse or promote products derived from -this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT -OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/vendor/github.com/fzipp/gocyclo/README.md b/vendor/github.com/fzipp/gocyclo/README.md deleted file mode 100644 index d357b8ef7..000000000 --- a/vendor/github.com/fzipp/gocyclo/README.md +++ /dev/null @@ -1,106 +0,0 @@ -# gocyclo - -[![PkgGoDev](https://pkg.go.dev/badge/github.com/fzipp/gocyclo)](https://pkg.go.dev/github.com/fzipp/gocyclo) -![Build Status](https://github.com/fzipp/gocyclo/workflows/build/badge.svg) -[![Go Report Card](https://goreportcard.com/badge/github.com/fzipp/gocyclo)](https://goreportcard.com/report/github.com/fzipp/gocyclo) - -Gocyclo calculates -[cyclomatic complexities](https://en.wikipedia.org/wiki/Cyclomatic_complexity) -of functions in Go source code. - -Cyclomatic complexity is a -[code quality metric](https://en.wikipedia.org/wiki/Software_metric) -which can be used to identify code that needs refactoring. -It measures the number of linearly independent paths through a function's -source code. - -The cyclomatic complexity of a function is calculated according to the -following rules: - -``` - 1 is the base complexity of a function -+1 for each 'if', 'for', 'case', '&&' or '||' -``` - -A function with a higher cyclomatic complexity requires more test cases to -cover all possible paths and is potentially harder to understand. The -complexity can be reduced by applying common refactoring techniques that lead -to smaller functions. - -## Installation - -To install the `gocyclo` command, run - -``` -$ go install github.com/fzipp/gocyclo/cmd/gocyclo@latest -``` - -and put the resulting binary in one of your PATH directories if -`$GOPATH/bin` isn't already in your PATH. - -## Usage - -``` -Calculate cyclomatic complexities of Go functions. -Usage: - gocyclo [flags] ... - -Flags: - -over N show functions with complexity > N only and - return exit code 1 if the set is non-empty - -top N show the top N most complex functions only - -avg, -avg-short show the average complexity over all functions; - the short option prints the value without a label - -ignore REGEX exclude files matching the given regular expression - -The output fields for each line are: - -``` - -## Examples - -``` -$ gocyclo . -$ gocyclo main.go -$ gocyclo -top 10 src/ -$ gocyclo -over 25 docker -$ gocyclo -avg . -$ gocyclo -top 20 -ignore "_test|Godeps|vendor/" . -$ gocyclo -over 3 -avg gocyclo/ -``` - -Example output: - -``` -9 gocyclo (*complexityVisitor).Visit complexity.go:30:1 -8 main main cmd/gocyclo/main.go:53:1 -7 gocyclo (*fileAnalyzer).analyzeDecl analyze.go:96:1 -4 gocyclo Analyze analyze.go:24:1 -4 gocyclo parseDirectives directives.go:27:1 -4 gocyclo (Stats).SortAndFilter stats.go:52:1 -Average: 2.72 -``` - -Note that the average is calculated over all analyzed functions, -not just the printed ones. - -### Ignoring individual functions - -Individual functions can be ignored with a `gocyclo:ignore` directive: - -``` -//gocyclo:ignore -func f1() { - // ... -} - -//gocyclo:ignore -var f2 = func() { - // ... 
-} -``` - -## License - -This project is free and open source software licensed under the -[BSD 3-Clause License](LICENSE). diff --git a/vendor/github.com/fzipp/gocyclo/analyze.go b/vendor/github.com/fzipp/gocyclo/analyze.go deleted file mode 100644 index 2d8bcff25..000000000 --- a/vendor/github.com/fzipp/gocyclo/analyze.go +++ /dev/null @@ -1,154 +0,0 @@ -// Copyright 2020 Frederik Zipp. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package gocyclo - -import ( - "fmt" - "go/ast" - "go/parser" - "go/token" - "io/fs" - "log" - "os" - "path/filepath" - "regexp" - "strings" -) - -// Analyze calculates the cyclomatic complexities of the functions and methods -// in the Go source code files in the given paths. If a path is a directory -// all Go files under that directory are analyzed recursively. -// Files with paths matching the 'ignore' regular expressions are skipped. -// The 'ignore' parameter can be nil, meaning that no files are skipped. -func Analyze(paths []string, ignore *regexp.Regexp) Stats { - var stats Stats - for _, path := range paths { - info, err := os.Stat(path) - if err != nil { - log.Printf("could not get file info for path %q: %s\n", path, err) - continue - } - if info.IsDir() { - stats = analyzeDir(path, ignore, stats) - } else { - stats = analyzeFile(path, ignore, stats) - } - } - return stats -} - -func analyzeDir(dirname string, ignore *regexp.Regexp, stats Stats) Stats { - filepath.WalkDir(dirname, func(path string, entry fs.DirEntry, err error) error { - if isSkipDir(entry) { - return filepath.SkipDir - } - if err == nil && isGoFile(entry) { - stats = analyzeFile(path, ignore, stats) - } - return err - }) - return stats -} - -var skipDirs = map[string]bool{ - "testdata": true, - "vendor": true, -} - -func isSkipDir(entry fs.DirEntry) bool { - return entry.IsDir() && (skipDirs[entry.Name()] || - (strings.HasPrefix(entry.Name(), ".") && entry.Name() != "." && entry.Name() != "..") || - strings.HasPrefix(entry.Name(), "_")) -} - -func isGoFile(entry fs.DirEntry) bool { - return !entry.IsDir() && strings.HasSuffix(entry.Name(), ".go") -} - -func analyzeFile(path string, ignore *regexp.Regexp, stats Stats) Stats { - if isIgnored(path, ignore) { - return stats - } - fset := token.NewFileSet() - f, err := parser.ParseFile(fset, path, nil, parser.ParseComments) - if err != nil { - log.Fatal(err) - } - return AnalyzeASTFile(f, fset, stats) -} - -func isIgnored(path string, ignore *regexp.Regexp) bool { - return ignore != nil && ignore.MatchString(path) -} - -// AnalyzeASTFile calculates the cyclomatic complexities of the functions -// and methods in the abstract syntax tree (AST) of a parsed Go file and -// appends the results to the given Stats slice. 
-func AnalyzeASTFile(f *ast.File, fs *token.FileSet, s Stats) Stats { - analyzer := &fileAnalyzer{ - file: f, - fileSet: fs, - stats: s, - } - return analyzer.analyze() -} - -type fileAnalyzer struct { - file *ast.File - fileSet *token.FileSet - stats Stats -} - -func (a *fileAnalyzer) analyze() Stats { - for _, decl := range a.file.Decls { - a.analyzeDecl(decl) - } - return a.stats -} - -func (a *fileAnalyzer) analyzeDecl(d ast.Decl) { - switch decl := d.(type) { - case *ast.FuncDecl: - a.addStatIfNotIgnored(decl, funcName(decl), decl.Doc) - case *ast.GenDecl: - for _, spec := range decl.Specs { - valueSpec, ok := spec.(*ast.ValueSpec) - if !ok { - continue - } - for _, value := range valueSpec.Values { - funcLit, ok := value.(*ast.FuncLit) - if !ok { - continue - } - a.addStatIfNotIgnored(funcLit, valueSpec.Names[0].Name, decl.Doc) - } - } - } -} - -func (a *fileAnalyzer) addStatIfNotIgnored(node ast.Node, funcName string, doc *ast.CommentGroup) { - if parseDirectives(doc).HasIgnore() { - return - } - a.stats = append(a.stats, Stat{ - PkgName: a.file.Name.Name, - FuncName: funcName, - Complexity: Complexity(node), - Pos: a.fileSet.Position(node.Pos()), - }) -} - -// funcName returns the name representation of a function or method: -// "(Type).Name" for methods or simply "Name" for functions. -func funcName(fn *ast.FuncDecl) string { - if fn.Recv != nil { - if fn.Recv.NumFields() > 0 { - typ := fn.Recv.List[0].Type - return fmt.Sprintf("(%s).%s", recvString(typ), fn.Name) - } - } - return fn.Name.Name -} diff --git a/vendor/github.com/fzipp/gocyclo/complexity.go b/vendor/github.com/fzipp/gocyclo/complexity.go deleted file mode 100644 index 65f5077e8..000000000 --- a/vendor/github.com/fzipp/gocyclo/complexity.go +++ /dev/null @@ -1,48 +0,0 @@ -// Copyright 2020 Frederik Zipp. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Package gocyclo calculates the cyclomatic complexities of functions and -// methods in Go source code. -package gocyclo - -import ( - "go/ast" - "go/token" -) - -// Complexity calculates the cyclomatic complexity of a function. -// The 'fn' node is either a *ast.FuncDecl or a *ast.FuncLit. -func Complexity(fn ast.Node) int { - v := complexityVisitor{ - complexity: 1, - } - ast.Walk(&v, fn) - return v.complexity -} - -type complexityVisitor struct { - // complexity is the cyclomatic complexity - complexity int -} - -// Visit implements the ast.Visitor interface. -func (v *complexityVisitor) Visit(n ast.Node) ast.Visitor { - switch n := n.(type) { - case *ast.IfStmt, *ast.ForStmt, *ast.RangeStmt: - v.complexity++ - case *ast.CaseClause: - if n.List != nil { // ignore default case - v.complexity++ - } - case *ast.CommClause: - if n.Comm != nil { // ignore default case - v.complexity++ - } - case *ast.BinaryExpr: - if n.Op == token.LAND || n.Op == token.LOR { - v.complexity++ - } - } - return v -} diff --git a/vendor/github.com/fzipp/gocyclo/directives.go b/vendor/github.com/fzipp/gocyclo/directives.go deleted file mode 100644 index b4ee3c448..000000000 --- a/vendor/github.com/fzipp/gocyclo/directives.go +++ /dev/null @@ -1,39 +0,0 @@ -// Copyright 2020 Frederik Zipp. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
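A worked example of the counting rules implemented by complexityVisitor above (base 1, plus one for each if, for/range, non-default case or comm clause, && and ||):

    func classify(xs []int, limit int) string { // base complexity: 1
        for _, x := range xs { //                  +1 (range loop)
            if x > limit && x%2 == 0 { //          +1 (if), +1 (&&)
                return "big even"
            }
        }
        return "other"
    }

    // gocyclo.Complexity on this *ast.FuncDecl reports 4.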
- -package gocyclo - -import ( - "go/ast" - "strings" -) - -type directives []string - -func (ds directives) HasIgnore() bool { - return ds.isPresent("ignore") -} - -func (ds directives) isPresent(name string) bool { - for _, d := range ds { - if d == name { - return true - } - } - return false -} - -func parseDirectives(doc *ast.CommentGroup) directives { - if doc == nil { - return directives{} - } - const prefix = "//gocyclo:" - var ds directives - for _, comment := range doc.List { - if strings.HasPrefix(comment.Text, prefix) { - ds = append(ds, strings.TrimSpace(strings.TrimPrefix(comment.Text, prefix))) - } - } - return ds -} diff --git a/vendor/github.com/fzipp/gocyclo/recv.go b/vendor/github.com/fzipp/gocyclo/recv.go deleted file mode 100644 index a5c82fef5..000000000 --- a/vendor/github.com/fzipp/gocyclo/recv.go +++ /dev/null @@ -1,26 +0,0 @@ -// Copyright 2021 Frederik Zipp. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -//go:build go1.18 -// +build go1.18 - -package gocyclo - -import "go/ast" - -// recvString returns a string representation of recv of the -// form "T", "*T", or "BADRECV" (if not a proper receiver type). -func recvString(recv ast.Expr) string { - switch t := recv.(type) { - case *ast.Ident: - return t.Name - case *ast.StarExpr: - return "*" + recvString(t.X) - case *ast.IndexExpr: - return recvString(t.X) - case *ast.IndexListExpr: - return recvString(t.X) - } - return "BADRECV" -} diff --git a/vendor/github.com/fzipp/gocyclo/recv_pre118.go b/vendor/github.com/fzipp/gocyclo/recv_pre118.go deleted file mode 100644 index 2fe2d0cdb..000000000 --- a/vendor/github.com/fzipp/gocyclo/recv_pre118.go +++ /dev/null @@ -1,24 +0,0 @@ -// Copyright 2021 Frederik Zipp. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -//go:build !go1.18 -// +build !go1.18 - -package gocyclo - -import "go/ast" - -// recvString returns a string representation of recv of the -// form "T", "*T", or "BADRECV" (if not a proper receiver type). -func recvString(recv ast.Expr) string { - switch t := recv.(type) { - case *ast.Ident: - return t.Name - case *ast.StarExpr: - return "*" + recvString(t.X) - case *ast.IndexExpr: - return recvString(t.X) - } - return "BADRECV" -} diff --git a/vendor/github.com/fzipp/gocyclo/stats.go b/vendor/github.com/fzipp/gocyclo/stats.go deleted file mode 100644 index 0a377e4b6..000000000 --- a/vendor/github.com/fzipp/gocyclo/stats.go +++ /dev/null @@ -1,73 +0,0 @@ -// Copyright 2020 Frederik Zipp. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package gocyclo - -import ( - "fmt" - "go/token" - "sort" -) - -// Stat holds the cyclomatic complexity of a function, along with its package -// and and function name and its position in the source code. -type Stat struct { - PkgName string - FuncName string - Complexity int - Pos token.Position -} - -// String formats the cyclomatic complexity information of a function in -// the following format: " " -func (s Stat) String() string { - return fmt.Sprintf("%d %s %s %s", s.Complexity, s.PkgName, s.FuncName, s.Pos) -} - -// Stats hold the cyclomatic complexities of many functions. -type Stats []Stat - -// AverageComplexity calculates the average cyclomatic complexity of the -// cyclomatic complexities in s. 
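The exported gocyclo API deleted here (Analyze, Stats, Stat.String, AverageComplexity) can also be driven as a library; a minimal sketch using the import path of this vendor directory:

    package main

    import (
        "fmt"
        "regexp"

        "github.com/fzipp/gocyclo"
    )

    func main() {
        // Analyze every Go file under the current directory, skipping tests and vendored code.
        stats := gocyclo.Analyze([]string{"."}, regexp.MustCompile(`_test\.go$|/vendor/`))
        for _, s := range stats {
            fmt.Println(s) // "<complexity> <package> <function> <file:line:column>"
        }
        fmt.Printf("Average: %.2f\n", stats.AverageComplexity())
    }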
-func (s Stats) AverageComplexity() float64 { - return float64(s.TotalComplexity()) / float64(len(s)) -} - -// TotalComplexity calculates the total sum of all cyclomatic -// complexities in s. -func (s Stats) TotalComplexity() uint64 { - total := uint64(0) - for _, stat := range s { - total += uint64(stat.Complexity) - } - return total -} - -// SortAndFilter sorts the cyclomatic complexities in s in descending order -// and returns a slice of s limited to the 'top' N entries with a cyclomatic -// complexity greater than 'over'. If 'top' is negative, i.e. -1, it does -// not limit the result. If 'over' is <= 0 it does not limit the result either, -// because a function has a base cyclomatic complexity of at least 1. -func (s Stats) SortAndFilter(top, over int) Stats { - result := make(Stats, len(s)) - copy(result, s) - sort.Stable(byComplexityDesc(result)) - for i, stat := range result { - if i == top { - return result[:i] - } - if stat.Complexity <= over { - return result[:i] - } - } - return result -} - -type byComplexityDesc Stats - -func (s byComplexityDesc) Len() int { return len(s) } -func (s byComplexityDesc) Swap(i, j int) { s[i], s[j] = s[j], s[i] } -func (s byComplexityDesc) Less(i, j int) bool { - return s[i].Complexity >= s[j].Complexity -} diff --git a/vendor/github.com/ghostiam/protogetter/.goreleaser.yaml b/vendor/github.com/ghostiam/protogetter/.goreleaser.yaml deleted file mode 100644 index a70d0fb00..000000000 --- a/vendor/github.com/ghostiam/protogetter/.goreleaser.yaml +++ /dev/null @@ -1,24 +0,0 @@ -before: - hooks: - - go mod tidy -builds: - - id: protogetter - main: ./cmd/protogetter - binary: protogetter - env: - - CGO_ENABLED=0 - goos: - - linux - - windows - - darwin -checksum: - name_template: 'checksums.txt' -snapshot: - name_template: "{{ incpatch .Version }}-next" -changelog: - sort: asc - filters: - exclude: - - '^docs:' - - '^test:' - - '^ci:' \ No newline at end of file diff --git a/vendor/github.com/ghostiam/protogetter/LICENSE b/vendor/github.com/ghostiam/protogetter/LICENSE deleted file mode 100644 index b4449661b..000000000 --- a/vendor/github.com/ghostiam/protogetter/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -MIT License - -Copyright (c) 2023 Vladislav Fursov (GhostIAm) - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. 
\ No newline at end of file diff --git a/vendor/github.com/ghostiam/protogetter/Makefile b/vendor/github.com/ghostiam/protogetter/Makefile deleted file mode 100644 index 4c2a62af1..000000000 --- a/vendor/github.com/ghostiam/protogetter/Makefile +++ /dev/null @@ -1,9 +0,0 @@ -.PHONY: test -test: - $(MAKE) -C testdata vendor - go test -v ./... - -.PHONY: install -install: - go install ./cmd/protogetter - @echo "Installed in $(shell which protogetter)" diff --git a/vendor/github.com/ghostiam/protogetter/README.md b/vendor/github.com/ghostiam/protogetter/README.md deleted file mode 100644 index c033e9597..000000000 --- a/vendor/github.com/ghostiam/protogetter/README.md +++ /dev/null @@ -1,73 +0,0 @@ -# Protogetter -Welcome to the Protogetter project! - -## Overview -Protogetter is a linter developed specifically for Go programmers working with nested `protobuf` types.\ -It's designed to aid developers in preventing `invalid memory address or nil pointer dereference` errors arising from direct access of nested `protobuf` fields. - -When working with `protobuf`, it's quite common to have complex structures where a message field is contained within another message, which itself can be part of another message, and so on. -If these fields are accessed directly and some field in the call chain will not be initialized, it can result in application panic. - -Protogetter addresses this issue by suggesting use of getter methods for field access. - -## How does it work? -Protogetter analyzes your Go code and helps detect direct `protobuf` field accesses that could give rise to panic.\ -The linter suggests using getters: -```go -m.GetFoo().GetBar().GetBaz() -``` -instead of direct field access: -```go -m.Foo.Bar.Baz -``` - -And you will then only need to perform a nil check after the final call: -```go -if m.GetFoo().GetBar().GetBaz() != nil { - // Do something with m.GetFoo().GetBar().GetBaz() -} -``` -instead of: -```go -if m.Foo != nil { - if m.Foo.Bar != nil { - if m.Foo.Bar.Baz != nil { - // Do something with m.Foo.Bar.Baz - } - } -} -``` - -or use zero values: - -```go -// If one of the methods returns `nil` we will receive 0 instead of panic. -v := m.GetFoo().GetBar().GetBaz().GetInt() -``` - -instead of panic: - -```go -// If at least one structure in the chains is not initialized, we will get a panic. -v := m.Foo.Bar.Baz.Int -``` - -which simplifies the code and makes it more reliable. - -## Installation - -```bash -go install github.com/ghostiam/protogetter/cmd/protogetter@latest -``` - -## Usage - -To run the linter: -```bash -protogetter ./... -``` - -Or to apply suggested fixes directly: -```bash -protogetter --fix ./... 
-``` diff --git a/vendor/github.com/ghostiam/protogetter/posfilter.go b/vendor/github.com/ghostiam/protogetter/posfilter.go deleted file mode 100644 index 82075ccb1..000000000 --- a/vendor/github.com/ghostiam/protogetter/posfilter.go +++ /dev/null @@ -1,65 +0,0 @@ -package protogetter - -import ( - "go/token" -) - -type PosFilter struct { - positions map[token.Pos]struct{} - alreadyReplaced map[string]map[int][2]int // map[filename][line][start, end] -} - -func NewPosFilter() *PosFilter { - return &PosFilter{ - positions: make(map[token.Pos]struct{}), - alreadyReplaced: make(map[string]map[int][2]int), - } -} - -func (f *PosFilter) IsFiltered(pos token.Pos) bool { - _, ok := f.positions[pos] - return ok -} - -func (f *PosFilter) AddPos(pos token.Pos) { - f.positions[pos] = struct{}{} -} - -func (f *PosFilter) IsAlreadyReplaced(fset *token.FileSet, pos, end token.Pos) bool { - filePos := fset.Position(pos) - fileEnd := fset.Position(end) - - lines, ok := f.alreadyReplaced[filePos.Filename] - if !ok { - return false - } - - lineRange, ok := lines[filePos.Line] - if !ok { - return false - } - - if lineRange[0] <= filePos.Offset && fileEnd.Offset <= lineRange[1] { - return true - } - - return false -} - -func (f *PosFilter) AddAlreadyReplaced(fset *token.FileSet, pos, end token.Pos) { - filePos := fset.Position(pos) - fileEnd := fset.Position(end) - - lines, ok := f.alreadyReplaced[filePos.Filename] - if !ok { - lines = make(map[int][2]int) - f.alreadyReplaced[filePos.Filename] = lines - } - - lineRange, ok := lines[filePos.Line] - if ok && lineRange[0] <= filePos.Offset && fileEnd.Offset <= lineRange[1] { - return - } - - lines[filePos.Line] = [2]int{filePos.Offset, fileEnd.Offset} -} diff --git a/vendor/github.com/ghostiam/protogetter/processor.go b/vendor/github.com/ghostiam/protogetter/processor.go deleted file mode 100644 index d65199dd2..000000000 --- a/vendor/github.com/ghostiam/protogetter/processor.go +++ /dev/null @@ -1,353 +0,0 @@ -package protogetter - -import ( - "fmt" - "go/ast" - "go/token" - "go/types" - "reflect" - "strings" -) - -type processor struct { - info *types.Info - filter *PosFilter - cfg *Config - - to strings.Builder - from strings.Builder - err error -} - -func Process(info *types.Info, filter *PosFilter, n ast.Node, cfg *Config) (*Result, error) { - p := &processor{ - info: info, - filter: filter, - cfg: cfg, - } - - return p.process(n) -} - -func (c *processor) process(n ast.Node) (*Result, error) { - switch x := n.(type) { - case *ast.AssignStmt: - // Skip any assignment to the field. - for _, s := range x.Lhs { - c.filter.AddPos(s.Pos()) - - if se, ok := s.(*ast.StarExpr); ok { - c.filter.AddPos(se.X.Pos()) - } - } - - case *ast.IncDecStmt: - // Skip any increment/decrement to the field. - c.filter.AddPos(x.X.Pos()) - - case *ast.UnaryExpr: - if x.Op == token.AND { - // Skip all expressions when the field is used as a pointer. - // Because this is not direct reading, but most likely writing by pointer (for example like sql.Scan). - c.filter.AddPos(x.X.Pos()) - } - - case *ast.CallExpr: - if !c.cfg.ReplaceFirstArgInAppend && len(x.Args) > 0 { - if v, ok := x.Fun.(*ast.Ident); ok && v.Name == "append" { - // Skip first argument of append function. 
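The process cases above deliberately leave some direct accesses alone; illustrative statements (msg, buildFoo, row and v are hypothetical names):

    msg.Foo = buildFoo()           // assignment LHS: position is filtered, no getter suggested
    msg.Counter++                  // inc/dec statement: filtered
    row.Scan(&msg.Foo)             // &field: filtered, most likely written through the pointer
    msg.List = append(msg.List, v) // first append argument is kept unless ReplaceFirstArgInAppend is set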
- c.filter.AddPos(x.Args[0].Pos()) - break - } - } - - f, ok := x.Fun.(*ast.SelectorExpr) - if !ok { - return &Result{}, nil - } - - if !isProtoMessage(c.info, f.X) { - return &Result{}, nil - } - - c.processInner(x) - - case *ast.SelectorExpr: - if !isProtoMessage(c.info, x.X) { - // If the selector is not on a proto message, skip it. - return &Result{}, nil - } - - c.processInner(x) - - case *ast.StarExpr: - f, ok := x.X.(*ast.SelectorExpr) - if !ok { - return &Result{}, nil - } - - if !isProtoMessage(c.info, f.X) { - return &Result{}, nil - } - - // proto2 generates fields as pointers. Hence, the indirection - // must be removed when generating the fix for the case. - // The `*` is retained in `c.from`, but excluded from the fix - // present in the `c.to`. - c.writeFrom("*") - c.processInner(x.X) - - case *ast.BinaryExpr: - // Check if the expression is a comparison. - if x.Op != token.EQL && x.Op != token.NEQ { - return &Result{}, nil - } - - // Check if one of the operands is nil. - - xIdent, xOk := x.X.(*ast.Ident) - yIdent, yOk := x.Y.(*ast.Ident) - - xIsNil := xOk && xIdent.Name == "nil" - yIsNil := yOk && yIdent.Name == "nil" - - if !xIsNil && !yIsNil { - return &Result{}, nil - } - - // Extract the non-nil operand for further checks - - var expr ast.Expr - if xIsNil { - expr = x.Y - } else { - expr = x.X - } - - se, ok := expr.(*ast.SelectorExpr) - if !ok { - return &Result{}, nil - } - - if !isProtoMessage(c.info, se.X) { - return &Result{}, nil - } - - // Check if the Getter function of the protobuf message returns a pointer. - hasPointer, ok := getterResultHasPointer(c.info, se.X, se.Sel.Name) - if !ok || hasPointer { - return &Result{}, nil - } - - c.filter.AddPos(x.X.Pos()) - - default: - return nil, fmt.Errorf("not implemented for type: %s (%s)", reflect.TypeOf(x), formatNode(n)) - } - - if c.err != nil { - return nil, c.err - } - - return &Result{ - From: c.from.String(), - To: c.to.String(), - }, nil -} - -func (c *processor) processInner(expr ast.Expr) { - switch x := expr.(type) { - case *ast.Ident: - c.write(x.Name) - - case *ast.BasicLit: - c.write(x.Value) - - case *ast.UnaryExpr: - if x.Op == token.AND { - c.write(formatNode(x)) - return - } - - c.write(x.Op.String()) - c.processInner(x.X) - - case *ast.SelectorExpr: - c.processInner(x.X) - c.write(".") - - // If getter exists, use it. - if methodIsExists(c.info, x.X, "Get"+x.Sel.Name) { - c.writeFrom(x.Sel.Name) - c.writeTo("Get" + x.Sel.Name + "()") - return - } - - // If the selector is not a proto-message or the method has already been called, we leave it unchanged. - // This approach is significantly more efficient than verifying the presence of methods in all cases. 
- c.write(x.Sel.Name) - - case *ast.CallExpr: - c.processInner(x.Fun) - c.write("(") - for i, arg := range x.Args { - if i > 0 { - c.write(",") - } - c.processInner(arg) - } - c.write(")") - - case *ast.IndexExpr: - c.processInner(x.X) - c.write("[") - c.processInner(x.Index) - c.write("]") - - case *ast.BinaryExpr: - c.processInner(x.X) - c.write(x.Op.String()) - c.processInner(x.Y) - - case *ast.ParenExpr: - c.write("(") - c.processInner(x.X) - c.write(")") - - case *ast.StarExpr: - c.write("*") - c.processInner(x.X) - - case *ast.CompositeLit: - c.write(formatNode(x)) - - case *ast.TypeAssertExpr: - c.write(formatNode(x)) - - default: - c.err = fmt.Errorf("processInner: not implemented for type: %s", reflect.TypeOf(x)) - } -} - -func (c *processor) write(s string) { - c.writeTo(s) - c.writeFrom(s) -} - -func (c *processor) writeTo(s string) { - c.to.WriteString(s) -} - -func (c *processor) writeFrom(s string) { - c.from.WriteString(s) -} - -// Result contains source code (from) and suggested change (to) -type Result struct { - From string - To string -} - -func (r *Result) Skipped() bool { - // If from and to are the same, skip it. - return r.From == r.To -} - -func isProtoMessage(info *types.Info, expr ast.Expr) bool { - // First, we are checking for the presence of the ProtoReflect method which is currently being generated - // and corresponds to v2 version. - // https://pkg.go.dev/google.golang.org/protobuf@v1.31.0/proto#Message - const protoV2Method = "ProtoReflect" - ok := methodIsExists(info, expr, protoV2Method) - if ok { - return true - } - - // Afterwards, we are checking the ProtoMessage method. All the structures that implement the proto.Message interface - // have a ProtoMessage method and are proto-structures. This interface has been generated since version 1.0.0 and - // continues to exist for compatibility. - // https://pkg.go.dev/github.com/golang/protobuf/proto?utm_source=godoc#Message - const protoV1Method = "ProtoMessage" - ok = methodIsExists(info, expr, protoV1Method) - if ok { - // Since there is a protoc-gen-gogo generator that implements the proto.Message interface, but may not generate - // getters or generate from without checking for nil, so even if getters exist, we skip them. 
- const protocGenGoGoMethod = "MarshalToSizedBuffer" - return !methodIsExists(info, expr, protocGenGoGoMethod) - } - - return false -} - -func typesNamed(info *types.Info, x ast.Expr) (*types.Named, bool) { - if info == nil { - return nil, false - } - - t := info.TypeOf(x) - if t == nil { - return nil, false - } - - ptr, ok := t.Underlying().(*types.Pointer) - if ok { - t = ptr.Elem() - } - - named, ok := t.(*types.Named) - if !ok { - return nil, false - } - - return named, true -} - -func methodIsExists(info *types.Info, x ast.Expr, name string) bool { - named, ok := typesNamed(info, x) - if !ok { - return false - } - - for i := 0; i < named.NumMethods(); i++ { - if named.Method(i).Name() == name { - return true - } - } - - return false -} - -func getterResultHasPointer(info *types.Info, x ast.Expr, name string) (hasPointer, ok bool) { - named, ok := typesNamed(info, x) - if !ok { - return false, false - } - - for i := 0; i < named.NumMethods(); i++ { - method := named.Method(i) - if method.Name() != "Get"+name { - continue - } - - var sig *types.Signature - sig, ok = method.Type().(*types.Signature) - if !ok { - return false, false - } - - results := sig.Results() - if results.Len() == 0 { - return false, false - } - - firstType := results.At(0) - _, ok = firstType.Type().(*types.Pointer) - if !ok { - return false, true - } - - return true, true - } - - return false, false -} diff --git a/vendor/github.com/ghostiam/protogetter/protogetter.go b/vendor/github.com/ghostiam/protogetter/protogetter.go deleted file mode 100644 index 31eee8572..000000000 --- a/vendor/github.com/ghostiam/protogetter/protogetter.go +++ /dev/null @@ -1,279 +0,0 @@ -package protogetter - -import ( - "bytes" - "flag" - "fmt" - "go/ast" - "go/format" - "go/token" - "log" - "path/filepath" - "strings" - - "github.com/gobwas/glob" - "golang.org/x/tools/go/analysis" - "golang.org/x/tools/go/ast/inspector" -) - -type Mode int - -const ( - StandaloneMode Mode = iota - GolangciLintMode -) - -const msgFormat = "avoid direct access to proto field %s, use %s instead" - -func NewAnalyzer(cfg *Config) *analysis.Analyzer { - if cfg == nil { - cfg = &Config{} - } - - return &analysis.Analyzer{ - Name: "protogetter", - Doc: "Reports direct reads from proto message fields when getters should be used", - Flags: flags(cfg), - Run: func(pass *analysis.Pass) (any, error) { - _, err := Run(pass, cfg) - return nil, err - }, - } -} - -func flags(opts *Config) flag.FlagSet { - fs := flag.NewFlagSet("protogetter", flag.ContinueOnError) - - fs.Func("skip-generated-by", "skip files generated with the given prefixes", func(s string) error { - for _, prefix := range strings.Split(s, ",") { - opts.SkipGeneratedBy = append(opts.SkipGeneratedBy, prefix) - } - return nil - }) - fs.Func("skip-files", "skip files with the given glob patterns", func(s string) error { - for _, pattern := range strings.Split(s, ",") { - opts.SkipFiles = append(opts.SkipFiles, pattern) - } - return nil - }) - fs.BoolVar(&opts.SkipAnyGenerated, "skip-any-generated", false, "skip any generated files") - - return *fs -} - -type Config struct { - Mode Mode // Zero value is StandaloneMode. - SkipGeneratedBy []string - SkipFiles []string - SkipAnyGenerated bool - ReplaceFirstArgInAppend bool -} - -func Run(pass *analysis.Pass, cfg *Config) ([]Issue, error) { - skipGeneratedBy := make([]string, 0, len(cfg.SkipGeneratedBy)+3) - // Always skip files generated by protoc-gen-go, protoc-gen-go-grpc and protoc-gen-grpc-gateway. 
- skipGeneratedBy = append(skipGeneratedBy, "protoc-gen-go", "protoc-gen-go-grpc", "protoc-gen-grpc-gateway") - for _, s := range cfg.SkipGeneratedBy { - s = strings.TrimSpace(s) - if s == "" { - continue - } - skipGeneratedBy = append(skipGeneratedBy, s) - } - - skipFilesGlobPatterns := make([]glob.Glob, 0, len(cfg.SkipFiles)) - for _, s := range cfg.SkipFiles { - s = strings.TrimSpace(s) - if s == "" { - continue - } - - compile, err := glob.Compile(s) - if err != nil { - return nil, fmt.Errorf("invalid glob pattern: %w", err) - } - - skipFilesGlobPatterns = append(skipFilesGlobPatterns, compile) - } - - nodeTypes := []ast.Node{ - (*ast.AssignStmt)(nil), - (*ast.BinaryExpr)(nil), - (*ast.CallExpr)(nil), - (*ast.SelectorExpr)(nil), - (*ast.StarExpr)(nil), - (*ast.IncDecStmt)(nil), - (*ast.UnaryExpr)(nil), - } - - // Skip filtered files. - var files []*ast.File - for _, f := range pass.Files { - if skipGeneratedFile(f, skipGeneratedBy, cfg.SkipAnyGenerated) { - continue - } - - if skipFilesByGlob(pass.Fset.File(f.Pos()).Name(), skipFilesGlobPatterns) { - continue - } - - files = append(files, f) - - // ast.Print(pass.Fset, f) - } - - ins := inspector.New(files) - - var issues []Issue - - filter := NewPosFilter() - ins.Preorder(nodeTypes, func(node ast.Node) { - report := analyse(pass, filter, node, cfg) - if report == nil { - return - } - - switch cfg.Mode { - case StandaloneMode: - pass.Report(report.ToDiagReport()) - case GolangciLintMode: - issues = append(issues, report.ToIssue(pass.Fset)) - } - }) - - return issues, nil -} - -func analyse(pass *analysis.Pass, filter *PosFilter, n ast.Node, cfg *Config) *Report { - // fmt.Printf("\n>>> check: %s\n", formatNode(n)) - // ast.Print(pass.Fset, n) - if filter.IsFiltered(n.Pos()) { - // fmt.Printf(">>> filtered\n") - return nil - } - - result, err := Process(pass.TypesInfo, filter, n, cfg) - if err != nil { - pass.Report(analysis.Diagnostic{ - Pos: n.Pos(), - End: n.End(), - Message: fmt.Sprintf("error: %v", err), - }) - - return nil - } - - // If existing in filter, skip it. - if filter.IsFiltered(n.Pos()) { - return nil - } - - if result.Skipped() { - return nil - } - - // If the expression has already been replaced, skip it. - if filter.IsAlreadyReplaced(pass.Fset, n.Pos(), n.End()) { - return nil - } - // Add the expression to the filter. - filter.AddAlreadyReplaced(pass.Fset, n.Pos(), n.End()) - - return &Report{ - node: n, - result: result, - } -} - -// Issue is used to integrate with golangci-lint's inline auto fix. 
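Putting the pieces together, a direct read of a nested field goes through the analyse/Report path above and produces a diagnostic built from msgFormat and the processor's Result; roughly (msg is again a hypothetical generated message):

    v := msg.Foo.Bar // reported: "avoid direct access to proto field msg.Foo.Bar, use msg.GetFoo().GetBar() instead"

    // In StandaloneMode the report carries a SuggestedFix replacing the expression with
    // msg.GetFoo().GetBar(); in GolangciLintMode the same edit is returned as an InlineFix.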
-type Issue struct { - Pos token.Position - Message string - InlineFix InlineFix -} - -type InlineFix struct { - StartCol int // zero-based - Length int - NewString string -} - -type Report struct { - node ast.Node - result *Result -} - -func (r *Report) ToDiagReport() analysis.Diagnostic { - msg := fmt.Sprintf(msgFormat, r.result.From, r.result.To) - - return analysis.Diagnostic{ - Pos: r.node.Pos(), - End: r.node.End(), - Message: msg, - SuggestedFixes: []analysis.SuggestedFix{ - { - Message: msg, - TextEdits: []analysis.TextEdit{ - { - Pos: r.node.Pos(), - End: r.node.End(), - NewText: []byte(r.result.To), - }, - }, - }, - }, - } -} - -func (r *Report) ToIssue(fset *token.FileSet) Issue { - msg := fmt.Sprintf(msgFormat, r.result.From, r.result.To) - return Issue{ - Pos: fset.Position(r.node.Pos()), - Message: msg, - InlineFix: InlineFix{ - StartCol: fset.Position(r.node.Pos()).Column - 1, - Length: len(r.result.From), - NewString: r.result.To, - }, - } -} - -func skipGeneratedFile(f *ast.File, prefixes []string, skipAny bool) bool { - if len(f.Comments) == 0 { - return false - } - firstComment := f.Comments[0].Text() - - // https://golang.org/s/generatedcode - if skipAny && strings.HasPrefix(firstComment, "Code generated") { - return true - } - - for _, prefix := range prefixes { - if strings.HasPrefix(firstComment, "Code generated by "+prefix) { - return true - } - } - - return false -} - -func skipFilesByGlob(filename string, patterns []glob.Glob) bool { - for _, pattern := range patterns { - if pattern.Match(filename) || pattern.Match(filepath.Base(filename)) { - return true - } - } - - return false -} - -func formatNode(node ast.Node) string { - buf := new(bytes.Buffer) - if err := format.Node(buf, token.NewFileSet(), node); err != nil { - log.Printf("Error formatting expression: %v", err) - return "" - } - - return buf.String() -} diff --git a/vendor/github.com/go-critic/go-critic/LICENSE b/vendor/github.com/go-critic/go-critic/LICENSE deleted file mode 100644 index 5198a4a94..000000000 --- a/vendor/github.com/go-critic/go-critic/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -MIT License - -Copyright (c) 2018-2021 go-critic team - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. 
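For standalone use, the NewAnalyzer constructor removed above plugs straight into the x/tools analysis driver; a sketch of a minimal main, which may differ from the real cmd/protogetter wiring:

    package main

    import (
        "github.com/ghostiam/protogetter"
        "golang.org/x/tools/go/analysis/singlechecker"
    )

    func main() {
        singlechecker.Main(protogetter.NewAnalyzer(&protogetter.Config{
            SkipAnyGenerated: true, // skip any file with a "Code generated" header
        }))
    }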
diff --git a/vendor/github.com/go-critic/go-critic/checkers/appendAssign_checker.go b/vendor/github.com/go-critic/go-critic/checkers/appendAssign_checker.go deleted file mode 100644 index 2a67dccec..000000000 --- a/vendor/github.com/go-critic/go-critic/checkers/appendAssign_checker.go +++ /dev/null @@ -1,103 +0,0 @@ -package checkers - -import ( - "go/ast" - "go/token" - "go/types" - - "github.com/go-critic/go-critic/checkers/internal/astwalk" - "github.com/go-critic/go-critic/linter" - - "github.com/go-toolsmith/astequal" - "github.com/go-toolsmith/astp" - "golang.org/x/tools/go/ast/astutil" -) - -func init() { - var info linter.CheckerInfo - info.Name = "appendAssign" - info.Tags = []string{linter.DiagnosticTag} - info.Summary = "Detects suspicious append result assignments" - info.Before = ` -p.positives = append(p.negatives, x) -p.negatives = append(p.negatives, y)` - info.After = ` -p.positives = append(p.positives, x) -p.negatives = append(p.negatives, y)` - - collection.AddChecker(&info, func(ctx *linter.CheckerContext) (linter.FileWalker, error) { - return astwalk.WalkerForStmt(&appendAssignChecker{ctx: ctx}), nil - }) -} - -type appendAssignChecker struct { - astwalk.WalkHandler - ctx *linter.CheckerContext -} - -func (c *appendAssignChecker) VisitStmt(stmt ast.Stmt) { - assign, ok := stmt.(*ast.AssignStmt) - if !ok || (assign.Tok != token.ASSIGN && assign.Tok != token.DEFINE) || len(assign.Lhs) != len(assign.Rhs) { - return - } - for i, rhs := range assign.Rhs { - call, ok := rhs.(*ast.CallExpr) - if !ok || qualifiedName(call.Fun) != "append" { - continue - } - c.checkAppend(assign.Lhs[i], call) - } -} - -func (c *appendAssignChecker) checkAppend(x ast.Expr, call *ast.CallExpr) { - if call.Ellipsis != token.NoPos { - // Try to detect `xs = append(ys, xs...)` idiom. - for _, arg := range call.Args[1:] { - y := arg - if arg, ok := arg.(*ast.SliceExpr); ok { - y = arg.X - } - if astequal.Expr(x, y) { - return - } - } - } - - switch x := x.(type) { - case *ast.Ident: - if x.Name == "_" { - return // Don't check assignments to blank ident - } - case *ast.IndexExpr: - if !astp.IsIndexExpr(call.Args[0]) { - // Most likely `m[k] = append(x, ...)` - // pattern, where x was retrieved by m[k] before. - // - // TODO: it's possible to record such map/slice reads - // and check whether it was done before this call. - // But for now, treat it like x belongs to m[k]. - return - } - } - - switch y := call.Args[0].(type) { - case *ast.SliceExpr: - if _, ok := c.ctx.TypeOf(y.X).(*types.Array); ok { - // Arrays are frequently used as scratch storages. 
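Two statements that illustrate the checkAppend logic above (names are illustrative):

    p.positives = append(p.negatives, x) // reported: append result assigned to a different slice than the one appended to
    xs = append(ys, xs...)               // not reported: the trailing xs... matches the assignee, the usual concat idiom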
- return - } - c.matchSlices(call, x, y.X) - case *ast.IndexExpr, *ast.Ident, *ast.SelectorExpr: - c.matchSlices(call, x, y) - } -} - -func (c *appendAssignChecker) matchSlices(cause ast.Node, x, y ast.Expr) { - if !astequal.Expr(x, astutil.Unparen(y)) { - c.warn(cause) - } -} - -func (c *appendAssignChecker) warn(cause ast.Node) { - c.ctx.Warn(cause, "append result not assigned to the same slice") -} diff --git a/vendor/github.com/go-critic/go-critic/checkers/appendCombine_checker.go b/vendor/github.com/go-critic/go-critic/checkers/appendCombine_checker.go deleted file mode 100644 index 81a7aa30b..000000000 --- a/vendor/github.com/go-critic/go-critic/checkers/appendCombine_checker.go +++ /dev/null @@ -1,103 +0,0 @@ -package checkers - -import ( - "go/ast" - "go/token" - - "github.com/go-critic/go-critic/checkers/internal/astwalk" - "github.com/go-critic/go-critic/linter" - - "github.com/go-toolsmith/astcast" - "github.com/go-toolsmith/astequal" -) - -func init() { - var info linter.CheckerInfo - info.Name = "appendCombine" - info.Tags = []string{linter.PerformanceTag} - info.Summary = "Detects `append` chains to the same slice that can be done in a single `append` call" - info.Before = ` -xs = append(xs, 1) -xs = append(xs, 2)` - info.After = `xs = append(xs, 1, 2)` - - collection.AddChecker(&info, func(ctx *linter.CheckerContext) (linter.FileWalker, error) { - return astwalk.WalkerForStmtList(&appendCombineChecker{ctx: ctx}), nil - }) -} - -type appendCombineChecker struct { - astwalk.WalkHandler - ctx *linter.CheckerContext -} - -func (c *appendCombineChecker) VisitStmtList(_ ast.Node, list []ast.Stmt) { - var cause ast.Node // First append - var slice ast.Expr // Slice being appended to - chain := 0 // How much appends in a row we've seen - - // Break the chain. - // If enough appends are in chain, print warning. - flush := func() { - if chain > 1 { - c.warn(cause, chain) - } - chain = 0 - slice = nil - } - - for _, stmt := range list { - call := c.matchAppend(stmt, slice) - if call == nil { - flush() - continue - } - - if chain == 0 { - // First append in a chain. - chain = 1 - slice = call.Args[0] - cause = stmt - } else { - chain++ - } - } - - // Required for printing chains that consist of trailing - // statements from the list. - flush() -} - -func (c *appendCombineChecker) matchAppend(stmt ast.Stmt, slice ast.Expr) *ast.CallExpr { - // Seeking for: - // slice = append(slice, xs...) - // xs are 0-N append arguments, but not variadic argument, - // because it makes append combining impossible. - - assign := astcast.ToAssignStmt(stmt) - if len(assign.Lhs) != 1 || len(assign.Rhs) != 1 { - return nil - } - - call, ok := assign.Rhs[0].(*ast.CallExpr) - { - cond := ok && - qualifiedName(call.Fun) == "append" && - call.Ellipsis == token.NoPos && - astequal.Expr(assign.Lhs[0], call.Args[0]) - if !cond { - return nil - } - } - - // Check that current append slice match previous append slice. - // Otherwise we should break the chain. 
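What the chain detection above does and does not flag, in source terms:

    xs = append(xs, 1)
    xs = append(xs, 2) // chain of two appends to xs: reported as combinable into append(xs, 1, 2)

    ys = append(ys, a)
    ys = append(ys, bs...) // variadic append breaks the chain, so nothing is reported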
- if slice == nil || astequal.Expr(slice, call.Args[0]) { - return call - } - return nil -} - -func (c *appendCombineChecker) warn(cause ast.Node, chain int) { - c.ctx.Warn(cause, "can combine chain of %d appends into one", chain) -} diff --git a/vendor/github.com/go-critic/go-critic/checkers/badCond_checker.go b/vendor/github.com/go-critic/go-critic/checkers/badCond_checker.go deleted file mode 100644 index 9be45ccc7..000000000 --- a/vendor/github.com/go-critic/go-critic/checkers/badCond_checker.go +++ /dev/null @@ -1,161 +0,0 @@ -package checkers - -import ( - "go/ast" - "go/constant" - "go/token" - - "github.com/go-critic/go-critic/checkers/internal/astwalk" - "github.com/go-critic/go-critic/checkers/internal/lintutil" - "github.com/go-critic/go-critic/linter" - - "github.com/go-toolsmith/astcast" - "github.com/go-toolsmith/astcopy" - "github.com/go-toolsmith/astequal" - "github.com/go-toolsmith/typep" - "golang.org/x/tools/go/ast/astutil" -) - -func init() { - var info linter.CheckerInfo - info.Name = "badCond" - info.Tags = []string{linter.DiagnosticTag} - info.Summary = "Detects suspicious condition expressions" - info.Before = ` -for i := 0; i > n; i++ { - xs[i] = 0 -}` - info.After = ` -for i := 0; i < n; i++ { - xs[i] = 0 -}` - - collection.AddChecker(&info, func(ctx *linter.CheckerContext) (linter.FileWalker, error) { - return astwalk.WalkerForFuncDecl(&badCondChecker{ctx: ctx}), nil - }) -} - -type badCondChecker struct { - astwalk.WalkHandler - ctx *linter.CheckerContext -} - -func (c *badCondChecker) VisitFuncDecl(decl *ast.FuncDecl) { - ast.Inspect(decl.Body, func(n ast.Node) bool { - switch n := n.(type) { - case *ast.ForStmt: - c.checkForStmt(n) - case ast.Expr: - c.checkExpr(n) - } - return true - }) -} - -func (c *badCondChecker) checkExpr(expr ast.Expr) { - // TODO(quasilyte): recognize more patterns. - - cond := astcast.ToBinaryExpr(expr) - lhs := astcast.ToBinaryExpr(astutil.Unparen(cond.X)) - rhs := astcast.ToBinaryExpr(astutil.Unparen(cond.Y)) - - if cond.Op != token.LAND { - return - } - - // Notes: - // `x != a || x != b` handled by go vet. - - // Pattern 1. - // `x < a && x > b`; Where `a` is less than `b`. - if c.lessAndGreater(lhs, rhs) { - c.warnCond(cond, "always false") - return - } - - // Pattern 2. - // `x == a && x == b` - // - // Valid when `b == a` is intended, but still reported. - // We can disable "just suspicious" warnings by default - // is users are upset with the current behavior. - if c.equalToBoth(lhs, rhs) { - c.warnCond(cond, "suspicious") - return - } -} - -func (c *badCondChecker) equalToBoth(lhs, rhs *ast.BinaryExpr) bool { - return lhs.Op == token.EQL && rhs.Op == token.EQL && - astequal.Expr(lhs.X, rhs.X) -} - -func (c *badCondChecker) lessAndGreater(lhs, rhs *ast.BinaryExpr) bool { - if lhs.Op != token.LSS || rhs.Op != token.GTR { - return false - } - if !astequal.Expr(lhs.X, rhs.X) { - return false - } - a := c.ctx.TypesInfo.Types[lhs.Y].Value - b := c.ctx.TypesInfo.Types[rhs.Y].Value - return a != nil && b != nil && constant.Compare(a, token.LSS, b) -} - -func (c *badCondChecker) checkForStmt(stmt *ast.ForStmt) { - // TODO(quasilyte): handle other kinds of bad conditionals. 
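Concrete conditions matched by the two expression patterns above:

    func f(x int, tag string) {
        if x < 2 && x > 10 { // Pattern 1: constant bounds with 2 < 10, so the condition is always false
            panic("unreachable")
        }
        if tag == "a" && tag == "b" { // Pattern 2: equal-to-both, reported as suspicious
            panic("unreachable")
        }
    }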
- - init := astcast.ToAssignStmt(stmt.Init) - if init.Tok != token.DEFINE || len(init.Lhs) != 1 || len(init.Rhs) != 1 { - return - } - if astcast.ToBasicLit(init.Rhs[0]).Value != "0" { - return - } - - iter := astcast.ToIdent(init.Lhs[0]) - cond := astcast.ToBinaryExpr(stmt.Cond) - - var i, n ast.Expr - var op token.Token - switch { - case cond.Op == token.GTR && astequal.Expr(iter, cond.X): - i = cond.X - n = cond.Y - op = token.LSS - case cond.Op == token.LSS && astequal.Expr(iter, cond.Y): - i = cond.Y - n = cond.X - op = token.GTR - default: - return - } - - if !typep.SideEffectFree(c.ctx.TypesInfo, n) { - return - } - - post := astcast.ToIncDecStmt(stmt.Post) - if post.Tok != token.INC || !astequal.Expr(iter, i) { - return - } - - mutated := lintutil.CouldBeMutated(c.ctx.TypesInfo, stmt.Body, n) || - lintutil.CouldBeMutated(c.ctx.TypesInfo, stmt.Body, iter) - if mutated { - return - } - - c.warnForStmt(stmt, op, cond) -} - -func (c *badCondChecker) warnForStmt(cause ast.Node, op token.Token, cond *ast.BinaryExpr) { - suggest := astcopy.BinaryExpr(cond) - suggest.Op = op - c.ctx.Warn(cause, "`%s` in loop; probably meant `%s`?", - cond, suggest) -} - -func (c *badCondChecker) warnCond(cond *ast.BinaryExpr, tag string) { - c.ctx.Warn(cond, "`%s` condition is %s", cond, tag) -} diff --git a/vendor/github.com/go-critic/go-critic/checkers/badRegexp_checker.go b/vendor/github.com/go-critic/go-critic/checkers/badRegexp_checker.go deleted file mode 100644 index 6c6845053..000000000 --- a/vendor/github.com/go-critic/go-critic/checkers/badRegexp_checker.go +++ /dev/null @@ -1,446 +0,0 @@ -package checkers - -import ( - "go/ast" - "go/constant" - "sort" - "strconv" - "unicode" - "unicode/utf8" - - "github.com/go-critic/go-critic/checkers/internal/astwalk" - "github.com/go-critic/go-critic/linter" - - "github.com/quasilyte/regex/syntax" -) - -func init() { - var info linter.CheckerInfo - info.Name = "badRegexp" - info.Tags = []string{linter.DiagnosticTag, linter.ExperimentalTag} - info.Summary = "Detects suspicious regexp patterns" - info.Before = "regexp.MustCompile(`(?:^aa|bb|cc)foo[aba]`)" - info.After = "regexp.MustCompile(`^(?:aa|bb|cc)foo[ab]`)" - - collection.AddChecker(&info, func(ctx *linter.CheckerContext) (linter.FileWalker, error) { - opts := &syntax.ParserOptions{} - c := &badRegexpChecker{ - ctx: ctx, - parser: syntax.NewParser(opts), - } - return astwalk.WalkerForExpr(c), nil - }) -} - -type badRegexpChecker struct { - astwalk.WalkHandler - ctx *linter.CheckerContext - - parser *syntax.Parser - cause ast.Expr - - flagStates []regexpFlagState - goodAnchors []syntax.Position -} - -type regexpFlagState [utf8.RuneSelf]bool - -func (c *badRegexpChecker) VisitExpr(x ast.Expr) { - call, ok := x.(*ast.CallExpr) - if !ok { - return - } - - switch qualifiedName(call.Fun) { - case "regexp.Compile", "regexp.MustCompile": - cv := c.ctx.TypesInfo.Types[call.Args[0]].Value - if cv == nil || cv.Kind() != constant.String { - return - } - pat := constant.StringVal(cv) - c.cause = call.Args[0] - c.checkPattern(pat) - } -} - -func (c *badRegexpChecker) checkPattern(pat string) { - re, err := c.parser.Parse(pat) - if err != nil { - return - } - - c.flagStates = c.flagStates[:0] - c.goodAnchors = c.goodAnchors[:0] - - // In Go all flags (modifiers) are set to false by default, - // so we start from the empty flag set. 
- c.flagStates = append(c.flagStates, regexpFlagState{}) - - c.markGoodCarets(re.Expr) - c.walk(re.Expr) -} - -func (c *badRegexpChecker) markGoodCarets(e syntax.Expr) { - canSkip := func(e syntax.Expr) bool { - switch e.Op { - case syntax.OpFlagOnlyGroup: - return true - case syntax.OpGroup: - x := e.Args[0] - return x.Op == syntax.OpConcat && len(x.Args) == 0 - } - return false - } - - if e.Op == syntax.OpConcat && len(e.Args) > 1 { - i := 0 - for i < len(e.Args) && canSkip(e.Args[i]) { - i++ - } - if i < len(e.Args) { - c.markGoodCarets(e.Args[i]) - } - return - } - if e.Op == syntax.OpCaret { - c.addGoodAnchor(e.Pos) - } - for _, a := range e.Args { - c.markGoodCarets(a) - } -} - -func (c *badRegexpChecker) walk(e syntax.Expr) { - switch e.Op { - case syntax.OpAlt: - c.checkAltAnchor(e) - c.checkAltDups(e) - for _, a := range e.Args { - c.walk(a) - } - - case syntax.OpCharClass, syntax.OpNegCharClass: - if c.checkCharClassRanges(e) { - c.checkCharClassDups(e) - } - - case syntax.OpStar, syntax.OpPlus: - c.checkNestedQuantifier(e) - c.walk(e.Args[0]) - - case syntax.OpFlagOnlyGroup: - c.updateFlagState(c.currentFlagState(), e, e.Args[0].Value) - case syntax.OpGroupWithFlags: - // Creates a new context using the current context copy. - // New flags are evaluated inside a new context. - // After nested expressions are processed, previous context is restored. - nflags := len(c.flagStates) - c.flagStates = append(c.flagStates, *c.currentFlagState()) - c.updateFlagState(c.currentFlagState(), e, e.Args[1].Value) - c.walk(e.Args[0]) - c.flagStates = c.flagStates[:nflags] - case syntax.OpGroup, syntax.OpCapture, syntax.OpNamedCapture: - // Like with OpGroupWithFlags, but doesn't evaluate any new flags. - nflags := len(c.flagStates) - c.flagStates = append(c.flagStates, *c.currentFlagState()) - c.walk(e.Args[0]) - c.flagStates = c.flagStates[:nflags] - - case syntax.OpCaret: - if !c.isGoodAnchor(e) { - c.warn("dangling or redundant ^, maybe \\^ is intended?") - } - - default: - for _, a := range e.Args { - c.walk(a) - } - } -} - -func (c *badRegexpChecker) currentFlagState() *regexpFlagState { - return &c.flagStates[len(c.flagStates)-1] -} - -func (c *badRegexpChecker) updateFlagState(state *regexpFlagState, e syntax.Expr, flagString string) { - clearing := false - for i := 0; i < len(flagString); i++ { - ch := flagString[i] - if ch == '-' { - clearing = true - continue - } - if int(ch) >= len(state) { - continue // Should never happen in practice, but we don't want a panic - } - - if clearing { - if !state[ch] { - c.warn("clearing unset flag %c in %s", ch, e.Value) - } - } else { - if state[ch] { - c.warn("redundant flag %c in %s", ch, e.Value) - } - } - state[ch] = !clearing - } -} - -func (c *badRegexpChecker) checkNestedQuantifier(e syntax.Expr) { - x := e.Args[0] - switch x.Op { - case syntax.OpGroup, syntax.OpCapture, syntax.OpGroupWithFlags: - if len(e.Args) == 1 { - x = x.Args[0] - } - } - - switch x.Op { - case syntax.OpPlus, syntax.OpStar: - c.warn("repeated greedy quantifier in %s", e.Value) - } -} - -func (c *badRegexpChecker) checkAltDups(alt syntax.Expr) { - // Seek duplicated alternation expressions. 
- - set := make(map[string]struct{}, len(alt.Args)) - for _, a := range alt.Args { - if _, ok := set[a.Value]; ok { - c.warn("`%s` is duplicated in %s", a.Value, alt.Value) - } - set[a.Value] = struct{}{} - } -} - -func (c *badRegexpChecker) isCharOrLit(e syntax.Expr) bool { - return e.Op == syntax.OpChar || e.Op == syntax.OpLiteral -} - -func (c *badRegexpChecker) checkAltAnchor(alt syntax.Expr) { - // Seek suspicious anchors. - - // Case 1: an alternation of literals where 1st expr begins with ^ anchor. - first := alt.Args[0] - if first.Op == syntax.OpConcat && len(first.Args) == 2 && first.Args[0].Op == syntax.OpCaret && c.isCharOrLit(first.Args[1]) { - matched := true - for _, a := range alt.Args[1:] { - if !c.isCharOrLit(a) { - matched = false - break - } - } - if matched { - c.warn("^ applied only to `%s` in %s", first.Value[len(`^`):], alt.Value) - } - } - - // Case 2: an alternation of literals where last expr ends with $ anchor. - last := alt.Args[len(alt.Args)-1] - if last.Op == syntax.OpConcat && len(last.Args) == 2 && last.Args[1].Op == syntax.OpDollar && c.isCharOrLit(last.Args[0]) { - matched := true - for _, a := range alt.Args[:len(alt.Args)-1] { - if !c.isCharOrLit(a) { - matched = false - break - } - } - if matched { - c.warn("$ applied only to `%s` in %s", last.Value[:len(last.Value)-len(`$`)], alt.Value) - } - } -} - -func (c *badRegexpChecker) checkCharClassRanges(cc syntax.Expr) bool { - // Seek for suspicious ranges like `!-_`. - // - // We permit numerical ranges (0-9, hex and octal literals) - // and simple ascii letter ranges. - - for _, e := range cc.Args { - if e.Op != syntax.OpCharRange { - continue - } - switch e.Args[0].Op { - case syntax.OpEscapeOctal, syntax.OpEscapeHex: - continue - } - ch := c.charClassBoundRune(e.Args[0]) - if ch == 0 { - return false - } - good := unicode.IsLetter(ch) || (ch >= '0' && ch <= '9') - if !good { - c.warnSloppyCharRange(e.Value, cc.Value) - } - } - - return true -} - -func (c *badRegexpChecker) checkCharClassDups(cc syntax.Expr) { - // Seek for excessive elements inside a character class. - // Report them as intersections. - - if len(cc.Args) == 1 { - return // Can't had duplicates. - } - - type charRange struct { - low rune - high rune - source string - } - ranges := make([]charRange, 0, 8) - addRange := func(source string, low, high rune) { - ranges = append(ranges, charRange{source: source, low: low, high: high}) - } - addRange1 := func(source string, ch rune) { - addRange(source, ch, ch) - } - - // 1. Collect ranges, O(n). - for _, e := range cc.Args { - switch e.Op { - case syntax.OpEscapeOctal: - addRange1(e.Value, c.octalToRune(e)) - case syntax.OpEscapeHex: - addRange1(e.Value, c.hexToRune(e)) - case syntax.OpChar: - addRange1(e.Value, c.stringToRune(e.Value)) - case syntax.OpCharRange: - addRange(e.Value, c.charClassBoundRune(e.Args[0]), c.charClassBoundRune(e.Args[1])) - case syntax.OpEscapeMeta: - addRange1(e.Value, rune(e.Value[1])) - case syntax.OpEscapeChar: - ch := c.stringToRune(e.Value[len(`\`):]) - if unicode.IsPunct(ch) { - addRange1(e.Value, ch) - break - } - switch e.Value { - case `\|`, `\<`, `\>`, `\+`, `\=`: // How to cover all symbols? 
- addRange1(e.Value, c.stringToRune(e.Value[len(`\`):])) - case `\t`: - addRange1(e.Value, '\t') - case `\n`: - addRange1(e.Value, '\n') - case `\r`: - addRange1(e.Value, '\r') - case `\v`: - addRange1(e.Value, '\v') - case `\d`: - addRange(e.Value, '0', '9') - case `\D`: - addRange(e.Value, 0, '0'-1) - addRange(e.Value, '9'+1, utf8.MaxRune) - case `\s`: - addRange(e.Value, '\t', '\n') // 9-10 - addRange(e.Value, '\f', '\r') // 12-13 - addRange1(e.Value, ' ') // 32 - case `\S`: - addRange(e.Value, 0, '\t'-1) - addRange(e.Value, '\n'+1, '\f'-1) - addRange(e.Value, '\r'+1, ' '-1) - addRange(e.Value, ' '+1, utf8.MaxRune) - case `\w`: - addRange(e.Value, '0', '9') // 48-57 - addRange(e.Value, 'A', 'Z') // 65-90 - addRange1(e.Value, '_') // 95 - addRange(e.Value, 'a', 'z') // 97-122 - case `\W`: - addRange(e.Value, 0, '0'-1) - addRange(e.Value, '9'+1, 'A'-1) - addRange(e.Value, 'Z'+1, '_'-1) - addRange(e.Value, '_'+1, 'a'-1) - addRange(e.Value, 'z'+1, utf8.MaxRune) - default: - // Give up: unknown escape sequence. - return - } - default: - // Give up: unexpected operation inside char class. - return - } - } - - // 2. Sort ranges, O(nlogn). - sort.SliceStable(ranges, func(i, j int) bool { - return ranges[i].low < ranges[j].low - }) - - // 3. Search for duplicates, O(n). - for i := 0; i < len(ranges)-1; i++ { - x := ranges[i+0] - y := ranges[i+1] - if x.high >= y.low { - c.warnCharClassDup(x.source, y.source, cc.Value) - break - } - } -} - -func (c *badRegexpChecker) charClassBoundRune(e syntax.Expr) rune { - switch e.Op { - case syntax.OpChar: - return c.stringToRune(e.Value) - case syntax.OpEscapeHex: - return c.hexToRune(e) - case syntax.OpEscapeOctal: - return c.octalToRune(e) - default: - return 0 - } -} - -func (c *badRegexpChecker) octalToRune(e syntax.Expr) rune { - v, _ := strconv.ParseInt(e.Value[len(`\`):], 8, 32) - return rune(v) -} - -func (c *badRegexpChecker) hexToRune(e syntax.Expr) rune { - var s string - switch e.Form { - case syntax.FormEscapeHexFull: - s = e.Value[len(`\x{`) : len(e.Value)-len(`}`)] - default: - s = e.Value[len(`\x`):] - } - v, _ := strconv.ParseInt(s, 16, 32) - return rune(v) -} - -func (c *badRegexpChecker) stringToRune(s string) rune { - ch, _ := utf8.DecodeRuneInString(s) - return ch -} - -func (c *badRegexpChecker) addGoodAnchor(pos syntax.Position) { - c.goodAnchors = append(c.goodAnchors, pos) -} - -func (c *badRegexpChecker) isGoodAnchor(e syntax.Expr) bool { - for _, pos := range c.goodAnchors { - if e.Pos == pos { - return true - } - } - return false -} - -func (c *badRegexpChecker) warn(format string, args ...interface{}) { - c.ctx.Warn(c.cause, format, args...) 
-} - -func (c *badRegexpChecker) warnSloppyCharRange(rng, charClass string) { - c.ctx.Warn(c.cause, "suspicious char range `%s` in %s", rng, charClass) -} - -func (c *badRegexpChecker) warnCharClassDup(x, y, charClass string) { - if x == y { - c.ctx.Warn(c.cause, "`%s` is duplicated in %s", x, charClass) - } else { - c.ctx.Warn(c.cause, "`%s` intersects with `%s` in %s", x, y, charClass) - } -} diff --git a/vendor/github.com/go-critic/go-critic/checkers/boolExprSimplify_checker.go b/vendor/github.com/go-critic/go-critic/checkers/boolExprSimplify_checker.go deleted file mode 100644 index a1c69cb7a..000000000 --- a/vendor/github.com/go-critic/go-critic/checkers/boolExprSimplify_checker.go +++ /dev/null @@ -1,346 +0,0 @@ -package checkers - -import ( - "go/ast" - "go/token" - "strconv" - - "github.com/go-critic/go-critic/checkers/internal/astwalk" - "github.com/go-critic/go-critic/checkers/internal/lintutil" - "github.com/go-critic/go-critic/linter" - - "github.com/go-toolsmith/astcast" - "github.com/go-toolsmith/astcopy" - "github.com/go-toolsmith/astequal" - "github.com/go-toolsmith/astp" - "github.com/go-toolsmith/typep" - "golang.org/x/tools/go/ast/astutil" -) - -func init() { - var info linter.CheckerInfo - info.Name = "boolExprSimplify" - info.Tags = []string{linter.StyleTag, linter.ExperimentalTag} - info.Summary = "Detects bool expressions that can be simplified" - info.Before = ` -a := !(elapsed >= expectElapsedMin) -b := !(x) == !(y)` - info.After = ` -a := elapsed < expectElapsedMin -b := (x) == (y)` - - collection.AddChecker(&info, func(ctx *linter.CheckerContext) (linter.FileWalker, error) { - return astwalk.WalkerForExpr(&boolExprSimplifyChecker{ctx: ctx}), nil - }) -} - -type boolExprSimplifyChecker struct { - astwalk.WalkHandler - ctx *linter.CheckerContext - hasFloats bool -} - -func (c *boolExprSimplifyChecker) VisitExpr(x ast.Expr) { - if !astp.IsBinaryExpr(x) && !astp.IsUnaryExpr(x) { - return - } - - // Throw away non-bool expressions and avoid redundant - // AST copying below. - if typ := c.ctx.TypeOf(x); typ == nil || !typep.HasBoolKind(typ.Underlying()) { - return - } - - // We'll loose all types info after a copy, - // this is why we record valuable info before doing it. 
- c.hasFloats = lintutil.ContainsNode(x, func(n ast.Node) bool { - if x, ok := n.(*ast.BinaryExpr); ok { - return typep.HasFloatProp(c.ctx.TypeOf(x.X).Underlying()) || - typep.HasFloatProp(c.ctx.TypeOf(x.Y).Underlying()) - } - return false - }) - - y := c.simplifyBool(astcopy.Expr(x)) - if !astequal.Expr(x, y) { - c.warn(x, y) - } -} - -func (c *boolExprSimplifyChecker) simplifyBool(x ast.Expr) ast.Expr { - return astutil.Apply(x, nil, func(cur *astutil.Cursor) bool { - return c.doubleNegation(cur) || - c.negatedEquals(cur) || - c.invertComparison(cur) || - c.combineChecks(cur) || - c.removeIncDec(cur) || - c.foldRanges(cur) || - true - }).(ast.Expr) -} - -func (c *boolExprSimplifyChecker) doubleNegation(cur *astutil.Cursor) bool { - neg1 := astcast.ToUnaryExpr(cur.Node()) - neg2 := astcast.ToUnaryExpr(astutil.Unparen(neg1.X)) - if neg1.Op == token.NOT && neg2.Op == token.NOT { - cur.Replace(astutil.Unparen(neg2.X)) - return true - } - return false -} - -func (c *boolExprSimplifyChecker) negatedEquals(cur *astutil.Cursor) bool { - x, ok := cur.Node().(*ast.BinaryExpr) - if !ok || x.Op != token.EQL { - return false - } - neg1 := astcast.ToUnaryExpr(x.X) - neg2 := astcast.ToUnaryExpr(x.Y) - if neg1.Op == token.NOT && neg2.Op == token.NOT { - x.X = neg1.X - x.Y = neg2.X - return true - } - return false -} - -func (c *boolExprSimplifyChecker) invertComparison(cur *astutil.Cursor) bool { - if c.hasFloats { // See #673 - return false - } - - neg := astcast.ToUnaryExpr(cur.Node()) - cmp := astcast.ToBinaryExpr(astutil.Unparen(neg.X)) - if neg.Op != token.NOT { - return false - } - - // Replace operator to its negated form. - switch cmp.Op { - case token.EQL: - cmp.Op = token.NEQ - case token.NEQ: - cmp.Op = token.EQL - case token.LSS: - cmp.Op = token.GEQ - case token.GTR: - cmp.Op = token.LEQ - case token.LEQ: - cmp.Op = token.GTR - case token.GEQ: - cmp.Op = token.LSS - - default: - return false - } - cur.Replace(cmp) - return true -} - -func (c *boolExprSimplifyChecker) isSafe(x ast.Expr) bool { - return typep.SideEffectFree(c.ctx.TypesInfo, x) -} - -func (c *boolExprSimplifyChecker) combineChecks(cur *astutil.Cursor) bool { - or, ok := cur.Node().(*ast.BinaryExpr) - if !ok || or.Op != token.LOR { - return false - } - - lhs := astcast.ToBinaryExpr(astutil.Unparen(or.X)) - rhs := astcast.ToBinaryExpr(astutil.Unparen(or.Y)) - - if !astequal.Expr(lhs.X, rhs.X) || !astequal.Expr(lhs.Y, rhs.Y) { - return false - } - if !c.isSafe(lhs.X) || !c.isSafe(lhs.Y) { - return false - } - - combTable := [...]struct { - x token.Token - y token.Token - result token.Token - }{ - {token.GTR, token.EQL, token.GEQ}, - {token.EQL, token.GTR, token.GEQ}, - {token.LSS, token.EQL, token.LEQ}, - {token.EQL, token.LSS, token.LEQ}, - } - for _, comb := range &combTable { - if comb.x == lhs.Op && comb.y == rhs.Op { - lhs.Op = comb.result - cur.Replace(lhs) - return true - } - } - return false -} - -func (c *boolExprSimplifyChecker) removeIncDec(cur *astutil.Cursor) bool { - cmp := astcast.ToBinaryExpr(cur.Node()) - - matchOneWay := func(op token.Token, x, y *ast.BinaryExpr) bool { - if x.Op != op || astcast.ToBasicLit(x.Y).Value != "1" { - return false - } - if y.Op == op && astcast.ToBasicLit(y.Y).Value == "1" { - return false - } - return true - } - replace := func(lhsOp, rhsOp, replacement token.Token) bool { - lhs := astcast.ToBinaryExpr(cmp.X) - rhs := astcast.ToBinaryExpr(cmp.Y) - switch { - case matchOneWay(lhsOp, lhs, rhs): - cmp.X = lhs.X - cmp.Op = replacement - cur.Replace(cmp) - return true - case 
matchOneWay(rhsOp, rhs, lhs): - cmp.Y = rhs.X - cmp.Op = replacement - cur.Replace(cmp) - return true - default: - return false - } - } - - switch cmp.Op { - case token.GTR: - // `x > y-1` => `x >= y` - // `x+1 > y` => `x >= y` - return replace(token.ADD, token.SUB, token.GEQ) - - case token.GEQ: - // `x >= y+1` => `x > y` - // `x-1 >= y` => `x > y` - return replace(token.SUB, token.ADD, token.GTR) - - case token.LSS: - // `x < y+1` => `x <= y` - // `x-1 < y` => `x <= y` - return replace(token.SUB, token.ADD, token.LEQ) - - case token.LEQ: - // `x <= y-1` => `x < y` - // `x+1 <= y` => `x < y` - return replace(token.ADD, token.SUB, token.LSS) - - default: - return false - } -} - -func (c *boolExprSimplifyChecker) foldRanges(cur *astutil.Cursor) bool { - if c.hasFloats { // See #848 - return false - } - - e, ok := cur.Node().(*ast.BinaryExpr) - if !ok { - return false - } - lhs := astcast.ToBinaryExpr(e.X) - rhs := astcast.ToBinaryExpr(e.Y) - if !c.isSafe(lhs.X) || !c.isSafe(rhs.X) { - return false - } - if !astequal.Expr(lhs.X, rhs.X) { - return false - } - - c1, ok := c.int64val(lhs.Y) - if !ok { - return false - } - c2, ok := c.int64val(rhs.Y) - if !ok { - return false - } - - type combination struct { - lhsOp token.Token - rhsOp token.Token - rhsDiff int64 - resDelta int64 - } - match := func(comb *combination) bool { - if lhs.Op != comb.lhsOp || rhs.Op != comb.rhsOp { - return false - } - if c2-c1 != comb.rhsDiff { - return false - } - return true - } - - switch e.Op { - case token.LAND: - combTable := [...]combination{ - // `x > c && x < c+2` => `x == c+1` - {token.GTR, token.LSS, 2, 1}, - // `x >= c && x < c+1` => `x == c` - {token.GEQ, token.LSS, 1, 0}, - // `x > c && x <= c+1` => `x == c+1` - {token.GTR, token.LEQ, 1, 1}, - // `x >= c && x <= c` => `x == c` - {token.GEQ, token.LEQ, 0, 0}, - } - for i := range combTable { - comb := combTable[i] - if match(&comb) { - lhs.Op = token.EQL - v := c1 + comb.resDelta - lhs.Y.(*ast.BasicLit).Value = strconv.FormatInt(v, 10) - cur.Replace(lhs) - return true - } - } - - case token.LOR: - combTable := [...]combination{ - // `x < c || x > c` => `x != c` - {token.LSS, token.GTR, 0, 0}, - // `x <= c || x > c+1` => `x != c+1` - {token.LEQ, token.GTR, 1, 1}, - // `x < c || x >= c+1` => `x != c` - {token.LSS, token.GEQ, 1, 0}, - // `x <= c || x >= c+2` => `x != c+1` - {token.LEQ, token.GEQ, 2, 1}, - } - for i := range combTable { - comb := combTable[i] - if match(&comb) { - lhs.Op = token.NEQ - v := c1 + comb.resDelta - lhs.Y.(*ast.BasicLit).Value = strconv.FormatInt(v, 10) - cur.Replace(lhs) - return true - } - } - } - - return false -} - -func (c *boolExprSimplifyChecker) int64val(x ast.Expr) (int64, bool) { - // TODO(quasilyte): if we had types info, we could use TypesInfo.Types[x].Value, - // but since copying erases leaves us without it, only basic literals are handled. 
- lit, ok := x.(*ast.BasicLit) - if !ok { - return 0, false - } - v, err := strconv.ParseInt(lit.Value, 10, 64) - if err != nil { - return 0, false - } - return v, true -} - -func (c *boolExprSimplifyChecker) warn(cause, suggestion ast.Expr) { - c.SkipChilds = true - c.ctx.Warn(cause, "can simplify `%s` to `%s`", cause, suggestion) -} diff --git a/vendor/github.com/go-critic/go-critic/checkers/builtinShadowDecl_checker.go b/vendor/github.com/go-critic/go-critic/checkers/builtinShadowDecl_checker.go deleted file mode 100644 index d8be10ce9..000000000 --- a/vendor/github.com/go-critic/go-critic/checkers/builtinShadowDecl_checker.go +++ /dev/null @@ -1,63 +0,0 @@ -package checkers - -import ( - "go/ast" - - "github.com/go-critic/go-critic/checkers/internal/astwalk" - "github.com/go-critic/go-critic/linter" -) - -func init() { - var info linter.CheckerInfo - info.Name = "builtinShadowDecl" - info.Tags = []string{linter.DiagnosticTag, linter.ExperimentalTag} - info.Summary = "Detects top-level declarations that shadow the predeclared identifiers" - info.Before = `type int struct {}` - info.After = `type myInt struct {}` - - collection.AddChecker(&info, func(ctx *linter.CheckerContext) (linter.FileWalker, error) { - return &builtinShadowDeclChecker{ctx: ctx}, nil - }) -} - -type builtinShadowDeclChecker struct { - astwalk.WalkHandler - ctx *linter.CheckerContext -} - -func (c *builtinShadowDeclChecker) WalkFile(f *ast.File) { - for _, decl := range f.Decls { - switch decl := decl.(type) { - case *ast.FuncDecl: - // Don't check methods. They can shadow anything safely. - if decl.Recv == nil { - c.checkName(decl.Name) - } - case *ast.GenDecl: - c.visitGenDecl(decl) - } - } -} - -func (c *builtinShadowDeclChecker) visitGenDecl(decl *ast.GenDecl) { - for _, spec := range decl.Specs { - switch spec := spec.(type) { - case *ast.ValueSpec: - for _, name := range spec.Names { - c.checkName(name) - } - case *ast.TypeSpec: - c.checkName(spec.Name) - } - } -} - -func (c *builtinShadowDeclChecker) checkName(name *ast.Ident) { - if isBuiltin(name.Name) { - c.warn(name) - } -} - -func (c *builtinShadowDeclChecker) warn(ident *ast.Ident) { - c.ctx.Warn(ident, "shadowing of predeclared identifier: %s", ident) -} diff --git a/vendor/github.com/go-critic/go-critic/checkers/builtinShadow_checker.go b/vendor/github.com/go-critic/go-critic/checkers/builtinShadow_checker.go deleted file mode 100644 index 0b4b7bafb..000000000 --- a/vendor/github.com/go-critic/go-critic/checkers/builtinShadow_checker.go +++ /dev/null @@ -1,36 +0,0 @@ -package checkers - -import ( - "go/ast" - - "github.com/go-critic/go-critic/checkers/internal/astwalk" - "github.com/go-critic/go-critic/linter" -) - -func init() { - var info linter.CheckerInfo - info.Name = "builtinShadow" - info.Tags = []string{linter.StyleTag, linter.OpinionatedTag} - info.Summary = "Detects when predeclared identifiers are shadowed in assignments" - info.Before = `len := 10` - info.After = `length := 10` - - collection.AddChecker(&info, func(ctx *linter.CheckerContext) (linter.FileWalker, error) { - return astwalk.WalkerForLocalDef(&builtinShadowChecker{ctx: ctx}, ctx.TypesInfo), nil - }) -} - -type builtinShadowChecker struct { - astwalk.WalkHandler - ctx *linter.CheckerContext -} - -func (c *builtinShadowChecker) VisitLocalDef(name astwalk.Name, _ ast.Expr) { - if isBuiltin(name.ID.Name) { - c.warn(name.ID) - } -} - -func (c *builtinShadowChecker) warn(ident *ast.Ident) { - c.ctx.Warn(ident, "shadowing of predeclared identifier: %s", ident) -} diff --git 
a/vendor/github.com/go-critic/go-critic/checkers/captLocal_checker.go b/vendor/github.com/go-critic/go-critic/checkers/captLocal_checker.go deleted file mode 100644 index b31a6f7fd..000000000 --- a/vendor/github.com/go-critic/go-critic/checkers/captLocal_checker.go +++ /dev/null @@ -1,49 +0,0 @@ -package checkers - -import ( - "go/ast" - - "github.com/go-critic/go-critic/checkers/internal/astwalk" - "github.com/go-critic/go-critic/linter" -) - -func init() { - var info linter.CheckerInfo - info.Name = "captLocal" - info.Tags = []string{linter.StyleTag} - info.Params = linter.CheckerParams{ - "paramsOnly": { - Value: true, - Usage: "whether to restrict checker to params only", - }, - } - info.Summary = "Detects capitalized names for local variables" - info.Before = `func f(IN int, OUT *int) (ERR error) {}` - info.After = `func f(in int, out *int) (err error) {}` - - collection.AddChecker(&info, func(ctx *linter.CheckerContext) (linter.FileWalker, error) { - c := &captLocalChecker{ctx: ctx} - c.paramsOnly = info.Params.Bool("paramsOnly") - return astwalk.WalkerForLocalDef(c, ctx.TypesInfo), nil - }) -} - -type captLocalChecker struct { - astwalk.WalkHandler - ctx *linter.CheckerContext - - paramsOnly bool -} - -func (c *captLocalChecker) VisitLocalDef(def astwalk.Name, _ ast.Expr) { - if c.paramsOnly && def.Kind != astwalk.NameParam { - return - } - if ast.IsExported(def.ID.Name) { - c.warn(def.ID) - } -} - -func (c *captLocalChecker) warn(id ast.Node) { - c.ctx.Warn(id, "`%s' should not be capitalized", id) -} diff --git a/vendor/github.com/go-critic/go-critic/checkers/caseOrder_checker.go b/vendor/github.com/go-critic/go-critic/checkers/caseOrder_checker.go deleted file mode 100644 index 306756834..000000000 --- a/vendor/github.com/go-critic/go-critic/checkers/caseOrder_checker.go +++ /dev/null @@ -1,88 +0,0 @@ -package checkers - -import ( - "go/ast" - "go/types" - - "github.com/go-critic/go-critic/checkers/internal/astwalk" - "github.com/go-critic/go-critic/linter" -) - -func init() { - var info linter.CheckerInfo - info.Name = "caseOrder" - info.Tags = []string{linter.DiagnosticTag} - info.Summary = "Detects erroneous case order inside switch statements" - info.Before = ` -switch x.(type) { -case ast.Expr: - fmt.Println("expr") -case *ast.BasicLit: - fmt.Println("basic lit") // Never executed -}` - info.After = ` -switch x.(type) { -case *ast.BasicLit: - fmt.Println("basic lit") // Now reachable -case ast.Expr: - fmt.Println("expr") -}` - - collection.AddChecker(&info, func(ctx *linter.CheckerContext) (linter.FileWalker, error) { - return astwalk.WalkerForStmt(&caseOrderChecker{ctx: ctx}), nil - }) -} - -type caseOrderChecker struct { - astwalk.WalkHandler - ctx *linter.CheckerContext -} - -func (c *caseOrderChecker) VisitStmt(stmt ast.Stmt) { - switch stmt := stmt.(type) { - case *ast.TypeSwitchStmt: - c.checkTypeSwitch(stmt) - case *ast.SwitchStmt: - c.checkSwitch(stmt) - } -} - -func (c *caseOrderChecker) checkTypeSwitch(s *ast.TypeSwitchStmt) { - type ifaceType struct { - node ast.Node - typ *types.Interface - } - var ifaces []ifaceType // Interfaces seen so far - for _, cc := range s.Body.List { - cc := cc.(*ast.CaseClause) - for _, x := range cc.List { - typ := c.ctx.TypeOf(x) - if typ == linter.UnknownType { - c.warnUnknownType(cc, x) - return - } - for _, iface := range ifaces { - if types.Implements(typ, iface.typ) { - c.warnTypeSwitch(cc, x, iface.node) - break - } - } - if iface, ok := typ.Underlying().(*types.Interface); ok { - ifaces = append(ifaces, ifaceType{node: x, typ: 
iface}) - } - } - } -} - -func (c *caseOrderChecker) warnTypeSwitch(cause, concrete, iface ast.Node) { - c.ctx.Warn(cause, "case %s must go before the %s case", concrete, iface) -} - -func (c *caseOrderChecker) warnUnknownType(cause, concrete ast.Node) { - c.ctx.Warn(cause, "type is not defined %s", concrete) -} - -func (c *caseOrderChecker) checkSwitch(s *ast.SwitchStmt) { - // TODO(quasilyte): can handle expression cases that overlap. - // Cases that have narrower value range should go before wider ones. -} diff --git a/vendor/github.com/go-critic/go-critic/checkers/checkers.go b/vendor/github.com/go-critic/go-critic/checkers/checkers.go deleted file mode 100644 index 5797dafdf..000000000 --- a/vendor/github.com/go-critic/go-critic/checkers/checkers.go +++ /dev/null @@ -1,19 +0,0 @@ -// Package checkers is a gocritic linter main checkers collection. -package checkers - -import ( - "os" - - "github.com/go-critic/go-critic/linter" -) - -var collection = &linter.CheckerCollection{ - URL: "https://github.com/go-critic/go-critic/checkers", -} - -var debug = func() func() bool { - v := os.Getenv("DEBUG") != "" - return func() bool { - return v - } -}() diff --git a/vendor/github.com/go-critic/go-critic/checkers/codegenComment_checker.go b/vendor/github.com/go-critic/go-critic/checkers/codegenComment_checker.go deleted file mode 100644 index 6eeb0bb5d..000000000 --- a/vendor/github.com/go-critic/go-critic/checkers/codegenComment_checker.go +++ /dev/null @@ -1,61 +0,0 @@ -package checkers - -import ( - "go/ast" - "regexp" - "strings" - - "github.com/go-critic/go-critic/checkers/internal/astwalk" - "github.com/go-critic/go-critic/linter" -) - -func init() { - var info linter.CheckerInfo - info.Name = "codegenComment" - info.Tags = []string{linter.DiagnosticTag} - info.Summary = "Detects malformed 'code generated' file comments" - info.Before = `// This file was automatically generated by foogen` - info.After = `// Code generated by foogen. DO NOT EDIT.` - - collection.AddChecker(&info, func(ctx *linter.CheckerContext) (linter.FileWalker, error) { - patterns := []string{ - "this (?:file|code) (?:was|is) auto(?:matically)? generated", - "this (?:file|code) (?:was|is) generated automatically", - "this (?:file|code) (?:was|is) generated by", - "this (?:file|code) (?:was|is) (?:auto(?:matically)? )?generated", - "this (?:file|code) (?:was|is) generated", - "code in this file (?:was|is) auto(?:matically)? generated", - "generated (?:file|code) - do not edit", - // TODO(quasilyte): more of these. 
- }
- re := regexp.MustCompile("(?i)" + strings.Join(patterns, "|"))
- return &codegenCommentChecker{
- ctx: ctx,
- badCommentRE: re,
- }, nil
- })
-}
-
-type codegenCommentChecker struct {
- astwalk.WalkHandler
- ctx *linter.CheckerContext
-
- badCommentRE *regexp.Regexp
-}
-
-func (c *codegenCommentChecker) WalkFile(f *ast.File) {
- if f.Doc == nil {
- return
- }
-
- for _, comment := range f.Doc.List {
- if c.badCommentRE.MatchString(comment.Text) {
- c.warn(comment)
- return
- }
- }
-}
-
-func (c *codegenCommentChecker) warn(cause ast.Node) {
- c.ctx.Warn(cause, "comment should match `Code generated .* DO NOT EDIT.` regexp")
-}
diff --git a/vendor/github.com/go-critic/go-critic/checkers/commentFormatting_checker.go b/vendor/github.com/go-critic/go-critic/checkers/commentFormatting_checker.go
deleted file mode 100644
index b834158ec..000000000
--- a/vendor/github.com/go-critic/go-critic/checkers/commentFormatting_checker.go
+++ /dev/null
@@ -1,123 +0,0 @@
-package checkers
-
-import (
- "go/ast"
- "regexp"
- "strings"
- "unicode"
- "unicode/utf8"
-
- "github.com/go-critic/go-critic/checkers/internal/astwalk"
- "github.com/go-critic/go-critic/linter"
-)
-
-func init() {
- var info linter.CheckerInfo
- info.Name = "commentFormatting"
- info.Tags = []string{linter.StyleTag}
- info.Summary = "Detects comments with non-idiomatic formatting"
- info.Before = `//This is a comment`
- info.After = `// This is a comment`
-
- collection.AddChecker(&info, func(ctx *linter.CheckerContext) (linter.FileWalker, error) {
- regexpPatterns := []*regexp.Regexp{
- regexp.MustCompile(`^//[\w-]+:.*$`), // e.g.: key: value
- }
- equalPatterns := []string{
- "//nolint",
- }
- parts := []string{
- "//go:generate ", // e.g.: go:generate value
- "//line /", // e.g.: line /path/to/file:123
- "//nolint ", // e.g.: nolint
- "//noinspection ", // e.g.: noinspection ALL, some GoLand and friends versions
- "//region", // e.g.: region awawa, used by GoLand and friends for custom folding
- "//endregion", // e.g.: endregion awawa or endregion, closes GoLand regions
- "//<editor-fold", // e.g.: <editor-fold desc="..."> or <editor-fold>, used by VSCode for custom folding
- "//</editor-fold>", // e.g.: </editor-fold>, closes VSCode regions
- "//export ", // e.g.: export Foo
- "///", // e.g.: vertical breaker /////////////
- "//+",
- "//#",
- "//-",
- "//!",
- }
-
- return astwalk.WalkerForComment(&commentFormattingChecker{
- ctx: ctx,
- partPatterns: parts,
- equalPatterns: equalPatterns,
- regexpPatterns: regexpPatterns,
- }), nil
- })
-}
-
-type commentFormattingChecker struct {
- astwalk.WalkHandler
- ctx *linter.CheckerContext
-
- partPatterns []string
- equalPatterns []string
- regexpPatterns []*regexp.Regexp
-}
-
-func (c *commentFormattingChecker) VisitComment(cg *ast.CommentGroup) {
- if strings.HasPrefix(cg.List[0].Text, "/*") {
- return
- }
-
-outerLoop:
- for _, comment := range cg.List {
- commentLen := len(comment.Text)
- if commentLen <= len("// ") {
- continue
- }
-
- for _, p := range c.partPatterns {
- if commentLen < len(p) {
- continue
- }
-
- if strings.EqualFold(comment.Text[:len(p)], p) {
- continue outerLoop
- }
- }
-
- for _, p := range c.equalPatterns {
- if strings.EqualFold(comment.Text, p) {
- continue outerLoop
- }
- }
-
- for _, p := range c.regexpPatterns {
- if p.MatchString(comment.Text) {
- continue outerLoop
- }
- }
-
- // Make a decision based on a first comment text rune.
- r, _ := utf8.DecodeRuneInString(comment.Text[len("//"):]) - if !c.specialChar(r) && !unicode.IsSpace(r) { - c.warn(comment) - return - } - } -} - -func (c *commentFormattingChecker) specialChar(r rune) bool { - // Permitted list to avoid false-positives. - switch r { - case '+', '-', '#', '!': - return true - default: - return false - } -} - -func (c *commentFormattingChecker) warn(comment *ast.Comment) { - c.ctx.WarnFixable(comment, linter.QuickFix{ - From: comment.Pos(), - To: comment.End(), - Replacement: []byte(strings.Replace(comment.Text, "//", "// ", 1)), - }, "put a space between `//` and comment text") -} diff --git a/vendor/github.com/go-critic/go-critic/checkers/commentedOutCode_checker.go b/vendor/github.com/go-critic/go-critic/checkers/commentedOutCode_checker.go deleted file mode 100644 index 8595b7951..000000000 --- a/vendor/github.com/go-critic/go-critic/checkers/commentedOutCode_checker.go +++ /dev/null @@ -1,167 +0,0 @@ -package checkers - -import ( - "fmt" - "go/ast" - "go/token" - "regexp" - "strings" - "unicode/utf8" - - "github.com/go-critic/go-critic/checkers/internal/astwalk" - "github.com/go-critic/go-critic/linter" - - "github.com/go-toolsmith/strparse" -) - -func init() { - var info linter.CheckerInfo - info.Name = "commentedOutCode" - info.Tags = []string{linter.DiagnosticTag, linter.ExperimentalTag} - info.Summary = "Detects commented-out code inside function bodies" - info.Params = linter.CheckerParams{ - "minLength": { - Value: 15, - Usage: "min length of the comment that triggers a warning", - }, - } - info.Before = ` -// fmt.Println("Debugging hard") -foo(1, 2)` - info.After = `foo(1, 2)` - - collection.AddChecker(&info, func(ctx *linter.CheckerContext) (linter.FileWalker, error) { - return astwalk.WalkerForLocalComment(&commentedOutCodeChecker{ - ctx: ctx, - notQuiteFuncCall: regexp.MustCompile(`\w+\s+\([^)]*\)\s*$`), - minLength: info.Params.Int("minLength"), - }), nil - }) -} - -type commentedOutCodeChecker struct { - astwalk.WalkHandler - ctx *linter.CheckerContext - fn *ast.FuncDecl - - notQuiteFuncCall *regexp.Regexp - minLength int -} - -func (c *commentedOutCodeChecker) EnterFunc(fn *ast.FuncDecl) bool { - c.fn = fn // Need to store current function inside checker context - return fn.Body != nil -} - -func (c *commentedOutCodeChecker) VisitLocalComment(cg *ast.CommentGroup) { - s := cg.Text() // Collect text once - - // We do multiple heuristics to avoid false positives. - // Many things can be improved here. - - markers := []string{ - "TODO", // TODO comments with code are permitted. - - // "http://" is interpreted as a label with comment. - // There are other protocols we might want to include. - "http://", - "https://", - - "e.g. ", // Clearly not a "selector expr" (mostly due to extra space) - } - for _, m := range markers { - if strings.Contains(s, m) { - return - } - } - - // Some very short comment that can be skipped. - // Usually triggering on these results in false positive. - // Unless there is a very popular call like print/println. - cond := utf8.RuneCountInString(s) < c.minLength && - !strings.Contains(s, "print") && - !strings.Contains(s, "fmt.") && - !strings.Contains(s, "log.") - if cond { - return - } - - // Almost looks like a commented-out function call, - // but there is a whitespace between function name and - // parameters list. Skip these to avoid false positives. 
- if c.notQuiteFuncCall.MatchString(s) { - return - } - - stmt := strparse.Stmt(s) - - if c.isPermittedStmt(stmt) { - return - } - - if stmt != strparse.BadStmt { - c.warn(cg) - return - } - - // Don't try to parse one-liner as block statement - if len(cg.List) == 1 && !strings.Contains(s, "\n") { - return - } - - // Some attempts to avoid false positives. - if c.skipBlock(s) { - return - } - - // Add braces to make block statement from - // multiple statements. - stmt = strparse.Stmt(fmt.Sprintf("{ %s }", s)) - - if stmt, ok := stmt.(*ast.BlockStmt); ok && len(stmt.List) != 0 { - c.warn(cg) - } -} - -func (c *commentedOutCodeChecker) skipBlock(s string) bool { - lines := strings.Split(s, "\n") // There is at least 1 line, that's invariant - - // Special example test block. - if isExampleTestFunc(c.fn) && strings.Contains(lines[0], "Output:") { - return true - } - - return false -} - -func (c *commentedOutCodeChecker) isPermittedStmt(stmt ast.Stmt) bool { - switch stmt := stmt.(type) { - case *ast.ExprStmt: - return c.isPermittedExpr(stmt.X) - case *ast.LabeledStmt: - return c.isPermittedStmt(stmt.Stmt) - case *ast.DeclStmt: - decl := stmt.Decl.(*ast.GenDecl) - return decl.Tok == token.TYPE - default: - return false - } -} - -func (c *commentedOutCodeChecker) isPermittedExpr(x ast.Expr) bool { - // Permit anything except expressions that can be used - // with complete result discarding. - switch x := x.(type) { - case *ast.CallExpr: - return false - case *ast.UnaryExpr: - // "<-" channel receive is not permitted. - return x.Op != token.ARROW - default: - return true - } -} - -func (c *commentedOutCodeChecker) warn(cause ast.Node) { - c.ctx.Warn(cause, "may want to remove commented-out code") -} diff --git a/vendor/github.com/go-critic/go-critic/checkers/commentedOutImport_checker.go b/vendor/github.com/go-critic/go-critic/checkers/commentedOutImport_checker.go deleted file mode 100644 index e0855da81..000000000 --- a/vendor/github.com/go-critic/go-critic/checkers/commentedOutImport_checker.go +++ /dev/null @@ -1,76 +0,0 @@ -package checkers - -import ( - "go/ast" - "go/token" - "regexp" - - "github.com/go-critic/go-critic/checkers/internal/astwalk" - "github.com/go-critic/go-critic/linter" -) - -func init() { - var info linter.CheckerInfo - info.Name = "commentedOutImport" - info.Tags = []string{linter.StyleTag, linter.ExperimentalTag} - info.Summary = "Detects commented-out imports" - info.Before = ` -import ( - "fmt" - //"os" -)` - info.After = ` -import ( - "fmt" -)` - - collection.AddChecker(&info, func(ctx *linter.CheckerContext) (linter.FileWalker, error) { - const pattern = `(?m)^(?://|/\*)?\s*"([a-zA-Z0-9_/]+)"\s*(?:\*/)?$` - return &commentedOutImportChecker{ - ctx: ctx, - importStringRE: regexp.MustCompile(pattern), - }, nil - }) -} - -type commentedOutImportChecker struct { - astwalk.WalkHandler - ctx *linter.CheckerContext - - importStringRE *regexp.Regexp -} - -func (c *commentedOutImportChecker) WalkFile(f *ast.File) { - // TODO(quasilyte): handle commented-out import spec, - // for example: // import "errors". - - for _, decl := range f.Decls { - decl, ok := decl.(*ast.GenDecl) - if !ok || decl.Tok != token.IMPORT { - // Import decls can only be in the beginning of the file. - // If we've met some other decl, there will be no more - // import decls. - break - } - - // Find comments inside this import decl span. - for _, cg := range f.Comments { - if cg.Pos() > decl.Rparen { - break // Below the decl, stop. 
- } - if cg.Pos() < decl.Lparen { - continue // Before the decl, skip. - } - - for _, comment := range cg.List { - for _, m := range c.importStringRE.FindAllStringSubmatch(comment.Text, -1) { - c.warn(comment, m[1]) - } - } - } - } -} - -func (c *commentedOutImportChecker) warn(cause ast.Node, path string) { - c.ctx.Warn(cause, "remove commented-out %q import", path) -} diff --git a/vendor/github.com/go-critic/go-critic/checkers/defaultCaseOrder_checker.go b/vendor/github.com/go-critic/go-critic/checkers/defaultCaseOrder_checker.go deleted file mode 100644 index cdebaef98..000000000 --- a/vendor/github.com/go-critic/go-critic/checkers/defaultCaseOrder_checker.go +++ /dev/null @@ -1,65 +0,0 @@ -package checkers - -import ( - "go/ast" - - "github.com/go-critic/go-critic/checkers/internal/astwalk" - "github.com/go-critic/go-critic/linter" -) - -func init() { - var info linter.CheckerInfo - info.Name = "defaultCaseOrder" - info.Tags = []string{linter.StyleTag} - info.Summary = "Detects when default case in switch isn't on 1st or last position" - info.Before = ` -switch { -case x > y: - // ... -default: // <- not the best position - // ... -case x == 10: - // ... -}` - info.After = ` -switch { -case x > y: - // ... -case x == 10: - // ... -default: // <- last case (could also be the first one) - // ... -}` - - collection.AddChecker(&info, func(ctx *linter.CheckerContext) (linter.FileWalker, error) { - return astwalk.WalkerForStmt(&defaultCaseOrderChecker{ctx: ctx}), nil - }) -} - -type defaultCaseOrderChecker struct { - astwalk.WalkHandler - ctx *linter.CheckerContext -} - -func (c *defaultCaseOrderChecker) VisitStmt(stmt ast.Stmt) { - swtch, ok := stmt.(*ast.SwitchStmt) - if !ok { - return - } - for i, stmt := range swtch.Body.List { - caseStmt, ok := stmt.(*ast.CaseClause) - if !ok { - continue - } - // is `default` case - if caseStmt.List == nil { - if i != 0 && i != len(swtch.Body.List)-1 { - c.warn(caseStmt) - } - } - } -} - -func (c *defaultCaseOrderChecker) warn(cause *ast.CaseClause) { - c.ctx.Warn(cause, "consider to make `default` case as first or as last case") -} diff --git a/vendor/github.com/go-critic/go-critic/checkers/deferInLoop_checker.go b/vendor/github.com/go-critic/go-critic/checkers/deferInLoop_checker.go deleted file mode 100644 index 37c80c864..000000000 --- a/vendor/github.com/go-critic/go-critic/checkers/deferInLoop_checker.go +++ /dev/null @@ -1,70 +0,0 @@ -package checkers - -import ( - "go/ast" - - "github.com/go-critic/go-critic/checkers/internal/astwalk" - "github.com/go-critic/go-critic/linter" -) - -func init() { - var info linter.CheckerInfo - info.Name = "deferInLoop" - info.Tags = []string{linter.DiagnosticTag, linter.ExperimentalTag} - info.Summary = "Detects loops inside functions that use defer" - info.Before = ` -for _, filename := range []string{"foo", "bar"} { - f, err := os.Open(filename) - - defer f.Close() -} -` - info.After = ` -func process(filename string) { - f, err := os.Open(filename) - - defer f.Close() -} -/* ... 
*/ -for _, filename := range []string{"foo", "bar"} { - process(filename) -}` - - collection.AddChecker(&info, func(ctx *linter.CheckerContext) (linter.FileWalker, error) { - return astwalk.WalkerForFuncDecl(&deferInLoopChecker{ctx: ctx}), nil - }) -} - -type deferInLoopChecker struct { - astwalk.WalkHandler - ctx *linter.CheckerContext - inFor bool -} - -func (c *deferInLoopChecker) VisitFuncDecl(fn *ast.FuncDecl) { - ast.Inspect(fn.Body, c.traversalFunc) -} - -func (c deferInLoopChecker) traversalFunc(cur ast.Node) bool { - switch n := cur.(type) { - case *ast.DeferStmt: - if c.inFor { - c.warn(n) - } - case *ast.RangeStmt, *ast.ForStmt: - if !c.inFor { - ast.Inspect(cur, deferInLoopChecker{ctx: c.ctx, inFor: true}.traversalFunc) - return false - } - case *ast.FuncLit: - ast.Inspect(n.Body, deferInLoopChecker{ctx: c.ctx, inFor: false}.traversalFunc) - return false - case nil: - return false - } - return true -} - -func (c *deferInLoopChecker) warn(cause *ast.DeferStmt) { - c.ctx.Warn(cause, "Possible resource leak, 'defer' is called in the 'for' loop") -} diff --git a/vendor/github.com/go-critic/go-critic/checkers/deprecatedComment_checker.go b/vendor/github.com/go-critic/go-critic/checkers/deprecatedComment_checker.go deleted file mode 100644 index c61d773da..000000000 --- a/vendor/github.com/go-critic/go-critic/checkers/deprecatedComment_checker.go +++ /dev/null @@ -1,156 +0,0 @@ -package checkers - -import ( - "go/ast" - "strings" - - "github.com/go-critic/go-critic/checkers/internal/astwalk" - "github.com/go-critic/go-critic/linter" -) - -func init() { - var info linter.CheckerInfo - info.Name = "deprecatedComment" - info.Tags = []string{linter.DiagnosticTag} - info.Summary = "Detects malformed 'deprecated' doc-comments" - info.Before = ` -// deprecated, use FuncNew instead -func FuncOld() int` - info.After = ` -// Deprecated: use FuncNew instead -func FuncOld() int` - - collection.AddChecker(&info, func(ctx *linter.CheckerContext) (linter.FileWalker, error) { - c := &deprecatedCommentChecker{ctx: ctx} - - c.commonPatterns = []string{ - "this type is deprecated", - "this function is deprecated", - "[[deprecated]]", - "note: deprecated", - "deprecated in", - "deprecated. use", - "deprecated! use", - "deprecated use", - // TODO(quasilyte): more of these? - } - - // TODO(quasilyte): may want to generate this list programmatically. - // - // TODO(quasilyte): currently it only handles a single missing letter. - // Might want to handle other kinds of common misspell/typo kinds. - c.commonTypos = []string{ - "Dprecated: ", - "Derecated: ", - "Depecated: ", - "Depekated: ", - "Deprcated: ", - "Depreated: ", - "Deprected: ", - "Deprecaed: ", - "Deprecatd: ", - "Deprecate: ", - "Derpecate: ", - "Derpecated: ", - "Depreacted: ", - } - for i := range c.commonTypos { - c.commonTypos[i] = strings.ToUpper(c.commonTypos[i]) - } - - return astwalk.WalkerForDocComment(c), nil - }) -} - -type deprecatedCommentChecker struct { - astwalk.WalkHandler - ctx *linter.CheckerContext - - commonPatterns []string - commonTypos []string -} - -func (c *deprecatedCommentChecker) VisitDocComment(doc *ast.CommentGroup) { - // There are 3 accepted forms of deprecation comments: - // - // 1. inline, that can't be handled with a DocCommentVisitor. - // Note that "Deprecated: " may not even be the comment prefix there. - // Example: "The line number in the input. Deprecated: Kept for compatibility." - // TODO(quasilyte): fix it. - // - // 2. Longer form-1. It's a doc-comment that only contains "deprecation" notice. 
- // - // 3. Like form-2, but may also include doc-comment text. - // Distinguished by an empty line. - // - // See https://github.com/golang/go/issues/10909#issuecomment-136492606. - // - // It's desirable to see how people make mistakes with the format, - // this is why there is currently no special treatment for these cases. - // TODO(quasilyte): do more audits and grow the negative tests suite. - // - // TODO(quasilyte): there are also multi-line deprecation comments. - - for _, comment := range doc.List { - if strings.HasPrefix(comment.Text, "/*") { - // TODO(quasilyte): handle multi-line doc comments. - continue - } - l := comment.Text[len("//"):] - if len(l) < len("Deprecated: ") { - continue - } - l = strings.TrimSpace(l) - - // Check whether someone messed up with a prefix casing. - upcase := strings.ToUpper(l) - if strings.HasPrefix(upcase, "DEPRECATED: ") && !strings.HasPrefix(l, "Deprecated: ") { - c.warnCasing(comment, l) - return - } - - // Check is someone used comma instead of a colon. - if strings.HasPrefix(l, "Deprecated, ") { - c.warnComma(comment) - return - } - - // Check for other commonly used patterns. - for _, pat := range c.commonPatterns { - if len(l) < len(pat) { - continue - } - - if strings.EqualFold(l[:len(pat)], pat) { - c.warnPattern(comment) - return - } - } - - // Detect some simple typos. - for _, prefixWithTypo := range c.commonTypos { - if strings.HasPrefix(upcase, prefixWithTypo) { - c.warnTypo(comment, l) - return - } - } - } -} - -func (c *deprecatedCommentChecker) warnCasing(cause ast.Node, line string) { - prefix := line[:len("DEPRECATED: ")] - c.ctx.Warn(cause, "use `Deprecated: ` (note the casing) instead of `%s`", prefix) -} - -func (c *deprecatedCommentChecker) warnPattern(cause ast.Node) { - c.ctx.Warn(cause, "the proper format is `Deprecated: `") -} - -func (c *deprecatedCommentChecker) warnComma(cause ast.Node) { - c.ctx.Warn(cause, "use `:` instead of `,` in `Deprecated, `") -} - -func (c *deprecatedCommentChecker) warnTypo(cause ast.Node, line string) { - word := strings.Split(line, ":")[0] - c.ctx.Warn(cause, "typo in `%s`; should be `Deprecated`", word) -} diff --git a/vendor/github.com/go-critic/go-critic/checkers/docStub_checker.go b/vendor/github.com/go-critic/go-critic/checkers/docStub_checker.go deleted file mode 100644 index aa23de42c..000000000 --- a/vendor/github.com/go-critic/go-critic/checkers/docStub_checker.go +++ /dev/null @@ -1,95 +0,0 @@ -package checkers - -import ( - "go/ast" - "go/token" - "regexp" - "strings" - - "github.com/go-critic/go-critic/checkers/internal/astwalk" - "github.com/go-critic/go-critic/linter" -) - -func init() { - var info linter.CheckerInfo - info.Name = "docStub" - info.Tags = []string{linter.StyleTag, linter.ExperimentalTag} - info.Summary = "Detects comments that silence go lint complaints about doc-comment" - info.Before = ` -// Foo ... -func Foo() { -}` - info.After = ` -// (A) - remove the doc-comment stub -func Foo() {} -// (B) - replace it with meaningful comment -// Foo is a demonstration-only function. 
-func Foo() {}` - - collection.AddChecker(&info, func(ctx *linter.CheckerContext) (linter.FileWalker, error) { - re := `(?i)^\.\.\.$|^\.$|^xxx\.?$|^whatever\.?$` - c := &docStubChecker{ - ctx: ctx, - stubCommentRE: regexp.MustCompile(re), - } - return c, nil - }) -} - -type docStubChecker struct { - astwalk.WalkHandler - ctx *linter.CheckerContext - - stubCommentRE *regexp.Regexp -} - -func (c *docStubChecker) WalkFile(f *ast.File) { - for _, decl := range f.Decls { - switch decl := decl.(type) { - case *ast.FuncDecl: - c.visitDoc(decl, decl.Name, decl.Doc, false) - case *ast.GenDecl: - if decl.Tok != token.TYPE { - continue - } - if len(decl.Specs) == 1 { - spec := decl.Specs[0].(*ast.TypeSpec) - // Only 1 spec, use doc from the decl itself. - c.visitDoc(spec, spec.Name, decl.Doc, true) - } - // N specs, use per-spec doc. - for _, spec := range decl.Specs { - spec := spec.(*ast.TypeSpec) - c.visitDoc(spec, spec.Name, spec.Doc, true) - } - } - } -} - -func (c *docStubChecker) visitDoc(decl ast.Node, sym *ast.Ident, doc *ast.CommentGroup, article bool) { - if !sym.IsExported() || doc == nil { - return - } - line := strings.TrimSpace(doc.List[0].Text[len("//"):]) - if article { - // Skip optional article. - for _, a := range []string{"The ", "An ", "A "} { - if strings.HasPrefix(line, a) { - line = line[len(a):] - break - } - } - } - if !strings.HasPrefix(line, sym.Name) { - return - } - line = strings.TrimSpace(line[len(sym.Name):]) - // Now try to detect the "stub" part. - if c.stubCommentRE.MatchString(line) { - c.warn(decl) - } -} - -func (c *docStubChecker) warn(cause ast.Node) { - c.ctx.Warn(cause, "silencing go lint doc-comment warnings is unadvised") -} diff --git a/vendor/github.com/go-critic/go-critic/checkers/dupBranchBody_checker.go b/vendor/github.com/go-critic/go-critic/checkers/dupBranchBody_checker.go deleted file mode 100644 index c4f018387..000000000 --- a/vendor/github.com/go-critic/go-critic/checkers/dupBranchBody_checker.go +++ /dev/null @@ -1,59 +0,0 @@ -package checkers - -import ( - "go/ast" - - "github.com/go-critic/go-critic/checkers/internal/astwalk" - "github.com/go-critic/go-critic/linter" - - "github.com/go-toolsmith/astequal" -) - -func init() { - var info linter.CheckerInfo - info.Name = "dupBranchBody" - info.Tags = []string{linter.DiagnosticTag} - info.Summary = "Detects duplicated branch bodies inside conditional statements" - info.Before = ` -if cond { - println("cond=true") -} else { - println("cond=true") -}` - info.After = ` -if cond { - println("cond=true") -} else { - println("cond=false") -}` - - collection.AddChecker(&info, func(ctx *linter.CheckerContext) (linter.FileWalker, error) { - return astwalk.WalkerForStmt(&dupBranchBodyChecker{ctx: ctx}), nil - }) -} - -type dupBranchBodyChecker struct { - astwalk.WalkHandler - ctx *linter.CheckerContext -} - -func (c *dupBranchBodyChecker) VisitStmt(stmt ast.Stmt) { - // TODO(quasilyte): extend to check switch statements as well. - // Should be very careful with type switches. 
- - if stmt, ok := stmt.(*ast.IfStmt); ok { - c.checkIf(stmt) - } -} - -func (c *dupBranchBodyChecker) checkIf(stmt *ast.IfStmt) { - thenBody := stmt.Body - elseBody, ok := stmt.Else.(*ast.BlockStmt) - if ok && astequal.Stmt(thenBody, elseBody) { - c.warnIf(stmt) - } -} - -func (c *dupBranchBodyChecker) warnIf(cause ast.Node) { - c.ctx.Warn(cause, "both branches in if statement have same body") -} diff --git a/vendor/github.com/go-critic/go-critic/checkers/dupCase_checker.go b/vendor/github.com/go-critic/go-critic/checkers/dupCase_checker.go deleted file mode 100644 index 381bad68b..000000000 --- a/vendor/github.com/go-critic/go-critic/checkers/dupCase_checker.go +++ /dev/null @@ -1,70 +0,0 @@ -package checkers - -import ( - "go/ast" - - "github.com/go-critic/go-critic/checkers/internal/astwalk" - "github.com/go-critic/go-critic/checkers/internal/lintutil" - "github.com/go-critic/go-critic/linter" -) - -func init() { - var info linter.CheckerInfo - info.Name = "dupCase" - info.Tags = []string{linter.DiagnosticTag} - info.Summary = "Detects duplicated case clauses inside switch or select statements" - info.Before = ` -switch x { -case ys[0], ys[1], ys[2], ys[0], ys[4]: -}` - info.After = ` -switch x { -case ys[0], ys[1], ys[2], ys[3], ys[4]: -}` - - collection.AddChecker(&info, func(ctx *linter.CheckerContext) (linter.FileWalker, error) { - return astwalk.WalkerForStmt(&dupCaseChecker{ctx: ctx}), nil - }) -} - -type dupCaseChecker struct { - astwalk.WalkHandler - ctx *linter.CheckerContext - - astSet lintutil.AstSet -} - -func (c *dupCaseChecker) VisitStmt(stmt ast.Stmt) { - switch stmt := stmt.(type) { - case *ast.SwitchStmt: - c.checkSwitch(stmt) - case *ast.SelectStmt: - c.checkSelect(stmt) - } -} - -func (c *dupCaseChecker) checkSwitch(stmt *ast.SwitchStmt) { - c.astSet.Clear() - for i := range stmt.Body.List { - cc := stmt.Body.List[i].(*ast.CaseClause) - for _, x := range cc.List { - if !c.astSet.Insert(x) { - c.warn(x) - } - } - } -} - -func (c *dupCaseChecker) checkSelect(stmt *ast.SelectStmt) { - c.astSet.Clear() - for i := range stmt.Body.List { - x := stmt.Body.List[i].(*ast.CommClause).Comm - if !c.astSet.Insert(x) { - c.warn(x) - } - } -} - -func (c *dupCaseChecker) warn(cause ast.Node) { - c.ctx.Warn(cause, "'case %s' is duplicated", cause) -} diff --git a/vendor/github.com/go-critic/go-critic/checkers/dupImports_checker.go b/vendor/github.com/go-critic/go-critic/checkers/dupImports_checker.go deleted file mode 100644 index ed674eb85..000000000 --- a/vendor/github.com/go-critic/go-critic/checkers/dupImports_checker.go +++ /dev/null @@ -1,63 +0,0 @@ -package checkers - -import ( - "fmt" - "go/ast" - - "github.com/go-critic/go-critic/linter" -) - -func init() { - var info linter.CheckerInfo - info.Name = "dupImport" - info.Tags = []string{linter.StyleTag, linter.ExperimentalTag} - info.Summary = "Detects multiple imports of the same package under different aliases" - info.Before = ` -import ( - "fmt" - printing "fmt" // Imported the second time -)` - info.After = ` -import( - "fmt" -)` - - collection.AddChecker(&info, func(ctx *linter.CheckerContext) (linter.FileWalker, error) { - return &dupImportChecker{ctx: ctx}, nil - }) -} - -type dupImportChecker struct { - ctx *linter.CheckerContext -} - -func (c *dupImportChecker) WalkFile(f *ast.File) { - imports := make(map[string][]*ast.ImportSpec) - for _, importDcl := range f.Imports { - pkg := importDcl.Path.Value - imports[pkg] = append(imports[pkg], importDcl) - } - - for _, importList := range imports { - if len(importList) == 1 
{ - continue - } - c.warn(importList) - } -} - -func (c *dupImportChecker) warn(importList []*ast.ImportSpec) { - msg := fmt.Sprintf("package is imported %d times under different aliases on lines", len(importList)) - for idx, importDcl := range importList { - switch { - case idx == len(importList)-1: - msg += " and" - case idx > 0: - msg += "," - } - msg += fmt.Sprintf(" %d", c.ctx.FileSet.Position(importDcl.Pos()).Line) - } - for _, importDcl := range importList { - c.ctx.Warn(importDcl, msg) - } -} diff --git a/vendor/github.com/go-critic/go-critic/checkers/dupSubExpr_checker.go b/vendor/github.com/go-critic/go-critic/checkers/dupSubExpr_checker.go deleted file mode 100644 index 9ab75945c..000000000 --- a/vendor/github.com/go-critic/go-critic/checkers/dupSubExpr_checker.go +++ /dev/null @@ -1,103 +0,0 @@ -package checkers - -import ( - "go/ast" - "go/token" - "go/types" - - "github.com/go-critic/go-critic/checkers/internal/astwalk" - "github.com/go-critic/go-critic/linter" - - "github.com/go-toolsmith/astequal" - "github.com/go-toolsmith/typep" -) - -func init() { - var info linter.CheckerInfo - info.Name = "dupSubExpr" - info.Tags = []string{linter.DiagnosticTag} - info.Summary = "Detects suspicious duplicated sub-expressions" - info.Before = ` -sort.Slice(xs, func(i, j int) bool { - return xs[i].v < xs[i].v // Duplicated index -})` - info.After = ` -sort.Slice(xs, func(i, j int) bool { - return xs[i].v < xs[j].v -})` - - collection.AddChecker(&info, func(ctx *linter.CheckerContext) (linter.FileWalker, error) { - c := &dupSubExprChecker{ctx: ctx} - - ops := []struct { - op token.Token - float bool // Whether float args require special care - }{ - {op: token.LOR}, // x || x - {op: token.LAND}, // x && x - {op: token.OR}, // x | x - {op: token.AND}, // x & x - {op: token.XOR}, // x ^ x - {op: token.LSS}, // x < x - {op: token.GTR}, // x > x - {op: token.AND_NOT}, // x &^ x - {op: token.REM}, // x % x - - {op: token.EQL, float: true}, // x == x - {op: token.NEQ, float: true}, // x != x - {op: token.LEQ, float: true}, // x <= x - {op: token.GEQ, float: true}, // x >= x - {op: token.QUO, float: true}, // x / x - {op: token.SUB, float: true}, // x - x - } - - c.opSet = make(map[token.Token]bool) - c.floatOpsSet = make(map[token.Token]bool) - for _, opInfo := range ops { - c.opSet[opInfo.op] = true - if opInfo.float { - c.floatOpsSet[opInfo.op] = true - } - } - - return astwalk.WalkerForExpr(c), nil - }) -} - -type dupSubExprChecker struct { - astwalk.WalkHandler - ctx *linter.CheckerContext - - // opSet is a set of binary operations that do not make - // sense with duplicated (same) RHS and LHS. 
- opSet map[token.Token]bool - - floatOpsSet map[token.Token]bool -} - -func (c *dupSubExprChecker) VisitExpr(expr ast.Expr) { - if expr, ok := expr.(*ast.BinaryExpr); ok { - c.checkBinaryExpr(expr) - } -} - -func (c *dupSubExprChecker) checkBinaryExpr(expr *ast.BinaryExpr) { - if !c.opSet[expr.Op] { - return - } - if c.resultIsFloat(expr.X) && c.floatOpsSet[expr.Op] { - return - } - if typep.SideEffectFree(c.ctx.TypesInfo, expr) && c.opSet[expr.Op] && astequal.Expr(expr.X, expr.Y) { - c.warn(expr) - } -} - -func (c *dupSubExprChecker) resultIsFloat(expr ast.Expr) bool { - typ, ok := c.ctx.TypeOf(expr).(*types.Basic) - return ok && typ.Info()&types.IsFloat != 0 -} - -func (c *dupSubExprChecker) warn(cause *ast.BinaryExpr) { - c.ctx.Warn(cause, "suspicious identical LHS and RHS for `%s` operator", cause.Op) -} diff --git a/vendor/github.com/go-critic/go-critic/checkers/elseif_checker.go b/vendor/github.com/go-critic/go-critic/checkers/elseif_checker.go deleted file mode 100644 index 857d09fa0..000000000 --- a/vendor/github.com/go-critic/go-critic/checkers/elseif_checker.go +++ /dev/null @@ -1,72 +0,0 @@ -package checkers - -import ( - "go/ast" - - "github.com/go-critic/go-critic/checkers/internal/astwalk" - "github.com/go-critic/go-critic/linter" - - "github.com/go-toolsmith/astp" -) - -func init() { - var info linter.CheckerInfo - info.Name = "elseif" - info.Tags = []string{linter.StyleTag} - info.Params = linter.CheckerParams{ - "skipBalanced": { - Value: true, - Usage: "whether to skip balanced if-else pairs", - }, - } - info.Summary = "Detects else with nested if statement that can be replaced with else-if" - info.Before = ` -if cond1 { -} else { - if x := cond2; x { - } -}` - info.After = ` -if cond1 { -} else if x := cond2; x { -}` - - collection.AddChecker(&info, func(ctx *linter.CheckerContext) (linter.FileWalker, error) { - c := &elseifChecker{ctx: ctx} - c.skipBalanced = info.Params.Bool("skipBalanced") - return astwalk.WalkerForStmt(c), nil - }) -} - -type elseifChecker struct { - astwalk.WalkHandler - ctx *linter.CheckerContext - - skipBalanced bool -} - -func (c *elseifChecker) VisitStmt(stmt ast.Stmt) { - if stmt, ok := stmt.(*ast.IfStmt); ok { - elseBody, ok := stmt.Else.(*ast.BlockStmt) - if !ok || len(elseBody.List) != 1 { - return - } - innerIfStmt, ok := elseBody.List[0].(*ast.IfStmt) - if !ok { - return - } - balanced := len(stmt.Body.List) == 1 && - astp.IsIfStmt(stmt.Body.List[0]) - if balanced && c.skipBalanced { - return // Configured to skip balanced statements - } - if innerIfStmt.Else != nil || innerIfStmt.Init != nil { - return - } - c.warn(stmt.Else) - } -} - -func (c *elseifChecker) warn(cause ast.Node) { - c.ctx.Warn(cause, "can replace 'else {if cond {}}' with 'else if cond {}'") -} diff --git a/vendor/github.com/go-critic/go-critic/checkers/embedded_rules.go b/vendor/github.com/go-critic/go-critic/checkers/embedded_rules.go deleted file mode 100644 index ad507425e..000000000 --- a/vendor/github.com/go-critic/go-critic/checkers/embedded_rules.go +++ /dev/null @@ -1,108 +0,0 @@ -package checkers - -import ( - "fmt" - "go/ast" - "go/build" - "go/token" - "os" - - "github.com/go-critic/go-critic/checkers/rulesdata" - "github.com/go-critic/go-critic/linter" - - "github.com/quasilyte/go-ruleguard/ruleguard" -) - -//go:generate go run ./rules/precompile.go -rules ./rules/rules.go -o ./rulesdata/rulesdata.go - -func InitEmbeddedRules() error { - filename := "rules/rules.go" - - fset := token.NewFileSet() - var groups []ruleguard.GoRuleGroup - - var buildContext 
*build.Context - - ruleguardDebug := os.Getenv("GOCRITIC_RULEGUARD_DEBUG") != "" - - // First we create an Engine to parse all rules. - // We need it to get the structured info about our rules - // that will be used to generate checkers. - // We introduce an extra scope in hope that rootEngine - // will be garbage-collected after we don't need it. - // LoadedGroups() returns a slice copy and that's all what we need. - { - rootEngine := ruleguard.NewEngine() - rootEngine.InferBuildContext() - buildContext = rootEngine.BuildContext - - loadContext := &ruleguard.LoadContext{ - Fset: fset, - DebugImports: ruleguardDebug, - DebugPrint: func(s string) { - fmt.Println("debug:", s) - }, - } - if err := rootEngine.LoadFromIR(loadContext, filename, rulesdata.PrecompiledRules); err != nil { - return fmt.Errorf("load embedded ruleguard rules: %w", err) - } - groups = rootEngine.LoadedGroups() - } - - // For every rules group we create a new checker and a separate engine. - // That dedicated ruleguard engine will contain rules only from one group. - for i := range groups { - g := groups[i] - info := &linter.CheckerInfo{ - Name: g.Name, - Summary: g.DocSummary, - Before: g.DocBefore, - After: g.DocAfter, - Note: g.DocNote, - Tags: g.DocTags, - - EmbeddedRuleguard: true, - } - collection.AddChecker(info, func(ctx *linter.CheckerContext) (linter.FileWalker, error) { - parseContext := &ruleguard.LoadContext{ - Fset: fset, - GroupFilter: func(gr *ruleguard.GoRuleGroup) bool { - return gr.Name == g.Name - }, - DebugImports: ruleguardDebug, - DebugPrint: func(s string) { - fmt.Println("debug:", s) - }, - } - engine := ruleguard.NewEngine() - engine.BuildContext = buildContext - err := engine.LoadFromIR(parseContext, filename, rulesdata.PrecompiledRules) - if err != nil { - return nil, err - } - c := &embeddedRuleguardChecker{ - ctx: ctx, - engine: engine, - } - return c, nil - }) - } - - return nil -} - -type embeddedRuleguardChecker struct { - ctx *linter.CheckerContext - engine *ruleguard.Engine -} - -func (c *embeddedRuleguardChecker) WalkFile(f *ast.File) { - runRuleguardEngine(c.ctx, f, c.engine, &ruleguard.RunContext{ - Pkg: c.ctx.Pkg, - Types: c.ctx.TypesInfo, - Sizes: c.ctx.SizesInfo, - GoVersion: ruleguard.GoVersion(c.ctx.GoVersion), - Fset: c.ctx.FileSet, - TruncateLen: 100, - }) -} diff --git a/vendor/github.com/go-critic/go-critic/checkers/emptyFallthrough_checker.go b/vendor/github.com/go-critic/go-critic/checkers/emptyFallthrough_checker.go deleted file mode 100644 index a008c6187..000000000 --- a/vendor/github.com/go-critic/go-critic/checkers/emptyFallthrough_checker.go +++ /dev/null @@ -1,70 +0,0 @@ -package checkers - -import ( - "go/ast" - "go/token" - - "github.com/go-critic/go-critic/checkers/internal/astwalk" - "github.com/go-critic/go-critic/linter" -) - -func init() { - var info linter.CheckerInfo - info.Name = "emptyFallthrough" - info.Tags = []string{linter.StyleTag, linter.ExperimentalTag} - info.Summary = "Detects fallthrough that can be avoided by using multi case values" - info.Before = `switch kind { -case reflect.Int: - fallthrough -case reflect.Int32: - return Int -}` - info.After = `switch kind { -case reflect.Int, reflect.Int32: - return Int -}` - - collection.AddChecker(&info, func(ctx *linter.CheckerContext) (linter.FileWalker, error) { - return astwalk.WalkerForStmt(&emptyFallthroughChecker{ctx: ctx}), nil - }) -} - -type emptyFallthroughChecker struct { - astwalk.WalkHandler - ctx *linter.CheckerContext -} - -func (c *emptyFallthroughChecker) VisitStmt(stmt ast.Stmt) { - 
ss, ok := stmt.(*ast.SwitchStmt) - if !ok { - return - } - - prevCaseDefault := false - for i := len(ss.Body.List) - 1; i >= 0; i-- { - if cc, ok := ss.Body.List[i].(*ast.CaseClause); ok { - warn := false - if len(cc.Body) == 1 { - if bs, ok := cc.Body[0].(*ast.BranchStmt); ok && bs.Tok == token.FALLTHROUGH { - warn = true - if prevCaseDefault { - c.warnDefault(bs) - } else if cc.List != nil { - c.warn(bs) - } - } - } - if !warn { - prevCaseDefault = cc.List == nil - } - } - } -} - -func (c *emptyFallthroughChecker) warnDefault(cause ast.Node) { - c.ctx.Warn(cause, "remove empty case containing only fallthrough to default case") -} - -func (c *emptyFallthroughChecker) warn(cause ast.Node) { - c.ctx.Warn(cause, "replace empty case containing only fallthrough with expression list") -} diff --git a/vendor/github.com/go-critic/go-critic/checkers/evalOrder_checker.go b/vendor/github.com/go-critic/go-critic/checkers/evalOrder_checker.go deleted file mode 100644 index f8c5ae542..000000000 --- a/vendor/github.com/go-critic/go-critic/checkers/evalOrder_checker.go +++ /dev/null @@ -1,88 +0,0 @@ -package checkers - -import ( - "go/ast" - "go/token" - "go/types" - - "github.com/go-critic/go-critic/checkers/internal/astwalk" - "github.com/go-critic/go-critic/checkers/internal/lintutil" - "github.com/go-critic/go-critic/linter" - - "github.com/go-toolsmith/astcast" - "github.com/go-toolsmith/astequal" - "github.com/go-toolsmith/typep" -) - -func init() { - var info linter.CheckerInfo - info.Name = "evalOrder" - info.Tags = []string{linter.DiagnosticTag, linter.ExperimentalTag} - info.Summary = "Detects unwanted dependencies on the evaluation order" - info.Before = `return x, f(&x)` - info.After = ` -err := f(&x) -return x, err -` - - collection.AddChecker(&info, func(ctx *linter.CheckerContext) (linter.FileWalker, error) { - return astwalk.WalkerForStmt(&evalOrderChecker{ctx: ctx}), nil - }) -} - -type evalOrderChecker struct { - astwalk.WalkHandler - ctx *linter.CheckerContext -} - -func (c *evalOrderChecker) VisitStmt(stmt ast.Stmt) { - ret := astcast.ToReturnStmt(stmt) - if len(ret.Results) < 2 { - return - } - - // TODO(quasilyte): handle selector expressions like o.val in addition - // to bare identifiers. - addrTake := &ast.UnaryExpr{Op: token.AND} - for _, res := range ret.Results { - id, ok := res.(*ast.Ident) - if !ok { - continue - } - addrTake.X = id // addrTake is &id now - for _, res := range ret.Results { - call, ok := res.(*ast.CallExpr) - if !ok { - continue - } - - // 1. Check if there is a call in form of id.method() where - // method takes id by a pointer. - if sel, ok := call.Fun.(*ast.SelectorExpr); ok { - if astequal.Node(sel.X, id) && c.hasPtrRecv(sel.Sel) { - c.warn(call) - } - } - - // 2. Check that there is no call that uses &id as an argument. 
- dependency := lintutil.ContainsNode(call, func(n ast.Node) bool { - return astequal.Node(addrTake, n) - }) - if dependency { - c.warn(call) - } - } - } -} - -func (c *evalOrderChecker) hasPtrRecv(fn *ast.Ident) bool { - sig, ok := c.ctx.TypeOf(fn).(*types.Signature) - if !ok { - return false - } - return typep.IsPointer(sig.Recv().Type()) -} - -func (c *evalOrderChecker) warn(call *ast.CallExpr) { - c.ctx.Warn(call, "may want to evaluate %s before the return statement", call) -} diff --git a/vendor/github.com/go-critic/go-critic/checkers/exitAfterDefer_checker.go b/vendor/github.com/go-critic/go-critic/checkers/exitAfterDefer_checker.go deleted file mode 100644 index 9889f48e8..000000000 --- a/vendor/github.com/go-critic/go-critic/checkers/exitAfterDefer_checker.go +++ /dev/null @@ -1,85 +0,0 @@ -package checkers - -import ( - "go/ast" - - "github.com/go-critic/go-critic/checkers/internal/astwalk" - "github.com/go-critic/go-critic/linter" - - "github.com/go-toolsmith/astfmt" - "github.com/go-toolsmith/astp" - "golang.org/x/tools/go/ast/astutil" -) - -func init() { - var info linter.CheckerInfo - info.Name = "exitAfterDefer" - info.Tags = []string{linter.DiagnosticTag} - info.Summary = "Detects calls to exit/fatal inside functions that use defer" - info.Before = ` -defer os.Remove(filename) -if bad { - log.Fatalf("something bad happened") -}` - info.After = ` -defer os.Remove(filename) -if bad { - log.Printf("something bad happened") - return -}` - - collection.AddChecker(&info, func(ctx *linter.CheckerContext) (linter.FileWalker, error) { - return astwalk.WalkerForFuncDecl(&exitAfterDeferChecker{ctx: ctx}), nil - }) -} - -type exitAfterDeferChecker struct { - astwalk.WalkHandler - ctx *linter.CheckerContext -} - -func (c *exitAfterDeferChecker) VisitFuncDecl(fn *ast.FuncDecl) { - // TODO(quasilyte): handle goto and other kinds of flow that break - // the algorithm below that expects the latter statement to be - // executed after the ones that come before it. - - var deferStmt *ast.DeferStmt - pre := func(cur *astutil.Cursor) bool { - // Don't recurse into local anonymous functions. - return !astp.IsFuncLit(cur.Node()) - } - post := func(cur *astutil.Cursor) bool { - switch n := cur.Node().(type) { - case *ast.DeferStmt: - deferStmt = n - case *ast.CallExpr: - // See #995. We allow `defer os.Exit()` calls - // as it's harder to determine whether they're going - // to clutter anything without actually trying to - // simulate the defer stack + understanding the control flow. - // TODO: can we use CFG here? - if _, ok := cur.Parent().(*ast.DeferStmt); ok { - return true - } - if deferStmt != nil { - switch qualifiedName(n.Fun) { - case "log.Fatal", "log.Fatalf", "log.Fatalln", "os.Exit": - c.warn(n, deferStmt) - return false - } - } - } - return true - } - astutil.Apply(fn.Body, pre, post) -} - -func (c *exitAfterDeferChecker) warn(cause *ast.CallExpr, deferStmt *ast.DeferStmt) { - s := astfmt.Sprint(deferStmt) - if fnlit, ok := deferStmt.Call.Fun.(*ast.FuncLit); ok { - // To avoid long and multi-line warning messages, - // collapse the function literals. 
- s = "defer " + astfmt.Sprint(fnlit.Type) + "{...}(...)" - } - c.ctx.Warn(cause, "%s will exit, and `%s` will not run", cause.Fun, s) -} diff --git a/vendor/github.com/go-critic/go-critic/checkers/filepathJoin_checker.go b/vendor/github.com/go-critic/go-critic/checkers/filepathJoin_checker.go deleted file mode 100644 index 17ab0ea83..000000000 --- a/vendor/github.com/go-critic/go-critic/checkers/filepathJoin_checker.go +++ /dev/null @@ -1,51 +0,0 @@ -package checkers - -import ( - "go/ast" - "strings" - - "github.com/go-critic/go-critic/checkers/internal/astwalk" - "github.com/go-critic/go-critic/linter" - - "github.com/go-toolsmith/astcast" -) - -func init() { - var info linter.CheckerInfo - info.Name = "filepathJoin" - info.Tags = []string{linter.DiagnosticTag, linter.ExperimentalTag} - info.Summary = "Detects problems in filepath.Join() function calls" - info.Before = `filepath.Join("dir/", filename)` - info.After = `filepath.Join("dir", filename)` - - collection.AddChecker(&info, func(ctx *linter.CheckerContext) (linter.FileWalker, error) { - return astwalk.WalkerForExpr(&filepathJoinChecker{ctx: ctx}), nil - }) -} - -type filepathJoinChecker struct { - astwalk.WalkHandler - ctx *linter.CheckerContext -} - -func (c *filepathJoinChecker) VisitExpr(expr ast.Expr) { - call := astcast.ToCallExpr(expr) - if qualifiedName(call.Fun) != "filepath.Join" { - return - } - - for _, arg := range call.Args { - arg, ok := arg.(*ast.BasicLit) - if ok && c.hasSeparator(arg) { - c.warnSeparator(arg) - } - } -} - -func (c *filepathJoinChecker) hasSeparator(v *ast.BasicLit) bool { - return strings.ContainsAny(v.Value, `/\`) -} - -func (c *filepathJoinChecker) warnSeparator(sep ast.Expr) { - c.ctx.Warn(sep, "%s contains a path separator", sep) -} diff --git a/vendor/github.com/go-critic/go-critic/checkers/flagName_checker.go b/vendor/github.com/go-critic/go-critic/checkers/flagName_checker.go deleted file mode 100644 index 701066860..000000000 --- a/vendor/github.com/go-critic/go-critic/checkers/flagName_checker.go +++ /dev/null @@ -1,89 +0,0 @@ -package checkers - -import ( - "go/ast" - "go/constant" - "go/types" - "strings" - - "github.com/go-critic/go-critic/checkers/internal/astwalk" - "github.com/go-critic/go-critic/linter" - - "github.com/go-toolsmith/astcast" -) - -func init() { - var info linter.CheckerInfo - info.Name = "flagName" - info.Tags = []string{linter.DiagnosticTag} - info.Summary = "Detects suspicious flag names" - info.Before = `b := flag.Bool(" foo ", false, "description")` - info.After = `b := flag.Bool("foo", false, "description")` - info.Note = "https://github.com/golang/go/issues/41792" - - collection.AddChecker(&info, func(ctx *linter.CheckerContext) (linter.FileWalker, error) { - return astwalk.WalkerForExpr(&flagNameChecker{ctx: ctx}), nil - }) -} - -type flagNameChecker struct { - astwalk.WalkHandler - ctx *linter.CheckerContext -} - -func (c *flagNameChecker) VisitExpr(expr ast.Expr) { - call := astcast.ToCallExpr(expr) - calledExpr := astcast.ToSelectorExpr(call.Fun) - obj, ok := c.ctx.TypesInfo.ObjectOf(astcast.ToIdent(calledExpr.X)).(*types.PkgName) - if !ok { - return - } - sym := calledExpr.Sel - pkg := obj.Imported() - if pkg.Path() != "flag" { - return - } - - switch sym.Name { - case "Bool", "Duration", "Float64", "String", - "Int", "Int64", "Uint", "Uint64": - c.checkFlagName(call, call.Args[0]) - case "BoolVar", "DurationVar", "Float64Var", "StringVar", - "IntVar", "Int64Var", "UintVar", "Uint64Var": - c.checkFlagName(call, call.Args[1]) - } -} - -func (c 
*flagNameChecker) checkFlagName(call *ast.CallExpr, arg ast.Expr) { - cv := c.ctx.TypesInfo.Types[arg].Value - if cv == nil { - return // Non-constant name - } - name := constant.StringVal(cv) - switch { - case name == "": - c.warnEmpty(call) - case strings.HasPrefix(name, "-"): - c.warnHyphenPrefix(call, name) - case strings.Contains(name, "="): - c.warnEq(call, name) - case strings.Contains(name, " "): - c.warnWhitespace(call, name) - } -} - -func (c *flagNameChecker) warnEmpty(cause ast.Node) { - c.ctx.Warn(cause, "empty flag name") -} - -func (c *flagNameChecker) warnHyphenPrefix(cause ast.Node, name string) { - c.ctx.Warn(cause, "flag name %q should not start with a hyphen", name) -} - -func (c *flagNameChecker) warnEq(cause ast.Node, name string) { - c.ctx.Warn(cause, "flag name %q should not contain '='", name) -} - -func (c *flagNameChecker) warnWhitespace(cause ast.Node, name string) { - c.ctx.Warn(cause, "flag name %q contains whitespace", name) -} diff --git a/vendor/github.com/go-critic/go-critic/checkers/hexLiteral_checker.go b/vendor/github.com/go-critic/go-critic/checkers/hexLiteral_checker.go deleted file mode 100644 index 7301bd325..000000000 --- a/vendor/github.com/go-critic/go-critic/checkers/hexLiteral_checker.go +++ /dev/null @@ -1,61 +0,0 @@ -package checkers - -import ( - "go/ast" - "go/token" - "strings" - - "github.com/go-critic/go-critic/checkers/internal/astwalk" - "github.com/go-critic/go-critic/linter" - - "github.com/go-toolsmith/astcast" -) - -func init() { - var info linter.CheckerInfo - info.Name = "hexLiteral" - info.Tags = []string{linter.StyleTag, linter.ExperimentalTag} - info.Summary = "Detects hex literals that have mixed case letter digits" - info.Before = ` -x := 0X12 -y := 0xfF` - info.After = ` -x := 0x12 -// (A) -y := 0xff -// (B) -y := 0xFF` - - collection.AddChecker(&info, func(ctx *linter.CheckerContext) (linter.FileWalker, error) { - return astwalk.WalkerForExpr(&hexLiteralChecker{ctx: ctx}), nil - }) -} - -type hexLiteralChecker struct { - astwalk.WalkHandler - ctx *linter.CheckerContext -} - -func (c *hexLiteralChecker) warn0X(lit *ast.BasicLit) { - suggest := "0x" + lit.Value[len("0X"):] - c.ctx.Warn(lit, "prefer 0x over 0X, s/%s/%s/", lit.Value, suggest) -} - -func (c *hexLiteralChecker) warnMixedDigits(lit *ast.BasicLit) { - c.ctx.Warn(lit, "don't mix hex literal letter digits casing") -} - -func (c *hexLiteralChecker) VisitExpr(expr ast.Expr) { - lit := astcast.ToBasicLit(expr) - if lit.Kind != token.INT || len(lit.Value) < 3 { - return - } - if strings.HasPrefix(lit.Value, "0X") { - c.warn0X(lit) - return - } - digits := lit.Value[len("0x"):] - if strings.ToLower(digits) != digits && strings.ToUpper(digits) != digits { - c.warnMixedDigits(lit) - } -} diff --git a/vendor/github.com/go-critic/go-critic/checkers/hugeParam_checker.go b/vendor/github.com/go-critic/go-critic/checkers/hugeParam_checker.go deleted file mode 100644 index 7b7a3c538..000000000 --- a/vendor/github.com/go-critic/go-critic/checkers/hugeParam_checker.go +++ /dev/null @@ -1,83 +0,0 @@ -package checkers - -import ( - "go/ast" - - "github.com/go-critic/go-critic/checkers/internal/astwalk" - "github.com/go-critic/go-critic/linter" - - "github.com/go-toolsmith/astcast" -) - -func init() { - var info linter.CheckerInfo - info.Name = "hugeParam" - info.Tags = []string{linter.PerformanceTag} - info.Params = linter.CheckerParams{ - "sizeThreshold": { - Value: 80, - Usage: "size in bytes that makes the warning trigger", - }, - } - info.Summary = "Detects params that incur 
excessive amount of copying" - info.Before = `func f(x [1024]int) {}` - info.After = `func f(x *[1024]int) {}` - - collection.AddChecker(&info, func(ctx *linter.CheckerContext) (linter.FileWalker, error) { - return astwalk.WalkerForFuncDecl(&hugeParamChecker{ - ctx: ctx, - sizeThreshold: int64(info.Params.Int("sizeThreshold")), - }), nil - }) -} - -type hugeParamChecker struct { - astwalk.WalkHandler - ctx *linter.CheckerContext - - sizeThreshold int64 -} - -func (c *hugeParamChecker) VisitFuncDecl(decl *ast.FuncDecl) { - // TODO(quasilyte): maybe it's worthwhile to permit skipping - // test files for this checker? - if c.isImplementStringer(decl) { - return - } - - if decl.Recv != nil { - c.checkParams(decl.Recv.List) - } - c.checkParams(decl.Type.Params.List) -} - -// isImplementStringer check method signature is: String() string. -func (*hugeParamChecker) isImplementStringer(decl *ast.FuncDecl) bool { - if decl.Recv != nil && - decl.Name.Name == "String" && - decl.Type != nil && - len(decl.Type.Params.List) == 0 && - len(decl.Type.Results.List) == 1 && - astcast.ToIdent(decl.Type.Results.List[0].Type).Name == "string" { - return true - } - - return false -} - -func (c *hugeParamChecker) checkParams(params []*ast.Field) { - for _, p := range params { - for _, id := range p.Names { - typ := c.ctx.TypeOf(id) - size, ok := c.ctx.SizeOf(typ) - if ok && size >= c.sizeThreshold { - c.warn(id, size) - } - } - } -} - -func (c *hugeParamChecker) warn(cause *ast.Ident, size int64) { - c.ctx.Warn(cause, "%s is heavy (%d bytes); consider passing it by pointer", - cause, size) -} diff --git a/vendor/github.com/go-critic/go-critic/checkers/ifElseChain_checker.go b/vendor/github.com/go-critic/go-critic/checkers/ifElseChain_checker.go deleted file mode 100644 index e73c609d5..000000000 --- a/vendor/github.com/go-critic/go-critic/checkers/ifElseChain_checker.go +++ /dev/null @@ -1,110 +0,0 @@ -package checkers - -import ( - "go/ast" - - "github.com/go-critic/go-critic/checkers/internal/astwalk" - "github.com/go-critic/go-critic/linter" -) - -func init() { - var info linter.CheckerInfo - info.Name = "ifElseChain" - info.Tags = []string{linter.StyleTag} - info.Params = linter.CheckerParams{ - "minThreshold": { - Value: 2, - Usage: "min number of if-else blocks that makes the warning trigger", - }, - } - info.Summary = "Detects repeated if-else statements and suggests to replace them with switch statement" - info.Before = ` -if cond1 { - // Code A. -} else if cond2 { - // Code B. -} else { - // Code C. -}` - info.After = ` -switch { -case cond1: - // Code A. -case cond2: - // Code B. -default: - // Code C. -}` - info.Note = ` -Permits single else or else-if; repeated else-if or else + else-if -will trigger suggestion to use switch statement. 
-See [EffectiveGo#switch](https://golang.org/doc/effective_go.html#switch).` - - collection.AddChecker(&info, func(ctx *linter.CheckerContext) (linter.FileWalker, error) { - return astwalk.WalkerForStmt(&ifElseChainChecker{ - ctx: ctx, - minThreshold: info.Params.Int("minThreshold"), - }), nil - }) -} - -type ifElseChainChecker struct { - astwalk.WalkHandler - ctx *linter.CheckerContext - - cause *ast.IfStmt - visited map[*ast.IfStmt]bool - - minThreshold int -} - -func (c *ifElseChainChecker) EnterFunc(fn *ast.FuncDecl) bool { - if fn.Body == nil { - return false - } - c.visited = make(map[*ast.IfStmt]bool) - return true -} - -func (c *ifElseChainChecker) VisitStmt(stmt ast.Stmt) { - if stmt, ok := stmt.(*ast.IfStmt); ok { - if c.visited[stmt] { - return - } - c.cause = stmt - c.checkIfStmt(stmt) - } -} - -func (c *ifElseChainChecker) checkIfStmt(stmt *ast.IfStmt) { - if c.countIfelseLen(stmt) >= c.minThreshold { - c.warn() - } -} - -func (c *ifElseChainChecker) countIfelseLen(stmt *ast.IfStmt) int { - count := 0 - for { - if stmt.Init != nil { - return 0 // Give up - } - - switch e := stmt.Else.(type) { - case *ast.IfStmt: - // Else if. - stmt = e - count++ - c.visited[e] = true - case *ast.BlockStmt: - // Else branch. - return count + 1 - default: - // No else or else if. - return count - } - } -} - -func (c *ifElseChainChecker) warn() { - c.ctx.Warn(c.cause, "rewrite if-else to switch statement") -} diff --git a/vendor/github.com/go-critic/go-critic/checkers/importShadow_checker.go b/vendor/github.com/go-critic/go-critic/checkers/importShadow_checker.go deleted file mode 100644 index b690487b7..000000000 --- a/vendor/github.com/go-critic/go-critic/checkers/importShadow_checker.go +++ /dev/null @@ -1,47 +0,0 @@ -package checkers - -import ( - "go/ast" - "go/types" - - "github.com/go-critic/go-critic/checkers/internal/astwalk" - "github.com/go-critic/go-critic/linter" -) - -func init() { - var info linter.CheckerInfo - info.Name = "importShadow" - info.Tags = []string{linter.StyleTag, linter.OpinionatedTag} - info.Summary = "Detects when imported package names shadowed in the assignments" - info.Before = ` -// "path/filepath" is imported. 
-filepath := "foo.txt"` - info.After = ` -filename := "foo.txt"` - - collection.AddChecker(&info, func(ctx *linter.CheckerContext) (linter.FileWalker, error) { - ctx.Require.PkgObjects = true - return astwalk.WalkerForLocalDef(&importShadowChecker{ctx: ctx}, ctx.TypesInfo), nil - }) -} - -type importShadowChecker struct { - astwalk.WalkHandler - ctx *linter.CheckerContext -} - -func (c *importShadowChecker) VisitLocalDef(def astwalk.Name, _ ast.Expr) { - for pkgObj, name := range c.ctx.PkgObjects { - if name == def.ID.Name && name != "_" { - c.warn(def.ID, name, pkgObj.Imported()) - } - } -} - -func (c *importShadowChecker) warn(id ast.Node, importedName string, pkg *types.Package) { - if isStdlibPkg(pkg) { - c.ctx.Warn(id, "shadow of imported package '%s'", importedName) - } else { - c.ctx.Warn(id, "shadow of imported from '%s' package '%s'", pkg.Path(), importedName) - } -} diff --git a/vendor/github.com/go-critic/go-critic/checkers/initClause_checker.go b/vendor/github.com/go-critic/go-critic/checkers/initClause_checker.go deleted file mode 100644 index 8612717b2..000000000 --- a/vendor/github.com/go-critic/go-critic/checkers/initClause_checker.go +++ /dev/null @@ -1,57 +0,0 @@ -package checkers - -import ( - "go/ast" - - "github.com/go-critic/go-critic/checkers/internal/astwalk" - "github.com/go-critic/go-critic/linter" - - "github.com/go-toolsmith/astp" -) - -func init() { - var info linter.CheckerInfo - info.Name = "initClause" - info.Tags = []string{linter.StyleTag, linter.OpinionatedTag, linter.ExperimentalTag} - info.Summary = "Detects non-assignment statements inside if/switch init clause" - info.Before = `if sideEffect(); cond { -}` - info.After = `sideEffect() -if cond { -}` - - collection.AddChecker(&info, func(ctx *linter.CheckerContext) (linter.FileWalker, error) { - return astwalk.WalkerForStmt(&initClauseChecker{ctx: ctx}), nil - }) -} - -type initClauseChecker struct { - astwalk.WalkHandler - ctx *linter.CheckerContext -} - -func (c *initClauseChecker) VisitStmt(stmt ast.Stmt) { - initClause := c.getInitClause(stmt) - if initClause != nil && !astp.IsAssignStmt(initClause) { - c.warn(stmt, initClause) - } -} - -func (c *initClauseChecker) getInitClause(x ast.Stmt) ast.Stmt { - switch x := x.(type) { - case *ast.IfStmt: - return x.Init - case *ast.SwitchStmt: - return x.Init - default: - return nil - } -} - -func (c *initClauseChecker) warn(stmt, clause ast.Stmt) { - name := "if" - if astp.IsSwitchStmt(stmt) { - name = "switch" - } - c.ctx.Warn(stmt, "consider to move `%s` before %s", clause, name) -} diff --git a/vendor/github.com/go-critic/go-critic/checkers/internal/astwalk/comment_walker.go b/vendor/github.com/go-critic/go-critic/checkers/internal/astwalk/comment_walker.go deleted file mode 100644 index 6c60e3fed..000000000 --- a/vendor/github.com/go-critic/go-critic/checkers/internal/astwalk/comment_walker.go +++ /dev/null @@ -1,41 +0,0 @@ -package astwalk - -import ( - "go/ast" - "strings" -) - -type commentWalker struct { - visitor CommentVisitor -} - -func (w *commentWalker) WalkFile(f *ast.File) { - if !w.visitor.EnterFile(f) { - return - } - - for _, cg := range f.Comments { - visitCommentGroups(cg, w.visitor.VisitComment) - } -} - -func visitCommentGroups(cg *ast.CommentGroup, visit func(*ast.CommentGroup)) { - var group []*ast.Comment - visitGroup := func(list []*ast.Comment) { - if len(list) == 0 { - return - } - cg := &ast.CommentGroup{List: list} - visit(cg) - } - for _, comment := range cg.List { - if strings.HasPrefix(comment.Text, "/*") { - 
visitGroup(group) - group = group[:0] - visitGroup([]*ast.Comment{comment}) - } else { - group = append(group, comment) - } - } - visitGroup(group) -} diff --git a/vendor/github.com/go-critic/go-critic/checkers/internal/astwalk/doc_comment_walker.go b/vendor/github.com/go-critic/go-critic/checkers/internal/astwalk/doc_comment_walker.go deleted file mode 100644 index 39b536508..000000000 --- a/vendor/github.com/go-critic/go-critic/checkers/internal/astwalk/doc_comment_walker.go +++ /dev/null @@ -1,48 +0,0 @@ -package astwalk - -import ( - "go/ast" -) - -type docCommentWalker struct { - visitor DocCommentVisitor -} - -func (w *docCommentWalker) WalkFile(f *ast.File) { - for _, decl := range f.Decls { - switch decl := decl.(type) { - case *ast.FuncDecl: - if decl.Doc != nil { - w.visitor.VisitDocComment(decl.Doc) - } - case *ast.GenDecl: - if decl.Doc != nil { - w.visitor.VisitDocComment(decl.Doc) - } - for _, spec := range decl.Specs { - switch spec := spec.(type) { - case *ast.ImportSpec: - if spec.Doc != nil { - w.visitor.VisitDocComment(spec.Doc) - } - case *ast.ValueSpec: - if spec.Doc != nil { - w.visitor.VisitDocComment(spec.Doc) - } - case *ast.TypeSpec: - if spec.Doc != nil { - w.visitor.VisitDocComment(spec.Doc) - } - ast.Inspect(spec.Type, func(n ast.Node) bool { - if n, ok := n.(*ast.Field); ok { - if n.Doc != nil { - w.visitor.VisitDocComment(n.Doc) - } - } - return true - }) - } - } - } - } -} diff --git a/vendor/github.com/go-critic/go-critic/checkers/internal/astwalk/expr_walker.go b/vendor/github.com/go-critic/go-critic/checkers/internal/astwalk/expr_walker.go deleted file mode 100644 index de66c1081..000000000 --- a/vendor/github.com/go-critic/go-critic/checkers/internal/astwalk/expr_walker.go +++ /dev/null @@ -1,31 +0,0 @@ -package astwalk - -import ( - "go/ast" -) - -type exprWalker struct { - visitor ExprVisitor -} - -func (w *exprWalker) WalkFile(f *ast.File) { - if !w.visitor.EnterFile(f) { - return - } - - for _, decl := range f.Decls { - if decl, ok := decl.(*ast.FuncDecl); ok { - if !w.visitor.EnterFunc(decl) { - continue - } - } - - ast.Inspect(decl, func(x ast.Node) bool { - if x, ok := x.(ast.Expr); ok { - w.visitor.VisitExpr(x) - return !w.visitor.skipChilds() - } - return true - }) - } -} diff --git a/vendor/github.com/go-critic/go-critic/checkers/internal/astwalk/func_decl_walker.go b/vendor/github.com/go-critic/go-critic/checkers/internal/astwalk/func_decl_walker.go deleted file mode 100644 index c7e3a4371..000000000 --- a/vendor/github.com/go-critic/go-critic/checkers/internal/astwalk/func_decl_walker.go +++ /dev/null @@ -1,23 +0,0 @@ -package astwalk - -import ( - "go/ast" -) - -type funcDeclWalker struct { - visitor FuncDeclVisitor -} - -func (w *funcDeclWalker) WalkFile(f *ast.File) { - if !w.visitor.EnterFile(f) { - return - } - - for _, decl := range f.Decls { - decl, ok := decl.(*ast.FuncDecl) - if !ok || !w.visitor.EnterFunc(decl) { - continue - } - w.visitor.VisitFuncDecl(decl) - } -} diff --git a/vendor/github.com/go-critic/go-critic/checkers/internal/astwalk/local_comment_walker.go b/vendor/github.com/go-critic/go-critic/checkers/internal/astwalk/local_comment_walker.go deleted file mode 100644 index e042f0d5e..000000000 --- a/vendor/github.com/go-critic/go-critic/checkers/internal/astwalk/local_comment_walker.go +++ /dev/null @@ -1,32 +0,0 @@ -package astwalk - -import ( - "go/ast" -) - -type localCommentWalker struct { - visitor LocalCommentVisitor -} - -func (w *localCommentWalker) WalkFile(f *ast.File) { - if !w.visitor.EnterFile(f) { - return - 
} - - for _, decl := range f.Decls { - decl, ok := decl.(*ast.FuncDecl) - if !ok || !w.visitor.EnterFunc(decl) { - continue - } - - for _, cg := range f.Comments { - // Not sure that decls/comments are sorted - // by positions, so do a naive full scan for now. - if cg.Pos() < decl.Pos() || cg.Pos() > decl.End() { - continue - } - - visitCommentGroups(cg, w.visitor.VisitLocalComment) - } - } -} diff --git a/vendor/github.com/go-critic/go-critic/checkers/internal/astwalk/local_def_visitor.go b/vendor/github.com/go-critic/go-critic/checkers/internal/astwalk/local_def_visitor.go deleted file mode 100644 index 0c9c14955..000000000 --- a/vendor/github.com/go-critic/go-critic/checkers/internal/astwalk/local_def_visitor.go +++ /dev/null @@ -1,49 +0,0 @@ -package astwalk - -import ( - "go/ast" -) - -// LocalDefVisitor visits every name definitions inside a function. -// -// Next elements are considered as name definitions: -// - Function parameters (input, output, receiver) -// - Every LHS of ":=" assignment that defines a new name -// - Every local var/const declaration. -// -// NOTE: this visitor is experimental. -// This is also why it lives in a separate file. -type LocalDefVisitor interface { - walkerEvents - VisitLocalDef(Name, ast.Expr) -} - -// NameKind describes what kind of name Name object holds. -type NameKind int - -// Name holds ver/const/param definition symbol info. -type Name struct { - ID *ast.Ident - Kind NameKind - - // Index is NameVar-specific field that is used to - // specify nth tuple element being assigned to the name. - Index int -} - -// NOTE: set of name kinds is not stable and may change over time. -// -// TODO(quasilyte): is NameRecv/NameParam/NameResult granularity desired? -// TODO(quasilyte): is NameVar/NameBind (var vs :=) granularity desired? -const ( - // NameParam is function/method receiver/input/output name. - // Initializing expression is always nil. - NameParam NameKind = iota - // NameVar is var or ":=" declared name. - // Initializing expression may be nil for var-declared names - // without explicit initializing expression. - NameVar - // NameConst is const-declared name. - // Initializing expression is never nil. - NameConst -) diff --git a/vendor/github.com/go-critic/go-critic/checkers/internal/astwalk/local_def_walker.go b/vendor/github.com/go-critic/go-critic/checkers/internal/astwalk/local_def_walker.go deleted file mode 100644 index f6808cbb4..000000000 --- a/vendor/github.com/go-critic/go-critic/checkers/internal/astwalk/local_def_walker.go +++ /dev/null @@ -1,118 +0,0 @@ -package astwalk - -import ( - "go/ast" - "go/token" - "go/types" -) - -type localDefWalker struct { - visitor LocalDefVisitor - info *types.Info -} - -func (w *localDefWalker) WalkFile(f *ast.File) { - for _, decl := range f.Decls { - decl, ok := decl.(*ast.FuncDecl) - if !ok || !w.visitor.EnterFunc(decl) { - continue - } - w.walkFunc(decl) - } -} - -func (w *localDefWalker) walkFunc(decl *ast.FuncDecl) { - w.walkSignature(decl) - w.walkFuncBody(decl) -} - -func (w *localDefWalker) walkFuncBody(decl *ast.FuncDecl) { - ast.Inspect(decl.Body, func(x ast.Node) bool { - switch x := x.(type) { - case *ast.AssignStmt: - if x.Tok != token.DEFINE { - return false - } - if len(x.Lhs) != len(x.Rhs) { - // Multi-value assignment. - // Invariant: there is only 1 RHS. 
- for i, lhs := range x.Lhs { - id, ok := lhs.(*ast.Ident) - if !ok || w.info.Defs[id] == nil { - continue - } - def := Name{ID: id, Kind: NameVar, Index: i} - w.visitor.VisitLocalDef(def, x.Rhs[0]) - } - } else { - // Simple 1-1 assignments. - for i, lhs := range x.Lhs { - id, ok := lhs.(*ast.Ident) - if !ok || w.info.Defs[id] == nil { - continue - } - def := Name{ID: id, Kind: NameVar} - w.visitor.VisitLocalDef(def, x.Rhs[i]) - } - } - return false - - case *ast.GenDecl: - // Decls always introduce new names. - for _, spec := range x.Specs { - spec, ok := spec.(*ast.ValueSpec) - if !ok { // Ignore type/import specs - return false - } - switch { - case len(spec.Values) == 0: - // var-specific decls without explicit init. - for _, id := range spec.Names { - def := Name{ID: id, Kind: NameVar} - w.visitor.VisitLocalDef(def, nil) - } - case len(spec.Names) != len(spec.Values): - // var-specific decls that assign tuple results. - for i, id := range spec.Names { - def := Name{ID: id, Kind: NameVar, Index: i} - w.visitor.VisitLocalDef(def, spec.Values[0]) - } - default: - // Can be either var or const decl. - kind := NameVar - if x.Tok == token.CONST { - kind = NameConst - } - for i, id := range spec.Names { - def := Name{ID: id, Kind: kind} - w.visitor.VisitLocalDef(def, spec.Values[i]) - } - } - } - return false - } - - return true - }) -} - -func (w *localDefWalker) walkSignature(decl *ast.FuncDecl) { - for _, p := range decl.Type.Params.List { - for _, id := range p.Names { - def := Name{ID: id, Kind: NameParam} - w.visitor.VisitLocalDef(def, nil) - } - } - if decl.Type.Results != nil { - for _, p := range decl.Type.Results.List { - for _, id := range p.Names { - def := Name{ID: id, Kind: NameParam} - w.visitor.VisitLocalDef(def, nil) - } - } - } - if decl.Recv != nil && len(decl.Recv.List[0].Names) != 0 { - def := Name{ID: decl.Recv.List[0].Names[0], Kind: NameParam} - w.visitor.VisitLocalDef(def, nil) - } -} diff --git a/vendor/github.com/go-critic/go-critic/checkers/internal/astwalk/local_expr_walker.go b/vendor/github.com/go-critic/go-critic/checkers/internal/astwalk/local_expr_walker.go deleted file mode 100644 index e455b3f8b..000000000 --- a/vendor/github.com/go-critic/go-critic/checkers/internal/astwalk/local_expr_walker.go +++ /dev/null @@ -1,29 +0,0 @@ -package astwalk - -import ( - "go/ast" -) - -type localExprWalker struct { - visitor LocalExprVisitor -} - -func (w *localExprWalker) WalkFile(f *ast.File) { - if !w.visitor.EnterFile(f) { - return - } - - for _, decl := range f.Decls { - decl, ok := decl.(*ast.FuncDecl) - if !ok || !w.visitor.EnterFunc(decl) { - continue - } - ast.Inspect(decl.Body, func(x ast.Node) bool { - if x, ok := x.(ast.Expr); ok { - w.visitor.VisitLocalExpr(x) - return !w.visitor.skipChilds() - } - return true - }) - } -} diff --git a/vendor/github.com/go-critic/go-critic/checkers/internal/astwalk/stmt_list_walker.go b/vendor/github.com/go-critic/go-critic/checkers/internal/astwalk/stmt_list_walker.go deleted file mode 100644 index 403292f66..000000000 --- a/vendor/github.com/go-critic/go-critic/checkers/internal/astwalk/stmt_list_walker.go +++ /dev/null @@ -1,33 +0,0 @@ -package astwalk - -import ( - "go/ast" -) - -type stmtListWalker struct { - visitor StmtListVisitor -} - -func (w *stmtListWalker) WalkFile(f *ast.File) { - if !w.visitor.EnterFile(f) { - return - } - - for _, decl := range f.Decls { - decl, ok := decl.(*ast.FuncDecl) - if !ok || !w.visitor.EnterFunc(decl) { - continue - } - ast.Inspect(decl.Body, func(x ast.Node) bool { - switch x := 
x.(type) { - case *ast.BlockStmt: - w.visitor.VisitStmtList(x, x.List) - case *ast.CaseClause: - w.visitor.VisitStmtList(x, x.Body) - case *ast.CommClause: - w.visitor.VisitStmtList(x, x.Body) - } - return !w.visitor.skipChilds() - }) - } -} diff --git a/vendor/github.com/go-critic/go-critic/checkers/internal/astwalk/stmt_walker.go b/vendor/github.com/go-critic/go-critic/checkers/internal/astwalk/stmt_walker.go deleted file mode 100644 index 912de867d..000000000 --- a/vendor/github.com/go-critic/go-critic/checkers/internal/astwalk/stmt_walker.go +++ /dev/null @@ -1,29 +0,0 @@ -package astwalk - -import ( - "go/ast" -) - -type stmtWalker struct { - visitor StmtVisitor -} - -func (w *stmtWalker) WalkFile(f *ast.File) { - if !w.visitor.EnterFile(f) { - return - } - - for _, decl := range f.Decls { - decl, ok := decl.(*ast.FuncDecl) - if !ok || !w.visitor.EnterFunc(decl) { - continue - } - ast.Inspect(decl.Body, func(x ast.Node) bool { - if x, ok := x.(ast.Stmt); ok { - w.visitor.VisitStmt(x) - return !w.visitor.skipChilds() - } - return true - }) - } -} diff --git a/vendor/github.com/go-critic/go-critic/checkers/internal/astwalk/type_expr_walker.go b/vendor/github.com/go-critic/go-critic/checkers/internal/astwalk/type_expr_walker.go deleted file mode 100644 index bc9bdef47..000000000 --- a/vendor/github.com/go-critic/go-critic/checkers/internal/astwalk/type_expr_walker.go +++ /dev/null @@ -1,119 +0,0 @@ -package astwalk - -import ( - "go/ast" - "go/token" - "go/types" - - "github.com/go-toolsmith/astp" - "github.com/go-toolsmith/typep" -) - -type typeExprWalker struct { - visitor TypeExprVisitor - info *types.Info -} - -func (w *typeExprWalker) WalkFile(f *ast.File) { - if !w.visitor.EnterFile(f) { - return - } - - for _, decl := range f.Decls { - if decl, ok := decl.(*ast.FuncDecl); ok { - if !w.visitor.EnterFunc(decl) { - continue - } - } - switch decl := decl.(type) { - case *ast.FuncDecl: - if !w.visitor.EnterFunc(decl) { - continue - } - w.walkSignature(decl.Type) - ast.Inspect(decl.Body, w.walk) - case *ast.GenDecl: - if decl.Tok == token.IMPORT { - continue - } - ast.Inspect(decl, w.walk) - } - } -} - -func (w *typeExprWalker) visit(x ast.Expr) bool { - w.visitor.VisitTypeExpr(x) - return !w.visitor.skipChilds() -} - -func (w *typeExprWalker) walk(x ast.Node) bool { - switch x := x.(type) { - case *ast.ChanType: - return w.visit(x) - case *ast.ParenExpr: - if typep.IsTypeExpr(w.info, x.X) { - return w.visit(x) - } - return true - case *ast.CallExpr: - // Pointer conversions require parenthesis around pointer type. - // These casts are represented as call expressions. - // Because it's impossible for the visitor to distinguish such - // "required" parenthesis, walker skips outmost parenthesis in such cases. - return w.inspectInner(x.Fun) - case *ast.SelectorExpr: - // Like with conversions, method expressions are another special. - return w.inspectInner(x.X) - case *ast.StarExpr: - if typep.IsTypeExpr(w.info, x.X) { - return w.visit(x) - } - return true - case *ast.MapType: - return w.visit(x) - case *ast.FuncType: - return w.visit(x) - case *ast.StructType: - return w.visit(x) - case *ast.InterfaceType: - if !w.visit(x) { - return false - } - for _, method := range x.Methods.List { - switch x := method.Type.(type) { - case *ast.FuncType: - w.walkSignature(x) - default: - // Embedded interface. 
- w.walk(x) - } - } - return false - case *ast.ArrayType: - return w.visit(x) - } - return true -} - -func (w *typeExprWalker) inspectInner(x ast.Expr) bool { - parens, ok := x.(*ast.ParenExpr) - shouldInspect := ok && - typep.IsTypeExpr(w.info, parens.X) && - (astp.IsStarExpr(parens.X) || astp.IsFuncType(parens.X)) - if shouldInspect { - ast.Inspect(parens.X, w.walk) - return false - } - return true -} - -func (w *typeExprWalker) walkSignature(typ *ast.FuncType) { - for _, p := range typ.Params.List { - ast.Inspect(p.Type, w.walk) - } - if typ.Results != nil { - for _, p := range typ.Results.List { - ast.Inspect(p.Type, w.walk) - } - } -} diff --git a/vendor/github.com/go-critic/go-critic/checkers/internal/astwalk/visitor.go b/vendor/github.com/go-critic/go-critic/checkers/internal/astwalk/visitor.go deleted file mode 100644 index 3486a8e62..000000000 --- a/vendor/github.com/go-critic/go-critic/checkers/internal/astwalk/visitor.go +++ /dev/null @@ -1,77 +0,0 @@ -package astwalk - -import ( - "go/ast" -) - -// DocCommentVisitor visits every doc-comment. -// Does not visit doc-comments for function-local definitions (types, etc). -// Also does not visit package doc-comment (file-level doc-comments). -type DocCommentVisitor interface { - VisitDocComment(*ast.CommentGroup) -} - -// FuncDeclVisitor visits every top-level function declaration. -type FuncDeclVisitor interface { - walkerEvents - VisitFuncDecl(*ast.FuncDecl) -} - -// ExprVisitor visits every expression inside AST file. -type ExprVisitor interface { - walkerEvents - VisitExpr(ast.Expr) -} - -// LocalExprVisitor visits every expression inside function body. -type LocalExprVisitor interface { - walkerEvents - VisitLocalExpr(ast.Expr) -} - -// StmtListVisitor visits every statement list inside function body. -// This includes block statement bodies as well as implicit blocks -// introduced by case clauses and alike. -type StmtListVisitor interface { - walkerEvents - VisitStmtList(ast.Node, []ast.Stmt) -} - -// StmtVisitor visits every statement inside function body. -type StmtVisitor interface { - walkerEvents - VisitStmt(ast.Stmt) -} - -// TypeExprVisitor visits every type describing expression. -// It also traverses struct types and interface types to run -// checker over their fields/method signatures. -type TypeExprVisitor interface { - walkerEvents - VisitTypeExpr(ast.Expr) -} - -// LocalCommentVisitor visits every comment inside function body. -type LocalCommentVisitor interface { - walkerEvents - VisitLocalComment(*ast.CommentGroup) -} - -// CommentVisitor visits every comment. -type CommentVisitor interface { - walkerEvents - VisitComment(*ast.CommentGroup) -} - -// walkerEvents describes common hooks available for most visitor types. -type walkerEvents interface { - // EnterFile is called for every file that is about to be traversed. - // If false is returned, file is not visited. - EnterFile(*ast.File) bool - - // EnterFunc is called for every function declaration that is about - // to be traversed. If false is returned, function is not visited. 
- EnterFunc(*ast.FuncDecl) bool - - skipChilds() bool -} diff --git a/vendor/github.com/go-critic/go-critic/checkers/internal/astwalk/walk_handler.go b/vendor/github.com/go-critic/go-critic/checkers/internal/astwalk/walk_handler.go deleted file mode 100644 index 1f6e948d5..000000000 --- a/vendor/github.com/go-critic/go-critic/checkers/internal/astwalk/walk_handler.go +++ /dev/null @@ -1,34 +0,0 @@ -package astwalk - -import ( - "go/ast" -) - -// WalkHandler is a type to be embedded into every checker -// that uses astwalk walkers. -type WalkHandler struct { - // SkipChilds controls whether currently analyzed - // node childs should be traversed. - // - // Value is reset after each visitor invocation, - // so there is no need to set value back to false. - SkipChilds bool -} - -// EnterFile is a default walkerEvents.EnterFile implementation -// that reports every file as accepted candidate for checking. -func (w *WalkHandler) EnterFile(f *ast.File) bool { - return true -} - -// EnterFunc is a default walkerEvents.EnterFunc implementation -// that skips extern function (ones that do not have body). -func (w *WalkHandler) EnterFunc(decl *ast.FuncDecl) bool { - return decl.Body != nil -} - -func (w *WalkHandler) skipChilds() bool { - v := w.SkipChilds - w.SkipChilds = false - return v -} diff --git a/vendor/github.com/go-critic/go-critic/checkers/internal/astwalk/walker.go b/vendor/github.com/go-critic/go-critic/checkers/internal/astwalk/walker.go deleted file mode 100644 index f838a64c1..000000000 --- a/vendor/github.com/go-critic/go-critic/checkers/internal/astwalk/walker.go +++ /dev/null @@ -1,57 +0,0 @@ -package astwalk - -import ( - "go/types" - - "github.com/go-critic/go-critic/linter" -) - -// WalkerForFuncDecl returns file walker implementation for FuncDeclVisitor. -func WalkerForFuncDecl(v FuncDeclVisitor) linter.FileWalker { - return &funcDeclWalker{visitor: v} -} - -// WalkerForExpr returns file walker implementation for ExprVisitor. -func WalkerForExpr(v ExprVisitor) linter.FileWalker { - return &exprWalker{visitor: v} -} - -// WalkerForLocalExpr returns file walker implementation for LocalExprVisitor. -func WalkerForLocalExpr(v LocalExprVisitor) linter.FileWalker { - return &localExprWalker{visitor: v} -} - -// WalkerForStmtList returns file walker implementation for StmtListVisitor. -func WalkerForStmtList(v StmtListVisitor) linter.FileWalker { - return &stmtListWalker{visitor: v} -} - -// WalkerForStmt returns file walker implementation for StmtVisitor. -func WalkerForStmt(v StmtVisitor) linter.FileWalker { - return &stmtWalker{visitor: v} -} - -// WalkerForTypeExpr returns file walker implementation for TypeExprVisitor. -func WalkerForTypeExpr(v TypeExprVisitor, info *types.Info) linter.FileWalker { - return &typeExprWalker{visitor: v, info: info} -} - -// WalkerForLocalComment returns file walker implementation for LocalCommentVisitor. -func WalkerForLocalComment(v LocalCommentVisitor) linter.FileWalker { - return &localCommentWalker{visitor: v} -} - -// WalkerForComment returns file walker implementation for CommentVisitor. -func WalkerForComment(v CommentVisitor) linter.FileWalker { - return &commentWalker{visitor: v} -} - -// WalkerForDocComment returns file walker implementation for DocCommentVisitor. -func WalkerForDocComment(v DocCommentVisitor) linter.FileWalker { - return &docCommentWalker{visitor: v} -} - -// WalkerForLocalDef returns file walker implementation for LocalDefVisitor. 
-func WalkerForLocalDef(v LocalDefVisitor, info *types.Info) linter.FileWalker { - return &localDefWalker{visitor: v, info: info} -} diff --git a/vendor/github.com/go-critic/go-critic/checkers/internal/lintutil/astfind.go b/vendor/github.com/go-critic/go-critic/checkers/internal/lintutil/astfind.go deleted file mode 100644 index a6d0ad7c4..000000000 --- a/vendor/github.com/go-critic/go-critic/checkers/internal/lintutil/astfind.go +++ /dev/null @@ -1,41 +0,0 @@ -package lintutil - -import ( - "go/ast" - - "golang.org/x/tools/go/ast/astutil" -) - -// FindNode applies pred for root and all it's childs until it returns true. -// If followFunc is defined, it's called before following any node to check whether it needs to be followed. -// followFunc has to return true in order to continuing traversing the node and return false otherwise. -// Matched node is returned. -// If none of the nodes matched predicate, nil is returned. -func FindNode(root ast.Node, followFunc, pred func(ast.Node) bool) ast.Node { - var ( - found ast.Node - preFunc func(*astutil.Cursor) bool - ) - - if followFunc != nil { - preFunc = func(cur *astutil.Cursor) bool { - return followFunc(cur.Node()) - } - } - - astutil.Apply(root, - preFunc, - func(cur *astutil.Cursor) bool { - if pred(cur.Node()) { - found = cur.Node() - return false - } - return true - }) - return found -} - -// ContainsNode reports whether `FindNode(root, pred)!=nil`. -func ContainsNode(root ast.Node, pred func(ast.Node) bool) bool { - return FindNode(root, nil, pred) != nil -} diff --git a/vendor/github.com/go-critic/go-critic/checkers/internal/lintutil/astflow.go b/vendor/github.com/go-critic/go-critic/checkers/internal/lintutil/astflow.go deleted file mode 100644 index f64907d69..000000000 --- a/vendor/github.com/go-critic/go-critic/checkers/internal/lintutil/astflow.go +++ /dev/null @@ -1,86 +0,0 @@ -package lintutil - -import ( - "go/ast" - "go/token" - "go/types" - - "github.com/go-toolsmith/astequal" - "github.com/go-toolsmith/astp" - "github.com/go-toolsmith/typep" -) - -// Different utilities to make simple analysis over typed ast values flow. -// -// It's primitive and can't replace SSA, but the bright side is that -// it does not require building an additional IR eagerly. -// Expected to be used sparingly inside a few checkers. -// -// If proven really useful, can be moved to go-toolsmith library. - -// IsImmutable reports whether n can be modified through any operation. -func IsImmutable(info *types.Info, n ast.Expr) bool { - if astp.IsBasicLit(n) { - return true - } - tv, ok := info.Types[n] - return ok && !tv.Assignable() && !tv.Addressable() -} - -// CouldBeMutated reports whether dst can be modified inside body. -// -// Note that it does not take already existing pointers to dst. -// An example of safe and correct usage is checking of something -// that was just defined, so the dst is a result of that definition. -func CouldBeMutated(info *types.Info, body ast.Node, dst ast.Expr) bool { - if IsImmutable(info, dst) { // Fast path. - return false - } - - // We don't track pass-by-value. - // If it's already a pointer, passing it by value - // means that there can be a potential indirect modification. - // - // It's possible to be less conservative here and find at least - // one such value pass before giving up. - if typep.IsPointer(info.TypeOf(dst)) { - return true - } - - var isDst func(x ast.Expr) bool - if dst, ok := dst.(*ast.Ident); ok { - // Identifier can be shadowed, - // so we need to check the object as well. 
- obj := info.ObjectOf(dst) - if obj == nil { - return true // Being conservative - } - isDst = func(x ast.Expr) bool { - id, ok := x.(*ast.Ident) - return ok && id.Name == dst.Name && info.ObjectOf(id) == obj - } - } else { - isDst = func(x ast.Expr) bool { - return astequal.Expr(dst, x) - } - } - - return ContainsNode(body, func(n ast.Node) bool { - switch n := n.(type) { - case *ast.UnaryExpr: - if n.Op == token.AND && isDst(n.X) { - return true // Address taken - } - case *ast.AssignStmt: - for _, lhs := range n.Lhs { - if isDst(lhs) { - return true - } - } - case *ast.IncDecStmt: - // Incremented or decremented. - return isDst(n.X) - } - return false - }) -} diff --git a/vendor/github.com/go-critic/go-critic/checkers/internal/lintutil/astset.go b/vendor/github.com/go-critic/go-critic/checkers/internal/lintutil/astset.go deleted file mode 100644 index ebe7835e5..000000000 --- a/vendor/github.com/go-critic/go-critic/checkers/internal/lintutil/astset.go +++ /dev/null @@ -1,44 +0,0 @@ -package lintutil - -import ( - "go/ast" - - "github.com/go-toolsmith/astequal" -) - -// AstSet is a simple ast.Node set. -// Zero value is ready to use set. -// Can be reused after Clear call. -type AstSet struct { - items []ast.Node -} - -// Contains reports whether s contains x. -func (s *AstSet) Contains(x ast.Node) bool { - for i := range s.items { - if astequal.Node(s.items[i], x) { - return true - } - } - return false -} - -// Insert pushes x in s if it's not already there. -// Returns true if element was inserted. -func (s *AstSet) Insert(x ast.Node) bool { - if s.Contains(x) { - return false - } - s.items = append(s.items, x) - return true -} - -// Clear removes all element from set. -func (s *AstSet) Clear() { - s.items = s.items[:0] -} - -// Len returns the number of elements contained inside s. -func (s *AstSet) Len() int { - return len(s.items) -} diff --git a/vendor/github.com/go-critic/go-critic/checkers/internal/lintutil/zero_value.go b/vendor/github.com/go-critic/go-critic/checkers/internal/lintutil/zero_value.go deleted file mode 100644 index 4370f5818..000000000 --- a/vendor/github.com/go-critic/go-critic/checkers/internal/lintutil/zero_value.go +++ /dev/null @@ -1,94 +0,0 @@ -package lintutil - -import ( - "go/ast" - "go/constant" - "go/token" - "go/types" -) - -// IsZeroValue reports whether x represents zero value of its type. -// -// The functions is conservative and may return false for zero values -// if some cases are not handled in a comprehensive way -// but is should never return true for something that's not a proper zv. -func IsZeroValue(info *types.Info, x ast.Expr) bool { - switch x := x.(type) { - case *ast.BasicLit: - typ := info.TypeOf(x).Underlying().(*types.Basic) - v := info.Types[x].Value - var z constant.Value - switch { - case typ.Kind() == types.String: - z = constant.MakeString("") - case typ.Info()&types.IsInteger != 0: - z = constant.MakeInt64(0) - case typ.Info()&types.IsUnsigned != 0: - z = constant.MakeUint64(0) - case typ.Info()&types.IsFloat != 0: - z = constant.MakeFloat64(0) - default: - return false - } - return constant.Compare(v, token.EQL, z) - - case *ast.CompositeLit: - return len(x.Elts) == 0 - - default: - // Note that this function is not comprehensive. - return false - } -} - -// ZeroValueOf returns a zero value expression for typeExpr of type typ. -// If function can't find such a value, nil is returned. 
-func ZeroValueOf(typeExpr ast.Expr, typ types.Type) ast.Expr { - switch utyp := typ.Underlying().(type) { - case *types.Basic: - info := utyp.Info() - var zv ast.Expr - switch { - case info&types.IsInteger != 0: - zv = &ast.BasicLit{Kind: token.INT, Value: "0"} - case info&types.IsFloat != 0: - zv = &ast.BasicLit{Kind: token.FLOAT, Value: "0.0"} - case info&types.IsString != 0: - zv = &ast.BasicLit{Kind: token.STRING, Value: `""`} - case info&types.IsBoolean != 0: - zv = &ast.Ident{Name: "false"} - } - if isDefaultLiteralType(typ) { - return zv - } - return &ast.CallExpr{ - Fun: typeExpr, - Args: []ast.Expr{zv}, - } - - case *types.Slice, *types.Map, *types.Pointer, *types.Interface: - return &ast.CallExpr{ - Fun: typeExpr, - Args: []ast.Expr{&ast.Ident{Name: "nil"}}, - } - - case *types.Array, *types.Struct: - return &ast.CompositeLit{Type: typeExpr} - - default: - return nil - } -} - -func isDefaultLiteralType(typ types.Type) bool { - btyp, ok := typ.(*types.Basic) - if !ok { - return false - } - switch btyp.Kind() { - case types.Bool, types.Int, types.Float64, types.String: - return true - default: - return false - } -} diff --git a/vendor/github.com/go-critic/go-critic/checkers/mapKey_checker.go b/vendor/github.com/go-critic/go-critic/checkers/mapKey_checker.go deleted file mode 100644 index 2885dc725..000000000 --- a/vendor/github.com/go-critic/go-critic/checkers/mapKey_checker.go +++ /dev/null @@ -1,125 +0,0 @@ -package checkers - -import ( - "go/ast" - "go/types" - "strings" - - "github.com/go-critic/go-critic/checkers/internal/astwalk" - "github.com/go-critic/go-critic/checkers/internal/lintutil" - "github.com/go-critic/go-critic/linter" - - "github.com/go-toolsmith/astcast" - "github.com/go-toolsmith/astp" - "github.com/go-toolsmith/typep" -) - -func init() { - var info linter.CheckerInfo - info.Name = "mapKey" - info.Tags = []string{linter.DiagnosticTag} - info.Summary = "Detects suspicious map literal keys" - info.Before = ` -_ = map[string]int{ - "foo": 1, - "bar ": 2, -}` - info.After = ` -_ = map[string]int{ - "foo": 1, - "bar": 2, -}` - - collection.AddChecker(&info, func(ctx *linter.CheckerContext) (linter.FileWalker, error) { - return astwalk.WalkerForExpr(&mapKeyChecker{ctx: ctx}), nil - }) -} - -type mapKeyChecker struct { - astwalk.WalkHandler - ctx *linter.CheckerContext - - astSet lintutil.AstSet -} - -func (c *mapKeyChecker) VisitExpr(expr ast.Expr) { - lit := astcast.ToCompositeLit(expr) - if len(lit.Elts) < 2 { - return - } - - typ, ok := c.ctx.TypeOf(lit).Underlying().(*types.Map) - if !ok { - return - } - if !typep.HasStringKind(typ.Key().Underlying()) { - return - } - - c.checkWhitespace(lit) - c.checkDuplicates(lit) -} - -func (c *mapKeyChecker) checkDuplicates(lit *ast.CompositeLit) { - c.astSet.Clear() - - for _, elt := range lit.Elts { - kv := astcast.ToKeyValueExpr(elt) - if astp.IsBasicLit(kv.Key) { - // Basic lits are handled by the compiler. - continue - } - if !typep.SideEffectFree(c.ctx.TypesInfo, kv.Key) { - continue - } - if !c.astSet.Insert(kv.Key) { - c.warnDupKey(kv.Key) - } - } -} - -func (c *mapKeyChecker) checkWhitespace(lit *ast.CompositeLit) { - var whitespaceKey ast.Node - for _, elt := range lit.Elts { - key := astcast.ToBasicLit(astcast.ToKeyValueExpr(elt).Key) - if len(key.Value) < len(`" "`) { - continue - } - // s is unquoted string literal value. - s := key.Value[len(`"`) : len(key.Value)-len(`"`)] - if !strings.Contains(s, " ") { - continue - } - if whitespaceKey != nil { - // Already seen something with a whitespace. 
- // More than one entry => not suspicious. - return - } - if s == " " { - // If space is used as a key, maybe this map - // has something to do with spaces. Give up. - return - } - // Check if it has exactly 1 space prefix or suffix. - bad := strings.HasPrefix(s, " ") && !strings.HasPrefix(s, " ") || - strings.HasSuffix(s, " ") && !strings.HasSuffix(s, " ") - if !bad { - // These spaces can be a padding, - // or a legitimate part of a key. Give up. - return - } - whitespaceKey = key - } - - if whitespaceKey != nil { - c.warnWhitespace(whitespaceKey) - } -} - -func (c *mapKeyChecker) warnWhitespace(key ast.Node) { - c.ctx.Warn(key, "suspicious whitespace in %s key", key) -} - -func (c *mapKeyChecker) warnDupKey(key ast.Node) { - c.ctx.Warn(key, "suspicious duplicate %s key", key) -} diff --git a/vendor/github.com/go-critic/go-critic/checkers/methodExprCall_checker.go b/vendor/github.com/go-critic/go-critic/checkers/methodExprCall_checker.go deleted file mode 100644 index 755d3b472..000000000 --- a/vendor/github.com/go-critic/go-critic/checkers/methodExprCall_checker.go +++ /dev/null @@ -1,58 +0,0 @@ -package checkers - -import ( - "go/ast" - "go/token" - - "github.com/go-critic/go-critic/checkers/internal/astwalk" - "github.com/go-critic/go-critic/linter" - - "github.com/go-toolsmith/astcast" - "github.com/go-toolsmith/astcopy" - "github.com/go-toolsmith/typep" -) - -func init() { - var info linter.CheckerInfo - info.Name = "methodExprCall" - info.Tags = []string{linter.StyleTag, linter.ExperimentalTag} - info.Summary = "Detects method expression call that can be replaced with a method call" - info.Before = `f := foo{} -foo.bar(f)` - info.After = `f := foo{} -f.bar()` - - collection.AddChecker(&info, func(ctx *linter.CheckerContext) (linter.FileWalker, error) { - return astwalk.WalkerForExpr(&methodExprCallChecker{ctx: ctx}), nil - }) -} - -type methodExprCallChecker struct { - astwalk.WalkHandler - ctx *linter.CheckerContext -} - -func (c *methodExprCallChecker) VisitExpr(x ast.Expr) { - call := astcast.ToCallExpr(x) - s := astcast.ToSelectorExpr(call.Fun) - - if len(call.Args) < 1 || astcast.ToIdent(call.Args[0]).Name == "nil" { - return - } - - if typep.IsTypeExpr(c.ctx.TypesInfo, s.X) { - c.warn(call, s) - } -} - -func (c *methodExprCallChecker) warn(cause *ast.CallExpr, s *ast.SelectorExpr) { - selector := astcopy.SelectorExpr(s) - selector.X = cause.Args[0] - - // Remove "&" from the receiver (if any). 
- if u, ok := selector.X.(*ast.UnaryExpr); ok && u.Op == token.AND { - selector.X = u.X - } - - c.ctx.Warn(cause, "consider to change `%s` to `%s`", cause.Fun, selector) -} diff --git a/vendor/github.com/go-critic/go-critic/checkers/nestingReduce_checker.go b/vendor/github.com/go-critic/go-critic/checkers/nestingReduce_checker.go deleted file mode 100644 index dfe73018c..000000000 --- a/vendor/github.com/go-critic/go-critic/checkers/nestingReduce_checker.go +++ /dev/null @@ -1,73 +0,0 @@ -package checkers - -import ( - "go/ast" - - "github.com/go-critic/go-critic/checkers/internal/astwalk" - "github.com/go-critic/go-critic/linter" -) - -func init() { - var info linter.CheckerInfo - info.Name = "nestingReduce" - info.Tags = []string{linter.StyleTag, linter.OpinionatedTag, linter.ExperimentalTag} - info.Params = linter.CheckerParams{ - "bodyWidth": { - Value: 5, - Usage: "min number of statements inside a branch to trigger a warning", - }, - } - info.Summary = "Finds where nesting level could be reduced" - info.Before = ` -for _, v := range a { - if v.Bool { - body() - } -}` - info.After = ` -for _, v := range a { - if !v.Bool { - continue - } - body() -}` - - collection.AddChecker(&info, func(ctx *linter.CheckerContext) (linter.FileWalker, error) { - c := &nestingReduceChecker{ctx: ctx} - c.bodyWidth = info.Params.Int("bodyWidth") - return astwalk.WalkerForStmt(c), nil - }) -} - -type nestingReduceChecker struct { - astwalk.WalkHandler - ctx *linter.CheckerContext - - bodyWidth int -} - -func (c *nestingReduceChecker) VisitStmt(stmt ast.Stmt) { - switch stmt := stmt.(type) { - case *ast.ForStmt: - c.checkLoopBody(stmt.Body.List) - case *ast.RangeStmt: - c.checkLoopBody(stmt.Body.List) - } -} - -func (c *nestingReduceChecker) checkLoopBody(body []ast.Stmt) { - if len(body) != 1 { - return - } - stmt, ok := body[0].(*ast.IfStmt) - if !ok { - return - } - if len(stmt.Body.List) >= c.bodyWidth && stmt.Else == nil { - c.warnLoop(stmt) - } -} - -func (c *nestingReduceChecker) warnLoop(cause ast.Node) { - c.ctx.Warn(cause, "invert if cond, replace body with `continue`, move old body after the statement") -} diff --git a/vendor/github.com/go-critic/go-critic/checkers/newDeref_checker.go b/vendor/github.com/go-critic/go-critic/checkers/newDeref_checker.go deleted file mode 100644 index 1a1b05e0d..000000000 --- a/vendor/github.com/go-critic/go-critic/checkers/newDeref_checker.go +++ /dev/null @@ -1,51 +0,0 @@ -package checkers - -import ( - "go/ast" - "go/types" - - "github.com/go-critic/go-critic/checkers/internal/astwalk" - "github.com/go-critic/go-critic/checkers/internal/lintutil" - "github.com/go-critic/go-critic/linter" - - "github.com/go-toolsmith/astcast" - "golang.org/x/tools/go/ast/astutil" -) - -func init() { - var info linter.CheckerInfo - info.Name = "newDeref" - info.Tags = []string{linter.StyleTag} - info.Summary = "Detects immediate dereferencing of `new` expressions" - info.Before = `x := *new(bool)` - info.After = `x := false` - - collection.AddChecker(&info, func(ctx *linter.CheckerContext) (linter.FileWalker, error) { - return astwalk.WalkerForExpr(&newDerefChecker{ctx: ctx}), nil - }) -} - -type newDerefChecker struct { - astwalk.WalkHandler - ctx *linter.CheckerContext -} - -func (c *newDerefChecker) VisitExpr(expr ast.Expr) { - deref := astcast.ToStarExpr(expr) - call := astcast.ToCallExpr(deref.X) - if astcast.ToIdent(call.Fun).Name == "new" { - typ := c.ctx.TypeOf(call.Args[0]) - // allow *new(T) if T is a type parameter, see #1272 for details - if _, ok := 
typ.(*types.TypeParam); ok { - return - } - zv := lintutil.ZeroValueOf(astutil.Unparen(call.Args[0]), typ) - if zv != nil { - c.warn(expr, zv) - } - } -} - -func (c *newDerefChecker) warn(cause, suggestion ast.Expr) { - c.ctx.Warn(cause, "replace `%s` with `%s`", cause, suggestion) -} diff --git a/vendor/github.com/go-critic/go-critic/checkers/nilValReturn_checker.go b/vendor/github.com/go-critic/go-critic/checkers/nilValReturn_checker.go deleted file mode 100644 index 9a1213f5c..000000000 --- a/vendor/github.com/go-critic/go-critic/checkers/nilValReturn_checker.go +++ /dev/null @@ -1,72 +0,0 @@ -package checkers - -import ( - "go/ast" - "go/token" - - "github.com/go-critic/go-critic/checkers/internal/astwalk" - "github.com/go-critic/go-critic/linter" - - "github.com/go-toolsmith/astequal" - "github.com/go-toolsmith/typep" -) - -func init() { - var info linter.CheckerInfo - info.Name = "nilValReturn" - info.Tags = []string{linter.DiagnosticTag, linter.ExperimentalTag} - info.Summary = "Detects return statements those results evaluate to nil" - info.Before = ` -if err == nil { - return err -}` - info.After = ` -// (A) - return nil explicitly -if err == nil { - return nil -} -// (B) - typo in "==", change to "!=" -if err != nil { - return err -}` - - collection.AddChecker(&info, func(ctx *linter.CheckerContext) (linter.FileWalker, error) { - return astwalk.WalkerForStmt(&nilValReturnChecker{ctx: ctx}), nil - }) -} - -type nilValReturnChecker struct { - astwalk.WalkHandler - ctx *linter.CheckerContext -} - -func (c *nilValReturnChecker) VisitStmt(stmt ast.Stmt) { - ifStmt, ok := stmt.(*ast.IfStmt) - if !ok || len(ifStmt.Body.List) != 1 { - return - } - ret, ok := ifStmt.Body.List[0].(*ast.ReturnStmt) - if !ok { - return - } - expr, ok := ifStmt.Cond.(*ast.BinaryExpr) - if !ok { - return - } - xIsNil := expr.Op == token.EQL && - typep.SideEffectFree(c.ctx.TypesInfo, expr.X) && - qualifiedName(expr.Y) == "nil" - if !xIsNil { - return - } - for _, res := range ret.Results { - if astequal.Expr(expr.X, res) { - c.warn(ret, expr.X) - break - } - } -} - -func (c *nilValReturnChecker) warn(cause, val ast.Node) { - c.ctx.Warn(cause, "returned expr is always nil; replace %s with nil", val) -} diff --git a/vendor/github.com/go-critic/go-critic/checkers/octalLiteral_checker.go b/vendor/github.com/go-critic/go-critic/checkers/octalLiteral_checker.go deleted file mode 100644 index a25fac85c..000000000 --- a/vendor/github.com/go-critic/go-critic/checkers/octalLiteral_checker.go +++ /dev/null @@ -1,51 +0,0 @@ -package checkers - -import ( - "go/ast" - "go/token" - "strings" - "unicode" - - "github.com/go-critic/go-critic/checkers/internal/astwalk" - "github.com/go-critic/go-critic/linter" - - "github.com/go-toolsmith/astcast" -) - -func init() { - var info linter.CheckerInfo - info.Name = "octalLiteral" - info.Tags = []string{linter.StyleTag, linter.ExperimentalTag, linter.OpinionatedTag} - info.Summary = "Detects old-style octal literals" - info.Before = `foo(02)` - info.After = `foo(0o2)` - - collection.AddChecker(&info, func(ctx *linter.CheckerContext) (linter.FileWalker, error) { - return astwalk.WalkerForExpr(&octalLiteralChecker{ctx: ctx}), nil - }) -} - -type octalLiteralChecker struct { - astwalk.WalkHandler - ctx *linter.CheckerContext -} - -func (c *octalLiteralChecker) VisitExpr(expr ast.Expr) { - if !c.ctx.GoVersion.GreaterOrEqual(linter.GoVersion{Major: 1, Minor: 13}) { - return - } - lit := astcast.ToBasicLit(expr) - if lit.Kind != token.INT { - return - } - if !strings.HasPrefix(lit.Value, 
"0") || len(lit.Value) == 1 { - return - } - if unicode.IsDigit(rune(lit.Value[1])) { - c.warn(lit) - } -} - -func (c *octalLiteralChecker) warn(lit *ast.BasicLit) { - c.ctx.Warn(lit, "use new octal literal style, 0o%s", lit.Value[len("0"):]) -} diff --git a/vendor/github.com/go-critic/go-critic/checkers/paramTypeCombine_checker.go b/vendor/github.com/go-critic/go-critic/checkers/paramTypeCombine_checker.go deleted file mode 100644 index c777fec9e..000000000 --- a/vendor/github.com/go-critic/go-critic/checkers/paramTypeCombine_checker.go +++ /dev/null @@ -1,97 +0,0 @@ -package checkers - -import ( - "go/ast" - - "github.com/go-critic/go-critic/checkers/internal/astwalk" - "github.com/go-critic/go-critic/linter" - - "github.com/go-toolsmith/astcopy" - "github.com/go-toolsmith/astequal" -) - -func init() { - var info linter.CheckerInfo - info.Name = "paramTypeCombine" - info.Tags = []string{linter.StyleTag, linter.OpinionatedTag} - info.Summary = "Detects if function parameters could be combined by type and suggest the way to do it" - info.Before = `func foo(a, b int, c, d int, e, f int, g int) {}` - info.After = `func foo(a, b, c, d, e, f, g int) {}` - - collection.AddChecker(&info, func(ctx *linter.CheckerContext) (linter.FileWalker, error) { - return astwalk.WalkerForFuncDecl(¶mTypeCombineChecker{ctx: ctx}), nil - }) -} - -type paramTypeCombineChecker struct { - astwalk.WalkHandler - ctx *linter.CheckerContext -} - -func (c *paramTypeCombineChecker) EnterFunc(*ast.FuncDecl) bool { - return true -} - -func (c *paramTypeCombineChecker) VisitFuncDecl(decl *ast.FuncDecl) { - typ := c.optimizeFuncType(decl.Type) - if !astequal.Expr(typ, decl.Type) { - c.warn(decl.Type, typ) - } -} - -func (c *paramTypeCombineChecker) optimizeFuncType(f *ast.FuncType) *ast.FuncType { - optimizedParamFunc := astcopy.FuncType(f) - - optimizedParamFunc.Params = c.optimizeParams(f.Params) - optimizedParamFunc.Results = c.optimizeParams(f.Results) - - return optimizedParamFunc -} - -func (c *paramTypeCombineChecker) optimizeParams(params *ast.FieldList) *ast.FieldList { - // To avoid false positives, skip unnamed param lists. - // - // We're using a property that Go only permits unnamed params - // for the whole list, so it's enough to check whether any of - // ast.Field have empty name list. - skip := params == nil || - len(params.List) < 2 || - len(params.List[0].Names) == 0 || - c.paramsAreMultiLine(params) - if skip { - return params - } - - list := []*ast.Field{} - names := make([]*ast.Ident, len(params.List[0].Names)) - copy(names, params.List[0].Names) - list = append(list, &ast.Field{ - Names: names, - Type: params.List[0].Type, - }) - for i, p := range params.List[1:] { - names = make([]*ast.Ident, len(p.Names)) - copy(names, p.Names) - if astequal.Expr(p.Type, params.List[i].Type) { - list[len(list)-1].Names = append(list[len(list)-1].Names, names...) 
- } else { - list = append(list, &ast.Field{ - Names: names, - Type: params.List[i+1].Type, - }) - } - } - return &ast.FieldList{ - List: list, - } -} - -func (c *paramTypeCombineChecker) warn(f1, f2 *ast.FuncType) { - c.ctx.Warn(f1, "%s could be replaced with %s", f1, f2) -} - -func (c *paramTypeCombineChecker) paramsAreMultiLine(params *ast.FieldList) bool { - startPos := c.ctx.FileSet.Position(params.Opening) - endPos := c.ctx.FileSet.Position(params.Closing) - return startPos.Line != endPos.Line -} diff --git a/vendor/github.com/go-critic/go-critic/checkers/ptrToRefParam_checker.go b/vendor/github.com/go-critic/go-critic/checkers/ptrToRefParam_checker.go deleted file mode 100644 index 172a4acb5..000000000 --- a/vendor/github.com/go-critic/go-critic/checkers/ptrToRefParam_checker.go +++ /dev/null @@ -1,70 +0,0 @@ -package checkers - -import ( - "go/ast" - "go/types" - - "github.com/go-critic/go-critic/checkers/internal/astwalk" - "github.com/go-critic/go-critic/linter" -) - -func init() { - var info linter.CheckerInfo - info.Name = "ptrToRefParam" - info.Tags = []string{linter.StyleTag, linter.OpinionatedTag, linter.ExperimentalTag} - info.Summary = "Detects input and output parameters that have a type of pointer to referential type" - info.Before = `func f(m *map[string]int) (*chan *int)` - info.After = `func f(m map[string]int) (chan *int)` - - collection.AddChecker(&info, func(ctx *linter.CheckerContext) (linter.FileWalker, error) { - return astwalk.WalkerForFuncDecl(&ptrToRefParamChecker{ctx: ctx}), nil - }) -} - -type ptrToRefParamChecker struct { - astwalk.WalkHandler - ctx *linter.CheckerContext -} - -func (c *ptrToRefParamChecker) VisitFuncDecl(fn *ast.FuncDecl) { - c.checkParams(fn.Type.Params.List) - if fn.Type.Results != nil { - c.checkParams(fn.Type.Results.List) - } -} - -func (c *ptrToRefParamChecker) checkParams(params []*ast.Field) { - for _, param := range params { - ptr, ok := c.ctx.TypeOf(param.Type).(*types.Pointer) - if !ok { - continue - } - - if c.isRefType(ptr.Elem()) { - if len(param.Names) == 0 { - c.ctx.Warn(param, "consider to make non-pointer type for `%s`", param.Type) - } else { - for i := range param.Names { - c.warn(param.Names[i]) - } - } - } - } -} - -func (c *ptrToRefParamChecker) isRefType(x types.Type) bool { - switch typ := x.(type) { - case *types.Map, *types.Chan, *types.Interface: - return true - case *types.Named: - // Handle underlying type only for interfaces. 
- if _, ok := typ.Underlying().(*types.Interface); ok { - return true - } - } - return false -} - -func (c *ptrToRefParamChecker) warn(id *ast.Ident) { - c.ctx.Warn(id, "consider `%s' to be of non-pointer type", id) -} diff --git a/vendor/github.com/go-critic/go-critic/checkers/rangeExprCopy_checker.go b/vendor/github.com/go-critic/go-critic/checkers/rangeExprCopy_checker.go deleted file mode 100644 index 3f61ee0bd..000000000 --- a/vendor/github.com/go-critic/go-critic/checkers/rangeExprCopy_checker.go +++ /dev/null @@ -1,80 +0,0 @@ -package checkers - -import ( - "go/ast" - "go/types" - - "github.com/go-critic/go-critic/checkers/internal/astwalk" - "github.com/go-critic/go-critic/linter" -) - -func init() { - var info linter.CheckerInfo - info.Name = "rangeExprCopy" - info.Tags = []string{linter.PerformanceTag} - info.Params = linter.CheckerParams{ - "sizeThreshold": { - Value: 512, - Usage: "size in bytes that makes the warning trigger", - }, - "skipTestFuncs": { - Value: true, - Usage: "whether to check test functions", - }, - } - info.Summary = "Detects expensive copies of `for` loop range expressions" - info.Details = "Suggests to use pointer to array to avoid the copy using `&` on range expression." - info.Before = ` -var xs [2048]byte -for _, x := range xs { // Copies 2048 bytes - // Loop body. -}` - info.After = ` -var xs [2048]byte -for _, x := range &xs { // No copy - // Loop body. -}` - info.Note = "See Go issue for details: https://github.com/golang/go/issues/15812." - - collection.AddChecker(&info, func(ctx *linter.CheckerContext) (linter.FileWalker, error) { - c := &rangeExprCopyChecker{ctx: ctx} - c.sizeThreshold = int64(info.Params.Int("sizeThreshold")) - c.skipTestFuncs = info.Params.Bool("skipTestFuncs") - return astwalk.WalkerForStmt(c), nil - }) -} - -type rangeExprCopyChecker struct { - astwalk.WalkHandler - ctx *linter.CheckerContext - - sizeThreshold int64 - skipTestFuncs bool -} - -func (c *rangeExprCopyChecker) EnterFunc(fn *ast.FuncDecl) bool { - return fn.Body != nil && - !(c.skipTestFuncs && isUnitTestFunc(c.ctx, fn)) -} - -func (c *rangeExprCopyChecker) VisitStmt(stmt ast.Stmt) { - rng, ok := stmt.(*ast.RangeStmt) - if !ok || rng.Key == nil || rng.Value == nil { - return - } - tv := c.ctx.TypesInfo.Types[rng.X] - if !tv.Addressable() { - return - } - if _, ok := tv.Type.(*types.Array); !ok { - return - } - if size, ok := c.ctx.SizeOf(tv.Type); ok && size >= c.sizeThreshold { - c.warn(rng, size) - } -} - -func (c *rangeExprCopyChecker) warn(rng *ast.RangeStmt, size int64) { - c.ctx.Warn(rng, "copy of %s (%d bytes) can be avoided with &%s", - rng.X, size, rng.X) -} diff --git a/vendor/github.com/go-critic/go-critic/checkers/rangeValCopy_checker.go b/vendor/github.com/go-critic/go-critic/checkers/rangeValCopy_checker.go deleted file mode 100644 index 6d15c30cd..000000000 --- a/vendor/github.com/go-critic/go-critic/checkers/rangeValCopy_checker.go +++ /dev/null @@ -1,76 +0,0 @@ -package checkers - -import ( - "go/ast" - - "github.com/go-critic/go-critic/checkers/internal/astwalk" - "github.com/go-critic/go-critic/linter" -) - -func init() { - var info linter.CheckerInfo - info.Name = "rangeValCopy" - info.Tags = []string{linter.PerformanceTag} - info.Params = linter.CheckerParams{ - "sizeThreshold": { - Value: 128, - Usage: "size in bytes that makes the warning trigger", - }, - "skipTestFuncs": { - Value: true, - Usage: "whether to check test functions", - }, - } - info.Summary = "Detects loops that copy big objects during each iteration" - info.Details = "Suggests 
to use index access or take address and make use pointer instead." - info.Before = ` -xs := make([][1024]byte, length) -for _, x := range xs { - // Loop body. -}` - info.After = ` -xs := make([][1024]byte, length) -for i := range xs { - x := &xs[i] - // Loop body. -}` - - collection.AddChecker(&info, func(ctx *linter.CheckerContext) (linter.FileWalker, error) { - c := &rangeValCopyChecker{ctx: ctx} - c.sizeThreshold = int64(info.Params.Int("sizeThreshold")) - c.skipTestFuncs = info.Params.Bool("skipTestFuncs") - return astwalk.WalkerForStmt(c), nil - }) -} - -type rangeValCopyChecker struct { - astwalk.WalkHandler - ctx *linter.CheckerContext - - sizeThreshold int64 - skipTestFuncs bool -} - -func (c *rangeValCopyChecker) EnterFunc(fn *ast.FuncDecl) bool { - return fn.Body != nil && - !(c.skipTestFuncs && isUnitTestFunc(c.ctx, fn)) -} - -func (c *rangeValCopyChecker) VisitStmt(stmt ast.Stmt) { - rng, ok := stmt.(*ast.RangeStmt) - if !ok || rng.Value == nil { - return - } - typ := c.ctx.TypeOf(rng.Value) - if typ == nil { - return - } - size, ok := c.ctx.SizeOf(typ) - if ok && size >= c.sizeThreshold { - c.warn(rng, size) - } -} - -func (c *rangeValCopyChecker) warn(n ast.Node, size int64) { - c.ctx.Warn(n, "each iteration copies %d bytes (consider pointers or indexing)", size) -} diff --git a/vendor/github.com/go-critic/go-critic/checkers/regexpPattern_checker.go b/vendor/github.com/go-critic/go-critic/checkers/regexpPattern_checker.go deleted file mode 100644 index 45aba261b..000000000 --- a/vendor/github.com/go-critic/go-critic/checkers/regexpPattern_checker.go +++ /dev/null @@ -1,68 +0,0 @@ -package checkers - -import ( - "go/ast" - "go/constant" - "regexp" - "strings" - - "github.com/go-critic/go-critic/checkers/internal/astwalk" - "github.com/go-critic/go-critic/linter" -) - -func init() { - var info linter.CheckerInfo - info.Name = "regexpPattern" - info.Tags = []string{linter.DiagnosticTag, linter.ExperimentalTag} - info.Summary = "Detects suspicious regexp patterns" - info.Before = "regexp.MustCompile(`google.com|yandex.ru`)" - info.After = "regexp.MustCompile(`google\\.com|yandex\\.ru`)" - - collection.AddChecker(&info, func(ctx *linter.CheckerContext) (linter.FileWalker, error) { - domains := []string{ - "com", - "org", - "info", - "net", - "ru", - "de", - } - - allDomains := strings.Join(domains, "|") - domainRE := regexp.MustCompile(`[^\\]\.(` + allDomains + `)\b`) - return astwalk.WalkerForExpr(®expPatternChecker{ - ctx: ctx, - domainRE: domainRE, - }), nil - }) -} - -type regexpPatternChecker struct { - astwalk.WalkHandler - ctx *linter.CheckerContext - - domainRE *regexp.Regexp -} - -func (c *regexpPatternChecker) VisitExpr(x ast.Expr) { - call, ok := x.(*ast.CallExpr) - if !ok { - return - } - - switch qualifiedName(call.Fun) { - case "regexp.Compile", "regexp.CompilePOSIX", "regexp.MustCompile", "regexp.MustCompilePosix": - cv := c.ctx.TypesInfo.Types[call.Args[0]].Value - if cv == nil || cv.Kind() != constant.String { - return - } - s := constant.StringVal(cv) - if m := c.domainRE.FindStringSubmatch(s); m != nil { - c.warnDomain(call.Args[0], m[1]) - } - } -} - -func (c *regexpPatternChecker) warnDomain(cause ast.Expr, domain string) { - c.ctx.Warn(cause, "'.%s' should probably be '\\.%s'", domain, domain) -} diff --git a/vendor/github.com/go-critic/go-critic/checkers/regexpSimplify_checker.go b/vendor/github.com/go-critic/go-critic/checkers/regexpSimplify_checker.go deleted file mode 100644 index f500f4350..000000000 --- 
a/vendor/github.com/go-critic/go-critic/checkers/regexpSimplify_checker.go +++ /dev/null @@ -1,512 +0,0 @@ -package checkers - -import ( - "fmt" - "go/ast" - "go/constant" - "log" - "strings" - "unicode/utf8" - - "github.com/go-critic/go-critic/checkers/internal/astwalk" - "github.com/go-critic/go-critic/linter" - - "github.com/quasilyte/regex/syntax" -) - -func init() { - var info linter.CheckerInfo - info.Name = "regexpSimplify" - info.Tags = []string{linter.StyleTag, linter.ExperimentalTag, linter.OpinionatedTag} - info.Summary = "Detects regexp patterns that can be simplified" - info.Before = "regexp.MustCompile(`(?:a|b|c) [a-z][a-z]*`)" - info.After = "regexp.MustCompile(`[abc] {3}[a-z]+`)" - - // TODO(quasilyte): add params to control most opinionated replacements - // like `[0-9] -> \d` - // `[[:digit:]] -> \d` - // `[A-Za-z0-9_]` -> `\w` - - collection.AddChecker(&info, func(ctx *linter.CheckerContext) (linter.FileWalker, error) { - opts := &syntax.ParserOptions{ - NoLiterals: true, - } - c := ®expSimplifyChecker{ - ctx: ctx, - parser: syntax.NewParser(opts), - out: &strings.Builder{}, - } - return astwalk.WalkerForExpr(c), nil - }) -} - -type regexpSimplifyChecker struct { - astwalk.WalkHandler - ctx *linter.CheckerContext - parser *syntax.Parser - - // out is a tmp buffer where we build a simplified regexp pattern. - out *strings.Builder - // score is a number of applied simplifications - score int -} - -func (c *regexpSimplifyChecker) VisitExpr(x ast.Expr) { - call, ok := x.(*ast.CallExpr) - if !ok { - return - } - - switch qualifiedName(call.Fun) { - case "regexp.Compile", "regexp.MustCompile": - cv := c.ctx.TypesInfo.Types[call.Args[0]].Value - if cv == nil || cv.Kind() != constant.String { - return - } - pat := constant.StringVal(cv) - if len(pat) > 60 { - // Skip scary regexp patterns for now. - break - } - - // Only do 2 passes. - simplified := pat - for pass := 0; pass < 2; pass++ { - candidate := c.simplify(pass, simplified) - if candidate == "" { - break - } - simplified = candidate - } - if simplified != "" && simplified != pat { - c.warn(call.Args[0], pat, simplified) - } - } -} - -func (c *regexpSimplifyChecker) simplify(pass int, pat string) string { - re, err := c.parser.Parse(pat) - if err != nil { - return "" - } - - c.score = 0 - c.out.Reset() - - // TODO(quasilyte): suggest char ranges for things like [012345689]? - // TODO(quasilyte): evaluate char range to suggest better replacements. - // TODO(quasilyte): (?:ab|ac) -> a[bc] - // TODO(quasilyte): suggest "s" and "." flag if things like [\w\W] are used. - // TODO(quasilyte): x{n}x? -> x{n,n+1} - - c.walk(re.Expr) - - if debug() { - // This happens only in one of two cases: - // 1. Parser has a bug and we got invalid AST for the given pattern. - // 2. Simplifier incorrectly built a replacement string from the AST. 
- if c.score == 0 && c.out.String() != pat { - log.Printf("pass %d: unexpected pattern diff:\n\thave: %q\n\twant: %q", - pass, c.out.String(), pat) - } - } - - if c.score > 0 { - return c.out.String() - } - return "" -} - -func (c *regexpSimplifyChecker) walk(e syntax.Expr) { - out := c.out - - switch e.Op { - case syntax.OpConcat: - c.walkConcat(e) - - case syntax.OpAlt: - c.walkAlt(e) - - case syntax.OpCharRange: - s := c.simplifyCharRange(e) - if s != "" { - out.WriteString(s) - c.score++ - } else { - out.WriteString(e.Value) - } - - case syntax.OpGroupWithFlags: - out.WriteString("(") - out.WriteString(e.Args[1].Value) - out.WriteString(":") - c.walk(e.Args[0]) - out.WriteString(")") - case syntax.OpGroup: - c.walkGroup(e) - case syntax.OpCapture: - out.WriteString("(") - c.walk(e.Args[0]) - out.WriteString(")") - case syntax.OpNamedCapture: - out.WriteString("(?P<") - out.WriteString(e.Args[1].Value) - out.WriteString(">") - c.walk(e.Args[0]) - out.WriteString(")") - - case syntax.OpRepeat: - // TODO(quasilyte): is it worth it to analyze repeat argument - // more closely and handle `{n,n} -> {n}` cases? - rep := e.Args[1].Value - switch rep { - case "{0,1}": - c.walk(e.Args[0]) - out.WriteString("?") - c.score++ - case "{1,}": - c.walk(e.Args[0]) - out.WriteString("+") - c.score++ - case "{0,}": - c.walk(e.Args[0]) - out.WriteString("*") - c.score++ - case "{0}": - // Maybe {0} should be reported by another check, regexpLint? - c.score++ - case "{1}": - c.walk(e.Args[0]) - c.score++ - default: - c.walk(e.Args[0]) - out.WriteString(rep) - } - - case syntax.OpPosixClass: - out.WriteString(e.Value) - - case syntax.OpNegCharClass: - s := c.simplifyNegCharClass(e) - if s != "" { - c.out.WriteString(s) - c.score++ - } else { - out.WriteString("[^") - for _, e := range e.Args { - c.walk(e) - } - out.WriteString("]") - } - - case syntax.OpCharClass: - s := c.simplifyCharClass(e) - if s != "" { - c.out.WriteString(s) - c.score++ - } else { - out.WriteString("[") - for _, e := range e.Args { - c.walk(e) - } - out.WriteString("]") - } - - case syntax.OpEscapeChar: - switch e.Value { - case `\&`, `\#`, `\!`, `\@`, `\%`, `\<`, `\>`, `\:`, `\;`, `\/`, `\,`, `\=`, `\.`: - c.score++ - out.WriteString(e.Value[len(`\`):]) - default: - out.WriteString(e.Value) - } - - case syntax.OpQuestion, syntax.OpNonGreedy: - c.walk(e.Args[0]) - out.WriteString("?") - case syntax.OpStar: - c.walk(e.Args[0]) - out.WriteString("*") - case syntax.OpPlus: - c.walk(e.Args[0]) - out.WriteString("+") - - default: - out.WriteString(e.Value) - } -} - -func (c *regexpSimplifyChecker) walkGroup(g syntax.Expr) { - switch g.Args[0].Op { - case syntax.OpChar, syntax.OpEscapeChar, syntax.OpEscapeMeta, syntax.OpCharClass: - c.walk(g.Args[0]) - c.score++ - return - } - - c.out.WriteString("(?:") - c.walk(g.Args[0]) - c.out.WriteString(")") -} - -func (c *regexpSimplifyChecker) simplifyNegCharClass(e syntax.Expr) string { - switch e.Value { - case `[^0-9]`: - return `\D` - case `[^\s]`: - return `\S` - case `[^\S]`: - return `\s` - case `[^\w]`: - return `\W` - case `[^\W]`: - return `\w` - case `[^\d]`: - return `\D` - case `[^\D]`: - return `\d` - case `[^[:^space:]]`: - return `\s` - case `[^[:space:]]`: - return `\S` - case `[^[:^word:]]`: - return `\w` - case `[^[:word:]]`: - return `\W` - case `[^[:^digit:]]`: - return `\d` - case `[^[:digit:]]`: - return `\D` - } - - return "" -} - -func (c *regexpSimplifyChecker) simplifyCharClass(e syntax.Expr) string { - switch e.Value { - case `[0-9]`: - return `\d` - case `[[:word:]]`: - 
return `\w` - case `[[:^word:]]`: - return `\W` - case `[[:digit:]]`: - return `\d` - case `[[:^digit:]]`: - return `\D` - case `[[:space:]]`: - return `\s` - case `[[:^space:]]`: - return `\S` - case `[][]`: - return `\]\[` - case `[]]`: - return `\]` - } - - if len(e.Args) == 1 { - switch e.Args[0].Op { - case syntax.OpChar: - switch v := e.Args[0].Value; v { - case "|", "*", "+", "?", ".", "[", "^", "$", "(", ")": - // Can't take outside of the char group without escaping. - default: - return v - } - case syntax.OpEscapeChar: - return e.Args[0].Value - } - } - - return "" -} - -func (c *regexpSimplifyChecker) canMerge(x, y syntax.Expr) bool { - if x.Op != y.Op { - return false - } - switch x.Op { - case syntax.OpChar, syntax.OpCharClass, syntax.OpEscapeMeta, syntax.OpEscapeChar, syntax.OpNegCharClass, syntax.OpGroup: - return x.Value == y.Value - default: - return false - } -} - -func (c *regexpSimplifyChecker) canCombine(x, y syntax.Expr) (threshold int, ok bool) { - if x.Op != y.Op { - return 0, false - } - - switch x.Op { - case syntax.OpDot: - return 3, true - - case syntax.OpChar: - if x.Value != y.Value { - return 0, false - } - if x.Value == " " { - return 1, true - } - return 4, true - - case syntax.OpEscapeMeta, syntax.OpEscapeChar: - if x.Value == y.Value { - return 2, true - } - - case syntax.OpCharClass, syntax.OpNegCharClass, syntax.OpGroup: - if x.Value == y.Value { - return 1, true - } - } - - return 0, false -} - -func (c *regexpSimplifyChecker) concatLiteral(e syntax.Expr) string { - if e.Op == syntax.OpConcat && c.allChars(e) { - return e.Value - } - return "" -} - -func (c *regexpSimplifyChecker) allChars(e syntax.Expr) bool { - for _, a := range e.Args { - if a.Op != syntax.OpChar { - return false - } - } - return true -} - -func (c *regexpSimplifyChecker) factorPrefixSuffix(alt syntax.Expr) bool { - // TODO: more forms of prefixes/suffixes? - // - // A more generalized algorithm could handle `fo|fo1|fo2` -> `fo[12]?`. - // but it's an open question whether the latter form universally better. - // - // Right now it handles only the simplest cases: - // `http|https` -> `https?` - // `xfoo|foo` -> `x?foo` - if len(alt.Args) != 2 { - return false - } - x := c.concatLiteral(alt.Args[0]) - y := c.concatLiteral(alt.Args[1]) - if x == y { - return false // Reject non-literals and identical strings early - } - - // Let x be a shorter string. - if len(x) > len(y) { - x, y = y, x - } - // Do we have a common prefix? - tail := strings.TrimPrefix(y, x) - if len(tail) <= utf8.UTFMax && utf8.RuneCountInString(tail) == 1 { - c.out.WriteString(x + tail + "?") - c.score++ - return true - } - // Do we have a common suffix? - head := strings.TrimSuffix(y, x) - if len(head) <= utf8.UTFMax && utf8.RuneCountInString(head) == 1 { - c.out.WriteString(head + "?" + x) - c.score++ - return true - } - return false -} - -func (c *regexpSimplifyChecker) walkAlt(alt syntax.Expr) { - // `x|y|z` -> `[xyz]`. - if c.allChars(alt) { - c.score++ - c.out.WriteString("[") - for _, e := range alt.Args { - c.out.WriteString(e.Value) - } - c.out.WriteString("]") - return - } - - if c.factorPrefixSuffix(alt) { - return - } - - for i, e := range alt.Args { - c.walk(e) - if i != len(alt.Args)-1 { - c.out.WriteString("|") - } - } -} - -func (c *regexpSimplifyChecker) walkConcat(concat syntax.Expr) { - i := 0 - for i < len(concat.Args) { - x := concat.Args[i] - c.walk(x) - i++ - - if i >= len(concat.Args) { - break - } - - // Try merging `xy*` into `x+` where x=y. 
- if concat.Args[i].Op == syntax.OpStar { - if c.canMerge(x, concat.Args[i].Args[0]) { - c.out.WriteString("+") - c.score++ - i++ - continue - } - } - - // Try combining `xy` into `x{2}` where x=y. - threshold, ok := c.canCombine(x, concat.Args[i]) - if !ok { - continue - } - n := 1 // Can combine at least 1 pair. - for j := i + 1; j < len(concat.Args); j++ { - _, ok := c.canCombine(x, concat.Args[j]) - if !ok { - break - } - n++ - } - if n >= threshold { - fmt.Fprintf(c.out, "{%d}", n+1) - c.score++ - i += n - } - } -} - -func (c *regexpSimplifyChecker) simplifyCharRange(rng syntax.Expr) string { - if rng.Args[0].Op != syntax.OpChar || rng.Args[1].Op != syntax.OpChar { - return "" - } - - lo := rng.Args[0].Value - hi := rng.Args[1].Value - if len(lo) == 1 && len(hi) == 1 { - switch hi[0] - lo[0] { - case 0: - return lo - case 1: - return lo + hi - case 2: - return lo + string(lo[0]+1) + hi - } - } - - return "" -} - -func (c *regexpSimplifyChecker) warn(cause ast.Expr, orig, suggest string) { - c.ctx.Warn(cause, "can re-write `%s` as `%s`", orig, suggest) -} diff --git a/vendor/github.com/go-critic/go-critic/checkers/ruleguard_checker.go b/vendor/github.com/go-critic/go-critic/checkers/ruleguard_checker.go deleted file mode 100644 index 29723a69a..000000000 --- a/vendor/github.com/go-critic/go-critic/checkers/ruleguard_checker.go +++ /dev/null @@ -1,322 +0,0 @@ -package checkers - -import ( - "bytes" - "errors" - "fmt" - "go/ast" - "go/token" - "log" - "os" - "path/filepath" - "sort" - "strings" - - "github.com/go-critic/go-critic/linter" - - "github.com/quasilyte/go-ruleguard/ruleguard" -) - -func init() { - var info linter.CheckerInfo - info.Name = "ruleguard" - info.Tags = []string{linter.StyleTag, linter.ExperimentalTag} - info.Params = linter.CheckerParams{ - "rules": { - Value: "", - Usage: "comma-separated list of gorule file paths. Glob patterns such as 'rules-*.go' may be specified", - }, - "debug": { - Value: "", - Usage: "enable debug for the specified named rules group", - }, - "failOnError": { - Value: false, - Usage: "deprecated, use failOn param; if set to true, identical to failOn='all', otherwise failOn=''", - }, - "failOn": { - Value: "", - Usage: `Determines the behavior when an error occurs while parsing ruleguard files. -If flag is not set, log error and skip rule files that contain an error. -If flag is set, the value must be a comma-separated list of error conditions. -* 'import': rule refers to a package that cannot be loaded. -* 'dsl': gorule file does not comply with the ruleguard DSL.`, - }, - "enable": { - Value: "", - Usage: "comma-separated list of enabled groups or skip empty to enable everything", - }, - "disable": { - Value: "", - Usage: "comma-separated list of disabled groups or skip empty to enable everything", - }, - } - info.Summary = "Runs user-defined rules using ruleguard linter" - info.Details = "Reads a rules file and turns them into go-critic checkers." - info.Before = `N/A` - info.After = `N/A` - info.Note = "See https://github.com/quasilyte/go-ruleguard." - - collection.AddChecker(&info, func(ctx *linter.CheckerContext) (linter.FileWalker, error) { - return newRuleguardChecker(&info, ctx) - }) -} - -// parseErrorHandler is used to determine whether to ignore or fail ruleguard parsing errors. -type parseErrorHandler struct { - // failureConditions is a map of predicates which are evaluated against a ruleguard parsing error. - // If at least one predicate returns true, then an error is returned. - // Otherwise, the ruleguard file is skipped. 
- failureConditions map[string]func(err error) bool -} - -// failOnParseError returns true if a parseError occurred and that error should be not be ignored. -func (e parseErrorHandler) failOnParseError(parseError error) bool { - for _, p := range e.failureConditions { - if p(parseError) { - return true - } - } - return false -} - -func newErrorHandler(failOnErrorFlag string) (*parseErrorHandler, error) { - h := parseErrorHandler{ - failureConditions: make(map[string]func(err error) bool), - } - failOnErrorPredicates := map[string]func(error) bool{ - "dsl": func(err error) bool { var e *ruleguard.ImportError; return !errors.As(err, &e) }, - "import": func(err error) bool { var e *ruleguard.ImportError; return errors.As(err, &e) }, - "all": func(err error) bool { return true }, - } - for _, k := range strings.Split(failOnErrorFlag, ",") { - if k == "" { - continue - } - if p, ok := failOnErrorPredicates[k]; ok { - h.failureConditions[k] = p - } else { - // Wrong flag value. - supportedValues := []string{} - for key := range failOnErrorPredicates { - supportedValues = append(supportedValues, key) - } - return nil, fmt.Errorf("ruleguard init error: 'failOnError' flag '%s' is invalid. It must be a comma-separated list and supported values are '%s'", - k, strings.Join(supportedValues, ",")) - } - } - return &h, nil -} - -func newRuleguardChecker(info *linter.CheckerInfo, ctx *linter.CheckerContext) (*ruleguardChecker, error) { - c := &ruleguardChecker{ - ctx: ctx, - debugGroup: info.Params.String("debug"), - } - rulesFlag := info.Params.String("rules") - if rulesFlag == "" { - return c, nil - } - failOn := info.Params.String("failOn") - if failOn == "" { - if info.Params.Bool("failOnError") { - failOn = "all" - } - } - h, err := newErrorHandler(failOn) - if err != nil { - return nil, err - } - - engine := ruleguard.NewEngine() - engine.InferBuildContext() - fset := token.NewFileSet() - filePatterns := strings.Split(rulesFlag, ",") - - enabledGroups := make(map[string]bool) - disabledGroups := make(map[string]bool) - enabledTags := make(map[string]bool) - disabledTags := make(map[string]bool) - - for _, g := range strings.Split(info.Params.String("disable"), ",") { - g = strings.TrimSpace(g) - if strings.HasPrefix(g, "#") { - disabledTags[strings.TrimPrefix(g, "#")] = true - continue - } - - disabledGroups[g] = true - } - flagEnable := info.Params.String("enable") - if flagEnable != "" { - for _, g := range strings.Split(flagEnable, ",") { - g = strings.TrimSpace(g) - if strings.HasPrefix(g, "#") { - enabledTags[strings.TrimPrefix(g, "#")] = true - continue - } - - enabledGroups[g] = true - } - } - - if !enabledTags[linter.ExperimentalTag] { - disabledTags[linter.ExperimentalTag] = true - } - ruleguardDebug := os.Getenv("GOCRITIC_RULEGUARD_DEBUG") != "" - - inEnabledTags := func(g *ruleguard.GoRuleGroup) bool { - for _, t := range g.DocTags { - if enabledTags[t] { - return true - } - } - return false - } - inDisabledTags := func(g *ruleguard.GoRuleGroup) string { - for _, t := range g.DocTags { - if disabledTags[t] { - return t - } - } - return "" - } - - loadContext := &ruleguard.LoadContext{ - Fset: fset, - DebugImports: ruleguardDebug, - DebugPrint: debugPrint, - GroupFilter: func(g *ruleguard.GoRuleGroup) bool { - enabled := flagEnable == "" || enabledGroups[g.Name] || inEnabledTags(g) - whyDisabled := "" - - switch { - case !enabled: - whyDisabled = "not enabled by name or tag (-enable flag)" - case disabledGroups[g.Name]: - whyDisabled = "disabled by name (-disable flag)" - default: - if 
tag := inDisabledTags(g); tag != "" { - whyDisabled = fmt.Sprintf("disabled by %s tag (-disable flag)", tag) - } - } - - if ruleguardDebug { - if whyDisabled != "" { - debugPrint(fmt.Sprintf("(-) %s is %s", g.Name, whyDisabled)) - } else { - debugPrint(fmt.Sprintf("(+) %s is enabled", g.Name)) - } - } - return whyDisabled == "" - }, - } - - loaded := 0 - for _, filePattern := range filePatterns { - filenames, err := filepath.Glob(strings.TrimSpace(filePattern)) - if err != nil { - // The only possible returned error is ErrBadPattern, when pattern is malformed. - log.Printf("ruleguard init error: %+v", err) - continue - } - if len(filenames) == 0 { - return nil, fmt.Errorf("ruleguard init error: no file matching '%s'", strings.TrimSpace(filePattern)) - } - for _, filename := range filenames { - data, err := os.ReadFile(filename) - if err != nil { - if h.failOnParseError(err) { - return nil, fmt.Errorf("ruleguard init error: %+v", err) - } - log.Printf("ruleguard init error, skip %s: %+v", filename, err) - } - if err := engine.Load(loadContext, filename, bytes.NewReader(data)); err != nil { - if h.failOnParseError(err) { - return nil, fmt.Errorf("ruleguard init error: %+v", err) - } - log.Printf("ruleguard init error, skip %s: %+v", filename, err) - } - loaded++ - } - } - - if loaded != 0 { - c.engine = engine - } - return c, nil -} - -type ruleguardChecker struct { - ctx *linter.CheckerContext - - debugGroup string - engine *ruleguard.Engine -} - -func (c *ruleguardChecker) WalkFile(f *ast.File) { - if c.engine == nil { - return - } - - runRuleguardEngine(c.ctx, f, c.engine, &ruleguard.RunContext{ - Debug: c.debugGroup, - DebugPrint: func(s string) { - fmt.Fprintln(os.Stderr, s) - }, - Pkg: c.ctx.Pkg, - Types: c.ctx.TypesInfo, - Sizes: c.ctx.SizesInfo, - Fset: c.ctx.FileSet, - TruncateLen: 100, - }) -} - -func runRuleguardEngine(ctx *linter.CheckerContext, f *ast.File, e *ruleguard.Engine, runCtx *ruleguard.RunContext) { - type ruleguardReport struct { - pos token.Pos - message string - fix linter.QuickFix - } - var reports []ruleguardReport - - runCtx.Report = func(data *ruleguard.ReportData) { - // TODO(quasilyte): investigate whether we should add a rule name as - // a message prefix here. - r := ruleguardReport{ - pos: data.Node.Pos(), - message: data.Message, - } - fix := data.Suggestion - if fix != nil { - r.fix = linter.QuickFix{ - From: fix.From, - To: fix.To, - Replacement: fix.Replacement, - } - } - reports = append(reports, r) - } - - if err := e.Run(runCtx, f); err != nil { - // Normally this should never happen, but since - // we don't have a better mechanism to report errors, - // emit a warning. - ctx.Warn(f, "execution error: %v", err) - } - - sort.Slice(reports, func(i, j int) bool { - return reports[i].message < reports[j].message - }) - for _, report := range reports { - if report.fix.Replacement != nil { - ctx.WarnFixableWithPos(report.pos, report.fix, "%s", report.message) - } else { - ctx.WarnWithPos(report.pos, "%s", report.message) - } - } -} - -func debugPrint(s string) { - fmt.Println("debug:", s) -} diff --git a/vendor/github.com/go-critic/go-critic/checkers/rulesdata/rulesdata.go b/vendor/github.com/go-critic/go-critic/checkers/rulesdata/rulesdata.go deleted file mode 100644 index 4ab31076f..000000000 --- a/vendor/github.com/go-critic/go-critic/checkers/rulesdata/rulesdata.go +++ /dev/null @@ -1,2521 +0,0 @@ -// Code generated by "precompile.go". DO NOT EDIT. 
- -package rulesdata - -import "github.com/quasilyte/go-ruleguard/ruleguard/ir" - -var PrecompiledRules = &ir.File{ - PkgPath: "gorules", - CustomDecls: []string{}, - BundleImports: []ir.BundleImport{}, - RuleGroups: []ir.RuleGroup{ - { - Line: 11, - Name: "redundantSprint", - MatcherName: "m", - DocTags: []string{"style", "experimental"}, - DocSummary: "Detects redundant fmt.Sprint calls", - DocBefore: "fmt.Sprint(x)", - DocAfter: "x.String()", - Rules: []ir.Rule{ - { - Line: 12, - SyntaxPatterns: []ir.PatternString{ - {Line: 12, Value: "fmt.Sprint($x)"}, - {Line: 12, Value: "fmt.Sprintf(\"%s\", $x)"}, - {Line: 12, Value: "fmt.Sprintf(\"%v\", $x)"}, - }, - ReportTemplate: "use $x.String() instead", - SuggestTemplate: "$x.String()", - WhereExpr: ir.FilterExpr{ - Line: 13, - Op: ir.FilterAndOp, - Src: "!m[\"x\"].Type.Is(`reflect.Value`) && m[\"x\"].Type.Implements(`fmt.Stringer`)", - Args: []ir.FilterExpr{ - { - Line: 13, - Op: ir.FilterNotOp, - Src: "!m[\"x\"].Type.Is(`reflect.Value`)", - Args: []ir.FilterExpr{{ - Line: 13, - Op: ir.FilterVarTypeIsOp, - Src: "m[\"x\"].Type.Is(`reflect.Value`)", - Value: "x", - Args: []ir.FilterExpr{{Line: 13, Op: ir.FilterStringOp, Src: "`reflect.Value`", Value: "reflect.Value"}}, - }}, - }, - { - Line: 13, - Op: ir.FilterVarTypeImplementsOp, - Src: "m[\"x\"].Type.Implements(`fmt.Stringer`)", - Value: "x", - Args: []ir.FilterExpr{{Line: 13, Op: ir.FilterStringOp, Src: "`fmt.Stringer`", Value: "fmt.Stringer"}}, - }, - }, - }, - }, - { - Line: 17, - SyntaxPatterns: []ir.PatternString{ - {Line: 17, Value: "fmt.Sprint($x)"}, - {Line: 17, Value: "fmt.Sprintf(\"%s\", $x)"}, - {Line: 17, Value: "fmt.Sprintf(\"%v\", $x)"}, - }, - ReportTemplate: "$x is already string", - SuggestTemplate: "$x", - WhereExpr: ir.FilterExpr{ - Line: 18, - Op: ir.FilterVarTypeIsOp, - Src: "m[\"x\"].Type.Is(`string`)", - Value: "x", - Args: []ir.FilterExpr{{Line: 18, Op: ir.FilterStringOp, Src: "`string`", Value: "string"}}, - }, - }, - }, - }, - { - Line: 27, - Name: "deferUnlambda", - MatcherName: "m", - DocTags: []string{"style", "experimental"}, - DocSummary: "Detects deferred function literals that can be simplified", - DocBefore: "defer func() { f() }()", - DocAfter: "defer f()", - Rules: []ir.Rule{ - { - Line: 28, - SyntaxPatterns: []ir.PatternString{{Line: 28, Value: "defer func() { $f($*args) }()"}}, - ReportTemplate: "can rewrite as `defer $f($args)`", - WhereExpr: ir.FilterExpr{ - Line: 29, - Op: ir.FilterAndOp, - Src: "m[\"f\"].Node.Is(`Ident`) && m[\"f\"].Text != \"panic\" && m[\"f\"].Text != \"recover\" && m[\"args\"].Const", - Args: []ir.FilterExpr{ - { - Line: 29, - Op: ir.FilterAndOp, - Src: "m[\"f\"].Node.Is(`Ident`) && m[\"f\"].Text != \"panic\" && m[\"f\"].Text != \"recover\"", - Args: []ir.FilterExpr{ - { - Line: 29, - Op: ir.FilterAndOp, - Src: "m[\"f\"].Node.Is(`Ident`) && m[\"f\"].Text != \"panic\"", - Args: []ir.FilterExpr{ - { - Line: 29, - Op: ir.FilterVarNodeIsOp, - Src: "m[\"f\"].Node.Is(`Ident`)", - Value: "f", - Args: []ir.FilterExpr{{Line: 29, Op: ir.FilterStringOp, Src: "`Ident`", Value: "Ident"}}, - }, - { - Line: 29, - Op: ir.FilterNeqOp, - Src: "m[\"f\"].Text != \"panic\"", - Args: []ir.FilterExpr{ - {Line: 29, Op: ir.FilterVarTextOp, Src: "m[\"f\"].Text", Value: "f"}, - {Line: 29, Op: ir.FilterStringOp, Src: "\"panic\"", Value: "panic"}, - }, - }, - }, - }, - { - Line: 29, - Op: ir.FilterNeqOp, - Src: "m[\"f\"].Text != \"recover\"", - Args: []ir.FilterExpr{ - {Line: 29, Op: ir.FilterVarTextOp, Src: "m[\"f\"].Text", Value: "f"}, - {Line: 29, Op: 
ir.FilterStringOp, Src: "\"recover\"", Value: "recover"}, - }, - }, - }, - }, - { - Line: 29, - Op: ir.FilterVarConstOp, - Src: "m[\"args\"].Const", - Value: "args", - }, - }, - }, - }, - { - Line: 32, - SyntaxPatterns: []ir.PatternString{{Line: 32, Value: "defer func() { $pkg.$f($*args) }()"}}, - ReportTemplate: "can rewrite as `defer $pkg.$f($args)`", - WhereExpr: ir.FilterExpr{ - Line: 33, - Op: ir.FilterAndOp, - Src: "m[\"f\"].Node.Is(`Ident`) && m[\"args\"].Const && m[\"pkg\"].Object.Is(`PkgName`)", - Args: []ir.FilterExpr{ - { - Line: 33, - Op: ir.FilterAndOp, - Src: "m[\"f\"].Node.Is(`Ident`) && m[\"args\"].Const", - Args: []ir.FilterExpr{ - { - Line: 33, - Op: ir.FilterVarNodeIsOp, - Src: "m[\"f\"].Node.Is(`Ident`)", - Value: "f", - Args: []ir.FilterExpr{{Line: 33, Op: ir.FilterStringOp, Src: "`Ident`", Value: "Ident"}}, - }, - { - Line: 33, - Op: ir.FilterVarConstOp, - Src: "m[\"args\"].Const", - Value: "args", - }, - }, - }, - { - Line: 33, - Op: ir.FilterVarObjectIsOp, - Src: "m[\"pkg\"].Object.Is(`PkgName`)", - Value: "pkg", - Args: []ir.FilterExpr{{Line: 33, Op: ir.FilterStringOp, Src: "`PkgName`", Value: "PkgName"}}, - }, - }, - }, - }, - }, - }, - { - Line: 41, - Name: "badLock", - MatcherName: "m", - DocTags: []string{"diagnostic", "experimental"}, - DocSummary: "Detects suspicious mutex lock/unlock operations", - DocBefore: "mu.Lock(); mu.Unlock()", - DocAfter: "mu.Lock(); defer mu.Unlock()", - Rules: []ir.Rule{ - { - Line: 45, - SyntaxPatterns: []ir.PatternString{{Line: 45, Value: "$mu1.Lock(); $mu2.Unlock()"}}, - ReportTemplate: "defer is missing, mutex is unlocked immediately", - WhereExpr: ir.FilterExpr{ - Line: 46, - Op: ir.FilterEqOp, - Src: "m[\"mu1\"].Text == m[\"mu2\"].Text", - Args: []ir.FilterExpr{ - {Line: 46, Op: ir.FilterVarTextOp, Src: "m[\"mu1\"].Text", Value: "mu1"}, - {Line: 46, Op: ir.FilterVarTextOp, Src: "m[\"mu2\"].Text", Value: "mu2"}, - }, - }, - LocationVar: "mu2", - }, - { - Line: 50, - SyntaxPatterns: []ir.PatternString{{Line: 50, Value: "$mu1.RLock(); $mu2.RUnlock()"}}, - ReportTemplate: "defer is missing, mutex is unlocked immediately", - WhereExpr: ir.FilterExpr{ - Line: 51, - Op: ir.FilterEqOp, - Src: "m[\"mu1\"].Text == m[\"mu2\"].Text", - Args: []ir.FilterExpr{ - {Line: 51, Op: ir.FilterVarTextOp, Src: "m[\"mu1\"].Text", Value: "mu1"}, - {Line: 51, Op: ir.FilterVarTextOp, Src: "m[\"mu2\"].Text", Value: "mu2"}, - }, - }, - LocationVar: "mu2", - }, - { - Line: 56, - SyntaxPatterns: []ir.PatternString{{Line: 56, Value: "$mu1.Lock(); defer $mu2.RUnlock()"}}, - ReportTemplate: "suspicious unlock, maybe Unlock was intended?", - WhereExpr: ir.FilterExpr{ - Line: 57, - Op: ir.FilterEqOp, - Src: "m[\"mu1\"].Text == m[\"mu2\"].Text", - Args: []ir.FilterExpr{ - {Line: 57, Op: ir.FilterVarTextOp, Src: "m[\"mu1\"].Text", Value: "mu1"}, - {Line: 57, Op: ir.FilterVarTextOp, Src: "m[\"mu2\"].Text", Value: "mu2"}, - }, - }, - LocationVar: "mu2", - }, - { - Line: 61, - SyntaxPatterns: []ir.PatternString{{Line: 61, Value: "$mu1.RLock(); defer $mu2.Unlock()"}}, - ReportTemplate: "suspicious unlock, maybe RUnlock was intended?", - WhereExpr: ir.FilterExpr{ - Line: 62, - Op: ir.FilterEqOp, - Src: "m[\"mu1\"].Text == m[\"mu2\"].Text", - Args: []ir.FilterExpr{ - {Line: 62, Op: ir.FilterVarTextOp, Src: "m[\"mu1\"].Text", Value: "mu1"}, - {Line: 62, Op: ir.FilterVarTextOp, Src: "m[\"mu2\"].Text", Value: "mu2"}, - }, - }, - LocationVar: "mu2", - }, - { - Line: 67, - SyntaxPatterns: []ir.PatternString{{Line: 67, Value: "$mu1.Lock(); defer $mu2.Lock()"}}, - 
ReportTemplate: "maybe defer $mu1.Unlock() was intended?", - WhereExpr: ir.FilterExpr{ - Line: 68, - Op: ir.FilterEqOp, - Src: "m[\"mu1\"].Text == m[\"mu2\"].Text", - Args: []ir.FilterExpr{ - {Line: 68, Op: ir.FilterVarTextOp, Src: "m[\"mu1\"].Text", Value: "mu1"}, - {Line: 68, Op: ir.FilterVarTextOp, Src: "m[\"mu2\"].Text", Value: "mu2"}, - }, - }, - LocationVar: "mu2", - }, - { - Line: 72, - SyntaxPatterns: []ir.PatternString{{Line: 72, Value: "$mu1.RLock(); defer $mu2.RLock()"}}, - ReportTemplate: "maybe defer $mu1.RUnlock() was intended?", - WhereExpr: ir.FilterExpr{ - Line: 73, - Op: ir.FilterEqOp, - Src: "m[\"mu1\"].Text == m[\"mu2\"].Text", - Args: []ir.FilterExpr{ - {Line: 73, Op: ir.FilterVarTextOp, Src: "m[\"mu1\"].Text", Value: "mu1"}, - {Line: 73, Op: ir.FilterVarTextOp, Src: "m[\"mu2\"].Text", Value: "mu2"}, - }, - }, - LocationVar: "mu2", - }, - }, - }, - { - Line: 82, - Name: "httpNoBody", - MatcherName: "m", - DocTags: []string{"style", "experimental"}, - DocSummary: "Detects nil usages in http.NewRequest calls, suggesting http.NoBody as an alternative", - DocBefore: "http.NewRequest(\"GET\", url, nil)", - DocAfter: "http.NewRequest(\"GET\", url, http.NoBody)", - Rules: []ir.Rule{ - { - Line: 83, - SyntaxPatterns: []ir.PatternString{{Line: 83, Value: "http.NewRequest($method, $url, $nil)"}}, - ReportTemplate: "http.NoBody should be preferred to the nil request body", - SuggestTemplate: "http.NewRequest($method, $url, http.NoBody)", - WhereExpr: ir.FilterExpr{ - Line: 84, - Op: ir.FilterEqOp, - Src: "m[\"nil\"].Text == \"nil\"", - Args: []ir.FilterExpr{ - {Line: 84, Op: ir.FilterVarTextOp, Src: "m[\"nil\"].Text", Value: "nil"}, - {Line: 84, Op: ir.FilterStringOp, Src: "\"nil\"", Value: "nil"}, - }, - }, - }, - { - Line: 88, - SyntaxPatterns: []ir.PatternString{{Line: 88, Value: "http.NewRequestWithContext($ctx, $method, $url, $nil)"}}, - ReportTemplate: "http.NoBody should be preferred to the nil request body", - SuggestTemplate: "http.NewRequestWithContext($ctx, $method, $url, http.NoBody)", - WhereExpr: ir.FilterExpr{ - Line: 89, - Op: ir.FilterEqOp, - Src: "m[\"nil\"].Text == \"nil\"", - Args: []ir.FilterExpr{ - {Line: 89, Op: ir.FilterVarTextOp, Src: "m[\"nil\"].Text", Value: "nil"}, - {Line: 89, Op: ir.FilterStringOp, Src: "\"nil\"", Value: "nil"}, - }, - }, - }, - { - Line: 93, - SyntaxPatterns: []ir.PatternString{{Line: 93, Value: "httptest.NewRequest($method, $url, $nil)"}}, - ReportTemplate: "http.NoBody should be preferred to the nil request body", - SuggestTemplate: "httptest.NewRequest($method, $url, http.NoBody)", - WhereExpr: ir.FilterExpr{ - Line: 94, - Op: ir.FilterEqOp, - Src: "m[\"nil\"].Text == \"nil\"", - Args: []ir.FilterExpr{ - {Line: 94, Op: ir.FilterVarTextOp, Src: "m[\"nil\"].Text", Value: "nil"}, - {Line: 94, Op: ir.FilterStringOp, Src: "\"nil\"", Value: "nil"}, - }, - }, - }, - }, - }, - { - Line: 104, - Name: "preferDecodeRune", - MatcherName: "m", - DocTags: []string{"performance", "experimental"}, - DocSummary: "Detects expressions like []rune(s)[0] that may cause unwanted rune slice allocation", - DocBefore: "r := []rune(s)[0]", - DocAfter: "r, _ := utf8.DecodeRuneInString(s)", - DocNote: "See Go issue for details: https://github.com/golang/go/issues/45260", - Rules: []ir.Rule{{ - Line: 105, - SyntaxPatterns: []ir.PatternString{{Line: 105, Value: "[]rune($s)[0]"}}, - ReportTemplate: "consider replacing $$ with utf8.DecodeRuneInString($s)", - WhereExpr: ir.FilterExpr{ - Line: 106, - Op: ir.FilterVarTypeIsOp, - Src: "m[\"s\"].Type.Is(`string`)", 
- Value: "s", - Args: []ir.FilterExpr{{Line: 106, Op: ir.FilterStringOp, Src: "`string`", Value: "string"}}, - }, - }}, - }, - { - Line: 114, - Name: "sloppyLen", - MatcherName: "m", - DocTags: []string{"diagnostic"}, - DocSummary: "Detects usage of `len` when result is obvious or doesn't make sense", - DocBefore: "len(arr) <= 0", - DocAfter: "len(arr) == 0", - Rules: []ir.Rule{ - { - Line: 115, - SyntaxPatterns: []ir.PatternString{{Line: 115, Value: "len($_) >= 0"}}, - ReportTemplate: "$$ is always true", - }, - { - Line: 116, - SyntaxPatterns: []ir.PatternString{{Line: 116, Value: "len($_) < 0"}}, - ReportTemplate: "$$ is always false", - }, - { - Line: 117, - SyntaxPatterns: []ir.PatternString{{Line: 117, Value: "len($x) <= 0"}}, - ReportTemplate: "$$ can be len($x) == 0", - }, - }, - }, - { - Line: 124, - Name: "valSwap", - MatcherName: "m", - DocTags: []string{"style"}, - DocSummary: "Detects value swapping code that are not using parallel assignment", - DocBefore: "*tmp = *x; *x = *y; *y = *tmp", - DocAfter: "*x, *y = *y, *x", - Rules: []ir.Rule{{ - Line: 125, - SyntaxPatterns: []ir.PatternString{{Line: 125, Value: "$tmp := $y; $y = $x; $x = $tmp"}}, - ReportTemplate: "can re-write as `$y, $x = $x, $y`", - }}, - }, - { - Line: 133, - Name: "switchTrue", - MatcherName: "m", - DocTags: []string{"style"}, - DocSummary: "Detects switch-over-bool statements that use explicit `true` tag value", - DocBefore: "switch true {...}", - DocAfter: "switch {...}", - Rules: []ir.Rule{ - { - Line: 134, - SyntaxPatterns: []ir.PatternString{{Line: 134, Value: "switch true { $*_ }"}}, - ReportTemplate: "replace 'switch true {}' with 'switch {}'", - }, - { - Line: 136, - SyntaxPatterns: []ir.PatternString{{Line: 136, Value: "switch $x; true { $*_ }"}}, - ReportTemplate: "replace 'switch $x; true {}' with 'switch $x; {}'", - }, - }, - }, - { - Line: 144, - Name: "flagDeref", - MatcherName: "m", - DocTags: []string{"diagnostic"}, - DocSummary: "Detects immediate dereferencing of `flag` package pointers", - DocBefore: "b := *flag.Bool(\"b\", false, \"b docs\")", - DocAfter: "var b bool; flag.BoolVar(&b, \"b\", false, \"b docs\")", - Rules: []ir.Rule{ - { - Line: 145, - SyntaxPatterns: []ir.PatternString{{Line: 145, Value: "*flag.Bool($*_)"}}, - ReportTemplate: "immediate deref in $$ is most likely an error; consider using flag.BoolVar", - }, - { - Line: 146, - SyntaxPatterns: []ir.PatternString{{Line: 146, Value: "*flag.Duration($*_)"}}, - ReportTemplate: "immediate deref in $$ is most likely an error; consider using flag.DurationVar", - }, - { - Line: 147, - SyntaxPatterns: []ir.PatternString{{Line: 147, Value: "*flag.Float64($*_)"}}, - ReportTemplate: "immediate deref in $$ is most likely an error; consider using flag.Float64Var", - }, - { - Line: 148, - SyntaxPatterns: []ir.PatternString{{Line: 148, Value: "*flag.Int($*_)"}}, - ReportTemplate: "immediate deref in $$ is most likely an error; consider using flag.IntVar", - }, - { - Line: 149, - SyntaxPatterns: []ir.PatternString{{Line: 149, Value: "*flag.Int64($*_)"}}, - ReportTemplate: "immediate deref in $$ is most likely an error; consider using flag.Int64Var", - }, - { - Line: 150, - SyntaxPatterns: []ir.PatternString{{Line: 150, Value: "*flag.String($*_)"}}, - ReportTemplate: "immediate deref in $$ is most likely an error; consider using flag.StringVar", - }, - { - Line: 151, - SyntaxPatterns: []ir.PatternString{{Line: 151, Value: "*flag.Uint($*_)"}}, - ReportTemplate: "immediate deref in $$ is most likely an error; consider using flag.UintVar", - }, 
- { - Line: 152, - SyntaxPatterns: []ir.PatternString{{Line: 152, Value: "*flag.Uint64($*_)"}}, - ReportTemplate: "immediate deref in $$ is most likely an error; consider using flag.Uint64Var", - }, - }, - }, - { - Line: 159, - Name: "emptyStringTest", - MatcherName: "m", - DocTags: []string{"style", "experimental"}, - DocSummary: "Detects empty string checks that can be written more idiomatically", - DocBefore: "len(s) == 0", - DocAfter: "s == \"\"", - Rules: []ir.Rule{ - { - Line: 160, - SyntaxPatterns: []ir.PatternString{{Line: 160, Value: "len($s) != 0"}}, - ReportTemplate: "replace `$$` with `$s != \"\"`", - WhereExpr: ir.FilterExpr{ - Line: 161, - Op: ir.FilterVarTypeIsOp, - Src: "m[\"s\"].Type.Is(`string`)", - Value: "s", - Args: []ir.FilterExpr{{Line: 161, Op: ir.FilterStringOp, Src: "`string`", Value: "string"}}, - }, - }, - { - Line: 163, - SyntaxPatterns: []ir.PatternString{{Line: 163, Value: "len($s) > 0"}}, - ReportTemplate: "replace `$$` with `$s != \"\"`", - WhereExpr: ir.FilterExpr{ - Line: 164, - Op: ir.FilterVarTypeIsOp, - Src: "m[\"s\"].Type.Is(`string`)", - Value: "s", - Args: []ir.FilterExpr{{Line: 164, Op: ir.FilterStringOp, Src: "`string`", Value: "string"}}, - }, - }, - { - Line: 167, - SyntaxPatterns: []ir.PatternString{{Line: 167, Value: "len($s) == 0"}}, - ReportTemplate: "replace `$$` with `$s == \"\"`", - WhereExpr: ir.FilterExpr{ - Line: 168, - Op: ir.FilterVarTypeIsOp, - Src: "m[\"s\"].Type.Is(`string`)", - Value: "s", - Args: []ir.FilterExpr{{Line: 168, Op: ir.FilterStringOp, Src: "`string`", Value: "string"}}, - }, - }, - { - Line: 170, - SyntaxPatterns: []ir.PatternString{{Line: 170, Value: "len($s) <= 0"}}, - ReportTemplate: "replace `$$` with `$s == \"\"`", - WhereExpr: ir.FilterExpr{ - Line: 171, - Op: ir.FilterVarTypeIsOp, - Src: "m[\"s\"].Type.Is(`string`)", - Value: "s", - Args: []ir.FilterExpr{{Line: 171, Op: ir.FilterStringOp, Src: "`string`", Value: "string"}}, - }, - }, - }, - }, - { - Line: 179, - Name: "stringXbytes", - MatcherName: "m", - DocTags: []string{"performance"}, - DocSummary: "Detects redundant conversions between string and []byte", - DocBefore: "copy(b, []byte(s))", - DocAfter: "copy(b, s)", - Rules: []ir.Rule{ - { - Line: 180, - SyntaxPatterns: []ir.PatternString{{Line: 180, Value: "copy($_, []byte($s))"}}, - ReportTemplate: "can simplify `[]byte($s)` to `$s`", - }, - { - Line: 182, - SyntaxPatterns: []ir.PatternString{{Line: 182, Value: "string($b) == \"\""}}, - ReportTemplate: "suggestion: len($b) == 0", - SuggestTemplate: "len($b) == 0", - WhereExpr: ir.FilterExpr{ - Line: 182, - Op: ir.FilterVarTypeIsOp, - Src: "m[\"b\"].Type.Is(`[]byte`)", - Value: "b", - Args: []ir.FilterExpr{{Line: 182, Op: ir.FilterStringOp, Src: "`[]byte`", Value: "[]byte"}}, - }, - }, - { - Line: 183, - SyntaxPatterns: []ir.PatternString{{Line: 183, Value: "string($b) != \"\""}}, - ReportTemplate: "suggestion: len($b) != 0", - SuggestTemplate: "len($b) != 0", - WhereExpr: ir.FilterExpr{ - Line: 183, - Op: ir.FilterVarTypeIsOp, - Src: "m[\"b\"].Type.Is(`[]byte`)", - Value: "b", - Args: []ir.FilterExpr{{Line: 183, Op: ir.FilterStringOp, Src: "`[]byte`", Value: "[]byte"}}, - }, - }, - { - Line: 185, - SyntaxPatterns: []ir.PatternString{{Line: 185, Value: "len(string($b))"}}, - ReportTemplate: "suggestion: len($b)", - SuggestTemplate: "len($b)", - WhereExpr: ir.FilterExpr{ - Line: 185, - Op: ir.FilterVarTypeIsOp, - Src: "m[\"b\"].Type.Is(`[]byte`)", - Value: "b", - Args: []ir.FilterExpr{{Line: 185, Op: ir.FilterStringOp, Src: "`[]byte`", Value: "[]byte"}}, - 
}, - }, - { - Line: 187, - SyntaxPatterns: []ir.PatternString{{Line: 187, Value: "string($x) == string($y)"}}, - ReportTemplate: "suggestion: bytes.Equal($x, $y)", - SuggestTemplate: "bytes.Equal($x, $y)", - WhereExpr: ir.FilterExpr{ - Line: 188, - Op: ir.FilterAndOp, - Src: "m[\"x\"].Type.Is(`[]byte`) && m[\"y\"].Type.Is(`[]byte`)", - Args: []ir.FilterExpr{ - { - Line: 188, - Op: ir.FilterVarTypeIsOp, - Src: "m[\"x\"].Type.Is(`[]byte`)", - Value: "x", - Args: []ir.FilterExpr{{Line: 188, Op: ir.FilterStringOp, Src: "`[]byte`", Value: "[]byte"}}, - }, - { - Line: 188, - Op: ir.FilterVarTypeIsOp, - Src: "m[\"y\"].Type.Is(`[]byte`)", - Value: "y", - Args: []ir.FilterExpr{{Line: 188, Op: ir.FilterStringOp, Src: "`[]byte`", Value: "[]byte"}}, - }, - }, - }, - }, - { - Line: 191, - SyntaxPatterns: []ir.PatternString{{Line: 191, Value: "string($x) != string($y)"}}, - ReportTemplate: "suggestion: !bytes.Equal($x, $y)", - SuggestTemplate: "!bytes.Equal($x, $y)", - WhereExpr: ir.FilterExpr{ - Line: 192, - Op: ir.FilterAndOp, - Src: "m[\"x\"].Type.Is(`[]byte`) && m[\"y\"].Type.Is(`[]byte`)", - Args: []ir.FilterExpr{ - { - Line: 192, - Op: ir.FilterVarTypeIsOp, - Src: "m[\"x\"].Type.Is(`[]byte`)", - Value: "x", - Args: []ir.FilterExpr{{Line: 192, Op: ir.FilterStringOp, Src: "`[]byte`", Value: "[]byte"}}, - }, - { - Line: 192, - Op: ir.FilterVarTypeIsOp, - Src: "m[\"y\"].Type.Is(`[]byte`)", - Value: "y", - Args: []ir.FilterExpr{{Line: 192, Op: ir.FilterStringOp, Src: "`[]byte`", Value: "[]byte"}}, - }, - }, - }, - }, - { - Line: 195, - SyntaxPatterns: []ir.PatternString{{Line: 195, Value: "$re.Match([]byte($s))"}}, - ReportTemplate: "suggestion: $re.MatchString($s)", - SuggestTemplate: "$re.MatchString($s)", - WhereExpr: ir.FilterExpr{ - Line: 196, - Op: ir.FilterAndOp, - Src: "m[\"re\"].Type.Is(`*regexp.Regexp`) && m[\"s\"].Type.Is(`string`)", - Args: []ir.FilterExpr{ - { - Line: 196, - Op: ir.FilterVarTypeIsOp, - Src: "m[\"re\"].Type.Is(`*regexp.Regexp`)", - Value: "re", - Args: []ir.FilterExpr{{Line: 196, Op: ir.FilterStringOp, Src: "`*regexp.Regexp`", Value: "*regexp.Regexp"}}, - }, - { - Line: 196, - Op: ir.FilterVarTypeIsOp, - Src: "m[\"s\"].Type.Is(`string`)", - Value: "s", - Args: []ir.FilterExpr{{Line: 196, Op: ir.FilterStringOp, Src: "`string`", Value: "string"}}, - }, - }, - }, - }, - { - Line: 199, - SyntaxPatterns: []ir.PatternString{{Line: 199, Value: "$re.FindIndex([]byte($s))"}}, - ReportTemplate: "suggestion: $re.FindStringIndex($s)", - SuggestTemplate: "$re.FindStringIndex($s)", - WhereExpr: ir.FilterExpr{ - Line: 200, - Op: ir.FilterAndOp, - Src: "m[\"re\"].Type.Is(`*regexp.Regexp`) && m[\"s\"].Type.Is(`string`)", - Args: []ir.FilterExpr{ - { - Line: 200, - Op: ir.FilterVarTypeIsOp, - Src: "m[\"re\"].Type.Is(`*regexp.Regexp`)", - Value: "re", - Args: []ir.FilterExpr{{Line: 200, Op: ir.FilterStringOp, Src: "`*regexp.Regexp`", Value: "*regexp.Regexp"}}, - }, - { - Line: 200, - Op: ir.FilterVarTypeIsOp, - Src: "m[\"s\"].Type.Is(`string`)", - Value: "s", - Args: []ir.FilterExpr{{Line: 200, Op: ir.FilterStringOp, Src: "`string`", Value: "string"}}, - }, - }, - }, - }, - { - Line: 203, - SyntaxPatterns: []ir.PatternString{{Line: 203, Value: "$re.FindAllIndex([]byte($s), $n)"}}, - ReportTemplate: "suggestion: $re.FindAllStringIndex($s, $n)", - SuggestTemplate: "$re.FindAllStringIndex($s, $n)", - WhereExpr: ir.FilterExpr{ - Line: 204, - Op: ir.FilterAndOp, - Src: "m[\"re\"].Type.Is(`*regexp.Regexp`) && m[\"s\"].Type.Is(`string`)", - Args: []ir.FilterExpr{ - { - Line: 204, - Op: 
ir.FilterVarTypeIsOp, - Src: "m[\"re\"].Type.Is(`*regexp.Regexp`)", - Value: "re", - Args: []ir.FilterExpr{{Line: 204, Op: ir.FilterStringOp, Src: "`*regexp.Regexp`", Value: "*regexp.Regexp"}}, - }, - { - Line: 204, - Op: ir.FilterVarTypeIsOp, - Src: "m[\"s\"].Type.Is(`string`)", - Value: "s", - Args: []ir.FilterExpr{{Line: 204, Op: ir.FilterStringOp, Src: "`string`", Value: "string"}}, - }, - }, - }, - }, - }, - }, - { - Line: 213, - Name: "indexAlloc", - MatcherName: "m", - DocTags: []string{"performance"}, - DocSummary: "Detects strings.Index calls that may cause unwanted allocs", - DocBefore: "strings.Index(string(x), y)", - DocAfter: "bytes.Index(x, []byte(y))", - DocNote: "See Go issue for details: https://github.com/golang/go/issues/25864", - Rules: []ir.Rule{{ - Line: 214, - SyntaxPatterns: []ir.PatternString{{Line: 214, Value: "strings.Index(string($x), $y)"}}, - ReportTemplate: "consider replacing $$ with bytes.Index($x, []byte($y))", - WhereExpr: ir.FilterExpr{ - Line: 215, - Op: ir.FilterAndOp, - Src: "m[\"x\"].Pure && m[\"y\"].Pure", - Args: []ir.FilterExpr{ - {Line: 215, Op: ir.FilterVarPureOp, Src: "m[\"x\"].Pure", Value: "x"}, - {Line: 215, Op: ir.FilterVarPureOp, Src: "m[\"y\"].Pure", Value: "y"}, - }, - }, - }}, - }, - { - Line: 223, - Name: "wrapperFunc", - MatcherName: "m", - DocTags: []string{"style"}, - DocSummary: "Detects function calls that can be replaced with convenience wrappers", - DocBefore: "wg.Add(-1)", - DocAfter: "wg.Done()", - Rules: []ir.Rule{ - { - Line: 224, - SyntaxPatterns: []ir.PatternString{{Line: 224, Value: "$wg.Add(-1)"}}, - ReportTemplate: "use WaitGroup.Done method in `$$`", - WhereExpr: ir.FilterExpr{ - Line: 225, - Op: ir.FilterVarTypeIsOp, - Src: "m[\"wg\"].Type.Is(`sync.WaitGroup`)", - Value: "wg", - Args: []ir.FilterExpr{{Line: 225, Op: ir.FilterStringOp, Src: "`sync.WaitGroup`", Value: "sync.WaitGroup"}}, - }, - }, - { - Line: 228, - SyntaxPatterns: []ir.PatternString{{Line: 228, Value: "$buf.Truncate(0)"}}, - ReportTemplate: "use Buffer.Reset method in `$$`", - WhereExpr: ir.FilterExpr{ - Line: 229, - Op: ir.FilterVarTypeIsOp, - Src: "m[\"buf\"].Type.Is(`bytes.Buffer`)", - Value: "buf", - Args: []ir.FilterExpr{{Line: 229, Op: ir.FilterStringOp, Src: "`bytes.Buffer`", Value: "bytes.Buffer"}}, - }, - }, - { - Line: 232, - SyntaxPatterns: []ir.PatternString{{Line: 232, Value: "http.HandlerFunc(http.NotFound)"}}, - ReportTemplate: "use http.NotFoundHandler method in `$$`", - }, - { - Line: 234, - SyntaxPatterns: []ir.PatternString{{Line: 234, Value: "strings.SplitN($_, $_, -1)"}}, - ReportTemplate: "use strings.Split method in `$$`", - }, - { - Line: 235, - SyntaxPatterns: []ir.PatternString{{Line: 235, Value: "strings.Replace($_, $_, $_, -1)"}}, - ReportTemplate: "use strings.ReplaceAll method in `$$`", - }, - { - Line: 236, - SyntaxPatterns: []ir.PatternString{{Line: 236, Value: "strings.Map(unicode.ToTitle, $_)"}}, - ReportTemplate: "use strings.ToTitle method in `$$`", - }, - { - Line: 237, - SyntaxPatterns: []ir.PatternString{ - {Line: 237, Value: "strings.Index($s1, $s2) >= 0"}, - {Line: 237, Value: "strings.Index($s1, $s2) != -1"}, - }, - ReportTemplate: "suggestion: strings.Contains($s1, $s2)", - SuggestTemplate: "strings.Contains($s1, $s2)", - }, - { - Line: 238, - SyntaxPatterns: []ir.PatternString{ - {Line: 238, Value: "strings.IndexAny($s1, $s2) >= 0"}, - {Line: 238, Value: "strings.IndexAny($s1, $s2) != -1"}, - }, - ReportTemplate: "suggestion: strings.ContainsAny($s1, $s2)", - SuggestTemplate: "strings.ContainsAny($s1, $s2)", 
- }, - { - Line: 239, - SyntaxPatterns: []ir.PatternString{ - {Line: 239, Value: "strings.IndexRune($s1, $s2) >= 0"}, - {Line: 239, Value: "strings.IndexRune($s1, $s2) != -1"}, - }, - ReportTemplate: "suggestion: strings.ContainsRune($s1, $s2)", - SuggestTemplate: "strings.ContainsRune($s1, $s2)", - }, - { - Line: 241, - SyntaxPatterns: []ir.PatternString{ - {Line: 241, Value: "$i := strings.Index($s, $sep); $*_; $x, $y = $s[:$i], $s[$i+1:]"}, - {Line: 242, Value: "$i := strings.Index($s, $sep); $*_; $x = $s[:$i]; $*_; $y = $s[$i+1:]"}, - }, - ReportTemplate: "suggestion: $x, $y, _ = strings.Cut($s, $sep)", - SuggestTemplate: "$x, $y, _ = strings.Cut($s, $sep)", - WhereExpr: ir.FilterExpr{ - Line: 243, - Op: ir.FilterGoVersionGreaterEqThanOp, - Src: "m.GoVersion().GreaterEqThan(\"1.18\")", - Value: "1.18", - }, - }, - { - Line: 246, - SyntaxPatterns: []ir.PatternString{ - {Line: 247, Value: "if $i := strings.Index($s, $sep); $i != -1 { $*_; $x, $y = $s[:$i], $s[$i+1:]; $*_ }"}, - {Line: 248, Value: "if $i := strings.Index($s, $sep); $i != -1 { $*_; $x = $s[:$i]; $*_; $y = $s[$i+1:]; $*_ }"}, - {Line: 249, Value: "if $i := strings.Index($s, $sep); $i >= 0 { $*_; $x, $y = $s[:$i], $s[$i+1:]; $*_ }"}, - {Line: 250, Value: "if $i := strings.Index($s, $sep); $i >= 0 { $*_; $x = $s[:$i]; $*_; $y = $s[$i+1:]; $*_ }"}, - }, - ReportTemplate: "suggestion: if $x, $y, ok = strings.Cut($s, $sep); ok { ... }", - SuggestTemplate: "if $x, $y, ok = strings.Cut($s, $sep); ok { ... }", - WhereExpr: ir.FilterExpr{ - Line: 251, - Op: ir.FilterGoVersionGreaterEqThanOp, - Src: "m.GoVersion().GreaterEqThan(\"1.18\")", - Value: "1.18", - }, - }, - { - Line: 254, - SyntaxPatterns: []ir.PatternString{{Line: 254, Value: "bytes.SplitN(b, []byte(\".\"), -1)"}}, - ReportTemplate: "use bytes.Split method in `$$`", - }, - { - Line: 255, - SyntaxPatterns: []ir.PatternString{{Line: 255, Value: "bytes.Replace($_, $_, $_, -1)"}}, - ReportTemplate: "use bytes.ReplaceAll method in `$$`", - }, - { - Line: 256, - SyntaxPatterns: []ir.PatternString{{Line: 256, Value: "bytes.Map(unicode.ToUpper, $_)"}}, - ReportTemplate: "use bytes.ToUpper method in `$$`", - }, - { - Line: 257, - SyntaxPatterns: []ir.PatternString{{Line: 257, Value: "bytes.Map(unicode.ToLower, $_)"}}, - ReportTemplate: "use bytes.ToLower method in `$$`", - }, - { - Line: 258, - SyntaxPatterns: []ir.PatternString{{Line: 258, Value: "bytes.Map(unicode.ToTitle, $_)"}}, - ReportTemplate: "use bytes.ToTitle method in `$$`", - }, - { - Line: 259, - SyntaxPatterns: []ir.PatternString{ - {Line: 259, Value: "bytes.Index($b1, $b2) >= 0"}, - {Line: 259, Value: "bytes.Index($b1, $b2) != -1"}, - }, - ReportTemplate: "suggestion: bytes.Contains($b1, $b2)", - SuggestTemplate: "bytes.Contains($b1, $b2)", - }, - { - Line: 260, - SyntaxPatterns: []ir.PatternString{ - {Line: 260, Value: "bytes.IndexAny($b1, $b2) >= 0"}, - {Line: 260, Value: "bytes.IndexAny($b1, $b2) != -1"}, - }, - ReportTemplate: "suggestion: bytes.ContainsAny($b1, $b2)", - SuggestTemplate: "bytes.ContainsAny($b1, $b2)", - }, - { - Line: 261, - SyntaxPatterns: []ir.PatternString{ - {Line: 261, Value: "bytes.IndexRune($b1, $b2) >= 0"}, - {Line: 261, Value: "bytes.IndexRune($b1, $b2) != -1"}, - }, - ReportTemplate: "suggestion: bytes.ContainsRune($b1, $b2)", - SuggestTemplate: "bytes.ContainsRune($b1, $b2)", - }, - { - Line: 263, - SyntaxPatterns: []ir.PatternString{{Line: 263, Value: "draw.DrawMask($_, $_, $_, $_, nil, image.Point{}, $_)"}}, - ReportTemplate: "use draw.Draw method in `$$`", - }, - }, - }, - { - 
Line: 271, - Name: "regexpMust", - MatcherName: "m", - DocTags: []string{"style"}, - DocSummary: "Detects `regexp.Compile*` that can be replaced with `regexp.MustCompile*`", - DocBefore: "re, _ := regexp.Compile(\"const pattern\")", - DocAfter: "re := regexp.MustCompile(\"const pattern\")", - Rules: []ir.Rule{ - { - Line: 272, - SyntaxPatterns: []ir.PatternString{{Line: 272, Value: "regexp.Compile($pat)"}}, - ReportTemplate: "for const patterns like $pat, use regexp.MustCompile", - WhereExpr: ir.FilterExpr{ - Line: 273, - Op: ir.FilterVarConstOp, - Src: "m[\"pat\"].Const", - Value: "pat", - }, - }, - { - Line: 276, - SyntaxPatterns: []ir.PatternString{{Line: 276, Value: "regexp.CompilePOSIX($pat)"}}, - ReportTemplate: "for const patterns like $pat, use regexp.MustCompilePOSIX", - WhereExpr: ir.FilterExpr{ - Line: 277, - Op: ir.FilterVarConstOp, - Src: "m[\"pat\"].Const", - Value: "pat", - }, - }, - }, - }, - { - Line: 285, - Name: "badCall", - MatcherName: "m", - DocTags: []string{"diagnostic"}, - DocSummary: "Detects suspicious function calls", - DocBefore: "strings.Replace(s, from, to, 0)", - DocAfter: "strings.Replace(s, from, to, -1)", - Rules: []ir.Rule{ - { - Line: 286, - SyntaxPatterns: []ir.PatternString{{Line: 286, Value: "strings.Replace($_, $_, $_, $zero)"}}, - ReportTemplate: "suspicious arg 0, probably meant -1", - WhereExpr: ir.FilterExpr{ - Line: 287, - Op: ir.FilterEqOp, - Src: "m[\"zero\"].Value.Int() == 0", - Args: []ir.FilterExpr{ - { - Line: 287, - Op: ir.FilterVarValueIntOp, - Src: "m[\"zero\"].Value.Int()", - Value: "zero", - }, - { - Line: 287, - Op: ir.FilterIntOp, - Src: "0", - Value: int64(0), - }, - }, - }, - LocationVar: "zero", - }, - { - Line: 289, - SyntaxPatterns: []ir.PatternString{{Line: 289, Value: "bytes.Replace($_, $_, $_, $zero)"}}, - ReportTemplate: "suspicious arg 0, probably meant -1", - WhereExpr: ir.FilterExpr{ - Line: 290, - Op: ir.FilterEqOp, - Src: "m[\"zero\"].Value.Int() == 0", - Args: []ir.FilterExpr{ - { - Line: 290, - Op: ir.FilterVarValueIntOp, - Src: "m[\"zero\"].Value.Int()", - Value: "zero", - }, - { - Line: 290, - Op: ir.FilterIntOp, - Src: "0", - Value: int64(0), - }, - }, - }, - LocationVar: "zero", - }, - { - Line: 293, - SyntaxPatterns: []ir.PatternString{{Line: 293, Value: "strings.SplitN($_, $_, $zero)"}}, - ReportTemplate: "suspicious arg 0, probably meant -1", - WhereExpr: ir.FilterExpr{ - Line: 294, - Op: ir.FilterEqOp, - Src: "m[\"zero\"].Value.Int() == 0", - Args: []ir.FilterExpr{ - { - Line: 294, - Op: ir.FilterVarValueIntOp, - Src: "m[\"zero\"].Value.Int()", - Value: "zero", - }, - { - Line: 294, - Op: ir.FilterIntOp, - Src: "0", - Value: int64(0), - }, - }, - }, - LocationVar: "zero", - }, - { - Line: 296, - SyntaxPatterns: []ir.PatternString{{Line: 296, Value: "bytes.SplitN($_, $_, $zero)"}}, - ReportTemplate: "suspicious arg 0, probably meant -1", - WhereExpr: ir.FilterExpr{ - Line: 297, - Op: ir.FilterEqOp, - Src: "m[\"zero\"].Value.Int() == 0", - Args: []ir.FilterExpr{ - { - Line: 297, - Op: ir.FilterVarValueIntOp, - Src: "m[\"zero\"].Value.Int()", - Value: "zero", - }, - { - Line: 297, - Op: ir.FilterIntOp, - Src: "0", - Value: int64(0), - }, - }, - }, - LocationVar: "zero", - }, - { - Line: 300, - SyntaxPatterns: []ir.PatternString{{Line: 300, Value: "append($_)"}}, - ReportTemplate: "no-op append call, probably missing arguments", - }, - { - Line: 302, - SyntaxPatterns: []ir.PatternString{{Line: 302, Value: "filepath.Join($_)"}}, - ReportTemplate: "suspicious Join on 1 argument", - }, - }, - }, - { - Line: 309, - 
Name: "assignOp", - MatcherName: "m", - DocTags: []string{"style"}, - DocSummary: "Detects assignments that can be simplified by using assignment operators", - DocBefore: "x = x * 2", - DocAfter: "x *= 2", - Rules: []ir.Rule{ - { - Line: 310, - SyntaxPatterns: []ir.PatternString{{Line: 310, Value: "$x = $x + 1"}}, - ReportTemplate: "replace `$$` with `$x++`", - WhereExpr: ir.FilterExpr{Line: 310, Op: ir.FilterVarPureOp, Src: "m[\"x\"].Pure", Value: "x"}, - }, - { - Line: 311, - SyntaxPatterns: []ir.PatternString{{Line: 311, Value: "$x = $x - 1"}}, - ReportTemplate: "replace `$$` with `$x--`", - WhereExpr: ir.FilterExpr{Line: 311, Op: ir.FilterVarPureOp, Src: "m[\"x\"].Pure", Value: "x"}, - }, - { - Line: 313, - SyntaxPatterns: []ir.PatternString{{Line: 313, Value: "$x = $x + $y"}}, - ReportTemplate: "replace `$$` with `$x += $y`", - WhereExpr: ir.FilterExpr{Line: 313, Op: ir.FilterVarPureOp, Src: "m[\"x\"].Pure", Value: "x"}, - }, - { - Line: 314, - SyntaxPatterns: []ir.PatternString{{Line: 314, Value: "$x = $x - $y"}}, - ReportTemplate: "replace `$$` with `$x -= $y`", - WhereExpr: ir.FilterExpr{Line: 314, Op: ir.FilterVarPureOp, Src: "m[\"x\"].Pure", Value: "x"}, - }, - { - Line: 316, - SyntaxPatterns: []ir.PatternString{{Line: 316, Value: "$x = $x * $y"}}, - ReportTemplate: "replace `$$` with `$x *= $y`", - WhereExpr: ir.FilterExpr{Line: 316, Op: ir.FilterVarPureOp, Src: "m[\"x\"].Pure", Value: "x"}, - }, - { - Line: 317, - SyntaxPatterns: []ir.PatternString{{Line: 317, Value: "$x = $x / $y"}}, - ReportTemplate: "replace `$$` with `$x /= $y`", - WhereExpr: ir.FilterExpr{Line: 317, Op: ir.FilterVarPureOp, Src: "m[\"x\"].Pure", Value: "x"}, - }, - { - Line: 318, - SyntaxPatterns: []ir.PatternString{{Line: 318, Value: "$x = $x % $y"}}, - ReportTemplate: "replace `$$` with `$x %= $y`", - WhereExpr: ir.FilterExpr{Line: 318, Op: ir.FilterVarPureOp, Src: "m[\"x\"].Pure", Value: "x"}, - }, - { - Line: 319, - SyntaxPatterns: []ir.PatternString{{Line: 319, Value: "$x = $x & $y"}}, - ReportTemplate: "replace `$$` with `$x &= $y`", - WhereExpr: ir.FilterExpr{Line: 319, Op: ir.FilterVarPureOp, Src: "m[\"x\"].Pure", Value: "x"}, - }, - { - Line: 320, - SyntaxPatterns: []ir.PatternString{{Line: 320, Value: "$x = $x | $y"}}, - ReportTemplate: "replace `$$` with `$x |= $y`", - WhereExpr: ir.FilterExpr{Line: 320, Op: ir.FilterVarPureOp, Src: "m[\"x\"].Pure", Value: "x"}, - }, - { - Line: 321, - SyntaxPatterns: []ir.PatternString{{Line: 321, Value: "$x = $x ^ $y"}}, - ReportTemplate: "replace `$$` with `$x ^= $y`", - WhereExpr: ir.FilterExpr{Line: 321, Op: ir.FilterVarPureOp, Src: "m[\"x\"].Pure", Value: "x"}, - }, - { - Line: 322, - SyntaxPatterns: []ir.PatternString{{Line: 322, Value: "$x = $x << $y"}}, - ReportTemplate: "replace `$$` with `$x <<= $y`", - WhereExpr: ir.FilterExpr{Line: 322, Op: ir.FilterVarPureOp, Src: "m[\"x\"].Pure", Value: "x"}, - }, - { - Line: 323, - SyntaxPatterns: []ir.PatternString{{Line: 323, Value: "$x = $x >> $y"}}, - ReportTemplate: "replace `$$` with `$x >>= $y`", - WhereExpr: ir.FilterExpr{Line: 323, Op: ir.FilterVarPureOp, Src: "m[\"x\"].Pure", Value: "x"}, - }, - { - Line: 324, - SyntaxPatterns: []ir.PatternString{{Line: 324, Value: "$x = $x &^ $y"}}, - ReportTemplate: "replace `$$` with `$x &^= $y`", - WhereExpr: ir.FilterExpr{Line: 324, Op: ir.FilterVarPureOp, Src: "m[\"x\"].Pure", Value: "x"}, - }, - }, - }, - { - Line: 331, - Name: "preferWriteByte", - MatcherName: "m", - DocTags: []string{"performance", "experimental", "opinionated"}, - DocSummary: "Detects WriteRune 
calls with rune literal argument that is single byte and reports to use WriteByte instead", - DocBefore: "w.WriteRune('\\n')", - DocAfter: "w.WriteByte('\\n')", - Rules: []ir.Rule{{ - Line: 335, - SyntaxPatterns: []ir.PatternString{{Line: 335, Value: "$w.WriteRune($c)"}}, - ReportTemplate: "consider writing single byte rune $c with $w.WriteByte($c)", - WhereExpr: ir.FilterExpr{ - Line: 336, - Op: ir.FilterAndOp, - Src: "m[\"w\"].Type.Implements(\"io.ByteWriter\") && (m[\"c\"].Const && m[\"c\"].Value.Int() < runeSelf)", - Args: []ir.FilterExpr{ - { - Line: 336, - Op: ir.FilterVarTypeImplementsOp, - Src: "m[\"w\"].Type.Implements(\"io.ByteWriter\")", - Value: "w", - Args: []ir.FilterExpr{{Line: 336, Op: ir.FilterStringOp, Src: "\"io.ByteWriter\"", Value: "io.ByteWriter"}}, - }, - { - Line: 336, - Op: ir.FilterAndOp, - Src: "(m[\"c\"].Const && m[\"c\"].Value.Int() < runeSelf)", - Args: []ir.FilterExpr{ - { - Line: 336, - Op: ir.FilterVarConstOp, - Src: "m[\"c\"].Const", - Value: "c", - }, - { - Line: 336, - Op: ir.FilterLtOp, - Src: "m[\"c\"].Value.Int() < runeSelf", - Args: []ir.FilterExpr{ - { - Line: 336, - Op: ir.FilterVarValueIntOp, - Src: "m[\"c\"].Value.Int()", - Value: "c", - }, - { - Line: 336, - Op: ir.FilterIntOp, - Src: "runeSelf", - Value: int64(128), - }, - }, - }, - }, - }, - }, - }, - }}, - }, - { - Line: 344, - Name: "preferFprint", - MatcherName: "m", - DocTags: []string{"performance", "experimental"}, - DocSummary: "Detects fmt.Sprint(f/ln) calls which can be replaced with fmt.Fprint(f/ln)", - DocBefore: "w.Write([]byte(fmt.Sprintf(\"%x\", 10)))", - DocAfter: "fmt.Fprintf(w, \"%x\", 10)", - Rules: []ir.Rule{ - { - Line: 345, - SyntaxPatterns: []ir.PatternString{{Line: 345, Value: "$w.Write([]byte(fmt.Sprint($*args)))"}}, - ReportTemplate: "fmt.Fprint($w, $args) should be preferred to the $$", - SuggestTemplate: "fmt.Fprint($w, $args)", - WhereExpr: ir.FilterExpr{ - Line: 346, - Op: ir.FilterVarTypeImplementsOp, - Src: "m[\"w\"].Type.Implements(\"io.Writer\")", - Value: "w", - Args: []ir.FilterExpr{{Line: 346, Op: ir.FilterStringOp, Src: "\"io.Writer\"", Value: "io.Writer"}}, - }, - }, - { - Line: 350, - SyntaxPatterns: []ir.PatternString{{Line: 350, Value: "$w.Write([]byte(fmt.Sprintf($*args)))"}}, - ReportTemplate: "fmt.Fprintf($w, $args) should be preferred to the $$", - SuggestTemplate: "fmt.Fprintf($w, $args)", - WhereExpr: ir.FilterExpr{ - Line: 351, - Op: ir.FilterVarTypeImplementsOp, - Src: "m[\"w\"].Type.Implements(\"io.Writer\")", - Value: "w", - Args: []ir.FilterExpr{{Line: 351, Op: ir.FilterStringOp, Src: "\"io.Writer\"", Value: "io.Writer"}}, - }, - }, - { - Line: 355, - SyntaxPatterns: []ir.PatternString{{Line: 355, Value: "$w.Write([]byte(fmt.Sprintln($*args)))"}}, - ReportTemplate: "fmt.Fprintln($w, $args) should be preferred to the $$", - SuggestTemplate: "fmt.Fprintln($w, $args)", - WhereExpr: ir.FilterExpr{ - Line: 356, - Op: ir.FilterVarTypeImplementsOp, - Src: "m[\"w\"].Type.Implements(\"io.Writer\")", - Value: "w", - Args: []ir.FilterExpr{{Line: 356, Op: ir.FilterStringOp, Src: "\"io.Writer\"", Value: "io.Writer"}}, - }, - }, - { - Line: 360, - SyntaxPatterns: []ir.PatternString{{Line: 360, Value: "io.WriteString($w, fmt.Sprint($*args))"}}, - ReportTemplate: "suggestion: fmt.Fprint($w, $args)", - SuggestTemplate: "fmt.Fprint($w, $args)", - }, - { - Line: 361, - SyntaxPatterns: []ir.PatternString{{Line: 361, Value: "io.WriteString($w, fmt.Sprintf($*args))"}}, - ReportTemplate: "suggestion: fmt.Fprintf($w, $args)", - SuggestTemplate: "fmt.Fprintf($w, 
$args)", - }, - { - Line: 362, - SyntaxPatterns: []ir.PatternString{{Line: 362, Value: "io.WriteString($w, fmt.Sprintln($*args))"}}, - ReportTemplate: "suggestion: fmt.Fprintln($w, $args)", - SuggestTemplate: "fmt.Fprintln($w, $args)", - }, - { - Line: 364, - SyntaxPatterns: []ir.PatternString{{Line: 364, Value: "$w.WriteString(fmt.Sprint($*args))"}}, - ReportTemplate: "suggestion: fmt.Fprint($w, $args)", - SuggestTemplate: "fmt.Fprint($w, $args)", - WhereExpr: ir.FilterExpr{ - Line: 365, - Op: ir.FilterAndOp, - Src: "m[\"w\"].Type.Implements(\"io.Writer\") && m[\"w\"].Type.Implements(\"io.StringWriter\")", - Args: []ir.FilterExpr{ - { - Line: 365, - Op: ir.FilterVarTypeImplementsOp, - Src: "m[\"w\"].Type.Implements(\"io.Writer\")", - Value: "w", - Args: []ir.FilterExpr{{Line: 365, Op: ir.FilterStringOp, Src: "\"io.Writer\"", Value: "io.Writer"}}, - }, - { - Line: 365, - Op: ir.FilterVarTypeImplementsOp, - Src: "m[\"w\"].Type.Implements(\"io.StringWriter\")", - Value: "w", - Args: []ir.FilterExpr{{Line: 365, Op: ir.FilterStringOp, Src: "\"io.StringWriter\"", Value: "io.StringWriter"}}, - }, - }, - }, - }, - { - Line: 367, - SyntaxPatterns: []ir.PatternString{{Line: 367, Value: "$w.WriteString(fmt.Sprintf($*args))"}}, - ReportTemplate: "suggestion: fmt.Fprintf($w, $args)", - SuggestTemplate: "fmt.Fprintf($w, $args)", - WhereExpr: ir.FilterExpr{ - Line: 368, - Op: ir.FilterAndOp, - Src: "m[\"w\"].Type.Implements(\"io.Writer\") && m[\"w\"].Type.Implements(\"io.StringWriter\")", - Args: []ir.FilterExpr{ - { - Line: 368, - Op: ir.FilterVarTypeImplementsOp, - Src: "m[\"w\"].Type.Implements(\"io.Writer\")", - Value: "w", - Args: []ir.FilterExpr{{Line: 368, Op: ir.FilterStringOp, Src: "\"io.Writer\"", Value: "io.Writer"}}, - }, - { - Line: 368, - Op: ir.FilterVarTypeImplementsOp, - Src: "m[\"w\"].Type.Implements(\"io.StringWriter\")", - Value: "w", - Args: []ir.FilterExpr{{Line: 368, Op: ir.FilterStringOp, Src: "\"io.StringWriter\"", Value: "io.StringWriter"}}, - }, - }, - }, - }, - { - Line: 370, - SyntaxPatterns: []ir.PatternString{{Line: 370, Value: "$w.WriteString(fmt.Sprintln($*args))"}}, - ReportTemplate: "suggestion: fmt.Fprintln($w, $args)", - SuggestTemplate: "fmt.Fprintln($w, $args)", - WhereExpr: ir.FilterExpr{ - Line: 371, - Op: ir.FilterAndOp, - Src: "m[\"w\"].Type.Implements(\"io.Writer\") && m[\"w\"].Type.Implements(\"io.StringWriter\")", - Args: []ir.FilterExpr{ - { - Line: 371, - Op: ir.FilterVarTypeImplementsOp, - Src: "m[\"w\"].Type.Implements(\"io.Writer\")", - Value: "w", - Args: []ir.FilterExpr{{Line: 371, Op: ir.FilterStringOp, Src: "\"io.Writer\"", Value: "io.Writer"}}, - }, - { - Line: 371, - Op: ir.FilterVarTypeImplementsOp, - Src: "m[\"w\"].Type.Implements(\"io.StringWriter\")", - Value: "w", - Args: []ir.FilterExpr{{Line: 371, Op: ir.FilterStringOp, Src: "\"io.StringWriter\"", Value: "io.StringWriter"}}, - }, - }, - }, - }, - }, - }, - { - Line: 379, - Name: "dupArg", - MatcherName: "m", - DocTags: []string{"diagnostic"}, - DocSummary: "Detects suspicious duplicated arguments", - DocBefore: "copy(dst, dst)", - DocAfter: "copy(dst, src)", - Rules: []ir.Rule{ - { - Line: 380, - SyntaxPatterns: []ir.PatternString{ - {Line: 380, Value: "$x.Equal($x)"}, - {Line: 380, Value: "$x.Equals($x)"}, - {Line: 380, Value: "$x.Compare($x)"}, - {Line: 380, Value: "$x.Cmp($x)"}, - }, - ReportTemplate: "suspicious method call with the same argument and receiver", - WhereExpr: ir.FilterExpr{Line: 381, Op: ir.FilterVarPureOp, Src: "m[\"x\"].Pure", Value: "x"}, - }, - { - Line: 384, - 
SyntaxPatterns: []ir.PatternString{ - {Line: 384, Value: "copy($x, $x)"}, - {Line: 385, Value: "math.Max($x, $x)"}, - {Line: 386, Value: "math.Min($x, $x)"}, - {Line: 387, Value: "reflect.Copy($x, $x)"}, - {Line: 388, Value: "reflect.DeepEqual($x, $x)"}, - {Line: 389, Value: "strings.Contains($x, $x)"}, - {Line: 390, Value: "strings.Compare($x, $x)"}, - {Line: 391, Value: "strings.EqualFold($x, $x)"}, - {Line: 392, Value: "strings.HasPrefix($x, $x)"}, - {Line: 393, Value: "strings.HasSuffix($x, $x)"}, - {Line: 394, Value: "strings.Index($x, $x)"}, - {Line: 395, Value: "strings.LastIndex($x, $x)"}, - {Line: 396, Value: "strings.Split($x, $x)"}, - {Line: 397, Value: "strings.SplitAfter($x, $x)"}, - {Line: 398, Value: "strings.SplitAfterN($x, $x, $_)"}, - {Line: 399, Value: "strings.SplitN($x, $x, $_)"}, - {Line: 400, Value: "strings.Replace($_, $x, $x, $_)"}, - {Line: 401, Value: "strings.ReplaceAll($_, $x, $x)"}, - {Line: 402, Value: "bytes.Contains($x, $x)"}, - {Line: 403, Value: "bytes.Compare($x, $x)"}, - {Line: 404, Value: "bytes.Equal($x, $x)"}, - {Line: 405, Value: "bytes.EqualFold($x, $x)"}, - {Line: 406, Value: "bytes.HasPrefix($x, $x)"}, - {Line: 407, Value: "bytes.HasSuffix($x, $x)"}, - {Line: 408, Value: "bytes.Index($x, $x)"}, - {Line: 409, Value: "bytes.LastIndex($x, $x)"}, - {Line: 410, Value: "bytes.Split($x, $x)"}, - {Line: 411, Value: "bytes.SplitAfter($x, $x)"}, - {Line: 412, Value: "bytes.SplitAfterN($x, $x, $_)"}, - {Line: 413, Value: "bytes.SplitN($x, $x, $_)"}, - {Line: 414, Value: "bytes.Replace($_, $x, $x, $_)"}, - {Line: 415, Value: "bytes.ReplaceAll($_, $x, $x)"}, - {Line: 416, Value: "types.Identical($x, $x)"}, - {Line: 417, Value: "types.IdenticalIgnoreTags($x, $x)"}, - {Line: 418, Value: "draw.Draw($x, $_, $x, $_, $_)"}, - }, - ReportTemplate: "suspicious duplicated args in $$", - WhereExpr: ir.FilterExpr{Line: 419, Op: ir.FilterVarPureOp, Src: "m[\"x\"].Pure", Value: "x"}, - }, - }, - }, - { - Line: 427, - Name: "returnAfterHttpError", - MatcherName: "m", - DocTags: []string{"diagnostic", "experimental"}, - DocSummary: "Detects suspicious http.Error call without following return", - DocBefore: "if err != nil { http.Error(...); }", - DocAfter: "if err != nil { http.Error(...); return; }", - Rules: []ir.Rule{{ - Line: 428, - SyntaxPatterns: []ir.PatternString{{Line: 428, Value: "if $_ { $*_; http.Error($w, $err, $code) }"}}, - ReportTemplate: "Possibly return is missed after the http.Error call", - LocationVar: "w", - }}, - }, - { - Line: 437, - Name: "preferFilepathJoin", - MatcherName: "m", - DocTags: []string{"style", "experimental"}, - DocSummary: "Detects concatenation with os.PathSeparator which can be replaced with filepath.Join", - DocBefore: "x + string(os.PathSeparator) + y", - DocAfter: "filepath.Join(x, y)", - Rules: []ir.Rule{{ - Line: 438, - SyntaxPatterns: []ir.PatternString{{Line: 438, Value: "$x + string(os.PathSeparator) + $y"}}, - ReportTemplate: "filepath.Join($x, $y) should be preferred to the $$", - SuggestTemplate: "filepath.Join($x, $y)", - WhereExpr: ir.FilterExpr{ - Line: 439, - Op: ir.FilterAndOp, - Src: "m[\"x\"].Type.Is(`string`) && m[\"y\"].Type.Is(`string`)", - Args: []ir.FilterExpr{ - { - Line: 439, - Op: ir.FilterVarTypeIsOp, - Src: "m[\"x\"].Type.Is(`string`)", - Value: "x", - Args: []ir.FilterExpr{{Line: 439, Op: ir.FilterStringOp, Src: "`string`", Value: "string"}}, - }, - { - Line: 439, - Op: ir.FilterVarTypeIsOp, - Src: "m[\"y\"].Type.Is(`string`)", - Value: "y", - Args: []ir.FilterExpr{{Line: 439, Op: ir.FilterStringOp, Src: 
"`string`", Value: "string"}}, - }, - }, - }, - }}, - }, - { - Line: 448, - Name: "preferStringWriter", - MatcherName: "m", - DocTags: []string{"performance", "experimental"}, - DocSummary: "Detects w.Write or io.WriteString calls which can be replaced with w.WriteString", - DocBefore: "w.Write([]byte(\"foo\"))", - DocAfter: "w.WriteString(\"foo\")", - Rules: []ir.Rule{ - { - Line: 449, - SyntaxPatterns: []ir.PatternString{{Line: 449, Value: "$w.Write([]byte($s))"}}, - ReportTemplate: "$w.WriteString($s) should be preferred to the $$", - SuggestTemplate: "$w.WriteString($s)", - WhereExpr: ir.FilterExpr{ - Line: 450, - Op: ir.FilterVarTypeImplementsOp, - Src: "m[\"w\"].Type.Implements(\"io.StringWriter\")", - Value: "w", - Args: []ir.FilterExpr{{Line: 450, Op: ir.FilterStringOp, Src: "\"io.StringWriter\"", Value: "io.StringWriter"}}, - }, - }, - { - Line: 454, - SyntaxPatterns: []ir.PatternString{{Line: 454, Value: "io.WriteString($w, $s)"}}, - ReportTemplate: "$w.WriteString($s) should be preferred to the $$", - SuggestTemplate: "$w.WriteString($s)", - WhereExpr: ir.FilterExpr{ - Line: 455, - Op: ir.FilterVarTypeImplementsOp, - Src: "m[\"w\"].Type.Implements(\"io.StringWriter\")", - Value: "w", - Args: []ir.FilterExpr{{Line: 455, Op: ir.FilterStringOp, Src: "\"io.StringWriter\"", Value: "io.StringWriter"}}, - }, - }, - }, - }, - { - Line: 464, - Name: "sliceClear", - MatcherName: "m", - DocTags: []string{"performance", "experimental"}, - DocSummary: "Detects slice clear loops, suggests an idiom that is recognized by the Go compiler", - DocBefore: "for i := 0; i < len(buf); i++ { buf[i] = 0 }", - DocAfter: "for i := range buf { buf[i] = 0 }", - Rules: []ir.Rule{{ - Line: 465, - SyntaxPatterns: []ir.PatternString{{Line: 465, Value: "for $i := 0; $i < len($xs); $i++ { $xs[$i] = $zero }"}}, - ReportTemplate: "rewrite as for-range so compiler can recognize this pattern", - WhereExpr: ir.FilterExpr{ - Line: 466, - Op: ir.FilterEqOp, - Src: "m[\"zero\"].Value.Int() == 0", - Args: []ir.FilterExpr{ - { - Line: 466, - Op: ir.FilterVarValueIntOp, - Src: "m[\"zero\"].Value.Int()", - Value: "zero", - }, - { - Line: 466, - Op: ir.FilterIntOp, - Src: "0", - Value: int64(0), - }, - }, - }, - }}, - }, - { - Line: 474, - Name: "syncMapLoadAndDelete", - MatcherName: "m", - DocTags: []string{"diagnostic", "experimental"}, - DocSummary: "Detects sync.Map load+delete operations that can be replaced with LoadAndDelete", - DocBefore: "v, ok := m.Load(k); if ok { m.Delete($k); f(v); }", - DocAfter: "v, deleted := m.LoadAndDelete(k); if deleted { f(v) }", - Rules: []ir.Rule{{ - Line: 475, - SyntaxPatterns: []ir.PatternString{{Line: 475, Value: "$_, $ok := $m.Load($k); if $ok { $m.Delete($k); $*_ }"}}, - ReportTemplate: "use $m.LoadAndDelete to perform load+delete operations atomically", - WhereExpr: ir.FilterExpr{ - Line: 476, - Op: ir.FilterAndOp, - Src: "m.GoVersion().GreaterEqThan(\"1.15\") &&\n\tm[\"m\"].Type.Is(`*sync.Map`)", - Args: []ir.FilterExpr{ - { - Line: 476, - Op: ir.FilterGoVersionGreaterEqThanOp, - Src: "m.GoVersion().GreaterEqThan(\"1.15\")", - Value: "1.15", - }, - { - Line: 477, - Op: ir.FilterVarTypeIsOp, - Src: "m[\"m\"].Type.Is(`*sync.Map`)", - Value: "m", - Args: []ir.FilterExpr{{Line: 477, Op: ir.FilterStringOp, Src: "`*sync.Map`", Value: "*sync.Map"}}, - }, - }, - }, - }}, - }, - { - Line: 485, - Name: "sprintfQuotedString", - MatcherName: "m", - DocTags: []string{"diagnostic", "experimental"}, - DocSummary: "Detects \"%s\" formatting directives that can be replaced with %q", - DocBefore: 
"fmt.Sprintf(`\"%s\"`, s)", - DocAfter: "fmt.Sprintf(`%q`, s)", - Rules: []ir.Rule{{ - Line: 486, - SyntaxPatterns: []ir.PatternString{{Line: 486, Value: "fmt.Sprintf($s, $*_)"}}, - ReportTemplate: "use %q instead of \"%s\" for quoted strings", - WhereExpr: ir.FilterExpr{ - Line: 487, - Op: ir.FilterOrOp, - Src: "m[\"s\"].Text.Matches(\"^`.*\\\"%s\\\".*`$\") ||\n\tm[\"s\"].Text.Matches(`^\".*\\\\\"%s\\\\\".*\"$`)", - Args: []ir.FilterExpr{ - { - Line: 487, - Op: ir.FilterVarTextMatchesOp, - Src: "m[\"s\"].Text.Matches(\"^`.*\\\"%s\\\".*`$\")", - Value: "s", - Args: []ir.FilterExpr{{Line: 487, Op: ir.FilterStringOp, Src: "\"^`.*\\\"%s\\\".*`$\"", Value: "^`.*\"%s\".*`$"}}, - }, - { - Line: 488, - Op: ir.FilterVarTextMatchesOp, - Src: "m[\"s\"].Text.Matches(`^\".*\\\\\"%s\\\\\".*\"$`)", - Value: "s", - Args: []ir.FilterExpr{{Line: 488, Op: ir.FilterStringOp, Src: "`^\".*\\\\\"%s\\\\\".*\"$`", Value: "^\".*\\\\\"%s\\\\\".*\"$"}}, - }, - }, - }, - }}, - }, - { - Line: 496, - Name: "offBy1", - MatcherName: "m", - DocTags: []string{"diagnostic"}, - DocSummary: "Detects various off-by-one kind of errors", - DocBefore: "xs[len(xs)]", - DocAfter: "xs[len(xs)-1]", - Rules: []ir.Rule{ - { - Line: 497, - SyntaxPatterns: []ir.PatternString{{Line: 497, Value: "$x[len($x)]"}}, - ReportTemplate: "index expr always panics; maybe you wanted $x[len($x)-1]?", - SuggestTemplate: "$x[len($x)-1]", - WhereExpr: ir.FilterExpr{ - Line: 498, - Op: ir.FilterAndOp, - Src: "m[\"x\"].Pure && m[\"x\"].Type.Is(`[]$_`)", - Args: []ir.FilterExpr{ - {Line: 498, Op: ir.FilterVarPureOp, Src: "m[\"x\"].Pure", Value: "x"}, - { - Line: 498, - Op: ir.FilterVarTypeIsOp, - Src: "m[\"x\"].Type.Is(`[]$_`)", - Value: "x", - Args: []ir.FilterExpr{{Line: 498, Op: ir.FilterStringOp, Src: "`[]$_`", Value: "[]$_"}}, - }, - }, - }, - }, - { - Line: 505, - SyntaxPatterns: []ir.PatternString{ - {Line: 506, Value: "$i := strings.Index($s, $_); $_ := $slicing[$i:]"}, - {Line: 507, Value: "$i := strings.Index($s, $_); $_ = $slicing[$i:]"}, - {Line: 508, Value: "$i := bytes.Index($s, $_); $_ := $slicing[$i:]"}, - {Line: 509, Value: "$i := bytes.Index($s, $_); $_ = $slicing[$i:]"}, - }, - ReportTemplate: "Index() can return -1; maybe you wanted to do $s[$i+1:]", - WhereExpr: ir.FilterExpr{ - Line: 510, - Op: ir.FilterEqOp, - Src: "m[\"s\"].Text == m[\"slicing\"].Text", - Args: []ir.FilterExpr{ - {Line: 510, Op: ir.FilterVarTextOp, Src: "m[\"s\"].Text", Value: "s"}, - {Line: 510, Op: ir.FilterVarTextOp, Src: "m[\"slicing\"].Text", Value: "slicing"}, - }, - }, - LocationVar: "slicing", - }, - { - Line: 514, - SyntaxPatterns: []ir.PatternString{ - {Line: 515, Value: "$i := strings.Index($s, $_); $_ := $slicing[:$i]"}, - {Line: 516, Value: "$i := strings.Index($s, $_); $_ = $slicing[:$i]"}, - {Line: 517, Value: "$i := bytes.Index($s, $_); $_ := $slicing[:$i]"}, - {Line: 518, Value: "$i := bytes.Index($s, $_); $_ = $slicing[:$i]"}, - }, - ReportTemplate: "Index() can return -1; maybe you wanted to do $s[:$i+1]", - WhereExpr: ir.FilterExpr{ - Line: 519, - Op: ir.FilterEqOp, - Src: "m[\"s\"].Text == m[\"slicing\"].Text", - Args: []ir.FilterExpr{ - {Line: 519, Op: ir.FilterVarTextOp, Src: "m[\"s\"].Text", Value: "s"}, - {Line: 519, Op: ir.FilterVarTextOp, Src: "m[\"slicing\"].Text", Value: "slicing"}, - }, - }, - LocationVar: "slicing", - }, - { - Line: 523, - SyntaxPatterns: []ir.PatternString{ - {Line: 524, Value: "$s[strings.Index($s, $_):]"}, - {Line: 525, Value: "$s[:strings.Index($s, $_)]"}, - {Line: 526, Value: "$s[bytes.Index($s, $_):]"}, - {Line: 
527, Value: "$s[:bytes.Index($s, $_)]"}, - }, - ReportTemplate: "Index() can return -1; maybe you wanted to do Index()+1", - }, - }, - }, - { - Line: 535, - Name: "unslice", - MatcherName: "m", - DocTags: []string{"style"}, - DocSummary: "Detects slice expressions that can be simplified to sliced expression itself", - DocBefore: "copy(b[:], values...)", - DocAfter: "copy(b, values...)", - Rules: []ir.Rule{{ - Line: 536, - SyntaxPatterns: []ir.PatternString{{Line: 536, Value: "$s[:]"}}, - ReportTemplate: "could simplify $$ to $s", - SuggestTemplate: "$s", - WhereExpr: ir.FilterExpr{ - Line: 537, - Op: ir.FilterOrOp, - Src: "m[\"s\"].Type.Is(`string`) || m[\"s\"].Type.Is(`[]$_`)", - Args: []ir.FilterExpr{ - { - Line: 537, - Op: ir.FilterVarTypeIsOp, - Src: "m[\"s\"].Type.Is(`string`)", - Value: "s", - Args: []ir.FilterExpr{{Line: 537, Op: ir.FilterStringOp, Src: "`string`", Value: "string"}}, - }, - { - Line: 537, - Op: ir.FilterVarTypeIsOp, - Src: "m[\"s\"].Type.Is(`[]$_`)", - Value: "s", - Args: []ir.FilterExpr{{Line: 537, Op: ir.FilterStringOp, Src: "`[]$_`", Value: "[]$_"}}, - }, - }, - }, - }}, - }, - { - Line: 546, - Name: "yodaStyleExpr", - MatcherName: "m", - DocTags: []string{"style", "experimental"}, - DocSummary: "Detects Yoda style expressions and suggests to replace them", - DocBefore: "return nil != ptr", - DocAfter: "return ptr != nil", - Rules: []ir.Rule{ - { - Line: 547, - SyntaxPatterns: []ir.PatternString{{Line: 547, Value: "$constval != $x"}}, - ReportTemplate: "consider to change order in expression to $x != $constval", - WhereExpr: ir.FilterExpr{ - Line: 547, - Op: ir.FilterAndOp, - Src: "m[\"constval\"].Node.Is(`BasicLit`) && !m[\"x\"].Node.Is(`BasicLit`)", - Args: []ir.FilterExpr{ - { - Line: 547, - Op: ir.FilterVarNodeIsOp, - Src: "m[\"constval\"].Node.Is(`BasicLit`)", - Value: "constval", - Args: []ir.FilterExpr{{Line: 547, Op: ir.FilterStringOp, Src: "`BasicLit`", Value: "BasicLit"}}, - }, - { - Line: 547, - Op: ir.FilterNotOp, - Src: "!m[\"x\"].Node.Is(`BasicLit`)", - Args: []ir.FilterExpr{{ - Line: 547, - Op: ir.FilterVarNodeIsOp, - Src: "m[\"x\"].Node.Is(`BasicLit`)", - Value: "x", - Args: []ir.FilterExpr{{Line: 547, Op: ir.FilterStringOp, Src: "`BasicLit`", Value: "BasicLit"}}, - }}, - }, - }, - }, - }, - { - Line: 549, - SyntaxPatterns: []ir.PatternString{{Line: 549, Value: "$constval == $x"}}, - ReportTemplate: "consider to change order in expression to $x == $constval", - WhereExpr: ir.FilterExpr{ - Line: 549, - Op: ir.FilterAndOp, - Src: "m[\"constval\"].Node.Is(`BasicLit`) && !m[\"x\"].Node.Is(`BasicLit`)", - Args: []ir.FilterExpr{ - { - Line: 549, - Op: ir.FilterVarNodeIsOp, - Src: "m[\"constval\"].Node.Is(`BasicLit`)", - Value: "constval", - Args: []ir.FilterExpr{{Line: 549, Op: ir.FilterStringOp, Src: "`BasicLit`", Value: "BasicLit"}}, - }, - { - Line: 549, - Op: ir.FilterNotOp, - Src: "!m[\"x\"].Node.Is(`BasicLit`)", - Args: []ir.FilterExpr{{ - Line: 549, - Op: ir.FilterVarNodeIsOp, - Src: "m[\"x\"].Node.Is(`BasicLit`)", - Value: "x", - Args: []ir.FilterExpr{{Line: 549, Op: ir.FilterStringOp, Src: "`BasicLit`", Value: "BasicLit"}}, - }}, - }, - }, - }, - }, - { - Line: 552, - SyntaxPatterns: []ir.PatternString{{Line: 552, Value: "nil != $x"}}, - ReportTemplate: "consider to change order in expression to $x != nil", - WhereExpr: ir.FilterExpr{ - Line: 552, - Op: ir.FilterNotOp, - Src: "!m[\"x\"].Node.Is(`BasicLit`)", - Args: []ir.FilterExpr{{ - Line: 552, - Op: ir.FilterVarNodeIsOp, - Src: "m[\"x\"].Node.Is(`BasicLit`)", - Value: "x", - Args: 
[]ir.FilterExpr{{Line: 552, Op: ir.FilterStringOp, Src: "`BasicLit`", Value: "BasicLit"}}, - }}, - }, - }, - { - Line: 554, - SyntaxPatterns: []ir.PatternString{{Line: 554, Value: "nil == $x"}}, - ReportTemplate: "consider to change order in expression to $x == nil", - WhereExpr: ir.FilterExpr{ - Line: 554, - Op: ir.FilterNotOp, - Src: "!m[\"x\"].Node.Is(`BasicLit`)", - Args: []ir.FilterExpr{{ - Line: 554, - Op: ir.FilterVarNodeIsOp, - Src: "m[\"x\"].Node.Is(`BasicLit`)", - Value: "x", - Args: []ir.FilterExpr{{Line: 554, Op: ir.FilterStringOp, Src: "`BasicLit`", Value: "BasicLit"}}, - }}, - }, - }, - }, - }, - { - Line: 562, - Name: "equalFold", - MatcherName: "m", - DocTags: []string{"performance", "experimental"}, - DocSummary: "Detects unoptimal strings/bytes case-insensitive comparison", - DocBefore: "strings.ToLower(x) == strings.ToLower(y)", - DocAfter: "strings.EqualFold(x, y)", - Rules: []ir.Rule{ - { - Line: 571, - SyntaxPatterns: []ir.PatternString{ - {Line: 572, Value: "strings.ToLower($x) == $y"}, - {Line: 573, Value: "strings.ToLower($x) == strings.ToLower($y)"}, - {Line: 574, Value: "$x == strings.ToLower($y)"}, - {Line: 575, Value: "strings.ToUpper($x) == $y"}, - {Line: 576, Value: "strings.ToUpper($x) == strings.ToUpper($y)"}, - {Line: 577, Value: "$x == strings.ToUpper($y)"}, - }, - ReportTemplate: "consider replacing with strings.EqualFold($x, $y)", - SuggestTemplate: "strings.EqualFold($x, $y)", - WhereExpr: ir.FilterExpr{ - Line: 578, - Op: ir.FilterAndOp, - Src: "m[\"x\"].Pure && m[\"y\"].Pure && m[\"x\"].Text != m[\"y\"].Text", - Args: []ir.FilterExpr{ - { - Line: 578, - Op: ir.FilterAndOp, - Src: "m[\"x\"].Pure && m[\"y\"].Pure", - Args: []ir.FilterExpr{ - {Line: 578, Op: ir.FilterVarPureOp, Src: "m[\"x\"].Pure", Value: "x"}, - {Line: 578, Op: ir.FilterVarPureOp, Src: "m[\"y\"].Pure", Value: "y"}, - }, - }, - { - Line: 578, - Op: ir.FilterNeqOp, - Src: "m[\"x\"].Text != m[\"y\"].Text", - Args: []ir.FilterExpr{ - {Line: 578, Op: ir.FilterVarTextOp, Src: "m[\"x\"].Text", Value: "x"}, - {Line: 578, Op: ir.FilterVarTextOp, Src: "m[\"y\"].Text", Value: "y"}, - }, - }, - }, - }, - }, - { - Line: 583, - SyntaxPatterns: []ir.PatternString{ - {Line: 584, Value: "strings.ToLower($x) != $y"}, - {Line: 585, Value: "strings.ToLower($x) != strings.ToLower($y)"}, - {Line: 586, Value: "$x != strings.ToLower($y)"}, - {Line: 587, Value: "strings.ToUpper($x) != $y"}, - {Line: 588, Value: "strings.ToUpper($x) != strings.ToUpper($y)"}, - {Line: 589, Value: "$x != strings.ToUpper($y)"}, - }, - ReportTemplate: "consider replacing with !strings.EqualFold($x, $y)", - SuggestTemplate: "!strings.EqualFold($x, $y)", - WhereExpr: ir.FilterExpr{ - Line: 590, - Op: ir.FilterAndOp, - Src: "m[\"x\"].Pure && m[\"y\"].Pure && m[\"x\"].Text != m[\"y\"].Text", - Args: []ir.FilterExpr{ - { - Line: 590, - Op: ir.FilterAndOp, - Src: "m[\"x\"].Pure && m[\"y\"].Pure", - Args: []ir.FilterExpr{ - {Line: 590, Op: ir.FilterVarPureOp, Src: "m[\"x\"].Pure", Value: "x"}, - {Line: 590, Op: ir.FilterVarPureOp, Src: "m[\"y\"].Pure", Value: "y"}, - }, - }, - { - Line: 590, - Op: ir.FilterNeqOp, - Src: "m[\"x\"].Text != m[\"y\"].Text", - Args: []ir.FilterExpr{ - {Line: 590, Op: ir.FilterVarTextOp, Src: "m[\"x\"].Text", Value: "x"}, - {Line: 590, Op: ir.FilterVarTextOp, Src: "m[\"y\"].Text", Value: "y"}, - }, - }, - }, - }, - }, - { - Line: 595, - SyntaxPatterns: []ir.PatternString{ - {Line: 596, Value: "bytes.Equal(bytes.ToLower($x), $y)"}, - {Line: 597, Value: "bytes.Equal(bytes.ToLower($x), bytes.ToLower($y))"}, - 
{Line: 598, Value: "bytes.Equal($x, bytes.ToLower($y))"}, - {Line: 599, Value: "bytes.Equal(bytes.ToUpper($x), $y)"}, - {Line: 600, Value: "bytes.Equal(bytes.ToUpper($x), bytes.ToUpper($y))"}, - {Line: 601, Value: "bytes.Equal($x, bytes.ToUpper($y))"}, - }, - ReportTemplate: "consider replacing with bytes.EqualFold($x, $y)", - SuggestTemplate: "bytes.EqualFold($x, $y)", - WhereExpr: ir.FilterExpr{ - Line: 602, - Op: ir.FilterAndOp, - Src: "m[\"x\"].Pure && m[\"y\"].Pure && m[\"x\"].Text != m[\"y\"].Text", - Args: []ir.FilterExpr{ - { - Line: 602, - Op: ir.FilterAndOp, - Src: "m[\"x\"].Pure && m[\"y\"].Pure", - Args: []ir.FilterExpr{ - {Line: 602, Op: ir.FilterVarPureOp, Src: "m[\"x\"].Pure", Value: "x"}, - {Line: 602, Op: ir.FilterVarPureOp, Src: "m[\"y\"].Pure", Value: "y"}, - }, - }, - { - Line: 602, - Op: ir.FilterNeqOp, - Src: "m[\"x\"].Text != m[\"y\"].Text", - Args: []ir.FilterExpr{ - {Line: 602, Op: ir.FilterVarTextOp, Src: "m[\"x\"].Text", Value: "x"}, - {Line: 602, Op: ir.FilterVarTextOp, Src: "m[\"y\"].Text", Value: "y"}, - }, - }, - }, - }, - }, - }, - }, - { - Line: 611, - Name: "argOrder", - MatcherName: "m", - DocTags: []string{"diagnostic"}, - DocSummary: "Detects suspicious arguments order", - DocBefore: "strings.HasPrefix(\"#\", userpass)", - DocAfter: "strings.HasPrefix(userpass, \"#\")", - Rules: []ir.Rule{{ - Line: 612, - SyntaxPatterns: []ir.PatternString{ - {Line: 613, Value: "strings.HasPrefix($lit, $s)"}, - {Line: 614, Value: "bytes.HasPrefix($lit, $s)"}, - {Line: 615, Value: "strings.HasSuffix($lit, $s)"}, - {Line: 616, Value: "bytes.HasSuffix($lit, $s)"}, - {Line: 617, Value: "strings.Contains($lit, $s)"}, - {Line: 618, Value: "bytes.Contains($lit, $s)"}, - {Line: 619, Value: "strings.TrimPrefix($lit, $s)"}, - {Line: 620, Value: "bytes.TrimPrefix($lit, $s)"}, - {Line: 621, Value: "strings.TrimSuffix($lit, $s)"}, - {Line: 622, Value: "bytes.TrimSuffix($lit, $s)"}, - {Line: 623, Value: "strings.Split($lit, $s)"}, - {Line: 624, Value: "bytes.Split($lit, $s)"}, - }, - ReportTemplate: "$lit and $s arguments order looks reversed", - WhereExpr: ir.FilterExpr{ - Line: 625, - Op: ir.FilterAndOp, - Src: "(m[\"lit\"].Const || m[\"lit\"].ConstSlice) &&\n\t!(m[\"s\"].Const || m[\"s\"].ConstSlice) &&\n\t!m[\"lit\"].Node.Is(`Ident`)", - Args: []ir.FilterExpr{ - { - Line: 625, - Op: ir.FilterAndOp, - Src: "(m[\"lit\"].Const || m[\"lit\"].ConstSlice) &&\n\t!(m[\"s\"].Const || m[\"s\"].ConstSlice)", - Args: []ir.FilterExpr{ - { - Line: 625, - Op: ir.FilterOrOp, - Src: "(m[\"lit\"].Const || m[\"lit\"].ConstSlice)", - Args: []ir.FilterExpr{ - { - Line: 625, - Op: ir.FilterVarConstOp, - Src: "m[\"lit\"].Const", - Value: "lit", - }, - { - Line: 625, - Op: ir.FilterVarConstSliceOp, - Src: "m[\"lit\"].ConstSlice", - Value: "lit", - }, - }, - }, - { - Line: 626, - Op: ir.FilterNotOp, - Src: "!(m[\"s\"].Const || m[\"s\"].ConstSlice)", - Args: []ir.FilterExpr{{ - Line: 626, - Op: ir.FilterOrOp, - Src: "(m[\"s\"].Const || m[\"s\"].ConstSlice)", - Args: []ir.FilterExpr{ - { - Line: 626, - Op: ir.FilterVarConstOp, - Src: "m[\"s\"].Const", - Value: "s", - }, - { - Line: 626, - Op: ir.FilterVarConstSliceOp, - Src: "m[\"s\"].ConstSlice", - Value: "s", - }, - }, - }}, - }, - }, - }, - { - Line: 627, - Op: ir.FilterNotOp, - Src: "!m[\"lit\"].Node.Is(`Ident`)", - Args: []ir.FilterExpr{{ - Line: 627, - Op: ir.FilterVarNodeIsOp, - Src: "m[\"lit\"].Node.Is(`Ident`)", - Value: "lit", - Args: []ir.FilterExpr{{Line: 627, Op: ir.FilterStringOp, Src: "`Ident`", Value: "Ident"}}, - }}, - }, - }, - }, - }}, - 
}, - { - Line: 635, - Name: "stringConcatSimplify", - MatcherName: "m", - DocTags: []string{"style", "experimental"}, - DocSummary: "Detects string concat operations that can be simplified", - DocBefore: "strings.Join([]string{x, y}, \"_\")", - DocAfter: "x + \"_\" + y", - Rules: []ir.Rule{ - { - Line: 636, - SyntaxPatterns: []ir.PatternString{{Line: 636, Value: "strings.Join([]string{$x, $y}, \"\")"}}, - ReportTemplate: "suggestion: $x + $y", - SuggestTemplate: "$x + $y", - }, - { - Line: 637, - SyntaxPatterns: []ir.PatternString{{Line: 637, Value: "strings.Join([]string{$x, $y, $z}, \"\")"}}, - ReportTemplate: "suggestion: $x + $y + $z", - SuggestTemplate: "$x + $y + $z", - }, - { - Line: 638, - SyntaxPatterns: []ir.PatternString{{Line: 638, Value: "strings.Join([]string{$x, $y}, $glue)"}}, - ReportTemplate: "suggestion: $x + $glue + $y", - SuggestTemplate: "$x + $glue + $y", - }, - }, - }, - { - Line: 645, - Name: "timeExprSimplify", - MatcherName: "m", - DocTags: []string{"style", "experimental"}, - DocSummary: "Detects manual conversion to milli- or microseconds", - DocBefore: "t.Unix() / 1000", - DocAfter: "t.UnixMilli()", - Rules: []ir.Rule{ - { - Line: 650, - SyntaxPatterns: []ir.PatternString{{Line: 650, Value: "$t.Unix() / 1000"}}, - ReportTemplate: "use $t.UnixMilli() instead of $$", - SuggestTemplate: "$t.UnixMilli()", - WhereExpr: ir.FilterExpr{ - Line: 651, - Op: ir.FilterAndOp, - Src: "m.GoVersion().GreaterEqThan(\"1.17\") && isTime(m[\"t\"])", - Args: []ir.FilterExpr{ - { - Line: 651, - Op: ir.FilterGoVersionGreaterEqThanOp, - Src: "m.GoVersion().GreaterEqThan(\"1.17\")", - Value: "1.17", - }, - { - Line: 651, - Op: ir.FilterOrOp, - Src: "isTime(m[\"t\"])", - Args: []ir.FilterExpr{ - { - Line: 651, - Op: ir.FilterVarTypeIsOp, - Src: "m[\"t\"].Type.Is(`time.Time`)", - Value: "t", - Args: []ir.FilterExpr{{Line: 647, Op: ir.FilterStringOp, Src: "`time.Time`", Value: "time.Time"}}, - }, - { - Line: 651, - Op: ir.FilterVarTypeIsOp, - Src: "m[\"t\"].Type.Is(`*time.Time`)", - Value: "t", - Args: []ir.FilterExpr{{Line: 647, Op: ir.FilterStringOp, Src: "`*time.Time`", Value: "*time.Time"}}, - }, - }, - }, - }, - }, - }, - { - Line: 655, - SyntaxPatterns: []ir.PatternString{{Line: 655, Value: "$t.UnixNano() * 1000"}}, - ReportTemplate: "use $t.UnixMicro() instead of $$", - SuggestTemplate: "$t.UnixMicro()", - WhereExpr: ir.FilterExpr{ - Line: 656, - Op: ir.FilterAndOp, - Src: "m.GoVersion().GreaterEqThan(\"1.17\") && isTime(m[\"t\"])", - Args: []ir.FilterExpr{ - { - Line: 656, - Op: ir.FilterGoVersionGreaterEqThanOp, - Src: "m.GoVersion().GreaterEqThan(\"1.17\")", - Value: "1.17", - }, - { - Line: 656, - Op: ir.FilterOrOp, - Src: "isTime(m[\"t\"])", - Args: []ir.FilterExpr{ - { - Line: 656, - Op: ir.FilterVarTypeIsOp, - Src: "m[\"t\"].Type.Is(`time.Time`)", - Value: "t", - Args: []ir.FilterExpr{{Line: 647, Op: ir.FilterStringOp, Src: "`time.Time`", Value: "time.Time"}}, - }, - { - Line: 656, - Op: ir.FilterVarTypeIsOp, - Src: "m[\"t\"].Type.Is(`*time.Time`)", - Value: "t", - Args: []ir.FilterExpr{{Line: 647, Op: ir.FilterStringOp, Src: "`*time.Time`", Value: "*time.Time"}}, - }, - }, - }, - }, - }, - }, - }, - }, - { - Line: 665, - Name: "exposedSyncMutex", - MatcherName: "m", - DocTags: []string{"style", "experimental"}, - DocSummary: "Detects exposed methods from sync.Mutex and sync.RWMutex", - DocBefore: "type Foo struct{ ...; sync.Mutex; ... }", - DocAfter: "type Foo struct{ ...; mu sync.Mutex; ... 
}", - Rules: []ir.Rule{ - { - Line: 670, - SyntaxPatterns: []ir.PatternString{{Line: 670, Value: "type $x struct { $*_; sync.Mutex; $*_ }"}}, - ReportTemplate: "don't embed sync.Mutex", - WhereExpr: ir.FilterExpr{ - Line: 671, - Op: ir.FilterVarTextMatchesOp, - Src: "isExported(m[\"x\"])", - Value: "x", - Args: []ir.FilterExpr{{Line: 667, Op: ir.FilterStringOp, Src: "`^\\p{Lu}`", Value: "^\\p{Lu}"}}, - }, - }, - { - Line: 674, - SyntaxPatterns: []ir.PatternString{{Line: 674, Value: "type $x struct { $*_; *sync.Mutex; $*_ }"}}, - ReportTemplate: "don't embed *sync.Mutex", - WhereExpr: ir.FilterExpr{ - Line: 675, - Op: ir.FilterVarTextMatchesOp, - Src: "isExported(m[\"x\"])", - Value: "x", - Args: []ir.FilterExpr{{Line: 667, Op: ir.FilterStringOp, Src: "`^\\p{Lu}`", Value: "^\\p{Lu}"}}, - }, - }, - { - Line: 678, - SyntaxPatterns: []ir.PatternString{{Line: 678, Value: "type $x struct { $*_; sync.RWMutex; $*_ }"}}, - ReportTemplate: "don't embed sync.RWMutex", - WhereExpr: ir.FilterExpr{ - Line: 679, - Op: ir.FilterVarTextMatchesOp, - Src: "isExported(m[\"x\"])", - Value: "x", - Args: []ir.FilterExpr{{Line: 667, Op: ir.FilterStringOp, Src: "`^\\p{Lu}`", Value: "^\\p{Lu}"}}, - }, - }, - { - Line: 682, - SyntaxPatterns: []ir.PatternString{{Line: 682, Value: "type $x struct { $*_; *sync.RWMutex; $*_ }"}}, - ReportTemplate: "don't embed *sync.RWMutex", - WhereExpr: ir.FilterExpr{ - Line: 683, - Op: ir.FilterVarTextMatchesOp, - Src: "isExported(m[\"x\"])", - Value: "x", - Args: []ir.FilterExpr{{Line: 667, Op: ir.FilterStringOp, Src: "`^\\p{Lu}`", Value: "^\\p{Lu}"}}, - }, - }, - }, - }, - { - Line: 691, - Name: "badSorting", - MatcherName: "m", - DocTags: []string{"diagnostic", "experimental"}, - DocSummary: "Detects bad usage of sort package", - DocBefore: "xs = sort.StringSlice(xs)", - DocAfter: "sort.Strings(xs)", - Rules: []ir.Rule{ - { - Line: 692, - SyntaxPatterns: []ir.PatternString{{Line: 692, Value: "$x = sort.IntSlice($x)"}}, - ReportTemplate: "suspicious sort.IntSlice usage, maybe sort.Ints was intended?", - SuggestTemplate: "sort.Ints($x)", - WhereExpr: ir.FilterExpr{ - Line: 693, - Op: ir.FilterVarTypeIsOp, - Src: "m[\"x\"].Type.Is(`[]int`)", - Value: "x", - Args: []ir.FilterExpr{{Line: 693, Op: ir.FilterStringOp, Src: "`[]int`", Value: "[]int"}}, - }, - }, - { - Line: 697, - SyntaxPatterns: []ir.PatternString{{Line: 697, Value: "$x = sort.Float64Slice($x)"}}, - ReportTemplate: "suspicious sort.Float64s usage, maybe sort.Float64s was intended?", - SuggestTemplate: "sort.Float64s($x)", - WhereExpr: ir.FilterExpr{ - Line: 698, - Op: ir.FilterVarTypeIsOp, - Src: "m[\"x\"].Type.Is(`[]float64`)", - Value: "x", - Args: []ir.FilterExpr{{Line: 698, Op: ir.FilterStringOp, Src: "`[]float64`", Value: "[]float64"}}, - }, - }, - { - Line: 702, - SyntaxPatterns: []ir.PatternString{{Line: 702, Value: "$x = sort.StringSlice($x)"}}, - ReportTemplate: "suspicious sort.StringSlice usage, maybe sort.Strings was intended?", - SuggestTemplate: "sort.Strings($x)", - WhereExpr: ir.FilterExpr{ - Line: 703, - Op: ir.FilterVarTypeIsOp, - Src: "m[\"x\"].Type.Is(`[]string`)", - Value: "x", - Args: []ir.FilterExpr{{Line: 703, Op: ir.FilterStringOp, Src: "`[]string`", Value: "[]string"}}, - }, - }, - }, - }, - { - Line: 712, - Name: "externalErrorReassign", - MatcherName: "m", - DocTags: []string{"diagnostic", "experimental"}, - DocSummary: "Detects suspicious reassignment of error from another package", - DocBefore: "io.EOF = nil", - DocAfter: "/* don't do it */", - Rules: []ir.Rule{{ - Line: 713, - 
SyntaxPatterns: []ir.PatternString{{Line: 713, Value: "$pkg.$err = $x"}}, - ReportTemplate: "suspicious reassignment of error from another package", - WhereExpr: ir.FilterExpr{ - Line: 714, - Op: ir.FilterAndOp, - Src: "m[\"err\"].Type.Is(`error`) && m[\"pkg\"].Object.Is(`PkgName`)", - Args: []ir.FilterExpr{ - { - Line: 714, - Op: ir.FilterVarTypeIsOp, - Src: "m[\"err\"].Type.Is(`error`)", - Value: "err", - Args: []ir.FilterExpr{{Line: 714, Op: ir.FilterStringOp, Src: "`error`", Value: "error"}}, - }, - { - Line: 714, - Op: ir.FilterVarObjectIsOp, - Src: "m[\"pkg\"].Object.Is(`PkgName`)", - Value: "pkg", - Args: []ir.FilterExpr{{Line: 714, Op: ir.FilterStringOp, Src: "`PkgName`", Value: "PkgName"}}, - }, - }, - }, - }}, - }, - { - Line: 722, - Name: "emptyDecl", - MatcherName: "m", - DocTags: []string{"diagnostic", "experimental"}, - DocSummary: "Detects suspicious empty declarations blocks", - DocBefore: "var()", - DocAfter: "/* nothing */", - Rules: []ir.Rule{ - { - Line: 723, - SyntaxPatterns: []ir.PatternString{{Line: 723, Value: "var()"}}, - ReportTemplate: "empty var() block", - }, - { - Line: 724, - SyntaxPatterns: []ir.PatternString{{Line: 724, Value: "const()"}}, - ReportTemplate: "empty const() block", - }, - { - Line: 725, - SyntaxPatterns: []ir.PatternString{{Line: 725, Value: "type()"}}, - ReportTemplate: "empty type() block", - }, - }, - }, - { - Line: 732, - Name: "dynamicFmtString", - MatcherName: "m", - DocTags: []string{"diagnostic", "experimental"}, - DocSummary: "Detects suspicious formatting strings usage", - DocBefore: "fmt.Errorf(msg)", - DocAfter: "fmt.Errorf(\"%s\", msg)", - Rules: []ir.Rule{ - { - Line: 733, - SyntaxPatterns: []ir.PatternString{{Line: 733, Value: "fmt.Errorf($f)"}}, - ReportTemplate: "use errors.New($f) or fmt.Errorf(\"%s\", $f) instead", - SuggestTemplate: "errors.New($f)", - WhereExpr: ir.FilterExpr{ - Line: 734, - Op: ir.FilterNotOp, - Src: "!m[\"f\"].Const", - Args: []ir.FilterExpr{{ - Line: 734, - Op: ir.FilterVarConstOp, - Src: "m[\"f\"].Const", - Value: "f", - }}, - }, - }, - { - Line: 738, - SyntaxPatterns: []ir.PatternString{{Line: 738, Value: "fmt.Errorf($f($*args))"}}, - ReportTemplate: "use errors.New($f($*args)) or fmt.Errorf(\"%s\", $f($*args)) instead", - SuggestTemplate: "errors.New($f($*args))", - }, - }, - }, - { - Line: 747, - Name: "stringsCompare", - MatcherName: "m", - DocTags: []string{"style", "experimental"}, - DocSummary: "Detects strings.Compare usage", - DocBefore: "strings.Compare(x, y)", - DocAfter: "x < y", - Rules: []ir.Rule{ - { - Line: 748, - SyntaxPatterns: []ir.PatternString{{Line: 748, Value: "strings.Compare($s1, $s2) == 0"}}, - ReportTemplate: "suggestion: $s1 == $s2", - SuggestTemplate: "$s1 == $s2", - }, - { - Line: 751, - SyntaxPatterns: []ir.PatternString{ - {Line: 751, Value: "strings.Compare($s1, $s2) == -1"}, - {Line: 752, Value: "strings.Compare($s1, $s2) < 0"}, - }, - ReportTemplate: "suggestion: $s1 < $s2", - SuggestTemplate: "$s1 < $s2", - }, - { - Line: 755, - SyntaxPatterns: []ir.PatternString{ - {Line: 755, Value: "strings.Compare($s1, $s2) == 1"}, - {Line: 756, Value: "strings.Compare($s1, $s2) > 0"}, - }, - ReportTemplate: "suggestion: $s1 > $s2", - SuggestTemplate: "$s1 > $s2", - }, - }, - }, - { - Line: 764, - Name: "uncheckedInlineErr", - MatcherName: "m", - DocTags: []string{"diagnostic", "experimental"}, - DocSummary: "Detects unchecked errors in if statements", - DocBefore: "if err := expr(); err2 != nil { /*...*/ }", - DocAfter: "if err := expr(); err != nil { /*...*/ }", - Rules: 
[]ir.Rule{{ - Line: 765, - SyntaxPatterns: []ir.PatternString{ - {Line: 766, Value: "if $err := $_($*_); $err2 != nil { $*_ }"}, - {Line: 767, Value: "if $err = $_($*_); $err2 != nil { $*_ }"}, - {Line: 768, Value: "if $*_, $err := $_($*_); $err2 != nil { $*_ }"}, - {Line: 769, Value: "if $*_, $err = $_($*_); $err2 != nil { $*_ }"}, - }, - ReportTemplate: "$err error is unchecked, maybe intended to check it instead of $err2", - WhereExpr: ir.FilterExpr{ - Line: 770, - Op: ir.FilterAndOp, - Src: "m[\"err\"].Type.Implements(\"error\") && m[\"err2\"].Type.Implements(\"error\") &&\n\tm[\"err\"].Text != m[\"err2\"].Text", - Args: []ir.FilterExpr{ - { - Line: 770, - Op: ir.FilterAndOp, - Src: "m[\"err\"].Type.Implements(\"error\") && m[\"err2\"].Type.Implements(\"error\")", - Args: []ir.FilterExpr{ - { - Line: 770, - Op: ir.FilterVarTypeImplementsOp, - Src: "m[\"err\"].Type.Implements(\"error\")", - Value: "err", - Args: []ir.FilterExpr{{Line: 770, Op: ir.FilterStringOp, Src: "\"error\"", Value: "error"}}, - }, - { - Line: 770, - Op: ir.FilterVarTypeImplementsOp, - Src: "m[\"err2\"].Type.Implements(\"error\")", - Value: "err2", - Args: []ir.FilterExpr{{Line: 770, Op: ir.FilterStringOp, Src: "\"error\"", Value: "error"}}, - }, - }, - }, - { - Line: 771, - Op: ir.FilterNeqOp, - Src: "m[\"err\"].Text != m[\"err2\"].Text", - Args: []ir.FilterExpr{ - {Line: 771, Op: ir.FilterVarTextOp, Src: "m[\"err\"].Text", Value: "err"}, - {Line: 771, Op: ir.FilterVarTextOp, Src: "m[\"err2\"].Text", Value: "err2"}, - }, - }, - }, - }, - LocationVar: "err", - }}, - }, - { - Line: 780, - Name: "badSyncOnceFunc", - MatcherName: "m", - DocTags: []string{"diagnostic", "experimental"}, - DocSummary: "Detects bad usage of sync.OnceFunc", - DocBefore: "sync.OnceFunc(foo)()", - DocAfter: "fooOnce := sync.OnceFunc(foo); ...; fooOnce()", - Rules: []ir.Rule{ - { - Line: 781, - SyntaxPatterns: []ir.PatternString{{Line: 781, Value: "$*_; sync.OnceFunc($x); $*_;"}}, - ReportTemplate: "possible sync.OnceFunc misuse, sync.OnceFunc($x) result is not used", - WhereExpr: ir.FilterExpr{ - Line: 783, - Op: ir.FilterGoVersionGreaterEqThanOp, - Src: "m.GoVersion().GreaterEqThan(\"1.21\")", - Value: "1.21", - }, - }, - { - Line: 785, - SyntaxPatterns: []ir.PatternString{{Line: 785, Value: "sync.OnceFunc($x)()"}}, - ReportTemplate: "possible sync.OnceFunc misuse, consider to assign sync.OnceFunc($x) to a variable", - WhereExpr: ir.FilterExpr{ - Line: 787, - Op: ir.FilterGoVersionGreaterEqThanOp, - Src: "m.GoVersion().GreaterEqThan(\"1.21\")", - Value: "1.21", - }, - }, - }, - }, - }, -} - diff --git a/vendor/github.com/go-critic/go-critic/checkers/singleCaseSwitch_checker.go b/vendor/github.com/go-critic/go-critic/checkers/singleCaseSwitch_checker.go deleted file mode 100644 index a1a399fda..000000000 --- a/vendor/github.com/go-critic/go-critic/checkers/singleCaseSwitch_checker.go +++ /dev/null @@ -1,85 +0,0 @@ -package checkers - -import ( - "go/ast" - "go/token" - - "github.com/go-critic/go-critic/checkers/internal/astwalk" - "github.com/go-critic/go-critic/linter" - - "golang.org/x/tools/go/ast/astutil" -) - -func init() { - var info linter.CheckerInfo - info.Name = "singleCaseSwitch" - info.Tags = []string{linter.StyleTag} - info.Summary = "Detects switch statements that could be better written as if statement" - info.Before = ` -switch x := x.(type) { -case int: - body() -}` - info.After = ` -if x, ok := x.(int); ok { - body() -}` - - collection.AddChecker(&info, func(ctx *linter.CheckerContext) (linter.FileWalker, error) { - return 
astwalk.WalkerForStmt(&singleCaseSwitchChecker{ctx: ctx}), nil - }) -} - -type singleCaseSwitchChecker struct { - astwalk.WalkHandler - ctx *linter.CheckerContext -} - -func (c *singleCaseSwitchChecker) VisitStmt(stmt ast.Stmt) { - switch stmt := stmt.(type) { - case *ast.SwitchStmt: - c.checkSwitchStmt(stmt, stmt.Body) - case *ast.TypeSwitchStmt: - c.checkSwitchStmt(stmt, stmt.Body) - } -} - -func (c *singleCaseSwitchChecker) checkSwitchStmt(stmt ast.Stmt, body *ast.BlockStmt) { - if len(body.List) != 1 { - return - } - cc := body.List[0].(*ast.CaseClause) - if c.hasBreak(cc) { - return - } - switch { - case cc.List == nil: - c.warnDefault(stmt) - case len(cc.List) == 1: - c.warn(stmt) - } -} - -func (c *singleCaseSwitchChecker) hasBreak(stmt ast.Stmt) bool { - found := false - astutil.Apply(stmt, func(cur *astutil.Cursor) bool { - switch n := cur.Node().(type) { - case *ast.BranchStmt: - if n.Tok == token.BREAK { - found = true - } - case *ast.ForStmt, *ast.RangeStmt, *ast.SelectStmt, *ast.SwitchStmt: - return false - } - return true - }, nil) - return found -} - -func (c *singleCaseSwitchChecker) warn(stmt ast.Stmt) { - c.ctx.Warn(stmt, "should rewrite switch statement to if statement") -} - -func (c *singleCaseSwitchChecker) warnDefault(stmt ast.Stmt) { - c.ctx.Warn(stmt, "found switch with default case only") -} diff --git a/vendor/github.com/go-critic/go-critic/checkers/sloppyReassign_checker.go b/vendor/github.com/go-critic/go-critic/checkers/sloppyReassign_checker.go deleted file mode 100644 index d83d7fd5a..000000000 --- a/vendor/github.com/go-critic/go-critic/checkers/sloppyReassign_checker.go +++ /dev/null @@ -1,81 +0,0 @@ -package checkers - -import ( - "go/ast" - "go/token" - - "github.com/go-critic/go-critic/checkers/internal/astwalk" - "github.com/go-critic/go-critic/linter" - - "github.com/go-toolsmith/astcast" - "github.com/go-toolsmith/astcopy" - "github.com/go-toolsmith/astequal" -) - -func init() { - var info linter.CheckerInfo - info.Name = "sloppyReassign" - info.Tags = []string{linter.DiagnosticTag, linter.ExperimentalTag} - info.Summary = "Detects suspicious/confusing re-assignments" - info.Before = `if err = f(); err != nil { return err }` - info.After = `if err := f(); err != nil { return err }` - - collection.AddChecker(&info, func(ctx *linter.CheckerContext) (linter.FileWalker, error) { - return astwalk.WalkerForStmt(&sloppyReassignChecker{ctx: ctx}), nil - }) -} - -type sloppyReassignChecker struct { - astwalk.WalkHandler - ctx *linter.CheckerContext -} - -func (c *sloppyReassignChecker) VisitStmt(stmt ast.Stmt) { - // Right now only check assignments in if statements init. - ifStmt := astcast.ToIfStmt(stmt) - assign := astcast.ToAssignStmt(ifStmt.Init) - if assign.Tok != token.ASSIGN { - return - } - - // TODO(quasilyte): is handling of multi-value assignments worthwhile? - if len(assign.Lhs) != 1 || len(assign.Rhs) != 1 { - return - } - - // TODO(quasilyte): handle not only the simplest, return-only case. - body := ifStmt.Body.List - if len(body) != 1 { - return - } - - // Variable that is being re-assigned. - reAssigned := astcast.ToIdent(assign.Lhs[0]) - if reAssigned.Name == "" { - return - } - - // TODO(quasilyte): handle not only nil comparisons. 
- eqToNil := &ast.BinaryExpr{ - Op: token.NEQ, - X: reAssigned, - Y: &ast.Ident{Name: "nil"}, - } - if !astequal.Expr(ifStmt.Cond, eqToNil) { - return - } - - results := astcast.ToReturnStmt(body[0]).Results - for _, res := range results { - if astequal.Expr(reAssigned, res) { - c.warnAssignToDefine(assign, reAssigned.Name) - break - } - } -} - -func (c *sloppyReassignChecker) warnAssignToDefine(assign *ast.AssignStmt, name string) { - suggest := astcopy.AssignStmt(assign) - suggest.Tok = token.DEFINE - c.ctx.Warn(assign, "re-assignment to `%s` can be replaced with `%s`", name, suggest) -} diff --git a/vendor/github.com/go-critic/go-critic/checkers/sloppyTypeAssert_checker.go b/vendor/github.com/go-critic/go-critic/checkers/sloppyTypeAssert_checker.go deleted file mode 100644 index 454ab78b1..000000000 --- a/vendor/github.com/go-critic/go-critic/checkers/sloppyTypeAssert_checker.go +++ /dev/null @@ -1,56 +0,0 @@ -package checkers - -import ( - "go/ast" - "go/types" - - "github.com/go-critic/go-critic/checkers/internal/astwalk" - "github.com/go-critic/go-critic/linter" - - "github.com/go-toolsmith/astcast" -) - -func init() { - var info linter.CheckerInfo - info.Name = "sloppyTypeAssert" - info.Tags = []string{linter.DiagnosticTag} - info.Summary = "Detects redundant type assertions" - info.Before = ` -func f(r io.Reader) interface{} { - return r.(interface{}) -} -` - info.After = ` -func f(r io.Reader) interface{} { - return r -} -` - - collection.AddChecker(&info, func(ctx *linter.CheckerContext) (linter.FileWalker, error) { - return astwalk.WalkerForExpr(&sloppyTypeAssertChecker{ctx: ctx}), nil - }) -} - -type sloppyTypeAssertChecker struct { - astwalk.WalkHandler - ctx *linter.CheckerContext -} - -func (c *sloppyTypeAssertChecker) VisitExpr(expr ast.Expr) { - assert := astcast.ToTypeAssertExpr(expr) - if assert.Type == nil { - return - } - - toType := c.ctx.TypeOf(expr) - fromType := c.ctx.TypeOf(assert.X) - - if types.Identical(toType, fromType) { - c.warnIdentical(expr) - return - } -} - -func (c *sloppyTypeAssertChecker) warnIdentical(cause ast.Expr) { - c.ctx.Warn(cause, "type assertion from/to types are identical") -} diff --git a/vendor/github.com/go-critic/go-critic/checkers/sortSlice_checker.go b/vendor/github.com/go-critic/go-critic/checkers/sortSlice_checker.go deleted file mode 100644 index 22ef3b16a..000000000 --- a/vendor/github.com/go-critic/go-critic/checkers/sortSlice_checker.go +++ /dev/null @@ -1,136 +0,0 @@ -package checkers - -import ( - "go/ast" - "go/token" - - "github.com/go-critic/go-critic/checkers/internal/astwalk" - "github.com/go-critic/go-critic/checkers/internal/lintutil" - "github.com/go-critic/go-critic/linter" - - "github.com/go-toolsmith/astcast" - "github.com/go-toolsmith/astequal" - "github.com/go-toolsmith/typep" - "golang.org/x/tools/go/ast/astutil" -) - -func init() { - var info linter.CheckerInfo - info.Name = "sortSlice" - info.Tags = []string{linter.DiagnosticTag, linter.ExperimentalTag} - info.Summary = "Detects suspicious sort.Slice calls" - info.Before = `sort.Slice(xs, func(i, j) bool { return keys[i] < keys[j] })` - info.After = `sort.Slice(kv, func(i, j) bool { return kv[i].key < kv[j].key })` - - collection.AddChecker(&info, func(ctx *linter.CheckerContext) (linter.FileWalker, error) { - return astwalk.WalkerForExpr(&sortSliceChecker{ctx: ctx}), nil - }) -} - -type sortSliceChecker struct { - astwalk.WalkHandler - ctx *linter.CheckerContext -} - -func (c *sortSliceChecker) VisitExpr(expr ast.Expr) { - call := astcast.ToCallExpr(expr) - if 
len(call.Args) != 2 { - return - } - switch qualifiedName(call.Fun) { - case "sort.Slice", "sort.SliceStable": - // OK. - default: - return - } - - slice := c.unwrapSlice(call.Args[0]) - lessFunc, ok := call.Args[1].(*ast.FuncLit) - if !ok { - return - } - if !typep.SideEffectFree(c.ctx.TypesInfo, slice) { - return // Don't check unpredictable slice values - } - - ivar, jvar := c.paramIdents(lessFunc.Type) - if ivar == nil || jvar == nil { - return - } - - if len(lessFunc.Body.List) != 1 { - return - } - ret, ok := lessFunc.Body.List[0].(*ast.ReturnStmt) - if !ok { - return - } - cmp := astcast.ToBinaryExpr(astutil.Unparen(ret.Results[0])) - if !typep.SideEffectFree(c.ctx.TypesInfo, cmp) { - return - } - switch cmp.Op { - case token.LSS, token.LEQ, token.GTR, token.GEQ: - // Both cmp.X and cmp.Y are expected to be some expressions - // over the `slice` expression. In the simplest case, - // it's a `slice[i] slice[j]`. - if !c.containsSlice(cmp.X, slice) && !c.containsSlice(cmp.Y, slice) { - c.warnSlice(cmp, slice) - } - - // This one is more about the style, but can reveal potential issue - // or misprint in sorting condition. - // We give a warn if X contains indexing with `i` index and Y - // contains indexing with `j`. - if c.containsIndex(cmp.X, jvar) && c.containsIndex(cmp.Y, ivar) { - c.warnIndex(cmp, ivar, jvar) - } - } -} - -func (c *sortSliceChecker) paramIdents(e *ast.FuncType) (ivar, jvar *ast.Ident) { - // Covers both `i, j int` and `i int, j int`. - idents := make([]*ast.Ident, 0, 2) - for _, field := range e.Params.List { - idents = append(idents, field.Names...) - } - if len(idents) == 2 { - return idents[0], idents[1] - } - return nil, nil -} - -func (c *sortSliceChecker) unwrapSlice(e ast.Expr) ast.Expr { - switch e := e.(type) { - case *ast.ParenExpr: - return c.unwrapSlice(e.X) - case *ast.SliceExpr: - return e.X - default: - return e - } -} - -func (c *sortSliceChecker) containsIndex(e, index ast.Expr) bool { - return lintutil.ContainsNode(e, func(n ast.Node) bool { - indexing, ok := n.(*ast.IndexExpr) - if !ok { - return false - } - return astequal.Expr(indexing.Index, index) - }) -} - -func (c *sortSliceChecker) containsSlice(e, slice ast.Expr) bool { - return lintutil.ContainsNode(e, func(n ast.Node) bool { - return astequal.Node(n, slice) - }) -} - -func (c *sortSliceChecker) warnSlice(cause ast.Node, slice ast.Expr) { - c.ctx.Warn(cause, "cmp func must use %s slice in comparison", slice) -} - -func (c *sortSliceChecker) warnIndex(cause ast.Node, ivar, jvar *ast.Ident) { - c.ctx.Warn(cause, "unusual order of {%s,%s} params in comparison", ivar, jvar) -} diff --git a/vendor/github.com/go-critic/go-critic/checkers/sqlQuery_checker.go b/vendor/github.com/go-critic/go-critic/checkers/sqlQuery_checker.go deleted file mode 100644 index 8a132b586..000000000 --- a/vendor/github.com/go-critic/go-critic/checkers/sqlQuery_checker.go +++ /dev/null @@ -1,168 +0,0 @@ -package checkers - -import ( - "go/ast" - "go/types" - - "github.com/go-critic/go-critic/checkers/internal/astwalk" - "github.com/go-critic/go-critic/linter" - - "github.com/go-toolsmith/astcast" -) - -func init() { - var info linter.CheckerInfo - info.Name = "sqlQuery" - info.Tags = []string{linter.DiagnosticTag, linter.ExperimentalTag} - info.Summary = "Detects issue in Query() and Exec() calls" - info.Before = `_, err := db.Query("UPDATE ...")` - info.After = `_, err := db.Exec("UPDATE ...")` - - collection.AddChecker(&info, func(ctx *linter.CheckerContext) (linter.FileWalker, error) { - return 
astwalk.WalkerForStmt(&sqlQueryChecker{ctx: ctx}), nil - }) -} - -type sqlQueryChecker struct { - astwalk.WalkHandler - ctx *linter.CheckerContext -} - -func (c *sqlQueryChecker) VisitStmt(stmt ast.Stmt) { - assign := astcast.ToAssignStmt(stmt) - if len(assign.Lhs) != 2 { // Query() has 2 return values. - return - } - if len(assign.Rhs) != 1 { - return - } - - // If Query() is called, but first return value is ignored, - // there is no way to close/read the returned rows. - // This can cause a connection leak. - if id, ok := assign.Lhs[0].(*ast.Ident); ok && id.Name != "_" { - return - } - - call := astcast.ToCallExpr(assign.Rhs[0]) - funcExpr := astcast.ToSelectorExpr(call.Fun) - if !c.funcIsQuery(funcExpr) { - return - } - - if c.typeHasExecMethod(c.ctx.TypeOf(funcExpr.X)) { - c.warnAndSuggestExec(funcExpr) - } else { - c.warnRowsIgnored(funcExpr) - } -} - -func (c *sqlQueryChecker) funcIsQuery(funcExpr *ast.SelectorExpr) bool { - if funcExpr.Sel == nil { - return false - } - switch funcExpr.Sel.Name { - case "Query", "QueryContext": - // Stdlib and friends. - case "Queryx", "QueryxContext": - // sqlx. - default: - return false - } - - // To avoid false positives (unrelated types can have Query method) - // check that the 1st returned type has Row-like name. - typ, ok := c.ctx.TypeOf(funcExpr).Underlying().(*types.Signature) - if !ok || typ.Results() == nil || typ.Results().Len() != 2 { - return false - } - if !c.typeIsRowsLike(typ.Results().At(0).Type()) { - return false - } - - return true -} - -func (c *sqlQueryChecker) typeIsRowsLike(typ types.Type) bool { - switch typ := typ.(type) { - case *types.Pointer: - return c.typeIsRowsLike(typ.Elem()) - case *types.Named: - return typ.Obj().Name() == "Rows" - default: - return false - } -} - -func (c *sqlQueryChecker) funcIsExec(fn *types.Func) bool { - if fn.Name() != "Exec" { - return false - } - - // Expect exactly 2 results. - sig := fn.Type().(*types.Signature) - if sig.Results() == nil || sig.Results().Len() != 2 { - return false - } - - // Expect at least 1 param and it should be a string (query). - params := sig.Params() - if params == nil || params.Len() == 0 { - return false - } - if typ, ok := params.At(0).Type().(*types.Basic); !ok || typ.Kind() != types.String { - return false - } - - return true -} - -func (c *sqlQueryChecker) typeHasExecMethod(typ types.Type) bool { - switch typ := typ.(type) { - case *types.Struct: - for i := 0; i < typ.NumFields(); i++ { - if c.typeHasExecMethod(typ.Field(i).Type()) { - return true - } - } - case *types.Interface: - for i := 0; i < typ.NumMethods(); i++ { - if c.funcIsExec(typ.Method(i)) { - return true - } - } - case *types.Pointer: - return c.typeHasExecMethod(typ.Elem()) - case *types.Named: - for i := 0; i < typ.NumMethods(); i++ { - if c.funcIsExec(typ.Method(i)) { - return true - } - } - switch ut := typ.Underlying().(type) { - case *types.Interface: - return c.typeHasExecMethod(ut) - case *types.Struct: - // Check embedded types. 
- for i := 0; i < ut.NumFields(); i++ { - field := ut.Field(i) - if !field.Embedded() { - continue - } - if c.typeHasExecMethod(field.Type()) { - return true - } - } - } - } - - return false -} - -func (c *sqlQueryChecker) warnAndSuggestExec(funcExpr *ast.SelectorExpr) { - c.ctx.Warn(funcExpr, "use %s.Exec() if returned result is not needed", funcExpr.X) -} - -func (c *sqlQueryChecker) warnRowsIgnored(funcExpr *ast.SelectorExpr) { - c.ctx.Warn(funcExpr, "ignoring Query() rows result may lead to a connection leak") -} diff --git a/vendor/github.com/go-critic/go-critic/checkers/todoCommentWithoutDetail_checker.go b/vendor/github.com/go-critic/go-critic/checkers/todoCommentWithoutDetail_checker.go deleted file mode 100644 index f8e4b9b3c..000000000 --- a/vendor/github.com/go-critic/go-critic/checkers/todoCommentWithoutDetail_checker.go +++ /dev/null @@ -1,50 +0,0 @@ -package checkers - -import ( - "go/ast" - "regexp" - - "github.com/go-critic/go-critic/checkers/internal/astwalk" - "github.com/go-critic/go-critic/linter" -) - -func init() { - var info linter.CheckerInfo - info.Name = "todoCommentWithoutDetail" - info.Tags = []string{linter.StyleTag, linter.OpinionatedTag, linter.ExperimentalTag} - info.Summary = "Detects TODO comments without detail/assignee" - info.Before = ` -// TODO -fiiWithCtx(nil, a, b) -` - info.After = ` -// TODO(admin): pass context.TODO() instead of nil -fiiWithCtx(nil, a, b) -` - collection.AddChecker(&info, func(ctx *linter.CheckerContext) (linter.FileWalker, error) { - visitor := &todoCommentWithoutCodeChecker{ - ctx: ctx, - regex: regexp.MustCompile(`^(//|/\*)?\s*(TODO|FIX|FIXME|BUG)\s*(\*/)?$`), - } - return astwalk.WalkerForComment(visitor), nil - }) -} - -type todoCommentWithoutCodeChecker struct { - astwalk.WalkHandler - ctx *linter.CheckerContext - regex *regexp.Regexp -} - -func (c *todoCommentWithoutCodeChecker) VisitComment(cg *ast.CommentGroup) { - for _, comment := range cg.List { - if c.regex.MatchString(comment.Text) { - c.warn(cg) - break - } - } -} - -func (c *todoCommentWithoutCodeChecker) warn(cause ast.Node) { - c.ctx.Warn(cause, "may want to add detail/assignee to this TODO/FIXME/BUG comment") -} diff --git a/vendor/github.com/go-critic/go-critic/checkers/tooManyResults_checker.go b/vendor/github.com/go-critic/go-critic/checkers/tooManyResults_checker.go deleted file mode 100644 index 57411ba24..000000000 --- a/vendor/github.com/go-critic/go-critic/checkers/tooManyResults_checker.go +++ /dev/null @@ -1,54 +0,0 @@ -package checkers - -import ( - "go/ast" - "go/types" - - "github.com/go-critic/go-critic/checkers/internal/astwalk" - "github.com/go-critic/go-critic/linter" -) - -func init() { - var info linter.CheckerInfo - info.Name = "tooManyResultsChecker" - info.Tags = []string{linter.StyleTag, linter.OpinionatedTag, linter.ExperimentalTag} - info.Params = linter.CheckerParams{ - "maxResults": { - Value: 5, - Usage: "maximum number of results", - }, - } - info.Summary = "Detects function with too many results" - info.Before = `func fn() (a, b, c, d float32, _ int, _ bool)` - info.After = `func fn() (resultStruct, bool)` - - collection.AddChecker(&info, func(ctx *linter.CheckerContext) (linter.FileWalker, error) { - c := astwalk.WalkerForFuncDecl(&tooManyResultsChecker{ - ctx: ctx, - maxParams: info.Params.Int("maxResults"), - }) - return c, nil - }) -} - -type tooManyResultsChecker struct { - astwalk.WalkHandler - ctx *linter.CheckerContext - maxParams int -} - -func (c *tooManyResultsChecker) VisitFuncDecl(decl *ast.FuncDecl) { - typ := 
c.ctx.TypeOf(decl.Name) - sig, ok := typ.(*types.Signature) - if !ok { - return - } - - if count := sig.Results().Len(); count > c.maxParams { - c.warn(decl) - } -} - -func (c *tooManyResultsChecker) warn(n ast.Node) { - c.ctx.Warn(n, "function has more than %d results, consider to simplify the function", c.maxParams) -} diff --git a/vendor/github.com/go-critic/go-critic/checkers/truncateCmp_checker.go b/vendor/github.com/go-critic/go-critic/checkers/truncateCmp_checker.go deleted file mode 100644 index b36902526..000000000 --- a/vendor/github.com/go-critic/go-critic/checkers/truncateCmp_checker.go +++ /dev/null @@ -1,124 +0,0 @@ -package checkers - -import ( - "go/ast" - "go/token" - "go/types" - - "github.com/go-critic/go-critic/checkers/internal/astwalk" - "github.com/go-critic/go-critic/linter" - - "github.com/go-toolsmith/astcast" - "github.com/go-toolsmith/astp" -) - -func init() { - var info linter.CheckerInfo - info.Name = "truncateCmp" - info.Tags = []string{linter.DiagnosticTag, linter.ExperimentalTag} - info.Params = linter.CheckerParams{ - "skipArchDependent": { - Value: true, - Usage: "whether to skip int/uint/uintptr types", - }, - } - info.Summary = "Detects potential truncation issues when comparing ints of different sizes" - info.Before = ` -func f(x int32, y int16) bool { - return int16(x) < y -}` - info.After = ` -func f(x int32, int16) bool { - return x < int32(y) -}` - - collection.AddChecker(&info, func(ctx *linter.CheckerContext) (linter.FileWalker, error) { - c := &truncateCmpChecker{ctx: ctx} - c.skipArchDependent = info.Params.Bool("skipArchDependent") - return astwalk.WalkerForExpr(c), nil - }) -} - -type truncateCmpChecker struct { - astwalk.WalkHandler - ctx *linter.CheckerContext - - skipArchDependent bool -} - -func (c *truncateCmpChecker) VisitExpr(expr ast.Expr) { - cmp := astcast.ToBinaryExpr(expr) - switch cmp.Op { - case token.LSS, token.GTR, token.LEQ, token.GEQ, token.EQL, token.NEQ: - if astp.IsBasicLit(cmp.X) || astp.IsBasicLit(cmp.Y) { - return // Don't bother about untyped consts - } - leftCast := c.isTruncCast(cmp.X) - rightCast := c.isTruncCast(cmp.Y) - switch { - case leftCast && rightCast: - return - case leftCast: - c.checkCmp(cmp.X, cmp.Y) - case rightCast: - c.checkCmp(cmp.Y, cmp.X) - } - default: - return - } -} - -func (c *truncateCmpChecker) isTruncCast(x ast.Expr) bool { - switch astcast.ToIdent(astcast.ToCallExpr(x).Fun).Name { - case "int8", "int16", "int32", "uint8", "uint16", "uint32": - return true - default: - return false - } -} - -func (c *truncateCmpChecker) checkCmp(cmpX, cmpY ast.Expr) { - // Check if we have a cast to a type that can truncate. - xcast := astcast.ToCallExpr(cmpX) - if len(xcast.Args) != 1 { - return // Just in case of the shadowed builtin - } - - x := xcast.Args[0] - y := cmpY - - // Check that both x and y are signed or unsigned int-typed. 
- xtyp, ok := c.ctx.TypeOf(x).Underlying().(*types.Basic) - if !ok || xtyp.Info()&types.IsInteger == 0 { - return - } - ytyp, ok := c.ctx.TypeOf(y).Underlying().(*types.Basic) - if !ok || xtyp.Info() != ytyp.Info() { - return - } - - xsize, ok := c.ctx.SizeOf(xtyp) - if !ok { - return - } - ysize, ok := c.ctx.SizeOf(ytyp) - if !ok { - return - } - if xsize <= ysize { - return - } - - if c.skipArchDependent { - switch xtyp.Kind() { - case types.Int, types.Uint, types.Uintptr: - return - } - } - - c.warn(xcast, xsize*8, ysize*8, xtyp.String()) -} - -func (c *truncateCmpChecker) warn(cause ast.Expr, xsize, ysize int64, suggest string) { - c.ctx.Warn(cause, "truncation in comparison %d->%d bit; cast the other operand to %s instead", xsize, ysize, suggest) -} diff --git a/vendor/github.com/go-critic/go-critic/checkers/typeAssertChain_checker.go b/vendor/github.com/go-critic/go-critic/checkers/typeAssertChain_checker.go deleted file mode 100644 index e0d20fd4c..000000000 --- a/vendor/github.com/go-critic/go-critic/checkers/typeAssertChain_checker.go +++ /dev/null @@ -1,133 +0,0 @@ -package checkers - -import ( - "go/ast" - "go/token" - - "github.com/go-critic/go-critic/checkers/internal/astwalk" - "github.com/go-critic/go-critic/checkers/internal/lintutil" - "github.com/go-critic/go-critic/linter" - - "github.com/go-toolsmith/astcast" - "github.com/go-toolsmith/astequal" - "github.com/go-toolsmith/astp" -) - -func init() { - var info linter.CheckerInfo - info.Name = "typeAssertChain" - info.Tags = []string{linter.StyleTag, linter.ExperimentalTag} - info.Summary = "Detects repeated type assertions and suggests to replace them with type switch statement" - info.Before = ` -if x, ok := v.(T1); ok { - // Code A, uses x. -} else if x, ok := v.(T2); ok { - // Code B, uses x. -} else if x, ok := v.(T3); ok { - // Code C, uses x. -}` - info.After = ` -switch x := v.(T1) { -case cond1: - // Code A, uses x. -case cond2: - // Code B, uses x. -default: - // Code C, uses x. 
-}` - - collection.AddChecker(&info, func(ctx *linter.CheckerContext) (linter.FileWalker, error) { - return astwalk.WalkerForStmt(&typeAssertChainChecker{ctx: ctx}), nil - }) -} - -type typeAssertChainChecker struct { - astwalk.WalkHandler - ctx *linter.CheckerContext - - cause *ast.IfStmt - visited map[*ast.IfStmt]bool - typeSet lintutil.AstSet -} - -func (c *typeAssertChainChecker) EnterFunc(fn *ast.FuncDecl) bool { - if fn.Body == nil { - return false - } - c.visited = make(map[*ast.IfStmt]bool) - return true -} - -func (c *typeAssertChainChecker) VisitStmt(stmt ast.Stmt) { - ifstmt, ok := stmt.(*ast.IfStmt) - if !ok || c.visited[ifstmt] || ifstmt.Init == nil { - return - } - assertion := c.getTypeAssert(ifstmt) - if assertion == nil { - return - } - c.cause = ifstmt - c.checkIfStmt(ifstmt, assertion) -} - -func (c *typeAssertChainChecker) getTypeAssert(ifstmt *ast.IfStmt) *ast.TypeAssertExpr { - assign := astcast.ToAssignStmt(ifstmt.Init) - if len(assign.Lhs) != 2 || len(assign.Rhs) != 1 { - return nil - } - if !astp.IsIdent(assign.Lhs[0]) || assign.Tok != token.DEFINE { - return nil - } - if !astequal.Expr(assign.Lhs[1], ifstmt.Cond) { - return nil - } - - assertion, ok := assign.Rhs[0].(*ast.TypeAssertExpr) - if !ok { - return nil - } - return assertion -} - -func (c *typeAssertChainChecker) checkIfStmt(stmt *ast.IfStmt, assertion *ast.TypeAssertExpr) { - if c.countTypeAssertions(stmt, assertion) >= 2 { - c.warn() - } -} - -func (c *typeAssertChainChecker) countTypeAssertions(stmt *ast.IfStmt, assertion *ast.TypeAssertExpr) int { - c.typeSet.Clear() - - count := 1 - x := assertion.X - c.typeSet.Insert(assertion.Type) - for { - e, ok := stmt.Else.(*ast.IfStmt) - if !ok { - return count - } - assertion = c.getTypeAssert(e) - if assertion == nil { - return count - } - if !c.typeSet.Insert(assertion.Type) { - // Asserted type is duplicated. - // Type switch does not permit duplicate cases, - // so give up. - return 0 - } - if !astequal.Expr(x, assertion.X) { - // Mixed type asserting chain. - // Can't be easily translated to a type switch. 
- return 0 - } - stmt = e - count++ - c.visited[e] = true - } -} - -func (c *typeAssertChainChecker) warn() { - c.ctx.Warn(c.cause, "rewrite if-else to type switch statement") -} diff --git a/vendor/github.com/go-critic/go-critic/checkers/typeDefFirst_checker.go b/vendor/github.com/go-critic/go-critic/checkers/typeDefFirst_checker.go deleted file mode 100644 index 11381c401..000000000 --- a/vendor/github.com/go-critic/go-critic/checkers/typeDefFirst_checker.go +++ /dev/null @@ -1,92 +0,0 @@ -package checkers - -import ( - "go/ast" - "go/token" - - "github.com/go-critic/go-critic/checkers/internal/astwalk" - "github.com/go-critic/go-critic/linter" -) - -func init() { - var info linter.CheckerInfo - info.Name = "typeDefFirst" - info.Tags = []string{linter.StyleTag, linter.ExperimentalTag} - info.Summary = "Detects method declarations preceding the type definition itself" - info.Before = ` -func (r rec) Method() {} -type rec struct{} -` - info.After = ` -type rec struct{} -func (r rec) Method() {} -` - collection.AddChecker(&info, func(ctx *linter.CheckerContext) (linter.FileWalker, error) { - return &typeDefFirstChecker{ - ctx: ctx, - }, nil - }) -} - -type typeDefFirstChecker struct { - astwalk.WalkHandler - ctx *linter.CheckerContext - trackedTypes map[string]bool -} - -func (c *typeDefFirstChecker) WalkFile(f *ast.File) { - if len(f.Decls) == 0 { - return - } - - c.trackedTypes = make(map[string]bool) - for _, decl := range f.Decls { - c.walkDecl(decl) - } -} - -func (c *typeDefFirstChecker) walkDecl(decl ast.Decl) { - switch decl := decl.(type) { - case *ast.FuncDecl: - if decl.Recv == nil { - return - } - receiver := decl.Recv.List[0] - typeName := c.receiverType(receiver.Type) - c.trackedTypes[typeName] = true - - case *ast.GenDecl: - if decl.Tok != token.TYPE { - return - } - for _, spec := range decl.Specs { - spec, ok := spec.(*ast.TypeSpec) - if !ok { - return - } - typeName := spec.Name.Name - if val, ok := c.trackedTypes[typeName]; ok && val { - c.warn(decl, typeName) - } - } - } -} - -func (c *typeDefFirstChecker) receiverType(e ast.Expr) string { - switch e := e.(type) { - case *ast.StarExpr: - return c.receiverType(e.X) - case *ast.Ident: - return e.Name - case *ast.IndexExpr: - return c.receiverType(e.X) - case *ast.IndexListExpr: - return c.receiverType(e.X) - default: - panic("unreachable") - } -} - -func (c *typeDefFirstChecker) warn(cause ast.Node, typeName string) { - c.ctx.Warn(cause, "definition of type '%s' should appear before its methods", typeName) -} diff --git a/vendor/github.com/go-critic/go-critic/checkers/typeSwitchVar_checker.go b/vendor/github.com/go-critic/go-critic/checkers/typeSwitchVar_checker.go deleted file mode 100644 index 4b27b1792..000000000 --- a/vendor/github.com/go-critic/go-critic/checkers/typeSwitchVar_checker.go +++ /dev/null @@ -1,98 +0,0 @@ -package checkers - -import ( - "go/ast" - - "github.com/go-critic/go-critic/checkers/internal/astwalk" - "github.com/go-critic/go-critic/checkers/internal/lintutil" - "github.com/go-critic/go-critic/linter" - - "github.com/go-toolsmith/astequal" - "github.com/go-toolsmith/astp" -) - -func init() { - var info linter.CheckerInfo - info.Name = "typeSwitchVar" - info.Tags = []string{linter.StyleTag} - info.Summary = "Detects type switches that can benefit from type guard clause with variable" - info.Before = ` -switch v.(type) { -case int: - return v.(int) -case point: - return v.(point).x + v.(point).y -default: - return 0 -}` - info.After = ` -switch v := v.(type) { -case int: - return v -case point: - 
return v.x + v.y -default: - return 0 -}` - - collection.AddChecker(&info, func(ctx *linter.CheckerContext) (linter.FileWalker, error) { - return astwalk.WalkerForStmt(&typeSwitchVarChecker{ctx: ctx}), nil - }) -} - -type typeSwitchVarChecker struct { - astwalk.WalkHandler - ctx *linter.CheckerContext - count int -} - -func (c *typeSwitchVarChecker) VisitStmt(stmt ast.Stmt) { - if stmt, ok := stmt.(*ast.TypeSwitchStmt); ok { - c.count = 0 - c.checkTypeSwitch(stmt) - } -} - -func (c *typeSwitchVarChecker) checkTypeSwitch(root *ast.TypeSwitchStmt) { - if astp.IsAssignStmt(root.Assign) { - return // Already with type guard - } - // Must be a *ast.ExprStmt then. - expr := root.Assign.(*ast.ExprStmt).X.(*ast.TypeAssertExpr).X - object := c.ctx.TypesInfo.ObjectOf(identOf(expr)) - if object == nil { - return // Give up: can't handle shadowing without object - } - - for _, clause := range root.Body.List { - clause := clause.(*ast.CaseClause) - // Multiple types in a list mean that assert.X will have - // a type of interface{} inside clause body. - // We are looking for precise type case. - if len(clause.List) != 1 { - continue - } - // Create artificial node just for matching. - assert1 := ast.TypeAssertExpr{X: expr, Type: clause.List[0]} - for _, stmt := range clause.Body { - assert2 := lintutil.FindNode(stmt, nil, func(x ast.Node) bool { - return astequal.Node(&assert1, x) - }) - if object == c.ctx.TypesInfo.ObjectOf(identOf(assert2)) { - c.count++ - break - } - } - } - if c.count > 0 { - c.warn(root) - } -} - -func (c *typeSwitchVarChecker) warn(n ast.Node) { - msg := "case" - if c.count > 1 { - msg = "cases" - } - c.ctx.Warn(n, "%d "+msg+" can benefit from type switch with assignment", c.count) -} diff --git a/vendor/github.com/go-critic/go-critic/checkers/typeUnparen_checker.go b/vendor/github.com/go-critic/go-critic/checkers/typeUnparen_checker.go deleted file mode 100644 index e2e225ebf..000000000 --- a/vendor/github.com/go-critic/go-critic/checkers/typeUnparen_checker.go +++ /dev/null @@ -1,96 +0,0 @@ -package checkers - -import ( - "go/ast" - - "github.com/go-critic/go-critic/checkers/internal/astwalk" - "github.com/go-critic/go-critic/linter" - - "github.com/go-toolsmith/astcopy" - "github.com/go-toolsmith/astequal" -) - -func init() { - var info linter.CheckerInfo - info.Name = "typeUnparen" - info.Tags = []string{linter.StyleTag, linter.OpinionatedTag} - info.Summary = "Detects unneeded parenthesis inside type expressions and suggests to remove them" - info.Before = `type foo [](func([](func())))` - info.After = `type foo []func([]func())` - - collection.AddChecker(&info, func(ctx *linter.CheckerContext) (linter.FileWalker, error) { - return astwalk.WalkerForTypeExpr(&typeUnparenChecker{ctx: ctx}, ctx.TypesInfo), nil - }) -} - -type typeUnparenChecker struct { - astwalk.WalkHandler - ctx *linter.CheckerContext -} - -func (c *typeUnparenChecker) VisitTypeExpr(e ast.Expr) { - switch e := e.(type) { - case *ast.ParenExpr: - switch e.X.(type) { - case *ast.StructType: - c.ctx.Warn(e, "could simplify (struct{...}) to struct{...}") - case *ast.InterfaceType: - c.ctx.Warn(e, "could simplify (interface{...}) to interface{...}") - default: - c.checkType(e) - } - case *ast.StructType, *ast.InterfaceType: - // Only nested fields are to be reported. 
- default: - c.checkType(e) - } -} - -func (c *typeUnparenChecker) checkType(e ast.Expr) { - noParens := c.removeRedundantParens(astcopy.Expr(e)) - if !astequal.Expr(e, noParens) { - c.warn(e, noParens) - } - c.SkipChilds = true -} - -func (c *typeUnparenChecker) removeRedundantParens(e ast.Expr) ast.Expr { - switch e := e.(type) { - case *ast.ParenExpr: - return c.removeRedundantParens(e.X) - case *ast.ArrayType: - e.Elt = c.removeRedundantParens(e.Elt) - case *ast.StarExpr: - e.X = c.removeRedundantParens(e.X) - case *ast.TypeAssertExpr: - e.Type = c.removeRedundantParens(e.Type) - case *ast.FuncType: - for _, field := range e.Params.List { - field.Type = c.removeRedundantParens(field.Type) - } - if e.Results != nil { - for _, field := range e.Results.List { - field.Type = c.removeRedundantParens(field.Type) - } - } - case *ast.MapType: - e.Key = c.removeRedundantParens(e.Key) - e.Value = c.removeRedundantParens(e.Value) - case *ast.ChanType: - if valueWithParens, ok := e.Value.(*ast.ParenExpr); ok { - if nestedChan, ok := valueWithParens.X.(*ast.ChanType); ok { - const anyDir = ast.SEND | ast.RECV - if nestedChan.Dir != anyDir || e.Dir != anyDir { - valueWithParens.X = c.removeRedundantParens(valueWithParens.X) - return e - } - } - } - e.Value = c.removeRedundantParens(e.Value) - } - return e -} - -func (c *typeUnparenChecker) warn(cause, noParens ast.Expr) { - c.ctx.Warn(cause, "could simplify %s to %s", cause, noParens) -} diff --git a/vendor/github.com/go-critic/go-critic/checkers/underef_checker.go b/vendor/github.com/go-critic/go-critic/checkers/underef_checker.go deleted file mode 100644 index 0ce2c89ba..000000000 --- a/vendor/github.com/go-critic/go-critic/checkers/underef_checker.go +++ /dev/null @@ -1,128 +0,0 @@ -package checkers - -import ( - "go/ast" - "go/types" - - "github.com/go-critic/go-critic/checkers/internal/astwalk" - "github.com/go-critic/go-critic/linter" - - "github.com/go-toolsmith/astcast" - "github.com/go-toolsmith/astp" -) - -func init() { - var info linter.CheckerInfo - info.Name = "underef" - info.Tags = []string{linter.StyleTag} - info.Params = linter.CheckerParams{ - "skipRecvDeref": { - Value: true, - Usage: "whether to skip (*x).method() calls where x is a pointer receiver", - }, - } - info.Summary = "Detects dereference expressions that can be omitted" - info.Before = ` -(*k).field = 5 -v := (*a)[5] // only if a is array` - info.After = ` -k.field = 5 -v := a[5]` - - collection.AddChecker(&info, func(ctx *linter.CheckerContext) (linter.FileWalker, error) { - c := &underefChecker{ctx: ctx} - c.skipRecvDeref = info.Params.Bool("skipRecvDeref") - return astwalk.WalkerForExpr(c), nil - }) -} - -type underefChecker struct { - astwalk.WalkHandler - ctx *linter.CheckerContext - - skipRecvDeref bool -} - -func (c *underefChecker) VisitExpr(expr ast.Expr) { - switch n := expr.(type) { - case *ast.SelectorExpr: - expr := astcast.ToParenExpr(n.X) - if c.skipRecvDeref && c.isPtrRecvMethodCall(n.Sel) { - return - } - - if expr, ok := expr.X.(*ast.StarExpr); ok { - if c.checkStarExpr(expr) { - c.warnSelect(n) - } - } - case *ast.IndexExpr: - expr := astcast.ToParenExpr(n.X) - if expr, ok := expr.X.(*ast.StarExpr); ok { - if !c.checkStarExpr(expr) { - return - } - if c.checkArray(expr) { - c.warnArray(n) - } - } - } -} - -func (c *underefChecker) isPtrRecvMethodCall(fn *ast.Ident) bool { - typ, ok := c.ctx.TypeOf(fn).(*types.Signature) - if ok && typ != nil && typ.Recv() != nil { - _, ok := typ.Recv().Type().(*types.Pointer) - return ok - } - return false -} - -func 
(c *underefChecker) underef(x *ast.ParenExpr) ast.Expr { - // If there is only 1 deref, can remove parenthesis, - // otherwise can remove StarExpr only. - dereferenced := x.X.(*ast.StarExpr).X - if astp.IsStarExpr(dereferenced) { - return &ast.ParenExpr{X: dereferenced} - } - return dereferenced -} - -func (c *underefChecker) warnSelect(expr *ast.SelectorExpr) { - // TODO: add () to function output. - c.ctx.Warn(expr, "could simplify %s to %s.%s", - expr, - c.underef(expr.X.(*ast.ParenExpr)), - expr.Sel.Name) -} - -func (c *underefChecker) warnArray(expr *ast.IndexExpr) { - c.ctx.Warn(expr, "could simplify %s to %s[%s]", - expr, - c.underef(expr.X.(*ast.ParenExpr)), - expr.Index) -} - -// checkStarExpr checks if ast.StarExpr could be simplified. -func (c *underefChecker) checkStarExpr(expr *ast.StarExpr) bool { - typ, ok := c.ctx.TypeOf(expr.X).Underlying().(*types.Pointer) - if !ok { - return false - } - - switch typ.Elem().Underlying().(type) { - case *types.Pointer, *types.Interface: - return false - default: - return true - } -} - -func (c *underefChecker) checkArray(expr *ast.StarExpr) bool { - typ, ok := c.ctx.TypeOf(expr.X).(*types.Pointer) - if !ok { - return false - } - _, ok = typ.Elem().(*types.Array) - return ok -} diff --git a/vendor/github.com/go-critic/go-critic/checkers/unlabelStmt_checker.go b/vendor/github.com/go-critic/go-critic/checkers/unlabelStmt_checker.go deleted file mode 100644 index d0e83f3c2..000000000 --- a/vendor/github.com/go-critic/go-critic/checkers/unlabelStmt_checker.go +++ /dev/null @@ -1,181 +0,0 @@ -package checkers - -import ( - "go/ast" - "go/token" - - "github.com/go-critic/go-critic/checkers/internal/astwalk" - "github.com/go-critic/go-critic/checkers/internal/lintutil" - "github.com/go-critic/go-critic/linter" -) - -func init() { - var info linter.CheckerInfo - info.Name = "unlabelStmt" - info.Tags = []string{linter.StyleTag, linter.ExperimentalTag} - info.Summary = "Detects redundant statement labels" - info.Before = ` -derp: -for x := range xs { - if x == 0 { - break derp - } -}` - info.After = ` -for x := range xs { - if x == 0 { - break - } -}` - - collection.AddChecker(&info, func(ctx *linter.CheckerContext) (linter.FileWalker, error) { - return astwalk.WalkerForStmt(&unlabelStmtChecker{ctx: ctx}), nil - }) -} - -type unlabelStmtChecker struct { - astwalk.WalkHandler - ctx *linter.CheckerContext -} - -func (c *unlabelStmtChecker) EnterFunc(fn *ast.FuncDecl) bool { - if fn.Body == nil { - return false - } - // TODO(quasilyte): should not do additional traversal here. - // For now, skip all functions that contain goto statement. - return !lintutil.ContainsNode(fn.Body, func(n ast.Node) bool { - br, ok := n.(*ast.BranchStmt) - return ok && br.Tok == token.GOTO - }) -} - -func (c *unlabelStmtChecker) VisitStmt(stmt ast.Stmt) { - labeled, ok := stmt.(*ast.LabeledStmt) - if !ok || !c.canBreakFrom(labeled.Stmt) { - return - } - - // We have a labeled statement from that have labeled continue/break. - // This is an invariant, since unused label is a compile-time error - // and we're currently skipping functions containing goto. - // - // Also note that Go labels are function-scoped and there - // can be no re-definitions. This means that we don't - // need to care about label shadowing or things like that. - // - // The task is to find cases where labeled branch (continue/break) - // is redundant and can be re-written, decreasing the label usages - // and potentially leading to its redundancy, - // or finding the redundant labels right away. 
- - name := labeled.Label.Name - - // Simplest case that can prove that label is redundant. - // - // If labeled branch is somewhere inside the statement block itself - // and none of the nested break'able statements refer to that label, - // the label can be removed. - matchUsage := func(n ast.Node) bool { - return c.canBreakFrom(n) && c.usesLabel(c.blockStmtOf(n), name) - } - if !lintutil.ContainsNode(c.blockStmtOf(labeled.Stmt), matchUsage) { - c.warnRedundant(labeled) - return - } - - // Only for loops: if last stmt in list is a loop - // that contains labeled "continue" to the outer loop label, - // it can be refactored to use "break" instead. - // Exceptions: select statements with a labeled "continue" are ignored. - if c.isLoop(labeled.Stmt) { - body := c.blockStmtOf(labeled.Stmt) - if len(body.List) == 0 { - return - } - last := body.List[len(body.List)-1] - if !c.isLoop(last) { - return - } - br := lintutil.FindNode(c.blockStmtOf(last), - func(n ast.Node) bool { - switch n.(type) { - case *ast.SelectStmt: - return false - default: - return true - } - }, - func(n ast.Node) bool { - br, ok := n.(*ast.BranchStmt) - return ok && br.Label != nil && - br.Label.Name == name && br.Tok == token.CONTINUE - }) - - if br != nil { - c.warnLabeledContinue(br, name) - } - } -} - -// isLoop reports whether n is a loop of some kind. -// In other words, it tells whether n body can contain "continue" -// associated with n. -func (c *unlabelStmtChecker) isLoop(n ast.Node) bool { - switch n.(type) { - case *ast.ForStmt, *ast.RangeStmt: - return true - default: - return false - } -} - -// canBreakFrom reports whether it is possible to "break" or "continue" from n body. -func (c *unlabelStmtChecker) canBreakFrom(n ast.Node) bool { - switch n.(type) { - case *ast.RangeStmt, *ast.ForStmt, *ast.SwitchStmt, *ast.TypeSwitchStmt, *ast.SelectStmt: - return true - default: - return false - } -} - -// blockStmtOf returns body of specified node. -// -// TODO(quasilyte): handle other statements and see if it can be useful -// in other checkers. -func (c *unlabelStmtChecker) blockStmtOf(n ast.Node) *ast.BlockStmt { - switch n := n.(type) { - case *ast.RangeStmt: - return n.Body - case *ast.ForStmt: - return n.Body - case *ast.SwitchStmt: - return n.Body - case *ast.TypeSwitchStmt: - return n.Body - case *ast.SelectStmt: - return n.Body - - default: - return nil - } -} - -// usesLabel reports whether n contains a usage of label. 
-func (c *unlabelStmtChecker) usesLabel(n *ast.BlockStmt, label string) bool { - return lintutil.ContainsNode(n, func(n ast.Node) bool { - branch, ok := n.(*ast.BranchStmt) - return ok && branch.Label != nil && - branch.Label.Name == label && - (branch.Tok == token.CONTINUE || branch.Tok == token.BREAK) - }) -} - -func (c *unlabelStmtChecker) warnRedundant(cause *ast.LabeledStmt) { - c.ctx.Warn(cause, "label %s is redundant", cause.Label) -} - -func (c *unlabelStmtChecker) warnLabeledContinue(cause ast.Node, label string) { - c.ctx.Warn(cause, "change `continue %s` to `break`", label) -} diff --git a/vendor/github.com/go-critic/go-critic/checkers/unlambda_checker.go b/vendor/github.com/go-critic/go-critic/checkers/unlambda_checker.go deleted file mode 100644 index 0401bf5d3..000000000 --- a/vendor/github.com/go-critic/go-critic/checkers/unlambda_checker.go +++ /dev/null @@ -1,118 +0,0 @@ -package checkers - -import ( - "go/ast" - "go/token" - "go/types" - - "github.com/go-critic/go-critic/checkers/internal/astwalk" - "github.com/go-critic/go-critic/checkers/internal/lintutil" - "github.com/go-critic/go-critic/linter" - - "github.com/go-toolsmith/astcast" - "github.com/go-toolsmith/astequal" - "github.com/go-toolsmith/typep" -) - -func init() { - var info linter.CheckerInfo - info.Name = "unlambda" - info.Tags = []string{linter.StyleTag} - info.Summary = "Detects function literals that can be simplified" - info.Before = `func(x int) int { return fn(x) }` - info.After = `fn` - - collection.AddChecker(&info, func(ctx *linter.CheckerContext) (linter.FileWalker, error) { - return astwalk.WalkerForExpr(&unlambdaChecker{ctx: ctx}), nil - }) -} - -type unlambdaChecker struct { - astwalk.WalkHandler - ctx *linter.CheckerContext -} - -func (c *unlambdaChecker) VisitExpr(x ast.Expr) { - fn, ok := x.(*ast.FuncLit) - if !ok || len(fn.Body.List) != 1 { - return - } - - ret, ok := fn.Body.List[0].(*ast.ReturnStmt) - if !ok || len(ret.Results) != 1 { - return - } - - result := astcast.ToCallExpr(ret.Results[0]) - callable := qualifiedName(result.Fun) - if callable == "" { - return // Skip tricky cases; only handle simple calls - } - if isBuiltin(callable) { - return // See #762 - } - hasVars := lintutil.ContainsNode(result.Fun, func(n ast.Node) bool { - id, ok := n.(*ast.Ident) - if !ok { - return false - } - obj, ok := c.ctx.TypesInfo.ObjectOf(id).(*types.Var) - if !ok { - return false - } - // Permit only non-pointer struct method values. - return !typep.IsStruct(obj.Type().Underlying()) - }) - if hasVars { - return // See #888 #1007 - } - - fnType := c.ctx.TypeOf(fn) - resultType := c.ctx.TypeOf(result.Fun) - if !types.Identical(fnType, resultType) { - return - } - // Now check that all arguments match the parameters. - n := 0 - for _, params := range fn.Type.Params.List { - if _, ok := params.Type.(*ast.Ellipsis); ok { - if result.Ellipsis == token.NoPos { - return - } - n++ - continue - } - - for _, id := range params.Names { - if !astequal.Expr(id, result.Args[n]) { - return - } - n++ - } - } - - if c.lenArgs(result.Args) == n { - c.warn(fn, callable) - } -} - -func (c *unlambdaChecker) warn(cause ast.Node, suggestion string) { - c.ctx.Warn(cause, "replace `%s` with `%s`", cause, suggestion) -} - -func (c *unlambdaChecker) lenArgs(args []ast.Expr) int { - lenArgs := len(args) - - for _, arg := range args { - callExp, ok := arg.(*ast.CallExpr) - if !ok { - continue - } - - // Don't count function call. only args. 
- lenArgs-- - lenArgs += c.lenArgs(callExp.Args) - } - - return lenArgs -} diff --git a/vendor/github.com/go-critic/go-critic/checkers/unnamedResult_checker.go b/vendor/github.com/go-critic/go-critic/checkers/unnamedResult_checker.go deleted file mode 100644 index 0d40addf7..000000000 --- a/vendor/github.com/go-critic/go-critic/checkers/unnamedResult_checker.go +++ /dev/null @@ -1,103 +0,0 @@ -package checkers - -import ( - "go/ast" - "go/types" - - "github.com/go-critic/go-critic/checkers/internal/astwalk" - "github.com/go-critic/go-critic/linter" -) - -func init() { - var info linter.CheckerInfo - info.Name = "unnamedResult" - info.Tags = []string{linter.StyleTag, linter.OpinionatedTag, linter.ExperimentalTag} - info.Params = linter.CheckerParams{ - "checkExported": { - Value: false, - Usage: "whether to check exported functions", - }, - } - info.Summary = "Detects unnamed results that may benefit from names" - info.Before = `func f() (float64, float64)` - info.After = `func f() (x, y float64)` - - collection.AddChecker(&info, func(ctx *linter.CheckerContext) (linter.FileWalker, error) { - c := &unnamedResultChecker{ctx: ctx} - c.checkExported = info.Params.Bool("checkExported") - return astwalk.WalkerForFuncDecl(c), nil - }) -} - -type unnamedResultChecker struct { - astwalk.WalkHandler - ctx *linter.CheckerContext - - checkExported bool -} - -func (c *unnamedResultChecker) VisitFuncDecl(decl *ast.FuncDecl) { - if c.checkExported && !ast.IsExported(decl.Name.Name) { - return - } - results := decl.Type.Results - switch { - case results == nil: - return // Function has no results - case len(results.List) != 0 && results.List[0].Names != nil: - return // Skip named results - } - - typeName := func(x ast.Expr) string { return c.typeName(c.ctx.TypeOf(x)) } - isError := func(x ast.Expr) bool { return qualifiedName(x) == "error" } - isBool := func(x ast.Expr) bool { return qualifiedName(x) == "bool" } - - // Main difference with case of len=2 is that we permit any - // typ1 as long as second type is either error or bool. 
- if results.NumFields() == 2 { - typ1, typ2 := results.List[0].Type, results.List[1].Type - name1, name2 := typeName(typ1), typeName(typ2) - cond := (name1 != name2 && name2 != "") || - (!isError(typ1) && isError(typ2)) || - (!isBool(typ1) && isBool(typ2)) - if !cond { - c.warn(decl) - } - return - } - - seen := make(map[string]bool, len(results.List)) - for i := range results.List { - typ := results.List[i].Type - name := typeName(typ) - isLast := i == len(results.List)-1 - - cond := !seen[name] || - (isLast && (isError(typ) || isBool(typ))) - if !cond { - c.warn(decl) - return - } - - seen[name] = true - } -} - -func (c *unnamedResultChecker) typeName(typ types.Type) string { - switch typ := typ.(type) { - case *types.Array: - return c.typeName(typ.Elem()) - case *types.Pointer: - return c.typeName(typ.Elem()) - case *types.Slice: - return c.typeName(typ.Elem()) - case *types.Named: - return typ.Obj().Name() - default: - return "" - } -} - -func (c *unnamedResultChecker) warn(n ast.Node) { - c.ctx.Warn(n, "consider giving a name to these results") -} diff --git a/vendor/github.com/go-critic/go-critic/checkers/unnecessaryBlock_checker.go b/vendor/github.com/go-critic/go-critic/checkers/unnecessaryBlock_checker.go deleted file mode 100644 index b577ff421..000000000 --- a/vendor/github.com/go-critic/go-critic/checkers/unnecessaryBlock_checker.go +++ /dev/null @@ -1,78 +0,0 @@ -package checkers - -import ( - "go/ast" - "go/token" - - "github.com/go-critic/go-critic/checkers/internal/astwalk" - "github.com/go-critic/go-critic/linter" - - "github.com/go-toolsmith/astp" -) - -func init() { - var info linter.CheckerInfo - info.Name = "unnecessaryBlock" - info.Tags = []string{linter.StyleTag, linter.OpinionatedTag, linter.ExperimentalTag} - info.Summary = "Detects unnecessary braced statement blocks" - info.Before = ` -x := 1 -{ - print(x) -}` - info.After = ` -x := 1 -print(x)` - - collection.AddChecker(&info, func(ctx *linter.CheckerContext) (linter.FileWalker, error) { - return astwalk.WalkerForStmtList(&unnecessaryBlockChecker{ctx: ctx}), nil - }) -} - -type unnecessaryBlockChecker struct { - astwalk.WalkHandler - ctx *linter.CheckerContext -} - -func (c *unnecessaryBlockChecker) VisitStmtList(x ast.Node, statements []ast.Stmt) { - // Using StmtListVisitor instead of StmtVisitor makes it easier to avoid - // false positives on IfStmt, RangeStmt, ForStmt and alike. - // We only inspect BlockStmt inside statement lists, so this method is not - // called for IfStmt itself, for example. 
- - if (astp.IsCaseClause(x) || astp.IsCommClause(x)) && len(statements) == 1 { - if _, ok := statements[0].(*ast.BlockStmt); ok { - c.ctx.Warn(statements[0], "case statement doesn't require a block statement") - return - } - } - - for _, stmt := range statements { - stmt, ok := stmt.(*ast.BlockStmt) - if ok && !c.hasDefinitions(stmt) { - c.warn(stmt) - } - } -} - -func (c *unnecessaryBlockChecker) hasDefinitions(stmt *ast.BlockStmt) bool { - for _, bs := range stmt.List { - switch stmt := bs.(type) { - case *ast.AssignStmt: - if stmt.Tok == token.DEFINE { - return true - } - case *ast.DeclStmt: - decl := stmt.Decl.(*ast.GenDecl) - if len(decl.Specs) != 0 { - return true - } - } - } - - return false -} - -func (c *unnecessaryBlockChecker) warn(expr ast.Stmt) { - c.ctx.Warn(expr, "block doesn't have definitions, can be simply deleted") -} diff --git a/vendor/github.com/go-critic/go-critic/checkers/unnecessaryDefer_checker.go b/vendor/github.com/go-critic/go-critic/checkers/unnecessaryDefer_checker.go deleted file mode 100644 index 4c1ed41f6..000000000 --- a/vendor/github.com/go-critic/go-critic/checkers/unnecessaryDefer_checker.go +++ /dev/null @@ -1,112 +0,0 @@ -package checkers - -import ( - "go/ast" - - "github.com/go-critic/go-critic/checkers/internal/astwalk" - "github.com/go-critic/go-critic/linter" - - "github.com/go-toolsmith/astfmt" -) - -func init() { - var info linter.CheckerInfo - info.Name = "unnecessaryDefer" - info.Tags = []string{linter.DiagnosticTag, linter.ExperimentalTag} - info.Summary = "Detects redundantly deferred calls" - info.Before = ` -func() { - defer os.Remove(filename) -}` - info.After = ` -func() { - os.Remove(filename) -}` - - collection.AddChecker(&info, func(ctx *linter.CheckerContext) (linter.FileWalker, error) { - return astwalk.WalkerForFuncDecl(&unnecessaryDeferChecker{ctx: ctx}), nil - }) -} - -type unnecessaryDeferChecker struct { - astwalk.WalkHandler - ctx *linter.CheckerContext - isFunc bool -} - -// Visit implements the ast.Visitor. This visitor keeps track of the block -// statement belongs to a function or any other block. If the block is not a -// function and ends with a defer statement that should be OK since it's -// deferring the outer function. -func (c *unnecessaryDeferChecker) Visit(node ast.Node) ast.Visitor { - switch n := node.(type) { - case *ast.FuncDecl, *ast.FuncLit: - c.isFunc = true - case *ast.BlockStmt: - c.checkDeferBeforeReturn(n) - default: - c.isFunc = false - } - - return c -} - -func (c *unnecessaryDeferChecker) VisitFuncDecl(funcDecl *ast.FuncDecl) { - // We always start as a function (*ast.FuncDecl.Body passed) - c.isFunc = true - - ast.Walk(c, funcDecl.Body) -} - -func (c *unnecessaryDeferChecker) checkDeferBeforeReturn(funcDecl *ast.BlockStmt) { - // Check if we have an explicit return or if it's just the end of the scope. - explicitReturn := false - retIndex := len(funcDecl.List) - for i, stmt := range funcDecl.List { - retStmt, ok := stmt.(*ast.ReturnStmt) - if !ok { - continue - } - explicitReturn = true - if !c.isTrivialReturn(retStmt) { - continue - } - retIndex = i - break - } - if retIndex == 0 { - return - } - - if deferStmt, ok := funcDecl.List[retIndex-1].(*ast.DeferStmt); ok { - // If the block is a function and ending with return or if we have an - // explicit return in any other block we should warn about - // unnecessary defer. 
- if c.isFunc || explicitReturn { - c.warn(deferStmt) - } - } -} - -func (c *unnecessaryDeferChecker) isTrivialReturn(ret *ast.ReturnStmt) bool { - for _, e := range ret.Results { - if !c.isConstExpr(e) { - return false - } - } - return true -} - -func (c *unnecessaryDeferChecker) isConstExpr(e ast.Expr) bool { - return c.ctx.TypesInfo.Types[e].Value != nil -} - -func (c *unnecessaryDeferChecker) warn(deferStmt *ast.DeferStmt) { - s := astfmt.Sprint(deferStmt) - if fnlit, ok := deferStmt.Call.Fun.(*ast.FuncLit); ok { - // To avoid long and multi-line warning messages, - // collapse the function literals. - s = "defer " + astfmt.Sprint(fnlit.Type) + "{...}(...)" - } - c.ctx.Warn(deferStmt, "%s is placed just before return", s) -} diff --git a/vendor/github.com/go-critic/go-critic/checkers/utils.go b/vendor/github.com/go-critic/go-critic/checkers/utils.go deleted file mode 100644 index 4757bbf5f..000000000 --- a/vendor/github.com/go-critic/go-critic/checkers/utils.go +++ /dev/null @@ -1,309 +0,0 @@ -package checkers - -import ( - "go/ast" - "go/types" - "strings" - - "github.com/go-critic/go-critic/linter" -) - -// goStdlib contains `go list std` command output list. -// Used to detect packages that belong to standard Go packages distribution. -var goStdlib = map[string]bool{ - "archive/tar": true, - "archive/zip": true, - "bufio": true, - "bytes": true, - "compress/bzip2": true, - "compress/flate": true, - "compress/gzip": true, - "compress/lzw": true, - "compress/zlib": true, - "container/heap": true, - "container/list": true, - "container/ring": true, - "context": true, - "crypto": true, - "crypto/aes": true, - "crypto/cipher": true, - "crypto/des": true, - "crypto/dsa": true, - "crypto/ecdsa": true, - "crypto/elliptic": true, - "crypto/hmac": true, - "crypto/internal/randutil": true, - "crypto/internal/subtle": true, - "crypto/md5": true, - "crypto/rand": true, - "crypto/rc4": true, - "crypto/rsa": true, - "crypto/sha1": true, - "crypto/sha256": true, - "crypto/sha512": true, - "crypto/subtle": true, - "crypto/tls": true, - "crypto/x509": true, - "crypto/x509/pkix": true, - "database/sql": true, - "database/sql/driver": true, - "debug/dwarf": true, - "debug/elf": true, - "debug/gosym": true, - "debug/macho": true, - "debug/pe": true, - "debug/plan9obj": true, - "encoding": true, - "encoding/ascii85": true, - "encoding/asn1": true, - "encoding/base32": true, - "encoding/base64": true, - "encoding/binary": true, - "encoding/csv": true, - "encoding/gob": true, - "encoding/hex": true, - "encoding/json": true, - "encoding/pem": true, - "encoding/xml": true, - "errors": true, - "expvar": true, - "flag": true, - "fmt": true, - "go/ast": true, - "go/build": true, - "go/constant": true, - "go/doc": true, - "go/format": true, - "go/importer": true, - "go/internal/gccgoimporter": true, - "go/internal/gcimporter": true, - "go/internal/srcimporter": true, - "go/parser": true, - "go/printer": true, - "go/scanner": true, - "go/token": true, - "go/types": true, - "hash": true, - "hash/adler32": true, - "hash/crc32": true, - "hash/crc64": true, - "hash/fnv": true, - "html": true, - "html/template": true, - "image": true, - "image/color": true, - "image/color/palette": true, - "image/draw": true, - "image/gif": true, - "image/internal/imageutil": true, - "image/jpeg": true, - "image/png": true, - "index/suffixarray": true, - "internal/bytealg": true, - "internal/cpu": true, - "internal/nettrace": true, - "internal/poll": true, - "internal/race": true, - "internal/singleflight": true, - 
"internal/syscall/unix": true, - "internal/syscall/windows": true, - "internal/syscall/windows/registry": true, - "internal/syscall/windows/sysdll": true, - "internal/testenv": true, - "internal/testlog": true, - "internal/trace": true, - "io": true, - "io/ioutil": true, - "log": true, - "log/syslog": true, - "math": true, - "math/big": true, - "math/bits": true, - "math/cmplx": true, - "math/rand": true, - "mime": true, - "mime/multipart": true, - "mime/quotedprintable": true, - "net": true, - "net/http": true, - "net/http/cgi": true, - "net/http/cookiejar": true, - "net/http/fcgi": true, - "net/http/httptest": true, - "net/http/httptrace": true, - "net/http/httputil": true, - "net/http/internal": true, - "net/http/pprof": true, - "net/internal/socktest": true, - "net/mail": true, - "net/rpc": true, - "net/rpc/jsonrpc": true, - "net/smtp": true, - "net/textproto": true, - "net/url": true, - "os": true, - "os/exec": true, - "os/signal": true, - "os/signal/internal/pty": true, - "os/user": true, - "path": true, - "path/filepath": true, - "plugin": true, - "reflect": true, - "regexp": true, - "regexp/syntax": true, - "runtime": true, - "runtime/cgo": true, - "runtime/debug": true, - "runtime/internal/atomic": true, - "runtime/internal/sys": true, - "runtime/pprof": true, - "runtime/pprof/internal/profile": true, - "runtime/race": true, - "runtime/trace": true, - "sort": true, - "strconv": true, - "strings": true, - "sync": true, - "sync/atomic": true, - "syscall": true, - "testing": true, - "testing/internal/testdeps": true, - "testing/iotest": true, - "testing/quick": true, - "text/scanner": true, - "text/tabwriter": true, - "text/template": true, - "text/template/parse": true, - "time": true, - "unicode": true, - "unicode/utf16": true, - "unicode/utf8": true, - "unsafe": true, -} - -var goBuiltins = map[string]bool{ - // Types - "bool": true, - "byte": true, - "complex64": true, - "complex128": true, - "error": true, - "float32": true, - "float64": true, - "int": true, - "int8": true, - "int16": true, - "int32": true, - "int64": true, - "rune": true, - "string": true, - "uint": true, - "uint8": true, - "uint16": true, - "uint32": true, - "uint64": true, - "uintptr": true, - - // Constants - "true": true, - "false": true, - "iota": true, - - // Zero value - "nil": true, - - // Functions - "append": true, - "cap": true, - "close": true, - "complex": true, - "copy": true, - "delete": true, - "imag": true, - "len": true, - "make": true, - "new": true, - "panic": true, - "print": true, - "println": true, - "real": true, - "recover": true, -} - -// isBuiltin reports whether sym belongs to a predefined identifier set. -func isBuiltin(sym string) bool { - return goBuiltins[sym] -} - -// isStdlibPkg reports whether pkg is a package from the Go standard library. -func isStdlibPkg(pkg *types.Package) bool { - return pkg != nil && goStdlib[pkg.Path()] -} - -// isExampleTestFunc reports whether FuncDecl looks like a testable example function. -func isExampleTestFunc(fn *ast.FuncDecl) bool { - return len(fn.Type.Params.List) == 0 && strings.HasPrefix(fn.Name.String(), "Example") -} - -// isUnitTestFunc reports whether FuncDecl declares testing function. 
-func isUnitTestFunc(ctx *linter.CheckerContext, fn *ast.FuncDecl) bool { - if !strings.HasPrefix(fn.Name.Name, "Test") { - return false - } - typ := ctx.TypesInfo.TypeOf(fn.Name) - if sig, ok := typ.(*types.Signature); ok { - return sig.Results().Len() == 0 && - sig.Params().Len() == 1 && - sig.Params().At(0).Type().String() == "*testing.T" - } - return false -} - -// qualifiedName returns called expr fully-qualified name. -// -// It works for simple identifiers like f => "f" and identifiers -// from other package like pkg.f => "pkg.f". -// -// For all unexpected expressions returns empty string. -func qualifiedName(x ast.Expr) string { - switch x := x.(type) { - case *ast.SelectorExpr: - pkg, ok := x.X.(*ast.Ident) - if !ok { - return "" - } - return pkg.Name + "." + x.Sel.Name - case *ast.Ident: - return x.Name - default: - return "" - } -} - -// identOf returns identifier for x that can be used to obtain associated types.Object. -// Returns nil for expressions that yield temporary results, like `f().field`. -func identOf(x ast.Node) *ast.Ident { - switch x := x.(type) { - case *ast.Ident: - return x - case *ast.SelectorExpr: - return identOf(x.Sel) - case *ast.TypeAssertExpr: - // x.(type) - x may contain ident. - return identOf(x.X) - case *ast.IndexExpr: - // x[i] - x may contain ident. - return identOf(x.X) - case *ast.StarExpr: - // *x - x may contain ident. - return identOf(x.X) - case *ast.SliceExpr: - // x[:] - x may contain ident. - return identOf(x.X) - - default: - // Note that this function is not comprehensive. - return nil - } -} diff --git a/vendor/github.com/go-critic/go-critic/checkers/weakCond_checker.go b/vendor/github.com/go-critic/go-critic/checkers/weakCond_checker.go deleted file mode 100644 index 3d7c9c122..000000000 --- a/vendor/github.com/go-critic/go-critic/checkers/weakCond_checker.go +++ /dev/null @@ -1,78 +0,0 @@ -package checkers - -import ( - "go/ast" - "go/token" - - "github.com/go-critic/go-critic/checkers/internal/astwalk" - "github.com/go-critic/go-critic/checkers/internal/lintutil" - "github.com/go-critic/go-critic/linter" - - "github.com/go-toolsmith/astcast" - "github.com/go-toolsmith/astequal" - "github.com/go-toolsmith/typep" - "golang.org/x/tools/go/ast/astutil" -) - -func init() { - var info linter.CheckerInfo - info.Name = "weakCond" - info.Tags = []string{linter.DiagnosticTag, linter.ExperimentalTag} - info.Summary = "Detects conditions that are unsafe due to not being exhaustive" - info.Before = `xs != nil && xs[0] != nil` - info.After = `len(xs) != 0 && xs[0] != nil` - - collection.AddChecker(&info, func(ctx *linter.CheckerContext) (linter.FileWalker, error) { - return astwalk.WalkerForExpr(&weakCondChecker{ctx: ctx}), nil - }) -} - -type weakCondChecker struct { - astwalk.WalkHandler - ctx *linter.CheckerContext -} - -func (c *weakCondChecker) VisitExpr(expr ast.Expr) { - // TODO(Quasilyte): more patterns. - // TODO(Quasilyte): analyze and fix false positives. - - cond := astcast.ToBinaryExpr(expr) - lhs := astcast.ToBinaryExpr(astutil.Unparen(cond.X)) - rhs := astutil.Unparen(cond.Y) - - // Pattern 1. - // `x != nil && usageOf(x[i])` - // Pattern 2. 
- // `x == nil || usageOf(x[i])` - - // lhs is `x nil` - x := lhs.X - if !typep.IsSlice(c.ctx.TypeOf(x)) { - return - } - if astcast.ToIdent(lhs.Y).Name != "nil" { - return - } - - pat1prefix := cond.Op == token.LAND && lhs.Op == token.NEQ - pat2prefix := cond.Op == token.LOR && lhs.Op == token.EQL - if !pat1prefix && !pat2prefix { - return - } - - if c.isIndexed(rhs, x) { - c.warn(expr, "nil check may not be enough, check for len") - } -} - -// isIndexed reports whether x is indexed inside given expr tree. -func (c *weakCondChecker) isIndexed(tree, x ast.Expr) bool { - return lintutil.ContainsNode(tree, func(n ast.Node) bool { - indexing := astcast.ToIndexExpr(n) - return astequal.Expr(x, indexing.X) - }) -} - -func (c *weakCondChecker) warn(cause ast.Node, suggest string) { - c.ctx.Warn(cause, "suspicious `%s`; %s", cause, suggest) -} diff --git a/vendor/github.com/go-critic/go-critic/checkers/whyNoLint_checker.go b/vendor/github.com/go-critic/go-critic/checkers/whyNoLint_checker.go deleted file mode 100644 index eaa53e5d5..000000000 --- a/vendor/github.com/go-critic/go-critic/checkers/whyNoLint_checker.go +++ /dev/null @@ -1,50 +0,0 @@ -package checkers - -import ( - "go/ast" - "regexp" - "strings" - - "github.com/go-critic/go-critic/checkers/internal/astwalk" - "github.com/go-critic/go-critic/linter" -) - -func init() { - var info linter.CheckerInfo - info.Name = "whyNoLint" - info.Tags = []string{linter.StyleTag, linter.ExperimentalTag} - info.Summary = "Ensures that `//nolint` comments include an explanation" - info.Before = `//nolint` - info.After = `//nolint // reason` - - collection.AddChecker(&info, func(ctx *linter.CheckerContext) (linter.FileWalker, error) { - return astwalk.WalkerForComment(&whyNoLintChecker{ - ctx: ctx, - re: regexp.MustCompile(`^// *nolint(?::[^ ]+)? *(.*)$`), - }), nil - }) -} - -type whyNoLintChecker struct { - astwalk.WalkHandler - - ctx *linter.CheckerContext - re *regexp.Regexp -} - -func (c whyNoLintChecker) VisitComment(cg *ast.CommentGroup) { - if strings.HasPrefix(cg.List[0].Text, "/*") { - return - } - for _, comment := range cg.List { - sl := c.re.FindStringSubmatch(comment.Text) - if len(sl) < 2 { - continue - } - - if s := sl[1]; !strings.HasPrefix(s, "//") || strings.TrimPrefix(s, "//") == "" { - c.ctx.Warn(cg, "include an explanation for nolint directive") - return - } - } -} diff --git a/vendor/github.com/go-critic/go-critic/linter/go_version.go b/vendor/github.com/go-critic/go-critic/linter/go_version.go deleted file mode 100644 index b5ef2f75f..000000000 --- a/vendor/github.com/go-critic/go-critic/linter/go_version.go +++ /dev/null @@ -1,52 +0,0 @@ -package linter - -import ( - "fmt" - "strconv" - "strings" -) - -type GoVersion struct { - Major int - Minor int -} - -// GreaterOrEqual performs $v >= $other operation. -// -// In other words, it reports whether $v version constraint can use -// a feature from the $other Go version. -// -// As a special case, Major=0 covers all versions. 
-func (v GoVersion) GreaterOrEqual(other GoVersion) bool { - switch { - case v.Major == 0: - return true - case v.Major == other.Major: - return v.Minor >= other.Minor - default: - return v.Major >= other.Major - } -} - -func ParseGoVersion(version string) (GoVersion, error) { - var result GoVersion - version = strings.TrimPrefix(version, "go") - if version == "" { - return result, nil - } - parts := strings.Split(version, ".") - if len(parts) != 2 { - return result, fmt.Errorf("invalid Go version format: %s", version) - } - major, err := strconv.Atoi(parts[0]) - if err != nil { - return result, fmt.Errorf("invalid major version part: %s: %w", parts[0], err) - } - minor, err := strconv.Atoi(parts[1]) - if err != nil { - return result, fmt.Errorf("invalid minor version part: %s: %w", parts[1], err) - } - result.Major = major - result.Minor = minor - return result, nil -} diff --git a/vendor/github.com/go-critic/go-critic/linter/helpers.go b/vendor/github.com/go-critic/go-critic/linter/helpers.go deleted file mode 100644 index 0a3fc0292..000000000 --- a/vendor/github.com/go-critic/go-critic/linter/helpers.go +++ /dev/null @@ -1,136 +0,0 @@ -package linter - -import ( - "fmt" - "regexp" - "sort" - "strings" - - "github.com/go-toolsmith/astfmt" -) - -type checkerProto struct { - info *CheckerInfo - constructor func(*Context) (*Checker, error) -} - -// prototypes is a set of registered checkers that are not yet instantiated. -// Registration should be done with AddChecker function. -// Initialized checkers can be obtained with NewChecker function. -var prototypes = make(map[string]checkerProto) - -func getCheckersInfo() []*CheckerInfo { - infoList := make([]*CheckerInfo, 0, len(prototypes)) - for _, proto := range prototypes { - infoCopy := *proto.info - infoList = append(infoList, &infoCopy) - } - sort.Slice(infoList, func(i, j int) bool { - return infoList[i].Name < infoList[j].Name - }) - return infoList -} - -func addChecker(info *CheckerInfo, constructor func(*CheckerContext) (FileWalker, error)) { - if _, ok := prototypes[info.Name]; ok { - panic(fmt.Sprintf("checker with name %q already registered", info.Name)) - } - - // Validate param value type. - for pname, param := range info.Params { - switch param.Value.(type) { - case string, int, bool: - // OK. 
- default: - panic(fmt.Sprintf("unsupported %q param type value: %T", - pname, param.Value)) - } - } - - trimDocumentation := func(info *CheckerInfo) { - fields := []*string{ - &info.Summary, - &info.Details, - &info.Before, - &info.After, - &info.Note, - } - for _, f := range fields { - *f = strings.TrimSpace(*f) - } - } - - trimDocumentation(info) - - if err := validateCheckerInfo(info); err != nil { - panic(err) - } - - proto := checkerProto{ - info: info, - constructor: func(ctx *Context) (*Checker, error) { - var c Checker - c.Info = info - c.ctx = CheckerContext{ - Context: ctx, - printer: astfmt.NewPrinter(ctx.FileSet), - } - var err error - c.fileWalker, err = constructor(&c.ctx) - return &c, err - }, - } - - prototypes[info.Name] = proto -} - -func newChecker(ctx *Context, info *CheckerInfo) (*Checker, error) { - proto, ok := prototypes[info.Name] - if !ok { - panic(fmt.Sprintf("checker with name %q not registered", info.Name)) - } - return proto.constructor(ctx) -} - -func validateCheckerInfo(info *CheckerInfo) error { - steps := []func(*CheckerInfo) error{ - validateCheckerName, - validateCheckerDocumentation, - validateCheckerTags, - } - - for _, step := range steps { - if err := step(info); err != nil { - return fmt.Errorf("%q validation error: %v", info.Name, err) - } - } - return nil -} - -var validIdentRE = regexp.MustCompile(`^\w+$`) - -func validateCheckerName(info *CheckerInfo) error { - if !validIdentRE.MatchString(info.Name) { - return fmt.Errorf("checker name contains illegal chars") - } - return nil -} - -func validateCheckerDocumentation(info *CheckerInfo) error { - // TODO(quasilyte): validate documentation. - return nil -} - -func validateCheckerTags(info *CheckerInfo) error { - tagSet := make(map[string]bool) - for _, tag := range info.Tags { - if tagSet[tag] { - return fmt.Errorf("duplicated tag %q", tag) - } - if !validIdentRE.MatchString(tag) { - return fmt.Errorf("checker tag %q contains illegal chars", tag) - } - tagSet[tag] = true - } - return nil -} diff --git a/vendor/github.com/go-critic/go-critic/linter/linter.go b/vendor/github.com/go-critic/go-critic/linter/linter.go deleted file mode 100644 index d4bc17536..000000000 --- a/vendor/github.com/go-critic/go-critic/linter/linter.go +++ /dev/null @@ -1,401 +0,0 @@ -package linter - -import ( - "go/ast" - "go/token" - "go/types" - "strconv" - "strings" - - "github.com/go-toolsmith/astfmt" -) - -const ( - DiagnosticTag = "diagnostic" - ExperimentalTag = "experimental" - OpinionatedTag = "opinionated" - PerformanceTag = "performance" - SecurityTag = "security" - StyleTag = "style" -) - -// UnknownType is a special sentinel value that is returned from the CheckerContext.TypeOf -// method instead of the nil type. -var UnknownType types.Type = types.Typ[types.Invalid] - -// FileWalker is an interface every checker should implement. -// -// The WalkFile method is executed for every Go file inside the -// package that is being checked. -type FileWalker interface { - WalkFile(*ast.File) -} - -// CheckerCollection provides additional information for a group of checkers. -type CheckerCollection struct { - // URL is a link for a main source of information on the collection. - URL string -} - -// AddChecker registers a new checker into a checkers pool. -// Constructor is used to create a new checker instance. -// Checker name (defined in CheckerInfo.Name) must be unique. -// -// CheckerInfo.Collection is automatically set to the coll (the receiver). 
-// -// If checker is never needed, for example if it is disabled, -// constructor will not be called. -func (coll *CheckerCollection) AddChecker(info *CheckerInfo, constructor func(*CheckerContext) (FileWalker, error)) { - if coll == nil { - panic("adding checker to a nil collection") - } - info.Collection = coll - addChecker(info, constructor) -} - -// CheckerParam describes a single checker customizable parameter. -type CheckerParam struct { - // Value holds parameter bound value. - // It might be overwritten by the integrating linter. - // - // Permitted types include: - // - int - // - bool - // - string - Value interface{} - - // Usage gives an overview about what parameter does. - Usage string -} - -// CheckerParams holds all checker-specific parameters. -// -// Provides convenient access to the loosely typed underlying map. -type CheckerParams map[string]*CheckerParam - -// Int lookups pname key in underlying map and type-asserts it to int. -func (params CheckerParams) Int(pname string) int { return params[pname].Value.(int) } - -// Bool lookups pname key in underlying map and type-asserts it to bool. -func (params CheckerParams) Bool(pname string) bool { return params[pname].Value.(bool) } - -// String lookups pname key in underlying map and type-asserts it to string. -func (params CheckerParams) String(pname string) string { return params[pname].Value.(string) } - -// CheckerInfo holds checker metadata and structured documentation. -type CheckerInfo struct { - // Name is a checker name. - Name string - - // Tags is a list of labels that can be used to enable or disable checker. - // Common tags are "experimental" and "performance". - Tags []string - - // Params declares checker-specific parameters. Optional. - Params CheckerParams - - // Summary is a short one sentence description. - // Should not end with a period. - Summary string - - // Details extends summary with additional info. Optional. - Details string - - // Before is a code snippet of code that will violate rule. - Before string - - // After is a code snippet of fixed code that complies to the rule. - After string - - // Note is an optional caution message or advice. - Note string - - // EmbeddedRuleguard tells whether this checker is auto-generated - // from the embedded ruleguard rules. - EmbeddedRuleguard bool - - // Collection establishes a checker-to-collection relationship. - Collection *CheckerCollection -} - -// GetCheckersInfo returns a checkers info list for all registered checkers. -// The slice is sorted by a checker name. -// -// Info objects can be used to instantiate checkers with NewChecker function. -func GetCheckersInfo() []*CheckerInfo { - return getCheckersInfo() -} - -// HasTag reports whether checker described by the info has specified tag. -func (info *CheckerInfo) HasTag(tag string) bool { - for i := range info.Tags { - if info.Tags[i] == tag { - return true - } - } - return false -} - -// Checker is an implementation of a check that is described by the associated info. -type Checker struct { - // Info is an info object that was used to instantiate this checker. - Info *CheckerInfo - - ctx CheckerContext - - fileWalker FileWalker -} - -// NewChecker returns initialized checker identified by an info. -// info must be non-nil. -// Returns an error if info describes a checker that was not properly registered, -// or if checker fails to initialize. -func NewChecker(ctx *Context, info *CheckerInfo) (*Checker, error) { - return newChecker(ctx, info) -} - -// Check runs rule checker over file f. 
-func (c *Checker) Check(f *ast.File) []Warning { - c.ctx.warnings = c.ctx.warnings[:0] - c.fileWalker.WalkFile(f) - return c.ctx.warnings -} - -// QuickFix is our analysis.TextEdit; we're using it here to avoid -// direct analysis package dependency for now. -type QuickFix struct { - From token.Pos - To token.Pos - Replacement []byte -} - -// Warning represents issue that is found by checker. -type Warning struct { - Pos token.Pos - - // Text is warning message without source location info. - Text string - - // Suggestion is a quick fix for a given problem. - // QuickFix is analysis.TextEdit and can be used to - // construct an analysis.SuggestedFix object. - // - // For convenience, there is Warning.HasQuickFix() method - // that reports whether Suggestion has something meaningful. - Suggestion QuickFix -} - -// HasQuickFix reports whether this warning has a suggested fix. -func (warn Warning) HasQuickFix() bool { - return warn.Suggestion.Replacement != nil -} - -// Context is a readonly state shared among every checker. -type Context struct { - // TypesInfo carries parsed packages types information. - TypesInfo *types.Info - - // SizesInfo carries alignment and type size information. - // Arch-dependent. - SizesInfo types.Sizes - - // GoVersion is a target Go version. - GoVersion GoVersion - - // FileSet is a file set that was used during the program loading. - FileSet *token.FileSet - - // Pkg describes package that is being checked. - Pkg *types.Package - - // Filename is a currently checked file name. - Filename string - - // Require records what optional resources are required - // by the checkers set that use this context. - // - // Every require fields makes associated context field - // to be properly initialized. - // For example, Context.require.PkgObjects => Context.PkgObjects. - Require struct { - PkgObjects bool - PkgRenames bool - } - - // PkgObjects stores all imported packages and their local names. - PkgObjects map[*types.PkgName]string - - // PkgRenames maps package path to its local renaming. - // Contains no entries for packages that were imported without - // explicit local names. - PkgRenames map[string]string -} - -// NewContext returns new shared context to be used by every checker. -// -// All data carried by the context is readonly for checkers, -// but can be modified by the integrating application. -func NewContext(fset *token.FileSet, sizes types.Sizes) *Context { - return &Context{ - FileSet: fset, - SizesInfo: sizes, - TypesInfo: &types.Info{}, - } -} - -// SetGoVersion adjust the target Go language version. -// -// The format is like "1.5", "1.8", etc. -// It's permitted to have "go" prefix (e.g. "go1.5"). -// -// Empty string (the default) means that we make no -// Go version assumptions and (like gocritic does) behave -// like all features are available. To make gocritic -// more conservative, the upper Go version level should be adjusted. -func (c *Context) SetGoVersion(version string) { - v, err := ParseGoVersion(version) - if err != nil { - panic(err) - } - c.GoVersion = v -} - -// SetPackageInfo sets package-related metadata. -// -// Must be called for every package being checked. -func (c *Context) SetPackageInfo(info *types.Info, pkg *types.Package) { - if info != nil { - // We do this kind of assignment to avoid - // changing c.typesInfo field address after - // every re-assignment. - *c.TypesInfo = *info - } - c.Pkg = pkg -} - -// SetFileInfo sets file-related metadata. -// -// Must be called for every source code file being checked. 
-func (c *Context) SetFileInfo(name string, f *ast.File) { - c.Filename = name - if c.Require.PkgObjects { - resolvePkgObjects(c, f) - } - if c.Require.PkgRenames { - resolvePkgRenames(c, f) - } -} - -// CheckerContext is checker-local context copy. -// Fields that are not from Context itself are writeable. -type CheckerContext struct { - *Context - - // printer used to format warning text. - printer *astfmt.Printer - - warnings []Warning -} - -// Warn adds a Warning to checker output. -func (ctx *CheckerContext) Warn(node ast.Node, format string, args ...interface{}) { - ctx.WarnWithPos(node.Pos(), format, args...) -} - -// WarnFixable emits a warning with a fix suggestion provided by the caller. -func (ctx *CheckerContext) WarnFixable(node ast.Node, fix QuickFix, format string, args ...interface{}) { - ctx.WarnFixableWithPos(node.Pos(), fix, format, args...) -} - -// WarnWithPos adds a Warning to checker output. Useful for ruleguard's Report func. -func (ctx *CheckerContext) WarnWithPos(pos token.Pos, format string, args ...interface{}) { - ctx.warnings = append(ctx.warnings, Warning{ - Text: ctx.printer.Sprintf(format, args...), - Pos: pos, - }) -} - -// WarnFixableWithPos adds a Warning to checker output. Useful for ruleguard's Report func. -func (ctx *CheckerContext) WarnFixableWithPos(pos token.Pos, fix QuickFix, format string, args ...interface{}) { - ctx.warnings = append(ctx.warnings, Warning{ - Text: ctx.printer.Sprintf(format, args...), - Pos: pos, - Suggestion: fix, - }) -} - -// TypeOf returns the type of expression x. -// -// Unlike TypesInfo.TypeOf, it never returns nil. -// Instead, it returns the Invalid type as a sentinel UnknownType value. -func (ctx *CheckerContext) TypeOf(x ast.Expr) types.Type { - typ := ctx.TypesInfo.TypeOf(x) - if typ != nil { - return typ - } - // Usually it means that some incorrect type info was loaded - // or the analyzed package was only partially (?) correct. - // To avoid nil pointer panics we can return a sentinel value - // that will fail most type assertions as well as kind checks - // (if the call side expects a *types.Basic). - return UnknownType -} - -// SizeOf returns the size of the typ in bytes. -// -// Unlike SizesInfo.SizeOf, it will not panic on generic types. -func (ctx *CheckerContext) SizeOf(typ types.Type) (int64, bool) { - if _, ok := typ.(*types.TypeParam); ok { - return 0, false - } - if named, ok := typ.(*types.Named); ok && named.TypeParams() != nil { - return 0, false - } - return ctx.safeSizesInfoSizeof(typ) -} - -// safeSizesInfoSizeof unlike SizesInfo.Sizeof will not panic on struct with generic fields. 
-// it will catch a panic and recover from it, see https://github.com/go-critic/go-critic/issues/1354 -func (ctx *CheckerContext) safeSizesInfoSizeof(typ types.Type) (size int64, ok bool) { - ok = true - defer func() { - if r := recover(); r != nil { - if strings.Contains(r.(string), "assertion failed") { - size, ok = 0, false - } else { - panic(r) - } - } - }() - - size = ctx.SizesInfo.Sizeof(typ) - return size, ok -} - -func resolvePkgObjects(ctx *Context, f *ast.File) { - ctx.PkgObjects = make(map[*types.PkgName]string, len(f.Imports)) - - for _, spec := range f.Imports { - if spec.Name != nil { - obj := ctx.TypesInfo.ObjectOf(spec.Name) - ctx.PkgObjects[obj.(*types.PkgName)] = spec.Name.Name - } else { - obj := ctx.TypesInfo.Implicits[spec] - ctx.PkgObjects[obj.(*types.PkgName)] = obj.Name() - } - } -} - -func resolvePkgRenames(ctx *Context, f *ast.File) { - ctx.PkgRenames = make(map[string]string) - - for _, spec := range f.Imports { - if spec.Name != nil { - path, err := strconv.Unquote(spec.Path.Value) - if err != nil { - panic(err) - } - ctx.PkgRenames[path] = spec.Name.Name - } - } -} diff --git a/vendor/github.com/go-toolsmith/astcast/LICENSE b/vendor/github.com/go-toolsmith/astcast/LICENSE deleted file mode 100644 index eef17180f..000000000 --- a/vendor/github.com/go-toolsmith/astcast/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -MIT License - -Copyright (c) 2018 go-toolsmith - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. diff --git a/vendor/github.com/go-toolsmith/astcast/README.md b/vendor/github.com/go-toolsmith/astcast/README.md deleted file mode 100644 index 19ca0e71d..000000000 --- a/vendor/github.com/go-toolsmith/astcast/README.md +++ /dev/null @@ -1,103 +0,0 @@ -# astcast - -[![build-img]][build-url] -[![pkg-img]][pkg-url] -[![reportcard-img]][reportcard-url] -[![version-img]][version-url] - -Package `astcast` wraps type assertion operations in such way that you don't have -to worry about nil pointer results anymore. - -## Installation - -Go version 1.16+ - -```bash -go get github.com/go-toolsmith/astcast -``` - -## Example - -```go -package main - -import ( - "fmt" - - "github.com/go-toolsmith/astcast" - "github.com/go-toolsmith/strparse" -) - -func main() { - x := strparse.Expr(`(foo * bar) + 1`) - - // x type is ast.Expr, we want to access bar operand - // that is a RHS of the LHS of the addition. - // Note that addition LHS (X field) is has parenthesis, - // so we have to remove them too. 
- - add := astcast.ToBinaryExpr(x) - mul := astcast.ToBinaryExpr(astcast.ToParenExpr(add.X).X) - bar := astcast.ToIdent(mul.Y) - fmt.Printf("%T %s\n", bar, bar.Name) // => *ast.Ident bar - - // If argument has different dynamic type, - // non-nil sentinel object of requested type is returned. - // Those sentinel objects are exported so if you need - // to know whether it was a nil interface value of - // failed type assertion, you can compare returned - // object with such a sentinel. - - y := astcast.ToCallExpr(strparse.Expr(`x`)) - if y == astcast.NilCallExpr { - fmt.Println("it is a sentinel, type assertion failed") - } -} -``` - -Without `astcast`, you would have to do a lots of type assertions: - -```go -package main - -import ( - "fmt" - - "github.com/go-toolsmith/strparse" -) - -func main() { - x := strparse.Expr(`(foo * bar) + 1`) - - add, ok := x.(*ast.BinaryExpr) - if !ok || add == nil { - return - } - additionLHS, ok := add.X.(*ast.ParenExpr) - if !ok || additionLHS == nil { - return - } - mul, ok := additionLHS.X.(*ast.BinaryExpr) - if !ok || mul == nil { - return - } - bar, ok := mul.Y.(*ast.Ident) - if !ok || bar == nil { - return - } - fmt.Printf("%T %s\n", bar, bar.Name) -} -``` - -## License - -[MIT License](LICENSE). - -[build-img]: https://github.com/go-toolsmith/astcast/workflows/build/badge.svg -[build-url]: https://github.com/go-toolsmith/astcast/actions -[pkg-img]: https://pkg.go.dev/badge/go-toolsmith/astcast -[pkg-url]: https://pkg.go.dev/github.com/go-toolsmith/astcast -[reportcard-img]: https://goreportcard.com/badge/go-toolsmith/astcast -[reportcard-url]: https://goreportcard.com/report/go-toolsmith/astcast -[version-img]: https://img.shields.io/github/v/release/go-toolsmith/astcast -[version-url]: https://github.com/go-toolsmith/astcast/releases diff --git a/vendor/github.com/go-toolsmith/astcast/astcast.go b/vendor/github.com/go-toolsmith/astcast/astcast.go deleted file mode 100644 index 746d568aa..000000000 --- a/vendor/github.com/go-toolsmith/astcast/astcast.go +++ /dev/null @@ -1,590 +0,0 @@ -// Code generated by astcast_generate.go; DO NOT EDIT - -// Package astcast wraps type assertion operations in such way that you don't have -// to worry about nil pointer results anymore. -package astcast - -import ( - "go/ast" -) - -// A set of sentinel nil-like values that are returned -// by all "casting" functions in case of failed type assertion. 
-var ( - NilArrayType = &ast.ArrayType{} - NilBadExpr = &ast.BadExpr{} - NilBasicLit = &ast.BasicLit{} - NilBinaryExpr = &ast.BinaryExpr{} - NilCallExpr = &ast.CallExpr{} - NilChanType = &ast.ChanType{} - NilCompositeLit = &ast.CompositeLit{} - NilEllipsis = &ast.Ellipsis{} - NilFuncLit = &ast.FuncLit{} - NilFuncType = &ast.FuncType{} - NilIdent = &ast.Ident{} - NilIndexExpr = &ast.IndexExpr{} - NilInterfaceType = &ast.InterfaceType{} - NilKeyValueExpr = &ast.KeyValueExpr{} - NilMapType = &ast.MapType{} - NilParenExpr = &ast.ParenExpr{} - NilSelectorExpr = &ast.SelectorExpr{} - NilSliceExpr = &ast.SliceExpr{} - NilStarExpr = &ast.StarExpr{} - NilStructType = &ast.StructType{} - NilTypeAssertExpr = &ast.TypeAssertExpr{} - NilUnaryExpr = &ast.UnaryExpr{} - NilAssignStmt = &ast.AssignStmt{} - NilBadStmt = &ast.BadStmt{} - NilBlockStmt = &ast.BlockStmt{} - NilBranchStmt = &ast.BranchStmt{} - NilCaseClause = &ast.CaseClause{} - NilCommClause = &ast.CommClause{} - NilDeclStmt = &ast.DeclStmt{} - NilDeferStmt = &ast.DeferStmt{} - NilEmptyStmt = &ast.EmptyStmt{} - NilExprStmt = &ast.ExprStmt{} - NilForStmt = &ast.ForStmt{} - NilGoStmt = &ast.GoStmt{} - NilIfStmt = &ast.IfStmt{} - NilIncDecStmt = &ast.IncDecStmt{} - NilLabeledStmt = &ast.LabeledStmt{} - NilRangeStmt = &ast.RangeStmt{} - NilReturnStmt = &ast.ReturnStmt{} - NilSelectStmt = &ast.SelectStmt{} - NilSendStmt = &ast.SendStmt{} - NilSwitchStmt = &ast.SwitchStmt{} - NilTypeSwitchStmt = &ast.TypeSwitchStmt{} - NilComment = &ast.Comment{} - NilCommentGroup = &ast.CommentGroup{} - NilFieldList = &ast.FieldList{} - NilFile = &ast.File{} - NilPackage = &ast.Package{} -) - -// ToArrayType returns x as a non-nil *ast.ArrayType. -// If ast.Node actually has such dynamic type, the result is -// identical to normal type assertion. In case if it has -// different type, the returned value is NilArrayType. -func ToArrayType(x ast.Node) *ast.ArrayType { - if x, ok := x.(*ast.ArrayType); ok { - return x - } - return NilArrayType -} - -// ToBadExpr returns x as a non-nil *ast.BadExpr. -// If ast.Node actually has such dynamic type, the result is -// identical to normal type assertion. In case if it has -// different type, the returned value is NilBadExpr. -func ToBadExpr(x ast.Node) *ast.BadExpr { - if x, ok := x.(*ast.BadExpr); ok { - return x - } - return NilBadExpr -} - -// ToBasicLit returns x as a non-nil *ast.BasicLit. -// If ast.Node actually has such dynamic type, the result is -// identical to normal type assertion. In case if it has -// different type, the returned value is NilBasicLit. -func ToBasicLit(x ast.Node) *ast.BasicLit { - if x, ok := x.(*ast.BasicLit); ok { - return x - } - return NilBasicLit -} - -// ToBinaryExpr returns x as a non-nil *ast.BinaryExpr. -// If ast.Node actually has such dynamic type, the result is -// identical to normal type assertion. In case if it has -// different type, the returned value is NilBinaryExpr. -func ToBinaryExpr(x ast.Node) *ast.BinaryExpr { - if x, ok := x.(*ast.BinaryExpr); ok { - return x - } - return NilBinaryExpr -} - -// ToCallExpr returns x as a non-nil *ast.CallExpr. -// If ast.Node actually has such dynamic type, the result is -// identical to normal type assertion. In case if it has -// different type, the returned value is NilCallExpr. -func ToCallExpr(x ast.Node) *ast.CallExpr { - if x, ok := x.(*ast.CallExpr); ok { - return x - } - return NilCallExpr -} - -// ToChanType returns x as a non-nil *ast.ChanType. 
-// If ast.Node actually has such dynamic type, the result is -// identical to normal type assertion. In case if it has -// different type, the returned value is NilChanType. -func ToChanType(x ast.Node) *ast.ChanType { - if x, ok := x.(*ast.ChanType); ok { - return x - } - return NilChanType -} - -// ToCompositeLit returns x as a non-nil *ast.CompositeLit. -// If ast.Node actually has such dynamic type, the result is -// identical to normal type assertion. In case if it has -// different type, the returned value is NilCompositeLit. -func ToCompositeLit(x ast.Node) *ast.CompositeLit { - if x, ok := x.(*ast.CompositeLit); ok { - return x - } - return NilCompositeLit -} - -// ToEllipsis returns x as a non-nil *ast.Ellipsis. -// If ast.Node actually has such dynamic type, the result is -// identical to normal type assertion. In case if it has -// different type, the returned value is NilEllipsis. -func ToEllipsis(x ast.Node) *ast.Ellipsis { - if x, ok := x.(*ast.Ellipsis); ok { - return x - } - return NilEllipsis -} - -// ToFuncLit returns x as a non-nil *ast.FuncLit. -// If ast.Node actually has such dynamic type, the result is -// identical to normal type assertion. In case if it has -// different type, the returned value is NilFuncLit. -func ToFuncLit(x ast.Node) *ast.FuncLit { - if x, ok := x.(*ast.FuncLit); ok { - return x - } - return NilFuncLit -} - -// ToFuncType returns x as a non-nil *ast.FuncType. -// If ast.Node actually has such dynamic type, the result is -// identical to normal type assertion. In case if it has -// different type, the returned value is NilFuncType. -func ToFuncType(x ast.Node) *ast.FuncType { - if x, ok := x.(*ast.FuncType); ok { - return x - } - return NilFuncType -} - -// ToIdent returns x as a non-nil *ast.Ident. -// If ast.Node actually has such dynamic type, the result is -// identical to normal type assertion. In case if it has -// different type, the returned value is NilIdent. -func ToIdent(x ast.Node) *ast.Ident { - if x, ok := x.(*ast.Ident); ok { - return x - } - return NilIdent -} - -// ToIndexExpr returns x as a non-nil *ast.IndexExpr. -// If ast.Node actually has such dynamic type, the result is -// identical to normal type assertion. In case if it has -// different type, the returned value is NilIndexExpr. -func ToIndexExpr(x ast.Node) *ast.IndexExpr { - if x, ok := x.(*ast.IndexExpr); ok { - return x - } - return NilIndexExpr -} - -// ToInterfaceType returns x as a non-nil *ast.InterfaceType. -// If ast.Node actually has such dynamic type, the result is -// identical to normal type assertion. In case if it has -// different type, the returned value is NilInterfaceType. -func ToInterfaceType(x ast.Node) *ast.InterfaceType { - if x, ok := x.(*ast.InterfaceType); ok { - return x - } - return NilInterfaceType -} - -// ToKeyValueExpr returns x as a non-nil *ast.KeyValueExpr. -// If ast.Node actually has such dynamic type, the result is -// identical to normal type assertion. In case if it has -// different type, the returned value is NilKeyValueExpr. -func ToKeyValueExpr(x ast.Node) *ast.KeyValueExpr { - if x, ok := x.(*ast.KeyValueExpr); ok { - return x - } - return NilKeyValueExpr -} - -// ToMapType returns x as a non-nil *ast.MapType. -// If ast.Node actually has such dynamic type, the result is -// identical to normal type assertion. In case if it has -// different type, the returned value is NilMapType. 
-func ToMapType(x ast.Node) *ast.MapType { - if x, ok := x.(*ast.MapType); ok { - return x - } - return NilMapType -} - -// ToParenExpr returns x as a non-nil *ast.ParenExpr. -// If ast.Node actually has such dynamic type, the result is -// identical to normal type assertion. In case if it has -// different type, the returned value is NilParenExpr. -func ToParenExpr(x ast.Node) *ast.ParenExpr { - if x, ok := x.(*ast.ParenExpr); ok { - return x - } - return NilParenExpr -} - -// ToSelectorExpr returns x as a non-nil *ast.SelectorExpr. -// If ast.Node actually has such dynamic type, the result is -// identical to normal type assertion. In case if it has -// different type, the returned value is NilSelectorExpr. -func ToSelectorExpr(x ast.Node) *ast.SelectorExpr { - if x, ok := x.(*ast.SelectorExpr); ok { - return x - } - return NilSelectorExpr -} - -// ToSliceExpr returns x as a non-nil *ast.SliceExpr. -// If ast.Node actually has such dynamic type, the result is -// identical to normal type assertion. In case if it has -// different type, the returned value is NilSliceExpr. -func ToSliceExpr(x ast.Node) *ast.SliceExpr { - if x, ok := x.(*ast.SliceExpr); ok { - return x - } - return NilSliceExpr -} - -// ToStarExpr returns x as a non-nil *ast.StarExpr. -// If ast.Node actually has such dynamic type, the result is -// identical to normal type assertion. In case if it has -// different type, the returned value is NilStarExpr. -func ToStarExpr(x ast.Node) *ast.StarExpr { - if x, ok := x.(*ast.StarExpr); ok { - return x - } - return NilStarExpr -} - -// ToStructType returns x as a non-nil *ast.StructType. -// If ast.Node actually has such dynamic type, the result is -// identical to normal type assertion. In case if it has -// different type, the returned value is NilStructType. -func ToStructType(x ast.Node) *ast.StructType { - if x, ok := x.(*ast.StructType); ok { - return x - } - return NilStructType -} - -// ToTypeAssertExpr returns x as a non-nil *ast.TypeAssertExpr. -// If ast.Node actually has such dynamic type, the result is -// identical to normal type assertion. In case if it has -// different type, the returned value is NilTypeAssertExpr. -func ToTypeAssertExpr(x ast.Node) *ast.TypeAssertExpr { - if x, ok := x.(*ast.TypeAssertExpr); ok { - return x - } - return NilTypeAssertExpr -} - -// ToUnaryExpr returns x as a non-nil *ast.UnaryExpr. -// If ast.Node actually has such dynamic type, the result is -// identical to normal type assertion. In case if it has -// different type, the returned value is NilUnaryExpr. -func ToUnaryExpr(x ast.Node) *ast.UnaryExpr { - if x, ok := x.(*ast.UnaryExpr); ok { - return x - } - return NilUnaryExpr -} - -// ToAssignStmt returns x as a non-nil *ast.AssignStmt. -// If ast.Node actually has such dynamic type, the result is -// identical to normal type assertion. In case if it has -// different type, the returned value is NilAssignStmt. -func ToAssignStmt(x ast.Node) *ast.AssignStmt { - if x, ok := x.(*ast.AssignStmt); ok { - return x - } - return NilAssignStmt -} - -// ToBadStmt returns x as a non-nil *ast.BadStmt. -// If ast.Node actually has such dynamic type, the result is -// identical to normal type assertion. In case if it has -// different type, the returned value is NilBadStmt. -func ToBadStmt(x ast.Node) *ast.BadStmt { - if x, ok := x.(*ast.BadStmt); ok { - return x - } - return NilBadStmt -} - -// ToBlockStmt returns x as a non-nil *ast.BlockStmt. 
-// If ast.Node actually has such dynamic type, the result is -// identical to normal type assertion. In case if it has -// different type, the returned value is NilBlockStmt. -func ToBlockStmt(x ast.Node) *ast.BlockStmt { - if x, ok := x.(*ast.BlockStmt); ok { - return x - } - return NilBlockStmt -} - -// ToBranchStmt returns x as a non-nil *ast.BranchStmt. -// If ast.Node actually has such dynamic type, the result is -// identical to normal type assertion. In case if it has -// different type, the returned value is NilBranchStmt. -func ToBranchStmt(x ast.Node) *ast.BranchStmt { - if x, ok := x.(*ast.BranchStmt); ok { - return x - } - return NilBranchStmt -} - -// ToCaseClause returns x as a non-nil *ast.CaseClause. -// If ast.Node actually has such dynamic type, the result is -// identical to normal type assertion. In case if it has -// different type, the returned value is NilCaseClause. -func ToCaseClause(x ast.Node) *ast.CaseClause { - if x, ok := x.(*ast.CaseClause); ok { - return x - } - return NilCaseClause -} - -// ToCommClause returns x as a non-nil *ast.CommClause. -// If ast.Node actually has such dynamic type, the result is -// identical to normal type assertion. In case if it has -// different type, the returned value is NilCommClause. -func ToCommClause(x ast.Node) *ast.CommClause { - if x, ok := x.(*ast.CommClause); ok { - return x - } - return NilCommClause -} - -// ToDeclStmt returns x as a non-nil *ast.DeclStmt. -// If ast.Node actually has such dynamic type, the result is -// identical to normal type assertion. In case if it has -// different type, the returned value is NilDeclStmt. -func ToDeclStmt(x ast.Node) *ast.DeclStmt { - if x, ok := x.(*ast.DeclStmt); ok { - return x - } - return NilDeclStmt -} - -// ToDeferStmt returns x as a non-nil *ast.DeferStmt. -// If ast.Node actually has such dynamic type, the result is -// identical to normal type assertion. In case if it has -// different type, the returned value is NilDeferStmt. -func ToDeferStmt(x ast.Node) *ast.DeferStmt { - if x, ok := x.(*ast.DeferStmt); ok { - return x - } - return NilDeferStmt -} - -// ToEmptyStmt returns x as a non-nil *ast.EmptyStmt. -// If ast.Node actually has such dynamic type, the result is -// identical to normal type assertion. In case if it has -// different type, the returned value is NilEmptyStmt. -func ToEmptyStmt(x ast.Node) *ast.EmptyStmt { - if x, ok := x.(*ast.EmptyStmt); ok { - return x - } - return NilEmptyStmt -} - -// ToExprStmt returns x as a non-nil *ast.ExprStmt. -// If ast.Node actually has such dynamic type, the result is -// identical to normal type assertion. In case if it has -// different type, the returned value is NilExprStmt. -func ToExprStmt(x ast.Node) *ast.ExprStmt { - if x, ok := x.(*ast.ExprStmt); ok { - return x - } - return NilExprStmt -} - -// ToForStmt returns x as a non-nil *ast.ForStmt. -// If ast.Node actually has such dynamic type, the result is -// identical to normal type assertion. In case if it has -// different type, the returned value is NilForStmt. -func ToForStmt(x ast.Node) *ast.ForStmt { - if x, ok := x.(*ast.ForStmt); ok { - return x - } - return NilForStmt -} - -// ToGoStmt returns x as a non-nil *ast.GoStmt. -// If ast.Node actually has such dynamic type, the result is -// identical to normal type assertion. In case if it has -// different type, the returned value is NilGoStmt. 
-func ToGoStmt(x ast.Node) *ast.GoStmt { - if x, ok := x.(*ast.GoStmt); ok { - return x - } - return NilGoStmt -} - -// ToIfStmt returns x as a non-nil *ast.IfStmt. -// If ast.Node actually has such dynamic type, the result is -// identical to normal type assertion. In case if it has -// different type, the returned value is NilIfStmt. -func ToIfStmt(x ast.Node) *ast.IfStmt { - if x, ok := x.(*ast.IfStmt); ok { - return x - } - return NilIfStmt -} - -// ToIncDecStmt returns x as a non-nil *ast.IncDecStmt. -// If ast.Node actually has such dynamic type, the result is -// identical to normal type assertion. In case if it has -// different type, the returned value is NilIncDecStmt. -func ToIncDecStmt(x ast.Node) *ast.IncDecStmt { - if x, ok := x.(*ast.IncDecStmt); ok { - return x - } - return NilIncDecStmt -} - -// ToLabeledStmt returns x as a non-nil *ast.LabeledStmt. -// If ast.Node actually has such dynamic type, the result is -// identical to normal type assertion. In case if it has -// different type, the returned value is NilLabeledStmt. -func ToLabeledStmt(x ast.Node) *ast.LabeledStmt { - if x, ok := x.(*ast.LabeledStmt); ok { - return x - } - return NilLabeledStmt -} - -// ToRangeStmt returns x as a non-nil *ast.RangeStmt. -// If ast.Node actually has such dynamic type, the result is -// identical to normal type assertion. In case if it has -// different type, the returned value is NilRangeStmt. -func ToRangeStmt(x ast.Node) *ast.RangeStmt { - if x, ok := x.(*ast.RangeStmt); ok { - return x - } - return NilRangeStmt -} - -// ToReturnStmt returns x as a non-nil *ast.ReturnStmt. -// If ast.Node actually has such dynamic type, the result is -// identical to normal type assertion. In case if it has -// different type, the returned value is NilReturnStmt. -func ToReturnStmt(x ast.Node) *ast.ReturnStmt { - if x, ok := x.(*ast.ReturnStmt); ok { - return x - } - return NilReturnStmt -} - -// ToSelectStmt returns x as a non-nil *ast.SelectStmt. -// If ast.Node actually has such dynamic type, the result is -// identical to normal type assertion. In case if it has -// different type, the returned value is NilSelectStmt. -func ToSelectStmt(x ast.Node) *ast.SelectStmt { - if x, ok := x.(*ast.SelectStmt); ok { - return x - } - return NilSelectStmt -} - -// ToSendStmt returns x as a non-nil *ast.SendStmt. -// If ast.Node actually has such dynamic type, the result is -// identical to normal type assertion. In case if it has -// different type, the returned value is NilSendStmt. -func ToSendStmt(x ast.Node) *ast.SendStmt { - if x, ok := x.(*ast.SendStmt); ok { - return x - } - return NilSendStmt -} - -// ToSwitchStmt returns x as a non-nil *ast.SwitchStmt. -// If ast.Node actually has such dynamic type, the result is -// identical to normal type assertion. In case if it has -// different type, the returned value is NilSwitchStmt. -func ToSwitchStmt(x ast.Node) *ast.SwitchStmt { - if x, ok := x.(*ast.SwitchStmt); ok { - return x - } - return NilSwitchStmt -} - -// ToTypeSwitchStmt returns x as a non-nil *ast.TypeSwitchStmt. -// If ast.Node actually has such dynamic type, the result is -// identical to normal type assertion. In case if it has -// different type, the returned value is NilTypeSwitchStmt. -func ToTypeSwitchStmt(x ast.Node) *ast.TypeSwitchStmt { - if x, ok := x.(*ast.TypeSwitchStmt); ok { - return x - } - return NilTypeSwitchStmt -} - -// ToComment returns x as a non-nil *ast.Comment. -// If ast.Node actually has such dynamic type, the result is -// identical to normal type assertion. 
In case if it has -// different type, the returned value is NilComment. -func ToComment(x ast.Node) *ast.Comment { - if x, ok := x.(*ast.Comment); ok { - return x - } - return NilComment -} - -// ToCommentGroup returns x as a non-nil *ast.CommentGroup. -// If ast.Node actually has such dynamic type, the result is -// identical to normal type assertion. In case if it has -// different type, the returned value is NilCommentGroup. -func ToCommentGroup(x ast.Node) *ast.CommentGroup { - if x, ok := x.(*ast.CommentGroup); ok { - return x - } - return NilCommentGroup -} - -// ToFieldList returns x as a non-nil *ast.FieldList. -// If ast.Node actually has such dynamic type, the result is -// identical to normal type assertion. In case if it has -// different type, the returned value is NilFieldList. -func ToFieldList(x ast.Node) *ast.FieldList { - if x, ok := x.(*ast.FieldList); ok { - return x - } - return NilFieldList -} - -// ToFile returns x as a non-nil *ast.File. -// If ast.Node actually has such dynamic type, the result is -// identical to normal type assertion. In case if it has -// different type, the returned value is NilFile. -func ToFile(x ast.Node) *ast.File { - if x, ok := x.(*ast.File); ok { - return x - } - return NilFile -} - -// ToPackage returns x as a non-nil *ast.Package. -// If ast.Node actually has such dynamic type, the result is -// identical to normal type assertion. In case if it has -// different type, the returned value is NilPackage. -func ToPackage(x ast.Node) *ast.Package { - if x, ok := x.(*ast.Package); ok { - return x - } - return NilPackage -} diff --git a/vendor/github.com/go-toolsmith/astcopy/LICENSE b/vendor/github.com/go-toolsmith/astcopy/LICENSE deleted file mode 100644 index eef17180f..000000000 --- a/vendor/github.com/go-toolsmith/astcopy/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -MIT License - -Copyright (c) 2018 go-toolsmith - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. diff --git a/vendor/github.com/go-toolsmith/astcopy/README.md b/vendor/github.com/go-toolsmith/astcopy/README.md deleted file mode 100644 index 7adc66525..000000000 --- a/vendor/github.com/go-toolsmith/astcopy/README.md +++ /dev/null @@ -1,57 +0,0 @@ -# astcopy - -[![build-img]][build-url] -[![pkg-img]][pkg-url] -[![reportcard-img]][reportcard-url] -[![version-img]][version-url] - -Package `astcopy` implements Go AST reflection-free deep copy operations. 
- -## Installation: - -Go version 1.16+ - -```bash -go get github.com/go-toolsmith/astcopy -``` - -## Example - -```go -package main - -import ( - "fmt" - "go/ast" - "go/token" - - "github.com/go-toolsmith/astcopy" - "github.com/go-toolsmith/astequal" - "github.com/go-toolsmith/strparse" -) - -func main() { - x := strparse.Expr(`1 + 2`).(*ast.BinaryExpr) - y := astcopy.BinaryExpr(x) - fmt.Println(astequal.Expr(x, y)) // => true - - // Now modify x and make sure y is not modified. - z := astcopy.BinaryExpr(y) - x.Op = token.SUB - fmt.Println(astequal.Expr(y, z)) // => true - fmt.Println(astequal.Expr(x, y)) // => false -} -``` - -## License - -[MIT License](LICENSE). - -[build-img]: https://github.com/go-toolsmith/astp/workflows/build/badge.svg -[build-url]: https://github.com/go-toolsmith/astp/actions -[pkg-img]: https://pkg.go.dev/badge/go-toolsmith/astp -[pkg-url]: https://pkg.go.dev/github.com/go-toolsmith/astp -[reportcard-img]: https://goreportcard.com/badge/go-toolsmith/astp -[reportcard-url]: https://goreportcard.com/report/go-toolsmith/astp -[version-img]: https://img.shields.io/github/v/release/go-toolsmith/astp -[version-url]: https://github.com/go-toolsmith/astp/releases diff --git a/vendor/github.com/go-toolsmith/astcopy/astcopy.go b/vendor/github.com/go-toolsmith/astcopy/astcopy.go deleted file mode 100644 index 72bc58ce6..000000000 --- a/vendor/github.com/go-toolsmith/astcopy/astcopy.go +++ /dev/null @@ -1,945 +0,0 @@ -// Package astcopy implements Go AST reflection-free deep copy operations. -package astcopy - -import ( - "go/ast" - - "golang.org/x/exp/typeparams" -) - -// Node returns x node deep copy. -// Copy of nil argument is nil. -func Node(x ast.Node) ast.Node { - return copyNode(x) -} - -// NodeList returns xs node slice deep copy. -// Copy of nil argument is nil. -func NodeList(xs []ast.Node) []ast.Node { - if xs == nil { - return nil - } - cp := make([]ast.Node, len(xs)) - for i := range xs { - cp[i] = copyNode(xs[i]) - } - return cp -} - -// Expr returns x expression deep copy. -// Copy of nil argument is nil. -func Expr(x ast.Expr) ast.Expr { - return copyExpr(x) -} - -// ExprList returns xs expression slice deep copy. -// Copy of nil argument is nil. -func ExprList(xs []ast.Expr) []ast.Expr { - if xs == nil { - return nil - } - cp := make([]ast.Expr, len(xs)) - for i := range xs { - cp[i] = copyExpr(xs[i]) - } - return cp -} - -// Stmt returns x statement deep copy. -// Copy of nil argument is nil. -func Stmt(x ast.Stmt) ast.Stmt { - return copyStmt(x) -} - -// StmtList returns xs statement slice deep copy. -// Copy of nil argument is nil. -func StmtList(xs []ast.Stmt) []ast.Stmt { - if xs == nil { - return nil - } - cp := make([]ast.Stmt, len(xs)) - for i := range xs { - cp[i] = copyStmt(xs[i]) - } - return cp -} - -// Decl returns x declaration deep copy. -// Copy of nil argument is nil. -func Decl(x ast.Decl) ast.Decl { - return copyDecl(x) -} - -// DeclList returns xs declaration slice deep copy. -// Copy of nil argument is nil. -func DeclList(xs []ast.Decl) []ast.Decl { - if xs == nil { - return nil - } - cp := make([]ast.Decl, len(xs)) - for i := range xs { - cp[i] = copyDecl(xs[i]) - } - return cp -} - -// BadExpr returns x deep copy. -// Copy of nil argument is nil. -func BadExpr(x *ast.BadExpr) *ast.BadExpr { - if x == nil { - return nil - } - cp := *x - return &cp -} - -// Ident returns x deep copy. -// Copy of nil argument is nil. 
-func Ident(x *ast.Ident) *ast.Ident { - if x == nil { - return nil - } - cp := *x - return &cp -} - -// IdentList returns xs identifier slice deep copy. -// Copy of nil argument is nil. -func IdentList(xs []*ast.Ident) []*ast.Ident { - if xs == nil { - return nil - } - cp := make([]*ast.Ident, len(xs)) - for i := range xs { - cp[i] = Ident(xs[i]) - } - return cp -} - -// Ellipsis returns x deep copy. -// Copy of nil argument is nil. -func Ellipsis(x *ast.Ellipsis) *ast.Ellipsis { - if x == nil { - return nil - } - cp := *x - cp.Elt = copyExpr(x.Elt) - return &cp -} - -// BasicLit returns x deep copy. -// Copy of nil argument is nil. -func BasicLit(x *ast.BasicLit) *ast.BasicLit { - if x == nil { - return nil - } - cp := *x - return &cp -} - -// FuncLit returns x deep copy. -// Copy of nil argument is nil. -func FuncLit(x *ast.FuncLit) *ast.FuncLit { - if x == nil { - return nil - } - cp := *x - cp.Type = FuncType(x.Type) - cp.Body = BlockStmt(x.Body) - return &cp -} - -// CompositeLit returns x deep copy. -// Copy of nil argument is nil. -func CompositeLit(x *ast.CompositeLit) *ast.CompositeLit { - if x == nil { - return nil - } - cp := *x - cp.Type = copyExpr(x.Type) - cp.Elts = ExprList(x.Elts) - return &cp -} - -// ParenExpr returns x deep copy. -// Copy of nil argument is nil. -func ParenExpr(x *ast.ParenExpr) *ast.ParenExpr { - if x == nil { - return nil - } - cp := *x - cp.X = copyExpr(x.X) - return &cp -} - -// SelectorExpr returns x deep copy. -// Copy of nil argument is nil. -func SelectorExpr(x *ast.SelectorExpr) *ast.SelectorExpr { - if x == nil { - return nil - } - cp := *x - cp.X = copyExpr(x.X) - cp.Sel = Ident(x.Sel) - return &cp -} - -// IndexExpr returns x deep copy. -// Copy of nil argument is nil. -func IndexExpr(x *ast.IndexExpr) *ast.IndexExpr { - if x == nil { - return nil - } - cp := *x - cp.X = copyExpr(x.X) - cp.Index = copyExpr(x.Index) - return &cp -} - -// IndexListExpr returns x deep copy. -// Copy of nil argument is nil. -func IndexListExpr(x *typeparams.IndexListExpr) *typeparams.IndexListExpr { - if x == nil { - return nil - } - cp := *x - cp.X = copyExpr(x.X) - cp.Indices = ExprList(x.Indices) - return &cp -} - -// SliceExpr returns x deep copy. -// Copy of nil argument is nil. -func SliceExpr(x *ast.SliceExpr) *ast.SliceExpr { - if x == nil { - return nil - } - cp := *x - cp.X = copyExpr(x.X) - cp.Low = copyExpr(x.Low) - cp.High = copyExpr(x.High) - cp.Max = copyExpr(x.Max) - return &cp -} - -// TypeAssertExpr returns x deep copy. -// Copy of nil argument is nil. -func TypeAssertExpr(x *ast.TypeAssertExpr) *ast.TypeAssertExpr { - if x == nil { - return nil - } - cp := *x - cp.X = copyExpr(x.X) - cp.Type = copyExpr(x.Type) - return &cp -} - -// CallExpr returns x deep copy. -// Copy of nil argument is nil. -func CallExpr(x *ast.CallExpr) *ast.CallExpr { - if x == nil { - return nil - } - cp := *x - cp.Fun = copyExpr(x.Fun) - cp.Args = ExprList(x.Args) - return &cp -} - -// StarExpr returns x deep copy. -// Copy of nil argument is nil. -func StarExpr(x *ast.StarExpr) *ast.StarExpr { - if x == nil { - return nil - } - cp := *x - cp.X = copyExpr(x.X) - return &cp -} - -// UnaryExpr returns x deep copy. -// Copy of nil argument is nil. -func UnaryExpr(x *ast.UnaryExpr) *ast.UnaryExpr { - if x == nil { - return nil - } - cp := *x - cp.X = copyExpr(x.X) - return &cp -} - -// BinaryExpr returns x deep copy. -// Copy of nil argument is nil. 
-func BinaryExpr(x *ast.BinaryExpr) *ast.BinaryExpr { - if x == nil { - return nil - } - cp := *x - cp.X = copyExpr(x.X) - cp.Y = copyExpr(x.Y) - return &cp -} - -// KeyValueExpr returns x deep copy. -// Copy of nil argument is nil. -func KeyValueExpr(x *ast.KeyValueExpr) *ast.KeyValueExpr { - if x == nil { - return nil - } - cp := *x - cp.Key = copyExpr(x.Key) - cp.Value = copyExpr(x.Value) - return &cp -} - -// ArrayType returns x deep copy. -// Copy of nil argument is nil. -func ArrayType(x *ast.ArrayType) *ast.ArrayType { - if x == nil { - return nil - } - cp := *x - cp.Len = copyExpr(x.Len) - cp.Elt = copyExpr(x.Elt) - return &cp -} - -// StructType returns x deep copy. -// Copy of nil argument is nil. -func StructType(x *ast.StructType) *ast.StructType { - if x == nil { - return nil - } - cp := *x - cp.Fields = FieldList(x.Fields) - return &cp -} - -// Field returns x deep copy. -// Copy of nil argument is nil. -func Field(x *ast.Field) *ast.Field { - if x == nil { - return nil - } - cp := *x - cp.Names = IdentList(x.Names) - cp.Type = copyExpr(x.Type) - cp.Tag = BasicLit(x.Tag) - cp.Doc = CommentGroup(x.Doc) - cp.Comment = CommentGroup(x.Comment) - return &cp -} - -// FieldList returns x deep copy. -// Copy of nil argument is nil. -func FieldList(x *ast.FieldList) *ast.FieldList { - if x == nil { - return nil - } - cp := *x - if x.List != nil { - cp.List = make([]*ast.Field, len(x.List)) - for i := range x.List { - cp.List[i] = Field(x.List[i]) - } - } - return &cp -} - -// InterfaceType returns x deep copy. -// Copy of nil argument is nil. -func InterfaceType(x *ast.InterfaceType) *ast.InterfaceType { - if x == nil { - return nil - } - cp := *x - cp.Methods = FieldList(x.Methods) - return &cp -} - -// MapType returns x deep copy. -// Copy of nil argument is nil. -func MapType(x *ast.MapType) *ast.MapType { - if x == nil { - return nil - } - cp := *x - cp.Key = copyExpr(x.Key) - cp.Value = copyExpr(x.Value) - return &cp -} - -// ChanType returns x deep copy. -// Copy of nil argument is nil. -func ChanType(x *ast.ChanType) *ast.ChanType { - if x == nil { - return nil - } - cp := *x - cp.Value = copyExpr(x.Value) - return &cp -} - -// BlockStmt returns x deep copy. -// Copy of nil argument is nil. -func BlockStmt(x *ast.BlockStmt) *ast.BlockStmt { - if x == nil { - return nil - } - cp := *x - cp.List = StmtList(x.List) - return &cp -} - -// ImportSpec returns x deep copy. -// Copy of nil argument is nil. -func ImportSpec(x *ast.ImportSpec) *ast.ImportSpec { - if x == nil { - return nil - } - cp := *x - cp.Name = Ident(x.Name) - cp.Path = BasicLit(x.Path) - cp.Doc = CommentGroup(x.Doc) - cp.Comment = CommentGroup(x.Comment) - return &cp -} - -// ValueSpec returns x deep copy. -// Copy of nil argument is nil. -func ValueSpec(x *ast.ValueSpec) *ast.ValueSpec { - if x == nil { - return nil - } - cp := *x - cp.Names = IdentList(x.Names) - cp.Values = ExprList(x.Values) - cp.Type = copyExpr(x.Type) - cp.Doc = CommentGroup(x.Doc) - cp.Comment = CommentGroup(x.Comment) - return &cp -} - -// Spec returns x deep copy. -// Copy of nil argument is nil. -func Spec(x ast.Spec) ast.Spec { - if x == nil { - return nil - } - - switch x := x.(type) { - case *ast.ImportSpec: - return ImportSpec(x) - case *ast.ValueSpec: - return ValueSpec(x) - case *ast.TypeSpec: - return TypeSpec(x) - default: - panic("unhandled spec") - } -} - -// SpecList returns xs spec slice deep copy. -// Copy of nil argument is nil. 
-func SpecList(xs []ast.Spec) []ast.Spec { - if xs == nil { - return nil - } - cp := make([]ast.Spec, len(xs)) - for i := range xs { - cp[i] = Spec(xs[i]) - } - return cp -} - -// BadStmt returns x deep copy. -// Copy of nil argument is nil. -func BadStmt(x *ast.BadStmt) *ast.BadStmt { - if x == nil { - return nil - } - cp := *x - return &cp -} - -// DeclStmt returns x deep copy. -// Copy of nil argument is nil. -func DeclStmt(x *ast.DeclStmt) *ast.DeclStmt { - if x == nil { - return nil - } - cp := *x - cp.Decl = copyDecl(x.Decl) - return &cp -} - -// EmptyStmt returns x deep copy. -// Copy of nil argument is nil. -func EmptyStmt(x *ast.EmptyStmt) *ast.EmptyStmt { - if x == nil { - return nil - } - cp := *x - return &cp -} - -// LabeledStmt returns x deep copy. -// Copy of nil argument is nil. -func LabeledStmt(x *ast.LabeledStmt) *ast.LabeledStmt { - if x == nil { - return nil - } - cp := *x - cp.Label = Ident(x.Label) - cp.Stmt = copyStmt(x.Stmt) - return &cp -} - -// ExprStmt returns x deep copy. -// Copy of nil argument is nil. -func ExprStmt(x *ast.ExprStmt) *ast.ExprStmt { - if x == nil { - return nil - } - cp := *x - cp.X = copyExpr(x.X) - return &cp -} - -// SendStmt returns x deep copy. -// Copy of nil argument is nil. -func SendStmt(x *ast.SendStmt) *ast.SendStmt { - if x == nil { - return nil - } - cp := *x - cp.Chan = copyExpr(x.Chan) - cp.Value = copyExpr(x.Value) - return &cp -} - -// IncDecStmt returns x deep copy. -// Copy of nil argument is nil. -func IncDecStmt(x *ast.IncDecStmt) *ast.IncDecStmt { - if x == nil { - return nil - } - cp := *x - cp.X = copyExpr(x.X) - return &cp -} - -// AssignStmt returns x deep copy. -// Copy of nil argument is nil. -func AssignStmt(x *ast.AssignStmt) *ast.AssignStmt { - if x == nil { - return nil - } - cp := *x - cp.Lhs = ExprList(x.Lhs) - cp.Rhs = ExprList(x.Rhs) - return &cp -} - -// GoStmt returns x deep copy. -// Copy of nil argument is nil. -func GoStmt(x *ast.GoStmt) *ast.GoStmt { - if x == nil { - return nil - } - cp := *x - cp.Call = CallExpr(x.Call) - return &cp -} - -// DeferStmt returns x deep copy. -// Copy of nil argument is nil. -func DeferStmt(x *ast.DeferStmt) *ast.DeferStmt { - if x == nil { - return nil - } - cp := *x - cp.Call = CallExpr(x.Call) - return &cp -} - -// ReturnStmt returns x deep copy. -// Copy of nil argument is nil. -func ReturnStmt(x *ast.ReturnStmt) *ast.ReturnStmt { - if x == nil { - return nil - } - cp := *x - cp.Results = ExprList(x.Results) - return &cp -} - -// BranchStmt returns x deep copy. -// Copy of nil argument is nil. -func BranchStmt(x *ast.BranchStmt) *ast.BranchStmt { - if x == nil { - return nil - } - cp := *x - cp.Label = Ident(x.Label) - return &cp -} - -// IfStmt returns x deep copy. -// Copy of nil argument is nil. -func IfStmt(x *ast.IfStmt) *ast.IfStmt { - if x == nil { - return nil - } - cp := *x - cp.Init = copyStmt(x.Init) - cp.Cond = copyExpr(x.Cond) - cp.Body = BlockStmt(x.Body) - cp.Else = copyStmt(x.Else) - return &cp -} - -// CaseClause returns x deep copy. -// Copy of nil argument is nil. -func CaseClause(x *ast.CaseClause) *ast.CaseClause { - if x == nil { - return nil - } - cp := *x - cp.List = ExprList(x.List) - cp.Body = StmtList(x.Body) - return &cp -} - -// SwitchStmt returns x deep copy. -// Copy of nil argument is nil. -func SwitchStmt(x *ast.SwitchStmt) *ast.SwitchStmt { - if x == nil { - return nil - } - cp := *x - cp.Init = copyStmt(x.Init) - cp.Tag = copyExpr(x.Tag) - cp.Body = BlockStmt(x.Body) - return &cp -} - -// TypeSwitchStmt returns x deep copy. 
-// Copy of nil argument is nil. -func TypeSwitchStmt(x *ast.TypeSwitchStmt) *ast.TypeSwitchStmt { - if x == nil { - return nil - } - cp := *x - cp.Init = copyStmt(x.Init) - cp.Assign = copyStmt(x.Assign) - cp.Body = BlockStmt(x.Body) - return &cp -} - -// CommClause returns x deep copy. -// Copy of nil argument is nil. -func CommClause(x *ast.CommClause) *ast.CommClause { - if x == nil { - return nil - } - cp := *x - cp.Comm = copyStmt(x.Comm) - cp.Body = StmtList(x.Body) - return &cp -} - -// SelectStmt returns x deep copy. -// Copy of nil argument is nil. -func SelectStmt(x *ast.SelectStmt) *ast.SelectStmt { - if x == nil { - return nil - } - cp := *x - cp.Body = BlockStmt(x.Body) - return &cp -} - -// ForStmt returns x deep copy. -// Copy of nil argument is nil. -func ForStmt(x *ast.ForStmt) *ast.ForStmt { - if x == nil { - return nil - } - cp := *x - cp.Init = copyStmt(x.Init) - cp.Cond = copyExpr(x.Cond) - cp.Post = copyStmt(x.Post) - cp.Body = BlockStmt(x.Body) - return &cp -} - -// RangeStmt returns x deep copy. -// Copy of nil argument is nil. -func RangeStmt(x *ast.RangeStmt) *ast.RangeStmt { - if x == nil { - return nil - } - cp := *x - cp.Key = copyExpr(x.Key) - cp.Value = copyExpr(x.Value) - cp.X = copyExpr(x.X) - cp.Body = BlockStmt(x.Body) - return &cp -} - -// Comment returns x deep copy. -// Copy of nil argument is nil. -func Comment(x *ast.Comment) *ast.Comment { - if x == nil { - return nil - } - cp := *x - return &cp -} - -// CommentGroup returns x deep copy. -// Copy of nil argument is nil. -func CommentGroup(x *ast.CommentGroup) *ast.CommentGroup { - if x == nil { - return nil - } - cp := *x - if x.List != nil { - cp.List = make([]*ast.Comment, len(x.List)) - for i := range x.List { - cp.List[i] = Comment(x.List[i]) - } - } - return &cp -} - -// File returns x deep copy. -// Copy of nil argument is nil. -func File(x *ast.File) *ast.File { - if x == nil { - return nil - } - cp := *x - cp.Doc = CommentGroup(x.Doc) - cp.Name = Ident(x.Name) - cp.Decls = DeclList(x.Decls) - cp.Imports = make([]*ast.ImportSpec, len(x.Imports)) - for i := range x.Imports { - cp.Imports[i] = ImportSpec(x.Imports[i]) - } - cp.Unresolved = IdentList(x.Unresolved) - cp.Comments = make([]*ast.CommentGroup, len(x.Comments)) - for i := range x.Comments { - cp.Comments[i] = CommentGroup(x.Comments[i]) - } - return &cp -} - -// Package returns x deep copy. -// Copy of nil argument is nil. -func Package(x *ast.Package) *ast.Package { - if x == nil { - return nil - } - cp := *x - cp.Files = make(map[string]*ast.File) - for filename, f := range x.Files { - cp.Files[filename] = f - } - return &cp -} - -// BadDecl returns x deep copy. -// Copy of nil argument is nil. -func BadDecl(x *ast.BadDecl) *ast.BadDecl { - if x == nil { - return nil - } - cp := *x - return &cp -} - -// GenDecl returns x deep copy. -// Copy of nil argument is nil. -func GenDecl(x *ast.GenDecl) *ast.GenDecl { - if x == nil { - return nil - } - cp := *x - cp.Specs = SpecList(x.Specs) - cp.Doc = CommentGroup(x.Doc) - return &cp -} - -// FuncDecl returns x deep copy. -// Copy of nil argument is nil. 
-func FuncDecl(x *ast.FuncDecl) *ast.FuncDecl { - if x == nil { - return nil - } - cp := *x - cp.Recv = FieldList(x.Recv) - cp.Name = Ident(x.Name) - cp.Type = FuncType(x.Type) - cp.Body = BlockStmt(x.Body) - cp.Doc = CommentGroup(x.Doc) - return &cp -} - -func copyNode(x ast.Node) ast.Node { - switch x := x.(type) { - case ast.Expr: - return copyExpr(x) - case ast.Stmt: - return copyStmt(x) - case ast.Decl: - return copyDecl(x) - - case ast.Spec: - return Spec(x) - case *ast.FieldList: - return FieldList(x) - case *ast.Comment: - return Comment(x) - case *ast.CommentGroup: - return CommentGroup(x) - case *ast.File: - return File(x) - case *ast.Package: - return Package(x) - - default: - panic("unhandled node") - } -} - -func copyExpr(x ast.Expr) ast.Expr { - if x == nil { - return nil - } - - switch x := x.(type) { - case *ast.BadExpr: - return BadExpr(x) - case *ast.Ident: - return Ident(x) - case *ast.Ellipsis: - return Ellipsis(x) - case *ast.BasicLit: - return BasicLit(x) - case *ast.FuncLit: - return FuncLit(x) - case *ast.CompositeLit: - return CompositeLit(x) - case *ast.ParenExpr: - return ParenExpr(x) - case *ast.SelectorExpr: - return SelectorExpr(x) - case *ast.IndexExpr: - return IndexExpr(x) - case *typeparams.IndexListExpr: - return IndexListExpr(x) - case *ast.SliceExpr: - return SliceExpr(x) - case *ast.TypeAssertExpr: - return TypeAssertExpr(x) - case *ast.CallExpr: - return CallExpr(x) - case *ast.StarExpr: - return StarExpr(x) - case *ast.UnaryExpr: - return UnaryExpr(x) - case *ast.BinaryExpr: - return BinaryExpr(x) - case *ast.KeyValueExpr: - return KeyValueExpr(x) - case *ast.ArrayType: - return ArrayType(x) - case *ast.StructType: - return StructType(x) - case *ast.FuncType: - return FuncType(x) - case *ast.InterfaceType: - return InterfaceType(x) - case *ast.MapType: - return MapType(x) - case *ast.ChanType: - return ChanType(x) - - default: - panic("unhandled expr") - } -} - -func copyStmt(x ast.Stmt) ast.Stmt { - if x == nil { - return nil - } - - switch x := x.(type) { - case *ast.BadStmt: - return BadStmt(x) - case *ast.DeclStmt: - return DeclStmt(x) - case *ast.EmptyStmt: - return EmptyStmt(x) - case *ast.LabeledStmt: - return LabeledStmt(x) - case *ast.ExprStmt: - return ExprStmt(x) - case *ast.SendStmt: - return SendStmt(x) - case *ast.IncDecStmt: - return IncDecStmt(x) - case *ast.AssignStmt: - return AssignStmt(x) - case *ast.GoStmt: - return GoStmt(x) - case *ast.DeferStmt: - return DeferStmt(x) - case *ast.ReturnStmt: - return ReturnStmt(x) - case *ast.BranchStmt: - return BranchStmt(x) - case *ast.BlockStmt: - return BlockStmt(x) - case *ast.IfStmt: - return IfStmt(x) - case *ast.CaseClause: - return CaseClause(x) - case *ast.SwitchStmt: - return SwitchStmt(x) - case *ast.TypeSwitchStmt: - return TypeSwitchStmt(x) - case *ast.CommClause: - return CommClause(x) - case *ast.SelectStmt: - return SelectStmt(x) - case *ast.ForStmt: - return ForStmt(x) - case *ast.RangeStmt: - return RangeStmt(x) - - default: - panic("unhandled stmt") - } -} - -func copyDecl(x ast.Decl) ast.Decl { - if x == nil { - return nil - } - - switch x := x.(type) { - case *ast.BadDecl: - return BadDecl(x) - case *ast.GenDecl: - return GenDecl(x) - case *ast.FuncDecl: - return FuncDecl(x) - - default: - panic("unhandled decl") - } -} diff --git a/vendor/github.com/go-toolsmith/astcopy/astcopy_go117.go b/vendor/github.com/go-toolsmith/astcopy/astcopy_go117.go deleted file mode 100644 index 1b748bae5..000000000 --- a/vendor/github.com/go-toolsmith/astcopy/astcopy_go117.go +++ /dev/null 
@@ -1,30 +0,0 @@ -//go:build !go1.18 -// +build !go1.18 - -package astcopy - -// FuncType returns x deep copy. -// Copy of nil argument is nil. -func FuncType(x *ast.FuncType) *ast.FuncType { - if x == nil { - return nil - } - cp := *x - cp.Params = FieldList(x.Params) - cp.Results = FieldList(x.Results) - return &cp -} - -// TypeSpec returns x deep copy. -// Copy of nil argument is nil. -func TypeSpec(x *ast.TypeSpec) *ast.TypeSpec { - if x == nil { - return nil - } - cp := *x - cp.Name = Ident(x.Name) - cp.Type = copyExpr(x.Type) - cp.Doc = CommentGroup(x.Doc) - cp.Comment = CommentGroup(x.Comment) - return &cp -} diff --git a/vendor/github.com/go-toolsmith/astcopy/astcopy_go118.go b/vendor/github.com/go-toolsmith/astcopy/astcopy_go118.go deleted file mode 100644 index 72f800acc..000000000 --- a/vendor/github.com/go-toolsmith/astcopy/astcopy_go118.go +++ /dev/null @@ -1,36 +0,0 @@ -//go:build go1.18 -// +build go1.18 - -package astcopy - -import ( - "go/ast" -) - -// FuncType returns x deep copy. -// Copy of nil argument is nil. -func FuncType(x *ast.FuncType) *ast.FuncType { - if x == nil { - return nil - } - cp := *x - cp.Params = FieldList(x.Params) - cp.Results = FieldList(x.Results) - cp.TypeParams = FieldList(x.TypeParams) - return &cp -} - -// TypeSpec returns x deep copy. -// Copy of nil argument is nil. -func TypeSpec(x *ast.TypeSpec) *ast.TypeSpec { - if x == nil { - return nil - } - cp := *x - cp.Name = Ident(x.Name) - cp.Type = copyExpr(x.Type) - cp.Doc = CommentGroup(x.Doc) - cp.Comment = CommentGroup(x.Comment) - cp.TypeParams = FieldList(x.TypeParams) - return &cp -} diff --git a/vendor/github.com/go-toolsmith/astequal/.gitignore b/vendor/github.com/go-toolsmith/astequal/.gitignore deleted file mode 100644 index f38c2b852..000000000 --- a/vendor/github.com/go-toolsmith/astequal/.gitignore +++ /dev/null @@ -1,5 +0,0 @@ -bin -pkg -src/main -tmp - diff --git a/vendor/github.com/go-toolsmith/astequal/LICENSE b/vendor/github.com/go-toolsmith/astequal/LICENSE deleted file mode 100644 index 717f894f5..000000000 --- a/vendor/github.com/go-toolsmith/astequal/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -MIT License - -Copyright (c) 2017 Iskander Sharipov / Quasilyte - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. 
diff --git a/vendor/github.com/go-toolsmith/astequal/README.md b/vendor/github.com/go-toolsmith/astequal/README.md deleted file mode 100644 index db5e3a8c9..000000000 --- a/vendor/github.com/go-toolsmith/astequal/README.md +++ /dev/null @@ -1,82 +0,0 @@ -# astequal - -[![build-img]][build-url] -[![pkg-img]][pkg-url] -[![reportcard-img]][reportcard-url] -[![version-img]][version-url] - -Package `astequal` provides AST (deep) equallity check operations. - -## Installation: - -Go version 1.16+ - -```bash -go get github.com/go-toolsmith/astequal -``` - -## Example - -```go -package main - -import ( - "fmt" - "go/ast" - "go/parser" - "go/token" - "log" - "reflect" - - "github.com/go-toolsmith/astequal" -) - -func main() { - const code = ` - package foo - - func main() { - x := []int{1, 2, 3} - x := []int{1, 2, 3} - }` - - fset := token.NewFileSet() - pkg, err := parser.ParseFile(fset, "string", code, 0) - if err != nil { - log.Fatalf("parse error: %+v", err) - } - - fn := pkg.Decls[0].(*ast.FuncDecl) - x := fn.Body.List[0] - y := fn.Body.List[1] - - // Reflect DeepEqual will fail due to different Pos values. - // astequal only checks whether two nodes describe AST. - fmt.Println(reflect.DeepEqual(x, y)) // => false - fmt.Println(astequal.Node(x, y)) // => true - fmt.Println(astequal.Stmt(x, y)) // => true -} -``` - -## Performance - -`astequal` outperforms reflection-based comparison by a big margin: - -``` -BenchmarkEqualExpr/astequal.Expr-8 5000000 298 ns/op 0 B/op 0 allocs/op -BenchmarkEqualExpr/astequal.Node-8 3000000 409 ns/op 0 B/op 0 allocs/op -BenchmarkEqualExpr/reflect.DeepEqual-8 50000 38898 ns/op 10185 B/op 156 allocs/op -``` - -## License - -[MIT License](LICENSE). - -[build-img]: https://github.com/go-toolsmith/astequal/workflows/build/badge.svg -[build-url]: https://github.com/go-toolsmith/astequal/actions -[pkg-img]: https://pkg.go.dev/badge/go-toolsmith/astequal -[pkg-url]: https://pkg.go.dev/github.com/go-toolsmith/astequal -[reportcard-img]: https://goreportcard.com/badge/go-toolsmith/astequal -[reportcard-url]: https://goreportcard.com/report/go-toolsmith/astequal -[version-img]: https://img.shields.io/github/v/release/go-toolsmith/astequal -[version-url]: https://github.com/go-toolsmith/astequal/releases diff --git a/vendor/github.com/go-toolsmith/astequal/astequal.go b/vendor/github.com/go-toolsmith/astequal/astequal.go deleted file mode 100644 index d1a04e942..000000000 --- a/vendor/github.com/go-toolsmith/astequal/astequal.go +++ /dev/null @@ -1,771 +0,0 @@ -// Package astequal provides AST (deep) equallity check operations. -package astequal - -import ( - "go/ast" - "go/token" -) - -// Node reports whether two AST nodes are structurally (deep) equal. -// -// Nil arguments are permitted: true is returned if x and y are both nils. -// -// See also: Expr, Stmt, Decl functions. -func Node(x, y ast.Node) bool { - return astNodeEq(x, y) -} - -// Expr reports whether two AST expressions are structurally (deep) equal. -// -// Nil arguments are permitted: true is returned if x and y are both nils. -// ast.BadExpr comparison always yields false. -func Expr(x, y ast.Expr) bool { - return astExprEq(x, y) -} - -// Stmt reports whether two AST statements are structurally (deep) equal. -// -// Nil arguments are permitted: true is returned if x and y are both nils. -// ast.BadStmt comparison always yields false. -func Stmt(x, y ast.Stmt) bool { - return astStmtEq(x, y) -} - -// Decl reports whether two AST declarations are structurally (deep) equal. 
-// -// Nil arguments are permitted: true is returned if x and y are both nils. -// ast.BadDecl comparison always yields false. -func Decl(x, y ast.Decl) bool { - return astDeclEq(x, y) -} - -// Functions to perform deep equallity checks between arbitrary AST nodes. - -// Compare interface node types. -// -// Interfaces, as well as their values, can be nil. -// -// Even if AST does expect field X to be mandatory, -// nil checks are required as nodes can be constructed -// manually, or be partially invalid/incomplete. - -func astNodeEq(x, y ast.Node) bool { - switch x := x.(type) { - case ast.Expr: - y, ok := y.(ast.Expr) - return ok && astExprEq(x, y) - case ast.Stmt: - y, ok := y.(ast.Stmt) - return ok && astStmtEq(x, y) - case ast.Decl: - y, ok := y.(ast.Decl) - return ok && astDeclEq(x, y) - - case *ast.Field: - y, ok := y.(*ast.Field) - return ok && astFieldEq(x, y) - case *ast.FieldList: - y, ok := y.(*ast.FieldList) - return ok && astFieldListEq(x, y) - - default: - return false - } -} - -func astExprEq(x, y ast.Expr) bool { - if x == nil || y == nil { - return x == y - } - - switch x := x.(type) { - case *ast.Ident: - y, ok := y.(*ast.Ident) - return ok && astIdentEq(x, y) - - case *ast.BasicLit: - y, ok := y.(*ast.BasicLit) - return ok && astBasicLitEq(x, y) - - case *ast.FuncLit: - y, ok := y.(*ast.FuncLit) - return ok && astFuncLitEq(x, y) - - case *ast.CompositeLit: - y, ok := y.(*ast.CompositeLit) - return ok && astCompositeLitEq(x, y) - - case *ast.ParenExpr: - y, ok := y.(*ast.ParenExpr) - return ok && astParenExprEq(x, y) - - case *ast.SelectorExpr: - y, ok := y.(*ast.SelectorExpr) - return ok && astSelectorExprEq(x, y) - - case *ast.IndexExpr: - y, ok := y.(*ast.IndexExpr) - return ok && astIndexExprEq(x, y) - - case *ast.IndexListExpr: - y, ok := y.(*ast.IndexListExpr) - return ok && astIndexListExprEq(x, y) - - case *ast.SliceExpr: - y, ok := y.(*ast.SliceExpr) - return ok && astSliceExprEq(x, y) - - case *ast.TypeAssertExpr: - y, ok := y.(*ast.TypeAssertExpr) - return ok && astTypeAssertExprEq(x, y) - - case *ast.CallExpr: - y, ok := y.(*ast.CallExpr) - return ok && astCallExprEq(x, y) - - case *ast.StarExpr: - y, ok := y.(*ast.StarExpr) - return ok && astStarExprEq(x, y) - - case *ast.UnaryExpr: - y, ok := y.(*ast.UnaryExpr) - return ok && astUnaryExprEq(x, y) - - case *ast.BinaryExpr: - y, ok := y.(*ast.BinaryExpr) - return ok && astBinaryExprEq(x, y) - - case *ast.KeyValueExpr: - y, ok := y.(*ast.KeyValueExpr) - return ok && astKeyValueExprEq(x, y) - - case *ast.ArrayType: - y, ok := y.(*ast.ArrayType) - return ok && astArrayTypeEq(x, y) - - case *ast.StructType: - y, ok := y.(*ast.StructType) - return ok && astStructTypeEq(x, y) - - case *ast.FuncType: - y, ok := y.(*ast.FuncType) - return ok && astFuncTypeEq(x, y) - - case *ast.InterfaceType: - y, ok := y.(*ast.InterfaceType) - return ok && astInterfaceTypeEq(x, y) - - case *ast.MapType: - y, ok := y.(*ast.MapType) - return ok && astMapTypeEq(x, y) - - case *ast.ChanType: - y, ok := y.(*ast.ChanType) - return ok && astChanTypeEq(x, y) - - case *ast.Ellipsis: - y, ok := y.(*ast.Ellipsis) - return ok && astEllipsisEq(x, y) - - default: - return false - } -} - -func astStmtEq(x, y ast.Stmt) bool { - if x == nil || y == nil { - return x == y - } - - switch x := x.(type) { - case *ast.ExprStmt: - y, ok := y.(*ast.ExprStmt) - return ok && astExprStmtEq(x, y) - - case *ast.SendStmt: - y, ok := y.(*ast.SendStmt) - return ok && astSendStmtEq(x, y) - - case *ast.IncDecStmt: - y, ok := y.(*ast.IncDecStmt) - return ok && 
astIncDecStmtEq(x, y) - - case *ast.AssignStmt: - y, ok := y.(*ast.AssignStmt) - return ok && astAssignStmtEq(x, y) - - case *ast.GoStmt: - y, ok := y.(*ast.GoStmt) - return ok && astGoStmtEq(x, y) - - case *ast.DeferStmt: - y, ok := y.(*ast.DeferStmt) - return ok && astDeferStmtEq(x, y) - - case *ast.ReturnStmt: - y, ok := y.(*ast.ReturnStmt) - return ok && astReturnStmtEq(x, y) - - case *ast.BranchStmt: - y, ok := y.(*ast.BranchStmt) - return ok && astBranchStmtEq(x, y) - - case *ast.BlockStmt: - y, ok := y.(*ast.BlockStmt) - return ok && astBlockStmtEq(x, y) - - case *ast.IfStmt: - y, ok := y.(*ast.IfStmt) - return ok && astIfStmtEq(x, y) - - case *ast.CaseClause: - y, ok := y.(*ast.CaseClause) - return ok && astCaseClauseEq(x, y) - - case *ast.SwitchStmt: - y, ok := y.(*ast.SwitchStmt) - return ok && astSwitchStmtEq(x, y) - - case *ast.TypeSwitchStmt: - y, ok := y.(*ast.TypeSwitchStmt) - return ok && astTypeSwitchStmtEq(x, y) - - case *ast.CommClause: - y, ok := y.(*ast.CommClause) - return ok && astCommClauseEq(x, y) - - case *ast.SelectStmt: - y, ok := y.(*ast.SelectStmt) - return ok && astSelectStmtEq(x, y) - - case *ast.ForStmt: - y, ok := y.(*ast.ForStmt) - return ok && astForStmtEq(x, y) - - case *ast.RangeStmt: - y, ok := y.(*ast.RangeStmt) - return ok && astRangeStmtEq(x, y) - - case *ast.DeclStmt: - y, ok := y.(*ast.DeclStmt) - return ok && astDeclStmtEq(x, y) - - case *ast.LabeledStmt: - y, ok := y.(*ast.LabeledStmt) - return ok && astLabeledStmtEq(x, y) - - case *ast.EmptyStmt: - y, ok := y.(*ast.EmptyStmt) - return ok && astEmptyStmtEq(x, y) - - default: - return false - } -} - -func astDeclEq(x, y ast.Decl) bool { - if x == nil || y == nil { - return x == y - } - - switch x := x.(type) { - case *ast.GenDecl: - y, ok := y.(*ast.GenDecl) - return ok && astGenDeclEq(x, y) - - case *ast.FuncDecl: - y, ok := y.(*ast.FuncDecl) - return ok && astFuncDeclEq(x, y) - - default: - return false - } -} - -// Compare concrete nodes for equallity. -// -// Any node of pointer type permitted to be nil, -// hence nil checks are mandatory. 
- -func astIdentEq(x, y *ast.Ident) bool { - if x == nil || y == nil { - return x == y - } - return x.Name == y.Name -} - -func astKeyValueExprEq(x, y *ast.KeyValueExpr) bool { - if x == nil || y == nil { - return x == y - } - return astExprEq(x.Key, y.Key) && astExprEq(x.Value, y.Value) -} - -func astArrayTypeEq(x, y *ast.ArrayType) bool { - if x == nil || y == nil { - return x == y - } - return astExprEq(x.Len, y.Len) && astExprEq(x.Elt, y.Elt) -} - -func astStructTypeEq(x, y *ast.StructType) bool { - if x == nil || y == nil { - return x == y - } - return astFieldListEq(x.Fields, y.Fields) -} - -func astFuncTypeEq(x, y *ast.FuncType) bool { - if x == nil || y == nil { - return x == y - } - return astFieldListEq(x.Params, y.Params) && - astFieldListEq(x.Results, y.Results) && - astFieldListEq(forFuncType(x), forFuncType(y)) -} - -func astBasicLitEq(x, y *ast.BasicLit) bool { - if x == nil || y == nil { - return x == y - } - return x.Kind == y.Kind && x.Value == y.Value -} - -func astBlockStmtEq(x, y *ast.BlockStmt) bool { - if x == nil || y == nil { - return x == y - } - return astStmtSliceEq(x.List, y.List) -} - -func astFieldEq(x, y *ast.Field) bool { - if x == nil || y == nil { - return x == y - } - return astIdentSliceEq(x.Names, y.Names) && - astExprEq(x.Type, y.Type) -} - -func astFuncLitEq(x, y *ast.FuncLit) bool { - if x == nil || y == nil { - return x == y - } - return astFuncTypeEq(x.Type, y.Type) && - astBlockStmtEq(x.Body, y.Body) -} - -func astCompositeLitEq(x, y *ast.CompositeLit) bool { - if x == nil || y == nil { - return x == y - } - return astExprEq(x.Type, y.Type) && - astExprSliceEq(x.Elts, y.Elts) -} - -func astSelectorExprEq(x, y *ast.SelectorExpr) bool { - if x == nil || y == nil { - return x == y - } - return astExprEq(x.X, y.X) && astIdentEq(x.Sel, y.Sel) -} - -func astIndexExprEq(x, y *ast.IndexExpr) bool { - if x == nil || y == nil { - return x == y - } - return astExprEq(x.X, y.X) && astExprEq(x.Index, y.Index) -} - -func astIndexListExprEq(x, y *ast.IndexListExpr) bool { - if x == nil || y == nil { - return x == y - } - return astExprEq(x.X, y.X) && astExprSliceEq(x.Indices, y.Indices) -} - -func astSliceExprEq(x, y *ast.SliceExpr) bool { - if x == nil || y == nil { - return x == y - } - return astExprEq(x.X, y.X) && - astExprEq(x.Low, y.Low) && - astExprEq(x.High, y.High) && - astExprEq(x.Max, y.Max) -} - -func astTypeAssertExprEq(x, y *ast.TypeAssertExpr) bool { - if x == nil || y == nil { - return x == y - } - return astExprEq(x.X, y.X) && astExprEq(x.Type, y.Type) -} - -func astInterfaceTypeEq(x, y *ast.InterfaceType) bool { - if x == nil || y == nil { - return x == y - } - return astFieldListEq(x.Methods, y.Methods) -} - -func astMapTypeEq(x, y *ast.MapType) bool { - if x == nil || y == nil { - return x == y - } - return astExprEq(x.Key, y.Key) && astExprEq(x.Value, y.Value) -} - -func astChanTypeEq(x, y *ast.ChanType) bool { - if x == nil || y == nil { - return x == y - } - return x.Dir == y.Dir && astExprEq(x.Value, y.Value) -} - -func astCallExprEq(x, y *ast.CallExpr) bool { - if x == nil || y == nil { - return x == y - } - return astExprEq(x.Fun, y.Fun) && - astExprSliceEq(x.Args, y.Args) && - (x.Ellipsis == 0) == (y.Ellipsis == 0) -} - -func astEllipsisEq(x, y *ast.Ellipsis) bool { - if x == nil || y == nil { - return x == y - } - return astExprEq(x.Elt, y.Elt) -} - -func astUnaryExprEq(x, y *ast.UnaryExpr) bool { - if x == nil || y == nil { - return x == y - } - return x.Op == y.Op && astExprEq(x.X, y.X) -} - -func astBinaryExprEq(x, y 
*ast.BinaryExpr) bool { - if x == nil || y == nil { - return x == y - } - return x.Op == y.Op && - astExprEq(x.X, y.X) && - astExprEq(x.Y, y.Y) -} - -func astParenExprEq(x, y *ast.ParenExpr) bool { - if x == nil || y == nil { - return x == y - } - return astExprEq(x.X, y.X) -} - -func astStarExprEq(x, y *ast.StarExpr) bool { - if x == nil || y == nil { - return x == y - } - return astExprEq(x.X, y.X) -} - -func astFieldListEq(x, y *ast.FieldList) bool { - if x == nil || y == nil { - return x == y - } - return astFieldSliceEq(x.List, y.List) -} - -func astEmptyStmtEq(x, y *ast.EmptyStmt) bool { - if x == nil || y == nil { - return x == y - } - return x.Implicit == y.Implicit -} - -func astLabeledStmtEq(x, y *ast.LabeledStmt) bool { - if x == nil || y == nil { - return x == y - } - return astIdentEq(x.Label, y.Label) && astStmtEq(x.Stmt, y.Stmt) -} - -func astExprStmtEq(x, y *ast.ExprStmt) bool { - if x == nil || y == nil { - return x == y - } - return astExprEq(x.X, y.X) -} - -func astSendStmtEq(x, y *ast.SendStmt) bool { - if x == nil || y == nil { - return x == y - } - return astExprEq(x.Chan, y.Chan) && astExprEq(x.Value, y.Value) -} - -func astDeclStmtEq(x, y *ast.DeclStmt) bool { - if x == nil || y == nil { - return x == y - } - return astDeclEq(x.Decl, y.Decl) -} - -func astIncDecStmtEq(x, y *ast.IncDecStmt) bool { - if x == nil || y == nil { - return x == y - } - return x.Tok == y.Tok && astExprEq(x.X, y.X) -} - -func astAssignStmtEq(x, y *ast.AssignStmt) bool { - if x == nil || y == nil { - return x == y - } - return x.Tok == y.Tok && - astExprSliceEq(x.Lhs, y.Lhs) && - astExprSliceEq(x.Rhs, y.Rhs) -} - -func astGoStmtEq(x, y *ast.GoStmt) bool { - if x == nil || y == nil { - return x == y - } - return astCallExprEq(x.Call, y.Call) -} - -func astDeferStmtEq(x, y *ast.DeferStmt) bool { - if x == nil || y == nil { - return x == y - } - return astCallExprEq(x.Call, y.Call) -} - -func astReturnStmtEq(x, y *ast.ReturnStmt) bool { - if x == nil || y == nil { - return x == y - } - return astExprSliceEq(x.Results, y.Results) -} - -func astBranchStmtEq(x, y *ast.BranchStmt) bool { - if x == nil || y == nil { - return x == y - } - return x.Tok == y.Tok && astIdentEq(x.Label, y.Label) -} - -func astIfStmtEq(x, y *ast.IfStmt) bool { - if x == nil || y == nil { - return x == y - } - return astStmtEq(x.Init, y.Init) && - astExprEq(x.Cond, y.Cond) && - astBlockStmtEq(x.Body, y.Body) && - astStmtEq(x.Else, y.Else) -} - -func astCaseClauseEq(x, y *ast.CaseClause) bool { - if x == nil || y == nil { - return x == y - } - return astExprSliceEq(x.List, y.List) && - astStmtSliceEq(x.Body, y.Body) -} - -func astSwitchStmtEq(x, y *ast.SwitchStmt) bool { - if x == nil || y == nil { - return x == y - } - return astStmtEq(x.Init, y.Init) && - astExprEq(x.Tag, y.Tag) && - astBlockStmtEq(x.Body, y.Body) -} - -func astTypeSwitchStmtEq(x, y *ast.TypeSwitchStmt) bool { - if x == nil || y == nil { - return x == y - } - return astStmtEq(x.Init, y.Init) && - astStmtEq(x.Assign, y.Assign) && - astBlockStmtEq(x.Body, y.Body) -} - -func astCommClauseEq(x, y *ast.CommClause) bool { - if x == nil || y == nil { - return x == y - } - return astStmtEq(x.Comm, y.Comm) && astStmtSliceEq(x.Body, y.Body) -} - -func astSelectStmtEq(x, y *ast.SelectStmt) bool { - if x == nil || y == nil { - return x == y - } - return astBlockStmtEq(x.Body, y.Body) -} - -func astForStmtEq(x, y *ast.ForStmt) bool { - if x == nil || y == nil { - return x == y - } - return astStmtEq(x.Init, y.Init) && - astExprEq(x.Cond, y.Cond) && - astStmtEq(x.Post, 
y.Post) && - astBlockStmtEq(x.Body, y.Body) -} - -func astRangeStmtEq(x, y *ast.RangeStmt) bool { - if x == nil || y == nil { - return x == y - } - return x.Tok == y.Tok && - astExprEq(x.Key, y.Key) && - astExprEq(x.Value, y.Value) && - astExprEq(x.X, y.X) && - astBlockStmtEq(x.Body, y.Body) -} - -func astFuncDeclEq(x, y *ast.FuncDecl) bool { - if x == nil || y == nil { - return x == y - } - return astFieldListEq(x.Recv, y.Recv) && - astIdentEq(x.Name, y.Name) && - astFuncTypeEq(x.Type, y.Type) && - astBlockStmtEq(x.Body, y.Body) -} - -func astGenDeclEq(x, y *ast.GenDecl) bool { - if x == nil || y == nil { - return x == y - } - - if x.Tok != y.Tok { - return false - } - if len(x.Specs) != len(y.Specs) { - return false - } - - switch x.Tok { - case token.IMPORT: - for i := range x.Specs { - xspec := x.Specs[i].(*ast.ImportSpec) - yspec := y.Specs[i].(*ast.ImportSpec) - if !astImportSpecEq(xspec, yspec) { - return false - } - } - case token.TYPE: - for i := range x.Specs { - xspec := x.Specs[i].(*ast.TypeSpec) - yspec := y.Specs[i].(*ast.TypeSpec) - if !astTypeSpecEq(xspec, yspec) { - return false - } - } - default: - for i := range x.Specs { - xspec := x.Specs[i].(*ast.ValueSpec) - yspec := y.Specs[i].(*ast.ValueSpec) - if !astValueSpecEq(xspec, yspec) { - return false - } - } - } - - return true -} - -func astImportSpecEq(x, y *ast.ImportSpec) bool { - if x == nil || y == nil { - return x == y - } - return astIdentEq(x.Name, y.Name) && astBasicLitEq(x.Path, y.Path) -} - -func astTypeSpecEq(x, y *ast.TypeSpec) bool { - if x == nil || y == nil { - return x == y - } - return astIdentEq(x.Name, y.Name) && astExprEq(x.Type, y.Type) && - astFieldListEq(forTypeSpec(x), forTypeSpec(y)) -} - -func astValueSpecEq(x, y *ast.ValueSpec) bool { - if x == nil || y == nil { - return x == y - } - return astIdentSliceEq(x.Names, y.Names) && - astExprEq(x.Type, y.Type) && - astExprSliceEq(x.Values, y.Values) -} - -// Compare slices for equallity. -// -// Each slice element that has pointer type permitted to be nil, -// hence instead of using adhoc comparison of values, -// equallity functions that are defined above are used. - -func astIdentSliceEq(xs, ys []*ast.Ident) bool { - if len(xs) != len(ys) { - return false - } - for i := range xs { - if !astIdentEq(xs[i], ys[i]) { - return false - } - } - return true -} - -func astFieldSliceEq(xs, ys []*ast.Field) bool { - if len(xs) != len(ys) { - return false - } - for i := range xs { - if !astFieldEq(xs[i], ys[i]) { - return false - } - } - return true -} - -func astStmtSliceEq(xs, ys []ast.Stmt) bool { - if len(xs) != len(ys) { - return false - } - for i := range xs { - if !astStmtEq(xs[i], ys[i]) { - return false - } - } - return true -} - -func astExprSliceEq(xs, ys []ast.Expr) bool { - if len(xs) != len(ys) { - return false - } - for i := range xs { - if !astExprEq(xs[i], ys[i]) { - return false - } - } - return true -} - -// forTypeSpec returns n.TypeParams. -func forTypeSpec(n *ast.TypeSpec) *ast.FieldList { - if n == nil { - return nil - } - return n.TypeParams -} - -// forFuncType returns n.TypeParams. 
-func forFuncType(n *ast.FuncType) *ast.FieldList { - if n == nil { - return nil - } - return n.TypeParams -} diff --git a/vendor/github.com/go-toolsmith/astequal/diff.go b/vendor/github.com/go-toolsmith/astequal/diff.go deleted file mode 100644 index cd69b4525..000000000 --- a/vendor/github.com/go-toolsmith/astequal/diff.go +++ /dev/null @@ -1,23 +0,0 @@ -package astequal - -import ( - "bytes" - "go/ast" - "go/format" - "go/token" - - "github.com/google/go-cmp/cmp" -) - -func Diff(x, y ast.Node) string { - var buf bytes.Buffer - format.Node(&buf, token.NewFileSet(), x) - s1 := buf.String() - - buf.Reset() - format.Node(&buf, token.NewFileSet(), y) - s2 := buf.String() - - // TODO(cristaloleg): replace with a more lightweight diff impl. - return cmp.Diff(s1, s2) -} diff --git a/vendor/github.com/go-toolsmith/astfmt/LICENSE b/vendor/github.com/go-toolsmith/astfmt/LICENSE deleted file mode 100644 index eef17180f..000000000 --- a/vendor/github.com/go-toolsmith/astfmt/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -MIT License - -Copyright (c) 2018 go-toolsmith - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. diff --git a/vendor/github.com/go-toolsmith/astfmt/README.md b/vendor/github.com/go-toolsmith/astfmt/README.md deleted file mode 100644 index 00f790fd2..000000000 --- a/vendor/github.com/go-toolsmith/astfmt/README.md +++ /dev/null @@ -1,55 +0,0 @@ -# astfmt - -[![build-img]][build-url] -[![pkg-img]][pkg-url] -[![reportcard-img]][reportcard-url] -[![version-img]][version-url] - -Package `astfmt` implements ast.Node formatting with fmt-like API. - -## Installation - -Go version 1.16+ - -```bash -go get github.com/go-toolsmith/astfmt -``` - -## Example - -```go -package main - -import ( - "go/token" - "os" - - "github.com/go-toolsmith/astfmt" - "github.com/go-toolsmith/strparse" -) - -func Example() { - x := strparse.Expr(`foo(bar(baz(1+2)))`) - // astfmt functions add %s support for ast.Node arguments. - astfmt.Println(x) // => foo(bar(baz(1 + 2))) - astfmt.Fprintf(os.Stdout, "node=%s\n", x) // => node=foo(bar(baz(1 + 2))) - - // Can use specific file set with printer. - fset := token.NewFileSet() // Suppose this fset is used when parsing - pp := astfmt.NewPrinter(fset) - pp.Println(x) // => foo(bar(baz(1 + 2))) -} -``` - -## License - -[MIT License](LICENSE). 
- -[build-img]: https://github.com/go-toolsmith/astfmt/workflows/build/badge.svg -[build-url]: https://github.com/go-toolsmith/astfmt/actions -[pkg-img]: https://pkg.go.dev/badge/go-toolsmith/astfmt -[pkg-url]: https://pkg.go.dev/github.com/go-toolsmith/astfmt -[reportcard-img]: https://goreportcard.com/badge/go-toolsmith/astfmt -[reportcard-url]: https://goreportcard.com/report/go-toolsmith/astfmt -[version-img]: https://img.shields.io/github/v/release/go-toolsmith/astfmt -[version-url]: https://github.com/go-toolsmith/astfmt/releases diff --git a/vendor/github.com/go-toolsmith/astfmt/astfmt.go b/vendor/github.com/go-toolsmith/astfmt/astfmt.go deleted file mode 100644 index ca993e033..000000000 --- a/vendor/github.com/go-toolsmith/astfmt/astfmt.go +++ /dev/null @@ -1,111 +0,0 @@ -// Package astfmt implements `ast.Node` formatting with fmt-like API. -package astfmt - -import ( - "bytes" - "fmt" - "go/ast" - "go/printer" - "go/token" - "io" -) - -// Println calls fmt.Println with additional support of %s format -// for ast.Node arguments. -// -// Uses empty file set for AST printing. -func Println(args ...interface{}) error { - return defaultPrinter.Println(args...) -} - -// Fprintf calls fmt.Fprintf with additional support of %s format -// for ast.Node arguments. -// -// Uses empty file set for AST printing. -func Fprintf(w io.Writer, format string, args ...interface{}) error { - return defaultPrinter.Fprintf(w, format, args...) -} - -// Sprintf calls fmt.Sprintf with additional support of %s format -// for ast.Node arguments. -// -// Uses empty file set for AST printing. -func Sprintf(format string, args ...interface{}) string { - return defaultPrinter.Sprintf(format, args...) -} - -// Sprint calls fmt.Sprint with additional support of %s format -// for ast.Node arguments. -// -// Uses empty file set for AST printing. -func Sprint(args ...interface{}) string { - return defaultPrinter.Sprint(args...) -} - -// NewPrinter returns printer that uses bound file set when printing AST nodes. -func NewPrinter(fset *token.FileSet) *Printer { - return &Printer{fset: fset} -} - -// Printer provides API close to fmt package for printing AST nodes. -// Unlike freestanding functions from this package, it makes it possible -// to associate appropriate file set for better output. -type Printer struct { - fset *token.FileSet -} - -// Println printer method is like Println function, but uses bound file set when printing. -func (p *Printer) Println(args ...interface{}) error { - _, err := fmt.Println(wrapArgs(p.fset, args)...) - return err -} - -// Fprintf printer method is like Fprintf function, but uses bound file set when printing. -func (p *Printer) Fprintf(w io.Writer, format string, args ...interface{}) error { - _, err := fmt.Fprintf(w, format, wrapArgs(p.fset, args)...) - return err -} - -// Sprintf printer method is like Sprintf function, but uses bound file set when printing. -func (p *Printer) Sprintf(format string, args ...interface{}) string { - return fmt.Sprintf(format, wrapArgs(p.fset, args)...) -} - -// Sprint printer method is like Sprint function, but uses bound file set when printing. -func (p *Printer) Sprint(args ...interface{}) string { - return fmt.Sprint(wrapArgs(p.fset, args)...) -} - -// defaultPrinter is used in printing functions like Println. -// Uses empty file set. -var defaultPrinter = NewPrinter(token.NewFileSet()) - -// wrapArgs returns arguments slice with every ast.Node element -// replaced with fmtNode wrapper that supports additional formatting. 
-func wrapArgs(fset *token.FileSet, args []interface{}) []interface{} { - for i := range args { - if x, ok := args[i].(ast.Node); ok { - args[i] = fmtNode{fset: fset, node: x} - } - } - return args -} - -type fmtNode struct { - fset *token.FileSet - node ast.Node -} - -func (n fmtNode) String() string { - var buf bytes.Buffer - if err := printer.Fprint(&buf, n.fset, n.node); err != nil { - return fmt.Sprintf("%%!s(ast.Node=%s)", err) - } - return buf.String() -} - -func (n fmtNode) GoString() string { - var buf bytes.Buffer - fmt.Fprintf(&buf, "%#v", n.node) - return buf.String() -} diff --git a/vendor/github.com/go-toolsmith/astp/LICENSE b/vendor/github.com/go-toolsmith/astp/LICENSE deleted file mode 100644 index eef17180f..000000000 --- a/vendor/github.com/go-toolsmith/astp/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -MIT License - -Copyright (c) 2018 go-toolsmith - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. diff --git a/vendor/github.com/go-toolsmith/astp/README.md b/vendor/github.com/go-toolsmith/astp/README.md deleted file mode 100644 index cf5197e81..000000000 --- a/vendor/github.com/go-toolsmith/astp/README.md +++ /dev/null @@ -1,54 +0,0 @@ -# astp - -[![build-img]][build-url] -[![pkg-img]][pkg-url] -[![reportcard-img]][reportcard-url] -[![version-img]][version-url] - -Package `astp` provides AST predicates. - -## Installation: - -Go version 1.16+ - -```bash -go get github.com/go-toolsmith/astp -``` - -## Example - -```go -package main - -import ( - "fmt" - - "github.com/go-toolsmith/astp" - "github.com/go-toolsmith/strparse" -) - -func main() { - if astp.IsIdent(strparse.Expr(`x`)) { - fmt.Println("ident") - } - if astp.IsBlockStmt(strparse.Stmt(`{f()}`)) { - fmt.Println("block stmt") - } - if astp.IsGenDecl(strparse.Decl(`var x int = 10`)) { - fmt.Println("gen decl") - } -} -``` - -## License - -[MIT License](LICENSE). 
- -[build-img]: https://github.com/go-toolsmith/astp/workflows/build/badge.svg -[build-url]: https://github.com/go-toolsmith/astp/actions -[pkg-img]: https://pkg.go.dev/badge/go-toolsmith/astp -[pkg-url]: https://pkg.go.dev/github.com/go-toolsmith/astp -[reportcard-img]: https://goreportcard.com/badge/go-toolsmith/astp -[reportcard-url]: https://goreportcard.com/report/go-toolsmith/astp -[version-img]: https://img.shields.io/github/v/release/go-toolsmith/astp -[version-url]: https://github.com/go-toolsmith/astp/releases diff --git a/vendor/github.com/go-toolsmith/astp/decl.go b/vendor/github.com/go-toolsmith/astp/decl.go deleted file mode 100644 index 4654ad95c..000000000 --- a/vendor/github.com/go-toolsmith/astp/decl.go +++ /dev/null @@ -1,39 +0,0 @@ -package astp - -import "go/ast" - -// IsDecl reports whether a node is a ast.Decl. -func IsDecl(node ast.Node) bool { - _, ok := node.(ast.Decl) - return ok -} - -// IsFuncDecl reports whether a given ast.Node is a function declaration (*ast.FuncDecl). -func IsFuncDecl(node ast.Node) bool { - _, ok := node.(*ast.FuncDecl) - return ok -} - -// IsGenDecl reports whether a given ast.Node is a generic declaration (*ast.GenDecl). -func IsGenDecl(node ast.Node) bool { - _, ok := node.(*ast.GenDecl) - return ok -} - -// IsImportSpec reports whether a given ast.Node is an import declaration (*ast.ImportSpec). -func IsImportSpec(node ast.Node) bool { - _, ok := node.(*ast.ImportSpec) - return ok -} - -// IsValueSpec reports whether a given ast.Node is a value declaration (*ast.ValueSpec). -func IsValueSpec(node ast.Node) bool { - _, ok := node.(*ast.ValueSpec) - return ok -} - -// IsTypeSpec reports whether a given ast.Node is a type declaration (*ast.TypeSpec). -func IsTypeSpec(node ast.Node) bool { - _, ok := node.(*ast.TypeSpec) - return ok -} diff --git a/vendor/github.com/go-toolsmith/astp/expr.go b/vendor/github.com/go-toolsmith/astp/expr.go deleted file mode 100644 index adf9668ce..000000000 --- a/vendor/github.com/go-toolsmith/astp/expr.go +++ /dev/null @@ -1,141 +0,0 @@ -package astp - -import "go/ast" - -// IsExpr reports whether a given ast.Node is an expression(ast.Expr). -func IsExpr(node ast.Node) bool { - _, ok := node.(ast.Expr) - return ok -} - -// IsBadExpr reports whether a given ast.Node is a bad expression (*ast.IsBadExpr). -func IsBadExpr(node ast.Node) bool { - _, ok := node.(*ast.BadExpr) - return ok -} - -// IsIdent reports whether a given ast.Node is an identifier (*ast.IsIdent). -func IsIdent(node ast.Node) bool { - _, ok := node.(*ast.Ident) - return ok -} - -// IsEllipsis reports whether a given ast.Node is an `...` (ellipsis) (*ast.IsEllipsis). -func IsEllipsis(node ast.Node) bool { - _, ok := node.(*ast.Ellipsis) - return ok -} - -// IsBasicLit reports whether a given ast.Node is a literal of basic type (*ast.IsBasicLit). -func IsBasicLit(node ast.Node) bool { - _, ok := node.(*ast.BasicLit) - return ok -} - -// IsFuncLit reports whether a given ast.Node is a function literal (*ast.IsFuncLit). -func IsFuncLit(node ast.Node) bool { - _, ok := node.(*ast.FuncLit) - return ok -} - -// IsCompositeLit reports whether a given ast.Node is a composite literal (*ast.IsCompositeLit). -func IsCompositeLit(node ast.Node) bool { - _, ok := node.(*ast.CompositeLit) - return ok -} - -// IsParenExpr reports whether a given ast.Node is a parenthesized expression (*ast.IsParenExpr). 
-func IsParenExpr(node ast.Node) bool { - _, ok := node.(*ast.ParenExpr) - return ok -} - -// IsSelectorExpr reports whether a given ast.Node is a selector expression (*ast.IsSelectorExpr). -func IsSelectorExpr(node ast.Node) bool { - _, ok := node.(*ast.SelectorExpr) - return ok -} - -// IsIndexExpr reports whether a given ast.Node is an index expression (*ast.IsIndexExpr). -func IsIndexExpr(node ast.Node) bool { - _, ok := node.(*ast.IndexExpr) - return ok -} - -// IsSliceExpr reports whether a given ast.Node is a slice expression (*ast.IsSliceExpr). -func IsSliceExpr(node ast.Node) bool { - _, ok := node.(*ast.SliceExpr) - return ok -} - -// IsTypeAssertExpr reports whether a given ast.Node is a type assert expression (*ast.IsTypeAssertExpr). -func IsTypeAssertExpr(node ast.Node) bool { - _, ok := node.(*ast.TypeAssertExpr) - return ok -} - -// IsCallExpr reports whether a given ast.Node is an expression followed by an argument list (*ast.IsCallExpr). -func IsCallExpr(node ast.Node) bool { - _, ok := node.(*ast.CallExpr) - return ok -} - -// IsStarExpr reports whether a given ast.Node is a star expression(unary "*" or apointer) (*ast.IsStarExpr) -func IsStarExpr(node ast.Node) bool { - _, ok := node.(*ast.StarExpr) - return ok -} - -// IsUnaryExpr reports whether a given ast.Node is a unary expression (*ast.IsUnaryExpr). -func IsUnaryExpr(node ast.Node) bool { - _, ok := node.(*ast.UnaryExpr) - return ok -} - -// IsBinaryExpr reports whether a given ast.Node is a binary expression (*ast.IsBinaryExpr). -func IsBinaryExpr(node ast.Node) bool { - _, ok := node.(*ast.BinaryExpr) - return ok -} - -// IsKeyValueExpr reports whether a given ast.Node is a (key:value) pair (*ast.IsKeyValueExpr). -func IsKeyValueExpr(node ast.Node) bool { - _, ok := node.(*ast.KeyValueExpr) - return ok -} - -// IsArrayType reports whether a given ast.Node is an array or slice type (*ast.IsArrayType). -func IsArrayType(node ast.Node) bool { - _, ok := node.(*ast.ArrayType) - return ok -} - -// IsStructType reports whether a given ast.Node is a struct type (*ast.IsStructType). -func IsStructType(node ast.Node) bool { - _, ok := node.(*ast.StructType) - return ok -} - -// IsFuncType reports whether a given ast.Node is a function type (*ast.IsFuncType). -func IsFuncType(node ast.Node) bool { - _, ok := node.(*ast.FuncType) - return ok -} - -// IsInterfaceType reports whether a given ast.Node is an interface type (*ast.IsInterfaceType). -func IsInterfaceType(node ast.Node) bool { - _, ok := node.(*ast.InterfaceType) - return ok -} - -// IsMapType reports whether a given ast.Node is a map type (*ast.IsMapType). -func IsMapType(node ast.Node) bool { - _, ok := node.(*ast.MapType) - return ok -} - -// IsChanType reports whether a given ast.Node is a channel type (*ast.IsChanType). -func IsChanType(node ast.Node) bool { - _, ok := node.(*ast.ChanType) - return ok -} diff --git a/vendor/github.com/go-toolsmith/astp/stmt.go b/vendor/github.com/go-toolsmith/astp/stmt.go deleted file mode 100644 index 19645d212..000000000 --- a/vendor/github.com/go-toolsmith/astp/stmt.go +++ /dev/null @@ -1,135 +0,0 @@ -package astp - -import "go/ast" - -// IsStmt reports whether a given ast.Node is a statement(ast.Stmt). 
-func IsStmt(node ast.Node) bool { - _, ok := node.(ast.Stmt) - return ok -} - -// IsBadStmt reports whether a given ast.Node is a bad statement(*ast.BadStmt) -func IsBadStmt(node ast.Node) bool { - _, ok := node.(*ast.BadStmt) - return ok -} - -// IsDeclStmt reports whether a given ast.Node is a declaration statement(*ast.DeclStmt) -func IsDeclStmt(node ast.Node) bool { - _, ok := node.(*ast.DeclStmt) - return ok -} - -// IsEmptyStmt reports whether a given ast.Node is an empty statement(*ast.EmptyStmt) -func IsEmptyStmt(node ast.Node) bool { - _, ok := node.(*ast.EmptyStmt) - return ok -} - -// IsLabeledStmt reports whether a given ast.Node is a label statement(*ast.LabeledStmt) -func IsLabeledStmt(node ast.Node) bool { - _, ok := node.(*ast.LabeledStmt) - return ok -} - -// IsExprStmt reports whether a given ast.Node is an expression statement(*ast.ExprStmt) -func IsExprStmt(node ast.Node) bool { - _, ok := node.(*ast.ExprStmt) - return ok -} - -// IsSendStmt reports whether a given ast.Node is a send to chan statement(*ast.SendStmt) -func IsSendStmt(node ast.Node) bool { - _, ok := node.(*ast.SendStmt) - return ok -} - -// IsIncDecStmt reports whether a given ast.Node is a increment/decrement statement(*ast.IncDecStmt) -func IsIncDecStmt(node ast.Node) bool { - _, ok := node.(*ast.IncDecStmt) - return ok -} - -// IsAssignStmt reports whether a given ast.Node is an assignment statement(*ast.AssignStmt) -func IsAssignStmt(node ast.Node) bool { - _, ok := node.(*ast.AssignStmt) - return ok -} - -// IsGoStmt reports whether a given ast.Node is a go statement(*ast.GoStmt) -func IsGoStmt(node ast.Node) bool { - _, ok := node.(*ast.GoStmt) - return ok -} - -// IsDeferStmt reports whether a given ast.Node is a defer statement(*ast.DeferStmt) -func IsDeferStmt(node ast.Node) bool { - _, ok := node.(*ast.DeferStmt) - return ok -} - -// IsReturnStmt reports whether a given ast.Node is a return statement(*ast.ReturnStmt) -func IsReturnStmt(node ast.Node) bool { - _, ok := node.(*ast.ReturnStmt) - return ok -} - -// IsBranchStmt reports whether a given ast.Node is a branch(goto/continue/break/fallthrough)statement(*ast.BranchStmt) -func IsBranchStmt(node ast.Node) bool { - _, ok := node.(*ast.BranchStmt) - return ok -} - -// IsBlockStmt reports whether a given ast.Node is a block statement(*ast.BlockStmt) -func IsBlockStmt(node ast.Node) bool { - _, ok := node.(*ast.BlockStmt) - return ok -} - -// IsIfStmt reports whether a given ast.Node is an if statement(*ast.IfStmt) -func IsIfStmt(node ast.Node) bool { - _, ok := node.(*ast.IfStmt) - return ok -} - -// IsCaseClause reports whether a given ast.Node is a case statement(*ast.CaseClause) -func IsCaseClause(node ast.Node) bool { - _, ok := node.(*ast.CaseClause) - return ok -} - -// IsSwitchStmt reports whether a given ast.Node is a switch statement(*ast.SwitchStmt) -func IsSwitchStmt(node ast.Node) bool { - _, ok := node.(*ast.SwitchStmt) - return ok -} - -// IsTypeSwitchStmt reports whether a given ast.Node is a type switch statement(*ast.TypeSwitchStmt) -func IsTypeSwitchStmt(node ast.Node) bool { - _, ok := node.(*ast.TypeSwitchStmt) - return ok -} - -// IsCommClause reports whether a given ast.Node is a select statement(*ast.CommClause) -func IsCommClause(node ast.Node) bool { - _, ok := node.(*ast.CommClause) - return ok -} - -// IsSelectStmt reports whether a given ast.Node is a selection statement(*ast.SelectStmt) -func IsSelectStmt(node ast.Node) bool { - _, ok := node.(*ast.SelectStmt) - return ok -} - -// IsForStmt reports whether a given 
ast.Node is a for statement(*ast.ForStmt) -func IsForStmt(node ast.Node) bool { - _, ok := node.(*ast.ForStmt) - return ok -} - -// IsRangeStmt reports whether a given ast.Node is a range statement(*ast.RangeStmt) -func IsRangeStmt(node ast.Node) bool { - _, ok := node.(*ast.RangeStmt) - return ok -} diff --git a/vendor/github.com/go-toolsmith/strparse/LICENSE b/vendor/github.com/go-toolsmith/strparse/LICENSE deleted file mode 100644 index eef17180f..000000000 --- a/vendor/github.com/go-toolsmith/strparse/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -MIT License - -Copyright (c) 2018 go-toolsmith - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. diff --git a/vendor/github.com/go-toolsmith/strparse/README.md b/vendor/github.com/go-toolsmith/strparse/README.md deleted file mode 100644 index ac04d516f..000000000 --- a/vendor/github.com/go-toolsmith/strparse/README.md +++ /dev/null @@ -1,48 +0,0 @@ -# strparse - -[![build-img]][build-url] -[![pkg-img]][pkg-url] -[![reportcard-img]][reportcard-url] -[![version-img]][version-url] - -Package `strparse` provides convenience wrappers around `go/parser` for simple -expression, statement and declaretion parsing from string. - -## Installation - -Go version 1.16+ - -```bash -go get github.com/go-toolsmith/strparse -``` - -## Example - -```go -package main - -import ( - "github.com/go-toolsmith/astequal" - "github.com/go-toolsmith/strparse" -) - -func main() { - // Comparing AST strings for equallity (note different spacing): - x := strparse.Expr(`1 + f(v[0].X)`) - y := strparse.Expr(` 1+f( v[0].X ) `) - fmt.Println(astequal.Expr(x, y)) // => true -} -``` - -## License - -[MIT License](LICENSE). 
- -[build-img]: https://github.com/go-toolsmith/strparse/workflows/build/badge.svg -[build-url]: https://github.com/go-toolsmith/strparse/actions -[pkg-img]: https://pkg.go.dev/badge/go-toolsmith/strparse -[pkg-url]: https://pkg.go.dev/github.com/go-toolsmith/strparse -[reportcard-img]: https://goreportcard.com/badge/go-toolsmith/strparse -[reportcard-url]: https://goreportcard.com/report/go-toolsmith/strparse -[version-img]: https://img.shields.io/github/v/release/go-toolsmith/strparse -[version-url]: https://github.com/go-toolsmith/strparse/releases diff --git a/vendor/github.com/go-toolsmith/strparse/strparse.go b/vendor/github.com/go-toolsmith/strparse/strparse.go deleted file mode 100644 index 894c7ebac..000000000 --- a/vendor/github.com/go-toolsmith/strparse/strparse.go +++ /dev/null @@ -1,59 +0,0 @@ -// Package strparse provides convenience wrappers around `go/parser` for simple -// expression, statement and declaration parsing from string. -// -// Can be used to construct AST nodes using source syntax. -package strparse - -import ( - "go/ast" - "go/parser" - "go/token" -) - -var ( - // BadExpr is returned as a parse result for malformed expressions. - // Should be treated as constant or readonly variable. - BadExpr = &ast.BadExpr{} - - // BadStmt is returned as a parse result for malformed statmenents. - // Should be treated as constant or readonly variable. - BadStmt = &ast.BadStmt{} - - // BadDecl is returned as a parse result for malformed declarations. - // Should be treated as constant or readonly variable. - BadDecl = &ast.BadDecl{} -) - -// Expr parses single expression node from s. -// In case of parse error, BadExpr is returned. -func Expr(s string) ast.Expr { - node, err := parser.ParseExpr(s) - if err != nil { - return BadExpr - } - return node -} - -// Stmt parses single statement node from s. -// In case of parse error, BadStmt is returned. -func Stmt(s string) ast.Stmt { - node, err := parser.ParseFile(token.NewFileSet(), "", "package main;func main() {"+s+"}", 0) - if err != nil { - return BadStmt - } - fn := node.Decls[0].(*ast.FuncDecl) - if len(fn.Body.List) != 1 { - return BadStmt - } - return fn.Body.List[0] -} - -// Decl parses single declaration node from s. -// In case of parse error, BadDecl is returned. -func Decl(s string) ast.Decl { - node, err := parser.ParseFile(token.NewFileSet(), "", "package main;"+s, 0) - if err != nil || len(node.Decls) != 1 { - return BadDecl - } - return node.Decls[0] -} diff --git a/vendor/github.com/go-toolsmith/typep/LICENSE b/vendor/github.com/go-toolsmith/typep/LICENSE deleted file mode 100644 index eef17180f..000000000 --- a/vendor/github.com/go-toolsmith/typep/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -MIT License - -Copyright (c) 2018 go-toolsmith - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. diff --git a/vendor/github.com/go-toolsmith/typep/README.md b/vendor/github.com/go-toolsmith/typep/README.md deleted file mode 100644 index 77478c443..000000000 --- a/vendor/github.com/go-toolsmith/typep/README.md +++ /dev/null @@ -1,54 +0,0 @@ -# typep - -[![build-img]][build-url] -[![pkg-img]][pkg-url] -[![reportcard-img]][reportcard-url] -[![version-img]][version-url] - -Package `typep` provides type predicates. - -## Installation: - -Go version 1.16+ - -```bash -go get github.com/go-toolsmith/typep -``` - -## Example - -```go -package main - -import ( - "fmt" - - "github.com/go-toolsmith/typep" - "github.com/go-toolsmith/strparse" -) - -func main() { - floatTyp := types.Typ[types.Float32] - intTyp := types.Typ[types.Int] - ptr := types.NewPointer(intTyp) - arr := types.NewArray(intTyp, 64) - - fmt.Println(typep.HasFloatProp(floatTyp)) // => true - fmt.Println(typep.HasFloatProp(intTyp)) // => false - fmt.Println(typep.IsPointer(ptr)) // => true - fmt.Println(typep.IsArray(arr)) // => true -} -``` - -## License - -[MIT License](LICENSE). - -[build-img]: https://github.com/go-toolsmith/typep/workflows/build/badge.svg -[build-url]: https://github.com/go-toolsmith/typep/actions -[pkg-img]: https://pkg.go.dev/badge/go-toolsmith/typep -[pkg-url]: https://pkg.go.dev/github.com/go-toolsmith/typep -[reportcard-img]: https://goreportcard.com/badge/go-toolsmith/typep -[reportcard-url]: https://goreportcard.com/report/go-toolsmith/typep -[version-img]: https://img.shields.io/github/v/release/go-toolsmith/typep -[version-url]: https://github.com/go-toolsmith/typep/releases diff --git a/vendor/github.com/go-toolsmith/typep/doc.go b/vendor/github.com/go-toolsmith/typep/doc.go deleted file mode 100644 index 990bc402c..000000000 --- a/vendor/github.com/go-toolsmith/typep/doc.go +++ /dev/null @@ -1,2 +0,0 @@ -// Package typep provides type predicates. -package typep diff --git a/vendor/github.com/go-toolsmith/typep/predicates.go b/vendor/github.com/go-toolsmith/typep/predicates.go deleted file mode 100644 index b07325a72..000000000 --- a/vendor/github.com/go-toolsmith/typep/predicates.go +++ /dev/null @@ -1,36 +0,0 @@ -package typep - -import ( - "go/ast" - "go/types" -) - -// IsTypeExpr reports whether x represents a type expression. -// -// Type expression does not evaluate to any run time value, -// but rather describes a type that is used inside Go expression. -// -// For example, (*T)(v) is a CallExpr that "calls" (*T). -// (*T) is a type expression that tells Go compiler type v should be converted to. -func IsTypeExpr(info *types.Info, x ast.Expr) bool { - switch x := x.(type) { - case *ast.StarExpr: - return IsTypeExpr(info, x.X) - case *ast.ParenExpr: - return IsTypeExpr(info, x.X) - case *ast.SelectorExpr: - return IsTypeExpr(info, x.Sel) - - case *ast.Ident: - // Identifier may be a type expression if object - // it reffers to is a type name. 
- _, ok := info.ObjectOf(x).(*types.TypeName) - return ok - - case *ast.FuncType, *ast.StructType, *ast.InterfaceType, *ast.ArrayType, *ast.MapType, *ast.ChanType: - return true - - default: - return false - } -} diff --git a/vendor/github.com/go-toolsmith/typep/safe_expr.go b/vendor/github.com/go-toolsmith/typep/safe_expr.go deleted file mode 100644 index d5835d97b..000000000 --- a/vendor/github.com/go-toolsmith/typep/safe_expr.go +++ /dev/null @@ -1,73 +0,0 @@ -package typep - -import ( - "go/ast" - "go/token" - "go/types" -) - -// SideEffectFree reports whether expr is softly safe expression and contains -// no significant side-effects. As opposed to strictly safe expressions, -// soft safe expressions permit some forms of side-effects, like -// panic possibility during indexing or nil pointer dereference. -// -// Uses types info to determine type conversion expressions that -// are the only permitted kinds of call expressions. -// Note that is does not check whether called function really -// has any side effects. The analysis is very conservative. -func SideEffectFree(info *types.Info, expr ast.Expr) bool { - // This list switch is not comprehensive and uses - // whitelist to be on the conservative side. - // Can be extended as needed. - - if expr == nil { - return true - } - - switch expr := expr.(type) { - case *ast.StarExpr: - return SideEffectFree(info, expr.X) - case *ast.BinaryExpr: - return SideEffectFree(info, expr.X) && - SideEffectFree(info, expr.Y) - case *ast.UnaryExpr: - return expr.Op != token.ARROW && - SideEffectFree(info, expr.X) - case *ast.BasicLit, *ast.Ident: - return true - case *ast.SliceExpr: - return SideEffectFree(info, expr.X) && - SideEffectFree(info, expr.Low) && - SideEffectFree(info, expr.High) && - SideEffectFree(info, expr.Max) - case *ast.IndexExpr: - return SideEffectFree(info, expr.X) && - SideEffectFree(info, expr.Index) - case *ast.SelectorExpr: - return SideEffectFree(info, expr.X) - case *ast.ParenExpr: - return SideEffectFree(info, expr.X) - case *ast.TypeAssertExpr: - return SideEffectFree(info, expr.X) - case *ast.CompositeLit: - return SideEffectFreeList(info, expr.Elts) - case *ast.CallExpr: - return IsTypeExpr(info, expr.Fun) && - SideEffectFreeList(info, expr.Args) - - default: - return false - } -} - -// SideEffectFreeList reports whether every expr in list is safe. -// -// See SideEffectFree. -func SideEffectFreeList(info *types.Info, list []ast.Expr) bool { - for _, expr := range list { - if !SideEffectFree(info, expr) { - return false - } - } - return true -} diff --git a/vendor/github.com/go-toolsmith/typep/simple_predicates.go b/vendor/github.com/go-toolsmith/typep/simple_predicates.go deleted file mode 100644 index 61e7d5b7f..000000000 --- a/vendor/github.com/go-toolsmith/typep/simple_predicates.go +++ /dev/null @@ -1,359 +0,0 @@ -// Code generated by simple_predicates_generate.go; DO NOT EDIT - -package typep - -import ( - "go/types" -) - -// Simple 1-to-1 type predicates via type assertion. - -// IsBasic reports whether a given type has *types.Basic type. -func IsBasic(typ types.Type) bool { - _, ok := typ.(*types.Basic) - return ok -} - -// IsArray reports whether a given type has *types.Array type. -func IsArray(typ types.Type) bool { - _, ok := typ.(*types.Array) - return ok -} - -// IsSlice reports whether a given type has *types.Slice type. -func IsSlice(typ types.Type) bool { - _, ok := typ.(*types.Slice) - return ok -} - -// IsStruct reports whether a given type has *types.Struct type. 
-func IsStruct(typ types.Type) bool { - _, ok := typ.(*types.Struct) - return ok -} - -// IsPointer reports whether a given type has *types.Pointer type. -func IsPointer(typ types.Type) bool { - _, ok := typ.(*types.Pointer) - return ok -} - -// IsTuple reports whether a given type has *types.Tuple type. -func IsTuple(typ types.Type) bool { - _, ok := typ.(*types.Tuple) - return ok -} - -// IsSignature reports whether a given type has *types.Signature type. -func IsSignature(typ types.Type) bool { - _, ok := typ.(*types.Signature) - return ok -} - -// IsInterface reports whether a given type has *types.Interface type. -func IsInterface(typ types.Type) bool { - _, ok := typ.(*types.Interface) - return ok -} - -// IsMap reports whether a given type has *types.Map type. -func IsMap(typ types.Type) bool { - _, ok := typ.(*types.Map) - return ok -} - -// IsChan reports whether a given type has *types.Chan type. -func IsChan(typ types.Type) bool { - _, ok := typ.(*types.Chan) - return ok -} - -// IsNamed reports whether a given type has *types.Named type. -func IsNamed(typ types.Type) bool { - _, ok := typ.(*types.Named) - return ok -} - -// *types.Basic predicates for the info field. - -// HasBooleanProp reports whether typ is a *types.Basic has IsBoolean property. -func HasBooleanProp(typ types.Type) bool { - if typ, ok := typ.(*types.Basic); ok { - return typ.Info()&types.IsBoolean != 0 - } - return false -} - -// HasIntegerProp reports whether typ is a *types.Basic has IsInteger property. -func HasIntegerProp(typ types.Type) bool { - if typ, ok := typ.(*types.Basic); ok { - return typ.Info()&types.IsInteger != 0 - } - return false -} - -// HasUnsignedProp reports whether typ is a *types.Basic has IsUnsigned property. -func HasUnsignedProp(typ types.Type) bool { - if typ, ok := typ.(*types.Basic); ok { - return typ.Info()&types.IsUnsigned != 0 - } - return false -} - -// HasFloatProp reports whether typ is a *types.Basic has IsFloat property. -func HasFloatProp(typ types.Type) bool { - if typ, ok := typ.(*types.Basic); ok { - return typ.Info()&types.IsFloat != 0 - } - return false -} - -// HasComplexProp reports whether typ is a *types.Basic has IsComplex property. -func HasComplexProp(typ types.Type) bool { - if typ, ok := typ.(*types.Basic); ok { - return typ.Info()&types.IsComplex != 0 - } - return false -} - -// HasStringProp reports whether typ is a *types.Basic has IsString property. -func HasStringProp(typ types.Type) bool { - if typ, ok := typ.(*types.Basic); ok { - return typ.Info()&types.IsString != 0 - } - return false -} - -// HasUntypedProp reports whether typ is a *types.Basic has IsUntyped property. -func HasUntypedProp(typ types.Type) bool { - if typ, ok := typ.(*types.Basic); ok { - return typ.Info()&types.IsUntyped != 0 - } - return false -} - -// HasOrderedProp reports whether typ is a *types.Basic has IsOrdered property. -func HasOrderedProp(typ types.Type) bool { - if typ, ok := typ.(*types.Basic); ok { - return typ.Info()&types.IsOrdered != 0 - } - return false -} - -// HasNumericProp reports whether typ is a *types.Basic has IsNumeric property. -func HasNumericProp(typ types.Type) bool { - if typ, ok := typ.(*types.Basic); ok { - return typ.Info()&types.IsNumeric != 0 - } - return false -} - -// HasConstTypeProp reports whether typ is a *types.Basic has IsConstType property. 
-func HasConstTypeProp(typ types.Type) bool { - if typ, ok := typ.(*types.Basic); ok { - return typ.Info()&types.IsConstType != 0 - } - return false -} - -// *types.Basic predicates for the kind field. - -// HasBoolKind reports whether typ is a *types.Basic with its kind set to types.Bool. -func HasBoolKind(typ types.Type) bool { - if typ, ok := typ.(*types.Basic); ok { - return typ.Kind() == types.Bool - } - return false -} - -// HasIntKind reports whether typ is a *types.Basic with its kind set to types.Int. -func HasIntKind(typ types.Type) bool { - if typ, ok := typ.(*types.Basic); ok { - return typ.Kind() == types.Int - } - return false -} - -// HasInt8Kind reports whether typ is a *types.Basic with its kind set to types.Int8. -func HasInt8Kind(typ types.Type) bool { - if typ, ok := typ.(*types.Basic); ok { - return typ.Kind() == types.Int8 - } - return false -} - -// HasInt16Kind reports whether typ is a *types.Basic with its kind set to types.Int16. -func HasInt16Kind(typ types.Type) bool { - if typ, ok := typ.(*types.Basic); ok { - return typ.Kind() == types.Int16 - } - return false -} - -// HasInt32Kind reports whether typ is a *types.Basic with its kind set to types.Int32. -func HasInt32Kind(typ types.Type) bool { - if typ, ok := typ.(*types.Basic); ok { - return typ.Kind() == types.Int32 - } - return false -} - -// HasInt64Kind reports whether typ is a *types.Basic with its kind set to types.Int64. -func HasInt64Kind(typ types.Type) bool { - if typ, ok := typ.(*types.Basic); ok { - return typ.Kind() == types.Int64 - } - return false -} - -// HasUintKind reports whether typ is a *types.Basic with its kind set to types.Uint. -func HasUintKind(typ types.Type) bool { - if typ, ok := typ.(*types.Basic); ok { - return typ.Kind() == types.Uint - } - return false -} - -// HasUint8Kind reports whether typ is a *types.Basic with its kind set to types.Uint8. -func HasUint8Kind(typ types.Type) bool { - if typ, ok := typ.(*types.Basic); ok { - return typ.Kind() == types.Uint8 - } - return false -} - -// HasUint16Kind reports whether typ is a *types.Basic with its kind set to types.Uint16. -func HasUint16Kind(typ types.Type) bool { - if typ, ok := typ.(*types.Basic); ok { - return typ.Kind() == types.Uint16 - } - return false -} - -// HasUint32Kind reports whether typ is a *types.Basic with its kind set to types.Uint32. -func HasUint32Kind(typ types.Type) bool { - if typ, ok := typ.(*types.Basic); ok { - return typ.Kind() == types.Uint32 - } - return false -} - -// HasUint64Kind reports whether typ is a *types.Basic with its kind set to types.Uint64. -func HasUint64Kind(typ types.Type) bool { - if typ, ok := typ.(*types.Basic); ok { - return typ.Kind() == types.Uint64 - } - return false -} - -// HasUintptrKind reports whether typ is a *types.Basic with its kind set to types.Uintptr. -func HasUintptrKind(typ types.Type) bool { - if typ, ok := typ.(*types.Basic); ok { - return typ.Kind() == types.Uintptr - } - return false -} - -// HasFloat32Kind reports whether typ is a *types.Basic with its kind set to types.Float32. -func HasFloat32Kind(typ types.Type) bool { - if typ, ok := typ.(*types.Basic); ok { - return typ.Kind() == types.Float32 - } - return false -} - -// HasFloat64Kind reports whether typ is a *types.Basic with its kind set to types.Float64. -func HasFloat64Kind(typ types.Type) bool { - if typ, ok := typ.(*types.Basic); ok { - return typ.Kind() == types.Float64 - } - return false -} - -// HasComplex64Kind reports whether typ is a *types.Basic with its kind set to types.Complex64. 
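// A minimal usage sketch for the generated predicates above, assuming only the
// typep package and go/types from the standard library; the concrete types
// below are invented for illustration.
package main

import (
	"fmt"
	"go/types"

	"github.com/go-toolsmith/typep"
)

func main() {
	intTyp := types.Typ[types.Int]
	strTyp := types.Typ[types.String]

	// Property predicates test bits of the Info() mask on *types.Basic.
	fmt.Println(typep.HasIntegerProp(intTyp)) // => true
	fmt.Println(typep.HasOrderedProp(strTyp)) // => true, strings are ordered

	// Kind predicates compare the Kind() value exactly.
	fmt.Println(typep.HasIntKind(intTyp))   // => true
	fmt.Println(typep.HasInt64Kind(intTyp)) // => false, int and int64 are distinct kinds
}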
-func HasComplex64Kind(typ types.Type) bool { - if typ, ok := typ.(*types.Basic); ok { - return typ.Kind() == types.Complex64 - } - return false -} - -// HasComplex128Kind reports whether typ is a *types.Basic with its kind set to types.Complex128. -func HasComplex128Kind(typ types.Type) bool { - if typ, ok := typ.(*types.Basic); ok { - return typ.Kind() == types.Complex128 - } - return false -} - -// HasStringKind reports whether typ is a *types.Basic with its kind set to types.String. -func HasStringKind(typ types.Type) bool { - if typ, ok := typ.(*types.Basic); ok { - return typ.Kind() == types.String - } - return false -} - -// HasUnsafePointerKind reports whether typ is a *types.Basic with its kind set to types.UnsafePointer. -func HasUnsafePointerKind(typ types.Type) bool { - if typ, ok := typ.(*types.Basic); ok { - return typ.Kind() == types.UnsafePointer - } - return false -} - -// HasUntypedBoolKind reports whether typ is a *types.Basic with its kind set to types.UntypedBool. -func HasUntypedBoolKind(typ types.Type) bool { - if typ, ok := typ.(*types.Basic); ok { - return typ.Kind() == types.UntypedBool - } - return false -} - -// HasUntypedIntKind reports whether typ is a *types.Basic with its kind set to types.UntypedInt. -func HasUntypedIntKind(typ types.Type) bool { - if typ, ok := typ.(*types.Basic); ok { - return typ.Kind() == types.UntypedInt - } - return false -} - -// HasUntypedRuneKind reports whether typ is a *types.Basic with its kind set to types.UntypedRune. -func HasUntypedRuneKind(typ types.Type) bool { - if typ, ok := typ.(*types.Basic); ok { - return typ.Kind() == types.UntypedRune - } - return false -} - -// HasUntypedFloatKind reports whether typ is a *types.Basic with its kind set to types.UntypedFloat. -func HasUntypedFloatKind(typ types.Type) bool { - if typ, ok := typ.(*types.Basic); ok { - return typ.Kind() == types.UntypedFloat - } - return false -} - -// HasUntypedComplexKind reports whether typ is a *types.Basic with its kind set to types.UntypedComplex. -func HasUntypedComplexKind(typ types.Type) bool { - if typ, ok := typ.(*types.Basic); ok { - return typ.Kind() == types.UntypedComplex - } - return false -} - -// HasUntypedStringKind reports whether typ is a *types.Basic with its kind set to types.UntypedString. -func HasUntypedStringKind(typ types.Type) bool { - if typ, ok := typ.(*types.Basic); ok { - return typ.Kind() == types.UntypedString - } - return false -} - -// HasUntypedNilKind reports whether typ is a *types.Basic with its kind set to types.UntypedNil. -func HasUntypedNilKind(typ types.Type) bool { - if typ, ok := typ.(*types.Basic); ok { - return typ.Kind() == types.UntypedNil - } - return false -} diff --git a/vendor/github.com/go-viper/mapstructure/v2/.editorconfig b/vendor/github.com/go-viper/mapstructure/v2/.editorconfig deleted file mode 100644 index 1f664d13a..000000000 --- a/vendor/github.com/go-viper/mapstructure/v2/.editorconfig +++ /dev/null @@ -1,18 +0,0 @@ -root = true - -[*] -charset = utf-8 -end_of_line = lf -indent_size = 4 -indent_style = space -insert_final_newline = true -trim_trailing_whitespace = true - -[*.go] -indent_style = tab - -[{Makefile,*.mk}] -indent_style = tab - -[*.nix] -indent_size = 2 diff --git a/vendor/github.com/go-viper/mapstructure/v2/.envrc b/vendor/github.com/go-viper/mapstructure/v2/.envrc deleted file mode 100644 index c66fc0d35..000000000 --- a/vendor/github.com/go-viper/mapstructure/v2/.envrc +++ /dev/null @@ -1,4 +0,0 @@ -if ! has nix_direnv_version || ! 
nix_direnv_version 3.0.1; then - source_url "https://raw.githubusercontent.com/nix-community/nix-direnv/3.0.1/direnvrc" "sha256-17G+Mvt/JsyJrwsf7bqMr7ho7liHP+0Lo4RMIHgp0F8=" -fi -use flake . --impure diff --git a/vendor/github.com/go-viper/mapstructure/v2/.gitignore b/vendor/github.com/go-viper/mapstructure/v2/.gitignore deleted file mode 100644 index 470e7ca2b..000000000 --- a/vendor/github.com/go-viper/mapstructure/v2/.gitignore +++ /dev/null @@ -1,6 +0,0 @@ -/.devenv/ -/.direnv/ -/.pre-commit-config.yaml -/bin/ -/build/ -/var/ diff --git a/vendor/github.com/go-viper/mapstructure/v2/.golangci.yaml b/vendor/github.com/go-viper/mapstructure/v2/.golangci.yaml deleted file mode 100644 index 763143aa7..000000000 --- a/vendor/github.com/go-viper/mapstructure/v2/.golangci.yaml +++ /dev/null @@ -1,23 +0,0 @@ -run: - timeout: 5m - -linters-settings: - gci: - sections: - - standard - - default - - prefix(github.com/go-viper/mapstructure) - golint: - min-confidence: 0 - goimports: - local-prefixes: github.com/go-viper/maptstructure - -linters: - disable-all: true - enable: - - gci - - gofmt - - gofumpt - - goimports - - staticcheck - # - stylecheck diff --git a/vendor/github.com/go-viper/mapstructure/v2/CHANGELOG.md b/vendor/github.com/go-viper/mapstructure/v2/CHANGELOG.md deleted file mode 100644 index ae634d1cc..000000000 --- a/vendor/github.com/go-viper/mapstructure/v2/CHANGELOG.md +++ /dev/null @@ -1,101 +0,0 @@ -## 1.5.1 - -* Wrap errors so they're compatible with `errors.Is` and `errors.As` [GH-282] -* Fix map of slices not decoding properly in certain cases. [GH-266] - -## 1.5.0 - -* New option `IgnoreUntaggedFields` to ignore decoding to any fields - without `mapstructure` (or the configured tag name) set [GH-277] -* New option `ErrorUnset` which makes it an error if any fields - in a target struct are not set by the decoding process. [GH-225] -* New function `OrComposeDecodeHookFunc` to help compose decode hooks. [GH-240] -* Decoding to slice from array no longer crashes [GH-265] -* Decode nested struct pointers to map [GH-271] -* Fix issue where `,squash` was ignored if `Squash` option was set. [GH-280] -* Fix issue where fields with `,omitempty` would sometimes decode - into a map with an empty string key [GH-281] - -## 1.4.3 - -* Fix cases where `json.Number` didn't decode properly [GH-261] - -## 1.4.2 - -* Custom name matchers to support any sort of casing, formatting, etc. for - field names. [GH-250] -* Fix possible panic in ComposeDecodeHookFunc [GH-251] - -## 1.4.1 - -* Fix regression where `*time.Time` value would be set to empty and not be sent - to decode hooks properly [GH-232] - -## 1.4.0 - -* A new decode hook type `DecodeHookFuncValue` has been added that has - access to the full values. [GH-183] -* Squash is now supported with embedded fields that are struct pointers [GH-205] -* Empty strings will convert to 0 for all numeric types when weakly decoding [GH-206] - -## 1.3.3 - -* Decoding maps from maps creates a settable value for decode hooks [GH-203] - -## 1.3.2 - -* Decode into interface type with a struct value is supported [GH-187] - -## 1.3.1 - -* Squash should only squash embedded structs. [GH-194] - -## 1.3.0 - -* Added `",omitempty"` support. This will ignore zero values in the source - structure when encoding. [GH-145] - -## 1.2.3 - -* Fix duplicate entries in Keys list with pointer values. [GH-185] - -## 1.2.2 - -* Do not add unsettable (unexported) values to the unused metadata key - or "remain" value. 
[GH-150] - -## 1.2.1 - -* Go modules checksum mismatch fix - -## 1.2.0 - -* Added support to capture unused values in a field using the `",remain"` value - in the mapstructure tag. There is an example to showcase usage. -* Added `DecoderConfig` option to always squash embedded structs -* `json.Number` can decode into `uint` types -* Empty slices are preserved and not replaced with nil slices -* Fix panic that can occur in when decoding a map into a nil slice of structs -* Improved package documentation for godoc - -## 1.1.2 - -* Fix error when decode hook decodes interface implementation into interface - type. [GH-140] - -## 1.1.1 - -* Fix panic that can happen in `decodePtr` - -## 1.1.0 - -* Added `StringToIPHookFunc` to convert `string` to `net.IP` and `net.IPNet` [GH-133] -* Support struct to struct decoding [GH-137] -* If source map value is nil, then destination map value is nil (instead of empty) -* If source slice value is nil, then destination slice value is nil (instead of empty) -* If source pointer is nil, then destination pointer is set to nil (instead of - allocated zero value of type) - -## 1.0.0 - -* Initial tagged stable release. diff --git a/vendor/github.com/go-viper/mapstructure/v2/LICENSE b/vendor/github.com/go-viper/mapstructure/v2/LICENSE deleted file mode 100644 index f9c841a51..000000000 --- a/vendor/github.com/go-viper/mapstructure/v2/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -The MIT License (MIT) - -Copyright (c) 2013 Mitchell Hashimoto - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. diff --git a/vendor/github.com/go-viper/mapstructure/v2/README.md b/vendor/github.com/go-viper/mapstructure/v2/README.md deleted file mode 100644 index 2b28db894..000000000 --- a/vendor/github.com/go-viper/mapstructure/v2/README.md +++ /dev/null @@ -1,59 +0,0 @@ -# mapstructure - -[![GitHub Workflow Status](https://img.shields.io/github/actions/workflow/status/go-viper/mapstructure/ci.yaml?branch=main&style=flat-square)](https://github.com/go-viper/mapstructure/actions?query=workflow%3ACI) -[![go.dev reference](https://img.shields.io/badge/go.dev-reference-007d9c?logo=go&logoColor=white&style=flat-square)](https://pkg.go.dev/mod/github.com/go-viper/mapstructure/v2) -![Go Version](https://img.shields.io/badge/go%20version-%3E=1.18-61CFDD.svg?style=flat-square) - -mapstructure is a Go library for decoding generic map values to structures -and vice versa, while providing helpful error handling. - -This library is most useful when decoding values from some data stream (JSON, -Gob, etc.) 
where you don't _quite_ know the structure of the underlying data -until you read a part of it. You can therefore read a `map[string]interface{}` -and use this library to decode it into the proper underlying native Go -structure. - -## Installation - -```shell -go get github.com/go-viper/mapstructure/v2 -``` - -## Usage & Example - -For usage and examples see the [documentation](https://pkg.go.dev/mod/github.com/go-viper/mapstructure/v2). - -The `Decode` function has examples associated with it there. - -## But Why?! - -Go offers fantastic standard libraries for decoding formats such as JSON. -The standard method is to have a struct pre-created, and populate that struct -from the bytes of the encoded format. This is great, but the problem is if -you have configuration or an encoding that changes slightly depending on -specific fields. For example, consider this JSON: - -```json -{ - "type": "person", - "name": "Mitchell" -} -``` - -Perhaps we can't populate a specific structure without first reading -the "type" field from the JSON. We could always do two passes over the -decoding of the JSON (reading the "type" first, and the rest later). -However, it is much simpler to just decode this into a `map[string]interface{}` -structure, read the "type" key, then use something like this library -to decode it into the proper structure. - -## Credits - -Mapstructure was originally created by [@mitchellh](https://github.com/mitchellh). -This is a maintained fork of the original library. - -Read more about the reasons for the fork [here](https://github.com/mitchellh/mapstructure/issues/349). - -## License - -The project is licensed under the [MIT License](LICENSE). diff --git a/vendor/github.com/go-viper/mapstructure/v2/decode_hooks.go b/vendor/github.com/go-viper/mapstructure/v2/decode_hooks.go deleted file mode 100644 index 840d6adce..000000000 --- a/vendor/github.com/go-viper/mapstructure/v2/decode_hooks.go +++ /dev/null @@ -1,334 +0,0 @@ -package mapstructure - -import ( - "encoding" - "errors" - "fmt" - "net" - "net/netip" - "reflect" - "strconv" - "strings" - "time" -) - -// typedDecodeHook takes a raw DecodeHookFunc (an interface{}) and turns -// it into the proper DecodeHookFunc type, such as DecodeHookFuncType. -func typedDecodeHook(h DecodeHookFunc) DecodeHookFunc { - // Create variables here so we can reference them with the reflect pkg - var f1 DecodeHookFuncType - var f2 DecodeHookFuncKind - var f3 DecodeHookFuncValue - - // Fill in the variables into this interface and the rest is done - // automatically using the reflect package. - potential := []interface{}{f1, f2, f3} - - v := reflect.ValueOf(h) - vt := v.Type() - for _, raw := range potential { - pt := reflect.ValueOf(raw).Type() - if vt.ConvertibleTo(pt) { - return v.Convert(pt).Interface() - } - } - - return nil -} - -// DecodeHookExec executes the given decode hook. This should be used -// since it'll naturally degrade to the older backwards compatible DecodeHookFunc -// that took reflect.Kind instead of reflect.Type. 
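// A hedged sketch of calling DecodeHookExec directly with a legacy Kind-based
// hook; the hook body and the sample values are invented for illustration.
package main

import (
	"fmt"
	"reflect"
	"strings"

	"github.com/go-viper/mapstructure/v2"
)

func main() {
	// Legacy-style hook: upper-case strings that are headed for string targets.
	hook := func(from reflect.Kind, to reflect.Kind, data interface{}) (interface{}, error) {
		if from == reflect.String && to == reflect.String {
			return strings.ToUpper(data.(string)), nil
		}
		return data, nil
	}

	// DecodeHookExec adapts the raw func to DecodeHookFuncKind before calling it.
	out, err := mapstructure.DecodeHookExec(hook, reflect.ValueOf("hello"), reflect.ValueOf(""))
	fmt.Println(out, err) // => HELLO <nil>
}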
-func DecodeHookExec( - raw DecodeHookFunc, - from reflect.Value, to reflect.Value, -) (interface{}, error) { - switch f := typedDecodeHook(raw).(type) { - case DecodeHookFuncType: - return f(from.Type(), to.Type(), from.Interface()) - case DecodeHookFuncKind: - return f(from.Kind(), to.Kind(), from.Interface()) - case DecodeHookFuncValue: - return f(from, to) - default: - return nil, errors.New("invalid decode hook signature") - } -} - -// ComposeDecodeHookFunc creates a single DecodeHookFunc that -// automatically composes multiple DecodeHookFuncs. -// -// The composed funcs are called in order, with the result of the -// previous transformation. -func ComposeDecodeHookFunc(fs ...DecodeHookFunc) DecodeHookFunc { - return func(f reflect.Value, t reflect.Value) (interface{}, error) { - var err error - data := f.Interface() - - newFrom := f - for _, f1 := range fs { - data, err = DecodeHookExec(f1, newFrom, t) - if err != nil { - return nil, err - } - newFrom = reflect.ValueOf(data) - } - - return data, nil - } -} - -// OrComposeDecodeHookFunc executes all input hook functions until one of them returns no error. In that case its value is returned. -// If all hooks return an error, OrComposeDecodeHookFunc returns an error concatenating all error messages. -func OrComposeDecodeHookFunc(ff ...DecodeHookFunc) DecodeHookFunc { - return func(a, b reflect.Value) (interface{}, error) { - var allErrs string - var out interface{} - var err error - - for _, f := range ff { - out, err = DecodeHookExec(f, a, b) - if err != nil { - allErrs += err.Error() + "\n" - continue - } - - return out, nil - } - - return nil, errors.New(allErrs) - } -} - -// StringToSliceHookFunc returns a DecodeHookFunc that converts -// string to []string by splitting on the given sep. -func StringToSliceHookFunc(sep string) DecodeHookFunc { - return func( - f reflect.Type, - t reflect.Type, - data interface{}, - ) (interface{}, error) { - if f.Kind() != reflect.String { - return data, nil - } - if t != reflect.SliceOf(f) { - return data, nil - } - - raw := data.(string) - if raw == "" { - return []string{}, nil - } - - return strings.Split(raw, sep), nil - } -} - -// StringToTimeDurationHookFunc returns a DecodeHookFunc that converts -// strings to time.Duration. 
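// A sketch of composing the string-to-slice and string-to-duration hooks in a
// DecoderConfig; the Config struct and the input map are invented for illustration.
package main

import (
	"fmt"
	"time"

	"github.com/go-viper/mapstructure/v2"
)

type Config struct {
	Tags    []string      `mapstructure:"tags"`
	Timeout time.Duration `mapstructure:"timeout"`
}

func main() {
	input := map[string]interface{}{
		"tags":    "a,b,c",
		"timeout": "90s",
	}

	var cfg Config
	dec, err := mapstructure.NewDecoder(&mapstructure.DecoderConfig{
		Result: &cfg,
		DecodeHook: mapstructure.ComposeDecodeHookFunc(
			mapstructure.StringToSliceHookFunc(","),
			mapstructure.StringToTimeDurationHookFunc(),
		),
	})
	if err != nil {
		panic(err)
	}
	if err := dec.Decode(input); err != nil {
		panic(err)
	}
	fmt.Printf("%+v\n", cfg) // => {Tags:[a b c] Timeout:1m30s}
}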
-func StringToTimeDurationHookFunc() DecodeHookFunc { - return func( - f reflect.Type, - t reflect.Type, - data interface{}, - ) (interface{}, error) { - if f.Kind() != reflect.String { - return data, nil - } - if t != reflect.TypeOf(time.Duration(5)) { - return data, nil - } - - // Convert it by parsing - return time.ParseDuration(data.(string)) - } -} - -// StringToIPHookFunc returns a DecodeHookFunc that converts -// strings to net.IP -func StringToIPHookFunc() DecodeHookFunc { - return func( - f reflect.Type, - t reflect.Type, - data interface{}, - ) (interface{}, error) { - if f.Kind() != reflect.String { - return data, nil - } - if t != reflect.TypeOf(net.IP{}) { - return data, nil - } - - // Convert it by parsing - ip := net.ParseIP(data.(string)) - if ip == nil { - return net.IP{}, fmt.Errorf("failed parsing ip %v", data) - } - - return ip, nil - } -} - -// StringToIPNetHookFunc returns a DecodeHookFunc that converts -// strings to net.IPNet -func StringToIPNetHookFunc() DecodeHookFunc { - return func( - f reflect.Type, - t reflect.Type, - data interface{}, - ) (interface{}, error) { - if f.Kind() != reflect.String { - return data, nil - } - if t != reflect.TypeOf(net.IPNet{}) { - return data, nil - } - - // Convert it by parsing - _, net, err := net.ParseCIDR(data.(string)) - return net, err - } -} - -// StringToTimeHookFunc returns a DecodeHookFunc that converts -// strings to time.Time. -func StringToTimeHookFunc(layout string) DecodeHookFunc { - return func( - f reflect.Type, - t reflect.Type, - data interface{}, - ) (interface{}, error) { - if f.Kind() != reflect.String { - return data, nil - } - if t != reflect.TypeOf(time.Time{}) { - return data, nil - } - - // Convert it by parsing - return time.Parse(layout, data.(string)) - } -} - -// WeaklyTypedHook is a DecodeHookFunc which adds support for weak typing to -// the decoder. -// -// Note that this is significantly different from the WeaklyTypedInput option -// of the DecoderConfig. 
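// A sketch of WeaklyTypedHook used as a DecodeHook: it stringifies bools, ints
// and friends on their way into string fields. The Flags struct and the input
// map are invented for illustration.
package main

import (
	"fmt"

	"github.com/go-viper/mapstructure/v2"
)

type Flags struct {
	Verbose string `mapstructure:"verbose"`
	Level   string `mapstructure:"level"`
}

func main() {
	input := map[string]interface{}{
		"verbose": true, // hook converts to "1"
		"level":   3,    // hook converts to "3"
	}

	var f Flags
	dec, err := mapstructure.NewDecoder(&mapstructure.DecoderConfig{
		Result:     &f,
		DecodeHook: mapstructure.WeaklyTypedHook,
	})
	if err != nil {
		panic(err)
	}
	if err := dec.Decode(input); err != nil {
		panic(err)
	}
	fmt.Printf("%+v\n", f) // => {Verbose:1 Level:3}
}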
-func WeaklyTypedHook( - f reflect.Kind, - t reflect.Kind, - data interface{}, -) (interface{}, error) { - dataVal := reflect.ValueOf(data) - switch t { - case reflect.String: - switch f { - case reflect.Bool: - if dataVal.Bool() { - return "1", nil - } - return "0", nil - case reflect.Float32: - return strconv.FormatFloat(dataVal.Float(), 'f', -1, 64), nil - case reflect.Int: - return strconv.FormatInt(dataVal.Int(), 10), nil - case reflect.Slice: - dataType := dataVal.Type() - elemKind := dataType.Elem().Kind() - if elemKind == reflect.Uint8 { - return string(dataVal.Interface().([]uint8)), nil - } - case reflect.Uint: - return strconv.FormatUint(dataVal.Uint(), 10), nil - } - } - - return data, nil -} - -func RecursiveStructToMapHookFunc() DecodeHookFunc { - return func(f reflect.Value, t reflect.Value) (interface{}, error) { - if f.Kind() != reflect.Struct { - return f.Interface(), nil - } - - var i interface{} = struct{}{} - if t.Type() != reflect.TypeOf(&i).Elem() { - return f.Interface(), nil - } - - m := make(map[string]interface{}) - t.Set(reflect.ValueOf(m)) - - return f.Interface(), nil - } -} - -// TextUnmarshallerHookFunc returns a DecodeHookFunc that applies -// strings to the UnmarshalText function, when the target type -// implements the encoding.TextUnmarshaler interface -func TextUnmarshallerHookFunc() DecodeHookFuncType { - return func( - f reflect.Type, - t reflect.Type, - data interface{}, - ) (interface{}, error) { - if f.Kind() != reflect.String { - return data, nil - } - result := reflect.New(t).Interface() - unmarshaller, ok := result.(encoding.TextUnmarshaler) - if !ok { - return data, nil - } - str, ok := data.(string) - if !ok { - str = reflect.Indirect(reflect.ValueOf(&data)).Elem().String() - } - if err := unmarshaller.UnmarshalText([]byte(str)); err != nil { - return nil, err - } - return result, nil - } -} - -// StringToNetIPAddrHookFunc returns a DecodeHookFunc that converts -// strings to netip.Addr. -func StringToNetIPAddrHookFunc() DecodeHookFunc { - return func( - f reflect.Type, - t reflect.Type, - data interface{}, - ) (interface{}, error) { - if f.Kind() != reflect.String { - return data, nil - } - if t != reflect.TypeOf(netip.Addr{}) { - return data, nil - } - - // Convert it by parsing - return netip.ParseAddr(data.(string)) - } -} - -// StringToNetIPAddrPortHookFunc returns a DecodeHookFunc that converts -// strings to netip.AddrPort. -func StringToNetIPAddrPortHookFunc() DecodeHookFunc { - return func( - f reflect.Type, - t reflect.Type, - data interface{}, - ) (interface{}, error) { - if f.Kind() != reflect.String { - return data, nil - } - if t != reflect.TypeOf(netip.AddrPort{}) { - return data, nil - } - - // Convert it by parsing - return netip.ParseAddrPort(data.(string)) - } -} diff --git a/vendor/github.com/go-viper/mapstructure/v2/error.go b/vendor/github.com/go-viper/mapstructure/v2/error.go deleted file mode 100644 index 47a99e5af..000000000 --- a/vendor/github.com/go-viper/mapstructure/v2/error.go +++ /dev/null @@ -1,50 +0,0 @@ -package mapstructure - -import ( - "errors" - "fmt" - "sort" - "strings" -) - -// Error implements the error interface and can represents multiple -// errors that occur in the course of a single decode. 
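// A sketch of inspecting the aggregated decode error after a failed Decode;
// the Server struct and the mismatched input are invented for illustration.
package main

import (
	"errors"
	"fmt"

	"github.com/go-viper/mapstructure/v2"
)

type Server struct {
	Port int  `mapstructure:"port"`
	TLS  bool `mapstructure:"tls"`
}

func main() {
	// Both values have the wrong type, so decoding should report two errors.
	input := map[string]interface{}{
		"port": "not-a-number",
		"tls":  "definitely",
	}

	var s Server
	err := mapstructure.Decode(input, &s)

	var decErr *mapstructure.Error
	if errors.As(err, &decErr) {
		for _, msg := range decErr.Errors {
			fmt.Println(msg) // one line per field that failed
		}
	} else if err != nil {
		fmt.Println(err)
	}
}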
-type Error struct { - Errors []string -} - -func (e *Error) Error() string { - points := make([]string, len(e.Errors)) - for i, err := range e.Errors { - points[i] = fmt.Sprintf("* %s", err) - } - - sort.Strings(points) - return fmt.Sprintf( - "%d error(s) decoding:\n\n%s", - len(e.Errors), strings.Join(points, "\n")) -} - -// WrappedErrors implements the errwrap.Wrapper interface to make this -// return value more useful with the errwrap and go-multierror libraries. -func (e *Error) WrappedErrors() []error { - if e == nil { - return nil - } - - result := make([]error, len(e.Errors)) - for i, e := range e.Errors { - result[i] = errors.New(e) - } - - return result -} - -func appendErrors(errors []string, err error) []string { - switch e := err.(type) { - case *Error: - return append(errors, e.Errors...) - default: - return append(errors, e.Error()) - } -} diff --git a/vendor/github.com/go-viper/mapstructure/v2/flake.lock b/vendor/github.com/go-viper/mapstructure/v2/flake.lock deleted file mode 100644 index 5a387d329..000000000 --- a/vendor/github.com/go-viper/mapstructure/v2/flake.lock +++ /dev/null @@ -1,273 +0,0 @@ -{ - "nodes": { - "devenv": { - "inputs": { - "flake-compat": "flake-compat", - "nix": "nix", - "nixpkgs": "nixpkgs", - "pre-commit-hooks": "pre-commit-hooks" - }, - "locked": { - "lastModified": 1702549996, - "narHash": "sha256-mEN+8gjWUXRxBCcixeth+jlDNuzxbpFwZNOEc4K22vw=", - "owner": "cachix", - "repo": "devenv", - "rev": "e681a99ffe2d2882f413a5d771129223c838ddce", - "type": "github" - }, - "original": { - "owner": "cachix", - "repo": "devenv", - "type": "github" - } - }, - "flake-compat": { - "flake": false, - "locked": { - "lastModified": 1673956053, - "narHash": "sha256-4gtG9iQuiKITOjNQQeQIpoIB6b16fm+504Ch3sNKLd8=", - "owner": "edolstra", - "repo": "flake-compat", - "rev": "35bb57c0c8d8b62bbfd284272c928ceb64ddbde9", - "type": "github" - }, - "original": { - "owner": "edolstra", - "repo": "flake-compat", - "type": "github" - } - }, - "flake-parts": { - "inputs": { - "nixpkgs-lib": "nixpkgs-lib" - }, - "locked": { - "lastModified": 1701473968, - "narHash": "sha256-YcVE5emp1qQ8ieHUnxt1wCZCC3ZfAS+SRRWZ2TMda7E=", - "owner": "hercules-ci", - "repo": "flake-parts", - "rev": "34fed993f1674c8d06d58b37ce1e0fe5eebcb9f5", - "type": "github" - }, - "original": { - "owner": "hercules-ci", - "repo": "flake-parts", - "type": "github" - } - }, - "flake-utils": { - "inputs": { - "systems": "systems" - }, - "locked": { - "lastModified": 1685518550, - "narHash": "sha256-o2d0KcvaXzTrPRIo0kOLV0/QXHhDQ5DTi+OxcjO8xqY=", - "owner": "numtide", - "repo": "flake-utils", - "rev": "a1720a10a6cfe8234c0e93907ffe81be440f4cef", - "type": "github" - }, - "original": { - "owner": "numtide", - "repo": "flake-utils", - "type": "github" - } - }, - "gitignore": { - "inputs": { - "nixpkgs": [ - "devenv", - "pre-commit-hooks", - "nixpkgs" - ] - }, - "locked": { - "lastModified": 1660459072, - "narHash": "sha256-8DFJjXG8zqoONA1vXtgeKXy68KdJL5UaXR8NtVMUbx8=", - "owner": "hercules-ci", - "repo": "gitignore.nix", - "rev": "a20de23b925fd8264fd7fad6454652e142fd7f73", - "type": "github" - }, - "original": { - "owner": "hercules-ci", - "repo": "gitignore.nix", - "type": "github" - } - }, - "lowdown-src": { - "flake": false, - "locked": { - "lastModified": 1633514407, - "narHash": "sha256-Dw32tiMjdK9t3ETl5fzGrutQTzh2rufgZV4A/BbxuD4=", - "owner": "kristapsdz", - "repo": "lowdown", - "rev": "d2c2b44ff6c27b936ec27358a2653caaef8f73b8", - "type": "github" - }, - "original": { - "owner": "kristapsdz", - "repo": "lowdown", - 
"type": "github" - } - }, - "nix": { - "inputs": { - "lowdown-src": "lowdown-src", - "nixpkgs": [ - "devenv", - "nixpkgs" - ], - "nixpkgs-regression": "nixpkgs-regression" - }, - "locked": { - "lastModified": 1676545802, - "narHash": "sha256-EK4rZ+Hd5hsvXnzSzk2ikhStJnD63odF7SzsQ8CuSPU=", - "owner": "domenkozar", - "repo": "nix", - "rev": "7c91803598ffbcfe4a55c44ac6d49b2cf07a527f", - "type": "github" - }, - "original": { - "owner": "domenkozar", - "ref": "relaxed-flakes", - "repo": "nix", - "type": "github" - } - }, - "nixpkgs": { - "locked": { - "lastModified": 1678875422, - "narHash": "sha256-T3o6NcQPwXjxJMn2shz86Chch4ljXgZn746c2caGxd8=", - "owner": "NixOS", - "repo": "nixpkgs", - "rev": "126f49a01de5b7e35a43fd43f891ecf6d3a51459", - "type": "github" - }, - "original": { - "owner": "NixOS", - "ref": "nixpkgs-unstable", - "repo": "nixpkgs", - "type": "github" - } - }, - "nixpkgs-lib": { - "locked": { - "dir": "lib", - "lastModified": 1701253981, - "narHash": "sha256-ztaDIyZ7HrTAfEEUt9AtTDNoCYxUdSd6NrRHaYOIxtk=", - "owner": "NixOS", - "repo": "nixpkgs", - "rev": "e92039b55bcd58469325ded85d4f58dd5a4eaf58", - "type": "github" - }, - "original": { - "dir": "lib", - "owner": "NixOS", - "ref": "nixos-unstable", - "repo": "nixpkgs", - "type": "github" - } - }, - "nixpkgs-regression": { - "locked": { - "lastModified": 1643052045, - "narHash": "sha256-uGJ0VXIhWKGXxkeNnq4TvV3CIOkUJ3PAoLZ3HMzNVMw=", - "owner": "NixOS", - "repo": "nixpkgs", - "rev": "215d4d0fd80ca5163643b03a33fde804a29cc1e2", - "type": "github" - }, - "original": { - "owner": "NixOS", - "repo": "nixpkgs", - "rev": "215d4d0fd80ca5163643b03a33fde804a29cc1e2", - "type": "github" - } - }, - "nixpkgs-stable": { - "locked": { - "lastModified": 1685801374, - "narHash": "sha256-otaSUoFEMM+LjBI1XL/xGB5ao6IwnZOXc47qhIgJe8U=", - "owner": "NixOS", - "repo": "nixpkgs", - "rev": "c37ca420157f4abc31e26f436c1145f8951ff373", - "type": "github" - }, - "original": { - "owner": "NixOS", - "ref": "nixos-23.05", - "repo": "nixpkgs", - "type": "github" - } - }, - "nixpkgs_2": { - "locked": { - "lastModified": 1702539185, - "narHash": "sha256-KnIRG5NMdLIpEkZTnN5zovNYc0hhXjAgv6pfd5Z4c7U=", - "owner": "NixOS", - "repo": "nixpkgs", - "rev": "aa9d4729cbc99dabacb50e3994dcefb3ea0f7447", - "type": "github" - }, - "original": { - "owner": "NixOS", - "ref": "nixpkgs-unstable", - "repo": "nixpkgs", - "type": "github" - } - }, - "pre-commit-hooks": { - "inputs": { - "flake-compat": [ - "devenv", - "flake-compat" - ], - "flake-utils": "flake-utils", - "gitignore": "gitignore", - "nixpkgs": [ - "devenv", - "nixpkgs" - ], - "nixpkgs-stable": "nixpkgs-stable" - }, - "locked": { - "lastModified": 1688056373, - "narHash": "sha256-2+SDlNRTKsgo3LBRiMUcoEUb6sDViRNQhzJquZ4koOI=", - "owner": "cachix", - "repo": "pre-commit-hooks.nix", - "rev": "5843cf069272d92b60c3ed9e55b7a8989c01d4c7", - "type": "github" - }, - "original": { - "owner": "cachix", - "repo": "pre-commit-hooks.nix", - "type": "github" - } - }, - "root": { - "inputs": { - "devenv": "devenv", - "flake-parts": "flake-parts", - "nixpkgs": "nixpkgs_2" - } - }, - "systems": { - "locked": { - "lastModified": 1681028828, - "narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=", - "owner": "nix-systems", - "repo": "default", - "rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e", - "type": "github" - }, - "original": { - "owner": "nix-systems", - "repo": "default", - "type": "github" - } - } - }, - "root": "root", - "version": 7 -} diff --git a/vendor/github.com/go-viper/mapstructure/v2/flake.nix 
b/vendor/github.com/go-viper/mapstructure/v2/flake.nix deleted file mode 100644 index 4ed0f5331..000000000 --- a/vendor/github.com/go-viper/mapstructure/v2/flake.nix +++ /dev/null @@ -1,39 +0,0 @@ -{ - inputs = { - nixpkgs.url = "github:NixOS/nixpkgs/nixpkgs-unstable"; - flake-parts.url = "github:hercules-ci/flake-parts"; - devenv.url = "github:cachix/devenv"; - }; - - outputs = inputs@{ flake-parts, ... }: - flake-parts.lib.mkFlake { inherit inputs; } { - imports = [ - inputs.devenv.flakeModule - ]; - - systems = [ "x86_64-linux" "x86_64-darwin" "aarch64-darwin" ]; - - perSystem = { config, self', inputs', pkgs, system, ... }: rec { - devenv.shells = { - default = { - languages = { - go.enable = true; - }; - - pre-commit.hooks = { - nixpkgs-fmt.enable = true; - }; - - packages = with pkgs; [ - golangci-lint - ]; - - # https://github.com/cachix/devenv/issues/528#issuecomment-1556108767 - containers = pkgs.lib.mkForce { }; - }; - - ci = devenv.shells.default; - }; - }; - }; -} diff --git a/vendor/github.com/go-viper/mapstructure/v2/mapstructure.go b/vendor/github.com/go-viper/mapstructure/v2/mapstructure.go deleted file mode 100644 index 27f21bc72..000000000 --- a/vendor/github.com/go-viper/mapstructure/v2/mapstructure.go +++ /dev/null @@ -1,1562 +0,0 @@ -// Package mapstructure exposes functionality to convert one arbitrary -// Go type into another, typically to convert a map[string]interface{} -// into a native Go structure. -// -// The Go structure can be arbitrarily complex, containing slices, -// other structs, etc. and the decoder will properly decode nested -// maps and so on into the proper structures in the native Go struct. -// See the examples to see what the decoder is capable of. -// -// The simplest function to start with is Decode. -// -// # Field Tags -// -// When decoding to a struct, mapstructure will use the field name by -// default to perform the mapping. For example, if a struct has a field -// "Username" then mapstructure will look for a key in the source value -// of "username" (case insensitive). -// -// type User struct { -// Username string -// } -// -// You can change the behavior of mapstructure by using struct tags. -// The default struct tag that mapstructure looks for is "mapstructure" -// but you can customize it using DecoderConfig. -// -// # Renaming Fields -// -// To rename the key that mapstructure looks for, use the "mapstructure" -// tag and set a value directly. For example, to change the "username" example -// above to "user": -// -// type User struct { -// Username string `mapstructure:"user"` -// } -// -// # Embedded Structs and Squashing -// -// Embedded structs are treated as if they're another field with that name. -// By default, the two structs below are equivalent when decoding with -// mapstructure: -// -// type Person struct { -// Name string -// } -// -// type Friend struct { -// Person -// } -// -// type Friend struct { -// Person Person -// } -// -// This would require an input that looks like below: -// -// map[string]interface{}{ -// "person": map[string]interface{}{"name": "alice"}, -// } -// -// If your "person" value is NOT nested, then you can append ",squash" to -// your tag value and mapstructure will treat it as if the embedded struct -// were part of the struct directly. 
Example: -// -// type Friend struct { -// Person `mapstructure:",squash"` -// } -// -// Now the following input would be accepted: -// -// map[string]interface{}{ -// "name": "alice", -// } -// -// When decoding from a struct to a map, the squash tag squashes the struct -// fields into a single map. Using the example structs from above: -// -// Friend{Person: Person{Name: "alice"}} -// -// Will be decoded into a map: -// -// map[string]interface{}{ -// "name": "alice", -// } -// -// DecoderConfig has a field that changes the behavior of mapstructure -// to always squash embedded structs. -// -// # Remainder Values -// -// If there are any unmapped keys in the source value, mapstructure by -// default will silently ignore them. You can error by setting ErrorUnused -// in DecoderConfig. If you're using Metadata you can also maintain a slice -// of the unused keys. -// -// You can also use the ",remain" suffix on your tag to collect all unused -// values in a map. The field with this tag MUST be a map type and should -// probably be a "map[string]interface{}" or "map[interface{}]interface{}". -// See example below: -// -// type Friend struct { -// Name string -// Other map[string]interface{} `mapstructure:",remain"` -// } -// -// Given the input below, Other would be populated with the other -// values that weren't used (everything but "name"): -// -// map[string]interface{}{ -// "name": "bob", -// "address": "123 Maple St.", -// } -// -// # Omit Empty Values -// -// When decoding from a struct to any other value, you may use the -// ",omitempty" suffix on your tag to omit that value if it equates to -// the zero value. The zero value of all types is specified in the Go -// specification. -// -// For example, the zero type of a numeric type is zero ("0"). If the struct -// field value is zero and a numeric type, the field is empty, and it won't -// be encoded into the destination type. -// -// type Source struct { -// Age int `mapstructure:",omitempty"` -// } -// -// # Unexported fields -// -// Since unexported (private) struct fields cannot be set outside the package -// where they are defined, the decoder will simply skip them. -// -// For this output type definition: -// -// type Exported struct { -// private string // this unexported field will be skipped -// Public string -// } -// -// Using this map as input: -// -// map[string]interface{}{ -// "private": "I will be ignored", -// "Public": "I made it through!", -// } -// -// The following struct will be decoded: -// -// type Exported struct { -// private: "" // field is left with an empty string (zero value) -// Public: "I made it through!" -// } -// -// # Other Configuration -// -// mapstructure is highly configurable. See the DecoderConfig struct -// for other features and options that are supported. -package mapstructure - -import ( - "encoding/json" - "errors" - "fmt" - "reflect" - "sort" - "strconv" - "strings" -) - -// DecodeHookFunc is the callback function that can be used for -// data transformations. See "DecodeHook" in the DecoderConfig -// struct. -// -// The type must be one of DecodeHookFuncType, DecodeHookFuncKind, or -// DecodeHookFuncValue. -// Values are a superset of Types (Values can return types), and Types are a -// superset of Kinds (Types can return Kinds) and are generally a richer thing -// to use, but Kinds are simpler if you only need those. 
-// -// The reason DecodeHookFunc is multi-typed is for backwards compatibility: -// we started with Kinds and then realized Types were the better solution, -// but have a promise to not break backwards compat so we now support -// both. -type DecodeHookFunc interface{} - -// DecodeHookFuncType is a DecodeHookFunc which has complete information about -// the source and target types. -type DecodeHookFuncType func(reflect.Type, reflect.Type, interface{}) (interface{}, error) - -// DecodeHookFuncKind is a DecodeHookFunc which knows only the Kinds of the -// source and target types. -type DecodeHookFuncKind func(reflect.Kind, reflect.Kind, interface{}) (interface{}, error) - -// DecodeHookFuncValue is a DecodeHookFunc which has complete access to both the source and target -// values. -type DecodeHookFuncValue func(from reflect.Value, to reflect.Value) (interface{}, error) - -// DecoderConfig is the configuration that is used to create a new decoder -// and allows customization of various aspects of decoding. -type DecoderConfig struct { - // DecodeHook, if set, will be called before any decoding and any - // type conversion (if WeaklyTypedInput is on). This lets you modify - // the values before they're set down onto the resulting struct. The - // DecodeHook is called for every map and value in the input. This means - // that if a struct has embedded fields with squash tags the decode hook - // is called only once with all of the input data, not once for each - // embedded struct. - // - // If an error is returned, the entire decode will fail with that error. - DecodeHook DecodeHookFunc - - // If ErrorUnused is true, then it is an error for there to exist - // keys in the original map that were unused in the decoding process - // (extra keys). - ErrorUnused bool - - // If ErrorUnset is true, then it is an error for there to exist - // fields in the result that were not set in the decoding process - // (extra fields). This only applies to decoding to a struct. This - // will affect all nested structs as well. - ErrorUnset bool - - // ZeroFields, if set to true, will zero fields before writing them. - // For example, a map will be emptied before decoded values are put in - // it. If this is false, a map will be merged. - ZeroFields bool - - // If WeaklyTypedInput is true, the decoder will make the following - // "weak" conversions: - // - // - bools to string (true = "1", false = "0") - // - numbers to string (base 10) - // - bools to int/uint (true = 1, false = 0) - // - strings to int/uint (base implied by prefix) - // - int to bool (true if value != 0) - // - string to bool (accepts: 1, t, T, TRUE, true, True, 0, f, F, - // FALSE, false, False. Anything else is an error) - // - empty array = empty map and vice versa - // - negative numbers to overflowed uint values (base 10) - // - slice of maps to a merged map - // - single values are converted to slices if required. Each - // element is weakly decoded. For example: "4" can become []int{4} - // if the target type is an int slice. - // - WeaklyTypedInput bool - - // Squash will squash embedded structs. A squash tag may also be - // added to an individual struct field using a tag. For example: - // - // type Parent struct { - // Child `mapstructure:",squash"` - // } - Squash bool - - // Metadata is the struct that will contain extra metadata about - // the decoding. If this is nil, then no metadata will be tracked. - Metadata *Metadata - - // Result is a pointer to the struct that will contain the decoded - // value. 
- Result interface{} - - // The tag name that mapstructure reads for field names. This - // defaults to "mapstructure" - TagName string - - // IgnoreUntaggedFields ignores all struct fields without explicit - // TagName, comparable to `mapstructure:"-"` as default behaviour. - IgnoreUntaggedFields bool - - // MatchName is the function used to match the map key to the struct - // field name or tag. Defaults to `strings.EqualFold`. This can be used - // to implement case-sensitive tag values, support snake casing, etc. - MatchName func(mapKey, fieldName string) bool -} - -// A Decoder takes a raw interface value and turns it into structured -// data, keeping track of rich error information along the way in case -// anything goes wrong. Unlike the basic top-level Decode method, you can -// more finely control how the Decoder behaves using the DecoderConfig -// structure. The top-level Decode method is just a convenience that sets -// up the most basic Decoder. -type Decoder struct { - config *DecoderConfig -} - -// Metadata contains information about decoding a structure that -// is tedious or difficult to get otherwise. -type Metadata struct { - // Keys are the keys of the structure which were successfully decoded - Keys []string - - // Unused is a slice of keys that were found in the raw value but - // weren't decoded since there was no matching field in the result interface - Unused []string - - // Unset is a slice of field names that were found in the result interface - // but weren't set in the decoding process since there was no matching value - // in the input - Unset []string -} - -// Decode takes an input structure and uses reflection to translate it to -// the output structure. output must be a pointer to a map or struct. -func Decode(input interface{}, output interface{}) error { - config := &DecoderConfig{ - Metadata: nil, - Result: output, - } - - decoder, err := NewDecoder(config) - if err != nil { - return err - } - - return decoder.Decode(input) -} - -// WeakDecode is the same as Decode but is shorthand to enable -// WeaklyTypedInput. See DecoderConfig for more info. -func WeakDecode(input, output interface{}) error { - config := &DecoderConfig{ - Metadata: nil, - Result: output, - WeaklyTypedInput: true, - } - - decoder, err := NewDecoder(config) - if err != nil { - return err - } - - return decoder.Decode(input) -} - -// DecodeMetadata is the same as Decode, but is shorthand to -// enable metadata collection. See DecoderConfig for more info. -func DecodeMetadata(input interface{}, output interface{}, metadata *Metadata) error { - config := &DecoderConfig{ - Metadata: metadata, - Result: output, - } - - decoder, err := NewDecoder(config) - if err != nil { - return err - } - - return decoder.Decode(input) -} - -// WeakDecodeMetadata is the same as Decode, but is shorthand to -// enable both WeaklyTypedInput and metadata collection. See -// DecoderConfig for more info. -func WeakDecodeMetadata(input interface{}, output interface{}, metadata *Metadata) error { - config := &DecoderConfig{ - Metadata: metadata, - Result: output, - WeaklyTypedInput: true, - } - - decoder, err := NewDecoder(config) - if err != nil { - return err - } - - return decoder.Decode(input) -} - -// NewDecoder returns a new decoder for the given configuration. Once -// a decoder has been returned, the same configuration must not be used -// again. 
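// A sketch tying DecoderConfig, Metadata and the ",squash" tag together; the
// Friend/Person types echo the package documentation above and the input map
// is invented for illustration.
package main

import (
	"fmt"

	"github.com/go-viper/mapstructure/v2"
)

type Person struct {
	Name string `mapstructure:"name"`
}

type Friend struct {
	Person `mapstructure:",squash"`
	Email  string `mapstructure:"email"`
}

func main() {
	input := map[string]interface{}{
		"name":  "alice",
		"email": "alice@example.com",
		"extra": "never mapped",
	}

	var f Friend
	md := &mapstructure.Metadata{}
	dec, err := mapstructure.NewDecoder(&mapstructure.DecoderConfig{
		Result:   &f,
		Metadata: md,
	})
	if err != nil {
		panic(err)
	}
	if err := dec.Decode(input); err != nil {
		panic(err)
	}

	fmt.Printf("%+v\n", f) // => {Person:{Name:alice} Email:alice@example.com}
	fmt.Println(md.Unused) // => [extra], keys with no matching field
}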
-func NewDecoder(config *DecoderConfig) (*Decoder, error) { - val := reflect.ValueOf(config.Result) - if val.Kind() != reflect.Ptr { - return nil, errors.New("result must be a pointer") - } - - val = val.Elem() - if !val.CanAddr() { - return nil, errors.New("result must be addressable (a pointer)") - } - - if config.Metadata != nil { - if config.Metadata.Keys == nil { - config.Metadata.Keys = make([]string, 0) - } - - if config.Metadata.Unused == nil { - config.Metadata.Unused = make([]string, 0) - } - - if config.Metadata.Unset == nil { - config.Metadata.Unset = make([]string, 0) - } - } - - if config.TagName == "" { - config.TagName = "mapstructure" - } - - if config.MatchName == nil { - config.MatchName = strings.EqualFold - } - - result := &Decoder{ - config: config, - } - - return result, nil -} - -// Decode decodes the given raw interface to the target pointer specified -// by the configuration. -func (d *Decoder) Decode(input interface{}) error { - return d.decode("", input, reflect.ValueOf(d.config.Result).Elem()) -} - -// Decodes an unknown data type into a specific reflection value. -func (d *Decoder) decode(name string, input interface{}, outVal reflect.Value) error { - var inputVal reflect.Value - if input != nil { - inputVal = reflect.ValueOf(input) - - // We need to check here if input is a typed nil. Typed nils won't - // match the "input == nil" below so we check that here. - if inputVal.Kind() == reflect.Ptr && inputVal.IsNil() { - input = nil - } - } - - if input == nil { - // If the data is nil, then we don't set anything, unless ZeroFields is set - // to true. - if d.config.ZeroFields { - outVal.Set(reflect.Zero(outVal.Type())) - - if d.config.Metadata != nil && name != "" { - d.config.Metadata.Keys = append(d.config.Metadata.Keys, name) - } - } - return nil - } - - if !inputVal.IsValid() { - // If the input value is invalid, then we just set the value - // to be the zero value. - outVal.Set(reflect.Zero(outVal.Type())) - if d.config.Metadata != nil && name != "" { - d.config.Metadata.Keys = append(d.config.Metadata.Keys, name) - } - return nil - } - - if d.config.DecodeHook != nil { - // We have a DecodeHook, so let's pre-process the input. - var err error - input, err = DecodeHookExec(d.config.DecodeHook, inputVal, outVal) - if err != nil { - return fmt.Errorf("error decoding '%s': %w", name, err) - } - } - - var err error - outputKind := getKind(outVal) - addMetaKey := true - switch outputKind { - case reflect.Bool: - err = d.decodeBool(name, input, outVal) - case reflect.Interface: - err = d.decodeBasic(name, input, outVal) - case reflect.String: - err = d.decodeString(name, input, outVal) - case reflect.Int: - err = d.decodeInt(name, input, outVal) - case reflect.Uint: - err = d.decodeUint(name, input, outVal) - case reflect.Float32: - err = d.decodeFloat(name, input, outVal) - case reflect.Struct: - err = d.decodeStruct(name, input, outVal) - case reflect.Map: - err = d.decodeMap(name, input, outVal) - case reflect.Ptr: - addMetaKey, err = d.decodePtr(name, input, outVal) - case reflect.Slice: - err = d.decodeSlice(name, input, outVal) - case reflect.Array: - err = d.decodeArray(name, input, outVal) - case reflect.Func: - err = d.decodeFunc(name, input, outVal) - default: - // If we reached this point then we weren't able to decode it - return fmt.Errorf("%s: unsupported type: %s", name, outputKind) - } - - // If we reached here, then we successfully decoded SOMETHING, so - // mark the key as used if we're tracking metainput. 
- if addMetaKey && d.config.Metadata != nil && name != "" { - d.config.Metadata.Keys = append(d.config.Metadata.Keys, name) - } - - return err -} - -// This decodes a basic type (bool, int, string, etc.) and sets the -// value to "data" of that type. -func (d *Decoder) decodeBasic(name string, data interface{}, val reflect.Value) error { - if val.IsValid() && val.Elem().IsValid() { - elem := val.Elem() - - // If we can't address this element, then its not writable. Instead, - // we make a copy of the value (which is a pointer and therefore - // writable), decode into that, and replace the whole value. - copied := false - if !elem.CanAddr() { - copied = true - - // Make *T - copy := reflect.New(elem.Type()) - - // *T = elem - copy.Elem().Set(elem) - - // Set elem so we decode into it - elem = copy - } - - // Decode. If we have an error then return. We also return right - // away if we're not a copy because that means we decoded directly. - if err := d.decode(name, data, elem); err != nil || !copied { - return err - } - - // If we're a copy, we need to set te final result - val.Set(elem.Elem()) - return nil - } - - dataVal := reflect.ValueOf(data) - - // If the input data is a pointer, and the assigned type is the dereference - // of that exact pointer, then indirect it so that we can assign it. - // Example: *string to string - if dataVal.Kind() == reflect.Ptr && dataVal.Type().Elem() == val.Type() { - dataVal = reflect.Indirect(dataVal) - } - - if !dataVal.IsValid() { - dataVal = reflect.Zero(val.Type()) - } - - dataValType := dataVal.Type() - if !dataValType.AssignableTo(val.Type()) { - return fmt.Errorf( - "'%s' expected type '%s', got '%s'", - name, val.Type(), dataValType) - } - - val.Set(dataVal) - return nil -} - -func (d *Decoder) decodeString(name string, data interface{}, val reflect.Value) error { - dataVal := reflect.Indirect(reflect.ValueOf(data)) - dataKind := getKind(dataVal) - - converted := true - switch { - case dataKind == reflect.String: - val.SetString(dataVal.String()) - case dataKind == reflect.Bool && d.config.WeaklyTypedInput: - if dataVal.Bool() { - val.SetString("1") - } else { - val.SetString("0") - } - case dataKind == reflect.Int && d.config.WeaklyTypedInput: - val.SetString(strconv.FormatInt(dataVal.Int(), 10)) - case dataKind == reflect.Uint && d.config.WeaklyTypedInput: - val.SetString(strconv.FormatUint(dataVal.Uint(), 10)) - case dataKind == reflect.Float32 && d.config.WeaklyTypedInput: - val.SetString(strconv.FormatFloat(dataVal.Float(), 'f', -1, 64)) - case dataKind == reflect.Slice && d.config.WeaklyTypedInput, - dataKind == reflect.Array && d.config.WeaklyTypedInput: - dataType := dataVal.Type() - elemKind := dataType.Elem().Kind() - switch elemKind { - case reflect.Uint8: - var uints []uint8 - if dataKind == reflect.Array { - uints = make([]uint8, dataVal.Len(), dataVal.Len()) - for i := range uints { - uints[i] = dataVal.Index(i).Interface().(uint8) - } - } else { - uints = dataVal.Interface().([]uint8) - } - val.SetString(string(uints)) - default: - converted = false - } - default: - converted = false - } - - if !converted { - return fmt.Errorf( - "'%s' expected type '%s', got unconvertible type '%s', value: '%v'", - name, val.Type(), dataVal.Type(), data) - } - - return nil -} - -func (d *Decoder) decodeInt(name string, data interface{}, val reflect.Value) error { - dataVal := reflect.Indirect(reflect.ValueOf(data)) - dataKind := getKind(dataVal) - dataType := dataVal.Type() - - switch { - case dataKind == reflect.Int: - val.SetInt(dataVal.Int()) - 
case dataKind == reflect.Uint: - val.SetInt(int64(dataVal.Uint())) - case dataKind == reflect.Float32: - val.SetInt(int64(dataVal.Float())) - case dataKind == reflect.Bool && d.config.WeaklyTypedInput: - if dataVal.Bool() { - val.SetInt(1) - } else { - val.SetInt(0) - } - case dataKind == reflect.String && d.config.WeaklyTypedInput: - str := dataVal.String() - if str == "" { - str = "0" - } - - i, err := strconv.ParseInt(str, 0, val.Type().Bits()) - if err == nil { - val.SetInt(i) - } else { - return fmt.Errorf("cannot parse '%s' as int: %s", name, err) - } - case dataType.PkgPath() == "encoding/json" && dataType.Name() == "Number": - jn := data.(json.Number) - i, err := jn.Int64() - if err != nil { - return fmt.Errorf( - "error decoding json.Number into %s: %s", name, err) - } - val.SetInt(i) - default: - return fmt.Errorf( - "'%s' expected type '%s', got unconvertible type '%s', value: '%v'", - name, val.Type(), dataVal.Type(), data) - } - - return nil -} - -func (d *Decoder) decodeUint(name string, data interface{}, val reflect.Value) error { - dataVal := reflect.Indirect(reflect.ValueOf(data)) - dataKind := getKind(dataVal) - dataType := dataVal.Type() - - switch { - case dataKind == reflect.Int: - i := dataVal.Int() - if i < 0 && !d.config.WeaklyTypedInput { - return fmt.Errorf("cannot parse '%s', %d overflows uint", - name, i) - } - val.SetUint(uint64(i)) - case dataKind == reflect.Uint: - val.SetUint(dataVal.Uint()) - case dataKind == reflect.Float32: - f := dataVal.Float() - if f < 0 && !d.config.WeaklyTypedInput { - return fmt.Errorf("cannot parse '%s', %f overflows uint", - name, f) - } - val.SetUint(uint64(f)) - case dataKind == reflect.Bool && d.config.WeaklyTypedInput: - if dataVal.Bool() { - val.SetUint(1) - } else { - val.SetUint(0) - } - case dataKind == reflect.String && d.config.WeaklyTypedInput: - str := dataVal.String() - if str == "" { - str = "0" - } - - i, err := strconv.ParseUint(str, 0, val.Type().Bits()) - if err == nil { - val.SetUint(i) - } else { - return fmt.Errorf("cannot parse '%s' as uint: %s", name, err) - } - case dataType.PkgPath() == "encoding/json" && dataType.Name() == "Number": - jn := data.(json.Number) - i, err := strconv.ParseUint(string(jn), 0, 64) - if err != nil { - return fmt.Errorf( - "error decoding json.Number into %s: %s", name, err) - } - val.SetUint(i) - default: - return fmt.Errorf( - "'%s' expected type '%s', got unconvertible type '%s', value: '%v'", - name, val.Type(), dataVal.Type(), data) - } - - return nil -} - -func (d *Decoder) decodeBool(name string, data interface{}, val reflect.Value) error { - dataVal := reflect.Indirect(reflect.ValueOf(data)) - dataKind := getKind(dataVal) - - switch { - case dataKind == reflect.Bool: - val.SetBool(dataVal.Bool()) - case dataKind == reflect.Int && d.config.WeaklyTypedInput: - val.SetBool(dataVal.Int() != 0) - case dataKind == reflect.Uint && d.config.WeaklyTypedInput: - val.SetBool(dataVal.Uint() != 0) - case dataKind == reflect.Float32 && d.config.WeaklyTypedInput: - val.SetBool(dataVal.Float() != 0) - case dataKind == reflect.String && d.config.WeaklyTypedInput: - b, err := strconv.ParseBool(dataVal.String()) - if err == nil { - val.SetBool(b) - } else if dataVal.String() == "" { - val.SetBool(false) - } else { - return fmt.Errorf("cannot parse '%s' as bool: %s", name, err) - } - default: - return fmt.Errorf( - "'%s' expected type '%s', got unconvertible type '%s', value: '%v'", - name, val.Type(), dataVal.Type(), data) - } - - return nil -} - -func (d *Decoder) decodeFloat(name string, 
data interface{}, val reflect.Value) error { - dataVal := reflect.Indirect(reflect.ValueOf(data)) - dataKind := getKind(dataVal) - dataType := dataVal.Type() - - switch { - case dataKind == reflect.Int: - val.SetFloat(float64(dataVal.Int())) - case dataKind == reflect.Uint: - val.SetFloat(float64(dataVal.Uint())) - case dataKind == reflect.Float32: - val.SetFloat(dataVal.Float()) - case dataKind == reflect.Bool && d.config.WeaklyTypedInput: - if dataVal.Bool() { - val.SetFloat(1) - } else { - val.SetFloat(0) - } - case dataKind == reflect.String && d.config.WeaklyTypedInput: - str := dataVal.String() - if str == "" { - str = "0" - } - - f, err := strconv.ParseFloat(str, val.Type().Bits()) - if err == nil { - val.SetFloat(f) - } else { - return fmt.Errorf("cannot parse '%s' as float: %s", name, err) - } - case dataType.PkgPath() == "encoding/json" && dataType.Name() == "Number": - jn := data.(json.Number) - i, err := jn.Float64() - if err != nil { - return fmt.Errorf( - "error decoding json.Number into %s: %s", name, err) - } - val.SetFloat(i) - default: - return fmt.Errorf( - "'%s' expected type '%s', got unconvertible type '%s', value: '%v'", - name, val.Type(), dataVal.Type(), data) - } - - return nil -} - -func (d *Decoder) decodeMap(name string, data interface{}, val reflect.Value) error { - valType := val.Type() - valKeyType := valType.Key() - valElemType := valType.Elem() - - // By default we overwrite keys in the current map - valMap := val - - // If the map is nil or we're purposely zeroing fields, make a new map - if valMap.IsNil() || d.config.ZeroFields { - // Make a new map to hold our result - mapType := reflect.MapOf(valKeyType, valElemType) - valMap = reflect.MakeMap(mapType) - } - - dataVal := reflect.ValueOf(data) - - // Resolve any levels of indirection - for dataVal.Kind() == reflect.Pointer { - dataVal = reflect.Indirect(dataVal) - } - - // Check input type and based on the input type jump to the proper func - switch dataVal.Kind() { - case reflect.Map: - return d.decodeMapFromMap(name, dataVal, val, valMap) - - case reflect.Struct: - return d.decodeMapFromStruct(name, dataVal, val, valMap) - - case reflect.Array, reflect.Slice: - if d.config.WeaklyTypedInput { - return d.decodeMapFromSlice(name, dataVal, val, valMap) - } - - fallthrough - - default: - return fmt.Errorf("'%s' expected a map, got '%s'", name, dataVal.Kind()) - } -} - -func (d *Decoder) decodeMapFromSlice(name string, dataVal reflect.Value, val reflect.Value, valMap reflect.Value) error { - // Special case for BC reasons (covered by tests) - if dataVal.Len() == 0 { - val.Set(valMap) - return nil - } - - for i := 0; i < dataVal.Len(); i++ { - err := d.decode( - name+"["+strconv.Itoa(i)+"]", - dataVal.Index(i).Interface(), val) - if err != nil { - return err - } - } - - return nil -} - -func (d *Decoder) decodeMapFromMap(name string, dataVal reflect.Value, val reflect.Value, valMap reflect.Value) error { - valType := val.Type() - valKeyType := valType.Key() - valElemType := valType.Elem() - - // Accumulate errors - errors := make([]string, 0) - - // If the input data is empty, then we just match what the input data is. 
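The weakly-typed branches in decodeString through decodeFloat above are what WeaklyTypedInput turns on, while the json.Number cases apply unconditionally. A sketch of both, with a made-up Flags type:

```go
package main

import (
	"encoding/json"
	"fmt"

	"github.com/go-viper/mapstructure/v2"
)

type Flags struct {
	Retries int     // "3" parses to 3 under WeaklyTypedInput
	Debug   bool    // 1 becomes true under WeaklyTypedInput
	Label   string  // 42 is formatted as "42" under WeaklyTypedInput
	Ratio   float64 // json.Number("0.5") converts even without weak typing
}

func main() {
	input := map[string]interface{}{
		"retries": "3",
		"debug":   1,
		"label":   42,
		"ratio":   json.Number("0.5"),
	}
	var f Flags
	if err := mapstructure.WeakDecode(input, &f); err != nil {
		panic(err)
	}
	fmt.Printf("%+v\n", f) // {Retries:3 Debug:true Label:42 Ratio:0.5}
}
```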
- if dataVal.Len() == 0 { - if dataVal.IsNil() { - if !val.IsNil() { - val.Set(dataVal) - } - } else { - // Set to empty allocated value - val.Set(valMap) - } - - return nil - } - - for _, k := range dataVal.MapKeys() { - fieldName := name + "[" + k.String() + "]" - - // First decode the key into the proper type - currentKey := reflect.Indirect(reflect.New(valKeyType)) - if err := d.decode(fieldName, k.Interface(), currentKey); err != nil { - errors = appendErrors(errors, err) - continue - } - - // Next decode the data into the proper type - v := dataVal.MapIndex(k).Interface() - currentVal := reflect.Indirect(reflect.New(valElemType)) - if err := d.decode(fieldName, v, currentVal); err != nil { - errors = appendErrors(errors, err) - continue - } - - valMap.SetMapIndex(currentKey, currentVal) - } - - // Set the built up map to the value - val.Set(valMap) - - // If we had errors, return those - if len(errors) > 0 { - return &Error{errors} - } - - return nil -} - -func (d *Decoder) decodeMapFromStruct(name string, dataVal reflect.Value, val reflect.Value, valMap reflect.Value) error { - typ := dataVal.Type() - for i := 0; i < typ.NumField(); i++ { - // Get the StructField first since this is a cheap operation. If the - // field is unexported, then ignore it. - f := typ.Field(i) - if f.PkgPath != "" { - continue - } - - // Next get the actual value of this field and verify it is assignable - // to the map value. - v := dataVal.Field(i) - if !v.Type().AssignableTo(valMap.Type().Elem()) { - return fmt.Errorf("cannot assign type '%s' to map value field of type '%s'", v.Type(), valMap.Type().Elem()) - } - - tagValue := f.Tag.Get(d.config.TagName) - keyName := f.Name - - if tagValue == "" && d.config.IgnoreUntaggedFields { - continue - } - - // If Squash is set in the config, we squash the field down. - squash := d.config.Squash && v.Kind() == reflect.Struct && f.Anonymous - - v = dereferencePtrToStructIfNeeded(v, d.config.TagName) - - // Determine the name of the key in the map - if index := strings.Index(tagValue, ","); index != -1 { - if tagValue[:index] == "-" { - continue - } - // If "omitempty" is specified in the tag, it ignores empty values. - if strings.Index(tagValue[index+1:], "omitempty") != -1 && isEmptyValue(v) { - continue - } - - // If "squash" is specified in the tag, we squash the field down. - squash = squash || strings.Index(tagValue[index+1:], "squash") != -1 - if squash { - // When squashing, the embedded type can be a pointer to a struct. 
- if v.Kind() == reflect.Ptr && v.Elem().Kind() == reflect.Struct { - v = v.Elem() - } - - // The final type must be a struct - if v.Kind() != reflect.Struct { - return fmt.Errorf("cannot squash non-struct type '%s'", v.Type()) - } - } else { - if strings.Index(tagValue[index+1:], "remain") != -1 { - if v.Kind() != reflect.Map { - return fmt.Errorf("error remain-tag field with invalid type: '%s'", v.Type()) - } - - ptr := v.MapRange() - for ptr.Next() { - valMap.SetMapIndex(ptr.Key(), ptr.Value()) - } - continue - } - } - if keyNameTagValue := tagValue[:index]; keyNameTagValue != "" { - keyName = keyNameTagValue - } - } else if len(tagValue) > 0 { - if tagValue == "-" { - continue - } - keyName = tagValue - } - - switch v.Kind() { - // this is an embedded struct, so handle it differently - case reflect.Struct: - x := reflect.New(v.Type()) - x.Elem().Set(v) - - vType := valMap.Type() - vKeyType := vType.Key() - vElemType := vType.Elem() - mType := reflect.MapOf(vKeyType, vElemType) - vMap := reflect.MakeMap(mType) - - // Creating a pointer to a map so that other methods can completely - // overwrite the map if need be (looking at you decodeMapFromMap). The - // indirection allows the underlying map to be settable (CanSet() == true) - // where as reflect.MakeMap returns an unsettable map. - addrVal := reflect.New(vMap.Type()) - reflect.Indirect(addrVal).Set(vMap) - - err := d.decode(keyName, x.Interface(), reflect.Indirect(addrVal)) - if err != nil { - return err - } - - // the underlying map may have been completely overwritten so pull - // it indirectly out of the enclosing value. - vMap = reflect.Indirect(addrVal) - - if squash { - for _, k := range vMap.MapKeys() { - valMap.SetMapIndex(k, vMap.MapIndex(k)) - } - } else { - valMap.SetMapIndex(reflect.ValueOf(keyName), vMap) - } - - default: - valMap.SetMapIndex(reflect.ValueOf(keyName), v) - } - } - - if val.CanAddr() { - val.Set(valMap) - } - - return nil -} - -func (d *Decoder) decodePtr(name string, data interface{}, val reflect.Value) (bool, error) { - // If the input data is nil, then we want to just set the output - // pointer to be nil as well. - isNil := data == nil - if !isNil { - switch v := reflect.Indirect(reflect.ValueOf(data)); v.Kind() { - case reflect.Chan, - reflect.Func, - reflect.Interface, - reflect.Map, - reflect.Ptr, - reflect.Slice: - isNil = v.IsNil() - } - } - if isNil { - if !val.IsNil() && val.CanSet() { - nilValue := reflect.New(val.Type()).Elem() - val.Set(nilValue) - } - - return true, nil - } - - // Create an element of the concrete (non pointer) type and decode - // into that. Then set the value of the pointer to this type. - valType := val.Type() - valElemType := valType.Elem() - if val.CanSet() { - realVal := val - if realVal.IsNil() || d.config.ZeroFields { - realVal = reflect.New(valElemType) - } - - if err := d.decode(name, data, reflect.Indirect(realVal)); err != nil { - return false, err - } - - val.Set(realVal) - } else { - if err := d.decode(name, data, reflect.Indirect(val)); err != nil { - return false, err - } - } - return false, nil -} - -func (d *Decoder) decodeFunc(name string, data interface{}, val reflect.Value) error { - // Create an element of the concrete (non pointer) type and decode - // into that. Then set the value of the pointer to this type. 
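decodeMapFromStruct above also gives the reverse direction: decoding a tagged struct into a map, honoring "-" and ",omitempty". A short sketch; the Account type is illustrative:

```go
package main

import (
	"fmt"

	"github.com/go-viper/mapstructure/v2"
)

type Account struct {
	ID     int    `mapstructure:"id"`
	Email  string `mapstructure:"email,omitempty"` // omitted from the map when empty
	Secret string `mapstructure:"-"`               // never copied
}

func main() {
	out := map[string]interface{}{}
	if err := mapstructure.Decode(Account{ID: 7, Secret: "x"}, &out); err != nil {
		panic(err)
	}
	fmt.Println(out) // map[id:7]
}
```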
- dataVal := reflect.Indirect(reflect.ValueOf(data)) - if val.Type() != dataVal.Type() { - return fmt.Errorf( - "'%s' expected type '%s', got unconvertible type '%s', value: '%v'", - name, val.Type(), dataVal.Type(), data) - } - val.Set(dataVal) - return nil -} - -func (d *Decoder) decodeSlice(name string, data interface{}, val reflect.Value) error { - dataVal := reflect.Indirect(reflect.ValueOf(data)) - dataValKind := dataVal.Kind() - valType := val.Type() - valElemType := valType.Elem() - sliceType := reflect.SliceOf(valElemType) - - // If we have a non array/slice type then we first attempt to convert. - if dataValKind != reflect.Array && dataValKind != reflect.Slice { - if d.config.WeaklyTypedInput { - switch { - // Slice and array we use the normal logic - case dataValKind == reflect.Slice, dataValKind == reflect.Array: - break - - // Empty maps turn into empty slices - case dataValKind == reflect.Map: - if dataVal.Len() == 0 { - val.Set(reflect.MakeSlice(sliceType, 0, 0)) - return nil - } - // Create slice of maps of other sizes - return d.decodeSlice(name, []interface{}{data}, val) - - case dataValKind == reflect.String && valElemType.Kind() == reflect.Uint8: - return d.decodeSlice(name, []byte(dataVal.String()), val) - - // All other types we try to convert to the slice type - // and "lift" it into it. i.e. a string becomes a string slice. - default: - // Just re-try this function with data as a slice. - return d.decodeSlice(name, []interface{}{data}, val) - } - } - - return fmt.Errorf( - "'%s': source data must be an array or slice, got %s", name, dataValKind) - } - - // If the input value is nil, then don't allocate since empty != nil - if dataValKind != reflect.Array && dataVal.IsNil() { - return nil - } - - valSlice := val - if valSlice.IsNil() || d.config.ZeroFields { - // Make a new slice to hold our result, same size as the original data. - valSlice = reflect.MakeSlice(sliceType, dataVal.Len(), dataVal.Len()) - } else if valSlice.Len() > dataVal.Len() { - valSlice = valSlice.Slice(0, dataVal.Len()) - } - - // Accumulate any errors - errors := make([]string, 0) - - for i := 0; i < dataVal.Len(); i++ { - currentData := dataVal.Index(i).Interface() - for valSlice.Len() <= i { - valSlice = reflect.Append(valSlice, reflect.Zero(valElemType)) - } - currentField := valSlice.Index(i) - - fieldName := name + "[" + strconv.Itoa(i) + "]" - if err := d.decode(fieldName, currentData, currentField); err != nil { - errors = appendErrors(errors, err) - } - } - - // Finally, set the value to the slice we built up - val.Set(valSlice) - - // If there were errors, we return those - if len(errors) > 0 { - return &Error{errors} - } - - return nil -} - -func (d *Decoder) decodeArray(name string, data interface{}, val reflect.Value) error { - dataVal := reflect.Indirect(reflect.ValueOf(data)) - dataValKind := dataVal.Kind() - valType := val.Type() - valElemType := valType.Elem() - arrayType := reflect.ArrayOf(valType.Len(), valElemType) - - valArray := val - - if isComparable(valArray) && valArray.Interface() == reflect.Zero(valArray.Type()).Interface() || d.config.ZeroFields { - // Check input type - if dataValKind != reflect.Array && dataValKind != reflect.Slice { - if d.config.WeaklyTypedInput { - switch { - // Empty maps turn into empty arrays - case dataValKind == reflect.Map: - if dataVal.Len() == 0 { - val.Set(reflect.Zero(arrayType)) - return nil - } - - // All other types we try to convert to the array type - // and "lift" it into it. i.e. a string becomes a string array. 
- default: - // Just re-try this function with data as a slice. - return d.decodeArray(name, []interface{}{data}, val) - } - } - - return fmt.Errorf( - "'%s': source data must be an array or slice, got %s", name, dataValKind) - - } - if dataVal.Len() > arrayType.Len() { - return fmt.Errorf( - "'%s': expected source data to have length less or equal to %d, got %d", name, arrayType.Len(), dataVal.Len()) - } - - // Make a new array to hold our result, same size as the original data. - valArray = reflect.New(arrayType).Elem() - } - - // Accumulate any errors - errors := make([]string, 0) - - for i := 0; i < dataVal.Len(); i++ { - currentData := dataVal.Index(i).Interface() - currentField := valArray.Index(i) - - fieldName := name + "[" + strconv.Itoa(i) + "]" - if err := d.decode(fieldName, currentData, currentField); err != nil { - errors = appendErrors(errors, err) - } - } - - // Finally, set the value to the array we built up - val.Set(valArray) - - // If there were errors, we return those - if len(errors) > 0 { - return &Error{errors} - } - - return nil -} - -func (d *Decoder) decodeStruct(name string, data interface{}, val reflect.Value) error { - dataVal := reflect.Indirect(reflect.ValueOf(data)) - - // If the type of the value to write to and the data match directly, - // then we just set it directly instead of recursing into the structure. - if dataVal.Type() == val.Type() { - val.Set(dataVal) - return nil - } - - dataValKind := dataVal.Kind() - switch dataValKind { - case reflect.Map: - return d.decodeStructFromMap(name, dataVal, val) - - case reflect.Struct: - // Not the most efficient way to do this but we can optimize later if - // we want to. To convert from struct to struct we go to map first - // as an intermediary. - - // Make a new map to hold our result - mapType := reflect.TypeOf((map[string]interface{})(nil)) - mval := reflect.MakeMap(mapType) - - // Creating a pointer to a map so that other methods can completely - // overwrite the map if need be (looking at you decodeMapFromMap). The - // indirection allows the underlying map to be settable (CanSet() == true) - // where as reflect.MakeMap returns an unsettable map. - addrVal := reflect.New(mval.Type()) - - reflect.Indirect(addrVal).Set(mval) - if err := d.decodeMapFromStruct(name, dataVal, reflect.Indirect(addrVal), mval); err != nil { - return err - } - - result := d.decodeStructFromMap(name, reflect.Indirect(addrVal), val) - return result - - default: - return fmt.Errorf("'%s' expected a map, got '%s'", name, dataVal.Kind()) - } -} - -func (d *Decoder) decodeStructFromMap(name string, dataVal, val reflect.Value) error { - dataValType := dataVal.Type() - if kind := dataValType.Key().Kind(); kind != reflect.String && kind != reflect.Interface { - return fmt.Errorf( - "'%s' needs a map with string keys, has '%s' keys", - name, dataValType.Key().Kind()) - } - - dataValKeys := make(map[reflect.Value]struct{}) - dataValKeysUnused := make(map[interface{}]struct{}) - for _, dataValKey := range dataVal.MapKeys() { - dataValKeys[dataValKey] = struct{}{} - dataValKeysUnused[dataValKey.Interface()] = struct{}{} - } - - targetValKeysUnused := make(map[interface{}]struct{}) - errors := make([]string, 0) - - // This slice will keep track of all the structs we'll be decoding. - // There can be more than one struct if there are embedded structs - // that are squashed. - structs := make([]reflect.Value, 1, 5) - structs[0] = val - - // Compile the list of all the fields that we're going to be decoding - // from all the structs. 
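As the comment in decodeStruct above says, struct-to-struct decoding goes through a map[string]interface{} intermediary, so two differently shaped structs can be copied field-by-name. A sketch with throwaway types:

```go
package main

import (
	"fmt"

	"github.com/go-viper/mapstructure/v2"
)

type wireUser struct {
	Name  string
	Email string
	Age   int
}

type User struct {
	Name string
	Age  int
	// No Email field: that key is simply unused (visible via Metadata.Unused).
}

func main() {
	var u User
	if err := mapstructure.Decode(wireUser{Name: "Ann", Email: "a@x", Age: 30}, &u); err != nil {
		panic(err)
	}
	fmt.Printf("%+v\n", u) // {Name:Ann Age:30}
}
```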
- type field struct { - field reflect.StructField - val reflect.Value - } - - // remainField is set to a valid field set with the "remain" tag if - // we are keeping track of remaining values. - var remainField *field - - fields := []field{} - for len(structs) > 0 { - structVal := structs[0] - structs = structs[1:] - - structType := structVal.Type() - - for i := 0; i < structType.NumField(); i++ { - fieldType := structType.Field(i) - fieldVal := structVal.Field(i) - if fieldVal.Kind() == reflect.Ptr && fieldVal.Elem().Kind() == reflect.Struct { - // Handle embedded struct pointers as embedded structs. - fieldVal = fieldVal.Elem() - } - - // If "squash" is specified in the tag, we squash the field down. - squash := d.config.Squash && fieldVal.Kind() == reflect.Struct && fieldType.Anonymous - remain := false - - // We always parse the tags cause we're looking for other tags too - tagParts := strings.Split(fieldType.Tag.Get(d.config.TagName), ",") - for _, tag := range tagParts[1:] { - if tag == "squash" { - squash = true - break - } - - if tag == "remain" { - remain = true - break - } - } - - if squash { - if fieldVal.Kind() != reflect.Struct { - errors = appendErrors(errors, - fmt.Errorf("%s: unsupported type for squash: %s", fieldType.Name, fieldVal.Kind())) - } else { - structs = append(structs, fieldVal) - } - continue - } - - // Build our field - if remain { - remainField = &field{fieldType, fieldVal} - } else { - // Normal struct field, store it away - fields = append(fields, field{fieldType, fieldVal}) - } - } - } - - // for fieldType, field := range fields { - for _, f := range fields { - field, fieldValue := f.field, f.val - fieldName := field.Name - - tagValue := field.Tag.Get(d.config.TagName) - if tagValue == "" && d.config.IgnoreUntaggedFields { - continue - } - tagValue = strings.SplitN(tagValue, ",", 2)[0] - if tagValue != "" { - fieldName = tagValue - } - - rawMapKey := reflect.ValueOf(fieldName) - rawMapVal := dataVal.MapIndex(rawMapKey) - if !rawMapVal.IsValid() { - // Do a slower search by iterating over each key and - // doing case-insensitive search. - for dataValKey := range dataValKeys { - mK, ok := dataValKey.Interface().(string) - if !ok { - // Not a string key - continue - } - - if d.config.MatchName(mK, fieldName) { - rawMapKey = dataValKey - rawMapVal = dataVal.MapIndex(dataValKey) - break - } - } - - if !rawMapVal.IsValid() { - // There was no matching key in the map for the value in - // the struct. Remember it for potential errors and metadata. - targetValKeysUnused[fieldName] = struct{}{} - continue - } - } - - if !fieldValue.IsValid() { - // This should never happen - panic("field is not valid") - } - - // If we can't set the field, then it is unexported or something, - // and we just continue onwards. - if !fieldValue.CanSet() { - continue - } - - // Delete the key we're using from the unused map so we stop tracking - delete(dataValKeysUnused, rawMapKey.Interface()) - - // If the name is empty string, then we're at the root, and we - // don't dot-join the fields. - if name != "" { - fieldName = name + "." + fieldName - } - - if err := d.decode(fieldName, rawMapVal.Interface(), fieldValue); err != nil { - errors = appendErrors(errors, err) - } - } - - // If we have a "remain"-tagged field and we have unused keys then - // we put the unused keys directly into the remain field. 
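The field loop above is where ",squash" and ",remain" are recognized on the struct side. A sketch combining them; Base, Resource, and the input keys are made up:

```go
package main

import (
	"fmt"

	"github.com/go-viper/mapstructure/v2"
)

type Base struct {
	Kind string `mapstructure:"kind"`
}

type Resource struct {
	Base  `mapstructure:",squash"` // flattened: "kind" is read from the top level
	Name  string                  `mapstructure:"name"`
	Extra map[string]interface{}  `mapstructure:",remain"` // any unmatched keys land here
}

func main() {
	input := map[string]interface{}{
		"kind": "disk",
		"name": "data0",
		"size": "10Gi", // no matching field, so it ends up in Extra
	}
	var r Resource
	if err := mapstructure.Decode(input, &r); err != nil {
		panic(err)
	}
	fmt.Printf("%+v\n", r) // {Base:{Kind:disk} Name:data0 Extra:map[size:10Gi]}
}
```

With a ",remain" field present, leftover keys are swallowed into it rather than triggering the ErrorUnused check.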
- if remainField != nil && len(dataValKeysUnused) > 0 { - // Build a map of only the unused values - remain := map[interface{}]interface{}{} - for key := range dataValKeysUnused { - remain[key] = dataVal.MapIndex(reflect.ValueOf(key)).Interface() - } - - // Decode it as-if we were just decoding this map onto our map. - if err := d.decodeMap(name, remain, remainField.val); err != nil { - errors = appendErrors(errors, err) - } - - // Set the map to nil so we have none so that the next check will - // not error (ErrorUnused) - dataValKeysUnused = nil - } - - if d.config.ErrorUnused && len(dataValKeysUnused) > 0 { - keys := make([]string, 0, len(dataValKeysUnused)) - for rawKey := range dataValKeysUnused { - keys = append(keys, rawKey.(string)) - } - sort.Strings(keys) - - err := fmt.Errorf("'%s' has invalid keys: %s", name, strings.Join(keys, ", ")) - errors = appendErrors(errors, err) - } - - if d.config.ErrorUnset && len(targetValKeysUnused) > 0 { - keys := make([]string, 0, len(targetValKeysUnused)) - for rawKey := range targetValKeysUnused { - keys = append(keys, rawKey.(string)) - } - sort.Strings(keys) - - err := fmt.Errorf("'%s' has unset fields: %s", name, strings.Join(keys, ", ")) - errors = appendErrors(errors, err) - } - - if len(errors) > 0 { - return &Error{errors} - } - - // Add the unused keys to the list of unused keys if we're tracking metadata - if d.config.Metadata != nil { - for rawKey := range dataValKeysUnused { - key := rawKey.(string) - if name != "" { - key = name + "." + key - } - - d.config.Metadata.Unused = append(d.config.Metadata.Unused, key) - } - for rawKey := range targetValKeysUnused { - key := rawKey.(string) - if name != "" { - key = name + "." + key - } - - d.config.Metadata.Unset = append(d.config.Metadata.Unset, key) - } - } - - return nil -} - -func isEmptyValue(v reflect.Value) bool { - switch getKind(v) { - case reflect.Array, reflect.Map, reflect.Slice, reflect.String: - return v.Len() == 0 - case reflect.Bool: - return !v.Bool() - case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: - return v.Int() == 0 - case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr: - return v.Uint() == 0 - case reflect.Float32, reflect.Float64: - return v.Float() == 0 - case reflect.Interface, reflect.Ptr: - return v.IsNil() - } - return false -} - -func getKind(val reflect.Value) reflect.Kind { - kind := val.Kind() - - switch { - case kind >= reflect.Int && kind <= reflect.Int64: - return reflect.Int - case kind >= reflect.Uint && kind <= reflect.Uint64: - return reflect.Uint - case kind >= reflect.Float32 && kind <= reflect.Float64: - return reflect.Float32 - default: - return kind - } -} - -func isStructTypeConvertibleToMap(typ reflect.Type, checkMapstructureTags bool, tagName string) bool { - for i := 0; i < typ.NumField(); i++ { - f := typ.Field(i) - if f.PkgPath == "" && !checkMapstructureTags { // check for unexported fields - return true - } - if checkMapstructureTags && f.Tag.Get(tagName) != "" { // check for mapstructure tags inside - return true - } - } - return false -} - -func dereferencePtrToStructIfNeeded(v reflect.Value, tagName string) reflect.Value { - if v.Kind() != reflect.Ptr || v.Elem().Kind() != reflect.Struct { - return v - } - deref := v.Elem() - derefT := deref.Type() - if isStructTypeConvertibleToMap(derefT, true, tagName) { - return deref - } - return v -} diff --git a/vendor/github.com/go-viper/mapstructure/v2/reflect_go1_19.go 
b/vendor/github.com/go-viper/mapstructure/v2/reflect_go1_19.go deleted file mode 100644 index d0913fff6..000000000 --- a/vendor/github.com/go-viper/mapstructure/v2/reflect_go1_19.go +++ /dev/null @@ -1,44 +0,0 @@ -//go:build !go1.20 - -package mapstructure - -import "reflect" - -func isComparable(v reflect.Value) bool { - k := v.Kind() - switch k { - case reflect.Invalid: - return false - - case reflect.Array: - switch v.Type().Elem().Kind() { - case reflect.Interface, reflect.Array, reflect.Struct: - for i := 0; i < v.Type().Len(); i++ { - // if !v.Index(i).Comparable() { - if !isComparable(v.Index(i)) { - return false - } - } - return true - } - return v.Type().Comparable() - - case reflect.Interface: - // return v.Elem().Comparable() - return isComparable(v.Elem()) - - case reflect.Struct: - for i := 0; i < v.NumField(); i++ { - return false - - // if !v.Field(i).Comparable() { - if !isComparable(v.Field(i)) { - return false - } - } - return true - - default: - return v.Type().Comparable() - } -} diff --git a/vendor/github.com/go-viper/mapstructure/v2/reflect_go1_20.go b/vendor/github.com/go-viper/mapstructure/v2/reflect_go1_20.go deleted file mode 100644 index f8255a1b1..000000000 --- a/vendor/github.com/go-viper/mapstructure/v2/reflect_go1_20.go +++ /dev/null @@ -1,10 +0,0 @@ -//go:build go1.20 - -package mapstructure - -import "reflect" - -// TODO: remove once we drop support for Go <1.20 -func isComparable(v reflect.Value) bool { - return v.Comparable() -} diff --git a/vendor/github.com/go-xmlfmt/xmlfmt/LICENSE b/vendor/github.com/go-xmlfmt/xmlfmt/LICENSE deleted file mode 100644 index 890776ab7..000000000 --- a/vendor/github.com/go-xmlfmt/xmlfmt/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -MIT License - -Copyright (c) 2016 go-xmlfmt - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. diff --git a/vendor/github.com/go-xmlfmt/xmlfmt/README.md b/vendor/github.com/go-xmlfmt/xmlfmt/README.md deleted file mode 100644 index 9f661ea2d..000000000 --- a/vendor/github.com/go-xmlfmt/xmlfmt/README.md +++ /dev/null @@ -1,245 +0,0 @@ -# Go XML Formatter - -[![MIT License](http://img.shields.io/badge/License-MIT-blue.svg)](LICENSE) -[![Go Doc](https://img.shields.io/badge/godoc-reference-4b68a3.svg)](https://godoc.org/github.com/go-xmlfmt/xmlfmt) -[![Go Report Card](https://goreportcard.com/badge/github.com/go-xmlfmt/xmlfmt)](https://goreportcard.com/report/github.com/go-xmlfmt/xmlfmt) - -## Synopsis - -The Go XML Formatter, xmlfmt, will format the XML string in a readable way. 
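FormatXML (defined in xmlfmt.go further down in this diff) works purely on the string, taking a line prefix and a per-level indent as its second and third arguments. A minimal call sketch with a throwaway document:

```go
package main

import (
	"fmt"

	"github.com/go-xmlfmt/xmlfmt"
)

func main() {
	doc := `<a><b>1</b><c/></a>`
	// Empty prefix, two-space indent; pass true as a fourth argument only
	// when comments may contain nested tags.
	fmt.Println(xmlfmt.FormatXML(doc, "", "  "))
}
```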
- -```go -package main - -import "github.com/go-xmlfmt/xmlfmt" - -func main() { - xml1 := `aSome org-or-otherWouldnt you like to knowPatCalifia` - x := xmlfmt.FormatXML(xml1, "\t", " ") - print(x) - - // If the XML Comments have nested tags in them - xml1 = ` Fred - - 23456 ` - x = xmlfmt.FormatXML(xml1, "", " ", true) - print(x) -} - -``` - -Output: - -```xml - - - a - - - - - Some org-or-other - Wouldnt you like to know - - - Pat - Califia - - - - - - - - Fred - - 23456 - -``` - -There is no XML decoding and encoding involved, only pure regular expression matching and replacing. So it is much faster than going through decoding and encoding procedures. Moreover, the exact XML source string is preserved, instead of being changed by the encoder. This is why this package exists in the first place. - -Note that - -- the default line ending is handled by the package automatically now. For Windows it's `CRLF`, and standard for anywhere else. No need to change the default line ending now. -- the case of XML comments nested within XML comments is ***not*** supported. Please avoid them or use any other tools to correct them before using this package. -- don't turn on the `nestedTagsInComments` parameter blindly, as the code has become 10+ times more complicated because of it. - -## Command - -To use it on command line, check out [xmlfmt](https://github.com/AntonioSun/xmlfmt): - - -``` -$ xmlfmt -V -xmlfmt - XML Formatter -Copyright (C) 2016-2022, Antonio Sun - -The xmlfmt will format the XML string without rewriting the document - -Built on 2022-02-06 -Version 1.1.1 - -$ xmlfmt -the required flag `-f, --file' was not specified - -Usage: - xmlfmt [OPTIONS] - -Application Options: - -f, --file= The xml file to read from (or "-" for stdin) [$XMLFMT_FILEI] - -p, --prefix= Each element begins on a new line and this prefix [$XMLFMT_PREFIX] - -i, --indent= Indent string for nested elements (default: ) [$XMLFMT_INDENT] - -n, --nested Nested tags in comments [$XMLFMT_NESTED] - -v, --verbose Verbose mode (Multiple -v options increase the verbosity) - -V, --version Show program version and exit - -Help Options: - -h, --help Show this help message - - -$ curl -sL https://pastebin.com/raw/z3euQ5PR | xmlfmt -f - - - - - a - - - - - Some org-or-other - Wouldnt you like to know - - - Pat - Califia - - - - - -$ curl -sL https://pastebin.com/raw/Zs0qy0qz | tee /tmp/xmlfmt.xml | xmlfmt -f - -n - - - Fred - - 23456 - - -$ XMLFMT_NESTED=true XMLFMT_PREFIX='|' xmlfmt -f /tmp/xmlfmt.xml - -| -| -| Fred -| -| 23456 -| -``` - - -## Justification - -### The format - -The Go XML Formatter is not called XML Beautifier because the result is not *exactly* as what people would expect -- most of the closing tags stays on the same line, just as shown above. Having been looking at the result and thinking over it, I now think it is actually a better way to present it, as those closing tags on the same line are better stay that way in my opinion. I.e., - -When it comes to very big XML strings, which is what I’m dealing every day, saving spaces by not allowing those closing tags taking extra lines is plus instead of negative to me. - -### The alternative - -To format it “properly”, i.e., as what people would normally see, is very hard using pure regular expression. In fact, according to Sam Whited from the go-nuts mlist, - -> Regular expression is, well, regular. This means that they can parse regular grammars, but can't parse context free grammars (like XML). 
It is actually impossible to use a regex to do this task; it will always be fragile, unfortunately. - -So if the output format is so important to you, then unfortunately you have to go through decoding and encoding procedures. But there are some drawbacks as well, as put by James McGill, in http://stackoverflow.com/questions/21117161, besides such method being slow: - -> I like this solution, but am still in search of a Golang XML formatter/prettyprinter that doesn't rewrite the document (other than formatting whitespace). Marshalling or using the Encoder will change namespace declarations. -> -> For example an element like "< ns1:Element />" will be translated to something like '< Element xmlns="http://bla...bla/ns1" >< /Element >' which seems harmless enough except when the intent is to not alter the xml other than formatting. -- James McGill Nov 12 '15 - -Using Sam's code as an example, - -https://play.golang.org/p/JUqQY3WpW5 - -The above code formats the following XML - -```xml - - - - - - 123 - John Brown - - - - -``` - -into this: - -```xml - -
- - - - 123 - John Brown - - - -
-``` - -I know they are syntactically the same, however the problem is that they *look* totally different. - -That's why there is this package, an XML Beautifier that doesn't rewrite the document. - -## Credit - -The credit goes to **diotalevi** from his post at http://www.perlmonks.org/?node_id=261292. - -However, it does not work for all cases. For example, - -```sh -$ echo '
123John Brown
' | perl -pe 's/(?<=>)\s+(?=<)//g; s(<(/?)([^/>]+)(/?)>\s*(?=(".($1&&($4 eq"
-123 -John Brown - - - - -``` - -I simplified the algorithm, and now it should work for all cases: - -```sh -echo '
123John Brown
' | perl -pe 's/(?<=>)\s+(?=<)//g; s(<(/?)([^>]+)(/?)>)($indent+=$3?0:$1?-1:1;"<$1$2$3>"."\n".(" "x$indent))ge' -``` -```xml - -
-
- - - - - 123 - - John Brown - - - -
-``` - -This package is a direct translate from above Perl code into Go, -then further enhanced by @ruandao and @chenbihao. diff --git a/vendor/github.com/go-xmlfmt/xmlfmt/xmlfmt.go b/vendor/github.com/go-xmlfmt/xmlfmt/xmlfmt.go deleted file mode 100644 index 4245e5ad7..000000000 --- a/vendor/github.com/go-xmlfmt/xmlfmt/xmlfmt.go +++ /dev/null @@ -1,92 +0,0 @@ -//////////////////////////////////////////////////////////////////////////// -// Porgram: xmlfmt.go -// Purpose: Go XML Beautify from XML string using pure string manipulation -// Authors: Antonio Sun (c) 2016-2022, All rights reserved -//////////////////////////////////////////////////////////////////////////// - -package xmlfmt - -import ( - "html" - "regexp" - "runtime" - "strings" -) - -var ( - reg = regexp.MustCompile(`<([/!]?)([^>]+?)(/?)>`) - // NL is the newline string used in XML output. - NL = "\n" -) - -func init() { - // define NL for Windows - if runtime.GOOS == "windows" { - NL = "\r\n" - } -} - -// FormatXML will (purly) reformat the XML string in a readable way, without any rewriting/altering the structure. -// If your XML Comments have nested tags in them, or you're not 100% sure otherwise, pass `true` as the third parameter to this function. But don't turn it on blindly, as the code has become ten times more complicated because of it. -func FormatXML(xmls, prefix, indent string, nestedTagsInComments ...bool) string { - nestedTagsInComment := false - if len(nestedTagsInComments) > 0 { - nestedTagsInComment = nestedTagsInComments[0] - } - reXmlComments := regexp.MustCompile(`(?s)()`) - src := regexp.MustCompile(`(?s)>\s+<`).ReplaceAllString(xmls, "><") - if nestedTagsInComment { - src = reXmlComments.ReplaceAllStringFunc(src, func(m string) string { - parts := reXmlComments.FindStringSubmatch(m) - p2 := regexp.MustCompile(`\r*\n`).ReplaceAllString(parts[2], " ") - return parts[1] + html.EscapeString(p2) + parts[3] - }) - } - rf := replaceTag(prefix, indent) - r := prefix + reg.ReplaceAllStringFunc(src, rf) - if nestedTagsInComment { - r = reXmlComments.ReplaceAllStringFunc(r, func(m string) string { - parts := reXmlComments.FindStringSubmatch(m) - return parts[1] + html.UnescapeString(parts[2]) + parts[3] - }) - } - - return r -} - -// replaceTag returns a closure function to do 's/(?<=>)\s+(?=<)//g; s(<(/?)([^>]+?)(/?)>)($indent+=$3?0:$1?-1:1;"<$1$2$3>"."\n".(" "x$indent))ge' as in Perl -// and deal with comments as well -func replaceTag(prefix, indent string) func(string) string { - indentLevel := 0 - lastEndElem := true - return func(m string) string { - // head elem - if strings.HasPrefix(m, "") { - lastEndElem = true - return NL + prefix + strings.Repeat(indent, indentLevel) + m - } - // comment elem - if strings.HasPrefix(m, " "${filename}" -benchmem - echo "OK" - git checkout ${backup} - sleep 5 - fi -} - - -to=$1 -current=`git rev-parse --abbrev-ref HEAD` - -bench ${to} $2 -bench ${current} $2 - -benchcmp $3 "/tmp/${to}-$2.bench" "/tmp/${current}-$2.bench" diff --git a/vendor/github.com/gobwas/glob/compiler/compiler.go b/vendor/github.com/gobwas/glob/compiler/compiler.go deleted file mode 100644 index 02e7de80a..000000000 --- a/vendor/github.com/gobwas/glob/compiler/compiler.go +++ /dev/null @@ -1,525 +0,0 @@ -package compiler - -// TODO use constructor with all matchers, and to their structs private -// TODO glue multiple Text nodes (like after QuoteMeta) - -import ( - "fmt" - "reflect" - - "github.com/gobwas/glob/match" - "github.com/gobwas/glob/syntax/ast" - "github.com/gobwas/glob/util/runes" -) - -func 
optimizeMatcher(matcher match.Matcher) match.Matcher { - switch m := matcher.(type) { - - case match.Any: - if len(m.Separators) == 0 { - return match.NewSuper() - } - - case match.AnyOf: - if len(m.Matchers) == 1 { - return m.Matchers[0] - } - - return m - - case match.List: - if m.Not == false && len(m.List) == 1 { - return match.NewText(string(m.List)) - } - - return m - - case match.BTree: - m.Left = optimizeMatcher(m.Left) - m.Right = optimizeMatcher(m.Right) - - r, ok := m.Value.(match.Text) - if !ok { - return m - } - - var ( - leftNil = m.Left == nil - rightNil = m.Right == nil - ) - if leftNil && rightNil { - return match.NewText(r.Str) - } - - _, leftSuper := m.Left.(match.Super) - lp, leftPrefix := m.Left.(match.Prefix) - la, leftAny := m.Left.(match.Any) - - _, rightSuper := m.Right.(match.Super) - rs, rightSuffix := m.Right.(match.Suffix) - ra, rightAny := m.Right.(match.Any) - - switch { - case leftSuper && rightSuper: - return match.NewContains(r.Str, false) - - case leftSuper && rightNil: - return match.NewSuffix(r.Str) - - case rightSuper && leftNil: - return match.NewPrefix(r.Str) - - case leftNil && rightSuffix: - return match.NewPrefixSuffix(r.Str, rs.Suffix) - - case rightNil && leftPrefix: - return match.NewPrefixSuffix(lp.Prefix, r.Str) - - case rightNil && leftAny: - return match.NewSuffixAny(r.Str, la.Separators) - - case leftNil && rightAny: - return match.NewPrefixAny(r.Str, ra.Separators) - } - - return m - } - - return matcher -} - -func compileMatchers(matchers []match.Matcher) (match.Matcher, error) { - if len(matchers) == 0 { - return nil, fmt.Errorf("compile error: need at least one matcher") - } - if len(matchers) == 1 { - return matchers[0], nil - } - if m := glueMatchers(matchers); m != nil { - return m, nil - } - - idx := -1 - maxLen := -1 - var val match.Matcher - for i, matcher := range matchers { - if l := matcher.Len(); l != -1 && l >= maxLen { - maxLen = l - idx = i - val = matcher - } - } - - if val == nil { // not found matcher with static length - r, err := compileMatchers(matchers[1:]) - if err != nil { - return nil, err - } - return match.NewBTree(matchers[0], nil, r), nil - } - - left := matchers[:idx] - var right []match.Matcher - if len(matchers) > idx+1 { - right = matchers[idx+1:] - } - - var l, r match.Matcher - var err error - if len(left) > 0 { - l, err = compileMatchers(left) - if err != nil { - return nil, err - } - } - - if len(right) > 0 { - r, err = compileMatchers(right) - if err != nil { - return nil, err - } - } - - return match.NewBTree(val, l, r), nil -} - -func glueMatchers(matchers []match.Matcher) match.Matcher { - if m := glueMatchersAsEvery(matchers); m != nil { - return m - } - if m := glueMatchersAsRow(matchers); m != nil { - return m - } - return nil -} - -func glueMatchersAsRow(matchers []match.Matcher) match.Matcher { - if len(matchers) <= 1 { - return nil - } - - var ( - c []match.Matcher - l int - ) - for _, matcher := range matchers { - if ml := matcher.Len(); ml == -1 { - return nil - } else { - c = append(c, matcher) - l += ml - } - } - return match.NewRow(l, c...) 
-} - -func glueMatchersAsEvery(matchers []match.Matcher) match.Matcher { - if len(matchers) <= 1 { - return nil - } - - var ( - hasAny bool - hasSuper bool - hasSingle bool - min int - separator []rune - ) - - for i, matcher := range matchers { - var sep []rune - - switch m := matcher.(type) { - case match.Super: - sep = []rune{} - hasSuper = true - - case match.Any: - sep = m.Separators - hasAny = true - - case match.Single: - sep = m.Separators - hasSingle = true - min++ - - case match.List: - if !m.Not { - return nil - } - sep = m.List - hasSingle = true - min++ - - default: - return nil - } - - // initialize - if i == 0 { - separator = sep - } - - if runes.Equal(sep, separator) { - continue - } - - return nil - } - - if hasSuper && !hasAny && !hasSingle { - return match.NewSuper() - } - - if hasAny && !hasSuper && !hasSingle { - return match.NewAny(separator) - } - - if (hasAny || hasSuper) && min > 0 && len(separator) == 0 { - return match.NewMin(min) - } - - every := match.NewEveryOf() - - if min > 0 { - every.Add(match.NewMin(min)) - - if !hasAny && !hasSuper { - every.Add(match.NewMax(min)) - } - } - - if len(separator) > 0 { - every.Add(match.NewContains(string(separator), true)) - } - - return every -} - -func minimizeMatchers(matchers []match.Matcher) []match.Matcher { - var done match.Matcher - var left, right, count int - - for l := 0; l < len(matchers); l++ { - for r := len(matchers); r > l; r-- { - if glued := glueMatchers(matchers[l:r]); glued != nil { - var swap bool - - if done == nil { - swap = true - } else { - cl, gl := done.Len(), glued.Len() - swap = cl > -1 && gl > -1 && gl > cl - swap = swap || count < r-l - } - - if swap { - done = glued - left = l - right = r - count = r - l - } - } - } - } - - if done == nil { - return matchers - } - - next := append(append([]match.Matcher{}, matchers[:left]...), done) - if right < len(matchers) { - next = append(next, matchers[right:]...) - } - - if len(next) == len(matchers) { - return next - } - - return minimizeMatchers(next) -} - -// minimizeAnyOf tries to apply some heuristics to minimize number of nodes in given tree -func minimizeTree(tree *ast.Node) *ast.Node { - switch tree.Kind { - case ast.KindAnyOf: - return minimizeTreeAnyOf(tree) - default: - return nil - } -} - -// minimizeAnyOf tries to find common children of given node of AnyOf pattern -// it searches for common children from left and from right -// if any common children are found – then it returns new optimized ast tree -// else it returns nil -func minimizeTreeAnyOf(tree *ast.Node) *ast.Node { - if !areOfSameKind(tree.Children, ast.KindPattern) { - return nil - } - - commonLeft, commonRight := commonChildren(tree.Children) - commonLeftCount, commonRightCount := len(commonLeft), len(commonRight) - if commonLeftCount == 0 && commonRightCount == 0 { // there are no common parts - return nil - } - - var result []*ast.Node - if commonLeftCount > 0 { - result = append(result, ast.NewNode(ast.KindPattern, nil, commonLeft...)) - } - - var anyOf []*ast.Node - for _, child := range tree.Children { - reuse := child.Children[commonLeftCount : len(child.Children)-commonRightCount] - var node *ast.Node - if len(reuse) == 0 { - // this pattern is completely reduced by commonLeft and commonRight patterns - // so it become nothing - node = ast.NewNode(ast.KindNothing, nil) - } else { - node = ast.NewNode(ast.KindPattern, nil, reuse...) 
- } - anyOf = appendIfUnique(anyOf, node) - } - switch { - case len(anyOf) == 1 && anyOf[0].Kind != ast.KindNothing: - result = append(result, anyOf[0]) - case len(anyOf) > 1: - result = append(result, ast.NewNode(ast.KindAnyOf, nil, anyOf...)) - } - - if commonRightCount > 0 { - result = append(result, ast.NewNode(ast.KindPattern, nil, commonRight...)) - } - - return ast.NewNode(ast.KindPattern, nil, result...) -} - -func commonChildren(nodes []*ast.Node) (commonLeft, commonRight []*ast.Node) { - if len(nodes) <= 1 { - return - } - - // find node that has least number of children - idx := leastChildren(nodes) - if idx == -1 { - return - } - tree := nodes[idx] - treeLength := len(tree.Children) - - // allocate max able size for rightCommon slice - // to get ability insert elements in reverse order (from end to start) - // without sorting - commonRight = make([]*ast.Node, treeLength) - lastRight := treeLength // will use this to get results as commonRight[lastRight:] - - var ( - breakLeft bool - breakRight bool - commonTotal int - ) - for i, j := 0, treeLength-1; commonTotal < treeLength && j >= 0 && !(breakLeft && breakRight); i, j = i+1, j-1 { - treeLeft := tree.Children[i] - treeRight := tree.Children[j] - - for k := 0; k < len(nodes) && !(breakLeft && breakRight); k++ { - // skip least children node - if k == idx { - continue - } - - restLeft := nodes[k].Children[i] - restRight := nodes[k].Children[j+len(nodes[k].Children)-treeLength] - - breakLeft = breakLeft || !treeLeft.Equal(restLeft) - - // disable searching for right common parts, if left part is already overlapping - breakRight = breakRight || (!breakLeft && j <= i) - breakRight = breakRight || !treeRight.Equal(restRight) - } - - if !breakLeft { - commonTotal++ - commonLeft = append(commonLeft, treeLeft) - } - if !breakRight { - commonTotal++ - lastRight = j - commonRight[j] = treeRight - } - } - - commonRight = commonRight[lastRight:] - - return -} - -func appendIfUnique(target []*ast.Node, val *ast.Node) []*ast.Node { - for _, n := range target { - if reflect.DeepEqual(n, val) { - return target - } - } - return append(target, val) -} - -func areOfSameKind(nodes []*ast.Node, kind ast.Kind) bool { - for _, n := range nodes { - if n.Kind != kind { - return false - } - } - return true -} - -func leastChildren(nodes []*ast.Node) int { - min := -1 - idx := -1 - for i, n := range nodes { - if idx == -1 || (len(n.Children) < min) { - min = len(n.Children) - idx = i - } - } - return idx -} - -func compileTreeChildren(tree *ast.Node, sep []rune) ([]match.Matcher, error) { - var matchers []match.Matcher - for _, desc := range tree.Children { - m, err := compile(desc, sep) - if err != nil { - return nil, err - } - matchers = append(matchers, optimizeMatcher(m)) - } - return matchers, nil -} - -func compile(tree *ast.Node, sep []rune) (m match.Matcher, err error) { - switch tree.Kind { - case ast.KindAnyOf: - // todo this could be faster on pattern_alternatives_combine_lite (see glob_test.go) - if n := minimizeTree(tree); n != nil { - return compile(n, sep) - } - matchers, err := compileTreeChildren(tree, sep) - if err != nil { - return nil, err - } - return match.NewAnyOf(matchers...), nil - - case ast.KindPattern: - if len(tree.Children) == 0 { - return match.NewNothing(), nil - } - matchers, err := compileTreeChildren(tree, sep) - if err != nil { - return nil, err - } - m, err = compileMatchers(minimizeMatchers(matchers)) - if err != nil { - return nil, err - } - - case ast.KindAny: - m = match.NewAny(sep) - - case ast.KindSuper: - m = 
match.NewSuper() - - case ast.KindSingle: - m = match.NewSingle(sep) - - case ast.KindNothing: - m = match.NewNothing() - - case ast.KindList: - l := tree.Value.(ast.List) - m = match.NewList([]rune(l.Chars), l.Not) - - case ast.KindRange: - r := tree.Value.(ast.Range) - m = match.NewRange(r.Lo, r.Hi, r.Not) - - case ast.KindText: - t := tree.Value.(ast.Text) - m = match.NewText(t.Text) - - default: - return nil, fmt.Errorf("could not compile tree: unknown node type") - } - - return optimizeMatcher(m), nil -} - -func Compile(tree *ast.Node, sep []rune) (match.Matcher, error) { - m, err := compile(tree, sep) - if err != nil { - return nil, err - } - - return m, nil -} diff --git a/vendor/github.com/gobwas/glob/glob.go b/vendor/github.com/gobwas/glob/glob.go deleted file mode 100644 index 2afde343a..000000000 --- a/vendor/github.com/gobwas/glob/glob.go +++ /dev/null @@ -1,80 +0,0 @@ -package glob - -import ( - "github.com/gobwas/glob/compiler" - "github.com/gobwas/glob/syntax" -) - -// Glob represents compiled glob pattern. -type Glob interface { - Match(string) bool -} - -// Compile creates Glob for given pattern and strings (if any present after pattern) as separators. -// The pattern syntax is: -// -// pattern: -// { term } -// -// term: -// `*` matches any sequence of non-separator characters -// `**` matches any sequence of characters -// `?` matches any single non-separator character -// `[` [ `!` ] { character-range } `]` -// character class (must be non-empty) -// `{` pattern-list `}` -// pattern alternatives -// c matches character c (c != `*`, `**`, `?`, `\`, `[`, `{`, `}`) -// `\` c matches character c -// -// character-range: -// c matches character c (c != `\\`, `-`, `]`) -// `\` c matches character c -// lo `-` hi matches character c for lo <= c <= hi -// -// pattern-list: -// pattern { `,` pattern } -// comma-separated (without spaces) patterns -// -func Compile(pattern string, separators ...rune) (Glob, error) { - ast, err := syntax.Parse(pattern) - if err != nil { - return nil, err - } - - matcher, err := compiler.Compile(ast, separators) - if err != nil { - return nil, err - } - - return matcher, nil -} - -// MustCompile is the same as Compile, except that if Compile returns error, this will panic -func MustCompile(pattern string, separators ...rune) Glob { - g, err := Compile(pattern, separators...) - if err != nil { - panic(err) - } - - return g -} - -// QuoteMeta returns a string that quotes all glob pattern meta characters -// inside the argument text; For example, QuoteMeta(`{foo*}`) returns `\[foo\*\]`. 
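The public surface being removed here is small: Compile/MustCompile produce a Glob whose Match does the work, optional separator runes keep `*` and `?` from crossing them, and QuoteMeta escapes metacharacters. A usage sketch; the patterns are chosen only for illustration:

```go
package main

import (
	"fmt"

	"github.com/gobwas/glob"
)

func main() {
	// Without separators, `*` can cross path boundaries.
	g := glob.MustCompile("*.go")
	fmt.Println(g.Match("main.go"), g.Match("cmd/main.go")) // true true

	// With '/' as a separator, `*` stops at it; `**` still crosses.
	sep := glob.MustCompile("*.go", '/')
	deep := glob.MustCompile("**.go", '/')
	fmt.Println(sep.Match("cmd/main.go"), deep.Match("cmd/main.go")) // false true

	// QuoteMeta escapes pattern metacharacters before embedding user input.
	fmt.Println(glob.QuoteMeta("{user}*"))
}
```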
-func QuoteMeta(s string) string { - b := make([]byte, 2*len(s)) - - // a byte loop is correct because all meta characters are ASCII - j := 0 - for i := 0; i < len(s); i++ { - if syntax.Special(s[i]) { - b[j] = '\\' - j++ - } - b[j] = s[i] - j++ - } - - return string(b[0:j]) -} diff --git a/vendor/github.com/gobwas/glob/match/any.go b/vendor/github.com/gobwas/glob/match/any.go deleted file mode 100644 index 514a9a5c4..000000000 --- a/vendor/github.com/gobwas/glob/match/any.go +++ /dev/null @@ -1,45 +0,0 @@ -package match - -import ( - "fmt" - "github.com/gobwas/glob/util/strings" -) - -type Any struct { - Separators []rune -} - -func NewAny(s []rune) Any { - return Any{s} -} - -func (self Any) Match(s string) bool { - return strings.IndexAnyRunes(s, self.Separators) == -1 -} - -func (self Any) Index(s string) (int, []int) { - found := strings.IndexAnyRunes(s, self.Separators) - switch found { - case -1: - case 0: - return 0, segments0 - default: - s = s[:found] - } - - segments := acquireSegments(len(s)) - for i := range s { - segments = append(segments, i) - } - segments = append(segments, len(s)) - - return 0, segments -} - -func (self Any) Len() int { - return lenNo -} - -func (self Any) String() string { - return fmt.Sprintf("", string(self.Separators)) -} diff --git a/vendor/github.com/gobwas/glob/match/any_of.go b/vendor/github.com/gobwas/glob/match/any_of.go deleted file mode 100644 index 8e65356cd..000000000 --- a/vendor/github.com/gobwas/glob/match/any_of.go +++ /dev/null @@ -1,82 +0,0 @@ -package match - -import "fmt" - -type AnyOf struct { - Matchers Matchers -} - -func NewAnyOf(m ...Matcher) AnyOf { - return AnyOf{Matchers(m)} -} - -func (self *AnyOf) Add(m Matcher) error { - self.Matchers = append(self.Matchers, m) - return nil -} - -func (self AnyOf) Match(s string) bool { - for _, m := range self.Matchers { - if m.Match(s) { - return true - } - } - - return false -} - -func (self AnyOf) Index(s string) (int, []int) { - index := -1 - - segments := acquireSegments(len(s)) - for _, m := range self.Matchers { - idx, seg := m.Index(s) - if idx == -1 { - continue - } - - if index == -1 || idx < index { - index = idx - segments = append(segments[:0], seg...) 
- continue - } - - if idx > index { - continue - } - - // here idx == index - segments = appendMerge(segments, seg) - } - - if index == -1 { - releaseSegments(segments) - return -1, nil - } - - return index, segments -} - -func (self AnyOf) Len() (l int) { - l = -1 - for _, m := range self.Matchers { - ml := m.Len() - switch { - case l == -1: - l = ml - continue - - case ml == -1: - return -1 - - case l != ml: - return -1 - } - } - - return -} - -func (self AnyOf) String() string { - return fmt.Sprintf("", self.Matchers) -} diff --git a/vendor/github.com/gobwas/glob/match/btree.go b/vendor/github.com/gobwas/glob/match/btree.go deleted file mode 100644 index a8130e93e..000000000 --- a/vendor/github.com/gobwas/glob/match/btree.go +++ /dev/null @@ -1,146 +0,0 @@ -package match - -import ( - "fmt" - "unicode/utf8" -) - -type BTree struct { - Value Matcher - Left Matcher - Right Matcher - ValueLengthRunes int - LeftLengthRunes int - RightLengthRunes int - LengthRunes int -} - -func NewBTree(Value, Left, Right Matcher) (tree BTree) { - tree.Value = Value - tree.Left = Left - tree.Right = Right - - lenOk := true - if tree.ValueLengthRunes = Value.Len(); tree.ValueLengthRunes == -1 { - lenOk = false - } - - if Left != nil { - if tree.LeftLengthRunes = Left.Len(); tree.LeftLengthRunes == -1 { - lenOk = false - } - } - - if Right != nil { - if tree.RightLengthRunes = Right.Len(); tree.RightLengthRunes == -1 { - lenOk = false - } - } - - if lenOk { - tree.LengthRunes = tree.LeftLengthRunes + tree.ValueLengthRunes + tree.RightLengthRunes - } else { - tree.LengthRunes = -1 - } - - return tree -} - -func (self BTree) Len() int { - return self.LengthRunes -} - -// todo? -func (self BTree) Index(s string) (int, []int) { - return -1, nil -} - -func (self BTree) Match(s string) bool { - inputLen := len(s) - - // self.Length, self.RLen and self.LLen are values meaning the length of runes for each part - // here we manipulating byte length for better optimizations - // but these checks still works, cause minLen of 1-rune string is 1 byte. 
- if self.LengthRunes != -1 && self.LengthRunes > inputLen { - return false - } - - // try to cut unnecessary parts - // by knowledge of length of right and left part - var offset, limit int - if self.LeftLengthRunes >= 0 { - offset = self.LeftLengthRunes - } - if self.RightLengthRunes >= 0 { - limit = inputLen - self.RightLengthRunes - } else { - limit = inputLen - } - - for offset < limit { - // search for matching part in substring - index, segments := self.Value.Index(s[offset:limit]) - if index == -1 { - releaseSegments(segments) - return false - } - - l := s[:offset+index] - var left bool - if self.Left != nil { - left = self.Left.Match(l) - } else { - left = l == "" - } - - if left { - for i := len(segments) - 1; i >= 0; i-- { - length := segments[i] - - var right bool - var r string - // if there is no string for the right branch - if inputLen <= offset+index+length { - r = "" - } else { - r = s[offset+index+length:] - } - - if self.Right != nil { - right = self.Right.Match(r) - } else { - right = r == "" - } - - if right { - releaseSegments(segments) - return true - } - } - } - - _, step := utf8.DecodeRuneInString(s[offset+index:]) - offset += index + step - - releaseSegments(segments) - } - - return false -} - -func (self BTree) String() string { - const n string = "" - var l, r string - if self.Left == nil { - l = n - } else { - l = self.Left.String() - } - if self.Right == nil { - r = n - } else { - r = self.Right.String() - } - - return fmt.Sprintf("%s]>", l, self.Value, r) -} diff --git a/vendor/github.com/gobwas/glob/match/contains.go b/vendor/github.com/gobwas/glob/match/contains.go deleted file mode 100644 index 0998e95b0..000000000 --- a/vendor/github.com/gobwas/glob/match/contains.go +++ /dev/null @@ -1,58 +0,0 @@ -package match - -import ( - "fmt" - "strings" -) - -type Contains struct { - Needle string - Not bool -} - -func NewContains(needle string, not bool) Contains { - return Contains{needle, not} -} - -func (self Contains) Match(s string) bool { - return strings.Contains(s, self.Needle) != self.Not -} - -func (self Contains) Index(s string) (int, []int) { - var offset int - - idx := strings.Index(s, self.Needle) - - if !self.Not { - if idx == -1 { - return -1, nil - } - - offset = idx + len(self.Needle) - if len(s) <= offset { - return 0, []int{offset} - } - s = s[offset:] - } else if idx != -1 { - s = s[:idx] - } - - segments := acquireSegments(len(s) + 1) - for i := range s { - segments = append(segments, offset+i) - } - - return 0, append(segments, offset+len(s)) -} - -func (self Contains) Len() int { - return lenNo -} - -func (self Contains) String() string { - var not string - if self.Not { - not = "!" 
- } - return fmt.Sprintf("", not, self.Needle) -} diff --git a/vendor/github.com/gobwas/glob/match/every_of.go b/vendor/github.com/gobwas/glob/match/every_of.go deleted file mode 100644 index 7c968ee36..000000000 --- a/vendor/github.com/gobwas/glob/match/every_of.go +++ /dev/null @@ -1,99 +0,0 @@ -package match - -import ( - "fmt" -) - -type EveryOf struct { - Matchers Matchers -} - -func NewEveryOf(m ...Matcher) EveryOf { - return EveryOf{Matchers(m)} -} - -func (self *EveryOf) Add(m Matcher) error { - self.Matchers = append(self.Matchers, m) - return nil -} - -func (self EveryOf) Len() (l int) { - for _, m := range self.Matchers { - if ml := m.Len(); l > 0 { - l += ml - } else { - return -1 - } - } - - return -} - -func (self EveryOf) Index(s string) (int, []int) { - var index int - var offset int - - // make `in` with cap as len(s), - // cause it is the maximum size of output segments values - next := acquireSegments(len(s)) - current := acquireSegments(len(s)) - - sub := s - for i, m := range self.Matchers { - idx, seg := m.Index(sub) - if idx == -1 { - releaseSegments(next) - releaseSegments(current) - return -1, nil - } - - if i == 0 { - // we use copy here instead of `current = seg` - // cause seg is a slice from reusable buffer `in` - // and it could be overwritten in next iteration - current = append(current, seg...) - } else { - // clear the next - next = next[:0] - - delta := index - (idx + offset) - for _, ex := range current { - for _, n := range seg { - if ex+delta == n { - next = append(next, n) - } - } - } - - if len(next) == 0 { - releaseSegments(next) - releaseSegments(current) - return -1, nil - } - - current = append(current[:0], next...) - } - - index = idx + offset - sub = s[index:] - offset += idx - } - - releaseSegments(next) - - return index, current -} - -func (self EveryOf) Match(s string) bool { - for _, m := range self.Matchers { - if !m.Match(s) { - return false - } - } - - return true -} - -func (self EveryOf) String() string { - return fmt.Sprintf("", self.Matchers) -} diff --git a/vendor/github.com/gobwas/glob/match/list.go b/vendor/github.com/gobwas/glob/match/list.go deleted file mode 100644 index 7fd763ecd..000000000 --- a/vendor/github.com/gobwas/glob/match/list.go +++ /dev/null @@ -1,49 +0,0 @@ -package match - -import ( - "fmt" - "github.com/gobwas/glob/util/runes" - "unicode/utf8" -) - -type List struct { - List []rune - Not bool -} - -func NewList(list []rune, not bool) List { - return List{list, not} -} - -func (self List) Match(s string) bool { - r, w := utf8.DecodeRuneInString(s) - if len(s) > w { - return false - } - - inList := runes.IndexRune(self.List, r) != -1 - return inList == !self.Not -} - -func (self List) Len() int { - return lenOne -} - -func (self List) Index(s string) (int, []int) { - for i, r := range s { - if self.Not == (runes.IndexRune(self.List, r) == -1) { - return i, segmentsByRuneLength[utf8.RuneLen(r)] - } - } - - return -1, nil -} - -func (self List) String() string { - var not string - if self.Not { - not = "!" 
- } - - return fmt.Sprintf("", not, string(self.List)) -} diff --git a/vendor/github.com/gobwas/glob/match/match.go b/vendor/github.com/gobwas/glob/match/match.go deleted file mode 100644 index f80e007fb..000000000 --- a/vendor/github.com/gobwas/glob/match/match.go +++ /dev/null @@ -1,81 +0,0 @@ -package match - -// todo common table of rune's length - -import ( - "fmt" - "strings" -) - -const lenOne = 1 -const lenZero = 0 -const lenNo = -1 - -type Matcher interface { - Match(string) bool - Index(string) (int, []int) - Len() int - String() string -} - -type Matchers []Matcher - -func (m Matchers) String() string { - var s []string - for _, matcher := range m { - s = append(s, fmt.Sprint(matcher)) - } - - return fmt.Sprintf("%s", strings.Join(s, ",")) -} - -// appendMerge merges and sorts given already SORTED and UNIQUE segments. -func appendMerge(target, sub []int) []int { - lt, ls := len(target), len(sub) - out := make([]int, 0, lt+ls) - - for x, y := 0, 0; x < lt || y < ls; { - if x >= lt { - out = append(out, sub[y:]...) - break - } - - if y >= ls { - out = append(out, target[x:]...) - break - } - - xValue := target[x] - yValue := sub[y] - - switch { - - case xValue == yValue: - out = append(out, xValue) - x++ - y++ - - case xValue < yValue: - out = append(out, xValue) - x++ - - case yValue < xValue: - out = append(out, yValue) - y++ - - } - } - - target = append(target[:0], out...) - - return target -} - -func reverseSegments(input []int) { - l := len(input) - m := l / 2 - - for i := 0; i < m; i++ { - input[i], input[l-i-1] = input[l-i-1], input[i] - } -} diff --git a/vendor/github.com/gobwas/glob/match/max.go b/vendor/github.com/gobwas/glob/match/max.go deleted file mode 100644 index d72f69eff..000000000 --- a/vendor/github.com/gobwas/glob/match/max.go +++ /dev/null @@ -1,49 +0,0 @@ -package match - -import ( - "fmt" - "unicode/utf8" -) - -type Max struct { - Limit int -} - -func NewMax(l int) Max { - return Max{l} -} - -func (self Max) Match(s string) bool { - var l int - for range s { - l += 1 - if l > self.Limit { - return false - } - } - - return true -} - -func (self Max) Index(s string) (int, []int) { - segments := acquireSegments(self.Limit + 1) - segments = append(segments, 0) - var count int - for i, r := range s { - count++ - if count > self.Limit { - break - } - segments = append(segments, i+utf8.RuneLen(r)) - } - - return 0, segments -} - -func (self Max) Len() int { - return lenNo -} - -func (self Max) String() string { - return fmt.Sprintf("", self.Limit) -} diff --git a/vendor/github.com/gobwas/glob/match/min.go b/vendor/github.com/gobwas/glob/match/min.go deleted file mode 100644 index db57ac8eb..000000000 --- a/vendor/github.com/gobwas/glob/match/min.go +++ /dev/null @@ -1,57 +0,0 @@ -package match - -import ( - "fmt" - "unicode/utf8" -) - -type Min struct { - Limit int -} - -func NewMin(l int) Min { - return Min{l} -} - -func (self Min) Match(s string) bool { - var l int - for range s { - l += 1 - if l >= self.Limit { - return true - } - } - - return false -} - -func (self Min) Index(s string) (int, []int) { - var count int - - c := len(s) - self.Limit + 1 - if c <= 0 { - return -1, nil - } - - segments := acquireSegments(c) - for i, r := range s { - count++ - if count >= self.Limit { - segments = append(segments, i+utf8.RuneLen(r)) - } - } - - if len(segments) == 0 { - return -1, nil - } - - return 0, segments -} - -func (self Min) Len() int { - return lenNo -} - -func (self Min) String() string { - return fmt.Sprintf("", self.Limit) -} diff --git 
a/vendor/github.com/gobwas/glob/match/nothing.go b/vendor/github.com/gobwas/glob/match/nothing.go deleted file mode 100644 index 0d4ecd36b..000000000 --- a/vendor/github.com/gobwas/glob/match/nothing.go +++ /dev/null @@ -1,27 +0,0 @@ -package match - -import ( - "fmt" -) - -type Nothing struct{} - -func NewNothing() Nothing { - return Nothing{} -} - -func (self Nothing) Match(s string) bool { - return len(s) == 0 -} - -func (self Nothing) Index(s string) (int, []int) { - return 0, segments0 -} - -func (self Nothing) Len() int { - return lenZero -} - -func (self Nothing) String() string { - return fmt.Sprintf("") -} diff --git a/vendor/github.com/gobwas/glob/match/prefix.go b/vendor/github.com/gobwas/glob/match/prefix.go deleted file mode 100644 index a7347250e..000000000 --- a/vendor/github.com/gobwas/glob/match/prefix.go +++ /dev/null @@ -1,50 +0,0 @@ -package match - -import ( - "fmt" - "strings" - "unicode/utf8" -) - -type Prefix struct { - Prefix string -} - -func NewPrefix(p string) Prefix { - return Prefix{p} -} - -func (self Prefix) Index(s string) (int, []int) { - idx := strings.Index(s, self.Prefix) - if idx == -1 { - return -1, nil - } - - length := len(self.Prefix) - var sub string - if len(s) > idx+length { - sub = s[idx+length:] - } else { - sub = "" - } - - segments := acquireSegments(len(sub) + 1) - segments = append(segments, length) - for i, r := range sub { - segments = append(segments, length+i+utf8.RuneLen(r)) - } - - return idx, segments -} - -func (self Prefix) Len() int { - return lenNo -} - -func (self Prefix) Match(s string) bool { - return strings.HasPrefix(s, self.Prefix) -} - -func (self Prefix) String() string { - return fmt.Sprintf("", self.Prefix) -} diff --git a/vendor/github.com/gobwas/glob/match/prefix_any.go b/vendor/github.com/gobwas/glob/match/prefix_any.go deleted file mode 100644 index 8ee58fe1b..000000000 --- a/vendor/github.com/gobwas/glob/match/prefix_any.go +++ /dev/null @@ -1,55 +0,0 @@ -package match - -import ( - "fmt" - "strings" - "unicode/utf8" - - sutil "github.com/gobwas/glob/util/strings" -) - -type PrefixAny struct { - Prefix string - Separators []rune -} - -func NewPrefixAny(s string, sep []rune) PrefixAny { - return PrefixAny{s, sep} -} - -func (self PrefixAny) Index(s string) (int, []int) { - idx := strings.Index(s, self.Prefix) - if idx == -1 { - return -1, nil - } - - n := len(self.Prefix) - sub := s[idx+n:] - i := sutil.IndexAnyRunes(sub, self.Separators) - if i > -1 { - sub = sub[:i] - } - - seg := acquireSegments(len(sub) + 1) - seg = append(seg, n) - for i, r := range sub { - seg = append(seg, n+i+utf8.RuneLen(r)) - } - - return idx, seg -} - -func (self PrefixAny) Len() int { - return lenNo -} - -func (self PrefixAny) Match(s string) bool { - if !strings.HasPrefix(s, self.Prefix) { - return false - } - return sutil.IndexAnyRunes(s[len(self.Prefix):], self.Separators) == -1 -} - -func (self PrefixAny) String() string { - return fmt.Sprintf("", self.Prefix, string(self.Separators)) -} diff --git a/vendor/github.com/gobwas/glob/match/prefix_suffix.go b/vendor/github.com/gobwas/glob/match/prefix_suffix.go deleted file mode 100644 index 8208085a1..000000000 --- a/vendor/github.com/gobwas/glob/match/prefix_suffix.go +++ /dev/null @@ -1,62 +0,0 @@ -package match - -import ( - "fmt" - "strings" -) - -type PrefixSuffix struct { - Prefix, Suffix string -} - -func NewPrefixSuffix(p, s string) PrefixSuffix { - return PrefixSuffix{p, s} -} - -func (self PrefixSuffix) Index(s string) (int, []int) { - prefixIdx := strings.Index(s, 
self.Prefix) - if prefixIdx == -1 { - return -1, nil - } - - suffixLen := len(self.Suffix) - if suffixLen <= 0 { - return prefixIdx, []int{len(s) - prefixIdx} - } - - if (len(s) - prefixIdx) <= 0 { - return -1, nil - } - - segments := acquireSegments(len(s) - prefixIdx) - for sub := s[prefixIdx:]; ; { - suffixIdx := strings.LastIndex(sub, self.Suffix) - if suffixIdx == -1 { - break - } - - segments = append(segments, suffixIdx+suffixLen) - sub = sub[:suffixIdx] - } - - if len(segments) == 0 { - releaseSegments(segments) - return -1, nil - } - - reverseSegments(segments) - - return prefixIdx, segments -} - -func (self PrefixSuffix) Len() int { - return lenNo -} - -func (self PrefixSuffix) Match(s string) bool { - return strings.HasPrefix(s, self.Prefix) && strings.HasSuffix(s, self.Suffix) -} - -func (self PrefixSuffix) String() string { - return fmt.Sprintf("", self.Prefix, self.Suffix) -} diff --git a/vendor/github.com/gobwas/glob/match/range.go b/vendor/github.com/gobwas/glob/match/range.go deleted file mode 100644 index ce30245a4..000000000 --- a/vendor/github.com/gobwas/glob/match/range.go +++ /dev/null @@ -1,48 +0,0 @@ -package match - -import ( - "fmt" - "unicode/utf8" -) - -type Range struct { - Lo, Hi rune - Not bool -} - -func NewRange(lo, hi rune, not bool) Range { - return Range{lo, hi, not} -} - -func (self Range) Len() int { - return lenOne -} - -func (self Range) Match(s string) bool { - r, w := utf8.DecodeRuneInString(s) - if len(s) > w { - return false - } - - inRange := r >= self.Lo && r <= self.Hi - - return inRange == !self.Not -} - -func (self Range) Index(s string) (int, []int) { - for i, r := range s { - if self.Not != (r >= self.Lo && r <= self.Hi) { - return i, segmentsByRuneLength[utf8.RuneLen(r)] - } - } - - return -1, nil -} - -func (self Range) String() string { - var not string - if self.Not { - not = "!" 
- } - return fmt.Sprintf("", not, string(self.Lo), string(self.Hi)) -} diff --git a/vendor/github.com/gobwas/glob/match/row.go b/vendor/github.com/gobwas/glob/match/row.go deleted file mode 100644 index 4379042e4..000000000 --- a/vendor/github.com/gobwas/glob/match/row.go +++ /dev/null @@ -1,77 +0,0 @@ -package match - -import ( - "fmt" -) - -type Row struct { - Matchers Matchers - RunesLength int - Segments []int -} - -func NewRow(len int, m ...Matcher) Row { - return Row{ - Matchers: Matchers(m), - RunesLength: len, - Segments: []int{len}, - } -} - -func (self Row) matchAll(s string) bool { - var idx int - for _, m := range self.Matchers { - length := m.Len() - - var next, i int - for next = range s[idx:] { - i++ - if i == length { - break - } - } - - if i < length || !m.Match(s[idx:idx+next+1]) { - return false - } - - idx += next + 1 - } - - return true -} - -func (self Row) lenOk(s string) bool { - var i int - for range s { - i++ - if i > self.RunesLength { - return false - } - } - return self.RunesLength == i -} - -func (self Row) Match(s string) bool { - return self.lenOk(s) && self.matchAll(s) -} - -func (self Row) Len() (l int) { - return self.RunesLength -} - -func (self Row) Index(s string) (int, []int) { - for i := range s { - if len(s[i:]) < self.RunesLength { - break - } - if self.matchAll(s[i:]) { - return i, self.Segments - } - } - return -1, nil -} - -func (self Row) String() string { - return fmt.Sprintf("", self.RunesLength, self.Matchers) -} diff --git a/vendor/github.com/gobwas/glob/match/segments.go b/vendor/github.com/gobwas/glob/match/segments.go deleted file mode 100644 index 9ea6f3094..000000000 --- a/vendor/github.com/gobwas/glob/match/segments.go +++ /dev/null @@ -1,91 +0,0 @@ -package match - -import ( - "sync" -) - -type SomePool interface { - Get() []int - Put([]int) -} - -var segmentsPools [1024]sync.Pool - -func toPowerOfTwo(v int) int { - v-- - v |= v >> 1 - v |= v >> 2 - v |= v >> 4 - v |= v >> 8 - v |= v >> 16 - v++ - - return v -} - -const ( - cacheFrom = 16 - cacheToAndHigher = 1024 - cacheFromIndex = 15 - cacheToAndHigherIndex = 1023 -) - -var ( - segments0 = []int{0} - segments1 = []int{1} - segments2 = []int{2} - segments3 = []int{3} - segments4 = []int{4} -) - -var segmentsByRuneLength [5][]int = [5][]int{ - 0: segments0, - 1: segments1, - 2: segments2, - 3: segments3, - 4: segments4, -} - -func init() { - for i := cacheToAndHigher; i >= cacheFrom; i >>= 1 { - func(i int) { - segmentsPools[i-1] = sync.Pool{New: func() interface{} { - return make([]int, 0, i) - }} - }(i) - } -} - -func getTableIndex(c int) int { - p := toPowerOfTwo(c) - switch { - case p >= cacheToAndHigher: - return cacheToAndHigherIndex - case p <= cacheFrom: - return cacheFromIndex - default: - return p - 1 - } -} - -func acquireSegments(c int) []int { - // make []int with less capacity than cacheFrom - // is faster than acquiring it from pool - if c < cacheFrom { - return make([]int, 0, c) - } - - return segmentsPools[getTableIndex(c)].Get().([]int)[:0] -} - -func releaseSegments(s []int) { - c := cap(s) - - // make []int with less capacity than cacheFrom - // is faster than acquiring it from pool - if c < cacheFrom { - return - } - - segmentsPools[getTableIndex(c)].Put(s) -} diff --git a/vendor/github.com/gobwas/glob/match/single.go b/vendor/github.com/gobwas/glob/match/single.go deleted file mode 100644 index ee6e3954c..000000000 --- a/vendor/github.com/gobwas/glob/match/single.go +++ /dev/null @@ -1,43 +0,0 @@ -package match - -import ( - "fmt" - 
"github.com/gobwas/glob/util/runes" - "unicode/utf8" -) - -// single represents ? -type Single struct { - Separators []rune -} - -func NewSingle(s []rune) Single { - return Single{s} -} - -func (self Single) Match(s string) bool { - r, w := utf8.DecodeRuneInString(s) - if len(s) > w { - return false - } - - return runes.IndexRune(self.Separators, r) == -1 -} - -func (self Single) Len() int { - return lenOne -} - -func (self Single) Index(s string) (int, []int) { - for i, r := range s { - if runes.IndexRune(self.Separators, r) == -1 { - return i, segmentsByRuneLength[utf8.RuneLen(r)] - } - } - - return -1, nil -} - -func (self Single) String() string { - return fmt.Sprintf("", string(self.Separators)) -} diff --git a/vendor/github.com/gobwas/glob/match/suffix.go b/vendor/github.com/gobwas/glob/match/suffix.go deleted file mode 100644 index 85bea8c68..000000000 --- a/vendor/github.com/gobwas/glob/match/suffix.go +++ /dev/null @@ -1,35 +0,0 @@ -package match - -import ( - "fmt" - "strings" -) - -type Suffix struct { - Suffix string -} - -func NewSuffix(s string) Suffix { - return Suffix{s} -} - -func (self Suffix) Len() int { - return lenNo -} - -func (self Suffix) Match(s string) bool { - return strings.HasSuffix(s, self.Suffix) -} - -func (self Suffix) Index(s string) (int, []int) { - idx := strings.Index(s, self.Suffix) - if idx == -1 { - return -1, nil - } - - return 0, []int{idx + len(self.Suffix)} -} - -func (self Suffix) String() string { - return fmt.Sprintf("", self.Suffix) -} diff --git a/vendor/github.com/gobwas/glob/match/suffix_any.go b/vendor/github.com/gobwas/glob/match/suffix_any.go deleted file mode 100644 index c5106f819..000000000 --- a/vendor/github.com/gobwas/glob/match/suffix_any.go +++ /dev/null @@ -1,43 +0,0 @@ -package match - -import ( - "fmt" - "strings" - - sutil "github.com/gobwas/glob/util/strings" -) - -type SuffixAny struct { - Suffix string - Separators []rune -} - -func NewSuffixAny(s string, sep []rune) SuffixAny { - return SuffixAny{s, sep} -} - -func (self SuffixAny) Index(s string) (int, []int) { - idx := strings.Index(s, self.Suffix) - if idx == -1 { - return -1, nil - } - - i := sutil.LastIndexAnyRunes(s[:idx], self.Separators) + 1 - - return i, []int{idx + len(self.Suffix) - i} -} - -func (self SuffixAny) Len() int { - return lenNo -} - -func (self SuffixAny) Match(s string) bool { - if !strings.HasSuffix(s, self.Suffix) { - return false - } - return sutil.IndexAnyRunes(s[:len(s)-len(self.Suffix)], self.Separators) == -1 -} - -func (self SuffixAny) String() string { - return fmt.Sprintf("", string(self.Separators), self.Suffix) -} diff --git a/vendor/github.com/gobwas/glob/match/super.go b/vendor/github.com/gobwas/glob/match/super.go deleted file mode 100644 index 3875950bb..000000000 --- a/vendor/github.com/gobwas/glob/match/super.go +++ /dev/null @@ -1,33 +0,0 @@ -package match - -import ( - "fmt" -) - -type Super struct{} - -func NewSuper() Super { - return Super{} -} - -func (self Super) Match(s string) bool { - return true -} - -func (self Super) Len() int { - return lenNo -} - -func (self Super) Index(s string) (int, []int) { - segments := acquireSegments(len(s) + 1) - for i := range s { - segments = append(segments, i) - } - segments = append(segments, len(s)) - - return 0, segments -} - -func (self Super) String() string { - return fmt.Sprintf("") -} diff --git a/vendor/github.com/gobwas/glob/match/text.go b/vendor/github.com/gobwas/glob/match/text.go deleted file mode 100644 index 0a17616d3..000000000 --- 
a/vendor/github.com/gobwas/glob/match/text.go +++ /dev/null @@ -1,45 +0,0 @@ -package match - -import ( - "fmt" - "strings" - "unicode/utf8" -) - -// raw represents raw string to match -type Text struct { - Str string - RunesLength int - BytesLength int - Segments []int -} - -func NewText(s string) Text { - return Text{ - Str: s, - RunesLength: utf8.RuneCountInString(s), - BytesLength: len(s), - Segments: []int{len(s)}, - } -} - -func (self Text) Match(s string) bool { - return self.Str == s -} - -func (self Text) Len() int { - return self.RunesLength -} - -func (self Text) Index(s string) (int, []int) { - index := strings.Index(s, self.Str) - if index == -1 { - return -1, nil - } - - return index, self.Segments -} - -func (self Text) String() string { - return fmt.Sprintf("", self.Str) -} diff --git a/vendor/github.com/gobwas/glob/readme.md b/vendor/github.com/gobwas/glob/readme.md deleted file mode 100644 index f58144e73..000000000 --- a/vendor/github.com/gobwas/glob/readme.md +++ /dev/null @@ -1,148 +0,0 @@ -# glob.[go](https://golang.org) - -[![GoDoc][godoc-image]][godoc-url] [![Build Status][travis-image]][travis-url] - -> Go Globbing Library. - -## Install - -```shell - go get github.com/gobwas/glob -``` - -## Example - -```go - -package main - -import "github.com/gobwas/glob" - -func main() { - var g glob.Glob - - // create simple glob - g = glob.MustCompile("*.github.com") - g.Match("api.github.com") // true - - // quote meta characters and then create simple glob - g = glob.MustCompile(glob.QuoteMeta("*.github.com")) - g.Match("*.github.com") // true - - // create new glob with set of delimiters as ["."] - g = glob.MustCompile("api.*.com", '.') - g.Match("api.github.com") // true - g.Match("api.gi.hub.com") // false - - // create new glob with set of delimiters as ["."] - // but now with super wildcard - g = glob.MustCompile("api.**.com", '.') - g.Match("api.github.com") // true - g.Match("api.gi.hub.com") // true - - // create glob with single symbol wildcard - g = glob.MustCompile("?at") - g.Match("cat") // true - g.Match("fat") // true - g.Match("at") // false - - // create glob with single symbol wildcard and delimiters ['f'] - g = glob.MustCompile("?at", 'f') - g.Match("cat") // true - g.Match("fat") // false - g.Match("at") // false - - // create glob with character-list matchers - g = glob.MustCompile("[abc]at") - g.Match("cat") // true - g.Match("bat") // true - g.Match("fat") // false - g.Match("at") // false - - // create glob with character-list matchers - g = glob.MustCompile("[!abc]at") - g.Match("cat") // false - g.Match("bat") // false - g.Match("fat") // true - g.Match("at") // false - - // create glob with character-range matchers - g = glob.MustCompile("[a-c]at") - g.Match("cat") // true - g.Match("bat") // true - g.Match("fat") // false - g.Match("at") // false - - // create glob with character-range matchers - g = glob.MustCompile("[!a-c]at") - g.Match("cat") // false - g.Match("bat") // false - g.Match("fat") // true - g.Match("at") // false - - // create glob with pattern-alternatives list - g = glob.MustCompile("{cat,bat,[fr]at}") - g.Match("cat") // true - g.Match("bat") // true - g.Match("fat") // true - g.Match("rat") // true - g.Match("at") // false - g.Match("zat") // false -} - -``` - -## Performance - -This library is created for compile-once patterns. This means, that compilation could take time, but -strings matching is done faster, than in case when always parsing template. 
- -If you will not use compiled `glob.Glob` object, and do `g := glob.MustCompile(pattern); g.Match(...)` every time, then your code will be much more slower. - -Run `go test -bench=.` from source root to see the benchmarks: - -Pattern | Fixture | Match | Speed (ns/op) ---------|---------|-------|-------------- -`[a-z][!a-x]*cat*[h][!b]*eyes*` | `my cat has very bright eyes` | `true` | 432 -`[a-z][!a-x]*cat*[h][!b]*eyes*` | `my dog has very bright eyes` | `false` | 199 -`https://*.google.*` | `https://account.google.com` | `true` | 96 -`https://*.google.*` | `https://google.com` | `false` | 66 -`{https://*.google.*,*yandex.*,*yahoo.*,*mail.ru}` | `http://yahoo.com` | `true` | 163 -`{https://*.google.*,*yandex.*,*yahoo.*,*mail.ru}` | `http://google.com` | `false` | 197 -`{https://*gobwas.com,http://exclude.gobwas.com}` | `https://safe.gobwas.com` | `true` | 22 -`{https://*gobwas.com,http://exclude.gobwas.com}` | `http://safe.gobwas.com` | `false` | 24 -`abc*` | `abcdef` | `true` | 8.15 -`abc*` | `af` | `false` | 5.68 -`*def` | `abcdef` | `true` | 8.84 -`*def` | `af` | `false` | 5.74 -`ab*ef` | `abcdef` | `true` | 15.2 -`ab*ef` | `af` | `false` | 10.4 - -The same things with `regexp` package: - -Pattern | Fixture | Match | Speed (ns/op) ---------|---------|-------|-------------- -`^[a-z][^a-x].*cat.*[h][^b].*eyes.*$` | `my cat has very bright eyes` | `true` | 2553 -`^[a-z][^a-x].*cat.*[h][^b].*eyes.*$` | `my dog has very bright eyes` | `false` | 1383 -`^https:\/\/.*\.google\..*$` | `https://account.google.com` | `true` | 1205 -`^https:\/\/.*\.google\..*$` | `https://google.com` | `false` | 767 -`^(https:\/\/.*\.google\..*|.*yandex\..*|.*yahoo\..*|.*mail\.ru)$` | `http://yahoo.com` | `true` | 1435 -`^(https:\/\/.*\.google\..*|.*yandex\..*|.*yahoo\..*|.*mail\.ru)$` | `http://google.com` | `false` | 1674 -`^(https:\/\/.*gobwas\.com|http://exclude.gobwas.com)$` | `https://safe.gobwas.com` | `true` | 1039 -`^(https:\/\/.*gobwas\.com|http://exclude.gobwas.com)$` | `http://safe.gobwas.com` | `false` | 272 -`^abc.*$` | `abcdef` | `true` | 237 -`^abc.*$` | `af` | `false` | 100 -`^.*def$` | `abcdef` | `true` | 464 -`^.*def$` | `af` | `false` | 265 -`^ab.*ef$` | `abcdef` | `true` | 375 -`^ab.*ef$` | `af` | `false` | 145 - -[godoc-image]: https://godoc.org/github.com/gobwas/glob?status.svg -[godoc-url]: https://godoc.org/github.com/gobwas/glob -[travis-image]: https://travis-ci.org/gobwas/glob.svg?branch=master -[travis-url]: https://travis-ci.org/gobwas/glob - -## Syntax - -Syntax is inspired by [standard wildcards](http://tldp.org/LDP/GNU-Linux-Tools-Summary/html/x11655.htm), -except that `**` is aka super-asterisk, that do not sensitive for separators. 
\ No newline at end of file diff --git a/vendor/github.com/gobwas/glob/syntax/ast/ast.go b/vendor/github.com/gobwas/glob/syntax/ast/ast.go deleted file mode 100644 index 3220a694a..000000000 --- a/vendor/github.com/gobwas/glob/syntax/ast/ast.go +++ /dev/null @@ -1,122 +0,0 @@ -package ast - -import ( - "bytes" - "fmt" -) - -type Node struct { - Parent *Node - Children []*Node - Value interface{} - Kind Kind -} - -func NewNode(k Kind, v interface{}, ch ...*Node) *Node { - n := &Node{ - Kind: k, - Value: v, - } - for _, c := range ch { - Insert(n, c) - } - return n -} - -func (a *Node) Equal(b *Node) bool { - if a.Kind != b.Kind { - return false - } - if a.Value != b.Value { - return false - } - if len(a.Children) != len(b.Children) { - return false - } - for i, c := range a.Children { - if !c.Equal(b.Children[i]) { - return false - } - } - return true -} - -func (a *Node) String() string { - var buf bytes.Buffer - buf.WriteString(a.Kind.String()) - if a.Value != nil { - buf.WriteString(" =") - buf.WriteString(fmt.Sprintf("%v", a.Value)) - } - if len(a.Children) > 0 { - buf.WriteString(" [") - for i, c := range a.Children { - if i > 0 { - buf.WriteString(", ") - } - buf.WriteString(c.String()) - } - buf.WriteString("]") - } - return buf.String() -} - -func Insert(parent *Node, children ...*Node) { - parent.Children = append(parent.Children, children...) - for _, ch := range children { - ch.Parent = parent - } -} - -type List struct { - Not bool - Chars string -} - -type Range struct { - Not bool - Lo, Hi rune -} - -type Text struct { - Text string -} - -type Kind int - -const ( - KindNothing Kind = iota - KindPattern - KindList - KindRange - KindText - KindAny - KindSuper - KindSingle - KindAnyOf -) - -func (k Kind) String() string { - switch k { - case KindNothing: - return "Nothing" - case KindPattern: - return "Pattern" - case KindList: - return "List" - case KindRange: - return "Range" - case KindText: - return "Text" - case KindAny: - return "Any" - case KindSuper: - return "Super" - case KindSingle: - return "Single" - case KindAnyOf: - return "AnyOf" - default: - return "" - } -} diff --git a/vendor/github.com/gobwas/glob/syntax/ast/parser.go b/vendor/github.com/gobwas/glob/syntax/ast/parser.go deleted file mode 100644 index 429b40943..000000000 --- a/vendor/github.com/gobwas/glob/syntax/ast/parser.go +++ /dev/null @@ -1,157 +0,0 @@ -package ast - -import ( - "errors" - "fmt" - "github.com/gobwas/glob/syntax/lexer" - "unicode/utf8" -) - -type Lexer interface { - Next() lexer.Token -} - -type parseFn func(*Node, Lexer) (parseFn, *Node, error) - -func Parse(lexer Lexer) (*Node, error) { - var parser parseFn - - root := NewNode(KindPattern, nil) - - var ( - tree *Node - err error - ) - for parser, tree = parserMain, root; parser != nil; { - parser, tree, err = parser(tree, lexer) - if err != nil { - return nil, err - } - } - - return root, nil -} - -func parserMain(tree *Node, lex Lexer) (parseFn, *Node, error) { - for { - token := lex.Next() - switch token.Type { - case lexer.EOF: - return nil, tree, nil - - case lexer.Error: - return nil, tree, errors.New(token.Raw) - - case lexer.Text: - Insert(tree, NewNode(KindText, Text{token.Raw})) - return parserMain, tree, nil - - case lexer.Any: - Insert(tree, NewNode(KindAny, nil)) - return parserMain, tree, nil - - case lexer.Super: - Insert(tree, NewNode(KindSuper, nil)) - return parserMain, tree, nil - - case lexer.Single: - Insert(tree, NewNode(KindSingle, nil)) - return parserMain, tree, nil - - case lexer.RangeOpen: - return parserRange, 
tree, nil - - case lexer.TermsOpen: - a := NewNode(KindAnyOf, nil) - Insert(tree, a) - - p := NewNode(KindPattern, nil) - Insert(a, p) - - return parserMain, p, nil - - case lexer.Separator: - p := NewNode(KindPattern, nil) - Insert(tree.Parent, p) - - return parserMain, p, nil - - case lexer.TermsClose: - return parserMain, tree.Parent.Parent, nil - - default: - return nil, tree, fmt.Errorf("unexpected token: %s", token) - } - } - return nil, tree, fmt.Errorf("unknown error") -} - -func parserRange(tree *Node, lex Lexer) (parseFn, *Node, error) { - var ( - not bool - lo rune - hi rune - chars string - ) - for { - token := lex.Next() - switch token.Type { - case lexer.EOF: - return nil, tree, errors.New("unexpected end") - - case lexer.Error: - return nil, tree, errors.New(token.Raw) - - case lexer.Not: - not = true - - case lexer.RangeLo: - r, w := utf8.DecodeRuneInString(token.Raw) - if len(token.Raw) > w { - return nil, tree, fmt.Errorf("unexpected length of lo character") - } - lo = r - - case lexer.RangeBetween: - // - - case lexer.RangeHi: - r, w := utf8.DecodeRuneInString(token.Raw) - if len(token.Raw) > w { - return nil, tree, fmt.Errorf("unexpected length of lo character") - } - - hi = r - - if hi < lo { - return nil, tree, fmt.Errorf("hi character '%s' should be greater than lo '%s'", string(hi), string(lo)) - } - - case lexer.Text: - chars = token.Raw - - case lexer.RangeClose: - isRange := lo != 0 && hi != 0 - isChars := chars != "" - - if isChars == isRange { - return nil, tree, fmt.Errorf("could not parse range") - } - - if isRange { - Insert(tree, NewNode(KindRange, Range{ - Lo: lo, - Hi: hi, - Not: not, - })) - } else { - Insert(tree, NewNode(KindList, List{ - Chars: chars, - Not: not, - })) - } - - return parserMain, tree, nil - } - } -} diff --git a/vendor/github.com/gobwas/glob/syntax/lexer/lexer.go b/vendor/github.com/gobwas/glob/syntax/lexer/lexer.go deleted file mode 100644 index a1c8d1962..000000000 --- a/vendor/github.com/gobwas/glob/syntax/lexer/lexer.go +++ /dev/null @@ -1,273 +0,0 @@ -package lexer - -import ( - "bytes" - "fmt" - "github.com/gobwas/glob/util/runes" - "unicode/utf8" -) - -const ( - char_any = '*' - char_comma = ',' - char_single = '?' - char_escape = '\\' - char_range_open = '[' - char_range_close = ']' - char_terms_open = '{' - char_terms_close = '}' - char_range_not = '!' 
- char_range_between = '-' -) - -var specials = []byte{ - char_any, - char_single, - char_escape, - char_range_open, - char_range_close, - char_terms_open, - char_terms_close, -} - -func Special(c byte) bool { - return bytes.IndexByte(specials, c) != -1 -} - -type tokens []Token - -func (i *tokens) shift() (ret Token) { - ret = (*i)[0] - copy(*i, (*i)[1:]) - *i = (*i)[:len(*i)-1] - return -} - -func (i *tokens) push(v Token) { - *i = append(*i, v) -} - -func (i *tokens) empty() bool { - return len(*i) == 0 -} - -var eof rune = 0 - -type lexer struct { - data string - pos int - err error - - tokens tokens - termsLevel int - - lastRune rune - lastRuneSize int - hasRune bool -} - -func NewLexer(source string) *lexer { - l := &lexer{ - data: source, - tokens: tokens(make([]Token, 0, 4)), - } - return l -} - -func (l *lexer) Next() Token { - if l.err != nil { - return Token{Error, l.err.Error()} - } - if !l.tokens.empty() { - return l.tokens.shift() - } - - l.fetchItem() - return l.Next() -} - -func (l *lexer) peek() (r rune, w int) { - if l.pos == len(l.data) { - return eof, 0 - } - - r, w = utf8.DecodeRuneInString(l.data[l.pos:]) - if r == utf8.RuneError { - l.errorf("could not read rune") - r = eof - w = 0 - } - - return -} - -func (l *lexer) read() rune { - if l.hasRune { - l.hasRune = false - l.seek(l.lastRuneSize) - return l.lastRune - } - - r, s := l.peek() - l.seek(s) - - l.lastRune = r - l.lastRuneSize = s - - return r -} - -func (l *lexer) seek(w int) { - l.pos += w -} - -func (l *lexer) unread() { - if l.hasRune { - l.errorf("could not unread rune") - return - } - l.seek(-l.lastRuneSize) - l.hasRune = true -} - -func (l *lexer) errorf(f string, v ...interface{}) { - l.err = fmt.Errorf(f, v...) -} - -func (l *lexer) inTerms() bool { - return l.termsLevel > 0 -} - -func (l *lexer) termsEnter() { - l.termsLevel++ -} - -func (l *lexer) termsLeave() { - l.termsLevel-- -} - -var inTextBreakers = []rune{char_single, char_any, char_range_open, char_terms_open} -var inTermsBreakers = append(inTextBreakers, char_terms_close, char_comma) - -func (l *lexer) fetchItem() { - r := l.read() - switch { - case r == eof: - l.tokens.push(Token{EOF, ""}) - - case r == char_terms_open: - l.termsEnter() - l.tokens.push(Token{TermsOpen, string(r)}) - - case r == char_comma && l.inTerms(): - l.tokens.push(Token{Separator, string(r)}) - - case r == char_terms_close && l.inTerms(): - l.tokens.push(Token{TermsClose, string(r)}) - l.termsLeave() - - case r == char_range_open: - l.tokens.push(Token{RangeOpen, string(r)}) - l.fetchRange() - - case r == char_single: - l.tokens.push(Token{Single, string(r)}) - - case r == char_any: - if l.read() == char_any { - l.tokens.push(Token{Super, string(r) + string(r)}) - } else { - l.unread() - l.tokens.push(Token{Any, string(r)}) - } - - default: - l.unread() - - var breakers []rune - if l.inTerms() { - breakers = inTermsBreakers - } else { - breakers = inTextBreakers - } - l.fetchText(breakers) - } -} - -func (l *lexer) fetchRange() { - var wantHi bool - var wantClose bool - var seenNot bool - for { - r := l.read() - if r == eof { - l.errorf("unexpected end of input") - return - } - - if wantClose { - if r != char_range_close { - l.errorf("expected close range character") - } else { - l.tokens.push(Token{RangeClose, string(r)}) - } - return - } - - if wantHi { - l.tokens.push(Token{RangeHi, string(r)}) - wantClose = true - continue - } - - if !seenNot && r == char_range_not { - l.tokens.push(Token{Not, string(r)}) - seenNot = true - continue - } - - if n, w := l.peek(); n 
== char_range_between { - l.seek(w) - l.tokens.push(Token{RangeLo, string(r)}) - l.tokens.push(Token{RangeBetween, string(n)}) - wantHi = true - continue - } - - l.unread() // unread first peek and fetch as text - l.fetchText([]rune{char_range_close}) - wantClose = true - } -} - -func (l *lexer) fetchText(breakers []rune) { - var data []rune - var escaped bool - -reading: - for { - r := l.read() - if r == eof { - break - } - - if !escaped { - if r == char_escape { - escaped = true - continue - } - - if runes.IndexRune(breakers, r) != -1 { - l.unread() - break reading - } - } - - escaped = false - data = append(data, r) - } - - if len(data) > 0 { - l.tokens.push(Token{Text, string(data)}) - } -} diff --git a/vendor/github.com/gobwas/glob/syntax/lexer/token.go b/vendor/github.com/gobwas/glob/syntax/lexer/token.go deleted file mode 100644 index 2797c4e83..000000000 --- a/vendor/github.com/gobwas/glob/syntax/lexer/token.go +++ /dev/null @@ -1,88 +0,0 @@ -package lexer - -import "fmt" - -type TokenType int - -const ( - EOF TokenType = iota - Error - Text - Char - Any - Super - Single - Not - Separator - RangeOpen - RangeClose - RangeLo - RangeHi - RangeBetween - TermsOpen - TermsClose -) - -func (tt TokenType) String() string { - switch tt { - case EOF: - return "eof" - - case Error: - return "error" - - case Text: - return "text" - - case Char: - return "char" - - case Any: - return "any" - - case Super: - return "super" - - case Single: - return "single" - - case Not: - return "not" - - case Separator: - return "separator" - - case RangeOpen: - return "range_open" - - case RangeClose: - return "range_close" - - case RangeLo: - return "range_lo" - - case RangeHi: - return "range_hi" - - case RangeBetween: - return "range_between" - - case TermsOpen: - return "terms_open" - - case TermsClose: - return "terms_close" - - default: - return "undef" - } -} - -type Token struct { - Type TokenType - Raw string -} - -func (t Token) String() string { - return fmt.Sprintf("%v<%q>", t.Type, t.Raw) -} diff --git a/vendor/github.com/gobwas/glob/syntax/syntax.go b/vendor/github.com/gobwas/glob/syntax/syntax.go deleted file mode 100644 index 1d168b148..000000000 --- a/vendor/github.com/gobwas/glob/syntax/syntax.go +++ /dev/null @@ -1,14 +0,0 @@ -package syntax - -import ( - "github.com/gobwas/glob/syntax/ast" - "github.com/gobwas/glob/syntax/lexer" -) - -func Parse(s string) (*ast.Node, error) { - return ast.Parse(lexer.NewLexer(s)) -} - -func Special(b byte) bool { - return lexer.Special(b) -} diff --git a/vendor/github.com/gobwas/glob/util/runes/runes.go b/vendor/github.com/gobwas/glob/util/runes/runes.go deleted file mode 100644 index a72355641..000000000 --- a/vendor/github.com/gobwas/glob/util/runes/runes.go +++ /dev/null @@ -1,154 +0,0 @@ -package runes - -func Index(s, needle []rune) int { - ls, ln := len(s), len(needle) - - switch { - case ln == 0: - return 0 - case ln == 1: - return IndexRune(s, needle[0]) - case ln == ls: - if Equal(s, needle) { - return 0 - } - return -1 - case ln > ls: - return -1 - } - -head: - for i := 0; i < ls && ls-i >= ln; i++ { - for y := 0; y < ln; y++ { - if s[i+y] != needle[y] { - continue head - } - } - - return i - } - - return -1 -} - -func LastIndex(s, needle []rune) int { - ls, ln := len(s), len(needle) - - switch { - case ln == 0: - if ls == 0 { - return 0 - } - return ls - case ln == 1: - return IndexLastRune(s, needle[0]) - case ln == ls: - if Equal(s, needle) { - return 0 - } - return -1 - case ln > ls: - return -1 - } - -head: - for i := ls - 1; i >= 0 && i >= 
ln; i-- { - for y := ln - 1; y >= 0; y-- { - if s[i-(ln-y-1)] != needle[y] { - continue head - } - } - - return i - ln + 1 - } - - return -1 -} - -// IndexAny returns the index of the first instance of any Unicode code point -// from chars in s, or -1 if no Unicode code point from chars is present in s. -func IndexAny(s, chars []rune) int { - if len(chars) > 0 { - for i, c := range s { - for _, m := range chars { - if c == m { - return i - } - } - } - } - return -1 -} - -func Contains(s, needle []rune) bool { - return Index(s, needle) >= 0 -} - -func Max(s []rune) (max rune) { - for _, r := range s { - if r > max { - max = r - } - } - - return -} - -func Min(s []rune) rune { - min := rune(-1) - for _, r := range s { - if min == -1 { - min = r - continue - } - - if r < min { - min = r - } - } - - return min -} - -func IndexRune(s []rune, r rune) int { - for i, c := range s { - if c == r { - return i - } - } - return -1 -} - -func IndexLastRune(s []rune, r rune) int { - for i := len(s) - 1; i >= 0; i-- { - if s[i] == r { - return i - } - } - - return -1 -} - -func Equal(a, b []rune) bool { - if len(a) == len(b) { - for i := 0; i < len(a); i++ { - if a[i] != b[i] { - return false - } - } - - return true - } - - return false -} - -// HasPrefix tests whether the string s begins with prefix. -func HasPrefix(s, prefix []rune) bool { - return len(s) >= len(prefix) && Equal(s[0:len(prefix)], prefix) -} - -// HasSuffix tests whether the string s ends with suffix. -func HasSuffix(s, suffix []rune) bool { - return len(s) >= len(suffix) && Equal(s[len(s)-len(suffix):], suffix) -} diff --git a/vendor/github.com/gobwas/glob/util/strings/strings.go b/vendor/github.com/gobwas/glob/util/strings/strings.go deleted file mode 100644 index e8ee1920b..000000000 --- a/vendor/github.com/gobwas/glob/util/strings/strings.go +++ /dev/null @@ -1,39 +0,0 @@ -package strings - -import ( - "strings" - "unicode/utf8" -) - -func IndexAnyRunes(s string, rs []rune) int { - for _, r := range rs { - if i := strings.IndexRune(s, r); i != -1 { - return i - } - } - - return -1 -} - -func LastIndexAnyRunes(s string, rs []rune) int { - for _, r := range rs { - i := -1 - if 0 <= r && r < utf8.RuneSelf { - i = strings.LastIndexByte(s, byte(r)) - } else { - sub := s - for len(sub) > 0 { - j := strings.IndexRune(s, r) - if j == -1 { - break - } - i = j - sub = sub[i+1:] - } - } - if i != -1 { - return i - } - } - return -1 -} diff --git a/vendor/github.com/gofrs/flock/.gitignore b/vendor/github.com/gofrs/flock/.gitignore deleted file mode 100644 index daf913b1b..000000000 --- a/vendor/github.com/gofrs/flock/.gitignore +++ /dev/null @@ -1,24 +0,0 @@ -# Compiled Object files, Static and Dynamic libs (Shared Objects) -*.o -*.a -*.so - -# Folders -_obj -_test - -# Architecture specific extensions/prefixes -*.[568vq] -[568vq].out - -*.cgo1.go -*.cgo2.c -_cgo_defun.c -_cgo_gotypes.go -_cgo_export.* - -_testmain.go - -*.exe -*.test -*.prof diff --git a/vendor/github.com/gofrs/flock/.travis.yml b/vendor/github.com/gofrs/flock/.travis.yml deleted file mode 100644 index b16d040fa..000000000 --- a/vendor/github.com/gofrs/flock/.travis.yml +++ /dev/null @@ -1,10 +0,0 @@ -language: go -go: - - 1.14.x - - 1.15.x -script: go test -v -check.vv -race ./... 
-sudo: false -notifications: - email: - on_success: never - on_failure: always diff --git a/vendor/github.com/gofrs/flock/LICENSE b/vendor/github.com/gofrs/flock/LICENSE deleted file mode 100644 index 8b8ff36fe..000000000 --- a/vendor/github.com/gofrs/flock/LICENSE +++ /dev/null @@ -1,27 +0,0 @@ -Copyright (c) 2015-2020, Tim Heckman -All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are met: - -* Redistributions of source code must retain the above copyright notice, this - list of conditions and the following disclaimer. - -* Redistributions in binary form must reproduce the above copyright notice, - this list of conditions and the following disclaimer in the documentation - and/or other materials provided with the distribution. - -* Neither the name of gofrs nor the names of its contributors may be used - to endorse or promote products derived from this software without - specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" -AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE -IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE -FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL -DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR -SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, -OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/vendor/github.com/gofrs/flock/README.md b/vendor/github.com/gofrs/flock/README.md deleted file mode 100644 index 71ce63692..000000000 --- a/vendor/github.com/gofrs/flock/README.md +++ /dev/null @@ -1,41 +0,0 @@ -# flock -[![TravisCI Build Status](https://img.shields.io/travis/gofrs/flock/master.svg?style=flat)](https://travis-ci.org/gofrs/flock) -[![GoDoc](https://img.shields.io/badge/godoc-flock-blue.svg?style=flat)](https://godoc.org/github.com/gofrs/flock) -[![License](https://img.shields.io/badge/license-BSD_3--Clause-brightgreen.svg?style=flat)](https://github.com/gofrs/flock/blob/master/LICENSE) -[![Go Report Card](https://goreportcard.com/badge/github.com/gofrs/flock)](https://goreportcard.com/report/github.com/gofrs/flock) - -`flock` implements a thread-safe sync.Locker interface for file locking. It also -includes a non-blocking TryLock() function to allow locking without blocking execution. - -## License -`flock` is released under the BSD 3-Clause License. See the `LICENSE` file for more details. - -## Go Compatibility -This package makes use of the `context` package that was introduced in Go 1.7. As such, this -package has an implicit dependency on Go 1.7+. - -## Installation -``` -go get -u github.com/gofrs/flock -``` - -## Usage -```Go -import "github.com/gofrs/flock" - -fileLock := flock.New("/var/lock/go-lock.lock") - -locked, err := fileLock.TryLock() - -if err != nil { - // handle locking error -} - -if locked { - // do work - fileLock.Unlock() -} -``` - -For more detailed usage information take a look at the package API docs on -[GoDoc](https://godoc.org/github.com/gofrs/flock). 
diff --git a/vendor/github.com/gofrs/flock/appveyor.yml b/vendor/github.com/gofrs/flock/appveyor.yml deleted file mode 100644 index 909b4bf7c..000000000 --- a/vendor/github.com/gofrs/flock/appveyor.yml +++ /dev/null @@ -1,25 +0,0 @@ -version: '{build}' - -build: false -deploy: false - -clone_folder: 'c:\gopath\src\github.com\gofrs\flock' - -environment: - GOPATH: 'c:\gopath' - GOVERSION: '1.15' - -init: - - git config --global core.autocrlf input - -install: - - rmdir c:\go /s /q - - appveyor DownloadFile https://storage.googleapis.com/golang/go%GOVERSION%.windows-amd64.msi - - msiexec /i go%GOVERSION%.windows-amd64.msi /q - - set Path=c:\go\bin;c:\gopath\bin;%Path% - - go version - - go env - -test_script: - - go get -t ./... - - go test -race -v ./... diff --git a/vendor/github.com/gofrs/flock/flock.go b/vendor/github.com/gofrs/flock/flock.go deleted file mode 100644 index 95c784ca5..000000000 --- a/vendor/github.com/gofrs/flock/flock.go +++ /dev/null @@ -1,144 +0,0 @@ -// Copyright 2015 Tim Heckman. All rights reserved. -// Use of this source code is governed by the BSD 3-Clause -// license that can be found in the LICENSE file. - -// Package flock implements a thread-safe interface for file locking. -// It also includes a non-blocking TryLock() function to allow locking -// without blocking execution. -// -// Package flock is released under the BSD 3-Clause License. See the LICENSE file -// for more details. -// -// While using this library, remember that the locking behaviors are not -// guaranteed to be the same on each platform. For example, some UNIX-like -// operating systems will transparently convert a shared lock to an exclusive -// lock. If you Unlock() the flock from a location where you believe that you -// have the shared lock, you may accidentally drop the exclusive lock. -package flock - -import ( - "context" - "os" - "runtime" - "sync" - "time" -) - -// Flock is the struct type to handle file locking. All fields are unexported, -// with access to some of the fields provided by getter methods (Path() and Locked()). -type Flock struct { - path string - m sync.RWMutex - fh *os.File - l bool - r bool -} - -// New returns a new instance of *Flock. The only parameter -// it takes is the path to the desired lockfile. -func New(path string) *Flock { - return &Flock{path: path} -} - -// NewFlock returns a new instance of *Flock. The only parameter -// it takes is the path to the desired lockfile. -// -// Deprecated: Use New instead. -func NewFlock(path string) *Flock { - return New(path) -} - -// Close is equivalent to calling Unlock. -// -// This will release the lock and close the underlying file descriptor. -// It will not remove the file from disk, that's up to your application. -func (f *Flock) Close() error { - return f.Unlock() -} - -// Path returns the path as provided in NewFlock(). -func (f *Flock) Path() string { - return f.path -} - -// Locked returns the lock state (locked: true, unlocked: false). -// -// Warning: by the time you use the returned value, the state may have changed. -func (f *Flock) Locked() bool { - f.m.RLock() - defer f.m.RUnlock() - return f.l -} - -// RLocked returns the read lock state (locked: true, unlocked: false). -// -// Warning: by the time you use the returned value, the state may have changed. 
-func (f *Flock) RLocked() bool { - f.m.RLock() - defer f.m.RUnlock() - return f.r -} - -func (f *Flock) String() string { - return f.path -} - -// TryLockContext repeatedly tries to take an exclusive lock until one of the -// conditions is met: TryLock succeeds, TryLock fails with error, or Context -// Done channel is closed. -func (f *Flock) TryLockContext(ctx context.Context, retryDelay time.Duration) (bool, error) { - return tryCtx(ctx, f.TryLock, retryDelay) -} - -// TryRLockContext repeatedly tries to take a shared lock until one of the -// conditions is met: TryRLock succeeds, TryRLock fails with error, or Context -// Done channel is closed. -func (f *Flock) TryRLockContext(ctx context.Context, retryDelay time.Duration) (bool, error) { - return tryCtx(ctx, f.TryRLock, retryDelay) -} - -func tryCtx(ctx context.Context, fn func() (bool, error), retryDelay time.Duration) (bool, error) { - if ctx.Err() != nil { - return false, ctx.Err() - } - for { - if ok, err := fn(); ok || err != nil { - return ok, err - } - select { - case <-ctx.Done(): - return false, ctx.Err() - case <-time.After(retryDelay): - // try again - } - } -} - -func (f *Flock) setFh() error { - // open a new os.File instance - // create it if it doesn't exist, and open the file read-only. - flags := os.O_CREATE - if runtime.GOOS == "aix" { - // AIX cannot preform write-lock (ie exclusive) on a - // read-only file. - flags |= os.O_RDWR - } else { - flags |= os.O_RDONLY - } - fh, err := os.OpenFile(f.path, flags, os.FileMode(0600)) - if err != nil { - return err - } - - // set the filehandle on the struct - f.fh = fh - return nil -} - -// ensure the file handle is closed if no lock is held -func (f *Flock) ensureFhState() { - if !f.l && !f.r && f.fh != nil { - f.fh.Close() - f.fh = nil - } -} diff --git a/vendor/github.com/gofrs/flock/flock_aix.go b/vendor/github.com/gofrs/flock/flock_aix.go deleted file mode 100644 index 7277c1b6b..000000000 --- a/vendor/github.com/gofrs/flock/flock_aix.go +++ /dev/null @@ -1,281 +0,0 @@ -// Copyright 2019 Tim Heckman. All rights reserved. Use of this source code is -// governed by the BSD 3-Clause license that can be found in the LICENSE file. - -// Copyright 2018 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// This code implements the filelock API using POSIX 'fcntl' locks, which attach -// to an (inode, process) pair rather than a file descriptor. To avoid unlocking -// files prematurely when the same file is opened through different descriptors, -// we allow only one read-lock at a time. -// -// This code is adapted from the Go package: -// cmd/go/internal/lockedfile/internal/filelock - -//+build aix - -package flock - -import ( - "errors" - "io" - "os" - "sync" - "syscall" - - "golang.org/x/sys/unix" -) - -type lockType int16 - -const ( - readLock lockType = unix.F_RDLCK - writeLock lockType = unix.F_WRLCK -) - -type cmdType int - -const ( - tryLock cmdType = unix.F_SETLK - waitLock cmdType = unix.F_SETLKW -) - -type inode = uint64 - -type inodeLock struct { - owner *Flock - queue []<-chan *Flock -} - -var ( - mu sync.Mutex - inodes = map[*Flock]inode{} - locks = map[inode]inodeLock{} -) - -// Lock is a blocking call to try and take an exclusive file lock. It will wait -// until it is able to obtain the exclusive file lock. It's recommended that -// TryLock() be used over this function. 
This function may block the ability to -// query the current Locked() or RLocked() status due to a RW-mutex lock. -// -// If we are already exclusive-locked, this function short-circuits and returns -// immediately assuming it can take the mutex lock. -// -// If the *Flock has a shared lock (RLock), this may transparently replace the -// shared lock with an exclusive lock on some UNIX-like operating systems. Be -// careful when using exclusive locks in conjunction with shared locks -// (RLock()), because calling Unlock() may accidentally release the exclusive -// lock that was once a shared lock. -func (f *Flock) Lock() error { - return f.lock(&f.l, writeLock) -} - -// RLock is a blocking call to try and take a shared file lock. It will wait -// until it is able to obtain the shared file lock. It's recommended that -// TryRLock() be used over this function. This function may block the ability to -// query the current Locked() or RLocked() status due to a RW-mutex lock. -// -// If we are already shared-locked, this function short-circuits and returns -// immediately assuming it can take the mutex lock. -func (f *Flock) RLock() error { - return f.lock(&f.r, readLock) -} - -func (f *Flock) lock(locked *bool, flag lockType) error { - f.m.Lock() - defer f.m.Unlock() - - if *locked { - return nil - } - - if f.fh == nil { - if err := f.setFh(); err != nil { - return err - } - defer f.ensureFhState() - } - - if _, err := f.doLock(waitLock, flag, true); err != nil { - return err - } - - *locked = true - return nil -} - -func (f *Flock) doLock(cmd cmdType, lt lockType, blocking bool) (bool, error) { - // POSIX locks apply per inode and process, and the lock for an inode is - // released when *any* descriptor for that inode is closed. So we need to - // synchronize access to each inode internally, and must serialize lock and - // unlock calls that refer to the same inode through different descriptors. - fi, err := f.fh.Stat() - if err != nil { - return false, err - } - ino := inode(fi.Sys().(*syscall.Stat_t).Ino) - - mu.Lock() - if i, dup := inodes[f]; dup && i != ino { - mu.Unlock() - return false, &os.PathError{ - Path: f.Path(), - Err: errors.New("inode for file changed since last Lock or RLock"), - } - } - - inodes[f] = ino - - var wait chan *Flock - l := locks[ino] - if l.owner == f { - // This file already owns the lock, but the call may change its lock type. - } else if l.owner == nil { - // No owner: it's ours now. - l.owner = f - } else if !blocking { - // Already owned: cannot take the lock. - mu.Unlock() - return false, nil - } else { - // Already owned: add a channel to wait on. 
- wait = make(chan *Flock) - l.queue = append(l.queue, wait) - } - locks[ino] = l - mu.Unlock() - - if wait != nil { - wait <- f - } - - err = setlkw(f.fh.Fd(), cmd, lt) - - if err != nil { - f.doUnlock() - if cmd == tryLock && err == unix.EACCES { - return false, nil - } - return false, err - } - - return true, nil -} - -func (f *Flock) Unlock() error { - f.m.Lock() - defer f.m.Unlock() - - // if we aren't locked or if the lockfile instance is nil - // just return a nil error because we are unlocked - if (!f.l && !f.r) || f.fh == nil { - return nil - } - - if err := f.doUnlock(); err != nil { - return err - } - - f.fh.Close() - - f.l = false - f.r = false - f.fh = nil - - return nil -} - -func (f *Flock) doUnlock() (err error) { - var owner *Flock - mu.Lock() - ino, ok := inodes[f] - if ok { - owner = locks[ino].owner - } - mu.Unlock() - - if owner == f { - err = setlkw(f.fh.Fd(), waitLock, unix.F_UNLCK) - } - - mu.Lock() - l := locks[ino] - if len(l.queue) == 0 { - // No waiters: remove the map entry. - delete(locks, ino) - } else { - // The first waiter is sending us their file now. - // Receive it and update the queue. - l.owner = <-l.queue[0] - l.queue = l.queue[1:] - locks[ino] = l - } - delete(inodes, f) - mu.Unlock() - - return err -} - -// TryLock is the preferred function for taking an exclusive file lock. This -// function takes an RW-mutex lock before it tries to lock the file, so there is -// the possibility that this function may block for a short time if another -// goroutine is trying to take any action. -// -// The actual file lock is non-blocking. If we are unable to get the exclusive -// file lock, the function will return false instead of waiting for the lock. If -// we get the lock, we also set the *Flock instance as being exclusive-locked. -func (f *Flock) TryLock() (bool, error) { - return f.try(&f.l, writeLock) -} - -// TryRLock is the preferred function for taking a shared file lock. This -// function takes an RW-mutex lock before it tries to lock the file, so there is -// the possibility that this function may block for a short time if another -// goroutine is trying to take any action. -// -// The actual file lock is non-blocking. If we are unable to get the shared file -// lock, the function will return false instead of waiting for the lock. If we -// get the lock, we also set the *Flock instance as being share-locked. -func (f *Flock) TryRLock() (bool, error) { - return f.try(&f.r, readLock) -} - -func (f *Flock) try(locked *bool, flag lockType) (bool, error) { - f.m.Lock() - defer f.m.Unlock() - - if *locked { - return true, nil - } - - if f.fh == nil { - if err := f.setFh(); err != nil { - return false, err - } - defer f.ensureFhState() - } - - haslock, err := f.doLock(tryLock, flag, false) - if err != nil { - return false, err - } - - *locked = haslock - return haslock, nil -} - -// setlkw calls FcntlFlock with cmd for the entire file indicated by fd. -func setlkw(fd uintptr, cmd cmdType, lt lockType) error { - for { - err := unix.FcntlFlock(fd, int(cmd), &unix.Flock_t{ - Type: int16(lt), - Whence: io.SeekStart, - Start: 0, - Len: 0, // All bytes. - }) - if err != unix.EINTR { - return err - } - } -} diff --git a/vendor/github.com/gofrs/flock/flock_unix.go b/vendor/github.com/gofrs/flock/flock_unix.go deleted file mode 100644 index c315a3e29..000000000 --- a/vendor/github.com/gofrs/flock/flock_unix.go +++ /dev/null @@ -1,197 +0,0 @@ -// Copyright 2015 Tim Heckman. All rights reserved. 
-// Use of this source code is governed by the BSD 3-Clause -// license that can be found in the LICENSE file. - -// +build !aix,!windows - -package flock - -import ( - "os" - "syscall" -) - -// Lock is a blocking call to try and take an exclusive file lock. It will wait -// until it is able to obtain the exclusive file lock. It's recommended that -// TryLock() be used over this function. This function may block the ability to -// query the current Locked() or RLocked() status due to a RW-mutex lock. -// -// If we are already exclusive-locked, this function short-circuits and returns -// immediately assuming it can take the mutex lock. -// -// If the *Flock has a shared lock (RLock), this may transparently replace the -// shared lock with an exclusive lock on some UNIX-like operating systems. Be -// careful when using exclusive locks in conjunction with shared locks -// (RLock()), because calling Unlock() may accidentally release the exclusive -// lock that was once a shared lock. -func (f *Flock) Lock() error { - return f.lock(&f.l, syscall.LOCK_EX) -} - -// RLock is a blocking call to try and take a shared file lock. It will wait -// until it is able to obtain the shared file lock. It's recommended that -// TryRLock() be used over this function. This function may block the ability to -// query the current Locked() or RLocked() status due to a RW-mutex lock. -// -// If we are already shared-locked, this function short-circuits and returns -// immediately assuming it can take the mutex lock. -func (f *Flock) RLock() error { - return f.lock(&f.r, syscall.LOCK_SH) -} - -func (f *Flock) lock(locked *bool, flag int) error { - f.m.Lock() - defer f.m.Unlock() - - if *locked { - return nil - } - - if f.fh == nil { - if err := f.setFh(); err != nil { - return err - } - defer f.ensureFhState() - } - - if err := syscall.Flock(int(f.fh.Fd()), flag); err != nil { - shouldRetry, reopenErr := f.reopenFDOnError(err) - if reopenErr != nil { - return reopenErr - } - - if !shouldRetry { - return err - } - - if err = syscall.Flock(int(f.fh.Fd()), flag); err != nil { - return err - } - } - - *locked = true - return nil -} - -// Unlock is a function to unlock the file. This file takes a RW-mutex lock, so -// while it is running the Locked() and RLocked() functions will be blocked. -// -// This function short-circuits if we are unlocked already. If not, it calls -// syscall.LOCK_UN on the file and closes the file descriptor. It does not -// remove the file from disk. It's up to your application to do. -// -// Please note, if your shared lock became an exclusive lock this may -// unintentionally drop the exclusive lock if called by the consumer that -// believes they have a shared lock. Please see Lock() for more details. -func (f *Flock) Unlock() error { - f.m.Lock() - defer f.m.Unlock() - - // if we aren't locked or if the lockfile instance is nil - // just return a nil error because we are unlocked - if (!f.l && !f.r) || f.fh == nil { - return nil - } - - // mark the file as unlocked - if err := syscall.Flock(int(f.fh.Fd()), syscall.LOCK_UN); err != nil { - return err - } - - f.fh.Close() - - f.l = false - f.r = false - f.fh = nil - - return nil -} - -// TryLock is the preferred function for taking an exclusive file lock. This -// function takes an RW-mutex lock before it tries to lock the file, so there is -// the possibility that this function may block for a short time if another -// goroutine is trying to take any action. -// -// The actual file lock is non-blocking. 
If we are unable to get the exclusive -// file lock, the function will return false instead of waiting for the lock. If -// we get the lock, we also set the *Flock instance as being exclusive-locked. -func (f *Flock) TryLock() (bool, error) { - return f.try(&f.l, syscall.LOCK_EX) -} - -// TryRLock is the preferred function for taking a shared file lock. This -// function takes an RW-mutex lock before it tries to lock the file, so there is -// the possibility that this function may block for a short time if another -// goroutine is trying to take any action. -// -// The actual file lock is non-blocking. If we are unable to get the shared file -// lock, the function will return false instead of waiting for the lock. If we -// get the lock, we also set the *Flock instance as being share-locked. -func (f *Flock) TryRLock() (bool, error) { - return f.try(&f.r, syscall.LOCK_SH) -} - -func (f *Flock) try(locked *bool, flag int) (bool, error) { - f.m.Lock() - defer f.m.Unlock() - - if *locked { - return true, nil - } - - if f.fh == nil { - if err := f.setFh(); err != nil { - return false, err - } - defer f.ensureFhState() - } - - var retried bool -retry: - err := syscall.Flock(int(f.fh.Fd()), flag|syscall.LOCK_NB) - - switch err { - case syscall.EWOULDBLOCK: - return false, nil - case nil: - *locked = true - return true, nil - } - if !retried { - if shouldRetry, reopenErr := f.reopenFDOnError(err); reopenErr != nil { - return false, reopenErr - } else if shouldRetry { - retried = true - goto retry - } - } - - return false, err -} - -// reopenFDOnError determines whether we should reopen the file handle -// in readwrite mode and try again. This comes from util-linux/sys-utils/flock.c: -// Since Linux 3.4 (commit 55725513) -// Probably NFSv4 where flock() is emulated by fcntl(). -func (f *Flock) reopenFDOnError(err error) (bool, error) { - if err != syscall.EIO && err != syscall.EBADF { - return false, nil - } - if st, err := f.fh.Stat(); err == nil { - // if the file is able to be read and written - if st.Mode()&0600 == 0600 { - f.fh.Close() - f.fh = nil - - // reopen in read-write mode and set the filehandle - fh, err := os.OpenFile(f.path, os.O_CREATE|os.O_RDWR, os.FileMode(0600)) - if err != nil { - return false, err - } - f.fh = fh - return true, nil - } - } - - return false, nil -} diff --git a/vendor/github.com/gofrs/flock/flock_winapi.go b/vendor/github.com/gofrs/flock/flock_winapi.go deleted file mode 100644 index fe405a255..000000000 --- a/vendor/github.com/gofrs/flock/flock_winapi.go +++ /dev/null @@ -1,76 +0,0 @@ -// Copyright 2015 Tim Heckman. All rights reserved. -// Use of this source code is governed by the BSD 3-Clause -// license that can be found in the LICENSE file. - -// +build windows - -package flock - -import ( - "syscall" - "unsafe" -) - -var ( - kernel32, _ = syscall.LoadLibrary("kernel32.dll") - procLockFileEx, _ = syscall.GetProcAddress(kernel32, "LockFileEx") - procUnlockFileEx, _ = syscall.GetProcAddress(kernel32, "UnlockFileEx") -) - -const ( - winLockfileFailImmediately = 0x00000001 - winLockfileExclusiveLock = 0x00000002 - winLockfileSharedLock = 0x00000000 -) - -// Use of 0x00000000 for the shared lock is a guess based on some the MS Windows -// `LockFileEX` docs, which document the `LOCKFILE_EXCLUSIVE_LOCK` flag as: -// -// > The function requests an exclusive lock. Otherwise, it requests a shared -// > lock. 
-// -// https://msdn.microsoft.com/en-us/library/windows/desktop/aa365203(v=vs.85).aspx - -func lockFileEx(handle syscall.Handle, flags uint32, reserved uint32, numberOfBytesToLockLow uint32, numberOfBytesToLockHigh uint32, offset *syscall.Overlapped) (bool, syscall.Errno) { - r1, _, errNo := syscall.Syscall6( - uintptr(procLockFileEx), - 6, - uintptr(handle), - uintptr(flags), - uintptr(reserved), - uintptr(numberOfBytesToLockLow), - uintptr(numberOfBytesToLockHigh), - uintptr(unsafe.Pointer(offset))) - - if r1 != 1 { - if errNo == 0 { - return false, syscall.EINVAL - } - - return false, errNo - } - - return true, 0 -} - -func unlockFileEx(handle syscall.Handle, reserved uint32, numberOfBytesToLockLow uint32, numberOfBytesToLockHigh uint32, offset *syscall.Overlapped) (bool, syscall.Errno) { - r1, _, errNo := syscall.Syscall6( - uintptr(procUnlockFileEx), - 5, - uintptr(handle), - uintptr(reserved), - uintptr(numberOfBytesToLockLow), - uintptr(numberOfBytesToLockHigh), - uintptr(unsafe.Pointer(offset)), - 0) - - if r1 != 1 { - if errNo == 0 { - return false, syscall.EINVAL - } - - return false, errNo - } - - return true, 0 -} diff --git a/vendor/github.com/gofrs/flock/flock_windows.go b/vendor/github.com/gofrs/flock/flock_windows.go deleted file mode 100644 index ddb534cce..000000000 --- a/vendor/github.com/gofrs/flock/flock_windows.go +++ /dev/null @@ -1,142 +0,0 @@ -// Copyright 2015 Tim Heckman. All rights reserved. -// Use of this source code is governed by the BSD 3-Clause -// license that can be found in the LICENSE file. - -package flock - -import ( - "syscall" -) - -// ErrorLockViolation is the error code returned from the Windows syscall when a -// lock would block and you ask to fail immediately. -const ErrorLockViolation syscall.Errno = 0x21 // 33 - -// Lock is a blocking call to try and take an exclusive file lock. It will wait -// until it is able to obtain the exclusive file lock. It's recommended that -// TryLock() be used over this function. This function may block the ability to -// query the current Locked() or RLocked() status due to a RW-mutex lock. -// -// If we are already locked, this function short-circuits and returns -// immediately assuming it can take the mutex lock. -func (f *Flock) Lock() error { - return f.lock(&f.l, winLockfileExclusiveLock) -} - -// RLock is a blocking call to try and take a shared file lock. It will wait -// until it is able to obtain the shared file lock. It's recommended that -// TryRLock() be used over this function. This function may block the ability to -// query the current Locked() or RLocked() status due to a RW-mutex lock. -// -// If we are already locked, this function short-circuits and returns -// immediately assuming it can take the mutex lock. -func (f *Flock) RLock() error { - return f.lock(&f.r, winLockfileSharedLock) -} - -func (f *Flock) lock(locked *bool, flag uint32) error { - f.m.Lock() - defer f.m.Unlock() - - if *locked { - return nil - } - - if f.fh == nil { - if err := f.setFh(); err != nil { - return err - } - defer f.ensureFhState() - } - - if _, errNo := lockFileEx(syscall.Handle(f.fh.Fd()), flag, 0, 1, 0, &syscall.Overlapped{}); errNo > 0 { - return errNo - } - - *locked = true - return nil -} - -// Unlock is a function to unlock the file. This file takes a RW-mutex lock, so -// while it is running the Locked() and RLocked() functions will be blocked. -// -// This function short-circuits if we are unlocked already. If not, it calls -// UnlockFileEx() on the file and closes the file descriptor. 
It does not remove -// the file from disk. It's up to your application to do. -func (f *Flock) Unlock() error { - f.m.Lock() - defer f.m.Unlock() - - // if we aren't locked or if the lockfile instance is nil - // just return a nil error because we are unlocked - if (!f.l && !f.r) || f.fh == nil { - return nil - } - - // mark the file as unlocked - if _, errNo := unlockFileEx(syscall.Handle(f.fh.Fd()), 0, 1, 0, &syscall.Overlapped{}); errNo > 0 { - return errNo - } - - f.fh.Close() - - f.l = false - f.r = false - f.fh = nil - - return nil -} - -// TryLock is the preferred function for taking an exclusive file lock. This -// function does take a RW-mutex lock before it tries to lock the file, so there -// is the possibility that this function may block for a short time if another -// goroutine is trying to take any action. -// -// The actual file lock is non-blocking. If we are unable to get the exclusive -// file lock, the function will return false instead of waiting for the lock. If -// we get the lock, we also set the *Flock instance as being exclusive-locked. -func (f *Flock) TryLock() (bool, error) { - return f.try(&f.l, winLockfileExclusiveLock) -} - -// TryRLock is the preferred function for taking a shared file lock. This -// function does take a RW-mutex lock before it tries to lock the file, so there -// is the possibility that this function may block for a short time if another -// goroutine is trying to take any action. -// -// The actual file lock is non-blocking. If we are unable to get the shared file -// lock, the function will return false instead of waiting for the lock. If we -// get the lock, we also set the *Flock instance as being shared-locked. -func (f *Flock) TryRLock() (bool, error) { - return f.try(&f.r, winLockfileSharedLock) -} - -func (f *Flock) try(locked *bool, flag uint32) (bool, error) { - f.m.Lock() - defer f.m.Unlock() - - if *locked { - return true, nil - } - - if f.fh == nil { - if err := f.setFh(); err != nil { - return false, err - } - defer f.ensureFhState() - } - - _, errNo := lockFileEx(syscall.Handle(f.fh.Fd()), flag|winLockfileFailImmediately, 0, 1, 0, &syscall.Overlapped{}) - - if errNo > 0 { - if errNo == ErrorLockViolation || errNo == syscall.ERROR_IO_PENDING { - return false, nil - } - - return false, errNo - } - - *locked = true - - return true, nil -} diff --git a/vendor/github.com/golangci/check/LICENSE b/vendor/github.com/golangci/check/LICENSE deleted file mode 100644 index 5a1774b8e..000000000 --- a/vendor/github.com/golangci/check/LICENSE +++ /dev/null @@ -1,674 +0,0 @@ -GNU GENERAL PUBLIC LICENSE - Version 3, 29 June 2007 - - Copyright (C) 2007 Free Software Foundation, Inc. {http://fsf.org/} - Everyone is permitted to copy and distribute verbatim copies - of this license document, but changing it is not allowed. - - Preamble - - The GNU General Public License is a free, copyleft license for -software and other kinds of works. - - The licenses for most software and other practical works are designed -to take away your freedom to share and change the works. By contrast, -the GNU General Public License is intended to guarantee your freedom to -share and change all versions of a program--to make sure it remains free -software for all its users. We, the Free Software Foundation, use the -GNU General Public License for most of our software; it applies also to -any other work released this way by its authors. You can apply it to -your programs, too. - - When we speak of free software, we are referring to freedom, not -price. 
Our General Public Licenses are designed to make sure that you -have the freedom to distribute copies of free software (and charge for -them if you wish), that you receive source code or can get it if you -want it, that you can change the software or use pieces of it in new -free programs, and that you know you can do these things. - - To protect your rights, we need to prevent others from denying you -these rights or asking you to surrender the rights. Therefore, you have -certain responsibilities if you distribute copies of the software, or if -you modify it: responsibilities to respect the freedom of others. - - For example, if you distribute copies of such a program, whether -gratis or for a fee, you must pass on to the recipients the same -freedoms that you received. You must make sure that they, too, receive -or can get the source code. And you must show them these terms so they -know their rights. - - Developers that use the GNU GPL protect your rights with two steps: -(1) assert copyright on the software, and (2) offer you this License -giving you legal permission to copy, distribute and/or modify it. - - For the developers' and authors' protection, the GPL clearly explains -that there is no warranty for this free software. For both users' and -authors' sake, the GPL requires that modified versions be marked as -changed, so that their problems will not be attributed erroneously to -authors of previous versions. - - Some devices are designed to deny users access to install or run -modified versions of the software inside them, although the manufacturer -can do so. This is fundamentally incompatible with the aim of -protecting users' freedom to change the software. The systematic -pattern of such abuse occurs in the area of products for individuals to -use, which is precisely where it is most unacceptable. Therefore, we -have designed this version of the GPL to prohibit the practice for those -products. If such problems arise substantially in other domains, we -stand ready to extend this provision to those domains in future versions -of the GPL, as needed to protect the freedom of users. - - Finally, every program is threatened constantly by software patents. -States should not allow patents to restrict development and use of -software on general-purpose computers, but in those that do, we wish to -avoid the special danger that patents applied to a free program could -make it effectively proprietary. To prevent this, the GPL assures that -patents cannot be used to render the program non-free. - - The precise terms and conditions for copying, distribution and -modification follow. - - TERMS AND CONDITIONS - - 0. Definitions. - - "This License" refers to version 3 of the GNU General Public License. - - "Copyright" also means copyright-like laws that apply to other kinds of -works, such as semiconductor masks. - - "The Program" refers to any copyrightable work licensed under this -License. Each licensee is addressed as "you". "Licensees" and -"recipients" may be individuals or organizations. - - To "modify" a work means to copy from or adapt all or part of the work -in a fashion requiring copyright permission, other than the making of an -exact copy. The resulting work is called a "modified version" of the -earlier work or a work "based on" the earlier work. - - A "covered work" means either the unmodified Program or a work based -on the Program. 
- - To "propagate" a work means to do anything with it that, without -permission, would make you directly or secondarily liable for -infringement under applicable copyright law, except executing it on a -computer or modifying a private copy. Propagation includes copying, -distribution (with or without modification), making available to the -public, and in some countries other activities as well. - - To "convey" a work means any kind of propagation that enables other -parties to make or receive copies. Mere interaction with a user through -a computer network, with no transfer of a copy, is not conveying. - - An interactive user interface displays "Appropriate Legal Notices" -to the extent that it includes a convenient and prominently visible -feature that (1) displays an appropriate copyright notice, and (2) -tells the user that there is no warranty for the work (except to the -extent that warranties are provided), that licensees may convey the -work under this License, and how to view a copy of this License. If -the interface presents a list of user commands or options, such as a -menu, a prominent item in the list meets this criterion. - - 1. Source Code. - - The "source code" for a work means the preferred form of the work -for making modifications to it. "Object code" means any non-source -form of a work. - - A "Standard Interface" means an interface that either is an official -standard defined by a recognized standards body, or, in the case of -interfaces specified for a particular programming language, one that -is widely used among developers working in that language. - - The "System Libraries" of an executable work include anything, other -than the work as a whole, that (a) is included in the normal form of -packaging a Major Component, but which is not part of that Major -Component, and (b) serves only to enable use of the work with that -Major Component, or to implement a Standard Interface for which an -implementation is available to the public in source code form. A -"Major Component", in this context, means a major essential component -(kernel, window system, and so on) of the specific operating system -(if any) on which the executable work runs, or a compiler used to -produce the work, or an object code interpreter used to run it. - - The "Corresponding Source" for a work in object code form means all -the source code needed to generate, install, and (for an executable -work) run the object code and to modify the work, including scripts to -control those activities. However, it does not include the work's -System Libraries, or general-purpose tools or generally available free -programs which are used unmodified in performing those activities but -which are not part of the work. For example, Corresponding Source -includes interface definition files associated with source files for -the work, and the source code for shared libraries and dynamically -linked subprograms that the work is specifically designed to require, -such as by intimate data communication or control flow between those -subprograms and other parts of the work. - - The Corresponding Source need not include anything that users -can regenerate automatically from other parts of the Corresponding -Source. - - The Corresponding Source for a work in source code form is that -same work. - - 2. Basic Permissions. - - All rights granted under this License are granted for the term of -copyright on the Program, and are irrevocable provided the stated -conditions are met. 
This License explicitly affirms your unlimited -permission to run the unmodified Program. The output from running a -covered work is covered by this License only if the output, given its -content, constitutes a covered work. This License acknowledges your -rights of fair use or other equivalent, as provided by copyright law. - - You may make, run and propagate covered works that you do not -convey, without conditions so long as your license otherwise remains -in force. You may convey covered works to others for the sole purpose -of having them make modifications exclusively for you, or provide you -with facilities for running those works, provided that you comply with -the terms of this License in conveying all material for which you do -not control copyright. Those thus making or running the covered works -for you must do so exclusively on your behalf, under your direction -and control, on terms that prohibit them from making any copies of -your copyrighted material outside their relationship with you. - - Conveying under any other circumstances is permitted solely under -the conditions stated below. Sublicensing is not allowed; section 10 -makes it unnecessary. - - 3. Protecting Users' Legal Rights From Anti-Circumvention Law. - - No covered work shall be deemed part of an effective technological -measure under any applicable law fulfilling obligations under article -11 of the WIPO copyright treaty adopted on 20 December 1996, or -similar laws prohibiting or restricting circumvention of such -measures. - - When you convey a covered work, you waive any legal power to forbid -circumvention of technological measures to the extent such circumvention -is effected by exercising rights under this License with respect to -the covered work, and you disclaim any intention to limit operation or -modification of the work as a means of enforcing, against the work's -users, your or third parties' legal rights to forbid circumvention of -technological measures. - - 4. Conveying Verbatim Copies. - - You may convey verbatim copies of the Program's source code as you -receive it, in any medium, provided that you conspicuously and -appropriately publish on each copy an appropriate copyright notice; -keep intact all notices stating that this License and any -non-permissive terms added in accord with section 7 apply to the code; -keep intact all notices of the absence of any warranty; and give all -recipients a copy of this License along with the Program. - - You may charge any price or no price for each copy that you convey, -and you may offer support or warranty protection for a fee. - - 5. Conveying Modified Source Versions. - - You may convey a work based on the Program, or the modifications to -produce it from the Program, in the form of source code under the -terms of section 4, provided that you also meet all of these conditions: - - a) The work must carry prominent notices stating that you modified - it, and giving a relevant date. - - b) The work must carry prominent notices stating that it is - released under this License and any conditions added under section - 7. This requirement modifies the requirement in section 4 to - "keep intact all notices". - - c) You must license the entire work, as a whole, under this - License to anyone who comes into possession of a copy. This - License will therefore apply, along with any applicable section 7 - additional terms, to the whole of the work, and all its parts, - regardless of how they are packaged. 
This License gives no - permission to license the work in any other way, but it does not - invalidate such permission if you have separately received it. - - d) If the work has interactive user interfaces, each must display - Appropriate Legal Notices; however, if the Program has interactive - interfaces that do not display Appropriate Legal Notices, your - work need not make them do so. - - A compilation of a covered work with other separate and independent -works, which are not by their nature extensions of the covered work, -and which are not combined with it such as to form a larger program, -in or on a volume of a storage or distribution medium, is called an -"aggregate" if the compilation and its resulting copyright are not -used to limit the access or legal rights of the compilation's users -beyond what the individual works permit. Inclusion of a covered work -in an aggregate does not cause this License to apply to the other -parts of the aggregate. - - 6. Conveying Non-Source Forms. - - You may convey a covered work in object code form under the terms -of sections 4 and 5, provided that you also convey the -machine-readable Corresponding Source under the terms of this License, -in one of these ways: - - a) Convey the object code in, or embodied in, a physical product - (including a physical distribution medium), accompanied by the - Corresponding Source fixed on a durable physical medium - customarily used for software interchange. - - b) Convey the object code in, or embodied in, a physical product - (including a physical distribution medium), accompanied by a - written offer, valid for at least three years and valid for as - long as you offer spare parts or customer support for that product - model, to give anyone who possesses the object code either (1) a - copy of the Corresponding Source for all the software in the - product that is covered by this License, on a durable physical - medium customarily used for software interchange, for a price no - more than your reasonable cost of physically performing this - conveying of source, or (2) access to copy the - Corresponding Source from a network server at no charge. - - c) Convey individual copies of the object code with a copy of the - written offer to provide the Corresponding Source. This - alternative is allowed only occasionally and noncommercially, and - only if you received the object code with such an offer, in accord - with subsection 6b. - - d) Convey the object code by offering access from a designated - place (gratis or for a charge), and offer equivalent access to the - Corresponding Source in the same way through the same place at no - further charge. You need not require recipients to copy the - Corresponding Source along with the object code. If the place to - copy the object code is a network server, the Corresponding Source - may be on a different server (operated by you or a third party) - that supports equivalent copying facilities, provided you maintain - clear directions next to the object code saying where to find the - Corresponding Source. Regardless of what server hosts the - Corresponding Source, you remain obligated to ensure that it is - available for as long as needed to satisfy these requirements. - - e) Convey the object code using peer-to-peer transmission, provided - you inform other peers where the object code and Corresponding - Source of the work are being offered to the general public at no - charge under subsection 6d. 
- - A separable portion of the object code, whose source code is excluded -from the Corresponding Source as a System Library, need not be -included in conveying the object code work. - - A "User Product" is either (1) a "consumer product", which means any -tangible personal property which is normally used for personal, family, -or household purposes, or (2) anything designed or sold for incorporation -into a dwelling. In determining whether a product is a consumer product, -doubtful cases shall be resolved in favor of coverage. For a particular -product received by a particular user, "normally used" refers to a -typical or common use of that class of product, regardless of the status -of the particular user or of the way in which the particular user -actually uses, or expects or is expected to use, the product. A product -is a consumer product regardless of whether the product has substantial -commercial, industrial or non-consumer uses, unless such uses represent -the only significant mode of use of the product. - - "Installation Information" for a User Product means any methods, -procedures, authorization keys, or other information required to install -and execute modified versions of a covered work in that User Product from -a modified version of its Corresponding Source. The information must -suffice to ensure that the continued functioning of the modified object -code is in no case prevented or interfered with solely because -modification has been made. - - If you convey an object code work under this section in, or with, or -specifically for use in, a User Product, and the conveying occurs as -part of a transaction in which the right of possession and use of the -User Product is transferred to the recipient in perpetuity or for a -fixed term (regardless of how the transaction is characterized), the -Corresponding Source conveyed under this section must be accompanied -by the Installation Information. But this requirement does not apply -if neither you nor any third party retains the ability to install -modified object code on the User Product (for example, the work has -been installed in ROM). - - The requirement to provide Installation Information does not include a -requirement to continue to provide support service, warranty, or updates -for a work that has been modified or installed by the recipient, or for -the User Product in which it has been modified or installed. Access to a -network may be denied when the modification itself materially and -adversely affects the operation of the network or violates the rules and -protocols for communication across the network. - - Corresponding Source conveyed, and Installation Information provided, -in accord with this section must be in a format that is publicly -documented (and with an implementation available to the public in -source code form), and must require no special password or key for -unpacking, reading or copying. - - 7. Additional Terms. - - "Additional permissions" are terms that supplement the terms of this -License by making exceptions from one or more of its conditions. -Additional permissions that are applicable to the entire Program shall -be treated as though they were included in this License, to the extent -that they are valid under applicable law. If additional permissions -apply only to part of the Program, that part may be used separately -under those permissions, but the entire Program remains governed by -this License without regard to the additional permissions. 
- - When you convey a copy of a covered work, you may at your option -remove any additional permissions from that copy, or from any part of -it. (Additional permissions may be written to require their own -removal in certain cases when you modify the work.) You may place -additional permissions on material, added by you to a covered work, -for which you have or can give appropriate copyright permission. - - Notwithstanding any other provision of this License, for material you -add to a covered work, you may (if authorized by the copyright holders of -that material) supplement the terms of this License with terms: - - a) Disclaiming warranty or limiting liability differently from the - terms of sections 15 and 16 of this License; or - - b) Requiring preservation of specified reasonable legal notices or - author attributions in that material or in the Appropriate Legal - Notices displayed by works containing it; or - - c) Prohibiting misrepresentation of the origin of that material, or - requiring that modified versions of such material be marked in - reasonable ways as different from the original version; or - - d) Limiting the use for publicity purposes of names of licensors or - authors of the material; or - - e) Declining to grant rights under trademark law for use of some - trade names, trademarks, or service marks; or - - f) Requiring indemnification of licensors and authors of that - material by anyone who conveys the material (or modified versions of - it) with contractual assumptions of liability to the recipient, for - any liability that these contractual assumptions directly impose on - those licensors and authors. - - All other non-permissive additional terms are considered "further -restrictions" within the meaning of section 10. If the Program as you -received it, or any part of it, contains a notice stating that it is -governed by this License along with a term that is a further -restriction, you may remove that term. If a license document contains -a further restriction but permits relicensing or conveying under this -License, you may add to a covered work material governed by the terms -of that license document, provided that the further restriction does -not survive such relicensing or conveying. - - If you add terms to a covered work in accord with this section, you -must place, in the relevant source files, a statement of the -additional terms that apply to those files, or a notice indicating -where to find the applicable terms. - - Additional terms, permissive or non-permissive, may be stated in the -form of a separately written license, or stated as exceptions; -the above requirements apply either way. - - 8. Termination. - - You may not propagate or modify a covered work except as expressly -provided under this License. Any attempt otherwise to propagate or -modify it is void, and will automatically terminate your rights under -this License (including any patent licenses granted under the third -paragraph of section 11). - - However, if you cease all violation of this License, then your -license from a particular copyright holder is reinstated (a) -provisionally, unless and until the copyright holder explicitly and -finally terminates your license, and (b) permanently, if the copyright -holder fails to notify you of the violation by some reasonable means -prior to 60 days after the cessation. 
- - Moreover, your license from a particular copyright holder is -reinstated permanently if the copyright holder notifies you of the -violation by some reasonable means, this is the first time you have -received notice of violation of this License (for any work) from that -copyright holder, and you cure the violation prior to 30 days after -your receipt of the notice. - - Termination of your rights under this section does not terminate the -licenses of parties who have received copies or rights from you under -this License. If your rights have been terminated and not permanently -reinstated, you do not qualify to receive new licenses for the same -material under section 10. - - 9. Acceptance Not Required for Having Copies. - - You are not required to accept this License in order to receive or -run a copy of the Program. Ancillary propagation of a covered work -occurring solely as a consequence of using peer-to-peer transmission -to receive a copy likewise does not require acceptance. However, -nothing other than this License grants you permission to propagate or -modify any covered work. These actions infringe copyright if you do -not accept this License. Therefore, by modifying or propagating a -covered work, you indicate your acceptance of this License to do so. - - 10. Automatic Licensing of Downstream Recipients. - - Each time you convey a covered work, the recipient automatically -receives a license from the original licensors, to run, modify and -propagate that work, subject to this License. You are not responsible -for enforcing compliance by third parties with this License. - - An "entity transaction" is a transaction transferring control of an -organization, or substantially all assets of one, or subdividing an -organization, or merging organizations. If propagation of a covered -work results from an entity transaction, each party to that -transaction who receives a copy of the work also receives whatever -licenses to the work the party's predecessor in interest had or could -give under the previous paragraph, plus a right to possession of the -Corresponding Source of the work from the predecessor in interest, if -the predecessor has it or can get it with reasonable efforts. - - You may not impose any further restrictions on the exercise of the -rights granted or affirmed under this License. For example, you may -not impose a license fee, royalty, or other charge for exercise of -rights granted under this License, and you may not initiate litigation -(including a cross-claim or counterclaim in a lawsuit) alleging that -any patent claim is infringed by making, using, selling, offering for -sale, or importing the Program or any portion of it. - - 11. Patents. - - A "contributor" is a copyright holder who authorizes use under this -License of the Program or a work on which the Program is based. The -work thus licensed is called the contributor's "contributor version". - - A contributor's "essential patent claims" are all patent claims -owned or controlled by the contributor, whether already acquired or -hereafter acquired, that would be infringed by some manner, permitted -by this License, of making, using, or selling its contributor version, -but do not include claims that would be infringed only as a -consequence of further modification of the contributor version. For -purposes of this definition, "control" includes the right to grant -patent sublicenses in a manner consistent with the requirements of -this License. 
- - Each contributor grants you a non-exclusive, worldwide, royalty-free -patent license under the contributor's essential patent claims, to -make, use, sell, offer for sale, import and otherwise run, modify and -propagate the contents of its contributor version. - - In the following three paragraphs, a "patent license" is any express -agreement or commitment, however denominated, not to enforce a patent -(such as an express permission to practice a patent or covenant not to -sue for patent infringement). To "grant" such a patent license to a -party means to make such an agreement or commitment not to enforce a -patent against the party. - - If you convey a covered work, knowingly relying on a patent license, -and the Corresponding Source of the work is not available for anyone -to copy, free of charge and under the terms of this License, through a -publicly available network server or other readily accessible means, -then you must either (1) cause the Corresponding Source to be so -available, or (2) arrange to deprive yourself of the benefit of the -patent license for this particular work, or (3) arrange, in a manner -consistent with the requirements of this License, to extend the patent -license to downstream recipients. "Knowingly relying" means you have -actual knowledge that, but for the patent license, your conveying the -covered work in a country, or your recipient's use of the covered work -in a country, would infringe one or more identifiable patents in that -country that you have reason to believe are valid. - - If, pursuant to or in connection with a single transaction or -arrangement, you convey, or propagate by procuring conveyance of, a -covered work, and grant a patent license to some of the parties -receiving the covered work authorizing them to use, propagate, modify -or convey a specific copy of the covered work, then the patent license -you grant is automatically extended to all recipients of the covered -work and works based on it. - - A patent license is "discriminatory" if it does not include within -the scope of its coverage, prohibits the exercise of, or is -conditioned on the non-exercise of one or more of the rights that are -specifically granted under this License. You may not convey a covered -work if you are a party to an arrangement with a third party that is -in the business of distributing software, under which you make payment -to the third party based on the extent of your activity of conveying -the work, and under which the third party grants, to any of the -parties who would receive the covered work from you, a discriminatory -patent license (a) in connection with copies of the covered work -conveyed by you (or copies made from those copies), or (b) primarily -for and in connection with specific products or compilations that -contain the covered work, unless you entered into that arrangement, -or that patent license was granted, prior to 28 March 2007. - - Nothing in this License shall be construed as excluding or limiting -any implied license or other defenses to infringement that may -otherwise be available to you under applicable patent law. - - 12. No Surrender of Others' Freedom. - - If conditions are imposed on you (whether by court order, agreement or -otherwise) that contradict the conditions of this License, they do not -excuse you from the conditions of this License. 
If you cannot convey a -covered work so as to satisfy simultaneously your obligations under this -License and any other pertinent obligations, then as a consequence you may -not convey it at all. For example, if you agree to terms that obligate you -to collect a royalty for further conveying from those to whom you convey -the Program, the only way you could satisfy both those terms and this -License would be to refrain entirely from conveying the Program. - - 13. Use with the GNU Affero General Public License. - - Notwithstanding any other provision of this License, you have -permission to link or combine any covered work with a work licensed -under version 3 of the GNU Affero General Public License into a single -combined work, and to convey the resulting work. The terms of this -License will continue to apply to the part which is the covered work, -but the special requirements of the GNU Affero General Public License, -section 13, concerning interaction through a network will apply to the -combination as such. - - 14. Revised Versions of this License. - - The Free Software Foundation may publish revised and/or new versions of -the GNU General Public License from time to time. Such new versions will -be similar in spirit to the present version, but may differ in detail to -address new problems or concerns. - - Each version is given a distinguishing version number. If the -Program specifies that a certain numbered version of the GNU General -Public License "or any later version" applies to it, you have the -option of following the terms and conditions either of that numbered -version or of any later version published by the Free Software -Foundation. If the Program does not specify a version number of the -GNU General Public License, you may choose any version ever published -by the Free Software Foundation. - - If the Program specifies that a proxy can decide which future -versions of the GNU General Public License can be used, that proxy's -public statement of acceptance of a version permanently authorizes you -to choose that version for the Program. - - Later license versions may give you additional or different -permissions. However, no additional obligations are imposed on any -author or copyright holder as a result of your choosing to follow a -later version. - - 15. Disclaimer of Warranty. - - THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY -APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT -HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY -OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, -THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR -PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM -IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF -ALL NECESSARY SERVICING, REPAIR OR CORRECTION. - - 16. Limitation of Liability. - - IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING -WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS -THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY -GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE -USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF -DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD -PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), -EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF -SUCH DAMAGES. - - 17. 
Interpretation of Sections 15 and 16. - - If the disclaimer of warranty and limitation of liability provided -above cannot be given local legal effect according to their terms, -reviewing courts shall apply local law that most closely approximates -an absolute waiver of all civil liability in connection with the -Program, unless a warranty or assumption of liability accompanies a -copy of the Program in return for a fee. - - END OF TERMS AND CONDITIONS - - How to Apply These Terms to Your New Programs - - If you develop a new program, and you want it to be of the greatest -possible use to the public, the best way to achieve this is to make it -free software which everyone can redistribute and change under these terms. - - To do so, attach the following notices to the program. It is safest -to attach them to the start of each source file to most effectively -state the exclusion of warranty; and each file should have at least -the "copyright" line and a pointer to where the full notice is found. - - {one line to give the program's name and a brief idea of what it does.} - Copyright (C) {year} {name of author} - - This program is free software: you can redistribute it and/or modify - it under the terms of the GNU General Public License as published by - the Free Software Foundation, either version 3 of the License, or - (at your option) any later version. - - This program is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - GNU General Public License for more details. - - You should have received a copy of the GNU General Public License - along with this program. If not, see {http://www.gnu.org/licenses/}. - -Also add information on how to contact you by electronic and paper mail. - - If the program does terminal interaction, make it output a short -notice like this when it starts in an interactive mode: - - opennota Copyright (C) 2013 opennota - This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'. - This is free software, and you are welcome to redistribute it - under certain conditions; type `show c' for details. - -The hypothetical commands `show w' and `show c' should show the appropriate -parts of the General Public License. Of course, your program's commands -might be different; for a GUI interface, you would use an "about box". - - You should also get your employer (if you work as a programmer) or school, -if any, to sign a "copyright disclaimer" for the program, if necessary. -For more information on this, and how to apply and follow the GNU GPL, see -{http://www.gnu.org/licenses/}. - - The GNU General Public License does not permit incorporating your program -into proprietary programs. If your program is a subroutine library, you -may consider it more useful to permit linking proprietary applications with -the library. If this is what you want to do, use the GNU Lesser General -Public License instead of this License. But first, please read -{http://www.gnu.org/philosophy/why-not-lgpl.html}. 
diff --git a/vendor/github.com/golangci/check/cmd/structcheck/structcheck.go b/vendor/github.com/golangci/check/cmd/structcheck/structcheck.go deleted file mode 100644 index 5dc5f8380..000000000 --- a/vendor/github.com/golangci/check/cmd/structcheck/structcheck.go +++ /dev/null @@ -1,193 +0,0 @@ -// structcheck -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. -// -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. -// -// You should have received a copy of the GNU General Public License -// along with this program. If not, see . - -package structcheck - -import ( - "flag" - "fmt" - "go/ast" - "go/token" - "go/types" - - "golang.org/x/tools/go/loader" -) - -var ( - assignmentsOnly = flag.Bool("structcheck.a", false, "Count assignments only") - loadTestFiles = flag.Bool("structcheck.t", false, "Load test files too") - buildTags = flag.String("structcheck.tags", "", "Build tags") -) - -type visitor struct { - prog *loader.Program - pkg *loader.PackageInfo - m map[types.Type]map[string]int - skip map[types.Type]struct{} -} - -func (v *visitor) decl(t types.Type, fieldName string) { - if _, ok := v.m[t]; !ok { - v.m[t] = make(map[string]int) - } - if _, ok := v.m[t][fieldName]; !ok { - v.m[t][fieldName] = 0 - } -} - -func (v *visitor) assignment(t types.Type, fieldName string) { - if _, ok := v.m[t]; !ok { - v.m[t] = make(map[string]int) - } - if _, ok := v.m[t][fieldName]; ok { - v.m[t][fieldName]++ - } else { - v.m[t][fieldName] = 1 - } -} - -func (v *visitor) typeSpec(node *ast.TypeSpec) { - if strukt, ok := node.Type.(*ast.StructType); ok { - t := v.pkg.Info.Defs[node.Name].Type() - for _, f := range strukt.Fields.List { - if len(f.Names) > 0 { - fieldName := f.Names[0].Name - v.decl(t, fieldName) - } - } - } -} - -func (v *visitor) typeAndFieldName(expr *ast.SelectorExpr) (types.Type, string, bool) { - selection := v.pkg.Info.Selections[expr] - if selection == nil { - return nil, "", false - } - recv := selection.Recv() - if ptr, ok := recv.(*types.Pointer); ok { - recv = ptr.Elem() - } - return recv, selection.Obj().Name(), true -} - -func (v *visitor) assignStmt(node *ast.AssignStmt) { - for _, lhs := range node.Lhs { - var selector *ast.SelectorExpr - switch expr := lhs.(type) { - case *ast.SelectorExpr: - selector = expr - case *ast.IndexExpr: - if expr, ok := expr.X.(*ast.SelectorExpr); ok { - selector = expr - } - } - if selector != nil { - if t, fn, ok := v.typeAndFieldName(selector); ok { - v.assignment(t, fn) - } - } - } -} - -func (v *visitor) compositeLiteral(node *ast.CompositeLit) { - t := v.pkg.Info.Types[node.Type].Type - for _, expr := range node.Elts { - if kv, ok := expr.(*ast.KeyValueExpr); ok { - if ident, ok := kv.Key.(*ast.Ident); ok { - v.assignment(t, ident.Name) - } - } else { - // Struct literal with positional values. - // All the fields are assigned. 
- v.skip[t] = struct{}{} - break - } - } -} - -func (v *visitor) Visit(node ast.Node) ast.Visitor { - switch node := node.(type) { - case *ast.TypeSpec: - v.typeSpec(node) - - case *ast.AssignStmt: - if *assignmentsOnly { - v.assignStmt(node) - } - - case *ast.SelectorExpr: - if !*assignmentsOnly { - if t, fn, ok := v.typeAndFieldName(node); ok { - v.assignment(t, fn) - } - } - - case *ast.CompositeLit: - v.compositeLiteral(node) - } - - return v -} - -type Issue struct { - Pos token.Position - Type string - FieldName string -} - -func Run(program *loader.Program, reportExported bool) []Issue { - var issues []Issue - for _, pkg := range program.InitialPackages() { - visitor := &visitor{ - m: make(map[types.Type]map[string]int), - skip: make(map[types.Type]struct{}), - prog: program, - pkg: pkg, - } - for _, f := range pkg.Files { - ast.Walk(visitor, f) - } - - for t := range visitor.m { - if _, skip := visitor.skip[t]; skip { - continue - } - for fieldName, v := range visitor.m[t] { - if !reportExported && ast.IsExported(fieldName) { - continue - } - if v == 0 { - field, _, _ := types.LookupFieldOrMethod(t, false, pkg.Pkg, fieldName) - if field == nil { - fmt.Printf("%s: unknown field or method: %s.%s\n", pkg.Pkg.Path(), t, fieldName) - continue - } - if fieldName == "XMLName" { - if named, ok := field.Type().(*types.Named); ok && named.Obj().Pkg().Path() == "encoding/xml" { - continue - } - } - pos := program.Fset.Position(field.Pos()) - issues = append(issues, Issue{ - Pos: pos, - Type: types.TypeString(t, nil), - FieldName: fieldName, - }) - } - } - } - } - - return issues -} diff --git a/vendor/github.com/golangci/check/cmd/varcheck/varcheck.go b/vendor/github.com/golangci/check/cmd/varcheck/varcheck.go deleted file mode 100644 index 8e93e0473..000000000 --- a/vendor/github.com/golangci/check/cmd/varcheck/varcheck.go +++ /dev/null @@ -1,163 +0,0 @@ -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. -// -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. -// -// You should have received a copy of the GNU General Public License -// along with this program. If not, see . 
- -package varcheck - -import ( - "flag" - "go/ast" - "go/token" - "strings" - - "go/types" - - "golang.org/x/tools/go/loader" -) - -var ( - buildTags = flag.String("varcheck.tags", "", "Build tags") -) - -type object struct { - pkgPath string - name string -} - -type visitor struct { - prog *loader.Program - pkg *loader.PackageInfo - uses map[object]int - positions map[object]token.Position - insideFunc bool -} - -func getKey(obj types.Object) object { - if obj == nil { - return object{} - } - - pkg := obj.Pkg() - pkgPath := "" - if pkg != nil { - pkgPath = pkg.Path() - } - - return object{ - pkgPath: pkgPath, - name: obj.Name(), - } -} - -func (v *visitor) decl(obj types.Object) { - key := getKey(obj) - if _, ok := v.uses[key]; !ok { - v.uses[key] = 0 - } - if _, ok := v.positions[key]; !ok { - v.positions[key] = v.prog.Fset.Position(obj.Pos()) - } -} - -func (v *visitor) use(obj types.Object) { - key := getKey(obj) - if _, ok := v.uses[key]; ok { - v.uses[key]++ - } else { - v.uses[key] = 1 - } -} - -func isReserved(name string) bool { - return name == "_" || strings.HasPrefix(strings.ToLower(name), "_cgo_") -} - -func (v *visitor) Visit(node ast.Node) ast.Visitor { - switch node := node.(type) { - case *ast.Ident: - v.use(v.pkg.Info.Uses[node]) - - case *ast.ValueSpec: - if !v.insideFunc { - for _, ident := range node.Names { - if !isReserved(ident.Name) { - v.decl(v.pkg.Info.Defs[ident]) - } - } - } - for _, val := range node.Values { - ast.Walk(v, val) - } - if node.Type != nil { - ast.Walk(v, node.Type) - } - return nil - - case *ast.FuncDecl: - if node.Body != nil { - v.insideFunc = true - ast.Walk(v, node.Body) - v.insideFunc = false - } - - if node.Recv != nil { - ast.Walk(v, node.Recv) - } - if node.Type != nil { - ast.Walk(v, node.Type) - } - - return nil - } - - return v -} - -type Issue struct { - Pos token.Position - VarName string -} - -func Run(program *loader.Program, reportExported bool) []Issue { - var issues []Issue - uses := make(map[object]int) - positions := make(map[object]token.Position) - - for _, pkgInfo := range program.InitialPackages() { - if pkgInfo.Pkg.Path() == "unsafe" { - continue - } - - v := &visitor{ - prog: program, - pkg: pkgInfo, - uses: uses, - positions: positions, - } - - for _, f := range v.pkg.Files { - ast.Walk(v, f) - } - } - - for obj, useCount := range uses { - if useCount == 0 && (reportExported || !ast.IsExported(obj.name)) { - pos := positions[obj] - issues = append(issues, Issue{ - Pos: pos, - VarName: obj.name, - }) - } - } - - return issues -} diff --git a/vendor/github.com/golangci/dupl/.travis.yml b/vendor/github.com/golangci/dupl/.travis.yml deleted file mode 100644 index 33de24c0f..000000000 --- a/vendor/github.com/golangci/dupl/.travis.yml +++ /dev/null @@ -1,5 +0,0 @@ -language: go -go: - - 1.3 - - 1.8 - - 1.9 diff --git a/vendor/github.com/golangci/dupl/LICENSE b/vendor/github.com/golangci/dupl/LICENSE deleted file mode 100644 index ab317d841..000000000 --- a/vendor/github.com/golangci/dupl/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -The MIT License (MIT) - -Copyright (c) 2015 Michal Bohuslávek - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - 
-The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. diff --git a/vendor/github.com/golangci/dupl/README.md b/vendor/github.com/golangci/dupl/README.md deleted file mode 100644 index f34901d7a..000000000 --- a/vendor/github.com/golangci/dupl/README.md +++ /dev/null @@ -1,63 +0,0 @@ -# dupl [![Build Status](https://travis-ci.org/mibk/dupl.png)](https://travis-ci.org/mibk/dupl) - -**dupl** is a tool written in Go for finding code clones. So far it can find clones only -in the Go source files. The method uses suffix tree for serialized ASTs. It ignores values -of AST nodes. It just operates with their types (e.g. `if a == 13 {}` and `if x == 100 {}` are -considered the same provided it exceeds the minimal token sequence size). - -Due to the used method dupl can report so called "false positives" on the output. These are -the ones we do not consider clones (whether they are too small, or the values of the matched -tokens are completely different). - -## Installation - -```bash -go get -u github.com/golangci/dupl -``` - -## Usage - -``` -Usage of dupl: - dupl [flags] [paths] - -Paths: - If the given path is a file, dupl will use it regardless of - the file extension. If it is a directory it will recursively - search for *.go files in that directory. - - If no path is given dupl will recursively search for *.go - files in the current directory. - -Flags: - -files - read file names from stdin one at each line - -html - output the results as HTML, including duplicate code fragments - -plumbing - plumbing (easy-to-parse) output for consumption by scripts or tools - -t, -threshold size - minimum token sequence size as a clone (default 15) - -vendor - check files in vendor directory - -v, -verbose - explain what is being done - -Examples: - dupl -t 100 - Search clones in the current directory of size at least - 100 tokens. - dupl $(find app/ -name '*_test.go') - Search for clones in tests in the app directory. - find app/ -name '*_test.go' |dupl -files - The same as above. -``` - -## Example - -The reduced output of this command with the following parameters for the [Docker](https://www.docker.com) source code -looks like [this](http://htmlpreview.github.io/?https://github.com/golangci/dupl/blob/master/_output_example/docker.html). - -```bash -$ dupl -t 200 -html >docker.html -``` diff --git a/vendor/github.com/golangci/dupl/job/buildtree.go b/vendor/github.com/golangci/dupl/job/buildtree.go deleted file mode 100644 index e9aad54c0..000000000 --- a/vendor/github.com/golangci/dupl/job/buildtree.go +++ /dev/null @@ -1,22 +0,0 @@ -package job - -import ( - "github.com/golangci/dupl/suffixtree" - "github.com/golangci/dupl/syntax" -) - -func BuildTree(schan chan []*syntax.Node) (t *suffixtree.STree, d *[]*syntax.Node, done chan bool) { - t = suffixtree.New() - data := make([]*syntax.Node, 0, 100) - done = make(chan bool) - go func() { - for seq := range schan { - data = append(data, seq...) 
- for _, node := range seq { - t.Update(node) - } - } - done <- true - }() - return t, &data, done -} diff --git a/vendor/github.com/golangci/dupl/job/parse.go b/vendor/github.com/golangci/dupl/job/parse.go deleted file mode 100644 index eb9d7c625..000000000 --- a/vendor/github.com/golangci/dupl/job/parse.go +++ /dev/null @@ -1,36 +0,0 @@ -package job - -import ( - "log" - - "github.com/golangci/dupl/syntax" - "github.com/golangci/dupl/syntax/golang" -) - -func Parse(fchan chan string) chan []*syntax.Node { - - // parse AST - achan := make(chan *syntax.Node) - go func() { - for file := range fchan { - ast, err := golang.Parse(file) - if err != nil { - log.Println(err) - continue - } - achan <- ast - } - close(achan) - }() - - // serialize - schan := make(chan []*syntax.Node) - go func() { - for ast := range achan { - seq := syntax.Serialize(ast) - schan <- seq - } - close(schan) - }() - return schan -} diff --git a/vendor/github.com/golangci/dupl/main.go b/vendor/github.com/golangci/dupl/main.go deleted file mode 100644 index 3030a97ae..000000000 --- a/vendor/github.com/golangci/dupl/main.go +++ /dev/null @@ -1,148 +0,0 @@ -package dupl - -import ( - "flag" - "fmt" - "io/ioutil" - "os" - "path/filepath" - "sort" - - "github.com/golangci/dupl/job" - "github.com/golangci/dupl/printer" - "github.com/golangci/dupl/syntax" -) - -const defaultThreshold = 15 - -var ( - paths = []string{"."} - vendor = flag.Bool("dupl.vendor", false, "") - verbose = flag.Bool("dupl.verbose", false, "") - files = flag.Bool("dupl.files", false, "") - - html = flag.Bool("dupl.html", false, "") - plumbing = flag.Bool("dupl.plumbing", false, "") -) - -const ( - vendorDirPrefix = "vendor" + string(filepath.Separator) - vendorDirInPath = string(filepath.Separator) + vendorDirPrefix -) - -func init() { - flag.BoolVar(verbose, "dupl.v", false, "alias for -verbose") -} - -func Run(files []string, threshold int) ([]printer.Issue, error) { - fchan := make(chan string, 1024) - go func() { - for _, f := range files { - fchan <- f - } - close(fchan) - }() - schan := job.Parse(fchan) - t, data, done := job.BuildTree(schan) - <-done - - // finish stream - t.Update(&syntax.Node{Type: -1}) - - mchan := t.FindDuplOver(threshold) - duplChan := make(chan syntax.Match) - go func() { - for m := range mchan { - match := syntax.FindSyntaxUnits(*data, m, threshold) - if len(match.Frags) > 0 { - duplChan <- match - } - } - close(duplChan) - }() - - return makeIssues(duplChan) -} - -func makeIssues(duplChan <-chan syntax.Match) ([]printer.Issue, error) { - groups := make(map[string][][]*syntax.Node) - for dupl := range duplChan { - groups[dupl.Hash] = append(groups[dupl.Hash], dupl.Frags...) - } - keys := make([]string, 0, len(groups)) - for k := range groups { - keys = append(keys, k) - } - sort.Strings(keys) - - p := printer.NewPlumbing(ioutil.ReadFile) - - var issues []printer.Issue - for _, k := range keys { - uniq := unique(groups[k]) - if len(uniq) > 1 { - i, err := p.MakeIssues(uniq) - if err != nil { - return nil, err - } - issues = append(issues, i...) 
- } - } - - return issues, nil -} - -func unique(group [][]*syntax.Node) [][]*syntax.Node { - fileMap := make(map[string]map[int]struct{}) - - var newGroup [][]*syntax.Node - for _, seq := range group { - node := seq[0] - file, ok := fileMap[node.Filename] - if !ok { - file = make(map[int]struct{}) - fileMap[node.Filename] = file - } - if _, ok := file[node.Pos]; !ok { - file[node.Pos] = struct{}{} - newGroup = append(newGroup, seq) - } - } - return newGroup -} - -func usage() { - fmt.Fprintln(os.Stderr, `Usage: dupl [flags] [paths] - -Paths: - If the given path is a file, dupl will use it regardless of - the file extension. If it is a directory, it will recursively - search for *.go files in that directory. - - If no path is given, dupl will recursively search for *.go - files in the current directory. - -Flags: - -files - read file names from stdin one at each line - -html - output the results as HTML, including duplicate code fragments - -plumbing - plumbing (easy-to-parse) output for consumption by scripts or tools - -t, -threshold size - minimum token sequence size as a clone (default 15) - -vendor - check files in vendor directory - -v, -verbose - explain what is being done - -Examples: - dupl -t 100 - Search clones in the current directory of size at least - 100 tokens. - dupl $(find app/ -name '*_test.go') - Search for clones in tests in the app directory. - find app/ -name '*_test.go' |dupl -files - The same as above.`) - os.Exit(2) -} diff --git a/vendor/github.com/golangci/dupl/printer/html.go b/vendor/github.com/golangci/dupl/printer/html.go deleted file mode 100644 index 5ad9e25c7..000000000 --- a/vendor/github.com/golangci/dupl/printer/html.go +++ /dev/null @@ -1,120 +0,0 @@ -package printer - -import ( - "bytes" - "fmt" - "io" - "regexp" - "sort" - - "github.com/golangci/dupl/syntax" -) - -type html struct { - iota int - w io.Writer - ReadFile -} - -func NewHTML(w io.Writer, fread ReadFile) Printer { - return &html{w: w, ReadFile: fread} -} - -func (p *html) PrintHeader() error { - _, err := fmt.Fprint(p.w, ` - -Duplicates - -`) - return err -} - -func (p *html) PrintClones(dups [][]*syntax.Node) error { - p.iota++ - fmt.Fprintf(p.w, "

<h1>#%d found %d clones</h1>
\n", p.iota, len(dups)) - - clones := make([]clone, len(dups)) - for i, dup := range dups { - cnt := len(dup) - if cnt == 0 { - panic("zero length dup") - } - nstart := dup[0] - nend := dup[cnt-1] - - file, err := p.ReadFile(nstart.Filename) - if err != nil { - return err - } - - lineStart, _ := blockLines(file, nstart.Pos, nend.End) - cl := clone{filename: nstart.Filename, lineStart: lineStart} - start := findLineBeg(file, nstart.Pos) - content := append(toWhitespace(file[start:nstart.Pos]), file[nstart.Pos:nend.End]...) - cl.fragment = deindent(content) - clones[i] = cl - } - - sort.Sort(byNameAndLine(clones)) - for _, cl := range clones { - fmt.Fprintf(p.w, "

<h2>%s:%d</h2>\n<pre>%s</pre>
\n", cl.filename, cl.lineStart, cl.fragment) - } - return nil -} - -func (*html) PrintFooter() error { return nil } - -func findLineBeg(file []byte, index int) int { - for i := index; i >= 0; i-- { - if file[i] == '\n' { - return i + 1 - } - } - return 0 -} - -func toWhitespace(str []byte) []byte { - var out []byte - for _, c := range bytes.Runes(str) { - if c == '\t' { - out = append(out, '\t') - } else { - out = append(out, ' ') - } - } - return out -} - -func deindent(block []byte) []byte { - const maxVal = 99 - min := maxVal - re := regexp.MustCompile(`(^|\n)(\t*)\S`) - for _, line := range re.FindAllSubmatch(block, -1) { - indent := line[2] - if len(indent) < min { - min = len(indent) - } - } - if min == 0 || min == maxVal { - return block - } - block = block[min:] -Loop: - for i := 0; i < len(block); i++ { - if block[i] == '\n' && i != len(block)-1 { - for j := 0; j < min; j++ { - if block[i+j+1] != '\t' { - continue Loop - } - } - block = append(block[:i+1], block[i+1+min:]...) - } - } - return block -} diff --git a/vendor/github.com/golangci/dupl/printer/plumbing.go b/vendor/github.com/golangci/dupl/printer/plumbing.go deleted file mode 100644 index cf39d01b7..000000000 --- a/vendor/github.com/golangci/dupl/printer/plumbing.go +++ /dev/null @@ -1,50 +0,0 @@ -package printer - -import ( - "sort" - - "github.com/golangci/dupl/syntax" -) - -type Clone clone - -func (c Clone) Filename() string { - return c.filename -} - -func (c Clone) LineStart() int { - return c.lineStart -} - -func (c Clone) LineEnd() int { - return c.lineEnd -} - -type Issue struct { - From, To Clone -} - -type Plumbing struct { - ReadFile -} - -func NewPlumbing(fread ReadFile) *Plumbing { - return &Plumbing{fread} -} - -func (p *Plumbing) MakeIssues(dups [][]*syntax.Node) ([]Issue, error) { - clones, err := prepareClonesInfo(p.ReadFile, dups) - if err != nil { - return nil, err - } - sort.Sort(byNameAndLine(clones)) - var issues []Issue - for i, cl := range clones { - nextCl := clones[(i+1)%len(clones)] - issues = append(issues, Issue{ - From: Clone(cl), - To: Clone(nextCl), - }) - } - return issues, nil -} diff --git a/vendor/github.com/golangci/dupl/printer/printer.go b/vendor/github.com/golangci/dupl/printer/printer.go deleted file mode 100644 index 385217bfc..000000000 --- a/vendor/github.com/golangci/dupl/printer/printer.go +++ /dev/null @@ -1,11 +0,0 @@ -package printer - -import "github.com/golangci/dupl/syntax" - -type ReadFile func(filename string) ([]byte, error) - -type Printer interface { - PrintHeader() error - PrintClones(dups [][]*syntax.Node) error - PrintFooter() error -} diff --git a/vendor/github.com/golangci/dupl/printer/text.go b/vendor/github.com/golangci/dupl/printer/text.go deleted file mode 100644 index 8359fa76f..000000000 --- a/vendor/github.com/golangci/dupl/printer/text.go +++ /dev/null @@ -1,100 +0,0 @@ -package printer - -import ( - "fmt" - "io" - "sort" - - "github.com/golangci/dupl/syntax" -) - -type text struct { - cnt int - w io.Writer - ReadFile -} - -func NewText(w io.Writer, fread ReadFile) Printer { - return &text{w: w, ReadFile: fread} -} - -func (p *text) PrintHeader() error { return nil } - -func (p *text) PrintClones(dups [][]*syntax.Node) error { - p.cnt++ - fmt.Fprintf(p.w, "found %d clones:\n", len(dups)) - clones, err := prepareClonesInfo(p.ReadFile, dups) - if err != nil { - return err - } - sort.Sort(byNameAndLine(clones)) - for _, cl := range clones { - fmt.Fprintf(p.w, " %s:%d,%d\n", cl.filename, cl.lineStart, cl.lineEnd) - } - return nil -} - -func (p *text) 
PrintFooter() error { - _, err := fmt.Fprintf(p.w, "\nFound total %d clone groups.\n", p.cnt) - return err -} - -func prepareClonesInfo(fread ReadFile, dups [][]*syntax.Node) ([]clone, error) { - clones := make([]clone, len(dups)) - for i, dup := range dups { - cnt := len(dup) - if cnt == 0 { - panic("zero length dup") - } - nstart := dup[0] - nend := dup[cnt-1] - - file, err := fread(nstart.Filename) - if err != nil { - return nil, err - } - - cl := clone{filename: nstart.Filename} - cl.lineStart, cl.lineEnd = blockLines(file, nstart.Pos, nend.End) - clones[i] = cl - } - return clones, nil -} - -func blockLines(file []byte, from, to int) (int, int) { - line := 1 - lineStart, lineEnd := 0, 0 - for offset, b := range file { - if b == '\n' { - line++ - } - if offset == from { - lineStart = line - } - if offset == to-1 { - lineEnd = line - break - } - } - return lineStart, lineEnd -} - -type clone struct { - filename string - lineStart int - lineEnd int - fragment []byte -} - -type byNameAndLine []clone - -func (c byNameAndLine) Len() int { return len(c) } - -func (c byNameAndLine) Swap(i, j int) { c[i], c[j] = c[j], c[i] } - -func (c byNameAndLine) Less(i, j int) bool { - if c[i].filename == c[j].filename { - return c[i].lineStart < c[j].lineStart - } - return c[i].filename < c[j].filename -} diff --git a/vendor/github.com/golangci/dupl/suffixtree/dupl.go b/vendor/github.com/golangci/dupl/suffixtree/dupl.go deleted file mode 100644 index ab145b4f3..000000000 --- a/vendor/github.com/golangci/dupl/suffixtree/dupl.go +++ /dev/null @@ -1,98 +0,0 @@ -package suffixtree - -import "sort" - -type Match struct { - Ps []Pos - Len Pos -} - -type posList struct { - positions []Pos -} - -func newPosList() *posList { - return &posList{make([]Pos, 0)} -} - -func (p *posList) append(p2 *posList) { - p.positions = append(p.positions, p2.positions...) -} - -func (p *posList) add(pos Pos) { - p.positions = append(p.positions, pos) -} - -type contextList struct { - lists map[int]*posList -} - -func newContextList() *contextList { - return &contextList{make(map[int]*posList)} -} - -func (c *contextList) getAll() []Pos { - keys := make([]int, 0, len(c.lists)) - for k := range c.lists { - keys = append(keys, k) - } - sort.Ints(keys) - var ps []Pos - for _, k := range keys { - ps = append(ps, c.lists[k].positions...) - } - return ps -} - -func (c *contextList) append(c2 *contextList) { - for lc, pl := range c2.lists { - if _, ok := c.lists[lc]; ok { - c.lists[lc].append(pl) - } else { - c.lists[lc] = pl - } - } -} - -// FindDuplOver find pairs of maximal duplicities over a threshold -// length. 
-func (t *STree) FindDuplOver(threshold int) <-chan Match { - auxTran := newTran(0, 0, t.root) - ch := make(chan Match) - go func() { - walkTrans(auxTran, 0, threshold, ch) - close(ch) - }() - return ch -} - -func walkTrans(parent *tran, length, threshold int, ch chan<- Match) *contextList { - s := parent.state - - cl := newContextList() - - if len(s.trans) == 0 { - pl := newPosList() - start := parent.end + 1 - Pos(length) - pl.add(start) - ch := 0 - if start > 0 { - ch = s.tree.data[start-1].Val() - } - cl.lists[ch] = pl - return cl - } - - for _, t := range s.trans { - ln := length + t.len() - cl2 := walkTrans(t, ln, threshold, ch) - if ln >= threshold { - cl.append(cl2) - } - } - if length >= threshold && len(cl.lists) > 1 { - m := Match{cl.getAll(), Pos(length)} - ch <- m - } - return cl -} diff --git a/vendor/github.com/golangci/dupl/suffixtree/suffixtree.go b/vendor/github.com/golangci/dupl/suffixtree/suffixtree.go deleted file mode 100644 index 738015025..000000000 --- a/vendor/github.com/golangci/dupl/suffixtree/suffixtree.go +++ /dev/null @@ -1,216 +0,0 @@ -package suffixtree - -import ( - "bytes" - "fmt" - "math" - "strings" -) - -const infinity = math.MaxInt32 - -// Pos denotes position in data slice. -type Pos int32 - -type Token interface { - Val() int -} - -// STree is a struct representing a suffix tree. -type STree struct { - data []Token - root *state - auxState *state // auxiliary state - - // active point - s *state - start, end Pos -} - -// New creates new suffix tree. -func New() *STree { - t := new(STree) - t.data = make([]Token, 0, 50) - t.root = newState(t) - t.auxState = newState(t) - t.root.linkState = t.auxState - t.s = t.root - return t -} - -// Update refreshes the suffix tree to by new data. -func (t *STree) Update(data ...Token) { - t.data = append(t.data, data...) - for _ = range data { - t.update() - t.s, t.start = t.canonize(t.s, t.start, t.end) - t.end++ - } -} - -// update transforms suffix tree T(n) to T(n+1). -func (t *STree) update() { - oldr := t.root - - // (s, (start, end)) is the canonical reference pair for the active point - s := t.s - start, end := t.start, t.end - var r *state - for { - var endPoint bool - r, endPoint = t.testAndSplit(s, start, end-1) - if endPoint { - break - } - r.fork(end) - if oldr != t.root { - oldr.linkState = r - } - oldr = r - s, start = t.canonize(s.linkState, start, end-1) - } - if oldr != t.root { - oldr.linkState = r - } - - // update active point - t.s = s - t.start = start -} - -// testAndSplit tests whether a state with canonical ref. pair -// (s, (start, end)) is the end point, that is, a state that have -// a c-transition. If not, then state (exs, (start, end)) is made -// explicit (if not already so). -func (t *STree) testAndSplit(s *state, start, end Pos) (exs *state, endPoint bool) { - c := t.data[t.end] - if start <= end { - tr := s.findTran(t.data[start]) - splitPoint := tr.start + end - start + 1 - if t.data[splitPoint].Val() == c.Val() { - return s, true - } - // make the (s, (start, end)) state explicit - newSt := newState(s.tree) - newSt.addTran(splitPoint, tr.end, tr.state) - tr.end = splitPoint - 1 - tr.state = newSt - return newSt, false - } - if s == t.auxState || s.findTran(c) != nil { - return s, true - } - return s, false -} - -// canonize returns updated state and start position for ref. pair -// (s, (start, end)) of state r so the new ref. pair is canonical, -// that is, referenced from the closest explicit ancestor of r. 
-func (t *STree) canonize(s *state, start, end Pos) (*state, Pos) { - if s == t.auxState { - s, start = t.root, start+1 - } - if start > end { - return s, start - } - - var tr *tran - for { - if start <= end { - tr = s.findTran(t.data[start]) - if tr == nil { - panic(fmt.Sprintf("there should be some transition for '%d' at %d", - t.data[start].Val(), start)) - } - } - if tr.end-tr.start > end-start { - break - } - start += tr.end - tr.start + 1 - s = tr.state - } - if s == nil { - panic("there should always be some suffix link resolution") - } - return s, start -} - -func (t *STree) At(p Pos) Token { - if p < 0 || p >= Pos(len(t.data)) { - panic("position out of bounds") - } - return t.data[p] -} - -func (t *STree) String() string { - buf := new(bytes.Buffer) - printState(buf, t.root, 0) - return buf.String() -} - -func printState(buf *bytes.Buffer, s *state, ident int) { - for _, tr := range s.trans { - fmt.Fprint(buf, strings.Repeat(" ", ident)) - fmt.Fprintf(buf, "* (%d, %d)\n", tr.start, tr.ActEnd()) - printState(buf, tr.state, ident+1) - } -} - -// state is an explicit state of the suffix tree. -type state struct { - tree *STree - trans []*tran - linkState *state -} - -func newState(t *STree) *state { - return &state{ - tree: t, - trans: make([]*tran, 0), - linkState: nil, - } -} - -func (s *state) addTran(start, end Pos, r *state) { - s.trans = append(s.trans, newTran(start, end, r)) -} - -// fork creates a new branch from the state s. -func (s *state) fork(i Pos) *state { - r := newState(s.tree) - s.addTran(i, infinity, r) - return r -} - -// findTran finds c-transition. -func (s *state) findTran(c Token) *tran { - for _, tran := range s.trans { - if s.tree.data[tran.start].Val() == c.Val() { - return tran - } - } - return nil -} - -// tran represents a state's transition. -type tran struct { - start, end Pos - state *state -} - -func newTran(start, end Pos, s *state) *tran { - return &tran{start, end, s} -} - -func (t *tran) len() int { - return int(t.end - t.start + 1) -} - -// ActEnd returns actual end position as consistent with -// the actual length of the data in the STree. -func (t *tran) ActEnd() Pos { - if t.end == infinity { - return Pos(len(t.state.tree.data)) - 1 - } - return t.end -} diff --git a/vendor/github.com/golangci/dupl/syntax/golang/golang.go b/vendor/github.com/golangci/dupl/syntax/golang/golang.go deleted file mode 100644 index a0b1e77e1..000000000 --- a/vendor/github.com/golangci/dupl/syntax/golang/golang.go +++ /dev/null @@ -1,392 +0,0 @@ -package golang - -import ( - "go/ast" - "go/parser" - "go/token" - - "github.com/golangci/dupl/syntax" -) - -const ( - BadNode = iota - File - ArrayType - AssignStmt - BasicLit - BinaryExpr - BlockStmt - BranchStmt - CallExpr - CaseClause - ChanType - CommClause - CompositeLit - DeclStmt - DeferStmt - Ellipsis - EmptyStmt - ExprStmt - Field - FieldList - ForStmt - FuncDecl - FuncLit - FuncType - GenDecl - GoStmt - Ident - IfStmt - IncDecStmt - IndexExpr - InterfaceType - KeyValueExpr - LabeledStmt - MapType - ParenExpr - RangeStmt - ReturnStmt - SelectStmt - SelectorExpr - SendStmt - SliceExpr - StarExpr - StructType - SwitchStmt - TypeAssertExpr - TypeSpec - TypeSwitchStmt - UnaryExpr - ValueSpec -) - -// Parse the given file and return uniform syntax tree. 
-func Parse(filename string) (*syntax.Node, error) { - fset := token.NewFileSet() - file, err := parser.ParseFile(fset, filename, nil, 0) - if err != nil { - return nil, err - } - t := &transformer{ - fileset: fset, - filename: filename, - } - return t.trans(file), nil -} - -type transformer struct { - fileset *token.FileSet - filename string -} - -// trans transforms given golang AST to uniform tree structure. -func (t *transformer) trans(node ast.Node) (o *syntax.Node) { - o = syntax.NewNode() - o.Filename = t.filename - st, end := node.Pos(), node.End() - o.Pos, o.End = t.fileset.File(st).Offset(st), t.fileset.File(end).Offset(end) - - switch n := node.(type) { - case *ast.ArrayType: - o.Type = ArrayType - if n.Len != nil { - o.AddChildren(t.trans(n.Len)) - } - o.AddChildren(t.trans(n.Elt)) - - case *ast.AssignStmt: - o.Type = AssignStmt - for _, e := range n.Rhs { - o.AddChildren(t.trans(e)) - } - - for _, e := range n.Lhs { - o.AddChildren(t.trans(e)) - } - - case *ast.BasicLit: - o.Type = BasicLit - - case *ast.BinaryExpr: - o.Type = BinaryExpr - o.AddChildren(t.trans(n.X), t.trans(n.Y)) - - case *ast.BlockStmt: - o.Type = BlockStmt - for _, stmt := range n.List { - o.AddChildren(t.trans(stmt)) - } - - case *ast.BranchStmt: - o.Type = BranchStmt - if n.Label != nil { - o.AddChildren(t.trans(n.Label)) - } - - case *ast.CallExpr: - o.Type = CallExpr - o.AddChildren(t.trans(n.Fun)) - for _, arg := range n.Args { - o.AddChildren(t.trans(arg)) - } - - case *ast.CaseClause: - o.Type = CaseClause - for _, e := range n.List { - o.AddChildren(t.trans(e)) - } - for _, stmt := range n.Body { - o.AddChildren(t.trans(stmt)) - } - - case *ast.ChanType: - o.Type = ChanType - o.AddChildren(t.trans(n.Value)) - - case *ast.CommClause: - o.Type = CommClause - if n.Comm != nil { - o.AddChildren(t.trans(n.Comm)) - } - for _, stmt := range n.Body { - o.AddChildren(t.trans(stmt)) - } - - case *ast.CompositeLit: - o.Type = CompositeLit - if n.Type != nil { - o.AddChildren(t.trans(n.Type)) - } - for _, e := range n.Elts { - o.AddChildren(t.trans(e)) - } - - case *ast.DeclStmt: - o.Type = DeclStmt - o.AddChildren(t.trans(n.Decl)) - - case *ast.DeferStmt: - o.Type = DeferStmt - o.AddChildren(t.trans(n.Call)) - - case *ast.Ellipsis: - o.Type = Ellipsis - if n.Elt != nil { - o.AddChildren(t.trans(n.Elt)) - } - - case *ast.EmptyStmt: - o.Type = EmptyStmt - - case *ast.ExprStmt: - o.Type = ExprStmt - o.AddChildren(t.trans(n.X)) - - case *ast.Field: - o.Type = Field - for _, name := range n.Names { - o.AddChildren(t.trans(name)) - } - o.AddChildren(t.trans(n.Type)) - - case *ast.FieldList: - o.Type = FieldList - for _, field := range n.List { - o.AddChildren(t.trans(field)) - } - - case *ast.File: - o.Type = File - for _, decl := range n.Decls { - if genDecl, ok := decl.(*ast.GenDecl); ok && genDecl.Tok == token.IMPORT { - // skip import declarations - continue - } - o.AddChildren(t.trans(decl)) - } - - case *ast.ForStmt: - o.Type = ForStmt - if n.Init != nil { - o.AddChildren(t.trans(n.Init)) - } - if n.Cond != nil { - o.AddChildren(t.trans(n.Cond)) - } - if n.Post != nil { - o.AddChildren(t.trans(n.Post)) - } - o.AddChildren(t.trans(n.Body)) - - case *ast.FuncDecl: - o.Type = FuncDecl - if n.Recv != nil { - o.AddChildren(t.trans(n.Recv)) - } - o.AddChildren(t.trans(n.Name), t.trans(n.Type)) - if n.Body != nil { - o.AddChildren(t.trans(n.Body)) - } - - case *ast.FuncLit: - o.Type = FuncLit - o.AddChildren(t.trans(n.Type), t.trans(n.Body)) - - case *ast.FuncType: - o.Type = FuncType - 
o.AddChildren(t.trans(n.Params)) - if n.Results != nil { - o.AddChildren(t.trans(n.Results)) - } - - case *ast.GenDecl: - o.Type = GenDecl - for _, spec := range n.Specs { - o.AddChildren(t.trans(spec)) - } - - case *ast.GoStmt: - o.Type = GoStmt - o.AddChildren(t.trans(n.Call)) - - case *ast.Ident: - o.Type = Ident - - case *ast.IfStmt: - o.Type = IfStmt - if n.Init != nil { - o.AddChildren(t.trans(n.Init)) - } - o.AddChildren(t.trans(n.Cond), t.trans(n.Body)) - if n.Else != nil { - o.AddChildren(t.trans(n.Else)) - } - - case *ast.IncDecStmt: - o.Type = IncDecStmt - o.AddChildren(t.trans(n.X)) - - case *ast.IndexExpr: - o.Type = IndexExpr - o.AddChildren(t.trans(n.X), t.trans(n.Index)) - - case *ast.InterfaceType: - o.Type = InterfaceType - o.AddChildren(t.trans(n.Methods)) - - case *ast.KeyValueExpr: - o.Type = KeyValueExpr - o.AddChildren(t.trans(n.Key), t.trans(n.Value)) - - case *ast.LabeledStmt: - o.Type = LabeledStmt - o.AddChildren(t.trans(n.Label), t.trans(n.Stmt)) - - case *ast.MapType: - o.Type = MapType - o.AddChildren(t.trans(n.Key), t.trans(n.Value)) - - case *ast.ParenExpr: - o.Type = ParenExpr - o.AddChildren(t.trans(n.X)) - - case *ast.RangeStmt: - o.Type = RangeStmt - if n.Key != nil { - o.AddChildren(t.trans(n.Key)) - } - if n.Value != nil { - o.AddChildren(t.trans(n.Value)) - } - o.AddChildren(t.trans(n.X), t.trans(n.Body)) - - case *ast.ReturnStmt: - o.Type = ReturnStmt - for _, e := range n.Results { - o.AddChildren(t.trans(e)) - } - - case *ast.SelectStmt: - o.Type = SelectStmt - o.AddChildren(t.trans(n.Body)) - - case *ast.SelectorExpr: - o.Type = SelectorExpr - o.AddChildren(t.trans(n.X), t.trans(n.Sel)) - - case *ast.SendStmt: - o.Type = SendStmt - o.AddChildren(t.trans(n.Chan), t.trans(n.Value)) - - case *ast.SliceExpr: - o.Type = SliceExpr - o.AddChildren(t.trans(n.X)) - if n.Low != nil { - o.AddChildren(t.trans(n.Low)) - } - if n.High != nil { - o.AddChildren(t.trans(n.High)) - } - if n.Max != nil { - o.AddChildren(t.trans(n.Max)) - } - - case *ast.StarExpr: - o.Type = StarExpr - o.AddChildren(t.trans(n.X)) - - case *ast.StructType: - o.Type = StructType - o.AddChildren(t.trans(n.Fields)) - - case *ast.SwitchStmt: - o.Type = SwitchStmt - if n.Init != nil { - o.AddChildren(t.trans(n.Init)) - } - if n.Tag != nil { - o.AddChildren(t.trans(n.Tag)) - } - o.AddChildren(t.trans(n.Body)) - - case *ast.TypeAssertExpr: - o.Type = TypeAssertExpr - o.AddChildren(t.trans(n.X)) - if n.Type != nil { - o.AddChildren(t.trans(n.Type)) - } - - case *ast.TypeSpec: - o.Type = TypeSpec - o.AddChildren(t.trans(n.Name), t.trans(n.Type)) - - case *ast.TypeSwitchStmt: - o.Type = TypeSwitchStmt - if n.Init != nil { - o.AddChildren(t.trans(n.Init)) - } - o.AddChildren(t.trans(n.Assign), t.trans(n.Body)) - - case *ast.UnaryExpr: - o.Type = UnaryExpr - o.AddChildren(t.trans(n.X)) - - case *ast.ValueSpec: - o.Type = ValueSpec - for _, name := range n.Names { - o.AddChildren(t.trans(name)) - } - if n.Type != nil { - o.AddChildren(t.trans(n.Type)) - } - for _, val := range n.Values { - o.AddChildren(t.trans(val)) - } - - default: - o.Type = BadNode - - } - - return o -} diff --git a/vendor/github.com/golangci/dupl/syntax/syntax.go b/vendor/github.com/golangci/dupl/syntax/syntax.go deleted file mode 100644 index e2c750afd..000000000 --- a/vendor/github.com/golangci/dupl/syntax/syntax.go +++ /dev/null @@ -1,175 +0,0 @@ -package syntax - -import ( - "crypto/sha1" - - "github.com/golangci/dupl/suffixtree" -) - -type Node struct { - Type int - Filename string - Pos, End int - Children []*Node - 
Owns int -} - -func NewNode() *Node { - return &Node{} -} - -func (n *Node) AddChildren(children ...*Node) { - n.Children = append(n.Children, children...) -} - -func (n *Node) Val() int { - return n.Type -} - -type Match struct { - Hash string - Frags [][]*Node -} - -func Serialize(n *Node) []*Node { - stream := make([]*Node, 0, 10) - serial(n, &stream) - return stream -} - -func serial(n *Node, stream *[]*Node) int { - *stream = append(*stream, n) - var count int - for _, child := range n.Children { - count += serial(child, stream) - } - n.Owns = count - return count + 1 -} - -// FindSyntaxUnits finds all complete syntax units in the match group and returns them -// with the corresponding hash. -func FindSyntaxUnits(data []*Node, m suffixtree.Match, threshold int) Match { - if len(m.Ps) == 0 { - return Match{} - } - firstSeq := data[m.Ps[0] : m.Ps[0]+m.Len] - indexes := getUnitsIndexes(firstSeq, threshold) - - // TODO: is this really working? - indexCnt := len(indexes) - if indexCnt > 0 { - lasti := indexes[indexCnt-1] - firstn := firstSeq[lasti] - for i := 1; i < len(m.Ps); i++ { - n := data[int(m.Ps[i])+lasti] - if firstn.Owns != n.Owns { - indexes = indexes[:indexCnt-1] - break - } - } - } - if len(indexes) == 0 || isCyclic(indexes, firstSeq) || spansMultipleFiles(indexes, firstSeq) { - return Match{} - } - - match := Match{Frags: make([][]*Node, len(m.Ps))} - for i, pos := range m.Ps { - match.Frags[i] = make([]*Node, len(indexes)) - for j, index := range indexes { - match.Frags[i][j] = data[int(pos)+index] - } - } - - lastIndex := indexes[len(indexes)-1] - match.Hash = hashSeq(firstSeq[indexes[0] : lastIndex+firstSeq[lastIndex].Owns]) - return match -} - -func getUnitsIndexes(nodeSeq []*Node, threshold int) []int { - var indexes []int - var split bool - for i := 0; i < len(nodeSeq); { - n := nodeSeq[i] - switch { - case n.Owns >= len(nodeSeq)-i: - // not complete syntax unit - i++ - split = true - continue - case n.Owns+1 < threshold: - split = true - default: - if split { - indexes = indexes[:0] - split = false - } - indexes = append(indexes, i) - } - i += n.Owns + 1 - } - return indexes -} - -// isCyclic finds out whether there is a repetive pattern in the found clone. If positive, -// it return false to point out that the clone would be redundant. 
-func isCyclic(indexes []int, nodes []*Node) bool { - cnt := len(indexes) - if cnt <= 1 { - return false - } - - alts := make(map[int]bool) - for i := 1; i <= cnt/2; i++ { - if cnt%i == 0 { - alts[i] = true - } - } - - for i := 0; i < indexes[cnt/2]; i++ { - nstart := nodes[i+indexes[0]] - AltLoop: - for alt := range alts { - for j := alt; j < cnt; j += alt { - index := i + indexes[j] - if index < len(nodes) { - nalt := nodes[index] - if nstart.Owns == nalt.Owns && nstart.Type == nalt.Type { - continue - } - } else if i >= indexes[alt] { - return true - } - delete(alts, alt) - continue AltLoop - } - } - if len(alts) == 0 { - return false - } - } - return true -} - -func spansMultipleFiles(indexes []int, nodes []*Node) bool { - if len(indexes) < 2 { - return false - } - f := nodes[indexes[0]].Filename - for i := 1; i < len(indexes); i++ { - if nodes[indexes[i]].Filename != f { - return true - } - } - return false -} - -func hashSeq(nodes []*Node) string { - h := sha1.New() - bytes := make([]byte, len(nodes)) - for i, node := range nodes { - bytes[i] = byte(node.Type) - } - h.Write(bytes) - return string(h.Sum(nil)) -} diff --git a/vendor/github.com/golangci/go-misc/LICENSE b/vendor/github.com/golangci/go-misc/LICENSE deleted file mode 100644 index cc42dd45d..000000000 --- a/vendor/github.com/golangci/go-misc/LICENSE +++ /dev/null @@ -1,27 +0,0 @@ -Copyright (c) 2012 Rémy Oudompheng. All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are -met: - - * Redistributions of source code must retain the above copyright -notice, this list of conditions and the following disclaimer. - * Redistributions in binary form must reproduce the above -copyright notice, this list of conditions and the following disclaimer -in the documentation and/or other materials provided with the -distribution. - * The name of Rémy Oudompheng may not be used to endorse or promote products derived from -this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - diff --git a/vendor/github.com/golangci/go-misc/deadcode/README.md b/vendor/github.com/golangci/go-misc/deadcode/README.md deleted file mode 100644 index 550423128..000000000 --- a/vendor/github.com/golangci/go-misc/deadcode/README.md +++ /dev/null @@ -1,18 +0,0 @@ -# deadcode - -`deadcode` is a very simple utility which detects unused declarations in a Go package. 
- -## Usage -``` -deadcode [-test] [packages] - - -test Include test files - packages A list of packages using the same conventions as the go tool -``` - -## Limitations - -* Self-referential unused code is not currently reported -* A single package can be tested at a time -* Unused methods are not reported - diff --git a/vendor/github.com/golangci/go-misc/deadcode/deadcode.go b/vendor/github.com/golangci/go-misc/deadcode/deadcode.go deleted file mode 100644 index c154a576b..000000000 --- a/vendor/github.com/golangci/go-misc/deadcode/deadcode.go +++ /dev/null @@ -1,138 +0,0 @@ -package deadcode - -import ( - "fmt" - "go/ast" - "go/token" - "go/types" - "os" - "path/filepath" - "sort" - "strings" - - "golang.org/x/tools/go/loader" -) - -var exitCode int - -var ( - withTestFiles bool -) - -type Issue struct { - Pos token.Position - UnusedIdentName string -} - -func Run(program *loader.Program) ([]Issue, error) { - ctx := &Context{ - program: program, - } - report := ctx.Process() - var issues []Issue - for _, obj := range report { - issues = append(issues, Issue{ - Pos: program.Fset.Position(obj.Pos()), - UnusedIdentName: obj.Name(), - }) - } - - return issues, nil -} - -func fatalf(format string, args ...interface{}) { - panic(fmt.Errorf(format, args...)) -} - -type Context struct { - cwd string - withTests bool - - program *loader.Program -} - -// pos resolves a compact position encoding into a verbose one -func (ctx *Context) pos(pos token.Pos) token.Position { - if ctx.cwd == "" { - ctx.cwd, _ = os.Getwd() - } - p := ctx.program.Fset.Position(pos) - f, err := filepath.Rel(ctx.cwd, p.Filename) - if err == nil { - p.Filename = f - } - return p -} - -// error formats the error to standard error, adding program -// identification and a newline -func (ctx *Context) errorf(pos token.Pos, format string, args ...interface{}) { - p := ctx.pos(pos) - fmt.Fprintf(os.Stderr, p.String()+": "+format+"\n", args...) - exitCode = 2 -} - -func (ctx *Context) Load(args ...string) { - // TODO -} - -func (ctx *Context) Process() []types.Object { - prog := ctx.program - var allUnused []types.Object - for _, pkg := range prog.Imported { - unused := ctx.doPackage(prog, pkg) - allUnused = append(allUnused, unused...) - } - for _, pkg := range prog.Created { - unused := ctx.doPackage(prog, pkg) - allUnused = append(allUnused, unused...) 
- } - sort.Sort(objects(allUnused)) - return allUnused -} - -func isTestFuncByName(name string) bool { - return strings.HasPrefix(name, "Test") || - strings.HasPrefix(name, "Benchmark") || - strings.HasPrefix(name, "Fuzz") || - strings.HasPrefix(name, "Example") -} - -func (ctx *Context) doPackage(prog *loader.Program, pkg *loader.PackageInfo) []types.Object { - used := make(map[types.Object]bool) - for _, file := range pkg.Files { - ast.Inspect(file, func(n ast.Node) bool { - id, ok := n.(*ast.Ident) - if !ok { - return true - } - obj := pkg.Info.Uses[id] - if obj != nil { - used[obj] = true - } - return false - }) - } - - global := pkg.Pkg.Scope() - var unused []types.Object - for _, name := range global.Names() { - if pkg.Pkg.Name() == "main" && name == "main" { - continue - } - obj := global.Lookup(name) - _, isSig := obj.Type().(*types.Signature) - pos := ctx.pos(obj.Pos()) - isTestMethod := isSig && isTestFuncByName(obj.Name()) && strings.HasSuffix(pos.Filename, "_test.go") - if !used[obj] && ((pkg.Pkg.Name() == "main" && !isTestMethod) || !ast.IsExported(name)) { - unused = append(unused, obj) - } - } - return unused -} - -type objects []types.Object - -func (s objects) Len() int { return len(s) } -func (s objects) Swap(i, j int) { s[i], s[j] = s[j], s[i] } -func (s objects) Less(i, j int) bool { return s[i].Pos() < s[j].Pos() } diff --git a/vendor/github.com/golangci/gofmt/gofmt/LICENSE b/vendor/github.com/golangci/gofmt/gofmt/LICENSE deleted file mode 100644 index 6a66aea5e..000000000 --- a/vendor/github.com/golangci/gofmt/gofmt/LICENSE +++ /dev/null @@ -1,27 +0,0 @@ -Copyright (c) 2009 The Go Authors. All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are -met: - - * Redistributions of source code must retain the above copyright -notice, this list of conditions and the following disclaimer. - * Redistributions in binary form must reproduce the above -copyright notice, this list of conditions and the following disclaimer -in the documentation and/or other materials provided with the -distribution. - * Neither the name of Google Inc. nor the names of its -contributors may be used to endorse or promote products derived from -this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/vendor/github.com/golangci/gofmt/gofmt/doc.go b/vendor/github.com/golangci/gofmt/gofmt/doc.go deleted file mode 100644 index d0a458021..000000000 --- a/vendor/github.com/golangci/gofmt/gofmt/doc.go +++ /dev/null @@ -1,106 +0,0 @@ -// Copyright 2009 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -/* -Gofmt formats Go programs. 
-It uses tabs for indentation and blanks for alignment. -Alignment assumes that an editor is using a fixed-width font. - -Without an explicit path, it processes the standard input. Given a file, -it operates on that file; given a directory, it operates on all .go files in -that directory, recursively. (Files starting with a period are ignored.) -By default, gofmt prints the reformatted sources to standard output. - -Usage: - - gofmt [flags] [path ...] - -The flags are: - - -d - Do not print reformatted sources to standard output. - If a file's formatting is different than gofmt's, print diffs - to standard output. - -e - Print all (including spurious) errors. - -l - Do not print reformatted sources to standard output. - If a file's formatting is different from gofmt's, print its name - to standard output. - -r rule - Apply the rewrite rule to the source before reformatting. - -s - Try to simplify code (after applying the rewrite rule, if any). - -w - Do not print reformatted sources to standard output. - If a file's formatting is different from gofmt's, overwrite it - with gofmt's version. If an error occurred during overwriting, - the original file is restored from an automatic backup. - -Debugging support: - - -cpuprofile filename - Write cpu profile to the specified file. - -The rewrite rule specified with the -r flag must be a string of the form: - - pattern -> replacement - -Both pattern and replacement must be valid Go expressions. -In the pattern, single-character lowercase identifiers serve as -wildcards matching arbitrary sub-expressions; those expressions -will be substituted for the same identifiers in the replacement. - -When gofmt reads from standard input, it accepts either a full Go program -or a program fragment. A program fragment must be a syntactically -valid declaration list, statement list, or expression. When formatting -such a fragment, gofmt preserves leading indentation as well as leading -and trailing spaces, so that individual sections of a Go program can be -formatted by piping them through gofmt. - -# Examples - -To check files for unnecessary parentheses: - - gofmt -r '(a) -> a' -l *.go - -To remove the parentheses: - - gofmt -r '(a) -> a' -w *.go - -To convert the package tree from explicit slice upper bounds to implicit ones: - - gofmt -r 'α[β:len(α)] -> α[β:]' -w $GOROOT/src - -# The simplify command - -When invoked with -s gofmt will make the following source transformations where possible. - - An array, slice, or map composite literal of the form: - []T{T{}, T{}} - will be simplified to: - []T{{}, {}} - - A slice expression of the form: - s[a:len(s)] - will be simplified to: - s[a:] - - A range of the form: - for x, _ = range v {...} - will be simplified to: - for x = range v {...} - - A range of the form: - for _ = range v {...} - will be simplified to: - for range v {...} - -This may result in changes that are incompatible with earlier versions of Go. -*/ -package gofmt - -// BUG(rsc): The implementation of -r is a bit slow. -// BUG(gri): If -w fails, the restored original file may not have some of the -// original file attributes. diff --git a/vendor/github.com/golangci/gofmt/gofmt/gofmt.go b/vendor/github.com/golangci/gofmt/gofmt/gofmt.go deleted file mode 100644 index be046f34c..000000000 --- a/vendor/github.com/golangci/gofmt/gofmt/gofmt.go +++ /dev/null @@ -1,504 +0,0 @@ -// Copyright 2009 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
- -package gofmt - -import ( - "bytes" - "context" - "flag" - "fmt" - "go/ast" - "go/parser" - "go/printer" - "go/scanner" - "go/token" - "io" - "io/fs" - "os" - "path/filepath" - "runtime" - "runtime/pprof" - "strings" - - "github.com/golangci/gofmt/gofmt/internal/diff" - "golang.org/x/sync/semaphore" -) - -var ( - // main operation modes - list = flag.Bool("gofmt.l", false, "list files whose formatting differs from gofmt's") - write = flag.Bool("gofmt.w", false, "write result to (source) file instead of stdout") - rewriteRule = flag.String("gofmt.r", "", "rewrite rule (e.g., 'a[b:len(a)] -> a[b:]')") - simplifyAST = flag.Bool("gofmt.s", false, "simplify code") - doDiff = flag.Bool("gofmt.d", false, "display diffs instead of rewriting files") - allErrors = flag.Bool("gofmt.e", false, "report all errors (not just the first 10 on different lines)") - - // debugging - cpuprofile = flag.String("gofmt.cpuprofile", "", "write cpu profile to this file") -) - -// Keep these in sync with go/format/format.go. -const ( - tabWidth = 8 - printerMode = printer.UseSpaces | printer.TabIndent | printerNormalizeNumbers - - // printerNormalizeNumbers means to canonicalize number literal prefixes - // and exponents while printing. See https://golang.org/doc/go1.13#gofmt. - // - // This value is defined in go/printer specifically for go/format and cmd/gofmt. - printerNormalizeNumbers = 1 << 30 -) - -// fdSem guards the number of concurrently-open file descriptors. -// -// For now, this is arbitrarily set to 200, based on the observation that many -// platforms default to a kernel limit of 256. Ideally, perhaps we should derive -// it from rlimit on platforms that support that system call. -// -// File descriptors opened from outside of this package are not tracked, -// so this limit may be approximate. -var fdSem = make(chan bool, 200) - -var ( - rewrite func(*token.FileSet, *ast.File) *ast.File - parserMode parser.Mode -) - -func usage() { - fmt.Fprintf(os.Stderr, "usage: gofmt [flags] [path ...]\n") - flag.PrintDefaults() -} - -func initParserMode() { - parserMode = parser.ParseComments - if *allErrors { - parserMode |= parser.AllErrors - } - // It's only -r that makes use of go/ast's object resolution, - // so avoid the unnecessary work if the flag isn't used. - if *rewriteRule == "" { - parserMode |= parser.SkipObjectResolution - } -} - -func isGoFile(f fs.DirEntry) bool { - // ignore non-Go files - name := f.Name() - return !strings.HasPrefix(name, ".") && strings.HasSuffix(name, ".go") && !f.IsDir() -} - -// A sequencer performs concurrent tasks that may write output, but emits that -// output in a deterministic order. -type sequencer struct { - maxWeight int64 - sem *semaphore.Weighted // weighted by input bytes (an approximate proxy for memory overhead) - prev <-chan *reporterState // 1-buffered -} - -// newSequencer returns a sequencer that allows concurrent tasks up to maxWeight -// and writes tasks' output to out and err. -func newSequencer(maxWeight int64, out, err io.Writer) *sequencer { - sem := semaphore.NewWeighted(maxWeight) - prev := make(chan *reporterState, 1) - prev <- &reporterState{out: out, err: err} - return &sequencer{ - maxWeight: maxWeight, - sem: sem, - prev: prev, - } -} - -// exclusive is a weight that can be passed to a sequencer to cause -// a task to be executed without any other concurrent tasks. -const exclusive = -1 - -// Add blocks until the sequencer has enough weight to spare, then adds f as a -// task to be executed concurrently. 
-// -// If the weight is either negative or larger than the sequencer's maximum -// weight, Add blocks until all other tasks have completed, then the task -// executes exclusively (blocking all other calls to Add until it completes). -// -// f may run concurrently in a goroutine, but its output to the passed-in -// reporter will be sequential relative to the other tasks in the sequencer. -// -// If f invokes a method on the reporter, execution of that method may block -// until the previous task has finished. (To maximize concurrency, f should -// avoid invoking the reporter until it has finished any parallelizable work.) -// -// If f returns a non-nil error, that error will be reported after f's output -// (if any) and will cause a nonzero final exit code. -func (s *sequencer) Add(weight int64, f func(*reporter) error) { - if weight < 0 || weight > s.maxWeight { - weight = s.maxWeight - } - if err := s.sem.Acquire(context.TODO(), weight); err != nil { - // Change the task from "execute f" to "report err". - weight = 0 - f = func(*reporter) error { return err } - } - - r := &reporter{prev: s.prev} - next := make(chan *reporterState, 1) - s.prev = next - - // Start f in parallel: it can run until it invokes a method on r, at which - // point it will block until the previous task releases the output state. - go func() { - if err := f(r); err != nil { - r.Report(err) - } - next <- r.getState() // Release the next task. - s.sem.Release(weight) - }() -} - -// AddReport prints an error to s after the output of any previously-added -// tasks, causing the final exit code to be nonzero. -func (s *sequencer) AddReport(err error) { - s.Add(0, func(*reporter) error { return err }) -} - -// GetExitCode waits for all previously-added tasks to complete, then returns an -// exit code for the sequence suitable for passing to os.Exit. -func (s *sequencer) GetExitCode() int { - c := make(chan int, 1) - s.Add(0, func(r *reporter) error { - c <- r.ExitCode() - return nil - }) - return <-c -} - -// A reporter reports output, warnings, and errors. -type reporter struct { - prev <-chan *reporterState - state *reporterState -} - -// reporterState carries the state of a reporter instance. -// -// Only one reporter at a time may have access to a reporterState. -type reporterState struct { - out, err io.Writer - exitCode int -} - -// getState blocks until any prior reporters are finished with the reporter -// state, then returns the state for manipulation. -func (r *reporter) getState() *reporterState { - if r.state == nil { - r.state = <-r.prev - } - return r.state -} - -// Warnf emits a warning message to the reporter's error stream, -// without changing its exit code. -func (r *reporter) Warnf(format string, args ...any) { - fmt.Fprintf(r.getState().err, format, args...) -} - -// Write emits a slice to the reporter's output stream. -// -// Any error is returned to the caller, and does not otherwise affect the -// reporter's exit code. -func (r *reporter) Write(p []byte) (int, error) { - return r.getState().out.Write(p) -} - -// Report emits a non-nil error to the reporter's error stream, -// changing its exit code to a nonzero value. -func (r *reporter) Report(err error) { - if err == nil { - panic("Report with nil error") - } - st := r.getState() - scanner.PrintError(st.err, err) - st.exitCode = 2 -} - -func (r *reporter) ExitCode() int { - return r.getState().exitCode -} - -// If info == nil, we are formatting stdin instead of a file. 
-// If in == nil, the source is the contents of the file with the given filename. -func processFile(filename string, info fs.FileInfo, in io.Reader, r *reporter) error { - src, err := readFile(filename, info, in) - if err != nil { - return err - } - - fileSet := token.NewFileSet() - fragmentOk := false - if info == nil { - // If we are formatting stdin, we accept a program fragment in lieu of a - // complete source file. - fragmentOk = true - } - file, sourceAdj, indentAdj, err := parse(fileSet, filename, src, fragmentOk) - if err != nil { - return err - } - - if rewrite != nil { - if sourceAdj == nil { - file = rewrite(fileSet, file) - } else { - r.Warnf("warning: rewrite ignored for incomplete programs\n") - } - } - - ast.SortImports(fileSet, file) - - if *simplifyAST { - simplify(file) - } - - res, err := format(fileSet, file, sourceAdj, indentAdj, src, printer.Config{Mode: printerMode, Tabwidth: tabWidth}) - if err != nil { - return err - } - - if !bytes.Equal(src, res) { - // formatting has changed - if *list { - fmt.Fprintln(r, filename) - } - if *write { - if info == nil { - panic("-w should not have been allowed with stdin") - } - // make a temporary backup before overwriting original - perm := info.Mode().Perm() - bakname, err := backupFile(filename+".", src, perm) - if err != nil { - return err - } - fdSem <- true - err = os.WriteFile(filename, res, perm) - <-fdSem - if err != nil { - os.Rename(bakname, filename) - return err - } - err = os.Remove(bakname) - if err != nil { - return err - } - } - if *doDiff { - newName := filepath.ToSlash(filename) - oldName := newName + ".orig" - r.Write(diff.Diff(oldName, src, newName, res)) - } - } - - if !*list && !*write && !*doDiff { - _, err = r.Write(res) - } - - return err -} - -// readFile reads the contents of filename, described by info. -// If in is non-nil, readFile reads directly from it. -// Otherwise, readFile opens and reads the file itself, -// with the number of concurrently-open files limited by fdSem. -func readFile(filename string, info fs.FileInfo, in io.Reader) ([]byte, error) { - if in == nil { - fdSem <- true - var err error - f, err := os.Open(filename) - if err != nil { - return nil, err - } - in = f - defer func() { - f.Close() - <-fdSem - }() - } - - // Compute the file's size and read its contents with minimal allocations. - // - // If we have the FileInfo from filepath.WalkDir, use it to make - // a buffer of the right size and avoid ReadAll's reallocations. - // - // If the size is unknown (or bogus, or overflows an int), fall back to - // a size-independent ReadAll. - size := -1 - if info != nil && info.Mode().IsRegular() && int64(int(info.Size())) == info.Size() { - size = int(info.Size()) - } - if size+1 <= 0 { - // The file is not known to be regular, so we don't have a reliable size for it. - var err error - src, err := io.ReadAll(in) - if err != nil { - return nil, err - } - return src, nil - } - - // We try to read size+1 bytes so that we can detect modifications: if we - // read more than size bytes, then the file was modified concurrently. - // (If that happens, we could, say, append to src to finish the read, or - // proceed with a truncated buffer — but the fact that it changed at all - // indicates a possible race with someone editing the file, so we prefer to - // stop to avoid corrupting it.) 
- src := make([]byte, size+1) - n, err := io.ReadFull(in, src) - switch err { - case nil, io.EOF, io.ErrUnexpectedEOF: - // io.ReadFull returns io.EOF (for an empty file) or io.ErrUnexpectedEOF - // (for a non-empty file) if the file was changed unexpectedly. Continue - // with comparing file sizes in those cases. - default: - return nil, err - } - if n < size { - return nil, fmt.Errorf("error: size of %s changed during reading (from %d to %d bytes)", filename, size, n) - } else if n > size { - return nil, fmt.Errorf("error: size of %s changed during reading (from %d to >=%d bytes)", filename, size, len(src)) - } - return src[:n], nil -} - -func main() { - // Arbitrarily limit in-flight work to 2MiB times the number of threads. - // - // The actual overhead for the parse tree and output will depend on the - // specifics of the file, but this at least keeps the footprint of the process - // roughly proportional to GOMAXPROCS. - maxWeight := (2 << 20) * int64(runtime.GOMAXPROCS(0)) - s := newSequencer(maxWeight, os.Stdout, os.Stderr) - - // call gofmtMain in a separate function - // so that it can use defer and have them - // run before the exit. - gofmtMain(s) - os.Exit(s.GetExitCode()) -} - -func gofmtMain(s *sequencer) { - flag.Usage = usage - flag.Parse() - - if *cpuprofile != "" { - fdSem <- true - f, err := os.Create(*cpuprofile) - if err != nil { - s.AddReport(fmt.Errorf("creating cpu profile: %s", err)) - return - } - defer func() { - f.Close() - <-fdSem - }() - pprof.StartCPUProfile(f) - defer pprof.StopCPUProfile() - } - - initParserMode() - initRewrite() - - args := flag.Args() - if len(args) == 0 { - if *write { - s.AddReport(fmt.Errorf("error: cannot use -w with standard input")) - return - } - s.Add(0, func(r *reporter) error { - return processFile("", nil, os.Stdin, r) - }) - return - } - - for _, arg := range args { - switch info, err := os.Stat(arg); { - case err != nil: - s.AddReport(err) - case !info.IsDir(): - // Non-directory arguments are always formatted. - arg := arg - s.Add(fileWeight(arg, info), func(r *reporter) error { - return processFile(arg, info, nil, r) - }) - default: - // Directories are walked, ignoring non-Go files. - err := filepath.WalkDir(arg, func(path string, f fs.DirEntry, err error) error { - if err != nil || !isGoFile(f) { - return err - } - info, err := f.Info() - if err != nil { - s.AddReport(err) - return nil - } - s.Add(fileWeight(path, info), func(r *reporter) error { - return processFile(path, info, nil, r) - }) - return nil - }) - if err != nil { - s.AddReport(err) - } - } - } -} - -func fileWeight(path string, info fs.FileInfo) int64 { - if info == nil { - return exclusive - } - if info.Mode().Type() == fs.ModeSymlink { - var err error - info, err = os.Stat(path) - if err != nil { - return exclusive - } - } - if !info.Mode().IsRegular() { - // For non-regular files, FileInfo.Size is system-dependent and thus not a - // reliable indicator of weight. - return exclusive - } - return info.Size() -} - -const chmodSupported = runtime.GOOS != "windows" - -// backupFile writes data to a new file named filename with permissions perm, -// with 0 && isSpace(src[i-1]) { - i-- - } - return append(res, src[i:]...), nil -} - -// isSpace reports whether the byte is a space character. -// isSpace defines a space as being among the following bytes: ' ', '\t', '\n' and '\r'. 
-func isSpace(b byte) bool { - return b == ' ' || b == '\t' || b == '\n' || b == '\r' -} diff --git a/vendor/github.com/golangci/gofmt/gofmt/internal/diff/diff.go b/vendor/github.com/golangci/gofmt/gofmt/internal/diff/diff.go deleted file mode 100644 index 47b285671..000000000 --- a/vendor/github.com/golangci/gofmt/gofmt/internal/diff/diff.go +++ /dev/null @@ -1,261 +0,0 @@ -// Copyright 2022 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package diff - -import ( - "bytes" - "fmt" - "sort" - "strings" -) - -// A pair is a pair of values tracked for both the x and y side of a diff. -// It is typically a pair of line indexes. -type pair struct{ x, y int } - -// Diff returns an anchored diff of the two texts old and new -// in the “unified diff” format. If old and new are identical, -// Diff returns a nil slice (no output). -// -// Unix diff implementations typically look for a diff with -// the smallest number of lines inserted and removed, -// which can in the worst case take time quadratic in the -// number of lines in the texts. As a result, many implementations -// either can be made to run for a long time or cut off the search -// after a predetermined amount of work. -// -// In contrast, this implementation looks for a diff with the -// smallest number of “unique” lines inserted and removed, -// where unique means a line that appears just once in both old and new. -// We call this an “anchored diff” because the unique lines anchor -// the chosen matching regions. An anchored diff is usually clearer -// than a standard diff, because the algorithm does not try to -// reuse unrelated blank lines or closing braces. -// The algorithm also guarantees to run in O(n log n) time -// instead of the standard O(n²) time. -// -// Some systems call this approach a “patience diff,” named for -// the “patience sorting” algorithm, itself named for a solitaire card game. -// We avoid that name for two reasons. First, the name has been used -// for a few different variants of the algorithm, so it is imprecise. -// Second, the name is frequently interpreted as meaning that you have -// to wait longer (to be patient) for the diff, meaning that it is a slower algorithm, -// when in fact the algorithm is faster than the standard one. -func Diff(oldName string, old []byte, newName string, new []byte) []byte { - if bytes.Equal(old, new) { - return nil - } - x := lines(old) - y := lines(new) - - // Print diff header. - var out bytes.Buffer - fmt.Fprintf(&out, "diff %s %s\n", oldName, newName) - fmt.Fprintf(&out, "--- %s\n", oldName) - fmt.Fprintf(&out, "+++ %s\n", newName) - - // Loop over matches to consider, - // expanding each match to include surrounding lines, - // and then printing diff chunks. - // To avoid setup/teardown cases outside the loop, - // tgs returns a leading {0,0} and trailing {len(x), len(y)} pair - // in the sequence of matches. - var ( - done pair // printed up to x[:done.x] and y[:done.y] - chunk pair // start lines of current chunk - count pair // number of lines from each side in current chunk - ctext []string // lines for current chunk - ) - for _, m := range tgs(x, y) { - if m.x < done.x { - // Already handled scanning forward from earlier match. - continue - } - - // Expand matching lines as far possible, - // establishing that x[start.x:end.x] == y[start.y:end.y]. 
- // Note that on the first (or last) iteration we may (or definitey do) - // have an empty match: start.x==end.x and start.y==end.y. - start := m - for start.x > done.x && start.y > done.y && x[start.x-1] == y[start.y-1] { - start.x-- - start.y-- - } - end := m - for end.x < len(x) && end.y < len(y) && x[end.x] == y[end.y] { - end.x++ - end.y++ - } - - // Emit the mismatched lines before start into this chunk. - // (No effect on first sentinel iteration, when start = {0,0}.) - for _, s := range x[done.x:start.x] { - ctext = append(ctext, "-"+s) - count.x++ - } - for _, s := range y[done.y:start.y] { - ctext = append(ctext, "+"+s) - count.y++ - } - - // If we're not at EOF and have too few common lines, - // the chunk includes all the common lines and continues. - const C = 3 // number of context lines - if (end.x < len(x) || end.y < len(y)) && - (end.x-start.x < C || (len(ctext) > 0 && end.x-start.x < 2*C)) { - for _, s := range x[start.x:end.x] { - ctext = append(ctext, " "+s) - count.x++ - count.y++ - } - done = end - continue - } - - // End chunk with common lines for context. - if len(ctext) > 0 { - n := end.x - start.x - if n > C { - n = C - } - for _, s := range x[start.x : start.x+n] { - ctext = append(ctext, " "+s) - count.x++ - count.y++ - } - done = pair{start.x + n, start.y + n} - - // Format and emit chunk. - // Convert line numbers to 1-indexed. - // Special case: empty file shows up as 0,0 not 1,0. - if count.x > 0 { - chunk.x++ - } - if count.y > 0 { - chunk.y++ - } - fmt.Fprintf(&out, "@@ -%d,%d +%d,%d @@\n", chunk.x, count.x, chunk.y, count.y) - for _, s := range ctext { - out.WriteString(s) - } - count.x = 0 - count.y = 0 - ctext = ctext[:0] - } - - // If we reached EOF, we're done. - if end.x >= len(x) && end.y >= len(y) { - break - } - - // Otherwise start a new chunk. - chunk = pair{end.x - C, end.y - C} - for _, s := range x[chunk.x:end.x] { - ctext = append(ctext, " "+s) - count.x++ - count.y++ - } - done = end - } - - return out.Bytes() -} - -// lines returns the lines in the file x, including newlines. -// If the file does not end in a newline, one is supplied -// along with a warning about the missing newline. -func lines(x []byte) []string { - l := strings.SplitAfter(string(x), "\n") - if l[len(l)-1] == "" { - l = l[:len(l)-1] - } else { - // Treat last line as having a message about the missing newline attached, - // using the same text as BSD/GNU diff (including the leading backslash). - l[len(l)-1] += "\n\\ No newline at end of file\n" - } - return l -} - -// tgs returns the pairs of indexes of the longest common subsequence -// of unique lines in x and y, where a unique line is one that appears -// once in x and once in y. -// -// The longest common subsequence algorithm is as described in -// Thomas G. Szymanski, “A Special Case of the Maximal Common -// Subsequence Problem,” Princeton TR #170 (January 1975), -// available at https://research.swtch.com/tgs170.pdf. -func tgs(x, y []string) []pair { - // Count the number of times each string appears in a and b. - // We only care about 0, 1, many, counted as 0, -1, -2 - // for the x side and 0, -4, -8 for the y side. - // Using negative numbers now lets us distinguish positive line numbers later. - m := make(map[string]int) - for _, s := range x { - if c := m[s]; c > -2 { - m[s] = c - 1 - } - } - for _, s := range y { - if c := m[s]; c > -8 { - m[s] = c - 4 - } - } - - // Now unique strings can be identified by m[s] = -1+-4. 
- // - // Gather the indexes of those strings in x and y, building: - // xi[i] = increasing indexes of unique strings in x. - // yi[i] = increasing indexes of unique strings in y. - // inv[i] = index j such that x[xi[i]] = y[yi[j]]. - var xi, yi, inv []int - for i, s := range y { - if m[s] == -1+-4 { - m[s] = len(yi) - yi = append(yi, i) - } - } - for i, s := range x { - if j, ok := m[s]; ok && j >= 0 { - xi = append(xi, i) - inv = append(inv, j) - } - } - - // Apply Algorithm A from Szymanski's paper. - // In those terms, A = J = inv and B = [0, n). - // We add sentinel pairs {0,0}, and {len(x),len(y)} - // to the returned sequence, to help the processing loop. - J := inv - n := len(xi) - T := make([]int, n) - L := make([]int, n) - for i := range T { - T[i] = n + 1 - } - for i := 0; i < n; i++ { - k := sort.Search(n, func(k int) bool { - return T[k] >= J[i] - }) - T[k] = J[i] - L[i] = k + 1 - } - k := 0 - for _, v := range L { - if k < v { - k = v - } - } - seq := make([]pair, 2+k) - seq[1+k] = pair{len(x), len(y)} // sentinel at end - lastj := n - for i := n - 1; i >= 0; i-- { - if L[i] == k && J[i] < lastj { - seq[k] = pair{xi[i], yi[J[i]]} - k-- - } - } - seq[0] = pair{0, 0} // sentinel at start - return seq -} diff --git a/vendor/github.com/golangci/gofmt/gofmt/readme.md b/vendor/github.com/golangci/gofmt/gofmt/readme.md deleted file mode 100644 index c2faaab82..000000000 --- a/vendor/github.com/golangci/gofmt/gofmt/readme.md +++ /dev/null @@ -1,5 +0,0 @@ -# Hard Fork of gofmt - -2022-08-31: Sync with go1.18.5 -2023-10-04: Sync with go1.19.13 -2023-10-04: Sync with go1.20.8 diff --git a/vendor/github.com/golangci/gofmt/gofmt/rewrite.go b/vendor/github.com/golangci/gofmt/gofmt/rewrite.go deleted file mode 100644 index f1299a42b..000000000 --- a/vendor/github.com/golangci/gofmt/gofmt/rewrite.go +++ /dev/null @@ -1,311 +0,0 @@ -// Copyright 2009 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package gofmt - -import ( - "fmt" - "go/ast" - "go/parser" - "go/token" - "os" - "reflect" - "strings" - "unicode" - "unicode/utf8" -) - -func initRewrite() { - if *rewriteRule == "" { - rewrite = nil // disable any previous rewrite - return - } - f := strings.Split(*rewriteRule, "->") - if len(f) != 2 { - fmt.Fprintf(os.Stderr, "rewrite rule must be of the form 'pattern -> replacement'\n") - os.Exit(2) - } - pattern := parseExpr(f[0], "pattern") - replace := parseExpr(f[1], "replacement") - rewrite = func(fset *token.FileSet, p *ast.File) *ast.File { - return rewriteFile(fset, pattern, replace, p) - } -} - -// parseExpr parses s as an expression. -// It might make sense to expand this to allow statement patterns, -// but there are problems with preserving formatting and also -// with what a wildcard for a statement looks like. -func parseExpr(s, what string) ast.Expr { - x, err := parser.ParseExpr(s) - if err != nil { - fmt.Fprintf(os.Stderr, "parsing %s %s at %s\n", what, s, err) - os.Exit(2) - } - return x -} - -// Keep this function for debugging. -/* -func dump(msg string, val reflect.Value) { - fmt.Printf("%s:\n", msg) - ast.Print(fileSet, val.Interface()) - fmt.Println() -} -*/ - -// rewriteFile applies the rewrite rule 'pattern -> replace' to an entire file. 
-func rewriteFile(fileSet *token.FileSet, pattern, replace ast.Expr, p *ast.File) *ast.File { - cmap := ast.NewCommentMap(fileSet, p, p.Comments) - m := make(map[string]reflect.Value) - pat := reflect.ValueOf(pattern) - repl := reflect.ValueOf(replace) - - var rewriteVal func(val reflect.Value) reflect.Value - rewriteVal = func(val reflect.Value) reflect.Value { - // don't bother if val is invalid to start with - if !val.IsValid() { - return reflect.Value{} - } - val = apply(rewriteVal, val) - for k := range m { - delete(m, k) - } - if match(m, pat, val) { - val = subst(m, repl, reflect.ValueOf(val.Interface().(ast.Node).Pos())) - } - return val - } - - r := apply(rewriteVal, reflect.ValueOf(p)).Interface().(*ast.File) - r.Comments = cmap.Filter(r).Comments() // recreate comments list - return r -} - -// set is a wrapper for x.Set(y); it protects the caller from panics if x cannot be changed to y. -func set(x, y reflect.Value) { - // don't bother if x cannot be set or y is invalid - if !x.CanSet() || !y.IsValid() { - return - } - defer func() { - if x := recover(); x != nil { - if s, ok := x.(string); ok && - (strings.Contains(s, "type mismatch") || strings.Contains(s, "not assignable")) { - // x cannot be set to y - ignore this rewrite - return - } - panic(x) - } - }() - x.Set(y) -} - -// Values/types for special cases. -var ( - objectPtrNil = reflect.ValueOf((*ast.Object)(nil)) - scopePtrNil = reflect.ValueOf((*ast.Scope)(nil)) - - identType = reflect.TypeOf((*ast.Ident)(nil)) - objectPtrType = reflect.TypeOf((*ast.Object)(nil)) - positionType = reflect.TypeOf(token.NoPos) - callExprType = reflect.TypeOf((*ast.CallExpr)(nil)) - scopePtrType = reflect.TypeOf((*ast.Scope)(nil)) -) - -// apply replaces each AST field x in val with f(x), returning val. -// To avoid extra conversions, f operates on the reflect.Value form. -func apply(f func(reflect.Value) reflect.Value, val reflect.Value) reflect.Value { - if !val.IsValid() { - return reflect.Value{} - } - - // *ast.Objects introduce cycles and are likely incorrect after - // rewrite; don't follow them but replace with nil instead - if val.Type() == objectPtrType { - return objectPtrNil - } - - // similarly for scopes: they are likely incorrect after a rewrite; - // replace them with nil - if val.Type() == scopePtrType { - return scopePtrNil - } - - switch v := reflect.Indirect(val); v.Kind() { - case reflect.Slice: - for i := 0; i < v.Len(); i++ { - e := v.Index(i) - set(e, f(e)) - } - case reflect.Struct: - for i := 0; i < v.NumField(); i++ { - e := v.Field(i) - set(e, f(e)) - } - case reflect.Interface: - e := v.Elem() - set(v, f(e)) - } - return val -} - -func isWildcard(s string) bool { - rune, size := utf8.DecodeRuneInString(s) - return size == len(s) && unicode.IsLower(rune) -} - -// match reports whether pattern matches val, -// recording wildcard submatches in m. -// If m == nil, match checks whether pattern == val. -func match(m map[string]reflect.Value, pattern, val reflect.Value) bool { - // Wildcard matches any expression. If it appears multiple - // times in the pattern, it must match the same expression - // each time. - if m != nil && pattern.IsValid() && pattern.Type() == identType { - name := pattern.Interface().(*ast.Ident).Name - if isWildcard(name) && val.IsValid() { - // wildcards only match valid (non-nil) expressions. 
- if _, ok := val.Interface().(ast.Expr); ok && !val.IsNil() { - if old, ok := m[name]; ok { - return match(nil, old, val) - } - m[name] = val - return true - } - } - } - - // Otherwise, pattern and val must match recursively. - if !pattern.IsValid() || !val.IsValid() { - return !pattern.IsValid() && !val.IsValid() - } - if pattern.Type() != val.Type() { - return false - } - - // Special cases. - switch pattern.Type() { - case identType: - // For identifiers, only the names need to match - // (and none of the other *ast.Object information). - // This is a common case, handle it all here instead - // of recursing down any further via reflection. - p := pattern.Interface().(*ast.Ident) - v := val.Interface().(*ast.Ident) - return p == nil && v == nil || p != nil && v != nil && p.Name == v.Name - case objectPtrType, positionType: - // object pointers and token positions always match - return true - case callExprType: - // For calls, the Ellipsis fields (token.Position) must - // match since that is how f(x) and f(x...) are different. - // Check them here but fall through for the remaining fields. - p := pattern.Interface().(*ast.CallExpr) - v := val.Interface().(*ast.CallExpr) - if p.Ellipsis.IsValid() != v.Ellipsis.IsValid() { - return false - } - } - - p := reflect.Indirect(pattern) - v := reflect.Indirect(val) - if !p.IsValid() || !v.IsValid() { - return !p.IsValid() && !v.IsValid() - } - - switch p.Kind() { - case reflect.Slice: - if p.Len() != v.Len() { - return false - } - for i := 0; i < p.Len(); i++ { - if !match(m, p.Index(i), v.Index(i)) { - return false - } - } - return true - - case reflect.Struct: - for i := 0; i < p.NumField(); i++ { - if !match(m, p.Field(i), v.Field(i)) { - return false - } - } - return true - - case reflect.Interface: - return match(m, p.Elem(), v.Elem()) - } - - // Handle token integers, etc. - return p.Interface() == v.Interface() -} - -// subst returns a copy of pattern with values from m substituted in place -// of wildcards and pos used as the position of tokens from the pattern. -// if m == nil, subst returns a copy of pattern and doesn't change the line -// number information. -func subst(m map[string]reflect.Value, pattern reflect.Value, pos reflect.Value) reflect.Value { - if !pattern.IsValid() { - return reflect.Value{} - } - - // Wildcard gets replaced with map value. - if m != nil && pattern.Type() == identType { - name := pattern.Interface().(*ast.Ident).Name - if isWildcard(name) { - if old, ok := m[name]; ok { - return subst(nil, old, reflect.Value{}) - } - } - } - - if pos.IsValid() && pattern.Type() == positionType { - // use new position only if old position was valid in the first place - if old := pattern.Interface().(token.Pos); !old.IsValid() { - return pattern - } - return pos - } - - // Otherwise copy. - switch p := pattern; p.Kind() { - case reflect.Slice: - if p.IsNil() { - // Do not turn nil slices into empty slices. go/ast - // guarantees that certain lists will be nil if not - // populated. 
- return reflect.Zero(p.Type()) - } - v := reflect.MakeSlice(p.Type(), p.Len(), p.Len()) - for i := 0; i < p.Len(); i++ { - v.Index(i).Set(subst(m, p.Index(i), pos)) - } - return v - - case reflect.Struct: - v := reflect.New(p.Type()).Elem() - for i := 0; i < p.NumField(); i++ { - v.Field(i).Set(subst(m, p.Field(i), pos)) - } - return v - - case reflect.Pointer: - v := reflect.New(p.Type()).Elem() - if elem := p.Elem(); elem.IsValid() { - v.Set(subst(m, elem, pos).Addr()) - } - return v - - case reflect.Interface: - v := reflect.New(p.Type()).Elem() - if elem := p.Elem(); elem.IsValid() { - v.Set(subst(m, elem, pos)) - } - return v - } - - return pattern -} diff --git a/vendor/github.com/golangci/gofmt/gofmt/simplify.go b/vendor/github.com/golangci/gofmt/gofmt/simplify.go deleted file mode 100644 index 3b34d562b..000000000 --- a/vendor/github.com/golangci/gofmt/gofmt/simplify.go +++ /dev/null @@ -1,169 +0,0 @@ -// Copyright 2010 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package gofmt - -import ( - "go/ast" - "go/token" - "reflect" -) - -type simplifier struct{} - -func (s simplifier) Visit(node ast.Node) ast.Visitor { - switch n := node.(type) { - case *ast.CompositeLit: - // array, slice, and map composite literals may be simplified - outer := n - var keyType, eltType ast.Expr - switch typ := outer.Type.(type) { - case *ast.ArrayType: - eltType = typ.Elt - case *ast.MapType: - keyType = typ.Key - eltType = typ.Value - } - - if eltType != nil { - var ktyp reflect.Value - if keyType != nil { - ktyp = reflect.ValueOf(keyType) - } - typ := reflect.ValueOf(eltType) - for i, x := range outer.Elts { - px := &outer.Elts[i] - // look at value of indexed/named elements - if t, ok := x.(*ast.KeyValueExpr); ok { - if keyType != nil { - s.simplifyLiteral(ktyp, keyType, t.Key, &t.Key) - } - x = t.Value - px = &t.Value - } - s.simplifyLiteral(typ, eltType, x, px) - } - // node was simplified - stop walk (there are no subnodes to simplify) - return nil - } - - case *ast.SliceExpr: - // a slice expression of the form: s[a:len(s)] - // can be simplified to: s[a:] - // if s is "simple enough" (for now we only accept identifiers) - // - // Note: This may not be correct because len may have been redeclared in - // the same package. However, this is extremely unlikely and so far - // (April 2022, after years of supporting this rewrite feature) - // has never come up, so let's keep it working as is (see also #15153). - // - // Also note that this code used to use go/ast's object tracking, - // which was removed in exchange for go/parser.Mode.SkipObjectResolution. - // False positives are extremely unlikely as described above, - // and go/ast's object tracking is incomplete in any case. 
- if n.Max != nil { - // - 3-index slices always require the 2nd and 3rd index - break - } - if s, _ := n.X.(*ast.Ident); s != nil { - // the array/slice object is a single identifier - if call, _ := n.High.(*ast.CallExpr); call != nil && len(call.Args) == 1 && !call.Ellipsis.IsValid() { - // the high expression is a function call with a single argument - if fun, _ := call.Fun.(*ast.Ident); fun != nil && fun.Name == "len" { - // the function called is "len" - if arg, _ := call.Args[0].(*ast.Ident); arg != nil && arg.Name == s.Name { - // the len argument is the array/slice object - n.High = nil - } - } - } - } - // Note: We could also simplify slice expressions of the form s[0:b] to s[:b] - // but we leave them as is since sometimes we want to be very explicit - // about the lower bound. - // An example where the 0 helps: - // x, y, z := b[0:2], b[2:4], b[4:6] - // An example where it does not: - // x, y := b[:n], b[n:] - - case *ast.RangeStmt: - // - a range of the form: for x, _ = range v {...} - // can be simplified to: for x = range v {...} - // - a range of the form: for _ = range v {...} - // can be simplified to: for range v {...} - if isBlank(n.Value) { - n.Value = nil - } - if isBlank(n.Key) && n.Value == nil { - n.Key = nil - } - } - - return s -} - -func (s simplifier) simplifyLiteral(typ reflect.Value, astType, x ast.Expr, px *ast.Expr) { - ast.Walk(s, x) // simplify x - - // if the element is a composite literal and its literal type - // matches the outer literal's element type exactly, the inner - // literal type may be omitted - if inner, ok := x.(*ast.CompositeLit); ok { - if match(nil, typ, reflect.ValueOf(inner.Type)) { - inner.Type = nil - } - } - // if the outer literal's element type is a pointer type *T - // and the element is & of a composite literal of type T, - // the inner &T may be omitted. - if ptr, ok := astType.(*ast.StarExpr); ok { - if addr, ok := x.(*ast.UnaryExpr); ok && addr.Op == token.AND { - if inner, ok := addr.X.(*ast.CompositeLit); ok { - if match(nil, reflect.ValueOf(ptr.X), reflect.ValueOf(inner.Type)) { - inner.Type = nil // drop T - *px = inner // drop & - } - } - } - } -} - -func isBlank(x ast.Expr) bool { - ident, ok := x.(*ast.Ident) - return ok && ident.Name == "_" -} - -func simplify(f *ast.File) { - // remove empty declarations such as "const ()", etc - removeEmptyDeclGroups(f) - - var s simplifier - ast.Walk(s, f) -} - -func removeEmptyDeclGroups(f *ast.File) { - i := 0 - for _, d := range f.Decls { - if g, ok := d.(*ast.GenDecl); !ok || !isEmpty(f, g) { - f.Decls[i] = d - i++ - } - } - f.Decls = f.Decls[:i] -} - -func isEmpty(f *ast.File, g *ast.GenDecl) bool { - if g.Doc != nil || g.Specs != nil { - return false - } - - for _, c := range f.Comments { - // if there is a comment in the declaration, it is not considered empty - if g.Pos() <= c.Pos() && c.End() <= g.End() { - return false - } - } - - return true -} diff --git a/vendor/github.com/golangci/gofmt/goimports/LICENSE b/vendor/github.com/golangci/gofmt/goimports/LICENSE deleted file mode 100644 index 6a66aea5e..000000000 --- a/vendor/github.com/golangci/gofmt/goimports/LICENSE +++ /dev/null @@ -1,27 +0,0 @@ -Copyright (c) 2009 The Go Authors. All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are -met: - - * Redistributions of source code must retain the above copyright -notice, this list of conditions and the following disclaimer. 
- * Redistributions in binary form must reproduce the above -copyright notice, this list of conditions and the following disclaimer -in the documentation and/or other materials provided with the -distribution. - * Neither the name of Google Inc. nor the names of its -contributors may be used to endorse or promote products derived from -this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/vendor/github.com/golangci/gofmt/goimports/goimports.go b/vendor/github.com/golangci/gofmt/goimports/goimports.go deleted file mode 100644 index 20d92e119..000000000 --- a/vendor/github.com/golangci/gofmt/goimports/goimports.go +++ /dev/null @@ -1,89 +0,0 @@ -// Copyright 2013 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package goimports - -import ( - "bytes" - "fmt" - "io/ioutil" - "os" - "os/exec" - "path/filepath" - "runtime" -) - -// Extracted from golang.org/x/tools@v0.13.0/cmd/goimports/goimports.go - -func writeTempFile(dir, prefix string, data []byte) (string, error) { - file, err := ioutil.TempFile(dir, prefix) - if err != nil { - return "", err - } - _, err = file.Write(data) - if err1 := file.Close(); err == nil { - err = err1 - } - if err != nil { - os.Remove(file.Name()) - return "", err - } - return file.Name(), nil -} - -func diff(b1, b2 []byte, filename string) (data []byte, err error) { - f1, err := writeTempFile("", "gofmt", b1) - if err != nil { - return - } - defer os.Remove(f1) - - f2, err := writeTempFile("", "gofmt", b2) - if err != nil { - return - } - defer os.Remove(f2) - - cmd := "diff" - if runtime.GOOS == "plan9" { - cmd = "/bin/ape/diff" - } - - data, err = exec.Command(cmd, "-u", f1, f2).CombinedOutput() - if len(data) > 0 { - // diff exits with a non-zero status when the files don't match. - // Ignore that failure as long as we get output. - return replaceTempFilename(data, filename) - } - return -} - -// replaceTempFilename replaces temporary filenames in diff with actual one. -// -// --- /tmp/gofmt316145376 2017-02-03 19:13:00.280468375 -0500 -// +++ /tmp/gofmt617882815 2017-02-03 19:13:00.280468375 -0500 -// ... -// -> -// --- path/to/file.go.orig 2017-02-03 19:13:00.280468375 -0500 -// +++ path/to/file.go 2017-02-03 19:13:00.280468375 -0500 -// ... -func replaceTempFilename(diff []byte, filename string) ([]byte, error) { - bs := bytes.SplitN(diff, []byte{'\n'}, 3) - if len(bs) < 3 { - return nil, fmt.Errorf("got unexpected diff for %s", filename) - } - // Preserve timestamps. - var t0, t1 []byte - if i := bytes.LastIndexByte(bs[0], '\t'); i != -1 { - t0 = bs[0][i:] - } - if i := bytes.LastIndexByte(bs[1], '\t'); i != -1 { - t1 = bs[1][i:] - } - // Always print filepath with slash separator. 
- f := filepath.ToSlash(filename) - bs[0] = []byte(fmt.Sprintf("--- %s%s", f+".orig", t0)) - bs[1] = []byte(fmt.Sprintf("+++ %s%s", f, t1)) - return bytes.Join(bs, []byte{'\n'}), nil -} diff --git a/vendor/github.com/golangci/gofmt/goimports/golangci.go b/vendor/github.com/golangci/gofmt/goimports/golangci.go deleted file mode 100644 index 6ff286ae0..000000000 --- a/vendor/github.com/golangci/gofmt/goimports/golangci.go +++ /dev/null @@ -1,35 +0,0 @@ -package goimports - -import ( - "bytes" - "fmt" - "os" - - "golang.org/x/tools/imports" -) - -// Run runs goimports. -// The local prefixes (comma separated) must be defined through the global variable imports.LocalPrefix. -func Run(filename string) ([]byte, error) { - src, err := os.ReadFile(filename) - if err != nil { - return nil, err - } - - res, err := imports.Process(filename, src, nil) - if err != nil { - return nil, err - } - - if bytes.Equal(src, res) { - return nil, nil - } - - // formatting has changed - data, err := diff(src, res, filename) - if err != nil { - return nil, fmt.Errorf("error computing diff: %s", err) - } - - return data, nil -} diff --git a/vendor/github.com/golangci/gofmt/goimports/readme.md b/vendor/github.com/golangci/gofmt/goimports/readme.md deleted file mode 100644 index e57ed550b..000000000 --- a/vendor/github.com/golangci/gofmt/goimports/readme.md +++ /dev/null @@ -1,4 +0,0 @@ -# Hard Fork of goimports - -2022-08-31: Sync with golang.org/x/tools v0.1.12 -2023-10-04: Sync with golang.org/x/tools v0.13.0 diff --git a/vendor/github.com/golangci/golangci-lint/LICENSE b/vendor/github.com/golangci/golangci-lint/LICENSE deleted file mode 100644 index e72bfddab..000000000 --- a/vendor/github.com/golangci/golangci-lint/LICENSE +++ /dev/null @@ -1,674 +0,0 @@ - GNU GENERAL PUBLIC LICENSE - Version 3, 29 June 2007 - - Copyright (C) 2007 Free Software Foundation, Inc. - Everyone is permitted to copy and distribute verbatim copies - of this license document, but changing it is not allowed. - - Preamble - - The GNU General Public License is a free, copyleft license for -software and other kinds of works. - - The licenses for most software and other practical works are designed -to take away your freedom to share and change the works. By contrast, -the GNU General Public License is intended to guarantee your freedom to -share and change all versions of a program--to make sure it remains free -software for all its users. We, the Free Software Foundation, use the -GNU General Public License for most of our software; it applies also to -any other work released this way by its authors. You can apply it to -your programs, too. - - When we speak of free software, we are referring to freedom, not -price. Our General Public Licenses are designed to make sure that you -have the freedom to distribute copies of free software (and charge for -them if you wish), that you receive source code or can get it if you -want it, that you can change the software or use pieces of it in new -free programs, and that you know you can do these things. - - To protect your rights, we need to prevent others from denying you -these rights or asking you to surrender the rights. Therefore, you have -certain responsibilities if you distribute copies of the software, or if -you modify it: responsibilities to respect the freedom of others. - - For example, if you distribute copies of such a program, whether -gratis or for a fee, you must pass on to the recipients the same -freedoms that you received. 
You must make sure that they, too, receive -or can get the source code. And you must show them these terms so they -know their rights. - - Developers that use the GNU GPL protect your rights with two steps: -(1) assert copyright on the software, and (2) offer you this License -giving you legal permission to copy, distribute and/or modify it. - - For the developers' and authors' protection, the GPL clearly explains -that there is no warranty for this free software. For both users' and -authors' sake, the GPL requires that modified versions be marked as -changed, so that their problems will not be attributed erroneously to -authors of previous versions. - - Some devices are designed to deny users access to install or run -modified versions of the software inside them, although the manufacturer -can do so. This is fundamentally incompatible with the aim of -protecting users' freedom to change the software. The systematic -pattern of such abuse occurs in the area of products for individuals to -use, which is precisely where it is most unacceptable. Therefore, we -have designed this version of the GPL to prohibit the practice for those -products. If such problems arise substantially in other domains, we -stand ready to extend this provision to those domains in future versions -of the GPL, as needed to protect the freedom of users. - - Finally, every program is threatened constantly by software patents. -States should not allow patents to restrict development and use of -software on general-purpose computers, but in those that do, we wish to -avoid the special danger that patents applied to a free program could -make it effectively proprietary. To prevent this, the GPL assures that -patents cannot be used to render the program non-free. - - The precise terms and conditions for copying, distribution and -modification follow. - - TERMS AND CONDITIONS - - 0. Definitions. - - "This License" refers to version 3 of the GNU General Public License. - - "Copyright" also means copyright-like laws that apply to other kinds of -works, such as semiconductor masks. - - "The Program" refers to any copyrightable work licensed under this -License. Each licensee is addressed as "you". "Licensees" and -"recipients" may be individuals or organizations. - - To "modify" a work means to copy from or adapt all or part of the work -in a fashion requiring copyright permission, other than the making of an -exact copy. The resulting work is called a "modified version" of the -earlier work or a work "based on" the earlier work. - - A "covered work" means either the unmodified Program or a work based -on the Program. - - To "propagate" a work means to do anything with it that, without -permission, would make you directly or secondarily liable for -infringement under applicable copyright law, except executing it on a -computer or modifying a private copy. Propagation includes copying, -distribution (with or without modification), making available to the -public, and in some countries other activities as well. - - To "convey" a work means any kind of propagation that enables other -parties to make or receive copies. Mere interaction with a user through -a computer network, with no transfer of a copy, is not conveying. 
- - An interactive user interface displays "Appropriate Legal Notices" -to the extent that it includes a convenient and prominently visible -feature that (1) displays an appropriate copyright notice, and (2) -tells the user that there is no warranty for the work (except to the -extent that warranties are provided), that licensees may convey the -work under this License, and how to view a copy of this License. If -the interface presents a list of user commands or options, such as a -menu, a prominent item in the list meets this criterion. - - 1. Source Code. - - The "source code" for a work means the preferred form of the work -for making modifications to it. "Object code" means any non-source -form of a work. - - A "Standard Interface" means an interface that either is an official -standard defined by a recognized standards body, or, in the case of -interfaces specified for a particular programming language, one that -is widely used among developers working in that language. - - The "System Libraries" of an executable work include anything, other -than the work as a whole, that (a) is included in the normal form of -packaging a Major Component, but which is not part of that Major -Component, and (b) serves only to enable use of the work with that -Major Component, or to implement a Standard Interface for which an -implementation is available to the public in source code form. A -"Major Component", in this context, means a major essential component -(kernel, window system, and so on) of the specific operating system -(if any) on which the executable work runs, or a compiler used to -produce the work, or an object code interpreter used to run it. - - The "Corresponding Source" for a work in object code form means all -the source code needed to generate, install, and (for an executable -work) run the object code and to modify the work, including scripts to -control those activities. However, it does not include the work's -System Libraries, or general-purpose tools or generally available free -programs which are used unmodified in performing those activities but -which are not part of the work. For example, Corresponding Source -includes interface definition files associated with source files for -the work, and the source code for shared libraries and dynamically -linked subprograms that the work is specifically designed to require, -such as by intimate data communication or control flow between those -subprograms and other parts of the work. - - The Corresponding Source need not include anything that users -can regenerate automatically from other parts of the Corresponding -Source. - - The Corresponding Source for a work in source code form is that -same work. - - 2. Basic Permissions. - - All rights granted under this License are granted for the term of -copyright on the Program, and are irrevocable provided the stated -conditions are met. This License explicitly affirms your unlimited -permission to run the unmodified Program. The output from running a -covered work is covered by this License only if the output, given its -content, constitutes a covered work. This License acknowledges your -rights of fair use or other equivalent, as provided by copyright law. - - You may make, run and propagate covered works that you do not -convey, without conditions so long as your license otherwise remains -in force. 
You may convey covered works to others for the sole purpose -of having them make modifications exclusively for you, or provide you -with facilities for running those works, provided that you comply with -the terms of this License in conveying all material for which you do -not control copyright. Those thus making or running the covered works -for you must do so exclusively on your behalf, under your direction -and control, on terms that prohibit them from making any copies of -your copyrighted material outside their relationship with you. - - Conveying under any other circumstances is permitted solely under -the conditions stated below. Sublicensing is not allowed; section 10 -makes it unnecessary. - - 3. Protecting Users' Legal Rights From Anti-Circumvention Law. - - No covered work shall be deemed part of an effective technological -measure under any applicable law fulfilling obligations under article -11 of the WIPO copyright treaty adopted on 20 December 1996, or -similar laws prohibiting or restricting circumvention of such -measures. - - When you convey a covered work, you waive any legal power to forbid -circumvention of technological measures to the extent such circumvention -is effected by exercising rights under this License with respect to -the covered work, and you disclaim any intention to limit operation or -modification of the work as a means of enforcing, against the work's -users, your or third parties' legal rights to forbid circumvention of -technological measures. - - 4. Conveying Verbatim Copies. - - You may convey verbatim copies of the Program's source code as you -receive it, in any medium, provided that you conspicuously and -appropriately publish on each copy an appropriate copyright notice; -keep intact all notices stating that this License and any -non-permissive terms added in accord with section 7 apply to the code; -keep intact all notices of the absence of any warranty; and give all -recipients a copy of this License along with the Program. - - You may charge any price or no price for each copy that you convey, -and you may offer support or warranty protection for a fee. - - 5. Conveying Modified Source Versions. - - You may convey a work based on the Program, or the modifications to -produce it from the Program, in the form of source code under the -terms of section 4, provided that you also meet all of these conditions: - - a) The work must carry prominent notices stating that you modified - it, and giving a relevant date. - - b) The work must carry prominent notices stating that it is - released under this License and any conditions added under section - 7. This requirement modifies the requirement in section 4 to - "keep intact all notices". - - c) You must license the entire work, as a whole, under this - License to anyone who comes into possession of a copy. This - License will therefore apply, along with any applicable section 7 - additional terms, to the whole of the work, and all its parts, - regardless of how they are packaged. This License gives no - permission to license the work in any other way, but it does not - invalidate such permission if you have separately received it. - - d) If the work has interactive user interfaces, each must display - Appropriate Legal Notices; however, if the Program has interactive - interfaces that do not display Appropriate Legal Notices, your - work need not make them do so. 
- - A compilation of a covered work with other separate and independent -works, which are not by their nature extensions of the covered work, -and which are not combined with it such as to form a larger program, -in or on a volume of a storage or distribution medium, is called an -"aggregate" if the compilation and its resulting copyright are not -used to limit the access or legal rights of the compilation's users -beyond what the individual works permit. Inclusion of a covered work -in an aggregate does not cause this License to apply to the other -parts of the aggregate. - - 6. Conveying Non-Source Forms. - - You may convey a covered work in object code form under the terms -of sections 4 and 5, provided that you also convey the -machine-readable Corresponding Source under the terms of this License, -in one of these ways: - - a) Convey the object code in, or embodied in, a physical product - (including a physical distribution medium), accompanied by the - Corresponding Source fixed on a durable physical medium - customarily used for software interchange. - - b) Convey the object code in, or embodied in, a physical product - (including a physical distribution medium), accompanied by a - written offer, valid for at least three years and valid for as - long as you offer spare parts or customer support for that product - model, to give anyone who possesses the object code either (1) a - copy of the Corresponding Source for all the software in the - product that is covered by this License, on a durable physical - medium customarily used for software interchange, for a price no - more than your reasonable cost of physically performing this - conveying of source, or (2) access to copy the - Corresponding Source from a network server at no charge. - - c) Convey individual copies of the object code with a copy of the - written offer to provide the Corresponding Source. This - alternative is allowed only occasionally and noncommercially, and - only if you received the object code with such an offer, in accord - with subsection 6b. - - d) Convey the object code by offering access from a designated - place (gratis or for a charge), and offer equivalent access to the - Corresponding Source in the same way through the same place at no - further charge. You need not require recipients to copy the - Corresponding Source along with the object code. If the place to - copy the object code is a network server, the Corresponding Source - may be on a different server (operated by you or a third party) - that supports equivalent copying facilities, provided you maintain - clear directions next to the object code saying where to find the - Corresponding Source. Regardless of what server hosts the - Corresponding Source, you remain obligated to ensure that it is - available for as long as needed to satisfy these requirements. - - e) Convey the object code using peer-to-peer transmission, provided - you inform other peers where the object code and Corresponding - Source of the work are being offered to the general public at no - charge under subsection 6d. - - A separable portion of the object code, whose source code is excluded -from the Corresponding Source as a System Library, need not be -included in conveying the object code work. - - A "User Product" is either (1) a "consumer product", which means any -tangible personal property which is normally used for personal, family, -or household purposes, or (2) anything designed or sold for incorporation -into a dwelling. 
In determining whether a product is a consumer product, -doubtful cases shall be resolved in favor of coverage. For a particular -product received by a particular user, "normally used" refers to a -typical or common use of that class of product, regardless of the status -of the particular user or of the way in which the particular user -actually uses, or expects or is expected to use, the product. A product -is a consumer product regardless of whether the product has substantial -commercial, industrial or non-consumer uses, unless such uses represent -the only significant mode of use of the product. - - "Installation Information" for a User Product means any methods, -procedures, authorization keys, or other information required to install -and execute modified versions of a covered work in that User Product from -a modified version of its Corresponding Source. The information must -suffice to ensure that the continued functioning of the modified object -code is in no case prevented or interfered with solely because -modification has been made. - - If you convey an object code work under this section in, or with, or -specifically for use in, a User Product, and the conveying occurs as -part of a transaction in which the right of possession and use of the -User Product is transferred to the recipient in perpetuity or for a -fixed term (regardless of how the transaction is characterized), the -Corresponding Source conveyed under this section must be accompanied -by the Installation Information. But this requirement does not apply -if neither you nor any third party retains the ability to install -modified object code on the User Product (for example, the work has -been installed in ROM). - - The requirement to provide Installation Information does not include a -requirement to continue to provide support service, warranty, or updates -for a work that has been modified or installed by the recipient, or for -the User Product in which it has been modified or installed. Access to a -network may be denied when the modification itself materially and -adversely affects the operation of the network or violates the rules and -protocols for communication across the network. - - Corresponding Source conveyed, and Installation Information provided, -in accord with this section must be in a format that is publicly -documented (and with an implementation available to the public in -source code form), and must require no special password or key for -unpacking, reading or copying. - - 7. Additional Terms. - - "Additional permissions" are terms that supplement the terms of this -License by making exceptions from one or more of its conditions. -Additional permissions that are applicable to the entire Program shall -be treated as though they were included in this License, to the extent -that they are valid under applicable law. If additional permissions -apply only to part of the Program, that part may be used separately -under those permissions, but the entire Program remains governed by -this License without regard to the additional permissions. - - When you convey a copy of a covered work, you may at your option -remove any additional permissions from that copy, or from any part of -it. (Additional permissions may be written to require their own -removal in certain cases when you modify the work.) You may place -additional permissions on material, added by you to a covered work, -for which you have or can give appropriate copyright permission. 
- - Notwithstanding any other provision of this License, for material you -add to a covered work, you may (if authorized by the copyright holders of -that material) supplement the terms of this License with terms: - - a) Disclaiming warranty or limiting liability differently from the - terms of sections 15 and 16 of this License; or - - b) Requiring preservation of specified reasonable legal notices or - author attributions in that material or in the Appropriate Legal - Notices displayed by works containing it; or - - c) Prohibiting misrepresentation of the origin of that material, or - requiring that modified versions of such material be marked in - reasonable ways as different from the original version; or - - d) Limiting the use for publicity purposes of names of licensors or - authors of the material; or - - e) Declining to grant rights under trademark law for use of some - trade names, trademarks, or service marks; or - - f) Requiring indemnification of licensors and authors of that - material by anyone who conveys the material (or modified versions of - it) with contractual assumptions of liability to the recipient, for - any liability that these contractual assumptions directly impose on - those licensors and authors. - - All other non-permissive additional terms are considered "further -restrictions" within the meaning of section 10. If the Program as you -received it, or any part of it, contains a notice stating that it is -governed by this License along with a term that is a further -restriction, you may remove that term. If a license document contains -a further restriction but permits relicensing or conveying under this -License, you may add to a covered work material governed by the terms -of that license document, provided that the further restriction does -not survive such relicensing or conveying. - - If you add terms to a covered work in accord with this section, you -must place, in the relevant source files, a statement of the -additional terms that apply to those files, or a notice indicating -where to find the applicable terms. - - Additional terms, permissive or non-permissive, may be stated in the -form of a separately written license, or stated as exceptions; -the above requirements apply either way. - - 8. Termination. - - You may not propagate or modify a covered work except as expressly -provided under this License. Any attempt otherwise to propagate or -modify it is void, and will automatically terminate your rights under -this License (including any patent licenses granted under the third -paragraph of section 11). - - However, if you cease all violation of this License, then your -license from a particular copyright holder is reinstated (a) -provisionally, unless and until the copyright holder explicitly and -finally terminates your license, and (b) permanently, if the copyright -holder fails to notify you of the violation by some reasonable means -prior to 60 days after the cessation. - - Moreover, your license from a particular copyright holder is -reinstated permanently if the copyright holder notifies you of the -violation by some reasonable means, this is the first time you have -received notice of violation of this License (for any work) from that -copyright holder, and you cure the violation prior to 30 days after -your receipt of the notice. - - Termination of your rights under this section does not terminate the -licenses of parties who have received copies or rights from you under -this License. 
If your rights have been terminated and not permanently -reinstated, you do not qualify to receive new licenses for the same -material under section 10. - - 9. Acceptance Not Required for Having Copies. - - You are not required to accept this License in order to receive or -run a copy of the Program. Ancillary propagation of a covered work -occurring solely as a consequence of using peer-to-peer transmission -to receive a copy likewise does not require acceptance. However, -nothing other than this License grants you permission to propagate or -modify any covered work. These actions infringe copyright if you do -not accept this License. Therefore, by modifying or propagating a -covered work, you indicate your acceptance of this License to do so. - - 10. Automatic Licensing of Downstream Recipients. - - Each time you convey a covered work, the recipient automatically -receives a license from the original licensors, to run, modify and -propagate that work, subject to this License. You are not responsible -for enforcing compliance by third parties with this License. - - An "entity transaction" is a transaction transferring control of an -organization, or substantially all assets of one, or subdividing an -organization, or merging organizations. If propagation of a covered -work results from an entity transaction, each party to that -transaction who receives a copy of the work also receives whatever -licenses to the work the party's predecessor in interest had or could -give under the previous paragraph, plus a right to possession of the -Corresponding Source of the work from the predecessor in interest, if -the predecessor has it or can get it with reasonable efforts. - - You may not impose any further restrictions on the exercise of the -rights granted or affirmed under this License. For example, you may -not impose a license fee, royalty, or other charge for exercise of -rights granted under this License, and you may not initiate litigation -(including a cross-claim or counterclaim in a lawsuit) alleging that -any patent claim is infringed by making, using, selling, offering for -sale, or importing the Program or any portion of it. - - 11. Patents. - - A "contributor" is a copyright holder who authorizes use under this -License of the Program or a work on which the Program is based. The -work thus licensed is called the contributor's "contributor version". - - A contributor's "essential patent claims" are all patent claims -owned or controlled by the contributor, whether already acquired or -hereafter acquired, that would be infringed by some manner, permitted -by this License, of making, using, or selling its contributor version, -but do not include claims that would be infringed only as a -consequence of further modification of the contributor version. For -purposes of this definition, "control" includes the right to grant -patent sublicenses in a manner consistent with the requirements of -this License. - - Each contributor grants you a non-exclusive, worldwide, royalty-free -patent license under the contributor's essential patent claims, to -make, use, sell, offer for sale, import and otherwise run, modify and -propagate the contents of its contributor version. - - In the following three paragraphs, a "patent license" is any express -agreement or commitment, however denominated, not to enforce a patent -(such as an express permission to practice a patent or covenant not to -sue for patent infringement). 
To "grant" such a patent license to a -party means to make such an agreement or commitment not to enforce a -patent against the party. - - If you convey a covered work, knowingly relying on a patent license, -and the Corresponding Source of the work is not available for anyone -to copy, free of charge and under the terms of this License, through a -publicly available network server or other readily accessible means, -then you must either (1) cause the Corresponding Source to be so -available, or (2) arrange to deprive yourself of the benefit of the -patent license for this particular work, or (3) arrange, in a manner -consistent with the requirements of this License, to extend the patent -license to downstream recipients. "Knowingly relying" means you have -actual knowledge that, but for the patent license, your conveying the -covered work in a country, or your recipient's use of the covered work -in a country, would infringe one or more identifiable patents in that -country that you have reason to believe are valid. - - If, pursuant to or in connection with a single transaction or -arrangement, you convey, or propagate by procuring conveyance of, a -covered work, and grant a patent license to some of the parties -receiving the covered work authorizing them to use, propagate, modify -or convey a specific copy of the covered work, then the patent license -you grant is automatically extended to all recipients of the covered -work and works based on it. - - A patent license is "discriminatory" if it does not include within -the scope of its coverage, prohibits the exercise of, or is -conditioned on the non-exercise of one or more of the rights that are -specifically granted under this License. You may not convey a covered -work if you are a party to an arrangement with a third party that is -in the business of distributing software, under which you make payment -to the third party based on the extent of your activity of conveying -the work, and under which the third party grants, to any of the -parties who would receive the covered work from you, a discriminatory -patent license (a) in connection with copies of the covered work -conveyed by you (or copies made from those copies), or (b) primarily -for and in connection with specific products or compilations that -contain the covered work, unless you entered into that arrangement, -or that patent license was granted, prior to 28 March 2007. - - Nothing in this License shall be construed as excluding or limiting -any implied license or other defenses to infringement that may -otherwise be available to you under applicable patent law. - - 12. No Surrender of Others' Freedom. - - If conditions are imposed on you (whether by court order, agreement or -otherwise) that contradict the conditions of this License, they do not -excuse you from the conditions of this License. If you cannot convey a -covered work so as to satisfy simultaneously your obligations under this -License and any other pertinent obligations, then as a consequence you may -not convey it at all. For example, if you agree to terms that obligate you -to collect a royalty for further conveying from those to whom you convey -the Program, the only way you could satisfy both those terms and this -License would be to refrain entirely from conveying the Program. - - 13. Use with the GNU Affero General Public License. 
- - Notwithstanding any other provision of this License, you have -permission to link or combine any covered work with a work licensed -under version 3 of the GNU Affero General Public License into a single -combined work, and to convey the resulting work. The terms of this -License will continue to apply to the part which is the covered work, -but the special requirements of the GNU Affero General Public License, -section 13, concerning interaction through a network will apply to the -combination as such. - - 14. Revised Versions of this License. - - The Free Software Foundation may publish revised and/or new versions of -the GNU General Public License from time to time. Such new versions will -be similar in spirit to the present version, but may differ in detail to -address new problems or concerns. - - Each version is given a distinguishing version number. If the -Program specifies that a certain numbered version of the GNU General -Public License "or any later version" applies to it, you have the -option of following the terms and conditions either of that numbered -version or of any later version published by the Free Software -Foundation. If the Program does not specify a version number of the -GNU General Public License, you may choose any version ever published -by the Free Software Foundation. - - If the Program specifies that a proxy can decide which future -versions of the GNU General Public License can be used, that proxy's -public statement of acceptance of a version permanently authorizes you -to choose that version for the Program. - - Later license versions may give you additional or different -permissions. However, no additional obligations are imposed on any -author or copyright holder as a result of your choosing to follow a -later version. - - 15. Disclaimer of Warranty. - - THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY -APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT -HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY -OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, -THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR -PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM -IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF -ALL NECESSARY SERVICING, REPAIR OR CORRECTION. - - 16. Limitation of Liability. - - IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING -WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS -THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY -GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE -USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF -DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD -PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), -EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF -SUCH DAMAGES. - - 17. Interpretation of Sections 15 and 16. - - If the disclaimer of warranty and limitation of liability provided -above cannot be given local legal effect according to their terms, -reviewing courts shall apply local law that most closely approximates -an absolute waiver of all civil liability in connection with the -Program, unless a warranty or assumption of liability accompanies a -copy of the Program in return for a fee. 
-
-                     END OF TERMS AND CONDITIONS
-
-            How to Apply These Terms to Your New Programs
-
-  If you develop a new program, and you want it to be of the greatest
-possible use to the public, the best way to achieve this is to make it
-free software which everyone can redistribute and change under these terms.
-
-  To do so, attach the following notices to the program.  It is safest
-to attach them to the start of each source file to most effectively
-state the exclusion of warranty; and each file should have at least
-the "copyright" line and a pointer to where the full notice is found.
-
-    <one line to give the program's name and a brief idea of what it does.>
-    Copyright (C) <year>  <name of author>
-
-    This program is free software: you can redistribute it and/or modify
-    it under the terms of the GNU General Public License as published by
-    the Free Software Foundation, either version 3 of the License, or
-    (at your option) any later version.
-
-    This program is distributed in the hope that it will be useful,
-    but WITHOUT ANY WARRANTY; without even the implied warranty of
-    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-    GNU General Public License for more details.
-
-    You should have received a copy of the GNU General Public License
-    along with this program.  If not, see <https://www.gnu.org/licenses/>.
-
-Also add information on how to contact you by electronic and paper mail.
-
-  If the program does terminal interaction, make it output a short
-notice like this when it starts in an interactive mode:
-
-    <program>  Copyright (C) <year>  <name of author>
-    This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
-    This is free software, and you are welcome to redistribute it
-    under certain conditions; type `show c' for details.
-
-The hypothetical commands `show w' and `show c' should show the appropriate
-parts of the General Public License.  Of course, your program's commands
-might be different; for a GUI interface, you would use an "about box".
-
-  You should also get your employer (if you work as a programmer) or school,
-if any, to sign a "copyright disclaimer" for the program, if necessary.
-For more information on this, and how to apply and follow the GNU GPL, see
-<https://www.gnu.org/licenses/>.
-
-  The GNU General Public License does not permit incorporating your program
-into proprietary programs.  If your program is a subroutine library, you
-may consider it more useful to permit linking proprietary applications with
-the library.  If this is what you want to do, use the GNU Lesser General
-Public License instead of this License.  But first, please read
-<https://www.gnu.org/licenses/why-not-lgpl.html>.
\ No newline at end of file
diff --git a/vendor/github.com/golangci/golangci-lint/cmd/golangci-lint/main.go b/vendor/github.com/golangci/golangci-lint/cmd/golangci-lint/main.go
deleted file mode 100644
index 9d1daa81d..000000000
--- a/vendor/github.com/golangci/golangci-lint/cmd/golangci-lint/main.go
+++ /dev/null
@@ -1,45 +0,0 @@
-package main
-
-import (
-	"fmt"
-	"os"
-	"runtime/debug"
-
-	"github.com/golangci/golangci-lint/pkg/commands"
-	"github.com/golangci/golangci-lint/pkg/exitcodes"
-)
-
-var (
-	goVersion = "unknown"
-
-	// Populated by goreleaser during build
-	version = "master"
-	commit  = "?"
- date = "" -) - -func main() { - if buildInfo, available := debug.ReadBuildInfo(); available { - goVersion = buildInfo.GoVersion - - if date == "" { - version = buildInfo.Main.Version - commit = fmt.Sprintf("(unknown, mod sum: %q)", buildInfo.Main.Sum) - date = "(unknown)" - } - } - - info := commands.BuildInfo{ - GoVersion: goVersion, - Version: version, - Commit: commit, - Date: date, - } - - e := commands.NewExecutor(info) - - if err := e.Execute(); err != nil { - fmt.Fprintf(os.Stderr, "failed executing command with error %v\n", err) - os.Exit(exitcodes.Failure) - } -} diff --git a/vendor/github.com/golangci/golangci-lint/internal/cache/cache.go b/vendor/github.com/golangci/golangci-lint/internal/cache/cache.go deleted file mode 100644 index 299fd5279..000000000 --- a/vendor/github.com/golangci/golangci-lint/internal/cache/cache.go +++ /dev/null @@ -1,525 +0,0 @@ -// Copyright 2017 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Package cache implements a build artifact cache. -// -// This package is a slightly modified fork of Go's -// cmd/go/internal/cache package. -package cache - -import ( - "bytes" - "crypto/sha256" - "encoding/hex" - "errors" - "fmt" - "io" - "os" - "path/filepath" - "strconv" - "strings" - "time" - - "github.com/golangci/golangci-lint/internal/renameio" - "github.com/golangci/golangci-lint/internal/robustio" -) - -// An ActionID is a cache action key, the hash of a complete description of a -// repeatable computation (command line, environment variables, -// input file contents, executable contents). -type ActionID [HashSize]byte - -// An OutputID is a cache output key, the hash of an output of a computation. -type OutputID [HashSize]byte - -// A Cache is a package cache, backed by a file system directory tree. -type Cache struct { - dir string - now func() time.Time -} - -// Open opens and returns the cache in the given directory. -// -// It is safe for multiple processes on a single machine to use the -// same cache directory in a local file system simultaneously. -// They will coordinate using operating system file locks and may -// duplicate effort but will not corrupt the cache. -// -// However, it is NOT safe for multiple processes on different machines -// to share a cache directory (for example, if the directory were stored -// in a network file system). File locking is notoriously unreliable in -// network file systems and may not suffice to protect the cache. -func Open(dir string) (*Cache, error) { - info, err := os.Stat(dir) - if err != nil { - return nil, err - } - if !info.IsDir() { - return nil, &os.PathError{Op: "open", Path: dir, Err: errors.New("not a directory")} - } - for i := 0; i < 256; i++ { - name := filepath.Join(dir, fmt.Sprintf("%02x", i)) - if err := os.MkdirAll(name, 0744); err != nil { - return nil, err - } - } - c := &Cache{ - dir: dir, - now: time.Now, - } - return c, nil -} - -// fileName returns the name of the file corresponding to the given id. 
-func (c *Cache) fileName(id [HashSize]byte, key string) string {
-	return filepath.Join(c.dir, fmt.Sprintf("%02x", id[0]), fmt.Sprintf("%x", id)+"-"+key)
-}
-
-var errMissing = errors.New("cache entry not found")
-
-func IsErrMissing(err error) bool {
-	return errors.Is(err, errMissing)
-}
-
-const (
-	// action entry file is "v1 <hex id> <hex out> <decimal size space-padded to 20 bytes> <unixnano space-padded to 20 bytes>\n"
-	hexSize   = HashSize * 2
-	entrySize = 2 + 1 + hexSize + 1 + hexSize + 1 + 20 + 1 + 20 + 1
-)
-
-// verify controls whether to run the cache in verify mode.
-// In verify mode, the cache always returns errMissing from Get
-// but then double-checks in Put that the data being written
-// exactly matches any existing entry. This provides an easy
-// way to detect program behavior that would have been different
-// had the cache entry been returned from Get.
-//
-// verify is enabled by setting the environment variable
-// GODEBUG=gocacheverify=1.
-var verify = false
-
-// DebugTest is set when GODEBUG=gocachetest=1 is in the environment.
-var DebugTest = false
-
-func init() { initEnv() }
-
-func initEnv() {
-	verify = false
-	debugHash = false
-	debug := strings.Split(os.Getenv("GODEBUG"), ",")
-	for _, f := range debug {
-		if f == "gocacheverify=1" {
-			verify = true
-		}
-		if f == "gocachehash=1" {
-			debugHash = true
-		}
-		if f == "gocachetest=1" {
-			DebugTest = true
-		}
-	}
-}
-
-// Get looks up the action ID in the cache,
-// returning the corresponding output ID and file size, if any.
-// Note that finding an output ID does not guarantee that the
-// saved file for that output ID is still available.
-func (c *Cache) Get(id ActionID) (Entry, error) {
-	if verify {
-		return Entry{}, errMissing
-	}
-	return c.get(id)
-}
-
-type Entry struct {
-	OutputID OutputID
-	Size     int64
-	Time     time.Time
-}
-
-// get is Get but does not respect verify mode, so that Put can use it.
-func (c *Cache) get(id ActionID) (Entry, error) { - missing := func() (Entry, error) { - return Entry{}, errMissing - } - failed := func(err error) (Entry, error) { - return Entry{}, err - } - fileName := c.fileName(id, "a") - f, err := os.Open(fileName) - if err != nil { - if os.IsNotExist(err) { - return missing() - } - return failed(err) - } - defer f.Close() - entry := make([]byte, entrySize+1) // +1 to detect whether f is too long - if n, readErr := io.ReadFull(f, entry); n != entrySize || readErr != io.ErrUnexpectedEOF { - return failed(fmt.Errorf("read %d/%d bytes from %s with error %w", n, entrySize, fileName, readErr)) - } - if entry[0] != 'v' || entry[1] != '1' || entry[2] != ' ' || entry[3+hexSize] != ' ' || entry[3+hexSize+1+hexSize] != ' ' || entry[3+hexSize+1+hexSize+1+20] != ' ' || entry[entrySize-1] != '\n' { - return failed(fmt.Errorf("bad data in %s", fileName)) - } - eid, entry := entry[3:3+hexSize], entry[3+hexSize:] - eout, entry := entry[1:1+hexSize], entry[1+hexSize:] - esize, entry := entry[1:1+20], entry[1+20:] - etime := entry[1 : 1+20] - var buf [HashSize]byte - if _, err = hex.Decode(buf[:], eid); err != nil || buf != id { - return failed(fmt.Errorf("failed to hex decode eid data in %s: %w", fileName, err)) - } - if _, err = hex.Decode(buf[:], eout); err != nil { - return failed(fmt.Errorf("failed to hex decode eout data in %s: %w", fileName, err)) - } - i := 0 - for i < len(esize) && esize[i] == ' ' { - i++ - } - size, err := strconv.ParseInt(string(esize[i:]), 10, 64) - if err != nil || size < 0 { - return failed(fmt.Errorf("failed to parse esize int from %s with error %w", fileName, err)) - } - i = 0 - for i < len(etime) && etime[i] == ' ' { - i++ - } - tm, err := strconv.ParseInt(string(etime[i:]), 10, 64) - if err != nil || tm < 0 { - return failed(fmt.Errorf("failed to parse etime int from %s with error %w", fileName, err)) - } - - if err = c.used(fileName); err != nil { - return failed(fmt.Errorf("failed to mark %s as used: %w", fileName, err)) - } - - return Entry{buf, size, time.Unix(0, tm)}, nil -} - -// GetBytes looks up the action ID in the cache and returns -// the corresponding output bytes. -// GetBytes should only be used for data that can be expected to fit in memory. -func (c *Cache) GetBytes(id ActionID) ([]byte, Entry, error) { - entry, err := c.Get(id) - if err != nil { - return nil, entry, err - } - outputFile, err := c.OutputFile(entry.OutputID) - if err != nil { - return nil, entry, err - } - - data, err := robustio.ReadFile(outputFile) - if err != nil { - return nil, entry, err - } - - if sha256.Sum256(data) != entry.OutputID { - return nil, entry, errMissing - } - return data, entry, nil -} - -// OutputFile returns the name of the cache file storing output with the given OutputID. -func (c *Cache) OutputFile(out OutputID) (string, error) { - file := c.fileName(out, "d") - if err := c.used(file); err != nil { - return "", err - } - return file, nil -} - -// Time constants for cache expiration. -// -// We set the mtime on a cache file on each use, but at most one per mtimeInterval (1 hour), -// to avoid causing many unnecessary inode updates. The mtimes therefore -// roughly reflect "time of last use" but may in fact be older by at most an hour. -// -// We scan the cache for entries to delete at most once per trimInterval (1 day). -// -// When we do scan the cache, we delete entries that have not been used for -// at least trimLimit (5 days). 
Statistics gathered from a month of usage by -// Go developers found that essentially all reuse of cached entries happened -// within 5 days of the previous reuse. See golang.org/issue/22990. -const ( - mtimeInterval = 1 * time.Hour - trimInterval = 24 * time.Hour - trimLimit = 5 * 24 * time.Hour -) - -// used makes a best-effort attempt to update mtime on file, -// so that mtime reflects cache access time. -// -// Because the reflection only needs to be approximate, -// and to reduce the amount of disk activity caused by using -// cache entries, used only updates the mtime if the current -// mtime is more than an hour old. This heuristic eliminates -// nearly all the mtime updates that would otherwise happen, -// while still keeping the mtimes useful for cache trimming. -func (c *Cache) used(file string) error { - info, err := os.Stat(file) - if err != nil { - if os.IsNotExist(err) { - return errMissing - } - return fmt.Errorf("failed to stat file %s: %w", file, err) - } - - if c.now().Sub(info.ModTime()) < mtimeInterval { - return nil - } - - if err := os.Chtimes(file, c.now(), c.now()); err != nil { - return fmt.Errorf("failed to change time of file %s: %w", file, err) - } - - return nil -} - -// Trim removes old cache entries that are likely not to be reused. -func (c *Cache) Trim() { - now := c.now() - - // We maintain in dir/trim.txt the time of the last completed cache trim. - // If the cache has been trimmed recently enough, do nothing. - // This is the common case. - data, _ := renameio.ReadFile(filepath.Join(c.dir, "trim.txt")) - t, err := strconv.ParseInt(strings.TrimSpace(string(data)), 10, 64) - if err == nil && now.Sub(time.Unix(t, 0)) < trimInterval { - return - } - - // Trim each of the 256 subdirectories. - // We subtract an additional mtimeInterval - // to account for the imprecision of our "last used" mtimes. - cutoff := now.Add(-trimLimit - mtimeInterval) - for i := 0; i < 256; i++ { - subdir := filepath.Join(c.dir, fmt.Sprintf("%02x", i)) - c.trimSubdir(subdir, cutoff) - } - - // Ignore errors from here: if we don't write the complete timestamp, the - // cache will appear older than it is, and we'll trim it again next time. - _ = renameio.WriteFile(filepath.Join(c.dir, "trim.txt"), []byte(fmt.Sprintf("%d", now.Unix())), 0666) -} - -// trimSubdir trims a single cache subdirectory. -func (c *Cache) trimSubdir(subdir string, cutoff time.Time) { - // Read all directory entries from subdir before removing - // any files, in case removing files invalidates the file offset - // in the directory scan. Also, ignore error from f.Readdirnames, - // because we don't care about reporting the error, and we still - // want to process any entries found before the error. - f, err := os.Open(subdir) - if err != nil { - return - } - names, _ := f.Readdirnames(-1) - f.Close() - - for _, name := range names { - // Remove only cache entries (xxxx-a and xxxx-d). - if !strings.HasSuffix(name, "-a") && !strings.HasSuffix(name, "-d") { - continue - } - entry := filepath.Join(subdir, name) - info, err := os.Stat(entry) - if err == nil && info.ModTime().Before(cutoff) { - os.Remove(entry) - } - } -} - -// putIndexEntry adds an entry to the cache recording that executing the action -// with the given id produces an output with the given output id (hash) and size. 
-func (c *Cache) putIndexEntry(id ActionID, out OutputID, size int64, allowVerify bool) error { - // Note: We expect that for one reason or another it may happen - // that repeating an action produces a different output hash - // (for example, if the output contains a time stamp or temp dir name). - // While not ideal, this is also not a correctness problem, so we - // don't make a big deal about it. In particular, we leave the action - // cache entries writable specifically so that they can be overwritten. - // - // Setting GODEBUG=gocacheverify=1 does make a big deal: - // in verify mode we are double-checking that the cache entries - // are entirely reproducible. As just noted, this may be unrealistic - // in some cases but the check is also useful for shaking out real bugs. - entry := fmt.Sprintf("v1 %x %x %20d %20d\n", id, out, size, time.Now().UnixNano()) - - if verify && allowVerify { - old, err := c.get(id) - if err == nil && (old.OutputID != out || old.Size != size) { - // panic to show stack trace, so we can see what code is generating this cache entry. - msg := fmt.Sprintf("go: internal cache error: cache verify failed: id=%x changed:<<<\n%s\n>>>\nold: %x %d\nnew: %x %d", id, reverseHash(id), out, size, old.OutputID, old.Size) - panic(msg) - } - } - file := c.fileName(id, "a") - - // Copy file to cache directory. - mode := os.O_WRONLY | os.O_CREATE - f, err := os.OpenFile(file, mode, 0666) - if err != nil { - return err - } - _, err = f.WriteString(entry) - if err == nil { - // Truncate the file only *after* writing it. - // (This should be a no-op, but truncate just in case of previous corruption.) - // - // This differs from os.WriteFile, which truncates to 0 *before* writing - // via os.O_TRUNC. Truncating only after writing ensures that a second write - // of the same content to the same file is idempotent, and does not — even - // temporarily! — undo the effect of the first write. - err = f.Truncate(int64(len(entry))) - } - if closeErr := f.Close(); err == nil { - err = closeErr - } - if err != nil { - // TODO(bcmills): This Remove potentially races with another go command writing to file. - // Can we eliminate it? - os.Remove(file) - return err - } - if err = os.Chtimes(file, c.now(), c.now()); err != nil { // mainly for tests - return fmt.Errorf("failed to change time of file %s: %w", file, err) - } - - return nil -} - -// Put stores the given output in the cache as the output for the action ID. -// It may read file twice. The content of file must not change between the two passes. -func (c *Cache) Put(id ActionID, file io.ReadSeeker) (OutputID, int64, error) { - return c.put(id, file, true) -} - -// PutNoVerify is like Put but disables the verify check -// when GODEBUG=goverifycache=1 is set. -// It is meant for data that is OK to cache but that we expect to vary slightly from run to run, -// like test output containing times and the like. -func (c *Cache) PutNoVerify(id ActionID, file io.ReadSeeker) (OutputID, int64, error) { - return c.put(id, file, false) -} - -func (c *Cache) put(id ActionID, file io.ReadSeeker, allowVerify bool) (OutputID, int64, error) { - // Compute output ID. - h := sha256.New() - if _, err := file.Seek(0, 0); err != nil { - return OutputID{}, 0, err - } - size, err := io.Copy(h, file) - if err != nil { - return OutputID{}, 0, err - } - var out OutputID - h.Sum(out[:0]) - - // Copy to cached output file (if not already present). - if err := c.copyFile(file, out, size); err != nil { - return out, size, err - } - - // Add to cache index. 
- return out, size, c.putIndexEntry(id, out, size, allowVerify) -} - -// PutBytes stores the given bytes in the cache as the output for the action ID. -func (c *Cache) PutBytes(id ActionID, data []byte) error { - _, _, err := c.Put(id, bytes.NewReader(data)) - return err -} - -// copyFile copies file into the cache, expecting it to have the given -// output ID and size, if that file is not present already. -func (c *Cache) copyFile(file io.ReadSeeker, out OutputID, size int64) error { - name := c.fileName(out, "d") - info, err := os.Stat(name) - if err == nil && info.Size() == size { - // Check hash. - if f, openErr := os.Open(name); openErr == nil { - h := sha256.New() - if _, copyErr := io.Copy(h, f); copyErr != nil { - return fmt.Errorf("failed to copy to sha256: %w", copyErr) - } - - f.Close() - var out2 OutputID - h.Sum(out2[:0]) - if out == out2 { - return nil - } - } - // Hash did not match. Fall through and rewrite file. - } - - // Copy file to cache directory. - mode := os.O_RDWR | os.O_CREATE - if err == nil && info.Size() > size { // shouldn't happen but fix in case - mode |= os.O_TRUNC - } - f, err := os.OpenFile(name, mode, 0666) - if err != nil { - return err - } - defer f.Close() - if size == 0 { - // File now exists with correct size. - // Only one possible zero-length file, so contents are OK too. - // Early return here makes sure there's a "last byte" for code below. - return nil - } - - // From here on, if any of the I/O writing the file fails, - // we make a best-effort attempt to truncate the file f - // before returning, to avoid leaving bad bytes in the file. - - // Copy file to f, but also into h to double-check hash. - if _, err = file.Seek(0, 0); err != nil { - _ = f.Truncate(0) - return err - } - h := sha256.New() - w := io.MultiWriter(f, h) - if _, err = io.CopyN(w, file, size-1); err != nil { - _ = f.Truncate(0) - return err - } - // Check last byte before writing it; writing it will make the size match - // what other processes expect to find and might cause them to start - // using the file. - buf := make([]byte, 1) - if _, err = file.Read(buf); err != nil { - _ = f.Truncate(0) - return err - } - if n, wErr := h.Write(buf); n != len(buf) { - return fmt.Errorf("wrote to hash %d/%d bytes with error %w", n, len(buf), wErr) - } - - sum := h.Sum(nil) - if !bytes.Equal(sum, out[:]) { - _ = f.Truncate(0) - return errors.New("file content changed underfoot") - } - - // Commit cache file entry. - if _, err = f.Write(buf); err != nil { - _ = f.Truncate(0) - return err - } - if err = f.Close(); err != nil { - // Data might not have been written, - // but file may look like it is the right size. - // To be extra careful, remove cached file. - os.Remove(name) - return err - } - if err = os.Chtimes(name, c.now(), c.now()); err != nil { // mainly for tests - return fmt.Errorf("failed to change time of file %s: %w", name, err) - } - - return nil -} diff --git a/vendor/github.com/golangci/golangci-lint/internal/cache/default.go b/vendor/github.com/golangci/golangci-lint/internal/cache/default.go deleted file mode 100644 index 399cc84cf..000000000 --- a/vendor/github.com/golangci/golangci-lint/internal/cache/default.go +++ /dev/null @@ -1,88 +0,0 @@ -// Copyright 2017 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
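The cache code removed above keys everything by SHA-256: an ActionID (the hash of a complete description of a computation) maps, via a small "<hash>-a" index entry, to an OutputID (the hash of the produced bytes), which names the content-addressed "<hash>-d" data file. Below is a minimal standalone sketch of that two-level layout using only the standard library; the directory layout and entry format mirror the removed code, but the names and values are illustrative only, not the vendored API.

package main

import (
	"crypto/sha256"
	"fmt"
	"os"
	"path/filepath"
)

// fileName mirrors (*Cache).fileName above: "<dir>/<first hash byte>/<hash>-<key>".
func fileName(dir string, id [32]byte, key string) string {
	return filepath.Join(dir, fmt.Sprintf("%02x", id[0]), fmt.Sprintf("%x", id)+"-"+key)
}

func main() {
	dir, err := os.MkdirTemp("", "cache-sketch")
	if err != nil {
		panic(err)
	}
	defer os.RemoveAll(dir)

	output := []byte("hypothetical linter output")
	actionID := sha256.Sum256([]byte("lint example.com/foo with config X")) // describes the computation
	outputID := sha256.Sum256(output)                                       // describes the result

	// Store the output under its own hash (the "-d" file) and an index entry
	// under the action hash (the "-a" file) that points at the output.
	for _, id := range [][32]byte{actionID, outputID} {
		if err := os.MkdirAll(filepath.Dir(fileName(dir, id, "a")), 0o755); err != nil {
			panic(err)
		}
	}
	entry := fmt.Sprintf("v1 %x %x %20d %20d\n", actionID, outputID, len(output), int64(1700000000))
	if err := os.WriteFile(fileName(dir, outputID, "d"), output, 0o644); err != nil {
		panic(err)
	}
	if err := os.WriteFile(fileName(dir, actionID, "a"), []byte(entry), 0o644); err != nil {
		panic(err)
	}
	fmt.Print("index entry: ", entry)
}

Keeping the index entry separate from the data file is what lets many action IDs share one content-addressed output without copying it.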
- -package cache - -import ( - "fmt" - "log" - "os" - "path/filepath" - "sync" -) - -const envGolangciLintCache = "GOLANGCI_LINT_CACHE" - -// Default returns the default cache to use. -func Default() (*Cache, error) { - defaultOnce.Do(initDefaultCache) - return defaultCache, defaultDirErr -} - -var ( - defaultOnce sync.Once - defaultCache *Cache -) - -// cacheREADME is a message stored in a README in the cache directory. -// Because the cache lives outside the normal Go trees, we leave the -// README as a courtesy to explain where it came from. -const cacheREADME = `This directory holds cached build artifacts from golangci-lint. -` - -// initDefaultCache does the work of finding the default cache -// the first time Default is called. -func initDefaultCache() { - dir := DefaultDir() - if err := os.MkdirAll(dir, 0744); err != nil { - log.Fatalf("failed to initialize build cache at %s: %s\n", dir, err) - } - if _, err := os.Stat(filepath.Join(dir, "README")); err != nil { - // Best effort. - if wErr := os.WriteFile(filepath.Join(dir, "README"), []byte(cacheREADME), 0666); wErr != nil { - log.Fatalf("Failed to write README file to cache dir %s: %s", dir, err) - } - } - - c, err := Open(dir) - if err != nil { - log.Fatalf("failed to initialize build cache at %s: %s\n", dir, err) - } - defaultCache = c -} - -var ( - defaultDirOnce sync.Once - defaultDir string - defaultDirErr error -) - -// DefaultDir returns the effective GOLANGCI_LINT_CACHE setting. -func DefaultDir() string { - // Save the result of the first call to DefaultDir for later use in - // initDefaultCache. cmd/go/main.go explicitly sets GOCACHE so that - // subprocesses will inherit it, but that means initDefaultCache can't - // otherwise distinguish between an explicit "off" and a UserCacheDir error. - - defaultDirOnce.Do(func() { - defaultDir = os.Getenv(envGolangciLintCache) - if filepath.IsAbs(defaultDir) { - return - } - if defaultDir != "" { - defaultDirErr = fmt.Errorf("%s is not an absolute path", envGolangciLintCache) - return - } - - // Compute default location. - dir, err := os.UserCacheDir() - if err != nil { - defaultDirErr = fmt.Errorf("%s is not defined and %w", envGolangciLintCache, err) - return - } - defaultDir = filepath.Join(dir, "golangci-lint") - }) - - return defaultDir -} diff --git a/vendor/github.com/golangci/golangci-lint/internal/cache/hash.go b/vendor/github.com/golangci/golangci-lint/internal/cache/hash.go deleted file mode 100644 index 4ce79e325..000000000 --- a/vendor/github.com/golangci/golangci-lint/internal/cache/hash.go +++ /dev/null @@ -1,186 +0,0 @@ -// Copyright 2017 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package cache - -import ( - "bytes" - "crypto/sha256" - "fmt" - "hash" - "io" - "os" - "sync" -) - -var debugHash = false // set when GODEBUG=gocachehash=1 - -// HashSize is the number of bytes in a hash. -const HashSize = 32 - -// A Hash provides access to the canonical hash function used to index the cache. -// The current implementation uses salted SHA256, but clients must not assume this. -type Hash struct { - h hash.Hash - name string // for debugging - buf *bytes.Buffer // for verify -} - -// hashSalt is a salt string added to the beginning of every hash -// created by NewHash. 
Using the golangci-lint version makes sure that different -// versions of the command do not address the same cache -// entries, so that a bug in one version does not affect the execution -// of other versions. This salt will result in additional ActionID files -// in the cache, but not additional copies of the large output files, -// which are still addressed by unsalted SHA256. -var hashSalt []byte - -func SetSalt(b []byte) { - hashSalt = b -} - -// Subkey returns an action ID corresponding to mixing a parent -// action ID with a string description of the subkey. -func Subkey(parent ActionID, desc string) (ActionID, error) { - h := sha256.New() - const subkeyPrefix = "subkey:" - if n, err := h.Write([]byte(subkeyPrefix)); n != len(subkeyPrefix) { - return ActionID{}, fmt.Errorf("wrote %d/%d bytes of subkey prefix with error %s", n, len(subkeyPrefix), err) - } - if n, err := h.Write(parent[:]); n != len(parent) { - return ActionID{}, fmt.Errorf("wrote %d/%d bytes of parent with error %s", n, len(parent), err) - } - if n, err := h.Write([]byte(desc)); n != len(desc) { - return ActionID{}, fmt.Errorf("wrote %d/%d bytes of desc with error %s", n, len(desc), err) - } - - var out ActionID - h.Sum(out[:0]) - if debugHash { - fmt.Fprintf(os.Stderr, "HASH subkey %x %q = %x\n", parent, desc, out) - } - if verify { - hashDebug.Lock() - hashDebug.m[out] = fmt.Sprintf("subkey %x %q", parent, desc) - hashDebug.Unlock() - } - return out, nil -} - -// NewHash returns a new Hash. -// The caller is expected to Write data to it and then call Sum. -func NewHash(name string) (*Hash, error) { - h := &Hash{h: sha256.New(), name: name} - if debugHash { - fmt.Fprintf(os.Stderr, "HASH[%s]\n", h.name) - } - if n, err := h.Write(hashSalt); n != len(hashSalt) { - return nil, fmt.Errorf("wrote %d/%d bytes of hash salt with error %s", n, len(hashSalt), err) - } - if verify { - h.buf = new(bytes.Buffer) - } - return h, nil -} - -// Write writes data to the running hash. -func (h *Hash) Write(b []byte) (int, error) { - if debugHash { - fmt.Fprintf(os.Stderr, "HASH[%s]: %q\n", h.name, b) - } - if h.buf != nil { - h.buf.Write(b) - } - return h.h.Write(b) -} - -// Sum returns the hash of the data written previously. -func (h *Hash) Sum() [HashSize]byte { - var out [HashSize]byte - h.h.Sum(out[:0]) - if debugHash { - fmt.Fprintf(os.Stderr, "HASH[%s]: %x\n", h.name, out) - } - if h.buf != nil { - hashDebug.Lock() - if hashDebug.m == nil { - hashDebug.m = make(map[[HashSize]byte]string) - } - hashDebug.m[out] = h.buf.String() - hashDebug.Unlock() - } - return out -} - -// In GODEBUG=gocacheverify=1 mode, -// hashDebug holds the input to every computed hash ID, -// so that we can work backward from the ID involved in a -// cache entry mismatch to a description of what should be there. -var hashDebug struct { - sync.Mutex - m map[[HashSize]byte]string -} - -// reverseHash returns the input used to compute the hash id. -func reverseHash(id [HashSize]byte) string { - hashDebug.Lock() - s := hashDebug.m[id] - hashDebug.Unlock() - return s -} - -var hashFileCache struct { - sync.Mutex - m map[string][HashSize]byte -} - -// FileHash returns the hash of the named file. -// It caches repeated lookups for a given file, -// and the cache entry for a file can be initialized -// using SetFileHash. -// The hash used by FileHash is not the same as -// the hash used by NewHash. 
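The hash.go file removed above salts every hash with the golangci-lint version (SetSalt) and derives child keys from a parent action ID plus a textual description (Subkey), so different tool versions and different sub-results never collide in the cache. A standalone sketch of the same idea follows; the salt value and the descriptions are hypothetical, and this is stdlib-only code rather than the vendored API.

package main

import (
	"crypto/sha256"
	"fmt"
)

// actionID mixes a version-specific salt into the hash of the inputs,
// the same role SetSalt plays for NewHash above.
func actionID(salt []byte, inputs ...string) [32]byte {
	h := sha256.New()
	h.Write(salt)
	for _, in := range inputs {
		fmt.Fprintf(h, "%s\n", in)
	}
	var out [32]byte
	h.Sum(out[:0])
	return out
}

// subkey derives a child key from a parent action ID and a description,
// mirroring the Subkey helper above.
func subkey(parent [32]byte, desc string) [32]byte {
	h := sha256.New()
	h.Write([]byte("subkey:"))
	h.Write(parent[:])
	h.Write([]byte(desc))
	var out [32]byte
	h.Sum(out[:0])
	return out
}

func main() {
	salt := []byte("golangci-lint v1.56.2") // hypothetical salt value
	pkgKey := actionID(salt, "pkgpath example.com/foo", "pkghash abc123")
	fmt.Printf("action ID:     %x\n", pkgKey)
	fmt.Printf("issues subkey: %x\n", subkey(pkgKey, "issues"))
}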
-func FileHash(file string) ([HashSize]byte, error) { - hashFileCache.Lock() - out, ok := hashFileCache.m[file] - hashFileCache.Unlock() - - if ok { - return out, nil - } - - h := sha256.New() - f, err := os.Open(file) - if err != nil { - if debugHash { - fmt.Fprintf(os.Stderr, "HASH %s: %v\n", file, err) - } - return [HashSize]byte{}, err - } - _, err = io.Copy(h, f) - f.Close() - if err != nil { - if debugHash { - fmt.Fprintf(os.Stderr, "HASH %s: %v\n", file, err) - } - return [HashSize]byte{}, err - } - h.Sum(out[:0]) - if debugHash { - fmt.Fprintf(os.Stderr, "HASH %s: %x\n", file, out) - } - - SetFileHash(file, out) - return out, nil -} - -// SetFileHash sets the hash returned by FileHash for file. -func SetFileHash(file string, sum [HashSize]byte) { - hashFileCache.Lock() - if hashFileCache.m == nil { - hashFileCache.m = make(map[string][HashSize]byte) - } - hashFileCache.m[file] = sum - hashFileCache.Unlock() -} diff --git a/vendor/github.com/golangci/golangci-lint/internal/cache/readme.md b/vendor/github.com/golangci/golangci-lint/internal/cache/readme.md deleted file mode 100644 index b469711ed..000000000 --- a/vendor/github.com/golangci/golangci-lint/internal/cache/readme.md +++ /dev/null @@ -1,18 +0,0 @@ -# cache - -Extracted from go/src/cmd/go/internal/cache/ -I don't know what version of Go this package was pulled from. - -Adapted for golangci-lint: -- https://github.com/golangci/golangci-lint/pull/699 -- https://github.com/golangci/golangci-lint/pull/779 -- https://github.com/golangci/golangci-lint/pull/788 -- https://github.com/golangci/golangci-lint/pull/808 -- https://github.com/golangci/golangci-lint/pull/1063 -- https://github.com/golangci/golangci-lint/pull/1070 -- https://github.com/golangci/golangci-lint/pull/1162 -- https://github.com/golangci/golangci-lint/pull/2318 -- https://github.com/golangci/golangci-lint/pull/2352 -- https://github.com/golangci/golangci-lint/pull/3012 -- https://github.com/golangci/golangci-lint/pull/3096 -- https://github.com/golangci/golangci-lint/pull/3204 diff --git a/vendor/github.com/golangci/golangci-lint/internal/errorutil/errors.go b/vendor/github.com/golangci/golangci-lint/internal/errorutil/errors.go deleted file mode 100644 index c8a3a0357..000000000 --- a/vendor/github.com/golangci/golangci-lint/internal/errorutil/errors.go +++ /dev/null @@ -1,23 +0,0 @@ -package errorutil - -import ( - "fmt" -) - -// PanicError can be used to not print stacktrace twice -type PanicError struct { - recovered any - stack []byte -} - -func NewPanicError(recovered any, stack []byte) *PanicError { - return &PanicError{recovered: recovered, stack: stack} -} - -func (e PanicError) Error() string { - return fmt.Sprint(e.recovered) -} - -func (e PanicError) Stack() []byte { - return e.stack -} diff --git a/vendor/github.com/golangci/golangci-lint/internal/pkgcache/pkgcache.go b/vendor/github.com/golangci/golangci-lint/internal/pkgcache/pkgcache.go deleted file mode 100644 index 3b3422eb7..000000000 --- a/vendor/github.com/golangci/golangci-lint/internal/pkgcache/pkgcache.go +++ /dev/null @@ -1,229 +0,0 @@ -package pkgcache - -import ( - "bytes" - "encoding/gob" - "encoding/hex" - "errors" - "fmt" - "runtime" - "sort" - "sync" - - "golang.org/x/tools/go/packages" - - "github.com/golangci/golangci-lint/internal/cache" - "github.com/golangci/golangci-lint/pkg/logutils" - "github.com/golangci/golangci-lint/pkg/timeutils" -) - -type HashMode int - -const ( - HashModeNeedOnlySelf HashMode = iota - HashModeNeedDirectDeps - HashModeNeedAllDeps -) - -// Cache is a 
per-package data cache. A cached data is invalidated when -// package, or it's dependencies change. -type Cache struct { - lowLevelCache *cache.Cache - pkgHashes sync.Map - sw *timeutils.Stopwatch - log logutils.Log // not used now, but may be needed for future debugging purposes - ioSem chan struct{} // semaphore limiting parallel IO -} - -func NewCache(sw *timeutils.Stopwatch, log logutils.Log) (*Cache, error) { - c, err := cache.Default() - if err != nil { - return nil, err - } - return &Cache{ - lowLevelCache: c, - sw: sw, - log: log, - ioSem: make(chan struct{}, runtime.GOMAXPROCS(-1)), - }, nil -} - -func (c *Cache) Trim() { - c.sw.TrackStage("trim", func() { - c.lowLevelCache.Trim() - }) -} - -func (c *Cache) Put(pkg *packages.Package, mode HashMode, key string, data any) error { - var err error - buf := &bytes.Buffer{} - c.sw.TrackStage("gob", func() { - err = gob.NewEncoder(buf).Encode(data) - }) - if err != nil { - return fmt.Errorf("failed to gob encode: %w", err) - } - - var aID cache.ActionID - - c.sw.TrackStage("key build", func() { - aID, err = c.pkgActionID(pkg, mode) - if err == nil { - subkey, subkeyErr := cache.Subkey(aID, key) - if subkeyErr != nil { - err = fmt.Errorf("failed to build subkey: %w", subkeyErr) - } - aID = subkey - } - }) - if err != nil { - return fmt.Errorf("failed to calculate package %s action id: %w", pkg.Name, err) - } - c.ioSem <- struct{}{} - c.sw.TrackStage("cache io", func() { - err = c.lowLevelCache.PutBytes(aID, buf.Bytes()) - }) - <-c.ioSem - if err != nil { - return fmt.Errorf("failed to save data to low-level cache by key %s for package %s: %w", key, pkg.Name, err) - } - - return nil -} - -var ErrMissing = errors.New("missing data") - -func (c *Cache) Get(pkg *packages.Package, mode HashMode, key string, data any) error { - var aID cache.ActionID - var err error - c.sw.TrackStage("key build", func() { - aID, err = c.pkgActionID(pkg, mode) - if err == nil { - subkey, subkeyErr := cache.Subkey(aID, key) - if subkeyErr != nil { - err = fmt.Errorf("failed to build subkey: %w", subkeyErr) - } - aID = subkey - } - }) - if err != nil { - return fmt.Errorf("failed to calculate package %s action id: %w", pkg.Name, err) - } - - var b []byte - c.ioSem <- struct{}{} - c.sw.TrackStage("cache io", func() { - b, _, err = c.lowLevelCache.GetBytes(aID) - }) - <-c.ioSem - if err != nil { - if cache.IsErrMissing(err) { - return ErrMissing - } - return fmt.Errorf("failed to get data from low-level cache by key %s for package %s: %w", key, pkg.Name, err) - } - - c.sw.TrackStage("gob", func() { - err = gob.NewDecoder(bytes.NewReader(b)).Decode(data) - }) - if err != nil { - return fmt.Errorf("failed to gob decode: %w", err) - } - - return nil -} - -func (c *Cache) pkgActionID(pkg *packages.Package, mode HashMode) (cache.ActionID, error) { - hash, err := c.packageHash(pkg, mode) - if err != nil { - return cache.ActionID{}, fmt.Errorf("failed to get package hash: %w", err) - } - - key, err := cache.NewHash("action ID") - if err != nil { - return cache.ActionID{}, fmt.Errorf("failed to make a hash: %w", err) - } - fmt.Fprintf(key, "pkgpath %s\n", pkg.PkgPath) - fmt.Fprintf(key, "pkghash %s\n", hash) - - return key.Sum(), nil -} - -// packageHash computes a package's hash. The hash is based on all Go -// files that make up the package, as well as the hashes of imported -// packages. 
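pkgcache.Cache above stores arbitrary per-package values by gob-encoding them to bytes before handing them to the low-level byte cache, and decodes them again on the way out. A small stdlib-only sketch of that encode/decode round-trip follows; the Issue type is a hypothetical stand-in for whatever a linter caches per package.

package main

import (
	"bytes"
	"encoding/gob"
	"fmt"
	"log"
)

// Issue is a stand-in for the per-package data a linter might cache.
type Issue struct {
	Pos  string
	Text string
}

func main() {
	issues := []Issue{{Pos: "foo.go:10", Text: "unused variable"}}

	// Encode to bytes, as (*Cache).Put does before storing the blob.
	var buf bytes.Buffer
	if err := gob.NewEncoder(&buf).Encode(issues); err != nil {
		log.Fatal(err)
	}

	// Decode on the way back out, as (*Cache).Get does.
	var decoded []Issue
	if err := gob.NewDecoder(bytes.NewReader(buf.Bytes())).Decode(&decoded); err != nil {
		log.Fatal(err)
	}
	fmt.Printf("round-tripped %d issue(s): %+v\n", len(decoded), decoded)
}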
-func (c *Cache) packageHash(pkg *packages.Package, mode HashMode) (string, error) { - type hashResults map[HashMode]string - hashResI, ok := c.pkgHashes.Load(pkg) - if ok { - hashRes := hashResI.(hashResults) - if _, ok := hashRes[mode]; !ok { - return "", fmt.Errorf("no mode %d in hash result", mode) - } - return hashRes[mode], nil - } - - hashRes := hashResults{} - - key, err := cache.NewHash("package hash") - if err != nil { - return "", fmt.Errorf("failed to make a hash: %w", err) - } - - fmt.Fprintf(key, "pkgpath %s\n", pkg.PkgPath) - for _, f := range pkg.CompiledGoFiles { - c.ioSem <- struct{}{} - h, fErr := cache.FileHash(f) - <-c.ioSem - if fErr != nil { - return "", fmt.Errorf("failed to calculate file %s hash: %w", f, fErr) - } - fmt.Fprintf(key, "file %s %x\n", f, h) - } - curSum := key.Sum() - hashRes[HashModeNeedOnlySelf] = hex.EncodeToString(curSum[:]) - - imps := make([]*packages.Package, 0, len(pkg.Imports)) - for _, imp := range pkg.Imports { - imps = append(imps, imp) - } - sort.Slice(imps, func(i, j int) bool { - return imps[i].PkgPath < imps[j].PkgPath - }) - - calcDepsHash := func(depMode HashMode) error { - for _, dep := range imps { - if dep.PkgPath == "unsafe" { - continue - } - - depHash, depErr := c.packageHash(dep, depMode) - if depErr != nil { - return fmt.Errorf("failed to calculate hash for dependency %s with mode %d: %w", dep.Name, depMode, depErr) - } - - fmt.Fprintf(key, "import %s %s\n", dep.PkgPath, depHash) - } - return nil - } - - if err := calcDepsHash(HashModeNeedOnlySelf); err != nil { - return "", err - } - - curSum = key.Sum() - hashRes[HashModeNeedDirectDeps] = hex.EncodeToString(curSum[:]) - - if err := calcDepsHash(HashModeNeedAllDeps); err != nil { - return "", err - } - curSum = key.Sum() - hashRes[HashModeNeedAllDeps] = hex.EncodeToString(curSum[:]) - - if _, ok := hashRes[mode]; !ok { - return "", fmt.Errorf("invalid mode %d", mode) - } - - c.pkgHashes.Store(pkg, hashRes) - return hashRes[mode], nil -} diff --git a/vendor/github.com/golangci/golangci-lint/internal/renameio/readme.md b/vendor/github.com/golangci/golangci-lint/internal/renameio/readme.md deleted file mode 100644 index 36ec6ed49..000000000 --- a/vendor/github.com/golangci/golangci-lint/internal/renameio/readme.md +++ /dev/null @@ -1,10 +0,0 @@ -# renameio - -Extracted from go/src/cmd/go/internal/renameio/ -I don't know what version of Go this package was pulled from. - -Adapted for golangci-lint: -- https://github.com/golangci/golangci-lint/pull/699 -- https://github.com/golangci/golangci-lint/pull/808 -- https://github.com/golangci/golangci-lint/pull/1063 -- https://github.com/golangci/golangci-lint/pull/3204 diff --git a/vendor/github.com/golangci/golangci-lint/internal/renameio/renameio.go b/vendor/github.com/golangci/golangci-lint/internal/renameio/renameio.go deleted file mode 100644 index 2f88f4f7c..000000000 --- a/vendor/github.com/golangci/golangci-lint/internal/renameio/renameio.go +++ /dev/null @@ -1,93 +0,0 @@ -// Copyright 2018 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Package renameio writes files atomically by renaming temporary files. -package renameio - -import ( - "bytes" - "io" - "math/rand" - "os" - "path/filepath" - "strconv" - - "github.com/golangci/golangci-lint/internal/robustio" -) - -const patternSuffix = ".tmp" - -// Pattern returns a glob pattern that matches the unrenamed temporary files -// created when writing to filename. 
-func Pattern(filename string) string { - return filepath.Join(filepath.Dir(filename), filepath.Base(filename)+patternSuffix) -} - -// WriteFile is like os.WriteFile, but first writes data to an arbitrary -// file in the same directory as filename, then renames it atomically to the -// final name. -// -// That ensures that the final location, if it exists, is always a complete file. -func WriteFile(filename string, data []byte, perm os.FileMode) (err error) { - return WriteToFile(filename, bytes.NewReader(data), perm) -} - -// WriteToFile is a variant of WriteFile that accepts the data as an io.Reader -// instead of a slice. -func WriteToFile(filename string, data io.Reader, perm os.FileMode) (err error) { - f, err := tempFile(filepath.Dir(filename), filepath.Base(filename), perm) - if err != nil { - return err - } - defer func() { - // Only call os.Remove on f.Name() if we failed to rename it: otherwise, - // some other process may have created a new file with the same name after - // that. - if err != nil { - f.Close() - os.Remove(f.Name()) - } - }() - - if _, err := io.Copy(f, data); err != nil { - return err - } - // Sync the file before renaming it: otherwise, after a crash the reader may - // observe a 0-length file instead of the actual contents. - // See https://golang.org/issue/22397#issuecomment-380831736. - if err := f.Sync(); err != nil { - return err - } - if err := f.Close(); err != nil { - return err - } - - return robustio.Rename(f.Name(), filename) -} - -// tempFile creates a new temporary file with given permission bits. -func tempFile(dir, prefix string, perm os.FileMode) (f *os.File, err error) { - for i := 0; i < 10000; i++ { - name := filepath.Join(dir, prefix+strconv.Itoa(rand.Intn(1000000000))+patternSuffix) - f, err = os.OpenFile(name, os.O_RDWR|os.O_CREATE|os.O_EXCL, perm) - if os.IsExist(err) { - continue - } - break - } - return -} - -// ReadFile is like os.ReadFile, but on Windows retries spurious errors that -// may occur if the file is concurrently replaced. -// -// Errors are classified heuristically and retries are bounded, so even this -// function may occasionally return a spurious error on Windows. -// If so, the error will likely wrap one of: -// - syscall.ERROR_ACCESS_DENIED -// - syscall.ERROR_FILE_NOT_FOUND -// - internal/syscall/windows.ERROR_SHARING_VIOLATION -func ReadFile(filename string) ([]byte, error) { - return robustio.ReadFile(filename) -} diff --git a/vendor/github.com/golangci/golangci-lint/internal/robustio/readme.md b/vendor/github.com/golangci/golangci-lint/internal/robustio/readme.md deleted file mode 100644 index 7c7ba0483..000000000 --- a/vendor/github.com/golangci/golangci-lint/internal/robustio/readme.md +++ /dev/null @@ -1,6 +0,0 @@ -# robustio - -Extracted from go1.19.1/src/cmd/go/internal/robustio - -There is only one modification: -- ERROR_SHARING_VIOLATION extracted from go1.19.1/src/internal/syscall/windows/syscall_windows.go to remove the dependencies to `internal/syscall/windows` diff --git a/vendor/github.com/golangci/golangci-lint/internal/robustio/robustio.go b/vendor/github.com/golangci/golangci-lint/internal/robustio/robustio.go deleted file mode 100644 index 15b33773c..000000000 --- a/vendor/github.com/golangci/golangci-lint/internal/robustio/robustio.go +++ /dev/null @@ -1,53 +0,0 @@ -// Copyright 2019 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
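renameio.WriteToFile above makes writes crash-safe by writing to a temporary file in the destination directory, syncing and closing it, and only then renaming it over the final name, so readers never observe a partially written file. Below is a minimal stdlib-only sketch of the same write-temp/sync/rename pattern; the function name and file names are illustrative, and the Windows retry behaviour of the removed robustio helpers is omitted.

package main

import (
	"log"
	"os"
	"path/filepath"
)

// writeFileAtomic writes data to a temporary file next to filename,
// syncs and closes it, then renames it into place.
func writeFileAtomic(filename string, data []byte, perm os.FileMode) error {
	f, err := os.CreateTemp(filepath.Dir(filename), filepath.Base(filename)+".tmp")
	if err != nil {
		return err
	}
	defer os.Remove(f.Name()) // best-effort cleanup; a no-op once the rename has succeeded

	if err := f.Chmod(perm); err != nil {
		f.Close()
		return err
	}
	if _, err := f.Write(data); err != nil {
		f.Close()
		return err
	}
	// Sync before renaming so a crash cannot leave a zero-length file behind.
	if err := f.Sync(); err != nil {
		f.Close()
		return err
	}
	if err := f.Close(); err != nil {
		return err
	}
	return os.Rename(f.Name(), filename)
}

func main() {
	if err := writeFileAtomic("trim.txt", []byte("1700000000\n"), 0o666); err != nil {
		log.Fatal(err)
	}
}

The rename is what makes the update atomic on POSIX filesystems; the explicit Sync is there so the file is durable before it becomes visible under its final name.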
-
-// Package robustio wraps I/O functions that are prone to failure on Windows,
-// transparently retrying errors up to an arbitrary timeout.
-//
-// Errors are classified heuristically and retries are bounded, so the functions
-// in this package do not completely eliminate spurious errors. However, they do
-// substantially reduce their rate of occurrence in practice.
-package robustio
-
-// Rename is like os.Rename, but on Windows retries errors that may occur if the
-// file is concurrently read or overwritten.
-//
-// (See golang.org/issue/31247 and golang.org/issue/32188.)
-func Rename(oldpath, newpath string) error {
-	return rename(oldpath, newpath)
-}
-
-// ReadFile is like os.ReadFile, but on Windows retries errors that may
-// occur if the file is concurrently replaced.
-//
-// (See golang.org/issue/31247 and golang.org/issue/32188.)
-func ReadFile(filename string) ([]byte, error) {
-	return readFile(filename)
-}
-
-// RemoveAll is like os.RemoveAll, but on Windows retries errors that may occur
-// if an executable file in the directory has recently been executed.
-//
-// (See golang.org/issue/19491.)
-func RemoveAll(path string) error {
-	return removeAll(path)
-}
-
-// IsEphemeralError reports whether err is one of the errors that the functions
-// in this package attempt to mitigate.
-//
-// Errors considered ephemeral include:
-// - syscall.ERROR_ACCESS_DENIED
-// - syscall.ERROR_FILE_NOT_FOUND
-// - internal/syscall/windows.ERROR_SHARING_VIOLATION
-//
-// This set may be expanded in the future; programs must not rely on the
-// non-ephemerality of any given error.
-func IsEphemeralError(err error) bool {
-	return isEphemeralError(err)
-}
diff --git a/vendor/github.com/golangci/golangci-lint/internal/robustio/robustio_darwin.go b/vendor/github.com/golangci/golangci-lint/internal/robustio/robustio_darwin.go
deleted file mode 100644
index 99fd8ebc2..000000000
--- a/vendor/github.com/golangci/golangci-lint/internal/robustio/robustio_darwin.go
+++ /dev/null
@@ -1,21 +0,0 @@
-// Copyright 2019 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-package robustio
-
-import (
-	"errors"
-	"syscall"
-)
-
-const errFileNotFound = syscall.ENOENT
-
-// isEphemeralError returns true if err may be resolved by waiting.
-func isEphemeralError(err error) bool {
-	var errno syscall.Errno
-	if errors.As(err, &errno) {
-		return errno == errFileNotFound
-	}
-	return false
-}
diff --git a/vendor/github.com/golangci/golangci-lint/internal/robustio/robustio_flaky.go b/vendor/github.com/golangci/golangci-lint/internal/robustio/robustio_flaky.go
deleted file mode 100644
index c56e36ca6..000000000
--- a/vendor/github.com/golangci/golangci-lint/internal/robustio/robustio_flaky.go
+++ /dev/null
@@ -1,91 +0,0 @@
-// Copyright 2019 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-//go:build windows || darwin
-
-package robustio
-
-import (
-	"errors"
-	"math/rand"
-	"os"
-	"syscall"
-	"time"
-)
-
-const arbitraryTimeout = 2000 * time.Millisecond
-
-// retry retries ephemeral errors from f up to an arbitrary timeout
-// to work around filesystem flakiness on Windows and Darwin.
-func retry(f func() (err error, mayRetry bool)) error { - var ( - bestErr error - lowestErrno syscall.Errno - start time.Time - nextSleep time.Duration = 1 * time.Millisecond - ) - for { - err, mayRetry := f() - if err == nil || !mayRetry { - return err - } - - var errno syscall.Errno - if errors.As(err, &errno) && (lowestErrno == 0 || errno < lowestErrno) { - bestErr = err - lowestErrno = errno - } else if bestErr == nil { - bestErr = err - } - - if start.IsZero() { - start = time.Now() - } else if d := time.Since(start) + nextSleep; d >= arbitraryTimeout { - break - } - time.Sleep(nextSleep) - nextSleep += time.Duration(rand.Int63n(int64(nextSleep))) - } - - return bestErr -} - -// rename is like os.Rename, but retries ephemeral errors. -// -// On Windows it wraps os.Rename, which (as of 2019-06-04) uses MoveFileEx with -// MOVEFILE_REPLACE_EXISTING. -// -// Windows also provides a different system call, ReplaceFile, -// that provides similar semantics, but perhaps preserves more metadata. (The -// documentation on the differences between the two is very sparse.) -// -// Empirical error rates with MoveFileEx are lower under modest concurrency, so -// for now we're sticking with what the os package already provides. -func rename(oldpath, newpath string) (err error) { - return retry(func() (err error, mayRetry bool) { - err = os.Rename(oldpath, newpath) - return err, isEphemeralError(err) - }) -} - -// readFile is like os.ReadFile, but retries ephemeral errors. -func readFile(filename string) ([]byte, error) { - var b []byte - err := retry(func() (err error, mayRetry bool) { - b, err = os.ReadFile(filename) - - // Unlike in rename, we do not retry errFileNotFound here: it can occur - // as a spurious error, but the file may also genuinely not exist, so the - // increase in robustness is probably not worth the extra latency. - return err, isEphemeralError(err) && !errors.Is(err, errFileNotFound) - }) - return b, err -} - -func removeAll(path string) error { - return retry(func() (err error, mayRetry bool) { - err = os.RemoveAll(path) - return err, isEphemeralError(err) - }) -} diff --git a/vendor/github.com/golangci/golangci-lint/internal/robustio/robustio_other.go b/vendor/github.com/golangci/golangci-lint/internal/robustio/robustio_other.go deleted file mode 100644 index da9a46e4f..000000000 --- a/vendor/github.com/golangci/golangci-lint/internal/robustio/robustio_other.go +++ /dev/null @@ -1,27 +0,0 @@ -// Copyright 2019 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -//go:build !windows && !darwin - -package robustio - -import ( - "os" -) - -func rename(oldpath, newpath string) error { - return os.Rename(oldpath, newpath) -} - -func readFile(filename string) ([]byte, error) { - return os.ReadFile(filename) -} - -func removeAll(path string) error { - return os.RemoveAll(path) -} - -func isEphemeralError(err error) bool { - return false -} diff --git a/vendor/github.com/golangci/golangci-lint/internal/robustio/robustio_windows.go b/vendor/github.com/golangci/golangci-lint/internal/robustio/robustio_windows.go deleted file mode 100644 index fe1728954..000000000 --- a/vendor/github.com/golangci/golangci-lint/internal/robustio/robustio_windows.go +++ /dev/null @@ -1,30 +0,0 @@ -// Copyright 2019 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
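The retry loop removed above re-runs an operation while it keeps failing with an error classified as ephemeral, sleeping a randomly growing interval between attempts and giving up after a fixed overall timeout. A simplified stdlib-only sketch of that shape follows; the file name and the retry predicate are illustrative assumptions, and unlike the removed code this version simply returns the last error rather than tracking the lowest errno.

package main

import (
	"errors"
	"fmt"
	"math/rand"
	"os"
	"time"
)

// retry re-runs f until it succeeds, reports a non-retryable error, or the
// total timeout elapses, sleeping a randomly growing interval between attempts.
func retry(timeout time.Duration, f func() (err error, mayRetry bool)) error {
	start := time.Now()
	sleep := 1 * time.Millisecond
	for {
		err, mayRetry := f()
		if err == nil || !mayRetry {
			return err
		}
		if time.Since(start)+sleep >= timeout {
			return err
		}
		time.Sleep(sleep)
		sleep += time.Duration(rand.Int63n(int64(sleep))) // randomized growth avoids lock-step retries
	}
}

func main() {
	err := retry(2*time.Second, func() (error, bool) {
		_, err := os.ReadFile("maybe-locked.txt") // hypothetical contended file
		// Treat "does not exist" as permanent; anything else as possibly transient.
		return err, err != nil && !errors.Is(err, os.ErrNotExist)
	})
	fmt.Println("result:", err)
}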
- -package robustio - -import ( - "errors" - "syscall" -) - -const errFileNotFound = syscall.ERROR_FILE_NOT_FOUND - -// ERROR_SHARING_VIOLATION (ldez) extract from go1.19.1/src/internal/syscall/windows/syscall_windows.go. -// This is the only modification of this file. -const ERROR_SHARING_VIOLATION syscall.Errno = 32 - -// isEphemeralError returns true if err may be resolved by waiting. -func isEphemeralError(err error) bool { - var errno syscall.Errno - if errors.As(err, &errno) { - switch errno { - case syscall.ERROR_ACCESS_DENIED, - syscall.ERROR_FILE_NOT_FOUND, - ERROR_SHARING_VIOLATION: - return true - } - } - return false -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/commands/cache.go b/vendor/github.com/golangci/golangci-lint/pkg/commands/cache.go deleted file mode 100644 index 6fdaebaed..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/commands/cache.go +++ /dev/null @@ -1,72 +0,0 @@ -package commands - -import ( - "fmt" - "os" - "path/filepath" - - "github.com/spf13/cobra" - - "github.com/golangci/golangci-lint/internal/cache" - "github.com/golangci/golangci-lint/pkg/fsutils" - "github.com/golangci/golangci-lint/pkg/logutils" -) - -func (e *Executor) initCache() { - cacheCmd := &cobra.Command{ - Use: "cache", - Short: "Cache control and information", - Args: cobra.NoArgs, - RunE: func(cmd *cobra.Command, _ []string) error { - return cmd.Help() - }, - } - e.rootCmd.AddCommand(cacheCmd) - - cacheCmd.AddCommand(&cobra.Command{ - Use: "clean", - Short: "Clean cache", - Args: cobra.NoArgs, - ValidArgsFunction: cobra.NoFileCompletions, - RunE: e.executeCleanCache, - }) - cacheCmd.AddCommand(&cobra.Command{ - Use: "status", - Short: "Show cache status", - Args: cobra.NoArgs, - ValidArgsFunction: cobra.NoFileCompletions, - Run: e.executeCacheStatus, - }) - - // TODO: add trim command? 
-} - -func (e *Executor) executeCleanCache(_ *cobra.Command, _ []string) error { - cacheDir := cache.DefaultDir() - if err := os.RemoveAll(cacheDir); err != nil { - return fmt.Errorf("failed to remove dir %s: %w", cacheDir, err) - } - - return nil -} - -func (e *Executor) executeCacheStatus(_ *cobra.Command, _ []string) { - cacheDir := cache.DefaultDir() - fmt.Fprintf(logutils.StdOut, "Dir: %s\n", cacheDir) - - cacheSizeBytes, err := dirSizeBytes(cacheDir) - if err == nil { - fmt.Fprintf(logutils.StdOut, "Size: %s\n", fsutils.PrettifyBytesCount(cacheSizeBytes)) - } -} - -func dirSizeBytes(path string) (int64, error) { - var size int64 - err := filepath.Walk(path, func(_ string, info os.FileInfo, err error) error { - if err == nil && !info.IsDir() { - size += info.Size() - } - return err - }) - return size, err -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/commands/config.go b/vendor/github.com/golangci/golangci-lint/pkg/commands/config.go deleted file mode 100644 index 45c4fcd77..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/commands/config.go +++ /dev/null @@ -1,73 +0,0 @@ -package commands - -import ( - "fmt" - "os" - - "github.com/spf13/cobra" - "github.com/spf13/pflag" - "github.com/spf13/viper" - - "github.com/golangci/golangci-lint/pkg/config" - "github.com/golangci/golangci-lint/pkg/exitcodes" - "github.com/golangci/golangci-lint/pkg/fsutils" -) - -func (e *Executor) initConfig() { - cmd := &cobra.Command{ - Use: "config", - Short: "Config file information", - Args: cobra.NoArgs, - RunE: func(cmd *cobra.Command, _ []string) error { - return cmd.Help() - }, - } - e.rootCmd.AddCommand(cmd) - - pathCmd := &cobra.Command{ - Use: "path", - Short: "Print used config path", - Args: cobra.NoArgs, - ValidArgsFunction: cobra.NoFileCompletions, - Run: e.executePathCmd, - } - - fs := pathCmd.Flags() - fs.SortFlags = false // sort them as they are defined here - - initConfigFileFlagSet(fs, &e.cfg.Run) - - cmd.AddCommand(pathCmd) -} - -// getUsedConfig returns the resolved path to the golangci config file, or the empty string -// if no configuration could be found. 
-func (e *Executor) getUsedConfig() string { - usedConfigFile := viper.ConfigFileUsed() - if usedConfigFile == "" { - return "" - } - - prettyUsedConfigFile, err := fsutils.ShortestRelPath(usedConfigFile, "") - if err != nil { - e.log.Warnf("Can't pretty print config file path: %s", err) - return usedConfigFile - } - - return prettyUsedConfigFile -} - -func (e *Executor) executePathCmd(_ *cobra.Command, _ []string) { - usedConfigFile := e.getUsedConfig() - if usedConfigFile == "" { - e.log.Warnf("No config file detected") - os.Exit(exitcodes.NoConfigFileDetected) - } - - fmt.Println(usedConfigFile) -} - -func initConfigFileFlagSet(fs *pflag.FlagSet, cfg *config.Run) { - fs.StringVarP(&cfg.Config, "config", "c", "", wh("Read config from file path `PATH`")) - fs.BoolVar(&cfg.NoConfig, "no-config", false, wh("Don't read config file")) -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/commands/executor.go b/vendor/github.com/golangci/golangci-lint/pkg/commands/executor.go deleted file mode 100644 index d241f5656..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/commands/executor.go +++ /dev/null @@ -1,254 +0,0 @@ -package commands - -import ( - "bytes" - "context" - "crypto/sha256" - "errors" - "fmt" - "io" - "os" - "path/filepath" - "strings" - "time" - - "github.com/fatih/color" - "github.com/gofrs/flock" - "github.com/spf13/cobra" - "github.com/spf13/pflag" - "gopkg.in/yaml.v3" - - "github.com/golangci/golangci-lint/internal/cache" - "github.com/golangci/golangci-lint/internal/pkgcache" - "github.com/golangci/golangci-lint/pkg/config" - "github.com/golangci/golangci-lint/pkg/fsutils" - "github.com/golangci/golangci-lint/pkg/golinters/goanalysis/load" - "github.com/golangci/golangci-lint/pkg/goutil" - "github.com/golangci/golangci-lint/pkg/lint" - "github.com/golangci/golangci-lint/pkg/lint/lintersdb" - "github.com/golangci/golangci-lint/pkg/logutils" - "github.com/golangci/golangci-lint/pkg/report" - "github.com/golangci/golangci-lint/pkg/timeutils" -) - -type BuildInfo struct { - GoVersion string `json:"goVersion"` - Version string `json:"version"` - Commit string `json:"commit"` - Date string `json:"date"` -} - -type Executor struct { - rootCmd *cobra.Command - runCmd *cobra.Command - lintersCmd *cobra.Command - - exitCode int - buildInfo BuildInfo - - cfg *config.Config // cfg is the unmarshaled data from the golangci config file. - log logutils.Log - reportData report.Data - DBManager *lintersdb.Manager - EnabledLintersSet *lintersdb.EnabledSet - contextLoader *lint.ContextLoader - goenv *goutil.Env - fileCache *fsutils.FileCache - lineCache *fsutils.LineCache - pkgCache *pkgcache.Cache - debugf logutils.DebugFunc - sw *timeutils.Stopwatch - - loadGuard *load.Guard - flock *flock.Flock -} - -// NewExecutor creates and initializes a new command executor. 
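The removed cache and config commands follow the same cobra pattern: a parent command whose RunE merely prints help, with the real work in subcommands. A minimal sketch of that wiring (command names reused for illustration only, and the status output is a placeholder for cache.DefaultDir()):

package main

import (
	"fmt"

	"github.com/spf13/cobra"
)

func main() {
	root := &cobra.Command{Use: "golangci-lint"}

	cacheCmd := &cobra.Command{
		Use:   "cache",
		Short: "Cache control and information",
		Args:  cobra.NoArgs,
		// A bare "cache" invocation only prints the subcommand help.
		RunE: func(cmd *cobra.Command, _ []string) error {
			return cmd.Help()
		},
	}
	cacheCmd.AddCommand(&cobra.Command{
		Use:   "status",
		Short: "Show cache status",
		Args:  cobra.NoArgs,
		Run: func(_ *cobra.Command, _ []string) {
			fmt.Println("Dir: <cache dir>") // placeholder
		},
	})
	root.AddCommand(cacheCmd)

	root.SetArgs([]string{"cache", "status"})
	_ = root.Execute()
}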
-func NewExecutor(buildInfo BuildInfo) *Executor { - startedAt := time.Now() - e := &Executor{ - cfg: config.NewDefault(), - buildInfo: buildInfo, - DBManager: lintersdb.NewManager(nil, nil), - debugf: logutils.Debug(logutils.DebugKeyExec), - } - - e.debugf("Starting execution...") - e.log = report.NewLogWrapper(logutils.NewStderrLog(logutils.DebugKeyEmpty), &e.reportData) - - // to setup log level early we need to parse config from command line extra time to - // find `-v` option - commandLineCfg, err := e.getConfigForCommandLine() - if err != nil && !errors.Is(err, pflag.ErrHelp) { - e.log.Fatalf("Can't get config for command line: %s", err) - } - if commandLineCfg != nil { - logutils.SetupVerboseLog(e.log, commandLineCfg.Run.IsVerbose) - - switch commandLineCfg.Output.Color { - case "always": - color.NoColor = false - case "never": - color.NoColor = true - case "auto": - // nothing - default: - e.log.Fatalf("invalid value %q for --color; must be 'always', 'auto', or 'never'", commandLineCfg.Output.Color) - } - } - - // init of commands must be done before config file reading because - // init sets config with the default values of flags - e.initRoot() - e.initRun() - e.initHelp() - e.initLinters() - e.initConfig() - e.initVersion() - e.initCache() - - // init e.cfg by values from config: flags parse will see these values - // like the default ones. It will overwrite them only if the same option - // is found in command-line: it's ok, command-line has higher priority. - - r := config.NewFileReader(e.cfg, commandLineCfg, e.log.Child(logutils.DebugKeyConfigReader)) - if err = r.Read(); err != nil { - e.log.Fatalf("Can't read config: %s", err) - } - - if (commandLineCfg == nil || commandLineCfg.Run.Go == "") && e.cfg != nil && e.cfg.Run.Go == "" { - e.cfg.Run.Go = config.DetectGoVersion() - } - - // recreate after getting config - e.DBManager = lintersdb.NewManager(e.cfg, e.log) - - // Slice options must be explicitly set for proper merging of config and command-line options. 
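As the comments in NewExecutor note, flags are registered before the config file is read so the file can overwrite their defaults, while anything passed explicitly on the command line keeps the highest priority. pflag's Changed flag is what makes that distinction observable; a minimal sketch, with a hypothetical flag name and values:

package main

import (
	"fmt"

	"github.com/spf13/pflag"
)

func main() {
	fs := pflag.NewFlagSet("demo", pflag.ContinueOnError)
	timeout := fs.String("timeout", "1m", "timeout for total work")

	_ = fs.Parse([]string{"--timeout", "5m"}) // user passed the flag explicitly

	// Apply the config-file value only when the flag was left at its default.
	if !fs.Changed("timeout") {
		*timeout = "10m" // pretend this came from .golangci.yml
	}

	fmt.Println(*timeout) // "5m": the command line wins over the config file
}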
- fixSlicesFlags(e.runCmd.Flags()) - fixSlicesFlags(e.lintersCmd.Flags()) - - e.EnabledLintersSet = lintersdb.NewEnabledSet(e.DBManager, - lintersdb.NewValidator(e.DBManager), e.log.Child(logutils.DebugKeyLintersDB), e.cfg) - e.goenv = goutil.NewEnv(e.log.Child(logutils.DebugKeyGoEnv)) - e.fileCache = fsutils.NewFileCache() - e.lineCache = fsutils.NewLineCache(e.fileCache) - - e.sw = timeutils.NewStopwatch("pkgcache", e.log.Child(logutils.DebugKeyStopwatch)) - e.pkgCache, err = pkgcache.NewCache(e.sw, e.log.Child(logutils.DebugKeyPkgCache)) - if err != nil { - e.log.Fatalf("Failed to build packages cache: %s", err) - } - e.loadGuard = load.NewGuard() - e.contextLoader = lint.NewContextLoader(e.cfg, e.log.Child(logutils.DebugKeyLoader), e.goenv, - e.lineCache, e.fileCache, e.pkgCache, e.loadGuard) - if err = e.initHashSalt(buildInfo.Version); err != nil { - e.log.Fatalf("Failed to init hash salt: %s", err) - } - e.debugf("Initialized executor in %s", time.Since(startedAt)) - return e -} - -func (e *Executor) Execute() error { - return e.rootCmd.Execute() -} - -func (e *Executor) initHashSalt(version string) error { - binSalt, err := computeBinarySalt(version) - if err != nil { - return fmt.Errorf("failed to calculate binary salt: %w", err) - } - - configSalt, err := computeConfigSalt(e.cfg) - if err != nil { - return fmt.Errorf("failed to calculate config salt: %w", err) - } - - b := bytes.NewBuffer(binSalt) - b.Write(configSalt) - cache.SetSalt(b.Bytes()) - return nil -} - -func computeBinarySalt(version string) ([]byte, error) { - if version != "" && version != "(devel)" { - return []byte(version), nil - } - - if logutils.HaveDebugTag(logutils.DebugKeyBinSalt) { - return []byte("debug"), nil - } - - p, err := os.Executable() - if err != nil { - return nil, err - } - f, err := os.Open(p) - if err != nil { - return nil, err - } - defer f.Close() - h := sha256.New() - if _, err := io.Copy(h, f); err != nil { - return nil, err - } - return h.Sum(nil), nil -} - -func computeConfigSalt(cfg *config.Config) ([]byte, error) { - // We don't hash all config fields to reduce meaningless cache - // invalidations. At least, it has a huge impact on tests speed. 
- - lintersSettingsBytes, err := yaml.Marshal(cfg.LintersSettings) - if err != nil { - return nil, fmt.Errorf("failed to json marshal config linter settings: %w", err) - } - - configData := bytes.NewBufferString("linters-settings=") - configData.Write(lintersSettingsBytes) - configData.WriteString("\nbuild-tags=%s" + strings.Join(cfg.Run.BuildTags, ",")) - - h := sha256.New() - if _, err := h.Write(configData.Bytes()); err != nil { - return nil, err - } - return h.Sum(nil), nil -} - -func (e *Executor) acquireFileLock() bool { - if e.cfg.Run.AllowParallelRunners { - e.debugf("Parallel runners are allowed, no locking") - return true - } - - lockFile := filepath.Join(os.TempDir(), "golangci-lint.lock") - e.debugf("Locking on file %s...", lockFile) - f := flock.New(lockFile) - const retryDelay = time.Second - - ctx := context.Background() - if !e.cfg.Run.AllowSerialRunners { - const totalTimeout = 5 * time.Second - var cancel context.CancelFunc - ctx, cancel = context.WithTimeout(ctx, totalTimeout) - defer cancel() - } - if ok, _ := f.TryLockContext(ctx, retryDelay); !ok { - return false - } - - e.flock = f - return true -} - -func (e *Executor) releaseFileLock() { - if e.cfg.Run.AllowParallelRunners { - return - } - - if err := e.flock.Unlock(); err != nil { - e.debugf("Failed to unlock on file: %s", err) - } - if err := os.Remove(e.flock.Path()); err != nil { - e.debugf("Failed to remove lock file: %s", err) - } -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/commands/help.go b/vendor/github.com/golangci/golangci-lint/pkg/commands/help.go deleted file mode 100644 index a06d508f2..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/commands/help.go +++ /dev/null @@ -1,96 +0,0 @@ -package commands - -import ( - "fmt" - "sort" - "strings" - - "github.com/fatih/color" - "github.com/spf13/cobra" - - "github.com/golangci/golangci-lint/pkg/lint/linter" - "github.com/golangci/golangci-lint/pkg/logutils" -) - -func (e *Executor) initHelp() { - helpCmd := &cobra.Command{ - Use: "help", - Short: "Help", - Args: cobra.NoArgs, - RunE: func(cmd *cobra.Command, _ []string) error { - return cmd.Help() - }, - } - e.rootCmd.SetHelpCommand(helpCmd) - - lintersHelpCmd := &cobra.Command{ - Use: "linters", - Short: "Help about linters", - Args: cobra.NoArgs, - ValidArgsFunction: cobra.NoFileCompletions, - Run: e.executeLintersHelp, - } - helpCmd.AddCommand(lintersHelpCmd) -} - -func printLinterConfigs(lcs []*linter.Config) { - sort.Slice(lcs, func(i, j int) bool { - return lcs[i].Name() < lcs[j].Name() - }) - for _, lc := range lcs { - altNamesStr := "" - if len(lc.AlternativeNames) != 0 { - altNamesStr = fmt.Sprintf(" (%s)", strings.Join(lc.AlternativeNames, ", ")) - } - - // If the linter description spans multiple lines, truncate everything following the first newline - linterDescription := lc.Linter.Desc() - firstNewline := strings.IndexRune(linterDescription, '\n') - if firstNewline > 0 { - linterDescription = linterDescription[:firstNewline] - } - - deprecatedMark := "" - if lc.IsDeprecated() { - deprecatedMark = " [" + color.RedString("deprecated") + "]" - } - - fmt.Fprintf(logutils.StdOut, "%s%s%s: %s [fast: %t, auto-fix: %t]\n", color.YellowString(lc.Name()), - altNamesStr, deprecatedMark, linterDescription, !lc.IsSlowLinter(), lc.CanAutoFix) - } -} - -func (e *Executor) executeLintersHelp(_ *cobra.Command, _ []string) { - var enabledLCs, disabledLCs []*linter.Config - for _, lc := range e.DBManager.GetAllSupportedLinterConfigs() { - if lc.Internal { - continue - } - - if 
lc.EnabledByDefault { - enabledLCs = append(enabledLCs, lc) - } else { - disabledLCs = append(disabledLCs, lc) - } - } - - color.Green("Enabled by default linters:\n") - printLinterConfigs(enabledLCs) - color.Red("\nDisabled by default linters:\n") - printLinterConfigs(disabledLCs) - - color.Green("\nLinters presets:") - for _, p := range e.DBManager.AllPresets() { - linters := e.DBManager.GetAllLinterConfigsForPreset(p) - var linterNames []string - for _, lc := range linters { - if lc.Internal { - continue - } - - linterNames = append(linterNames, lc.Name()) - } - sort.Strings(linterNames) - fmt.Fprintf(logutils.StdOut, "%s: %s\n", color.YellowString(p), strings.Join(linterNames, ", ")) - } -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/commands/linters.go b/vendor/github.com/golangci/golangci-lint/pkg/commands/linters.go deleted file mode 100644 index 69df21154..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/commands/linters.go +++ /dev/null @@ -1,72 +0,0 @@ -package commands - -import ( - "fmt" - "strings" - - "github.com/fatih/color" - "github.com/spf13/cobra" - "github.com/spf13/pflag" - - "github.com/golangci/golangci-lint/pkg/config" - "github.com/golangci/golangci-lint/pkg/lint/linter" -) - -func (e *Executor) initLinters() { - e.lintersCmd = &cobra.Command{ - Use: "linters", - Short: "List current linters configuration", - Args: cobra.NoArgs, - ValidArgsFunction: cobra.NoFileCompletions, - RunE: e.executeLinters, - } - - fs := e.lintersCmd.Flags() - fs.SortFlags = false // sort them as they are defined here - - initConfigFileFlagSet(fs, &e.cfg.Run) - e.initLintersFlagSet(fs, &e.cfg.Linters) - - e.rootCmd.AddCommand(e.lintersCmd) -} - -func (e *Executor) initLintersFlagSet(fs *pflag.FlagSet, cfg *config.Linters) { - fs.StringSliceVarP(&cfg.Disable, "disable", "D", nil, wh("Disable specific linter")) - fs.BoolVar(&cfg.DisableAll, "disable-all", false, wh("Disable all linters")) - fs.StringSliceVarP(&cfg.Enable, "enable", "E", nil, wh("Enable specific linter")) - fs.BoolVar(&cfg.EnableAll, "enable-all", false, wh("Enable all linters")) - fs.BoolVar(&cfg.Fast, "fast", false, wh("Enable only fast linters from enabled linters set (first run won't be fast)")) - fs.StringSliceVarP(&cfg.Presets, "presets", "p", nil, - wh(fmt.Sprintf("Enable presets (%s) of linters. Run 'golangci-lint help linters' to see "+ - "them. This option implies option --disable-all", strings.Join(e.DBManager.AllPresets(), "|")))) -} - -// executeLinters runs the 'linters' CLI command, which displays the supported linters. 
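The acquireFileLock/releaseFileLock helpers removed from executor.go above implement single-instance locking with gofrs/flock: poll for the lock at a fixed interval and, unless serial runners are allowed, give up after a short context timeout. A minimal sketch of that pattern (lock file name and timeouts are illustrative):

package main

import (
	"context"
	"fmt"
	"os"
	"path/filepath"
	"time"

	"github.com/gofrs/flock"
)

func main() {
	lockFile := filepath.Join(os.TempDir(), "demo.lock") // stand-in for golangci-lint.lock
	f := flock.New(lockFile)

	// Poll every second, giving up after five seconds overall,
	// mirroring the non-serial path of acquireFileLock.
	ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second)
	defer cancel()

	ok, err := f.TryLockContext(ctx, time.Second)
	if err != nil || !ok {
		fmt.Println("another instance already holds the lock")
		return
	}
	defer func() {
		_ = f.Unlock()
		_ = os.Remove(f.Path())
	}()

	fmt.Println("lock acquired:", f.Path())
}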
-func (e *Executor) executeLinters(_ *cobra.Command, _ []string) error { - enabledLintersMap, err := e.EnabledLintersSet.GetEnabledLintersMap() - if err != nil { - return fmt.Errorf("can't get enabled linters: %w", err) - } - - var enabledLinters []*linter.Config - var disabledLCs []*linter.Config - - for _, lc := range e.DBManager.GetAllSupportedLinterConfigs() { - if lc.Internal { - continue - } - - if enabledLintersMap[lc.Name()] == nil { - disabledLCs = append(disabledLCs, lc) - } else { - enabledLinters = append(enabledLinters, lc) - } - } - - color.Green("Enabled by your configuration linters:\n") - printLinterConfigs(enabledLinters) - color.Red("\nDisabled by your configuration linters:\n") - printLinterConfigs(disabledLCs) - - return nil -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/commands/root.go b/vendor/github.com/golangci/golangci-lint/pkg/commands/root.go deleted file mode 100644 index 4425be10f..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/commands/root.go +++ /dev/null @@ -1,175 +0,0 @@ -package commands - -import ( - "fmt" - "os" - "runtime" - "runtime/pprof" - "runtime/trace" - "strconv" - - "github.com/spf13/cobra" - "github.com/spf13/pflag" - - "github.com/golangci/golangci-lint/pkg/config" - "github.com/golangci/golangci-lint/pkg/exitcodes" - "github.com/golangci/golangci-lint/pkg/logutils" -) - -const ( - // envHelpRun value: "1". - envHelpRun = "HELP_RUN" - envMemProfileRate = "GL_MEM_PROFILE_RATE" -) - -func (e *Executor) persistentPreRun(_ *cobra.Command, _ []string) error { - if e.cfg.Run.PrintVersion { - _ = printVersion(logutils.StdOut, e.buildInfo) - os.Exit(exitcodes.Success) // a return nil is not enough to stop the process because we are inside the `preRun`. - } - - runtime.GOMAXPROCS(e.cfg.Run.Concurrency) - - if e.cfg.Run.CPUProfilePath != "" { - f, err := os.Create(e.cfg.Run.CPUProfilePath) - if err != nil { - return fmt.Errorf("can't create file %s: %w", e.cfg.Run.CPUProfilePath, err) - } - if err := pprof.StartCPUProfile(f); err != nil { - return fmt.Errorf("can't start CPU profiling: %w", err) - } - } - - if e.cfg.Run.MemProfilePath != "" { - if rate := os.Getenv(envMemProfileRate); rate != "" { - runtime.MemProfileRate, _ = strconv.Atoi(rate) - } - } - - if e.cfg.Run.TracePath != "" { - f, err := os.Create(e.cfg.Run.TracePath) - if err != nil { - return fmt.Errorf("can't create file %s: %w", e.cfg.Run.TracePath, err) - } - if err = trace.Start(f); err != nil { - return fmt.Errorf("can't start tracing: %w", err) - } - } - - return nil -} - -func (e *Executor) persistentPostRun(_ *cobra.Command, _ []string) error { - if e.cfg.Run.CPUProfilePath != "" { - pprof.StopCPUProfile() - } - - if e.cfg.Run.MemProfilePath != "" { - f, err := os.Create(e.cfg.Run.MemProfilePath) - if err != nil { - return fmt.Errorf("can't create file %s: %w", e.cfg.Run.MemProfilePath, err) - } - - var ms runtime.MemStats - runtime.ReadMemStats(&ms) - printMemStats(&ms, e.log) - - if err := pprof.WriteHeapProfile(f); err != nil { - return fmt.Errorf("cCan't write heap profile: %w", err) - } - _ = f.Close() - } - - if e.cfg.Run.TracePath != "" { - trace.Stop() - } - - os.Exit(e.exitCode) - - return nil -} - -func printMemStats(ms *runtime.MemStats, logger logutils.Log) { - logger.Infof("Mem stats: alloc=%s total_alloc=%s sys=%s "+ - "heap_alloc=%s heap_sys=%s heap_idle=%s heap_released=%s heap_in_use=%s "+ - "stack_in_use=%s stack_sys=%s "+ - "mspan_sys=%s mcache_sys=%s buck_hash_sys=%s gc_sys=%s other_sys=%s "+ - "mallocs_n=%d frees_n=%d 
heap_objects_n=%d gc_cpu_fraction=%.2f", - formatMemory(ms.Alloc), formatMemory(ms.TotalAlloc), formatMemory(ms.Sys), - formatMemory(ms.HeapAlloc), formatMemory(ms.HeapSys), - formatMemory(ms.HeapIdle), formatMemory(ms.HeapReleased), formatMemory(ms.HeapInuse), - formatMemory(ms.StackInuse), formatMemory(ms.StackSys), - formatMemory(ms.MSpanSys), formatMemory(ms.MCacheSys), formatMemory(ms.BuckHashSys), - formatMemory(ms.GCSys), formatMemory(ms.OtherSys), - ms.Mallocs, ms.Frees, ms.HeapObjects, ms.GCCPUFraction) -} - -func formatMemory(memBytes uint64) string { - const Kb = 1024 - const Mb = Kb * 1024 - - if memBytes < Kb { - return fmt.Sprintf("%db", memBytes) - } - if memBytes < Mb { - return fmt.Sprintf("%dkb", memBytes/Kb) - } - return fmt.Sprintf("%dmb", memBytes/Mb) -} - -func getDefaultConcurrency() int { - if os.Getenv(envHelpRun) == "1" { - // Make stable concurrency for generating help documentation. - const prettyConcurrency = 8 - return prettyConcurrency - } - - return runtime.NumCPU() -} - -func (e *Executor) initRoot() { - rootCmd := &cobra.Command{ - Use: "golangci-lint", - Short: "golangci-lint is a smart linters runner.", - Long: `Smart, fast linters runner.`, - Args: cobra.NoArgs, - RunE: func(cmd *cobra.Command, _ []string) error { - return cmd.Help() - }, - PersistentPreRunE: e.persistentPreRun, - PersistentPostRunE: e.persistentPostRun, - } - - initRootFlagSet(rootCmd.PersistentFlags(), e.cfg, e.needVersionOption()) - e.rootCmd = rootCmd -} - -func (e *Executor) needVersionOption() bool { - return e.buildInfo.Date != "" -} - -func initRootFlagSet(fs *pflag.FlagSet, cfg *config.Config, needVersionOption bool) { - fs.BoolVarP(&cfg.Run.IsVerbose, "verbose", "v", false, wh("Verbose output")) - - var silent bool - fs.BoolVarP(&silent, "silent", "s", false, wh("Disables congrats outputs")) - if err := fs.MarkHidden("silent"); err != nil { - panic(err) - } - err := fs.MarkDeprecated("silent", - "now golangci-lint by default is silent: it doesn't print Congrats message") - if err != nil { - panic(err) - } - - fs.StringVar(&cfg.Run.CPUProfilePath, "cpu-profile-path", "", wh("Path to CPU profile output file")) - fs.StringVar(&cfg.Run.MemProfilePath, "mem-profile-path", "", wh("Path to memory profile output file")) - fs.StringVar(&cfg.Run.TracePath, "trace-path", "", wh("Path to trace output file")) - fs.IntVarP(&cfg.Run.Concurrency, "concurrency", "j", getDefaultConcurrency(), - wh("Number of CPUs to use (Default: number of logical CPUs)")) - if needVersionOption { - fs.BoolVar(&cfg.Run.PrintVersion, "version", false, wh("Print version")) - } - - fs.StringVar(&cfg.Output.Color, "color", "auto", wh("Use color when printing; can be 'always', 'auto', or 'never'")) -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/commands/run.go b/vendor/github.com/golangci/golangci-lint/pkg/commands/run.go deleted file mode 100644 index 5c7083c30..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/commands/run.go +++ /dev/null @@ -1,634 +0,0 @@ -package commands - -import ( - "context" - "errors" - "fmt" - "io" - "log" - "os" - "runtime" - "sort" - "strings" - "time" - - "github.com/fatih/color" - "github.com/spf13/cobra" - "github.com/spf13/pflag" - "golang.org/x/exp/maps" - - "github.com/golangci/golangci-lint/pkg/config" - "github.com/golangci/golangci-lint/pkg/exitcodes" - "github.com/golangci/golangci-lint/pkg/lint" - "github.com/golangci/golangci-lint/pkg/lint/lintersdb" - "github.com/golangci/golangci-lint/pkg/logutils" - "github.com/golangci/golangci-lint/pkg/packages" 
- "github.com/golangci/golangci-lint/pkg/printers" - "github.com/golangci/golangci-lint/pkg/result" -) - -const defaultFileMode = 0644 - -const defaultTimeout = time.Minute - -const ( - // envFailOnWarnings value: "1" - envFailOnWarnings = "FAIL_ON_WARNINGS" - // envMemLogEvery value: "1" - envMemLogEvery = "GL_MEM_LOG_EVERY" -) - -//nolint:funlen,gomnd -func (e *Executor) initFlagSet(fs *pflag.FlagSet, cfg *config.Config, isFinalInit bool) { - hideFlag := func(name string) { - if err := fs.MarkHidden(name); err != nil { - panic(err) - } - - // we run initFlagSet multiple times, but we wouldn't like to see deprecation message multiple times - if isFinalInit { - const deprecateMessage = "flag will be removed soon, please, use .golangci.yml config" - if err := fs.MarkDeprecated(name, deprecateMessage); err != nil { - panic(err) - } - } - } - - // Config file config - rc := &cfg.Run - initConfigFileFlagSet(fs, rc) - - // Output config - oc := &cfg.Output - fs.StringVar(&oc.Format, "out-format", - config.OutFormatColoredLineNumber, - wh(fmt.Sprintf("Format of output: %s", strings.Join(config.OutFormats, "|")))) - fs.BoolVar(&oc.PrintIssuedLine, "print-issued-lines", true, wh("Print lines of code with issue")) - fs.BoolVar(&oc.PrintLinterName, "print-linter-name", true, wh("Print linter name in issue line")) - fs.BoolVar(&oc.UniqByLine, "uniq-by-line", true, wh("Make issues output unique by line")) - fs.BoolVar(&oc.SortResults, "sort-results", false, wh("Sort linter results")) - fs.BoolVar(&oc.PrintWelcomeMessage, "print-welcome", false, wh("Print welcome message")) - fs.StringVar(&oc.PathPrefix, "path-prefix", "", wh("Path prefix to add to output")) - hideFlag("print-welcome") // no longer used - - fs.BoolVar(&cfg.InternalCmdTest, "internal-cmd-test", false, wh("Option is used only for testing golangci-lint command, don't use it")) - if err := fs.MarkHidden("internal-cmd-test"); err != nil { - panic(err) - } - - // Run config - fs.StringVar(&rc.ModulesDownloadMode, "modules-download-mode", "", - wh("Modules download mode. If not empty, passed as -mod= to go tools")) - fs.IntVar(&rc.ExitCodeIfIssuesFound, "issues-exit-code", - exitcodes.IssuesFound, wh("Exit code when issues were found")) - fs.StringVar(&rc.Go, "go", "", wh("Targeted Go version")) - fs.StringSliceVar(&rc.BuildTags, "build-tags", nil, wh("Build tags")) - - fs.DurationVar(&rc.Timeout, "deadline", defaultTimeout, wh("Deadline for total work")) - if err := fs.MarkHidden("deadline"); err != nil { - panic(err) - } - fs.DurationVar(&rc.Timeout, "timeout", defaultTimeout, wh("Timeout for total work")) - - fs.BoolVar(&rc.AnalyzeTests, "tests", true, wh("Analyze tests (*_test.go)")) - fs.BoolVar(&rc.PrintResourcesUsage, "print-resources-usage", false, - wh("Print avg and max memory usage of golangci-lint and total time")) - fs.StringSliceVar(&rc.SkipDirs, "skip-dirs", nil, wh("Regexps of directories to skip")) - fs.BoolVar(&rc.UseDefaultSkipDirs, "skip-dirs-use-default", true, getDefaultDirectoryExcludeHelp()) - fs.StringSliceVar(&rc.SkipFiles, "skip-files", nil, wh("Regexps of files to skip")) - - const allowParallelDesc = "Allow multiple parallel golangci-lint instances running. " + - "If false (default) - golangci-lint acquires file lock on start." - fs.BoolVar(&rc.AllowParallelRunners, "allow-parallel-runners", false, wh(allowParallelDesc)) - const allowSerialDesc = "Allow multiple golangci-lint instances running, but serialize them around a lock. 
" + - "If false (default) - golangci-lint exits with an error if it fails to acquire file lock on start." - fs.BoolVar(&rc.AllowSerialRunners, "allow-serial-runners", false, wh(allowSerialDesc)) - fs.BoolVar(&rc.ShowStats, "show-stats", false, wh("Show statistics per linter")) - - // Linters settings config - lsc := &cfg.LintersSettings - - // Hide all linters settings flags: they were initially visible, - // but when number of linters started to grow it became obvious that - // we can't fill 90% of flags by linters settings: common flags became hard to find. - // New linters settings should be done only through config file. - fs.BoolVar(&lsc.Errcheck.CheckTypeAssertions, "errcheck.check-type-assertions", - false, "Errcheck: check for ignored type assertion results") - hideFlag("errcheck.check-type-assertions") - fs.BoolVar(&lsc.Errcheck.CheckAssignToBlank, "errcheck.check-blank", false, - "Errcheck: check for errors assigned to blank identifier: _ = errFunc()") - hideFlag("errcheck.check-blank") - fs.StringVar(&lsc.Errcheck.Exclude, "errcheck.exclude", "", - "Path to a file containing a list of functions to exclude from checking") - hideFlag("errcheck.exclude") - fs.StringVar(&lsc.Errcheck.Ignore, "errcheck.ignore", "fmt:.*", - `Comma-separated list of pairs of the form pkg:regex. The regex is used to ignore names within pkg`) - hideFlag("errcheck.ignore") - - fs.BoolVar(&lsc.Govet.CheckShadowing, "govet.check-shadowing", false, - "Govet: check for shadowed variables") - hideFlag("govet.check-shadowing") - - fs.Float64Var(&lsc.Golint.MinConfidence, "golint.min-confidence", 0.8, - "Golint: minimum confidence of a problem to print it") - hideFlag("golint.min-confidence") - - fs.BoolVar(&lsc.Gofmt.Simplify, "gofmt.simplify", true, "Gofmt: simplify code") - hideFlag("gofmt.simplify") - - fs.IntVar(&lsc.Gocyclo.MinComplexity, "gocyclo.min-complexity", - 30, "Minimal complexity of function to report it") - hideFlag("gocyclo.min-complexity") - - fs.BoolVar(&lsc.Maligned.SuggestNewOrder, "maligned.suggest-new", false, - "Maligned: print suggested more optimal struct fields ordering") - hideFlag("maligned.suggest-new") - - fs.IntVar(&lsc.Dupl.Threshold, "dupl.threshold", - 150, "Dupl: Minimal threshold to detect copy-paste") - hideFlag("dupl.threshold") - - fs.BoolVar(&lsc.Goconst.MatchWithConstants, "goconst.match-constant", - true, "Goconst: look for existing constants matching the values") - hideFlag("goconst.match-constant") - fs.IntVar(&lsc.Goconst.MinStringLen, "goconst.min-len", - 3, "Goconst: minimum constant string length") - hideFlag("goconst.min-len") - fs.IntVar(&lsc.Goconst.MinOccurrencesCount, "goconst.min-occurrences", - 3, "Goconst: minimum occurrences of constant string count to trigger issue") - hideFlag("goconst.min-occurrences") - fs.BoolVar(&lsc.Goconst.ParseNumbers, "goconst.numbers", - false, "Goconst: search also for duplicated numbers") - hideFlag("goconst.numbers") - fs.IntVar(&lsc.Goconst.NumberMin, "goconst.min", - 3, "minimum value, only works with goconst.numbers") - hideFlag("goconst.min") - fs.IntVar(&lsc.Goconst.NumberMax, "goconst.max", - 3, "maximum value, only works with goconst.numbers") - hideFlag("goconst.max") - fs.BoolVar(&lsc.Goconst.IgnoreCalls, "goconst.ignore-calls", - true, "Goconst: ignore when constant is not used as function argument") - hideFlag("goconst.ignore-calls") - - fs.IntVar(&lsc.Lll.TabWidth, "lll.tab-width", 1, - "Lll: tab width in spaces") - hideFlag("lll.tab-width") - - // Linters config - lc := &cfg.Linters - e.initLintersFlagSet(fs, 
lc) - - // Issues config - ic := &cfg.Issues - fs.StringSliceVarP(&ic.ExcludePatterns, "exclude", "e", nil, wh("Exclude issue by regexp")) - fs.BoolVar(&ic.UseDefaultExcludes, "exclude-use-default", true, getDefaultIssueExcludeHelp()) - fs.BoolVar(&ic.ExcludeCaseSensitive, "exclude-case-sensitive", false, wh("If set to true exclude "+ - "and exclude rules regular expressions are case sensitive")) - - fs.IntVar(&ic.MaxIssuesPerLinter, "max-issues-per-linter", 50, - wh("Maximum issues count per one linter. Set to 0 to disable")) - fs.IntVar(&ic.MaxSameIssues, "max-same-issues", 3, - wh("Maximum count of issues with the same text. Set to 0 to disable")) - - fs.BoolVarP(&ic.Diff, "new", "n", false, - wh("Show only new issues: if there are unstaged changes or untracked files, only those changes "+ - "are analyzed, else only changes in HEAD~ are analyzed.\nIt's a super-useful option for integration "+ - "of golangci-lint into existing large codebase.\nIt's not practical to fix all existing issues at "+ - "the moment of integration: much better to not allow issues in new code.\nFor CI setups, prefer "+ - "--new-from-rev=HEAD~, as --new can skip linting the current patch if any scripts generate "+ - "unstaged files before golangci-lint runs.")) - fs.StringVar(&ic.DiffFromRevision, "new-from-rev", "", - wh("Show only new issues created after git revision `REV`")) - fs.StringVar(&ic.DiffPatchFilePath, "new-from-patch", "", - wh("Show only new issues created in git patch with file path `PATH`")) - fs.BoolVar(&ic.WholeFiles, "whole-files", false, - wh("Show issues in any part of update files (requires new-from-rev or new-from-patch)")) - fs.BoolVar(&ic.NeedFix, "fix", false, wh("Fix found issues (if it's supported by the linter)")) -} - -func (e *Executor) initRunConfiguration(cmd *cobra.Command) { - fs := cmd.Flags() - fs.SortFlags = false // sort them as they are defined here - e.initFlagSet(fs, e.cfg, true) -} - -func (e *Executor) getConfigForCommandLine() (*config.Config, error) { - // We use another pflag.FlagSet here to not set `changed` flag - // on cmd.Flags() options. Otherwise, string slice options will be duplicated. - fs := pflag.NewFlagSet("config flag set", pflag.ContinueOnError) - - var cfg config.Config - // Don't do `fs.AddFlagSet(cmd.Flags())` because it shares flags representations: - // `changed` variable inside string slice vars will be shared. - // Use another config variable here, not e.cfg, to not - // affect main parsing by this parsing of only config option. - e.initFlagSet(fs, &cfg, false) - initVersionFlagSet(fs, &cfg) - - // Parse max options, even force version option: don't want - // to get access to Executor here: it's error-prone to use - // cfg vs e.cfg. 
- initRootFlagSet(fs, &cfg, true) - - fs.Usage = func() {} // otherwise, help text will be printed twice - if err := fs.Parse(os.Args); err != nil { - if errors.Is(err, pflag.ErrHelp) { - return nil, err - } - - return nil, fmt.Errorf("can't parse args: %w", err) - } - - return &cfg, nil -} - -func (e *Executor) initRun() { - e.runCmd = &cobra.Command{ - Use: "run", - Short: "Run the linters", - Run: e.executeRun, - PreRunE: func(_ *cobra.Command, _ []string) error { - if ok := e.acquireFileLock(); !ok { - return errors.New("parallel golangci-lint is running") - } - return nil - }, - PostRun: func(_ *cobra.Command, _ []string) { - e.releaseFileLock() - }, - } - e.rootCmd.AddCommand(e.runCmd) - - e.runCmd.SetOut(logutils.StdOut) // use custom output to properly color it in Windows terminals - e.runCmd.SetErr(logutils.StdErr) - - e.initRunConfiguration(e.runCmd) -} - -func fixSlicesFlags(fs *pflag.FlagSet) { - // It's a dirty hack to set flag.Changed to true for every string slice flag. - // It's necessary to merge config and command-line slices: otherwise command-line - // flags will always overwrite ones from the config. - fs.VisitAll(func(f *pflag.Flag) { - if f.Value.Type() != "stringSlice" { - return - } - - s, err := fs.GetStringSlice(f.Name) - if err != nil { - return - } - - if s == nil { // assume that every string slice flag has nil as the default - return - } - - var safe []string - for _, v := range s { - // add quotes to escape comma because spf13/pflag use a CSV parser: - // https://github.com/spf13/pflag/blob/85dd5c8bc61cfa382fecd072378089d4e856579d/string_slice.go#L43 - safe = append(safe, `"`+v+`"`) - } - - // calling Set sets Changed to true: next Set calls will append, not overwrite - _ = f.Value.Set(strings.Join(safe, ",")) - }) -} - -// runAnalysis executes the linters that have been enabled in the configuration. 
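fixSlicesFlags above works around pflag's CSV parsing of string-slice values: re-Setting each flag marks it Changed, so later Set calls append (letting config and command-line slices merge), and wrapping each element in quotes keeps embedded commas intact. A minimal sketch of the quoting issue, reusing the --skip-dirs flag name for illustration:

package main

import (
	"fmt"

	"github.com/spf13/pflag"
)

func main() {
	fs := pflag.NewFlagSet("demo", pflag.ContinueOnError)
	var skip []string
	fs.StringSliceVar(&skip, "skip-dirs", nil, "regexps of directories to skip")

	// pflag feeds string-slice values through a CSV reader, so a comma
	// inside a regexp splits it into two elements...
	_ = fs.Set("skip-dirs", `a{1,3}`)
	fmt.Println(skip) // [a{1 3}]

	// ...unless the value is wrapped in quotes, which is exactly what
	// fixSlicesFlags does before re-setting each flag.
	fs2 := pflag.NewFlagSet("demo2", pflag.ContinueOnError)
	fs2.StringSliceVar(&skip, "skip-dirs", nil, "regexps of directories to skip")
	_ = fs2.Set("skip-dirs", `"a{1,3}"`)
	fmt.Println(skip) // [a{1,3}]
}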
-func (e *Executor) runAnalysis(ctx context.Context, args []string) ([]result.Issue, error) { - e.cfg.Run.Args = args - - lintersToRun, err := e.EnabledLintersSet.GetOptimizedLinters() - if err != nil { - return nil, err - } - - enabledLintersMap, err := e.EnabledLintersSet.GetEnabledLintersMap() - if err != nil { - return nil, err - } - - for _, lc := range e.DBManager.GetAllSupportedLinterConfigs() { - isEnabled := enabledLintersMap[lc.Name()] != nil - e.reportData.AddLinter(lc.Name(), isEnabled, lc.EnabledByDefault) - } - - lintCtx, err := e.contextLoader.Load(ctx, lintersToRun) - if err != nil { - return nil, fmt.Errorf("context loading failed: %w", err) - } - lintCtx.Log = e.log.Child(logutils.DebugKeyLintersContext) - - runner, err := lint.NewRunner(e.cfg, e.log.Child(logutils.DebugKeyRunner), - e.goenv, e.EnabledLintersSet, e.lineCache, e.fileCache, e.DBManager, lintCtx.Packages) - if err != nil { - return nil, err - } - - return runner.Run(ctx, lintersToRun, lintCtx) -} - -func (e *Executor) setOutputToDevNull() (savedStdout, savedStderr *os.File) { - savedStdout, savedStderr = os.Stdout, os.Stderr - devNull, err := os.Open(os.DevNull) - if err != nil { - e.log.Warnf("Can't open null device %q: %s", os.DevNull, err) - return - } - - os.Stdout, os.Stderr = devNull, devNull - return -} - -func (e *Executor) setExitCodeIfIssuesFound(issues []result.Issue) { - if len(issues) != 0 { - e.exitCode = e.cfg.Run.ExitCodeIfIssuesFound - } -} - -func (e *Executor) runAndPrint(ctx context.Context, args []string) error { - if err := e.goenv.Discover(ctx); err != nil { - e.log.Warnf("Failed to discover go env: %s", err) - } - - if !logutils.HaveDebugTag(logutils.DebugKeyLintersOutput) { - // Don't allow linters and loader to print anything - log.SetOutput(io.Discard) - savedStdout, savedStderr := e.setOutputToDevNull() - defer func() { - os.Stdout, os.Stderr = savedStdout, savedStderr - }() - } - - issues, err := e.runAnalysis(ctx, args) - if err != nil { - return err // XXX: don't loose type - } - - formats := strings.Split(e.cfg.Output.Format, ",") - for _, format := range formats { - out := strings.SplitN(format, ":", 2) - if len(out) < 2 { - out = append(out, "") - } - - err := e.printReports(issues, out[1], out[0]) - if err != nil { - return err - } - } - - e.printStats(issues) - - e.setExitCodeIfIssuesFound(issues) - - e.fileCache.PrintStats(e.log) - - return nil -} - -func (e *Executor) printReports(issues []result.Issue, path, format string) error { - w, shouldClose, err := e.createWriter(path) - if err != nil { - return fmt.Errorf("can't create output for %s: %w", path, err) - } - - p, err := e.createPrinter(format, w) - if err != nil { - if file, ok := w.(io.Closer); shouldClose && ok { - _ = file.Close() - } - return err - } - - if err = p.Print(issues); err != nil { - if file, ok := w.(io.Closer); shouldClose && ok { - _ = file.Close() - } - return fmt.Errorf("can't print %d issues: %w", len(issues), err) - } - - if file, ok := w.(io.Closer); shouldClose && ok { - _ = file.Close() - } - - return nil -} - -func (e *Executor) createWriter(path string) (io.Writer, bool, error) { - if path == "" || path == "stdout" { - return logutils.StdOut, false, nil - } - if path == "stderr" { - return logutils.StdErr, false, nil - } - f, err := os.OpenFile(path, os.O_CREATE|os.O_TRUNC|os.O_WRONLY, defaultFileMode) - if err != nil { - return nil, false, err - } - return f, true, nil -} - -func (e *Executor) createPrinter(format string, w io.Writer) (printers.Printer, error) { - var p printers.Printer 
- switch format { - case config.OutFormatJSON: - p = printers.NewJSON(&e.reportData, w) - case config.OutFormatColoredLineNumber, config.OutFormatLineNumber: - p = printers.NewText(e.cfg.Output.PrintIssuedLine, - format == config.OutFormatColoredLineNumber, e.cfg.Output.PrintLinterName, - e.log.Child(logutils.DebugKeyTextPrinter), w) - case config.OutFormatTab, config.OutFormatColoredTab: - p = printers.NewTab(e.cfg.Output.PrintLinterName, - format == config.OutFormatColoredTab, - e.log.Child(logutils.DebugKeyTabPrinter), w) - case config.OutFormatCheckstyle: - p = printers.NewCheckstyle(w) - case config.OutFormatCodeClimate: - p = printers.NewCodeClimate(w) - case config.OutFormatHTML: - p = printers.NewHTML(w) - case config.OutFormatJunitXML: - p = printers.NewJunitXML(w) - case config.OutFormatGithubActions: - p = printers.NewGithub(w) - case config.OutFormatTeamCity: - p = printers.NewTeamCity(w) - default: - return nil, fmt.Errorf("unknown output format %s", format) - } - - return p, nil -} - -func (e *Executor) printStats(issues []result.Issue) { - if !e.cfg.Run.ShowStats { - return - } - - if len(issues) == 0 { - e.runCmd.Println("0 issues.") - return - } - - stats := map[string]int{} - for idx := range issues { - stats[issues[idx].FromLinter]++ - } - - e.runCmd.Printf("%d issues:\n", len(issues)) - - keys := maps.Keys(stats) - sort.Strings(keys) - - for _, key := range keys { - e.runCmd.Printf("* %s: %d\n", key, stats[key]) - } -} - -// executeRun executes the 'run' CLI command, which runs the linters. -func (e *Executor) executeRun(_ *cobra.Command, args []string) { - needTrackResources := e.cfg.Run.IsVerbose || e.cfg.Run.PrintResourcesUsage - trackResourcesEndCh := make(chan struct{}) - defer func() { // XXX: this defer must be before ctx.cancel defer - if needTrackResources { // wait until resource tracking finished to print properly - <-trackResourcesEndCh - } - }() - - e.setTimeoutToDeadlineIfOnlyDeadlineIsSet() - ctx, cancel := context.WithTimeout(context.Background(), e.cfg.Run.Timeout) - defer cancel() - - if needTrackResources { - go watchResources(ctx, trackResourcesEndCh, e.log, e.debugf) - } - - if err := e.runAndPrint(ctx, args); err != nil { - e.log.Errorf("Running error: %s", err) - if e.exitCode == exitcodes.Success { - var exitErr *exitcodes.ExitError - if errors.As(err, &exitErr) { - e.exitCode = exitErr.Code - } else { - e.exitCode = exitcodes.Failure - } - } - } - - e.setupExitCode(ctx) -} - -// to be removed when deadline is finally decommissioned -func (e *Executor) setTimeoutToDeadlineIfOnlyDeadlineIsSet() { - deadlineValue := e.cfg.Run.Deadline - if deadlineValue != 0 && e.cfg.Run.Timeout == defaultTimeout { - e.cfg.Run.Timeout = deadlineValue - } -} - -func (e *Executor) setupExitCode(ctx context.Context) { - if ctx.Err() != nil { - e.exitCode = exitcodes.Timeout - e.log.Errorf("Timeout exceeded: try increasing it by passing --timeout option") - return - } - - if e.exitCode != exitcodes.Success { - return - } - - needFailOnWarnings := os.Getenv(lintersdb.EnvTestRun) == "1" || os.Getenv(envFailOnWarnings) == "1" - if needFailOnWarnings && len(e.reportData.Warnings) != 0 { - e.exitCode = exitcodes.WarningInTest - return - } - - if e.reportData.Error != "" { - // it's a case e.g. 
when typecheck linter couldn't parse and error and just logged it - e.exitCode = exitcodes.ErrorWasLogged - return - } -} - -func watchResources(ctx context.Context, done chan struct{}, logger logutils.Log, debugf logutils.DebugFunc) { - startedAt := time.Now() - debugf("Started tracking time") - - var maxRSSMB, totalRSSMB float64 - var iterationsCount int - - const intervalMS = 100 - ticker := time.NewTicker(intervalMS * time.Millisecond) - defer ticker.Stop() - - logEveryRecord := os.Getenv(envMemLogEvery) == "1" - const MB = 1024 * 1024 - - track := func() { - var m runtime.MemStats - runtime.ReadMemStats(&m) - - if logEveryRecord { - debugf("Stopping memory tracing iteration, printing ...") - printMemStats(&m, logger) - } - - rssMB := float64(m.Sys) / MB - if rssMB > maxRSSMB { - maxRSSMB = rssMB - } - totalRSSMB += rssMB - iterationsCount++ - } - - for { - track() - - stop := false - select { - case <-ctx.Done(): - stop = true - debugf("Stopped resources tracking") - case <-ticker.C: - } - - if stop { - break - } - } - track() - - avgRSSMB := totalRSSMB / float64(iterationsCount) - - logger.Infof("Memory: %d samples, avg is %.1fMB, max is %.1fMB", - iterationsCount, avgRSSMB, maxRSSMB) - logger.Infof("Execution took %s", time.Since(startedAt)) - close(done) -} - -func getDefaultIssueExcludeHelp() string { - parts := []string{color.GreenString("Use or not use default excludes:")} - for _, ep := range config.DefaultExcludePatterns { - parts = append(parts, - fmt.Sprintf(" # %s %s: %s", ep.ID, ep.Linter, ep.Why), - fmt.Sprintf(" - %s", color.YellowString(ep.Pattern)), - "", - ) - } - return strings.Join(parts, "\n") -} - -func getDefaultDirectoryExcludeHelp() string { - parts := []string{color.GreenString("Use or not use default excluded directories:")} - for _, dir := range packages.StdExcludeDirRegexps { - parts = append(parts, fmt.Sprintf(" - %s", color.YellowString(dir))) - } - parts = append(parts, "") - return strings.Join(parts, "\n") -} - -func wh(text string) string { - return color.GreenString(text) -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/commands/version.go b/vendor/github.com/golangci/golangci-lint/pkg/commands/version.go deleted file mode 100644 index 10ab7c1bb..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/commands/version.go +++ /dev/null @@ -1,83 +0,0 @@ -package commands - -import ( - "encoding/json" - "fmt" - "io" - "os" - "runtime/debug" - "strings" - - "github.com/spf13/cobra" - "github.com/spf13/pflag" - - "github.com/golangci/golangci-lint/pkg/config" -) - -type versionInfo struct { - Info BuildInfo - BuildInfo *debug.BuildInfo -} - -func (e *Executor) initVersionConfiguration(cmd *cobra.Command) { - fs := cmd.Flags() - fs.SortFlags = false // sort them as they are defined here - initVersionFlagSet(fs, e.cfg) -} - -func initVersionFlagSet(fs *pflag.FlagSet, cfg *config.Config) { - // Version config - vc := &cfg.Version - fs.StringVar(&vc.Format, "format", "", wh("The version's format can be: 'short', 'json'")) - fs.BoolVar(&vc.Debug, "debug", false, wh("Add build information")) -} - -func (e *Executor) initVersion() { - versionCmd := &cobra.Command{ - Use: "version", - Short: "Version", - Args: cobra.NoArgs, - ValidArgsFunction: cobra.NoFileCompletions, - RunE: func(_ *cobra.Command, _ []string) error { - if e.cfg.Version.Debug { - info, ok := debug.ReadBuildInfo() - if !ok { - return nil - } - - switch strings.ToLower(e.cfg.Version.Format) { - case "json": - return json.NewEncoder(os.Stdout).Encode(versionInfo{ - Info: 
e.buildInfo, - BuildInfo: info, - }) - - default: - fmt.Println(info.String()) - return printVersion(os.Stdout, e.buildInfo) - } - } - - switch strings.ToLower(e.cfg.Version.Format) { - case "short": - fmt.Println(e.buildInfo.Version) - return nil - - case "json": - return json.NewEncoder(os.Stdout).Encode(e.buildInfo) - - default: - return printVersion(os.Stdout, e.buildInfo) - } - }, - } - - e.rootCmd.AddCommand(versionCmd) - e.initVersionConfiguration(versionCmd) -} - -func printVersion(w io.Writer, buildInfo BuildInfo) error { - _, err := fmt.Fprintf(w, "golangci-lint has version %s built with %s from %s on %s\n", - buildInfo.Version, buildInfo.GoVersion, buildInfo.Commit, buildInfo.Date) - return err -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/config/config.go b/vendor/github.com/golangci/golangci-lint/pkg/config/config.go deleted file mode 100644 index a5483a20e..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/config/config.go +++ /dev/null @@ -1,71 +0,0 @@ -package config - -import ( - "os" - "strings" - - hcversion "github.com/hashicorp/go-version" - "github.com/ldez/gomoddirectives" -) - -// Config encapsulates the config data specified in the golangci yaml config file. -type Config struct { - cfgDir string // The directory containing the golangci config file. - Run Run - - Output Output - - LintersSettings LintersSettings `mapstructure:"linters-settings"` - Linters Linters - Issues Issues - Severity Severity - Version Version - - InternalCmdTest bool `mapstructure:"internal-cmd-test"` // Option is used only for testing golangci-lint command, don't use it - InternalTest bool // Option is used only for testing golangci-lint code, don't use it -} - -// GetConfigDir returns the directory that contains golangci config file. -func (c *Config) GetConfigDir() string { - return c.cfgDir -} - -func NewDefault() *Config { - return &Config{ - LintersSettings: defaultLintersSettings, - } -} - -type Version struct { - Format string `mapstructure:"format"` - Debug bool `mapstructure:"debug"` -} - -func IsGreaterThanOrEqualGo122(v string) bool { - v1, err := hcversion.NewVersion(strings.TrimPrefix(v, "go")) - if err != nil { - return false - } - - limit, err := hcversion.NewVersion("1.22") - if err != nil { - return false - } - - return v1.GreaterThanOrEqual(limit) -} - -func DetectGoVersion() string { - file, _ := gomoddirectives.GetModuleFile() - - if file != nil && file.Go != nil && file.Go.Version != "" { - return file.Go.Version - } - - v := os.Getenv("GOVERSION") - if v != "" { - return v - } - - return "1.17" -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/config/issues.go b/vendor/github.com/golangci/golangci-lint/pkg/config/issues.go deleted file mode 100644 index 27dcf8105..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/config/issues.go +++ /dev/null @@ -1,213 +0,0 @@ -package config - -import ( - "fmt" - "regexp" -) - -const excludeRuleMinConditionsCount = 2 - -var DefaultExcludePatterns = []ExcludePattern{ - { - ID: "EXC0001", - Pattern: "Error return value of .((os\\.)?std(out|err)\\..*|.*Close" + - "|.*Flush|os\\.Remove(All)?|.*print(f|ln)?|os\\.(Un)?Setenv). is not checked", - Linter: "errcheck", - Why: "Almost all programs ignore errors on these functions and in most cases it's ok", - }, - { - ID: "EXC0002", - Pattern: "(comment on exported (method|function|type|const)|" + - "should have( a package)? comment|comment should be of the form)", - Linter: "golint", - Why: "Annoying issue about not having a comment. 
The rare codebase has such comments", - }, - { - ID: "EXC0003", - Pattern: "func name will be used as test\\.Test.* by other packages, and that stutters; consider calling this", - Linter: "golint", - Why: "False positive when tests are defined in package 'test'", - }, - { - ID: "EXC0004", - Pattern: "(possible misuse of unsafe.Pointer|should have signature)", - Linter: "govet", - Why: "Common false positives", - }, - { - ID: "EXC0005", - Pattern: "ineffective break statement. Did you mean to break out of the outer loop", - Linter: "staticcheck", - Why: "Developers tend to write in C-style with an explicit 'break' in a 'switch', so it's ok to ignore", - }, - { - ID: "EXC0006", - Pattern: "Use of unsafe calls should be audited", - Linter: "gosec", - Why: "Too many false-positives on 'unsafe' usage", - }, - { - ID: "EXC0007", - Pattern: "Subprocess launch(ed with variable|ing should be audited)", - Linter: "gosec", - Why: "Too many false-positives for parametrized shell calls", - }, - { - ID: "EXC0008", - Pattern: "(G104)", - Linter: "gosec", - Why: "Duplicated errcheck checks", - }, - { - ID: "EXC0009", - Pattern: "(Expect directory permissions to be 0750 or less|Expect file permissions to be 0600 or less)", - Linter: "gosec", - Why: "Too many issues in popular repos", - }, - { - ID: "EXC0010", - Pattern: "Potential file inclusion via variable", - Linter: "gosec", - Why: "False positive is triggered by 'src, err := ioutil.ReadFile(filename)'", - }, - { - ID: "EXC0011", - Pattern: "(comment on exported (method|function|type|const)|" + - "should have( a package)? comment|comment should be of the form)", - Linter: "stylecheck", - Why: "Annoying issue about not having a comment. The rare codebase has such comments", - }, - { - ID: "EXC0012", - Pattern: `exported (.+) should have comment( \(or a comment on this block\))? or be unexported`, - Linter: "revive", - Why: "Annoying issue about not having a comment. The rare codebase has such comments", - }, - { - ID: "EXC0013", - Pattern: `package comment should be of the form "(.+)...`, - Linter: "revive", - Why: "Annoying issue about not having a comment. The rare codebase has such comments", - }, - { - ID: "EXC0014", - Pattern: `comment on exported (.+) should be of the form "(.+)..."`, - Linter: "revive", - Why: "Annoying issue about not having a comment. The rare codebase has such comments", - }, - { - ID: "EXC0015", - Pattern: `should have a package comment`, - Linter: "revive", - Why: "Annoying issue about not having a comment. 
The rare codebase has such comments", - }, -} - -type Issues struct { - IncludeDefaultExcludes []string `mapstructure:"include"` - ExcludeCaseSensitive bool `mapstructure:"exclude-case-sensitive"` - ExcludePatterns []string `mapstructure:"exclude"` - ExcludeRules []ExcludeRule `mapstructure:"exclude-rules"` - UseDefaultExcludes bool `mapstructure:"exclude-use-default"` - - MaxIssuesPerLinter int `mapstructure:"max-issues-per-linter"` - MaxSameIssues int `mapstructure:"max-same-issues"` - - DiffFromRevision string `mapstructure:"new-from-rev"` - DiffPatchFilePath string `mapstructure:"new-from-patch"` - WholeFiles bool `mapstructure:"whole-files"` - Diff bool `mapstructure:"new"` - - NeedFix bool `mapstructure:"fix"` -} - -type ExcludeRule struct { - BaseRule `mapstructure:",squash"` -} - -func (e *ExcludeRule) Validate() error { - return e.BaseRule.Validate(excludeRuleMinConditionsCount) -} - -type BaseRule struct { - Linters []string - Path string - PathExcept string `mapstructure:"path-except"` - Text string - Source string -} - -func (b *BaseRule) Validate(minConditionsCount int) error { - if err := validateOptionalRegex(b.Path); err != nil { - return fmt.Errorf("invalid path regex: %w", err) - } - if err := validateOptionalRegex(b.PathExcept); err != nil { - return fmt.Errorf("invalid path-except regex: %w", err) - } - if err := validateOptionalRegex(b.Text); err != nil { - return fmt.Errorf("invalid text regex: %w", err) - } - if err := validateOptionalRegex(b.Source); err != nil { - return fmt.Errorf("invalid source regex: %w", err) - } - nonBlank := 0 - if len(b.Linters) > 0 { - nonBlank++ - } - // Filtering by path counts as one condition, regardless how it is done (one or both). - // Otherwise, a rule with Path and PathExcept set would pass validation - // whereas before the introduction of path-except that wouldn't have been precise enough. - if b.Path != "" || b.PathExcept != "" { - nonBlank++ - } - if b.Text != "" { - nonBlank++ - } - if b.Source != "" { - nonBlank++ - } - if nonBlank < minConditionsCount { - return fmt.Errorf("at least %d of (text, source, path[-except], linters) should be set", minConditionsCount) - } - return nil -} - -func validateOptionalRegex(value string) error { - if value == "" { - return nil - } - _, err := regexp.Compile(value) - return err -} - -type ExcludePattern struct { - ID string - Pattern string - Linter string - Why string -} - -func GetDefaultExcludePatternsStrings() []string { - ret := make([]string, len(DefaultExcludePatterns)) - for i, p := range DefaultExcludePatterns { - ret[i] = p.Pattern - } - return ret -} - -// TODO(ldez): this behavior must be changed in v2, because this is confusing. 
-func GetExcludePatterns(include []string) []ExcludePattern { - includeMap := make(map[string]struct{}, len(include)) - for _, inc := range include { - includeMap[inc] = struct{}{} - } - - var ret []ExcludePattern - for _, p := range DefaultExcludePatterns { - if _, ok := includeMap[p.ID]; !ok { - ret = append(ret, p) - } - } - - return ret -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/config/linters.go b/vendor/github.com/golangci/golangci-lint/pkg/config/linters.go deleted file mode 100644 index ccbdc123a..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/config/linters.go +++ /dev/null @@ -1,11 +0,0 @@ -package config - -type Linters struct { - Enable []string - Disable []string - EnableAll bool `mapstructure:"enable-all"` - DisableAll bool `mapstructure:"disable-all"` - Fast bool - - Presets []string -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/config/linters_settings.go b/vendor/github.com/golangci/golangci-lint/pkg/config/linters_settings.go deleted file mode 100644 index a3206f597..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/config/linters_settings.go +++ /dev/null @@ -1,951 +0,0 @@ -package config - -import ( - "encoding" - "errors" - "runtime" - - "gopkg.in/yaml.v3" -) - -var defaultLintersSettings = LintersSettings{ - Asasalint: AsasalintSettings{ - UseBuiltinExclusions: true, - }, - Decorder: DecorderSettings{ - DecOrder: []string{"type", "const", "var", "func"}, - DisableDecNumCheck: true, - DisableDecOrderCheck: true, - DisableInitFuncFirstCheck: true, - }, - Dogsled: DogsledSettings{ - MaxBlankIdentifiers: 2, - }, - ErrorLint: ErrorLintSettings{ - Errorf: true, - ErrorfMulti: true, - Asserts: true, - Comparison: true, - }, - Exhaustive: ExhaustiveSettings{ - Check: []string{"switch"}, - CheckGenerated: false, - DefaultSignifiesExhaustive: false, - IgnoreEnumMembers: "", - PackageScopeOnly: false, - ExplicitExhaustiveMap: false, - ExplicitExhaustiveSwitch: false, - }, - Forbidigo: ForbidigoSettings{ - ExcludeGodocExamples: true, - }, - Gci: GciSettings{ - Sections: []string{"standard", "default"}, - SkipGenerated: true, - }, - Gocognit: GocognitSettings{ - MinComplexity: 30, - }, - Gocritic: GoCriticSettings{ - SettingsPerCheck: map[string]GoCriticCheckSettings{}, - }, - Godox: GodoxSettings{ - Keywords: []string{}, - }, - Godot: GodotSettings{ - Scope: "declarations", - Period: true, - }, - Gofumpt: GofumptSettings{ - LangVersion: "", - ModulePath: "", - ExtraRules: false, - }, - Gosec: GoSecSettings{ - Concurrency: runtime.NumCPU(), - }, - Gosmopolitan: GosmopolitanSettings{ - AllowTimeLocal: false, - EscapeHatches: []string{}, - IgnoreTests: true, - WatchForScripts: []string{"Han"}, - }, - Ifshort: IfshortSettings{ - MaxDeclLines: 1, - MaxDeclChars: 30, - }, - Inamedparam: INamedParamSettings{ - SkipSingleParam: false, - }, - InterfaceBloat: InterfaceBloatSettings{ - Max: 10, - }, - Lll: LllSettings{ - LineLength: 120, - TabWidth: 1, - }, - LoggerCheck: LoggerCheckSettings{ - Kitlog: true, - Klog: true, - Logr: true, - Zap: true, - RequireStringKey: false, - NoPrintfLike: false, - Rules: nil, - }, - MaintIdx: MaintIdxSettings{ - Under: 20, - }, - Nakedret: NakedretSettings{ - MaxFuncLines: 30, - }, - Nestif: NestifSettings{ - MinComplexity: 5, - }, - NoLintLint: NoLintLintSettings{ - RequireExplanation: false, - RequireSpecific: false, - AllowUnused: false, - }, - PerfSprint: PerfSprintSettings{ - IntConversion: true, - ErrError: false, - ErrorF: true, - SprintF1: true, - }, - Prealloc: PreallocSettings{ - Simple: 
true, - RangeLoops: true, - ForLoops: false, - }, - Predeclared: PredeclaredSettings{ - Ignore: "", - Qualified: false, - }, - SlogLint: SlogLintSettings{ - NoMixedArgs: true, - KVOnly: false, - AttrOnly: false, - ContextOnly: false, - StaticMsg: false, - NoRawKeys: false, - KeyNamingCase: "", - ArgsOnSepLines: false, - }, - TagAlign: TagAlignSettings{ - Align: true, - Sort: true, - Order: nil, - Strict: false, - }, - Testpackage: TestpackageSettings{ - SkipRegexp: `(export|internal)_test\.go`, - AllowPackages: []string{"main"}, - }, - Unparam: UnparamSettings{ - Algo: "cha", - }, - Unused: UnusedSettings{ - FieldWritesAreUses: true, - PostStatementsAreReads: false, - ExportedIsUsed: true, - ExportedFieldsAreUsed: true, - ParametersAreUsed: true, - LocalVariablesAreUsed: true, - GeneratedIsUsed: true, - }, - UseStdlibVars: UseStdlibVarsSettings{ - HTTPMethod: true, - HTTPStatusCode: true, - }, - Varnamelen: VarnamelenSettings{ - MaxDistance: 5, - MinNameLength: 3, - }, - WSL: WSLSettings{ - StrictAppend: true, - AllowAssignAndCallCuddle: true, - AllowAssignAndAnythingCuddle: false, - AllowMultiLineAssignCuddle: true, - ForceCaseTrailingWhitespaceLimit: 0, - AllowTrailingComment: false, - AllowSeparatedLeadingComment: false, - AllowCuddleDeclaration: false, - AllowCuddleWithCalls: []string{"Lock", "RLock"}, - AllowCuddleWithRHS: []string{"Unlock", "RUnlock"}, - ForceCuddleErrCheckAndAssign: false, - ErrorVariableNames: []string{"err"}, - ForceExclusiveShortDeclarations: false, - }, -} - -type LintersSettings struct { - Asasalint AsasalintSettings - BiDiChk BiDiChkSettings - Cyclop Cyclop - Decorder DecorderSettings - Depguard DepGuardSettings - Dogsled DogsledSettings - Dupl DuplSettings - DupWord DupWordSettings - Errcheck ErrcheckSettings - ErrChkJSON ErrChkJSONSettings - ErrorLint ErrorLintSettings - Exhaustive ExhaustiveSettings - ExhaustiveStruct ExhaustiveStructSettings - Exhaustruct ExhaustructSettings - Forbidigo ForbidigoSettings - Funlen FunlenSettings - Gci GciSettings - GinkgoLinter GinkgoLinterSettings - Gocognit GocognitSettings - Goconst GoConstSettings - Gocritic GoCriticSettings - Gocyclo GoCycloSettings - Godot GodotSettings - Godox GodoxSettings - Gofmt GoFmtSettings - Gofumpt GofumptSettings - Goheader GoHeaderSettings - Goimports GoImportsSettings - Golint GoLintSettings - Gomnd GoMndSettings - GoModDirectives GoModDirectivesSettings - Gomodguard GoModGuardSettings - Gosec GoSecSettings - Gosimple StaticCheckSettings - Gosmopolitan GosmopolitanSettings - Govet GovetSettings - Grouper GrouperSettings - Ifshort IfshortSettings - ImportAs ImportAsSettings - Inamedparam INamedParamSettings - InterfaceBloat InterfaceBloatSettings - Ireturn IreturnSettings - Lll LllSettings - LoggerCheck LoggerCheckSettings - MaintIdx MaintIdxSettings - Makezero MakezeroSettings - Maligned MalignedSettings - Misspell MisspellSettings - MustTag MustTagSettings - Nakedret NakedretSettings - Nestif NestifSettings - NilNil NilNilSettings - Nlreturn NlreturnSettings - NoLintLint NoLintLintSettings - NoNamedReturns NoNamedReturnsSettings - ParallelTest ParallelTestSettings - PerfSprint PerfSprintSettings - Prealloc PreallocSettings - Predeclared PredeclaredSettings - Promlinter PromlinterSettings - ProtoGetter ProtoGetterSettings - Reassign ReassignSettings - Revive ReviveSettings - RowsErrCheck RowsErrCheckSettings - SlogLint SlogLintSettings - Spancheck SpancheckSettings - Staticcheck StaticCheckSettings - Structcheck StructCheckSettings - Stylecheck StaticCheckSettings - TagAlign 
TagAlignSettings - Tagliatelle TagliatelleSettings - Tenv TenvSettings - Testifylint TestifylintSettings - Testpackage TestpackageSettings - Thelper ThelperSettings - Unparam UnparamSettings - Unused UnusedSettings - UseStdlibVars UseStdlibVarsSettings - Varcheck VarCheckSettings - Varnamelen VarnamelenSettings - Whitespace WhitespaceSettings - Wrapcheck WrapcheckSettings - WSL WSLSettings - - Custom map[string]CustomLinterSettings -} - -type AsasalintSettings struct { - Exclude []string `mapstructure:"exclude"` - UseBuiltinExclusions bool `mapstructure:"use-builtin-exclusions"` - IgnoreTest bool `mapstructure:"ignore-test"` -} - -type BiDiChkSettings struct { - LeftToRightEmbedding bool `mapstructure:"left-to-right-embedding"` - RightToLeftEmbedding bool `mapstructure:"right-to-left-embedding"` - PopDirectionalFormatting bool `mapstructure:"pop-directional-formatting"` - LeftToRightOverride bool `mapstructure:"left-to-right-override"` - RightToLeftOverride bool `mapstructure:"right-to-left-override"` - LeftToRightIsolate bool `mapstructure:"left-to-right-isolate"` - RightToLeftIsolate bool `mapstructure:"right-to-left-isolate"` - FirstStrongIsolate bool `mapstructure:"first-strong-isolate"` - PopDirectionalIsolate bool `mapstructure:"pop-directional-isolate"` -} - -type Cyclop struct { - MaxComplexity int `mapstructure:"max-complexity"` - PackageAverage float64 `mapstructure:"package-average"` - SkipTests bool `mapstructure:"skip-tests"` -} - -type DepGuardSettings struct { - Rules map[string]*DepGuardList `mapstructure:"rules"` -} - -type DepGuardList struct { - ListMode string `mapstructure:"list-mode"` - Files []string `mapstructure:"files"` - Allow []string `mapstructure:"allow"` - Deny []DepGuardDeny `mapstructure:"deny"` -} - -type DepGuardDeny struct { - Pkg string `mapstructure:"pkg"` - Desc string `mapstructure:"desc"` -} - -type DecorderSettings struct { - DecOrder []string `mapstructure:"dec-order"` - IgnoreUnderscoreVars bool `mapstructure:"ignore-underscore-vars"` - DisableDecNumCheck bool `mapstructure:"disable-dec-num-check"` - DisableTypeDecNumCheck bool `mapstructure:"disable-type-dec-num-check"` - DisableConstDecNumCheck bool `mapstructure:"disable-const-dec-num-check"` - DisableVarDecNumCheck bool `mapstructure:"disable-var-dec-num-check"` - DisableDecOrderCheck bool `mapstructure:"disable-dec-order-check"` - DisableInitFuncFirstCheck bool `mapstructure:"disable-init-func-first-check"` -} - -type DogsledSettings struct { - MaxBlankIdentifiers int `mapstructure:"max-blank-identifiers"` -} - -type DuplSettings struct { - Threshold int -} - -type DupWordSettings struct { - Keywords []string `mapstructure:"keywords"` - Ignore []string `mapstructure:"ignore"` -} - -type ErrcheckSettings struct { - DisableDefaultExclusions bool `mapstructure:"disable-default-exclusions"` - CheckTypeAssertions bool `mapstructure:"check-type-assertions"` - CheckAssignToBlank bool `mapstructure:"check-blank"` - Ignore string `mapstructure:"ignore"` - ExcludeFunctions []string `mapstructure:"exclude-functions"` - - // Deprecated: use ExcludeFunctions instead - Exclude string `mapstructure:"exclude"` -} - -type ErrChkJSONSettings struct { - CheckErrorFreeEncoding bool `mapstructure:"check-error-free-encoding"` - ReportNoExported bool `mapstructure:"report-no-exported"` -} - -type ErrorLintSettings struct { - Errorf bool `mapstructure:"errorf"` - ErrorfMulti bool `mapstructure:"errorf-multi"` - Asserts bool `mapstructure:"asserts"` - Comparison bool `mapstructure:"comparison"` -} - -type 
ExhaustiveSettings struct { - Check []string `mapstructure:"check"` - CheckGenerated bool `mapstructure:"check-generated"` - DefaultSignifiesExhaustive bool `mapstructure:"default-signifies-exhaustive"` - IgnoreEnumMembers string `mapstructure:"ignore-enum-members"` - IgnoreEnumTypes string `mapstructure:"ignore-enum-types"` - PackageScopeOnly bool `mapstructure:"package-scope-only"` - ExplicitExhaustiveMap bool `mapstructure:"explicit-exhaustive-map"` - ExplicitExhaustiveSwitch bool `mapstructure:"explicit-exhaustive-switch"` - DefaultCaseRequired bool `mapstructure:"default-case-required"` -} - -type ExhaustiveStructSettings struct { - StructPatterns []string `mapstructure:"struct-patterns"` -} - -type ExhaustructSettings struct { - Include []string `mapstructure:"include"` - Exclude []string `mapstructure:"exclude"` -} - -type ForbidigoSettings struct { - Forbid []ForbidigoPattern `mapstructure:"forbid"` - ExcludeGodocExamples bool `mapstructure:"exclude-godoc-examples"` - AnalyzeTypes bool `mapstructure:"analyze-types"` -} - -var _ encoding.TextUnmarshaler = &ForbidigoPattern{} - -// ForbidigoPattern corresponds to forbidigo.pattern and adds mapstructure support. -// The YAML field names must match what forbidigo expects. -type ForbidigoPattern struct { - // patternString gets populated when the config contains a string as entry in ForbidigoSettings.Forbid[] - // because ForbidigoPattern implements encoding.TextUnmarshaler - // and the reader uses the mapstructure.TextUnmarshallerHookFunc as decoder hook. - // - // If the entry is a map, then the other fields are set as usual by mapstructure. - patternString string - - Pattern string `yaml:"p" mapstructure:"p"` - Package string `yaml:"pkg,omitempty" mapstructure:"pkg,omitempty"` - Msg string `yaml:"msg,omitempty" mapstructure:"msg,omitempty"` -} - -func (p *ForbidigoPattern) UnmarshalText(text []byte) error { - // Validation happens when instantiating forbidigo. - p.patternString = string(text) - return nil -} - -// MarshalString converts the pattern into a string as needed by forbidigo.NewLinter. -// -// MarshalString is intentionally not called MarshalText, -// although it has the same signature -// because implementing encoding.TextMarshaler led to infinite recursion when yaml.Marshal called MarshalText. 
-func (p *ForbidigoPattern) MarshalString() ([]byte, error) {
-	if p.patternString != "" {
-		return []byte(p.patternString), nil
-	}
-
-	return yaml.Marshal(p)
-}
-
-type FunlenSettings struct {
-	Lines          int
-	Statements     int
-	IgnoreComments bool `mapstructure:"ignore-comments"`
-}
-
-type GciSettings struct {
-	LocalPrefixes string   `mapstructure:"local-prefixes"` // Deprecated
-	Sections      []string `mapstructure:"sections"`
-	SkipGenerated bool     `mapstructure:"skip-generated"`
-	CustomOrder   bool     `mapstructure:"custom-order"`
-}
-
-type GinkgoLinterSettings struct {
-	SuppressLenAssertion       bool `mapstructure:"suppress-len-assertion"`
-	SuppressNilAssertion       bool `mapstructure:"suppress-nil-assertion"`
-	SuppressErrAssertion       bool `mapstructure:"suppress-err-assertion"`
-	SuppressCompareAssertion   bool `mapstructure:"suppress-compare-assertion"`
-	SuppressAsyncAssertion     bool `mapstructure:"suppress-async-assertion"`
-	SuppressTypeCompareWarning bool `mapstructure:"suppress-type-compare-assertion"`
-	ForbidFocusContainer       bool `mapstructure:"forbid-focus-container"`
-	AllowHaveLenZero           bool `mapstructure:"allow-havelen-zero"`
-}
-
-type GocognitSettings struct {
-	MinComplexity int `mapstructure:"min-complexity"`
-}
-
-type GoConstSettings struct {
-	IgnoreStrings       string `mapstructure:"ignore-strings"`
-	IgnoreTests         bool   `mapstructure:"ignore-tests"`
-	MatchWithConstants  bool   `mapstructure:"match-constant"`
-	MinStringLen        int    `mapstructure:"min-len"`
-	MinOccurrencesCount int    `mapstructure:"min-occurrences"`
-	ParseNumbers        bool   `mapstructure:"numbers"`
-	NumberMin           int    `mapstructure:"min"`
-	NumberMax           int    `mapstructure:"max"`
-	IgnoreCalls         bool   `mapstructure:"ignore-calls"`
-}
-
-type GoCriticSettings struct {
-	Go               string                           `mapstructure:"-"`
-	EnabledChecks    []string                         `mapstructure:"enabled-checks"`
-	DisabledChecks   []string                         `mapstructure:"disabled-checks"`
-	EnabledTags      []string                         `mapstructure:"enabled-tags"`
-	DisabledTags     []string                         `mapstructure:"disabled-tags"`
-	SettingsPerCheck map[string]GoCriticCheckSettings `mapstructure:"settings"`
-}
-
-type GoCriticCheckSettings map[string]any
-
-type GoCycloSettings struct {
-	MinComplexity int `mapstructure:"min-complexity"`
-}
-
-type GodotSettings struct {
-	Scope   string   `mapstructure:"scope"`
-	Exclude []string `mapstructure:"exclude"`
-	Capital bool     `mapstructure:"capital"`
-	Period  bool     `mapstructure:"period"`
-
-	// Deprecated: use `Scope` instead
-	CheckAll bool `mapstructure:"check-all"`
-}
-
-type GodoxSettings struct {
-	Keywords []string
-}
-
-type GoFmtSettings struct {
-	Simplify     bool
-	RewriteRules []GoFmtRewriteRule `mapstructure:"rewrite-rules"`
-}
-
-type GoFmtRewriteRule struct {
-	Pattern     string
-	Replacement string
-}
-
-type GofumptSettings struct {
-	ModulePath string `mapstructure:"module-path"`
-	ExtraRules bool   `mapstructure:"extra-rules"`
-
-	// Deprecated: use the global `run.go` instead.
- LangVersion string `mapstructure:"lang-version"` -} - -type GoHeaderSettings struct { - Values map[string]map[string]string `mapstructure:"values"` - Template string `mapstructure:"template"` - TemplatePath string `mapstructure:"template-path"` -} - -type GoImportsSettings struct { - LocalPrefixes string `mapstructure:"local-prefixes"` -} - -type GoLintSettings struct { - MinConfidence float64 `mapstructure:"min-confidence"` -} - -type GoMndSettings struct { - Settings map[string]map[string]any // Deprecated - Checks []string `mapstructure:"checks"` - IgnoredNumbers []string `mapstructure:"ignored-numbers"` - IgnoredFiles []string `mapstructure:"ignored-files"` - IgnoredFunctions []string `mapstructure:"ignored-functions"` -} - -type GoModDirectivesSettings struct { - ReplaceAllowList []string `mapstructure:"replace-allow-list"` - ReplaceLocal bool `mapstructure:"replace-local"` - ExcludeForbidden bool `mapstructure:"exclude-forbidden"` - RetractAllowNoExplanation bool `mapstructure:"retract-allow-no-explanation"` -} - -type GoModGuardSettings struct { - Allowed struct { - Modules []string `mapstructure:"modules"` - Domains []string `mapstructure:"domains"` - } `mapstructure:"allowed"` - Blocked struct { - Modules []map[string]struct { - Recommendations []string `mapstructure:"recommendations"` - Reason string `mapstructure:"reason"` - } `mapstructure:"modules"` - Versions []map[string]struct { - Version string `mapstructure:"version"` - Reason string `mapstructure:"reason"` - } `mapstructure:"versions"` - LocalReplaceDirectives bool `mapstructure:"local_replace_directives"` - } `mapstructure:"blocked"` -} - -type GoSecSettings struct { - Includes []string `mapstructure:"includes"` - Excludes []string `mapstructure:"excludes"` - Severity string `mapstructure:"severity"` - Confidence string `mapstructure:"confidence"` - ExcludeGenerated bool `mapstructure:"exclude-generated"` - Config map[string]any `mapstructure:"config"` - Concurrency int `mapstructure:"concurrency"` -} - -type GosmopolitanSettings struct { - AllowTimeLocal bool `mapstructure:"allow-time-local"` - EscapeHatches []string `mapstructure:"escape-hatches"` - IgnoreTests bool `mapstructure:"ignore-tests"` - WatchForScripts []string `mapstructure:"watch-for-scripts"` -} - -type GovetSettings struct { - Go string `mapstructure:"-"` - CheckShadowing bool `mapstructure:"check-shadowing"` - Settings map[string]map[string]any - - Enable []string - Disable []string - EnableAll bool `mapstructure:"enable-all"` - DisableAll bool `mapstructure:"disable-all"` -} - -func (cfg *GovetSettings) Validate() error { - if cfg.EnableAll && cfg.DisableAll { - return errors.New("enable-all and disable-all can't be combined") - } - if cfg.EnableAll && len(cfg.Enable) != 0 { - return errors.New("enable-all and enable can't be combined") - } - if cfg.DisableAll && len(cfg.Disable) != 0 { - return errors.New("disable-all and disable can't be combined") - } - return nil -} - -type GrouperSettings struct { - ConstRequireSingleConst bool `mapstructure:"const-require-single-const"` - ConstRequireGrouping bool `mapstructure:"const-require-grouping"` - ImportRequireSingleImport bool `mapstructure:"import-require-single-import"` - ImportRequireGrouping bool `mapstructure:"import-require-grouping"` - TypeRequireSingleType bool `mapstructure:"type-require-single-type"` - TypeRequireGrouping bool `mapstructure:"type-require-grouping"` - VarRequireSingleVar bool `mapstructure:"var-require-single-var"` - VarRequireGrouping bool 
`mapstructure:"var-require-grouping"` -} - -type IfshortSettings struct { - MaxDeclLines int `mapstructure:"max-decl-lines"` - MaxDeclChars int `mapstructure:"max-decl-chars"` -} - -type ImportAsSettings struct { - Alias []ImportAsAlias - NoUnaliased bool `mapstructure:"no-unaliased"` - NoExtraAliases bool `mapstructure:"no-extra-aliases"` -} - -type ImportAsAlias struct { - Pkg string - Alias string -} - -type INamedParamSettings struct { - SkipSingleParam bool `mapstructure:"skip-single-param"` -} - -type InterfaceBloatSettings struct { - Max int `mapstructure:"max"` -} - -type IreturnSettings struct { - Allow []string `mapstructure:"allow"` - Reject []string `mapstructure:"reject"` -} - -type LllSettings struct { - LineLength int `mapstructure:"line-length"` - TabWidth int `mapstructure:"tab-width"` -} - -type LoggerCheckSettings struct { - Kitlog bool `mapstructure:"kitlog"` - Klog bool `mapstructure:"klog"` - Logr bool `mapstructure:"logr"` - Zap bool `mapstructure:"zap"` - RequireStringKey bool `mapstructure:"require-string-key"` - NoPrintfLike bool `mapstructure:"no-printf-like"` - Rules []string `mapstructure:"rules"` -} - -type MaintIdxSettings struct { - Under int `mapstructure:"under"` -} - -type MakezeroSettings struct { - Always bool -} - -type MalignedSettings struct { - SuggestNewOrder bool `mapstructure:"suggest-new"` -} - -type MisspellSettings struct { - Mode string `mapstructure:"mode"` - Locale string - // TODO(ldez): v2 the option must be renamed to `IgnoredRules`. - IgnoreWords []string `mapstructure:"ignore-words"` -} - -type MustTagSettings struct { - Functions []struct { - Name string `mapstructure:"name"` - Tag string `mapstructure:"tag"` - ArgPos int `mapstructure:"arg-pos"` - } `mapstructure:"functions"` -} - -type NakedretSettings struct { - MaxFuncLines int `mapstructure:"max-func-lines"` -} - -type NestifSettings struct { - MinComplexity int `mapstructure:"min-complexity"` -} - -type NilNilSettings struct { - CheckedTypes []string `mapstructure:"checked-types"` -} - -type NlreturnSettings struct { - BlockSize int `mapstructure:"block-size"` -} - -type NoLintLintSettings struct { - RequireExplanation bool `mapstructure:"require-explanation"` - RequireSpecific bool `mapstructure:"require-specific"` - AllowNoExplanation []string `mapstructure:"allow-no-explanation"` - AllowUnused bool `mapstructure:"allow-unused"` -} - -type NoNamedReturnsSettings struct { - ReportErrorInDefer bool `mapstructure:"report-error-in-defer"` -} - -type ParallelTestSettings struct { - IgnoreMissing bool `mapstructure:"ignore-missing"` - IgnoreMissingSubtests bool `mapstructure:"ignore-missing-subtests"` -} - -type PerfSprintSettings struct { - IntConversion bool `mapstructure:"int-conversion"` - ErrError bool `mapstructure:"err-error"` - ErrorF bool `mapstructure:"errorf"` - SprintF1 bool `mapstructure:"sprintf1"` -} - -type PreallocSettings struct { - Simple bool - RangeLoops bool `mapstructure:"range-loops"` - ForLoops bool `mapstructure:"for-loops"` -} - -type PredeclaredSettings struct { - Ignore string `mapstructure:"ignore"` - Qualified bool `mapstructure:"q"` -} - -type PromlinterSettings struct { - Strict bool `mapstructure:"strict"` - DisabledLinters []string `mapstructure:"disabled-linters"` -} - -type ProtoGetterSettings struct { - SkipGeneratedBy []string `mapstructure:"skip-generated-by"` - SkipFiles []string `mapstructure:"skip-files"` - SkipAnyGenerated bool `mapstructure:"skip-any-generated"` - ReplaceFirstArgInAppend bool `mapstructure:"replace-first-arg-in-append"` 
-} - -type ReassignSettings struct { - Patterns []string `mapstructure:"patterns"` -} - -type ReviveSettings struct { - MaxOpenFiles int `mapstructure:"max-open-files"` - IgnoreGeneratedHeader bool `mapstructure:"ignore-generated-header"` - Confidence float64 - Severity string - EnableAllRules bool `mapstructure:"enable-all-rules"` - Rules []struct { - Name string - Arguments []any - Severity string - Disabled bool - } - ErrorCode int `mapstructure:"error-code"` - WarningCode int `mapstructure:"warning-code"` - Directives []struct { - Name string - Severity string - } -} - -type RowsErrCheckSettings struct { - Packages []string -} - -type SlogLintSettings struct { - NoMixedArgs bool `mapstructure:"no-mixed-args"` - KVOnly bool `mapstructure:"kv-only"` - AttrOnly bool `mapstructure:"attr-only"` - ContextOnly bool `mapstructure:"context-only"` - StaticMsg bool `mapstructure:"static-msg"` - NoRawKeys bool `mapstructure:"no-raw-keys"` - KeyNamingCase string `mapstructure:"key-naming-case"` - ArgsOnSepLines bool `mapstructure:"args-on-sep-lines"` -} - -type SpancheckSettings struct { - Checks []string `mapstructure:"checks"` - IgnoreCheckSignatures []string `mapstructure:"ignore-check-signatures"` -} - -type StaticCheckSettings struct { - // Deprecated: use the global `run.go` instead. - GoVersion string `mapstructure:"go"` - - Checks []string `mapstructure:"checks"` - Initialisms []string `mapstructure:"initialisms"` // only for stylecheck - DotImportWhitelist []string `mapstructure:"dot-import-whitelist"` // only for stylecheck - HTTPStatusCodeWhitelist []string `mapstructure:"http-status-code-whitelist"` // only for stylecheck -} - -func (s *StaticCheckSettings) HasConfiguration() bool { - return len(s.Initialisms) > 0 || len(s.HTTPStatusCodeWhitelist) > 0 || len(s.DotImportWhitelist) > 0 || len(s.Checks) > 0 -} - -type StructCheckSettings struct { - CheckExportedFields bool `mapstructure:"exported-fields"` -} - -type TagAlignSettings struct { - Align bool `mapstructure:"align"` - Sort bool `mapstructure:"sort"` - Order []string `mapstructure:"order"` - Strict bool `mapstructure:"strict"` -} - -type TagliatelleSettings struct { - Case struct { - Rules map[string]string - UseFieldName bool `mapstructure:"use-field-name"` - } -} - -type TestifylintSettings struct { - EnableAll bool `mapstructure:"enable-all"` - DisableAll bool `mapstructure:"disable-all"` - EnabledCheckers []string `mapstructure:"enable"` - DisabledCheckers []string `mapstructure:"disable"` - - ExpectedActual struct { - ExpVarPattern string `mapstructure:"pattern"` - } `mapstructure:"expected-actual"` - - RequireError struct { - FnPattern string `mapstructure:"fn-pattern"` - } `mapstructure:"require-error"` - - SuiteExtraAssertCall struct { - Mode string `mapstructure:"mode"` - } `mapstructure:"suite-extra-assert-call"` -} - -type TestpackageSettings struct { - SkipRegexp string `mapstructure:"skip-regexp"` - AllowPackages []string `mapstructure:"allow-packages"` -} - -type ThelperSettings struct { - Test ThelperOptions `mapstructure:"test"` - Fuzz ThelperOptions `mapstructure:"fuzz"` - Benchmark ThelperOptions `mapstructure:"benchmark"` - TB ThelperOptions `mapstructure:"tb"` -} - -type ThelperOptions struct { - First *bool `mapstructure:"first"` - Name *bool `mapstructure:"name"` - Begin *bool `mapstructure:"begin"` -} - -type TenvSettings struct { - All bool `mapstructure:"all"` -} - -type UseStdlibVarsSettings struct { - HTTPMethod bool `mapstructure:"http-method"` - HTTPStatusCode bool `mapstructure:"http-status-code"` - 
TimeWeekday bool `mapstructure:"time-weekday"` - TimeMonth bool `mapstructure:"time-month"` - TimeLayout bool `mapstructure:"time-layout"` - CryptoHash bool `mapstructure:"crypto-hash"` - DefaultRPCPath bool `mapstructure:"default-rpc-path"` - OSDevNull bool `mapstructure:"os-dev-null"` - SQLIsolationLevel bool `mapstructure:"sql-isolation-level"` - TLSSignatureScheme bool `mapstructure:"tls-signature-scheme"` - ConstantKind bool `mapstructure:"constant-kind"` - SyslogPriority bool `mapstructure:"syslog-priority"` -} - -type UnparamSettings struct { - CheckExported bool `mapstructure:"check-exported"` - Algo string -} - -type UnusedSettings struct { - FieldWritesAreUses bool `mapstructure:"field-writes-are-uses"` - PostStatementsAreReads bool `mapstructure:"post-statements-are-reads"` - ExportedIsUsed bool `mapstructure:"exported-is-used"` - ExportedFieldsAreUsed bool `mapstructure:"exported-fields-are-used"` - ParametersAreUsed bool `mapstructure:"parameters-are-used"` - LocalVariablesAreUsed bool `mapstructure:"local-variables-are-used"` - GeneratedIsUsed bool `mapstructure:"generated-is-used"` -} - -type VarCheckSettings struct { - CheckExportedFields bool `mapstructure:"exported-fields"` -} - -type VarnamelenSettings struct { - MaxDistance int `mapstructure:"max-distance"` - MinNameLength int `mapstructure:"min-name-length"` - CheckReceiver bool `mapstructure:"check-receiver"` - CheckReturn bool `mapstructure:"check-return"` - CheckTypeParam bool `mapstructure:"check-type-param"` - IgnoreNames []string `mapstructure:"ignore-names"` - IgnoreTypeAssertOk bool `mapstructure:"ignore-type-assert-ok"` - IgnoreMapIndexOk bool `mapstructure:"ignore-map-index-ok"` - IgnoreChanRecvOk bool `mapstructure:"ignore-chan-recv-ok"` - IgnoreDecls []string `mapstructure:"ignore-decls"` -} - -type WhitespaceSettings struct { - MultiIf bool `mapstructure:"multi-if"` - MultiFunc bool `mapstructure:"multi-func"` -} - -type WrapcheckSettings struct { - // TODO(ldez): v2 the options must be renamed to use hyphen. - IgnoreSigs []string `mapstructure:"ignoreSigs"` - IgnoreSigRegexps []string `mapstructure:"ignoreSigRegexps"` - IgnorePackageGlobs []string `mapstructure:"ignorePackageGlobs"` - IgnoreInterfaceRegexps []string `mapstructure:"ignoreInterfaceRegexps"` -} - -type WSLSettings struct { - StrictAppend bool `mapstructure:"strict-append"` - AllowAssignAndCallCuddle bool `mapstructure:"allow-assign-and-call"` - AllowAssignAndAnythingCuddle bool `mapstructure:"allow-assign-and-anything"` - AllowMultiLineAssignCuddle bool `mapstructure:"allow-multiline-assign"` - ForceCaseTrailingWhitespaceLimit int `mapstructure:"force-case-trailing-whitespace"` - AllowTrailingComment bool `mapstructure:"allow-trailing-comment"` - AllowSeparatedLeadingComment bool `mapstructure:"allow-separated-leading-comment"` - AllowCuddleDeclaration bool `mapstructure:"allow-cuddle-declarations"` - AllowCuddleWithCalls []string `mapstructure:"allow-cuddle-with-calls"` - AllowCuddleWithRHS []string `mapstructure:"allow-cuddle-with-rhs"` - ForceCuddleErrCheckAndAssign bool `mapstructure:"force-err-cuddling"` - ErrorVariableNames []string `mapstructure:"error-variable-names"` - ForceExclusiveShortDeclarations bool `mapstructure:"force-short-decl-cuddling"` -} - -// CustomLinterSettings encapsulates the meta-data of a private linter. -// For example, a private linter may be added to the golangci config file as shown below. 
-// -// linters-settings: -// custom: -// example: -// path: /example.so -// description: The description of the linter -// original-url: github.com/golangci/example-linter -type CustomLinterSettings struct { - // Path to a plugin *.so file that implements the private linter. - Path string - // Description describes the purpose of the private linter. - Description string - // OriginalURL The URL containing the source code for the private linter. - OriginalURL string `mapstructure:"original-url"` - - // Settings plugin settings only work with linterdb.PluginConstructor symbol. - Settings any -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/config/output.go b/vendor/github.com/golangci/golangci-lint/pkg/config/output.go deleted file mode 100644 index e87263920..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/config/output.go +++ /dev/null @@ -1,41 +0,0 @@ -package config - -const ( - OutFormatJSON = "json" - OutFormatLineNumber = "line-number" - OutFormatColoredLineNumber = "colored-line-number" - OutFormatTab = "tab" - OutFormatColoredTab = "colored-tab" - OutFormatCheckstyle = "checkstyle" - OutFormatCodeClimate = "code-climate" - OutFormatHTML = "html" - OutFormatJunitXML = "junit-xml" - OutFormatGithubActions = "github-actions" - OutFormatTeamCity = "teamcity" -) - -var OutFormats = []string{ - OutFormatColoredLineNumber, - OutFormatLineNumber, - OutFormatJSON, - OutFormatTab, - OutFormatCheckstyle, - OutFormatCodeClimate, - OutFormatHTML, - OutFormatJunitXML, - OutFormatGithubActions, - OutFormatTeamCity, -} - -type Output struct { - Format string - PrintIssuedLine bool `mapstructure:"print-issued-lines"` - PrintLinterName bool `mapstructure:"print-linter-name"` - UniqByLine bool `mapstructure:"uniq-by-line"` - SortResults bool `mapstructure:"sort-results"` - PrintWelcomeMessage bool `mapstructure:"print-welcome"` - PathPrefix string `mapstructure:"path-prefix"` - - // only work with CLI flags because the setup of logs is done before the config file parsing. - Color string -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/config/reader.go b/vendor/github.com/golangci/golangci-lint/pkg/config/reader.go deleted file mode 100644 index 0c4fa13b0..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/config/reader.go +++ /dev/null @@ -1,248 +0,0 @@ -package config - -import ( - "errors" - "fmt" - "os" - "path/filepath" - "slices" - "strings" - - "github.com/go-viper/mapstructure/v2" - "github.com/mitchellh/go-homedir" - "github.com/spf13/viper" - - "github.com/golangci/golangci-lint/pkg/exitcodes" - "github.com/golangci/golangci-lint/pkg/fsutils" - "github.com/golangci/golangci-lint/pkg/logutils" -) - -type FileReader struct { - log logutils.Log - cfg *Config - commandLineCfg *Config -} - -func NewFileReader(toCfg, commandLineCfg *Config, log logutils.Log) *FileReader { - return &FileReader{ - log: log, - cfg: toCfg, - commandLineCfg: commandLineCfg, - } -} - -func (r *FileReader) Read() error { - // XXX: hack with double parsing for 2 purposes: - // 1. to access "config" option here. - // 2. to give config less priority than command line. - - configFile, err := r.parseConfigOption() - if err != nil { - if errors.Is(err, errConfigDisabled) { - return nil - } - - return fmt.Errorf("can't parse --config option: %w", err) - } - - if configFile != "" { - viper.SetConfigFile(configFile) - - // Assume YAML if the file has no extension. 
- if filepath.Ext(configFile) == "" { - viper.SetConfigType("yaml") - } - } else { - r.setupConfigFileSearch() - } - - return r.parseConfig() -} - -func (r *FileReader) parseConfig() error { - if err := viper.ReadInConfig(); err != nil { - if _, ok := err.(viper.ConfigFileNotFoundError); ok { - return nil - } - - return fmt.Errorf("can't read viper config: %w", err) - } - - usedConfigFile := viper.ConfigFileUsed() - if usedConfigFile == "" { - return nil - } - - if usedConfigFile == os.Stdin.Name() { - usedConfigFile = "" - r.log.Infof("Reading config file stdin") - } else { - var err error - usedConfigFile, err = fsutils.ShortestRelPath(usedConfigFile, "") - if err != nil { - r.log.Warnf("Can't pretty print config file path: %v", err) - } - - r.log.Infof("Used config file %s", usedConfigFile) - } - - usedConfigDir, err := filepath.Abs(filepath.Dir(usedConfigFile)) - if err != nil { - return errors.New("can't get config directory") - } - r.cfg.cfgDir = usedConfigDir - - if err := viper.Unmarshal(r.cfg, viper.DecodeHook(mapstructure.ComposeDecodeHookFunc( - // Default hooks (https://github.com/spf13/viper/blob/518241257478c557633ab36e474dfcaeb9a3c623/viper.go#L135-L138). - mapstructure.StringToTimeDurationHookFunc(), - mapstructure.StringToSliceHookFunc(","), - - // Needed for forbidigo. - mapstructure.TextUnmarshallerHookFunc(), - ))); err != nil { - return fmt.Errorf("can't unmarshal config by viper: %w", err) - } - - if err := r.validateConfig(); err != nil { - return fmt.Errorf("can't validate config: %w", err) - } - - if r.cfg.InternalTest { // just for testing purposes: to detect config file usage - fmt.Fprintln(logutils.StdOut, "test") - os.Exit(exitcodes.Success) - } - - return nil -} - -func (r *FileReader) validateConfig() error { - c := r.cfg - if len(c.Run.Args) != 0 { - return errors.New("option run.args in config isn't supported now") - } - - if c.Run.CPUProfilePath != "" { - return errors.New("option run.cpuprofilepath in config isn't allowed") - } - - if c.Run.MemProfilePath != "" { - return errors.New("option run.memprofilepath in config isn't allowed") - } - - if c.Run.TracePath != "" { - return errors.New("option run.tracepath in config isn't allowed") - } - - if c.Run.IsVerbose { - return errors.New("can't set run.verbose option with config: only on command-line") - } - for i, rule := range c.Issues.ExcludeRules { - if err := rule.Validate(); err != nil { - return fmt.Errorf("error in exclude rule #%d: %w", i, err) - } - } - if len(c.Severity.Rules) > 0 && c.Severity.Default == "" { - return errors.New("can't set severity rule option: no default severity defined") - } - for i, rule := range c.Severity.Rules { - if err := rule.Validate(); err != nil { - return fmt.Errorf("error in severity rule #%d: %w", i, err) - } - } - if err := c.LintersSettings.Govet.Validate(); err != nil { - return fmt.Errorf("error in govet config: %w", err) - } - return nil -} - -func getFirstPathArg() string { - args := os.Args - - // skip all args ([golangci-lint, run/linters]) before files/dirs list - for len(args) != 0 { - if args[0] == "run" { - args = args[1:] - break - } - - args = args[1:] - } - - // find first file/dir arg - firstArg := "./..." 
-	for _, arg := range args {
-		if !strings.HasPrefix(arg, "-") {
-			firstArg = arg
-			break
-		}
-	}
-
-	return firstArg
-}
-
-func (r *FileReader) setupConfigFileSearch() {
-	firstArg := getFirstPathArg()
-	absStartPath, err := filepath.Abs(firstArg)
-	if err != nil {
-		r.log.Warnf("Can't make abs path for %q: %s", firstArg, err)
-		absStartPath = filepath.Clean(firstArg)
-	}
-
-	// start from it
-	var curDir string
-	if fsutils.IsDir(absStartPath) {
-		curDir = absStartPath
-	} else {
-		curDir = filepath.Dir(absStartPath)
-	}
-
-	// find all dirs from it up to the root
-	configSearchPaths := []string{"./"}
-
-	for {
-		configSearchPaths = append(configSearchPaths, curDir)
-		newCurDir := filepath.Dir(curDir)
-		if curDir == newCurDir || newCurDir == "" {
-			break
-		}
-		curDir = newCurDir
-	}
-
-	// find home directory for global config
-	if home, err := homedir.Dir(); err != nil {
-		r.log.Warnf("Can't get user's home directory: %s", err.Error())
-	} else if !slices.Contains(configSearchPaths, home) {
-		configSearchPaths = append(configSearchPaths, home)
-	}
-
-	r.log.Infof("Config search paths: %s", configSearchPaths)
-	viper.SetConfigName(".golangci")
-	for _, p := range configSearchPaths {
-		viper.AddConfigPath(p)
-	}
-}
-
-var errConfigDisabled = errors.New("config is disabled by --no-config")
-
-func (r *FileReader) parseConfigOption() (string, error) {
-	cfg := r.commandLineCfg
-	if cfg == nil {
-		return "", nil
-	}
-
-	configFile := cfg.Run.Config
-	if cfg.Run.NoConfig && configFile != "" {
-		return "", errors.New("can't combine option --config and --no-config")
-	}
-
-	if cfg.Run.NoConfig {
-		return "", errConfigDisabled
-	}
-
-	configFile, err := homedir.Expand(configFile)
-	if err != nil {
-		return "", errors.New("failed to expand configuration path")
-	}
-
-	return configFile, nil
-}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/config/run.go b/vendor/github.com/golangci/golangci-lint/pkg/config/run.go
deleted file mode 100644
index 2bb21d9fa..000000000
--- a/vendor/github.com/golangci/golangci-lint/pkg/config/run.go
+++ /dev/null
@@ -1,42 +0,0 @@
-package config
-
-import "time"
-
-// Run encapsulates the config options for running the linter analysis.
-type Run struct {
-	IsVerbose           bool `mapstructure:"verbose"`
-	Silent              bool
-	CPUProfilePath      string
-	MemProfilePath      string
-	TracePath           string
-	Concurrency         int
-	PrintResourcesUsage bool `mapstructure:"print-resources-usage"`
-
-	Config   string // The path to the golangci config file, as specified with the --config argument.
-	NoConfig bool
-
-	Args []string
-
-	Go string `mapstructure:"go"`
-
-	BuildTags           []string `mapstructure:"build-tags"`
-	ModulesDownloadMode string   `mapstructure:"modules-download-mode"`
-
-	ExitCodeIfIssuesFound int  `mapstructure:"issues-exit-code"`
-	AnalyzeTests          bool `mapstructure:"tests"`
-
-	// Deprecated: Deadline exists for historical compatibility
-	// and should not be used. To set run timeout use Timeout instead.
- Deadline time.Duration - Timeout time.Duration - - PrintVersion bool - SkipFiles []string `mapstructure:"skip-files"` - SkipDirs []string `mapstructure:"skip-dirs"` - UseDefaultSkipDirs bool `mapstructure:"skip-dirs-use-default"` - - AllowParallelRunners bool `mapstructure:"allow-parallel-runners"` - AllowSerialRunners bool `mapstructure:"allow-serial-runners"` - - ShowStats bool `mapstructure:"show-stats"` -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/config/severity.go b/vendor/github.com/golangci/golangci-lint/pkg/config/severity.go deleted file mode 100644 index 3068a0ed6..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/config/severity.go +++ /dev/null @@ -1,18 +0,0 @@ -package config - -const severityRuleMinConditionsCount = 1 - -type Severity struct { - Default string `mapstructure:"default-severity"` - CaseSensitive bool `mapstructure:"case-sensitive"` - Rules []SeverityRule `mapstructure:"rules"` -} - -type SeverityRule struct { - BaseRule `mapstructure:",squash"` - Severity string -} - -func (s *SeverityRule) Validate() error { - return s.BaseRule.Validate(severityRuleMinConditionsCount) -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/exitcodes/exitcodes.go b/vendor/github.com/golangci/golangci-lint/pkg/exitcodes/exitcodes.go deleted file mode 100644 index 83331dbe7..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/exitcodes/exitcodes.go +++ /dev/null @@ -1,32 +0,0 @@ -package exitcodes - -const ( - Success = iota - IssuesFound - WarningInTest - Failure - Timeout - NoGoFiles - NoConfigFileDetected - ErrorWasLogged -) - -type ExitError struct { - Message string - Code int -} - -func (e ExitError) Error() string { - return e.Message -} - -var ( - ErrNoGoFiles = &ExitError{ - Message: "no go files to analyze", - Code: NoGoFiles, - } - ErrFailure = &ExitError{ - Message: "failed to analyze", - Code: Failure, - } -) diff --git a/vendor/github.com/golangci/golangci-lint/pkg/fsutils/filecache.go b/vendor/github.com/golangci/golangci-lint/pkg/fsutils/filecache.go deleted file mode 100644 index e8e5ba19b..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/fsutils/filecache.go +++ /dev/null @@ -1,65 +0,0 @@ -package fsutils - -import ( - "fmt" - "os" - "sync" - - "github.com/golangci/golangci-lint/pkg/logutils" -) - -type FileCache struct { - files sync.Map -} - -func NewFileCache() *FileCache { - return &FileCache{} -} - -func (fc *FileCache) GetFileBytes(filePath string) ([]byte, error) { - cachedBytes, ok := fc.files.Load(filePath) - if ok { - return cachedBytes.([]byte), nil - } - - fileBytes, err := os.ReadFile(filePath) - if err != nil { - return nil, fmt.Errorf("can't read file %s: %w", filePath, err) - } - - fc.files.Store(filePath, fileBytes) - return fileBytes, nil -} - -func PrettifyBytesCount(n int64) string { - const ( - Multiplexer = 1024 - KiB = 1 * Multiplexer - MiB = KiB * Multiplexer - GiB = MiB * Multiplexer - ) - - if n >= GiB { - return fmt.Sprintf("%.1fGiB", float64(n)/GiB) - } - if n >= MiB { - return fmt.Sprintf("%.1fMiB", float64(n)/MiB) - } - if n >= KiB { - return fmt.Sprintf("%.1fKiB", float64(n)/KiB) - } - return fmt.Sprintf("%dB", n) -} - -func (fc *FileCache) PrintStats(log logutils.Log) { - var size int64 - var mapLen int - fc.files.Range(func(_, fileBytes any) bool { - mapLen++ - size += int64(len(fileBytes.([]byte))) - - return true - }) - - log.Infof("File cache stats: %d entries of total size %s", mapLen, PrettifyBytesCount(size)) -} diff --git 
a/vendor/github.com/golangci/golangci-lint/pkg/fsutils/files.go b/vendor/github.com/golangci/golangci-lint/pkg/fsutils/files.go deleted file mode 100644 index 4398ab9fc..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/fsutils/files.go +++ /dev/null @@ -1,33 +0,0 @@ -package fsutils - -import "path/filepath" - -// Files combines different operations related to handling file paths and content. -type Files struct { - *LineCache - pathPrefix string -} - -func NewFiles(lc *LineCache, pathPrefix string) *Files { - return &Files{ - LineCache: lc, - pathPrefix: pathPrefix, - } -} - -// WithPathPrefix takes a path that is relative to the current directory (as used in issues) -// and adds the configured path prefix, if there is one. -// The resulting path then can be shown to the user or compared against paths specified in the configuration. -func (f *Files) WithPathPrefix(relativePath string) string { - return WithPathPrefix(f.pathPrefix, relativePath) -} - -// WithPathPrefix takes a path that is relative to the current directory (as used in issues) -// and adds the configured path prefix, if there is one. -// The resulting path then can be shown to the user or compared against paths specified in the configuration. -func WithPathPrefix(pathPrefix, relativePath string) string { - if pathPrefix == "" { - return relativePath - } - return filepath.Join(pathPrefix, relativePath) -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/fsutils/fsutils.go b/vendor/github.com/golangci/golangci-lint/pkg/fsutils/fsutils.go deleted file mode 100644 index 715a3a4af..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/fsutils/fsutils.go +++ /dev/null @@ -1,100 +0,0 @@ -package fsutils - -import ( - "fmt" - "os" - "path/filepath" - "sync" -) - -func IsDir(filename string) bool { - fi, err := os.Stat(filename) - return err == nil && fi.IsDir() -} - -var cachedWd string -var cachedWdError error -var getWdOnce sync.Once -var useCache = true - -func UseWdCache(use bool) { - useCache = use -} - -func Getwd() (string, error) { - if !useCache { // for tests - return os.Getwd() - } - - getWdOnce.Do(func() { - cachedWd, cachedWdError = os.Getwd() - if cachedWdError != nil { - return - } - - evaledWd, err := EvalSymlinks(cachedWd) - if err != nil { - cachedWd, cachedWdError = "", fmt.Errorf("can't eval symlinks on wd %s: %w", cachedWd, err) - return - } - - cachedWd = evaledWd - }) - - return cachedWd, cachedWdError -} - -var evalSymlinkCache sync.Map - -type evalSymlinkRes struct { - path string - err error -} - -func EvalSymlinks(path string) (string, error) { - r, ok := evalSymlinkCache.Load(path) - if ok { - er := r.(evalSymlinkRes) - return er.path, er.err - } - - var er evalSymlinkRes - er.path, er.err = filepath.EvalSymlinks(path) - evalSymlinkCache.Store(path, er) - - return er.path, er.err -} - -func ShortestRelPath(path, wd string) (string, error) { - if wd == "" { // get it if user don't have cached working dir - var err error - wd, err = Getwd() - if err != nil { - return "", fmt.Errorf("can't get working directory: %w", err) - } - } - - evaledPath, err := EvalSymlinks(path) - if err != nil { - return "", fmt.Errorf("can't eval symlinks for path %s: %w", path, err) - } - path = evaledPath - - // make path absolute and then relative to be able to fix this case: - // we are in /test dir, we want to normalize ../test, and have file file.go in this dir; - // it must have normalized path file.go, not ../test/file.go, - var absPath string - if filepath.IsAbs(path) { - absPath = path - } 
else { - absPath = filepath.Join(wd, path) - } - - relPath, err := filepath.Rel(wd, absPath) - if err != nil { - return "", fmt.Errorf("can't get relative path for path %s and root %s: %w", - absPath, wd, err) - } - - return relPath, nil -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/fsutils/linecache.go b/vendor/github.com/golangci/golangci-lint/pkg/fsutils/linecache.go deleted file mode 100644 index 2e9226484..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/fsutils/linecache.go +++ /dev/null @@ -1,68 +0,0 @@ -package fsutils - -import ( - "bytes" - "fmt" - "sync" -) - -type fileLinesCache [][]byte - -type LineCache struct { - files sync.Map - fileCache *FileCache -} - -func NewLineCache(fc *FileCache) *LineCache { - return &LineCache{ - fileCache: fc, - } -} - -// GetLine returns the index1-th (1-based index) line from the file on filePath -func (lc *LineCache) GetLine(filePath string, index1 int) (string, error) { - if index1 == 0 { // some linters, e.g. gosec can do it: it really means first line - index1 = 1 - } - - const index1To0Offset = -1 - rawLine, err := lc.getRawLine(filePath, index1+index1To0Offset) - if err != nil { - return "", err - } - - return string(bytes.Trim(rawLine, "\r")), nil -} - -func (lc *LineCache) getRawLine(filePath string, index0 int) ([]byte, error) { - fc, err := lc.getFileCache(filePath) - if err != nil { - return nil, fmt.Errorf("failed to get file %s lines cache: %w", filePath, err) - } - - if index0 < 0 { - return nil, fmt.Errorf("invalid file line index0 < 0: %d", index0) - } - - if index0 >= len(fc) { - return nil, fmt.Errorf("invalid file line index0 (%d) >= len(fc) (%d)", index0, len(fc)) - } - - return fc[index0], nil -} - -func (lc *LineCache) getFileCache(filePath string) (fileLinesCache, error) { - loadedFc, ok := lc.files.Load(filePath) - if ok { - return loadedFc.(fileLinesCache), nil - } - - fileBytes, err := lc.fileCache.GetFileBytes(filePath) - if err != nil { - return nil, fmt.Errorf("can't get file %s bytes from cache: %w", filePath, err) - } - - fc := bytes.Split(fileBytes, []byte("\n")) - lc.files.Store(filePath, fileLinesCache(fc)) - return fc, nil -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/fsutils/path_unix.go b/vendor/github.com/golangci/golangci-lint/pkg/fsutils/path_unix.go deleted file mode 100644 index 2a171ecc0..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/fsutils/path_unix.go +++ /dev/null @@ -1,8 +0,0 @@ -//go:build !windows - -package fsutils - -// NormalizePathInRegex it's a noop function on Unix. -func NormalizePathInRegex(path string) string { - return path -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/fsutils/path_windows.go b/vendor/github.com/golangci/golangci-lint/pkg/fsutils/path_windows.go deleted file mode 100644 index 650aae1e1..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/fsutils/path_windows.go +++ /dev/null @@ -1,28 +0,0 @@ -//go:build windows - -package fsutils - -import ( - "path/filepath" - "regexp" - "strings" -) - -var separatorToReplace = regexp.QuoteMeta(string(filepath.Separator)) - -// NormalizePathInRegex normalizes path in regular expressions. -// noop on Unix. -// This replacing should be safe because "/" are disallowed in Windows -// https://docs.microsoft.com/windows/win32/fileio/naming-a-file -func NormalizePathInRegex(path string) string { - // remove redundant character escape "\/" https://github.com/golangci/golangci-lint/issues/3277 - clean := regexp.MustCompile(`\\+/`). 
- ReplaceAllStringFunc(path, func(s string) string { - if strings.Count(s, "\\")%2 == 0 { - return s - } - return s[1:] - }) - - return strings.ReplaceAll(clean, "/", separatorToReplace) -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/asasalint.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/asasalint.go deleted file mode 100644 index 67dde7991..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/asasalint.go +++ /dev/null @@ -1,30 +0,0 @@ -package golinters - -import ( - "github.com/alingse/asasalint" - "golang.org/x/tools/go/analysis" - - "github.com/golangci/golangci-lint/pkg/config" - "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" -) - -func NewAsasalint(setting *config.AsasalintSettings) *goanalysis.Linter { - cfg := asasalint.LinterSetting{} - if setting != nil { - cfg.Exclude = setting.Exclude - cfg.NoBuiltinExclusions = !setting.UseBuiltinExclusions - cfg.IgnoreTest = setting.IgnoreTest - } - - a, err := asasalint.NewAnalyzer(cfg) - if err != nil { - linterLogger.Fatalf("asasalint: create analyzer: %v", err) - } - - return goanalysis.NewLinter( - a.Name, - a.Doc, - []*analysis.Analyzer{a}, - nil, - ).WithLoadMode(goanalysis.LoadModeTypesInfo) -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/asciicheck.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/asciicheck.go deleted file mode 100644 index 7e7ee05e5..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/asciicheck.go +++ /dev/null @@ -1,19 +0,0 @@ -package golinters - -import ( - "github.com/tdakkota/asciicheck" - "golang.org/x/tools/go/analysis" - - "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" -) - -func NewAsciicheck() *goanalysis.Linter { - a := asciicheck.NewAnalyzer() - - return goanalysis.NewLinter( - a.Name, - a.Doc, - []*analysis.Analyzer{a}, - nil, - ).WithLoadMode(goanalysis.LoadModeSyntax) -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/bidichk.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/bidichk.go deleted file mode 100644 index ef266f55c..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/bidichk.go +++ /dev/null @@ -1,59 +0,0 @@ -package golinters - -import ( - "strings" - - "github.com/breml/bidichk/pkg/bidichk" - "golang.org/x/tools/go/analysis" - - "github.com/golangci/golangci-lint/pkg/config" - "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" -) - -func NewBiDiChkFuncName(cfg *config.BiDiChkSettings) *goanalysis.Linter { - a := bidichk.NewAnalyzer() - - cfgMap := map[string]map[string]any{} - if cfg != nil { - var opts []string - - if cfg.LeftToRightEmbedding { - opts = append(opts, "LEFT-TO-RIGHT-EMBEDDING") - } - if cfg.RightToLeftEmbedding { - opts = append(opts, "RIGHT-TO-LEFT-EMBEDDING") - } - if cfg.PopDirectionalFormatting { - opts = append(opts, "POP-DIRECTIONAL-FORMATTING") - } - if cfg.LeftToRightOverride { - opts = append(opts, "LEFT-TO-RIGHT-OVERRIDE") - } - if cfg.RightToLeftOverride { - opts = append(opts, "RIGHT-TO-LEFT-OVERRIDE") - } - if cfg.LeftToRightIsolate { - opts = append(opts, "LEFT-TO-RIGHT-ISOLATE") - } - if cfg.RightToLeftIsolate { - opts = append(opts, "RIGHT-TO-LEFT-ISOLATE") - } - if cfg.FirstStrongIsolate { - opts = append(opts, "FIRST-STRONG-ISOLATE") - } - if cfg.PopDirectionalIsolate { - opts = append(opts, "POP-DIRECTIONAL-ISOLATE") - } - - cfgMap[a.Name] = map[string]any{ - "disallowed-runes": strings.Join(opts, ","), - } - } - - return goanalysis.NewLinter( - a.Name, - 
"Checks for dangerous unicode character sequences", - []*analysis.Analyzer{a}, - cfgMap, - ).WithLoadMode(goanalysis.LoadModeSyntax) -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/bodyclose.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/bodyclose.go deleted file mode 100644 index e56bd83f2..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/bodyclose.go +++ /dev/null @@ -1,17 +0,0 @@ -package golinters - -import ( - "github.com/timakin/bodyclose/passes/bodyclose" - "golang.org/x/tools/go/analysis" - - "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" -) - -func NewBodyclose() *goanalysis.Linter { - return goanalysis.NewLinter( - "bodyclose", - "checks whether HTTP response body is closed successfully", - []*analysis.Analyzer{bodyclose.Analyzer}, - nil, - ).WithLoadMode(goanalysis.LoadModeTypesInfo) -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/commons.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/commons.go deleted file mode 100644 index 3b40e59bf..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/commons.go +++ /dev/null @@ -1,6 +0,0 @@ -package golinters - -import "github.com/golangci/golangci-lint/pkg/logutils" - -// linterLogger must be use only when the context logger is not available. -var linterLogger = logutils.NewStderrLog(logutils.DebugKeyLinter) diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/containedctx.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/containedctx.go deleted file mode 100644 index 8f7859af7..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/containedctx.go +++ /dev/null @@ -1,19 +0,0 @@ -package golinters - -import ( - "github.com/sivchari/containedctx" - "golang.org/x/tools/go/analysis" - - "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" -) - -func NewContainedCtx() *goanalysis.Linter { - a := containedctx.Analyzer - - return goanalysis.NewLinter( - a.Name, - a.Doc, - []*analysis.Analyzer{a}, - nil, - ).WithLoadMode(goanalysis.LoadModeTypesInfo) -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/contextcheck.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/contextcheck.go deleted file mode 100644 index f54192a18..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/contextcheck.go +++ /dev/null @@ -1,22 +0,0 @@ -package golinters - -import ( - "github.com/kkHAIKE/contextcheck" - "golang.org/x/tools/go/analysis" - - "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" - "github.com/golangci/golangci-lint/pkg/lint/linter" -) - -func NewContextCheck() *goanalysis.Linter { - analyzer := contextcheck.NewAnalyzer(contextcheck.Configuration{}) - - return goanalysis.NewLinter( - analyzer.Name, - analyzer.Doc, - []*analysis.Analyzer{analyzer}, - nil, - ).WithContextSetter(func(lintCtx *linter.Context) { - analyzer.Run = contextcheck.NewRun(lintCtx.Packages, false) - }).WithLoadMode(goanalysis.LoadModeTypesInfo) -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/cyclop.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/cyclop.go deleted file mode 100644 index f76c55552..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/cyclop.go +++ /dev/null @@ -1,37 +0,0 @@ -package golinters - -import ( - "github.com/bkielbasa/cyclop/pkg/analyzer" - "golang.org/x/tools/go/analysis" - - "github.com/golangci/golangci-lint/pkg/config" - 
"github.com/golangci/golangci-lint/pkg/golinters/goanalysis" -) - -func NewCyclop(settings *config.Cyclop) *goanalysis.Linter { - a := analyzer.NewAnalyzer() - - var cfg map[string]map[string]any - if settings != nil { - d := map[string]any{ - "skipTests": settings.SkipTests, - } - - if settings.MaxComplexity != 0 { - d["maxComplexity"] = settings.MaxComplexity - } - - if settings.PackageAverage != 0 { - d["packageAverage"] = settings.PackageAverage - } - - cfg = map[string]map[string]any{a.Name: d} - } - - return goanalysis.NewLinter( - a.Name, - "checks function and package cyclomatic complexity", - []*analysis.Analyzer{a}, - cfg, - ).WithLoadMode(goanalysis.LoadModeTypesInfo) -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/deadcode.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/deadcode.go deleted file mode 100644 index 4f563c381..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/deadcode.go +++ /dev/null @@ -1,61 +0,0 @@ -package golinters - -import ( - "fmt" - "sync" - - deadcodeAPI "github.com/golangci/go-misc/deadcode" - "golang.org/x/tools/go/analysis" - - "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" - "github.com/golangci/golangci-lint/pkg/lint/linter" - "github.com/golangci/golangci-lint/pkg/result" -) - -const deadcodeName = "deadcode" - -func NewDeadcode() *goanalysis.Linter { - var mu sync.Mutex - var resIssues []goanalysis.Issue - - analyzer := &analysis.Analyzer{ - Name: deadcodeName, - Doc: goanalysis.TheOnlyanalyzerDoc, - Run: func(pass *analysis.Pass) (any, error) { - prog := goanalysis.MakeFakeLoaderProgram(pass) - - issues, err := deadcodeAPI.Run(prog) - if err != nil { - return nil, err - } - - res := make([]goanalysis.Issue, 0, len(issues)) - for _, i := range issues { - res = append(res, goanalysis.NewIssue(&result.Issue{ - Pos: i.Pos, - Text: fmt.Sprintf("%s is unused", formatCode(i.UnusedIdentName, nil)), - FromLinter: deadcodeName, - }, pass)) - } - - if len(issues) == 0 { - return nil, nil - } - - mu.Lock() - resIssues = append(resIssues, res...) 
-			mu.Unlock()
-
-			return nil, nil
-		},
-	}
-
-	return goanalysis.NewLinter(
-		deadcodeName,
-		"Finds unused code",
-		[]*analysis.Analyzer{analyzer},
-		nil,
-	).WithIssuesReporter(func(*linter.Context) []goanalysis.Issue {
-		return resIssues
-	}).WithLoadMode(goanalysis.LoadModeTypesInfo)
-}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/decorder.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/decorder.go
deleted file mode 100644
index 5202a03a4..000000000
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/decorder.go
+++ /dev/null
@@ -1,44 +0,0 @@
-package golinters
-
-import (
-	"strings"
-
-	"gitlab.com/bosi/decorder"
-	"golang.org/x/tools/go/analysis"
-
-	"github.com/golangci/golangci-lint/pkg/config"
-	"github.com/golangci/golangci-lint/pkg/golinters/goanalysis"
-)
-
-func NewDecorder(settings *config.DecorderSettings) *goanalysis.Linter {
-	a := decorder.Analyzer
-
-	// disable all rules/checks by default
-	cfg := map[string]any{
-		"ignore-underscore-vars":        false,
-		"disable-dec-num-check":         true,
-		"disable-type-dec-num-check":    false,
-		"disable-const-dec-num-check":   false,
-		"disable-var-dec-num-check":     false,
-		"disable-dec-order-check":       true,
-		"disable-init-func-first-check": true,
-	}
-
-	if settings != nil {
-		cfg["dec-order"] = strings.Join(settings.DecOrder, ",")
-		cfg["ignore-underscore-vars"] = settings.IgnoreUnderscoreVars
-		cfg["disable-dec-num-check"] = settings.DisableDecNumCheck
-		cfg["disable-type-dec-num-check"] = settings.DisableTypeDecNumCheck
-		cfg["disable-const-dec-num-check"] = settings.DisableConstDecNumCheck
-		cfg["disable-var-dec-num-check"] = settings.DisableVarDecNumCheck
-		cfg["disable-dec-order-check"] = settings.DisableDecOrderCheck
-		cfg["disable-init-func-first-check"] = settings.DisableInitFuncFirstCheck
-	}
-
-	return goanalysis.NewLinter(
-		a.Name,
-		a.Doc,
-		[]*analysis.Analyzer{a},
-		map[string]map[string]any{a.Name: cfg},
-	).WithLoadMode(goanalysis.LoadModeSyntax)
-}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/depguard.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/depguard.go
deleted file mode 100644
index 49e471df8..000000000
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/depguard.go
+++ /dev/null
@@ -1,50 +0,0 @@
-package golinters
-
-import (
-	"github.com/OpenPeeDeeP/depguard/v2"
-	"golang.org/x/tools/go/analysis"
-
-	"github.com/golangci/golangci-lint/pkg/config"
-	"github.com/golangci/golangci-lint/pkg/golinters/goanalysis"
-	"github.com/golangci/golangci-lint/pkg/lint/linter"
-)
-
-func NewDepguard(settings *config.DepGuardSettings) *goanalysis.Linter {
-	conf := depguard.LinterSettings{}
-
-	if settings != nil {
-		for s, rule := range settings.Rules {
-			list := &depguard.List{
-				ListMode: rule.ListMode,
-				Files:    rule.Files,
-				Allow:    rule.Allow,
-			}
-
-			// because of bug with Viper parsing (split on dot) we use a list of struct instead of a map.
- // https://github.com/spf13/viper/issues/324 - // https://github.com/golangci/golangci-lint/issues/3749#issuecomment-1492536630 - - deny := map[string]string{} - for _, r := range rule.Deny { - deny[r.Pkg] = r.Desc - } - list.Deny = deny - - conf[s] = list - } - } - - a := depguard.NewUncompiledAnalyzer(&conf) - - return goanalysis.NewLinter( - a.Analyzer.Name, - a.Analyzer.Doc, - []*analysis.Analyzer{a.Analyzer}, - nil, - ).WithContextSetter(func(lintCtx *linter.Context) { - err := a.Compile() - if err != nil { - lintCtx.Log.Errorf("create analyzer: %v", err) - } - }).WithLoadMode(goanalysis.LoadModeSyntax) -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/dogsled.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/dogsled.go deleted file mode 100644 index 79502fe8b..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/dogsled.go +++ /dev/null @@ -1,111 +0,0 @@ -package golinters - -import ( - "fmt" - "go/ast" - "go/token" - "sync" - - "golang.org/x/tools/go/analysis" - - "github.com/golangci/golangci-lint/pkg/config" - "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" - "github.com/golangci/golangci-lint/pkg/lint/linter" - "github.com/golangci/golangci-lint/pkg/result" -) - -const dogsledName = "dogsled" - -//nolint:dupl -func NewDogsled(settings *config.DogsledSettings) *goanalysis.Linter { - var mu sync.Mutex - var resIssues []goanalysis.Issue - - analyzer := &analysis.Analyzer{ - Name: dogsledName, - Doc: goanalysis.TheOnlyanalyzerDoc, - Run: func(pass *analysis.Pass) (any, error) { - issues := runDogsled(pass, settings) - - if len(issues) == 0 { - return nil, nil - } - - mu.Lock() - resIssues = append(resIssues, issues...) - mu.Unlock() - - return nil, nil - }, - } - - return goanalysis.NewLinter( - dogsledName, - "Checks assignments with too many blank identifiers (e.g. 
x, _, _, _, := f())", - []*analysis.Analyzer{analyzer}, - nil, - ).WithIssuesReporter(func(*linter.Context) []goanalysis.Issue { - return resIssues - }).WithLoadMode(goanalysis.LoadModeSyntax) -} - -func runDogsled(pass *analysis.Pass, settings *config.DogsledSettings) []goanalysis.Issue { - var reports []goanalysis.Issue - for _, f := range pass.Files { - v := &returnsVisitor{ - maxBlanks: settings.MaxBlankIdentifiers, - f: pass.Fset, - } - - ast.Walk(v, f) - - for i := range v.issues { - reports = append(reports, goanalysis.NewIssue(&v.issues[i], pass)) - } - } - - return reports -} - -type returnsVisitor struct { - f *token.FileSet - maxBlanks int - issues []result.Issue -} - -func (v *returnsVisitor) Visit(node ast.Node) ast.Visitor { - funcDecl, ok := node.(*ast.FuncDecl) - if !ok { - return v - } - if funcDecl.Body == nil { - return v - } - - for _, expr := range funcDecl.Body.List { - assgnStmt, ok := expr.(*ast.AssignStmt) - if !ok { - continue - } - - numBlank := 0 - for _, left := range assgnStmt.Lhs { - ident, ok := left.(*ast.Ident) - if !ok { - continue - } - if ident.Name == "_" { - numBlank++ - } - } - - if numBlank > v.maxBlanks { - v.issues = append(v.issues, result.Issue{ - FromLinter: dogsledName, - Text: fmt.Sprintf("declaration has %v blank identifiers", numBlank), - Pos: v.f.Position(assgnStmt.Pos()), - }) - } - } - return v -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/dupl.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/dupl.go deleted file mode 100644 index 5d772a5f2..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/dupl.go +++ /dev/null @@ -1,96 +0,0 @@ -package golinters - -import ( - "fmt" - "go/token" - "sync" - - duplAPI "github.com/golangci/dupl" - "golang.org/x/tools/go/analysis" - - "github.com/golangci/golangci-lint/pkg/config" - "github.com/golangci/golangci-lint/pkg/fsutils" - "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" - "github.com/golangci/golangci-lint/pkg/lint/linter" - "github.com/golangci/golangci-lint/pkg/result" -) - -const duplName = "dupl" - -//nolint:dupl -func NewDupl(settings *config.DuplSettings) *goanalysis.Linter { - var mu sync.Mutex - var resIssues []goanalysis.Issue - - analyzer := &analysis.Analyzer{ - Name: duplName, - Doc: goanalysis.TheOnlyanalyzerDoc, - Run: func(pass *analysis.Pass) (any, error) { - issues, err := runDupl(pass, settings) - if err != nil { - return nil, err - } - - if len(issues) == 0 { - return nil, nil - } - - mu.Lock() - resIssues = append(resIssues, issues...) 
- mu.Unlock() - - return nil, nil - }, - } - - return goanalysis.NewLinter( - duplName, - "Tool for code clone detection", - []*analysis.Analyzer{analyzer}, - nil, - ).WithIssuesReporter(func(*linter.Context) []goanalysis.Issue { - return resIssues - }).WithLoadMode(goanalysis.LoadModeSyntax) -} - -func runDupl(pass *analysis.Pass, settings *config.DuplSettings) ([]goanalysis.Issue, error) { - fileNames := getFileNames(pass) - - issues, err := duplAPI.Run(fileNames, settings.Threshold) - if err != nil { - return nil, err - } - - if len(issues) == 0 { - return nil, nil - } - - res := make([]goanalysis.Issue, 0, len(issues)) - - for _, i := range issues { - toFilename, err := fsutils.ShortestRelPath(i.To.Filename(), "") - if err != nil { - return nil, fmt.Errorf("failed to get shortest rel path for %q: %w", i.To.Filename(), err) - } - - dupl := fmt.Sprintf("%s:%d-%d", toFilename, i.To.LineStart(), i.To.LineEnd()) - text := fmt.Sprintf("%d-%d lines are duplicate of %s", - i.From.LineStart(), i.From.LineEnd(), - formatCode(dupl, nil)) - - res = append(res, goanalysis.NewIssue(&result.Issue{ - Pos: token.Position{ - Filename: i.From.Filename(), - Line: i.From.LineStart(), - }, - LineRange: &result.Range{ - From: i.From.LineStart(), - To: i.From.LineEnd(), - }, - Text: text, - FromLinter: duplName, - }, pass)) - } - - return res, nil -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/dupword.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/dupword.go deleted file mode 100644 index 6f079ffc8..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/dupword.go +++ /dev/null @@ -1,30 +0,0 @@ -package golinters - -import ( - "strings" - - "github.com/Abirdcfly/dupword" - "golang.org/x/tools/go/analysis" - - "github.com/golangci/golangci-lint/pkg/config" - "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" -) - -func NewDupWord(setting *config.DupWordSettings) *goanalysis.Linter { - a := dupword.NewAnalyzer() - - cfgMap := map[string]map[string]any{} - if setting != nil { - cfgMap[a.Name] = map[string]any{ - "keyword": strings.Join(setting.Keywords, ","), - "ignore": strings.Join(setting.Ignore, ","), - } - } - - return goanalysis.NewLinter( - a.Name, - "checks for duplicate words in the source code", - []*analysis.Analyzer{a}, - cfgMap, - ).WithLoadMode(goanalysis.LoadModeSyntax) -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/durationcheck.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/durationcheck.go deleted file mode 100644 index 880de5d42..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/durationcheck.go +++ /dev/null @@ -1,19 +0,0 @@ -package golinters - -import ( - "github.com/charithe/durationcheck" - "golang.org/x/tools/go/analysis" - - "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" -) - -func NewDurationCheck() *goanalysis.Linter { - a := durationcheck.Analyzer - - return goanalysis.NewLinter( - a.Name, - a.Doc, - []*analysis.Analyzer{a}, - nil, - ).WithLoadMode(goanalysis.LoadModeTypesInfo) -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/errcheck.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/errcheck.go deleted file mode 100644 index 89b18519c..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/errcheck.go +++ /dev/null @@ -1,269 +0,0 @@ -package golinters - -import ( - "bufio" - "fmt" - "os" - "os/user" - "path/filepath" - "regexp" - "strings" - "sync" - - "github.com/kisielk/errcheck/errcheck" - 
"golang.org/x/tools/go/analysis" - "golang.org/x/tools/go/packages" - - "github.com/golangci/golangci-lint/pkg/config" - "github.com/golangci/golangci-lint/pkg/fsutils" - "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" - "github.com/golangci/golangci-lint/pkg/lint/linter" - "github.com/golangci/golangci-lint/pkg/result" -) - -const errcheckName = "errcheck" - -func NewErrcheck(settings *config.ErrcheckSettings) *goanalysis.Linter { - var mu sync.Mutex - var resIssues []goanalysis.Issue - - analyzer := &analysis.Analyzer{ - Name: errcheckName, - Doc: goanalysis.TheOnlyanalyzerDoc, - Run: goanalysis.DummyRun, - } - - return goanalysis.NewLinter( - errcheckName, - "errcheck is a program for checking for unchecked errors in Go code. "+ - "These unchecked errors can be critical bugs in some cases", - []*analysis.Analyzer{analyzer}, - nil, - ).WithContextSetter(func(lintCtx *linter.Context) { - // copied from errcheck - checker, err := getChecker(settings) - if err != nil { - lintCtx.Log.Errorf("failed to get checker: %v", err) - return - } - - checker.Tags = lintCtx.Cfg.Run.BuildTags - - analyzer.Run = func(pass *analysis.Pass) (any, error) { - issues := runErrCheck(lintCtx, pass, checker) - if err != nil { - return nil, err - } - - if len(issues) == 0 { - return nil, nil - } - - mu.Lock() - resIssues = append(resIssues, issues...) - mu.Unlock() - - return nil, nil - } - }).WithIssuesReporter(func(*linter.Context) []goanalysis.Issue { - return resIssues - }).WithLoadMode(goanalysis.LoadModeTypesInfo) -} - -func runErrCheck(lintCtx *linter.Context, pass *analysis.Pass, checker *errcheck.Checker) []goanalysis.Issue { - pkg := &packages.Package{ - Fset: pass.Fset, - Syntax: pass.Files, - Types: pass.Pkg, - TypesInfo: pass.TypesInfo, - } - - lintIssues := checker.CheckPackage(pkg).Unique() - if len(lintIssues.UncheckedErrors) == 0 { - return nil - } - - issues := make([]goanalysis.Issue, len(lintIssues.UncheckedErrors)) - - for i, err := range lintIssues.UncheckedErrors { - text := "Error return value is not checked" - - if err.FuncName != "" { - code := err.SelectorName - if err.SelectorName == "" { - code = err.FuncName - } - - text = fmt.Sprintf("Error return value of %s is not checked", formatCode(code, lintCtx.Cfg)) - } - - issues[i] = goanalysis.NewIssue( - &result.Issue{ - FromLinter: errcheckName, - Text: text, - Pos: err.Pos, - }, - pass, - ) - } - - return issues -} - -// parseIgnoreConfig was taken from errcheck in order to keep the API identical. 
-// https://github.com/kisielk/errcheck/blob/1787c4bee836470bf45018cfbc783650db3c6501/main.go#L25-L60 -func parseIgnoreConfig(s string) (map[string]*regexp.Regexp, error) { - if s == "" { - return nil, nil - } - - cfg := map[string]*regexp.Regexp{} - - for _, pair := range strings.Split(s, ",") { - colonIndex := strings.Index(pair, ":") - var pkg, re string - if colonIndex == -1 { - pkg = "" - re = pair - } else { - pkg = pair[:colonIndex] - re = pair[colonIndex+1:] - } - regex, err := regexp.Compile(re) - if err != nil { - return nil, err - } - cfg[pkg] = regex - } - - return cfg, nil -} - -func getChecker(errCfg *config.ErrcheckSettings) (*errcheck.Checker, error) { - ignoreConfig, err := parseIgnoreConfig(errCfg.Ignore) - if err != nil { - return nil, fmt.Errorf("failed to parse 'ignore' directive: %w", err) - } - - checker := errcheck.Checker{ - Exclusions: errcheck.Exclusions{ - BlankAssignments: !errCfg.CheckAssignToBlank, - TypeAssertions: !errCfg.CheckTypeAssertions, - SymbolRegexpsByPackage: map[string]*regexp.Regexp{}, - }, - } - - if !errCfg.DisableDefaultExclusions { - checker.Exclusions.Symbols = append(checker.Exclusions.Symbols, errcheck.DefaultExcludedSymbols...) - } - - for pkg, re := range ignoreConfig { - checker.Exclusions.SymbolRegexpsByPackage[pkg] = re - } - - if errCfg.Exclude != "" { - exclude, err := readExcludeFile(errCfg.Exclude) - if err != nil { - return nil, err - } - - checker.Exclusions.Symbols = append(checker.Exclusions.Symbols, exclude...) - } - - checker.Exclusions.Symbols = append(checker.Exclusions.Symbols, errCfg.ExcludeFunctions...) - - return &checker, nil -} - -func getFirstPathArg() string { - args := os.Args - - // skip all args ([golangci-lint, run/linters]) before files/dirs list - for len(args) != 0 { - if args[0] == "run" { - args = args[1:] - break - } - - args = args[1:] - } - - // find first file/dir arg - firstArg := "./..." 
- for _, arg := range args { - if !strings.HasPrefix(arg, "-") { - firstArg = arg - break - } - } - - return firstArg -} - -func setupConfigFileSearch(name string) []string { - if strings.HasPrefix(name, "~") { - if u, err := user.Current(); err == nil { - name = strings.Replace(name, "~", u.HomeDir, 1) - } - } - - if filepath.IsAbs(name) { - return []string{name} - } - - firstArg := getFirstPathArg() - - absStartPath, err := filepath.Abs(firstArg) - if err != nil { - absStartPath = filepath.Clean(firstArg) - } - - // start from it - var curDir string - if fsutils.IsDir(absStartPath) { - curDir = absStartPath - } else { - curDir = filepath.Dir(absStartPath) - } - - // find all dirs from it up to the root - configSearchPaths := []string{filepath.Join(".", name)} - for { - configSearchPaths = append(configSearchPaths, filepath.Join(curDir, name)) - newCurDir := filepath.Dir(curDir) - if curDir == newCurDir || newCurDir == "" { - break - } - curDir = newCurDir - } - - return configSearchPaths -} - -func readExcludeFile(name string) ([]string, error) { - var err error - var fh *os.File - - for _, path := range setupConfigFileSearch(name) { - if fh, err = os.Open(path); err == nil { - break - } - } - - if fh == nil { - return nil, fmt.Errorf("failed reading exclude file: %s: %w", name, err) - } - - scanner := bufio.NewScanner(fh) - - var excludes []string - for scanner.Scan() { - excludes = append(excludes, scanner.Text()) - } - - if err := scanner.Err(); err != nil { - return nil, fmt.Errorf("failed scanning file: %s: %w", name, err) - } - - return excludes, nil -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/errchkjson.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/errchkjson.go deleted file mode 100644 index 1af4450b4..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/errchkjson.go +++ /dev/null @@ -1,31 +0,0 @@ -package golinters - -import ( - "github.com/breml/errchkjson" - "golang.org/x/tools/go/analysis" - - "github.com/golangci/golangci-lint/pkg/config" - "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" -) - -func NewErrChkJSONFuncName(cfg *config.ErrChkJSONSettings) *goanalysis.Linter { - a := errchkjson.NewAnalyzer() - - cfgMap := map[string]map[string]any{} - cfgMap[a.Name] = map[string]any{ - "omit-safe": true, - } - if cfg != nil { - cfgMap[a.Name] = map[string]any{ - "omit-safe": !cfg.CheckErrorFreeEncoding, - "report-no-exported": cfg.ReportNoExported, - } - } - - return goanalysis.NewLinter( - a.Name, - a.Doc, - []*analysis.Analyzer{a}, - cfgMap, - ).WithLoadMode(goanalysis.LoadModeTypesInfo) -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/errname.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/errname.go deleted file mode 100644 index 193a7aba7..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/errname.go +++ /dev/null @@ -1,19 +0,0 @@ -package golinters - -import ( - "github.com/Antonboom/errname/pkg/analyzer" - "golang.org/x/tools/go/analysis" - - "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" -) - -func NewErrName() *goanalysis.Linter { - a := analyzer.New() - - return goanalysis.NewLinter( - a.Name, - a.Doc, - []*analysis.Analyzer{a}, - nil, - ).WithLoadMode(goanalysis.LoadModeTypesInfo) -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/errorlint.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/errorlint.go deleted file mode 100644 index cac94159d..000000000 --- 
a/vendor/github.com/golangci/golangci-lint/pkg/golinters/errorlint.go +++ /dev/null @@ -1,32 +0,0 @@ -package golinters - -import ( - "github.com/polyfloyd/go-errorlint/errorlint" - "golang.org/x/tools/go/analysis" - - "github.com/golangci/golangci-lint/pkg/config" - "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" -) - -func NewErrorLint(cfg *config.ErrorLintSettings) *goanalysis.Linter { - a := errorlint.NewAnalyzer() - - cfgMap := map[string]map[string]any{} - - if cfg != nil { - cfgMap[a.Name] = map[string]any{ - "errorf": cfg.Errorf, - "errorf-multi": cfg.ErrorfMulti, - "asserts": cfg.Asserts, - "comparison": cfg.Comparison, - } - } - - return goanalysis.NewLinter( - a.Name, - "errorlint is a linter for that can be used to find code "+ - "that will cause problems with the error wrapping scheme introduced in Go 1.13.", - []*analysis.Analyzer{a}, - cfgMap, - ).WithLoadMode(goanalysis.LoadModeTypesInfo) -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/execinquery.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/execinquery.go deleted file mode 100644 index 9911d315e..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/execinquery.go +++ /dev/null @@ -1,19 +0,0 @@ -package golinters - -import ( - "github.com/lufeee/execinquery" - "golang.org/x/tools/go/analysis" - - "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" -) - -func NewExecInQuery() *goanalysis.Linter { - a := execinquery.Analyzer - - return goanalysis.NewLinter( - a.Name, - a.Doc, - []*analysis.Analyzer{a}, - nil, - ).WithLoadMode(goanalysis.LoadModeTypesInfo) -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/exhaustive.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/exhaustive.go deleted file mode 100644 index fe58e10f0..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/exhaustive.go +++ /dev/null @@ -1,37 +0,0 @@ -package golinters - -import ( - "github.com/nishanths/exhaustive" - "golang.org/x/tools/go/analysis" - - "github.com/golangci/golangci-lint/pkg/config" - "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" -) - -func NewExhaustive(settings *config.ExhaustiveSettings) *goanalysis.Linter { - a := exhaustive.Analyzer - - var cfg map[string]map[string]any - if settings != nil { - cfg = map[string]map[string]any{ - a.Name: { - exhaustive.CheckFlag: settings.Check, - exhaustive.CheckGeneratedFlag: settings.CheckGenerated, - exhaustive.DefaultSignifiesExhaustiveFlag: settings.DefaultSignifiesExhaustive, - exhaustive.IgnoreEnumMembersFlag: settings.IgnoreEnumMembers, - exhaustive.IgnoreEnumTypesFlag: settings.IgnoreEnumTypes, - exhaustive.PackageScopeOnlyFlag: settings.PackageScopeOnly, - exhaustive.ExplicitExhaustiveMapFlag: settings.ExplicitExhaustiveMap, - exhaustive.ExplicitExhaustiveSwitchFlag: settings.ExplicitExhaustiveSwitch, - exhaustive.DefaultCaseRequiredFlag: settings.DefaultCaseRequired, - }, - } - } - - return goanalysis.NewLinter( - a.Name, - a.Doc, - []*analysis.Analyzer{a}, - cfg, - ).WithLoadMode(goanalysis.LoadModeTypesInfo) -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/exhaustivestruct.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/exhaustivestruct.go deleted file mode 100644 index 9bc9bbfb0..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/exhaustivestruct.go +++ /dev/null @@ -1,31 +0,0 @@ -package golinters - -import ( - "strings" - - "github.com/mbilski/exhaustivestruct/pkg/analyzer" - 
"golang.org/x/tools/go/analysis" - - "github.com/golangci/golangci-lint/pkg/config" - "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" -) - -func NewExhaustiveStruct(settings *config.ExhaustiveStructSettings) *goanalysis.Linter { - a := analyzer.Analyzer - - var cfg map[string]map[string]any - if settings != nil { - cfg = map[string]map[string]any{ - a.Name: { - "struct_patterns": strings.Join(settings.StructPatterns, ","), - }, - } - } - - return goanalysis.NewLinter( - a.Name, - a.Doc, - []*analysis.Analyzer{a}, - cfg, - ).WithLoadMode(goanalysis.LoadModeTypesInfo) -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/exhaustruct.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/exhaustruct.go deleted file mode 100644 index 5272879e1..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/exhaustruct.go +++ /dev/null @@ -1,29 +0,0 @@ -package golinters - -import ( - "github.com/GaijinEntertainment/go-exhaustruct/v3/analyzer" - "golang.org/x/tools/go/analysis" - - "github.com/golangci/golangci-lint/pkg/config" - "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" -) - -func NewExhaustruct(settings *config.ExhaustructSettings) *goanalysis.Linter { - var include, exclude []string - if settings != nil { - include = settings.Include - exclude = settings.Exclude - } - - a, err := analyzer.NewAnalyzer(include, exclude) - if err != nil { - linterLogger.Fatalf("exhaustruct configuration: %v", err) - } - - return goanalysis.NewLinter( - a.Name, - a.Doc, - []*analysis.Analyzer{a}, - nil, - ).WithLoadMode(goanalysis.LoadModeTypesInfo) -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/exportloopref.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/exportloopref.go deleted file mode 100644 index 1131c575b..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/exportloopref.go +++ /dev/null @@ -1,19 +0,0 @@ -package golinters - -import ( - "github.com/kyoh86/exportloopref" - "golang.org/x/tools/go/analysis" - - "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" -) - -func NewExportLoopRef() *goanalysis.Linter { - a := exportloopref.Analyzer - - return goanalysis.NewLinter( - a.Name, - a.Doc, - []*analysis.Analyzer{a}, - nil, - ).WithLoadMode(goanalysis.LoadModeTypesInfo) -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/forbidigo.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/forbidigo.go deleted file mode 100644 index 6aced2922..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/forbidigo.go +++ /dev/null @@ -1,104 +0,0 @@ -package golinters - -import ( - "fmt" - "sync" - - "github.com/ashanbrown/forbidigo/forbidigo" - "golang.org/x/tools/go/analysis" - - "github.com/golangci/golangci-lint/pkg/config" - "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" - "github.com/golangci/golangci-lint/pkg/lint/linter" - "github.com/golangci/golangci-lint/pkg/logutils" - "github.com/golangci/golangci-lint/pkg/result" -) - -const forbidigoName = "forbidigo" - -//nolint:dupl -func NewForbidigo(settings *config.ForbidigoSettings) *goanalysis.Linter { - var mu sync.Mutex - var resIssues []goanalysis.Issue - - analyzer := &analysis.Analyzer{ - Name: forbidigoName, - Doc: goanalysis.TheOnlyanalyzerDoc, - Run: func(pass *analysis.Pass) (any, error) { - issues, err := runForbidigo(pass, settings) - if err != nil { - return nil, err - } - - if len(issues) == 0 { - return nil, nil - } - - mu.Lock() - resIssues = append(resIssues, issues...) 
- mu.Unlock() - return nil, nil - }, - } - - // Without AnalyzeTypes, LoadModeSyntax is enough. - // But we cannot make this depend on the settings and have to mirror the mode chosen in GetAllSupportedLinterConfigs, - // therefore we have to use LoadModeTypesInfo in all cases. - return goanalysis.NewLinter( - forbidigoName, - "Forbids identifiers", - []*analysis.Analyzer{analyzer}, - nil, - ).WithIssuesReporter(func(*linter.Context) []goanalysis.Issue { - return resIssues - }).WithLoadMode(goanalysis.LoadModeTypesInfo) -} - -func runForbidigo(pass *analysis.Pass, settings *config.ForbidigoSettings) ([]goanalysis.Issue, error) { - options := []forbidigo.Option{ - forbidigo.OptionExcludeGodocExamples(settings.ExcludeGodocExamples), - // disable "//permit" directives so only "//nolint" directives matters within golangci-lint - forbidigo.OptionIgnorePermitDirectives(true), - forbidigo.OptionAnalyzeTypes(settings.AnalyzeTypes), - } - - // Convert patterns back to strings because that is what NewLinter accepts. - var patterns []string - for _, pattern := range settings.Forbid { - buffer, err := pattern.MarshalString() - if err != nil { - return nil, err - } - patterns = append(patterns, string(buffer)) - } - - forbid, err := forbidigo.NewLinter(patterns, options...) - if err != nil { - return nil, fmt.Errorf("failed to create linter %q: %w", forbidigoName, err) - } - - var issues []goanalysis.Issue - for _, file := range pass.Files { - runConfig := forbidigo.RunConfig{ - Fset: pass.Fset, - DebugLog: logutils.Debug(logutils.DebugKeyForbidigo), - } - if settings != nil && settings.AnalyzeTypes { - runConfig.TypesInfo = pass.TypesInfo - } - hints, err := forbid.RunWithConfig(runConfig, file) - if err != nil { - return nil, fmt.Errorf("forbidigo linter failed on file %q: %w", file.Name.String(), err) - } - - for _, hint := range hints { - issues = append(issues, goanalysis.NewIssue(&result.Issue{ - Pos: hint.Position(), - Text: hint.Details(), - FromLinter: forbidigoName, - }, pass)) - } - } - - return issues, nil -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/forcetypeassert.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/forcetypeassert.go deleted file mode 100644 index 873c833b5..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/forcetypeassert.go +++ /dev/null @@ -1,19 +0,0 @@ -package golinters - -import ( - "github.com/gostaticanalysis/forcetypeassert" - "golang.org/x/tools/go/analysis" - - "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" -) - -func NewForceTypeAssert() *goanalysis.Linter { - a := forcetypeassert.Analyzer - - return goanalysis.NewLinter( - a.Name, - "finds forced type assertions", - []*analysis.Analyzer{a}, - nil, - ).WithLoadMode(goanalysis.LoadModeSyntax) -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/funlen.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/funlen.go deleted file mode 100644 index 8def9c1f6..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/funlen.go +++ /dev/null @@ -1,76 +0,0 @@ -package golinters - -import ( - "go/token" - "strings" - "sync" - - "github.com/ultraware/funlen" - "golang.org/x/tools/go/analysis" - - "github.com/golangci/golangci-lint/pkg/config" - "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" - "github.com/golangci/golangci-lint/pkg/lint/linter" - "github.com/golangci/golangci-lint/pkg/result" -) - -const funlenName = "funlen" - -//nolint:dupl -func NewFunlen(settings *config.FunlenSettings) 
*goanalysis.Linter { - var mu sync.Mutex - var resIssues []goanalysis.Issue - - analyzer := &analysis.Analyzer{ - Name: funlenName, - Doc: goanalysis.TheOnlyanalyzerDoc, - Run: func(pass *analysis.Pass) (any, error) { - issues := runFunlen(pass, settings) - - if len(issues) == 0 { - return nil, nil - } - - mu.Lock() - resIssues = append(resIssues, issues...) - mu.Unlock() - - return nil, nil - }, - } - - return goanalysis.NewLinter( - funlenName, - "Tool for detection of long functions", - []*analysis.Analyzer{analyzer}, - nil, - ).WithIssuesReporter(func(*linter.Context) []goanalysis.Issue { - return resIssues - }).WithLoadMode(goanalysis.LoadModeSyntax) -} - -func runFunlen(pass *analysis.Pass, settings *config.FunlenSettings) []goanalysis.Issue { - var lintIssues []funlen.Message - for _, file := range pass.Files { - fileIssues := funlen.Run(file, pass.Fset, settings.Lines, settings.Statements, settings.IgnoreComments) - lintIssues = append(lintIssues, fileIssues...) - } - - if len(lintIssues) == 0 { - return nil - } - - issues := make([]goanalysis.Issue, len(lintIssues)) - for k, i := range lintIssues { - issues[k] = goanalysis.NewIssue(&result.Issue{ - Pos: token.Position{ - Filename: i.Pos.Filename, - Line: i.Pos.Line, - }, - Text: strings.TrimRight(i.Message, "\n"), - FromLinter: funlenName, - }, pass) - } - - return issues -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/gci.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/gci.go deleted file mode 100644 index 386226769..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/gci.go +++ /dev/null @@ -1,157 +0,0 @@ -package golinters - -import ( - "fmt" - "sync" - - gcicfg "github.com/daixiang0/gci/pkg/config" - "github.com/daixiang0/gci/pkg/gci" - "github.com/daixiang0/gci/pkg/io" - "github.com/daixiang0/gci/pkg/log" - "github.com/hexops/gotextdiff" - "github.com/hexops/gotextdiff/myers" - "github.com/hexops/gotextdiff/span" - "golang.org/x/tools/go/analysis" - - "github.com/golangci/golangci-lint/pkg/config" - "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" - "github.com/golangci/golangci-lint/pkg/lint/linter" -) - -const gciName = "gci" - -func NewGci(settings *config.GciSettings) *goanalysis.Linter { - var mu sync.Mutex - var resIssues []goanalysis.Issue - - analyzer := &analysis.Analyzer{ - Name: gciName, - Doc: goanalysis.TheOnlyanalyzerDoc, - Run: goanalysis.DummyRun, - } - - var cfg *gcicfg.Config - if settings != nil { - rawCfg := gcicfg.YamlConfig{ - Cfg: gcicfg.BoolConfig{ - SkipGenerated: settings.SkipGenerated, - CustomOrder: settings.CustomOrder, - }, - SectionStrings: settings.Sections, - } - - if settings.LocalPrefixes != "" { - prefix := []string{"standard", "default", fmt.Sprintf("prefix(%s)", settings.LocalPrefixes)} - rawCfg.SectionStrings = prefix - } - - var err error - cfg, err = rawCfg.Parse() - if err != nil { - linterLogger.Fatalf("gci: configuration parsing: %v", err) - } - } - - var lock sync.Mutex - - return goanalysis.NewLinter( - gciName, - "Gci controls Go package import order and makes it always deterministic.", - []*analysis.Analyzer{analyzer}, - nil, - ).WithContextSetter(func(lintCtx *linter.Context) { - analyzer.Run = func(pass *analysis.Pass) (any, error) { - issues, err := runGci(pass, lintCtx, cfg, &lock) - if err != nil { - return nil, err - } - - if len(issues) == 0 { - return nil, nil - } - - mu.Lock() - resIssues = append(resIssues, issues...) 
- mu.Unlock() - - return nil, nil - } - }).WithIssuesReporter(func(*linter.Context) []goanalysis.Issue { - return resIssues - }).WithLoadMode(goanalysis.LoadModeSyntax) -} - -func runGci(pass *analysis.Pass, lintCtx *linter.Context, cfg *gcicfg.Config, lock *sync.Mutex) ([]goanalysis.Issue, error) { - fileNames := getFileNames(pass) - - var diffs []string - err := diffFormattedFilesToArray(fileNames, *cfg, &diffs, lock) - if err != nil { - return nil, err - } - - var issues []goanalysis.Issue - - for _, diff := range diffs { - if diff == "" { - continue - } - - is, err := extractIssuesFromPatch(diff, lintCtx, gciName) - if err != nil { - return nil, fmt.Errorf("can't extract issues from gci diff output %s: %w", diff, err) - } - - for i := range is { - issues = append(issues, goanalysis.NewIssue(&is[i], pass)) - } - } - - return issues, nil -} - -// diffFormattedFilesToArray is a copy of gci.DiffFormattedFilesToArray without io.StdInGenerator. -// gci.DiffFormattedFilesToArray uses gci.processStdInAndGoFilesInPaths that uses io.StdInGenerator but stdin is not active on CI. -// https://github.com/daixiang0/gci/blob/6f5cb16718ba07f0342a58de9b830ec5a6d58790/pkg/gci/gci.go#L63-L75 -// https://github.com/daixiang0/gci/blob/6f5cb16718ba07f0342a58de9b830ec5a6d58790/pkg/gci/gci.go#L80 -func diffFormattedFilesToArray(paths []string, cfg gcicfg.Config, diffs *[]string, lock *sync.Mutex) error { - log.InitLogger() - defer func() { _ = log.L().Sync() }() - - return gci.ProcessFiles(io.GoFilesInPathsGenerator(paths, true), cfg, func(filePath string, unmodifiedFile, formattedFile []byte) error { - fileURI := span.URIFromPath(filePath) - edits := myers.ComputeEdits(fileURI, string(unmodifiedFile), string(formattedFile)) - unifiedEdits := gotextdiff.ToUnified(filePath, filePath, string(unmodifiedFile), edits) - lock.Lock() - *diffs = append(*diffs, fmt.Sprint(unifiedEdits)) - lock.Unlock() - return nil - }) -} - -func getErrorTextForGci(settings config.GciSettings) string { - text := "File is not `gci`-ed" - - hasOptions := settings.SkipGenerated || len(settings.Sections) > 0 - if !hasOptions { - return text - } - - text += " with" - - if settings.SkipGenerated { - text += " --skip-generated" - } - - if len(settings.Sections) > 0 { - for _, section := range settings.Sections { - text += " -s " + section - } - } - - if settings.CustomOrder { - text += " --custom-order" - } - - return text -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/ginkgolinter.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/ginkgolinter.go deleted file mode 100644 index 8919de15b..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/ginkgolinter.go +++ /dev/null @@ -1,34 +0,0 @@ -package golinters - -import ( - "github.com/nunnatsa/ginkgolinter" - "golang.org/x/tools/go/analysis" - - "github.com/golangci/golangci-lint/pkg/config" - "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" -) - -func NewGinkgoLinter(cfg *config.GinkgoLinterSettings) *goanalysis.Linter { - a := ginkgolinter.NewAnalyzer() - - cfgMap := make(map[string]map[string]any) - if cfg != nil { - cfgMap[a.Name] = map[string]any{ - "suppress-len-assertion": cfg.SuppressLenAssertion, - "suppress-nil-assertion": cfg.SuppressNilAssertion, - "suppress-err-assertion": cfg.SuppressErrAssertion, - "suppress-compare-assertion": cfg.SuppressCompareAssertion, - "suppress-async-assertion": cfg.SuppressAsyncAssertion, - "suppress-type-compare-assertion": cfg.SuppressTypeCompareWarning, - "forbid-focus-container": 
cfg.ForbidFocusContainer, - "allow-havelen-0": cfg.AllowHaveLenZero, - } - } - - return goanalysis.NewLinter( - a.Name, - "enforces standards of using ginkgo and gomega", - []*analysis.Analyzer{a}, - cfgMap, - ).WithLoadMode(goanalysis.LoadModeTypesInfo) -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/goanalysis/adapters.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/goanalysis/adapters.go deleted file mode 100644 index 284215dfc..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/goanalysis/adapters.go +++ /dev/null @@ -1,41 +0,0 @@ -package goanalysis - -import ( - "go/types" - - "golang.org/x/tools/go/analysis" - "golang.org/x/tools/go/loader" //nolint:staticcheck // it's an adapter for golang.org/x/tools/go/packages -) - -func MakeFakeLoaderProgram(pass *analysis.Pass) *loader.Program { - var info types.Info - if pass.TypesInfo != nil { - info = *pass.TypesInfo - } - - prog := &loader.Program{ - Fset: pass.Fset, - Created: []*loader.PackageInfo{ - { - Pkg: pass.Pkg, - Importable: true, // not used - TransitivelyErrorFree: true, // TODO ??? - - Files: pass.Files, - Errors: nil, - Info: info, - }, - }, - AllPackages: map[*types.Package]*loader.PackageInfo{ - pass.Pkg: { - Pkg: pass.Pkg, - Importable: true, - TransitivelyErrorFree: true, - Files: pass.Files, - Errors: nil, - Info: info, - }, - }, - } - return prog -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/goanalysis/errors.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/goanalysis/errors.go deleted file mode 100644 index f59e02cc6..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/goanalysis/errors.go +++ /dev/null @@ -1,72 +0,0 @@ -package goanalysis - -import ( - "errors" - "fmt" - - "golang.org/x/tools/go/packages" - - "github.com/golangci/golangci-lint/pkg/lint/linter" - libpackages "github.com/golangci/golangci-lint/pkg/packages" - "github.com/golangci/golangci-lint/pkg/result" -) - -type IllTypedError struct { - Pkg *packages.Package -} - -func (e *IllTypedError) Error() string { - return fmt.Sprintf("errors in package: %v", e.Pkg.Errors) -} - -func buildIssuesFromIllTypedError(errs []error, lintCtx *linter.Context) ([]result.Issue, error) { - var issues []result.Issue - uniqReportedIssues := map[string]bool{} - - var other error - - for _, err := range errs { - err := err - - var ill *IllTypedError - if !errors.As(err, &ill) { - if other == nil { - other = err - } - continue - } - - for _, err := range libpackages.ExtractErrors(ill.Pkg) { - i, perr := parseError(err) - if perr != nil { // failed to parse - if uniqReportedIssues[err.Msg] { - continue - } - uniqReportedIssues[err.Msg] = true - lintCtx.Log.Errorf("typechecking error: %s", err.Msg) - } else { - i.Pkg = ill.Pkg // to save to cache later - issues = append(issues, *i) - } - } - } - - if len(issues) == 0 && other != nil { - return nil, other - } - - return issues, nil -} - -func parseError(srcErr packages.Error) (*result.Issue, error) { - pos, err := libpackages.ParseErrorPosition(srcErr.Pos) - if err != nil { - return nil, err - } - - return &result.Issue{ - Pos: *pos, - Text: srcErr.Msg, - FromLinter: "typecheck", - }, nil -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/goanalysis/issue.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/goanalysis/issue.go deleted file mode 100644 index f331a3ab9..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/goanalysis/issue.go +++ /dev/null @@ -1,31 +0,0 @@ 
-package goanalysis - -import ( - "go/token" - - "golang.org/x/tools/go/analysis" - - "github.com/golangci/golangci-lint/pkg/result" -) - -type Issue struct { - result.Issue - Pass *analysis.Pass -} - -func NewIssue(i *result.Issue, pass *analysis.Pass) Issue { - return Issue{ - Issue: *i, - Pass: pass, - } -} - -type EncodingIssue struct { - FromLinter string - Text string - Pos token.Position - LineRange *result.Range - Replacement *result.Replacement - ExpectNoLint bool - ExpectedNoLintLinter string -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/goanalysis/linter.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/goanalysis/linter.go deleted file mode 100644 index f8ca2e755..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/goanalysis/linter.go +++ /dev/null @@ -1,217 +0,0 @@ -package goanalysis - -import ( - "context" - "errors" - "flag" - "fmt" - "strings" - - "golang.org/x/tools/go/analysis" - - "github.com/golangci/golangci-lint/pkg/lint/linter" - "github.com/golangci/golangci-lint/pkg/result" -) - -const ( - TheOnlyAnalyzerName = "the_only_name" - TheOnlyanalyzerDoc = "the_only_doc" -) - -type LoadMode int - -func (loadMode LoadMode) String() string { - switch loadMode { - case LoadModeNone: - return "none" - case LoadModeSyntax: - return "syntax" - case LoadModeTypesInfo: - return "types info" - case LoadModeWholeProgram: - return "whole program" - } - panic(fmt.Sprintf("unknown load mode %d", loadMode)) -} - -const ( - LoadModeNone LoadMode = iota - LoadModeSyntax - LoadModeTypesInfo - LoadModeWholeProgram -) - -type Linter struct { - name, desc string - analyzers []*analysis.Analyzer - cfg map[string]map[string]any - issuesReporter func(*linter.Context) []Issue - contextSetter func(*linter.Context) - loadMode LoadMode - needUseOriginalPackages bool -} - -func NewLinter(name, desc string, analyzers []*analysis.Analyzer, cfg map[string]map[string]any) *Linter { - return &Linter{name: name, desc: desc, analyzers: analyzers, cfg: cfg} -} - -func (lnt *Linter) Run(_ context.Context, lintCtx *linter.Context) ([]result.Issue, error) { - if err := lnt.preRun(lintCtx); err != nil { - return nil, err - } - - return runAnalyzers(lnt, lintCtx) -} - -func (lnt *Linter) UseOriginalPackages() { - lnt.needUseOriginalPackages = true -} - -func (lnt *Linter) LoadMode() LoadMode { - return lnt.loadMode -} - -func (lnt *Linter) WithLoadMode(loadMode LoadMode) *Linter { - lnt.loadMode = loadMode - return lnt -} - -func (lnt *Linter) WithIssuesReporter(r func(*linter.Context) []Issue) *Linter { - lnt.issuesReporter = r - return lnt -} - -func (lnt *Linter) WithContextSetter(cs func(*linter.Context)) *Linter { - lnt.contextSetter = cs - return lnt -} - -func (lnt *Linter) Name() string { - return lnt.name -} - -func (lnt *Linter) Desc() string { - return lnt.desc -} - -func (lnt *Linter) allAnalyzerNames() []string { - var ret []string - for _, a := range lnt.analyzers { - ret = append(ret, a.Name) - } - return ret -} - -func (lnt *Linter) configureAnalyzer(a *analysis.Analyzer, cfg map[string]any) error { - for k, v := range cfg { - f := a.Flags.Lookup(k) - if f == nil { - validFlagNames := allFlagNames(&a.Flags) - if len(validFlagNames) == 0 { - return errors.New("analyzer doesn't have settings") - } - - return fmt.Errorf("analyzer doesn't have setting %q, valid settings: %v", - k, validFlagNames) - } - - if err := f.Value.Set(valueToString(v)); err != nil { - return fmt.Errorf("failed to set analyzer setting %q with value %v: %w", k, v, err) - } 
- } - - return nil -} - -func (lnt *Linter) configure() error { - analyzersMap := map[string]*analysis.Analyzer{} - for _, a := range lnt.analyzers { - analyzersMap[a.Name] = a - } - - for analyzerName, analyzerSettings := range lnt.cfg { - a := analyzersMap[analyzerName] - if a == nil { - return fmt.Errorf("settings key %q must be valid analyzer name, valid analyzers: %v", - analyzerName, lnt.allAnalyzerNames()) - } - - if err := lnt.configureAnalyzer(a, analyzerSettings); err != nil { - return fmt.Errorf("failed to configure analyzer %s: %w", analyzerName, err) - } - } - - return nil -} - -func (lnt *Linter) preRun(lintCtx *linter.Context) error { - if err := analysis.Validate(lnt.analyzers); err != nil { - return fmt.Errorf("failed to validate analyzers: %w", err) - } - - if err := lnt.configure(); err != nil { - return fmt.Errorf("failed to configure analyzers: %w", err) - } - - if lnt.contextSetter != nil { - lnt.contextSetter(lintCtx) - } - - return nil -} - -func (lnt *Linter) getName() string { - return lnt.name -} - -func (lnt *Linter) getLinterNameForDiagnostic(*Diagnostic) string { - return lnt.name -} - -func (lnt *Linter) getAnalyzers() []*analysis.Analyzer { - return lnt.analyzers -} - -func (lnt *Linter) useOriginalPackages() bool { - return lnt.needUseOriginalPackages -} - -func (lnt *Linter) reportIssues(lintCtx *linter.Context) []Issue { - if lnt.issuesReporter != nil { - return lnt.issuesReporter(lintCtx) - } - return nil -} - -func (lnt *Linter) getLoadMode() LoadMode { - return lnt.loadMode -} - -func allFlagNames(fs *flag.FlagSet) []string { - var ret []string - fs.VisitAll(func(f *flag.Flag) { - ret = append(ret, f.Name) - }) - return ret -} - -func valueToString(v any) string { - if ss, ok := v.([]string); ok { - return strings.Join(ss, ",") - } - - if is, ok := v.([]any); ok { - var ss []string - for _, i := range is { - ss = append(ss, fmt.Sprint(i)) - } - - return valueToString(ss) - } - - return fmt.Sprint(v) -} - -func DummyRun(_ *analysis.Pass) (any, error) { - return nil, nil -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/goanalysis/load/guard.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/goanalysis/load/guard.go deleted file mode 100644 index ab7775cc8..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/goanalysis/load/guard.go +++ /dev/null @@ -1,30 +0,0 @@ -package load - -import ( - "sync" - - "golang.org/x/tools/go/packages" -) - -type Guard struct { - loadMutexes map[*packages.Package]*sync.Mutex - mutex sync.Mutex -} - -func NewGuard() *Guard { - return &Guard{ - loadMutexes: map[*packages.Package]*sync.Mutex{}, - } -} - -func (g *Guard) AddMutexForPkg(pkg *packages.Package) { - g.loadMutexes[pkg] = &sync.Mutex{} -} - -func (g *Guard) MutexForPkg(pkg *packages.Package) *sync.Mutex { - return g.loadMutexes[pkg] -} - -func (g *Guard) Mutex() *sync.Mutex { - return &g.mutex -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/goanalysis/metalinter.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/goanalysis/metalinter.go deleted file mode 100644 index 333ab20f1..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/goanalysis/metalinter.go +++ /dev/null @@ -1,90 +0,0 @@ -package goanalysis - -import ( - "context" - "fmt" - - "golang.org/x/tools/go/analysis" - - "github.com/golangci/golangci-lint/pkg/lint/linter" - "github.com/golangci/golangci-lint/pkg/result" -) - -type MetaLinter struct { - linters []*Linter - analyzerToLinterName 
map[*analysis.Analyzer]string -} - -func NewMetaLinter(linters []*Linter) *MetaLinter { - ml := &MetaLinter{linters: linters} - ml.analyzerToLinterName = ml.getAnalyzerToLinterNameMapping() - return ml -} - -func (ml MetaLinter) Run(_ context.Context, lintCtx *linter.Context) ([]result.Issue, error) { - for _, l := range ml.linters { - if err := l.preRun(lintCtx); err != nil { - return nil, fmt.Errorf("failed to pre-run %s: %w", l.Name(), err) - } - } - - return runAnalyzers(ml, lintCtx) -} - -func (ml MetaLinter) Name() string { - return "goanalysis_metalinter" -} - -func (ml MetaLinter) Desc() string { - return "" -} - -func (ml MetaLinter) getLoadMode() LoadMode { - loadMode := LoadModeNone - for _, l := range ml.linters { - if l.loadMode > loadMode { - loadMode = l.loadMode - } - } - return loadMode -} - -func (ml MetaLinter) getAnalyzers() []*analysis.Analyzer { - var allAnalyzers []*analysis.Analyzer - for _, l := range ml.linters { - allAnalyzers = append(allAnalyzers, l.analyzers...) - } - return allAnalyzers -} - -func (ml MetaLinter) getName() string { - return "metalinter" -} - -func (ml MetaLinter) useOriginalPackages() bool { - return false // `unused` can't be run by this metalinter -} - -func (ml MetaLinter) reportIssues(lintCtx *linter.Context) []Issue { - var ret []Issue - for _, lnt := range ml.linters { - if lnt.issuesReporter != nil { - ret = append(ret, lnt.issuesReporter(lintCtx)...) - } - } - return ret -} - -func (ml MetaLinter) getLinterNameForDiagnostic(diag *Diagnostic) string { - return ml.analyzerToLinterName[diag.Analyzer] -} - -func (ml MetaLinter) getAnalyzerToLinterNameMapping() map[*analysis.Analyzer]string { - analyzerToLinterName := map[*analysis.Analyzer]string{} - for _, l := range ml.linters { - for _, a := range l.analyzers { - analyzerToLinterName[a] = l.Name() - } - } - return analyzerToLinterName -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/goanalysis/runner.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/goanalysis/runner.go deleted file mode 100644 index a8660b612..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/goanalysis/runner.go +++ /dev/null @@ -1,337 +0,0 @@ -// checker is a partial copy of https://github.com/golang/tools/blob/master/go/analysis/internal/checker -// Copyright 2018 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Package goanalysis defines the implementation of the checker commands. -// The same code drives the multi-analysis driver, the single-analysis -// driver that is conventionally provided for convenience along with -// each analysis package, and the test driver. 
-package goanalysis - -import ( - "encoding/gob" - "fmt" - "go/token" - "runtime" - "sort" - "sync" - - "golang.org/x/exp/maps" - "golang.org/x/tools/go/analysis" - "golang.org/x/tools/go/packages" - - "github.com/golangci/golangci-lint/internal/errorutil" - "github.com/golangci/golangci-lint/internal/pkgcache" - "github.com/golangci/golangci-lint/pkg/golinters/goanalysis/load" - "github.com/golangci/golangci-lint/pkg/logutils" - "github.com/golangci/golangci-lint/pkg/timeutils" -) - -var ( - debugf = logutils.Debug(logutils.DebugKeyGoAnalysis) - - analyzeDebugf = logutils.Debug(logutils.DebugKeyGoAnalysisAnalyze) - isMemoryDebug = logutils.HaveDebugTag(logutils.DebugKeyGoAnalysisMemory) - issuesCacheDebugf = logutils.Debug(logutils.DebugKeyGoAnalysisIssuesCache) - - factsDebugf = logutils.Debug(logutils.DebugKeyGoAnalysisFacts) - factsCacheDebugf = logutils.Debug(logutils.DebugKeyGoAnalysisFactsCache) - factsInheritDebugf = logutils.Debug(logutils.DebugKeyGoAnalysisFactsInherit) - factsExportDebugf = logutils.Debug(logutils.DebugKeyGoAnalysisFacts) - isFactsExportDebug = logutils.HaveDebugTag(logutils.DebugKeyGoAnalysisFactsExport) -) - -type Diagnostic struct { - analysis.Diagnostic - Analyzer *analysis.Analyzer - Position token.Position - Pkg *packages.Package -} - -type runner struct { - log logutils.Log - prefix string // ensure unique analyzer names - pkgCache *pkgcache.Cache - loadGuard *load.Guard - loadMode LoadMode - passToPkg map[*analysis.Pass]*packages.Package - passToPkgGuard sync.Mutex - sw *timeutils.Stopwatch -} - -func newRunner(prefix string, logger logutils.Log, pkgCache *pkgcache.Cache, loadGuard *load.Guard, - loadMode LoadMode, sw *timeutils.Stopwatch) *runner { - return &runner{ - prefix: prefix, - log: logger, - pkgCache: pkgCache, - loadGuard: loadGuard, - loadMode: loadMode, - passToPkg: map[*analysis.Pass]*packages.Package{}, - sw: sw, - } -} - -// Run loads the packages specified by args using go/packages, -// then applies the specified analyzers to them. -// Analysis flags must already have been set. -// It provides most of the logic for the main functions of both the -// singlechecker and the multi-analysis commands. -// It returns the appropriate exit code. 
-func (r *runner) run(analyzers []*analysis.Analyzer, initialPackages []*packages.Package) ([]Diagnostic, - []error, map[*analysis.Pass]*packages.Package) { - debugf("Analyzing %d packages on load mode %s", len(initialPackages), r.loadMode) - defer r.pkgCache.Trim() - - roots := r.analyze(initialPackages, analyzers) - - diags, errs := extractDiagnostics(roots) - - return diags, errs, r.passToPkg -} - -type actKey struct { - *analysis.Analyzer - *packages.Package -} - -func (r *runner) markAllActions(a *analysis.Analyzer, pkg *packages.Package, markedActions map[actKey]struct{}) { - k := actKey{a, pkg} - if _, ok := markedActions[k]; ok { - return - } - - for _, req := range a.Requires { - r.markAllActions(req, pkg, markedActions) - } - - if len(a.FactTypes) != 0 { - for path := range pkg.Imports { - r.markAllActions(a, pkg.Imports[path], markedActions) - } - } - - markedActions[k] = struct{}{} -} - -func (r *runner) makeAction(a *analysis.Analyzer, pkg *packages.Package, - initialPkgs map[*packages.Package]bool, actions map[actKey]*action, actAlloc *actionAllocator) *action { - k := actKey{a, pkg} - act, ok := actions[k] - if ok { - return act - } - - act = actAlloc.alloc() - act.a = a - act.pkg = pkg - act.r = r - act.isInitialPkg = initialPkgs[pkg] - act.needAnalyzeSource = initialPkgs[pkg] - act.analysisDoneCh = make(chan struct{}) - - depsCount := len(a.Requires) - if len(a.FactTypes) > 0 { - depsCount += len(pkg.Imports) - } - act.deps = make([]*action, 0, depsCount) - - // Add a dependency on each required analyzers. - for _, req := range a.Requires { - act.deps = append(act.deps, r.makeAction(req, pkg, initialPkgs, actions, actAlloc)) - } - - r.buildActionFactDeps(act, a, pkg, initialPkgs, actions, actAlloc) - - actions[k] = act - - return act -} - -func (r *runner) buildActionFactDeps(act *action, a *analysis.Analyzer, pkg *packages.Package, - initialPkgs map[*packages.Package]bool, actions map[actKey]*action, actAlloc *actionAllocator) { - // An analysis that consumes/produces facts - // must run on the package's dependencies too. - if len(a.FactTypes) == 0 { - return - } - - act.objectFacts = make(map[objectFactKey]analysis.Fact) - act.packageFacts = make(map[packageFactKey]analysis.Fact) - - paths := maps.Keys(pkg.Imports) - sort.Strings(paths) // for determinism - for _, path := range paths { - dep := r.makeAction(a, pkg.Imports[path], initialPkgs, actions, actAlloc) - act.deps = append(act.deps, dep) - } - - // Need to register fact types for pkgcache proper gob encoding. - for _, f := range a.FactTypes { - gob.Register(f) - } -} - -//nolint:gocritic -func (r *runner) prepareAnalysis(pkgs []*packages.Package, - analyzers []*analysis.Analyzer) (map[*packages.Package]bool, []*action, []*action) { - // Construct the action graph. - - // Each graph node (action) is one unit of analysis. - // Edges express package-to-package (vertical) dependencies, - // and analysis-to-analysis (horizontal) dependencies. - - // This place is memory-intensive: e.g. Istio project has 120k total actions. - // Therefore, optimize it carefully. 
- markedActions := make(map[actKey]struct{}, len(analyzers)*len(pkgs)) - for _, a := range analyzers { - for _, pkg := range pkgs { - r.markAllActions(a, pkg, markedActions) - } - } - totalActionsCount := len(markedActions) - - actions := make(map[actKey]*action, totalActionsCount) - actAlloc := newActionAllocator(totalActionsCount) - - initialPkgs := make(map[*packages.Package]bool, len(pkgs)) - for _, pkg := range pkgs { - initialPkgs[pkg] = true - } - - // Build nodes for initial packages. - roots := make([]*action, 0, len(pkgs)*len(analyzers)) - for _, a := range analyzers { - for _, pkg := range pkgs { - root := r.makeAction(a, pkg, initialPkgs, actions, actAlloc) - root.isroot = true - roots = append(roots, root) - } - } - - allActions := maps.Values(actions) - - debugf("Built %d actions", len(actions)) - - return initialPkgs, allActions, roots -} - -func (r *runner) analyze(pkgs []*packages.Package, analyzers []*analysis.Analyzer) []*action { - initialPkgs, actions, rootActions := r.prepareAnalysis(pkgs, analyzers) - - actionPerPkg := map[*packages.Package][]*action{} - for _, act := range actions { - actionPerPkg[act.pkg] = append(actionPerPkg[act.pkg], act) - } - - // Fill Imports field. - loadingPackages := map[*packages.Package]*loadingPackage{} - var dfs func(pkg *packages.Package) - dfs = func(pkg *packages.Package) { - if loadingPackages[pkg] != nil { - return - } - - imports := map[string]*loadingPackage{} - for impPath, imp := range pkg.Imports { - dfs(imp) - impLp := loadingPackages[imp] - impLp.dependents++ - imports[impPath] = impLp - } - - loadingPackages[pkg] = &loadingPackage{ - pkg: pkg, - imports: imports, - isInitial: initialPkgs[pkg], - log: r.log, - actions: actionPerPkg[pkg], - loadGuard: r.loadGuard, - dependents: 1, // self dependent - } - } - for _, act := range actions { - dfs(act.pkg) - } - - // Limit memory and IO usage. - gomaxprocs := runtime.GOMAXPROCS(-1) - debugf("Analyzing at most %d packages in parallel", gomaxprocs) - loadSem := make(chan struct{}, gomaxprocs) - - var wg sync.WaitGroup - debugf("There are %d initial and %d total packages", len(initialPkgs), len(loadingPackages)) - for _, lp := range loadingPackages { - if lp.isInitial { - wg.Add(1) - go func(lp *loadingPackage) { - lp.analyzeRecursive(r.loadMode, loadSem) - wg.Done() - }(lp) - } - } - wg.Wait() - - return rootActions -} - -//nolint:nakedret -func extractDiagnostics(roots []*action) (retDiags []Diagnostic, retErrors []error) { - extracted := make(map[*action]bool) - var extract func(*action) - var visitAll func(actions []*action) - visitAll = func(actions []*action) { - for _, act := range actions { - if !extracted[act] { - extracted[act] = true - visitAll(act.deps) - extract(act) - } - } - } - - // De-duplicate diagnostics by position (not token.Pos) to - // avoid double-reporting in source files that belong to - // multiple packages, such as foo and foo.test. - type key struct { - token.Position - *analysis.Analyzer - message string - } - seen := make(map[key]bool) - - extract = func(act *action) { - if act.err != nil { - if pe, ok := act.err.(*errorutil.PanicError); ok { - panic(pe) - } - retErrors = append(retErrors, fmt.Errorf("%s: %w", act.a.Name, act.err)) - return - } - - if act.isroot { - for _, diag := range act.diagnostics { - // We don't display a.Name/f.Category - // as most users don't care. 
- - posn := act.pkg.Fset.Position(diag.Pos) - k := key{posn, act.a, diag.Message} - if seen[k] { - continue // duplicate - } - seen[k] = true - - retDiag := Diagnostic{ - Diagnostic: diag, - Analyzer: act.a, - Position: posn, - Pkg: act.pkg, - } - retDiags = append(retDiags, retDiag) - } - } - } - visitAll(roots) - return -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/goanalysis/runner_action.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/goanalysis/runner_action.go deleted file mode 100644 index 6b57cb0c9..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/goanalysis/runner_action.go +++ /dev/null @@ -1,384 +0,0 @@ -package goanalysis - -import ( - "errors" - "fmt" - "go/types" - "io" - "reflect" - "runtime/debug" - "time" - - "golang.org/x/tools/go/analysis" - "golang.org/x/tools/go/packages" - "golang.org/x/tools/go/types/objectpath" - - "github.com/golangci/golangci-lint/internal/errorutil" - "github.com/golangci/golangci-lint/internal/pkgcache" -) - -type actionAllocator struct { - allocatedActions []action - nextFreeIndex int -} - -func newActionAllocator(maxCount int) *actionAllocator { - return &actionAllocator{ - allocatedActions: make([]action, maxCount), - nextFreeIndex: 0, - } -} - -func (actAlloc *actionAllocator) alloc() *action { - if actAlloc.nextFreeIndex == len(actAlloc.allocatedActions) { - panic(fmt.Sprintf("Made too many allocations of actions: %d allowed", len(actAlloc.allocatedActions))) - } - act := &actAlloc.allocatedActions[actAlloc.nextFreeIndex] - actAlloc.nextFreeIndex++ - return act -} - -// An action represents one unit of analysis work: the application of -// one analysis to one package. Actions form a DAG, both within a -// package (as different analyzers are applied, either in sequence or -// parallel), and across packages (as dependencies are analyzed). -type action struct { - a *analysis.Analyzer - pkg *packages.Package - pass *analysis.Pass - deps []*action - objectFacts map[objectFactKey]analysis.Fact - packageFacts map[packageFactKey]analysis.Fact - result any - diagnostics []analysis.Diagnostic - err error - r *runner - analysisDoneCh chan struct{} - loadCachedFactsDone bool - loadCachedFactsOk bool - isroot bool - isInitialPkg bool - needAnalyzeSource bool -} - -func (act *action) String() string { - return fmt.Sprintf("%s@%s", act.a, act.pkg) -} - -func (act *action) loadCachedFacts() bool { - if act.loadCachedFactsDone { // can't be set in parallel - return act.loadCachedFactsOk - } - - res := func() bool { - if act.isInitialPkg { - return true // load cached facts only for non-initial packages - } - - if len(act.a.FactTypes) == 0 { - return true // no need to load facts - } - - return act.loadPersistedFacts() - }() - act.loadCachedFactsDone = true - act.loadCachedFactsOk = res - return res -} - -func (act *action) waitUntilDependingAnalyzersWorked() { - for _, dep := range act.deps { - if dep.pkg == act.pkg { - <-dep.analysisDoneCh - } - } -} - -func (act *action) analyzeSafe() { - defer func() { - if p := recover(); p != nil { - if !act.isroot { - // This line allows to display "hidden" panic with analyzers like buildssa. - // Some linters are dependent of sub-analyzers but when a sub-analyzer fails the linter is not aware of that, - // this results to another panic (ex: "interface conversion: interface {} is nil, not *buildssa.SSA"). 
- act.r.log.Errorf("%s: panic during analysis: %v, %s", act.a.Name, p, string(debug.Stack())) - } - - act.err = errorutil.NewPanicError(fmt.Sprintf("%s: package %q (isInitialPkg: %t, needAnalyzeSource: %t): %s", - act.a.Name, act.pkg.Name, act.isInitialPkg, act.needAnalyzeSource, p), debug.Stack()) - } - }() - act.r.sw.TrackStage(act.a.Name, func() { - act.analyze() - }) -} - -func (act *action) analyze() { - defer close(act.analysisDoneCh) // unblock actions depending on this action - - if !act.needAnalyzeSource { - return - } - - defer func(now time.Time) { - analyzeDebugf("go/analysis: %s: %s: analyzed package %q in %s", act.r.prefix, act.a.Name, act.pkg.Name, time.Since(now)) - }(time.Now()) - - // Report an error if any dependency failures. - var depErrors error - for _, dep := range act.deps { - if dep.err == nil { - continue - } - - depErrors = errors.Join(depErrors, errors.Unwrap(dep.err)) - } - if depErrors != nil { - act.err = fmt.Errorf("failed prerequisites: %w", depErrors) - return - } - - // Plumb the output values of the dependencies - // into the inputs of this action. Also facts. - inputs := make(map[*analysis.Analyzer]any) - startedAt := time.Now() - for _, dep := range act.deps { - if dep.pkg == act.pkg { - // Same package, different analysis (horizontal edge): - // in-memory outputs of prerequisite analyzers - // become inputs to this analysis pass. - inputs[dep.a] = dep.result - } else if dep.a == act.a { // (always true) - // Same analysis, different package (vertical edge): - // serialized facts produced by prerequisite analysis - // become available to this analysis pass. - inheritFacts(act, dep) - } - } - factsDebugf("%s: Inherited facts in %s", act, time.Since(startedAt)) - - // Run the analysis. - pass := &analysis.Pass{ - Analyzer: act.a, - Fset: act.pkg.Fset, - Files: act.pkg.Syntax, - OtherFiles: act.pkg.OtherFiles, - Pkg: act.pkg.Types, - TypesInfo: act.pkg.TypesInfo, - TypesSizes: act.pkg.TypesSizes, - ResultOf: inputs, - Report: func(d analysis.Diagnostic) { act.diagnostics = append(act.diagnostics, d) }, - ImportObjectFact: act.importObjectFact, - ExportObjectFact: act.exportObjectFact, - ImportPackageFact: act.importPackageFact, - ExportPackageFact: act.exportPackageFact, - AllObjectFacts: act.allObjectFacts, - AllPackageFacts: act.allPackageFacts, - } - act.pass = pass - act.r.passToPkgGuard.Lock() - act.r.passToPkg[pass] = act.pkg - act.r.passToPkgGuard.Unlock() - - if act.pkg.IllTyped { - // It looks like there should be !pass.Analyzer.RunDespiteErrors - // but govet's cgocall crashes on it. Govet itself contains !pass.Analyzer.RunDespiteErrors condition here, - // but it exits before it if packages.Load have failed. - act.err = fmt.Errorf("analysis skipped: %w", &IllTypedError{Pkg: act.pkg}) - } else { - startedAt = time.Now() - act.result, act.err = pass.Analyzer.Run(pass) - analyzedIn := time.Since(startedAt) - if analyzedIn > time.Millisecond*10 { - debugf("%s: run analyzer in %s", act, analyzedIn) - } - } - - // disallow calls after Run - pass.ExportObjectFact = nil - pass.ExportPackageFact = nil - - if err := act.persistFactsToCache(); err != nil { - act.r.log.Warnf("Failed to persist facts to cache: %s", err) - } -} - -// importObjectFact implements Pass.ImportObjectFact. -// Given a non-nil pointer ptr of type *T, where *T satisfies Fact, -// importObjectFact copies the fact value to *ptr. 
-func (act *action) importObjectFact(obj types.Object, ptr analysis.Fact) bool { - if obj == nil { - panic("nil object") - } - key := objectFactKey{obj, act.factType(ptr)} - if v, ok := act.objectFacts[key]; ok { - reflect.ValueOf(ptr).Elem().Set(reflect.ValueOf(v).Elem()) - return true - } - return false -} - -// exportObjectFact implements Pass.ExportObjectFact. -func (act *action) exportObjectFact(obj types.Object, fact analysis.Fact) { - if obj.Pkg() != act.pkg.Types { - act.r.log.Panicf("internal error: in analysis %s of package %s: Fact.Set(%s, %T): can't set facts on objects belonging another package", - act.a, act.pkg, obj, fact) - } - - key := objectFactKey{obj, act.factType(fact)} - act.objectFacts[key] = fact // clobber any existing entry - if isFactsExportDebug { - objstr := types.ObjectString(obj, (*types.Package).Name) - factsExportDebugf("%s: object %s has fact %s\n", - act.pkg.Fset.Position(obj.Pos()), objstr, fact) - } -} - -func (act *action) allObjectFacts() []analysis.ObjectFact { - out := make([]analysis.ObjectFact, 0, len(act.objectFacts)) - for key, fact := range act.objectFacts { - out = append(out, analysis.ObjectFact{ - Object: key.obj, - Fact: fact, - }) - } - return out -} - -// importPackageFact implements Pass.ImportPackageFact. -// Given a non-nil pointer ptr of type *T, where *T satisfies Fact, -// fact copies the fact value to *ptr. -func (act *action) importPackageFact(pkg *types.Package, ptr analysis.Fact) bool { - if pkg == nil { - panic("nil package") - } - key := packageFactKey{pkg, act.factType(ptr)} - if v, ok := act.packageFacts[key]; ok { - reflect.ValueOf(ptr).Elem().Set(reflect.ValueOf(v).Elem()) - return true - } - return false -} - -// exportPackageFact implements Pass.ExportPackageFact. -func (act *action) exportPackageFact(fact analysis.Fact) { - key := packageFactKey{act.pass.Pkg, act.factType(fact)} - act.packageFacts[key] = fact // clobber any existing entry - factsDebugf("%s: package %s has fact %s\n", - act.pkg.Fset.Position(act.pass.Files[0].Pos()), act.pass.Pkg.Path(), fact) -} - -func (act *action) allPackageFacts() []analysis.PackageFact { - out := make([]analysis.PackageFact, 0, len(act.packageFacts)) - for key, fact := range act.packageFacts { - out = append(out, analysis.PackageFact{ - Package: key.pkg, - Fact: fact, - }) - } - return out -} - -func (act *action) factType(fact analysis.Fact) reflect.Type { - t := reflect.TypeOf(fact) - if t.Kind() != reflect.Ptr { - act.r.log.Fatalf("invalid Fact type: got %T, want pointer", t) - } - return t -} - -func (act *action) persistFactsToCache() error { - analyzer := act.a - if len(analyzer.FactTypes) == 0 { - return nil - } - - // Merge new facts into the package and persist them. 
- var facts []Fact - for key, fact := range act.packageFacts { - if key.pkg != act.pkg.Types { - // The fact is from inherited facts from another package - continue - } - facts = append(facts, Fact{ - Path: "", - Fact: fact, - }) - } - for key, fact := range act.objectFacts { - obj := key.obj - if obj.Pkg() != act.pkg.Types { - // The fact is from inherited facts from another package - continue - } - - path, err := objectpath.For(obj) - if err != nil { - // The object is not globally addressable - continue - } - - facts = append(facts, Fact{ - Path: string(path), - Fact: fact, - }) - } - - factsCacheDebugf("Caching %d facts for package %q and analyzer %s", len(facts), act.pkg.Name, act.a.Name) - - key := fmt.Sprintf("%s/facts", analyzer.Name) - return act.r.pkgCache.Put(act.pkg, pkgcache.HashModeNeedAllDeps, key, facts) -} - -func (act *action) loadPersistedFacts() bool { - var facts []Fact - key := fmt.Sprintf("%s/facts", act.a.Name) - if err := act.r.pkgCache.Get(act.pkg, pkgcache.HashModeNeedAllDeps, key, &facts); err != nil { - if !errors.Is(err, pkgcache.ErrMissing) && !errors.Is(err, io.EOF) { - act.r.log.Warnf("Failed to get persisted facts: %s", err) - } - - factsCacheDebugf("No cached facts for package %q and analyzer %s", act.pkg.Name, act.a.Name) - return false - } - - factsCacheDebugf("Loaded %d cached facts for package %q and analyzer %s", len(facts), act.pkg.Name, act.a.Name) - - for _, f := range facts { - if f.Path == "" { // this is a package fact - key := packageFactKey{act.pkg.Types, act.factType(f.Fact)} - act.packageFacts[key] = f.Fact - continue - } - obj, err := objectpath.Object(act.pkg.Types, objectpath.Path(f.Path)) - if err != nil { - // Be lenient about these errors. For example, when - // analyzing io/ioutil from source, we may get a fact - // for methods on the devNull type, and objectpath - // will happily create a path for them. However, when - // we later load io/ioutil from export data, the path - // no longer resolves. - // - // If an exported type embeds the unexported type, - // then (part of) the unexported type will become part - // of the type information and our path will resolve - // again. - continue - } - factKey := objectFactKey{obj, act.factType(f.Fact)} - act.objectFacts[factKey] = f.Fact - } - - return true -} - -func (act *action) markDepsForAnalyzingSource() { - // Horizontal deps (analyzer.Requires) must be loaded from source and analyzed before analyzing - // this action. - for _, dep := range act.deps { - if dep.pkg == act.pkg { - // Analyze source only for horizontal dependencies, e.g. from "buildssa". - dep.needAnalyzeSource = true // can't be set in parallel - } - } -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/goanalysis/runner_facts.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/goanalysis/runner_facts.go deleted file mode 100644 index 1d0fb974e..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/goanalysis/runner_facts.go +++ /dev/null @@ -1,125 +0,0 @@ -package goanalysis - -import ( - "bytes" - "encoding/gob" - "fmt" - "go/types" - "reflect" - - "golang.org/x/tools/go/analysis" -) - -type objectFactKey struct { - obj types.Object - typ reflect.Type -} - -type packageFactKey struct { - pkg *types.Package - typ reflect.Type -} - -type Fact struct { - Path string // non-empty only for object facts - Fact analysis.Fact -} - -// inheritFacts populates act.facts with -// those it obtains from its dependency, dep. 
-func inheritFacts(act, dep *action) { - serialize := false - - for key, fact := range dep.objectFacts { - // Filter out facts related to objects - // that are irrelevant downstream - // (equivalently: not in the compiler export data). - if !exportedFrom(key.obj, dep.pkg.Types) { - factsInheritDebugf("%v: discarding %T fact from %s for %s: %s", act, fact, dep, key.obj, fact) - continue - } - - // Optionally serialize/deserialize fact - // to verify that it works across address spaces. - if serialize { - var err error - fact, err = codeFact(fact) - if err != nil { - act.r.log.Panicf("internal error: encoding of %T fact failed in %v", fact, act) - } - } - - factsInheritDebugf("%v: inherited %T fact for %s: %s", act, fact, key.obj, fact) - act.objectFacts[key] = fact - } - - for key, fact := range dep.packageFacts { - // TODO: filter out facts that belong to - // packages not mentioned in the export data - // to prevent side channels. - - // Optionally serialize/deserialize fact - // to verify that it works across address spaces - // and is deterministic. - if serialize { - var err error - fact, err = codeFact(fact) - if err != nil { - act.r.log.Panicf("internal error: encoding of %T fact failed in %v", fact, act) - } - } - - factsInheritDebugf("%v: inherited %T fact for %s: %s", act, fact, key.pkg.Path(), fact) - act.packageFacts[key] = fact - } -} - -// codeFact encodes then decodes a fact, -// just to exercise that logic. -func codeFact(fact analysis.Fact) (analysis.Fact, error) { - // We encode facts one at a time. - // A real modular driver would emit all facts - // into one encoder to improve gob efficiency. - var buf bytes.Buffer - if err := gob.NewEncoder(&buf).Encode(fact); err != nil { - return nil, err - } - - // Encode it twice and assert that we get the same bits. - // This helps detect nondeterministic Gob encoding (e.g. of maps). - var buf2 bytes.Buffer - if err := gob.NewEncoder(&buf2).Encode(fact); err != nil { - return nil, err - } - if !bytes.Equal(buf.Bytes(), buf2.Bytes()) { - return nil, fmt.Errorf("encoding of %T fact is nondeterministic", fact) - } - - newFact := reflect.New(reflect.TypeOf(fact).Elem()).Interface().(analysis.Fact) - if err := gob.NewDecoder(&buf).Decode(newFact); err != nil { - return nil, err - } - return newFact, nil -} - -// exportedFrom reports whether obj may be visible to a package that imports pkg. -// This includes not just the exported members of pkg, but also unexported -// constants, types, fields, and methods, perhaps belonging to other packages, -// that find there way into the API. -// This is an over-approximation of the more accurate approach used by -// gc export data, which walks the type graph, but it's much simpler. -// -// TODO(adonovan): do more accurate filtering by walking the type graph. 
-func exportedFrom(obj types.Object, pkg *types.Package) bool { - switch obj := obj.(type) { - case *types.Func: - return obj.Exported() && obj.Pkg() == pkg || - obj.Type().(*types.Signature).Recv() != nil - case *types.Var: - return obj.Exported() && obj.Pkg() == pkg || - obj.IsField() - case *types.TypeName, *types.Const: - return true - } - return false // Nil, Builtin, Label, or PkgName -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/goanalysis/runner_loadingpackage.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/goanalysis/runner_loadingpackage.go deleted file mode 100644 index e39e2212c..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/goanalysis/runner_loadingpackage.go +++ /dev/null @@ -1,499 +0,0 @@ -package goanalysis - -import ( - "errors" - "fmt" - "go/ast" - "go/parser" - "go/scanner" - "go/types" - "os" - "reflect" - "sync" - "sync/atomic" - - "golang.org/x/tools/go/gcexportdata" - "golang.org/x/tools/go/packages" - - "github.com/golangci/golangci-lint/pkg/golinters/goanalysis/load" - "github.com/golangci/golangci-lint/pkg/logutils" -) - -const unsafePkgName = "unsafe" - -type loadingPackage struct { - pkg *packages.Package - imports map[string]*loadingPackage - isInitial bool - log logutils.Log - actions []*action // all actions with this package - loadGuard *load.Guard - dependents int32 // number of depending on it packages - analyzeOnce sync.Once - decUseMutex sync.Mutex -} - -func (lp *loadingPackage) analyzeRecursive(loadMode LoadMode, loadSem chan struct{}) { - lp.analyzeOnce.Do(func() { - // Load the direct dependencies, in parallel. - var wg sync.WaitGroup - wg.Add(len(lp.imports)) - for _, imp := range lp.imports { - go func(imp *loadingPackage) { - imp.analyzeRecursive(loadMode, loadSem) - wg.Done() - }(imp) - } - wg.Wait() - lp.analyze(loadMode, loadSem) - }) -} - -func (lp *loadingPackage) analyze(loadMode LoadMode, loadSem chan struct{}) { - loadSem <- struct{}{} - defer func() { - <-loadSem - }() - - // Save memory on unused more fields. - defer lp.decUse(loadMode < LoadModeWholeProgram) - - if err := lp.loadWithFacts(loadMode); err != nil { - werr := fmt.Errorf("failed to load package %s: %w", lp.pkg.Name, err) - // Don't need to write error to errCh, it will be extracted and reported on another layer. - // Unblock depending on actions and propagate error. - for _, act := range lp.actions { - close(act.analysisDoneCh) - act.err = werr - } - return - } - - var actsWg sync.WaitGroup - actsWg.Add(len(lp.actions)) - for _, act := range lp.actions { - go func(act *action) { - defer actsWg.Done() - - act.waitUntilDependingAnalyzersWorked() - - act.analyzeSafe() - }(act) - } - actsWg.Wait() -} - -func (lp *loadingPackage) loadFromSource(loadMode LoadMode) error { - pkg := lp.pkg - - // Many packages have few files, much fewer than there - // are CPU cores. Additionally, parsing each individual file is - // very fast. A naive parallel implementation of this loop won't - // be faster, and tends to be slower due to extra scheduling, - // bookkeeping and potentially false sharing of cache lines. - pkg.Syntax = make([]*ast.File, 0, len(pkg.CompiledGoFiles)) - for _, file := range pkg.CompiledGoFiles { - f, err := parser.ParseFile(pkg.Fset, file, nil, parser.ParseComments) - if err != nil { - pkg.Errors = append(pkg.Errors, lp.convertError(err)...) 
- continue - } - pkg.Syntax = append(pkg.Syntax, f) - } - if len(pkg.Errors) != 0 { - pkg.IllTyped = true - return nil - } - - if loadMode == LoadModeSyntax { - return nil - } - - // Call NewPackage directly with explicit name. - // This avoids skew between golist and go/types when the files' - // package declarations are inconsistent. - // Subtle: we populate all Types fields with an empty Package - // before loading export data so that export data processing - // never has to create a types.Package for an indirect dependency, - // which would then require that such created packages be explicitly - // inserted back into the Import graph as a final step after export data loading. - pkg.Types = types.NewPackage(pkg.PkgPath, pkg.Name) - - pkg.IllTyped = true - - pkg.TypesInfo = &types.Info{ - Types: make(map[ast.Expr]types.TypeAndValue), - Instances: make(map[*ast.Ident]types.Instance), - Defs: make(map[*ast.Ident]types.Object), - Uses: make(map[*ast.Ident]types.Object), - Implicits: make(map[ast.Node]types.Object), - Scopes: make(map[ast.Node]*types.Scope), - Selections: make(map[*ast.SelectorExpr]*types.Selection), - } - - importer := func(path string) (*types.Package, error) { - if path == unsafePkgName { - return types.Unsafe, nil - } - if path == "C" { - // go/packages doesn't tell us that cgo preprocessing - // failed. When we subsequently try to parse the package, - // we'll encounter the raw C import. - return nil, errors.New("cgo preprocessing failed") - } - imp := pkg.Imports[path] - if imp == nil { - return nil, nil - } - if len(imp.Errors) > 0 { - return nil, imp.Errors[0] - } - return imp.Types, nil - } - tc := &types.Config{ - Importer: importerFunc(importer), - Error: func(err error) { - pkg.Errors = append(pkg.Errors, lp.convertError(err)...) - }, - } - _ = types.NewChecker(tc, pkg.Fset, pkg.Types, pkg.TypesInfo).Files(pkg.Syntax) - // Don't handle error here: errors are adding by tc.Error function. - - illTyped := len(pkg.Errors) != 0 - if !illTyped { - for _, imp := range lp.imports { - if imp.pkg.IllTyped { - illTyped = true - break - } - } - } - pkg.IllTyped = illTyped - return nil -} - -func (lp *loadingPackage) loadFromExportData() error { - pkg := lp.pkg - - // Call NewPackage directly with explicit name. - // This avoids skew between golist and go/types when the files' - // package declarations are inconsistent. - // Subtle: we populate all Types fields with an empty Package - // before loading export data so that export data processing - // never has to create a types.Package for an indirect dependency, - // which would then require that such created packages be explicitly - // inserted back into the Import graph as a final step after export data loading. 
- pkg.Types = types.NewPackage(pkg.PkgPath, pkg.Name) - - pkg.IllTyped = true - for path, pkg := range pkg.Imports { - if pkg.Types == nil { - return fmt.Errorf("dependency %q hasn't been loaded yet", path) - } - } - if pkg.ExportFile == "" { - return fmt.Errorf("no export data for %q", pkg.ID) - } - f, err := os.Open(pkg.ExportFile) - if err != nil { - return err - } - defer f.Close() - - r, err := gcexportdata.NewReader(f) - if err != nil { - return err - } - - view := make(map[string]*types.Package) // view seen by gcexportdata - seen := make(map[*packages.Package]bool) // all visited packages - var visit func(pkgs map[string]*packages.Package) - visit = func(pkgs map[string]*packages.Package) { - for _, pkg := range pkgs { - if !seen[pkg] { - seen[pkg] = true - view[pkg.PkgPath] = pkg.Types - visit(pkg.Imports) - } - } - } - visit(pkg.Imports) - tpkg, err := gcexportdata.Read(r, pkg.Fset, view, pkg.PkgPath) - if err != nil { - return err - } - pkg.Types = tpkg - pkg.IllTyped = false - return nil -} - -func (lp *loadingPackage) loadWithFacts(loadMode LoadMode) error { - pkg := lp.pkg - - if pkg.PkgPath == unsafePkgName { - // Fill in the blanks to avoid surprises. - pkg.Syntax = []*ast.File{} - if loadMode >= LoadModeTypesInfo { - pkg.Types = types.Unsafe - pkg.TypesInfo = new(types.Info) - } - return nil - } - - if pkg.TypesInfo != nil { - // Already loaded package, e.g. because another not go/analysis linter required types for deps. - // Try load cached facts for it. - - for _, act := range lp.actions { - if !act.loadCachedFacts() { - // Cached facts loading failed: analyze later the action from source. - act.needAnalyzeSource = true - factsCacheDebugf("Loading of facts for already loaded %s failed, analyze it from source later", act) - act.markDepsForAnalyzingSource() - } - } - return nil - } - - if lp.isInitial { - // No need to load cached facts: the package will be analyzed from source - // because it's the initial. - return lp.loadFromSource(loadMode) - } - - return lp.loadImportedPackageWithFacts(loadMode) -} - -func (lp *loadingPackage) loadImportedPackageWithFacts(loadMode LoadMode) error { - pkg := lp.pkg - - // Load package from export data - if loadMode >= LoadModeTypesInfo { - if err := lp.loadFromExportData(); err != nil { - // We asked Go to give us up-to-date export data, yet - // we can't load it. There must be something wrong. - // - // Attempt loading from source. This should fail (because - // otherwise there would be export data); we just want to - // get the compile errors. If loading from source succeeds - // we discard the result, anyway. Otherwise, we'll fail - // when trying to reload from export data later. - - // Otherwise, it panics because uses already existing (from exported data) types. - pkg.Types = types.NewPackage(pkg.PkgPath, pkg.Name) - if srcErr := lp.loadFromSource(loadMode); srcErr != nil { - return srcErr - } - // Make sure this package can't be imported successfully - pkg.Errors = append(pkg.Errors, packages.Error{ - Pos: "-", - Msg: fmt.Sprintf("could not load export data: %s", err), - Kind: packages.ParseError, - }) - return fmt.Errorf("could not load export data: %w", err) - } - } - - needLoadFromSource := false - for _, act := range lp.actions { - if act.loadCachedFacts() { - continue - } - - // Cached facts loading failed: analyze later the action from source. 
- factsCacheDebugf("Loading of facts for %s failed, analyze it from source later", act) - act.needAnalyzeSource = true // can't be set in parallel - needLoadFromSource = true - - act.markDepsForAnalyzingSource() - } - - if needLoadFromSource { - // Cached facts loading failed: analyze later the action from source. To perform - // the analysis we need to load the package from source code. - - // Otherwise, it panics because uses already existing (from exported data) types. - if loadMode >= LoadModeTypesInfo { - pkg.Types = types.NewPackage(pkg.PkgPath, pkg.Name) - } - return lp.loadFromSource(loadMode) - } - - return nil -} - -func (lp *loadingPackage) decUse(canClearTypes bool) { - lp.decUseMutex.Lock() - defer lp.decUseMutex.Unlock() - - for _, act := range lp.actions { - pass := act.pass - if pass == nil { - continue - } - - pass.Files = nil - pass.TypesInfo = nil - pass.TypesSizes = nil - pass.ResultOf = nil - pass.Pkg = nil - pass.OtherFiles = nil - pass.AllObjectFacts = nil - pass.AllPackageFacts = nil - pass.ImportObjectFact = nil - pass.ExportObjectFact = nil - pass.ImportPackageFact = nil - pass.ExportPackageFact = nil - act.pass = nil - act.deps = nil - if act.result != nil { - if isMemoryDebug { - debugf("%s: decUse: nilling act result of size %d bytes", act, sizeOfValueTreeBytes(act.result)) - } - act.result = nil - } - } - - lp.pkg.Syntax = nil - lp.pkg.TypesInfo = nil - lp.pkg.TypesSizes = nil - - // Can't set lp.pkg.Imports to nil because of loadFromExportData.visit. - - dependents := atomic.AddInt32(&lp.dependents, -1) - if dependents != 0 { - return - } - - if canClearTypes { - // canClearTypes is set to true if we can discard type - // information after the package and its dependents have been - // processed. This is the case when no whole program checkers (unused) are - // being run. - lp.pkg.Types = nil - } - lp.pkg = nil - - for _, imp := range lp.imports { - imp.decUse(canClearTypes) - } - lp.imports = nil - - for _, act := range lp.actions { - if !lp.isInitial { - act.pkg = nil - } - act.packageFacts = nil - act.objectFacts = nil - } - lp.actions = nil -} - -func (lp *loadingPackage) convertError(err error) []packages.Error { - var errs []packages.Error - // taken from go/packages - switch err := err.(type) { - case packages.Error: - // from driver - errs = append(errs, err) - - case *os.PathError: - // from parser - errs = append(errs, packages.Error{ - Pos: err.Path + ":1", - Msg: err.Err.Error(), - Kind: packages.ParseError, - }) - - case scanner.ErrorList: - // from parser - for _, err := range err { - errs = append(errs, packages.Error{ - Pos: err.Pos.String(), - Msg: err.Msg, - Kind: packages.ParseError, - }) - } - - case types.Error: - // from type checker - errs = append(errs, packages.Error{ - Pos: err.Fset.Position(err.Pos).String(), - Msg: err.Msg, - Kind: packages.TypeError, - }) - - default: - // unexpected impoverished error from parser? - errs = append(errs, packages.Error{ - Pos: "-", - Msg: err.Error(), - Kind: packages.UnknownError, - }) - - // If you see this error message, please file a bug. 
- lp.log.Warnf("Internal error: error %q (%T) without position", err, err) - } - - return errs -} - -func (lp *loadingPackage) String() string { - return fmt.Sprintf("%s@%s", lp.pkg.PkgPath, lp.pkg.Name) -} - -type importerFunc func(path string) (*types.Package, error) - -func (f importerFunc) Import(path string) (*types.Package, error) { return f(path) } - -func sizeOfValueTreeBytes(v any) int { - return sizeOfReflectValueTreeBytes(reflect.ValueOf(v), map[uintptr]struct{}{}) -} - -func sizeOfReflectValueTreeBytes(rv reflect.Value, visitedPtrs map[uintptr]struct{}) int { - switch rv.Kind() { - case reflect.Ptr: - ptrSize := int(rv.Type().Size()) - if rv.IsNil() { - return ptrSize - } - ptr := rv.Pointer() - if _, ok := visitedPtrs[ptr]; ok { - return 0 - } - visitedPtrs[ptr] = struct{}{} - return ptrSize + sizeOfReflectValueTreeBytes(rv.Elem(), visitedPtrs) - case reflect.Interface: - if rv.IsNil() { - return 0 - } - return sizeOfReflectValueTreeBytes(rv.Elem(), visitedPtrs) - case reflect.Struct: - ret := 0 - for i := 0; i < rv.NumField(); i++ { - ret += sizeOfReflectValueTreeBytes(rv.Field(i), visitedPtrs) - } - return ret - case reflect.Slice, reflect.Array, reflect.Chan: - return int(rv.Type().Size()) + rv.Cap()*int(rv.Type().Elem().Size()) - case reflect.Map: - ret := 0 - for _, key := range rv.MapKeys() { - mv := rv.MapIndex(key) - ret += sizeOfReflectValueTreeBytes(key, visitedPtrs) - ret += sizeOfReflectValueTreeBytes(mv, visitedPtrs) - } - return ret - case reflect.String: - return rv.Len() - case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64, - reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, - reflect.Uintptr, reflect.Bool, reflect.Float32, reflect.Float64, - reflect.Complex64, reflect.Complex128, reflect.Func, reflect.UnsafePointer: - return int(rv.Type().Size()) - case reflect.Invalid: - return 0 - default: - panic("unknown rv of type " + rv.String()) - } -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/goanalysis/runners.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/goanalysis/runners.go deleted file mode 100644 index 559fb1392..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/goanalysis/runners.go +++ /dev/null @@ -1,270 +0,0 @@ -package goanalysis - -import ( - "fmt" - "runtime" - "sort" - "strings" - "sync" - "sync/atomic" - "time" - - "golang.org/x/tools/go/analysis" - "golang.org/x/tools/go/packages" - - "github.com/golangci/golangci-lint/internal/pkgcache" - "github.com/golangci/golangci-lint/pkg/lint/linter" - "github.com/golangci/golangci-lint/pkg/logutils" - "github.com/golangci/golangci-lint/pkg/result" - "github.com/golangci/golangci-lint/pkg/timeutils" -) - -type runAnalyzersConfig interface { - getName() string - getLinterNameForDiagnostic(*Diagnostic) string - getAnalyzers() []*analysis.Analyzer - useOriginalPackages() bool - reportIssues(*linter.Context) []Issue - getLoadMode() LoadMode -} - -func runAnalyzers(cfg runAnalyzersConfig, lintCtx *linter.Context) ([]result.Issue, error) { - log := lintCtx.Log.Child(logutils.DebugKeyGoAnalysis) - sw := timeutils.NewStopwatch("analyzers", log) - - const stagesToPrint = 10 - defer sw.PrintTopStages(stagesToPrint) - - runner := newRunner(cfg.getName(), log, lintCtx.PkgCache, lintCtx.LoadGuard, cfg.getLoadMode(), sw) - - pkgs := lintCtx.Packages - if cfg.useOriginalPackages() { - pkgs = lintCtx.OriginalPackages - } - - issues, pkgsFromCache := loadIssuesFromCache(pkgs, lintCtx, cfg.getAnalyzers()) - var 
pkgsToAnalyze []*packages.Package - for _, pkg := range pkgs { - if !pkgsFromCache[pkg] { - pkgsToAnalyze = append(pkgsToAnalyze, pkg) - } - } - - diags, errs, passToPkg := runner.run(cfg.getAnalyzers(), pkgsToAnalyze) - - defer func() { - if len(errs) == 0 { - // If we try to save to cache even if we have compilation errors - // we won't see them on repeated runs. - saveIssuesToCache(pkgs, pkgsFromCache, issues, lintCtx, cfg.getAnalyzers()) - } - }() - - buildAllIssues := func() []result.Issue { - var retIssues []result.Issue - reportedIssues := cfg.reportIssues(lintCtx) - for i := range reportedIssues { - issue := &reportedIssues[i].Issue - if issue.Pkg == nil { - issue.Pkg = passToPkg[reportedIssues[i].Pass] - } - retIssues = append(retIssues, *issue) - } - retIssues = append(retIssues, buildIssues(diags, cfg.getLinterNameForDiagnostic)...) - return retIssues - } - - errIssues, err := buildIssuesFromIllTypedError(errs, lintCtx) - if err != nil { - return nil, err - } - - issues = append(issues, errIssues...) - issues = append(issues, buildAllIssues()...) - - return issues, nil -} - -func buildIssues(diags []Diagnostic, linterNameBuilder func(diag *Diagnostic) string) []result.Issue { - var issues []result.Issue - for i := range diags { - diag := &diags[i] - linterName := linterNameBuilder(diag) - - var text string - if diag.Analyzer.Name == linterName { - text = diag.Message - } else { - text = fmt.Sprintf("%s: %s", diag.Analyzer.Name, diag.Message) - } - - issues = append(issues, result.Issue{ - FromLinter: linterName, - Text: text, - Pos: diag.Position, - Pkg: diag.Pkg, - }) - - if len(diag.Related) > 0 { - for _, info := range diag.Related { - issues = append(issues, result.Issue{ - FromLinter: linterName, - Text: fmt.Sprintf("%s(related information): %s", diag.Analyzer.Name, info.Message), - Pos: diag.Pkg.Fset.Position(info.Pos), - Pkg: diag.Pkg, - }) - } - } - } - return issues -} - -func getIssuesCacheKey(analyzers []*analysis.Analyzer) string { - return "lint/result:" + analyzersHashID(analyzers) -} - -func saveIssuesToCache(allPkgs []*packages.Package, pkgsFromCache map[*packages.Package]bool, - issues []result.Issue, lintCtx *linter.Context, analyzers []*analysis.Analyzer) { - startedAt := time.Now() - perPkgIssues := map[*packages.Package][]result.Issue{} - for ind := range issues { - i := &issues[ind] - perPkgIssues[i.Pkg] = append(perPkgIssues[i.Pkg], *i) - } - - savedIssuesCount := int32(0) - lintResKey := getIssuesCacheKey(analyzers) - - workerCount := runtime.GOMAXPROCS(-1) - var wg sync.WaitGroup - wg.Add(workerCount) - - pkgCh := make(chan *packages.Package, len(allPkgs)) - for i := 0; i < workerCount; i++ { - go func() { - defer wg.Done() - for pkg := range pkgCh { - pkgIssues := perPkgIssues[pkg] - encodedIssues := make([]EncodingIssue, 0, len(pkgIssues)) - for ind := range pkgIssues { - i := &pkgIssues[ind] - encodedIssues = append(encodedIssues, EncodingIssue{ - FromLinter: i.FromLinter, - Text: i.Text, - Pos: i.Pos, - LineRange: i.LineRange, - Replacement: i.Replacement, - ExpectNoLint: i.ExpectNoLint, - ExpectedNoLintLinter: i.ExpectedNoLintLinter, - }) - } - - atomic.AddInt32(&savedIssuesCount, int32(len(encodedIssues))) - if err := lintCtx.PkgCache.Put(pkg, pkgcache.HashModeNeedAllDeps, lintResKey, encodedIssues); err != nil { - lintCtx.Log.Infof("Failed to save package %s issues (%d) to cache: %s", pkg, len(pkgIssues), err) - } else { - issuesCacheDebugf("Saved package %s issues (%d) to cache", pkg, len(pkgIssues)) - } - } - }() - } - - for _, pkg := range 
allPkgs { - if pkgsFromCache[pkg] { - continue - } - - pkgCh <- pkg - } - close(pkgCh) - wg.Wait() - - issuesCacheDebugf("Saved %d issues from %d packages to cache in %s", savedIssuesCount, len(allPkgs), time.Since(startedAt)) -} - -//nolint:gocritic -func loadIssuesFromCache(pkgs []*packages.Package, lintCtx *linter.Context, - analyzers []*analysis.Analyzer) ([]result.Issue, map[*packages.Package]bool) { - startedAt := time.Now() - - lintResKey := getIssuesCacheKey(analyzers) - type cacheRes struct { - issues []result.Issue - loadErr error - } - pkgToCacheRes := make(map[*packages.Package]*cacheRes, len(pkgs)) - for _, pkg := range pkgs { - pkgToCacheRes[pkg] = &cacheRes{} - } - - workerCount := runtime.GOMAXPROCS(-1) - var wg sync.WaitGroup - wg.Add(workerCount) - - pkgCh := make(chan *packages.Package, len(pkgs)) - for i := 0; i < workerCount; i++ { - go func() { - defer wg.Done() - for pkg := range pkgCh { - var pkgIssues []EncodingIssue - err := lintCtx.PkgCache.Get(pkg, pkgcache.HashModeNeedAllDeps, lintResKey, &pkgIssues) - cacheRes := pkgToCacheRes[pkg] - cacheRes.loadErr = err - if err != nil { - continue - } - if len(pkgIssues) == 0 { - continue - } - - issues := make([]result.Issue, 0, len(pkgIssues)) - for _, i := range pkgIssues { - issues = append(issues, result.Issue{ - FromLinter: i.FromLinter, - Text: i.Text, - Pos: i.Pos, - LineRange: i.LineRange, - Replacement: i.Replacement, - Pkg: pkg, - ExpectNoLint: i.ExpectNoLint, - ExpectedNoLintLinter: i.ExpectedNoLintLinter, - }) - } - cacheRes.issues = issues - } - }() - } - - for _, pkg := range pkgs { - pkgCh <- pkg - } - close(pkgCh) - wg.Wait() - - loadedIssuesCount := 0 - var issues []result.Issue - pkgsFromCache := map[*packages.Package]bool{} - for pkg, cacheRes := range pkgToCacheRes { - if cacheRes.loadErr == nil { - loadedIssuesCount += len(cacheRes.issues) - pkgsFromCache[pkg] = true - issues = append(issues, cacheRes.issues...) 
- issuesCacheDebugf("Loaded package %s issues (%d) from cache", pkg, len(cacheRes.issues)) - } else { - issuesCacheDebugf("Didn't load package %s issues from cache: %s", pkg, cacheRes.loadErr) - } - } - issuesCacheDebugf("Loaded %d issues from cache in %s, analyzing %d/%d packages", - loadedIssuesCount, time.Since(startedAt), len(pkgs)-len(pkgsFromCache), len(pkgs)) - return issues, pkgsFromCache -} - -func analyzersHashID(analyzers []*analysis.Analyzer) string { - names := make([]string, 0, len(analyzers)) - for _, a := range analyzers { - names = append(names, a.Name) - } - - sort.Strings(names) - return strings.Join(names, ",") -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/gocheckcompilerdirectives.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/gocheckcompilerdirectives.go deleted file mode 100644 index 2592c8994..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/gocheckcompilerdirectives.go +++ /dev/null @@ -1,19 +0,0 @@ -package golinters - -import ( - "4d63.com/gocheckcompilerdirectives/checkcompilerdirectives" - "golang.org/x/tools/go/analysis" - - "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" -) - -func NewGoCheckCompilerDirectives() *goanalysis.Linter { - a := checkcompilerdirectives.Analyzer() - - return goanalysis.NewLinter( - a.Name, - a.Doc, - []*analysis.Analyzer{a}, - nil, - ).WithLoadMode(goanalysis.LoadModeSyntax) -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/gochecknoglobals.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/gochecknoglobals.go deleted file mode 100644 index 6e18aeb27..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/gochecknoglobals.go +++ /dev/null @@ -1,29 +0,0 @@ -package golinters - -import ( - "4d63.com/gochecknoglobals/checknoglobals" - "golang.org/x/tools/go/analysis" - - "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" -) - -func NewGochecknoglobals() *goanalysis.Linter { - gochecknoglobals := checknoglobals.Analyzer() - - // gochecknoglobals only lints test files if the `-t` flag is passed, so we - // pass the `t` flag as true to the analyzer before running it. This can be - // turned off by using the regular golangci-lint flags such as `--tests` or - // `--skip-files`. 
- linterConfig := map[string]map[string]any{ - gochecknoglobals.Name: { - "t": true, - }, - } - - return goanalysis.NewLinter( - gochecknoglobals.Name, - gochecknoglobals.Doc, - []*analysis.Analyzer{gochecknoglobals}, - linterConfig, - ).WithLoadMode(goanalysis.LoadModeTypesInfo) -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/gochecknoinits.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/gochecknoinits.go deleted file mode 100644 index a51b531b9..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/gochecknoinits.go +++ /dev/null @@ -1,74 +0,0 @@ -package golinters - -import ( - "fmt" - "go/ast" - "go/token" - "sync" - - "golang.org/x/tools/go/analysis" - - "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" - "github.com/golangci/golangci-lint/pkg/lint/linter" - "github.com/golangci/golangci-lint/pkg/result" -) - -const gochecknoinitsName = "gochecknoinits" - -func NewGochecknoinits() *goanalysis.Linter { - var mu sync.Mutex - var resIssues []goanalysis.Issue - - analyzer := &analysis.Analyzer{ - Name: gochecknoinitsName, - Doc: goanalysis.TheOnlyanalyzerDoc, - Run: func(pass *analysis.Pass) (any, error) { - var res []goanalysis.Issue - for _, file := range pass.Files { - fileIssues := checkFileForInits(file, pass.Fset) - for i := range fileIssues { - res = append(res, goanalysis.NewIssue(&fileIssues[i], pass)) - } - } - if len(res) == 0 { - return nil, nil - } - - mu.Lock() - resIssues = append(resIssues, res...) - mu.Unlock() - - return nil, nil - }, - } - - return goanalysis.NewLinter( - gochecknoinitsName, - "Checks that no init functions are present in Go code", - []*analysis.Analyzer{analyzer}, - nil, - ).WithIssuesReporter(func(*linter.Context) []goanalysis.Issue { - return resIssues - }).WithLoadMode(goanalysis.LoadModeSyntax) -} - -func checkFileForInits(f *ast.File, fset *token.FileSet) []result.Issue { - var res []result.Issue - for _, decl := range f.Decls { - funcDecl, ok := decl.(*ast.FuncDecl) - if !ok { - continue - } - - name := funcDecl.Name.Name - if name == "init" && funcDecl.Recv.NumFields() == 0 { - res = append(res, result.Issue{ - Pos: fset.Position(funcDecl.Pos()), - Text: fmt.Sprintf("don't use %s function", formatCode(name, nil)), - FromLinter: gochecknoinitsName, - }) - } - } - - return res -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/gochecksumtype.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/gochecksumtype.go deleted file mode 100644 index 5516179df..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/gochecksumtype.go +++ /dev/null @@ -1,80 +0,0 @@ -package golinters - -import ( - "strings" - "sync" - - gochecksumtype "github.com/alecthomas/go-check-sumtype" - "golang.org/x/tools/go/analysis" - "golang.org/x/tools/go/packages" - - "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" - "github.com/golangci/golangci-lint/pkg/lint/linter" - "github.com/golangci/golangci-lint/pkg/result" -) - -const goCheckSumTypeName = "gochecksumtype" - -func NewGoCheckSumType() *goanalysis.Linter { - var mu sync.Mutex - var resIssues []goanalysis.Issue - - analyzer := &analysis.Analyzer{ - Name: goCheckSumTypeName, - Doc: goanalysis.TheOnlyanalyzerDoc, - Run: func(pass *analysis.Pass) (any, error) { - issues, err := runGoCheckSumType(pass) - if err != nil { - return nil, err - } - - if len(issues) == 0 { - return nil, nil - } - - mu.Lock() - resIssues = append(resIssues, issues...) 
- mu.Unlock() - - return nil, nil - }, - } - - return goanalysis.NewLinter( - goCheckSumTypeName, - `Run exhaustiveness checks on Go "sum types"`, - []*analysis.Analyzer{analyzer}, - nil, - ).WithIssuesReporter(func(_ *linter.Context) []goanalysis.Issue { - return resIssues - }).WithLoadMode(goanalysis.LoadModeTypesInfo) -} - -func runGoCheckSumType(pass *analysis.Pass) ([]goanalysis.Issue, error) { - var resIssues []goanalysis.Issue - - pkg := &packages.Package{ - Fset: pass.Fset, - Syntax: pass.Files, - Types: pass.Pkg, - TypesInfo: pass.TypesInfo, - } - - var unknownError error - errors := gochecksumtype.Run([]*packages.Package{pkg}) - for _, err := range errors { - err, ok := err.(gochecksumtype.Error) - if !ok { - unknownError = err - continue - } - - resIssues = append(resIssues, goanalysis.NewIssue(&result.Issue{ - FromLinter: goCheckSumTypeName, - Text: strings.TrimPrefix(err.Error(), err.Pos().String()+": "), - Pos: err.Pos(), - }, pass)) - } - - return resIssues, unknownError -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/gocognit.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/gocognit.go deleted file mode 100644 index 406d34ed6..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/gocognit.go +++ /dev/null @@ -1,80 +0,0 @@ -package golinters - -import ( - "fmt" - "sort" - "sync" - - "github.com/uudashr/gocognit" - "golang.org/x/tools/go/analysis" - - "github.com/golangci/golangci-lint/pkg/config" - "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" - "github.com/golangci/golangci-lint/pkg/lint/linter" - "github.com/golangci/golangci-lint/pkg/result" -) - -const gocognitName = "gocognit" - -//nolint:dupl -func NewGocognit(settings *config.GocognitSettings) *goanalysis.Linter { - var mu sync.Mutex - var resIssues []goanalysis.Issue - - analyzer := &analysis.Analyzer{ - Name: goanalysis.TheOnlyAnalyzerName, - Doc: goanalysis.TheOnlyanalyzerDoc, - Run: func(pass *analysis.Pass) (any, error) { - issues := runGocognit(pass, settings) - - if len(issues) == 0 { - return nil, nil - } - - mu.Lock() - resIssues = append(resIssues, issues...) 
- mu.Unlock() - - return nil, nil - }, - } - - return goanalysis.NewLinter( - gocognitName, - "Computes and checks the cognitive complexity of functions", - []*analysis.Analyzer{analyzer}, - nil, - ).WithIssuesReporter(func(*linter.Context) []goanalysis.Issue { - return resIssues - }).WithLoadMode(goanalysis.LoadModeSyntax) -} - -func runGocognit(pass *analysis.Pass, settings *config.GocognitSettings) []goanalysis.Issue { - var stats []gocognit.Stat - for _, f := range pass.Files { - stats = gocognit.ComplexityStats(f, pass.Fset, stats) - } - if len(stats) == 0 { - return nil - } - - sort.SliceStable(stats, func(i, j int) bool { - return stats[i].Complexity > stats[j].Complexity - }) - - issues := make([]goanalysis.Issue, 0, len(stats)) - for _, s := range stats { - if s.Complexity <= settings.MinComplexity { - break // Break as the stats is already sorted from greatest to least - } - - issues = append(issues, goanalysis.NewIssue(&result.Issue{ - Pos: s.Pos, - Text: fmt.Sprintf("cognitive complexity %d of func %s is high (> %d)", - s.Complexity, formatCode(s.FuncName, nil), settings.MinComplexity), - FromLinter: gocognitName, - }, pass)) - } - - return issues -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/goconst.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/goconst.go deleted file mode 100644 index b51885275..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/goconst.go +++ /dev/null @@ -1,98 +0,0 @@ -package golinters - -import ( - "fmt" - "sync" - - goconstAPI "github.com/jgautheron/goconst" - "golang.org/x/tools/go/analysis" - - "github.com/golangci/golangci-lint/pkg/config" - "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" - "github.com/golangci/golangci-lint/pkg/lint/linter" - "github.com/golangci/golangci-lint/pkg/result" -) - -const goconstName = "goconst" - -//nolint:dupl -func NewGoconst(settings *config.GoConstSettings) *goanalysis.Linter { - var mu sync.Mutex - var resIssues []goanalysis.Issue - - analyzer := &analysis.Analyzer{ - Name: goconstName, - Doc: goanalysis.TheOnlyanalyzerDoc, - Run: func(pass *analysis.Pass) (any, error) { - issues, err := runGoconst(pass, settings) - if err != nil { - return nil, err - } - - if len(issues) == 0 { - return nil, nil - } - - mu.Lock() - resIssues = append(resIssues, issues...) 
- mu.Unlock() - - return nil, nil - }, - } - - return goanalysis.NewLinter( - goconstName, - "Finds repeated strings that could be replaced by a constant", - []*analysis.Analyzer{analyzer}, - nil, - ).WithIssuesReporter(func(*linter.Context) []goanalysis.Issue { - return resIssues - }).WithLoadMode(goanalysis.LoadModeSyntax) -} - -func runGoconst(pass *analysis.Pass, settings *config.GoConstSettings) ([]goanalysis.Issue, error) { - cfg := goconstAPI.Config{ - IgnoreStrings: settings.IgnoreStrings, - IgnoreTests: settings.IgnoreTests, - MatchWithConstants: settings.MatchWithConstants, - MinStringLength: settings.MinStringLen, - MinOccurrences: settings.MinOccurrencesCount, - ParseNumbers: settings.ParseNumbers, - NumberMin: settings.NumberMin, - NumberMax: settings.NumberMax, - ExcludeTypes: map[goconstAPI.Type]bool{}, - } - - if settings.IgnoreCalls { - cfg.ExcludeTypes[goconstAPI.Call] = true - } - - lintIssues, err := goconstAPI.Run(pass.Files, pass.Fset, &cfg) - if err != nil { - return nil, err - } - - if len(lintIssues) == 0 { - return nil, nil - } - - res := make([]goanalysis.Issue, 0, len(lintIssues)) - for _, i := range lintIssues { - text := fmt.Sprintf("string %s has %d occurrences", formatCode(i.Str, nil), i.OccurrencesCount) - - if i.MatchingConst == "" { - text += ", make it a constant" - } else { - text += fmt.Sprintf(", but such constant %s already exists", formatCode(i.MatchingConst, nil)) - } - - res = append(res, goanalysis.NewIssue(&result.Issue{ - Pos: i.Pos, - Text: text, - FromLinter: goconstName, - }, pass)) - } - - return res, nil -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/gocritic.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/gocritic.go deleted file mode 100644 index 3cf43afc6..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/gocritic.go +++ /dev/null @@ -1,630 +0,0 @@ -package golinters - -import ( - "errors" - "fmt" - "go/ast" - "go/types" - "path/filepath" - "reflect" - "runtime" - "sort" - "strings" - "sync" - - "github.com/go-critic/go-critic/checkers" - gocriticlinter "github.com/go-critic/go-critic/linter" - "golang.org/x/exp/maps" - "golang.org/x/tools/go/analysis" - - "github.com/golangci/golangci-lint/pkg/config" - "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" - "github.com/golangci/golangci-lint/pkg/lint/linter" - "github.com/golangci/golangci-lint/pkg/logutils" - "github.com/golangci/golangci-lint/pkg/result" -) - -const goCriticName = "gocritic" - -var ( - goCriticDebugf = logutils.Debug(logutils.DebugKeyGoCritic) - isGoCriticDebug = logutils.HaveDebugTag(logutils.DebugKeyGoCritic) -) - -func NewGoCritic(settings *config.GoCriticSettings, cfg *config.Config) *goanalysis.Linter { - var mu sync.Mutex - var resIssues []goanalysis.Issue - - wrapper := &goCriticWrapper{ - cfg: cfg, - sizes: types.SizesFor("gc", runtime.GOARCH), - } - - analyzer := &analysis.Analyzer{ - Name: goCriticName, - Doc: goanalysis.TheOnlyanalyzerDoc, - Run: func(pass *analysis.Pass) (any, error) { - issues, err := wrapper.run(pass) - if err != nil { - return nil, err - } - - if len(issues) == 0 { - return nil, nil - } - - mu.Lock() - resIssues = append(resIssues, issues...) - mu.Unlock() - - return nil, nil - }, - } - - return goanalysis.NewLinter( - goCriticName, - `Provides diagnostics that check for bugs, performance and style issues. -Extensible without recompilation through dynamic rules. 
-Dynamic rules are written declaratively with AST patterns, filters, report message and optional suggestion.`, - []*analysis.Analyzer{analyzer}, - nil, - ). - WithContextSetter(func(context *linter.Context) { - wrapper.init(settings, context.Log) - }). - WithIssuesReporter(func(*linter.Context) []goanalysis.Issue { - return resIssues - }).WithLoadMode(goanalysis.LoadModeTypesInfo) -} - -type goCriticWrapper struct { - settingsWrapper *goCriticSettingsWrapper - cfg *config.Config - sizes types.Sizes - once sync.Once -} - -func (w *goCriticWrapper) init(settings *config.GoCriticSettings, logger logutils.Log) { - if settings == nil { - return - } - - w.once.Do(func() { - err := checkers.InitEmbeddedRules() - if err != nil { - logger.Fatalf("%s: %v: setting an explicit GOROOT can fix this problem.", goCriticName, err) - } - }) - - settingsWrapper := newGoCriticSettingsWrapper(settings, logger) - - settingsWrapper.inferEnabledChecks() - - if err := settingsWrapper.validate(); err != nil { - logger.Fatalf("%s: invalid settings: %s", goCriticName, err) - } - - w.settingsWrapper = settingsWrapper -} - -func (w *goCriticWrapper) run(pass *analysis.Pass) ([]goanalysis.Issue, error) { - if w.settingsWrapper == nil { - return nil, fmt.Errorf("the settings wrapper is nil") - } - - linterCtx := gocriticlinter.NewContext(pass.Fset, w.sizes) - - linterCtx.SetGoVersion(w.settingsWrapper.Go) - - enabledCheckers, err := w.buildEnabledCheckers(linterCtx) - if err != nil { - return nil, err - } - - linterCtx.SetPackageInfo(pass.TypesInfo, pass.Pkg) - - pkgIssues := runGocriticOnPackage(linterCtx, enabledCheckers, pass.Files) - - issues := make([]goanalysis.Issue, 0, len(pkgIssues)) - for i := range pkgIssues { - issues = append(issues, goanalysis.NewIssue(&pkgIssues[i], pass)) - } - - return issues, nil -} - -func (w *goCriticWrapper) buildEnabledCheckers(linterCtx *gocriticlinter.Context) ([]*gocriticlinter.Checker, error) { - allParams := w.settingsWrapper.getLowerCasedParams() - - var enabledCheckers []*gocriticlinter.Checker - for _, info := range gocriticlinter.GetCheckersInfo() { - if !w.settingsWrapper.isCheckEnabled(info.Name) { - continue - } - - if err := w.configureCheckerInfo(info, allParams); err != nil { - return nil, err - } - - c, err := gocriticlinter.NewChecker(linterCtx, info) - if err != nil { - return nil, err - } - enabledCheckers = append(enabledCheckers, c) - } - - return enabledCheckers, nil -} - -func runGocriticOnPackage(linterCtx *gocriticlinter.Context, checks []*gocriticlinter.Checker, - files []*ast.File) []result.Issue { - var res []result.Issue - for _, f := range files { - filename := filepath.Base(linterCtx.FileSet.Position(f.Pos()).Filename) - linterCtx.SetFileInfo(filename, f) - - issues := runGocriticOnFile(linterCtx, f, checks) - res = append(res, issues...) - } - return res -} - -func runGocriticOnFile(linterCtx *gocriticlinter.Context, f *ast.File, checks []*gocriticlinter.Checker) []result.Issue { - var res []result.Issue - - for _, c := range checks { - // All checkers are expected to use *lint.Context - // as read-only structure, so no copying is required. 
- for _, warn := range c.Check(f) { - pos := linterCtx.FileSet.Position(warn.Pos) - issue := result.Issue{ - Pos: pos, - Text: fmt.Sprintf("%s: %s", c.Info.Name, warn.Text), - FromLinter: goCriticName, - } - - if warn.HasQuickFix() { - issue.Replacement = &result.Replacement{ - Inline: &result.InlineFix{ - StartCol: pos.Column - 1, - Length: int(warn.Suggestion.To - warn.Suggestion.From), - NewString: string(warn.Suggestion.Replacement), - }, - } - } - - res = append(res, issue) - } - } - - return res -} - -func (w *goCriticWrapper) configureCheckerInfo(info *gocriticlinter.CheckerInfo, allParams map[string]config.GoCriticCheckSettings) error { - params := allParams[strings.ToLower(info.Name)] - if params == nil { // no config for this checker - return nil - } - - infoParams := normalizeCheckerInfoParams(info) - for k, p := range params { - v, ok := infoParams[k] - if ok { - v.Value = w.normalizeCheckerParamsValue(p) - continue - } - - // param `k` isn't supported - if len(info.Params) == 0 { - return fmt.Errorf("checker %s config param %s doesn't exist: checker doesn't have params", - info.Name, k) - } - - supportedKeys := maps.Keys(info.Params) - sort.Strings(supportedKeys) - - return fmt.Errorf("checker %s config param %s doesn't exist, all existing: %s", - info.Name, k, supportedKeys) - } - - return nil -} - -func normalizeCheckerInfoParams(info *gocriticlinter.CheckerInfo) gocriticlinter.CheckerParams { - // lowercase info param keys here because golangci-lint's config parser lowercases all strings - ret := gocriticlinter.CheckerParams{} - for k, v := range info.Params { - ret[strings.ToLower(k)] = v - } - - return ret -} - -// normalizeCheckerParamsValue normalizes value types. -// go-critic asserts that CheckerParam.Value has some specific types, -// but the file parsers (TOML, YAML, JSON) don't create the same representation for raw type. -// then we have to convert value types into the expected value types. -// Maybe in the future, this kind of conversion will be done in go-critic itself. -func (w *goCriticWrapper) normalizeCheckerParamsValue(p any) any { - rv := reflect.ValueOf(p) - switch rv.Type().Kind() { - case reflect.Int64, reflect.Int32, reflect.Int16, reflect.Int8, reflect.Int: - return int(rv.Int()) - case reflect.Bool: - return rv.Bool() - case reflect.String: - // Perform variable substitution. - return strings.ReplaceAll(rv.String(), "${configDir}", w.cfg.GetConfigDir()) - default: - return p - } -} - -// TODO(ldez): rewrite and simplify goCriticSettingsWrapper. 
- -type goCriticSettingsWrapper struct { - *config.GoCriticSettings - - logger logutils.Log - - allCheckers []*gocriticlinter.CheckerInfo - allCheckerMap map[string]*gocriticlinter.CheckerInfo - - inferredEnabledChecks map[string]bool -} - -func newGoCriticSettingsWrapper(settings *config.GoCriticSettings, logger logutils.Log) *goCriticSettingsWrapper { - allCheckers := gocriticlinter.GetCheckersInfo() - - allCheckerMap := make(map[string]*gocriticlinter.CheckerInfo) - for _, checkInfo := range allCheckers { - allCheckerMap[checkInfo.Name] = checkInfo - } - - return &goCriticSettingsWrapper{ - GoCriticSettings: settings, - logger: logger, - allCheckers: allCheckers, - allCheckerMap: allCheckerMap, - inferredEnabledChecks: map[string]bool{}, - } -} - -func (s *goCriticSettingsWrapper) buildTagToCheckersMap() map[string][]string { - tagToCheckers := map[string][]string{} - - for _, checker := range s.allCheckers { - for _, tag := range checker.Tags { - tagToCheckers[tag] = append(tagToCheckers[tag], checker.Name) - } - } - - return tagToCheckers -} - -func (s *goCriticSettingsWrapper) checkerTagsDebugf() { - if !isGoCriticDebug { - return - } - - tagToCheckers := s.buildTagToCheckersMap() - - allTags := maps.Keys(tagToCheckers) - sort.Strings(allTags) - - goCriticDebugf("All gocritic existing tags and checks:") - for _, tag := range allTags { - debugChecksListf(tagToCheckers[tag], " tag %q", tag) - } -} - -func (s *goCriticSettingsWrapper) disabledCheckersDebugf() { - if !isGoCriticDebug { - return - } - - var disabledCheckers []string - for _, checker := range s.allCheckers { - if s.inferredEnabledChecks[strings.ToLower(checker.Name)] { - continue - } - - disabledCheckers = append(disabledCheckers, checker.Name) - } - - if len(disabledCheckers) == 0 { - goCriticDebugf("All checks are enabled") - } else { - debugChecksListf(disabledCheckers, "Final not used") - } -} - -func (s *goCriticSettingsWrapper) inferEnabledChecks() { - s.checkerTagsDebugf() - - enabledByDefaultChecks := s.getDefaultEnabledCheckersNames() - debugChecksListf(enabledByDefaultChecks, "Enabled by default") - - disabledByDefaultChecks := s.getDefaultDisabledCheckersNames() - debugChecksListf(disabledByDefaultChecks, "Disabled by default") - - enabledChecks := make([]string, 0, len(s.EnabledTags)+len(enabledByDefaultChecks)) - - // EnabledTags - if len(s.EnabledTags) != 0 { - tagToCheckers := s.buildTagToCheckersMap() - for _, tag := range s.EnabledTags { - enabledChecks = append(enabledChecks, tagToCheckers[tag]...) - } - - debugChecksListf(enabledChecks, "Enabled by config tags %s", sprintStrings(s.EnabledTags)) - } - - if !(len(s.EnabledTags) == 0 && len(s.EnabledChecks) != 0) { - // don't use default checks only if we have no enabled tags and enable some checks manually - enabledChecks = append(enabledChecks, enabledByDefaultChecks...) 
- } - - // DisabledTags - if len(s.DisabledTags) != 0 { - enabledChecks = s.filterByDisableTags(enabledChecks, s.DisabledTags) - } - - // EnabledChecks - if len(s.EnabledChecks) != 0 { - debugChecksListf(s.EnabledChecks, "Enabled by config") - - alreadyEnabledChecksSet := stringsSliceToSet(enabledChecks) - for _, enabledCheck := range s.EnabledChecks { - if alreadyEnabledChecksSet[enabledCheck] { - s.logger.Warnf("%s: no need to enable check %q: it's already enabled", goCriticName, enabledCheck) - continue - } - enabledChecks = append(enabledChecks, enabledCheck) - } - } - - // DisabledChecks - if len(s.DisabledChecks) != 0 { - debugChecksListf(s.DisabledChecks, "Disabled by config") - - enabledChecksSet := stringsSliceToSet(enabledChecks) - for _, disabledCheck := range s.DisabledChecks { - if !enabledChecksSet[disabledCheck] { - s.logger.Warnf("%s: check %q was explicitly disabled via config. However, as this check "+ - "is disabled by default, there is no need to explicitly disable it via config.", goCriticName, disabledCheck) - continue - } - delete(enabledChecksSet, disabledCheck) - } - - enabledChecks = nil - for enabledCheck := range enabledChecksSet { - enabledChecks = append(enabledChecks, enabledCheck) - } - } - - s.inferredEnabledChecks = map[string]bool{} - for _, check := range enabledChecks { - s.inferredEnabledChecks[strings.ToLower(check)] = true - } - - debugChecksListf(enabledChecks, "Final used") - - s.disabledCheckersDebugf() -} - -func (s *goCriticSettingsWrapper) validate() error { - if len(s.EnabledTags) == 0 { - if len(s.EnabledChecks) != 0 && len(s.DisabledChecks) != 0 { - return errors.New("both enabled and disabled check aren't allowed for gocritic") - } - } else { - if err := validateStringsUniq(s.EnabledTags); err != nil { - return fmt.Errorf("validate enabled tags: %w", err) - } - - tagToCheckers := s.buildTagToCheckersMap() - - for _, tag := range s.EnabledTags { - if _, ok := tagToCheckers[tag]; !ok { - return fmt.Errorf("gocritic [enabled]tag %q doesn't exist", tag) - } - } - } - - if len(s.DisabledTags) > 0 { - tagToCheckers := s.buildTagToCheckersMap() - for _, tag := range s.EnabledTags { - if _, ok := tagToCheckers[tag]; !ok { - return fmt.Errorf("gocritic [disabled]tag %q doesn't exist", tag) - } - } - } - - if err := validateStringsUniq(s.EnabledChecks); err != nil { - return fmt.Errorf("validate enabled checks: %w", err) - } - - if err := validateStringsUniq(s.DisabledChecks); err != nil { - return fmt.Errorf("validate disabled checks: %w", err) - } - - if err := s.validateCheckerNames(); err != nil { - return fmt.Errorf("validation failed: %w", err) - } - - return nil -} - -func (s *goCriticSettingsWrapper) isCheckEnabled(name string) bool { - return s.inferredEnabledChecks[strings.ToLower(name)] -} - -// getAllCheckerNames returns a map containing all checker names supported by gocritic. 
-func (s *goCriticSettingsWrapper) getAllCheckerNames() map[string]bool { - allCheckerNames := make(map[string]bool, len(s.allCheckers)) - - for _, checker := range s.allCheckers { - allCheckerNames[strings.ToLower(checker.Name)] = true - } - - return allCheckerNames -} - -func (s *goCriticSettingsWrapper) getDefaultEnabledCheckersNames() []string { - var enabled []string - - for _, info := range s.allCheckers { - enable := s.isEnabledByDefaultCheck(info) - if enable { - enabled = append(enabled, info.Name) - } - } - - return enabled -} - -func (s *goCriticSettingsWrapper) getDefaultDisabledCheckersNames() []string { - var disabled []string - - for _, info := range s.allCheckers { - enable := s.isEnabledByDefaultCheck(info) - if !enable { - disabled = append(disabled, info.Name) - } - } - - return disabled -} - -func (s *goCriticSettingsWrapper) validateCheckerNames() error { - allowedNames := s.getAllCheckerNames() - - for _, name := range s.EnabledChecks { - if !allowedNames[strings.ToLower(name)] { - return fmt.Errorf("enabled checker %s doesn't exist, all existing checkers: %s", - name, sprintAllowedCheckerNames(allowedNames)) - } - } - - for _, name := range s.DisabledChecks { - if !allowedNames[strings.ToLower(name)] { - return fmt.Errorf("disabled checker %s doesn't exist, all existing checkers: %s", - name, sprintAllowedCheckerNames(allowedNames)) - } - } - - for checkName := range s.SettingsPerCheck { - if _, ok := allowedNames[checkName]; !ok { - return fmt.Errorf("invalid setting, checker %s doesn't exist, all existing checkers: %s", - checkName, sprintAllowedCheckerNames(allowedNames)) - } - - if !s.isCheckEnabled(checkName) { - s.logger.Warnf("%s: settings were provided for not enabled check %q", goCriticName, checkName) - } - } - - return nil -} - -func (s *goCriticSettingsWrapper) getLowerCasedParams() map[string]config.GoCriticCheckSettings { - ret := make(map[string]config.GoCriticCheckSettings, len(s.SettingsPerCheck)) - - for checker, params := range s.SettingsPerCheck { - ret[strings.ToLower(checker)] = params - } - - return ret -} - -func (s *goCriticSettingsWrapper) filterByDisableTags(enabledChecks, disableTags []string) []string { - enabledChecksSet := stringsSliceToSet(enabledChecks) - - for _, enabledCheck := range enabledChecks { - checkInfo, checkInfoExists := s.allCheckerMap[enabledCheck] - if !checkInfoExists { - s.logger.Warnf("%s: check %q was not exists via filtering disabled tags", goCriticName, enabledCheck) - continue - } - - hitTags := intersectStringSlice(checkInfo.Tags, disableTags) - if len(hitTags) != 0 { - delete(enabledChecksSet, enabledCheck) - } - } - - debugChecksListf(enabledChecks, "Disabled by config tags %s", sprintStrings(disableTags)) - - enabledChecks = nil - for enabledCheck := range enabledChecksSet { - enabledChecks = append(enabledChecks, enabledCheck) - } - - return enabledChecks -} - -func (s *goCriticSettingsWrapper) isEnabledByDefaultCheck(info *gocriticlinter.CheckerInfo) bool { - return !info.HasTag("experimental") && - !info.HasTag("opinionated") && - !info.HasTag("performance") -} - -func validateStringsUniq(ss []string) error { - set := map[string]bool{} - - for _, s := range ss { - _, ok := set[s] - if ok { - return fmt.Errorf("%q occurs multiple times in list", s) - } - set[s] = true - } - - return nil -} - -func intersectStringSlice(s1, s2 []string) []string { - s1Map := make(map[string]struct{}, len(s1)) - - for _, s := range s1 { - s1Map[s] = struct{}{} - } - - results := make([]string, 0) - for _, s := range s2 { - if 
_, exists := s1Map[s]; exists { - results = append(results, s) - } - } - - return results -} - -func sprintAllowedCheckerNames(allowedNames map[string]bool) string { - namesSlice := maps.Keys(allowedNames) - return sprintStrings(namesSlice) -} - -func sprintStrings(ss []string) string { - sort.Strings(ss) - return fmt.Sprint(ss) -} - -func debugChecksListf(checks []string, format string, args ...any) { - if !isGoCriticDebug { - return - } - - goCriticDebugf("%s checks (%d): %s", fmt.Sprintf(format, args...), len(checks), sprintStrings(checks)) -} - -func stringsSliceToSet(ss []string) map[string]bool { - ret := make(map[string]bool, len(ss)) - for _, s := range ss { - ret[s] = true - } - - return ret -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/gocyclo.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/gocyclo.go deleted file mode 100644 index b502623ba..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/gocyclo.go +++ /dev/null @@ -1,76 +0,0 @@ -package golinters - -import ( - "fmt" - "sync" - - "github.com/fzipp/gocyclo" - "golang.org/x/tools/go/analysis" - - "github.com/golangci/golangci-lint/pkg/config" - "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" - "github.com/golangci/golangci-lint/pkg/lint/linter" - "github.com/golangci/golangci-lint/pkg/result" -) - -const gocycloName = "gocyclo" - -//nolint:dupl -func NewGocyclo(settings *config.GoCycloSettings) *goanalysis.Linter { - var mu sync.Mutex - var resIssues []goanalysis.Issue - - analyzer := &analysis.Analyzer{ - Name: gocycloName, - Doc: goanalysis.TheOnlyanalyzerDoc, - Run: func(pass *analysis.Pass) (any, error) { - issues := runGoCyclo(pass, settings) - - if len(issues) == 0 { - return nil, nil - } - - mu.Lock() - resIssues = append(resIssues, issues...) 
- mu.Unlock() - - return nil, nil - }, - } - - return goanalysis.NewLinter( - gocycloName, - "Computes and checks the cyclomatic complexity of functions", - []*analysis.Analyzer{analyzer}, - nil, - ).WithIssuesReporter(func(*linter.Context) []goanalysis.Issue { - return resIssues - }).WithLoadMode(goanalysis.LoadModeSyntax) -} - -func runGoCyclo(pass *analysis.Pass, settings *config.GoCycloSettings) []goanalysis.Issue { - var stats gocyclo.Stats - for _, f := range pass.Files { - stats = gocyclo.AnalyzeASTFile(f, pass.Fset, stats) - } - if len(stats) == 0 { - return nil - } - - stats = stats.SortAndFilter(-1, settings.MinComplexity) - - issues := make([]goanalysis.Issue, 0, len(stats)) - - for _, s := range stats { - text := fmt.Sprintf("cyclomatic complexity %d of func %s is high (> %d)", - s.Complexity, formatCode(s.FuncName, nil), settings.MinComplexity) - - issues = append(issues, goanalysis.NewIssue(&result.Issue{ - Pos: s.Pos, - Text: text, - FromLinter: gocycloName, - }, pass)) - } - - return issues -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/godot.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/godot.go deleted file mode 100644 index b0ee64434..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/godot.go +++ /dev/null @@ -1,102 +0,0 @@ -package golinters - -import ( - "sync" - - "github.com/tetafro/godot" - "golang.org/x/tools/go/analysis" - - "github.com/golangci/golangci-lint/pkg/config" - "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" - "github.com/golangci/golangci-lint/pkg/lint/linter" - "github.com/golangci/golangci-lint/pkg/result" -) - -const godotName = "godot" - -func NewGodot(settings *config.GodotSettings) *goanalysis.Linter { - var mu sync.Mutex - var resIssues []goanalysis.Issue - - var dotSettings godot.Settings - - if settings != nil { - dotSettings = godot.Settings{ - Scope: godot.Scope(settings.Scope), - Exclude: settings.Exclude, - Period: settings.Period, - Capital: settings.Capital, - } - - // Convert deprecated setting - // todo(butuzov): remove on v2 release - if settings.CheckAll { //nolint:staticcheck // Keep for retro-compatibility. - dotSettings.Scope = godot.AllScope - } - } - - if dotSettings.Scope == "" { - dotSettings.Scope = godot.DeclScope - } - - analyzer := &analysis.Analyzer{ - Name: godotName, - Doc: goanalysis.TheOnlyanalyzerDoc, - Run: func(pass *analysis.Pass) (any, error) { - issues, err := runGodot(pass, dotSettings) - if err != nil { - return nil, err - } - - if len(issues) == 0 { - return nil, nil - } - - mu.Lock() - resIssues = append(resIssues, issues...) - mu.Unlock() - - return nil, nil - }, - } - - return goanalysis.NewLinter( - godotName, - "Check if comments end in a period", - []*analysis.Analyzer{analyzer}, - nil, - ).WithIssuesReporter(func(*linter.Context) []goanalysis.Issue { - return resIssues - }).WithLoadMode(goanalysis.LoadModeSyntax) -} - -func runGodot(pass *analysis.Pass, settings godot.Settings) ([]goanalysis.Issue, error) { - var lintIssues []godot.Issue - for _, file := range pass.Files { - iss, err := godot.Run(file, pass.Fset, settings) - if err != nil { - return nil, err - } - lintIssues = append(lintIssues, iss...) 
- } - - if len(lintIssues) == 0 { - return nil, nil - } - - issues := make([]goanalysis.Issue, len(lintIssues)) - for k, i := range lintIssues { - issue := result.Issue{ - Pos: i.Pos, - Text: i.Message, - FromLinter: godotName, - Replacement: &result.Replacement{ - NewLines: []string{i.Replacement}, - }, - } - - issues[k] = goanalysis.NewIssue(&issue, pass) - } - - return issues, nil -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/godox.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/godox.go deleted file mode 100644 index 955810417..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/godox.go +++ /dev/null @@ -1,76 +0,0 @@ -package golinters - -import ( - "go/token" - "strings" - "sync" - - "github.com/matoous/godox" - "golang.org/x/tools/go/analysis" - - "github.com/golangci/golangci-lint/pkg/config" - "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" - "github.com/golangci/golangci-lint/pkg/lint/linter" - "github.com/golangci/golangci-lint/pkg/result" -) - -const godoxName = "godox" - -//nolint:dupl -func NewGodox(settings *config.GodoxSettings) *goanalysis.Linter { - var mu sync.Mutex - var resIssues []goanalysis.Issue - - analyzer := &analysis.Analyzer{ - Name: godoxName, - Doc: goanalysis.TheOnlyanalyzerDoc, - Run: func(pass *analysis.Pass) (any, error) { - issues := runGodox(pass, settings) - - if len(issues) == 0 { - return nil, nil - } - - mu.Lock() - resIssues = append(resIssues, issues...) - mu.Unlock() - - return nil, nil - }, - } - - return goanalysis.NewLinter( - godoxName, - "Tool for detection of FIXME, TODO and other comment keywords", - []*analysis.Analyzer{analyzer}, - nil, - ).WithIssuesReporter(func(*linter.Context) []goanalysis.Issue { - return resIssues - }).WithLoadMode(goanalysis.LoadModeSyntax) -} - -func runGodox(pass *analysis.Pass, settings *config.GodoxSettings) []goanalysis.Issue { - var messages []godox.Message - for _, file := range pass.Files { - messages = append(messages, godox.Run(file, pass.Fset, settings.Keywords...)...) 
- } - - if len(messages) == 0 { - return nil - } - - issues := make([]goanalysis.Issue, len(messages)) - - for k, i := range messages { - issues[k] = goanalysis.NewIssue(&result.Issue{ - Pos: token.Position{ - Filename: i.Pos.Filename, - Line: i.Pos.Line, - }, - Text: strings.TrimRight(i.Message, "\n"), - FromLinter: godoxName, - }, pass) - } - - return issues -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/goerr113.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/goerr113.go deleted file mode 100644 index 10addc57c..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/goerr113.go +++ /dev/null @@ -1,17 +0,0 @@ -package golinters - -import ( - "github.com/Djarvur/go-err113" - "golang.org/x/tools/go/analysis" - - "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" -) - -func NewGoerr113() *goanalysis.Linter { - return goanalysis.NewLinter( - "goerr113", - "Go linter to check the errors handling expressions", - []*analysis.Analyzer{err113.NewAnalyzer()}, - nil, - ).WithLoadMode(goanalysis.LoadModeTypesInfo) -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/gofmt.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/gofmt.go deleted file mode 100644 index d2d0d3ccc..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/gofmt.go +++ /dev/null @@ -1,85 +0,0 @@ -package golinters - -import ( - "fmt" - "sync" - - gofmtAPI "github.com/golangci/gofmt/gofmt" - "golang.org/x/tools/go/analysis" - - "github.com/golangci/golangci-lint/pkg/config" - "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" - "github.com/golangci/golangci-lint/pkg/lint/linter" -) - -const gofmtName = "gofmt" - -func NewGofmt(settings *config.GoFmtSettings) *goanalysis.Linter { - var mu sync.Mutex - var resIssues []goanalysis.Issue - - analyzer := &analysis.Analyzer{ - Name: gofmtName, - Doc: goanalysis.TheOnlyanalyzerDoc, - Run: goanalysis.DummyRun, - } - - return goanalysis.NewLinter( - gofmtName, - "Gofmt checks whether code was gofmt-ed. By default "+ - "this tool runs with -s option to check for code simplification", - []*analysis.Analyzer{analyzer}, - nil, - ).WithContextSetter(func(lintCtx *linter.Context) { - analyzer.Run = func(pass *analysis.Pass) (any, error) { - issues, err := runGofmt(lintCtx, pass, settings) - if err != nil { - return nil, err - } - - if len(issues) == 0 { - return nil, nil - } - - mu.Lock() - resIssues = append(resIssues, issues...) 
- mu.Unlock() - - return nil, nil - } - }).WithIssuesReporter(func(*linter.Context) []goanalysis.Issue { - return resIssues - }).WithLoadMode(goanalysis.LoadModeSyntax) -} - -func runGofmt(lintCtx *linter.Context, pass *analysis.Pass, settings *config.GoFmtSettings) ([]goanalysis.Issue, error) { - fileNames := getFileNames(pass) - - var rewriteRules []gofmtAPI.RewriteRule - for _, rule := range settings.RewriteRules { - rewriteRules = append(rewriteRules, gofmtAPI.RewriteRule(rule)) - } - - var issues []goanalysis.Issue - - for _, f := range fileNames { - diff, err := gofmtAPI.RunRewrite(f, settings.Simplify, rewriteRules) - if err != nil { // TODO: skip - return nil, err - } - if diff == nil { - continue - } - - is, err := extractIssuesFromPatch(string(diff), lintCtx, gofmtName) - if err != nil { - return nil, fmt.Errorf("can't extract issues from gofmt diff output %q: %w", string(diff), err) - } - - for i := range is { - issues = append(issues, goanalysis.NewIssue(&is[i], pass)) - } - } - - return issues, nil -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/gofmt_common.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/gofmt_common.go deleted file mode 100644 index 6b7184d65..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/gofmt_common.go +++ /dev/null @@ -1,290 +0,0 @@ -package golinters - -import ( - "bytes" - "fmt" - "go/token" - "strings" - - diffpkg "github.com/sourcegraph/go-diff/diff" - - "github.com/golangci/golangci-lint/pkg/config" - "github.com/golangci/golangci-lint/pkg/lint/linter" - "github.com/golangci/golangci-lint/pkg/logutils" - "github.com/golangci/golangci-lint/pkg/result" -) - -type Change struct { - LineRange result.Range - Replacement result.Replacement -} - -type diffLineType string - -const ( - diffLineAdded diffLineType = "added" - diffLineOriginal diffLineType = "original" - diffLineDeleted diffLineType = "deleted" -) - -type diffLine struct { - originalNumber int // 1-based original line number - typ diffLineType - data string // "+" or "-" stripped line -} - -type hunkChangesParser struct { - // needed because we merge currently added lines with the last original line - lastOriginalLine *diffLine - - // if the first line of diff is an adding we save all additions to replacementLinesToPrepend - replacementLinesToPrepend []string - - log logutils.Log - - lines []diffLine - - ret []Change -} - -func (p *hunkChangesParser) parseDiffLines(h *diffpkg.Hunk) { - lines := bytes.Split(h.Body, []byte{'\n'}) - currentOriginalLineNumber := int(h.OrigStartLine) - var ret []diffLine - - for i, line := range lines { - dl := diffLine{ - originalNumber: currentOriginalLineNumber, - } - - lineStr := string(line) - - if strings.HasPrefix(lineStr, "-") { - dl.typ = diffLineDeleted - dl.data = strings.TrimPrefix(lineStr, "-") - currentOriginalLineNumber++ - } else if strings.HasPrefix(lineStr, "+") { - dl.typ = diffLineAdded - dl.data = strings.TrimPrefix(lineStr, "+") - } else { - if i == len(lines)-1 && lineStr == "" { - // handle last \n: don't add an empty original line - break - } - - dl.typ = diffLineOriginal - dl.data = strings.TrimPrefix(lineStr, " ") - currentOriginalLineNumber++ - } - - ret = append(ret, dl) - } - - // if > 0, then the original file had a 'No newline at end of file' mark - if h.OrigNoNewlineAt > 0 { - dl := diffLine{ - originalNumber: currentOriginalLineNumber + 1, - typ: diffLineAdded, - data: "", - } - ret = append(ret, dl) - } - - p.lines = ret -} - -func (p *hunkChangesParser) 
handleOriginalLine(line diffLine, i *int) { - if len(p.replacementLinesToPrepend) == 0 { - p.lastOriginalLine = &line - *i++ - return - } - - // check following added lines for the case: - // + added line 1 - // original line - // + added line 2 - - *i++ - var followingAddedLines []string - for ; *i < len(p.lines) && p.lines[*i].typ == diffLineAdded; *i++ { - followingAddedLines = append(followingAddedLines, p.lines[*i].data) - } - - p.ret = append(p.ret, Change{ - LineRange: result.Range{ - From: line.originalNumber, - To: line.originalNumber, - }, - Replacement: result.Replacement{ - NewLines: append(p.replacementLinesToPrepend, append([]string{line.data}, followingAddedLines...)...), - }, - }) - p.replacementLinesToPrepend = nil - p.lastOriginalLine = &line -} - -func (p *hunkChangesParser) handleDeletedLines(deletedLines []diffLine, addedLines []string) { - change := Change{ - LineRange: result.Range{ - From: deletedLines[0].originalNumber, - To: deletedLines[len(deletedLines)-1].originalNumber, - }, - } - - if len(addedLines) != 0 { - //nolint:gocritic - change.Replacement.NewLines = append(p.replacementLinesToPrepend, addedLines...) - if len(p.replacementLinesToPrepend) != 0 { - p.replacementLinesToPrepend = nil - } - - p.ret = append(p.ret, change) - return - } - - // delete-only change with possible prepending - if len(p.replacementLinesToPrepend) != 0 { - change.Replacement.NewLines = p.replacementLinesToPrepend - p.replacementLinesToPrepend = nil - } else { - change.Replacement.NeedOnlyDelete = true - } - - p.ret = append(p.ret, change) -} - -func (p *hunkChangesParser) handleAddedOnlyLines(addedLines []string) { - if p.lastOriginalLine == nil { - // the first line is added; the diff looks like: - // 1. + ... - // 2. - ... - // or - // 1. + ... - // 2. ... 
- - p.replacementLinesToPrepend = addedLines - return - } - - // add-only change merged into the last original line with possible prepending - p.ret = append(p.ret, Change{ - LineRange: result.Range{ - From: p.lastOriginalLine.originalNumber, - To: p.lastOriginalLine.originalNumber, - }, - Replacement: result.Replacement{ - NewLines: append(p.replacementLinesToPrepend, append([]string{p.lastOriginalLine.data}, addedLines...)...), - }, - }) - p.replacementLinesToPrepend = nil -} - -func (p *hunkChangesParser) parse(h *diffpkg.Hunk) []Change { - p.parseDiffLines(h) - - for i := 0; i < len(p.lines); { - line := p.lines[i] - if line.typ == diffLineOriginal { - p.handleOriginalLine(line, &i) - continue - } - - var deletedLines []diffLine - for ; i < len(p.lines) && p.lines[i].typ == diffLineDeleted; i++ { - deletedLines = append(deletedLines, p.lines[i]) - } - - var addedLines []string - for ; i < len(p.lines) && p.lines[i].typ == diffLineAdded; i++ { - addedLines = append(addedLines, p.lines[i].data) - } - - if len(deletedLines) != 0 { - p.handleDeletedLines(deletedLines, addedLines) - continue - } - - // no deletions, only additions - p.handleAddedOnlyLines(addedLines) - } - - if len(p.replacementLinesToPrepend) != 0 { - p.log.Infof("The diff contains only additions: no original or deleted lines: %#v", p.lines) - return nil - } - - return p.ret -} - -func getErrorTextForLinter(settings *config.LintersSettings, linterName string) string { - text := "File is not formatted" - switch linterName { - case gciName: - text = getErrorTextForGci(settings.Gci) - case gofumptName: - text = "File is not `gofumpt`-ed" - if settings.Gofumpt.ExtraRules { - text += " with `-extra`" - } - case gofmtName: - text = "File is not `gofmt`-ed" - if settings.Gofmt.Simplify { - text += " with `-s`" - } - for _, rule := range settings.Gofmt.RewriteRules { - text += fmt.Sprintf(" `-r '%s -> %s'`", rule.Pattern, rule.Replacement) - } - case goimportsName: - text = "File is not `goimports`-ed" - if settings.Goimports.LocalPrefixes != "" { - text += " with -local " + settings.Goimports.LocalPrefixes - } - } - return text -} - -func extractIssuesFromPatch(patch string, lintCtx *linter.Context, linterName string) ([]result.Issue, error) { - diffs, err := diffpkg.ParseMultiFileDiff([]byte(patch)) - if err != nil { - return nil, fmt.Errorf("can't parse patch: %w", err) - } - - if len(diffs) == 0 { - return nil, fmt.Errorf("got no diffs from patch parser: %v", patch) - } - - var issues []result.Issue - for _, d := range diffs { - if len(d.Hunks) == 0 { - lintCtx.Log.Warnf("Got no hunks in diff %+v", d) - continue - } - - for _, hunk := range d.Hunks { - p := hunkChangesParser{log: lintCtx.Log} - - changes := p.parse(hunk) - - for _, change := range changes { - change := change // fix scope - i := result.Issue{ - FromLinter: linterName, - Pos: token.Position{ - Filename: d.NewName, - Line: change.LineRange.From, - }, - Text: getErrorTextForLinter(lintCtx.Settings(), linterName), - Replacement: &change.Replacement, - } - if change.LineRange.From != change.LineRange.To { - i.LineRange = &change.LineRange - } - - issues = append(issues, i) - } - } - } - - return issues, nil -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/gofumpt.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/gofumpt.go deleted file mode 100644 index c2aaf121d..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/gofumpt.go +++ /dev/null @@ -1,119 +0,0 @@ -package golinters - -import ( - "bytes" - "fmt" - "io" - 
"os" - "sync" - - "github.com/shazow/go-diff/difflib" - "golang.org/x/tools/go/analysis" - "mvdan.cc/gofumpt/format" - - "github.com/golangci/golangci-lint/pkg/config" - "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" - "github.com/golangci/golangci-lint/pkg/lint/linter" -) - -const gofumptName = "gofumpt" - -type differ interface { - Diff(out io.Writer, a io.ReadSeeker, b io.ReadSeeker) error -} - -func NewGofumpt(settings *config.GofumptSettings) *goanalysis.Linter { - var mu sync.Mutex - var resIssues []goanalysis.Issue - - diff := difflib.New() - - var options format.Options - - if settings != nil { - options = format.Options{ - LangVersion: getLangVersion(settings), - ModulePath: settings.ModulePath, - ExtraRules: settings.ExtraRules, - } - } - - analyzer := &analysis.Analyzer{ - Name: gofumptName, - Doc: goanalysis.TheOnlyanalyzerDoc, - Run: goanalysis.DummyRun, - } - - return goanalysis.NewLinter( - gofumptName, - "Gofumpt checks whether code was gofumpt-ed.", - []*analysis.Analyzer{analyzer}, - nil, - ).WithContextSetter(func(lintCtx *linter.Context) { - analyzer.Run = func(pass *analysis.Pass) (any, error) { - issues, err := runGofumpt(lintCtx, pass, diff, options) - if err != nil { - return nil, err - } - - if len(issues) == 0 { - return nil, nil - } - - mu.Lock() - resIssues = append(resIssues, issues...) - mu.Unlock() - - return nil, nil - } - }).WithIssuesReporter(func(*linter.Context) []goanalysis.Issue { - return resIssues - }).WithLoadMode(goanalysis.LoadModeSyntax) -} - -func runGofumpt(lintCtx *linter.Context, pass *analysis.Pass, diff differ, options format.Options) ([]goanalysis.Issue, error) { - fileNames := getFileNames(pass) - - var issues []goanalysis.Issue - - for _, f := range fileNames { - input, err := os.ReadFile(f) - if err != nil { - return nil, fmt.Errorf("unable to open file %s: %w", f, err) - } - - output, err := format.Source(input, options) - if err != nil { - return nil, fmt.Errorf("error while running gofumpt: %w", err) - } - - if !bytes.Equal(input, output) { - out := bytes.NewBufferString(fmt.Sprintf("--- %[1]s\n+++ %[1]s\n", f)) - - err := diff.Diff(out, bytes.NewReader(input), bytes.NewReader(output)) - if err != nil { - return nil, fmt.Errorf("error while running gofumpt: %w", err) - } - - diff := out.String() - is, err := extractIssuesFromPatch(diff, lintCtx, gofumptName) - if err != nil { - return nil, fmt.Errorf("can't extract issues from gofumpt diff output %q: %w", diff, err) - } - - for i := range is { - issues = append(issues, goanalysis.NewIssue(&is[i], pass)) - } - } - } - - return issues, nil -} - -func getLangVersion(settings *config.GofumptSettings) string { - if settings == nil || settings.LangVersion == "" { - // TODO: defaults to "1.15", in the future (v2) must be set by using build.Default.ReleaseTags like staticcheck. 
- return "1.15" - } - return settings.LangVersion -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/goheader.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/goheader.go deleted file mode 100644 index d3cfefa90..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/goheader.go +++ /dev/null @@ -1,104 +0,0 @@ -package golinters - -import ( - "go/token" - "sync" - - goheader "github.com/denis-tingaikin/go-header" - "golang.org/x/tools/go/analysis" - - "github.com/golangci/golangci-lint/pkg/config" - "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" - "github.com/golangci/golangci-lint/pkg/lint/linter" - "github.com/golangci/golangci-lint/pkg/result" -) - -const goHeaderName = "goheader" - -func NewGoHeader(settings *config.GoHeaderSettings) *goanalysis.Linter { - var mu sync.Mutex - var resIssues []goanalysis.Issue - - conf := &goheader.Configuration{} - if settings != nil { - conf = &goheader.Configuration{ - Values: settings.Values, - Template: settings.Template, - TemplatePath: settings.TemplatePath, - } - } - - analyzer := &analysis.Analyzer{ - Name: goHeaderName, - Doc: goanalysis.TheOnlyanalyzerDoc, - Run: func(pass *analysis.Pass) (any, error) { - issues, err := runGoHeader(pass, conf) - if err != nil { - return nil, err - } - - if len(issues) == 0 { - return nil, nil - } - - mu.Lock() - resIssues = append(resIssues, issues...) - mu.Unlock() - - return nil, nil - }, - } - - return goanalysis.NewLinter( - goHeaderName, - "Checks is file header matches to pattern", - []*analysis.Analyzer{analyzer}, - nil, - ).WithIssuesReporter(func(*linter.Context) []goanalysis.Issue { - return resIssues - }).WithLoadMode(goanalysis.LoadModeSyntax) -} - -func runGoHeader(pass *analysis.Pass, conf *goheader.Configuration) ([]goanalysis.Issue, error) { - if conf.TemplatePath == "" && conf.Template == "" { - // User did not pass template, so then do not run go-header linter - return nil, nil - } - - template, err := conf.GetTemplate() - if err != nil { - return nil, err - } - - values, err := conf.GetValues() - if err != nil { - return nil, err - } - - a := goheader.New(goheader.WithTemplate(template), goheader.WithValues(values)) - - var issues []goanalysis.Issue - for _, file := range pass.Files { - path := pass.Fset.Position(file.Pos()).Filename - - i := a.Analyze(&goheader.Target{File: file, Path: path}) - - if i == nil { - continue - } - - issue := result.Issue{ - Pos: token.Position{ - Line: i.Location().Line + 1, - Column: i.Location().Position, - Filename: path, - }, - Text: i.Message(), - FromLinter: goHeaderName, - } - - issues = append(issues, goanalysis.NewIssue(&issue, pass)) - } - - return issues, nil -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/goimports.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/goimports.go deleted file mode 100644 index aac27f38e..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/goimports.go +++ /dev/null @@ -1,83 +0,0 @@ -package golinters - -import ( - "fmt" - "sync" - - goimportsAPI "github.com/golangci/gofmt/goimports" - "golang.org/x/tools/go/analysis" - "golang.org/x/tools/imports" - - "github.com/golangci/golangci-lint/pkg/config" - "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" - "github.com/golangci/golangci-lint/pkg/lint/linter" -) - -const goimportsName = "goimports" - -func NewGoimports(settings *config.GoImportsSettings) *goanalysis.Linter { - var mu sync.Mutex - var resIssues []goanalysis.Issue - - analyzer := 
&analysis.Analyzer{ - Name: goimportsName, - Doc: goanalysis.TheOnlyanalyzerDoc, - Run: goanalysis.DummyRun, - } - - return goanalysis.NewLinter( - goimportsName, - "Check import statements are formatted according to the 'goimport' command. "+ - "Reformat imports in autofix mode.", - []*analysis.Analyzer{analyzer}, - nil, - ).WithContextSetter(func(lintCtx *linter.Context) { - imports.LocalPrefix = settings.LocalPrefixes - - analyzer.Run = func(pass *analysis.Pass) (any, error) { - issues, err := runGoImports(lintCtx, pass) - if err != nil { - return nil, err - } - - if len(issues) == 0 { - return nil, nil - } - - mu.Lock() - resIssues = append(resIssues, issues...) - mu.Unlock() - - return nil, nil - } - }).WithIssuesReporter(func(*linter.Context) []goanalysis.Issue { - return resIssues - }).WithLoadMode(goanalysis.LoadModeSyntax) -} - -func runGoImports(lintCtx *linter.Context, pass *analysis.Pass) ([]goanalysis.Issue, error) { - fileNames := getFileNames(pass) - - var issues []goanalysis.Issue - - for _, f := range fileNames { - diff, err := goimportsAPI.Run(f) - if err != nil { // TODO: skip - return nil, err - } - if diff == nil { - continue - } - - is, err := extractIssuesFromPatch(string(diff), lintCtx, goimportsName) - if err != nil { - return nil, fmt.Errorf("can't extract issues from gofmt diff output %q: %w", string(diff), err) - } - - for i := range is { - issues = append(issues, goanalysis.NewIssue(&is[i], pass)) - } - } - - return issues, nil -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/golint.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/golint.go deleted file mode 100644 index 22ca59048..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/golint.go +++ /dev/null @@ -1,84 +0,0 @@ -package golinters - -import ( - "fmt" - "sync" - - lintAPI "github.com/golangci/lint-1" - "golang.org/x/tools/go/analysis" - - "github.com/golangci/golangci-lint/pkg/config" - "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" - "github.com/golangci/golangci-lint/pkg/lint/linter" - "github.com/golangci/golangci-lint/pkg/result" -) - -const golintName = "golint" - -//nolint:dupl -func NewGolint(settings *config.GoLintSettings) *goanalysis.Linter { - var mu sync.Mutex - var resIssues []goanalysis.Issue - - analyzer := &analysis.Analyzer{ - Name: golintName, - Doc: goanalysis.TheOnlyanalyzerDoc, - Run: func(pass *analysis.Pass) (any, error) { - issues, err := runGoLint(pass, settings) - if err != nil { - return nil, err - } - - if len(issues) == 0 { - return nil, nil - } - - mu.Lock() - resIssues = append(resIssues, issues...) - mu.Unlock() - - return nil, nil - }, - } - - return goanalysis.NewLinter( - golintName, - "Golint differs from gofmt. 
Gofmt reformats Go source code, whereas golint prints out style mistakes", - []*analysis.Analyzer{analyzer}, - nil, - ).WithIssuesReporter(func(*linter.Context) []goanalysis.Issue { - return resIssues - }).WithLoadMode(goanalysis.LoadModeTypesInfo) -} - -func runGoLint(pass *analysis.Pass, settings *config.GoLintSettings) ([]goanalysis.Issue, error) { - l := new(lintAPI.Linter) - - ps, err := l.LintPkg(pass.Files, pass.Fset, pass.Pkg, pass.TypesInfo) - if err != nil { - return nil, fmt.Errorf("can't lint %d files: %w", len(pass.Files), err) - } - - if len(ps) == 0 { - return nil, nil - } - - lintIssues := make([]*result.Issue, 0, len(ps)) // This is worst case - for idx := range ps { - if ps[idx].Confidence >= settings.MinConfidence { - lintIssues = append(lintIssues, &result.Issue{ - Pos: ps[idx].Position, - Text: ps[idx].Text, - FromLinter: golintName, - }) - // TODO: use p.Link and p.Category - } - } - - issues := make([]goanalysis.Issue, 0, len(lintIssues)) - for _, issue := range lintIssues { - issues = append(issues, goanalysis.NewIssue(issue, pass)) - } - - return issues, nil -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/gomnd.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/gomnd.go deleted file mode 100644 index 2e6d77a80..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/gomnd.go +++ /dev/null @@ -1,45 +0,0 @@ -package golinters - -import ( - mnd "github.com/tommy-muehle/go-mnd/v2" - "golang.org/x/tools/go/analysis" - - "github.com/golangci/golangci-lint/pkg/config" - "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" -) - -func NewGoMND(settings *config.GoMndSettings) *goanalysis.Linter { - var linterCfg map[string]map[string]any - - if settings != nil { - // TODO(ldez) For compatibility only, must be drop in v2. - if len(settings.Settings) > 0 { - linterCfg = settings.Settings - } else { - cfg := make(map[string]any) - if len(settings.Checks) > 0 { - cfg["checks"] = settings.Checks - } - if len(settings.IgnoredNumbers) > 0 { - cfg["ignored-numbers"] = settings.IgnoredNumbers - } - if len(settings.IgnoredFiles) > 0 { - cfg["ignored-files"] = settings.IgnoredFiles - } - if len(settings.IgnoredFunctions) > 0 { - cfg["ignored-functions"] = settings.IgnoredFunctions - } - - linterCfg = map[string]map[string]any{ - "mnd": cfg, - } - } - } - - return goanalysis.NewLinter( - "gomnd", - "An analyzer to detect magic numbers.", - []*analysis.Analyzer{mnd.Analyzer}, - linterCfg, - ).WithLoadMode(goanalysis.LoadModeSyntax) -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/gomoddirectives.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/gomoddirectives.go deleted file mode 100644 index 56afcd465..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/gomoddirectives.go +++ /dev/null @@ -1,65 +0,0 @@ -package golinters - -import ( - "sync" - - "github.com/ldez/gomoddirectives" - "golang.org/x/tools/go/analysis" - - "github.com/golangci/golangci-lint/pkg/config" - "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" - "github.com/golangci/golangci-lint/pkg/lint/linter" - "github.com/golangci/golangci-lint/pkg/result" -) - -const goModDirectivesName = "gomoddirectives" - -// NewGoModDirectives returns a new gomoddirectives linter. 
-func NewGoModDirectives(settings *config.GoModDirectivesSettings) *goanalysis.Linter { - var issues []goanalysis.Issue - var once sync.Once - - var opts gomoddirectives.Options - if settings != nil { - opts.ReplaceAllowLocal = settings.ReplaceLocal - opts.ReplaceAllowList = settings.ReplaceAllowList - opts.RetractAllowNoExplanation = settings.RetractAllowNoExplanation - opts.ExcludeForbidden = settings.ExcludeForbidden - } - - analyzer := &analysis.Analyzer{ - Name: goanalysis.TheOnlyAnalyzerName, - Doc: goanalysis.TheOnlyanalyzerDoc, - Run: goanalysis.DummyRun, - } - - return goanalysis.NewLinter( - goModDirectivesName, - "Manage the use of 'replace', 'retract', and 'excludes' directives in go.mod.", - []*analysis.Analyzer{analyzer}, - nil, - ).WithContextSetter(func(lintCtx *linter.Context) { - analyzer.Run = func(pass *analysis.Pass) (any, error) { - once.Do(func() { - results, err := gomoddirectives.Analyze(opts) - if err != nil { - lintCtx.Log.Warnf("running %s failed: %s: "+ - "if you are not using go modules it is suggested to disable this linter", goModDirectivesName, err) - return - } - - for _, p := range results { - issues = append(issues, goanalysis.NewIssue(&result.Issue{ - FromLinter: goModDirectivesName, - Pos: p.Start, - Text: p.Reason, - }, pass)) - } - }) - - return nil, nil - } - }).WithIssuesReporter(func(*linter.Context) []goanalysis.Issue { - return issues - }).WithLoadMode(goanalysis.LoadModeSyntax) -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/gomodguard.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/gomodguard.go deleted file mode 100644 index 157bf56c3..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/gomodguard.go +++ /dev/null @@ -1,94 +0,0 @@ -package golinters - -import ( - "sync" - - "github.com/ryancurrah/gomodguard" - "golang.org/x/tools/go/analysis" - - "github.com/golangci/golangci-lint/pkg/config" - "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" - "github.com/golangci/golangci-lint/pkg/lint/linter" - "github.com/golangci/golangci-lint/pkg/result" -) - -const ( - gomodguardName = "gomodguard" - gomodguardDesc = "Allow and block list linter for direct Go module dependencies. " + - "This is different from depguard where there are different block " + - "types for example version constraints and module recommendations." -) - -// NewGomodguard returns a new Gomodguard linter. 
-func NewGomodguard(settings *config.GoModGuardSettings) *goanalysis.Linter { - var issues []goanalysis.Issue - var mu sync.Mutex - - processorCfg := &gomodguard.Configuration{} - if settings != nil { - processorCfg.Allowed.Modules = settings.Allowed.Modules - processorCfg.Allowed.Domains = settings.Allowed.Domains - processorCfg.Blocked.LocalReplaceDirectives = settings.Blocked.LocalReplaceDirectives - - for n := range settings.Blocked.Modules { - for k, v := range settings.Blocked.Modules[n] { - m := map[string]gomodguard.BlockedModule{k: { - Recommendations: v.Recommendations, - Reason: v.Reason, - }} - processorCfg.Blocked.Modules = append(processorCfg.Blocked.Modules, m) - break - } - } - - for n := range settings.Blocked.Versions { - for k, v := range settings.Blocked.Versions[n] { - m := map[string]gomodguard.BlockedVersion{k: { - Version: v.Version, - Reason: v.Reason, - }} - processorCfg.Blocked.Versions = append(processorCfg.Blocked.Versions, m) - break - } - } - } - - analyzer := &analysis.Analyzer{ - Name: goanalysis.TheOnlyAnalyzerName, - Doc: goanalysis.TheOnlyanalyzerDoc, - Run: goanalysis.DummyRun, - } - - return goanalysis.NewLinter( - gomodguardName, - gomodguardDesc, - []*analysis.Analyzer{analyzer}, - nil, - ).WithContextSetter(func(lintCtx *linter.Context) { - processor, err := gomodguard.NewProcessor(processorCfg) - if err != nil { - lintCtx.Log.Warnf("running gomodguard failed: %s: if you are not using go modules "+ - "it is suggested to disable this linter", err) - return - } - - analyzer.Run = func(pass *analysis.Pass) (any, error) { - gomodguardIssues := processor.ProcessFiles(getFileNames(pass)) - - mu.Lock() - defer mu.Unlock() - - for _, gomodguardIssue := range gomodguardIssues { - issues = append(issues, goanalysis.NewIssue(&result.Issue{ - FromLinter: gomodguardName, - Pos: gomodguardIssue.Position, - Text: gomodguardIssue.Reason, - }, pass)) - } - - return nil, nil - } - }).WithIssuesReporter(func(*linter.Context) []goanalysis.Issue { - return issues - }).WithLoadMode(goanalysis.LoadModeSyntax) -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/goprintffuncname.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/goprintffuncname.go deleted file mode 100644 index e513718ba..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/goprintffuncname.go +++ /dev/null @@ -1,19 +0,0 @@ -package golinters - -import ( - "github.com/jirfag/go-printf-func-name/pkg/analyzer" - "golang.org/x/tools/go/analysis" - - "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" -) - -func NewGoPrintfFuncName() *goanalysis.Linter { - a := analyzer.Analyzer - - return goanalysis.NewLinter( - a.Name, - a.Doc, - []*analysis.Analyzer{a}, - nil, - ).WithLoadMode(goanalysis.LoadModeSyntax) -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/gosec.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/gosec.go deleted file mode 100644 index 235f0e914..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/gosec.go +++ /dev/null @@ -1,205 +0,0 @@ -package golinters - -import ( - "fmt" - "go/token" - "io" - "log" - "strconv" - "strings" - "sync" - - "github.com/securego/gosec/v2" - "github.com/securego/gosec/v2/issue" - "github.com/securego/gosec/v2/rules" - "golang.org/x/tools/go/analysis" - "golang.org/x/tools/go/packages" - - "github.com/golangci/golangci-lint/pkg/config" - "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" - "github.com/golangci/golangci-lint/pkg/lint/linter" - 
"github.com/golangci/golangci-lint/pkg/result" -) - -const gosecName = "gosec" - -func NewGosec(settings *config.GoSecSettings) *goanalysis.Linter { - var mu sync.Mutex - var resIssues []goanalysis.Issue - - var filters []rules.RuleFilter - conf := gosec.NewConfig() - if settings != nil { - filters = gosecRuleFilters(settings.Includes, settings.Excludes) - conf = toGosecConfig(settings) - } - - logger := log.New(io.Discard, "", 0) - - ruleDefinitions := rules.Generate(false, filters...) - - analyzer := &analysis.Analyzer{ - Name: gosecName, - Doc: goanalysis.TheOnlyanalyzerDoc, - Run: goanalysis.DummyRun, - } - - return goanalysis.NewLinter( - gosecName, - "Inspects source code for security problems", - []*analysis.Analyzer{analyzer}, - nil, - ).WithContextSetter(func(lintCtx *linter.Context) { - analyzer.Run = func(pass *analysis.Pass) (any, error) { - // The `gosecAnalyzer` is here because of concurrency issue. - gosecAnalyzer := gosec.NewAnalyzer(conf, true, settings.ExcludeGenerated, false, settings.Concurrency, logger) - gosecAnalyzer.LoadRules(ruleDefinitions.RulesInfo()) - - issues := runGoSec(lintCtx, pass, settings, gosecAnalyzer) - - mu.Lock() - resIssues = append(resIssues, issues...) - mu.Unlock() - - return nil, nil - } - }).WithIssuesReporter(func(*linter.Context) []goanalysis.Issue { - return resIssues - }).WithLoadMode(goanalysis.LoadModeTypesInfo) -} - -func runGoSec(lintCtx *linter.Context, pass *analysis.Pass, settings *config.GoSecSettings, analyzer *gosec.Analyzer) []goanalysis.Issue { - pkg := &packages.Package{ - Fset: pass.Fset, - Syntax: pass.Files, - Types: pass.Pkg, - TypesInfo: pass.TypesInfo, - } - - analyzer.CheckRules(pkg) - - secIssues, _, _ := analyzer.Report() - if len(secIssues) == 0 { - return nil - } - - severity, err := convertToScore(settings.Severity) - if err != nil { - lintCtx.Log.Warnf("The provided severity %v", err) - } - - confidence, err := convertToScore(settings.Confidence) - if err != nil { - lintCtx.Log.Warnf("The provided confidence %v", err) - } - - secIssues = filterIssues(secIssues, severity, confidence) - - issues := make([]goanalysis.Issue, 0, len(secIssues)) - for _, i := range secIssues { - text := fmt.Sprintf("%s: %s", i.RuleID, i.What) // TODO: use severity and confidence - - var r *result.Range - - line, err := strconv.Atoi(i.Line) - if err != nil { - r = &result.Range{} - if n, rerr := fmt.Sscanf(i.Line, "%d-%d", &r.From, &r.To); rerr != nil || n != 2 { - lintCtx.Log.Warnf("Can't convert gosec line number %q of %v to int: %s", i.Line, i, err) - continue - } - line = r.From - } - - column, err := strconv.Atoi(i.Col) - if err != nil { - lintCtx.Log.Warnf("Can't convert gosec column number %q of %v to int: %s", i.Col, i, err) - continue - } - - issues = append(issues, goanalysis.NewIssue(&result.Issue{ - Pos: token.Position{ - Filename: i.File, - Line: line, - Column: column, - }, - Text: text, - LineRange: r, - FromLinter: gosecName, - }, pass)) - } - - return issues -} - -func toGosecConfig(settings *config.GoSecSettings) gosec.Config { - conf := gosec.NewConfig() - - for k, v := range settings.Config { - if k == gosec.Globals { - convertGosecGlobals(v, conf) - continue - } - - // Uses ToUpper because the parsing of the map's key change the key to lowercase. - // The value is not impacted by that: the case is respected. 
- conf.Set(strings.ToUpper(k), v) - } - - return conf -} - -// based on https://github.com/securego/gosec/blob/47bfd4eb6fc7395940933388550b547538b4c946/config.go#L52-L62 -func convertGosecGlobals(globalOptionFromConfig any, conf gosec.Config) { - globalOptionMap, ok := globalOptionFromConfig.(map[string]any) - if !ok { - return - } - - for k, v := range globalOptionMap { - conf.SetGlobal(gosec.GlobalOption(k), fmt.Sprintf("%v", v)) - } -} - -// based on https://github.com/securego/gosec/blob/569328eade2ccbad4ce2d0f21ee158ab5356a5cf/cmd/gosec/main.go#L170-L188 -func gosecRuleFilters(includes, excludes []string) []rules.RuleFilter { - var filters []rules.RuleFilter - - if len(includes) > 0 { - filters = append(filters, rules.NewRuleFilter(false, includes...)) - } - - if len(excludes) > 0 { - filters = append(filters, rules.NewRuleFilter(true, excludes...)) - } - - return filters -} - -// code borrowed from https://github.com/securego/gosec/blob/69213955dacfd560562e780f723486ef1ca6d486/cmd/gosec/main.go#L250-L262 -func convertToScore(str string) (issue.Score, error) { - str = strings.ToLower(str) - switch str { - case "", "low": - return issue.Low, nil - case "medium": - return issue.Medium, nil - case "high": - return issue.High, nil - default: - return issue.Low, fmt.Errorf("'%s' is invalid, use low instead. Valid options: low, medium, high", str) - } -} - -// code borrowed from https://github.com/securego/gosec/blob/69213955dacfd560562e780f723486ef1ca6d486/cmd/gosec/main.go#L264-L276 -func filterIssues(issues []*issue.Issue, severity, confidence issue.Score) []*issue.Issue { - res := make([]*issue.Issue, 0) - - for _, i := range issues { - if i.Severity >= severity && i.Confidence >= confidence { - res = append(res, i) - } - } - - return res -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/gosimple.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/gosimple.go deleted file mode 100644 index de60ded73..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/gosimple.go +++ /dev/null @@ -1,21 +0,0 @@ -package golinters - -import ( - "honnef.co/go/tools/simple" - - "github.com/golangci/golangci-lint/pkg/config" - "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" -) - -func NewGosimple(settings *config.StaticCheckSettings) *goanalysis.Linter { - cfg := staticCheckConfig(settings) - - analyzers := setupStaticCheckAnalyzers(simple.Analyzers, getGoVersion(settings), cfg.Checks) - - return goanalysis.NewLinter( - "gosimple", - "Linter for Go source code that specializes in simplifying code", - analyzers, - nil, - ).WithLoadMode(goanalysis.LoadModeTypesInfo) -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/gosmopolitan.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/gosmopolitan.go deleted file mode 100644 index 2e01fcc70..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/gosmopolitan.go +++ /dev/null @@ -1,32 +0,0 @@ -package golinters - -import ( - "strings" - - "github.com/xen0n/gosmopolitan" - "golang.org/x/tools/go/analysis" - - "github.com/golangci/golangci-lint/pkg/config" - "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" -) - -func NewGosmopolitan(s *config.GosmopolitanSettings) *goanalysis.Linter { - a := gosmopolitan.NewAnalyzer() - - cfgMap := map[string]map[string]any{} - if s != nil { - cfgMap[a.Name] = map[string]any{ - "allowtimelocal": s.AllowTimeLocal, - "escapehatches": strings.Join(s.EscapeHatches, ","), - "lookattests": !s.IgnoreTests, - "watchforscripts": 
strings.Join(s.WatchForScripts, ","), - } - } - - return goanalysis.NewLinter( - a.Name, - a.Doc, - []*analysis.Analyzer{a}, - cfgMap, - ).WithLoadMode(goanalysis.LoadModeTypesInfo) -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/govet.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/govet.go deleted file mode 100644 index a0d33835d..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/govet.go +++ /dev/null @@ -1,196 +0,0 @@ -package golinters - -import ( - "slices" - - "golang.org/x/tools/go/analysis" - "golang.org/x/tools/go/analysis/passes/appends" - "golang.org/x/tools/go/analysis/passes/asmdecl" - "golang.org/x/tools/go/analysis/passes/assign" - "golang.org/x/tools/go/analysis/passes/atomic" - "golang.org/x/tools/go/analysis/passes/atomicalign" - "golang.org/x/tools/go/analysis/passes/bools" - _ "golang.org/x/tools/go/analysis/passes/buildssa" // unused, internal analyzer - "golang.org/x/tools/go/analysis/passes/buildtag" - "golang.org/x/tools/go/analysis/passes/cgocall" - "golang.org/x/tools/go/analysis/passes/composite" - "golang.org/x/tools/go/analysis/passes/copylock" - _ "golang.org/x/tools/go/analysis/passes/ctrlflow" // unused, internal analyzer - "golang.org/x/tools/go/analysis/passes/deepequalerrors" - "golang.org/x/tools/go/analysis/passes/defers" - "golang.org/x/tools/go/analysis/passes/directive" - "golang.org/x/tools/go/analysis/passes/errorsas" - "golang.org/x/tools/go/analysis/passes/fieldalignment" - "golang.org/x/tools/go/analysis/passes/findcall" - "golang.org/x/tools/go/analysis/passes/framepointer" - "golang.org/x/tools/go/analysis/passes/httpresponse" - "golang.org/x/tools/go/analysis/passes/ifaceassert" - _ "golang.org/x/tools/go/analysis/passes/inspect" // unused internal analyzer - "golang.org/x/tools/go/analysis/passes/loopclosure" - "golang.org/x/tools/go/analysis/passes/lostcancel" - "golang.org/x/tools/go/analysis/passes/nilfunc" - "golang.org/x/tools/go/analysis/passes/nilness" - _ "golang.org/x/tools/go/analysis/passes/pkgfact" // unused, internal analyzer - "golang.org/x/tools/go/analysis/passes/printf" - "golang.org/x/tools/go/analysis/passes/reflectvaluecompare" - "golang.org/x/tools/go/analysis/passes/shadow" - "golang.org/x/tools/go/analysis/passes/shift" - "golang.org/x/tools/go/analysis/passes/sigchanyzer" - "golang.org/x/tools/go/analysis/passes/slog" - "golang.org/x/tools/go/analysis/passes/sortslice" - "golang.org/x/tools/go/analysis/passes/stdmethods" - "golang.org/x/tools/go/analysis/passes/stringintconv" - "golang.org/x/tools/go/analysis/passes/structtag" - "golang.org/x/tools/go/analysis/passes/testinggoroutine" - "golang.org/x/tools/go/analysis/passes/tests" - "golang.org/x/tools/go/analysis/passes/timeformat" - "golang.org/x/tools/go/analysis/passes/unmarshal" - "golang.org/x/tools/go/analysis/passes/unreachable" - "golang.org/x/tools/go/analysis/passes/unsafeptr" - "golang.org/x/tools/go/analysis/passes/unusedresult" - "golang.org/x/tools/go/analysis/passes/unusedwrite" - - "github.com/golangci/golangci-lint/pkg/config" - "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" -) - -var ( - allAnalyzers = []*analysis.Analyzer{ - appends.Analyzer, - asmdecl.Analyzer, - assign.Analyzer, - atomic.Analyzer, - atomicalign.Analyzer, - bools.Analyzer, - buildtag.Analyzer, - cgocall.Analyzer, - composite.Analyzer, - copylock.Analyzer, - deepequalerrors.Analyzer, - defers.Analyzer, - directive.Analyzer, - errorsas.Analyzer, - fieldalignment.Analyzer, - findcall.Analyzer, - 
framepointer.Analyzer, - httpresponse.Analyzer, - ifaceassert.Analyzer, - loopclosure.Analyzer, - lostcancel.Analyzer, - nilfunc.Analyzer, - nilness.Analyzer, - printf.Analyzer, - reflectvaluecompare.Analyzer, - shadow.Analyzer, - shift.Analyzer, - sigchanyzer.Analyzer, - slog.Analyzer, - sortslice.Analyzer, - stdmethods.Analyzer, - stringintconv.Analyzer, - structtag.Analyzer, - testinggoroutine.Analyzer, - tests.Analyzer, - timeformat.Analyzer, - unmarshal.Analyzer, - unreachable.Analyzer, - unsafeptr.Analyzer, - unusedresult.Analyzer, - unusedwrite.Analyzer, - } - - // https://github.com/golang/go/blob/b56645a87b28840a180d64077877cb46570b4176/src/cmd/vet/main.go#L49-L81 - defaultAnalyzers = []*analysis.Analyzer{ - appends.Analyzer, - asmdecl.Analyzer, - assign.Analyzer, - atomic.Analyzer, - bools.Analyzer, - buildtag.Analyzer, - cgocall.Analyzer, - composite.Analyzer, - copylock.Analyzer, - defers.Analyzer, - directive.Analyzer, - errorsas.Analyzer, - framepointer.Analyzer, - httpresponse.Analyzer, - ifaceassert.Analyzer, - loopclosure.Analyzer, - lostcancel.Analyzer, - nilfunc.Analyzer, - printf.Analyzer, - shift.Analyzer, - sigchanyzer.Analyzer, - slog.Analyzer, - stdmethods.Analyzer, - stringintconv.Analyzer, - structtag.Analyzer, - testinggoroutine.Analyzer, - tests.Analyzer, - timeformat.Analyzer, - unmarshal.Analyzer, - unreachable.Analyzer, - unsafeptr.Analyzer, - unusedresult.Analyzer, - } -) - -func NewGovet(settings *config.GovetSettings) *goanalysis.Linter { - var conf map[string]map[string]any - if settings != nil { - conf = settings.Settings - } - - return goanalysis.NewLinter( - "govet", - "Vet examines Go source code and reports suspicious constructs, "+ - "such as Printf calls whose arguments do not align with the format string", - analyzersFromConfig(settings), - conf, - ).WithLoadMode(goanalysis.LoadModeTypesInfo) -} - -func analyzersFromConfig(settings *config.GovetSettings) []*analysis.Analyzer { - if settings == nil { - return defaultAnalyzers - } - - if settings.CheckShadowing { - // Keeping for backward compatibility. 
- settings.Enable = append(settings.Enable, shadow.Analyzer.Name) - } - - var enabledAnalyzers []*analysis.Analyzer - for _, a := range allAnalyzers { - if isAnalyzerEnabled(a.Name, settings, defaultAnalyzers) { - enabledAnalyzers = append(enabledAnalyzers, a) - } - } - - return enabledAnalyzers -} - -func isAnalyzerEnabled(name string, cfg *config.GovetSettings, defaultAnalyzers []*analysis.Analyzer) bool { - // TODO(ldez) remove loopclosure when go1.23 - if name == loopclosure.Analyzer.Name && config.IsGreaterThanOrEqualGo122(cfg.Go) { - return false - } - - switch { - case cfg.EnableAll: - return !slices.Contains(cfg.Disable, name) - - case slices.Contains(cfg.Enable, name): - return true - - case slices.Contains(cfg.Disable, name): - return false - - case cfg.DisableAll: - return false - - default: - return slices.ContainsFunc(defaultAnalyzers, func(a *analysis.Analyzer) bool { return a.Name == name }) - } -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/grouper.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/grouper.go deleted file mode 100644 index 41761f2ae..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/grouper.go +++ /dev/null @@ -1,32 +0,0 @@ -package golinters - -import ( - grouper "github.com/leonklingele/grouper/pkg/analyzer" - "golang.org/x/tools/go/analysis" - - "github.com/golangci/golangci-lint/pkg/config" - "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" -) - -func NewGrouper(settings *config.GrouperSettings) *goanalysis.Linter { - linterCfg := map[string]map[string]any{} - if settings != nil { - linterCfg["grouper"] = map[string]any{ - "const-require-single-const": settings.ConstRequireSingleConst, - "const-require-grouping": settings.ConstRequireGrouping, - "import-require-single-import": settings.ImportRequireSingleImport, - "import-require-grouping": settings.ImportRequireGrouping, - "type-require-single-type": settings.TypeRequireSingleType, - "type-require-grouping": settings.TypeRequireGrouping, - "var-require-single-var": settings.VarRequireSingleVar, - "var-require-grouping": settings.VarRequireGrouping, - } - } - - return goanalysis.NewLinter( - "grouper", - "Analyze expression groups.", - []*analysis.Analyzer{grouper.New()}, - linterCfg, - ).WithLoadMode(goanalysis.LoadModeSyntax) -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/ifshort.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/ifshort.go deleted file mode 100644 index 50e2c172e..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/ifshort.go +++ /dev/null @@ -1,30 +0,0 @@ -package golinters - -import ( - "github.com/esimonov/ifshort/pkg/analyzer" - "golang.org/x/tools/go/analysis" - - "github.com/golangci/golangci-lint/pkg/config" - "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" -) - -func NewIfshort(settings *config.IfshortSettings) *goanalysis.Linter { - var cfg map[string]map[string]any - if settings != nil { - cfg = map[string]map[string]any{ - analyzer.Analyzer.Name: { - "max-decl-lines": settings.MaxDeclLines, - "max-decl-chars": settings.MaxDeclChars, - }, - } - } - - a := analyzer.Analyzer - - return goanalysis.NewLinter( - a.Name, - a.Doc, - []*analysis.Analyzer{a}, - cfg, - ).WithLoadMode(goanalysis.LoadModeSyntax) -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/importas.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/importas.go deleted file mode 100644 index b06aec7a3..000000000 --- 
a/vendor/github.com/golangci/golangci-lint/pkg/golinters/importas.go +++ /dev/null @@ -1,67 +0,0 @@ -package golinters - -import ( - "fmt" - "strconv" - "strings" - - "github.com/julz/importas" //nolint:misspell - "golang.org/x/tools/go/analysis" - - "github.com/golangci/golangci-lint/pkg/config" - "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" - "github.com/golangci/golangci-lint/pkg/lint/linter" -) - -func NewImportAs(settings *config.ImportAsSettings) *goanalysis.Linter { - analyzer := importas.Analyzer - - return goanalysis.NewLinter( - analyzer.Name, - analyzer.Doc, - []*analysis.Analyzer{analyzer}, - nil, - ).WithContextSetter(func(lintCtx *linter.Context) { - if settings == nil { - return - } - if len(settings.Alias) == 0 { - lintCtx.Log.Infof("importas settings found, but no aliases listed. List aliases under alias: key.") //nolint:misspell - } - - if err := analyzer.Flags.Set("no-unaliased", strconv.FormatBool(settings.NoUnaliased)); err != nil { - lintCtx.Log.Errorf("failed to parse configuration: %v", err) - } - - if err := analyzer.Flags.Set("no-extra-aliases", strconv.FormatBool(settings.NoExtraAliases)); err != nil { - lintCtx.Log.Errorf("failed to parse configuration: %v", err) - } - - uniqPackages := make(map[string]config.ImportAsAlias) - uniqAliases := make(map[string]config.ImportAsAlias) - for _, a := range settings.Alias { - if a.Pkg == "" { - lintCtx.Log.Errorf("invalid configuration, empty package: pkg=%s alias=%s", a.Pkg, a.Alias) - continue - } - - if v, ok := uniqPackages[a.Pkg]; ok { - lintCtx.Log.Errorf("invalid configuration, multiple aliases for the same package: pkg=%s aliases=[%s,%s]", a.Pkg, a.Alias, v.Alias) - } else { - uniqPackages[a.Pkg] = a - } - - // skip the duplication check when the alias is a regular expression replacement pattern (ie. contains `$`). 
- if v, ok := uniqAliases[a.Alias]; ok && !strings.Contains(a.Alias, "$") { - lintCtx.Log.Errorf("invalid configuration, multiple packages with the same alias: alias=%s packages=[%s,%s]", a.Alias, a.Pkg, v.Pkg) - } else { - uniqAliases[a.Alias] = a - } - - err := analyzer.Flags.Set("alias", fmt.Sprintf("%s:%s", a.Pkg, a.Alias)) - if err != nil { - lintCtx.Log.Errorf("failed to parse configuration: %v", err) - } - } - }).WithLoadMode(goanalysis.LoadModeTypesInfo) -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/inamedparam.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/inamedparam.go deleted file mode 100644 index 887f3db2a..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/inamedparam.go +++ /dev/null @@ -1,30 +0,0 @@ -package golinters - -import ( - "github.com/macabu/inamedparam" - "golang.org/x/tools/go/analysis" - - "github.com/golangci/golangci-lint/pkg/config" - "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" -) - -func NewINamedParam(settings *config.INamedParamSettings) *goanalysis.Linter { - a := inamedparam.Analyzer - - var cfg map[string]map[string]any - - if settings != nil { - cfg = map[string]map[string]any{ - a.Name: { - "skip-single-param": settings.SkipSingleParam, - }, - } - } - - return goanalysis.NewLinter( - a.Name, - a.Doc, - []*analysis.Analyzer{a}, - cfg, - ).WithLoadMode(goanalysis.LoadModeSyntax) -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/ineffassign.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/ineffassign.go deleted file mode 100644 index ac5eb20ad..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/ineffassign.go +++ /dev/null @@ -1,19 +0,0 @@ -package golinters - -import ( - "github.com/gordonklaus/ineffassign/pkg/ineffassign" - "golang.org/x/tools/go/analysis" - - "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" -) - -func NewIneffassign() *goanalysis.Linter { - a := ineffassign.Analyzer - - return goanalysis.NewLinter( - a.Name, - "Detects when assignments to existing variables are not used", - []*analysis.Analyzer{a}, - nil, - ).WithLoadMode(goanalysis.LoadModeSyntax) -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/interfacebloat.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/interfacebloat.go deleted file mode 100644 index a6dbfe178..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/interfacebloat.go +++ /dev/null @@ -1,29 +0,0 @@ -package golinters - -import ( - "github.com/sashamelentyev/interfacebloat/pkg/analyzer" - "golang.org/x/tools/go/analysis" - - "github.com/golangci/golangci-lint/pkg/config" - "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" -) - -func NewInterfaceBloat(settings *config.InterfaceBloatSettings) *goanalysis.Linter { - a := analyzer.New() - - var cfg map[string]map[string]any - if settings != nil { - cfg = map[string]map[string]any{ - a.Name: { - analyzer.InterfaceMaxMethodsFlag: settings.Max, - }, - } - } - - return goanalysis.NewLinter( - a.Name, - a.Doc, - []*analysis.Analyzer{a}, - cfg, - ).WithLoadMode(goanalysis.LoadModeSyntax) -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/interfacer.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/interfacer.go deleted file mode 100644 index 71bdfddbe..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/interfacer.go +++ /dev/null @@ -1,82 +0,0 @@ -package golinters - -import ( - "sync" - - "golang.org/x/tools/go/analysis" - 
"golang.org/x/tools/go/analysis/passes/buildssa" - "mvdan.cc/interfacer/check" - - "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" - "github.com/golangci/golangci-lint/pkg/lint/linter" - "github.com/golangci/golangci-lint/pkg/result" -) - -const interfacerName = "interfacer" - -func NewInterfacer() *goanalysis.Linter { - var mu sync.Mutex - var resIssues []goanalysis.Issue - - analyzer := &analysis.Analyzer{ - Name: interfacerName, - Doc: goanalysis.TheOnlyanalyzerDoc, - Requires: []*analysis.Analyzer{buildssa.Analyzer}, - Run: func(pass *analysis.Pass) (any, error) { - issues, err := runInterfacer(pass) - if err != nil { - return nil, err - } - - if len(issues) == 0 { - return nil, nil - } - - mu.Lock() - resIssues = append(resIssues, issues...) - mu.Unlock() - - return nil, nil - }, - } - - return goanalysis.NewLinter( - interfacerName, - "Linter that suggests narrower interface types", - []*analysis.Analyzer{analyzer}, - nil, - ).WithIssuesReporter(func(*linter.Context) []goanalysis.Issue { - return resIssues - }).WithLoadMode(goanalysis.LoadModeTypesInfo) -} - -func runInterfacer(pass *analysis.Pass) ([]goanalysis.Issue, error) { - c := &check.Checker{} - - prog := goanalysis.MakeFakeLoaderProgram(pass) - c.Program(prog) - - ssa := pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA) - ssaPkg := ssa.Pkg - c.ProgramSSA(ssaPkg.Prog) - - lintIssues, err := c.Check() - if err != nil { - return nil, err - } - if len(lintIssues) == 0 { - return nil, nil - } - - issues := make([]goanalysis.Issue, 0, len(lintIssues)) - for _, i := range lintIssues { - pos := pass.Fset.Position(i.Pos()) - issues = append(issues, goanalysis.NewIssue(&result.Issue{ - Pos: pos, - Text: i.Message(), - FromLinter: interfacerName, - }, pass)) - } - - return issues, nil -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/ireturn.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/ireturn.go deleted file mode 100644 index dc09dad0e..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/ireturn.go +++ /dev/null @@ -1,31 +0,0 @@ -package golinters - -import ( - "strings" - - "github.com/butuzov/ireturn/analyzer" - "golang.org/x/tools/go/analysis" - - "github.com/golangci/golangci-lint/pkg/config" - "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" -) - -func NewIreturn(settings *config.IreturnSettings) *goanalysis.Linter { - a := analyzer.NewAnalyzer() - - cfg := map[string]map[string]any{} - if settings != nil { - cfg[a.Name] = map[string]any{ - "allow": strings.Join(settings.Allow, ","), - "reject": strings.Join(settings.Reject, ","), - "nonolint": true, - } - } - - return goanalysis.NewLinter( - a.Name, - a.Doc, - []*analysis.Analyzer{a}, - cfg, - ).WithLoadMode(goanalysis.LoadModeTypesInfo) -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/lll.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/lll.go deleted file mode 100644 index 61498087a..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/lll.go +++ /dev/null @@ -1,157 +0,0 @@ -package golinters - -import ( - "bufio" - "errors" - "fmt" - "go/token" - "os" - "strings" - "sync" - "unicode/utf8" - - "golang.org/x/tools/go/analysis" - - "github.com/golangci/golangci-lint/pkg/config" - "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" - "github.com/golangci/golangci-lint/pkg/lint/linter" - "github.com/golangci/golangci-lint/pkg/result" -) - -const lllName = "lll" - -const goCommentDirectivePrefix = "//go:" - -//nolint:dupl -func NewLLL(settings 
*config.LllSettings) *goanalysis.Linter { - var mu sync.Mutex - var resIssues []goanalysis.Issue - - analyzer := &analysis.Analyzer{ - Name: lllName, - Doc: goanalysis.TheOnlyanalyzerDoc, - Run: func(pass *analysis.Pass) (any, error) { - issues, err := runLll(pass, settings) - if err != nil { - return nil, err - } - - if len(issues) == 0 { - return nil, nil - } - - mu.Lock() - resIssues = append(resIssues, issues...) - mu.Unlock() - - return nil, nil - }, - } - - return goanalysis.NewLinter( - lllName, - "Reports long lines", - []*analysis.Analyzer{analyzer}, - nil, - ).WithIssuesReporter(func(*linter.Context) []goanalysis.Issue { - return resIssues - }).WithLoadMode(goanalysis.LoadModeSyntax) -} - -func runLll(pass *analysis.Pass, settings *config.LllSettings) ([]goanalysis.Issue, error) { - fileNames := getFileNames(pass) - - spaces := strings.Repeat(" ", settings.TabWidth) - - var issues []goanalysis.Issue - for _, f := range fileNames { - lintIssues, err := getLLLIssuesForFile(f, settings.LineLength, spaces) - if err != nil { - return nil, err - } - - for i := range lintIssues { - issues = append(issues, goanalysis.NewIssue(&lintIssues[i], pass)) - } - } - - return issues, nil -} - -func getLLLIssuesForFile(filename string, maxLineLen int, tabSpaces string) ([]result.Issue, error) { - var res []result.Issue - - f, err := os.Open(filename) - if err != nil { - return nil, fmt.Errorf("can't open file %s: %w", filename, err) - } - defer f.Close() - - lineNumber := 0 - multiImportEnabled := false - - scanner := bufio.NewScanner(f) - for scanner.Scan() { - lineNumber++ - - line := scanner.Text() - line = strings.ReplaceAll(line, "\t", tabSpaces) - - if strings.HasPrefix(line, goCommentDirectivePrefix) { - continue - } - - if strings.HasPrefix(line, "import") { - multiImportEnabled = strings.HasSuffix(line, "(") - continue - } - - if multiImportEnabled { - if line == ")" { - multiImportEnabled = false - } - - continue - } - - lineLen := utf8.RuneCountInString(line) - if lineLen > maxLineLen { - res = append(res, result.Issue{ - Pos: token.Position{ - Filename: filename, - Line: lineNumber, - }, - Text: fmt.Sprintf("line is %d characters", lineLen), - FromLinter: lllName, - }) - } - } - - if err := scanner.Err(); err != nil { - if errors.Is(err, bufio.ErrTooLong) && maxLineLen < bufio.MaxScanTokenSize { - // scanner.Scan() might fail if the line is longer than bufio.MaxScanTokenSize - // In the case where the specified maxLineLen is smaller than bufio.MaxScanTokenSize - // we can return this line as a long line instead of returning an error. - // The reason for this change is that this case might happen with autogenerated files - // The go-bindata tool for instance might generate a file with a very long line. - // In this case, as it's an auto generated file, the warning returned by lll will - // be ignored. - // But if we return a linter error here, and this error happens for an autogenerated - // file the error will be discarded (fine), but all the subsequent errors for lll will - // be discarded for other files, and we'll miss legit error. 
- res = append(res, result.Issue{ - Pos: token.Position{ - Filename: filename, - Line: lineNumber, - Column: 1, - }, - Text: fmt.Sprintf("line is more than %d characters", bufio.MaxScanTokenSize), - FromLinter: lllName, - }) - } else { - return nil, fmt.Errorf("can't scan file %s: %w", filename, err) - } - } - - return res, nil -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/loggercheck.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/loggercheck.go deleted file mode 100644 index fc29127c3..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/loggercheck.go +++ /dev/null @@ -1,44 +0,0 @@ -package golinters - -import ( - "github.com/timonwong/loggercheck" - "golang.org/x/tools/go/analysis" - - "github.com/golangci/golangci-lint/pkg/config" - "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" -) - -func NewLoggerCheck(settings *config.LoggerCheckSettings) *goanalysis.Linter { - var opts []loggercheck.Option - - if settings != nil { - var disable []string - if !settings.Kitlog { - disable = append(disable, "kitlog") - } - if !settings.Klog { - disable = append(disable, "klog") - } - if !settings.Logr { - disable = append(disable, "logr") - } - if !settings.Zap { - disable = append(disable, "zap") - } - - opts = []loggercheck.Option{ - loggercheck.WithDisable(disable), - loggercheck.WithRequireStringKey(settings.RequireStringKey), - loggercheck.WithRules(settings.Rules), - loggercheck.WithNoPrintfLike(settings.NoPrintfLike), - } - } - - analyzer := loggercheck.NewAnalyzer(opts...) - return goanalysis.NewLinter( - analyzer.Name, - analyzer.Doc, - []*analysis.Analyzer{analyzer}, - nil, - ).WithLoadMode(goanalysis.LoadModeTypesInfo) -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/maintidx.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/maintidx.go deleted file mode 100644 index 55509d970..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/maintidx.go +++ /dev/null @@ -1,30 +0,0 @@ -package golinters - -import ( - "github.com/yagipy/maintidx" - "golang.org/x/tools/go/analysis" - - "github.com/golangci/golangci-lint/pkg/config" - "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" -) - -func NewMaintIdx(cfg *config.MaintIdxSettings) *goanalysis.Linter { - analyzer := maintidx.Analyzer - - cfgMap := map[string]map[string]any{ - analyzer.Name: {"under": 20}, - } - - if cfg != nil { - cfgMap[analyzer.Name] = map[string]any{ - "under": cfg.Under, - } - } - - return goanalysis.NewLinter( - analyzer.Name, - analyzer.Doc, - []*analysis.Analyzer{analyzer}, - cfgMap, - ).WithLoadMode(goanalysis.LoadModeSyntax) -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/makezero.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/makezero.go deleted file mode 100644 index a9828629a..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/makezero.go +++ /dev/null @@ -1,75 +0,0 @@ -package golinters - -import ( - "fmt" - "sync" - - "github.com/ashanbrown/makezero/makezero" - "golang.org/x/tools/go/analysis" - - "github.com/golangci/golangci-lint/pkg/config" - "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" - "github.com/golangci/golangci-lint/pkg/lint/linter" - "github.com/golangci/golangci-lint/pkg/result" -) - -const makezeroName = "makezero" - -//nolint:dupl -func NewMakezero(settings *config.MakezeroSettings) *goanalysis.Linter { - var mu sync.Mutex - var resIssues []goanalysis.Issue - - analyzer := &analysis.Analyzer{ - Name: 
makezeroName, - Doc: goanalysis.TheOnlyanalyzerDoc, - Run: func(pass *analysis.Pass) (any, error) { - issues, err := runMakeZero(pass, settings) - if err != nil { - return nil, err - } - - if len(issues) == 0 { - return nil, nil - } - - mu.Lock() - resIssues = append(resIssues, issues...) - mu.Unlock() - - return nil, nil - }, - } - - return goanalysis.NewLinter( - makezeroName, - "Finds slice declarations with non-zero initial length", - []*analysis.Analyzer{analyzer}, - nil, - ).WithIssuesReporter(func(*linter.Context) []goanalysis.Issue { - return resIssues - }).WithLoadMode(goanalysis.LoadModeTypesInfo) -} - -func runMakeZero(pass *analysis.Pass, settings *config.MakezeroSettings) ([]goanalysis.Issue, error) { - zero := makezero.NewLinter(settings.Always) - - var issues []goanalysis.Issue - - for _, file := range pass.Files { - hints, err := zero.Run(pass.Fset, pass.TypesInfo, file) - if err != nil { - return nil, fmt.Errorf("makezero linter failed on file %q: %w", file.Name.String(), err) - } - - for _, hint := range hints { - issues = append(issues, goanalysis.NewIssue(&result.Issue{ - Pos: hint.Position(), - Text: hint.Details(), - FromLinter: makezeroName, - }, pass)) - } - } - - return issues, nil -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/maligned.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/maligned.go deleted file mode 100644 index 0455be76a..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/maligned.go +++ /dev/null @@ -1,74 +0,0 @@ -package golinters - -import ( - "fmt" - "sync" - - malignedAPI "github.com/golangci/maligned" - "golang.org/x/tools/go/analysis" - - "github.com/golangci/golangci-lint/pkg/config" - "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" - "github.com/golangci/golangci-lint/pkg/lint/linter" - "github.com/golangci/golangci-lint/pkg/result" -) - -const malignedName = "maligned" - -//nolint:dupl -func NewMaligned(settings *config.MalignedSettings) *goanalysis.Linter { - var mu sync.Mutex - var res []goanalysis.Issue - - analyzer := &analysis.Analyzer{ - Name: malignedName, - Doc: goanalysis.TheOnlyanalyzerDoc, - Run: func(pass *analysis.Pass) (any, error) { - issues := runMaligned(pass, settings) - - if len(issues) == 0 { - return nil, nil - } - - mu.Lock() - res = append(res, issues...) 
- mu.Unlock() - - return nil, nil - }, - } - - return goanalysis.NewLinter( - malignedName, - "Tool to detect Go structs that would take less memory if their fields were sorted", - []*analysis.Analyzer{analyzer}, - nil, - ).WithIssuesReporter(func(*linter.Context) []goanalysis.Issue { - return res - }).WithLoadMode(goanalysis.LoadModeTypesInfo) -} - -func runMaligned(pass *analysis.Pass, settings *config.MalignedSettings) []goanalysis.Issue { - prog := goanalysis.MakeFakeLoaderProgram(pass) - - malignedIssues := malignedAPI.Run(prog) - if len(malignedIssues) == 0 { - return nil - } - - issues := make([]goanalysis.Issue, 0, len(malignedIssues)) - for _, i := range malignedIssues { - text := fmt.Sprintf("struct of size %d bytes could be of size %d bytes", i.OldSize, i.NewSize) - if settings.SuggestNewOrder { - text += fmt.Sprintf(":\n%s", formatCodeBlock(i.NewStructDef, nil)) - } - - issues = append(issues, goanalysis.NewIssue(&result.Issue{ - Pos: i.Pos, - Text: text, - FromLinter: malignedName, - }, pass)) - } - - return issues -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/mirror.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/mirror.go deleted file mode 100644 index d6e2bb06a..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/mirror.go +++ /dev/null @@ -1,70 +0,0 @@ -package golinters - -import ( - "sync" - - "github.com/butuzov/mirror" - "golang.org/x/tools/go/analysis" - - "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" - "github.com/golangci/golangci-lint/pkg/lint/linter" - "github.com/golangci/golangci-lint/pkg/result" -) - -func NewMirror() *goanalysis.Linter { - var ( - mu sync.Mutex - issues []goanalysis.Issue - ) - - a := mirror.NewAnalyzer() - a.Run = func(pass *analysis.Pass) (any, error) { - // mirror only lints test files if the `--with-tests` flag is passed, - // so we pass the `with-tests` flag as true to the analyzer before running it. - // This can be turned off by using the regular golangci-lint flags such as `--tests` or `--skip-files` - // or can be disabled per linter via exclude rules. 
- // (see https://github.com/golangci/golangci-lint/issues/2527#issuecomment-1023707262) - violations := mirror.Run(pass, true) - - if len(violations) == 0 { - return nil, nil - } - - for index := range violations { - i := violations[index].Issue(pass.Fset) - - issue := result.Issue{ - FromLinter: a.Name, - Text: i.Message, - Pos: i.Start, - } - - if i.InlineFix != "" { - issue.Replacement = &result.Replacement{ - Inline: &result.InlineFix{ - StartCol: i.Start.Column - 1, - Length: len(i.Original), - NewString: i.InlineFix, - }, - } - } - - mu.Lock() - issues = append(issues, goanalysis.NewIssue(&issue, pass)) - mu.Unlock() - } - - return nil, nil - } - - analyzer := goanalysis.NewLinter( - a.Name, - a.Doc, - []*analysis.Analyzer{a}, - nil, - ).WithIssuesReporter(func(*linter.Context) []goanalysis.Issue { - return issues - }).WithLoadMode(goanalysis.LoadModeTypesInfo) - - return analyzer -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/misspell.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/misspell.go deleted file mode 100644 index 0f69cdb87..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/misspell.go +++ /dev/null @@ -1,155 +0,0 @@ -package golinters - -import ( - "fmt" - "go/token" - "strings" - "sync" - - "github.com/golangci/misspell" - "golang.org/x/tools/go/analysis" - - "github.com/golangci/golangci-lint/pkg/config" - "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" - "github.com/golangci/golangci-lint/pkg/lint/linter" - "github.com/golangci/golangci-lint/pkg/result" -) - -const misspellName = "misspell" - -func NewMisspell(settings *config.MisspellSettings) *goanalysis.Linter { - var mu sync.Mutex - var resIssues []goanalysis.Issue - - analyzer := &analysis.Analyzer{ - Name: misspellName, - Doc: goanalysis.TheOnlyanalyzerDoc, - Run: goanalysis.DummyRun, - } - - return goanalysis.NewLinter( - misspellName, - "Finds commonly misspelled English words", - []*analysis.Analyzer{analyzer}, - nil, - ).WithContextSetter(func(lintCtx *linter.Context) { - replacer, ruleErr := createMisspellReplacer(settings) - - analyzer.Run = func(pass *analysis.Pass) (any, error) { - if ruleErr != nil { - return nil, ruleErr - } - - issues, err := runMisspell(lintCtx, pass, replacer, settings.Mode) - if err != nil { - return nil, err - } - - if len(issues) == 0 { - return nil, nil - } - - mu.Lock() - resIssues = append(resIssues, issues...) 
- mu.Unlock() - - return nil, nil - } - }).WithIssuesReporter(func(*linter.Context) []goanalysis.Issue { - return resIssues - }).WithLoadMode(goanalysis.LoadModeSyntax) -} - -func runMisspell(lintCtx *linter.Context, pass *analysis.Pass, replacer *misspell.Replacer, mode string) ([]goanalysis.Issue, error) { - fileNames := getFileNames(pass) - - var issues []goanalysis.Issue - for _, filename := range fileNames { - lintIssues, err := runMisspellOnFile(lintCtx, filename, replacer, mode) - if err != nil { - return nil, err - } - - for i := range lintIssues { - issues = append(issues, goanalysis.NewIssue(&lintIssues[i], pass)) - } - } - - return issues, nil -} - -func createMisspellReplacer(settings *config.MisspellSettings) (*misspell.Replacer, error) { - replacer := &misspell.Replacer{ - Replacements: misspell.DictMain, - } - - // Figure out regional variations - switch strings.ToUpper(settings.Locale) { - case "": - // nothing - case "US": - replacer.AddRuleList(misspell.DictAmerican) - case "UK", "GB": - replacer.AddRuleList(misspell.DictBritish) - case "NZ", "AU", "CA": - return nil, fmt.Errorf("unknown locale: %q", settings.Locale) - } - - if len(settings.IgnoreWords) != 0 { - replacer.RemoveRule(settings.IgnoreWords) - } - - // It can panic. - replacer.Compile() - - return replacer, nil -} - -func runMisspellOnFile(lintCtx *linter.Context, filename string, replacer *misspell.Replacer, mode string) ([]result.Issue, error) { - fileContent, err := lintCtx.FileCache.GetFileBytes(filename) - if err != nil { - return nil, fmt.Errorf("can't get file %s contents: %w", filename, err) - } - - // `r.ReplaceGo` doesn't find issues inside strings: it searches only inside comments. - // `r.Replace` searches all words: it treats input as a plain text. - // The standalone misspell tool uses `r.Replace` by default. - var replace func(input string) (string, []misspell.Diff) - switch strings.ToLower(mode) { - case "restricted": - replace = replacer.ReplaceGo - default: - replace = replacer.Replace - } - - _, diffs := replace(string(fileContent)) - - var res []result.Issue - - for _, diff := range diffs { - text := fmt.Sprintf("`%s` is a misspelling of `%s`", diff.Original, diff.Corrected) - - pos := token.Position{ - Filename: filename, - Line: diff.Line, - Column: diff.Column + 1, - } - - replacement := &result.Replacement{ - Inline: &result.InlineFix{ - StartCol: diff.Column, - Length: len(diff.Original), - NewString: diff.Corrected, - }, - } - - res = append(res, result.Issue{ - Pos: pos, - Text: text, - FromLinter: misspellName, - Replacement: replacement, - }) - } - - return res, nil -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/musttag.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/musttag.go deleted file mode 100644 index 72d919582..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/musttag.go +++ /dev/null @@ -1,29 +0,0 @@ -package golinters - -import ( - "go-simpler.org/musttag" - "golang.org/x/tools/go/analysis" - - "github.com/golangci/golangci-lint/pkg/config" - "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" -) - -func NewMustTag(setting *config.MustTagSettings) *goanalysis.Linter { - var funcs []musttag.Func - - if setting != nil { - for _, fn := range setting.Functions { - funcs = append(funcs, musttag.Func{ - Name: fn.Name, - Tag: fn.Tag, - ArgPos: fn.ArgPos, - }) - } - } - - a := musttag.New(funcs...) - - return goanalysis. - NewLinter(a.Name, a.Doc, []*analysis.Analyzer{a}, nil). 
- WithLoadMode(goanalysis.LoadModeTypesInfo) -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/nakedret.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/nakedret.go deleted file mode 100644 index 6153860fb..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/nakedret.go +++ /dev/null @@ -1,25 +0,0 @@ -package golinters - -import ( - "github.com/alexkohler/nakedret/v2" - "golang.org/x/tools/go/analysis" - - "github.com/golangci/golangci-lint/pkg/config" - "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" -) - -func NewNakedret(settings *config.NakedretSettings) *goanalysis.Linter { - var maxLines int - if settings != nil { - maxLines = settings.MaxFuncLines - } - - a := nakedret.NakedReturnAnalyzer(uint(maxLines)) - - return goanalysis.NewLinter( - a.Name, - a.Doc, - []*analysis.Analyzer{a}, - nil, - ).WithLoadMode(goanalysis.LoadModeSyntax) -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/nestif.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/nestif.go deleted file mode 100644 index 12ad69ece..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/nestif.go +++ /dev/null @@ -1,79 +0,0 @@ -package golinters - -import ( - "sort" - "sync" - - "github.com/nakabonne/nestif" - "golang.org/x/tools/go/analysis" - - "github.com/golangci/golangci-lint/pkg/config" - "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" - "github.com/golangci/golangci-lint/pkg/lint/linter" - "github.com/golangci/golangci-lint/pkg/result" -) - -const nestifName = "nestif" - -//nolint:dupl -func NewNestif(settings *config.NestifSettings) *goanalysis.Linter { - var mu sync.Mutex - var resIssues []goanalysis.Issue - - analyzer := &analysis.Analyzer{ - Name: goanalysis.TheOnlyAnalyzerName, - Doc: goanalysis.TheOnlyanalyzerDoc, - Run: func(pass *analysis.Pass) (any, error) { - issues := runNestIf(pass, settings) - - if len(issues) == 0 { - return nil, nil - } - - mu.Lock() - resIssues = append(resIssues, issues...) - mu.Unlock() - - return nil, nil - }, - } - - return goanalysis.NewLinter( - nestifName, - "Reports deeply nested if statements", - []*analysis.Analyzer{analyzer}, - nil, - ).WithIssuesReporter(func(*linter.Context) []goanalysis.Issue { - return resIssues - }).WithLoadMode(goanalysis.LoadModeSyntax) -} - -func runNestIf(pass *analysis.Pass, settings *config.NestifSettings) []goanalysis.Issue { - checker := &nestif.Checker{ - MinComplexity: settings.MinComplexity, - } - - var lintIssues []nestif.Issue - for _, f := range pass.Files { - lintIssues = append(lintIssues, checker.Check(f, pass.Fset)...) 
- } - - if len(lintIssues) == 0 { - return nil - } - - sort.SliceStable(lintIssues, func(i, j int) bool { - return lintIssues[i].Complexity > lintIssues[j].Complexity - }) - - issues := make([]goanalysis.Issue, 0, len(lintIssues)) - for _, i := range lintIssues { - issues = append(issues, goanalysis.NewIssue(&result.Issue{ - Pos: i.Pos, - Text: i.Message, - FromLinter: nestifName, - }, pass)) - } - - return issues -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/nilerr.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/nilerr.go deleted file mode 100644 index 2ea16f2f3..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/nilerr.go +++ /dev/null @@ -1,19 +0,0 @@ -package golinters - -import ( - "github.com/gostaticanalysis/nilerr" - "golang.org/x/tools/go/analysis" - - "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" -) - -func NewNilErr() *goanalysis.Linter { - a := nilerr.Analyzer - - return goanalysis.NewLinter( - a.Name, - "Finds the code that returns nil even if it checks that the error is not nil.", - []*analysis.Analyzer{a}, - nil, - ).WithLoadMode(goanalysis.LoadModeTypesInfo) -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/nilnil.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/nilnil.go deleted file mode 100644 index 804557b76..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/nilnil.go +++ /dev/null @@ -1,30 +0,0 @@ -package golinters - -import ( - "strings" - - "github.com/Antonboom/nilnil/pkg/analyzer" - "golang.org/x/tools/go/analysis" - - "github.com/golangci/golangci-lint/pkg/config" - "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" -) - -func NewNilNil(cfg *config.NilNilSettings) *goanalysis.Linter { - a := analyzer.New() - - cfgMap := make(map[string]map[string]any) - if cfg != nil && len(cfg.CheckedTypes) != 0 { - cfgMap[a.Name] = map[string]any{ - "checked-types": strings.Join(cfg.CheckedTypes, ","), - } - } - - return goanalysis.NewLinter( - a.Name, - a.Doc, - []*analysis.Analyzer{a}, - cfgMap, - ). 
- WithLoadMode(goanalysis.LoadModeTypesInfo) -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/nlreturn.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/nlreturn.go deleted file mode 100644 index a359548f4..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/nlreturn.go +++ /dev/null @@ -1,27 +0,0 @@ -package golinters - -import ( - "github.com/ssgreg/nlreturn/v2/pkg/nlreturn" - "golang.org/x/tools/go/analysis" - - "github.com/golangci/golangci-lint/pkg/config" - "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" -) - -func NewNLReturn(settings *config.NlreturnSettings) *goanalysis.Linter { - a := nlreturn.NewAnalyzer() - - cfg := map[string]map[string]any{} - if settings != nil { - cfg[a.Name] = map[string]any{ - "block-size": settings.BlockSize, - } - } - - return goanalysis.NewLinter( - a.Name, - "nlreturn checks for a new line before return and branch statements to increase code clarity", - []*analysis.Analyzer{a}, - cfg, - ).WithLoadMode(goanalysis.LoadModeSyntax) -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/noctx.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/noctx.go deleted file mode 100644 index cff9c97dc..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/noctx.go +++ /dev/null @@ -1,19 +0,0 @@ -package golinters - -import ( - "github.com/sonatard/noctx" - "golang.org/x/tools/go/analysis" - - "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" -) - -func NewNoctx() *goanalysis.Linter { - a := noctx.Analyzer - - return goanalysis.NewLinter( - a.Name, - "Finds sending http request without context.Context", - []*analysis.Analyzer{a}, - nil, - ).WithLoadMode(goanalysis.LoadModeTypesInfo) -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/nolintlint.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/nolintlint.go deleted file mode 100644 index ae372ab79..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/nolintlint.go +++ /dev/null @@ -1,105 +0,0 @@ -package golinters - -import ( - "fmt" - "go/ast" - "sync" - - "golang.org/x/tools/go/analysis" - - "github.com/golangci/golangci-lint/pkg/config" - "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" - "github.com/golangci/golangci-lint/pkg/golinters/nolintlint" - "github.com/golangci/golangci-lint/pkg/lint/linter" - "github.com/golangci/golangci-lint/pkg/result" -) - -const NoLintLintName = "nolintlint" - -//nolint:dupl -func NewNoLintLint(settings *config.NoLintLintSettings) *goanalysis.Linter { - var mu sync.Mutex - var resIssues []goanalysis.Issue - - analyzer := &analysis.Analyzer{ - Name: NoLintLintName, - Doc: goanalysis.TheOnlyanalyzerDoc, - Run: func(pass *analysis.Pass) (any, error) { - issues, err := runNoLintLint(pass, settings) - if err != nil { - return nil, err - } - - if len(issues) == 0 { - return nil, nil - } - - mu.Lock() - resIssues = append(resIssues, issues...) 
- mu.Unlock() - - return nil, nil - }, - } - - return goanalysis.NewLinter( - NoLintLintName, - "Reports ill-formed or insufficient nolint directives", - []*analysis.Analyzer{analyzer}, - nil, - ).WithIssuesReporter(func(*linter.Context) []goanalysis.Issue { - return resIssues - }).WithLoadMode(goanalysis.LoadModeSyntax) -} - -func runNoLintLint(pass *analysis.Pass, settings *config.NoLintLintSettings) ([]goanalysis.Issue, error) { - var needs nolintlint.Needs - if settings.RequireExplanation { - needs |= nolintlint.NeedsExplanation - } - if settings.RequireSpecific { - needs |= nolintlint.NeedsSpecific - } - if !settings.AllowUnused { - needs |= nolintlint.NeedsUnused - } - - lnt, err := nolintlint.NewLinter(needs, settings.AllowNoExplanation) - if err != nil { - return nil, err - } - - nodes := make([]ast.Node, 0, len(pass.Files)) - for _, n := range pass.Files { - nodes = append(nodes, n) - } - - lintIssues, err := lnt.Run(pass.Fset, nodes...) - if err != nil { - return nil, fmt.Errorf("linter failed to run: %w", err) - } - - var issues []goanalysis.Issue - - for _, i := range lintIssues { - expectNoLint := false - var expectedNolintLinter string - if ii, ok := i.(nolintlint.UnusedCandidate); ok { - expectedNolintLinter = ii.ExpectedLinter - expectNoLint = true - } - - issue := &result.Issue{ - FromLinter: NoLintLintName, - Text: i.Details(), - Pos: i.Position(), - ExpectNoLint: expectNoLint, - ExpectedNoLintLinter: expectedNolintLinter, - Replacement: i.Replacement(), - } - - issues = append(issues, goanalysis.NewIssue(issue, pass)) - } - - return issues, nil -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/nolintlint/README.md b/vendor/github.com/golangci/golangci-lint/pkg/golinters/nolintlint/README.md deleted file mode 100644 index 1643df7a5..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/nolintlint/README.md +++ /dev/null @@ -1,31 +0,0 @@ -# nolintlint - -nolintlint is a Go static analysis tool to find ill-formed or insufficiently explained `//nolint` directives for golangci-lint -(or any other linter, using this package) - -## Purpose - -To ensure that lint exceptions have explanations. Consider the case below: - -```Go -import "crypto/md5" //nolint:all - -func hash(data []byte) []byte { - return md5.New().Sum(data) //nolint:all -} -``` - -In the above case, nolint directives are present but the user has no idea why this is being done or which linter -is being suppressed (in this case, gosec recommends against use of md5). `nolintlint` can require that the code provide an explanation, which might look as follows: - -```Go -import "crypto/md5" //nolint:gosec // this is not used in a secure application - -func hash(data []byte) []byte { - return md5.New().Sum(data) //nolint:gosec // this result is not used in a secure application -} -``` - -`nolintlint` can also identify cases where you may have written `// nolint`. Finally `nolintlint`, can also enforce that you -use the machine-readable nolint directive format `//nolint:all` and that you mention what linter is being suppressed, as shown above when we write `//nolint:gosec`. 
- diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/nolintlint/nolintlint.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/nolintlint/nolintlint.go deleted file mode 100644 index a245561a0..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/nolintlint/nolintlint.go +++ /dev/null @@ -1,302 +0,0 @@ -// Package nolintlint provides a linter to ensure that all //nolint directives are followed by explanations -package nolintlint - -import ( - "fmt" - "go/ast" - "go/token" - "regexp" - "strings" - "unicode" - - "github.com/golangci/golangci-lint/pkg/result" -) - -type BaseIssue struct { - fullDirective string - directiveWithOptionalLeadingSpace string - position token.Position - replacement *result.Replacement -} - -//nolint:gocritic // TODO(ldez) must be change in the future. -func (b BaseIssue) Position() token.Position { - return b.position -} - -//nolint:gocritic // TODO(ldez) must be change in the future. -func (b BaseIssue) Replacement() *result.Replacement { - return b.replacement -} - -type ExtraLeadingSpace struct { - BaseIssue -} - -//nolint:gocritic // TODO(ldez) must be change in the future. -func (i ExtraLeadingSpace) Details() string { - return fmt.Sprintf("directive `%s` should not have more than one leading space", i.fullDirective) -} - -func (i ExtraLeadingSpace) String() string { return toString(i) } - -type NotMachine struct { - BaseIssue -} - -//nolint:gocritic // TODO(ldez) must be change in the future. -func (i NotMachine) Details() string { - expected := i.fullDirective[:2] + strings.TrimLeftFunc(i.fullDirective[2:], unicode.IsSpace) - return fmt.Sprintf("directive `%s` should be written without leading space as `%s`", - i.fullDirective, expected) -} - -func (i NotMachine) String() string { return toString(i) } - -type NotSpecific struct { - BaseIssue -} - -//nolint:gocritic // TODO(ldez) must be change in the future. -func (i NotSpecific) Details() string { - return fmt.Sprintf("directive `%s` should mention specific linter such as `%s:my-linter`", - i.fullDirective, i.directiveWithOptionalLeadingSpace) -} - -func (i NotSpecific) String() string { return toString(i) } - -type ParseError struct { - BaseIssue -} - -//nolint:gocritic // TODO(ldez) must be change in the future. -func (i ParseError) Details() string { - return fmt.Sprintf("directive `%s` should match `%s[:] [// ]`", - i.fullDirective, - i.directiveWithOptionalLeadingSpace) -} - -func (i ParseError) String() string { return toString(i) } - -type NoExplanation struct { - BaseIssue - fullDirectiveWithoutExplanation string -} - -//nolint:gocritic // TODO(ldez) must be change in the future. -func (i NoExplanation) Details() string { - return fmt.Sprintf("directive `%s` should provide explanation such as `%s // this is why`", - i.fullDirective, i.fullDirectiveWithoutExplanation) -} - -func (i NoExplanation) String() string { return toString(i) } - -type UnusedCandidate struct { - BaseIssue - ExpectedLinter string -} - -//nolint:gocritic // TODO(ldez) must be change in the future. 
-func (i UnusedCandidate) Details() string { - details := fmt.Sprintf("directive `%s` is unused", i.fullDirective) - if i.ExpectedLinter != "" { - details += fmt.Sprintf(" for linter %q", i.ExpectedLinter) - } - return details -} - -func (i UnusedCandidate) String() string { return toString(i) } - -func toString(i Issue) string { - return fmt.Sprintf("%s at %s", i.Details(), i.Position()) -} - -type Issue interface { - Details() string - Position() token.Position - String() string - Replacement() *result.Replacement -} - -type Needs uint - -const ( - NeedsMachineOnly Needs = 1 << iota - NeedsSpecific - NeedsExplanation - NeedsUnused - NeedsAll = NeedsMachineOnly | NeedsSpecific | NeedsExplanation -) - -var commentPattern = regexp.MustCompile(`^//\s*(nolint)(:\s*[\w-]+\s*(?:,\s*[\w-]+\s*)*)?\b`) - -// matches a complete nolint directive -var fullDirectivePattern = regexp.MustCompile(`^//\s*nolint(?::(\s*[\w-]+\s*(?:,\s*[\w-]+\s*)*))?\s*(//.*)?\s*\n?$`) - -type Linter struct { - needs Needs // indicates which linter checks to perform - excludeByLinter map[string]bool -} - -// NewLinter creates a linter that enforces that the provided directives fulfill the provided requirements -func NewLinter(needs Needs, excludes []string) (*Linter, error) { - excludeByName := make(map[string]bool) - for _, e := range excludes { - excludeByName[e] = true - } - - return &Linter{ - needs: needs | NeedsMachineOnly, - excludeByLinter: excludeByName, - }, nil -} - -var leadingSpacePattern = regexp.MustCompile(`^//(\s*)`) -var trailingBlankExplanation = regexp.MustCompile(`\s*(//\s*)?$`) - -//nolint:funlen,gocyclo -func (l Linter) Run(fset *token.FileSet, nodes ...ast.Node) ([]Issue, error) { - var issues []Issue - - for _, node := range nodes { - file, ok := node.(*ast.File) - if !ok { - continue - } - - for _, c := range file.Comments { - for _, comment := range c.List { - if !commentPattern.MatchString(comment.Text) { - continue - } - - // check for a space between the "//" and the directive - leadingSpaceMatches := leadingSpacePattern.FindStringSubmatch(comment.Text) - - var leadingSpace string - if len(leadingSpaceMatches) > 0 { - leadingSpace = leadingSpaceMatches[1] - } - - directiveWithOptionalLeadingSpace := "//" - if leadingSpace != "" { - directiveWithOptionalLeadingSpace += " " - } - - split := strings.Split(strings.SplitN(comment.Text, ":", 2)[0], "//") - directiveWithOptionalLeadingSpace += strings.TrimSpace(split[1]) - - pos := fset.Position(comment.Pos()) - end := fset.Position(comment.End()) - - base := BaseIssue{ - fullDirective: comment.Text, - directiveWithOptionalLeadingSpace: directiveWithOptionalLeadingSpace, - position: pos, - } - - // check for, report and eliminate leading spaces, so we can check for other issues - if leadingSpace != "" { - removeWhitespace := &result.Replacement{ - Inline: &result.InlineFix{ - StartCol: pos.Column + 1, - Length: len(leadingSpace), - NewString: "", - }, - } - if (l.needs & NeedsMachineOnly) != 0 { - issue := NotMachine{BaseIssue: base} - issue.BaseIssue.replacement = removeWhitespace - issues = append(issues, issue) - } else if len(leadingSpace) > 1 { - issue := ExtraLeadingSpace{BaseIssue: base} - issue.BaseIssue.replacement = removeWhitespace - issue.BaseIssue.replacement.Inline.NewString = " " // assume a single space was intended - issues = append(issues, issue) - } - } - - fullMatches := fullDirectivePattern.FindStringSubmatch(comment.Text) - if len(fullMatches) == 0 { - issues = append(issues, ParseError{BaseIssue: base}) - continue - } - - 
lintersText, explanation := fullMatches[1], fullMatches[2] - - var linters []string - if lintersText != "" && !strings.HasPrefix(lintersText, "all") { - lls := strings.Split(lintersText, ",") - linters = make([]string, 0, len(lls)) - rangeStart := (pos.Column - 1) + len("//") + len(leadingSpace) + len("nolint:") - for i, ll := range lls { - rangeEnd := rangeStart + len(ll) - if i < len(lls)-1 { - rangeEnd++ // include trailing comma - } - trimmedLinterName := strings.TrimSpace(ll) - if trimmedLinterName != "" { - linters = append(linters, trimmedLinterName) - } - rangeStart = rangeEnd - } - } - - if (l.needs & NeedsSpecific) != 0 { - if len(linters) == 0 { - issues = append(issues, NotSpecific{BaseIssue: base}) - } - } - - // when detecting unused directives, we send all the directives through and filter them out in the nolint processor - if (l.needs & NeedsUnused) != 0 { - removeNolintCompletely := &result.Replacement{ - Inline: &result.InlineFix{ - StartCol: pos.Column - 1, - Length: end.Column - pos.Column, - NewString: "", - }, - } - - if len(linters) == 0 { - issue := UnusedCandidate{BaseIssue: base} - issue.replacement = removeNolintCompletely - issues = append(issues, issue) - } else { - for _, linter := range linters { - issue := UnusedCandidate{BaseIssue: base, ExpectedLinter: linter} - // only offer replacement if there is a single linter - // because of issues around commas and the possibility of all - // linters being removed - if len(linters) == 1 { - issue.replacement = removeNolintCompletely - } - issues = append(issues, issue) - } - } - } - - if (l.needs&NeedsExplanation) != 0 && (explanation == "" || strings.TrimSpace(explanation) == "//") { - needsExplanation := len(linters) == 0 // if no linters are mentioned, we must have explanation - // otherwise, check if we are excluding all the mentioned linters - for _, ll := range linters { - if !l.excludeByLinter[ll] { // if a linter does require explanation - needsExplanation = true - break - } - } - - if needsExplanation { - fullDirectiveWithoutExplanation := trailingBlankExplanation.ReplaceAllString(comment.Text, "") - issues = append(issues, NoExplanation{ - BaseIssue: base, - fullDirectiveWithoutExplanation: fullDirectiveWithoutExplanation, - }) - } - } - } - } - } - - return issues, nil -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/nonamedreturns.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/nonamedreturns.go deleted file mode 100644 index 7856f6d61..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/nonamedreturns.go +++ /dev/null @@ -1,29 +0,0 @@ -package golinters - -import ( - "github.com/firefart/nonamedreturns/analyzer" - "golang.org/x/tools/go/analysis" - - "github.com/golangci/golangci-lint/pkg/config" - "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" -) - -func NewNoNamedReturns(settings *config.NoNamedReturnsSettings) *goanalysis.Linter { - a := analyzer.Analyzer - - var cfg map[string]map[string]any - if settings != nil { - cfg = map[string]map[string]any{ - a.Name: { - analyzer.FlagReportErrorInDefer: settings.ReportErrorInDefer, - }, - } - } - - return goanalysis.NewLinter( - a.Name, - a.Doc, - []*analysis.Analyzer{a}, - cfg, - ).WithLoadMode(goanalysis.LoadModeTypesInfo) -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/nosnakecase.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/nosnakecase.go deleted file mode 100644 index 26d5d6d4c..000000000 --- 
a/vendor/github.com/golangci/golangci-lint/pkg/golinters/nosnakecase.go +++ /dev/null @@ -1,19 +0,0 @@ -package golinters - -import ( - "github.com/sivchari/nosnakecase" - "golang.org/x/tools/go/analysis" - - "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" -) - -func NewNoSnakeCase() *goanalysis.Linter { - a := nosnakecase.Analyzer - - return goanalysis.NewLinter( - a.Name, - a.Doc, - []*analysis.Analyzer{a}, - nil, - ).WithLoadMode(goanalysis.LoadModeSyntax) -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/nosprintfhostport.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/nosprintfhostport.go deleted file mode 100644 index a63b9bb5f..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/nosprintfhostport.go +++ /dev/null @@ -1,19 +0,0 @@ -package golinters - -import ( - "github.com/stbenjam/no-sprintf-host-port/pkg/analyzer" - "golang.org/x/tools/go/analysis" - - "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" -) - -func NewNoSprintfHostPort() *goanalysis.Linter { - a := analyzer.Analyzer - - return goanalysis.NewLinter( - a.Name, - a.Doc, - []*analysis.Analyzer{a}, - nil, - ).WithLoadMode(goanalysis.LoadModeSyntax) -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/paralleltest.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/paralleltest.go deleted file mode 100644 index c49f74aa2..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/paralleltest.go +++ /dev/null @@ -1,30 +0,0 @@ -package golinters - -import ( - "github.com/kunwardeep/paralleltest/pkg/paralleltest" - "golang.org/x/tools/go/analysis" - - "github.com/golangci/golangci-lint/pkg/config" - "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" -) - -func NewParallelTest(settings *config.ParallelTestSettings) *goanalysis.Linter { - a := paralleltest.NewAnalyzer() - - var cfg map[string]map[string]any - if settings != nil { - cfg = map[string]map[string]any{ - a.Name: { - "i": settings.IgnoreMissing, - "ignoremissingsubtests": settings.IgnoreMissingSubtests, - }, - } - } - - return goanalysis.NewLinter( - a.Name, - "Detects missing usage of t.Parallel() method in your Go test", - []*analysis.Analyzer{a}, - cfg, - ).WithLoadMode(goanalysis.LoadModeTypesInfo) -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/perfsprint.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/perfsprint.go deleted file mode 100644 index acaa3a522..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/perfsprint.go +++ /dev/null @@ -1,31 +0,0 @@ -package golinters - -import ( - "github.com/catenacyber/perfsprint/analyzer" - "golang.org/x/tools/go/analysis" - - "github.com/golangci/golangci-lint/pkg/config" - "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" -) - -func NewPerfSprint(settings *config.PerfSprintSettings) *goanalysis.Linter { - a := analyzer.New() - - cfg := map[string]map[string]any{ - a.Name: {"fiximports": false}, - } - - if settings != nil { - cfg[a.Name]["int-conversion"] = settings.IntConversion - cfg[a.Name]["err-error"] = settings.ErrError - cfg[a.Name]["errorf"] = settings.ErrorF - cfg[a.Name]["sprintf1"] = settings.SprintF1 - } - - return goanalysis.NewLinter( - a.Name, - a.Doc, - []*analysis.Analyzer{a}, - cfg, - ).WithLoadMode(goanalysis.LoadModeTypesInfo) -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/prealloc.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/prealloc.go deleted file mode 100644 index 
f48d57562..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/prealloc.go +++ /dev/null @@ -1,65 +0,0 @@ -package golinters - -import ( - "fmt" - "sync" - - "github.com/alexkohler/prealloc/pkg" - "golang.org/x/tools/go/analysis" - - "github.com/golangci/golangci-lint/pkg/config" - "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" - "github.com/golangci/golangci-lint/pkg/lint/linter" - "github.com/golangci/golangci-lint/pkg/result" -) - -const preallocName = "prealloc" - -//nolint:dupl -func NewPreAlloc(settings *config.PreallocSettings) *goanalysis.Linter { - var mu sync.Mutex - var resIssues []goanalysis.Issue - - analyzer := &analysis.Analyzer{ - Name: preallocName, - Doc: goanalysis.TheOnlyanalyzerDoc, - Run: func(pass *analysis.Pass) (any, error) { - issues := runPreAlloc(pass, settings) - - if len(issues) == 0 { - return nil, nil - } - - mu.Lock() - resIssues = append(resIssues, issues...) - mu.Unlock() - - return nil, nil - }, - } - - return goanalysis.NewLinter( - preallocName, - "Finds slice declarations that could potentially be pre-allocated", - []*analysis.Analyzer{analyzer}, - nil, - ).WithIssuesReporter(func(*linter.Context) []goanalysis.Issue { - return resIssues - }).WithLoadMode(goanalysis.LoadModeSyntax) -} - -func runPreAlloc(pass *analysis.Pass, settings *config.PreallocSettings) []goanalysis.Issue { - var issues []goanalysis.Issue - - hints := pkg.Check(pass.Files, settings.Simple, settings.RangeLoops, settings.ForLoops) - - for _, hint := range hints { - issues = append(issues, goanalysis.NewIssue(&result.Issue{ - Pos: pass.Fset.Position(hint.Pos), - Text: fmt.Sprintf("Consider pre-allocating %s", formatCode(hint.DeclaredSliceName, nil)), - FromLinter: preallocName, - }, pass)) - } - - return issues -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/predeclared.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/predeclared.go deleted file mode 100644 index d3c25e274..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/predeclared.go +++ /dev/null @@ -1,26 +0,0 @@ -package golinters - -import ( - "github.com/nishanths/predeclared/passes/predeclared" - "golang.org/x/tools/go/analysis" - - "github.com/golangci/golangci-lint/pkg/config" - "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" -) - -func NewPredeclared(settings *config.PredeclaredSettings) *goanalysis.Linter { - a := predeclared.Analyzer - - var cfg map[string]map[string]any - if settings != nil { - cfg = map[string]map[string]any{ - a.Name: { - predeclared.IgnoreFlag: settings.Ignore, - predeclared.QualifiedFlag: settings.Qualified, - }, - } - } - - return goanalysis.NewLinter(a.Name, a.Doc, []*analysis.Analyzer{a}, cfg). 
- WithLoadMode(goanalysis.LoadModeSyntax) -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/promlinter.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/promlinter.go deleted file mode 100644 index 381c57489..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/promlinter.go +++ /dev/null @@ -1,77 +0,0 @@ -package golinters - -import ( - "fmt" - "sync" - - "github.com/yeya24/promlinter" - "golang.org/x/tools/go/analysis" - - "github.com/golangci/golangci-lint/pkg/config" - "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" - "github.com/golangci/golangci-lint/pkg/lint/linter" - "github.com/golangci/golangci-lint/pkg/result" -) - -const promlinterName = "promlinter" - -func NewPromlinter(settings *config.PromlinterSettings) *goanalysis.Linter { - var mu sync.Mutex - var resIssues []goanalysis.Issue - - var promSettings promlinter.Setting - if settings != nil { - promSettings = promlinter.Setting{ - Strict: settings.Strict, - DisabledLintFuncs: settings.DisabledLinters, - } - } - - analyzer := &analysis.Analyzer{ - Name: promlinterName, - Doc: goanalysis.TheOnlyanalyzerDoc, - Run: func(pass *analysis.Pass) (any, error) { - issues := runPromLinter(pass, promSettings) - - if len(issues) == 0 { - return nil, nil - } - - mu.Lock() - resIssues = append(resIssues, issues...) - mu.Unlock() - - return nil, nil - }, - } - - return goanalysis.NewLinter( - promlinterName, - "Check Prometheus metrics naming via promlint", - []*analysis.Analyzer{analyzer}, - nil, - ).WithIssuesReporter(func(*linter.Context) []goanalysis.Issue { - return resIssues - }).WithLoadMode(goanalysis.LoadModeSyntax) -} - -func runPromLinter(pass *analysis.Pass, promSettings promlinter.Setting) []goanalysis.Issue { - lintIssues := promlinter.RunLint(pass.Fset, pass.Files, promSettings) - - if len(lintIssues) == 0 { - return nil - } - - issues := make([]goanalysis.Issue, len(lintIssues)) - for k, i := range lintIssues { - issue := result.Issue{ - Pos: i.Pos, - Text: fmt.Sprintf("Metric: %s Error: %s", i.Metric, i.Text), - FromLinter: promlinterName, - } - - issues[k] = goanalysis.NewIssue(&issue, pass) - } - - return issues -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/protogetter.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/protogetter.go deleted file mode 100644 index 9a5e7b4db..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/protogetter.go +++ /dev/null @@ -1,74 +0,0 @@ -package golinters - -import ( - "sync" - - "github.com/ghostiam/protogetter" - "golang.org/x/tools/go/analysis" - - "github.com/golangci/golangci-lint/pkg/config" - "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" - "github.com/golangci/golangci-lint/pkg/lint/linter" - "github.com/golangci/golangci-lint/pkg/result" -) - -func NewProtoGetter(settings *config.ProtoGetterSettings) *goanalysis.Linter { - var mu sync.Mutex - var resIssues []goanalysis.Issue - - var cfg protogetter.Config - if settings != nil { - cfg = protogetter.Config{ - SkipGeneratedBy: settings.SkipGeneratedBy, - SkipFiles: settings.SkipFiles, - SkipAnyGenerated: settings.SkipAnyGenerated, - ReplaceFirstArgInAppend: settings.ReplaceFirstArgInAppend, - } - } - cfg.Mode = protogetter.GolangciLintMode - - a := protogetter.NewAnalyzer(&cfg) - a.Run = func(pass *analysis.Pass) (any, error) { - pgIssues, err := protogetter.Run(pass, &cfg) - if err != nil { - return nil, err - } - - issues := make([]goanalysis.Issue, len(pgIssues)) - for i, issue := range pgIssues { - 
report := &result.Issue{ - FromLinter: a.Name, - Pos: issue.Pos, - Text: issue.Message, - Replacement: &result.Replacement{ - Inline: &result.InlineFix{ - StartCol: issue.InlineFix.StartCol, - Length: issue.InlineFix.Length, - NewString: issue.InlineFix.NewString, - }, - }, - } - - issues[i] = goanalysis.NewIssue(report, pass) - } - - if len(issues) == 0 { - return nil, nil - } - - mu.Lock() - resIssues = append(resIssues, issues...) - mu.Unlock() - - return nil, nil - } - - return goanalysis.NewLinter( - a.Name, - a.Doc, - []*analysis.Analyzer{a}, - nil, - ).WithIssuesReporter(func(*linter.Context) []goanalysis.Issue { - return resIssues - }).WithLoadMode(goanalysis.LoadModeTypesInfo) -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/reassign.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/reassign.go deleted file mode 100644 index a6dd67053..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/reassign.go +++ /dev/null @@ -1,32 +0,0 @@ -package golinters - -import ( - "fmt" - "strings" - - "github.com/curioswitch/go-reassign" - "golang.org/x/tools/go/analysis" - - "github.com/golangci/golangci-lint/pkg/config" - "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" -) - -func NewReassign(settings *config.ReassignSettings) *goanalysis.Linter { - a := reassign.NewAnalyzer() - - var cfg map[string]map[string]any - if settings != nil && len(settings.Patterns) > 0 { - cfg = map[string]map[string]any{ - a.Name: { - reassign.FlagPattern: fmt.Sprintf("^(%s)$", strings.Join(settings.Patterns, "|")), - }, - } - } - - return goanalysis.NewLinter( - a.Name, - a.Doc, - []*analysis.Analyzer{a}, - cfg, - ).WithLoadMode(goanalysis.LoadModeTypesInfo) -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/revive.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/revive.go deleted file mode 100644 index 46d8b1c9c..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/revive.go +++ /dev/null @@ -1,395 +0,0 @@ -package golinters - -import ( - "bytes" - "encoding/json" - "fmt" - "go/token" - "os" - "reflect" - "sync" - - "github.com/BurntSushi/toml" - reviveConfig "github.com/mgechev/revive/config" - "github.com/mgechev/revive/lint" - "github.com/mgechev/revive/rule" - "golang.org/x/tools/go/analysis" - - "github.com/golangci/golangci-lint/pkg/config" - "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" - "github.com/golangci/golangci-lint/pkg/lint/linter" - "github.com/golangci/golangci-lint/pkg/logutils" - "github.com/golangci/golangci-lint/pkg/result" -) - -const reviveName = "revive" - -var reviveDebugf = logutils.Debug(logutils.DebugKeyRevive) - -// jsonObject defines a JSON object of a failure -type jsonObject struct { - Severity lint.Severity - lint.Failure `json:",inline"` -} - -// NewRevive returns a new Revive linter. -// - -func NewRevive(settings *config.ReviveSettings) *goanalysis.Linter { - var mu sync.Mutex - var resIssues []goanalysis.Issue - - analyzer := &analysis.Analyzer{ - Name: goanalysis.TheOnlyAnalyzerName, - Doc: goanalysis.TheOnlyanalyzerDoc, - Run: goanalysis.DummyRun, - } - - return goanalysis.NewLinter( - reviveName, - "Fast, configurable, extensible, flexible, and beautiful linter for Go. 
Drop-in replacement of golint.", - []*analysis.Analyzer{analyzer}, - nil, - ).WithContextSetter(func(lintCtx *linter.Context) { - analyzer.Run = func(pass *analysis.Pass) (any, error) { - issues, err := runRevive(lintCtx, pass, settings) - if err != nil { - return nil, err - } - - if len(issues) == 0 { - return nil, nil - } - - mu.Lock() - resIssues = append(resIssues, issues...) - mu.Unlock() - - return nil, nil - } - }).WithIssuesReporter(func(*linter.Context) []goanalysis.Issue { - return resIssues - }).WithLoadMode(goanalysis.LoadModeSyntax) -} - -func runRevive(lintCtx *linter.Context, pass *analysis.Pass, settings *config.ReviveSettings) ([]goanalysis.Issue, error) { - packages := [][]string{getFileNames(pass)} - - conf, err := getReviveConfig(settings) - if err != nil { - return nil, err - } - - formatter, err := reviveConfig.GetFormatter("json") - if err != nil { - return nil, err - } - - revive := lint.New(os.ReadFile, settings.MaxOpenFiles) - - lintingRules, err := reviveConfig.GetLintingRules(conf, []lint.Rule{}) - if err != nil { - return nil, err - } - - failures, err := revive.Lint(packages, lintingRules, *conf) - if err != nil { - return nil, err - } - - formatChan := make(chan lint.Failure) - exitChan := make(chan bool) - - var output string - go func() { - output, err = formatter.Format(formatChan, *conf) - if err != nil { - lintCtx.Log.Errorf("Format error: %v", err) - } - exitChan <- true - }() - - for f := range failures { - if f.Confidence < conf.Confidence { - continue - } - - formatChan <- f - } - - close(formatChan) - <-exitChan - - var results []jsonObject - err = json.Unmarshal([]byte(output), &results) - if err != nil { - return nil, err - } - - var issues []goanalysis.Issue - for i := range results { - issues = append(issues, reviveToIssue(pass, &results[i])) - } - - return issues, nil -} - -func reviveToIssue(pass *analysis.Pass, object *jsonObject) goanalysis.Issue { - lineRangeTo := object.Position.End.Line - if object.RuleName == (&rule.ExportedRule{}).Name() { - lineRangeTo = object.Position.Start.Line - } - - return goanalysis.NewIssue(&result.Issue{ - Severity: string(object.Severity), - Text: fmt.Sprintf("%s: %s", object.RuleName, object.Failure.Failure), - Pos: token.Position{ - Filename: object.Position.Start.Filename, - Line: object.Position.Start.Line, - Offset: object.Position.Start.Offset, - Column: object.Position.Start.Column, - }, - LineRange: &result.Range{ - From: object.Position.Start.Line, - To: lineRangeTo, - }, - FromLinter: reviveName, - }, pass) -} - -// This function mimics the GetConfig function of revive. -// This allows to get default values and right types. 
-// https://github.com/golangci/golangci-lint/issues/1745 -// https://github.com/mgechev/revive/blob/v1.1.4/config/config.go#L182 -func getReviveConfig(cfg *config.ReviveSettings) (*lint.Config, error) { - conf := defaultConfig() - - if !reflect.DeepEqual(cfg, &config.ReviveSettings{}) { - rawRoot := createConfigMap(cfg) - buf := bytes.NewBuffer(nil) - - err := toml.NewEncoder(buf).Encode(rawRoot) - if err != nil { - return nil, fmt.Errorf("failed to encode configuration: %w", err) - } - - conf = &lint.Config{} - _, err = toml.NewDecoder(buf).Decode(conf) - if err != nil { - return nil, fmt.Errorf("failed to decode configuration: %w", err) - } - } - - normalizeConfig(conf) - - reviveDebugf("revive configuration: %#v", conf) - - return conf, nil -} - -func createConfigMap(cfg *config.ReviveSettings) map[string]any { - rawRoot := map[string]any{ - "ignoreGeneratedHeader": cfg.IgnoreGeneratedHeader, - "confidence": cfg.Confidence, - "severity": cfg.Severity, - "errorCode": cfg.ErrorCode, - "warningCode": cfg.WarningCode, - "enableAllRules": cfg.EnableAllRules, - } - - rawDirectives := map[string]map[string]any{} - for _, directive := range cfg.Directives { - rawDirectives[directive.Name] = map[string]any{ - "severity": directive.Severity, - } - } - - if len(rawDirectives) > 0 { - rawRoot["directive"] = rawDirectives - } - - rawRules := map[string]map[string]any{} - for _, s := range cfg.Rules { - rawRules[s.Name] = map[string]any{ - "severity": s.Severity, - "arguments": safeTomlSlice(s.Arguments), - "disabled": s.Disabled, - } - } - - if len(rawRules) > 0 { - rawRoot["rule"] = rawRules - } - - return rawRoot -} - -func safeTomlSlice(r []any) []any { - if len(r) == 0 { - return nil - } - - if _, ok := r[0].(map[any]any); !ok { - return r - } - - var typed []any - for _, elt := range r { - item := map[string]any{} - for k, v := range elt.(map[any]any) { - item[k.(string)] = v - } - - typed = append(typed, item) - } - - return typed -} - -// This element is not exported by revive, so we need copy the code. 
-// Extracted from https://github.com/mgechev/revive/blob/v1.3.7/config/config.go#L15 -var defaultRules = []lint.Rule{ - &rule.VarDeclarationsRule{}, - &rule.PackageCommentsRule{}, - &rule.DotImportsRule{}, - &rule.BlankImportsRule{}, - &rule.ExportedRule{}, - &rule.VarNamingRule{}, - &rule.IndentErrorFlowRule{}, - &rule.RangeRule{}, - &rule.ErrorfRule{}, - &rule.ErrorNamingRule{}, - &rule.ErrorStringsRule{}, - &rule.ReceiverNamingRule{}, - &rule.IncrementDecrementRule{}, - &rule.ErrorReturnRule{}, - &rule.UnexportedReturnRule{}, - &rule.TimeNamingRule{}, - &rule.ContextKeysType{}, - &rule.ContextAsArgumentRule{}, - &rule.EmptyBlockRule{}, - &rule.SuperfluousElseRule{}, - &rule.UnusedParamRule{}, - &rule.UnreachableCodeRule{}, - &rule.RedefinesBuiltinIDRule{}, -} - -var allRules = append([]lint.Rule{ - &rule.ArgumentsLimitRule{}, - &rule.CyclomaticRule{}, - &rule.FileHeaderRule{}, - &rule.ConfusingNamingRule{}, - &rule.GetReturnRule{}, - &rule.ModifiesParamRule{}, - &rule.ConfusingResultsRule{}, - &rule.DeepExitRule{}, - &rule.AddConstantRule{}, - &rule.FlagParamRule{}, - &rule.UnnecessaryStmtRule{}, - &rule.StructTagRule{}, - &rule.ModifiesValRecRule{}, - &rule.ConstantLogicalExprRule{}, - &rule.BoolLiteralRule{}, - &rule.ImportsBlocklistRule{}, - &rule.FunctionResultsLimitRule{}, - &rule.MaxPublicStructsRule{}, - &rule.RangeValInClosureRule{}, - &rule.RangeValAddress{}, - &rule.WaitGroupByValueRule{}, - &rule.AtomicRule{}, - &rule.EmptyLinesRule{}, - &rule.LineLengthLimitRule{}, - &rule.CallToGCRule{}, - &rule.DuplicatedImportsRule{}, - &rule.ImportShadowingRule{}, - &rule.BareReturnRule{}, - &rule.UnusedReceiverRule{}, - &rule.UnhandledErrorRule{}, - &rule.CognitiveComplexityRule{}, - &rule.StringOfIntRule{}, - &rule.StringFormatRule{}, - &rule.EarlyReturnRule{}, - &rule.UnconditionalRecursionRule{}, - &rule.IdenticalBranchesRule{}, - &rule.DeferRule{}, - &rule.UnexportedNamingRule{}, - &rule.FunctionLength{}, - &rule.NestedStructs{}, - &rule.UselessBreak{}, - &rule.UncheckedTypeAssertionRule{}, - &rule.TimeEqualRule{}, - &rule.BannedCharsRule{}, - &rule.OptimizeOperandsOrderRule{}, - &rule.UseAnyRule{}, - &rule.DataRaceRule{}, - &rule.CommentSpacingsRule{}, - &rule.IfReturnRule{}, - &rule.RedundantImportAlias{}, - &rule.ImportAliasNamingRule{}, - &rule.EnforceMapStyleRule{}, - &rule.EnforceRepeatedArgTypeStyleRule{}, - &rule.EnforceSliceStyleRule{}, - &rule.MaxControlNestingRule{}, -}, defaultRules...) - -const defaultConfidence = 0.8 - -// This element is not exported by revive, so we need copy the code. -// Extracted from https://github.com/mgechev/revive/blob/v1.1.4/config/config.go#L145 -func normalizeConfig(cfg *lint.Config) { - // NOTE(ldez): this custom section for golangci-lint should be kept. 
- // --- - if cfg.Confidence == 0 { - cfg.Confidence = defaultConfidence - } - if cfg.Severity == "" { - cfg.Severity = lint.SeverityWarning - } - // --- - - if len(cfg.Rules) == 0 { - cfg.Rules = map[string]lint.RuleConfig{} - } - if cfg.EnableAllRules { - // Add to the configuration all rules not yet present in it - for _, r := range allRules { - ruleName := r.Name() - _, alreadyInConf := cfg.Rules[ruleName] - if alreadyInConf { - continue - } - // Add the rule with an empty conf for - cfg.Rules[ruleName] = lint.RuleConfig{} - } - } - - severity := cfg.Severity - if severity != "" { - for k, v := range cfg.Rules { - if v.Severity == "" { - v.Severity = severity - } - cfg.Rules[k] = v - } - for k, v := range cfg.Directives { - if v.Severity == "" { - v.Severity = severity - } - cfg.Directives[k] = v - } - } -} - -// This element is not exported by revive, so we need copy the code. -// Extracted from https://github.com/mgechev/revive/blob/v1.1.4/config/config.go#L214 -func defaultConfig() *lint.Config { - defaultConfig := lint.Config{ - Confidence: defaultConfidence, - Severity: lint.SeverityWarning, - Rules: map[string]lint.RuleConfig{}, - } - for _, r := range defaultRules { - defaultConfig.Rules[r.Name()] = lint.RuleConfig{} - } - return &defaultConfig -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/rowserrcheck.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/rowserrcheck.go deleted file mode 100644 index d67efd069..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/rowserrcheck.go +++ /dev/null @@ -1,25 +0,0 @@ -package golinters - -import ( - "github.com/jingyugao/rowserrcheck/passes/rowserr" - "golang.org/x/tools/go/analysis" - - "github.com/golangci/golangci-lint/pkg/config" - "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" -) - -func NewRowsErrCheck(settings *config.RowsErrCheckSettings) *goanalysis.Linter { - var pkgs []string - if settings != nil { - pkgs = settings.Packages - } - - a := rowserr.NewAnalyzer(pkgs...) - - return goanalysis.NewLinter( - a.Name, - "checks whether Rows.Err of rows is checked successfully", - []*analysis.Analyzer{a}, - nil, - ).WithLoadMode(goanalysis.LoadModeTypesInfo) -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/scopelint.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/scopelint.go deleted file mode 100644 index e6ef15ede..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/scopelint.go +++ /dev/null @@ -1,190 +0,0 @@ -package golinters - -import ( - "fmt" - "go/ast" - "go/token" - "sync" - - "golang.org/x/tools/go/analysis" - - "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" - "github.com/golangci/golangci-lint/pkg/lint/linter" - "github.com/golangci/golangci-lint/pkg/result" -) - -const scopelintName = "scopelint" - -//nolint:dupl -func NewScopelint() *goanalysis.Linter { - var mu sync.Mutex - var resIssues []goanalysis.Issue - - analyzer := &analysis.Analyzer{ - Name: scopelintName, - Doc: goanalysis.TheOnlyanalyzerDoc, - Run: func(pass *analysis.Pass) (any, error) { - issues := runScopeLint(pass) - - if len(issues) == 0 { - return nil, nil - } - - mu.Lock() - resIssues = append(resIssues, issues...) 
- mu.Unlock() - - return nil, nil - }, - } - - return goanalysis.NewLinter( - scopelintName, - "Scopelint checks for unpinned variables in go programs", - []*analysis.Analyzer{analyzer}, - nil, - ).WithIssuesReporter(func(*linter.Context) []goanalysis.Issue { - return resIssues - }).WithLoadMode(goanalysis.LoadModeSyntax) -} - -func runScopeLint(pass *analysis.Pass) []goanalysis.Issue { - var lintIssues []result.Issue - - for _, file := range pass.Files { - n := Node{ - fset: pass.Fset, - DangerObjects: map[*ast.Object]int{}, - UnsafeObjects: map[*ast.Object]int{}, - SkipFuncs: map[*ast.FuncLit]int{}, - issues: &lintIssues, - } - ast.Walk(&n, file) - } - - var issues []goanalysis.Issue - for i := range lintIssues { - issues = append(issues, goanalysis.NewIssue(&lintIssues[i], pass)) - } - - return issues -} - -// The code below is copy-pasted from https://github.com/kyoh86/scopelint 92cbe2cc9276abda0e309f52cc9e309d407f174e - -// Node represents a Node being linted. -type Node struct { - fset *token.FileSet - DangerObjects map[*ast.Object]int - UnsafeObjects map[*ast.Object]int - SkipFuncs map[*ast.FuncLit]int - issues *[]result.Issue -} - -// Visit method is invoked for each node encountered by Walk. -// If the result visitor w is not nil, Walk visits each of the children -// of node with the visitor w, followed by a call of w.Visit(nil). -// -//nolint:gocyclo,gocritic -func (f *Node) Visit(node ast.Node) ast.Visitor { - switch typedNode := node.(type) { - case *ast.ForStmt: - switch init := typedNode.Init.(type) { - case *ast.AssignStmt: - for _, lh := range init.Lhs { - switch tlh := lh.(type) { - case *ast.Ident: - f.UnsafeObjects[tlh.Obj] = 0 - } - } - } - - case *ast.RangeStmt: - // Memory variables declared in range statement - switch k := typedNode.Key.(type) { - case *ast.Ident: - f.UnsafeObjects[k.Obj] = 0 - } - switch v := typedNode.Value.(type) { - case *ast.Ident: - f.UnsafeObjects[v.Obj] = 0 - } - - case *ast.UnaryExpr: - if typedNode.Op == token.AND { - switch ident := typedNode.X.(type) { - case *ast.Ident: - if _, unsafe := f.UnsafeObjects[ident.Obj]; unsafe { - f.errorf(ident, "Using a reference for the variable on range scope %s", formatCode(ident.Name, nil)) - } - } - } - - case *ast.Ident: - if _, obj := f.DangerObjects[typedNode.Obj]; obj { - // It is the naked variable in scope of range statement. - f.errorf(node, "Using the variable on range scope %s in function literal", formatCode(typedNode.Name, nil)) - break - } - - case *ast.CallExpr: - // Ignore func literals that'll be called immediately. - switch funcLit := typedNode.Fun.(type) { - case *ast.FuncLit: - f.SkipFuncs[funcLit] = 0 - } - - case *ast.FuncLit: - if _, skip := f.SkipFuncs[typedNode]; !skip { - dangers := map[*ast.Object]int{} - for d := range f.DangerObjects { - dangers[d] = 0 - } - for u := range f.UnsafeObjects { - dangers[u] = 0 - f.UnsafeObjects[u]++ - } - return &Node{ - fset: f.fset, - DangerObjects: dangers, - UnsafeObjects: f.UnsafeObjects, - SkipFuncs: f.SkipFuncs, - issues: f.issues, - } - } - - case *ast.ReturnStmt: - unsafe := map[*ast.Object]int{} - for u := range f.UnsafeObjects { - if f.UnsafeObjects[u] == 0 { - continue - } - unsafe[u] = f.UnsafeObjects[u] - } - return &Node{ - fset: f.fset, - DangerObjects: f.DangerObjects, - UnsafeObjects: unsafe, - SkipFuncs: f.SkipFuncs, - issues: f.issues, - } - } - return f -} - -// The variadic arguments may start with link and category types, -// and must end with a format string and any arguments. 
-// -//nolint:interfacer -func (f *Node) errorf(n ast.Node, format string, args ...any) { - pos := f.fset.Position(n.Pos()) - f.errorAtf(pos, format, args...) -} - -func (f *Node) errorAtf(pos token.Position, format string, args ...any) { - *f.issues = append(*f.issues, result.Issue{ - Pos: pos, - Text: fmt.Sprintf(format, args...), - FromLinter: scopelintName, - }) -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/sloglint.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/sloglint.go deleted file mode 100644 index acea90d53..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/sloglint.go +++ /dev/null @@ -1,31 +0,0 @@ -package golinters - -import ( - "go-simpler.org/sloglint" - "golang.org/x/tools/go/analysis" - - "github.com/golangci/golangci-lint/pkg/config" - "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" -) - -func NewSlogLint(settings *config.SlogLintSettings) *goanalysis.Linter { - var opts *sloglint.Options - if settings != nil { - opts = &sloglint.Options{ - NoMixedArgs: settings.NoMixedArgs, - KVOnly: settings.KVOnly, - AttrOnly: settings.AttrOnly, - ContextOnly: settings.ContextOnly, - StaticMsg: settings.StaticMsg, - NoRawKeys: settings.NoRawKeys, - KeyNamingCase: settings.KeyNamingCase, - ArgsOnSepLines: settings.ArgsOnSepLines, - } - } - - a := sloglint.New(opts) - - return goanalysis. - NewLinter(a.Name, a.Doc, []*analysis.Analyzer{a}, nil). - WithLoadMode(goanalysis.LoadModeTypesInfo) -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/spancheck.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/spancheck.go deleted file mode 100644 index 934124477..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/spancheck.go +++ /dev/null @@ -1,29 +0,0 @@ -package golinters - -import ( - "github.com/jjti/go-spancheck" - "golang.org/x/tools/go/analysis" - - "github.com/golangci/golangci-lint/pkg/config" - "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" -) - -func NewSpancheck(settings *config.SpancheckSettings) *goanalysis.Linter { - cfg := spancheck.NewDefaultConfig() - - if settings != nil { - if settings.Checks != nil { - cfg.EnabledChecks = settings.Checks - } - - if settings.IgnoreCheckSignatures != nil { - cfg.IgnoreChecksSignaturesSlice = settings.IgnoreCheckSignatures - } - } - - a := spancheck.NewAnalyzerWithConfig(cfg) - - return goanalysis. - NewLinter(a.Name, a.Doc, []*analysis.Analyzer{a}, nil). 
- WithLoadMode(goanalysis.LoadModeTypesInfo) -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/sqlclosecheck.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/sqlclosecheck.go deleted file mode 100644 index e63b292a2..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/sqlclosecheck.go +++ /dev/null @@ -1,19 +0,0 @@ -package golinters - -import ( - "github.com/ryanrolds/sqlclosecheck/pkg/analyzer" - "golang.org/x/tools/go/analysis" - - "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" -) - -func NewSQLCloseCheck() *goanalysis.Linter { - a := analyzer.NewAnalyzer() - - return goanalysis.NewLinter( - a.Name, - a.Doc, - []*analysis.Analyzer{a}, - nil, - ).WithLoadMode(goanalysis.LoadModeTypesInfo) -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/staticcheck.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/staticcheck.go deleted file mode 100644 index 673484630..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/staticcheck.go +++ /dev/null @@ -1,21 +0,0 @@ -package golinters - -import ( - "honnef.co/go/tools/staticcheck" - - "github.com/golangci/golangci-lint/pkg/config" - "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" -) - -func NewStaticcheck(settings *config.StaticCheckSettings) *goanalysis.Linter { - cfg := staticCheckConfig(settings) - analyzers := setupStaticCheckAnalyzers(staticcheck.Analyzers, getGoVersion(settings), cfg.Checks) - - return goanalysis.NewLinter( - "staticcheck", - "It's a set of rules from staticcheck. It's not the same thing as the staticcheck binary."+ - " The author of staticcheck doesn't support or approve the use of staticcheck as a library inside golangci-lint.", - analyzers, - nil, - ).WithLoadMode(goanalysis.LoadModeTypesInfo) -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/staticcheck_common.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/staticcheck_common.go deleted file mode 100644 index 0eb21ec9c..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/staticcheck_common.go +++ /dev/null @@ -1,168 +0,0 @@ -package golinters - -import ( - "strings" - "unicode" - - "golang.org/x/tools/go/analysis" - "honnef.co/go/tools/analysis/lint" - scconfig "honnef.co/go/tools/config" - - "github.com/golangci/golangci-lint/pkg/config" - "github.com/golangci/golangci-lint/pkg/logutils" -) - -var debugf = logutils.Debug(logutils.DebugKeyMegacheck) - -func getGoVersion(settings *config.StaticCheckSettings) string { - var goVersion string - if settings != nil { - goVersion = settings.GoVersion - } - - if goVersion != "" { - return goVersion - } - - return "1.17" -} - -func setupStaticCheckAnalyzers(src []*lint.Analyzer, goVersion string, checks []string) []*analysis.Analyzer { - var names []string - for _, a := range src { - names = append(names, a.Analyzer.Name) - } - - filter := filterAnalyzerNames(names, checks) - - var ret []*analysis.Analyzer - for _, a := range src { - if filter[a.Analyzer.Name] { - setAnalyzerGoVersion(a.Analyzer, goVersion) - ret = append(ret, a.Analyzer) - } - } - - return ret -} - -func setAnalyzerGoVersion(a *analysis.Analyzer, goVersion string) { - if v := a.Flags.Lookup("go"); v != nil { - if err := v.Value.Set(goVersion); err != nil { - debugf("Failed to set go version: %s", err) - } - } -} - -func staticCheckConfig(settings *config.StaticCheckSettings) *scconfig.Config { - var cfg *scconfig.Config - - if settings == nil || !settings.HasConfiguration() { - return 
&scconfig.Config{ - Checks: []string{"*"}, // override for compatibility reason. Must drop in the next major version. - Initialisms: scconfig.DefaultConfig.Initialisms, - DotImportWhitelist: scconfig.DefaultConfig.DotImportWhitelist, - HTTPStatusCodeWhitelist: scconfig.DefaultConfig.HTTPStatusCodeWhitelist, - } - } - - cfg = &scconfig.Config{ - Checks: settings.Checks, - Initialisms: settings.Initialisms, - DotImportWhitelist: settings.DotImportWhitelist, - HTTPStatusCodeWhitelist: settings.HTTPStatusCodeWhitelist, - } - - if len(cfg.Checks) == 0 { - cfg.Checks = append(cfg.Checks, "*") // override for compatibility reason. Must drop in the next major version. - } - - if len(cfg.Initialisms) == 0 { - cfg.Initialisms = append(cfg.Initialisms, scconfig.DefaultConfig.Initialisms...) - } - - if len(cfg.DotImportWhitelist) == 0 { - cfg.DotImportWhitelist = append(cfg.DotImportWhitelist, scconfig.DefaultConfig.DotImportWhitelist...) - } - - if len(cfg.HTTPStatusCodeWhitelist) == 0 { - cfg.HTTPStatusCodeWhitelist = append(cfg.HTTPStatusCodeWhitelist, scconfig.DefaultConfig.HTTPStatusCodeWhitelist...) - } - - cfg.Checks = normalizeList(cfg.Checks) - cfg.Initialisms = normalizeList(cfg.Initialisms) - cfg.DotImportWhitelist = normalizeList(cfg.DotImportWhitelist) - cfg.HTTPStatusCodeWhitelist = normalizeList(cfg.HTTPStatusCodeWhitelist) - - return cfg -} - -// https://github.com/dominikh/go-tools/blob/9bf17c0388a65710524ba04c2d821469e639fdc2/lintcmd/lint.go#L437-L477 -// -//nolint:gocritic // Keep the original source code. -func filterAnalyzerNames(analyzers []string, checks []string) map[string]bool { - allowedChecks := map[string]bool{} - - for _, check := range checks { - b := true - if len(check) > 1 && check[0] == '-' { - b = false - check = check[1:] - } - - if check == "*" || check == "all" { - // Match all - for _, c := range analyzers { - allowedChecks[c] = b - } - } else if strings.HasSuffix(check, "*") { - // Glob - prefix := check[:len(check)-1] - isCat := strings.IndexFunc(prefix, func(r rune) bool { return unicode.IsNumber(r) }) == -1 - - for _, a := range analyzers { - idx := strings.IndexFunc(a, func(r rune) bool { return unicode.IsNumber(r) }) - if isCat { - // Glob is S*, which should match S1000 but not SA1000 - cat := a[:idx] - if prefix == cat { - allowedChecks[a] = b - } - } else { - // Glob is S1* - if strings.HasPrefix(a, prefix) { - allowedChecks[a] = b - } - } - } - } else { - // Literal check name - allowedChecks[check] = b - } - } - return allowedChecks -} - -// https://github.com/dominikh/go-tools/blob/9bf17c0388a65710524ba04c2d821469e639fdc2/config/config.go#L95-L116 -func normalizeList(list []string) []string { - if len(list) > 1 { - nlist := make([]string, 0, len(list)) - nlist = append(nlist, list[0]) - for i, el := range list[1:] { - if el != list[i] { - nlist = append(nlist, el) - } - } - list = nlist - } - - for _, el := range list { - if el == "inherit" { - // This should never happen, because the default config - // should not use "inherit" - panic(`unresolved "inherit"`) - } - } - - return list -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/structcheck.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/structcheck.go deleted file mode 100644 index f3df0c2f3..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/structcheck.go +++ /dev/null @@ -1,71 +0,0 @@ -package golinters - -import ( - "fmt" - "sync" - - structcheckAPI "github.com/golangci/check/cmd/structcheck" - "golang.org/x/tools/go/analysis" - - 
"github.com/golangci/golangci-lint/pkg/config" - "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" - "github.com/golangci/golangci-lint/pkg/lint/linter" - "github.com/golangci/golangci-lint/pkg/result" -) - -const structcheckName = "structcheck" - -//nolint:dupl -func NewStructcheck(settings *config.StructCheckSettings) *goanalysis.Linter { - var mu sync.Mutex - var resIssues []goanalysis.Issue - - analyzer := &analysis.Analyzer{ - Name: structcheckName, - Doc: goanalysis.TheOnlyanalyzerDoc, - Run: func(pass *analysis.Pass) (any, error) { - issues := runStructCheck(pass, settings) - - if len(issues) == 0 { - return nil, nil - } - - mu.Lock() - resIssues = append(resIssues, issues...) - mu.Unlock() - - return nil, nil - }, - } - - return goanalysis.NewLinter( - structcheckName, - "Finds unused struct fields", - []*analysis.Analyzer{analyzer}, - nil, - ).WithIssuesReporter(func(*linter.Context) []goanalysis.Issue { - return resIssues - }).WithLoadMode(goanalysis.LoadModeTypesInfo) -} - -//nolint:dupl -func runStructCheck(pass *analysis.Pass, settings *config.StructCheckSettings) []goanalysis.Issue { - prog := goanalysis.MakeFakeLoaderProgram(pass) - - lintIssues := structcheckAPI.Run(prog, settings.CheckExportedFields) - if len(lintIssues) == 0 { - return nil - } - - issues := make([]goanalysis.Issue, 0, len(lintIssues)) - - for _, i := range lintIssues { - issues = append(issues, goanalysis.NewIssue(&result.Issue{ - Pos: i.Pos, - Text: fmt.Sprintf("%s is unused", formatCode(i.FieldName, nil)), - FromLinter: structcheckName, - }, pass)) - } - - return issues -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/stylecheck.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/stylecheck.go deleted file mode 100644 index d9b0f87c8..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/stylecheck.go +++ /dev/null @@ -1,30 +0,0 @@ -package golinters - -import ( - "golang.org/x/tools/go/analysis" - scconfig "honnef.co/go/tools/config" - "honnef.co/go/tools/stylecheck" - - "github.com/golangci/golangci-lint/pkg/config" - "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" -) - -func NewStylecheck(settings *config.StaticCheckSettings) *goanalysis.Linter { - cfg := staticCheckConfig(settings) - - // `scconfig.Analyzer` is a singleton, then it's not possible to have more than one instance for all staticcheck "sub-linters". - // When we will merge the 4 "sub-linters", the problem will disappear: https://github.com/golangci/golangci-lint/issues/357 - // Currently only stylecheck analyzer has a configuration in staticcheck. 
- scconfig.Analyzer.Run = func(_ *analysis.Pass) (any, error) { - return cfg, nil - } - - analyzers := setupStaticCheckAnalyzers(stylecheck.Analyzers, getGoVersion(settings), cfg.Checks) - - return goanalysis.NewLinter( - "stylecheck", - "Stylecheck is a replacement for golint", - analyzers, - nil, - ).WithLoadMode(goanalysis.LoadModeTypesInfo) -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/tagalign.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/tagalign.go deleted file mode 100644 index c23838f70..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/tagalign.go +++ /dev/null @@ -1,75 +0,0 @@ -package golinters - -import ( - "sync" - - "github.com/4meepo/tagalign" - "golang.org/x/tools/go/analysis" - - "github.com/golangci/golangci-lint/pkg/config" - "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" - "github.com/golangci/golangci-lint/pkg/lint/linter" - "github.com/golangci/golangci-lint/pkg/result" -) - -func NewTagAlign(settings *config.TagAlignSettings) *goanalysis.Linter { - var mu sync.Mutex - var resIssues []goanalysis.Issue - - options := []tagalign.Option{tagalign.WithMode(tagalign.GolangciLintMode)} - - if settings != nil { - options = append(options, tagalign.WithAlign(settings.Align)) - - if settings.Sort || len(settings.Order) > 0 { - options = append(options, tagalign.WithSort(settings.Order...)) - } - - // Strict style will be applied only if Align and Sort are enabled together. - if settings.Strict && settings.Align && settings.Sort { - options = append(options, tagalign.WithStrictStyle()) - } - } - - analyzer := tagalign.NewAnalyzer(options...) - analyzer.Run = func(pass *analysis.Pass) (any, error) { - taIssues := tagalign.Run(pass, options...) - - issues := make([]goanalysis.Issue, len(taIssues)) - for i, issue := range taIssues { - report := &result.Issue{ - FromLinter: analyzer.Name, - Pos: issue.Pos, - Text: issue.Message, - Replacement: &result.Replacement{ - Inline: &result.InlineFix{ - StartCol: issue.InlineFix.StartCol, - Length: issue.InlineFix.Length, - NewString: issue.InlineFix.NewString, - }, - }, - } - - issues[i] = goanalysis.NewIssue(report, pass) - } - - if len(issues) == 0 { - return nil, nil - } - - mu.Lock() - resIssues = append(resIssues, issues...) 
- mu.Unlock() - - return nil, nil - } - - return goanalysis.NewLinter( - analyzer.Name, - analyzer.Doc, - []*analysis.Analyzer{analyzer}, - nil, - ).WithIssuesReporter(func(*linter.Context) []goanalysis.Issue { - return resIssues - }).WithLoadMode(goanalysis.LoadModeSyntax) -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/tagliatelle.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/tagliatelle.go deleted file mode 100644 index 67c14cbd4..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/tagliatelle.go +++ /dev/null @@ -1,35 +0,0 @@ -package golinters - -import ( - "github.com/ldez/tagliatelle" - "golang.org/x/tools/go/analysis" - - "github.com/golangci/golangci-lint/pkg/config" - "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" -) - -func NewTagliatelle(settings *config.TagliatelleSettings) *goanalysis.Linter { - cfg := tagliatelle.Config{ - Rules: map[string]string{ - "json": "camel", - "yaml": "camel", - "header": "header", - }, - } - - if settings != nil { - for k, v := range settings.Case.Rules { - cfg.Rules[k] = v - } - cfg.UseFieldName = settings.Case.UseFieldName - } - - a := tagliatelle.New(cfg) - - return goanalysis.NewLinter( - a.Name, - a.Doc, - []*analysis.Analyzer{a}, - nil, - ).WithLoadMode(goanalysis.LoadModeSyntax) -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/tenv.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/tenv.go deleted file mode 100644 index 6c6bd3186..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/tenv.go +++ /dev/null @@ -1,29 +0,0 @@ -package golinters - -import ( - "github.com/sivchari/tenv" - "golang.org/x/tools/go/analysis" - - "github.com/golangci/golangci-lint/pkg/config" - "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" -) - -func NewTenv(settings *config.TenvSettings) *goanalysis.Linter { - a := tenv.Analyzer - - var cfg map[string]map[string]any - if settings != nil { - cfg = map[string]map[string]any{ - a.Name: { - tenv.A: settings.All, - }, - } - } - - return goanalysis.NewLinter( - a.Name, - a.Doc, - []*analysis.Analyzer{a}, - cfg, - ).WithLoadMode(goanalysis.LoadModeSyntax) -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/testableexamples.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/testableexamples.go deleted file mode 100644 index 3333593a6..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/testableexamples.go +++ /dev/null @@ -1,19 +0,0 @@ -package golinters - -import ( - "github.com/maratori/testableexamples/pkg/testableexamples" - "golang.org/x/tools/go/analysis" - - "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" -) - -func NewTestableexamples() *goanalysis.Linter { - a := testableexamples.NewAnalyzer() - - return goanalysis.NewLinter( - a.Name, - a.Doc, - []*analysis.Analyzer{a}, - nil, - ).WithLoadMode(goanalysis.LoadModeSyntax) -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/testifylint.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/testifylint.go deleted file mode 100644 index d9cfc04f6..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/testifylint.go +++ /dev/null @@ -1,44 +0,0 @@ -package golinters - -import ( - "github.com/Antonboom/testifylint/analyzer" - "golang.org/x/tools/go/analysis" - - "github.com/golangci/golangci-lint/pkg/config" - "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" -) - -func NewTestifylint(settings *config.TestifylintSettings) 
*goanalysis.Linter { - a := analyzer.New() - - cfg := make(map[string]map[string]any) - if settings != nil { - cfg[a.Name] = map[string]any{ - "enable-all": settings.EnableAll, - "disable-all": settings.DisableAll, - } - if len(settings.EnabledCheckers) > 0 { - cfg[a.Name]["enable"] = settings.EnabledCheckers - } - if len(settings.DisabledCheckers) > 0 { - cfg[a.Name]["disable"] = settings.DisabledCheckers - } - - if p := settings.ExpectedActual.ExpVarPattern; p != "" { - cfg[a.Name]["expected-actual.pattern"] = p - } - if p := settings.RequireError.FnPattern; p != "" { - cfg[a.Name]["require-error.fn-pattern"] = p - } - if m := settings.SuiteExtraAssertCall.Mode; m != "" { - cfg[a.Name]["suite-extra-assert-call.mode"] = m - } - } - - return goanalysis.NewLinter( - a.Name, - a.Doc, - []*analysis.Analyzer{a}, - cfg, - ).WithLoadMode(goanalysis.LoadModeTypesInfo) -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/testpackage.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/testpackage.go deleted file mode 100644 index db1ead966..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/testpackage.go +++ /dev/null @@ -1,28 +0,0 @@ -package golinters - -import ( - "strings" - - "github.com/maratori/testpackage/pkg/testpackage" - "golang.org/x/tools/go/analysis" - - "github.com/golangci/golangci-lint/pkg/config" - "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" -) - -func NewTestpackage(cfg *config.TestpackageSettings) *goanalysis.Linter { - var a = testpackage.NewAnalyzer() - - var settings map[string]map[string]any - if cfg != nil { - settings = map[string]map[string]any{ - a.Name: { - testpackage.SkipRegexpFlagName: cfg.SkipRegexp, - testpackage.AllowPackagesFlagName: strings.Join(cfg.AllowPackages, ","), - }, - } - } - - return goanalysis.NewLinter(a.Name, a.Doc, []*analysis.Analyzer{a}, settings). 
- WithLoadMode(goanalysis.LoadModeSyntax) -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/thelper.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/thelper.go deleted file mode 100644 index 1ae85ef42..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/thelper.go +++ /dev/null @@ -1,80 +0,0 @@ -package golinters - -import ( - "strings" - - "github.com/kulti/thelper/pkg/analyzer" - "golang.org/x/exp/maps" - "golang.org/x/tools/go/analysis" - - "github.com/golangci/golangci-lint/pkg/config" - "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" -) - -func NewThelper(cfg *config.ThelperSettings) *goanalysis.Linter { - a := analyzer.NewAnalyzer() - - opts := map[string]struct{}{ - "t_name": {}, - "t_begin": {}, - "t_first": {}, - - "f_name": {}, - "f_begin": {}, - "f_first": {}, - - "b_name": {}, - "b_begin": {}, - "b_first": {}, - - "tb_name": {}, - "tb_begin": {}, - "tb_first": {}, - } - - if cfg != nil { - applyTHelperOptions(cfg.Test, "t_", opts) - applyTHelperOptions(cfg.Fuzz, "f_", opts) - applyTHelperOptions(cfg.Benchmark, "b_", opts) - applyTHelperOptions(cfg.TB, "tb_", opts) - } - - if len(opts) == 0 { - linterLogger.Fatalf("thelper: at least one option must be enabled") - } - - args := maps.Keys(opts) - - cfgMap := map[string]map[string]any{ - a.Name: { - "checks": strings.Join(args, ","), - }, - } - - return goanalysis.NewLinter( - a.Name, - a.Doc, - []*analysis.Analyzer{a}, - cfgMap, - ).WithLoadMode(goanalysis.LoadModeTypesInfo) -} - -func applyTHelperOptions(o config.ThelperOptions, prefix string, opts map[string]struct{}) { - if o.Name != nil { - if !*o.Name { - delete(opts, prefix+"name") - } - } - - if o.Begin != nil { - if !*o.Begin { - delete(opts, prefix+"begin") - } - } - - if o.First != nil { - if !*o.First { - delete(opts, prefix+"first") - } - } -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/tparallel.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/tparallel.go deleted file mode 100644 index 643f2c271..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/tparallel.go +++ /dev/null @@ -1,18 +0,0 @@ -package golinters - -import ( - "github.com/moricho/tparallel" - "golang.org/x/tools/go/analysis" - - "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" -) - -func NewTparallel() *goanalysis.Linter { - a := tparallel.Analyzer - return goanalysis.NewLinter( - a.Name, - a.Doc, - []*analysis.Analyzer{a}, - nil, - ).WithLoadMode(goanalysis.LoadModeTypesInfo) -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/typecheck.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/typecheck.go deleted file mode 100644 index e9b26ef48..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/typecheck.go +++ /dev/null @@ -1,24 +0,0 @@ -package golinters - -import ( - "golang.org/x/tools/go/analysis" - - "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" -) - -func NewTypecheck() *goanalysis.Linter { - const linterName = "typecheck" - - analyzer := &analysis.Analyzer{ - Name: linterName, - Doc: goanalysis.TheOnlyanalyzerDoc, - Run: goanalysis.DummyRun, - } - - return goanalysis.NewLinter( - linterName, - "Like the front-end of a Go compiler, parses and type-checks Go code", - []*analysis.Analyzer{analyzer}, - nil, - ).WithLoadMode(goanalysis.LoadModeTypesInfo) -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/unconvert.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/unconvert.go deleted 
file mode 100644 index aad858dfd..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/unconvert.go +++ /dev/null @@ -1,67 +0,0 @@ -package golinters - -import ( - "sync" - - unconvertAPI "github.com/golangci/unconvert" - "golang.org/x/tools/go/analysis" - - "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" - "github.com/golangci/golangci-lint/pkg/lint/linter" - "github.com/golangci/golangci-lint/pkg/result" -) - -const unconvertName = "unconvert" - -//nolint:dupl -func NewUnconvert() *goanalysis.Linter { - var mu sync.Mutex - var resIssues []goanalysis.Issue - - analyzer := &analysis.Analyzer{ - Name: unconvertName, - Doc: goanalysis.TheOnlyanalyzerDoc, - Run: func(pass *analysis.Pass) (any, error) { - issues := runUnconvert(pass) - - if len(issues) == 0 { - return nil, nil - } - - mu.Lock() - resIssues = append(resIssues, issues...) - mu.Unlock() - - return nil, nil - }, - } - - return goanalysis.NewLinter( - unconvertName, - "Remove unnecessary type conversions", - []*analysis.Analyzer{analyzer}, - nil, - ).WithIssuesReporter(func(*linter.Context) []goanalysis.Issue { - return resIssues - }).WithLoadMode(goanalysis.LoadModeTypesInfo) -} - -func runUnconvert(pass *analysis.Pass) []goanalysis.Issue { - prog := goanalysis.MakeFakeLoaderProgram(pass) - - positions := unconvertAPI.Run(prog) - if len(positions) == 0 { - return nil - } - - issues := make([]goanalysis.Issue, 0, len(positions)) - for _, pos := range positions { - issues = append(issues, goanalysis.NewIssue(&result.Issue{ - Pos: pos, - Text: "unnecessary conversion", - FromLinter: unconvertName, - }, pass)) - } - - return issues -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/unparam.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/unparam.go deleted file mode 100644 index 4078d9498..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/unparam.go +++ /dev/null @@ -1,90 +0,0 @@ -package golinters - -import ( - "sync" - - "golang.org/x/tools/go/analysis" - "golang.org/x/tools/go/analysis/passes/buildssa" - "golang.org/x/tools/go/packages" - "mvdan.cc/unparam/check" - - "github.com/golangci/golangci-lint/pkg/config" - "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" - "github.com/golangci/golangci-lint/pkg/lint/linter" - "github.com/golangci/golangci-lint/pkg/result" -) - -const unparamName = "unparam" - -func NewUnparam(settings *config.UnparamSettings) *goanalysis.Linter { - var mu sync.Mutex - var resIssues []goanalysis.Issue - - analyzer := &analysis.Analyzer{ - Name: unparamName, - Doc: goanalysis.TheOnlyanalyzerDoc, - Requires: []*analysis.Analyzer{buildssa.Analyzer}, - Run: func(pass *analysis.Pass) (any, error) { - issues, err := runUnparam(pass, settings) - if err != nil { - return nil, err - } - - if len(issues) == 0 { - return nil, nil - } - - mu.Lock() - resIssues = append(resIssues, issues...) 
- mu.Unlock() - - return nil, nil - }, - } - - return goanalysis.NewLinter( - unparamName, - "Reports unused function parameters", - []*analysis.Analyzer{analyzer}, - nil, - ).WithContextSetter(func(lintCtx *linter.Context) { - if settings.Algo != "cha" { - lintCtx.Log.Warnf("`linters-settings.unparam.algo` isn't supported by the newest `unparam`") - } - }).WithIssuesReporter(func(*linter.Context) []goanalysis.Issue { - return resIssues - }).WithLoadMode(goanalysis.LoadModeTypesInfo) -} - -func runUnparam(pass *analysis.Pass, settings *config.UnparamSettings) ([]goanalysis.Issue, error) { - ssa := pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA) - ssaPkg := ssa.Pkg - - pkg := &packages.Package{ - Fset: pass.Fset, - Syntax: pass.Files, - Types: pass.Pkg, - TypesInfo: pass.TypesInfo, - } - - c := &check.Checker{} - c.CheckExportedFuncs(settings.CheckExported) - c.Packages([]*packages.Package{pkg}) - c.ProgramSSA(ssaPkg.Prog) - - unparamIssues, err := c.Check() - if err != nil { - return nil, err - } - - var issues []goanalysis.Issue - for _, i := range unparamIssues { - issues = append(issues, goanalysis.NewIssue(&result.Issue{ - Pos: pass.Fset.Position(i.Pos()), - Text: i.Message(), - FromLinter: unparamName, - }, pass)) - } - - return issues, nil -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/unused.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/unused.go deleted file mode 100644 index a93061c96..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/unused.go +++ /dev/null @@ -1,112 +0,0 @@ -package golinters - -import ( - "fmt" - "sync" - - "golang.org/x/tools/go/analysis" - "honnef.co/go/tools/analysis/facts/directives" - "honnef.co/go/tools/analysis/facts/generated" - "honnef.co/go/tools/analysis/lint" - "honnef.co/go/tools/unused" - - "github.com/golangci/golangci-lint/pkg/config" - "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" - "github.com/golangci/golangci-lint/pkg/lint/linter" - "github.com/golangci/golangci-lint/pkg/result" -) - -const unusedName = "unused" - -func NewUnused(settings *config.UnusedSettings, scSettings *config.StaticCheckSettings) *goanalysis.Linter { - var mu sync.Mutex - var resIssues []goanalysis.Issue - - analyzer := &analysis.Analyzer{ - Name: unusedName, - Doc: unused.Analyzer.Analyzer.Doc, - Requires: unused.Analyzer.Analyzer.Requires, - Run: func(pass *analysis.Pass) (any, error) { - issues := runUnused(pass, settings) - if len(issues) == 0 { - return nil, nil - } - - mu.Lock() - resIssues = append(resIssues, issues...) 
- mu.Unlock() - - return nil, nil - }, - } - - setAnalyzerGoVersion(analyzer, getGoVersion(scSettings)) - - return goanalysis.NewLinter( - unusedName, - "Checks Go code for unused constants, variables, functions and types", - []*analysis.Analyzer{analyzer}, - nil, - ).WithIssuesReporter(func(_ *linter.Context) []goanalysis.Issue { - return resIssues - }).WithLoadMode(goanalysis.LoadModeTypesInfo) -} - -func runUnused(pass *analysis.Pass, cfg *config.UnusedSettings) []goanalysis.Issue { - res := getUnusedResults(pass, cfg) - - used := make(map[string]bool) - for _, obj := range res.Used { - used[fmt.Sprintf("%s %d %s", obj.Position.Filename, obj.Position.Line, obj.Name)] = true - } - - var issues []goanalysis.Issue - - // Inspired by https://github.com/dominikh/go-tools/blob/d694aadcb1f50c2d8ac0a1dd06217ebb9f654764/lintcmd/lint.go#L177-L197 - for _, object := range res.Unused { - if object.Kind == "type param" { - continue - } - - key := fmt.Sprintf("%s %d %s", object.Position.Filename, object.Position.Line, object.Name) - if used[key] { - continue - } - - issue := goanalysis.NewIssue(&result.Issue{ - FromLinter: unusedName, - Text: fmt.Sprintf("%s %s is unused", object.Kind, object.Name), - Pos: object.Position, - }, pass) - - issues = append(issues, issue) - } - - return issues -} - -func getUnusedResults(pass *analysis.Pass, settings *config.UnusedSettings) unused.Result { - opts := unused.Options{ - FieldWritesAreUses: settings.FieldWritesAreUses, - PostStatementsAreReads: settings.PostStatementsAreReads, - ExportedIsUsed: settings.ExportedIsUsed, - ExportedFieldsAreUsed: settings.ExportedFieldsAreUsed, - ParametersAreUsed: settings.ParametersAreUsed, - LocalVariablesAreUsed: settings.LocalVariablesAreUsed, - GeneratedIsUsed: settings.GeneratedIsUsed, - } - - // ref: https://github.com/dominikh/go-tools/blob/4ec1f474ca6c0feb8e10a8fcca4ab95f5b5b9881/internal/cmd/unused/unused.go#L68 - nodes := unused.Graph(pass.Fset, - pass.Files, - pass.Pkg, - pass.TypesInfo, - pass.ResultOf[directives.Analyzer].([]lint.Directive), - pass.ResultOf[generated.Analyzer].(map[string]generated.Generator), - opts, - ) - - sg := unused.SerializedGraph{} - sg.Merge(nodes) - return sg.Results() -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/usestdlibvars.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/usestdlibvars.go deleted file mode 100644 index 663a841ac..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/usestdlibvars.go +++ /dev/null @@ -1,38 +0,0 @@ -package golinters - -import ( - "github.com/sashamelentyev/usestdlibvars/pkg/analyzer" - "golang.org/x/tools/go/analysis" - - "github.com/golangci/golangci-lint/pkg/config" - "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" -) - -func NewUseStdlibVars(cfg *config.UseStdlibVarsSettings) *goanalysis.Linter { - a := analyzer.New() - - cfgMap := make(map[string]map[string]any) - if cfg != nil { - cfgMap[a.Name] = map[string]any{ - analyzer.ConstantKindFlag: cfg.ConstantKind, - analyzer.CryptoHashFlag: cfg.CryptoHash, - analyzer.HTTPMethodFlag: cfg.HTTPMethod, - analyzer.HTTPStatusCodeFlag: cfg.HTTPStatusCode, - analyzer.OSDevNullFlag: cfg.OSDevNull, - analyzer.RPCDefaultPathFlag: cfg.DefaultRPCPath, - analyzer.SQLIsolationLevelFlag: cfg.SQLIsolationLevel, - analyzer.SyslogPriorityFlag: cfg.SyslogPriority, - analyzer.TimeLayoutFlag: cfg.TimeLayout, - analyzer.TimeMonthFlag: cfg.TimeMonth, - analyzer.TimeWeekdayFlag: cfg.TimeWeekday, - analyzer.TLSSignatureSchemeFlag: cfg.TLSSignatureScheme, - } - 
}
-
-	return goanalysis.NewLinter(
-		a.Name,
-		a.Doc,
-		[]*analysis.Analyzer{a},
-		cfgMap,
-	).WithLoadMode(goanalysis.LoadModeSyntax)
-}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/util.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/util.go
deleted file mode 100644
index 1044567a9..000000000
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/util.go
+++ /dev/null
@@ -1,41 +0,0 @@
-package golinters
-
-import (
-	"fmt"
-	"path/filepath"
-	"strings"
-
-	"golang.org/x/tools/go/analysis"
-
-	"github.com/golangci/golangci-lint/pkg/config"
-)
-
-func formatCode(code string, _ *config.Config) string {
-	if strings.Contains(code, "`") {
-		return code // TODO: properly escape or remove
-	}
-
-	return fmt.Sprintf("`%s`", code)
-}
-
-func formatCodeBlock(code string, _ *config.Config) string {
-	if strings.Contains(code, "`") {
-		return code // TODO: properly escape or remove
-	}
-
-	return fmt.Sprintf("```\n%s\n```", code)
-}
-
-func getFileNames(pass *analysis.Pass) []string {
-	var fileNames []string
-	for _, f := range pass.Files {
-		fileName := pass.Fset.PositionFor(f.Pos(), true).Filename
-		ext := filepath.Ext(fileName)
-		if ext != "" && ext != ".go" {
-			// position has been adjusted to a non-go file, revert to original file
-			fileName = pass.Fset.PositionFor(f.Pos(), false).Filename
-		}
-		fileNames = append(fileNames, fileName)
-	}
-	return fileNames
-}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/varcheck.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/varcheck.go
deleted file mode 100644
index ea735672f..000000000
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/varcheck.go
+++ /dev/null
@@ -1,72 +0,0 @@
-package golinters
-
-import (
-	"fmt"
-	"sync"
-
-	varcheckAPI "github.com/golangci/check/cmd/varcheck"
-	"golang.org/x/tools/go/analysis"
-
-	"github.com/golangci/golangci-lint/pkg/config"
-	"github.com/golangci/golangci-lint/pkg/golinters/goanalysis"
-	"github.com/golangci/golangci-lint/pkg/lint/linter"
-	"github.com/golangci/golangci-lint/pkg/result"
-)
-
-const varcheckName = "varcheck"
-
-func NewVarcheck(settings *config.VarCheckSettings) *goanalysis.Linter {
-	var mu sync.Mutex
-	var resIssues []goanalysis.Issue
-
-	analyzer := &analysis.Analyzer{
-		Name: varcheckName,
-		Doc:  goanalysis.TheOnlyanalyzerDoc,
-		Run:  goanalysis.DummyRun,
-	}
-
-	return goanalysis.NewLinter(
-		varcheckName,
-		"Finds unused global variables and constants",
-		[]*analysis.Analyzer{analyzer},
-		nil,
-	).WithContextSetter(func(_ *linter.Context) {
-		analyzer.Run = func(pass *analysis.Pass) (any, error) {
-			issues := runVarCheck(pass, settings)
-
-			if len(issues) == 0 {
-				return nil, nil
-			}
-
-			mu.Lock()
-			resIssues = append(resIssues, issues...)
-			mu.Unlock()
-
-			return nil, nil
-		}
-	}).WithIssuesReporter(func(*linter.Context) []goanalysis.Issue {
-		return resIssues
-	}).WithLoadMode(goanalysis.LoadModeTypesInfo)
-}
-
-//nolint:dupl
-func runVarCheck(pass *analysis.Pass, settings *config.VarCheckSettings) []goanalysis.Issue {
-	prog := goanalysis.MakeFakeLoaderProgram(pass)
-
-	lintIssues := varcheckAPI.Run(prog, settings.CheckExportedFields)
-	if len(lintIssues) == 0 {
-		return nil
-	}
-
-	issues := make([]goanalysis.Issue, 0, len(lintIssues))
-
-	for _, i := range lintIssues {
-		issues = append(issues, goanalysis.NewIssue(&result.Issue{
-			Pos:        i.Pos,
-			Text:       fmt.Sprintf("%s is unused", formatCode(i.VarName, nil)),
-			FromLinter: varcheckName,
-		}, pass))
-	}
-
-	return issues
-}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/varnamelen.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/varnamelen.go
deleted file mode 100644
index 688dfa804..000000000
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/varnamelen.go
+++ /dev/null
@@ -1,46 +0,0 @@
-package golinters
-
-import (
-	"strconv"
-	"strings"
-
-	"github.com/blizzy78/varnamelen"
-	"golang.org/x/tools/go/analysis"
-
-	"github.com/golangci/golangci-lint/pkg/config"
-	"github.com/golangci/golangci-lint/pkg/golinters/goanalysis"
-)
-
-func NewVarnamelen(settings *config.VarnamelenSettings) *goanalysis.Linter {
-	analyzer := varnamelen.NewAnalyzer()
-	cfg := map[string]map[string]any{}
-
-	if settings != nil {
-		vnlCfg := map[string]any{
-			"checkReceiver":      strconv.FormatBool(settings.CheckReceiver),
-			"checkReturn":        strconv.FormatBool(settings.CheckReturn),
-			"checkTypeParam":     strconv.FormatBool(settings.CheckTypeParam),
-			"ignoreNames":        strings.Join(settings.IgnoreNames, ","),
-			"ignoreTypeAssertOk": strconv.FormatBool(settings.IgnoreTypeAssertOk),
-			"ignoreMapIndexOk":   strconv.FormatBool(settings.IgnoreMapIndexOk),
-			"ignoreChanRecvOk":   strconv.FormatBool(settings.IgnoreChanRecvOk),
-			"ignoreDecls":        strings.Join(settings.IgnoreDecls, ","),
-		}
-
-		if settings.MaxDistance > 0 {
-			vnlCfg["maxDistance"] = strconv.Itoa(settings.MaxDistance)
-		}
-		if settings.MinNameLength > 0 {
-			vnlCfg["minNameLength"] = strconv.Itoa(settings.MinNameLength)
-		}
-
-		cfg[analyzer.Name] = vnlCfg
-	}
-
-	return goanalysis.NewLinter(
-		analyzer.Name,
-		"checks that the length of a variable's name matches its scope",
-		[]*analysis.Analyzer{analyzer},
-		cfg,
-	).WithLoadMode(goanalysis.LoadModeTypesInfo)
-}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/wastedassign.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/wastedassign.go
deleted file mode 100644
index 9038c827d..000000000
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/wastedassign.go
+++ /dev/null
@@ -1,19 +0,0 @@
-package golinters
-
-import (
-	"github.com/sanposhiho/wastedassign/v2"
-	"golang.org/x/tools/go/analysis"
-
-	"github.com/golangci/golangci-lint/pkg/golinters/goanalysis"
-)
-
-func NewWastedAssign() *goanalysis.Linter {
-	a := wastedassign.Analyzer
-
-	return goanalysis.NewLinter(
-		a.Name,
-		"Finds wasted assignment statements",
-		[]*analysis.Analyzer{a},
-		nil,
-	).WithLoadMode(goanalysis.LoadModeTypesInfo)
-}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/whitespace.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/whitespace.go
deleted file mode 100644
index 5487b1016..000000000
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/whitespace.go
+++ /dev/null
@@ -1,102 +0,0 @@
-package golinters
-
-import
( - "fmt" - "sync" - - "github.com/ultraware/whitespace" - "golang.org/x/tools/go/analysis" - - "github.com/golangci/golangci-lint/pkg/config" - "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" - "github.com/golangci/golangci-lint/pkg/lint/linter" - "github.com/golangci/golangci-lint/pkg/result" -) - -const whitespaceName = "whitespace" - -func NewWhitespace(settings *config.WhitespaceSettings) *goanalysis.Linter { - var mu sync.Mutex - var resIssues []goanalysis.Issue - - var wsSettings whitespace.Settings - if settings != nil { - wsSettings = whitespace.Settings{ - Mode: whitespace.RunningModeGolangCI, - MultiIf: settings.MultiIf, - MultiFunc: settings.MultiFunc, - } - } - - a := whitespace.NewAnalyzer(&wsSettings) - - return goanalysis.NewLinter( - a.Name, - a.Doc, - []*analysis.Analyzer{a}, - nil, - ).WithContextSetter(func(_ *linter.Context) { - a.Run = func(pass *analysis.Pass) (any, error) { - issues, err := runWhitespace(pass, wsSettings) - if err != nil { - return nil, err - } - - if len(issues) == 0 { - return nil, nil - } - - mu.Lock() - resIssues = append(resIssues, issues...) - mu.Unlock() - - return nil, nil - } - }).WithIssuesReporter(func(*linter.Context) []goanalysis.Issue { - return resIssues - }).WithLoadMode(goanalysis.LoadModeSyntax) -} - -func runWhitespace(pass *analysis.Pass, wsSettings whitespace.Settings) ([]goanalysis.Issue, error) { - lintIssues := whitespace.Run(pass, &wsSettings) - - issues := make([]goanalysis.Issue, len(lintIssues)) - for i, issue := range lintIssues { - report := &result.Issue{ - FromLinter: whitespaceName, - Pos: pass.Fset.PositionFor(issue.Diagnostic, false), - Text: issue.Message, - } - - switch issue.MessageType { - case whitespace.MessageTypeRemove: - if len(issue.LineNumbers) == 0 { - continue - } - - report.LineRange = &result.Range{ - From: issue.LineNumbers[0], - To: issue.LineNumbers[len(issue.LineNumbers)-1], - } - - report.Replacement = &result.Replacement{NeedOnlyDelete: true} - - case whitespace.MessageTypeAdd: - report.Pos = pass.Fset.PositionFor(issue.FixStart, false) - report.Replacement = &result.Replacement{ - Inline: &result.InlineFix{ - StartCol: 0, - Length: 1, - NewString: "\n\t", - }, - } - - default: - return nil, fmt.Errorf("unknown message type: %v", issue.MessageType) - } - - issues[i] = goanalysis.NewIssue(report, pass) - } - - return issues, nil -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/wrapcheck.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/wrapcheck.go deleted file mode 100644 index 6d25db427..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/wrapcheck.go +++ /dev/null @@ -1,36 +0,0 @@ -package golinters - -import ( - "github.com/tomarrell/wrapcheck/v2/wrapcheck" - "golang.org/x/tools/go/analysis" - - "github.com/golangci/golangci-lint/pkg/config" - "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" -) - -func NewWrapcheck(settings *config.WrapcheckSettings) *goanalysis.Linter { - cfg := wrapcheck.NewDefaultConfig() - if settings != nil { - if len(settings.IgnoreSigs) != 0 { - cfg.IgnoreSigs = settings.IgnoreSigs - } - if len(settings.IgnoreSigRegexps) != 0 { - cfg.IgnoreSigRegexps = settings.IgnoreSigRegexps - } - if len(settings.IgnorePackageGlobs) != 0 { - cfg.IgnorePackageGlobs = settings.IgnorePackageGlobs - } - if len(settings.IgnoreInterfaceRegexps) != 0 { - cfg.IgnoreInterfaceRegexps = settings.IgnoreInterfaceRegexps - } - } - - a := wrapcheck.NewAnalyzer(cfg) - - return goanalysis.NewLinter( - a.Name, - a.Doc, - 
[]*analysis.Analyzer{a}, - nil, - ).WithLoadMode(goanalysis.LoadModeTypesInfo) -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/wsl.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/wsl.go deleted file mode 100644 index 3b090a686..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/wsl.go +++ /dev/null @@ -1,39 +0,0 @@ -package golinters - -import ( - "github.com/bombsimon/wsl/v4" - "golang.org/x/tools/go/analysis" - - "github.com/golangci/golangci-lint/pkg/config" - "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" -) - -func NewWSL(settings *config.WSLSettings) *goanalysis.Linter { - var conf *wsl.Configuration - if settings != nil { - conf = &wsl.Configuration{ - StrictAppend: settings.StrictAppend, - AllowAssignAndCallCuddle: settings.AllowAssignAndCallCuddle, - AllowAssignAndAnythingCuddle: settings.AllowAssignAndAnythingCuddle, - AllowMultiLineAssignCuddle: settings.AllowMultiLineAssignCuddle, - ForceCaseTrailingWhitespaceLimit: settings.ForceCaseTrailingWhitespaceLimit, - AllowTrailingComment: settings.AllowTrailingComment, - AllowSeparatedLeadingComment: settings.AllowSeparatedLeadingComment, - AllowCuddleDeclaration: settings.AllowCuddleDeclaration, - AllowCuddleWithCalls: settings.AllowCuddleWithCalls, - AllowCuddleWithRHS: settings.AllowCuddleWithRHS, - ForceCuddleErrCheckAndAssign: settings.ForceCuddleErrCheckAndAssign, - ErrorVariableNames: settings.ErrorVariableNames, - ForceExclusiveShortDeclarations: settings.ForceExclusiveShortDeclarations, - } - } - - a := wsl.NewAnalyzer(conf) - - return goanalysis.NewLinter( - a.Name, - a.Doc, - []*analysis.Analyzer{a}, - nil, - ).WithLoadMode(goanalysis.LoadModeSyntax) -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/zerologlint.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/zerologlint.go deleted file mode 100644 index edde72665..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/zerologlint.go +++ /dev/null @@ -1,19 +0,0 @@ -package golinters - -import ( - "github.com/ykadowak/zerologlint" - "golang.org/x/tools/go/analysis" - - "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" -) - -func NewZerologLint() *goanalysis.Linter { - a := zerologlint.Analyzer - - return goanalysis.NewLinter( - a.Name, - a.Doc, - []*analysis.Analyzer{a}, - nil, - ).WithLoadMode(goanalysis.LoadModeTypesInfo) -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/goutil/env.go b/vendor/github.com/golangci/golangci-lint/pkg/goutil/env.go deleted file mode 100644 index 93922f85a..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/goutil/env.go +++ /dev/null @@ -1,60 +0,0 @@ -package goutil - -import ( - "context" - "encoding/json" - "fmt" - "os" - "os/exec" - "strings" - "time" - - "github.com/golangci/golangci-lint/pkg/logutils" -) - -type EnvKey string - -const ( - EnvGoCache EnvKey = "GOCACHE" - EnvGoRoot EnvKey = "GOROOT" -) - -type Env struct { - vars map[string]string - log logutils.Log - debugf logutils.DebugFunc -} - -func NewEnv(log logutils.Log) *Env { - return &Env{ - vars: map[string]string{}, - log: log, - debugf: logutils.Debug(logutils.DebugKeyEnv), - } -} - -func (e *Env) Discover(ctx context.Context) error { - startedAt := time.Now() - args := []string{"env", "-json"} - args = append(args, string(EnvGoCache), string(EnvGoRoot)) - out, err := exec.CommandContext(ctx, "go", args...).Output() - if err != nil { - return fmt.Errorf("failed to run 'go env': %w", err) - } - - if err = json.Unmarshal(out, 
&e.vars); err != nil { - return fmt.Errorf("failed to parse 'go %s' json: %w", strings.Join(args, " "), err) - } - - e.debugf("Read go env for %s: %#v", time.Since(startedAt), e.vars) - return nil -} - -func (e Env) Get(k EnvKey) string { - envValue := os.Getenv(string(k)) - if envValue != "" { - return envValue - } - - return e.vars[string(k)] -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/lint/linter/config.go b/vendor/github.com/golangci/golangci-lint/pkg/lint/linter/config.go deleted file mode 100644 index ed5e5508c..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/lint/linter/config.go +++ /dev/null @@ -1,158 +0,0 @@ -package linter - -import ( - "golang.org/x/tools/go/analysis" - "golang.org/x/tools/go/packages" - - "github.com/golangci/golangci-lint/pkg/config" -) - -const ( - PresetBugs = "bugs" // Related to bugs detection. - PresetComment = "comment" // Related to comments analysis. - PresetComplexity = "complexity" // Related to code complexity analysis. - PresetError = "error" // Related to error handling analysis. - PresetFormatting = "format" // Related to code formatting. - PresetImport = "import" // Related to imports analysis. - PresetMetaLinter = "metalinter" // Related to linter that contains multiple rules or multiple linters. - PresetModule = "module" // Related to Go modules analysis. - PresetPerformance = "performance" // Related to performance. - PresetSQL = "sql" // Related to SQL. - PresetStyle = "style" // Related to coding style. - PresetTest = "test" // Related to the analysis of the code of the tests. - PresetUnused = "unused" // Related to the detection of unused code. -) - -// LastLinter nolintlint must be last because it looks at the results of all the previous linters for unused nolint directives. -const LastLinter = "nolintlint" - -type Deprecation struct { - Since string - Message string - Replacement string -} - -type Config struct { - Linter Linter - EnabledByDefault bool - - LoadMode packages.LoadMode - - InPresets []string - AlternativeNames []string - - OriginalURL string // URL of original (not forked) repo, needed for autogenerated README - Internal bool // Internal linters cannot be disabled (ex: typecheck). 
- CanAutoFix bool - IsSlow bool - DoesChangeTypes bool - - Since string - Deprecation *Deprecation -} - -func (lc *Config) WithEnabledByDefault() *Config { - lc.EnabledByDefault = true - return lc -} - -func (lc *Config) WithInternal() *Config { - lc.Internal = true - return lc -} - -func (lc *Config) ConsiderSlow() *Config { - lc.IsSlow = true - return lc -} - -func (lc *Config) IsSlowLinter() bool { - return lc.IsSlow -} - -func (lc *Config) WithLoadFiles() *Config { - lc.LoadMode |= packages.NeedName | packages.NeedFiles | packages.NeedCompiledGoFiles - return lc -} - -func (lc *Config) WithLoadForGoAnalysis() *Config { - lc = lc.WithLoadFiles() - lc.LoadMode |= packages.NeedImports | packages.NeedDeps | packages.NeedExportFile | packages.NeedTypesSizes - lc.IsSlow = true - return lc -} - -func (lc *Config) WithPresets(presets ...string) *Config { - lc.InPresets = presets - return lc -} - -func (lc *Config) WithURL(url string) *Config { - lc.OriginalURL = url - return lc -} - -func (lc *Config) WithAlternativeNames(names ...string) *Config { - lc.AlternativeNames = names - return lc -} - -func (lc *Config) WithAutoFix() *Config { - lc.CanAutoFix = true - return lc -} - -func (lc *Config) WithChangeTypes() *Config { - lc.DoesChangeTypes = true - return lc -} - -func (lc *Config) WithSince(version string) *Config { - lc.Since = version - return lc -} - -func (lc *Config) Deprecated(message, version, replacement string) *Config { - lc.Deprecation = &Deprecation{ - Since: version, - Message: message, - Replacement: replacement, - } - return lc -} - -func (lc *Config) IsDeprecated() bool { - return lc.Deprecation != nil -} - -func (lc *Config) AllNames() []string { - return append([]string{lc.Name()}, lc.AlternativeNames...) -} - -func (lc *Config) Name() string { - return lc.Linter.Name() -} - -func (lc *Config) WithNoopFallback(cfg *config.Config) *Config { - if cfg != nil && config.IsGreaterThanOrEqualGo122(cfg.Run.Go) { - lc.Linter = &Noop{ - name: lc.Linter.Name(), - desc: lc.Linter.Desc(), - run: func(_ *analysis.Pass) (any, error) { - return nil, nil - }, - } - - lc.LoadMode = 0 - return lc.WithLoadFiles() - } - - return lc -} - -func NewConfig(linter Linter) *Config { - lc := &Config{ - Linter: linter, - } - return lc.WithLoadFiles() -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/lint/linter/context.go b/vendor/github.com/golangci/golangci-lint/pkg/lint/linter/context.go deleted file mode 100644 index a9f9d7d7f..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/lint/linter/context.go +++ /dev/null @@ -1,49 +0,0 @@ -package linter - -import ( - "go/ast" - - "golang.org/x/tools/go/packages" - - "github.com/golangci/golangci-lint/internal/pkgcache" - "github.com/golangci/golangci-lint/pkg/config" - "github.com/golangci/golangci-lint/pkg/fsutils" - "github.com/golangci/golangci-lint/pkg/golinters/goanalysis/load" - "github.com/golangci/golangci-lint/pkg/logutils" -) - -type Context struct { - // Packages are deduplicated (test and normal packages) packages - Packages []*packages.Package - - // OriginalPackages aren't deduplicated: they contain both normal and test - // version for each of packages - OriginalPackages []*packages.Package - - Cfg *config.Config - FileCache *fsutils.FileCache - LineCache *fsutils.LineCache - Log logutils.Log - - PkgCache *pkgcache.Cache - LoadGuard *load.Guard -} - -func (c *Context) Settings() *config.LintersSettings { - return &c.Cfg.LintersSettings -} - -func (c *Context) ClearTypesInPackages() { - for _, p := range c.Packages { - 
clearTypes(p) - } - for _, p := range c.OriginalPackages { - clearTypes(p) - } -} - -func clearTypes(p *packages.Package) { - p.Types = nil - p.TypesInfo = nil - p.Syntax = []*ast.File{} -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/lint/linter/linter.go b/vendor/github.com/golangci/golangci-lint/pkg/lint/linter/linter.go deleted file mode 100644 index a65d6b927..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/lint/linter/linter.go +++ /dev/null @@ -1,35 +0,0 @@ -package linter - -import ( - "context" - - "golang.org/x/tools/go/analysis" - - "github.com/golangci/golangci-lint/pkg/result" -) - -type Linter interface { - Run(ctx context.Context, lintCtx *Context) ([]result.Issue, error) - Name() string - Desc() string -} - -type Noop struct { - name string - desc string - run func(pass *analysis.Pass) (any, error) -} - -func (n Noop) Run(_ context.Context, lintCtx *Context) ([]result.Issue, error) { - lintCtx.Log.Warnf("%s is disabled because of generics."+ - " You can track the evolution of the generics support by following the https://github.com/golangci/golangci-lint/issues/2649.", n.name) - return nil, nil -} - -func (n Noop) Name() string { - return n.name -} - -func (n Noop) Desc() string { - return n.desc -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/lint/lintersdb/custom_linters.go b/vendor/github.com/golangci/golangci-lint/pkg/lint/lintersdb/custom_linters.go deleted file mode 100644 index 188c14d91..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/lint/lintersdb/custom_linters.go +++ /dev/null @@ -1,126 +0,0 @@ -package lintersdb - -import ( - "errors" - "fmt" - "path/filepath" - "plugin" - - "github.com/spf13/viper" - "golang.org/x/tools/go/analysis" - - "github.com/golangci/golangci-lint/pkg/config" - "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" - "github.com/golangci/golangci-lint/pkg/lint/linter" -) - -type AnalyzerPlugin interface { - GetAnalyzers() []*analysis.Analyzer -} - -// getCustomLinterConfigs loads private linters that are specified in the golangci config file. -func (m *Manager) getCustomLinterConfigs() []*linter.Config { - if m.cfg == nil || m.log == nil { - return nil - } - - var linters []*linter.Config - - for name, settings := range m.cfg.LintersSettings.Custom { - lc, err := m.loadCustomLinterConfig(name, settings) - if err != nil { - m.log.Errorf("Unable to load custom analyzer %s:%s, %v", name, settings.Path, err) - } else { - linters = append(linters, lc) - } - } - - return linters -} - -// loadCustomLinterConfig loads the configuration of private linters. -// Private linters are dynamically loaded from .so plugin files. -func (m *Manager) loadCustomLinterConfig(name string, settings config.CustomLinterSettings) (*linter.Config, error) { - analyzers, err := m.getAnalyzerPlugin(settings.Path, settings.Settings) - if err != nil { - return nil, err - } - - m.log.Infof("Loaded %s: %s", settings.Path, name) - - customLinter := goanalysis.NewLinter(name, settings.Description, analyzers, nil). - WithLoadMode(goanalysis.LoadModeTypesInfo) - - linterConfig := linter.NewConfig(customLinter). - WithEnabledByDefault(). - WithLoadForGoAnalysis(). - WithURL(settings.OriginalURL) - - return linterConfig, nil -} - -// getAnalyzerPlugin loads a private linter as specified in the config file, -// loads the plugin from a .so file, -// and returns the 'AnalyzerPlugin' interface implemented by the private plugin. 
-// An error is returned if the private linter cannot be loaded -// or the linter does not implement the AnalyzerPlugin interface. -func (m *Manager) getAnalyzerPlugin(path string, settings any) ([]*analysis.Analyzer, error) { - if !filepath.IsAbs(path) { - // resolve non-absolute paths relative to config file's directory - configFilePath := viper.ConfigFileUsed() - absConfigFilePath, err := filepath.Abs(configFilePath) - if err != nil { - return nil, fmt.Errorf("could not get absolute representation of config file path %q: %w", configFilePath, err) - } - path = filepath.Join(filepath.Dir(absConfigFilePath), path) - } - - plug, err := plugin.Open(path) - if err != nil { - return nil, err - } - - analyzers, err := m.lookupPlugin(plug, settings) - if err != nil { - return nil, fmt.Errorf("lookup plugin %s: %w", path, err) - } - - return analyzers, nil -} - -func (m *Manager) lookupPlugin(plug *plugin.Plugin, settings any) ([]*analysis.Analyzer, error) { - symbol, err := plug.Lookup("New") - if err != nil { - analyzers, errP := m.lookupAnalyzerPlugin(plug) - if errP != nil { - return nil, errors.Join(err, errP) - } - - return analyzers, nil - } - - // The type func cannot be used here, must be the explicit signature. - constructor, ok := symbol.(func(any) ([]*analysis.Analyzer, error)) - if !ok { - return nil, fmt.Errorf("plugin does not abide by 'New' function: %T", symbol) - } - - return constructor(settings) -} - -func (m *Manager) lookupAnalyzerPlugin(plug *plugin.Plugin) ([]*analysis.Analyzer, error) { - symbol, err := plug.Lookup("AnalyzerPlugin") - if err != nil { - return nil, err - } - - m.log.Warnf("plugin: 'AnalyzerPlugin' plugins are deprecated, please use the new plugin signature: " + - "https://golangci-lint.run/contributing/new-linters/#create-a-plugin") - - analyzerPlugin, ok := symbol.(AnalyzerPlugin) - if !ok { - return nil, fmt.Errorf("plugin does not abide by 'AnalyzerPlugin' interface: %T", symbol) - } - - return analyzerPlugin.GetAnalyzers(), nil -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/lint/lintersdb/enabled_set.go b/vendor/github.com/golangci/golangci-lint/pkg/lint/lintersdb/enabled_set.go deleted file mode 100644 index 6f7b91b4d..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/lint/lintersdb/enabled_set.go +++ /dev/null @@ -1,219 +0,0 @@ -package lintersdb - -import ( - "os" - "sort" - - "golang.org/x/exp/maps" - - "github.com/golangci/golangci-lint/pkg/config" - "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" - "github.com/golangci/golangci-lint/pkg/lint/linter" - "github.com/golangci/golangci-lint/pkg/logutils" -) - -// EnvTestRun value: "1" -const EnvTestRun = "GL_TEST_RUN" - -type EnabledSet struct { - m *Manager - v *Validator - log logutils.Log - cfg *config.Config - debugf logutils.DebugFunc -} - -func NewEnabledSet(m *Manager, v *Validator, log logutils.Log, cfg *config.Config) *EnabledSet { - return &EnabledSet{ - m: m, - v: v, - log: log, - cfg: cfg, - debugf: logutils.Debug(logutils.DebugKeyEnabledLinters), - } -} - -//nolint:gocyclo // the complexity cannot be reduced. 
-func (es EnabledSet) build(lcfg *config.Linters, enabledByDefaultLinters []*linter.Config) map[string]*linter.Config { - es.debugf("Linters config: %#v", lcfg) - - resultLintersSet := map[string]*linter.Config{} - switch { - case len(lcfg.Presets) != 0: - break // imply --disable-all - case lcfg.EnableAll: - resultLintersSet = linterConfigsToMap(es.m.GetAllSupportedLinterConfigs()) - case lcfg.DisableAll: - break - default: - resultLintersSet = linterConfigsToMap(enabledByDefaultLinters) - } - - // --presets can only add linters to default set - for _, p := range lcfg.Presets { - for _, lc := range es.m.GetAllLinterConfigsForPreset(p) { - lc := lc - resultLintersSet[lc.Name()] = lc - } - } - - // --fast removes slow linters from current set. - // It should be after --presets to be able to run only fast linters in preset. - // It should be before --enable and --disable to be able to enable or disable specific linter. - if lcfg.Fast { - for name, lc := range resultLintersSet { - if lc.IsSlowLinter() { - delete(resultLintersSet, name) - } - } - } - - for _, name := range lcfg.Enable { - for _, lc := range es.m.GetLinterConfigs(name) { - // it's important to use lc.Name() nor name because name can be alias - resultLintersSet[lc.Name()] = lc - } - } - - for _, name := range lcfg.Disable { - for _, lc := range es.m.GetLinterConfigs(name) { - // it's important to use lc.Name() nor name because name can be alias - delete(resultLintersSet, lc.Name()) - } - } - - // typecheck is not a real linter and cannot be disabled. - if _, ok := resultLintersSet["typecheck"]; !ok && (es.cfg == nil || !es.cfg.InternalCmdTest) { - for _, lc := range es.m.GetLinterConfigs("typecheck") { - // it's important to use lc.Name() nor name because name can be alias - resultLintersSet[lc.Name()] = lc - } - } - - return resultLintersSet -} - -func (es EnabledSet) GetEnabledLintersMap() (map[string]*linter.Config, error) { - if err := es.v.validateEnabledDisabledLintersConfig(&es.cfg.Linters); err != nil { - return nil, err - } - - enabledLinters := es.build(&es.cfg.Linters, es.m.GetAllEnabledByDefaultLinters()) - if os.Getenv(EnvTestRun) == "1" { - es.verbosePrintLintersStatus(enabledLinters) - } - return enabledLinters, nil -} - -// GetOptimizedLinters returns enabled linters after optimization (merging) of multiple linters -// into a fewer number of linters. E.g. some go/analysis linters can be optimized into -// one metalinter for data reuse and speed up. -func (es EnabledSet) GetOptimizedLinters() ([]*linter.Config, error) { - if err := es.v.validateEnabledDisabledLintersConfig(&es.cfg.Linters); err != nil { - return nil, err - } - - resultLintersSet := es.build(&es.cfg.Linters, es.m.GetAllEnabledByDefaultLinters()) - es.verbosePrintLintersStatus(resultLintersSet) - es.combineGoAnalysisLinters(resultLintersSet) - - resultLinters := maps.Values(resultLintersSet) - - // Make order of execution of linters (go/analysis metalinter and unused) stable. 
- sort.Slice(resultLinters, func(i, j int) bool { - a, b := resultLinters[i], resultLinters[j] - - if b.Name() == linter.LastLinter { - return true - } - - if a.Name() == linter.LastLinter { - return false - } - - if a.DoesChangeTypes != b.DoesChangeTypes { - return b.DoesChangeTypes // move type-changing linters to the end to optimize speed - } - return a.Name() < b.Name() - }) - - return resultLinters, nil -} - -func (es EnabledSet) combineGoAnalysisLinters(linters map[string]*linter.Config) { - var goanalysisLinters []*goanalysis.Linter - goanalysisPresets := map[string]bool{} - for _, lc := range linters { - lnt, ok := lc.Linter.(*goanalysis.Linter) - if !ok { - continue - } - if lnt.LoadMode() == goanalysis.LoadModeWholeProgram { - // It's ineffective by CPU and memory to run whole-program and incremental analyzers at once. - continue - } - goanalysisLinters = append(goanalysisLinters, lnt) - for _, p := range lc.InPresets { - goanalysisPresets[p] = true - } - } - - if len(goanalysisLinters) <= 1 { - es.debugf("Didn't combine go/analysis linters: got only %d linters", len(goanalysisLinters)) - return - } - - for _, lnt := range goanalysisLinters { - delete(linters, lnt.Name()) - } - - // Make order of execution of go/analysis analyzers stable. - sort.Slice(goanalysisLinters, func(i, j int) bool { - a, b := goanalysisLinters[i], goanalysisLinters[j] - - if b.Name() == linter.LastLinter { - return true - } - - if a.Name() == linter.LastLinter { - return false - } - - return a.Name() <= b.Name() - }) - - ml := goanalysis.NewMetaLinter(goanalysisLinters) - - presets := maps.Keys(goanalysisPresets) - - mlConfig := &linter.Config{ - Linter: ml, - EnabledByDefault: false, - InPresets: presets, - AlternativeNames: nil, - OriginalURL: "", - } - - mlConfig = mlConfig.WithLoadForGoAnalysis() - - linters[ml.Name()] = mlConfig - es.debugf("Combined %d go/analysis linters into one metalinter", len(goanalysisLinters)) -} - -func (es EnabledSet) verbosePrintLintersStatus(lcs map[string]*linter.Config) { - var linterNames []string - for _, lc := range lcs { - if lc.Internal { - continue - } - - linterNames = append(linterNames, lc.Name()) - } - sort.StringSlice(linterNames).Sort() - es.log.Infof("Active %d linters: %s", len(linterNames), linterNames) - - if len(es.cfg.Linters.Presets) != 0 { - sort.StringSlice(es.cfg.Linters.Presets).Sort() - es.log.Infof("Active presets: %s", es.cfg.Linters.Presets) - } -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/lint/lintersdb/manager.go b/vendor/github.com/golangci/golangci-lint/pkg/lint/lintersdb/manager.go deleted file mode 100644 index fdc73ec73..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/lint/lintersdb/manager.go +++ /dev/null @@ -1,998 +0,0 @@ -package lintersdb - -import ( - "regexp" - - "github.com/golangci/golangci-lint/pkg/config" - "github.com/golangci/golangci-lint/pkg/golinters" - "github.com/golangci/golangci-lint/pkg/lint/linter" - "github.com/golangci/golangci-lint/pkg/logutils" -) - -type Manager struct { - cfg *config.Config - log logutils.Log - - nameToLCs map[string][]*linter.Config - customLinters []*linter.Config -} - -func NewManager(cfg *config.Config, log logutils.Log) *Manager { - m := &Manager{cfg: cfg, log: log} - m.customLinters = m.getCustomLinterConfigs() - - nameToLCs := make(map[string][]*linter.Config) - for _, lc := range m.GetAllSupportedLinterConfigs() { - for _, name := range lc.AllNames() { - nameToLCs[name] = append(nameToLCs[name], lc) - } - } - - m.nameToLCs = nameToLCs - - return m -} - 
-func (Manager) AllPresets() []string { - return []string{ - linter.PresetBugs, - linter.PresetComment, - linter.PresetComplexity, - linter.PresetError, - linter.PresetFormatting, - linter.PresetImport, - linter.PresetMetaLinter, - linter.PresetModule, - linter.PresetPerformance, - linter.PresetSQL, - linter.PresetStyle, - linter.PresetTest, - linter.PresetUnused, - } -} - -func (m Manager) allPresetsSet() map[string]bool { - ret := map[string]bool{} - for _, p := range m.AllPresets() { - ret[p] = true - } - return ret -} - -func (m Manager) GetLinterConfigs(name string) []*linter.Config { - return m.nameToLCs[name] -} - -//nolint:funlen -func (m Manager) GetAllSupportedLinterConfigs() []*linter.Config { - var ( - asasalintCfg *config.AsasalintSettings - bidichkCfg *config.BiDiChkSettings - cyclopCfg *config.Cyclop - decorderCfg *config.DecorderSettings - depGuardCfg *config.DepGuardSettings - dogsledCfg *config.DogsledSettings - duplCfg *config.DuplSettings - dupwordCfg *config.DupWordSettings - errcheckCfg *config.ErrcheckSettings - errchkjsonCfg *config.ErrChkJSONSettings - errorlintCfg *config.ErrorLintSettings - exhaustiveCfg *config.ExhaustiveSettings - exhaustiveStructCfg *config.ExhaustiveStructSettings - exhaustructCfg *config.ExhaustructSettings - forbidigoCfg *config.ForbidigoSettings - funlenCfg *config.FunlenSettings - gciCfg *config.GciSettings - ginkgolinterCfg *config.GinkgoLinterSettings - gocognitCfg *config.GocognitSettings - goconstCfg *config.GoConstSettings - gocriticCfg *config.GoCriticSettings - gocycloCfg *config.GoCycloSettings - godotCfg *config.GodotSettings - godoxCfg *config.GodoxSettings - gofmtCfg *config.GoFmtSettings - gofumptCfg *config.GofumptSettings - goheaderCfg *config.GoHeaderSettings - goimportsCfg *config.GoImportsSettings - golintCfg *config.GoLintSettings - goMndCfg *config.GoMndSettings - goModDirectivesCfg *config.GoModDirectivesSettings - gomodguardCfg *config.GoModGuardSettings - gosecCfg *config.GoSecSettings - gosimpleCfg *config.StaticCheckSettings - gosmopolitanCfg *config.GosmopolitanSettings - govetCfg *config.GovetSettings - grouperCfg *config.GrouperSettings - ifshortCfg *config.IfshortSettings - importAsCfg *config.ImportAsSettings - inamedparamCfg *config.INamedParamSettings - interfaceBloatCfg *config.InterfaceBloatSettings - ireturnCfg *config.IreturnSettings - lllCfg *config.LllSettings - loggerCheckCfg *config.LoggerCheckSettings - maintIdxCfg *config.MaintIdxSettings - makezeroCfg *config.MakezeroSettings - malignedCfg *config.MalignedSettings - misspellCfg *config.MisspellSettings - musttagCfg *config.MustTagSettings - nakedretCfg *config.NakedretSettings - nestifCfg *config.NestifSettings - nilNilCfg *config.NilNilSettings - nlreturnCfg *config.NlreturnSettings - noLintLintCfg *config.NoLintLintSettings - noNamedReturnsCfg *config.NoNamedReturnsSettings - parallelTestCfg *config.ParallelTestSettings - perfSprintCfg *config.PerfSprintSettings - preallocCfg *config.PreallocSettings - predeclaredCfg *config.PredeclaredSettings - promlinterCfg *config.PromlinterSettings - protogetterCfg *config.ProtoGetterSettings - reassignCfg *config.ReassignSettings - reviveCfg *config.ReviveSettings - rowserrcheckCfg *config.RowsErrCheckSettings - sloglintCfg *config.SlogLintSettings - spancheckCfg *config.SpancheckSettings - staticcheckCfg *config.StaticCheckSettings - structcheckCfg *config.StructCheckSettings - stylecheckCfg *config.StaticCheckSettings - tagalignCfg *config.TagAlignSettings - tagliatelleCfg *config.TagliatelleSettings - 
tenvCfg *config.TenvSettings - testifylintCfg *config.TestifylintSettings - testpackageCfg *config.TestpackageSettings - thelperCfg *config.ThelperSettings - unparamCfg *config.UnparamSettings - unusedCfg *config.UnusedSettings - usestdlibvars *config.UseStdlibVarsSettings - varcheckCfg *config.VarCheckSettings - varnamelenCfg *config.VarnamelenSettings - whitespaceCfg *config.WhitespaceSettings - wrapcheckCfg *config.WrapcheckSettings - wslCfg *config.WSLSettings - ) - - if m.cfg != nil { - asasalintCfg = &m.cfg.LintersSettings.Asasalint - bidichkCfg = &m.cfg.LintersSettings.BiDiChk - cyclopCfg = &m.cfg.LintersSettings.Cyclop - decorderCfg = &m.cfg.LintersSettings.Decorder - depGuardCfg = &m.cfg.LintersSettings.Depguard - dogsledCfg = &m.cfg.LintersSettings.Dogsled - duplCfg = &m.cfg.LintersSettings.Dupl - dupwordCfg = &m.cfg.LintersSettings.DupWord - errcheckCfg = &m.cfg.LintersSettings.Errcheck - errchkjsonCfg = &m.cfg.LintersSettings.ErrChkJSON - errorlintCfg = &m.cfg.LintersSettings.ErrorLint - exhaustiveCfg = &m.cfg.LintersSettings.Exhaustive - exhaustiveStructCfg = &m.cfg.LintersSettings.ExhaustiveStruct - exhaustructCfg = &m.cfg.LintersSettings.Exhaustruct - forbidigoCfg = &m.cfg.LintersSettings.Forbidigo - funlenCfg = &m.cfg.LintersSettings.Funlen - gciCfg = &m.cfg.LintersSettings.Gci - ginkgolinterCfg = &m.cfg.LintersSettings.GinkgoLinter - gocognitCfg = &m.cfg.LintersSettings.Gocognit - goconstCfg = &m.cfg.LintersSettings.Goconst - gocriticCfg = &m.cfg.LintersSettings.Gocritic - gocycloCfg = &m.cfg.LintersSettings.Gocyclo - godotCfg = &m.cfg.LintersSettings.Godot - godoxCfg = &m.cfg.LintersSettings.Godox - gofmtCfg = &m.cfg.LintersSettings.Gofmt - gofumptCfg = &m.cfg.LintersSettings.Gofumpt - goheaderCfg = &m.cfg.LintersSettings.Goheader - goimportsCfg = &m.cfg.LintersSettings.Goimports - golintCfg = &m.cfg.LintersSettings.Golint - goMndCfg = &m.cfg.LintersSettings.Gomnd - goModDirectivesCfg = &m.cfg.LintersSettings.GoModDirectives - gomodguardCfg = &m.cfg.LintersSettings.Gomodguard - gosecCfg = &m.cfg.LintersSettings.Gosec - gosimpleCfg = &m.cfg.LintersSettings.Gosimple - gosmopolitanCfg = &m.cfg.LintersSettings.Gosmopolitan - govetCfg = &m.cfg.LintersSettings.Govet - grouperCfg = &m.cfg.LintersSettings.Grouper - ifshortCfg = &m.cfg.LintersSettings.Ifshort - importAsCfg = &m.cfg.LintersSettings.ImportAs - inamedparamCfg = &m.cfg.LintersSettings.Inamedparam - interfaceBloatCfg = &m.cfg.LintersSettings.InterfaceBloat - ireturnCfg = &m.cfg.LintersSettings.Ireturn - lllCfg = &m.cfg.LintersSettings.Lll - loggerCheckCfg = &m.cfg.LintersSettings.LoggerCheck - maintIdxCfg = &m.cfg.LintersSettings.MaintIdx - makezeroCfg = &m.cfg.LintersSettings.Makezero - malignedCfg = &m.cfg.LintersSettings.Maligned - misspellCfg = &m.cfg.LintersSettings.Misspell - musttagCfg = &m.cfg.LintersSettings.MustTag - nakedretCfg = &m.cfg.LintersSettings.Nakedret - nestifCfg = &m.cfg.LintersSettings.Nestif - nilNilCfg = &m.cfg.LintersSettings.NilNil - nlreturnCfg = &m.cfg.LintersSettings.Nlreturn - noLintLintCfg = &m.cfg.LintersSettings.NoLintLint - noNamedReturnsCfg = &m.cfg.LintersSettings.NoNamedReturns - parallelTestCfg = &m.cfg.LintersSettings.ParallelTest - perfSprintCfg = &m.cfg.LintersSettings.PerfSprint - preallocCfg = &m.cfg.LintersSettings.Prealloc - predeclaredCfg = &m.cfg.LintersSettings.Predeclared - promlinterCfg = &m.cfg.LintersSettings.Promlinter - protogetterCfg = &m.cfg.LintersSettings.ProtoGetter - reassignCfg = &m.cfg.LintersSettings.Reassign - reviveCfg = &m.cfg.LintersSettings.Revive - 
rowserrcheckCfg = &m.cfg.LintersSettings.RowsErrCheck - sloglintCfg = &m.cfg.LintersSettings.SlogLint - spancheckCfg = &m.cfg.LintersSettings.Spancheck - staticcheckCfg = &m.cfg.LintersSettings.Staticcheck - structcheckCfg = &m.cfg.LintersSettings.Structcheck - stylecheckCfg = &m.cfg.LintersSettings.Stylecheck - tagalignCfg = &m.cfg.LintersSettings.TagAlign - tagliatelleCfg = &m.cfg.LintersSettings.Tagliatelle - tenvCfg = &m.cfg.LintersSettings.Tenv - testifylintCfg = &m.cfg.LintersSettings.Testifylint - testpackageCfg = &m.cfg.LintersSettings.Testpackage - thelperCfg = &m.cfg.LintersSettings.Thelper - unparamCfg = &m.cfg.LintersSettings.Unparam - unusedCfg = &m.cfg.LintersSettings.Unused - usestdlibvars = &m.cfg.LintersSettings.UseStdlibVars - varcheckCfg = &m.cfg.LintersSettings.Varcheck - varnamelenCfg = &m.cfg.LintersSettings.Varnamelen - whitespaceCfg = &m.cfg.LintersSettings.Whitespace - wrapcheckCfg = &m.cfg.LintersSettings.Wrapcheck - wslCfg = &m.cfg.LintersSettings.WSL - - govetCfg.Go = m.cfg.Run.Go - - gocriticCfg.Go = trimGoVersion(m.cfg.Run.Go) - - if gofumptCfg.LangVersion == "" { - gofumptCfg.LangVersion = m.cfg.Run.Go - } - - // staticcheck related linters. - if staticcheckCfg.GoVersion == "" { - staticcheckCfg.GoVersion = trimGoVersion(m.cfg.Run.Go) - } - if gosimpleCfg.GoVersion == "" { - gosimpleCfg.GoVersion = trimGoVersion(m.cfg.Run.Go) - } - if stylecheckCfg.GoVersion != "" { - stylecheckCfg.GoVersion = trimGoVersion(m.cfg.Run.Go) - } - } - - const megacheckName = "megacheck" - - var linters []*linter.Config - linters = append(linters, m.customLinters...) - - // The linters are sorted in the alphabetical order (case-insensitive). - // When a new linter is added the version in `WithSince(...)` must be the next minor version of golangci-lint. - linters = append(linters, - linter.NewConfig(golinters.NewAsasalint(asasalintCfg)). - WithSince("1.47.0"). - WithPresets(linter.PresetBugs). - WithLoadForGoAnalysis(). - WithURL("https://github.com/alingse/asasalint"), - - linter.NewConfig(golinters.NewAsciicheck()). - WithSince("v1.26.0"). - WithPresets(linter.PresetBugs, linter.PresetStyle). - WithURL("https://github.com/tdakkota/asciicheck"), - - linter.NewConfig(golinters.NewBiDiChkFuncName(bidichkCfg)). - WithSince("1.43.0"). - WithPresets(linter.PresetBugs). - WithURL("https://github.com/breml/bidichk"), - - linter.NewConfig(golinters.NewBodyclose()). - WithSince("v1.18.0"). - WithLoadForGoAnalysis(). - WithPresets(linter.PresetPerformance, linter.PresetBugs). - WithURL("https://github.com/timakin/bodyclose"), - - linter.NewConfig(golinters.NewContainedCtx()). - WithSince("1.44.0"). - WithLoadForGoAnalysis(). - WithPresets(linter.PresetStyle). - WithURL("https://github.com/sivchari/containedctx"), - - linter.NewConfig(golinters.NewContextCheck()). - WithSince("v1.43.0"). - WithPresets(linter.PresetBugs). - WithLoadForGoAnalysis(). - WithURL("https://github.com/kkHAIKE/contextcheck"), - - linter.NewConfig(golinters.NewCyclop(cyclopCfg)). - WithSince("v1.37.0"). - WithLoadForGoAnalysis(). - WithPresets(linter.PresetComplexity). - WithURL("https://github.com/bkielbasa/cyclop"), - - linter.NewConfig(golinters.NewDecorder(decorderCfg)). - WithSince("v1.44.0"). - WithPresets(linter.PresetFormatting, linter.PresetStyle). - WithURL("https://gitlab.com/bosi/decorder"), - - linter.NewConfig(golinters.NewDeadcode()). - WithSince("v1.0.0"). - WithLoadForGoAnalysis(). - WithPresets(linter.PresetUnused). - WithURL("https://github.com/remyoudompheng/go-misc/tree/master/deadcode"). 
- Deprecated("The owner seems to have abandoned the linter.", "v1.49.0", "unused"), - - linter.NewConfig(golinters.NewDepguard(depGuardCfg)). - WithSince("v1.4.0"). - WithPresets(linter.PresetStyle, linter.PresetImport, linter.PresetModule). - WithURL("https://github.com/OpenPeeDeeP/depguard"), - - linter.NewConfig(golinters.NewDogsled(dogsledCfg)). - WithSince("v1.19.0"). - WithPresets(linter.PresetStyle). - WithURL("https://github.com/alexkohler/dogsled"), - - linter.NewConfig(golinters.NewDupl(duplCfg)). - WithSince("v1.0.0"). - WithPresets(linter.PresetStyle). - WithURL("https://github.com/mibk/dupl"), - - linter.NewConfig(golinters.NewDupWord(dupwordCfg)). - WithSince("1.50.0"). - WithPresets(linter.PresetComment). - WithAutoFix(). - WithURL("https://github.com/Abirdcfly/dupword"), - - linter.NewConfig(golinters.NewDurationCheck()). - WithSince("v1.37.0"). - WithPresets(linter.PresetBugs). - WithLoadForGoAnalysis(). - WithURL("https://github.com/charithe/durationcheck"), - - linter.NewConfig(golinters.NewErrcheck(errcheckCfg)). - WithEnabledByDefault(). - WithSince("v1.0.0"). - WithLoadForGoAnalysis(). - WithPresets(linter.PresetBugs, linter.PresetError). - WithURL("https://github.com/kisielk/errcheck"), - - linter.NewConfig(golinters.NewErrChkJSONFuncName(errchkjsonCfg)). - WithSince("1.44.0"). - WithPresets(linter.PresetBugs). - WithLoadForGoAnalysis(). - WithURL("https://github.com/breml/errchkjson"), - - linter.NewConfig(golinters.NewErrName()). - WithSince("v1.42.0"). - WithPresets(linter.PresetStyle). - WithLoadForGoAnalysis(). - WithURL("https://github.com/Antonboom/errname"), - - linter.NewConfig(golinters.NewErrorLint(errorlintCfg)). - WithSince("v1.32.0"). - WithPresets(linter.PresetBugs, linter.PresetError). - WithLoadForGoAnalysis(). - WithURL("https://github.com/polyfloyd/go-errorlint"), - - linter.NewConfig(golinters.NewExecInQuery()). - WithSince("v1.46.0"). - WithPresets(linter.PresetSQL). - WithLoadForGoAnalysis(). - WithURL("https://github.com/lufeee/execinquery"), - - linter.NewConfig(golinters.NewExhaustive(exhaustiveCfg)). - WithSince(" v1.28.0"). - WithPresets(linter.PresetBugs). - WithLoadForGoAnalysis(). - WithURL("https://github.com/nishanths/exhaustive"), - - linter.NewConfig(golinters.NewExhaustiveStruct(exhaustiveStructCfg)). - WithSince("v1.32.0"). - WithPresets(linter.PresetStyle, linter.PresetTest). - WithLoadForGoAnalysis(). - WithURL("https://github.com/mbilski/exhaustivestruct"). - Deprecated("The owner seems to have abandoned the linter.", "v1.46.0", "exhaustruct"), - - linter.NewConfig(golinters.NewExhaustruct(exhaustructCfg)). - WithSince("v1.46.0"). - WithPresets(linter.PresetStyle, linter.PresetTest). - WithLoadForGoAnalysis(). - WithURL("https://github.com/GaijinEntertainment/go-exhaustruct"), - - linter.NewConfig(golinters.NewExportLoopRef()). - WithSince("v1.28.0"). - WithPresets(linter.PresetBugs). - WithLoadForGoAnalysis(). - WithURL("https://github.com/kyoh86/exportloopref"), - - linter.NewConfig(golinters.NewForbidigo(forbidigoCfg)). - WithSince("v1.34.0"). - WithPresets(linter.PresetStyle). - // Strictly speaking, - // the additional information is only needed when forbidigoCfg.AnalyzeTypes is chosen by the user. - // But we don't know that here in all cases (sometimes config is not loaded), - // so we have to assume that it is needed to be on the safe side. - WithLoadForGoAnalysis(). - WithURL("https://github.com/ashanbrown/forbidigo"), - - linter.NewConfig(golinters.NewForceTypeAssert()). - WithSince("v1.38.0"). 
- WithPresets(linter.PresetStyle). - WithURL("https://github.com/gostaticanalysis/forcetypeassert"), - - linter.NewConfig(golinters.NewFunlen(funlenCfg)). - WithSince("v1.18.0"). - WithPresets(linter.PresetComplexity). - WithURL("https://github.com/ultraware/funlen"), - - linter.NewConfig(golinters.NewGci(gciCfg)). - WithSince("v1.30.0"). - WithPresets(linter.PresetFormatting, linter.PresetImport). - WithURL("https://github.com/daixiang0/gci"), - - linter.NewConfig(golinters.NewGinkgoLinter(ginkgolinterCfg)). - WithSince("v1.51.0"). - WithLoadForGoAnalysis(). - WithPresets(linter.PresetStyle). - WithURL("https://github.com/nunnatsa/ginkgolinter"), - - linter.NewConfig(golinters.NewGoCheckCompilerDirectives()). - WithSince("v1.51.0"). - WithPresets(linter.PresetBugs). - WithURL("https://github.com/leighmcculloch/gocheckcompilerdirectives"), - - linter.NewConfig(golinters.NewGochecknoglobals()). - WithSince("v1.12.0"). - WithPresets(linter.PresetStyle). - WithLoadForGoAnalysis(). - WithURL("https://github.com/leighmcculloch/gochecknoglobals"), - - linter.NewConfig(golinters.NewGochecknoinits()). - WithSince("v1.12.0"). - WithPresets(linter.PresetStyle), - - linter.NewConfig(golinters.NewGoCheckSumType()). - WithSince("v1.55.0"). - WithPresets(linter.PresetBugs). - WithLoadForGoAnalysis(). - WithURL("https://github.com/alecthomas/go-check-sumtype"), - - linter.NewConfig(golinters.NewGocognit(gocognitCfg)). - WithSince("v1.20.0"). - WithPresets(linter.PresetComplexity). - WithURL("https://github.com/uudashr/gocognit"), - - linter.NewConfig(golinters.NewGoconst(goconstCfg)). - WithSince("v1.0.0"). - WithPresets(linter.PresetStyle). - WithURL("https://github.com/jgautheron/goconst"), - - linter.NewConfig(golinters.NewGoCritic(gocriticCfg, m.cfg)). - WithSince("v1.12.0"). - WithPresets(linter.PresetStyle, linter.PresetMetaLinter). - WithLoadForGoAnalysis(). - WithURL("https://github.com/go-critic/go-critic"), - - linter.NewConfig(golinters.NewGocyclo(gocycloCfg)). - WithSince("v1.0.0"). - WithPresets(linter.PresetComplexity). - WithURL("https://github.com/fzipp/gocyclo"), - - linter.NewConfig(golinters.NewGodot(godotCfg)). - WithSince("v1.25.0"). - WithPresets(linter.PresetStyle, linter.PresetComment). - WithAutoFix(). - WithURL("https://github.com/tetafro/godot"), - - linter.NewConfig(golinters.NewGodox(godoxCfg)). - WithSince("v1.19.0"). - WithPresets(linter.PresetStyle, linter.PresetComment). - WithURL("https://github.com/matoous/godox"), - - linter.NewConfig(golinters.NewGoerr113()). - WithSince("v1.26.0"). - WithPresets(linter.PresetStyle, linter.PresetError). - WithLoadForGoAnalysis(). - WithURL("https://github.com/Djarvur/go-err113"), - - linter.NewConfig(golinters.NewGofmt(gofmtCfg)). - WithSince("v1.0.0"). - WithPresets(linter.PresetFormatting). - WithAutoFix(). - WithURL("https://pkg.go.dev/cmd/gofmt"), - - linter.NewConfig(golinters.NewGofumpt(gofumptCfg)). - WithSince("v1.28.0"). - WithPresets(linter.PresetFormatting). - WithAutoFix(). - WithURL("https://github.com/mvdan/gofumpt"), - - linter.NewConfig(golinters.NewGoHeader(goheaderCfg)). - WithSince("v1.28.0"). - WithPresets(linter.PresetStyle). - WithURL("https://github.com/denis-tingaikin/go-header"), - - linter.NewConfig(golinters.NewGoimports(goimportsCfg)). - WithSince("v1.20.0"). - WithPresets(linter.PresetFormatting, linter.PresetImport). - WithAutoFix(). - WithURL("https://pkg.go.dev/golang.org/x/tools/cmd/goimports"), - - linter.NewConfig(golinters.NewGolint(golintCfg)). - WithSince("v1.0.0"). - WithLoadForGoAnalysis(). 
- WithPresets(linter.PresetStyle). - WithURL("https://github.com/golang/lint"). - Deprecated("The repository of the linter has been archived by the owner.", "v1.41.0", "revive"), - - linter.NewConfig(golinters.NewGoMND(goMndCfg)). - WithSince("v1.22.0"). - WithPresets(linter.PresetStyle). - WithURL("https://github.com/tommy-muehle/go-mnd"), - - linter.NewConfig(golinters.NewGoModDirectives(goModDirectivesCfg)). - WithSince("v1.39.0"). - WithPresets(linter.PresetStyle, linter.PresetModule). - WithURL("https://github.com/ldez/gomoddirectives"), - - linter.NewConfig(golinters.NewGomodguard(gomodguardCfg)). - WithSince("v1.25.0"). - WithPresets(linter.PresetStyle, linter.PresetImport, linter.PresetModule). - WithURL("https://github.com/ryancurrah/gomodguard"), - - linter.NewConfig(golinters.NewGoPrintfFuncName()). - WithSince("v1.23.0"). - WithPresets(linter.PresetStyle). - WithURL("https://github.com/jirfag/go-printf-func-name"), - - linter.NewConfig(golinters.NewGosec(gosecCfg)). - WithSince("v1.0.0"). - WithLoadForGoAnalysis(). - WithPresets(linter.PresetBugs). - WithURL("https://github.com/securego/gosec"). - WithAlternativeNames("gas"), - - linter.NewConfig(golinters.NewGosimple(gosimpleCfg)). - WithEnabledByDefault(). - WithSince("v1.20.0"). - WithLoadForGoAnalysis(). - WithPresets(linter.PresetStyle). - WithAlternativeNames(megacheckName). - WithURL("https://github.com/dominikh/go-tools/tree/master/simple"), - - linter.NewConfig(golinters.NewGosmopolitan(gosmopolitanCfg)). - WithSince("v1.53.0"). - WithLoadForGoAnalysis(). - WithPresets(linter.PresetBugs). - WithURL("https://github.com/xen0n/gosmopolitan"), - - linter.NewConfig(golinters.NewGovet(govetCfg)). - WithEnabledByDefault(). - WithSince("v1.0.0"). - WithLoadForGoAnalysis(). - WithPresets(linter.PresetBugs, linter.PresetMetaLinter). - WithAlternativeNames("vet", "vetshadow"). - WithURL("https://pkg.go.dev/cmd/vet"), - - linter.NewConfig(golinters.NewGrouper(grouperCfg)). - WithSince("v1.44.0"). - WithPresets(linter.PresetStyle). - WithURL("https://github.com/leonklingele/grouper"), - - linter.NewConfig(golinters.NewIfshort(ifshortCfg)). - WithSince("v1.36.0"). - WithPresets(linter.PresetStyle). - WithURL("https://github.com/esimonov/ifshort"). - Deprecated("The repository of the linter has been deprecated by the owner.", "v1.48.0", ""), - - linter.NewConfig(golinters.NewImportAs(importAsCfg)). - WithSince("v1.38.0"). - WithPresets(linter.PresetStyle). - WithLoadForGoAnalysis(). - WithURL("https://github.com/julz/importas"), - - linter.NewConfig(golinters.NewINamedParam(inamedparamCfg)). - WithSince("v1.55.0"). - WithPresets(linter.PresetStyle). - WithURL("https://github.com/macabu/inamedparam"), - - linter.NewConfig(golinters.NewIneffassign()). - WithEnabledByDefault(). - WithSince("v1.0.0"). - WithPresets(linter.PresetUnused). - WithURL("https://github.com/gordonklaus/ineffassign"), - - linter.NewConfig(golinters.NewInterfaceBloat(interfaceBloatCfg)). - WithSince("v1.49.0"). - WithPresets(linter.PresetStyle). - WithURL("https://github.com/sashamelentyev/interfacebloat"), - - linter.NewConfig(golinters.NewInterfacer()). - WithSince("v1.0.0"). - WithLoadForGoAnalysis(). - WithPresets(linter.PresetStyle). - WithURL("https://github.com/mvdan/interfacer"). - Deprecated("The repository of the linter has been archived by the owner.", "v1.38.0", ""), - - linter.NewConfig(golinters.NewIreturn(ireturnCfg)). - WithSince("v1.43.0"). - WithPresets(linter.PresetStyle). - WithLoadForGoAnalysis(). 
- WithURL("https://github.com/butuzov/ireturn"), - - linter.NewConfig(golinters.NewLLL(lllCfg)). - WithSince("v1.8.0"). - WithPresets(linter.PresetStyle), - - linter.NewConfig(golinters.NewLoggerCheck(loggerCheckCfg)). - WithSince("v1.49.0"). - WithLoadForGoAnalysis(). - WithPresets(linter.PresetStyle, linter.PresetBugs). - WithAlternativeNames("logrlint"). - WithURL("https://github.com/timonwong/loggercheck"), - - linter.NewConfig(golinters.NewMaintIdx(maintIdxCfg)). - WithSince("v1.44.0"). - WithPresets(linter.PresetComplexity). - WithURL("https://github.com/yagipy/maintidx"), - - linter.NewConfig(golinters.NewMakezero(makezeroCfg)). - WithSince("v1.34.0"). - WithPresets(linter.PresetStyle, linter.PresetBugs). - WithLoadForGoAnalysis(). - WithURL("https://github.com/ashanbrown/makezero"), - - linter.NewConfig(golinters.NewMaligned(malignedCfg)). - WithSince("v1.0.0"). - WithLoadForGoAnalysis(). - WithPresets(linter.PresetPerformance). - WithURL("https://github.com/mdempsky/maligned"). - Deprecated("The repository of the linter has been archived by the owner.", "v1.38.0", "govet 'fieldalignment'"), - - linter.NewConfig(golinters.NewMirror()). - WithSince("v1.53.0"). - WithPresets(linter.PresetStyle). - WithLoadForGoAnalysis(). - WithURL("https://github.com/butuzov/mirror"), - - linter.NewConfig(golinters.NewMisspell(misspellCfg)). - WithSince("v1.8.0"). - WithPresets(linter.PresetStyle, linter.PresetComment). - WithAutoFix(). - WithURL("https://github.com/client9/misspell"), - - linter.NewConfig(golinters.NewMustTag(musttagCfg)). - WithSince("v1.51.0"). - WithLoadForGoAnalysis(). - WithPresets(linter.PresetStyle, linter.PresetBugs). - WithURL("https://github.com/go-simpler/musttag"), - - linter.NewConfig(golinters.NewNakedret(nakedretCfg)). - WithSince("v1.19.0"). - WithPresets(linter.PresetStyle). - WithURL("https://github.com/alexkohler/nakedret"), - - linter.NewConfig(golinters.NewNestif(nestifCfg)). - WithSince("v1.25.0"). - WithPresets(linter.PresetComplexity). - WithURL("https://github.com/nakabonne/nestif"), - - linter.NewConfig(golinters.NewNilErr()). - WithSince("v1.38.0"). - WithLoadForGoAnalysis(). - WithPresets(linter.PresetBugs). - WithURL("https://github.com/gostaticanalysis/nilerr"), - - linter.NewConfig(golinters.NewNilNil(nilNilCfg)). - WithSince("v1.43.0"). - WithPresets(linter.PresetStyle). - WithLoadForGoAnalysis(). - WithURL("https://github.com/Antonboom/nilnil"), - - linter.NewConfig(golinters.NewNLReturn(nlreturnCfg)). - WithSince("v1.30.0"). - WithPresets(linter.PresetStyle). - WithURL("https://github.com/ssgreg/nlreturn"), - - linter.NewConfig(golinters.NewNoctx()). - WithSince("v1.28.0"). - WithLoadForGoAnalysis(). - WithPresets(linter.PresetPerformance, linter.PresetBugs). - WithURL("https://github.com/sonatard/noctx"), - - linter.NewConfig(golinters.NewNoNamedReturns(noNamedReturnsCfg)). - WithSince("v1.46.0"). - WithLoadForGoAnalysis(). - WithPresets(linter.PresetStyle). - WithURL("https://github.com/firefart/nonamedreturns"), - - linter.NewConfig(golinters.NewNoSnakeCase()). - WithSince("v1.47.0"). - WithPresets(linter.PresetStyle). - WithURL("https://github.com/sivchari/nosnakecase"). - Deprecated("The repository of the linter has been deprecated by the owner.", "v1.48.1", "revive(var-naming)"), - - linter.NewConfig(golinters.NewNoSprintfHostPort()). - WithSince("v1.46.0"). - WithPresets(linter.PresetStyle). - WithURL("https://github.com/stbenjam/no-sprintf-host-port"), - - linter.NewConfig(golinters.NewParallelTest(parallelTestCfg)). - WithSince("v1.33.0"). 
- WithLoadForGoAnalysis(). - WithPresets(linter.PresetStyle, linter.PresetTest). - WithURL("https://github.com/kunwardeep/paralleltest"), - - linter.NewConfig(golinters.NewPerfSprint(perfSprintCfg)). - WithSince("v1.55.0"). - WithLoadForGoAnalysis(). - WithPresets(linter.PresetPerformance). - WithURL("https://github.com/catenacyber/perfsprint"), - - linter.NewConfig(golinters.NewPreAlloc(preallocCfg)). - WithSince("v1.19.0"). - WithPresets(linter.PresetPerformance). - WithURL("https://github.com/alexkohler/prealloc"), - - linter.NewConfig(golinters.NewPredeclared(predeclaredCfg)). - WithSince("v1.35.0"). - WithPresets(linter.PresetStyle). - WithURL("https://github.com/nishanths/predeclared"), - - linter.NewConfig(golinters.NewPromlinter(promlinterCfg)). - WithSince("v1.40.0"). - WithPresets(linter.PresetStyle). - WithURL("https://github.com/yeya24/promlinter"), - - linter.NewConfig(golinters.NewProtoGetter(protogetterCfg)). - WithSince("v1.55.0"). - WithPresets(linter.PresetBugs). - WithLoadForGoAnalysis(). - WithAutoFix(). - WithURL("https://github.com/ghostiam/protogetter"), - - linter.NewConfig(golinters.NewReassign(reassignCfg)). - WithSince("1.49.0"). - WithPresets(linter.PresetBugs). - WithLoadForGoAnalysis(). - WithURL("https://github.com/curioswitch/go-reassign"), - - linter.NewConfig(golinters.NewRevive(reviveCfg)). - WithSince("v1.37.0"). - WithPresets(linter.PresetStyle, linter.PresetMetaLinter). - ConsiderSlow(). - WithURL("https://github.com/mgechev/revive"), - - linter.NewConfig(golinters.NewRowsErrCheck(rowserrcheckCfg)). - WithSince("v1.23.0"). - WithLoadForGoAnalysis(). - WithPresets(linter.PresetBugs, linter.PresetSQL). - WithURL("https://github.com/jingyugao/rowserrcheck"), - - linter.NewConfig(golinters.NewSlogLint(sloglintCfg)). - WithSince("v1.55.0"). - WithLoadForGoAnalysis(). - WithPresets(linter.PresetStyle, linter.PresetFormatting). - WithURL("https://github.com/go-simpler/sloglint"), - - linter.NewConfig(golinters.NewScopelint()). - WithSince("v1.12.0"). - WithPresets(linter.PresetBugs). - WithURL("https://github.com/kyoh86/scopelint"). - Deprecated("The repository of the linter has been deprecated by the owner.", "v1.39.0", "exportloopref"), - - linter.NewConfig(golinters.NewSQLCloseCheck()). - WithSince("v1.28.0"). - WithPresets(linter.PresetBugs, linter.PresetSQL). - WithLoadForGoAnalysis(). - WithURL("https://github.com/ryanrolds/sqlclosecheck"), - - linter.NewConfig(golinters.NewSpancheck(spancheckCfg)). - WithSince("v1.56.0"). - WithLoadForGoAnalysis(). - WithPresets(linter.PresetBugs). - WithURL("https://github.com/jjti/go-spancheck"), - - linter.NewConfig(golinters.NewStaticcheck(staticcheckCfg)). - WithEnabledByDefault(). - WithSince("v1.0.0"). - WithLoadForGoAnalysis(). - WithPresets(linter.PresetBugs, linter.PresetMetaLinter). - WithAlternativeNames(megacheckName). - WithURL("https://staticcheck.io/"), - - linter.NewConfig(golinters.NewStructcheck(structcheckCfg)). - WithSince("v1.0.0"). - WithLoadForGoAnalysis(). - WithPresets(linter.PresetUnused). - WithURL("https://github.com/opennota/check"). - Deprecated("The owner seems to have abandoned the linter.", "v1.49.0", "unused"), - - linter.NewConfig(golinters.NewStylecheck(stylecheckCfg)). - WithSince("v1.20.0"). - WithLoadForGoAnalysis(). - WithPresets(linter.PresetStyle). - WithURL("https://github.com/dominikh/go-tools/tree/master/stylecheck"), - - linter.NewConfig(golinters.NewTagAlign(tagalignCfg)). - WithSince("v1.53.0"). - WithPresets(linter.PresetStyle, linter.PresetFormatting). - WithAutoFix(). 
- WithURL("https://github.com/4meepo/tagalign"), - - linter.NewConfig(golinters.NewTagliatelle(tagliatelleCfg)). - WithSince("v1.40.0"). - WithPresets(linter.PresetStyle). - WithURL("https://github.com/ldez/tagliatelle"), - - linter.NewConfig(golinters.NewTenv(tenvCfg)). - WithSince("v1.43.0"). - WithPresets(linter.PresetStyle). - WithLoadForGoAnalysis(). - WithURL("https://github.com/sivchari/tenv"), - - linter.NewConfig(golinters.NewTestableexamples()). - WithSince("v1.50.0"). - WithPresets(linter.PresetTest). - WithURL("https://github.com/maratori/testableexamples"), - - linter.NewConfig(golinters.NewTestifylint(testifylintCfg)). - WithSince("v1.55.0"). - WithPresets(linter.PresetTest, linter.PresetBugs). - WithLoadForGoAnalysis(). - WithURL("https://github.com/Antonboom/testifylint"), - - linter.NewConfig(golinters.NewTestpackage(testpackageCfg)). - WithSince("v1.25.0"). - WithPresets(linter.PresetStyle, linter.PresetTest). - WithURL("https://github.com/maratori/testpackage"), - - linter.NewConfig(golinters.NewThelper(thelperCfg)). - WithSince("v1.34.0"). - WithPresets(linter.PresetStyle). - WithLoadForGoAnalysis(). - WithURL("https://github.com/kulti/thelper"), - - linter.NewConfig(golinters.NewTparallel()). - WithSince("v1.32.0"). - WithPresets(linter.PresetStyle, linter.PresetTest). - WithLoadForGoAnalysis(). - WithURL("https://github.com/moricho/tparallel"), - - linter.NewConfig(golinters.NewTypecheck()). - WithInternal(). - WithEnabledByDefault(). - WithSince("v1.3.0"). - WithLoadForGoAnalysis(). - WithPresets(linter.PresetBugs). - WithURL(""), - - linter.NewConfig(golinters.NewUnconvert()). - WithSince("v1.0.0"). - WithLoadForGoAnalysis(). - WithPresets(linter.PresetStyle). - WithURL("https://github.com/mdempsky/unconvert"), - - linter.NewConfig(golinters.NewUnparam(unparamCfg)). - WithSince("v1.9.0"). - WithPresets(linter.PresetUnused). - WithLoadForGoAnalysis(). - WithURL("https://github.com/mvdan/unparam"), - - linter.NewConfig(golinters.NewUnused(unusedCfg, staticcheckCfg)). - WithEnabledByDefault(). - WithSince("v1.20.0"). - WithLoadForGoAnalysis(). - WithPresets(linter.PresetUnused). - WithAlternativeNames(megacheckName). - ConsiderSlow(). - WithChangeTypes(). - WithURL("https://github.com/dominikh/go-tools/tree/master/unused"), - - linter.NewConfig(golinters.NewUseStdlibVars(usestdlibvars)). - WithSince("v1.48.0"). - WithPresets(linter.PresetStyle). - WithURL("https://github.com/sashamelentyev/usestdlibvars"), - - linter.NewConfig(golinters.NewVarcheck(varcheckCfg)). - WithSince("v1.0.0"). - WithLoadForGoAnalysis(). - WithPresets(linter.PresetUnused). - WithURL("https://github.com/opennota/check"). - Deprecated("The owner seems to have abandoned the linter.", "v1.49.0", "unused"), - - linter.NewConfig(golinters.NewVarnamelen(varnamelenCfg)). - WithSince("v1.43.0"). - WithPresets(linter.PresetStyle). - WithLoadForGoAnalysis(). - WithURL("https://github.com/blizzy78/varnamelen"), - - linter.NewConfig(golinters.NewWastedAssign()). - WithSince("v1.38.0"). - WithPresets(linter.PresetStyle). - WithLoadForGoAnalysis(). - WithURL("https://github.com/sanposhiho/wastedassign"), - - linter.NewConfig(golinters.NewWhitespace(whitespaceCfg)). - WithSince("v1.19.0"). - WithPresets(linter.PresetStyle). - WithAutoFix(). - WithURL("https://github.com/ultraware/whitespace"), - - linter.NewConfig(golinters.NewWrapcheck(wrapcheckCfg)). - WithSince("v1.32.0"). - WithPresets(linter.PresetStyle, linter.PresetError). - WithLoadForGoAnalysis(). 
- WithURL("https://github.com/tomarrell/wrapcheck"), - - linter.NewConfig(golinters.NewWSL(wslCfg)). - WithSince("v1.20.0"). - WithPresets(linter.PresetStyle). - WithURL("https://github.com/bombsimon/wsl"), - - linter.NewConfig(golinters.NewZerologLint()). - WithSince("v1.53.0"). - WithPresets(linter.PresetBugs). - WithLoadForGoAnalysis(). - WithURL("https://github.com/ykadowak/zerologlint"), - - // nolintlint must be last because it looks at the results of all the previous linters for unused nolint directives - linter.NewConfig(golinters.NewNoLintLint(noLintLintCfg)). - WithSince("v1.26.0"). - WithPresets(linter.PresetStyle). - WithURL("https://github.com/golangci/golangci-lint/blob/master/pkg/golinters/nolintlint/README.md"), - ) - - return linters -} - -func (m Manager) GetAllEnabledByDefaultLinters() []*linter.Config { - var ret []*linter.Config - for _, lc := range m.GetAllSupportedLinterConfigs() { - if lc.EnabledByDefault { - ret = append(ret, lc) - } - } - - return ret -} - -func linterConfigsToMap(lcs []*linter.Config) map[string]*linter.Config { - ret := map[string]*linter.Config{} - for _, lc := range lcs { - lc := lc // local copy - ret[lc.Name()] = lc - } - - return ret -} - -func (m Manager) GetAllLinterConfigsForPreset(p string) []*linter.Config { - var ret []*linter.Config - for _, lc := range m.GetAllSupportedLinterConfigs() { - if lc.IsDeprecated() { - continue - } - - for _, ip := range lc.InPresets { - if p == ip { - ret = append(ret, lc) - break - } - } - } - - return ret -} - -// Trims the Go version to keep only M.m. -// Since Go 1.21 the version inside the go.mod can be a patched version (ex: 1.21.0). -// https://go.dev/doc/toolchain#versions -// This a problem with staticcheck and gocritic. -func trimGoVersion(v string) string { - if v == "" { - return "" - } - - exp := regexp.MustCompile(`(\d\.\d+)(?:\.\d+|[a-z]+\d)`) - - if exp.MatchString(v) { - return exp.FindStringSubmatch(v)[1] - } - - return v -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/lint/lintersdb/validator.go b/vendor/github.com/golangci/golangci-lint/pkg/lint/lintersdb/validator.go deleted file mode 100644 index 52a70d859..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/lint/lintersdb/validator.go +++ /dev/null @@ -1,108 +0,0 @@ -package lintersdb - -import ( - "errors" - "fmt" - "strings" - - "github.com/golangci/golangci-lint/pkg/config" -) - -type Validator struct { - m *Manager -} - -func NewValidator(m *Manager) *Validator { - return &Validator{ - m: m, - } -} - -func (v Validator) validateLintersNames(cfg *config.Linters) error { - allNames := append([]string{}, cfg.Enable...) - allNames = append(allNames, cfg.Disable...) 
- - var unknownNames []string - - for _, name := range allNames { - if v.m.GetLinterConfigs(name) == nil { - unknownNames = append(unknownNames, name) - } - } - - if len(unknownNames) > 0 { - return fmt.Errorf("unknown linters: '%v', run 'golangci-lint help linters' to see the list of supported linters", - strings.Join(unknownNames, ",")) - } - - return nil -} - -func (v Validator) validatePresets(cfg *config.Linters) error { - allPresets := v.m.allPresetsSet() - for _, p := range cfg.Presets { - if !allPresets[p] { - return fmt.Errorf("no such preset %q: only next presets exist: (%s)", - p, strings.Join(v.m.AllPresets(), "|")) - } - } - - if len(cfg.Presets) != 0 && cfg.EnableAll { - return errors.New("--presets is incompatible with --enable-all") - } - - return nil -} - -func (v Validator) validateAllDisableEnableOptions(cfg *config.Linters) error { - if cfg.EnableAll && cfg.DisableAll { - return errors.New("--enable-all and --disable-all options must not be combined") - } - - if cfg.DisableAll { - if len(cfg.Enable) == 0 && len(cfg.Presets) == 0 { - return errors.New("all linters were disabled, but no one linter was enabled: must enable at least one") - } - - if len(cfg.Disable) != 0 { - return fmt.Errorf("can't combine options --disable-all and --disable %s", cfg.Disable[0]) - } - } - - if cfg.EnableAll && len(cfg.Enable) != 0 && !cfg.Fast { - return fmt.Errorf("can't combine options --enable-all and --enable %s", cfg.Enable[0]) - } - - return nil -} - -func (v Validator) validateDisabledAndEnabledAtOneMoment(cfg *config.Linters) error { - enabledLintersSet := map[string]bool{} - for _, name := range cfg.Enable { - enabledLintersSet[name] = true - } - - for _, name := range cfg.Disable { - if enabledLintersSet[name] { - return fmt.Errorf("linter %q can't be disabled and enabled at one moment", name) - } - } - - return nil -} - -func (v Validator) validateEnabledDisabledLintersConfig(cfg *config.Linters) error { - validators := []func(cfg *config.Linters) error{ - v.validateLintersNames, - v.validatePresets, - v.validateAllDisableEnableOptions, - v.validateDisabledAndEnabledAtOneMoment, - } - for _, v := range validators { - if err := v(cfg); err != nil { - return err - } - } - - return nil -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/lint/load.go b/vendor/github.com/golangci/golangci-lint/pkg/lint/load.go deleted file mode 100644 index babad5ba6..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/lint/load.go +++ /dev/null @@ -1,326 +0,0 @@ -package lint - -import ( - "context" - "fmt" - "go/build" - "go/token" - "os" - "path/filepath" - "regexp" - "strings" - "time" - - "golang.org/x/tools/go/packages" - - "github.com/golangci/golangci-lint/internal/pkgcache" - "github.com/golangci/golangci-lint/pkg/config" - "github.com/golangci/golangci-lint/pkg/exitcodes" - "github.com/golangci/golangci-lint/pkg/fsutils" - "github.com/golangci/golangci-lint/pkg/golinters/goanalysis/load" - "github.com/golangci/golangci-lint/pkg/goutil" - "github.com/golangci/golangci-lint/pkg/lint/linter" - "github.com/golangci/golangci-lint/pkg/logutils" -) - -type ContextLoader struct { - cfg *config.Config - log logutils.Log - debugf logutils.DebugFunc - goenv *goutil.Env - pkgTestIDRe *regexp.Regexp - lineCache *fsutils.LineCache - fileCache *fsutils.FileCache - pkgCache *pkgcache.Cache - loadGuard *load.Guard -} - -func NewContextLoader(cfg *config.Config, log logutils.Log, goenv *goutil.Env, - lineCache *fsutils.LineCache, fileCache *fsutils.FileCache, pkgCache *pkgcache.Cache, 
loadGuard *load.Guard) *ContextLoader { - return &ContextLoader{ - cfg: cfg, - log: log, - debugf: logutils.Debug(logutils.DebugKeyLoader), - goenv: goenv, - pkgTestIDRe: regexp.MustCompile(`^(.*) \[(.*)\.test\]`), - lineCache: lineCache, - fileCache: fileCache, - pkgCache: pkgCache, - loadGuard: loadGuard, - } -} - -func (cl *ContextLoader) prepareBuildContext() { - // Set GOROOT to have working cross-compilation: cross-compiled binaries - // have invalid GOROOT. XXX: can't use runtime.GOROOT(). - goroot := cl.goenv.Get(goutil.EnvGoRoot) - if goroot == "" { - return - } - - os.Setenv(string(goutil.EnvGoRoot), goroot) - build.Default.GOROOT = goroot - build.Default.BuildTags = cl.cfg.Run.BuildTags -} - -func (cl *ContextLoader) findLoadMode(linters []*linter.Config) packages.LoadMode { - loadMode := packages.LoadMode(0) - for _, lc := range linters { - loadMode |= lc.LoadMode - } - - return loadMode -} - -func (cl *ContextLoader) buildArgs() []string { - args := cl.cfg.Run.Args - if len(args) == 0 { - return []string{"./..."} - } - - var retArgs []string - for _, arg := range args { - if strings.HasPrefix(arg, ".") || filepath.IsAbs(arg) { - retArgs = append(retArgs, arg) - } else { - // go/packages doesn't work well if we don't have the prefix ./ for local packages - retArgs = append(retArgs, fmt.Sprintf(".%c%s", filepath.Separator, arg)) - } - } - - return retArgs -} - -func (cl *ContextLoader) makeBuildFlags() ([]string, error) { - var buildFlags []string - - if len(cl.cfg.Run.BuildTags) != 0 { - // go help build - buildFlags = append(buildFlags, "-tags", strings.Join(cl.cfg.Run.BuildTags, " ")) - cl.log.Infof("Using build tags: %v", cl.cfg.Run.BuildTags) - } - - mod := cl.cfg.Run.ModulesDownloadMode - if mod != "" { - // go help modules - allowedMods := []string{"mod", "readonly", "vendor"} - var ok bool - for _, am := range allowedMods { - if am == mod { - ok = true - break - } - } - if !ok { - return nil, fmt.Errorf("invalid modules download path %s, only (%s) allowed", mod, strings.Join(allowedMods, "|")) - } - - buildFlags = append(buildFlags, fmt.Sprintf("-mod=%s", cl.cfg.Run.ModulesDownloadMode)) - } - - return buildFlags, nil -} - -func stringifyLoadMode(mode packages.LoadMode) string { - m := map[packages.LoadMode]string{ - packages.NeedCompiledGoFiles: "compiled_files", - packages.NeedDeps: "deps", - packages.NeedExportFile: "exports_file", - packages.NeedFiles: "files", - packages.NeedImports: "imports", - packages.NeedName: "name", - packages.NeedSyntax: "syntax", - packages.NeedTypes: "types", - packages.NeedTypesInfo: "types_info", - packages.NeedTypesSizes: "types_sizes", - } - - var flags []string - for flag, flagStr := range m { - if mode&flag != 0 { - flags = append(flags, flagStr) - } - } - - return fmt.Sprintf("%d (%s)", mode, strings.Join(flags, "|")) -} - -func (cl *ContextLoader) debugPrintLoadedPackages(pkgs []*packages.Package) { - cl.debugf("loaded %d pkgs", len(pkgs)) - for i, pkg := range pkgs { - var syntaxFiles []string - for _, sf := range pkg.Syntax { - syntaxFiles = append(syntaxFiles, pkg.Fset.Position(sf.Pos()).Filename) - } - cl.debugf("Loaded pkg #%d: ID=%s GoFiles=%s CompiledGoFiles=%s Syntax=%s", - i, pkg.ID, pkg.GoFiles, pkg.CompiledGoFiles, syntaxFiles) - } -} - -func (cl *ContextLoader) parseLoadedPackagesErrors(pkgs []*packages.Package) error { - for _, pkg := range pkgs { - var errs []packages.Error - for _, err := range pkg.Errors { - // quick fix: skip error related to `go list` invocation by packages.Load() - // The behavior has been 
changed between go1.19 and go1.20, the error is now inside the JSON content. - // https://github.com/golangci/golangci-lint/pull/3414#issuecomment-1364756303 - if strings.Contains(err.Msg, "# command-line-arguments") { - continue - } - - errs = append(errs, err) - - if strings.Contains(err.Msg, "no Go files") { - return fmt.Errorf("package %s: %w", pkg.PkgPath, exitcodes.ErrNoGoFiles) - } - if strings.Contains(err.Msg, "cannot find package") { - // when analyzing not existing directory - return fmt.Errorf("%v: %w", err.Msg, exitcodes.ErrFailure) - } - } - - pkg.Errors = errs - } - - return nil -} - -func (cl *ContextLoader) loadPackages(ctx context.Context, loadMode packages.LoadMode) ([]*packages.Package, error) { - defer func(startedAt time.Time) { - cl.log.Infof("Go packages loading at mode %s took %s", stringifyLoadMode(loadMode), time.Since(startedAt)) - }(time.Now()) - - cl.prepareBuildContext() - - buildFlags, err := cl.makeBuildFlags() - if err != nil { - return nil, fmt.Errorf("failed to make build flags for go list: %w", err) - } - - conf := &packages.Config{ - Mode: loadMode, - Tests: cl.cfg.Run.AnalyzeTests, - Context: ctx, - BuildFlags: buildFlags, - Logf: cl.debugf, - // TODO: use fset, parsefile, overlay - } - - args := cl.buildArgs() - cl.debugf("Built loader args are %s", args) - pkgs, err := packages.Load(conf, args...) - if err != nil { - return nil, fmt.Errorf("failed to load with go/packages: %w", err) - } - - // Currently, go/packages doesn't guarantee that error will be returned - // if context was canceled. See - // https://github.com/golang/tools/commit/c5cec6710e927457c3c29d6c156415e8539a5111#r39261855 - if ctx.Err() != nil { - return nil, fmt.Errorf("timed out to load packages: %w", ctx.Err()) - } - - if loadMode&packages.NeedSyntax == 0 { - // Needed e.g. for go/analysis loading. - fset := token.NewFileSet() - packages.Visit(pkgs, nil, func(pkg *packages.Package) { - pkg.Fset = fset - cl.loadGuard.AddMutexForPkg(pkg) - }) - } - - cl.debugPrintLoadedPackages(pkgs) - - if err := cl.parseLoadedPackagesErrors(pkgs); err != nil { - return nil, err - } - - return cl.filterTestMainPackages(pkgs), nil -} - -func (cl *ContextLoader) tryParseTestPackage(pkg *packages.Package) (name string, isTest bool) { - matches := cl.pkgTestIDRe.FindStringSubmatch(pkg.ID) - if matches == nil { - return "", false - } - - return matches[1], true -} - -func (cl *ContextLoader) filterTestMainPackages(pkgs []*packages.Package) []*packages.Package { - var retPkgs []*packages.Package - for _, pkg := range pkgs { - if pkg.Name == "main" && strings.HasSuffix(pkg.PkgPath, ".test") { - // it's an implicit testmain package - cl.debugf("skip pkg ID=%s", pkg.ID) - continue - } - - retPkgs = append(retPkgs, pkg) - } - - return retPkgs -} - -func (cl *ContextLoader) filterDuplicatePackages(pkgs []*packages.Package) []*packages.Package { - packagesWithTests := map[string]bool{} - for _, pkg := range pkgs { - name, isTest := cl.tryParseTestPackage(pkg) - if !isTest { - continue - } - packagesWithTests[name] = true - } - - cl.debugf("package with tests: %#v", packagesWithTests) - - var retPkgs []*packages.Package - for _, pkg := range pkgs { - _, isTest := cl.tryParseTestPackage(pkg) - if !isTest && packagesWithTests[pkg.PkgPath] { - // If tests loading is enabled, - // for package with files a.go and a_test.go go/packages loads two packages: - // 1. ID=".../a" GoFiles=[a.go] - // 2. 
ID=".../a [.../a.test]" GoFiles=[a.go a_test.go] - // We need only the second package, otherwise we can get warnings about unused variables/fields/functions - // in a.go if they are used only in a_test.go. - cl.debugf("skip pkg ID=%s because we load it with test package", pkg.ID) - continue - } - - retPkgs = append(retPkgs, pkg) - } - - return retPkgs -} - -func (cl *ContextLoader) Load(ctx context.Context, linters []*linter.Config) (*linter.Context, error) { - loadMode := cl.findLoadMode(linters) - pkgs, err := cl.loadPackages(ctx, loadMode) - if err != nil { - return nil, fmt.Errorf("failed to load packages: %w", err) - } - - deduplicatedPkgs := cl.filterDuplicatePackages(pkgs) - - if len(deduplicatedPkgs) == 0 { - return nil, exitcodes.ErrNoGoFiles - } - - ret := &linter.Context{ - Packages: deduplicatedPkgs, - - // At least `unused` linters works properly only on original (not deduplicated) packages, - // see https://github.com/golangci/golangci-lint/pull/585. - OriginalPackages: pkgs, - - Cfg: cl.cfg, - Log: cl.log, - FileCache: cl.fileCache, - LineCache: cl.lineCache, - PkgCache: cl.pkgCache, - LoadGuard: cl.loadGuard, - } - - return ret, nil -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/lint/runner.go b/vendor/github.com/golangci/golangci-lint/pkg/lint/runner.go deleted file mode 100644 index e7cb17555..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/lint/runner.go +++ /dev/null @@ -1,350 +0,0 @@ -package lint - -import ( - "context" - "errors" - "fmt" - "runtime/debug" - "strings" - - gopackages "golang.org/x/tools/go/packages" - - "github.com/golangci/golangci-lint/internal/errorutil" - "github.com/golangci/golangci-lint/pkg/config" - "github.com/golangci/golangci-lint/pkg/fsutils" - "github.com/golangci/golangci-lint/pkg/goutil" - "github.com/golangci/golangci-lint/pkg/lint/linter" - "github.com/golangci/golangci-lint/pkg/lint/lintersdb" - "github.com/golangci/golangci-lint/pkg/logutils" - "github.com/golangci/golangci-lint/pkg/packages" - "github.com/golangci/golangci-lint/pkg/result" - "github.com/golangci/golangci-lint/pkg/result/processors" - "github.com/golangci/golangci-lint/pkg/timeutils" -) - -type Runner struct { - Processors []processors.Processor - Log logutils.Log -} - -func NewRunner(cfg *config.Config, log logutils.Log, goenv *goutil.Env, - es *lintersdb.EnabledSet, - lineCache *fsutils.LineCache, fileCache *fsutils.FileCache, - dbManager *lintersdb.Manager, pkgs []*gopackages.Package) (*Runner, error) { - // Beware that some processors need to add the path prefix when working with paths - // because they get invoked before the path prefixer (exclude and severity rules) - // or process other paths (skip files). - files := fsutils.NewFiles(lineCache, cfg.Output.PathPrefix) - - skipFilesProcessor, err := processors.NewSkipFiles(cfg.Run.SkipFiles, cfg.Output.PathPrefix) - if err != nil { - return nil, err - } - - skipDirs := cfg.Run.SkipDirs - if cfg.Run.UseDefaultSkipDirs { - skipDirs = append(skipDirs, packages.StdExcludeDirRegexps...) 
- } - skipDirsProcessor, err := processors.NewSkipDirs(skipDirs, log.Child(logutils.DebugKeySkipDirs), cfg.Run.Args, cfg.Output.PathPrefix) - if err != nil { - return nil, err - } - - enabledLinters, err := es.GetEnabledLintersMap() - if err != nil { - return nil, fmt.Errorf("failed to get enabled linters: %w", err) - } - - // print deprecated messages - if !cfg.InternalCmdTest { - for name, lc := range enabledLinters { - if !lc.IsDeprecated() { - continue - } - - var extra string - if lc.Deprecation.Replacement != "" { - extra = fmt.Sprintf("Replaced by %s.", lc.Deprecation.Replacement) - } - - log.Warnf("The linter '%s' is deprecated (since %s) due to: %s %s", name, lc.Deprecation.Since, lc.Deprecation.Message, extra) - } - } - - return &Runner{ - Processors: []processors.Processor{ - processors.NewCgo(goenv), - - // Must go after Cgo. - processors.NewFilenameUnadjuster(pkgs, log.Child(logutils.DebugKeyFilenameUnadjuster)), - - // Must be before diff, nolint and exclude autogenerated processor at least. - processors.NewPathPrettifier(), - skipFilesProcessor, - skipDirsProcessor, // must be after path prettifier - - processors.NewAutogeneratedExclude(), - - // Must be before exclude because users see already marked output and configure excluding by it. - processors.NewIdentifierMarker(), - - getExcludeProcessor(&cfg.Issues), - getExcludeRulesProcessor(&cfg.Issues, log, files), - processors.NewNolint(log.Child(logutils.DebugKeyNolint), dbManager, enabledLinters), - - processors.NewUniqByLine(cfg), - processors.NewDiff(cfg.Issues.Diff, cfg.Issues.DiffFromRevision, cfg.Issues.DiffPatchFilePath, cfg.Issues.WholeFiles), - processors.NewMaxPerFileFromLinter(cfg), - processors.NewMaxSameIssues(cfg.Issues.MaxSameIssues, log.Child(logutils.DebugKeyMaxSameIssues), cfg), - processors.NewMaxFromLinter(cfg.Issues.MaxIssuesPerLinter, log.Child(logutils.DebugKeyMaxFromLinter), cfg), - processors.NewSourceCode(lineCache, log.Child(logutils.DebugKeySourceCode)), - processors.NewPathShortener(), - getSeverityRulesProcessor(&cfg.Severity, log, files), - - // The fixer still needs to see paths for the issues that are relative to the current directory. - processors.NewFixer(cfg, log, fileCache), - - // Now we can modify the issues for output. - processors.NewPathPrefixer(cfg.Output.PathPrefix), - processors.NewSortResults(cfg), - }, - Log: log, - }, nil -} - -func (r *Runner) runLinterSafe(ctx context.Context, lintCtx *linter.Context, - lc *linter.Config) (ret []result.Issue, err error) { - defer func() { - if panicData := recover(); panicData != nil { - if pe, ok := panicData.(*errorutil.PanicError); ok { - err = fmt.Errorf("%s: %w", lc.Name(), pe) - - // Don't print stacktrace from goroutines twice - r.Log.Errorf("Panic: %s: %s", pe, pe.Stack()) - } else { - err = fmt.Errorf("panic occurred: %s", panicData) - r.Log.Errorf("Panic stack trace: %s", debug.Stack()) - } - } - }() - - issues, err := lc.Linter.Run(ctx, lintCtx) - - if lc.DoesChangeTypes { - // Packages in lintCtx might be dirty due to the last analysis, - // which affects to the next analysis. - // To avoid this issue, we clear type information from the packages. - // See https://github.com/golangci/golangci-lint/pull/944. - // Currently, DoesChangeTypes is true only for `unused`. 
- lintCtx.ClearTypesInPackages() - } - - if err != nil { - return nil, err - } - - for i := range issues { - if issues[i].FromLinter == "" { - issues[i].FromLinter = lc.Name() - } - } - - return issues, nil -} - -type processorStat struct { - inCount int - outCount int -} - -func (r Runner) processLintResults(inIssues []result.Issue) []result.Issue { - sw := timeutils.NewStopwatch("processing", r.Log) - - var issuesBefore, issuesAfter int - statPerProcessor := map[string]processorStat{} - - var outIssues []result.Issue - if len(inIssues) != 0 { - issuesBefore += len(inIssues) - outIssues = r.processIssues(inIssues, sw, statPerProcessor) - issuesAfter += len(outIssues) - } - - // finalize processors: logging, clearing, no heavy work here - - for _, p := range r.Processors { - p := p - sw.TrackStage(p.Name(), func() { - p.Finish() - }) - } - - if issuesBefore != issuesAfter { - r.Log.Infof("Issues before processing: %d, after processing: %d", issuesBefore, issuesAfter) - } - r.printPerProcessorStat(statPerProcessor) - sw.PrintStages() - - return outIssues -} - -func (r Runner) printPerProcessorStat(stat map[string]processorStat) { - parts := make([]string, 0, len(stat)) - for name, ps := range stat { - if ps.inCount != 0 { - parts = append(parts, fmt.Sprintf("%s: %d/%d", name, ps.outCount, ps.inCount)) - } - } - if len(parts) != 0 { - r.Log.Infof("Processors filtering stat (out/in): %s", strings.Join(parts, ", ")) - } -} - -func (r Runner) Run(ctx context.Context, linters []*linter.Config, lintCtx *linter.Context) ([]result.Issue, error) { - sw := timeutils.NewStopwatch("linters", r.Log) - defer sw.Print() - - var ( - lintErrors error - issues []result.Issue - ) - - for _, lc := range linters { - lc := lc - sw.TrackStage(lc.Name(), func() { - linterIssues, err := r.runLinterSafe(ctx, lintCtx, lc) - if err != nil { - lintErrors = errors.Join(lintErrors, fmt.Errorf("can't run linter %s", lc.Linter.Name()), err) - r.Log.Warnf("Can't run linter %s: %v", lc.Linter.Name(), err) - - return - } - - issues = append(issues, linterIssues...) 
- }) - } - - return r.processLintResults(issues), lintErrors -} - -func (r *Runner) processIssues(issues []result.Issue, sw *timeutils.Stopwatch, statPerProcessor map[string]processorStat) []result.Issue { - for _, p := range r.Processors { - var newIssues []result.Issue - var err error - p := p - sw.TrackStage(p.Name(), func() { - newIssues, err = p.Process(issues) - }) - - if err != nil { - r.Log.Warnf("Can't process result by %s processor: %s", p.Name(), err) - } else { - stat := statPerProcessor[p.Name()] - stat.inCount += len(issues) - stat.outCount += len(newIssues) - statPerProcessor[p.Name()] = stat - issues = newIssues - } - - if issues == nil { - issues = []result.Issue{} - } - } - - return issues -} - -func getExcludeProcessor(cfg *config.Issues) processors.Processor { - var excludeTotalPattern string - - if len(cfg.ExcludePatterns) != 0 { - excludeTotalPattern = fmt.Sprintf("(%s)", strings.Join(cfg.ExcludePatterns, "|")) - } - - var excludeProcessor processors.Processor - if cfg.ExcludeCaseSensitive { - excludeProcessor = processors.NewExcludeCaseSensitive(excludeTotalPattern) - } else { - excludeProcessor = processors.NewExclude(excludeTotalPattern) - } - - return excludeProcessor -} - -func getExcludeRulesProcessor(cfg *config.Issues, log logutils.Log, files *fsutils.Files) processors.Processor { - var excludeRules []processors.ExcludeRule - for _, r := range cfg.ExcludeRules { - excludeRules = append(excludeRules, processors.ExcludeRule{ - BaseRule: processors.BaseRule{ - Text: r.Text, - Source: r.Source, - Path: r.Path, - PathExcept: r.PathExcept, - Linters: r.Linters, - }, - }) - } - - if cfg.UseDefaultExcludes { - for _, r := range config.GetExcludePatterns(cfg.IncludeDefaultExcludes) { - excludeRules = append(excludeRules, processors.ExcludeRule{ - BaseRule: processors.BaseRule{ - Text: r.Pattern, - Linters: []string{r.Linter}, - }, - }) - } - } - - var excludeRulesProcessor processors.Processor - if cfg.ExcludeCaseSensitive { - excludeRulesProcessor = processors.NewExcludeRulesCaseSensitive( - excludeRules, - files, - log.Child(logutils.DebugKeyExcludeRules), - ) - } else { - excludeRulesProcessor = processors.NewExcludeRules( - excludeRules, - files, - log.Child(logutils.DebugKeyExcludeRules), - ) - } - - return excludeRulesProcessor -} - -func getSeverityRulesProcessor(cfg *config.Severity, log logutils.Log, files *fsutils.Files) processors.Processor { - var severityRules []processors.SeverityRule - for _, r := range cfg.Rules { - severityRules = append(severityRules, processors.SeverityRule{ - Severity: r.Severity, - BaseRule: processors.BaseRule{ - Text: r.Text, - Source: r.Source, - Path: r.Path, - PathExcept: r.PathExcept, - Linters: r.Linters, - }, - }) - } - - var severityRulesProcessor processors.Processor - if cfg.CaseSensitive { - severityRulesProcessor = processors.NewSeverityRulesCaseSensitive( - cfg.Default, - severityRules, - files, - log.Child(logutils.DebugKeySeverityRules), - ) - } else { - severityRulesProcessor = processors.NewSeverityRules( - cfg.Default, - severityRules, - files, - log.Child(logutils.DebugKeySeverityRules), - ) - } - - return severityRulesProcessor -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/logutils/log.go b/vendor/github.com/golangci/golangci-lint/pkg/logutils/log.go deleted file mode 100644 index 16067e490..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/logutils/log.go +++ /dev/null @@ -1,31 +0,0 @@ -package logutils - -type Log interface { - Fatalf(format string, args ...any) - Panicf(format 
string, args ...any) - Errorf(format string, args ...any) - Warnf(format string, args ...any) - Infof(format string, args ...any) - - Child(name string) Log - SetLevel(level LogLevel) -} - -type LogLevel int - -const ( - // LogLevelDebug Debug messages, write to debug logs only by logutils.Debug. - LogLevelDebug LogLevel = 0 - - // LogLevelInfo Information messages, don't write too many messages, - // only useful ones: they are shown when running with -v. - LogLevelInfo LogLevel = 1 - - // LogLevelWarn Hidden errors: non-critical errors: work can be continued, no need to fail whole program; - // tests will crash if any warning occurred. - LogLevelWarn LogLevel = 2 - - // LogLevelError Only not hidden from user errors: whole program failing, usually - // error logging happens in 1-2 places: in the "main" function. - LogLevelError LogLevel = 3 -) diff --git a/vendor/github.com/golangci/golangci-lint/pkg/logutils/logutils.go b/vendor/github.com/golangci/golangci-lint/pkg/logutils/logutils.go deleted file mode 100644 index 94479bc7b..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/logutils/logutils.go +++ /dev/null @@ -1,106 +0,0 @@ -package logutils - -import ( - "os" - "strings" -) - -// envDebug value: one or several debug keys. -// examples: -// - Remove output to `/dev/null`: `GL_DEBUG=linters_output ./golangci-lint run` -// - Show linters configuration: `GL_DEBUG=enabled_linters golangci-lint run` -// - Some analysis details: `GL_DEBUG=goanalysis/analyze,goanalysis/facts golangci-lint run` -const envDebug = "GL_DEBUG" - -const ( - DebugKeyAutogenExclude = "autogen_exclude" // Debugs a filter excluding autogenerated source code. - DebugKeyBinSalt = "bin_salt" - DebugKeyConfigReader = "config_reader" - DebugKeyEmpty = "" - DebugKeyEnabledLinters = "enabled_linters" - DebugKeyEnv = "env" // Debugs `go env` command. - DebugKeyExcludeRules = "exclude_rules" - DebugKeyExec = "exec" - DebugKeyFilenameUnadjuster = "filename_unadjuster" - DebugKeyForbidigo = "forbidigo" - DebugKeyGoEnv = "goenv" - DebugKeyLinter = "linter" - DebugKeyLintersContext = "linters_context" - DebugKeyLintersDB = "lintersdb" - DebugKeyLintersOutput = "linters_output" - DebugKeyLoader = "loader" // Debugs packages loading (including `go/packages` internal debugging). - DebugKeyMaxFromLinter = "max_from_linter" - DebugKeyMaxSameIssues = "max_same_issues" - DebugKeyPkgCache = "pkgcache" - DebugKeyRunner = "runner" - DebugKeySeverityRules = "severity_rules" - DebugKeySkipDirs = "skip_dirs" - DebugKeySourceCode = "source_code" - DebugKeyStopwatch = "stopwatch" - DebugKeyTabPrinter = "tab_printer" - DebugKeyTest = "test" - DebugKeyTextPrinter = "text_printer" -) - -const ( - DebugKeyGoAnalysis = "goanalysis" - - DebugKeyGoAnalysisAnalyze = DebugKeyGoAnalysis + "/analyze" - DebugKeyGoAnalysisIssuesCache = DebugKeyGoAnalysis + "/issues/cache" - DebugKeyGoAnalysisMemory = DebugKeyGoAnalysis + "/memory" - - DebugKeyGoAnalysisFacts = DebugKeyGoAnalysis + "/facts" - DebugKeyGoAnalysisFactsCache = DebugKeyGoAnalysisFacts + "/cache" - DebugKeyGoAnalysisFactsExport = DebugKeyGoAnalysisFacts + "/export" - DebugKeyGoAnalysisFactsInherit = DebugKeyGoAnalysisFacts + "/inherit" -) - -const ( - DebugKeyGoCritic = "gocritic" // Debugs `go-critic` linter. - DebugKeyMegacheck = "megacheck" // Debugs `staticcheck` related linters. - DebugKeyNolint = "nolint" // Debugs a filter excluding issues by `//nolint` comments. - DebugKeyRevive = "revive" // Debugs `revive` linter. 
-) - -func getEnabledDebugs() map[string]bool { - ret := map[string]bool{} - debugVar := os.Getenv(envDebug) - if debugVar == "" { - return ret - } - - for _, tag := range strings.Split(debugVar, ",") { - ret[tag] = true - } - - return ret -} - -var enabledDebugs = getEnabledDebugs() - -type DebugFunc func(format string, args ...any) - -func nopDebugf(_ string, _ ...any) {} - -func Debug(tag string) DebugFunc { - if !enabledDebugs[tag] { - return nopDebugf - } - - logger := NewStderrLog(tag) - logger.SetLevel(LogLevelDebug) - - return func(format string, args ...any) { - logger.Debugf(format, args...) - } -} - -func HaveDebugTag(tag string) bool { - return enabledDebugs[tag] -} - -func SetupVerboseLog(log Log, isVerbose bool) { - if isVerbose { - log.SetLevel(LogLevelInfo) - } -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/logutils/mock.go b/vendor/github.com/golangci/golangci-lint/pkg/logutils/mock.go deleted file mode 100644 index efda8cc20..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/logutils/mock.go +++ /dev/null @@ -1,47 +0,0 @@ -package logutils - -import ( - "github.com/stretchr/testify/mock" -) - -type MockLog struct { - mock.Mock -} - -func NewMockLog() *MockLog { - return &MockLog{} -} - -func (m *MockLog) Fatalf(format string, args ...any) { - mArgs := []any{format} - m.Called(append(mArgs, args...)...) -} - -func (m *MockLog) Panicf(format string, args ...any) { - mArgs := []any{format} - m.Called(append(mArgs, args...)...) -} - -func (m *MockLog) Errorf(format string, args ...any) { - mArgs := []any{format} - m.Called(append(mArgs, args...)...) -} - -func (m *MockLog) Warnf(format string, args ...any) { - mArgs := []any{format} - m.Called(append(mArgs, args...)...) -} - -func (m *MockLog) Infof(format string, args ...any) { - mArgs := []any{format} - m.Called(append(mArgs, args...)...) 
-} - -func (m *MockLog) Child(name string) Log { - m.Called(name) - return m -} - -func (m *MockLog) SetLevel(level LogLevel) { - m.Called(level) -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/logutils/out.go b/vendor/github.com/golangci/golangci-lint/pkg/logutils/out.go deleted file mode 100644 index 67c70dc8f..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/logutils/out.go +++ /dev/null @@ -1,9 +0,0 @@ -package logutils - -import ( - "github.com/fatih/color" - colorable "github.com/mattn/go-colorable" -) - -var StdOut = color.Output // https://github.com/golangci/golangci-lint/issues/14 -var StdErr = colorable.NewColorableStderr() diff --git a/vendor/github.com/golangci/golangci-lint/pkg/logutils/stderr_log.go b/vendor/github.com/golangci/golangci-lint/pkg/logutils/stderr_log.go deleted file mode 100644 index 367c94f38..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/logutils/stderr_log.go +++ /dev/null @@ -1,129 +0,0 @@ -package logutils - -import ( - "fmt" - "os" - "time" - - "github.com/sirupsen/logrus" //nolint:depguard - - "github.com/golangci/golangci-lint/pkg/exitcodes" -) - -const ( - // envLogLevel values: "error", "err", "warning", "warn","info" - envLogLevel = "LOG_LEVEL" - // envLogTimestamp value: "1" - envLogTimestamp = "LOG_TIMESTAMP" -) - -type StderrLog struct { - name string - logger *logrus.Logger - level LogLevel -} - -var _ Log = NewStderrLog(DebugKeyEmpty) - -func NewStderrLog(name string) *StderrLog { - sl := &StderrLog{ - name: name, - logger: logrus.New(), - level: LogLevelWarn, - } - - switch os.Getenv(envLogLevel) { - case "error", "err": - sl.logger.SetLevel(logrus.ErrorLevel) - case "warning", "warn": - sl.logger.SetLevel(logrus.WarnLevel) - case "info": - sl.logger.SetLevel(logrus.InfoLevel) - default: - sl.logger.SetLevel(logrus.DebugLevel) - } - - sl.logger.Out = StdErr - formatter := &logrus.TextFormatter{ - DisableTimestamp: true, // `INFO[0007] msg` -> `INFO msg` - EnvironmentOverrideColors: true, - } - if os.Getenv(envLogTimestamp) == "1" { - formatter.DisableTimestamp = false - formatter.FullTimestamp = true - formatter.TimestampFormat = time.StampMilli - } - sl.logger.Formatter = formatter - - return sl -} - -func (sl StderrLog) prefix() string { - prefix := "" - if sl.name != "" { - prefix = fmt.Sprintf("[%s] ", sl.name) - } - - return prefix -} - -func (sl StderrLog) Fatalf(format string, args ...any) { - sl.logger.Errorf("%s%s", sl.prefix(), fmt.Sprintf(format, args...)) - os.Exit(exitcodes.Failure) -} - -func (sl StderrLog) Panicf(format string, args ...any) { - v := fmt.Sprintf("%s%s", sl.prefix(), fmt.Sprintf(format, args...)) - panic(v) -} - -func (sl StderrLog) Errorf(format string, args ...any) { - if sl.level > LogLevelError { - return - } - - sl.logger.Errorf("%s%s", sl.prefix(), fmt.Sprintf(format, args...)) - // don't call exitIfTest() because the idea is to - // crash on hidden errors (warnings); but Errorf MUST NOT be - // called on hidden errors, see log levels comments. 
-} - -func (sl StderrLog) Warnf(format string, args ...any) { - if sl.level > LogLevelWarn { - return - } - - sl.logger.Warnf("%s%s", sl.prefix(), fmt.Sprintf(format, args...)) -} - -func (sl StderrLog) Infof(format string, args ...any) { - if sl.level > LogLevelInfo { - return - } - - sl.logger.Infof("%s%s", sl.prefix(), fmt.Sprintf(format, args...)) -} - -func (sl StderrLog) Debugf(format string, args ...any) { - if sl.level > LogLevelDebug { - return - } - - sl.logger.Debugf("%s%s", sl.prefix(), fmt.Sprintf(format, args...)) -} - -func (sl StderrLog) Child(name string) Log { - prefix := "" - if sl.name != "" { - prefix = sl.name + "/" - } - - child := sl - child.name = prefix + name - - return &child -} - -func (sl *StderrLog) SetLevel(level LogLevel) { - sl.level = level -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/packages/errors.go b/vendor/github.com/golangci/golangci-lint/pkg/packages/errors.go deleted file mode 100644 index ff37651af..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/packages/errors.go +++ /dev/null @@ -1,37 +0,0 @@ -package packages - -import ( - "errors" - "fmt" - "go/token" - "strconv" - "strings" -) - -func ParseErrorPosition(pos string) (*token.Position, error) { - // file:line(:colon) - parts := strings.Split(pos, ":") - if len(parts) == 1 { - return nil, errors.New("no colons") - } - - file := parts[0] - line, err := strconv.Atoi(parts[1]) - if err != nil { - return nil, fmt.Errorf("can't parse line number %q: %w", parts[1], err) - } - - var column int - if len(parts) == 3 { // no column - column, err = strconv.Atoi(parts[2]) - if err != nil { - return nil, fmt.Errorf("failed to parse column from %q: %w", parts[2], err) - } - } - - return &token.Position{ - Filename: file, - Line: line, - Column: column, - }, nil -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/packages/skip.go b/vendor/github.com/golangci/golangci-lint/pkg/packages/skip.go deleted file mode 100644 index cdd327f5d..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/packages/skip.go +++ /dev/null @@ -1,25 +0,0 @@ -package packages - -import ( - "fmt" - "path/filepath" - "regexp" -) - -func pathElemReImpl(e string, sep rune) string { - escapedSep := regexp.QuoteMeta(string(sep)) // needed for windows sep '\\' - return fmt.Sprintf(`(^|%s)%s($|%s)`, escapedSep, e, escapedSep) -} - -func pathElemRe(e string) string { - return pathElemReImpl(e, filepath.Separator) -} - -var StdExcludeDirRegexps = []string{ - pathElemRe("vendor"), - pathElemRe("third_party"), - pathElemRe("testdata"), - pathElemRe("examples"), - pathElemRe("Godeps"), - pathElemRe("builtin"), -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/packages/util.go b/vendor/github.com/golangci/golangci-lint/pkg/packages/util.go deleted file mode 100644 index 6a7789ebb..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/packages/util.go +++ /dev/null @@ -1,102 +0,0 @@ -package packages - -import ( - "fmt" - "regexp" - "strings" - - "golang.org/x/tools/go/packages" -) - -// reFile matches a line who starts with path and position. 
-// ex: `/example/main.go:11:17: foobar` -var reFile = regexp.MustCompile(`^.+\.go:\d+:\d+: .+`) - -func ExtractErrors(pkg *packages.Package) []packages.Error { - errors := extractErrorsImpl(pkg, map[*packages.Package]bool{}) - if len(errors) == 0 { - return errors - } - - seenErrors := map[string]bool{} - var uniqErrors []packages.Error - for _, err := range errors { - msg := stackCrusher(err.Error()) - if seenErrors[msg] { - continue - } - - if msg != err.Error() { - continue - } - - seenErrors[msg] = true - - uniqErrors = append(uniqErrors, err) - } - - if len(pkg.GoFiles) != 0 { - // errors were extracted from deps and have at least one file in package - for i := range uniqErrors { - if _, parseErr := ParseErrorPosition(uniqErrors[i].Pos); parseErr == nil { - continue - } - - // change pos to local file to properly process it by processors (properly read line etc.) - uniqErrors[i].Msg = fmt.Sprintf("%s: %s", uniqErrors[i].Pos, uniqErrors[i].Msg) - uniqErrors[i].Pos = fmt.Sprintf("%s:1", pkg.GoFiles[0]) - } - - // some errors like "code in directory expects import" don't have Pos, set it here - for i := range uniqErrors { - err := &uniqErrors[i] - if err.Pos == "" { - err.Pos = fmt.Sprintf("%s:1", pkg.GoFiles[0]) - } - } - } - - return uniqErrors -} - -func extractErrorsImpl(pkg *packages.Package, seenPackages map[*packages.Package]bool) []packages.Error { - if seenPackages[pkg] { - return nil - } - seenPackages[pkg] = true - - if !pkg.IllTyped { // otherwise, it may take hours to traverse all deps many times - return nil - } - - if len(pkg.Errors) > 0 { - return pkg.Errors - } - - var errors []packages.Error - for _, iPkg := range pkg.Imports { - iPkgErrors := extractErrorsImpl(iPkg, seenPackages) - if iPkgErrors != nil { - errors = append(errors, iPkgErrors...) 
- } - } - - return errors -} - -func stackCrusher(msg string) string { - index := strings.Index(msg, "(") - lastIndex := strings.LastIndex(msg, ")") - - if index == -1 || index == len(msg)-1 || lastIndex == -1 || lastIndex != len(msg)-1 { - return msg - } - - frag := msg[index+1 : lastIndex] - - if !reFile.MatchString(frag) { - return msg - } - - return stackCrusher(frag) -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/printers/checkstyle.go b/vendor/github.com/golangci/golangci-lint/pkg/printers/checkstyle.go deleted file mode 100644 index e32eef7f5..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/printers/checkstyle.go +++ /dev/null @@ -1,95 +0,0 @@ -package printers - -import ( - "encoding/xml" - "fmt" - "io" - "sort" - - "github.com/go-xmlfmt/xmlfmt" - "golang.org/x/exp/maps" - - "github.com/golangci/golangci-lint/pkg/result" -) - -const defaultCheckstyleSeverity = "error" - -type checkstyleOutput struct { - XMLName xml.Name `xml:"checkstyle"` - Version string `xml:"version,attr"` - Files []*checkstyleFile `xml:"file"` -} - -type checkstyleFile struct { - Name string `xml:"name,attr"` - Errors []*checkstyleError `xml:"error"` -} - -type checkstyleError struct { - Column int `xml:"column,attr"` - Line int `xml:"line,attr"` - Message string `xml:"message,attr"` - Severity string `xml:"severity,attr"` - Source string `xml:"source,attr"` -} - -type Checkstyle struct { - w io.Writer -} - -func NewCheckstyle(w io.Writer) *Checkstyle { - return &Checkstyle{w: w} -} - -func (p Checkstyle) Print(issues []result.Issue) error { - out := checkstyleOutput{ - Version: "5.0", - } - - files := map[string]*checkstyleFile{} - - for i := range issues { - issue := &issues[i] - file, ok := files[issue.FilePath()] - if !ok { - file = &checkstyleFile{ - Name: issue.FilePath(), - } - - files[issue.FilePath()] = file - } - - severity := defaultCheckstyleSeverity - if issue.Severity != "" { - severity = issue.Severity - } - - newError := &checkstyleError{ - Column: issue.Column(), - Line: issue.Line(), - Message: issue.Text, - Source: issue.FromLinter, - Severity: severity, - } - - file.Errors = append(file.Errors, newError) - } - - out.Files = maps.Values(files) - - sort.Slice(out.Files, func(i, j int) bool { - return out.Files[i].Name < out.Files[j].Name - }) - - data, err := xml.Marshal(&out) - if err != nil { - return err - } - - _, err = fmt.Fprintf(p.w, "%s%s\n", xml.Header, xmlfmt.FormatXML(string(data), "", " ")) - if err != nil { - return err - } - - return nil -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/printers/codeclimate.go b/vendor/github.com/golangci/golangci-lint/pkg/printers/codeclimate.go deleted file mode 100644 index 50d6dcff3..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/printers/codeclimate.go +++ /dev/null @@ -1,59 +0,0 @@ -package printers - -import ( - "encoding/json" - "io" - - "github.com/golangci/golangci-lint/pkg/result" -) - -const defaultCodeClimateSeverity = "critical" - -// CodeClimateIssue is a subset of the Code Climate spec. -// https://github.com/codeclimate/platform/blob/master/spec/analyzers/SPEC.md#data-types -// It is just enough to support GitLab CI Code Quality. 
-// https://docs.gitlab.com/ee/user/project/merge_requests/code_quality.html -type CodeClimateIssue struct { - Description string `json:"description"` - Severity string `json:"severity,omitempty"` - Fingerprint string `json:"fingerprint"` - Location struct { - Path string `json:"path"` - Lines struct { - Begin int `json:"begin"` - } `json:"lines"` - } `json:"location"` -} - -type CodeClimate struct { - w io.Writer -} - -func NewCodeClimate(w io.Writer) *CodeClimate { - return &CodeClimate{w: w} -} - -func (p CodeClimate) Print(issues []result.Issue) error { - codeClimateIssues := make([]CodeClimateIssue, 0, len(issues)) - for i := range issues { - issue := &issues[i] - codeClimateIssue := CodeClimateIssue{} - codeClimateIssue.Description = issue.Description() - codeClimateIssue.Location.Path = issue.Pos.Filename - codeClimateIssue.Location.Lines.Begin = issue.Pos.Line - codeClimateIssue.Fingerprint = issue.Fingerprint() - codeClimateIssue.Severity = defaultCodeClimateSeverity - - if issue.Severity != "" { - codeClimateIssue.Severity = issue.Severity - } - - codeClimateIssues = append(codeClimateIssues, codeClimateIssue) - } - - err := json.NewEncoder(p.w).Encode(codeClimateIssues) - if err != nil { - return err - } - return nil -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/printers/github.go b/vendor/github.com/golangci/golangci-lint/pkg/printers/github.go deleted file mode 100644 index f471d5a86..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/printers/github.go +++ /dev/null @@ -1,52 +0,0 @@ -package printers - -import ( - "fmt" - "io" - "path/filepath" - - "github.com/golangci/golangci-lint/pkg/result" -) - -type github struct { - w io.Writer -} - -const defaultGithubSeverity = "error" - -// NewGithub output format outputs issues according to GitHub actions format: -// https://docs.github.com/en/actions/using-workflows/workflow-commands-for-github-actions#setting-an-error-message -func NewGithub(w io.Writer) Printer { - return &github{w: w} -} - -// print each line as: ::error file=app.js,line=10,col=15::Something went wrong -func formatIssueAsGithub(issue *result.Issue) string { - severity := defaultGithubSeverity - if issue.Severity != "" { - severity = issue.Severity - } - - // Convert backslashes to forward slashes. - // This is needed when running on windows. - // Otherwise, GitHub won't be able to show the annotations pointing to the file path with backslashes. - file := filepath.ToSlash(issue.FilePath()) - - ret := fmt.Sprintf("::%s file=%s,line=%d", severity, file, issue.Line()) - if issue.Pos.Column != 0 { - ret += fmt.Sprintf(",col=%d", issue.Pos.Column) - } - - ret += fmt.Sprintf("::%s (%s)", issue.Text, issue.FromLinter) - return ret -} - -func (p *github) Print(issues []result.Issue) error { - for ind := range issues { - _, err := fmt.Fprintln(p.w, formatIssueAsGithub(&issues[ind])) - if err != nil { - return err - } - } - return nil -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/printers/html.go b/vendor/github.com/golangci/golangci-lint/pkg/printers/html.go deleted file mode 100644 index 7dd1e5c62..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/printers/html.go +++ /dev/null @@ -1,156 +0,0 @@ -package printers - -import ( - "fmt" - "html/template" - "io" - "strings" - - "github.com/golangci/golangci-lint/pkg/result" -) - -const templateContent = ` - - - - golangci-lint - - - - - - - - - - -
- - - -` - -type htmlIssue struct { - Title string - Pos string - Linter string - Code string -} - -type HTML struct { - w io.Writer -} - -func NewHTML(w io.Writer) *HTML { - return &HTML{w: w} -} - -func (p HTML) Print(issues []result.Issue) error { - var htmlIssues []htmlIssue - - for i := range issues { - pos := fmt.Sprintf("%s:%d", issues[i].FilePath(), issues[i].Line()) - if issues[i].Pos.Column != 0 { - pos += fmt.Sprintf(":%d", issues[i].Pos.Column) - } - - htmlIssues = append(htmlIssues, htmlIssue{ - Title: strings.TrimSpace(issues[i].Text), - Pos: pos, - Linter: issues[i].FromLinter, - Code: strings.Join(issues[i].SourceLines, "\n"), - }) - } - - t, err := template.New("golangci-lint").Parse(templateContent) - if err != nil { - return err - } - - return t.Execute(p.w, struct{ Issues []htmlIssue }{Issues: htmlIssues}) -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/printers/json.go b/vendor/github.com/golangci/golangci-lint/pkg/printers/json.go deleted file mode 100644 index 4bae526b8..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/printers/json.go +++ /dev/null @@ -1,38 +0,0 @@ -package printers - -import ( - "encoding/json" - "io" - - "github.com/golangci/golangci-lint/pkg/report" - "github.com/golangci/golangci-lint/pkg/result" -) - -type JSON struct { - rd *report.Data - w io.Writer -} - -func NewJSON(rd *report.Data, w io.Writer) *JSON { - return &JSON{ - rd: rd, - w: w, - } -} - -type JSONResult struct { - Issues []result.Issue - Report *report.Data -} - -func (p JSON) Print(issues []result.Issue) error { - res := JSONResult{ - Issues: issues, - Report: p.rd, - } - if res.Issues == nil { - res.Issues = []result.Issue{} - } - - return json.NewEncoder(p.w).Encode(res) -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/printers/junitxml.go b/vendor/github.com/golangci/golangci-lint/pkg/printers/junitxml.go deleted file mode 100644 index 3e3f82f58..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/printers/junitxml.go +++ /dev/null @@ -1,88 +0,0 @@ -package printers - -import ( - "encoding/xml" - "fmt" - "io" - "sort" - "strings" - - "golang.org/x/exp/maps" - - "github.com/golangci/golangci-lint/pkg/result" -) - -type testSuitesXML struct { - XMLName xml.Name `xml:"testsuites"` - TestSuites []testSuiteXML -} - -type testSuiteXML struct { - XMLName xml.Name `xml:"testsuite"` - Suite string `xml:"name,attr"` - Tests int `xml:"tests,attr"` - Errors int `xml:"errors,attr"` - Failures int `xml:"failures,attr"` - TestCases []testCaseXML `xml:"testcase"` -} - -type testCaseXML struct { - Name string `xml:"name,attr"` - ClassName string `xml:"classname,attr"` - Failure failureXML `xml:"failure"` -} - -type failureXML struct { - Message string `xml:"message,attr"` - Type string `xml:"type,attr"` - Content string `xml:",cdata"` -} - -type JunitXML struct { - w io.Writer -} - -func NewJunitXML(w io.Writer) *JunitXML { - return &JunitXML{w: w} -} - -func (p JunitXML) Print(issues []result.Issue) error { - suites := make(map[string]testSuiteXML) // use a map to group by file - - for ind := range issues { - i := &issues[ind] - suiteName := i.FilePath() - testSuite := suites[suiteName] - testSuite.Suite = i.FilePath() - testSuite.Tests++ - testSuite.Failures++ - - tc := testCaseXML{ - Name: i.FromLinter, - ClassName: i.Pos.String(), - Failure: failureXML{ - Type: i.Severity, - Message: i.Pos.String() + ": " + i.Text, - Content: fmt.Sprintf("%s: %s\nCategory: %s\nFile: %s\nLine: %d\nDetails: %s", - i.Severity, i.Text, i.FromLinter, i.Pos.Filename, 
i.Pos.Line, strings.Join(i.SourceLines, "\n")), - }, - } - - testSuite.TestCases = append(testSuite.TestCases, tc) - suites[suiteName] = testSuite - } - - var res testSuitesXML - res.TestSuites = maps.Values(suites) - - sort.Slice(res.TestSuites, func(i, j int) bool { - return res.TestSuites[i].Suite < res.TestSuites[j].Suite - }) - - enc := xml.NewEncoder(p.w) - enc.Indent("", " ") - if err := enc.Encode(res); err != nil { - return err - } - return nil -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/printers/printer.go b/vendor/github.com/golangci/golangci-lint/pkg/printers/printer.go deleted file mode 100644 index ce3116fa4..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/printers/printer.go +++ /dev/null @@ -1,9 +0,0 @@ -package printers - -import ( - "github.com/golangci/golangci-lint/pkg/result" -) - -type Printer interface { - Print(issues []result.Issue) error -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/printers/tab.go b/vendor/github.com/golangci/golangci-lint/pkg/printers/tab.go deleted file mode 100644 index 8ede89740..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/printers/tab.go +++ /dev/null @@ -1,67 +0,0 @@ -package printers - -import ( - "fmt" - "io" - "text/tabwriter" - - "github.com/fatih/color" - - "github.com/golangci/golangci-lint/pkg/logutils" - "github.com/golangci/golangci-lint/pkg/result" -) - -type Tab struct { - printLinterName bool - useColors bool - - log logutils.Log - w io.Writer -} - -func NewTab(printLinterName, useColors bool, log logutils.Log, w io.Writer) *Tab { - return &Tab{ - printLinterName: printLinterName, - useColors: useColors, - log: log, - w: w, - } -} - -func (p *Tab) SprintfColored(ca color.Attribute, format string, args ...any) string { - c := color.New(ca) - - if !p.useColors { - c.DisableColor() - } - - return c.Sprintf(format, args...) -} - -func (p *Tab) Print(issues []result.Issue) error { - w := tabwriter.NewWriter(p.w, 0, 0, 2, ' ', 0) - - for i := range issues { - p.printIssue(&issues[i], w) - } - - if err := w.Flush(); err != nil { - p.log.Warnf("Can't flush tab writer: %s", err) - } - - return nil -} - -func (p *Tab) printIssue(i *result.Issue, w io.Writer) { - text := p.SprintfColored(color.FgRed, "%s", i.Text) - if p.printLinterName { - text = fmt.Sprintf("%s\t%s", i.FromLinter, text) - } - - pos := p.SprintfColored(color.Bold, "%s:%d", i.FilePath(), i.Line()) - if i.Pos.Column != 0 { - pos += fmt.Sprintf(":%d", i.Pos.Column) - } - - fmt.Fprintf(w, "%s\t%s\n", pos, text) -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/printers/teamcity.go b/vendor/github.com/golangci/golangci-lint/pkg/printers/teamcity.go deleted file mode 100644 index d3693e997..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/printers/teamcity.go +++ /dev/null @@ -1,122 +0,0 @@ -package printers - -import ( - "fmt" - "io" - "strings" - "unicode/utf8" - - "github.com/golangci/golangci-lint/pkg/result" -) - -// Field limits. -const ( - smallLimit = 255 - largeLimit = 4000 -) - -// TeamCity printer for TeamCity format. -type TeamCity struct { - w io.Writer - escaper *strings.Replacer -} - -// NewTeamCity output format outputs issues according to TeamCity service message format. 
-func NewTeamCity(w io.Writer) *TeamCity { - return &TeamCity{ - w: w, - // https://www.jetbrains.com/help/teamcity/service-messages.html#Escaped+Values - escaper: strings.NewReplacer( - "'", "|'", - "\n", "|n", - "\r", "|r", - "|", "||", - "[", "|[", - "]", "|]", - ), - } -} - -func (p *TeamCity) Print(issues []result.Issue) error { - uniqLinters := map[string]struct{}{} - - for i := range issues { - issue := issues[i] - - _, ok := uniqLinters[issue.FromLinter] - if !ok { - inspectionType := InspectionType{ - id: issue.FromLinter, - name: issue.FromLinter, - description: issue.FromLinter, - category: "Golangci-lint reports", - } - - _, err := inspectionType.Print(p.w, p.escaper) - if err != nil { - return err - } - - uniqLinters[issue.FromLinter] = struct{}{} - } - - instance := InspectionInstance{ - typeID: issue.FromLinter, - message: issue.Text, - file: issue.FilePath(), - line: issue.Line(), - severity: issue.Severity, - } - - _, err := instance.Print(p.w, p.escaper) - if err != nil { - return err - } - } - - return nil -} - -// InspectionType is the unique description of the conducted inspection. Each specific warning or -// an error in code (inspection instance) has an inspection type. -// https://www.jetbrains.com/help/teamcity/service-messages.html#Inspection+Type -type InspectionType struct { - id string // (mandatory) limited by 255 characters. - name string // (mandatory) limited by 255 characters. - description string // (mandatory) limited by 255 characters. - category string // (mandatory) limited by 4000 characters. -} - -func (i InspectionType) Print(w io.Writer, escaper *strings.Replacer) (int, error) { - return fmt.Fprintf(w, "##teamcity[InspectionType id='%s' name='%s' description='%s' category='%s']\n", - limit(i.id, smallLimit), limit(i.name, smallLimit), limit(escaper.Replace(i.description), largeLimit), limit(i.category, smallLimit)) -} - -// InspectionInstance reports a specific defect, warning, error message. -// Includes location, description, and various optional and custom attributes. -// https://www.jetbrains.com/help/teamcity/service-messages.html#Inspection+Instance -type InspectionInstance struct { - typeID string // (mandatory) limited by 255 characters. - message string // (optional) limited by 4000 characters. - file string // (mandatory) file path limited by 4000 characters. - line int // (optional) line of the file. - severity string // (optional) any linter severity. 
-} - -func (i InspectionInstance) Print(w io.Writer, replacer *strings.Replacer) (int, error) { - return fmt.Fprintf(w, "##teamcity[inspection typeId='%s' message='%s' file='%s' line='%d' SEVERITY='%s']\n", - limit(i.typeID, smallLimit), - limit(replacer.Replace(i.message), largeLimit), - limit(i.file, largeLimit), - i.line, strings.ToUpper(i.severity)) -} - -func limit(s string, max int) string { - var size, count int - for i := 0; i < max && count < len(s); i++ { - _, size = utf8.DecodeRuneInString(s[count:]) - count += size - } - - return s[:count] -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/printers/text.go b/vendor/github.com/golangci/golangci-lint/pkg/printers/text.go deleted file mode 100644 index 6e29c4b50..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/printers/text.go +++ /dev/null @@ -1,94 +0,0 @@ -package printers - -import ( - "fmt" - "io" - "strings" - - "github.com/fatih/color" - - "github.com/golangci/golangci-lint/pkg/logutils" - "github.com/golangci/golangci-lint/pkg/result" -) - -type Text struct { - printIssuedLine bool - printLinterName bool - useColors bool - - log logutils.Log - w io.Writer -} - -func NewText(printIssuedLine, useColors, printLinterName bool, log logutils.Log, w io.Writer) *Text { - return &Text{ - printIssuedLine: printIssuedLine, - printLinterName: printLinterName, - useColors: useColors, - log: log, - w: w, - } -} - -func (p *Text) SprintfColored(ca color.Attribute, format string, args ...any) string { - c := color.New(ca) - - if !p.useColors { - c.DisableColor() - } - - return c.Sprintf(format, args...) -} - -func (p *Text) Print(issues []result.Issue) error { - for i := range issues { - p.printIssue(&issues[i]) - - if !p.printIssuedLine { - continue - } - - p.printSourceCode(&issues[i]) - p.printUnderLinePointer(&issues[i]) - } - - return nil -} - -func (p *Text) printIssue(i *result.Issue) { - text := p.SprintfColored(color.FgRed, "%s", strings.TrimSpace(i.Text)) - if p.printLinterName { - text += fmt.Sprintf(" (%s)", i.FromLinter) - } - pos := p.SprintfColored(color.Bold, "%s:%d", i.FilePath(), i.Line()) - if i.Pos.Column != 0 { - pos += fmt.Sprintf(":%d", i.Pos.Column) - } - fmt.Fprintf(p.w, "%s: %s\n", pos, text) -} - -func (p *Text) printSourceCode(i *result.Issue) { - for _, line := range i.SourceLines { - fmt.Fprintln(p.w, line) - } -} - -func (p *Text) printUnderLinePointer(i *result.Issue) { - // if column == 0 it means column is unknown (e.g. 
for gosec) - if len(i.SourceLines) != 1 || i.Pos.Column == 0 { - return - } - - col0 := i.Pos.Column - 1 - line := i.SourceLines[0] - prefixRunes := make([]rune, 0, len(line)) - for j := 0; j < len(line) && j < col0; j++ { - if line[j] == '\t' { - prefixRunes = append(prefixRunes, '\t') - } else { - prefixRunes = append(prefixRunes, ' ') - } - } - - fmt.Fprintf(p.w, "%s%s\n", string(prefixRunes), p.SprintfColored(color.FgYellow, "^")) -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/report/data.go b/vendor/github.com/golangci/golangci-lint/pkg/report/data.go deleted file mode 100644 index f083fa9f5..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/report/data.go +++ /dev/null @@ -1,26 +0,0 @@ -package report - -type Warning struct { - Tag string `json:",omitempty"` - Text string -} - -type LinterData struct { - Name string - Enabled bool `json:",omitempty"` - EnabledByDefault bool `json:",omitempty"` -} - -type Data struct { - Warnings []Warning `json:",omitempty"` - Linters []LinterData `json:",omitempty"` - Error string `json:",omitempty"` -} - -func (d *Data) AddLinter(name string, enabled, enabledByDefault bool) { - d.Linters = append(d.Linters, LinterData{ - Name: name, - Enabled: enabled, - EnabledByDefault: enabledByDefault, - }) -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/report/log.go b/vendor/github.com/golangci/golangci-lint/pkg/report/log.go deleted file mode 100644 index 61665f28b..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/report/log.go +++ /dev/null @@ -1,64 +0,0 @@ -package report - -import ( - "fmt" - "strings" - - "github.com/golangci/golangci-lint/pkg/logutils" -) - -type LogWrapper struct { - rd *Data - tags []string - origLog logutils.Log -} - -func NewLogWrapper(log logutils.Log, reportData *Data) *LogWrapper { - return &LogWrapper{ - rd: reportData, - origLog: log, - } -} - -func (lw LogWrapper) Fatalf(format string, args ...any) { - lw.origLog.Fatalf(format, args...) -} - -func (lw LogWrapper) Panicf(format string, args ...any) { - lw.origLog.Panicf(format, args...) -} - -func (lw LogWrapper) Errorf(format string, args ...any) { - lw.origLog.Errorf(format, args...) - lw.rd.Error = fmt.Sprintf(format, args...) -} - -func (lw LogWrapper) Warnf(format string, args ...any) { - lw.origLog.Warnf(format, args...) - w := Warning{ - Tag: strings.Join(lw.tags, "/"), - Text: fmt.Sprintf(format, args...), - } - - lw.rd.Warnings = append(lw.rd.Warnings, w) -} - -func (lw LogWrapper) Infof(format string, args ...any) { - lw.origLog.Infof(format, args...) -} - -func (lw LogWrapper) Child(name string) logutils.Log { - c := lw - c.origLog = lw.origLog.Child(name) - c.tags = append([]string{}, lw.tags...) 
- c.tags = append(c.tags, name) - return c -} - -func (lw LogWrapper) SetLevel(level logutils.LogLevel) { - lw.origLog.SetLevel(level) -} - -func (lw LogWrapper) GoString() string { - return fmt.Sprintf("lw: %+v, orig log: %#v", lw, lw.origLog) -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/result/issue.go b/vendor/github.com/golangci/golangci-lint/pkg/result/issue.go deleted file mode 100644 index 1e8cd3052..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/result/issue.go +++ /dev/null @@ -1,98 +0,0 @@ -package result - -import ( - "crypto/md5" //nolint:gosec - "fmt" - "go/token" - - "golang.org/x/tools/go/packages" -) - -type Range struct { - From, To int -} - -type Replacement struct { - NeedOnlyDelete bool // need to delete all lines of the issue without replacement with new lines - NewLines []string // if NeedDelete is false it's the replacement lines - Inline *InlineFix -} - -type InlineFix struct { - StartCol int // zero-based - Length int // length of chunk to be replaced - NewString string -} - -type Issue struct { - FromLinter string - Text string - - Severity string - - // Source lines of a code with the issue to show - SourceLines []string - - // If we know how to fix the issue we can provide replacement lines - Replacement *Replacement - - // Pkg is needed for proper caching of linting results - Pkg *packages.Package `json:"-"` - - LineRange *Range `json:",omitempty"` - - Pos token.Position - - // HunkPos is used only when golangci-lint is run over a diff - HunkPos int `json:",omitempty"` - - // If we are expecting a nolint (because this is from nolintlint), record the expected linter - ExpectNoLint bool - ExpectedNoLintLinter string -} - -func (i *Issue) FilePath() string { - return i.Pos.Filename -} - -func (i *Issue) Line() int { - return i.Pos.Line -} - -func (i *Issue) Column() int { - return i.Pos.Column -} - -func (i *Issue) GetLineRange() Range { - if i.LineRange == nil { - return Range{ - From: i.Line(), - To: i.Line(), - } - } - - if i.LineRange.From == 0 { - return Range{ - From: i.Line(), - To: i.Line(), - } - } - - return *i.LineRange -} - -func (i *Issue) Description() string { - return fmt.Sprintf("%s: %s", i.FromLinter, i.Text) -} - -func (i *Issue) Fingerprint() string { - firstLine := "" - if len(i.SourceLines) > 0 { - firstLine = i.SourceLines[0] - } - - hash := md5.New() //nolint:gosec - _, _ = fmt.Fprintf(hash, "%s%s%s", i.Pos.Filename, i.Text, firstLine) - - return fmt.Sprintf("%X", hash.Sum(nil)) -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/autogenerated_exclude.go b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/autogenerated_exclude.go deleted file mode 100644 index c7675fce8..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/autogenerated_exclude.go +++ /dev/null @@ -1,133 +0,0 @@ -package processors - -import ( - "errors" - "fmt" - "go/parser" - "go/token" - "path/filepath" - "strings" - - "github.com/golangci/golangci-lint/pkg/logutils" - "github.com/golangci/golangci-lint/pkg/result" -) - -var autogenDebugf = logutils.Debug(logutils.DebugKeyAutogenExclude) - -type ageFileSummary struct { - isGenerated bool -} - -type ageFileSummaryCache map[string]*ageFileSummary - -type AutogeneratedExclude struct { - fileSummaryCache ageFileSummaryCache -} - -func NewAutogeneratedExclude() *AutogeneratedExclude { - return &AutogeneratedExclude{ - fileSummaryCache: ageFileSummaryCache{}, - } -} - -var _ Processor = &AutogeneratedExclude{} - -func (p 
*AutogeneratedExclude) Name() string { - return "autogenerated_exclude" -} - -func (p *AutogeneratedExclude) Process(issues []result.Issue) ([]result.Issue, error) { - return filterIssuesErr(issues, p.shouldPassIssue) -} - -func isSpecialAutogeneratedFile(filePath string) bool { - fileName := filepath.Base(filePath) - // fake files or generation definitions to which //line points to for generated files - return filepath.Ext(fileName) != ".go" -} - -func (p *AutogeneratedExclude) shouldPassIssue(i *result.Issue) (bool, error) { - if i.FromLinter == "typecheck" { - // don't hide typechecking errors in generated files: users expect to see why the project isn't compiling - return true, nil - } - - if filepath.Base(i.FilePath()) == "go.mod" { - return true, nil - } - - if isSpecialAutogeneratedFile(i.FilePath()) { - return false, nil - } - - fs, err := p.getOrCreateFileSummary(i) - if err != nil { - return false, err - } - - // don't report issues for autogenerated files - return !fs.isGenerated, nil -} - -// isGenerated reports whether the source file is generated code. -// Using a bit laxer rules than https://go.dev/s/generatedcode to -// match more generated code. See #48 and #72. -func isGeneratedFileByComment(doc string) bool { - const ( - genCodeGenerated = "code generated" - genDoNotEdit = "do not edit" - genAutoFile = "autogenerated file" // easyjson - ) - - markers := []string{genCodeGenerated, genDoNotEdit, genAutoFile} - doc = strings.ToLower(doc) - for _, marker := range markers { - if strings.Contains(doc, marker) { - autogenDebugf("doc contains marker %q: file is generated", marker) - return true - } - } - - autogenDebugf("doc of len %d doesn't contain any of markers: %s", len(doc), markers) - return false -} - -func (p *AutogeneratedExclude) getOrCreateFileSummary(i *result.Issue) (*ageFileSummary, error) { - fs := p.fileSummaryCache[i.FilePath()] - if fs != nil { - return fs, nil - } - - fs = &ageFileSummary{} - p.fileSummaryCache[i.FilePath()] = fs - - if i.FilePath() == "" { - return nil, errors.New("no file path for issue") - } - - doc, err := getDoc(i.FilePath()) - if err != nil { - return nil, fmt.Errorf("failed to get doc of file %s: %w", i.FilePath(), err) - } - - fs.isGenerated = isGeneratedFileByComment(doc) - autogenDebugf("file %q is generated: %t", i.FilePath(), fs.isGenerated) - return fs, nil -} - -func getDoc(filePath string) (string, error) { - fset := token.NewFileSet() - syntax, err := parser.ParseFile(fset, filePath, nil, parser.PackageClauseOnly|parser.ParseComments) - if err != nil { - return "", fmt.Errorf("failed to parse file: %w", err) - } - - var docLines []string - for _, c := range syntax.Comments { - docLines = append(docLines, strings.TrimSpace(c.Text())) - } - - return strings.Join(docLines, "\n"), nil -} - -func (p *AutogeneratedExclude) Finish() {} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/base_rule.go b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/base_rule.go deleted file mode 100644 index b5e138806..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/base_rule.go +++ /dev/null @@ -1,74 +0,0 @@ -package processors - -import ( - "regexp" - - "github.com/golangci/golangci-lint/pkg/fsutils" - "github.com/golangci/golangci-lint/pkg/logutils" - "github.com/golangci/golangci-lint/pkg/result" -) - -type BaseRule struct { - Text string - Source string - Path string - PathExcept string - Linters []string -} - -type baseRule struct { - text *regexp.Regexp - source 
*regexp.Regexp - path *regexp.Regexp - pathExcept *regexp.Regexp - linters []string -} - -func (r *baseRule) isEmpty() bool { - return r.text == nil && r.source == nil && r.path == nil && r.pathExcept == nil && len(r.linters) == 0 -} - -func (r *baseRule) match(issue *result.Issue, files *fsutils.Files, log logutils.Log) bool { - if r.isEmpty() { - return false - } - if r.text != nil && !r.text.MatchString(issue.Text) { - return false - } - if r.path != nil && !r.path.MatchString(files.WithPathPrefix(issue.FilePath())) { - return false - } - if r.pathExcept != nil && r.pathExcept.MatchString(issue.FilePath()) { - return false - } - if len(r.linters) != 0 && !r.matchLinter(issue) { - return false - } - - // the most heavyweight checking last - if r.source != nil && !r.matchSource(issue, files.LineCache, log) { - return false - } - - return true -} - -func (r *baseRule) matchLinter(issue *result.Issue) bool { - for _, linter := range r.linters { - if linter == issue.FromLinter { - return true - } - } - - return false -} - -func (r *baseRule) matchSource(issue *result.Issue, lineCache *fsutils.LineCache, log logutils.Log) bool { //nolint:interfacer - sourceLine, errSourceLine := lineCache.GetLine(issue.FilePath(), issue.Line()) - if errSourceLine != nil { - log.Warnf("Failed to get line %s:%d from line cache: %s", issue.FilePath(), issue.Line(), errSourceLine) - return false // can't properly match - } - - return r.source.MatchString(sourceLine) -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/cgo.go b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/cgo.go deleted file mode 100644 index 8e7723751..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/cgo.go +++ /dev/null @@ -1,57 +0,0 @@ -package processors - -import ( - "fmt" - "path/filepath" - "strings" - - "github.com/golangci/golangci-lint/pkg/goutil" - "github.com/golangci/golangci-lint/pkg/result" -) - -type Cgo struct { - goCacheDir string -} - -var _ Processor = Cgo{} - -func NewCgo(goenv *goutil.Env) *Cgo { - return &Cgo{ - goCacheDir: goenv.Get(goutil.EnvGoCache), - } -} - -func (p Cgo) Name() string { - return "cgo" -} - -func (p Cgo) Process(issues []result.Issue) ([]result.Issue, error) { - return filterIssuesErr(issues, func(i *result.Issue) (bool, error) { - // some linters (.e.g gosec, deadcode) return incorrect filepaths for cgo issues, - // also cgo files have strange issues looking like false positives. 
- - // cache dir contains all preprocessed files including cgo files - - issueFilePath := i.FilePath() - if !filepath.IsAbs(i.FilePath()) { - absPath, err := filepath.Abs(i.FilePath()) - if err != nil { - return false, fmt.Errorf("failed to build abs path for %q: %w", i.FilePath(), err) - } - issueFilePath = absPath - } - - if p.goCacheDir != "" && strings.HasPrefix(issueFilePath, p.goCacheDir) { - return false, nil - } - - if filepath.Base(i.FilePath()) == "_cgo_gotypes.go" { - // skip cgo warning for go1.10 - return false, nil - } - - return true, nil - }) -} - -func (Cgo) Finish() {} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/diff.go b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/diff.go deleted file mode 100644 index 496d9c865..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/diff.go +++ /dev/null @@ -1,78 +0,0 @@ -package processors - -import ( - "bytes" - "fmt" - "io" - "os" - "strings" - - "github.com/golangci/revgrep" - - "github.com/golangci/golangci-lint/pkg/result" -) - -const envGolangciDiffProcessorPatch = "GOLANGCI_DIFF_PROCESSOR_PATCH" - -type Diff struct { - onlyNew bool - fromRev string - patchFilePath string - wholeFiles bool - patch string -} - -var _ Processor = Diff{} - -func NewDiff(onlyNew bool, fromRev, patchFilePath string, wholeFiles bool) *Diff { - return &Diff{ - onlyNew: onlyNew, - fromRev: fromRev, - patchFilePath: patchFilePath, - wholeFiles: wholeFiles, - patch: os.Getenv(envGolangciDiffProcessorPatch), - } -} - -func (p Diff) Name() string { - return "diff" -} - -func (p Diff) Process(issues []result.Issue) ([]result.Issue, error) { - if !p.onlyNew && p.fromRev == "" && p.patchFilePath == "" && p.patch == "" { // no need to work - return issues, nil - } - - var patchReader io.Reader - if p.patchFilePath != "" { - patch, err := os.ReadFile(p.patchFilePath) - if err != nil { - return nil, fmt.Errorf("can't read from patch file %s: %w", p.patchFilePath, err) - } - patchReader = bytes.NewReader(patch) - } else if p.patch != "" { - patchReader = strings.NewReader(p.patch) - } - - c := revgrep.Checker{ - Patch: patchReader, - RevisionFrom: p.fromRev, - WholeFiles: p.wholeFiles, - } - if err := c.Prepare(); err != nil { - return nil, fmt.Errorf("can't prepare diff by revgrep: %w", err) - } - - return transformIssues(issues, func(i *result.Issue) *result.Issue { - hunkPos, isNew := c.IsNewIssue(i) - if !isNew { - return nil - } - - newI := *i - newI.HunkPos = hunkPos - return &newI - }), nil -} - -func (Diff) Finish() {} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/exclude.go b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/exclude.go deleted file mode 100644 index 92959a328..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/exclude.go +++ /dev/null @@ -1,59 +0,0 @@ -package processors - -import ( - "regexp" - - "github.com/golangci/golangci-lint/pkg/result" -) - -type Exclude struct { - pattern *regexp.Regexp -} - -var _ Processor = Exclude{} - -func NewExclude(pattern string) *Exclude { - var patternRe *regexp.Regexp - if pattern != "" { - patternRe = regexp.MustCompile("(?i)" + pattern) - } - return &Exclude{ - pattern: patternRe, - } -} - -func (p Exclude) Name() string { - return "exclude" -} - -func (p Exclude) Process(issues []result.Issue) ([]result.Issue, error) { - if p.pattern == nil { - return issues, nil - } - - return filterIssues(issues, func(i *result.Issue) bool { - return 
!p.pattern.MatchString(i.Text) - }), nil -} - -func (p Exclude) Finish() {} - -type ExcludeCaseSensitive struct { - *Exclude -} - -var _ Processor = ExcludeCaseSensitive{} - -func NewExcludeCaseSensitive(pattern string) *ExcludeCaseSensitive { - var patternRe *regexp.Regexp - if pattern != "" { - patternRe = regexp.MustCompile(pattern) - } - return &ExcludeCaseSensitive{ - &Exclude{pattern: patternRe}, - } -} - -func (p ExcludeCaseSensitive) Name() string { - return "exclude-case-sensitive" -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/exclude_rules.go b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/exclude_rules.go deleted file mode 100644 index 2f7e30b43..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/exclude_rules.go +++ /dev/null @@ -1,95 +0,0 @@ -package processors - -import ( - "regexp" - - "github.com/golangci/golangci-lint/pkg/fsutils" - "github.com/golangci/golangci-lint/pkg/logutils" - "github.com/golangci/golangci-lint/pkg/result" -) - -type excludeRule struct { - baseRule -} - -type ExcludeRule struct { - BaseRule -} - -type ExcludeRules struct { - rules []excludeRule - files *fsutils.Files - log logutils.Log -} - -func NewExcludeRules(rules []ExcludeRule, files *fsutils.Files, log logutils.Log) *ExcludeRules { - r := &ExcludeRules{ - files: files, - log: log, - } - r.rules = createRules(rules, "(?i)") - - return r -} - -func createRules(rules []ExcludeRule, prefix string) []excludeRule { - parsedRules := make([]excludeRule, 0, len(rules)) - for _, rule := range rules { - parsedRule := excludeRule{} - parsedRule.linters = rule.Linters - if rule.Text != "" { - parsedRule.text = regexp.MustCompile(prefix + rule.Text) - } - if rule.Source != "" { - parsedRule.source = regexp.MustCompile(prefix + rule.Source) - } - if rule.Path != "" { - path := fsutils.NormalizePathInRegex(rule.Path) - parsedRule.path = regexp.MustCompile(path) - } - if rule.PathExcept != "" { - pathExcept := fsutils.NormalizePathInRegex(rule.PathExcept) - parsedRule.pathExcept = regexp.MustCompile(pathExcept) - } - parsedRules = append(parsedRules, parsedRule) - } - return parsedRules -} - -func (p ExcludeRules) Process(issues []result.Issue) ([]result.Issue, error) { - if len(p.rules) == 0 { - return issues, nil - } - return filterIssues(issues, func(i *result.Issue) bool { - for _, rule := range p.rules { - rule := rule - if rule.match(i, p.files, p.log) { - return false - } - } - return true - }), nil -} - -func (ExcludeRules) Name() string { return "exclude-rules" } -func (ExcludeRules) Finish() {} - -var _ Processor = ExcludeRules{} - -type ExcludeRulesCaseSensitive struct { - *ExcludeRules -} - -func NewExcludeRulesCaseSensitive(rules []ExcludeRule, files *fsutils.Files, log logutils.Log) *ExcludeRulesCaseSensitive { - r := &ExcludeRules{ - files: files, - log: log, - } - r.rules = createRules(rules, "") - - return &ExcludeRulesCaseSensitive{r} -} - -func (ExcludeRulesCaseSensitive) Name() string { return "exclude-rules-case-sensitive" } - -var _ Processor = ExcludeCaseSensitive{} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/filename_unadjuster.go b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/filename_unadjuster.go deleted file mode 100644 index 2aaafbf58..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/filename_unadjuster.go +++ /dev/null @@ -1,131 +0,0 @@ -package processors - -import ( - "go/parser" - "go/token" - "path/filepath" - "strings" - 
"sync" - "time" - - "golang.org/x/tools/go/packages" - - "github.com/golangci/golangci-lint/pkg/logutils" - "github.com/golangci/golangci-lint/pkg/result" -) - -type posMapper func(pos token.Position) token.Position - -type adjustMap struct { - sync.Mutex - m map[string]posMapper -} - -// FilenameUnadjuster is needed because a lot of linters use fset.Position(f.Pos()) -// to get filename. And they return adjusted filename (e.g. *.qtpl) for an issue. We need -// restore real .go filename to properly output it, parse it, etc. -type FilenameUnadjuster struct { - m map[string]posMapper // map from adjusted filename to position mapper: adjusted -> unadjusted position - log logutils.Log - loggedUnadjustments map[string]bool -} - -var _ Processor = &FilenameUnadjuster{} - -func processUnadjusterPkg(m *adjustMap, pkg *packages.Package, log logutils.Log) { - fset := token.NewFileSet() // it's more memory efficient to not store all in one fset - - for _, filename := range pkg.CompiledGoFiles { - // It's important to call func here to run GC - processUnadjusterFile(filename, m, log, fset) - } -} - -func processUnadjusterFile(filename string, m *adjustMap, log logutils.Log, fset *token.FileSet) { - syntax, err := parser.ParseFile(fset, filename, nil, parser.ParseComments) - if err != nil { - // Error will be reported by typecheck - return - } - - adjustedFilename := fset.PositionFor(syntax.Pos(), true).Filename - if adjustedFilename == "" { - return - } - - unadjustedFilename := fset.PositionFor(syntax.Pos(), false).Filename - if unadjustedFilename == "" || unadjustedFilename == adjustedFilename { - return - } - - if !strings.HasSuffix(unadjustedFilename, ".go") { - return // file.go -> /caches/cgo-xxx - } - - m.Lock() - defer m.Unlock() - m.m[adjustedFilename] = func(adjustedPos token.Position) token.Position { - tokenFile := fset.File(syntax.Pos()) - if tokenFile == nil { - log.Warnf("Failed to get token file for %s", adjustedFilename) - return adjustedPos - } - return fset.PositionFor(tokenFile.Pos(adjustedPos.Offset), false) - } -} - -func NewFilenameUnadjuster(pkgs []*packages.Package, log logutils.Log) *FilenameUnadjuster { - m := adjustMap{m: map[string]posMapper{}} - - startedAt := time.Now() - var wg sync.WaitGroup - wg.Add(len(pkgs)) - for _, pkg := range pkgs { - go func(pkg *packages.Package) { - // It's important to call func here to run GC - processUnadjusterPkg(&m, pkg, log) - wg.Done() - }(pkg) - } - wg.Wait() - log.Infof("Pre-built %d adjustments in %s", len(m.m), time.Since(startedAt)) - - return &FilenameUnadjuster{ - m: m.m, - log: log, - loggedUnadjustments: map[string]bool{}, - } -} - -func (p *FilenameUnadjuster) Name() string { - return "filename_unadjuster" -} - -func (p *FilenameUnadjuster) Process(issues []result.Issue) ([]result.Issue, error) { - return transformIssues(issues, func(i *result.Issue) *result.Issue { - issueFilePath := i.FilePath() - if !filepath.IsAbs(i.FilePath()) { - absPath, err := filepath.Abs(i.FilePath()) - if err != nil { - p.log.Warnf("failed to build abs path for %q: %s", i.FilePath(), err) - return i - } - issueFilePath = absPath - } - - mapper := p.m[issueFilePath] - if mapper == nil { - return i - } - - newI := *i - newI.Pos = mapper(i.Pos) - if !p.loggedUnadjustments[i.Pos.Filename] { - p.log.Infof("Unadjusted from %v to %v", i.Pos, newI.Pos) - p.loggedUnadjustments[i.Pos.Filename] = true - } - return &newI - }), nil -} - -func (p *FilenameUnadjuster) Finish() {} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/fixer.go 
b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/fixer.go deleted file mode 100644 index a79a84628..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/fixer.go +++ /dev/null @@ -1,255 +0,0 @@ -package processors - -import ( - "bytes" - "fmt" - "os" - "path/filepath" - "sort" - "strings" - - "github.com/golangci/golangci-lint/internal/robustio" - "github.com/golangci/golangci-lint/pkg/config" - "github.com/golangci/golangci-lint/pkg/fsutils" - "github.com/golangci/golangci-lint/pkg/logutils" - "github.com/golangci/golangci-lint/pkg/result" - "github.com/golangci/golangci-lint/pkg/timeutils" -) - -var _ Processor = Fixer{} - -type Fixer struct { - cfg *config.Config - log logutils.Log - fileCache *fsutils.FileCache - sw *timeutils.Stopwatch -} - -func NewFixer(cfg *config.Config, log logutils.Log, fileCache *fsutils.FileCache) *Fixer { - return &Fixer{ - cfg: cfg, - log: log, - fileCache: fileCache, - sw: timeutils.NewStopwatch("fixer", log), - } -} - -func (f Fixer) printStat() { - f.sw.PrintStages() -} - -func (f Fixer) Process(issues []result.Issue) ([]result.Issue, error) { - if !f.cfg.Issues.NeedFix { - return issues, nil - } - - outIssues := make([]result.Issue, 0, len(issues)) - issuesToFixPerFile := map[string][]result.Issue{} - for i := range issues { - issue := &issues[i] - if issue.Replacement == nil { - outIssues = append(outIssues, *issue) - continue - } - - issuesToFixPerFile[issue.FilePath()] = append(issuesToFixPerFile[issue.FilePath()], *issue) - } - - for file, issuesToFix := range issuesToFixPerFile { - var err error - f.sw.TrackStage("all", func() { - err = f.fixIssuesInFile(file, issuesToFix) - }) - if err != nil { - f.log.Errorf("Failed to fix issues in file %s: %s", file, err) - - // show issues only if can't fix them - outIssues = append(outIssues, issuesToFix...) 
- } - } - - f.printStat() - return outIssues, nil -} - -func (f Fixer) Name() string { - return "fixer" -} - -func (f Fixer) Finish() {} - -func (f Fixer) fixIssuesInFile(filePath string, issues []result.Issue) error { - // TODO: don't read the whole file into memory: read line by line; - // can't just use bufio.scanner: it has a line length limit - origFileData, err := f.fileCache.GetFileBytes(filePath) - if err != nil { - return fmt.Errorf("failed to get file bytes for %s: %w", filePath, err) - } - origFileLines := bytes.Split(origFileData, []byte("\n")) - - tmpFileName := filepath.Join(filepath.Dir(filePath), fmt.Sprintf(".%s.golangci_fix", filepath.Base(filePath))) - tmpOutFile, err := os.Create(tmpFileName) - if err != nil { - return fmt.Errorf("failed to make file %s: %w", tmpFileName, err) - } - - // merge multiple issues per line into one issue - issuesPerLine := map[int][]result.Issue{} - for i := range issues { - issue := &issues[i] - issuesPerLine[issue.Line()] = append(issuesPerLine[issue.Line()], *issue) - } - - issues = issues[:0] // reuse the same memory - for line, lineIssues := range issuesPerLine { - if mergedIssue := f.mergeLineIssues(line, lineIssues, origFileLines); mergedIssue != nil { - issues = append(issues, *mergedIssue) - } - } - - issues = f.findNotIntersectingIssues(issues) - - if err = f.writeFixedFile(origFileLines, issues, tmpOutFile); err != nil { - tmpOutFile.Close() - _ = robustio.RemoveAll(tmpOutFile.Name()) - return err - } - - tmpOutFile.Close() - if err = robustio.Rename(tmpOutFile.Name(), filePath); err != nil { - _ = robustio.RemoveAll(tmpOutFile.Name()) - return fmt.Errorf("failed to rename %s -> %s: %w", tmpOutFile.Name(), filePath, err) - } - - return nil -} - -func (f Fixer) mergeLineIssues(lineNum int, lineIssues []result.Issue, origFileLines [][]byte) *result.Issue { - origLine := origFileLines[lineNum-1] // lineNum is 1-based - - if len(lineIssues) == 1 && lineIssues[0].Replacement.Inline == nil { - return &lineIssues[0] - } - - // check issues first - for ind := range lineIssues { - i := &lineIssues[ind] - if i.LineRange != nil { - f.log.Infof("Line %d has multiple issues but at least one of them is ranged: %#v", lineNum, lineIssues) - return &lineIssues[0] - } - - r := i.Replacement - if r.Inline == nil || len(r.NewLines) != 0 || r.NeedOnlyDelete { - f.log.Infof("Line %d has multiple issues but at least one of them isn't inline: %#v", lineNum, lineIssues) - return &lineIssues[0] - } - - if r.Inline.StartCol < 0 || r.Inline.Length <= 0 || r.Inline.StartCol+r.Inline.Length > len(origLine) { - f.log.Warnf("Line %d (%q) has invalid inline fix: %#v, %#v", lineNum, origLine, i, r.Inline) - return nil - } - } - - return f.applyInlineFixes(lineIssues, origLine, lineNum) -} - -func (f Fixer) applyInlineFixes(lineIssues []result.Issue, origLine []byte, lineNum int) *result.Issue { - sort.Slice(lineIssues, func(i, j int) bool { - return lineIssues[i].Replacement.Inline.StartCol < lineIssues[j].Replacement.Inline.StartCol - }) - - var newLineBuf bytes.Buffer - newLineBuf.Grow(len(origLine)) - - //nolint:misspell - // example: origLine="it's becouse of them", StartCol=5, Length=7, NewString="because" - - curOrigLinePos := 0 - for i := range lineIssues { - fix := lineIssues[i].Replacement.Inline - if fix.StartCol < curOrigLinePos { - f.log.Warnf("Line %d has multiple intersecting issues: %#v", lineNum, lineIssues) - return nil - } - - if curOrigLinePos != fix.StartCol { - newLineBuf.Write(origLine[curOrigLinePos:fix.StartCol]) - } - 
newLineBuf.WriteString(fix.NewString) - curOrigLinePos = fix.StartCol + fix.Length - } - if curOrigLinePos != len(origLine) { - newLineBuf.Write(origLine[curOrigLinePos:]) - } - - mergedIssue := lineIssues[0] // use text from the first issue (it's not really used) - mergedIssue.Replacement = &result.Replacement{ - NewLines: []string{newLineBuf.String()}, - } - return &mergedIssue -} - -func (f Fixer) findNotIntersectingIssues(issues []result.Issue) []result.Issue { - sort.SliceStable(issues, func(i, j int) bool { - a, b := issues[i], issues[j] - return a.Line() < b.Line() - }) - - var ret []result.Issue - var currentEnd int - for i := range issues { - issue := &issues[i] - rng := issue.GetLineRange() - if rng.From <= currentEnd { - f.log.Infof("Skip issue %#v: intersects with end %d", issue, currentEnd) - continue // skip intersecting issue - } - f.log.Infof("Fix issue %#v with range %v", issue, issue.GetLineRange()) - ret = append(ret, *issue) - currentEnd = rng.To - } - - return ret -} - -func (f Fixer) writeFixedFile(origFileLines [][]byte, issues []result.Issue, tmpOutFile *os.File) error { - // issues aren't intersecting - - nextIssueIndex := 0 - for i := 0; i < len(origFileLines); i++ { - var outLine string - var nextIssue *result.Issue - if nextIssueIndex != len(issues) { - nextIssue = &issues[nextIssueIndex] - } - - origFileLineNumber := i + 1 - if nextIssue == nil || origFileLineNumber != nextIssue.GetLineRange().From { - outLine = string(origFileLines[i]) - } else { - nextIssueIndex++ - rng := nextIssue.GetLineRange() - if rng.From > rng.To { - // Maybe better decision is to skip such issues, re-evaluate if regressed. - f.log.Warnf("[fixer]: issue line range is probably invalid, fix can be incorrect (from=%d, to=%d, linter=%s)", - rng.From, rng.To, nextIssue.FromLinter, - ) - } - i += rng.To - rng.From - if nextIssue.Replacement.NeedOnlyDelete { - continue - } - outLine = strings.Join(nextIssue.Replacement.NewLines, "\n") - } - - if i < len(origFileLines)-1 { - outLine += "\n" - } - if _, err := tmpOutFile.WriteString(outLine); err != nil { - return fmt.Errorf("failed to write output line: %w", err) - } - } - - return nil -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/identifier_marker.go b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/identifier_marker.go deleted file mode 100644 index 5cc4e56ba..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/identifier_marker.go +++ /dev/null @@ -1,125 +0,0 @@ -package processors - -import ( - "regexp" - - "github.com/golangci/golangci-lint/pkg/result" -) - -type replacePattern struct { - re string - repl string -} - -type replaceRegexp struct { - re *regexp.Regexp - repl string -} - -var replacePatterns = []replacePattern{ - // unparam - {`^(\S+) - (\S+) is unused$`, "`${1}` - `${2}` is unused"}, - {`^(\S+) - (\S+) always receives (\S+) \((.*)\)$`, "`${1}` - `${2}` always receives `${3}` (`${4}`)"}, - {`^(\S+) - (\S+) always receives (.*)$`, "`${1}` - `${2}` always receives `${3}`"}, - {`^(\S+) - result (\S+) is always (\S+)`, "`${1}` - result `${2}` is always `${3}`"}, - - // interfacer - {`^(\S+) can be (\S+)$`, "`${1}` can be `${2}`"}, - - // govet - {`^printf: (\S+) arg list ends with redundant newline$`, "printf: `${1}` arg list ends with redundant newline"}, - {`^composites: (\S+) composite literal uses unkeyed fields$`, "composites: `${1}` composite literal uses unkeyed fields"}, - - // gosec - {`^(\S+): Blacklisted import (\S+): weak cryptographic 
primitive$`, - "${1}: Blacklisted import `${2}`: weak cryptographic primitive"}, - {`^TLS InsecureSkipVerify set true.$`, "TLS `InsecureSkipVerify` set true."}, - - // gosimple - {`should replace loop with (.*)$`, "should replace loop with `${1}`"}, - {`should use a simple channel send/receive instead of select with a single case`, - "should use a simple channel send/receive instead of `select` with a single case"}, - {`should omit comparison to bool constant, can be simplified to (.+)$`, - "should omit comparison to bool constant, can be simplified to `${1}`"}, - {`should write (.+) instead of (.+)$`, "should write `${1}` instead of `${2}`"}, - {`redundant return statement$`, "redundant `return` statement"}, - {`should replace this if statement with an unconditional strings.TrimPrefix`, - "should replace this `if` statement with an unconditional `strings.TrimPrefix`"}, - - // staticcheck - {`this value of (\S+) is never used$`, "this value of `${1}` is never used"}, - {`should use time.Since instead of time.Now\(\).Sub$`, - "should use `time.Since` instead of `time.Now().Sub`"}, - {`should check returned error before deferring response.Close\(\)$`, - "should check returned error before deferring `response.Close()`"}, - {`no value of type uint is less than 0$`, "no value of type `uint` is less than `0`"}, - - // unused - {`(func|const|field|type|var) (\S+) is unused$`, "${1} `${2}` is unused"}, - - // typecheck - {`^unknown field (\S+) in struct literal$`, "unknown field `${1}` in struct literal"}, - {`^invalid operation: (\S+) \(variable of type (\S+)\) has no field or method (\S+)$`, - "invalid operation: `${1}` (variable of type `${2}`) has no field or method `${3}`"}, - {`^undeclared name: (\S+)$`, "undeclared name: `${1}`"}, - {`^cannot use addr \(variable of type (\S+)\) as (\S+) value in argument to (\S+)$`, - "cannot use addr (variable of type `${1}`) as `${2}` value in argument to `${3}`"}, - {`^other declaration of (\S+)$`, "other declaration of `${1}`"}, - {`^(\S+) redeclared in this block$`, "`${1}` redeclared in this block"}, - - // golint - {`^exported (type|method|function|var|const) (\S+) should have comment or be unexported$`, - "exported ${1} `${2}` should have comment or be unexported"}, - {`^comment on exported (type|method|function|var|const) (\S+) should be of the form "(\S+) ..."$`, - "comment on exported ${1} `${2}` should be of the form `${3} ...`"}, - {`^should replace (.+) with (.+)$`, "should replace `${1}` with `${2}`"}, - {`^if block ends with a return statement, so drop this else and outdent its block$`, - "`if` block ends with a `return` statement, so drop this `else` and outdent its block"}, - {`^(struct field|var|range var|const|type|(?:func|method|interface method) (?:parameter|result)) (\S+) should be (\S+)$`, - "${1} `${2}` should be `${3}`"}, - {`^don't use underscores in Go names; var (\S+) should be (\S+)$`, - "don't use underscores in Go names; var `${1}` should be `${2}`"}, -} - -type IdentifierMarker struct { - replaceRegexps []replaceRegexp -} - -func NewIdentifierMarker() *IdentifierMarker { - var replaceRegexps []replaceRegexp - for _, p := range replacePatterns { - r := replaceRegexp{ - re: regexp.MustCompile(p.re), - repl: p.repl, - } - replaceRegexps = append(replaceRegexps, r) - } - - return &IdentifierMarker{ - replaceRegexps: replaceRegexps, - } -} - -func (im IdentifierMarker) Process(issues []result.Issue) ([]result.Issue, error) { - return transformIssues(issues, func(i *result.Issue) *result.Issue { - iCopy := *i - iCopy.Text = 
im.markIdentifiers(iCopy.Text) - return &iCopy - }), nil -} - -func (im IdentifierMarker) markIdentifiers(s string) string { - for _, rr := range im.replaceRegexps { - rs := rr.re.ReplaceAllString(s, rr.repl) - if rs != s { - return rs - } - } - - return s -} - -func (im IdentifierMarker) Name() string { - return "identifier_marker" -} -func (im IdentifierMarker) Finish() {} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/issues.go b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/issues.go deleted file mode 100644 index 4691be38a..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/issues.go +++ /dev/null @@ -1,46 +0,0 @@ -package processors - -import ( - "fmt" - - "github.com/golangci/golangci-lint/pkg/result" -) - -func filterIssues(issues []result.Issue, filter func(i *result.Issue) bool) []result.Issue { - retIssues := make([]result.Issue, 0, len(issues)) - for i := range issues { - if filter(&issues[i]) { - retIssues = append(retIssues, issues[i]) - } - } - - return retIssues -} - -func filterIssuesErr(issues []result.Issue, filter func(i *result.Issue) (bool, error)) ([]result.Issue, error) { - retIssues := make([]result.Issue, 0, len(issues)) - for i := range issues { - ok, err := filter(&issues[i]) - if err != nil { - return nil, fmt.Errorf("can't filter issue %#v: %w", issues[i], err) - } - - if ok { - retIssues = append(retIssues, issues[i]) - } - } - - return retIssues, nil -} - -func transformIssues(issues []result.Issue, transform func(i *result.Issue) *result.Issue) []result.Issue { - retIssues := make([]result.Issue, 0, len(issues)) - for i := range issues { - newI := transform(&issues[i]) - if newI != nil { - retIssues = append(retIssues, *newI) - } - } - - return retIssues -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/max_from_linter.go b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/max_from_linter.go deleted file mode 100644 index 649ed86ae..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/max_from_linter.go +++ /dev/null @@ -1,54 +0,0 @@ -package processors - -import ( - "github.com/golangci/golangci-lint/pkg/config" - "github.com/golangci/golangci-lint/pkg/logutils" - "github.com/golangci/golangci-lint/pkg/result" -) - -type MaxFromLinter struct { - lc linterToCountMap - limit int - log logutils.Log - cfg *config.Config -} - -var _ Processor = &MaxFromLinter{} - -func NewMaxFromLinter(limit int, log logutils.Log, cfg *config.Config) *MaxFromLinter { - return &MaxFromLinter{ - lc: linterToCountMap{}, - limit: limit, - log: log, - cfg: cfg, - } -} - -func (p *MaxFromLinter) Name() string { - return "max_from_linter" -} - -func (p *MaxFromLinter) Process(issues []result.Issue) ([]result.Issue, error) { - if p.limit <= 0 { // no limit - return issues, nil - } - - return filterIssues(issues, func(i *result.Issue) bool { - if i.Replacement != nil && p.cfg.Issues.NeedFix { - // we need to fix all issues at once => we need to return all of them - return true - } - - p.lc[i.FromLinter]++ // always inc for stat - return p.lc[i.FromLinter] <= p.limit - }), nil -} - -func (p *MaxFromLinter) Finish() { - walkStringToIntMapSortedByValue(p.lc, func(linter string, count int) { - if count > p.limit { - p.log.Infof("%d/%d issues from linter %s were hidden, use --max-issues-per-linter", - count-p.limit, count, linter) - } - }) -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/max_per_file_from_linter.go 
b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/max_per_file_from_linter.go deleted file mode 100644 index 64182e3e2..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/max_per_file_from_linter.go +++ /dev/null @@ -1,59 +0,0 @@ -package processors - -import ( - "github.com/golangci/golangci-lint/pkg/config" - "github.com/golangci/golangci-lint/pkg/result" -) - -type linterToCountMap map[string]int -type fileToLinterToCountMap map[string]linterToCountMap - -type MaxPerFileFromLinter struct { - flc fileToLinterToCountMap - maxPerFileFromLinterConfig map[string]int -} - -var _ Processor = &MaxPerFileFromLinter{} - -func NewMaxPerFileFromLinter(cfg *config.Config) *MaxPerFileFromLinter { - maxPerFileFromLinterConfig := map[string]int{} - - if !cfg.Issues.NeedFix { - // if we don't fix we do this limiting to not annoy user; - // otherwise we need to fix all issues in the file at once - maxPerFileFromLinterConfig["gofmt"] = 1 - maxPerFileFromLinterConfig["goimports"] = 1 - } - - return &MaxPerFileFromLinter{ - flc: fileToLinterToCountMap{}, - maxPerFileFromLinterConfig: maxPerFileFromLinterConfig, - } -} - -func (p *MaxPerFileFromLinter) Name() string { - return "max_per_file_from_linter" -} - -func (p *MaxPerFileFromLinter) Process(issues []result.Issue) ([]result.Issue, error) { - return filterIssues(issues, func(i *result.Issue) bool { - limit := p.maxPerFileFromLinterConfig[i.FromLinter] - if limit == 0 { - return true - } - - lm := p.flc[i.FilePath()] - if lm == nil { - p.flc[i.FilePath()] = linterToCountMap{} - } - count := p.flc[i.FilePath()][i.FromLinter] - if count >= limit { - return false - } - - p.flc[i.FilePath()][i.FromLinter]++ - return true - }), nil -} - -func (p *MaxPerFileFromLinter) Finish() {} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/max_same_issues.go b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/max_same_issues.go deleted file mode 100644 index 391ae5fa7..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/max_same_issues.go +++ /dev/null @@ -1,81 +0,0 @@ -package processors - -import ( - "sort" - - "github.com/golangci/golangci-lint/pkg/config" - "github.com/golangci/golangci-lint/pkg/logutils" - "github.com/golangci/golangci-lint/pkg/result" -) - -type textToCountMap map[string]int - -type MaxSameIssues struct { - tc textToCountMap - limit int - log logutils.Log - cfg *config.Config -} - -var _ Processor = &MaxSameIssues{} - -func NewMaxSameIssues(limit int, log logutils.Log, cfg *config.Config) *MaxSameIssues { - return &MaxSameIssues{ - tc: textToCountMap{}, - limit: limit, - log: log, - cfg: cfg, - } -} - -func (p *MaxSameIssues) Name() string { - return "max_same_issues" -} - -func (p *MaxSameIssues) Process(issues []result.Issue) ([]result.Issue, error) { - if p.limit <= 0 { // no limit - return issues, nil - } - - return filterIssues(issues, func(i *result.Issue) bool { - if i.Replacement != nil && p.cfg.Issues.NeedFix { - // we need to fix all issues at once => we need to return all of them - return true - } - - p.tc[i.Text]++ // always inc for stat - return p.tc[i.Text] <= p.limit - }), nil -} - -func (p *MaxSameIssues) Finish() { - walkStringToIntMapSortedByValue(p.tc, func(text string, count int) { - if count > p.limit { - p.log.Infof("%d/%d issues with text %q were hidden, use --max-same-issues", - count-p.limit, count, text) - } - }) -} - -type kv struct { - Key string - Value int -} - -func walkStringToIntMapSortedByValue(m 
map[string]int, walk func(k string, v int)) { - var ss []kv - for k, v := range m { - ss = append(ss, kv{ - Key: k, - Value: v, - }) - } - - sort.Slice(ss, func(i, j int) bool { - return ss[i].Value > ss[j].Value - }) - - for _, kv := range ss { - walk(kv.Key, kv.Value) - } -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/nolint.go b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/nolint.go deleted file mode 100644 index a72dd1ef2..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/nolint.go +++ /dev/null @@ -1,313 +0,0 @@ -package processors - -import ( - "errors" - "go/ast" - "go/parser" - "go/token" - "regexp" - "sort" - "strings" - - "golang.org/x/exp/maps" - - "github.com/golangci/golangci-lint/pkg/golinters" - "github.com/golangci/golangci-lint/pkg/lint/linter" - "github.com/golangci/golangci-lint/pkg/lint/lintersdb" - "github.com/golangci/golangci-lint/pkg/logutils" - "github.com/golangci/golangci-lint/pkg/result" -) - -var nolintDebugf = logutils.Debug(logutils.DebugKeyNolint) -var nolintRe = regexp.MustCompile(`^nolint( |:|$)`) - -type ignoredRange struct { - linters []string - matchedIssueFromLinter map[string]bool - result.Range - col int - originalRange *ignoredRange // pre-expanded range (used to match nolintlint issues) -} - -func (i *ignoredRange) doesMatch(issue *result.Issue) bool { - if issue.Line() < i.From || issue.Line() > i.To { - return false - } - - // only allow selective nolinting of nolintlint - nolintFoundForLinter := len(i.linters) == 0 && issue.FromLinter != golinters.NoLintLintName - - for _, linterName := range i.linters { - if linterName == issue.FromLinter { - nolintFoundForLinter = true - break - } - } - - if nolintFoundForLinter { - return true - } - - // handle possible unused nolint directives - // nolintlint generates potential issues for every nolint directive, and they are filtered out here - if issue.FromLinter == golinters.NoLintLintName && issue.ExpectNoLint { - if issue.ExpectedNoLintLinter != "" { - return i.matchedIssueFromLinter[issue.ExpectedNoLintLinter] - } - return len(i.matchedIssueFromLinter) > 0 - } - - return false -} - -type fileData struct { - ignoredRanges []ignoredRange -} - -type filesCache map[string]*fileData - -type Nolint struct { - cache filesCache - dbManager *lintersdb.Manager - enabledLinters map[string]*linter.Config - log logutils.Log - - unknownLintersSet map[string]bool -} - -func NewNolint(log logutils.Log, dbManager *lintersdb.Manager, enabledLinters map[string]*linter.Config) *Nolint { - return &Nolint{ - cache: filesCache{}, - dbManager: dbManager, - enabledLinters: enabledLinters, - log: log, - unknownLintersSet: map[string]bool{}, - } -} - -var _ Processor = &Nolint{} - -func (p *Nolint) Name() string { - return "nolint" -} - -func (p *Nolint) Process(issues []result.Issue) ([]result.Issue, error) { - // put nolintlint issues last because we process other issues first to determine which nolint directives are unused - sort.Stable(sortWithNolintlintLast(issues)) - return filterIssuesErr(issues, p.shouldPassIssue) -} - -func (p *Nolint) getOrCreateFileData(i *result.Issue) (*fileData, error) { - fd := p.cache[i.FilePath()] - if fd != nil { - return fd, nil - } - - fd = &fileData{} - p.cache[i.FilePath()] = fd - - if i.FilePath() == "" { - return nil, errors.New("no file path for issue") - } - - // TODO: migrate this parsing to go/analysis facts - // or cache them somehow per file. 
- - // Don't use cached AST because they consume a lot of memory on large projects. - fset := token.NewFileSet() - f, err := parser.ParseFile(fset, i.FilePath(), nil, parser.ParseComments) - if err != nil { - // Don't report error because it's already must be reporter by typecheck or go/analysis. - return fd, nil - } - - fd.ignoredRanges = p.buildIgnoredRangesForFile(f, fset, i.FilePath()) - nolintDebugf("file %s: built nolint ranges are %+v", i.FilePath(), fd.ignoredRanges) - return fd, nil -} - -func (p *Nolint) buildIgnoredRangesForFile(f *ast.File, fset *token.FileSet, filePath string) []ignoredRange { - inlineRanges := p.extractFileCommentsInlineRanges(fset, f.Comments...) - nolintDebugf("file %s: inline nolint ranges are %+v", filePath, inlineRanges) - - if len(inlineRanges) == 0 { - return nil - } - - e := rangeExpander{ - fset: fset, - inlineRanges: inlineRanges, - } - - ast.Walk(&e, f) - - // TODO: merge all ranges: there are repeated ranges - allRanges := append([]ignoredRange{}, inlineRanges...) - allRanges = append(allRanges, e.expandedRanges...) - - return allRanges -} - -func (p *Nolint) shouldPassIssue(i *result.Issue) (bool, error) { - nolintDebugf("got issue: %v", *i) - if i.FromLinter == golinters.NoLintLintName && i.ExpectNoLint && i.ExpectedNoLintLinter != "" { - // don't expect disabled linters to cover their nolint statements - nolintDebugf("enabled linters: %v", p.enabledLinters) - if p.enabledLinters[i.ExpectedNoLintLinter] == nil { - return false, nil - } - nolintDebugf("checking that lint issue was used for %s: %v", i.ExpectedNoLintLinter, i) - } - - fd, err := p.getOrCreateFileData(i) - if err != nil { - return false, err - } - - for _, ir := range fd.ignoredRanges { - if ir.doesMatch(i) { - nolintDebugf("found ignored range for issue %v: %v", i, ir) - ir.matchedIssueFromLinter[i.FromLinter] = true - if ir.originalRange != nil { - ir.originalRange.matchedIssueFromLinter[i.FromLinter] = true - } - return false, nil - } - } - - return true, nil -} - -type rangeExpander struct { - fset *token.FileSet - inlineRanges []ignoredRange - expandedRanges []ignoredRange -} - -func (e *rangeExpander) Visit(node ast.Node) ast.Visitor { - if node == nil { - return e - } - - nodeStartPos := e.fset.Position(node.Pos()) - nodeStartLine := nodeStartPos.Line - nodeEndLine := e.fset.Position(node.End()).Line - - var foundRange *ignoredRange - for _, r := range e.inlineRanges { - if r.To == nodeStartLine-1 && nodeStartPos.Column == r.col { - r := r - foundRange = &r - break - } - } - if foundRange == nil { - return e - } - - expandedRange := *foundRange - // store the original unexpanded range for matching nolintlint issues - if expandedRange.originalRange == nil { - expandedRange.originalRange = foundRange - } - if expandedRange.To < nodeEndLine { - expandedRange.To = nodeEndLine - } - - nolintDebugf("found range is %v for node %#v [%d;%d], expanded range is %v", - *foundRange, node, nodeStartLine, nodeEndLine, expandedRange) - e.expandedRanges = append(e.expandedRanges, expandedRange) - - return e -} - -func (p *Nolint) extractFileCommentsInlineRanges(fset *token.FileSet, comments ...*ast.CommentGroup) []ignoredRange { - var ret []ignoredRange - for _, g := range comments { - for _, c := range g.List { - ir := p.extractInlineRangeFromComment(c.Text, g, fset) - if ir != nil { - ret = append(ret, *ir) - } - } - } - - return ret -} - -func (p *Nolint) extractInlineRangeFromComment(text string, g ast.Node, fset *token.FileSet) *ignoredRange { - text = strings.TrimLeft(text, "/ ") - if 
!nolintRe.MatchString(text) { - return nil - } - - buildRange := func(linters []string) *ignoredRange { - pos := fset.Position(g.Pos()) - return &ignoredRange{ - Range: result.Range{ - From: pos.Line, - To: fset.Position(g.End()).Line, - }, - col: pos.Column, - linters: linters, - matchedIssueFromLinter: make(map[string]bool), - } - } - - if strings.HasPrefix(text, "nolint:all") || !strings.HasPrefix(text, "nolint:") { - return buildRange(nil) // ignore all linters - } - - // ignore specific linters - var linters []string - text = strings.Split(text, "//")[0] // allow another comment after this comment - linterItems := strings.Split(strings.TrimPrefix(text, "nolint:"), ",") - for _, item := range linterItems { - linterName := strings.ToLower(strings.TrimSpace(item)) - if linterName == "all" { - p.unknownLintersSet = map[string]bool{} - return buildRange(nil) - } - - lcs := p.dbManager.GetLinterConfigs(linterName) - if lcs == nil { - p.unknownLintersSet[linterName] = true - linters = append(linters, linterName) - nolintDebugf("unknown linter %s on line %d", linterName, fset.Position(g.Pos()).Line) - continue - } - - for _, lc := range lcs { - linters = append(linters, lc.Name()) // normalize name to work with aliases - } - } - - nolintDebugf("%d: linters are %s", fset.Position(g.Pos()).Line, linters) - return buildRange(linters) -} - -func (p *Nolint) Finish() { - if len(p.unknownLintersSet) == 0 { - return - } - - unknownLinters := maps.Keys(p.unknownLintersSet) - sort.Strings(unknownLinters) - - p.log.Warnf("Found unknown linters in //nolint directives: %s", strings.Join(unknownLinters, ", ")) -} - -// put nolintlint last -type sortWithNolintlintLast []result.Issue - -func (issues sortWithNolintlintLast) Len() int { - return len(issues) -} - -func (issues sortWithNolintlintLast) Less(i, j int) bool { - return issues[i].FromLinter != golinters.NoLintLintName && issues[j].FromLinter == golinters.NoLintLintName -} - -func (issues sortWithNolintlintLast) Swap(i, j int) { - issues[j], issues[i] = issues[i], issues[j] -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/path_prefixer.go b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/path_prefixer.go deleted file mode 100644 index f6b885011..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/path_prefixer.go +++ /dev/null @@ -1,36 +0,0 @@ -package processors - -import ( - "github.com/golangci/golangci-lint/pkg/fsutils" - "github.com/golangci/golangci-lint/pkg/result" -) - -// PathPrefixer adds a customizable prefix to every output path -type PathPrefixer struct { - prefix string -} - -var _ Processor = new(PathPrefixer) - -// NewPathPrefixer returns a new path prefixer for the provided string -func NewPathPrefixer(prefix string) *PathPrefixer { - return &PathPrefixer{prefix: prefix} -} - -// Name returns the name of this processor -func (*PathPrefixer) Name() string { - return "path_prefixer" -} - -// Process adds the prefix to each path -func (p *PathPrefixer) Process(issues []result.Issue) ([]result.Issue, error) { - if p.prefix != "" { - for i := range issues { - issues[i].Pos.Filename = fsutils.WithPathPrefix(p.prefix, issues[i].Pos.Filename) - } - } - return issues, nil -} - -// Finish is implemented to satisfy the Processor interface -func (*PathPrefixer) Finish() {} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/path_prettifier.go b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/path_prettifier.go deleted file mode 100644 
index 3a140999c..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/path_prettifier.go +++ /dev/null @@ -1,48 +0,0 @@ -package processors - -import ( - "fmt" - "path/filepath" - - "github.com/golangci/golangci-lint/pkg/fsutils" - "github.com/golangci/golangci-lint/pkg/result" -) - -type PathPrettifier struct { - root string -} - -var _ Processor = PathPrettifier{} - -func NewPathPrettifier() *PathPrettifier { - root, err := fsutils.Getwd() - if err != nil { - panic(fmt.Sprintf("Can't get working dir: %s", err)) - } - return &PathPrettifier{ - root: root, - } -} - -func (p PathPrettifier) Name() string { - return "path_prettifier" -} - -func (p PathPrettifier) Process(issues []result.Issue) ([]result.Issue, error) { - return transformIssues(issues, func(i *result.Issue) *result.Issue { - if !filepath.IsAbs(i.FilePath()) { - return i - } - - rel, err := fsutils.ShortestRelPath(i.FilePath(), "") - if err != nil { - return i - } - - newI := i - newI.Pos.Filename = rel - return newI - }), nil -} - -func (p PathPrettifier) Finish() {} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/path_shortener.go b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/path_shortener.go deleted file mode 100644 index 6b66bea8b..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/path_shortener.go +++ /dev/null @@ -1,40 +0,0 @@ -package processors - -import ( - "fmt" - "strings" - - "github.com/golangci/golangci-lint/pkg/fsutils" - "github.com/golangci/golangci-lint/pkg/result" -) - -type PathShortener struct { - wd string -} - -var _ Processor = PathShortener{} - -func NewPathShortener() *PathShortener { - wd, err := fsutils.Getwd() - if err != nil { - panic(fmt.Sprintf("Can't get working dir: %s", err)) - } - return &PathShortener{ - wd: wd, - } -} - -func (p PathShortener) Name() string { - return "path_shortener" -} - -func (p PathShortener) Process(issues []result.Issue) ([]result.Issue, error) { - return transformIssues(issues, func(i *result.Issue) *result.Issue { - newI := i - newI.Text = strings.ReplaceAll(newI.Text, p.wd+"/", "") - newI.Text = strings.ReplaceAll(newI.Text, p.wd, "") - return newI - }), nil -} - -func (p PathShortener) Finish() {} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/processor.go b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/processor.go deleted file mode 100644 index 1a7a40434..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/processor.go +++ /dev/null @@ -1,11 +0,0 @@ -package processors - -import ( - "github.com/golangci/golangci-lint/pkg/result" -) - -type Processor interface { - Process(issues []result.Issue) ([]result.Issue, error) - Name() string - Finish() -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/severity_rules.go b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/severity_rules.go deleted file mode 100644 index 0a4a643b7..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/severity_rules.go +++ /dev/null @@ -1,108 +0,0 @@ -package processors - -import ( - "regexp" - - "github.com/golangci/golangci-lint/pkg/fsutils" - "github.com/golangci/golangci-lint/pkg/logutils" - "github.com/golangci/golangci-lint/pkg/result" -) - -type severityRule struct { - baseRule - severity string -} - -type SeverityRule struct { - BaseRule - Severity string -} - -type SeverityRules struct { - defaultSeverity string - rules []severityRule - files 
*fsutils.Files - log logutils.Log -} - -func NewSeverityRules(defaultSeverity string, rules []SeverityRule, files *fsutils.Files, log logutils.Log) *SeverityRules { - r := &SeverityRules{ - files: files, - log: log, - defaultSeverity: defaultSeverity, - } - r.rules = createSeverityRules(rules, "(?i)") - - return r -} - -func createSeverityRules(rules []SeverityRule, prefix string) []severityRule { - parsedRules := make([]severityRule, 0, len(rules)) - for _, rule := range rules { - parsedRule := severityRule{} - parsedRule.linters = rule.Linters - parsedRule.severity = rule.Severity - if rule.Text != "" { - parsedRule.text = regexp.MustCompile(prefix + rule.Text) - } - if rule.Source != "" { - parsedRule.source = regexp.MustCompile(prefix + rule.Source) - } - if rule.Path != "" { - path := fsutils.NormalizePathInRegex(rule.Path) - parsedRule.path = regexp.MustCompile(path) - } - if rule.PathExcept != "" { - pathExcept := fsutils.NormalizePathInRegex(rule.PathExcept) - parsedRule.pathExcept = regexp.MustCompile(pathExcept) - } - parsedRules = append(parsedRules, parsedRule) - } - return parsedRules -} - -func (p SeverityRules) Process(issues []result.Issue) ([]result.Issue, error) { - if len(p.rules) == 0 && p.defaultSeverity == "" { - return issues, nil - } - return transformIssues(issues, func(i *result.Issue) *result.Issue { - for _, rule := range p.rules { - rule := rule - - ruleSeverity := p.defaultSeverity - if rule.severity != "" { - ruleSeverity = rule.severity - } - - if rule.match(i, p.files, p.log) { - i.Severity = ruleSeverity - return i - } - } - i.Severity = p.defaultSeverity - return i - }), nil -} - -func (SeverityRules) Name() string { return "severity-rules" } -func (SeverityRules) Finish() {} - -var _ Processor = SeverityRules{} - -type SeverityRulesCaseSensitive struct { - *SeverityRules -} - -func NewSeverityRulesCaseSensitive(defaultSeverity string, rules []SeverityRule, - files *fsutils.Files, log logutils.Log) *SeverityRulesCaseSensitive { - r := &SeverityRules{ - files: files, - log: log, - defaultSeverity: defaultSeverity, - } - r.rules = createSeverityRules(rules, "") - - return &SeverityRulesCaseSensitive{r} -} - -func (SeverityRulesCaseSensitive) Name() string { return "severity-rules-case-sensitive" } diff --git a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/skip_dirs.go b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/skip_dirs.go deleted file mode 100644 index e71495fd0..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/skip_dirs.go +++ /dev/null @@ -1,146 +0,0 @@ -package processors - -import ( - "fmt" - "path/filepath" - "regexp" - "strings" - - "github.com/golangci/golangci-lint/pkg/fsutils" - "github.com/golangci/golangci-lint/pkg/logutils" - "github.com/golangci/golangci-lint/pkg/result" -) - -type skipStat struct { - pattern string - count int -} - -type SkipDirs struct { - patterns []*regexp.Regexp - log logutils.Log - skippedDirs map[string]*skipStat - absArgsDirs []string - skippedDirsCache map[string]bool - pathPrefix string -} - -var _ Processor = (*SkipDirs)(nil) - -const goFileSuffix = ".go" - -func NewSkipDirs(patterns []string, log logutils.Log, runArgs []string, pathPrefix string) (*SkipDirs, error) { - var patternsRe []*regexp.Regexp - for _, p := range patterns { - p = fsutils.NormalizePathInRegex(p) - patternRe, err := regexp.Compile(p) - if err != nil { - return nil, fmt.Errorf("can't compile regexp %q: %w", p, err) - } - patternsRe = append(patternsRe, patternRe) - } - - if 
len(runArgs) == 0 { - runArgs = append(runArgs, "./...") - } - var absArgsDirs []string - for _, arg := range runArgs { - base := filepath.Base(arg) - if base == "..." || strings.HasSuffix(base, goFileSuffix) { - arg = filepath.Dir(arg) - } - - absArg, err := filepath.Abs(arg) - if err != nil { - return nil, fmt.Errorf("failed to abs-ify arg %q: %w", arg, err) - } - absArgsDirs = append(absArgsDirs, absArg) - } - - return &SkipDirs{ - patterns: patternsRe, - log: log, - skippedDirs: map[string]*skipStat{}, - absArgsDirs: absArgsDirs, - skippedDirsCache: map[string]bool{}, - pathPrefix: pathPrefix, - }, nil -} - -func (p *SkipDirs) Name() string { - return "skip_dirs" -} - -func (p *SkipDirs) Process(issues []result.Issue) ([]result.Issue, error) { - if len(p.patterns) == 0 { - return issues, nil - } - - return filterIssues(issues, p.shouldPassIssue), nil -} - -func (p *SkipDirs) shouldPassIssue(i *result.Issue) bool { - if filepath.IsAbs(i.FilePath()) { - if !isSpecialAutogeneratedFile(i.FilePath()) { - p.log.Warnf("Got abs path %s in skip dirs processor, it should be relative", i.FilePath()) - } - return true - } - - issueRelDir := filepath.Dir(i.FilePath()) - - if toPass, ok := p.skippedDirsCache[issueRelDir]; ok { - if !toPass { - p.skippedDirs[issueRelDir].count++ - } - return toPass - } - - issueAbsDir, err := filepath.Abs(issueRelDir) - if err != nil { - p.log.Warnf("Can't abs-ify path %q: %s", issueRelDir, err) - return true - } - - toPass := p.shouldPassIssueDirs(issueRelDir, issueAbsDir) - p.skippedDirsCache[issueRelDir] = toPass - return toPass -} - -func (p *SkipDirs) shouldPassIssueDirs(issueRelDir, issueAbsDir string) bool { - for _, absArgDir := range p.absArgsDirs { - if absArgDir == issueAbsDir { - // we must not skip issues if they are from explicitly set dirs - // even if they match skip patterns - return true - } - } - - // We use issueRelDir for matching: it's the relative to the current - // work dir path of directory of source file with the issue. It can lead - // to unexpected behavior if we're analyzing files out of current work dir. - // The alternative solution is to find relative to args path, but it has - // disadvantages (https://github.com/golangci/golangci-lint/pull/313). 
- - path := fsutils.WithPathPrefix(p.pathPrefix, issueRelDir) - for _, pattern := range p.patterns { - if pattern.MatchString(path) { - ps := pattern.String() - if p.skippedDirs[issueRelDir] == nil { - p.skippedDirs[issueRelDir] = &skipStat{ - pattern: ps, - } - } - p.skippedDirs[issueRelDir].count++ - return false - } - } - - return true -} - -func (p *SkipDirs) Finish() { - for dir, stat := range p.skippedDirs { - p.log.Infof("Skipped %d issues from dir %s by pattern %s", stat.count, dir, stat.pattern) - } -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/skip_files.go b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/skip_files.go deleted file mode 100644 index 6c1c58695..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/skip_files.go +++ /dev/null @@ -1,56 +0,0 @@ -package processors - -import ( - "fmt" - "regexp" - - "github.com/golangci/golangci-lint/pkg/fsutils" - "github.com/golangci/golangci-lint/pkg/result" -) - -type SkipFiles struct { - patterns []*regexp.Regexp - pathPrefix string -} - -var _ Processor = (*SkipFiles)(nil) - -func NewSkipFiles(patterns []string, pathPrefix string) (*SkipFiles, error) { - var patternsRe []*regexp.Regexp - for _, p := range patterns { - p = fsutils.NormalizePathInRegex(p) - patternRe, err := regexp.Compile(p) - if err != nil { - return nil, fmt.Errorf("can't compile regexp %q: %w", p, err) - } - patternsRe = append(patternsRe, patternRe) - } - - return &SkipFiles{ - patterns: patternsRe, - pathPrefix: pathPrefix, - }, nil -} - -func (p SkipFiles) Name() string { - return "skip_files" -} - -func (p SkipFiles) Process(issues []result.Issue) ([]result.Issue, error) { - if len(p.patterns) == 0 { - return issues, nil - } - - return filterIssues(issues, func(i *result.Issue) bool { - path := fsutils.WithPathPrefix(p.pathPrefix, i.FilePath()) - for _, pattern := range p.patterns { - if pattern.MatchString(path) { - return false - } - } - - return true - }), nil -} - -func (p SkipFiles) Finish() {} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/sort_results.go b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/sort_results.go deleted file mode 100644 index 740c4fa8c..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/sort_results.go +++ /dev/null @@ -1,166 +0,0 @@ -package processors - -import ( - "sort" - "strings" - - "github.com/golangci/golangci-lint/pkg/config" - "github.com/golangci/golangci-lint/pkg/result" -) - -// Base propose of this functionality to sort results (issues) -// produced by various linters by analyzing code. We're achieving this -// by sorting results.Issues using processor step, and chain based -// rules that can compare different properties of the Issues struct. - -var _ Processor = (*SortResults)(nil) - -type SortResults struct { - cmp comparator - cfg *config.Config -} - -func NewSortResults(cfg *config.Config) *SortResults { - // For sorting we are comparing (in next order): file names, line numbers, - // position, and finally - giving up. - return &SortResults{ - cmp: ByName{ - next: ByLine{ - next: ByColumn{}, - }, - }, - cfg: cfg, - } -} - -// Process is performing sorting of the result issues. 
-func (sr SortResults) Process(issues []result.Issue) ([]result.Issue, error) { - if !sr.cfg.Output.SortResults { - return issues, nil - } - - sort.Slice(issues, func(i, j int) bool { - return sr.cmp.Compare(&issues[i], &issues[j]) == Less - }) - - return issues, nil -} - -func (sr SortResults) Name() string { return "sort_results" } -func (sr SortResults) Finish() {} - -type compareResult int - -const ( - Less compareResult = iota - 1 - Equal - Greater - None -) - -func (c compareResult) isNeutral() bool { - // return true if compare result is incomparable or equal. - return c == None || c == Equal -} - -func (c compareResult) String() string { - switch c { - case Less: - return "Less" - case Equal: - return "Equal" - case Greater: - return "Greater" - } - - return "None" -} - -// comparator describe how to implement compare for two "issues" lexicographically -type comparator interface { - Compare(a, b *result.Issue) compareResult - Next() comparator -} - -var ( - _ comparator = (*ByName)(nil) - _ comparator = (*ByLine)(nil) - _ comparator = (*ByColumn)(nil) -) - -type ByName struct{ next comparator } - -func (cmp ByName) Next() comparator { return cmp.next } - -func (cmp ByName) Compare(a, b *result.Issue) compareResult { - var res compareResult - - if res = compareResult(strings.Compare(a.FilePath(), b.FilePath())); !res.isNeutral() { - return res - } - - if next := cmp.Next(); next != nil { - return next.Compare(a, b) - } - - return res -} - -type ByLine struct{ next comparator } - -func (cmp ByLine) Next() comparator { return cmp.next } - -func (cmp ByLine) Compare(a, b *result.Issue) compareResult { - var res compareResult - - if res = numericCompare(a.Line(), b.Line()); !res.isNeutral() { - return res - } - - if next := cmp.Next(); next != nil { - return next.Compare(a, b) - } - - return res -} - -type ByColumn struct{ next comparator } - -func (cmp ByColumn) Next() comparator { return cmp.next } - -func (cmp ByColumn) Compare(a, b *result.Issue) compareResult { - var res compareResult - - if res = numericCompare(a.Column(), b.Column()); !res.isNeutral() { - return res - } - - if next := cmp.Next(); next != nil { - return next.Compare(a, b) - } - - return res -} - -func numericCompare(a, b int) compareResult { - var ( - isValuesInvalid = a < 0 || b < 0 - isZeroValuesBoth = a == 0 && b == 0 - isEqual = a == b - isZeroValueInA = b > 0 && a == 0 - isZeroValueInB = a > 0 && b == 0 - ) - - switch { - case isZeroValuesBoth || isEqual: - return Equal - case isValuesInvalid || isZeroValueInA || isZeroValueInB: - return None - case a > b: - return Greater - case a < b: - return Less - } - - return Equal -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/source_code.go b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/source_code.go deleted file mode 100644 index cfd73cb98..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/source_code.go +++ /dev/null @@ -1,47 +0,0 @@ -package processors - -import ( - "github.com/golangci/golangci-lint/pkg/fsutils" - "github.com/golangci/golangci-lint/pkg/logutils" - "github.com/golangci/golangci-lint/pkg/result" -) - -type SourceCode struct { - lineCache *fsutils.LineCache - log logutils.Log -} - -var _ Processor = SourceCode{} - -func NewSourceCode(lc *fsutils.LineCache, log logutils.Log) *SourceCode { - return &SourceCode{ - lineCache: lc, - log: log, - } -} - -func (p SourceCode) Name() string { - return "source_code" -} - -func (p SourceCode) Process(issues []result.Issue) 
([]result.Issue, error) { - return transformIssues(issues, func(i *result.Issue) *result.Issue { - newI := *i - - lineRange := i.GetLineRange() - for lineNumber := lineRange.From; lineNumber <= lineRange.To; lineNumber++ { - line, err := p.lineCache.GetLine(i.FilePath(), lineNumber) - if err != nil { - p.log.Warnf("Failed to get line %d for file %s: %s", - lineNumber, i.FilePath(), err) - return i - } - - newI.SourceLines = append(newI.SourceLines, line) - } - - return &newI - }), nil -} - -func (p SourceCode) Finish() {} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/uniq_by_line.go b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/uniq_by_line.go deleted file mode 100644 index dc0e1e8cf..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/uniq_by_line.go +++ /dev/null @@ -1,58 +0,0 @@ -package processors - -import ( - "github.com/golangci/golangci-lint/pkg/config" - "github.com/golangci/golangci-lint/pkg/result" -) - -type lineToCount map[int]int -type fileToLineToCount map[string]lineToCount - -type UniqByLine struct { - flc fileToLineToCount - cfg *config.Config -} - -func NewUniqByLine(cfg *config.Config) *UniqByLine { - return &UniqByLine{ - flc: fileToLineToCount{}, - cfg: cfg, - } -} - -var _ Processor = &UniqByLine{} - -func (p *UniqByLine) Name() string { - return "uniq_by_line" -} - -func (p *UniqByLine) Process(issues []result.Issue) ([]result.Issue, error) { - if !p.cfg.Output.UniqByLine { - return issues, nil - } - - return filterIssues(issues, func(i *result.Issue) bool { - if i.Replacement != nil && p.cfg.Issues.NeedFix { - // if issue will be auto-fixed we shouldn't collapse issues: - // e.g. one line can contain 2 misspellings, they will be in 2 issues and misspell should fix both of them. 
- return true - } - - lc := p.flc[i.FilePath()] - if lc == nil { - lc = lineToCount{} - p.flc[i.FilePath()] = lc - } - - const limit = 1 - count := lc[i.Line()] - if count == limit { - return false - } - - lc[i.Line()]++ - return true - }), nil -} - -func (p *UniqByLine) Finish() {} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/timeutils/stopwatch.go b/vendor/github.com/golangci/golangci-lint/pkg/timeutils/stopwatch.go deleted file mode 100644 index d944dea2e..000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/timeutils/stopwatch.go +++ /dev/null @@ -1,116 +0,0 @@ -package timeutils - -import ( - "fmt" - "sort" - "strings" - "sync" - "time" - - "github.com/golangci/golangci-lint/pkg/logutils" -) - -const noStagesText = "no stages" - -type Stopwatch struct { - name string - startedAt time.Time - log logutils.Log - - stages map[string]time.Duration - mu sync.Mutex -} - -func NewStopwatch(name string, log logutils.Log) *Stopwatch { - return &Stopwatch{ - name: name, - startedAt: time.Now(), - stages: map[string]time.Duration{}, - log: log, - } -} - -type stageDuration struct { - name string - d time.Duration -} - -func (s *Stopwatch) stageDurationsSorted() []stageDuration { - stageDurations := make([]stageDuration, 0, len(s.stages)) - for n, d := range s.stages { - stageDurations = append(stageDurations, stageDuration{ - name: n, - d: d, - }) - } - sort.Slice(stageDurations, func(i, j int) bool { - return stageDurations[i].d > stageDurations[j].d - }) - return stageDurations -} - -func (s *Stopwatch) sprintStages() string { - if len(s.stages) == 0 { - return noStagesText - } - - stageDurations := s.stageDurationsSorted() - - stagesStrings := make([]string, 0, len(stageDurations)) - for _, s := range stageDurations { - stagesStrings = append(stagesStrings, fmt.Sprintf("%s: %s", s.name, s.d)) - } - - return fmt.Sprintf("stages: %s", strings.Join(stagesStrings, ", ")) -} - -func (s *Stopwatch) sprintTopStages(n int) string { - if len(s.stages) == 0 { - return noStagesText - } - - stageDurations := s.stageDurationsSorted() - - var stagesStrings []string - for i := 0; i < len(stageDurations) && i < n; i++ { - s := stageDurations[i] - stagesStrings = append(stagesStrings, fmt.Sprintf("%s: %s", s.name, s.d)) - } - - return fmt.Sprintf("top %d stages: %s", n, strings.Join(stagesStrings, ", ")) -} - -func (s *Stopwatch) Print() { - p := fmt.Sprintf("%s took %s", s.name, time.Since(s.startedAt)) - if len(s.stages) == 0 { - s.log.Infof("%s", p) - return - } - - s.log.Infof("%s with %s", p, s.sprintStages()) -} - -func (s *Stopwatch) PrintStages() { - var stagesDuration time.Duration - for _, s := range s.stages { - stagesDuration += s - } - s.log.Infof("%s took %s with %s", s.name, stagesDuration, s.sprintStages()) -} - -func (s *Stopwatch) PrintTopStages(n int) { - var stagesDuration time.Duration - for _, s := range s.stages { - stagesDuration += s - } - s.log.Infof("%s took %s with %s", s.name, stagesDuration, s.sprintTopStages(n)) -} - -func (s *Stopwatch) TrackStage(name string, f func()) { - startedAt := time.Now() - f() - - s.mu.Lock() - s.stages[name] += time.Since(startedAt) - s.mu.Unlock() -} diff --git a/vendor/github.com/golangci/lint-1/.travis.yml b/vendor/github.com/golangci/lint-1/.travis.yml deleted file mode 100644 index bc2f4b311..000000000 --- a/vendor/github.com/golangci/lint-1/.travis.yml +++ /dev/null @@ -1,19 +0,0 @@ -sudo: false -language: go -go: - - 1.10.x - - 1.11.x - - master - -go_import_path: github.com/golangci/lint-1 - -install: - - go get -t -v 
./... - -script: - - go test -v -race ./... - -matrix: - allow_failures: - - go: master - fast_finish: true diff --git a/vendor/github.com/golangci/lint-1/CONTRIBUTING.md b/vendor/github.com/golangci/lint-1/CONTRIBUTING.md deleted file mode 100644 index 2e39a1c67..000000000 --- a/vendor/github.com/golangci/lint-1/CONTRIBUTING.md +++ /dev/null @@ -1,15 +0,0 @@ -# Contributing to Golint - -## Before filing an issue: - -### Are you having trouble building golint? - -Check you have the latest version of its dependencies. Run -``` -go get -u github.com/golangci/lint-1/golint -``` -If you still have problems, consider searching for existing issues before filing a new issue. - -## Before sending a pull request: - -Have you understood the purpose of golint? Make sure to carefully read `README`. diff --git a/vendor/github.com/golangci/lint-1/LICENSE b/vendor/github.com/golangci/lint-1/LICENSE deleted file mode 100644 index 65d761bc9..000000000 --- a/vendor/github.com/golangci/lint-1/LICENSE +++ /dev/null @@ -1,27 +0,0 @@ -Copyright (c) 2013 The Go Authors. All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are -met: - - * Redistributions of source code must retain the above copyright -notice, this list of conditions and the following disclaimer. - * Redistributions in binary form must reproduce the above -copyright notice, this list of conditions and the following disclaimer -in the documentation and/or other materials provided with the -distribution. - * Neither the name of Google Inc. nor the names of its -contributors may be used to endorse or promote products derived from -this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/vendor/github.com/golangci/lint-1/README.md b/vendor/github.com/golangci/lint-1/README.md deleted file mode 100644 index 2de6ee835..000000000 --- a/vendor/github.com/golangci/lint-1/README.md +++ /dev/null @@ -1,88 +0,0 @@ -Golint is a linter for Go source code. - -[![Build Status](https://travis-ci.org/golang/lint.svg?branch=master)](https://travis-ci.org/golang/lint) - -## Installation - -Golint requires a -[supported release of Go](https://golang.org/doc/devel/release.html#policy). - - go get -u github.com/golangci/lint-1/golint - -To find out where `golint` was installed you can run `go list -f {{.Target}} github.com/golangci/lint-1/golint`. For `golint` to be used globally add that directory to the `$PATH` environment setting. - -## Usage - -Invoke `golint` with one or more filenames, directories, or packages named -by its import path. 
Golint uses the same -[import path syntax](https://golang.org/cmd/go/#hdr-Import_path_syntax) as -the `go` command and therefore -also supports relative import paths like `./...`. Additionally the `...` -wildcard can be used as suffix on relative and absolute file paths to recurse -into them. - -The output of this tool is a list of suggestions in Vim quickfix format, -which is accepted by lots of different editors. - -## Purpose - -Golint differs from gofmt. Gofmt reformats Go source code, whereas -golint prints out style mistakes. - -Golint differs from govet. Govet is concerned with correctness, whereas -golint is concerned with coding style. Golint is in use at Google, and it -seeks to match the accepted style of the open source Go project. - -The suggestions made by golint are exactly that: suggestions. -Golint is not perfect, and has both false positives and false negatives. -Do not treat its output as a gold standard. We will not be adding pragmas -or other knobs to suppress specific warnings, so do not expect or require -code to be completely "lint-free". -In short, this tool is not, and will never be, trustworthy enough for its -suggestions to be enforced automatically, for example as part of a build process. -Golint makes suggestions for many of the mechanically checkable items listed in -[Effective Go](https://golang.org/doc/effective_go.html) and the -[CodeReviewComments wiki page](https://golang.org/wiki/CodeReviewComments). - -## Scope - -Golint is meant to carry out the stylistic conventions put forth in -[Effective Go](https://golang.org/doc/effective_go.html) and -[CodeReviewComments](https://golang.org/wiki/CodeReviewComments). -Changes that are not aligned with those documents will not be considered. - -## Contributions - -Contributions to this project are welcome provided they are [in scope](#scope), -though please send mail before starting work on anything major. -Contributors retain their copyright, so we need you to fill out -[a short form](https://developers.google.com/open-source/cla/individual) -before we can accept your contribution. - -## Vim - -Add this to your ~/.vimrc: - - set rtp+=$GOPATH/src/github.com/golangci/lint-1/misc/vim - -If you have multiple entries in your GOPATH, replace `$GOPATH` with the right value. - -Running `:Lint` will run golint on the current file and populate the quickfix list. - -Optionally, add this to your `~/.vimrc` to automatically run `golint` on `:w` - - autocmd BufWritePost,FileWritePost *.go execute 'Lint' | cwindow - - -## Emacs - -Add this to your `.emacs` file: - - (add-to-list 'load-path (concat (getenv "GOPATH") "/src/github.com/golang/lint/misc/emacs")) - (require 'golint) - -If you have multiple entries in your GOPATH, replace `$GOPATH` with the right value. - -Running M-x golint will run golint on the current file. - -For more usage, see [Compilation-Mode](http://www.gnu.org/software/emacs/manual/html_node/emacs/Compilation-Mode.html). diff --git a/vendor/github.com/golangci/lint-1/lint.go b/vendor/github.com/golangci/lint-1/lint.go deleted file mode 100644 index 886c85bf0..000000000 --- a/vendor/github.com/golangci/lint-1/lint.go +++ /dev/null @@ -1,1655 +0,0 @@ -// Copyright (c) 2013 The Go Authors. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file or at -// https://developers.google.com/open-source/licenses/bsd. - -// Package lint contains a linter for Go source code. 
-package lint // import "github.com/golangci/lint-1" - -import ( - "bufio" - "bytes" - "fmt" - "go/ast" - "go/parser" - "go/printer" - "go/token" - "go/types" - "io/ioutil" - "regexp" - "sort" - "strconv" - "strings" - "unicode" - "unicode/utf8" - - "golang.org/x/tools/go/ast/astutil" - "golang.org/x/tools/go/gcexportdata" -) - -const styleGuideBase = "https://golang.org/wiki/CodeReviewComments" - -// A Linter lints Go source code. -type Linter struct { -} - -// Problem represents a problem in some source code. -type Problem struct { - Position token.Position // position in source file - Text string // the prose that describes the problem - Link string // (optional) the link to the style guide for the problem - Confidence float64 // a value in (0,1] estimating the confidence in this problem's correctness - LineText string // the source line - Category string // a short name for the general category of the problem - - // If the problem has a suggested fix (the minority case), - // ReplacementLine is a full replacement for the relevant line of the source file. - ReplacementLine string -} - -func (p *Problem) String() string { - if p.Link != "" { - return p.Text + "\n\n" + p.Link - } - return p.Text -} - -type byPosition []Problem - -func (p byPosition) Len() int { return len(p) } -func (p byPosition) Swap(i, j int) { p[i], p[j] = p[j], p[i] } - -func (p byPosition) Less(i, j int) bool { - pi, pj := p[i].Position, p[j].Position - - if pi.Filename != pj.Filename { - return pi.Filename < pj.Filename - } - if pi.Line != pj.Line { - return pi.Line < pj.Line - } - if pi.Column != pj.Column { - return pi.Column < pj.Column - } - - return p[i].Text < p[j].Text -} - -// Lint lints src. -func (l *Linter) Lint(filename string, src []byte) ([]Problem, error) { - return l.LintFiles(map[string][]byte{filename: src}) -} - -// LintFiles lints a set of files of a single package. -// The argument is a map of filename to source. -func (l *Linter) LintFiles(files map[string][]byte) ([]Problem, error) { - pkg := &pkg{ - fset: token.NewFileSet(), - files: make(map[string]*file), - } - var pkgName string - for filename, src := range files { - if isGenerated(src) { - continue // See issue #239 - } - f, err := parser.ParseFile(pkg.fset, filename, src, parser.ParseComments) - if err != nil { - return nil, err - } - if pkgName == "" { - pkgName = f.Name.Name - } else if f.Name.Name != pkgName { - return nil, fmt.Errorf("%s is in package %s, not %s", filename, f.Name.Name, pkgName) - } - pkg.files[filename] = &file{ - pkg: pkg, - f: f, - fset: pkg.fset, - src: src, - filename: filename, - } - } - if len(pkg.files) == 0 { - return nil, nil - } - return pkg.lint(), nil -} - -// LintFiles lints a set of files of a single package. -// The argument is a map of filename to source. -func (l *Linter) LintPkg(files []*ast.File, fset *token.FileSet, typesPkg *types.Package, typesInfo *types.Info) ([]Problem, error) { - pkg := &pkg{ - fset: fset, - files: make(map[string]*file), - typesPkg: typesPkg, - typesInfo: typesInfo, - } - var pkgName string - for _, f := range files { - // use PositionFor, not Position because of //line directives: - // this filename will be used for source lines extraction. 
- filename := fset.PositionFor(f.Pos(), false).Filename - if filename == "" { - return nil, fmt.Errorf("no file name for file %+v", f) - } - - if pkgName == "" { - pkgName = f.Name.Name - } else if f.Name.Name != pkgName { - return nil, fmt.Errorf("%s is in package %s, not %s", filename, f.Name.Name, pkgName) - } - - // TODO: reuse golangci-lint lines cache - src, err := ioutil.ReadFile(filename) - if err != nil { - return nil, fmt.Errorf("can't read file %s: %s", filename, err) - } - - pkg.files[filename] = &file{ - pkg: pkg, - f: f, - fset: pkg.fset, - src: src, - filename: filename, - } - } - if len(pkg.files) == 0 { - return nil, nil - } - return pkg.lint(), nil -} - -var ( - genHdr = []byte("// Code generated ") - genFtr = []byte(" DO NOT EDIT.") -) - -// isGenerated reports whether the source file is generated code -// according the rules from https://golang.org/s/generatedcode. -func isGenerated(src []byte) bool { - sc := bufio.NewScanner(bytes.NewReader(src)) - for sc.Scan() { - b := sc.Bytes() - if bytes.HasPrefix(b, genHdr) && bytes.HasSuffix(b, genFtr) && len(b) >= len(genHdr)+len(genFtr) { - return true - } - } - return false -} - -// pkg represents a package being linted. -type pkg struct { - fset *token.FileSet - files map[string]*file - - typesPkg *types.Package - typesInfo *types.Info - - // sortable is the set of types in the package that implement sort.Interface. - sortable map[string]bool - // main is whether this is a "main" package. - main bool - - problems []Problem -} - -func (p *pkg) lint() []Problem { - p.scanSortable() - p.main = p.isMain() - - for _, f := range p.files { - f.lint() - } - - sort.Sort(byPosition(p.problems)) - - return p.problems -} - -// file represents a file being linted. -type file struct { - pkg *pkg - f *ast.File - fset *token.FileSet - src []byte - filename string -} - -func (f *file) isTest() bool { return strings.HasSuffix(f.filename, "_test.go") } - -func (f *file) lint() { - f.lintPackageComment() - f.lintImports() - f.lintBlankImports() - f.lintExported() - f.lintNames() - f.lintElses() - f.lintRanges() - f.lintErrorf() - f.lintErrors() - f.lintErrorStrings() - f.lintReceiverNames() - f.lintIncDec() - f.lintErrorReturn() - f.lintUnexportedReturn() - f.lintTimeNames() - f.lintContextKeyTypes() - f.lintContextArgs() -} - -type link string -type category string - -// The variadic arguments may start with link and category types, -// and must end with a format string and any arguments. -// It returns the new Problem. -func (f *file) errorf(n ast.Node, confidence float64, args ...interface{}) *Problem { - pos := f.fset.Position(n.Pos()) - if pos.Filename == "" { - pos.Filename = f.filename - } - return f.pkg.errorfAt(pos, confidence, args...) -} - -func (p *pkg) errorfAt(pos token.Position, confidence float64, args ...interface{}) *Problem { - problem := Problem{ - Position: pos, - Confidence: confidence, - } - if pos.Filename != "" { - // The file might not exist in our mapping if a //line directive was encountered. - if f, ok := p.files[pos.Filename]; ok { - problem.LineText = srcLine(f.src, pos) - } - } - -argLoop: - for len(args) > 1 { // always leave at least the format string in args - switch v := args[0].(type) { - case link: - problem.Link = string(v) - case category: - problem.Category = string(v) - default: - break argLoop - } - args = args[1:] - } - - problem.Text = fmt.Sprintf(args[0].(string), args[1:]...) 
- - p.problems = append(p.problems, problem) - return &p.problems[len(p.problems)-1] -} - -var newImporter = func(fset *token.FileSet) types.ImporterFrom { - return gcexportdata.NewImporter(fset, make(map[string]*types.Package)) -} - -func (p *pkg) typeCheck() error { - config := &types.Config{ - // By setting a no-op error reporter, the type checker does as much work as possible. - Error: func(error) {}, - Importer: newImporter(p.fset), - } - info := &types.Info{ - Types: make(map[ast.Expr]types.TypeAndValue), - Defs: make(map[*ast.Ident]types.Object), - Uses: make(map[*ast.Ident]types.Object), - Scopes: make(map[ast.Node]*types.Scope), - } - var anyFile *file - var astFiles []*ast.File - for _, f := range p.files { - anyFile = f - astFiles = append(astFiles, f.f) - } - pkg, err := config.Check(anyFile.f.Name.Name, p.fset, astFiles, info) - // Remember the typechecking info, even if config.Check failed, - // since we will get partial information. - p.typesPkg = pkg - p.typesInfo = info - return err -} - -func (p *pkg) typeOf(expr ast.Expr) types.Type { - if p.typesInfo == nil { - return nil - } - return p.typesInfo.TypeOf(expr) -} - -func (p *pkg) isNamedType(typ types.Type, importPath, name string) bool { - n, ok := typ.(*types.Named) - if !ok { - return false - } - tn := n.Obj() - return tn != nil && tn.Pkg() != nil && tn.Pkg().Path() == importPath && tn.Name() == name -} - -// scopeOf returns the tightest scope encompassing id. -func (p *pkg) scopeOf(id *ast.Ident) *types.Scope { - var scope *types.Scope - if obj := p.typesInfo.ObjectOf(id); obj != nil { - scope = obj.Parent() - } - if scope == p.typesPkg.Scope() { - // We were given a top-level identifier. - // Use the file-level scope instead of the package-level scope. - pos := id.Pos() - for _, f := range p.files { - if f.f.Pos() <= pos && pos < f.f.End() { - scope = p.typesInfo.Scopes[f.f] - break - } - } - } - return scope -} - -func (p *pkg) scanSortable() { - p.sortable = make(map[string]bool) - - // bitfield for which methods exist on each type. - const ( - Len = 1 << iota - Less - Swap - ) - nmap := map[string]int{"Len": Len, "Less": Less, "Swap": Swap} - has := make(map[string]int) - for _, f := range p.files { - f.walk(func(n ast.Node) bool { - fn, ok := n.(*ast.FuncDecl) - if !ok || fn.Recv == nil || len(fn.Recv.List) == 0 { - return true - } - // TODO(dsymonds): We could check the signature to be more precise. - recv := receiverType(fn) - if i, ok := nmap[fn.Name.Name]; ok { - has[recv] |= i - } - return false - }) - } - for typ, ms := range has { - if ms == Len|Less|Swap { - p.sortable[typ] = true - } - } -} - -func (p *pkg) isMain() bool { - for _, f := range p.files { - if f.isMain() { - return true - } - } - return false -} - -func (f *file) isMain() bool { - if f.f.Name.Name == "main" { - return true - } - return false -} - -// lintPackageComment checks package comments. It complains if -// there is no package comment, or if it is not of the right form. -// This has a notable false positive in that a package comment -// could rightfully appear in a different file of the same package, -// but that's not easy to fix since this linter is file-oriented. -func (f *file) lintPackageComment() { - if f.isTest() { - return - } - - const ref = styleGuideBase + "#package-comments" - prefix := "Package " + f.f.Name.Name + " " - - // Look for a detached package comment. - // First, scan for the last comment that occurs before the "package" keyword. 
- var lastCG *ast.CommentGroup - for _, cg := range f.f.Comments { - if cg.Pos() > f.f.Package { - // Gone past "package" keyword. - break - } - lastCG = cg - } - if lastCG != nil && strings.HasPrefix(lastCG.Text(), prefix) { - endPos := f.fset.Position(lastCG.End()) - pkgPos := f.fset.Position(f.f.Package) - if endPos.Line+1 < pkgPos.Line { - // There isn't a great place to anchor this error; - // the start of the blank lines between the doc and the package statement - // is at least pointing at the location of the problem. - pos := token.Position{ - Filename: endPos.Filename, - // Offset not set; it is non-trivial, and doesn't appear to be needed. - Line: endPos.Line + 1, - Column: 1, - } - f.pkg.errorfAt(pos, 0.9, link(ref), category("comments"), "package comment is detached; there should be no blank lines between it and the package statement") - return - } - } - - if f.f.Doc == nil { - f.errorf(f.f, 0.2, link(ref), category("comments"), "should have a package comment, unless it's in another file for this package") - return - } - s := f.f.Doc.Text() - if ts := strings.TrimLeft(s, " \t"); ts != s { - f.errorf(f.f.Doc, 1, link(ref), category("comments"), "package comment should not have leading space") - s = ts - } - // Only non-main packages need to keep to this form. - if !f.pkg.main && !strings.HasPrefix(s, prefix) { - f.errorf(f.f.Doc, 1, link(ref), category("comments"), `package comment should be of the form "%s..."`, prefix) - } -} - -func (f *file) isCgo() bool { - if f.src == nil { - return false - } - newLinePos := bytes.Index(f.src, []byte("\n")) - if newLinePos < 0 { - return false - } - firstLine := string(f.src[:newLinePos]) - - // files using cgo have implicitly added comment "Created by cgo - DO NOT EDIT" for go <= 1.10 - // and "Code generated by cmd/cgo" for go >= 1.11 - return strings.Contains(firstLine, "Created by cgo") || strings.Contains(firstLine, "Code generated by cmd/cgo") -} - -// lintBlankImports complains if a non-main package has blank imports that are -// not documented. -func (f *file) lintBlankImports() { - // In package main and in tests, we don't complain about blank imports. - if f.pkg.main || f.isTest() || f.isCgo() { - return - } - - // The first element of each contiguous group of blank imports should have - // an explanatory comment of some kind. - for i, imp := range f.f.Imports { - pos := f.fset.Position(imp.Pos()) - - if !isBlank(imp.Name) { - continue // Ignore non-blank imports. - } - if i > 0 { - prev := f.f.Imports[i-1] - prevPos := f.fset.Position(prev.Pos()) - if isBlank(prev.Name) && prevPos.Line+1 == pos.Line { - continue // A subsequent blank in a group. - } - } - - // This is the first blank import of a group. - if imp.Doc == nil && imp.Comment == nil { - ref := "" - f.errorf(imp, 1, link(ref), category("imports"), "a blank import should be only in a main or test package, or have a comment justifying it") - } - } -} - -// lintImports examines import blocks. -func (f *file) lintImports() { - for i, is := range f.f.Imports { - _ = i - if is.Name != nil && is.Name.Name == "." && !f.isTest() { - f.errorf(is, 1, link(styleGuideBase+"#import-dot"), category("imports"), "should not use dot imports") - } - - } -} - -const docCommentsLink = styleGuideBase + "#doc-comments" - -// lintExported examines the exported names. -// It complains if any required doc comments are missing, -// or if they are not of the right form. 
The exact rules are in -// lintFuncDoc, lintTypeDoc and lintValueSpecDoc; this function -// also tracks the GenDecl structure being traversed to permit -// doc comments for constants to be on top of the const block. -// It also complains if the names stutter when combined with -// the package name. -func (f *file) lintExported() { - if f.isTest() { - return - } - - var lastGen *ast.GenDecl // last GenDecl entered. - - // Set of GenDecls that have already had missing comments flagged. - genDeclMissingComments := make(map[*ast.GenDecl]bool) - - f.walk(func(node ast.Node) bool { - switch v := node.(type) { - case *ast.GenDecl: - if v.Tok == token.IMPORT { - return false - } - // token.CONST, token.TYPE or token.VAR - lastGen = v - return true - case *ast.FuncDecl: - f.lintFuncDoc(v) - if v.Recv == nil { - // Only check for stutter on functions, not methods. - // Method names are not used package-qualified. - f.checkStutter(v.Name, "func") - } - // Don't proceed inside funcs. - return false - case *ast.TypeSpec: - // inside a GenDecl, which usually has the doc - doc := v.Doc - if doc == nil { - doc = lastGen.Doc - } - f.lintTypeDoc(v, doc) - f.checkStutter(v.Name, "type") - // Don't proceed inside types. - return false - case *ast.ValueSpec: - f.lintValueSpecDoc(v, lastGen, genDeclMissingComments) - return false - } - return true - }) -} - -var ( - allCapsRE = regexp.MustCompile(`^[A-Z0-9_]+$`) - anyCapsRE = regexp.MustCompile(`[A-Z]`) -) - -// knownNameExceptions is a set of names that are known to be exempt from naming checks. -// This is usually because they are constrained by having to match names in the -// standard library. -var knownNameExceptions = map[string]bool{ - "LastInsertId": true, // must match database/sql - "kWh": true, -} - -func isInTopLevel(f *ast.File, ident *ast.Ident) bool { - path, _ := astutil.PathEnclosingInterval(f, ident.Pos(), ident.End()) - for _, f := range path { - switch f.(type) { - case *ast.File, *ast.GenDecl, *ast.ValueSpec, *ast.Ident: - continue - } - return false - } - return true -} - -// lintNames examines all names in the file. -// It complains if any use underscores or incorrect known initialisms. -func (f *file) lintNames() { - // Package names need slightly different handling than other names. - if strings.Contains(f.f.Name.Name, "_") && !strings.HasSuffix(f.f.Name.Name, "_test") { - f.errorf(f.f, 1, link("http://golang.org/doc/effective_go.html#package-names"), category("naming"), "don't use an underscore in package name") - } - if anyCapsRE.MatchString(f.f.Name.Name) { - f.errorf(f.f, 1, link("http://golang.org/doc/effective_go.html#package-names"), category("mixed-caps"), "don't use MixedCaps in package name; %s should be %s", f.f.Name.Name, strings.ToLower(f.f.Name.Name)) - } - - check := func(id *ast.Ident, thing string) { - if id.Name == "_" { - return - } - if knownNameExceptions[id.Name] { - return - } - - // Handle two common styles from other languages that don't belong in Go. 
- if len(id.Name) >= 5 && allCapsRE.MatchString(id.Name) && strings.Contains(id.Name, "_") { - capCount := 0 - for _, c := range id.Name { - if 'A' <= c && c <= 'Z' { - capCount++ - } - } - if capCount >= 2 { - f.errorf(id, 0.8, link(styleGuideBase+"#mixed-caps"), category("naming"), "don't use ALL_CAPS in Go names; use CamelCase") - return - } - } - if thing == "const" || (thing == "var" && isInTopLevel(f.f, id)) { - if len(id.Name) > 2 && id.Name[0] == 'k' && id.Name[1] >= 'A' && id.Name[1] <= 'Z' { - should := string(id.Name[1]+'a'-'A') + id.Name[2:] - f.errorf(id, 0.8, link(styleGuideBase+"#mixed-caps"), category("naming"), "don't use leading k in Go names; %s %s should be %s", thing, id.Name, should) - } - } - - should := lintName(id.Name) - if id.Name == should { - return - } - - if len(id.Name) > 2 && strings.Contains(id.Name[1:], "_") { - f.errorf(id, 0.9, link("http://golang.org/doc/effective_go.html#mixed-caps"), category("naming"), "don't use underscores in Go names; %s %s should be %s", thing, id.Name, should) - return - } - f.errorf(id, 0.8, link(styleGuideBase+"#initialisms"), category("naming"), "%s %s should be %s", thing, id.Name, should) - } - checkList := func(fl *ast.FieldList, thing string) { - if fl == nil { - return - } - for _, f := range fl.List { - for _, id := range f.Names { - check(id, thing) - } - } - } - f.walk(func(node ast.Node) bool { - switch v := node.(type) { - case *ast.AssignStmt: - if v.Tok == token.ASSIGN { - return true - } - for _, exp := range v.Lhs { - if id, ok := exp.(*ast.Ident); ok { - check(id, "var") - } - } - case *ast.FuncDecl: - if f.isTest() && (strings.HasPrefix(v.Name.Name, "Example") || strings.HasPrefix(v.Name.Name, "Test") || strings.HasPrefix(v.Name.Name, "Benchmark")) { - return true - } - - thing := "func" - if v.Recv != nil { - thing = "method" - } - - // Exclude naming warnings for functions that are exported to C but - // not exported in the Go API. - // See https://github.com/golang/lint/issues/144. - if ast.IsExported(v.Name.Name) || !isCgoExported(v) { - check(v.Name, thing) - } - - checkList(v.Type.Params, thing+" parameter") - checkList(v.Type.Results, thing+" result") - case *ast.GenDecl: - if v.Tok == token.IMPORT { - return true - } - var thing string - switch v.Tok { - case token.CONST: - thing = "const" - case token.TYPE: - thing = "type" - case token.VAR: - thing = "var" - } - for _, spec := range v.Specs { - switch s := spec.(type) { - case *ast.TypeSpec: - check(s.Name, thing) - case *ast.ValueSpec: - for _, id := range s.Names { - check(id, thing) - } - } - } - case *ast.InterfaceType: - // Do not check interface method names. - // They are often constrainted by the method names of concrete types. - for _, x := range v.Methods.List { - ft, ok := x.Type.(*ast.FuncType) - if !ok { // might be an embedded interface name - continue - } - checkList(ft.Params, "interface method parameter") - checkList(ft.Results, "interface method result") - } - case *ast.RangeStmt: - if v.Tok == token.ASSIGN { - return true - } - if id, ok := v.Key.(*ast.Ident); ok { - check(id, "range var") - } - if id, ok := v.Value.(*ast.Ident); ok { - check(id, "range var") - } - case *ast.StructType: - for _, f := range v.Fields.List { - for _, id := range f.Names { - check(id, "struct field") - } - } - } - return true - }) -} - -// lintName returns a different name if it should be different. -func lintName(name string) (should string) { - // Fast path for simple cases: "_" and all lowercase. 
- if name == "_" { - return name - } - allLower := true - for _, r := range name { - if !unicode.IsLower(r) { - allLower = false - break - } - } - if allLower { - return name - } - - // Split camelCase at any lower->upper transition, and split on underscores. - // Check each word for common initialisms. - runes := []rune(name) - w, i := 0, 0 // index of start of word, scan - for i+1 <= len(runes) { - eow := false // whether we hit the end of a word - if i+1 == len(runes) { - eow = true - } else if runes[i+1] == '_' { - // underscore; shift the remainder forward over any run of underscores - eow = true - n := 1 - for i+n+1 < len(runes) && runes[i+n+1] == '_' { - n++ - } - - // Leave at most one underscore if the underscore is between two digits - if i+n+1 < len(runes) && unicode.IsDigit(runes[i]) && unicode.IsDigit(runes[i+n+1]) { - n-- - } - - copy(runes[i+1:], runes[i+n+1:]) - runes = runes[:len(runes)-n] - } else if unicode.IsLower(runes[i]) && !unicode.IsLower(runes[i+1]) { - // lower->non-lower - eow = true - } - i++ - if !eow { - continue - } - - // [w,i) is a word. - word := string(runes[w:i]) - if u := strings.ToUpper(word); commonInitialisms[u] { - // Keep consistent case, which is lowercase only at the start. - if w == 0 && unicode.IsLower(runes[w]) { - u = strings.ToLower(u) - } - // All the common initialisms are ASCII, - // so we can replace the bytes exactly. - copy(runes[w:], []rune(u)) - } else if w > 0 && strings.ToLower(word) == word { - // already all lowercase, and not the first word, so uppercase the first character. - runes[w] = unicode.ToUpper(runes[w]) - } - w = i - } - return string(runes) -} - -// commonInitialisms is a set of common initialisms. -// Only add entries that are highly unlikely to be non-initialisms. -// For instance, "ID" is fine (Freudian code is rare), but "AND" is not. -var commonInitialisms = map[string]bool{ - "ACL": true, - "API": true, - "ASCII": true, - "CPU": true, - "CSS": true, - "DNS": true, - "EOF": true, - "GUID": true, - "HTML": true, - "HTTP": true, - "HTTPS": true, - "ID": true, - "IP": true, - "JSON": true, - "LHS": true, - "QPS": true, - "RAM": true, - "RHS": true, - "RPC": true, - "SLA": true, - "SMTP": true, - "SQL": true, - "SSH": true, - "TCP": true, - "TLS": true, - "TTL": true, - "UDP": true, - "UI": true, - "UID": true, - "UUID": true, - "URI": true, - "URL": true, - "UTF8": true, - "VM": true, - "XML": true, - "XMPP": true, - "XSRF": true, - "XSS": true, -} - -// lintTypeDoc examines the doc comment on a type. -// It complains if they are missing from an exported type, -// or if they are not of the standard form. -func (f *file) lintTypeDoc(t *ast.TypeSpec, doc *ast.CommentGroup) { - if !ast.IsExported(t.Name.Name) { - return - } - if doc == nil { - f.errorf(t, 1, link(docCommentsLink), category("comments"), "exported type %v should have comment or be unexported", t.Name) - return - } - - s := doc.Text() - articles := [...]string{"A", "An", "The"} - for _, a := range articles { - if strings.HasPrefix(s, a+" ") { - s = s[len(a)+1:] - break - } - } - if !strings.HasPrefix(s, t.Name.Name+" ") { - f.errorf(doc, 1, link(docCommentsLink), category("comments"), `comment on exported type %v should be of the form "%v ..." (with optional leading article)`, t.Name, t.Name) - } -} - -var commonMethods = map[string]bool{ - "Error": true, - "Read": true, - "ServeHTTP": true, - "String": true, - "Write": true, -} - -// lintFuncDoc examines doc comments on functions and methods. 
-// It complains if they are missing, or not of the right form. -// It has specific exclusions for well-known methods (see commonMethods above). -func (f *file) lintFuncDoc(fn *ast.FuncDecl) { - if !ast.IsExported(fn.Name.Name) { - // func is unexported - return - } - kind := "function" - name := fn.Name.Name - if fn.Recv != nil && len(fn.Recv.List) > 0 { - // method - kind = "method" - recv := receiverType(fn) - if !ast.IsExported(recv) { - // receiver is unexported - return - } - if commonMethods[name] { - return - } - switch name { - case "Len", "Less", "Swap": - if f.pkg.sortable[recv] { - return - } - } - name = recv + "." + name - } - if fn.Doc == nil { - f.errorf(fn, 1, link(docCommentsLink), category("comments"), "exported %s %s should have comment or be unexported", kind, name) - return - } - s := fn.Doc.Text() - prefix := fn.Name.Name + " " - if !strings.HasPrefix(s, prefix) { - f.errorf(fn.Doc, 1, link(docCommentsLink), category("comments"), `comment on exported %s %s should be of the form "%s..."`, kind, name, prefix) - } -} - -// lintValueSpecDoc examines package-global variables and constants. -// It complains if they are not individually declared, -// or if they are not suitably documented in the right form (unless they are in a block that is commented). -func (f *file) lintValueSpecDoc(vs *ast.ValueSpec, gd *ast.GenDecl, genDeclMissingComments map[*ast.GenDecl]bool) { - kind := "var" - if gd.Tok == token.CONST { - kind = "const" - } - - if len(vs.Names) > 1 { - // Check that none are exported except for the first. - for _, n := range vs.Names[1:] { - if ast.IsExported(n.Name) { - f.errorf(vs, 1, category("comments"), "exported %s %s should have its own declaration", kind, n.Name) - return - } - } - } - - // Only one name. - name := vs.Names[0].Name - if !ast.IsExported(name) { - return - } - - if vs.Doc == nil && gd.Doc == nil { - if genDeclMissingComments[gd] { - return - } - block := "" - if kind == "const" && gd.Lparen.IsValid() { - block = " (or a comment on this block)" - } - f.errorf(vs, 1, link(docCommentsLink), category("comments"), "exported %s %s should have comment%s or be unexported", kind, name, block) - genDeclMissingComments[gd] = true - return - } - // If this GenDecl has parens and a comment, we don't check its comment form. - if gd.Lparen.IsValid() && gd.Doc != nil { - return - } - // The relevant text to check will be on either vs.Doc or gd.Doc. - // Use vs.Doc preferentially. - doc := vs.Doc - if doc == nil { - doc = gd.Doc - } - prefix := name + " " - if !strings.HasPrefix(doc.Text(), prefix) { - f.errorf(doc, 1, link(docCommentsLink), category("comments"), `comment on exported %s %s should be of the form "%s..."`, kind, name, prefix) - } -} - -func (f *file) checkStutter(id *ast.Ident, thing string) { - pkg, name := f.f.Name.Name, id.Name - if !ast.IsExported(name) { - // unexported name - return - } - // A name stutters if the package name is a strict prefix - // and the next character of the name starts a new word. - if len(name) <= len(pkg) { - // name is too short to stutter. - // This permits the name to be the same as the package name. - return - } - if !strings.EqualFold(pkg, name[:len(pkg)]) { - return - } - // We can assume the name is well-formed UTF-8. - // If the next rune after the package name is uppercase or an underscore - // the it's starting a new word and thus this name stutters. 
- rem := name[len(pkg):] - if next, _ := utf8.DecodeRuneInString(rem); next == '_' || unicode.IsUpper(next) { - f.errorf(id, 0.8, link(styleGuideBase+"#package-names"), category("naming"), "%s name will be used as %s.%s by other packages, and that stutters; consider calling this %s", thing, pkg, name, rem) - } -} - -// zeroLiteral is a set of ast.BasicLit values that are zero values. -// It is not exhaustive. -var zeroLiteral = map[string]bool{ - "false": true, // bool - // runes - `'\x00'`: true, - `'\000'`: true, - // strings - `""`: true, - "``": true, - // numerics - "0": true, - "0.": true, - "0.0": true, - "0i": true, -} - -// lintElses examines else blocks. It complains about any else block whose if block ends in a return. -func (f *file) lintElses() { - // We don't want to flag if { } else if { } else { } constructions. - // They will appear as an IfStmt whose Else field is also an IfStmt. - // Record such a node so we ignore it when we visit it. - ignore := make(map[*ast.IfStmt]bool) - - f.walk(func(node ast.Node) bool { - ifStmt, ok := node.(*ast.IfStmt) - if !ok || ifStmt.Else == nil { - return true - } - if elseif, ok := ifStmt.Else.(*ast.IfStmt); ok { - ignore[elseif] = true - return true - } - if ignore[ifStmt] { - return true - } - if _, ok := ifStmt.Else.(*ast.BlockStmt); !ok { - // only care about elses without conditions - return true - } - if len(ifStmt.Body.List) == 0 { - return true - } - shortDecl := false // does the if statement have a ":=" initialization statement? - if ifStmt.Init != nil { - if as, ok := ifStmt.Init.(*ast.AssignStmt); ok && as.Tok == token.DEFINE { - shortDecl = true - } - } - lastStmt := ifStmt.Body.List[len(ifStmt.Body.List)-1] - if _, ok := lastStmt.(*ast.ReturnStmt); ok { - extra := "" - if shortDecl { - extra = " (move short variable declaration to its own line if necessary)" - } - f.errorf(ifStmt.Else, 1, link(styleGuideBase+"#indent-error-flow"), category("indent"), "if block ends with a return statement, so drop this else and outdent its block"+extra) - } - return true - }) -} - -// lintRanges examines range clauses. It complains about redundant constructions. -func (f *file) lintRanges() { - f.walk(func(node ast.Node) bool { - rs, ok := node.(*ast.RangeStmt) - if !ok { - return true - } - - if isIdent(rs.Key, "_") && (rs.Value == nil || isIdent(rs.Value, "_")) { - p := f.errorf(rs.Key, 1, category("range-loop"), "should omit values from range; this loop is equivalent to `for range ...`") - - newRS := *rs // shallow copy - newRS.Value = nil - newRS.Key = nil - p.ReplacementLine = f.firstLineOf(&newRS, rs) - - return true - } - - if isIdent(rs.Value, "_") { - p := f.errorf(rs.Value, 1, category("range-loop"), "should omit 2nd value from range; this loop is equivalent to `for %s %s range ...`", f.render(rs.Key), rs.Tok) - - newRS := *rs // shallow copy - newRS.Value = nil - p.ReplacementLine = f.firstLineOf(&newRS, rs) - } - - return true - }) -} - -// lintErrorf examines errors.New and testing.Error calls. It complains if its only argument is an fmt.Sprintf invocation. 
-func (f *file) lintErrorf() { - f.walk(func(node ast.Node) bool { - ce, ok := node.(*ast.CallExpr) - if !ok || len(ce.Args) != 1 { - return true - } - isErrorsNew := isPkgDot(ce.Fun, "errors", "New") - var isTestingError bool - se, ok := ce.Fun.(*ast.SelectorExpr) - if ok && se.Sel.Name == "Error" { - if typ := f.pkg.typeOf(se.X); typ != nil { - isTestingError = typ.String() == "*testing.T" - } - } - if !isErrorsNew && !isTestingError { - return true - } - if !f.imports("errors") { - return true - } - arg := ce.Args[0] - ce, ok = arg.(*ast.CallExpr) - if !ok || !isPkgDot(ce.Fun, "fmt", "Sprintf") { - return true - } - errorfPrefix := "fmt" - if isTestingError { - errorfPrefix = f.render(se.X) - } - p := f.errorf(node, 1, category("errors"), "should replace %s(fmt.Sprintf(...)) with %s.Errorf(...)", f.render(se), errorfPrefix) - - m := f.srcLineWithMatch(ce, `^(.*)`+f.render(se)+`\(fmt\.Sprintf\((.*)\)\)(.*)$`) - if m != nil { - p.ReplacementLine = m[1] + errorfPrefix + ".Errorf(" + m[2] + ")" + m[3] - } - - return true - }) -} - -// lintErrors examines global error vars. It complains if they aren't named in the standard way. -func (f *file) lintErrors() { - for _, decl := range f.f.Decls { - gd, ok := decl.(*ast.GenDecl) - if !ok || gd.Tok != token.VAR { - continue - } - for _, spec := range gd.Specs { - spec := spec.(*ast.ValueSpec) - if len(spec.Names) != 1 || len(spec.Values) != 1 { - continue - } - ce, ok := spec.Values[0].(*ast.CallExpr) - if !ok { - continue - } - if !isPkgDot(ce.Fun, "errors", "New") && !isPkgDot(ce.Fun, "fmt", "Errorf") { - continue - } - - id := spec.Names[0] - prefix := "err" - if id.IsExported() { - prefix = "Err" - } - if !strings.HasPrefix(id.Name, prefix) { - f.errorf(id, 0.9, category("naming"), "error var %s should have name of the form %sFoo", id.Name, prefix) - } - } - } -} - -func lintErrorString(s string) (isClean bool, conf float64) { - const basicConfidence = 0.8 - const capConfidence = basicConfidence - 0.2 - first, firstN := utf8.DecodeRuneInString(s) - last, _ := utf8.DecodeLastRuneInString(s) - if last == '.' || last == ':' || last == '!' || last == '\n' { - return false, basicConfidence - } - if unicode.IsUpper(first) { - // People use proper nouns and exported Go identifiers in error strings, - // so decrease the confidence of warnings for capitalization. - if len(s) <= firstN { - return false, capConfidence - } - // Flag strings starting with something that doesn't look like an initialism. - if second, _ := utf8.DecodeRuneInString(s[firstN:]); !unicode.IsUpper(second) { - return false, capConfidence - } - } - return true, 0 -} - -// lintErrorStrings examines error strings. -// It complains if they are capitalized or end in punctuation or a newline. -func (f *file) lintErrorStrings() { - f.walk(func(node ast.Node) bool { - ce, ok := node.(*ast.CallExpr) - if !ok { - return true - } - if !isPkgDot(ce.Fun, "errors", "New") && !isPkgDot(ce.Fun, "fmt", "Errorf") { - return true - } - if len(ce.Args) < 1 { - return true - } - str, ok := ce.Args[0].(*ast.BasicLit) - if !ok || str.Kind != token.STRING { - return true - } - s, _ := strconv.Unquote(str.Value) // can assume well-formed Go - if s == "" { - return true - } - clean, conf := lintErrorString(s) - if clean { - return true - } - - f.errorf(str, conf, link(styleGuideBase+"#error-strings"), category("errors"), - "error strings should not be capitalized or end with punctuation or a newline") - return true - }) -} - -// lintReceiverNames examines receiver names. 
It complains about inconsistent -// names used for the same type and names such as "this". -func (f *file) lintReceiverNames() { - typeReceiver := map[string]string{} - f.walk(func(n ast.Node) bool { - fn, ok := n.(*ast.FuncDecl) - if !ok || fn.Recv == nil || len(fn.Recv.List) == 0 { - return true - } - names := fn.Recv.List[0].Names - if len(names) < 1 { - return true - } - name := names[0].Name - const ref = styleGuideBase + "#receiver-names" - if name == "_" { - f.errorf(n, 1, link(ref), category("naming"), `receiver name should not be an underscore, omit the name if it is unused`) - return true - } - if name == "this" || name == "self" { - f.errorf(n, 1, link(ref), category("naming"), `receiver name should be a reflection of its identity; don't use generic names such as "this" or "self"`) - return true - } - recv := receiverType(fn) - if prev, ok := typeReceiver[recv]; ok && prev != name { - f.errorf(n, 1, link(ref), category("naming"), "receiver name %s should be consistent with previous receiver name %s for %s", name, prev, recv) - return true - } - typeReceiver[recv] = name - return true - }) -} - -// lintIncDec examines statements that increment or decrement a variable. -// It complains if they don't use x++ or x--. -func (f *file) lintIncDec() { - f.walk(func(n ast.Node) bool { - as, ok := n.(*ast.AssignStmt) - if !ok { - return true - } - if len(as.Lhs) != 1 { - return true - } - if !isOne(as.Rhs[0]) { - return true - } - var suffix string - switch as.Tok { - case token.ADD_ASSIGN: - suffix = "++" - case token.SUB_ASSIGN: - suffix = "--" - default: - return true - } - f.errorf(as, 0.8, category("unary-op"), "should replace %s with %s%s", f.render(as), f.render(as.Lhs[0]), suffix) - return true - }) -} - -// lintErrorReturn examines function declarations that return an error. -// It complains if the error isn't the last parameter. -func (f *file) lintErrorReturn() { - f.walk(func(n ast.Node) bool { - fn, ok := n.(*ast.FuncDecl) - if !ok || fn.Type.Results == nil { - return true - } - ret := fn.Type.Results.List - if len(ret) <= 1 { - return true - } - if isIdent(ret[len(ret)-1].Type, "error") { - return true - } - // An error return parameter should be the last parameter. - // Flag any error parameters found before the last. - for _, r := range ret[:len(ret)-1] { - if isIdent(r.Type, "error") { - f.errorf(fn, 0.9, category("arg-order"), "error should be the last type when returning multiple items") - break // only flag one - } - } - return true - }) -} - -// lintUnexportedReturn examines exported function declarations. -// It complains if any return an unexported type. -func (f *file) lintUnexportedReturn() { - f.walk(func(n ast.Node) bool { - fn, ok := n.(*ast.FuncDecl) - if !ok { - return true - } - if fn.Type.Results == nil { - return false - } - if !fn.Name.IsExported() { - return false - } - thing := "func" - if fn.Recv != nil && len(fn.Recv.List) > 0 { - thing = "method" - if !ast.IsExported(receiverType(fn)) { - // Don't report exported methods of unexported types, - // such as private implementations of sort.Interface. - return false - } - } - for _, ret := range fn.Type.Results.List { - typ := f.pkg.typeOf(ret.Type) - if exportedType(typ) { - continue - } - f.errorf(ret.Type, 0.8, category("unexported-type-in-api"), - "exported %s %s returns unexported type %s, which can be annoying to use", - thing, fn.Name.Name, typ) - break // only flag one - } - return false - }) -} - -// exportedType reports whether typ is an exported type. 
-// It is imprecise, and will err on the side of returning true, -// such as for composite types. -func exportedType(typ types.Type) bool { - switch T := typ.(type) { - case *types.Named: - // Builtin types have no package. - return T.Obj().Pkg() == nil || T.Obj().Exported() - case *types.Map: - return exportedType(T.Key()) && exportedType(T.Elem()) - case interface { - Elem() types.Type - }: // array, slice, pointer, chan - return exportedType(T.Elem()) - } - // Be conservative about other types, such as struct, interface, etc. - return true -} - -// timeSuffixes is a list of name suffixes that imply a time unit. -// This is not an exhaustive list. -var timeSuffixes = []string{ - "Sec", "Secs", "Seconds", - "Msec", "Msecs", - "Milli", "Millis", "Milliseconds", - "Usec", "Usecs", "Microseconds", - "MS", "Ms", -} - -func (f *file) lintTimeNames() { - f.walk(func(node ast.Node) bool { - v, ok := node.(*ast.ValueSpec) - if !ok { - return true - } - for _, name := range v.Names { - origTyp := f.pkg.typeOf(name) - // Look for time.Duration or *time.Duration; - // the latter is common when using flag.Duration. - typ := origTyp - if pt, ok := typ.(*types.Pointer); ok { - typ = pt.Elem() - } - if !f.pkg.isNamedType(typ, "time", "Duration") { - continue - } - suffix := "" - for _, suf := range timeSuffixes { - if strings.HasSuffix(name.Name, suf) { - suffix = suf - break - } - } - if suffix == "" { - continue - } - f.errorf(v, 0.9, category("time"), "var %s is of type %v; don't use unit-specific suffix %q", name.Name, origTyp, suffix) - } - return true - }) -} - -// lintContextKeyTypes checks for call expressions to context.WithValue with -// basic types used for the key argument. -// See: https://golang.org/issue/17293 -func (f *file) lintContextKeyTypes() { - f.walk(func(node ast.Node) bool { - switch node := node.(type) { - case *ast.CallExpr: - f.checkContextKeyType(node) - } - - return true - }) -} - -// checkContextKeyType reports an error if the call expression calls -// context.WithValue with a key argument of basic type. -func (f *file) checkContextKeyType(x *ast.CallExpr) { - sel, ok := x.Fun.(*ast.SelectorExpr) - if !ok { - return - } - pkg, ok := sel.X.(*ast.Ident) - if !ok || pkg.Name != "context" { - return - } - if sel.Sel.Name != "WithValue" { - return - } - - // key is second argument to context.WithValue - if len(x.Args) != 3 { - return - } - key := f.pkg.typesInfo.Types[x.Args[1]] - - if ktyp, ok := key.Type.(*types.Basic); ok && ktyp.Kind() != types.Invalid { - f.errorf(x, 1.0, category("context"), fmt.Sprintf("should not use basic type %s as key in context.WithValue", key.Type)) - } -} - -// lintContextArgs examines function declarations that contain an -// argument with a type of context.Context -// It complains if that argument isn't the first parameter. -func (f *file) lintContextArgs() { - f.walk(func(n ast.Node) bool { - fn, ok := n.(*ast.FuncDecl) - if !ok || len(fn.Type.Params.List) <= 1 { - return true - } - // A context.Context should be the first parameter of a function. - // Flag any that show up after the first. - for _, arg := range fn.Type.Params.List[1:] { - if isPkgDot(arg.Type, "context", "Context") { - f.errorf(fn, 0.9, link("https://golang.org/pkg/context/"), category("arg-order"), "context.Context should be the first parameter of a function") - break // only flag one - } - } - return true - }) -} - -// containsComments returns whether the interval [start, end) contains any -// comments without "// MATCH " prefix. 
-func (f *file) containsComments(start, end token.Pos) bool { - for _, cgroup := range f.f.Comments { - comments := cgroup.List - if comments[0].Slash >= end { - // All comments starting with this group are after end pos. - return false - } - if comments[len(comments)-1].Slash < start { - // Comments group ends before start pos. - continue - } - for _, c := range comments { - if start <= c.Slash && c.Slash < end && !strings.HasPrefix(c.Text, "// MATCH ") { - return true - } - } - } - return false -} - -// receiverType returns the named type of the method receiver, sans "*", -// or "invalid-type" if fn.Recv is ill formed. -func receiverType(fn *ast.FuncDecl) string { - switch e := fn.Recv.List[0].Type.(type) { - case *ast.Ident: - return e.Name - case *ast.StarExpr: - if id, ok := e.X.(*ast.Ident); ok { - return id.Name - } - } - // The parser accepts much more than just the legal forms. - return "invalid-type" -} - -func (f *file) walk(fn func(ast.Node) bool) { - ast.Walk(walker(fn), f.f) -} - -func (f *file) render(x interface{}) string { - var buf bytes.Buffer - if err := printer.Fprint(&buf, f.fset, x); err != nil { - panic(err) - } - return buf.String() -} - -func (f *file) debugRender(x interface{}) string { - var buf bytes.Buffer - if err := ast.Fprint(&buf, f.fset, x, nil); err != nil { - panic(err) - } - return buf.String() -} - -// walker adapts a function to satisfy the ast.Visitor interface. -// The function return whether the walk should proceed into the node's children. -type walker func(ast.Node) bool - -func (w walker) Visit(node ast.Node) ast.Visitor { - if w(node) { - return w - } - return nil -} - -func isIdent(expr ast.Expr, ident string) bool { - id, ok := expr.(*ast.Ident) - return ok && id.Name == ident -} - -// isBlank returns whether id is the blank identifier "_". -// If id == nil, the answer is false. -func isBlank(id *ast.Ident) bool { return id != nil && id.Name == "_" } - -func isPkgDot(expr ast.Expr, pkg, name string) bool { - sel, ok := expr.(*ast.SelectorExpr) - return ok && isIdent(sel.X, pkg) && isIdent(sel.Sel, name) -} - -func isOne(expr ast.Expr) bool { - lit, ok := expr.(*ast.BasicLit) - return ok && lit.Kind == token.INT && lit.Value == "1" -} - -func isCgoExported(f *ast.FuncDecl) bool { - if f.Recv != nil || f.Doc == nil { - return false - } - - cgoExport := regexp.MustCompile(fmt.Sprintf("(?m)^//export %s$", regexp.QuoteMeta(f.Name.Name))) - for _, c := range f.Doc.List { - if cgoExport.MatchString(c.Text) { - return true - } - } - return false -} - -var basicTypeKinds = map[types.BasicKind]string{ - types.UntypedBool: "bool", - types.UntypedInt: "int", - types.UntypedRune: "rune", - types.UntypedFloat: "float64", - types.UntypedComplex: "complex128", - types.UntypedString: "string", -} - -// isUntypedConst reports whether expr is an untyped constant, -// and indicates what its default type is. -// scope may be nil. -func (f *file) isUntypedConst(expr ast.Expr) (defType string, ok bool) { - // Re-evaluate expr outside of its context to see if it's untyped. - // (An expr evaluated within, for example, an assignment context will get the type of the LHS.) - exprStr := f.render(expr) - tv, err := types.Eval(f.fset, f.pkg.typesPkg, expr.Pos(), exprStr) - if err != nil { - return "", false - } - if b, ok := tv.Type.(*types.Basic); ok { - if dt, ok := basicTypeKinds[b.Kind()]; ok { - return dt, true - } - } - - return "", false -} - -// firstLineOf renders the given node and returns its first line. -// It will also match the indentation of another node. 
-func (f *file) firstLineOf(node, match ast.Node) string { - line := f.render(node) - if i := strings.Index(line, "\n"); i >= 0 { - line = line[:i] - } - return f.indentOf(match) + line -} - -func (f *file) indentOf(node ast.Node) string { - line := srcLine(f.src, f.fset.Position(node.Pos())) - for i, r := range line { - switch r { - case ' ', '\t': - default: - return line[:i] - } - } - return line // unusual or empty line -} - -func (f *file) srcLineWithMatch(node ast.Node, pattern string) (m []string) { - line := srcLine(f.src, f.fset.Position(node.Pos())) - line = strings.TrimSuffix(line, "\n") - rx := regexp.MustCompile(pattern) - return rx.FindStringSubmatch(line) -} - -// imports returns true if the current file imports the specified package path. -func (f *file) imports(importPath string) bool { - all := astutil.Imports(f.fset, f.f) - for _, p := range all { - for _, i := range p { - uq, err := strconv.Unquote(i.Path.Value) - if err == nil && importPath == uq { - return true - } - } - } - return false -} - -// srcLine returns the complete line at p, including the terminating newline. -func srcLine(src []byte, p token.Position) string { - // Run to end of line in both directions if not at line start/end. - lo, hi := p.Offset, p.Offset+1 - for lo > 0 && src[lo-1] != '\n' { - lo-- - } - for hi < len(src) && src[hi-1] != '\n' { - hi++ - } - return string(src[lo:hi]) -} diff --git a/vendor/github.com/golangci/maligned/LICENSE b/vendor/github.com/golangci/maligned/LICENSE deleted file mode 100644 index 744875676..000000000 --- a/vendor/github.com/golangci/maligned/LICENSE +++ /dev/null @@ -1,27 +0,0 @@ -Copyright (c) 2012 The Go Authors. All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are -met: - - * Redistributions of source code must retain the above copyright -notice, this list of conditions and the following disclaimer. - * Redistributions in binary form must reproduce the above -copyright notice, this list of conditions and the following disclaimer -in the documentation and/or other materials provided with the -distribution. - * Neither the name of Google Inc. nor the names of its -contributors may be used to endorse or promote products derived from -this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
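Among the removed lint-1 checks above, checkContextKeyType flags context.WithValue calls whose key argument has a basic type. A minimal sketch of the idiom that check steers toward — the requestIDKey type below is hypothetical and not taken from the removed sources:

package main

import (
	"context"
	"fmt"
)

// An unexported, zero-size named type avoids key collisions between
// packages, which is why the removed check rejects basic types as keys.
type requestIDKey struct{}

func main() {
	// context.WithValue(parent, "requestID", ...) would be flagged;
	// a dedicated key type is not.
	ctx := context.WithValue(context.Background(), requestIDKey{}, "abc-123")
	fmt.Println(ctx.Value(requestIDKey{})) // abc-123
}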
diff --git a/vendor/github.com/golangci/maligned/README b/vendor/github.com/golangci/maligned/README deleted file mode 100644 index 4e57f6eab..000000000 --- a/vendor/github.com/golangci/maligned/README +++ /dev/null @@ -1,7 +0,0 @@ -Install: - - go get github.com/mdempsky/maligned - -Usage: - - maligned cmd/compile/internal/gc cmd/link/internal/ld diff --git a/vendor/github.com/golangci/maligned/maligned.go b/vendor/github.com/golangci/maligned/maligned.go deleted file mode 100644 index c2492b2ff..000000000 --- a/vendor/github.com/golangci/maligned/maligned.go +++ /dev/null @@ -1,253 +0,0 @@ -// Copyright 2013 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package maligned - -import ( - "fmt" - "go/ast" - "go/build" - "go/token" - "go/types" - "sort" - "strings" - - "golang.org/x/tools/go/loader" -) - -var fset = token.NewFileSet() - -type Issue struct { - OldSize, NewSize int - NewStructDef string - Pos token.Position -} - -func Run(prog *loader.Program) []Issue { - flagVerbose := true - fset = prog.Fset - - var issues []Issue - - for _, pkg := range prog.InitialPackages() { - for _, file := range pkg.Files { - ast.Inspect(file, func(node ast.Node) bool { - if s, ok := node.(*ast.StructType); ok { - i := malign(node.Pos(), pkg.Types[s].Type.(*types.Struct), flagVerbose) - if i != nil { - issues = append(issues, *i) - } - } - return true - }) - } - } - - return issues -} - -func malign(pos token.Pos, str *types.Struct, verbose bool) *Issue { - wordSize := int64(8) - maxAlign := int64(8) - switch build.Default.GOARCH { - case "386", "arm": - wordSize, maxAlign = 4, 4 - case "amd64p32": - wordSize = 4 - } - - s := gcSizes{wordSize, maxAlign} - sz := s.Sizeof(str) - opt, fields := optimalSize(str, &s, verbose) - if sz == opt { - return nil - } - - newStructDefParts := []string{"struct{"} - - var w int - for _, f := range fields { - if n := len(f.Name()); n > w { - w = n - } - } - spaces := strings.Repeat(" ", w) - for _, f := range fields { - line := fmt.Sprintf("\t%s%s\t%s,", f.Name(), spaces[len(f.Name()):], f.Type().String()) - newStructDefParts = append(newStructDefParts, line) - } - newStructDefParts = append(newStructDefParts, "}") - - return &Issue{ - OldSize: int(sz), - NewSize: int(opt), - NewStructDef: strings.Join(newStructDefParts, "\n"), - Pos: fset.Position(pos), - } -} - -func optimalSize(str *types.Struct, sizes *gcSizes, stable bool) (int64, []*types.Var) { - nf := str.NumFields() - fields := make([]*types.Var, nf) - alignofs := make([]int64, nf) - sizeofs := make([]int64, nf) - for i := 0; i < nf; i++ { - fields[i] = str.Field(i) - ft := fields[i].Type() - alignofs[i] = sizes.Alignof(ft) - sizeofs[i] = sizes.Sizeof(ft) - } - if stable { // Stable keeps as much of the order as possible, but slower - sort.Stable(&byAlignAndSize{fields, alignofs, sizeofs}) - } else { - sort.Sort(&byAlignAndSize{fields, alignofs, sizeofs}) - } - return sizes.Sizeof(types.NewStruct(fields, nil)), fields -} - -type byAlignAndSize struct { - fields []*types.Var - alignofs []int64 - sizeofs []int64 -} - -func (s *byAlignAndSize) Len() int { return len(s.fields) } -func (s *byAlignAndSize) Swap(i, j int) { - s.fields[i], s.fields[j] = s.fields[j], s.fields[i] - s.alignofs[i], s.alignofs[j] = s.alignofs[j], s.alignofs[i] - s.sizeofs[i], s.sizeofs[j] = s.sizeofs[j], s.sizeofs[i] -} - -func (s *byAlignAndSize) Less(i, j int) bool { - // Place zero sized objects before non-zero sized objects. 
- if s.sizeofs[i] == 0 && s.sizeofs[j] != 0 { - return true - } - if s.sizeofs[j] == 0 && s.sizeofs[i] != 0 { - return false - } - - // Next, place more tightly aligned objects before less tightly aligned objects. - if s.alignofs[i] != s.alignofs[j] { - return s.alignofs[i] > s.alignofs[j] - } - - // Lastly, order by size. - if s.sizeofs[i] != s.sizeofs[j] { - return s.sizeofs[i] > s.sizeofs[j] - } - - return false -} - -// Code below based on go/types.StdSizes. - -type gcSizes struct { - WordSize int64 - MaxAlign int64 -} - -func (s *gcSizes) Alignof(T types.Type) int64 { - // NOTE: On amd64, complex64 is 8 byte aligned, - // even though float32 is only 4 byte aligned. - - // For arrays and structs, alignment is defined in terms - // of alignment of the elements and fields, respectively. - switch t := T.Underlying().(type) { - case *types.Array: - // spec: "For a variable x of array type: unsafe.Alignof(x) - // is the same as unsafe.Alignof(x[0]), but at least 1." - return s.Alignof(t.Elem()) - case *types.Struct: - // spec: "For a variable x of struct type: unsafe.Alignof(x) - // is the largest of the values unsafe.Alignof(x.f) for each - // field f of x, but at least 1." - max := int64(1) - for i, nf := 0, t.NumFields(); i < nf; i++ { - if a := s.Alignof(t.Field(i).Type()); a > max { - max = a - } - } - return max - } - a := s.Sizeof(T) // may be 0 - // spec: "For a variable x of any type: unsafe.Alignof(x) is at least 1." - if a < 1 { - return 1 - } - if a > s.MaxAlign { - return s.MaxAlign - } - return a -} - -var basicSizes = [...]byte{ - types.Bool: 1, - types.Int8: 1, - types.Int16: 2, - types.Int32: 4, - types.Int64: 8, - types.Uint8: 1, - types.Uint16: 2, - types.Uint32: 4, - types.Uint64: 8, - types.Float32: 4, - types.Float64: 8, - types.Complex64: 8, - types.Complex128: 16, -} - -func (s *gcSizes) Sizeof(T types.Type) int64 { - switch t := T.Underlying().(type) { - case *types.Basic: - k := t.Kind() - if int(k) < len(basicSizes) { - if s := basicSizes[k]; s > 0 { - return int64(s) - } - } - if k == types.String { - return s.WordSize * 2 - } - case *types.Array: - n := t.Len() - if n == 0 { - return 0 - } - a := s.Alignof(t.Elem()) - z := s.Sizeof(t.Elem()) - return align(z, a)*(n-1) + z - case *types.Slice: - return s.WordSize * 3 - case *types.Struct: - nf := t.NumFields() - if nf == 0 { - return 0 - } - - var o int64 - max := int64(1) - for i := 0; i < nf; i++ { - ft := t.Field(i).Type() - a, sz := s.Alignof(ft), s.Sizeof(ft) - if a > max { - max = a - } - if i == nf-1 && sz == 0 && o != 0 { - sz = 1 - } - o = align(o, a) + sz - } - return align(o, max) - case *types.Interface: - return s.WordSize * 2 - } - return s.WordSize // catch-all -} - -// align returns the smallest y >= x such that y % a == 0. 
-func align(x, a int64) int64 { - y := x + a - 1 - return y - y%a -} diff --git a/vendor/github.com/golangci/misspell/.gitignore b/vendor/github.com/golangci/misspell/.gitignore deleted file mode 100644 index 5e5c368f8..000000000 --- a/vendor/github.com/golangci/misspell/.gitignore +++ /dev/null @@ -1,37 +0,0 @@ -dist/ -bin/ -vendor/ - -.idea/ -/misspell - -# editor turds -*~ -*.gz -*.bz2 -*.csv - -# Compiled Object files, Static and Dynamic libs (Shared Objects) -*.o -*.a -*.so - -# Folders -_obj -_test - -# Architecture specific extensions/prefixes -*.[568vq] -[568vq].out - -*.cgo1.go -*.cgo2.c -_cgo_defun.c -_cgo_gotypes.go -_cgo_export.* - -_testmain.go - -*.exe -*.test -*.prof diff --git a/vendor/github.com/golangci/misspell/.golangci.yml b/vendor/github.com/golangci/misspell/.golangci.yml deleted file mode 100644 index 31c566eab..000000000 --- a/vendor/github.com/golangci/misspell/.golangci.yml +++ /dev/null @@ -1,109 +0,0 @@ -run: - timeout: 2m - skip-files: [] - -linters-settings: - govet: - enable-all: true - disable: - - fieldalignment - - shadow # FIXME(ldez) must be fixed - gocyclo: - min-complexity: 16 - goconst: - min-len: 3 - min-occurrences: 3 - misspell: - locale: US - funlen: - lines: -1 - statements: 40 - gofumpt: - extra-rules: true - depguard: - rules: - main: - deny: - - pkg: "github.com/instana/testify" - desc: not allowed - - pkg: "github.com/pkg/errors" - desc: Should be replaced by standard lib errors package - godox: - keywords: - - FIXME - gocritic: - enabled-tags: - - diagnostic - - style - - performance - disabled-checks: - - sloppyReassign - - rangeValCopy - - octalLiteral - - paramTypeCombine # already handle by gofumpt.extra-rules - - exitAfterDefer # FIXME(ldez) must be fixed - - ifElseChain # FIXME(ldez) must be fixed - settings: - hugeParam: - sizeThreshold: 100 - forbidigo: - forbid: - - '^print(ln)?$' - - '^panic$' - - '^spew\.Print(f|ln)?$' - - '^spew\.Dump$' - -linters: - enable-all: true - disable: - - deadcode # deprecated - - exhaustivestruct # deprecated - - golint # deprecated - - ifshort # deprecated - - interfacer # deprecated - - maligned # deprecated - - nosnakecase # deprecated - - scopelint # deprecated - - scopelint # deprecated - - structcheck # deprecated - - varcheck # deprecated - - execinquery # not relevant (SQL) - - rowserrcheck # not relevant (SQL) - - sqlclosecheck # not relevant (SQL) - - cyclop # duplicate of gocyclo - - dupl - - exhaustive - - exhaustruct - - forbidigo - - gochecknoglobals - - gochecknoinits - - goerr113 - - gomnd - - lll - - nilnil - - nlreturn - - paralleltest - - prealloc - - testpackage - - tparallel - - varnamelen - - wrapcheck - - wsl - - misspell - - gosec # FIXME(ldez) must be fixed - - errcheck # FIXME(ldez) must be fixed - - nonamedreturns # FIXME(ldez) must be fixed - - nakedret # FIXME(ldez) must be fixed - -issues: - exclude-use-default: false - max-per-linter: 0 - max-same-issues: 0 - exclude: - - 'ST1000: at least one file in a package should have a package comment' - - 'package-comments: should have a package comment' - exclude-rules: - - path: .*_test.go - linters: - - funlen - - goconst diff --git a/vendor/github.com/golangci/misspell/Dockerfile b/vendor/github.com/golangci/misspell/Dockerfile deleted file mode 100644 index 788ce3a77..000000000 --- a/vendor/github.com/golangci/misspell/Dockerfile +++ /dev/null @@ -1,35 +0,0 @@ -FROM golang:1.19-alpine - -# cache buster -RUN echo 4 - -# git is needed for "go get" below -RUN apk add --no-cache git make - -# these are my standard testing / 
linting tools -RUN /bin/true \ - && rm -rf /go/src /go/pkg -# -# * SCOWL word list -# -# Downloads -# http://wordlist.aspell.net/dicts/ -# --> http://app.aspell.net/create -# - -# use en_US large size -# use regular size for others -ENV SOURCE_US_BIG http://app.aspell.net/create?max_size=70&spelling=US&max_variant=2&diacritic=both&special=hacker&special=roman-numerals&download=wordlist&encoding=utf-8&format=inline - -# should be able tell difference between English variations using this -ENV SOURCE_US http://app.aspell.net/create?max_size=60&spelling=US&max_variant=1&diacritic=both&download=wordlist&encoding=utf-8&format=inline -ENV SOURCE_GB_ISE http://app.aspell.net/create?max_size=60&spelling=GBs&max_variant=2&diacritic=both&download=wordlist&encoding=utf-8&format=inline -ENV SOURCE_GB_IZE http://app.aspell.net/create?max_size=60&spelling=GBz&max_variant=2&diacritic=both&download=wordlist&encoding=utf-8&format=inline -ENV SOURCE_CA http://app.aspell.net/create?max_size=60&spelling=CA&max_variant=2&diacritic=both&download=wordlist&encoding=utf-8&format=inline - -RUN /bin/true \ - && mkdir /scowl-wl \ - && wget -O /scowl-wl/words-US-60.txt ${SOURCE_US} \ - && wget -O /scowl-wl/words-GB-ise-60.txt ${SOURCE_GB_ISE} - -RUN git config --global --add safe.directory "/go/src/github.com/golangci/misspell" diff --git a/vendor/github.com/golangci/misspell/LICENSE b/vendor/github.com/golangci/misspell/LICENSE deleted file mode 100644 index bfcfcd301..000000000 --- a/vendor/github.com/golangci/misspell/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -The MIT License (MIT) - -Copyright (c) 2015-2017 Nick Galbreath - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. diff --git a/vendor/github.com/golangci/misspell/Makefile b/vendor/github.com/golangci/misspell/Makefile deleted file mode 100644 index 783f977cb..000000000 --- a/vendor/github.com/golangci/misspell/Makefile +++ /dev/null @@ -1,66 +0,0 @@ -CONTAINER=golangci/misspell - -default: lint test build - -install: ## install misspell into GOPATH/bin - go install ./cmd/misspell - -build: ## build misspell - go build ./cmd/misspell - -test: ## run all tests - go test -v . - -lint: ## run linter - golangci-lint run - -# the grep in line 2 is to remove misspellings in the spelling dictionary -# that trigger false positives!! 
-falsepositives: /scowl-wl - cat /scowl-wl/words-US-60.txt | \ - grep -i -v -E "payed|Tyre|Euclidian|nonoccurence|dependancy|reenforced|accidently|surprize|dependance|idealogy|binominal|causalities|conquerer|withing|casette|analyse|analogue|dialogue|paralyse|catalogue|archaeolog|clarinettist|catalyses|cancell|chisell|ageing|cataloguing" | \ - misspell -debug -error - cat /scowl-wl/words-GB-ise-60.txt | \ - grep -v -E "payed|nonoccurence|withing" | \ - misspell -locale=UK -debug -error -# cat /scowl-wl/words-GB-ize-60.txt | \ -# grep -v -E "withing" | \ -# misspell -debug -error -# cat /scowl-wl/words-CA-60.txt | \ -# grep -v -E "withing" | \ -# misspell -debug -error - -bench: ## run benchmarks - go test -bench '.*' - -clean: ## clean up time - rm -rf dist/ bin/ - go clean ./... - git gc --aggressive - -ci: docker-build ## run test like travis-ci does, requires docker - docker run --rm \ - -v $(PWD):/go/src/github.com/golangci/misspell \ - -w /go/src/github.com/golangci/misspell \ - ${CONTAINER} \ - make install falsepositives - -docker-build: ## build a docker test image - docker build -t ${CONTAINER} . - -docker-console: ## log into the test image - docker run --rm -it \ - -v $(PWD):/go/src/github.com/golangci/misspell \ - -w /go/src/github.com/golangci/misspell \ - ${CONTAINER} sh - -.PHONY: help ci console docker-build bench - -# https://www.client9.com/self-documenting-makefiles/ -help: - @awk -F ':|##' '/^[^\t].+?:.*?##/ {\ - printf "\033[36m%-30s\033[0m %s\n", $$1, $$NF \ - }' $(MAKEFILE_LIST) -.DEFAULT_GOAL=default -.PHONY=help - diff --git a/vendor/github.com/golangci/misspell/README.md b/vendor/github.com/golangci/misspell/README.md deleted file mode 100644 index cccd04996..000000000 --- a/vendor/github.com/golangci/misspell/README.md +++ /dev/null @@ -1,424 +0,0 @@ -[![Build Status](https://travis-ci.org/client9/misspell.svg?branch=master)](https://travis-ci.org/client9/misspell) [![Go Report Card](https://goreportcard.com/badge/github.com/client9/misspell)](https://goreportcard.com/report/github.com/client9/misspell) [![GoDoc](https://godoc.org/github.com/client9/misspell?status.svg)](https://godoc.org/github.com/client9/misspell) [![Coverage](http://gocover.io/_badge/github.com/client9/misspell)](http://gocover.io/github.com/client9/misspell) [![license](https://img.shields.io/badge/license-MIT-blue.svg?style=flat)](https://raw.githubusercontent.com/client9/misspell/master/LICENSE) - -Correct commonly misspelled English words... quickly. - -### Install - - -If you just want a binary and to start using `misspell`: - -``` -curl -L -o ./install-misspell.sh https://git.io/misspell -sh ./install-misspell.sh -``` - - -Both will install as `./bin/misspell`. You can adjust the download location using the `-b` flag. File a ticket if you want another platform supported. - - -If you use [Go](https://golang.org/), the best way to run `misspell` is by using [gometalinter](#gometalinter). 
Otherwise, install `misspell` the old-fashioned way: - -``` -go install github.com/client9/misspell/cmd/misspell@latest -``` - -and misspell will be in your `GOPATH` - - -Also if you like to live dangerously, one could do - -```bash -curl -L https://git.io/misspell | bash -``` - -### Usage - - -```bash -$ misspell all.html your.txt important.md files.go -your.txt:42:10 found "langauge" a misspelling of "language" - -# ^ file, line, column -``` - -``` -$ misspell -help -Usage of misspell: - -debug - Debug matching, very slow - -error - Exit with 2 if misspelling found - -f string - 'csv', 'sqlite3' or custom Golang template for output - -i string - ignore the following corrections, comma separated - -j int - Number of workers, 0 = number of CPUs - -legal - Show legal information and exit - -locale string - Correct spellings using locale perferances for US or UK. Default is to use a neutral variety of English. Setting locale to US will correct the British spelling of 'colour' to 'color' - -o string - output file or [stderr|stdout|] (default "stdout") - -q Do not emit misspelling output - -source string - Source mode: auto=guess, go=golang source, text=plain or markdown-like text (default "auto") - -w Overwrite file with corrections (default is just to display) -``` - -## FAQ - -* [Automatic Corrections](#correct) -* [Converting UK spellings to US](#locale) -* [Using pipes and stdin](#stdin) -* [Golang special support](#golang) -* [gometalinter support](#gometalinter) -* [CSV Output](#csv) -* [Using SQLite3](#sqlite) -* [Changing output format](#output) -* [Checking a folder recursively](#recursive) -* [Performance](#performance) -* [Known Issues](#issues) -* [Debugging](#debug) -* [False Negatives and missing words](#missing) -* [Origin of Word Lists](#words) -* [Software License](#license) -* [Problem statement](#problem) -* [Other spelling correctors](#others) -* [Other ideas](#otherideas) - - -### How can I make the corrections automatically? - -Just add the `-w` flag! - -``` -$ misspell -w all.html your.txt important.md files.go -your.txt:9:21:corrected "langauge" to "language" - -# ^ File is rewritten only if a misspelling is found -``` - - -### How do I convert British spellings to American (or vice-versa)? - -Add the `-locale US` flag! - -```bash -$ misspell -locale US important.txt -important.txt:10:20 found "colour" a misspelling of "color" -``` - -Add the `-locale UK` flag! - -```bash -$ echo "My favorite color is blue" | misspell -locale UK -stdin:1:3:found "favorite color" a misspelling of "favourite colour" -``` - -Help is appreciated as I'm neither British nor an -expert in the English language. - - -### How do you check an entire folder recursively? - -Just list a directory you'd like to check - -```bash -misspell . -misspell aDirectory anotherDirectory aFile -``` - -You can also run misspell recursively using the following shell tricks: - -```bash -misspell directory/**/* -``` - -or - -```bash -find . -type f | xargs misspell -``` - -You can select a type of file as well. The following examples selects all `.txt` files that are *not* in the `vendor` directory: - -```bash -find . -type f -name '*.txt' | grep -v vendor/ | xargs misspell -error -``` - - -### Can I use pipes or `stdin` for input? - -Yes! 
- -Print messages to `stderr` only: - -```bash -$ echo "zeebra" | misspell -stdin:1:0:found "zeebra" a misspelling of "zebra" -``` - -Print messages to `stderr`, and corrected text to `stdout`: - -```bash -$ echo "zeebra" | misspell -w -stdin:1:0:corrected "zeebra" to "zebra" -zebra -``` - -Only print the corrected text to `stdout`: - -```bash -$ echo "zeebra" | misspell -w -q -zebra -``` - - -### Are there special rules for golang source files? - -Yes! If the file ends in `.go`, then misspell will only check spelling in -comments. - -If you want to force a file to be checked as a golang source, use `-source=go` -on the command line. Conversely, you can check a golang source as if it were -pure text by using `-source=text`. You might want to do this since many -variable names have misspellings in them! - -### Can I check only-comments in other other programming languages? - -I'm told the using `-source=go` works well for ruby, javascript, java, c and -c++. - -It doesn't work well for python and bash. - - -### Does this work with gometalinter? - -[gometalinter](https://github.com/alecthomas/gometalinter) runs -multiple golang linters. Starting on [2016-06-12](https://github.com/alecthomas/gometalinter/pull/134) -gometalinter supports `misspell` natively but it is disabled by default. - -```bash -# update your copy of gometalinter -go get -u github.com/alecthomas/gometalinter - -# install updates and misspell -gometalinter --install --update -``` - -To use, just enable `misspell` - -``` -gometalinter --enable misspell ./... -``` - -Note that gometalinter only checks golang files, and uses the default options -of `misspell` - -You may wish to run this on your plaintext (.txt) and/or markdown files too. - - - -### How Can I Get CSV Output? - -Using `-f csv`, the output is standard comma-seprated values with headers in the first row. - -``` -misspell -f csv * -file,line,column,typo,corrected -"README.md",9,22,langauge,language -"README.md",47,25,langauge,language -``` - - -### How can I export to SQLite3? - -Using `-f sqlite`, the output is a [sqlite3](https://www.sqlite.org/index.html) dump-file. - -```bash -$ misspell -f sqlite * > /tmp/misspell.sql -$ cat /tmp/misspell.sql - -PRAGMA foreign_keys=OFF; -BEGIN TRANSACTION; -CREATE TABLE misspell( - "file" TEXT, - "line" INTEGER,i - "column" INTEGER,i - "typo" TEXT, - "corrected" TEXT -); -INSERT INTO misspell VALUES("install.txt",202,31,"immediatly","immediately"); -# etc... -COMMIT; -``` - -```bash -$ sqlite3 -init /tmp/misspell.sql :memory: 'select count(*) from misspell' -1 -``` - -With some tricks you can directly pipe output to sqlite3 by using `-init /dev/stdin`: - -``` -misspell -f sqlite * | sqlite3 -init /dev/stdin -column -cmd '.width 60 15' ':memory' \ - 'select substr(file,35),typo,count(*) as count from misspell group by file, typo order by count desc;' -``` - - -### How can I ignore rules? - -Using the `-i "comma,separated,rules"` flag you can specify corrections to ignore. - -For example, if you were to run `misspell -w -error -source=text` against document that contains the string `Guy Finkelshteyn Braswell`, misspell would change the text to `Guy Finkelstheyn Bras well`. You can then -determine the rules to ignore by reverting the change and running the with the `-debug` flag. You can then see -that the corrections were `htey -> they` and `aswell -> as well`. To ignore these two rules, you add `-i "htey,aswell"` to -your command. With debug mode on, you can see it print the corrections, but it will no longer make them. 
- - -### How can I change the output format? - -Using the `-f template` flag you can pass in a -[golang text template](https://golang.org/pkg/text/template/) to format the output. - -One can use `printf "%q" VALUE` to safely quote a value. - -The default template is compatible with [gometalinter](https://github.com/alecthomas/gometalinter) -``` -{{ .Filename }}:{{ .Line }}:{{ .Column }}:corrected {{ printf "%q" .Original }} to "{{ printf "%q" .Corrected }}" -``` - -To just print probable misspellings: - -``` --f '{{ .Original }}' -``` - - -### What problem does this solve? - -This corrects commonly misspelled English words in computer source -code, and other text-based formats (`.txt`, `.md`, etc). - -It is designed to run quickly so it can be -used as a [pre-commit hook](https://git-scm.com/book/en/v2/Customizing-Git-Git-Hooks) -with minimal burden on the developer. - -It does not work with binary formats (e.g. Word, etc). - -It is not a complete spell-checking program nor a grammar checker. - - -### What are other misspelling correctors and what's wrong with them? - -Some other misspelling correctors: - -* https://github.com/vlajos/misspell_fixer -* https://github.com/lyda/misspell-check -* https://github.com/lucasdemarchi/codespell - -They all work but had problems that prevented me from using them at scale: - -* slow, all of the above check one misspelling at a time (i.e. linear) using regexps -* not MIT/Apache2 licensed (or equivalent) -* have dependencies that don't work for me (python3, bash, linux sed, etc) -* don't understand American vs. British English and sometimes makes unwelcome "corrections" - -That said, they might be perfect for you and many have more features -than this project! - - -### How fast is it? - -Misspell is easily 100x to 1000x faster than other spelling correctors. You -should be able to check and correct 1000 files in under 250ms. - -This uses the mighty power of golang's -[strings.Replacer](https://golang.org/pkg/strings/#Replacer) which is -a implementation or variation of the -[Aho–Corasick algorithm](https://en.wikipedia.org/wiki/Aho–Corasick_algorithm). -This makes multiple substring matches *simultaneously*. - -In addition this uses multiple CPU cores to work on multiple files. - - -### What problems does it have? - -Unlike the other projects, this doesn't know what a "word" is. There may be -more false positives and false negatives due to this. On the other hand, it -sometimes catches things others don't. - -Either way, please file bugs and we'll fix them! - -Since it operates in parallel to make corrections, it can be non-obvious to -determine exactly what word was corrected. - - -### It's making mistakes. How can I debug? - -Run using `-debug` flag on the file you want. It should then print what word -it is trying to correct. Then [file a -bug](https://github.com/client9/misspell/issues) describing the problem. -Thanks! - - -### Why is it making mistakes or missing items in golang files? - -The matching function is *case-sensitive*, so variable names that are multiple -worlds either in all-upper or all-lower case sometimes can cause false -positives. For instance a variable named `bodyreader` could trigger a false -positive since `yrea` is in the middle that could be corrected to `year`. -Other problems happen if the variable name uses a English contraction that -should use an apostrophe. 
The best way of fixing this is to use the -[Effective Go naming -conventions](https://golang.org/doc/effective_go.html#mixed-caps) and use -[camelCase](https://en.wikipedia.org/wiki/CamelCase) for variable names. You -can check your code using [golint](https://github.com/golang/lint) - - -### What license is this? - -The main code is [MIT](https://github.com/client9/misspell/blob/master/LICENSE). - -Misspell also makes uses of the Golang standard library and contains a modified version of Golang's [strings.Replacer](https://golang.org/pkg/strings/#Replacer) -which are covered under a [BSD License](https://github.com/golang/go/blob/master/LICENSE). Type `misspell -legal` for more details or see [legal.go](https://github.com/client9/misspell/blob/master/legal.go) - - -### Where do the word lists come from? - -It started with a word list from -[Wikipedia](https://en.wikipedia.org/wiki/Wikipedia:Lists_of_common_misspellings/For_machines). -Unfortunately, this list had to be highly edited as many of the words are -obsolete or based from mistakes on mechanical typewriters (I'm guessing). - -Additional words were added based on actually mistakes seen in -the wild (meaning self-generated). - -Variations of UK and US spellings are based on many sources including: - -* http://www.tysto.com/uk-us-spelling-list.html (with heavy editing, many are incorrect) -* http://www.oxforddictionaries.com/us/words/american-and-british-spelling-american (excellent site but incomplete) -* Diffing US and UK [scowl dictionaries](http://wordlist.aspell.net) - -American English is more accepting of spelling variations than is British -English, so "what is American or not" is subject to opinion. Corrections and help welcome. - - -### What are some other enhancements that could be done? - -Here's some ideas for enhancements: - -*Capitalization of proper nouns* could be done (e.g. weekday and month names, country names, language names) - -*Opinionated US spellings* US English has a number of words with alternate -spellings. Think [adviser vs. -advisor](http://grammarist.com/spelling/adviser-advisor/). While "advisor" is not wrong, the opinionated US -locale would correct "advisor" to "adviser". - -*Versioning* Some type of versioning is needed so reporting mistakes and errors is easier. - -*Feedback* Mistakes would be sent to some server for agregation and feedback review. - -*Contractions and Apostrophes* This would optionally correct "isnt" to -"isn't", etc. diff --git a/vendor/github.com/golangci/misspell/RELEASE-HOWTO.md b/vendor/github.com/golangci/misspell/RELEASE-HOWTO.md deleted file mode 100644 index 55b52d962..000000000 --- a/vendor/github.com/golangci/misspell/RELEASE-HOWTO.md +++ /dev/null @@ -1,38 +0,0 @@ -# Release HOWTO - -since I forget. - - -1. Review existing tags and pick new release number - - ```sh - git tag - ``` - -2. Tag locally - - ```sh - git tag -a v0.1.0 -m "First release" - ``` - - If things get screwed up, delete the tag with - - ```sh - git tag -d v0.1.0 - ``` - -3. Test goreleaser - - TODO: how to install goreleaser - - ```sh - ./scripts/goreleaser-dryrun.sh - ``` - -4. Push - - ```bash - git push origin v0.1.0 - ``` - -5. Verify release and edit notes. 
See https://github.com/client9/misspell/releases diff --git a/vendor/github.com/golangci/misspell/ascii.go b/vendor/github.com/golangci/misspell/ascii.go deleted file mode 100644 index d60af5a8d..000000000 --- a/vendor/github.com/golangci/misspell/ascii.go +++ /dev/null @@ -1,60 +0,0 @@ -package misspell - -// ByteToUpper converts an ascii byte to upper cases. -// Uses a branch-less algorithm. -func ByteToUpper(x byte) byte { - b := byte(0x80) | x - c := b - byte(0x61) - d := ^(b - byte(0x7b)) - e := (c & d) & (^x & 0x7f) - return x - (e >> 2) -} - -// ByteToLower converts an ascii byte to lower case. -// Uses a branch-less algorithm. -func ByteToLower(eax byte) byte { - ebx := eax&byte(0x7f) + byte(0x25) - ebx = ebx&byte(0x7f) + byte(0x1a) - ebx = ((ebx & ^eax) >> 2) & byte(0x20) - return eax + ebx -} - -// ByteEqualFold does ascii compare, case insensitive. -func ByteEqualFold(a, b byte) bool { - return a == b || ByteToLower(a) == ByteToLower(b) -} - -// StringEqualFold ASCII case-insensitive comparison -// golang toUpper/toLower for both bytes and strings -// appears to be Unicode based which is super slow -// based from https://codereview.appspot.com/5180044/patch/14007/21002. -func StringEqualFold(s1, s2 string) bool { - if len(s1) != len(s2) { - return false - } - for i := 0; i < len(s1); i++ { - c1 := s1[i] - c2 := s2[i] - // c1 & c2 - if c1 != c2 { - c1 |= 'a' - 'A' - c2 |= 'a' - 'A' - if c1 != c2 || c1 < 'a' || c1 > 'z' { - return false - } - } - } - return true -} - -// StringHasPrefixFold is similar to strings.HasPrefix but comparison is done ignoring ASCII case. -func StringHasPrefixFold(s1, s2 string) bool { - // prefix is bigger than input --> false - if len(s1) < len(s2) { - return false - } - if len(s1) == len(s2) { - return StringEqualFold(s1, s2) - } - return StringEqualFold(s1[:len(s2)], s2) -} diff --git a/vendor/github.com/golangci/misspell/case.go b/vendor/github.com/golangci/misspell/case.go deleted file mode 100644 index 0b580bedb..000000000 --- a/vendor/github.com/golangci/misspell/case.go +++ /dev/null @@ -1,58 +0,0 @@ -package misspell - -import ( - "strings" -) - -// WordCase is an enum of various word casing styles. -type WordCase int - -// Various WordCase types... likely to be not correct. -const ( - CaseUnknown WordCase = iota - CaseLower - CaseUpper - CaseTitle -) - -// CaseStyle returns what case style a word is in. -func CaseStyle(word string) WordCase { - upperCount := 0 - lowerCount := 0 - - // this iterates over RUNES not BYTES - for i := 0; i < len(word); i++ { - ch := word[i] - switch { - case ch >= 'a' && ch <= 'z': - lowerCount++ - case ch >= 'A' && ch <= 'Z': - upperCount++ - } - } - - switch { - case upperCount != 0 && lowerCount == 0: - return CaseUpper - case upperCount == 0 && lowerCount != 0: - return CaseLower - case upperCount == 1 && lowerCount > 0 && word[0] >= 'A' && word[0] <= 'Z': - return CaseTitle - } - return CaseUnknown -} - -// CaseVariations returns: -// If AllUpper or First-Letter-Only is upper-cased: add the all upper case version. -// If AllLower, add the original, the title and upper-case forms. -// If Mixed, return the original, and the all upper-case form. 
-func CaseVariations(word string, style WordCase) []string { - switch style { - case CaseLower: - return []string{word, strings.ToUpper(word[0:1]) + word[1:], strings.ToUpper(word)} - case CaseUpper: - return []string{strings.ToUpper(word)} - default: - return []string{word, strings.ToUpper(word)} - } -} diff --git a/vendor/github.com/golangci/misspell/goreleaser.yml b/vendor/github.com/golangci/misspell/goreleaser.yml deleted file mode 100644 index 97aa83e5a..000000000 --- a/vendor/github.com/golangci/misspell/goreleaser.yml +++ /dev/null @@ -1,30 +0,0 @@ -project_name: misspell - -builds: - - main: cmd/misspell/main.go - binary: misspell - ldflags: -s -w -X main.version={{.Version}} - goos: - - darwin - - linux - - windows - goarch: - - amd64 - - arm64 - env: - - CGO_ENABLED=0 - -archives: - - name_template: "{{ .Binary }}_{{ .Version }}_{{ .Os }}_{{ .Arch }}" - replacements: - amd64: 64bit - 386: 32bit - darwin: mac - files: - - LICENSE - -checksum: - name_template: "{{ .ProjectName }}_{{ .Version }}_checksums.txt" - -snapshot: - name_template: "SNAPSHOT-{{.Commit}}" diff --git a/vendor/github.com/golangci/misspell/install-misspell.sh b/vendor/github.com/golangci/misspell/install-misspell.sh deleted file mode 100644 index 51e9b3372..000000000 --- a/vendor/github.com/golangci/misspell/install-misspell.sh +++ /dev/null @@ -1,365 +0,0 @@ -#!/bin/sh -set -e -# Code generated by godownloader. DO NOT EDIT. -# - -usage() { - this=$1 - cat </dev/null -} -echoerr() { - echo "$@" 1>&2 -} -log_prefix() { - echo "$0" -} -_logp=6 -log_set_priority() { - _logp="$1" -} -log_priority() { - if test -z "$1"; then - echo "$_logp" - return - fi - [ "$1" -ge "$_logp" ] -} -log_debug() { - log_priority 7 && echoerr "$(log_prefix)" "DEBUG" "$@" -} -log_info() { - log_priority 6 && echoerr "$(log_prefix)" "INFO" "$@" -} -log_err() { - log_priority 3 && echoerr "$(log_prefix)" "ERR" "$@" -} -log_crit() { - log_priority 2 && echoerr "$(log_prefix)" "CRIT" "$@" -} -uname_os() { - os=$(uname -s | tr '[:upper:]' '[:lower:]') - case "$os" in - msys_nt) os="windows" ;; - esac - echo "$os" -} -uname_arch() { - arch=$(uname -m) - case $arch in - x86_64) arch="amd64" ;; - x86) arch="386" ;; - i686) arch="386" ;; - i386) arch="386" ;; - aarch64) arch="arm64" ;; - armv5*) arch="arm5" ;; - armv6*) arch="arm6" ;; - armv7*) arch="arm7" ;; - esac - echo ${arch} -} -uname_os_check() { - os=$(uname_os) - case "$os" in - darwin) return 0 ;; - dragonfly) return 0 ;; - freebsd) return 0 ;; - linux) return 0 ;; - android) return 0 ;; - nacl) return 0 ;; - netbsd) return 0 ;; - openbsd) return 0 ;; - plan9) return 0 ;; - solaris) return 0 ;; - windows) return 0 ;; - esac - log_crit "uname_os_check '$(uname -s)' got converted to '$os' which is not a GOOS value. Please file bug at https://github.com/client9/shlib" - return 1 -} -uname_arch_check() { - arch=$(uname_arch) - case "$arch" in - 386) return 0 ;; - amd64) return 0 ;; - arm64) return 0 ;; - armv5) return 0 ;; - armv6) return 0 ;; - armv7) return 0 ;; - ppc64) return 0 ;; - ppc64le) return 0 ;; - mips) return 0 ;; - mipsle) return 0 ;; - mips64) return 0 ;; - mips64le) return 0 ;; - s390x) return 0 ;; - amd64p32) return 0 ;; - esac - log_crit "uname_arch_check '$(uname -m)' got converted to '$arch' which is not a GOARCH value. 
Please file bug report at https://github.com/client9/shlib" - return 1 -} -untar() { - tarball=$1 - case "${tarball}" in - *.tar.gz | *.tgz) tar -xzf "${tarball}" ;; - *.tar) tar -xf "${tarball}" ;; - *.zip) unzip "${tarball}" ;; - *) - log_err "untar unknown archive format for ${tarball}" - return 1 - ;; - esac -} -mktmpdir() { - test -z "$TMPDIR" && TMPDIR="$(mktemp -d)" - mkdir -p "${TMPDIR}" - echo "${TMPDIR}" -} -http_download() { - local_file=$1 - source_url=$2 - header=$3 - headerflag='' - destflag='' - if is_command curl; then - cmd='curl --fail -sSL' - destflag='-o' - headerflag='-H' - elif is_command wget; then - cmd='wget -q' - destflag='-O' - headerflag='--header' - else - log_crit "http_download unable to find wget or curl" - return 1 - fi - if [ -z "$header" ]; then - $cmd $destflag "$local_file" "$source_url" - else - $cmd $headerflag "$header" $destflag "$local_file" "$source_url" - fi -} -github_api() { - local_file=$1 - source_url=$2 - header="" - case "$source_url" in - https://api.github.com*) - test -z "$GITHUB_TOKEN" || header="Authorization: token $GITHUB_TOKEN" - ;; - esac - http_download "$local_file" "$source_url" "$header" -} -github_last_release() { - owner_repo=$1 - version=$2 - test -z "$version" && version="latest" - giturl="https://github.com/${owner_repo}/releases/${version}" - json=$(http_download "-" "$giturl" "Accept:application/json") - version=$(echo "$json" | tr -s '\n' ' ' | sed 's/.*"tag_name":"//' | sed 's/".*//') - test -z "$version" && return 1 - echo "$version" -} -hash_sha256() { - TARGET=${1:-/dev/stdin} - if is_command gsha256sum; then - hash=$(gsha256sum "$TARGET") || return 1 - echo "$hash" | cut -d ' ' -f 1 - elif is_command sha256sum; then - hash=$(sha256sum "$TARGET") || return 1 - echo "$hash" | cut -d ' ' -f 1 - elif is_command shasum; then - hash=$(shasum -a 256 "$TARGET" 2>/dev/null) || return 1 - echo "$hash" | cut -d ' ' -f 1 - elif is_command openssl; then - hash=$(openssl -dst openssl dgst -sha256 "$TARGET") || return 1 - echo "$hash" | cut -d ' ' -f a - else - log_crit "hash_sha256 unable to find command to compute sha-256 hash" - return 1 - fi -} -hash_sha256_verify() { - TARGET=$1 - checksums=$2 - if [ -z "$checksums" ]; then - log_err "hash_sha256_verify checksum file not specified in arg2" - return 1 - fi - BASENAME=${TARGET##*/} - want=$(grep "${BASENAME}" "${checksums}" 2>/dev/null | tr '\t' ' ' | cut -d ' ' -f 1) - if [ -z "$want" ]; then - log_err "hash_sha256_verify unable to find checksum for '${TARGET}' in '${checksums}'" - return 1 - fi - got=$(hash_sha256 "$TARGET") - if [ "$want" != "$got" ]; then - log_err "hash_sha256_verify checksum for '$TARGET' did not verify ${want} vs $got" - return 1 - fi -} -cat /dev/null < 50000 { - fin, err := os.Open(filename) - if err != nil { - return "", fmt.Errorf("unable to open large file %q: %w", filename, err) - } - defer fin.Close() - buf := make([]byte, 512) - _, err = io.ReadFull(fin, buf) - if err != nil { - return "", fmt.Errorf("unable to read 512 bytes from %q: %w", filename, err) - } - if !isTextFile(buf) { - return "", nil - } - - // set so we don't double-check this file - isText = true - } - - // read in whole file - raw, err := os.ReadFile(filename) - if err != nil { - return "", fmt.Errorf("unable to read all %q: %w", filename, err) - } - - if !isText && !isTextFile(raw) { - return "", nil - } - return string(raw), nil -} diff --git a/vendor/github.com/golangci/misspell/notwords.go b/vendor/github.com/golangci/misspell/notwords.go deleted file mode 100644 index 
a250cf7f6..000000000 --- a/vendor/github.com/golangci/misspell/notwords.go +++ /dev/null @@ -1,85 +0,0 @@ -package misspell - -import ( - "bytes" - "regexp" - "strings" -) - -var ( - reEmail = regexp.MustCompile(`[a-zA-Z0-9_.%+-]+@[a-zA-Z0-9-.]+\.[a-zA-Z]{2,6}[^a-zA-Z]`) - reHost = regexp.MustCompile(`[a-zA-Z0-9-.]+\.[a-zA-Z]+`) - reBackslash = regexp.MustCompile(`\\[a-z]`) -) - -// RemovePath attempts to strip away embedded file system paths, e.g. -// -// /foo/bar or /static/myimg.png -// -// TODO: windows style. -func RemovePath(s string) string { - out := bytes.Buffer{} - var idx int - for len(s) > 0 { - if idx = strings.IndexByte(s, '/'); idx == -1 { - out.WriteString(s) - break - } - - if idx > 0 { - idx-- - } - - var chclass string - switch s[idx] { - case '/', ' ', '\n', '\t', '\r': - chclass = " \n\r\t" - case '[': - chclass = "]\n" - case '(': - chclass = ")\n" - default: - out.WriteString(s[:idx+2]) - s = s[idx+2:] - continue - } - - endx := strings.IndexAny(s[idx+1:], chclass) - if endx != -1 { - out.WriteString(s[:idx+1]) - out.Write(bytes.Repeat([]byte{' '}, endx)) - s = s[idx+endx+1:] - } else { - out.WriteString(s) - break - } - } - return out.String() -} - -// replaceWithBlanks returns a string with the same number of spaces as the input. -func replaceWithBlanks(s string) string { - return strings.Repeat(" ", len(s)) -} - -// RemoveEmail remove email-like strings, e.g. "nickg+junk@xfoobar.com", "nickg@xyz.abc123.biz". -func RemoveEmail(s string) string { - return reEmail.ReplaceAllStringFunc(s, replaceWithBlanks) -} - -// RemoveHost removes host-like strings "foobar.com" "abc123.fo1231.biz". -func RemoveHost(s string) string { - return reHost.ReplaceAllStringFunc(s, replaceWithBlanks) -} - -// RemoveBackslashEscapes removes characters that are preceded by a backslash. -// commonly found in printf format string "\nto". -func removeBackslashEscapes(s string) string { - return reBackslash.ReplaceAllStringFunc(s, replaceWithBlanks) -} - -// RemoveNotWords blanks out all the not words. -func RemoveNotWords(s string) string { - // do most selective/specific first - return removeBackslashEscapes(RemoveHost(RemoveEmail(RemovePath(StripURL(s))))) -} diff --git a/vendor/github.com/golangci/misspell/replace.go b/vendor/github.com/golangci/misspell/replace.go deleted file mode 100644 index bcfcf8deb..000000000 --- a/vendor/github.com/golangci/misspell/replace.go +++ /dev/null @@ -1,247 +0,0 @@ -package misspell - -import ( - "bufio" - "bytes" - "io" - "regexp" - "strings" - "text/scanner" -) - -func max(x, y int) int { - if x > y { - return x - } - return y -} - -func inArray(haystack []string, needle string) bool { - for _, word := range haystack { - if strings.EqualFold(needle, word) { - return true - } - } - return false -} - -var wordRegexp = regexp.MustCompile(`[a-zA-Z0-9']+`) - -// Diff is datastructures showing what changed in a single line. -type Diff struct { - Filename string - FullLine string - Line int - Column int - Original string - Corrected string -} - -// Replacer is the main struct for spelling correction. -type Replacer struct { - Replacements []string - Debug bool - engine *StringReplacer - corrected map[string]string -} - -// New creates a new default Replacer using the main rule list. -func New() *Replacer { - r := Replacer{ - Replacements: DictMain, - } - r.Compile() - return &r -} - -// RemoveRule deletes existing rules. -// The content of `ignore` is case-insensitive. -// TODO: make in place to save memory. 
-func (r *Replacer) RemoveRule(ignore []string) { - newWords := make([]string, 0, len(r.Replacements)) - for i := 0; i < len(r.Replacements); i += 2 { - if inArray(ignore, r.Replacements[i]) { - continue - } - newWords = append(newWords, r.Replacements[i:i+2]...) - } - r.engine = nil - r.Replacements = newWords -} - -// AddRuleList appends new rules. -// Input is in the same form as Strings.Replacer: [ old1, new1, old2, new2, ....] -// Note: does not check for duplicates. -func (r *Replacer) AddRuleList(additions []string) { - r.engine = nil - r.Replacements = append(r.Replacements, additions...) -} - -// Compile compiles the rules. -// Required before using the Replace functions. -func (r *Replacer) Compile() { - r.corrected = make(map[string]string, len(r.Replacements)/2) - for i := 0; i < len(r.Replacements); i += 2 { - r.corrected[r.Replacements[i]] = r.Replacements[i+1] - } - r.engine = NewStringReplacer(r.Replacements...) -} - -/* -line1 and line2 are different -extract words from each line1 - -replace word -> newword -if word == new-word - - continue - -if new-word in list of replacements - - continue - -new word not original, and not in list of replacements some substring got mixed up. UNdo. -*/ -func (r *Replacer) recheckLine(s string, lineNum int, buf io.Writer, next func(Diff)) { - first := 0 - redacted := RemoveNotWords(s) - - idx := wordRegexp.FindAllStringIndex(redacted, -1) - for _, ab := range idx { - word := s[ab[0]:ab[1]] - newword := r.engine.Replace(word) - if newword == word { - // no replacement done - continue - } - - // ignore camelCase words - // https://github.com/client9/misspell/issues/113 - if CaseStyle(word) == CaseUnknown { - continue - } - - if StringEqualFold(r.corrected[strings.ToLower(word)], newword) { - // word got corrected into something we know - io.WriteString(buf, s[first:ab[0]]) - io.WriteString(buf, newword) - first = ab[1] - next(Diff{ - FullLine: s, - Line: lineNum, - Original: word, - Corrected: newword, - Column: ab[0], - }) - continue - } - // Word got corrected into something unknown. Ignore it - } - io.WriteString(buf, s[first:]) -} - -// ReplaceGo is a specialized routine for correcting Golang source files. -// Currently only checks comments, not identifiers for spelling. 
-func (r *Replacer) ReplaceGo(input string) (string, []Diff) { - var s scanner.Scanner - s.Init(strings.NewReader(input)) - s.Mode = scanner.ScanIdents | scanner.ScanFloats | scanner.ScanChars | scanner.ScanStrings | scanner.ScanRawStrings | scanner.ScanComments - lastPos := 0 - output := "" -Loop: - for { - switch s.Scan() { - case scanner.Comment: - origComment := s.TokenText() - newComment := r.engine.Replace(origComment) - - if origComment != newComment { - // s.Pos().Offset is the end of the current token - // subtract len(origComment) to get the start of the token - offset := s.Pos().Offset - output = output + input[lastPos:offset-len(origComment)] + newComment - lastPos = offset - } - case scanner.EOF: - break Loop - } - } - - if lastPos == 0 { - // no changes, no copies - return input, nil - } - if lastPos < len(input) { - output += input[lastPos:] - } - diffs := make([]Diff, 0, 8) - buf := bytes.NewBuffer(make([]byte, 0, max(len(input), len(output))+100)) - // faster that making a bytes.Buffer and bufio.ReadString - outlines := strings.SplitAfter(output, "\n") - inlines := strings.SplitAfter(input, "\n") - for i := 0; i < len(inlines); i++ { - if inlines[i] == outlines[i] { - buf.WriteString(outlines[i]) - continue - } - r.recheckLine(inlines[i], i+1, buf, func(d Diff) { - diffs = append(diffs, d) - }) - } - - return buf.String(), diffs -} - -// Replace is corrects misspellings in input, returning corrected version along with a list of diffs. -func (r *Replacer) Replace(input string) (string, []Diff) { - output := r.engine.Replace(input) - if input == output { - return input, nil - } - diffs := make([]Diff, 0, 8) - buf := bytes.NewBuffer(make([]byte, 0, max(len(input), len(output))+100)) - // faster that making a bytes.Buffer and bufio.ReadString - outlines := strings.SplitAfter(output, "\n") - inlines := strings.SplitAfter(input, "\n") - for i := 0; i < len(inlines); i++ { - if inlines[i] == outlines[i] { - buf.WriteString(outlines[i]) - continue - } - r.recheckLine(inlines[i], i+1, buf, func(d Diff) { - diffs = append(diffs, d) - }) - } - - return buf.String(), diffs -} - -// ReplaceReader applies spelling corrections to a reader stream. -// Diffs are emitted through a callback. -func (r *Replacer) ReplaceReader(raw io.Reader, w io.Writer, next func(Diff)) error { - var ( - err error - line string - lineNum int - ) - reader := bufio.NewReader(raw) - for err == nil { - lineNum++ - line, err = reader.ReadString('\n') - - // if it's EOF, then line has the last line - // don't like the check of err here and - // in for loop - if err != nil && err != io.EOF { - return err - } - // easily 5x faster than regexp+map - if line == r.engine.Replace(line) { - io.WriteString(w, line) - continue - } - // but it can be inaccurate, so we need to double-check - r.recheckLine(line, lineNum, w, next) - } - return nil -} diff --git a/vendor/github.com/golangci/misspell/stringreplacer.go b/vendor/github.com/golangci/misspell/stringreplacer.go deleted file mode 100644 index 73ca9a56a..000000000 --- a/vendor/github.com/golangci/misspell/stringreplacer.go +++ /dev/null @@ -1,333 +0,0 @@ -// Copyright 2011 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package misspell - -import ( - "io" - "strings" -) - -// StringReplacer replaces a list of strings with replacements. -// It is safe for concurrent use by multiple goroutines. 
-type StringReplacer struct { - r replacer -} - -// replacer is the interface that a replacement algorithm needs to implement. -type replacer interface { - Replace(s string) string - WriteString(w io.Writer, s string) (n int, err error) -} - -// NewStringReplacer returns a new Replacer from a list of old, new string pairs. -// Replacements are performed in order, without overlapping matches. -func NewStringReplacer(oldnew ...string) *StringReplacer { - if len(oldnew)%2 == 1 { - panic("strings.NewReplacer: odd argument count") - } - - return &StringReplacer{r: makeGenericReplacer(oldnew)} -} - -// Replace returns a copy of s with all replacements performed. -func (r *StringReplacer) Replace(s string) string { - return r.r.Replace(s) -} - -// WriteString writes s to w with all replacements performed. -func (r *StringReplacer) WriteString(w io.Writer, s string) (int, error) { - return r.r.WriteString(w, s) -} - -// trieNode is a node in a lookup trie for prioritized key/value pairs. Keys -// and values may be empty. For example, the trie containing keys "ax", "ay", -// "bcbc", "x" and "xy" could have eight nodes: -// -// n0 - -// n1 a- -// n2 .x+ -// n3 .y+ -// n4 b- -// n5 .cbc+ -// n6 x+ -// n7 .y+ -// -// n0 is the root node, and its children are n1, n4 and n6; n1's children are -// n2 and n3; n4's child is n5; n6's child is n7. Nodes n0, n1 and n4 (marked -// with a trailing "-") are partial keys, and nodes n2, n3, n5, n6 and n7 -// (marked with a trailing "+") are complete keys. -type trieNode struct { - // value is the value of the trie node's key/value pair. It is empty if - // this node is not a complete key. - value string - // priority is the priority (higher is more important) of the trie node's - // key/value pair; keys are not necessarily matched shortest- or longest- - // first. Priority is positive if this node is a complete key, and zero - // otherwise. In the example above, positive/zero priorities are marked - // with a trailing "+" or "-". - priority int - - // A trie node may have zero, one or more child nodes: - // * if the remaining fields are zero, there are no children. - // * if prefix and next are non-zero, there is one child in next. - // * if table is non-zero, it defines all the children. - // - // Prefixes are preferred over tables when there is one child, but the - // root node always uses a table for lookup efficiency. - - // prefix is the difference in keys between this trie node and the next. - // In the example above, node n4 has prefix "cbc" and n4's next node is n5. - // Node n5 has no children and so has zero prefix, next and table fields. - prefix string - next *trieNode - - // table is a lookup table indexed by the next byte in the key, after - // remapping that byte through genericReplacer.mapping to create a dense - // index. In the example above, the keys only use 'a', 'b', 'c', 'x' and - // 'y', which remap to 0, 1, 2, 3 and 4. All other bytes remap to 5, and - // genericReplacer.tableSize will be 5. Node n0's table will be - // []*trieNode{ 0:n1, 1:n4, 3:n6 }, where the 0, 1 and 3 are the remapped - // 'a', 'b' and 'x'. - table []*trieNode -} - -func (t *trieNode) add(key, val string, priority int, r *genericReplacer) { - if key == "" { - if t.priority == 0 { - t.value = val - t.priority = priority - } - return - } - - //nolint:nestif // TODO(ldez) must be fixed. - if t.prefix != "" { - // Need to split the prefix among multiple nodes. 
- var n int // length of the longest common prefix - for ; n < len(t.prefix) && n < len(key); n++ { - if t.prefix[n] != key[n] { - break - } - } - if n == len(t.prefix) { - t.next.add(key[n:], val, priority, r) - } else if n == 0 { - // First byte differs, start a new lookup table here. Looking up - // what is currently t.prefix[0] will lead to prefixNode, and - // looking up key[0] will lead to keyNode. - var prefixNode *trieNode - if len(t.prefix) == 1 { - prefixNode = t.next - } else { - prefixNode = &trieNode{ - prefix: t.prefix[1:], - next: t.next, - } - } - keyNode := new(trieNode) - t.table = make([]*trieNode, r.tableSize) - t.table[r.mapping[t.prefix[0]]] = prefixNode - t.table[r.mapping[key[0]]] = keyNode - t.prefix = "" - t.next = nil - keyNode.add(key[1:], val, priority, r) - } else { - // Insert new node after the common section of the prefix. - next := &trieNode{ - prefix: t.prefix[n:], - next: t.next, - } - t.prefix = t.prefix[:n] - t.next = next - next.add(key[n:], val, priority, r) - } - } else if t.table != nil { - // Insert into existing table. - m := r.mapping[key[0]] - if t.table[m] == nil { - t.table[m] = new(trieNode) - } - t.table[m].add(key[1:], val, priority, r) - } else { - t.prefix = key - t.next = new(trieNode) - t.next.add("", val, priority, r) - } -} - -// genericReplacer is the fully generic algorithm. -// It's used as a fallback when nothing faster can be used. -type genericReplacer struct { - root trieNode - // tableSize is the size of a trie node's lookup table. It is the number - // of unique key bytes. - tableSize int - // mapping maps from key bytes to a dense index for trieNode.table. - mapping [256]byte -} - -func makeGenericReplacer(oldnew []string) *genericReplacer { - r := new(genericReplacer) - // Find each byte used, then assign them each an index. - for i := 0; i < len(oldnew); i += 2 { - key := strings.ToLower(oldnew[i]) - for j := 0; j < len(key); j++ { - r.mapping[key[j]] = 1 - } - } - - for _, b := range r.mapping { - r.tableSize += int(b) - } - - var index byte - for i, b := range r.mapping { - if b == 0 { - r.mapping[i] = byte(r.tableSize) - } else { - r.mapping[i] = index - index++ - } - } - // Ensure root node uses a lookup table (for performance). - r.root.table = make([]*trieNode, r.tableSize) - - for i := 0; i < len(oldnew); i += 2 { - r.root.add(strings.ToLower(oldnew[i]), oldnew[i+1], len(oldnew)-i, r) - } - return r -} - -func (r *genericReplacer) lookup(s string, ignoreRoot bool) (val string, keylen int, found bool) { - // Iterate down the trie to the end, and grab the value and keylen with - // the highest priority. - bestPriority := 0 - node := &r.root - n := 0 - for node != nil { - if node.priority > bestPriority && !(ignoreRoot && node == &r.root) { - bestPriority = node.priority - val = node.value - keylen = n - found = true - } - - if s == "" { - break - } - if node.table != nil { - index := r.mapping[ByteToLower(s[0])] - if int(index) == r.tableSize { - break - } - node = node.table[index] - s = s[1:] - n++ - } else if node.prefix != "" && StringHasPrefixFold(s, node.prefix) { - n += len(node.prefix) - s = s[len(node.prefix):] - node = node.next - } else { - break - } - } - return -} - -func (r *genericReplacer) Replace(s string) string { - buf := make(appendSliceWriter, 0, len(s)) - r.WriteString(&buf, s) - return string(buf) -} - -//nolint:gocognit // TODO(ldez) must be fixed. 
-func (r *genericReplacer) WriteString(w io.Writer, s string) (n int, err error) { - sw := getStringWriter(w) - var last, wn int - var prevMatchEmpty bool - for i := 0; i <= len(s); { - // Fast path: s[i] is not a prefix of any pattern. - if i != len(s) && r.root.priority == 0 { - index := int(r.mapping[ByteToLower(s[i])]) - if index == r.tableSize || r.root.table[index] == nil { - i++ - continue - } - } - - // Ignore the empty match iff the previous loop found the empty match. - val, keylen, match := r.lookup(s[i:], prevMatchEmpty) - prevMatchEmpty = match && keylen == 0 - if match { - orig := s[i : i+keylen] - switch CaseStyle(orig) { - case CaseUnknown: - // pretend we didn't match - // i++ - // continue - case CaseUpper: - val = strings.ToUpper(val) - case CaseLower: - val = strings.ToLower(val) - case CaseTitle: - if len(val) < 2 { - val = strings.ToUpper(val) - } else { - val = strings.ToUpper(val[:1]) + strings.ToLower(val[1:]) - } - } - wn, err = sw.WriteString(s[last:i]) - n += wn - if err != nil { - return - } - // debug helper: log.Printf("%d: Going to correct %q with %q", i, s[i:i+keylen], val) - wn, err = sw.WriteString(val) - n += wn - if err != nil { - return - } - i += keylen - last = i - continue - } - i++ - } - if last != len(s) { - wn, err = sw.WriteString(s[last:]) - n += wn - } - return -} - -type appendSliceWriter []byte - -// Write writes to the buffer to satisfy io.Writer. -func (w *appendSliceWriter) Write(p []byte) (int, error) { - *w = append(*w, p...) - return len(p), nil -} - -// WriteString writes to the buffer without string->[]byte->string allocations. -func (w *appendSliceWriter) WriteString(s string) (int, error) { - *w = append(*w, s...) - return len(s), nil -} - -type stringWriter struct { - w io.Writer -} - -func (w stringWriter) WriteString(s string) (int, error) { - return w.w.Write([]byte(s)) -} - -func getStringWriter(w io.Writer) io.StringWriter { - sw, ok := w.(io.StringWriter) - if !ok { - sw = stringWriter{w} - } - return sw -} diff --git a/vendor/github.com/golangci/misspell/stringreplacer_test.gox b/vendor/github.com/golangci/misspell/stringreplacer_test.gox deleted file mode 100644 index 70da997f6..000000000 --- a/vendor/github.com/golangci/misspell/stringreplacer_test.gox +++ /dev/null @@ -1,421 +0,0 @@ -// Copyright 2009 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package misspell_test - -import ( - "bytes" - "fmt" - "strings" - "testing" - - . "github.com/client9/misspell" -) - -var htmlEscaper = NewStringReplacer( - "&", "&", - "<", "<", - ">", ">", - `"`, """, - "'", "'", -) - -var htmlUnescaper = NewStringReplacer( - "&", "&", - "<", "<", - ">", ">", - """, `"`, - "'", "'", -) - -// The http package's old HTML escaping function. -func oldHTMLEscape(s string) string { - s = strings.Replace(s, "&", "&", -1) - s = strings.Replace(s, "<", "<", -1) - s = strings.Replace(s, ">", ">", -1) - s = strings.Replace(s, `"`, """, -1) - s = strings.Replace(s, "'", "'", -1) - return s -} - -var capitalLetters = NewStringReplacer("a", "A", "b", "B") - -// TestReplacer tests the replacer implementations. -func TestReplacer(t *testing.T) { - type testCase struct { - r *StringReplacer - in, out string - } - var testCases []testCase - - // str converts 0xff to "\xff". This isn't just string(b) since that converts to UTF-8. 
- str := func(b byte) string { - return string([]byte{b}) - } - var s []string - - // inc maps "\x00"->"\x01", ..., "a"->"b", "b"->"c", ..., "\xff"->"\x00". - for i := 0; i < 256; i++ { - s = append(s, str(byte(i)), str(byte(i+1))) - } - inc := NewStringReplacer(s...) - - // Test cases with 1-byte old strings, 1-byte new strings. - testCases = append(testCases, - testCase{capitalLetters, "brad", "BrAd"}, - testCase{capitalLetters, strings.Repeat("a", (32<<10)+123), strings.Repeat("A", (32<<10)+123)}, - testCase{capitalLetters, "", ""}, - - testCase{inc, "brad", "csbe"}, - testCase{inc, "\x00\xff", "\x01\x00"}, - testCase{inc, "", ""}, - - testCase{NewStringReplacer("a", "1", "a", "2"), "brad", "br1d"}, - ) - - // repeat maps "a"->"a", "b"->"bb", "c"->"ccc", ... - s = nil - for i := 0; i < 256; i++ { - n := i + 1 - 'a' - if n < 1 { - n = 1 - } - s = append(s, str(byte(i)), strings.Repeat(str(byte(i)), n)) - } - repeat := NewStringReplacer(s...) - - // Test cases with 1-byte old strings, variable length new strings. - testCases = append(testCases, - testCase{htmlEscaper, "No changes", "No changes"}, - testCase{htmlEscaper, "I <3 escaping & stuff", "I &lt;3 escaping &amp; stuff"}, - testCase{htmlEscaper, "&&&", "&amp;&amp;&amp;"}, - testCase{htmlEscaper, "", ""}, - - testCase{repeat, "brad", "bbrrrrrrrrrrrrrrrrrradddd"}, - testCase{repeat, "abba", "abbbba"}, - testCase{repeat, "", ""}, - - testCase{NewStringReplacer("a", "11", "a", "22"), "brad", "br11d"}, - ) - - // The remaining test cases have variable length old strings. - - testCases = append(testCases, - testCase{htmlUnescaper, "&amp;amp;", "&amp;"}, - testCase{htmlUnescaper, "&lt;b&gt;HTML&apos;s neat&lt;/b&gt;", "<b>HTML's neat</b>"}, - testCase{htmlUnescaper, "", ""}, - - testCase{NewStringReplacer("a", "1", "a", "2", "xxx", "xxx"), "brad", "br1d"}, - - testCase{NewStringReplacer("a", "1", "aa", "2", "aaa", "3"), "aaaa", "1111"}, - - testCase{NewStringReplacer("aaa", "3", "aa", "2", "a", "1"), "aaaa", "31"}, - ) - - // gen1 has multiple old strings of variable length. There is no - // overall non-empty common prefix, but some pairwise common prefixes. - gen1 := NewStringReplacer( - "aaa", "3[aaa]", - "aa", "2[aa]", - "a", "1[a]", - "i", "i", - "longerst", "most long", - "longer", "medium", - "long", "short", - "xx", "xx", - "x", "X", - "X", "Y", - "Y", "Z", - ) - testCases = append(testCases, - testCase{gen1, "fooaaabar", "foo3[aaa]b1[a]r"}, - testCase{gen1, "long, longerst, longer", "short, most long, medium"}, - testCase{gen1, "xxxxx", "xxxxX"}, - testCase{gen1, "XiX", "YiY"}, - testCase{gen1, "", ""}, - ) - - // gen2 has multiple old strings with no pairwise common prefix. - gen2 := NewStringReplacer( - "roses", "red", - "violets", "blue", - "sugar", "sweet", - ) - testCases = append(testCases, - testCase{gen2, "roses are red, violets are blue...", "red are red, blue are blue..."}, - testCase{gen2, "", ""}, - ) - - // gen3 has multiple old strings with an overall common prefix. - gen3 := NewStringReplacer( - "abracadabra", "poof", - "abracadabrakazam", "splat", - "abraham", "lincoln", - "abrasion", "scrape", - "abraham", "isaac", - ) - testCases = append(testCases, - testCase{gen3, "abracadabrakazam abraham", "poofkazam lincoln"}, - testCase{gen3, "abrasion abracad", "scrape abracad"}, - testCase{gen3, "abba abram abrasive", "abba abram abrasive"}, - testCase{gen3, "", ""}, - ) - - // foo{1,2,3,4} have multiple old strings with an overall common prefix - // and 1- or 2- byte extensions from the common prefix. 
- foo1 := NewStringReplacer( - "foo1", "A", - "foo2", "B", - "foo3", "C", - ) - foo2 := NewStringReplacer( - "foo1", "A", - "foo2", "B", - "foo31", "C", - "foo32", "D", - ) - foo3 := NewStringReplacer( - "foo11", "A", - "foo12", "B", - "foo31", "C", - "foo32", "D", - ) - foo4 := NewStringReplacer( - "foo12", "B", - "foo32", "D", - ) - testCases = append(testCases, - testCase{foo1, "fofoofoo12foo32oo", "fofooA2C2oo"}, - testCase{foo1, "", ""}, - - testCase{foo2, "fofoofoo12foo32oo", "fofooA2Doo"}, - testCase{foo2, "", ""}, - - testCase{foo3, "fofoofoo12foo32oo", "fofooBDoo"}, - testCase{foo3, "", ""}, - - testCase{foo4, "fofoofoo12foo32oo", "fofooBDoo"}, - testCase{foo4, "", ""}, - ) - - // genAll maps "\x00\x01\x02...\xfe\xff" to "[all]", amongst other things. - allBytes := make([]byte, 256) - for i := range allBytes { - allBytes[i] = byte(i) - } - allString := string(allBytes) - genAll := NewStringReplacer( - allString, "[all]", - "\xff", "[ff]", - "\x00", "[00]", - ) - testCases = append(testCases, - testCase{genAll, allString, "[all]"}, - testCase{genAll, "a\xff" + allString + "\x00", "a[ff][all][00]"}, - testCase{genAll, "", ""}, - ) - - // Test cases with empty old strings. - - blankToX1 := NewStringReplacer("", "X") - blankToX2 := NewStringReplacer("", "X", "", "") - blankHighPriority := NewStringReplacer("", "X", "o", "O") - blankLowPriority := NewStringReplacer("o", "O", "", "X") - blankNoOp1 := NewStringReplacer("", "") - blankNoOp2 := NewStringReplacer("", "", "", "A") - blankFoo := NewStringReplacer("", "X", "foobar", "R", "foobaz", "Z") - testCases = append(testCases, - testCase{blankToX1, "foo", "XfXoXoX"}, - testCase{blankToX1, "", "X"}, - - testCase{blankToX2, "foo", "XfXoXoX"}, - testCase{blankToX2, "", "X"}, - - testCase{blankHighPriority, "oo", "XOXOX"}, - testCase{blankHighPriority, "ii", "XiXiX"}, - testCase{blankHighPriority, "oiio", "XOXiXiXOX"}, - testCase{blankHighPriority, "iooi", "XiXOXOXiX"}, - testCase{blankHighPriority, "", "X"}, - - testCase{blankLowPriority, "oo", "OOX"}, - testCase{blankLowPriority, "ii", "XiXiX"}, - testCase{blankLowPriority, "oiio", "OXiXiOX"}, - testCase{blankLowPriority, "iooi", "XiOOXiX"}, - testCase{blankLowPriority, "", "X"}, - - testCase{blankNoOp1, "foo", "foo"}, - testCase{blankNoOp1, "", ""}, - - testCase{blankNoOp2, "foo", "foo"}, - testCase{blankNoOp2, "", ""}, - - testCase{blankFoo, "foobarfoobaz", "XRXZX"}, - testCase{blankFoo, "foobar-foobaz", "XRX-XZX"}, - testCase{blankFoo, "", "X"}, - ) - - // single string replacer - - abcMatcher := NewStringReplacer("abc", "[match]") - - testCases = append(testCases, - testCase{abcMatcher, "", ""}, - testCase{abcMatcher, "ab", "ab"}, - testCase{abcMatcher, "abc", "[match]"}, - testCase{abcMatcher, "abcd", "[match]d"}, - testCase{abcMatcher, "cabcabcdabca", "c[match][match]d[match]a"}, - ) - - // Issue 6659 cases (more single string replacer) - - noHello := NewStringReplacer("Hello", "") - testCases = append(testCases, - testCase{noHello, "Hello", ""}, - testCase{noHello, "Hellox", "x"}, - testCase{noHello, "xHello", "x"}, - testCase{noHello, "xHellox", "xx"}, - ) - - // No-arg test cases. - - nop := NewStringReplacer() - testCases = append(testCases, - testCase{nop, "abc", "abc"}, - testCase{nop, "", ""}, - ) - - // Run the test cases. - - for i, tc := range testCases { - if s := tc.r.Replace(tc.in); s != tc.out { - t.Errorf("%d. strings.Replace(%q) = %q, want %q", i, tc.in, s, tc.out) - } - var buf bytes.Buffer - n, err := tc.r.WriteString(&buf, tc.in) - if err != nil { - t.Errorf("%d. 
WriteString: %v", i, err) - continue - } - got := buf.String() - if got != tc.out { - t.Errorf("%d. WriteString(%q) wrote %q, want %q", i, tc.in, got, tc.out) - continue - } - if n != len(tc.out) { - t.Errorf("%d. WriteString(%q) wrote correct string but reported %d bytes; want %d (%q)", - i, tc.in, n, len(tc.out), tc.out) - } - } -} - -type errWriter struct{} - -func (errWriter) Write(p []byte) (n int, err error) { - return 0, fmt.Errorf("unwritable") -} - -func BenchmarkGenericNoMatch(b *testing.B) { - str := strings.Repeat("A", 100) + strings.Repeat("B", 100) - generic := NewStringReplacer("a", "A", "b", "B", "12", "123") // varying lengths forces generic - for i := 0; i < b.N; i++ { - generic.Replace(str) - } -} - -func BenchmarkGenericMatch1(b *testing.B) { - str := strings.Repeat("a", 100) + strings.Repeat("b", 100) - generic := NewStringReplacer("a", "A", "b", "B", "12", "123") - for i := 0; i < b.N; i++ { - generic.Replace(str) - } -} - -func BenchmarkGenericMatch2(b *testing.B) { - str := strings.Repeat("It's <b>HTML</b>!", 100) - for i := 0; i < b.N; i++ { - htmlUnescaper.Replace(str) - } -} - -func benchmarkSingleString(b *testing.B, pattern, text string) { - r := NewStringReplacer(pattern, "[match]") - b.SetBytes(int64(len(text))) - b.ResetTimer() - for i := 0; i < b.N; i++ { - r.Replace(text) - } -} - -func BenchmarkSingleMaxSkipping(b *testing.B) { - benchmarkSingleString(b, strings.Repeat("b", 25), strings.Repeat("a", 10000)) -} - -func BenchmarkSingleLongSuffixFail(b *testing.B) { - benchmarkSingleString(b, "b"+strings.Repeat("a", 500), strings.Repeat("a", 1002)) -} - -func BenchmarkSingleMatch(b *testing.B) { - benchmarkSingleString(b, "abcdef", strings.Repeat("abcdefghijklmno", 1000)) -} - -func BenchmarkByteByteNoMatch(b *testing.B) { - str := strings.Repeat("A", 100) + strings.Repeat("B", 100) - for i := 0; i < b.N; i++ { - capitalLetters.Replace(str) - } -} - -func BenchmarkByteByteMatch(b *testing.B) { - str := strings.Repeat("a", 100) + strings.Repeat("b", 100) - for i := 0; i < b.N; i++ { - capitalLetters.Replace(str) - } -} - -func BenchmarkByteStringMatch(b *testing.B) { - str := "<" + strings.Repeat("a", 99) + strings.Repeat("b", 99) + ">" - for i := 0; i < b.N; i++ { - htmlEscaper.Replace(str) - } -} - -func BenchmarkHTMLEscapeNew(b *testing.B) { - str := "I <3 to escape HTML & other text too." - for i := 0; i < b.N; i++ { - htmlEscaper.Replace(str) - } -} - -func BenchmarkHTMLEscapeOld(b *testing.B) { - str := "I <3 to escape HTML & other text too." - for i := 0; i < b.N; i++ { - oldHTMLEscape(str) - } -} - -func BenchmarkByteStringReplacerWriteString(b *testing.B) { - str := strings.Repeat("I <3 to escape HTML & other text too.", 100) - buf := new(bytes.Buffer) - for i := 0; i < b.N; i++ { - htmlEscaper.WriteString(buf, str) - buf.Reset() - } -} - -func BenchmarkByteReplacerWriteString(b *testing.B) { - str := strings.Repeat("abcdefghijklmnopqrstuvwxyz", 100) - buf := new(bytes.Buffer) - for i := 0; i < b.N; i++ { - capitalLetters.WriteString(buf, str) - buf.Reset() - } -} - -// BenchmarkByteByteReplaces compares byteByteImpl against multiple Replaces. 
-func BenchmarkByteByteReplaces(b *testing.B) { - str := strings.Repeat("a", 100) + strings.Repeat("b", 100) - for i := 0; i < b.N; i++ { - strings.Replace(strings.Replace(str, "a", "A", -1), "b", "B", -1) - } -} diff --git a/vendor/github.com/golangci/misspell/url.go b/vendor/github.com/golangci/misspell/url.go deleted file mode 100644 index 203b91a79..000000000 --- a/vendor/github.com/golangci/misspell/url.go +++ /dev/null @@ -1,18 +0,0 @@ -package misspell - -import ( - "regexp" -) - -// Regexp for URL https://mathiasbynens.be/demo/url-regex -// -// original @imme_emosol (54 chars) has trouble with dashes in hostname -// @(https?|ftp)://(-\.)?([^\s/?\.#-]+\.?)+(/[^\s]*)?$@iS. -var reURL = regexp.MustCompile(`(?i)(https?|ftp)://(-\.)?([^\s/?.#]+\.?)+(/\S*)?`) - -// StripURL attemps to replace URLs with blank spaces, e.g. -// -// "xxx http://foo.com/ yyy -> "xxx yyyy". -func StripURL(s string) string { - return reURL.ReplaceAllStringFunc(s, replaceWithBlanks) -} diff --git a/vendor/github.com/golangci/misspell/words.go b/vendor/github.com/golangci/misspell/words.go deleted file mode 100644 index 1603d87e6..000000000 --- a/vendor/github.com/golangci/misspell/words.go +++ /dev/null @@ -1,31194 +0,0 @@ -package misspell - -// Code generated automatically. DO NOT EDIT. - -// DictMain is the main rule set, not including locale-specific spellings -var DictMain = []string{ - "differentiatiations", "differentiations", - "disproportionaltely", "disproportionately", - "oversimplificiation", "oversimplification", - "transcendentational", "transcendental", - "anthromorphization", "anthropomorphization", - "disporportionately", "disproportionately", - "dispraportionately", "disproportionately", - "disproportianately", "disproportionately", - "disproportionatley", "disproportionately", - "disproprotionately", "disproportionately", - "fundamentalistisch", "fundamentalists", - "fundamentalistiska", "fundamentalists", - "fundamentalistiske", "fundamentalists", - "fundamentalistiskt", "fundamentalists", - "histocompatability", "histocompatibility", - "microtransacations", "microtransactions", - "microtransacciones", "microtransactions", - "microtransactional", "microtransactions", - "microtransactioned", "microtransactions", - "misunderstandingly", "misunderstandings", - "oversemplification", "oversimplification", - "oversimplifacation", "oversimplification", - "oversimplificaiton", "oversimplification", - "oversimplificating", "oversimplification", - "oversimplyfication", "oversimplification", - "cardiovasculaires", "cardiovascular", - "certificationkits", "certifications", - "counterporductive", "counterproductive", - "coutnerproductive", "counterproductive", - "disporportionatly", "disproportionately", - "disproportiantely", "disproportionately", - "disproportionatly", "disproportionately", - "disproportionnate", "disproportionate", - "disrepresentation", "misrepresentation", - "fundamentalistisk", "fundamentalists", - "incompatabilities", "incompatibilities", - "inconsequentional", "inconsequential", - "indistinguishible", "indistinguishable", - "indistingusihable", "indistinguishable", - "indistinquishable", "indistinguishable", - "indistuingishable", "indistinguishable", - "instatutionalized", "institutionalized", - "institucionalized", "institutionalized", - "institutionilized", "institutionalized", - "instutitionalized", "institutionalized", - "instututionalized", "institutionalized", - "interchangeablely", "interchangeably", - "interchangeablity", "interchangeably", - "intercontinential", 
"intercontinental", - "micortransactions", "microtransactions", - "microstansactions", "microtransactions", - "microtramsactions", "microtransactions", - "microtranasctions", "microtransactions", - "microtransacitons", "microtransactions", - "microtransacrions", "microtransactions", - "microtransactioms", "microtransactions", - "microtransactiosn", "microtransactions", - "microtranscations", "microtransactions", - "microtrasnactions", "microtransactions", - "mircotransactions", "microtransactions", - "misinterpretating", "misinterpreting", - "misrepresantation", "misrepresentation", - "misrepresentaiton", "misrepresentation", - "misrepresentating", "misrepresenting", - "misunderstantings", "misunderstandings", - "mocrotransactions", "microtransactions", - "oversimplifaction", "oversimplification", - "oversimplificaton", "oversimplification", - "oversimplifiction", "oversimplification", - "responsibillities", "responsibilities", - "unconstitutionnal", "unconstitutional", - "accomplishements", "accomplishments", - "admininistrative", "administrative", - "antidepresssants", "antidepressants", - "architechturally", "architecturally", - "cardiovasculaire", "cardiovascular", - "charactarization", "characterization", - "characterazation", "characterization", - "characterisitics", "characteristics", - "characteristsics", "characteristic", - "characterizarion", "characterization", - "charecterization", "characterization", - "charicterization", "characterization", - "circumstantional", "circumstantial", - "conversationable", "conversational", - "counterprodutive", "counterproductive", - "demonstrationens", "demonstrations", - "deterministische", "deterministic", - "differenciations", "differentiation", - "differentiantion", "differentiation", - "differentiatiors", "differentiation", - "differentitation", "differentiation", - "disperportionate", "disproportionate", - "disporportionate", "disproportionate", - "dispraportionate", "disproportionate", - "disproportianate", "disproportionate", - "disproportionaly", "disproportionately", - "disproprotionate", "disproportionate", - "electromagnectic", "electromagnetic", - "enviornmentalist", "environmentalist", - "environmentality", "environmentally", - "extraordinairily", "extraordinarily", - "extraordinarilly", "extraordinary", - "extraterrestials", "extraterrestrials", - "fundamentalismos", "fundamentalists", - "fundamentalismus", "fundamentalists", - "fundamentalistas", "fundamentalists", - "fundamentalisten", "fundamentalists", - "fundamentalister", "fundamentalists", - "imcomprehensible", "incomprehensible", - "immunosupressant", "immunosuppressant", - "imperfectionists", "imperfections", - "implementaciones", "implementations", - "implementationen", "implementations", - "implementationer", "implementations", - "inappropriatelly", "inappropriately", - "incompatablities", "incompatibilities", - "incompatiblities", "incompatibilities", - "incomprehencible", "incomprehensible", - "incomprehendible", "incomprehensible", - "incomprehenisble", "incomprehensible", - "incomprehensable", "incomprehensible", - "incomprehinsible", "incomprehensible", - "incomprihensible", "incomprehensible", - "inconprehensible", "incomprehensible", - "inconsistentcies", "inconsistencies", - "inconstitutional", "unconstitutional", - "incrompehensible", "incomprehensible", - "indistinguisable", "indistinguishable", - "institutionlized", "institutionalized", - "intellectualiser", "intellectuals", - "intellectualisme", "intellectuals", - "interchangeabley", "interchangeably", - 
"internationnally", "internationally", - "interpretaciones", "interpretations", - "interpretationen", "interpretations", - "manoeuverability", "maneuverability", - "massachusettians", "massachusetts", - "microtransacions", "microtransactions", - "microtransacting", "microtransactions", - "microtransactios", "microtransactions", - "microtransactons", "microtransactions", - "microtransations", "microtransactions", - "microtranscation", "microtransactions", - "mircotransaction", "microtransactions", - "miscommunciation", "miscommunication", - "miscommunicaiton", "miscommunication", - "miscomunnication", "miscommunication", - "miscummunication", "miscommunication", - "misinterpretated", "misinterpreted", - "misinterpretions", "misinterpreting", - "misinterpretting", "misinterpreting", - "misproportionate", "disproportionate", - "misrepresenation", "misrepresentation", - "misrepresentaion", "misrepresentation", - "misrepresentated", "misrepresented", - "misrepresentatie", "misrepresentation", - "misrepresentativ", "misrepresentation", - "misubderstanding", "misunderstandings", - "misudnerstanding", "misunderstandings", - "misundarstanding", "misunderstandings", - "misunderatanding", "misunderstandings", - "misunderdtanding", "misunderstandings", - "misundersatnding", "misunderstandings", - "misundersranding", "misunderstandings", - "misunderstadings", "misunderstandings", - "misunderstadning", "misunderstandings", - "misunderstamding", "misunderstandings", - "misunderstandigs", "misunderstandings", - "misunderstandimg", "misunderstandings", - "misunderstandind", "misunderstandings", - "misunderstanging", "misunderstandings", - "misunderstanidng", "misunderstandings", - "misunderstanings", "misunderstandings", - "misunderstansing", "misunderstandings", - "misunderstanting", "misunderstandings", - "misunderstending", "misunderstandings", - "misunderstnading", "misunderstandings", - "misunderstsnding", "misunderstandings", - "misunderstunding", "misunderstandings", - "misundertsanding", "misunderstandings", - "misundrestanding", "misunderstandings", - "misunterstanding", "misunderstandings", - "nationalistische", "nationalistic", - "nationalististic", "nationalistic", - "neconstitutional", "unconstitutional", - "notwhithstanding", "notwithstanding", - "objectificiation", "objectification", - "organisationnels", "organisations", - "perpendiculaires", "perpendicular", - "phillosophically", "philosophically", - "preinitalization", "preinitialization", - "prescriptionists", "prescriptions", - "procrastinarting", "procrastinating", - "procrastinationg", "procrastinating", - "procrastinazione", "procrastination", - "professionalisim", "professionalism", - "professionalisme", "professionals", - "professionallism", "professionalism", - "professionnalism", "professionalism", - "programattically", "programmatically", - "proportionallity", "proportionally", - "reaponsibilities", "responsibilities", - "reinitalizations", "reinitializations", - "representaciones", "representations", - "representationen", "representations", - "representationer", "representations", - "repsonsibilities", "responsibilities", - "responcibilities", "responsibilities", - "responisbilities", "responsibilities", - "responsabilities", "responsibilities", - "responsebilities", "responsibilities", - "straightforeward", "straightforward", - "surrepetitiously", "surreptitiously", - "technologicially", "technologically", - "unconditionnally", "unconditionally", - "unconfortability", "discomfort", - "unconstititional", "unconstitutional", - 
"uncontrollablely", "uncontrollably", - "underestimateing", "underestimating", - "understandablely", "understandably", - "unintentionnally", "unintentionally", - "unsubstantianted", "unsubstantiated", - "unsubstantiative", "unsubstantiated", - "acclimitization", "acclimatization", - "accomplishemnts", "accomplishments", - "accountabillity", "accountability", - "acknolwedgement", "acknowledgement", - "acknoweldgement", "acknowledgement", - "acknowldegement", "acknowledgement", - "acknowlegdement", "acknowledgement", - "administratieve", "administrative", - "administratiors", "administrators", - "administrativne", "administrative", - "aforementionned", "aforementioned", - "anitdepressants", "antidepressants", - "antidepressents", "antidepressants", - "archetecturally", "architecturally", - "associationthis", "associations", - "authobiographic", "autobiographic", - "awknowledgement", "acknowledgement", - "bureaucratische", "bureaucratic", - "cardiovascualar", "cardiovascular", - "carnagie-mellon", "carnegie-mellon", - "carnigie-mellon", "carnegie-mellon", - "celebrationists", "celebrations", - "charactaristics", "characteristics", - "characterisitcs", "characteristics", - "characterisitic", "characteristic", - "characterizaton", "characterization", - "charactersistic", "characteristic", - "charactersitics", "characteristics", - "charactoristics", "characteristics", - "charecteristics", "characteristics", - "comfrontational", "confrontational", - "commuinications", "communications", - "compatabilities", "compatibilities", - "complimentarity", "complimentary", - "compositionwise", "compositions", - "confidenciality", "confidential", - "confidentuality", "confidential", - "confrentational", "confrontational", - "confrontacional", "confrontational", - "conglaturations", "congratulations", - "congradulations", "congratulations", - "congragulations", "congratulations", - "congratualtions", "congratulations", - "congraturations", "congratulations", - "consequentually", "consequently", - "constitutionnal", "constitutional", - "deinitalization", "deinitialization", - "denominationals", "denominations", - "destinationhash", "destinations", - "deterministisch", "deterministic", - "developmentwise", "developments", - "differantiation", "differentiation", - "differenciation", "differentiation", - "differientation", "differentiation", - "discriminatoire", "discriminate", - "discriminatorie", "discriminate", - "disproportiante", "disproportionate", - "disproportinate", "disproportionate", - "elecrtomagnetic", "electromagnetic", - "electormagnetic", "electromagnetic", - "electromagentic", "electromagnetic", - "electromagnatic", "electromagnetic", - "electromangetic", "electromagnetic", - "electromegnetic", "electromagnetic", - "electronagnetic", "electromagnetic", - "enivronmentally", "environmentally", - "entrepreneurers", "entrepreneurs", - "enviornmentally", "environmentally", - "enviromentalist", "environmentalist", - "environemntally", "environmentally", - "envrionmentally", "environmentally", - "evolutionarilly", "evolutionary", - "experementation", "experimentation", - "experimantation", "experimentation", - "experimentacion", "experimentation", - "experimentating", "experimentation", - "experimenterade", "experimented", - "experimintation", "experimentation", - "expirementation", "experimentation", - "extraodrinarily", "extraordinarily", - "extraordinairly", "extraordinarily", - "extraordinarely", "extraordinarily", - "extraordinaryly", "extraordinarily", - "extraterrestial", "extraterrestrial", - 
"extroardinarily", "extraordinarily", - "fondamentalists", "fundamentalists", - "fundamendalists", "fundamentalists", - "fundamentalisme", "fundamentals", - "fundamentalismo", "fundamentals", - "fundamentalista", "fundamentals", - "fundamentalisti", "fundamentals", - "fundamnetalists", "fundamentalists", - "fundemantalists", "fundamentalists", - "fundimentalists", "fundamentalists", - "fundumentalists", "fundamentalists", - "gongratulations", "congratulations", - "grammaticallity", "grammatically", - "gundamentalists", "fundamentalists", - "idiosynchracies", "idiosyncrasies", - "implementaitons", "implementations", - "implimentations", "implementations", - "inapporpriately", "inappropriately", - "inappropraitely", "inappropriately", - "inappropriatley", "inappropriately", - "incompatability", "incompatibility", - "incompetentence", "incompetence", - "incomprehensibe", "incomprehensible", - "incomprehesible", "incomprehensible", - "inconcequential", "inconsequential", - "inconcistencies", "inconsistencies", - "inconditionally", "unconditionally", - "inconsecuential", "inconsequential", - "inconsequantial", "inconsequential", - "inconsequencial", "inconsequential", - "inconsequentual", "inconsequential", - "inconsiquential", "inconsequential", - "inconsistancies", "inconsistencies", - "inconsistencias", "inconsistencies", - "inconsistensies", "inconsistencies", - "inconsistenties", "inconsistencies", - "independentisme", "independents", - "independentiste", "independents", - "independentness", "independents", - "inexperiencable", "inexperience", - "inplementations", "implementations", - "instantaneoulsy", "instantaneous", - "institutionella", "institutional", - "institutionnels", "institutions", - "instutionalized", "institutionalized", - "insubstantiated", "unsubstantiated", - "interchangabley", "interchangeably", - "interchangebale", "interchangeable", - "intercontinetal", "intercontinental", - "interpertations", "interpretations", - "interpratations", "interpretations", - "interpritations", "interpretations", - "intersectionals", "intersections", - "intrepretations", "interpretations", - "investigationes", "investigations", - "journalistische", "journalistic", - "libertarianisim", "libertarianism", - "libertarianisme", "libertarians", - "libertarianismo", "libertarians", - "libertarianists", "libertarians", - "libertariansism", "libertarianism", - "manisfestations", "manifestations", - "manouverability", "maneuverability", - "manufacturerers", "manufacturers", - "marshmallowiest", "marshmallows", - "marshmallowness", "marshmallows", - "microtransacton", "microtransactions", - "mininterpreting", "misinterpreting", - "miscommuniation", "miscommunication", - "miscommunicatie", "miscommunication", - "miscommuniction", "miscommunication", - "misinterperting", "misinterpreting", - "misinterprating", "misinterpreting", - "misinterprented", "misinterpret", - "misinterprested", "misinterpret", - "misinterpretion", "misinterpreting", - "misinterpretted", "misinterpreted", - "misinterpriting", "misinterpreting", - "misintrepreting", "misinterpreting", - "misrepresention", "misrepresenting", - "misunderstading", "misunderstanding", - "misunderstandig", "misunderstandings", - "misunderstandng", "misunderstandings", - "misunderstaning", "misunderstanding", - "multicultralism", "multiculturalism", - "multinationella", "multinational", - "nationalistisch", "nationalists", - "nationalistisen", "nationalists", - "nationalistiska", "nationalists", - "nationalistiske", "nationalists", - "nationalistiskt", 
"nationalists", - "nationalistista", "nationalists", - "objectificaiton", "objectification", - "objectivication", "objectification", - "organisationens", "organisations", - "organisationers", "organisations", - "overestimateing", "overestimating", - "paychologically", "psychologically", - "performancetest", "performances", - "performancewise", "performances", - "perpendiculaire", "perpendicular", - "pharamceuticals", "pharmaceutical", - "pharmacueticals", "pharmaceutical", - "philoshopically", "philosophically", - "philosohpically", "philosophically", - "philosophycally", "philosophically", - "phsycologically", "psychologically", - "phychologically", "psychologically", - "phylosophically", "philosophically", - "physcologically", "psychologically", - "precrastination", "procrastination", - "prefessionalism", "professionalism", - "premonasterians", "premonstratensians", - "procastrinating", "procrastinating", - "procastrination", "procrastination", - "procrascinating", "procrastinating", - "procrastenating", "procrastinating", - "procrastiantion", "procrastination", - "procrastibating", "procrastinating", - "procrastibation", "procrastination", - "procrastonating", "procrastinating", - "procrestinating", "procrastinating", - "procrestination", "procrastination", - "professionalsim", "professionalism", - "prograstination", "procrastination", - "progressionists", "progressions", - "progressionwise", "progressions", - "prokrastination", "procrastination", - "proportionallly", "proportionally", - "proscratination", "procrastination", - "pscyhologically", "psychologically", - "pshycologically", "psychologically", - "psichologically", "psychologically", - "psychedelicious", "psychedelics", - "psychedelicness", "psychedelics", - "psycholigically", "psychologically", - "psychopathische", "psychopathic", - "pyschologically", "psychologically", - "racionalization", "rationalization", - "rationalizaiton", "rationalization", - "rationalizating", "rationalization", - "reccomendations", "recommendations", - "recommandations", "recommendations", - "recommondations", "recommendations", - "reinitalization", "reinitialization", - "repersentations", "representations", - "represantations", "representations", - "represantatives", "representatives", - "representatieve", "representative", - "representativas", "representatives", - "representetives", "representatives", - "representitives", "representatives", - "responibilities", "responsibilities", - "responsibilites", "responsibilities", - "responsibilitys", "responsibilities", - "responsibillity", "responsibility", - "responsibilties", "responsibilities", - "responsiblities", "responsibilities", - "ridiculoussness", "ridiculousness", - "saskatchewinian", "saskatchewan", - "satisfactorally", "satisfactory", - "satisfactorilly", "satisfactory", - "schizophreniiic", "schizophrenic", - "sensationalisim", "sensationalism", - "spreadsheeticus", "spreadsheets", - "starightforward", "straightforward", - "straigthforward", "straightforward", - "striaghtforward", "straightforward", - "sustainabillity", "sustainability", - "technoligically", "technologically", - "troubelshooting", "troubleshooting", - "troublehsooting", "troubleshooting", - "troubleshotting", "troubleshooting", - "trustworthyness", "trustworthiness", - "ubsubstantiated", "unsubstantiated", - "unappropriately", "inappropriately", - "uncomfortablely", "uncomfortably", - "uncomfortablity", "uncomfortably", - "unconditionable", "unconditional", - "unconstituional", "unconstitutional", - "uncontitutional", 
"unconstitutional", - "uncontrollabley", "uncontrollably", - "uncontrollablly", "uncontrollably", - "unconventionnal", "unconventional", - "underastimating", "underestimating", - "underestemating", "underestimating", - "understandabley", "understandably", - "unintensionally", "unintentionally", - "unprofessionnal", "unprofessional", - "unresponsivness", "unresponsive", - "unsibstantiated", "unsubstantiated", - "unsubstanciated", "unsubstantiated", - "unsubstansiated", "unsubstantiated", - "unsusbtantiated", "unsubstantiated", - "untranslateable", "untranslatable", - "vulernabilities", "vulnerabilities", - "vulnarabilities", "vulnerabilities", - "vulnurabilities", "vulnerabilities", - "vunlerabilities", "vulnerabilities", - "vurnerabilities", "vulnerabilities", - "accomplishemnt", "accomplishment", - "accomplishents", "accomplishes", - "acconplishment", "accomplishment", - "acknowledgeing", "acknowledging", - "acknowledgemnt", "acknowledgement", - "acomplishments", "accomplishments", - "administartion", "administration", - "administartors", "administrators", - "administraters", "administrators", - "administratief", "administrative", - "administratiei", "administrative", - "administratior", "administrator", - "administrativo", "administration", - "adminsitration", "administration", - "adminsitrative", "administrative", - "adminsitrators", "administrators", - "affectionatley", "affectionate", - "aforememtioned", "aforementioned", - "aforementioend", "aforementioned", - "alternativelly", "alternatively", - "amministrative", "administrative", - "anitdepressant", "antidepressants", - "approproximate", "approximate", - "approximatelly", "approximately", - "archeaologists", "archeologists", - "architechtures", "architectures", - "architectureal", "architectural", - "architecturial", "architectural", - "assassintation", "assassination", - "authenitcation", "authentication", - "authenticaiton", "authentication", - "authobiography", "autobiography", - "breakthroughts", "breakthroughs", - "bureaucratisch", "bureaucratic", - "calssification", "classification", - "capatilization", "capitalization", - "capitalizacion", "capitalization", - "capitalizaiton", "capitalization", - "capitalizating", "capitalization", - "capitilazation", "capitalization", - "capitolization", "capitalization", - "captialization", "capitalization", - "cardiocascular", "cardiovascular", - "cardiovascualr", "cardiovascular", - "cardiovasuclar", "cardiovascular", - "caridovascular", "cardiovascular", - "cessationalism", "sensationalism", - "cessationalist", "sensationalist", - "charactaristic", "characteristic", - "characterisics", "characteristics", - "characterisitc", "characteristics", - "characteristcs", "characteristics", - "characteritics", "characteristic", - "charactersitic", "characteristics", - "charasteristic", "characteristics", - "charecteristic", "characteristic", - "cheeseburguers", "cheeseburgers", - "cinematagraphy", "cinematography", - "cinematagrophy", "cinematography", - "cinematograhpy", "cinematography", - "cinematogrophy", "cinematography", - "cinematogrpahy", "cinematography", - "cinemetography", "cinematography", - "cinimatography", "cinematography", - "circumstansial", "circumstantial", - "circumstantual", "circumstantial", - "circumstential", "circumstantial", - "circunstantial", "circumstantial", - "classificaiton", "classification", - "coincedentally", "coincidentally", - "coinsidentally", "coincidentally", - "commemmorating", "commemorating", - "communciations", "communications", - "compatablities", 
"compatibilities", - "compatibillity", "compatibility", - "compatiblities", "compatibilities", - "competitioners", "competitions", - "comphrehensive", "comprehensive", - "computationnal", "computational", - "concatentation", "concatenation", - "conciderations", "considerations", - "condescenscion", "condescension", - "condradictions", "contradictions", - "configuartions", "configurations", - "confugurations", "configurations", - "conglaturation", "congratulations", - "congratulatons", "congratulations", - "conicidentally", "coincidentally", - "conifgurations", "configurations", - "conscioussness", "consciousness", - "consentrations", "concentrations", - "consiciousness", "consciousness", - "considerablely", "considerably", - "considerstions", "considerations", - "constititional", "constitutional", - "constitucional", "constitutional", - "contamporaries", "contemporaries", - "contemporaneus", "contemporaneous", - "contraceptivos", "contraceptives", - "contradicitons", "contradictions", - "contradictiong", "contradicting", - "contriceptives", "contraceptives", - "controceptives", "contraceptives", - "controdictions", "contradictions", - "conversacional", "conversational", - "converstaional", "conversational", - "correpsondence", "correspondence", - "correspondants", "correspondents", - "correspondense", "correspondence", - "correspondente", "correspondence", - "corrispondants", "correspondents", - "corrispondence", "correspondence", - "corrospondence", "correspondence", - "costumizations", "customization", - "councidentally", "coincidentally", - "crystalisation", "crystallisation", - "curcumstantial", "circumstantial", - "demenstrations", "demonstrations", - "deminstrations", "demonstrations", - "demonstartions", "demonstrations", - "demonstrativno", "demonstrations", - "demonstrativos", "demonstrations", - "demosntrations", "demonstrations", - "desintegration", "disintegration", - "deterioriating", "deteriorating", - "determinisitic", "deterministic", - "differentiaton", "differentiation", - "disatisfaction", "dissatisfaction", - "discrimanatory", "discriminatory", - "discriminacion", "discrimination", - "discriminitory", "discriminatory", - "disillusionned", "disillusioned", - "diskrimination", "discrimination", - "disproportiate", "disproportionate", - "distingiushing", "distinguishing", - "distingquished", "distinguished", - "distingusihing", "distinguishing", - "distinquishing", "distinguishing", - "distuingishing", "distinguishing", - "dysfunctionnal", "dysfunctional", - "eldistribution", "redistribution", - "electromagnetc", "electromagnetic", - "electromagntic", "electromagnetic", - "endoctrination", "indoctrination", - "enthusiastisch", "enthusiastic", - "entrepreneuers", "entrepreneurs", - "entrepreneures", "entrepreneurs", - "enviormentally", "environmentally", - "enviromentally", "environmentally", - "environmentals", "environments", - "environmentaly", "environmentally", - "experimentaion", "experimentation", - "experimentella", "experimental", - "extraordinairy", "extraordinary", - "extraordinarly", "extraordinary", - "extrordinarily", "extraordinarily", - "fondamentalist", "fundamentalist", - "foreshadowning", "foreshadowing", - "functionallity", "functionality", - "fundamendalist", "fundamentalist", - "fundamentalits", "fundamentalists", - "fundamnetalist", "fundamentalist", - "fundemantalist", "fundamentalist", - "fundimentalist", "fundamentalist", - "fundumentalist", "fundamentalist", - "generalizacion", "generalization", - "generalizating", "generalization", - 
"generelization", "generalization", - "geographacilly", "geographically", - "geographycally", "geographically", - "geogrpahically", "geographically", - "geopraphically", "geographically", - "goegraphically", "geographically", - "grandchilderen", "grandchildren", - "gravitationnal", "gravitational", - "groubdbreaking", "groundbreaking", - "groudnbreaking", "groundbreaking", - "hallcuinations", "hallucination", - "hallicunations", "hallucinations", - "hallucenations", "hallucinations", - "halluciantions", "hallucinations", - "hallucinaitons", "hallucination", - "hallunications", "hallucinations", - "hallusinations", "hallucinations", - "halluzinations", "hallucinations", - "hellucinations", "hallucinations", - "heterosexuella", "heterosexual", - "hipothetically", "hypothetically", - "homosexuallity", "homosexuality", - "hullucinations", "hallucinations", - "hyopthetically", "hypothetically", - "hypathetically", "hypothetically", - "hypethetically", "hypothetically", - "hypotehtically", "hypothetically", - "hypotethically", "hypothetically", - "identificacion", "identification", - "identificaiton", "identification", - "identificativo", "identification", - "identifikation", "identification", - "imlpementation", "implementations", - "impelmentation", "implementations", - "impersonationg", "impersonating", - "implementacion", "implementation", - "implementaiton", "implementation", - "implementating", "implementation", - "implementatino", "implementations", - "implemetnation", "implementations", - "implimentation", "implementation", - "impossibillity", "impossibility", - "inadvertantely", "inadvertently", - "inappropriatly", "inappropriately", - "inapproprietly", "inappropriately", - "incompatablity", "incompatibility", - "incompatiblity", "incompatibility", - "inconsequental", "inconsequential", - "inconsistentcy", "inconsistency", - "incontrollably", "uncontrollably", - "inconventional", "unconventional", - "inconvienenced", "inconvenience", - "indestrictible", "indestructible", - "indestructuble", "indestructible", - "indetification", "identification", - "indistructible", "indestructible", - "individuallity", "individuality", - "indocrtination", "indoctrination", - "indoctrication", "indoctrination", - "indoktrination", "indoctrination", - "industiralized", "industrialized", - "industrailized", "industrialized", - "industrualized", "industrialized", - "industructible", "indestructible", - "inexplicablely", "inexplicably", - "infrastracture", "infrastructure", - "infrastructuur", "infrastructure", - "infrastrucutre", "infrastructure", - "infrastrukture", "infrastructure", - "infrastrutture", "infrastructure", - "infrasturcture", "infrastructure", - "initalisations", "initialisations", - "initalizations", "initializations", - "inplementation", "implementation", - "inspirationnal", "inspirational", - "instinctivelly", "instinctively", - "institutionale", "institutionalized", - "institutionals", "institutions", - "institutionnal", "institutional", - "intellectualis", "intellectuals", - "intellectualls", "intellectuals", - "intellecutally", "intellectually", - "intercepticons", "interceptions", - "interchangable", "interchangeable", - "interchangably", "interchangeably", - "interchangeble", "interchangeable", - "interchangebly", "interchangeably", - "interlectually", "intellectually", - "internationaal", "international", - "internationaly", "internationally", - "internationnal", "international", - "interpersonnal", "interpersonal", - "interpertation", "interpretation", - "interpratation", 
"interpretation", - "interpretacion", "interpretation", - "interpretaiton", "interpretations", - "interpretating", "interpretation", - "interpritation", "interpretation", - "interstellaire", "interstellar", - "intillectually", "intellectually", - "intrepretation", "interpretation", - "invesitgations", "investigations", - "investiagtions", "investigations", - "investigatiors", "investigations", - "investigativos", "investigations", - "investigstions", "investigations", - "irrationallity", "irrationally", - "irresponsibile", "irresponsible", - "journalistisch", "journalistic", - "justificativos", "justifications", - "koncentrations", "concentrations", - "liberatrianism", "libertarianism", - "libertarainism", "libertarianism", - "libertariansim", "libertarianism", - "libertarinaism", "libertarianism", - "libertaryanism", "libertarianism", - "libertatianism", "libertarianism", - "liberterianism", "libertarianism", - "libretarianism", "libertarianism", - "manufactureers", "manufactures", - "manufactureras", "manufactures", - "manufacturered", "manufactured", - "manufactureres", "manufacturers", - "manufactureros", "manufactures", - "massachusettes", "massachusetts", - "massachussetts", "massachusetts", - "mataphorically", "metaphorically", - "mathameticians", "mathematicians", - "mathemagically", "mathematically", - "mathematitians", "mathematicians", - "mathemetically", "mathematically", - "mathemeticians", "mathematicians", - "mathimatically", "mathematically", - "mediterainnean", "mediterranean", - "mediterrannean", "mediterranean", - "metaphotically", "metaphorically", - "metephorically", "metaphorically", - "methaporically", "metaphorically", - "metiphorically", "metaphorically", - "metophorically", "metaphorically", - "metropolitaine", "metropolitan", - "misconseptions", "misconceptions", - "misinterperted", "misinterpreted", - "misintrepreted", "misinterpreted", - "mulitnationals", "multinational", - "mulitplication", "multiplication", - "multiplicacion", "multiplication", - "multiplicaiton", "multiplication", - "multiplicativo", "multiplication", - "multiplikation", "multiplication", - "mutlinationals", "multinational", - "mutliplication", "multiplication", - "nationalisitic", "nationalistic", - "nationalistics", "nationalists", - "nationalisties", "nationalists", - "nationalistisk", "nationalists", - "neighbourhoood", "neighbourhood", - "nieghbourhoods", "neighbourhood", - "northereastern", "northeastern", - "objectificaton", "objectification", - "opthalmologist", "ophthalmologist", - "organizacional", "organizational", - "organizaitonal", "organizational", - "organziational", "organizational", - "orginazational", "organizational", - "overestemating", "overestimating", - "overextimating", "overestimating", - "overhwelmingly", "overwhelmingly", - "overhwlemingly", "overwhelmingly", - "overpolulation", "overpopulation", - "overpopluation", "overpopulation", - "oversetimating", "overestimating", - "overshadowered", "overshadowed", - "overwhemlingly", "overwhelmingly", - "overwhlemingly", "overwhelmingly", - "paliamentarian", "parliamentarian", - "parliamentiary", "parliamentary", - "performancepcs", "performances", - "personalitites", "personalities", - "pharamceutical", "pharmaceutical", - "pharmaceudical", "pharmaceutical", - "pharmacuetical", "pharmaceutical", - "pharmaseutical", "pharmaceutical", - "pharmeceutical", "pharmaceutical", - "philosophicaly", "philosophically", - "phramaceutical", "pharmaceutical", - "playthroughers", "playthroughs", - "porportionally", "proportionally", - 
"practitionners", "practitioners", - "predeterminded", "predetermined", - "predominantely", "predominantly", - "predominantley", "predominantly", - "preinitalizing", "preinitializing", - "prerequisities", "prerequisite", - "procrastinatin", "procrastination", - "procrastinaton", "procrastination", - "professionials", "professionalism", - "professionnals", "professionals", - "profitabillity", "profitability", - "progressivelly", "progressively", - "progressivisme", "progressives", - "pronounciation", "pronunciation", - "proportianally", "proportionally", - "proportionalty", "proportionally", - "proportionella", "proportionally", - "proprotionally", "proportionally", - "protruberances", "protuberances", - "pseudononymous", "pseudonymous", - "psychologicaly", "psychologically", - "qaulifications", "qualification", - "qualifiactions", "qualification", - "qualificaitons", "qualifications", - "quarterbackers", "quarterbacks", - "rationalizaton", "rationalization", - "reaponsibility", "responsibility", - "recommandation", "recommendation", - "recommedations", "recommendations", - "recommondation", "recommendation", - "reconnaissence", "reconnaissance", - "reconstruccion", "reconstruction", - "reconsturction", "reconstruction", - "redistirbution", "redistribution", - "redistribucion", "redistribution", - "redistributivo", "redistribution", - "redistrubition", "redistribution", - "refridgeration", "refrigeration", - "rehabilitacion", "rehabilitation", - "rehabilitaiton", "rehabilitation", - "reinforcemnets", "reinforcements", - "rekommendation", "recommendation", - "rektifications", "certifications", - "reniforcements", "reinforcements", - "repersentation", "representation", - "represantation", "representation", - "represantative", "representative", - "representacion", "representation", - "representaiton", "representations", - "representatief", "representative", - "representating", "representation", - "representativo", "representation", - "representetive", "representative", - "representitive", "representative", - "representstion", "representations", - "representstive", "representatives", - "represetnation", "representations", - "represnetation", "representations", - "reprezentative", "representative", - "repsonsibility", "responsibility", - "resistribution", "redistribution", - "responcibility", "responsibility", - "responisbility", "responsibility", - "responnsibilty", "responsibility", - "responsability", "responsibility", - "responsibilies", "responsibilities", - "responsibities", "responsibilities", - "restaraunteurs", "restaurateurs", - "retroactivelly", "retroactively", - "revolutionairy", "revolutionary", - "revolutionnary", "revolutionary", - "ridicilousness", "ridiculousness", - "ridicoulusness", "ridiculousness", - "rienforcements", "reinforcements", - "righteoussness", "righteousness", - "satisfactoraly", "satisfactory", - "satisfactority", "satisfactorily", - "sceintifically", "scientifically", - "schizophrentic", "schizophrenic", - "screenwrighter", "screenwriter", - "sensacionalism", "sensationalism", - "sensacionalist", "sensationalist", - "sensasionalism", "sensationalism", - "sensasionalist", "sensationalist", - "sensationality", "sensationalist", - "sensationalizm", "sensationalism", - "sensationalsim", "sensationalism", - "sensationilism", "sensationalism", - "sensationilist", "sensationalist", - "sensationslism", "sensationalism", - "sensetionalism", "sensationalism", - "sensibilisiert", "sensibilities", - "sentationalism", "sensationalism", - "sentationalist", "sensationalist", 
- "senzationalism", "sensationalism", - "senzationalist", "sensationalist", - "sepcifications", "specification", - "simaltaneously", "simultaneously", - "simeltaneously", "simultaneously", - "similtaneously", "simultaneously", - "simlutaneously", "simultaneously", - "simplificacion", "simplification", - "simplificaiton", "simplification", - "simplificating", "simplification", - "simulatenously", "simultaneously", - "simulatneously", "simultaneously", - "simultaenously", "simultaneously", - "simultainously", "simultaneously", - "simultaneoulsy", "simultaneously", - "simultaniously", "simultaneously", - "simulteanously", "simultaneously", - "sistematically", "systematically", - "slaugterhouses", "slaughterhouses", - "specailization", "specialization", - "specialication", "specialization", - "specializaiton", "specialization", - "specificaitons", "specification", - "speciliazation", "specialization", - "spectacularely", "spectacularly", - "spectacularily", "spectacularly", - "spesifications", "specifications", - "spezialisation", "specialization", - "sportsmansship", "sportsmanship", - "spreadsheeters", "spreadsheets", - "straightforwad", "straightforward", - "subconcsiously", "subconsciously", - "subconsicously", "subconsciously", - "subsconciously", "subconsciously", - "sunconsciously", "subconsciously", - "superintendant", "superintendent", - "suppliementing", "supplementing", - "surrepetitious", "surreptitious", - "survivabililty", "survivability", - "survivabillity", "survivability", - "sustainabiltiy", "sustainability", - "syncronization", "synchronization", - "systemetically", "systematically", - "systimatically", "systematically", - "technologicaly", "technologically", - "thermodinamics", "thermodynamics", - "thermodyanmics", "thermodynamics", - "thermodymamics", "thermodynamics", - "thermodymanics", "thermodynamics", - "thermodynamcis", "thermodynamics", - "thermodynanics", "thermodynamics", - "thermodynmaics", "thermodynamics", - "thernodynamics", "thermodynamics", - "theromdynamics", "thermodynamics", - "transformacion", "transformation", - "transfromation", "transformation", - "transitionable", "transitional", - "transitionning", "transitioning", - "transofrmation", "transformation", - "trasnformation", "transformation", - "trasnportation", "transportation", - "unbelievablely", "unbelievably", - "unchallengable", "unchallengeable", - "uncomfortabley", "uncomfortably", - "uncomfortablly", "uncomfortably", - "unconciousness", "unconsciousness", - "unconditionaly", "unconditionally", - "unconditionnal", "unconditional", - "unconsciouslly", "unconsciously", - "uncontrallable", "uncontrollable", - "uncontrallably", "uncontrollably", - "uncontrolablly", "uncontrollably", - "unconvectional", "unconventional", - "unconvencional", "unconventional", - "unconvensional", "unconventional", - "unconventianal", "unconventional", - "underastimated", "underestimated", - "underestamated", "underestimated", - "underestemated", "underestimated", - "underestimeted", "underestimated", - "undersetimated", "underestimated", - "understandebly", "understandably", - "understandible", "understandable", - "understandibly", "understandably", - "undestructible", "indestructible", - "unforetunately", "unfortunately", - "unfortunatelly", "unfortunately", - "unfourtunately", "unfortunately", - "uninitalizable", "uninitializable", - "unintelligient", "unintelligent", - "unintentionaly", "unintentionally", - "unintentionnal", "unintentional", - "unmanouverable", "unmaneuverable", - "unneccessarily", "unnecessarily", 
- "unnecessarilly", "unnecessarily", - "unprecendented", "unprecedented", - "unprofessionel", "unprofessional", - "unreasonablely", "unreasonably", - "unsubstantiaed", "unsubstantiated", - "unsurprizingly", "unsurprisingly", - "vizualisations", "visualization", - "vulnerabilites", "vulnerabilities", - "vulnerabillity", "vulnerability", - "vulnerablility", "vulnerability", - "wholeheartadly", "wholeheartedly", - "wholeheartidly", "wholeheartedly", - "abbrievations", "abbreviation", - "accelleration", "acceleration", - "accomadations", "accommodations", - "accommadating", "accommodating", - "accommadation", "accommodation", - "accommidation", "accommodation", - "accomodations", "accommodations", - "accomondating", "accommodating", - "accomondation", "accommodation", - "accomplishent", "accomplishment", - "accountabilty", "accountability", - "accredidation", "accreditation", - "acknolwedging", "acknowledging", - "acknowlegding", "acknowledging", - "acomplishment", "accomplishment", - "acquaintaince", "acquaintance", - "acquaintences", "acquaintances", - "acquaintinces", "acquaintances", - "acquanitances", "acquaintance", - "acquantainces", "acquaintances", - "acquantiances", "acquaintances", - "acquiantances", "acquaintances", - "acquiantences", "acquaintances", - "adminastrator", "administrator", - "administartor", "administrator", - "administraion", "administration", - "administraron", "administrator", - "administrater", "administrator", - "administratio", "administrator", - "administraton", "administration", - "adminsitrator", "administrator", - "adminstration", "administration", - "adminstrative", "administrative", - "admissability", "admissibility", - "adnimistrator", "administrators", - "adverticement", "advertisement", - "advertisiment", "advertisement", - "advertisments", "advertisements", - "advirtisement", "advertisement", - "aestethically", "aesthetically", - "aesthatically", "aesthetically", - "aesthitically", "aesthetically", - "affectionnate", "affectionate", - "aforementiond", "aforementioned", - "agriculturual", "agricultural", - "agrumentative", "argumentative", - "alterantively", "alternatively", - "alternativets", "alternatives", - "alternativley", "alternatively", - "alternitavely", "alternatively", - "alternitively", "alternatively", - "aninteresting", "uninteresting", - "annoucnements", "announcements", - "antagonisitic", "antagonistic", - "anthropolgist", "anthropologist", - "apporpriately", "appropriately", - "apporpriation", "appropriation", - "apporximately", "approximately", - "appreciateing", "appreciating", - "appreciateive", "appreciative", - "appreciationg", "appreciating", - "appropirately", "appropriately", - "appropiration", "appropriation", - "appropraitely", "appropriately", - "appropreation", "appropriation", - "appropriatley", "appropriately", - "appropropiate", "appropriate", - "approrpiation", "appropriation", - "approxamately", "approximately", - "approxiamtely", "approximately", - "approximatley", "approximately", - "approximitely", "approximately", - "aqcuaintances", "acquaintances", - "aqquaintances", "acquaintances", - "archaelogical", "archaeological", - "archaelogists", "archaeologists", - "archeaologist", "archeologist", - "archetectural", "architectural", - "architechture", "architecture", - "architechural", "architectural", - "architectrual", "architectural", - "architecutral", "architectural", - "argumentitive", "argumentative", - "arugmentative", "argumentative", - "asethetically", "aesthetically", - "assasinations", "assassinations", - 
"audomoderator", "automoderator", - "australianess", "australians", - "authenticaion", "authentication", - "authenticaton", "authentication", - "autherization", "authorization", - "authoratitive", "authoritative", - "authoritatian", "authoritarian", - "authoritation", "authorization", - "authorititive", "authoritative", - "authoritorian", "authoritarian", - "authorotative", "authoritative", - "authroization", "authorization", - "automoderador", "automoderator", - "automoderater", "automoderator", - "automodorator", "automoderator", - "automoterator", "automoderator", - "autoritharian", "authoritarian", - "availabillity", "availability", - "awknowledging", "acknowledging", - "billingualism", "bilingualism", - "billionairres", "billionaire", - "borderlanders", "borderlands", - "breadtfeeding", "breastfeeding", - "breastfeading", "breastfeeding", - "breatsfeeding", "breastfeeding", - "broadacasting", "broadcasting", - "bureaucractic", "bureaucratic", - "bureaucratics", "bureaucrats", - "bureaucratius", "bureaucrats", - "californiaman", "californian", - "calrification", "clarification", - "capitalizaton", "capitalization", - "carbohdyrates", "carbohydrates", - "carbohidrates", "carbohydrates", - "carbohyrdates", "carbohydrates", - "carboyhdrates", "carbohydrates", - "carthographer", "cartographer", - "catagorically", "categorically", - "catastrophies", "catastrophe", - "catastrophize", "catastrophe", - "catigorically", "categorically", - "catterpillars", "caterpillars", - "celebrationis", "celebrations", - "ceritfication", "certifications", - "certificaiton", "certification", - "championchips", "championship", - "championshiop", "championships", - "championsship", "championships", - "chanpionships", "championships", - "charactarized", "characterized", - "characterisic", "characteristic", - "characteristc", "characteristics", - "characterists", "characteristics", - "charicterized", "characterized", - "charismatisch", "charismatic", - "checkpointusa", "checkpoints", - "cheeseburgare", "cheeseburger", - "cheeseburgler", "cheeseburger", - "cheeseburguer", "cheeseburger", - "cheezeburgers", "cheeseburgers", - "chornological", "chronological", - "chronoligical", "chronological", - "chronologicly", "chronological", - "cinematograhy", "cinematography", - "cinematograpy", "cinematography", - "circomference", "circumference", - "circumcission", "circumcision", - "circumferance", "circumference", - "circumsicions", "circumcision", - "circumstanial", "circumstantial", - "circumstantal", "circumstantial", - "circumstnaces", "circumstance", - "circunference", "circumference", - "circunstances", "circumstances", - "cirucmference", "circumference", - "cirucmstances", "circumstances", - "civilications", "civilizations", - "civilizaitons", "civilizations", - "clarificaiton", "clarification", - "clasification", "clarification", - "clerification", "clarification", - "coincidentaly", "coincidentally", - "coincidential", "coincidental", - "colaborations", "collaborations", - "collabaration", "collaboration", - "collaberation", "collaboration", - "collaberative", "collaborative", - "collaboratore", "collaborate", - "collectioners", "collections", - "collectivelly", "collectively", - "collobaration", "collaboration", - "combatibility", "compatibility", - "comeptitively", "competitively", - "comfortablely", "comfortably", - "comfortablity", "comfortably", - "comfrontation", "confrontation", - "commemerative", "commemorative", - "commericially", "commercially", - "commerorative", "commemorative", - "comminication", 
"communication", - "comminucation", "communications", - "commissionees", "commissions", - "commissionned", "commissioned", - "commissionner", "commissioner", - "commmemorated", "commemorated", - "commuications", "communications", - "commuincation", "communications", - "communciation", "communication", - "communiaction", "communications", - "communicaiton", "communication", - "communicatoin", "communications", - "communicatons", "communications", - "compadibility", "compatibility", - "comparativley", "comparatively", - "comparetively", "comparatively", - "comparitavely", "comparatively", - "comparitively", "comparatively", - "compatability", "compatibility", - "compatibiltiy", "compatibility", - "compeditively", "competitively", - "compensantion", "compensation", - "compensationg", "compensating", - "comperatively", "comparatively", - "comperhension", "comprehension", - "competatively", "competitively", - "competitavely", "competitively", - "competitevely", "competitively", - "competitivley", "competitively", - "competiveness", "competitiveness", - "compilcations", "complication", - "compitability", "compatibility", - "complciations", "complication", - "complecations", "complications", - "compliactions", "complication", - "complicaitons", "complication", - "complilations", "complications", - "complimentery", "complimentary", - "complimentoni", "complimenting", - "complimentory", "complimentary", - "comprehention", "comprehension", - "computacional", "computational", - "comtamination", "contamination", - "comtemplating", "contemplating", - "concatination", "contamination", - "conceivablely", "conceivably", - "concencration", "concentration", - "concenrtation", "concentrations", - "concentartion", "concentrations", - "concentracion", "concentration", - "concentraited", "concentrated", - "concentraiton", "concentrations", - "concentratons", "concentrations", - "concervatives", "conservatives", - "concideration", "consideration", - "concioussness", "consciousness", - "concnetration", "concentrations", - "concsiousness", "consciousness", - "condascending", "condescending", - "condescencion", "condescension", - "condescendion", "condescension", - "condescensing", "condescension", - "condiscending", "condescending", - "conditionning", "conditioning", - "condradicting", "contradicting", - "condradiction", "contradiction", - "condradictory", "contradictory", - "conecntration", "concentrations", - "conenctration", "concentrations", - "confidentally", "confidentially", - "configrations", "configurations", - "configruation", "configurations", - "configuartion", "configuration", - "configuracion", "configuration", - "configuraiton", "configuration", - "configuratoin", "configurations", - "configureable", "configurable", - "confrentation", "confrontation", - "confrontacion", "confrontation", - "confrontating", "confrontation", - "confrontativo", "confrontation", - "congratualted", "congratulate", - "conifguration", "configurations", - "conisderation", "considerations", - "connecticunts", "connecticut", - "connectivitiy", "connectivity", - "conpassionate", "compassionate", - "conplications", "complications", - "conplimentary", "complimentary", - "conplimenting", "complimenting", - "conprehension", "comprehension", - "consdieration", "considerations", - "consenquently", "consequently", - "consentrating", "concentrating", - "consentration", "concentration", - "consequencies", "consequence", - "consequentely", "consequently", - "consequeseces", "consequences", - "conservatisim", "conservatism", - 
"conservativsm", "conservatism", - "conservitives", "conservatives", - "consicousness", "consciousness", - "considerabely", "considerable", - "considerabile", "considerable", - "considerabley", "considerably", - "considerablly", "considerably", - "consideracion", "consideration", - "consideratoin", "considerations", - "considerstion", "considerations", - "considertaion", "considerations", - "consituencies", "constituencies", - "consitutional", "constitutional", - "constallation", "constellation", - "constarnation", "consternation", - "constillation", "constellation", - "constituintes", "constituents", - "constituional", "constitutional", - "constitutents", "constitutes", - "constitutinal", "constitutional", - "constructicon", "construction", - "constructieve", "constructive", - "constructiong", "constructing", - "consttruction", "construction", - "contaminacion", "contamination", - "contaminanted", "contaminated", - "contanimation", "contamination", - "contenplating", "contemplating", - "contimplating", "contemplating", - "contraceptivo", "contraception", - "contradiccion", "contradiction", - "contradicitng", "contradicting", - "contradiciton", "contradiction", - "contradictary", "contradictory", - "contradictons", "contradicts", - "contraticting", "contradicting", - "contravercial", "controversial", - "contraversial", "controversial", - "contreception", "contraception", - "contreversial", "controversial", - "contributeurs", "contributes", - "contributiors", "contributors", - "contriception", "contraception", - "contridictory", "contradictory", - "contritutions", "contributions", - "contriversial", "controversial", - "controception", "contraception", - "controdicting", "contradicting", - "controdiction", "contradiction", - "controvercial", "controversial", - "controverisal", "controversial", - "controversary", "controversy", - "controversity", "controversy", - "controvertial", "controversial", - "contstruction", "construction", - "conventionnal", "conventional", - "converastions", "conservation", - "conversationa", "conservation", - "conversationg", "conservation", - "conversationy", "conservation", - "conversatiosn", "conservation", - "conversatives", "conservatives", - "converstaions", "conversations", - "convorsations", "conversations", - "cooresponding", "corresponding", - "coorperations", "corporations", - "correctionals", "corrections", - "correpsonding", "corresponding", - "correspondant", "correspondent", - "correspondece", "correspondence", - "corresponders", "corresponds", - "corresponsing", "corresponding", - "corrispondant", "correspondent", - "corrisponding", "corresponding", - "corrosponding", "corresponding", - "costomization", "customization", - "costumization", "customization", - "counterfeight", "counterfeit", - "creationistas", "creationists", - "cricumference", "circumference", - "cringeworthey", "cringeworthy", - "cringeworthly", "cringeworthy", - "crytopgraphic", "cryptographic", - "curcumference", "circumference", - "curcumstances", "circumstances", - "custumization", "customization", - "cuztomization", "customization", - "decentraliced", "decentralized", - "decentrilized", "decentralized", - "decomissioned", "decommissioned", - "decompositing", "decomposing", - "definitivelly", "definitively", - "deinitalizing", "deinitializing", - "demenstration", "demonstration", - "democraticaly", "democratically", - "democraticlly", "democratically", - "demoninations", "denominations", - "demonstarting", "demonstrating", - "demonstartion", "demonstration", - "demonstraiton", 
"demonstrations", - "demonstratbly", "demonstrably", - "demonstraties", "demonstrate", - "demonstrativo", "demonstration", - "demosntrating", "demonstrating", - "demosntration", "demonstrations", - "denomenations", "denominations", - "denomonations", "denominations", - "deomnstration", "demonstrations", - "dermatalogist", "dermatologist", - "dermatolagist", "dermatologist", - "dermatoligist", "dermatologist", - "dermatologyst", "dermatologist", - "dermetologist", "dermatologist", - "dermitologist", "dermatologist", - "derpatologist", "dermatologist", - "desentralized", "decentralized", - "desillusioned", "disillusioned", - "desintegrated", "disintegrated", - "desinterested", "disinterested", - "determenation", "determination", - "determinacion", "determination", - "determinining", "determining", - "determinisitc", "deterministic", - "determinsitic", "deterministic", - "detmatologist", "dermatologist", - "developmently", "developmental", - "dezentralized", "decentralized", - "differantiate", "differentiate", - "differenciate", "differentiate", - "differintiate", "differentiate", - "diffirentiate", "differentiate", - "disadvandages", "disadvantaged", - "disadvantadge", "disadvantaged", - "disadvanteged", "disadvantaged", - "disadvanteges", "disadvantages", - "disadvatanges", "disadvantages", - "disadventaged", "disadvantaged", - "disadventages", "disadvantages", - "disallusioned", "disillusioned", - "disappearence", "disappearance", - "disappearnace", "disappearance", - "disappearring", "disappearing", - "disatvantaged", "disadvantaged", - "disatvantages", "disadvantages", - "disciplinairy", "disciplinary", - "disciplinerad", "disciplined", - "discipliniary", "disciplinary", - "disconnecters", "disconnects", - "discontinuted", "discontinued", - "discrimianted", "discriminated", - "discriminante", "discriminate", - "discriminatie", "discriminate", - "discriminatin", "discrimination", - "disillisioned", "disillusioned", - "disillutioned", "disillusioned", - "disingenuious", "disingenuous", - "disollusioned", "disillusioned", - "disrecpectful", "disrespectful", - "disrecpecting", "disrespecting", - "disrepsectful", "disrespectful", - "disrepsecting", "disrespecting", - "disresepctful", "disrespectful", - "disresepcting", "disrespecting", - "disrespection", "disrespecting", - "disrespekting", "disrespecting", - "disrispectful", "disrespectful", - "disrispecting", "disrespecting", - "dissagreement", "disagreement", - "dissapearance", "disappearance", - "dissapointted", "dissapointed", - "dissappointed", "disappointed", - "dissobediance", "disobedience", - "dissobedience", "disobedience", - "distingishing", "distinguishing", - "distinguising", "distinguishing", - "distinquished", "distinguished", - "distirbutions", "distributions", - "distiungished", "distinguished", - "distribustion", "distributions", - "distributiors", "distributors", - "distributivos", "distributions", - "distrobutions", "distributions", - "distrubitions", "distributions", - "distuingished", "distinguished", - "documantaries", "documentaries", - "documenatries", "documentaries", - "documentacion", "documentation", - "documentaires", "documentaries", - "documentaiton", "documentation", - "documentarios", "documentaries", - "documentaties", "documentaries", - "documentating", "documentation", - "documenteries", "documentaries", - "documentories", "documentaries", - "drammatically", "grammatically", - "dsyfunctional", "dysfunctional", - "dumbfoundeads", "dumbfounded", - "dusfunctional", "dysfunctional", - "dustification", 
"justification", - "dysfonctional", "dysfunctional", - "dysfucntional", "dysfunctional", - "dysfuncitonal", "dysfunctional", - "dysfunktional", "dysfunctional", - "easthetically", "aesthetically", - "effectiviness", "effectiveness", - "effictiveness", "effectiveness", - "effortlessely", "effortlessly", - "effortlessley", "effortlessly", - "embarrasement", "embarrassment", - "embarrasments", "embarrassment", - "embarressment", "embarrassment", - "emberrassment", "embarrassment", - "encarceration", "incarceration", - "encorporating", "incorporating", - "encyclopeadia", "encyclopedia", - "encyclopeadic", "encyclopedia", - "encyclopeedia", "encyclopedia", - "encycolpedias", "encyclopedia", - "endoctrinated", "indoctrinated", - "enlightenting", "enlightening", - "enlightnement", "enlightenment", - "enligthenment", "enlightenment", - "enteratinment", "entertainment", - "enterpreneurs", "entrepreneurs", - "enterprenuers", "entrepreneurs", - "enterpreuners", "entrepreneurs", - "entertianment", "entertainment", - "enthusiasists", "enthusiasts", - "enthusiastics", "enthusiasts", - "entrepraneurs", "entrepreneurs", - "entreprenaurs", "entrepreneurs", - "entrepreneuer", "entrepreneurs", - "entreprenours", "entrepreneurs", - "entreprenuers", "entrepreneurs", - "entreprenures", "entrepreneurs", - "entrepreuners", "entrepreneurs", - "entretainment", "entertainment", - "enviornmental", "environmental", - "environemntal", "environmental", - "environmently", "environmental", - "envolutionary", "evolutionary", - "envrionmental", "environmental", - "estabilshment", "establishments", - "establishemnt", "establishments", - "establishmnet", "establishments", - "establsihment", "establishments", - "estbalishment", "establishments", - "ethnocentricm", "ethnocentrism", - "evolutionairy", "evolutionary", - "evolutionarly", "evolutionary", - "evolutionnary", "evolutionary", - "exaggeratting", "exaggerating", - "excpetionally", "exceptionally", - "executioneers", "executioner", - "existentiella", "existential", - "expectionally", "exceptionally", - "experementing", "experimenting", - "experienceing", "experiencing", - "experimentais", "experiments", - "experimention", "experimenting", - "experimentors", "experiments", - "expirementing", "experimenting", - "expodentially", "exponentially", - "exponantially", "exponentially", - "exponencially", "exponentially", - "exponentiella", "exponential", - "extraodrinary", "extraordinary", - "extraordianry", "extraordinary", - "extraordinair", "extraordinary", - "extraordinaly", "extraordinary", - "extraoridnary", "extraordinary", - "extremeophile", "extremophile", - "extroardinary", "extraordinary", - "familiarizate", "familiarize", - "fantasitcally", "fantastically", - "fantasmically", "fantastically", - "fantistically", "fantastically", - "faptastically", "fantastically", - "figurativeley", "figuratively", - "figurativelly", "figuratively", - "frankenstiens", "frankenstein", - "frankenstined", "frankenstein", - "frankenstiner", "frankenstein", - "frankenstines", "frankenstein", - "friendzoneado", "friendzoned", - "fucntionality", "functionality", - "funcitonality", "functionality", - "functionailty", "functionality", - "fundamentalis", "fundamentals", - "fundamnetally", "fundamentally", - "fundementally", "fundamentally", - "fundimentally", "fundamentally", - "gamifications", "ramifications", - "generalizaing", "generalizing", - "generalizaton", "generalization", - "generationals", "generations", - "generationens", "generations", - "generationers", "generations", - 
"generationnal", "generational", - "geographicaly", "geographically", - "geographicial", "geographical", - "geometricians", "geometers", - "goreshadowing", "foreshadowing", - "governmential", "governmental", - "gradification", "gratification", - "grammarically", "grammatically", - "grandchildern", "grandchildren", - "gratificacion", "gratification", - "gratificaiton", "gratification", - "grativational", "gravitational", - "gravitacional", "gravitational", - "gravitaitonal", "gravitational", - "hallcuination", "hallucination", - "hallicunation", "hallucination", - "hallucenation", "hallucination", - "halluciantion", "hallucinations", - "hallukination", "hallucination", - "hallunication", "hallucination", - "hallusination", "hallucination", - "halluzination", "hallucination", - "heiroglyphics", "hieroglyphics", - "hellucination", "hallucination", - "highlightning", "highlighting", - "homesexuality", "homosexuality", - "homosexualtiy", "homosexuality", - "homosexulaity", "homosexuality", - "horizontallly", "horizontally", - "hullucination", "hallucination", - "hypocriticial", "hypocritical", - "hypotheticaly", "hypothetically", - "hystericallly", "hysterically", - "identificaton", "identification", - "ideoligically", "ideologically", - "ideosyncratic", "idiosyncratic", - "idiologically", "ideologically", - "illistrations", "illustrations", - "illustartions", "illustrations", - "imperfactions", "imperfections", - "impersinating", "impersonating", - "implementaion", "implementation", - "implementatin", "implementations", - "implimenation", "implementation", - "imprefections", "imperfections", - "impresonating", "impersonating", - "inaccessibile", "inaccessible", - "inadventently", "inadvertently", - "inadverdently", "inadvertently", - "inadvertantly", "inadvertently", - "inadvertendly", "inadvertently", - "inapporpriate", "inappropriate", - "inappropirate", "inappropriate", - "inappropraite", "inappropriate", - "inaproppriate", "inappropriate", - "incarcaration", "incarceration", - "incarciration", "incarceration", - "incarseration", "incarceration", - "incerceration", "incarceration", - "incidentially", "incidentally", - "incomfortable", "uncomfortable", - "incomfortably", "uncomfortably", - "incompatabile", "incompatible", - "incompatiable", "incompatible", - "incompatibile", "incompatible", - "inconciderate", "inconsiderate", - "inconcistency", "inconsistency", - "inconditional", "unconditional", - "inconsciously", "unconsciously", - "inconsiderant", "inconsiderate", - "inconsistance", "inconsistency", - "inconsistancy", "inconsistency", - "inconsistenly", "inconsistency", - "inconsistensy", "inconsistency", - "inconsistenty", "inconsistency", - "inconveinence", "inconvenience", - "inconveniance", "inconvenience", - "inconveniente", "inconvenience", - "inconvienence", "inconvenience", - "incoroporated", "incorporated", - "incorparating", "incorporating", - "incorperating", "incorporating", - "incorperation", "incorporation", - "incorruptable", "incorruptible", - "incramentally", "incrementally", - "incrementarla", "incremental", - "incrementarlo", "incremental", - "indavertently", "inadvertently", - "indefinitelly", "indefinitely", - "independantes", "independents", - "independantly", "independently", - "independendet", "independent", - "independendly", "independently", - "indepentently", "independently", - "indespensable", "indispensable", - "indespensible", "indispensable", - "indestructble", "indestructible", - "indestructibe", "indestructible", - "indictrinated", "indoctrinated", - 
"indipendently", "independently", - "indispensible", "indispensable", - "indivuduality", "individuality", - "indocrtinated", "indoctrinated", - "indocternated", "indoctrinated", - "indoctornated", "indoctrinated", - "indoctrinatie", "indoctrinated", - "indoctrinatin", "indoctrination", - "indoctronated", "indoctrinated", - "industrialied", "industrialized", - "industrialzed", "industrialized", - "inexeprienced", "inexperience", - "inexpeirenced", "inexperience", - "inexpereinced", "inexperienced", - "inexperianced", "inexperienced", - "inexperiecned", "inexperience", - "inexperineced", "inexperience", - "inexpierenced", "inexperienced", - "inexplicabley", "inexplicably", - "inexplicablly", "inexplicably", - "infilitration", "infiltration", - "infrastructre", "infrastructure", - "infrastrucure", "infrastructure", - "inintelligent", "unintelligent", - "ininteresting", "uninteresting", - "initalisation", "initialisation", - "initalization", "initialization", - "inperfections", "imperfections", - "inpersonating", "impersonating", - "inpossibility", "impossibility", - "inpredictable", "unpredictable", - "inresponsible", "irresponsible", - "insectiverous", "insectivorous", - "insecuritites", "insecurities", - "insiginficant", "insignificant", - "insiginifcant", "insignificant", - "insignificent", "insignificant", - "insignificunt", "insignificant", - "insignifigant", "insignificant", - "insiprational", "inspirational", - "insperational", "inspirational", - "inspiritional", "inspirational", - "inspriational", "inspirational", - "instantaenous", "instantaneous", - "instantanious", "instantaneous", - "instanteneous", "instantaneous", - "instantenious", "instantaneous", - "instincitvely", "instinctively", - "instinctivley", "instinctively", - "instititional", "institutional", - "institutionel", "institutional", - "insturmentals", "instrumental", - "instutitional", "institutional", - "insustainable", "unsustainable", - "intelelctuals", "intellectuals", - "intellectualy", "intellectually", - "intellectuels", "intellectuals", - "intellecutals", "intellectuals", - "intellegently", "intelligently", - "intelluctuals", "intellectuals", - "intepretation", "interpretation", - "intereactions", "intersections", - "interesctions", "intersections", - "interlectuals", "intellectuals", - "intermittient", "intermittent", - "intermittment", "intermittent", - "internacional", "international", - "interpersonel", "interpersonal", - "interpresonal", "interpersonal", - "interpretaion", "interpretation", - "interpretarea", "interpreter", - "interpretarem", "interpreter", - "interpretares", "interpreter", - "interpretarse", "interpreter", - "interpretarte", "interpreter", - "interpretatin", "interpretations", - "interpreteert", "interpreter", - "interragation", "interrogation", - "interregation", "interrogation", - "interrigation", "interrogation", - "interrogacion", "interrogation", - "interrogativo", "interrogation", - "intertainment", "entertainment", - "intillectuals", "intellectuals", - "intraspection", "introspection", - "intrensically", "intrinsically", - "intriniscally", "intrinsically", - "intrinsecally", "intrinsically", - "intrisincally", "intrinsically", - "intristically", "intrinsically", - "introductiory", "introductory", - "introspeccion", "introspection", - "introspectivo", "introspection", - "introspektion", "introspection", - "invertibrates", "invertebrates", - "invesitgation", "investigation", - "invesitgative", "investigative", - "invesitgators", "investigators", - "investagators", "investigators", - 
"investegating", "investigating", - "investegators", "investigators", - "investiagtion", "investigation", - "investiagtive", "investigative", - "investigacion", "investigation", - "investigaiton", "investigations", - "investigaters", "investigators", - "investigativo", "investigation", - "investigatons", "investigations", - "investigsting", "investigating", - "investigstion", "investigations", - "investogators", "investigators", - "invisibillity", "invisibility", - "involuntarely", "involuntary", - "involuntarity", "involuntary", - "invulnerabile", "invulnerable", - "irrationallly", "irrationally", - "irresponcible", "irresponsible", - "irresponisble", "irresponsible", - "irresponsable", "irresponsible", - "irresponsbile", "irresponsible", - "irreversiable", "irreversible", - "irreversibelt", "irreversible", - "irreversibile", "irreversible", - "irrisponsible", "irresponsible", - "jacksonvillle", "jacksonville", - "journalisitic", "journalistic", - "journalistens", "journalists", - "journalisters", "journalists", - "journalistisk", "journalists", - "jsutification", "justifications", - "jurisdicitons", "jurisdictions", - "jurisidctions", "jurisdictions", - "juristictions", "jurisdictions", - "jursidictions", "jurisdictions", - "jusitfication", "justifications", - "justifiaction", "justifications", - "justificacion", "justification", - "justificaiton", "justification", - "justificativo", "justification", - "justificatons", "justifications", - "justificstion", "justifications", - "justiifcation", "justifications", - "karbohydrates", "carbohydrates", - "knoweldgeable", "knowledgeable", - "knowledegable", "knowledgeable", - "knowledgebale", "knowledgable", - "knowlegdeable", "knowledgeable", - "kollaboration", "collaboration", - "koncentration", "concentration", - "konfiguration", "configuration", - "konfrontation", "confrontation", - "konservatives", "conservatives", - "konstellation", "constellation", - "kontamination", "contamination", - "legitimatelly", "legitimately", - "libertariaism", "libertarianism", - "libertariansm", "libertarianism", - "libitarianisn", "libertarianism", - "lighthearthed", "lighthearted", - "mainfestation", "manifestation", - "manafacturers", "manufacturers", - "manafacturing", "manufacturing", - "manafestation", "manifestation", - "manefestation", "manifestation", - "manfuacturers", "manufactures", - "manifacturers", "manufacturers", - "manifacturing", "manufacturing", - "manifastation", "manifestation", - "manifestacion", "manifestation", - "manifestating", "manifestation", - "manifistation", "manifestation", - "manipulationg", "manipulating", - "manufacterers", "manufacturers", - "manufactering", "manufacturing", - "manufacterurs", "manufactures", - "manufactorers", "manufacturers", - "manufactoring", "manufacturing", - "manufactuered", "manufactured", - "manufactuerer", "manufacturer", - "manufactueres", "manufactures", - "manufacturedd", "manufactured", - "manufactureds", "manufactures", - "manufacturerd", "manufactured", - "manufacturier", "manufacturer", - "manufacturors", "manufacturers", - "manufactuters", "manufactures", - "manufacutrers", "manufactures", - "manufcaturers", "manufactures", - "marshmalllows", "marshmallows", - "massachsuetts", "massachusetts", - "massachucetts", "massachusetts", - "massachuestts", "massachusetts", - "massachusents", "massachusetts", - "massachusites", "massachusetts", - "massachussets", "massachusetts", - "massechusetts", "massachusetts", - "masturbateing", "masturbating", - "materialisimo", "materialism", - "mathamatician", 
"mathematician", - "mathametician", "mathematician", - "mathematicals", "mathematics", - "mathematicaly", "mathematically", - "mathematicans", "mathematics", - "mathematicion", "mathematician", - "mathematitian", "mathematician", - "mathemetician", "mathematician", - "mathmatically", "mathematically", - "mathmaticians", "mathematicians", - "mechanicallly", "mechanically", - "medeterranean", "mediterranean", - "meditarrenean", "mediterranean", - "meditereanean", "mediterranean", - "membranaphone", "membranophone", - "metamorphysis", "metamorphosis", - "metaphoricaly", "metaphorically", - "metaphoricial", "metaphorical", - "metaphysicals", "metaphysics", - "metaphysicans", "metaphysics", - "methamatician", "mathematician", - "methematician", "mathematician", - "metropolitain", "metropolitan", - "metropolitcan", "metropolitan", - "metropolitian", "metropolitan", - "millionairres", "millionaire", - "minneapolites", "minneapolis", - "misanderstood", "misunderstood", - "miscellanious", "miscellaneous", - "misconcpetion", "misconceptions", - "misconecption", "misconceptions", - "misinterperet", "misinterpret", - "misinterprate", "misinterpret", - "misinterprent", "misinterpret", - "misinterprted", "misinterpret", - "misogynisitic", "misogynistic", - "misrepreseted", "misrepresented", - "misunterstood", "misunderstood", - "modificaitons", "modifications", - "motivationals", "motivations", - "motivationnal", "motivational", - "mulitnational", "multinational", - "multimational", "multinational", - "multiplicaton", "multiplication", - "muncipalities", "municipalities", - "munnicipality", "municipality", - "mutlinational", "multinational", - "nacionalistic", "nationalistic", - "narcissisitic", "narcissistic", - "narcississtic", "narcissistic", - "natioanlistic", "nationalistic", - "nationalisitc", "nationalistic", - "nationalistes", "nationalists", - "nationalsitic", "nationalistic", - "neigbhourhood", "neighbourhood", - "neighboorhoud", "neighbourhood", - "neighborehood", "neighbourhood", - "neighborhoood", "neighborhoods", - "neighbourbood", "neighbourhood", - "neighbourgood", "neighbourhood", - "neighbourhoud", "neighbourhood", - "neighourhoods", "neighborhoods", - "nieghborhoods", "neighborhoods", - "nieghbourhood", "neighbourhood", - "noncombatents", "noncombatants", - "noninitalized", "noninitialized", - "northwestener", "northwestern", - "notificaitons", "notifications", - "occassionally", "occasionally", - "operationable", "operational", - "oppertunities", "opportunities", - "opprotunities", "opportunities", - "oppurtunities", "opportunities", - "opthamologist", "ophthalmologist", - "organistaions", "organisations", - "organizatinal", "organizational", - "organizativos", "organizations", - "organsiations", "organisations", - "organziations", "organizations", - "orginasations", "organisations", - "orginazations", "organizations", - "overpopulaton", "overpopulation", - "overreactiong", "overreacting", - "overshaddowed", "overshadowed", - "overwheliming", "overwhelming", - "overwhelmigly", "overwhelmingly", - "overwhelmingy", "overwhelmingly", - "overwhelminly", "overwhelmingly", - "palestininans", "palestinians", - "paraphraseing", "paraphrasing", - "paraphrashing", "paraphrasing", - "parilamentary", "parliamentary", - "parlaimentary", "parliamentary", - "parliamantary", "parliamentary", - "parliamentery", "parliamentary", - "parliamnetary", "parliamentary", - "parliementary", "parliamentary", - "particiaption", "participation", - "participacion", "participation", - "participantes", 
"participants", - "participativo", "participation", - "particularely", "particularly", - "particularily", "particularly", - "particularlly", "particularly", - "partizipation", "participation", - "passionatelly", "passionately", - "paychiatrists", "psychiatrists", - "paychologists", "psychologists", - "pennsylvainia", "pennsylvania", - "pennsylvanica", "pennsylvania", - "pennsylvannia", "pennsylvania", - "perdominantly", "predominantly", - "perpandicular", "perpendicular", - "perpendicualr", "perpendicular", - "perpenticular", "perpendicular", - "perpetuationg", "perpetuating", - "perpindicular", "perpendicular", - "personalitits", "personalities", - "pessimistisch", "pessimistic", - "pharmaceutial", "pharmaceutical", - "philisophical", "philosophical", - "philosiphical", "philosophical", - "philosohpical", "philosophical", - "philosophycal", "philosophically", - "philospohical", "philosophical", - "phisiological", "physiological", - "photagraphers", "photographers", - "photographics", "photographs", - "photographied", "photographed", - "photographier", "photographer", - "photograpphed", "photographed", - "photogrophers", "photographers", - "photogrpahers", "photographers", - "photoshoppade", "photoshopped", - "photoshoppped", "photoshopped", - "phsyiological", "physiological", - "phychiatrists", "psychiatrists", - "phychological", "psychological", - "phychologists", "psychologists", - "phylosophical", "philosophical", - "physciatrists", "psychiatrists", - "physcological", "psychological", - "physcologists", "psychologists", - "physioligical", "physiological", - "planeswlakers", "planeswalker", - "plansewalkers", "planeswalker", - "playthroughts", "playthroughs", - "polysaccaride", "polysaccharide", - "practicallity", "practically", - "practicioners", "practitioners", - "practisioners", "practitioners", - "practitioneer", "practitioners", - "practitionner", "practitioner", - "pratictioners", "practitioners", - "preconceieved", "preconceived", - "predecessores", "predecessors", - "predetermiend", "predetermined", - "predetirmined", "predetermined", - "preditermined", "predetermined", - "predomenantly", "predominantly", - "predominently", "predominantly", - "pregressively", "progressively", - "preinitalized", "preinitialized", - "preinitalizes", "preinitializes", - "preliferation", "proliferation", - "prependicular", "perpendicular", - "preposterious", "preposterous", - "prerequisties", "prerequisite", - "prerequistite", "prerequisite", - "prescribtions", "prescriptions", - "presumptuious", "presumptuous", - "pretedermined", "predetermined", - "problematisch", "problematic", - "proclaimation", "proclamation", - "prodominantly", "predominantly", - "professionnal", "professional", - "profitiablity", "profitability", - "profitibality", "profitability", - "progressivily", "progressively", - "progressivley", "progressively", - "prononciation", "pronunciation", - "pronouciation", "pronunciation", - "pronunciacion", "pronunciation", - "pronunciating", "pronunciation", - "pronuncuation", "pronunciation", - "pronunication", "pronunciation", - "pronuntiation", "pronunciation", - "propabilities", "probabilities", - "proportionaly", "proportionally", - "proportionnal", "proportional", - "proseletyzing", "proselytizing", - "protagonistas", "protagonists", - "protagonistes", "protagonists", - "protestantisk", "protestants", - "protruberance", "protuberance", - "provocativley", "provocative", - "pscyhiatrists", "psychiatrists", - "pscyhological", "psychological", - "pscyhologists", "psychologists", - 
"pshycological", "psychological", - "pshycologists", "psychologists", - "psichological", "psychological", - "psychaitrists", "psychiatrists", - "psychedellics", "psychedelics", - "psychiatrisch", "psychiatric", - "psycholigical", "psychological", - "psycholigists", "psychologists", - "psychologycal", "psychologically", - "psychologysts", "psychologists", - "psychyatrists", "psychiatrists", - "psysiological", "physiological", - "purpendicular", "perpendicular", - "pyschiatrists", "psychiatrists", - "pyschological", "psychological", - "pyschologists", "psychologists", - "qaulification", "qualification", - "qualifiaction", "qualification", - "qualificaiton", "qualifications", - "qualificatons", "qualifications", - "qualifikation", "qualification", - "questionalble", "questionable", - "quinessential", "quintessential", - "ramificaitons", "ramifications", - "realisitcally", "realistically", - "realtionships", "relationships", - "reccommending", "recommending", - "receptionnist", "receptionist", - "receptionsist", "receptionist", - "reconaissance", "reconnaissance", - "reconcilation", "reconciliation", - "reconnaisance", "reconnaissance", - "reconstrucion", "reconstruction", - "recreationnal", "recreational", - "rectangulaire", "rectangular", - "redistribuito", "redistribution", - "redistributin", "redistribution", - "reencarnation", "reincarnation", - "refridgerator", "refrigerator", - "rehabilitaion", "rehabilitation", - "rehabilitatin", "rehabilitation", - "rehabilitaton", "rehabilitation", - "reincarantion", "reincarnation", - "reincatnation", "reincarnation", - "reinforcemens", "reinforcements", - "reinforcemnts", "reinforcements", - "reinitalising", "reinitialising", - "reinitalizing", "reinitializing", - "reinkarnation", "reincarnation", - "reinstallling", "reinstalling", - "reintarnation", "reincarnation", - "relationshits", "relationships", - "relationsship", "relationships", - "relatiopnship", "relationship", - "relentlessely", "relentlessly", - "relentlessley", "relentlessly", - "relinqushment", "relinquishment", - "remifications", "ramifications", - "reprehenisble", "reprehensible", - "reprehensable", "reprehensible", - "reprehinsible", "reprehensible", - "represenation", "representation", - "represensible", "reprehensible", - "representaion", "representation", - "representatie", "representatives", - "representatin", "representations", - "representerad", "represented", - "representitve", "representative", - "representives", "representatives", - "repricussions", "repercussions", - "reprihensible", "reprehensible", - "resolutionary", "revolutionary", - "respectivelly", "respectively", - "responsibiliy", "responsibility", - "responsibilty", "responsibility", - "responsiblity", "responsibility", - "respositories", "repositories", - "resssurecting", "resurrecting", - "ressurrection", "resurrection", - "restaraunteur", "restaurateur", - "retoractively", "retroactively", - "retroactivily", "retroactively", - "retroactivley", "retroactively", - "retrocatively", "retroactively", - "revelutionary", "revolutionary", - "revolutionair", "revolutionary", - "revolutionens", "revolutions", - "revolutioners", "revolutions", - "revoultionary", "revolutionary", - "ridiculouness", "ridiculousness", - "rienforcement", "reinforcements", - "righetousness", "righteousness", - "rightiousness", "righteousness", - "rigtheousness", "righteousness", - "rollarcoaster", "rollercoaster", - "rollercaoster", "rollercoaster", - "rollercoaters", "rollercoaster", - "rollercoatser", "rollercoaster", - "rollerocaster", 
"rollercoaster", - "rollertoaster", "rollercoaster", - "rollorcoaster", "rollercoaster", - "sacrastically", "sarcastically", - "sarcasitcally", "sarcastically", - "satisfactorly", "satisfactory", - "scandianvians", "scandinavian", - "scateboarding", "skateboarding", - "schisophrenic", "schizophrenic", - "schiziphrenic", "schizophrenic", - "schizophernia", "schizophrenia", - "schizophernic", "schizophrenic", - "schizophrania", "schizophrenia", - "schizoprhenia", "schizophrenia", - "schizoprhenic", "schizophrenic", - "schozophrenia", "schizophrenia", - "schozophrenic", "schizophrenic", - "schyzophrenia", "schizophrenia", - "schyzophrenic", "schizophrenic", - "schziophrenia", "schizophrenia", - "schziophrenic", "schizophrenic", - "scientificaly", "scientifically", - "scientificlly", "scientifically", - "segementation", "segmentation", - "sensationable", "sensational", - "sensationails", "sensationalism", - "sensationaism", "sensationalism", - "sensationalim", "sensationalism", - "sensationella", "sensational", - "shcizophrenic", "schizophrenic", - "significanlty", "significantly", - "significently", "significantly", - "signifigantly", "significantly", - "simultaneosly", "simultaneously", - "simultaneuous", "simultaneous", - "simultanously", "simultaneously", - "singificantly", "significantly", - "skatebaording", "skateboarding", - "skateborading", "skateboarding", - "socioecenomic", "socioeconomic", - "socioecomonic", "socioeconomic", - "socioeconimic", "socioeconomic", - "sohpisticated", "sophisticated", - "sophisitcated", "sophisticated", - "sophistacated", "sophisticated", - "sophistocated", "sophisticated", - "sophosticated", "sophisticated", - "spacification", "specification", - "specializaton", "specialization", - "specificaiton", "specifications", - "specificatons", "specifications", - "specifikation", "specification", - "spectaculaire", "spectacular", - "spectaculalry", "spectacularly", - "spectatularly", "spectacularly", - "spesification", "specification", - "spirituallity", "spiritually", - "sponatenously", "spontaneously", - "spontaenously", "spontaneously", - "spontainously", "spontaneously", - "spontaneoulsy", "spontaneously", - "spontaneuosly", "spontaneously", - "spontaniously", "spontaneously", - "spontanuously", "spontaneously", - "sponteanously", "spontaneously", - "sponteneously", "spontaneously", - "sporstmanship", "sportsmanship", - "sportmansship", "sportsmanship", - "sportsmamship", "sportsmanship", - "sportsmenship", "sportsmanship", - "sprotsmanship", "sportsmanship", - "stakeboarding", "skateboarding", - "startegically", "strategically", - "statisitcally", "statistically", - "statistacally", "statistically", - "stereotipical", "stereotypical", - "stereotpyical", "stereotypical", - "stereotypcial", "stereotypical", - "stereotypeing", "stereotyping", - "stereotypying", "stereotyping", - "steriotypical", "stereotypical", - "steroetypical", "stereotypical", - "steryotypical", "stereotypical", - "storytellling", "storytelling", - "stragegically", "strategically", - "stragetically", "strategically", - "straightenend", "straightened", - "straitforward", "straightforward", - "stratagically", "strategically", - "stratigically", "strategically", - "strawberrries", "strawberries", - "stregnthening", "strengthening", - "strenghtening", "strengthening", - "strengthining", "strengthening", - "stretegically", "strategically", - "subcatagories", "subcategories", - "subconsciosly", "subconsciously", - "subconsciouly", "subconsciously", - "subconsiously", "subconsciously", - 
"subjectivelly", "subjectively", - "subscribtions", "subscriptions", - "substancially", "substantially", - "substanitally", "substantially", - "substansially", "substantially", - "substantiable", "substantial", - "substantually", "substantially", - "substitutents", "substitutes", - "successfullly", "successfully", - "supermarkedet", "supermarket", - "supermarkerts", "supermarkets", - "superpowereds", "superpowers", - "supersticious", "superstitious", - "superstisious", "superstitious", - "superstitiosi", "superstitious", - "superstitiuos", "superstitious", - "superstituous", "superstitious", - "suphisticated", "sophisticated", - "supscriptions", "subscriptions", - "surreptiously", "surreptitiously", - "survavibility", "survivability", - "survibability", "survivability", - "survivabiltiy", "survivability", - "survivalibity", "survivability", - "survivavility", "survivability", - "survivebility", "survivability", - "susbtantially", "substantially", - "sustainabilty", "sustainability", - "synchornously", "synchronously", - "systematicaly", "systematically", - "systematiclly", "systematically", - "techmological", "technological", - "technicallity", "technically", - "technoligical", "technological", - "technologicly", "technological", - "techonlogical", "technological", - "telaportation", "teleportation", - "teleportating", "teleportation", - "teleprotation", "teleportation", - "teliportation", "teleportation", - "teloportation", "teleportation", - "territoriella", "territorial", - "theoratically", "theoretically", - "theoritically", "theoretically", - "therapeutisch", "therapeutic", - "thereotically", "theoretically", - "thermodynaics", "thermodynamics", - "thermodynamcs", "thermodynamics", - "theroetically", "theoretically", - "thoeretically", "theoretically", - "tranistioning", "transitioning", - "transcendance", "transcendence", - "transcribtion", "transcription", - "transcripcion", "transcription", - "transferrring", "transferring", - "transformarea", "transformer", - "transformarem", "transformer", - "transformarse", "transformers", - "transformaton", "transformation", - "transformered", "transformed", - "transgengered", "transgendered", - "transisioning", "transitioning", - "transitionals", "transitions", - "transitionnal", "transitional", - "transitionned", "transitioned", - "transkription", "transcription", - "translyvanian", "transylvania", - "transmisisons", "transmissions", - "transmissable", "transmissible", - "transmisssion", "transmissions", - "transparantie", "transparent", - "transparentcy", "transparency", - "transplantees", "transplants", - "transporation", "transportation", - "transportaion", "transportation", - "transportarme", "transporter", - "transportarse", "transporter", - "transportarte", "transporter", - "transporteurs", "transporter", - "transsexuella", "transsexual", - "transylvannia", "transylvania", - "trasngendered", "transgendered", - "troubleshooot", "troubleshoot", - "udnerestimate", "underestimated", - "umcomfortable", "uncomfortable", - "umcomfortably", "uncomfortably", - "umpredictable", "unpredictable", - "unappropriate", "inappropriate", - "unbelievabley", "unbelievably", - "unbelievablly", "unbelievably", - "uncertaintity", "uncertainty", - "uncomfertable", "uncomfortable", - "uncomfertably", "uncomfortably", - "uncomfortabel", "uncomfortably", - "uncomforyable", "uncomfortably", - "uncomfrotable", "uncomfortable", - "uncomfrotably", "uncomfortably", - "uncomftorable", "uncomfortable", - "uncomftorably", "uncomfortably", - "unconcsiously", 
"unconsciously", - "unconfortable", "uncomfortable", - "unconfortably", "uncomfortably", - "unconscioulsy", "unconsciously", - "unconsicously", "unconsciously", - "unconsiderate", "inconsiderate", - "uncontrollabe", "uncontrollable", - "uncontrollaby", "uncontrollably", - "unconventinal", "unconventional", - "uncounciously", "unconsciously", - "uncousciously", "unconsciously", - "underastimate", "underestimate", - "underesitmate", "underestimated", - "underestamate", "underestimate", - "underestemate", "underestimate", - "underestiamte", "underestimated", - "undergratuate", "undergraduate", - "underhwelming", "underwhelming", - "underhwleming", "underwhelming", - "underminining", "undermining", - "underpowererd", "underpowered", - "undersetimate", "underestimate", - "understandble", "understandable", - "understandbly", "understandably", - "underwealming", "underwhelming", - "underwhemling", "underwhelming", - "underwhleming", "underwhelming", - "undoctrinated", "indoctrinated", - "unexpectadely", "unexpectedly", - "unfomfortable", "uncomfortable", - "unforgiveable", "unforgivable", - "unfortuantely", "unfortunately", - "unfortunantly", "unfortunately", - "unfortunatley", "unfortunately", - "unfortuneatly", "unfortunately", - "unfortunetely", "unfortunately", - "unilaterallly", "unilaterally", - "uninstallling", "uninstalling", - "unintellegent", "unintelligent", - "unintelligant", "unintelligent", - "unintensional", "unintentional", - "uninteristing", "uninteresting", - "universitites", "universities", - "unnecassarily", "unnecessarily", - "unneccesarily", "unnecessarily", - "unnecessairly", "unnecessarily", - "unnecessarely", "unnecessarily", - "unnecessarity", "unnecessarily", - "unnecesserily", "unnecessarily", - "unnecissarily", "unnecessarily", - "unnessecarily", "unnecessarily", - "unoperational", "nonoperational", - "unprecendeted", "unprecedented", - "unprecidented", "unprecedented", - "unpredecented", "unprecedented", - "unpredicatble", "unpredictable", - "unpredictible", "unpredictable", - "unpresedented", "unprecedented", - "unpridictable", "unpredictable", - "unprofessinal", "unprofessional", - "unrealistisch", "unrealistic", - "unreasonabley", "unreasonably", - "unreasonablly", "unreasonably", - "unrestrictred", "unrestricted", - "unsistainable", "unsustainable", - "unsubscribade", "unsubscribed", - "unsubscribbed", "unsubscribe", - "unsuccesfully", "unsuccessfully", - "unsuccessfull", "unsuccessful", - "unsucessfully", "unsuccessfully", - "unsuprisingly", "unsurprisingly", - "unsuprizingly", "unsurprisingly", - "unsustainible", "unsustainable", - "unsustianable", "unsustainable", - "vertification", "certification", - "villification", "vilification", - "virualization", "visualization", - "visualizacion", "visualization", - "visualizaiton", "visualization", - "visualizating", "visualization", - "vitualization", "visualization", - "vizualization", "visualization", - "volounteering", "volunteering", - "vulberability", "vulnerability", - "vulernability", "vulnerability", - "vulnarability", "vulnerability", - "vulnerabiltiy", "vulnerability", - "vulnurability", "vulnerability", - "vunlerability", "vulnerability", - "vurnerability", "vulnerability", - "weightlfiting", "weightlifting", - "weightlifitng", "weightlifting", - "weightligting", "weightlifting", - "weigthlifting", "weightlifting", - "wholeheartdly", "wholeheartedly", - "wholeheartedy", "wholeheartedly", - "wholeheartely", "wholeheartedly", - "wieghtlifting", "weightlifting", - "abberivation", "abbreviation", - 
"abberviation", "abbreviation", - "abbreivation", "abbreviation", - "abbreveation", "abbreviation", - "abbrievation", "abbreviation", - "abortificant", "abortifacient", - "abrreviation", "abbreviation", - "academcially", "academically", - "accedentally", "accidentally", - "accelarating", "accelerating", - "accelaration", "acceleration", - "acceleartion", "acceleration", - "acceleraptor", "accelerator", - "accelorating", "accelerating", - "accessibilty", "accessibility", - "accidentlaly", "accidently", - "accomadating", "accommodating", - "accomadation", "accommodation", - "accomodating", "accommodating", - "accomodation", "accommodation", - "accrediation", "accreditation", - "acculumation", "accumulation", - "accumalation", "accumulation", - "accumilation", "accumulation", - "acedemically", "academically", - "acheivements", "achievements", - "acknolwedged", "acknowledged", - "acknolwedges", "acknowledges", - "acknoweldged", "acknowledged", - "acknoweldges", "acknowledges", - "acknowiedged", "acknowledged", - "acknowladges", "acknowledges", - "acknowldeged", "acknowledged", - "acknowledget", "acknowledgement", - "acknowleding", "acknowledging", - "acknowlegded", "acknowledged", - "acknowlegdes", "acknowledges", - "ackumulation", "accumulation", - "acquaintaces", "acquaintances", - "acquaintence", "acquaintance", - "acquantaince", "acquaintance", - "acquantiance", "acquaintances", - "acquiantance", "acquaintances", - "acquiantence", "acquaintance", - "adknowledged", "acknowledged", - "adknowledges", "acknowledges", - "administored", "administer", - "adminsitered", "administered", - "adminstrator", "administrator", - "advantagious", "advantageous", - "advantegeous", "advantageous", - "adventageous", "advantageous", - "adventureous", "adventures", - "adventureres", "adventures", - "adventurious", "adventurous", - "adventuruous", "adventurous", - "advertisiers", "advertisers", - "advertisment", "advertisement", - "advertisters", "advertisers", - "advertisting", "advertising", - "aestheticaly", "aesthetically", - "aestheticlly", "aesthetically", - "afficianados", "aficionados", - "afficionados", "aficionados", - "afghanisthan", "afghanistan", - "afterhtought", "afterthought", - "afterthougth", "afterthought", - "aggressivley", "aggressively", - "agircultural", "agricultural", - "agknowledged", "acknowledged", - "agnosticisim", "agnosticism", - "agracultural", "agricultural", - "agriculteral", "agricultural", - "agriculteurs", "agriculture", - "agricultrual", "agricultural", - "agriculutral", "agricultural", - "agrigultural", "agricultural", - "agrocultural", "agricultural", - "allegiancies", "allegiance", - "alterantives", "alternatives", - "alternatevly", "alternately", - "alternatiely", "alternately", - "alternatieve", "alternative", - "alternativly", "alternatively", - "alternativos", "alternatives", - "alternatvely", "alternately", - "alternitives", "alternatives", - "altruistisch", "altruistic", - "amendmenters", "amendments", - "amohetamines", "amphetamines", - "ampehtamines", "amphetamines", - "ampethamines", "amphetamines", - "amphatamines", "amphetamines", - "amphedamines", "amphetamines", - "amphetamenes", "amphetamines", - "amphetemines", "amphetamines", - "amphetimines", "amphetamines", - "amphetmaines", "amphetamines", - "anecdotallly", "anecdotally", - "annhiliation", "annihilation", - "annihalition", "annihilation", - "annihilatron", "annihilation", - "annihliation", "annihilation", - "annilihation", "annihilation", - "anniversairy", "anniversary", - "anniversarry", "anniversary", 
- "anniversiary", "anniversary", - "annoucenment", "announcements", - "annoucnement", "announcement", - "announcemnet", "announcements", - "announcemnts", "announcements", - "anphetamines", "amphetamines", - "ansalisation", "nasalisation", - "ansalization", "nasalization", - "antaganistic", "antagonistic", - "antagonisitc", "antagonistic", - "antagonostic", "antagonist", - "antibioticos", "antibiotics", - "anticiaption", "anticipation", - "anticipacion", "anticipation", - "antisipation", "anticipation", - "antogonistic", "antagonistic", - "antrhopology", "anthropology", - "antrophology", "anthropology", - "apllications", "applications", - "apocalypitic", "apocalyptic", - "apologistics", "apologists", - "apologizeing", "apologizing", - "apostrophied", "apostrophe", - "apostrophies", "apostrophe", - "apperciation", "appreciation", - "applicaitons", "applications", - "appoitnments", "appointments", - "apporachable", "approachable", - "appraochable", "approachable", - "appreceating", "appreciating", - "appreciaters", "appreciates", - "appreciatied", "appreciative", - "appreicating", "appreciating", - "appreication", "appreciation", - "appretiation", "appreciation", - "appropriatin", "appropriation", - "appropriatly", "appropriately", - "appropriaton", "appropriation", - "approprietly", "appropriately", - "approstraphe", "apostrophe", - "approxiately", "approximately", - "approximatly", "approximately", - "approximetly", "approximately", - "aproximately", "approximately", - "aqcuaintance", "acquaintance", - "aqquaintance", "acquaintance", - "arbitrariliy", "arbitrarily", - "arbitrarilly", "arbitrarily", - "archetecture", "architecture", - "architechure", "architecture", - "architectual", "architectural", - "architectuur", "architecture", - "architecutre", "architecture", - "architexture", "architecture", - "arcitechture", "architecture", - "areodynamics", "aerodynamics", - "argicultural", "agricultural", - "argumentatie", "argumentative", - "arithmetisch", "arithmetic", - "armageddomon", "armageddon", - "arrengements", "arrangements", - "articifially", "artificially", - "artificailly", "artificially", - "artificiella", "artificial", - "artificually", "artificially", - "artifiically", "artificially", - "assasination", "assassination", - "assassinatin", "assassination", - "assissinated", "assassinated", - "associationg", "associating", - "assoications", "associations", - "assosiations", "associations", - "assosication", "assassination", - "assotiations", "associations", - "assymetrical", "asymmetrical", - "asthetically", "aesthetically", - "astranomical", "astronomical", - "astromonical", "astronomical", - "astronautlis", "astronauts", - "astronimical", "astronomical", - "astronomicly", "astronomical", - "athleticisim", "athleticism", - "atmosphereic", "atmospheric", - "audiobookmrs", "audiobooks", - "auhtenticate", "authenticate", - "australianas", "australians", - "australianos", "australians", - "authentisity", "authenticity", - "authorithies", "authorities", - "authoritiers", "authorities", - "authorizaton", "authorization", - "authrorities", "authorities", - "autochtonous", "autochthonous", - "autocorrrect", "autocorrect", - "automobilies", "automobile", - "automodertor", "automoderator", - "automonomous", "autonomous", - "auxilliaries", "auxiliaries", - "avaliability", "availability", - "avialability", "availability", - "awknowledged", "acknowledged", - "awknowledges", "acknowledges", - "awkwardsness", "awkwardness", - "babysittting", "babysitting", - "beaurocratic", "bureaucratic", - 
"beautifullly", "beautifully", - "belligerante", "belligerent", - "beuraucratic", "bureaucratic", - "billionairre", "billionaire", - "billionaries", "billionaires", - "billioniares", "billionaires", - "bioligically", "biologically", - "birmingharam", "birmingham", - "bittersweeet", "bittersweet", - "blamethrower", "flamethrower", - "blueberrries", "blueberries", - "blueprintcss", "blueprints", - "boardcasting", "broadcasting", - "bobybuilding", "bodybuilding", - "bodybuidling", "bodybuilding", - "bodybuilidng", "bodybuilding", - "bodybuliding", "bodybuilding", - "bodydbuilder", "bodybuilder", - "bombardement", "bombardment", - "boradcasting", "broadcasting", - "botivational", "motivational", - "brainwahsing", "brainwashing", - "brakethrough", "breakthrough", - "braodcasting", "broadcasting", - "brazilianese", "brazilians", - "brazilianess", "brazilians", - "breakthorugh", "breakthrough", - "breaktrhough", "breakthrough", - "breastfeedig", "breastfeeding", - "breastfeeing", "breastfeeding", - "breasttaking", "breathtaking", - "brianwashing", "brainwashing", - "broadcastors", "broadcasts", - "brotherhoood", "brotherhood", - "buearucratic", "bureaucratic", - "bueraucratic", "bureaucratic", - "bulletprooof", "bulletproof", - "bureaocratic", "bureaucratic", - "bureaucracie", "bureaucratic", - "bureaucracts", "bureaucrats", - "bureaucrates", "bureaucrats", - "bureuacratic", "bureaucratic", - "businessemen", "businessmen", - "cababilities", "capabilities", - "caclulations", "calculations", - "calcluations", "calculation", - "calcualtions", "calculations", - "calculationg", "calculating", - "calculatoare", "calculator", - "californains", "californian", - "californican", "californian", - "californinan", "californian", - "caluclations", "calculations", - "camouflagued", "camouflage", - "canceltation", "cancellation", - "cannibalisim", "cannibalism", - "canniballism", "cannibalism", - "cannotations", "connotations", - "capitalistes", "capitalists", - "caracterized", "characterized", - "carbohydrats", "carbohydrates", - "carbohyrdate", "carbohydrates", - "caricaturale", "caricature", - "caricaturile", "caricature", - "caricaturise", "caricature", - "caricaturize", "caricature", - "catastraphic", "catastrophic", - "catastrohpic", "catastrophic", - "catastrophie", "catastrophe", - "categoricaly", "categorically", - "categoriezed", "categorized", - "catergorized", "categorized", - "caterpillers", "caterpillars", - "catestrophic", "catastrophic", - "catholicisim", "catholicism", - "catholocisim", "catholicism", - "catistrophic", "catastrophic", - "catostraphic", "catastrophic", - "catostrophic", "catastrophic", - "catterpilars", "caterpillars", - "catterpillar", "caterpillar", - "celebratings", "celebrations", - "celebritites", "celebrities", - "celibrations", "celebrations", - "cententenial", "centennial", - "cercumstance", "circumstance", - "cerification", "verification", - "certificiate", "certificate", - "challengeing", "challenging", - "chamiponship", "championships", - "champinoship", "championships", - "championchip", "championship", - "championsihp", "championships", - "championsips", "championships", - "champiosnhip", "championships", - "champoinship", "championship", - "chanpionship", "championship", - "charactarize", "characterize", - "charaterized", "characterized", - "charismastic", "charismatic", - "cheerlearder", "cheerleader", - "cheerleeders", "cheerleaders", - "cheeseberger", "cheeseburger", - "cheeseborger", "cheeseburger", - "cheesebruger", "cheeseburgers", - "cheeseburges", 
"cheeseburgers", - "cheeseburgie", "cheeseburger", - "cheezeburger", "cheeseburger", - "chirstianity", "christianity", - "chocolateers", "chocolates", - "chrisitanity", "christianity", - "christainity", "christianity", - "christiantiy", "christianity", - "christinaity", "christianity", - "chromosomers", "chromosomes", - "chronologial", "chronological", - "chrsitianity", "christianity", - "cilivization", "civilizations", - "circulatiing", "circulating", - "circulationg", "circulating", - "circumcisied", "circumcised", - "circumcition", "circumcision", - "circumsicion", "circumcision", - "circumsision", "circumcision", - "circumsition", "circumcision", - "circumsizion", "circumcision", - "circumstanes", "circumstance", - "circumstanta", "circumstantial", - "circumstante", "circumstance", - "circuncision", "circumcision", - "circunstance", "circumstance", - "civiliaztion", "civilizations", - "civilizacion", "civilization", - "civilizaiton", "civilization", - "civilizatoin", "civilizations", - "civilizatons", "civilizations", - "civilziation", "civilizations", - "civizilation", "civilizations", - "claculations", "calculations", - "classificato", "classification", - "cockroachers", "cockroaches", - "coefficienct", "coefficient", - "coencidental", "coincidental", - "coincedental", "coincidental", - "coincidencal", "coincidental", - "coincidentia", "coincidental", - "coindidental", "coincidental", - "coinsidental", "coincidental", - "cointerpoint", "counterpoint", - "collaberator", "collaborate", - "collaboratie", "collaborate", - "collaboratin", "collaboration", - "collectivily", "collectively", - "collectivley", "collectively", - "colonialisim", "colonialism", - "colonizacion", "colonization", - "colonizators", "colonizers", - "colonozation", "colonization", - "combanations", "combinations", - "combonations", "combinations", - "comdemnation", "condemnation", - "comemmorates", "commemorates", - "comemoretion", "commemoration", - "comeptitions", "competitions", - "comfirmation", "confirmation", - "comfortabley", "comfortably", - "comfortablly", "comfortably", - "comissioning", "commissioning", - "commandemnts", "commandment", - "commandmants", "commandments", - "commandmends", "commandments", - "commemmorate", "commemorate", - "commendments", "commandments", - "commenteries", "commenters", - "commenwealth", "commonwealth", - "commerciales", "commercials", - "commerically", "commercially", - "comminicated", "communicated", - "commishioned", "commissioned", - "commishioner", "commissioner", - "commisioning", "commissioning", - "commissionar", "commissioner", - "commissionor", "commissioner", - "committments", "commitments", - "commoditites", "commodities", - "commomwealth", "commonwealth", - "commonhealth", "commonwealth", - "commonweatlh", "commonwealth", - "commonwelath", "commonwealth", - "communciated", "communicated", - "communiation", "communication", - "communicatie", "communicate", - "communicatin", "communications", - "communicaton", "communication", - "communitites", "communities", - "compansating", "compensating", - "compansation", "compensation", - "comparativly", "comparatively", - "comparisions", "comparisons", - "comparission", "comparisons", - "comparissons", "comparisons", - "compatablity", "compatibility", - "compatibiliy", "compatibility", - "compatibilty", "compatibility", - "compatiblity", "compatibility", - "compensacion", "compensation", - "compensative", "compensate", - "compesitions", "compositions", - "competetions", "competitions", - "competitevly", "competitively", - 
"competitiion", "competition", - "competitiors", "competitors", - "competitivly", "competitively", - "competitivos", "competitions", - "compinsating", "compensating", - "compinsation", "compensation", - "complainging", "complaining", - "completetion", "completion", - "compliations", "compilation", - "complicacion", "complication", - "complicatied", "complicate", - "complicaties", "complicate", - "complicatred", "complicate", - "complicatted", "complicate", - "complilation", "complication", - "complimation", "complication", - "complimenary", "complimentary", - "complimentje", "complimented", - "complimentry", "complimentary", - "complination", "complication", - "complitation", "complication", - "composistion", "compositions", - "compramising", "compromising", - "compremising", "compromising", - "compresssion", "compression", - "compromissen", "compromise", - "compromisses", "compromises", - "compromizing", "compromising", - "compromosing", "compromising", - "comptability", "compatibility", - "compulsivley", "compulsive", - "compulsorary", "compulsory", - "computarized", "computerized", - "comrpomising", "compromising", - "comtaminated", "contaminated", - "comtemporary", "contemporary", - "conbinations", "combinations", - "concatinated", "contaminated", - "conceivabley", "conceivably", - "concellation", "cancellation", - "concentraded", "concentrated", - "concentraing", "concentrating", - "concentraion", "concentration", - "concentrarte", "concentrate", - "concentratie", "concentrate", - "concentratin", "concentration", - "concequences", "consequences", - "concequently", "consequently", - "concersation", "conservation", - "concervation", "conservation", - "concervatism", "conservatism", - "concervative", "conservative", - "conciderable", "considerable", - "conciderably", "considerably", - "conciousness", "consciousness", - "conclusiones", "conclusions", - "conclusivley", "conclusive", - "condamnation", "condemnation", - "condemantion", "condemnation", - "condenmation", "condemnation", - "condescening", "condescending", - "condescenion", "condescension", - "conditionnal", "conditional", - "conditionned", "conditioned", - "conditionner", "conditioner", - "condmenation", "condemnation", - "condolencies", "condolences", - "condolensces", "condolences", - "condomnation", "condemnation", - "condradicted", "contradicted", - "conenctivity", "connectivity", - "confedential", "confidential", - "confederancy", "confederacy", - "confederatie", "confederate", - "confermation", "confirmation", - "confersation", "conservation", - "confessionis", "confessions", - "confidencial", "confidential", - "confidentail", "confidential", - "confidentaly", "confidently", - "confidentely", "confidently", - "confidentiel", "confidential", - "configuratin", "configurations", - "configuraton", "configuration", - "confirmacion", "confirmation", - "confrimation", "confirmation", - "confrontaion", "confrontation", - "congegration", "congregation", - "congergation", "congregation", - "congradulate", "congratulate", - "congragation", "congregation", - "congragulate", "congratulate", - "congratualte", "congratulate", - "congregacion", "congregation", - "congresional", "congressional", - "congresssman", "congressman", - "congresssmen", "congressmen", - "congretation", "congregation", - "congrigation", "congregation", - "conicidental", "coincidental", - "connatations", "connotations", - "connecticuit", "connecticut", - "connectivety", "connectivity", - "connetations", "connotations", - "connitations", "connotations", - 
"connonations", "connotations", - "conolization", "colonization", - "conpensating", "compensating", - "conpensation", "compensation", - "conpetitions", "competitions", - "conplimented", "complimented", - "conpromising", "compromising", - "consciouness", "consciousness", - "consciouslly", "consciously", - "consectutive", "consecutive", - "consecuences", "consequences", - "consecuentes", "consequences", - "consecuently", "consequently", - "consensuarlo", "consensual", - "consentrated", "concentrated", - "consentrates", "concentrates", - "conseqeunces", "consequence", - "consequenses", "consequences", - "consequental", "consequently", - "consequneces", "consequence", - "conservacion", "conservation", - "conservaties", "conservatives", - "conservativo", "conservation", - "conservativs", "conservatism", - "conservitave", "conservatives", - "conservitism", "conservatism", - "conservitive", "conservative", - "considerarle", "considerable", - "considerarte", "considerate", - "consideraste", "considerate", - "consideratie", "considerate", - "consideratin", "considerations", - "consideribly", "considerably", - "consilidated", "consolidated", - "consipracies", "conspiracies", - "consiquently", "consequently", - "consistantly", "consistently", - "consistencey", "consistency", - "consistentcy", "consistently", - "consitutents", "constituents", - "consoldiated", "consolidated", - "consolitated", "consolidate", - "consolodated", "consolidated", - "consoltation", "consultation", - "conspericies", "conspiracies", - "conspiracize", "conspiracies", - "conspiriator", "conspirator", - "conspiricies", "conspiracies", - "conspriacies", "conspiracies", - "consqeuences", "consequence", - "constinually", "continually", - "constitition", "constitution", - "constituante", "constituents", - "constituants", "constituents", - "constituates", "constitutes", - "constitucion", "constitution", - "constituient", "constitute", - "constituinte", "constituents", - "constitutiei", "constitute", - "constitutues", "constitute", - "constiutents", "constituents", - "constracting", "constructing", - "constraction", "construction", - "constrainsts", "constraints", - "construccion", "construction", - "construciton", "construction", - "constructeds", "constructs", - "constructief", "constructive", - "constructies", "constructs", - "constructifs", "constructs", - "constructiin", "constructing", - "constructivo", "construction", - "consturction", "construction", - "consultating", "consultation", - "consumerisim", "consumerism", - "contaiminate", "contaminate", - "contaminatie", "contaminated", - "contaminaton", "contamination", - "contaminents", "containment", - "contamporary", "contemporary", - "contanimated", "contaminated", - "contaniments", "containment", - "contemperary", "contemporary", - "contemporany", "contemporary", - "continentais", "continents", - "continential", "continental", - "contineously", "continuously", - "continiously", "continuously", - "continuacion", "continuation", - "continuating", "continuation", - "continuativo", "continuation", - "continuining", "continuing", - "contirbution", "contribution", - "contirbutors", "contributors", - "contiunation", "continuation", - "contrabution", "contribution", - "contraceptie", "contraceptives", - "contradicing", "contradicting", - "contradicion", "contradiction", - "contradicory", "contradictory", - "contradictie", "contradicted", - "contradictin", "contradiction", - "contradicton", "contradiction", - "contraticted", "contradicted", - "contribucion", "contribution", - 
"contribuitor", "contributor", - "contributers", "contributors", - "contributivo", "contribution", - "contributons", "contributors", - "contrictions", "contractions", - "contridicted", "contradicted", - "controlleras", "controllers", - "controlllers", "controllers", - "controverial", "controversial", - "controveries", "controversies", - "controversal", "controversial", - "controversey", "controversy", - "contructions", "contractions", - "conveinently", "conveniently", - "convencional", "conventional", - "conveniantly", "conveniently", - "converastion", "conversations", - "converdation", "conservation", - "conversacion", "conversation", - "conversaiton", "conversations", - "conversatino", "conservation", - "conversatism", "conservatism", - "conversatoin", "conversations", - "conversiones", "conversions", - "converstaion", "conversation", - "convertables", "convertibles", - "convertiable", "convertible", - "convertibile", "convertible", - "convervation", "conservation", - "convervatism", "conservatism", - "converzation", "conservation", - "convesration", "conservation", - "convienently", "conveniently", - "convorsation", "conversation", - "convseration", "conservation", - "coordenation", "coordination", - "coordiantion", "coordination", - "coordinacion", "coordination", - "coordinaters", "coordinates", - "coordinatior", "coordinator", - "coordinatore", "coordinate", - "coordonation", "coordination", - "cooridnation", "coordination", - "coorperation", "cooperation", - "coprorations", "corporations", - "corinthianos", "corinthians", - "corinthinans", "corinthians", - "corparations", "corporations", - "corperations", "corporations", - "corporativos", "corporations", - "corproations", "corporations", - "corrdination", "coordination", - "correponding", "corresponding", - "correposding", "corresponding", - "correspondes", "corresponds", - "correspondig", "corresponding", - "corresponing", "corresponding", - "corrisponded", "corresponded", - "costomizable", "customizable", - "costumizable", "customizable", - "councidental", "coincidental", - "counsellling", "counselling", - "counterfiets", "counterfeit", - "counterfited", "counterfeit", - "counterracts", "counterparts", - "countertraps", "counterparts", - "countrywides", "countryside", - "coutnerparts", "counterparts", - "coutnerpoint", "counterpoint", - "covnersation", "conservation", - "crankenstein", "frankenstein", - "creationisim", "creationism", - "creationnism", "creationism", - "creationnist", "creationist", - "creationsism", "creationism", - "creationsist", "creationist", - "creationsits", "creationists", - "credibillity", "credibility", - "crigneworthy", "cringeworthy", - "cringewhorty", "cringeworthy", - "cringeworhty", "cringeworthy", - "cringewrothy", "cringeworthy", - "cringyworthy", "cringeworthy", - "criticallity", "critically", - "criticiszing", "criticising", - "croporations", "corporations", - "crucifiction", "crucifixion", - "cuestionable", "questionable", - "culiminating", "culminating", - "cumulatative", "cumulative", - "cuntaminated", "contaminated", - "curcumcision", "circumcision", - "curcumstance", "circumstance", - "custamizable", "customizable", - "custimizable", "customizable", - "customizaton", "customization", - "customizeble", "customizable", - "customizible", "customizable", - "custumizable", "customizable", - "cuztomizable", "customizable", - "dabilitating", "debilitating", - "dangerousely", "dangerously", - "decensitized", "desensitized", - "deceptionist", "receptionist", - "declareation", "declaration", - 
"decomposeion", "decomposition", - "decomposited", "decomposed", - "decscription", "description", - "deffensively", "defensively", - "deficiancies", "deficiencies", - "deficiencias", "deficiencies", - "deficiensies", "deficiencies", - "definatively", "definitively", - "defininitely", "definitively", - "definitavely", "definitively", - "definitevely", "definitively", - "definitifely", "definitively", - "definitinely", "definitively", - "definititely", "definitively", - "definitivley", "definitively", - "deinitalized", "deinitialized", - "deinitalizes", "deinitializes", - "delibaretely", "deliberately", - "deliberatley", "deliberately", - "delibirately", "deliberately", - "delibitating", "debilitating", - "deliverately", "deliberately", - "delusionally", "delusively", - "demesticated", "domesticated", - "democracries", "democracies", - "democraphics", "demographics", - "democratisch", "democratic", - "demograhpics", "demographics", - "demogrpahics", "demographics", - "demonination", "denominations", - "demonstarted", "demonstrated", - "demonstartes", "demonstrates", - "demonstrabil", "demonstrably", - "demonstraion", "demonstration", - "demonstraits", "demonstrates", - "demonstrants", "demonstrates", - "demonstratie", "demonstrate", - "demonstratin", "demonstration", - "demonstrerat", "demonstrate", - "demosntrably", "demonstrably", - "demosntrated", "demonstrated", - "demosntrates", "demonstrates", - "demostration", "demonstration", - "denomenation", "denomination", - "denominacion", "denomination", - "denominatior", "denominator", - "denominatons", "denominations", - "denomonation", "denomination", - "deomgraphics", "demographics", - "depencencies", "dependencies", - "dependancies", "dependencies", - "dependencias", "dependencies", - "dependenices", "dependencies", - "dependensies", "dependencies", - "deperecation", "deprecation", - "deplacements", "replacements", - "deregualtion", "deregulation", - "deregulaiton", "deregulation", - "derugulation", "deregulation", - "describtions", "descriptions", - "descriminant", "discriminant", - "descriptivos", "descriptions", - "desctiptions", "descriptions", - "desctruction", "destruction", - "desencitized", "desensitized", - "desensatized", "desensitized", - "desensitived", "desensitized", - "desentisized", "desensitized", - "desentitized", "desensitized", - "desentizised", "desensitized", - "desginations", "destinations", - "desgustingly", "disgustingly", - "desitnations", "destinations", - "despectively", "respectively", - "despensaries", "dispensaries", - "desperatedly", "desperately", - "desperatelly", "desperately", - "desqualified", "disqualified", - "desregarding", "disregarding", - "dessertation", "dissertation", - "destiantions", "destinations", - "destinctions", "destinations", - "destractions", "distractions", - "destributors", "distributors", - "determinanti", "determination", - "determinaton", "determination", - "determinging", "determining", - "determinisic", "deterministic", - "determinisim", "determinism", - "deterministc", "deterministic", - "determinitic", "deterministic", - "detrimential", "detrimental", - "developement", "development", - "developmenet", "developments", - "develpoments", "developments", - "devolopement", "development", - "devolopments", "developments", - "diasspointed", "dissapointed", - "dicitonaries", "dictionaries", - "dictadorship", "dictatorship", - "dictarorship", "dictatorship", - "dictatorshop", "dictatorship", - "dictionaires", "dictionaries", - "didsapointed", "dissapointed", - "differencial", 
"differential", - "differencies", "differences", - "differentate", "differentiate", - "differnetial", "differential", - "difficulites", "difficulties", - "difficutlies", "difficulties", - "diffuculties", "difficulties", - "dimensionals", "dimensions", - "dimensionnal", "dimensional", - "dimensionsal", "dimensional", - "diplomatisch", "diplomatic", - "directionnal", "directional", - "disaapointed", "dissapointed", - "disadvandage", "disadvantaged", - "disadvantged", "disadvantaged", - "disadvantges", "disadvantages", - "disadvatange", "disadvantage", - "disadventage", "disadvantage", - "disagremeent", "disagreements", - "disapointing", "disappointing", - "disappearnce", "disappearance", - "disappearred", "disappeared", - "disapperaing", "disappearing", - "disaspointed", "dissapointed", - "disastisfied", "dissatisfied", - "disatissfied", "dissatisfied", - "disatvantage", "disadvantage", - "discertation", "dissertation", - "disciniplary", "disciplinary", - "disciplanary", "disciplinary", - "disciplenary", "disciplinary", - "disciplinare", "discipline", - "disciplinera", "disciplinary", - "disciplinery", "disciplinary", - "disclipinary", "disciplinary", - "disconencted", "disconnected", - "disconnectes", "disconnects", - "disconnectme", "disconnected", - "disconnectus", "disconnects", - "discontiuned", "discontinued", - "discountined", "discontinued", - "discreditied", "discredited", - "discreditted", "discredited", - "discriminare", "discriminate", - "discriminted", "discriminated", - "disctinction", "distinction", - "disctinctive", "distinctive", - "disctintions", "distinctions", - "discualified", "disqualified", - "discustingly", "disgustingly", - "disemination", "dissemination", - "disenchanged", "disenchanted", - "disengenuous", "disingenuous", - "disenginuous", "disingenuous", - "disensitized", "desensitized", - "disgareement", "disagreements", - "disgruntaled", "disgruntled", - "disgrunteled", "disgruntled", - "disguntingly", "disgustingly", - "disingeneous", "disingenuous", - "disingenious", "disingenuous", - "disinteresed", "disinterested", - "disintereted", "disinterested", - "dismantleing", "dismantling", - "disobediance", "disobedience", - "disobeidence", "disobedience", - "dispalcement", "displacement", - "dispapointed", "dissapointed", - "dispencaries", "dispensaries", - "dispensaires", "dispensaries", - "dispensarios", "dispensaries", - "dispensiries", "dispensaries", - "dispensories", "dispensaries", - "disqaulified", "disqualified", - "disqualifyed", "disqualified", - "disqustingly", "disgustingly", - "disrecpected", "disrespected", - "disrepsected", "disrespected", - "disresepcted", "disrespected", - "disrespecful", "disrespectful", - "disrespecing", "disrespecting", - "disrespectul", "disrespectful", - "disrespekted", "disrespected", - "disrtibution", "distributions", - "dissapearing", "disappearing", - "dissapionted", "dissapointed", - "dissapoimted", "dissapointed", - "dissapoitned", "dissapointed", - "dissaponited", "dissapointed", - "dissapoonted", "dissapointed", - "dissapounted", "dissapointed", - "dissappinted", "dissapointed", - "dissapponted", "dissapointed", - "dissastified", "dissatisfied", - "dissatisifed", "dissatisfied", - "dissatsified", "dissatisfied", - "dissepointed", "dissapointed", - "dissipointed", "dissapointed", - "dissobediant", "disobedient", - "dissobedient", "disobedient", - "dissopointed", "dissapointed", - "disspaointed", "dissapointed", - "dissppointed", "dissapointed", - "dissspointed", "dissapointed", - "distinations", "distinctions", - 
"distincitons", "distinctions", - "distingished", "distinguished", - "distingishes", "distinguishes", - "distinguised", "distinguished", - "distirbuting", "distributing", - "distirbution", "distribution", - "distrabution", "distribution", - "distribitors", "distributors", - "distribtuion", "distributions", - "distribucion", "distribution", - "distribuited", "distributed", - "distribuiton", "distributions", - "distribuitor", "distributor", - "distribusion", "distributions", - "distributino", "distributions", - "distributior", "distributor", - "distributons", "distributors", - "distributore", "distribute", - "distriubtion", "distributions", - "distrobution", "distribution", - "distrubances", "disturbance", - "distrubiting", "distributing", - "distrubition", "distribution", - "distrubitors", "distributors", - "distrubution", "distribution", - "distrubutors", "distributors", - "distructions", "distractions", - "distustingly", "disgustingly", - "ditactorship", "dictatorship", - "documenation", "documentation", - "documentaion", "documentation", - "documentaire", "documentaries", - "documentarse", "documentaries", - "documentarsi", "documentaries", - "domesitcated", "domesticated", - "domisticated", "domesticated", - "donesticated", "domesticated", - "donwloadable", "downloadable", - "dossapointed", "dissapointed", - "downlaodable", "downloadable", - "downloadbale", "downloadable", - "downloadeble", "downloadable", - "drankenstein", "frankenstein", - "dublications", "publications", - "dusgustingly", "disgustingly", - "dynamicallly", "dynamically", - "dyregulation", "deregulation", - "earthquackes", "earthquakes", - "earthquakers", "earthquakes", - "econimically", "economically", - "economisesti", "economists", - "educationnal", "educational", - "effectionate", "affectionate", - "effectivelly", "effectively", - "effectivenss", "effectiveness", - "efficienctly", "efficiency", - "effordlessly", "effortlessly", - "ejacualtions", "ejaculation", - "electorlytes", "electrolytes", - "electricrain", "electrician", - "electrictian", "electrician", - "electrobytes", "electrolytes", - "electrocytes", "electrolytes", - "electrolites", "electrolytes", - "electroltyes", "electrolytes", - "electronicas", "electronics", - "electronicos", "electronics", - "electroyltes", "electrolytes", - "elektrolytes", "electrolytes", - "eloctrolytes", "electrolytes", - "embarassment", "embarrassment", - "embarasssing", "embarassing", - "embarrasment", "embarrassment", - "embarressing", "embarrassing", - "embarrissing", "embarrassing", - "emberrassing", "embarrassing", - "emphetamines", "amphetamines", - "emprisonment", "imprisonment", - "encarcerated", "incarcerated", - "enceclopedia", "encyclopedia", - "enchancement", "enhancement", - "enchancments", "enchantments", - "enchantmants", "enchantments", - "enchentments", "enchantments", - "enciclopedia", "encyclopedia", - "enclycopedia", "encyclopedia", - "encorporated", "incorporated", - "encourageing", "encouraging", - "encyclapedia", "encyclopedia", - "encyclepedia", "encyclopedia", - "encyclopadia", "encyclopedia", - "encyclopeida", "encyclopedia", - "encyclopidia", "encyclopedia", - "encycolpedia", "encyclopedia", - "encyklopedia", "encyclopedia", - "encylcopedia", "encyclopedia", - "encyplopedia", "encyclopedia", - "endoresments", "endorsement", - "enemployment", "unemployment", - "enfringement", "infringement", - "enlightended", "enlightened", - "enlightenend", "enlightened", - "enlightented", "enlightened", - "enlightining", "enlightening", - "enligthening", 
"enlightening", - "entaglements", "entanglements", - "entartaining", "entertaining", - "enterpreneur", "entrepreneurs", - "enterprenuer", "entrepreneur", - "entertainted", "entertained", - "enthusiaists", "enthusiasts", - "enthusuastic", "enthusiastic", - "entoxication", "intoxication", - "entrepeneurs", "entrepreneurs", - "entreperneur", "entrepreneurs", - "entreprenaur", "entrepreneur", - "entrepreners", "entrepreneurs", - "entrepreneus", "entrepreneurs", - "entreprenour", "entrepreneur", - "entreprenure", "entrepreneurs", - "entreprenurs", "entrepreneurs", - "entrepreuner", "entrepreneurs", - "entretaining", "entertaining", - "enviormental", "environmental", - "enviornments", "environments", - "enviromental", "environmental", - "environemnts", "environments", - "environmentl", "environmentally", - "environmetal", "environmental", - "envrionments", "environments", - "errorneously", "erroneously", - "establishmet", "establishments", - "evelutionary", "evolutionary", - "exagerrating", "exaggerating", - "exaggarating", "exaggerating", - "exaggaration", "exaggeration", - "exaggeratted", "exaggerated", - "exaggurating", "exaggerating", - "exagguration", "exaggeration", - "exceptionaly", "exceptionally", - "exceptionnal", "exceptional", - "exclusiveity", "exclusivity", - "exclusivelly", "exclusively", - "exclusivitiy", "exclusivity", - "excorciating", "excruciating", - "excrusiating", "excruciating", - "excurciating", "excruciating", - "exectuioners", "executioner", - "executioneer", "executioner", - "executionees", "executions", - "executioness", "executions", - "executionier", "executioner", - "executionner", "executioner", - "exeggerating", "exaggerating", - "exeggeration", "exaggeration", - "expeditonary", "expeditionary", - "expendatures", "expenditures", - "expendetures", "expenditures", - "expentitures", "expenditures", - "experamental", "experimental", - "expereincing", "experiencing", - "experemental", "experimental", - "experiancing", "experiencing", - "experiemntal", "experimental", - "experiemnted", "experimented", - "experimantal", "experimental", - "experimentan", "experimentation", - "experimentes", "experiments", - "experimentle", "experimented", - "experimentos", "experiments", - "experimentul", "experimental", - "expidentures", "expenditures", - "expierencing", "experiencing", - "expiremental", "experimental", - "expiremented", "experimented", - "explaination", "explanation", - "explenations", "explanations", - "expliotation", "exploitation", - "exploitaiton", "exploitation", - "exploitating", "exploitation", - "exploititive", "exploitative", - "explortation", "exploitation", - "explotiation", "exploitation", - "explotiative", "exploitative", - "expolitation", "exploitation", - "expolitative", "exploitative", - "exponentialy", "exponentially", - "expropiation", "expropriation", - "extensivelly", "extensively", - "extradiction", "extradition", - "extraordiary", "extraordinary", - "extraordinay", "extraordinary", - "extrapolerat", "extrapolate", - "extrapoloate", "extrapolate", - "extremistisk", "extremists", - "extrordinary", "extraordinary", - "extruciating", "excruciating", - "facilitatile", "facilitate", - "fahrenheight", "fahrenheit", - "falmethrower", "flamethrower", - "familiarlize", "familiarize", - "fanslaughter", "manslaughter", - "fantasticaly", "fantastically", - "fantasticlly", "fantastically", - "fashionalble", "fashionable", - "fermantation", "fermentation", - "fermentacion", "fermentation", - "fermentaiton", "fermentation", - "fermentating", "fermentation", - 
"fermintation", "fermentation", - "fictionaries", "dictionaries", - "figuartively", "figuratively", - "figuratevely", "figuratively", - "figurativley", "figuratively", - "figuretively", "figuratively", - "figuritively", "figuratively", - "fingerpoints", "fingerprints", - "firefigthers", "firefighters", - "flamethorwer", "flamethrower", - "flametrhower", "flamethrower", - "flanethrower", "flamethrower", - "flexibillity", "flexibility", - "flourishment", "flourishing", - "fluctiations", "fluctuations", - "flucutations", "fluctuations", - "fluxtuations", "fluctuations", - "forgivenness", "forgiveness", - "fortunatelly", "fortunately", - "framethrower", "flamethrower", - "frankenstain", "frankenstein", - "frankensteen", "frankenstein", - "frankenstine", "frankenstein", - "frankinstein", "frankenstein", - "frementation", "fermentation", - "friendzonded", "friendzoned", - "friendzonned", "friendzoned", - "friendzowned", "friendzoned", - "fringeworthy", "cringeworthy", - "fronkenstein", "frankenstein", - "fruitsations", "frustrations", - "frustrastion", "frustrations", - "fucntionally", "functionally", - "funcitonally", "functionally", - "functionable", "functional", - "functionaliy", "functionally", - "functionalty", "functionality", - "functionlity", "functionality", - "functionning", "functioning", - "fundamentais", "fundamentals", - "fundamentalt", "fundamentalist", - "fundamentaly", "fundamentally", - "fundemantals", "fundamentals", - "fundementals", "fundamentals", - "fundimentals", "fundamentals", - "furstrations", "frustrations", - "futuristisch", "futuristic", - "fwankenstein", "frankenstein", - "geneological", "genealogical", - "generacional", "generational", - "generalizare", "generalize", - "generalizate", "generalize", - "generelizing", "generalizing", - "geograhpical", "geographical", - "geographicly", "geographical", - "geographisch", "geographic", - "geogrpahical", "geographical", - "goegraphical", "geographical", - "governemntal", "governmental", - "governmently", "governmental", - "grammaticaal", "grammatical", - "grammaticaly", "grammatically", - "grandchilden", "grandchildren", - "grandchilder", "grandchildren", - "grandchilren", "grandchildren", - "grassrooters", "grassroots", - "gringeworthy", "cringeworthy", - "guantanameow", "guantanamo", - "guantanamero", "guantanamo", - "hallucinatin", "hallucinations", - "hallucinaton", "hallucination", - "handwritting", "handwriting", - "harrassments", "harassments", - "headqaurters", "headquarters", - "headquatered", "headquartered", - "healthercare", "healthcare", - "heavywieghts", "heavyweight", - "helicopteros", "helicopters", - "hererosexual", "heterosexual", - "heretosexual", "heterosexual", - "heteresexual", "heterosexual", - "hetreosexual", "heterosexual", - "highligthing", "highlighting", - "hipocritical", "hypocritical", - "hipothetical", "hypothetical", - "histarically", "historically", - "histerically", "historically", - "historicians", "historians", - "homogeneized", "homogenized", - "homogenenous", "homogeneous", - "homosexuales", "homosexuals", - "homosexualiy", "homosexuality", - "homosexualls", "homosexuals", - "homosexualty", "homosexuality", - "homosexuella", "homosexual", - "hopsitalized", "hospitalized", - "horisontally", "horizontally", - "horizantally", "horizontally", - "horiztonally", "horizontally", - "horozontally", "horizontally", - "hospitallity", "hospitality", - "hospitilized", "hospitalized", - "hospitolized", "hospitalized", - "hosptialized", "hospitalized", - "humanitarien", "humanitarian", - 
"humanitarion", "humanitarian", - "humanitatian", "humanitarian", - "humaniterian", "humanitarian", - "humantiarian", "humanitarian", - "huminatarian", "humanitarian", - "hurricanefps", "hurricanes", - "hyopthetical", "hypothetical", - "hypathetical", "hypothetical", - "hypertrophey", "hypertrophy", - "hypethetical", "hypothetical", - "hypocrticial", "hypocritical", - "hypocrytical", "hypocritical", - "hypotehtical", "hypothetical", - "hypotethical", "hypothetical", - "hypotherical", "hypothetical", - "hypotheticly", "hypothetical", - "hystarically", "hysterically", - "hystorically", "hysterically", - "idealistisch", "idealistic", - "identificato", "identification", - "identifierad", "identified", - "identifieras", "identifies", - "identifyable", "identifiable", - "ideologicaly", "ideologically", - "idiosyncracy", "idiosyncrasy", - "illegetimate", "illegitimate", - "illegitamate", "illegitimate", - "illegitamite", "illegitimate", - "illegitemate", "illegitimate", - "illegitimite", "illegitimate", - "illigetimate", "illegitimate", - "illigitemate", "illegitimate", - "illistration", "illustration", - "illsutration", "illustrations", - "illustartion", "illustration", - "illustraitor", "illustrator", - "illustraties", "illustrate", - "illustratior", "illustrator", - "imcompatible", "incompatible", - "imcompetence", "incompetence", - "imexperience", "inexperience", - "immediatelly", "immediately", - "immortallity", "immortality", - "imperialfist", "imperialist", - "imperialisim", "imperialism", - "imperialstic", "imperialist", - "implamenting", "implementing", - "implausibile", "implausible", - "implecations", "implications", - "implementase", "implements", - "implementasi", "implements", - "implementato", "implementation", - "implentation", "implementation", - "implimenting", "implementing", - "imporvements", "improvements", - "impossibilty", "impossibility", - "impossiblely", "impossibly", - "impossiblity", "impossibly", - "impovershied", "impoverished", - "impoversihed", "impoverished", - "imprefection", "imperfections", - "improsonment", "imprisonment", - "improviserad", "improvised", - "imrpovements", "improvements", - "imtimidating", "intimidating", - "imtimidation", "intimidation", - "inaccesibles", "inaccessible", - "inaccessable", "inaccessible", - "inaccessbile", "inaccessible", - "inaccurasies", "inaccuracies", - "inaccuraties", "inaccuracies", - "inaccuricies", "inaccuracies", - "inacuraccies", "inaccuracies", - "inadvertenly", "inadvertently", - "inappropiate", "inappropriate", - "inapproprate", "inappropriate", - "inappropriae", "inappropriately", - "inappropriet", "inappropriately", - "inattractive", "unattractive", - "inbelievable", "unbelievable", - "incarcelated", "incarcerated", - "incarcirated", "incarcerated", - "incarserated", "incarcerated", - "incedentally", "incidentally", - "incentiveise", "incentives", - "incestigator", "investigator", - "incomaptible", "incompatible", - "incomparible", "incompatible", - "incompatable", "incompatible", - "incompatibil", "incompatible", - "incompetance", "incompetence", - "incompetente", "incompetence", - "incompitable", "incompatible", - "incomptetent", "incompetent", - "inconcistent", "inconsistent", - "inconsistant", "inconsistent", - "inconsistecy", "inconsistency", - "inconsisteny", "inconsistency", - "inconveinent", "inconvenient", - "inconveniant", "inconvenient", - "inconveniece", "inconvenience", - "inconvenince", "inconvenience", - "inconvienent", "inconvenient", - "incorparated", "incorporated", - "incorperated", 
"incorporated", - "incorportaed", "incorporated", - "incorportate", "incorporate", - "incrediblely", "incredibly", - "incrementers", "increments", - "incremential", "incremental", - "indefinately", "indefinitely", - "indefineable", "undefinable", - "indefinetely", "indefinitely", - "indefinitive", "indefinite", - "indefinitley", "indefinitely", - "indefintiely", "indefinitely", - "indepedantly", "independently", - "indepencence", "independence", - "independance", "independence", - "independante", "independents", - "independenet", "independents", - "independenly", "independently", - "independense", "independents", - "independente", "independence", - "independetly", "independently", - "indepentents", "independents", - "indetifiable", "identifiable", - "indianaoplis", "indianapolis", - "indianopolis", "indianapolis", - "indicentally", "incidentally", - "indifferance", "indifference", - "indifferente", "indifference", - "indiffernece", "indifference", - "indimidating", "intimidating", - "indimidation", "intimidation", - "indipendence", "independence", - "indisputible", "indisputable", - "indisputibly", "indisputably", - "individuales", "individuals", - "individualty", "individuality", - "individuella", "individual", - "indiviudally", "individually", - "indivudually", "individually", - "indpendently", "independently", - "indroduction", "introduction", - "indroductory", "introductory", - "industriella", "industrial", - "industrijske", "industries", - "inefficienct", "inefficient", - "inefficienty", "inefficiently", - "inevitablely", "inevitably", - "inevitablity", "inevitably", - "inevititably", "inevitably", - "inexblicably", "inexplicably", - "inexpectedly", "unexpectedly", - "inexpereince", "inexperience", - "inexperiance", "inexperience", - "inexperieced", "inexperienced", - "inexperiened", "inexperienced", - "inexperiente", "inexperience", - "inexpierence", "inexperienced", - "inexplicabil", "inexplicably", - "inexplicibly", "inexplicably", - "infalability", "infallibility", - "infilitrated", "infiltrated", - "infiltraitor", "infiltrator", - "infiltratior", "infiltrator", - "infiltratred", "infiltrate", - "influenceing", "influencing", - "infogrpahics", "infographic", - "inforgivable", "unforgivable", - "infrantryman", "infantryman", - "infridgement", "infringement", - "infrignement", "infringement", - "ingestigator", "investigator", - "ingredientes", "ingredients", - "ingreediants", "ingredients", - "ininterested", "uninterested", - "initalizable", "initializable", - "inkompatible", "incompatible", - "inkompetence", "incompetence", - "inkonsistent", "inconsistent", - "inlightening", "enlightening", - "innersection", "intersection", - "innerstellar", "interstellar", - "inpenetrable", "impenetrable", - "inplementing", "implementing", - "inplications", "implications", - "inpoverished", "impoverished", - "inprisonment", "imprisonment", - "inproductive", "unproductive", - "inprovements", "improvements", - "inresponsive", "unresponsive", - "insentivised", "insensitive", - "insentivises", "insensitive", - "insignifiant", "insignificant", - "insignificat", "insignificant", - "insinuationg", "insinuating", - "instabillity", "instability", - "instalaltion", "installations", - "installatons", "installations", - "installatron", "installation", - "instantaneos", "instantaneous", - "instantaneus", "instantaneous", - "instantanous", "instantaneous", - "instinctivly", "instinctively", - "institutuion", "institution", - "instramental", "instrumental", - "instrcutions", "instruction", - "instrucitons", 
"instruction", - "instructiosn", "instruction", - "instructores", "instructors", - "instrumentos", "instruments", - "instrumentul", "instrumental", - "insturmental", "instrumental", - "instutitions", "institutions", - "insuccessful", "unsuccessful", - "insufficiant", "insufficient", - "insuffucient", "insufficient", - "insuspecting", "unsuspecting", - "intaxication", "intoxication", - "intelelctual", "intellectuals", - "intellectals", "intellectuals", - "intellectaul", "intellectuals", - "intellectuel", "intellectual", - "intellecutal", "intellectual", - "intelligance", "intelligence", - "intelligenly", "intelligently", - "intelligente", "intelligence", - "intelligenty", "intelligently", - "intelligient", "intelligent", - "intenational", "international", - "intentionnal", "intentional", - "intepretator", "interpretor", - "interatellar", "interstellar", - "interational", "international", - "intercection", "interception", - "intercepcion", "interception", - "interceptons", "interceptions", - "intereaction", "intersection", - "interections", "interactions", - "interersting", "interpreting", - "interesction", "intersection", - "interestigly", "interestingly", - "interestinly", "interestingly", - "interferance", "interference", - "interfereing", "interfering", - "interferisce", "interferes", - "interferisse", "interferes", - "interferring", "interfering", - "intergration", "integration", - "interlectual", "intellectual", - "intermediare", "intermediate", - "intermediete", "intermediate", - "intermettent", "intermittent", - "intermideate", "intermediate", - "intermidiate", "intermediate", - "internatinal", "international", - "internationl", "international", - "internations", "interactions", - "internediate", "intermediate", - "internelized", "internalized", - "internilized", "internalized", - "interperters", "interpreter", - "interperting", "interpreting", - "interprating", "interpreting", - "interpretare", "interpreter", - "interpretato", "interpretation", - "interpreteer", "interpreter", - "interpretier", "interpreter", - "interpretion", "interpreting", - "interpretter", "interpreter", - "interpriting", "interpreting", - "interraccial", "interracial", - "interractial", "interracial", - "interrogatin", "interrogation", - "interrumping", "interrupting", - "interrupteds", "interrupts", - "interruptors", "interrupts", - "interseccion", "intersection", - "interseciton", "intersections", - "interseption", "interception", - "intersetllar", "interstellar", - "interstallar", "interstellar", - "interstaller", "interstellar", - "intersteller", "interstellar", - "interstellor", "interstellar", - "intertaining", "entertaining", - "intertwinded", "intertwined", - "intertwinned", "intertwined", - "interveiwing", "interviewing", - "intervencion", "intervention", - "interveneing", "intervening", - "intervension", "intervention", - "interviening", "interviewing", - "intidimation", "intimidation", - "intillectual", "intellectual", - "intimidacion", "intimidation", - "intimidative", "intimidate", - "intimitading", "intimidating", - "intimitating", "intimidating", - "intimitation", "intimidation", - "intorduction", "introduction", - "intorductory", "introductory", - "intoxicacion", "intoxication", - "intoxination", "intoxication", - "intrepreting", "interpreting", - "intrinsicaly", "intrinsically", - "introdiction", "introduction", - "introduccion", "introduction", - "introduceras", "introduces", - "introduceres", "introduces", - "introduciton", "introduction", - "introductary", "introductory", - "introducting", 
"introduction", - "introductury", "introductory", - "introduktion", "introduction", - "introspectin", "introspection", - "intruduction", "introduction", - "intruductory", "introductory", - "intsrumental", "instrumental", - "intuitivelly", "intuitively", - "inturrupting", "interrupting", - "invervention", "intervention", - "investagated", "investigated", - "investagator", "investigator", - "investegated", "investigated", - "investegator", "investigator", - "investigaron", "investigator", - "investigater", "investigator", - "investigatie", "investigative", - "investigatin", "investigation", - "investigatio", "investigator", - "investigaton", "investigation", - "investingate", "investigate", - "investogator", "investigator", - "invicibility", "invisibility", - "invididually", "individually", - "invisibiltiy", "invisibility", - "invisilibity", "invisibility", - "invisivility", "invisibility", - "invlunerable", "invulnerable", - "involnerable", "invulnerable", - "involuntairy", "involuntary", - "involuntarly", "involuntary", - "invonvenient", "inconvenient", - "invulenrable", "invulnerable", - "invulernable", "invulnerable", - "invulnarable", "invulnerable", - "invulnerbale", "invulnerable", - "invulnurable", "invulnerable", - "invulverable", "invulnerable", - "invunlerable", "invulnerable", - "invurnerable", "invulnerable", - "irrationably", "irrationally", - "irrationatly", "irrationally", - "irrationella", "irrational", - "irreplacable", "irreplaceable", - "irresistable", "irresistible", - "irresistably", "irresistibly", - "irrespecitve", "irrespective", - "irresponsble", "irresponsible", - "irresponsibe", "irresponsible", - "irreverisble", "irreversible", - "irreversebly", "irreversible", - "irreversibel", "irreversible", - "irrevirsible", "irreversible", - "irrispective", "irrespective", - "irriversible", "irreversible", - "isdefinitely", "indefinitely", - "isntallation", "installation", - "isntrumental", "instrumental", - "jackonsville", "jacksonville", - "jounralistic", "journalistic", - "jouranlistic", "journalistic", - "journalisitc", "journalistic", - "journalistes", "journalists", - "judgementals", "judgements", - "juggernaunts", "juggernaut", - "juridisction", "jurisdictions", - "jurisdiccion", "jurisdiction", - "jurisdiciton", "jurisdiction", - "jurisdiktion", "jurisdiction", - "jurisfiction", "jurisdiction", - "jurisidction", "jurisdiction", - "juristiction", "jurisdiction", - "jursidiction", "jurisdiction", - "jusridiction", "jurisdiction", - "justificatin", "justifications", - "katastrophic", "catastrophic", - "kidnergarten", "kindergarten", - "kindergarden", "kindergarten", - "kingergarten", "kindergarten", - "kintergarten", "kindergarten", - "knolwedgable", "knowledgable", - "knoweldgable", "knowledgable", - "knowladgable", "knowledgable", - "knowldegable", "knowledgable", - "knowldgeable", "knowledgable", - "knowleagable", "knowledgable", - "knowledagble", "knowledgable", - "knowledeable", "knowledgable", - "knowledgabel", "knowledgable", - "knowledgeble", "knowledgeable", - "knowledgebly", "knowledgable", - "knowledgible", "knowledgable", - "knowlegdable", "knowledgable", - "knowlegeable", "knowledgeable", - "knwoledgable", "knowledgable", - "kolonization", "colonization", - "kombinations", "combinations", - "kommissioner", "commissioner", - "kompensation", "compensation", - "konfidential", "confidential", - "konfirmation", "confirmation", - "kongregation", "congregation", - "konservatism", "conservatism", - "konservative", "conservative", - "konsultation", "consultation", - 
"konversation", "conversation", - "koordination", "coordination", - "krankenstein", "frankenstein", - "leaglization", "legalization", - "legalizacion", "legalization", - "legalizaiton", "legalization", - "legendariske", "legendaries", - "legimitately", "legitimately", - "legislatiors", "legislators", - "legistration", "registration", - "legitamately", "legitimately", - "legitamitely", "legitimately", - "legitemately", "legitimately", - "legitimatley", "legitimately", - "legitimitely", "legitimately", - "liberatrians", "libertarians", - "libertarains", "libertarians", - "libertariens", "libertarians", - "libertaryans", "libertarians", - "libertatians", "libertarians", - "liberterians", "libertarians", - "libretarians", "libertarians", - "lighthearded", "lighthearted", - "linguisticas", "linguistics", - "linguisticos", "linguistics", - "linguistisch", "linguistics", - "litllefinger", "littlefinger", - "littelfinger", "littlefinger", - "litterfinger", "littlefinger", - "littiefinger", "littlefinger", - "littlefigner", "littlefinger", - "littlefinder", "littlefinger", - "littlepinger", "littlefinger", - "lnowledgable", "knowledgable", - "longitudonal", "longitudinal", - "madturbating", "masturbating", - "madturbation", "masturbation", - "magnificient", "magnificent", - "maintainance", "maintenance", - "maintainence", "maintenance", - "maintenaince", "maintenance", - "malfucntions", "malfunction", - "manafactured", "manufactured", - "manafacturer", "manufacturer", - "manafactures", "manufactures", - "manifactured", "manufactured", - "manifacturer", "manufacturer", - "manifactures", "manufactures", - "manifestaion", "manifestation", - "manifestanti", "manifestation", - "manipluating", "manipulating", - "manipluation", "manipulation", - "manipualting", "manipulating", - "manipualtion", "manipulation", - "manipualtive", "manipulative", - "manipulacion", "manipulation", - "manipulitive", "manipulative", - "maniuplating", "manipulating", - "maniuplation", "manipulation", - "maniuplative", "manipulative", - "manouverable", "maneuverable", - "mansalughter", "manslaughter", - "manslaugther", "manslaughter", - "mansluaghter", "manslaughter", - "manufactered", "manufactured", - "manufacterer", "manufacturer", - "manufacteres", "manufactures", - "manufacteurs", "manufactures", - "manufactored", "manufactured", - "manufactorer", "manufacturer", - "manufactores", "manufactures", - "manufactuers", "manufacturers", - "manufactuing", "manufacturing", - "manufacturas", "manufactures", - "manufacturor", "manufacturer", - "manufactuter", "manufacture", - "manufacuters", "manufactures", - "manufacutred", "manufacture", - "manufacutres", "manufactures", - "manufaturing", "manufacturing", - "manupilating", "manipulating", - "manupulating", "manipulating", - "manupulation", "manipulation", - "manupulative", "manipulative", - "marchmallows", "marshmallows", - "marganilized", "marginalized", - "margenalized", "marginalized", - "marginilized", "marginalized", - "marhsmallows", "marshmallows", - "marshamllows", "marshmallows", - "marshmallons", "marshmallows", - "masoginistic", "misogynistic", - "masogynistic", "misogynistic", - "massachusets", "massachusetts", - "massachustts", "massachusetts", - "masterbation", "masturbation", - "masterpeices", "masterpiece", - "mastrubating", "masturbating", - "mastrubation", "masturbation", - "mastubration", "masturbation", - "masturabting", "masturbating", - "masturabtion", "masturbation", - "masturbacion", "masturbation", - "masturbaited", "masturbated", - "masturbathon", 
"masturbation", - "masturbsting", "masturbating", - "masturdating", "masturbating", - "mastutbation", "masturbation", - "mataphorical", "metaphorical", - "mataphysical", "metaphysical", - "matchmakeing", "matchmaking", - "mathemathics", "mathematics", - "mathematican", "mathematician", - "mathematicas", "mathematics", - "mathematicks", "mathematics", - "mathematicly", "mathematical", - "mathematisch", "mathematics", - "mathemetical", "mathematical", - "matheticians", "mathematicians", - "mathimatical", "mathematical", - "mathmatician", "mathematician", - "mecahnically", "mechanically", - "mechancially", "mechanically", - "meditaciones", "medications", - "mediteranean", "mediterranean", - "mediterraean", "mediterranean", - "mediterranen", "mediterranean", - "memerization", "memorization", - "memorizacion", "memorization", - "memorozation", "memorization", - "metalurgical", "metallurgical", - "metaphisical", "metaphysical", - "metaphoricly", "metaphorical", - "metaphsyical", "metaphysical", - "metaphyiscal", "metaphysical", - "metaphyscial", "metaphysical", - "metaphysisch", "metaphysics", - "metephorical", "metaphorical", - "metephysical", "metaphysical", - "meterologist", "meteorologist", - "meterosexual", "heterosexual", - "methaporical", "metaphorical", - "methematical", "mathematical", - "metiphorical", "metaphorical", - "metophorical", "metaphorical", - "metorpolitan", "metropolitan", - "metrololitan", "metropolitan", - "metropilitan", "metropolitan", - "metroploitan", "metropolitan", - "metropolians", "metropolis", - "metropoliten", "metropolitan", - "metropolitin", "metropolitan", - "metropoliton", "metropolitan", - "microcentres", "microcenter", - "microphonies", "microphones", - "microscophic", "microscopic", - "microscopice", "microscope", - "microscoptic", "microscopic", - "midfieldiers", "midfielders", - "millenialism", "millennialism", - "millionairre", "millionaire", - "millionaries", "millionaires", - "millioniares", "millionaires", - "minimalisitc", "minimalist", - "minimalisity", "minimalist", - "mininterpret", "misinterpret", - "minipulating", "manipulating", - "minipulation", "manipulation", - "minipulative", "manipulative", - "miracilously", "miraculously", - "miracurously", "miraculous", - "miscarraiges", "miscarriage", - "miscelaneous", "miscellaneous", - "miscellanous", "miscellaneous", - "mischievious", "mischievous", - "misdameanors", "misdemeanors", - "misdeamenors", "misdemeanor", - "misfourtunes", "misfortunes", - "misgoynistic", "misogynistic", - "misinterpert", "misinterpret", - "misinterpred", "misinterpreted", - "misinterprit", "misinterpreting", - "misinterpted", "misinterpret", - "misintrepret", "misinterpret", - "misisonaries", "missionaries", - "misoganistic", "misogynistic", - "misogenistic", "misogynistic", - "misoginystic", "misogynistic", - "misognyistic", "misogynistic", - "misogonistic", "misogynistic", - "misogynisitc", "misogynistic", - "misogynsitic", "misogynistic", - "misogynystic", "misogynistic", - "missionaires", "missionaries", - "mississipppi", "mississippi", - "misspellling", "misspelling", - "misteriously", "mysteriously", - "misundersood", "misunderstood", - "misunderstod", "misunderstood", - "misygonistic", "misogynistic", - "modificacion", "modification", - "modificaiton", "modification", - "modificatons", "modifications", - "modifikation", "modification", - "modivational", "motivational", - "moisterizing", "moisturizing", - "moistorizing", "moisturizing", - "moisutrizing", "moisturizing", - "momentarilly", "momentarily", - 
"monolithisch", "monolithic", - "mositurizing", "moisturizing", - "motherbaords", "motherboards", - "motherborads", "motherboards", - "motivacional", "motivational", - "motovational", "motivational", - "mousturizing", "moisturizing", - "muktitasking", "multitasking", - "mulittasking", "multitasking", - "multinatinal", "multinational", - "multitaksing", "multitasking", - "munipulative", "manipulative", - "mutlitasking", "multitasking", - "mysoganistic", "misogynistic", - "mysogenistic", "misogynistic", - "mysogonistic", "misogynistic", - "mysterioulsy", "mysteriously", - "nacionalists", "nationalists", - "narcisisstic", "narcissistic", - "narcissictic", "narcissistic", - "narcissisism", "narcissism", - "narcissisist", "narcissist", - "narcissisitc", "narcissist", - "narcississts", "narcissist", - "narssicistic", "narcissistic", - "natioanlists", "nationalists", - "nationalisic", "nationalistic", - "nationalisim", "nationalism", - "nationalistc", "nationalistic", - "nationalites", "nationalist", - "nationalitic", "nationalistic", - "nationalitys", "nationalist", - "nationallity", "nationally", - "nationalsits", "nationalists", - "nationalties", "nationalist", - "nazionalists", "nationalists", - "neccessarily", "necessarily", - "neccessities", "necessities", - "necessarilly", "necessarily", - "necessitites", "necessities", - "neckbearders", "neckbeards", - "neckbeardese", "neckbeards", - "neckbeardest", "neckbeards", - "neckbeardies", "neckbeards", - "neckbeardius", "neckbeards", - "negociations", "negotiations", - "negoitations", "negotiations", - "negotiatians", "negotiations", - "negotiatiing", "negotiating", - "negotiationg", "negotiating", - "negotiatiors", "negotiations", - "neigbhorhood", "neighborhoods", - "neigbourhood", "neighbourhood", - "neighboorhod", "neighbourhood", - "neighborhing", "neighboring", - "neighborhods", "neighborhoods", - "neighbourghs", "neighbours", - "neighbourhod", "neighbourhood", - "neighbourood", "neighbourhood", - "neighbrohood", "neighborhoods", - "neighourhood", "neighborhood", - "neoroscience", "neuroscience", - "neruological", "neurological", - "neruoscience", "neuroscience", - "netropolitan", "metropolitan", - "neuorscience", "neuroscience", - "neuralogical", "neurological", - "neuroligical", "neurological", - "neurosceince", "neuroscience", - "neuroscienze", "neuroscience", - "neurosicence", "neuroscience", - "neverhteless", "nevertheless", - "nieghborhood", "neighborhood", - "norhtwestern", "northwestern", - "nothingsness", "nothingness", - "noticeablely", "noticeably", - "notificacion", "notification", - "notificaiton", "notification", - "notificatons", "notifications", - "nuerological", "neurological", - "nueroscience", "neuroscience", - "nutritionnal", "nutritional", - "obersvations", "observations", - "objectivelly", "objectively", - "objectiviser", "objectives", - "objectivitiy", "objectivity", - "obversations", "observations", - "ocassionally", "occasionally", - "occaisonally", "occasionally", - "occasioanlly", "occasionally", - "occassionaly", "occasionally", - "occationally", "occasionally", - "occurrencies", "occurrences", - "offensivelly", "offensively", - "ogranisation", "organisation", - "omniverously", "omnivorously", - "operationnal", "operational", - "opportuniste", "opportunities", - "opportunites", "opportunities", - "oppositition", "opposition", - "opthalmology", "ophthalmology", - "optimistisch", "optimistic", - "optimizacion", "optimization", - "optimizating", "optimization", - "optimziation", "optimization", - "optmizations", 
"optimizations", - "oragnisation", "organisation", - "orchastrated", "orchestrated", - "orchestarted", "orchestrated", - "orchestraded", "orchestrated", - "orchistrated", "orchestrated", - "orgainsation", "organisation", - "orgainzation", "organizations", - "organisaiton", "organisation", - "organisatons", "organisations", - "organistaion", "organisation", - "organizacion", "organization", - "organizaiton", "organization", - "organizativo", "organization", - "organizatons", "organizations", - "organsiation", "organisation", - "organziation", "organization", - "orginasation", "organisation", - "orginazation", "organization", - "orgnaisation", "organisations", - "originallity", "originality", - "outraegously", "outrageously", - "outrageoulsy", "outrageously", - "outragesouly", "outrageously", - "outrageuosly", "outrageously", - "outragiously", "outrageously", - "outsourceing", "outsourcing", - "overbearring", "overbearing", - "overblocking", "overclocking", - "overclcoking", "overclocking", - "overclicking", "overclocking", - "overcloaking", "overclocking", - "overclockign", "overclocking", - "overclokcing", "overclocking", - "overhearting", "overreacting", - "overheathing", "overheating", - "overhtinking", "overthinking", - "overhwelming", "overwhelming", - "overlappping", "overlapping", - "overlcocking", "overclocking", - "overreaktion", "overreaction", - "overwealming", "overwhelming", - "overwhelemed", "overwhelmed", - "overwhemling", "overwhelming", - "overwhleming", "overwhelming", - "owerpowering", "overpowering", - "painkilllers", "painkillers", - "palastinians", "palestinians", - "palesitnians", "palestinians", - "palestenians", "palestinians", - "palestinains", "palestinians", - "palestiniens", "palestinians", - "palestininan", "palestinian", - "palestininas", "palestinians", - "palistinians", "palestinians", - "palythroughs", "playthroughs", - "parapharsing", "paraphrasing", - "paraphenalia", "paraphernalia", - "paraphrashed", "paraphrase", - "paraphrazing", "paraphrasing", - "paraprashing", "paraphrasing", - "paraprhasing", "paraphrasing", - "parenthesees", "parentheses", - "parenthesies", "parenthesis", - "parliamentry", "parliamentary", - "partecipants", "participants", - "partecipated", "participated", - "parternships", "partnership", - "particapated", "participated", - "particiapnts", "participant", - "particiapted", "participated", - "participante", "participate", - "participaste", "participants", - "participatie", "participated", - "participatin", "participation", - "participatns", "participant", - "participaton", "participant", - "participents", "participants", - "particualrly", "particularly", - "particulalry", "particularly", - "particullary", "particularly", - "passionatley", "passionately", - "pathalogical", "pathological", - "pathelogical", "pathological", - "patholigical", "pathological", - "paychedelics", "psychedelics", - "paychiatrist", "psychiatrist", - "paychologist", "psychologist", - "paychopathic", "psychopathic", - "penetratiing", "penetrating", - "penisylvania", "pennsylvania", - "pennsilvania", "pennsylvania", - "pennslyvania", "pennsylvania", - "pennsylvaina", "pennsylvania", - "pennsyvlania", "pennsylvania", - "pennyslvania", "pennsylvania", - "penssylvania", "pennsylvania", - "pentsylvania", "pennsylvania", - "percentagens", "percentages", - "perferential", "preferential", - "performantes", "performances", - "performences", "performances", - "perfromances", "performances", - "peridoically", "periodically", - "peripathetic", "peripatetic", - 
"periphereals", "peripherals", - "peripherials", "peripherals", - "permanantely", "permanently", - "permanentely", "permanently", - "permissiable", "permissible", - "peroidically", "periodically", - "perpatrators", "perpetrators", - "perpatuating", "perpetuating", - "perpertators", "perpetrators", - "perpertrated", "perpetrated", - "perpetraitor", "perpetrator", - "perpetraters", "perpetrators", - "perpetuaters", "perpetuates", - "perpitrators", "perpetrators", - "perposefully", "purposefully", - "perposterous", "preposterous", - "perpretators", "perpetrators", - "perpsectives", "perspectives", - "perputrators", "perpetrators", - "perputuating", "perpetuating", - "persepctives", "perspectives", - "perservation", "preservation", - "perseverence", "perseverance", - "personalites", "personalities", - "personallity", "personally", - "personilized", "personalized", - "perspecitves", "perspectives", - "perspectivas", "perspectives", - "persumptuous", "presumptuous", - "perticularly", "particularly", - "pertubations", "perturbations", - "pessimisitic", "pessimistic", - "pessimisstic", "pessimistic", - "phenomenonal", "phenomenal", - "phenomenonly", "phenomenally", - "phenomonenon", "phenomenon", - "phialdelphia", "philadelphia", - "philadalphia", "philadelphia", - "philadelhpia", "philadelphia", - "philadeplhia", "philadelphia", - "philadlephia", "philadelphia", - "philedalphia", "philadelphia", - "philedelphia", "philadelphia", - "philidalphia", "philadelphia", - "philippinnes", "philippines", - "philippinoes", "philippines", - "philisophers", "philosophers", - "philisophies", "philosophies", - "phillippines", "philippines", - "philosiphers", "philosophers", - "philosiphies", "philosophies", - "philosohpers", "philosopher", - "philosohpies", "philosophies", - "philosophiae", "philosophies", - "philosophics", "philosophies", - "philosophios", "philosophies", - "philospohers", "philosophers", - "philospohies", "philosophies", - "photagrapher", "photographer", - "photochopped", "photoshopped", - "photograhper", "photographer", - "photograpers", "photographers", - "photographes", "photographs", - "photographyi", "photographic", - "photogropher", "photographer", - "photogrpahed", "photographed", - "photogrpaher", "photographer", - "photoshipped", "photoshopped", - "photoshooped", "photoshopped", - "photoshoppad", "photoshopped", - "phychedelics", "psychedelics", - "phychiatrist", "psychiatrist", - "phychologist", "psychologist", - "phychopathic", "psychopathic", - "physcedelics", "psychedelics", - "physciatrist", "psychiatrist", - "physcologist", "psychologist", - "physcopathic", "psychopathic", - "physicallity", "physically", - "physiologial", "physiological", - "pilgrimmages", "pilgrimages", - "pitchforkers", "pitchforks", - "pkaythroughs", "playthroughs", - "plabeswalker", "planeswalker", - "plaestinians", "palestinians", - "planeswaller", "planeswalker", - "planeswlaker", "planeswalker", - "planetwalker", "planeswalker", - "plansewalker", "planeswalker", - "plauthroughs", "playthroughs", - "playhtroughs", "playthroughs", - "playtgroughs", "playthroughs", - "playthorughs", "playthroughs", - "playthourghs", "playthroughs", - "playthrougth", "playthroughs", - "playthrouhgs", "playthroughs", - "playthtoughs", "playthroughs", - "playtrhoughs", "playthroughs", - "populationes", "populations", - "pornograpghy", "pornography", - "porportional", "proportional", - "portabillity", "portability", - "portagonists", "protagonists", - "positionning", "positioning", - "positivitely", "positivity", - 
"possessivize", "possessive", - "possibillity", "possibility", - "possiblility", "possibility", - "possiblities", "possibilities", - "powerfisting", "powerlifting", - "powerlfiting", "powerlifting", - "powerlifitng", "powerlifting", - "powerlisting", "powerlifting", - "powetlifting", "powerlifting", - "powrrlifting", "powerlifting", - "practicioner", "practitioner", - "practisioner", "practitioner", - "pratictioner", "practitioners", - "precedessors", "predecessors", - "preconveived", "preconceived", - "predacessors", "predecessors", - "predeccesors", "predecessor", - "predecesores", "predecessor", - "predescesors", "predecessors", - "predessecors", "predecessors", - "predetermind", "predetermined", - "predicessors", "predecessors", - "predocessors", "predecessors", - "predomiantly", "predominately", - "predominanty", "predominantly", - "predominatly", "predominantly", - "preferantial", "preferential", - "preferentail", "preferential", - "preformances", "performances", - "preinitalize", "preinitialize", - "preliminarly", "preliminary", - "prematurelly", "prematurely", - "premillenial", "premillennial", - "preocupation", "preoccupation", - "preperations", "preparations", - "prepetrators", "perpetrators", - "prepetuating", "perpetuating", - "prepostorous", "preposterous", - "preposturous", "preposterous", - "prerequisets", "prerequisite", - "prescirption", "prescriptions", - "prescribtion", "prescription", - "prescripcion", "prescription", - "prescriptons", "prescriptions", - "prescritpion", "prescriptions", - "presedential", "presidential", - "presentacion", "presentation", - "presentaiton", "presentations", - "preservacion", "preservation", - "preservating", "preservation", - "preservativo", "preservation", - "presidencial", "presidential", - "presidenital", "presidential", - "presidentail", "presidential", - "presnetation", "presentations", - "presonalized", "personalized", - "prespectives", "perspectives", - "presrciption", "prescriptions", - "presumpteous", "presumptuous", - "presumputous", "presumptuous", - "prevantative", "preventative", - "preventation", "presentation", - "preventetive", "preventative", - "preventitive", "preventative", - "prezidential", "presidential", - "principlaity", "principality", - "probabiliste", "probabilities", - "probabilites", "probabilities", - "probabillity", "probability", - "probablistic", "probabilistic", - "proclomation", "proclamation", - "proconceived", "preconceived", - "profesisonal", "professionals", - "professiinal", "professionalism", - "professioanl", "professionals", - "professiomal", "professionalism", - "professionel", "professional", - "professionsl", "professionalism", - "professoinal", "professionals", - "professonial", "professionals", - "proffesional", "professional", - "proficientcy", "proficiency", - "profissional", "professional", - "profitabiliy", "profitability", - "profitabilty", "profitability", - "profressions", "progressions", - "progatonists", "protagonists", - "programmeurs", "programmer", - "progressieve", "progressive", - "progressioin", "progressions", - "progressiong", "progressing", - "progressisme", "progresses", - "progressiste", "progresses", - "progressivas", "progressives", - "progressivey", "progressively", - "progressivly", "progressively", - "progressivsm", "progressives", - "progresssing", "progressing", - "progresssion", "progressions", - "progresssive", "progressives", - "prohibitting", "prohibiting", - "projecticles", "projectiles", - "proletariaat", "proletariat", - "proletariant", "proletariat", - 
"proletaricat", "proletariat", - "prominantely", "prominently", - "promiscuious", "promiscuous", - "promisculous", "promiscuous", - "promotionnal", "promotional", - "pronounceing", "pronouncing", - "pronunciaton", "pronunciation", - "propertional", "proportional", - "propesterous", "preposterous", - "proportianal", "proportional", - "proportionel", "proportional", - "proposterous", "preposterous", - "proprotional", "proportional", - "prostetution", "prostitution", - "prostitition", "prostitution", - "prostitucion", "prostitution", - "prostituiton", "prostitution", - "prostitutiei", "prostitute", - "protaganists", "protagonists", - "protaginists", "protagonists", - "protagnoists", "protagonists", - "protestantes", "protestants", - "protoganists", "protagonists", - "prouncements", "pronouncements", - "pruposefully", "purposefully", - "pscyhologist", "psychologist", - "pscyhopathic", "psychopathic", - "pshyciatrist", "psychiatrist", - "pshycologist", "psychologist", - "pshycopathic", "psychopathic", - "psichologist", "psychologist", - "psychaitrist", "psychiatrist", - "psychedellic", "psychedelic", - "psychedilics", "psychedelics", - "psychemedics", "psychedelics", - "psychiatirst", "psychiatrists", - "psychiatrics", "psychiatrist", - "psychiatrict", "psychiatrist", - "psychiatrits", "psychiatrists", - "psychistrist", "psychiatrist", - "psychodelics", "psychedelics", - "psycholigist", "psychologist", - "psychologial", "psychological", - "psychologits", "psychologists", - "psychologyst", "psychologist", - "psychopathes", "psychopaths", - "psychyatrist", "psychiatrist", - "puplications", "publications", - "puritannical", "puritanical", - "purpetrators", "perpetrators", - "purpetuating", "perpetuating", - "purpusefully", "purposefully", - "pyschedelics", "psychedelics", - "pyschiatrist", "psychiatrist", - "pyschologist", "psychologist", - "pyschopathic", "psychopathic", - "qualificaton", "qualification", - "qualifierais", "qualifiers", - "qualtitative", "quantitative", - "quantatitive", "quantitative", - "quantititive", "quantitative", - "quarterblack", "quarterback", - "quesitonable", "questionable", - "questionalbe", "questionable", - "questionning", "questioning", - "questionsign", "questioning", - "radioactieve", "radioactive", - "rationallity", "rationally", - "reactionairy", "reactionary", - "reactionnary", "reactionary", - "realisticaly", "realistically", - "realisticlly", "realistically", - "reasonablely", "reasonably", - "recallection", "recollection", - "reccomending", "recommending", - "reccommended", "recommended", - "recepcionist", "receptionist", - "receptionest", "receptionist", - "recgonizable", "recognizable", - "reciporcated", "reciprocate", - "reciprociate", "reciprocate", - "reciprocrate", "reciprocate", - "recognizible", "recognizable", - "recolleciton", "recollection", - "recommanding", "recommending", - "recommendeds", "recommends", - "recommendors", "recommends", - "recommeneded", "recommended", - "recommenting", "recommending", - "recongizable", "recognizable", - "recontructed", "reconstructed", - "recpetionist", "receptionist", - "recreacional", "recreational", - "recriational", "recreational", - "referenceing", "referencing", - "refirgerator", "refrigerator", - "refriderator", "refrigerator", - "refrigarator", "refrigerator", - "refrigerador", "refrigerator", - "refrigerater", "refrigerator", - "refrigirator", "refrigerator", - "regenaration", "regeneration", - "regeneracion", "regeneration", - "regestration", "registration", - "registartion", "registration", - 
"registrating", "registration", - "regrigerator", "refrigerator", - "regulatorias", "regulators", - "regulatories", "regulators", - "regulatorios", "regulators", - "reicarnation", "reincarnation", - "reinforcemnt", "reinforcement", - "reinitalised", "reinitialised", - "reinitalises", "reinitialises", - "reinitalized", "reinitialized", - "reinitalizes", "reinitializes", - "reinstallled", "reinstalled", - "reisntalling", "reinstalling", - "relaitonship", "relationships", - "relatinoship", "relationships", - "reliabillity", "reliability", - "reluctanctly", "reluctantly", - "remarkablely", "remarkably", - "rememberance", "remembrance", - "reminiscient", "reminiscent", - "renaissaince", "renaissance", - "renegeration", "regeneration", - "reorganision", "reorganisation", - "repalcements", "replacements", - "repersenting", "representing", - "reporduction", "reproduction", - "reporductive", "reproductive", - "reprecussion", "repercussions", - "representate", "representative", - "represention", "representing", - "representive", "representative", - "reproducable", "reproducible", - "reproduccion", "reproduction", - "reproduciton", "reproduction", - "reproducting", "reproduction", - "reproductivo", "reproduction", - "reproduktion", "reproduction", - "repsectfully", "respectfully", - "repsectively", "respectively", - "republicanas", "republicans", - "republicanos", "republicans", - "republicants", "republicans", - "republicians", "republicans", - "requerimento", "requirement", - "requeriments", "requirements", - "requierments", "requirements", - "requriements", "requirements", - "resembelance", "resemblance", - "reseptionist", "receptionist", - "reserrection", "resurrection", - "resintalling", "reinstalling", - "resistancies", "resistances", - "resistencias", "resistances", - "respecitvely", "respectively", - "respectabile", "respectable", - "respectivily", "respectively", - "respectivley", "respectively", - "respectuflly", "respectfully", - "respiratiory", "respiratory", - "responsabile", "responsible", - "responsaveis", "responsive", - "responsbilty", "responsibly", - "responsibile", "responsible", - "responsibily", "responsibility", - "responsibley", "responsibly", - "responsibliy", "responsibly", - "responsiblty", "responsibly", - "ressemblance", "resemblance", - "ressemblence", "resemblance", - "ressurection", "resurrection", - "restaurantes", "restaurants", - "restauration", "restoration", - "restauraunts", "restaurants", - "restirctions", "restrictions", - "restrainting", "restraining", - "restrcitions", "restriction", - "restricitons", "restrictions", - "resurreccion", "resurrection", - "resurrektion", "resurrection", - "retalitation", "retaliation", - "retributioon", "retribution", - "retroactivly", "retroactively", - "revolutionay", "revolutionary", - "revolutionos", "revolutions", - "rezurrection", "resurrection", - "rictatorship", "dictatorship", - "ridicilously", "ridiculously", - "ridicoulusly", "ridiculously", - "righteouness", "righteousness", - "rockerfeller", "rockefeller", - "rollercoaser", "rollercoaster", - "rollercoater", "rollercoaster", - "romanitcally", "romantically", - "roundabounts", "roundabout", - "rudimentatry", "rudimentary", - "rysurrection", "resurrection", - "sacksonville", "jacksonville", - "sacreligious", "sacrilegious", - "sacrificeing", "sacrificing", - "saksatchewan", "saskatchewan", - "salughtering", "slaughtering", - "sanctionning", "sanctioning", - "sarcasticaly", "sarcastically", - "sarcasticlly", "sarcastically", - "sascatchewan", "saskatchewan", - 
"saskatcehwan", "saskatchewan", - "saskatchawan", "saskatchewan", - "saskatechwan", "saskatchewan", - "sasketchawan", "saskatchewan", - "sasketchewan", "saskatchewan", - "sasktachewan", "saskatchewan", - "satasfaction", "satisfaction", - "satasfactory", "satisfactory", - "satisfaccion", "satisfaction", - "satisfacting", "satisfaction", - "satisfcation", "satisfaction", - "satisfiction", "satisfaction", - "satistactory", "satisfactory", - "satsifaction", "satisfaction", - "satsifactory", "satisfactory", - "scandanivian", "scandinavian", - "scandenavian", "scandinavian", - "scandianvian", "scandinavian", - "scandinacian", "scandinavian", - "scandinaivan", "scandinavia", - "scandinavica", "scandinavian", - "scandinavien", "scandinavian", - "scandinavion", "scandinavian", - "scandivanian", "scandinavian", - "scandonavian", "scandinavian", - "schizophrena", "schizophrenia", - "scholarhsips", "scholarships", - "scholerships", "scholarships", - "scholorships", "scholarships", - "scnadinavian", "scandinavian", - "screenshoots", "screenshot", - "sensationail", "sensational", - "sensationnal", "sensational", - "sensibilites", "sensibilities", - "sensitivitiy", "sensitivity", - "sentimentals", "sentiments", - "sertificates", "certificates", - "serveillance", "surveillance", - "seskatchewan", "saskatchewan", - "shakesperean", "shakespeare", - "shamelessely", "shamelessly", - "shamelessley", "shamelessly", - "shampionship", "championship", - "shardholders", "shareholders", - "shenanigains", "shenanigans", - "shenanigangs", "shenanigans", - "shenaniganns", "shenanigans", - "shenanighans", "shenanigans", - "shopkeeepers", "shopkeepers", - "showboarding", "snowboarding", - "siginificant", "significant", - "significanly", "significantly", - "significante", "significance", - "significanty", "significantly", - "significatly", "significantly", - "signleplayer", "singleplayer", - "simaltaneous", "simultaneous", - "simeltaneous", "simultaneous", - "similaraties", "similarities", - "similiarites", "similarities", - "similiarties", "similarities", - "similiraties", "similarities", - "similtaneous", "simultaneous", - "simliarities", "similarities", - "simlutaneous", "simultaneous", - "simpathizers", "sympathizers", - "simplistisch", "simplistic", - "simulatenous", "simultaneous", - "simulatneous", "simultaneous", - "simultaenous", "simultaneous", - "simultaneuos", "simultaneous", - "simultanious", "simultaneous", - "simulteneous", "simultaneous", - "singelplayer", "singleplayer", - "singlepalyer", "singleplayer", - "sinlgeplayer", "singleplayer", - "situationals", "situations", - "situationnal", "situational", - "skandinavian", "scandinavian", - "skateboaring", "skateboarding", - "skrawberries", "strawberries", - "slaugthering", "slaughtering", - "sloughtering", "slaughtering", - "sluaghtering", "slaughtering", - "snowballling", "snowballing", - "snowbaording", "snowboarding", - "socialistisk", "socialists", - "socialogical", "sociological", - "socioeconimc", "socioeconomic", - "socioeconmic", "socioeconomic", - "socioligical", "sociological", - "sociopolical", "sociological", - "somethingest", "somethings", - "sophisticaed", "sophisticated", - "sophisticted", "sophisticated", - "southamption", "southampton", - "southernerns", "southerners", - "sovereighnty", "sovereignty", - "sovereignety", "sovereignty", - "sovereignity", "sovereignty", - "specialistes", "specialists", - "specializare", "specialize", - "specializate", "specialize", - "specializeds", "specializes", - "specializied", "specialize", - 
"speciallized", "specialised", - "specifcation", "specification", - "spectacuarly", "spectacular", - "spectaculair", "spectacular", - "spectaculary", "spectacularly", - "spectacullar", "spectacularly", - "specualtions", "speculation", - "spermatozoan", "spermatozoon", - "spesifically", "specifically", - "spirituallly", "spiritually", - "spirtiuality", "spirituality", - "spirutuality", "spirituality", - "spontaneosly", "spontaneously", - "spontaneouly", "spontaneously", - "spreadhseets", "spreadsheets", - "spreadsheats", "spreadsheets", - "spreadsheeds", "spreadsheets", - "spreadsheeet", "spreadsheets", - "standartized", "standardized", - "standerdized", "standardized", - "stardardized", "standardized", - "starightened", "straightened", - "starwberries", "strawberries", - "statisticaly", "statistically", - "stereotpying", "stereotyping", - "stereotypers", "stereotypes", - "stereotypian", "stereotyping", - "steriotyping", "stereotyping", - "steroetyping", "stereotyping", - "steryotyping", "stereotyping", - "straigntened", "straightened", - "straigthened", "straightened", - "strategicaly", "strategically", - "strategiclly", "strategically", - "strawburries", "strawberries", - "streemlining", "streamlining", - "streightened", "straightened", - "strenghening", "strengthening", - "strenghtened", "strengthened", - "strengtheing", "strengthening", - "stroytelling", "storytelling", - "subconcsious", "subconscious", - "subconsicous", "subconscious", - "subcouncious", "subconscious", - "subcsription", "subscriptions", - "subesquently", "subsequently", - "subjectivety", "subjectively", - "subjectivily", "subjectively", - "subjectivley", "subjectively", - "subjudgation", "subjugation", - "subredditors", "subreddits", - "subscirption", "subscriptions", - "subsconcious", "subconscious", - "subscribbers", "subscribers", - "subscribbing", "subscribing", - "subscribirse", "subscriber", - "subscribtion", "subscription", - "subscriptons", "subscriptions", - "subscritpion", "subscriptions", - "subscrpition", "subscriptions", - "subsiquently", "subsequently", - "subsrciption", "subscriptions", - "subsricption", "subscriptions", - "substantialy", "substantially", - "substantitve", "substantive", - "substitition", "substitution", - "substituters", "substitutes", - "substitutivo", "substitution", - "substitutues", "substitutes", - "substracting", "subtracting", - "substraction", "subtraction", - "subterranian", "subterranean", - "succsessfull", "successful", - "sunconscious", "subconscious", - "supermarkeds", "supermarkets", - "supermarkers", "supermarkets", - "supermarkert", "supermarkets", - "supermarkten", "supermarket", - "supermarktes", "supermarkets", - "supernarkets", "supermarkets", - "supernatrual", "supernatural", - "supersticion", "superstition", - "superstision", "superstition", - "superstitios", "superstitious", - "superstitous", "superstitious", - "supervisiors", "supervisors", - "supervisoras", "supervisors", - "supervisores", "supervisors", - "supllemental", "supplemental", - "supplamental", "supplemental", - "supplamented", "supplemented", - "supplimental", "supplemental", - "suppresssion", "suppression", - "supscription", "subscription", - "supsiciously", "suspiciously", - "surprizingly", "surprisingly", - "surrenderred", "surrendered", - "surrundering", "surrendering", - "survaillance", "surveillance", - "survaillence", "surveillance", - "survallience", "surveillance", - "surveillence", "surveillance", - "survelliance", "surveillance", - "surviellance", "surveillance", - "survivabiity", 
"survivability", - "survivabiliy", "survivability", - "survivabilty", "survivability", - "susceptiable", "susceptible", - "susceptibile", "susceptible", - "suspeciously", "suspiciously", - "suspicioulsy", "suspiciously", - "suspiciuosly", "suspiciously", - "suspisiously", "suspiciously", - "sustainabily", "sustainability", - "symapthizers", "sympathizers", - "symetrically", "symmetrically", - "symmetricaly", "symmetrically", - "sympathethic", "sympathetic", - "sympathsizer", "sympathizers", - "sympathyzers", "sympathizers", - "sympethizers", "sympathizers", - "symphatizers", "sympathizers", - "sympithizers", "sympathizers", - "syncronously", "synchronously", - "sysmatically", "systematically", - "systematisch", "systematic", - "tablespooons", "tablespoon", - "tacticallity", "tactically", - "tangencially", "tangentially", - "tangenitally", "tangentially", - "tangientally", "tangentially", - "teamfighters", "teamfights", - "teansylvania", "transylvania", - "techanically", "mechanically", - "techincality", "technicality", - "technologial", "technological", - "telelevision", "television", - "teleportaion", "teleportation", - "teleportaton", "teleportation", - "temepratures", "temperatures", - "temparatures", "temperatures", - "temperaturas", "temperatures", - "temporarilly", "temporarily", - "tempreatures", "temperatures", - "tempuratures", "temperatures", - "tengentially", "tangentially", - "termendously", "tremendously", - "territorrial", "territorial", - "territorries", "territories", - "testasterone", "testosterone", - "testestorone", "testosterone", - "thanskgiving", "thanksgiving", - "theologicial", "theological", - "theoreticaly", "theoretically", - "thermomenter", "thermometer", - "thermomether", "thermometer", - "thumbnailers", "thumbnails", - "thunderboldt", "thunderbolt", - "tindergarten", "kindergarten", - "torubleshoot", "troubleshoot", - "totalitarion", "totalitarian", - "totalitatian", "totalitarian", - "touchscreeen", "touchscreen", - "traditionaly", "traditionally", - "traditionnal", "traditional", - "tradtionally", "traditionally", - "tramendously", "tremendously", - "tramsformers", "transformers", - "tramsforming", "transforming", - "tranditional", "transitional", - "tranistional", "transitional", - "tranistioned", "transitioned", - "tranlsations", "translations", - "tranmsission", "transmissions", - "transaltions", "translations", - "transaprency", "transparency", - "transational", "transitional", - "transcations", "transactions", - "transcendant", "transcendent", - "transcripton", "transcription", - "transcriptus", "transcripts", - "transesxuals", "transsexuals", - "transfarmers", "transformers", - "transfarring", "transferring", - "transferrred", "transferred", - "transformare", "transformers", - "transformase", "transforms", - "transformees", "transforms", - "transforners", "transformers", - "transfromers", "transformers", - "transfroming", "transforming", - "transgenderd", "transgendered", - "transgendred", "transgendered", - "transgenered", "transgender", - "transicional", "transitional", - "transilvania", "transylvania", - "transimssion", "transmissions", - "transisioned", "transitioned", - "translastion", "translations", - "translateing", "translating", - "translationg", "translating", - "translucient", "translucent", - "translyvania", "transylvania", - "transmisions", "transmission", - "transmisison", "transmission", - "transmissons", "transmissions", - "transmitirte", "transmitter", - "transmittted", "transmitted", - "transmorfers", "transformer", - "transofrmers", 
"transformers", - "transofrming", "transforming", - "transparancy", "transparency", - "transparenty", "transparency", - "transparrent", "transparent", - "transperancy", "transparency", - "transperency", "transparency", - "transplantes", "transplants", - "transporteur", "transporter", - "transportion", "transporting", - "transpotting", "transporting", - "transsmision", "transmissions", - "transylmania", "transylvania", - "transylvanai", "transylvania", - "trasnferring", "transferring", - "trasnformers", "transformers", - "trasnforming", "transforming", - "trasnmission", "transmissions", - "trasnparency", "transparency", - "trasnporting", "transporting", - "trememdously", "tremendously", - "tremendoulsy", "tremendously", - "tremondously", "tremendously", - "troubelshoot", "troubleshoot", - "troublehsoot", "troubleshoot", - "trumendously", "tremendously", - "trustworthly", "trustworthy", - "ubsubscribed", "unsubscribed", - "udnerpowered", "underpowered", - "umbelievable", "unbelievable", - "umemployment", "unemployment", - "unaccaptable", "unacceptable", - "unacceptible", "unacceptable", - "unaccpetable", "unacceptable", - "unacompanied", "unaccompanied", - "unappealling", "unappealing", - "unattractice", "unattractive", - "unautherized", "unauthorized", - "unauthroized", "unauthorized", - "unbeleivable", "unbelievable", - "unbeleivably", "unbelievably", - "unbeliavable", "unbelievable", - "unbeliavably", "unbelievably", - "unbeliebable", "unbelievable", - "unbelieveble", "unbelievable", - "unbelievibly", "unbelievably", - "unbeliveable", "unbelievable", - "unbeliveably", "unbelievably", - "unbelizeable", "unbelievable", - "unbolievable", "unbelievable", - "uncertainity", "uncertainty", - "uncertaintly", "uncertainty", - "uncompatible", "incompatible", - "unconditinal", "unconditional", - "unconsciosly", "unconsciously", - "unconsciouly", "unconsciously", - "unconsistent", "inconsistent", - "unconvenient", "inconvenient", - "unconvential", "unconventional", - "undecideable", "undecidable", - "undefinitely", "indefinitely", - "undeniablely", "undeniably", - "undergradate", "undergraduate", - "undergradute", "undergraduate", - "underminding", "undermining", - "undermineing", "undermining", - "undermineras", "undermines", - "undermineres", "undermines", - "underminging", "undermining", - "underminning", "undermining", - "undertakeing", "undertaking", - "underwhelimg", "underwhelming", - "underwheling", "underwhelming", - "undesireable", "undesirable", - "undoubtedbly", "undoubtedly", - "unemployemnt", "unemployment", - "unemplyoment", "unemployment", - "unempolyment", "unemployment", - "unenployment", "unemployment", - "unequalities", "inequalities", - "unexpectadly", "unexpectedly", - "unexpectetly", "unexpectedly", - "unexpectidly", "unexpectedly", - "unexperience", "inexperience", - "unexpextedly", "unexpectedly", - "unexplicably", "inexplicably", - "unforgetable", "unforgettable", - "unforgiveble", "unforgivable", - "unforgivible", "unforgivable", - "unfortunatly", "unfortunately", - "unfortunetly", "unfortunately", - "unilatreally", "unilaterally", - "uniliterally", "unilaterally", - "unimpresssed", "unimpressed", - "uninitalised", "uninitialised", - "uninitalized", "uninitialized", - "uninstallimg", "uninstalling", - "uninstallled", "uninstalled", - "unintentinal", "unintentional", - "uninteresing", "uninteresting", - "uninterneted", "uninterested", - "uninterruped", "uninterrupted", - "uninterupted", "uninterrupted", - "unisntalling", "uninstalling", - "unitesstates", "unitedstates", - 
"univerisites", "universities", - "univeristies", "universities", - "universitets", "universities", - "unliaterally", "unilaterally", - "unneccessary", "unnecessary", - "unnecesarily", "unnecessarily", - "unnecessairy", "unnecessarily", - "unnecessarly", "unnecessarily", - "unnistalling", "uninstalling", - "unpredictabe", "unpredictable", - "unpreductive", "unproductive", - "unproduktive", "unproductive", - "unrealisitic", "unrealistic", - "unreaponsive", "unresponsive", - "unreasonalby", "unreasonably", - "unrepsonsive", "unresponsive", - "unresponcive", "unresponsive", - "unresponisve", "unresponsive", - "unresponsibe", "unresponsive", - "unrestircted", "unrestricted", - "unrestrcited", "unrestricted", - "unristricted", "unrestricted", - "unseccessful", "unsuccessful", - "unsespecting", "unsuspecting", - "unsibscribed", "unsubscribed", - "unsoliciated", "unsolicited", - "unsolicitied", "unsolicited", - "unsubscirbed", "unsubscribed", - "unsubscrible", "unsubscribed", - "unsubscrided", "unsubscribed", - "unsubscriped", "unsubscribed", - "unsubscrubed", "unsubscribed", - "unsubsrcibed", "unsubscribed", - "unsucessfull", "unsuccessful", - "unsunscribed", "unsubscribed", - "unsurprizing", "unsurprising", - "unsusbcribed", "unsubscribed", - "unsustainble", "unsustainable", - "unvelievable", "unbelievable", - "unvelievably", "unbelievably", - "unviersities", "universities", - "unvulnerable", "invulnerable", - "varification", "verification", - "vegetarianas", "vegetarians", - "vegetarianos", "vegetarians", - "verficiation", "verification", - "verificacion", "verification", - "verificaiton", "verification", - "verifikation", "verification", - "vernaculaire", "vernacular", - "versatillity", "versatility", - "verticallity", "vertically", - "videogamemes", "videogames", - "visualizaton", "visualization", - "vocabularily", "vocabulary", - "vocabularity", "vocabulary", - "volonteering", "volunteering", - "volounteered", "volunteered", - "voluntarilly", "voluntarily", - "volunterring", "volunteering", - "vulnerabilty", "vulnerability", - "weightlifing", "weightlifting", - "withdrawalls", "withdrawals", - "withdrawling", "withdrawing", - "withdrawning", "withdrawing", - "wonderfullly", "wonderfully", - "worshippping", "worshipping", - "xenophobical", "xenophobia", - "abandenment", "abandonment", - "abandomnent", "abandonment", - "abandonding", "abandoning", - "abandonnent", "abandonment", - "abandonning", "abandoning", - "abbreviatin", "abbreviation", - "abbreviaton", "abbreviation", - "abdominable", "abdominal", - "abomanation", "abomination", - "abominacion", "abomination", - "abomonation", "abomination", - "abonimation", "abomination", - "aboriginial", "aboriginal", - "aborigional", "aboriginal", - "abreviation", "abbreviation", - "abritrarily", "arbitrarily", - "abritration", "arbitration", - "absolutelly", "absolutely", - "absolutelys", "absolutes", - "absolutisme", "absolutes", - "absolutiste", "absolutes", - "abstraccion", "abstraction", - "abstraktion", "abstraction", - "abstruction", "abstraction", - "abundancies", "abundances", - "academicaly", "academically", - "academicese", "academics", - "accelarated", "accelerated", - "accelarator", "accelerator", - "accelerater", "accelerator", - "acceleratie", "accelerate", - "acceleratio", "accelerator", - "acceleraton", "acceleration", - "accelorated", "accelerated", - "accelorator", "accelerator", - "acceptabelt", "acceptable", - "accesseries", "accessories", - "accessibile", "accessible", - "accessibily", "accessibility", - "accessoires", 
"accessories", - "accidantely", "accidently", - "accidentaly", "accidentally", - "accidentely", "accidently", - "accidential", "accidental", - "accidentily", "accidently", - "accidentlay", "accidently", - "accidentley", "accidently", - "accidentlly", "accidently", - "accomadated", "accommodated", - "accomadates", "accommodates", - "accommadate", "accommodate", - "accommidate", "accommodate", - "accomodated", "accommodated", - "accomodates", "accommodates", - "accomondate", "accommodate", - "accompained", "accompanied", - "accompanyed", "accompanied", - "accompianed", "accompanied", - "accompinied", "accompanied", - "accomplises", "accomplishes", - "accomplishs", "accomplishes", - "accomponied", "accompanied", - "accountatns", "accountants", - "accountents", "accountants", - "accquainted", "acquainted", - "accrediated", "accredited", - "accreditied", "accredited", - "accreditted", "accredited", - "acculumated", "accumulated", - "accumalated", "accumulated", - "accumelated", "accumulated", - "accumilated", "accumulated", - "accumulatin", "accumulation", - "accumulaton", "accumulation", - "accuratelly", "accurately", - "accustommed", "accustomed", - "acheivement", "achievement", - "acheivments", "achievements", - "achievemint", "achievement", - "achievemnts", "achievements", - "achievments", "achievements", - "achivements", "achievements", - "acknolwedge", "acknowledge", - "acknoweldge", "acknowledge", - "acknowleded", "acknowledged", - "acknowlegde", "acknowledge", - "acknowleged", "acknowledge", - "acknowleges", "acknowledges", - "acknwoledge", "acknowledges", - "acomplished", "accomplished", - "acopalyptic", "apocalyptic", - "acquaintace", "acquaintance", - "acquisation", "acquisition", - "activateing", "activating", - "activationg", "activating", - "activistion", "activision", - "additinally", "additionally", - "additionaly", "additionally", - "additonally", "additionally", - "adequatedly", "adequately", - "adjectiveus", "adjectives", - "administerd", "administered", - "administrar", "administrator", - "administren", "administer", - "administrer", "administer", - "administres", "administer", - "administrez", "administer", - "adminstered", "administered", - "adminstrate", "administrate", - "admittadely", "admittedly", - "adolencence", "adolescence", - "adolescance", "adolescence", - "adolescense", "adolescence", - "advantadges", "advantages", - "advantageos", "advantageous", - "advantageus", "advantageous", - "advantagous", "advantageous", - "adventerous", "adventures", - "adventourus", "adventurous", - "adversiting", "advertising", - "advertisors", "advertisers", - "advertisted", "advertised", - "aesthethics", "aesthetics", - "afficionado", "aficionado", - "affiliction", "affiliation", - "affirmitave", "affirmative", - "affirmitive", "affirmative", - "affixiation", "affiliation", - "affrimative", "affirmative", - "afgahnistan", "afghanistan", - "afganhistan", "afghanistan", - "afghanastan", "afghanistan", - "afghansitan", "afghanistan", - "afhganistan", "afghanistan", - "afternarket", "aftermarket", - "afterthougt", "afterthought", - "aggaravates", "aggravates", - "aggragating", "aggravating", - "aggregatore", "aggregate", - "aggressivly", "aggressively", - "aggresssion", "aggression", - "aggrovating", "aggravating", - "agnostacism", "agnosticism", - "agnostisicm", "agnosticism", - "agnostisism", "agnosticism", - "agnostocism", "agnosticism", - "agnsoticism", "agnosticism", - "agonsticism", "agnosticism", - "agressively", "aggressively", - "agressivley", "agressive", - "agressivnes", 
"agressive", - "agricolture", "agriculture", - "agriculteur", "agriculture", - "agricultral", "agricultural", - "agricultual", "agricultural", - "agricutlure", "agriculture", - "ahtleticism", "athleticism", - "alcoholicas", "alcoholics", - "alcoholicos", "alcoholics", - "alcoholisim", "alcoholism", - "algorithems", "algorithm", - "algorithims", "algorithm", - "algorithmes", "algorithms", - "algorithmns", "algorithms", - "algorithmus", "algorithms", - "algorithyms", "algorithm", - "algorythims", "algorithms", - "alientating", "alienating", - "alleigances", "allegiance", - "alltogether", "altogether", - "alterantive", "alternative", - "alternatley", "alternately", - "alternitive", "alternative", - "altheticism", "athleticism", - "altnerately", "alternately", - "altruisitic", "altruistic", - "altruistric", "altruistic", - "amalgomated", "amalgamated", - "ambulancier", "ambulance", - "amerliorate", "ameliorate", - "ammendments", "amendments", - "ampehtamine", "amphetamine", - "ampethamine", "amphetamine", - "amphetamies", "amphetamines", - "amphetamins", "amphetamines", - "amphetemine", "amphetamine", - "amphetimine", "amphetamine", - "amphetmaine", "amphetamines", - "analyticals", "analytics", - "anarchistes", "anarchists", - "ancedotally", "anecdotally", - "androgenous", "androgynous", - "anecdatally", "anecdotally", - "anecdotelly", "anecdotally", - "anecodtally", "anecdotally", - "anectodally", "anecdotally", - "anectotally", "anecdotally", - "anedoctally", "anecdotally", - "angosticism", "agnosticism", - "anihilation", "annihilation", - "anitbiotics", "antibiotics", - "annihalated", "annihilated", - "annihilaton", "annihilation", - "annihilited", "annihilated", - "annihliated", "annihilated", - "annilihated", "annihilated", - "anniversery", "anniversary", - "annonymouse", "anonymous", - "announceing", "announcing", - "announcemet", "announcements", - "announcemnt", "announcement", - "announcents", "announces", - "annoymously", "anonymously", - "anonamously", "anonymously", - "anonimously", "anonymously", - "anonmyously", "anonymously", - "anonomously", "anonymously", - "anonymousny", "anonymously", - "anouncement", "announcement", - "antagonisic", "antagonistic", - "antagonistc", "antagonistic", - "antagonstic", "antagonist", - "anthropolgy", "anthropology", - "anthropoloy", "anthropology", - "antibiodics", "antibiotics", - "antibioitcs", "antibiotic", - "antibioitic", "antibiotic", - "antibitoics", "antibiotics", - "antiboitics", "antibiotics", - "anticapated", "anticipated", - "anticiapted", "anticipated", - "anticipatin", "anticipation", - "antiobitics", "antibiotic", - "antiquaited", "antiquated", - "antisipated", "anticipated", - "apacolyptic", "apocalyptic", - "apocaliptic", "apocalyptic", - "apocalpytic", "apocalyptic", - "apocalytpic", "apocalyptic", - "apolagizing", "apologizing", - "apolegetics", "apologetics", - "apologistas", "apologists", - "apologistes", "apologists", - "apostrophie", "apostrophe", - "apparantely", "apparently", - "appareances", "appearances", - "apparentely", "apparently", - "appartments", "apartments", - "appeareance", "appearance", - "appearences", "appearances", - "apperciated", "appreciated", - "apperciates", "appreciates", - "appereances", "appearances", - "applicabile", "applicable", - "applicaiton", "application", - "applicatins", "applicants", - "applicatons", "applications", - "appoitnment", "appointments", - "apporaching", "approaching", - "apporpriate", "appropriate", - "apporximate", "approximate", - "appraoching", "approaching", - 
"apprearance", "appearance", - "apprecaited", "appreciated", - "apprecaites", "appreciates", - "appreciaite", "appreciative", - "appreciatie", "appreciative", - "appreciatin", "appreciation", - "appreciaton", "appreciation", - "appreciatve", "appreciative", - "appreicated", "appreciated", - "appreicates", "appreciates", - "apprentince", "apprentice", - "appriciated", "appreciated", - "appriciates", "appreciates", - "apprieciate", "appreciate", - "appropirate", "appropriate", - "appropraite", "appropriate", - "appropriato", "appropriation", - "approxamate", "approximate", - "approxiamte", "approximate", - "approxmiate", "approximate", - "aprehensive", "apprehensive", - "apsirations", "aspirations", - "aqcuisition", "acquisition", - "aquaintance", "acquaintance", - "aquiantance", "acquaintance", - "arbitrairly", "arbitrarily", - "arbitralily", "arbitrarily", - "arbitrarely", "arbitrarily", - "arbitrarion", "arbitration", - "arbitratily", "arbitrarily", - "arbritarily", "arbitrarily", - "arbritation", "arbitration", - "arcaheology", "archaeology", - "archaoelogy", "archeology", - "archeaology", "archaeology", - "archimedian", "archimedean", - "architechts", "architect", - "architectes", "architects", - "architecure", "architecture", - "argiculture", "agriculture", - "argumentate", "argumentative", - "aribtrarily", "arbitrarily", - "aribtration", "arbitration", - "arithmentic", "arithmetic", - "arithmethic", "arithmetic", - "arithmetric", "arithmetic", - "armagedddon", "armageddon", - "armageddeon", "armageddon", - "arrangments", "arrangements", - "arrengement", "arrangement", - "articluated", "articulated", - "articualted", "articulated", - "artifically", "artificially", - "artificialy", "artificially", - "aspergerers", "aspergers", - "asphyxation", "asphyxiation", - "aspriations", "aspirations", - "assasinated", "assassinated", - "assasinates", "assassinates", - "assassiante", "assassinate", - "assassinare", "assassinate", - "assassinatd", "assassinated", - "assassinato", "assassination", - "assassinats", "assassins", - "assassinted", "assassinated", - "assembleing", "assembling", - "assemblying", "assembling", - "assertation", "assertion", - "assignemnts", "assignments", - "assimialted", "assimilate", - "assimilatie", "assimilate", - "assimilerat", "assimilate", - "assimiliate", "assimilate", - "assimliated", "assimilate", - "assingments", "assignments", - "assistantes", "assistants", - "assocaition", "associations", - "associaiton", "associations", - "associaties", "associates", - "associatons", "associations", - "assoication", "association", - "assosiating", "associating", - "assosiation", "association", - "assoziation", "association", - "assumptious", "assumptions", - "astonashing", "astonishing", - "astonoshing", "astonishing", - "astronaught", "astronaut", - "astronaunts", "astronaut", - "astronautas", "astronauts", - "astronautes", "astronauts", - "asychronous", "asynchronous", - "asyncronous", "asynchronous", - "atatchments", "attachments", - "atheistisch", "atheistic", - "athelticism", "athleticism", - "athletecism", "athleticism", - "athleticsim", "athleticism", - "athletisicm", "athleticism", - "athletisism", "athleticism", - "atmopsheric", "atmospheric", - "atmoshperic", "atmospheric", - "atmosoheric", "atmospheric", - "atomspheric", "atmospheric", - "atrocitites", "atrocities", - "attachemnts", "attachments", - "attackerasu", "attackers", - "attackerats", "attackers", - "attactments", "attachments", - "attributred", "attributed", - "attributted", "attribute", - "attrocities", 
"atrocities", - "atttributes", "attributes", - "audiobookas", "audiobooks", - "audioboooks", "audiobook", - "auotcorrect", "autocorrect", - "austrailans", "australians", - "austrailian", "australian", - "australiaan", "australians", - "australiams", "australians", - "australiens", "australians", - "australlian", "australian", - "authenticiy", "authenticity", - "authenticor", "authenticator", - "authenticty", "authenticity", - "authorative", "authoritative", - "authoritate", "authoritative", - "authoroties", "authorities", - "autoatttack", "autoattack", - "autocoreect", "autocorrect", - "autocorrekt", "autocorrect", - "autocorrent", "autocorrect", - "autocorrext", "autocorrect", - "autoctonous", "autochthonous", - "autokorrect", "autocorrect", - "automaticly", "automatically", - "automatonic", "automation", - "automoblies", "automobile", - "auxillaries", "auxiliaries", - "availabiliy", "availability", - "availabilty", "availability", - "availablity", "availability", - "awesoneness", "awesomeness", - "babysittter", "babysitter", - "backbacking", "backpacking", - "backgorunds", "backgrounds", - "backhacking", "backpacking", - "backjacking", "backpacking", - "backtacking", "backpacking", - "bangaldeshi", "bangladesh", - "bangladesch", "bangladesh", - "barceloneta", "barcelona", - "bargainning", "bargaining", - "battelfield", "battlefield", - "battelfront", "battlefront", - "battelships", "battleship", - "battlefeild", "battlefield", - "battlefiend", "battlefield", - "battlefiled", "battlefield", - "battlefornt", "battlefront", - "battlehsips", "battleship", - "beastiality", "bestiality", - "beaurocracy", "bureaucracy", - "beautyfully", "beautifully", - "behaviorial", "behavioral", - "belittleing", "belittling", - "belittlling", "belittling", - "belligerant", "belligerent", - "belligirent", "belligerent", - "bellweather", "bellwether", - "benefitical", "beneficial", - "bestiallity", "bestiality", - "beuatifully", "beautifully", - "beuraucracy", "bureaucracy", - "beuraucrats", "bureaucrats", - "billegerent", "belligerent", - "billionairs", "billionaires", - "billionarie", "billionaire", - "billioniare", "billionaire", - "biologicaly", "biologically", - "birthdayers", "birthdays", - "birthdaymas", "birthdays", - "bittersweat", "bittersweet", - "bitterwseet", "bittersweet", - "blackberrry", "blackberry", - "blacksmitch", "blacksmith", - "bloodboorne", "bloodborne", - "bluebarries", "blueberries", - "blueburries", "blueberries", - "blueprients", "blueprints", - "bodybuildig", "bodybuilding", - "bodybuildng", "bodybuilding", - "bodybuiling", "bodybuilding", - "bombardeada", "bombarded", - "bombardeado", "bombarded", - "bombarderad", "bombarded", - "bordelrands", "borderlands", - "bordlerands", "borderlands", - "bortherhood", "brotherhood", - "bourgeousie", "bourgeois", - "boycottting", "boycotting", - "bracelettes", "bracelets", - "brainwahsed", "brainwashed", - "brainwasing", "brainwashing", - "braziliians", "brazilians", - "breakthough", "breakthrough", - "breakthrouh", "breakthrough", - "breathtakng", "breathtaking", - "brianwashed", "brainwashed", - "brillaintly", "brilliantly", - "broadcasing", "broadcasting", - "broadcastes", "broadcasts", - "broderlands", "borderlands", - "brotherwood", "brotherhood", - "buddhistisk", "buddhists", - "buearucrats", "bureaucrats", - "bueraucracy", "bureaucracy", - "bueraucrats", "bureaucrats", - "buisnessman", "businessman", - "buisnessmen", "businessmen", - "bullerproof", "bulletproof", - "bulletbroof", "bulletproof", - "bulletproff", "bulletproof", - 
"bulletprrof", "bulletproof", - "bullitproof", "bulletproof", - "bureacuracy", "bureaucracy", - "bureaocracy", "bureaucracy", - "bureaocrats", "bureaucrats", - "bureaucraps", "bureaucrats", - "bureaucrash", "bureaucrats", - "bureaucrasy", "bureaucrats", - "bureaucrazy", "bureaucracy", - "bureuacracy", "bureaucracy", - "bureuacrats", "bureaucrats", - "burueacrats", "bureaucrats", - "businessnes", "businessmen", - "busniessmen", "businessmen", - "butterfiles", "butterflies", - "butterfleye", "butterfly", - "butterflyes", "butterflies", - "butterfries", "butterflies", - "butterlfies", "butterflies", - "caclulating", "calculating", - "caclulation", "calculation", - "caclulators", "calculators", - "cailbration", "calibration", - "calbiration", "calibration", - "calcualting", "calculating", - "calcualtion", "calculations", - "calcualtors", "calculators", - "calculaters", "calculators", - "calculatios", "calculators", - "calculatons", "calculations", - "calibartion", "calibration", - "calibraiton", "calibration", - "califorinan", "californian", - "californain", "californian", - "californica", "california", - "californien", "californian", - "californiia", "californian", - "californina", "californian", - "californnia", "californian", - "califronian", "californian", - "caluclating", "calculating", - "caluclation", "calculation", - "caluclators", "calculators", - "caluculated", "calculated", - "caluiflower", "cauliflower", - "camouflague", "camouflage", - "camouflauge", "camouflage", - "campagining", "campaigning", - "campainging", "campaigning", - "canadianese", "canadians", - "cannabilism", "cannibalism", - "cannabolism", "cannibalism", - "canniablism", "cannibalism", - "cannibalizm", "cannibalism", - "cannibaljim", "cannibalism", - "cannibalsim", "cannibalism", - "cannibilism", "cannibalism", - "cannobalism", "cannibalism", - "cannotation", "connotation", - "capabilites", "capabilities", - "capabilitiy", "capability", - "capabillity", "capability", - "capacitaron", "capacitor", - "capacitores", "capacitors", - "capatilists", "capitalists", - "capatilized", "capitalized", - "caperbility", "capability", - "capitalisim", "capitalism", - "capitilists", "capitalists", - "capitilized", "capitalized", - "capitolists", "capitalists", - "capitolized", "capitalized", - "captialists", "capitalists", - "captialized", "capitalized", - "cariactures", "caricature", - "carniverous", "carnivorous", - "castatrophe", "catastrophe", - "catagorized", "categorized", - "catapillars", "caterpillars", - "catapillers", "caterpillars", - "catasthrope", "catastrophe", - "catastraphe", "catastrophe", - "catastrohpe", "catastrophe", - "catastropic", "catastrophic", - "categroized", "categorized", - "catepillars", "caterpillars", - "catergorize", "categorize", - "caterogized", "categorized", - "caterpilars", "caterpillars", - "caterpiller", "caterpillar", - "catholacism", "catholicism", - "catholicsim", "catholicism", - "catholisicm", "catholicism", - "catholisism", "catholicism", - "catholizism", "catholicism", - "catholocism", "catholicism", - "catogerized", "categorized", - "catterpilar", "caterpillar", - "cauilflower", "cauliflower", - "caulfilower", "cauliflower", - "celebartion", "celebrations", - "celebirties", "celebrities", - "celebracion", "celebration", - "celebrasion", "celebrations", - "celebratons", "celebrations", - "centipeddle", "centipede", - "cerimonious", "ceremonious", - "certaintity", "certainty", - "certificaat", "certificate", - "certificare", "certificate", - "certificato", "certification", - 
"certificats", "certificates", - "challanging", "challenging", - "challeneged", "challenged", - "challeneger", "challenger", - "challeneges", "challenges", - "chameleooon", "chameleon", - "championshp", "championship", - "championsip", "championship", - "chancellour", "chancellor", - "charachters", "characters", - "charasmatic", "charismatic", - "charimastic", "charismatic", - "charsimatic", "charismatic", - "cheerleadra", "cheerleader", - "cheerleards", "cheerleaders", - "cheerleeder", "cheerleader", - "cheesebuger", "cheeseburger", - "cheeseburgs", "cheeseburgers", - "chihuahuita", "chihuahua", - "childrenmrs", "childrens", - "chloesterol", "cholesterol", - "cholesteral", "cholesterol", - "cholestoral", "cholesterol", - "cholestorol", "cholesterol", - "cholosterol", "cholesterol", - "chormosomes", "chromosomes", - "christianty", "christianity", - "chromasomes", "chromosomes", - "chromesomes", "chromosomes", - "chromisomes", "chromosomes", - "chromosones", "chromosomes", - "chromossome", "chromosomes", - "chromozomes", "chromosomes", - "chronicales", "chronicles", - "chronichles", "chronicles", - "cicrulating", "circulating", - "cincinnasti", "cincinnati", - "cincinnatti", "cincinnati", - "cincinnnati", "cincinnati", - "circimcised", "circumcised", - "circluating", "circulating", - "circualtion", "circulation", - "circulacion", "circulation", - "circumcison", "circumcision", - "circumsiced", "circumcised", - "circumsised", "circumcised", - "circumstace", "circumstance", - "circumvrent", "circumvent", - "circuncised", "circumcised", - "cirticising", "criticising", - "ciruclating", "circulating", - "ciruclation", "circulation", - "citicenship", "citizenship", - "citisenship", "citizenship", - "citizinship", "citizenship", - "civilizatin", "civilizations", - "civilizaton", "civilization", - "claculators", "calculators", - "classifides", "classified", - "cleanilness", "cleanliness", - "cleanleness", "cleanliness", - "cleanlyness", "cleanliness", - "cleansiness", "cleanliness", - "cliffbanger", "cliffhanger", - "cliffhander", "cliffhanger", - "cliffhangar", "cliffhanger", - "clifthanger", "cliffhanger", - "cockaroches", "cockroaches", - "cockraoches", "cockroaches", - "cockroackes", "cockroaches", - "cocktailers", "cocktails", - "coefficeint", "coefficient", - "coefficiant", "coefficient", - "coincedince", "coincidence", - "coincidance", "coincidence", - "coincidense", "coincidence", - "coincidente", "coincidence", - "coincidince", "coincidence", - "coinsidence", "coincidence", - "collabarate", "collaborate", - "collaberate", "collaborate", - "collaborant", "collaborate", - "collaborare", "collaborate", - "collaborato", "collaboration", - "collapseing", "collapsing", - "collaterial", "collateral", - "collectieve", "collective", - "collectivly", "collectively", - "collectivos", "collections", - "collobarate", "collaborate", - "colloborate", "collaborate", - "colonializm", "colonialism", - "colonialsim", "colonialism", - "colonianism", "colonialism", - "colonizaton", "colonization", - "comaprisons", "comparisons", - "combiantion", "combinations", - "combinacion", "combination", - "combinaison", "combinations", - "combinaiton", "combinations", - "combinatino", "combinations", - "combinatins", "combinations", - "combinatios", "combinations", - "combinining", "combining", - "combonation", "combination", - "comediantes", "comedians", - "comeptition", "competition", - "comeptitive", "competitive", - "comeptitors", "competitors", - "comfertable", "comfortable", - "comfertably", "comfortably", - 
"comfortabel", "comfortably", - "comfortabil", "comfortably", - "comfrotable", "comfortable", - "comftorable", "comfortable", - "comftorably", "comfortably", - "comisioning", "commissioning", - "comissioned", "commissioned", - "comissioner", "commissioner", - "commandered", "commanded", - "commandmant", "commandment", - "commantator", "commentator", - "commendment", "commandment", - "commentarea", "commenter", - "commentaren", "commenter", - "commentater", "commentator", - "commenteers", "commenter", - "commentries", "commenters", - "commercialy", "commercially", - "commericals", "commercials", - "commericial", "commercial", - "comminicate", "communicate", - "comminucate", "communicate", - "commisioned", "commissioned", - "commisioner", "commissioner", - "commisssion", "commissions", - "committment", "commitment", - "commodoties", "commodities", - "commomplace", "commonplace", - "commonspace", "commonplace", - "commonweath", "commonwealth", - "commonwelth", "commonwealth", - "commuincate", "communicated", - "communciate", "communicate", - "communicted", "communicated", - "communistas", "communists", - "communistes", "communists", - "compability", "compatibility", - "compalation", "compilation", - "compansated", "compensated", - "comparabile", "comparable", - "comparasion", "comparison", - "comparasons", "comparisons", - "comparement", "compartment", - "comparetive", "comparative", - "comparision", "comparison", - "comparisson", "comparisons", - "comparitave", "comparative", - "comparitive", "comparative", - "comparsions", "comparisons", - "compassione", "compassionate", - "compasssion", "compassion", - "compatabile", "compatible", - "compatative", "comparative", - "compatiable", "compatible", - "compatibile", "compatible", - "compatibily", "compatibility", - "compeditive", "competitive", - "compeditors", "competitors", - "compeitions", "competitions", - "compeittion", "competitions", - "compelation", "compilation", - "compensante", "compensate", - "compensatie", "compensate", - "compensatin", "compensation", - "compenstate", "compensate", - "comperative", "comparative", - "compesition", "composition", - "competation", "computation", - "competative", "competitive", - "competators", "competitors", - "competetion", "competition", - "competetors", "competitors", - "competiters", "competitors", - "competiting", "competition", - "competitior", "competitor", - "competitivo", "competition", - "competitoin", "competitions", - "competitons", "competitors", - "competution", "computation", - "compilacion", "compilation", - "compilcated", "complicate", - "compination", "compilation", - "compinsated", "compensated", - "compitation", "computation", - "compitetion", "competitions", - "complacient", "complacent", - "complciated", "complicate", - "compleation", "compilation", - "complecated", "complicated", - "completaste", "completes", - "completeing", "completing", - "completeion", "completion", - "completelly", "completely", - "completelyl", "completely", - "completelys", "completes", - "completenes", "completes", - "complexitiy", "complexity", - "compliacted", "complicate", - "compliation", "compilation", - "complicarte", "complicate", - "complicatie", "complicit", - "complicatii", "complicit", - "complicatin", "complicit", - "complictaed", "complicate", - "complimente", "complement", - "complimenty", "complimentary", - "complusions", "compulsion", - "compolation", "compilation", - "componenets", "components", - "componentes", "components", - "composicion", "composition", - "composiiton", 
"compositions", - "composision", "compositions", - "compositied", "composite", - "composities", "composite", - "compositoin", "compositions", - "compositons", "compositions", - "compositore", "composite", - "compostiion", "compositions", - "compotition", "composition", - "compramised", "compromised", - "compramises", "compromises", - "compremised", "compromised", - "compremises", "compromises", - "comprension", "compression", - "compresores", "compressor", - "compresssed", "compressed", - "compresssor", "compressor", - "comprimised", "compromised", - "comprimises", "compromises", - "compromessi", "compromises", - "compromisng", "compromising", - "compromisse", "compromises", - "compromisso", "compromises", - "compromized", "compromised", - "compulstion", "compulsion", - "compunation", "computation", - "computacion", "computation", - "computating", "computation", - "computition", "computation", - "conceivibly", "conceivably", - "concencrate", "concentrate", - "concentrace", "concentrate", - "concentrade", "concentrated", - "concentrait", "concentrate", - "concentrant", "concentrate", - "concentrare", "concentrate", - "concentrato", "concentration", - "concertmate", "concentrate", - "conceviable", "conceivable", - "conceviably", "conceivably", - "concidering", "considering", - "conciveable", "conceivable", - "conciveably", "conceivably", - "conclsuions", "concussions", - "concludendo", "concluded", - "conclussion", "conclusions", - "conclussive", "conclusive", - "conclutions", "conclusions", - "concsiously", "consciously", - "conculsions", "conclusions", - "concusssion", "concussions", - "condeferacy", "confederacy", - "condicional", "conditional", - "condidtions", "conditions", - "conditionar", "conditioner", - "conditionel", "conditional", - "condolances", "condolences", - "condolenses", "condolences", - "condolonces", "condolences", - "conductiong", "conducting", - "condulences", "condolences", - "conenctions", "connections", - "conescutive", "consecutive", - "confedaracy", "confederacy", - "confedarate", "confederate", - "confederecy", "confederacy", - "conferances", "conferences", - "conferedate", "confederate", - "confererate", "confederate", - "confescated", "confiscated", - "confesssion", "confessions", - "confidantly", "confidently", - "configurare", "configure", - "configurate", "configure", - "configurato", "configuration", - "confilcting", "conflicting", - "confisgated", "confiscated", - "conflciting", "conflicting", - "confortable", "comfortable", - "confrontato", "confrontation", - "confussions", "confessions", - "congrassman", "congressman", - "congratuate", "congratulate", - "conicidence", "coincidence", - "conjonction", "conjunction", - "conjucntion", "conjunction", - "conjuncting", "conjunction", - "conlcusions", "conclusions", - "connatation", "connotation", - "connecitcut", "connecticut", - "connecticon", "connection", - "connectiong", "connecting", - "connectivty", "connectivity", - "connetation", "connotation", - "connonation", "connotation", - "connotacion", "connotation", - "conontation", "connotation", - "conotations", "connotations", - "conquerring", "conquering", - "consdidered", "considered", - "consectuive", "consecutive", - "consecuence", "consequence", - "conseguence", "consequence", - "conselation", "consolation", - "consentrate", "concentrate", - "consequenes", "consequence", - "consequense", "consequences", - "consequente", "consequence", - "consequenty", "consequently", - "consequtive", "consecutive", - "conservanti", "conservation", - "conservatie", 
"conservatives", - "conservaton", "conservation", - "consficated", "confiscated", - "considerabe", "considerate", - "considerais", "considers", - "considerant", "considerate", - "considerato", "consideration", - "considerble", "considerable", - "considerbly", "considerably", - "considereis", "considers", - "consilation", "consolation", - "consilidate", "consolidate", - "consistance", "consistency", - "consistenly", "consistently", - "consistensy", "consistency", - "consistenty", "consistently", - "consitution", "constitution", - "conslutants", "consultant", - "consolacion", "consolation", - "consoldiate", "consolidate", - "consolidare", "consolidate", - "consolodate", "consolidate", - "consomation", "consolation", - "conspiraces", "conspiracies", - "conspiracys", "conspiracies", - "conspirancy", "conspiracy", - "constantins", "constants", - "constantivs", "constants", - "constarints", "constraint", - "constituant", "constituent", - "constituion", "constitution", - "constituite", "constitute", - "constitutie", "constitutes", - "constrating", "constraint", - "constriants", "constraints", - "construcing", "constructing", - "construcion", "construction", - "construcive", "constructive", - "constructie", "constructive", - "constructos", "constructs", - "constructur", "constructor", - "constructus", "constructs", - "constuction", "construction", - "consturcted", "constructed", - "consuelling", "counselling", - "consulation", "consolation", - "consultaion", "consultation", - "consultanti", "consultation", - "consumation", "consumption", - "consumbales", "consumables", - "consumersim", "consumerism", - "consumibles", "consumables", - "contagiosum", "contagious", - "containered", "contained", - "containmemt", "containment", - "containters", "containers", - "containting", "containing", - "contaminato", "contamination", - "contaminent", "containment", - "contaminted", "contaminated", - "contancting", "contracting", - "contanimate", "contaminated", - "contemplare", "contemplate", - "contempoary", "contemporary", - "contemporay", "contemporary", - "contencious", "contentious", - "contenental", "continental", - "contengency", "contingency", - "contenintal", "continental", - "contenplate", "contemplate", - "contensious", "contentious", - "contentants", "contestants", - "contentuous", "contentious", - "contestaste", "contestants", - "contestents", "contestants", - "contianment", "containment", - "contientous", "contentious", - "contimplate", "contemplate", - "continenets", "continents", - "continentes", "continents", - "continentul", "continental", - "contingancy", "contingency", - "contingient", "contingent", - "contingincy", "contingency", - "continously", "continuously", - "continuarla", "continual", - "continuarlo", "continual", - "continuasse", "continues", - "continueing", "continuing", - "continuemos", "continues", - "continueous", "continuous", - "continuious", "continuous", - "continuning", "continuing", - "continunity", "continuity", - "continuosly", "continuously", - "continuting", "continuing", - "continutity", "continuity", - "continuuing", "continuing", - "continuuity", "continuity", - "contirbuted", "contributed", - "contiunally", "continually", - "contraccion", "contraction", - "contraddice", "contradicted", - "contradices", "contradicts", - "contradtion", "contraction", - "contraversy", "controversy", - "contreversy", "controversy", - "contribuent", "contribute", - "contribuito", "contribution", - "contributer", "contributor", - "contributie", "contribute", - "contributin", 
"contribution", - "contributos", "contributors", - "contribuyes", "contributes", - "contricting", "contracting", - "contriction", "contraction", - "contridicts", "contradicts", - "contriversy", "controversy", - "controleurs", "controllers", - "controllore", "controllers", - "controvercy", "controversy", - "controversa", "controversial", - "contrubutes", "contributes", - "contructing", "contracting", - "contruction", "construction", - "contructors", "contractors", - "conveinence", "convenience", - "conveneince", "convenience", - "conveniance", "convenience", - "conveniente", "convenience", - "convenietly", "conveniently", - "conventinal", "conventional", - "converitble", "convertible", - "conversaion", "conversion", - "conversatin", "conversations", - "converseley", "conversely", - "converstion", "conversion", - "convertirea", "converter", - "convertirle", "convertible", - "convertirme", "converter", - "convertirte", "converter", - "convicitons", "convictions", - "convienence", "convenience", - "convienient", "convenient", - "convinceing", "convincing", - "convincente", "convenient", - "convincersi", "convinces", - "convirtible", "convertible", - "cooperacion", "cooperation", - "cooperativo", "cooperation", - "cooporation", "cooperation", - "cooporative", "cooperative", - "coordenated", "coordinated", - "coordenates", "coordinates", - "coordianted", "coordinated", - "coordiantes", "coordinates", - "coordiantor", "coordinator", - "coordinador", "coordinator", - "coordinants", "coordinates", - "coordinater", "coordinator", - "coordinaton", "coordination", - "coordonated", "coordinated", - "coordonates", "coordinates", - "coordonator", "coordinator", - "cooridnated", "coordinated", - "cooridnates", "coordinates", - "cooridnator", "coordinator", - "copenhaagen", "copenhagen", - "copenhaegen", "copenhagen", - "copenhaguen", "copenhagen", - "copenhangen", "copenhagen", - "copmetitors", "competitors", - "coproration", "corporation", - "copyrigthed", "copyrighted", - "corinthains", "corinthians", - "corintheans", "corinthians", - "corinthiens", "corinthians", - "corinthinas", "corinthians", - "cornithians", "corinthians", - "corparation", "corporation", - "corperation", "corporation", - "corporacion", "corporation", - "corporativo", "corporation", - "corralation", "correlation", - "correctings", "corrections", - "correctivos", "corrections", - "correktions", "corrections", - "correktness", "correctness", - "correlacion", "correlation", - "correlaties", "correlates", - "corrilation", "correlation", - "corrisponds", "corresponds", - "corrolation", "correlation", - "corrosponds", "corresponds", - "costitution", "constitution", - "councellors", "councillors", - "counrtyside", "countryside", - "counsilling", "counselling", - "countercoat", "counteract", - "counteredit", "counterfeit", - "counterfact", "counteract", - "counterfait", "counterfeit", - "counterfest", "counterfeit", - "counterfiet", "counterfeit", - "counterpaly", "counterplay", - "counterpary", "counterplay", - "counterpath", "counterpart", - "counterpats", "counterparts", - "counterpont", "counterpoint", - "counterract", "counterpart", - "counterside", "countryside", - "countertrap", "counterpart", - "countriside", "countryside", - "countrycide", "countryside", - "countrywise", "countryside", - "courthourse", "courthouse", - "coutnerfeit", "counterfeit", - "coutnerpart", "counterpart", - "coutnerplay", "counterplay", - "creacionism", "creationism", - "creationkit", "creationist", - "creationsim", "creationism", - "creationsit", 
"creationist", - "creationsts", "creationists", - "creativelly", "creatively", - "credencials", "credentials", - "credentails", "credentials", - "credentaisl", "credentials", - "credientals", "credentials", - "credintials", "credentials", - "cricitising", "criticising", - "criculating", "circulating", - "cringeworhy", "cringeworthy", - "cringeworty", "cringeworthy", - "cringewothy", "cringeworthy", - "criticicing", "criticising", - "criticisied", "criticise", - "criticisims", "criticisms", - "criticisize", "criticise", - "criticiszed", "criticise", - "critisicing", "criticizing", - "critisising", "criticising", - "critizicing", "criticizing", - "critizising", "criticizing", - "critizizing", "criticizing", - "crockodiles", "crocodiles", - "crocodiller", "crocodile", - "crocodilule", "crocodile", - "croporation", "corporation", - "crossfiters", "crossfire", - "cultivative", "cultivate", - "curricullum", "curriculum", - "customizabe", "customizable", - "customizble", "customizable", - "dangeroulsy", "dangerously", - "dardenelles", "dardanelles", - "deadlifters", "deadlifts", - "dealershits", "dealerships", - "deceptivley", "deceptive", - "declaracion", "declaration", - "decleration", "declaration", - "declinining", "declining", - "decloration", "declaration", - "decoartions", "decoration", - "decomposits", "decomposes", - "decoratieve", "decorative", - "decorativos", "decorations", - "decotations", "decorations", - "decsendants", "descendants", - "deductiable", "deductible", - "defenderlas", "defenders", - "defenderlos", "defenders", - "defendernos", "defenders", - "defenesless", "defenseless", - "defenisvely", "defensively", - "defensivley", "defensively", - "deficiencey", "deficiency", - "deficienies", "deficiencies", - "deficientcy", "deficiency", - "definantley", "definately", - "definatedly", "definately", - "definateley", "definately", - "definatelly", "definately", - "definatelty", "definately", - "definatetly", "definately", - "definations", "definitions", - "definatlely", "definately", - "definetally", "definately", - "definetlely", "definetly", - "definitaley", "definately", - "definitelly", "definitely", - "definitevly", "definitively", - "definitiely", "definitively", - "definitieve", "definitive", - "definitiley", "definitively", - "definitivly", "definitively", - "definitivno", "definition", - "definitivos", "definitions", - "definitlely", "definitly", - "definitlety", "definitly", - "deflecticon", "deflection", - "degenererat", "degenerate", - "degradacion", "degradation", - "degradating", "degradation", - "degragation", "degradation", - "degridation", "degradation", - "dehyrdation", "dehydration", - "deinitalize", "deinitialize", - "delaerships", "dealerships", - "delapidated", "dilapidated", - "delcaration", "declaration", - "delearships", "dealerships", - "delevopment", "development", - "deliberante", "deliberate", - "deliberatly", "deliberately", - "deliberetly", "deliberately", - "delightlful", "delightful", - "deliverying", "delivering", - "delusionnal", "delusional", - "deminsional", "dimensional", - "democarcies", "democracies", - "democracize", "democracies", - "democractic", "democratic", - "democraphic", "demographic", - "democrasies", "democracies", - "democrazies", "democracies", - "democrocies", "democracies", - "demograhpic", "demographic", - "demographis", "demographics", - "demograpics", "demographics", - "demogrpahic", "demographic", - "demoninator", "denominator", - "demonstarte", "demonstrate", - "demonstates", "demonstrates", - "demonstraby", 
"demonstrably", - "demonstrant", "demonstrate", - "demonstrats", "demonstrates", - "demosntrate", "demonstrate", - "denegrating", "denigrating", - "denomenator", "denominator", - "denominador", "denominator", - "denominaron", "denominator", - "denominater", "denominator", - "denominaton", "denomination", - "denomitator", "denominator", - "denomonator", "denominator", - "denonimator", "denominator", - "deocrations", "decorations", - "deomcracies", "democracies", - "deparmental", "departmental", - "depedencies", "dependencies", - "dependancey", "dependency", - "dependencey", "dependency", - "dependencie", "dependence", - "dependenies", "dependencies", - "deplorabile", "deplorable", - "depressieve", "depressive", - "depresssion", "depression", - "deprevation", "deprivation", - "deprication", "deprivation", - "deprivating", "deprivation", - "deprivition", "deprivation", - "deprovation", "deprivation", - "depserately", "desperately", - "depseration", "desperation", - "deregulatin", "deregulation", - "derivativos", "derivatives", - "derivitaves", "derivatives", - "derivitives", "derivatives", - "derpivation", "deprivation", - "derviatives", "derivatives", - "descandants", "descendants", - "descendands", "descendants", - "descendends", "descended", - "descendenta", "descendants", - "descentants", "descendants", - "descirption", "descriptions", - "descprition", "descriptions", - "describiste", "describes", - "describtion", "description", - "descripcion", "description", - "descripiton", "descriptions", - "descripters", "descriptors", - "descriptoin", "descriptions", - "descriptons", "descriptions", - "descritpion", "descriptions", - "descrpition", "descriptions", - "desensitied", "desensitized", - "desensitzed", "desensitized", - "desentisize", "desensitized", - "desgination", "designation", - "designacion", "designation", - "designstion", "designation", - "desinations", "destinations", - "desingation", "designation", - "desitnation", "destination", - "desoriented", "disoriented", - "desparately", "desperately", - "desparation", "desperation", - "desperating", "desperation", - "desperatley", "desperately", - "despirately", "desperately", - "despiration", "desperation", - "destablized", "destabilized", - "destiantion", "destinations", - "destinaiton", "destinations", - "destinatons", "destinations", - "destinction", "destination", - "destraction", "destruction", - "destruccion", "destruction", - "destruciton", "destruction", - "destructivo", "destruction", - "destruktion", "destruction", - "destruktive", "destructive", - "deteoriated", "deteriorated", - "determanism", "determinism", - "determening", "determining", - "determenism", "determinism", - "determinare", "determine", - "determinato", "determination", - "determinded", "determine", - "determinsim", "determinism", - "detramental", "detrimental", - "detremental", "detrimental", - "detrimentul", "detrimental", - "detuschland", "deutschland", - "deustchland", "deutschland", - "deutchsland", "deutschland", - "deutcshland", "deutschland", - "deutschalnd", "deutschland", - "deutshcland", "deutschland", - "develepmont", "developments", - "develompent", "developments", - "developemnt", "developments", - "developmant", "developmental", - "developmetn", "developments", - "developmnet", "developments", - "developpers", "developers", - "develpoment", "developments", - "deveolpment", "developments", - "deveploment", "developments", - "devestating", "devastating", - "devistating", "devastating", - "deyhdration", "dehydration", - "diagnositcs", "diagnostic", 
- "diagnositic", "diagnostic", - "diagonstics", "diagnostic", - "dictatorhip", "dictatorship", - "dictionaire", "dictionaries", - "dictionairy", "dictionary", - "dictionarys", "dictionaries", - "dictionnary", "dictionary", - "differances", "differences", - "differantly", "differently", - "differental", "differential", - "differentes", "differences", - "differneces", "differences", - "differnetly", "differently", - "difficulity", "difficulty", - "difficultes", "difficulties", - "dificulties", "difficulties", - "dimensiones", "dimensions", - "dimentional", "dimensional", - "dimesnional", "dimensional", - "diminisheds", "diminishes", - "diminsihing", "diminishing", - "diminuitive", "diminutive", - "diminushing", "diminishing", - "dinosaurios", "dinosaurs", - "direccional", "directional", - "direcitonal", "directional", - "directorguy", "directory", - "directorios", "directors", - "direktional", "directional", - "disadvantge", "disadvantage", - "disagreemet", "disagreements", - "disagreemtn", "disagreements", - "disapperead", "disappeared", - "disapporval", "disapproval", - "disapprovel", "disapproval", - "disasterous", "disastrous", - "disastreous", "disastrous", - "disastrious", "disastrous", - "disastruous", "disastrous", - "disatisfied", "dissatisfied", - "disciplened", "disciplined", - "disciplinas", "disciplines", - "disciplince", "disciplines", - "disclipined", "disciplined", - "disclipines", "disciplines", - "discogrophy", "discography", - "discogrpahy", "discography", - "disconencts", "disconnects", - "disconneted", "disconnected", - "disconnnect", "disconnect", - "discontined", "discontinued", - "discontiued", "discontinued", - "discrapency", "discrepancy", - "discretited", "discredited", - "discrimante", "discriminate", - "discrimiate", "discriminate", - "discussiong", "discussing", - "discusssion", "discussions", - "disgraseful", "disgraceful", - "disgrateful", "disgraceful", - "disgrunteld", "disgruntled", - "disgustigly", "disgustingly", - "disgustingy", "disgustingly", - "disgustinly", "disgustingly", - "disicplined", "disciplined", - "disicplines", "disciplines", - "disingenuos", "disingenuous", - "dismanlting", "dismantling", - "dismantaled", "dismantled", - "dismanteled", "dismantled", - "disobediant", "disobedient", - "disocgraphy", "discography", - "disparingly", "disparagingly", - "dispensaire", "dispensaries", - "dispensarie", "dispenser", - "dispensiary", "dispensary", - "displacemnt", "displacement", - "disposicion", "disposition", - "disputandem", "disputandum", - "disqualifed", "disqualified", - "disregaring", "disregarding", - "dissapeared", "disappeared", - "dissapoined", "dissapointed", - "dissapointd", "dissapointed", - "dissapoited", "dissapointed", - "dissappears", "disappears", - "dissatisfed", "dissatisfied", - "disscusions", "discussions", - "dissertaion", "dissertation", - "dissipatore", "dissipate", - "distatesful", "distasteful", - "distatseful", "distasteful", - "disterbance", "disturbance", - "disticntion", "distinctions", - "distinciton", "distinction", - "distincitve", "distinctive", - "distinctily", "distinctly", - "distingiush", "distinguish", - "distinguise", "distinguished", - "distinktion", "distinction", - "distinquish", "distinguish", - "distirbance", "disturbance", - "distirbuted", "distribute", - "distirbutor", "distributor", - "distraccion", "distraction", - "distractons", "distracts", - "distraktion", "distraction", - "distribitor", "distributor", - "distribuent", "distribute", - "distribuite", "distribute", - "distribuito", "distribution", 
- "distributie", "distributed", - "distributin", "distribution", - "distributio", "distributor", - "distrobuted", "distributed", - "distrubance", "disturbance", - "distrubited", "distributed", - "distrubitor", "distributor", - "distrubuted", "distributed", - "distrubutor", "distributor", - "distructive", "destructive", - "distuingish", "distinguish", - "distunguish", "distinguish", - "disturbante", "disturbance", - "disturbence", "disturbance", - "disucssions", "discussions", - "divisionals", "divisions", - "doccumented", "documented", - "documantary", "documentary", - "documenatry", "documentary", - "documentare", "documentaries", - "documentato", "documentation", - "documentery", "documentary", - "documentory", "documentary", - "domesticted", "domesticated", - "dominateurs", "dominates", - "dominationg", "dominating", - "donwloading", "downloading", - "doublellift", "doublelift", - "downlaoding", "downloading", - "downloadbel", "downloadable", - "downloadbig", "downloading", - "downloadble", "downloadable", - "downvoteers", "downvoters", - "downvoteing", "downvoting", - "downvoteres", "downvoters", - "downvoteros", "downvoters", - "downvoteurs", "downvoters", - "downvotters", "downvoters", - "downvotting", "downvoting", - "dramaticaly", "dramatically", - "dramaticlly", "dramatically", - "drasitcally", "drastically", - "dsyfunction", "dysfunction", - "duetschland", "deutschland", - "durabillity", "durability", - "dyanmically", "dynamically", - "dymanically", "dynamically", - "dysfonction", "dysfunction", - "dysfucntion", "dysfunction", - "dysfunciton", "dysfunction", - "dysfunktion", "dysfunction", - "earhtquakes", "earthquakes", - "earthqaukes", "earthquakes", - "earthquacks", "earthquakes", - "economicaly", "economically", - "economiclly", "economically", - "economisiti", "economist", - "economistes", "economists", - "educacional", "educational", - "effeciently", "efficiently", - "effecitvely", "effectively", - "effectivley", "effectively", - "efficeintly", "efficiently", - "efficiantly", "efficiently", - "efficientcy", "efficiently", - "effortlesly", "effortlessly", - "effortlessy", "effortlessly", - "egaletarian", "egalitarian", - "egalitatian", "egalitarian", - "egaliterian", "egalitarian", - "egostitical", "egotistical", - "egotastical", "egotistical", - "egotestical", "egotistical", - "egotisitcal", "egotistical", - "egotisticle", "egotistical", - "egotystical", "egotistical", - "ehtnicities", "ethnicities", - "ejacluation", "ejaculation", - "ejacualtion", "ejaculation", - "electoratul", "electoral", - "electornics", "electronics", - "electricain", "electrician", - "electricial", "electrical", - "electricien", "electrician", - "electricion", "electrician", - "electricman", "electrician", - "electrisity", "electricity", - "electritian", "electrician", - "electrocity", "electricity", - "electrolyes", "electrolytes", - "electrolyts", "electrolytes", - "electroncis", "electrons", - "electroylte", "electrolytes", - "elementrary", "elementary", - "eleminating", "eliminating", - "elimanation", "elimination", - "eliminacion", "elimination", - "elimintates", "eliminates", - "ellipitcals", "elliptical", - "eloquentely", "eloquently", - "emabrassing", "embarassing", - "embaraasing", "embarassing", - "embarasaing", "embarassing", - "embarassign", "embarassing", - "embarassimg", "embarassing", - "embarassing", "embarrassing", - "embarissing", "embarassing", - "embarrasing", "embarrassing", - "embarressed", "embarrassed", - "embarrssing", "embarassing", - "emergancies", "emergencies", - 
"emergencias", "emergencies", - "emergenices", "emergencies", - "emmediately", "immediately", - "emmisarries", "emissaries", - "emotionella", "emotionally", - "empahsizing", "emphasizing", - "empathethic", "empathetic", - "emphacizing", "emphasizing", - "emphatising", "emphasizing", - "emphatizing", "emphasizing", - "emphazising", "emphasizing", - "emphesizing", "emphasizing", - "empiracally", "empirically", - "empirialism", "imperialism", - "empirialist", "imperialist", - "enchamtment", "enchantment", - "enchancment", "enchantment", - "enchanement", "enchantment", - "enchanthing", "enchanting", - "enchantmant", "enchantment", - "enchantmens", "enchantments", - "enchantmets", "enchantments", - "encomapsses", "encompasses", - "encompasess", "encompasses", - "encompesses", "encompasses", - "encounteres", "encounters", - "encoutnered", "encountered", - "encryptiion", "encryption", - "encyclopdia", "encyclopedia", - "encylopedia", "encyclopedia", - "endagnering", "endangering", - "endandering", "endangering", - "endorcement", "endorsement", - "endoresment", "endorsement", - "engagaments", "engagements", - "engeneering", "engineering", - "enginerring", "engineering", - "enginnering", "engineering", - "enlargments", "enlargements", - "enligthened", "enlightened", - "enourmously", "enormously", - "enterpirses", "enterprises", - "enterprices", "enterprises", - "enterprishe", "enterprises", - "entertainig", "entertaining", - "entertwined", "entertained", - "enthicities", "ethnicities", - "enthisiasts", "enthusiasts", - "enthuasists", "enthusiasts", - "enthuisasts", "enthusiasts", - "enthusaists", "enthusiasts", - "enthusiants", "enthusiast", - "enthusiasic", "enthusiastic", - "enthusiasim", "enthusiasm", - "enthusiasum", "enthusiasm", - "enthusiatic", "enthusiastic", - "enthusiests", "enthusiasts", - "enthusigasm", "enthusiasm", - "enthusisast", "enthusiasts", - "entrepeneur", "entrepreneur", - "entreperure", "entrepreneur", - "entrepeuner", "entrepreneur", - "entreprener", "entrepreneurs", - "entreprenur", "entrepreneur", - "entretained", "entertained", - "envinroment", "environments", - "enviorments", "environments", - "enviornment", "environment", - "envirnoment", "environment", - "enviroments", "environments", - "enviromnent", "environments", - "environemnt", "environment", - "environmnet", "environments", - "envrionment", "environment", - "equilavents", "equivalents", - "equilbirium", "equilibrium", - "equilevants", "equivalents", - "equilibirum", "equilibrium", - "equilibriam", "equilibrium", - "equilibruim", "equilibrium", - "equivalance", "equivalence", - "equivalants", "equivalents", - "equivalenet", "equivalents", - "equivallent", "equivalent", - "equivelance", "equivalence", - "equivelants", "equivalents", - "equivelents", "equivalents", - "equivilants", "equivalents", - "equivilence", "equivalence", - "equivilents", "equivalents", - "equivlalent", "equivalent", - "equivlanets", "equivalents", - "equivolence", "equivalence", - "equivolents", "equivalents", - "essencially", "essentially", - "essentailly", "essentially", - "essentialls", "essentials", - "essentually", "essentially", - "establising", "establishing", - "ethicallity", "ethically", - "ethincities", "ethnicities", - "ethniticies", "ethnicities", - "europeaners", "europeans", - "europeaness", "europeans", - "evaluatiing", "evaluating", - "evaluationg", "evaluating", - "evangalical", "evangelical", - "evangelikal", "evangelical", - "evengalical", "evangelical", - "evenhtually", "eventually", - "everyonehas", "everyones", - 
"everyonelse", "everyones", - "evidentally", "evidently", - "exacarbated", "exacerbated", - "exacberated", "exacerbated", - "exagerating", "exaggerating", - "exagerrated", "exaggerated", - "exagerrates", "exaggerates", - "exaggarated", "exaggerated", - "exaggareted", "exaggerate", - "exaggeratin", "exaggeration", - "exaggerrate", "exaggerate", - "exaggurated", "exaggerated", - "exarcebated", "exacerbated", - "excalmation", "exclamation", - "excepcional", "exceptional", - "exceptionel", "exceptional", - "excessivley", "excessively", - "exceutioner", "executioner", - "exchanching", "exchanging", - "exclamacion", "exclamation", - "exclamating", "exclamation", - "exclamativo", "exclamation", - "exclemation", "exclamation", - "exclimation", "exclamation", - "exclucivity", "exclusivity", - "exclusivety", "exclusivity", - "exclusivily", "exclusivity", - "exclusivley", "exclusively", - "excpetional", "exceptional", - "exculsively", "exclusively", - "exculsivity", "exclusivity", - "execitioner", "executioner", - "execptional", "exceptional", - "exectuables", "executable", - "exectuioner", "executioner", - "executionar", "executioner", - "executionor", "executioner", - "exerciseing", "exercising", - "exeuctioner", "executioner", - "existantial", "existential", - "existencial", "existential", - "existensial", "existential", - "existentiel", "existential", - "exlcamation", "exclamation", - "exlcusively", "exclusively", - "exlcusivity", "exclusivity", - "exoskelaton", "exoskeleton", - "expansiones", "expansions", - "expectantcy", "expectancy", - "expectating", "expectation", - "expectional", "exceptional", - "expendature", "expenditure", - "expendeture", "expenditure", - "expentiture", "expenditure", - "expereinced", "experienced", - "expereinces", "experiences", - "experements", "experiments", - "experianced", "experienced", - "experiances", "experiences", - "experiemnts", "experiments", - "experiening", "experiencing", - "experimetal", "experimental", - "experimeted", "experimented", - "experssions", "expressions", - "expiditions", "expeditions", - "expierenced", "experienced", - "expierences", "experiences", - "expirements", "experiments", - "explainging", "explaining", - "explaintory", "explanatory", - "explanaiton", "explanations", - "explanetary", "explanatory", - "explanetory", "explanatory", - "explanitary", "explanatory", - "explanotory", "explanatory", - "explenation", "explanation", - "explenatory", "explanatory", - "explicitely", "explicitly", - "explicitily", "explicitly", - "explination", "explanation", - "explinatory", "explanatory", - "exploitaion", "exploitation", - "exploitatie", "exploitative", - "explonation", "exploration", - "exploracion", "exploration", - "explorating", "exploration", - "explorerers", "explorers", - "explosiones", "explosions", - "explotacion", "exploration", - "expodential", "exponential", - "exponantial", "exponential", - "exponencial", "exponential", - "exponentiel", "exponential", - "expresscoin", "expression", - "expressivos", "expressions", - "expresssive", "expressive", - "expressview", "expressive", - "exprimental", "experimental", - "expropiated", "expropriated", - "extensiones", "extensions", - "extensivley", "extensively", - "extragavant", "extravagant", - "extrapalate", "extrapolate", - "extraploate", "extrapolate", - "extrapolant", "extrapolate", - "extrapolare", "extrapolate", - "extrapolite", "extrapolate", - "extrapulate", "extrapolate", - "extravagent", "extravagant", - "extravagina", "extravagant", - "extravegant", "extravagant", - 
"extravigant", "extravagant", - "extravogant", "extravagant", - "extremistas", "extremists", - "extremistes", "extremists", - "extropolate", "extrapolate", - "fabircation", "fabrication", - "fabricacion", "fabrication", - "fabrikation", "fabrication", - "facilitarte", "facilitate", - "facilitiate", "facilitate", - "facillitate", "facilitate", - "facisnation", "fascination", - "facsination", "fascination", - "factuallity", "factually", - "familairity", "familiarity", - "familairize", "familiarize", - "familiaries", "familiarize", - "familierize", "familiarize", - "fanatsizing", "fantasizing", - "fanficitons", "fanfiction", - "fantacising", "fantasizing", - "fantacizing", "fantasizing", - "fantasazing", "fantasizing", - "fantasiaing", "fantasizing", - "fantasyzing", "fantasizing", - "fantazising", "fantasizing", - "fascinacion", "fascination", - "fascinatinf", "fascination", - "fascisation", "fascination", - "fascization", "fascination", - "fashionalbe", "fashionable", - "fashoinable", "fashionable", - "fatalitites", "fatalities", - "favoritisme", "favorites", - "favoutrable", "favourable", - "felxibility", "flexibility", - "feministers", "feminists", - "feministisk", "feminists", - "fermentaion", "fermentation", - "fermenterad", "fermented", - "fertilizier", "fertilizer", - "fertizilers", "fertilizer", - "festivalens", "festivals", - "fignernails", "fingernails", - "fignerprint", "fingerprint", - "figurativly", "figuratively", - "finanically", "financially", - "finantially", "financially", - "fingerpints", "fingertips", - "fingerpoint", "fingerprint", - "fingertrips", "fingertips", - "firefighers", "firefighters", - "firefigther", "firefighters", - "firendzoned", "friendzoned", - "firghtening", "frightening", - "flatterende", "flattered", - "flawlessely", "flawlessly", - "flawlessley", "flawlessly", - "flexibiltiy", "flexibility", - "flourescent", "fluorescent", - "fluctuaties", "fluctuate", - "fluctuative", "fluctuate", - "flutteryshy", "fluttershy", - "forcefullly", "forcefully", - "foreseaable", "foreseeable", - "foresseable", "foreseeable", - "forgettting", "forgetting", - "forgiviness", "forgiveness", - "formallized", "formalized", - "formattting", "formatting", - "formidabble", "formidable", - "formidabelt", "formidable", - "formidabile", "formidable", - "fortitudine", "fortitude", - "fortuantely", "fortunately", - "fortunantly", "fortunately", - "fortunatley", "fortunately", - "fortunetely", "fortunately", - "franchieses", "franchises", - "frankensite", "frankenstein", - "frankensten", "frankenstein", - "fransiscans", "franciscans", - "freindships", "friendships", - "freindzoned", "friendzoned", - "frequenices", "frequencies", - "frequensies", "frequencies", - "frequenties", "frequencies", - "frequentily", "frequently", - "frequenzies", "frequencies", - "friendboned", "friendzoned", - "friendlines", "friendlies", - "friendzonie", "friendzoned", - "frientships", "friendships", - "frientzoned", "friendzoned", - "frightenend", "frightened", - "frightining", "frightening", - "frigthening", "frightening", - "frinedzoned", "friendzoned", - "frontlinies", "frontline", - "frontlinjen", "frontline", - "frustartion", "frustrations", - "frustracion", "frustration", - "frustraited", "frustrated", - "frustrantes", "frustrates", - "frustrasion", "frustrations", - "frustrasted", "frustrates", - "frustraties", "frustrates", - "fucntioning", "functioning", - "fulfillling", "fulfilling", - "fulfullment", "fulfillment", - "fullfilment", "fulfillment", - "fullscreeen", "fullscreen", - "funcitoning", 
"functioning", - "functionaly", "functionally", - "functionnal", "functional", - "fundamentas", "fundamentals", - "fundamently", "fundamental", - "fundametals", "fundamentals", - "fundamnetal", "fundamentals", - "fundemantal", "fundamental", - "fundemental", "fundamental", - "fundimental", "fundamental", - "furhtermore", "furthermore", - "furstration", "frustration", - "furthremore", "furthermore", - "furthurmore", "furthermore", - "futurisitic", "futuristic", - "gangsterest", "gangsters", - "gangsterous", "gangsters", - "gauntlettes", "gauntlets", - "geneologies", "genealogies", - "generalizng", "generalizing", - "generatting", "generating", - "genitaliban", "genitalia", - "gentlemanne", "gentlemen", - "girlfirends", "girlfriends", - "girlfreinds", "girlfriends", - "girlfrients", "girlfriends", - "glorifierad", "glorified", - "glorifindel", "glorified", - "goosebumbps", "goosebumps", - "govenrments", "governments", - "govermental", "governmental", - "governemnts", "governments", - "governmanet", "governmental", - "governmeant", "governmental", - "govormental", "governmental", - "gracefullly", "gracefully", - "grahpically", "graphically", - "grammarical", "grammatical", - "grammaticly", "grammatical", - "grammitical", "grammatical", - "graphcially", "graphically", - "grassrooots", "grassroots", - "gratuitious", "gratuitous", - "gratuituous", "gratuitous", - "gravitatiei", "gravitate", - "grilfriends", "girlfriends", - "grpahically", "graphically", - "guaranteeds", "guarantees", - "guerrillera", "guerrilla", - "gunslingner", "gunslinger", - "hamburgaren", "hamburger", - "hamburgeres", "hamburgers", - "hamburglers", "hamburgers", - "hamburguers", "hamburgers", - "handlebards", "handlebars", - "handrwiting", "handwriting", - "handycapped", "handicapped", - "hanidcapped", "handicapped", - "harassement", "harassment", - "harrasments", "harassments", - "harrassment", "harassment", - "harvestgain", "harvesting", - "headquartes", "headquarters", - "headquaters", "headquarters", - "hearhtstone", "hearthstone", - "heartborken", "heartbroken", - "heartbraker", "heartbreak", - "heartbrakes", "heartbreak", - "heartsthone", "hearthstone", - "heaviweight", "heavyweight", - "heavyweigth", "heavyweight", - "heavywieght", "heavyweight", - "helicoptors", "helicopters", - "helicotpers", "helicopters", - "helicpoters", "helicopters", - "helictopers", "helicopters", - "helikopters", "helicopters", - "hemipsheres", "hemisphere", - "hemishperes", "hemisphere", - "herathstone", "hearthstone", - "heterosexal", "heterosexual", - "hexidecimal", "hexadecimal", - "hierachical", "hierarchical", - "hierarcical", "hierarchical", - "highlighing", "highlighting", - "highschoool", "highschool", - "hipopotamus", "hippopotamus", - "historicaly", "historically", - "historicans", "historians", - "historietas", "histories", - "historinhas", "historians", - "homecomeing", "homecoming", - "homecomming", "homecoming", - "homelesness", "homelessness", - "homelessess", "homelessness", - "homeowneris", "homeowners", - "homoegenous", "homogeneous", - "homogeneize", "homogenize", - "homogenious", "homogeneous", - "homogenuous", "homogeneous", - "homophoboes", "homophobe", - "homosexuais", "homosexuals", - "homosexuels", "homosexuals", - "hopelessely", "hopelessly", - "hopelessley", "hopelessly", - "hopsitality", "hospitality", - "horizonatal", "horizontal", - "horizontaal", "horizontal", - "horizontaly", "horizontally", - "horrendeous", "horrendous", - "horrendious", "horrendous", - "horrenduous", "horrendous", - "hospitalzed", 
"hospitalized", - "hospotality", "hospitality", - "househoulds", "households", - "humanitarna", "humanitarian", - "humanitites", "humanities", - "humilitaing", "humiliating", - "humilitaion", "humiliation", - "humillating", "humiliating", - "humillation", "humiliation", - "hurricaines", "hurricanes", - "hurricances", "hurricanes", - "hurricanger", "hurricane", - "hyperbollic", "hyperbolic", - "hyperbrophy", "hypertrophy", - "hyperthropy", "hypertrophy", - "hypertorphy", "hypertrophy", - "hypertrohpy", "hypertrophy", - "hypocritcal", "hypocritical", - "hypocritial", "hypocritical", - "hypocrities", "hypocrite", - "hypothesees", "hypotheses", - "hypothesies", "hypothesis", - "hystericaly", "hysterically", - "hystericlly", "hysterically", - "iconclastic", "iconoclastic", - "idealisitic", "idealistic", - "identifible", "identifiable", - "identitites", "identities", - "identitties", "identities", - "ideologiers", "ideologies", - "ideologisen", "ideologies", - "ideologiset", "ideologies", - "ideologiske", "ideologies", - "illegallity", "illegally", - "illegitamte", "illegitimate", - "illegitmate", "illegitimate", - "illsutrator", "illustrator", - "illuminanti", "illuminati", - "illuminarti", "illuminati", - "illuminatti", "illuminati", - "illuminauti", "illuminati", - "illuminiati", "illuminati", - "illuminista", "illuminati", - "illumintati", "illuminati", - "illustarted", "illustrated", - "illustartor", "illustrator", - "illustraded", "illustrated", - "illustraion", "illustration", - "illustrater", "illustrator", - "illustratie", "illustrate", - "illustratin", "illustrations", - "illustraton", "illustration", - "imaganative", "imaginative", - "imaganitive", "imaginative", - "imaginacion", "imagination", - "imaginatiei", "imaginative", - "imaginating", "imagination", - "imaginativo", "imagination", - "imaginitave", "imaginative", - "imbalanaced", "imbalanced", - "imbalanaces", "imbalances", - "imbalancers", "imbalances", - "immatureity", "immaturity", - "immedeately", "immediately", - "immediantly", "immediately", - "immediatley", "immediately", - "immedietely", "immediately", - "immideately", "immediately", - "immidiately", "immediately", - "immigraiton", "immigration", - "immigrantes", "immigrants", - "immoratlity", "immortality", - "immortailty", "immortality", - "immortalisy", "immortals", - "impeccabile", "impeccable", - "imperailist", "imperialist", - "imperealist", "imperialist", - "imperialims", "imperialism", - "imperialsim", "imperialism", - "imperiarist", "imperialist", - "imperically", "empirically", - "imperislist", "imperialist", - "implausable", "implausible", - "implausbile", "implausible", - "implementas", "implements", - "implementes", "implements", - "implementig", "implementing", - "implementos", "implements", - "implicacion", "implication", - "implicatons", "implications", - "implicitely", "implicitly", - "implicitily", "implicitly", - "implikation", "implication", - "implimented", "implemented", - "importantce", "importance", - "importently", "importantly", - "imporvement", "improvement", - "impossibile", "impossible", - "impossibily", "impossibly", - "impossibley", "impossibly", - "impossiblly", "impossibly", - "impoverised", "impoverished", - "impracticle", "impractical", - "impressario", "impresario", - "impresssion", "impressions", - "imprisonent", "imprisonment", - "imprisonned", "imprisoned", - "improbabile", "improbable", - "improtantly", "importantly", - "improvemnts", "improvements", - "improvished", "improvised", - "improvision", "improvisation", - "improvments", 
"improvements", - "impulsivley", "impulsive", - "imrpovement", "improvement", - "inaccessble", "inaccessible", - "inaccuraces", "inaccuracies", - "inaccurrate", "inaccurate", - "inadvertant", "inadvertent", - "inaguration", "inauguration", - "inahbitants", "inhabitants", - "incarantion", "incarnation", - "incarcerato", "incarceration", - "incarnacion", "incarnation", - "incentivare", "incentive", - "incentivate", "incentive", - "incentivice", "incentive", - "incentivies", "incentives", - "incidencies", "incidence", - "incidentaly", "incidentally", - "incidential", "incidental", - "inclanation", "inclination", - "inclenation", "inclination", - "inclinacion", "inclination", - "inclinaison", "inclination", - "incognition", "incognito", - "incoherrent", "incoherent", - "incompatble", "incompatible", - "incompatent", "incompetent", - "incompetant", "incompetent", - "incompitent", "incompetent", - "incompotent", "incompetent", - "incomptable", "incompatible", - "inconsisent", "inconsistent", - "inconveniet", "inconvenient", - "incoroprate", "incorporate", - "incorparate", "incorporate", - "incorperate", "incorporate", - "incorporare", "incorporate", - "incorported", "incorporated", - "incorprates", "incorporates", - "incorproate", "incorporated", - "incramental", "incremental", - "increadible", "incredible", - "incrediable", "incredible", - "incrediably", "incredibly", - "incredibile", "incredible", - "incredibily", "incredibly", - "incredibley", "incredibly", - "incrememnts", "increments", - "incremenets", "increments", - "incrementas", "increments", - "incremently", "incremental", - "incrementos", "increments", - "incrimental", "incremental", - "inctroduced", "introduced", - "indefinetly", "indefinitely", - "indefininte", "indefinite", - "indefinitly", "indefinitely", - "indepdenent", "independents", - "indepedence", "independence", - "indepednent", "independents", - "independant", "independent", - "independece", "independence", - "independens", "independents", - "independetn", "independents", - "independets", "independents", - "independnet", "independents", - "indepentend", "independents", - "indepentent", "independent", - "indianapols", "indianapolis", - "indicateurs", "indicates", - "indicatiors", "indicators", - "indictement", "indictment", - "indifferant", "indifferent", - "indiffernce", "indifference", - "indigeneous", "indigenous", - "indigenious", "indigenous", - "indigenuous", "indigenous", - "indigineous", "indigenous", - "indipendent", "independent", - "indirectely", "indirectly", - "individiual", "individual", - "individuais", "individuals", - "individualy", "individually", - "individuati", "individuality", - "individuels", "individuals", - "indivuduals", "individuals", - "industriels", "industries", - "ineffecitve", "ineffective", - "ineffektive", "ineffective", - "inefficeint", "inefficient", - "inefficiant", "inefficient", - "ineffictive", "ineffective", - "ineffizient", "inefficient", - "inequallity", "inequality", - "inevitabile", "inevitable", - "inevitabily", "inevitably", - "inevitabley", "inevitably", - "inevitablly", "inevitably", - "inexpencive", "inexpensive", - "inexpenisve", "inexpensive", - "inexperiece", "inexperience", - "inexperince", "inexperience", - "inexplicaby", "inexplicably", - "infallibale", "infallible", - "infallibile", "infallible", - "infectation", "infestation", - "inferioirty", "inferiority", - "infestating", "infestation", - "infilitrate", "infiltrate", - "infiltartor", "infiltrator", - "infiltraron", "infiltrator", - "infiltrarte", "infiltrate", - 
"infiltrater", "infiltrator", - "infiltratie", "infiltrate", - "infiltrerat", "infiltrate", - "infinitelly", "infinitely", - "infintrator", "infiltrator", - "inflamation", "inflammation", - "inflatabale", "inflatable", - "inflitrator", "infiltrator", - "influancing", "influencing", - "influencial", "influential", - "influencian", "influencing", - "influenting", "influencing", - "influentual", "influential", - "influincing", "influencing", - "infograhpic", "infographic", - "infograpgic", "infographic", - "infogrpahic", "infographic", - "informacion", "information", - "informatice", "informative", - "informatief", "informative", - "informatiei", "informative", - "informatike", "informative", - "informativo", "information", - "informitive", "informative", - "infrigement", "infringement", - "infringeing", "infringing", - "infromation", "information", - "infromative", "informative", - "infulential", "influential", - "ingerdients", "ingredients", - "ingrediants", "ingredients", - "ingreidents", "ingredient", - "ingriedents", "ingredient", - "inhabitents", "inhabitants", - "inheirtance", "inheritance", - "inheratance", "inheritance", - "inheretance", "inheritance", - "inheritence", "inheritance", - "inhertiance", "inheritance", - "initaitives", "initiatives", - "initalisers", "initialisers", - "initalising", "initialising", - "initalizers", "initializers", - "initalizing", "initializing", - "initiaitive", "initiative", - "inititiaves", "initiatives", - "innocenters", "innocents", - "innocentius", "innocents", - "innoculated", "inoculated", - "inpsiration", "inspiration", - "inquisicion", "inquisition", - "inquisistor", "inquisitor", - "inquisiting", "inquisition", - "inquisitior", "inquisitor", - "inquisitivo", "inquisition", - "inquizition", "inquisition", - "insecurites", "insecurities", - "insensative", "insensitive", - "insensetive", "insensitive", - "insentitive", "insensitive", - "insepctions", "inspections", - "inseperable", "inseparable", - "insipration", "inspiration", - "insitutions", "institutions", - "insparation", "inspiration", - "inspecticon", "inspection", - "inspectoras", "inspectors", - "insperation", "inspiration", - "inspiracion", "inspiration", - "inspirating", "inspiration", - "inspriation", "inspiration", - "instalation", "installation", - "instalement", "installment", - "installatin", "installations", - "installeert", "installer", - "installemnt", "installment", - "installling", "installing", - "installmant", "installment", - "instanciate", "instantiate", - "instantaneu", "instantaneous", - "institucion", "institution", - "institutiei", "institute", - "instituttet", "institute", - "instraments", "instruments", - "instruccion", "instruction", - "instruciton", "instruction", - "instructers", "instructors", - "instructior", "instructor", - "instructios", "instructors", - "instructivo", "instruction", - "instructons", "instructors", - "instruktion", "instruction", - "instrumenal", "instrumental", - "instrumetal", "instrumental", - "insturction", "instruction", - "insturctors", "instructors", - "insturments", "instruments", - "instutition", "institution", - "instutution", "institution", - "insufficent", "insufficient", - "insuinating", "insinuating", - "insuniating", "insinuating", - "insurgencey", "insurgency", - "intangiable", "intangible", - "intangibile", "intangible", - "inteferring", "interfering", - "integracion", "integration", - "integratron", "integration", - "integrering", "interfering", - "intelectual", "intellectual", - "inteligence", "intelligence", - 
"intellectul", "intellectuals", - "intellectus", "intellectuals", - "intellecual", "intellectual", - "intellegent", "intelligent", - "intelligant", "intelligent", - "intencional", "intentional", - "intentionly", "intentional", - "interaccion", "interaction", - "interactice", "interactive", - "interacties", "interacts", - "interactifs", "interacts", - "interactins", "interacts", - "interactios", "interacts", - "interactivo", "interaction", - "interactons", "interacts", - "interaktion", "interaction", - "interaktive", "interactive", - "interasting", "interacting", - "intercation", "integration", - "interceptin", "interception", - "intercoarse", "intercourse", - "intercource", "intercourse", - "interecting", "interacting", - "interection", "interaction", - "interelated", "interrelated", - "interersted", "interpreted", - "interesring", "interfering", - "interessted", "interested", - "interferece", "interference", - "interferens", "interferes", - "interferire", "interfere", - "interfernce", "interference", - "interferred", "interfere", - "interferres", "interferes", - "intergation", "integration", - "intergrated", "integrated", - "intermedate", "intermediate", - "intermedite", "intermediate", - "intermitent", "intermittent", - "internation", "international", - "interneters", "internets", - "internetese", "internets", - "internetest", "internets", - "interneting", "interesting", - "internetors", "internets", - "internettes", "internets", - "interperted", "interpreted", - "interperter", "interpreter", - "interprered", "interpreter", - "interpretor", "interpreter", - "interratial", "interracial", - "interresing", "interfering", - "interrogato", "interrogation", - "interrputed", "interrupted", - "interruping", "interrupting", - "interruptes", "interrupts", - "interruptis", "interrupts", - "intersecton", "intersection", - "interstelar", "interstellar", - "intertained", "intertwined", - "intertvined", "intertwined", - "intertwyned", "intertwined", - "intervalles", "intervals", - "intervation", "integration", - "interveiwed", "interviewed", - "interveiwer", "interviewer", - "intervenion", "intervening", - "intervenire", "intervene", - "interventie", "intervene", - "intervewing", "intervening", - "interviened", "interviewed", - "interviewes", "interviews", - "interviewie", "interviewer", - "intervining", "intervening", - "interwebers", "interwebs", - "interwiever", "interviewer", - "intestinces", "intestines", - "inticracies", "intricacies", - "intimadated", "intimidated", - "intimidades", "intimidated", - "intimidante", "intimidate", - "intimidatie", "intimidated", - "intimidatin", "intimidation", - "intimidaton", "intimidation", - "intimidiate", "intimidate", - "intiminated", "intimidated", - "intimitaded", "intimidated", - "intimitated", "intimidated", - "intiutively", "intuitively", - "intoleranse", "intolerance", - "intolerante", "intolerance", - "intolerence", "intolerance", - "intolernace", "intolerance", - "intolorance", "intolerance", - "intolorence", "intolerance", - "intorducing", "introducing", - "intorverted", "introverted", - "intoxicatin", "intoxication", - "intoxicaton", "intoxication", - "intoxinated", "intoxicated", - "intoxocated", "intoxicated", - "intracacies", "intricacies", - "intracicies", "intricacies", - "intraverted", "introverted", - "intrecacies", "intricacies", - "intrepreted", "interpreted", - "intrepreter", "interpreter", - "intrerupted", "interrupted", - "intricasies", "intricacies", - "intricicies", "intricacies", - "intrigueing", "intriguing", - "intrinsisch", 
"intrinsic", - "introducion", "introduction", - "introducted", "introduced", - "introductie", "introduce", - "introvertie", "introverted", - "introvertis", "introverts", - "intruducing", "introducing", - "intrumental", "instrumental", - "intuatively", "intuitively", - "intuitevely", "intuitively", - "intuitivley", "intuitively", - "intuituvely", "intuitively", - "inutitively", "intuitively", - "invaldiates", "invalidates", - "invalidades", "invalidates", - "invalidante", "invalidate", - "invariabley", "invariably", - "invariablly", "invariably", - "inventiones", "inventions", - "invesitgate", "investigate", - "investagate", "investigate", - "investiagte", "investigate", - "investigare", "investigate", - "invincibile", "invincible", - "invincinble", "invincible", - "invisibiity", "invisibility", - "invisibiliy", "invisibility", - "invokations", "invocations", - "involantary", "involuntary", - "involentary", "involuntary", - "involintary", "involuntary", - "involontary", "involuntary", - "involunatry", "involuntary", - "invulnerabe", "invulnerable", - "invulnerble", "invulnerable", - "iresistable", "irresistible", - "iresistably", "irresistibly", - "iresistible", "irresistible", - "iresistibly", "irresistibly", - "irrationaly", "irrationally", - "irrationnal", "irrational", - "islamisists", "islamists", - "islamisters", "islamists", - "islamistisk", "islamists", - "isntruments", "instruments", - "jacksonvile", "jacksonville", - "jailbroaken", "jailbroken", - "jailbrocken", "jailbroken", - "jounralists", "journalists", - "jouranlists", "journalists", - "journalisim", "journalism", - "journalistc", "journalistic", - "journolists", "journalists", - "judegmental", "judgemental", - "judgamental", "judgemental", - "judgementle", "judgemental", - "judgementsl", "judgemental", - "judgenental", "judgemental", - "jugdemental", "judgemental", - "juggernaugt", "juggernaut", - "juggernault", "juggernaut", - "juggernaunt", "juggernaut", - "justifyable", "justifiable", - "kidnappning", "kidnapping", - "kidnappping", "kidnapping", - "kilometeres", "kilometers", - "kindergaten", "kindergarten", - "knowledgabe", "knowledgable", - "knowledgble", "knowledgable", - "kryptoninte", "kryptonite", - "lacklusture", "lackluster", - "laughablely", "laughably", - "legalizaing", "legalizing", - "legalizaton", "legalization", - "legalizeing", "legalizing", - "legenadries", "legendaries", - "legendaires", "legendaries", - "legendarios", "legendaries", - "legendarisk", "legendaries", - "legendaryes", "legendaries", - "legenderies", "legendaries", - "legilsation", "legislation", - "legislacion", "legislation", - "legislativo", "legislation", - "legistation", "legislation", - "legistative", "legislative", - "legistators", "legislators", - "legitematly", "legitimately", - "legitimancy", "legitimacy", - "legitimatcy", "legitimacy", - "legitimatly", "legitimately", - "legitimetly", "legitimately", - "legnedaries", "legendaries", - "lengedaries", "legendaries", - "liberalisim", "liberalism", - "liberatrian", "libertarians", - "libertairan", "libertarians", - "libertarain", "libertarian", - "libertarias", "libertarians", - "libertarien", "libertarian", - "libertaryan", "libertarian", - "libertatian", "libertarian", - "liberterian", "libertarian", - "libguistics", "linguistics", - "libretarian", "libertarian", - "lieutennant", "lieutenant", - "lieutentant", "lieutenant", - "lightenning", "lightening", - "lightenting", "lightening", - "lightheared", "lighthearted", - "lightheated", "lighthearted", - "lightweigth", "lightweight", - 
"lightwieght", "lightweight", - "lightwright", "lightweight", - "ligthweight", "lightweight", - "limitaitons", "limitation", - "linguisitcs", "linguistics", - "linguisitic", "linguistic", - "lingusitics", "linguistics", - "lithuaninan", "lithuania", - "littlefiger", "littlefinger", - "littlefiner", "littlefinger", - "lockscreeen", "lockscreen", - "longevitity", "longevity", - "lotharingen", "lothringen", - "louisvillle", "louisville", - "maginficent", "magnificent", - "magneficent", "magnificent", - "magnicifent", "magnificent", - "magnifacent", "magnificent", - "magnifecent", "magnificent", - "magnificant", "magnificent", - "magnitudine", "magnitude", - "maintainted", "maintained", - "maintanance", "maintenance", - "maintanence", "maintenance", - "maintenence", "maintenance", - "maintianing", "maintaining", - "maintinaing", "maintaining", - "maintinance", "maintenance", - "maintinence", "maintenance", - "malfonction", "malfunction", - "malfucntion", "malfunction", - "malfunciton", "malfunction", - "malfuncting", "malfunction", - "malfunktion", "malfunction", - "malpractise", "malpractice", - "malpractive", "malpractice", - "maneouvring", "manoeuvring", - "manifestado", "manifesto", - "manifestano", "manifesto", - "manifestato", "manifesto", - "manifestion", "manifesto", - "manifestior", "manifesto", - "manifestons", "manifests", - "manifestors", "manifests", - "manipluated", "manipulated", - "manipualted", "manipulated", - "manipulatie", "manipulative", - "manipulatin", "manipulation", - "manipulaton", "manipulation", - "maniuplated", "manipulated", - "mannerisims", "mannerisms", - "manslaugher", "manslaughter", - "manslaugter", "manslaughter", - "manufacters", "manufactures", - "manufacteur", "manufactures", - "manufactued", "manufactured", - "manufactuer", "manufacture", - "manufacturs", "manufactures", - "manufacuter", "manufacture", - "manufacutre", "manufactures", - "manufatured", "manufactured", - "manupilated", "manipulated", - "marganilize", "marginalized", - "marhsmallow", "marshmallow", - "marijuannas", "marijuana", - "markerplace", "marketplace", - "marketpalce", "marketplace", - "marshamllow", "marshmallow", - "marshmalows", "marshmallows", - "masculanity", "masculinity", - "masculenity", "masculinity", - "masrhmallow", "marshmallow", - "mastermined", "mastermind", - "masterpeace", "masterpiece", - "masterpeice", "masterpiece", - "mastrubated", "masturbated", - "mastrubates", "masturbate", - "masturabted", "masturbated", - "masturbaing", "masturbating", - "masturbarte", "masturbate", - "masturbathe", "masturbated", - "masturbatie", "masturbated", - "masturbatin", "masturbation", - "masturbaton", "masturbation", - "masturbsted", "masturbated", - "masturpiece", "masterpiece", - "masuclinity", "masculinity", - "matchamking", "matchmaking", - "materalists", "materialist", - "materialsim", "materialism", - "mathamatics", "mathematics", - "mathcmaking", "matchmaking", - "mathemagics", "mathematics", - "mathemetics", "mathematics", - "mathimatics", "mathematics", - "matieralism", "materialism", - "maybelleine", "maybelline", - "maybelliene", "maybelline", - "maybellinne", "maybelline", - "maybellline", "maybelline", - "mdifielders", "midfielders", - "meatballers", "meatballs", - "mecernaries", "mercenaries", - "mechanicaly", "mechanically", - "mechanichal", "mechanical", - "mechaniclly", "mechanically", - "mechanicsms", "mechanisms", - "mechanisims", "mechanism", - "mechanismus", "mechanisms", - "medicaitons", "medications", - "medicineras", "medicines", - "meditatiing", 
"meditating", - "meditationg", "meditating", - "mentionning", "mentioning", - "mercanaries", "mercenaries", - "mercaneries", "mercenaries", - "mercenaires", "mercenaries", - "mercenarias", "mercenaries", - "mercenarios", "mercenaries", - "merceneries", "mercenaries", - "merchandice", "merchandise", - "merchandies", "merchandise", - "merchanidse", "merchandise", - "merchanters", "merchants", - "merchendise", "merchandise", - "merchindise", "merchandise", - "mercinaries", "mercenaries", - "mercineries", "mercenaries", - "metabolisim", "metabolism", - "metabolitic", "metabolic", - "metaphisics", "metaphysics", - "metaphorial", "metaphorical", - "metaphorics", "metaphors", - "metaphsyics", "metaphysics", - "metaphyiscs", "metaphysics", - "methodoligy", "methodology", - "metholodogy", "methodology", - "metropolian", "metropolitan", - "metropolies", "metropolis", - "metropollis", "metropolis", - "metropolois", "metropolis", - "micorcenter", "microcenter", - "micorphones", "microphones", - "microcender", "microcenter", - "microcentre", "microcenter", - "microcentro", "microcenter", - "microhpones", "microphones", - "microscrope", "microscope", - "microwavees", "microwaves", - "microwavers", "microwaves", - "midfeilders", "midfielders", - "midfiedlers", "midfielders", - "midfileders", "midfielders", - "midifelders", "midfielders", - "millienaire", "millionaire", - "millionairs", "millionaires", - "millionarie", "millionaire", - "millioniare", "millionaire", - "mindlessely", "mindlessly", - "mindlessley", "mindlessly", - "minimalstic", "minimalist", - "ministerens", "ministers", - "ministerios", "ministers", - "minneaoplis", "minneapolis", - "minneaplois", "minneapolis", - "minniapolis", "minneapolis", - "miraculaous", "miraculous", - "miraculosly", "miraculously", - "miraculousy", "miraculously", - "mircocenter", "microcenter", - "mircophones", "microphones", - "mircoscopic", "microscopic", - "miscairrage", "miscarriage", - "miscarraige", "miscarriage", - "miscarridge", "miscarriage", - "miscarriege", "miscarriage", - "mischeivous", "mischievous", - "mischevious", "mischievous", - "misdameanor", "misdemeanor", - "misdeamenor", "misdemeanor", - "misdemeaner", "misdemeanor", - "misdemenaor", "misdemeanor", - "misdemenors", "misdemeanors", - "misdimeanor", "misdemeanor", - "misdomeanor", "misdemeanor", - "miserablely", "miserably", - "misfortunte", "misfortune", - "misimformed", "misinformed", - "misinterept", "misinterpret", - "misinterpet", "misinterpret", - "misoginysts", "misogynist", - "misognyists", "misogynist", - "misogyinsts", "misogynist", - "misogynisic", "misogynistic", - "misogynistc", "misogynistic", - "misogynstic", "misogynist", - "missionaire", "missionaries", - "missionairy", "missionary", - "missionares", "missionaries", - "missionaris", "missionaries", - "missionarry", "missionary", - "missionnary", "missionary", - "mississipis", "mississippi", - "misspeeling", "misspelling", - "misspellled", "misspelled", - "mistakengly", "mistakenly", - "mistakently", "mistakenly", - "moderatedly", "moderately", - "moderateurs", "moderates", - "moderatorin", "moderation", - "modificaton", "modification", - "moisterizer", "moisturizer", - "moistruizer", "moisturizer", - "moisturizng", "moisturizing", - "moisturizor", "moisturizer", - "moistutizer", "moisturizer", - "moisutrizer", "moisturizer", - "moleculaire", "molecular", - "molestating", "molestation", - "moleststion", "molestation", - "momemtarily", "momentarily", - "momentairly", "momentarily", - "momentaraly", "momentarily", - 
"momentarely", "momentarily", - "momenterily", "momentarily", - "monestaries", "monasteries", - "monitoreada", "monitored", - "monitoreado", "monitored", - "monogameous", "monogamous", - "monolitihic", "monolithic", - "monopollies", "monopolies", - "monstorsity", "monstrosity", - "monstrasity", "monstrosity", - "monstrisity", "monstrosity", - "monstrocity", "monstrosity", - "monstrosoty", "monstrosity", - "monstrostiy", "monstrosity", - "monumentaal", "monumental", - "monumentais", "monuments", - "monumentals", "monuments", - "monumentous", "monuments", - "mositurizer", "moisturizer", - "mosntrosity", "monstrosity", - "motehrboard", "motherboard", - "mothebroard", "motherboards", - "motherbaord", "motherboard", - "motherboads", "motherboards", - "motherboars", "motherboards", - "motherborad", "motherboard", - "motherbords", "motherboards", - "motherobard", "motherboards", - "mothreboard", "motherboards", - "motivatinal", "motivational", - "motorcicles", "motorcycles", - "motorcylces", "motorcycles", - "mouthpeices", "mouthpiece", - "mulitplayer", "multiplayer", - "mulitplying", "multiplying", - "multipalyer", "multiplayer", - "multiplater", "multiplayer", - "multiplebgs", "multiples", - "multipleies", "multiples", - "multitaskng", "multitasking", - "multitudine", "multitude", - "multiverese", "multiverse", - "multyplayer", "multiplayer", - "multyplying", "multiplying", - "muncipality", "municipality", - "murdererous", "murderers", - "musicallity", "musically", - "mutliplayer", "multiplayer", - "mutliplying", "multiplying", - "mysterieuse", "mysteries", - "mysteriosly", "mysteriously", - "mysteriouly", "mysteriously", - "mysteriousy", "mysteriously", - "napoleonian", "napoleonic", - "narcisissim", "narcissism", - "narcisissts", "narcissist", - "narcisscism", "narcissism", - "narcisscist", "narcissist", - "narcissisim", "narcissism", - "narcississm", "narcissism", - "narcississt", "narcissist", - "narcissistc", "narcissistic", - "narcissitic", "narcissistic", - "narcisssism", "narcissism", - "narcisssist", "narcissist", - "narcissstic", "narcissist", - "natioanlist", "nationalist", - "nationailty", "nationality", - "nationalesl", "nationals", - "nationalisn", "nationals", - "nationalite", "nationalist", - "nationalits", "nationalist", - "nationalizm", "nationalism", - "nationalsim", "nationalism", - "neccesarily", "necessarily", - "necessairly", "necessarily", - "necessaties", "necessities", - "necesseraly", "necessarily", - "necesserily", "necessarily", - "necessiates", "necessities", - "necessitive", "necessities", - "neckbeardos", "neckbeards", - "neckbeardus", "neckbeards", - "necormancer", "necromancer", - "necromamcer", "necromancer", - "necromanser", "necromancer", - "necromencer", "necromancer", - "needlessley", "needlessly", - "negativeity", "negativity", - "negativelly", "negatively", - "negativitiy", "negativity", - "negiotating", "negotiating", - "negligiable", "negligible", - "negociating", "negotiating", - "negociation", "negotiation", - "negoitating", "negotiating", - "negoitation", "negotiation", - "negotiatied", "negotiate", - "negotiative", "negotiate", - "negotiatons", "negotiations", - "neigborhood", "neighborhood", - "neigbouring", "neighbouring", - "neighborhod", "neighborhood", - "neighbourgs", "neighbours", - "neighouring", "neighboring", - "nercomancer", "necromancer", - "nessasarily", "necessarily", - "neurologial", "neurological", - "neurosciene", "neuroscience", - "neutrallity", "neutrality", - "neverthelss", "nevertheless", - "neverthless", "nevertheless", - 
"newspapaers", "newspapers", - "newspappers", "newspapers", - "nieghboring", "neighboring", - "nightmarket", "nightmare", - "nonsencical", "nonsensical", - "nonsenscial", "nonsensical", - "nonsensicle", "nonsensical", - "normallized", "normalized", - "northwesten", "northwestern", - "nostalgisch", "nostalgic", - "noteworthly", "noteworthy", - "noticeabley", "noticeably", - "notificaton", "notification", - "notoriuosly", "notoriously", - "numericable", "numerical", - "nurtitional", "nutritional", - "nutricional", "nutritional", - "nutrutional", "nutritional", - "obamination", "abomination", - "obersvation", "observation", - "obilterated", "obliterated", - "objectivety", "objectivity", - "objectivify", "objectivity", - "objectivily", "objectivity", - "objectivley", "objectively", - "obliberated", "obliterated", - "obliderated", "obliterated", - "obligerated", "obliterated", - "oblitarated", "obliterated", - "obliteraded", "obliterated", - "obliterared", "obliterated", - "oblitirated", "obliterated", - "oblitorated", "obliterated", - "obliverated", "obliterated", - "observacion", "observation", - "observaiton", "observant", - "observasion", "observations", - "observating", "observation", - "observerats", "observers", - "obsessivley", "obsessive", - "obstruccion", "obstruction", - "obstruktion", "obstruction", - "obsturction", "obstruction", - "obversation", "observation", - "ocasionally", "occasionally", - "ocassionaly", "occasionally", - "occasionals", "occasions", - "occasionaly", "occasionally", - "occasionnal", "occasional", - "occassional", "occasional", - "occassioned", "occasioned", - "occurrances", "occurrences", - "offensivley", "offensively", - "offesnively", "offensively", - "officiallly", "officially", - "olbiterated", "obliterated", - "omniscienct", "omniscient", - "operacional", "operational", - "operasional", "operational", - "operationel", "operational", - "oppresssing", "oppressing", - "oppresssion", "oppression", - "opprotunity", "opportunity", - "optimisitic", "optimistic", - "optimizaton", "optimization", - "optmization", "optimization", - "orchestraed", "orchestrated", - "orchestrial", "orchestra", - "oreintation", "orientation", - "organisaton", "organisation", - "organiserad", "organised", - "organistion", "organisation", - "organizarea", "organizer", - "organizarem", "organizer", - "organizarme", "organizer", - "organizarte", "organizer", - "organiztion", "organization", - "oridinarily", "ordinarily", - "orientacion", "orientation", - "originially", "originally", - "originnally", "originally", - "origniality", "originality", - "ostensiably", "ostensibly", - "ostensibily", "ostensibly", - "outclasssed", "outclassed", - "outnunbered", "outnumbered", - "outperfroms", "outperform", - "outpreforms", "outperform", - "outrageosly", "outrageously", - "outrageouly", "outrageously", - "outragerous", "outrageous", - "outskirters", "outskirts", - "outsorucing", "outsourcing", - "outsourcade", "outsourced", - "outsoursing", "outsourcing", - "overbraking", "overbearing", - "overcapping", "overlapping", - "overcharing", "overarching", - "overclcoked", "overclocked", - "overclicked", "overclocked", - "overcloaked", "overclocked", - "overclocing", "overclocking", - "overclockig", "overclocking", - "overclocled", "overclocked", - "overcomeing", "overcoming", - "overcomming", "overcoming", - "overeaching", "overarching", - "overfapping", "overlapping", - "overheading", "overheating", - "overhooking", "overlooking", - "overhwelmed", "overwhelmed", - "overkapping", "overlapping", - 
"overklocked", "overclocked", - "overlapsing", "overlapping", - "overlcocked", "overclocked", - "overlcoking", "overlooking", - "overlooming", "overlooking", - "overloooked", "overlooked", - "overlordess", "overlords", - "overmapping", "overlapping", - "overpooling", "overlooking", - "overpovered", "overpowered", - "overpoweing", "overpowering", - "overreacing", "overreacting", - "overreactin", "overreaction", - "overreacton", "overreaction", - "overshaddow", "overshadowed", - "overshadowd", "overshadowed", - "overtapping", "overlapping", - "overthining", "overthinking", - "overthinkig", "overthinking", - "overvlocked", "overclocked", - "overwealmed", "overwhelmed", - "overwelming", "overwhelming", - "overwhelemd", "overwhelmed", - "overwhelimg", "overwhelm", - "overwheling", "overwhelming", - "overwhemled", "overwhelmed", - "overwhlemed", "overwhelmed", - "overwritted", "overwrite", - "pakistanais", "pakistani", - "pakistanezi", "pakistani", - "palceholder", "placeholder", - "palesitnian", "palestinians", - "palestenian", "palestinian", - "palestinain", "palestinians", - "palestinans", "palestinians", - "palestinier", "palestine", - "palistinian", "palestinian", - "palythrough", "playthrough", - "papanicalou", "papanicolaou", - "parachutage", "parachute", - "paragraphes", "paragraphs", - "paramedicks", "paramedics", - "paramedicos", "paramedics", - "parameteres", "parameters", - "paranthesis", "parenthesis", - "parapharsed", "paraphrase", - "paraprhased", "paraphrase", - "parasitisme", "parasites", - "parenthasis", "parenthesis", - "parenthesys", "parentheses", - "parenthises", "parenthesis", - "parenthisis", "parenthesis", - "parliamenty", "parliamentary", - "parntership", "partnership", - "parrallelly", "parallelly", - "partecipant", "participant", - "partecipate", "participate", - "parternship", "partnership", - "partiarchal", "patriarchal", - "particapate", "participate", - "particiapte", "participate", - "participait", "participant", - "participans", "participants", - "participare", "participate", - "participatd", "participant", - "participati", "participant", - "participats", "participant", - "participent", "participant", - "particpiate", "participated", - "particually", "particularly", - "particulaly", "particularly", - "particulary", "particularly", - "partnetship", "partnership", - "partonizing", "patronizing", - "passionatly", "passionately", - "passionetly", "passionately", - "passionnate", "passionate", - "passporters", "passports", - "pathologial", "pathological", - "patriarchia", "patriarchal", - "patriarcial", "patriarchal", - "patriarical", "patriarchal", - "patriotisch", "patriotic", - "patriotisim", "patriotism", - "patriottism", "patriotism", - "patronozing", "patronizing", - "peacefullly", "peacefully", - "pedestirans", "pedestrians", - "pedestrains", "pedestrians", - "pedophilies", "pedophile", - "pedophilles", "pedophile", - "penetracion", "penetration", - "penetrading", "penetrating", - "penetrarion", "penetration", - "penninsular", "peninsular", - "pennsylvnia", "pennsylvania", - "pepperocini", "pepperoni", - "percantages", "percentages", - "percautions", "precautions", - "percentille", "percentile", - "percpetions", "perceptions", - "percusssion", "percussion", - "perdicament", "predicament", - "perdictable", "predictable", - "perdictions", "predictions", - "perephirals", "peripherals", - "pereptually", "perpetually", - "perferences", "preferences", - "perfomrance", "performances", - "perforamnce", "performances", - "performaces", "performances", - "performacne", 
"performances", - "performanes", "performances", - "performanse", "performances", - "performence", "performance", - "performnace", "performances", - "perfromance", "performance", - "perhiperals", "peripherals", - "perihperals", "peripherals", - "periodicaly", "periodically", - "periperhals", "peripherals", - "periphereal", "peripheral", - "peripherial", "peripheral", - "periphirals", "peripherals", - "periphreals", "peripherals", - "periphrials", "peripherals", - "perjorative", "pejorative", - "perliminary", "preliminary", - "permamently", "permanently", - "permanantly", "permanently", - "permaturely", "prematurely", - "permenantly", "permanently", - "permenently", "permanently", - "perminantly", "permanently", - "perminently", "permanently", - "permisisons", "permissions", - "permissable", "permissible", - "permisssion", "permissions", - "pernamently", "permanently", - "perosnality", "personality", - "perparation", "preparation", - "perpatrated", "perpetrated", - "perpatrator", "perpetrator", - "perpatuated", "perpetuated", - "perpatuates", "perpetuates", - "perpertated", "perpetuated", - "perpertator", "perpetrators", - "perpetraded", "perpetrated", - "perpetrador", "perpetrator", - "perpetraron", "perpetrator", - "perpetrater", "perpetrator", - "perpetuaded", "perpetuated", - "perpetutate", "perpetuate", - "perpetuties", "perpetuates", - "perpitrated", "perpetrated", - "perpitrator", "perpetrator", - "perpretated", "perpetrated", - "perpretator", "perpetrators", - "perpsective", "perspective", - "perputrator", "perpetrator", - "perputually", "perpetually", - "perputuated", "perpetuated", - "perputuates", "perpetuates", - "perrogative", "prerogative", - "persceptive", "perspectives", - "persectuion", "persecution", - "persecucion", "persecution", - "persecusion", "persecution", - "persecutted", "persecuted", - "persepctive", "perspective", - "persicution", "persecution", - "persistance", "persistence", - "persistante", "persistent", - "persistense", "persistence", - "persistente", "persistence", - "personhoood", "personhood", - "perspecitve", "perspective", - "perspectief", "perspective", - "perspektive", "perspective", - "persuassion", "persuasion", - "persuassive", "persuasive", - "persucution", "persecution", - "persumption", "presumption", - "pertubation", "perturbation", - "pessimestic", "pessimistic", - "pharamcists", "pharmacist", - "phenomenona", "phenomena", - "philadelpha", "philadelphia", - "philadelpia", "philadelphia", - "philiphines", "philippines", - "philippenes", "philippines", - "philippenis", "philippines", - "philippides", "philippines", - "philippinas", "philippines", - "philippinos", "philippines", - "philisopher", "philosopher", - "phillipines", "philippines", - "philosipher", "philosopher", - "philosopers", "philosophers", - "philosophae", "philosopher", - "philosophia", "philosophical", - "philosopies", "philosophies", - "philosphies", "philosophies", - "philospoher", "philosopher", - "photograhed", "photographed", - "photograher", "photographer", - "photograhic", "photographic", - "photograhpy", "photography", - "photograped", "photographed", - "photograper", "photographer", - "photograpgh", "photographs", - "photograpic", "photographic", - "photogrpahs", "photographs", - "photogrpahy", "photography", - "physcedelic", "psychedelic", - "physciatric", "psychiatric", - "physcopaths", "psychopaths", - "piankillers", "painkillers", - "pilgrimmage", "pilgrimage", - "pitchforcks", "pitchforks", - "pitchforkes", "pitchforks", - "plaestinian", "palestinian", - 
"plagiariasm", "plagiarism", - "planeswaker", "planeswalker", - "planeswaler", "planeswalker", - "planeswalkr", "planeswalker", - "platfromers", "platformer", - "playhtrough", "playthrough", - "playthorugh", "playthrough", - "playthourgh", "playthrough", - "playthroguh", "playthroughs", - "playthrougs", "playthroughs", - "playthrouhg", "playthroughs", - "playthtough", "playthrough", - "playtrhough", "playthrough", - "ploretariat", "proletariat", - "policitally", "politically", - "policitians", "politicians", - "politicains", "politicians", - "politicanti", "politician", - "politiciens", "politicians", - "politiicans", "politician", - "polititians", "politicians", - "polyphonyic", "polyphonic", - "pomegranite", "pomegranate", - "popluations", "populations", - "poportional", "proportional", - "popoulation", "population", - "porjectiles", "projectiles", - "porletariat", "proletariat", - "pornagraphy", "pornography", - "pornograghy", "pornography", - "pornograhpy", "pornography", - "pornograpgy", "pornography", - "pornogrophy", "pornography", - "pornogrpahy", "pornography", - "porportions", "proportions", - "portestants", "protestants", - "portuguease", "portuguese", - "portuguesse", "portuguese", - "positionial", "positional", - "positionnal", "positional", - "positionned", "positioned", - "positiveity", "positivity", - "positiviely", "positively", - "positivisme", "positives", - "positivisty", "positivity", - "positivitey", "positivity", - "positivitiy", "positivity", - "possesseurs", "possesses", - "possesssion", "possessions", - "possestions", "possessions", - "possiblilty", "possibility", - "potencially", "potentially", - "potentailly", "potentially", - "powerhourse", "powerhouse", - "powerlifing", "powerlifting", - "powerliftng", "powerlifting", - "pracitcally", "practically", - "practicarlo", "practical", - "practioners", "practitioners", - "practitions", "practitioners", - "pragmatisch", "pragmatic", - "precausions", "precautions", - "precedessor", "predecessor", - "precendence", "precedence", - "precentages", "percentages", - "preconceved", "preconceived", - "preconcieve", "preconceived", - "precuations", "precautions", - "predacessor", "predecessor", - "predecesser", "predecessor", - "predections", "predictions", - "predescesor", "predecessors", - "predesessor", "predecessors", - "predesposed", "predisposed", - "predessecor", "predecessor", - "predicatble", "predictable", - "predicement", "predicament", - "predicessor", "predecessor", - "prediciment", "predicament", - "predicitons", "predictions", - "predictible", "predictable", - "predictious", "predictions", - "predictment", "predicament", - "predisposte", "predisposed", - "predocessor", "predecessor", - "preferabbly", "preferably", - "preferabely", "preferable", - "preferabley", "preferably", - "preferablly", "preferably", - "preferances", "preferences", - "preferenser", "preferences", - "preferental", "preferential", - "preferentes", "preferences", - "preferrably", "preferably", - "preferrring", "preferring", - "preformance", "performance", - "pregnanices", "pregnancies", - "pregnencies", "pregnancies", - "pregorative", "prerogative", - "preipherals", "peripherals", - "prejudicies", "prejudice", - "preleminary", "preliminary", - "prelimanary", "preliminary", - "prelimenary", "preliminary", - "premanently", "permanently", - "prematuraly", "prematurely", - "prematurily", "prematurely", - "prematurley", "prematurely", - "premilinary", "preliminary", - "premissible", "permissible", - "premissions", "permissions", - "preorderded", 
"preordered", - "preorderers", "preorders", - "preparacion", "preparation", - "preperation", "preparation", - "prepetrated", "perpetrated", - "prepetrator", "perpetrator", - "prepetually", "perpetually", - "prepetuated", "perpetuated", - "prepetuates", "perpetuates", - "preporation", "preparation", - "preposterus", "preposterous", - "prerequesit", "prerequisite", - "prerequiste", "prerequisite", - "prerequites", "prerequisite", - "prerogitive", "prerogative", - "prerogotive", "prerogative", - "prescripton", "prescription", - "presecution", "persecution", - "presedintia", "presidential", - "presentaion", "presentation", - "presentatin", "presentations", - "preservaton", "preservation", - "preservered", "preserved", - "presidencey", "presidency", - "presidental", "presidential", - "presidentcy", "presidency", - "presistence", "persistence", - "presitgious", "prestigious", - "presitigous", "prestigious", - "presomption", "presumption", - "prespective", "perspective", - "pressureing", "pressuring", - "prestegious", "prestigious", - "prestigeous", "prestigious", - "prestigieus", "prestigious", - "prestigiosa", "prestigious", - "prestigiose", "prestigious", - "prestigiosi", "prestigious", - "prestigioso", "prestigious", - "prestiguous", "prestigious", - "presumabely", "presumably", - "presumabley", "presumably", - "presumptous", "presumptuous", - "presumptuos", "presumptuous", - "pretencious", "pretentious", - "pretendendo", "pretended", - "pretensious", "pretentious", - "pretentieus", "pretentious", - "prevailaing", "prevailing", - "prevailling", "prevailing", - "preventitve", "preventative", - "preventivno", "prevention", - "primatively", "primitively", - "princessses", "princesses", - "principales", "principles", - "principalis", "principals", - "principielt", "principle", - "privatizied", "privatized", - "priveledges", "privileges", - "privelleges", "privileges", - "privilegeds", "privileges", - "privilegied", "privileged", - "privilegien", "privilege", - "privilegier", "privilege", - "privilegies", "privilege", - "proactivley", "proactive", - "probabilaty", "probability", - "probabilite", "probabilities", - "probalibity", "probability", - "probelmatic", "problematic", - "problamatic", "problematic", - "problimatic", "problematic", - "problomatic", "problematic", - "proccedings", "proceedings", - "proccessing", "processing", - "proceddings", "proceedings", - "procedureal", "procedural", - "procedurial", "procedural", - "procedurile", "procedure", - "processesor", "processors", - "processeurs", "processes", - "processsors", "processors", - "procrastion", "procreation", - "procriation", "procreation", - "prodcutions", "productions", - "prodictions", "productions", - "producerats", "producers", - "producitons", "productions", - "productioin", "productions", - "productivos", "productions", - "productivty", "productivity", - "produktions", "productions", - "professinal", "professional", - "professionl", "professionals", - "professoras", "professors", - "professores", "professors", - "professorin", "profession", - "professsion", "professions", - "proficiancy", "proficiency", - "proficienct", "proficient", - "proficienty", "proficiency", - "proficinecy", "proficiency", - "profitabile", "profitable", - "progerssion", "progressions", - "progerssive", "progressives", - "programable", "programmable", - "programmare", "programmer", - "programmars", "programmers", - "programmate", "programme", - "programmets", "programmers", - "programmeur", "programmer", - "programmier", "programmer", - "programmmed", 
"programme", - "programmmer", "programme", - "progresison", "progressions", - "progressers", "progresses", - "progressief", "progressive", - "progressino", "progressions", - "progressivo", "progression", - "progressoin", "progressions", - "progressvie", "progressives", - "prohabition", "prohibition", - "prohibation", "prohibition", - "prohibicion", "prohibition", - "prohibiteds", "prohibits", - "prohibitied", "prohibited", - "prohibitifs", "prohibits", - "prohibitivo", "prohibition", - "prohibitons", "prohibits", - "prohibitted", "prohibited", - "projecticle", "projectile", - "projectives", "projectiles", - "projectlies", "projectiles", - "prolateriat", "proletariat", - "proletariet", "proletariat", - "proletariot", "proletariat", - "proletaryat", "proletariat", - "proleteriat", "proletariat", - "prolitariat", "proletariat", - "prologomena", "prolegomena", - "promenantly", "prominently", - "promenently", "prominently", - "prometheius", "prometheus", - "prometheous", "prometheus", - "promethesus", "prometheus", - "prometheyus", "prometheus", - "promimently", "prominently", - "prominantly", "prominently", - "prominately", "prominently", - "promiscious", "promiscuous", - "promocional", "promotional", - "promsicuous", "promiscuous", - "pronography", "pornography", - "pronoucning", "pronouncing", - "pronounched", "pronounced", - "pronunciato", "pronunciation", - "propaganada", "propaganda", - "properitary", "proprietary", - "propertiary", "proprietary", - "propertions", "proportions", - "prophechies", "prophecies", - "propiertary", "proprietary", - "propogation", "propagation", - "proponenets", "proponents", - "proponentes", "proponents", - "proporition", "proposition", - "proportians", "proportions", - "proportinal", "proportional", - "proposicion", "proposition", - "propositivo", "proposition", - "propostions", "proportions", - "propreitary", "proprietary", - "propriatary", "proprietary", - "propriatery", "proprietary", - "propriatory", "proprietary", - "proprietery", "proprietary", - "proprietory", "proprietary", - "propriotary", "proprietary", - "proprotions", "proportions", - "propsective", "prospective", - "propulstion", "propulsion", - "prosectuion", "prosecution", - "prosectuors", "prosecutors", - "prosecuters", "prosecutors", - "prosicution", "prosecution", - "prosocution", "prosecution", - "prosperious", "prosperous", - "prospertity", "prosperity", - "prospettive", "prospective", - "prostethics", "prosthetic", - "prosthethic", "prosthetic", - "prostitites", "prostitutes", - "prostitiute", "prostitute", - "prostituate", "prostitute", - "prostitudes", "prostitutes", - "prostituees", "prostitutes", - "prostituion", "prostitution", - "prostitures", "prostitutes", - "prostitutas", "prostitutes", - "prostitutie", "prostitute", - "prostitutin", "prostitution", - "prostitutke", "prostitutes", - "prostituton", "prostitution", - "prostitutos", "prostitutes", - "protability", "portability", - "protaganist", "protagonist", - "protaginist", "protagonist", - "protagnoist", "protagonist", - "protagoinst", "protagonists", - "protagonits", "protagonists", - "protagonsit", "protagonists", - "protectings", "protections", - "protectoras", "protectors", - "protectores", "protectors", - "protectrons", "protections", - "protelariat", "proletariat", - "protestents", "protestants", - "protistants", "protestants", - "protoganist", "protagonist", - "protogonist", "protagonist", - "protostants", "protestants", - "protototype", "prototype", - "provacative", "provocative", - "provacotive", "provocative", - 
"provicative", "provocative", - "providencie", "providence", - "provinciaal", "provincial", - "provinicial", "provincial", - "provisiones", "provisions", - "provoactive", "provocative", - "provocatief", "provocative", - "provocitive", "provocative", - "provocotive", "provocative", - "provokative", "provocative", - "pscyhedelic", "psychedelic", - "pscyhiatric", "psychiatric", - "pscyhopaths", "psychopaths", - "pshyciatric", "psychiatric", - "pshycopaths", "psychopaths", - "psychaitric", "psychiatric", - "psychedilic", "psychedelic", - "psychedleic", "psychedelics", - "psychiatist", "psychiatrist", - "psychidelic", "psychedelic", - "psychodelic", "psychedelic", - "psychopants", "psychopaths", - "psychopatch", "psychopath", - "psychopatic", "psychopathic", - "psychotisch", "psychotic", - "psychriatic", "psychiatric", - "publikation", "publication", - "punctiation", "punctuation", - "puncutation", "punctuation", - "punshiments", "punishments", - "punsihments", "punishments", - "purchaseing", "purchasing", - "purchashing", "purchasing", - "purposefuly", "purposefully", - "pyschedelic", "psychedelic", - "pyschiatric", "psychiatric", - "pyschopaths", "psychopaths", - "qaurterback", "quarterback", - "qualificato", "qualification", - "qualifieres", "qualifiers", - "quantitaive", "quantitative", - "quantitatve", "quantitative", - "quantitites", "quantities", - "quantitties", "quantities", - "quarantaine", "quarantine", - "quarantenni", "quarantine", - "quartercask", "quarterbacks", - "quesitoning", "questioning", - "questionned", "questioned", - "questonable", "questionable", - "radiaoctive", "radioactive", - "radioactice", "radioactive", - "radioactief", "radioactive", - "radioaktive", "radioactive", - "radiocative", "radioactive", - "raidoactive", "radioactive", - "reaccurring", "recurring", - "reactionair", "reactionary", - "realibility", "reliability", - "realistisch", "realistic", - "reaserchers", "researchers", - "reaserching", "researching", - "reasonabley", "reasonably", - "reasonablly", "reasonably", - "reassureing", "reassuring", - "reassurring", "reassuring", - "rebuildling", "rebuilding", - "rebuplicans", "republicans", - "reccomended", "recommended", - "receptionst", "receptionist", - "recgonition", "recognition", - "recgonizing", "recognizing", - "rechargable", "rechargeable", - "recipientes", "recipients", - "reciporcate", "reciprocate", - "recipricate", "reciprocate", - "reciprocant", "reciprocate", - "reciprocite", "reciprocate", - "recivership", "receivership", - "reclutantly", "reluctantly", - "recognicing", "recognizing", - "recognision", "recognition", - "recomending", "recommending", - "recommandes", "recommends", - "recommendes", "recommends", - "recommented", "recommended", - "reconcilled", "reconcile", - "recongition", "recognition", - "recongizing", "recognizing", - "reconsidder", "reconsider", - "recrational", "recreational", - "recrutiment", "recruitment", - "rectangluar", "rectangular", - "rectangualr", "rectangular", - "rectengular", "rectangular", - "recuritment", "recruitment", - "redundantcy", "redundancy", - "reevalulate", "reevaluate", - "reevalutate", "reevaluate", - "reevaulated", "reevaluate", - "refelctions", "reflections", - "referancing", "referencing", - "refereneced", "referenced", - "refereneces", "references", - "referincing", "referencing", - "referrences", "references", - "reflectivos", "reflections", - "refreshener", "refresher", - "refrubished", "refurbished", - "refubrished", "refurbished", - "refurbushed", "refurbished", - "regeneratin", 
"regeneration", - "regeneraton", "regeneration", - "registerdns", "registers", - "registeries", "registers", - "registerred", "registered", - "registraion", "registration", - "regocnition", "recognition", - "regresssion", "regression", - "regresssive", "regressive", - "regualtions", "regulations", - "regulationg", "regulating", - "regulatiors", "regulators", - "reinassance", "renaissance", - "reinforcemt", "reinforcement", - "reinfornced", "reinforced", - "reinitalise", "reinitialise", - "reinitalize", "reinitialize", - "reinstaling", "reinstalling", - "reinstallng", "reinstalling", - "reisntalled", "reinstalled", - "relaibility", "reliability", - "relatiation", "retaliation", - "relationshp", "relationships", - "relativiser", "relatives", - "relativisme", "relatives", - "relativitiy", "relativity", - "relativitly", "relativity", - "relcutantly", "reluctantly", - "relentlesly", "relentlessly", - "relentlessy", "relentlessly", - "relevations", "revelations", - "relfections", "reflections", - "religeously", "religiously", - "religionens", "religions", - "religioners", "religions", - "relpacement", "replacement", - "reluctently", "reluctantly", - "remarkabley", "remarkably", - "remarkablly", "remarkably", - "remasterred", "remastered", - "remembrence", "remembrance", - "reminescent", "reminiscent", - "reminicient", "reminiscent", - "reminiscant", "reminiscent", - "reminiscint", "reminiscent", - "reminscient", "reminiscent", - "reminsicent", "reminiscent", - "renaiisance", "renaissance", - "renaiscance", "renaissance", - "renaissanse", "renaissance", - "renaissence", "renaissance", - "renassaince", "renaissance", - "renassiance", "renaissance", - "reniassance", "renaissance", - "rennovating", "renovating", - "rennovation", "renovation", - "repalcement", "replacement", - "repbulicans", "republicans", - "repeateadly", "repeatedly", - "repectively", "respectively", - "repersented", "represented", - "replacemnet", "replacements", - "replacemnts", "replacements", - "repleacable", "replaceable", - "repositiory", "repository", - "representas", "represents", - "representes", "represents", - "represssion", "repression", - "reproducion", "reproduction", - "reproducive", "reproductive", - "repsectable", "respectable", - "repsonsible", "responsible", - "repsonsibly", "responsibly", - "republcians", "republicans", - "republician", "republican", - "republicons", "republicans", - "repuglicans", "republicans", - "requeriment", "requirement", - "requierment", "requirements", - "resemblence", "resemblance", - "resemblense", "resembles", - "reserachers", "researchers", - "reseraching", "researching", - "resgination", "resignation", - "residencial", "residential", - "residentail", "residential", - "residentual", "residential", - "resignacion", "resignation", - "resignating", "resignation", - "resignement", "resignment", - "resignition", "resignation", - "resintalled", "reinstalled", - "resistansen", "resistances", - "resistanses", "resistances", - "resistences", "resistances", - "resistnaces", "resistances", - "resoltuions", "resolutions", - "resotration", "restoration", - "resoultions", "resolutions", - "respecatble", "respectable", - "respectabil", "respectable", - "respectfuly", "respectfully", - "respectible", "respectable", - "respectivly", "respectively", - "respectuful", "respectful", - "respektable", "respectable", - "resperatory", "respiratory", - "resperitory", "respiratory", - "respiritory", "respiratory", - "respitatory", "respiratory", - "responcible", "responsible", - "responcibly", "responsibly", - 
"respondendo", "responded", - "responisble", "responsible", - "responisbly", "responsibly", - "responsable", "responsible", - "responsably", "responsibly", - "responsbile", "responsible", - "responsbily", "responsibly", - "responsibel", "responsibly", - "responsibil", "responsibly", - "responsivle", "responsive", - "resporatory", "respiratory", - "respository", "repository", - "respriatory", "respiratory", - "ressembling", "resembling", - "ressurected", "resurrected", - "restaraunts", "restaurants", - "restaruants", "restaurants", - "restauraunt", "restaurant", - "restaurents", "restaurants", - "resteraunts", "restaurants", - "restirction", "restriction", - "restorarion", "restoration", - "restorating", "restoration", - "restrainted", "restrained", - "restrective", "restrictive", - "restriccion", "restriction", - "restricitng", "restricting", - "restriciton", "restrictions", - "restricitve", "restrictive", - "restricteds", "restricts", - "restricters", "restricts", - "restrictied", "restrictive", - "restrictifs", "restricts", - "restrictins", "restricts", - "restrictios", "restricts", - "restrictivo", "restriction", - "restrictons", "restricts", - "restriktion", "restriction", - "restriktive", "restrictive", - "restrittive", "restrictive", - "restructing", "restricting", - "restruction", "restriction", - "restuarants", "restaurants", - "resturaunts", "restaurants", - "resurecting", "resurrecting", - "resurrecion", "resurrection", - "retailation", "retaliation", - "retalitated", "retaliated", - "retardathon", "retardation", - "retardating", "retardation", - "retardatron", "retardation", - "retartation", "retardation", - "retirbution", "retribution", - "retrebution", "retribution", - "retribucion", "retribution", - "retribuiton", "retribution", - "retributivo", "retribution", - "retribvtion", "retribution", - "retrobution", "retribution", - "retrubution", "retribution", - "revealtions", "revelations", - "revelaitons", "revelations", - "revolations", "revolutions", - "revoultions", "revolutions", - "ridiculious", "ridiculous", - "ridiculosly", "ridiculously", - "ridiculouly", "ridiculously", - "ridiculousy", "ridiculously", - "rightfullly", "rightfully", - "rolepalying", "roleplaying", - "romanticaly", "romantically", - "roundabaout", "roundabout", - "roundabount", "roundabout", - "rudimentery", "rudimentary", - "rudimentory", "rudimentary", - "ruidmentary", "rudimentary", - "sacrifacing", "sacrificing", - "sacrificare", "sacrifice", - "sacrificied", "sacrifice", - "sacrificies", "sacrifice", - "sacrifieced", "sacrificed", - "sacrifising", "sacrificing", - "sacrifizing", "sacrificing", - "salughtered", "slaughtered", - "sanctionned", "sanctioned", - "sarcastisch", "sarcastic", - "saskatchewn", "saskatchewan", - "saskatchwan", "saskatchewan", - "satisfacion", "satisfaction", - "satisfacory", "satisfactory", - "scandanavia", "scandinavia", - "scandanivia", "scandinavian", - "scandenavia", "scandinavia", - "scandianvia", "scandinavian", - "scandimania", "scandinavia", - "scandinaiva", "scandinavian", - "scandinavan", "scandinavian", - "scandivania", "scandinavian", - "scandonavia", "scandinavia", - "scarificing", "sacrificing", - "scheduleing", "scheduling", - "schedulling", "scheduling", - "schoalrship", "scholarships", - "scholarhips", "scholarship", - "scholarstic", "scholastic", - "scholership", "scholarship", - "scholorship", "scholarship", - "scientiests", "scientists", - "scnadinavia", "scandinavia", - "scrambleing", "scrambling", - "screenshoot", "screenshot", - "seamlessley", 
"seamlessly", - "sedentarity", "sedentary", - "seflishness", "selfishness", - "segergation", "segregation", - "segragation", "segregation", - "segregacion", "segregation", - "segretation", "segregation", - "segrigation", "segregation", - "selectivley", "selectively", - "selfeshness", "selfishness", - "senitmental", "sentimental", - "sensacional", "sensational", - "sensasional", "sensational", - "sensationel", "sensational", - "sensetional", "sensational", - "sensitivety", "sensitivity", - "sentamental", "sentimental", - "sentemental", "sentimental", - "sentenceing", "sentencing", - "sentimentos", "sentiments", - "sentimentul", "sentimental", - "separatedly", "separately", - "separatelly", "separately", - "separatisme", "separates", - "separatiste", "separates", - "sepculating", "speculating", - "serivceable", "serviceable", - "serviciable", "serviceable", - "settelement", "settlement", - "settelments", "settlements", - "settlemetns", "settlements", - "sexualizied", "sexualized", - "shakeapeare", "shakespeare", - "shakepseare", "shakespeare", - "shakesphere", "shakespeare", - "shanenigans", "shenanigans", - "shareholdes", "shareholders", - "sharpeneing", "sharpening", - "sharpenning", "sharpening", - "shatterling", "shattering", - "shatterring", "shattering", - "sheakspeare", "shakespeare", - "shenadigans", "shenanigans", - "shenanagans", "shenanigans", - "shenanagins", "shenanigans", - "shenanegans", "shenanigans", - "shenanegins", "shenanigans", - "shenangians", "shenanigans", - "shenanigens", "shenanigans", - "shenanigins", "shenanigans", - "shenenigans", "shenanigans", - "sheninigans", "shenanigans", - "shennaigans", "shenanigans", - "shortenning", "shortening", - "shortenting", "shortening", - "signficiant", "significant", - "signifantly", "significantly", - "significane", "significance", - "significato", "significant", - "signifigant", "significant", - "signifikant", "significant", - "signitories", "signatories", - "signularity", "singularity", - "similarites", "similarities", - "similarlity", "similarity", - "similiarity", "similarity", - "simluations", "simulations", - "simplefying", "simplifying", - "simplicitly", "simplicity", - "simplifiing", "simplifying", - "simplisitic", "simplistic", - "simplyifing", "simplifying", - "simualtions", "simulations", - "simulatious", "simulations", - "simultaneos", "simultaneous", - "simultaneus", "simultaneous", - "simultanous", "simultaneous", - "singluarity", "singularity", - "singualrity", "singularity", - "singulairty", "singularity", - "singularily", "singularity", - "sitautional", "situational", - "situacional", "situational", - "situationly", "situational", - "siutational", "situational", - "skatebaords", "skateboard", - "skateboader", "skateboard", - "skepticisim", "skepticism", - "skillshoots", "skillshots", - "skillshosts", "skillshots", - "slaugthered", "slaughtered", - "slefishness", "selfishness", - "sluaghtered", "slaughtered", - "smarthpones", "smartphones", - "snowboaring", "snowboarding", - "snowbolling", "snowballing", - "snowfalling", "snowballing", - "socailizing", "socializing", - "socialicing", "socializing", - "socialistes", "socialists", - "socialistos", "socialists", - "socializare", "socialize", - "sociapathic", "sociopathic", - "sociologial", "sociological", - "sociopathes", "sociopaths", - "sociopathis", "sociopaths", - "sociophatic", "sociopathic", - "solidariety", "solidarity", - "somethingis", "somethings", - "sorrounding", "surrounding", - "soundtrakcs", "soundtracks", - "southamtpon", "southampton", - 
"southanpton", "southampton", - "southapmton", "southampton", - "southernese", "southerners", - "southerness", "southerners", - "southernest", "southerners", - "southernors", "southerners", - "southmapton", "southampton", - "southtampon", "southampton", - "soveregnity", "sovereignty", - "sovereighty", "sovereignty", - "sovereingty", "sovereignty", - "sovereinity", "sovereignty", - "soveriegnty", "sovereignty", - "soveriengty", "sovereignty", - "soverignity", "sovereignty", - "specailists", "specialists", - "specailized", "specialized", - "specailizes", "specializes", - "specatcular", "spectacular", - "specialiced", "specialized", - "specialices", "specializes", - "specialites", "specializes", - "speciallist", "specialist", - "speciallity", "specially", - "speciallize", "specialize", - "specialzied", "specialized", - "specifcally", "specifically", - "specificaly", "specifically", - "specificato", "specification", - "specificies", "specifics", - "specifiying", "specifying", - "specilaized", "specialize", - "speciliazed", "specialize", - "spectatores", "spectators", - "spectatular", "spectacular", - "spectauclar", "spectacular", - "spectaulars", "spectaculars", - "spectecular", "spectacular", - "specualting", "speculating", - "specualtion", "speculation", - "specualtive", "speculative", - "specularite", "speculative", - "speculaties", "speculative", - "spiritualiy", "spiritually", - "spiritualty", "spirituality", - "spirituella", "spiritually", - "spirtiually", "spiritually", - "spirutually", "spiritually", - "spitirually", "spiritually", - "sponatenous", "spontaneous", - "sponatneous", "spontaneous", - "sponsership", "sponsorship", - "sponsorhips", "sponsorship", - "sponsorhsip", "sponsorship", - "sponsorshop", "sponsorship", - "spontaenous", "spontaneous", - "spontainous", "spontaneous", - "spontaneuos", "spontaneous", - "spontanious", "spontaneous", - "sponteanous", "spontaneous", - "sponteneous", "spontaneous", - "spreadhseet", "spreadsheet", - "spreadsheat", "spreadsheet", - "spreadshets", "spreadsheets", - "spreedsheet", "spreadsheet", - "springfeild", "springfield", - "springfiled", "springfield", - "sprinklered", "sprinkled", - "squirrelies", "squirrels", - "squirrelius", "squirrels", - "stabilizare", "stabilize", - "stabilizied", "stabilize", - "stabilizier", "stabilize", - "stabilizies", "stabilize", - "staggerring", "staggering", - "staggerwing", "staggering", - "stationairy", "stationary", - "stationerad", "stationed", - "stationnary", "stationary", - "statisitcal", "statistical", - "statisticly", "statistical", - "statistisch", "statistics", - "statsitical", "statistical", - "stereotpyes", "stereotypes", - "stereotying", "stereotyping", - "steriotypes", "stereotypes", - "steroetypes", "stereotypes", - "steryotypes", "stereotypes", - "stimluating", "stimulating", - "stimualting", "stimulating", - "stimualtion", "stimulation", - "stimulantes", "stimulants", - "stockpilled", "stockpile", - "stormfrount", "stormfront", - "storyteling", "storytelling", - "straightden", "straightened", - "straightend", "straightened", - "straightmen", "straighten", - "straightned", "straightened", - "straightner", "straighten", - "strangeshit", "strangest", - "strategisch", "strategic", - "strategiske", "strategies", - "strawberies", "strawberries", - "strawberrry", "strawberry", - "strawbrerry", "strawberry", - "strenghened", "strengthened", - "strenghtend", "strengthen", - "strenghtens", "strengthen", - "strengtened", "strengthened", - "structurels", "structures", - "strugglebus", "struggles", - 
"struggleing", "struggling", - "stubborness", "stubbornness", - "stutterring", "stuttering", - "subcatagory", "subcategory", - "subconscius", "subconscious", - "subconscous", "subconscious", - "subisdizing", "subsidizing", - "subjectivly", "subjectively", - "submergered", "submerged", - "submisisons", "submissions", - "subredddits", "subreddits", - "subscirbers", "subscribers", - "subscribbed", "subscribe", - "subscribber", "subscriber", - "subscriping", "subscribing", - "subscriptin", "subscriptions", - "subscripton", "subscription", - "subsequenty", "subsequently", - "subsidiezed", "subsidized", - "subsidizied", "subsidized", - "subsidizies", "subsidize", - "subsiziding", "subsidizing", - "subsquently", "subsequently", - "subsrcibers", "subscribers", - "substancial", "substantial", - "substansial", "substantial", - "substansive", "substantive", - "substantied", "substantive", - "substanties", "substantive", - "substential", "substantial", - "substitiute", "substitute", - "substituded", "substituted", - "substitudes", "substitutes", - "substituion", "substitution", - "substitures", "substitutes", - "substitutie", "substitutes", - "substitutos", "substitutes", - "substitutue", "substitutes", - "substracted", "subtracted", - "suburburban", "suburban", - "succesfully", "successfully", - "successeurs", "successes", - "successfull", "successful", - "successfuly", "successfully", - "successsion", "succession", - "successully", "successfully", - "succsesfull", "successfully", - "sucessfully", "successfully", - "sucseptible", "susceptible", - "sufficently", "sufficiently", - "suggestieve", "suggestive", - "sumbissions", "submissions", - "sunglassses", "sunglasses", - "superceeded", "superseded", - "superficiel", "superficial", - "superfulous", "superfluous", - "superhereos", "superhero", - "superifical", "superficial", - "superiorest", "superiors", - "supermacist", "supremacist", - "supermakert", "supermarkets", - "supermakret", "supermarkets", - "supermakter", "supermarkets", - "supermarkts", "supermarkets", - "supermaster", "supermarkets", - "supernatual", "supernatural", - "supersition", "supervision", - "superstiton", "superstition", - "supervisers", "supervisors", - "supervisior", "supervisor", - "suplimented", "supplemented", - "supplaments", "supplements", - "supplemetal", "supplemental", - "supporteurs", "supporters", - "supposedely", "supposedly", - "supposidely", "supposedly", - "supposingly", "supposedly", - "suppresions", "suppression", - "suppresssor", "suppressor", - "supramacist", "supremacist", - "supremacits", "supremacist", - "supremasist", "supremacist", - "supremicist", "supremacist", - "suprimacist", "supremacist", - "suprisingly", "surprisingly", - "suprizingly", "surprisingly", - "suroundings", "surroundings", - "surpemacist", "supremacist", - "surprisinly", "surprisingly", - "surreptious", "surreptitious", - "surroundign", "surroundings", - "surroundigs", "surrounds", - "surroundins", "surrounds", - "surroundngs", "surrounds", - "surveilence", "surveillance", - "survivabily", "survivability", - "susbtantial", "substantial", - "susbtantive", "substantive", - "suscepitble", "susceptible", - "susceptable", "susceptible", - "suscpetible", "susceptible", - "susecptible", "susceptible", - "suspectible", "susceptible", - "suspiciosly", "suspiciously", - "suspiciouly", "suspiciously", - "suspiciouns", "suspicion", - "suspicision", "suspicions", - "suspicisons", "suspicions", - "sustainible", "sustainable", - "switerzland", "switzerland", - "switserland", "switzerland", - 
"switzlerand", "switzerland", - "swizterland", "switzerland", - "swtizerland", "switzerland", - "symapthetic", "sympathetic", - "symmertical", "symmetrical", - "sympathatic", "sympathetic", - "sympathiers", "sympathizers", - "sympathsize", "sympathize", - "sympethetic", "sympathetic", - "symphatetic", "sympathetic", - "symphatized", "sympathize", - "symphatizer", "sympathizers", - "symphatizes", "sympathize", - "sympothetic", "sympathetic", - "synthesasia", "synthesis", - "synthesesia", "synthesis", - "sypmathetic", "sympathetic", - "tabelspoons", "tablespoons", - "tablepsoons", "tablespoons", - "tablespooon", "tablespoon", - "tablesppons", "tablespoons", - "tailgateing", "tailgating", - "tailgatting", "tailgating", - "tangentialy", "tangentially", - "techincally", "technically", - "techincians", "technicians", - "techiniques", "techniques", - "techncially", "technically", - "technicalty", "technicality", - "technichian", "technician", - "technicials", "technicians", - "techniciens", "technicians", - "technitians", "technicians", - "technnology", "technology", - "technologia", "technological", - "techticians", "technicians", - "teleportato", "teleportation", - "teleportion", "teleporting", - "teleproting", "teleporting", - "temeprature", "temperature", - "temparament", "temperament", - "temparature", "temperature", - "temparement", "temperament", - "tempearture", "temperatures", - "temperamant", "temperament", - "temperarily", "temporarily", - "temperatues", "temperatures", - "temperaturs", "temperatures", - "temperatuur", "temperature", - "temperement", "temperament", - "tempermeant", "temperament", - "tempertaure", "temperature", - "temporairly", "temporarily", - "temporaraly", "temporarily", - "temporarity", "temporarily", - "tempreature", "temperature", - "temproarily", "temporarily", - "tempurature", "temperature", - "tepmorarily", "temporarily", - "termanology", "terminology", - "terminacion", "termination", - "terminaison", "termination", - "terminalogy", "terminology", - "terminatior", "terminator", - "terminatorn", "termination", - "terminilogy", "terminology", - "terminoligy", "terminology", - "terratorial", "territorial", - "terratories", "territories", - "terretorial", "territorial", - "terretories", "territories", - "terrirorial", "territorial", - "terrirories", "territories", - "terriroties", "territories", - "terristrial", "territorial", - "territoires", "territories", - "territorist", "terrorist", - "territority", "territory", - "terroristas", "terrorists", - "terroristes", "terrorists", - "terrorities", "territories", - "terrotorial", "territorial", - "terrotories", "territories", - "testiclular", "testicular", - "thankfullly", "thankfully", - "thanksgivng", "thanksgiving", - "theoligical", "theological", - "theoratical", "theoretical", - "theoreticly", "theoretical", - "theoritical", "theoretical", - "therapautic", "therapeutic", - "therapeudic", "therapeutic", - "therapeutuc", "therapeutic", - "therapuetic", "therapeutic", - "theraupetic", "therapeutic", - "thereaputic", "therapeutic", - "thereotical", "theoretical", - "therepeutic", "therapeutic", - "thermometor", "thermometer", - "thermometre", "thermometer", - "thermomiter", "thermometer", - "thermomoter", "thermometer", - "thermoneter", "thermometer", - "thermostaat", "thermostat", - "theroetical", "theoretical", - "thoeretical", "theoretical", - "threataning", "threatening", - "threatended", "threatened", - "threatining", "threatening", - "throttleing", "throttling", - "throughoput", "throughput", - "throughtout", 
"throughout", - "throughtput", "throughput", - "thudnerbolt", "thunderbolt", - "thunberbolt", "thunderbolt", - "thunderblot", "thunderbolt", - "thunderboat", "thunderbolt", - "thunderbots", "thunderbolt", - "thunderbowl", "thunderbolt", - "thunderjolt", "thunderbolt", - "thundervolt", "thunderbolt", - "tightenting", "tightening", - "tocuhscreen", "touchscreen", - "torrentking", "torrenting", - "torrentting", "torrenting", - "torublesome", "troublesome", - "torunaments", "tournaments", - "totalitaran", "totalitarian", - "totalitarni", "totalitarian", - "touranments", "tournaments", - "tournamnets", "tournaments", - "tournemants", "tournaments", - "tournements", "tournaments", - "tournmanets", "tournaments", - "tradicional", "traditional", - "tradionally", "traditionally", - "tradisional", "traditional", - "traditionel", "traditional", - "traditition", "tradition", - "tragicallly", "tragically", - "tramautized", "traumatized", - "tramuatized", "traumatized", - "trancendent", "transcendent", - "trancending", "transcending", - "tranclucent", "translucent", - "trandgender", "transgender", - "tranditions", "transitions", - "tranistions", "transitions", - "tranlastion", "translations", - "tranlsating", "translating", - "tranlsation", "translation", - "tranluscent", "translucent", - "trannsexual", "transsexual", - "tranpshobic", "transphobic", - "transaccion", "transaction", - "transaciton", "transactions", - "transalting", "translating", - "transaltion", "translation", - "transations", "transitions", - "transcluent", "translucent", - "transcripto", "transcription", - "transctions", "transitions", - "transculent", "translucent", - "transending", "transcending", - "transfender", "transgender", - "transferers", "transfers", - "transfering", "transferring", - "transfersom", "transforms", - "transfomers", "transforms", - "transformas", "transforms", - "transformes", "transformers", - "transformis", "transforms", - "transformus", "transforms", - "transforums", "transforms", - "transfromed", "transformed", - "transfromer", "transformers", - "transgemder", "transgender", - "transgended", "transgendered", - "transgenger", "transgender", - "transgenres", "transgender", - "transhpobic", "transphobic", - "transisions", "transitions", - "transisitor", "transistor", - "transistion", "transition", - "transistior", "transistor", - "transitiond", "transitioned", - "transitiong", "transitioning", - "translatron", "translation", - "translusent", "translucent", - "transmatter", "transmitter", - "transmision", "transmission", - "transmissin", "transmissions", - "transmisson", "transmission", - "transmittor", "transmitter", - "transmorged", "transformed", - "transmutter", "transmitter", - "transofrmed", "transformed", - "transohobic", "transphobic", - "transparant", "transparent", - "transparecy", "transparency", - "transpareny", "transparency", - "transperant", "transparent", - "transperent", "transparent", - "transphonic", "transphobic", - "transphopic", "transphobic", - "transplanet", "transplant", - "transporder", "transporter", - "transporing", "transporting", - "transportar", "transporter", - "transportng", "transporting", - "transportor", "transporter", - "transseuxal", "transsexual", - "transsexaul", "transsexual", - "transsexuel", "transsexual", - "transulcent", "translucent", - "transylvnia", "transylvania", - "tranzformer", "transformer", - "tranzitions", "transitions", - "tranzporter", "transporter", - "trasncripts", "transcripts", - "trasnferred", "transferred", - "trasnformed", "transformed", - 
"trasnformer", "transformer", - "trasngender", "transgender", - "trasnmitted", "transmitted", - "trasnmitter", "transmitter", - "trasnparent", "transparent", - "trasnphobic", "transphobic", - "trasnported", "transported", - "trasnporter", "transporter", - "traumatisch", "traumatic", - "traumetized", "traumatized", - "traumitized", "traumatized", - "travellerhd", "travelled", - "travellodge", "travelled", - "tremendeous", "tremendous", - "tremendious", "tremendous", - "tremenduous", "tremendous", - "trespessing", "trespassing", - "tresspasing", "trespassing", - "triggereing", "triggering", - "triggerring", "triggering", - "troubelsome", "troublesome", - "truamatized", "traumatized", - "trushworthy", "trustworthy", - "trustowrthy", "trustworthy", - "trustwhorty", "trustworthy", - "trustworhty", "trustworthy", - "truthfullly", "truthfully", - "tupperwears", "tupperware", - "turstworthy", "trustworthy", - "ubiquitious", "ubiquitous", - "ubiquituous", "ubiquitous", - "ukraininans", "ukrainians", - "ultimatelly", "ultimately", - "unanimoulsy", "unanimous", - "unappeasing", "unappealing", - "unappeeling", "unappealing", - "unathorised", "unauthorised", - "unattendend", "unattended", - "unatteneded", "unattended", - "unattracive", "unattractive", - "unauthoried", "unauthorized", - "unavailible", "unavailable", - "unavaliable", "unavailable", - "unaviodable", "unavoidable", - "unbalanaced", "unbalanced", - "unbraikable", "unbreakable", - "unbrakeable", "unbreakable", - "unbreakabie", "unbreakable", - "unbreakabke", "unbreakable", - "unbreakbale", "unbreakable", - "unbreakeble", "unbreakable", - "unbrearable", "unbreakable", - "uncensorred", "uncensored", - "uncertaincy", "uncertainty", - "uncertanity", "uncertainty", - "uncertianty", "uncertainty", - "unchangable", "unchangeable", - "uncompetive", "uncompetitive", - "unconcsious", "unconscious", - "unconsicous", "unconscious", - "uncouncious", "unconscious", - "undeniabely", "undeniably", - "undeniabley", "undeniably", - "undeniablly", "undeniably", - "undenialbly", "undeniably", - "underestime", "underestimate", - "undergating", "undertaking", - "undergorund", "underground", - "underheight", "underweight", - "undermiming", "undermining", - "undermindes", "undermines", - "undernearth", "underneath", - "underneight", "underweight", - "underpining", "undermining", - "underpowerd", "underpowered", - "underpowred", "underpowered", - "underratted", "underrated", - "understannd", "understands", - "understsand", "understands", - "undertacker", "undertaker", - "underwarter", "underwater", - "underwieght", "underweight", - "underwright", "underweight", - "undesireble", "undesirable", - "undesriable", "undesirable", - "undetecable", "undetectable", - "undiserable", "undesirable", - "undoubedtly", "undoubtedly", - "undoubetdly", "undoubtedly", - "undoubtadly", "undoubtedly", - "undoubtebly", "undoubtedly", - "undoubtetly", "undoubtedly", - "undreground", "underground", - "unemployeed", "unemployed", - "unemployent", "unemployment", - "unemploymed", "unemployed", - "unexpectdly", "unexpectedly", - "unexpectely", "unexpectedly", - "unfamilliar", "unfamiliar", - "unfortuante", "unfortunate", - "ungreatfull", "ungrateful", - "unilateraly", "unilaterally", - "unilaterlly", "unilaterally", - "unimportent", "unimportant", - "uninspiried", "uninspired", - "uninstaling", "uninstalling", - "uninstallng", "uninstalling", - "uninteresed", "uninterested", - "uniquesness", "uniqueness", - "unisntalled", "uninstalled", - "universella", "universally", - "universites", 
"universities", - "univesities", "universities", - "unjustifyed", "unjustified", - "unknowinlgy", "unknowingly", - "unkowningly", "unknowingly", - "unnecassary", "unnecessary", - "unneccesary", "unnecessary", - "unnecessery", "unnecessary", - "unnecissary", "unnecessary", - "unnessecary", "unnecessary", - "unnistalled", "uninstalled", - "unoriginial", "unoriginal", - "unorigional", "unoriginal", - "unoticeable", "unnoticeable", - "unpleaseant", "unpleasant", - "unportected", "unprotected", - "unprepaired", "unprepared", - "unpreparred", "unprepared", - "unproducive", "unproductive", - "unprotexted", "unprotected", - "unqaulified", "unqualified", - "unrealisitc", "unrealistic", - "unrealsitic", "unrealistic", - "unreasonbly", "unreasonably", - "unregluated", "unregulated", - "unregualted", "unregulated", - "unregulared", "unregulated", - "unrepentent", "unrepentant", - "unresponive", "unresponsive", - "unrestriced", "unrestricted", - "unsettleing", "unsettling", - "unsintalled", "uninstalled", - "unsolicated", "unsolicited", - "unsoliticed", "unsolicited", - "unsolocited", "unsolicited", - "unsubscirbe", "unsubscribe", - "unsubscrbed", "unsubscribed", - "unsubscried", "unsubscribed", - "unsubscripe", "unsubscribe", - "unsubscrive", "unsubscribe", - "unsubscrube", "unsubscribe", - "unsubsrcibe", "unsubscribe", - "unsuccesful", "unsuccessful", - "unsuccessul", "unsuccessful", - "unsucesfuly", "unsuccessfully", - "unsucessful", "unsuccessful", - "unsunscribe", "unsubscribe", - "unsuprising", "unsurprising", - "unsuprizing", "unsurprising", - "unsurprized", "unsurprised", - "unsusbcribe", "unsubscribe", - "unviersally", "universally", - "unwarrented", "unwarranted", - "utiliatrian", "utilitarian", - "utilitatian", "utilitarian", - "utiliterian", "utilitarian", - "utilizacion", "utilization", - "utilizaiton", "utilization", - "utilizating", "utilization", - "utiltiarian", "utilitarian", - "vacciantion", "vaccination", - "vaccinaties", "vaccinate", - "vegaterians", "vegetarians", - "vegetariens", "vegetarians", - "vegetatians", "vegetarians", - "vegeterians", "vegetarians", - "vehementely", "vehemently", - "venezuelean", "venezuela", - "venezuelian", "venezuela", - "ventalation", "ventilation", - "ventelation", "ventilation", - "ventialtion", "ventilation", - "ventilacion", "ventilation", - "verastility", "versatility", - "verfication", "verification", - "versatality", "versatility", - "versitality", "versatility", - "versitilaty", "versatility", - "victorieuse", "victories", - "victoriuous", "victorious", - "vietnameese", "vietnamese", - "vietnamesse", "vietnamese", - "vietnamiese", "vietnamese", - "vietnamnese", "vietnamese", - "vigilanties", "vigilante", - "visibillity", "visibility", - "vocabularly", "vocabulary", - "volatillity", "volatility", - "volonteered", "volunteered", - "volounteers", "volunteers", - "volunatrily", "voluntarily", - "voluntairly", "voluntarily", - "volunteeers", "volunteers", - "volunteraly", "voluntarily", - "voluntereed", "volunteered", - "volunterily", "voluntarily", - "vulnerabile", "vulnerable", - "wallpapaers", "wallpapers", - "wallpappers", "wallpapers", - "washingtion", "washington", - "watermeleon", "watermelon", - "waterprooof", "waterproof", - "wavelegnths", "wavelength", - "wavelenghth", "wavelength", - "wavelenghts", "wavelength", - "weaknessses", "weaknesses", - "wellingston", "wellington", - "wellingtion", "wellington", - "westernerns", "westerners", - "westmisnter", "westminster", - "westmnister", "westminster", - "westmonster", "westminster", - 
"whisperered", "whispered", - "whitholding", "withholding", - "wikileakers", "wikileaks", - "willingless", "willingness", - "wincheseter", "winchester", - "windsheilds", "windshield", - "withdrawels", "withdrawals", - "withdrawles", "withdrawals", - "withhelding", "withholding", - "withrdawing", "withdrawing", - "witnesssing", "witnessing", - "woodowrking", "woodworking", - "woodworkign", "woodworking", - "worhsipping", "worshipping", - "workstaiton", "workstation", - "workststion", "workstation", - "worshopping", "worshipping", - "xenophoblic", "xenophobic", - "abandining", "abandoning", - "abandonned", "abandoned", - "abbreviato", "abbreviation", - "abnoramlly", "abnormally", - "abnormalty", "abnormally", - "abnornally", "abnormally", - "abominaton", "abomination", - "abondoning", "abandoning", - "aborginial", "aboriginal", - "aboriganal", "aboriginal", - "aborigenal", "aboriginal", - "aborignial", "aboriginal", - "aborigonal", "aboriginal", - "aboroginal", "aboriginal", - "aboslutely", "absolutely", - "abosrption", "absorption", - "abreviated", "abbreviated", - "absintence", "abstinence", - "absitnence", "abstinence", - "absolument", "absolute", - "absolutley", "absolutely", - "absoprtion", "absorption", - "absorbsion", "absorption", - "absorbtion", "absorption", - "absorpsion", "absorption", - "absoultely", "absolutely", - "abstanence", "abstinence", - "abstenance", "abstinence", - "abstenince", "abstinence", - "abstinense", "abstinence", - "abstinince", "abstinence", - "absurditiy", "absurdity", - "abundacies", "abundances", - "academicas", "academics", - "academicos", "academics", - "academicus", "academics", - "accdiently", "accidently", - "accelarate", "accelerate", - "accelerade", "accelerated", - "accelerare", "accelerate", - "accelerato", "acceleration", - "acceleread", "accelerated", - "accelertor", "accelerator", - "accelorate", "accelerate", - "acceptabel", "acceptable", - "acceptabil", "acceptable", - "acceptence", "acceptance", - "accepterad", "accepted", - "acceptible", "acceptable", - "accerelate", "accelerated", - "accesories", "accessories", - "accessable", "accessible", - "accessbile", "accessible", - "accessoire", "accessories", - "accessoirs", "accessories", - "accicently", "accidently", - "accidantly", "accidently", - "accidebtly", "accidently", - "accidenlty", "accidently", - "accidentes", "accidents", - "accidentky", "accidently", - "accidently", "accidentally", - "accidnetly", "accidently", - "accomadate", "accommodate", - "accomodate", "accommodate", - "accompined", "accompanied", - "accomplise", "accomplishes", - "accompliss", "accomplishes", - "accostumed", "accustomed", - "accountent", "accountant", - "accpetable", "acceptable", - "accpetance", "acceptance", - "accuastion", "accusation", - "acculumate", "accumulate", - "accumalate", "accumulate", - "accumelate", "accumulate", - "accumilate", "accumulate", - "accumulare", "accumulate", - "accumulato", "accumulation", - "accumulted", "accumulated", - "accuratley", "accurately", - "accusating", "accusation", - "accusition", "accusation", - "accustumed", "accustomed", - "acheivable", "achievable", - "acheivment", "achievement", - "acheviable", "achievable", - "achiavable", "achievable", - "achieveble", "achievable", - "achievemnt", "achievement", - "achievemts", "achieves", - "achievents", "achieves", - "achievment", "achievement", - "achilleous", "achilles", - "achiveable", "achievable", - "achivement", "achievement", - "acitvating", "activating", - "acitvision", "activision", - "acknowldge", "acknowledge", - 
"acknowlede", "acknowledge", - "acknowlege", "acknowledge", - "acommodate", "accommodate", - "acopalypse", "apocalypse", - "acordingly", "accordingly", - "acqauinted", "acquainted", - "acquanited", "acquainted", - "acquianted", "acquainted", - "acquinated", "acquainted", - "acquisiton", "acquisition", - "acticating", "activating", - "actication", "activation", - "activacion", "activation", - "activaters", "activates", - "activiates", "activist", - "activiites", "activist", - "activisiom", "activism", - "activisits", "activist", - "activistas", "activists", - "activistes", "activists", - "activiting", "activating", - "activizion", "activision", - "acustommed", "accustomed", - "adaptacion", "adaptation", - "adaptating", "adaptation", - "adaquetely", "adequately", - "addicitons", "addictions", - "addionally", "additionally", - "additivies", "additive", - "additivley", "additive", - "addittions", "addictions", - "addmission", "admission", - "addresable", "addressable", - "addressess", "addresses", - "adequatley", "adequately", - "adequetely", "adequately", - "adequitely", "adequately", - "adernaline", "adrenaline", - "adjectivos", "adjectives", - "adjustible", "adjustable", - "admendment", "amendment", - "administed", "administered", - "administor", "administer", - "administre", "administer", - "administro", "administer", - "adminsiter", "administer", - "admissable", "admissible", - "admittadly", "admittedly", - "admittetly", "admittedly", - "admittidly", "admittedly", - "adolencent", "adolescent", - "adolescant", "adolescent", - "adolescene", "adolescence", - "adoloscent", "adolescent", - "adolsecent", "adolescent", - "adpatation", "adaptation", - "adreanline", "adrenaline", - "adrelanine", "adrenaline", - "adreneline", "adrenaline", - "adreniline", "adrenaline", - "adressable", "addressable", - "advanteges", "advantages", - "advatanges", "advantages", - "adventrous", "adventurous", - "adventrues", "adventures", - "adventuers", "adventures", - "adventuous", "adventurous", - "adventuros", "adventurous", - "adventurus", "adventurous", - "adverticed", "advertised", - "aestethics", "aesthetics", - "aesthatics", "aesthetics", - "aesthestic", "aesthetics", - "affiliaton", "affiliation", - "affilliate", "affiliate", - "affirmitve", "affirmative", - "afflcition", "affliction", - "afflection", "affliction", - "affliation", "affliction", - "affliciton", "affliction", - "afforadble", "affordable", - "affordible", "affordable", - "affortable", "affordable", - "africaners", "africans", - "africaness", "africans", - "aftermaket", "aftermarket", - "afternooon", "afternoon", - "aggravanti", "aggravating", - "aggraveted", "aggravated", - "aggreement", "agreement", - "aggregious", "egregious", - "aggresions", "aggression", - "aggressivo", "aggression", - "aggrovated", "aggravated", - "agnosticim", "agnosticism", - "agnosticsm", "agnosticism", - "agnostisch", "agnostic", - "agnostiscm", "agnosticism", - "agnostisim", "agnosticism", - "agreeement", "agreement", - "agricultre", "agriculture", - "agricultue", "agriculture", - "agriculure", "agriculture", - "agricuture", "agriculture", - "ailenating", "alienating", - "ajdectives", "adjectives", - "alchoholic", "alcoholic", - "alchoolism", "alcoholism", - "alcohalics", "alcoholics", - "alcohalism", "alcoholism", - "alcoholsim", "alcoholism", - "aleinating", "alienating", - "algorhitms", "algorithms", - "algorithem", "algorithm", - "algorithim", "algorithm", - "algorithsm", "algorithms", - "algorithum", "algorithm", - "algorithym", "algorithm", - "algoritmes", 
"algorithms", - "algoritmos", "algorithms", - "algorthims", "algorithms", - "algortihms", "algorithms", - "algorythms", "algorithms", - "alievating", "alienating", - "alledgedly", "allegedly", - "allegeance", "allegiance", - "allegedely", "allegedly", - "allegedley", "allegedly", - "allegience", "allegiance", - "alleigance", "allegiance", - "allergisch", "allergic", - "alliegance", "allegiance", - "alligeance", "allegiance", - "alocholics", "alcoholics", - "alocholism", "alcoholism", - "alogrithms", "algorithms", - "alphabeast", "alphabet", - "alteracion", "alteration", - "alterarion", "alteration", - "alterating", "alteration", - "alternador", "alternator", - "alternater", "alternator", - "alternatie", "alternatives", - "alternatly", "alternately", - "alternatve", "alternate", - "alternetly", "alternately", - "altogehter", "altogether", - "altogheter", "altogether", - "altriustic", "altruistic", - "altruisitc", "altruistic", - "altrusitic", "altruistic", - "alturistic", "altruistic", - "aluminimum", "aluminum", - "amargeddon", "armageddon", - "amateurest", "amateurs", - "ambassabor", "ambassador", - "ambassader", "ambassador", - "ambassator", "ambassador", - "ambassedor", "ambassador", - "ambassidor", "ambassador", - "ambassodor", "ambassador", - "ambiguitiy", "ambiguity", - "amendmants", "amendments", - "amendmends", "amendments", - "americains", "americas", - "americanas", "americans", - "americanis", "americas", - "americanss", "americas", - "americants", "americas", - "americanus", "americans", - "americares", "americas", - "ammendment", "amendment", - "amrageddon", "armageddon", - "analitical", "analytical", - "analitycal", "analytical", - "analogeous", "analogous", - "analyitcal", "analytical", - "analyseles", "analyses", - "analyseras", "analyses", - "analyseres", "analyses", - "analysised", "analyses", - "analysises", "analyses", - "analysisto", "analysts", - "analystics", "analysts", - "anarchisim", "anarchism", - "anarchistm", "anarchism", - "anarchiszm", "anarchism", - "anarchsits", "anarchists", - "anayltical", "analytical", - "ancilliary", "ancillary", - "androiders", "androids", - "androidtvs", "androids", - "anecdotale", "anecdote", - "anecdotice", "anecdote", - "anestheisa", "anesthesia", - "anesthetia", "anesthesia", - "anesthisia", "anesthesia", - "anitbiotic", "antibiotic", - "anitquated", "antiquated", - "anitsocial", "antisocial", - "aniversary", "anniversary", - "annilihate", "annihilated", - "anniverary", "anniversary", - "anniversay", "anniversary", - "anniversry", "anniversary", - "annointing", "anointing", - "annonceurs", "announcers", - "annoucners", "announcers", - "annoucning", "announcing", - "announched", "announce", - "annyoingly", "annoyingly", - "anonymosly", "anonymously", - "anonymousy", "anonymously", - "antaganist", "antagonist", - "antagnoist", "antagonist", - "antarcitca", "antarctica", - "antarctida", "antarctica", - "anthropoly", "anthropology", - "antibiodic", "antibiotic", - "antibiotcs", "antibiotics", - "antibitoic", "antibiotic", - "antiboitic", "antibiotics", - "anticapate", "anticipate", - "anticiapte", "anticipate", - "anticipare", "anticipate", - "anticipato", "anticipation", - "anticuated", "antiquated", - "antiquited", "antiquated", - "antiqvated", "antiquated", - "antisipate", "anticipate", - "antisocail", "antisocial", - "antisosial", "antisocial", - "antoganist", "antagonist", - "antractica", "antarctica", - "apacolypse", "apocalypse", - "apartheied", "apartheid", - "aplication", "application", - "apocalipse", "apocalypse", - 
"apocalpyse", "apocalypse", - "apocalypes", "apocalypse", - "apocalypic", "apocalyptic", - "apocalyspe", "apocalypse", - "apocalytic", "apocalyptic", - "apocaplyse", "apocalypse", - "apocolapse", "apocalypse", - "apolagetic", "apologetic", - "apolagized", "apologized", - "apolegetic", "apologetic", - "apoligetic", "apologetic", - "apoligists", "apologists", - "apoligized", "apologized", - "apologisms", "apologists", - "apologiste", "apologise", - "apologitic", "apologetic", - "apostraphe", "apostrophe", - "apostrephe", "apostrophe", - "apostrohpe", "apostrophe", - "apostropes", "apostrophe", - "apparantly", "apparently", - "appareance", "appearance", - "apparenlty", "apparently", - "appartment", "apartment", - "appealling", "appealing", - "appearence", "appearance", - "appearnace", "appearances", - "apperances", "appearances", - "apperantly", "apparently", - "apperciate", "appreciate", - "appereance", "appearance", - "appetities", "appetite", - "appetitite", "appetite", - "appication", "application", - "applainces", "appliances", - "applicaple", "applicable", - "applicates", "applicants", - "applicaton", "application", - "applicible", "applicable", - "appliences", "appliances", - "appointmet", "appointments", - "appologies", "apologies", - "apporached", "approached", - "apporaches", "approaches", - "appraoched", "approached", - "appraoches", "approaches", - "apprecaite", "appreciate", - "appreciato", "appreciation", - "appreciste", "appreciates", - "apprecitae", "appreciates", - "apprecited", "appreciated", - "apprectice", "apprentice", - "appreicate", "appreciate", - "apprendice", "apprentice", - "apprentace", "apprentice", - "apprentise", "apprentice", - "appretiate", "appreciate", - "appretince", "apprentice", - "appriceate", "appreciates", - "appriciate", "appreciate", - "appriecate", "appreciates", - "approacing", "approaching", - "appropiate", "appropriate", - "approprate", "appropriate", - "apropriate", "appropriate", - "aproximate", "approximate", - "apsotrophe", "apostrophe", - "aptitudine", "aptitude", - "aqcuainted", "acquainted", - "aquisition", "acquisition", - "aramgeddon", "armageddon", - "arangement", "arrangement", - "arbitarily", "arbitrarily", - "arbitraily", "arbitrarily", - "arbitraion", "arbitration", - "arbitrairy", "arbitrarily", - "arbitrarly", "arbitrary", - "arbitraton", "arbitration", - "arcehtypes", "archetypes", - "archaelogy", "archaeology", - "archaeolgy", "archaeology", - "archaology", "archeology", - "archatypes", "archetypes", - "archetects", "architects", - "archetipes", "archetypes", - "archetpyes", "archetypes", - "archetypus", "archetypes", - "archeytpes", "archetypes", - "archictect", "architect", - "architechs", "architects", - "architecht", "architect", - "architecte", "architecture", - "architexts", "architects", - "architypes", "archetypes", - "archtiects", "architects", - "archytypes", "archetypes", - "argentinia", "argentina", - "arguements", "arguments", - "argumentas", "arguments", - "argumentos", "arguments", - "arithemtic", "arithmetic", - "arithmitic", "arithmetic", - "aritmethic", "arithmetic", - "armagaddon", "armageddon", - "armageddan", "armageddon", - "armagedden", "armageddon", - "armageddin", "armageddon", - "armagedeon", "armageddon", - "armageedon", "armageddon", - "armagideon", "armageddon", - "armegaddon", "armageddon", - "arrangerad", "arranged", - "arrangment", "arrangement", - "arthimetic", "arithmetic", - "articifial", "artificial", - "articluate", "articulate", - "articualte", "articulate", - "articulted", "articulated", - 
"artifactos", "artifacts", - "artificiel", "artificial", - "artihmetic", "arithmetic", - "artillerly", "artillery", - "asbestoast", "asbestos", - "asethetics", "aesthetics", - "asisstants", "assistants", - "aspiratons", "aspirations", - "assasinate", "assassinate", - "assassians", "assassin", - "assassinas", "assassins", - "assassines", "assassins", - "assassinos", "assassins", - "assemblare", "assemble", - "assempling", "assembling", - "assersions", "assertions", - "assesement", "assessment", - "assestment", "assessment", - "assigments", "assignments", - "assignemnt", "assignment", - "assimalate", "assimilate", - "assimilant", "assimilate", - "assimilare", "assimilate", - "assimliate", "assimilate", - "assimulate", "assimilate", - "assingment", "assignment", - "assistanat", "assistants", - "assistanse", "assistants", - "assistante", "assistance", - "assistence", "assistance", - "assistendo", "assisted", - "assistents", "assistants", - "assmebling", "assembling", - "assocaited", "associated", - "assocaites", "associates", - "assocation", "association", - "associatie", "associated", - "associatin", "associations", - "associaton", "association", - "associsted", "associates", - "assoicated", "associated", - "assoicates", "associates", - "assosiated", "associated", - "assosiates", "associates", - "asssassans", "assassins", - "assupmtion", "assumptions", - "assymetric", "asymmetric", - "asteroides", "asteroids", - "asthetical", "aesthetical", - "astonising", "astonishing", - "astornauts", "astronauts", - "astranauts", "astronauts", - "astronatus", "astronauts", - "astronaunt", "astronaut", - "astronomia", "astronomical", - "astronouts", "astronauts", - "astronuats", "astronauts", - "asutralian", "australian", - "atatchment", "attachment", - "athleticos", "athletics", - "athleticsm", "athleticism", - "athletiscm", "athleticism", - "athletisim", "athleticism", - "atmopshere", "atmosphere", - "atmoshpere", "atmosphere", - "atomsphere", "atmosphere", - "atriculate", "articulate", - "atrocoties", "atrocities", - "atrosities", "atrocities", - "attachemnt", "attachment", - "attackeras", "attackers", - "attactment", "attachment", - "attemtping", "attempting", - "attendence", "attendance", - "attendents", "attendants", - "attirbutes", "attributes", - "attmepting", "attempting", - "attracters", "attracts", - "attractice", "attractive", - "attracties", "attracts", - "attractifs", "attracts", - "attraktion", "attraction", - "attraktive", "attractive", - "attribuito", "attribution", - "attritubes", "attributes", - "auctioners", "auctions", - "audioboook", "audiobook", - "audioboost", "audiobooks", - "auidobooks", "audiobooks", - "auotattack", "autoattack", - "austrailan", "australian", - "austrailia", "australia", - "australain", "australians", - "australien", "australian", - "australina", "australians", - "austrlaian", "australians", - "authenticy", "authenticity", - "autherized", "authorized", - "authoritay", "authority", - "authorites", "authorities", - "authorithy", "authority", - "authroized", "authorized", - "autistisch", "autistic", - "autoattaks", "autoattack", - "autocorect", "autocorrect", - "autocorrct", "autocorrect", - "autocorret", "autocorrect", - "autograpgh", "autograph", - "automatice", "automate", - "automatico", "automation", - "automatied", "automate", - "automatiek", "automate", - "automatron", "automation", - "automatted", "automate", - "automibile", "automobile", - "automitive", "automotive", - "automoblie", "automobile", - "automomous", "autonomous", - "automonous", "autonomous", - 
"automotice", "automotive", - "automotion", "automation", - "automotize", "automotive", - "automotove", "automotive", - "autonamous", "autonomous", - "autonation", "automation", - "autonimous", "autonomous", - "autonomity", "autonomy", - "autononous", "autonomous", - "auttoatack", "autoattack", - "auxilliary", "auxiliary", - "availabale", "available", - "availaible", "available", - "availiable", "available", - "averageadi", "averaged", - "averageifs", "averages", - "awesomeley", "awesomely", - "awesomelly", "awesomely", - "awesomenss", "awesomeness", - "awkwardess", "awkwardness", - "babysister", "babysitter", - "babysiting", "babysitting", - "babysittng", "babysitting", - "bachelores", "bachelors", - "backgorund", "background", - "backgroudn", "backgrounds", - "backgrouds", "backgrounds", - "backgrouns", "backgrounds", - "backgruond", "backgrounds", - "backpacing", "backpacking", - "backpackng", "backpacking", - "backrgound", "backgrounds", - "backrounds", "backgrounds", - "baksetball", "basketball", - "balanceada", "balanced", - "balanceado", "balanced", - "balckberry", "blackberry", - "balckhawks", "blackhawks", - "balcksmith", "blacksmith", - "bandwagoon", "bandwagon", - "bangaldesh", "bangladesh", - "bangladash", "bangladesh", - "bangledash", "bangladesh", - "bangledesh", "bangladesh", - "banglidesh", "bangladesh", - "bankrupcty", "bankruptcy", - "bankruptsy", "bankruptcy", - "bankrutpcy", "bankruptcy", - "barabrians", "barbarians", - "barbariens", "barbarians", - "barbarions", "barbarians", - "barbarisch", "barbaric", - "barberians", "barbarians", - "bargianing", "bargaining", - "bartendars", "bartenders", - "basektball", "basketball", - "baskteball", "basketball", - "bastardous", "bastards", - "battelship", "battleship", - "battelstar", "battlestar", - "battlearts", "battlestar", - "battlechip", "battleship", - "battlefied", "battlefield", - "battlefont", "battlefront", - "battlehips", "battleship", - "battlesaur", "battlestar", - "battlescar", "battlestar", - "battleshop", "battleship", - "battlestsr", "battlestar", - "beahviours", "behaviours", - "beautifuly", "beautifully", - "beautilful", "beautifully", - "beautyfull", "beautiful", - "becnhmarks", "benchmarks", - "beethoveen", "beethoven", - "begginings", "beginnings", - "begininngs", "beginnings", - "beginninng", "beginnings", - "behaivours", "behaviours", - "behaviorly", "behavioral", - "behavoiral", "behavioral", - "behavoiurs", "behaviours", - "behavorial", "behavioral", - "behavoural", "behavioral", - "behvaiours", "behaviours", - "beleagured", "beleaguered", - "beleivable", "believable", - "beliavable", "believable", - "beliebable", "believable", - "believeble", "believable", - "beliveable", "believable", - "benchamrks", "benchmarks", - "benchmakrs", "benchmarks", - "benckmarks", "benchmarks", - "benefecial", "beneficial", - "beneficary", "beneficiary", - "beneficiul", "beneficial", - "benefitial", "beneficial", - "beneifical", "beneficial", - "benelovent", "benevolent", - "benevalent", "benevolent", - "benevelant", "benevolent", - "benevelent", "benevolent", - "benevelont", "benevolent", - "benevloent", "benevolent", - "benevolant", "benevolent", - "benificial", "beneficial", - "benovelent", "benevolent", - "bernouilli", "bernoulli", - "besitality", "bestiality", - "bestaility", "bestiality", - "besteality", "bestiality", - "betrayeado", "betrayed", - "bilateraly", "bilaterally", - "billborads", "billboards", - "bioligical", "biological", - "biologiset", "biologist", - "biologiskt", "biologist", - "birghtness", 
"brightness", - "birmignham", "birmingham", - "birmimgham", "birmingham", - "bisexuella", "bisexual", - "bitterseet", "bittersweet", - "bitterswet", "bittersweet", - "blackahwks", "blackhawks", - "blackbarry", "blackberry", - "blackbeary", "blackberry", - "blackbeery", "blackberry", - "blackcawks", "blackhawks", - "blackhakws", "blackhawks", - "blackhwaks", "blackhawks", - "blackmsith", "blacksmith", - "blackshits", "blacksmith", - "blasphemey", "blasphemy", - "blitzkreig", "blitzkrieg", - "blochchain", "blockchain", - "blockcahin", "blockchain", - "blockchian", "blockchain", - "bloodboner", "bloodborne", - "bloodbonre", "bloodborne", - "bloodborbe", "bloodborne", - "bloodbrone", "bloodborne", - "bloodporne", "bloodborne", - "bloorborne", "bloodborne", - "blueberies", "blueberries", - "blueberris", "blueberries", - "blueberrry", "blueberry", - "bluebrints", "blueprints", - "boardcasts", "broadcasts", - "bodyheight", "bodyweight", - "bodyweigth", "bodyweight", - "bodywieght", "bodyweight", - "bombarment", "bombardment", - "bookmakred", "bookmarked", - "bootlaoder", "bootloader", - "bootleader", "bootloader", - "boradcasts", "broadcasts", - "borderlads", "borderlands", - "borderlans", "borderlands", - "bottelneck", "bottleneck", - "bottlebeck", "bottleneck", - "boundaires", "boundaries", - "bounderies", "boundaries", - "bourgeoius", "bourgeois", - "boycutting", "boycotting", - "boyfirends", "boyfriends", - "boyfreinds", "boyfriends", - "boyfrients", "boyfriends", - "braceletes", "bracelets", - "braceletts", "bracelets", - "brainwased", "brainwashed", - "brakedowns", "breakdowns", - "braodcasts", "broadcasts", - "brasillian", "brazilian", - "bratenders", "bartenders", - "brazilains", "brazilians", - "brazileans", "brazilians", - "braziliaan", "brazilians", - "brazilions", "brazilians", - "brazillans", "brazilians", - "brightoner", "brighten", - "brigthness", "brightness", - "brillaince", "brilliance", - "brilliante", "brilliance", - "brillianty", "brilliantly", - "brimestone", "brimstone", - "brimingham", "birmingham", - "broacasted", "broadcast", - "brotherhod", "brotherhood", - "brotherood", "brotherhood", - "brusselers", "brussels", - "brutallity", "brutally", - "buisnesses", "businesses", - "bulgariska", "bulgaria", - "bulletpoof", "bulletproof", - "bulletprof", "bulletproof", - "bureaucats", "bureaucrats", - "businesman", "businessman", - "businesmen", "businessmen", - "businessen", "businessmen", - "butterfies", "butterflies", - "cabinettas", "cabinets", - "caclulated", "calculated", - "caclulator", "calculator", - "cahracters", "characters", - "calcluator", "calculators", - "calcualted", "calculated", - "calcualtor", "calculator", - "calculador", "calculator", - "calcularon", "calculator", - "calculater", "calculator", - "calculatin", "calculations", - "calibratin", "calibration", - "calibraton", "calibration", - "califnoria", "californian", - "califonria", "californian", - "califorian", "californian", - "califorina", "california", - "californai", "californian", - "califronia", "california", - "caligraphy", "calligraphy", - "caliofrnia", "californian", - "calrifying", "clarifying", - "calssified", "classified", - "caluclated", "calculated", - "caluclator", "calculator", - "caluculate", "calculate", - "cambodican", "cambodia", - "camofluage", "camouflage", - "camoufalge", "camouflage", - "camouglage", "camouflage", - "campaiging", "campaigning", - "campaignes", "campaigns", - "cancellato", "cancellation", - "candidatas", "candidates", - "candidatxs", "candidates", - "candidiate", 
"candidate", - "canditates", "candidates", - "cannibalsm", "cannibalism", - "cannisters", "canisters", - "cannonical", "canonical", - "capabality", "capability", - "capabiltiy", "capability", - "capacators", "capacitors", - "capaciters", "capacitors", - "capactiors", "capacitors", - "capasitors", "capacitors", - "capatilism", "capitalism", - "capatilist", "capitalist", - "capatilize", "capitalize", - "capialized", "capitalized", - "capicators", "capacitors", - "capitalisn", "capitals", - "capitalits", "capitalists", - "capitalsim", "capitalism", - "capitalsit", "capitalists", - "capitarist", "capitalist", - "capitilism", "capitalism", - "capitilist", "capitalist", - "capitilize", "capitalize", - "capitlaism", "capitalism", - "capitlaist", "capitalist", - "capitlaize", "capitalized", - "capitolism", "capitalism", - "capitolist", "capitalist", - "capitolize", "capitalize", - "captainers", "captains", - "captialism", "capitalism", - "captialist", "capitalist", - "captialize", "capitalize", - "captivitiy", "captivity", - "caraciture", "caricature", - "carciature", "caricature", - "cardinales", "cardinals", - "cardinalis", "cardinals", - "carefullly", "carefully", - "cariacture", "caricature", - "caricatore", "caricature", - "cariciture", "caricature", - "caricuture", "caricature", - "carismatic", "charismatic", - "carribbean", "caribbean", - "cartdridge", "cartridge", - "cartdriges", "cartridges", - "carthagian", "carthaginian", - "cartilidge", "cartilage", - "cartirdges", "cartridges", - "cartrdiges", "cartridges", - "cartriages", "cartridges", - "cartrigdes", "cartridges", - "casaulties", "casualties", - "cassowarry", "cassowary", - "casualites", "casualties", - "casualries", "casualties", - "casulaties", "casualties", - "cataclysim", "cataclysm", - "cataclysym", "cataclysm", - "catagories", "categories", - "catapillar", "caterpillar", - "catapiller", "caterpillar", - "catastrope", "catastrophe", - "catastrphe", "catastrophe", - "categorice", "categorize", - "categoried", "categorized", - "categoriei", "categorize", - "cateogrize", "categorized", - "catepillar", "caterpillar", - "caterpilar", "caterpillar", - "catholicsm", "catholicism", - "catholicus", "catholics", - "catholisim", "catholicism", - "cativating", "activating", - "cattleship", "battleship", - "causalties", "casualties", - "cautionsly", "cautiously", - "celebratin", "celebration", - "celebrites", "celebrities", - "celebritiy", "celebrity", - "cellpading", "cellpadding", - "cellulaire", "cellular", - "cemetaries", "cemeteries", - "censorhsip", "censorship", - "censurship", "censorship", - "centipedle", "centipede", - "ceremonias", "ceremonies", - "ceremoniis", "ceremonies", - "ceremonije", "ceremonies", - "cerimonial", "ceremonial", - "cerimonies", "ceremonies", - "certainity", "certainty", - "certainlyt", "certainty", - "chairtable", "charitable", - "chalenging", "challenging", - "challanged", "challenged", - "challanges", "challenges", - "challegner", "challenger", - "challender", "challenger", - "challengue", "challenger", - "challengur", "challenger", - "challening", "challenging", - "challneger", "challenger", - "chanceller", "chancellor", - "chancillor", "chancellor", - "chansellor", "chancellor", - "charachter", "character", - "charactere", "characterize", - "characterz", "characterize", - "charactors", "characters", - "charakters", "characters", - "charatable", "charitable", - "charecters", "characters", - "charistics", "characteristics", - "charitible", "charitable", - "chartiable", "charitable", - "chechpoint", 
"checkpoint", - "checkpiont", "checkpoint", - "checkpoins", "checkpoints", - "checkponts", "checkpoints", - "cheesecase", "cheesecake", - "cheesecave", "cheesecake", - "cheeseface", "cheesecake", - "cheezecake", "cheesecake", - "chemcially", "chemically", - "chidlbirth", "childbirth", - "chihuahuha", "chihuahua", - "childbrith", "childbirth", - "childrends", "childrens", - "childrenis", "childrens", - "childrents", "childrens", - "chirstians", "christians", - "chocalates", "chocolates", - "chocloates", "chocolates", - "chocoaltes", "chocolates", - "chocolatie", "chocolates", - "chocolatos", "chocolates", - "chocolatte", "chocolates", - "chocolotes", "chocolates", - "cholestrol", "cholesterol", - "chormosome", "chromosome", - "chornicles", "chronicles", - "chrisitans", "christians", - "christains", "christians", - "christiaan", "christian", - "christimas", "christians", - "christinas", "christians", - "christines", "christians", - "christmans", "christians", - "chromasome", "chromosome", - "chromesome", "chromosome", - "chromisome", "chromosome", - "chromosmes", "chromosomes", - "chromosoms", "chromosomes", - "chromosone", "chromosome", - "chromosoom", "chromosome", - "chromozome", "chromosome", - "chronciles", "chronicles", - "chronicals", "chronicles", - "chronicels", "chronicles", - "chronocles", "chronicles", - "chronosome", "chromosome", - "chrsitians", "christians", - "cigarattes", "cigarettes", - "cigerattes", "cigarettes", - "cincinatti", "cincinnati", - "cinncinati", "cincinnati", - "circulaire", "circular", - "circulaton", "circulation", - "circumsice", "circumcised", - "circumsied", "circumcised", - "circumwent", "circumvent", - "circunvent", "circumvent", - "cirruculum", "curriculum", - "claculator", "calculator", - "clairfying", "clarifying", - "clasically", "classically", - "classicals", "classics", - "classrooom", "classroom", - "cleanliess", "cleanliness", - "cleareance", "clearance", - "cleverleys", "cleverly", - "cliffhager", "cliffhanger", - "climateers", "climates", - "climatiser", "climates", - "clincially", "clinically", - "clitoridis", "clitoris", - "clitorious", "clitoris", - "co-incided", "coincided", - "cockroachs", "cockroaches", - "cockroahes", "cockroaches", - "coefficent", "coefficient", - "cognatious", "contagious", - "cognitivie", "cognitive", - "coincidnce", "coincide", - "colelctive", "collective", - "colelctors", "collectors", - "collapsers", "collapses", - "collaquial", "colloquial", - "collasping", "collapsing", - "collataral", "collateral", - "collaterol", "collateral", - "collatoral", "collateral", - "collcetion", "collections", - "colleauges", "colleagues", - "colleciton", "collection", - "collectems", "collects", - "collectief", "collective", - "collecties", "collects", - "collectifs", "collects", - "collectivo", "collection", - "collectoin", "collections", - "collectons", "collections", - "collectros", "collects", - "collegaues", "colleagues", - "collequial", "colloquial", - "colleteral", "collateral", - "colliquial", "colloquial", - "collission", "collisions", - "collitions", "collisions", - "colloqiual", "colloquial", - "colloquail", "colloquial", - "colloqueal", "colloquial", - "collpasing", "collapsing", - "colonialsm", "colonialism", - "colorblend", "colorblind", - "coloublind", "colorblind", - "columbidae", "columbia", - "comapnions", "companions", - "comaprable", "comparable", - "comaprison", "comparison", - "comaptible", "compatible", - "combatabts", "combatants", - "combatents", "combatants", - "combinatin", "combinations", - "combinaton", 
"combination", - "comediants", "comedians", - "comepndium", "compendium", - "comferting", "comforting", - "comforming", "comforting", - "comfortbly", "comfortably", - "comisioned", "commissioned", - "comisioner", "commissioner", - "comissions", "commissions", - "commandbox", "commando", - "commandent", "commandment", - "commandeur", "commanders", - "commandore", "commanders", - "commandpod", "commando", - "commanists", "communists", - "commemters", "commenters", - "commencera", "commerce", - "commenciez", "commence", - "commentaar", "commentary", - "commentare", "commenter", - "commentars", "commenters", - "commentart", "commentator", - "commentery", "commentary", - "commentsry", "commenters", - "commercail", "commercials", - "commercent", "commence", - "commerical", "commercial", - "comminists", "communists", - "commisison", "commissions", - "commissons", "commissions", - "commiteted", "commited", - "commodites", "commodities", - "commtiment", "commitments", - "communicae", "communicated", - "communisim", "communism", - "communiste", "communities", - "communites", "communities", - "communters", "commenters", - "compadible", "compatible", - "compagnons", "companions", - "compainons", "companions", - "compairson", "comparison", - "compalined", "complained", - "compandium", "compendium", - "companians", "companions", - "companines", "companions", - "compansate", "compensate", - "comparabil", "comparable", - "comparason", "comparison", - "comparaste", "compares", - "comparatie", "comparative", - "compareble", "comparable", - "comparemos", "compares", - "comparions", "comparison", - "compariosn", "comparisons", - "comparisen", "compares", - "comparitve", "comparative", - "comparsion", "comparison", - "compartent", "compartment", - "compartmet", "compartment", - "compatibel", "compatible", - "compatibil", "compatible", - "compeating", "completing", - "compeditor", "competitor", - "compednium", "compendium", - "compeeting", "completing", - "compeltely", "completely", - "compelting", "completing", - "compeltion", "completion", - "compemdium", "compendium", - "compenduim", "compendium", - "compenents", "components", - "compenidum", "compendium", - "compensare", "compensate", - "comperable", "comparable", - "comperhend", "comprehend", - "compession", "compassion", - "competance", "competence", - "competator", "competitor", - "competenet", "competence", - "competense", "competence", - "competenze", "competence", - "competeted", "competed", - "competetor", "competitor", - "competidor", "competitor", - "competiors", "competitors", - "competitie", "competitive", - "competitin", "competitions", - "competitio", "competitor", - "competiton", "competition", - "competitve", "competitive", - "compilance", "compliance", - "compilaton", "compilation", - "compinsate", "compensate", - "compitable", "compatible", - "compitance", "compliance", - "complacant", "complacent", - "complaince", "compliance", - "complaines", "complaints", - "complainig", "complaining", - "complainte", "complained", - "complation", "completion", - "compleatly", "completely", - "complecate", "complicate", - "completeds", "completes", - "completent", "complement", - "completily", "complexity", - "completito", "completion", - "completley", "completely", - "complexers", "complexes", - "complexety", "complexity", - "complianed", "compliance", - "compliants", "complaints", - "complicaed", "complicate", - "complicare", "complicate", - "complicati", "complicit", - "complicato", "complication", - "complicite", "complicate", - "complicted", 
"complicated", - "complience", "compliance", - "complimate", "complicate", - "complition", "completion", - "complusion", "compulsion", - "complusive", "compulsive", - "complusory", "compulsory", - "compolsive", "compulsive", - "compolsory", "compulsory", - "compolsury", "compulsory", - "componants", "components", - "componenet", "components", - "componsate", "compensate", - "comporable", "comparable", - "compositae", "composite", - "compositie", "composite", - "compositon", "composition", - "compraison", "comparisons", - "compramise", "compromise", - "comprassem", "compress", - "comprehand", "comprehend", - "compresion", "compression", - "compresors", "compressor", - "compresser", "compressor", - "compressio", "compressor", - "compresson", "compression", - "comprihend", "comprehend", - "comprimise", "compromise", - "compromiss", "compromises", - "compromize", "compromise", - "compromsie", "compromises", - "comprossor", "compressor", - "compteting", "completing", - "comptetion", "completion", - "compulisve", "compulsive", - "compulosry", "compulsory", - "compulsary", "compulsory", - "compulsery", "compulsory", - "compulsing", "compulsion", - "compulsivo", "compulsion", - "compulsury", "compulsory", - "compuslion", "compulsion", - "compuslive", "compulsive", - "compuslory", "compulsory", - "compustion", "compulsion", - "computanti", "computation", - "conatiners", "containers", - "concedendo", "conceded", - "concedered", "conceded", - "conceitual", "conceptual", - "concentate", "concentrate", - "concenting", "connecting", - "conceptial", "conceptual", - "conceptuel", "conceptual", - "concersion", "concession", - "concesions", "concession", - "concidered", "considered", - "conciously", "consciously", - "concission", "concession", - "conclsuion", "concussion", - "conclusies", "conclusive", - "conclution", "conclusion", - "concorrent", "concurrent", - "concsience", "conscience", - "conculsion", "conclusion", - "conculsive", "conclusive", - "concurment", "concurrent", - "concurrant", "concurrent", - "concurrect", "concurrent", - "concusions", "concussion", - "concusison", "concussions", - "condamning", "condemning", - "condemming", "condemning", - "condencing", "condemning", - "condenming", "condemning", - "condensend", "condensed", - "condidtion", "condition", - "conditinal", "conditional", - "conditiner", "conditioner", - "conditiond", "conditioned", - "conditiong", "conditioning", - "condmening", "condemning", - "conduiting", "conducting", - "conencting", "connecting", - "conenction", "connection", - "conenctors", "connectors", - "conesencus", "consensus", - "confedarcy", "confederacy", - "confedence", "conference", - "confedercy", "confederacy", - "conferance", "conference", - "conferenze", "conference", - "conferming", "confirming", - "confernece", "conferences", - "confessino", "confessions", - "confidance", "confidence", - "confidenly", "confidently", - "confidense", "confidence", - "confidenty", "confidently", - "conflcting", "conflating", - "conflicing", "conflicting", - "conflictos", "conflicts", - "confliting", "conflating", - "confriming", "confirming", - "confussion", "confession", - "congratule", "congratulate", - "congresman", "congressman", - "congresmen", "congressmen", - "congressen", "congressmen", - "conjecutre", "conjecture", - "conjuction", "conjunction", - "conjuncion", "conjunction", - "conlcusion", "conclusion", - "conncetion", "connections", - "conneciton", "connection", - "connecties", "connects", - "connectins", "connects", - "connectivy", "connectivity", - 
"connectpro", "connector", - "conneticut", "connecticut", - "connotaion", "connotation", - "conpsiracy", "conspiracy", - "conqeuring", "conquering", - "conqouring", "conquering", - "conquerers", "conquerors", - "conquoring", "conquering", - "consciense", "conscience", - "consciouly", "consciously", - "consdiered", "considered", - "consending", "consenting", - "consensuel", "consensual", - "consenusal", "consensual", - "consequece", "consequence", - "consequnce", "consequence", - "conservare", "conserve", - "conservato", "conservation", - "conservice", "conserve", - "conservies", "conserve", - "conservite", "conserve", - "consicence", "conscience", - "consideras", "considers", - "consideret", "considerate", - "consipracy", "conspiracy", - "consistant", "consistent", - "consistens", "consists", - "consisteny", "consistency", - "consitency", "consistency", - "consituted", "constituted", - "conslutant", "consultant", - "consluting", "consulting", - "consolidad", "consolidated", - "consonents", "consonants", - "consorcium", "consortium", - "conspirace", "conspiracies", - "conspiricy", "conspiracy", - "conspriacy", "conspiracy", - "constaints", "constraints", - "constatnly", "constantly", - "constently", "constantly", - "constitude", "constitute", - "constitued", "constitute", - "constituem", "constitute", - "constituer", "constitute", - "constitues", "constitutes", - "constituie", "constitute", - "constituit", "constitute", - "constitutn", "constituents", - "constituye", "constitute", - "constnatly", "constantly", - "constracts", "constructs", - "constraits", "constraints", - "constransi", "constraints", - "constrants", "constraints", - "construced", "constructed", - "constructo", "construction", - "construint", "constraint", - "construits", "constructs", - "construted", "constructed", - "consueling", "consulting", - "consultata", "consultant", - "consultate", "consultant", - "consultati", "consultant", - "consultato", "consultation", - "consultent", "consultant", - "consumated", "consummated", - "consumbale", "consumables", - "consuments", "consumes", - "consumirem", "consumerism", - "consumires", "consumerism", - "consumirse", "consumerism", - "consumiste", "consumes", - "consumpion", "consumption", - "contaction", "contacting", - "contageous", "contagious", - "contagiosa", "contagious", - "contagioso", "contagious", - "contaigous", "contagious", - "containors", "containers", - "contaminen", "containment", - "contanting", "contacting", - "contection", "contention", - "contectual", "contextual", - "conteiners", "contenders", - "contempate", "contemplate", - "contemplat", "contempt", - "contempory", "contemporary", - "contenants", "continents", - "contencion", "contention", - "contendors", "contenders", - "contenents", "continents", - "conteneurs", "contenders", - "contengent", "contingent", - "contension", "contention", - "contentino", "contention", - "contentios", "contentious", - "contentous", "contentious", - "contestais", "contests", - "contestans", "contests", - "contestase", "contests", - "contestion", "contention", - "contestors", "contests", - "contextful", "contextual", - "contextuel", "contextual", - "contextura", "contextual", - "contianers", "containers", - "contianing", "containing", - "contibuted", "contributed", - "contibutes", "contributes", - "contigents", "continents", - "contigious", "contagious", - "contignent", "contingent", - "continants", "continents", - "continenal", "continental", - "continenet", "continents", - "contineous", "continuous", - "continetal", 
"continental", - "contingecy", "contingency", - "contingeny", "contingency", - "continient", "contingent", - "continious", "continuous", - "continiuty", "continuity", - "contintent", "contingent", - "continualy", "continually", - "continuare", "continue", - "continuati", "continuity", - "continuato", "continuation", - "continuent", "contingent", - "continuety", "continuity", - "continunes", "continents", - "continuons", "continuous", - "continutiy", "continuity", - "continuuum", "continuum", - "contitnent", "contingent", - "contiuning", "containing", - "contiunity", "continuity", - "contorller", "controllers", - "contracing", "contracting", - "contractar", "contractor", - "contracter", "contractor", - "contractin", "contraction", - "contractos", "contracts", - "contradice", "contradicted", - "contradics", "contradicts", - "contredict", "contradict", - "contribued", "contributed", - "contribuem", "contribute", - "contribuer", "contribute", - "contribues", "contributes", - "contribuie", "contribute", - "contribuit", "contribute", - "contributo", "contribution", - "contributs", "contributes", - "contribuye", "contribute", - "contricted", "contracted", - "contridict", "contradict", - "contriubte", "contributes", - "controlelr", "controllers", - "controlers", "controls", - "controling", "controlling", - "controlles", "controls", - "controvery", "controversy", - "controvesy", "controversy", - "contrubite", "contributes", - "contrubute", "contribute", - "contuining", "continuing", - "contuinity", "continuity", - "convaluted", "convoluted", - "convcition", "convictions", - "conveinent", "convenient", - "conveluted", "convoluted", - "convencion", "convention", - "conveniant", "convenient", - "conveniece", "convenience", - "convenince", "convenience", - "convential", "conventional", - "converesly", "conversely", - "convergens", "converse", - "converison", "conversions", - "converning", "converting", - "conversare", "converse", - "conversino", "conversions", - "conversley", "conversely", - "conversoin", "conversions", - "conversons", "conversions", - "convertion", "conversion", - "convertire", "converter", - "converying", "converting", - "conveyered", "conveyed", - "conviccion", "conviction", - "conviciton", "conviction", - "convienent", "convenient", - "conviluted", "convoluted", - "convincted", "convince", - "convinsing", "convincing", - "convinving", "convincing", - "convoluded", "convoluted", - "convoulted", "convoluted", - "convulated", "convoluted", - "convuluted", "convoluted", - "cooperatve", "cooperative", - "coordenate", "coordinate", - "coordiante", "coordinate", - "coordinare", "coordinate", - "coordinato", "coordination", - "coordinats", "coordinates", - "coordonate", "coordinate", - "cooridnate", "coordinate", - "copehnagen", "copenhagen", - "copenaghen", "copenhagen", - "copenahgen", "copenhagen", - "copengagen", "copenhagen", - "copengahen", "copenhagen", - "copenhagan", "copenhagen", - "copenhague", "copenhagen", - "copenhagun", "copenhagen", - "copenhaven", "copenhagen", - "copenhegan", "copenhagen", - "copyrighed", "copyrighted", - "copyrigted", "copyrighted", - "corinthans", "corinthians", - "corinthias", "corinthians", - "corinthins", "corinthians", - "cornmitted", "committed", - "corporatie", "corporate", - "corralated", "correlated", - "corralates", "correlates", - "correccion", "correction", - "correciton", "corrections", - "correcters", "correctors", - "correctess", "correctness", - "correctivo", "correction", - "correctons", "corrections", - "corregated", "correlated", - 
"correkting", "correcting", - "correlatas", "correlates", - "correlatie", "correlated", - "correlatos", "correlates", - "correspend", "correspond", - "corrilated", "correlated", - "corrilates", "correlates", - "corrispond", "correspond", - "corrolated", "correlated", - "corrolates", "correlates", - "corrospond", "correspond", - "corrpution", "corruption", - "corrulates", "correlates", - "corrupcion", "corruption", - "cosmeticas", "cosmetics", - "cosmeticos", "cosmetics", - "costumized", "customized", - "counceling", "counseling", - "councellor", "councillor", - "councelors", "counselors", - "councilers", "councils", - "counselers", "counselors", - "counsellng", "counselling", - "counsilers", "counselors", - "counsiling", "counseling", - "counsilors", "counselors", - "counsolers", "counselors", - "counsoling", "counseling", - "countepart", "counteract", - "counteratk", "counteract", - "counterbat", "counteract", - "countercat", "counteract", - "countercut", "counteract", - "counteries", "counters", - "countoring", "countering", - "countryies", "countryside", - "countrying", "countering", - "courcework", "coursework", - "coursefork", "coursework", - "courthosue", "courthouse", - "courtrooom", "courtroom", - "cousnelors", "counselors", - "coutneract", "counteract", - "coutnering", "countering", - "covenental", "covenant", - "cranberrry", "cranberry", - "creationis", "creations", - "creationsm", "creationism", - "creationst", "creationist", - "creativily", "creatively", - "creativley", "creatively", - "credibilty", "credibility", - "creeperest", "creepers", - "crimanally", "criminally", - "criminalty", "criminally", - "criminalul", "criminally", - "criticable", "critical", - "criticarlo", "critical", - "criticiing", "criticising", - "criticisim", "criticism", - "criticisme", "criticise", - "criticisng", "criticising", - "criticists", "critics", - "criticisze", "criticise", - "criticizms", "criticisms", - "criticizng", "criticizing", - "critisiced", "criticized", - "critisicms", "criticisms", - "critisicsm", "criticisms", - "critisiscm", "criticisms", - "critisisms", "criticisms", - "critisizes", "criticises", - "critisizms", "criticisms", - "critiziced", "criticized", - "critizised", "criticized", - "critizisms", "criticisms", - "critizized", "criticized", - "crocodille", "crocodile", - "crossfiter", "crossfire", - "crutchetts", "crutches", - "crystalens", "crystals", - "crystalisk", "crystals", - "crystallis", "crystals", - "cuatiously", "cautiously", - "culterally", "culturally", - "cultrually", "culturally", - "culumative", "cumulative", - "culutrally", "culturally", - "cumbersone", "cumbersome", - "cumbursome", "cumbersome", - "cumpolsory", "compulsory", - "cumulitive", "cumulative", - "currancies", "currencies", - "currenctly", "currency", - "currenices", "currencies", - "currentfps", "currents", - "currentlys", "currents", - "currentpos", "currents", - "currentusa", "currents", - "curriculem", "curriculum", - "curriculim", "curriculum", - "curriences", "currencies", - "curroption", "corruption", - "custimized", "customized", - "customzied", "customized", - "custumized", "customized", - "cutscences", "cutscene", - "cutscenses", "cutscene", - "dangerouly", "dangerously", - "dealerhsip", "dealerships", - "deathamtch", "deathmatch", - "deathmacth", "deathmatch", - "debateable", "debatable", - "decembeard", "december", - "decendants", "descendants", - "decendents", "descendants", - "decideable", "decidable", - "deciptions", "depictions", - "decisiones", "decisions", - "declarasen", 
"declares", - "declaraste", "declares", - "declaremos", "declares", - "decomposit", "decompose", - "decoracion", "decoration", - "decorativo", "decoration", - "decoritive", "decorative", - "decroative", "decorative", - "decsending", "descending", - "dedicacion", "dedication", - "dedikation", "dedication", - "deducatble", "deductible", - "deducitble", "deductible", - "defacation", "defamation", - "defamating", "defamation", - "defanitely", "definately", - "defelction", "deflection", - "defendeers", "defender", - "defendents", "defendants", - "defenderes", "defenders", - "defenesman", "defenseman", - "defenselss", "defenseless", - "defensivly", "defensively", - "defianetly", "definately", - "defiantely", "definately", - "defiantley", "definately", - "defibately", "definately", - "deficately", "definately", - "deficiancy", "deficiency", - "deficience", "deficiencies", - "deficienct", "deficient", - "deficienty", "deficiency", - "defiintely", "definately", - "definaetly", "definately", - "definaitly", "definately", - "definaltey", "definately", - "definataly", "definately", - "definateky", "definately", - "definately", "definitely", - "definatily", "definately", - "defination", "definition", - "definative", "definitive", - "definatlly", "definately", - "definatrly", "definately", - "definayely", "definately", - "defineatly", "definately", - "definetaly", "definately", - "definetely", "definitely", - "definetily", "definately", - "definetlly", "definetly", - "definettly", "definately", - "definicion", "definition", - "definietly", "definitely", - "definining", "defining", - "definitaly", "definately", - "definiteyl", "definitly", - "definitivo", "definition", - "definitley", "definitely", - "definitlly", "definitly", - "definitlry", "definitly", - "definitlty", "definitly", - "definjtely", "definately", - "definltely", "definately", - "definotely", "definately", - "definstely", "definately", - "defintaley", "definately", - "defintiely", "definitely", - "defintiion", "definitions", - "definutely", "definately", - "deflaction", "deflection", - "defleciton", "deflection", - "deflektion", "deflection", - "defniately", "definately", - "degenarate", "degenerate", - "degenerare", "degenerate", - "degenerite", "degenerate", - "degoratory", "derogatory", - "degraderad", "degraded", - "dehydraded", "dehydrated", - "dehyrdated", "dehydrated", - "deifnately", "definately", - "deisgnated", "designated", - "delaership", "dealership", - "delearship", "dealership", - "delegaties", "delegate", - "delegative", "delegate", - "delfection", "deflection", - "delibarate", "deliberate", - "deliberant", "deliberate", - "delibirate", "deliberate", - "deligthful", "delightful", - "deliverate", "deliberate", - "deliverees", "deliveries", - "deliviered", "delivered", - "deliviring", "delivering", - "delporable", "deplorable", - "delpoyment", "deployment", - "delutional", "delusional", - "dementieva", "dementia", - "deminsions", "dimensions", - "democracis", "democracies", - "democracts", "democrat", - "democratas", "democrats", - "democrates", "democrats", - "demograhic", "demographic", - "demographs", "demographics", - "demograpic", "demographic", - "demolation", "demolition", - "demolicion", "demolition", - "demolision", "demolition", - "demolitian", "demolition", - "demoliting", "demolition", - "demoloshed", "demolished", - "demolution", "demolition", - "demonished", "demolished", - "demonstate", "demonstrate", - "demonstras", "demonstrates", - "demorcracy", "democracy", - "denegerate", "degenerate", - "denominato", 
"denomination", - "denomintor", "denominator", - "deocrative", "decorative", - "deomcratic", "democratic", - "deparments", "departments", - "departmens", "departments", - "departmnet", "departments", - "depcitions", "depictions", - "depdending", "depending", - "depencency", "dependency", - "dependance", "dependence", - "dependancy", "dependency", - "dependandt", "dependant", - "dependends", "depended", - "dependened", "depended", - "dependenta", "dependant", - "dependente", "dependence", - "depicitons", "depictions", - "deplorabel", "deplorable", - "deplorabil", "deplorable", - "deplorible", "deplorable", - "deplyoment", "deployment", - "depolyment", "deployment", - "depositers", "deposits", - "depressief", "depressive", - "depressies", "depressive", - "deprivaton", "deprivation", - "deragotory", "derogatory", - "derivaties", "derivatives", - "deriviated", "derived", - "derivitave", "derivative", - "derivitive", "derivative", - "derogatary", "derogatory", - "derogatery", "derogatory", - "derogetory", "derogatory", - "derogitory", "derogatory", - "derogotary", "derogatory", - "derogotory", "derogatory", - "derviative", "derivative", - "descendats", "descendants", - "descendend", "descended", - "descenting", "descending", - "descerning", "descending", - "descipable", "despicable", - "descisions", "decisions", - "descriibes", "describes", - "descripton", "description", - "desginated", "designated", - "desigining", "designing", - "desireable", "desirable", - "desktopbsd", "desktops", - "despciable", "despicable", - "desperatly", "desperately", - "desperetly", "desperately", - "despicaple", "despicable", - "despicible", "despicable", - "dessicated", "desiccated", - "destinatin", "destinations", - "destinaton", "destination", - "destoryers", "destroyers", - "destorying", "destroying", - "destroyeds", "destroyers", - "destroyeer", "destroyers", - "destrucion", "destruction", - "destrucive", "destructive", - "destryoing", "destroying", - "detectarlo", "detector", - "detectaron", "detector", - "detectoare", "detector", - "determinas", "determines", - "determinig", "determining", - "determinsm", "determinism", - "deutschand", "deutschland", - "devastaded", "devastated", - "devastaing", "devastating", - "devastanti", "devastating", - "devasteted", "devastated", - "develepors", "developers", - "develoeprs", "developers", - "developmet", "developments", - "developors", "develops", - "developped", "developed", - "developres", "develops", - "develpment", "development", - "devestated", "devastated", - "devolvendo", "devolved", - "deyhdrated", "dehydrated", - "diagnosied", "diagnose", - "diagnosies", "diagnosis", - "diagnositc", "diagnostic", - "diagnossed", "diagnose", - "diagnosted", "diagnose", - "diagnotics", "diagnostic", - "diagonstic", "diagnostic", - "dichotomoy", "dichotomy", - "dicitonary", "dictionary", - "diconnects", "disconnects", - "dicovering", "discovering", - "dictateurs", "dictates", - "dictionare", "dictionaries", - "differance", "difference", - "differenly", "differently", - "differense", "differences", - "differente", "difference", - "differentl", "differential", - "differenty", "differently", - "differnece", "difference", - "difficulte", "difficulties", - "difficults", "difficulties", - "difficutly", "difficulty", - "diffuculty", "difficulty", - "diganostic", "diagnostic", - "dimensinal", "dimensional", - "dimentions", "dimensions", - "dimesnions", "dimensions", - "dimineshes", "diminishes", - "diminising", "diminishing", - "dimunitive", "diminutive", - "dinosaures", "dinosaurs", - 
"dinosaurus", "dinosaurs", - "dipections", "depictions", - "diplimatic", "diplomatic", - "diplomacia", "diplomatic", - "diplomancy", "diplomacy", - "dipolmatic", "diplomatic", - "directinla", "directional", - "directionl", "directional", - "directivos", "directions", - "directores", "directors", - "directorys", "directors", - "directsong", "directions", - "disaapoint", "disappoint", - "disagreeed", "disagreed", - "disapeared", "disappeared", - "disappeard", "disappeared", - "disappered", "disappeared", - "disappiont", "disappoint", - "disaproval", "disapproval", - "disastorus", "disastrous", - "disastrosa", "disastrous", - "disastrose", "disastrous", - "disastrosi", "disastrous", - "disastroso", "disastrous", - "disaterous", "disastrous", - "discalimer", "disclaimer", - "discapline", "discipline", - "discepline", "discipline", - "disception", "discretion", - "discharded", "discharged", - "disciplers", "disciples", - "disciplies", "disciplines", - "disciplins", "disciplines", - "disciprine", "discipline", - "disclamier", "disclaimer", - "discliamer", "disclaimer", - "disclipine", "discipline", - "disclousre", "disclosure", - "disclsoure", "disclosure", - "discograhy", "discography", - "discograpy", "discography", - "discolsure", "disclosure", - "disconenct", "disconnect", - "disconncet", "disconnects", - "disconnets", "disconnects", - "discontued", "discounted", - "discoruage", "discourages", - "discources", "discourse", - "discourgae", "discourages", - "discourges", "discourages", - "discoveres", "discovers", - "discoveryd", "discovered", - "discoverys", "discovers", - "discrecion", "discretion", - "discreddit", "discredited", - "discrepany", "discrepancy", - "discresion", "discretion", - "discreting", "discretion", - "discribing", "describing", - "discrimine", "discriminate", - "discrouage", "discourages", - "discrption", "discretion", - "discusison", "discussions", - "discusting", "discussing", - "disgracful", "disgraceful", - "disgrunted", "disgruntled", - "disgruntld", "disgruntled", - "disguisted", "disguise", - "disgustiny", "disgustingly", - "disgustosa", "disgusts", - "disgustose", "disgusts", - "disgustosi", "disgusts", - "disgustoso", "disgusts", - "dishcarged", "discharged", - "dishinored", "dishonored", - "disicpline", "discipline", - "disiplined", "disciplined", - "dislcaimer", "disclaimer", - "dismanteld", "dismantled", - "dismanting", "dismantling", - "dismentled", "dismantled", - "dispecable", "despicable", - "dispencary", "dispensary", - "dispencers", "dispenser", - "dispencing", "dispensing", - "dispensare", "dispenser", - "dispensory", "dispensary", - "dispesnary", "dispensary", - "dispicable", "despicable", - "displayfps", "displays", - "dispositon", "disposition", - "dispostion", "disposition", - "disputerad", "disputed", - "disrecpect", "disrespect", - "disrection", "discretion", - "disrepsect", "disrespect", - "disresepct", "disrespect", - "disrespekt", "disrespect", - "disription", "disruption", - "disrispect", "disrespect", - "disrputing", "disrupting", - "disruptivo", "disruption", - "disruptron", "disruption", - "dissapears", "disappears", - "dissappear", "disappear", - "disscusion", "discussion", - "dissmisive", "dismissive", - "dissodance", "dissonance", - "dissonante", "dissonance", - "dissonence", "dissonance", - "distastful", "distasteful", - "disticntly", "distinctly", - "distiction", "distinction", - "distincion", "distinction", - "distincive", "distinctive", - "distinclty", "distinctly", - "distinctie", "distinctive", - "distinctin", "distinctions", - 
"distingish", "distinguish", - "distingush", "distinguish", - "distintcly", "distinctly", - "distoriton", "distortion", - "distorsion", "distortion", - "distortian", "distortion", - "distortron", "distortion", - "distractes", "distracts", - "distractia", "district", - "distractin", "district", - "distractiv", "district", - "distration", "distortion", - "distribuem", "distribute", - "distribuer", "distribute", - "distribuie", "distribute", - "distribuit", "distribute", - "distributs", "distributors", - "distribuye", "distribute", - "distrotion", "distortion", - "distrubing", "disturbing", - "distrubtes", "distrust", - "distrubute", "distribute", - "distubring", "disturbing", - "disturbace", "disturbance", - "disturping", "disrupting", - "disucssing", "discussing", - "disucssion", "discussion", - "disurption", "disruption", - "ditributed", "distributed", - "diversifiy", "diversify", - "dividendes", "dividends", - "dividendos", "dividends", - "divideneds", "dividend", - "divinition", "divination", - "divinitory", "divinity", - "divisiones", "divisions", - "dobulelift", "doublelift", - "doccuments", "documents", - "documentry", "documentary", - "dogmatisch", "dogmatic", - "dolphinese", "dolphins", - "domianting", "dominating", - "domimation", "domination", - "dominacion", "domination", - "dominaters", "dominates", - "donwgraded", "downgraded", - "donwloaded", "downloaded", - "donwvoters", "downvoters", - "donwvoting", "downvoting", - "doomsdaily", "doomsday", - "doubellift", "doublelift", - "doubleiift", "doublelift", - "doubleleft", "doublelift", - "doublelfit", "doublelift", - "doublerift", "doublelift", - "doulbelift", "doublelift", - "downgarded", "downgraded", - "downgrated", "downgrade", - "downlaoded", "downloaded", - "downloadas", "downloads", - "downloades", "downloads", - "downovting", "downvoting", - "downroaded", "downgraded", - "downsiders", "downsides", - "downstaris", "downstairs", - "downstiars", "downstairs", - "downtokers", "downvoters", - "downtoking", "downvoting", - "downtraded", "downgraded", - "downviting", "downvoting", - "downvotear", "downvoters", - "downvoteas", "downvoters", - "downvoteds", "downvoters", - "downvotees", "downvoters", - "downvotesd", "downvoters", - "downvotess", "downvoters", - "downvotest", "downvoters", - "downvoteur", "downvoters", - "downvoties", "downvoters", - "downvotres", "downvoters", - "downvotted", "downvote", - "downvottes", "downvoters", - "downwoters", "downvoters", - "downwoting", "downvoting", - "drasticaly", "drastically", - "drasticlly", "drastically", - "draughtman", "draughtsman", - "dumbbellls", "dumbbells", - "dumbfouded", "dumbfounded", - "dumbfouned", "dumbfounded", - "dungeoness", "dungeons", - "dupilcates", "duplicates", - "duplicants", "duplicates", - "duplicatas", "duplicates", - "duplicitas", "duplicates", - "duplifaces", "duplicates", - "durabiltiy", "durability", - "dyamically", "dynamically", - "dynamicaly", "dynamically", - "dynamicdns", "dynamics", - "dynamiclly", "dynamically", - "dynamicpsf", "dynamics", - "dynamitage", "dynamite", - "dysfuncion", "dysfunction", - "earhtbound", "earthbound", - "earthqauke", "earthquake", - "earthquack", "earthquake", - "earthquaks", "earthquakes", - "earthquate", "earthquake", - "earthqukes", "earthquakes", - "easthetics", "aesthetics", - "ecoligical", "ecological", - "ecomonical", "economical", - "econimical", "economical", - "econimists", "economists", - "economicas", "economics", - "economicos", "economics", - "economicus", "economics", - "economisch", "economic", - 
"economisit", "economists", - "effeciency", "efficiency", - "effectivly", "effectively", - "efficeincy", "efficiency", - "efficently", "efficiently", - "efficiancy", "efficiency", - "efficienct", "efficient", - "efficienty", "efficiently", - "egotistcal", "egotistical", - "ehtnically", "ethnically", - "ejaculaion", "ejaculation", - "ejaculatie", "ejaculate", - "ejaculatin", "ejaculation", - "ejaculaton", "ejaculation", - "ejaculatte", "ejaculate", - "electircal", "electrical", - "electivite", "elective", - "electoraat", "electorate", - "electorale", "electorate", - "electorite", "electorate", - "electornic", "electronic", - "electrican", "electrician", - "electriciy", "electricity", - "electricty", "electricity", - "electrinic", "electrician", - "electroate", "electorate", - "electrodan", "electron", - "electroinc", "electron", - "electrolye", "electrolytes", - "electroman", "electron", - "electroncs", "electrons", - "electrones", "electrons", - "electronik", "election", - "electronis", "electronics", - "electronix", "election", - "elemantary", "elementary", - "elementery", "elementary", - "elementray", "elementary", - "eleminated", "eliminated", - "elephantes", "elephants", - "elephantis", "elephants", - "elephantos", "elephants", - "elephantus", "elephants", - "eletricity", "electricity", - "elimanates", "eliminates", - "elimenates", "eliminates", - "elimentary", "elementary", - "elimimates", "eliminates", - "eliminaste", "eliminates", - "eliminatin", "elimination", - "eliminaton", "elimination", - "eliminster", "eliminates", - "ellipitcal", "elliptical", - "ellipsical", "elliptical", - "ellipticle", "elliptical", - "ellitpical", "elliptical", - "ellpitical", "elliptical", - "eloquantly", "eloquently", - "eloquintly", "eloquently", - "emapthetic", "empathetic", - "embarassed", "embarrassed", - "embarassig", "embarassing", - "embarrased", "embarrassed", - "embarrases", "embarrassed", - "embezelled", "embezzled", - "emblamatic", "emblematic", - "embodyment", "embodiment", - "emergenies", "emergencies", - "emmigrated", "emigrated", - "emminently", "eminently", - "emmisaries", "emissaries", - "emobdiment", "embodiment", - "emotionaly", "emotionally", - "empahsized", "emphasized", - "empahsizes", "emphasizes", - "empathatic", "empathetic", - "emphacized", "emphasized", - "emphatetic", "empathetic", - "emphatised", "emphasized", - "emphatized", "emphasized", - "emphatizes", "emphasizes", - "emphazised", "emphasized", - "emphazises", "emphasizes", - "emphesized", "emphasized", - "emphesizes", "emphasizes", - "emphisized", "emphasized", - "emphisizes", "emphasizes", - "empiricaly", "empirically", - "employeers", "employees", - "employeurs", "employer", - "emprisoned", "imprisoned", - "encahnting", "enchanting", - "enchancing", "enchanting", - "enchanging", "enchanting", - "enchantent", "enchantment", - "enchantmet", "enchantments", - "encompases", "encompasses", - "encounterd", "encountered", - "encountred", "encountered", - "encouraing", "encouraging", - "encoutners", "encounters", - "encription", "encryption", - "encrpytion", "encryption", - "encyrption", "encryption", - "endlessley", "endlessly", - "endolithes", "endoliths", - "enforceing", "enforcing", - "engagemnet", "engagements", - "engagemnts", "engagements", - "engieneers", "engineers", - "enginereed", "engineered", - "enivitable", "inevitable", - "enlargment", "enlargement", - "enlighment", "enlighten", - "enlightend", "enlightened", - "enlightned", "enlightened", - "enrolement", "enrollment", - "enrollemnt", "enrollment", - 
"enterpirse", "enterprise", - "enterprice", "enterprise", - "enterpries", "enterprises", - "enterprize", "enterprise", - "enterprsie", "enterprises", - "enterrpise", "enterprises", - "entertaing", "entertaining", - "enthically", "ethnically", - "enthisiast", "enthusiast", - "enthuiasts", "enthusiast", - "enthuisast", "enthusiasts", - "enthusiams", "enthusiasm", - "enthusiant", "enthusiast", - "enthusiats", "enthusiast", - "enthusiest", "enthusiast", - "enthusists", "enthusiasts", - "envelopped", "envelope", - "enveloppen", "envelope", - "enveloppes", "envelope", - "enviorment", "environment", - "enviroment", "environment", - "environmet", "environments", - "equiavlent", "equivalents", - "equilavent", "equivalent", - "equilibium", "equilibrium", - "equilibrim", "equilibrium", - "equilibrum", "equilibrium", - "equippment", "equipment", - "equitorial", "equatorial", - "equivalant", "equivalent", - "equivalnce", "equivalence", - "equivalnet", "equivalents", - "equivelant", "equivalent", - "equivelent", "equivalent", - "equivilant", "equivalent", - "equivilent", "equivalent", - "equivlaent", "equivalents", - "equivolent", "equivalent", - "eratically", "erratically", - "escalative", "escalate", - "escavation", "escalation", - "esitmation", "estimation", - "esoterisch", "esoteric", - "especailly", "especially", - "espeically", "especially", - "espressino", "espresso", - "espression", "espresso", - "essencials", "essentials", - "essensials", "essentials", - "essentails", "essentials", - "essentialy", "essentially", - "essentiels", "essentials", - "essentuals", "essentials", - "estabishes", "establishes", - "estimacion", "estimation", - "estimativo", "estimation", - "estination", "estimation", - "ethicallly", "ethically", - "ethincally", "ethnically", - "ethnicites", "ethnicities", - "ethnicitiy", "ethnicity", - "euphorical", "euphoria", - "euphorisch", "euphoric", - "euthanaisa", "euthanasia", - "euthanazia", "euthanasia", - "euthanesia", "euthanasia", - "evaluacion", "evaluation", - "evalutaion", "evaluation", - "evaulating", "evaluating", - "evaulation", "evaluation", - "eventaully", "eventually", - "eventially", "eventually", - "everyoneis", "everyones", - "exacberate", "exacerbated", - "exagerated", "exaggerated", - "exagerates", "exaggerates", - "exagerrate", "exaggerate", - "exaggarate", "exaggerate", - "exaggurate", "exaggerate", - "exahusting", "exhausting", - "exahustion", "exhaustion", - "examinated", "examined", - "examinerad", "examined", - "exapansion", "expansion", - "exapnsions", "expansions", - "exauhsting", "exhausting", - "exauhstion", "exhaustion", - "excecuting", "executing", - "excecution", "execution", - "exceedigly", "exceedingly", - "exceedinly", "exceedingly", - "excellance", "excellence", - "excellenet", "excellence", - "excellenze", "excellence", - "excerising", "exercising", - "excessivly", "excessively", - "exchangees", "exchanges", - "excitiment", "excitement", - "exclsuives", "exclusives", - "exclusivas", "exclusives", - "exclusivly", "exclusively", - "exclusivos", "exclusives", - "exclusivty", "exclusivity", - "exclussive", "exclusives", - "exclusvies", "exclusives", - "excpetions", "exceptions", - "exculsives", "exclusives", - "exculsivly", "exclusively", - "execptions", "exceptions", - "exectuable", "executable", - "exectuions", "executions", - "exectuives", "executives", - "execusions", "executions", - "executabil", "executable", - "executible", "executable", - "executiner", "executioner", - "executings", "executions", - "executivas", "executives", - 
"exeedingly", "exceedingly", - "exepmtions", "exemptions", - "exeptional", "exceptional", - "exercicing", "exercising", - "exercizing", "exercising", - "exersicing", "exercising", - "exersising", "exercising", - "exersizing", "exercising", - "exerternal", "external", - "exeuctions", "executions", - "exhasuting", "exhausting", - "exhasution", "exhaustion", - "exhaustivo", "exhaustion", - "exhibicion", "exhibition", - "exhibitons", "exhibits", - "exhuasting", "exhausting", - "exhuastion", "exhaustion", - "exibitions", "exhibitions", - "exictement", "excitement", - "exipration", "expiration", - "existantes", "existent", - "existenial", "existential", - "existental", "existential", - "exlcusives", "exclusives", - "exorbatant", "exorbitant", - "exorbatent", "exorbitant", - "exorbidant", "exorbitant", - "exorbirant", "exorbitant", - "exorbitent", "exorbitant", - "expalining", "explaining", - "expanisons", "expansions", - "expansivos", "expansions", - "expanssion", "expansions", - "expantions", "expansions", - "expecially", "especially", - "expectaion", "expectation", - "expectansy", "expectancy", - "expectency", "expectancy", - "expections", "exceptions", - "expedetion", "expedition", - "expedicion", "expedition", - "expeditivo", "expedition", - "expeiments", "experiments", - "expemtions", "exemptions", - "expendeble", "expendable", - "expendible", "expendable", - "expensable", "expendable", - "expentancy", "expectancy", - "expereince", "experience", - "experement", "experiment", - "experiance", "experience", - "experieced", "experienced", - "experieces", "experiences", - "experiemnt", "experiment", - "experiened", "experienced", - "experiense", "experiences", - "expermient", "experiments", - "experssion", "expression", - "expextancy", "expectancy", - "expidetion", "expedition", - "expierence", "experience", - "expination", "expiration", - "expirement", "experiment", - "explanatin", "explanations", - "explicatia", "explicit", - "explicatie", "explicit", - "explicatif", "explicit", - "explicatii", "explicit", - "explicetly", "explicitly", - "explicilty", "explicitly", - "explioting", "exploiting", - "exploiding", "exploiting", - "exploition", "exploiting", - "explorarea", "explorer", - "exploreres", "explorers", - "explosivas", "explosives", - "explossion", "explosions", - "explossive", "explosives", - "explosvies", "explosives", - "explotions", "explosions", - "explusions", "explosions", - "expodition", "exposition", - "expoliting", "exploiting", - "expolsions", "explosions", - "expolsives", "explosives", - "exponental", "exponential", - "exposicion", "exposition", - "expositivo", "exposition", - "expotition", "exposition", - "exprensive", "expressive", - "expresions", "expression", - "expresison", "expressions", - "expressens", "expresses", - "expressief", "expressive", - "expressley", "expressly", - "expriation", "expiration", - "extensivly", "extensively", - "extentions", "extensions", - "exterioara", "exterior", - "exterioare", "exterior", - "extermally", "externally", - "extermists", "extremists", - "extraccion", "extraction", - "extractivo", "extraction", - "extractnow", "extraction", - "extradtion", "extraction", - "extremaste", "extremes", - "extremeley", "extremely", - "extremelly", "extremely", - "extrememly", "extremely", - "extremests", "extremists", - "extremised", "extremes", - "extremisim", "extremism", - "extremisme", "extremes", - "extremiste", "extremes", - "extrenally", "externally", - "extrimists", "extremists", - "eyeballers", "eyeballs", - "fabriacted", "fabricated", - 
"fabricatie", "fabricated", - "faciliated", "facilitated", - "facilitait", "facilitate", - "facilitant", "facilitate", - "facilitare", "facilitate", - "facisnated", "fascinated", - "facitilies", "facilities", - "facsinated", "fascinated", - "fahernheit", "fahrenheit", - "fahrenhiet", "fahrenheit", - "fallatious", "fallacious", - "fallicious", "fallacious", - "falshbacks", "flashbacks", - "familiarty", "familiarity", - "familiarze", "familiarize", - "fanaticals", "fanatics", - "fanfaction", "fanfiction", - "fanfcition", "fanfiction", - "fanficiton", "fanfiction", - "fanserivce", "fanservice", - "fanservise", "fanservice", - "fanservive", "fanservice", - "fantasiose", "fantasies", - "farehnheit", "fahrenheit", - "farhenheit", "fahrenheit", - "fascianted", "fascinated", - "fascinatie", "fascinated", - "fascinatin", "fascination", - "fascistisk", "fascists", - "fatalaties", "fatalities", - "favoruites", "favourites", - "favourates", "favourites", - "favouritsm", "favourites", - "favourties", "favourites", - "federacion", "federation", - "federativo", "federation", - "fellowhsip", "fellowship", - "fellowshop", "fellowship", - "feminimity", "femininity", - "feministas", "feminists", - "feminitity", "femininity", - "fermentato", "fermentation", - "fertalizer", "fertilizer", - "fertelizer", "fertilizer", - "fertilizar", "fertilizer", - "fertilzier", "fertilizer", - "fertiziler", "fertilizer", - "festivales", "festivals", - "fetishiste", "fetishes", - "ficticious", "fictitious", - "filessytem", "filesystem", - "filesytems", "filesystem", - "filmamkers", "filmmakers", - "filmmakare", "filmmakers", - "finallizes", "finalizes", - "financialy", "financially", - "fingernals", "fingernails", - "fingerpies", "fingertips", - "fingerpint", "fingerprint", - "fingertaps", "fingertips", - "fingertits", "fingertips", - "fingertops", "fingertips", - "fireballls", "fireballs", - "firefigher", "firefighter", - "firefigter", "firefighter", - "firendlies", "friendlies", - "firghtened", "frightened", - "fisionable", "fissionable", - "flashligth", "flashlight", - "flaskbacks", "flashbacks", - "flawleslly", "flawlessly", - "flexibiliy", "flexibility", - "flexibilty", "flexibility", - "flimmakers", "filmmakers", - "fluctuatie", "fluctuate", - "fluctuatin", "fluctuations", - "flutterhsy", "fluttershy", - "fluttersky", "fluttershy", - "flutterspy", "fluttershy", - "forcifully", "forcefully", - "forecfully", "forcefully", - "foreginers", "foreigners", - "foregorund", "foreground", - "foreignese", "foreigners", - "foreigness", "foreigners", - "foreignors", "foreigners", - "foreingers", "foreigners", - "forensisch", "forensic", - "foreseeble", "foreseeable", - "forgeiners", "foreigners", - "forgieners", "foreigners", - "forgivance", "forgiven", - "forgivenss", "forgiveness", - "forgotting", "forgetting", - "foriegners", "foreigners", - "formadible", "formidable", - "formalhaut", "fomalhaut", - "formallity", "formally", - "formallize", "formalize", - "formatiing", "formatting", - "formatings", "formations", - "formativos", "formations", - "formidabel", "formidable", - "formidabil", "formidable", - "formidible", "formidable", - "forminable", "formidable", - "formitable", "formidable", - "formuladas", "formulas", - "formulados", "formulas", - "forseeable", "foreseeable", - "fortelling", "foretelling", - "fortunatly", "fortunately", - "fortunetly", "fortunately", - "foundaiton", "foundations", - "foundaries", "foundries", - "foundatoin", "foundations", - "fractalers", "fractals", - "fractalius", "fractals", - "fractalpus", 
"fractals", - "fracturare", "fracture", - "fragmanted", "fragment", - "francaises", "franchises", - "franchices", "franchises", - "franchines", "franchises", - "franchizes", "franchises", - "franchsies", "franchises", - "fransiscan", "franciscan", - "franticaly", "frantically", - "franticlly", "frantically", - "fraternaty", "fraternity", - "fraternety", "fraternity", - "fraterntiy", "fraternity", - "fraturnity", "fraternity", - "fraudalent", "fraudulent", - "fraudelant", "fraudulent", - "fraudelent", "fraudulent", - "fraudolent", "fraudulent", - "fraudulant", "fraudulent", - "freedomers", "freedoms", - "freedomest", "freedoms", - "freindlies", "friendlies", - "freindship", "friendship", - "frequencey", "frequency", - "friednship", "friendships", - "friednzone", "friendzoned", - "friendhsip", "friendship", - "friendsies", "friendlies", - "friendzies", "friendlies", - "friendzond", "friendzoned", - "frientship", "friendship", - "frigthened", "frightened", - "fromatting", "formatting", - "fromidable", "formidable", - "frontlinie", "frontline", - "fruadulent", "fraudulent", - "frustraded", "frustrated", - "frustradet", "frustrates", - "frustraits", "frustrates", - "frustrants", "frustrates", - "frustratin", "frustration", - "frustrsted", "frustrates", - "fucntional", "functional", - "fulfulling", "fulfilling", - "fullfiling", "fulfilling", - "fullfilled", "fulfilled", - "fullscrean", "fullscreen", - "fulttershy", "fluttershy", - "funcitonal", "functional", - "fundametal", "fundamental", - "furstrated", "frustrated", - "furstrates", "frustrates", - "furutistic", "futuristic", - "futhermore", "furthermore", - "futurestic", "futuristic", - "futurisitc", "futuristic", - "futurustic", "futuristic", - "galvinized", "galvanized", - "garuanteed", "guaranteed", - "garuantees", "guarantees", - "gauntanamo", "guantanamo", - "gauntlents", "gauntlet", - "gauranteed", "guaranteed", - "gaurantees", "guarantees", - "gaurenteed", "guaranteed", - "gaurentees", "guarantees", - "generalice", "generalize", - "generalife", "generalize", - "generalnie", "generalize", - "generaters", "generates", - "generaties", "generate", - "generatios", "generators", - "generatons", "generators", - "generatore", "generate", - "generelize", "generalize", - "generocity", "generosity", - "generoisty", "generosity", - "generostiy", "generosity", - "geneticaly", "genetically", - "geneticlly", "genetically", - "genitalias", "genitals", - "genuinelly", "genuinely", - "geographia", "geographical", - "geogrpahic", "geographic", - "germanisch", "germanic", - "gigantisch", "gigantic", - "gimmickers", "gimmicks", - "girlfirend", "girlfriend", - "girlfreind", "girlfriend", - "girlfriens", "girlfriends", - "girlfrinds", "girlfriends", - "girlfrined", "girlfriends", - "goalkeaper", "goalkeeper", - "goalkeeprs", "goalkeeper", - "goalkepeer", "goalkeeper", - "goegraphic", "geographic", - "golakeeper", "goalkeeper", - "goldburger", "goldberg", - "goosebumbs", "goosebumps", - "goosegumps", "goosebumps", - "goosepumps", "goosebumps", - "gothenberg", "gothenburg", - "govenrment", "government", - "govermenet", "goverment", - "govermnent", "governments", - "governemnt", "government", - "governened", "governed", - "governered", "governed", - "governmant", "governmental", - "governmetn", "governments", - "governmnet", "government", - "govnerment", "government", - "govornment", "government", - "gradiating", "graduating", - "gradiation", "graduation", - "graduacion", "graduation", - "grapefriut", "grapefruit", - "grapefrukt", "grapefruit", - "graphicaly", 
"graphically", - "graphiclly", "graphically", - "gratituous", "gratuitous", - "gratiutous", "gratuitous", - "gratuidous", "gratuitous", - "gratuituos", "gratuitous", - "gratutious", "gratuitous", - "graudating", "graduating", - "graudation", "graduation", - "gravitatie", "gravitate", - "greatfully", "gratefully", - "greenhosue", "greenhouse", - "greviances", "grievances", - "grievences", "grievances", - "grilfriend", "girlfriend", - "guaduloupe", "guadalupe", - "guanatanmo", "guantanamo", - "guantamamo", "guantanamo", - "guantamano", "guantanamo", - "guantanano", "guantanamo", - "guantanemo", "guantanamo", - "guantanoma", "guantanamo", - "guantanomo", "guantanamo", - "guantonamo", "guantanamo", - "guarantess", "guarantees", - "guardiands", "guardians", - "guardianes", "guardians", - "guardianis", "guardians", - "guarenteed", "guaranteed", - "guarentees", "guarantees", - "guarnateed", "guaranteed", - "guarnatees", "guarantees", - "guarunteed", "guaranteed", - "guaruntees", "guarantees", - "guatamalan", "guatemalan", - "gunatanamo", "guantanamo", - "gunlsinger", "gunslinger", - "gunsiinger", "gunslinger", - "gunslanger", "gunslinger", - "gunsligner", "gunslinger", - "gunstinger", "gunslinger", - "gymanstics", "gymnastics", - "gymnasitcs", "gymnastics", - "gynmastics", "gymnastics", - "haemorrage", "haemorrhage", - "halloweeen", "halloween", - "hambergers", "hamburgers", - "hamburgare", "hamburger", - "hamburgesa", "hamburgers", - "hamburgles", "hamburgers", - "hamburgurs", "hamburgers", - "handcuffes", "handcuffs", - "handelbars", "handlebars", - "handicaped", "handicapped", - "handwritng", "handwriting", - "harasments", "harassments", - "hardlinked", "hardline", - "harmoniacs", "harmonic", - "harmonisch", "harmonic", - "harrasment", "harassment", - "harrassing", "harassing", - "harvasting", "harvesting", - "haversting", "harvesting", - "headhpones", "headphones", - "headphoens", "headphones", - "headquarer", "headquarter", - "headquater", "headquarter", - "headshoots", "headshot", - "healtchare", "healthcare", - "healtheast", "healthiest", - "healthyest", "healthiest", - "heapdhones", "headphones", - "heartbeart", "heartbeat", - "heartbeast", "heartbeat", - "heartborne", "heartbroken", - "heartbrake", "heartbreak", - "hearthsone", "hearthstone", - "heatlhcare", "healthcare", - "heavyweght", "heavyweight", - "heavyweigt", "heavyweight", - "hedgehodge", "hedgehog", - "heidelburg", "heidelberg", - "heigthened", "heightened", - "heistation", "hesitation", - "helathcare", "healthcare", - "helicopers", "helicopters", - "helicoptor", "helicopter", - "helicotper", "helicopters", - "helicpoter", "helicopter", - "helictoper", "helicopters", - "helikopter", "helicopter", - "hemingwary", "hemingway", - "hemingwavy", "hemingway", - "hemipshere", "hemisphere", - "hemishpere", "hemisphere", - "hemmorhage", "hemorrhage", - "hempishere", "hemisphere", - "herculeans", "hercules", - "herculeasy", "hercules", - "herculeees", "hercules", - "hesitstion", "hesitation", - "hestiation", "hesitation", - "hieghtened", "heightened", - "hierachies", "hierarchies", - "hieroglphs", "hieroglyphs", - "highalnder", "highlander", - "highlighed", "highlighted", - "highligted", "highlighted", - "highloader", "highlander", - "highpander", "highlander", - "highscholl", "highschool", - "highshcool", "highschool", - "hillarious", "hilarious", - "hinderance", "hindrance", - "hinderence", "hindrance", - "hipsterest", "hipsters", - "hispanicos", "hispanics", - "hispanicus", "hispanics", - "histarical", "historical", - "histerical", 
"historical", - "historiaan", "historians", - "historicas", "historians", - "historicly", "historical", - "historiens", "histories", - "historisch", "historic", - "hoemopathy", "homeopathy", - "hollywoood", "hollywood", - "homecuming", "homecoming", - "homeoapthy", "homeopathy", - "homeonwers", "homeowners", - "homeopahty", "homeopathy", - "homeophaty", "homeopathy", - "homeopothy", "homeopathy", - "homeothapy", "homeopathy", - "homepoathy", "homeopathy", - "homewoners", "homeowners", - "homoepathy", "homeopathy", - "homogeneos", "homogeneous", - "homogeneus", "homogeneous", - "homophibia", "homophobia", - "homophibic", "homophobic", - "homophobie", "homophobe", - "homophonia", "homophobia", - "homophopia", "homophobia", - "homophopic", "homophobic", - "homosexaul", "homosexual", - "homosexuel", "homosexual", - "honeymooon", "honeymoon", - "hopefullly", "hopefully", - "hopeleslly", "hopelessly", - "horisontal", "horizontal", - "horizantal", "horizontal", - "horizontes", "horizons", - "horiztonal", "horizontal", - "horrendeus", "horrendous", - "horriblely", "horribly", - "hospitales", "hospitals", - "hospitalty", "hospitality", - "hospitible", "hospitable", - "hsitorians", "historians", - "humanaties", "humanities", - "humanitary", "humanity", - "humiliatin", "humiliation", - "humiliaton", "humiliation", - "humilitied", "humiliated", - "humillated", "humiliated", - "hurricance", "hurricane", - "hurriganes", "hurricanes", - "hurrikanes", "hurricanes", - "hurrycanes", "hurricanes", - "hydropilic", "hydrophilic", - "hydropobic", "hydrophobic", - "hyperbolie", "hyperbole", - "hyperlobic", "hyperbolic", - "hyperlogic", "hyperbolic", - "hypertrohy", "hypertrophy", - "hypertropy", "hypertrophy", - "hyphotesis", "hypothesis", - "hypocrates", "hypocrites", - "hypocriscy", "hypocrisy", - "hypocrises", "hypocrites", - "hypocritus", "hypocrites", - "hypocrties", "hypocrites", - "hypocrytes", "hypocrites", - "hypokrites", "hypocrites", - "hypothecis", "hypothesis", - "hypotheiss", "hypotheses", - "hypothesus", "hypotheses", - "hypothises", "hypotheses", - "hypothisis", "hypothesis", - "hypothosis", "hypothesis", - "hyprocites", "hypocrites", - "hystarical", "hysterical", - "hystericly", "hysterical", - "hysteriska", "hysteria", - "ibuprofein", "ibuprofen", - "ibuprofine", "ibuprofen", - "icelandinc", "icelandic", - "idealisitc", "idealistic", - "idealogies", "ideologies", - "identicial", "identical", - "identifyed", "identified", - "identitets", "identities", - "ideolagies", "ideologies", - "ideoligies", "ideologies", - "ideologias", "ideologies", - "ideologice", "ideologies", - "ideologije", "ideologies", - "ideologins", "ideologies", - "ideologisk", "ideologies", - "ideolouges", "ideologies", - "illegalest", "illegals", - "illegallly", "illegally", - "illegimacy", "illegitimacy", - "illegitime", "illegitimate", - "illegitimt", "illegitimate", - "illimunati", "illuminati", - "illinoians", "illinois", - "illistrate", "illiterate", - "illitarate", "illiterate", - "illitirate", "illiterate", - "illumanati", "illuminati", - "illumaniti", "illuminati", - "illumianti", "illuminati", - "illumimati", "illuminati", - "illuminaci", "illuminati", - "illuminadi", "illuminati", - "illuminami", "illuminati", - "illuminazi", "illuminati", - "illuminite", "illuminati", - "illuminiti", "illuminati", - "illuminoti", "illuminati", - "illuminuti", "illuminati", - "illumniati", "illuminati", - "illumunati", "illuminati", - "illuninati", "illuminati", - "illusiones", "illusions", - "illustrant", "illustrate", - 
"illustrare", "illustrate", - "illustrato", "illustration", - "imablanced", "imbalanced", - "imablances", "imbalances", - "imaginatie", "imaginative", - "imaginaton", "imagination", - "imaginitve", "imaginative", - "imbalenced", "imbalanced", - "imbalences", "imbalances", - "imcomplete", "incomplete", - "imediately", "immediately", - "imigration", "emigration", - "immaturaty", "immaturity", - "immaturety", "immaturity", - "immedeatly", "immediately", - "immediatly", "immediately", - "immedietly", "immediately", - "immenseley", "immensely", - "immidately", "immediately", - "immigranti", "immigration", - "immigrents", "immigrants", - "immitating", "imitating", - "immobilien", "immobile", - "immobilier", "immobile", - "immobilzed", "immobile", - "immobilzer", "immobile", - "immobilzes", "immobile", - "immortales", "immortals", - "immortalis", "immortals", - "immortaliy", "immortality", - "immortalls", "immortals", - "immortalty", "immortality", - "impartirla", "impartial", - "impecabbly", "impeccably", - "impeccible", "impeccable", - "impeckable", "impeccable", - "impelments", "implements", - "imperetive", "imperative", - "imperialsm", "imperialism", - "imperialst", "imperialist", - "imperitave", "imperative", - "imperitive", "imperative", - "implaments", "implements", - "implantase", "implants", - "implausble", "implausible", - "implausibe", "implausible", - "implemenet", "implements", - "implicatia", "implicit", - "implicatie", "implicit", - "implicatii", "implicit", - "implicetly", "implicitly", - "impliciete", "implicit", - "implicilty", "implicitly", - "impliments", "implements", - "imporbable", "improbable", - "importanly", "importantly", - "importanty", "importantly", - "importence", "importance", - "importerad", "imported", - "imporvised", "improvised", - "impossable", "impossible", - "impossbily", "impossibly", - "impossibal", "impossibly", - "impossibel", "impossibly", - "impossibry", "impossibly", - "impossibul", "impossibly", - "impractial", "impractical", - "impreative", "imperative", - "impresison", "impressions", - "impressoin", "impressions", - "impressons", "impressions", - "improbabil", "improbable", - "improbible", "improbable", - "impropable", "improbable", - "improsined", "imprisoned", - "improsoned", "imprisoned", - "improvemnt", "improvement", - "improvents", "improves", - "improvized", "improvised", - "imprsioned", "imprisoned", - "impulsemos", "impulses", - "imrpovised", "improvised", - "inablility", "inability", - "inaccruate", "inaccurate", - "inadaquate", "inadequate", - "inadaquete", "inadequate", - "inadecuate", "inadequate", - "inadeguate", "inadequate", - "inadeqaute", "inadequate", - "inadequete", "inadequate", - "inadequite", "inadequate", - "inadiquate", "inadequate", - "inagurated", "inaugurated", - "inbalanced", "imbalanced", - "inbetweeen", "inbetween", - "incarnaton", "incarnation", - "incentivos", "incentives", - "inchoerent", "incoherent", - "incidentes", "incidents", - "incidently", "incidentally", - "incidentul", "incidental", - "inclreased", "increased", - "incognitio", "incognito", - "incoherant", "incoherent", - "incohorent", "incoherent", - "incorectly", "incorrectly", - "incorrecly", "incorrectly", - "incorrecty", "incorrectly", - "incorretly", "incorrectly", - "incraments", "increments", - "incredable", "incredible", - "incredably", "incredibly", - "incremetal", "incremental", - "incriments", "increments", - "inctroduce", "introduce", - "indefenite", "indefinite", - "indefinate", "indefinite", - "indefinete", "indefinite", - "indefinity", 
"indefinitely", - "indeginous", "indigenous", - "indentical", "identical", - "independet", "independent", - "indepenent", "independent", - "inderictly", "indirectly", - "indicaters", "indicates", - "indicativo", "indication", - "indicatore", "indicate", - "indicitave", "indicative", - "indicitive", "indicative", - "indiffernt", "indifferent", - "indigenius", "indigenous", - "indiginous", "indigenous", - "indigneous", "indigenous", - "indikation", "indication", - "indireclty", "indirectly", - "indirektly", "indirectly", - "individuel", "individual", - "indiviudal", "individuals", - "indivudual", "individual", - "indoensian", "indonesian", - "indonasian", "indonesian", - "indoneisan", "indonesian", - "indonesean", "indonesian", - "indonesien", "indonesian", - "indonesion", "indonesian", - "indonisian", "indonesian", - "indonistan", "indonesian", - "indpendent", "independent", - "industiral", "industrial", - "industires", "industries", - "industrail", "industrial", - "industrees", "industries", - "industrias", "industries", - "industriel", "industrial", - "industrija", "industrial", - "industrije", "industries", - "indviduals", "individuals", - "inefficent", "inefficient", - "ineqaulity", "inequality", - "inequailty", "inequality", - "inevatible", "inevitable", - "inevetable", "inevitable", - "inevetably", "inevitably", - "inevetible", "inevitable", - "inevidable", "inevitable", - "inevidably", "inevitably", - "inevitible", "inevitable", - "inevitibly", "inevitably", - "inevtiable", "inevitable", - "inevtiably", "inevitably", - "infallable", "infallible", - "infaltable", "inflatable", - "infeccious", "infectious", - "infecteous", "infectious", - "infectuous", "infectious", - "infedility", "infidelity", - "infektious", "infectious", - "inferioara", "inferior", - "inferioare", "inferior", - "inferiorty", "inferiority", - "inferrence", "inference", - "infestaion", "infestation", - "infestaton", "infestation", - "infestions", "infections", - "infideltiy", "infidelity", - "infidility", "infidelity", - "infiltrade", "infiltrate", - "infiltrait", "infiltrate", - "infiltrare", "infiltrate", - "infiltrase", "infiltrate", - "infinately", "infinitely", - "infinetely", "infinitely", - "infiniment", "infinite", - "infinitley", "infinitely", - "infintiely", "infinitely", - "inflamable", "inflatable", - "inflateble", "inflatable", - "inflatible", "inflatable", - "infleunced", "influenced", - "inflitrate", "infiltrate", - "influanced", "influenced", - "influances", "influences", - "influencie", "influences", - "influening", "influencing", - "influensed", "influences", - "influenser", "influences", - "influenses", "influences", - "influental", "influential", - "influented", "influenced", - "influentes", "influences", - "influneced", "influenced", - "infograhic", "infographic", - "infograpic", "infographic", - "infomation", "information", - "informable", "informal", - "informarla", "informal", - "informarle", "informal", - "informarlo", "informal", - "informatie", "informative", - "informella", "informal", - "informerad", "informed", - "informtion", "information", - "infridging", "infringing", - "infrigning", "infringing", - "infulenced", "influenced", - "infulences", "influences", - "ingenuitiy", "ingenuity", - "ingrediant", "ingredient", - "ingrediens", "ingredients", - "ingrediets", "ingredient", - "inhabitans", "inhabitants", - "inhabitats", "inhabitants", - "inherantly", "inherently", - "inherintly", "inherently", - "inheritage", "heritage", - "inhernetly", "inherently", - "inifnitely", "infinitely", - 
"initaition", "initiation", - "initalised", "initialised", - "initaliser", "initialiser", - "initalises", "initialises", - "initalisms", "initialisms", - "initalized", "initialized", - "initalizer", "initializer", - "initalizes", "initializes", - "initalling", "initialling", - "initalness", "initialness", - "initiaitve", "initiatives", - "initiaties", "initiatives", - "initiativs", "initiatives", - "initiatves", "initiatives", - "initiavite", "initiatives", - "inititaive", "initiatives", - "inititiave", "initiatives", - "initmately", "intimately", - "initmidate", "intimidate", - "inituition", "initiation", - "injustaces", "injustices", - "injusticas", "injustices", - "inmigrants", "immigrants", - "innoavtion", "innovations", - "innocentes", "innocents", - "innotation", "innovation", - "innovacion", "innovation", - "innovaiton", "innovations", - "innovatief", "innovate", - "innovaties", "innovate", - "innovativo", "innovation", - "innvoation", "innovation", - "inofficial", "unofficial", - "inpsection", "inspection", - "inquisator", "inquisitor", - "inquisidor", "inquisitor", - "inquisiter", "inquisitor", - "inquisitio", "inquisitor", - "inquisitir", "inquisitor", - "inquisiton", "inquisition", - "inquistior", "inquisitor", - "inquizitor", "inquisitor", - "inqusitior", "inquisitor", - "insensitve", "insensitive", - "insepction", "inspection", - "insistance", "insistence", - "insistente", "insistence", - "insistenze", "insistence", - "insistince", "insistence", - "insitution", "institution", - "inspeccion", "inspection", - "inspeciton", "inspections", - "inspectons", "inspections", - "inspectres", "inspectors", - "inspektion", "inspection", - "inspektors", "inspectors", - "inspiraste", "inspires", - "inspiraton", "inspiration", - "inspirerad", "inspired", - "inspireras", "inspires", - "insrugency", "insurgency", - "instabiliy", "instability", - "instabilty", "instability", - "installeer", "installer", - "installent", "installment", - "installesd", "installs", - "installion", "installing", - "instatance", "instance", - "instelling", "installing", - "instituded", "instituted", - "instituion", "institution", - "institutie", "institute", - "institutue", "instituted", - "instrament", "instrument", - "instrcutor", "instructors", - "instrucion", "instruction", - "instructer", "instructor", - "instructie", "instructed", - "instruktor", "instructor", - "instuction", "instruction", - "instuments", "instruments", - "insturcted", "instructed", - "insturctor", "instructor", - "insturment", "instrument", - "instutions", "intuitions", - "instututed", "instituted", - "insurgance", "insurgency", - "insurgancy", "insurgency", - "intangable", "intangible", - "intangeble", "intangible", - "intangibil", "intangible", - "intanjible", "intangible", - "integraded", "integrated", - "integrarla", "integral", - "integrarlo", "integral", - "integratie", "integrated", - "integreres", "interferes", - "integreted", "integrated", - "inteligent", "intelligent", - "intenseley", "intensely", - "intensitiy", "intensity", - "intentinal", "intentional", - "intentines", "intestines", - "interacive", "interactive", - "interactes", "interacts", - "interactie", "interactive", - "interactue", "interacted", - "interasted", "interacted", - "interbread", "interbreed", - "intercepto", "interception", - "intercorse", "intercourse", - "intercouse", "intercourse", - "intereacts", "interfaces", - "interected", "interacted", - "interefers", "interferes", - "interesant", "interest", - "interesing", "interesting", - "interestes", "interests", - 
"interfacce", "interfaces", - "interfears", "interferes", - "interfeers", "interferes", - "interferce", "interferes", - "interferre", "interfere", - "intergated", "integrated", - "interioara", "interior", - "interioare", "interior", - "intermedie", "intermediate", - "internetbs", "internets", - "internetes", "internets", - "internetis", "internets", - "internetts", "internets", - "internetus", "internets", - "interprate", "interpret", - "interrugum", "interregnum", - "interruped", "interrupted", - "interstela", "interstellar", - "intervalls", "intervals", - "intervalos", "intervals", - "interveign", "intervening", - "interveing", "intervening", - "interveiws", "interviews", - "intervento", "intervention", - "intervenue", "intervene", - "interveres", "interferes", - "intervieni", "interviewing", - "intervieuw", "interviews", - "interviewd", "interviewed", - "interviewr", "interviewer", - "intervines", "intervenes", - "interviwed", "interviewed", - "interviwer", "interviewer", - "interwebbs", "interwebs", - "intestents", "intestines", - "intestinas", "intestines", - "intestinos", "intestines", - "intestions", "intestines", - "intidimate", "intimidate", - "intimadate", "intimidate", - "intimatley", "intimately", - "intimiated", "intimidate", - "intimidade", "intimidated", - "intimidant", "intimidate", - "intimidare", "intimidate", - "intimitade", "intimidated", - "intimitaly", "intimately", - "intimitate", "intimidate", - "intimitely", "intimately", - "intolarant", "intolerant", - "intolerace", "intolerance", - "intolerate", "intolerant", - "intolerent", "intolerant", - "intolorant", "intolerant", - "intolorent", "intolerant", - "intorduced", "introduced", - "intorduces", "introduces", - "intorverts", "introverts", - "intoxicted", "intoxicated", - "intraverts", "introverts", - "intreguing", "intriguing", - "intricaces", "intricacies", - "intriguied", "intrigue", - "intrigured", "intrigue", - "intrinseci", "intrinsic", - "intrinsinc", "intrinsic", - "intriquing", "intriguing", - "intriuging", "intriguing", - "introdecks", "introduces", - "introdused", "introduces", - "introvents", "introverts", - "introvered", "introverted", - "introversa", "introverts", - "introverse", "introverts", - "introversi", "introverts", - "introverso", "introverts", - "introversy", "introverts", - "introveted", "introverted", - "intruduced", "introduced", - "intruduces", "introduces", - "intruiging", "intriguing", - "intruments", "instruments", - "intuitevly", "intuitively", - "intuitivly", "intuitively", - "intuitivno", "intuition", - "intutively", "intuitively", - "inumerable", "enumerable", - "inusrgency", "insurgency", - "invaderats", "invaders", - "invaildate", "invalidates", - "invairably", "invariably", - "invaldiate", "invalidates", - "invalidade", "invalidate", - "invalidare", "invalidate", - "invalubale", "invaluable", - "invalueble", "invaluable", - "invaraibly", "invariably", - "invariabil", "invariably", - "invaribaly", "invariably", - "invaulable", "invaluable", - "inveitable", "inevitable", - "inveitably", "inevitably", - "invensions", "inventions", - "inventario", "inventor", - "inventarlo", "inventor", - "inventaron", "inventor", - "inventings", "inventions", - "inventivos", "inventions", - "invertendo", "inverted", - "inverterad", "inverted", - "invertions", "inventions", - "investemnt", "investments", - "investiage", "investigate", - "investions", "inventions", - "investirat", "investigator", - "investmens", "investments", - "invicinble", "invincible", - "invididual", "individual", - "invincable", 
"invincible", - "invinceble", "invincible", - "invinicble", "invincible", - "invinsible", "invincible", - "invinvible", "invincible", - "invisibily", "invisibility", - "invitacion", "invitation", - "invitating", "invitation", - "involunary", "involuntary", - "involvment", "involvement", - "ironcially", "ironically", - "irracional", "irrational", - "irrationel", "irrational", - "irrelavant", "irrelevant", - "irrelavent", "irrelevant", - "irrelevent", "irrelevant", - "irrelivant", "irrelevant", - "irrelivent", "irrelevant", - "irrevelant", "irrelevant", - "irreverant", "irrelevant", - "irridation", "irritation", - "irriration", "irritation", - "irritacion", "irritation", - "irritaties", "irritate", - "islamisist", "islamist", - "islamistas", "islamists", - "isntalling", "installing", - "isntructed", "instructed", - "isntrument", "instrument", - "israeliens", "israelis", - "israelitas", "israelis", - "italianess", "italians", - "itnroduced", "introduced", - "jailborken", "jailbroken", - "jalibroken", "jailbroken", - "jamaicains", "jamaican", - "jamaicaman", "jamaican", - "jerusaleum", "jerusalem", - "jounralism", "journalism", - "jounralist", "journalist", - "jouranlism", "journalism", - "jouranlist", "journalist", - "journalims", "journals", - "journalits", "journals", - "journalizm", "journalism", - "journalsim", "journalism", - "journolist", "journalist", - "judegments", "judgements", - "judgemenal", "judgemental", - "judgemetal", "judgemental", - "jugdements", "judgements", - "juggarnaut", "juggernaut", - "juggeranut", "juggernaut", - "juggernath", "juggernaut", - "juggernout", "juggernaut", - "juggernuat", "juggernaut", - "juggetnaut", "juggernaut", - "jugglenaut", "juggernaut", - "juggurnaut", "juggernaut", - "justifible", "justifiable", - "juvenilles", "juvenile", - "kickstarer", "kickstarter", - "kickstartr", "kickstarter", - "kickstater", "kickstarter", - "kidnapning", "kidnapping", - "kidnappade", "kidnapped", - "killingest", "killings", - "kilometros", "kilometers", - "kilomiters", "kilometers", - "kilomoters", "kilometers", - "kilomteres", "kilometers", - "kindapping", "kidnapping", - "kingdomers", "kingdoms", - "krpytonite", "kryptonite", - "krypotnite", "kryptonite", - "krypronite", "kryptonite", - "kryptinite", "kryptonite", - "kryptolite", "kryptonite", - "kryptonyte", "kryptonite", - "krypyonite", "kryptonite", - "krytponite", "kryptonite", - "kyrptonite", "kryptonite", - "labarotory", "laboratory", - "laboratroy", "laboratory", - "laborerers", "laborers", - "laboritory", "laboratory", - "laborotory", "laboratory", - "lackbuster", "lackluster", - "lacklaster", "lackluster", - "landacapes", "landscapes", - "landingers", "landings", - "landshapes", "landscapes", - "landspaces", "landscapes", - "lannasters", "lannisters", - "lannesters", "lannisters", - "lannistars", "lannisters", - "lannsiters", "lannisters", - "lateration", "alteration", - "latitudine", "latitude", - "laughabley", "laughably", - "laughablly", "laughably", - "launchered", "launched", - "leaglizing", "legalizing", - "lectureres", "lectures", - "legalazing", "legalizing", - "legalizare", "legalize", - "legalizate", "legalize", - "legendaies", "legendaries", - "legendaris", "legendaries", - "legimitacy", "legitimacy", - "legimitate", "legitimate", - "legislatie", "legislative", - "legitamacy", "legitimacy", - "legitamate", "legitimate", - "legitamicy", "legitimacy", - "legitamite", "legitimate", - "legitemacy", "legitimacy", - "legitemate", "legitimate", - "legitimaly", "legitimacy", - "legitimicy", "legitimacy", 
- "legitimite", "legitimate", - "leiutenant", "lieutenant", - "lesbianese", "lesbians", - "lesbianest", "lesbians", - "leuitenant", "lieutenant", - "levetating", "levitating", - "liberacion", "liberation", - "liberalest", "liberate", - "liberalizm", "liberalism", - "liberalnim", "liberalism", - "liberalsim", "liberalism", - "liberarion", "liberation", - "liberaties", "liberate", - "liberatore", "liberate", - "libertania", "libertarians", - "libguistic", "linguistic", - "lietuenant", "lieutenant", - "lieutanant", "lieutenant", - "lieutanent", "lieutenant", - "lieutenent", "lieutenant", - "lifestiles", "lifestyles", - "lifestlyes", "lifestyles", - "lifesystem", "filesystem", - "lifesytles", "lifestyles", - "lifetimers", "lifetimes", - "lifetsyles", "lifestyles", - "lighhtning", "lightening", - "lightergas", "lighters", - "lighthning", "lightening", - "lighthorse", "lighthouse", - "lighthosue", "lighthouse", - "lighthours", "lighthouse", - "lightining", "lighting", - "lightneing", "lightening", - "lightnting", "lightening", - "lightrooom", "lightroom", - "lightweigt", "lightweight", - "ligitation", "litigation", - "ligthening", "lightening", - "ligthhouse", "lighthouse", - "likelyhood", "likelihood", - "limination", "limitation", - "limitacion", "limitation", - "limitaiton", "limitation", - "limitating", "limitation", - "limitativo", "limitation", - "linguisics", "linguistics", - "linguisitc", "linguistics", - "linguistcs", "linguistics", - "linguistis", "linguistics", - "linguitics", "linguistic", - "lingusitic", "linguistics", - "lingvistic", "linguistic", - "liousville", "louisville", - "listeneres", "listeners", - "literallly", "literally", - "literarely", "literary", - "literarlly", "literary", - "literatire", "literate", - "literative", "literate", - "literatute", "literate", - "lithuanina", "lithuania", - "litterally", "literally", - "liuetenant", "lieutenant", - "liveatream", "livestream", - "livelehood", "livelihood", - "liverpoool", "liverpool", - "livescream", "livestream", - "livestreem", "livestream", - "livestrems", "livestream", - "livilehood", "livelihood", - "livliehood", "livelihood", - "lobbyistes", "lobbyists", - "lockacreen", "lockscreen", - "logictical", "logistical", - "logisitcal", "logistical", - "logisticas", "logistics", - "logisticly", "logistical", - "loiusville", "louisville", - "lollipoopy", "lollipop", - "lonelyness", "loneliness", - "longevitiy", "longevity", - "lonileness", "loneliness", - "lonlieness", "loneliness", - "louieville", "louisville", - "louisiania", "louisiana", - "louisianna", "louisiana", - "louisivlle", "louisville", - "louisviile", "louisville", - "lousiville", "louisville", - "luietenant", "lieutenant", - "mabyelline", "maybelline", - "magnifient", "magnificent", - "mainpulate", "manipulate", - "mainstreem", "mainstream", - "maintaince", "maintained", - "maintaines", "maintains", - "maintainig", "maintaining", - "maintenace", "maintenance", - "maintianed", "maintained", - "maintioned", "mentioned", - "malfuncion", "malfunction", - "malpractce", "malpractice", - "managebale", "manageable", - "maneagable", "manageable", - "maneouvred", "manoeuvred", - "maneouvres", "manoeuvres", - "maneuveres", "maneuvers", - "maneuveurs", "maneuver", - "manifestas", "manifests", - "manifestes", "manifests", - "manifestus", "manifests", - "manipluate", "manipulate", - "manipualte", "manipulate", - "manipulant", "manipulate", - "manipulare", "manipulate", - "manipulted", "manipulated", - "maniuplate", "manipulate", - "mannarisms", "mannerisms", - "mannersims", 
"mannerisms", - "mannorisms", "mannerisms", - "manufacter", "manufacture", - "manufacure", "manufacture", - "manufature", "manufacture", - "maraudeurs", "marauder", - "margaritte", "margaret", - "margianlly", "marginally", - "marginaali", "marginal", - "marginable", "marginal", - "marignally", "marginally", - "marijuanna", "marijuana", - "marketting", "marketing", - "marshmalow", "marshmallow", - "masculinty", "masculinity", - "massacrare", "massacre", - "massivelly", "massively", - "masteriers", "masteries", - "masternind", "mastermind", - "masterpice", "masterpiece", - "mastrubate", "masturbate", - "mastubrate", "masturbated", - "masturabte", "masturbate", - "masturbait", "masturbate", - "masturbare", "masturbate", - "masturbeta", "masturbated", - "masturdate", "masturbate", - "materiales", "materials", - "materialsm", "materialism", - "maximazing", "maximizing", - "maximixing", "maximizing", - "mayballine", "maybelline", - "maybellene", "maybelline", - "maybellibe", "maybelline", - "maybilline", "maybelline", - "mccarthyst", "mccarthyist", - "mdifielder", "midfielder", - "meagthread", "megathread", - "meaningess", "meanings", - "meaningles", "meanings", - "meatballls", "meatballs", - "mecahnical", "mechanical", - "mecahnisms", "mechanisms", - "mechancial", "mechanical", - "mechandise", "merchandise", - "mechanichs", "mechanics", - "mechanicle", "mechanical", - "mechanicly", "mechanical", - "mechanicus", "mechanics", - "mechanincs", "mechanic", - "mechanisim", "mechanism", - "mechansims", "mechanisms", - "mechinical", "mechanical", - "mechinisms", "mechanisms", - "mediaction", "medications", - "medicacion", "medication", - "medicaiton", "medication", - "medicalert", "medicare", - "medicallly", "medically", - "medicatons", "medications", - "medicinens", "medicines", - "medicinske", "medicine", - "medicority", "mediocrity", - "medidating", "meditating", - "mediocirty", "mediocrity", - "mediocraty", "mediocrity", - "mediocrety", "mediocrity", - "mediocricy", "mediocrity", - "mediocrily", "mediocrity", - "mediocrisy", "mediocrity", - "meditacion", "medications", - "meditaiton", "meditation", - "melatonian", "melatonin", - "melatonion", "melatonin", - "mellinnium", "millennium", - "melodieuse", "melodies", - "membrances", "membrane", - "mentallity", "mentally", - "mentionnes", "mentions", - "mercenaire", "mercenaries", - "mercenares", "mercenaries", - "mercentile", "mercantile", - "merchanise", "merchandise", - "merchantos", "merchants", - "messagease", "messages", - "messagepad", "messaged", - "messenging", "messaging", - "metabalism", "metabolism", - "metabilism", "metabolism", - "metabloism", "metabolism", - "metablosim", "metabolism", - "metabolics", "metabolism", - "metabolizm", "metabolism", - "metabolsim", "metabolism", - "metalurgic", "metallurgic", - "metaphoras", "metaphors", - "metaphores", "metaphors", - "metaphyics", "metaphysics", - "meterology", "meteorology", - "methaphors", "metaphors", - "methodolgy", "methodology", - "methodoloy", "methodology", - "metrapolis", "metropolis", - "metrolopis", "metropolis", - "metropilis", "metropolis", - "metroplois", "metropolis", - "metropolin", "metropolitan", - "metropolos", "metropolis", - "metropolys", "metropolis", - "mexicanese", "mexicans", - "mexicaness", "mexicans", - "michelline", "michelle", - "micorwaves", "microwaves", - "microhpone", "microphone", - "microscoop", "microscope", - "microvaves", "microwaves", - "microvaxes", "microwaves", - "micrpohone", "microphones", - "midfeilder", "midfielder", - "midfiedler", "midfielder", - 
"midfieldes", "midfielders", - "midfielers", "midfielders", - "midfileder", "midfielder", - "midifelder", "midfielder", - "midnlessly", "mindlessly", - "migitation", "mitigation", - "migrainers", "migraines", - "miletsones", "milestones", - "milisecond", "millisecond", - "militiades", "militias", - "militiants", "militias", - "millinnium", "millennium", - "miminalist", "minimalist", - "minamilist", "minimalist", - "mindleslly", "mindlessly", - "minimazing", "minimizing", - "minimilast", "minimalist", - "minimilist", "minimalist", - "mininalist", "minimalist", - "ministeres", "ministers", - "ministerns", "ministers", - "minneaplis", "minneapolis", - "minneapols", "minneapolis", - "minnesotta", "minnesota", - "minoritets", "minorities", - "minoroties", "minorities", - "miracalous", "miraculous", - "miracluous", "miraculous", - "miracoulus", "miraculous", - "mircophone", "microphone", - "mircoscope", "microscope", - "mircowaves", "microwaves", - "misandrony", "misandry", - "miscarrage", "miscarriage", - "miscarrige", "miscarriage", - "misdemenor", "misdemeanor", - "miserabley", "miserably", - "miserablly", "miserably", - "misforture", "misfortune", - "misgoynist", "misogynist", - "misinfomed", "misinformed", - "misinterpt", "misinterpret", - "misisonary", "missionary", - "misoganist", "misogynist", - "misogenist", "misogynist", - "misoginist", "misogynist", - "misoginyst", "misogynist", - "misognyist", "misogynist", - "misogonist", "misogynist", - "misogonyst", "misogynist", - "misogyinst", "misogynist", - "misogynyst", "misogynist", - "misoygnist", "misogynist", - "mispelling", "misspelling", - "missionare", "missionaries", - "missionera", "missionary", - "missisippi", "mississippi", - "mississipi", "mississippi", - "mississppi", "mississippi", - "misspeling", "misspelling", - "misspellng", "misspelling", - "mistakedly", "mistakenly", - "mistakinly", "mistakenly", - "mistankely", "mistakenly", - "misterious", "mysterious", - "misteryous", "mysterious", - "mistreaded", "mistreated", - "misygonist", "misogynist", - "mitigaiton", "mitigation", - "moderacion", "moderation", - "moderaters", "moderates", - "moderatley", "moderately", - "moderatore", "moderate", - "moderatorn", "moderation", - "modificato", "modification", - "modifieras", "modifiers", - "modifieres", "modifiers", - "moisturier", "moisturizer", - "moleculair", "molecular", - "molestaion", "molestation", - "molestarle", "molester", - "molestarme", "molester", - "molestarse", "molester", - "molestarte", "molester", - "molestered", "molested", - "momentarly", "momentarily", - "monagomous", "monogamous", - "monetizare", "monetize", - "monitering", "monitoring", - "monogymous", "monogamous", - "monolistic", "monolithic", - "monolitich", "monolithic", - "monolopies", "monopolies", - "monolothic", "monolithic", - "monolythic", "monolithic", - "monopilies", "monopolies", - "monoploies", "monopolies", - "monopolets", "monopolies", - "monopolice", "monopolies", - "monopolios", "monopolies", - "monothilic", "monolithic", - "monsterous", "monsters", - "montioring", "monitoring", - "monumentos", "monuments", - "monumentul", "monumental", - "monumentus", "monuments", - "mormonisim", "mormonism", - "morphinate", "morphine", - "morrisette", "morissette", - "morrisound", "morrison", - "mosquitero", "mosquito", - "mosquiters", "mosquitoes", - "motherbard", "motherboard", - "motherboad", "motherboard", - "motherbord", "motherboard", - "motivaiton", "motivations", - "motiviated", "motivated", - "motorcicle", "motorcycle", - "motorcylce", "motorcycle", 
- "motorcyles", "motorcycles", - "motorollas", "motorola", - "mouthpeace", "mouthpiece", - "mouthpeice", "mouthpiece", - "movespeeed", "movespeed", - "mozzaralla", "mozzarella", - "mozzeralla", "mozzarella", - "mozzorella", "mozzarella", - "mulitation", "mutilation", - "mulitplied", "multiplied", - "mulitplier", "multiplier", - "mulitverse", "multiverse", - "multilpier", "multiplier", - "multiplaer", "multiplier", - "multiplaye", "multiply", - "multiplayr", "multiply", - "multiplays", "multiply", - "multipleye", "multiply", - "multipling", "multiplying", - "multiplyed", "multiplied", - "multiplyer", "multiple", - "multiplyng", "multiplying", - "murderered", "murdered", - "murdereres", "murderers", - "muscicians", "musicians", - "musculaire", "muscular", - "mushroooms", "mushroom", - "mutialtion", "mutilation", - "mutiliated", "mutilated", - "mutliation", "mutilation", - "mutliplied", "multiplied", - "mutliplier", "multiplier", - "mutliverse", "multiverse", - "mysogynist", "misogynist", - "mysterieus", "mysteries", - "nagivating", "navigating", - "nagivation", "navigation", - "narcassism", "narcissism", - "narcassist", "narcissist", - "narcessist", "narcissist", - "narciscism", "narcissism", - "narciscist", "narcissist", - "narcisissm", "narcissism", - "narcisisst", "narcissist", - "narcisists", "narcissist", - "narcissicm", "narcissism", - "narcissict", "narcissist", - "narcissitc", "narcissist", - "narcissits", "narcissist", - "narcoticos", "narcotics", - "narrativas", "narratives", - "narrativos", "narratives", - "narritives", "narratives", - "nashvillle", "nashville", - "nationales", "nationals", - "nationalis", "nationals", - "nationalit", "nationalist", - "nationaliy", "nationality", - "nationalty", "nationality", - "nationella", "national", - "naturually", "naturally", - "naviagting", "navigating", - "naviagtion", "navigation", - "navigatore", "navigate", - "neccessary", "necessary", - "necesarily", "necessarily", - "necessairy", "necessarily", - "necessarly", "necessary", - "necessarry", "necessary", - "necessiate", "necessitate", - "necessites", "necessities", - "neckbeared", "neckbeard", - "neckboards", "neckbeards", - "neckbreads", "neckbeards", - "neckneards", "neckbeards", - "necromacer", "necromancer", - "necromaner", "necromancer", - "needleslly", "needlessly", - "negativaty", "negativity", - "negativley", "negatively", - "negelcting", "neglecting", - "negilgence", "negligence", - "negiotated", "negotiated", - "neglacting", "neglecting", - "neglagence", "negligence", - "neglegance", "negligence", - "neglegible", "negligible", - "neglegting", "neglecting", - "neglibible", "negligible", - "neglicence", "negligence", - "neglicible", "negligible", - "neglicting", "neglecting", - "negligable", "negligible", - "negligance", "negligence", - "negligeble", "negligible", - "negligente", "negligence", - "negociated", "negotiated", - "negogiated", "negotiated", - "negoitated", "negotiated", - "negotaited", "negotiated", - "negotation", "negotiation", - "negotiaion", "negotiation", - "negotiatie", "negotiated", - "negotiatin", "negotiations", - "negotiaton", "negotiation", - "neigbhours", "neighbours", - "neighbhors", "neighbours", - "neighbords", "neighbours", - "neighbores", "neighbours", - "netowrking", "networking", - "netruality", "neutrality", - "neturality", "neutrality", - "netwroking", "networking", - "neurologia", "neurological", - "neutrailty", "neutrality", - "newletters", "newsletters", - "newlsetter", "newsletter", - "newsettler", "newsletter", - "newslatter", "newsletter", 
- "nieghbours", "neighbours", - "nightmates", "nightmares", - "nightmears", "nightmares", - "nightmeres", "nightmares", - "nigthmares", "nightmares", - "nipticking", "nitpicking", - "nitpciking", "nitpicking", - "nominacion", "nomination", - "nominatino", "nominations", - "nominativo", "nomination", - "nominatons", "nominations", - "nonsencial", "nonsensical", - "nontheless", "nonetheless", - "northerend", "northern", - "nostalgica", "nostalgia", - "nostalgija", "nostalgia", - "noteworhty", "noteworthy", - "nothingess", "nothingness", - "noticabely", "noticeably", - "noticabley", "noticeably", - "noticiably", "noticeably", - "notoriosly", "notoriously", - "novembeard", "november", - "nuetrality", "neutrality", - "nutricious", "nutritious", - "nutrientes", "nutrients", - "nutritents", "nutrients", - "nutritinal", "nutritional", - "nutritiuos", "nutritious", - "nutritivos", "nutritious", - "nutrituous", "nutritious", - "nutrutious", "nutritious", - "obatinable", "obtainable", - "obejctives", "objectives", - "obilgatory", "obligatory", - "objecitves", "objectives", - "objectivas", "objectives", - "objectivly", "objectively", - "objectivst", "objectives", - "objectivty", "objectivity", - "objektives", "objectives", - "obligitary", "obligatory", - "obligitory", "obligatory", - "observabil", "observable", - "observarse", "observers", - "observaton", "observation", - "observeras", "observers", - "observered", "observed", - "observeres", "observers", - "observible", "observable", - "obstancles", "obstacles", - "obstrucion", "obstruction", - "obstructin", "obstruction", - "obtainabie", "obtainable", - "obtaineble", "obtainable", - "obtainible", "obtainable", - "obtianable", "obtainable", - "ocasionaly", "occasionally", - "ocassional", "occasional", - "ocassioned", "occasioned", - "occaisonal", "occasional", - "occasionly", "occasional", - "occassions", "occasions", - "occational", "occasional", - "occulation", "occupation", - "occupaiton", "occupation", - "occurances", "occurrences", - "occurences", "occurrences", - "occurrance", "occurrence", - "octohedral", "octahedral", - "octohedron", "octahedron", - "offensivly", "offensively", - "offereings", "offerings", - "officailly", "officially", - "olbigatory", "obligatory", - "ominpotent", "omnipotent", - "ominscient", "omniscient", - "omnipetent", "omnipotent", - "omnipitent", "omnipotent", - "omnipotant", "omnipotent", - "omnisicent", "omniscient", - "omniverous", "omnivorous", - "omnsicient", "omniscient", - "on-premise", "on-premises", - "onmipotent", "omnipotent", - "onmiscient", "omniscient", - "operatings", "operations", - "operativne", "operative", - "operativos", "operations", - "oportunity", "opportunity", - "opponenets", "opponent", - "oppononent", "opponent", - "oppressiun", "oppressing", - "optimisitc", "optimistic", - "optimizare", "optimize", - "optimizate", "optimize", - "optimizied", "optimize", - "organicaly", "organically", - "organiclly", "organically", - "organisate", "organise", - "organische", "organise", - "organisera", "organizers", - "organisere", "organizers", - "organisert", "organizers", - "organisier", "organise", - "organisims", "organism", - "organismed", "organise", - "organismen", "organise", - "organismer", "organise", - "organismes", "organisms", - "organismus", "organisms", - "organisten", "organise", - "organiszed", "organise", - "organizaed", "organize", - "organizare", "organizer", - "organizate", "organize", - "organizors", "organizers", - "organizuje", "organize", - "organziers", "organizers", - "orientaion", 
"orientation", - "orientarla", "oriental", - "orientarlo", "oriental", - "origianlly", "originally", - "originales", "originals", - "originalet", "originated", - "originalis", "originals", - "originalty", "originality", - "orignially", "originally", - "origniated", "originated", - "origonally", "originally", - "origonated", "originated", - "ostencibly", "ostensibly", - "ostenisbly", "ostensibly", - "ostensably", "ostensibly", - "ostentibly", "ostensibly", - "ostrasiced", "ostracized", - "ostrasized", "ostracized", - "ostraziced", "ostracized", - "ostrazised", "ostracized", - "ostrecized", "ostracized", - "ostricized", "ostracized", - "ostrocized", "ostracized", - "oustanding", "outstanding", - "outcalssed", "outclassed", - "outlcassed", "outclassed", - "outnumberd", "outnumbered", - "outnumbred", "outnumbered", - "outperfoms", "outperform", - "outperfrom", "outperform", - "outpreform", "outperform", - "outrageuos", "outrageous", - "outragious", "outrageous", - "outragoues", "outrageous", - "outreagous", "outrageous", - "outsourcad", "outsourced", - "outsouring", "outsourcing", - "outsoursed", "outsourced", - "outweighes", "outweighs", - "overarcing", "overarching", - "overclockd", "overclocked", - "overcloked", "overclocked", - "overcoding", "overcoming", - "overheards", "overhead", - "overheared", "overhead", - "overhooked", "overlooked", - "overlanded", "overloaded", - "overlaoded", "overloaded", - "overlaping", "overlapping", - "overlauded", "overloaded", - "overloards", "overload", - "overlorded", "overloaded", - "overlordes", "overlords", - "overnurfed", "overturned", - "overpirced", "overpriced", - "overpowerd", "overpowered", - "overpowred", "overpowered", - "overprised", "overpriced", - "overtunned", "overturned", - "overtunred", "overturned", - "overturing", "overturn", - "overweigth", "overweight", - "overwhemed", "overwhelmed", - "overwieght", "overweight", - "overwritte", "overwrite", - "pahtfinder", "pathfinder", - "painfullly", "painfully", - "painkilers", "painkillers", - "pairlament", "parliament", - "pakistanti", "pakistani", - "paladinlst", "paladins", - "palcements", "placements", - "paleolitic", "paleolithic", - "palestinan", "palestinian", - "paltformer", "platformer", - "palyerbase", "playerbase", - "parachutte", "parachute", - "parademics", "paramedics", - "paradiggum", "paradigm", - "paragraghs", "paragraphs", - "paragrahps", "paragraphs", - "paragrapgh", "paragraphs", - "paragrpahs", "paragraphs", - "parahprase", "paraphrase", - "paralleles", "parallels", - "parallells", "parallels", - "paramadics", "paramedics", - "paramaters", "parameters", - "paramecias", "paramedics", - "parametics", "paramedics", - "parametros", "parameters", - "paramiters", "parameters", - "paramormal", "paranormal", - "paranoicas", "paranoia", - "paranomral", "paranormal", - "paranornal", "paranormal", - "parapharse", "paraphrase", - "paraphraze", "paraphrase", - "paraprhase", "paraphrase", - "parasitter", "parasite", - "parilament", "parliament", - "parituclar", "particular", - "parlaiment", "parliament", - "parliamant", "parliament", - "parliamone", "parliament", - "parliement", "parliament", - "parrallell", "parallel", - "parrallely", "parallelly", - "partiarchy", "patriarchy", - "participas", "participants", - "participat", "participants", - "participte", "participate", - "particualr", "particular", - "partiotism", "patriotism", - "passionais", "passions", - "passionale", "passionately", - "passionant", "passionate", - "passionite", "passionate", - "passivedns", "passives", - 
"passivelly", "passively", - "patenterad", "patented", - "pathfidner", "pathfinder", - "pathfindir", "pathfinder", - "pathifnder", "pathfinder", - "patientens", "patients", - "patrairchy", "patriarchy", - "patriachry", "patriarchy", - "patriarcal", "patriarchal", - "patriarhal", "patriarchal", - "patriatchy", "patriarchy", - "patriatism", "patriotism", - "patrionism", "patriotism", - "patriotics", "patriotism", - "patriotisk", "patriots", - "patroitism", "patriotism", - "patryarchy", "patriarchy", - "pedantisch", "pedantic", - "pedestiran", "pedestrian", - "pedestrain", "pedestrian", - "pedictions", "depictions", - "pedohpiles", "pedophiles", - "pedohpilia", "pedophilia", - "pedophilac", "pedophilia", - "pedophilea", "pedophilia", - "pedophilie", "pedophile", - "pedophilla", "pedophilia", - "pedophille", "pedophile", - "pedopholia", "pedophilia", - "penetraion", "penetration", - "penetratin", "penetration", - "penetraton", "penetration", - "penguinese", "penguins", - "penguiness", "penguins", - "peninsulla", "peninsula", - "penninsula", "peninsula", - "peodphiles", "pedophiles", - "peodphilia", "pedophilia", - "pepperment", "peppermint", - "pepperonni", "pepperoni", - "percantage", "percentage", - "percantile", "percentile", - "percaution", "precaution", - "percenatge", "percentages", - "percential", "percentile", - "percentige", "percentile", - "perceptoin", "perceptions", - "percession", "percussion", - "percetange", "percentages", - "percetnage", "percentages", - "percintile", "percentile", - "percission", "percussion", - "percpetion", "perceptions", - "percusions", "percussion", - "perdicting", "predicting", - "perdiction", "prediction", - "perdictive", "predictive", - "perenially", "perennially", - "perfeccion", "perfection", - "perfecxion", "perfection", - "perfektion", "perfection", - "perferable", "preferable", - "perferably", "preferably", - "perference", "preference", - "perferring", "preferring", - "perfexcion", "perfection", - "perfomance", "performance", - "performace", "performance", - "performane", "performances", - "performans", "performances", - "performens", "performers", - "performous", "performs", - "perfromers", "performers", - "perhiperal", "peripheral", - "peridinkle", "periwinkle", - "perihperal", "peripheral", - "periodisch", "periodic", - "periperhal", "peripheral", - "peripheals", "peripherals", - "peripheria", "peripheral", - "periphiral", "peripheral", - "periphreal", "peripheral", - "periphrial", "peripheral", - "peritinkle", "periwinkle", - "periwankle", "periwinkle", - "periwinkel", "periwinkle", - "periwinkie", "periwinkle", - "periwinlke", "periwinkle", - "permanenty", "permanently", - "permanetly", "permanently", - "permisions", "permission", - "permisison", "permissions", - "permissble", "permissible", - "permissibe", "permissible", - "permissons", "permissions", - "perogative", "prerogative", - "perordered", "preordered", - "perpatuate", "perpetuate", - "perpetualy", "perpetually", - "perpetuare", "perpetuate", - "persausion", "persuasion", - "persausive", "persuasive", - "persective", "respective", - "persectued", "persecuted", - "persecutie", "persecuted", - "persecutin", "persecution", - "perserving", "preserving", - "persicuted", "persecuted", - "persistant", "persistent", - "persistens", "persists", - "persoanlly", "personally", - "persocuted", "persecuted", - "personalie", "personalized", - "personalis", "personas", - "personarse", "personas", - "personatus", "personas", - "personnell", "personnel", - "perspecive", "perspective", - "perspectie", 
"perspectives", - "persuasian", "persuasion", - "persuasing", "persuasion", - "persuasivo", "persuasion", - "persuation", "persuasion", - "persucuted", "persecuted", - "persumably", "presumably", - "persussion", "persuasion", - "persvasive", "persuasive", - "perswasion", "persuasion", - "pertinante", "pertinent", - "pervailing", "prevailing", - "pervalence", "prevalence", - "pervention", "prevention", - "perversley", "perverse", - "pesitcides", "pesticides", - "pessimistc", "pessimistic", - "pessimitic", "pessimistic", - "pestacides", "pesticides", - "pestecides", "pesticides", - "pesticedes", "pesticides", - "pesticidas", "pesticides", - "pestisides", "pesticides", - "pestizides", "pesticides", - "pharamcist", "pharmacist", - "pharmacias", "pharmacist", - "pharmacyst", "pharmacist", - "pharmasist", "pharmacist", - "pharmicist", "pharmacist", - "phemonenon", "phenomenon", - "phenemenon", "phenomenon", - "phenemonal", "phenomenal", - "phenomanal", "phenomenal", - "phenomanon", "phenomenon", - "phenomemon", "phenomenon", - "phenomenen", "phenomenon", - "phenomenol", "phenomenal", - "phenomenom", "phenomenon", - "phenominon", "phenomenon", - "phenomonal", "phenomenal", - "phenomonen", "phenomenon", - "phenomonon", "phenomenon", - "phenonemal", "phenomenal", - "phenonemon", "phenomenon", - "phenonmena", "phenomena", - "philipines", "philippines", - "philippins", "philippines", - "philisophy", "philosophy", - "phillipine", "philippine", - "phillipses", "phillies", - "philosiphy", "philosophy", - "philosohpy", "philosophy", - "philosoper", "philosopher", - "philospher", "philosopher", - "philospohy", "philosophy", - "photogragh", "photograph", - "photograhs", "photographs", - "photograhy", "photography", - "photograps", "photographs", - "photograpy", "photography", - "photogrpah", "photographs", - "photoshopd", "photoshopped", - "photoshope", "photoshopped", - "phramacist", "pharmacist", - "phsyically", "physically", - "phsyicians", "physicians", - "phsyicists", "physicists", - "phsyiology", "physiology", - "phycisians", "physicians", - "phycisists", "physicists", - "phyiscally", "physically", - "phyisology", "physiology", - "physcially", "physically", - "physcology", "psychology", - "physcopath", "psychopath", - "physicials", "physicians", - "physiciens", "physicians", - "physioligy", "physiology", - "picthforks", "pitchforks", - "pinoneered", "pioneered", - "pitchferks", "pitchforks", - "pitchfolks", "pitchforks", - "pitchfords", "pitchforks", - "pitchworks", "pitchforks", - "pitckforks", "pitchforks", - "pittaburgh", "pittsburgh", - "pittsbrugh", "pittsburgh", - "placehoder", "placeholder", - "placeholdr", "placeholder", - "placeholer", "placeholder", - "placemenet", "placements", - "plagairism", "plagiarism", - "plagarisim", "plagiarism", - "plagiariam", "plagiarism", - "plagiarios", "plagiarism", - "plagiarius", "plagiarism", - "plagiarizm", "plagiarism", - "plagierism", "plagiarism", - "plaguarism", "plagiarism", - "plaigarism", "plagiarism", - "plasticosa", "plastics", - "platfarmer", "platformer", - "platformar", "platformer", - "platformie", "platformer", - "platfotmer", "platformer", - "platfromer", "platformer", - "platofrmer", "platformer", - "playaround", "playground", - "playersare", "playerbase", - "playgorund", "playground", - "playthrogh", "playthrough", - "playthrouh", "playthrough", - "playwrites", "playwrights", - "plethorian", "plethora", - "policitian", "politician", - "polinators", "pollinators", - "polishuset", "polishes", - "politessen", "politeness", - "politicain", 
"politician", - "politicaly", "politically", - "politicien", "politician", - "politicing", "politician", - "politicion", "politician", - "politickin", "politician", - "politiikan", "politician", - "politiness", "politeness", - "polititian", "politician", - "popualtion", "populations", - "populairty", "popularity", - "populaiton", "populations", - "popularaty", "popularity", - "popularest", "populate", - "popularily", "popularity", - "populaties", "populate", - "populatiry", "popularity", - "populative", "populate", - "populatoin", "populations", - "popultaion", "populations", - "pormetheus", "prometheus", - "pornograhy", "pornography", - "pornograpy", "pornography", - "pornogrphy", "pornography", - "porportion", "proportion", - "portabilty", "portability", - "portarying", "portraying", - "portoguese", "portuguese", - "portraiing", "portraying", - "portrating", "portraying", - "portrayels", "portrays", - "portugeuse", "portuguese", - "portuguise", "portuguese", - "posessions", "possessions", - "posicional", "positional", - "positevely", "positively", - "positioing", "positioning", - "positionly", "positional", - "positionne", "positioned", - "positivley", "positively", - "possesives", "possessive", - "possessers", "possesses", - "possessess", "possesses", - "possibiliy", "possibility", - "possibilty", "possibility", - "possissive", "possessive", - "posthomous", "posthumous", - "potentialy", "potentially", - "poulations", "populations", - "powerhorse", "powerhouse", - "powerhosue", "powerhouse", - "powerhours", "powerhouse", - "powerhsell", "powershell", - "powerprint", "powerpoint", - "powersehll", "powershell", - "ppublisher", "publisher", - "practially", "practically", - "practicaly", "practically", - "practicess", "practise", - "practiclly", "practically", - "practioner", "practitioner", - "precaucion", "precaution", - "precausion", "precaution", - "precautios", "precautions", - "precedance", "precedence", - "precedense", "precedence", - "preceeding", "preceding", - "precendece", "precedence", - "precentage", "percentage", - "precentile", "percentile", - "preciselly", "precisely", - "precuation", "precautions", - "precussion", "percussion", - "predecated", "predicated", - "predecence", "precedence", - "predecesor", "predecessor", - "predection", "prediction", - "predective", "predictive", - "prediccion", "prediction", - "prediceted", "predicated", - "predicited", "predicated", - "predicitng", "predicting", - "prediciton", "prediction", - "predicitve", "predictive", - "predickted", "predicated", - "predictave", "predictive", - "predictivo", "prediction", - "predictons", "predictions", - "predjuiced", "prejudiced", - "predjuices", "prejudices", - "preduction", "prediction", - "preductive", "predictive", - "predujiced", "prejudiced", - "predujices", "prejudices", - "prefarable", "preferable", - "prefarably", "preferably", - "prefection", "perfection", - "preferance", "preference", - "prefereble", "preferable", - "preferente", "preference", - "preferenze", "preference", - "preferible", "preferable", - "preferibly", "preferably", - "prefernece", "preferences", - "preformers", "performers", - "pregancies", "pregnancies", - "pregnanies", "pregnancies", - "preipheral", "peripheral", - "preisdents", "presidents", - "preisthood", "priesthood", - "prejeduced", "prejudiced", - "prejeduces", "prejudices", - "prejiduced", "prejudiced", - "prejiduces", "prejudices", - "prejucided", "prejudiced", - "prejucides", "prejudices", - "prejuduced", "prejudiced", - "prejuduces", "prejudices", - "prelimiary", 
"preliminary", - "prematurly", "prematurely", - "preminence", "preeminence", - "premission", "permission", - "preorderes", "preorders", - "prepartion", "preparation", - "prepetuate", "perpetuate", - "preposters", "preposterous", - "prescients", "presidents", - "prescirbed", "prescribed", - "prescriped", "prescribed", - "presearing", "preserving", - "presecuted", "persecuted", - "presedency", "presidency", - "presedents", "presidents", - "presenning", "presenting", - "presentase", "presents", - "presentato", "presentation", - "presention", "presenting", - "presentors", "presents", - "preservare", "preserve", - "preservato", "preservation", - "preserverd", "preserved", - "presidancy", "presidency", - "presidante", "presidents", - "presidenta", "presidential", - "presidenty", "presidency", - "presidunce", "presidency", - "presistent", "persistent", - "presonally", "personally", - "presonhood", "personhood", - "pressuming", "pressuring", - "prestigios", "prestigious", - "prestigous", "prestigious", - "presuambly", "presumably", - "presuasion", "persuasion", - "presuasive", "persuasive", - "presumebly", "presumably", - "presumendo", "presumed", - "presumibly", "presumably", - "presumpton", "presumption", - "pretaining", "pertaining", - "pretection", "protection", - "pretendias", "pretends", - "pretensive", "pretense", - "pretentios", "pretentious", - "pretentous", "pretentious", - "prevalecen", "prevalence", - "prevalente", "prevalence", - "prevencion", "prevention", - "preventivo", "prevention", - "preventors", "prevents", - "previaling", "prevailing", - "previosuly", "previously", - "previoulsy", "previously", - "prevolence", "prevalence", - "pricinpals", "principals", - "primarilly", "primarily", - "primatives", "primitives", - "princepals", "principals", - "princesess", "princesses", - "princibles", "principles", - "principaly", "principality", - "principels", "principals", - "principial", "principal", - "principias", "principals", - "principlas", "principals", - "prinicipal", "principal", - "prinicpals", "principals", - "prinicples", "principles", - "printerest", "printers", - "prioratize", "prioritize", - "prioretize", "prioritize", - "prioritice", "prioritize", - "prioritied", "prioritize", - "prioroties", "priorities", - "priorotize", "prioritize", - "priotities", "priorities", - "priotitize", "prioritize", - "privaleged", "privileged", - "privaleges", "privileges", - "privaticed", "privatized", - "privelaged", "privileged", - "privelages", "privileges", - "priveldges", "privileges", - "priveleged", "privileged", - "priveleges", "privileges", - "privelidge", "privileged", - "priveliged", "privileged", - "priveliges", "privileges", - "privetized", "privatized", - "privilaged", "privileged", - "privilages", "privileges", - "priviledge", "privilege", - "privilegde", "privileges", - "privilegie", "privilege", - "priviliged", "privileged", - "priviliges", "privileges", - "privitazed", "privatized", - "privitized", "privatized", - "probabiliy", "probability", - "probabilty", "probability", - "probablies", "probable", - "probablybe", "probable", - "problemita", "problematic", - "procalimed", "proclaimed", - "procceding", "proceeding", - "procedding", "proceeding", - "procederal", "procedural", - "procedings", "proceedings", - "procedrual", "procedural", - "proceededs", "proceeds", - "proceedure", "procedure", - "proceesing", "proceeding", - "processsor", "processors", - "proclamied", "proclaimed", - "proclaming", "proclaiming", - "procliamed", "proclaimed", - "procreatin", "procreation", - 
"procudures", "procedures", - "prodcution", "production", - "prodecural", "procedural", - "prodecures", "procedures", - "produccion", "production", - "produceras", "produces", - "produceres", "produces", - "producirse", "producers", - "produciton", "production", - "producting", "production", - "productino", "productions", - "productivo", "production", - "productivy", "productivity", - "productoin", "productions", - "produktion", "production", - "produktive", "productive", - "produtcion", "productions", - "profesions", "profession", - "professers", "professors", - "professorn", "profession", - "professsor", "professors", - "proffesion", "profession", - "proficeint", "proficient", - "proficiant", "proficient", - "proficieny", "proficiency", - "proficincy", "proficiency", - "profitabel", "profitable", - "profitabil", "profitable", - "profitible", "profitable", - "proftiable", "profitable", - "programmar", "programmer", - "programmme", "programme", - "progresing", "progressing", - "progresion", "progression", - "progresive", "progressive", - "progressie", "progressives", - "progressin", "progression", - "progresson", "progression", - "progressos", "progresses", - "progressus", "progresses", - "prohibirte", "prohibit", - "prohibites", "prohibits", - "prohibitng", "prohibiting", - "prohibiton", "prohibition", - "prohibitus", "prohibits", - "prohibitve", "prohibited", - "prohobited", "prohibited", - "prohpecies", "prophecies", - "projecitle", "projectiles", - "projectiel", "projectiles", - "projecties", "projectiles", - "projectils", "projectiles", - "projectles", "projectiles", - "projectlie", "projectiles", - "projectyle", "projectile", - "projektile", "projectile", - "projektion", "projection", - "prometheas", "prometheus", - "promethese", "prometheus", - "promethius", "prometheus", - "promethous", "prometheus", - "promethues", "prometheus", - "prominance", "prominence", - "prominenty", "prominently", - "prominetly", "prominently", - "promiscous", "promiscuous", - "promiscuos", "promiscuous", - "promoteurs", "promotes", - "promotheus", "prometheus", - "promotinal", "promotional", - "pronoucned", "pronounced", - "pronouning", "pronouncing", - "propechies", "prophecies", - "propencity", "propensity", - "propenents", "proponents", - "properites", "properties", - "propersity", "propensity", - "propertion", "proportion", - "propertius", "properties", - "prophacies", "prophecies", - "prophocies", "prophecies", - "propietary", "proprietary", - "proplusion", "propulsion", - "propoganda", "propaganda", - "propogates", "propagates", - "propolsion", "propulsion", - "proponants", "proponents", - "proponenet", "proponent", - "proporcion", "proportion", - "proporties", "properties", - "proporting", "proportion", - "propositon", "proposition", - "propotions", "proportions", - "proprietry", "proprietary", - "proprotion", "proportion", - "propserity", "prosperity", - "propserous", "prosperous", - "propulaios", "propulsion", - "propulsing", "propulsion", - "propultion", "propulsion", - "propuslion", "propulsion", - "prosectued", "prosecuted", - "prosectuor", "prosecutor", - "prosecuter", "prosecutor", - "prosecutie", "prosecuted", - "prosicuted", "prosecuted", - "prosicutor", "prosecutor", - "prosocuted", "prosecuted", - "prosparity", "prosperity", - "prospectos", "prospects", - "prosperety", "prosperity", - "prospertiy", "prosperity", - "prosphetic", "prosthetic", - "prosporous", "prosperous", - "prostehtic", "prosthetic", - "prosterity", "prosperity", - "prostethic", "prosthetic", - "prostitite", 
"prostitute", - "prostitude", "prostitute", - "prostituee", "prostitute", - "prostituer", "prostitute", - "prostitues", "prostitutes", - "prostiture", "prostitute", - "prostituto", "prostitution", - "prostituye", "prostitute", - "protaginst", "protagonist", - "protastant", "protestant", - "proteccion", "protection", - "proteciton", "protections", - "protectice", "protective", - "protectiei", "protective", - "protectoin", "protections", - "protectons", "protectors", - "protectron", "protection", - "protestans", "protests", - "protestare", "protesters", - "protestato", "protestant", - "protestent", "protestant", - "protestina", "protestant", - "prothsetic", "prosthetic", - "protistant", "protestant", - "protocoles", "protocols", - "protocolls", "protocols", - "protocolos", "protocols", - "protohypes", "prototypes", - "protostant", "protestant", - "prototipes", "prototypes", - "prototpyes", "prototypes", - "protraying", "portraying", - "protuguese", "portuguese", - "provencial", "provincial", - "proveribal", "proverbial", - "provervial", "proverbial", - "providance", "providence", - "providince", "providence", - "provinciae", "province", - "provincies", "province", - "provincija", "provincial", - "provinence", "providence", - "provinical", "provincial", - "provintial", "provincial", - "provinvial", "provincial", - "provisiosn", "provision", - "provisonal", "provisional", - "provocatie", "provocative", - "pscyhology", "psychology", - "pscyhopath", "psychopath", - "pshycology", "psychology", - "pshycopath", "psychopath", - "psychedlic", "psychedelic", - "psychiatic", "psychiatric", - "psycholoog", "psychology", - "psychopaat", "psychopath", - "psychopats", "psychopaths", - "ptichforks", "pitchforks", - "publicitan", "publication", - "publisheed", "published", - "publisherr", "publisher", - "publishher", "publisher", - "publissher", "publisher", - "publlisher", "publisher", - "punihsment", "punishments", - "punishemnt", "punishments", - "punishible", "punishable", - "punishmnet", "punishments", - "punissable", "punishable", - "punsihable", "punishable", - "purchacing", "purchasing", - "purpolsion", "propulsion", - "purposedly", "purposely", - "purposelly", "purposely", - "purpotedly", "purportedly", - "pususading", "persuading", - "pyschology", "psychology", - "pyschopath", "psychopath", - "qaulifiers", "qualifiers", - "quailfiers", "qualifiers", - "qualfiiers", "qualifiers", - "qualifieds", "qualifies", - "qualifiies", "qualifiers", - "qualifiing", "qualifying", - "qualifires", "qualifiers", - "qualifyers", "qualifiers", - "qualitying", "qualifying", - "quanitites", "quantities", - "quantaties", "quantities", - "quantitize", "quantities", - "quarantena", "quarantine", - "quarantene", "quarantine", - "quarantied", "quarantine", - "quarintine", "quarantine", - "quaruntine", "quarantine", - "quesitoned", "questioned", - "questional", "questionable", - "questionne", "questioned", - "rabinnical", "rabbinical", - "radiactive", "radioactive", - "radioacive", "radioactive", - "rainbowers", "rainbows", - "randmoness", "randomness", - "randomzied", "randomized", - "randonmess", "randomness", - "randumness", "randomness", - "raspberrry", "raspberry", - "rationalle", "rationale", - "readmition", "readmission", - "realitvely", "relatively", - "realtively", "relatively", - "realtivity", "relativity", - "reaserched", "researched", - "reasercher", "researcher", - "rebiulding", "rebuilding", - "reboudning", "rebounding", - "rebouncing", "rebounding", - "rebuidling", "rebuilding", - "rebuliding", "rebuilding", - 
"rebuplican", "republican", - "reccommend", "recommend", - "recepients", "recipients", - "receptoras", "receptors", - "receptores", "receptors", - "recgonised", "recognised", - "recgonized", "recognized", - "recgonizes", "recognizes", - "reciepents", "recipients", - "recipeints", "recipients", - "recipiants", "recipients", - "recocnised", "recognised", - "recoginsed", "recognised", - "recoginzed", "recognized", - "recognices", "recognizes", - "recogniton", "recognition", - "recognzied", "recognised", - "recomended", "recommended", - "recommande", "recommend", - "recommands", "recommends", - "recommeded", "recommended", - "recommened", "recommend", - "recommennd", "recommends", - "recomments", "recommends", - "recompence", "recompense", - "reconcider", "reconsider", - "reconcille", "reconcile", - "recongised", "recognised", - "recongized", "recognized", - "recongizes", "recognizes", - "reconisder", "reconsider", - "reconsiled", "reconsider", - "recordarle", "recorder", - "recordarme", "recorder", - "recordarse", "recorder", - "recordarte", "recorder", - "recreacion", "recreation", - "recreatief", "recreate", - "recreativo", "recreation", - "recrutiers", "recruiters", - "rectanglar", "rectangular", - "rectangual", "rectangular", - "rectanguar", "rectangular", - "recuriters", "recruiters", - "recurrance", "recurrence", - "recursivly", "recursively", - "redefinied", "redefine", - "redefinine", "redefine", - "redemtpion", "redemption", - "redepmtion", "redemption", - "redesiging", "redesign", - "rediculous", "ridiculous", - "redmeption", "redemption", - "redneckers", "rednecks", - "redneckese", "rednecks", - "redneckest", "rednecks", - "reduncancy", "redundancy", - "redundency", "redundancy", - "redundnacy", "redundancy", - "redunduncy", "redundancy", - "reenforced", "reinforced", - "reevaulate", "reevaluate", - "refedendum", "referendum", - "refelcting", "reflecting", - "refelction", "reflection", - "refelctive", "reflective", - "referances", "references", - "referandum", "referendum", - "referemces", "references", - "referemdum", "referendum", - "referendim", "referendum", - "referendom", "referendum", - "referenece", "reference", - "referening", "referencing", - "referenses", "referees", - "referentes", "references", - "referneces", "references", - "referrence", "reference", - "referundum", "referendum", - "refference", "reference", - "refleciton", "reflections", - "reflecters", "reflects", - "reflektion", "reflection", - "reflextion", "reflection", - "reformerad", "reformed", - "refrigerar", "refrigerator", - "refurbised", "refurbished", - "regenarate", "regenerate", - "registeres", "registers", - "registrato", "registration", - "regresives", "regressive", - "regressivo", "regression", - "regualting", "regulating", - "regualtion", "regulations", - "regualtors", "regulators", - "regulacion", "regulation", - "regulament", "regulate", - "regulaotrs", "regulators", - "regularily", "regularly", - "regularing", "regulating", - "regularlas", "regulars", - "regularlos", "regulars", - "regulaters", "regulators", - "regulatios", "regulators", - "regulatons", "regulations", - "rehtorical", "rhetorical", - "reinstaled", "reinstalled", - "reitrement", "retirement", - "relagation", "relaxation", - "relatation", "relaxation", - "relativety", "relativity", - "relativily", "relativity", - "relativley", "relatively", - "relavation", "relaxation", - "relaxating", "relaxation", - "relazation", "relaxation", - "releagtion", "relegation", - "relegetion", "relegation", - "relentness", "relentless", - 
"reletnless", "relentless", - "relevation", "revelation", - "relexation", "relegation", - "relfecting", "reflecting", - "relfection", "reflection", - "relfective", "reflective", - "reliabilty", "reliability", - "reliablely", "reliably", - "religiones", "religions", - "religiosly", "religiously", - "religiousy", "religiously", - "religously", "religiously", - "relitavely", "relatively", - "reluctanct", "reluctant", - "reluctanly", "reluctantly", - "reluctanty", "reluctantly", - "remarcably", "remarkably", - "remarkibly", "remarkably", - "rememberes", "remembers", - "remenicent", "reminiscent", - "reminisent", "reminiscent", - "reminscent", "reminiscent", - "remmebered", "remembered", - "renaissace", "renaissance", - "renderered", "rendered", - "renegerate", "regenerate", - "renewabels", "renewables", - "renewebles", "renewables", - "rennovated", "renovated", - "renweables", "renewables", - "repatition", "repetition", - "repblicans", "republicans", - "repbulican", "republican", - "repeadedly", "repeatedly", - "repeadetly", "repeatedly", - "repearable", "repeatable", - "repearedly", "repealed", - "repeatadly", "repeatedly", - "repeatedlt", "repealed", - "repeatetly", "repeatedly", - "repeatible", "repeatable", - "repeatidly", "repeatedly", - "repectable", "repeatable", - "repentable", "repeatable", - "repentence", "repentance", - "repersents", "represents", - "repetation", "repetition", - "repeteadly", "repeatedly", - "repetetion", "repetition", - "repeticion", "repetition", - "repetitivo", "repetition", - "replacated", "replicated", - "replaceble", "replaceable", - "replacemet", "replacements", - "replacemnt", "replacement", - "replacemtn", "replacements", - "replecated", "replicated", - "repoistory", "repository", - "reponsible", "responsible", - "reportadly", "reportedly", - "reporteros", "reporters", - "reportidly", "reportedly", - "repositary", "repository", - "reposotory", "repository", - "repostiory", "repository", - "representn", "representing", - "repressent", "represents", - "repressivo", "repression", - "repsectful", "respectful", - "repsecting", "respecting", - "repsective", "respective", - "repsonding", "responding", - "repsonsive", "responsive", - "reptuation", "reputation", - "repubicans", "republicans", - "republcian", "republican", - "republians", "republicans", - "republicon", "republican", - "repuglican", "republican", - "repulicans", "republicans", - "reputacion", "reputation", - "requirment", "requirement", - "requrement", "requirement", - "resemblace", "resemble", - "reserached", "researched", - "reseracher", "researchers", - "reserverad", "reserved", - "reservered", "reserved", - "residental", "residential", - "resistable", "resistible", - "resistanes", "resistances", - "resistanse", "resistances", - "resistence", "resistance", - "resistendo", "resisted", - "resistered", "resisted", - "resistnace", "resistances", - "resitsance", "resistances", - "resoltuion", "resolutions", - "resolucion", "resolution", - "resolutino", "resolutions", - "resolutoin", "resolutions", - "resolutons", "resolutions", - "resolvemos", "resolves", - "resolvendo", "resolved", - "resolveres", "resolves", - "resolverse", "resolves", - "resolviste", "resolves", - "resonabelt", "resonate", - "resoultion", "resolution", - "respecitve", "respective", - "respectifs", "respects", - "respection", "respecting", - "respectons", "respects", - "respectuos", "respects", - "respektive", "respective", - "respiratoy", "respiratory", - "responcive", "responsive", - "responisve", "responsive", - "responsibe", 
"responsive", - "responsiby", "responsibly", - "responsile", "responsive", - "responsing", "responding", - "ressembled", "resembled", - "restarants", "restaurants", - "restaraunt", "restaurant", - "restaruant", "restaurant", - "restatting", "restarting", - "restaurent", "restaurant", - "restauring", "restarting", - "resteraunt", "restaurant", - "restircted", "restricted", - "restorting", "restarting", - "restrainig", "restraining", - "restrcited", "restricted", - "restrcting", "restarting", - "restricing", "restricting", - "restricion", "restriction", - "restricive", "restrictive", - "restrictes", "restricts", - "restrictie", "restrictive", - "restricton", "restriction", - "restructed", "restricted", - "restuarant", "restaurant", - "resturants", "restaurants", - "resturaunt", "restaurant", - "retaliaton", "retaliation", - "rethorical", "rhetorical", - "retierment", "retirement", - "retribuito", "retribution", - "retrosepct", "retrospect", - "retrospekt", "retrospect", - "revaluated", "reevaluated", - "revealtion", "revelations", - "revelaiton", "revelations", - "revelatons", "revelations", - "revelution", "revelation", - "reversable", "reversible", - "reversably", "reversal", - "reviewtrue", "reviewer", - "revisiones", "revisions", - "revisionis", "revisions", - "revoltuion", "revolution", - "revoluiton", "revolutions", - "revolutoin", "revolutions", - "revoultion", "revolution", - "rewarching", "rewatching", - "rewatchibg", "rewatching", - "rewatchign", "rewatching", - "rewatchimg", "rewatching", - "rhapsodomy", "rhapsody", - "rhetorisch", "rhetoric", - "ridicilous", "ridiculous", - "ridicoulus", "ridiculous", - "ridiculise", "ridicule", - "ridiculize", "ridicule", - "ridiculled", "ridicule", - "ridiculose", "ridicule", - "ridiculued", "ridicule", - "rienforced", "reinforced", - "rigthfully", "rightfully", - "roleplaing", "roleplaying", - "romanmania", "romanian", - "roundaboot", "roundabout", - "rucuperate", "recuperate", - "rudimentry", "rudimentary", - "sacarmento", "sacramento", - "sacntioned", "sanctioned", - "sacraficed", "sacrificed", - "sacrafices", "sacrifices", - "sacramenno", "sacramento", - "sacreficed", "sacrificed", - "sacrefices", "sacrifices", - "sacremento", "sacramento", - "sacrifaced", "sacrificed", - "sacrifaces", "sacrifices", - "sacrifical", "sacrificial", - "sacrificas", "sacrifices", - "sacrificie", "sacrificed", - "sacrificng", "sacrificing", - "sacrifises", "sacrifices", - "sacrifized", "sacrificed", - "sacrifizes", "sacrifices", - "sacromento", "sacramento", - "sadistisch", "sadistic", - "sanctionne", "sanctioned", - "sandiwches", "sandwiches", - "sandviches", "sandwiches", - "sandwishes", "sandwiches", - "sanitazion", "sanitation", - "santiation", "sanitation", - "sastifying", "satisfying", - "satellitte", "satellites", - "satifsying", "satisfying", - "satrically", "satirically", - "satsifying", "satisfying", - "sattelites", "satellites", - "saturacion", "saturation", - "scandalosa", "scandals", - "scandalose", "scandals", - "scandalosi", "scandals", - "scandaloso", "scandals", - "scandaniva", "scandinavia", - "scandinava", "scandinavian", - "scandinvia", "scandinavia", - "scaramento", "sacramento", - "scarificed", "sacrificed", - "scarifices", "sacrifices", - "scarmbling", "scrambling", - "scartching", "scratching", - "sceintific", "scientific", - "sceintists", "scientists", - "scenarioes", "scenarios", - "scenarions", "scenarios", - "scenarious", "scenarios", - "scheudling", "scheduling", - "scholarhip", "scholarship", - "scholarley", "scholarly", - 
"sciencists", "scientists", - "scientests", "scientists", - "scirptures", "scriptures", - "scooterers", "scooters", - "scorebaord", "scoreboard", - "scoreborad", "scoreboard", - "scorebored", "scoreboard", - "scorpiomon", "scorpion", - "scracthing", "scratching", - "scramblies", "scramble", - "screenshat", "screenshot", - "screenshit", "screenshot", - "scriptores", "scriptures", - "scripturae", "scriptures", - "scriputres", "scriptures", - "scritpures", "scriptures", - "scrutinity", "scrutiny", - "seahawkers", "seahawks", - "sebastiaan", "sebastian", - "segegrated", "segregated", - "segragated", "segregated", - "segregaded", "segregated", - "segregatie", "segregated", - "segretated", "segregated", - "segrigated", "segregated", - "selectiose", "selections", - "selectivly", "selectively", - "selectivos", "selections", - "selfishess", "selfishness", - "senitments", "sentiments", - "sensitiviy", "sensitivity", - "sensitivty", "sensitivity", - "sentaments", "sentiments", - "sentancing", "sentencing", - "sentements", "sentiments", - "sentencian", "sentencing", - "sentensing", "sentencing", - "sentimenal", "sentimental", - "sentimetal", "sentimental", - "sentincing", "sentencing", - "sentinents", "sentiments", - "separacion", "separation", - "separaters", "separates", - "separatley", "separately", - "separatron", "separation", - "separetely", "separately", - "seperately", "separately", - "seperating", "separating", - "seperation", "separation", - "seperatism", "separatism", - "seperatist", "separatist", - "seperatley", "seperate", - "sepulchure", "sepulchre", - "serenitary", "serenity", - "serviceble", "serviceable", - "settelment", "settlement", - "settlemens", "settlements", - "settlemets", "settlements", - "settlemnts", "settlements", - "seuxalized", "sexualized", - "seventeeen", "seventeen", - "sexaulized", "sexualized", - "sexualixed", "sexualized", - "sexuallity", "sexually", - "sexualzied", "sexualized", - "sexulaized", "sexualized", - "shakespare", "shakespeare", - "shakespeer", "shakespeare", - "shakespere", "shakespeare", - "shamelesly", "shamelessly", - "shamelessy", "shamelessly", - "shaprening", "sharpening", - "shareholds", "shareholders", - "sharkening", "sharpening", - "sharpining", "sharpening", - "shartening", "sharpening", - "shatnering", "shattering", - "shattening", "shattering", - "shepharded", "shepherd", - "shilouette", "silhouette", - "shitlasses", "shitless", - "shortenend", "shortened", - "shortining", "shortening", - "sidelinien", "sideline", - "sidelinjen", "sideline", - "sidelinked", "sideline", - "sigantures", "signatures", - "sightstine", "sightstone", - "signficant", "significant", - "signifiant", "significant", - "significat", "significant", - "signitures", "signatures", - "sigthstone", "sightstone", - "sihlouette", "silhouette", - "silohuette", "silhouette", - "silouhette", "silhouette", - "similairty", "similarity", - "similarily", "similarly", - "similarlly", "similarly", - "similiarly", "similarly", - "similiarty", "similarity", - "simliarity", "similarity", - "simluation", "simulation", - "simplictic", "simplistic", - "simplifing", "simplifying", - "simplifyed", "simplified", - "simplifyng", "simplifying", - "simplisitc", "simplistic", - "simplisity", "simplicity", - "simplistes", "simplest", - "simplivity", "simplicity", - "simplyfied", "simplified", - "simualtion", "simulation", - "simulacion", "simulation", - "simulaiton", "simulations", - "simulaties", "simulate", - "simulative", "simulate", - "simulatons", "simulations", - "simulatore", "simulate", - 
"sincereley", "sincerely", - "sincerelly", "sincerely", - "singatures", "signatures", - "singulaire", "singular", - "singulariy", "singularity", - "singularty", "singularity", - "singulator", "singular", - "sitautions", "situations", - "situatinal", "situational", - "skatebaord", "skateboard", - "skateborad", "skateboard", - "skatebored", "skateboard", - "skatebrand", "skateboard", - "skeletones", "skeletons", - "skeptecism", "skepticism", - "skepticals", "skeptics", - "skepticles", "skeptics", - "skepticons", "skeptics", - "skeptisicm", "skepticism", - "skeptisism", "skepticism", - "sketchysex", "sketches", - "sketpicism", "skepticism", - "skillhosts", "skillshots", - "skillshits", "skillshots", - "skillshoot", "skillshots", - "skillslots", "skillshots", - "skillsofts", "skillshots", - "skillsshot", "skillshots", - "skirmiches", "skirmish", - "skpeticism", "skepticism", - "slaughterd", "slaughtered", - "slipperies", "slippers", - "smarpthone", "smartphones", - "smarthpone", "smartphone", - "snadwiches", "sandwiches", - "snowbaling", "snowballing", - "snowballes", "snowballs", - "snowballls", "snowballs", - "socailists", "socialists", - "socailized", "socialized", - "socialisim", "socialism", - "socializng", "socializing", - "socialsits", "socialists", - "sociapaths", "sociopaths", - "socilaists", "socialists", - "socilaized", "socialized", - "sociologia", "sociological", - "sociopatas", "sociopaths", - "sociopatch", "sociopaths", - "sociopatic", "sociopathic", - "socratease", "socrates", - "socreboard", "scoreboard", - "soemthings", "somethings", - "soldiarity", "solidarity", - "solidairty", "solidarity", - "soliditary", "solidarity", - "solitudine", "solitude", - "somehtings", "somethings", - "someonelse", "someones", - "somethibng", "somethin", - "somethigng", "somethin", - "somethigns", "somethings", - "somethihng", "somethin", - "somethiing", "somethin", - "somethijng", "somethin", - "somethikng", "somethin", - "somethimng", "somethin", - "somethinbg", "somethings", - "somethines", "somethings", - "somethinfg", "somethings", - "somethinhg", "somethings", - "somethinig", "somethings", - "somethinkg", "somethings", - "somethinks", "somethings", - "somethinmg", "somethings", - "somethinng", "somethings", - "somethintg", "somethings", - "somethiong", "somethin", - "somethiung", "somethin", - "sophicated", "sophisticated", - "sotrmfront", "stormfront", - "sotrylines", "storylines", - "soudntrack", "soundtrack", - "soundrtack", "soundtracks", - "soundtracs", "soundtracks", - "soundtrakc", "soundtracks", - "soundtrakk", "soundtrack", - "soundtraks", "soundtracks", - "southampon", "southampton", - "southamton", "southampton", - "southerers", "southerners", - "southernes", "southerners", - "southerton", "southern", - "souveniers", "souvenirs", - "sovereigny", "sovereignty", - "sovereinty", "sovereignty", - "soverignty", "sovereignty", - "spartaniis", "spartans", - "spartanops", "spartans", - "specailist", "specialist", - "specailize", "specializes", - "specialice", "specialize", - "specialied", "specialized", - "specialies", "specializes", - "specialits", "specials", - "speciallly", "specially", - "speciallty", "specially", - "specialops", "specials", - "specialsts", "specialists", - "specialtys", "specials", - "specialzed", "specialized", - "specialzes", "specializes", - "specifices", "specifics", - "specifiing", "specifying", - "specifiyng", "specifying", - "speciliast", "specialists", - "specimines", "specimen", - "spectarors", "spectators", - "spectaters", "spectators", - "spectracal", 
"spectral", - "spectraply", "spectral", - "spectrolab", "spectral", - "speculatie", "speculative", - "speculatin", "speculation", - "speecheasy", "speeches", - "speicalist", "specialist", - "spiritualy", "spiritually", - "sponsorees", "sponsors", - "sponsorhip", "sponsorship", - "sponsorise", "sponsors", - "spontaneos", "spontaneous", - "spontaneus", "spontaneous", - "spontanous", "spontaneous", - "spoonfulls", "spoonfuls", - "spreadshet", "spreadsheet", - "springfeld", "springfield", - "springfied", "springfield", - "spriritual", "spiritual", - "squirrells", "squirrels", - "squirrelus", "squirrels", - "stabelized", "stabilized", - "stabilzied", "stabilized", - "stablility", "stability", - "stablizied", "stabilized", - "staggaring", "staggering", - "stakeboard", "skateboard", - "starighten", "straighten", - "starnation", "starvation", - "startegies", "strategies", - "startupbus", "startups", - "starwberry", "strawberry", - "statememts", "statements", - "statictics", "statistics", - "stationair", "stationary", - "statisitcs", "statistics", - "statistcal", "statistical", - "statistisk", "statistics", - "stauration", "saturation", - "stealthboy", "stealthy", - "stealthely", "stealthy", - "stealthify", "stealthy", - "stealthray", "stealthy", - "steeleries", "steelers", - "stereotipe", "stereotype", - "stereotpye", "stereotypes", - "steriotype", "stereotype", - "steroetype", "stereotype", - "sterotypes", "stereotypes", - "steryotype", "stereotype", - "stimilants", "stimulants", - "stimilated", "stimulated", - "stimualted", "stimulated", - "stimulatie", "stimulated", - "stimulatin", "stimulation", - "stimulaton", "stimulation", - "stimulents", "stimulants", - "stomrfront", "stormfront", - "storelines", "storylines", - "stormfornt", "stormfront", - "stormfromt", "stormfront", - "stornfront", "stormfront", - "stornghold", "stronghold", - "stradegies", "strategies", - "strageties", "strategies", - "straighted", "straightened", - "straightie", "straighten", - "straightin", "straighten", - "straigthen", "straighten", - "stranglove", "strangle", - "strangreal", "strangle", - "stratagies", "strategies", - "strategems", "strategies", - "strategice", "strategies", - "strategisk", "strategies", - "stravation", "starvation", - "strawbarry", "strawberry", - "strawbeary", "strawberry", - "strawbeery", "strawberry", - "strawbrary", "strawberry", - "strawburry", "strawberry", - "streaching", "stretching", - "streamtrue", "streamer", - "strechting", "stretching", - "strecthing", "stretching", - "stregnthen", "strengthen", - "streichung", "stretching", - "strenghten", "strengthen", - "strengsten", "strengthen", - "strengthes", "strengths", - "strengthin", "strengthen", - "stressende", "stressed", - "striaghten", "straighten", - "stromfront", "stormfront", - "stronkhold", "stronghold", - "stroylines", "storylines", - "structered", "structured", - "structrual", "structural", - "structurel", "structural", - "strucutral", "structural", - "strucutred", "structured", - "strucutres", "structures", - "strugglign", "struggling", - "strwaberry", "strawberry", - "sttutering", "stuttering", - "stupidfree", "stupider", - "stupiditiy", "stupidity", - "sturctural", "structural", - "sturctures", "structures", - "sturggling", "struggling", - "subarmines", "submarines", - "subcultuur", "subculture", - "subesquent", "subsequent", - "subisdized", "subsidized", - "subjectief", "subjective", - "subjectifs", "subjects", - "subjectivy", "subjectively", - "subjektive", "subjective", - "submariens", "submarines", - "submarinas", 
"submarines", - "submergerd", "submerged", - "submerines", "submarines", - "submisison", "submissions", - "submissies", "submissive", - "submissons", "submissions", - "submittion", "submitting", - "subsadized", "subsidized", - "subscirbed", "subscribed", - "subscirber", "subscribers", - "subscribar", "subscriber", - "subscribir", "subscriber", - "subscrible", "subscriber", - "subscriped", "subscribed", - "subscrubed", "subscribed", - "subscryber", "subscriber", - "subsedized", "subsidized", - "subsequant", "subsequent", - "subsidezed", "subsidized", - "subsidiced", "subsidized", - "subsidizng", "subsidizing", - "subsiduary", "subsidiary", - "subsiquent", "subsequent", - "subsittute", "substitutes", - "subsizided", "subsidized", - "subsrcibed", "subscribed", - "substanial", "substantial", - "substansen", "substances", - "substanser", "substances", - "substanses", "substances", - "substantie", "substantive", - "substatial", "substantial", - "substences", "substances", - "substitite", "substitute", - "substittue", "substitutes", - "substitude", "substitute", - "substitued", "substitute", - "substituer", "substitute", - "substitues", "substitutes", - "substiture", "substitute", - "substituto", "substitution", - "substituts", "substitutes", - "substracts", "subtracts", - "substutite", "substitutes", - "subsudized", "subsidized", - "subtitltes", "subtitle", - "succceeded", "succeeded", - "succcesses", "successes", - "succesfuly", "successfully", - "succesions", "succession", - "successing", "succession", - "successivo", "succession", - "sucesfully", "successfully", - "sucessfull", "successful", - "sucessfuly", "successfully", - "sudnerland", "sunderland", - "sufferered", "suffered", - "sufferring", "suffering", - "sufficiant", "sufficient", - "suggestied", "suggestive", - "suggestief", "suggestive", - "suggestons", "suggests", - "sumbarines", "submarines", - "sumbissive", "submissive", - "sumbitting", "submitting", - "summerized", "summarized", - "summorized", "summarized", - "summurized", "summarized", - "sunderlona", "sunderland", - "sunderlund", "sunderland", - "sungalsses", "sunglasses", - "sunglesses", "sunglasses", - "sunglinger", "gunslinger", - "sunscreeen", "sunscreen", - "superfical", "superficial", - "superfluos", "superfluous", - "superioara", "superior", - "superioare", "superior", - "superioris", "superiors", - "superivsor", "supervisors", - "supermaket", "supermarket", - "supermarkt", "supermarket", - "superouman", "superhuman", - "superposer", "superpowers", - "superviors", "supervisors", - "superviosr", "supervisors", - "supervisar", "supervisor", - "superviser", "supervisor", - "supervisin", "supervision", - "supervison", "supervision", - "supervsior", "supervisors", - "supperssor", "suppressor", - "supplament", "supplement", - "supplemant", "supplemental", - "supplemets", "supplements", - "supportare", "supporters", - "supporteur", "supporter", - "supportied", "supported", - "supportors", "supporters", - "supposdely", "supposedly", - "supposebly", "supposedly", - "supposidly", "supposedly", - "suppresion", "suppression", - "suppresors", "suppressor", - "suppressin", "suppression", - "suppressio", "suppressor", - "suppresson", "suppression", - "suprassing", "surpassing", - "supressing", "suppressing", - "supression", "suppression", - "supsension", "suspension", - "supsicions", "suspicions", - "supsicious", "suspicious", - "surounding", "surrounding", - "surplanted", "supplanted", - "surpressed", "suppressed", - "surprizing", "surprising", - "surrenderd", "surrendered", - 
"surrouding", "surrounding", - "surroundes", "surrounds", - "surroundig", "surroundings", - "survivours", "survivor", - "suseptable", "susceptible", - "suseptible", "susceptible", - "suspecions", "suspicions", - "suspecious", "suspicious", - "suspencion", "suspension", - "suspendeds", "suspense", - "suspention", "suspension", - "suspicians", "suspicions", - "suspiciois", "suspicions", - "suspicioso", "suspicions", - "suspicioun", "suspicion", - "suspicison", "suspicions", - "suspiciuos", "suspicions", - "suspicsion", "suspicions", - "suspisions", "suspicions", - "suspisious", "suspicious", - "suspitions", "suspicions", - "sustainble", "sustainable", - "swaetshirt", "sweatshirt", - "swearengin", "swearing", - "swearshirt", "sweatshirt", - "sweathsirt", "sweatshirt", - "sweatshits", "sweatshirt", - "sweatshort", "sweatshirt", - "sweatshrit", "sweatshirt", - "sweerheart", "sweetheart", - "sweetshart", "sweetheart", - "switcheasy", "switches", - "switzerand", "switzerland", - "symapthize", "sympathize", - "symbolisch", "symbolic", - "symbolisim", "symbolism", - "symetrical", "symmetrical", - "sympatheic", "sympathetic", - "sympathiek", "sympathize", - "sympathien", "sympathize", - "sympathtic", "sympathetic", - "sympathyze", "sympathize", - "sympethize", "sympathize", - "symphatize", "sympathize", - "symphonity", "symphony", - "sympothize", "sympathize", - "syncronous", "synchronous", - "synomymous", "synonymous", - "synomynous", "synonymous", - "synonamous", "synonymous", - "synonimous", "synonymous", - "synonmyous", "synonymous", - "synonomous", "synonymous", - "synonumous", "synonymous", - "synonynous", "synonymous", - "sypmathize", "sympathize", - "systamatic", "systematic", - "systemetic", "systematic", - "systemisch", "systemic", - "systimatic", "systematic", - "tabelspoon", "tablespoon", - "tablespons", "tablespoons", - "tablesppon", "tablespoon", - "tacitcally", "tactically", - "taiwanesse", "taiwanese", - "taligating", "tailgating", - "tantrumers", "tantrums", - "targetting", "targeting", - "teamfigths", "teamfights", - "teamifghts", "teamfights", - "teamspeack", "teamspeak", - "techicians", "technicians", - "techincian", "technician", - "techinican", "technician", - "techinques", "techniques", - "technicain", "technician", - "technicaly", "technically", - "technicans", "technicians", - "technichan", "technician", - "technicien", "technician", - "technicion", "technician", - "technitian", "technician", - "technqiues", "techniques", - "techtician", "technician", - "tehnically", "ethnically", - "telegrapgh", "telegraph", - "teleporing", "teleporting", - "televesion", "television", - "televisivo", "television", - "temafights", "teamfights", - "temerature", "temperature", - "temperatue", "temperature", - "temperment", "temperament", - "temperture", "temperature", - "templarios", "templars", - "templarius", "templars", - "temporaily", "temporarily", - "temporarly", "temporary", - "temptating", "temptation", - "temptetion", "temptation", - "tendancies", "tendencies", - "tendencias", "tendencies", - "tendencije", "tendencies", - "tendensies", "tendencies", - "tendincies", "tendencies", - "tensionors", "tensions", - "tentacreul", "tentacle", - "termanator", "terminator", - "termendous", "tremendous", - "termiantor", "terminator", - "termigator", "terminator", - "terminales", "terminals", - "terminalis", "terminals", - "terminarla", "terminal", - "terminarlo", "terminal", - "terminaron", "terminator", - "terminater", "terminator", - "terminolgy", "terminology", - "terorrists", "terrorists", - 
"terrerists", "terrorists", - "terrestial", "terrestrial", - "terriblely", "terribly", - "terriories", "territories", - "territoral", "territorial", - "territores", "territories", - "territoris", "territories", - "territorry", "territory", - "terrorisim", "terrorism", - "terrorsits", "terrorists", - "terrurists", "terrorists", - "testiclees", "testicles", - "testiclies", "testicle", - "testimoney", "testimony", - "thankyooou", "thankyou", - "themselfes", "themselves", - "themsevles", "themselves", - "themsleves", "themselves", - "theocracry", "theocracy", - "theologial", "theological", - "therapetic", "therapeutic", - "therepists", "therapists", - "theripists", "therapists", - "thermastat", "thermostat", - "thermistat", "thermostat", - "thermomter", "thermometer", - "theromstat", "thermostat", - "thorttling", "throttling", - "thorughout", "throughout", - "thouroghly", "thoroughly", - "threadened", "threaded", - "threatenes", "threatens", - "threatning", "threatening", - "threshhold", "threshold", - "throthling", "throttling", - "throtlling", "throttling", - "throughiut", "throughput", - "thubmnails", "thumbnails", - "thumbmails", "thumbnails", - "thunderbot", "thunderbolt", - "thunderolt", "thunderbolt", - "tighetning", "tightening", - "tightining", "tightening", - "tigthening", "tightening", - "tjpanishad", "upanishad", - "toothbruch", "toothbrush", - "toothbruth", "toothbrush", - "toothbursh", "toothbrush", - "toothrbush", "toothbrush", - "toppingest", "toppings", - "torchilght", "torchlight", - "torchlgiht", "torchlight", - "torchligth", "torchlight", - "torhclight", "torchlight", - "torrentbig", "torrenting", - "torrenters", "torrents", - "torrentors", "torrents", - "tortillera", "tortilla", - "tortillias", "tortilla", - "tortillita", "tortilla", - "tortilllas", "tortilla", - "torunament", "tournament", - "totalitara", "totalitarian", - "touchsceen", "touchscreen", - "touchscren", "touchscreen", - "touranment", "tournaments", - "tourmanent", "tournaments", - "tournamets", "tournaments", - "tournamnet", "tournament", - "tournemant", "tournament", - "tournement", "tournament", - "toxicitity", "toxicity", - "trafficing", "trafficking", - "trainwreak", "trainwreck", - "traitorise", "traitors", - "tramboline", "trampoline", - "tramploine", "trampoline", - "trampolene", "trampoline", - "tranformed", "transformed", - "tranistion", "transition", - "tranlsated", "translated", - "transalted", "translated", - "transaltes", "translates", - "transaltor", "translator", - "transation", "transition", - "transciprt", "transcripts", - "transcirpt", "transcripts", - "transcrips", "transcripts", - "transcrito", "transcript", - "transcrits", "transcripts", - "transcrpit", "transcript", - "transfered", "transferred", - "transferer", "transferred", - "transferes", "transfers", - "transferrs", "transfers", - "transferts", "transfers", - "transfomed", "transformed", - "transfored", "transformed", - "transforme", "transfer", - "transfroms", "transforms", - "transgeder", "transgender", - "transgener", "transgender", - "transicion", "transition", - "transision", "transition", - "transister", "transistor", - "transitons", "transitions", - "transitors", "transistor", - "transkript", "transcript", - "translater", "translator", - "translatin", "translations", - "translatio", "translator", - "translpant", "transplants", - "transluent", "translucent", - "transmited", "transmitted", - "transmiter", "transmitter", - "transmitor", "transistor", - "transmorgs", "transforms", - "transpalnt", "transplants", - "transphoic", 
"transphobic", - "transplain", "transplant", - "transplate", "transplant", - "transplats", "transplants", - "transpoder", "transported", - "transportr", "transporter", - "transsexal", "transsexual", - "transtator", "translator", - "tranzistor", "transistor", - "trasncript", "transcript", - "trasnforms", "transforms", - "trasnlated", "translated", - "trasnlator", "translator", - "trasnplant", "transplant", - "traveleres", "travelers", - "travelodge", "traveled", - "traverlers", "traverse", - "traversare", "traverse", - "traversier", "traverse", - "treasurery", "treasury", - "trememdous", "tremendous", - "tremondous", "tremendous", - "trespasing", "trespassing", - "trianwreck", "trainwreck", - "trochlight", "torchlight", - "trustworhy", "trustworthy", - "trustworty", "trustworthy", - "trustwothy", "trustworthy", - "tryannical", "tyrannical", - "tunraround", "turnaround", - "tupparware", "tupperware", - "turnapound", "turnaround", - "turthfully", "truthfully", - "tutoriales", "tutorials", - "tyrantical", "tyrannical", - "ubiqituous", "ubiquitous", - "ubiquotous", "ubiquitous", - "ubiqutious", "ubiquitous", - "ukrainains", "ukrainians", - "ukraineans", "ukrainians", - "ukrainiens", "ukrainians", - "ukraininas", "ukrainians", - "ukrianians", "ukrainians", - "ulitmately", "ultimately", - "ulterioara", "ulterior", - "ulterioare", "ulterior", - "ultimative", "ultimate", - "ultimatley", "ultimately", - "ultimatuum", "ultimatum", - "unanwsered", "unanswered", - "unasnwered", "unanswered", - "unattanded", "unattended", - "unattented", "unattended", - "unavailabe", "unavailable", - "unavailble", "unavailable", - "unavoidble", "unavoidable", - "unawnsered", "unanswered", - "unbalenced", "unbalanced", - "unballance", "unbalance", - "unbalnaced", "unbalanced", - "unbareable", "unbearable", - "unbeakable", "unbeatable", - "unbeareble", "unbearable", - "unbeatbale", "unbeatable", - "unbeateble", "unbeatable", - "unbeerable", "unbearable", - "unbeetable", "unbeatable", - "unbeknowst", "unbeknownst", - "unbreakble", "unbreakable", - "uncencored", "uncensored", - "uncensered", "uncensored", - "uncersored", "uncensored", - "uncertainy", "uncertainty", - "uncertanty", "uncertainty", - "uncesnored", "uncensored", - "uncomitted", "uncommitted", - "uncommited", "uncommitted", - "unconcious", "unconscious", - "unconscous", "unconscious", - "undebiably", "undeniably", - "undeinable", "undeniable", - "undeinably", "undeniably", - "undenaible", "undeniable", - "undenaibly", "undeniably", - "undenyable", "undeniable", - "undenyably", "undeniably", - "underbaker", "undertaker", - "undercling", "underlying", - "underfaker", "undertaker", - "undergated", "underrated", - "undergrand", "undergrad", - "undergroud", "underground", - "undergrund", "underground", - "undermimes", "undermines", - "underminde", "undermines", - "underminig", "undermining", - "underneeth", "underneath", - "underneith", "underneath", - "undernieth", "underneath", - "underpowed", "underpowered", - "underraged", "underrated", - "underraker", "undertaker", - "underrater", "undertaker", - "undersatnd", "understands", - "understadn", "understands", - "understans", "understands", - "understnad", "understands", - "understoon", "understood", - "understsnd", "understands", - "undertoker", "undertaker", - "undertsand", "understands", - "undertunes", "undertones", - "underwager", "underwater", - "underwares", "underwater", - "underwolrd", "underworld", - "underwoord", "underworld", - "underwrold", "underworld", - "underyling", "underlying", - "undesrtand", 
"understands", - "undoubtedy", "undoubtedly", - "undoubtely", "undoubtedly", - "undoubtley", "undoubtedly", - "uneccesary", "unnecessary", - "unecessary", "unnecessary", - "unedcuated", "uneducated", - "unedicated", "uneducated", - "unempolyed", "unemployed", - "unexplaind", "unexplained", - "unexplaned", "unexplained", - "unfamilair", "unfamiliar", - "unfamilier", "unfamiliar", - "unfinsihed", "unfinished", - "unfirendly", "unfriendly", - "unfortuate", "unfortunate", - "unfreindly", "unfriendly", - "unfriednly", "unfriendly", - "unfriently", "unfriendly", - "ungrapeful", "ungrateful", - "ungreatful", "ungrateful", - "unhealthly", "unhealthy", - "unicornios", "unicorns", - "unifnished", "unfinished", - "unihabited", "uninhabited", - "unilatreal", "unilateral", - "unimporant", "unimportant", - "unimpresed", "unimpressed", - "unimpressd", "unimpressed", - "uninsipred", "uninspired", - "uninspried", "uninspired", - "uninstaled", "uninstalled", - "uniquiness", "uniqueness", - "univercity", "university", - "univeristy", "university", - "universale", "universe", - "universaly", "universally", - "universels", "universes", - "universets", "universes", - "universite", "universities", - "universtiy", "university", - "unjustifed", "unjustified", - "unknowingy", "unknowingly", - "unknowinly", "unknowingly", - "unnecesary", "unnecessary", - "unofficail", "unofficial", - "unoffocial", "unofficial", - "unorginial", "unoriginal", - "unorignial", "unoriginal", - "unorigonal", "unoriginal", - "unplacable", "unplayable", - "unplaybale", "unplayable", - "unplayeble", "unplayable", - "unpleasent", "unpleasant", - "unpopulair", "unpopular", - "unproteced", "unprotected", - "unqiueness", "uniqueness", - "unqualifed", "unqualified", - "unrealesed", "unreleased", - "unrealible", "unreliable", - "unrealistc", "unrealistic", - "unrealitic", "unrealistic", - "unreasonal", "unreasonably", - "unrelaible", "unreliable", - "unreleated", "unreleased", - "unrelyable", "unreliable", - "unrepetant", "unrepentant", - "unrepetent", "unrepentant", - "unresponse", "unresponsive", - "unsencored", "uncensored", - "unsetlling", "unsettling", - "unsolicted", "unsolicited", - "unsubscibe", "unsubscribe", - "unsubscrbe", "unsubscribe", - "unsucesful", "unsuccessful", - "unsuprised", "unsurprised", - "unsuprized", "unsurprised", - "unviersity", "university", - "unwrittern", "unwritten", - "urkainians", "ukrainians", - "utlimately", "ultimately", - "utlrasound", "ultrasound", - "vaccinatie", "vaccinated", - "vaccineras", "vaccines", - "valentians", "valentines", - "valentiens", "valentines", - "valentimes", "valentines", - "valentinas", "valentines", - "valentinos", "valentines", - "valentones", "valentines", - "validitity", "validity", - "valnetines", "valentines", - "vandalisim", "vandalism", - "vasectomey", "vasectomy", - "vegatarian", "vegetarian", - "vegaterian", "vegetarian", - "vegeratian", "vegetarians", - "vegetairan", "vegetarians", - "vegetarain", "vegetarians", - "vegetarien", "vegetarian", - "vegetarion", "vegetarian", - "vegetatian", "vegetarian", - "vegeterian", "vegetarian", - "vegitables", "vegetables", - "vehemantly", "vehemently", - "vehemontly", "vehemently", - "veitnamese", "vietnamese", - "veiwership", "viewership", - "veiwpoints", "viewpoints", - "venezuella", "venezuela", - "verificato", "verification", - "verifyable", "verifiable", - "veritcally", "vertically", - "veritiable", "verifiable", - "vernecular", "vernacular", - "vernicular", "vernacular", - "versatiliy", "versatility", - "versatille", "versatile", - 
"versatilty", "versatility", - "versitlity", "versatility", - "vewiership", "viewership", - "vibratoare", "vibrator", - "vicitmized", "victimized", - "vicotrious", "victorious", - "victemized", "victimized", - "victomized", "victimized", - "victorinos", "victorious", - "victorinus", "victorious", - "victoriosa", "victorious", - "victorioso", "victorious", - "victoriuos", "victorious", - "victumized", "victimized", - "videogaems", "videogames", - "videojames", "videogames", - "vidoegames", "videogames", - "vientamese", "vietnamese", - "vietmanese", "vietnamese", - "vietnamees", "vietnamese", - "vietnamise", "vietnamese", - "viewpionts", "viewpoints", - "vigilantie", "vigilante", - "vigoruosly", "vigorously", - "vigourosly", "vigorously", - "villageois", "villages", - "vindicitve", "vindictive", - "vindictave", "vindictive", - "visibiltiy", "visibility", - "vitenamese", "vietnamese", - "vocabluary", "vocabulary", - "volatiltiy", "volatility", - "volativity", "volatility", - "volitality", "volatility", - "volleyboll", "volleyball", - "vollyeball", "volleyball", - "volonteers", "volunteers", - "volounteer", "volunteer", - "voluntairy", "voluntarily", - "voluntarly", "voluntary", - "voluntears", "volunteers", - "volunteeer", "volunteers", - "volunteerd", "volunteered", - "voluntered", "volunteered", - "vulernable", "vulnerable", - "vulnarable", "vulnerable", - "vulnerabil", "vulnerable", - "vulnurable", "vulnerable", - "vunlerable", "vulnerable", - "warrandyte", "warranty", - "warrantles", "warranties", - "warrenties", "warranties", - "washignton", "washington", - "waterlemon", "watermelon", - "watermalon", "watermelon", - "waterproff", "waterproof", - "wavelegnth", "wavelength", - "wavelenghs", "wavelength", - "wavelenght", "wavelength", - "weakensses", "weaknesses", - "weaknesess", "weaknesses", - "weathliest", "wealthiest", - "wedensdays", "wednesdays", - "wednesdsay", "wednesdays", - "wednessday", "wednesdays", - "wednsedays", "wednesdays", - "weightened", "weighted", - "welathiest", "wealthiest", - "wellignton", "wellington", - "wellingotn", "wellington", - "wendesdays", "wednesdays", - "wereabouts", "whereabouts", - "westbroook", "westbrook", - "westernese", "westerners", - "westerness", "westerners", - "westminser", "westminster", - "westminter", "westminster", - "whatosever", "whatsoever", - "whatseover", "whatsoever", - "whipsering", "whispering", - "whsipering", "whispering", - "widepsread", "widespread", - "wikileakes", "wikileaks", - "wilderniss", "wilderness", - "wildreness", "wilderness", - "willfullly", "willfully", - "winchestor", "winchester", - "windhsield", "windshield", - "windsheild", "windshield", - "windshiled", "windshield", - "wisconsion", "wisconsin", - "wishpering", "whispering", - "withdrawan", "withdrawn", - "withdrawel", "withdrawal", - "withdrawin", "withdrawn", - "withholdng", "withholding", - "withrdawal", "withdrawals", - "witnissing", "witnessing", - "wonderfull", "wonderful", - "wonderfuly", "wonderfully", - "wonderwand", "wonderland", - "worhsiping", "worshiping", - "workingest", "workings", - "workstaion", "workstation", - "workstaton", "workstation", - "worshippig", "worshipping", - "worshoping", "worshiping", - "wrestlewar", "wrestler", - "xenohpobic", "xenophobic", - "xenophibia", "xenophobia", - "xenophibic", "xenophobic", - "xenophonic", "xenophobic", - "xenophopia", "xenophobia", - "xenophopic", "xenophobic", - "xeonphobia", "xenophobia", - "xeonphobic", "xenophobic", - "yourselfes", "yourselves", - "yoursleves", "yourselves", - "zimbabwaen", 
"zimbabwe", - "zionistisk", "zionists", - "abandonig", "abandoning", - "abandonne", "abandonment", - "abanonded", "abandoned", - "abdomnial", "abdominal", - "abdonimal", "abdominal", - "aberation", "aberration", - "abnormaly", "abnormally", - "abodminal", "abdominal", - "abondoned", "abandoned", - "aborigene", "aborigine", - "aboslutes", "absolutes", - "abosrbing", "absorbing", - "abreviate", "abbreviate", - "abritrary", "arbitrary", - "abruptley", "abruptly", - "absailing", "abseiling", - "absloutes", "absolutes", - "absolutey", "absolutely", - "absolutly", "absolutely", - "absoultes", "absolutes", - "abstracto", "abstraction", - "absurdley", "absurdly", - "absuridty", "absurdity", - "abusrdity", "absurdity", - "academica", "academia", - "accademic", "academic", - "accalimed", "acclaimed", - "accelerar", "accelerator", - "accending", "ascending", - "accension", "accession", - "accidenty", "accidently", - "acclamied", "acclaimed", - "accliamed", "acclaimed", - "accomdate", "accommodate", - "accordeon", "accordion", - "accordian", "accordion", - "accoridng", "according", - "accountas", "accountants", - "accountat", "accountants", - "accoustic", "acoustic", - "accroding", "according", - "accuraccy", "accuracy", - "acftually", "factually", - "acheiving", "achieving", - "achieveds", "achieves", - "achillees", "achilles", - "achilleos", "achilles", - "achilleus", "achilles", - "achiveing", "achieving", - "acitvates", "activates", - "aclhemist", "alchemist", - "acomplish", "accomplish", - "acquisito", "acquisition", - "acronymes", "acronyms", - "acronymns", "acronyms", - "acsending", "ascending", - "acsension", "ascension", - "activaste", "activates", - "activatin", "activation", - "activelly", "actively", - "activisim", "activism", - "activisit", "activist", - "activites", "activities", - "actresess", "actresses", - "acusation", "causation", - "acutality", "actuality", - "adavanced", "advanced", - "adbominal", "abdominal", - "additonal", "additional", - "addoptive", "adoptive", - "addresing", "addressing", - "addtional", "additional", - "adhearing", "adhering", - "adherance", "adherence", - "adjectivs", "adjectives", - "adjustabe", "adjustable", - "administr", "administer", - "admitedly", "admittedly", - "adolecent", "adolescent", - "adovcated", "advocated", - "adovcates", "advocates", - "adquiring", "acquiring", - "adresable", "addressable", - "adressing", "addressing", - "aduiobook", "audiobook", - "advatange", "advantage", - "adventurs", "adventures", - "adveristy", "adversity", - "advertisy", "adversity", - "advisorys", "advisors", - "aeorspace", "aerospace", - "aeropsace", "aerospace", - "aerosapce", "aerospace", - "aersopace", "aerospace", - "aestethic", "aesthetic", - "aethistic", "atheistic", - "affiliato", "affiliation", - "affinitiy", "affinity", - "affirmate", "affirmative", - "affliated", "affiliated", - "africanas", "africans", - "africanos", "africans", - "aggegrate", "aggregate", - "aggresive", "aggressive", - "agnosticm", "agnosticism", - "agregates", "aggregates", - "agreggate", "aggregate", - "agrentina", "argentina", - "agression", "aggression", - "agressive", "aggressive", - "agressvie", "agressive", - "agruement", "arguement", - "agruments", "arguments", - "agurement", "arguement", - "ailenated", "alienated", - "airbourne", "airborne", - "aircrafts", "aircraft", - "airplance", "airplane", - "airrcraft", "aircraft", - "aksreddit", "askreddit", - "alcehmist", "alchemist", - "alchemsit", "alchemist", - "alchimest", "alchemist", - "alchmeist", "alchemist", - "alchoolic", 
"alcoholic", - "alcoholis", "alcoholics", - "alechmist", "alchemist", - "alegience", "allegiance", - "aleinated", "alienated", - "algoriths", "algorithms", - "algoritms", "algorithms", - "algorthim", "algorithm", - "algortihm", "algorithm", - "alignemnt", "alignment", - "alimunium", "aluminium", - "alingment", "alignment", - "allainces", "alliances", - "alledgely", "allegedly", - "allegence", "allegiance", - "alleivate", "alleviate", - "allievate", "alleviate", - "alliviate", "alleviate", - "allopones", "allophones", - "allthough", "although", - "almightly", "almighty", - "alocholic", "alcoholic", - "alogrithm", "algorithm", - "alphabeat", "alphabet", - "alrightey", "alrighty", - "alrightly", "alrighty", - "alrightty", "alrighty", - "alrington", "arlington", - "alrorythm", "algorithm", - "alterante", "alternate", - "alternatr", "alternator", - "althetics", "athletics", - "althought", "although", - "altruisim", "altruism", - "amateures", "amateurs", - "ambluance", "ambulance", - "ambuigity", "ambiguity", - "amendmant", "amendment", - "amercians", "americans", - "americain", "american", - "americams", "americas", - "americaps", "americas", - "americats", "americas", - "amibguity", "ambiguity", - "aminosity", "animosity", - "amrstrong", "armstrong", - "amublance", "ambulance", - "amunition", "ammunition", - "anachrist", "anarchist", - "analagous", "analogous", - "analitycs", "analytics", - "analtyics", "analytics", - "analyitcs", "analytics", - "analyseas", "analyses", - "analysees", "analyses", - "analysens", "analyses", - "analysise", "analyses", - "analystes", "analysts", - "analzying", "analyzing", - "anarchsim", "anarchism", - "anayltics", "analytics", - "anaylzing", "analyzing", - "ancedotal", "anecdotal", - "ancedotes", "anecdotes", - "ancestory", "ancestry", - "androgeny", "androgyny", - "androides", "androids", - "androidos", "androids", - "anecdotle", "anecdote", - "anecodtal", "anecdotal", - "anecodtes", "anecdotes", - "anectodal", "anecdotal", - "anectodes", "anecdotes", - "anedoctal", "anecdotal", - "anedoctes", "anecdotes", - "animostiy", "animosity", - "anitvirus", "antivirus", - "anlaytics", "analytics", - "anniversy", "anniversary", - "annointed", "anointed", - "annoucnes", "announces", - "annoyingy", "annoyingly", - "annoymous", "anonymous", - "annoynace", "annoyance", - "annyoance", "annoyance", - "anomisity", "animosity", - "anomolies", "anomalies", - "anomolous", "anomalous", - "anomynity", "anonymity", - "anomynous", "anonymous", - "anonimity", "anonymity", - "anonmyous", "anonymous", - "anonymoys", "anonymously", - "anorexiac", "anorexic", - "anorexica", "anorexia", - "anrachist", "anarchist", - "ansestors", "ancestors", - "antarctia", "antarctica", - "antennaes", "antennas", - "antiviurs", "antivirus", - "antivrius", "antivirus", - "antivuris", "antivirus", - "anwsering", "answering", - "anynomity", "anonymity", - "anynomous", "anonymous", - "aparthide", "apartheid", - "aparthied", "apartheid", - "apartmens", "apartments", - "apocalype", "apocalypse", - "apostrope", "apostrophe", - "apparenty", "apparently", - "appearane", "appearances", - "appenines", "apennines", - "apperance", "appearance", - "appetitie", "appetite", - "applaudes", "applause", - "applicato", "application", - "appreciae", "appreciates", - "apprentie", "apprentice", - "approachs", "approaches", - "apratheid", "apartheid", - "apsaragus", "asparagus", - "apsergers", "aspergers", - "aquainted", "acquainted", - "arbirtary", "arbitrary", - "arbritary", "arbitrary", - "arcehtype", "archetype", - 
"archetect", "architect", - "archetpye", "archetype", - "archetyps", "archetypes", - "architecs", "architects", - "archtypes", "archetypes", - "aregument", "arguement", - "areospace", "aerospace", - "argessive", "agressive", - "argeument", "arguement", - "arguabley", "arguably", - "arguablly", "arguably", - "arguement", "argument", - "arguemnet", "arguement", - "arguemnts", "arguments", - "argumeent", "arguement", - "arhtritis", "arthritis", - "aribtrary", "arbitrary", - "ariplanes", "airplanes", - "aristolte", "aristotle", - "aristotel", "aristotle", - "aritfacts", "artifacts", - "arlignton", "arlington", - "arlingotn", "arlington", - "armistace", "armistice", - "armstorng", "armstrong", - "arpatheid", "apartheid", - "arthirtis", "arthritis", - "artifcats", "artifacts", - "artifical", "artificial", - "artillary", "artillery", - "arugement", "arguement", - "arugments", "arguments", - "asapragus", "asparagus", - "asbestoes", "asbestos", - "asborbing", "absorbing", - "asburdity", "absurdity", - "ascendend", "ascended", - "ascneding", "ascending", - "ascnesion", "ascension", - "asethetic", "aesthetic", - "asnwering", "answering", - "asociated", "associated", - "assasined", "assassinated", - "assassian", "assassin", - "assassine", "assassinate", - "assasssin", "assassins", - "assaultes", "assaults", - "assembeld", "assembled", - "assembley", "assembly", - "assemblie", "assemble", - "assisnate", "assassinate", - "assistans", "assistants", - "assistsnt", "assistants", - "assmebled", "assembled", - "associato", "association", - "assoicate", "associate", - "asssasins", "assassins", - "assualted", "assaulted", - "assulated", "assaulted", - "asteorids", "asteroids", - "astericks", "asterisk", - "asteriods", "asteroids", - "astroanut", "astronaut", - "astronuat", "astronaut", - "astrounat", "astronaut", - "asuterity", "austerity", - "atempting", "attempting", - "atheltics", "athletics", - "atheneans", "athenians", - "athesitic", "atheistic", - "athetlics", "athletics", - "athiestic", "atheistic", - "athleticm", "athleticism", - "atmosphir", "atmospheric", - "atributed", "attributed", - "atributes", "attributes", - "atrifacts", "artifacts", - "atrillery", "artillery", - "atrittion", "attrition", - "attachmet", "attachments", - "attaindre", "attainder", - "attemting", "attempting", - "attemtped", "attempted", - "attendent", "attendant", - "attension", "attention", - "attirbute", "attribute", - "attirtion", "attrition", - "attmepted", "attempted", - "attractes", "attracts", - "attractin", "attraction", - "attributo", "attribution", - "attributs", "attributes", - "attritube", "attribute", - "auctionrs", "auctions", - "auidobook", "audiobook", - "auromated", "automated", - "australin", "australians", - "authroity", "authority", - "autoattak", "autoattack", - "autogrpah", "autograph", - "autonomos", "autonomous", - "auxillary", "auxiliary", - "avaialble", "available", - "availible", "available", - "avalaible", "available", - "avaliable", "available", - "averageed", "averaged", - "avialable", "available", - "awakenend", "awakened", - "awesomley", "awesomely", - "awkawrdly", "awkwardly", - "awnsering", "answering", - "bacehlors", "bachelors", - "bachelour", "bachelor", - "bachleors", "bachelors", - "bacholers", "bachelors", - "backdooor", "backdoor", - "backfeild", "backfield", - "backfiled", "backfield", - "backgroud", "background", - "backpakcs", "backpacks", - "badnwagon", "bandwagon", - "badnwidth", "bandwidth", - "balckjack", "blackjack", - "balcklist", "blacklist", - "balitmore", "baltimore", - 
"ballisitc", "ballistic", - "ballsitic", "ballistic", - "balsphemy", "blasphemy", - "bandiwdth", "bandwidth", - "bandwdith", "bandwidth", - "bandwidht", "bandwidth", - "bandwitdh", "bandwidth", - "bankrupcy", "bankruptcy", - "bankrupty", "bankruptcy", - "banruptcy", "bankruptcy", - "baordwalk", "boardwalk", - "barabrian", "barbarian", - "barbarain", "barbarian", - "barbarina", "barbarian", - "barcelets", "bracelets", - "barcleona", "barcelona", - "bareclona", "barcelona", - "barrackus", "barracks", - "bascially", "basically", - "bastardes", "bastards", - "bastardos", "bastards", - "bastardus", "bastards", - "bathrooom", "bathroom", - "batlimore", "baltimore", - "battailon", "battalion", - "battlaion", "battalion", - "beahviour", "behaviour", - "beauitful", "beautiful", - "beautifyl", "beautifully", - "becnhmark", "benchmark", - "becomeing", "becoming", - "becomming", "becoming", - "beehtoven", "beethoven", - "begginers", "beginners", - "beggining", "beginning", - "begininng", "beginning", - "beginnins", "beginnings", - "behaivors", "behaviors", - "behaivour", "behaviour", - "behavoirs", "behaviors", - "behavoiur", "behaviour", - "behvaiour", "behaviour", - "beleiving", "believing", - "beliveing", "believing", - "belssings", "blessings", - "bemusemnt", "bemusement", - "benchamrk", "benchmark", - "benchmars", "benchmarks", - "benedicat", "benedict", - "benedickt", "benedict", - "benghazhi", "benghazi", - "benghazzi", "benghazi", - "bergamont", "bergamot", - "berkelely", "berkeley", - "bersekrer", "berserker", - "berskerer", "berserker", - "beseiging", "besieging", - "bestialiy", "bestiality", - "beuatiful", "beautiful", - "biginning", "beginning", - "bigrading", "brigading", - "billbaord", "billboard", - "billboars", "billboards", - "binominal", "binomial", - "birgading", "brigading", - "birghtest", "brightest", - "birhtdays", "birthdays", - "bitcoints", "bitcoins", - "blackbery", "blackberry", - "blackhaws", "blackhawks", - "blackshit", "blacksmith", - "blanketts", "blankets", - "blapshemy", "blasphemy", - "blashpemy", "blasphemy", - "blaspehmy", "blasphemy", - "blasphmey", "blasphemy", - "blatanlty", "blatantly", - "blatimore", "baltimore", - "bleuberry", "blueberry", - "bleutooth", "bluetooth", - "blisteres", "blisters", - "blizzcoin", "blizzcon", - "blockchan", "blockchain", - "blockeras", "blockers", - "bloodbore", "bloodborne", - "boardband", "broadband", - "boardcast", "broadcast", - "bodyweigt", "bodyweight", - "bookamrks", "bookmarks", - "bookmakrs", "bookmarks", - "bookmarkd", "bookmarked", - "boradband", "broadband", - "boradcast", "broadcast", - "boradwalk", "boardwalk", - "bouregois", "bourgeois", - "bourgeios", "bourgeois", - "bourgoeis", "bourgeois", - "boyfirend", "boyfriend", - "boyfreind", "boyfriend", - "boyfriens", "boyfriends", - "brabarian", "barbarian", - "bracelona", "barcelona", - "braodband", "broadband", - "braodcast", "broadcast", - "brazilias", "brazilians", - "breakdows", "breakdowns", - "breserker", "berserker", - "bretheren", "brethren", - "bridaging", "brigading", - "brightern", "brighten", - "brigthest", "brightest", - "brilliany", "brilliantly", - "brithdays", "birthdays", - "broadwalk", "boardwalk", - "bruiseres", "bruisers", - "brunettte", "brunette", - "brusseles", "brussels", - "brussells", "brussels", - "brutailty", "brutality", - "brutallly", "brutally", - "buddhisim", "buddhism", - "buddihsts", "buddhists", - "buddishts", "buddhists", - "buhddists", "buddhists", - "buidlings", "buildings", - "bulidings", "buildings", - "burgunday", "burgundy", - 
"burgundry", "burgundy", - "burritoes", "burritos", - "burtality", "brutality", - "busineses", "business", - "businessa", "businessman", - "businesse", "businessmen", - "businesss", "businesses", - "bussiness", "business", - "buthcered", "butchered", - "butterlfy", "butterfly", - "cacausian", "caucasian", - "caclulate", "calculate", - "cacuasian", "caucasian", - "caculater", "calculator", - "cafeteira", "cafeteria", - "cafetiera", "cafeteria", - "caffeinne", "caffeine", - "calcualte", "calculate", - "californa", "california", - "caluclate", "calculate", - "calulated", "calculated", - "calulater", "calculator", - "cambirdge", "cambridge", - "cambrdige", "cambridge", - "cambrigde", "cambridge", - "camoflage", "camouflage", - "campagins", "campaigns", - "campaings", "campaigns", - "campiagns", "campaigns", - "campusers", "campuses", - "camrbidge", "cambridge", - "canadains", "canadians", - "candadate", "candidate", - "candidats", "candidates", - "cannister", "canister", - "cannoical", "canonical", - "canoncial", "canonical", - "capactior", "capacitor", - "capicator", "capacitor", - "capitalis", "capitals", - "caprenter", "carpenter", - "capsulers", "capsules", - "capsulets", "capsules", - "carachter", "character", - "cardbaord", "cardboard", - "cardborad", "cardboard", - "cardianls", "cardinals", - "cardnials", "cardinals", - "caridnals", "cardinals", - "carmalite", "carmelite", - "carnberry", "cranberry", - "carolinia", "carolina", - "carpetner", "carpenter", - "carptener", "carpenter", - "carribean", "caribbean", - "cartdrige", "cartridge", - "cartilege", "cartilage", - "cartirdge", "cartridge", - "cartrdige", "cartridge", - "cartrigde", "cartridge", - "casaulity", "causality", - "cashieres", "cashiers", - "cassawory", "cassowary", - "cassettte", "cassette", - "casuation", "causation", - "cataclsym", "cataclysm", - "cataclyms", "cataclysm", - "catacylsm", "cataclysm", - "catacyslm", "cataclysm", - "catalcysm", "cataclysm", - "catalgoue", "catalogue", - "cathderal", "cathedral", - "catherdal", "cathedral", - "cathloics", "catholics", - "cathredal", "cathedral", - "caucaisan", "caucasian", - "caucasain", "caucasian", - "causacian", "caucasian", - "causailty", "causality", - "celebirty", "celebrity", - "celebrato", "celebration", - "celebrite", "celebrities", - "celesital", "celestial", - "celestail", "celestial", - "cementary", "cemetery", - "cemetarey", "cemetery", - "cenitpede", "centipede", - "centepide", "centipede", - "centipeed", "centipede", - "centruies", "centuries", - "centuties", "centuries", - "cerebrawl", "cerebral", - "certanity", "certainty", - "certianty", "certainty", - "cesspoool", "cesspool", - "chairmain", "chairman", - "challange", "challenge", - "challengr", "challenger", - "challengs", "challenges", - "chameloen", "chameleon", - "champagen", "champagne", - "champange", "champagne", - "chandlure", "chandler", - "changable", "changeable", - "charactor", "character", - "chatedral", "cathedral", - "chatolics", "catholics", - "checkmeat", "checkmate", - "checkpoit", "checkpoints", - "chekcmate", "checkmate", - "chemestry", "chemistry", - "chemicaly", "chemically", - "chemsitry", "chemistry", - "chernboyl", "chernobyl", - "chernobly", "chernobyl", - "chernoybl", "chernobyl", - "chernyobl", "chernobyl", - "cheronbyl", "chernobyl", - "chidlfree", "childfree", - "chidlrens", "childrens", - "chihauhua", "chihuahua", - "chihuahau", "chihuahua", - "childbird", "childbirth", - "childerns", "childrens", - "childisch", "childish", - "childresn", "childrens", - "chirstian", 
"christian", - "chirstmas", "christmas", - "chiuhahua", "chihuahua", - "chlidfree", "childfree", - "chlidrens", "childrens", - "chocloate", "chocolate", - "chocoalte", "chocolate", - "chocolats", "chocolates", - "chocolste", "chocolates", - "cholocate", "chocolate", - "chrenobyl", "chernobyl", - "chrisitan", "christian", - "christain", "christian", - "christams", "christmas", - "chrsitian", "christian", - "chrsitmas", "christmas", - "churchers", "churches", - "cigaretts", "cigarettes", - "cigeratte", "cigarette", - "cilivians", "civilians", - "cilpboard", "clipboard", - "cilynders", "cylinders", - "circuitos", "circuits", - "ciriculum", "curriculum", - "cirticise", "criticise", - "civilains", "civilians", - "civillian", "civilian", - "classicos", "classics", - "classicus", "classics", - "classifiy", "classify", - "cleanisng", "cleansing", - "cleasning", "cleansing", - "clikcbait", "clickbait", - "clinicaly", "clinically", - "clipbaord", "clipboard", - "clitories", "clitoris", - "clitorios", "clitoris", - "clitorius", "clitoris", - "clucthing", "clutching", - "clutchign", "clutching", - "cluthcing", "clutching", - "coca cola", "coca-cola", - "cockatils", "cocktails", - "cocktials", "cocktails", - "cognizent", "cognizant", - "colateral", "collateral", - "collabore", "collaborate", - "collasped", "collapsed", - "collaspes", "collapses", - "colleauge", "colleague", - "collectes", "collects", - "collectie", "collective", - "collecton", "collection", - "collectos", "collectors", - "collegaue", "colleague", - "collegues", "colleagues", - "collisson", "collisions", - "collonade", "colonnade", - "collonies", "colonies", - "collpased", "collapsed", - "collpases", "collapses", - "colombina", "colombia", - "columbina", "columbia", - "comapnies", "companies", - "combatans", "combatants", - "combinato", "combination", - "combusion", "combustion", - "comestics", "cosmetics", - "comisions", "commissions", - "comission", "commission", - "comitting", "committing", - "commandes", "commands", - "commentar", "commentator", - "commentes", "commenters", - "commercie", "commerce", - "commision", "commission", - "commiteed", "commited", - "commiting", "committing", - "commitmet", "commitments", - "commments", "comments", - "commongly", "commonly", - "communiss", "communists", - "communite", "communities", - "communits", "communist", - "communsim", "communism", - "compaines", "companies", - "compalins", "complains", - "compalint", "compliant", - "comparisn", "comparisons", - "compeltes", "completes", - "competant", "competent", - "competend", "competed", - "competion", "competition", - "competive", "competitive", - "compilant", "compliant", - "compilare", "compiler", - "compilato", "compilation", - "compitent", "competent", - "complaind", "complained", - "complaing", "complaining", - "completen", "complement", - "completey", "completely", - "completin", "completion", - "complians", "complains", - "componant", "component", - "comprable", "comparable", - "compresas", "compress", - "compreses", "compress", - "compteurs", "computers", - "comptuers", "computers", - "computato", "computation", - "comradets", "comrades", - "comsetics", "cosmetics", - "conanical", "canonical", - "conatiner", "container", - "concelaed", "concealed", - "concelaer", "concealer", - "concelear", "concealer", - "concensus", "consensus", - "conceptos", "concepts", - "conceptul", "conceptual", - "concernig", "concerning", - "concertas", "concerts", - "concevied", "conceived", - "conciders", "considers", - "concieted", "conceited", - "concieved", 
"conceived", - "conclusie", "conclusive", - "concsious", "conscious", - "concurret", "concurrent", - "condamned", "condemned", - "condemend", "condemned", - "condemmed", "condemned", - "condemnig", "condemning", - "condenmed", "condemned", - "condesend", "condensed", - "condesned", "condensed", - "condmened", "condemned", - "conection", "connection", - "conenctor", "connector", - "conferene", "conferences", - "confessin", "confession", - "confideny", "confidently", - "confilcts", "conflicts", - "confimred", "confirmed", - "confirmas", "confirms", - "conflcits", "conflicts", - "confrimed", "confirmed", - "congitive", "cognitive", - "conlcuded", "concluded", - "connectes", "connects", - "connectit", "connecticut", - "connectos", "connectors", - "conquerer", "conqueror", - "consdider", "consider", - "consensul", "consensual", - "conserned", "concerned", - "consicous", "conscious", - "considerd", "considered", - "considert", "considerate", - "consisent", "consistent", - "consistes", "consists", - "consolato", "consolation", - "consolide", "consolidate", - "consonent", "consonant", - "constanly", "constantly", - "constanst", "constants", - "constanty", "constantly", - "constasnt", "constants", - "constitue", "constitutes", - "constrait", "constraints", - "construcs", "constructs", - "construde", "construed", - "construst", "constructs", - "constucts", "constructs", - "constured", "construed", - "consulant", "consultant", - "consultat", "consultant", - "consumate", "consummate", - "contactes", "contacts", - "contactos", "contacts", - "contagios", "contagious", - "containes", "contains", - "containig", "containing", - "containts", "contains", - "contemple", "contemplate", - "contendor", "contender", - "contentas", "contents", - "contentes", "contents", - "contentos", "contents", - "contestas", "contests", - "contestat", "contestants", - "contestes", "contests", - "contextes", "contexts", - "contextos", "contexts", - "contianer", "container", - "contibute", "contribute", - "contigent", "contingent", - "continant", "continental", - "continens", "continents", - "continous", "continuous", - "continuos", "continuous", - "continute", "continue", - "contiunal", "continual", - "contracto", "contraction", - "contribue", "contribute", - "contribuo", "contributor", - "controlas", "controls", - "controled", "controlled", - "controles", "controls", - "controlls", "controls", - "convenant", "covenant", - "convencen", "convenience", - "conveniet", "convenient", - "conversie", "converse", - "conversin", "conversions", - "convertie", "convertible", - "convertis", "converts", - "cooldwons", "cooldowns", - "coordinar", "coordinator", - "copenhagn", "copenhagen", - "coprorate", "corporate", - "copywrite", "copyright", - "corcodile", "crocodile", - "corparate", "corporate", - "corproate", "corporate", - "correclty", "correctly", - "correctin", "correction", - "correlato", "correlation", - "corridoor", "corridor", - "corruptin", "corruption", - "corssfire", "crossfire", - "corsshair", "crosshair", - "corsspost", "crosspost", - "coruching", "crouching", - "cosemtics", "cosmetics", - "costumise", "costumes", - "counciles", "councils", - "councills", "councils", - "councilos", "councils", - "countains", "contains", - "counteres", "counters", - "countires", "countries", - "courching", "crouching", - "courtesey", "courtesy", - "courtesty", "courtesy", - "coururier", "courier", - "coutnered", "countered", - "crapenter", "carpenter", - "creativey", "creatively", - "creedence", "credence", - "crhistmas", "christmas", - 
"cricketts", "crickets", - "criminaly", "criminally", - "critereon", "criterion", - "criterias", "criteria", - "criticaly", "critically", - "criticies", "criticise", - "criticisn", "criticising", - "critisice", "criticise", - "critisicm", "criticism", - "critising", "criticising", - "critisism", "criticism", - "critisize", "criticise", - "critizing", "criticizing", - "crosshiar", "crosshair", - "crossifre", "crossfire", - "crticised", "criticised", - "crusdaers", "crusaders", - "crutchers", "crutches", - "crystalls", "crystals", - "crystalus", "crystals", - "crystalys", "crystals", - "cuacasian", "caucasian", - "cuasality", "causality", - "culitvate", "cultivate", - "culturaly", "culturally", - "culturels", "cultures", - "curiostiy", "curiosity", - "curisoity", "curiosity", - "currenlty", "currently", - "curriculm", "curriculum", - "cursaders", "crusaders", - "custcenes", "cutscenes", - "cutsceens", "cutscenes", - "cutscence", "cutscene", - "cutsences", "cutscenes", - "cyclinder", "cylinder", - "cyclistes", "cyclists", - "cylindres", "cylinders", - "cynicisim", "cynicism", - "dahsboard", "dashboard", - "dalmation", "dalmatian", - "dangeroys", "dangerously", - "dashbaord", "dashboard", - "daugthers", "daughters", - "davantage", "advantage", - "deadlfits", "deadlifts", - "deadpoool", "deadpool", - "dealershp", "dealerships", - "deathmath", "deathmatch", - "decalring", "declaring", - "decendant", "descendant", - "decendent", "descendant", - "decipting", "depicting", - "deciption", "depiction", - "decisivie", "decisive", - "declarase", "declares", - "declarees", "declares", - "decoratie", "decorative", - "decoratin", "decorations", - "decpetion", "deception", - "decpetive", "deceptive", - "decribing", "describing", - "decsended", "descended", - "deductibe", "deductible", - "defaintly", "defiantly", - "defaltion", "deflation", - "defanitly", "defiantly", - "defeintly", "definetly", - "defendent", "defendant", - "defensese", "defenseless", - "defianlty", "defiantly", - "deficeint", "deficient", - "deficieny", "deficiency", - "deficites", "deficits", - "definance", "defiance", - "definatey", "definately", - "definatly", "definitely", - "definetly", "definitely", - "definetyl", "definetly", - "definilty", "definitly", - "definitie", "definitive", - "definitin", "definitions", - "definitly", "definitely", - "definiton", "definition", - "definitve", "definite", - "definityl", "definitly", - "definltey", "definetly", - "defintaly", "defiantly", - "defintily", "definitly", - "defintion", "definition", - "defintley", "definetly", - "defitenly", "definetly", - "defitinly", "definitly", - "defitnaly", "defiantly", - "defitnely", "definetly", - "deflectin", "deflection", - "defnietly", "definetly", - "degeneret", "degenerate", - "degradato", "degradation", - "degradead", "degraded", - "degrassie", "degrasse", - "degrassse", "degrasse", - "deifnetly", "definetly", - "deifnitly", "definitly", - "deisgners", "designers", - "delagates", "delegates", - "delcaring", "declaring", - "delcining", "declining", - "delegatie", "delegate", - "delerious", "delirious", - "deleteing", "deleting", - "delfation", "deflation", - "deliveres", "delivers", - "deliverys", "delivers", - "delpoying", "deploying", - "demcorats", "democrats", - "deminsion", "dimension", - "democarcy", "democracy", - "democract", "democrat", - "demonstre", "demonstrate", - "denominar", "denominator", - "dentistas", "dentists", - "dentistes", "dentists", - "deomcrats", "democrats", - "deopsited", "deposited", - "deparment", "department", - 
"departmet", "departments", - "depciting", "depicting", - "depcition", "depiction", - "depection", "deception", - "depedency", "dependency", - "depicitng", "depicting", - "depiciton", "depiction", - "deplyoing", "deploying", - "depoisted", "deposited", - "depolying", "deploying", - "depositas", "deposits", - "deposites", "deposits", - "depositis", "deposits", - "depositos", "deposits", - "depostied", "deposited", - "depressie", "depressive", - "depressin", "depression", - "depserate", "desperate", - "depsoited", "deposited", - "descirbes", "describes", - "descision", "decision", - "desginers", "designers", - "desgining", "designing", - "desicions", "decisions", - "designade", "designated", - "designato", "designation", - "desingage", "disengage", - "desingers", "designers", - "desinging", "designing", - "desktopos", "desktops", - "desparate", "desperate", - "desperato", "desperation", - "despoited", "deposited", - "desriable", "desirable", - "dessigned", "designed", - "destinato", "destination", - "destoryed", "destroyed", - "destoryer", "destroyer", - "destroyes", "destroys", - "destructo", "destruction", - "destryoed", "destroyed", - "destryoer", "destroyer", - "desuction", "seduction", - "detailled", "detailed", - "detatched", "detached", - "detectivs", "detectives", - "deteriate", "deteriorate", - "determing", "determining", - "determins", "determines", - "developrs", "develops", - "diabetees", "diabetes", - "diablical", "diabolical", - "diagonaal", "diagonal", - "diagonsed", "diagnosed", - "diagonsis", "diagnosis", - "diagramas", "diagrams", - "diagramms", "diagrams", - "dialectes", "dialects", - "dialectos", "dialects", - "diarrheoa", "diarrhea", - "diasbling", "disabling", - "dichomoty", "dichotomy", - "dicovered", "discovered", - "dictaters", "dictates", - "dictionay", "dictionary", - "difenitly", "definitly", - "diferrent", "different", - "differene", "differences", - "differens", "differences", - "differeny", "differently", - "difficuly", "difficulty", - "diffucult", "difficult", - "dificulty", "difficulty", - "diganosed", "diagnosed", - "diganosis", "diagnosis", - "dimenions", "dimensions", - "dimention", "dimension", - "dimesnion", "dimension", - "diminishs", "diminishes", - "dinasours", "dinosaurs", - "dinosuars", "dinosaurs", - "dinsoaurs", "dinosaurs", - "dionsaurs", "dinosaurs", - "diphtongs", "diphthongs", - "dipthongs", "diphthongs", - "direcotry", "directory", - "directoty", "directory", - "directroy", "directory", - "disapears", "disappears", - "disaprity", "disparity", - "disastros", "disastrous", - "disatrous", "disastrous", - "disbaling", "disabling", - "disbeleif", "disbelief", - "disbelife", "disbelief", - "disciplen", "disciplines", - "disclamer", "disclaimer", - "disclosue", "disclosure", - "disconnet", "disconnect", - "discosure", "discourse", - "discoverd", "discovered", - "discovere", "discoveries", - "discredid", "discredited", - "discribed", "described", - "discribes", "describes", - "discussin", "discussion", - "diserable", "desirable", - "disgarees", "disagrees", - "disgiused", "disguised", - "disgusied", "disguised", - "disgustes", "disgusts", - "disgustos", "disgusts", - "disgustus", "disgusts", - "dishonesy", "dishonesty", - "dishonord", "dishonored", - "disicples", "disciples", - "dismantel", "dismantle", - "dismisals", "dismissal", - "disnegage", "disengage", - "dispairty", "disparity", - "dispalyed", "displayed", - "dispartiy", "disparity", - "dispenced", "dispensed", - "dispeners", "dispenser", - "displayes", "displays", - "disruptin", "disruption", 
- "dissapear", "disappear", - "dissarray", "disarray", - "dissmisal", "dismissal", - "disspiate", "dissipate", - "distincte", "distinctive", - "distrcits", "districts", - "distribue", "distributed", - "distrubed", "disturbed", - "distrupts", "distrust", - "disturben", "disturbance", - "diverisfy", "diversify", - "diveristy", "diversity", - "diverstiy", "diversity", - "dividened", "dividend", - "divinitiy", "divinity", - "doccument", "document", - "docrtines", "doctrines", - "docuhebag", "douchebag", - "dogdammit", "goddammit", - "dogfather", "godfather", - "dolphines", "dolphins", - "domecracy", "democracy", - "domecrats", "democrats", - "domiantes", "dominates", - "dominatin", "domination", - "dominaton", "domination", - "dominiant", "dominant", - "donwgrade", "downgrade", - "donwloads", "downloads", - "donwsides", "downsides", - "donwvoted", "downvoted", - "donwvotes", "downvotes", - "doublelit", "doublelift", - "doucehbag", "douchebag", - "downgarde", "downgrade", - "downlaods", "downloads", - "downloaad", "download", - "downovted", "downvoted", - "dravadian", "dravidian", - "drummless", "drumless", - "dsyphoria", "dysphoria", - "dsytopian", "dystopian", - "duaghters", "daughters", - "duplicats", "duplicates", - "durabiliy", "durability", - "dynamicus", "dynamics", - "dypshoria", "dysphoria", - "dyshporia", "dysphoria", - "dysoptian", "dystopian", - "dysphoira", "dysphoria", - "dysphroia", "dysphoria", - "dyspohria", "dysphoria", - "dyspotian", "dystopian", - "dystopain", "dystopian", - "dystpoian", "dystopian", - "eachohter", "eachother", - "eachotehr", "eachother", - "eachtoher", "eachother", - "earpluggs", "earplugs", - "earthboud", "earthbound", - "eastwoood", "eastwood", - "eastwoord", "eastwood", - "ecclectic", "eclectic", - "ecomonics", "economics", - "edficient", "deficient", - "effecient", "efficient", - "efficeint", "efficient", - "efficency", "efficiency", - "efficieny", "efficiency", - "effulence", "effluence", - "egalitara", "egalitarian", - "egpytians", "egyptians", - "egyptains", "egyptians", - "egytpians", "egyptians", - "ehtically", "ethically", - "ehtnicity", "ethnicity", - "eighteeen", "eighteen", - "eitquette", "etiquette", - "ejacualte", "ejaculate", - "electivre", "elective", - "electorns", "electrons", - "electrial", "electrical", - "electricy", "electricity", - "electroal", "electoral", - "elementay", "elementary", - "elepahnts", "elephants", - "eliminase", "eliminates", - "eliminato", "elimination", - "ellignton", "ellington", - "ellingotn", "ellington", - "eloquenty", "eloquently", - "elsehwere", "elsewhere", - "emapthize", "empathize", - "embarress", "embarrassed", - "emmisarry", "emissary", - "emmisions", "emissions", - "emmitting", "emitting", - "empahsize", "emphasize", - "emperical", "empirical", - "emphaised", "emphasised", - "emphatize", "empathize", - "emphazise", "emphasize", - "emphysyma", "emphysema", - "empitness", "emptiness", - "employeer", "employer", - "employeur", "employer", - "empolyees", "employees", - "emtpiness", "emptiness", - "emualtion", "emulation", - "enahncing", "enhancing", - "enchantig", "enchanting", - "enclousre", "enclosure", - "enclsoure", "enclosure", - "encolsure", "enclosure", - "encompase", "encompass", - "enconding", "encoding", - "encounted", "encountered", - "encrpyted", "encrypted", - "encrytped", "encrypted", - "encyrpted", "encrypted", - "endangerd", "endangered", - "enevlopes", "envelopes", - "enforcees", "enforces", - "engagemet", "engagements", - "engagment", "engagement", - "engieneer", "engineer", - "engineeer", 
"engineer", - "engineerd", "engineered", - "enhacning", "enhancing", - "enhanceds", "enhances", - "enligthen", "enlighten", - "enourmous", "enormous", - "ensconsed", "ensconced", - "enthicity", "ethnicity", - "enthusiam", "enthusiasm", - "enthusiat", "enthusiast", - "entirelly", "entirely", - "entitlied", "entitled", - "enveloppe", "envelope", - "epidsodes", "episodes", - "epilepsey", "epilepsy", - "epiphanny", "epiphany", - "episonage", "espionage", - "epscially", "specially", - "epsionage", "espionage", - "eqautions", "equations", - "equialent", "equivalent", - "equivalet", "equivalents", - "ermington", "remington", - "erroenous", "erroneous", - "escalatie", "escalate", - "escalatin", "escalation", - "esitmated", "estimated", - "esitmates", "estimates", - "eslewhere", "elsewhere", - "especialy", "especially", - "espianoge", "espionage", - "espinoage", "espionage", - "espoinage", "espionage", - "esponiage", "espionage", - "espressso", "espresso", - "essencial", "essential", - "essentail", "essential", - "essentias", "essentials", - "essentual", "essential", - "essesital", "essential", - "estiamted", "estimated", - "estiamtes", "estimates", - "estimatin", "estimation", - "ethcially", "ethically", - "ethincity", "ethnicity", - "ethnicaly", "ethnically", - "ethniticy", "ethnicity", - "etmyology", "etymology", - "euclidian", "euclidean", - "euorpeans", "europeans", - "euphoriac", "euphoric", - "euphorica", "euphoria", - "europenas", "europeans", - "europians", "europeans", - "eurpoeans", "europeans", - "evangelia", "evangelical", - "evelation", "elevation", - "evenlopes", "envelopes", - "eventally", "eventually", - "eventualy", "eventually", - "everthing", "everything", - "evertyime", "everytime", - "everwhere", "everywhere", - "everyoens", "everyones", - "everyteim", "everytime", - "everytiem", "everytime", - "everyting", "everything", - "eveyrones", "everyones", - "evreyones", "everyones", - "evreytime", "everytime", - "exagerate", "exaggerate", - "exahusted", "exhausted", - "exapnsive", "expansive", - "exauhsted", "exhausted", - "excahnges", "exchanges", - "excecuted", "executed", - "excecutes", "executes", - "excellant", "excellent", - "excercise", "exercise", - "excerised", "exercised", - "excerises", "exercises", - "exceuting", "executing", - "exchnages", "exchanges", - "exclsuive", "exclusive", - "excludeds", "excludes", - "exclusivs", "exclusives", - "exclusivy", "exclusivity", - "excpetion", "exception", - "exculding", "excluding", - "exculsion", "exclusion", - "exculsive", "exclusive", - "execising", "exercising", - "execption", "exception", - "exectuing", "executing", - "exectuion", "execution", - "exectuive", "executive", - "executabe", "executable", - "exepmtion", "exemption", - "exerbated", "exacerbated", - "exercices", "exercise", - "exerciese", "exercises", - "exercizes", "exercise", - "exersices", "exercises", - "exhasuted", "exhausted", - "exhaustin", "exhaustion", - "exhibites", "exhibits", - "exhibitin", "exhibition", - "exhibtion", "exhibition", - "exhuasted", "exhausted", - "exibition", "exhibition", - "existance", "existence", - "existenta", "existential", - "existince", "existence", - "existnace", "existance", - "exlcuding", "excluding", - "exlcusion", "exclusion", - "exlcusive", "exclusive", - "exlpoding", "exploding", - "exlporers", "explorers", - "exlposion", "explosion", - "exonorate", "exonerate", - "expalined", "explained", - "expanisve", "expansive", - "expatriot", "expatriate", - "expectany", "expectancy", - "expection", "exception", - "expemtion", "exemption", 
- "experimet", "experiments", - "explaines", "explains", - "explainig", "explaining", - "explaning", "explaining", - "expliciet", "explicit", - "explicity", "explicitly", - "explictly", "explicitly", - "explioted", "exploited", - "explodeds", "explodes", - "exploites", "exploits", - "explorare", "explorer", - "explotied", "exploited", - "expolding", "exploding", - "expolited", "exploited", - "expolsion", "explosion", - "expolsive", "explosive", - "expressie", "expressive", - "expressin", "expression", - "exsitance", "existance", - "extention", "extension", - "exteriour", "exterior", - "extermely", "extremely", - "extermism", "extremism", - "extermist", "extremist", - "externaly", "externally", - "extractin", "extraction", - "extrapole", "extrapolate", - "extreemly", "extremely", - "extremers", "extremes", - "extremley", "extremely", - "extrotion", "extortion", - "eyeballls", "eyeballs", - "eyebrowes", "eyebrows", - "eyebrowns", "eyebrows", - "eyesahdow", "eyeshadow", - "eyeshdaow", "eyeshadow", - "eygptians", "egyptians", - "eytmology", "etymology", - "faceboook", "facebook", - "faciliate", "facilitate", - "facilites", "facilities", - "facilitiy", "facility", - "facinated", "fascinated", - "facutally", "factually", - "familiair", "familiar", - "familiare", "familiarize", - "familiary", "familiarity", - "familliar", "familiar", - "fanaticas", "fanatics", - "fanaticos", "fanatics", - "fanaticus", "fanatics", - "fanatsies", "fantasies", - "fanatsize", "fantasize", - "fandation", "foundation", - "fanservie", "fanservice", - "fantazise", "fantasize", - "farenheit", "fahrenheit", - "fascistes", "fascists", - "fashoined", "fashioned", - "favorties", "favorites", - "favoruite", "favourite", - "favourits", "favourites", - "favourtie", "favourite", - "fedreally", "federally", - "feminisim", "feminism", - "feminsits", "feminists", - "femminist", "feminist", - "fesitvals", "festivals", - "fetishers", "fetishes", - "fightings", "fighting", - "filetimes", "lifetimes", - "filiament", "filament", - "filmmakes", "filmmakers", - "fingernal", "fingernails", - "flashligt", "flashlight", - "flavorade", "flavored", - "flavoures", "flavours", - "flavourus", "flavours", - "flawlessy", "flawlessly", - "flexibily", "flexibility", - "fluctaute", "fluctuate", - "flucutate", "fluctuate", - "fluttersy", "fluttershy", - "follwoing", "following", - "foootball", "football", - "forcefuly", "forcefully", - "forcibley", "forcibly", - "forciblly", "forcibly", - "forearmes", "forearms", - "foreginer", "foreigner", - "foregroud", "foreground", - "foreinger", "foreigner", - "forgeiner", "foreigner", - "forgiener", "foreigner", - "forgivens", "forgiveness", - "foriegner", "foreigner", - "forigener", "foreigner", - "formerlly", "formerly", - "formualte", "formulate", - "formulaes", "formulas", - "formulars", "formulas", - "forntline", "frontline", - "forntpage", "frontpage", - "fortuante", "fortunate", - "forumlate", "formulate", - "foundatin", "foundations", - "fourteeen", "fourteen", - "fractales", "fractals", - "fractalis", "fractals", - "fractalus", "fractals", - "fragement", "fragment", - "fragmenot", "fragment", - "franchies", "franchise", - "francsico", "francisco", - "franscico", "francisco", - "frecklers", "freckles", - "freedomes", "freedoms", - "freestlye", "freestyle", - "freesytle", "freestyle", - "fremented", "fermented", - "freqeuncy", "frequency", - "frequence", "frequencies", - "friendlis", "friendlies", - "frightend", "frightened", - "fromation", "formation", - "frontapge", "frontpage", - "frontilne", 
"frontline", - "frustrato", "frustration", - "frustrats", "frustrates", - "fucntions", "functions", - "fullscren", "fullscreen", - "funcitons", "functions", - "functiong", "functioning", - "functtion", "function", - "furiosuly", "furiously", - "furiuosly", "furiously", - "futuristc", "futuristic", - "gagnsters", "gangsters", - "galations", "galatians", - "galdiator", "gladiator", - "gallaxies", "galaxies", - "garanteed", "guaranteed", - "garantees", "guarantees", - "garuantee", "guarantee", - "gatherins", "gatherings", - "gauntelts", "gauntlets", - "gauntlent", "gauntlet", - "gaurantee", "guarantee", - "gaurentee", "guarantee", - "genatilia", "genitalia", - "geneology", "genealogy", - "generalbs", "generals", - "generalis", "generals", - "generaste", "generates", - "generatie", "generate", - "generatin", "generations", - "generatos", "generators", - "genitaila", "genitalia", - "genitales", "genitals", - "genitalis", "genitals", - "geniunely", "genuinely", - "gentailia", "genitalia", - "gentelmen", "gentlemen", - "gentialia", "genitalia", - "genuienly", "genuinely", - "genuinley", "genuinely", - "geogrpahy", "geography", - "germaniac", "germanic", - "geurrilla", "guerrilla", - "gimmickey", "gimmicky", - "gimmickly", "gimmicky", - "girlfried", "girlfriend", - "goalkeepr", "goalkeeper", - "godafther", "godfather", - "godspeeed", "godspeed", - "goegraphy", "geography", - "goldfisch", "goldfish", - "goosebums", "goosebumps", - "gorvement", "goverment", - "govemrent", "goverment", - "govenment", "government", - "goverance", "governance", - "goveremnt", "goverment", - "goverment", "government", - "govermetn", "goverment", - "govermnet", "goverment", - "governmet", "governments", - "govorment", "government", - "govrement", "goverment", - "gracefull", "graceful", - "gracefuly", "gracefully", - "graduaste", "graduates", - "graduatin", "graduation", - "grahpical", "graphical", - "grativate", "gravitate", - "graudally", "gradually", - "graudates", "graduates", - "greenalnd", "greenland", - "grenaders", "grenades", - "grpahical", "graphical", - "guadulupe", "guadalupe", - "guaranted", "guaranteed", - "guarantes", "guarantees", - "guardains", "guardians", - "guarentee", "guarantee", - "guaridans", "guardians", - "guatamala", "guatemala", - "guerrilas", "guerrillas", - "guradians", "guardians", - "guranteed", "guaranteed", - "gurantees", "guarantees", - "gutiarist", "guitarist", - "habsbourg", "habsburg", - "hairstlye", "hairstyle", - "hairsytle", "hairstyle", - "halarious", "hilarious", - "hambruger", "hamburger", - "hamburges", "hamburgers", - "hamphsire", "hampshire", - "hamsphire", "hampshire", - "handboook", "handbook", - "handedley", "handedly", - "handedlly", "handedly", - "handicape", "handicapped", - "hapmshire", "hampshire", - "happended", "happened", - "happenend", "happened", - "happenned", "happened", - "harasment", "harassment", - "hardenend", "hardened", - "hardwoord", "hardwood", - "haristyle", "hairstyle", - "harrasing", "harassing", - "harrassed", "harassed", - "harrasses", "harassed", - "hdinsight", "hindsight", - "headahces", "headaches", - "headhpone", "headphone", - "headshoot", "headshot", - "healither", "healthier", - "healtheir", "healthier", - "healthiet", "healthiest", - "healthire", "healthier", - "heapdhone", "headphone", - "hedgehoog", "hedgehog", - "hedgehorg", "hedgehog", - "heightend", "heightened", - "heirarchy", "hierarchy", - "herculase", "hercules", - "herculeas", "hercules", - "herculees", "hercules", - "herculeus", "hercules", - "heriarchy", "hierarchy", - 
"hesistant", "hesitant", - "hesistate", "hesitate", - "hesitatin", "hesitation", - "hieroglph", "hieroglyph", - "highschol", "highschool", - "hindisght", "hindsight", - "hindrence", "hindrance", - "hinduisim", "hinduism", - "hinduisum", "hinduism", - "hipsanics", "hispanics", - "hirearchy", "hierarchy", - "hirsohima", "hiroshima", - "hispancis", "hispanics", - "hitboxers", "hitboxes", - "hoepfully", "hopefully", - "holocasut", "holocaust", - "holocuast", "holocaust", - "homeonwer", "homeowner", - "homeopaty", "homeopathy", - "homewolrd", "homeworld", - "homewoner", "homeowner", - "homewrold", "homeworld", - "homogenes", "homogeneous", - "homosexul", "homosexuals", - "hopelessy", "hopelessly", - "hopsitals", "hospitals", - "horishima", "hiroshima", - "horizones", "horizons", - "horizonts", "horizons", - "horrendos", "horrendous", - "horribley", "horribly", - "horriblly", "horribly", - "horrifing", "horrifying", - "hositlity", "hostility", - "hospitaly", "hospitality", - "hosptials", "hospitals", - "hourgalss", "hourglass", - "hourlgass", "hourglass", - "househols", "households", - "humanitis", "humanities", - "humanoind", "humanoid", - "humiditiy", "humidity", - "hunagrian", "hungarian", - "hurriance", "hurricane", - "hurricans", "hurricanes", - "husbandos", "husbands", - "hydraluic", "hydraulic", - "hydropile", "hydrophile", - "hydropobe", "hydrophobe", - "hydrualic", "hydraulic", - "hyopcrite", "hypocrite", - "hypcorite", "hypocrite", - "hyperoble", "hyperbole", - "hypocracy", "hypocrisy", - "hypocrasy", "hypocrisy", - "hypocricy", "hypocrisy", - "hypocriet", "hypocrite", - "hypocrits", "hypocrites", - "hyporcite", "hypocrite", - "hypothess", "hypotheses", - "hyprocisy", "hypocrisy", - "hyprocite", "hypocrite", - "hyrdation", "hydration", - "hyrdaulic", "hydraulic", - "hysterica", "hysteria", - "hysteriia", "hysteria", - "iburpofen", "ibuprofen", - "icleandic", "icelandic", - "icongnito", "incognito", - "idealisim", "idealism", - "idealistc", "idealistic", - "identifer", "identifier", - "identifiy", "identify", - "ideologis", "ideologies", - "ignornace", "ignorance", - "illegales", "illegals", - "illegalis", "illegals", - "illegalls", "illegals", - "illnesess", "illnesses", - "illsuions", "illusions", - "illuminai", "illuminati", - "imagenary", "imaginary", - "imaginery", "imaginary", - "imaptient", "impatient", - "imigrated", "emigrated", - "immensley", "immensely", - "immerisve", "immersive", - "immesnely", "immensely", - "immidiate", "immediate", - "immigrato", "immigration", - "immitated", "imitated", - "immitator", "imitator", - "immobilie", "immobile", - "immobille", "immobile", - "immobilze", "immobile", - "immortaly", "immortality", - "immserive", "immersive", - "impaitent", "impatient", - "imparital", "impartial", - "impedence", "impedance", - "implantes", "implants", - "implicati", "implicit", - "impliciet", "implicit", - "implicity", "implicitly", - "impliment", "implement", - "implusive", "impulsive", - "importamt", "important", - "importend", "imported", - "imporving", "improving", - "impossibe", "impossible", - "imprefect", "imperfect", - "impressin", "impressions", - "imprioned", "imprisoned", - "improbabe", "improbable", - "impulisve", "impulsive", - "impuslive", "impulsive", - "imrpoving", "improving", - "inadequet", "inadequate", - "inadquate", "inadequate", - "inaugures", "inaugurates", - "inbalance", "imbalance", - "inbeetwen", "inbetween", - "inbetween", "between", - "inbewteen", "inbetween", - "incarnato", "incarnation", - "incgonito", "incognito", - "inclinato", 
"inclination", - "includeds", "includes", - "incoginto", "incognito", - "incongito", "incognito", - "incorpore", "incorporate", - "incpetion", "inception", - "incredibe", "incredible", - "incrediby", "incredibly", - "inculding", "including", - "incunabla", "incunabula", - "indicaste", "indicates", - "indicatie", "indicative", - "indicence", "incidence", - "indicents", "incidents", - "indigenos", "indigenous", - "indirecty", "indirectly", - "indisious", "insidious", - "individul", "individual", - "individus", "individuals", - "indoensia", "indonesia", - "indoneisa", "indonesia", - "indutrial", "industrial", - "inersting", "inserting", - "inexpense", "inexpensive", - "infallibe", "infallible", - "inferioir", "inferior", - "inferiour", "inferior", - "infestato", "infestation", - "infiltrar", "infiltrator", - "infinitey", "infinity", - "infinitie", "infinite", - "infinitiy", "infinity", - "infinitly", "infinity", - "inflatabe", "inflatable", - "influense", "influences", - "influenta", "influential", - "informate", "informative", - "infraread", "infrared", - "ingeniuty", "ingenuity", - "ingeunity", "ingenuity", - "ingocnito", "incognito", - "ingorance", "ignorance", - "inguenity", "ingenuity", - "inhabitat", "inhabitants", - "inheirted", "inherited", - "inhertied", "inherited", - "initailly", "initially", - "initalese", "initialese", - "initaling", "initialing", - "initalise", "initialise", - "initalism", "initialism", - "initalize", "initialize", - "initalled", "initialled", - "initation", "initiation", - "initiales", "initials", - "initiatie", "initiatives", - "initiatin", "initiation", - "initiatve", "initiate", - "injustics", "injustices", - "inlcuding", "including", - "inmigrant", "immigrant", - "innoucous", "innocuous", - "innovatin", "innovations", - "innovatve", "innovate", - "inpection", "inception", - "inpending", "impending", - "inproving", "improving", - "inpsector", "inspector", - "inpsiring", "inspiring", - "inquisito", "inquisition", - "inquisitr", "inquisitor", - "inresting", "inserting", - "insanelly", "insanely", - "insepctor", "inspector", - "insidiuos", "insidious", - "insipring", "inspiring", - "insluated", "insulated", - "inspectin", "inspection", - "instabilt", "instability", - "installes", "installs", - "installus", "installs", - "instering", "inserting", - "insticnts", "instincts", - "institude", "instituted", - "instituto", "institution", - "insualted", "insulated", - "insurence", "insurance", - "insurgeny", "insurgency", - "integirty", "integrity", - "integraal", "integral", - "integrade", "integrated", - "integrato", "integration", - "intenisty", "intensity", - "intensley", "intensely", - "interacte", "interactive", - "interents", "internets", - "interesat", "interest", - "interesst", "interests", - "interewbs", "interwebs", - "interfase", "interfaces", - "interfeer", "interfere", - "interfers", "interferes", - "intergate", "integrate", - "intergity", "integrity", - "interiour", "interior", - "internest", "internets", - "interpert", "interpret", - "interprut", "interrupt", - "interrups", "interrupts", - "interstae", "interstate", - "interveen", "intervene", - "intervied", "interviewed", - "intervier", "interviewer", - "intervies", "interviews", - "intesnely", "intensely", - "intesnity", "intensity", - "intestins", "intestines", - "intialize", "initialize", - "inticrate", "intricate", - "intimidad", "intimidated", - "intircate", "intricate", - "intiution", "intuition", - "intiutive", "intuitive", - "intorduce", "introduce", - "intorvert", "introvert", - "intracite", 
"intricate", - "intrduced", "introduced", - "intregity", "integrity", - "intrenets", "internets", - "intrepret", "interpret", - "intrerupt", "interrupt", - "intrewebs", "interwebs", - "intrinisc", "intrinsic", - "intrisinc", "intrinsic", - "intrisnic", "intrinsic", - "intriuged", "intrigued", - "introdued", "introduced", - "introduse", "introduces", - "introvers", "introverts", - "intruiged", "intrigued", - "intrument", "instrument", - "inutition", "intuition", - "inutitive", "intuitive", - "invaderas", "invaders", - "invalidas", "invalidates", - "inventios", "inventions", - "investige", "investigate", - "investmet", "investments", - "invincibe", "invincible", - "invloving", "involving", - "invovling", "involving", - "ipubrofen", "ibuprofen", - "iranianos", "iranians", - "irelevent", "irrelevant", - "ironicaly", "ironically", - "irritatie", "irritate", - "irritatin", "irritation", - "isalmists", "islamists", - "isalnders", "islanders", - "islamiskt", "islamist", - "islamsits", "islamists", - "islmaists", "islamists", - "isntaller", "installer", - "isntances", "instances", - "isntantly", "instantly", - "israelies", "israelis", - "israelits", "israelis", - "italianas", "italians", - "italianos", "italians", - "jailbrake", "jailbreak", - "jalibreak", "jailbreak", - "jamaicain", "jamaican", - "jersualem", "jerusalem", - "jeruselam", "jerusalem", - "jeruslaem", "jerusalem", - "journalis", "journals", - "judegment", "judgement", - "judgemant", "judgemental", - "judisuary", "judiciary", - "jugdement", "judgement", - "juggernat", "juggernaut", - "juvenille", "juvenile", - "keneysian", "keynesian", - "kentuckey", "kentucky", - "kenyesian", "keynesian", - "keybaords", "keyboards", - "keyensian", "keynesian", - "keyesnian", "keynesian", - "keynseian", "keynesian", - "keysenian", "keynesian", - "kilometes", "kilometers", - "kindapped", "kidnapped", - "kncokback", "knockback", - "knoweldge", "knowledge", - "knowlegde", "knowledge", - "konckback", "knockback", - "kryptonie", "kryptonite", - "labirynth", "labyrinth", - "laboratoy", "laboratory", - "laboreres", "laborers", - "labratory", "laboratory", - "labriynth", "labyrinth", - "labryinth", "labyrinth", - "labyrnith", "labyrinth", - "landscaps", "landscapes", - "landscspe", "landscapes", - "langauges", "languages", - "languague", "language", - "lanuchers", "launchers", - "lanugages", "languages", - "larington", "arlington", - "latitudie", "latitude", - "lattitude", "latitude", - "laucnhers", "launchers", - "laucnhing", "launching", - "launchign", "launching", - "laybrinth", "labyrinth", - "lebanesse", "lebanese", - "leceister", "leicester", - "leciester", "leicester", - "legitmate", "legitimate", - "legnedary", "legendary", - "lesbianas", "lesbians", - "lesbianus", "lesbians", - "letivicus", "leviticus", - "leutenant", "lieutenant", - "levaithan", "leviathan", - "levellign", "levelling", - "levetated", "levitated", - "levetates", "levitates", - "levicitus", "leviticus", - "levleling", "levelling", - "lfiesteal", "lifesteal", - "liberales", "liberals", - "liberalim", "liberalism", - "liberalis", "liberals", - "liberatin", "liberation", - "libraires", "libraries", - "liecester", "leicester", - "lieuenant", "lieutenant", - "lieutenat", "lieutenant", - "lifespawn", "lifespan", - "lifestlye", "lifestyle", - "lighnting", "lightning", - "lightnign", "lightning", - "ligthning", "lightning", - "ligthroom", "lightroom", - "lingerine", "lingerie", - "lispticks", "lipsticks", - "listenend", "listened", - "literarly", "literary", - "literarry", "literary", - 
"literatre", "literate", - "literatue", "literate", - "literture", "literature", - "lithaunia", "lithuania", - "lithuaina", "lithuania", - "lithuiana", "lithuania", - "lithunaia", "lithuania", - "litigatin", "litigation", - "lituhania", "lithuania", - "liveprool", "liverpool", - "livestrem", "livestream", - "lobbysits", "lobbyists", - "lockscren", "lockscreen", - "logisitcs", "logistics", - "logsitics", "logistics", - "loiusiana", "louisiana", - "lollipoop", "lollipop", - "louisvile", "louisville", - "luanchers", "launchers", - "luanching", "launching", - "lubicrant", "lubricant", - "lubircant", "lubricant", - "ludcrious", "ludicrous", - "ludricous", "ludicrous", - "lunaticos", "lunatics", - "lunaticus", "lunatics", - "macaronni", "macaroni", - "maestries", "masteries", - "magainzes", "magazines", - "magensium", "magnesium", - "magincian", "magician", - "magintude", "magnitude", - "magneisum", "magnesium", - "magnesuim", "magnesium", - "magnifine", "magnificent", - "mainfesto", "manifesto", - "mainfests", "manifests", - "mainstrem", "mainstream", - "maintaing", "maintaining", - "maintance", "maintenance", - "maintians", "maintains", - "mairjuana", "marijuana", - "malasyian", "malaysian", - "malayisan", "malaysian", - "malaysain", "malaysian", - "maletonin", "melatonin", - "maltesian", "maltese", - "malyasian", "malaysian", - "managable", "manageable", - "managment", "management", - "mandarian", "mandarin", - "mandarijn", "mandarin", - "mandarion", "mandarin", - "maneouvre", "manoeuvre", - "maneuveur", "maneuver", - "maneveurs", "maneuvers", - "manfiesto", "manifesto", - "manfiests", "manifests", - "mangesium", "magnesium", - "mangitude", "magnitude", - "manouvers", "maneuvers", - "mantained", "maintained", - "manuevers", "maneuvers", - "maraudeur", "marauder", - "marevlous", "marvelous", - "margarent", "margaret", - "margarite", "margaret", - "marginaal", "marginal", - "marginaly", "marginally", - "marijauna", "marijuana", - "marineras", "mariners", - "marineris", "mariners", - "marineros", "mariners", - "marjiuana", "marijuana", - "marjority", "majority", - "marmelade", "marmalade", - "marrtyred", "martyred", - "massagens", "massages", - "massivley", "massively", - "masteires", "masteries", - "mastereis", "masteries", - "masterise", "masteries", - "mastermid", "mastermind", - "mastieres", "masteries", - "masturbae", "masturbated", - "materiaal", "material", - "matierals", "materials", - "mattreses", "mattress", - "mayalsian", "malaysian", - "maylasian", "malaysian", - "mccarthey", "mccarthy", - "mecahnics", "mechanics", - "mecernary", "mercenary", - "mechancis", "mechanics", - "mechanims", "mechanism", - "mechaninc", "mechanic", - "mechansim", "mechanism", - "medicince", "medicine", - "mediciney", "mediciny", - "meditatie", "meditate", - "meditatin", "meditation", - "megathred", "megathread", - "melanotin", "melatonin", - "melborune", "melbourne", - "melbounre", "melbourne", - "membrance", "membrane", - "menstraul", "menstrual", - "menstural", "menstrual", - "mensutral", "menstrual", - "mentiones", "mentions", - "mercanery", "mercenary", - "merhcants", "merchants", - "messagers", "messages", - "messanger", "messenger", - "metabloic", "metabolic", - "metalurgy", "metallurgy", - "methaphor", "metaphor", - "methapors", "metaphors", - "methodoly", "methodology", - "metropols", "metropolis", - "mexicanas", "mexicans", - "mexicants", "mexicans", - "mexicanus", "mexicans", - "michellle", "michelle", - "micorwave", "microwave", - "micoscopy", "microscopy", - "microphen", "microphone", - 
"migrantes", "migrants", - "migrianes", "migraines", - "milawukee", "milwaukee", - "milennium", "millennium", - "milestons", "milestones", - "militians", "militias", - "millenial", "millennial", - "millenian", "millennia", - "millenium", "millennium", - "millionar", "millionaire", - "millitary", "military", - "miluwakee", "milwaukee", - "milwakuee", "milwaukee", - "milwuakee", "milwaukee", - "mindcarck", "mindcrack", - "mindlessy", "mindlessly", - "minerales", "minerals", - "minisclue", "miniscule", - "miniscuel", "miniscule", - "ministery", "ministry", - "minisucle", "miniscule", - "minitaure", "miniature", - "minituare", "miniature", - "minneosta", "minnesota", - "minnestoa", "minnesota", - "minsicule", "miniscule", - "minsiters", "ministers", - "minstries", "ministries", - "miraculos", "miraculous", - "mircowave", "microwave", - "mirrorred", "mirrored", - "miserabel", "miserable", - "mispelled", "misspelled", - "misreable", "miserable", - "misreably", "miserably", - "missisipi", "mississippi", - "missonary", "missionary", - "missourri", "missouri", - "misspelld", "misspelled", - "mobilitiy", "mobility", - "moderatey", "moderately", - "moderatin", "moderation", - "modifires", "modifiers", - "moelcules", "molecules", - "moleclues", "molecules", - "molestare", "molester", - "molestato", "molestation", - "molesterd", "molested", - "monestary", "monastery", - "monitores", "monitors", - "monolgoue", "monologue", - "monolight", "moonlight", - "monolouge", "monologue", - "monopolis", "monopolies", - "monopolly", "monopoly", - "monopoloy", "monopoly", - "monserrat", "montserrat", - "monstorus", "monstrous", - "monstruos", "monstrous", - "montanous", "mountainous", - "montoring", "monitoring", - "monumnets", "monuments", - "moratlity", "mortality", - "morbidley", "morbidly", - "morgatges", "mortgages", - "morgtages", "mortgages", - "morisette", "morissette", - "mormonsim", "mormonism", - "morroccan", "moroccan", - "mortailty", "mortality", - "mosquitto", "mosquito", - "motivatie", "motivate", - "motivatin", "motivations", - "motorcyce", "motorcycles", - "motorolja", "motorola", - "motoroloa", "motorola", - "moustahce", "moustache", - "movepseed", "movespeed", - "mozzarela", "mozzarella", - "mucisians", "musicians", - "mulitated", "mutilated", - "mulitples", "multiples", - "multipled", "multiplied", - "multplies", "multiples", - "murdererd", "murdered", - "muscially", "musically", - "muscician", "musician", - "musculair", "muscular", - "mushrooom", "mushroom", - "musicains", "musicians", - "mutatiohn", "mutation", - "mutialted", "mutilated", - "mutilatin", "mutilation", - "mutliated", "mutilated", - "mutliples", "multiples", - "mutlitude", "multitude", - "mysterise", "mysteries", - "mysterous", "mysterious", - "nacrotics", "narcotics", - "naferious", "nefarious", - "nahsville", "nashville", - "narcissim", "narcissism", - "narcissit", "narcissist", - "narcissts", "narcissist", - "narctoics", "narcotics", - "nasvhille", "nashville", - "nationaal", "national", - "nationaly", "nationally", - "nativelly", "natively", - "natrually", "naturally", - "navigatie", "navigate", - "navigatin", "navigation", - "neccesary", "necessary", - "necessite", "necessities", - "neckbears", "neckbeards", - "neckbread", "neckbeard", - "nedlessly", "endlessly", - "needlessy", "needlessly", - "negiotate", "negotiate", - "negociate", "negotiate", - "negoitate", "negotiate", - "neigbhour", "neighbour", - "neigbours", "neighbours", - "neighboor", "neighbor", - "nessecary", "necessary", - "newcaslte", "newcastle", - "newcastel", 
"newcastle", - "nieghbour", "neighbour", - "nightfa;;", "nightfall", - "nightlcub", "nightclub", - "nigthclub", "nightclub", - "nigthlife", "nightlife", - "nigthmare", "nightmare", - "nihilisim", "nihilism", - "ninteenth", "nineteenth", - "nominatie", "nominate", - "nominatin", "nomination", - "noninital", "noninitial", - "norhteast", "northeast", - "norhtwest", "northwest", - "normanday", "normandy", - "northeren", "northern", - "norwegain", "norwegian", - "norwiegan", "norwegian", - "nostaglia", "nostalgia", - "nostaglic", "nostalgic", - "nostaliga", "nostalgia", - "nostaligc", "nostalgic", - "nostlagia", "nostalgia", - "nostlagic", "nostalgic", - "nostriles", "nostrils", - "nostrills", "nostrils", - "notacible", "noticable", - "notciable", "noticable", - "noteboook", "notebook", - "noteriety", "notoriety", - "noteworty", "noteworthy", - "noticable", "noticeable", - "noticably", "noticeably", - "noticalbe", "noticable", - "noticeing", "noticing", - "noticible", "noticeable", - "notoroius", "notorious", - "novermber", "november", - "nullabour", "nullarbor", - "numberous", "numerous", - "numercial", "numerical", - "numerious", "numerous", - "nuremburg", "nuremberg", - "nurtients", "nutrients", - "nutirents", "nutrients", - "nutreints", "nutrients", - "nutritent", "nutrient", - "nutritian", "nutritional", - "nutritios", "nutritious", - "obediance", "obedience", - "obeidence", "obedience", - "obersvant", "observant", - "obersvers", "observers", - "obesssion", "obsession", - "obiedence", "obedience", - "obivously", "obviously", - "objectivs", "objectives", - "objectivy", "objectivity", - "obscruity", "obscurity", - "obscuirty", "obscurity", - "observare", "observer", - "observerd", "observed", - "obssesion", "obsession", - "obssesive", "obsessive", - "obssessed", "obsessed", - "obstruced", "obstructed", - "obsucrity", "obscurity", - "obtainabe", "obtainable", - "obviosuly", "obviously", - "obvioulsy", "obviously", - "obvisouly", "obviously", - "obvoiusly", "obviously", - "ocasional", "occasional", - "ocasioned", "occasioned", - "ocassions", "occasions", - "occaisons", "occasions", - "occassion", "occasion", - "occurance", "occurrence", - "occurence", "occurrence", - "octohedra", "octahedra", - "ocuntries", "countries", - "ocurrance", "occurrence", - "ocurrence", "occurrence", - "offcially", "officially", - "offically", "officially", - "officialy", "officially", - "offpsring", "offspring", - "offspirng", "offspring", - "offsrping", "offspring", - "ogliarchy", "oligarchy", - "oilgarchy", "oligarchy", - "oligrachy", "oligarchy", - "ommitting", "omitting", - "onlsaught", "onslaught", - "onsalught", "onslaught", - "onslaugth", "onslaught", - "onsluaght", "onslaught", - "onwership", "ownership", - "opiniones", "opinions", - "oposition", "opposition", - "opponenet", "opponent", - "opposiste", "opposites", - "opposties", "opposites", - "oppressin", "oppression", - "opression", "oppression", - "opressive", "oppressive", - "opthalmic", "ophthalmic", - "optimisim", "optimism", - "optimistc", "optimistic", - "optinally", "optimally", - "oragnered", "orangered", - "oragnised", "organised", - "oragnizer", "organizer", - "orcehstra", "orchestra", - "ordinarly", "ordinary", - "orgainsed", "organised", - "orgainzer", "organizer", - "organered", "orangered", - "organices", "organise", - "organisim", "organism", - "organiske", "organise", - "organiste", "organise", - "organites", "organise", - "organizms", "organism", - "organsied", "organised", - "organsims", "organisms", - "organzier", "organizer", - 
"orginally", "originally", - "orgnaised", "organised", - "orhcestra", "orchestra", - "orientato", "orientation", - "origanaly", "originally", - "originall", "original", - "originalt", "originality", - "originaly", "originally", - "origintea", "originate", - "origional", "original", - "orignally", "originally", - "orignials", "originals", - "oublisher", "publisher", - "oursleves", "ourselves", - "oustiders", "outsiders", - "oustpoken", "outspoken", - "outisders", "outsiders", - "outnumbed", "outnumbered", - "outpalyed", "outplayed", - "outperfom", "outperform", - "outpsoken", "outspoken", - "outrageos", "outrageous", - "outskirst", "outskirts", - "outskrits", "outskirts", - "outwieghs", "outweighs", - "overbaord", "overboard", - "overclcok", "overclock", - "overdirve", "overdrive", - "overhpyed", "overhyped", - "overhwelm", "overwhelm", - "overlcock", "overclock", - "overloard", "overload", - "overpaied", "overpaid", - "overpowed", "overpowered", - "overriden", "overridden", - "overwhlem", "overwhelm", - "overwirte", "overwrite", - "overwtach", "overwatch", - "overyhped", "overhyped", - "owernship", "ownership", - "pacificts", "pacifist", - "packageid", "packaged", - "pactivity", "captivity", - "painkills", "painkillers", - "paitently", "patiently", - "paitience", "patience", - "pakistain", "pakistani", - "pakistian", "pakistani", - "pakistnai", "pakistani", - "paksitani", "pakistani", - "paladines", "paladins", - "paladinos", "paladins", - "palestein", "palestine", - "palestina", "palestinian", - "palistian", "palestinian", - "paltforms", "platforms", - "palystyle", "playstyle", - "pancakers", "pancakes", - "pantomine", "pantomime", - "paradimes", "paradise", - "paragraps", "paragraphs", - "paragrpah", "paragraph", - "paralells", "parallels", - "paralelly", "parallelly", - "paralisys", "paralysis", - "parallely", "parallelly", - "paralzyed", "paralyzed", - "paramedis", "paramedics", - "paramters", "parameters", - "paranoica", "paranoia", - "paranoida", "paranoia", - "parasties", "parasites", - "paraylsis", "paralysis", - "paraylzed", "paralyzed", - "parellels", "parallels", - "paricular", "particular", - "parisitic", "parasitic", - "paritally", "partially", - "parliment", "parliament", - "parmesaen", "parmesan", - "parntered", "partnered", - "parrallel", "parallel", - "partchett", "pratchett", - "parterned", "partnered", - "participe", "participate", - "partiotic", "patriotic", - "partisain", "partisan", - "pasengers", "passengers", - "passagens", "passages", - "passagers", "passages", - "passerbys", "passersby", - "passiones", "passions", - "passivley", "passively", - "passowrds", "passwords", - "pateintly", "patiently", - "paticular", "particular", - "patinetly", "patiently", - "patriarca", "patriarchal", - "patriarcy", "patriarchy", - "patriotas", "patriots", - "patriotes", "patriots", - "patroitic", "patriotic", - "pattented", "patented", - "pavillion", "pavilion", - "pbulisher", "publisher", - "peacefuly", "peacefully", - "pedohpile", "pedophile", - "pedophila", "pedophilia", - "pedophils", "pedophiles", - "peircings", "piercings", - "penalites", "penalties", - "penatlies", "penalties", - "penduluum", "pendulum", - "penerator", "penetrator", - "penguines", "penguins", - "penguings", "penguins", - "penguinos", "penguins", - "peninsual", "peninsula", - "peninusla", "peninsula", - "penisnula", "peninsula", - "penisular", "peninsular", - "pennisula", "peninsula", - "pensinula", "peninsula", - "pentagoon", "pentagon", - "peodphile", "pedophile", - "pepperino", "pepperoni", - "peppermit", 
"peppermint", - "percepted", "perceived", - "percevied", "perceived", - "percieved", "perceived", - "percisely", "precisely", - "percision", "precision", - "percursor", "precursor", - "perdators", "predators", - "peremiter", "perimeter", - "perfeclty", "perfectly", - "perfomers", "performers", - "performas", "performs", - "perfromer", "performer", - "pericings", "piercings", - "perimetre", "perimeter", - "peristent", "persistent", - "periwinke", "periwinkle", - "permanant", "permanent", - "permature", "premature", - "permenant", "permanent", - "permieter", "perimeter", - "permissie", "permissible", - "permissin", "permissions", - "permisson", "permission", - "pernament", "permanent", - "perorders", "preorders", - "perpetrar", "perpetrator", - "perpetuae", "perpetuate", - "perpetuas", "perpetuates", - "persauded", "persuaded", - "perscribe", "prescribe", - "perserved", "preserved", - "persistes", "persists", - "personaes", "personas", - "personaly", "personally", - "personell", "personnel", - "personhod", "personhood", - "persuated", "persuade", - "pertended", "pretended", - "pertoleum", "petroleum", - "perusaded", "persuaded", - "pervertes", "perverse", - "pesticids", "pesticides", - "petroluem", "petroleum", - "phemonena", "phenomena", - "phenemona", "phenomena", - "phenonema", "phenomena", - "phillipse", "phillies", - "philosopy", "philosophy", - "philosphy", "philosophy", - "phonecian", "phoenecian", - "phonemena", "phenomena", - "phongraph", "phonograph", - "photograh", "photograph", - "phsyician", "physician", - "phsyicist", "physicist", - "phycisian", "physician", - "phycisist", "physicist", - "physicaly", "physically", - "physicits", "physicist", - "physisict", "physicist", - "piblisher", "publisher", - "picthfork", "pitchfork", - "pinetrest", "pinterest", - "placeheld", "placeholder", - "placemens", "placements", - "plaestine", "palestine", - "plagarism", "plagiarism", - "planatery", "planetary", - "planation", "plantation", - "planteary", "planetary", - "plasticas", "plastics", - "plasticos", "plastics", - "plasticus", "plastics", - "platfroms", "platforms", - "platofrms", "platforms", - "plausable", "plausible", - "plausbile", "plausible", - "plausibel", "plausible", - "playgroud", "playground", - "playright", "playwright", - "playstlye", "playstyle", - "playwrite", "playwright", - "plebicite", "plebiscite", - "plethoria", "plethora", - "ploarized", "polarized", - "pointeres", "pointers", - "polinator", "pollinator", - "polishees", "polishes", - "politelly", "politely", - "politican", "politician", - "politicas", "politics", - "politicin", "politician", - "politicus", "politics", - "polygammy", "polygamy", - "populatin", "populations", - "poralized", "polarized", - "porcelian", "porcelain", - "porcelina", "porcelain", - "poreclain", "porcelain", - "porftolio", "portfolio", - "porgramme", "programme", - "portfoilo", "portfolio", - "portoflio", "portfolio", - "portraing", "portraying", - "portrayes", "portrays", - "portrayls", "portrays", - "portriats", "portraits", - "portugese", "portuguese", - "portugues", "portuguese", - "posessing", "possessing", - "posession", "possession", - "positiond", "positioned", - "positiong", "positioning", - "positionl", "positional", - "positiviy", "positivity", - "possesess", "possesses", - "possesing", "possessing", - "possesion", "possession", - "possessin", "possessions", - "possibile", "possible", - "possibily", "possibility", - "possibley", "possibly", - "possiblly", "possibly", - "possition", "position", - "powderade", "powdered", - 
"powerfull", "powerful", - "pracitcal", "practical", - "practhett", "pratchett", - "practicly", "practically", - "practives", "practise", - "pragamtic", "pragmatic", - "preadtors", "predators", - "precedeed", "preceded", - "preceeded", "preceded", - "preceived", "perceived", - "preciesly", "precisely", - "precisley", "precisely", - "precurors", "precursor", - "precurosr", "precursor", - "precurser", "precursor", - "predatobr", "predator", - "predictie", "predictive", - "predictin", "prediction", - "predjuice", "prejudice", - "predujice", "prejudice", - "prefectly", "perfectly", - "preferens", "preferences", - "prefering", "preferring", - "preformer", "performer", - "pregnance", "pregnancies", - "preimeter", "perimeter", - "prejiduce", "prejudice", - "prejucide", "prejudice", - "premanent", "permanent", - "premeired", "premiered", - "preorderd", "preordered", - "preparato", "preparation", - "prepatory", "preparatory", - "presentas", "presents", - "presentes", "presents", - "presicely", "precisely", - "presicion", "precision", - "presideny", "presidency", - "prestigiu", "prestigious", - "prestigue", "prestige", - "presuaded", "persuaded", - "pretendas", "pretends", - "pretensje", "pretense", - "pretinent", "pertinent", - "prevelant", "prevalent", - "preventin", "prevention", - "previvous", "previous", - "priesthod", "priesthood", - "priestood", "priesthood", - "primaires", "primaries", - "primairly", "primarily", - "primarliy", "primarily", - "primative", "primitive", - "primordal", "primordial", - "princesas", "princess", - "princeses", "princess", - "princesss", "princesses", - "principas", "principals", - "principly", "principally", - "prinicple", "principle", - "prioritie", "prioritize", - "prioritse", "priorities", - "privalege", "privilege", - "privelege", "privilege", - "privelige", "privilege", - "privilage", "privilege", - "privilegs", "privileges", - "privledge", "privilege", - "probabily", "probability", - "probablly", "probably", - "problemas", "problems", - "procative", "proactive", - "procedger", "procedure", - "proceding", "proceeding", - "proceedes", "proceeds", - "procelain", "porcelain", - "procesess", "processes", - "processer", "processor", - "processos", "processors", - "proclamed", "proclaimed", - "procotols", "protocols", - "prodecure", "procedure", - "productie", "productive", - "productin", "productions", - "productos", "products", - "profesion", "profusion", - "professer", "professor", - "professin", "professions", - "proffesed", "professed", - "proffesor", "professor", - "progessed", "progressed", - "programas", "programs", - "programem", "programme", - "programes", "programs", - "programms", "programs", - "progresso", "progression", - "progresss", "progresses", - "projectie", "projectile", - "projectin", "projection", - "prominant", "prominent", - "promiscus", "promiscuous", - "promotted", "promoted", - "pronomial", "pronominal", - "pronouced", "pronounced", - "pronounds", "pronouns", - "pronounes", "pronouns", - "propagana", "propaganda", - "properies", "properties", - "propertly", "property", - "propeties", "properties", - "prophesie", "prophecies", - "prophetes", "prophets", - "propogate", "propagate", - "proposels", "proposes", - "proposito", "proposition", - "propperly", "properly", - "propsects", "prospects", - "prosperos", "prosperous", - "prostitue", "prostitute", - "protectes", "protects", - "protectie", "protective", - "protectos", "protectors", - "proteinas", "proteins", - "proteines", "proteins", - "protestas", "protests", - "protestat", "protestant", 
- "protestes", "protests", - "protestos", "protests", - "protfolio", "portfolio", - "protocool", "protocol", - "prototpye", "prototype", - "prototyps", "prototypes", - "protraits", "portraits", - "protrayal", "portrayal", - "protrayed", "portrayed", - "provicial", "provincial", - "provincie", "province", - "provisios", "provisions", - "pruchased", "purchased", - "pruchases", "purchases", - "prugatory", "purgatory", - "pruposely", "purposely", - "pscyhotic", "psychotic", - "pseudonyn", "pseudonym", - "pshycosis", "psychosis", - "pshycotic", "psychotic", - "psycology", "psychology", - "psycothic", "psychotic", - "ptichfork", "pitchfork", - "pubilsher", "publisher", - "publiaher", "publisher", - "publicaly", "publicly", - "publicani", "publication", - "publicher", "publisher", - "publiclly", "publicly", - "publihser", "publisher", - "publisehr", "publisher", - "publisger", "publisher", - "publishor", "publisher", - "publishre", "publisher", - "publsiher", "publisher", - "publusher", "publisher", - "puchasing", "purchasing", - "punishmet", "punishments", - "puplisher", "publisher", - "puragtory", "purgatory", - "purcahsed", "purchased", - "purcahses", "purchases", - "purhcased", "purchased", - "purposley", "purposely", - "pursuaded", "persuaded", - "pursuades", "persuades", - "pyramidas", "pyramids", - "pyramides", "pyramids", - "pyschosis", "psychosis", - "pyschotic", "psychotic", - "qaulifies", "qualifies", - "quantifiy", "quantify", - "quantitiy", "quantity", - "quantitty", "quantity", - "quartlery", "quarterly", - "queations", "equations", - "queenland", "queensland", - "questiond", "questioned", - "questiong", "questioning", - "questionn", "questioning", - "radicalis", "radicals", - "rapsberry", "raspberry", - "rasbperry", "raspberry", - "rationaly", "rationally", - "reactiony", "reactionary", - "realisitc", "realistic", - "realoding", "reloading", - "realsitic", "realistic", - "realtable", "relatable", - "realtions", "relations", - "realtives", "relatives", - "reamining", "remaining", - "reaplying", "replaying", - "reasearch", "research", - "reaveling", "revealing", - "rebellios", "rebellious", - "rebllions", "rebellions", - "recations", "creations", - "reccomend", "recommend", - "reccuring", "recurring", - "receeding", "receding", - "recepient", "recipient", - "recgonise", "recognise", - "recgonize", "recognize", - "recidents", "residents", - "recievers", "receivers", - "recieving", "receiving", - "recipiant", "recipient", - "reciproce", "reciprocate", - "reclutant", "reluctant", - "recoginse", "recognise", - "recoginze", "recognize", - "recomends", "recommends", - "recommens", "recommends", - "reconenct", "reconnect", - "recongise", "recognise", - "recongize", "recognize", - "reconicle", "reconcile", - "reconized", "recognized", - "recordare", "recorder", - "recoveres", "recovers", - "recoverys", "recovers", - "recpetive", "receptive", - "recpetors", "receptors", - "recquired", "required", - "recreatie", "recreate", - "recruitcs", "recruits", - "recruites", "recruits", - "recrusion", "recursion", - "recrutied", "recruited", - "recrutier", "recruiter", - "rectangel", "rectangle", - "rectanlge", "rectangle", - "recuiting", "recruiting", - "recurison", "recursion", - "recurited", "recruited", - "recuriter", "recruiter", - "recusrion", "recursion", - "redeemeed", "redeemed", - "redundany", "redundancy", - "redundent", "redundant", - "reedeming", "redeeming", - "refelcted", "reflected", - "refereces", "references", - "refereees", "referees", - "refereers", "referees", - "referemce", 
"reference", - "referencs", "references", - "referense", "references", - "referiang", "referring", - "referinng", "refering", - "refernces", "references", - "refernece", "reference", - "refershed", "refreshed", - "refersher", "refresher", - "reffering", "referring", - "reflectie", "reflective", - "refrehser", "refresher", - "refrences", "references", - "refromist", "reformist", - "regionaal", "regional", - "registerd", "registered", - "registery", "registry", - "regualrly", "regularly", - "regualtor", "regulator", - "regulaion", "regulation", - "regulalry", "regularly", - "regulares", "regulars", - "regularis", "regulars", - "regulatin", "regulations", - "regurally", "regularly", - "reigining", "reigning", - "reinstale", "reinstalled", - "reisntall", "reinstall", - "reknowned", "renowned", - "relaoding", "reloading", - "relatiate", "retaliate", - "relativiy", "relativity", - "relativly", "relatively", - "relativno", "relation", - "relavence", "relevance", - "relcutant", "reluctant", - "relevence", "relevance", - "relfected", "reflected", - "reliabily", "reliability", - "reliabley", "reliably", - "religeous", "religious", - "remasterd", "remastered", - "rememberd", "remembered", - "rememebrs", "remembers", - "remianing", "remaining", - "remignton", "remington", - "remingotn", "remington", - "remmebers", "remembers", - "remotelly", "remotely", - "rendevous", "rendezvous", - "rendezous", "rendezvous", - "renedered", "rende", - "renegated", "renegade", - "rennovate", "renovate", - "repalying", "replaying", - "repblican", "republican", - "repeatedy", "repeatedly", - "repective", "receptive", - "repeition", "repetition", - "repentent", "repentant", - "rephrasse", "rephrase", - "replusive", "repulsive", - "reportedy", "reportedly", - "represend", "represented", - "repressin", "repression", - "reprtoire", "repertoire", - "repsonded", "responded", - "reptition", "repetition", - "reptuable", "reputable", - "repubican", "republican", - "republian", "republican", - "repulican", "republican", - "repulisve", "repulsive", - "repuslive", "repulsive", - "resaurant", "restaurant", - "researchs", "researchers", - "resembels", "resembles", - "reserverd", "reserved", - "resintall", "reinstall", - "resistane", "resistances", - "resistans", "resistances", - "resistend", "resisted", - "resistent", "resistant", - "resmebles", "resembles", - "resolutin", "resolutions", - "resoruces", "resources", - "respectes", "respects", - "respectos", "respects", - "responces", "response", - "respondas", "responds", - "respondis", "responds", - "respondus", "responds", - "respoting", "reposting", - "ressemble", "resemble", - "ressurect", "resurrect", - "restarant", "restaurant", - "resticted", "restricted", - "restircts", "restricts", - "restorani", "restoration", - "restraind", "restrained", - "restraing", "restraining", - "restraunt", "restraint", - "restriant", "restraint", - "restricte", "restrictive", - "resturant", "restaurant", - "retailate", "retaliate", - "retalaite", "retaliate", - "retaliers", "retailers", - "reteriver", "retriever", - "retirever", "retriever", - "retrevier", "retriever", - "retriving", "retrieving", - "reuptable", "reputable", - "reveiwers", "reviewers", - "revelaing", "revealing", - "revelance", "relevance", - "revolutin", "revolutions", - "rewachted", "rewatched", - "rewatchig", "rewatching", - "rferences", "references", - "ridiculos", "ridiculous", - "ridiculue", "ridicule", - "ridiculus", "ridiculous", - "righetous", "righteous", - "rightfuly", "rightfully", - "rightoues", "righteous", - 
"rigourous", "rigorous", - "rigtheous", "righteous", - "rilvaries", "rivalries", - "rininging", "ringing", - "rivarlies", "rivalries", - "rivlaries", "rivalries", - "roaylties", "royalties", - "roboticus", "robotics", - "roganisms", "organisms", - "royalites", "royalties", - "roylaties", "royalties", - "ruleboook", "rulebook", - "sacarstic", "sarcastic", - "sacntuary", "sanctuary", - "sacrafice", "sacrifice", - "sacrastic", "sarcastic", - "sacrifise", "sacrifices", - "salughter", "slaughter", - "samckdown", "smackdown", - "sanctiond", "sanctioned", - "sancturay", "sanctuary", - "sancutary", "sanctuary", - "sandstrom", "sandstorm", - "sandwhich", "sandwich", - "sanhedrim", "sanhedrin", - "santcuary", "sanctuary", - "santioned", "sanctioned", - "sapphirre", "sapphire", - "sastified", "satisfied", - "sastifies", "satisfies", - "satelites", "satellites", - "saterdays", "saturdays", - "satisifed", "satisfied", - "satisifes", "satisfies", - "satrudays", "saturdays", - "satsified", "satisfied", - "satsifies", "satisfies", - "sattelite", "satellite", - "saxaphone", "saxophone", - "scaepgoat", "scapegoat", - "scaleable", "scalable", - "scandales", "scandals", - "scandalos", "scandals", - "scantuary", "sanctuary", - "scaricity", "scarcity", - "scarifice", "sacrifice", - "scarmbled", "scrambled", - "scartched", "scratched", - "scartches", "scratches", - "scavanged", "scavenged", - "sceintist", "scientist", - "scholalry", "scholarly", - "sciencers", "sciences", - "scientfic", "scientific", - "scientifc", "scientific", - "scientits", "scientist", - "sclupture", "sculpture", - "scnearios", "scenarios", - "scoreboad", "scoreboard", - "scottisch", "scottish", - "scracthed", "scratched", - "scracthes", "scratches", - "scrambeld", "scrambled", - "scrathces", "scratches", - "scrollade", "scrolled", - "scrutiney", "scrutiny", - "scrutinty", "scrutiny", - "sculpteur", "sculpture", - "sculputre", "sculpture", - "scultpure", "sculpture", - "scuplture", "sculpture", - "scuptures", "sculptures", - "seamlessy", "seamlessly", - "searchign", "searching", - "sebasitan", "sebastian", - "sebastain", "sebastian", - "sebsatian", "sebastian", - "secceeded", "seceded", - "secertary", "secretary", - "secratary", "secretary", - "secratery", "secretary", - "secretery", "secretary", - "secretley", "secretly", - "sednetary", "sedentary", - "seduciton", "seduction", - "semanitcs", "semantics", - "semestres", "semesters", - "semnatics", "semantics", - "semseters", "semesters", - "senatores", "senators", - "sendetary", "sedentary", - "sensitivy", "sensitivity", - "sentimant", "sentimental", - "sepcially", "specially", - "seperated", "separated", - "seperates", "separates", - "seperator", "separator", - "septmeber", "september", - "seraching", "searching", - "seriosuly", "seriously", - "serioulsy", "seriously", - "seriuosly", "seriously", - "servantes", "servants", - "settlment", "settlement", - "sexualizd", "sexualized", - "sexuallly", "sexually", - "shadasloo", "shadaloo", - "shaprness", "sharpness", - "sharpenss", "sharpness", - "shawhsank", "shawshank", - "sheilding", "shielding", - "shephered", "shepherd", - "shileding", "shielding", - "shitstrom", "shitstorm", - "shletered", "sheltered", - "shoudlers", "shoulders", - "shouldnot", "shouldnt", - "shperical", "spherical", - "shwashank", "shawshank", - "sidebaord", "sideboard", - "siganture", "signature", - "signapore", "singapore", - "signitory", "signatory", - "silhouete", "silhouette", - "similiair", "similiar", - "simliarly", "similarly", - "simluated", "simulated", - 
"simluator", "simulator", - "simplifiy", "simplify", - "simualted", "simulated", - "simualtor", "simulator", - "simulatie", "simulate", - "simulatin", "simulation", - "sinagpore", "singapore", - "sincerley", "sincerely", - "singature", "signature", - "singpaore", "singapore", - "singulair", "singular", - "singulary", "singularity", - "skateboad", "skateboard", - "skeletaal", "skeletal", - "skepitcal", "skeptical", - "skepticim", "skepticism", - "sketpical", "skeptical", - "slaughted", "slaughtered", - "slaugther", "slaughter", - "slipperly", "slippery", - "sluaghter", "slaughter", - "smackdwon", "smackdown", - "smealting", "smelting", - "smeesters", "semesters", - "snadstorm", "sandstorm", - "snippetts", "snippets", - "snowfalke", "snowflake", - "snowflaek", "snowflake", - "snowlfake", "snowflake", - "snwoballs", "snowballs", - "snythesis", "synthesis", - "snythetic", "synthetic", - "socailism", "socialism", - "socailist", "socialist", - "socailize", "socialize", - "soceities", "societies", - "socialini", "socializing", - "socialiss", "socialists", - "socialsim", "socialism", - "socieites", "societies", - "socilaism", "socialism", - "socilaist", "socialist", - "sociopati", "sociopathic", - "sociopats", "sociopaths", - "socratees", "socrates", - "socrateks", "socrates", - "soemthing", "something", - "sohpomore", "sophomore", - "soliliquy", "soliloquy", - "somehting", "something", - "someoneis", "someones", - "somethign", "something", - "somethins", "somethings", - "sopohmore", "sophomore", - "sotryline", "storyline", - "soundtrak", "soundtrack", - "sountrack", "soundtrack", - "sourthern", "southern", - "souvenier", "souvenir", - "soveregin", "sovereign", - "sovereing", "sovereign", - "soveriegn", "sovereign", - "spacegoat", "scapegoat", - "spagehtti", "spaghetti", - "spahgetti", "spaghetti", - "sparlking", "sparkling", - "spartants", "spartans", - "specailly", "specially", - "specailty", "specialty", - "specality", "specialty", - "speciales", "specials", - "specialis", "specials", - "speciatly", "specialty", - "specifing", "specifying", - "specimine", "specimen", - "spectrail", "spectral", - "specualte", "speculate", - "speechers", "speeches", - "spehrical", "spherical", - "speically", "specially", - "spetember", "september", - "sphagetti", "spaghetti", - "splatooon", "splatoon", - "sponosred", "sponsored", - "sponsered", "sponsored", - "sponsores", "sponsors", - "spontanes", "spontaneous", - "sponzored", "sponsored", - "sprakling", "sparkling", - "sprinkeld", "sprinkled", - "squadroon", "squadron", - "squirrles", "squirrels", - "squirrtle", "squirrel", - "squrriels", "squirrels", - "srirachia", "sriracha", - "srirachra", "sriracha", - "stabliize", "stabilize", - "stainlees", "stainless", - "startegic", "strategic", - "startlxde", "startled", - "statisitc", "statistic", - "staurdays", "saturdays", - "steadilly", "steadily", - "stealthly", "stealthy", - "stichting", "stitching", - "sticthing", "stitching", - "stimulans", "stimulants", - "stockplie", "stockpile", - "stornegst", "strongest", - "stragetic", "strategic", - "straightn", "straighten", - "strangets", "strangest", - "strategis", "strategies", - "strawbery", "strawberry", - "streamade", "streamed", - "streamare", "streamer", - "streamear", "streamer", - "strechted", "stretched", - "strechtes", "stretches", - "strecthed", "stretched", - "strecthes", "stretches", - "stregnths", "strengths", - "strenghen", "strengthen", - "strengthn", "strengthen", - "strentghs", "strengths", - "stressade", "stressed", - "stressers", "stresses", - 
"strictist", "strictest", - "stringnet", "stringent", - "stroyline", "storyline", - "structual", "structural", - "structurs", "structures", - "strucutre", "structure", - "struggeld", "struggled", - "struggels", "struggles", - "stryofoam", "styrofoam", - "stuctured", "structured", - "stuggling", "struggling", - "stupitidy", "stupidity", - "sturcture", "structure", - "sturggled", "struggled", - "sturggles", "struggles", - "styrofaom", "styrofoam", - "subarmine", "submarine", - "subculter", "subculture", - "submachne", "submachine", - "subpecies", "subspecies", - "subscirbe", "subscribe", - "subsidary", "subsidiary", - "subsizide", "subsidize", - "subsquent", "subsequent", - "subsrcibe", "subscribe", - "substanse", "substances", - "substanta", "substantial", - "substante", "substantive", - "substarte", "substrate", - "substitue", "substitute", - "substract", "subtract", - "subtances", "substances", - "subtiltes", "subtitles", - "subtitels", "subtitles", - "subtletly", "subtlety", - "subtlties", "subtitles", - "succedded", "succeeded", - "succeedes", "succeeds", - "succesful", "successful", - "succesion", "succession", - "succesive", "successive", - "suceeding", "succeeding", - "sucesfuly", "successfully", - "sucessful", "successful", - "sucession", "succession", - "sucessive", "successive", - "sufferage", "suffrage", - "sufferred", "suffered", - "sufficent", "sufficient", - "suggestes", "suggests", - "suggestie", "suggestive", - "sumbarine", "submarine", - "sumberged", "submerged", - "summenors", "summoners", - "summoenrs", "summoners", - "sunderlad", "sunderland", - "sunglases", "sunglasses", - "superfluu", "superfluous", - "superiour", "superior", - "superisor", "superiors", - "supermare", "supermarket", - "superviso", "supervision", - "suposedly", "supposedly", - "supportes", "supports", - "suppreses", "suppress", - "supressed", "suppressed", - "supresses", "suppresses", - "suprising", "surprising", - "suprizing", "surprising", - "supsicion", "suspicion", - "surounded", "surrounded", - "surprized", "surprised", - "surronded", "surrounded", - "surrouded", "surrounded", - "surrouned", "surround", - "survivers", "survivors", - "survivied", "survived", - "survivour", "survivor", - "susbcribe", "subscribe", - "susbtrate", "substrate", - "susncreen", "sunscreen", - "suspectes", "suspects", - "suspendes", "suspense", - "suspensie", "suspense", - "swastikka", "swastika", - "sweatshit", "sweatshirt", - "sweetheat", "sweetheart", - "switchign", "switching", - "swithcing", "switching", - "swtiching", "switching", - "sydnicate", "syndicate", - "sykwalker", "skywalker", - "sylablles", "syllables", - "syllabels", "syllables", - "symbolsim", "symbolism", - "symettric", "symmetric", - "symmetral", "symmetric", - "symmetria", "symmetrical", - "symoblism", "symbolism", - "sympathie", "sympathize", - "symphoney", "symphony", - "symptomes", "symptoms", - "symptomps", "symptoms", - "synagouge", "synagogue", - "syndacite", "syndicate", - "syndiacte", "syndicate", - "synidcate", "syndicate", - "synonymes", "synonyms", - "synonymis", "synonyms", - "synonymns", "synonyms", - "synonymos", "synonymous", - "synonymus", "synonyms", - "synopsies", "synopsis", - "syntehsis", "synthesis", - "syntehtic", "synthetic", - "syntethic", "synthetic", - "syphyllis", "syphilis", - "syracusae", "syracuse", - "sytrofoam", "styrofoam", - "tablespon", "tablespoon", - "tacticaly", "tactically", - "tanenhill", "tannehill", - "tannheill", "tannehill", - "targetted", "targeted", - "tawainese", "taiwanese", - "tawianese", "taiwanese", - 
"taxanomic", "taxonomic", - "teamfighs", "teamfights", - "teamfigth", "teamfight", - "teamifght", "teamfight", - "teampseak", "teamspeak", - "teaspooon", "teaspoon", - "techician", "technician", - "techinque", "technique", - "technolgy", "technology", - "teeangers", "teenagers", - "tehtering", "tethering", - "telegrpah", "telegraph", - "televsion", "television", - "temafight", "teamfight", - "tempaltes", "templates", - "temparate", "temperate", - "templaras", "templars", - "templares", "templars", - "temporali", "temporarily", - "tenacitiy", "tenacity", - "tensiones", "tensions", - "tentacels", "tentacles", - "tentacuel", "tentacle", - "tentalces", "tentacles", - "termianls", "terminals", - "terminato", "termination", - "terorrism", "terrorism", - "terorrist", "terrorist", - "terrabyte", "terabyte", - "terribley", "terribly", - "terriblly", "terribly", - "terriroty", "territory", - "terrorits", "terrorist", - "terrorsim", "terrorism", - "tesitcles", "testicles", - "tesitmony", "testimony", - "testicels", "testicles", - "testomony", "testimony", - "texturers", "textures", - "thankfuly", "thankfully", - "thankyoou", "thankyou", - "themselfs", "themselves", - "themselvs", "themselves", - "themslves", "themselves", - "theologia", "theological", - "therafter", "thereafter", - "therefoer", "therefor", - "therefour", "therefor", - "theroists", "theorists", - "thetering", "tethering", - "thirlling", "thrilling", - "thirteeen", "thirteen", - "thoecracy", "theocracy", - "thoerists", "theorists", - "thoroughy", "thoroughly", - "thoughout", "throughout", - "threatend", "threatened", - "throrough", "thorough", - "throughly", "thoroughly", - "througout", "throughout", - "thrusdays", "thursdays", - "thurdsays", "thursdays", - "thursdsay", "thursdays", - "thursters", "thrusters", - "tiawanese", "taiwanese", - "timestmap", "timestamp", - "tirangles", "triangles", - "tocuhdown", "touchdown", - "toghether", "together", - "tolerence", "tolerance", - "tommorrow", "tomorrow", - "torandoes", "tornadoes", - "torchligt", "torchlight", - "torelable", "tolerable", - "toritllas", "tortillas", - "tornaodes", "tornadoes", - "torpeados", "torpedoes", - "torrentas", "torrents", - "torrentes", "torrents", - "tortialls", "tortillas", - "tortillia", "tortilla", - "tortillla", "tortilla", - "tottehnam", "tottenham", - "tottenahm", "tottenham", - "tottneham", "tottenham", - "toturials", "tutorials", - "touchdwon", "touchdown", - "touristas", "tourists", - "touristes", "tourists", - "touristey", "touristy", - "touristly", "touristy", - "touristsy", "touristy", - "tournamet", "tournament", - "toxicitiy", "toxicity", - "trafficed", "trafficked", - "tragicaly", "tragically", - "traileras", "trailers", - "traingles", "triangles", - "trainwrek", "trainwreck", - "traitoris", "traitors", - "traitorus", "traitors", - "tramautic", "traumatic", - "tranlsate", "translate", - "transalte", "translate", - "transcris", "transcripts", - "transcrit", "transcript", - "transferd", "transferred", - "transfere", "transferred", - "transfors", "transforms", - "transfrom", "transform", - "transiten", "transient", - "transitin", "transitions", - "transofrm", "transform", - "transplat", "transplant", - "trasnfers", "transfers", - "trasnform", "transform", - "trasnport", "transport", - "traversie", "traverse", - "travestry", "travesty", - "treasuers", "treasures", - "treasurey", "treasury", - "treatmens", "treatments", - "treausres", "treasures", - "tremendos", "tremendous", - "trhilling", "thrilling", - "trhusters", "thrusters", - "triangels", 
"triangles", - "trianlges", "triangles", - "tribunaal", "tribunal", - "triguered", "triggered", - "trinagles", "triangles", - "truamatic", "traumatic", - "truthfuly", "truthfully", - "tunrtable", "turntable", - "turnaroud", "turnaround", - "turntabel", "turntable", - "typcially", "typically", - "tyrranies", "tyrannies", - "ubiquitos", "ubiquitous", - "ugprading", "upgrading", - "ukrainain", "ukrainian", - "ukrainias", "ukrainians", - "ukrainina", "ukrainian", - "ukrainisn", "ukrainians", - "ukrianian", "ukrainian", - "ulitmatum", "ultimatum", - "ulteriour", "ulterior", - "umbrellla", "umbrella", - "unaminous", "unanimous", - "unanmious", "unanimous", - "unanswerd", "unanswered", - "unanymous", "unanimous", - "unbannend", "unbanned", - "uncensord", "uncensored", - "uncomited", "uncommitted", - "undercunt", "undercut", - "underdong", "underdog", - "undergard", "undergrad", - "underming", "undermining", - "understad", "understands", - "underwaer", "underwear", - "underware", "underwear", - "undescore", "underscore", - "unforseen", "unforeseen", - "unfortune", "unfortunate", - "unfriendy", "unfriendly", - "unhealhty", "unhealthy", - "unheathly", "unhealthy", - "unhelathy", "unhealthy", - "unicornis", "unicorns", - "unicornus", "unicorns", - "uniformes", "uniforms", - "uninamous", "unanimous", - "unintuive", "unintuitive", - "uniquelly", "uniquely", - "unisntall", "uninstall", - "univerity", "university", - "universse", "universes", - "univesity", "university", - "unnistall", "uninstall", - "unoffical", "unofficial", - "unopenend", "unopened", - "unplayabe", "unplayable", - "unplesant", "unpleasant", - "unpopluar", "unpopular", - "unrankend", "unranked", - "unreliabe", "unreliable", - "unrwitten", "unwritten", - "untrianed", "untrained", - "unusaully", "unusually", - "unuseable", "unusable", - "unusuable", "unusable", - "unvierses", "universes", - "unweildly", "unwieldy", - "unwieldly", "unwieldy", - "unwirtten", "unwritten", - "unworthly", "unworthy", - "upcomming", "upcoming", - "upgarding", "upgrading", - "upgradded", "upgraded", - "uplfiting", "uplifting", - "uplifitng", "uplifting", - "urkainian", "ukrainian", - "utlimatum", "ultimatum", - "vacciante", "vaccinate", - "vaccinato", "vaccination", - "vacciners", "vaccines", - "vacestomy", "vasectomy", - "vaguaries", "vagaries", - "vaibility", "viability", - "vaildated", "validated", - "vairables", "variables", - "valdiated", "validated", - "valentein", "valentine", - "valentien", "valentine", - "valentins", "valentines", - "validitiy", "validity", - "valueable", "valuable", - "vanadlism", "vandalism", - "vandalsim", "vandalism", - "varaibles", "variables", - "varations", "variations", - "variantes", "variants", - "vascetomy", "vasectomy", - "vastecomy", "vasectomy", - "veganisim", "veganism", - "vegetarin", "vegetarians", - "vegitable", "vegetable", - "vehementy", "vehemently", - "veiwpoint", "viewpoint", - "velantine", "valentine", - "vendettta", "vendetta", - "venegance", "vengeance", - "veneuzela", "venezuela", - "venezeula", "venezuela", - "venezulea", "venezuela", - "vengaence", "vengeance", - "vengenace", "vengeance", - "ventilato", "ventilation", - "verbatium", "verbatim", - "verfiying", "verifying", - "verifiyng", "verifying", - "verisions", "revisions", - "versalite", "versatile", - "versatily", "versatility", - "versiones", "versions", - "versitale", "versatile", - "verstaile", "versatile", - "verticaly", "vertically", - "veryifing", "verifying", - "vicotrian", "victorian", - "vicotries", "victories", - "victoires", "victories", - 
"victorain", "victorian", - "victorina", "victorian", - "victorios", "victorious", - "videogaem", "videogame", - "videogams", "videogames", - "vidoegame", "videogame", - "viewpiont", "viewpoint", - "vigilence", "vigilance", - "vigliante", "vigilante", - "vigourous", "vigorous", - "viligante", "vigilante", - "viloently", "violently", - "vincinity", "vicinity", - "vioalting", "violating", - "violentce", "violence", - "virbation", "vibration", - "virgintiy", "virginity", - "virignity", "virginity", - "virutally", "virtually", - "visibiliy", "visibility", - "vitaminas", "vitamins", - "vitamines", "vitamins", - "vitrually", "virtually", - "vociemail", "voicemail", - "voilating", "violating", - "voilation", "violation", - "voilently", "violently", - "volatiliy", "volatility", - "voleyball", "volleyball", - "volontary", "voluntary", - "volonteer", "volunteer", - "volunatry", "voluntary", - "volunteed", "volunteered", - "vriginity", "virginity", - "wallpapes", "wallpapers", - "warrantly", "warranty", - "warrriors", "warriors", - "wavelengh", "wavelength", - "weakenend", "weakened", - "weakneses", "weakness", - "weaknesss", "weaknesses", - "wealtheir", "wealthier", - "weaponary", "weaponry", - "wedensday", "wednesday", - "wednesdsy", "wednesdays", - "wednessay", "wednesdays", - "wednseday", "wednesday", - "welathier", "wealthier", - "wendesday", "wednesday", - "wesbtrook", "westbrook", - "westernes", "westerners", - "westrbook", "westbrook", - "whereever", "wherever", - "whietlist", "whitelist", - "whilrwind", "whirlwind", - "whilsting", "whistling", - "whipsered", "whispered", - "whislting", "whistling", - "whisperes", "whispers", - "whitelsit", "whitelist", - "whitleist", "whitelist", - "whitsling", "whistling", - "whrilwind", "whirlwind", - "whsipered", "whispered", - "whtielist", "whitelist", - "widespred", "widespread", - "widesread", "widespread", - "windshied", "windshield", - "wintesses", "witnesses", - "wisconisn", "wisconsin", - "wishlisht", "wishlist", - "wishpered", "whispered", - "withdrawl", "withdrawal", - "withelist", "whitelist", - "witnesess", "witnesses", - "wolrdview", "worldview", - "wolrdwide", "worldwide", - "wonderlad", "wonderland", - "wordlview", "worldview", - "wordlwide", "worldwide", - "worhtless", "worthless", - "workfroce", "workforce", - "worldivew", "worldview", - "worldveiw", "worldview", - "worstened", "worsened", - "worthelss", "worthless", - "xenbolade", "xenoblade", - "xenobalde", "xenoblade", - "xenophoby", "xenophobia", - "xeonblade", "xenoblade", - "yementite", "yemenite", - "yorkshrie", "yorkshire", - "yorskhire", "yorkshire", - "yosemitie", "yosemite", - "youngents", "youngest", - "yourselvs", "yourselves", - "zimbabwae", "zimbabwe", - "zionistas", "zionists", - "zionistes", "zionists", - "abandond", "abandoned", - "abdomine", "abdomen", - "abilitiy", "ability", - "abilties", "abilities", - "abondons", "abandons", - "aboslute", "absolute", - "abosrbed", "absorbed", - "abruplty", "abruptly", - "abrutply", "abruptly", - "abscence", "absence", - "absestos", "asbestos", - "absoluts", "absolutes", - "absolvte", "absolve", - "absorbes", "absorbs", - "absoulte", "absolute", - "abstante", "bastante", - "abudance", "abundance", - "abudcted", "abducted", - "abundunt", "abundant", - "aburptly", "abruptly", - "abuseres", "abusers", - "abusrdly", "absurdly", - "academis", "academics", - "accademy", "academy", - "acccused", "accused", - "acceptes", "accepts", - "accidens", "accidents", - "accideny", "accidently", - "accoring", "according", - "accountt", "accountant", - 
"accpeted", "accepted", - "accuarcy", "accuracy", - "accumule", "accumulate", - "accusato", "accusation", - "accussed", "accused", - "acedamia", "academia", - "acedemic", "academic", - "acheived", "achieved", - "acheives", "achieves", - "achieval", "achievable", - "acnedote", "anecdote", - "acording", "according", - "acornyms", "acronyms", - "acousitc", "acoustic", - "acoutsic", "acoustic", - "acovados", "avocados", - "acquifer", "acquire", - "acquited", "acquitted", - "acquried", "acquired", - "acronmys", "acronyms", - "acronysm", "acronyms", - "acroynms", "acronyms", - "acrynoms", "acronyms", - "acsended", "ascended", - "actaully", "actually", - "activite", "activities", - "activits", "activities", - "activley", "actively", - "actresss", "actresses", - "actualey", "actualy", - "actualiy", "actuality", - "actualky", "actualy", - "actualmy", "actualy", - "actualoy", "actualy", - "actualpy", "actualy", - "actualty", "actualy", - "acutally", "actually", - "acutions", "auctions", - "adaptare", "adapter", - "adbandon", "abandon", - "adbucted", "abducted", - "addictes", "addicts", - "addictin", "addictions", - "addictis", "addictions", - "addional", "additional", - "addopted", "adopted", - "addresed", "addressed", - "adealide", "adelaide", - "adecuate", "adequate", - "adeilade", "adelaide", - "adeladie", "adelaide", - "adeliade", "adelaide", - "adeqaute", "adequate", - "adheisve", "adhesive", - "adhevise", "adhesive", - "adivsors", "advisors", - "admiraal", "admiral", - "adolence", "adolescent", - "adorbale", "adorable", - "adovcacy", "advocacy", - "adpaters", "adapters", - "adquired", "acquired", - "adquires", "acquires", - "adresing", "addressing", - "adressed", "addressed", - "adroable", "adorable", - "adultrey", "adultery", - "adventue", "adventures", - "adventus", "adventures", - "advertis", "adverts", - "advesary", "adversary", - "adviseer", "adviser", - "adviseur", "adviser", - "advocade", "advocated", - "advocats", "advocates", - "advsiors", "advisors", - "aethists", "atheists", - "affaires", "affairs", - "affilate", "affiliate", - "affintiy", "affinity", - "affleunt", "affluent", - "affulent", "affluent", - "afircans", "africans", - "africain", "african", - "afternon", "afternoon", - "againnst", "against", - "agnositc", "agnostic", - "agonstic", "agnostic", - "agravate", "aggravate", - "agreemnt", "agreement", - "agregate", "aggregate", - "agressie", "agressive", - "agressor", "aggressor", - "agrieved", "aggrieved", - "agruable", "arguable", - "agruably", "arguably", - "agrument", "argument", - "ahtletes", "athletes", - "aincents", "ancients", - "airboner", "airborne", - "airbrone", "airborne", - "aircarft", "aircraft", - "airplans", "airplanes", - "airporta", "airports", - "airpsace", "airspace", - "airscape", "airspace", - "akransas", "arkansas", - "alchemey", "alchemy", - "alchohol", "alcohol", - "alcholic", "alcoholic", - "alcoholc", "alcoholics", - "aldutery", "adultery", - "aleniate", "alienate", - "algoritm", "algorithm", - "alimoney", "alimony", - "alirghty", "alrighty", - "allaince", "alliance", - "alledged", "alleged", - "alledges", "alleges", - "allegedy", "allegedly", - "allegely", "allegedly", - "allegric", "allergic", - "allergey", "allergy", - "allianse", "alliances", - "alligned", "aligned", - "allinace", "alliance", - "allopone", "allophone", - "allready", "already", - "almigthy", "almighty", - "alpahbet", "alphabet", - "alrigthy", "alrighty", - "altantic", "atlantic", - "alterato", "alteration", - "alternar", "alternator", - "althetes", "athletes", - "althetic", 
"athletic", - "altriusm", "altruism", - "altrusim", "altruism", - "alturism", "altruism", - "aluminim", "aluminium", - "alumnium", "aluminum", - "alunimum", "aluminum", - "amatersu", "amateurs", - "amaterus", "amateurs", - "amendmet", "amendments", - "amercian", "american", - "amercias", "americas", - "amernian", "armenian", - "amethsyt", "amethyst", - "ameythst", "amethyst", - "ammended", "amended", - "amnestry", "amnesty", - "amoungst", "amongst", - "amplifiy", "amplify", - "amplifly", "amplify", - "amrchair", "armchair", - "amrenian", "armenian", - "amtheyst", "amethyst", - "analgoue", "analogue", - "analisys", "analysis", - "analitic", "analytic", - "analouge", "analogue", - "analysie", "analyse", - "analysit", "analyst", - "analyste", "analyse", - "analysze", "analyse", - "analzyed", "analyzed", - "anaolgue", "analogue", - "anarchim", "anarchism", - "anaylses", "analyses", - "anaylsis", "analysis", - "anaylsts", "analysts", - "anaylzed", "analyzed", - "ancedote", "anecdote", - "anceints", "ancients", - "ancinets", "ancients", - "andoirds", "androids", - "andorids", "androids", - "andriods", "androids", - "anecdots", "anecdotes", - "anectode", "anecdote", - "anedocte", "anecdote", - "aneroxia", "anorexia", - "aneroxic", "anorexic", - "angostic", "agnostic", - "angrilly", "angrily", - "anicents", "ancients", - "animatie", "animate", - "animatte", "animate", - "anlayses", "analyses", - "annoints", "anoints", - "annouced", "announced", - "annoucne", "announce", - "anntenas", "antennas", - "anoerxia", "anorexia", - "anoerxic", "anorexic", - "anonymos", "anonymous", - "anoreixa", "anorexia", - "anounced", "announced", - "anoxeria", "anorexia", - "anoxeric", "anorexic", - "answeres", "answers", - "antartic", "antarctic", - "antennea", "antenna", - "antennna", "antenna", - "anticipe", "anticipate", - "antiquae", "antique", - "antivirs", "antivirus", - "anwsered", "answered", - "anyhting", "anything", - "anyhwere", "anywhere", - "anyoneis", "anyones", - "anythign", "anything", - "anytying", "anything", - "aparment", "apartment", - "apartmet", "apartments", - "apenines", "apennines", - "aperutre", "aperture", - "aplhabet", "alphabet", - "apologes", "apologise", - "aposltes", "apostles", - "apostels", "apostles", - "appaluse", "applause", - "apparant", "apparent", - "appareal", "apparel", - "appareil", "apparel", - "apperead", "appeared", - "applaued", "applaud", - "appluase", "applause", - "appology", "apology", - "apporach", "approach", - "appraoch", "approach", - "apreture", "aperture", - "apsotles", "apostles", - "aqaurium", "aquarium", - "aqcuired", "acquired", - "aquaduct", "aqueduct", - "aquairum", "aquarium", - "aquaruim", "aquarium", - "aquiring", "acquiring", - "aquitted", "acquitted", - "arbitary", "arbitrary", - "arbitray", "arbitrary", - "arbiture", "arbiter", - "architet", "architect", - "archtype", "archetype", - "aremnian", "armenian", - "argentia", "argentina", - "argubaly", "arguably", - "arguemet", "arguement", - "arguemtn", "arguement", - "ariborne", "airborne", - "aricraft", "aircraft", - "ariplane", "airplane", - "ariports", "airports", - "arispace", "airspace", - "aristote", "aristotle", - "aritfact", "artifact", - "arizonia", "arizona", - "arkasnas", "arkansas", - "arlighty", "alrighty", - "armamant", "armament", - "armenain", "armenian", - "armenina", "armenian", - "armpitts", "armpits", - "armstrog", "armstrong", - "arpanoid", "paranoid", - "arpeture", "aperture", - "arragned", "arranged", - "arrestes", "arrests", - "arrestos", "arrests", - "arsenaal", "arsenal", - 
"artemios", "artemis", - "artemius", "artemis", - "arthrits", "arthritis", - "articule", "articulate", - "artifacs", "artifacts", - "artifcat", "artifact", - "artilley", "artillery", - "artisitc", "artistic", - "artistas", "artists", - "arugable", "arguable", - "arugably", "arguably", - "arugment", "argument", - "asborbed", "absorbed", - "asburdly", "absurdly", - "ascneded", "ascended", - "asissted", "assisted", - "askreddt", "askreddit", - "asnwered", "answered", - "aspectos", "aspects", - "asperges", "aspergers", - "assasins", "assassins", - "assemple", "assemble", - "assertin", "assertions", - "asshates", "asshats", - "asshatts", "asshats", - "assimile", "assimilate", - "assistat", "assistants", - "assitant", "assistant", - "assmeble", "assemble", - "assmebly", "assembly", - "asssasin", "assassin", - "assualts", "assaults", - "asteorid", "asteroid", - "asteriks", "asterisk", - "asteriod", "asteroid", - "asterois", "asteroids", - "astersik", "asterisk", - "asthetic", "aesthetic", - "astronat", "astronaut", - "asutrian", "austrian", - "atheisim", "atheism", - "atheistc", "atheistic", - "atheltes", "athletes", - "atheltic", "athletic", - "athenean", "athenian", - "athesits", "atheists", - "athetlic", "athletic", - "athients", "athiest", - "atittude", "attitude", - "atlantia", "atlanta", - "atmoizer", "atomizer", - "atomzier", "atomizer", - "atribute", "attribute", - "atrifact", "artifact", - "attackes", "attackers", - "attemped", "attempted", - "attemted", "attempted", - "attemtps", "attempts", - "attidute", "attitude", - "attitide", "attitude", - "attribue", "attribute", - "aucitons", "auctions", - "audactiy", "audacity", - "audcaity", "audacity", - "audeince", "audience", - "audiobok", "audiobook", - "austeriy", "austerity", - "austiran", "austrian", - "austitic", "autistic", - "austrain", "austrian", - "australa", "australian", - "austrija", "austria", - "austrila", "austria", - "autisitc", "autistic", - "autoattk", "autoattack", - "autograh", "autograph", - "automato", "automation", - "automony", "autonomy", - "autority", "authority", - "autsitic", "autistic", - "auxilary", "auxiliary", - "avacodos", "avocados", - "avaiable", "available", - "availabe", "available", - "availble", "available", - "avaition", "aviation", - "avalable", "available", - "avalance", "avalanche", - "avataras", "avatars", - "avatards", "avatars", - "avatares", "avatars", - "averadge", "averaged", - "avergaed", "averaged", - "avergaes", "averages", - "aviaiton", "aviation", - "avilable", "available", - "avnegers", "avengers", - "avodacos", "avocados", - "awekened", "weakened", - "awesomey", "awesomely", - "awfullly", "awfully", - "awkwardy", "awkwardly", - "awnsered", "answered", - "babysite", "babysitter", - "baceause", "because", - "bacehlor", "bachelor", - "bachleor", "bachelor", - "bacholer", "bachelor", - "backeast", "backseat", - "backerds", "backers", - "backfied", "backfield", - "backpacs", "backpacks", - "balcanes", "balances", - "balconey", "balcony", - "balconny", "balcony", - "ballistc", "ballistic", - "balnaced", "balanced", - "banannas", "bananas", - "banditas", "bandits", - "bandwith", "bandwidth", - "bangkock", "bangkok", - "baptisim", "baptism", - "barabric", "barbaric", - "barbarin", "barbarian", - "barbaris", "barbarians", - "bardford", "bradford", - "bargaing", "bargaining", - "baristia", "barista", - "barrakcs", "barracks", - "barrells", "barrels", - "basicaly", "basically", - "basiclay", "basicly", - "basicley", "basicly", - "basicliy", "basicly", - "batistia", "batista", - "battalin", 
"battalion", - "bayonent", "bayonet", - "beachead", "beachhead", - "beacuoup", "beaucoup", - "beardude", "bearded", - "beastley", "beastly", - "beatiful", "beautiful", - "beccause", "because", - "becuasse", "becuase", - "befirend", "befriend", - "befreind", "befriend", - "begginer", "beginner", - "begginig", "begging", - "begginng", "begging", - "begining", "beginning", - "beginnig", "beginning", - "behaivor", "behavior", - "behavios", "behaviours", - "behavoir", "behavior", - "behavour", "behavior", - "behngazi", "benghazi", - "behtesda", "bethesda", - "beleived", "believed", - "beleiver", "believer", - "beleives", "believes", - "beliefes", "beliefs", - "benefica", "beneficial", - "bengahzi", "benghazi", - "bengalas", "bengals", - "bengalos", "bengals", - "bengazhi", "benghazi", - "benghzai", "benghazi", - "bengzhai", "benghazi", - "benhgazi", "benghazi", - "benidect", "benedict", - "benifits", "benefits", - "berekley", "berkeley", - "berserkr", "berserker", - "beseiged", "besieged", - "betehsda", "bethesda", - "beteshda", "bethesda", - "bethdesa", "bethesda", - "bethedsa", "bethesda", - "bethseda", "bethesda", - "beyoncye", "beyonce", - "bibilcal", "biblical", - "bicylces", "bicycles", - "bigfooot", "bigfoot", - "bigining", "beginning", - "bilbical", "biblical", - "billboad", "billboard", - "bilsters", "blisters", - "bilzzard", "blizzard", - "bilzzcon", "blizzcon", - "biologia", "biological", - "birhtday", "birthday", - "birsbane", "brisbane", - "birthdsy", "birthdays", - "biseuxal", "bisexual", - "bisexaul", "bisexual", - "bitcions", "bitcoins", - "bitocins", "bitcoins", - "blackade", "blacked", - "blackend", "blacked", - "blackjak", "blackjack", - "blacklit", "blacklist", - "blatanty", "blatantly", - "blessins", "blessings", - "blessure", "blessing", - "bloggare", "blogger", - "bloggeur", "blogger", - "bluebery", "blueberry", - "bluetooh", "bluetooth", - "blugaria", "bulgaria", - "boardway", "broadway", - "bollcoks", "bollocks", - "bomberos", "bombers", - "bookmars", "bookmarks", - "boradway", "broadway", - "boredoom", "boredom", - "bouldore", "boulder", - "bounites", "bounties", - "boutnies", "bounties", - "boutqiue", "boutique", - "bouyancy", "buoyancy", - "boyfried", "boyfriend", - "bradcast", "broadcast", - "bradfrod", "bradford", - "brakeout", "breakout", - "braodway", "broadway", - "braverly", "bravery", - "breathis", "breaths", - "breathos", "breaths", - "brekaout", "breakout", - "brendamn", "brendan", - "breweres", "brewers", - "brewerey", "brewery", - "brewerks", "brewers", - "brewerys", "brewers", - "brigaged", "brigade", - "brigated", "brigade", - "brigthen", "brighten", - "briliant", "brilliant", - "brillant", "brilliant", - "bristool", "bristol", - "brithday", "birthday", - "brittish", "british", - "briusers", "bruisers", - "broadbad", "broadband", - "broadcat", "broadcasts", - "broadley", "broadly", - "brocolli", "broccoli", - "brodaway", "broadway", - "broncoes", "broncos", - "broswing", "browsing", - "browines", "brownies", - "browisng", "browsing", - "brtually", "brutally", - "brugundy", "burgundy", - "bruisend", "bruised", - "brussles", "brussels", - "brusting", "bursting", - "bubblews", "bubbles", - "buddhits", "buddhist", - "buddhsim", "buddhism", - "buddishm", "buddhism", - "buddisht", "buddhist", - "buglaria", "bulgaria", - "buhddism", "buddhism", - "buhddist", "buddhist", - "buidlers", "builders", - "buidling", "building", - "buildins", "buildings", - "buisness", "business", - "bulagria", "bulgaria", - "bulgaira", "bulgaria", - "buliders", "builders", - 
"buliding", "building", - "bulletts", "bullets", - "burisers", "bruisers", - "burriots", "burritos", - "burritio", "burrito", - "burritto", "burrito", - "burrtios", "burritos", - "burssels", "brussels", - "burtally", "brutally", - "burtsing", "bursting", - "busrting", "bursting", - "butcherd", "butchered", - "butterey", "buttery", - "butterfy", "butterfly", - "butterry", "buttery", - "butthoel", "butthole", - "bycicles", "bicycles", - "cabbagge", "cabbage", - "cabients", "cabinets", - "cabinate", "cabinet", - "cabinent", "cabinet", - "cabniets", "cabinets", - "caclulus", "calculus", - "cafetera", "cafeteria", - "caffinee", "caffeine", - "cahsiers", "cashiers", - "cainster", "canister", - "calander", "calendar", - "calcular", "calculator", - "calgarry", "calgary", - "calibler", "calibre", - "caloires", "calories", - "calrkson", "clarkson", - "calroies", "calories", - "calssify", "classify", - "calulate", "calculate", - "calymore", "claymore", - "camapign", "campaign", - "cambodai", "cambodia", - "camboida", "cambodia", - "cambpell", "campbell", - "cambride", "cambridge", - "cambrige", "cambridge", - "camoufle", "camouflage", - "campagin", "campaign", - "campaing", "campaign", - "campains", "campaigns", - "camperas", "campers", - "camperos", "campers", - "canadias", "canadians", - "cananbis", "cannabis", - "cancelas", "cancels", - "canceles", "cancels", - "cancells", "cancels", - "canceres", "cancers", - "cancerns", "cancers", - "cancerus", "cancers", - "candiate", "candidate", - "candiens", "candies", - "canistre", "canister", - "cannabil", "cannibal", - "cannbial", "cannibal", - "cannibas", "cannabis", - "cansiter", "canister", - "capitans", "captains", - "capitola", "capital", - "capitulo", "capitol", - "capmbell", "campbell", - "capsuels", "capsules", - "capsulse", "capsules", - "capsumel", "capsule", - "capteurs", "captures", - "captials", "capitals", - "captians", "captains", - "capusles", "capsules", - "caputres", "captures", - "cardboad", "cardboard", - "cardianl", "cardinal", - "cardnial", "cardinal", - "careflly", "carefully", - "carefull", "careful", - "carefuly", "carefully", - "caricate", "caricature", - "caridgan", "cardigan", - "caridnal", "cardinal", - "carinval", "carnival", - "carloina", "carolina", - "carnagie", "carnegie", - "carnigie", "carnegie", - "carnvial", "carnival", - "carrotts", "carrots", - "carrotus", "carrots", - "cartells", "cartels", - "cartmaan", "cartman", - "cartride", "cartridge", - "cartrige", "cartridge", - "carvinal", "carnival", - "casaulty", "casualty", - "casheirs", "cashiers", - "cashieer", "cashier", - "cashires", "cashiers", - "castleos", "castles", - "castlers", "castles", - "casulaty", "casualty", - "cataclym", "cataclysm", - "catagory", "category", - "cataline", "catiline", - "cataloge", "catalogue", - "catalsyt", "catalyst", - "cataylst", "catalyst", - "cathloic", "catholic", - "catlayst", "catalyst", - "caucasin", "caucasian", - "causalty", "casualty", - "cellural", "cellular", - "celullar", "cellular", - "celverly", "cleverly", - "cemetary", "cemetery", - "centeres", "centers", - "centerns", "centers", - "centrase", "centres", - "centrers", "centres", - "ceratine", "creatine", - "cerberal", "cerebral", - "cerbreus", "cerberus", - "cerbures", "cerberus", - "ceremone", "ceremonies", - "cerimony", "ceremony", - "ceromony", "ceremony", - "certainy", "certainty", - "challege", "challenge", - "chambear", "chamber", - "chambres", "chambers", - "champage", "champagne", - "chanisaw", "chainsaw", - "chanlder", "chandler", - "charcaol", "charcoal", - 
"chargehr", "charger", - "chargeur", "charger", - "chariman", "chairman", - "charimsa", "charisma", - "charmisa", "charisma", - "charocal", "charcoal", - "charsima", "charisma", - "chasiers", "cashiers", - "chassids", "chassis", - "chassies", "chassis", - "chatolic", "catholic", - "chcukles", "chuckles", - "checkare", "checker", - "checkear", "checker", - "cheesees", "cheeses", - "cheeseus", "cheeses", - "cheetoos", "cheetos", - "chemcial", "chemical", - "chemisty", "chemistry", - "chernobl", "chernobyl", - "chiansaw", "chainsaw", - "chidlish", "childish", - "chihuaha", "chihuahua", - "childres", "childrens", - "chillade", "chilled", - "chillead", "chilled", - "chillend", "chilled", - "chilvary", "chivalry", - "chinesse", "chinese", - "chivarly", "chivalry", - "chivlary", "chivalry", - "chlidish", "childish", - "chlroine", "chlorine", - "chmabers", "chambers", - "chocolae", "chocolates", - "chocolet", "chocolates", - "choesive", "cohesive", - "choicers", "choices", - "cholrine", "chlorine", - "chorline", "chlorine", - "chracter", "character", - "christin", "christian", - "chroline", "chlorine", - "chromose", "chromosome", - "chronice", "chronicles", - "chruches", "churches", - "chuckels", "chuckles", - "cielings", "ceilings", - "cigarete", "cigarettes", - "cigarets", "cigarettes", - "cilmbers", "climbers", - "cilnatro", "cilantro", - "ciltoris", "clitoris", - "circiuts", "circuits", - "circkets", "crickets", - "circlebs", "circles", - "circluar", "circular", - "ciricuit", "circuit", - "cirlcing", "circling", - "ciruclar", "circular", - "clannand", "clannad", - "clarifiy", "clarify", - "clarskon", "clarkson", - "clasical", "classical", - "classrom", "classroom", - "classsic", "classics", - "clausens", "clauses", - "cleanies", "cleanse", - "cleasner", "cleanser", - "clenaser", "cleanser", - "clevelry", "cleverly", - "clhorine", "chlorine", - "cliamtes", "climates", - "cliantro", "cilantro", - "clickare", "clicker", - "clickbat", "clickbait", - "clickear", "clicker", - "clientes", "clients", - "clincial", "clinical", - "clinicas", "clinics", - "clinicos", "clinics", - "clipboad", "clipboard", - "clitiros", "clitoris", - "closeing", "closing", - "closeley", "closely", - "clyamore", "claymore", - "clyinder", "cylinder", - "cmoputer", "computer", - "coindice", "coincide", - "collapes", "collapse", - "collares", "collars", - "collaris", "collars", - "collaros", "collars", - "collaspe", "collapse", - "colleage", "colleagues", - "collecte", "collective", - "collegue", "colleague", - "collisin", "collisions", - "collosal", "colossal", - "collpase", "collapse", - "coloardo", "colorado", - "colordao", "colorado", - "colubmia", "columbia", - "columnas", "columns", - "comadres", "comrades", - "comander", "commander", - "comandos", "commandos", - "comapany", "company", - "comapres", "compares", - "combiens", "combines", - "combinig", "combining", - "comediac", "comedic", - "comedias", "comedians", - "comestic", "cosmetic", - "comision", "commission", - "comiting", "committing", - "comitted", "committed", - "comittee", "committee", - "commandd", "commanded", - "commecen", "commence", - "commedic", "comedic", - "commense", "commenters", - "commenty", "commentary", - "commiest", "commits", - "commited", "committed", - "commitee", "committee", - "commites", "commits", - "committe", "committee", - "committs", "commits", - "commitus", "commits", - "commmand", "command", - "communit", "communist", - "companis", "companions", - "comparse", "compares", - "comparte", "compare", - "compasso", "compassion", - 
"compelte", "complete", - "compense", "compensate", - "complais", "complains", - "complane", "complacent", - "complate", "complacent", - "compleet", "complete", - "completi", "complexity", - "complets", "completes", - "complety", "completely", - "complexs", "complexes", - "complext", "complexity", - "complexy", "complexity", - "complict", "complicit", - "complier", "compiler", - "compones", "compose", - "componet", "components", - "componts", "compost", - "composet", "compost", - "composit", "compost", - "composte", "compose", - "comprese", "compressed", - "compreso", "compressor", - "compsers", "compress", - "comptown", "compton", - "compunet", "compute", - "computre", "compute", - "comradre", "comrade", - "comsetic", "cosmetic", - "conatins", "contains", - "conceald", "concealed", - "conceide", "conceived", - "conceled", "concede", - "concened", "concede", - "concepta", "conceptual", - "concered", "concede", - "concernt", "concert", - "concerte", "concrete", - "concesso", "concession", - "conceted", "concede", - "conceved", "concede", - "concibes", "concise", - "concider", "consider", - "concides", "concise", - "concious", "conscious", - "conclued", "conclude", - "concluse", "conclusive", - "concluso", "conclusion", - "concreet", "concrete", - "concrets", "concerts", - "condemnd", "condemned", - "conditon", "condition", - "condomes", "condoms", - "condomns", "condoms", - "conduict", "conduit", - "conected", "connected", - "conencts", "connects", - "confeses", "confess", - "confesos", "confess", - "confesso", "confession", - "configue", "configure", - "confilct", "conflict", - "confirmd", "confirmed", - "conflcit", "conflict", - "conflics", "conflicts", - "confrims", "confirms", - "conicide", "coincide", - "conlcude", "conclude", - "conqueor", "conquer", - "conquerd", "conquered", - "conqured", "conquered", - "conscent", "consent", - "consious", "conscious", - "constans", "constants", - "constast", "constants", - "constatn", "constant", - "constrat", "constraint", - "construt", "constructs", - "containd", "contained", - "containg", "containing", - "contaire", "containers", - "contanti", "contacting", - "contense", "contenders", - "contenst", "contents", - "contexta", "contextual", - "contextl", "contextual", - "contians", "contains", - "contined", "continued", - "contines", "continents", - "continum", "continuum", - "continus", "continues", - "continut", "continuity", - "continuu", "continuous", - "contracr", "contractor", - "contracs", "contracts", - "controll", "control", - "contruct", "construct", - "convenit", "convenient", - "convento", "convention", - "converst", "converts", - "convertr", "converter", - "conviced", "convinced", - "convicto", "conviction", - "convingi", "convincing", - "convinse", "convinces", - "cooldows", "cooldowns", - "coordine", "coordinate", - "coralina", "carolina", - "corollla", "corolla", - "corolloa", "corolla", - "corosion", "corrosion", - "corpsers", "corpses", - "corrdior", "corridor", - "correcty", "correctly", - "correnti", "correcting", - "corretly", "correctly", - "corrupto", "corruption", - "cosemtic", "cosmetic", - "cosutmes", "costumes", - "couldnot", "couldnt", - "coulored", "coloured", - "counries", "countries", - "counseil", "counsel", - "counsole", "counsel", - "counterd", "countered", - "countert", "counteract", - "countres", "counters", - "courtrom", "courtroom", - "courtsey", "courtesy", - "cousines", "cousins", - "cousings", "cousins", - "coutners", "counters", - "covanent", "covenant", - "coverted", "converted", - "coyotees", "coyotes", - 
"cpatains", "captains", - "cranbery", "cranberry", - "crayones", "crayons", - "creaeted", "created", - "createin", "creatine", - "createur", "creature", - "creatien", "creatine", - "creepgin", "creeping", - "cricling", "circling", - "cringely", "cringey", - "cringery", "cringey", - "criticas", "critics", - "critices", "critics", - "criticie", "criticise", - "criticim", "criticisms", - "criticis", "critics", - "criticms", "critics", - "criticos", "critics", - "criticts", "critics", - "criticus", "critics", - "critiera", "criteria", - "critized", "criticized", - "croatioa", "croatia", - "crossfie", "crossfire", - "crosshar", "crosshair", - "crosspot", "crosspost", - "crowbahr", "crowbar", - "cruasder", "crusader", - "cruciaal", "crucial", - "crucibel", "crucible", - "cruicble", "crucible", - "crusdaer", "crusader", - "crusiers", "cruisers", - "crusiing", "cruising", - "cruthces", "crutches", - "cthulhlu", "cthulhu", - "cthulluh", "cthulhu", - "cubpoard", "cupboard", - "cuddleys", "cuddles", - "culprint", "culprit", - "cultrual", "cultural", - "culutral", "cultural", - "cupbaord", "cupboard", - "cupborad", "cupboard", - "curcible", "crucible", - "curisers", "cruisers", - "curising", "cruising", - "currecny", "currency", - "currence", "currencies", - "currenly", "currently", - "currenty", "currently", - "cursader", "crusader", - "custcene", "cutscene", - "cutsceen", "cutscene", - "cutscens", "cutscenes", - "cutsence", "cutscene", - "cylcists", "cyclists", - "cylidner", "cylinder", - "cylindre", "cylinder", - "cynisicm", "cynicism", - "cyrstals", "crystals", - "dacquiri", "daiquiri", - "daimonds", "diamonds", - "dangeros", "dangers", - "dangerus", "dangers", - "darkenss", "darkness", - "darnkess", "darkness", - "dashboad", "dashboard", - "daugther", "daughter", - "deadlfit", "deadlift", - "deadlifs", "deadlifts", - "deafauts", "defaults", - "deafeted", "defeated", - "deafults", "defaults", - "dealying", "delaying", - "deamenor", "demeanor", - "deathcat", "deathmatch", - "debuffes", "debuffs", - "debufffs", "debuffs", - "decalred", "declared", - "decalres", "declares", - "decembre", "december", - "decidely", "decidedly", - "decieved", "deceived", - "decifits", "deficits", - "decipted", "depicted", - "declears", "declares", - "declinig", "declining", - "decmeber", "december", - "decribed", "described", - "decribes", "describes", - "dedicato", "dedication", - "deductie", "deductible", - "defautls", "defaults", - "defectos", "defects", - "defectus", "defects", - "defendas", "defends", - "defendes", "defenders", - "defendis", "defends", - "defendre", "defender", - "defendrs", "defends", - "defensea", "defenseman", - "defensen", "defenseman", - "defensie", "defensive", - "defetead", "defeated", - "deffined", "defined", - "deficiet", "deficient", - "definate", "definite", - "definaty", "definately", - "definety", "definetly", - "definito", "definition", - "definitv", "definitive", - "deflatin", "deflation", - "deflecto", "deflection", - "defualts", "defaults", - "degarded", "degraded", - "degenere", "degenerate", - "degraged", "degrade", - "degrated", "degrade", - "deisgned", "designed", - "deisgner", "designer", - "dekstops", "desktops", - "delcared", "declared", - "delcares", "declares", - "delepted", "depleted", - "delivere", "deliveries", - "delpeted", "depleted", - "delpoyed", "deployed", - "delyaing", "delaying", - "demandas", "demands", - "demandes", "demands", - "demenaor", "demeanor", - "democray", "democracy", - "demolito", "demolition", - "denseley", "densely", - "densitiy", "density", - 
"deomcrat", "democrat", - "deovtion", "devotion", - "departer", "departure", - "departue", "departure", - "depcited", "depicted", - "depelted", "depleted", - "dependat", "dependant", - "depictes", "depicts", - "depictin", "depictions", - "depolyed", "deployed", - "depositd", "deposited", - "depostis", "deposits", - "depresse", "depressive", - "depresso", "depression", - "derivate", "derivative", - "descened", "descend", - "descibed", "described", - "descirbe", "describe", - "descrise", "describes", - "desgined", "designed", - "desginer", "designer", - "desicive", "decisive", - "designad", "designated", - "designes", "designs", - "designet", "designated", - "desinged", "designed", - "desinger", "designer", - "desitned", "destined", - "desktiop", "desktop", - "desorder", "disorder", - "despides", "despised", - "despiste", "despise", - "destiney", "destiny", - "destinty", "destiny", - "destkops", "desktops", - "destorys", "destroys", - "destrose", "destroyers", - "destroyd", "destroyed", - "destroyr", "destroyers", - "detalied", "detailed", - "detectas", "detects", - "detectes", "detects", - "detectie", "detectives", - "determen", "determines", - "devasted", "devastated", - "develope", "develop", - "devialet", "deviate", - "deviatie", "deviate", - "devilers", "delivers", - "devloved", "devolved", - "devovled", "devolved", - "diaganol", "diagonal", - "diagnoal", "diagonal", - "diagnoes", "diagnose", - "diagnosi", "diagnostic", - "diagonse", "diagnose", - "diahrrea", "diarrhea", - "dialetcs", "dialects", - "dialgoue", "dialogue", - "dialouge", "dialogue", - "diarreah", "diarrhea", - "diarreha", "diarrhea", - "dichtomy", "dichotomy", - "dickisch", "dickish", - "dicovers", "discovers", - "dicovery", "discovery", - "dicussed", "discussed", - "diferent", "different", - "differnt", "different", - "difficut", "difficulty", - "diffrent", "different", - "diganose", "diagnose", - "dignitiy", "dignity", - "dimaonds", "diamonds", - "dinasour", "dinosaur", - "dinosaus", "dinosaurs", - "dinosuar", "dinosaur", - "dinsoaur", "dinosaur", - "dionsaur", "dinosaur", - "diphtong", "diphthong", - "diplomma", "diploma", - "dipthong", "diphthong", - "direclty", "directly", - "directin", "directions", - "directix", "directx", - "directos", "directors", - "directoy", "directory", - "directrx", "directx", - "dirfting", "drifting", - "disabeld", "disabled", - "disabels", "disables", - "disagred", "disagreed", - "disagres", "disagrees", - "disbaled", "disabled", - "disbales", "disables", - "disbelif", "disbelief", - "dischard", "discharged", - "dischare", "discharged", - "discound", "discounted", - "discoure", "discourse", - "discoved", "discovered", - "discreto", "discretion", - "discribe", "describe", - "disentry", "dysentery", - "disgiuse", "disguise", - "dishoner", "dishonored", - "dishonet", "dishonesty", - "dislikse", "dislikes", - "dismante", "dismantle", - "dismisse", "dismissive", - "disolved", "dissolved", - "dispacth", "dispatch", - "dispalys", "displays", - "dispence", "dispense", - "dispersa", "dispensary", - "displayd", "displayed", - "disposle", "dispose", - "disposte", "dispose", - "dispoves", "dispose", - "disptach", "dispatch", - "disricts", "districts", - "dissovle", "dissolve", - "distates", "distaste", - "distatse", "distaste", - "disticnt", "distinct", - "distorto", "distortion", - "distrcit", "district", - "districs", "districts", - "disturbd", "disturbed", - "disupted", "disputed", - "disuptes", "disputes", - "diversed", "diverse", - "diversiy", "diversify", - "dividens", "dividends", - "divintiy", 
"divinity", - "divisons", "divisions", - "doapmine", "dopamine", - "docrines", "doctrines", - "docrtine", "doctrine", - "doctines", "doctrines", - "doctirne", "doctrine", - "doctrins", "doctrines", - "dogamtic", "dogmatic", - "dolhpins", "dolphins", - "domapine", "dopamine", - "domecrat", "democrat", - "domiante", "dominate", - "dominato", "domination", - "dominats", "dominates", - "dominent", "dominant", - "dominoin", "dominion", - "donwload", "download", - "donwvote", "downvote", - "doomdsay", "doomsday", - "doosmday", "doomsday", - "doplhins", "dolphins", - "dopmaine", "dopamine", - "dormtund", "dortmund", - "dortumnd", "dortmund", - "dotrmund", "dortmund", - "douchely", "douchey", - "doucheus", "douches", - "dowloads", "downloads", - "downlaod", "download", - "downloas", "downloads", - "downstar", "downstairs", - "downvore", "downvoters", - "downvotr", "downvoters", - "downvots", "downvotes", - "draculea", "dracula", - "draculla", "dracula", - "dragones", "dragons", - "dragonus", "dragons", - "drfiting", "drifting", - "driectly", "directly", - "drifitng", "drifting", - "driveris", "drivers", - "drotmund", "dortmund", - "duaghter", "daughter", - "dumbbels", "dumbbells", - "dumptser", "dumpster", - "dumspter", "dumpster", - "dunegons", "dungeons", - "dungeoun", "dungeon", - "dungoens", "dungeons", - "dupicate", "duplicate", - "duplicas", "duplicates", - "dwarvens", "dwarves", - "dyanmics", "dynamics", - "dyanmite", "dynamite", - "dymanics", "dynamics", - "dymanite", "dynamite", - "dynastry", "dynasty", - "dysentry", "dysentery", - "dysphora", "dysphoria", - "earilest", "earliest", - "eatswood", "eastwood", - "eceonomy", "economy", - "ecidious", "deciduous", - "ecologia", "ecological", - "ecomonic", "economic", - "ecstacys", "ecstasy", - "ecstascy", "ecstasy", - "ecstasty", "ecstasy", - "ectastic", "ecstatic", - "editoras", "editors", - "editores", "editors", - "efficent", "efficient", - "egpytian", "egyptian", - "egyptain", "egyptian", - "egytpian", "egyptian", - "ehtereal", "ethereal", - "ehternet", "ethernet", - "eigtheen", "eighteen", - "electhor", "electro", - "electorn", "electron", - "elementy", "elementary", - "elephans", "elephants", - "elevatin", "elevation", - "elicided", "elicited", - "eligable", "eligible", - "elimiate", "eliminate", - "eliminas", "eliminates", - "elitisim", "elitism", - "elitistm", "elitism", - "ellected", "elected", - "embarass", "embarrass", - "embargos", "embargoes", - "embarras", "embarrass", - "embassay", "embassy", - "embassey", "embassy", - "embasssy", "embassy", - "emergend", "emerged", - "emergerd", "emerged", - "eminated", "emanated", - "emminent", "eminent", - "emmisary", "emissary", - "emmision", "emission", - "emmiting", "emitting", - "emmitted", "emitted", - "empathie", "empathize", - "empirial", "empirical", - "emulatin", "emulation", - "enahnces", "enhances", - "enchanct", "enchant", - "encolsed", "enclosed", - "endanged", "endangered", - "endevors", "endeavors", - "endevour", "endeavour", - "endlessy", "endlessly", - "endorces", "endorse", - "engeneer", "engineer", - "engeries", "energies", - "engineed", "engineered", - "engrames", "engrams", - "engramms", "engrams", - "enigneer", "engineer", - "enitrely", "entirely", - "enlcosed", "enclosed", - "enlsaved", "enslaved", - "ensalved", "enslaved", - "enterity", "entirety", - "entierly", "entirely", - "entierty", "entirety", - "entilted", "entitled", - "entirley", "entirely", - "entiteld", "entitled", - "entitity", "entity", - "entropay", "entropy", - "entrophy", "entropy", - "ephipany", 
"epiphany", - "epihpany", "epiphany", - "epilespy", "epilepsy", - "epilgoue", "epilogue", - "episdoes", "episodes", - "epitomie", "epitome", - "epliepsy", "epilepsy", - "epliogue", "epilogue", - "epsiodes", "episodes", - "epsresso", "espresso", - "eqaulity", "equality", - "eqaution", "equation", - "equailty", "equality", - "eraticly", "erratically", - "erroneos", "erroneous", - "errupted", "erupted", - "escalato", "escalation", - "esctatic", "ecstatic", - "esential", "essential", - "esitmate", "estimate", - "esperate", "seperate", - "esportes", "esports", - "estiamte", "estimate", - "estoeric", "esoteric", - "estonija", "estonia", - "estoniya", "estonia", - "etherael", "ethereal", - "etherent", "ethernet", - "ethicaly", "ethically", - "etiquete", "etiquette", - "etrailer", "retailer", - "eugencis", "eugenics", - "eugneics", "eugenics", - "euhporia", "euphoria", - "euhporic", "euphoric", - "euorpean", "european", - "euphoira", "euphoria", - "euphroia", "euphoria", - "euphroic", "euphoric", - "europian", "european", - "eurpoean", "european", - "evangers", "avengers", - "everyons", "everyones", - "evidencd", "evidenced", - "evidende", "evidenced", - "evloving", "evolving", - "evolveds", "evolves", - "evolveos", "evolves", - "evovling", "evolving", - "excecute", "execute", - "excedded", "exceeded", - "excelent", "excellent", - "exceptin", "exceptions", - "excerise", "exercise", - "excisted", "existed", - "exclusie", "exclusives", - "exculded", "excluded", - "exculdes", "excludes", - "exection", "execution", - "exectued", "executed", - "executie", "executive", - "executin", "execution", - "exellent", "excellent", - "exerbate", "exacerbate", - "exercide", "exercised", - "exercies", "exercise", - "exersice", "exercise", - "exersize", "exercise", - "exhalted", "exalted", - "exhaustn", "exhaustion", - "exhausto", "exhaustion", - "exicting", "exciting", - "exisitng", "existing", - "existane", "existance", - "existant", "existent", - "existend", "existed", - "exlcuded", "excluded", - "exlcudes", "excludes", - "exlporer", "explorer", - "exoticas", "exotics", - "exoticos", "exotics", - "expalins", "explains", - "expandas", "expands", - "expandes", "expands", - "expansie", "expansive", - "expectes", "expects", - "expectus", "expects", - "expedito", "expedition", - "expences", "expense", - "expensie", "expense", - "expensve", "expense", - "expertas", "experts", - "expertis", "experts", - "expertos", "experts", - "expireds", "expires", - "explaind", "explained", - "explaing", "explaining", - "expliots", "exploits", - "explodie", "explode", - "exploint", "exploit", - "explosie", "explosive", - "explosin", "explosions", - "exploted", "explode", - "expoldes", "explodes", - "expolits", "exploits", - "exportas", "exports", - "exportes", "exports", - "exportfs", "exports", - "exposees", "exposes", - "exposito", "exposition", - "expresse", "expressive", - "expresss", "expresses", - "expressy", "expressly", - "exressed", "expressed", - "exsitent", "existent", - "exsiting", "existing", - "extactly", "exactly", - "extemely", "extremely", - "extendes", "extends", - "extendos", "extends", - "extenion", "extension", - "extensie", "extensive", - "extensis", "extensions", - "extortin", "extortion", - "extracto", "extraction", - "extreems", "extremes", - "extremly", "extremely", - "eygptian", "egyptian", - "faboulus", "fabulous", - "fabricas", "fabrics", - "fabrices", "fabrics", - "fabricus", "fabrics", - "faceplam", "facepalm", - "facilisi", "facilities", - "faciltiy", "facility", - "facsists", "fascists", - "factores", 
"factors", - "factorys", "factors", - "factualy", "factually", - "faggotts", "faggots", - "faggotus", "faggots", - "falcones", "falcons", - "falgship", "flagship", - "faliures", "failures", - "falseley", "falsely", - "falshing", "flashing", - "falvored", "flavored", - "falvours", "flavours", - "familair", "familiar", - "famoulsy", "famously", - "fanatism", "fanaticism", - "fanatsic", "fanatics", - "fanserve", "fanservice", - "fantasty", "fantasy", - "farcking", "fracking", - "fascisim", "fascism", - "fashiond", "fashioned", - "fasicsts", "fascists", - "fatigure", "fatigue", - "favorits", "favorites", - "favourie", "favourites", - "feasable", "feasible", - "feasbile", "feasible", - "febraury", "february", - "februray", "february", - "feburary", "february", - "fedility", "fidelity", - "fedorahs", "fedoras", - "fedorans", "fedoras", - "feilding", "fielding", - "feisable", "feasible", - "feitshes", "fetishes", - "feltcher", "fletcher", - "felxible", "flexible", - "feminint", "femininity", - "feminsim", "feminism", - "feromone", "pheromone", - "fesiable", "feasible", - "festivas", "festivals", - "festivle", "festive", - "fictious", "fictitious", - "fideling", "fielding", - "fideltiy", "fidelity", - "fiedling", "fielding", - "fiedlity", "fidelity", - "fighitng", "fighting", - "figthing", "fighting", - "fileding", "fielding", - "fimilies", "families", - "finacial", "financial", - "fineshes", "finesse", - "fingersi", "fingertips", - "finnisch", "finnish", - "finsihes", "finishes", - "firebals", "fireballs", - "firendly", "friendly", - "firmwear", "firmware", - "firwmare", "firmware", - "flaghsip", "flagship", - "flamable", "flammable", - "flasghip", "flagship", - "flatterd", "flattered", - "flatteur", "flatter", - "flattire", "flatter", - "flavores", "flavors", - "flechter", "fletcher", - "flecther", "fletcher", - "flemmish", "flemish", - "flethcer", "fletcher", - "flexbile", "flexible", - "flexibel", "flexible", - "flippade", "flipped", - "flitered", "filtered", - "florecen", "florence", - "floridia", "florida", - "floruide", "fluoride", - "floruish", "flourish", - "flourine", "fluorine", - "floursih", "flourish", - "fluorish", "flourish", - "fluroide", "fluoride", - "folowing", "following", - "fontrier", "fontier", - "forasken", "forsaken", - "forbiden", "forbidden", - "foreamrs", "forearms", - "foreksin", "foreskin", - "forenics", "forensic", - "forenisc", "forensic", - "foresnic", "forensic", - "foreward", "foreword", - "foricbly", "forcibly", - "forigven", "forgiven", - "formatin", "formation", - "formelly", "formerly", - "formuals", "formulas", - "fornesic", "forensic", - "forresst", "forrest", - "forsekan", "forsaken", - "forsekin", "foreskin", - "forsenic", "forensic", - "forskaen", "forsaken", - "forsting", "frosting", - "fortitue", "fortitude", - "fortunae", "fortune", - "fortunte", "fortune", - "forumlas", "formulas", - "forunner", "forerunner", - "fossiles", "fossils", - "fossilis", "fossils", - "foundary", "foundry", - "fountian", "fountain", - "fourties", "forties", - "fowrards", "forwards", - "frackign", "fracking", - "framgent", "fragment", - "franches", "franchise", - "franchie", "franchises", - "franciso", "francisco", - "frankiln", "franklin", - "franlkin", "franklin", - "freckels", "freckles", - "freindly", "friendly", - "frequeny", "frequency", - "friendle", "friendlies", - "friendsi", "friendlies", - "frimware", "firmware", - "frogiven", "forgiven", - "frointer", "frontier", - "fromerly", "formerly", - "froniter", "frontier", - "fronteir", "frontier", - "frosaken", 
"forsaken", - "frutcose", "fructose", - "fucntion", "function", - "fufilled", "fulfilled", - "fulfiled", "fulfilled", - "fullfill", "fulfill", - "funciton", "function", - "fundirse", "fundies", - "funniliy", "funnily", - "funnilly", "funnily", - "furctose", "fructose", - "furition", "fruition", - "furuther", "further", - "futurers", "futures", - "futureus", "futures", - "gamemdoe", "gamemode", - "gamepaly", "gameplay", - "gamergat", "gamertag", - "gammeode", "gamemode", - "ganerate", "generate", - "garantee", "guarantee", - "gardient", "gradient", - "garfeild", "garfield", - "garfiled", "garfield", - "garflied", "garfield", - "garnison", "garrison", - "garrions", "garrison", - "garriosn", "garrison", - "garrsion", "garrison", - "gatherig", "gatherings", - "gauarana", "guaraná", - "gauntelt", "gauntlet", - "gauntles", "gauntlets", - "gaurdian", "guardian", - "gaurding", "guarding", - "gautnlet", "gauntlet", - "gemoetry", "geometry", - "generaly", "generally", - "generase", "generates", - "generats", "generates", - "genialia", "genitalia", - "genisues", "geniuses", - "genitala", "genitalia", - "genrates", "generates", - "gentials", "genitals", - "gentlemn", "gentlemen", - "genuises", "geniuses", - "geograpy", "geography", - "geomerty", "geometry", - "geomtery", "geometry", - "germanos", "germans", - "germanus", "germans", - "gernades", "grenades", - "giagbyte", "gigabyte", - "gigabtye", "gigabyte", - "gigaybte", "gigabyte", - "gigbayte", "gigabyte", - "gignatic", "gigantic", - "giltched", "glitched", - "giltches", "glitches", - "girafffe", "giraffe", - "girefing", "griefing", - "girlling", "grilling", - "gladiatr", "gladiator", - "glichted", "glitched", - "glichtes", "glitches", - "glicthed", "glitched", - "glicthes", "glitches", - "glitchey", "glitchy", - "glitchly", "glitchy", - "glitchty", "glitchy", - "glithced", "glitched", - "glithces", "glitches", - "gloablly", "globally", - "glodberg", "goldberg", - "glodfish", "goldfish", - "gloriuos", "glorious", - "gltiched", "glitched", - "gltiches", "glitches", - "gmaertag", "gamertag", - "goblings", "goblins", - "goddammn", "goddamn", - "goddammt", "goddammit", - "godesses", "goddesses", - "godlberg", "goldberg", - "godlfish", "goldfish", - "godounov", "godunov", - "godpseed", "godspeed", - "godspede", "godspeed", - "goldifsh", "goldfish", - "gonewidl", "gonewild", - "goodlcuk", "goodluck", - "goregous", "gorgeous", - "gorgoeus", "gorgeous", - "gorillia", "gorilla", - "gorillla", "gorilla", - "gospells", "gospels", - "gottleib", "gottlieb", - "gourmelt", "gourmet", - "gourment", "gourmet", - "gouvener", "governor", - "govement", "government", - "goverend", "governed", - "govermet", "goverment", - "governer", "governor", - "gradualy", "gradually", - "grafield", "garfield", - "grafitti", "graffiti", - "grahpics", "graphics", - "grahpite", "graphite", - "graident", "gradient", - "granolla", "granola", - "graphcis", "graphics", - "grapichs", "graphics", - "grappnel", "grapple", - "greandes", "grenades", - "greatful", "grateful", - "greeneer", "greener", - "greenhoe", "greenhouse", - "greenlad", "greenland", - "greenore", "greener", - "greusome", "gruesome", - "grieifng", "griefing", - "grifeing", "griefing", - "grizzlay", "grizzly", - "grizzley", "grizzly", - "grpahics", "graphics", - "grpahite", "graphite", - "gruseome", "gruesome", - "guantano", "guantanamo", - "guardain", "guardian", - "guardias", "guardians", - "guaridan", "guardian", - "guerrila", "guerrilla", - "guidence", "guidance", - "guiseppe", "giuseppe", - "guitards", "guitars", - 
"guitares", "guitars", - "guitarit", "guitarist", - "gullbile", "gullible", - "gunanine", "guanine", - "guniness", "guinness", - "gunniess", "guinness", - "guradian", "guardian", - "gurading", "guarding", - "gurantee", "guarantee", - "guresome", "gruesome", - "guttaral", "guttural", - "gutteral", "guttural", - "hacthing", "hatching", - "hafltime", "halftime", - "haircuit", "haircut", - "halfitme", "halftime", - "hallowen", "halloween", - "hamburgr", "hamburgers", - "hamitlon", "hamilton", - "hamliton", "hamilton", - "handcufs", "handcuffs", - "handeldy", "handedly", - "handlade", "handled", - "handlare", "handler", - "handledy", "handedly", - "hannbial", "hannibal", - "haording", "hoarding", - "hapening", "happening", - "happends", "happens", - "happenes", "happens", - "happilly", "happily", - "harldine", "hardline", - "harrased", "harassed", - "harrases", "harasses", - "hatchign", "hatching", - "hatesink", "heatsink", - "hathcing", "hatching", - "headachs", "headaches", - "headests", "headsets", - "headhsot", "headshot", - "headseat", "headset", - "healthit", "healthiest", - "heastink", "heatsink", - "heathern", "heathen", - "heatskin", "heatsink", - "heaviliy", "heavily", - "heavilly", "heavily", - "heavnely", "heavenly", - "hedeghog", "hedgehog", - "hegdehog", "hedgehog", - "heighest", "heights", - "heighted", "heightened", - "heirachy", "hierarchy", - "heistant", "hesitant", - "heistate", "hesitate", - "hellifre", "hellfire", - "helluvva", "helluva", - "helpfull", "helpful", - "heratige", "heritage", - "herclues", "hercules", - "heridity", "heredity", - "heroicas", "heroics", - "heroices", "heroics", - "heroicos", "heroics", - "heroicus", "heroics", - "hertiage", "heritage", - "herucles", "hercules", - "hestiant", "hesitant", - "hestiate", "hesitate", - "heveanly", "heavenly", - "hierachy", "hierarchy", - "hierarcy", "hierarchy", - "highlane", "highlander", - "hindiusm", "hinduism", - "hindusim", "hinduism", - "hinudism", "hinduism", - "hiptsers", "hipsters", - "hispanis", "hispanics", - "hispters", "hipsters", - "histroic", "historic", - "hodlings", "holdings", - "hoenstly", "honestly", - "hoildays", "holidays", - "holdiays", "holidays", - "hollywod", "hollywood", - "homeword", "homeworld", - "homineim", "hominem", - "homineum", "hominem", - "honeslty", "honestly", - "honeymon", "honeymoon", - "honsetly", "honestly", - "hopefuly", "hopefully", - "hopkings", "hopkins", - "hopsital", "hospital", - "horading", "hoarding", - "horzions", "horizons", - "hosptial", "hospital", - "hosteles", "hostels", - "hostiliy", "hostility", - "hotshoot", "hotshot", - "hotsport", "hotspot", - "hsyteria", "hysteria", - "htaching", "hatching", - "htiboxes", "hitboxes", - "huanting", "haunting", - "humaniod", "humanoid", - "humanite", "humanities", - "humantiy", "humanity", - "humerous", "humorous", - "huminoid", "humanoid", - "humitidy", "humidity", - "humoural", "humoral", - "humouros", "humorous", - "humurous", "humorous", - "hunderds", "hundreds", - "hundread", "hundred", - "hungarin", "hungarian", - "huntmsan", "huntsman", - "hutnsman", "huntsman", - "hybrides", "hybrids", - "hybridus", "hybrids", - "hydorgen", "hydrogen", - "hydratin", "hydration", - "hydregon", "hydrogen", - "hygience", "hygiene", - "hygienne", "hygiene", - "hyperbel", "hyperbole", - "hypocrit", "hypocrite", - "hyponsis", "hypnosis", - "hyrdogen", "hydrogen", - "icefrong", "icefrog", - "icelings", "ceilings", - "idaeidae", "idea", - "idealogy", "ideology", - "idealsim", "idealism", - "idenfity", "identify", - "idenitfy", "identify", - 
"identite", "identities", - "ideologe", "ideologies", - "illiegal", "illegal", - "illinios", "illinois", - "illionis", "illinois", - "illnesss", "illnesses", - "illumini", "illuminati", - "illustre", "illustrate", - "illution", "illusion", - "ilogical", "illogical", - "ilterate", "literate", - "imapired", "impaired", - "imgrants", "migrants", - "imigrant", "emigrant", - "immboile", "immobile", - "immenint", "imminent", - "immersie", "immerse", - "immersve", "immerse", - "immitate", "imitate", - "immoblie", "immobile", - "immortas", "immortals", - "impactes", "impacts", - "impactos", "impacts", - "imparied", "impaired", - "imperavi", "imperative", - "imperfet", "imperfect", - "implemet", "implements", - "implosed", "implode", - "impluses", "impulses", - "imporper", "improper", - "importas", "imports", - "importen", "importance", - "importes", "imports", - "imporved", "improved", - "imporves", "improves", - "impropre", "improper", - "improted", "imported", - "improvie", "improvised", - "impusles", "impulses", - "imrpoved", "improved", - "imrpoves", "improves", - "inbetwen", "inbetween", - "inclince", "incline", - "inclinde", "incline", - "includng", "including", - "incorect", "incorrect", - "incuding", "including", - "inculded", "included", - "indianas", "indians", - "indiands", "indians", - "indiania", "indiana", - "indianna", "indiana", - "indianos", "indians", - "indicato", "indication", - "indicats", "indicators", - "indonesa", "indonesia", - "indulgue", "indulge", - "infantis", "infants", - "infantus", "infants", - "infarred", "infrared", - "infectin", "infections", - "infermon", "inferno", - "infiltre", "infiltrate", - "infintie", "infinite", - "infintiy", "infinity", - "inflatie", "inflate", - "influens", "influences", - "informas", "informs", - "informis", "informs", - "infromal", "informal", - "infromed", "informed", - "ingenius", "ingenious", - "ingition", "ignition", - "ingorant", "ignorant", - "inheriet", "inherit", - "inherint", "inherit", - "inhumaan", "inhuman", - "inhumain", "inhuman", - "inifnite", "infinite", - "inifnity", "infinity", - "inisghts", "insights", - "initails", "initials", - "initaite", "initiate", - "initaled", "initialed", - "initally", "initially", - "initialy", "initially", - "initmacy", "intimacy", - "initmate", "intimate", - "injustie", "injustices", - "inlcuded", "included", - "inlcudes", "includes", - "innocens", "innocents", - "innocuos", "innocuous", - "innvoate", "innovate", - "inocence", "innocence", - "inpolite", "impolite", - "inpsired", "inspired", - "inquirey", "inquiry", - "inquirie", "inquire", - "inquiriy", "inquiry", - "inrested", "inserted", - "insanley", "insanely", - "insectes", "insects", - "insectos", "insects", - "insertas", "inserts", - "insertes", "inserts", - "insertos", "inserts", - "insidios", "insidious", - "insigths", "insights", - "insipred", "inspired", - "insipres", "inspires", - "insistas", "insists", - "insistes", "insists", - "insistis", "insists", - "insmonia", "insomnia", - "insomina", "insomnia", - "insonmia", "insomnia", - "inspried", "inspired", - "inspries", "inspires", - "instanse", "instances", - "instanty", "instantly", - "instered", "inserted", - "insticnt", "instinct", - "instincs", "instincts", - "institue", "institute", - "insultas", "insults", - "insultes", "insults", - "insultos", "insults", - "intamicy", "intimacy", - "intamite", "intimate", - "intendes", "intends", - "intendos", "intends", - "intentas", "intents", - "intented", "intended", - "interace", "interacted", - "interacs", "interacts", - 
"interect", "interacted", - "interent", "internet", - "interese", "interested", - "interfce", "interface", - "intergal", "integral", - "internts", "interns", - "internus", "interns", - "interpet", "interpret", - "interrim", "interim", - "interste", "interstate", - "interupt", "interrupt", - "intevene", "intervene", - "intially", "initially", - "intiials", "initials", - "intimaty", "intimately", - "intimide", "intimidate", - "intregal", "integral", - "intriuge", "intrigue", - "introdue", "introduces", - "introdus", "introduces", - "introvet", "introvert", - "intruige", "intrigue", - "intutive", "intuitive", - "inudstry", "industry", - "inventer", "inventor", - "invertes", "inverse", - "invincil", "invincible", - "invitato", "invitation", - "invloved", "involved", - "invloves", "involves", - "invovled", "involved", - "invovles", "involves", - "iranains", "iranians", - "iraninas", "iranians", - "iritable", "irritable", - "iritated", "irritated", - "ironicly", "ironically", - "irritato", "irritation", - "isalmist", "islamist", - "isarelis", "israelis", - "islamits", "islamist", - "islamsit", "islamist", - "islandes", "islanders", - "ismalist", "islamist", - "isntalls", "installs", - "isolatie", "isolate", - "israelli", "israeli", - "israleis", "israelis", - "isralies", "israelis", - "isrealis", "israelis", - "issueing", "issuing", - "italains", "italians", - "jaguards", "jaguars", - "jaguares", "jaguars", - "jailbrek", "jailbreak", - "jaimacan", "jamaican", - "jamacain", "jamaican", - "jamaicia", "jamaica", - "jamiacan", "jamaican", - "januaray", "january", - "janurary", "january", - "jeapardy", "jeopardy", - "jefferry", "jeffery", - "jefferty", "jeffery", - "jennigns", "jennings", - "jeoprady", "jeopardy", - "jepoardy", "jeopardy", - "jerusalm", "jerusalem", - "jewelrey", "jewelry", - "jewllery", "jewellery", - "joanthan", "jonathan", - "joepardy", "jeopardy", - "johanine", "johannine", - "jonatahn", "jonathan", - "journaal", "journal", - "journied", "journeyed", - "journies", "journeys", - "joysitck", "joystick", - "juadaism", "judaism", - "judaisim", "judaism", - "judgemet", "judgements", - "juducial", "judicial", - "jugnling", "jungling", - "junglign", "jungling", - "junlging", "jungling", - "justifiy", "justify", - "juveline", "juvenile", - "juvenlie", "juvenile", - "katemine", "ketamine", - "kennedey", "kennedy", - "ketmaine", "ketamine", - "keybaord", "keyboard", - "keyboars", "keyboards", - "keyborad", "keyboard", - "keychian", "keychain", - "kicthens", "kitchens", - "kindgoms", "kingdoms", - "kittiens", "kitties", - "knockbak", "knockback", - "knowlege", "knowledge", - "knuckels", "knuckles", - "koreanos", "koreans", - "kunckles", "knuckles", - "kurdisch", "kurdish", - "labatory", "lavatory", - "labenese", "lebanese", - "laboraty", "laboratory", - "laguages", "languages", - "landscae", "landscapes", - "langauge", "language", - "lanucher", "launcher", - "lanuches", "launches", - "laodouts", "loadouts", - "larwence", "lawrence", - "lasagnea", "lasagna", - "lasagnia", "lasagna", - "laucnhed", "launched", - "laucnher", "launcher", - "laucnhes", "launches", - "laundrey", "laundry", - "lawernce", "lawrence", - "lazyness", "laziness", - "leaglize", "legalize", - "lecteurs", "lectures", - "lecutres", "lectures", - "lefitsts", "leftists", - "leftsits", "leftists", - "legenday", "legendary", - "legionis", "legions", - "legitimt", "legitimate", - "lengthes", "lengths", - "lengthly", "lengthy", - "lentiles", "lentils", - "lentills", "lentils", - "lesbains", "lesbians", - "lesibans", "lesbians", - 
"levander", "lavender", - "levelign", "leveling", - "levetate", "levitate", - "leviathn", "leviathan", - "levleing", "leveling", - "liberato", "liberation", - "libertae", "liberate", - "libertea", "liberate", - "librarse", "libraries", - "licencie", "licence", - "licencse", "licence", - "liebrals", "liberals", - "liekable", "likeable", - "lifepsan", "lifespan", - "lifestel", "lifesteal", - "lifestye", "lifestyle", - "lighitng", "lighting", - "lightnig", "lightning", - "lightres", "lighters", - "lightrom", "lightroom", - "ligthers", "lighters", - "ligthing", "lighting", - "likebale", "likeable", - "limitant", "militant", - "limitato", "limitation", - "lincolin", "lincoln", - "lincolon", "lincoln", - "lineupes", "lineups", - "lingeire", "lingerie", - "lingiere", "lingerie", - "linnaena", "linnaean", - "lipstics", "lipsticks", - "liquidas", "liquids", - "liquides", "liquids", - "liquidos", "liquids", - "liscense", "license", - "lisenced", "silenced", - "listenes", "listens", - "listents", "listens", - "listners", "listeners", - "litature", "literature", - "litecion", "litecoin", - "liteicon", "litecoin", - "literaly", "literally", - "lithuana", "lithuania", - "litigato", "litigation", - "liverpol", "liverpool", - "locagion", "location", - "logtiech", "logitech", - "longitme", "longtime", - "longtiem", "longtime", - "looseley", "loosely", - "loreplay", "roleplay", - "luanched", "launched", - "luancher", "launcher", - "luanches", "launches", - "lubricat", "lubricant", - "lucifear", "lucifer", - "luckilly", "luckily", - "macarino", "macaroni", - "machiens", "machines", - "mackeral", "mackerel", - "macthups", "matchups", - "magasine", "magazine", - "magazins", "magazines", - "magentic", "magnetic", - "magicain", "magician", - "magisine", "magazine", - "magizine", "magazine", - "magnetis", "magnets", - "magnited", "magnitude", - "magnitue", "magnitude", - "mainfest", "manifest", - "maintian", "maintain", - "majoroty", "majority", - "makrsman", "marksman", - "malariya", "malaria", - "malasiya", "malaysia", - "malasyia", "malaysia", - "malayisa", "malaysia", - "malyasia", "malaysia", - "mamalian", "mammalian", - "manadrin", "mandarin", - "manaully", "manually", - "mandaste", "mandates", - "mandrain", "mandarin", - "mandrian", "mandarin", - "maneveur", "maneuver", - "manevuer", "maneuver", - "manfiest", "manifest", - "mangetic", "magnetic", - "manglade", "mangled", - "manifeso", "manifesto", - "manipule", "manipulate", - "manouver", "maneuver", - "manuales", "manuals", - "manuever", "maneuver", - "maraconi", "macaroni", - "maradeur", "marauder", - "maraduer", "marauder", - "maragret", "margaret", - "marbleds", "marbles", - "margerat", "margaret", - "margines", "margins", - "margings", "margins", - "marginis", "margins", - "marignal", "marginal", - "marilyin", "marilyn", - "marinens", "marines", - "markedet", "marketed", - "markeras", "markers", - "markerts", "markers", - "marniers", "mariners", - "marraige", "marriage", - "marryied", "married", - "marskman", "marksman", - "maruader", "marauder", - "marvelos", "marvelous", - "marxisim", "marxism", - "mascarra", "mascara", - "massacer", "massacre", - "massarce", "massacre", - "massasge", "massages", - "masscare", "massacre", - "masteris", "masteries", - "masturbe", "masturbate", - "materias", "materials", - "mathcups", "matchups", - "mathewes", "mathews", - "matieral", "material", - "matterss", "mattress", - "mauarder", "marauder", - "maximini", "maximizing", - "mayalsia", "malaysia", - "maybelle", "maybelline", - "maylasia", "malaysia", - "mccarhty", 
"mccarthy", - "mcgergor", "mcgregor", - "mchanics", "mechanics", - "mclarean", "mclaren", - "mcreggor", "mcgregor", - "meagtron", "megatron", - "meancing", "menacing", - "meaninng", "meaning", - "meatbals", "meatballs", - "mecahnic", "mechanic", - "mechanim", "mechanism", - "mechanis", "mechanics", - "medacine", "medicine", - "medatite", "meditate", - "medeival", "medieval", - "medevial", "medieval", - "mediavel", "medieval", - "medicaly", "medically", - "mediciad", "medicaid", - "medicins", "medicines", - "medicore", "mediocre", - "medievel", "medieval", - "mediocer", "mediocre", - "mediocry", "mediocrity", - "mediorce", "mediocre", - "meditato", "meditation", - "mediveal", "medieval", - "medoicre", "mediocre", - "meerkrat", "meerkat", - "megatorn", "megatron", - "meidcare", "medicare", - "meixcans", "mexicans", - "melboure", "melbourne", - "meltodwn", "meltdown", - "memoriez", "memorize", - "mencaing", "menacing", - "menstrul", "menstrual", - "mentiong", "mentioning", - "meoldies", "melodies", - "merchans", "merchants", - "mercurcy", "mercury", - "mercurey", "mercury", - "merficul", "merciful", - "merhcant", "merchant", - "mericful", "merciful", - "messgaed", "messaged", - "messiach", "messiah", - "metagaem", "metagame", - "metahpor", "metaphor", - "metamage", "metagame", - "methapor", "metaphor", - "metldown", "meltdown", - "metricas", "metrics", - "metrices", "metrics", - "metropos", "metropolis", - "mexcians", "mexicans", - "mexicain", "mexican", - "mhytical", "mythical", - "michagan", "michigan", - "michgian", "michigan", - "microtax", "microatx", - "microwae", "microwaves", - "midfeild", "midfield", - "midfiled", "midfield", - "midifeld", "midfield", - "migrains", "migraines", - "migriane", "migraine", - "milennia", "millennia", - "miligram", "milligram", - "miliitas", "militias", - "miliraty", "military", - "militais", "militias", - "millenia", "millennia", - "millenna", "millennia", - "miltiant", "militant", - "minature", "miniature", - "mindcrak", "mindcrack", - "minerial", "mineral", - "mingiame", "minigame", - "minimage", "minigame", - "minimals", "minimalist", - "minimalt", "minimalist", - "minimini", "minimizing", - "minimium", "minimum", - "miniscue", "miniscule", - "minsiter", "minister", - "minsitry", "ministry", - "miraculu", "miraculous", - "miralces", "miracles", - "mircales", "miracles", - "mircoatx", "microatx", - "mirgaine", "migraine", - "mirorred", "mirrored", - "misnadry", "misandry", - "misogynt", "misogynist", - "missigno", "mission", - "missiony", "missionary", - "misslies", "missiles", - "missorui", "missouri", - "misspeld", "misspelled", - "mistakey", "mistakenly", - "mistread", "mistreated", - "mobiltiy", "mobility", - "moderats", "moderates", - "modulair", "modular", - "moleculs", "molecules", - "momentos", "moments", - "momentus", "moments", - "monagomy", "monogamy", - "mongoles", "mongols", - "mongolos", "mongols", - "monitord", "monitored", - "monogmay", "monogamy", - "monolite", "monolithic", - "monologe", "monologue", - "monolopy", "monopoly", - "monoploy", "monopoly", - "monopols", "monopolies", - "monrachy", "monarchy", - "monstros", "monstrous", - "montaban", "montana", - "montains", "mountains", - "montanha", "montana", - "montania", "montana", - "montanna", "montana", - "montanta", "montana", - "montanya", "montana", - "montaran", "montana", - "monteize", "monetize", - "monteral", "montreal", - "montiors", "monitors", - "montnana", "montana", - "montypic", "monotypic", - "monumnet", "monument", - "moonligt", "moonlight", - "moprhine", "morphine", 
- "morbildy", "morbidly", - "mordibly", "morbidly", - "morevoer", "moreover", - "morhpine", "morphine", - "moribdly", "morbidly", - "mormones", "mormons", - "mormonts", "mormons", - "moroever", "moreover", - "morotola", "motorola", - "morphein", "morphine", - "morriosn", "morrison", - "morrocco", "morocco", - "morrsion", "morrison", - "mortards", "mortars", - "mortarts", "mortars", - "moruning", "mourning", - "mosnters", "monsters", - "mosqueto", "mosquitoes", - "mosquite", "mosquitoes", - "mosqutio", "mosquito", - "motoroal", "motorola", - "mounment", "monument", - "mounring", "mourning", - "mountian", "mountain", - "moustace", "moustache", - "movesped", "movespeed", - "mozillia", "mozilla", - "mozillla", "mozilla", - "msytical", "mystical", - "mucnhies", "munchies", - "mudering", "murdering", - "muffings", "muffins", - "muffinus", "muffins", - "mulitple", "multiple", - "mulitply", "multiply", - "multiplr", "multiplier", - "multipls", "multiples", - "mundance", "mundane", - "mundande", "mundane", - "muniches", "munchies", - "murderes", "murders", - "murderus", "murders", - "muscluar", "muscular", - "muscualr", "muscular", - "musicaly", "musically", - "musuclar", "muscular", - "mutliple", "multiple", - "mutliply", "multiply", - "myhtical", "mythical", - "mysitcal", "mystical", - "mysogyny", "misogyny", - "mysteris", "mysteries", - "mythraic", "mithraic", - "nagivate", "navigate", - "naopleon", "napoleon", - "napcakes", "pancakes", - "naploeon", "napoleon", - "napoelon", "napoleon", - "napolean", "napoleon", - "napoloen", "napoleon", - "narcissm", "narcissism", - "narcisst", "narcissist", - "narcotis", "narcotics", - "narwharl", "narwhal", - "naseuous", "nauseous", - "nashvile", "nashville", - "nasueous", "nauseous", - "natievly", "natively", - "nationas", "nationals", - "nationsl", "nationals", - "nativley", "natively", - "natuilus", "nautilus", - "naturaly", "naturally", - "naturels", "natures", - "naturely", "naturally", - "naturens", "natures", - "naturual", "natural", - "nauesous", "nauseous", - "naughtly", "naughty", - "nauitlus", "nautilus", - "nauseuos", "nauseous", - "nautiuls", "nautilus", - "nautlius", "nautilus", - "nautulis", "nautilus", - "naviagte", "navigate", - "navigato", "navigation", - "nazereth", "nazareth", - "necesary", "necessary", - "neckbead", "neckbeard", - "needlees", "needles", - "nefarios", "nefarious", - "negativy", "negativity", - "neglectn", "neglecting", - "neglible", "negligible", - "neigbour", "neighbour", - "neolitic", "neolithic", - "netboook", "netbook", - "neuronas", "neurons", - "neutraal", "neutral", - "neutralt", "neutrality", - "neutraly", "neutrality", - "newcaste", "newcastle", - "nickanme", "nickname", - "nickmane", "nickname", - "nieghbor", "neighbor", - "nightime", "nighttime", - "nightley", "nightly", - "nightlie", "nightlife", - "nihilsim", "nihilism", - "nilihism", "nihilism", - "nirtogen", "nitrogen", - "nirvanna", "nirvana", - "nitorgen", "nitrogen", - "niusance", "nuisance", - "noctrune", "nocturne", - "noctunre", "nocturne", - "nocturen", "nocturne", - "nominato", "nomination", - "nonsence", "nonsense", - "nonsesne", "nonsense", - "noramlly", "normally", - "norhtern", "northern", - "normalis", "normals", - "normalls", "normals", - "normalos", "normals", - "northeat", "northeast", - "northren", "northern", - "northwet", "northwest", - "norwegin", "norwegian", - "nostalga", "nostalgia", - "nostirls", "nostrils", - "notabley", "notably", - "notablly", "notably", - "noteable", "notable", - "noteably", "notably", - "noticabe", "noticable", 
- "notorios", "notorious", - "novmeber", "november", - "nromandy", "normandy", - "nuatilus", "nautilus", - "nuculear", "nuclear", - "nuetered", "neutered", - "nuisanse", "nuisance", - "nullifiy", "nullify", - "nurtient", "nutrient", - "nusaince", "nuisance", - "nusiance", "nuisance", - "nutirent", "nutrient", - "nutriens", "nutrients", - "nuturing", "nurturing", - "obdisian", "obsidian", - "obediant", "obedient", - "obession", "obsession", - "obilvion", "oblivion", - "obisdian", "obsidian", - "obsessie", "obsessive", - "obsessin", "obsession", - "obsidain", "obsidian", - "obstacal", "obstacle", - "obvilion", "oblivion", - "ocasions", "occasions", - "ocassion", "occasion", - "occaison", "occasion", - "occupato", "occupation", - "occuring", "occurring", - "octobear", "october", - "octopuns", "octopus", - "ofcoruse", "ofcourse", - "ofcoures", "ofcourse", - "ofcousre", "ofcourse", - "ofcrouse", "ofcourse", - "officals", "officials", - "officaly", "officially", - "offsited", "offside", - "ofocurse", "ofcourse", - "oligarcy", "oligarchy", - "olmypics", "olympics", - "olymipcs", "olympics", - "olypmics", "olympics", - "ommision", "omission", - "ommiting", "omitting", - "ommitted", "omitted", - "ongewild", "gonewild", - "onslaugt", "onslaught", - "operatie", "operative", - "opinoins", "opinions", - "oppinion", "opinion", - "opponant", "opponent", - "opposits", "opposites", - "oppossed", "opposed", - "oppresso", "oppression", - "optimaal", "optimal", - "optomism", "optimism", - "oragnise", "organise", - "orangerd", "orangered", - "orangers", "oranges", - "orangism", "organism", - "orchesta", "orchestra", - "ordianry", "ordinary", - "oreintal", "oriental", - "orgainse", "organise", - "orgainze", "organize", - "organims", "organism", - "organsie", "organise", - "organsim", "organism", - "organzie", "organize", - "orgasmes", "orgasms", - "orgasmos", "orgasms", - "orgasmus", "orgasms", - "orginize", "organise", - "orhtodox", "orthodox", - "oridnary", "ordinary", - "originas", "origins", - "origines", "origins", - "originsl", "originals", - "orphanes", "orphans", - "osbidian", "obsidian", - "othrodox", "orthodox", - "ourselvs", "ourselves", - "oustider", "outsider", - "outfeild", "outfield", - "outfidel", "outfield", - "outfiled", "outfield", - "outisder", "outsider", - "outplayd", "outplayed", - "outputed", "outputted", - "outsoure", "outsourced", - "overboad", "overboard", - "overclok", "overclock", - "overdrev", "overdrive", - "overhual", "overhaul", - "overlaod", "overload", - "overpiad", "overpaid", - "overules", "overuse", - "overwath", "overwatch", - "overwhem", "overwhelm", - "oximoron", "oxymoron", - "oylmpics", "olympics", - "pacakged", "packaged", - "packadge", "packaged", - "paficist", "pacifist", - "painfuly", "painfully", - "paitence", "patience", - "paitents", "patients", - "palidans", "paladins", - "palstics", "plastics", - "paltform", "platform", - "paltinum", "platinum", - "palyable", "playable", - "palyoffs", "playoffs", - "pancaeks", "pancakes", - "panckaes", "pancakes", - "pandoria", "pandora", - "pandorra", "pandora", - "panedmic", "pandemic", - "panethon", "pantheon", - "pankaces", "pancakes", - "panmedic", "pandemic", - "pantehon", "pantheon", - "panthoen", "pantheon", - "paradies", "paradise", - "paradyse", "parades", - "paragrah", "paragraph", - "paraiste", "parasite", - "paralell", "parallel", - "paralely", "parallelly", - "paralles", "parallels", - "parameds", "paramedics", - "paramter", "parameter", - "paranioa", "paranoia", - "paraniod", "paranoid", - "paraside", 
"paradise", - "parasits", "parasites", - "parastie", "parasite", - "parctise", "practise", - "paremsan", "parmesan", - "paristan", "partisan", - "parmasen", "parmesan", - "parmenas", "parmesan", - "parmsean", "parmesan", - "parnters", "partners", - "parralel", "parallel", - "parterns", "partners", - "partialy", "partially", - "partians", "partisan", - "partical", "particular", - "particel", "particle", - "partiets", "parties", - "partiots", "patriots", - "partnerd", "partnered", - "partsian", "partisan", - "passabel", "passable", - "passione", "passionate", - "passisve", "passives", - "passpost", "passports", - "passvies", "passives", - "passwors", "passwords", - "pasttime", "pastime", - "pastural", "pastoral", - "pateince", "patience", - "pateints", "patients", - "patethic", "pathetic", - "patheitc", "pathetic", - "patienty", "patiently", - "patirots", "patriots", - "patriarh", "patriarchy", - "patroits", "patriots", - "patrolls", "patrols", - "patronas", "patrons", - "patrones", "patrons", - "patronis", "patrons", - "patronos", "patrons", - "pattened", "patented", - "patterno", "patterson", - "pattersn", "patterson", - "pblisher", "publisher", - "peageant", "pageant", - "pebbleos", "pebbles", - "pebblers", "pebbles", - "pebblets", "pebbles", - "peciluar", "peculiar", - "pecuilar", "peculiar", - "peculair", "peculiar", - "peculure", "peculiar", - "peformed", "performed", - "peircing", "piercing", - "penaltis", "penalties", - "penatgon", "pentagon", - "penciles", "pencils", - "pendatic", "pedantic", - "pengiuns", "penguins", - "penisula", "peninsula", - "pensioen", "pension", - "pepperin", "pepperoni", - "perceded", "preceded", - "percente", "percentile", - "percieve", "perceive", - "percious", "precious", - "perclude", "preclude", - "perfecty", "perfectly", - "perfroms", "performs", - "perheaps", "perhaps", - "pericing", "piercing", - "peridoic", "periodic", - "perimetr", "perimeter", - "periodes", "periods", - "periodos", "periods", - "permanet", "permanent", - "permiere", "premiere", - "permises", "premises", - "permitas", "permits", - "permites", "permits", - "permitis", "permits", - "permitts", "permits", - "permiums", "premiums", - "peroidic", "periodic", - "perosnas", "personas", - "perpetue", "perpetuate", - "persaude", "persuade", - "perserve", "preserve", - "persisit", "persist", - "personel", "personnel", - "persones", "persons", - "personis", "persons", - "personsa", "personas", - "perstige", "prestige", - "persuaso", "persuasion", - "persuded", "persuaded", - "persuing", "pursuing", - "persuits", "pursuits", - "persumed", "presumed", - "pertaing", "pertaining", - "pertians", "pertains", - "pertinet", "pertinent", - "pervents", "prevents", - "perverst", "pervert", - "perviews", "previews", - "pervious", "previous", - "perxoide", "peroxide", - "pessiary", "pessary", - "petetion", "petition", - "petrolem", "petroleum", - "phantoom", "phantom", - "pharamcy", "pharmacy", - "pharmacs", "pharmacist", - "pharmsci", "pharmacist", - "phenomon", "phenomenon", - "phramacy", "pharmacy", - "phsyical", "physical", - "phsyique", "physique", - "phyiscal", "physical", - "phyisque", "physique", - "physcial", "physical", - "physicis", "physicians", - "physicks", "physics", - "physicts", "physicist", - "physqiue", "physique", - "picthers", "pitchers", - "pillards", "pillars", - "pillaris", "pillars", - "pinancle", "pinnacle", - "pinapple", "pineapple", - "pinnalce", "pinnacle", - "pinnaple", "pineapple", - "pinncale", "pinnacle", - "pinpiont", "pinpoint", - "pinteret", "pinterest", - "piolting", 
"piloting", - "pioneeer", "pioneer", - "pithcers", "pitchers", - "placebro", "placebo", - "placemet", "placements", - "planetas", "planets", - "planetos", "planets", - "plantiff", "plaintiff", - "plantium", "platinum", - "plasitcs", "plastics", - "platfrom", "platform", - "platimun", "platinum", - "platnium", "platinum", - "platnuim", "platinum", - "plausibe", "plausible", - "playbody", "playboy", - "playstye", "playstyle", - "pleasent", "pleasant", - "plehtora", "plethora", - "pleothra", "plethora", - "plethroa", "plethora", - "ploygamy", "polygamy", - "pnatheon", "pantheon", - "poeoples", "peoples", - "poingant", "poignant", - "pointeur", "pointer", - "pointure", "pointer", - "poisones", "poisons", - "poisonis", "poisons", - "poisonos", "poisons", - "poisonus", "poisons", - "polgyamy", "polygamy", - "polietly", "politely", - "politing", "piloting", - "politley", "politely", - "poltical", "political", - "poluting", "polluting", - "polution", "pollution", - "polygoon", "polygon", - "polymore", "polymer", - "pomotion", "promotion", - "popoulus", "populous", - "populair", "popular", - "populare", "popular", - "populary", "popularity", - "porcelan", "porcelain", - "porposes", "proposes", - "portabel", "portable", - "portalis", "portals", - "portalus", "portals", - "portayed", "portrayed", - "portgual", "portugal", - "portrais", "portraits", - "portrary", "portray", - "portrayl", "portrayal", - "portriat", "portrait", - "posessed", "possessed", - "posesses", "possesses", - "posioned", "poisoned", - "positivs", "positives", - "positivy", "positivity", - "possable", "possible", - "possably", "possibly", - "possbily", "possibly", - "posseses", "possesses", - "possesse", "possessive", - "possesss", "possesses", - "potrayed", "portrayed", - "poverful", "powerful", - "powerded", "powdered", - "powerpot", "powerpoint", - "pracitse", "practise", - "practial", "practical", - "practies", "practise", - "pratcise", "practise", - "praticle", "particle", - "prceeded", "preceded", - "preadtor", "predator", - "preample", "preamble", - "preceeds", "precedes", - "precisie", "precise", - "precisly", "precisely", - "precisou", "precious", - "preculde", "preclude", - "predicat", "predict", - "predicte", "predictive", - "preferas", "prefers", - "prefered", "preferred", - "preferes", "prefers", - "preferis", "prefers", - "preferrs", "prefers", - "preimere", "premiere", - "preimums", "premiums", - "preiodic", "periodic", - "preivews", "previews", - "prejudis", "prejudices", - "prelayed", "replayed", - "premeire", "premiere", - "premesis", "premises", - "premiare", "premier", - "premines", "premise", - "premuims", "premiums", - "preorded", "preordered", - "preordes", "preorders", - "preoxide", "peroxide", - "prepaird", "prepaid", - "preqeuls", "prequels", - "prequles", "prequels", - "prescrie", "prescribed", - "presense", "presence", - "presenst", "presets", - "presidet", "presidents", - "presists", "persists", - "presitge", "prestige", - "presonas", "personas", - "presuade", "persuade", - "pretador", "predator", - "pretains", "pertains", - "preveiws", "previews", - "preverse", "perverse", - "previwes", "previews", - "pricipal", "principal", - "priciple", "principle", - "priemere", "premiere", - "priestes", "priests", - "primaris", "primaries", - "primarly", "primarily", - "princila", "principals", - "principl", "principals", - "prisitne", "pristine", - "probelms", "problems", - "probleem", "problem", - "procalim", "proclaim", - "proccess", "process", - "proceded", "proceeded", - "proceder", "procedure", - "procedes", 
"proceeds", - "procedue", "procedure", - "proceeed", "proceed", - "procesed", "proceeds", - "processs", "processes", - "proclami", "proclaim", - "procliam", "proclaim", - "procotol", "protocol", - "prodcuts", "products", - "producto", "production", - "profesor", "professor", - "proficit", "proficient", - "profilic", "prolific", - "progroms", "pogroms", - "prohibis", "prohibits", - "prohpecy", "prophecy", - "prohpets", "prophets", - "projecte", "projectile", - "projecto", "projection", - "prolouge", "prologue", - "promplty", "promptly", - "promptes", "prompts", - "promptus", "prompts", - "promtply", "promptly", - "pronoune", "pronounced", - "propechy", "prophecy", - "propehcy", "prophecy", - "propehts", "prophets", - "prophacy", "prophecy", - "propmted", "prompted", - "propmtly", "promptly", - "proponet", "proponents", - "proposse", "proposes", - "proposte", "propose", - "proprety", "property", - "propsect", "prospect", - "prosepct", "prospect", - "prostite", "prostitute", - "protable", "portable", - "protecte", "protective", - "protiens", "proteins", - "protines", "proteins", - "protocal", "protocol", - "prototye", "prototype", - "protrait", "portrait", - "protrays", "portrays", - "protugal", "portugal", - "proverai", "proverbial", - "providee", "providence", - "proximty", "proximity", - "pruchase", "purchase", - "pryamids", "pyramids", - "ptichers", "pitchers", - "pubisher", "publisher", - "publiser", "publisher", - "puinsher", "punisher", - "pulisher", "publisher", - "pumkpins", "pumpkins", - "pumpinks", "pumpkins", - "pumpknis", "pumpkins", - "punshier", "punisher", - "punsiher", "punisher", - "punsihes", "punishes", - "purcahse", "purchase", - "pyramind", "pyramid", - "pyrimads", "pyramids", - "pyrmaids", "pyramids", - "qauntity", "quantity", - "qualifiy", "qualify", - "quanitfy", "quantify", - "quantaty", "quantity", - "quantite", "quantities", - "quantuum", "quantum", - "quarante", "quarantine", - "quartery", "quarterly", - "qucikest", "quickest", - "queation", "equation", - "quention", "quentin", - "quickets", "quickest", - "quicklyu", "quickly", - "rabbitos", "rabbits", - "rabbitts", "rabbits", - "racistas", "racists", - "racistes", "racists", - "radaince", "radiance", - "rahpsody", "rhapsody", - "raidance", "radiance", - "railraod", "railroad", - "randomes", "randoms", - "randomez", "randomized", - "randomns", "randoms", - "randomrs", "randoms", - "randomus", "randoms", - "raosting", "roasting", - "raphsody", "rhapsody", - "raptores", "raptors", - "raspbery", "raspberry", - "rationel", "rationale", - "realible", "reliable", - "realibly", "reliably", - "realiest", "earliest", - "realisim", "realism", - "realisme", "realise", - "realistc", "realistic", - "realiste", "realise", - "realoded", "reloaded", - "realsied", "realised", - "realtion", "relation", - "realtive", "relative", - "reamined", "remained", - "reapired", "repaired", - "reaplugs", "earplugs", - "reaserch", "research", - "reasonal", "reasonably", - "reatiler", "retailer", - "reaveled", "revealed", - "rebellis", "rebellious", - "reboudns", "rebounds", - "rebounce", "rebound", - "rebuildt", "rebuilt", - "rebuplic", "republic", - "receeded", "receded", - "recepits", "receipts", - "receptie", "receptive", - "receptos", "receptors", - "receving", "receiving", - "recident", "resident", - "reciding", "residing", - "recieved", "received", - "reciever", "receiver", - "recieves", "receives", - "recipees", "recipes", - "recipets", "recipes", - "recogise", "recognise", - "recogize", "recognize", - "recognie", "recognizes", - 
"recomend", "recommend", - "recommed", "recommend", - "reconnet", "reconnect", - "rectange", "rectangle", - "rectifiy", "rectify", - "recuring", "recurring", - "recurits", "recruits", - "redeisgn", "redesign", - "redemeed", "redeemed", - "redesgin", "redesign", - "redesing", "redesign", - "reedemed", "redeemed", - "refeeres", "referees", - "refelcts", "reflects", - "refelxes", "reflexes", - "referede", "referee", - "referene", "referee", - "referens", "references", - "referere", "referee", - "referign", "refering", - "refering", "referring", - "refernce", "references", - "reffered", "referred", - "refilles", "refills", - "refillls", "refills", - "reflecte", "reflective", - "reflecto", "reflection", - "reformes", "reforms", - "refreing", "refering", - "refrence", "reference", - "refreshd", "refreshed", - "refreshr", "refresher", - "refromed", "reformed", - "regardes", "regards", - "regenade", "renegade", - "regenere", "regenerate", - "regiones", "regions", - "regisrty", "registry", - "registed", "registered", - "regresas", "regress", - "regreses", "regress", - "regresos", "regress", - "regresse", "regressive", - "regresso", "regression", - "regrests", "regress", - "regretts", "regrets", - "regsitry", "registry", - "regualrs", "regulars", - "regualte", "regulate", - "reguarly", "regularly", - "regulary", "regularly", - "regulatr", "regulator", - "regulats", "regulators", - "rehersal", "rehearsal", - "rehtoric", "rhetoric", - "reiceved", "recieved", - "reigment", "regiment", - "reigonal", "regional", - "rekenton", "renekton", - "relaible", "reliable", - "relaibly", "reliably", - "relaised", "realised", - "relaoded", "reloaded", - "relasped", "relapsed", - "relatabe", "relatable", - "relateds", "relates", - "relativy", "relativity", - "relavent", "relevant", - "relected", "reelected", - "relegato", "relegation", - "releived", "relieved", - "releiver", "reliever", - "relevent", "relevant", - "relfects", "reflects", - "relfexes", "reflexes", - "reliased", "realised", - "religous", "religious", - "relpased", "relapsed", - "remainds", "remains", - "remainig", "remaining", - "remannts", "remnants", - "remarkes", "remarks", - "remembed", "remembered", - "remembee", "remembered", - "rememebr", "remember", - "remenant", "remnant", - "reminent", "remnant", - "remmeber", "remember", - "remotley", "remotely", - "renderes", "renders", - "reneagde", "renegade", - "renetkon", "renekton", - "renewabe", "renewables", - "renketon", "renekton", - "renmants", "remnants", - "renoylds", "reynolds", - "renteris", "renters", - "renyolds", "reynolds", - "reowrked", "reworked", - "repaires", "repairs", - "repalces", "replaces", - "reparied", "repaired", - "repblics", "republics", - "repbulic", "republic", - "repeatae", "repeatable", - "repeates", "repeats", - "repetion", "repetition", - "repharse", "rephrase", - "repitles", "reptiles", - "replased", "relapsed", - "replayes", "replays", - "replicae", "replicated", - "replubic", "republic", - "reportes", "reporters", - "reposity", "repository", - "repostas", "reposts", - "repostes", "reposts", - "repostig", "reposting", - "repostus", "reposts", - "represet", "represents", - "represso", "repression", - "reprhase", "rephrase", - "repsects", "respects", - "repsonds", "responds", - "repsonse", "response", - "repsoted", "reposted", - "repubics", "republics", - "republis", "republics", - "repulics", "republics", - "repulsie", "repulsive", - "requiers", "requires", - "requieum", "requiem", - "requilme", "requiem", - "requried", "required", - "requries", "requires", - 
"rescuecd", "rescued", - "researce", "researcher", - "resembes", "resembles", - "reserach", "research", - "resevoir", "reservoir", - "resgined", "resigned", - "residude", "residue", - "residule", "residue", - "resinged", "resigned", - "resistas", "resists", - "resisten", "resistance", - "resistes", "resists", - "resloved", "resolved", - "resloves", "resolves", - "resmeble", "resemble", - "resotred", "restored", - "resourse", "resources", - "resovled", "resolved", - "resovles", "resolves", - "respecte", "respective", - "respesct", "respects", - "responce", "response", - "responed", "respond", - "respones", "response", - "responsd", "responds", - "respoted", "reposted", - "restanti", "restarting", - "restrait", "restraint", - "restrics", "restricts", - "resuable", "reusable", - "retailes", "retailers", - "retalier", "retailer", - "rethoric", "rhetoric", - "retirase", "retires", - "retireds", "retires", - "retireus", "retires", - "retireve", "retrieve", - "retreive", "retrieve", - "retrived", "retrieved", - "retunred", "returned", - "reuasble", "reusable", - "reveales", "reveals", - "reveiwed", "reviewed", - "reveiwer", "reviewer", - "revelaed", "revealed", - "revelant", "relevant", - "revelead", "revealed", - "reverals", "reversal", - "reviewes", "reviewers", - "revlover", "revolver", - "revloves", "revolves", - "revovler", "revolver", - "revovles", "revolves", - "rewatchd", "rewatched", - "rewitten", "rewritten", - "rewritte", "rewrite", - "rewtched", "wretched", - "reynlods", "reynolds", - "reyonlds", "reynolds", - "rhaposdy", "rhapsody", - "rhaspody", "rhapsody", - "rheotric", "rhetoric", - "righteos", "righteous", - "rigntone", "ringtone", - "ringotne", "ringtone", - "ritalian", "ritalin", - "rivalrly", "rivalry", - "roachers", "roaches", - "robberts", "robbers", - "robberys", "robbers", - "robocoop", "robocop", - "robocorp", "robocop", - "robocoup", "robocop", - "roelplay", "roleplay", - "roganism", "organism", - "rolepaly", "roleplay", - "romaanin", "romanian", - "romainan", "romanian", - "romanain", "romanian", - "romanica", "romania", - "rosettta", "rosetta", - "rostaing", "roasting", - "routeros", "routers", - "rutgerus", "rutgers", - "ryenolds", "reynolds", - "sacrifie", "sacrifice", - "saddends", "saddens", - "saddenes", "saddens", - "sadisitc", "sadistic", - "salaires", "salaries", - "sandales", "sandals", - "sandalls", "sandals", - "sandstom", "sandstorm", - "sanotrum", "santorum", - "santourm", "santorum", - "santroum", "santorum", - "santurom", "santorum", - "sapcebar", "spacebar", - "sapphrie", "sapphire", - "sarcasam", "sarcasm", - "sarcasim", "sarcasm", - "sarcastc", "sarcastic", - "sargeant", "sergeant", - "sasauges", "sausages", - "sasuages", "sausages", - "satelite", "satellite", - "satellie", "satellites", - "saterday", "saturday", - "satifies", "satisfies", - "satisfiy", "satisfy", - "satrical", "satirical", - "satruday", "saturday", - "saturdsy", "saturdays", - "sawstika", "swastika", - "scandlas", "scandals", - "scannign", "scanning", - "scarmble", "scramble", - "scepture", "scepter", - "schedual", "schedule", - "schoalrs", "scholars", - "scholary", "scholarly", - "schoodle", "schooled", - "scientic", "scientific", - "scientis", "scientist", - "scoprion", "scorpion", - "scorates", "socrates", - "scoripon", "scorpion", - "scorpoin", "scorpion", - "scostman", "scotsman", - "scratchs", "scratches", - "scriptue", "scriptures", - "scriptus", "scripts", - "scritped", "scripted", - "scroates", "socrates", - "scropion", "scorpion", - "scrpited", "scripted", - "scruitny", 
"scrutiny", - "scrunity", "scrutiny", - "sctosman", "scotsman", - "sculpter", "sculpture", - "scurtiny", "scrutiny", - "seahakws", "seahawks", - "seahwaks", "seahawks", - "seantors", "senators", - "sebastin", "sebastian", - "seceeded", "succeeded", - "secertly", "secretly", - "secrelty", "secretly", - "secretas", "secrets", - "secretos", "secrets", - "secruity", "security", - "secuirty", "security", - "sedereal", "sidereal", - "seldomly", "seldom", - "selectie", "selective", - "selfiers", "selfies", - "semestre", "semester", - "semseter", "semester", - "senarios", "scenarios", - "senerity", "serenity", - "seniores", "seniors", - "senisble", "sensible", - "sensibel", "sensible", - "sensores", "sensors", - "senstive", "sensitive", - "sentaors", "senators", - "sentiers", "sentries", - "sentinet", "sentient", - "sentinte", "sentient", - "sentires", "sentries", - "sentreis", "sentries", - "separato", "separation", - "separete", "seperate", - "sepearte", "seperate", - "seperate", "separate", - "seplling", "spelling", - "sepreate", "seperate", - "sepulcre", "sepulchre", - "serached", "searched", - "seraches", "searches", - "serentiy", "serenity", - "sergaent", "sergeant", - "settigns", "settings", - "settting", "setting", - "seventen", "seventeen", - "severeal", "several", - "severeid", "severed", - "severide", "severed", - "severley", "severely", - "sexaully", "sexually", - "seziures", "seizures", - "sezuires", "seizures", - "shadoloo", "shadaloo", - "shangahi", "shanghai", - "shanghia", "shanghai", - "sharplay", "sharply", - "sharpley", "sharply", - "shawshak", "shawshank", - "shcolars", "scholars", - "shcooled", "schooled", - "sheilded", "shielded", - "shelterd", "sheltered", - "shelvers", "shelves", - "shelveys", "shelves", - "sherlcok", "sherlock", - "shetlers", "shelters", - "shfiting", "shifting", - "shifitng", "shifting", - "shifteer", "shifter", - "shileded", "shielded", - "shineing", "shining", - "shitstom", "shitstorm", - "shittoon", "shitton", - "shittown", "shitton", - "shleters", "shelters", - "shnaghai", "shanghai", - "shortend", "shortened", - "shotuout", "shoutout", - "shoudlnt", "shouldnt", - "shouldes", "shoulders", - "shoulndt", "shouldnt", - "shrapenl", "shrapnel", - "shrelock", "sherlock", - "shrinked", "shrunk", - "shrpanel", "shrapnel", - "shtiless", "shitless", - "shuoldnt", "shouldnt", - "sideboad", "sideboard", - "sidleine", "sideline", - "siezable", "sizeable", - "siezures", "seizures", - "signatue", "signatures", - "signfies", "signifies", - "signifiy", "signify", - "signigns", "signings", - "signular", "singular", - "silbings", "siblings", - "silicoln", "silicon", - "silicoon", "silicon", - "silimiar", "similiar", - "simialir", "similiar", - "simiilar", "similiar", - "similair", "similar", - "similari", "similiar", - "similart", "similarity", - "similary", "similarly", - "similiar", "similar", - "simliiar", "similiar", - "simluate", "simulate", - "simmilar", "similar", - "simpelst", "simplest", - "simplets", "simplest", - "simplicy", "simplicity", - "simplier", "simpler", - "simulato", "simulation", - "singlers", "singles", - "singluar", "singular", - "sinistre", "sinister", - "sinsiter", "sinister", - "sitckers", "stickers", - "sitrring", "stirring", - "sizebale", "sizeable", - "skateing", "skating", - "skecthes", "sketches", - "skelatel", "skeletal", - "skeletos", "skeletons", - "sketchey", "sketchy", - "sketpics", "skeptics", - "skillsto", "skillshots", - "skimrish", "skirmish", - "skpetics", "skeptics", - "skrimish", "skirmish", - "skteches", "sketches", - 
"skywalkr", "skywalker", - "slaptoon", "splatoon", - "slaverly", "slavery", - "slienced", "silenced", - "sliently", "silently", - "slighlty", "slightly", - "sligthly", "slightly", - "smartare", "smarter", - "snetries", "sentries", - "snippent", "snippet", - "snippert", "snippet", - "snowbals", "snowballs", - "snugglie", "snuggle", - "snydrome", "syndrome", - "snyopsis", "synopsis", - "soberity", "sobriety", - "sobreity", "sobriety", - "socailly", "socially", - "socalism", "socialism", - "socartes", "socrates", - "socialim", "socialism", - "socities", "societies", - "socttish", "scottish", - "soemthin", "somethin", - "soilders", "soldiers", - "solatary", "solitary", - "soldeirs", "soldiers", - "soliders", "soldiers", - "soluable", "soluble", - "solutide", "solitude", - "somalija", "somalia", - "somehtin", "somethin", - "someoens", "someones", - "somethis", "somethings", - "sometihn", "somethin", - "sometinh", "somethin", - "somoenes", "someones", - "somtimes", "sometimes", - "somwhere", "somewhere", - "soparnos", "sopranos", - "sophmore", "sophomore", - "sorcercy", "sorcery", - "sorcerey", "sorcery", - "sorceror", "sorcerer", - "sorcerry", "sorcery", - "sorpanos", "sopranos", - "southren", "southern", - "soverein", "sovereign", - "soverign", "sovereign", - "sovietes", "soviets", - "spagheti", "spaghetti", - "spainish", "spanish", - "spaltoon", "splatoon", - "spammade", "spammed", - "spammare", "spammer", - "spammear", "spammer", - "spammend", "spammed", - "spammeur", "spammer", - "spanisch", "spanish", - "sparklie", "sparkle", - "spawnign", "spawning", - "specemin", "specimen", - "speciaal", "special", - "specialt", "specialist", - "specialy", "specially", - "specialz", "specialize", - "specifed", "specified", - "specifiy", "specify", - "speciman", "specimen", - "specrtal", "spectral", - "speicals", "specials", - "spellign", "spelling", - "spendour", "splendour", - "sphereos", "spheres", - "spilnter", "splinter", - "spiltter", "splitter", - "spindrel", "spindle", - "spirites", "spirits", - "spiritis", "spirits", - "spiritus", "spirits", - "spirtied", "spirited", - "spleling", "spelling", - "splitner", "splinter", - "spoilerd", "spoiled", - "spoliers", "spoilers", - "sponsord", "sponsored", - "sporanos", "sopranos", - "spotifiy", "spotify", - "spotifty", "spotify", - "sppeches", "speeches", - "sprayade", "sprayed", - "spreaded", "spread", - "springst", "sprints", - "sprinkel", "sprinkle", - "sprintas", "sprints", - "spritual", "spiritual", - "sproutes", "sprouts", - "spwaning", "spawning", - "sqaudron", "squadron", - "sqaurely", "squarely", - "sqiurtle", "squirtle", - "squardon", "squadron", - "squareds", "squares", - "squarley", "squarely", - "squeakey", "squeaky", - "squeakly", "squeaky", - "squirlte", "squirtle", - "squirrle", "squirrel", - "squirtel", "squirtle", - "squishey", "squishy", - "squishly", "squishy", - "squritle", "squirtle", - "squrriel", "squirrel", - "squrtile", "squirtle", - "sriarcha", "sriracha", - "srriacha", "sriracha", - "sryacuse", "syracuse", - "staduims", "stadiums", - "staidums", "stadiums", - "staklers", "stalkers", - "stalekrs", "stalkers", - "stalkear", "stalker", - "staminia", "stamina", - "stampade", "stamped", - "stampeed", "stamped", - "stancels", "stances", - "stancers", "stances", - "standars", "standards", - "standbay", "standby", - "standbuy", "standby", - "stangant", "stagnant", - "staright", "straight", - "starined", "strained", - "starlted", "startled", - "startegy", "strategy", - "starteld", "startled", - "startsup", "startups", - "stateman", 
"statesman", - "staticts", "statist", - "stationd", "stationed", - "stationy", "stationary", - "statiskt", "statist", - "statistc", "statistic", - "statment", "statement", - "stattues", "statutes", - "statuets", "statutes", - "statuser", "stature", - "staurday", "saturday", - "steadliy", "steadily", - "stealhty", "stealthy", - "steathly", "stealthy", - "stelathy", "stealthy", - "sterilze", "sterile", - "steriods", "steroids", - "stichted", "stitched", - "sticthed", "stitched", - "sticthes", "stitches", - "stimulai", "stimuli", - "stimulas", "stimulants", - "stimulat", "stimulants", - "stimulli", "stimuli", - "stingent", "stringent", - "stirkers", "strikers", - "stlakers", "stalkers", - "stomache", "stomach", - "stormade", "stormed", - "stormend", "stormed", - "stradegy", "strategy", - "stragety", "strategy", - "straignt", "straighten", - "straigth", "straight", - "straings", "strains", - "strangel", "strangle", - "stranget", "strangest", - "stratgey", "strategy", - "stratled", "startled", - "streames", "streams", - "streamos", "streams", - "streamus", "streams", - "streamys", "streams", - "stregnth", "strength", - "stremear", "streamer", - "strenght", "strength", - "strengts", "strengths", - "strenous", "strenuous", - "strentgh", "strength", - "stretchs", "stretches", - "striaght", "straight", - "striclty", "strictly", - "striekrs", "strikers", - "strikely", "strikingly", - "stringet", "stringent", - "stubbron", "stubborn", - "stubmled", "stumbled", - "stucture", "structure", - "studioes", "studios", - "stuipder", "stupider", - "stumbeld", "stumbled", - "stupdily", "stupidly", - "stupidiy", "stupidity", - "stylisch", "stylish", - "styrofom", "styrofoam", - "suasages", "sausages", - "subltety", "subtlety", - "submarie", "submarines", - "subruban", "suburban", - "subscrie", "subscriber", - "subsidie", "subsidized", - "subsidiy", "subsidy", - "substace", "substance", - "substans", "substances", - "substite", "substitute", - "subtelty", "subtlety", - "subtetly", "subtlety", - "subtilte", "subtitle", - "subtitel", "subtitle", - "subtitls", "subtitles", - "subtltey", "subtlety", - "succeded", "succeeded", - "succedes", "succeeds", - "succeeed", "succeed", - "succesed", "succeeds", - "successs", "successes", - "succsess", "success", - "suceeded", "succeeded", - "sucesful", "successful", - "sucesion", "succession", - "sucesses", "successes", - "sucessor", "successor", - "sucessot", "successor", - "sucidial", "suicidal", - "suddnely", "suddenly", - "sufficit", "sufficient", - "suggesst", "suggests", - "suggeste", "suggestive", - "summenor", "summoner", - "summones", "summoners", - "sunfiber", "sunfire", - "sunscren", "sunscreen", - "superham", "superhuman", - "superheo", "superhero", - "superios", "superiors", - "supirsed", "suprised", - "suposing", "supposing", - "supporre", "supporters", - "suppoted", "supported", - "suprised", "surprised", - "suprized", "surprised", - "suprsied", "suprised", - "supsects", "suspects", - "supsense", "suspense", - "surbuban", "suburban", - "surounds", "surrounds", - "surpases", "surpass", - "surpress", "suppress", - "surprize", "surprise", - "surrouns", "surrounds", - "surveill", "surveil", - "surveyer", "surveyor", - "surviver", "survivor", - "suspened", "suspend", - "suspenso", "suspension", - "swaering", "swearing", - "swansoon", "swanson", - "swasitka", "swastika", - "swaskita", "swastika", - "swatiska", "swastika", - "swatsika", "swastika", - "swedisch", "swedish", - "swiftley", "swiftly", - "swithced", "switched", - "swithces", "switches", - "swtiched", 
"switched", - "swtiches", "switches", - "syarcuse", "syracuse", - "sydnrome", "syndrome", - "sylablle", "syllable", - "syllabel", "syllable", - "symapthy", "sympathy", - "symboles", "symbols", - "symhpony", "symphony", - "symmerty", "symmetry", - "symmtery", "symmetry", - "symoblic", "symbolic", - "symphaty", "sympathy", - "symptoom", "symptom", - "symtpoms", "symptoms", - "synomyns", "synonyms", - "synonmys", "synonyms", - "synonomy", "synonym", - "synoynms", "synonyms", - "synphony", "symphony", - "synposis", "synopsis", - "sypmathy", "sympathy", - "sypmtoms", "symptoms", - "sypnosis", "synopsis", - "syraucse", "syracuse", - "syrcause", "syracuse", - "syringae", "syringe", - "syringue", "syringe", - "sysamdin", "sysadmin", - "sysdamin", "sysadmin", - "tacticas", "tactics", - "tacticts", "tactics", - "tacticus", "tactics", - "tagliate", "tailgate", - "tahnkyou", "thankyou", - "tailsman", "talisman", - "taiwanee", "taiwanese", - "taligate", "tailgate", - "taliored", "tailored", - "tallents", "tallest", - "talsiman", "talisman", - "tanturms", "tantrums", - "tapitude", "aptitude", - "tasliman", "talisman", - "tattooes", "tattoos", - "tattooos", "tattoos", - "taxanomy", "taxonomy", - "teamfigt", "teamfight", - "teamspek", "teamspeak", - "teancity", "tenacity", - "teapsoon", "teaspoon", - "techniqe", "technique", - "teenages", "teenagers", - "telegrah", "telegraph", - "telphony", "telephony", - "tempalrs", "templars", - "tempalte", "template", - "templats", "templates", - "templeos", "temples", - "templers", "temples", - "temporay", "temporary", - "temprary", "temporary", - "tenacles", "tentacles", - "tenactiy", "tenacity", - "tencaity", "tenacity", - "tendancy", "tendency", - "tendence", "tendencies", - "tentacel", "tentacle", - "tentacls", "tentacles", - "tentalce", "tentacle", - "tequilia", "tequila", - "terriory", "territory", - "territoy", "territory", - "terroist", "terrorist", - "tesitcle", "testicle", - "testicel", "testicle", - "testifiy", "testify", - "teusdays", "tuesdays", - "texutres", "textures", - "thaliand", "thailand", - "theather", "theater", - "theathre", "theater", - "theature", "theater", - "theisitc", "theistic", - "themslef", "themself", - "theorits", "theorist", - "theraphy", "therapy", - "thereian", "therein", - "theroies", "theories", - "theroist", "theorist", - "thesitic", "theistic", - "thialand", "thailand", - "thiestic", "theistic", - "thikning", "thinking", - "thirites", "thirties", - "thirstay", "thirsty", - "thnakyou", "thankyou", - "thoeries", "theories", - "thoerist", "theorist", - "thomspon", "thompson", - "thopmson", "thompson", - "thougths", "thoughts", - "thourogh", "thorough", - "threates", "threatens", - "threefor", "therefor", - "thriteen", "thirteen", - "thrities", "thirties", - "throaths", "throats", - "throners", "thrones", - "throough", "thorough", - "throught", "thought", - "thrusday", "thursday", - "thumbnal", "thumbnails", - "thurdsay", "thursday", - "thursdsy", "thursdays", - "tightare", "tighter", - "timestap", "timestamp", - "tirangle", "triangle", - "tirbunal", "tribunal", - "titainum", "titanium", - "titanuim", "titanium", - "tocuhpad", "touchpad", - "togehter", "together", - "togheter", "together", - "toiletts", "toilets", - "tolerabe", "tolerable", - "tommorow", "tomorrow", - "tonguers", "tongues", - "toriodal", "toroidal", - "toritlla", "tortilla", - "tornadoe", "tornado", - "torotise", "tortoise", - "torpedeo", "torpedo", - "torphies", "trophies", - "tortiose", "tortoise", - "toruisty", "touristy", - "toruneys", "tourneys", - 
"touchapd", "touchpad", - "tounreys", "tourneys", - "tourisim", "tourism", - "touritsy", "touristy", - "tournyes", "tourneys", - "toursits", "tourists", - "toursity", "touristy", - "toxiticy", "toxicity", - "trabajao", "trabajo", - "trabajdo", "trabajo", - "trackres", "trackers", - "trageted", "targeted", - "traingle", "triangle", - "traitour", "traitor", - "trakcers", "trackers", - "traliers", "trailers", - "tranform", "transform", - "transeat", "translates", - "transfom", "transform", - "transfos", "transforms", - "transiet", "transient", - "transito", "transition", - "transpot", "transport", - "trasnfer", "transfer", - "tratiors", "traitors", - "traveles", "travels", - "traveres", "traverse", - "treasurs", "treasures", - "treatmet", "treatments", - "treatsie", "treaties", - "treausre", "treasure", - "tredning", "trending", - "tremelos", "tremolos", - "tresuary", "treasury", - "trialers", "trailers", - "trianers", "trainers", - "triangel", "triangle", - "triangls", "triangles", - "trianing", "training", - "trianlge", "triangle", - "triators", "traitors", - "tribuanl", "tribunal", - "trickyer", "trickery", - "triggern", "triggering", - "trilogoy", "trilogy", - "trinagle", "triangle", - "trinekts", "trinkets", - "tringale", "triangle", - "trinitiy", "trinity", - "triology", "trilogy", - "triumpth", "triumph", - "trohpies", "trophies", - "trollade", "trolled", - "tropcial", "tropical", - "trotilla", "tortilla", - "trpoical", "tropical", - "trubinal", "tribunal", - "trubines", "turbines", - "tsunamai", "tsunami", - "tuesdsay", "tuesdays", - "tunnells", "tunnels", - "turkisch", "turkish", - "turntabe", "turntable", - "turretts", "turrets", - "tusedays", "tuesdays", - "tutorual", "tutorial", - "twilgiht", "twilight", - "tylenool", "tylenol", - "typicaly", "typically", - "tyranies", "tyrannies", - "tyrannia", "tyrannical", - "ublisher", "publisher", - "udnercut", "undercut", - "udnerdog", "underdog", - "ugpraded", "upgraded", - "ugprades", "upgrades", - "ukrainie", "ukraine", - "ukrainin", "ukrainian", - "ukranian", "ukrainian", - "ulitmate", "ultimate", - "ultamite", "ultimate", - "ultiamte", "ultimate", - "ultimely", "ultimately", - "ultrason", "ultrasound", - "umberlla", "umbrella", - "unabnned", "unbanned", - "unbanend", "unbanned", - "uncanney", "uncanny", - "uncannny", "uncanny", - "underbog", "undergo", - "underglo", "undergo", - "undersog", "undergo", - "undertoe", "undertones", - "underwar", "underwater", - "unfailry", "unfairly", - "unfarily", "unfairly", - "ungodley", "ungodly", - "unhapppy", "unhappy", - "unhealty", "unhealthy", - "unicrons", "unicorns", - "unifroms", "uniforms", - "uniquley", "uniquely", - "univeral", "universal", - "unlikley", "unlikely", - "unlockes", "unlocks", - "unluckly", "unlucky", - "unpoened", "unopened", - "unqiuely", "uniquely", - "unrakned", "unranked", - "unrnaked", "unranked", - "unrpoven", "unproven", - "unsuable", "unusable", - "untraind", "untrained", - "unusualy", "unusually", - "unvierse", "universe", - "unworhty", "unworthy", - "upgarded", "upgraded", - "upgardes", "upgrades", - "uploades", "uploads", - "upstaris", "upstairs", - "upstiars", "upstairs", - "urethrea", "urethra", - "uruguary", "uruguay", - "ususally", "usually", - "utilitiy", "utility", - "utlimate", "ultimate", - "vaccinae", "vaccinated", - "vaccinet", "vaccinated", - "vacinity", "vicinity", - "vaguelly", "vaguely", - "vaiation", "aviation", - "vaieties", "varieties", - "vailidty", "validity", - "vairable", "variable", - "vaklyrie", "valkyrie", - "valenica", "valencia", - 
"valentie", "valentines", - "valentis", "valentines", - "validade", "validated", - "valkirye", "valkyrie", - "valkiyre", "valkyrie", - "valkriye", "valkyrie", - "valkryie", "valkyrie", - "valkyire", "valkyrie", - "valnecia", "valencia", - "valubale", "valuable", - "valykrie", "valkyrie", - "vamipres", "vampires", - "vampiers", "vampires", - "vampries", "vampires", - "vangurad", "vanguard", - "vanillia", "vanilla", - "vanillla", "vanilla", - "vanugard", "vanguard", - "varaible", "variable", - "varaints", "variants", - "variabel", "variable", - "varibale", "variable", - "varities", "varieties", - "vassales", "vassals", - "vassalls", "vassals", - "vassalos", "vassals", - "vaticaan", "vatican", - "vaticina", "vatican", - "vaulable", "valuable", - "vaylkrie", "valkyrie", - "vechiles", "vehicles", - "vectores", "vectors", - "vegansim", "veganism", - "vegtable", "vegetable", - "vehciles", "vehicles", - "vehicels", "vehicles", - "vehicule", "vehicle", - "veichles", "vehicles", - "venelope", "envelope", - "venemous", "venomous", - "vengance", "vengeance", - "vengence", "vengeance", - "verablly", "verbally", - "verbaitm", "verbatim", - "verisons", "versions", - "versatel", "versatile", - "vertabim", "verbatim", - "vertigro", "vertigo", - "vesseles", "vessels", - "vessells", "vessels", - "viabiliy", "viability", - "viatmins", "vitamins", - "vibratie", "vibrate", - "vibratin", "vibration", - "vicintiy", "vicinity", - "vicseral", "visceral", - "victimas", "victims", - "victimes", "victims", - "victorin", "victorian", - "victoris", "victories", - "vieweres", "viewers", - "viewpoit", "viewpoints", - "vigilane", "vigilante", - "vigliant", "vigilant", - "vikingos", "vikings", - "viligant", "vigilant", - "villegas", "villages", - "vindicte", "vindictive", - "vinicity", "vicinity", - "violatin", "violation", - "violenty", "violently", - "violetas", "violates", - "virament", "vraiment", - "virbator", "vibrator", - "virginas", "virgins", - "virgines", "virgins", - "virgings", "virgins", - "virginis", "virgins", - "virginus", "virgins", - "virtualy", "virtually", - "virtuels", "virtues", - "virtuose", "virtues", - "viscreal", "visceral", - "visercal", "visceral", - "visibily", "visibility", - "visibley", "visibly", - "visiblly", "visibly", - "vitailty", "vitality", - "vitimans", "vitamins", - "vitmains", "vitamins", - "vitories", "victories", - "voicemal", "voicemail", - "voilates", "violates", - "volatily", "volatility", - "volcando", "volcano", - "volcanoe", "volcano", - "volcaron", "volcano", - "vriament", "vraiment", - "wahtever", "whatever", - "wallpapr", "wallpapers", - "warantee", "warranty", - "warcarft", "warcraft", - "warrante", "warranties", - "warriros", "warriors", - "watchemn", "watchmen", - "watchign", "watching", - "wathcing", "watching", - "wathcmen", "watchmen", - "wathever", "whatever", - "watkings", "watkins", - "wealthly", "wealthy", - "webistes", "websites", - "websties", "websites", - "wednesdy", "wednesdays", - "weigthed", "weighted", - "weridest", "weirdest", - "werstler", "wrestler", - "wesbites", "websites", - "westbrok", "westbrook", - "westerse", "westerners", - "wherease", "whereas", - "whipsers", "whispers", - "whislist", "wishlist", - "whisltes", "whistles", - "whisperd", "whispered", - "whistels", "whistles", - "whitsles", "whistles", - "whsipers", "whispers", - "widgetas", "widgets", - "wieghted", "weighted", - "willaims", "williams", - "willfuly", "willfully", - "willimas", "williams", - "windsoar", "windsor", - "wininpeg", "winnipeg", - "winnigns", "winnings", - "winnpieg", 
"winnipeg", - "wiredest", "weirdest", - "wishlsit", "wishlist", - "wishpers", "whispers", - "withdral", "withdrawal", - "witnesss", "witnesses", - "wonderes", "wonders", - "wonderus", "wonders", - "workfore", "workforce", - "wouldnot", "wouldnt", - "wranlger", "wrangler", - "wreckign", "wrecking", - "wrecthed", "wretched", - "wrekcing", "wrecking", - "wreslter", "wrestler", - "wresters", "wrestlers", - "writting", "writing", - "wrnagler", "wrangler", - "wrteched", "wretched", - "yeilding", "yielding", - "yoesmite", "yosemite", - "yorksher", "yorkshire", - "yorkshie", "yorkshire", - "yosemeti", "yosemite", - "yosimete", "yosemite", - "zealotes", "zealots", - "zealoths", "zealots", - "zealotus", "zealots", - "zealouts", "zealous", - "zepplein", "zeppelin", - "zepplien", "zeppelin", - "zimbabew", "zimbabwe", - "zimbawbe", "zimbabwe", - "zinoists", "zionists", - "zionisim", "zionism", - "zionistm", "zionism", - "zionsits", "zionists", - "zoinists", "zionists", - "abiltiy", "ability", - "abodmen", "abdomen", - "abondon", "abandon", - "aboslve", "absolve", - "abosrbs", "absorbs", - "abriter", "arbiter", - "abrupty", "abruptly", - "absense", "absence", - "absolue", "absolute", - "absovle", "absolve", - "absrobs", "absorbs", - "absuers", "abusers", - "absurdy", "absurdly", - "absymal", "abysmal", - "abymsal", "abysmal", - "acadamy", "academy", - "acadmic", "academic", - "accesss", "access", - "accpets", "accepts", - "accross", "across", - "accuray", "accuracy", - "acheive", "achieve", - "achived", "achieved", - "acident", "accident", - "ackward", "awkward", - "acrlyic", "acrylic", - "actauly", "actualy", - "activit", "activist", - "activly", "actively", - "actualy", "actually", - "actulay", "actualy", - "acuracy", "accuracy", - "acusing", "causing", - "acustom", "accustom", - "acutaly", "actualy", - "acyrlic", "acrylic", - "adaptes", "adapters", - "adatper", "adapter", - "adbomen", "abdomen", - "addcits", "addicts", - "adderss", "address", - "addtion", "addition", - "adequet", "adequate", - "adequit", "adequate", - "adivser", "adviser", - "adivsor", "advisor", - "admited", "admitted", - "admrial", "admiral", - "adpater", "adapter", - "adquire", "acquire", - "adultey", "adultery", - "adverst", "adverts", - "adviced", "advised", - "advocay", "advocacy", - "advsior", "advisor", - "aeriels", "aerials", - "affaris", "affairs", - "affiars", "affairs", - "afircan", "african", - "africas", "africans", - "afwully", "awfully", - "againts", "against", - "agaisnt", "against", - "aganist", "against", - "aggreed", "agreed", - "agianst", "against", - "agreing", "agreeing", - "agruing", "arguing", - "ahtiest", "athiest", - "aicraft", "aircraft", - "ailmony", "alimony", - "airbore", "airborne", - "aircaft", "aircraft", - "airlfow", "airflow", - "airosft", "airsoft", - "airpost", "airports", - "airsfot", "airsoft", - "airzona", "arizona", - "alchmey", "alchemy", - "alchool", "alcohol", - "alcohal", "alcohol", - "aledged", "alleged", - "aledges", "alleges", - "alegbra", "algebra", - "algerba", "algebra", - "alienet", "alienate", - "alledge", "allege", - "allegry", "allergy", - "alltime", "all-time", - "almighy", "almighty", - "alochol", "alcohol", - "alotted", "allotted", - "alowing", "allowing", - "alphabt", "alphabet", - "alreayd", "already", - "alrighy", "alrighty", - "altanta", "atlanta", - "alteast", "atleast", - "altough", "although", - "alusion", "allusion", - "amateus", "amateurs", - "amatuer", "amateur", - "amature", "armature", - "amensia", "amnesia", - "amensty", "amnesty", - "amercia", "america", - 
"americs", "americas", - "ammount", "amount", - "ammused", "amused", - "amneisa", "amnesia", - "amnsety", "amnesty", - "amognst", "amongst", - "amongts", "amongst", - "amonsgt", "amongst", - "ampilfy", "amplify", - "amrpits", "armpits", - "analoge", "analogue", - "analsyt", "analyst", - "analyes", "analyse", - "analyts", "analyst", - "analzye", "analyze", - "anaylse", "analyse", - "anaylst", "analyst", - "anaylze", "analyze", - "anceint", "ancient", - "andorid", "android", - "andriod", "android", - "androis", "androids", - "angirly", "angrily", - "angluar", "angular", - "angualr", "angular", - "anicent", "ancient", - "anitque", "antique", - "anixety", "anxiety", - "anmesia", "amnesia", - "anmesty", "amnesty", - "annoint", "anoint", - "annualy", "annually", - "annuled", "annulled", - "anohter", "another", - "anomoly", "anomaly", - "answerd", "answered", - "anuglar", "angular", - "anulled", "annulled", - "anwsers", "answers", - "anwyays", "anyways", - "anxeity", "anxiety", - "anyoens", "anyones", - "anyonse", "anyones", - "anywyas", "anyways", - "aparent", "apparent", - "appeard", "appeared", - "appluad", "applaud", - "aproval", "approval", - "apsects", "aspects", - "apshalt", "asphalt", - "apsirin", "aspirin", - "aqcuire", "acquire", - "aquarim", "aquarium", - "aquired", "acquired", - "aranged", "arranged", - "arbitre", "arbiter", - "arcahic", "archaic", - "archiac", "archaic", - "arcylic", "acrylic", - "aresnal", "arsenal", - "aretmis", "artemis", - "argubly", "arguably", - "aribter", "arbiter", - "ariflow", "airflow", - "arisoft", "airsoft", - "aritsts", "artists", - "armchar", "armchair", - "arogant", "arrogant", - "arogent", "arrogant", - "arresst", "arrests", - "arround", "around", - "arsneal", "arsenal", - "artcile", "article", - "artical", "article", - "articel", "article", - "artistc", "artistic", - "artmeis", "artemis", - "artsits", "artists", - "aruging", "arguing", - "aseuxal", "asexual", - "asexaul", "asexual", - "ashpalt", "asphalt", - "asiprin", "aspirin", - "asissts", "assists", - "asnwers", "answers", - "asorbed", "absorbed", - "aspahlt", "asphalt", - "asphlat", "asphalt", - "aspriin", "aspirin", - "assagne", "assange", - "assasin", "assassin", - "assembe", "assemble", - "assemby", "assembly", - "assisst", "assists", - "assnage", "assange", - "asssits", "assists", - "assualt", "assault", - "asterik", "asterisk", - "asutria", "austria", - "atcualy", "actualy", - "atelast", "atleast", - "athesim", "atheism", - "athiesm", "atheism", - "athiest", "atheist", - "athiets", "athiest", - "athlets", "athletes", - "atlantc", "atlantic", - "atleats", "atleast", - "atlesat", "atleast", - "atorney", "attorney", - "atremis", "artemis", - "attemps", "attempts", - "attemts", "attempts", - "attened", "attended", - "attracs", "attracts", - "audbile", "audible", - "audibel", "audible", - "austira", "austria", - "austrai", "austria", - "autistc", "autistic", - "avation", "aviation", - "avtaars", "avatars", - "awakend", "awakened", - "bablyon", "babylon", - "backdor", "backdoor", - "backsta", "backseat", - "baclony", "balcony", - "badnits", "bandits", - "baiscly", "basicly", - "bakcers", "backers", - "balanse", "balances", - "balcked", "blacked", - "banhsee", "banshee", - "bankgok", "bangkok", - "baoynet", "bayonet", - "baptims", "baptism", - "baptsim", "baptism", - "baragin", "bargain", - "bargani", "bargain", - "bargian", "bargain", - "bariner", "brainer", - "barlkey", "barkley", - "barracs", "barracks", - "barrles", "barrels", - "barsita", "barista", - "barvery", "bravery", - "bascily", 
"basicly", - "basicly", "basically", - "basilcy", "basicly", - "basiton", "bastion", - "basnhee", "banshee", - "bastane", "bastante", - "bastars", "bastards", - "bastino", "bastion", - "bathrom", "bathroom", - "batitsa", "batista", - "batsita", "batista", - "bayblon", "babylon", - "baynoet", "bayonet", - "bayoent", "bayonet", - "bceuase", "becuase", - "beacuse", "because", - "bealtes", "beatles", - "beaslty", "beastly", - "beatels", "beatles", - "beaucop", "beaucoup", - "becamae", "became", - "becames", "becomes", - "becasue", "because", - "becouse", "because", - "becuaes", "becuase", - "becuase", "because", - "becusae", "becuase", - "befried", "befriend", - "beggins", "begins", - "beglian", "belgian", - "beglium", "belgium", - "begnals", "bengals", - "bejiing", "beijing", - "beleifs", "beliefs", - "beleive", "believe", - "belgain", "belgian", - "belguim", "belgium", - "believr", "believer", - "believs", "believes", - "belifes", "beliefs", - "beligan", "belgian", - "beligum", "belgium", - "belived", "believed", - "belives", "believes", - "benagls", "bengals", - "benedit", "benedict", - "benghai", "benghazi", - "benglas", "bengals", - "benifit", "benefit", - "beoynce", "beyonce", - "beraded", "bearded", - "bersekr", "berserk", - "beseige", "besiege", - "betales", "beatles", - "bethesa", "bethesda", - "betrayd", "betrayed", - "beucase", "becuase", - "bewteen", "between", - "bicthes", "bitches", - "bidrman", "birdman", - "biejing", "beijing", - "bifgoot", "bigfoot", - "bigorty", "bigotry", - "bigtoed", "bigoted", - "bigtory", "bigotry", - "biogted", "bigoted", - "biogtry", "bigotry", - "bioplar", "bipolar", - "biploar", "bipolar", - "birdamn", "birdman", - "birdges", "bridges", - "birgade", "brigade", - "bitcion", "bitcoin", - "bithced", "bitched", - "bithces", "bitches", - "bitocin", "bitcoin", - "bizzare", "bizarre", - "blacony", "balcony", - "blaimed", "blamed", - "blankes", "blankets", - "blegian", "belgian", - "blegium", "belgium", - "blizzad", "blizzard", - "blockes", "blockers", - "bloster", "bolster", - "blulets", "bullets", - "bobmers", "bombers", - "bollocs", "bollocks", - "bondary", "boundary", - "bonnano", "bonanno", - "bonsues", "bonuses", - "boraden", "broaden", - "borader", "broader", - "boradly", "broadly", - "bordeom", "boredom", - "boslter", "bolster", - "boudler", "boulder", - "boundry", "boundary", - "bounses", "bonuses", - "boutiqe", "boutique", - "bouyant", "buoyant", - "braevry", "bravery", - "braista", "barista", - "brakley", "barkley", - "branier", "brainer", - "braoden", "broaden", - "braoder", "broader", - "braodly", "broadly", - "brednan", "brendan", - "breifly", "briefly", - "breserk", "berserk", - "brethen", "brethren", - "brewrey", "brewery", - "briagde", "brigade", - "brianer", "brainer", - "bridman", "birdman", - "brielfy", "briefly", - "brigdes", "bridges", - "brightn", "brighten", - "brisben", "brisbane", - "britian", "britain", - "britsol", "bristol", - "briused", "bruised", - "briuser", "bruiser", - "briuses", "bruises", - "brocoli", "broccoli", - "bronocs", "broncos", - "browine", "brownie", - "brownei", "brownie", - "brownis", "brownies", - "bruglar", "burglar", - "brunete", "brunette", - "bruning", "burning", - "brusied", "bruised", - "brusies", "bruises", - "brusses", "brussels", - "brutaly", "brutally", - "btiched", "bitched", - "btiches", "bitches", - "bubbels", "bubbles", - "buddhim", "buddhism", - "buddhit", "buddhist", - "buddist", "buddhist", - "budgest", "budgets", - "bugdets", "budgets", - "buildes", "builders", - "bulgara", "bulgaria", - 
"bullest", "bullets", - "buoancy", "buoyancy", - "burguny", "burgundy", - "buriser", "bruiser", - "burlgar", "burglar", - "burnign", "burning", - "burried", "buried", - "burrtio", "burrito", - "busines", "business", - "busness", "business", - "butthoe", "butthole", - "buttrey", "buttery", - "cababge", "cabbage", - "cabines", "cabinets", - "cabniet", "cabinet", - "caclium", "calcium", - "cacuses", "caucuses", - "caffeen", "caffeine", - "cahched", "cached", - "cahotic", "chaotic", - "cahsier", "cashier", - "cailbre", "calibre", - "calaber", "caliber", - "calagry", "calgary", - "calback", "callback", - "calbire", "calibre", - "calcuim", "calcium", - "calculs", "calculus", - "calicum", "calcium", - "calrify", "clarify", - "calrity", "clarity", - "caluses", "clauses", - "camboda", "cambodia", - "campain", "campaign", - "campuss", "campuses", - "cancles", "cancels", - "cancres", "cancers", - "cancuks", "canucks", - "canides", "candies", - "cannnot", "cannot", - "canrage", "carnage", - "capible", "capable", - "capitas", "capitals", - "capsuls", "capsules", - "captais", "captains", - "captial", "capital", - "captiol", "capitol", - "captued", "captured", - "capturd", "captured", - "capusle", "capsule", - "carange", "carnage", - "carbien", "carbine", - "cardaic", "cardiac", - "cardina", "cardigan", - "careing", "caring", - "caridac", "cardiac", - "carmtan", "cartman", - "carnege", "carnage", - "carnige", "carnage", - "carolan", "carolina", - "carreer", "career", - "carrers", "careers", - "cartles", "cartels", - "caryons", "crayons", - "casette", "cassette", - "casheir", "cashier", - "cashies", "cashiers", - "cashire", "cashier", - "casltes", "castles", - "caspule", "capsule", - "cassete", "cassette", - "castels", "castles", - "casuing", "causing", - "cathlic", "catholic", - "cauncks", "canucks", - "cavarly", "cavalry", - "cavlary", "cavalry", - "celcius", "celsius", - "celisus", "celsius", - "celitcs", "celtics", - "celsuis", "celsius", - "centruy", "century", - "centuty", "century", - "ceratin", "certain", - "cermaic", "ceramic", - "certian", "certain", - "cervial", "cervical", - "cesspol", "cesspool", - "cetlics", "celtics", - "chambre", "chamber", - "charcol", "charcoal", - "charisa", "charisma", - "chasiss", "chassis", - "chatoic", "chaotic", - "cheeots", "cheetos", - "cheesse", "cheeses", - "chekcer", "checker", - "chelsae", "chelsea", - "cheslea", "chelsea", - "chiense", "chinese", - "childen", "children", - "chimeny", "chimney", - "chinees", "chinese", - "chinmey", "chimney", - "chipest", "chipset", - "chispet", "chipset", - "chivaly", "chivalry", - "chlesea", "chelsea", - "chnages", "changes", - "choatic", "chaotic", - "chocies", "choices", - "choosen", "chosen", - "chtulhu", "cthulhu", - "churchs", "churches", - "cilanto", "cilantro", - "cilents", "clients", - "circels", "circles", - "circuis", "circuits", - "cirlces", "circles", - "clacium", "calcium", - "claerer", "clearer", - "claerly", "clearly", - "clagary", "calgary", - "claibre", "calibre", - "claimes", "claims", - "clairfy", "clarify", - "clairty", "clarity", - "clanand", "clannad", - "clarfiy", "clarify", - "classis", "classics", - "clasues", "clauses", - "claymer", "claymore", - "claymoe", "claymore", - "cleanes", "cleanse", - "cleasne", "cleanse", - "cleints", "clients", - "clenase", "cleanse", - "clesius", "celsius", - "cletics", "celtics", - "clevery", "cleverly", - "climats", "climates", - "climbes", "climbers", - "clincis", "clinics", - "clitors", "clitoris", - "cloesly", "closely", - "closley", "closely", - "cluases", 
"clauses", - "cluprit", "culprit", - "coalese", "coalesce", - "coctail", "cocktail", - "cohesie", "cohesive", - "colgone", "cologne", - "collape", "collapse", - "collest", "collects", - "collony", "colony", - "collumn", "column", - "cologen", "cologne", - "colomba", "colombia", - "colonge", "cologne", - "colorao", "colorado", - "colourd", "coloured", - "columsn", "columns", - "comando", "commando", - "comapny", "company", - "comapre", "compare", - "comarde", "comrade", - "comback", "comeback", - "combins", "combines", - "comdeic", "comedic", - "comited", "committed", - "commano", "commando", - "commans", "commands", - "commere", "commerce", - "comming", "coming", - "commitd", "commited", - "compase", "compares", - "compede", "competed", - "compilr", "compiler", - "compnay", "company", - "compots", "compost", - "comrads", "comrades", - "comtpon", "compton", - "conceed", "concede", - "conceps", "concepts", - "conclue", "conclude", - "concret", "concert", - "condenm", "condemn", - "condiut", "conduit", - "condmen", "condemn", - "confids", "confides", - "confins", "confines", - "confise", "confines", - "conflit", "conflict", - "conived", "connived", - "connecs", "connects", - "conqeur", "conquer", - "conqure", "conquer", - "consept", "concept", - "consern", "concern", - "consums", "consumes", - "contacs", "contacts", - "contais", "contains", - "contast", "contacts", - "contemt", "contempt", - "contens", "contents", - "contess", "contests", - "contian", "contain", - "contine", "continue", - "convers", "converts", - "conveyd", "conveyed", - "convine", "convince", - "coprses", "corpses", - "coputer", "computer", - "corasir", "corsair", - "coratia", "croatia", - "coridal", "cordial", - "corsari", "corsair", - "corsiar", "corsair", - "corspes", "corpses", - "corwbar", "crowbar", - "costums", "costumes", - "coudlnt", "couldnt", - "coulmns", "columns", - "coulndt", "couldnt", - "counsle", "counsel", - "countes", "counters", - "courtey", "courtesy", - "covenat", "covenant", - "coytoes", "coyotes", - "crabine", "carbine", - "cralwed", "crawled", - "craotia", "croatia", - "craweld", "crawled", - "creamic", "ceramic", - "createn", "creatine", - "creater", "creature", - "creatie", "creatine", - "creatue", "creature", - "creepes", "creepers", - "creepig", "creeping", - "creulty", "cruelty", - "cricles", "circles", - "critera", "criteria", - "cropses", "corpses", - "crosair", "corsair", - "crpytic", "cryptic", - "crsytal", "crystal", - "crtical", "critical", - "crucibe", "crucible", - "cruetly", "cruelty", - "cruical", "crucial", - "crulety", "cruelty", - "crusdae", "crusade", - "crusier", "cruiser", - "crusies", "cruises", - "crusive", "cursive", - "crutchs", "crutches", - "crypitc", "cryptic", - "crystas", "crystals", - "crystsl", "crystals", - "crytpic", "cryptic", - "crytsal", "crystal", - "cthluhu", "cthulhu", - "cthuhlu", "cthulhu", - "cthuluh", "cthulhu", - "ctuhlhu", "cthulhu", - "cuasing", "causing", - "cubcile", "cubicle", - "cubilce", "cubicle", - "cuddels", "cuddles", - "culrpit", "culprit", - "culturs", "cultures", - "cupboad", "cupboard", - "cuplrit", "culprit", - "curatin", "curtain", - "curcial", "crucial", - "curcuit", "circuit", - "curelty", "cruelty", - "curiser", "cruiser", - "curisve", "cursive", - "currate", "curate", - "currens", "currents", - "curreny", "currency", - "currest", "currents", - "cursade", "crusade", - "curtian", "curtain", - "cyandie", "cyanide", - "cyclits", "cyclist", - "cycloen", "cyclone", - "cycolps", "cyclops", - "cylcist", "cyclist", - "cylcone", "cyclone", - 
"cylcops", "cyclops", - "cynaide", "cyanide", - "cyrptic", "cryptic", - "cyrstal", "crystal", - "dagners", "dangers", - "daimond", "diamond", - "damenor", "demeanor", - "dammage", "damage", - "darcula", "dracula", - "dargons", "dragons", - "darkets", "darkest", - "datbase", "database", - "daulity", "duality", - "dawrves", "dwarves", - "ddogers", "dodgers", - "ddoging", "dodging", - "deadlit", "deadlift", - "deadpol", "deadpool", - "deafult", "default", - "deahtly", "deathly", - "deatils", "details", - "deatlhy", "deathly", - "decalre", "declare", - "decison", "decision", - "declars", "declares", - "declase", "declares", - "decress", "decrees", - "decribe", "describe", - "decsend", "descend", - "dectect", "detect", - "defaint", "defiant", - "defauls", "defaults", - "defelct", "deflect", - "defensd", "defends", - "deffine", "define", - "definat", "defiant", - "definet", "definite", - "definie", "definite", - "definig", "defining", - "definit", "definite", - "defualt", "default", - "degarde", "degrade", - "degrase", "degrasse", - "degrate", "degrade", - "deiners", "deniers", - "deisgns", "designs", - "deivant", "deviant", - "dekstop", "desktop", - "delcare", "declare", - "delfect", "deflect", - "demenor", "demeanor", - "dementa", "dementia", - "demsond", "desmond", - "deneirs", "deniers", - "denisty", "density", - "densley", "densely", - "depcits", "depicts", - "dependd", "depended", - "depitcs", "depicts", - "deployd", "deployed", - "depsise", "despise", - "descrie", "describe", - "descuss", "discuss", - "desgins", "designs", - "desings", "designs", - "desitny", "destiny", - "desnely", "densely", - "desnity", "density", - "desomnd", "desmond", - "despict", "depict", - "despide", "despised", - "despies", "despise", - "destkop", "desktop", - "destory", "destroy", - "destros", "destroys", - "detaild", "detailed", - "detials", "details", - "detorit", "detroit", - "detriot", "detroit", - "deuling", "dueling", - "devaint", "deviant", - "devaite", "deviate", - "devided", "divided", - "devlove", "devolve", - "devotin", "devotion", - "devovle", "devolve", - "diabets", "diabetes", - "dialecs", "dialects", - "dialoge", "dialogue", - "diamons", "diamonds", - "diasble", "disable", - "dicksih", "dickish", - "dicover", "discover", - "dictats", "dictates", - "dieties", "deities", - "dilpoma", "diploma", - "dimaond", "diamond", - "dingity", "dignity", - "dinosar", "dinosaur", - "diosese", "diocese", - "dipolma", "diploma", - "dirbble", "dribble", - "directy", "directly", - "diretcx", "directx", - "dirived", "derived", - "dirvers", "drivers", - "disbale", "disable", - "disguss", "disgusts", - "disliks", "dislikes", - "disover", "discover", - "dispair", "despair", - "dispath", "dispatch", - "dispite", "despite", - "dispuse", "disputes", - "disputs", "disputes", - "dissole", "dissolve", - "distase", "distaste", - "distint", "distinct", - "divison", "division", - "docuhes", "douches", - "docuhey", "douchey", - "dogders", "dodgers", - "dogding", "dodging", - "dolhpin", "dolphin", - "dolphis", "dolphins", - "dominae", "dominate", - "dominno", "dominion", - "doplhin", "dolphin", - "dortmud", "dortmund", - "draclua", "dracula", - "dracual", "dracula", - "drakest", "darkest", - "dramtic", "dramatic", - "dribbel", "dribble", - "driectx", "directx", - "driftig", "drifting", - "drinkes", "drinkers", - "druming", "drumming", - "duailty", "duality", - "dualtiy", "duality", - "dubsetp", "dubstep", - "dulaity", "duality", - "duleing", "dueling", - "dunegon", "dungeon", - "dungeos", "dungeons", - "dungoen", "dungeon", - 
"durring", "during", - "dusbtep", "dubstep", - "dyansty", "dynasty", - "dynamis", "dynamics", - "dynsaty", "dynasty", - "earlies", "earliest", - "earliet", "earliest", - "earplus", "earplugs", - "eastwod", "eastwood", - "ebcuase", "becuase", - "ecilpse", "eclipse", - "eclipes", "eclipse", - "eclispe", "eclipse", - "eclpise", "eclipse", - "ectsasy", "ecstasy", - "edbiles", "edibles", - "edibels", "edibles", - "effords", "efforts", - "ehtanol", "ethanol", - "eifnach", "einfach", - "eighten", "eighteen", - "einfahc", "einfach", - "elasped", "elapsed", - "elcipse", "eclipse", - "elction", "election", - "elecrto", "electro", - "electic", "electric", - "electon", "election", - "ellitot", "elliott", - "elloitt", "elliott", - "elphant", "elephant", - "emabrgo", "embargo", - "emabssy", "embassy", - "emapthy", "empathy", - "embeded", "embedded", - "embrago", "embargo", - "eminate", "emanate", - "emipres", "empires", - "emision", "emission", - "emiting", "emitting", - "emition", "emission", - "emmited", "emitted", - "empahty", "empathy", - "emphsis", "emphasis", - "empiers", "empires", - "empited", "emptied", - "emplore", "employer", - "emporer", "emperor", - "empries", "empires", - "emtpied", "emptied", - "enameld", "enameled", - "encahnt", "enchant", - "encalve", "enclave", - "encrpyt", "encrypt", - "encyrpt", "encrypt", - "endores", "endorse", - "endrose", "endorse", - "energis", "energies", - "enforse", "enforces", - "enginer", "engineer", - "englsih", "english", - "enhanse", "enhances", - "enlcave", "enclave", - "enlgish", "english", - "enlsave", "enslave", - "ensalve", "enslave", - "entbook", "netbook", - "entirey", "entirety", - "entorpy", "entropy", - "epiloge", "epilogue", - "episdoe", "episode", - "epsiode", "episode", - "epsorts", "esports", - "eptiome", "epitome", - "equiped", "equipped", - "erested", "arrested", - "escapse", "escapes", - "escpaes", "escapes", - "esctasy", "ecstasy", - "esporst", "esports", - "espreso", "espresso", - "esprots", "esports", - "essense", "essence", - "etherel", "ethereal", - "ethnaol", "ethanol", - "euphora", "euphoria", - "europen", "european", - "eurpean", "european", - "everets", "everest", - "everset", "everest", - "evloved", "evolved", - "evloves", "evolves", - "evovled", "evolved", - "evovles", "evolves", - "exaclty", "exactly", - "exahust", "exhaust", - "examind", "examined", - "exapnds", "expands", - "exatled", "exalted", - "excange", "exchange", - "excatly", "exactly", - "excells", "excels", - "exceprt", "excerpt", - "excluse", "excludes", - "excrept", "excerpt", - "exculde", "exclude", - "exelent", "excellent", - "exemple", "example", - "exerpts", "excerpts", - "exhasut", "exhaust", - "exhuast", "exhaust", - "exising", "existing", - "existet", "existent", - "exlated", "exalted", - "exlcude", "exclude", - "exliled", "exiled", - "exludes", "excludes", - "exmaple", "example", - "exoitcs", "exotics", - "expalin", "explain", - "expeced", "expected", - "expells", "expels", - "expiers", "expires", - "explict", "explicit", - "expliot", "exploit", - "explods", "explodes", - "explose", "explodes", - "expolde", "explode", - "expolit", "exploit", - "exposse", "exposes", - "expries", "expires", - "exracts", "extracts", - "exsited", "existed", - "extered", "exerted", - "exterme", "extreme", - "extoics", "exotics", - "extreem", "extreme", - "extrems", "extremes", - "eyebals", "eyeballs", - "eyebros", "eyebrows", - "fabulos", "fabulous", - "facebok", "facebook", - "facepam", "facepalm", - "faclons", "falcons", - "facsism", "fascism", - "facsist", "fascist", - 
"failurs", "failures", - "faincee", "fiancee", - "falesly", "falsely", - "falired", "flaired", - "falshed", "flashed", - "falshes", "flashes", - "falsley", "falsely", - "falvors", "flavors", - "familes", "families", - "famoust", "famous", - "famousy", "famously", - "fanatsy", "fantasy", - "fantaic", "fanatic", - "faoming", "foaming", - "fascits", "fascist", - "fasicsm", "fascism", - "fasicst", "fascist", - "faslely", "falsely", - "fatiuge", "fatigue", - "febuary", "february", - "fecthed", "fetched", - "fecthes", "fetches", - "feminen", "feminine", - "feminie", "feminine", - "feminim", "feminism", - "feodras", "fedoras", - "fertily", "fertility", - "fesitve", "festive", - "fethced", "fetched", - "fethces", "fetches", - "fetishs", "fetishes", - "fianite", "finite", - "fianlly", "finally", - "fiercly", "fiercely", - "filcker", "flicker", - "filpped", "flipped", - "filterd", "filtered", - "finacee", "fiancee", - "fineses", "finesse", - "fininsh", "finnish", - "finishs", "finishes", - "finisse", "finishes", - "finnsih", "finnish", - "firends", "friends", - "firggin", "friggin", - "firsbee", "frisbee", - "firslty", "firstly", - "firtsly", "firstly", - "fitlers", "filters", - "flacons", "falcons", - "flahsed", "flashed", - "flahses", "flashes", - "flaried", "flaired", - "flasely", "falsely", - "flashig", "flashing", - "flavord", "flavored", - "flavous", "flavours", - "flawess", "flawless", - "flciker", "flicker", - "fliters", "filters", - "flordia", "florida", - "florene", "florence", - "fnaatic", "fanatic", - "fomaing", "foaming", - "fonetic", "phonetic", - "forefit", "forfeit", - "foregin", "foreign", - "foreing", "foreign", - "forfiet", "forfeit", - "forhead", "forehead", - "foriegn", "foreign", - "formaly", "formally", - "formery", "formerly", - "formost", "foremost", - "formual", "formula", - "formuls", "formulas", - "forrset", "forrest", - "forsakn", "forsaken", - "forsane", "forsaken", - "forumla", "formula", - "fountan", "fountain", - "fourten", "fourteen", - "fracter", "fracture", - "fragmet", "fragment", - "freedos", "freedoms", - "freinds", "friends", - "frigign", "friggin", - "fristly", "firstly", - "frostig", "frosting", - "frsibee", "frisbee", - "fruitin", "fruition", - "fullets", "fullest", - "fullset", "fullest", - "funides", "fundies", - "funtion", "function", - "furance", "furnace", - "furncae", "furnace", - "futhroc", "futhark", - "gadgest", "gadgets", - "gagdets", "gadgets", - "galatic", "galactic", - "galcier", "glacier", - "galsgow", "glasgow", - "gameply", "gameplay", - "gamerga", "gamertag", - "gankign", "ganking", - "ganster", "gangster", - "garabge", "garbage", - "garfied", "garfield", - "garnola", "granola", - "generas", "generals", - "genersl", "generals", - "geniuss", "geniuses", - "geogria", "georgia", - "geomety", "geometry", - "georiga", "georgia", - "gernade", "grenade", - "gerogia", "georgia", - "gigabye", "gigabyte", - "giltchy", "glitchy", - "gimmics", "gimmicks", - "gimmicy", "gimmicky", - "girzzly", "grizzly", - "glagsow", "glasgow", - "glaicer", "glacier", - "glicthy", "glitchy", - "glimpes", "glimpse", - "glimspe", "glimpse", - "glipmse", "glimpse", - "glitchd", "glitched", - "glitchs", "glitches", - "glithcy", "glitchy", - "globaly", "globally", - "gloiath", "goliath", - "glorios", "glorious", - "gltichy", "glitchy", - "gnaking", "ganking", - "gnawwed", "gnawed", - "goddanm", "goddamn", - "goddman", "goddamn", - "godliek", "godlike", - "godlman", "goldman", - "godsped", "godspeed", - "goergia", "georgia", - "goilath", "goliath", - "golaith", "goliath", - 
"golbins", "goblins", - "goldamn", "goldman", - "goldbeg", "goldberg", - "goldike", "godlike", - "golitah", "goliath", - "goodluk", "goodluck", - "gorumet", "gourmet", - "gosepls", "gospels", - "gosples", "gospels", - "gpysies", "gypsies", - "grabage", "garbage", - "grahpic", "graphic", - "grainte", "granite", - "grammer", "grammar", - "graniet", "granite", - "grantie", "granite", - "graphie", "graphite", - "graphis", "graphics", - "grappel", "grapple", - "greande", "grenade", - "grenads", "grenades", - "greneer", "greener", - "griaffe", "giraffe", - "gridles", "griddles", - "grillig", "grilling", - "grpahic", "graphic", - "guardin", "guardian", - "guiness", "guinness", - "gullibe", "gullible", - "gutiars", "guitars", - "gypises", "gypsies", - "gyspies", "gypsies", - "habaeus", "habeas", - "haethen", "heathen", - "hailfax", "halifax", - "halfiax", "halifax", - "handbok", "handbook", - "handedy", "handedly", - "handeld", "handled", - "hanlder", "handler", - "hannibl", "hannibal", - "hanuted", "haunted", - "haorder", "hoarder", - "hapened", "happened", - "happend", "happened", - "happliy", "happily", - "harased", "harassed", - "harases", "harasses", - "hardend", "hardened", - "hardwod", "hardwood", - "haricut", "haircut", - "hatchig", "hatching", - "hauntig", "haunting", - "haviest", "heaviest", - "headest", "headset", - "headses", "headsets", - "heaveny", "heavenly", - "heigher", "higher", - "heigths", "heights", - "helemts", "helmets", - "hellfie", "hellfire", - "hellvua", "helluva", - "helment", "helmet", - "helpped", "helped", - "hemlets", "helmets", - "henious", "heinous", - "heorics", "heroics", - "heorine", "heroine", - "heriocs", "heroics", - "herione", "heroine", - "herocis", "heroics", - "heronie", "heroine", - "hesiman", "heisman", - "hieghts", "heights", - "hienous", "heinous", - "hiesman", "heisman", - "himselv", "himself", - "hiptser", "hipster", - "hismelf", "himself", - "hispter", "hipster", - "hitboxs", "hitboxes", - "hoilday", "holiday", - "hokpins", "hopkins", - "holdiay", "holiday", - "holdins", "holdings", - "homniem", "hominem", - "horader", "hoarder", - "hosited", "hoisted", - "hosthot", "hotshot", - "hostles", "hostels", - "hostpot", "hotspot", - "hothsot", "hotshot", - "hotpsot", "hotspot", - "hotsopt", "hotspot", - "hounour", "honour", - "hseldon", "sheldon", - "huanted", "haunted", - "humanit", "humanist", - "humants", "humanist", - "humidiy", "humidity", - "humoros", "humorous", - "hunagry", "hungary", - "hunderd", "hundred", - "hundres", "hundreds", - "hungray", "hungary", - "hurdels", "hurdles", - "hurldes", "hurdles", - "husbans", "husbands", - "hweaton", "wheaton", - "hybirds", "hybrids", - "hydogen", "hydrogen", - "hygeine", "hygiene", - "hypnoss", "hypnosis", - "hyrbids", "hybrids", - "hystera", "hysteria", - "iceforg", "icefrog", - "ierland", "ireland", - "ignitin", "ignition", - "ignorat", "ignorant", - "illegas", "illegals", - "illegsl", "illegals", - "illinos", "illinois", - "imanent", "eminent", - "imapcts", "impacts", - "iminent", "eminent", - "imminet", "imminent", - "implict", "implicit", - "imploed", "implode", - "imploys", "employs", - "impluse", "impulse", - "impolde", "implode", - "importd", "imported", - "imporve", "improve", - "impules", "impulse", - "impusle", "impulse", - "imrpove", "improve", - "incldue", "include", - "incluse", "includes", - "indains", "indians", - "indeces", "indices", - "indiaan", "indiana", - "indluge", "indulge", - "indugle", "indulge", - "infalte", "inflate", - "infenro", "inferno", - "infered", "inferred", - 
"inferir", "inferior", - "infinet", "infinite", - "infinie", "infinite", - "infinit", "infinite", - "infornt", "infront", - "infroms", "informs", - "infrotn", "infront", - "inheirt", "inherit", - "inidans", "indians", - "initals", "initials", - "initisl", "initials", - "inlcine", "incline", - "inovker", "invoker", - "inpeach", "impeach", - "inpsect", "inspect", - "inpsire", "inspire", - "inquier", "inquire", - "inquriy", "inquiry", - "insaney", "insanely", - "inscets", "insects", - "insepct", "inspect", - "insipre", "inspire", - "insluts", "insults", - "instade", "instead", - "instint", "instinct", - "intenst", "intents", - "intered", "interred", - "interet", "interest", - "internt", "internet", - "interro", "interior", - "intrest", "interest", - "intrige", "intrigue", - "invlove", "involve", - "invoekr", "invoker", - "invovle", "involve", - "iornman", "ironman", - "iranain", "iranian", - "iranias", "iranians", - "iranina", "iranian", - "irleand", "ireland", - "ironamn", "ironman", - "isalmic", "islamic", - "isareli", "israeli", - "islamit", "islamist", - "islmaic", "islamic", - "isloate", "isolate", - "isralei", "israeli", - "isreali", "israeli", - "italias", "italians", - "jagaurs", "jaguars", - "jaguras", "jaguars", - "jamacia", "jamaica", - "jamaina", "jamaican", - "jamiaca", "jamaica", - "jamsine", "jasmine", - "janaury", "january", - "januray", "january", - "japanes", "japanese", - "jasmien", "jasmine", - "jaugars", "jaguars", - "jaunary", "january", - "jeircho", "jericho", - "jennins", "jennings", - "jeopary", "jeopardy", - "jeresys", "jerseys", - "jericoh", "jericho", - "jersyes", "jerseys", - "jewerly", "jewelry", - "jorunal", "journal", - "jounral", "journal", - "joystik", "joystick", - "juadism", "judaism", - "judasim", "judaism", - "judical", "judicial", - "juipter", "jupiter", - "junglig", "jungling", - "juptier", "jupiter", - "jusitfy", "justify", - "justfiy", "justify", - "karakoe", "karaoke", - "karoake", "karaoke", - "kenendy", "kennedy", - "kenndey", "kennedy", - "kentucy", "kentucky", - "keyboad", "keyboard", - "keychan", "keychain", - "keynode", "keynote", - "kicthen", "kitchen", - "killins", "killings", - "kineitc", "kinetic", - "kinghts", "knights", - "kinteic", "kinetic", - "kitches", "kitchens", - "kitites", "kitties", - "knietic", "kinetic", - "knigths", "knights", - "knuckel", "knuckle", - "kroeans", "koreans", - "krudish", "kurdish", - "ktichen", "kitchen", - "kubirck", "kubrick", - "kunckle", "knuckle", - "kurbick", "kubrick", - "kuridsh", "kurdish", - "laguage", "language", - "landins", "landings", - "lantren", "lantern", - "laready", "already", - "laregly", "largely", - "largley", "largely", - "lasanga", "lasagna", - "lasgana", "lasagna", - "latitue", "latitude", - "latnern", "lantern", - "launhed", "launched", - "lavendr", "lavender", - "leathal", "lethal", - "lefitst", "leftist", - "leftits", "leftist", - "legnths", "lengths", - "legnthy", "lengthy", - "legoins", "legions", - "leigons", "legions", - "lenghts", "lengths", - "lenoard", "leonard", - "lepoard", "leopard", - "lesbain", "lesbian", - "lesiban", "lesbian", - "lesiure", "leisure", - "liasion", "liaison", - "liasons", "liaisons", - "liberae", "liberate", - "liberas", "liberals", - "lienups", "lineups", - "liesure", "leisure", - "liftime", "lifetime", - "lighlty", "lightly", - "lightes", "lighters", - "ligthly", "lightly", - "linclon", "lincoln", - "linueps", "lineups", - "liqiuds", "liquids", - "lisence", "license", - "lisense", "license", - "listend", "listened", - "litecon", "litecoin", - "literae", 
"literate", - "lithuim", "lithium", - "litihum", "lithium", - "loadous", "loadouts", - "loenard", "leonard", - "loepard", "leopard", - "logiteh", "logitech", - "loosley", "loosely", - "luandry", "laundry", - "luckliy", "luckily", - "luicfer", "lucifer", - "lunatis", "lunatics", - "maching", "machine", - "machins", "machines", - "maclolm", "malcolm", - "macthup", "matchup", - "madsion", "madison", - "magents", "magnets", - "magicin", "magician", - "magolia", "magnolia", - "maidson", "madison", - "maintan", "maintain", - "mairlyn", "marilyn", - "malaira", "malaria", - "malaysa", "malaysia", - "malclom", "malcolm", - "manauls", "manuals", - "mandase", "mandates", - "mandats", "mandates", - "mangeld", "mangled", - "mangets", "magnets", - "manualy", "manually", - "manuver", "maneuver", - "marbels", "marbles", - "margart", "margaret", - "mariage", "marriage", - "mariens", "marines", - "maritan", "martian", - "marixsm", "marxism", - "mariyln", "marilyn", - "markede", "marketed", - "marlbes", "marbles", - "marliyn", "marilyn", - "marnies", "marines", - "marrage", "marriage", - "martail", "martial", - "martain", "martian", - "masacra", "mascara", - "massace", "massacre", - "mathcup", "matchup", - "mathwes", "mathews", - "matrial", "martial", - "maunals", "manuals", - "mcalren", "mclaren", - "meanins", "meanings", - "medicad", "medicaid", - "medicae", "medicare", - "medioce", "mediocre", - "meixcan", "mexican", - "meldoic", "melodic", - "melieux", "milieux", - "melodis", "melodies", - "memeber", "member", - "memoery", "memory", - "memorie", "memory", - "menally", "mentally", - "mentaly", "mentally", - "meoldic", "melodic", - "meranda", "veranda", - "merchat", "merchant", - "merucry", "mercury", - "messagd", "messaged", - "messaih", "messiah", - "metagem", "metagame", - "metalic", "metallic", - "mexcian", "mexican", - "michina", "michigan", - "midfied", "midfield", - "midotwn", "midtown", - "midtwon", "midtown", - "migrans", "migrants", - "militat", "militant", - "militis", "militias", - "miltary", "military", - "mimimum", "minimum", - "mineras", "minerals", - "mininos", "minions", - "ministr", "minister", - "ministy", "ministry", - "minoins", "minions", - "minstry", "ministry", - "minumum", "minimum", - "mirrord", "mirrored", - "misandy", "misandry", - "misison", "mission", - "misouri", "missouri", - "mispell", "misspell", - "missils", "missiles", - "mistery", "mystery", - "mobiliy", "mobility", - "modualr", "modular", - "momento", "memento", - "momment", "moment", - "monarcy", "monarchy", - "monatge", "montage", - "monglos", "mongols", - "monitos", "monitors", - "monstre", "monster", - "montaeg", "montage", - "montrel", "montreal", - "monumet", "monument", - "morbidy", "morbidly", - "morgage", "mortgage", - "morphen", "morphine", - "morphie", "morphine", - "morroco", "morocco", - "mortage", "mortgage", - "mosnter", "monster", - "mosture", "moisture", - "motivet", "motivate", - "motnage", "montage", - "motoral", "motorola", - "mountan", "mountain", - "movment", "movement", - "mucuous", "mucous", - "muesums", "museums", - "muliple", "multiple", - "mulsims", "muslims", - "multipe", "multiple", - "multipy", "multiply", - "munbers", "numbers", - "munchis", "munchies", - "murderd", "murdered", - "muscial", "musical", - "mushrom", "mushroom", - "musilms", "muslims", - "muslces", "muscles", - "musuems", "museums", - "mutatin", "mutation", - "mypsace", "myspace", - "mysapce", "myspace", - "napolen", "napoleon", - "narhwal", "narwhal", - "natique", "antique", - "nativey", "natively", - "natrual", "natural", 
- "naugthy", "naughty", - "nauseos", "nauseous", - "nautils", "nautilus", - "nautral", "natural", - "nautres", "natures", - "nectode", "netcode", - "needels", "needles", - "neruons", "neurons", - "neslave", "enslave", - "netocde", "netcode", - "netowrk", "network", - "netural", "neutral", - "neturon", "neutron", - "netwrok", "network", - "neurton", "neutron", - "neuterd", "neutered", - "nighlty", "nightly", - "nigthly", "nightly", - "nihilim", "nihilism", - "ninties", "1990s", - "niverse", "inverse", - "nocture", "nocturne", - "nominae", "nominate", - "nominet", "nominate", - "nonsene", "nonsense", - "noramls", "normals", - "norhern", "northern", - "normaly", "normally", - "normany", "normandy", - "northen", "northern", - "nostris", "nostrils", - "notario", "ontario", - "notebok", "notebook", - "nothern", "northern", - "nowdays", "nowadays", - "nrivana", "nirvana", - "nuaghty", "naughty", - "nubmers", "numbers", - "nucelar", "nuclear", - "nucelus", "nucleus", - "nuclean", "unclean", - "nuclues", "nucleus", - "nucular", "nuclear", - "nuerons", "neurons", - "nuetral", "neutral", - "nuetron", "neutron", - "nulcear", "nuclear", - "nullfiy", "nullify", - "nusance", "nuisance", - "nutriet", "nutrient", - "oarcles", "oracles", - "obivous", "obvious", - "obvoius", "obvious", - "ocarnia", "ocarina", - "ocasion", "occasion", - "occured", "occurred", - "ocotber", "october", - "ocotpus", "octopus", - "ocraina", "ocarina", - "ocuntry", "country", - "ocurred", "occurred", - "ofcoure", "ofcourse", - "offcers", "officers", - "offical", "official", - "offisde", "offside", - "oftenly", "often", - "ogrilla", "gorilla", - "olmypic", "olympic", - "olreans", "orleans", - "olympis", "olympics", - "olypmic", "olympic", - "omision", "omission", - "omiting", "omitting", - "omlette", "omelette", - "ommited", "omitted", - "onatrio", "ontario", - "onbaord", "onboard", - "onborad", "onboard", - "ontairo", "ontario", - "ontraio", "ontario", - "opartor", "operator", - "openess", "openness", - "opitcal", "optical", - "opitmal", "optimal", - "oponent", "opponent", - "oposite", "opposite", - "oppenly", "openly", - "opponet", "opponent", - "oprhans", "orphans", - "optimim", "optimism", - "oracels", "oracles", - "oragnes", "oranges", - "oragsms", "orgasms", - "oralces", "oracles", - "orbtial", "orbital", - "orcales", "oracles", - "orelans", "orleans", - "organes", "organise", - "organie", "organise", - "organim", "organism", - "orginal", "original", - "orhpans", "orphans", - "oribtal", "orbital", - "orlenas", "orleans", - "orpahns", "orphans", - "orthodx", "orthodox", - "outfied", "outfield", - "outsidr", "outsider", - "overhal", "overhaul", - "overpad", "overpaid", - "oversue", "overuse", - "overtun", "overturn", - "ownders", "wonders", - "owuldve", "wouldve", - "oylmpic", "olympic", - "pacakge", "package", - "pacifit", "pacifist", - "packade", "packaged", - "pacthes", "patches", - "pahntom", "phantom", - "paitent", "patient", - "palcebo", "placebo", - "pallete", "palette", - "palster", "plaster", - "palyboy", "playboy", - "pamflet", "pamphlet", - "pamplet", "pamphlet", - "pancaks", "pancakes", - "pandroa", "pandora", - "panthen", "pantheon", - "paradim", "paradigm", - "paradse", "parades", - "paralel", "parallel", - "paranoa", "paranoia", - "parises", "praises", - "parites", "parties", - "partice", "particle", - "partick", "patrick", - "partiel", "particle", - "partiot", "patriot", - "partols", "patrols", - "passabe", "passable", - "passivs", "passives", - "pasuing", "pausing", - "pateint", "patient", - "pathces", 
"patches", - "patiens", "patients", - "patirot", "patriot", - "patrcik", "patrick", - "patrios", "patriots", - "patroit", "patriot", - "peaples", "peoples", - "pebbels", "pebbles", - "peirced", "pierced", - "penatly", "penalty", - "pendulm", "pendulum", - "penguis", "penguins", - "penicls", "pencils", - "penison", "pension", - "penisse", "penises", - "penitum", "pentium", - "pensies", "penises", - "pensino", "pension", - "pentuim", "pentium", - "peopels", "peoples", - "percise", "precise", - "perdict", "predict", - "perfers", "prefers", - "perhasp", "perhaps", - "perhpas", "perhaps", - "perisan", "persian", - "perjery", "perjury", - "permade", "premade", - "permier", "premier", - "permise", "premise", - "permium", "premium", - "peroids", "periods", - "peronal", "personal", - "perpaid", "prepaid", - "perphas", "perhaps", - "persain", "persian", - "persets", "presets", - "persits", "persist", - "persued", "pursued", - "persuit", "pursuit", - "pervail", "prevail", - "perview", "preview", - "pharoah", "pharaoh", - "phatnom", "phantom", - "phsyics", "physics", - "phyiscs", "physics", - "physcis", "physics", - "physiqe", "physique", - "picthed", "pitched", - "picther", "pitcher", - "picthes", "pitches", - "piegons", "pigeons", - "piglrim", "pilgrim", - "pigoens", "pigeons", - "pilgirm", "pilgrim", - "pilrgim", "pilgrim", - "pinoeer", "pioneer", - "pinpoit", "pinpoint", - "pionere", "pioneer", - "pireced", "pierced", - "pithces", "pitches", - "plantes", "planets", - "plastis", "plastics", - "plastre", "plaster", - "plataeu", "plateau", - "plateua", "plateau", - "playabe", "playable", - "playofs", "playoffs", - "plesant", "pleasant", - "pligrim", "pilgrim", - "ploygon", "polygon", - "ploymer", "polymer", - "podemso", "podemos", - "podmeos", "podemos", - "poeples", "peoples", - "poignat", "poignant", - "poineer", "pioneer", - "pointes", "pointers", - "poisond", "poisoned", - "polgyon", "polygon", - "polical", "political", - "polishs", "polishes", - "polisse", "polishes", - "politey", "politely", - "poluted", "polluted", - "polutes", "pollutes", - "popluar", "popular", - "populer", "popular", - "populos", "populous", - "porpose", "propose", - "porshan", "portion", - "porshon", "portion", - "portait", "portrait", - "portary", "portray", - "portras", "portrays", - "portrat", "portrait", - "posions", "poisons", - "positon", "position", - "positve", "positive", - "possibe", "possible", - "possiby", "possibly", - "postdam", "potsdam", - "postion", "position", - "postive", "positive", - "potatos", "potatoes", - "potical", "optical", - "potrait", "portrait", - "powderd", "powdered", - "poweful", "powerful", - "poylgon", "polygon", - "poylmer", "polymer", - "practie", "practise", - "praisse", "praises", - "praries", "prairies", - "prasied", "praised", - "prasies", "praises", - "pratice", "practice", - "preamde", "premade", - "preceed", "precede", - "precice", "precise", - "preests", "presets", - "prehaps", "perhaps", - "preimer", "premier", - "preimum", "premium", - "preists", "priests", - "preivew", "preview", - "premeir", "premier", - "premiee", "premiere", - "premire", "premier", - "premits", "permits", - "premius", "premiums", - "premuim", "premium", - "prepair", "prepare", - "preriod", "period", - "presens", "presents", - "presest", "presets", - "presist", "persist", - "prestes", "presets", - "presude", "presumed", - "pretene", "pretense", - "pretens", "pretends", - "preveiw", "preview", - "prevert", "pervert", - "previal", "prevail", - "previes", "previews", - "previos", "previous", - "priased", 
"praised", - "priases", "praises", - "printes", "printers", - "pristen", "pristine", - "probabe", "probable", - "probaly", "probably", - "probelm", "problem", - "procede", "proceed", - "procees", "proceeds", - "procesd", "proceeds", - "proclam", "proclaim", - "produly", "proudly", - "produse", "produces", - "progidy", "prodigy", - "progrom", "pogrom", - "prohibt", "prohibit", - "prohpet", "prophet", - "prologe", "prologue", - "promose", "promotes", - "promots", "promotes", - "prompty", "promptly", - "promtps", "prompts", - "pronous", "pronouns", - "prooved", "proved", - "propeht", "prophet", - "prophey", "prophecy", - "propper", "proper", - "protals", "portals", - "protecs", "protects", - "protess", "protests", - "protocl", "protocol", - "protray", "portray", - "prouldy", "proudly", - "provded", "provided", - "provine", "province", - "prusuit", "pursuit", - "pryamid", "pyramid", - "pscyhed", "psyched", - "ptiched", "pitched", - "pticher", "pitcher", - "puasing", "pausing", - "publicy", "publicly", - "publsih", "publish", - "puhsups", "pushups", - "punishs", "punishes", - "punisse", "punishes", - "pursiut", "pursuit", - "pursude", "pursued", - "purused", "pursued", - "pushpus", "pushups", - "pyarmid", "pyramid", - "pyramis", "pyramids", - "pyrmaid", "pyramid", - "pysched", "psyched", - "qaulify", "qualify", - "qaulity", "quality", - "qauntum", "quantum", - "quailfy", "qualify", - "quailty", "quality", - "queires", "queries", - "queitly", "quietly", - "quereis", "queries", - "quicket", "quickest", - "quielty", "quietly", - "quitely", "quietly", - "qunatum", "quantum", - "qunetin", "quentin", - "racisst", "racists", - "racthet", "ratchet", - "radaint", "radiant", - "radiane", "radiance", - "radicas", "radicals", - "radiers", "raiders", - "raelism", "realism", - "raidant", "radiant", - "railrod", "railroad", - "rainbos", "rainbows", - "raoches", "roaches", - "raoming", "roaming", - "raptros", "raptors", - "raputre", "rapture", - "rathcet", "ratchet", - "ratpure", "rapture", - "reacing", "reaching", - "reagrds", "regards", - "realies", "realise", - "realsie", "realise", - "realsim", "realism", - "realtes", "relates", - "reamins", "remains", - "reapirs", "repairs", - "rebouns", "rebounds", - "rebulit", "rebuilt", - "recalim", "reclaim", - "receips", "receipts", - "recided", "resided", - "reciept", "receipt", - "recievd", "recieved", - "recieve", "receive", - "recitfy", "rectify", - "recived", "received", - "reclami", "reclaim", - "recliam", "reclaim", - "recorre", "recorder", - "recoves", "recovers", - "recpies", "recipes", - "redeemd", "redeemed", - "redners", "renders", - "refelct", "reflect", - "referal", "referral", - "refered", "referred", - "referig", "refering", - "referrs", "refers", - "reflexs", "reflexes", - "refrers", "refers", - "refroms", "reforms", - "refusla", "refusal", - "regerts", "regrets", - "regiems", "regimes", - "regimet", "regiment", - "registy", "registry", - "regluar", "regular", - "regrest", "regrets", - "regulae", "regulate", - "regulas", "regulars", - "regulsr", "regulars", - "reigmes", "regimes", - "reigons", "regions", - "reitres", "retires", - "reivews", "reviews", - "reknown", "renown", - "relaise", "realise", - "relapes", "relapse", - "relaspe", "relapse", - "relatie", "relative", - "relatin", "relation", - "relcaim", "reclaim", - "releive", "relieve", - "releses", "releases", - "relfect", "reflect", - "reliabe", "reliable", - "relient", "reliant", - "relized", "realised", - "relpase", "relapse", - "remaind", "remained", - "remaing", "remaining", - "remakrs", 
"remarks", - "remannt", "remnant", - "remeber", "remember", - "remians", "remains", - "remnans", "remnants", - "renderd", "rendered", - "renegae", "renegade", - "renmant", "remnant", - "rentors", "renters", - "rentres", "renters", - "renuion", "reunion", - "repaird", "repaired", - "repalys", "replays", - "repblic", "republic", - "repeast", "repeats", - "repects", "respects", - "repitle", "reptile", - "replase", "replaces", - "replayd", "replayed", - "reponse", "response", - "repostd", "reposted", - "repsawn", "respawn", - "repsond", "respond", - "repsots", "reposts", - "reptiel", "reptile", - "reptils", "reptiles", - "repubic", "republic", - "republi", "republic", - "repulic", "republic", - "reqiuem", "requiem", - "requeim", "requiem", - "requime", "requiem", - "requred", "required", - "resapwn", "respawn", - "rescuse", "rescues", - "resembe", "resemble", - "reslove", "resolve", - "resolvs", "resolves", - "resonet", "resonate", - "resouce", "resource", - "resovle", "resolve", - "respest", "respects", - "respone", "response", - "respwan", "respawn", - "ressits", "resists", - "restord", "restored", - "resuced", "rescued", - "resuces", "rescues", - "retrive", "retrieve", - "returnd", "returned", - "reuinon", "reunion", - "reveald", "revealed", - "reveiws", "reviews", - "revelas", "reveals", - "reveral", "reversal", - "reviere", "reviewer", - "reviewd", "reviewed", - "reviewr", "reviewer", - "revolvr", "revolver", - "revolvs", "revolves", - "rewirte", "rewrite", - "reworkd", "reworked", - "rewriet", "rewrite", - "reynols", "reynolds", - "rhapsoy", "rhapsody", - "rhythem", "rhythm", - "rhythim", "rhythm", - "rhytmic", "rhythmic", - "riaders", "raiders", - "ritlain", "ritalin", - "ritoers", "rioters", - "rivarly", "rivalry", - "rivlary", "rivalry", - "roahces", "roaches", - "robotis", "robotics", - "rococco", "rococo", - "roestta", "rosetta", - "roiters", "rioters", - "roleply", "roleplay", - "romaina", "romania", - "romaing", "roaming", - "romanin", "romanian", - "romanna", "romanian", - "roomate", "roommate", - "rotuers", "routers", - "rugters", "rutgers", - "rulebok", "rulebook", - "rumorus", "rumours", - "rumuors", "rumours", - "runnung", "running", - "ruslted", "rustled", - "russina", "russian", - "russion", "russian", - "rusteld", "rustled", - "rythmic", "rhythmic", - "rythyms", "rhythms", - "sacrasm", "sarcasm", - "saddnes", "saddens", - "sadistc", "sadistic", - "sadning", "sanding", - "salaris", "salaries", - "salavge", "salvage", - "salvery", "slavery", - "salying", "slaying", - "sampels", "samples", - "samruai", "samurai", - "samuari", "samurai", - "samuria", "samurai", - "sandlas", "sandals", - "sandnig", "sanding", - "sanlder", "sandler", - "santorm", "santorum", - "sapphie", "sapphire", - "sarcams", "sarcasm", - "sargant", "sergeant", - "sasuage", "sausage", - "satifsy", "satisfy", - "satsify", "satisfy", - "satsohi", "satoshi", - "savanha", "savannah", - "savannh", "savannah", - "saveing", "saving", - "sawnsea", "swansea", - "sawnson", "swanson", - "scandas", "scandals", - "scannig", "scanning", - "scartch", "scratch", - "scheems", "schemes", - "schoold", "schooled", - "sciense", "sciences", - "scinece", "science", - "scootes", "scooters", - "scorpin", "scorpion", - "scpeter", "scepter", - "scracth", "scratch", - "scrambe", "scramble", - "scritps", "scripts", - "scrolld", "scrolled", - "scrpits", "scripts", - "scyhter", "scyther", - "seached", "searched", - "seaches", "searches", - "seahaws", "seahawks", - "seantor", "senator", - "searchd", "searched", - "searchs", "searches", - 
"sebrian", "serbian", - "secerts", "secrets", - "secpter", "scepter", - "secrest", "secrets", - "secrety", "secretly", - "seflies", "selfies", - "seguoys", "segues", - "seinors", "seniors", - "selifes", "selfies", - "senoirs", "seniors", - "sensure", "censure", - "sentaor", "senator", - "sentris", "sentries", - "serbain", "serbian", - "sergeat", "sergeant", - "sergent", "sergeant", - "seriban", "serbian", - "servans", "servants", - "sesnors", "sensors", - "settins", "settings", - "severly", "severely", - "sexualy", "sexually", - "seziure", "seizure", - "shaddow", "shadow", - "shanghi", "shanghai", - "shaprie", "sharpie", - "shaprly", "sharply", - "sharipe", "sharpie", - "shcemes", "schemes", - "sheelpe", "sheeple", - "sheepel", "sheeple", - "shephed", "shepherd", - "sherlok", "sherlock", - "shetler", "shelter", - "shevles", "shelves", - "shfiter", "shifter", - "shieldd", "shielded", - "shiping", "shipping", - "shirely", "shirley", - "shitfer", "shifter", - "shledon", "sheldon", - "shleter", "shelter", - "shoudln", "should", - "shouldt", "shouldnt", - "shoutot", "shoutout", - "showede", "showered", - "showerd", "showered", - "shperes", "spheres", - "shriley", "shirley", - "siblins", "siblings", - "sidelen", "sideline", - "sideral", "sidereal", - "siezing", "seizing", - "siezure", "seizure", - "signfiy", "signify", - "signins", "signings", - "signles", "singles", - "silders", "sliders", - "silenty", "silently", - "similir", "similiar", - "simliar", "similar", - "simplet", "simplest", - "simpley", "simply", - "simplfy", "simplify", - "simpliy", "simplify", - "simposn", "simpson", - "simspon", "simpson", - "singals", "signals", - "singels", "singles", - "singify", "signify", - "singsog", "singsong", - "sitmuli", "stimuli", - "skecthy", "sketchy", - "skeletl", "skeletal", - "skeptis", "skeptics", - "sketchs", "sketches", - "sketpic", "skeptic", - "skpetic", "skeptic", - "sktechy", "sketchy", - "skwyard", "skyward", - "slavage", "salvage", - "slayign", "slaying", - "sldiers", "sliders", - "slefies", "selfies", - "slighly", "slightly", - "slighty", "slightly", - "slippes", "slippers", - "slippey", "slippery", - "smaples", "samples", - "smartre", "smarter", - "smaurai", "samurai", - "snadler", "sandler", - "snigles", "singles", - "snippes", "snippets", - "snodwen", "snowden", - "snwoden", "snowden", - "snycing", "syncing", - "snyergy", "synergy", - "socialy", "socially", - "sofware", "software", - "soildly", "solidly", - "soldies", "soldiers", - "soldily", "solidly", - "somaila", "somalia", - "someons", "someones", - "somethn", "somethin", - "southen", "southern", - "soveits", "soviets", - "spacebr", "spacebar", - "spainsh", "spanish", - "spansih", "spanish", - "spanwed", "spawned", - "sparkel", "sparkle", - "spartas", "spartans", - "spartsn", "spartans", - "sparyed", "sprayed", - "spawend", "spawned", - "spawnig", "spawning", - "specail", "special", - "specfic", "specific", - "specias", "specials", - "specisl", "specials", - "spectum", "spectrum", - "speechs", "speeches", - "spehres", "spheres", - "speical", "special", - "speices", "species", - "spellig", "spelling", - "spindel", "spindle", - "spiritd", "spirited", - "splaton", "splatoon", - "splittr", "splitter", - "spoiles", "spoilers", - "spoitfy", "spotify", - "spolied", "spoiled", - "sponser", "sponsor", - "sporles", "sproles", - "sporuts", "sprouts", - "spotfiy", "spotify", - "sprinke", "sprinkle", - "sproels", "sproles", - "spwaned", "spawned", - "sqaures", "squares", - "sqeuaky", "squeaky", - "sqiushy", "squishy", - "squarey", "squarely", 
- "squirel", "squirtle", - "squirle", "squirrel", - "squirrl", "squirrel", - "squirte", "squirtle", - "squsihy", "squishy", - "sriraca", "sriracha", - "srpouts", "sprouts", - "sryians", "syrians", - "sryinge", "syringe", - "stadius", "stadiums", - "staduim", "stadium", - "stagnat", "stagnant", - "staidum", "stadium", - "stakler", "stalker", - "stalkes", "stalkers", - "stamnia", "stamina", - "staoshi", "satoshi", - "starins", "strains", - "startde", "startled", - "startus", "startups", - "statits", "statist", - "statsit", "statist", - "statuer", "stature", - "statuse", "statutes", - "statuts", "statutes", - "stautes", "statues", - "stealty", "stealthy", - "steeles", "steelers", - "steorid", "steroid", - "steriel", "sterile", - "sterlie", "sterile", - "stickes", "stickers", - "stiring", "stirring", - "stirker", "striker", - "stirrig", "stirring", - "stitchs", "stitches", - "stlaker", "stalker", - "stlyish", "stylish", - "storeis", "stories", - "storise", "stories", - "stormde", "stormed", - "straigt", "straight", - "straind", "strained", - "streamd", "streamed", - "stregth", "strength", - "strengh", "strength", - "streoid", "steroid", - "stresss", "stresses", - "strians", "strains", - "stricty", "strictly", - "striekr", "striker", - "stromed", "stormed", - "stubbon", "stubborn", - "studing", "studying", - "stuidos", "studios", - "stunami", "tsunami", - "stupidr", "stupider", - "stupidy", "stupidly", - "stupire", "stupider", - "suasage", "sausage", - "subisdy", "subsidy", - "subjest", "subjects", - "subtiel", "subtitle", - "succede", "succeed", - "succeds", "succeeds", - "succees", "succeeds", - "succesd", "succeeds", - "suceeds", "succeeds", - "suddeny", "suddenly", - "suefull", "usefull", - "sufferd", "suffered", - "summonr", "summoner", - "summore", "summoner", - "sunggle", "snuggle", - "sunifre", "sunfire", - "superme", "supreme", - "suposed", "supposed", - "suposes", "supposes", - "suppoed", "supposed", - "suppost", "supports", - "suprass", "surpass", - "supress", "suppress", - "suprisd", "suprised", - "suprise", "surprise", - "suprize", "surprise", - "supsend", "suspend", - "suround", "surround", - "surpeme", "supreme", - "surroud", "surround", - "sweidsh", "swedish", - "swiflty", "swiftly", - "swiming", "swimming", - "switchs", "switches", - "switfly", "swiftly", - "swnasea", "swansea", - "sycning", "syncing", - "sycther", "scyther", - "syirans", "syrians", - "sykward", "skyward", - "syllabe", "syllable", - "symetry", "symmetry", - "symmety", "symmetry", - "symobls", "symbols", - "sympaty", "sympathy", - "symtpom", "symptom", - "synegry", "synergy", - "synoynm", "synonym", - "sypmtom", "symptom", - "syracue", "syracuse", - "syrains", "syrians", - "sysadmn", "sysadmin", - "systemc", "systemic", - "sytlish", "stylish", - "tabacco", "tobacco", - "tailban", "taliban", - "tailord", "tailored", - "talbian", "taliban", - "tallets", "tallest", - "tangeld", "tangled", - "tanlged", "tangled", - "targetd", "targeted", - "taryvon", "trayvon", - "teached", "taught", - "teaspon", "teaspoon", - "techeis", "techies", - "tehcies", "techies", - "temepst", "tempest", - "tempels", "temples", - "tempets", "tempest", - "templas", "templars", - "tempset", "tempest", - "tenacle", "tentacle", - "tendacy", "tendency", - "tequlia", "tequila", - "tesitfy", "testify", - "testice", "testicle", - "teusday", "tuesday", - "thankyu", "thankyou", - "thearpy", "therapy", - "theistc", "theistic", - "theives", "thieves", - "themsef", "themself", - "therefo", "thereof", - "therien", "therein", - "theroem", "theorem", - 
"thesits", "theists", - "thiests", "theists", - "thirldy", "thirdly", - "thirten", "thirteen", - "thirtsy", "thirsty", - "thoerem", "theorem", - "thorats", "throats", - "thornes", "thrones", - "thoruim", "thorium", - "thoughs", "thoughts", - "threadd", "threaded", - "threeof", "thereof", - "thridly", "thirdly", - "thristy", "thirsty", - "throast", "throats", - "throium", "thorium", - "thryoid", "thyroid", - "thyorid", "thyroid", - "thyriod", "thyroid", - "tigther", "tighter", - "tiolets", "toilets", - "tirdent", "trident", - "titanim", "titanium", - "tlaking", "talking", - "tobbaco", "tobacco", - "toliets", "toilets", - "tolkein", "tolkien", - "tomatos", "tomatoes", - "tongiht", "tonight", - "tonuges", "tongues", - "toppins", "toppings", - "torando", "tornado", - "torndao", "tornado", - "torpdeo", "torpedo", - "torrest", "torrents", - "tortila", "tortilla", - "toruney", "tourney", - "toubles", "troubles", - "touchda", "touchpad", - "tounrey", "tourney", - "tourisy", "touristy", - "tourits", "tourist", - "tournes", "tourneys", - "toursim", "tourism", - "toursit", "tourist", - "towords", "towards", - "trackes", "trackers", - "trailes", "trailers", - "traines", "trainers", - "trainig", "training", - "tralier", "trailer", - "tratior", "traitor", - "traveld", "traveled", - "travere", "traverse", - "travesy", "travesty", - "travles", "travels", - "treasue", "treasure", - "treatis", "treaties", - "tremelo", "tremolo", - "trendig", "trending", - "trialer", "trailer", - "triange", "triangle", - "triator", "traitor", - "trickey", "trickery", - "tridnet", "trident", - "trimuph", "triumph", - "trinkes", "trinkets", - "trinkst", "trinkets", - "trintiy", "trinity", - "triolgy", "trilogy", - "troleld", "trolled", - "troling", "trolling", - "tronado", "tornado", - "tropedo", "torpedo", - "trudnle", "trundle", - "truimph", "triumph", - "trukish", "turkish", - "trundel", "trundle", - "trunlde", "trundle", - "tryahrd", "tryhard", - "tryavon", "trayvon", - "tsamina", "stamina", - "tsnuami", "tsunami", - "tsuanmi", "tsunami", - "tsunmai", "tsunami", - "tuesdsy", "tuesdays", - "tunnles", "tunnels", - "turbins", "turbines", - "turksih", "turkish", - "turltes", "turtles", - "turrest", "turrets", - "turtels", "turtles", - "tuseday", "tuesday", - "tusnami", "tsunami", - "tutrles", "turtles", - "twiligt", "twilight", - "tyelnol", "tylenol", - "typcial", "typical", - "tyrhard", "tryhard", - "tyrrany", "tyranny", - "udpated", "updated", - "uesfull", "usefull", - "ugprade", "upgrade", - "ukarine", "ukraine", - "ukranie", "ukraine", - "ukriane", "ukraine", - "ultimae", "ultimate", - "umbrela", "umbrella", - "unahppy", "unhappy", - "unbannd", "unbanned", - "underog", "undergo", - "unfairy", "unfairly", - "ungoldy", "ungodly", - "unicors", "unicorns", - "uniquey", "uniquely", - "unknwon", "unknown", - "unkonwn", "unknown", - "unlcean", "unclean", - "unlcoks", "unlocks", - "unlcuky", "unlucky", - "unlikey", "unlikely", - "unopend", "unopened", - "unprone", "unproven", - "unusabe", "unusable", - "unworty", "unworthy", - "upgarde", "upgrade", - "upgrads", "upgrades", - "uplaods", "uploads", - "upsteam", "upstream", - "urainum", "uranium", - "uranuim", "uranium", - "uretrha", "urethra", - "urkaine", "ukraine", - "urnaium", "uranium", - "urugauy", "uruguay", - "usefull", "useful", - "usefuly", "usefully", - "utiltiy", "utility", - "utopain", "utopian", - "utpoian", "utopian", - "vaccins", "vaccines", - "vaccume", "vacuum", - "vageuly", "vaguely", - "vaguley", "vaguely", - "vairant", "variant", - "valenca", "valencia", - 
"valetta", "valletta", - "valkyre", "valkyrie", - "valuabe", "valuable", - "valuble", "valuable", - "vampirs", "vampires", - "vanguad", "vanguard", - "varaint", "variant", - "vareity", "variety", - "varians", "variants", - "varient", "variant", - "varisty", "varsity", - "varitey", "variety", - "varstiy", "varsity", - "vasalls", "vassals", - "vasslas", "vassals", - "vaugely", "vaguely", - "vecotrs", "vectors", - "vectros", "vectors", - "veitnam", "vietnam", - "veiwers", "viewers", - "vendeta", "vendetta", - "verbaly", "verbally", - "verical", "vertical", - "verious", "various", - "verison", "version", - "veritgo", "vertigo", - "versoin", "version", - "vertgio", "vertigo", - "vessles", "vessels", - "vetween", "between", - "viatmin", "vitamin", - "vibratr", "vibrator", - "vicitms", "victims", - "vientam", "vietnam", - "vigrins", "virgins", - "vikigns", "vikings", - "villian", "villain", - "villify", "vilify", - "virbate", "vibrate", - "virigns", "virgins", - "virtiol", "vitriol", - "virutal", "virtual", - "virutes", "virtues", - "visable", "visible", - "visably", "visibly", - "visbily", "visibly", - "visting", "visiting", - "vistors", "visitors", - "vitaliy", "vitality", - "vitamis", "vitamins", - "vitenam", "vietnam", - "vitirol", "vitriol", - "vitmain", "vitamin", - "vitroil", "vitriol", - "vitrual", "virtual", - "vitrues", "virtues", - "volatge", "voltage", - "volumne", "volume", - "votlage", "voltage", - "vrigins", "virgins", - "waclott", "walcott", - "wacther", "watcher", - "waitres", "waiters", - "waktins", "watkins", - "warcrat", "warcraft", - "wardobe", "wardrobe", - "wariwck", "warwick", - "warrany", "warranty", - "warrent", "warrant", - "warrios", "warriors", - "warwcik", "warwick", - "wathcer", "watcher", - "watiers", "waiters", - "waviers", "waivers", - "wawrick", "warwick", - "wayword", "wayward", - "webapge", "webpage", - "webiste", "website", - "webstie", "website", - "weigths", "weights", - "weilded", "wielded", - "weirldy", "weirdly", - "weirods", "weirdos", - "welathy", "wealthy", - "wendsay", "wednesday", - "wensday", "wednesday", - "wepbage", "webpage", - "weridly", "weirdly", - "weridos", "weirdos", - "werstle", "wrestle", - "wesbite", "website", - "whaeton", "wheaton", - "whipser", "whisper", - "whislte", "whistle", - "whistel", "whistle", - "whitsle", "whistle", - "whsiper", "whisper", - "wiaters", "waiters", - "wiavers", "waivers", - "widgest", "widgets", - "wieghts", "weights", - "wigdets", "widgets", - "windosr", "windsor", - "winnins", "winnings", - "winsdor", "windsor", - "wintson", "winston", - "wirting", "writing", - "wisnton", "winston", - "withces", "witches", - "witheld", "withheld", - "withing", "within", - "withold", "withhold", - "wlacott", "walcott", - "wokring", "working", - "workins", "workings", - "woudlnt", "wouldnt", - "woudlve", "wouldve", - "woulndt", "wouldnt", - "wreslte", "wrestle", - "wroking", "working", - "wtiches", "witches", - "wupport", "support", - "yaching", "yachting", - "younget", "youngest", - "youseff", "yousef", - "youself", "yourself", - "zaelots", "zealots", - "zealtos", "zealots", - "zelaots", "zealots", - "zelaous", "zealous", - "zimbabe", "zimbabwe", - "zionsim", "zionism", - "zionsit", "zionist", - "zoinism", "zionism", - "zoinist", "zionist", - "abbout", "about", - "abilty", "ability", - "absail", "abseil", - "abutts", "abuts", - "achive", "achieve", - "acused", "accused", - "addopt", "adopt", - "addres", "address", - "adress", "address", - "aeriel", "aerial", - "affort", "afford", - "agains", "against", - "aginst", "against", 
- "ahppen", "happen", - "aiport", "airport", - "aisian", "asian", - "albiet", "albeit", - "alchol", "alcohol", - "aledge", "allege", - "aleged", "alleged", - "allign", "align", - "almsot", "almost", - "alomst", "almost", - "alowed", "allowed", - "alwasy", "always", - "alwyas", "always", - "amking", "making", - "ammend", "amend", - "amoung", "among", - "aplied", "applied", - "appart", "apart", - "aquire", "acquire", - "aready", "already", - "arised", "arose", - "arival", "arrival", - "arrary", "array", - "artice", "article", - "asetic", "ascetic", - "asside", "aside", - "attemp", "attempt", - "attemt", "attempt", - "auther", "author", - "awared", "awarded", - "bedore", "before", - "beeing", "being", - "befoer", "before", - "beggin", "begin", - "beleif", "belief", - "belive", "believe", - "beteen", "between", - "betwen", "between", - "beween", "between", - "bianry", "binary", - "boyant", "buoyant", - "broady", "broadly", - "buddah", "buddha", - "buring", "burying", - "carcas", "carcass", - "casion", "caisson", - "casued", "caused", - "casues", "causes", - "ceasar", "caesar", - "cencus", "census", - "censur", "censor", - "cheifs", "chiefs", - "circut", "circuit", - "clasic", "classic", - "coform", "conform", - "comany", "company", - "coucil", "council", - "curent", "current", - "densly", "densely", - "deside", "decide", - "devels", "delves", - "devide", "divide", - "dieing", "dying", - "divice", "device", - "doulbe", "double", - "dreasm", "dreams", - "duting", "during", - "ealier", "earlier", - "eearly", "early", - "efford", "effort", - "emited", "emitted", - "emnity", "enmity", - "enduce", "induce", - "enlish", "english", - "erally", "orally", - "eratic", "erratic", - "ethose", "those", - "exampt", "exempt", - "excact", "exact", - "excell", "excel", - "exerpt", "excerpt", - "exinct", "extinct", - "expell", "expel", - "expoch", "epoch", - "extint", "extinct", - "facist", "fascist", - "faught", "fought", - "finaly", "finally", - "forsaw", "foresaw", - "fougth", "fought", - "fourty", "forty", - "foward", "forward", - "freind", "friend", - "fromed", "formed", - "fufill", "fulfill", - "futher", "further", - "gardai", "gardaí", - "geting", "getting", - "ghandi", "gandhi", - "glight", "flight", - "gloabl", "global", - "godess", "goddess", - "guilia", "giulia", - "guilio", "giulio", - "habeus", "habeas", - "harras", "harass", - "hatian", "haitian", - "heared", "heard", - "hertzs", "hertz", - "hieght", "height", - "higest", "highest", - "higway", "highway", - "honory", "honorary", - "howver", "however", - "hstory", "history", - "hunman", "human", - "husban", "husband", - "hvaing", "having", - "illess", "illness", - "ilness", "illness", - "imagin", "imagine", - "imense", "immense", - "includ", "include", - "inital", "initial", - "interm", "interim", - "intial", "initial", - "invlid", "invalid", - "iunior", "junior", - "jaques", "jacques", - "jospeh", "joseph", - "jouney", "journey", - "klenex", "kleenex", - "labled", "labelled", - "largst", "largest", - "larrry", "larry", - "lefted", "left", - "lenght", "length", - "lerans", "learns", - "liason", "liaison", - "libary", "library", - "lieing", "lying", - "lieved", "lived", - "littel", "little", - "livley", "lively", - "lonley", "lonely", - "mailny", "mainly", - "markes", "marks", - "mileau", "milieu", - "milion", "million", - "millon", "million", - "misile", "missile", - "missen", "mizzen", - "missle", "missile", - "mkaing", "making", - "moderm", "modem", - "moreso", "more", - "mounth", "month", - "myraid", "myriad", - "naieve", "naive", - "nestin", 
"nesting", - "nineth", "ninth", - "noveau", "nouveau", - "occour", "occur", - "occurr", "occur", - "offred", "offered", - "omited", "omitted", - "ouevre", "oeuvre", - "oxigen", "oxygen", - "p0enis", "penis", - "packge", "package", - "peaple", "people", - "pensle", "pencil", - "peopel", "people", - "peotry", "poetry", - "perade", "parade", - "persan", "person", - "persue", "pursue", - "plateu", "plateau", - "poenis", "penis", - "poisin", "poison", - "polute", "pollute", - "posess", "possess", - "posion", "poison", - "prairy", "prairie", - "prarie", "prairie", - "preiod", "period", - "privte", "private", - "proces", "process", - "proove", "prove", - "psuedo", "pseudo", - "psyhic", "psychic", - "pucini", "puccini", - "pumkin", "pumpkin", - "puting", "putting", - "pyscic", "psychic", - "quizes", "quizzes", - "quuery", "query", - "racaus", "raucous", - "radify", "ratify", - "raelly", "really", - "reacll", "recall", - "realyl", "really", - "reched", "reached", - "recide", "reside", - "recrod", "record", - "refect", "reflect", - "relaly", "really", - "renewl", "renewal", - "retuns", "returns", - "reveiw", "review", - "rhymme", "rhyme", - "rigeur", "rigueur", - "rocord", "record", - "rougly", "roughly", - "runing", "running", - "rythem", "rhythm", - "rythim", "rhythm", - "saftey", "safety", - "salery", "salary", - "satisy", "satisfy", - "satric", "satiric", - "saught", "sought", - "scince", "science", - "scirpt", "script", - "seceed", "succeed", - "seinor", "senior", - "sepina", "subpoena", - "sevice", "service", - "shamen", "shaman", - "sheild", "shield", - "shiped", "shipped", - "shorly", "shortly", - "shoudl", "should", - "shreak", "shriek", - "siezed", "seized", - "sixtin", "sistine", - "skiped", "skipped", - "sneeks", "sneaks", - "somene", "someone", - "soruce", "source", - "soudns", "sounds", - "sourth", "south", - "speach", "speech", - "spects", "aspects", - "spoace", "space", - "sqaure", "square", - "staion", "station", - "stange", "strange", - "stilus", "stylus", - "stirrs", "stirs", - "stopry", "story", - "strnad", "strand", - "studdy", "study", - "suceed", "succeed", - "sucess", "success", - "sucide", "suicide", - "sumary", "summary", - "suport", "support", - "supose", "suppose", - "surfce", "surface", - "surley", "surly", - "swaers", "swears", - "swepth", "swept", - "talekd", "talked", - "theese", "these", - "therby", "thereby", - "thigns", "things", - "thigsn", "things", - "thikns", "thinks", - "thiunk", "think", - "thnigs", "things", - "threee", "three", - "tkaing", "taking", - "tounge", "tongue", - "tourch", "torch", - "towrad", "toward", - "trafic", "traffic", - "troups", "troupes", - "truely", "truly", - "twelth", "twelfth", - "tyrany", "tyranny", - "unabel", "unable", - "unkown", "unknown", - "unmont", "unmount", - "unmout", "unmount", - "untill", "until", - "usally", "usually", - "useage", "usage", - "useing", "using", - "usualy", "usually", - "vaccum", "vacuum", - "variey", "variety", - "varing", "varying", - "varity", "variety", - "vasall", "vassal", - "vigeur", "vigueur", - "villin", "villain", - "vreity", "variety", - "vriety", "variety", - "whants", "wants", - "wheras", "whereas", - "wheter", "whether", - "wholey", "wholly", - "whther", "whether", - "wnated", "wanted", - "writen", "written", - "yaerly", "yearly", - "yotube", "youtube", - "zeebra", "zebra", - "abotu", "about", - "adres", "address", - "afair", "affair", - "agian", "again", - "agina", "again", - "agred", "agreed", - "alege", "allege", - "alsot", "also", - "altho", "although", - "amung", "among", - "anual", 
"annual", - "aroud", "around", - "arund", "around", - "asign", "assign", - "assit", "assist", - "asume", "assume", - "atain", "attain", - "autor", "author", - "baout", "about", - "blaim", "blame", - "boaut", "bout", - "boook", "book", - "borke", "broke", - "breif", "brief", - "caost", "coast", - "casue", "cause", - "chasr", "chaser", - "cheif", "chief", - "chuch", "church", - "claer", "clear", - "clera", "clear", - "coudl", "could", - "crowm", "crown", - "deram", "dram", - "diety", "deity", - "doens", "does", - "doign", "doing", - "donig", "doing", - "drnik", "drink", - "durig", "during", - "earnt", "earned", - "eigth", "eighth", - "eiter", "either", - "emtpy", "empty", - "endig", "ending", - "eveyr", "every", - "exept", "except", - "eyars", "years", - "eyasr", "years", - "fiels", "fields", - "firts", "flirts", - "fleed", "fled", - "fomed", "formed", - "foucs", "focus", - "foudn", "found", - "fouth", "fourth", - "frome", "from", - "ganes", "games", - "gaurd", "guard", - "gerat", "great", - "gogin", "going", - "goign", "going", - "gonig", "going", - "graet", "great", - "greif", "grief", - "gropu", "group", - "guage", "gauge", - "hapen", "happen", - "herad", "heard", - "heroe", "hero", - "higer", "higher", - "housr", "hours", - "htere", "there", - "htikn", "think", - "hting", "thing", - "htink", "think", - "hwihc", "which", - "hwile", "while", - "hwole", "whole", - "idaes", "ideas", - "idesa", "ideas", - "ihaca", "ithaca", - "knwos", "knows", - "konws", "knows", - "lastr", "last", - "lavae", "larvae", - "layed", "laid", - "leage", "league", - "leanr", "lean", - "leran", "learn", - "levle", "level", - "lible", "libel", - "liekd", "liked", - "liuke", "like", - "lmits", "limits", - "lonly", "lonely", - "lukid", "likud", - "lybia", "libya", - "maked", "marked", - "makse", "makes", - "mamal", "mammal", - "mileu", "milieu", - "mkaes", "makes", - "modle", "model", - "moent", "moment", - "moeny", "money", - "monts", "months", - "movei", "movie", - "muder", "murder", - "mysef", "myself", - "neice", "niece", - "ninty", "ninety", - "ocurr", "occur", - "oging", "going", - "opose", "oppose", - "orded", "ordered", - "orgin", "origin", - "otehr", "other", - "ouput", "output", - "owudl", "would", - "paide", "paid", - "palce", "place", - "pased", "passed", - "payed", "paid", - "peice", "piece", - "peoms", "poems", - "poety", "poetry", - "pwoer", "power", - "qtuie", "quite", - "qutie", "quite", - "realy", "really", - "repid", "rapid", - "rised", "raised", - "rulle", "rule", - "rwite", "write", - "rythm", "rhythm", - "safty", "safety", - "scoll", "scroll", - "seach", "search", - "seige", "siege", - "seing", "seeing", - "sence", "sense", - "sicne", "since", - "sieze", "seize", - "sinse", "sines", - "slowy", "slowly", - "snese", "sneeze", - "soley", "solely", - "sotry", "story", - "sotyr", "satyr", - "soudn", "sound", - "sould", "could", - "spred", "spread", - "stlye", "style", - "stong", "strong", - "stoyr", "story", - "strat", "start", - "stroy", "story", - "suppy", "supply", - "swaer", "swear", - "syrap", "syrup", - "sytem", "system", - "sytle", "style", - "tatoo", "tattoo", - "thast", "that", - "theif", "thief", - "theri", "their", - "thgat", "that", - "thier", "their", - "thign", "thing", - "thikn", "think", - "thnig", "thing", - "thrid", "third", - "thsoe", "those", - "thyat", "that", - "tihkn", "think", - "timne", "time", - "tiome", "time", - "tkaes", "takes", - "todya", "today", - "tyhat", "that", - "unsed", "used", - "weild", "wield", - "whant", "want", - "whcih", "which", - "whihc", "which", - "whith", 
"with", - "whlch", "which", - "wholy", "wholly", - "wierd", "weird", - "wille", "will", - "willk", "will", - "withh", "with", - "witht", "with", - "wiull", "will", - "wnats", "wants", - "wohle", "whole", - "worls", "world", - "woudl", "would", - "wriet", "write", - "wroet", "wrote", - "yaers", "years", - "yatch", "yacht", - "yearm", "year", - "yeasr", "years", - "yeild", "yield", - "yeras", "years", - "yersa", "years", - "agin", "again", - "agre", "agree", - "ahev", "have", - "ahve", "have", - "alse", "else", - "amke", "make", - "anbd", "and", - "andd", "and", - "apon", "upon", - "aslo", "also", - "awya", "away", - "bakc", "back", - "bcak", "back", - "clas", "class", - "cpoy", "coy", - "cxan", "cyan", - "daed", "dead", - "dael", "deal", - "diea", "idea", - "doub", "doubt", - "dyas", "dryas", - "eahc", "each", - "efel", "evil", - "eles", "eels", - "ened", "need", - "enxt", "next", - "esle", "else", - "eyar", "year", - "fatc", "fact", - "fidn", "find", - "fomr", "from", - "grwo", "grow", - "haev", "have", - "halp", "help", - "holf", "hold", - "hten", "then", - "htey", "they", - "htis", "this", - "hvae", "have", - "hvea", "have", - "inot", "into", - "iwll", "will", - "iwth", "with", - "jstu", "just", - "jsut", "just", - "knwo", "know", - "konw", "know", - "kwno", "know", - "liek", "like", - "loev", "love", - "lveo", "love", - "lvoe", "love", - "mkae", "make", - "mkea", "make", - "mroe", "more", - "nkow", "know", - "nkwo", "know", - "nmae", "name", - "noth", "north", - "nowe", "now", - "omre", "more", - "onot", "note", - "onyl", "only", - "owrk", "work", - "peom", "poem", - "pich", "pitch", - "rela", "real", - "sasy", "says", - "smae", "same", - "smoe", "some", - "soem", "some", - "sohw", "show", - "stpo", "stop", - "suop", "soup", - "syas", "says", - "tahn", "than", - "taht", "that", - "tast", "taste", - "tath", "that", - "tehy", "they", - "tghe", "the", - "ther", "there", - "thge", "the", - "thna", "than", - "thne", "then", - "thsi", "this", - "thta", "that", - "tiem", "time", - "tihs", "this", - "tjhe", "the", - "tkae", "take", - "tood", "todo", - "tust", "trust", - "twon", "town", - "twpo", "two", - "tyhe", "they", - "uise", "use", - "vell", "well", - "veyr", "very", - "vrey", "very", - "vyer", "very", - "vyre", "very", - "waht", "what", - "wass", "was", - "watn", "want", - "weas", "was", - "wehn", "when", - "whic", "which", - "whta", "what", - "wich", "which", - "wief", "wife", - "wiew", "view", - "wiht", "with", - "witn", "with", - "wnat", "want", - "wokr", "work", - "wrok", "work", - "wtih", "with", - "yaer", "year", - "yera", "year", - "yrea", "year", - "ytou", "you", - "adn", "and", - "ect", "etc", - "nto", "not", - "teh", "the", - "thn", "then", - "tje", "the", - "whn", "when", - "wih", "with", - "yuo", "you", -} - -// DictAmerican converts UK spellings to US spellings -var DictAmerican = []string{ - "institutionalisation", "institutionalization", - "internationalisation", "internationalization", - "professionalisation", "professionalization", - "compartmentalising", "compartmentalizing", - "institutionalising", "institutionalizing", - "internationalising", "internationalizing", - "compartmentalised", "compartmentalized", - "compartmentalises", "compartmentalizes", - "decriminalisation", "decriminalization", - "denationalisation", "denationalization", - "fictionalisations", "fictionalizations", - "institutionalised", "institutionalized", - "institutionalises", "institutionalizes", - "intellectualising", "intellectualizing", - "internationalised", "internationalized", - 
"internationalises", "internationalizes", - "pedestrianisation", "pedestrianization", - "professionalising", "professionalizing", - "archaeologically", "archeologically", - "compartmentalise", "compartmentalize", - "decentralisation", "decentralization", - "demilitarisation", "demilitarization", - "externalisations", "externalizations", - "fictionalisation", "fictionalization", - "institutionalise", "institutionalize", - "intellectualised", "intellectualized", - "intellectualises", "intellectualizes", - "internationalise", "internationalize", - "nationalisations", "nationalizations", - "palaeontologists", "paleontologists", - "professionalised", "professionalized", - "professionalises", "professionalizes", - "rationalisations", "rationalizations", - "sensationalising", "sensationalizing", - "sentimentalising", "sentimentalizing", - "acclimatisation", "acclimatization", - "bougainvillaeas", "bougainvilleas", - "commercialising", "commercializing", - "conceptualising", "conceptualizing", - "contextualising", "contextualizing", - "crystallisation", "crystallization", - "decriminalising", "decriminalizing", - "democratisation", "democratization", - "denationalising", "denationalizing", - "depersonalising", "depersonalizing", - "desensitisation", "desensitization", - "destabilisation", "destabilization", - "disorganisation", "disorganization", - "extemporisation", "extemporization", - "externalisation", "externalization", - "familiarisation", "familiarization", - "generalisations", "generalizations", - "hospitalisation", "hospitalization", - "individualising", "individualizing", - "industrialising", "industrializing", - "intellectualise", "intellectualize", - "internalisation", "internalization", - "manoeuvrability", "maneuverability", - "marginalisation", "marginalization", - "materialisation", "materialization", - "miniaturisation", "miniaturization", - "nationalisation", "nationalization", - "neighbourliness", "neighborliness", - "overemphasising", "overemphasizing", - "palaeontologist", "paleontologist", - "particularising", "particularizing", - "pedestrianising", "pedestrianizing", - "professionalise", "professionalize", - "psychoanalysing", "psychoanalyzing", - "rationalisation", "rationalization", - "reorganisations", "reorganizations", - "revolutionising", "revolutionizing", - "sensationalised", "sensationalized", - "sensationalises", "sensationalizes", - "sentimentalised", "sentimentalized", - "sentimentalises", "sentimentalizes", - "specialisations", "specializations", - "standardisation", "standardization", - "synchronisation", "synchronization", - "systematisation", "systematization", - "aggrandisement", "aggrandizement", - "anaesthetising", "anesthetizing", - "archaeological", "archeological", - "archaeologists", "archeologists", - "bougainvillaea", "bougainvillea", - "characterising", "characterizing", - "collectivising", "collectivizing", - "commercialised", "commercialized", - "commercialises", "commercializes", - "conceptualised", "conceptualized", - "conceptualises", "conceptualizes", - "contextualised", "contextualized", - "contextualises", "contextualizes", - "decentralising", "decentralizing", - "decriminalised", "decriminalized", - "decriminalises", "decriminalizes", - "dehumanisation", "dehumanization", - "demilitarising", "demilitarizing", - "demobilisation", "demobilization", - "demoralisation", "demoralization", - "denationalised", "denationalized", - "denationalises", "denationalizes", - "depersonalised", "depersonalized", - "depersonalises", "depersonalizes", - 
"disembowelling", "disemboweling", - "dramatisations", "dramatizations", - "editorialising", "editorializing", - "encyclopaedias", "encyclopedias", - "fictionalising", "fictionalizing", - "fraternisation", "fraternization", - "generalisation", "generalization", - "gynaecological", "gynecological", - "gynaecologists", "gynecologists", - "haematological", "hematological", - "haematologists", "hematologists", - "immobilisation", "immobilization", - "individualised", "individualized", - "individualises", "individualizes", - "industrialised", "industrialized", - "industrialises", "industrializes", - "liberalisation", "liberalization", - "monopolisation", "monopolization", - "naturalisation", "naturalization", - "neighbourhoods", "neighborhoods", - "neutralisation", "neutralization", - "organisational", "organizational", - "outmanoeuvring", "outmaneuvering", - "overemphasised", "overemphasized", - "overemphasises", "overemphasizes", - "paediatricians", "pediatricians", - "particularised", "particularized", - "particularises", "particularizes", - "pasteurisation", "pasteurization", - "pedestrianised", "pedestrianized", - "pedestrianises", "pedestrianizes", - "philosophising", "philosophizing", - "politicisation", "politicization", - "popularisation", "popularization", - "pressurisation", "pressurization", - "prioritisation", "prioritization", - "privatisations", "privatizations", - "propagandising", "propagandizing", - "psychoanalysed", "psychoanalyzed", - "psychoanalyses", "psychoanalyzes", - "regularisation", "regularization", - "reorganisation", "reorganization", - "revolutionised", "revolutionized", - "revolutionises", "revolutionizes", - "secularisation", "secularization", - "sensationalise", "sensationalize", - "sentimentalise", "sentimentalize", - "serialisations", "serializations", - "specialisation", "specialization", - "sterilisations", "sterilizations", - "stigmatisation", "stigmatization", - "transistorised", "transistorized", - "unrecognisable", "unrecognizable", - "visualisations", "visualizations", - "westernisation", "westernization", - "accessorising", "accessorizing", - "acclimatising", "acclimatizing", - "amortisations", "amortizations", - "amphitheatres", "amphitheaters", - "anaesthetised", "anesthetized", - "anaesthetises", "anesthetizes", - "anaesthetists", "anesthetists", - "archaeologist", "archeologist", - "backpedalling", "backpedaling", - "behaviourists", "behaviorists", - "breathalysers", "breathalyzers", - "breathalysing", "breathalyzing", - "callisthenics", "calisthenics", - "cannibalising", "cannibalizing", - "characterised", "characterized", - "characterises", "characterizes", - "circularising", "circularizing", - "clarinettists", "clarinetists", - "collectivised", "collectivized", - "collectivises", "collectivizes", - "commercialise", "commercialize", - "computerising", "computerizing", - "conceptualise", "conceptualize", - "contextualise", "contextualize", - "criminalising", "criminalizing", - "crystallising", "crystallizing", - "decentralised", "decentralized", - "decentralises", "decentralizes", - "decriminalise", "decriminalize", - "demilitarised", "demilitarized", - "demilitarises", "demilitarizes", - "democratising", "democratizing", - "denationalise", "denationalize", - "depersonalise", "depersonalize", - "desensitising", "desensitizing", - "destabilising", "destabilizing", - "disembowelled", "disemboweled", - "dishonourable", "dishonorable", - "dishonourably", "dishonorably", - "dramatisation", "dramatization", - "editorialised", "editorialized", - 
"editorialises", "editorializes", - "encyclopaedia", "encyclopedia", - "encyclopaedic", "encyclopedic", - "extemporising", "extemporizing", - "externalising", "externalizing", - "familiarising", "familiarizing", - "fertilisation", "fertilization", - "fictionalised", "fictionalized", - "fictionalises", "fictionalizes", - "formalisation", "formalization", - "fossilisation", "fossilization", - "globalisation", "globalization", - "gynaecologist", "gynecologist", - "haematologist", "hematologist", - "haemophiliacs", "hemophiliacs", - "haemorrhaging", "hemorrhaging", - "harmonisation", "harmonization", - "hospitalising", "hospitalizing", - "hypothesising", "hypothesizing", - "immortalising", "immortalizing", - "individualise", "individualize", - "industrialise", "industrialize", - "internalising", "internalizing", - "marginalising", "marginalizing", - "materialising", "materializing", - "mechanisation", "mechanization", - "memorialising", "memorializing", - "miniaturising", "miniaturizing", - "miscatalogued", "miscataloged", - "misdemeanours", "misdemeanors", - "multicoloured", "multicolored", - "nationalising", "nationalizing", - "neighbourhood", "neighborhood", - "normalisation", "normalization", - "organisations", "organizations", - "outmanoeuvred", "outmaneuvered", - "outmanoeuvres", "outmaneuvers", - "overemphasise", "overemphasize", - "paediatrician", "pediatrician", - "palaeontology", "paleontology", - "particularise", "particularize", - "passivisation", "passivization", - "patronisingly", "patronizingly", - "pedestrianise", "pedestrianize", - "personalising", "personalizing", - "philosophised", "philosophized", - "philosophises", "philosophizes", - "privatisation", "privatization", - "propagandised", "propagandized", - "propagandises", "propagandizes", - "proselytisers", "proselytizers", - "proselytising", "proselytizing", - "psychoanalyse", "psychoanalyze", - "pulverisation", "pulverization", - "rationalising", "rationalizing", - "reconnoitring", "reconnoitering", - "revolutionise", "revolutionize", - "romanticising", "romanticizing", - "serialisation", "serialization", - "socialisation", "socialization", - "stabilisation", "stabilization", - "standardising", "standardizing", - "sterilisation", "sterilization", - "subsidisation", "subsidization", - "synchronising", "synchronizing", - "systematising", "systematizing", - "tantalisingly", "tantalizingly", - "underutilised", "underutilized", - "victimisation", "victimization", - "visualisation", "visualization", - "vocalisations", "vocalizations", - "vulgarisation", "vulgarization", - "accessorised", "accessorized", - "accessorises", "accessorizes", - "acclimatised", "acclimatized", - "acclimatises", "acclimatizes", - "amortisation", "amortization", - "amphitheatre", "amphitheater", - "anaesthetics", "anesthetics", - "anaesthetise", "anesthetize", - "anaesthetist", "anesthetist", - "antagonising", "antagonizing", - "appetisingly", "appetizingly", - "backpedalled", "backpedaled", - "bastardising", "bastardizing", - "behaviourism", "behaviorism", - "behaviourist", "behaviorist", - "bowdlerising", "bowdlerizing", - "breathalysed", "breathalyzed", - "breathalyser", "breathalyzer", - "breathalyses", "breathalyzes", - "cannibalised", "cannibalized", - "cannibalises", "cannibalizes", - "capitalising", "capitalizing", - "caramelising", "caramelizing", - "categorising", "categorizing", - "centigrammes", "centigrams", - "centralising", "centralizing", - "centrepieces", "centerpieces", - "characterise", "characterize", - "circularised", 
"circularized", - "circularises", "circularizes", - "clarinettist", "clarinetist", - "collectivise", "collectivize", - "colonisation", "colonization", - "computerised", "computerized", - "computerises", "computerizes", - "criminalised", "criminalized", - "criminalises", "criminalizes", - "crystallised", "crystallized", - "crystallises", "crystallizes", - "decentralise", "decentralize", - "dehumanising", "dehumanizing", - "demilitarise", "demilitarize", - "demobilising", "demobilizing", - "democratised", "democratized", - "democratises", "democratizes", - "demoralising", "demoralizing", - "desensitised", "desensitized", - "desensitises", "desensitizes", - "destabilised", "destabilized", - "destabilises", "destabilizes", - "discolouring", "discoloring", - "dishonouring", "dishonoring", - "disorganised", "disorganized", - "editorialise", "editorialize", - "endeavouring", "endeavoring", - "equalisation", "equalization", - "evangelising", "evangelizing", - "extemporised", "extemporized", - "extemporises", "extemporizes", - "externalised", "externalized", - "externalises", "externalizes", - "familiarised", "familiarized", - "familiarises", "familiarizes", - "fictionalise", "fictionalize", - "finalisation", "finalization", - "fraternising", "fraternizing", - "generalising", "generalizing", - "haemophiliac", "hemophiliac", - "haemorrhaged", "hemorrhaged", - "haemorrhages", "hemorrhages", - "haemorrhoids", "hemorrhoids", - "homoeopathic", "homeopathic", - "homogenising", "homogenizing", - "hospitalised", "hospitalized", - "hospitalises", "hospitalizes", - "hypothesised", "hypothesized", - "hypothesises", "hypothesizes", - "idealisation", "idealization", - "immobilisers", "immobilizers", - "immobilising", "immobilizing", - "immortalised", "immortalized", - "immortalises", "immortalizes", - "immunisation", "immunization", - "initialising", "initializing", - "internalised", "internalized", - "internalises", "internalizes", - "jeopardising", "jeopardizing", - "legalisation", "legalization", - "legitimising", "legitimizing", - "liberalising", "liberalizing", - "manoeuvrable", "maneuverable", - "manoeuvrings", "maneuverings", - "marginalised", "marginalized", - "marginalises", "marginalizes", - "marvellously", "marvelously", - "materialised", "materialized", - "materialises", "materializes", - "maximisation", "maximization", - "memorialised", "memorialized", - "memorialises", "memorializes", - "metabolising", "metabolizing", - "militarising", "militarizing", - "milligrammes", "milligrams", - "miniaturised", "miniaturized", - "miniaturises", "miniaturizes", - "misbehaviour", "misbehavior", - "misdemeanour", "misdemeanor", - "mobilisation", "mobilization", - "moisturisers", "moisturizers", - "moisturising", "moisturizing", - "monopolising", "monopolizing", - "moustachioed", "mustachioed", - "nationalised", "nationalized", - "nationalises", "nationalizes", - "naturalising", "naturalizing", - "neighbouring", "neighboring", - "neutralising", "neutralizing", - "oesophaguses", "esophaguses", - "organisation", "organization", - "orthopaedics", "orthopedics", - "outmanoeuvre", "outmaneuver", - "palaeolithic", "paleolithic", - "pasteurising", "pasteurizing", - "personalised", "personalized", - "personalises", "personalizes", - "philosophise", "philosophize", - "plagiarising", "plagiarizing", - "ploughshares", "plowshares", - "polarisation", "polarization", - "politicising", "politicizing", - "popularising", "popularizing", - "pressurising", "pressurizing", - "prioritising", "prioritizing", - "propagandise", 
"propagandize", - "proselytised", "proselytized", - "proselytiser", "proselytizer", - "proselytises", "proselytizes", - "radicalising", "radicalizing", - "rationalised", "rationalized", - "rationalises", "rationalizes", - "realisations", "realizations", - "recognisable", "recognizable", - "recognisably", "recognizably", - "recognisance", "recognizance", - "reconnoitred", "reconnoitered", - "reconnoitres", "reconnoiters", - "regularising", "regularizing", - "reorganising", "reorganizing", - "revitalising", "revitalizing", - "rhapsodising", "rhapsodizing", - "romanticised", "romanticized", - "romanticises", "romanticizes", - "scandalising", "scandalizing", - "scrutinising", "scrutinizing", - "secularising", "secularizing", - "specialising", "specializing", - "squirrelling", "squirreling", - "standardised", "standardized", - "standardises", "standardizes", - "stigmatising", "stigmatizing", - "sympathisers", "sympathizers", - "sympathising", "sympathizing", - "synchronised", "synchronized", - "synchronises", "synchronizes", - "synthesisers", "synthesizers", - "synthesising", "synthesizing", - "systematised", "systematized", - "systematises", "systematizes", - "technicolour", "technicolor", - "theatregoers", "theatergoers", - "traumatising", "traumatizing", - "trivialising", "trivializing", - "unauthorised", "unauthorized", - "uncatalogued", "uncataloged", - "unfavourable", "unfavorable", - "unfavourably", "unfavorably", - "unionisation", "unionization", - "unrecognised", "unrecognized", - "untrammelled", "untrammeled", - "urbanisation", "urbanization", - "vaporisation", "vaporization", - "vocalisation", "vocalization", - "watercolours", "watercolors", - "westernising", "westernizing", - "accessorise", "accessorize", - "acclimatise", "acclimatize", - "agonisingly", "agonizingly", - "amortisable", "amortizable", - "anaesthesia", "anesthesia", - "anaesthetic", "anesthetic", - "anglicising", "anglicizing", - "antagonised", "antagonized", - "antagonises", "antagonizes", - "apologising", "apologizing", - "archaeology", "archeology", - "authorising", "authorizing", - "bastardised", "bastardized", - "bastardises", "bastardizes", - "bedevilling", "bedeviling", - "behavioural", "behavioral", - "belabouring", "belaboring", - "bowdlerised", "bowdlerized", - "bowdlerises", "bowdlerizes", - "breathalyse", "breathalyze", - "brutalising", "brutalizing", - "cannibalise", "cannibalize", - "capitalised", "capitalized", - "capitalises", "capitalizes", - "caramelised", "caramelized", - "caramelises", "caramelizes", - "carbonising", "carbonizing", - "cataloguing", "cataloging", - "categorised", "categorized", - "categorises", "categorizes", - "cauterising", "cauterizing", - "centigramme", "centigram", - "centilitres", "centiliters", - "centimetres", "centimeters", - "centralised", "centralized", - "centralises", "centralizes", - "centrefolds", "centerfolds", - "centrepiece", "centerpiece", - "channelling", "channeling", - "chequebooks", "checkbooks", - "circularise", "circularize", - "colourfully", "colorfully", - "colourizing", "colorizing", - "computerise", "computerize", - "councillors", "councilors", - "counselling", "counseling", - "counsellors", "counselors", - "criminalise", "criminalize", - "criticising", "criticizing", - "crystallise", "crystallize", - "customising", "customizing", - "defenceless", "defenseless", - "dehumanised", "dehumanized", - "dehumanises", "dehumanizes", - "demobilised", "demobilized", - "demobilises", "demobilizes", - "democratise", "democratize", - "demoralised", "demoralized", - 
"demoralises", "demoralizes", - "deodorising", "deodorizing", - "desensitise", "desensitize", - "destabilise", "destabilize", - "discoloured", "discolored", - "dishevelled", "disheveled", - "dishonoured", "dishonored", - "dramatising", "dramatizing", - "economising", "economizing", - "empathising", "empathizing", - "emphasising", "emphasizing", - "endeavoured", "endeavored", - "epitomising", "epitomizing", - "evangelised", "evangelized", - "evangelises", "evangelizes", - "extemporise", "extemporize", - "externalise", "externalize", - "factorising", "factorizing", - "familiarise", "familiarize", - "fantasising", "fantasizing", - "favouritism", "favoritism", - "fertilisers", "fertilizers", - "fertilising", "fertilizing", - "flavourings", "flavorings", - "flavourless", "flavorless", - "flavoursome", "flavorsome", - "formalising", "formalizing", - "fossilising", "fossilizing", - "fraternised", "fraternized", - "fraternises", "fraternizes", - "galvanising", "galvanizing", - "generalised", "generalized", - "generalises", "generalizes", - "ghettoising", "ghettoizing", - "globalising", "globalizing", - "gruellingly", "gruelingly", - "gynaecology", "gynecology", - "haematology", "hematology", - "haemoglobin", "hemoglobin", - "haemophilia", "hemophilia", - "haemorrhage", "hemorrhage", - "harmonising", "harmonizing", - "homoeopaths", "homeopaths", - "homoeopathy", "homeopathy", - "homogenised", "homogenized", - "homogenises", "homogenizes", - "hospitalise", "hospitalize", - "hybridising", "hybridizing", - "hypnotising", "hypnotizing", - "hypothesise", "hypothesize", - "immobilised", "immobilized", - "immobiliser", "immobilizer", - "immobilises", "immobilizes", - "immortalise", "immortalize", - "impanelling", "impaneling", - "imperilling", "imperiling", - "initialised", "initialized", - "initialises", "initializes", - "initialling", "initialing", - "instalments", "installments", - "internalise", "internalize", - "italicising", "italicizing", - "jeopardised", "jeopardized", - "jeopardises", "jeopardizes", - "kilogrammes", "kilograms", - "legitimised", "legitimized", - "legitimises", "legitimizes", - "liberalised", "liberalized", - "liberalises", "liberalizes", - "lionisation", "lionization", - "liquidisers", "liquidizers", - "liquidising", "liquidizing", - "magnetising", "magnetizing", - "manoeuvring", "maneuvering", - "marginalise", "marginalize", - "marshalling", "marshaling", - "materialise", "materialize", - "mechanising", "mechanizing", - "memorialise", "memorialize", - "mesmerising", "mesmerizing", - "metabolised", "metabolized", - "metabolises", "metabolizes", - "micrometres", "micrometers", - "militarised", "militarized", - "militarises", "militarizes", - "milligramme", "milligram", - "millilitres", "milliliters", - "millimetres", "millimeters", - "miniaturise", "miniaturize", - "modernising", "modernizing", - "moisturised", "moisturized", - "moisturiser", "moisturizer", - "moisturises", "moisturizes", - "monopolised", "monopolized", - "monopolises", "monopolizes", - "nationalise", "nationalize", - "naturalised", "naturalized", - "naturalises", "naturalizes", - "neighbourly", "neighborly", - "neutralised", "neutralized", - "neutralises", "neutralizes", - "normalising", "normalizing", - "orthopaedic", "orthopedic", - "ostracising", "ostracizing", - "oxidisation", "oxidization", - "paediatrics", "pediatrics", - "paedophiles", "pedophiles", - "paedophilia", "pedophilia", - "passivising", "passivizing", - "pasteurised", "pasteurized", - "pasteurises", "pasteurizes", - "patronising", "patronizing", 
- "personalise", "personalize", - "plagiarised", "plagiarized", - "plagiarises", "plagiarizes", - "ploughshare", "plowshare", - "politicised", "politicized", - "politicises", "politicizes", - "popularised", "popularized", - "popularises", "popularizes", - "praesidiums", "presidiums", - "pressurised", "pressurized", - "pressurises", "pressurizes", - "prioritised", "prioritized", - "prioritises", "prioritizes", - "privatising", "privatizing", - "proselytise", "proselytize", - "publicising", "publicizing", - "pulverising", "pulverizing", - "quarrelling", "quarreling", - "radicalised", "radicalized", - "radicalises", "radicalizes", - "randomising", "randomizing", - "rationalise", "rationalize", - "realisation", "realization", - "recognising", "recognizing", - "reconnoitre", "reconnoiter", - "regularised", "regularized", - "regularises", "regularizes", - "remodelling", "remodeling", - "reorganised", "reorganized", - "reorganises", "reorganizes", - "revitalised", "revitalized", - "revitalises", "revitalizes", - "rhapsodised", "rhapsodized", - "rhapsodises", "rhapsodizes", - "romanticise", "romanticize", - "scandalised", "scandalized", - "scandalises", "scandalizes", - "sceptically", "skeptically", - "scrutinised", "scrutinized", - "scrutinises", "scrutinizes", - "secularised", "secularized", - "secularises", "secularizes", - "sensitising", "sensitizing", - "serialising", "serializing", - "sermonising", "sermonizing", - "shrivelling", "shriveling", - "signalising", "signalizing", - "snorkelling", "snorkeling", - "snowploughs", "snowplow", - "socialising", "socializing", - "solemnising", "solemnizing", - "specialised", "specialized", - "specialises", "specializes", - "squirrelled", "squirreled", - "stabilisers", "stabilizers", - "stabilising", "stabilizing", - "standardise", "standardize", - "stencilling", "stenciling", - "sterilisers", "sterilizers", - "sterilising", "sterilizing", - "stigmatised", "stigmatized", - "stigmatises", "stigmatizes", - "subsidisers", "subsidizers", - "subsidising", "subsidizing", - "summarising", "summarizing", - "symbolising", "symbolizing", - "sympathised", "sympathized", - "sympathiser", "sympathizer", - "sympathises", "sympathizes", - "synchronise", "synchronize", - "synthesised", "synthesized", - "synthesiser", "synthesizer", - "synthesises", "synthesizes", - "systematise", "systematize", - "tantalising", "tantalizing", - "temporising", "temporizing", - "tenderising", "tenderizing", - "terrorising", "terrorizing", - "theatregoer", "theatergoer", - "traumatised", "traumatized", - "traumatises", "traumatizes", - "trivialised", "trivialized", - "trivialises", "trivializes", - "tyrannising", "tyrannizing", - "uncivilised", "uncivilized", - "unorganised", "unorganized", - "unravelling", "unraveling", - "utilisation", "utilization", - "vandalising", "vandalizing", - "verbalising", "verbalizing", - "victimising", "victimizing", - "visualising", "visualizing", - "vulgarising", "vulgarizing", - "watercolour", "watercolor", - "westernised", "westernized", - "westernises", "westernizes", - "worshipping", "worshiping", - "aeroplanes", "airplanes", - "amortising", "amortizing", - "anglicised", "anglicized", - "anglicises", "anglicizes", - "annualised", "annualized", - "antagonise", "antagonize", - "apologised", "apologized", - "apologises", "apologizes", - "appetisers", "appetizers", - "appetising", "appetizing", - "authorised", "authorized", - "authorises", "authorizes", - "bannisters", "banisters", - "bastardise", "bastardize", - "bedevilled", "bedeviled", - "behaviours", 
"behaviors", - "bejewelled", "bejeweled", - "belaboured", "belabored", - "bowdlerise", "bowdlerize", - "brutalised", "brutalized", - "brutalises", "brutalizes", - "canalising", "canalizing", - "cancelling", "canceling", - "canonising", "canonizing", - "capitalise", "capitalize", - "caramelise", "caramelize", - "carbonised", "carbonized", - "carbonises", "carbonizes", - "catalogued", "cataloged", - "catalogues", "catalogs", - "catalysing", "catalyzing", - "categorise", "categorize", - "cauterised", "cauterized", - "cauterises", "cauterizes", - "centilitre", "centiliter", - "centimetre", "centimeter", - "centralise", "centralize", - "centrefold", "centerfold", - "channelled", "channeled", - "chequebook", "checkbook", - "chiselling", "chiseling", - "civilising", "civilizing", - "clamouring", "clamoring", - "colonisers", "colonizers", - "colonising", "colonizing", - "colourants", "colorants", - "colourized", "colorized", - "colourizes", "colorizes", - "colourless", "colorless", - "connexions", "connections", - "councillor", "councilor", - "counselled", "counseled", - "counsellor", "counselor", - "criticised", "criticized", - "criticises", "criticizes", - "cudgelling", "cudgeling", - "customised", "customized", - "customises", "customizes", - "dehumanise", "dehumanize", - "demobilise", "demobilize", - "demonising", "demonizing", - "demoralise", "demoralize", - "deodorised", "deodorized", - "deodorises", "deodorizes", - "deputising", "deputizing", - "digitising", "digitizing", - "discolours", "discolors", - "dishonours", "dishonors", - "dramatised", "dramatized", - "dramatises", "dramatizes", - "drivelling", "driveling", - "economised", "economized", - "economises", "economizes", - "empathised", "empathized", - "empathises", "empathizes", - "emphasised", "emphasized", - "emphasises", "emphasizes", - "enamelling", "enameling", - "endeavours", "endeavors", - "energising", "energizing", - "epaulettes", "epaulets", - "epicentres", "epicenters", - "epitomised", "epitomized", - "epitomises", "epitomizes", - "equalisers", "equalizers", - "equalising", "equalizing", - "eulogising", "eulogizing", - "evangelise", "evangelize", - "factorised", "factorized", - "factorises", "factorizes", - "fantasised", "fantasized", - "fantasises", "fantasizes", - "favourable", "favorable", - "favourably", "favorably", - "favourites", "favorites", - "feminising", "feminizing", - "fertilised", "fertilized", - "fertiliser", "fertilizer", - "fertilises", "fertilizes", - "fibreglass", "fiberglass", - "finalising", "finalizing", - "flavouring", "flavoring", - "formalised", "formalized", - "formalises", "formalizes", - "fossilised", "fossilized", - "fossilises", "fossilizes", - "fraternise", "fraternize", - "fulfilment", "fulfillment", - "funnelling", "funneling", - "galvanised", "galvanized", - "galvanises", "galvanizes", - "gambolling", "gamboling", - "gaolbreaks", "jailbreaks", - "generalise", "generalize", - "ghettoised", "ghettoized", - "ghettoises", "ghettoizes", - "globalised", "globalized", - "globalises", "globalizes", - "gonorrhoea", "gonorrhea", - "grovelling", "groveling", - "harbouring", "harboring", - "harmonised", "harmonized", - "harmonises", "harmonizes", - "homoeopath", "homeopath", - "homogenise", "homogenize", - "honourable", "honorable", - "honourably", "honorably", - "humanising", "humanizing", - "humourless", "humorless", - "hybridised", "hybridized", - "hybridises", "hybridizes", - "hypnotised", "hypnotized", - "hypnotises", "hypnotizes", - "idealising", "idealizing", - "immobilise", "immobilize", - 
"immunising", "immunizing", - "impanelled", "impaneled", - "imperilled", "imperiled", - "inflexions", "inflections", - "initialise", "initialize", - "initialled", "initialed", - "instalment", "installment", - "ionisation", "ionization", - "italicised", "italicized", - "italicises", "italicizes", - "jeopardise", "jeopardize", - "kilogramme", "kilogram", - "kilometres", "kilometers", - "lacklustre", "lackluster", - "legalising", "legalizing", - "legitimise", "legitimize", - "liberalise", "liberalize", - "liquidised", "liquidized", - "liquidiser", "liquidizer", - "liquidises", "liquidizes", - "localising", "localizing", - "magnetised", "magnetized", - "magnetises", "magnetizes", - "manoeuvred", "maneuvered", - "manoeuvres", "maneuvers", - "marshalled", "marshaled", - "marvelling", "marveling", - "marvellous", "marvelous", - "maximising", "maximizing", - "mechanised", "mechanized", - "mechanises", "mechanizes", - "memorising", "memorizing", - "mesmerised", "mesmerized", - "mesmerises", "mesmerizes", - "metabolise", "metabolize", - "micrometre", "micrometer", - "militarise", "militarize", - "millilitre", "milliliter", - "millimetre", "millimeter", - "minimising", "minimizing", - "mobilising", "mobilizing", - "modernised", "modernized", - "modernises", "modernizes", - "moisturise", "moisturize", - "monopolise", "monopolize", - "moralising", "moralizing", - "mouldering", "moldering", - "moustached", "mustached", - "moustaches", "mustaches", - "naturalise", "naturalize", - "neighbours", "neighbors", - "neutralise", "neutralize", - "normalised", "normalized", - "normalises", "normalizes", - "oesophagus", "esophagus", - "optimising", "optimizing", - "organisers", "organizers", - "organising", "organizing", - "ostracised", "ostracized", - "ostracises", "ostracizes", - "paederasts", "pederasts", - "paediatric", "pediatric", - "paedophile", "pedophile", - "panellists", "panelists", - "paralysing", "paralyzing", - "parcelling", "parceling", - "passivised", "passivized", - "passivises", "passivizes", - "pasteurise", "pasteurize", - "patronised", "patronized", - "patronises", "patronizes", - "penalising", "penalizing", - "pencilling", "penciling", - "plagiarise", "plagiarize", - "polarising", "polarizing", - "politicise", "politicize", - "popularise", "popularize", - "practising", "practicing", - "praesidium", "presidium", - "pressurise", "pressurize", - "prioritise", "prioritize", - "privatised", "privatized", - "privatises", "privatizes", - "programmes", "programs", - "publicised", "publicized", - "publicises", "publicizes", - "pulverised", "pulverized", - "pulverises", "pulverizes", - "pummelling", "pummeled", - "quarrelled", "quarreled", - "radicalise", "radicalize", - "randomised", "randomized", - "randomises", "randomizes", - "realisable", "realizable", - "recognised", "recognized", - "recognises", "recognizes", - "refuelling", "refueling", - "regularise", "regularize", - "remodelled", "remodeled", - "remoulding", "remolding", - "reorganise", "reorganize", - "revitalise", "revitalize", - "rhapsodise", "rhapsodize", - "ritualised", "ritualized", - "sanitising", "sanitizing", - "satirising", "satirizing", - "scandalise", "scandalize", - "scepticism", "skepticism", - "scrutinise", "scrutinize", - "secularise", "secularize", - "sensitised", "sensitized", - "sensitises", "sensitizes", - "sepulchres", "sepulchers", - "serialised", "serialized", - "serialises", "serializes", - "sermonised", "sermonized", - "sermonises", "sermonizes", - "shovelling", "shoveling", - "shrivelled", "shriveled", - "signalised", 
"signalized", - "signalises", "signalizes", - "signalling", "signaling", - "snivelling", "sniveling", - "snorkelled", "snorkeled", - "snowplough", "snowplow", - "socialised", "socialized", - "socialises", "socializes", - "sodomising", "sodomizing", - "solemnised", "solemnized", - "solemnises", "solemnizes", - "specialise", "specialize", - "spiralling", "spiraling", - "splendours", "splendors", - "stabilised", "stabilized", - "stabiliser", "stabilizer", - "stabilises", "stabilizes", - "stencilled", "stenciled", - "sterilised", "sterilized", - "steriliser", "sterilizer", - "sterilises", "sterilizes", - "stigmatise", "stigmatize", - "subsidised", "subsidized", - "subsidiser", "subsidizer", - "subsidises", "subsidizes", - "succouring", "succoring", - "sulphurous", "sulfurous", - "summarised", "summarized", - "summarises", "summarizes", - "swivelling", "swiveling", - "symbolised", "symbolized", - "symbolises", "symbolizes", - "sympathise", "sympathize", - "synthesise", "synthesize", - "tantalised", "tantalized", - "tantalises", "tantalizes", - "temporised", "temporized", - "temporises", "temporizes", - "tenderised", "tenderized", - "tenderises", "tenderizes", - "terrorised", "terrorized", - "terrorises", "terrorizes", - "theorising", "theorizing", - "traumatise", "traumatize", - "travellers", "travelers", - "travelling", "traveling", - "tricolours", "tricolors", - "trivialise", "trivialize", - "tunnelling", "tunneling", - "tyrannised", "tyrannized", - "tyrannises", "tyrannizes", - "unequalled", "unequaled", - "unionising", "unionizing", - "unravelled", "unraveled", - "unrivalled", "unrivaled", - "urbanising", "urbanizing", - "utilisable", "utilizable", - "vandalised", "vandalized", - "vandalises", "vandalizes", - "vaporising", "vaporizing", - "verbalised", "verbalized", - "verbalises", "verbalizes", - "victimised", "victimized", - "victimises", "victimizes", - "visualised", "visualized", - "visualises", "visualizes", - "vocalising", "vocalizing", - "vulcanised", "vulcanized", - "vulgarised", "vulgarized", - "vulgarises", "vulgarizes", - "weaselling", "weaseling", - "westernise", "westernize", - "womanisers", "womanizers", - "womanising", "womanizing", - "worshipped", "worshiped", - "worshipper", "worshiper", - "aeroplane", "airplane", - "aetiology", "etiology", - "agonising", "agonizing", - "almanacks", "almanacs", - "aluminium", "aluminum", - "amortised", "amortized", - "amortises", "amortizes", - "analogues", "analogs", - "analysing", "analyzing", - "anglicise", "anglicize", - "apologise", "apologize", - "appetiser", "appetizer", - "armourers", "armorers", - "armouries", "armories", - "artefacts", "artifacts", - "authorise", "authorize", - "baptising", "baptizing", - "behaviour", "behavior", - "belabours", "belabors", - "brutalise", "brutalize", - "callipers", "calipers", - "canalised", "canalized", - "canalises", "canalizes", - "cancelled", "canceled", - "canonised", "canonized", - "canonises", "canonizes", - "carbonise", "carbonize", - "carolling", "caroling", - "catalogue", "catalog", - "catalysed", "catalyzed", - "catalyses", "catalyzes", - "cauterise", "cauterize", - "cavilling", "caviling", - "chequered", "checkered", - "chiselled", "chiseled", - "civilised", "civilized", - "civilises", "civilizes", - "clamoured", "clamored", - "colonised", "colonized", - "coloniser", "colonizer", - "colonises", "colonizes", - "colourant", "colorant", - "coloureds", "coloreds", - "colourful", "colorful", - "colouring", "coloring", - "colourize", "colorize", - "connexion", "connection", - "criticise", 
"criticize", - "cruellest", "cruelest", - "cudgelled", "cudgeled", - "customise", "customize", - "demeanour", "demeanor", - "demonised", "demonized", - "demonises", "demonizes", - "deodorise", "deodorize", - "deputised", "deputized", - "deputises", "deputizes", - "dialogues", "dialogs", - "diarrhoea", "diarrhea", - "digitised", "digitized", - "digitises", "digitizes", - "discolour", "discolor", - "disfavour", "disfavor", - "dishonour", "dishonor", - "dramatise", "dramatize", - "drivelled", "driveled", - "economise", "economize", - "empathise", "empathize", - "emphasise", "emphasize", - "enamelled", "enameled", - "enamoured", "enamored", - "endeavour", "endeavor", - "energised", "energized", - "energises", "energizes", - "epaulette", "epaulet", - "epicentre", "epicenter", - "epitomise", "epitomize", - "equalised", "equalized", - "equaliser", "equalizer", - "equalises", "equalizes", - "eulogised", "eulogized", - "eulogises", "eulogizes", - "factorise", "factorize", - "fantasise", "fantasize", - "favouring", "favoring", - "favourite", "favorite", - "feminised", "feminized", - "feminises", "feminizes", - "fertilise", "fertilize", - "finalised", "finalized", - "finalises", "finalizes", - "flautists", "flutists", - "flavoured", "flavored", - "formalise", "formalize", - "fossilise", "fossilize", - "funnelled", "funneled", - "galvanise", "galvanize", - "gambolled", "gamboled", - "gaolbirds", "jailbirds", - "gaolbreak", "jailbreak", - "ghettoise", "ghettoize", - "globalise", "globalize", - "gravelled", "graveled", - "grovelled", "groveled", - "gruelling", "grueling", - "harboured", "harbored", - "harmonise", "harmonize", - "honouring", "honoring", - "humanised", "humanized", - "humanises", "humanizes", - "humouring", "humoring", - "hybridise", "hybridize", - "hypnotise", "hypnotize", - "idealised", "idealized", - "idealises", "idealizes", - "idolising", "idolizing", - "immunised", "immunized", - "immunises", "immunizes", - "inflexion", "inflection", - "italicise", "italicize", - "itemising", "itemizing", - "jewellers", "jewelers", - "jewellery", "jewelry", - "kilometre", "kilometer", - "labelling", "labeling", - "labourers", "laborers", - "labouring", "laboring", - "legalised", "legalized", - "legalises", "legalizes", - "leukaemia", "leukemia", - "levellers", "levelers", - "levelling", "leveling", - "libelling", "libeling", - "libellous", "libelous", - "licencing", "licensing", - "lionising", "lionizing", - "liquidise", "liquidize", - "localised", "localized", - "localises", "localizes", - "magnetise", "magnetize", - "manoeuvre", "maneuver", - "marvelled", "marveled", - "maximised", "maximized", - "maximises", "maximizes", - "mechanise", "mechanize", - "mediaeval", "medieval", - "memorised", "memorized", - "memorises", "memorizes", - "mesmerise", "mesmerize", - "minimised", "minimized", - "minimises", "minimizes", - "mobilised", "mobilized", - "mobilises", "mobilizes", - "modellers", "modelers", - "modelling", "modeling", - "modernise", "modernize", - "moralised", "moralized", - "moralises", "moralizes", - "motorised", "motorized", - "mouldered", "moldered", - "mouldiest", "moldiest", - "mouldings", "moldings", - "moustache", "mustache", - "neighbour", "neighbor", - "normalise", "normalize", - "odourless", "odorless", - "oestrogen", "estrogen", - "optimised", "optimized", - "optimises", "optimizes", - "organised", "organized", - "organiser", "organizer", - "organises", "organizes", - "ostracise", "ostracize", - "oxidising", "oxidizing", - "paederast", "pederast", - "panelling", "paneling", - 
"panellist", "panelist", - "paralysed", "paralyzed", - "paralyses", "paralyzes", - "parcelled", "parceled", - "passivise", "passivize", - "patronise", "patronize", - "pedalling", "pedaling", - "penalised", "penalized", - "penalises", "penalizes", - "pencilled", "penciled", - "ploughing", "plowing", - "ploughman", "plowman", - "ploughmen", "plowmen", - "polarised", "polarized", - "polarises", "polarizes", - "practised", "practiced", - "practises", "practices", - "pretences", "pretenses", - "primaeval", "primeval", - "privatise", "privatize", - "programme", "program", - "publicise", "publicize", - "pulverise", "pulverize", - "pummelled", "pummel", - "randomise", "randomize", - "ravelling", "raveling", - "realising", "realizing", - "recognise", "recognize", - "refuelled", "refueled", - "remoulded", "remolded", - "revellers", "revelers", - "revelling", "reveling", - "rivalling", "rivaling", - "saltpetre", "saltpeter", - "sanitised", "sanitized", - "sanitises", "sanitizes", - "satirised", "satirized", - "satirises", "satirizes", - "savouries", "savories", - "savouring", "savoring", - "sceptical", "skeptical", - "sensitise", "sensitize", - "sepulchre", "sepulcher", - "serialise", "serialize", - "sermonise", "sermonize", - "shovelled", "shoveled", - "signalise", "signalize", - "signalled", "signaled", - "snivelled", "sniveled", - "socialise", "socialize", - "sodomised", "sodomized", - "sodomises", "sodomizes", - "solemnise", "solemnize", - "spiralled", "spiraled", - "splendour", "splendor", - "stabilise", "stabilize", - "sterilise", "sterilize", - "subsidise", "subsidize", - "succoured", "succored", - "sulphates", "sulfates", - "sulphides", "sulfides", - "summarise", "summarize", - "swivelled", "swiveled", - "symbolise", "symbolize", - "syphoning", "siphoning", - "tantalise", "tantalize", - "tasselled", "tasseled", - "temporise", "temporize", - "tenderise", "tenderize", - "terrorise", "terrorize", - "theorised", "theorized", - "theorises", "theorizes", - "towelling", "toweling", - "travelled", "traveled", - "traveller", "traveler", - "trialling", "trialing", - "tricolour", "tricolor", - "tunnelled", "tunneled", - "tyrannise", "tyrannize", - "unionised", "unionized", - "unionises", "unionizes", - "unsavoury", "unsavory", - "urbanised", "urbanized", - "urbanises", "urbanizes", - "utilising", "utilizing", - "vandalise", "vandalize", - "vaporised", "vaporized", - "vaporises", "vaporizes", - "verbalise", "verbalize", - "victimise", "victimize", - "visualise", "visualize", - "vocalised", "vocalized", - "vocalises", "vocalizes", - "vulgarise", "vulgarize", - "weaselled", "weaseled", - "womanised", "womanized", - "womaniser", "womanizer", - "womanises", "womanizes", - "yodelling", "yodeling", - "yoghourts", "yogurts", - "agonised", "agonized", - "agonises", "agonizes", - "almanack", "almanac", - "amortise", "amortize", - "analogue", "analog", - "analysed", "analyzed", - "analyses", "analyzes", - "armoured", "armored", - "armourer", "armorer", - "artefact", "artifact", - "baptised", "baptized", - "baptises", "baptizes", - "baulking", "balking", - "belabour", "belabor", - "bevelled", "beveled", - "calibres", "calibers", - "calliper", "caliper", - "canalise", "canalize", - "canonise", "canonize", - "carolled", "caroled", - "catalyse", "catalyze", - "cavilled", "caviled", - "civilise", "civilize", - "clamours", "clamors", - "clangour", "clangor", - "colonise", "colonize", - "coloured", "colored", - "cosiness", "coziness", - "crueller", "crueler", - "defences", "defenses", - "demonise", "demonize", - 
"deputise", "deputize", - "dialling", "dialing", - "dialogue", "dialog", - "digitise", "digitize", - "draughty", "drafty", - "duelling", "dueling", - "energise", "energize", - "enthrals", "enthralls", - "equalise", "equalize", - "eulogise", "eulogize", - "favoured", "favored", - "feminise", "feminize", - "finalise", "finalize", - "flautist", "flutist", - "flavours", "flavors", - "foetuses", "fetuses", - "fuelling", "fueling", - "gaolbird", "jailbird", - "gryphons", "griffins", - "harbours", "harbors", - "honoured", "honored", - "humanise", "humanize", - "humoured", "humored", - "idealise", "idealize", - "idolised", "idolized", - "idolises", "idolizes", - "immunise", "immunize", - "ionisers", "ionizers", - "ionising", "ionizing", - "itemised", "itemized", - "itemises", "itemizes", - "jewelled", "jeweled", - "jeweller", "jeweler", - "labelled", "labeled", - "laboured", "labored", - "labourer", "laborer", - "legalise", "legalize", - "levelled", "leveled", - "leveller", "leveler", - "libelled", "libeled", - "licenced", "licensed", - "licences", "licenses", - "lionised", "lionized", - "lionises", "lionizes", - "localise", "localize", - "maximise", "maximize", - "memorise", "memorize", - "minimise", "minimize", - "misspelt", "misspelled", - "mobilise", "mobilize", - "modelled", "modeled", - "modeller", "modeler", - "moralise", "moralize", - "moulders", "molders", - "mouldier", "moldier", - "moulding", "molding", - "moulting", "molting", - "offences", "offenses", - "optimise", "optimize", - "organise", "organize", - "oxidised", "oxidized", - "oxidises", "oxidizes", - "panelled", "paneled", - "paralyse", "paralyze", - "parlours", "parlors", - "pedalled", "pedaled", - "penalise", "penalize", - "philtres", "filters", - "ploughed", "plowed", - "polarise", "polarize", - "practise", "practice", - "pretence", "pretense", - "ravelled", "raveled", - "realised", "realized", - "realises", "realizes", - "remoulds", "remolds", - "revelled", "reveled", - "reveller", "reveler", - "rivalled", "rivaled", - "rumoured", "rumored", - "sanitise", "sanitize", - "satirise", "satirize", - "saviours", "saviors", - "savoured", "savored", - "sceptics", "skeptics", - "sceptres", "scepters", - "sodomise", "sodomize", - "spectres", "specters", - "succours", "succors", - "sulphate", "sulfate", - "sulphide", "sulfide", - "syphoned", "siphoned", - "theatres", "theaters", - "theorise", "theorize", - "towelled", "toweled", - "toxaemia", "toxemia", - "trialled", "trialed", - "unionise", "unionize", - "urbanise", "urbanize", - "utilised", "utilized", - "utilises", "utilizes", - "vaporise", "vaporize", - "vocalise", "vocalize", - "womanise", "womanize", - "yodelled", "yodeled", - "yoghourt", "yogurt", - "yoghurts", "yogurts", - "agonise", "agonize", - "anaemia", "anemia", - "anaemic", "anemic", - "analyse", "analyze", - "arbours", "arbors", - "armoury", "armory", - "baptise", "baptize", - "baulked", "balked", - "behoved", "behooved", - "behoves", "behooves", - "calibre", "caliber", - "candour", "candor", - "centred", "centered", - "centres", "centers", - "cheques", "checks", - "clamour", "clamor", - "colours", "colors", - "cosiest", "coziest", - "defence", "defense", - "dialled", "dialed", - "distils", "distills", - "duelled", "dueled", - "enthral", "enthrall", - "favours", "favors", - "fervour", "fervor", - "flavour", "flavor", - "fuelled", "fueled", - "fulfils", "fulfills", - "gaolers", "jailers", - "gaoling", "jailing", - "gipsies", "gypsies", - "glueing", "gluing", - "goitres", "goiters", - "grammes", "grams", - "groynes", 
"groins", - "gryphon", "griffin", - "harbour", "harbor", - "honours", "honors", - "humours", "humors", - "idolise", "idolize", - "instals", "installs", - "instils", "instills", - "ionised", "ionized", - "ioniser", "ionizer", - "ionises", "ionizes", - "itemise", "itemize", - "labours", "labors", - "licence", "license", - "lionise", "lionize", - "louvred", "louvered", - "louvres", "louvers", - "moulded", "molded", - "moulder", "molder", - "moulted", "molted", - "offence", "offense", - "oxidise", "oxidize", - "parlour", "parlor", - "philtre", "filter", - "ploughs", "plows", - "pyjamas", "pajamas", - "rancour", "rancor", - "realise", "realize", - "remould", "remold", - "rigours", "rigors", - "rumours", "rumors", - "saviour", "savior", - "savours", "savors", - "savoury", "savory", - "sceptic", "skeptic", - "sceptre", "scepter", - "spectre", "specter", - "storeys", "stories", - "succour", "succor", - "sulphur", "sulfur", - "syphons", "siphons", - "theatre", "theater", - "tumours", "tumors", - "utilise", "utilize", - "vapours", "vapors", - "waggons", "wagons", - "yoghurt", "yogurt", - "ageing", "aging", - "appals", "appalls", - "arbour", "arbor", - "ardour", "ardor", - "baulks", "balks", - "behove", "behoove", - "centre", "center", - "cheque", "check", - "chilli", "chili", - "colour", "color", - "cosier", "cozier", - "cosies", "cozies", - "cosily", "cozily", - "distil", "distill", - "edoema", "edema", - "enrols", "enrolls", - "faecal", "fecal", - "faeces", "feces", - "favour", "favor", - "fibres", "fibers", - "foetal", "fetal", - "foetid", "fetid", - "foetus", "fetus", - "fulfil", "fulfill", - "gaoled", "jailed", - "gaoler", "jailer", - "goitre", "goiter", - "gramme", "gram", - "groyne", "groin", - "honour", "honor", - "humour", "humor", - "instal", "install", - "instil", "instill", - "ionise", "ionize", - "labour", "labor", - "litres", "liters", - "lustre", "luster", - "meagre", "meager", - "metres", "meters", - "mitres", "miters", - "moulds", "molds", - "mouldy", "moldy", - "moults", "molts", - "odours", "odors", - "plough", "plow", - "pyjama", "pajama", - "rigour", "rigor", - "rumour", "rumor", - "savour", "savor", - "storey", "story", - "syphon", "siphon", - "tumour", "tumor", - "valour", "valor", - "vapour", "vapor", - "vigour", "vigor", - "waggon", "wagon", - "appal", "appall", - "baulk", "balk", - "enrol", "enroll", - "fibre", "fiber", - "gaols", "jails", - "litre", "liter", - "metre", "meter", - "mitre", "miter", - "mould", "mold", - "moult", "molt", - "odour", "odor", - "tyres", "tires", - "cosy", "cozy", - "gaol", "jail", - "tyre", "tire", -} - -// DictBritish converts US spellings to UK spellings -var DictBritish = []string{ - "institutionalization", "institutionalisation", - "internationalization", "internationalisation", - "professionalization", "professionalisation", - "compartmentalizing", "compartmentalising", - "institutionalizing", "institutionalising", - "internationalizing", "internationalising", - "compartmentalized", "compartmentalised", - "compartmentalizes", "compartmentalises", - "decriminalization", "decriminalisation", - "denationalization", "denationalisation", - "fictionalizations", "fictionalisations", - "institutionalized", "institutionalised", - "institutionalizes", "institutionalises", - "intellectualizing", "intellectualising", - "internationalized", "internationalised", - "internationalizes", "internationalises", - "pedestrianization", "pedestrianisation", - "professionalizing", "professionalising", - "compartmentalize", "compartmentalise", - "decentralization", 
"decentralisation", - "demilitarization", "demilitarisation", - "externalizations", "externalisations", - "fictionalization", "fictionalisation", - "institutionalize", "institutionalise", - "intellectualized", "intellectualised", - "intellectualizes", "intellectualises", - "internationalize", "internationalise", - "nationalizations", "nationalisations", - "professionalized", "professionalised", - "professionalizes", "professionalises", - "rationalizations", "rationalisations", - "sensationalizing", "sensationalising", - "sentimentalizing", "sentimentalising", - "acclimatization", "acclimatisation", - "commercializing", "commercialising", - "conceptualizing", "conceptualising", - "contextualizing", "contextualising", - "crystallization", "crystallisation", - "decriminalizing", "decriminalising", - "democratization", "democratisation", - "denationalizing", "denationalising", - "depersonalizing", "depersonalising", - "desensitization", "desensitisation", - "disorganization", "disorganisation", - "extemporization", "extemporisation", - "externalization", "externalisation", - "familiarization", "familiarisation", - "generalizations", "generalisations", - "hospitalization", "hospitalisation", - "individualizing", "individualising", - "industrializing", "industrialising", - "intellectualize", "intellectualise", - "internalization", "internalisation", - "maneuverability", "manoeuvrability", - "materialization", "materialisation", - "miniaturization", "miniaturisation", - "nationalization", "nationalisation", - "overemphasizing", "overemphasising", - "paleontologists", "palaeontologists", - "particularizing", "particularising", - "pedestrianizing", "pedestrianising", - "professionalize", "professionalise", - "psychoanalyzing", "psychoanalysing", - "rationalization", "rationalisation", - "reorganizations", "reorganisations", - "revolutionizing", "revolutionising", - "sensationalized", "sensationalised", - "sensationalizes", "sensationalises", - "sentimentalized", "sentimentalised", - "sentimentalizes", "sentimentalises", - "specializations", "specialisations", - "standardization", "standardisation", - "synchronization", "synchronisation", - "systematization", "systematisation", - "aggrandizement", "aggrandisement", - "characterizing", "characterising", - "collectivizing", "collectivising", - "commercialized", "commercialised", - "commercializes", "commercialises", - "conceptualized", "conceptualised", - "conceptualizes", "conceptualises", - "contextualized", "contextualised", - "contextualizes", "contextualises", - "decentralizing", "decentralising", - "decriminalized", "decriminalised", - "decriminalizes", "decriminalises", - "dehumanization", "dehumanisation", - "demilitarizing", "demilitarising", - "demobilization", "demobilisation", - "demoralization", "demoralisation", - "denationalized", "denationalised", - "denationalizes", "denationalises", - "depersonalized", "depersonalised", - "depersonalizes", "depersonalises", - "dramatizations", "dramatisations", - "editorializing", "editorialising", - "fictionalizing", "fictionalising", - "fraternization", "fraternisation", - "generalization", "generalisation", - "immobilization", "immobilisation", - "individualized", "individualised", - "individualizes", "individualises", - "industrialized", "industrialised", - "industrializes", "industrialises", - "liberalization", "liberalisation", - "monopolization", "monopolisation", - "naturalization", "naturalisation", - "neighborliness", "neighbourliness", - "neutralization", "neutralisation", - 
"organizational", "organisational", - "outmaneuvering", "outmanoeuvring", - "overemphasized", "overemphasised", - "overemphasizes", "overemphasises", - "paleontologist", "palaeontologist", - "particularized", "particularised", - "particularizes", "particularises", - "pasteurization", "pasteurisation", - "pedestrianized", "pedestrianised", - "pedestrianizes", "pedestrianises", - "philosophizing", "philosophising", - "politicization", "politicisation", - "popularization", "popularisation", - "pressurization", "pressurisation", - "prioritization", "prioritisation", - "privatizations", "privatisations", - "propagandizing", "propagandising", - "psychoanalyzed", "psychoanalysed", - "psychoanalyzes", "psychoanalyses", - "reconnoitering", "reconnoitring", - "regularization", "regularisation", - "reorganization", "reorganisation", - "revolutionized", "revolutionised", - "revolutionizes", "revolutionises", - "secularization", "secularisation", - "sensationalize", "sensationalise", - "sentimentalize", "sentimentalise", - "serializations", "serialisations", - "specialization", "specialisation", - "sterilizations", "sterilisations", - "stigmatization", "stigmatisation", - "transistorized", "transistorised", - "unrecognizable", "unrecognisable", - "visualizations", "visualisations", - "westernization", "westernisation", - "accessorizing", "accessorising", - "acclimatizing", "acclimatising", - "amortizations", "amortisations", - "amphitheaters", "amphitheatres", - "anesthetizing", "anaesthetising", - "archeologists", "archaeologists", - "breathalyzers", "breathalysers", - "breathalyzing", "breathalysing", - "cannibalizing", "cannibalising", - "characterized", "characterised", - "characterizes", "characterises", - "circularizing", "circularising", - "collectivized", "collectivised", - "collectivizes", "collectivises", - "commercialize", "commercialise", - "computerizing", "computerising", - "conceptualize", "conceptualise", - "contextualize", "contextualise", - "criminalizing", "criminalising", - "crystallizing", "crystallising", - "decentralized", "decentralised", - "decentralizes", "decentralises", - "decriminalize", "decriminalise", - "demilitarized", "demilitarised", - "demilitarizes", "demilitarises", - "democratizing", "democratising", - "denationalize", "denationalise", - "depersonalize", "depersonalise", - "desensitizing", "desensitising", - "destabilizing", "destabilising", - "disemboweling", "disembowelling", - "dramatization", "dramatisation", - "editorialized", "editorialised", - "editorializes", "editorialises", - "extemporizing", "extemporising", - "externalizing", "externalising", - "familiarizing", "familiarising", - "fertilization", "fertilisation", - "fictionalized", "fictionalised", - "fictionalizes", "fictionalises", - "formalization", "formalisation", - "fossilization", "fossilisation", - "globalization", "globalisation", - "gynecological", "gynaecological", - "gynecologists", "gynaecologists", - "harmonization", "harmonisation", - "hematological", "haematological", - "hematologists", "haematologists", - "hospitalizing", "hospitalising", - "hypothesizing", "hypothesising", - "immortalizing", "immortalising", - "individualize", "individualise", - "industrialize", "industrialise", - "internalizing", "internalising", - "marginalizing", "marginalising", - "materializing", "materialising", - "mechanization", "mechanisation", - "memorializing", "memorialising", - "miniaturizing", "miniaturising", - "nationalizing", "nationalising", - "neighborhoods", "neighbourhoods", - "normalization", 
"normalisation", - "organizations", "organisations", - "outmaneuvered", "outmanoeuvred", - "overemphasize", "overemphasise", - "particularize", "particularise", - "passivization", "passivisation", - "patronizingly", "patronisingly", - "pedestrianize", "pedestrianise", - "pediatricians", "paediatricians", - "personalizing", "personalising", - "philosophized", "philosophised", - "philosophizes", "philosophises", - "privatization", "privatisation", - "propagandized", "propagandised", - "propagandizes", "propagandises", - "proselytizers", "proselytisers", - "proselytizing", "proselytising", - "psychoanalyze", "psychoanalyse", - "pulverization", "pulverisation", - "rationalizing", "rationalising", - "reconnoitered", "reconnoitred", - "revolutionize", "revolutionise", - "romanticizing", "romanticising", - "serialization", "serialisation", - "socialization", "socialisation", - "standardizing", "standardising", - "sterilization", "sterilisation", - "subsidization", "subsidisation", - "synchronizing", "synchronising", - "systematizing", "systematising", - "tantalizingly", "tantalisingly", - "underutilized", "underutilised", - "victimization", "victimisation", - "visualization", "visualisation", - "vocalizations", "vocalisations", - "vulgarization", "vulgarisation", - "accessorized", "accessorised", - "accessorizes", "accessorises", - "acclimatized", "acclimatised", - "acclimatizes", "acclimatises", - "amortization", "amortisation", - "amphitheater", "amphitheatre", - "anesthetists", "anaesthetists", - "anesthetized", "anaesthetised", - "anesthetizes", "anaesthetises", - "antagonizing", "antagonising", - "appetizingly", "appetisingly", - "archeologist", "archaeologist", - "backpedaling", "backpedalling", - "bastardizing", "bastardising", - "behaviorists", "behaviourists", - "bowdlerizing", "bowdlerising", - "breathalyzed", "breathalysed", - "breathalyzes", "breathalyses", - "cannibalized", "cannibalised", - "cannibalizes", "cannibalises", - "capitalizing", "capitalising", - "caramelizing", "caramelising", - "categorizing", "categorising", - "centerpieces", "centrepieces", - "centralizing", "centralising", - "characterize", "characterise", - "circularized", "circularised", - "circularizes", "circularises", - "clarinetists", "clarinettists", - "collectivize", "collectivise", - "colonization", "colonisation", - "computerized", "computerised", - "computerizes", "computerises", - "criminalized", "criminalised", - "criminalizes", "criminalises", - "crystallized", "crystallised", - "crystallizes", "crystallises", - "decentralize", "decentralise", - "dehumanizing", "dehumanising", - "demilitarize", "demilitarise", - "demobilizing", "demobilising", - "democratized", "democratised", - "democratizes", "democratises", - "demoralizing", "demoralising", - "desensitized", "desensitised", - "desensitizes", "desensitises", - "destabilized", "destabilised", - "destabilizes", "destabilises", - "disemboweled", "disembowelled", - "dishonorable", "dishonourable", - "dishonorably", "dishonourably", - "disorganized", "disorganised", - "editorialize", "editorialise", - "equalization", "equalisation", - "evangelizing", "evangelising", - "extemporized", "extemporised", - "extemporizes", "extemporises", - "externalized", "externalised", - "externalizes", "externalises", - "familiarized", "familiarised", - "familiarizes", "familiarises", - "fictionalize", "fictionalise", - "finalization", "finalisation", - "fraternizing", "fraternising", - "generalizing", "generalising", - "gynecologist", "gynaecologist", - "hematologist", 
"haematologist", - "hemophiliacs", "haemophiliacs", - "hemorrhaging", "haemorrhaging", - "homogenizing", "homogenising", - "hospitalized", "hospitalised", - "hospitalizes", "hospitalises", - "hypothesized", "hypothesised", - "hypothesizes", "hypothesises", - "idealization", "idealisation", - "immobilizers", "immobilisers", - "immobilizing", "immobilising", - "immortalized", "immortalised", - "immortalizes", "immortalises", - "immunization", "immunisation", - "initializing", "initialising", - "installments", "instalments", - "internalized", "internalised", - "internalizes", "internalises", - "jeopardizing", "jeopardising", - "legalization", "legalisation", - "legitimizing", "legitimising", - "liberalizing", "liberalising", - "maneuverable", "manoeuvrable", - "maneuverings", "manoeuvrings", - "marginalized", "marginalised", - "marginalizes", "marginalises", - "materialized", "materialised", - "materializes", "materialises", - "maximization", "maximisation", - "memorialized", "memorialised", - "memorializes", "memorialises", - "metabolizing", "metabolising", - "militarizing", "militarising", - "miniaturized", "miniaturised", - "miniaturizes", "miniaturises", - "miscataloged", "miscatalogued", - "misdemeanors", "misdemeanours", - "mobilization", "mobilisation", - "moisturizers", "moisturisers", - "moisturizing", "moisturising", - "monopolizing", "monopolising", - "multicolored", "multicoloured", - "nationalized", "nationalised", - "nationalizes", "nationalises", - "naturalizing", "naturalising", - "neighborhood", "neighbourhood", - "neutralizing", "neutralising", - "organization", "organisation", - "outmaneuvers", "outmanoeuvres", - "paleontology", "palaeontology", - "pasteurizing", "pasteurising", - "pediatrician", "paediatrician", - "personalized", "personalised", - "personalizes", "personalises", - "philosophize", "philosophise", - "plagiarizing", "plagiarising", - "polarization", "polarisation", - "politicizing", "politicising", - "popularizing", "popularising", - "pressurizing", "pressurising", - "prioritizing", "prioritising", - "propagandize", "propagandise", - "proselytized", "proselytised", - "proselytizer", "proselytiser", - "proselytizes", "proselytises", - "radicalizing", "radicalising", - "rationalized", "rationalised", - "rationalizes", "rationalises", - "realizations", "realisations", - "recognizable", "recognisable", - "recognizably", "recognisably", - "recognizance", "recognisance", - "reconnoiters", "reconnoitres", - "regularizing", "regularising", - "reorganizing", "reorganising", - "revitalizing", "revitalising", - "rhapsodizing", "rhapsodising", - "romanticized", "romanticised", - "romanticizes", "romanticises", - "scandalizing", "scandalising", - "scrutinizing", "scrutinising", - "secularizing", "secularising", - "standardized", "standardised", - "standardizes", "standardises", - "stigmatizing", "stigmatising", - "sympathizers", "sympathisers", - "sympathizing", "sympathising", - "synchronized", "synchronised", - "synchronizes", "synchronises", - "synthesizing", "synthesising", - "systematized", "systematised", - "systematizes", "systematises", - "theatergoers", "theatregoers", - "traumatizing", "traumatising", - "trivializing", "trivialising", - "unauthorized", "unauthorised", - "unionization", "unionisation", - "unrecognized", "unrecognised", - "urbanization", "urbanisation", - "vaporization", "vaporisation", - "vocalization", "vocalisation", - "westernizing", "westernising", - "accessorize", "accessorise", - "acclimatize", "acclimatise", - "agonizingly", "agonisingly", 
- "amortizable", "amortisable", - "anesthetics", "anaesthetics", - "anesthetist", "anaesthetist", - "anesthetize", "anaesthetise", - "anglicizing", "anglicising", - "antagonized", "antagonised", - "antagonizes", "antagonises", - "apologizing", "apologising", - "backpedaled", "backpedalled", - "bastardized", "bastardised", - "bastardizes", "bastardises", - "behaviorism", "behaviourism", - "behaviorist", "behaviourist", - "bowdlerized", "bowdlerised", - "bowdlerizes", "bowdlerises", - "brutalizing", "brutalising", - "cannibalize", "cannibalise", - "capitalized", "capitalised", - "capitalizes", "capitalises", - "caramelized", "caramelised", - "caramelizes", "caramelises", - "carbonizing", "carbonising", - "categorized", "categorised", - "categorizes", "categorises", - "cauterizing", "cauterising", - "centerfolds", "centrefolds", - "centerpiece", "centrepiece", - "centiliters", "centilitres", - "centimeters", "centimetres", - "centralized", "centralised", - "centralizes", "centralises", - "circularize", "circularise", - "clarinetist", "clarinettist", - "computerize", "computerise", - "criminalize", "criminalise", - "criticizing", "criticising", - "crystallize", "crystallise", - "customizing", "customising", - "defenseless", "defenceless", - "dehumanized", "dehumanised", - "dehumanizes", "dehumanises", - "demobilized", "demobilised", - "demobilizes", "demobilises", - "democratize", "democratise", - "demoralized", "demoralised", - "demoralizes", "demoralises", - "deodorizing", "deodorising", - "desensitize", "desensitise", - "destabilize", "destabilise", - "discoloring", "discolouring", - "dishonoring", "dishonouring", - "dramatizing", "dramatising", - "economizing", "economising", - "empathizing", "empathising", - "emphasizing", "emphasising", - "endeavoring", "endeavouring", - "epitomizing", "epitomising", - "esophaguses", "oesophaguses", - "evangelized", "evangelised", - "evangelizes", "evangelises", - "extemporize", "extemporise", - "externalize", "externalise", - "factorizing", "factorising", - "familiarize", "familiarise", - "fantasizing", "fantasising", - "fertilizers", "fertilisers", - "fertilizing", "fertilising", - "formalizing", "formalising", - "fossilizing", "fossilising", - "fraternized", "fraternised", - "fraternizes", "fraternises", - "fulfillment", "fulfilment", - "galvanizing", "galvanising", - "generalized", "generalised", - "generalizes", "generalises", - "ghettoizing", "ghettoising", - "globalizing", "globalising", - "harmonizing", "harmonising", - "hemophiliac", "haemophiliac", - "hemorrhaged", "haemorrhaged", - "hemorrhages", "haemorrhages", - "hemorrhoids", "haemorrhoids", - "homogenized", "homogenised", - "homogenizes", "homogenises", - "hospitalize", "hospitalise", - "hybridizing", "hybridising", - "hypnotizing", "hypnotising", - "hypothesize", "hypothesise", - "immobilized", "immobilised", - "immobilizer", "immobiliser", - "immobilizes", "immobilises", - "immortalize", "immortalise", - "initialized", "initialised", - "initializes", "initialises", - "installment", "instalment", - "internalize", "internalise", - "italicizing", "italicising", - "jeopardized", "jeopardised", - "jeopardizes", "jeopardises", - "legitimized", "legitimised", - "legitimizes", "legitimises", - "liberalized", "liberalised", - "liberalizes", "liberalises", - "lionization", "lionisation", - "liquidizers", "liquidisers", - "liquidizing", "liquidising", - "magnetizing", "magnetising", - "maneuvering", "manoeuvring", - "marginalize", "marginalise", - "marvelously", "marvellously", - "materialize", 
"materialise", - "mechanizing", "mechanising", - "memorialize", "memorialise", - "mesmerizing", "mesmerising", - "metabolized", "metabolised", - "metabolizes", "metabolises", - "militarized", "militarised", - "militarizes", "militarises", - "milliliters", "millilitres", - "millimeters", "millimetres", - "miniaturize", "miniaturise", - "misbehavior", "misbehaviour", - "misdemeanor", "misdemeanour", - "modernizing", "modernising", - "moisturized", "moisturised", - "moisturizer", "moisturiser", - "moisturizes", "moisturises", - "monopolized", "monopolised", - "monopolizes", "monopolises", - "nationalize", "nationalise", - "naturalized", "naturalised", - "naturalizes", "naturalises", - "neighboring", "neighbouring", - "neutralized", "neutralised", - "neutralizes", "neutralises", - "normalizing", "normalising", - "orthopedics", "orthopaedics", - "ostracizing", "ostracising", - "outmaneuver", "outmanoeuvre", - "oxidization", "oxidisation", - "pasteurized", "pasteurised", - "pasteurizes", "pasteurises", - "patronizing", "patronising", - "personalize", "personalise", - "plagiarized", "plagiarised", - "plagiarizes", "plagiarises", - "politicized", "politicised", - "politicizes", "politicises", - "popularized", "popularised", - "popularizes", "popularises", - "pressurized", "pressurised", - "pressurizes", "pressurises", - "prioritized", "prioritised", - "prioritizes", "prioritises", - "privatizing", "privatising", - "proselytize", "proselytise", - "publicizing", "publicising", - "pulverizing", "pulverising", - "radicalized", "radicalised", - "radicalizes", "radicalises", - "randomizing", "randomising", - "rationalize", "rationalise", - "realization", "realisation", - "recognizing", "recognising", - "reconnoiter", "reconnoitre", - "regularized", "regularised", - "regularizes", "regularises", - "reorganized", "reorganised", - "reorganizes", "reorganises", - "revitalized", "revitalised", - "revitalizes", "revitalises", - "rhapsodized", "rhapsodised", - "rhapsodizes", "rhapsodises", - "romanticize", "romanticise", - "scandalized", "scandalised", - "scandalizes", "scandalises", - "scrutinized", "scrutinised", - "scrutinizes", "scrutinises", - "secularized", "secularised", - "secularizes", "secularises", - "sensitizing", "sensitising", - "serializing", "serialising", - "sermonizing", "sermonising", - "signalizing", "signalising", - "skeptically", "sceptically", - "socializing", "socialising", - "solemnizing", "solemnising", - "specialized", "specialised", - "specializes", "specialises", - "squirreling", "squirrelling", - "stabilizers", "stabilisers", - "stabilizing", "stabilising", - "standardize", "standardise", - "sterilizers", "sterilisers", - "sterilizing", "sterilising", - "stigmatized", "stigmatised", - "stigmatizes", "stigmatises", - "subsidizers", "subsidisers", - "subsidizing", "subsidising", - "summarizing", "summarising", - "symbolizing", "symbolising", - "sympathized", "sympathised", - "sympathizer", "sympathiser", - "sympathizes", "sympathises", - "synchronize", "synchronise", - "synthesized", "synthesised", - "synthesizes", "synthesises", - "systematize", "systematise", - "tantalizing", "tantalising", - "temporizing", "temporising", - "tenderizing", "tenderising", - "terrorizing", "terrorising", - "theatergoer", "theatregoer", - "traumatized", "traumatised", - "traumatizes", "traumatises", - "trivialized", "trivialised", - "trivializes", "trivialises", - "tyrannizing", "tyrannising", - "uncataloged", "uncatalogued", - "uncivilized", "uncivilised", - "unfavorable", "unfavourable", - 
"unfavorably", "unfavourably", - "unorganized", "unorganised", - "untrammeled", "untrammelled", - "utilization", "utilisation", - "vandalizing", "vandalising", - "verbalizing", "verbalising", - "victimizing", "victimising", - "visualizing", "visualising", - "vulgarizing", "vulgarising", - "watercolors", "watercolours", - "westernized", "westernised", - "westernizes", "westernises", - "amortizing", "amortising", - "anesthesia", "anaesthesia", - "anesthetic", "anaesthetic", - "anglicized", "anglicised", - "anglicizes", "anglicises", - "annualized", "annualised", - "antagonize", "antagonise", - "apologized", "apologised", - "apologizes", "apologises", - "appetizers", "appetisers", - "appetizing", "appetising", - "archeology", "archaeology", - "authorizes", "authorises", - "bastardize", "bastardise", - "bedeviling", "bedevilling", - "behavioral", "behavioural", - "belaboring", "belabouring", - "bowdlerize", "bowdlerise", - "brutalized", "brutalised", - "brutalizes", "brutalises", - "canalizing", "canalising", - "canonizing", "canonising", - "capitalize", "capitalise", - "caramelize", "caramelise", - "carbonized", "carbonised", - "carbonizes", "carbonises", - "cataloging", "cataloguing", - "catalyzing", "catalysing", - "categorize", "categorise", - "cauterized", "cauterised", - "cauterizes", "cauterises", - "centerfold", "centrefold", - "centiliter", "centilitre", - "centimeter", "centimetre", - "centralize", "centralise", - "channeling", "channelling", - "checkbooks", "chequebooks", - "civilizing", "civilising", - "colonizers", "colonisers", - "colonizing", "colonising", - "colorfully", "colourfully", - "colorizing", "colourizing", - "councilors", "councillors", - "counselors", "counsellors", - "criticized", "criticised", - "criticizes", "criticises", - "customized", "customised", - "customizes", "customises", - "dehumanize", "dehumanise", - "demobilize", "demobilise", - "demonizing", "demonising", - "demoralize", "demoralise", - "deodorized", "deodorised", - "deodorizes", "deodorises", - "deputizing", "deputising", - "digitizing", "digitising", - "discolored", "discoloured", - "disheveled", "dishevelled", - "dishonored", "dishonoured", - "dramatized", "dramatised", - "dramatizes", "dramatises", - "economized", "economised", - "economizes", "economises", - "empathized", "empathised", - "empathizes", "empathises", - "emphasized", "emphasised", - "emphasizes", "emphasises", - "endeavored", "endeavoured", - "energizing", "energising", - "epicenters", "epicentres", - "epitomized", "epitomised", - "epitomizes", "epitomises", - "equalizers", "equalisers", - "equalizing", "equalising", - "eulogizing", "eulogising", - "evangelize", "evangelise", - "factorized", "factorised", - "factorizes", "factorises", - "fantasized", "fantasised", - "fantasizes", "fantasises", - "favoritism", "favouritism", - "feminizing", "feminising", - "fertilized", "fertilised", - "fertilizer", "fertiliser", - "fertilizes", "fertilises", - "fiberglass", "fibreglass", - "finalizing", "finalising", - "flavorings", "flavourings", - "flavorless", "flavourless", - "flavorsome", "flavoursome", - "formalized", "formalised", - "formalizes", "formalises", - "fossilized", "fossilised", - "fossilizes", "fossilises", - "fraternize", "fraternise", - "galvanized", "galvanised", - "galvanizes", "galvanises", - "generalize", "generalise", - "ghettoized", "ghettoised", - "ghettoizes", "ghettoises", - "globalized", "globalised", - "globalizes", "globalises", - "gruelingly", "gruellingly", - "gynecology", "gynaecology", - "harmonized", 
"harmonised", - "harmonizes", "harmonises", - "hematology", "haematology", - "hemoglobin", "haemoglobin", - "hemophilia", "haemophilia", - "hemorrhage", "haemorrhage", - "homogenize", "homogenise", - "humanizing", "humanising", - "hybridized", "hybridised", - "hybridizes", "hybridises", - "hypnotized", "hypnotised", - "hypnotizes", "hypnotises", - "idealizing", "idealising", - "immobilize", "immobilise", - "immunizing", "immunising", - "impaneling", "impanelling", - "imperiling", "imperilling", - "initialing", "initialling", - "initialize", "initialise", - "ionization", "ionisation", - "italicized", "italicised", - "italicizes", "italicises", - "jeopardize", "jeopardise", - "kilometers", "kilometres", - "lackluster", "lacklustre", - "legalizing", "legalising", - "legitimize", "legitimise", - "liberalize", "liberalise", - "liquidized", "liquidised", - "liquidizer", "liquidiser", - "liquidizes", "liquidises", - "localizing", "localising", - "magnetized", "magnetised", - "magnetizes", "magnetises", - "maneuvered", "manoeuvred", - "marshaling", "marshalling", - "maximizing", "maximising", - "mechanized", "mechanised", - "mechanizes", "mechanises", - "memorizing", "memorising", - "mesmerized", "mesmerised", - "mesmerizes", "mesmerises", - "metabolize", "metabolise", - "militarize", "militarise", - "milliliter", "millilitre", - "millimeter", "millimetre", - "minimizing", "minimising", - "mobilizing", "mobilising", - "modernized", "modernised", - "modernizes", "modernises", - "moisturize", "moisturise", - "monopolize", "monopolise", - "moralizing", "moralising", - "naturalize", "naturalise", - "neighborly", "neighbourly", - "neutralize", "neutralise", - "normalized", "normalised", - "normalizes", "normalises", - "optimizing", "optimising", - "organizers", "organisers", - "organizing", "organising", - "orthopedic", "orthopaedic", - "ostracized", "ostracised", - "ostracizes", "ostracises", - "paralyzing", "paralysing", - "pasteurize", "pasteurise", - "patronized", "patronised", - "patronizes", "patronises", - "pedophiles", "paedophiles", - "pedophilia", "paedophilia", - "penalizing", "penalising", - "plagiarize", "plagiarise", - "plowshares", "ploughshares", - "polarizing", "polarising", - "politicize", "politicise", - "popularize", "popularise", - "prioritize", "prioritise", - "privatized", "privatised", - "privatizes", "privatises", - "publicized", "publicised", - "publicizes", "publicises", - "pulverized", "pulverised", - "pulverizes", "pulverises", - "quarreling", "quarrelling", - "radicalize", "radicalise", - "randomized", "randomised", - "randomizes", "randomises", - "realizable", "realisable", - "recognized", "recognised", - "recognizes", "recognises", - "regularize", "regularise", - "remodeling", "remodelling", - "reorganize", "reorganise", - "revitalize", "revitalise", - "rhapsodize", "rhapsodise", - "ritualized", "ritualised", - "sanitizing", "sanitising", - "satirizing", "satirising", - "scandalize", "scandalise", - "scrutinize", "scrutinise", - "secularize", "secularise", - "sensitized", "sensitised", - "sensitizes", "sensitises", - "sepulchers", "sepulchres", - "serialized", "serialised", - "serializes", "serialises", - "sermonized", "sermonised", - "sermonizes", "sermonises", - "shriveling", "shrivelling", - "signalized", "signalised", - "signalizes", "signalises", - "skepticism", "scepticism", - "socialized", "socialised", - "socializes", "socialises", - "sodomizing", "sodomising", - "solemnized", "solemnised", - "solemnizes", "solemnises", - "specialize", "specialise", - 
"squirreled", "squirrelled", - "stabilized", "stabilised", - "stabilizer", "stabiliser", - "stabilizes", "stabilises", - "stenciling", "stencilling", - "sterilized", "sterilised", - "sterilizer", "steriliser", - "sterilizes", "sterilises", - "stigmatize", "stigmatise", - "subsidized", "subsidised", - "subsidizer", "subsidiser", - "subsidizes", "subsidises", - "summarized", "summarised", - "summarizes", "summarises", - "symbolized", "symbolised", - "symbolizes", "symbolises", - "sympathize", "sympathise", - "tantalized", "tantalised", - "tantalizes", "tantalises", - "temporized", "temporised", - "temporizes", "temporises", - "tenderized", "tenderised", - "tenderizes", "tenderises", - "terrorized", "terrorised", - "terrorizes", "terrorises", - "theorizing", "theorising", - "traumatize", "traumatise", - "trivialize", "trivialise", - "tyrannized", "tyrannised", - "tyrannizes", "tyrannises", - "unionizing", "unionising", - "unraveling", "unravelling", - "urbanizing", "urbanising", - "utilizable", "utilisable", - "vandalized", "vandalised", - "vandalizes", "vandalises", - "vaporizing", "vaporising", - "verbalized", "verbalised", - "verbalizes", "verbalises", - "victimized", "victimised", - "victimizes", "victimises", - "visualized", "visualised", - "visualizes", "visualises", - "vocalizing", "vocalising", - "vulcanized", "vulcanised", - "vulgarized", "vulgarised", - "vulgarizes", "vulgarises", - "watercolor", "watercolour", - "westernize", "westernise", - "womanizers", "womanisers", - "womanizing", "womanising", - "worshiping", "worshipping", - "agonizing", "agonising", - "airplanes", "aeroplanes", - "amortized", "amortised", - "amortizes", "amortises", - "analyzing", "analysing", - "apologize", "apologise", - "appetizer", "appetiser", - "artifacts", "artefacts", - "baptizing", "baptising", - "bedeviled", "bedevilled", - "behaviors", "behaviours", - "bejeweled", "bejewelled", - "belabored", "belaboured", - "brutalize", "brutalise", - "canalized", "canalised", - "canalizes", "canalises", - "canonized", "canonised", - "canonizes", "canonises", - "carbonize", "carbonise", - "cataloged", "catalogued", - "catalyzed", "catalysed", - "catalyzes", "catalyses", - "cauterize", "cauterise", - "channeled", "channelled", - "checkbook", "chequebook", - "checkered", "chequered", - "chiseling", "chiselling", - "civilized", "civilised", - "civilizes", "civilises", - "clamoring", "clamouring", - "colonized", "colonised", - "colonizer", "coloniser", - "colonizes", "colonises", - "colorants", "colourants", - "colorized", "colourized", - "colorizes", "colourizes", - "colorless", "colourless", - "councilor", "councillor", - "counseled", "counselled", - "counselor", "counsellor", - "criticize", "criticise", - "cudgeling", "cudgelling", - "customize", "customise", - "demonized", "demonised", - "demonizes", "demonises", - "deodorize", "deodorise", - "deputized", "deputised", - "deputizes", "deputises", - "digitized", "digitised", - "digitizes", "digitises", - "discolors", "discolours", - "dishonors", "dishonours", - "dramatize", "dramatise", - "driveling", "drivelling", - "economize", "economise", - "empathize", "empathise", - "emphasize", "emphasise", - "enameling", "enamelling", - "endeavors", "endeavours", - "energized", "energised", - "energizes", "energises", - "enthralls", "enthrals", - "epicenter", "epicentre", - "epitomize", "epitomise", - "equalized", "equalised", - "equalizer", "equaliser", - "equalizes", "equalises", - "eulogized", "eulogised", - "eulogizes", "eulogises", - "factorize", "factorise", - 
"fantasize", "fantasise", - "favorable", "favourable", - "favorably", "favourably", - "favorites", "favourites", - "feminized", "feminised", - "feminizes", "feminises", - "fertilize", "fertilise", - "finalized", "finalised", - "finalizes", "finalises", - "flavoring", "flavouring", - "formalize", "formalise", - "fossilize", "fossilise", - "funneling", "funnelling", - "galvanize", "galvanise", - "gamboling", "gambolling", - "ghettoize", "ghettoise", - "globalize", "globalise", - "gonorrhea", "gonorrhoea", - "groveling", "grovelling", - "harboring", "harbouring", - "harmonize", "harmonise", - "honorably", "honourably", - "humanized", "humanised", - "humanizes", "humanises", - "hybridize", "hybridise", - "hypnotize", "hypnotise", - "idealized", "idealised", - "idealizes", "idealises", - "idolizing", "idolising", - "immunized", "immunised", - "immunizes", "immunises", - "impaneled", "impanelled", - "imperiled", "imperilled", - "initialed", "initialled", - "italicize", "italicise", - "itemizing", "itemising", - "kilometer", "kilometre", - "legalized", "legalised", - "legalizes", "legalises", - "lionizing", "lionising", - "liquidize", "liquidise", - "localized", "localised", - "localizes", "localises", - "magnetize", "magnetise", - "maneuvers", "manoeuvres", - "marshaled", "marshalled", - "marveling", "marvelling", - "marvelous", "marvellous", - "maximized", "maximised", - "maximizes", "maximises", - "mechanize", "mechanise", - "memorized", "memorised", - "memorizes", "memorises", - "mesmerize", "mesmerise", - "minimized", "minimised", - "minimizes", "minimises", - "mobilized", "mobilised", - "mobilizes", "mobilises", - "modernize", "modernise", - "moldering", "mouldering", - "moralized", "moralised", - "moralizes", "moralises", - "motorized", "motorised", - "mustached", "moustached", - "mustaches", "moustaches", - "neighbors", "neighbours", - "normalize", "normalise", - "optimized", "optimised", - "optimizes", "optimises", - "organized", "organised", - "organizer", "organiser", - "organizes", "organises", - "ostracize", "ostracise", - "oxidizing", "oxidising", - "panelists", "panellists", - "paralyzed", "paralysed", - "paralyzes", "paralyses", - "parceling", "parcelling", - "patronize", "patronise", - "pedophile", "paedophile", - "penalized", "penalised", - "penalizes", "penalises", - "penciling", "pencilling", - "plowshare", "ploughshare", - "polarized", "polarised", - "polarizes", "polarises", - "practiced", "practised", - "pretenses", "pretences", - "privatize", "privatise", - "publicize", "publicise", - "pulverize", "pulverise", - "quarreled", "quarrelled", - "randomize", "randomise", - "realizing", "realising", - "recognize", "recognise", - "refueling", "refuelling", - "remodeled", "remodelled", - "remolding", "remoulding", - "saltpeter", "saltpetre", - "sanitized", "sanitised", - "sanitizes", "sanitises", - "satirized", "satirised", - "satirizes", "satirises", - "sensitize", "sensitise", - "sepulcher", "sepulchre", - "serialize", "serialise", - "sermonize", "sermonise", - "shoveling", "shovelling", - "shriveled", "shrivelled", - "signaling", "signalling", - "signalize", "signalise", - "skeptical", "sceptical", - "sniveling", "snivelling", - "snorkeled", "snorkelled", - "socialize", "socialise", - "sodomized", "sodomised", - "sodomizes", "sodomises", - "solemnize", "solemnise", - "spiraling", "spiralling", - "splendors", "splendours", - "stabilize", "stabilise", - "stenciled", "stencilled", - "sterilize", "sterilise", - "subsidize", "subsidise", - "succoring", "succouring", - "sulfurous", 
"sulphurous", - "summarize", "summarise", - "swiveling", "swivelling", - "symbolize", "symbolise", - "tantalize", "tantalise", - "temporize", "temporise", - "tenderize", "tenderise", - "terrorize", "terrorise", - "theorized", "theorised", - "theorizes", "theorises", - "travelers", "travellers", - "traveling", "travelling", - "tricolors", "tricolours", - "tunneling", "tunnelling", - "tyrannize", "tyrannise", - "unequaled", "unequalled", - "unionized", "unionised", - "unionizes", "unionises", - "unraveled", "unravelled", - "unrivaled", "unrivalled", - "urbanized", "urbanised", - "urbanizes", "urbanises", - "utilizing", "utilising", - "vandalize", "vandalise", - "vaporized", "vaporised", - "vaporizes", "vaporises", - "verbalize", "verbalise", - "victimize", "victimise", - "visualize", "visualise", - "vocalized", "vocalised", - "vocalizes", "vocalises", - "vulgarize", "vulgarise", - "weaseling", "weaselling", - "womanized", "womanised", - "womanizer", "womaniser", - "womanizes", "womanises", - "worshiped", "worshipped", - "worshiper", "worshipper", - "agonized", "agonised", - "agonizes", "agonises", - "airplane", "aeroplane", - "aluminum", "aluminium", - "amortize", "amortise", - "analyzed", "analysed", - "analyzes", "analyses", - "armorers", "armourers", - "armories", "armouries", - "artifact", "artefact", - "baptized", "baptised", - "baptizes", "baptises", - "behavior", "behaviour", - "behooved", "behoved", - "behooves", "behoves", - "belabors", "belabours", - "calibers", "calibres", - "canalize", "canalise", - "canonize", "canonise", - "catalogs", "catalogues", - "catalyze", "catalyse", - "caviling", "cavilling", - "centered", "centred", - "chiseled", "chiselled", - "civilize", "civilise", - "clamored", "clamoured", - "colonize", "colonise", - "colorant", "colourant", - "coloreds", "coloureds", - "colorful", "colourful", - "coloring", "colouring", - "colorize", "colourize", - "coziness", "cosiness", - "cruelest", "cruellest", - "cudgeled", "cudgelled", - "defenses", "defences", - "demeanor", "demeanour", - "demonize", "demonise", - "deputize", "deputise", - "diarrhea", "diarrhoea", - "digitize", "digitise", - "disfavor", "disfavour", - "dishonor", "dishonour", - "distills", "distils", - "driveled", "drivelled", - "enameled", "enamelled", - "enamored", "enamoured", - "endeavor", "endeavour", - "energize", "energise", - "epaulets", "epaulettes", - "equalize", "equalise", - "estrogen", "oestrogen", - "etiology", "aetiology", - "eulogize", "eulogise", - "favoring", "favouring", - "favorite", "favourite", - "feminize", "feminise", - "finalize", "finalise", - "flavored", "flavoured", - "flutists", "flautists", - "fulfills", "fulfils", - "funneled", "funnelled", - "gamboled", "gambolled", - "graveled", "gravelled", - "groveled", "grovelled", - "grueling", "gruelling", - "harbored", "harboured", - "honoring", "honouring", - "humanize", "humanise", - "humoring", "humouring", - "idealize", "idealise", - "idolized", "idolised", - "idolizes", "idolises", - "immunize", "immunise", - "ionizing", "ionising", - "itemized", "itemised", - "itemizes", "itemises", - "jewelers", "jewellers", - "labeling", "labelling", - "laborers", "labourers", - "laboring", "labouring", - "legalize", "legalise", - "leukemia", "leukaemia", - "levelers", "levellers", - "leveling", "levelling", - "libeling", "libelling", - "libelous", "libellous", - "lionized", "lionised", - "lionizes", "lionises", - "localize", "localise", - "louvered", "louvred", - "maneuver", "manoeuvre", - "marveled", "marvelled", - "maximize", "maximise", - 
"memorize", "memorise", - "minimize", "minimise", - "mobilize", "mobilise", - "modelers", "modellers", - "modeling", "modelling", - "moldered", "mouldered", - "moldiest", "mouldiest", - "moldings", "mouldings", - "moralize", "moralise", - "mustache", "moustache", - "neighbor", "neighbour", - "odorless", "odourless", - "offenses", "offences", - "optimize", "optimise", - "organize", "organise", - "oxidized", "oxidised", - "oxidizes", "oxidises", - "paneling", "panelling", - "panelist", "panellist", - "paralyze", "paralyse", - "parceled", "parcelled", - "pedaling", "pedalling", - "penalize", "penalise", - "penciled", "pencilled", - "polarize", "polarise", - "pretense", "pretence", - "pummeled", "pummelling", - "raveling", "ravelling", - "realized", "realised", - "realizes", "realises", - "refueled", "refuelled", - "remolded", "remoulded", - "revelers", "revellers", - "reveling", "revelling", - "rivaling", "rivalling", - "sanitize", "sanitise", - "satirize", "satirise", - "savories", "savouries", - "savoring", "savouring", - "scepters", "sceptres", - "shoveled", "shovelled", - "signaled", "signalled", - "skeptics", "sceptics", - "sniveled", "snivelled", - "sodomize", "sodomise", - "specters", "spectres", - "spiraled", "spiralled", - "splendor", "splendour", - "succored", "succoured", - "sulfates", "sulphates", - "sulfides", "sulphides", - "swiveled", "swivelled", - "tasseled", "tasselled", - "theaters", "theatres", - "theorize", "theorise", - "toweling", "towelling", - "traveler", "traveller", - "trialing", "trialling", - "tricolor", "tricolour", - "tunneled", "tunnelled", - "unionize", "unionise", - "unsavory", "unsavoury", - "urbanize", "urbanise", - "utilized", "utilised", - "utilizes", "utilises", - "vaporize", "vaporise", - "vocalize", "vocalise", - "weaseled", "weaselled", - "womanize", "womanise", - "yodeling", "yodelling", - "agonize", "agonise", - "analyze", "analyse", - "appalls", "appals", - "armored", "armoured", - "armorer", "armourer", - "baptize", "baptise", - "behoove", "behove", - "belabor", "belabour", - "beveled", "bevelled", - "caliber", "calibre", - "caroled", "carolled", - "caviled", "cavilled", - "centers", "centres", - "clamors", "clamours", - "clangor", "clangour", - "colored", "coloured", - "coziest", "cosiest", - "crueler", "crueller", - "defense", "defence", - "dialing", "dialling", - "dialogs", "dialogues", - "distill", "distil", - "dueling", "duelling", - "enrolls", "enrols", - "epaulet", "epaulette", - "favored", "favoured", - "flavors", "flavours", - "flutist", "flautist", - "fueling", "fuelling", - "fulfill", "fulfil", - "goiters", "goitres", - "harbors", "harbours", - "honored", "honoured", - "humored", "humoured", - "idolize", "idolise", - "ionized", "ionised", - "ionizes", "ionises", - "itemize", "itemise", - "jeweled", "jewelled", - "jeweler", "jeweller", - "jewelry", "jewellery", - "labeled", "labelled", - "labored", "laboured", - "laborer", "labourer", - "leveled", "levelled", - "leveler", "leveller", - "libeled", "libelled", - "lionize", "lionise", - "louvers", "louvres", - "modeled", "modelled", - "modeler", "modeller", - "molders", "moulders", - "moldier", "mouldier", - "molding", "moulding", - "molting", "moulting", - "offense", "offence", - "oxidize", "oxidise", - "pajamas", "pyjamas", - "paneled", "panelled", - "parlors", "parlours", - "pedaled", "pedalled", - "plowing", "ploughing", - "plowman", "ploughman", - "plowmen", "ploughmen", - "realize", "realise", - "remolds", "remoulds", - "reveled", "revelled", - "reveler", "reveller", - "rivaled", 
"rivalled", - "rumored", "rumoured", - "saviors", "saviours", - "savored", "savoured", - "scepter", "sceptre", - "skeptic", "sceptic", - "specter", "spectre", - "succors", "succours", - "sulfate", "sulphate", - "sulfide", "sulphide", - "theater", "theatre", - "toweled", "towelled", - "toxemia", "toxaemia", - "trialed", "trialled", - "utilize", "utilise", - "yodeled", "yodelled", - "anemia", "anaemia", - "anemic", "anaemic", - "appall", "appal", - "arbors", "arbours", - "armory", "armoury", - "candor", "candour", - "center", "centre", - "clamor", "clamour", - "colors", "colours", - "cozier", "cosier", - "cozies", "cosies", - "cozily", "cosily", - "dialed", "dialled", - "drafty", "draughty", - "dueled", "duelled", - "favors", "favours", - "fervor", "fervour", - "fibers", "fibres", - "flavor", "flavour", - "fueled", "fuelled", - "goiter", "goitre", - "harbor", "harbour", - "honors", "honours", - "humors", "humours", - "labors", "labours", - "liters", "litres", - "louver", "louvre", - "luster", "lustre", - "meager", "meagre", - "miters", "mitres", - "molded", "moulded", - "molder", "moulder", - "molted", "moulted", - "pajama", "pyjama", - "parlor", "parlour", - "plowed", "ploughed", - "rancor", "rancour", - "remold", "remould", - "rigors", "rigours", - "rumors", "rumours", - "savors", "savours", - "savory", "savoury", - "succor", "succour", - "tumors", "tumours", - "vapors", "vapours", - "aging", "ageing", - "arbor", "arbour", - "ardor", "ardour", - "armor", "armour", - "chili", "chilli", - "color", "colour", - "edema", "edoema", - "favor", "favour", - "fecal", "faecal", - "feces", "faeces", - "fiber", "fibre", - "honor", "honour", - "humor", "humour", - "labor", "labour", - "liter", "litre", - "miter", "mitre", - "molds", "moulds", - "moldy", "mouldy", - "molts", "moults", - "odors", "odours", - "plows", "ploughs", - "rigor", "rigour", - "rumor", "rumour", - "savor", "savour", - "valor", "valour", - "vapor", "vapour", - "vigor", "vigour", - "cozy", "cosy", - "mold", "mould", - "molt", "moult", - "odor", "odour", - "plow", "plough", -} diff --git a/vendor/github.com/golangci/revgrep/.gitignore b/vendor/github.com/golangci/revgrep/.gitignore deleted file mode 100644 index 0540fe2ca..000000000 --- a/vendor/github.com/golangci/revgrep/.gitignore +++ /dev/null @@ -1 +0,0 @@ -testdata/git diff --git a/vendor/github.com/golangci/revgrep/.golangci.yml b/vendor/github.com/golangci/revgrep/.golangci.yml deleted file mode 100644 index 02ed5ec84..000000000 --- a/vendor/github.com/golangci/revgrep/.golangci.yml +++ /dev/null @@ -1,82 +0,0 @@ -run: - timeout: 2m - -linters-settings: - govet: - check-shadowing: true - enable-all: true - disable: - - fieldalignment - gocyclo: - min-complexity: 30 # 30 by default (but we recommend 10-20) - goconst: - min-len: 3 - min-occurrences: 3 - misspell: - locale: US - funlen: - lines: -1 - statements: 80 # default 40 - gocognit: - min-complexity: 65 # default 30 - gofumpt: - extra-rules: true - godox: - keywords: - - FIXME - -linters: - enable-all: true - disable: - - deadcode # deprecated - - exhaustivestruct # deprecated - - golint # deprecated - - ifshort # deprecated - - interfacer # deprecated - - maligned # deprecated - - nosnakecase # deprecated - - scopelint # deprecated - - structcheck # deprecated - - varcheck # deprecated - - cyclop # duplicate of gocyclo - - sqlclosecheck # not relevant (SQL) - - rowserrcheck # not relevant (SQL) - - execinquery # not relevant (SQL) - - dupl - - lll - - nestif - - gomnd - - goerr113 -# - wrapcheck - - nlreturn - - wsl - - 
exhaustive - - exhaustruct - - tparallel - - testpackage - - paralleltest - - ifshort - - forcetypeassert - - varnamelen - - prealloc # false-positives - - nosnakecase - - nonamedreturns - - nilerr - - depguard - -issues: - exclude-use-default: false - max-per-linter: 0 - max-same-issues: 0 - exclude: - - 'ST1000: at least one file in a package should have a package comment' - exclude-rules: - - path: (.+)_test.go - linters: - - funlen - - goconst - - gosec - - maintidx - - path: cmd/revgrep/main.go - linters: - - forbidigo diff --git a/vendor/github.com/golangci/revgrep/LICENSE b/vendor/github.com/golangci/revgrep/LICENSE deleted file mode 100644 index 8dada3eda..000000000 --- a/vendor/github.com/golangci/revgrep/LICENSE +++ /dev/null @@ -1,201 +0,0 @@ - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. 
For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. 
The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. 
- - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "{}" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright {yyyy} {name of copyright owner} - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. diff --git a/vendor/github.com/golangci/revgrep/Makefile b/vendor/github.com/golangci/revgrep/Makefile deleted file mode 100644 index 5ac8725d0..000000000 --- a/vendor/github.com/golangci/revgrep/Makefile +++ /dev/null @@ -1,12 +0,0 @@ -.PHONY: clean lint lint-fix test - -default: lint test - -test: - go test -v -cover ./... - -lint: - golangci-lint run - -lint-fix: - golangci-lint run --fix diff --git a/vendor/github.com/golangci/revgrep/README.md b/vendor/github.com/golangci/revgrep/README.md deleted file mode 100644 index 97f25ffb3..000000000 --- a/vendor/github.com/golangci/revgrep/README.md +++ /dev/null @@ -1,55 +0,0 @@ -# Overview - -`revgrep` is a CLI tool that filters the output of static analysis tools down to only the lines changed relative to a commit reference. - -# Install - -```bash -go get -u github.com/golangci/revgrep/... -``` - -# Usage - -In the scenario below, a change was made causing a warning in `go vet` on line 5, but `go vet` will show all warnings. -Using `revgrep`, you can show only warnings for lines of code that have been changed (in this case, hiding line 6). - -```bash -[user@host dir (master)]$ go vet -main.go:5: missing argument for Sprintf("%s"): format reads arg 1, have only 0 args -main.go:6: missing argument for Sprintf("%s"): format reads arg 1, have only 0 args -[user@host dir (master)]$ go vet |& revgrep -main.go:5: missing argument for Sprintf("%s"): format reads arg 1, have only 0 args -``` - -`|&` is used above because many static analysis programs write to `stderr`, not `stdout`; `|&` combines both `stderr` and -`stdout`. The same can be achieved with `go vet 2>&1 | revgrep`. - -The `revgrep` CLI tool returns an exit status of 1 if any issues match, else it returns 0. Consider using -`${PIPESTATUS[0]}` for the exit status of the `go vet` command in the above example. - -``` -Usage: revgrep [options] [from-rev] [to-rev] - -from-rev filters issues to lines changed since (and including) this revision - to-rev filters issues to lines changed since (and including) this revision, requires from-rev - - If no revisions are given, and there are unstaged changes or untracked files, only those changes are shown - If no revisions are given, and there are no unstaged changes or untracked files, only changes in HEAD~ are shown - If from-rev is given and to-rev is not, only changes between from-rev and HEAD are shown.
- - -d Show debug output - -regexp string - Regexp to match path, line number, optional column number, and message -``` - -# Other Examples - -Issues between branches: -```bash -[user@host dir (feature/branch)]$ go vet |& revgrep master -``` - -Issues since last push: -```bash -[user@host dir (master)]$ go vet |& revgrep origin/master -``` diff --git a/vendor/github.com/golangci/revgrep/revgrep.go b/vendor/github.com/golangci/revgrep/revgrep.go deleted file mode 100644 index 7796b1c01..000000000 --- a/vendor/github.com/golangci/revgrep/revgrep.go +++ /dev/null @@ -1,461 +0,0 @@ -// Package revgrep filters the output of static analysis tools to only lines changed based on a commit reference. -package revgrep - -import ( - "bufio" - "bytes" - "errors" - "fmt" - "io" - "os" - "os/exec" - "path/filepath" - "regexp" - "strconv" - "strings" -) - -// Checker provides APIs to filter the output of static analysis tools to specific commits, -// such as showing only issues since last commit. -type Checker struct { - // Patch file (unified) to read to detect lines being changed, - // if nil revgrep will attempt to detect the VCS and generate an appropriate patch. - // Auto-detection will search for uncommitted changes first, - // if none found, will generate a patch from last committed change. - // File paths within patches must be relative to current working directory. - Patch io.Reader - // NewFiles is a list of file names (with absolute paths) where the entire contents of the file is new. - NewFiles []string - // Debug sets the debug writer for additional output. - Debug io.Writer - // RevisionFrom is the revision to start checking from; leave blank for auto-detection (ignored if Patch is set). - RevisionFrom string - // WholeFiles indicates that the user wishes to see all issues that come up anywhere in any file that has been changed in this revision or patch. - WholeFiles bool - // RevisionTo is the revision to stop checking at; leave blank for auto-detection (ignored if Patch is set). - RevisionTo string - // Regexp to match path, line number, optional column number, and message. - Regexp string - // AbsPath is used to make an issue's absolute filename relative in order to match the patch file. - // If not set, current working directory is used. - AbsPath string - - // Calculated changes for next calls to IsNewIssue - changes map[string][]pos -} - -// Issue contains metadata about an issue found. -type Issue struct { - // File is the name of the file as it appeared from the patch. - File string - // LineNo is the line number of the file. - LineNo int - // ColNo is the column number or 0 if none could be parsed. - ColNo int - // HunkPos is the position from the file's first @@; for new files this will be the line number. - // See also: https://developer.github.com/v3/pulls/comments/#create-a-comment - HunkPos int - // Issue text as it appeared from the tool. - Issue string - // Message is the issue without file name, line number and column number. - Message string -} - -// InputIssue represents an issue found by some linter. -type InputIssue interface { - FilePath() string - Line() int -} - -type simpleInputIssue struct { - filePath string - lineNumber int -} - -type pos struct { - lineNo int // line number - hunkPos int // position relative to first @@ in file -} - -func (i simpleInputIssue) FilePath() string { - return i.filePath -} - -func (i simpleInputIssue) Line() int { - return i.lineNumber -} - -// Prepare extracts a patch and changed lines.
-func (c *Checker) Prepare() error { - returnErr := c.preparePatch() - c.changes = c.linesChanged() - return returnErr -} - -// IsNewIssue checks whether issue found by linter is new: it was found in changed lines. -func (c *Checker) IsNewIssue(i InputIssue) (hunkPos int, isNew bool) { - fchanges, ok := c.changes[filepath.ToSlash(i.FilePath())] - if !ok { // file wasn't changed - return 0, false - } - - if c.WholeFiles { - return i.Line(), true - } - - var ( - fpos pos - changed bool - ) - // found file, see if lines matched - for _, pos := range fchanges { - if pos.lineNo == i.Line() { - fpos = pos - changed = true - break - } - } - - if changed || fchanges == nil { - // either file changed or it's a new file - hunkPos := fpos.lineNo - if changed { // existing file changed - hunkPos = fpos.hunkPos - } - - return hunkPos, true - } - - return 0, false -} - -// Check scans reader and writes any lines to writer that have been added in Checker.Patch. -// -// Returns the issues written to writer when no error occurs. -// -// If no VCS could be found or other VCS errors occur, -// all issues are written to writer and an error is returned. -// -// File paths in reader must be relative to current working directory or absolute. -func (c *Checker) Check(reader io.Reader, writer io.Writer) (issues []Issue, err error) { - returnErr := c.Prepare() - writeAll := returnErr != nil - - // file.go:lineNo:colNo:message - // colNo is optional, strip spaces before message - lineRE := regexp.MustCompile(`(.+\.go):([0-9]+):([0-9]+)?:?\s*(.*)`) - if c.Regexp != "" { - lineRE, err = regexp.Compile(c.Regexp) - if err != nil { - return nil, fmt.Errorf("could not parse regexp: %w", err) - } - } - - // TODO consider lazy loading this, if there's nothing in stdin, no point - // checking for recent changes - c.debugf("lines changed: %+v", c.changes) - - absPath := c.AbsPath - if absPath == "" { - absPath, err = os.Getwd() - if err != nil { - returnErr = fmt.Errorf("could not get current working directory: %w", err) - } - } - - // Scan each line in reader and only write those lines if lines changed - scanner := bufio.NewScanner(reader) - for scanner.Scan() { - line := lineRE.FindSubmatch(scanner.Bytes()) - if line == nil { - c.debugf("cannot parse file+line number: %s", scanner.Text()) - continue - } - - if writeAll { - _, _ = fmt.Fprintln(writer, scanner.Text()) - continue - } - - // Make absolute path names relative - path := string(line[1]) - if rel, err := filepath.Rel(absPath, path); err == nil { - c.debugf("rewrote path from %q to %q (absPath: %q)", path, rel, absPath) - path = rel - } - - // Parse line number - lno, err := strconv.ParseUint(string(line[2]), 10, 64) - if err != nil { - c.debugf("cannot parse line number: %q", scanner.Text()) - continue - } - - // Parse optional column number - var cno uint64 - if len(line[3]) > 0 { - cno, err = strconv.ParseUint(string(line[3]), 10, 64) - if err != nil { - c.debugf("cannot parse column number: %q", scanner.Text()) - // Ignore this error and continue - } - } - - // Extract message - msg := string(line[4]) - - c.debugf("path: %q, lineNo: %v, colNo: %v, msg: %q", path, lno, cno, msg) - - simpleIssue := simpleInputIssue{filePath: path, lineNumber: int(lno)} - - hunkPos, changed := c.IsNewIssue(simpleIssue) - if changed { - issue := Issue{ - File: path, - LineNo: int(lno), - ColNo: int(cno), - HunkPos: hunkPos, - Issue: scanner.Text(), - Message: msg, - } - issues = append(issues, issue) - - _, _ = fmt.Fprintln(writer, scanner.Text()) - } else { - c.debugf("unchanged: 
%s", scanner.Text()) - } - } - - if err := scanner.Err(); err != nil { - returnErr = fmt.Errorf("error reading standard input: %w", err) - } - - return issues, returnErr -} - -func (c *Checker) debugf(format string, s ...interface{}) { - if c.Debug != nil { - _, _ = fmt.Fprint(c.Debug, "DEBUG: ") - _, _ = fmt.Fprintf(c.Debug, format+"\n", s...) - } -} - -func (c *Checker) preparePatch() error { - // Check if patch is supplied, if not, retrieve from VCS - if c.Patch == nil { - var err error - c.Patch, c.NewFiles, err = GitPatch(c.RevisionFrom, c.RevisionTo) - if err != nil { - return fmt.Errorf("could not read git repo: %w", err) - } - if c.Patch == nil { - return errors.New("no version control repository found") - } - } - - return nil -} - -// linesChanges returns a map of file names to line numbers being changed. -// If key is nil, the file has been recently added, else it contains a slice of positions that have been added. -func (c *Checker) linesChanged() map[string][]pos { - type state struct { - file string - lineNo int // current line number within chunk - hunkPos int // current line count since first @@ in file - changes []pos // position of changes - } - - changes := make(map[string][]pos) - - for _, file := range c.NewFiles { - changes[file] = nil - } - - if c.Patch == nil { - return changes - } - - var s state - - scanner := bufio.NewReader(c.Patch) - var scanErr error - for { - lineB, isPrefix, err := scanner.ReadLine() - if isPrefix { - // If a single line overflowed the buffer, don't bother processing it as - // it's likey part of a file and not relevant to the patch. - continue - } - if err != nil { - scanErr = err - break - } - line := strings.TrimRight(string(lineB), "\n") - - c.debugf(line) - s.lineNo++ - s.hunkPos++ - switch { - case strings.HasPrefix(line, "+++ ") && len(line) > 4: - if s.changes != nil { - // record the last state - changes[s.file] = s.changes - } - // 6 removes "+++ b/" - s = state{file: line[6:], hunkPos: -1, changes: []pos{}} - case strings.HasPrefix(line, "@@ "): - // @@ -1 +2,4 @@ - // chdr ^^^^^^^^^^^^^ - // ahdr ^^^^ - // cstart ^ - chdr := strings.Split(line, " ") - ahdr := strings.Split(chdr[2], ",") - // [1:] to remove leading plus - cstart, err := strconv.ParseUint(ahdr[0][1:], 10, 64) - if err != nil { - panic(err) - } - s.lineNo = int(cstart) - 1 // -1 as cstart is the next line number - case strings.HasPrefix(line, "-"): - s.lineNo-- - case strings.HasPrefix(line, "+"): - s.changes = append(s.changes, pos{lineNo: s.lineNo, hunkPos: s.hunkPos}) - } - } - - if !errors.Is(scanErr, io.EOF) { - _, _ = fmt.Fprintln(os.Stderr, "reading standard input:", scanErr) - } - - // record the last state - changes[s.file] = s.changes - - return changes -} - -// GitPatch returns a patch from a git repository, -// if no git repository was found and no errors occurred, nil is returned, else an error is returned revisionFrom and revisionTo defines the git diff parameters, -// if left blank and there are unstaged changes or untracked files, only those will be returned else only check changes since HEAD~. -// If revisionFrom is set but revisionTo is not, untracked files will be included, to exclude untracked files set revisionTo to HEAD~. -// It's incorrect to specify revisionTo without a revisionFrom. 
-func GitPatch(revisionFrom, revisionTo string) (io.Reader, []string, error) { - // check if git repo exists - if err := exec.Command("git", "status", "--porcelain").Run(); err != nil { - // don't return an error, we assume the error is not repo exists - return nil, nil, nil - } - - // make a patch for untracked files - ls, err := exec.Command("git", "ls-files", "--others", "--exclude-standard").CombinedOutput() - if err != nil { - return nil, nil, fmt.Errorf("error executing git ls-files: %w", err) - } - - var newFiles []string - for _, file := range bytes.Split(ls, []byte{'\n'}) { - if len(file) == 0 || bytes.HasSuffix(file, []byte{'/'}) { - // ls-files was sometimes showing directories when they were ignored - // I couldn't create a test case for this as I couldn't reproduce correctly - // for the moment, just exclude files with trailing / - continue - } - newFiles = append(newFiles, string(file)) - } - - var patch bytes.Buffer - if revisionFrom != "" { - cmd := gitDiff(revisionFrom) - if revisionTo != "" { - cmd.Args = append(cmd.Args, revisionTo) - } - cmd.Args = append(cmd.Args, "--") - - cmd.Stdout = &patch - if err := cmd.Run(); err != nil { - return nil, nil, fmt.Errorf("error executing git diff %q %q: %w", revisionFrom, revisionTo, err) - } - - if revisionTo == "" { - return &patch, newFiles, nil - } - - return &patch, nil, nil - } - - // make a patch for unstaged changes - cmd := gitDiff("--") - cmd.Stdout = &patch - if err := cmd.Run(); err != nil { - return nil, nil, fmt.Errorf("error executing git diff: %w", err) - } - unstaged := patch.Len() > 0 - - // If there's unstaged changes OR untracked changes (or both), then this is - // a suitable patch - if unstaged || newFiles != nil { - return &patch, newFiles, nil - } - - // check for changes in recent commit - - cmd = gitDiff("HEAD~", "--") - cmd.Stdout = &patch - if err := cmd.Run(); err != nil { - return nil, nil, fmt.Errorf("error executing git diff HEAD~: %w", err) - } - - return &patch, nil, nil -} - -func gitDiff(extraArgs ...string) *exec.Cmd { - cmd := exec.Command("git", "diff", "--color=never", "--no-ext-diff") - - if isSupportedByGit(2, 41, 0) { - cmd.Args = append(cmd.Args, "--default-prefix") - } - - cmd.Args = append(cmd.Args, "--relative") - cmd.Args = append(cmd.Args, extraArgs...) - - return cmd -} - -func isSupportedByGit(major, minor, patch int) bool { - output, err := exec.Command("git", "version").CombinedOutput() - if err != nil { - return false - } - - parts := bytes.Split(bytes.TrimSpace(output), []byte(" ")) - if len(parts) < 3 { - return false - } - - v := string(parts[2]) - if v == "" { - return false - } - - vp := regexp.MustCompile(`^(\d+)\.(\d+)(?:\.(\d+))?.*$`).FindStringSubmatch(v) - if len(vp) < 4 { - return false - } - - currentMajor, err := strconv.Atoi(vp[1]) - if err != nil { - return false - } - - currentMinor, err := strconv.Atoi(vp[2]) - if err != nil { - return false - } - - currentPatch, err := strconv.Atoi(vp[3]) - if err != nil { - return false - } - - return currentMajor*1_000_000_000+currentMinor*1_000_000+currentPatch*1_000 >= major*1_000_000_000+minor*1_000_000+patch*1_000 -} diff --git a/vendor/github.com/golangci/unconvert/LICENSE b/vendor/github.com/golangci/unconvert/LICENSE deleted file mode 100644 index 744875676..000000000 --- a/vendor/github.com/golangci/unconvert/LICENSE +++ /dev/null @@ -1,27 +0,0 @@ -Copyright (c) 2012 The Go Authors. All rights reserved. 
- -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are -met: - - * Redistributions of source code must retain the above copyright -notice, this list of conditions and the following disclaimer. - * Redistributions in binary form must reproduce the above -copyright notice, this list of conditions and the following disclaimer -in the documentation and/or other materials provided with the -distribution. - * Neither the name of Google Inc. nor the names of its -contributors may be used to endorse or promote products derived from -this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/vendor/github.com/golangci/unconvert/README.md b/vendor/github.com/golangci/unconvert/README.md deleted file mode 100644 index e9230c218..000000000 --- a/vendor/github.com/golangci/unconvert/README.md +++ /dev/null @@ -1,6 +0,0 @@ -Fork of [unconvert](https://github.com/mdempsky/unconvert) to be usable as a library. - -The specific elements are inside the file `golangci.go`. - -The only modification of the file `unconvert.go` is the remove of the global variables for the flags. -The tests will never work because of that, then the CI is disabled. diff --git a/vendor/github.com/golangci/unconvert/golangci.go b/vendor/github.com/golangci/unconvert/golangci.go deleted file mode 100644 index 306c44e5e..000000000 --- a/vendor/github.com/golangci/unconvert/golangci.go +++ /dev/null @@ -1,78 +0,0 @@ -package unconvert - -import ( - "go/ast" - "go/token" - "strings" - "sync" - - "golang.org/x/tools/go/analysis" -) - -// Transformed version of the original unconvert flags section. -// The section has been removed inside `unconvert.go` -var ( - flagAll = pointer(false) - flagApply = pointer(false) - flagCPUProfile = pointer("") - flagSafe = pointer(false) - flagV = pointer(false) - flagTests = pointer(true) - flagFastMath = pointer(false) - flagTags = pointer("") - flagConfigs = pointer("") -) - -func pointer[T string | int | int32 | int64 | bool](v T) *T { return &v } - -func Run(pass *analysis.Pass, fastMath, safe bool) []token.Position { - type res struct { - file string - edits editSet - } - - flagFastMath = pointer(fastMath) - flagSafe = pointer(safe) - - ch := make(chan res) - var wg sync.WaitGroup - for _, file := range pass.Files { - file := file - - tokenFile := pass.Fset.File(file.Package) - filename := tokenFile.Position(file.Package).Filename - - // Hack to recognize _cgo_gotypes.go. 
- if strings.HasSuffix(filename, "-d") || strings.HasSuffix(filename, "/_cgo_gotypes.go") { - continue - } - - wg.Add(1) - go func() { - defer wg.Done() - - v := visitor{info: pass.TypesInfo, file: tokenFile, edits: make(editSet)} - ast.Walk(&v, file) - - ch <- res{filename, v.edits} - }() - } - go func() { - wg.Wait() - close(ch) - }() - - m := make(fileToEditSet) - for r := range ch { - m[r.file] = r.edits - } - - var positions []token.Position - for _, edit := range m { - for position, _ := range edit { - positions = append(positions, position) - } - } - - return positions -} diff --git a/vendor/github.com/golangci/unconvert/unconvert.go b/vendor/github.com/golangci/unconvert/unconvert.go deleted file mode 100644 index 222aeadf8..000000000 --- a/vendor/github.com/golangci/unconvert/unconvert.go +++ /dev/null @@ -1,650 +0,0 @@ -// Copyright 2015 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Package unconvert Unconvert removes redundant type conversions from Go packages. -package unconvert - -import ( - "bytes" - "encoding/json" - "flag" - "fmt" - "go/ast" - "go/format" - "go/parser" - "go/token" - "go/types" - "io/ioutil" - "log" - "os" - "os/exec" - "reflect" - "runtime/pprof" - "sort" - "strings" - "sync" - "unicode" - - "golang.org/x/text/width" - "golang.org/x/tools/go/packages" -) - -// Unnecessary conversions are identified by the position -// of their left parenthesis within a source file. - -type editSet map[token.Position]struct{} - -func (e editSet) add(pos token.Position) { - pos.Offset = 0 - e[pos] = struct{}{} -} - -func (e editSet) has(pos token.Position) bool { - pos.Offset = 0 - _, ok := e[pos] - return ok -} - -func (e editSet) remove(pos token.Position) { - pos.Offset = 0 - delete(e, pos) -} - -// intersect removes positions from e that are not present in x. -func (e editSet) intersect(x editSet) { - for pos := range e { - if _, ok := x[pos]; !ok { - delete(e, pos) - } - } -} - -type fileToEditSet map[string]editSet - -func apply(file string, edits editSet) { - if len(edits) == 0 { - return - } - - fset := token.NewFileSet() - f, err := parser.ParseFile(fset, file, nil, parser.ParseComments) - if err != nil { - log.Fatal(err) - } - - // Note: We modify edits during the walk. - v := editor{edits: edits, file: fset.File(f.Package)} - ast.Walk(&v, f) - if len(edits) != 0 { - log.Printf("%s: missing edits %s", file, edits) - } - - // TODO(mdempsky): Write to temporary file and rename. 
- var buf bytes.Buffer - err = format.Node(&buf, fset, f) - if err != nil { - log.Fatal(err) - } - - err = ioutil.WriteFile(file, buf.Bytes(), 0) - if err != nil { - log.Fatal(err) - } -} - -type editor struct { - edits editSet - file *token.File -} - -func (e *editor) Visit(n ast.Node) ast.Visitor { - if n == nil { - return nil - } - v := reflect.ValueOf(n).Elem() - for i, n := 0, v.NumField(); i < n; i++ { - switch f := v.Field(i).Addr().Interface().(type) { - case *ast.Expr: - e.rewrite(f) - case *[]ast.Expr: - for i := range *f { - e.rewrite(&(*f)[i]) - } - } - } - return e -} - -func (e *editor) rewrite(f *ast.Expr) { - call, ok := (*f).(*ast.CallExpr) - if !ok { - return - } - - pos := e.file.Position(call.Lparen) - if !e.edits.has(pos) { - return - } - *f = call.Args[0] - e.edits.remove(pos) -} - -var ( - cr = []byte{'\r'} - nl = []byte{'\n'} -) - -func print(conversions []token.Position) { - var file string - var lines [][]byte - - for _, pos := range conversions { - fmt.Printf("%s:%d:%d: unnecessary conversion\n", pos.Filename, pos.Line, pos.Column) - if *flagV { - if pos.Filename != file { - buf, err := ioutil.ReadFile(pos.Filename) - if err != nil { - log.Fatal(err) - } - file = pos.Filename - lines = bytes.Split(buf, nl) - } - - line := bytes.TrimSuffix(lines[pos.Line-1], cr) - fmt.Printf("%s\n", line) - - // For files processed by cgo, Column is the - // column location after cgo processing, which - // may be different than the source column - // that we want here. In lieu of a better - // heuristic for detecting this case, at least - // avoid panicking if column is out of bounds. - if pos.Column <= len(line) { - fmt.Printf("%s^\n", rub(line[:pos.Column-1])) - } - } - } -} - -// Rub returns a copy of buf with all non-whitespace characters replaced -// by spaces (like rubbing them out with white out). -func rub(buf []byte) []byte { - // TODO(mdempsky): Handle combining characters? - var res bytes.Buffer - for _, r := range string(buf) { - if unicode.IsSpace(r) { - res.WriteRune(r) - continue - } - switch width.LookupRune(r).Kind() { - case width.EastAsianWide, width.EastAsianFullwidth: - res.WriteString(" ") - default: - res.WriteByte(' ') - } - } - return res.Bytes() -} - -func usage() { - fmt.Fprintf(os.Stderr, "usage: unconvert [flags] [package ...]\n") - flag.PrintDefaults() -} - -func main() { - flag.Usage = usage - flag.Parse() - - if *flagCPUProfile != "" { - f, err := os.Create(*flagCPUProfile) - if err != nil { - log.Fatal(err) - } - pprof.StartCPUProfile(f) - defer pprof.StopCPUProfile() - } - - patterns := flag.Args() // 0 or more import path patterns. 
- - var configs [][]string - if *flagConfigs != "" { - if os.Getenv("UNCONVERT_CONFIGS_EXPERIMENT") != "1" { - fmt.Println("WARNING: -configs is experimental and subject to change without notice.") - fmt.Println("Please comment at https://github.com/mdempsky/unconvert/issues/26") - fmt.Println("if you'd like to rely on this interface.") - fmt.Println("(Set UNCONVERT_CONFIGS_EXPERIMENT=1 to silence this warning.)") - fmt.Println() - } - - if err := json.Unmarshal([]byte(*flagConfigs), &configs); err != nil { - log.Fatal(err) - } - } else if *flagAll { - configs = allConfigs() - } else { - configs = [][]string{nil} - } - - m := mergeEdits(patterns, configs) - - if *flagApply { - var wg sync.WaitGroup - for f, e := range m { - wg.Add(1) - f, e := f, e - go func() { - defer wg.Done() - apply(f, e) - }() - } - wg.Wait() - } else { - var conversions []token.Position - for _, positions := range m { - for pos := range positions { - conversions = append(conversions, pos) - } - } - sort.Sort(byPosition(conversions)) - print(conversions) - if len(conversions) > 0 { - os.Exit(1) - } - } -} - -func allConfigs() [][]string { - out, err := exec.Command("go", "tool", "dist", "list", "-json").Output() - if err != nil { - log.Fatal(err) - } - - var platforms []struct { - GOOS, GOARCH string - } - err = json.Unmarshal(out, &platforms) - if err != nil { - log.Fatal(err) - } - - var res [][]string - for _, platform := range platforms { - res = append(res, []string{ - "GOOS=" + platform.GOOS, - "GOARCH=" + platform.GOARCH, - }) - } - return res -} - -func mergeEdits(patterns []string, configs [][]string) fileToEditSet { - m := make(fileToEditSet) - for _, config := range configs { - for f, e := range computeEdits(patterns, config) { - if e0, ok := m[f]; ok { - e0.intersect(e) - } else { - m[f] = e - } - } - } - return m -} - -func computeEdits(patterns []string, config []string) fileToEditSet { - // TODO(mdempsky): Move into config? - var buildFlags []string - if *flagTags != "" { - buildFlags = []string{"-tags", *flagTags} - } - - pkgs, err := packages.Load(&packages.Config{ - Mode: packages.NeedSyntax | packages.NeedTypes | packages.NeedTypesInfo, - Env: append(os.Environ(), config...), - BuildFlags: buildFlags, - Tests: *flagTests, - }, patterns...) - if err != nil { - log.Fatal(err) - } - packages.PrintErrors(pkgs) - - type res struct { - file string - edits editSet - } - - ch := make(chan res) - var wg sync.WaitGroup - for _, pkg := range pkgs { - for _, file := range pkg.Syntax { - pkg, file := pkg, file - tokenFile := pkg.Fset.File(file.Package) - filename := tokenFile.Position(file.Package).Filename - - // Hack to recognize _cgo_gotypes.go. 
- if strings.HasSuffix(filename, "-d") || strings.HasSuffix(filename, "/_cgo_gotypes.go") { - continue - } - - wg.Add(1) - go func() { - defer wg.Done() - v := visitor{info: pkg.TypesInfo, file: tokenFile, edits: make(editSet)} - ast.Walk(&v, file) - ch <- res{filename, v.edits} - }() - } - } - go func() { - wg.Wait() - close(ch) - }() - - m := make(fileToEditSet) - for r := range ch { - m[r.file] = r.edits - } - return m -} - -type step struct { - n ast.Node - i int -} - -type visitor struct { - info *types.Info - file *token.File - edits editSet - path []step -} - -func (v *visitor) Visit(node ast.Node) ast.Visitor { - if node != nil { - v.path = append(v.path, step{n: node}) - } else { - n := len(v.path) - v.path = v.path[:n-1] - if n >= 2 { - v.path[n-2].i++ - } - } - - if call, ok := node.(*ast.CallExpr); ok { - v.unconvert(call) - } - return v -} - -func (v *visitor) unconvert(call *ast.CallExpr) { - // TODO(mdempsky): Handle useless multi-conversions. - - // Conversions have exactly one argument. - if len(call.Args) != 1 || call.Ellipsis != token.NoPos { - return - } - ft, ok := v.info.Types[call.Fun] - if !ok { - fmt.Println("Missing type for function") - return - } - if !ft.IsType() { - // Function call; not a conversion. - return - } - at, ok := v.info.Types[call.Args[0]] - if !ok { - fmt.Println("Missing type for argument") - return - } - if !types.Identical(ft.Type, at.Type) { - // A real conversion. - return - } - if !*flagFastMath && isFloatingPoint(ft.Type) { - // As of Go 1.9, explicit floating-point type - // conversions are always significant because they - // force rounding and prevent operation fusing. - return - } - if isUntypedValue(call.Args[0], v.info) { - // Workaround golang.org/issue/13061. - return - } - if *flagSafe && !v.isSafeContext(at.Type) { - // TODO(mdempsky): Remove this message. - fmt.Println("Skipped a possible type conversion because of -safe at", v.file.Position(call.Pos())) - return - } - - v.edits.add(v.file.Position(call.Lparen)) -} - -// isFloatingPointer reports whether t's underlying type is a floating -// point type. -func isFloatingPoint(t types.Type) bool { - ut, ok := t.Underlying().(*types.Basic) - return ok && ut.Info()&(types.IsFloat|types.IsComplex) != 0 -} - -// isSafeContext reports whether the current context requires -// an expression of type t. -// -// TODO(mdempsky): That's a bad explanation. -func (v *visitor) isSafeContext(t types.Type) bool { - ctxt := &v.path[len(v.path)-2] - switch n := ctxt.n.(type) { - case *ast.AssignStmt: - pos := ctxt.i - len(n.Lhs) - if pos < 0 { - fmt.Println("Type conversion on LHS of assignment?") - return false - } - if n.Tok == token.DEFINE { - // Skip := assignments. - return true - } - // We're a conversion in the pos'th element of n.Rhs. - // Check that the corresponding element of n.Lhs is of type t. - lt, ok := v.info.Types[n.Lhs[pos]] - if !ok { - fmt.Println("Missing type for LHS expression") - return false - } - return types.Identical(t, lt.Type) - case *ast.BinaryExpr: - if n.Op == token.SHL || n.Op == token.SHR { - if ctxt.i == 1 { - // RHS of a shift is always safe. - return true - } - // For the LHS, we should inspect up another level. 
- fmt.Println("TODO(mdempsky): Handle LHS of shift expressions") - return true - } - var other ast.Expr - if ctxt.i == 0 { - other = n.Y - } else { - other = n.X - } - ot, ok := v.info.Types[other] - if !ok { - fmt.Println("Missing type for other binop subexpr") - return false - } - return types.Identical(t, ot.Type) - case *ast.CallExpr: - pos := ctxt.i - 1 - if pos < 0 { - // Type conversion in the function subexpr is okay. - return true - } - ft, ok := v.info.Types[n.Fun] - if !ok { - fmt.Println("Missing type for function expression") - return false - } - sig, ok := ft.Type.(*types.Signature) - if !ok { - // "Function" is either a type conversion (ok) or a builtin (ok?). - return true - } - params := sig.Params() - var pt types.Type - if sig.Variadic() && n.Ellipsis == token.NoPos && pos >= params.Len()-1 { - pt = params.At(params.Len() - 1).Type().(*types.Slice).Elem() - } else { - pt = params.At(pos).Type() - } - return types.Identical(t, pt) - case *ast.CompositeLit, *ast.KeyValueExpr: - fmt.Println("TODO(mdempsky): Compare against value type of composite literal type at", v.file.Position(n.Pos())) - return true - case *ast.ReturnStmt: - // TODO(mdempsky): Is there a better way to get the corresponding - // return parameter type? - var funcType *ast.FuncType - for i := len(v.path) - 1; funcType == nil && i >= 0; i-- { - switch f := v.path[i].n.(type) { - case *ast.FuncDecl: - funcType = f.Type - case *ast.FuncLit: - funcType = f.Type - } - } - var typeExpr ast.Expr - for i, j := ctxt.i, 0; j < len(funcType.Results.List); j++ { - f := funcType.Results.List[j] - if len(f.Names) == 0 { - if i >= 1 { - i-- - continue - } - } else { - if i >= len(f.Names) { - i -= len(f.Names) - continue - } - } - typeExpr = f.Type - break - } - if typeExpr == nil { - fmt.Println(ctxt) - } - pt, ok := v.info.Types[typeExpr] - if !ok { - fmt.Println("Missing type for return parameter at", v.file.Position(n.Pos())) - return false - } - return types.Identical(t, pt.Type) - case *ast.StarExpr, *ast.UnaryExpr: - // TODO(mdempsky): I think these are always safe. - return true - case *ast.SwitchStmt: - // TODO(mdempsky): I think this is always safe? - return true - default: - // TODO(mdempsky): When can this happen? - fmt.Printf("... huh, %T at %v\n", n, v.file.Position(n.Pos())) - return true - } -} - -func isUntypedValue(n ast.Expr, info *types.Info) (res bool) { - switch n := n.(type) { - case *ast.BinaryExpr: - switch n.Op { - case token.SHL, token.SHR: - // Shifts yield an untyped value if their LHS is untyped. - return isUntypedValue(n.X, info) - case token.EQL, token.NEQ, token.LSS, token.GTR, token.LEQ, token.GEQ: - // Comparisons yield an untyped boolean value. - return true - case token.ADD, token.SUB, token.MUL, token.QUO, token.REM, - token.AND, token.OR, token.XOR, token.AND_NOT, - token.LAND, token.LOR: - return isUntypedValue(n.X, info) && isUntypedValue(n.Y, info) - } - case *ast.UnaryExpr: - switch n.Op { - case token.ADD, token.SUB, token.NOT, token.XOR: - return isUntypedValue(n.X, info) - } - case *ast.BasicLit: - // Basic literals are always untyped. - return true - case *ast.ParenExpr: - return isUntypedValue(n.X, info) - case *ast.SelectorExpr: - return isUntypedValue(n.Sel, info) - case *ast.Ident: - if obj, ok := info.Uses[n]; ok { - if obj.Pkg() == nil && obj.Name() == "nil" { - // The universal untyped zero value. - return true - } - if b, ok := obj.Type().(*types.Basic); ok && b.Info()&types.IsUntyped != 0 { - // Reference to an untyped constant. 
- return true - } - } - case *ast.CallExpr: - if b, ok := asBuiltin(n.Fun, info); ok { - switch b.Name() { - case "real", "imag": - return isUntypedValue(n.Args[0], info) - case "complex": - return isUntypedValue(n.Args[0], info) && isUntypedValue(n.Args[1], info) - } - } - } - - return false -} - -func asBuiltin(n ast.Expr, info *types.Info) (*types.Builtin, bool) { - for { - paren, ok := n.(*ast.ParenExpr) - if !ok { - break - } - n = paren.X - } - - ident, ok := n.(*ast.Ident) - if !ok { - return nil, false - } - - obj, ok := info.Uses[ident] - if !ok { - return nil, false - } - - b, ok := obj.(*types.Builtin) - return b, ok -} - -type byPosition []token.Position - -func (p byPosition) Len() int { - return len(p) -} - -func (p byPosition) Less(i, j int) bool { - if p[i].Filename != p[j].Filename { - return p[i].Filename < p[j].Filename - } - if p[i].Line != p[j].Line { - return p[i].Line < p[j].Line - } - return p[i].Column < p[j].Column -} - -func (p byPosition) Swap(i, j int) { - p[i], p[j] = p[j], p[i] -} diff --git a/vendor/github.com/gordonklaus/ineffassign/LICENSE b/vendor/github.com/gordonklaus/ineffassign/LICENSE deleted file mode 100644 index 9e3d9bcc0..000000000 --- a/vendor/github.com/gordonklaus/ineffassign/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -MIT License - -Copyright (c) 2016 Gordon Klaus and contributors - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. diff --git a/vendor/github.com/gordonklaus/ineffassign/pkg/ineffassign/ineffassign.go b/vendor/github.com/gordonklaus/ineffassign/pkg/ineffassign/ineffassign.go deleted file mode 100644 index f9dece8f2..000000000 --- a/vendor/github.com/gordonklaus/ineffassign/pkg/ineffassign/ineffassign.go +++ /dev/null @@ -1,610 +0,0 @@ -package ineffassign - -import ( - "fmt" - "go/ast" - "go/token" - "sort" - "strings" - - "golang.org/x/tools/go/analysis" -) - -// Analyzer is the ineffassign analysis.Analyzer instance. 
-var Analyzer = &analysis.Analyzer{ - Name: "ineffassign", - Doc: "detect ineffectual assignments in Go code", - Run: checkPath, -} - -func checkPath(pass *analysis.Pass) (interface{}, error) { - for _, file := range pass.Files { - if isGenerated(file) { - continue - } - - bld := &builder{vars: map[*ast.Object]*variable{}} - bld.walk(file) - - chk := &checker{vars: bld.vars, seen: map[*block]bool{}} - for _, b := range bld.roots { - chk.check(b) - } - sort.Sort(chk.ineff) - - for _, id := range chk.ineff { - pass.Report(analysis.Diagnostic{ - Pos: id.Pos(), - Message: fmt.Sprintf("ineffectual assignment to %s", id.Name), - }) - } - } - - return nil, nil -} - -func isGenerated(file *ast.File) bool { - for _, cg := range file.Comments { - for _, c := range cg.List { - if strings.HasPrefix(c.Text, "// Code generated ") && strings.HasSuffix(c.Text, " DO NOT EDIT.") { - return true - } - } - } - - return false -} - -type builder struct { - roots []*block - block *block - vars map[*ast.Object]*variable - results []*ast.FieldList - defers []bool - breaks branchStack - continues branchStack - gotos branchStack - labelStmt *ast.LabeledStmt -} - -type block struct { - children []*block - ops map[*ast.Object][]operation -} - -func (b *block) addChild(c *block) { - b.children = append(b.children, c) -} - -type operation struct { - id *ast.Ident - assign bool -} - -type variable struct { - fundept int - escapes bool -} - -func (bld *builder) walk(n ast.Node) { - if n != nil { - ast.Walk(bld, n) - } -} - -func (bld *builder) Visit(n ast.Node) ast.Visitor { - switch n := n.(type) { - case *ast.FuncDecl: - if n.Body != nil { - bld.fun(n.Type, n.Body) - } - case *ast.FuncLit: - bld.fun(n.Type, n.Body) - case *ast.IfStmt: - bld.walk(n.Init) - bld.walk(n.Cond) - b0 := bld.block - bld.newBlock(b0) - bld.walk(n.Body) - b1 := bld.block - if n.Else != nil { - bld.newBlock(b0) - bld.walk(n.Else) - b0 = bld.block - } - bld.newBlock(b0, b1) - case *ast.ForStmt: - lbl := bld.stmtLabel(n) - brek := bld.breaks.push(lbl) - continu := bld.continues.push(lbl) - bld.walk(n.Init) - start := bld.newBlock(bld.block) - bld.walk(n.Cond) - cond := bld.block - bld.newBlock(cond) - bld.walk(n.Body) - continu.setDestination(bld.newBlock(bld.block)) - bld.walk(n.Post) - bld.block.addChild(start) - brek.setDestination(bld.newBlock(cond)) - bld.breaks.pop() - bld.continues.pop() - case *ast.RangeStmt: - lbl := bld.stmtLabel(n) - brek := bld.breaks.push(lbl) - continu := bld.continues.push(lbl) - bld.walk(n.X) - pre := bld.newBlock(bld.block) - start := bld.newBlock(pre) - if n.Key != nil { - lhs := []ast.Expr{n.Key} - if n.Value != nil { - lhs = append(lhs, n.Value) - } - bld.walk(&ast.AssignStmt{Lhs: lhs, Tok: n.Tok, TokPos: n.TokPos, Rhs: []ast.Expr{&ast.Ident{NamePos: n.X.End()}}}) - } - bld.walk(n.Body) - bld.block.addChild(start) - continu.setDestination(pre) - brek.setDestination(bld.newBlock(pre, bld.block)) - bld.breaks.pop() - bld.continues.pop() - case *ast.SwitchStmt: - bld.walk(n.Init) - bld.walk(n.Tag) - bld.swtch(n, n.Body.List) - case *ast.TypeSwitchStmt: - bld.walk(n.Init) - bld.walk(n.Assign) - bld.swtch(n, n.Body.List) - case *ast.SelectStmt: - brek := bld.breaks.push(bld.stmtLabel(n)) - for _, c := range n.Body.List { - c := c.(*ast.CommClause).Comm - if s, ok := c.(*ast.AssignStmt); ok { - bld.walk(s.Rhs[0]) - } else { - bld.walk(c) - } - } - b0 := bld.block - exits := make([]*block, len(n.Body.List)) - dfault := false - for i, c := range n.Body.List { - c := c.(*ast.CommClause) - bld.newBlock(b0) - bld.walk(c) - 
exits[i] = bld.block - dfault = dfault || c.Comm == nil - } - if !dfault { - exits = append(exits, b0) - } - brek.setDestination(bld.newBlock(exits...)) - bld.breaks.pop() - case *ast.DeferStmt: - bld.walk(n.Call.Fun) - for _, a := range n.Call.Args { - bld.walk(a) - } - bld.defers[len(bld.defers)-1] = true - case *ast.LabeledStmt: - bld.gotos.get(n.Label).setDestination(bld.newBlock(bld.block)) - bld.labelStmt = n - bld.walk(n.Stmt) - case *ast.BranchStmt: - switch n.Tok { - case token.BREAK: - bld.breaks.get(n.Label).addSource(bld.block) - bld.newBlock() - case token.CONTINUE: - bld.continues.get(n.Label).addSource(bld.block) - bld.newBlock() - case token.GOTO: - bld.gotos.get(n.Label).addSource(bld.block) - bld.newBlock() - } - - case *ast.AssignStmt: - if n.Tok == token.QUO_ASSIGN || n.Tok == token.REM_ASSIGN { - bld.maybePanic() - } - - for _, x := range n.Rhs { - bld.walk(x) - } - for i, x := range n.Lhs { - if id, ok := ident(x); ok { - if n.Tok >= token.ADD_ASSIGN && n.Tok <= token.AND_NOT_ASSIGN { - bld.use(id) - } - // Don't treat explicit initialization to zero as assignment; it is often used as shorthand for a bare declaration. - if n.Tok == token.DEFINE && i < len(n.Rhs) && isZeroInitializer(n.Rhs[i]) { - bld.use(id) - } else { - bld.assign(id) - } - } else { - bld.walk(x) - } - } - case *ast.GenDecl: - if n.Tok == token.VAR { - for _, s := range n.Specs { - s := s.(*ast.ValueSpec) - for _, x := range s.Values { - bld.walk(x) - } - for _, id := range s.Names { - if len(s.Values) > 0 { - bld.assign(id) - } else { - bld.use(id) - } - } - } - } - case *ast.IncDecStmt: - if id, ok := ident(n.X); ok { - bld.use(id) - bld.assign(id) - } else { - bld.walk(n.X) - } - case *ast.Ident: - bld.use(n) - case *ast.ReturnStmt: - for _, x := range n.Results { - bld.walk(x) - } - if res := bld.results[len(bld.results)-1]; res != nil { - for _, f := range res.List { - for _, id := range f.Names { - if n.Results != nil { - bld.assign(id) - } - bld.use(id) - } - } - } - bld.newBlock() - case *ast.SendStmt: - bld.maybePanic() - return bld - - case *ast.BinaryExpr: - if n.Op == token.EQL || n.Op == token.QUO || n.Op == token.REM { - bld.maybePanic() - } - return bld - case *ast.CallExpr: - bld.maybePanic() - return bld - case *ast.IndexExpr: - bld.maybePanic() - return bld - case *ast.UnaryExpr: - id, ok := ident(n.X) - if ix, isIx := n.X.(*ast.IndexExpr); isIx { - // We don't care about indexing into slices, but without type information we can do no better. - id, ok = ident(ix.X) - } - if ok && n.Op == token.AND { - if v, ok := bld.vars[id.Obj]; ok { - v.escapes = true - } - } - return bld - case *ast.SelectorExpr: - bld.maybePanic() - // A method call (possibly delayed via a method value) might implicitly take - // the address of its receiver, causing it to escape. - // We can't do any better here without knowing the variable's type. - if id, ok := ident(n.X); ok { - if v, ok := bld.vars[id.Obj]; ok { - v.escapes = true - } - } - return bld - case *ast.SliceExpr: - bld.maybePanic() - // We don't care about slicing into slices, but without type information we can do no better. - if id, ok := ident(n.X); ok { - if v, ok := bld.vars[id.Obj]; ok { - v.escapes = true - } - } - return bld - case *ast.StarExpr: - bld.maybePanic() - return bld - case *ast.TypeAssertExpr: - bld.maybePanic() - return bld - - default: - return bld - } - return nil -} - -func isZeroInitializer(x ast.Expr) bool { - // Assume that a call expression of a single argument is a conversion expression. 
We can't do better without type information. - if c, ok := x.(*ast.CallExpr); ok { - fun := c.Fun - if p, ok := fun.(*ast.ParenExpr); ok { - fun = p.X - } - if s, ok := fun.(*ast.StarExpr); ok { - fun = s.X - } - switch fun.(type) { - case *ast.Ident, *ast.SelectorExpr, *ast.ArrayType, *ast.StructType, *ast.FuncType, *ast.InterfaceType, *ast.MapType, *ast.ChanType: - default: - return false - } - if len(c.Args) != 1 { - return false - } - x = c.Args[0] - } - - switch x := x.(type) { - case *ast.BasicLit: - switch x.Value { - case "0", "0.0", "0.", ".0", `""`: - return true - } - case *ast.Ident: - return (x.Name == "false" || x.Name == "nil") && x.Obj == nil - } - - return false -} - -func (bld *builder) fun(typ *ast.FuncType, body *ast.BlockStmt) { - for _, v := range bld.vars { - v.fundept++ - } - bld.results = append(bld.results, typ.Results) - bld.defers = append(bld.defers, false) - - b := bld.block - bld.newBlock() - bld.roots = append(bld.roots, bld.block) - bld.walk(typ) - bld.walk(body) - bld.block = b - - bld.results = bld.results[:len(bld.results)-1] - bld.defers = bld.defers[:len(bld.defers)-1] - for _, v := range bld.vars { - v.fundept-- - } -} - -func (bld *builder) swtch(stmt ast.Stmt, cases []ast.Stmt) { - brek := bld.breaks.push(bld.stmtLabel(stmt)) - b0 := bld.block - list := b0 - exits := make([]*block, 0, len(cases)+1) - var dfault, fallthru *block - for _, c := range cases { - c := c.(*ast.CaseClause) - - if c.List != nil { - list = bld.newBlock(list) - for _, x := range c.List { - bld.walk(x) - } - } - - parents := []*block{} - if c.List != nil { - parents = append(parents, list) - } - if fallthru != nil { - parents = append(parents, fallthru) - fallthru = nil - } - bld.newBlock(parents...) - if c.List == nil { - dfault = bld.block - } - for _, s := range c.Body { - bld.walk(s) - if s, ok := s.(*ast.BranchStmt); ok && s.Tok == token.FALLTHROUGH { - fallthru = bld.block - } - } - - if fallthru == nil { - exits = append(exits, bld.block) - } - } - if dfault != nil { - list.addChild(dfault) - } else { - exits = append(exits, b0) - } - brek.setDestination(bld.newBlock(exits...)) - bld.breaks.pop() -} - -// If an operation might panic and be recovered, mark named function results as used. 
-func (bld *builder) maybePanic() { - if len(bld.defers) == 0 || !bld.defers[len(bld.defers)-1] { - return - } - if len(bld.results) == 0 { - return - } - res := bld.results[len(bld.results)-1] - if res == nil { - return - } - for _, f := range res.List { - for _, id := range f.Names { - bld.use(id) - } - } -} - -func (bld *builder) newBlock(parents ...*block) *block { - bld.block = &block{ops: map[*ast.Object][]operation{}} - for _, b := range parents { - b.addChild(bld.block) - } - return bld.block -} - -func (bld *builder) stmtLabel(s ast.Stmt) *ast.Object { - if ls := bld.labelStmt; ls != nil && ls.Stmt == s { - return ls.Label.Obj - } - return nil -} - -func (bld *builder) assign(id *ast.Ident) { - bld.newOp(id, true) -} - -func (bld *builder) use(id *ast.Ident) { - bld.newOp(id, false) -} - -func (bld *builder) newOp(id *ast.Ident, assign bool) { - if id.Name == "_" || id.Obj == nil { - return - } - - v, ok := bld.vars[id.Obj] - if !ok { - v = &variable{} - bld.vars[id.Obj] = v - } - v.escapes = v.escapes || v.fundept > 0 || bld.block == nil - - if b := bld.block; b != nil && !v.escapes { - b.ops[id.Obj] = append(b.ops[id.Obj], operation{id, assign}) - } -} - -type branchStack []*branch - -type branch struct { - label *ast.Object - srcs []*block - dst *block -} - -func (s *branchStack) push(lbl *ast.Object) *branch { - br := &branch{label: lbl} - *s = append(*s, br) - return br -} - -func (s *branchStack) get(lbl *ast.Ident) *branch { - for i := len(*s) - 1; i >= 0; i-- { - if br := (*s)[i]; lbl == nil || br.label == lbl.Obj { - return br - } - } - - // Guard against invalid code (break/continue outside of loop). - if lbl == nil { - return &branch{} - } - - return s.push(lbl.Obj) -} - -func (br *branch) addSource(src *block) { - br.srcs = append(br.srcs, src) - if br.dst != nil { - src.addChild(br.dst) - } -} - -func (br *branch) setDestination(dst *block) { - br.dst = dst - for _, src := range br.srcs { - src.addChild(dst) - } -} - -func (s *branchStack) pop() { - *s = (*s)[:len(*s)-1] -} - -func ident(x ast.Expr) (*ast.Ident, bool) { - if p, ok := x.(*ast.ParenExpr); ok { - return ident(p.X) - } - id, ok := x.(*ast.Ident) - return id, ok -} - -type checker struct { - vars map[*ast.Object]*variable - seen map[*block]bool - ineff idents -} - -func (chk *checker) check(b *block) { - if chk.seen[b] { - return - } - chk.seen[b] = true - - for obj, ops := range b.ops { - ops: - for i, op := range ops { - if !op.assign { - continue - } - if i+1 < len(ops) { - if ops[i+1].assign { - chk.ineff = append(chk.ineff, op.id) - } - continue - } - seen := map[*block]bool{} - for _, b := range b.children { - if used(obj, b, seen) { - continue ops - } - } - if !chk.vars[obj].escapes { - chk.ineff = append(chk.ineff, op.id) - } - } - } - - for _, b := range b.children { - chk.check(b) - } -} - -func used(obj *ast.Object, b *block, seen map[*block]bool) bool { - if seen[b] { - return false - } - seen[b] = true - - if ops := b.ops[obj]; len(ops) > 0 { - return !ops[0].assign - } - for _, b := range b.children { - if used(obj, b, seen) { - return true - } - } - return false -} - -type idents []*ast.Ident - -func (ids idents) Len() int { return len(ids) } -func (ids idents) Less(i, j int) bool { return ids[i].Pos() < ids[j].Pos() } -func (ids idents) Swap(i, j int) { ids[i], ids[j] = ids[j], ids[i] } diff --git a/vendor/github.com/gostaticanalysis/analysisutil/LICENSE b/vendor/github.com/gostaticanalysis/analysisutil/LICENSE deleted file mode 100644 index bf7e33db8..000000000 --- 
a/vendor/github.com/gostaticanalysis/analysisutil/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -MIT License - -Copyright (c) 2019 GoStaticAnalysis - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. diff --git a/vendor/github.com/gostaticanalysis/analysisutil/README.md b/vendor/github.com/gostaticanalysis/analysisutil/README.md deleted file mode 100644 index d8fd3d2a4..000000000 --- a/vendor/github.com/gostaticanalysis/analysisutil/README.md +++ /dev/null @@ -1,5 +0,0 @@ -# analysisutil - -[![PkgGoDev](https://pkg.go.dev/badge/github.com/gostaticanalysis/analysisutil)](https://pkg.go.dev/github.com/gostaticanalysis/analysisutil) - -Utilities for x/tools/go/analysis package. diff --git a/vendor/github.com/gostaticanalysis/analysisutil/call.go b/vendor/github.com/gostaticanalysis/analysisutil/call.go deleted file mode 100644 index e3d98d1dc..000000000 --- a/vendor/github.com/gostaticanalysis/analysisutil/call.go +++ /dev/null @@ -1,405 +0,0 @@ -package analysisutil - -import ( - "go/types" - - "golang.org/x/tools/go/ssa" -) - -// CalledChecker checks a function is called. -// See From and Func. -type CalledChecker struct { - Ignore func(instr ssa.Instruction) bool -} - -// NotIn checks whether receiver's method is called in a function. -// If there is no methods calling at a path from an instruction -// which type is receiver to all return instruction, NotIn returns these instructions. -func (c *CalledChecker) NotIn(f *ssa.Function, receiver types.Type, methods ...*types.Func) []ssa.Instruction { - done := map[ssa.Value]bool{} - var instrs []ssa.Instruction - for _, b := range f.Blocks { - for i, instr := range b.Instrs { - v, _ := instr.(ssa.Value) - if v == nil || done[v] { - continue - } - - if v, _ := v.(*ssa.UnOp); v != nil && done[v.X] { - continue - } - - called, ok := c.From(b, i, receiver, methods...) - if ok && !called { - instrs = append(instrs, instr) - done[v] = true - if v, _ := v.(*ssa.UnOp); v != nil { - done[v.X] = true - } - } - } - } - return instrs -} - -// Func returns true when f is called in the instr. -// If recv is not nil, Func also checks the receiver. 
-func (c *CalledChecker) Func(instr ssa.Instruction, recv ssa.Value, f *types.Func) bool { - - if c.Ignore != nil && c.Ignore(instr) { - return false - } - - call, ok := instr.(ssa.CallInstruction) - if !ok { - return false - } - - common := call.Common() - if common == nil { - return false - } - - callee := common.StaticCallee() - if callee == nil { - return false - } - - fn, ok := callee.Object().(*types.Func) - if !ok { - return false - } - - if recv != nil && - common.Signature().Recv() != nil && - (len(common.Args) == 0 && recv != nil || common.Args[0] != recv && - !referrer(recv, common.Args[0])) { - return false - } - - return fn == f -} - -func referrer(a, b ssa.Value) bool { - return isReferrerOf(a, b) || isReferrerOf(b, a) -} - -func isReferrerOf(a, b ssa.Value) bool { - if a == nil || b == nil { - return false - } - if b.Referrers() != nil { - brs := *b.Referrers() - - for _, br := range brs { - brv, ok := br.(ssa.Value) - if !ok { - continue - } - if brv == a { - return true - } - } - } - return false -} - -// From checks whether receiver's method is called in an instruction -// which belogns to after i-th instructions, or in succsor blocks of b. -// The first result is above value. -// The second result is whether type of i-th instruction does not much receiver -// or matches with ignore cases. -func (c *CalledChecker) From(b *ssa.BasicBlock, i int, receiver types.Type, methods ...*types.Func) (called, ok bool) { - if b == nil || i < 0 || i >= len(b.Instrs) || - receiver == nil || len(methods) == 0 { - return false, false - } - - v, ok := b.Instrs[i].(ssa.Value) - if !ok { - return false, false - } - - from := &calledFrom{recv: v, fs: methods, ignore: c.Ignore} - - if !from.isRecv(receiver, v.Type()) { - return false, false - } - - if from.ignored() { - return false, false - } - - if from.instrs(b.Instrs[i+1:]) || - from.succs(b) { - return true, true - } - - from.done = nil - if from.storedInInstrs(b.Instrs[i+1:]) || - from.storedInSuccs(b) { - return false, false - } - - return false, true -} - -type calledFrom struct { - recv ssa.Value - fs []*types.Func - done map[*ssa.BasicBlock]bool - ignore func(ssa.Instruction) bool -} - -func (c *calledFrom) ignored() bool { - - switch v := c.recv.(type) { - case *ssa.UnOp: - switch v.X.(type) { - case *ssa.FreeVar, *ssa.Global: - return true - } - } - - refs := c.recv.Referrers() - if refs == nil { - return false - } - - for _, ref := range *refs { - done := map[ssa.Instruction]bool{} - if !c.isOwn(ref) && - ((c.ignore != nil && c.ignore(ref)) || - c.isRet(ref, done) || c.isArg(ref)) { - return true - } - } - - return false -} - -func (c *calledFrom) isOwn(instr ssa.Instruction) bool { - v, ok := instr.(ssa.Value) - if !ok { - return false - } - return v == c.recv -} - -func (c *calledFrom) isRet(instr ssa.Instruction, done map[ssa.Instruction]bool) bool { - if done[instr] { - return false - } - done[instr] = true - - switch instr := instr.(type) { - case *ssa.Return: - return true - case *ssa.MapUpdate: - return c.isRetInRefs(instr.Map, done) - case *ssa.Store: - if instr, _ := instr.Addr.(ssa.Instruction); instr != nil { - return c.isRet(instr, done) - } - return c.isRetInRefs(instr.Addr, done) - case *ssa.FieldAddr: - return c.isRetInRefs(instr.X, done) - case ssa.Value: - return c.isRetInRefs(instr, done) - default: - return false - } -} - -func (c *calledFrom) isRetInRefs(v ssa.Value, done map[ssa.Instruction]bool) bool { - refs := v.Referrers() - if refs == nil { - return false - } - for _, ref := range *refs { - if 
c.isRet(ref, done) { - return true - } - } - return false -} - -func (c *calledFrom) isArg(instr ssa.Instruction) bool { - - call, ok := instr.(ssa.CallInstruction) - if !ok { - return false - } - - common := call.Common() - if common == nil { - return false - } - - args := common.Args - if common.Signature().Recv() != nil { - args = args[1:] - } - - for i := range args { - if args[i] == c.recv { - return true - } - } - - return false -} - -func (c *calledFrom) instrs(instrs []ssa.Instruction) bool { - for _, instr := range instrs { - for _, f := range c.fs { - if Called(instr, c.recv, f) { - return true - } - } - } - return false -} - -func (c *calledFrom) succs(b *ssa.BasicBlock) bool { - if c.done == nil { - c.done = map[*ssa.BasicBlock]bool{} - } - - if c.done[b] { - return true - } - c.done[b] = true - - if len(b.Succs) == 0 { - return false - } - - for _, s := range b.Succs { - if !c.instrs(s.Instrs) && !c.succs(s) { - return false - } - } - - return true -} - -func (c *calledFrom) storedInInstrs(instrs []ssa.Instruction) bool { - for _, instr := range instrs { - switch instr := instr.(type) { - case *ssa.Store: - if instr.Val == c.recv { - return true - } - } - } - return false -} - -func (c *calledFrom) storedInSuccs(b *ssa.BasicBlock) bool { - if c.done == nil { - c.done = map[*ssa.BasicBlock]bool{} - } - - if c.done[b] { - return true - } - c.done[b] = true - - if len(b.Succs) == 0 { - return false - } - - for _, s := range b.Succs { - if !c.storedInInstrs(s.Instrs) && !c.succs(s) { - return false - } - } - - return true -} - -func (c *calledFrom) isRecv(recv, typ types.Type) bool { - return recv == typ || identical(recv, typ) || - c.isRecvInTuple(recv, typ) || c.isRecvInEmbedded(recv, typ) -} - -func (c *calledFrom) isRecvInTuple(recv, typ types.Type) bool { - tuple, _ := typ.(*types.Tuple) - if tuple == nil { - return false - } - - for i := 0; i < tuple.Len(); i++ { - if c.isRecv(recv, tuple.At(i).Type()) { - return true - } - } - - return false -} - -func (c *calledFrom) isRecvInEmbedded(recv, typ types.Type) bool { - - var st *types.Struct - switch typ := typ.(type) { - case *types.Struct: - st = typ - case *types.Pointer: - return c.isRecvInEmbedded(recv, typ.Elem()) - case *types.Named: - return c.isRecvInEmbedded(recv, typ.Underlying()) - default: - return false - } - - for i := 0; i < st.NumFields(); i++ { - field := st.Field(i) - if !field.Embedded() { - continue - } - - ft := field.Type() - if c.isRecv(recv, ft) { - return true - } - - var ptrOrUnptr types.Type - switch ft := ft.(type) { - case *types.Pointer: - // struct { *T } -> T - ptrOrUnptr = ft.Elem() - default: - // struct { T } -> *T - ptrOrUnptr = types.NewPointer(ft) - } - - if c.isRecv(recv, ptrOrUnptr) { - return true - } - } - - return false -} - -// NotCalledIn checks whether receiver's method is called in a function. -// If there is no methods calling at a path from an instruction -// which type is receiver to all return instruction, NotCalledIn returns these instructions. -func NotCalledIn(f *ssa.Function, receiver types.Type, methods ...*types.Func) []ssa.Instruction { - return new(CalledChecker).NotIn(f, receiver, methods...) -} - -// CalledFrom checks whether receiver's method is called in an instruction -// which belogns to after i-th instructions, or in succsor blocks of b. -// The first result is above value. -// The second result is whether type of i-th instruction does not much receiver -// or matches with ignore cases. 
-func CalledFrom(b *ssa.BasicBlock, i int, receiver types.Type, methods ...*types.Func) (called, ok bool) { - return new(CalledChecker).From(b, i, receiver, methods...) -} - -// Called returns true when f is called in the instr. -// If recv is not nil, Called also checks the receiver. -func Called(instr ssa.Instruction, recv ssa.Value, f *types.Func) bool { - return new(CalledChecker).Func(instr, recv, f) -} diff --git a/vendor/github.com/gostaticanalysis/analysisutil/diagnostic.go b/vendor/github.com/gostaticanalysis/analysisutil/diagnostic.go deleted file mode 100644 index a911db6f1..000000000 --- a/vendor/github.com/gostaticanalysis/analysisutil/diagnostic.go +++ /dev/null @@ -1,45 +0,0 @@ -package analysisutil - -import ( - "go/token" - - "github.com/gostaticanalysis/comment" - "github.com/gostaticanalysis/comment/passes/commentmap" - "golang.org/x/tools/go/analysis" -) - -// ReportWithoutIgnore returns a report function which can set to (analysis.Pass).Report. -// The report function ignores a diagnostic which annotated by ignore comment as the below. -// //lint:ignore Check1[,Check2,...,CheckN] reason -// names is a list of checker names. -// If names was omitted, the report function ignores by pass.Analyzer.Name. -func ReportWithoutIgnore(pass *analysis.Pass, names ...string) func(analysis.Diagnostic) { - cmaps, _ := pass.ResultOf[commentmap.Analyzer].(comment.Maps) - if cmaps == nil { - cmaps = comment.New(pass.Fset, pass.Files) - } - - if len(names) == 0 { - names = []string{pass.Analyzer.Name} - } - - report := pass.Report // original report func - - return func(d analysis.Diagnostic) { - start := pass.Fset.File(d.Pos).Line(d.Pos) - end := start - if d.End != token.NoPos { - end = pass.Fset.File(d.End).Line(d.End) - } - - for l := start; l <= end; l++ { - for _, n := range names { - if cmaps.IgnoreLine(pass.Fset, l, n) { - return - } - } - } - - report(d) - } -} diff --git a/vendor/github.com/gostaticanalysis/analysisutil/file.go b/vendor/github.com/gostaticanalysis/analysisutil/file.go deleted file mode 100644 index b9b295530..000000000 --- a/vendor/github.com/gostaticanalysis/analysisutil/file.go +++ /dev/null @@ -1,30 +0,0 @@ -package analysisutil - -import ( - "go/ast" - "go/token" - "regexp" - - "golang.org/x/tools/go/analysis" -) - -// File finds *ast.File in pass.Files by pos. -func File(pass *analysis.Pass, pos token.Pos) *ast.File { - for _, f := range pass.Files { - if f.Pos() <= pos && pos <= f.End() { - return f - } - } - return nil -} - -var genCommentRegexp = regexp.MustCompile(`^// Code generated .* DO NOT EDIT\.$`) - -// IsGeneratedFile reports whether the file has been generated automatically. -// If file is nil, IsGeneratedFile will return false. -func IsGeneratedFile(file *ast.File) bool { - if file == nil || len(file.Comments) == 0 { - return false - } - return genCommentRegexp.MatchString(file.Comments[0].List[0].Text) -} diff --git a/vendor/github.com/gostaticanalysis/analysisutil/pkg.go b/vendor/github.com/gostaticanalysis/analysisutil/pkg.go deleted file mode 100644 index b64150d81..000000000 --- a/vendor/github.com/gostaticanalysis/analysisutil/pkg.go +++ /dev/null @@ -1,49 +0,0 @@ -package analysisutil - -import ( - "go/types" - "strconv" - "strings" - - "golang.org/x/tools/go/analysis" -) - -// RemoVendor removes vendoring information from import path. 
-func RemoveVendor(path string) string { - i := strings.Index(path, "vendor/") - if i >= 0 { - return path[i+len("vendor/"):] - } - return path -} - -// LookupFromImports finds an object from import paths. -func LookupFromImports(imports []*types.Package, path, name string) types.Object { - path = RemoveVendor(path) - for i := range imports { - if path == RemoveVendor(imports[i].Path()) { - return imports[i].Scope().Lookup(name) - } - } - return nil -} - -// Imported returns true when the given pass imports the pkg. -func Imported(pkgPath string, pass *analysis.Pass) bool { - fs := pass.Files - if len(fs) == 0 { - return false - } - for _, f := range fs { - for _, i := range f.Imports { - path, err := strconv.Unquote(i.Path.Value) - if err != nil { - continue - } - if RemoveVendor(path) == pkgPath { - return true - } - } - } - return false -} diff --git a/vendor/github.com/gostaticanalysis/analysisutil/ssa.go b/vendor/github.com/gostaticanalysis/analysisutil/ssa.go deleted file mode 100644 index 2e22bbe79..000000000 --- a/vendor/github.com/gostaticanalysis/analysisutil/ssa.go +++ /dev/null @@ -1,152 +0,0 @@ -package analysisutil - -import ( - "golang.org/x/tools/go/ssa" -) - -// IfInstr returns *ssa.If which is contained in the block b. -// If the block b has not any if instruction, IfInstr returns nil. -func IfInstr(b *ssa.BasicBlock) *ssa.If { - if len(b.Instrs) == 0 { - return nil - } - - ifinstr, ok := b.Instrs[len(b.Instrs)-1].(*ssa.If) - if !ok { - return nil - } - - return ifinstr -} - -// Phi returns phi values which are contained in the block b. -func Phi(b *ssa.BasicBlock) []*ssa.Phi { - var phis []*ssa.Phi - for _, instr := range b.Instrs { - if phi, ok := instr.(*ssa.Phi); ok { - phis = append(phis, phi) - } else { - // no more phi - break - } - } - return phis -} - -// Returns returns a slice of *ssa.Return in the function. -func Returns(v ssa.Value) []*ssa.Return { - var fn *ssa.Function - switch v := v.(type) { - case *ssa.Function: - fn = v - case *ssa.MakeClosure: - return Returns(v.Fn) - default: - return nil - } - - var rets []*ssa.Return - done := map[*ssa.BasicBlock]bool{} - for _, b := range fn.Blocks { - rets = append(rets, returnsInBlock(b, done)...) - } - return rets -} - -func returnsInBlock(b *ssa.BasicBlock, done map[*ssa.BasicBlock]bool) (rets []*ssa.Return) { - if done[b] { - return nil - } - done[b] = true - - if b.Index != 0 && len(b.Preds) == 0 { - return nil - } - - if len(b.Instrs) != 0 { - switch instr := b.Instrs[len(b.Instrs)-1].(type) { - case *ssa.Return: - rets = append(rets, instr) - } - } - - for _, s := range b.Succs { - rets = append(rets, returnsInBlock(s, done)...) - } - - return rets -} - -// BinOp returns binary operator values which are contained in the block b. -func BinOp(b *ssa.BasicBlock) []*ssa.BinOp { - var binops []*ssa.BinOp - for _, instr := range b.Instrs { - if binop, ok := instr.(*ssa.BinOp); ok { - binops = append(binops, binop) - } - } - return binops -} - -// Used returns an instruction which uses the value in the instructions. 
-func Used(v ssa.Value, instrs []ssa.Instruction) ssa.Instruction { - if len(instrs) == 0 || v.Referrers() == nil { - return nil - } - - for _, instr := range instrs { - if used := usedInInstr(v, instr); used != nil { - return used - } - } - - return nil -} - -func usedInInstr(v ssa.Value, instr ssa.Instruction) ssa.Instruction { - switch instr := instr.(type) { - case *ssa.MakeClosure: - return usedInClosure(v, instr) - default: - operands := instr.Operands(nil) - for _, x := range operands { - if x != nil && *x == v { - return instr - } - } - } - - switch v := v.(type) { - case *ssa.UnOp: - return usedInInstr(v.X, instr) - } - - return nil -} - -func usedInClosure(v ssa.Value, instr *ssa.MakeClosure) ssa.Instruction { - fn, _ := instr.Fn.(*ssa.Function) - if fn == nil { - return nil - } - - var fv *ssa.FreeVar - for i := range instr.Bindings { - if instr.Bindings[i] == v { - fv = fn.FreeVars[i] - break - } - } - - if fv == nil { - return nil - } - - for _, b := range fn.Blocks { - if used := Used(fv, b.Instrs); used != nil { - return used - } - } - - return nil -} diff --git a/vendor/github.com/gostaticanalysis/analysisutil/ssainspect.go b/vendor/github.com/gostaticanalysis/analysisutil/ssainspect.go deleted file mode 100644 index b2ae75f24..000000000 --- a/vendor/github.com/gostaticanalysis/analysisutil/ssainspect.go +++ /dev/null @@ -1,47 +0,0 @@ -package analysisutil - -import "golang.org/x/tools/go/ssa" - -// InspectFuncs inspects functions. -func InspectFuncs(funcs []*ssa.Function, f func(i int, instr ssa.Instruction) bool) { - for _, fun := range funcs { - if len(fun.Blocks) == 0 { - continue - } - new(instrInspector).block(fun.Blocks[0], 0, f) - } -} - -// InspectInstr inspects from i-th instruction of start block to succsessor blocks. -func InspectInstr(start *ssa.BasicBlock, i int, f func(i int, instr ssa.Instruction) bool) { - new(instrInspector).block(start, i, f) -} - -type instrInspector struct { - done map[*ssa.BasicBlock]bool -} - -func (ins *instrInspector) block(b *ssa.BasicBlock, i int, f func(i int, instr ssa.Instruction) bool) { - if ins.done == nil { - ins.done = map[*ssa.BasicBlock]bool{} - } - - if b == nil || ins.done[b] || len(b.Instrs) <= i { - return - } - - ins.done[b] = true - ins.instrs(i, b.Instrs[i:], f) - for _, s := range b.Succs { - ins.block(s, 0, f) - } - -} - -func (ins *instrInspector) instrs(offset int, instrs []ssa.Instruction, f func(i int, instr ssa.Instruction) bool) { - for i, instr := range instrs { - if !f(offset+i, instr) { - break - } - } -} diff --git a/vendor/github.com/gostaticanalysis/analysisutil/types.go b/vendor/github.com/gostaticanalysis/analysisutil/types.go deleted file mode 100644 index 8265efc8e..000000000 --- a/vendor/github.com/gostaticanalysis/analysisutil/types.go +++ /dev/null @@ -1,228 +0,0 @@ -package analysisutil - -import ( - "go/ast" - "go/types" - - "golang.org/x/tools/go/analysis" -) - -var errType = types.Universe.Lookup("error").Type().Underlying().(*types.Interface) - -// ImplementsError return whether t implements error interface. -func ImplementsError(t types.Type) bool { - return types.Implements(t, errType) -} - -// ObjectOf returns types.Object by given name in the package. 
-func ObjectOf(pass *analysis.Pass, pkg, name string) types.Object { - obj := LookupFromImports(pass.Pkg.Imports(), pkg, name) - if obj != nil { - return obj - } - if RemoveVendor(pass.Pkg.Name()) != RemoveVendor(pkg) { - return nil - } - return pass.Pkg.Scope().Lookup(name) -} - -// TypeOf returns types.Type by given name in the package. -// TypeOf accepts pointer types such as *T. -func TypeOf(pass *analysis.Pass, pkg, name string) types.Type { - if name == "" { - return nil - } - - if name[0] == '*' { - obj := TypeOf(pass, pkg, name[1:]) - if obj == nil { - return nil - } - return types.NewPointer(obj) - } - - obj := ObjectOf(pass, pkg, name) - if obj == nil { - return nil - } - - return obj.Type() -} - -// MethodOf returns a method which has given name in the type. -func MethodOf(typ types.Type, name string) *types.Func { - switch typ := typ.(type) { - case *types.Named: - for i := 0; i < typ.NumMethods(); i++ { - if f := typ.Method(i); f.Name() == name { - return f - } - } - case *types.Pointer: - return MethodOf(typ.Elem(), name) - } - return nil -} - -// see: https://github.com/golang/go/issues/19670 -func identical(x, y types.Type) (ret bool) { - defer func() { - r := recover() - switch r := r.(type) { - case string: - if r == "unreachable" { - ret = false - return - } - case nil: - return - } - panic(r) - }() - return types.Identical(x, y) -} - -// Interfaces returns a map of interfaces which are declared in the package. -func Interfaces(pkg *types.Package) map[string]*types.Interface { - ifs := map[string]*types.Interface{} - - for _, n := range pkg.Scope().Names() { - o := pkg.Scope().Lookup(n) - if o != nil { - i, ok := o.Type().Underlying().(*types.Interface) - if ok { - ifs[n] = i - } - } - } - - return ifs -} - -// Structs returns a map of structs which are declared in the package. -func Structs(pkg *types.Package) map[string]*types.Struct { - structs := map[string]*types.Struct{} - - for _, n := range pkg.Scope().Names() { - o := pkg.Scope().Lookup(n) - if o != nil { - s, ok := o.Type().Underlying().(*types.Struct) - if ok { - structs[n] = s - } - } - } - - return structs -} - -// HasField returns whether the struct has the field. -func HasField(s *types.Struct, f *types.Var) bool { - if s == nil || f == nil { - return false - } - - for i := 0; i < s.NumFields(); i++ { - if s.Field(i) == f { - return true - } - } - - return false -} - -// Field returns field of the struct type. -// If the type is not struct or has not the field, -// Field returns -1, nil. -// If the type is a named type or a pointer type, -// Field calls itself recursively with -// an underlying type or an element type of pointer. 
-func Field(t types.Type, name string) (int, *types.Var) { - switch t := t.(type) { - case *types.Pointer: - return Field(t.Elem(), name) - case *types.Named: - return Field(t.Underlying(), name) - case *types.Struct: - for i := 0; i < t.NumFields(); i++ { - f := t.Field(i) - if f.Name() == name { - return i, f - } - } - } - - return -1, nil -} - -func TypesInfo(info ...*types.Info) *types.Info { - if len(info) == 0 { - return nil - } - - var merged types.Info - for i := range info { - mergeTypesInfo(&merged, info[i]) - } - - return &merged -} - -func mergeTypesInfo(i1, i2 *types.Info) { - // Types - if i1.Types == nil && i2.Types != nil { - i1.Types = map[ast.Expr]types.TypeAndValue{} - } - for expr, tv := range i2.Types { - i1.Types[expr] = tv - } - - // Defs - if i1.Defs == nil && i2.Defs != nil { - i1.Defs = map[*ast.Ident]types.Object{} - } - for ident, obj := range i2.Defs { - i1.Defs[ident] = obj - } - - // Uses - if i1.Uses == nil && i2.Uses != nil { - i1.Uses = map[*ast.Ident]types.Object{} - } - for ident, obj := range i2.Uses { - i1.Uses[ident] = obj - } - - // Implicits - if i1.Implicits == nil && i2.Implicits != nil { - i1.Implicits = map[ast.Node]types.Object{} - } - for n, obj := range i2.Implicits { - i1.Implicits[n] = obj - } - - // Selections - if i1.Selections == nil && i2.Selections != nil { - i1.Selections = map[*ast.SelectorExpr]*types.Selection{} - } - for expr, sel := range i2.Selections { - i1.Selections[expr] = sel - } - - // Scopes - if i1.Scopes == nil && i2.Scopes != nil { - i1.Scopes = map[ast.Node]*types.Scope{} - } - for n, s := range i2.Scopes { - i1.Scopes[n] = s - } - - // InitOrder - i1.InitOrder = append(i1.InitOrder, i2.InitOrder...) -} - -// Under returns the most bottom underlying type. -// Deprecated: (types.Type).Underlying returns same value of it. -func Under(t types.Type) types.Type { - return t.Underlying() -} diff --git a/vendor/github.com/gostaticanalysis/comment/LICENSE b/vendor/github.com/gostaticanalysis/comment/LICENSE deleted file mode 100644 index 4f7eeff5b..000000000 --- a/vendor/github.com/gostaticanalysis/comment/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -MIT License - -Copyright (c) 2018 Takuya Ueda - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. 
diff --git a/vendor/github.com/gostaticanalysis/comment/README.md b/vendor/github.com/gostaticanalysis/comment/README.md deleted file mode 100644 index 533555313..000000000 --- a/vendor/github.com/gostaticanalysis/comment/README.md +++ /dev/null @@ -1,10 +0,0 @@ -# gostaticanalysis/comment - -[![godoc.org][godoc-badge]][godoc] - -`comment` provides utilities for [ast.CommentMap](https://golang.org/pkg/go/ast/#CommentMap). - - -[godoc]: https://godoc.org/github.com/gostaticanalysis/comment -[godoc-badge]: https://img.shields.io/badge/godoc-reference-4F73B3.svg?style=flat-square&label=%20godoc.org - diff --git a/vendor/github.com/gostaticanalysis/comment/comment.go b/vendor/github.com/gostaticanalysis/comment/comment.go deleted file mode 100644 index 79cb09382..000000000 --- a/vendor/github.com/gostaticanalysis/comment/comment.go +++ /dev/null @@ -1,152 +0,0 @@ -package comment - -import ( - "go/ast" - "go/token" - "strings" -) - -// Maps is slice of ast.CommentMap. -type Maps []ast.CommentMap - -// New creates new a CommentMap slice from specified files. -func New(fset *token.FileSet, files []*ast.File) Maps { - maps := make(Maps, len(files)) - for i := range files { - maps[i] = ast.NewCommentMap(fset, files[i], files[i].Comments) - } - return maps -} - -// Comments returns correspond a CommentGroup slice to specified AST node. -func (maps Maps) Comments(n ast.Node) []*ast.CommentGroup { - for i := range maps { - if maps[i][n] != nil { - return maps[i][n] - } - } - return nil -} - -// CommentsByPos returns correspond a CommentGroup slice to specified pos. -func (maps Maps) CommentsByPos(pos token.Pos) []*ast.CommentGroup { - for i := range maps { - for n, cgs := range maps[i] { - if n.Pos() == pos { - return cgs - } - } - } - return nil -} - -// Annotated checks either specified AST node is annotated or not. -func (maps Maps) Annotated(n ast.Node, annotation string) bool { - for _, cg := range maps.Comments(n) { - if strings.HasPrefix(strings.TrimSpace(cg.Text()), annotation) { - return true - } - } - return false -} - -// Ignore checks either specified AST node is ignored by the check. -// It follows staticcheck style as the below. -// //lint:ignore Check1[,Check2,...,CheckN] reason -func (maps Maps) Ignore(n ast.Node, check string) bool { - for _, cg := range maps.Comments(n) { - if hasIgnoreCheck(cg, check) { - return true - } - } - return false -} - -// IgnorePos checks either specified postion of AST node is ignored by the check. -// It follows staticcheck style as the below. -// //lint:ignore Check1[,Check2,...,CheckN] reason -func (maps Maps) IgnorePos(pos token.Pos, check string) bool { - for _, cg := range maps.CommentsByPos(pos) { - if hasIgnoreCheck(cg, check) { - return true - } - } - return false -} - -// Deprecated: This function does not work with multiple files. -// CommentsByPosLine can be used instead of CommentsByLine. -// -// CommentsByLine returns correspond a CommentGroup slice to specified line. -func (maps Maps) CommentsByLine(fset *token.FileSet, line int) []*ast.CommentGroup { - for i := range maps { - for n, cgs := range maps[i] { - l := fset.File(n.Pos()).Line(n.Pos()) - if l == line { - return cgs - } - } - } - return nil -} - -// CommentsByPosLine returns correspond a CommentGroup slice to specified line. 
-func (maps Maps) CommentsByPosLine(fset *token.FileSet, pos token.Pos) []*ast.CommentGroup { - f1 := fset.File(pos) - for i := range maps { - for n, cgs := range maps[i] { - f2 := fset.File(n.Pos()) - if f1 != f2 { - // different file - continue - } - - if f1.Line(pos) == f2.Line(n.Pos()) { - return cgs - } - } - } - return nil -} - -// IgnoreLine checks either specified lineof AST node is ignored by the check. -// It follows staticcheck style as the below. -// //lint:ignore Check1[,Check2,...,CheckN] reason -func (maps Maps) IgnoreLine(fset *token.FileSet, line int, check string) bool { - for _, cg := range maps.CommentsByLine(fset, line) { - if hasIgnoreCheck(cg, check) { - return true - } - } - return false -} - -// hasIgnoreCheck returns true if the provided CommentGroup starts with a comment -// of the form "//lint:ignore Check1[,Check2,...,CheckN] reason" and one of the -// checks matches the provided check. -// -// The *ast.CommentGroup is checked directly rather than using "cg.Text()" because, -// starting in Go 1.15, the "cg.Text()" call no longer returns directive-style -// comments (see https://github.com/golang/go/issues/37974). -func hasIgnoreCheck(cg *ast.CommentGroup, check string) bool { - for _, list := range cg.List { - if !strings.HasPrefix(list.Text, "//") { - continue - } - - s := strings.TrimSpace(list.Text[2:]) // list.Text[2:]: trim "//" - txt := strings.Split(s, " ") - if len(txt) < 3 || txt[0] != "lint:ignore" { - continue - } - - checks := strings.Split(txt[1], ",") // txt[1]: trim "lint:ignore" - for i := range checks { - if check == checks[i] { - return true - } - } - } - - return false -} diff --git a/vendor/github.com/gostaticanalysis/comment/passes/commentmap/commentmap.go b/vendor/github.com/gostaticanalysis/comment/passes/commentmap/commentmap.go deleted file mode 100644 index 1b60a160c..000000000 --- a/vendor/github.com/gostaticanalysis/comment/passes/commentmap/commentmap.go +++ /dev/null @@ -1,21 +0,0 @@ -package commentmap - -import ( - "reflect" - - "golang.org/x/tools/go/analysis" - - "github.com/gostaticanalysis/comment" -) - -var Analyzer = &analysis.Analyzer{ - Name: "commentmap", - Doc: "create comment map", - Run: run, - RunDespiteErrors: true, - ResultType: reflect.TypeOf(comment.Maps{}), -} - -func run(pass *analysis.Pass) (interface{}, error) { - return comment.New(pass.Fset, pass.Files), nil -} diff --git a/vendor/github.com/gostaticanalysis/forcetypeassert/.reviewdog.yml b/vendor/github.com/gostaticanalysis/forcetypeassert/.reviewdog.yml deleted file mode 100644 index 2e243ff73..000000000 --- a/vendor/github.com/gostaticanalysis/forcetypeassert/.reviewdog.yml +++ /dev/null @@ -1,8 +0,0 @@ -runner: - golint: - cmd: golint ./... - errorformat: - - "%f:%l:%c: %m" - level: warning - govet: - cmd: go vet -all . 
diff --git a/vendor/github.com/gostaticanalysis/forcetypeassert/LICENSE b/vendor/github.com/gostaticanalysis/forcetypeassert/LICENSE deleted file mode 100644 index bf7e33db8..000000000 --- a/vendor/github.com/gostaticanalysis/forcetypeassert/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -MIT License - -Copyright (c) 2019 GoStaticAnalysis - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. diff --git a/vendor/github.com/gostaticanalysis/forcetypeassert/README.md b/vendor/github.com/gostaticanalysis/forcetypeassert/README.md deleted file mode 100644 index 36a47594e..000000000 --- a/vendor/github.com/gostaticanalysis/forcetypeassert/README.md +++ /dev/null @@ -1,28 +0,0 @@ -# forcetypeassert - -[![godoc.org][godoc-badge]][godoc] - -`forcetypeassert` finds type assertions which did forcely such as below. - -```go -func f() { - var a interface{} - _ = a.(int) // type assertion must be checked -} -``` - -You need to check if the assertion failed like so: -```go -func f() { - var a interface{} - _, ok := a.(int) - if !ok { // type assertion failed - // handle error - } -} -``` - - -[godoc]: https://godoc.org/github.com/gostaticanalysis/forcetypeassert -[godoc-badge]: https://img.shields.io/badge/godoc-reference-4F73B3.svg?style=flat-square&label=%20godoc.org - diff --git a/vendor/github.com/gostaticanalysis/forcetypeassert/forcetypeassert.go b/vendor/github.com/gostaticanalysis/forcetypeassert/forcetypeassert.go deleted file mode 100644 index bb48485d9..000000000 --- a/vendor/github.com/gostaticanalysis/forcetypeassert/forcetypeassert.go +++ /dev/null @@ -1,143 +0,0 @@ -package forcetypeassert - -import ( - "go/ast" - "reflect" - - "golang.org/x/tools/go/analysis" - "golang.org/x/tools/go/analysis/passes/inspect" - "golang.org/x/tools/go/ast/inspector" -) - -var Analyzer = &analysis.Analyzer{ - Name: "forcetypeassert", - Doc: Doc, - Run: run, - Requires: []*analysis.Analyzer{ - inspect.Analyzer, - }, - ResultType: reflect.TypeOf((*Panicable)(nil)), -} - -// Panicable stores panicable type assertions. -type Panicable struct { - m map[ast.Node]bool - nodes []ast.Node -} - -// Check checks whether the node may occur panic or not. -func (p *Panicable) Check(n ast.Node) bool { - return p.m[n] -} - -// Len is number of panicable nodes. -func (p *Panicable) Len() int { - return len(p.nodes) -} - -// At returns the i-th panicable node. 
-func (p *Panicable) At(i int) ast.Node { - return p.nodes[i] -} - -const Doc = "forcetypeassert is finds type assertions which did forcely" - -func run(pass *analysis.Pass) (interface{}, error) { - inspect, _ := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) - result := &Panicable{m: make(map[ast.Node]bool)} - - nodeFilter := []ast.Node{ - (*ast.AssignStmt)(nil), - (*ast.ValueSpec)(nil), - (*ast.TypeAssertExpr)(nil), - } - - inspect.Nodes(nodeFilter, func(n ast.Node, push bool) bool { - if !push { - return false - } - switch n := n.(type) { - case *ast.AssignStmt: - return checkAssignStmt(pass, result, n) - case *ast.ValueSpec: - return checkValueSpec(pass, result, n) - case *ast.TypeAssertExpr: - if n.Type != nil { - result.m[n] = true - result.nodes = append(result.nodes, n) - pass.Reportf(n.Pos(), "type assertion must be checked") - } - return false - } - - return true - }) - - return result, nil -} - -func checkAssignStmt(pass *analysis.Pass, result *Panicable, n *ast.AssignStmt) bool { - tae := findTypeAssertion(n.Rhs) - if tae == nil { - return true - } - - switch { - // if right hand has 2 or more values, assign statement can't assert boolean value which describes type assertion is succeeded - case len(n.Rhs) > 1: - pass.Reportf(n.Pos(), "right hand must be only type assertion") - return false - case len(n.Lhs) != 2 && tae.Type != nil: - result.m[n] = true - result.nodes = append(result.nodes, n) - pass.Reportf(n.Pos(), "type assertion must be checked") - return false - case len(n.Lhs) == 2: - return false - } - - return true -} - -func checkValueSpec(pass *analysis.Pass, result *Panicable, n *ast.ValueSpec) bool { - tae := findTypeAssertion(n.Values) - if tae == nil { - return true - } - - switch { - // if right hand has 2 or more values, assign statement can't assert boolean value which describes type assertion is succeeded - case len(n.Values) > 1: - pass.Reportf(n.Pos(), "right hand must be only type assertion") - return false - case len(n.Names) != 2 && tae.Type != nil: - result.m[n] = true - result.nodes = append(result.nodes, n) - pass.Reportf(n.Pos(), "type assertion must be checked") - return false - case len(n.Names) == 2: - return false - } - - return true -} - -func findTypeAssertion(exprs []ast.Expr) *ast.TypeAssertExpr { - for _, expr := range exprs { - var typeAssertExpr *ast.TypeAssertExpr - ast.Inspect(expr, func(n ast.Node) bool { - switch n := n.(type) { - case *ast.FuncLit: - return false - case *ast.TypeAssertExpr: - typeAssertExpr = n - return false - } - return true - }) - if typeAssertExpr != nil { - return typeAssertExpr - } - } - return nil -} diff --git a/vendor/github.com/gostaticanalysis/nilerr/LICENSE b/vendor/github.com/gostaticanalysis/nilerr/LICENSE deleted file mode 100644 index bf7e33db8..000000000 --- a/vendor/github.com/gostaticanalysis/nilerr/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -MIT License - -Copyright (c) 2019 GoStaticAnalysis - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. 
- -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. diff --git a/vendor/github.com/gostaticanalysis/nilerr/README.md b/vendor/github.com/gostaticanalysis/nilerr/README.md deleted file mode 100644 index d6b4acf8b..000000000 --- a/vendor/github.com/gostaticanalysis/nilerr/README.md +++ /dev/null @@ -1,41 +0,0 @@ -# nilerr - -[![pkg.go.dev][gopkg-badge]][gopkg] - -`nilerr` finds code which returns nil even though it checks that error is not nil. - -```go -func f() error { - err := do() - if err != nil { - return nil // miss - } -} -``` - -`nilerr` also finds code which returns error even though it checks that error is nil. - -```go -func f() error { - err := do() - if err == nil { - return err // miss - } -} -``` - -`nilerr` ignores code which has a miss with ignore comment. - -```go -func f() error { - err := do() - if err != nil { - //lint:ignore nilerr reason - return nil // ignore - } -} -``` - - -[gopkg]: https://pkg.go.dev/github.com/gostaticanalysis/nilerr -[gopkg-badge]: https://pkg.go.dev/badge/github.com/gostaticanalysis/nilerr?status.svg diff --git a/vendor/github.com/gostaticanalysis/nilerr/nilerr.go b/vendor/github.com/gostaticanalysis/nilerr/nilerr.go deleted file mode 100644 index 787a9e1e9..000000000 --- a/vendor/github.com/gostaticanalysis/nilerr/nilerr.go +++ /dev/null @@ -1,291 +0,0 @@ -package nilerr - -import ( - "fmt" - "go/token" - "go/types" - - "github.com/gostaticanalysis/comment" - "github.com/gostaticanalysis/comment/passes/commentmap" - "golang.org/x/tools/go/analysis" - "golang.org/x/tools/go/analysis/passes/buildssa" - "golang.org/x/tools/go/ssa" -) - -var Analyzer = &analysis.Analyzer{ - Name: "nilerr", - Doc: Doc, - Run: run, - Requires: []*analysis.Analyzer{ - buildssa.Analyzer, - commentmap.Analyzer, - }, -} - -const Doc = "nilerr checks returning nil when err is not nil" - -func run(pass *analysis.Pass) (interface{}, error) { - funcs := pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA).SrcFuncs - cmaps := pass.ResultOf[commentmap.Analyzer].(comment.Maps) - - reportFail := func(v ssa.Value, ret *ssa.Return, format string) { - pos := ret.Pos() - line := getNodeLineNumber(pass, ret) - errLines := getValueLineNumbers(pass, v) - if !cmaps.IgnoreLine(pass.Fset, line, "nilerr") { - var errLineText string - if len(errLines) == 1 { - errLineText = fmt.Sprintf("line %d", errLines[0]) - } else { - errLineText = fmt.Sprintf("lines %v", errLines) - } - pass.Reportf(pos, format, errLineText) - } - } - - for i := range funcs { - for _, b := range funcs[i].Blocks { - if v := binOpErrNil(b, token.NEQ); v != nil { - if ret := isReturnNil(b.Succs[0]); ret != nil { - if !usesErrorValue(b.Succs[0], v) { - reportFail(v, ret, "error is not nil (%s) but it returns nil") - } - } - } else if v := binOpErrNil(b, token.EQL); v != nil { - if len(b.Succs[0].Preds) == 1 { // if there are multiple conditions, this may be false positive - if ret := isReturnError(b.Succs[0], v); ret != nil { - reportFail(v, ret, "error is nil (%s) but it returns error") - } - } - } - - } - } - - return nil, nil -} - -func getValueLineNumbers(pass *analysis.Pass, v ssa.Value) []int { - if phi, ok := 
v.(*ssa.Phi); ok { - result := make([]int, 0, len(phi.Edges)) - for _, edge := range phi.Edges { - result = append(result, getValueLineNumbers(pass, edge)...) - } - return result - } - - value := v - if extract, ok := value.(*ssa.Extract); ok { - value = extract.Tuple - } - - pos := value.Pos() - return []int{pass.Fset.File(pos).Line(pos)} -} - -func getNodeLineNumber(pass *analysis.Pass, node ssa.Node) int { - pos := node.Pos() - return pass.Fset.File(pos).Line(pos) -} - -var errType = types.Universe.Lookup("error").Type().Underlying().(*types.Interface) - -func binOpErrNil(b *ssa.BasicBlock, op token.Token) ssa.Value { - if len(b.Instrs) == 0 { - return nil - } - - ifinst, ok := b.Instrs[len(b.Instrs)-1].(*ssa.If) - if !ok { - return nil - } - - binop, ok := ifinst.Cond.(*ssa.BinOp) - if !ok { - return nil - } - - if binop.Op != op { - return nil - } - - if !types.Implements(binop.X.Type(), errType) { - return nil - } - - if !types.Implements(binop.Y.Type(), errType) { - return nil - } - - xIsConst, yIsConst := isConst(binop.X), isConst(binop.Y) - switch { - case !xIsConst && yIsConst: // err != nil or err == nil - return binop.X - case xIsConst && !yIsConst: // nil != err or nil == err - return binop.Y - } - - return nil -} - -func isConst(v ssa.Value) bool { - _, ok := v.(*ssa.Const) - return ok -} - -func isReturnNil(b *ssa.BasicBlock) *ssa.Return { - if len(b.Instrs) == 0 { - return nil - } - - ret, ok := b.Instrs[len(b.Instrs)-1].(*ssa.Return) - if !ok { - return nil - } - - errorReturnValues := 0 - for _, res := range ret.Results { - if !types.Implements(res.Type(), errType) { - continue - } - - errorReturnValues++ - v, ok := res.(*ssa.Const) - if !ok { - return nil - } - - if !v.IsNil() { - return nil - } - } - - if errorReturnValues == 0 { - return nil - } - - return ret -} - -func isReturnError(b *ssa.BasicBlock, errVal ssa.Value) *ssa.Return { - if len(b.Instrs) == 0 { - return nil - } - - ret, ok := b.Instrs[len(b.Instrs)-1].(*ssa.Return) - if !ok { - return nil - } - - for _, v := range ret.Results { - if v == errVal { - return ret - } - } - - return nil -} - -func usesErrorValue(b *ssa.BasicBlock, errVal ssa.Value) bool { - for _, instr := range b.Instrs { - if callInstr, ok := instr.(*ssa.Call); ok { - for _, arg := range callInstr.Call.Args { - if isUsedInValue(arg, errVal) { - return true - } - - sliceArg, ok := arg.(*ssa.Slice) - if ok { - if isUsedInSlice(sliceArg, errVal) { - return true - } - } - } - } - } - return false -} - -type ReferrersHolder interface { - Referrers() *[]ssa.Instruction -} - -var _ ReferrersHolder = (ssa.Node)(nil) -var _ ReferrersHolder = (ssa.Value)(nil) - -func isUsedInSlice(sliceArg *ssa.Slice, errVal ssa.Value) bool { - var valueBuf [10]*ssa.Value - operands := sliceArg.Operands(valueBuf[:0]) - - var valuesToInspect []ssa.Value - addValueForInspection := func(value ssa.Value) { - if value != nil { - valuesToInspect = append(valuesToInspect, value) - } - } - - var nodesToInspect []ssa.Node - visitedNodes := map[ssa.Node]bool{} - addNodeForInspection := func(node ssa.Node) { - if !visitedNodes[node] { - visitedNodes[node] = true - nodesToInspect = append(nodesToInspect, node) - } - } - addReferrersForInspection := func(h ReferrersHolder) { - if h == nil { - return - } - - referrers := h.Referrers() - if referrers == nil { - return - } - - for _, r := range *referrers { - if node, ok := r.(ssa.Node); ok { - addNodeForInspection(node) - } - } - } - - for _, operand := range operands { - addReferrersForInspection(*operand) - 
addValueForInspection(*operand) - } - - for i := 0; i < len(nodesToInspect); i++ { - switch node := nodesToInspect[i].(type) { - case *ssa.IndexAddr: - addReferrersForInspection(node) - case *ssa.Store: - addValueForInspection(node.Val) - } - } - - for _, value := range valuesToInspect { - if isUsedInValue(value, errVal) { - return true - } - } - return false -} - -func isUsedInValue(value, lookedFor ssa.Value) bool { - if value == lookedFor { - return true - } - - switch value := value.(type) { - case *ssa.ChangeInterface: - return isUsedInValue(value.X, lookedFor) - case *ssa.MakeInterface: - return isUsedInValue(value.X, lookedFor) - case *ssa.Call: - if value.Call.IsInvoke() { - return isUsedInValue(value.Call.Value, lookedFor) - } - } - - return false -} diff --git a/vendor/github.com/hashicorp/go-version/CHANGELOG.md b/vendor/github.com/hashicorp/go-version/CHANGELOG.md deleted file mode 100644 index 5f16dd140..000000000 --- a/vendor/github.com/hashicorp/go-version/CHANGELOG.md +++ /dev/null @@ -1,45 +0,0 @@ -# 1.6.0 (June 28, 2022) - -FEATURES: - -- Add `Prerelease` function to `Constraint` to return true if the version includes a prerelease field ([#100](https://github.com/hashicorp/go-version/pull/100)) - -# 1.5.0 (May 18, 2022) - -FEATURES: - -- Use `encoding` `TextMarshaler` & `TextUnmarshaler` instead of JSON equivalents ([#95](https://github.com/hashicorp/go-version/pull/95)) -- Add JSON handlers to allow parsing from/to JSON ([#93](https://github.com/hashicorp/go-version/pull/93)) - -# 1.4.0 (January 5, 2022) - -FEATURES: - - - Introduce `MustConstraints()` ([#87](https://github.com/hashicorp/go-version/pull/87)) - - `Constraints`: Introduce `Equals()` and `sort.Interface` methods ([#88](https://github.com/hashicorp/go-version/pull/88)) - -# 1.3.0 (March 31, 2021) - -Please note that CHANGELOG.md does not exist in the source code prior to this release. - -FEATURES: - - Add `Core` function to return a version without prerelease or metadata ([#85](https://github.com/hashicorp/go-version/pull/85)) - -# 1.2.1 (June 17, 2020) - -BUG FIXES: - - Prevent `Version.Equal` method from panicking on `nil` encounter ([#73](https://github.com/hashicorp/go-version/pull/73)) - -# 1.2.0 (April 23, 2019) - -FEATURES: - - Add `GreaterThanOrEqual` and `LessThanOrEqual` helper methods ([#53](https://github.com/hashicorp/go-version/pull/53)) - -# 1.1.0 (Jan 07, 2019) - -FEATURES: - - Add `NewSemver` constructor ([#45](https://github.com/hashicorp/go-version/pull/45)) - -# 1.0.0 (August 24, 2018) - -Initial release. diff --git a/vendor/github.com/hashicorp/go-version/LICENSE b/vendor/github.com/hashicorp/go-version/LICENSE deleted file mode 100644 index c33dcc7c9..000000000 --- a/vendor/github.com/hashicorp/go-version/LICENSE +++ /dev/null @@ -1,354 +0,0 @@ -Mozilla Public License, version 2.0 - -1. Definitions - -1.1. “Contributor” - - means each individual or legal entity that creates, contributes to the - creation of, or owns Covered Software. - -1.2. “Contributor Version” - - means the combination of the Contributions of others (if any) used by a - Contributor and that particular Contributor’s Contribution. - -1.3. “Contribution” - - means Covered Software of a particular Contributor. - -1.4. “Covered Software” - - means Source Code Form to which the initial Contributor has attached the - notice in Exhibit A, the Executable Form of such Source Code Form, and - Modifications of such Source Code Form, in each case including portions - thereof. - -1.5. 
“Incompatible With Secondary Licenses” - means - - a. that the initial Contributor has attached the notice described in - Exhibit B to the Covered Software; or - - b. that the Covered Software was made available under the terms of version - 1.1 or earlier of the License, but not also under the terms of a - Secondary License. - -1.6. “Executable Form” - - means any form of the work other than Source Code Form. - -1.7. “Larger Work” - - means a work that combines Covered Software with other material, in a separate - file or files, that is not Covered Software. - -1.8. “License” - - means this document. - -1.9. “Licensable” - - means having the right to grant, to the maximum extent possible, whether at the - time of the initial grant or subsequently, any and all of the rights conveyed by - this License. - -1.10. “Modifications” - - means any of the following: - - a. any file in Source Code Form that results from an addition to, deletion - from, or modification of the contents of Covered Software; or - - b. any new file in Source Code Form that contains any Covered Software. - -1.11. “Patent Claims” of a Contributor - - means any patent claim(s), including without limitation, method, process, - and apparatus claims, in any patent Licensable by such Contributor that - would be infringed, but for the grant of the License, by the making, - using, selling, offering for sale, having made, import, or transfer of - either its Contributions or its Contributor Version. - -1.12. “Secondary License” - - means either the GNU General Public License, Version 2.0, the GNU Lesser - General Public License, Version 2.1, the GNU Affero General Public - License, Version 3.0, or any later versions of those licenses. - -1.13. “Source Code Form” - - means the form of the work preferred for making modifications. - -1.14. “You” (or “Your”) - - means an individual or a legal entity exercising rights under this - License. For legal entities, “You” includes any entity that controls, is - controlled by, or is under common control with You. For purposes of this - definition, “control” means (a) the power, direct or indirect, to cause - the direction or management of such entity, whether by contract or - otherwise, or (b) ownership of more than fifty percent (50%) of the - outstanding shares or beneficial ownership of such entity. - - -2. License Grants and Conditions - -2.1. Grants - - Each Contributor hereby grants You a world-wide, royalty-free, - non-exclusive license: - - a. under intellectual property rights (other than patent or trademark) - Licensable by such Contributor to use, reproduce, make available, - modify, display, perform, distribute, and otherwise exploit its - Contributions, either on an unmodified basis, with Modifications, or as - part of a Larger Work; and - - b. under Patent Claims of such Contributor to make, use, sell, offer for - sale, have made, import, and otherwise transfer either its Contributions - or its Contributor Version. - -2.2. Effective Date - - The licenses granted in Section 2.1 with respect to any Contribution become - effective for each Contribution on the date the Contributor first distributes - such Contribution. - -2.3. Limitations on Grant Scope - - The licenses granted in this Section 2 are the only rights granted under this - License. No additional rights or licenses will be implied from the distribution - or licensing of Covered Software under this License. Notwithstanding Section - 2.1(b) above, no patent license is granted by a Contributor: - - a. 
for any code that a Contributor has removed from Covered Software; or - - b. for infringements caused by: (i) Your and any other third party’s - modifications of Covered Software, or (ii) the combination of its - Contributions with other software (except as part of its Contributor - Version); or - - c. under Patent Claims infringed by Covered Software in the absence of its - Contributions. - - This License does not grant any rights in the trademarks, service marks, or - logos of any Contributor (except as may be necessary to comply with the - notice requirements in Section 3.4). - -2.4. Subsequent Licenses - - No Contributor makes additional grants as a result of Your choice to - distribute the Covered Software under a subsequent version of this License - (see Section 10.2) or under the terms of a Secondary License (if permitted - under the terms of Section 3.3). - -2.5. Representation - - Each Contributor represents that the Contributor believes its Contributions - are its original creation(s) or it has sufficient rights to grant the - rights to its Contributions conveyed by this License. - -2.6. Fair Use - - This License is not intended to limit any rights You have under applicable - copyright doctrines of fair use, fair dealing, or other equivalents. - -2.7. Conditions - - Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted in - Section 2.1. - - -3. Responsibilities - -3.1. Distribution of Source Form - - All distribution of Covered Software in Source Code Form, including any - Modifications that You create or to which You contribute, must be under the - terms of this License. You must inform recipients that the Source Code Form - of the Covered Software is governed by the terms of this License, and how - they can obtain a copy of this License. You may not attempt to alter or - restrict the recipients’ rights in the Source Code Form. - -3.2. Distribution of Executable Form - - If You distribute Covered Software in Executable Form then: - - a. such Covered Software must also be made available in Source Code Form, - as described in Section 3.1, and You must inform recipients of the - Executable Form how they can obtain a copy of such Source Code Form by - reasonable means in a timely manner, at a charge no more than the cost - of distribution to the recipient; and - - b. You may distribute such Executable Form under the terms of this License, - or sublicense it under different terms, provided that the license for - the Executable Form does not attempt to limit or alter the recipients’ - rights in the Source Code Form under this License. - -3.3. Distribution of a Larger Work - - You may create and distribute a Larger Work under terms of Your choice, - provided that You also comply with the requirements of this License for the - Covered Software. If the Larger Work is a combination of Covered Software - with a work governed by one or more Secondary Licenses, and the Covered - Software is not Incompatible With Secondary Licenses, this License permits - You to additionally distribute such Covered Software under the terms of - such Secondary License(s), so that the recipient of the Larger Work may, at - their option, further distribute the Covered Software under the terms of - either this License or such Secondary License(s). - -3.4. 
Notices - - You may not remove or alter the substance of any license notices (including - copyright notices, patent notices, disclaimers of warranty, or limitations - of liability) contained within the Source Code Form of the Covered - Software, except that You may alter any license notices to the extent - required to remedy known factual inaccuracies. - -3.5. Application of Additional Terms - - You may choose to offer, and to charge a fee for, warranty, support, - indemnity or liability obligations to one or more recipients of Covered - Software. However, You may do so only on Your own behalf, and not on behalf - of any Contributor. You must make it absolutely clear that any such - warranty, support, indemnity, or liability obligation is offered by You - alone, and You hereby agree to indemnify every Contributor for any - liability incurred by such Contributor as a result of warranty, support, - indemnity or liability terms You offer. You may include additional - disclaimers of warranty and limitations of liability specific to any - jurisdiction. - -4. Inability to Comply Due to Statute or Regulation - - If it is impossible for You to comply with any of the terms of this License - with respect to some or all of the Covered Software due to statute, judicial - order, or regulation then You must: (a) comply with the terms of this License - to the maximum extent possible; and (b) describe the limitations and the code - they affect. Such description must be placed in a text file included with all - distributions of the Covered Software under this License. Except to the - extent prohibited by statute or regulation, such description must be - sufficiently detailed for a recipient of ordinary skill to be able to - understand it. - -5. Termination - -5.1. The rights granted under this License will terminate automatically if You - fail to comply with any of its terms. However, if You become compliant, - then the rights granted under this License from a particular Contributor - are reinstated (a) provisionally, unless and until such Contributor - explicitly and finally terminates Your grants, and (b) on an ongoing basis, - if such Contributor fails to notify You of the non-compliance by some - reasonable means prior to 60 days after You have come back into compliance. - Moreover, Your grants from a particular Contributor are reinstated on an - ongoing basis if such Contributor notifies You of the non-compliance by - some reasonable means, this is the first time You have received notice of - non-compliance with this License from such Contributor, and You become - compliant prior to 30 days after Your receipt of the notice. - -5.2. If You initiate litigation against any entity by asserting a patent - infringement claim (excluding declaratory judgment actions, counter-claims, - and cross-claims) alleging that a Contributor Version directly or - indirectly infringes any patent, then the rights granted to You by any and - all Contributors for the Covered Software under Section 2.1 of this License - shall terminate. - -5.3. In the event of termination under Sections 5.1 or 5.2 above, all end user - license agreements (excluding distributors and resellers) which have been - validly granted by You or Your distributors under this License prior to - termination shall survive termination. - -6. 
Disclaimer of Warranty - - Covered Software is provided under this License on an “as is” basis, without - warranty of any kind, either expressed, implied, or statutory, including, - without limitation, warranties that the Covered Software is free of defects, - merchantable, fit for a particular purpose or non-infringing. The entire - risk as to the quality and performance of the Covered Software is with You. - Should any Covered Software prove defective in any respect, You (not any - Contributor) assume the cost of any necessary servicing, repair, or - correction. This disclaimer of warranty constitutes an essential part of this - License. No use of any Covered Software is authorized under this License - except under this disclaimer. - -7. Limitation of Liability - - Under no circumstances and under no legal theory, whether tort (including - negligence), contract, or otherwise, shall any Contributor, or anyone who - distributes Covered Software as permitted above, be liable to You for any - direct, indirect, special, incidental, or consequential damages of any - character including, without limitation, damages for lost profits, loss of - goodwill, work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses, even if such party shall have been - informed of the possibility of such damages. This limitation of liability - shall not apply to liability for death or personal injury resulting from such - party’s negligence to the extent applicable law prohibits such limitation. - Some jurisdictions do not allow the exclusion or limitation of incidental or - consequential damages, so this exclusion and limitation may not apply to You. - -8. Litigation - - Any litigation relating to this License may be brought only in the courts of - a jurisdiction where the defendant maintains its principal place of business - and such litigation shall be governed by laws of that jurisdiction, without - reference to its conflict-of-law provisions. Nothing in this Section shall - prevent a party’s ability to bring cross-claims or counter-claims. - -9. Miscellaneous - - This License represents the complete agreement concerning the subject matter - hereof. If any provision of this License is held to be unenforceable, such - provision shall be reformed only to the extent necessary to make it - enforceable. Any law or regulation which provides that the language of a - contract shall be construed against the drafter shall not be used to construe - this License against a Contributor. - - -10. Versions of the License - -10.1. New Versions - - Mozilla Foundation is the license steward. Except as provided in Section - 10.3, no one other than the license steward has the right to modify or - publish new versions of this License. Each version will be given a - distinguishing version number. - -10.2. Effect of New Versions - - You may distribute the Covered Software under the terms of the version of - the License under which You originally received the Covered Software, or - under the terms of any subsequent version published by the license - steward. - -10.3. Modified Versions - - If you create software not governed by this License, and you want to - create a new license for such software, you may create and use a modified - version of this License if you rename the license and remove any - references to the name of the license steward (except to note that such - modified license differs from this License). - -10.4. 
Distributing Source Code Form that is Incompatible With Secondary Licenses - If You choose to distribute Source Code Form that is Incompatible With - Secondary Licenses under the terms of this version of the License, the - notice described in Exhibit B of this License must be attached. - -Exhibit A - Source Code Form License Notice - - This Source Code Form is subject to the - terms of the Mozilla Public License, v. - 2.0. If a copy of the MPL was not - distributed with this file, You can - obtain one at - http://mozilla.org/MPL/2.0/. - -If it is not possible or desirable to put the notice in a particular file, then -You may include the notice in a location (such as a LICENSE file in a relevant -directory) where a recipient would be likely to look for such a notice. - -You may add additional accurate notices of copyright ownership. - -Exhibit B - “Incompatible With Secondary Licenses” Notice - - This Source Code Form is “Incompatible - With Secondary Licenses”, as defined by - the Mozilla Public License, v. 2.0. - diff --git a/vendor/github.com/hashicorp/go-version/README.md b/vendor/github.com/hashicorp/go-version/README.md deleted file mode 100644 index 4d2505090..000000000 --- a/vendor/github.com/hashicorp/go-version/README.md +++ /dev/null @@ -1,66 +0,0 @@ -# Versioning Library for Go -[![Build Status](https://circleci.com/gh/hashicorp/go-version/tree/main.svg?style=svg)](https://circleci.com/gh/hashicorp/go-version/tree/main) -[![GoDoc](https://godoc.org/github.com/hashicorp/go-version?status.svg)](https://godoc.org/github.com/hashicorp/go-version) - -go-version is a library for parsing versions and version constraints, -and verifying versions against a set of constraints. go-version -can sort a collection of versions properly, handles prerelease/beta -versions, can increment versions, etc. - -Versions used with go-version must follow [SemVer](http://semver.org/). - -## Installation and Usage - -Package documentation can be found on -[GoDoc](http://godoc.org/github.com/hashicorp/go-version). - -Installation can be done with a normal `go get`: - -``` -$ go get github.com/hashicorp/go-version -``` - -#### Version Parsing and Comparison - -```go -v1, err := version.NewVersion("1.2") -v2, err := version.NewVersion("1.5+metadata") - -// Comparison example. There is also GreaterThan, Equal, and just -// a simple Compare that returns an int allowing easy >=, <=, etc. -if v1.LessThan(v2) { - fmt.Printf("%s is less than %s", v1, v2) -} -``` - -#### Version Constraints - -```go -v1, err := version.NewVersion("1.2") - -// Constraints example. -constraints, err := version.NewConstraint(">= 1.0, < 1.4") -if constraints.Check(v1) { - fmt.Printf("%s satisfies constraints %s", v1, constraints) -} -``` - -#### Version Sorting - -```go -versionsRaw := []string{"1.1", "0.7.1", "1.4-beta", "1.4", "2"} -versions := make([]*version.Version, len(versionsRaw)) -for i, raw := range versionsRaw { - v, _ := version.NewVersion(raw) - versions[i] = v -} - -// After this, the versions are properly sorted -sort.Sort(version.Collection(versions)) -``` - -## Issues and Contributing - -If you find an issue with this library, please report an issue. If you'd -like, we welcome any contributions. Fork this library and submit a pull -request. 
diff --git a/vendor/github.com/hashicorp/go-version/constraint.go b/vendor/github.com/hashicorp/go-version/constraint.go deleted file mode 100644 index da5d1aca1..000000000 --- a/vendor/github.com/hashicorp/go-version/constraint.go +++ /dev/null @@ -1,296 +0,0 @@ -package version - -import ( - "fmt" - "reflect" - "regexp" - "sort" - "strings" -) - -// Constraint represents a single constraint for a version, such as -// ">= 1.0". -type Constraint struct { - f constraintFunc - op operator - check *Version - original string -} - -func (c *Constraint) Equals(con *Constraint) bool { - return c.op == con.op && c.check.Equal(con.check) -} - -// Constraints is a slice of constraints. We make a custom type so that -// we can add methods to it. -type Constraints []*Constraint - -type constraintFunc func(v, c *Version) bool - -var constraintOperators map[string]constraintOperation - -type constraintOperation struct { - op operator - f constraintFunc -} - -var constraintRegexp *regexp.Regexp - -func init() { - constraintOperators = map[string]constraintOperation{ - "": {op: equal, f: constraintEqual}, - "=": {op: equal, f: constraintEqual}, - "!=": {op: notEqual, f: constraintNotEqual}, - ">": {op: greaterThan, f: constraintGreaterThan}, - "<": {op: lessThan, f: constraintLessThan}, - ">=": {op: greaterThanEqual, f: constraintGreaterThanEqual}, - "<=": {op: lessThanEqual, f: constraintLessThanEqual}, - "~>": {op: pessimistic, f: constraintPessimistic}, - } - - ops := make([]string, 0, len(constraintOperators)) - for k := range constraintOperators { - ops = append(ops, regexp.QuoteMeta(k)) - } - - constraintRegexp = regexp.MustCompile(fmt.Sprintf( - `^\s*(%s)\s*(%s)\s*$`, - strings.Join(ops, "|"), - VersionRegexpRaw)) -} - -// NewConstraint will parse one or more constraints from the given -// constraint string. The string must be a comma-separated list of -// constraints. -func NewConstraint(v string) (Constraints, error) { - vs := strings.Split(v, ",") - result := make([]*Constraint, len(vs)) - for i, single := range vs { - c, err := parseSingle(single) - if err != nil { - return nil, err - } - - result[i] = c - } - - return Constraints(result), nil -} - -// MustConstraints is a helper that wraps a call to a function -// returning (Constraints, error) and panics if error is non-nil. -func MustConstraints(c Constraints, err error) Constraints { - if err != nil { - panic(err) - } - - return c -} - -// Check tests if a version satisfies all the constraints. -func (cs Constraints) Check(v *Version) bool { - for _, c := range cs { - if !c.Check(v) { - return false - } - } - - return true -} - -// Equals compares Constraints with other Constraints -// for equality. This may not represent logical equivalence -// of compared constraints. -// e.g. even though '>0.1,>0.2' is logically equivalent -// to '>0.2' it is *NOT* treated as equal. -// -// Missing operator is treated as equal to '=', whitespaces -// are ignored and constraints are sorted before comaparison. 
-func (cs Constraints) Equals(c Constraints) bool { - if len(cs) != len(c) { - return false - } - - // make copies to retain order of the original slices - left := make(Constraints, len(cs)) - copy(left, cs) - sort.Stable(left) - right := make(Constraints, len(c)) - copy(right, c) - sort.Stable(right) - - // compare sorted slices - for i, con := range left { - if !con.Equals(right[i]) { - return false - } - } - - return true -} - -func (cs Constraints) Len() int { - return len(cs) -} - -func (cs Constraints) Less(i, j int) bool { - if cs[i].op < cs[j].op { - return true - } - if cs[i].op > cs[j].op { - return false - } - - return cs[i].check.LessThan(cs[j].check) -} - -func (cs Constraints) Swap(i, j int) { - cs[i], cs[j] = cs[j], cs[i] -} - -// Returns the string format of the constraints -func (cs Constraints) String() string { - csStr := make([]string, len(cs)) - for i, c := range cs { - csStr[i] = c.String() - } - - return strings.Join(csStr, ",") -} - -// Check tests if a constraint is validated by the given version. -func (c *Constraint) Check(v *Version) bool { - return c.f(v, c.check) -} - -// Prerelease returns true if the version underlying this constraint -// contains a prerelease field. -func (c *Constraint) Prerelease() bool { - return len(c.check.Prerelease()) > 0 -} - -func (c *Constraint) String() string { - return c.original -} - -func parseSingle(v string) (*Constraint, error) { - matches := constraintRegexp.FindStringSubmatch(v) - if matches == nil { - return nil, fmt.Errorf("Malformed constraint: %s", v) - } - - check, err := NewVersion(matches[2]) - if err != nil { - return nil, err - } - - cop := constraintOperators[matches[1]] - - return &Constraint{ - f: cop.f, - op: cop.op, - check: check, - original: v, - }, nil -} - -func prereleaseCheck(v, c *Version) bool { - switch vPre, cPre := v.Prerelease() != "", c.Prerelease() != ""; { - case cPre && vPre: - // A constraint with a pre-release can only match a pre-release version - // with the same base segments. - return reflect.DeepEqual(c.Segments64(), v.Segments64()) - - case !cPre && vPre: - // A constraint without a pre-release can only match a version without a - // pre-release. 
- return false - - case cPre && !vPre: - // OK, except with the pessimistic operator - case !cPre && !vPre: - // OK - } - return true -} - -//------------------------------------------------------------------- -// Constraint functions -//------------------------------------------------------------------- - -type operator rune - -const ( - equal operator = '=' - notEqual operator = '≠' - greaterThan operator = '>' - lessThan operator = '<' - greaterThanEqual operator = '≥' - lessThanEqual operator = '≤' - pessimistic operator = '~' -) - -func constraintEqual(v, c *Version) bool { - return v.Equal(c) -} - -func constraintNotEqual(v, c *Version) bool { - return !v.Equal(c) -} - -func constraintGreaterThan(v, c *Version) bool { - return prereleaseCheck(v, c) && v.Compare(c) == 1 -} - -func constraintLessThan(v, c *Version) bool { - return prereleaseCheck(v, c) && v.Compare(c) == -1 -} - -func constraintGreaterThanEqual(v, c *Version) bool { - return prereleaseCheck(v, c) && v.Compare(c) >= 0 -} - -func constraintLessThanEqual(v, c *Version) bool { - return prereleaseCheck(v, c) && v.Compare(c) <= 0 -} - -func constraintPessimistic(v, c *Version) bool { - // Using a pessimistic constraint with a pre-release, restricts versions to pre-releases - if !prereleaseCheck(v, c) || (c.Prerelease() != "" && v.Prerelease() == "") { - return false - } - - // If the version being checked is naturally less than the constraint, then there - // is no way for the version to be valid against the constraint - if v.LessThan(c) { - return false - } - // We'll use this more than once, so grab the length now so it's a little cleaner - // to write the later checks - cs := len(c.segments) - - // If the version being checked has less specificity than the constraint, then there - // is no way for the version to be valid against the constraint - if cs > len(v.segments) { - return false - } - - // Check the segments in the constraint against those in the version. If the version - // being checked, at any point, does not have the same values in each index of the - // constraints segments, then it cannot be valid against the constraint. - for i := 0; i < c.si-1; i++ { - if v.segments[i] != c.segments[i] { - return false - } - } - - // Check the last part of the segment in the constraint. If the version segment at - // this index is less than the constraints segment at this index, then it cannot - // be valid against the constraint - if c.segments[cs-1] > v.segments[cs-1] { - return false - } - - // If nothing has rejected the version by now, it's valid - return true -} diff --git a/vendor/github.com/hashicorp/go-version/version.go b/vendor/github.com/hashicorp/go-version/version.go deleted file mode 100644 index e87df6990..000000000 --- a/vendor/github.com/hashicorp/go-version/version.go +++ /dev/null @@ -1,407 +0,0 @@ -package version - -import ( - "bytes" - "fmt" - "reflect" - "regexp" - "strconv" - "strings" -) - -// The compiled regular expression used to test the validity of a version. -var ( - versionRegexp *regexp.Regexp - semverRegexp *regexp.Regexp -) - -// The raw regular expression string used for testing the validity -// of a version. 
-const ( - VersionRegexpRaw string = `v?([0-9]+(\.[0-9]+)*?)` + - `(-([0-9]+[0-9A-Za-z\-~]*(\.[0-9A-Za-z\-~]+)*)|(-?([A-Za-z\-~]+[0-9A-Za-z\-~]*(\.[0-9A-Za-z\-~]+)*)))?` + - `(\+([0-9A-Za-z\-~]+(\.[0-9A-Za-z\-~]+)*))?` + - `?` - - // SemverRegexpRaw requires a separator between version and prerelease - SemverRegexpRaw string = `v?([0-9]+(\.[0-9]+)*?)` + - `(-([0-9]+[0-9A-Za-z\-~]*(\.[0-9A-Za-z\-~]+)*)|(-([A-Za-z\-~]+[0-9A-Za-z\-~]*(\.[0-9A-Za-z\-~]+)*)))?` + - `(\+([0-9A-Za-z\-~]+(\.[0-9A-Za-z\-~]+)*))?` + - `?` -) - -// Version represents a single version. -type Version struct { - metadata string - pre string - segments []int64 - si int - original string -} - -func init() { - versionRegexp = regexp.MustCompile("^" + VersionRegexpRaw + "$") - semverRegexp = regexp.MustCompile("^" + SemverRegexpRaw + "$") -} - -// NewVersion parses the given version and returns a new -// Version. -func NewVersion(v string) (*Version, error) { - return newVersion(v, versionRegexp) -} - -// NewSemver parses the given version and returns a new -// Version that adheres strictly to SemVer specs -// https://semver.org/ -func NewSemver(v string) (*Version, error) { - return newVersion(v, semverRegexp) -} - -func newVersion(v string, pattern *regexp.Regexp) (*Version, error) { - matches := pattern.FindStringSubmatch(v) - if matches == nil { - return nil, fmt.Errorf("Malformed version: %s", v) - } - segmentsStr := strings.Split(matches[1], ".") - segments := make([]int64, len(segmentsStr)) - for i, str := range segmentsStr { - val, err := strconv.ParseInt(str, 10, 64) - if err != nil { - return nil, fmt.Errorf( - "Error parsing version: %s", err) - } - - segments[i] = val - } - - // Even though we could support more than three segments, if we - // got less than three, pad it with 0s. This is to cover the basic - // default usecase of semver, which is MAJOR.MINOR.PATCH at the minimum - for i := len(segments); i < 3; i++ { - segments = append(segments, 0) - } - - pre := matches[7] - if pre == "" { - pre = matches[4] - } - - return &Version{ - metadata: matches[10], - pre: pre, - segments: segments, - si: len(segmentsStr), - original: v, - }, nil -} - -// Must is a helper that wraps a call to a function returning (*Version, error) -// and panics if error is non-nil. -func Must(v *Version, err error) *Version { - if err != nil { - panic(err) - } - - return v -} - -// Compare compares this version to another version. This -// returns -1, 0, or 1 if this version is smaller, equal, -// or larger than the other version, respectively. -// -// If you want boolean results, use the LessThan, Equal, -// GreaterThan, GreaterThanOrEqual or LessThanOrEqual methods. 
-func (v *Version) Compare(other *Version) int { - // A quick, efficient equality check - if v.String() == other.String() { - return 0 - } - - segmentsSelf := v.Segments64() - segmentsOther := other.Segments64() - - // If the segments are the same, we must compare on prerelease info - if reflect.DeepEqual(segmentsSelf, segmentsOther) { - preSelf := v.Prerelease() - preOther := other.Prerelease() - if preSelf == "" && preOther == "" { - return 0 - } - if preSelf == "" { - return 1 - } - if preOther == "" { - return -1 - } - - return comparePrereleases(preSelf, preOther) - } - - // Get the highest specificity (hS), or if they're equal, just use segmentSelf length - lenSelf := len(segmentsSelf) - lenOther := len(segmentsOther) - hS := lenSelf - if lenSelf < lenOther { - hS = lenOther - } - // Compare the segments - // Because a constraint could have more/less specificity than the version it's - // checking, we need to account for a lopsided or jagged comparison - for i := 0; i < hS; i++ { - if i > lenSelf-1 { - // This means Self had the lower specificity - // Check to see if the remaining segments in Other are all zeros - if !allZero(segmentsOther[i:]) { - // if not, it means that Other has to be greater than Self - return -1 - } - break - } else if i > lenOther-1 { - // this means Other had the lower specificity - // Check to see if the remaining segments in Self are all zeros - - if !allZero(segmentsSelf[i:]) { - //if not, it means that Self has to be greater than Other - return 1 - } - break - } - lhs := segmentsSelf[i] - rhs := segmentsOther[i] - if lhs == rhs { - continue - } else if lhs < rhs { - return -1 - } - // Otherwis, rhs was > lhs, they're not equal - return 1 - } - - // if we got this far, they're equal - return 0 -} - -func allZero(segs []int64) bool { - for _, s := range segs { - if s != 0 { - return false - } - } - return true -} - -func comparePart(preSelf string, preOther string) int { - if preSelf == preOther { - return 0 - } - - var selfInt int64 - selfNumeric := true - selfInt, err := strconv.ParseInt(preSelf, 10, 64) - if err != nil { - selfNumeric = false - } - - var otherInt int64 - otherNumeric := true - otherInt, err = strconv.ParseInt(preOther, 10, 64) - if err != nil { - otherNumeric = false - } - - // if a part is empty, we use the other to decide - if preSelf == "" { - if otherNumeric { - return -1 - } - return 1 - } - - if preOther == "" { - if selfNumeric { - return 1 - } - return -1 - } - - if selfNumeric && !otherNumeric { - return -1 - } else if !selfNumeric && otherNumeric { - return 1 - } else if !selfNumeric && !otherNumeric && preSelf > preOther { - return 1 - } else if selfInt > otherInt { - return 1 - } - - return -1 -} - -func comparePrereleases(v string, other string) int { - // the same pre release! 
- if v == other { - return 0 - } - - // split both pre releases for analyse their parts - selfPreReleaseMeta := strings.Split(v, ".") - otherPreReleaseMeta := strings.Split(other, ".") - - selfPreReleaseLen := len(selfPreReleaseMeta) - otherPreReleaseLen := len(otherPreReleaseMeta) - - biggestLen := otherPreReleaseLen - if selfPreReleaseLen > otherPreReleaseLen { - biggestLen = selfPreReleaseLen - } - - // loop for parts to find the first difference - for i := 0; i < biggestLen; i = i + 1 { - partSelfPre := "" - if i < selfPreReleaseLen { - partSelfPre = selfPreReleaseMeta[i] - } - - partOtherPre := "" - if i < otherPreReleaseLen { - partOtherPre = otherPreReleaseMeta[i] - } - - compare := comparePart(partSelfPre, partOtherPre) - // if parts are equals, continue the loop - if compare != 0 { - return compare - } - } - - return 0 -} - -// Core returns a new version constructed from only the MAJOR.MINOR.PATCH -// segments of the version, without prerelease or metadata. -func (v *Version) Core() *Version { - segments := v.Segments64() - segmentsOnly := fmt.Sprintf("%d.%d.%d", segments[0], segments[1], segments[2]) - return Must(NewVersion(segmentsOnly)) -} - -// Equal tests if two versions are equal. -func (v *Version) Equal(o *Version) bool { - if v == nil || o == nil { - return v == o - } - - return v.Compare(o) == 0 -} - -// GreaterThan tests if this version is greater than another version. -func (v *Version) GreaterThan(o *Version) bool { - return v.Compare(o) > 0 -} - -// GreaterThanOrEqual tests if this version is greater than or equal to another version. -func (v *Version) GreaterThanOrEqual(o *Version) bool { - return v.Compare(o) >= 0 -} - -// LessThan tests if this version is less than another version. -func (v *Version) LessThan(o *Version) bool { - return v.Compare(o) < 0 -} - -// LessThanOrEqual tests if this version is less than or equal to another version. -func (v *Version) LessThanOrEqual(o *Version) bool { - return v.Compare(o) <= 0 -} - -// Metadata returns any metadata that was part of the version -// string. -// -// Metadata is anything that comes after the "+" in the version. -// For example, with "1.2.3+beta", the metadata is "beta". -func (v *Version) Metadata() string { - return v.metadata -} - -// Prerelease returns any prerelease data that is part of the version, -// or blank if there is no prerelease data. -// -// Prerelease information is anything that comes after the "-" in the -// version (but before any metadata). For example, with "1.2.3-beta", -// the prerelease information is "beta". -func (v *Version) Prerelease() string { - return v.pre -} - -// Segments returns the numeric segments of the version as a slice of ints. -// -// This excludes any metadata or pre-release information. For example, -// for a version "1.2.3-beta", segments will return a slice of -// 1, 2, 3. -func (v *Version) Segments() []int { - segmentSlice := make([]int, len(v.segments)) - for i, v := range v.segments { - segmentSlice[i] = int(v) - } - return segmentSlice -} - -// Segments64 returns the numeric segments of the version as a slice of int64s. -// -// This excludes any metadata or pre-release information. For example, -// for a version "1.2.3-beta", segments will return a slice of -// 1, 2, 3. -func (v *Version) Segments64() []int64 { - result := make([]int64, len(v.segments)) - copy(result, v.segments) - return result -} - -// String returns the full version string included pre-release -// and metadata information. 
-// -// This value is rebuilt according to the parsed segments and other -// information. Therefore, ambiguities in the version string such as -// prefixed zeroes (1.04.0 => 1.4.0), `v` prefix (v1.0.0 => 1.0.0), and -// missing parts (1.0 => 1.0.0) will be made into a canonicalized form -// as shown in the parenthesized examples. -func (v *Version) String() string { - var buf bytes.Buffer - fmtParts := make([]string, len(v.segments)) - for i, s := range v.segments { - // We can ignore err here since we've pre-parsed the values in segments - str := strconv.FormatInt(s, 10) - fmtParts[i] = str - } - fmt.Fprintf(&buf, strings.Join(fmtParts, ".")) - if v.pre != "" { - fmt.Fprintf(&buf, "-%s", v.pre) - } - if v.metadata != "" { - fmt.Fprintf(&buf, "+%s", v.metadata) - } - - return buf.String() -} - -// Original returns the original parsed version as-is, including any -// potential whitespace, `v` prefix, etc. -func (v *Version) Original() string { - return v.original -} - -// UnmarshalText implements encoding.TextUnmarshaler interface. -func (v *Version) UnmarshalText(b []byte) error { - temp, err := NewVersion(string(b)) - if err != nil { - return err - } - - *v = *temp - - return nil -} - -// MarshalText implements encoding.TextMarshaler interface. -func (v *Version) MarshalText() ([]byte, error) { - return []byte(v.String()), nil -} diff --git a/vendor/github.com/hashicorp/go-version/version_collection.go b/vendor/github.com/hashicorp/go-version/version_collection.go deleted file mode 100644 index cc888d43e..000000000 --- a/vendor/github.com/hashicorp/go-version/version_collection.go +++ /dev/null @@ -1,17 +0,0 @@ -package version - -// Collection is a type that implements the sort.Interface interface -// so that versions can be sorted. -type Collection []*Version - -func (v Collection) Len() int { - return len(v) -} - -func (v Collection) Less(i, j int) bool { - return v[i].LessThan(v[j]) -} - -func (v Collection) Swap(i, j int) { - v[i], v[j] = v[j], v[i] -} diff --git a/vendor/github.com/hashicorp/hcl/.gitignore b/vendor/github.com/hashicorp/hcl/.gitignore deleted file mode 100644 index 15586a2b5..000000000 --- a/vendor/github.com/hashicorp/hcl/.gitignore +++ /dev/null @@ -1,9 +0,0 @@ -y.output - -# ignore intellij files -.idea -*.iml -*.ipr -*.iws - -*.test diff --git a/vendor/github.com/hashicorp/hcl/.travis.yml b/vendor/github.com/hashicorp/hcl/.travis.yml deleted file mode 100644 index cb63a3216..000000000 --- a/vendor/github.com/hashicorp/hcl/.travis.yml +++ /dev/null @@ -1,13 +0,0 @@ -sudo: false - -language: go - -go: - - 1.x - - tip - -branches: - only: - - master - -script: make test diff --git a/vendor/github.com/hashicorp/hcl/LICENSE b/vendor/github.com/hashicorp/hcl/LICENSE deleted file mode 100644 index c33dcc7c9..000000000 --- a/vendor/github.com/hashicorp/hcl/LICENSE +++ /dev/null @@ -1,354 +0,0 @@ -Mozilla Public License, version 2.0 - -1. Definitions - -1.1. “Contributor” - - means each individual or legal entity that creates, contributes to the - creation of, or owns Covered Software. - -1.2. “Contributor Version” - - means the combination of the Contributions of others (if any) used by a - Contributor and that particular Contributor’s Contribution. - -1.3. “Contribution” - - means Covered Software of a particular Contributor. - -1.4. 
“Covered Software” - - means Source Code Form to which the initial Contributor has attached the - notice in Exhibit A, the Executable Form of such Source Code Form, and - Modifications of such Source Code Form, in each case including portions - thereof. - -1.5. “Incompatible With Secondary Licenses” - means - - a. that the initial Contributor has attached the notice described in - Exhibit B to the Covered Software; or - - b. that the Covered Software was made available under the terms of version - 1.1 or earlier of the License, but not also under the terms of a - Secondary License. - -1.6. “Executable Form” - - means any form of the work other than Source Code Form. - -1.7. “Larger Work” - - means a work that combines Covered Software with other material, in a separate - file or files, that is not Covered Software. - -1.8. “License” - - means this document. - -1.9. “Licensable” - - means having the right to grant, to the maximum extent possible, whether at the - time of the initial grant or subsequently, any and all of the rights conveyed by - this License. - -1.10. “Modifications” - - means any of the following: - - a. any file in Source Code Form that results from an addition to, deletion - from, or modification of the contents of Covered Software; or - - b. any new file in Source Code Form that contains any Covered Software. - -1.11. “Patent Claims” of a Contributor - - means any patent claim(s), including without limitation, method, process, - and apparatus claims, in any patent Licensable by such Contributor that - would be infringed, but for the grant of the License, by the making, - using, selling, offering for sale, having made, import, or transfer of - either its Contributions or its Contributor Version. - -1.12. “Secondary License” - - means either the GNU General Public License, Version 2.0, the GNU Lesser - General Public License, Version 2.1, the GNU Affero General Public - License, Version 3.0, or any later versions of those licenses. - -1.13. “Source Code Form” - - means the form of the work preferred for making modifications. - -1.14. “You” (or “Your”) - - means an individual or a legal entity exercising rights under this - License. For legal entities, “You” includes any entity that controls, is - controlled by, or is under common control with You. For purposes of this - definition, “control” means (a) the power, direct or indirect, to cause - the direction or management of such entity, whether by contract or - otherwise, or (b) ownership of more than fifty percent (50%) of the - outstanding shares or beneficial ownership of such entity. - - -2. License Grants and Conditions - -2.1. Grants - - Each Contributor hereby grants You a world-wide, royalty-free, - non-exclusive license: - - a. under intellectual property rights (other than patent or trademark) - Licensable by such Contributor to use, reproduce, make available, - modify, display, perform, distribute, and otherwise exploit its - Contributions, either on an unmodified basis, with Modifications, or as - part of a Larger Work; and - - b. under Patent Claims of such Contributor to make, use, sell, offer for - sale, have made, import, and otherwise transfer either its Contributions - or its Contributor Version. - -2.2. Effective Date - - The licenses granted in Section 2.1 with respect to any Contribution become - effective for each Contribution on the date the Contributor first distributes - such Contribution. - -2.3. 
Limitations on Grant Scope - - The licenses granted in this Section 2 are the only rights granted under this - License. No additional rights or licenses will be implied from the distribution - or licensing of Covered Software under this License. Notwithstanding Section - 2.1(b) above, no patent license is granted by a Contributor: - - a. for any code that a Contributor has removed from Covered Software; or - - b. for infringements caused by: (i) Your and any other third party’s - modifications of Covered Software, or (ii) the combination of its - Contributions with other software (except as part of its Contributor - Version); or - - c. under Patent Claims infringed by Covered Software in the absence of its - Contributions. - - This License does not grant any rights in the trademarks, service marks, or - logos of any Contributor (except as may be necessary to comply with the - notice requirements in Section 3.4). - -2.4. Subsequent Licenses - - No Contributor makes additional grants as a result of Your choice to - distribute the Covered Software under a subsequent version of this License - (see Section 10.2) or under the terms of a Secondary License (if permitted - under the terms of Section 3.3). - -2.5. Representation - - Each Contributor represents that the Contributor believes its Contributions - are its original creation(s) or it has sufficient rights to grant the - rights to its Contributions conveyed by this License. - -2.6. Fair Use - - This License is not intended to limit any rights You have under applicable - copyright doctrines of fair use, fair dealing, or other equivalents. - -2.7. Conditions - - Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted in - Section 2.1. - - -3. Responsibilities - -3.1. Distribution of Source Form - - All distribution of Covered Software in Source Code Form, including any - Modifications that You create or to which You contribute, must be under the - terms of this License. You must inform recipients that the Source Code Form - of the Covered Software is governed by the terms of this License, and how - they can obtain a copy of this License. You may not attempt to alter or - restrict the recipients’ rights in the Source Code Form. - -3.2. Distribution of Executable Form - - If You distribute Covered Software in Executable Form then: - - a. such Covered Software must also be made available in Source Code Form, - as described in Section 3.1, and You must inform recipients of the - Executable Form how they can obtain a copy of such Source Code Form by - reasonable means in a timely manner, at a charge no more than the cost - of distribution to the recipient; and - - b. You may distribute such Executable Form under the terms of this License, - or sublicense it under different terms, provided that the license for - the Executable Form does not attempt to limit or alter the recipients’ - rights in the Source Code Form under this License. - -3.3. Distribution of a Larger Work - - You may create and distribute a Larger Work under terms of Your choice, - provided that You also comply with the requirements of this License for the - Covered Software. 
If the Larger Work is a combination of Covered Software - with a work governed by one or more Secondary Licenses, and the Covered - Software is not Incompatible With Secondary Licenses, this License permits - You to additionally distribute such Covered Software under the terms of - such Secondary License(s), so that the recipient of the Larger Work may, at - their option, further distribute the Covered Software under the terms of - either this License or such Secondary License(s). - -3.4. Notices - - You may not remove or alter the substance of any license notices (including - copyright notices, patent notices, disclaimers of warranty, or limitations - of liability) contained within the Source Code Form of the Covered - Software, except that You may alter any license notices to the extent - required to remedy known factual inaccuracies. - -3.5. Application of Additional Terms - - You may choose to offer, and to charge a fee for, warranty, support, - indemnity or liability obligations to one or more recipients of Covered - Software. However, You may do so only on Your own behalf, and not on behalf - of any Contributor. You must make it absolutely clear that any such - warranty, support, indemnity, or liability obligation is offered by You - alone, and You hereby agree to indemnify every Contributor for any - liability incurred by such Contributor as a result of warranty, support, - indemnity or liability terms You offer. You may include additional - disclaimers of warranty and limitations of liability specific to any - jurisdiction. - -4. Inability to Comply Due to Statute or Regulation - - If it is impossible for You to comply with any of the terms of this License - with respect to some or all of the Covered Software due to statute, judicial - order, or regulation then You must: (a) comply with the terms of this License - to the maximum extent possible; and (b) describe the limitations and the code - they affect. Such description must be placed in a text file included with all - distributions of the Covered Software under this License. Except to the - extent prohibited by statute or regulation, such description must be - sufficiently detailed for a recipient of ordinary skill to be able to - understand it. - -5. Termination - -5.1. The rights granted under this License will terminate automatically if You - fail to comply with any of its terms. However, if You become compliant, - then the rights granted under this License from a particular Contributor - are reinstated (a) provisionally, unless and until such Contributor - explicitly and finally terminates Your grants, and (b) on an ongoing basis, - if such Contributor fails to notify You of the non-compliance by some - reasonable means prior to 60 days after You have come back into compliance. - Moreover, Your grants from a particular Contributor are reinstated on an - ongoing basis if such Contributor notifies You of the non-compliance by - some reasonable means, this is the first time You have received notice of - non-compliance with this License from such Contributor, and You become - compliant prior to 30 days after Your receipt of the notice. - -5.2. If You initiate litigation against any entity by asserting a patent - infringement claim (excluding declaratory judgment actions, counter-claims, - and cross-claims) alleging that a Contributor Version directly or - indirectly infringes any patent, then the rights granted to You by any and - all Contributors for the Covered Software under Section 2.1 of this License - shall terminate. 
- -5.3. In the event of termination under Sections 5.1 or 5.2 above, all end user - license agreements (excluding distributors and resellers) which have been - validly granted by You or Your distributors under this License prior to - termination shall survive termination. - -6. Disclaimer of Warranty - - Covered Software is provided under this License on an “as is” basis, without - warranty of any kind, either expressed, implied, or statutory, including, - without limitation, warranties that the Covered Software is free of defects, - merchantable, fit for a particular purpose or non-infringing. The entire - risk as to the quality and performance of the Covered Software is with You. - Should any Covered Software prove defective in any respect, You (not any - Contributor) assume the cost of any necessary servicing, repair, or - correction. This disclaimer of warranty constitutes an essential part of this - License. No use of any Covered Software is authorized under this License - except under this disclaimer. - -7. Limitation of Liability - - Under no circumstances and under no legal theory, whether tort (including - negligence), contract, or otherwise, shall any Contributor, or anyone who - distributes Covered Software as permitted above, be liable to You for any - direct, indirect, special, incidental, or consequential damages of any - character including, without limitation, damages for lost profits, loss of - goodwill, work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses, even if such party shall have been - informed of the possibility of such damages. This limitation of liability - shall not apply to liability for death or personal injury resulting from such - party’s negligence to the extent applicable law prohibits such limitation. - Some jurisdictions do not allow the exclusion or limitation of incidental or - consequential damages, so this exclusion and limitation may not apply to You. - -8. Litigation - - Any litigation relating to this License may be brought only in the courts of - a jurisdiction where the defendant maintains its principal place of business - and such litigation shall be governed by laws of that jurisdiction, without - reference to its conflict-of-law provisions. Nothing in this Section shall - prevent a party’s ability to bring cross-claims or counter-claims. - -9. Miscellaneous - - This License represents the complete agreement concerning the subject matter - hereof. If any provision of this License is held to be unenforceable, such - provision shall be reformed only to the extent necessary to make it - enforceable. Any law or regulation which provides that the language of a - contract shall be construed against the drafter shall not be used to construe - this License against a Contributor. - - -10. Versions of the License - -10.1. New Versions - - Mozilla Foundation is the license steward. Except as provided in Section - 10.3, no one other than the license steward has the right to modify or - publish new versions of this License. Each version will be given a - distinguishing version number. - -10.2. Effect of New Versions - - You may distribute the Covered Software under the terms of the version of - the License under which You originally received the Covered Software, or - under the terms of any subsequent version published by the license - steward. - -10.3. 
Modified Versions - - If you create software not governed by this License, and you want to - create a new license for such software, you may create and use a modified - version of this License if you rename the license and remove any - references to the name of the license steward (except to note that such - modified license differs from this License). - -10.4. Distributing Source Code Form that is Incompatible With Secondary Licenses - If You choose to distribute Source Code Form that is Incompatible With - Secondary Licenses under the terms of this version of the License, the - notice described in Exhibit B of this License must be attached. - -Exhibit A - Source Code Form License Notice - - This Source Code Form is subject to the - terms of the Mozilla Public License, v. - 2.0. If a copy of the MPL was not - distributed with this file, You can - obtain one at - http://mozilla.org/MPL/2.0/. - -If it is not possible or desirable to put the notice in a particular file, then -You may include the notice in a location (such as a LICENSE file in a relevant -directory) where a recipient would be likely to look for such a notice. - -You may add additional accurate notices of copyright ownership. - -Exhibit B - “Incompatible With Secondary Licenses” Notice - - This Source Code Form is “Incompatible - With Secondary Licenses”, as defined by - the Mozilla Public License, v. 2.0. - diff --git a/vendor/github.com/hashicorp/hcl/Makefile b/vendor/github.com/hashicorp/hcl/Makefile deleted file mode 100644 index 84fd743f5..000000000 --- a/vendor/github.com/hashicorp/hcl/Makefile +++ /dev/null @@ -1,18 +0,0 @@ -TEST?=./... - -default: test - -fmt: generate - go fmt ./... - -test: generate - go get -t ./... - go test $(TEST) $(TESTARGS) - -generate: - go generate ./... - -updatedeps: - go get -u golang.org/x/tools/cmd/stringer - -.PHONY: default generate test updatedeps diff --git a/vendor/github.com/hashicorp/hcl/README.md b/vendor/github.com/hashicorp/hcl/README.md deleted file mode 100644 index c8223326d..000000000 --- a/vendor/github.com/hashicorp/hcl/README.md +++ /dev/null @@ -1,125 +0,0 @@ -# HCL - -[![GoDoc](https://godoc.org/github.com/hashicorp/hcl?status.png)](https://godoc.org/github.com/hashicorp/hcl) [![Build Status](https://travis-ci.org/hashicorp/hcl.svg?branch=master)](https://travis-ci.org/hashicorp/hcl) - -HCL (HashiCorp Configuration Language) is a configuration language built -by HashiCorp. The goal of HCL is to build a structured configuration language -that is both human and machine friendly for use with command-line tools, but -specifically targeted towards DevOps tools, servers, etc. - -HCL is also fully JSON compatible. That is, JSON can be used as completely -valid input to a system expecting HCL. This helps makes systems -interoperable with other systems. - -HCL is heavily inspired by -[libucl](https://github.com/vstakhov/libucl), -nginx configuration, and others similar. - -## Why? - -A common question when viewing HCL is to ask the question: why not -JSON, YAML, etc.? - -Prior to HCL, the tools we built at [HashiCorp](http://www.hashicorp.com) -used a variety of configuration languages from full programming languages -such as Ruby to complete data structure languages such as JSON. What we -learned is that some people wanted human-friendly configuration languages -and some people wanted machine-friendly languages. - -JSON fits a nice balance in this, but is fairly verbose and most -importantly doesn't support comments. 
With YAML, we found that beginners -had a really hard time determining what the actual structure was, and -ended up guessing more often than not whether to use a hyphen, colon, etc. -in order to represent some configuration key. - -Full programming languages such as Ruby enable complex behavior -a configuration language shouldn't usually allow, and also forces -people to learn some set of Ruby. - -Because of this, we decided to create our own configuration language -that is JSON-compatible. Our configuration language (HCL) is designed -to be written and modified by humans. The API for HCL allows JSON -as an input so that it is also machine-friendly (machines can generate -JSON instead of trying to generate HCL). - -Our goal with HCL is not to alienate other configuration languages. -It is instead to provide HCL as a specialized language for our tools, -and JSON as the interoperability layer. - -## Syntax - -For a complete grammar, please see the parser itself. A high-level overview -of the syntax and grammar is listed here. - - * Single line comments start with `#` or `//` - - * Multi-line comments are wrapped in `/*` and `*/`. Nested block comments - are not allowed. A multi-line comment (also known as a block comment) - terminates at the first `*/` found. - - * Values are assigned with the syntax `key = value` (whitespace doesn't - matter). The value can be any primitive: a string, number, boolean, - object, or list. - - * Strings are double-quoted and can contain any UTF-8 characters. - Example: `"Hello, World"` - - * Multi-line strings start with `<- - echo %Path% - - go version - - go env - - go get -t ./... - -build_script: -- cmd: go test -v ./... diff --git a/vendor/github.com/hashicorp/hcl/decoder.go b/vendor/github.com/hashicorp/hcl/decoder.go deleted file mode 100644 index bed9ebbe1..000000000 --- a/vendor/github.com/hashicorp/hcl/decoder.go +++ /dev/null @@ -1,729 +0,0 @@ -package hcl - -import ( - "errors" - "fmt" - "reflect" - "sort" - "strconv" - "strings" - - "github.com/hashicorp/hcl/hcl/ast" - "github.com/hashicorp/hcl/hcl/parser" - "github.com/hashicorp/hcl/hcl/token" -) - -// This is the tag to use with structures to have settings for HCL -const tagName = "hcl" - -var ( - // nodeType holds a reference to the type of ast.Node - nodeType reflect.Type = findNodeType() -) - -// Unmarshal accepts a byte slice as input and writes the -// data to the value pointed to by v. -func Unmarshal(bs []byte, v interface{}) error { - root, err := parse(bs) - if err != nil { - return err - } - - return DecodeObject(v, root) -} - -// Decode reads the given input and decodes it into the structure -// given by `out`. -func Decode(out interface{}, in string) error { - obj, err := Parse(in) - if err != nil { - return err - } - - return DecodeObject(out, obj) -} - -// DecodeObject is a lower-level version of Decode. It decodes a -// raw Object into the given output. -func DecodeObject(out interface{}, n ast.Node) error { - val := reflect.ValueOf(out) - if val.Kind() != reflect.Ptr { - return errors.New("result must be a pointer") - } - - // If we have the file, we really decode the root node - if f, ok := n.(*ast.File); ok { - n = f.Node - } - - var d decoder - return d.decode("root", n, val.Elem()) -} - -type decoder struct { - stack []reflect.Kind -} - -func (d *decoder) decode(name string, node ast.Node, result reflect.Value) error { - k := result - - // If we have an interface with a valid value, we use that - // for the check. 
- if result.Kind() == reflect.Interface { - elem := result.Elem() - if elem.IsValid() { - k = elem - } - } - - // Push current onto stack unless it is an interface. - if k.Kind() != reflect.Interface { - d.stack = append(d.stack, k.Kind()) - - // Schedule a pop - defer func() { - d.stack = d.stack[:len(d.stack)-1] - }() - } - - switch k.Kind() { - case reflect.Bool: - return d.decodeBool(name, node, result) - case reflect.Float32, reflect.Float64: - return d.decodeFloat(name, node, result) - case reflect.Int, reflect.Int32, reflect.Int64: - return d.decodeInt(name, node, result) - case reflect.Interface: - // When we see an interface, we make our own thing - return d.decodeInterface(name, node, result) - case reflect.Map: - return d.decodeMap(name, node, result) - case reflect.Ptr: - return d.decodePtr(name, node, result) - case reflect.Slice: - return d.decodeSlice(name, node, result) - case reflect.String: - return d.decodeString(name, node, result) - case reflect.Struct: - return d.decodeStruct(name, node, result) - default: - return &parser.PosError{ - Pos: node.Pos(), - Err: fmt.Errorf("%s: unknown kind to decode into: %s", name, k.Kind()), - } - } -} - -func (d *decoder) decodeBool(name string, node ast.Node, result reflect.Value) error { - switch n := node.(type) { - case *ast.LiteralType: - if n.Token.Type == token.BOOL { - v, err := strconv.ParseBool(n.Token.Text) - if err != nil { - return err - } - - result.Set(reflect.ValueOf(v)) - return nil - } - } - - return &parser.PosError{ - Pos: node.Pos(), - Err: fmt.Errorf("%s: unknown type %T", name, node), - } -} - -func (d *decoder) decodeFloat(name string, node ast.Node, result reflect.Value) error { - switch n := node.(type) { - case *ast.LiteralType: - if n.Token.Type == token.FLOAT || n.Token.Type == token.NUMBER { - v, err := strconv.ParseFloat(n.Token.Text, 64) - if err != nil { - return err - } - - result.Set(reflect.ValueOf(v).Convert(result.Type())) - return nil - } - } - - return &parser.PosError{ - Pos: node.Pos(), - Err: fmt.Errorf("%s: unknown type %T", name, node), - } -} - -func (d *decoder) decodeInt(name string, node ast.Node, result reflect.Value) error { - switch n := node.(type) { - case *ast.LiteralType: - switch n.Token.Type { - case token.NUMBER: - v, err := strconv.ParseInt(n.Token.Text, 0, 0) - if err != nil { - return err - } - - if result.Kind() == reflect.Interface { - result.Set(reflect.ValueOf(int(v))) - } else { - result.SetInt(v) - } - return nil - case token.STRING: - v, err := strconv.ParseInt(n.Token.Value().(string), 0, 0) - if err != nil { - return err - } - - if result.Kind() == reflect.Interface { - result.Set(reflect.ValueOf(int(v))) - } else { - result.SetInt(v) - } - return nil - } - } - - return &parser.PosError{ - Pos: node.Pos(), - Err: fmt.Errorf("%s: unknown type %T", name, node), - } -} - -func (d *decoder) decodeInterface(name string, node ast.Node, result reflect.Value) error { - // When we see an ast.Node, we retain the value to enable deferred decoding. - // Very useful in situations where we want to preserve ast.Node information - // like Pos - if result.Type() == nodeType && result.CanSet() { - result.Set(reflect.ValueOf(node)) - return nil - } - - var set reflect.Value - redecode := true - - // For testing types, ObjectType should just be treated as a list. We - // set this to a temporary var because we want to pass in the real node. 
- testNode := node - if ot, ok := node.(*ast.ObjectType); ok { - testNode = ot.List - } - - switch n := testNode.(type) { - case *ast.ObjectList: - // If we're at the root or we're directly within a slice, then we - // decode objects into map[string]interface{}, otherwise we decode - // them into lists. - if len(d.stack) == 0 || d.stack[len(d.stack)-1] == reflect.Slice { - var temp map[string]interface{} - tempVal := reflect.ValueOf(temp) - result := reflect.MakeMap( - reflect.MapOf( - reflect.TypeOf(""), - tempVal.Type().Elem())) - - set = result - } else { - var temp []map[string]interface{} - tempVal := reflect.ValueOf(temp) - result := reflect.MakeSlice( - reflect.SliceOf(tempVal.Type().Elem()), 0, len(n.Items)) - set = result - } - case *ast.ObjectType: - // If we're at the root or we're directly within a slice, then we - // decode objects into map[string]interface{}, otherwise we decode - // them into lists. - if len(d.stack) == 0 || d.stack[len(d.stack)-1] == reflect.Slice { - var temp map[string]interface{} - tempVal := reflect.ValueOf(temp) - result := reflect.MakeMap( - reflect.MapOf( - reflect.TypeOf(""), - tempVal.Type().Elem())) - - set = result - } else { - var temp []map[string]interface{} - tempVal := reflect.ValueOf(temp) - result := reflect.MakeSlice( - reflect.SliceOf(tempVal.Type().Elem()), 0, 1) - set = result - } - case *ast.ListType: - var temp []interface{} - tempVal := reflect.ValueOf(temp) - result := reflect.MakeSlice( - reflect.SliceOf(tempVal.Type().Elem()), 0, 0) - set = result - case *ast.LiteralType: - switch n.Token.Type { - case token.BOOL: - var result bool - set = reflect.Indirect(reflect.New(reflect.TypeOf(result))) - case token.FLOAT: - var result float64 - set = reflect.Indirect(reflect.New(reflect.TypeOf(result))) - case token.NUMBER: - var result int - set = reflect.Indirect(reflect.New(reflect.TypeOf(result))) - case token.STRING, token.HEREDOC: - set = reflect.Indirect(reflect.New(reflect.TypeOf(""))) - default: - return &parser.PosError{ - Pos: node.Pos(), - Err: fmt.Errorf("%s: cannot decode into interface: %T", name, node), - } - } - default: - return fmt.Errorf( - "%s: cannot decode into interface: %T", - name, node) - } - - // Set the result to what its supposed to be, then reset - // result so we don't reflect into this method anymore. - result.Set(set) - - if redecode { - // Revisit the node so that we can use the newly instantiated - // thing and populate it. 
- if err := d.decode(name, node, result); err != nil { - return err - } - } - - return nil -} - -func (d *decoder) decodeMap(name string, node ast.Node, result reflect.Value) error { - if item, ok := node.(*ast.ObjectItem); ok { - node = &ast.ObjectList{Items: []*ast.ObjectItem{item}} - } - - if ot, ok := node.(*ast.ObjectType); ok { - node = ot.List - } - - n, ok := node.(*ast.ObjectList) - if !ok { - return &parser.PosError{ - Pos: node.Pos(), - Err: fmt.Errorf("%s: not an object type for map (%T)", name, node), - } - } - - // If we have an interface, then we can address the interface, - // but not the slice itself, so get the element but set the interface - set := result - if result.Kind() == reflect.Interface { - result = result.Elem() - } - - resultType := result.Type() - resultElemType := resultType.Elem() - resultKeyType := resultType.Key() - if resultKeyType.Kind() != reflect.String { - return &parser.PosError{ - Pos: node.Pos(), - Err: fmt.Errorf("%s: map must have string keys", name), - } - } - - // Make a map if it is nil - resultMap := result - if result.IsNil() { - resultMap = reflect.MakeMap( - reflect.MapOf(resultKeyType, resultElemType)) - } - - // Go through each element and decode it. - done := make(map[string]struct{}) - for _, item := range n.Items { - if item.Val == nil { - continue - } - - // github.com/hashicorp/terraform/issue/5740 - if len(item.Keys) == 0 { - return &parser.PosError{ - Pos: node.Pos(), - Err: fmt.Errorf("%s: map must have string keys", name), - } - } - - // Get the key we're dealing with, which is the first item - keyStr := item.Keys[0].Token.Value().(string) - - // If we've already processed this key, then ignore it - if _, ok := done[keyStr]; ok { - continue - } - - // Determine the value. If we have more than one key, then we - // get the objectlist of only these keys. - itemVal := item.Val - if len(item.Keys) > 1 { - itemVal = n.Filter(keyStr) - done[keyStr] = struct{}{} - } - - // Make the field name - fieldName := fmt.Sprintf("%s.%s", name, keyStr) - - // Get the key/value as reflection values - key := reflect.ValueOf(keyStr) - val := reflect.Indirect(reflect.New(resultElemType)) - - // If we have a pre-existing value in the map, use that - oldVal := resultMap.MapIndex(key) - if oldVal.IsValid() { - val.Set(oldVal) - } - - // Decode! - if err := d.decode(fieldName, itemVal, val); err != nil { - return err - } - - // Set the value on the map - resultMap.SetMapIndex(key, val) - } - - // Set the final map if we can - set.Set(resultMap) - return nil -} - -func (d *decoder) decodePtr(name string, node ast.Node, result reflect.Value) error { - // Create an element of the concrete (non pointer) type and decode - // into that. Then set the value of the pointer to this type. 
- resultType := result.Type() - resultElemType := resultType.Elem() - val := reflect.New(resultElemType) - if err := d.decode(name, node, reflect.Indirect(val)); err != nil { - return err - } - - result.Set(val) - return nil -} - -func (d *decoder) decodeSlice(name string, node ast.Node, result reflect.Value) error { - // If we have an interface, then we can address the interface, - // but not the slice itself, so get the element but set the interface - set := result - if result.Kind() == reflect.Interface { - result = result.Elem() - } - // Create the slice if it isn't nil - resultType := result.Type() - resultElemType := resultType.Elem() - if result.IsNil() { - resultSliceType := reflect.SliceOf(resultElemType) - result = reflect.MakeSlice( - resultSliceType, 0, 0) - } - - // Figure out the items we'll be copying into the slice - var items []ast.Node - switch n := node.(type) { - case *ast.ObjectList: - items = make([]ast.Node, len(n.Items)) - for i, item := range n.Items { - items[i] = item - } - case *ast.ObjectType: - items = []ast.Node{n} - case *ast.ListType: - items = n.List - default: - return &parser.PosError{ - Pos: node.Pos(), - Err: fmt.Errorf("unknown slice type: %T", node), - } - } - - for i, item := range items { - fieldName := fmt.Sprintf("%s[%d]", name, i) - - // Decode - val := reflect.Indirect(reflect.New(resultElemType)) - - // if item is an object that was decoded from ambiguous JSON and - // flattened, make sure it's expanded if it needs to decode into a - // defined structure. - item := expandObject(item, val) - - if err := d.decode(fieldName, item, val); err != nil { - return err - } - - // Append it onto the slice - result = reflect.Append(result, val) - } - - set.Set(result) - return nil -} - -// expandObject detects if an ambiguous JSON object was flattened to a List which -// should be decoded into a struct, and expands the ast to properly deocode. -func expandObject(node ast.Node, result reflect.Value) ast.Node { - item, ok := node.(*ast.ObjectItem) - if !ok { - return node - } - - elemType := result.Type() - - // our target type must be a struct - switch elemType.Kind() { - case reflect.Ptr: - switch elemType.Elem().Kind() { - case reflect.Struct: - //OK - default: - return node - } - case reflect.Struct: - //OK - default: - return node - } - - // A list value will have a key and field name. If it had more fields, - // it wouldn't have been flattened. 
- if len(item.Keys) != 2 { - return node - } - - keyToken := item.Keys[0].Token - item.Keys = item.Keys[1:] - - // we need to un-flatten the ast enough to decode - newNode := &ast.ObjectItem{ - Keys: []*ast.ObjectKey{ - &ast.ObjectKey{ - Token: keyToken, - }, - }, - Val: &ast.ObjectType{ - List: &ast.ObjectList{ - Items: []*ast.ObjectItem{item}, - }, - }, - } - - return newNode -} - -func (d *decoder) decodeString(name string, node ast.Node, result reflect.Value) error { - switch n := node.(type) { - case *ast.LiteralType: - switch n.Token.Type { - case token.NUMBER: - result.Set(reflect.ValueOf(n.Token.Text).Convert(result.Type())) - return nil - case token.STRING, token.HEREDOC: - result.Set(reflect.ValueOf(n.Token.Value()).Convert(result.Type())) - return nil - } - } - - return &parser.PosError{ - Pos: node.Pos(), - Err: fmt.Errorf("%s: unknown type for string %T", name, node), - } -} - -func (d *decoder) decodeStruct(name string, node ast.Node, result reflect.Value) error { - var item *ast.ObjectItem - if it, ok := node.(*ast.ObjectItem); ok { - item = it - node = it.Val - } - - if ot, ok := node.(*ast.ObjectType); ok { - node = ot.List - } - - // Handle the special case where the object itself is a literal. Previously - // the yacc parser would always ensure top-level elements were arrays. The new - // parser does not make the same guarantees, thus we need to convert any - // top-level literal elements into a list. - if _, ok := node.(*ast.LiteralType); ok && item != nil { - node = &ast.ObjectList{Items: []*ast.ObjectItem{item}} - } - - list, ok := node.(*ast.ObjectList) - if !ok { - return &parser.PosError{ - Pos: node.Pos(), - Err: fmt.Errorf("%s: not an object type for struct (%T)", name, node), - } - } - - // This slice will keep track of all the structs we'll be decoding. - // There can be more than one struct if there are embedded structs - // that are squashed. - structs := make([]reflect.Value, 1, 5) - structs[0] = result - - // Compile the list of all the fields that we're going to be decoding - // from all the structs. - type field struct { - field reflect.StructField - val reflect.Value - } - fields := []field{} - for len(structs) > 0 { - structVal := structs[0] - structs = structs[1:] - - structType := structVal.Type() - for i := 0; i < structType.NumField(); i++ { - fieldType := structType.Field(i) - tagParts := strings.Split(fieldType.Tag.Get(tagName), ",") - - // Ignore fields with tag name "-" - if tagParts[0] == "-" { - continue - } - - if fieldType.Anonymous { - fieldKind := fieldType.Type.Kind() - if fieldKind != reflect.Struct { - return &parser.PosError{ - Pos: node.Pos(), - Err: fmt.Errorf("%s: unsupported type to struct: %s", - fieldType.Name, fieldKind), - } - } - - // We have an embedded field. We "squash" the fields down - // if specified in the tag. 
- squash := false - for _, tag := range tagParts[1:] { - if tag == "squash" { - squash = true - break - } - } - - if squash { - structs = append( - structs, result.FieldByName(fieldType.Name)) - continue - } - } - - // Normal struct field, store it away - fields = append(fields, field{fieldType, structVal.Field(i)}) - } - } - - usedKeys := make(map[string]struct{}) - decodedFields := make([]string, 0, len(fields)) - decodedFieldsVal := make([]reflect.Value, 0) - unusedKeysVal := make([]reflect.Value, 0) - for _, f := range fields { - field, fieldValue := f.field, f.val - if !fieldValue.IsValid() { - // This should never happen - panic("field is not valid") - } - - // If we can't set the field, then it is unexported or something, - // and we just continue onwards. - if !fieldValue.CanSet() { - continue - } - - fieldName := field.Name - - tagValue := field.Tag.Get(tagName) - tagParts := strings.SplitN(tagValue, ",", 2) - if len(tagParts) >= 2 { - switch tagParts[1] { - case "decodedFields": - decodedFieldsVal = append(decodedFieldsVal, fieldValue) - continue - case "key": - if item == nil { - return &parser.PosError{ - Pos: node.Pos(), - Err: fmt.Errorf("%s: %s asked for 'key', impossible", - name, fieldName), - } - } - - fieldValue.SetString(item.Keys[0].Token.Value().(string)) - continue - case "unusedKeys": - unusedKeysVal = append(unusedKeysVal, fieldValue) - continue - } - } - - if tagParts[0] != "" { - fieldName = tagParts[0] - } - - // Determine the element we'll use to decode. If it is a single - // match (only object with the field), then we decode it exactly. - // If it is a prefix match, then we decode the matches. - filter := list.Filter(fieldName) - - prefixMatches := filter.Children() - matches := filter.Elem() - if len(matches.Items) == 0 && len(prefixMatches.Items) == 0 { - continue - } - - // Track the used key - usedKeys[fieldName] = struct{}{} - - // Create the field name and decode. We range over the elements - // because we actually want the value. - fieldName = fmt.Sprintf("%s.%s", name, fieldName) - if len(prefixMatches.Items) > 0 { - if err := d.decode(fieldName, prefixMatches, fieldValue); err != nil { - return err - } - } - for _, match := range matches.Items { - var decodeNode ast.Node = match.Val - if ot, ok := decodeNode.(*ast.ObjectType); ok { - decodeNode = &ast.ObjectList{Items: ot.List.Items} - } - - if err := d.decode(fieldName, decodeNode, fieldValue); err != nil { - return err - } - } - - decodedFields = append(decodedFields, field.Name) - } - - if len(decodedFieldsVal) > 0 { - // Sort it so that it is deterministic - sort.Strings(decodedFields) - - for _, v := range decodedFieldsVal { - v.Set(reflect.ValueOf(decodedFields)) - } - } - - return nil -} - -// findNodeType returns the type of ast.Node -func findNodeType() reflect.Type { - var nodeContainer struct { - Node ast.Node - } - value := reflect.ValueOf(nodeContainer).FieldByName("Node") - return value.Type() -} diff --git a/vendor/github.com/hashicorp/hcl/hcl.go b/vendor/github.com/hashicorp/hcl/hcl.go deleted file mode 100644 index 575a20b50..000000000 --- a/vendor/github.com/hashicorp/hcl/hcl.go +++ /dev/null @@ -1,11 +0,0 @@ -// Package hcl decodes HCL into usable Go structures. -// -// hcl input can come in either pure HCL format or JSON format. -// It can be parsed into an AST, and then decoded into a structure, -// or it can be decoded directly from a string into a structure. 
-// -// If you choose to parse HCL into a raw AST, the benefit is that you -// can write custom visitor implementations to implement custom -// semantic checks. By default, HCL does not perform any semantic -// checks. -package hcl diff --git a/vendor/github.com/hashicorp/hcl/hcl/ast/ast.go b/vendor/github.com/hashicorp/hcl/hcl/ast/ast.go deleted file mode 100644 index 6e5ef654b..000000000 --- a/vendor/github.com/hashicorp/hcl/hcl/ast/ast.go +++ /dev/null @@ -1,219 +0,0 @@ -// Package ast declares the types used to represent syntax trees for HCL -// (HashiCorp Configuration Language) -package ast - -import ( - "fmt" - "strings" - - "github.com/hashicorp/hcl/hcl/token" -) - -// Node is an element in the abstract syntax tree. -type Node interface { - node() - Pos() token.Pos -} - -func (File) node() {} -func (ObjectList) node() {} -func (ObjectKey) node() {} -func (ObjectItem) node() {} -func (Comment) node() {} -func (CommentGroup) node() {} -func (ObjectType) node() {} -func (LiteralType) node() {} -func (ListType) node() {} - -// File represents a single HCL file -type File struct { - Node Node // usually a *ObjectList - Comments []*CommentGroup // list of all comments in the source -} - -func (f *File) Pos() token.Pos { - return f.Node.Pos() -} - -// ObjectList represents a list of ObjectItems. An HCL file itself is an -// ObjectList. -type ObjectList struct { - Items []*ObjectItem -} - -func (o *ObjectList) Add(item *ObjectItem) { - o.Items = append(o.Items, item) -} - -// Filter filters out the objects with the given key list as a prefix. -// -// The returned list of objects contain ObjectItems where the keys have -// this prefix already stripped off. This might result in objects with -// zero-length key lists if they have no children. -// -// If no matches are found, an empty ObjectList (non-nil) is returned. -func (o *ObjectList) Filter(keys ...string) *ObjectList { - var result ObjectList - for _, item := range o.Items { - // If there aren't enough keys, then ignore this - if len(item.Keys) < len(keys) { - continue - } - - match := true - for i, key := range item.Keys[:len(keys)] { - key := key.Token.Value().(string) - if key != keys[i] && !strings.EqualFold(key, keys[i]) { - match = false - break - } - } - if !match { - continue - } - - // Strip off the prefix from the children - newItem := *item - newItem.Keys = newItem.Keys[len(keys):] - result.Add(&newItem) - } - - return &result -} - -// Children returns further nested objects (key length > 0) within this -// ObjectList. This should be used with Filter to get at child items. -func (o *ObjectList) Children() *ObjectList { - var result ObjectList - for _, item := range o.Items { - if len(item.Keys) > 0 { - result.Add(item) - } - } - - return &result -} - -// Elem returns items in the list that are direct element assignments -// (key length == 0). This should be used with Filter to get at elements. -func (o *ObjectList) Elem() *ObjectList { - var result ObjectList - for _, item := range o.Items { - if len(item.Keys) == 0 { - result.Add(item) - } - } - - return &result -} - -func (o *ObjectList) Pos() token.Pos { - // always returns the uninitiliazed position - return o.Items[0].Pos() -} - -// ObjectItem represents a HCL Object Item. An item is represented with a key -// (or keys). It can be an assignment or an object (both normal and nested) -type ObjectItem struct { - // keys is only one length long if it's of type assignment. If it's a - // nested object it can be larger than one. 
In that case "assign" is - // invalid as there is no assignments for a nested object. - Keys []*ObjectKey - - // assign contains the position of "=", if any - Assign token.Pos - - // val is the item itself. It can be an object,list, number, bool or a - // string. If key length is larger than one, val can be only of type - // Object. - Val Node - - LeadComment *CommentGroup // associated lead comment - LineComment *CommentGroup // associated line comment -} - -func (o *ObjectItem) Pos() token.Pos { - // I'm not entirely sure what causes this, but removing this causes - // a test failure. We should investigate at some point. - if len(o.Keys) == 0 { - return token.Pos{} - } - - return o.Keys[0].Pos() -} - -// ObjectKeys are either an identifier or of type string. -type ObjectKey struct { - Token token.Token -} - -func (o *ObjectKey) Pos() token.Pos { - return o.Token.Pos -} - -// LiteralType represents a literal of basic type. Valid types are: -// token.NUMBER, token.FLOAT, token.BOOL and token.STRING -type LiteralType struct { - Token token.Token - - // comment types, only used when in a list - LeadComment *CommentGroup - LineComment *CommentGroup -} - -func (l *LiteralType) Pos() token.Pos { - return l.Token.Pos -} - -// ListStatement represents a HCL List type -type ListType struct { - Lbrack token.Pos // position of "[" - Rbrack token.Pos // position of "]" - List []Node // the elements in lexical order -} - -func (l *ListType) Pos() token.Pos { - return l.Lbrack -} - -func (l *ListType) Add(node Node) { - l.List = append(l.List, node) -} - -// ObjectType represents a HCL Object Type -type ObjectType struct { - Lbrace token.Pos // position of "{" - Rbrace token.Pos // position of "}" - List *ObjectList // the nodes in lexical order -} - -func (o *ObjectType) Pos() token.Pos { - return o.Lbrace -} - -// Comment node represents a single //, # style or /*- style commment -type Comment struct { - Start token.Pos // position of / or # - Text string -} - -func (c *Comment) Pos() token.Pos { - return c.Start -} - -// CommentGroup node represents a sequence of comments with no other tokens and -// no empty lines between. -type CommentGroup struct { - List []*Comment // len(List) > 0 -} - -func (c *CommentGroup) Pos() token.Pos { - return c.List[0].Pos() -} - -//------------------------------------------------------------------- -// GoStringer -//------------------------------------------------------------------- - -func (o *ObjectKey) GoString() string { return fmt.Sprintf("*%#v", *o) } -func (o *ObjectList) GoString() string { return fmt.Sprintf("*%#v", *o) } diff --git a/vendor/github.com/hashicorp/hcl/hcl/ast/walk.go b/vendor/github.com/hashicorp/hcl/hcl/ast/walk.go deleted file mode 100644 index ba07ad42b..000000000 --- a/vendor/github.com/hashicorp/hcl/hcl/ast/walk.go +++ /dev/null @@ -1,52 +0,0 @@ -package ast - -import "fmt" - -// WalkFunc describes a function to be called for each node during a Walk. The -// returned node can be used to rewrite the AST. Walking stops the returned -// bool is false. -type WalkFunc func(Node) (Node, bool) - -// Walk traverses an AST in depth-first order: It starts by calling fn(node); -// node must not be nil. If fn returns true, Walk invokes fn recursively for -// each of the non-nil children of node, followed by a call of fn(nil). The -// returned node of fn can be used to rewrite the passed node to fn. 
-func Walk(node Node, fn WalkFunc) Node { - rewritten, ok := fn(node) - if !ok { - return rewritten - } - - switch n := node.(type) { - case *File: - n.Node = Walk(n.Node, fn) - case *ObjectList: - for i, item := range n.Items { - n.Items[i] = Walk(item, fn).(*ObjectItem) - } - case *ObjectKey: - // nothing to do - case *ObjectItem: - for i, k := range n.Keys { - n.Keys[i] = Walk(k, fn).(*ObjectKey) - } - - if n.Val != nil { - n.Val = Walk(n.Val, fn) - } - case *LiteralType: - // nothing to do - case *ListType: - for i, l := range n.List { - n.List[i] = Walk(l, fn) - } - case *ObjectType: - n.List = Walk(n.List, fn).(*ObjectList) - default: - // should we panic here? - fmt.Printf("unknown type: %T\n", n) - } - - fn(nil) - return rewritten -} diff --git a/vendor/github.com/hashicorp/hcl/hcl/parser/error.go b/vendor/github.com/hashicorp/hcl/hcl/parser/error.go deleted file mode 100644 index 5c99381df..000000000 --- a/vendor/github.com/hashicorp/hcl/hcl/parser/error.go +++ /dev/null @@ -1,17 +0,0 @@ -package parser - -import ( - "fmt" - - "github.com/hashicorp/hcl/hcl/token" -) - -// PosError is a parse error that contains a position. -type PosError struct { - Pos token.Pos - Err error -} - -func (e *PosError) Error() string { - return fmt.Sprintf("At %s: %s", e.Pos, e.Err) -} diff --git a/vendor/github.com/hashicorp/hcl/hcl/parser/parser.go b/vendor/github.com/hashicorp/hcl/hcl/parser/parser.go deleted file mode 100644 index 64c83bcfb..000000000 --- a/vendor/github.com/hashicorp/hcl/hcl/parser/parser.go +++ /dev/null @@ -1,532 +0,0 @@ -// Package parser implements a parser for HCL (HashiCorp Configuration -// Language) -package parser - -import ( - "bytes" - "errors" - "fmt" - "strings" - - "github.com/hashicorp/hcl/hcl/ast" - "github.com/hashicorp/hcl/hcl/scanner" - "github.com/hashicorp/hcl/hcl/token" -) - -type Parser struct { - sc *scanner.Scanner - - // Last read token - tok token.Token - commaPrev token.Token - - comments []*ast.CommentGroup - leadComment *ast.CommentGroup // last lead comment - lineComment *ast.CommentGroup // last line comment - - enableTrace bool - indent int - n int // buffer size (max = 1) -} - -func newParser(src []byte) *Parser { - return &Parser{ - sc: scanner.New(src), - } -} - -// Parse returns the fully parsed source and returns the abstract syntax tree. -func Parse(src []byte) (*ast.File, error) { - // normalize all line endings - // since the scanner and output only work with "\n" line endings, we may - // end up with dangling "\r" characters in the parsed data. - src = bytes.Replace(src, []byte("\r\n"), []byte("\n"), -1) - - p := newParser(src) - return p.Parse() -} - -var errEofToken = errors.New("EOF token found") - -// Parse returns the fully parsed source and returns the abstract syntax tree. -func (p *Parser) Parse() (*ast.File, error) { - f := &ast.File{} - var err, scerr error - p.sc.Error = func(pos token.Pos, msg string) { - scerr = &PosError{Pos: pos, Err: errors.New(msg)} - } - - f.Node, err = p.objectList(false) - if scerr != nil { - return nil, scerr - } - if err != nil { - return nil, err - } - - f.Comments = p.comments - return f, nil -} - -// objectList parses a list of items within an object (generally k/v pairs). -// The parameter" obj" tells this whether to we are within an object (braces: -// '{', '}') or just at the top level. If we're within an object, we end -// at an RBRACE. 
-func (p *Parser) objectList(obj bool) (*ast.ObjectList, error) { - defer un(trace(p, "ParseObjectList")) - node := &ast.ObjectList{} - - for { - if obj { - tok := p.scan() - p.unscan() - if tok.Type == token.RBRACE { - break - } - } - - n, err := p.objectItem() - if err == errEofToken { - break // we are finished - } - - // we don't return a nil node, because might want to use already - // collected items. - if err != nil { - return node, err - } - - node.Add(n) - - // object lists can be optionally comma-delimited e.g. when a list of maps - // is being expressed, so a comma is allowed here - it's simply consumed - tok := p.scan() - if tok.Type != token.COMMA { - p.unscan() - } - } - return node, nil -} - -func (p *Parser) consumeComment() (comment *ast.Comment, endline int) { - endline = p.tok.Pos.Line - - // count the endline if it's multiline comment, ie starting with /* - if len(p.tok.Text) > 1 && p.tok.Text[1] == '*' { - // don't use range here - no need to decode Unicode code points - for i := 0; i < len(p.tok.Text); i++ { - if p.tok.Text[i] == '\n' { - endline++ - } - } - } - - comment = &ast.Comment{Start: p.tok.Pos, Text: p.tok.Text} - p.tok = p.sc.Scan() - return -} - -func (p *Parser) consumeCommentGroup(n int) (comments *ast.CommentGroup, endline int) { - var list []*ast.Comment - endline = p.tok.Pos.Line - - for p.tok.Type == token.COMMENT && p.tok.Pos.Line <= endline+n { - var comment *ast.Comment - comment, endline = p.consumeComment() - list = append(list, comment) - } - - // add comment group to the comments list - comments = &ast.CommentGroup{List: list} - p.comments = append(p.comments, comments) - - return -} - -// objectItem parses a single object item -func (p *Parser) objectItem() (*ast.ObjectItem, error) { - defer un(trace(p, "ParseObjectItem")) - - keys, err := p.objectKey() - if len(keys) > 0 && err == errEofToken { - // We ignore eof token here since it is an error if we didn't - // receive a value (but we did receive a key) for the item. - err = nil - } - if len(keys) > 0 && err != nil && p.tok.Type == token.RBRACE { - // This is a strange boolean statement, but what it means is: - // We have keys with no value, and we're likely in an object - // (since RBrace ends an object). For this, we set err to nil so - // we continue and get the error below of having the wrong value - // type. - err = nil - - // Reset the token type so we don't think it completed fine. See - // objectType which uses p.tok.Type to check if we're done with - // the object. 
- p.tok.Type = token.EOF - } - if err != nil { - return nil, err - } - - o := &ast.ObjectItem{ - Keys: keys, - } - - if p.leadComment != nil { - o.LeadComment = p.leadComment - p.leadComment = nil - } - - switch p.tok.Type { - case token.ASSIGN: - o.Assign = p.tok.Pos - o.Val, err = p.object() - if err != nil { - return nil, err - } - case token.LBRACE: - o.Val, err = p.objectType() - if err != nil { - return nil, err - } - default: - keyStr := make([]string, 0, len(keys)) - for _, k := range keys { - keyStr = append(keyStr, k.Token.Text) - } - - return nil, &PosError{ - Pos: p.tok.Pos, - Err: fmt.Errorf( - "key '%s' expected start of object ('{') or assignment ('=')", - strings.Join(keyStr, " ")), - } - } - - // key=#comment - // val - if p.lineComment != nil { - o.LineComment, p.lineComment = p.lineComment, nil - } - - // do a look-ahead for line comment - p.scan() - if len(keys) > 0 && o.Val.Pos().Line == keys[0].Pos().Line && p.lineComment != nil { - o.LineComment = p.lineComment - p.lineComment = nil - } - p.unscan() - return o, nil -} - -// objectKey parses an object key and returns a ObjectKey AST -func (p *Parser) objectKey() ([]*ast.ObjectKey, error) { - keyCount := 0 - keys := make([]*ast.ObjectKey, 0) - - for { - tok := p.scan() - switch tok.Type { - case token.EOF: - // It is very important to also return the keys here as well as - // the error. This is because we need to be able to tell if we - // did parse keys prior to finding the EOF, or if we just found - // a bare EOF. - return keys, errEofToken - case token.ASSIGN: - // assignment or object only, but not nested objects. this is not - // allowed: `foo bar = {}` - if keyCount > 1 { - return nil, &PosError{ - Pos: p.tok.Pos, - Err: fmt.Errorf("nested object expected: LBRACE got: %s", p.tok.Type), - } - } - - if keyCount == 0 { - return nil, &PosError{ - Pos: p.tok.Pos, - Err: errors.New("no object keys found!"), - } - } - - return keys, nil - case token.LBRACE: - var err error - - // If we have no keys, then it is a syntax error. i.e. {{}} is not - // allowed. - if len(keys) == 0 { - err = &PosError{ - Pos: p.tok.Pos, - Err: fmt.Errorf("expected: IDENT | STRING got: %s", p.tok.Type), - } - } - - // object - return keys, err - case token.IDENT, token.STRING: - keyCount++ - keys = append(keys, &ast.ObjectKey{Token: p.tok}) - case token.ILLEGAL: - return keys, &PosError{ - Pos: p.tok.Pos, - Err: fmt.Errorf("illegal character"), - } - default: - return keys, &PosError{ - Pos: p.tok.Pos, - Err: fmt.Errorf("expected: IDENT | STRING | ASSIGN | LBRACE got: %s", p.tok.Type), - } - } - } -} - -// object parses any type of object, such as number, bool, string, object or -// list. 
-func (p *Parser) object() (ast.Node, error) { - defer un(trace(p, "ParseType")) - tok := p.scan() - - switch tok.Type { - case token.NUMBER, token.FLOAT, token.BOOL, token.STRING, token.HEREDOC: - return p.literalType() - case token.LBRACE: - return p.objectType() - case token.LBRACK: - return p.listType() - case token.COMMENT: - // implement comment - case token.EOF: - return nil, errEofToken - } - - return nil, &PosError{ - Pos: tok.Pos, - Err: fmt.Errorf("Unknown token: %+v", tok), - } -} - -// objectType parses an object type and returns a ObjectType AST -func (p *Parser) objectType() (*ast.ObjectType, error) { - defer un(trace(p, "ParseObjectType")) - - // we assume that the currently scanned token is a LBRACE - o := &ast.ObjectType{ - Lbrace: p.tok.Pos, - } - - l, err := p.objectList(true) - - // if we hit RBRACE, we are good to go (means we parsed all Items), if it's - // not a RBRACE, it's an syntax error and we just return it. - if err != nil && p.tok.Type != token.RBRACE { - return nil, err - } - - // No error, scan and expect the ending to be a brace - if tok := p.scan(); tok.Type != token.RBRACE { - return nil, &PosError{ - Pos: tok.Pos, - Err: fmt.Errorf("object expected closing RBRACE got: %s", tok.Type), - } - } - - o.List = l - o.Rbrace = p.tok.Pos // advanced via parseObjectList - return o, nil -} - -// listType parses a list type and returns a ListType AST -func (p *Parser) listType() (*ast.ListType, error) { - defer un(trace(p, "ParseListType")) - - // we assume that the currently scanned token is a LBRACK - l := &ast.ListType{ - Lbrack: p.tok.Pos, - } - - needComma := false - for { - tok := p.scan() - if needComma { - switch tok.Type { - case token.COMMA, token.RBRACK: - default: - return nil, &PosError{ - Pos: tok.Pos, - Err: fmt.Errorf( - "error parsing list, expected comma or list end, got: %s", - tok.Type), - } - } - } - switch tok.Type { - case token.BOOL, token.NUMBER, token.FLOAT, token.STRING, token.HEREDOC: - node, err := p.literalType() - if err != nil { - return nil, err - } - - // If there is a lead comment, apply it - if p.leadComment != nil { - node.LeadComment = p.leadComment - p.leadComment = nil - } - - l.Add(node) - needComma = true - case token.COMMA: - // get next list item or we are at the end - // do a look-ahead for line comment - p.scan() - if p.lineComment != nil && len(l.List) > 0 { - lit, ok := l.List[len(l.List)-1].(*ast.LiteralType) - if ok { - lit.LineComment = p.lineComment - l.List[len(l.List)-1] = lit - p.lineComment = nil - } - } - p.unscan() - - needComma = false - continue - case token.LBRACE: - // Looks like a nested object, so parse it out - node, err := p.objectType() - if err != nil { - return nil, &PosError{ - Pos: tok.Pos, - Err: fmt.Errorf( - "error while trying to parse object within list: %s", err), - } - } - l.Add(node) - needComma = true - case token.LBRACK: - node, err := p.listType() - if err != nil { - return nil, &PosError{ - Pos: tok.Pos, - Err: fmt.Errorf( - "error while trying to parse list within list: %s", err), - } - } - l.Add(node) - case token.RBRACK: - // finished - l.Rbrack = p.tok.Pos - return l, nil - default: - return nil, &PosError{ - Pos: tok.Pos, - Err: fmt.Errorf("unexpected token while parsing list: %s", tok.Type), - } - } - } -} - -// literalType parses a literal type and returns a LiteralType AST -func (p *Parser) literalType() (*ast.LiteralType, error) { - defer un(trace(p, "ParseLiteral")) - - return &ast.LiteralType{ - Token: p.tok, - }, nil -} - -// scan returns the next token from the 
underlying scanner. If a token has -// been unscanned then read that instead. In the process, it collects any -// comment groups encountered, and remembers the last lead and line comments. -func (p *Parser) scan() token.Token { - // If we have a token on the buffer, then return it. - if p.n != 0 { - p.n = 0 - return p.tok - } - - // Otherwise read the next token from the scanner and Save it to the buffer - // in case we unscan later. - prev := p.tok - p.tok = p.sc.Scan() - - if p.tok.Type == token.COMMENT { - var comment *ast.CommentGroup - var endline int - - // fmt.Printf("p.tok.Pos.Line = %+v prev: %d endline %d \n", - // p.tok.Pos.Line, prev.Pos.Line, endline) - if p.tok.Pos.Line == prev.Pos.Line { - // The comment is on same line as the previous token; it - // cannot be a lead comment but may be a line comment. - comment, endline = p.consumeCommentGroup(0) - if p.tok.Pos.Line != endline { - // The next token is on a different line, thus - // the last comment group is a line comment. - p.lineComment = comment - } - } - - // consume successor comments, if any - endline = -1 - for p.tok.Type == token.COMMENT { - comment, endline = p.consumeCommentGroup(1) - } - - if endline+1 == p.tok.Pos.Line && p.tok.Type != token.RBRACE { - switch p.tok.Type { - case token.RBRACE, token.RBRACK: - // Do not count for these cases - default: - // The next token is following on the line immediately after the - // comment group, thus the last comment group is a lead comment. - p.leadComment = comment - } - } - - } - - return p.tok -} - -// unscan pushes the previously read token back onto the buffer. -func (p *Parser) unscan() { - p.n = 1 -} - -// ---------------------------------------------------------------------------- -// Parsing support - -func (p *Parser) printTrace(a ...interface{}) { - if !p.enableTrace { - return - } - - const dots = ". . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . " - const n = len(dots) - fmt.Printf("%5d:%3d: ", p.tok.Pos.Line, p.tok.Pos.Column) - - i := 2 * p.indent - for i > n { - fmt.Print(dots) - i -= n - } - // i <= n - fmt.Print(dots[0:i]) - fmt.Println(a...) 
-} - -func trace(p *Parser, msg string) *Parser { - p.printTrace(msg, "(") - p.indent++ - return p -} - -// Usage pattern: defer un(trace(p, "...")) -func un(p *Parser) { - p.indent-- - p.printTrace(")") -} diff --git a/vendor/github.com/hashicorp/hcl/hcl/printer/nodes.go b/vendor/github.com/hashicorp/hcl/hcl/printer/nodes.go deleted file mode 100644 index 7c038d12a..000000000 --- a/vendor/github.com/hashicorp/hcl/hcl/printer/nodes.go +++ /dev/null @@ -1,789 +0,0 @@ -package printer - -import ( - "bytes" - "fmt" - "sort" - - "github.com/hashicorp/hcl/hcl/ast" - "github.com/hashicorp/hcl/hcl/token" -) - -const ( - blank = byte(' ') - newline = byte('\n') - tab = byte('\t') - infinity = 1 << 30 // offset or line -) - -var ( - unindent = []byte("\uE123") // in the private use space -) - -type printer struct { - cfg Config - prev token.Pos - - comments []*ast.CommentGroup // may be nil, contains all comments - standaloneComments []*ast.CommentGroup // contains all standalone comments (not assigned to any node) - - enableTrace bool - indentTrace int -} - -type ByPosition []*ast.CommentGroup - -func (b ByPosition) Len() int { return len(b) } -func (b ByPosition) Swap(i, j int) { b[i], b[j] = b[j], b[i] } -func (b ByPosition) Less(i, j int) bool { return b[i].Pos().Before(b[j].Pos()) } - -// collectComments comments all standalone comments which are not lead or line -// comment -func (p *printer) collectComments(node ast.Node) { - // first collect all comments. This is already stored in - // ast.File.(comments) - ast.Walk(node, func(nn ast.Node) (ast.Node, bool) { - switch t := nn.(type) { - case *ast.File: - p.comments = t.Comments - return nn, false - } - return nn, true - }) - - standaloneComments := make(map[token.Pos]*ast.CommentGroup, 0) - for _, c := range p.comments { - standaloneComments[c.Pos()] = c - } - - // next remove all lead and line comments from the overall comment map. - // This will give us comments which are standalone, comments which are not - // assigned to any kind of node. - ast.Walk(node, func(nn ast.Node) (ast.Node, bool) { - switch t := nn.(type) { - case *ast.LiteralType: - if t.LeadComment != nil { - for _, comment := range t.LeadComment.List { - if _, ok := standaloneComments[comment.Pos()]; ok { - delete(standaloneComments, comment.Pos()) - } - } - } - - if t.LineComment != nil { - for _, comment := range t.LineComment.List { - if _, ok := standaloneComments[comment.Pos()]; ok { - delete(standaloneComments, comment.Pos()) - } - } - } - case *ast.ObjectItem: - if t.LeadComment != nil { - for _, comment := range t.LeadComment.List { - if _, ok := standaloneComments[comment.Pos()]; ok { - delete(standaloneComments, comment.Pos()) - } - } - } - - if t.LineComment != nil { - for _, comment := range t.LineComment.List { - if _, ok := standaloneComments[comment.Pos()]; ok { - delete(standaloneComments, comment.Pos()) - } - } - } - } - - return nn, true - }) - - for _, c := range standaloneComments { - p.standaloneComments = append(p.standaloneComments, c) - } - - sort.Sort(ByPosition(p.standaloneComments)) -} - -// output prints creates b printable HCL output and returns it. -func (p *printer) output(n interface{}) []byte { - var buf bytes.Buffer - - switch t := n.(type) { - case *ast.File: - // File doesn't trace so we add the tracing here - defer un(trace(p, "File")) - return p.output(t.Node) - case *ast.ObjectList: - defer un(trace(p, "ObjectList")) - - var index int - for { - // Determine the location of the next actual non-comment - // item. 
If we're at the end, the next item is at "infinity" - var nextItem token.Pos - if index != len(t.Items) { - nextItem = t.Items[index].Pos() - } else { - nextItem = token.Pos{Offset: infinity, Line: infinity} - } - - // Go through the standalone comments in the file and print out - // the comments that we should be for this object item. - for _, c := range p.standaloneComments { - // Go through all the comments in the group. The group - // should be printed together, not separated by double newlines. - printed := false - newlinePrinted := false - for _, comment := range c.List { - // We only care about comments after the previous item - // we've printed so that comments are printed in the - // correct locations (between two objects for example). - // And before the next item. - if comment.Pos().After(p.prev) && comment.Pos().Before(nextItem) { - // if we hit the end add newlines so we can print the comment - // we don't do this if prev is invalid which means the - // beginning of the file since the first comment should - // be at the first line. - if !newlinePrinted && p.prev.IsValid() && index == len(t.Items) { - buf.Write([]byte{newline, newline}) - newlinePrinted = true - } - - // Write the actual comment. - buf.WriteString(comment.Text) - buf.WriteByte(newline) - - // Set printed to true to note that we printed something - printed = true - } - } - - // If we're not at the last item, write a new line so - // that there is a newline separating this comment from - // the next object. - if printed && index != len(t.Items) { - buf.WriteByte(newline) - } - } - - if index == len(t.Items) { - break - } - - buf.Write(p.output(t.Items[index])) - if index != len(t.Items)-1 { - // Always write a newline to separate us from the next item - buf.WriteByte(newline) - - // Need to determine if we're going to separate the next item - // with a blank line. The logic here is simple, though there - // are a few conditions: - // - // 1. The next object is more than one line away anyways, - // so we need an empty line. - // - // 2. The next object is not a "single line" object, so - // we need an empty line. - // - // 3. This current object is not a single line object, - // so we need an empty line. - current := t.Items[index] - next := t.Items[index+1] - if next.Pos().Line != t.Items[index].Pos().Line+1 || - !p.isSingleLineObject(next) || - !p.isSingleLineObject(current) { - buf.WriteByte(newline) - } - } - index++ - } - case *ast.ObjectKey: - buf.WriteString(t.Token.Text) - case *ast.ObjectItem: - p.prev = t.Pos() - buf.Write(p.objectItem(t)) - case *ast.LiteralType: - buf.Write(p.literalType(t)) - case *ast.ListType: - buf.Write(p.list(t)) - case *ast.ObjectType: - buf.Write(p.objectType(t)) - default: - fmt.Printf(" unknown type: %T\n", n) - } - - return buf.Bytes() -} - -func (p *printer) literalType(lit *ast.LiteralType) []byte { - result := []byte(lit.Token.Text) - switch lit.Token.Type { - case token.HEREDOC: - // Clear the trailing newline from heredocs - if result[len(result)-1] == '\n' { - result = result[:len(result)-1] - } - - // Poison lines 2+ so that we don't indent them - result = p.heredocIndent(result) - case token.STRING: - // If this is a multiline string, poison lines 2+ so we don't - // indent them. - if bytes.IndexRune(result, '\n') >= 0 { - result = p.heredocIndent(result) - } - } - - return result -} - -// objectItem returns the printable HCL form of an object item. An object type -// starts with one/multiple keys and has a value. The value might be of any -// type. 
-func (p *printer) objectItem(o *ast.ObjectItem) []byte { - defer un(trace(p, fmt.Sprintf("ObjectItem: %s", o.Keys[0].Token.Text))) - var buf bytes.Buffer - - if o.LeadComment != nil { - for _, comment := range o.LeadComment.List { - buf.WriteString(comment.Text) - buf.WriteByte(newline) - } - } - - // If key and val are on different lines, treat line comments like lead comments. - if o.LineComment != nil && o.Val.Pos().Line != o.Keys[0].Pos().Line { - for _, comment := range o.LineComment.List { - buf.WriteString(comment.Text) - buf.WriteByte(newline) - } - } - - for i, k := range o.Keys { - buf.WriteString(k.Token.Text) - buf.WriteByte(blank) - - // reach end of key - if o.Assign.IsValid() && i == len(o.Keys)-1 && len(o.Keys) == 1 { - buf.WriteString("=") - buf.WriteByte(blank) - } - } - - buf.Write(p.output(o.Val)) - - if o.LineComment != nil && o.Val.Pos().Line == o.Keys[0].Pos().Line { - buf.WriteByte(blank) - for _, comment := range o.LineComment.List { - buf.WriteString(comment.Text) - } - } - - return buf.Bytes() -} - -// objectType returns the printable HCL form of an object type. An object type -// begins with a brace and ends with a brace. -func (p *printer) objectType(o *ast.ObjectType) []byte { - defer un(trace(p, "ObjectType")) - var buf bytes.Buffer - buf.WriteString("{") - - var index int - var nextItem token.Pos - var commented, newlinePrinted bool - for { - // Determine the location of the next actual non-comment - // item. If we're at the end, the next item is the closing brace - if index != len(o.List.Items) { - nextItem = o.List.Items[index].Pos() - } else { - nextItem = o.Rbrace - } - - // Go through the standalone comments in the file and print out - // the comments that we should be for this object item. - for _, c := range p.standaloneComments { - printed := false - var lastCommentPos token.Pos - for _, comment := range c.List { - // We only care about comments after the previous item - // we've printed so that comments are printed in the - // correct locations (between two objects for example). - // And before the next item. - if comment.Pos().After(p.prev) && comment.Pos().Before(nextItem) { - // If there are standalone comments and the initial newline has not - // been printed yet, do it now. - if !newlinePrinted { - newlinePrinted = true - buf.WriteByte(newline) - } - - // add newline if it's between other printed nodes - if index > 0 { - commented = true - buf.WriteByte(newline) - } - - // Store this position - lastCommentPos = comment.Pos() - - // output the comment itself - buf.Write(p.indent(p.heredocIndent([]byte(comment.Text)))) - - // Set printed to true to note that we printed something - printed = true - - /* - if index != len(o.List.Items) { - buf.WriteByte(newline) // do not print on the end - } - */ - } - } - - // Stuff to do if we had comments - if printed { - // Always write a newline - buf.WriteByte(newline) - - // If there is another item in the object and our comment - // didn't hug it directly, then make sure there is a blank - // line separating them. - if nextItem != o.Rbrace && nextItem.Line != lastCommentPos.Line+1 { - buf.WriteByte(newline) - } - } - } - - if index == len(o.List.Items) { - p.prev = o.Rbrace - break - } - - // At this point we are sure that it's not a totally empty block: print - // the initial newline if it hasn't been printed yet by the previous - // block about standalone comments. - if !newlinePrinted { - buf.WriteByte(newline) - newlinePrinted = true - } - - // check if we have adjacent one liner items. 
If yes we'll going to align - // the comments. - var aligned []*ast.ObjectItem - for _, item := range o.List.Items[index:] { - // we don't group one line lists - if len(o.List.Items) == 1 { - break - } - - // one means a oneliner with out any lead comment - // two means a oneliner with lead comment - // anything else might be something else - cur := lines(string(p.objectItem(item))) - if cur > 2 { - break - } - - curPos := item.Pos() - - nextPos := token.Pos{} - if index != len(o.List.Items)-1 { - nextPos = o.List.Items[index+1].Pos() - } - - prevPos := token.Pos{} - if index != 0 { - prevPos = o.List.Items[index-1].Pos() - } - - // fmt.Println("DEBUG ----------------") - // fmt.Printf("prev = %+v prevPos: %s\n", prev, prevPos) - // fmt.Printf("cur = %+v curPos: %s\n", cur, curPos) - // fmt.Printf("next = %+v nextPos: %s\n", next, nextPos) - - if curPos.Line+1 == nextPos.Line { - aligned = append(aligned, item) - index++ - continue - } - - if curPos.Line-1 == prevPos.Line { - aligned = append(aligned, item) - index++ - - // finish if we have a new line or comment next. This happens - // if the next item is not adjacent - if curPos.Line+1 != nextPos.Line { - break - } - continue - } - - break - } - - // put newlines if the items are between other non aligned items. - // newlines are also added if there is a standalone comment already, so - // check it too - if !commented && index != len(aligned) { - buf.WriteByte(newline) - } - - if len(aligned) >= 1 { - p.prev = aligned[len(aligned)-1].Pos() - - items := p.alignedItems(aligned) - buf.Write(p.indent(items)) - } else { - p.prev = o.List.Items[index].Pos() - - buf.Write(p.indent(p.objectItem(o.List.Items[index]))) - index++ - } - - buf.WriteByte(newline) - } - - buf.WriteString("}") - return buf.Bytes() -} - -func (p *printer) alignedItems(items []*ast.ObjectItem) []byte { - var buf bytes.Buffer - - // find the longest key and value length, needed for alignment - var longestKeyLen int // longest key length - var longestValLen int // longest value length - for _, item := range items { - key := len(item.Keys[0].Token.Text) - val := len(p.output(item.Val)) - - if key > longestKeyLen { - longestKeyLen = key - } - - if val > longestValLen { - longestValLen = val - } - } - - for i, item := range items { - if item.LeadComment != nil { - for _, comment := range item.LeadComment.List { - buf.WriteString(comment.Text) - buf.WriteByte(newline) - } - } - - for i, k := range item.Keys { - keyLen := len(k.Token.Text) - buf.WriteString(k.Token.Text) - for i := 0; i < longestKeyLen-keyLen+1; i++ { - buf.WriteByte(blank) - } - - // reach end of key - if i == len(item.Keys)-1 && len(item.Keys) == 1 { - buf.WriteString("=") - buf.WriteByte(blank) - } - } - - val := p.output(item.Val) - valLen := len(val) - buf.Write(val) - - if item.Val.Pos().Line == item.Keys[0].Pos().Line && item.LineComment != nil { - for i := 0; i < longestValLen-valLen+1; i++ { - buf.WriteByte(blank) - } - - for _, comment := range item.LineComment.List { - buf.WriteString(comment.Text) - } - } - - // do not print for the last item - if i != len(items)-1 { - buf.WriteByte(newline) - } - } - - return buf.Bytes() -} - -// list returns the printable HCL form of an list type. 
-func (p *printer) list(l *ast.ListType) []byte { - if p.isSingleLineList(l) { - return p.singleLineList(l) - } - - var buf bytes.Buffer - buf.WriteString("[") - buf.WriteByte(newline) - - var longestLine int - for _, item := range l.List { - // for now we assume that the list only contains literal types - if lit, ok := item.(*ast.LiteralType); ok { - lineLen := len(lit.Token.Text) - if lineLen > longestLine { - longestLine = lineLen - } - } - } - - haveEmptyLine := false - for i, item := range l.List { - // If we have a lead comment, then we want to write that first - leadComment := false - if lit, ok := item.(*ast.LiteralType); ok && lit.LeadComment != nil { - leadComment = true - - // Ensure an empty line before every element with a - // lead comment (except the first item in a list). - if !haveEmptyLine && i != 0 { - buf.WriteByte(newline) - } - - for _, comment := range lit.LeadComment.List { - buf.Write(p.indent([]byte(comment.Text))) - buf.WriteByte(newline) - } - } - - // also indent each line - val := p.output(item) - curLen := len(val) - buf.Write(p.indent(val)) - - // if this item is a heredoc, then we output the comma on - // the next line. This is the only case this happens. - comma := []byte{','} - if lit, ok := item.(*ast.LiteralType); ok && lit.Token.Type == token.HEREDOC { - buf.WriteByte(newline) - comma = p.indent(comma) - } - - buf.Write(comma) - - if lit, ok := item.(*ast.LiteralType); ok && lit.LineComment != nil { - // if the next item doesn't have any comments, do not align - buf.WriteByte(blank) // align one space - for i := 0; i < longestLine-curLen; i++ { - buf.WriteByte(blank) - } - - for _, comment := range lit.LineComment.List { - buf.WriteString(comment.Text) - } - } - - buf.WriteByte(newline) - - // Ensure an empty line after every element with a - // lead comment (except the first item in a list). - haveEmptyLine = leadComment && i != len(l.List)-1 - if haveEmptyLine { - buf.WriteByte(newline) - } - } - - buf.WriteString("]") - return buf.Bytes() -} - -// isSingleLineList returns true if: -// * they were previously formatted entirely on one line -// * they consist entirely of literals -// * there are either no heredoc strings or the list has exactly one element -// * there are no line comments -func (printer) isSingleLineList(l *ast.ListType) bool { - for _, item := range l.List { - if item.Pos().Line != l.Lbrack.Line { - return false - } - - lit, ok := item.(*ast.LiteralType) - if !ok { - return false - } - - if lit.Token.Type == token.HEREDOC && len(l.List) != 1 { - return false - } - - if lit.LineComment != nil { - return false - } - } - - return true -} - -// singleLineList prints a simple single line list. -// For a definition of "simple", see isSingleLineList above. -func (p *printer) singleLineList(l *ast.ListType) []byte { - buf := &bytes.Buffer{} - - buf.WriteString("[") - for i, item := range l.List { - if i != 0 { - buf.WriteString(", ") - } - - // Output the item itself - buf.Write(p.output(item)) - - // The heredoc marker needs to be at the end of line. 
- if lit, ok := item.(*ast.LiteralType); ok && lit.Token.Type == token.HEREDOC { - buf.WriteByte(newline) - } - } - - buf.WriteString("]") - return buf.Bytes() -} - -// indent indents the lines of the given buffer for each non-empty line -func (p *printer) indent(buf []byte) []byte { - var prefix []byte - if p.cfg.SpacesWidth != 0 { - for i := 0; i < p.cfg.SpacesWidth; i++ { - prefix = append(prefix, blank) - } - } else { - prefix = []byte{tab} - } - - var res []byte - bol := true - for _, c := range buf { - if bol && c != '\n' { - res = append(res, prefix...) - } - - res = append(res, c) - bol = c == '\n' - } - return res -} - -// unindent removes all the indentation from the tombstoned lines -func (p *printer) unindent(buf []byte) []byte { - var res []byte - for i := 0; i < len(buf); i++ { - skip := len(buf)-i <= len(unindent) - if !skip { - skip = !bytes.Equal(unindent, buf[i:i+len(unindent)]) - } - if skip { - res = append(res, buf[i]) - continue - } - - // We have a marker. we have to backtrace here and clean out - // any whitespace ahead of our tombstone up to a \n - for j := len(res) - 1; j >= 0; j-- { - if res[j] == '\n' { - break - } - - res = res[:j] - } - - // Skip the entire unindent marker - i += len(unindent) - 1 - } - - return res -} - -// heredocIndent marks all the 2nd and further lines as unindentable -func (p *printer) heredocIndent(buf []byte) []byte { - var res []byte - bol := false - for _, c := range buf { - if bol && c != '\n' { - res = append(res, unindent...) - } - res = append(res, c) - bol = c == '\n' - } - return res -} - -// isSingleLineObject tells whether the given object item is a single -// line object such as "obj {}". -// -// A single line object: -// -// * has no lead comments (hence multi-line) -// * has no assignment -// * has no values in the stanza (within {}) -// -func (p *printer) isSingleLineObject(val *ast.ObjectItem) bool { - // If there is a lead comment, can't be one line - if val.LeadComment != nil { - return false - } - - // If there is assignment, we always break by line - if val.Assign.IsValid() { - return false - } - - // If it isn't an object type, then its not a single line object - ot, ok := val.Val.(*ast.ObjectType) - if !ok { - return false - } - - // If the object has no items, it is single line! - return len(ot.List.Items) == 0 -} - -func lines(txt string) int { - endline := 1 - for i := 0; i < len(txt); i++ { - if txt[i] == '\n' { - endline++ - } - } - return endline -} - -// ---------------------------------------------------------------------------- -// Tracing support - -func (p *printer) printTrace(a ...interface{}) { - if !p.enableTrace { - return - } - - const dots = ". . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . " - const n = len(dots) - i := 2 * p.indentTrace - for i > n { - fmt.Print(dots) - i -= n - } - // i <= n - fmt.Print(dots[0:i]) - fmt.Println(a...) -} - -func trace(p *printer, msg string) *printer { - p.printTrace(msg, "(") - p.indentTrace++ - return p -} - -// Usage pattern: defer un(trace(p, "...")) -func un(p *printer) { - p.indentTrace-- - p.printTrace(")") -} diff --git a/vendor/github.com/hashicorp/hcl/hcl/printer/printer.go b/vendor/github.com/hashicorp/hcl/hcl/printer/printer.go deleted file mode 100644 index 6617ab8e7..000000000 --- a/vendor/github.com/hashicorp/hcl/hcl/printer/printer.go +++ /dev/null @@ -1,66 +0,0 @@ -// Package printer implements printing of AST nodes to HCL format. 
-package printer - -import ( - "bytes" - "io" - "text/tabwriter" - - "github.com/hashicorp/hcl/hcl/ast" - "github.com/hashicorp/hcl/hcl/parser" -) - -var DefaultConfig = Config{ - SpacesWidth: 2, -} - -// A Config node controls the output of Fprint. -type Config struct { - SpacesWidth int // if set, it will use spaces instead of tabs for alignment -} - -func (c *Config) Fprint(output io.Writer, node ast.Node) error { - p := &printer{ - cfg: *c, - comments: make([]*ast.CommentGroup, 0), - standaloneComments: make([]*ast.CommentGroup, 0), - // enableTrace: true, - } - - p.collectComments(node) - - if _, err := output.Write(p.unindent(p.output(node))); err != nil { - return err - } - - // flush tabwriter, if any - var err error - if tw, _ := output.(*tabwriter.Writer); tw != nil { - err = tw.Flush() - } - - return err -} - -// Fprint "pretty-prints" an HCL node to output -// It calls Config.Fprint with default settings. -func Fprint(output io.Writer, node ast.Node) error { - return DefaultConfig.Fprint(output, node) -} - -// Format formats src HCL and returns the result. -func Format(src []byte) ([]byte, error) { - node, err := parser.Parse(src) - if err != nil { - return nil, err - } - - var buf bytes.Buffer - if err := DefaultConfig.Fprint(&buf, node); err != nil { - return nil, err - } - - // Add trailing newline to result - buf.WriteString("\n") - return buf.Bytes(), nil -} diff --git a/vendor/github.com/hashicorp/hcl/hcl/scanner/scanner.go b/vendor/github.com/hashicorp/hcl/hcl/scanner/scanner.go deleted file mode 100644 index 624a18fe3..000000000 --- a/vendor/github.com/hashicorp/hcl/hcl/scanner/scanner.go +++ /dev/null @@ -1,652 +0,0 @@ -// Package scanner implements a scanner for HCL (HashiCorp Configuration -// Language) source text. -package scanner - -import ( - "bytes" - "fmt" - "os" - "regexp" - "unicode" - "unicode/utf8" - - "github.com/hashicorp/hcl/hcl/token" -) - -// eof represents a marker rune for the end of the reader. -const eof = rune(0) - -// Scanner defines a lexical scanner -type Scanner struct { - buf *bytes.Buffer // Source buffer for advancing and scanning - src []byte // Source buffer for immutable access - - // Source Position - srcPos token.Pos // current position - prevPos token.Pos // previous position, used for peek() method - - lastCharLen int // length of last character in bytes - lastLineLen int // length of last line in characters (for correct column reporting) - - tokStart int // token text start position - tokEnd int // token text end position - - // Error is called for each error encountered. If no Error - // function is set, the error is reported to os.Stderr. - Error func(pos token.Pos, msg string) - - // ErrorCount is incremented by one for each error encountered. - ErrorCount int - - // tokPos is the start position of most recently scanned token; set by - // Scan. The Filename field is always left untouched by the Scanner. If - // an error is reported (via Error) and Position is invalid, the scanner is - // not inside a token. - tokPos token.Pos -} - -// New creates and initializes a new instance of Scanner using src as -// its source content. -func New(src []byte) *Scanner { - // even though we accept a src, we read from a io.Reader compatible type - // (*bytes.Buffer). So in the future we might easily change it to streaming - // read. - b := bytes.NewBuffer(src) - s := &Scanner{ - buf: b, - src: src, - } - - // srcPosition always starts with 1 - s.srcPos.Line = 1 - return s -} - -// next reads the next rune from the bufferred reader. 
Returns the rune(0) if -// an error occurs (or io.EOF is returned). -func (s *Scanner) next() rune { - ch, size, err := s.buf.ReadRune() - if err != nil { - // advance for error reporting - s.srcPos.Column++ - s.srcPos.Offset += size - s.lastCharLen = size - return eof - } - - // remember last position - s.prevPos = s.srcPos - - s.srcPos.Column++ - s.lastCharLen = size - s.srcPos.Offset += size - - if ch == utf8.RuneError && size == 1 { - s.err("illegal UTF-8 encoding") - return ch - } - - if ch == '\n' { - s.srcPos.Line++ - s.lastLineLen = s.srcPos.Column - s.srcPos.Column = 0 - } - - if ch == '\x00' { - s.err("unexpected null character (0x00)") - return eof - } - - if ch == '\uE123' { - s.err("unicode code point U+E123 reserved for internal use") - return utf8.RuneError - } - - // debug - // fmt.Printf("ch: %q, offset:column: %d:%d\n", ch, s.srcPos.Offset, s.srcPos.Column) - return ch -} - -// unread unreads the previous read Rune and updates the source position -func (s *Scanner) unread() { - if err := s.buf.UnreadRune(); err != nil { - panic(err) // this is user fault, we should catch it - } - s.srcPos = s.prevPos // put back last position -} - -// peek returns the next rune without advancing the reader. -func (s *Scanner) peek() rune { - peek, _, err := s.buf.ReadRune() - if err != nil { - return eof - } - - s.buf.UnreadRune() - return peek -} - -// Scan scans the next token and returns the token. -func (s *Scanner) Scan() token.Token { - ch := s.next() - - // skip white space - for isWhitespace(ch) { - ch = s.next() - } - - var tok token.Type - - // token text markings - s.tokStart = s.srcPos.Offset - s.lastCharLen - - // token position, initial next() is moving the offset by one(size of rune - // actually), though we are interested with the starting point - s.tokPos.Offset = s.srcPos.Offset - s.lastCharLen - if s.srcPos.Column > 0 { - // common case: last character was not a '\n' - s.tokPos.Line = s.srcPos.Line - s.tokPos.Column = s.srcPos.Column - } else { - // last character was a '\n' - // (we cannot be at the beginning of the source - // since we have called next() at least once) - s.tokPos.Line = s.srcPos.Line - 1 - s.tokPos.Column = s.lastLineLen - } - - switch { - case isLetter(ch): - tok = token.IDENT - lit := s.scanIdentifier() - if lit == "true" || lit == "false" { - tok = token.BOOL - } - case isDecimal(ch): - tok = s.scanNumber(ch) - default: - switch ch { - case eof: - tok = token.EOF - case '"': - tok = token.STRING - s.scanString() - case '#', '/': - tok = token.COMMENT - s.scanComment(ch) - case '.': - tok = token.PERIOD - ch = s.peek() - if isDecimal(ch) { - tok = token.FLOAT - ch = s.scanMantissa(ch) - ch = s.scanExponent(ch) - } - case '<': - tok = token.HEREDOC - s.scanHeredoc() - case '[': - tok = token.LBRACK - case ']': - tok = token.RBRACK - case '{': - tok = token.LBRACE - case '}': - tok = token.RBRACE - case ',': - tok = token.COMMA - case '=': - tok = token.ASSIGN - case '+': - tok = token.ADD - case '-': - if isDecimal(s.peek()) { - ch := s.next() - tok = s.scanNumber(ch) - } else { - tok = token.SUB - } - default: - s.err("illegal char") - } - } - - // finish token ending - s.tokEnd = s.srcPos.Offset - - // create token literal - var tokenText string - if s.tokStart >= 0 { - tokenText = string(s.src[s.tokStart:s.tokEnd]) - } - s.tokStart = s.tokEnd // ensure idempotency of tokenText() call - - return token.Token{ - Type: tok, - Pos: s.tokPos, - Text: tokenText, - } -} - -func (s *Scanner) scanComment(ch rune) { - // single line comments - if ch == '#' 
|| (ch == '/' && s.peek() != '*') { - if ch == '/' && s.peek() != '/' { - s.err("expected '/' for comment") - return - } - - ch = s.next() - for ch != '\n' && ch >= 0 && ch != eof { - ch = s.next() - } - if ch != eof && ch >= 0 { - s.unread() - } - return - } - - // be sure we get the character after /* This allows us to find comment's - // that are not erminated - if ch == '/' { - s.next() - ch = s.next() // read character after "/*" - } - - // look for /* - style comments - for { - if ch < 0 || ch == eof { - s.err("comment not terminated") - break - } - - ch0 := ch - ch = s.next() - if ch0 == '*' && ch == '/' { - break - } - } -} - -// scanNumber scans a HCL number definition starting with the given rune -func (s *Scanner) scanNumber(ch rune) token.Type { - if ch == '0' { - // check for hexadecimal, octal or float - ch = s.next() - if ch == 'x' || ch == 'X' { - // hexadecimal - ch = s.next() - found := false - for isHexadecimal(ch) { - ch = s.next() - found = true - } - - if !found { - s.err("illegal hexadecimal number") - } - - if ch != eof { - s.unread() - } - - return token.NUMBER - } - - // now it's either something like: 0421(octal) or 0.1231(float) - illegalOctal := false - for isDecimal(ch) { - ch = s.next() - if ch == '8' || ch == '9' { - // this is just a possibility. For example 0159 is illegal, but - // 0159.23 is valid. So we mark a possible illegal octal. If - // the next character is not a period, we'll print the error. - illegalOctal = true - } - } - - if ch == 'e' || ch == 'E' { - ch = s.scanExponent(ch) - return token.FLOAT - } - - if ch == '.' { - ch = s.scanFraction(ch) - - if ch == 'e' || ch == 'E' { - ch = s.next() - ch = s.scanExponent(ch) - } - return token.FLOAT - } - - if illegalOctal { - s.err("illegal octal number") - } - - if ch != eof { - s.unread() - } - return token.NUMBER - } - - s.scanMantissa(ch) - ch = s.next() // seek forward - if ch == 'e' || ch == 'E' { - ch = s.scanExponent(ch) - return token.FLOAT - } - - if ch == '.' { - ch = s.scanFraction(ch) - if ch == 'e' || ch == 'E' { - ch = s.next() - ch = s.scanExponent(ch) - } - return token.FLOAT - } - - if ch != eof { - s.unread() - } - return token.NUMBER -} - -// scanMantissa scans the mantissa beginning from the rune. It returns the next -// non decimal rune. It's used to determine wheter it's a fraction or exponent. -func (s *Scanner) scanMantissa(ch rune) rune { - scanned := false - for isDecimal(ch) { - ch = s.next() - scanned = true - } - - if scanned && ch != eof { - s.unread() - } - return ch -} - -// scanFraction scans the fraction after the '.' rune -func (s *Scanner) scanFraction(ch rune) rune { - if ch == '.' { - ch = s.peek() // we peek just to see if we can move forward - ch = s.scanMantissa(ch) - } - return ch -} - -// scanExponent scans the remaining parts of an exponent after the 'e' or 'E' -// rune. 
-func (s *Scanner) scanExponent(ch rune) rune { - if ch == 'e' || ch == 'E' { - ch = s.next() - if ch == '-' || ch == '+' { - ch = s.next() - } - ch = s.scanMantissa(ch) - } - return ch -} - -// scanHeredoc scans a heredoc string -func (s *Scanner) scanHeredoc() { - // Scan the second '<' in example: '<= len(identBytes) && identRegexp.Match(s.src[lineStart:s.srcPos.Offset-s.lastCharLen]) { - break - } - - // Not an anchor match, record the start of a new line - lineStart = s.srcPos.Offset - } - - if ch == eof { - s.err("heredoc not terminated") - return - } - } - - return -} - -// scanString scans a quoted string -func (s *Scanner) scanString() { - braces := 0 - for { - // '"' opening already consumed - // read character after quote - ch := s.next() - - if (ch == '\n' && braces == 0) || ch < 0 || ch == eof { - s.err("literal not terminated") - return - } - - if ch == '"' && braces == 0 { - break - } - - // If we're going into a ${} then we can ignore quotes for awhile - if braces == 0 && ch == '$' && s.peek() == '{' { - braces++ - s.next() - } else if braces > 0 && ch == '{' { - braces++ - } - if braces > 0 && ch == '}' { - braces-- - } - - if ch == '\\' { - s.scanEscape() - } - } - - return -} - -// scanEscape scans an escape sequence -func (s *Scanner) scanEscape() rune { - // http://en.cppreference.com/w/cpp/language/escape - ch := s.next() // read character after '/' - switch ch { - case 'a', 'b', 'f', 'n', 'r', 't', 'v', '\\', '"': - // nothing to do - case '0', '1', '2', '3', '4', '5', '6', '7': - // octal notation - ch = s.scanDigits(ch, 8, 3) - case 'x': - // hexademical notation - ch = s.scanDigits(s.next(), 16, 2) - case 'u': - // universal character name - ch = s.scanDigits(s.next(), 16, 4) - case 'U': - // universal character name - ch = s.scanDigits(s.next(), 16, 8) - default: - s.err("illegal char escape") - } - return ch -} - -// scanDigits scans a rune with the given base for n times. For example an -// octal notation \184 would yield in scanDigits(ch, 8, 3) -func (s *Scanner) scanDigits(ch rune, base, n int) rune { - start := n - for n > 0 && digitVal(ch) < base { - ch = s.next() - if ch == eof { - // If we see an EOF, we halt any more scanning of digits - // immediately. - break - } - - n-- - } - if n > 0 { - s.err("illegal char escape") - } - - if n != start && ch != eof { - // we scanned all digits, put the last non digit char back, - // only if we read anything at all - s.unread() - } - - return ch -} - -// scanIdentifier scans an identifier and returns the literal string -func (s *Scanner) scanIdentifier() string { - offs := s.srcPos.Offset - s.lastCharLen - ch := s.next() - for isLetter(ch) || isDigit(ch) || ch == '-' || ch == '.' { - ch = s.next() - } - - if ch != eof { - s.unread() // we got identifier, put back latest char - } - - return string(s.src[offs:s.srcPos.Offset]) -} - -// recentPosition returns the position of the character immediately after the -// character or token returned by the last call to Scan. 
-func (s *Scanner) recentPosition() (pos token.Pos) { - pos.Offset = s.srcPos.Offset - s.lastCharLen - switch { - case s.srcPos.Column > 0: - // common case: last character was not a '\n' - pos.Line = s.srcPos.Line - pos.Column = s.srcPos.Column - case s.lastLineLen > 0: - // last character was a '\n' - // (we cannot be at the beginning of the source - // since we have called next() at least once) - pos.Line = s.srcPos.Line - 1 - pos.Column = s.lastLineLen - default: - // at the beginning of the source - pos.Line = 1 - pos.Column = 1 - } - return -} - -// err prints the error of any scanning to s.Error function. If the function is -// not defined, by default it prints them to os.Stderr -func (s *Scanner) err(msg string) { - s.ErrorCount++ - pos := s.recentPosition() - - if s.Error != nil { - s.Error(pos, msg) - return - } - - fmt.Fprintf(os.Stderr, "%s: %s\n", pos, msg) -} - -// isHexadecimal returns true if the given rune is a letter -func isLetter(ch rune) bool { - return 'a' <= ch && ch <= 'z' || 'A' <= ch && ch <= 'Z' || ch == '_' || ch >= 0x80 && unicode.IsLetter(ch) -} - -// isDigit returns true if the given rune is a decimal digit -func isDigit(ch rune) bool { - return '0' <= ch && ch <= '9' || ch >= 0x80 && unicode.IsDigit(ch) -} - -// isDecimal returns true if the given rune is a decimal number -func isDecimal(ch rune) bool { - return '0' <= ch && ch <= '9' -} - -// isHexadecimal returns true if the given rune is an hexadecimal number -func isHexadecimal(ch rune) bool { - return '0' <= ch && ch <= '9' || 'a' <= ch && ch <= 'f' || 'A' <= ch && ch <= 'F' -} - -// isWhitespace returns true if the rune is a space, tab, newline or carriage return -func isWhitespace(ch rune) bool { - return ch == ' ' || ch == '\t' || ch == '\n' || ch == '\r' -} - -// digitVal returns the integer value of a given octal,decimal or hexadecimal rune -func digitVal(ch rune) int { - switch { - case '0' <= ch && ch <= '9': - return int(ch - '0') - case 'a' <= ch && ch <= 'f': - return int(ch - 'a' + 10) - case 'A' <= ch && ch <= 'F': - return int(ch - 'A' + 10) - } - return 16 // larger than any legal digit val -} diff --git a/vendor/github.com/hashicorp/hcl/hcl/strconv/quote.go b/vendor/github.com/hashicorp/hcl/hcl/strconv/quote.go deleted file mode 100644 index 5f981eaa2..000000000 --- a/vendor/github.com/hashicorp/hcl/hcl/strconv/quote.go +++ /dev/null @@ -1,241 +0,0 @@ -package strconv - -import ( - "errors" - "unicode/utf8" -) - -// ErrSyntax indicates that a value does not have the right syntax for the target type. -var ErrSyntax = errors.New("invalid syntax") - -// Unquote interprets s as a single-quoted, double-quoted, -// or backquoted Go string literal, returning the string value -// that s quotes. (If s is single-quoted, it would be a Go -// character literal; Unquote returns the corresponding -// one-character string.) -func Unquote(s string) (t string, err error) { - n := len(s) - if n < 2 { - return "", ErrSyntax - } - quote := s[0] - if quote != s[n-1] { - return "", ErrSyntax - } - s = s[1 : n-1] - - if quote != '"' { - return "", ErrSyntax - } - if !contains(s, '$') && !contains(s, '{') && contains(s, '\n') { - return "", ErrSyntax - } - - // Is it trivial? Avoid allocation. 
- if !contains(s, '\\') && !contains(s, quote) && !contains(s, '$') { - switch quote { - case '"': - return s, nil - case '\'': - r, size := utf8.DecodeRuneInString(s) - if size == len(s) && (r != utf8.RuneError || size != 1) { - return s, nil - } - } - } - - var runeTmp [utf8.UTFMax]byte - buf := make([]byte, 0, 3*len(s)/2) // Try to avoid more allocations. - for len(s) > 0 { - // If we're starting a '${}' then let it through un-unquoted. - // Specifically: we don't unquote any characters within the `${}` - // section. - if s[0] == '$' && len(s) > 1 && s[1] == '{' { - buf = append(buf, '$', '{') - s = s[2:] - - // Continue reading until we find the closing brace, copying as-is - braces := 1 - for len(s) > 0 && braces > 0 { - r, size := utf8.DecodeRuneInString(s) - if r == utf8.RuneError { - return "", ErrSyntax - } - - s = s[size:] - - n := utf8.EncodeRune(runeTmp[:], r) - buf = append(buf, runeTmp[:n]...) - - switch r { - case '{': - braces++ - case '}': - braces-- - } - } - if braces != 0 { - return "", ErrSyntax - } - if len(s) == 0 { - // If there's no string left, we're done! - break - } else { - // If there's more left, we need to pop back up to the top of the loop - // in case there's another interpolation in this string. - continue - } - } - - if s[0] == '\n' { - return "", ErrSyntax - } - - c, multibyte, ss, err := unquoteChar(s, quote) - if err != nil { - return "", err - } - s = ss - if c < utf8.RuneSelf || !multibyte { - buf = append(buf, byte(c)) - } else { - n := utf8.EncodeRune(runeTmp[:], c) - buf = append(buf, runeTmp[:n]...) - } - if quote == '\'' && len(s) != 0 { - // single-quoted must be single character - return "", ErrSyntax - } - } - return string(buf), nil -} - -// contains reports whether the string contains the byte c. -func contains(s string, c byte) bool { - for i := 0; i < len(s); i++ { - if s[i] == c { - return true - } - } - return false -} - -func unhex(b byte) (v rune, ok bool) { - c := rune(b) - switch { - case '0' <= c && c <= '9': - return c - '0', true - case 'a' <= c && c <= 'f': - return c - 'a' + 10, true - case 'A' <= c && c <= 'F': - return c - 'A' + 10, true - } - return -} - -func unquoteChar(s string, quote byte) (value rune, multibyte bool, tail string, err error) { - // easy cases - switch c := s[0]; { - case c == quote && (quote == '\'' || quote == '"'): - err = ErrSyntax - return - case c >= utf8.RuneSelf: - r, size := utf8.DecodeRuneInString(s) - return r, true, s[size:], nil - case c != '\\': - return rune(s[0]), false, s[1:], nil - } - - // hard case: c is backslash - if len(s) <= 1 { - err = ErrSyntax - return - } - c := s[1] - s = s[2:] - - switch c { - case 'a': - value = '\a' - case 'b': - value = '\b' - case 'f': - value = '\f' - case 'n': - value = '\n' - case 'r': - value = '\r' - case 't': - value = '\t' - case 'v': - value = '\v' - case 'x', 'u', 'U': - n := 0 - switch c { - case 'x': - n = 2 - case 'u': - n = 4 - case 'U': - n = 8 - } - var v rune - if len(s) < n { - err = ErrSyntax - return - } - for j := 0; j < n; j++ { - x, ok := unhex(s[j]) - if !ok { - err = ErrSyntax - return - } - v = v<<4 | x - } - s = s[n:] - if c == 'x' { - // single-byte string, possibly not UTF-8 - value = v - break - } - if v > utf8.MaxRune { - err = ErrSyntax - return - } - value = v - multibyte = true - case '0', '1', '2', '3', '4', '5', '6', '7': - v := rune(c) - '0' - if len(s) < 2 { - err = ErrSyntax - return - } - for j := 0; j < 2; j++ { // one digit already; two more - x := rune(s[j]) - '0' - if x < 0 || x > 7 { - err = ErrSyntax - return 
- } - v = (v << 3) | x - } - s = s[2:] - if v > 255 { - err = ErrSyntax - return - } - value = v - case '\\': - value = '\\' - case '\'', '"': - if c != quote { - err = ErrSyntax - return - } - value = rune(c) - default: - err = ErrSyntax - return - } - tail = s - return -} diff --git a/vendor/github.com/hashicorp/hcl/hcl/token/position.go b/vendor/github.com/hashicorp/hcl/hcl/token/position.go deleted file mode 100644 index 59c1bb72d..000000000 --- a/vendor/github.com/hashicorp/hcl/hcl/token/position.go +++ /dev/null @@ -1,46 +0,0 @@ -package token - -import "fmt" - -// Pos describes an arbitrary source position -// including the file, line, and column location. -// A Position is valid if the line number is > 0. -type Pos struct { - Filename string // filename, if any - Offset int // offset, starting at 0 - Line int // line number, starting at 1 - Column int // column number, starting at 1 (character count) -} - -// IsValid returns true if the position is valid. -func (p *Pos) IsValid() bool { return p.Line > 0 } - -// String returns a string in one of several forms: -// -// file:line:column valid position with file name -// line:column valid position without file name -// file invalid position with file name -// - invalid position without file name -func (p Pos) String() string { - s := p.Filename - if p.IsValid() { - if s != "" { - s += ":" - } - s += fmt.Sprintf("%d:%d", p.Line, p.Column) - } - if s == "" { - s = "-" - } - return s -} - -// Before reports whether the position p is before u. -func (p Pos) Before(u Pos) bool { - return u.Offset > p.Offset || u.Line > p.Line -} - -// After reports whether the position p is after u. -func (p Pos) After(u Pos) bool { - return u.Offset < p.Offset || u.Line < p.Line -} diff --git a/vendor/github.com/hashicorp/hcl/hcl/token/token.go b/vendor/github.com/hashicorp/hcl/hcl/token/token.go deleted file mode 100644 index e37c0664e..000000000 --- a/vendor/github.com/hashicorp/hcl/hcl/token/token.go +++ /dev/null @@ -1,219 +0,0 @@ -// Package token defines constants representing the lexical tokens for HCL -// (HashiCorp Configuration Language) -package token - -import ( - "fmt" - "strconv" - "strings" - - hclstrconv "github.com/hashicorp/hcl/hcl/strconv" -) - -// Token defines a single HCL token which can be obtained via the Scanner -type Token struct { - Type Type - Pos Pos - Text string - JSON bool -} - -// Type is the set of lexical tokens of the HCL (HashiCorp Configuration Language) -type Type int - -const ( - // Special tokens - ILLEGAL Type = iota - EOF - COMMENT - - identifier_beg - IDENT // literals - literal_beg - NUMBER // 12345 - FLOAT // 123.45 - BOOL // true,false - STRING // "abc" - HEREDOC // < 0 { - // Pop the current item - n := len(frontier) - item := frontier[n-1] - frontier = frontier[:n-1] - - switch v := item.Val.(type) { - case *ast.ObjectType: - items, frontier = flattenObjectType(v, item, items, frontier) - case *ast.ListType: - items, frontier = flattenListType(v, item, items, frontier) - default: - items = append(items, item) - } - } - - // Reverse the list since the frontier model runs things backwards - for i := len(items)/2 - 1; i >= 0; i-- { - opp := len(items) - 1 - i - items[i], items[opp] = items[opp], items[i] - } - - // Done! 
Set the original items - list.Items = items - return n, true - }) -} - -func flattenListType( - ot *ast.ListType, - item *ast.ObjectItem, - items []*ast.ObjectItem, - frontier []*ast.ObjectItem) ([]*ast.ObjectItem, []*ast.ObjectItem) { - // If the list is empty, keep the original list - if len(ot.List) == 0 { - items = append(items, item) - return items, frontier - } - - // All the elements of this object must also be objects! - for _, subitem := range ot.List { - if _, ok := subitem.(*ast.ObjectType); !ok { - items = append(items, item) - return items, frontier - } - } - - // Great! We have a match go through all the items and flatten - for _, elem := range ot.List { - // Add it to the frontier so that we can recurse - frontier = append(frontier, &ast.ObjectItem{ - Keys: item.Keys, - Assign: item.Assign, - Val: elem, - LeadComment: item.LeadComment, - LineComment: item.LineComment, - }) - } - - return items, frontier -} - -func flattenObjectType( - ot *ast.ObjectType, - item *ast.ObjectItem, - items []*ast.ObjectItem, - frontier []*ast.ObjectItem) ([]*ast.ObjectItem, []*ast.ObjectItem) { - // If the list has no items we do not have to flatten anything - if ot.List.Items == nil { - items = append(items, item) - return items, frontier - } - - // All the elements of this object must also be objects! - for _, subitem := range ot.List.Items { - if _, ok := subitem.Val.(*ast.ObjectType); !ok { - items = append(items, item) - return items, frontier - } - } - - // Great! We have a match go through all the items and flatten - for _, subitem := range ot.List.Items { - // Copy the new key - keys := make([]*ast.ObjectKey, len(item.Keys)+len(subitem.Keys)) - copy(keys, item.Keys) - copy(keys[len(item.Keys):], subitem.Keys) - - // Add it to the frontier so that we can recurse - frontier = append(frontier, &ast.ObjectItem{ - Keys: keys, - Assign: item.Assign, - Val: subitem.Val, - LeadComment: item.LeadComment, - LineComment: item.LineComment, - }) - } - - return items, frontier -} diff --git a/vendor/github.com/hashicorp/hcl/json/parser/parser.go b/vendor/github.com/hashicorp/hcl/json/parser/parser.go deleted file mode 100644 index 125a5f072..000000000 --- a/vendor/github.com/hashicorp/hcl/json/parser/parser.go +++ /dev/null @@ -1,313 +0,0 @@ -package parser - -import ( - "errors" - "fmt" - - "github.com/hashicorp/hcl/hcl/ast" - hcltoken "github.com/hashicorp/hcl/hcl/token" - "github.com/hashicorp/hcl/json/scanner" - "github.com/hashicorp/hcl/json/token" -) - -type Parser struct { - sc *scanner.Scanner - - // Last read token - tok token.Token - commaPrev token.Token - - enableTrace bool - indent int - n int // buffer size (max = 1) -} - -func newParser(src []byte) *Parser { - return &Parser{ - sc: scanner.New(src), - } -} - -// Parse returns the fully parsed source and returns the abstract syntax tree. -func Parse(src []byte) (*ast.File, error) { - p := newParser(src) - return p.Parse() -} - -var errEofToken = errors.New("EOF token found") - -// Parse returns the fully parsed source and returns the abstract syntax tree. 
-func (p *Parser) Parse() (*ast.File, error) { - f := &ast.File{} - var err, scerr error - p.sc.Error = func(pos token.Pos, msg string) { - scerr = fmt.Errorf("%s: %s", pos, msg) - } - - // The root must be an object in JSON - object, err := p.object() - if scerr != nil { - return nil, scerr - } - if err != nil { - return nil, err - } - - // We make our final node an object list so it is more HCL compatible - f.Node = object.List - - // Flatten it, which finds patterns and turns them into more HCL-like - // AST trees. - flattenObjects(f.Node) - - return f, nil -} - -func (p *Parser) objectList() (*ast.ObjectList, error) { - defer un(trace(p, "ParseObjectList")) - node := &ast.ObjectList{} - - for { - n, err := p.objectItem() - if err == errEofToken { - break // we are finished - } - - // we don't return a nil node, because might want to use already - // collected items. - if err != nil { - return node, err - } - - node.Add(n) - - // Check for a followup comma. If it isn't a comma, then we're done - if tok := p.scan(); tok.Type != token.COMMA { - break - } - } - - return node, nil -} - -// objectItem parses a single object item -func (p *Parser) objectItem() (*ast.ObjectItem, error) { - defer un(trace(p, "ParseObjectItem")) - - keys, err := p.objectKey() - if err != nil { - return nil, err - } - - o := &ast.ObjectItem{ - Keys: keys, - } - - switch p.tok.Type { - case token.COLON: - pos := p.tok.Pos - o.Assign = hcltoken.Pos{ - Filename: pos.Filename, - Offset: pos.Offset, - Line: pos.Line, - Column: pos.Column, - } - - o.Val, err = p.objectValue() - if err != nil { - return nil, err - } - } - - return o, nil -} - -// objectKey parses an object key and returns a ObjectKey AST -func (p *Parser) objectKey() ([]*ast.ObjectKey, error) { - keyCount := 0 - keys := make([]*ast.ObjectKey, 0) - - for { - tok := p.scan() - switch tok.Type { - case token.EOF: - return nil, errEofToken - case token.STRING: - keyCount++ - keys = append(keys, &ast.ObjectKey{ - Token: p.tok.HCLToken(), - }) - case token.COLON: - // If we have a zero keycount it means that we never got - // an object key, i.e. `{ :`. This is a syntax error. - if keyCount == 0 { - return nil, fmt.Errorf("expected: STRING got: %s", p.tok.Type) - } - - // Done - return keys, nil - case token.ILLEGAL: - return nil, errors.New("illegal") - default: - return nil, fmt.Errorf("expected: STRING got: %s", p.tok.Type) - } - } -} - -// object parses any type of object, such as number, bool, string, object or -// list. -func (p *Parser) objectValue() (ast.Node, error) { - defer un(trace(p, "ParseObjectValue")) - tok := p.scan() - - switch tok.Type { - case token.NUMBER, token.FLOAT, token.BOOL, token.NULL, token.STRING: - return p.literalType() - case token.LBRACE: - return p.objectType() - case token.LBRACK: - return p.listType() - case token.EOF: - return nil, errEofToken - } - - return nil, fmt.Errorf("Expected object value, got unknown token: %+v", tok) -} - -// object parses any type of object, such as number, bool, string, object or -// list. 
-func (p *Parser) object() (*ast.ObjectType, error) { - defer un(trace(p, "ParseType")) - tok := p.scan() - - switch tok.Type { - case token.LBRACE: - return p.objectType() - case token.EOF: - return nil, errEofToken - } - - return nil, fmt.Errorf("Expected object, got unknown token: %+v", tok) -} - -// objectType parses an object type and returns a ObjectType AST -func (p *Parser) objectType() (*ast.ObjectType, error) { - defer un(trace(p, "ParseObjectType")) - - // we assume that the currently scanned token is a LBRACE - o := &ast.ObjectType{} - - l, err := p.objectList() - - // if we hit RBRACE, we are good to go (means we parsed all Items), if it's - // not a RBRACE, it's an syntax error and we just return it. - if err != nil && p.tok.Type != token.RBRACE { - return nil, err - } - - o.List = l - return o, nil -} - -// listType parses a list type and returns a ListType AST -func (p *Parser) listType() (*ast.ListType, error) { - defer un(trace(p, "ParseListType")) - - // we assume that the currently scanned token is a LBRACK - l := &ast.ListType{} - - for { - tok := p.scan() - switch tok.Type { - case token.NUMBER, token.FLOAT, token.STRING: - node, err := p.literalType() - if err != nil { - return nil, err - } - - l.Add(node) - case token.COMMA: - continue - case token.LBRACE: - node, err := p.objectType() - if err != nil { - return nil, err - } - - l.Add(node) - case token.BOOL: - // TODO(arslan) should we support? not supported by HCL yet - case token.LBRACK: - // TODO(arslan) should we support nested lists? Even though it's - // written in README of HCL, it's not a part of the grammar - // (not defined in parse.y) - case token.RBRACK: - // finished - return l, nil - default: - return nil, fmt.Errorf("unexpected token while parsing list: %s", tok.Type) - } - - } -} - -// literalType parses a literal type and returns a LiteralType AST -func (p *Parser) literalType() (*ast.LiteralType, error) { - defer un(trace(p, "ParseLiteral")) - - return &ast.LiteralType{ - Token: p.tok.HCLToken(), - }, nil -} - -// scan returns the next token from the underlying scanner. If a token has -// been unscanned then read that instead. -func (p *Parser) scan() token.Token { - // If we have a token on the buffer, then return it. - if p.n != 0 { - p.n = 0 - return p.tok - } - - p.tok = p.sc.Scan() - return p.tok -} - -// unscan pushes the previously read token back onto the buffer. -func (p *Parser) unscan() { - p.n = 1 -} - -// ---------------------------------------------------------------------------- -// Parsing support - -func (p *Parser) printTrace(a ...interface{}) { - if !p.enableTrace { - return - } - - const dots = ". . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . " - const n = len(dots) - fmt.Printf("%5d:%3d: ", p.tok.Pos.Line, p.tok.Pos.Column) - - i := 2 * p.indent - for i > n { - fmt.Print(dots) - i -= n - } - // i <= n - fmt.Print(dots[0:i]) - fmt.Println(a...) 
-} - -func trace(p *Parser, msg string) *Parser { - p.printTrace(msg, "(") - p.indent++ - return p -} - -// Usage pattern: defer un(trace(p, "...")) -func un(p *Parser) { - p.indent-- - p.printTrace(")") -} diff --git a/vendor/github.com/hashicorp/hcl/json/scanner/scanner.go b/vendor/github.com/hashicorp/hcl/json/scanner/scanner.go deleted file mode 100644 index fe3f0f095..000000000 --- a/vendor/github.com/hashicorp/hcl/json/scanner/scanner.go +++ /dev/null @@ -1,451 +0,0 @@ -package scanner - -import ( - "bytes" - "fmt" - "os" - "unicode" - "unicode/utf8" - - "github.com/hashicorp/hcl/json/token" -) - -// eof represents a marker rune for the end of the reader. -const eof = rune(0) - -// Scanner defines a lexical scanner -type Scanner struct { - buf *bytes.Buffer // Source buffer for advancing and scanning - src []byte // Source buffer for immutable access - - // Source Position - srcPos token.Pos // current position - prevPos token.Pos // previous position, used for peek() method - - lastCharLen int // length of last character in bytes - lastLineLen int // length of last line in characters (for correct column reporting) - - tokStart int // token text start position - tokEnd int // token text end position - - // Error is called for each error encountered. If no Error - // function is set, the error is reported to os.Stderr. - Error func(pos token.Pos, msg string) - - // ErrorCount is incremented by one for each error encountered. - ErrorCount int - - // tokPos is the start position of most recently scanned token; set by - // Scan. The Filename field is always left untouched by the Scanner. If - // an error is reported (via Error) and Position is invalid, the scanner is - // not inside a token. - tokPos token.Pos -} - -// New creates and initializes a new instance of Scanner using src as -// its source content. -func New(src []byte) *Scanner { - // even though we accept a src, we read from a io.Reader compatible type - // (*bytes.Buffer). So in the future we might easily change it to streaming - // read. - b := bytes.NewBuffer(src) - s := &Scanner{ - buf: b, - src: src, - } - - // srcPosition always starts with 1 - s.srcPos.Line = 1 - return s -} - -// next reads the next rune from the bufferred reader. Returns the rune(0) if -// an error occurs (or io.EOF is returned). -func (s *Scanner) next() rune { - ch, size, err := s.buf.ReadRune() - if err != nil { - // advance for error reporting - s.srcPos.Column++ - s.srcPos.Offset += size - s.lastCharLen = size - return eof - } - - if ch == utf8.RuneError && size == 1 { - s.srcPos.Column++ - s.srcPos.Offset += size - s.lastCharLen = size - s.err("illegal UTF-8 encoding") - return ch - } - - // remember last position - s.prevPos = s.srcPos - - s.srcPos.Column++ - s.lastCharLen = size - s.srcPos.Offset += size - - if ch == '\n' { - s.srcPos.Line++ - s.lastLineLen = s.srcPos.Column - s.srcPos.Column = 0 - } - - // debug - // fmt.Printf("ch: %q, offset:column: %d:%d\n", ch, s.srcPos.Offset, s.srcPos.Column) - return ch -} - -// unread unreads the previous read Rune and updates the source position -func (s *Scanner) unread() { - if err := s.buf.UnreadRune(); err != nil { - panic(err) // this is user fault, we should catch it - } - s.srcPos = s.prevPos // put back last position -} - -// peek returns the next rune without advancing the reader. -func (s *Scanner) peek() rune { - peek, _, err := s.buf.ReadRune() - if err != nil { - return eof - } - - s.buf.UnreadRune() - return peek -} - -// Scan scans the next token and returns the token. 
-func (s *Scanner) Scan() token.Token { - ch := s.next() - - // skip white space - for isWhitespace(ch) { - ch = s.next() - } - - var tok token.Type - - // token text markings - s.tokStart = s.srcPos.Offset - s.lastCharLen - - // token position, initial next() is moving the offset by one(size of rune - // actually), though we are interested with the starting point - s.tokPos.Offset = s.srcPos.Offset - s.lastCharLen - if s.srcPos.Column > 0 { - // common case: last character was not a '\n' - s.tokPos.Line = s.srcPos.Line - s.tokPos.Column = s.srcPos.Column - } else { - // last character was a '\n' - // (we cannot be at the beginning of the source - // since we have called next() at least once) - s.tokPos.Line = s.srcPos.Line - 1 - s.tokPos.Column = s.lastLineLen - } - - switch { - case isLetter(ch): - lit := s.scanIdentifier() - if lit == "true" || lit == "false" { - tok = token.BOOL - } else if lit == "null" { - tok = token.NULL - } else { - s.err("illegal char") - } - case isDecimal(ch): - tok = s.scanNumber(ch) - default: - switch ch { - case eof: - tok = token.EOF - case '"': - tok = token.STRING - s.scanString() - case '.': - tok = token.PERIOD - ch = s.peek() - if isDecimal(ch) { - tok = token.FLOAT - ch = s.scanMantissa(ch) - ch = s.scanExponent(ch) - } - case '[': - tok = token.LBRACK - case ']': - tok = token.RBRACK - case '{': - tok = token.LBRACE - case '}': - tok = token.RBRACE - case ',': - tok = token.COMMA - case ':': - tok = token.COLON - case '-': - if isDecimal(s.peek()) { - ch := s.next() - tok = s.scanNumber(ch) - } else { - s.err("illegal char") - } - default: - s.err("illegal char: " + string(ch)) - } - } - - // finish token ending - s.tokEnd = s.srcPos.Offset - - // create token literal - var tokenText string - if s.tokStart >= 0 { - tokenText = string(s.src[s.tokStart:s.tokEnd]) - } - s.tokStart = s.tokEnd // ensure idempotency of tokenText() call - - return token.Token{ - Type: tok, - Pos: s.tokPos, - Text: tokenText, - } -} - -// scanNumber scans a HCL number definition starting with the given rune -func (s *Scanner) scanNumber(ch rune) token.Type { - zero := ch == '0' - pos := s.srcPos - - s.scanMantissa(ch) - ch = s.next() // seek forward - if ch == 'e' || ch == 'E' { - ch = s.scanExponent(ch) - return token.FLOAT - } - - if ch == '.' { - ch = s.scanFraction(ch) - if ch == 'e' || ch == 'E' { - ch = s.next() - ch = s.scanExponent(ch) - } - return token.FLOAT - } - - if ch != eof { - s.unread() - } - - // If we have a larger number and this is zero, error - if zero && pos != s.srcPos { - s.err("numbers cannot start with 0") - } - - return token.NUMBER -} - -// scanMantissa scans the mantissa beginning from the rune. It returns the next -// non decimal rune. It's used to determine wheter it's a fraction or exponent. -func (s *Scanner) scanMantissa(ch rune) rune { - scanned := false - for isDecimal(ch) { - ch = s.next() - scanned = true - } - - if scanned && ch != eof { - s.unread() - } - return ch -} - -// scanFraction scans the fraction after the '.' rune -func (s *Scanner) scanFraction(ch rune) rune { - if ch == '.' { - ch = s.peek() // we peek just to see if we can move forward - ch = s.scanMantissa(ch) - } - return ch -} - -// scanExponent scans the remaining parts of an exponent after the 'e' or 'E' -// rune. 
-func (s *Scanner) scanExponent(ch rune) rune { - if ch == 'e' || ch == 'E' { - ch = s.next() - if ch == '-' || ch == '+' { - ch = s.next() - } - ch = s.scanMantissa(ch) - } - return ch -} - -// scanString scans a quoted string -func (s *Scanner) scanString() { - braces := 0 - for { - // '"' opening already consumed - // read character after quote - ch := s.next() - - if ch == '\n' || ch < 0 || ch == eof { - s.err("literal not terminated") - return - } - - if ch == '"' { - break - } - - // If we're going into a ${} then we can ignore quotes for awhile - if braces == 0 && ch == '$' && s.peek() == '{' { - braces++ - s.next() - } else if braces > 0 && ch == '{' { - braces++ - } - if braces > 0 && ch == '}' { - braces-- - } - - if ch == '\\' { - s.scanEscape() - } - } - - return -} - -// scanEscape scans an escape sequence -func (s *Scanner) scanEscape() rune { - // http://en.cppreference.com/w/cpp/language/escape - ch := s.next() // read character after '/' - switch ch { - case 'a', 'b', 'f', 'n', 'r', 't', 'v', '\\', '"': - // nothing to do - case '0', '1', '2', '3', '4', '5', '6', '7': - // octal notation - ch = s.scanDigits(ch, 8, 3) - case 'x': - // hexademical notation - ch = s.scanDigits(s.next(), 16, 2) - case 'u': - // universal character name - ch = s.scanDigits(s.next(), 16, 4) - case 'U': - // universal character name - ch = s.scanDigits(s.next(), 16, 8) - default: - s.err("illegal char escape") - } - return ch -} - -// scanDigits scans a rune with the given base for n times. For example an -// octal notation \184 would yield in scanDigits(ch, 8, 3) -func (s *Scanner) scanDigits(ch rune, base, n int) rune { - for n > 0 && digitVal(ch) < base { - ch = s.next() - n-- - } - if n > 0 { - s.err("illegal char escape") - } - - // we scanned all digits, put the last non digit char back - s.unread() - return ch -} - -// scanIdentifier scans an identifier and returns the literal string -func (s *Scanner) scanIdentifier() string { - offs := s.srcPos.Offset - s.lastCharLen - ch := s.next() - for isLetter(ch) || isDigit(ch) || ch == '-' { - ch = s.next() - } - - if ch != eof { - s.unread() // we got identifier, put back latest char - } - - return string(s.src[offs:s.srcPos.Offset]) -} - -// recentPosition returns the position of the character immediately after the -// character or token returned by the last call to Scan. -func (s *Scanner) recentPosition() (pos token.Pos) { - pos.Offset = s.srcPos.Offset - s.lastCharLen - switch { - case s.srcPos.Column > 0: - // common case: last character was not a '\n' - pos.Line = s.srcPos.Line - pos.Column = s.srcPos.Column - case s.lastLineLen > 0: - // last character was a '\n' - // (we cannot be at the beginning of the source - // since we have called next() at least once) - pos.Line = s.srcPos.Line - 1 - pos.Column = s.lastLineLen - default: - // at the beginning of the source - pos.Line = 1 - pos.Column = 1 - } - return -} - -// err prints the error of any scanning to s.Error function. 
If the function is -// not defined, by default it prints them to os.Stderr -func (s *Scanner) err(msg string) { - s.ErrorCount++ - pos := s.recentPosition() - - if s.Error != nil { - s.Error(pos, msg) - return - } - - fmt.Fprintf(os.Stderr, "%s: %s\n", pos, msg) -} - -// isHexadecimal returns true if the given rune is a letter -func isLetter(ch rune) bool { - return 'a' <= ch && ch <= 'z' || 'A' <= ch && ch <= 'Z' || ch == '_' || ch >= 0x80 && unicode.IsLetter(ch) -} - -// isHexadecimal returns true if the given rune is a decimal digit -func isDigit(ch rune) bool { - return '0' <= ch && ch <= '9' || ch >= 0x80 && unicode.IsDigit(ch) -} - -// isHexadecimal returns true if the given rune is a decimal number -func isDecimal(ch rune) bool { - return '0' <= ch && ch <= '9' -} - -// isHexadecimal returns true if the given rune is an hexadecimal number -func isHexadecimal(ch rune) bool { - return '0' <= ch && ch <= '9' || 'a' <= ch && ch <= 'f' || 'A' <= ch && ch <= 'F' -} - -// isWhitespace returns true if the rune is a space, tab, newline or carriage return -func isWhitespace(ch rune) bool { - return ch == ' ' || ch == '\t' || ch == '\n' || ch == '\r' -} - -// digitVal returns the integer value of a given octal,decimal or hexadecimal rune -func digitVal(ch rune) int { - switch { - case '0' <= ch && ch <= '9': - return int(ch - '0') - case 'a' <= ch && ch <= 'f': - return int(ch - 'a' + 10) - case 'A' <= ch && ch <= 'F': - return int(ch - 'A' + 10) - } - return 16 // larger than any legal digit val -} diff --git a/vendor/github.com/hashicorp/hcl/json/token/position.go b/vendor/github.com/hashicorp/hcl/json/token/position.go deleted file mode 100644 index 59c1bb72d..000000000 --- a/vendor/github.com/hashicorp/hcl/json/token/position.go +++ /dev/null @@ -1,46 +0,0 @@ -package token - -import "fmt" - -// Pos describes an arbitrary source position -// including the file, line, and column location. -// A Position is valid if the line number is > 0. -type Pos struct { - Filename string // filename, if any - Offset int // offset, starting at 0 - Line int // line number, starting at 1 - Column int // column number, starting at 1 (character count) -} - -// IsValid returns true if the position is valid. -func (p *Pos) IsValid() bool { return p.Line > 0 } - -// String returns a string in one of several forms: -// -// file:line:column valid position with file name -// line:column valid position without file name -// file invalid position with file name -// - invalid position without file name -func (p Pos) String() string { - s := p.Filename - if p.IsValid() { - if s != "" { - s += ":" - } - s += fmt.Sprintf("%d:%d", p.Line, p.Column) - } - if s == "" { - s = "-" - } - return s -} - -// Before reports whether the position p is before u. -func (p Pos) Before(u Pos) bool { - return u.Offset > p.Offset || u.Line > p.Line -} - -// After reports whether the position p is after u. 
-func (p Pos) After(u Pos) bool { - return u.Offset < p.Offset || u.Line < p.Line -} diff --git a/vendor/github.com/hashicorp/hcl/json/token/token.go b/vendor/github.com/hashicorp/hcl/json/token/token.go deleted file mode 100644 index 95a0c3eee..000000000 --- a/vendor/github.com/hashicorp/hcl/json/token/token.go +++ /dev/null @@ -1,118 +0,0 @@ -package token - -import ( - "fmt" - "strconv" - - hcltoken "github.com/hashicorp/hcl/hcl/token" -) - -// Token defines a single HCL token which can be obtained via the Scanner -type Token struct { - Type Type - Pos Pos - Text string -} - -// Type is the set of lexical tokens of the HCL (HashiCorp Configuration Language) -type Type int - -const ( - // Special tokens - ILLEGAL Type = iota - EOF - - identifier_beg - literal_beg - NUMBER // 12345 - FLOAT // 123.45 - BOOL // true,false - STRING // "abc" - NULL // null - literal_end - identifier_end - - operator_beg - LBRACK // [ - LBRACE // { - COMMA // , - PERIOD // . - COLON // : - - RBRACK // ] - RBRACE // } - - operator_end -) - -var tokens = [...]string{ - ILLEGAL: "ILLEGAL", - - EOF: "EOF", - - NUMBER: "NUMBER", - FLOAT: "FLOAT", - BOOL: "BOOL", - STRING: "STRING", - NULL: "NULL", - - LBRACK: "LBRACK", - LBRACE: "LBRACE", - COMMA: "COMMA", - PERIOD: "PERIOD", - COLON: "COLON", - - RBRACK: "RBRACK", - RBRACE: "RBRACE", -} - -// String returns the string corresponding to the token tok. -func (t Type) String() string { - s := "" - if 0 <= t && t < Type(len(tokens)) { - s = tokens[t] - } - if s == "" { - s = "token(" + strconv.Itoa(int(t)) + ")" - } - return s -} - -// IsIdentifier returns true for tokens corresponding to identifiers and basic -// type literals; it returns false otherwise. -func (t Type) IsIdentifier() bool { return identifier_beg < t && t < identifier_end } - -// IsLiteral returns true for tokens corresponding to basic type literals; it -// returns false otherwise. -func (t Type) IsLiteral() bool { return literal_beg < t && t < literal_end } - -// IsOperator returns true for tokens corresponding to operators and -// delimiters; it returns false otherwise. -func (t Type) IsOperator() bool { return operator_beg < t && t < operator_end } - -// String returns the token's literal text. Note that this is only -// applicable for certain token types, such as token.IDENT, -// token.STRING, etc.. -func (t Token) String() string { - return fmt.Sprintf("%s %s %s", t.Pos.String(), t.Type.String(), t.Text) -} - -// HCLToken converts this token to an HCL token. -// -// The token type must be a literal type or this will panic. 
-func (t Token) HCLToken() hcltoken.Token { - switch t.Type { - case BOOL: - return hcltoken.Token{Type: hcltoken.BOOL, Text: t.Text} - case FLOAT: - return hcltoken.Token{Type: hcltoken.FLOAT, Text: t.Text} - case NULL: - return hcltoken.Token{Type: hcltoken.STRING, Text: ""} - case NUMBER: - return hcltoken.Token{Type: hcltoken.NUMBER, Text: t.Text} - case STRING: - return hcltoken.Token{Type: hcltoken.STRING, Text: t.Text, JSON: true} - default: - panic(fmt.Sprintf("unimplemented HCLToken for type: %s", t.Type)) - } -} diff --git a/vendor/github.com/hashicorp/hcl/lex.go b/vendor/github.com/hashicorp/hcl/lex.go deleted file mode 100644 index d9993c292..000000000 --- a/vendor/github.com/hashicorp/hcl/lex.go +++ /dev/null @@ -1,38 +0,0 @@ -package hcl - -import ( - "unicode" - "unicode/utf8" -) - -type lexModeValue byte - -const ( - lexModeUnknown lexModeValue = iota - lexModeHcl - lexModeJson -) - -// lexMode returns whether we're going to be parsing in JSON -// mode or HCL mode. -func lexMode(v []byte) lexModeValue { - var ( - r rune - w int - offset int - ) - - for { - r, w = utf8.DecodeRune(v[offset:]) - offset += w - if unicode.IsSpace(r) { - continue - } - if r == '{' { - return lexModeJson - } - break - } - - return lexModeHcl -} diff --git a/vendor/github.com/hashicorp/hcl/parse.go b/vendor/github.com/hashicorp/hcl/parse.go deleted file mode 100644 index 1fca53c4c..000000000 --- a/vendor/github.com/hashicorp/hcl/parse.go +++ /dev/null @@ -1,39 +0,0 @@ -package hcl - -import ( - "fmt" - - "github.com/hashicorp/hcl/hcl/ast" - hclParser "github.com/hashicorp/hcl/hcl/parser" - jsonParser "github.com/hashicorp/hcl/json/parser" -) - -// ParseBytes accepts as input byte slice and returns ast tree. -// -// Input can be either JSON or HCL -func ParseBytes(in []byte) (*ast.File, error) { - return parse(in) -} - -// ParseString accepts input as a string and returns ast tree. -func ParseString(input string) (*ast.File, error) { - return parse([]byte(input)) -} - -func parse(in []byte) (*ast.File, error) { - switch lexMode(in) { - case lexModeHcl: - return hclParser.Parse(in) - case lexModeJson: - return jsonParser.Parse(in) - } - - return nil, fmt.Errorf("unknown config format") -} - -// Parse parses the given input and returns the root object. -// -// The input format can be either HCL or JSON. -func Parse(input string) (*ast.File, error) { - return parse([]byte(input)) -} diff --git a/vendor/github.com/hexops/gotextdiff/LICENSE b/vendor/github.com/hexops/gotextdiff/LICENSE deleted file mode 100644 index 6a66aea5e..000000000 --- a/vendor/github.com/hexops/gotextdiff/LICENSE +++ /dev/null @@ -1,27 +0,0 @@ -Copyright (c) 2009 The Go Authors. All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are -met: - - * Redistributions of source code must retain the above copyright -notice, this list of conditions and the following disclaimer. - * Redistributions in binary form must reproduce the above -copyright notice, this list of conditions and the following disclaimer -in the documentation and/or other materials provided with the -distribution. - * Neither the name of Google Inc. nor the names of its -contributors may be used to endorse or promote products derived from -this software without specific prior written permission. 
- -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/vendor/github.com/hexops/gotextdiff/README.md b/vendor/github.com/hexops/gotextdiff/README.md deleted file mode 100644 index bfd49a0c9..000000000 --- a/vendor/github.com/hexops/gotextdiff/README.md +++ /dev/null @@ -1,54 +0,0 @@ -# gotextdiff - unified text diffing in Go Hexops logo - -This is a copy of the Go text diffing packages that [the official Go language server gopls uses internally](https://github.com/golang/tools/tree/master/internal/lsp/diff) to generate unified diffs. - -If you've previously tried to generate unified text diffs in Go (like the ones you see in Git and on GitHub), you may have found [github.com/sergi/go-diff](https://github.com/sergi/go-diff) which is a Go port of Neil Fraser's google-diff-match-patch code - however it [does not support unified diffs](https://github.com/sergi/go-diff/issues/57). - -This is arguably one of the best (and most maintained) unified text diffing packages in Go as of at least 2020. - -(All credit goes to [the Go authors](http://tip.golang.org/AUTHORS), I am merely re-publishing their work so others can use it.) - -## Example usage - -Import the packages: - -```Go -import ( - "github.com/hexops/gotextdiff" - "github.com/hexops/gotextdiff/myers" -) -``` - -Assuming you want to diff `a.txt` and `b.txt`, whose contents are stored in `aString` and `bString` then: - -```Go -edits := myers.ComputeEdits(span.URIFromPath("a.txt"), aString, bString) -diff := fmt.Sprint(gotextdiff.ToUnified("a.txt", "b.txt", aString, edits)) -``` - -`diff` will be a string like: - -```diff ---- a.txt -+++ b.txt -@@ -1,13 +1,28 @@ --foo -+bar -``` - -## API compatability - -We will publish a new major version anytime the API changes in a backwards-incompatible way. Because the upstream is not being developed with this being a public package in mind, API breakages may occur more often than in other Go packages (but you can always continue using the old version thanks to Go modules.) - -## Alternatives - -- [github.com/andreyvit/diff](https://github.com/andreyvit/diff): Quick'n'easy string diffing functions for Golang based on github.com/sergi/go-diff. -- [github.com/kylelemons/godebug/diff](https://github.com/kylelemons/godebug/tree/master/diff): implements a linewise diff algorithm ([inactive](https://github.com/kylelemons/godebug/issues/22#issuecomment-524573477)). - -## Contributing - -We will only accept changes made [upstream](https://github.com/golang/tools/tree/master/internal/lsp/diff), please send any contributions to the upstream instead! Compared to the upstream, only import paths will be modified (to be non-`internal` so they are importable.) The only thing we add here is this README. 
- -## License - -See https://github.com/golang/tools/blob/master/LICENSE diff --git a/vendor/github.com/hexops/gotextdiff/diff.go b/vendor/github.com/hexops/gotextdiff/diff.go deleted file mode 100644 index 53e499bc0..000000000 --- a/vendor/github.com/hexops/gotextdiff/diff.go +++ /dev/null @@ -1,159 +0,0 @@ -// Copyright 2019 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// package gotextdiff supports a pluggable diff algorithm. -package gotextdiff - -import ( - "sort" - "strings" - - "github.com/hexops/gotextdiff/span" -) - -// TextEdit represents a change to a section of a document. -// The text within the specified span should be replaced by the supplied new text. -type TextEdit struct { - Span span.Span - NewText string -} - -// ComputeEdits is the type for a function that produces a set of edits that -// convert from the before content to the after content. -type ComputeEdits func(uri span.URI, before, after string) []TextEdit - -// SortTextEdits attempts to order all edits by their starting points. -// The sort is stable so that edits with the same starting point will not -// be reordered. -func SortTextEdits(d []TextEdit) { - // Use a stable sort to maintain the order of edits inserted at the same position. - sort.SliceStable(d, func(i int, j int) bool { - return span.Compare(d[i].Span, d[j].Span) < 0 - }) -} - -// ApplyEdits applies the set of edits to the before and returns the resulting -// content. -// It may panic or produce garbage if the edits are not valid for the provided -// before content. -func ApplyEdits(before string, edits []TextEdit) string { - // Preconditions: - // - all of the edits apply to before - // - and all the spans for each TextEdit have the same URI - if len(edits) == 0 { - return before - } - _, edits, _ = prepareEdits(before, edits) - after := strings.Builder{} - last := 0 - for _, edit := range edits { - start := edit.Span.Start().Offset() - if start > last { - after.WriteString(before[last:start]) - last = start - } - after.WriteString(edit.NewText) - last = edit.Span.End().Offset() - } - if last < len(before) { - after.WriteString(before[last:]) - } - return after.String() -} - -// LineEdits takes a set of edits and expands and merges them as necessary -// to ensure that there are only full line edits left when it is done. 
-func LineEdits(before string, edits []TextEdit) []TextEdit { - if len(edits) == 0 { - return nil - } - c, edits, partial := prepareEdits(before, edits) - if partial { - edits = lineEdits(before, c, edits) - } - return edits -} - -// prepareEdits returns a sorted copy of the edits -func prepareEdits(before string, edits []TextEdit) (*span.TokenConverter, []TextEdit, bool) { - partial := false - c := span.NewContentConverter("", []byte(before)) - copied := make([]TextEdit, len(edits)) - for i, edit := range edits { - edit.Span, _ = edit.Span.WithAll(c) - copied[i] = edit - partial = partial || - edit.Span.Start().Offset() >= len(before) || - edit.Span.Start().Column() > 1 || edit.Span.End().Column() > 1 - } - SortTextEdits(copied) - return c, copied, partial -} - -// lineEdits rewrites the edits to always be full line edits -func lineEdits(before string, c *span.TokenConverter, edits []TextEdit) []TextEdit { - adjusted := make([]TextEdit, 0, len(edits)) - current := TextEdit{Span: span.Invalid} - for _, edit := range edits { - if current.Span.IsValid() && edit.Span.Start().Line() <= current.Span.End().Line() { - // overlaps with the current edit, need to combine - // first get the gap from the previous edit - gap := before[current.Span.End().Offset():edit.Span.Start().Offset()] - // now add the text of this edit - current.NewText += gap + edit.NewText - // and then adjust the end position - current.Span = span.New(current.Span.URI(), current.Span.Start(), edit.Span.End()) - } else { - // does not overlap, add previous run (if there is one) - adjusted = addEdit(before, adjusted, current) - // and then remember this edit as the start of the next run - current = edit - } - } - // add the current pending run if there is one - return addEdit(before, adjusted, current) -} - -func addEdit(before string, edits []TextEdit, edit TextEdit) []TextEdit { - if !edit.Span.IsValid() { - return edits - } - // if edit is partial, expand it to full line now - start := edit.Span.Start() - end := edit.Span.End() - if start.Column() > 1 { - // prepend the text and adjust to start of line - delta := start.Column() - 1 - start = span.NewPoint(start.Line(), 1, start.Offset()-delta) - edit.Span = span.New(edit.Span.URI(), start, end) - edit.NewText = before[start.Offset():start.Offset()+delta] + edit.NewText - } - if start.Offset() >= len(before) && start.Line() > 1 && before[len(before)-1] != '\n' { - // after end of file that does not end in eol, so join to last line of file - // to do this we need to know where the start of the last line was - eol := strings.LastIndex(before, "\n") - if eol < 0 { - // file is one non terminated line - eol = 0 - } - delta := len(before) - eol - start = span.NewPoint(start.Line()-1, 1, start.Offset()-delta) - edit.Span = span.New(edit.Span.URI(), start, end) - edit.NewText = before[start.Offset():start.Offset()+delta] + edit.NewText - } - if end.Column() > 1 { - remains := before[end.Offset():] - eol := strings.IndexRune(remains, '\n') - if eol < 0 { - eol = len(remains) - } else { - eol++ - } - end = span.NewPoint(end.Line()+1, 1, end.Offset()+eol) - edit.Span = span.New(edit.Span.URI(), start, end) - edit.NewText = edit.NewText + remains[:eol] - } - edits = append(edits, edit) - return edits -} diff --git a/vendor/github.com/hexops/gotextdiff/myers/diff.go b/vendor/github.com/hexops/gotextdiff/myers/diff.go deleted file mode 100644 index 5e3e92364..000000000 --- a/vendor/github.com/hexops/gotextdiff/myers/diff.go +++ /dev/null @@ -1,205 +0,0 @@ -// Copyright 2019 The Go Authors. 
All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Package myers implements the Myers diff algorithm. -package myers - -import ( - "strings" - - diff "github.com/hexops/gotextdiff" - "github.com/hexops/gotextdiff/span" -) - -// Sources: -// https://blog.jcoglan.com/2017/02/17/the-myers-diff-algorithm-part-3/ -// https://www.codeproject.com/Articles/42279/%2FArticles%2F42279%2FInvestigating-Myers-diff-algorithm-Part-1-of-2 - -func ComputeEdits(uri span.URI, before, after string) []diff.TextEdit { - ops := operations(splitLines(before), splitLines(after)) - edits := make([]diff.TextEdit, 0, len(ops)) - for _, op := range ops { - s := span.New(uri, span.NewPoint(op.I1+1, 1, 0), span.NewPoint(op.I2+1, 1, 0)) - switch op.Kind { - case diff.Delete: - // Delete: unformatted[i1:i2] is deleted. - edits = append(edits, diff.TextEdit{Span: s}) - case diff.Insert: - // Insert: formatted[j1:j2] is inserted at unformatted[i1:i1]. - if content := strings.Join(op.Content, ""); content != "" { - edits = append(edits, diff.TextEdit{Span: s, NewText: content}) - } - } - } - return edits -} - -type operation struct { - Kind diff.OpKind - Content []string // content from b - I1, I2 int // indices of the line in a - J1 int // indices of the line in b, J2 implied by len(Content) -} - -// operations returns the list of operations to convert a into b, consolidating -// operations for multiple lines and not including equal lines. -func operations(a, b []string) []*operation { - if len(a) == 0 && len(b) == 0 { - return nil - } - - trace, offset := shortestEditSequence(a, b) - snakes := backtrack(trace, len(a), len(b), offset) - - M, N := len(a), len(b) - - var i int - solution := make([]*operation, len(a)+len(b)) - - add := func(op *operation, i2, j2 int) { - if op == nil { - return - } - op.I2 = i2 - if op.Kind == diff.Insert { - op.Content = b[op.J1:j2] - } - solution[i] = op - i++ - } - x, y := 0, 0 - for _, snake := range snakes { - if len(snake) < 2 { - continue - } - var op *operation - // delete (horizontal) - for snake[0]-snake[1] > x-y { - if op == nil { - op = &operation{ - Kind: diff.Delete, - I1: x, - J1: y, - } - } - x++ - if x == M { - break - } - } - add(op, x, y) - op = nil - // insert (vertical) - for snake[0]-snake[1] < x-y { - if op == nil { - op = &operation{ - Kind: diff.Insert, - I1: x, - J1: y, - } - } - y++ - } - add(op, x, y) - op = nil - // equal (diagonal) - for x < snake[0] { - x++ - y++ - } - if x >= M && y >= N { - break - } - } - return solution[:i] -} - -// backtrack uses the trace for the edit sequence computation and returns the -// "snakes" that make up the solution. A "snake" is a single deletion or -// insertion followed by zero or diagonals. -func backtrack(trace [][]int, x, y, offset int) [][]int { - snakes := make([][]int, len(trace)) - d := len(trace) - 1 - for ; x > 0 && y > 0 && d > 0; d-- { - V := trace[d] - if len(V) == 0 { - continue - } - snakes[d] = []int{x, y} - - k := x - y - - var kPrev int - if k == -d || (k != d && V[k-1+offset] < V[k+1+offset]) { - kPrev = k + 1 - } else { - kPrev = k - 1 - } - - x = V[kPrev+offset] - y = x - kPrev - } - if x < 0 || y < 0 { - return snakes - } - snakes[d] = []int{x, y} - return snakes -} - -// shortestEditSequence returns the shortest edit sequence that converts a into b. 
-func shortestEditSequence(a, b []string) ([][]int, int) { - M, N := len(a), len(b) - V := make([]int, 2*(N+M)+1) - offset := N + M - trace := make([][]int, N+M+1) - - // Iterate through the maximum possible length of the SES (N+M). - for d := 0; d <= N+M; d++ { - copyV := make([]int, len(V)) - // k lines are represented by the equation y = x - k. We move in - // increments of 2 because end points for even d are on even k lines. - for k := -d; k <= d; k += 2 { - // At each point, we either go down or to the right. We go down if - // k == -d, and we go to the right if k == d. We also prioritize - // the maximum x value, because we prefer deletions to insertions. - var x int - if k == -d || (k != d && V[k-1+offset] < V[k+1+offset]) { - x = V[k+1+offset] // down - } else { - x = V[k-1+offset] + 1 // right - } - - y := x - k - - // Diagonal moves while we have equal contents. - for x < M && y < N && a[x] == b[y] { - x++ - y++ - } - - V[k+offset] = x - - // Return if we've exceeded the maximum values. - if x == M && y == N { - // Makes sure to save the state of the array before returning. - copy(copyV, V) - trace[d] = copyV - return trace, offset - } - } - - // Save the state of the array. - copy(copyV, V) - trace[d] = copyV - } - return nil, 0 -} - -func splitLines(text string) []string { - lines := strings.SplitAfter(text, "\n") - if lines[len(lines)-1] == "" { - lines = lines[:len(lines)-1] - } - return lines -} diff --git a/vendor/github.com/hexops/gotextdiff/span/parse.go b/vendor/github.com/hexops/gotextdiff/span/parse.go deleted file mode 100644 index aa17c84ec..000000000 --- a/vendor/github.com/hexops/gotextdiff/span/parse.go +++ /dev/null @@ -1,100 +0,0 @@ -// Copyright 2019 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package span - -import ( - "strconv" - "strings" - "unicode/utf8" -) - -// Parse returns the location represented by the input. -// Only file paths are accepted, not URIs. -// The returned span will be normalized, and thus if printed may produce a -// different string. 
-func Parse(input string) Span { - // :0:0#0-0:0#0 - valid := input - var hold, offset int - hadCol := false - suf := rstripSuffix(input) - if suf.sep == "#" { - offset = suf.num - suf = rstripSuffix(suf.remains) - } - if suf.sep == ":" { - valid = suf.remains - hold = suf.num - hadCol = true - suf = rstripSuffix(suf.remains) - } - switch { - case suf.sep == ":": - return New(URIFromPath(suf.remains), NewPoint(suf.num, hold, offset), Point{}) - case suf.sep == "-": - // we have a span, fall out of the case to continue - default: - // separator not valid, rewind to either the : or the start - return New(URIFromPath(valid), NewPoint(hold, 0, offset), Point{}) - } - // only the span form can get here - // at this point we still don't know what the numbers we have mean - // if have not yet seen a : then we might have either a line or a column depending - // on whether start has a column or not - // we build an end point and will fix it later if needed - end := NewPoint(suf.num, hold, offset) - hold, offset = 0, 0 - suf = rstripSuffix(suf.remains) - if suf.sep == "#" { - offset = suf.num - suf = rstripSuffix(suf.remains) - } - if suf.sep != ":" { - // turns out we don't have a span after all, rewind - return New(URIFromPath(valid), end, Point{}) - } - valid = suf.remains - hold = suf.num - suf = rstripSuffix(suf.remains) - if suf.sep != ":" { - // line#offset only - return New(URIFromPath(valid), NewPoint(hold, 0, offset), end) - } - // we have a column, so if end only had one number, it is also the column - if !hadCol { - end = NewPoint(suf.num, end.v.Line, end.v.Offset) - } - return New(URIFromPath(suf.remains), NewPoint(suf.num, hold, offset), end) -} - -type suffix struct { - remains string - sep string - num int -} - -func rstripSuffix(input string) suffix { - if len(input) == 0 { - return suffix{"", "", -1} - } - remains := input - num := -1 - // first see if we have a number at the end - last := strings.LastIndexFunc(remains, func(r rune) bool { return r < '0' || r > '9' }) - if last >= 0 && last < len(remains)-1 { - number, err := strconv.ParseInt(remains[last+1:], 10, 64) - if err == nil { - num = int(number) - remains = remains[:last+1] - } - } - // now see if we have a trailing separator - r, w := utf8.DecodeLastRuneInString(remains) - if r != ':' && r != '#' && r == '#' { - return suffix{input, "", -1} - } - remains = remains[:len(remains)-w] - return suffix{remains, string(r), num} -} diff --git a/vendor/github.com/hexops/gotextdiff/span/span.go b/vendor/github.com/hexops/gotextdiff/span/span.go deleted file mode 100644 index 4d2ad0986..000000000 --- a/vendor/github.com/hexops/gotextdiff/span/span.go +++ /dev/null @@ -1,285 +0,0 @@ -// Copyright 2019 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Package span contains support for representing with positions and ranges in -// text files. -package span - -import ( - "encoding/json" - "fmt" - "path" -) - -// Span represents a source code range in standardized form. -type Span struct { - v span -} - -// Point represents a single point within a file. -// In general this should only be used as part of a Span, as on its own it -// does not carry enough information. 
-type Point struct { - v point -} - -type span struct { - URI URI `json:"uri"` - Start point `json:"start"` - End point `json:"end"` -} - -type point struct { - Line int `json:"line"` - Column int `json:"column"` - Offset int `json:"offset"` -} - -// Invalid is a span that reports false from IsValid -var Invalid = Span{v: span{Start: invalidPoint.v, End: invalidPoint.v}} - -var invalidPoint = Point{v: point{Line: 0, Column: 0, Offset: -1}} - -// Converter is the interface to an object that can convert between line:column -// and offset forms for a single file. -type Converter interface { - //ToPosition converts from an offset to a line:column pair. - ToPosition(offset int) (int, int, error) - //ToOffset converts from a line:column pair to an offset. - ToOffset(line, col int) (int, error) -} - -func New(uri URI, start Point, end Point) Span { - s := Span{v: span{URI: uri, Start: start.v, End: end.v}} - s.v.clean() - return s -} - -func NewPoint(line, col, offset int) Point { - p := Point{v: point{Line: line, Column: col, Offset: offset}} - p.v.clean() - return p -} - -func Compare(a, b Span) int { - if r := CompareURI(a.URI(), b.URI()); r != 0 { - return r - } - if r := comparePoint(a.v.Start, b.v.Start); r != 0 { - return r - } - return comparePoint(a.v.End, b.v.End) -} - -func ComparePoint(a, b Point) int { - return comparePoint(a.v, b.v) -} - -func comparePoint(a, b point) int { - if !a.hasPosition() { - if a.Offset < b.Offset { - return -1 - } - if a.Offset > b.Offset { - return 1 - } - return 0 - } - if a.Line < b.Line { - return -1 - } - if a.Line > b.Line { - return 1 - } - if a.Column < b.Column { - return -1 - } - if a.Column > b.Column { - return 1 - } - return 0 -} - -func (s Span) HasPosition() bool { return s.v.Start.hasPosition() } -func (s Span) HasOffset() bool { return s.v.Start.hasOffset() } -func (s Span) IsValid() bool { return s.v.Start.isValid() } -func (s Span) IsPoint() bool { return s.v.Start == s.v.End } -func (s Span) URI() URI { return s.v.URI } -func (s Span) Start() Point { return Point{s.v.Start} } -func (s Span) End() Point { return Point{s.v.End} } -func (s *Span) MarshalJSON() ([]byte, error) { return json.Marshal(&s.v) } -func (s *Span) UnmarshalJSON(b []byte) error { return json.Unmarshal(b, &s.v) } - -func (p Point) HasPosition() bool { return p.v.hasPosition() } -func (p Point) HasOffset() bool { return p.v.hasOffset() } -func (p Point) IsValid() bool { return p.v.isValid() } -func (p *Point) MarshalJSON() ([]byte, error) { return json.Marshal(&p.v) } -func (p *Point) UnmarshalJSON(b []byte) error { return json.Unmarshal(b, &p.v) } -func (p Point) Line() int { - if !p.v.hasPosition() { - panic(fmt.Errorf("position not set in %v", p.v)) - } - return p.v.Line -} -func (p Point) Column() int { - if !p.v.hasPosition() { - panic(fmt.Errorf("position not set in %v", p.v)) - } - return p.v.Column -} -func (p Point) Offset() int { - if !p.v.hasOffset() { - panic(fmt.Errorf("offset not set in %v", p.v)) - } - return p.v.Offset -} - -func (p point) hasPosition() bool { return p.Line > 0 } -func (p point) hasOffset() bool { return p.Offset >= 0 } -func (p point) isValid() bool { return p.hasPosition() || p.hasOffset() } -func (p point) isZero() bool { - return (p.Line == 1 && p.Column == 1) || (!p.hasPosition() && p.Offset == 0) -} - -func (s *span) clean() { - //this presumes the points are already clean - if !s.End.isValid() || (s.End == point{}) { - s.End = s.Start - } -} - -func (p *point) clean() { - if p.Line < 0 { - p.Line = 0 - } - if p.Column <= 0 { - if 
p.Line > 0 { - p.Column = 1 - } else { - p.Column = 0 - } - } - if p.Offset == 0 && (p.Line > 1 || p.Column > 1) { - p.Offset = -1 - } -} - -// Format implements fmt.Formatter to print the Location in a standard form. -// The format produced is one that can be read back in using Parse. -func (s Span) Format(f fmt.State, c rune) { - fullForm := f.Flag('+') - preferOffset := f.Flag('#') - // we should always have a uri, simplify if it is file format - //TODO: make sure the end of the uri is unambiguous - uri := string(s.v.URI) - if c == 'f' { - uri = path.Base(uri) - } else if !fullForm { - uri = s.v.URI.Filename() - } - fmt.Fprint(f, uri) - if !s.IsValid() || (!fullForm && s.v.Start.isZero() && s.v.End.isZero()) { - return - } - // see which bits of start to write - printOffset := s.HasOffset() && (fullForm || preferOffset || !s.HasPosition()) - printLine := s.HasPosition() && (fullForm || !printOffset) - printColumn := printLine && (fullForm || (s.v.Start.Column > 1 || s.v.End.Column > 1)) - fmt.Fprint(f, ":") - if printLine { - fmt.Fprintf(f, "%d", s.v.Start.Line) - } - if printColumn { - fmt.Fprintf(f, ":%d", s.v.Start.Column) - } - if printOffset { - fmt.Fprintf(f, "#%d", s.v.Start.Offset) - } - // start is written, do we need end? - if s.IsPoint() { - return - } - // we don't print the line if it did not change - printLine = fullForm || (printLine && s.v.End.Line > s.v.Start.Line) - fmt.Fprint(f, "-") - if printLine { - fmt.Fprintf(f, "%d", s.v.End.Line) - } - if printColumn { - if printLine { - fmt.Fprint(f, ":") - } - fmt.Fprintf(f, "%d", s.v.End.Column) - } - if printOffset { - fmt.Fprintf(f, "#%d", s.v.End.Offset) - } -} - -func (s Span) WithPosition(c Converter) (Span, error) { - if err := s.update(c, true, false); err != nil { - return Span{}, err - } - return s, nil -} - -func (s Span) WithOffset(c Converter) (Span, error) { - if err := s.update(c, false, true); err != nil { - return Span{}, err - } - return s, nil -} - -func (s Span) WithAll(c Converter) (Span, error) { - if err := s.update(c, true, true); err != nil { - return Span{}, err - } - return s, nil -} - -func (s *Span) update(c Converter, withPos, withOffset bool) error { - if !s.IsValid() { - return fmt.Errorf("cannot add information to an invalid span") - } - if withPos && !s.HasPosition() { - if err := s.v.Start.updatePosition(c); err != nil { - return err - } - if s.v.End.Offset == s.v.Start.Offset { - s.v.End = s.v.Start - } else if err := s.v.End.updatePosition(c); err != nil { - return err - } - } - if withOffset && (!s.HasOffset() || (s.v.End.hasPosition() && !s.v.End.hasOffset())) { - if err := s.v.Start.updateOffset(c); err != nil { - return err - } - if s.v.End.Line == s.v.Start.Line && s.v.End.Column == s.v.Start.Column { - s.v.End.Offset = s.v.Start.Offset - } else if err := s.v.End.updateOffset(c); err != nil { - return err - } - } - return nil -} - -func (p *point) updatePosition(c Converter) error { - line, col, err := c.ToPosition(p.Offset) - if err != nil { - return err - } - p.Line = line - p.Column = col - return nil -} - -func (p *point) updateOffset(c Converter) error { - offset, err := c.ToOffset(p.Line, p.Column) - if err != nil { - return err - } - p.Offset = offset - return nil -} diff --git a/vendor/github.com/hexops/gotextdiff/span/token.go b/vendor/github.com/hexops/gotextdiff/span/token.go deleted file mode 100644 index 6f8b9b570..000000000 --- a/vendor/github.com/hexops/gotextdiff/span/token.go +++ /dev/null @@ -1,194 +0,0 @@ -// Copyright 2019 The Go Authors. All rights reserved. 
-// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package span - -import ( - "fmt" - "go/token" -) - -// Range represents a source code range in token.Pos form. -// It also carries the FileSet that produced the positions, so that it is -// self contained. -type Range struct { - FileSet *token.FileSet - Start token.Pos - End token.Pos - Converter Converter -} - -type FileConverter struct { - file *token.File -} - -// TokenConverter is a Converter backed by a token file set and file. -// It uses the file set methods to work out the conversions, which -// makes it fast and does not require the file contents. -type TokenConverter struct { - FileConverter - fset *token.FileSet -} - -// NewRange creates a new Range from a FileSet and two positions. -// To represent a point pass a 0 as the end pos. -func NewRange(fset *token.FileSet, start, end token.Pos) Range { - return Range{ - FileSet: fset, - Start: start, - End: end, - } -} - -// NewTokenConverter returns an implementation of Converter backed by a -// token.File. -func NewTokenConverter(fset *token.FileSet, f *token.File) *TokenConverter { - return &TokenConverter{fset: fset, FileConverter: FileConverter{file: f}} -} - -// NewContentConverter returns an implementation of Converter for the -// given file content. -func NewContentConverter(filename string, content []byte) *TokenConverter { - fset := token.NewFileSet() - f := fset.AddFile(filename, -1, len(content)) - f.SetLinesForContent(content) - return NewTokenConverter(fset, f) -} - -// IsPoint returns true if the range represents a single point. -func (r Range) IsPoint() bool { - return r.Start == r.End -} - -// Span converts a Range to a Span that represents the Range. -// It will fill in all the members of the Span, calculating the line and column -// information. -func (r Range) Span() (Span, error) { - if !r.Start.IsValid() { - return Span{}, fmt.Errorf("start pos is not valid") - } - f := r.FileSet.File(r.Start) - if f == nil { - return Span{}, fmt.Errorf("file not found in FileSet") - } - return FileSpan(f, r.Converter, r.Start, r.End) -} - -// FileSpan returns a span within tok, using converter to translate between -// offsets and positions. -func FileSpan(tok *token.File, converter Converter, start, end token.Pos) (Span, error) { - var s Span - var err error - var startFilename string - startFilename, s.v.Start.Line, s.v.Start.Column, err = position(tok, start) - if err != nil { - return Span{}, err - } - s.v.URI = URIFromPath(startFilename) - if end.IsValid() { - var endFilename string - endFilename, s.v.End.Line, s.v.End.Column, err = position(tok, end) - if err != nil { - return Span{}, err - } - // In the presence of line directives, a single File can have sections from - // multiple file names. 
- if endFilename != startFilename { - return Span{}, fmt.Errorf("span begins in file %q but ends in %q", startFilename, endFilename) - } - } - s.v.Start.clean() - s.v.End.clean() - s.v.clean() - if converter != nil { - return s.WithOffset(converter) - } - if startFilename != tok.Name() { - return Span{}, fmt.Errorf("must supply Converter for file %q containing lines from %q", tok.Name(), startFilename) - } - return s.WithOffset(&FileConverter{tok}) -} - -func position(f *token.File, pos token.Pos) (string, int, int, error) { - off, err := offset(f, pos) - if err != nil { - return "", 0, 0, err - } - return positionFromOffset(f, off) -} - -func positionFromOffset(f *token.File, offset int) (string, int, int, error) { - if offset > f.Size() { - return "", 0, 0, fmt.Errorf("offset %v is past the end of the file %v", offset, f.Size()) - } - pos := f.Pos(offset) - p := f.Position(pos) - // TODO(golang/go#41029): Consider returning line, column instead of line+1, 1 if - // the file's last character is not a newline. - if offset == f.Size() { - return p.Filename, p.Line + 1, 1, nil - } - return p.Filename, p.Line, p.Column, nil -} - -// offset is a copy of the Offset function in go/token, but with the adjustment -// that it does not panic on invalid positions. -func offset(f *token.File, pos token.Pos) (int, error) { - if int(pos) < f.Base() || int(pos) > f.Base()+f.Size() { - return 0, fmt.Errorf("invalid pos") - } - return int(pos) - f.Base(), nil -} - -// Range converts a Span to a Range that represents the Span for the supplied -// File. -func (s Span) Range(converter *TokenConverter) (Range, error) { - s, err := s.WithOffset(converter) - if err != nil { - return Range{}, err - } - // go/token will panic if the offset is larger than the file's size, - // so check here to avoid panicking. - if s.Start().Offset() > converter.file.Size() { - return Range{}, fmt.Errorf("start offset %v is past the end of the file %v", s.Start(), converter.file.Size()) - } - if s.End().Offset() > converter.file.Size() { - return Range{}, fmt.Errorf("end offset %v is past the end of the file %v", s.End(), converter.file.Size()) - } - return Range{ - FileSet: converter.fset, - Start: converter.file.Pos(s.Start().Offset()), - End: converter.file.Pos(s.End().Offset()), - Converter: converter, - }, nil -} - -func (l *FileConverter) ToPosition(offset int) (int, int, error) { - _, line, col, err := positionFromOffset(l.file, offset) - return line, col, err -} - -func (l *FileConverter) ToOffset(line, col int) (int, error) { - if line < 0 { - return -1, fmt.Errorf("line is not valid") - } - lineMax := l.file.LineCount() + 1 - if line > lineMax { - return -1, fmt.Errorf("line is beyond end of file %v", lineMax) - } else if line == lineMax { - if col > 1 { - return -1, fmt.Errorf("column is beyond end of file") - } - // at the end of the file, allowing for a trailing eol - return l.file.Size(), nil - } - pos := lineStart(l.file, line) - if !pos.IsValid() { - return -1, fmt.Errorf("line is not in file") - } - // we assume that column is in bytes here, and that the first byte of a - // line is at column 1 - pos += token.Pos(col - 1) - return offset(l.file, pos) -} diff --git a/vendor/github.com/hexops/gotextdiff/span/token111.go b/vendor/github.com/hexops/gotextdiff/span/token111.go deleted file mode 100644 index bf7a5406b..000000000 --- a/vendor/github.com/hexops/gotextdiff/span/token111.go +++ /dev/null @@ -1,39 +0,0 @@ -// Copyright 2019 The Go Authors. All rights reserved. 
-// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// +build !go1.12 - -package span - -import ( - "go/token" -) - -// lineStart is the pre-Go 1.12 version of (*token.File).LineStart. For Go -// versions <= 1.11, we borrow logic from the analysisutil package. -// TODO(rstambler): Delete this file when we no longer support Go 1.11. -func lineStart(f *token.File, line int) token.Pos { - // Use binary search to find the start offset of this line. - - min := 0 // inclusive - max := f.Size() // exclusive - for { - offset := (min + max) / 2 - pos := f.Pos(offset) - posn := f.Position(pos) - if posn.Line == line { - return pos - (token.Pos(posn.Column) - 1) - } - - if min+1 >= max { - return token.NoPos - } - - if posn.Line < line { - min = offset - } else { - max = offset - } - } -} diff --git a/vendor/github.com/hexops/gotextdiff/span/token112.go b/vendor/github.com/hexops/gotextdiff/span/token112.go deleted file mode 100644 index 017aec9c1..000000000 --- a/vendor/github.com/hexops/gotextdiff/span/token112.go +++ /dev/null @@ -1,16 +0,0 @@ -// Copyright 2019 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// +build go1.12 - -package span - -import ( - "go/token" -) - -// TODO(rstambler): Delete this file when we no longer support Go 1.11. -func lineStart(f *token.File, line int) token.Pos { - return f.LineStart(line) -} diff --git a/vendor/github.com/hexops/gotextdiff/span/uri.go b/vendor/github.com/hexops/gotextdiff/span/uri.go deleted file mode 100644 index 250492135..000000000 --- a/vendor/github.com/hexops/gotextdiff/span/uri.go +++ /dev/null @@ -1,169 +0,0 @@ -// Copyright 2019 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package span - -import ( - "fmt" - "net/url" - "os" - "path" - "path/filepath" - "runtime" - "strings" - "unicode" -) - -const fileScheme = "file" - -// URI represents the full URI for a file. -type URI string - -func (uri URI) IsFile() bool { - return strings.HasPrefix(string(uri), "file://") -} - -// Filename returns the file path for the given URI. -// It is an error to call this on a URI that is not a valid filename. -func (uri URI) Filename() string { - filename, err := filename(uri) - if err != nil { - panic(err) - } - return filepath.FromSlash(filename) -} - -func filename(uri URI) (string, error) { - if uri == "" { - return "", nil - } - u, err := url.ParseRequestURI(string(uri)) - if err != nil { - return "", err - } - if u.Scheme != fileScheme { - return "", fmt.Errorf("only file URIs are supported, got %q from %q", u.Scheme, uri) - } - // If the URI is a Windows URI, we trim the leading "/" and lowercase - // the drive letter, which will never be case sensitive. - if isWindowsDriveURIPath(u.Path) { - u.Path = strings.ToUpper(string(u.Path[1])) + u.Path[2:] - } - return u.Path, nil -} - -func URIFromURI(s string) URI { - if !strings.HasPrefix(s, "file://") { - return URI(s) - } - - if !strings.HasPrefix(s, "file:///") { - // VS Code sends URLs with only two slashes, which are invalid. golang/go#39789. - s = "file:///" + s[len("file://"):] - } - // Even though the input is a URI, it may not be in canonical form. VS Code - // in particular over-escapes :, @, etc. Unescape and re-encode to canonicalize. 
- path, err := url.PathUnescape(s[len("file://"):]) - if err != nil { - panic(err) - } - - // File URIs from Windows may have lowercase drive letters. - // Since drive letters are guaranteed to be case insensitive, - // we change them to uppercase to remain consistent. - // For example, file:///c:/x/y/z becomes file:///C:/x/y/z. - if isWindowsDriveURIPath(path) { - path = path[:1] + strings.ToUpper(string(path[1])) + path[2:] - } - u := url.URL{Scheme: fileScheme, Path: path} - return URI(u.String()) -} - -func CompareURI(a, b URI) int { - if equalURI(a, b) { - return 0 - } - if a < b { - return -1 - } - return 1 -} - -func equalURI(a, b URI) bool { - if a == b { - return true - } - // If we have the same URI basename, we may still have the same file URIs. - if !strings.EqualFold(path.Base(string(a)), path.Base(string(b))) { - return false - } - fa, err := filename(a) - if err != nil { - return false - } - fb, err := filename(b) - if err != nil { - return false - } - // Stat the files to check if they are equal. - infoa, err := os.Stat(filepath.FromSlash(fa)) - if err != nil { - return false - } - infob, err := os.Stat(filepath.FromSlash(fb)) - if err != nil { - return false - } - return os.SameFile(infoa, infob) -} - -// URIFromPath returns a span URI for the supplied file path. -// It will always have the file scheme. -func URIFromPath(path string) URI { - if path == "" { - return "" - } - // Handle standard library paths that contain the literal "$GOROOT". - // TODO(rstambler): The go/packages API should allow one to determine a user's $GOROOT. - const prefix = "$GOROOT" - if len(path) >= len(prefix) && strings.EqualFold(prefix, path[:len(prefix)]) { - suffix := path[len(prefix):] - path = runtime.GOROOT() + suffix - } - if !isWindowsDrivePath(path) { - if abs, err := filepath.Abs(path); err == nil { - path = abs - } - } - // Check the file path again, in case it became absolute. - if isWindowsDrivePath(path) { - path = "/" + strings.ToUpper(string(path[0])) + path[1:] - } - path = filepath.ToSlash(path) - u := url.URL{ - Scheme: fileScheme, - Path: path, - } - return URI(u.String()) -} - -// isWindowsDrivePath returns true if the file path is of the form used by -// Windows. We check if the path begins with a drive letter, followed by a ":". -// For example: C:/x/y/z. -func isWindowsDrivePath(path string) bool { - if len(path) < 3 { - return false - } - return unicode.IsLetter(rune(path[0])) && path[1] == ':' -} - -// isWindowsDriveURI returns true if the file URI is of the format used by -// Windows URIs. The url.Parse package does not specially handle Windows paths -// (see golang/go#6027), so we check if the URI path has a drive prefix (e.g. "/C:"). -func isWindowsDriveURIPath(uri string) bool { - if len(uri) < 4 { - return false - } - return uri[0] == '/' && unicode.IsLetter(rune(uri[1])) && uri[2] == ':' -} diff --git a/vendor/github.com/hexops/gotextdiff/span/utf16.go b/vendor/github.com/hexops/gotextdiff/span/utf16.go deleted file mode 100644 index f06a2468b..000000000 --- a/vendor/github.com/hexops/gotextdiff/span/utf16.go +++ /dev/null @@ -1,91 +0,0 @@ -// Copyright 2019 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package span - -import ( - "fmt" - "unicode/utf16" - "unicode/utf8" -) - -// ToUTF16Column calculates the utf16 column expressed by the point given the -// supplied file contents. 
-// This is used to convert from the native (always in bytes) column -// representation and the utf16 counts used by some editors. -func ToUTF16Column(p Point, content []byte) (int, error) { - if !p.HasPosition() { - return -1, fmt.Errorf("ToUTF16Column: point is missing position") - } - if !p.HasOffset() { - return -1, fmt.Errorf("ToUTF16Column: point is missing offset") - } - offset := p.Offset() // 0-based - colZero := p.Column() - 1 // 0-based - if colZero == 0 { - // 0-based column 0, so it must be chr 1 - return 1, nil - } else if colZero < 0 { - return -1, fmt.Errorf("ToUTF16Column: column is invalid (%v)", colZero) - } - // work out the offset at the start of the line using the column - lineOffset := offset - colZero - if lineOffset < 0 || offset > len(content) { - return -1, fmt.Errorf("ToUTF16Column: offsets %v-%v outside file contents (%v)", lineOffset, offset, len(content)) - } - // Use the offset to pick out the line start. - // This cannot panic: offset > len(content) and lineOffset < offset. - start := content[lineOffset:] - - // Now, truncate down to the supplied column. - start = start[:colZero] - - // and count the number of utf16 characters - // in theory we could do this by hand more efficiently... - return len(utf16.Encode([]rune(string(start)))) + 1, nil -} - -// FromUTF16Column advances the point by the utf16 character offset given the -// supplied line contents. -// This is used to convert from the utf16 counts used by some editors to the -// native (always in bytes) column representation. -func FromUTF16Column(p Point, chr int, content []byte) (Point, error) { - if !p.HasOffset() { - return Point{}, fmt.Errorf("FromUTF16Column: point is missing offset") - } - // if chr is 1 then no adjustment needed - if chr <= 1 { - return p, nil - } - if p.Offset() >= len(content) { - return p, fmt.Errorf("FromUTF16Column: offset (%v) greater than length of content (%v)", p.Offset(), len(content)) - } - remains := content[p.Offset():] - // scan forward the specified number of characters - for count := 1; count < chr; count++ { - if len(remains) <= 0 { - return Point{}, fmt.Errorf("FromUTF16Column: chr goes beyond the content") - } - r, w := utf8.DecodeRune(remains) - if r == '\n' { - // Per the LSP spec: - // - // > If the character value is greater than the line length it - // > defaults back to the line length. - break - } - remains = remains[w:] - if r >= 0x10000 { - // a two point rune - count++ - // if we finished in a two point rune, do not advance past the first - if count >= chr { - break - } - } - p.v.Column += w - p.v.Offset += w - } - return p, nil -} diff --git a/vendor/github.com/hexops/gotextdiff/unified.go b/vendor/github.com/hexops/gotextdiff/unified.go deleted file mode 100644 index b7d85cfcc..000000000 --- a/vendor/github.com/hexops/gotextdiff/unified.go +++ /dev/null @@ -1,210 +0,0 @@ -// Copyright 2019 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package gotextdiff - -import ( - "fmt" - "strings" -) - -// Unified represents a set of edits as a unified diff. -type Unified struct { - // From is the name of the original file. - From string - // To is the name of the modified file. - To string - // Hunks is the set of edit hunks needed to transform the file content. - Hunks []*Hunk -} - -// Hunk represents a contiguous set of line edits to apply. -type Hunk struct { - // The line in the original source where the hunk starts. 
- FromLine int - // The line in the original source where the hunk finishes. - ToLine int - // The set of line based edits to apply. - Lines []Line -} - -// Line represents a single line operation to apply as part of a Hunk. -type Line struct { - // Kind is the type of line this represents, deletion, insertion or copy. - Kind OpKind - // Content is the content of this line. - // For deletion it is the line being removed, for all others it is the line - // to put in the output. - Content string -} - -// OpKind is used to denote the type of operation a line represents. -type OpKind int - -const ( - // Delete is the operation kind for a line that is present in the input - // but not in the output. - Delete OpKind = iota - // Insert is the operation kind for a line that is new in the output. - Insert - // Equal is the operation kind for a line that is the same in the input and - // output, often used to provide context around edited lines. - Equal -) - -// String returns a human readable representation of an OpKind. It is not -// intended for machine processing. -func (k OpKind) String() string { - switch k { - case Delete: - return "delete" - case Insert: - return "insert" - case Equal: - return "equal" - default: - panic("unknown operation kind") - } -} - -const ( - edge = 3 - gap = edge * 2 -) - -// ToUnified takes a file contents and a sequence of edits, and calculates -// a unified diff that represents those edits. -func ToUnified(from, to string, content string, edits []TextEdit) Unified { - u := Unified{ - From: from, - To: to, - } - if len(edits) == 0 { - return u - } - c, edits, partial := prepareEdits(content, edits) - if partial { - edits = lineEdits(content, c, edits) - } - lines := splitLines(content) - var h *Hunk - last := 0 - toLine := 0 - for _, edit := range edits { - start := edit.Span.Start().Line() - 1 - end := edit.Span.End().Line() - 1 - switch { - case h != nil && start == last: - //direct extension - case h != nil && start <= last+gap: - //within range of previous lines, add the joiners - addEqualLines(h, lines, last, start) - default: - //need to start a new hunk - if h != nil { - // add the edge to the previous hunk - addEqualLines(h, lines, last, last+edge) - u.Hunks = append(u.Hunks, h) - } - toLine += start - last - h = &Hunk{ - FromLine: start + 1, - ToLine: toLine + 1, - } - // add the edge to the new hunk - delta := addEqualLines(h, lines, start-edge, start) - h.FromLine -= delta - h.ToLine -= delta - } - last = start - for i := start; i < end; i++ { - h.Lines = append(h.Lines, Line{Kind: Delete, Content: lines[i]}) - last++ - } - if edit.NewText != "" { - for _, line := range splitLines(edit.NewText) { - h.Lines = append(h.Lines, Line{Kind: Insert, Content: line}) - toLine++ - } - } - } - if h != nil { - // add the edge to the final hunk - addEqualLines(h, lines, last, last+edge) - u.Hunks = append(u.Hunks, h) - } - return u -} - -func splitLines(text string) []string { - lines := strings.SplitAfter(text, "\n") - if lines[len(lines)-1] == "" { - lines = lines[:len(lines)-1] - } - return lines -} - -func addEqualLines(h *Hunk, lines []string, start, end int) int { - delta := 0 - for i := start; i < end; i++ { - if i < 0 { - continue - } - if i >= len(lines) { - return delta - } - h.Lines = append(h.Lines, Line{Kind: Equal, Content: lines[i]}) - delta++ - } - return delta -} - -// Format converts a unified diff to the standard textual form for that diff. -// The output of this function can be passed to tools like patch. 
-func (u Unified) Format(f fmt.State, r rune) { - if len(u.Hunks) == 0 { - return - } - fmt.Fprintf(f, "--- %s\n", u.From) - fmt.Fprintf(f, "+++ %s\n", u.To) - for _, hunk := range u.Hunks { - fromCount, toCount := 0, 0 - for _, l := range hunk.Lines { - switch l.Kind { - case Delete: - fromCount++ - case Insert: - toCount++ - default: - fromCount++ - toCount++ - } - } - fmt.Fprint(f, "@@") - if fromCount > 1 { - fmt.Fprintf(f, " -%d,%d", hunk.FromLine, fromCount) - } else { - fmt.Fprintf(f, " -%d", hunk.FromLine) - } - if toCount > 1 { - fmt.Fprintf(f, " +%d,%d", hunk.ToLine, toCount) - } else { - fmt.Fprintf(f, " +%d", hunk.ToLine) - } - fmt.Fprint(f, " @@\n") - for _, l := range hunk.Lines { - switch l.Kind { - case Delete: - fmt.Fprintf(f, "-%s", l.Content) - case Insert: - fmt.Fprintf(f, "+%s", l.Content) - default: - fmt.Fprintf(f, " %s", l.Content) - } - if !strings.HasSuffix(l.Content, "\n") { - fmt.Fprintf(f, "\n\\ No newline at end of file\n") - } - } - } -} diff --git a/vendor/github.com/jgautheron/goconst/.gitignore b/vendor/github.com/jgautheron/goconst/.gitignore deleted file mode 100644 index a9d34d9c4..000000000 --- a/vendor/github.com/jgautheron/goconst/.gitignore +++ /dev/null @@ -1,2 +0,0 @@ -.idea -goconst \ No newline at end of file diff --git a/vendor/github.com/jgautheron/goconst/LICENSE b/vendor/github.com/jgautheron/goconst/LICENSE deleted file mode 100644 index e92649543..000000000 --- a/vendor/github.com/jgautheron/goconst/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -The MIT License (MIT) - -Copyright (c) 2015 Jonathan Gautheron - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. diff --git a/vendor/github.com/jgautheron/goconst/README.md b/vendor/github.com/jgautheron/goconst/README.md deleted file mode 100644 index c671eb541..000000000 --- a/vendor/github.com/jgautheron/goconst/README.md +++ /dev/null @@ -1,51 +0,0 @@ -# goconst - -Find repeated strings that could be replaced by a constant. - -### Motivation - -There are obvious benefits to using constants instead of repeating strings, mostly to ease maintenance. Cannot argue against changing a single constant versus many strings. - -While this could be considered a beginner mistake, across time, multiple packages and large codebases, some repetition could have slipped in. - -### Get Started - - $ go get github.com/jgautheron/goconst/cmd/goconst - $ goconst ./... 
- -### Usage - -``` -Usage: - - goconst ARGS - -Flags: - - -ignore exclude files matching the given regular expression - -ignore-strings exclude strings matching the given regular expression - -ignore-tests exclude tests from the search (default: true) - -min-occurrences report from how many occurrences (default: 2) - -min-length only report strings with the minimum given length (default: 3) - -match-constant look for existing constants matching the values - -numbers search also for duplicated numbers - -min minimum value, only works with -numbers - -max maximum value, only works with -numbers - -output output formatting (text or json) - -set-exit-status Set exit status to 2 if any issues are found - -Examples: - - goconst ./... - goconst -ignore "yacc|\.pb\." $GOPATH/src/github.com/cockroachdb/cockroach/... - goconst -min-occurrences 3 -output json $GOPATH/src/github.com/cockroachdb/cockroach - goconst -numbers -min 60 -max 512 . -``` - -### Other static analysis tools - -- [gogetimports](https://github.com/jgautheron/gogetimports): Get a JSON-formatted list of imports. -- [usedexports](https://github.com/jgautheron/usedexports): Find exported variables that could be unexported. - -### License -MIT diff --git a/vendor/github.com/jgautheron/goconst/api.go b/vendor/github.com/jgautheron/goconst/api.go deleted file mode 100644 index b838e035f..000000000 --- a/vendor/github.com/jgautheron/goconst/api.go +++ /dev/null @@ -1,76 +0,0 @@ -package goconst - -import ( - "go/ast" - "go/token" - "strings" -) - -type Issue struct { - Pos token.Position - OccurrencesCount int - Str string - MatchingConst string -} - -type Config struct { - IgnoreStrings string - IgnoreTests bool - MatchWithConstants bool - MinStringLength int - MinOccurrences int - ParseNumbers bool - NumberMin int - NumberMax int - ExcludeTypes map[Type]bool -} - -func Run(files []*ast.File, fset *token.FileSet, cfg *Config) ([]Issue, error) { - p := New( - "", - "", - cfg.IgnoreStrings, - cfg.IgnoreTests, - cfg.MatchWithConstants, - cfg.ParseNumbers, - cfg.NumberMin, - cfg.NumberMax, - cfg.MinStringLength, - cfg.MinOccurrences, - cfg.ExcludeTypes, - ) - var issues []Issue - for _, f := range files { - if p.ignoreTests { - if filename := fset.Position(f.Pos()).Filename; strings.HasSuffix(filename, testSuffix) { - continue - } - } - ast.Walk(&treeVisitor{ - fileSet: fset, - packageName: "", - fileName: "", - p: p, - }, f) - } - p.ProcessResults() - - for str, item := range p.strs { - fi := item[0] - i := Issue{ - Pos: fi.Position, - OccurrencesCount: len(item), - Str: str, - } - - if len(p.consts) != 0 { - if cst, ok := p.consts[str]; ok { - // const should be in the same package and exported - i.MatchingConst = cst.Name - } - } - issues = append(issues, i) - } - - return issues, nil -} diff --git a/vendor/github.com/jgautheron/goconst/parser.go b/vendor/github.com/jgautheron/goconst/parser.go deleted file mode 100644 index 2f32740b9..000000000 --- a/vendor/github.com/jgautheron/goconst/parser.go +++ /dev/null @@ -1,187 +0,0 @@ -// Package goconst finds repeated strings that could be replaced by a constant. -// -// There are obvious benefits to using constants instead of repeating strings, -// mostly to ease maintenance. Cannot argue against changing a single constant versus many strings. -// While this could be considered a beginner mistake, across time, -// multiple packages and large codebases, some repetition could have slipped in. 
-package goconst - -import ( - "go/ast" - "go/parser" - "go/token" - "log" - "os" - "path/filepath" - "regexp" - "strconv" - "strings" -) - -const ( - testSuffix = "_test.go" -) - -type Parser struct { - // Meant to be passed via New() - path, ignore, ignoreStrings string - ignoreTests, matchConstant bool - minLength, minOccurrences int - numberMin, numberMax int - excludeTypes map[Type]bool - - supportedTokens []token.Token - - // Internals - strs Strings - consts Constants -} - -// New creates a new instance of the parser. -// This is your entry point if you'd like to use goconst as an API. -func New(path, ignore, ignoreStrings string, ignoreTests, matchConstant, numbers bool, numberMin, numberMax, minLength, minOccurrences int, excludeTypes map[Type]bool) *Parser { - supportedTokens := []token.Token{token.STRING} - if numbers { - supportedTokens = append(supportedTokens, token.INT, token.FLOAT) - } - - return &Parser{ - path: path, - ignore: ignore, - ignoreStrings: ignoreStrings, - ignoreTests: ignoreTests, - matchConstant: matchConstant, - minLength: minLength, - minOccurrences: minOccurrences, - numberMin: numberMin, - numberMax: numberMax, - supportedTokens: supportedTokens, - excludeTypes: excludeTypes, - - // Initialize the maps - strs: Strings{}, - consts: Constants{}, - } -} - -// ParseTree will search the given path for occurrences that could be moved into constants. -// If "..." is appended, the search will be recursive. -func (p *Parser) ParseTree() (Strings, Constants, error) { - pathLen := len(p.path) - // Parse recursively the given path if the recursive notation is found - if pathLen >= 5 && p.path[pathLen-3:] == "..." { - filepath.Walk(p.path[:pathLen-3], func(path string, f os.FileInfo, err error) error { - if err != nil { - log.Println(err) - // resume walking - return nil - } - - if f.IsDir() { - p.parseDir(path) - } - return nil - }) - } else { - p.parseDir(p.path) - } - - p.ProcessResults() - - return p.strs, p.consts, nil -} - -// ProcessResults post-processes the raw results. 
-func (p *Parser) ProcessResults() { - for str, item := range p.strs { - // Filter out items whose occurrences don't match the min value - if len(item) < p.minOccurrences { - delete(p.strs, str) - } - - if p.ignoreStrings != "" { - match, err := regexp.MatchString(p.ignoreStrings, str) - if err != nil { - log.Println(err) - } - if match { - delete(p.strs, str) - } - } - - // If the value is a number - if i, err := strconv.ParseInt(str, 0, 0); err == nil { - if p.numberMin != 0 && i < int64(p.numberMin) { - delete(p.strs, str) - } - if p.numberMax != 0 && i > int64(p.numberMax) { - delete(p.strs, str) - } - } - } -} - -func (p *Parser) parseDir(dir string) error { - fset := token.NewFileSet() - pkgs, err := parser.ParseDir(fset, dir, func(info os.FileInfo) bool { - valid, name := true, info.Name() - - if p.ignoreTests { - if strings.HasSuffix(name, testSuffix) { - valid = false - } - } - - if len(p.ignore) != 0 { - match, err := regexp.MatchString(p.ignore, dir+name) - if err != nil { - log.Fatal(err) - return true - } - if match { - valid = false - } - } - - return valid - }, 0) - if err != nil { - return err - } - - for _, pkg := range pkgs { - for fn, f := range pkg.Files { - ast.Walk(&treeVisitor{ - fileSet: fset, - packageName: pkg.Name, - fileName: fn, - p: p, - }, f) - } - } - - return nil -} - -type Strings map[string][]ExtendedPos -type Constants map[string]ConstType - -type ConstType struct { - token.Position - Name, packageName string -} - -type ExtendedPos struct { - token.Position - packageName string -} - -type Type int - -const ( - Assignment Type = iota - Binary - Case - Return - Call -) diff --git a/vendor/github.com/jgautheron/goconst/visitor.go b/vendor/github.com/jgautheron/goconst/visitor.go deleted file mode 100644 index a553814f5..000000000 --- a/vendor/github.com/jgautheron/goconst/visitor.go +++ /dev/null @@ -1,156 +0,0 @@ -package goconst - -import ( - "go/ast" - "go/token" - "strconv" - "strings" -) - -// treeVisitor carries the package name and file name -// for passing it to the imports map, and the fileSet for -// retrieving the token.Position. -type treeVisitor struct { - p *Parser - fileSet *token.FileSet - packageName, fileName string -} - -// Visit browses the AST tree for strings that could be potentially -// replaced by constants. -// A map of existing constants is built as well (-match-constant). -func (v *treeVisitor) Visit(node ast.Node) ast.Visitor { - if node == nil { - return v - } - - // A single case with "ast.BasicLit" would be much easier - // but then we wouldn't be able to tell in which context - // the string is defined (could be a constant definition). 
- switch t := node.(type) { - // Scan for constants in an attempt to match strings with existing constants - case *ast.GenDecl: - if !v.p.matchConstant { - return v - } - if t.Tok != token.CONST { - return v - } - - for _, spec := range t.Specs { - val := spec.(*ast.ValueSpec) - for i, str := range val.Values { - lit, ok := str.(*ast.BasicLit) - if !ok || !v.isSupported(lit.Kind) { - continue - } - - v.addConst(val.Names[i].Name, lit.Value, val.Names[i].Pos()) - } - } - - // foo := "moo" - case *ast.AssignStmt: - for _, rhs := range t.Rhs { - lit, ok := rhs.(*ast.BasicLit) - if !ok || !v.isSupported(lit.Kind) { - continue - } - - v.addString(lit.Value, rhs.(*ast.BasicLit).Pos(), Assignment) - } - - // if foo == "moo" - case *ast.BinaryExpr: - var lit *ast.BasicLit - var ok bool - - lit, ok = t.X.(*ast.BasicLit) - if ok && v.isSupported(lit.Kind) { - v.addString(lit.Value, lit.Pos(), Binary) - } - - lit, ok = t.Y.(*ast.BasicLit) - if ok && v.isSupported(lit.Kind) { - v.addString(lit.Value, lit.Pos(), Binary) - } - - // case "foo": - case *ast.CaseClause: - for _, item := range t.List { - lit, ok := item.(*ast.BasicLit) - if ok && v.isSupported(lit.Kind) { - v.addString(lit.Value, lit.Pos(), Case) - } - } - - // return "boo" - case *ast.ReturnStmt: - for _, item := range t.Results { - lit, ok := item.(*ast.BasicLit) - if ok && v.isSupported(lit.Kind) { - v.addString(lit.Value, lit.Pos(), Return) - } - } - - // fn("http://") - case *ast.CallExpr: - for _, item := range t.Args { - lit, ok := item.(*ast.BasicLit) - if ok && v.isSupported(lit.Kind) { - v.addString(lit.Value, lit.Pos(), Call) - } - } - } - - return v -} - -// addString adds a string in the map along with its position in the tree. -func (v *treeVisitor) addString(str string, pos token.Pos, typ Type) { - ok, excluded := v.p.excludeTypes[typ] - if ok && excluded { - return - } - // Drop quotes if any - if strings.HasPrefix(str, `"`) || strings.HasPrefix(str, "`") { - str, _ = strconv.Unquote(str) - } - - // Ignore empty strings - if len(str) == 0 { - return - } - - if len(str) < v.p.minLength { - return - } - - _, ok = v.p.strs[str] - if !ok { - v.p.strs[str] = make([]ExtendedPos, 0) - } - v.p.strs[str] = append(v.p.strs[str], ExtendedPos{ - packageName: v.packageName, - Position: v.fileSet.Position(pos), - }) -} - -// addConst adds a const in the map along with its position in the tree. 
-func (v *treeVisitor) addConst(name string, val string, pos token.Pos) { - val = strings.Replace(val, `"`, "", 2) - v.p.consts[val] = ConstType{ - Name: name, - packageName: v.packageName, - Position: v.fileSet.Position(pos), - } -} - -func (v *treeVisitor) isSupported(tk token.Token) bool { - for _, s := range v.p.supportedTokens { - if tk == s { - return true - } - } - return false -} diff --git a/vendor/github.com/jingyugao/rowserrcheck/LICENSE b/vendor/github.com/jingyugao/rowserrcheck/LICENSE deleted file mode 100644 index 6957f1889..000000000 --- a/vendor/github.com/jingyugao/rowserrcheck/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -MIT License - -Copyright (c) 2019 Seiji Takahashi - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. diff --git a/vendor/github.com/jingyugao/rowserrcheck/passes/rowserr/rowserr.go b/vendor/github.com/jingyugao/rowserrcheck/passes/rowserr/rowserr.go deleted file mode 100644 index a142a6744..000000000 --- a/vendor/github.com/jingyugao/rowserrcheck/passes/rowserr/rowserr.go +++ /dev/null @@ -1,304 +0,0 @@ -package rowserr - -import ( - "go/ast" - "go/types" - - "golang.org/x/tools/go/analysis" - "golang.org/x/tools/go/analysis/passes/buildssa" - "golang.org/x/tools/go/ssa" -) - -func NewAnalyzer(sqlPkgs ...string) *analysis.Analyzer { - return &analysis.Analyzer{ - Name: "rowserrcheck", - Doc: Doc, - Run: NewRun(sqlPkgs...), - Requires: []*analysis.Analyzer{ - buildssa.Analyzer, - }, - } -} - -const ( - Doc = "rowserrcheck checks whether Rows.Err is checked" - errMethod = "Err" - rowsName = "Rows" -) - -type runner struct { - pass *analysis.Pass - rowsTyp *types.Pointer - rowsInterface *types.Interface - rowsObj types.Object - skipFile map[*ast.File]bool - sqlPkgs []string -} - -func NewRun(pkgs ...string) func(pass *analysis.Pass) (interface{}, error) { - return func(pass *analysis.Pass) (interface{}, error) { - sqlPkgs := append(pkgs, "database/sql") - for _, pkg := range sqlPkgs { - r := new(runner) - r.sqlPkgs = sqlPkgs - r.run(pass, pkg) - } - return nil, nil - } -} - -// run executes an analysis for the pass. The receiver is passed -// by value because this func is called in parallel for different passes. 
-func (r runner) run(pass *analysis.Pass, pkgPath string) { - r.pass = pass - pssa := pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA) - funcs := pssa.SrcFuncs - - pkg := pssa.Pkg.Prog.ImportedPackage(pkgPath) - if pkg == nil { - // skip - return - } - - rowsType := pkg.Type(rowsName) - if rowsType == nil { - // skip checking - return - } - r.rowsObj = rowsType.Object() - if r.rowsObj == nil { - // skip checking - return - } - - resNamed, ok := r.rowsObj.Type().(*types.Named) - if !ok { - return - } - - rowsInterface, ok := r.rowsObj.Type().Underlying().(*types.Interface) - if ok { - r.rowsInterface = rowsInterface - } - - r.rowsTyp = types.NewPointer(resNamed) - r.skipFile = map[*ast.File]bool{} - - for _, f := range funcs { - // skip if the function is just referenced - var isRefFunc bool - - for i := 0; i < f.Signature.Results().Len(); i++ { - if types.Identical(f.Signature.Results().At(i).Type(), r.rowsTyp) { - isRefFunc = true - } - } - - if isRefFunc { - continue - } - - for _, b := range f.Blocks { - for i := range b.Instrs { - if r.errCallMissing(b, i) { - pass.Reportf(b.Instrs[i].Pos(), "rows.Err must be checked") - } - } - } - } -} - -func (r *runner) errCallMissing(b *ssa.BasicBlock, i int) (ret bool) { - call, ok := r.getCallReturnsRow(b.Instrs[i]) - if !ok { - return false - } - - for _, cRef := range *call.Referrers() { - val, ok := r.getRowsVal(cRef) - if !ok { - continue - } - if len(*val.Referrers()) == 0 { - continue - } - resRefs := *val.Referrers() - var errCalled func(resRef ssa.Instruction) bool - errCalled = func(resRef ssa.Instruction) bool { - switch resRef := resRef.(type) { - case *ssa.Phi: - for _, rf := range *resRef.Referrers() { - if errCalled(rf) { - return true - } - } - case *ssa.Store: // Call in Closure function - for _, aref := range *resRef.Addr.Referrers() { - switch c := aref.(type) { - case *ssa.MakeClosure: - f := c.Fn.(*ssa.Function) - called := r.isClosureCalled(c) - if r.calledInFunc(f, called) { - return true - } - case *ssa.UnOp: - for _, rf := range *c.Referrers() { - if errCalled(rf) { - return true - } - } - } - } - case *ssa.Call: // Indirect function call - if r.isErrCall(resRef) { - return true - } - if f, ok := resRef.Call.Value.(*ssa.Function); ok { - for _, b := range f.Blocks { - for i := range b.Instrs { - if !r.errCallMissing(b, i) { - return true - } - } - } - } - case *ssa.FieldAddr: - for _, bRef := range *resRef.Referrers() { - bOp, ok := r.getBodyOp(bRef) - if !ok { - continue - } - - for _, ccall := range *bOp.Referrers() { - if r.isErrCall(ccall) { - return true - } - } - } - } - - return false - } - - for _, resRef := range resRefs { - if errCalled(resRef) { - return false - } - } - } - - return true -} - -func (r *runner) getCallReturnsRow(instr ssa.Instruction) (*ssa.Call, bool) { - call, ok := instr.(*ssa.Call) - if !ok { - return nil, false - } - - res := call.Call.Signature().Results() - - for i := 0; i < res.Len(); i++ { - typeToCheck := res.At(i).Type() - if types.Identical(typeToCheck, r.rowsTyp) { - return call, true - } - if r.rowsInterface != nil && types.Implements(typeToCheck, r.rowsInterface) { - return call, true - } - } - - return nil, false -} - -func (r *runner) getRowsVal(instr ssa.Instruction) (ssa.Value, bool) { - switch instr := instr.(type) { - case *ssa.Call: - if len(instr.Call.Args) == 1 && types.Identical(instr.Call.Args[0].Type(), r.rowsTyp) { - return instr.Call.Args[0], true - } - if len(instr.Call.Args) == 1 && r.rowsInterface != nil && types.Implements(instr.Call.Args[0].Type(), 
r.rowsInterface) { - return instr.Call.Args[0], true - } - case ssa.Value: - if types.Identical(instr.Type(), r.rowsTyp) { - return instr, true - } - if r.rowsInterface != nil && types.Implements(instr.Type(), r.rowsInterface) { - return instr, true - } - default: - } - - return nil, false -} - -func (r *runner) getBodyOp(instr ssa.Instruction) (*ssa.UnOp, bool) { - op, ok := instr.(*ssa.UnOp) - if !ok { - return nil, false - } - // fix: try to check type - // if op.Type() != r.rowsObj.Type() { - // return nil, false - // } - return op, true -} - -func (r *runner) isErrCall(ccall ssa.Instruction) bool { - switch ccall := ccall.(type) { - case *ssa.Defer: - if ccall.Call.Value != nil && ccall.Call.Value.Name() == errMethod { - return true - } - if ccall.Call.Method != nil && ccall.Call.Method.Name() == errMethod { - return true - } - case *ssa.Call: - if ccall.Call.Value != nil && ccall.Call.Value.Name() == errMethod { - return true - } - if ccall.Call.Method != nil && ccall.Call.Method.Name() == errMethod { - return true - } - } - - return false -} - -func (r *runner) isClosureCalled(c *ssa.MakeClosure) bool { - for _, ref := range *c.Referrers() { - switch ref.(type) { - case *ssa.Call, *ssa.Defer: - return true - } - } - - return false -} - -func (r *runner) calledInFunc(f *ssa.Function, called bool) bool { - for _, b := range f.Blocks { - for i, instr := range b.Instrs { - switch instr := instr.(type) { - case *ssa.UnOp: - for _, ref := range *instr.Referrers() { - if v, ok := ref.(ssa.Value); ok { - if vCall, ok := v.(*ssa.Call); ok { - if vCall.Call.Value != nil && vCall.Call.Value.Name() == errMethod { - if called { - return true - } - } - } - } - } - default: - if r.errCallMissing(b, i) || !called { - return false - } - } - } - } - return false -} diff --git a/vendor/github.com/jirfag/go-printf-func-name/LICENSE b/vendor/github.com/jirfag/go-printf-func-name/LICENSE deleted file mode 100644 index d06a809c2..000000000 --- a/vendor/github.com/jirfag/go-printf-func-name/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -MIT License - -Copyright (c) 2020 Isaev Denis - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. 
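The rowserr analyzer deleted above (vendor/github.com/jingyugao/rowserrcheck/passes/rowserr/rowserr.go) documents itself as checking "whether Rows.Err is checked". As a minimal sketch of the pattern that check enforces, using only the standard database/sql package: the query, column, and function names below are placeholders for illustration and are not taken from the vendored sources.

```go
package example

import "database/sql"

// listNames iterates a result set and checks rows.Err afterwards, which is
// the pattern the removed rowserrcheck analyzer enforces. The query and
// column here are placeholders used only for illustration.
func listNames(db *sql.DB) ([]string, error) {
	rows, err := db.Query("SELECT name FROM users")
	if err != nil {
		return nil, err
	}
	defer rows.Close()

	var names []string
	for rows.Next() {
		var name string
		if err := rows.Scan(&name); err != nil {
			return nil, err
		}
		names = append(names, name)
	}
	// Omitting this check silently drops any error that interrupted
	// iteration; the analyzer reports it as "rows.Err must be checked".
	if err := rows.Err(); err != nil {
		return nil, err
	}
	return names, nil
}
```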
diff --git a/vendor/github.com/jirfag/go-printf-func-name/pkg/analyzer/analyzer.go b/vendor/github.com/jirfag/go-printf-func-name/pkg/analyzer/analyzer.go deleted file mode 100644 index 7937dd433..000000000 --- a/vendor/github.com/jirfag/go-printf-func-name/pkg/analyzer/analyzer.go +++ /dev/null @@ -1,74 +0,0 @@ -package analyzer - -import ( - "go/ast" - "strings" - - "golang.org/x/tools/go/analysis/passes/inspect" - "golang.org/x/tools/go/ast/inspector" - - "golang.org/x/tools/go/analysis" -) - -var Analyzer = &analysis.Analyzer{ - Name: "goprintffuncname", - Doc: "Checks that printf-like functions are named with `f` at the end.", - Run: run, - Requires: []*analysis.Analyzer{inspect.Analyzer}, -} - -func run(pass *analysis.Pass) (interface{}, error) { - inspector := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) - nodeFilter := []ast.Node{ - (*ast.FuncDecl)(nil), - } - - inspector.Preorder(nodeFilter, func(node ast.Node) { - funcDecl := node.(*ast.FuncDecl) - - if res := funcDecl.Type.Results; res != nil && len(res.List) != 0 { - return - } - - params := funcDecl.Type.Params.List - if len(params) < 2 { // [0] must be format (string), [1] must be args (...interface{}) - return - } - - formatParamType, ok := params[len(params)-2].Type.(*ast.Ident) - if !ok { // first param type isn't identificator so it can't be of type "string" - return - } - - if formatParamType.Name != "string" { // first param (format) type is not string - return - } - - if formatParamNames := params[len(params)-2].Names; len(formatParamNames) == 0 || formatParamNames[len(formatParamNames)-1].Name != "format" { - return - } - - argsParamType, ok := params[len(params)-1].Type.(*ast.Ellipsis) - if !ok { // args are not ellipsis (...args) - return - } - - elementType, ok := argsParamType.Elt.(*ast.InterfaceType) - if !ok { // args are not of interface type, but we need interface{} - return - } - - if elementType.Methods != nil && len(elementType.Methods.List) != 0 { - return // has >= 1 method in interface, but we need an empty interface "interface{}" - } - - if strings.HasSuffix(funcDecl.Name.Name, "f") { - return - } - - pass.Reportf(node.Pos(), "printf-like formatting function '%s' should be named '%sf'", - funcDecl.Name.Name, funcDecl.Name.Name) - }) - - return nil, nil -} diff --git a/vendor/github.com/jjti/go-spancheck/.gitignore b/vendor/github.com/jjti/go-spancheck/.gitignore deleted file mode 100644 index 1f83be414..000000000 --- a/vendor/github.com/jjti/go-spancheck/.gitignore +++ /dev/null @@ -1,19 +0,0 @@ -# If you prefer the allow list template instead of the deny list, see community template: -# https://github.com/github/gitignore/blob/main/community/Golang/Go.AllowList.gitignore -# -# Binaries for programs and plugins -*.exe -*.exe~ -*.dll -*.so -*.dylib - -# Test binary, built with `go test -c` -*.test - -# Output of the go coverage tool, specifically when used with LiteIDE -*.out - -# Dependency directories (remove the comment below to include it) -# vendor/ -src/ diff --git a/vendor/github.com/jjti/go-spancheck/.golangci.yml b/vendor/github.com/jjti/go-spancheck/.golangci.yml deleted file mode 100644 index 15d8513d6..000000000 --- a/vendor/github.com/jjti/go-spancheck/.golangci.yml +++ /dev/null @@ -1,103 +0,0 @@ -## A good ref for this: https://gist.github.com/maratori/47a4d00457a92aa426dbd48a18776322 - -run: - timeout: 5m - tests: true -linters: - enable: - - asasalint # checks for pass []any as any in variadic func(...any) - - asciicheck # checks that your code does not contain non-ASCII 
identifiers - - bidichk # checks for dangerous unicode character sequences - - bodyclose - - containedctx - - decorder # checks declaration order and count of types, constants, variables and functions - - dogsled - - dupword # checks for duplicate words in the source code - - durationcheck # checks for two durations multiplied together - - errcheck - - errname - - errorlint - - exhaustive # checks exhaustiveness of enum switch statements - - exportloopref # checks for pointers to enclosing loop variables - - gci - - gochecknoinits # checks that no init functions are present in Go code - - gocritic - - gomnd - - gosimple - - govet - - importas # enforces consistent import aliases - - ineffassign - - loggercheck - - makezero # finds slice declarations with non-zero initial length - - mirror - - misspell - - musttag # enforces field tags in (un)marshaled structs - - nakedret - - nestif # reports deeply nested if statements - - nilerr # finds the code that returns nil even if it checks that the error is not nil - - noctx # finds sending http request without context.Context - - nolintlint # reports ill-formed or insufficient nolint directives - - predeclared # finds code that shadows one of Go's predeclared identifiers - - promlinter - - reassign # checks that package variables are not reassigned - - revive # fast, configurable, extensible, flexible, and beautiful linter for Go, drop-in replacement of golint - - staticcheck - - stylecheck - - tenv - - thelper # detects golang test helpers without t.Helper() call and checks the consistency of test helpers - - unconvert # removes unnecessary type conversions - - unparam # reports unused function parameters - - unused - - usestdlibvars # detects the possibility to use variables/constants from the Go standard library - - wastedassign # finds wasted assignment statements - - whitespace # detects leading and trailing whitespace -linters-settings: - gci: - skip-generated: true - custom-order: true - sections: - - standard # Standard section: captures all standard packages. - - default # Default section: contains all imports that could not be matched to another section type. - - prefix(github.com/jjti) - exhaustive: - # Program elements to check for exhaustiveness. - # Default: [ switch ] - check: - - switch - - map - gocritic: - settings: - captLocal: - # Whether to restrict checker to params only. - # Default: true - paramsOnly: false - underef: - # Whether to skip (*x).method() calls where x is a pointer receiver. - # Default: true - skipRecvDeref: false - govet: - enable-all: true - disable: - - fieldalignment # too strict - - shadow # bunch of false positive, doesn't realize when we return from a func - misspell: - locale: US - nakedret: - max-func-lines: 0 - nestif: - # Minimal complexity of if statements to report. - # Default: 5 - min-complexity: 4 - nolintlint: - # Enable to require an explanation of nonzero length after each nolint directive. - # Default: false - require-explanation: true - stylecheck: - checks: ["all"] -issues: - include: - - EXC0001 # Error return value of x is not checked - - EXC0013 # package comment should be of the form "(.+)... - - EXC0014 # comment on exported (.+) should be of the form "(.+)..." 
- exclude: - - ifElseChain diff --git a/vendor/github.com/jjti/go-spancheck/CONTRIBUTING.md b/vendor/github.com/jjti/go-spancheck/CONTRIBUTING.md deleted file mode 100644 index 32932fae1..000000000 --- a/vendor/github.com/jjti/go-spancheck/CONTRIBUTING.md +++ /dev/null @@ -1,51 +0,0 @@ -# Contributing guideline - -Contributions are welcome + appreciated. - -## Open Requests - -These are a couple contributions I would especially appreciate: - -1. Add check for SetAttributes: https://github.com/jjti/go-spancheck/issues/1 -1. Add SuggestedFix(es): https://github.com/jjti/go-spancheck/issues/2 - -## Steps - -### 1. Create an Issue - -If one does not exist already, open a bug report or feature request in [https://github.com/jjti/go-spancheck/issues](https://github.com/jjti/go-spancheck/issues). - -### 2. Add a test case - -Test cases are in `/testdata`. - -If fixing a bug, you can add it to `testdata/enableall/enable_all.go` (for example): - -```go -func _() { - ctx, span := otel.Tracer("foo").Start(context.Background(), "bar") // want "span.End is not called on all paths, possible memory leak" - print(ctx.Done(), span.IsRecording()) -} // want "return can be reached without calling span.End" -``` - -If adding a new feature with a new combination of flags, create a new module within `testdata`: - -1. Create a new module, eg `testdata/setattributes` -1. Copy/paste go.mod/sum into the new module directory and update the module definition, eg `module github.com/jjti/go-spancheck/testdata/setattributes` -1. Add the module to the workspace in [go.work](./go.work) -1. Add the module's directory to the `testvendor` Make target in [Makefile](./Makefile) - -### 3. Run tests - -```bash -make test -``` - -### 4. Open a PR - -Eg of a GitHub snippet for PRs: - -```bash -alias gpr='gh pr view --web 2>/dev/null || gh pr create --web --fill' -gpr -``` diff --git a/vendor/github.com/jjti/go-spancheck/LICENSE b/vendor/github.com/jjti/go-spancheck/LICENSE deleted file mode 100644 index 552ddf2dc..000000000 --- a/vendor/github.com/jjti/go-spancheck/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -MIT License - -Copyright (c) 2023 Joshua Timmons - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. diff --git a/vendor/github.com/jjti/go-spancheck/Makefile b/vendor/github.com/jjti/go-spancheck/Makefile deleted file mode 100644 index 39d80f7c6..000000000 --- a/vendor/github.com/jjti/go-spancheck/Makefile +++ /dev/null @@ -1,27 +0,0 @@ -.PHONY: fmt -fmt: - golangci-lint run --fix --config ./.golangci.yml - -.PHONY: test -test: testvendor - go test -v ./... 
- -# note: I'm copying https://github.com/ghostiam/protogetter/blob/main/testdata/Makefile -# -# x/tools/go/analysis/analysistest does not support go modules. To work around this issue -# we need to vendor any external modules to `./src`. -# -# Follow https://github.com/golang/go/issues/37054 for more details. -.PHONY: testvendor -testvendor: - @rm -rf base/src - @cd testdata/base && go mod vendor - @cp -r testdata/base/vendor testdata/base/src - @cp -r testdata/base/vendor testdata/disableerrorchecks/src - @cp -r testdata/base/vendor testdata/enableall/src - @rm -rf testdata/base/vendor - -.PHONY: install -install: - go install ./cmd/spancheck - @echo "Installed in $(shell which spancheck)" \ No newline at end of file diff --git a/vendor/github.com/jjti/go-spancheck/README.md b/vendor/github.com/jjti/go-spancheck/README.md deleted file mode 100644 index 953489d7a..000000000 --- a/vendor/github.com/jjti/go-spancheck/README.md +++ /dev/null @@ -1,241 +0,0 @@ -# go-spancheck - -![Latest release](https://img.shields.io/github/v/release/jjti/go-spancheck) -[![ci](https://github.com/jjti/go-spancheck/actions/workflows/ci.yaml/badge.svg)](https://github.com/jjti/go-spancheck/actions/workflows/ci.yaml) -[![Go Report Card](https://goreportcard.com/badge/github.com/jjti/go-spancheck)](https://goreportcard.com/report/github.com/jjti/go-spancheck) -[![MIT License](http://img.shields.io/badge/license-MIT-blue.svg?style=flat)](LICENSE) - -Checks usage of: - -- [OpenTelemetry spans](https://opentelemetry.io/docs/instrumentation/go/manual/) from [go.opentelemetry.io/otel/trace](go.opentelemetry.io/otel/trace) -- [OpenCensus spans](https://opencensus.io/quickstart/go/tracing/) from [go.opencensus.io/trace](https://pkg.go.dev/go.opencensus.io/trace#Span) - -## Example - -```bash -spancheck -checks 'end,set-status,record-error' ./... -``` - -```go -func _() error { - // span.End is not called on all paths, possible memory leak - // span.SetStatus is not called on all paths - // span.RecordError is not called on all paths - _, span := otel.Tracer("foo").Start(context.Background(), "bar") - - if true { - // return can be reached without calling span.End - // return can be reached without calling span.SetStatus - // return can be reached without calling span.RecordError - return errors.New("err") - } - - return nil // return can be reached without calling span.End -} -``` - -## Configuration - -### golangci-lint - -Docs on configuring the linter are also available at [https://golangci-lint.run/usage/linters/#spancheck](https://golangci-lint.run/usage/linters/#spancheck): - -```yaml -linters: - enable: - - spancheck - -linters-settings: - spancheck: - # Checks to enable. - # Options include: - # - `end`: check that `span.End()` is called - # - `record-error`: check that `span.RecordError(err)` is called when an error is returned - # - `set-status`: check that `span.SetStatus(codes.Error, msg)` is called when an error is returned - # Default: ["end"] - checks: - - end - - record-error - - set-status - # A list of regexes for function signatures that silence `record-error` and `set-status` reports - # if found in the call path to a returned error. - # https://github.com/jjti/go-spancheck#ignore-check-signatures - # Default: [] - ignore-check-signatures: - - "telemetry.RecordError" -``` - -### CLI - -To install the linter as a CLI: - -```bash -go install github.com/jjti/go-spancheck/cmd/spancheck@latest -spancheck ./... -``` - -Only the `span.End()` check is enabled by default. 
The others can be enabled with `-checks 'end,set-status,record-error'`. - -```txt -$ spancheck -h -... -Flags: - -checks string - comma-separated list of checks to enable (options: end, set-status, record-error) (default "end") - -ignore-check-signatures string - comma-separated list of regex for function signatures that disable checks on errors -``` - -### Ignore Check Signatures - -The `span.SetStatus()` and `span.RecordError()` checks warn when there is: - -1. a path to return statement -1. that returns an error -1. without a call (to `SetStatus` or `RecordError`, respectively) - -But it's convenient to call `SetStatus` and `RecordError` from utility methods [[1](https://andydote.co.uk/2023/09/19/tracing-is-better/#step-2-wrap-the-errors)]. To support that, the `ignore-*-check-signatures` settings will suppress warnings if the configured function is present in the path. - -For example, by default, the code below would have warnings as shown: - -```go -func task(ctx context.Context) error { - ctx, span := otel.Tracer("foo").Start(ctx, "bar") // span.SetStatus is not called on all paths - defer span.End() - - if err := subTask(ctx); err != nil { - return recordErr(span, err) // return can be reached without calling span.SetStatus - } - - return nil -} - -func recordErr(span trace.Span, err error) error { - span.SetStatus(codes.Error, err.Error()) - span.RecordError(err) - return err -} -``` - -The warnings are can be ignored by setting `-ignore-check-signatures` flag to `recordErr`: - -```bash -spancheck -checks 'end,set-status,record-error' -ignore-check-signatures 'recordErr' ./... -``` - -## Problem Statement - -Tracing is a celebrated [[1](https://andydote.co.uk/2023/09/19/tracing-is-better/),[2](https://charity.wtf/2022/08/15/live-your-best-life-with-structured-events/)] and well marketed [[3](https://docs.datadoghq.com/tracing/),[4](https://www.honeycomb.io/distributed-tracing)] pillar of observability. But self-instrumented tracing requires a lot of easy-to-forget boilerplate: - -```go -import ( - "go.opentelemetry.io/otel" - "go.opentelemetry.io/otel/codes" -) - -func task(ctx context.Context) error { - ctx, span := otel.Tracer("foo").Start(ctx, "bar") - defer span.End() // call `.End()` - - if err := subTask(ctx); err != nil { - span.SetStatus(codes.Error, err.Error()) // call SetStatus(codes.Error, msg) to set status:error - span.RecordError(err) // call RecordError(err) to record an error event - return err - } - - return nil -} -``` - -For spans to be _really_ useful, developers need to: - -1. call `span.End()` always -1. call `span.SetStatus(codes.Error, msg)` on error -1. call `span.RecordError(err)` on error -1. call `span.SetAttributes()` liberally - -- OpenTelemetry: [Creating spans](https://opentelemetry.io/docs/instrumentation/go/manual/#creating-spans) -- Uptrace: [OpenTelemetry Go Tracing API](https://uptrace.dev/opentelemetry/go-tracing.html#quickstart) - -This linter helps developers with steps 1-3. - -## Checks - -This linter supports three checks, each documented below. Only the check for `span.End()` is enabled by default. See [Configuration](#configuration) for instructions on enabling the others. - -### `span.End()` - -Enabled by default. - -Not calling `End` can cause memory leaks and prevents spans from being closed. - -> Any Span that is created MUST also be ended. This is the responsibility of the user. Implementations of this API may leak memory or other resources if Spans are not ended. 
- -[source: trace.go](https://github.com/open-telemetry/opentelemetry-go/blob/98b32a6c3a87fbee5d34c063b9096f416b250897/trace/trace.go#L523) - -```go -func task(ctx context.Context) error { - otel.Tracer("app").Start(ctx, "foo") // span is unassigned, probable memory leak - _, span := otel.Tracer().Start(ctx, "foo") // span.End is not called on all paths, possible memory leak - return nil // return can be reached without calling span.End -} -``` - -### `span.SetStatus(codes.Error, "msg")` - -Disabled by default. Enable with `-checks 'set-status'`. - -Developers should call `SetStatus` on spans. The status attribute is an important, first-class attribute: - -1. observability platforms and APMs differentiate "success" vs "failure" using [span's status codes](https://docs.datadoghq.com/tracing/metrics/). -1. telemetry collector agents, like the [Open Telemetry Collector's Tail Sampling Processor](https://github.com/open-telemetry/opentelemetry-collector-contrib/blob/main/processor/tailsamplingprocessor/README.md#:~:text=Sampling%20Processor.-,status_code,-%3A%20Sample%20based%20upon), are configurable to sample `Error` spans at a higher rate than `OK` spans. -1. observability platforms, like [DataDog, have trace retention filters that use spans' status](https://docs.datadoghq.com/tracing/trace_pipeline/trace_retention/). In other words, `status:error` spans often receive special treatment with the assumption they are more useful for debugging. And forgetting to set the status can lead to spans, with useful debugging information, being dropped. - -```go -func _() error { - _, span := otel.Tracer("foo").Start(context.Background(), "bar") // span.SetStatus is not called on all paths - defer span.End() - - if err := subTask(); err != nil { - span.RecordError(err) - return errors.New(err) // return can be reached without calling span.SetStatus - } - - return nil -} -``` - -OpenTelemetry docs: [Set span status](https://opentelemetry.io/docs/instrumentation/go/manual/#set-span-status). - -### `span.RecordError(err)` - -Disabled by default. Enable with `-checks 'record-error'`. - -Calling `RecordError` creates a new exception-type [event (structured log message)](https://opentelemetry.io/docs/concepts/signals/traces/#span-events) on the span. This is recommended to capture the error's stack trace. - -```go -func _() error { - _, span := otel.Tracer("foo").Start(context.Background(), "bar") // span.RecordError is not called on all paths - defer span.End() - - if err := subTask(); err != nil { - span.SetStatus(codes.Error, err.Error()) - return errors.New(err) // return can be reached without calling span.RecordError - } - - return nil -} -``` - -OpenTelemetry docs: [Record errors](https://opentelemetry.io/docs/instrumentation/go/manual/#record-errors). - -Note: this check is not applied to [OpenCensus spans](https://pkg.go.dev/go.opencensus.io/trace#SpanInterface) because they have no `RecordError` method. 
- -## Attribution - -This linter is the product of liberal copying of: - -- [github.com/golang/tools/go/analysis/passes/lostcancel](https://github.com/golang/tools/tree/master/go/analysis/passes/lostcancel) (half the linter) -- [github.com/tomarrell/wrapcheck](https://github.com/tomarrell/wrapcheck) (error type checking and config) -- [github.com/Antonboom/testifylint](https://github.com/Antonboom/testifylint) (README) -- [github.com/ghostiam/protogetter](https://github.com/ghostiam/protogetter/blob/main/testdata/Makefile) (test setup) diff --git a/vendor/github.com/jjti/go-spancheck/config.go b/vendor/github.com/jjti/go-spancheck/config.go deleted file mode 100644 index 4005f49e0..000000000 --- a/vendor/github.com/jjti/go-spancheck/config.go +++ /dev/null @@ -1,141 +0,0 @@ -package spancheck - -import ( - "flag" - "fmt" - "log" - "regexp" - "strings" -) - -// Check is a type of check that can be enabled or disabled. -type Check int - -const ( - // EndCheck if enabled, checks that span.End() is called after span creation and before the function returns. - EndCheck Check = iota - - // SetStatusCheck if enabled, checks that `span.SetStatus(codes.Error, msg)` is called when returning an error. - SetStatusCheck - - // RecordErrorCheck if enabled, checks that span.RecordError(err) is called when returning an error. - RecordErrorCheck -) - -func (c Check) String() string { - switch c { - case EndCheck: - return "end" - case SetStatusCheck: - return "set-status" - case RecordErrorCheck: - return "record-error" - default: - return "" - } -} - -var ( - // Checks is a list of all checks by name. - Checks = map[string]Check{ - EndCheck.String(): EndCheck, - SetStatusCheck.String(): SetStatusCheck, - RecordErrorCheck.String(): RecordErrorCheck, - } -) - -// Config is a configuration for the spancheck analyzer. -type Config struct { - fs flag.FlagSet - - // EnabledChecks is a list of checks to enable by name. - EnabledChecks []string - - // IgnoreChecksSignaturesSlice is a slice of strings that are turned into - // the IgnoreSetStatusCheckSignatures regex. - IgnoreChecksSignaturesSlice []string - - endCheckEnabled bool - setStatusEnabled bool - recordErrorEnabled bool - - // ignoreChecksSignatures is a regex that, if matched, disables the - // SetStatus and RecordError checks on error. - ignoreChecksSignatures *regexp.Regexp -} - -// NewDefaultConfig returns a new Config with default values. -func NewDefaultConfig() *Config { - return &Config{ - EnabledChecks: []string{EndCheck.String()}, - } -} - -// finalize parses checks and signatures from the public string slices of Config. -func (c *Config) finalize() { - c.parseSignatures() - - checks := parseChecks(c.EnabledChecks) - c.endCheckEnabled = contains(checks, EndCheck) - c.setStatusEnabled = contains(checks, SetStatusCheck) - c.recordErrorEnabled = contains(checks, RecordErrorCheck) -} - -// parseSignatures sets the Ignore*CheckSignatures regex from the string slices. 
-func (c *Config) parseSignatures() { - if c.ignoreChecksSignatures == nil && len(c.IgnoreChecksSignaturesSlice) > 0 { - if len(c.IgnoreChecksSignaturesSlice) == 1 && c.IgnoreChecksSignaturesSlice[0] == "" { - return - } - - c.ignoreChecksSignatures = createRegex(c.IgnoreChecksSignaturesSlice) - } -} - -func parseChecks(checksSlice []string) []Check { - if len(checksSlice) == 0 { - return nil - } - - checks := []Check{} - for _, check := range checksSlice { - checkName := strings.TrimSpace(check) - if checkName == "" { - continue - } - - check, ok := Checks[checkName] - if !ok { - continue - } - - checks = append(checks, check) - } - - return checks -} - -func createRegex(sigs []string) *regexp.Regexp { - if len(sigs) == 0 { - return nil - } - - regex := fmt.Sprintf("(%s)", strings.Join(sigs, "|")) - regexCompiled, err := regexp.Compile(regex) - if err != nil { - log.Default().Print("[WARN] failed to compile regex from signature flag", "regex", regex, "err", err) - return nil - } - - return regexCompiled -} - -func contains(s []Check, e Check) bool { - for _, a := range s { - if a == e { - return true - } - } - - return false -} diff --git a/vendor/github.com/jjti/go-spancheck/doc.go b/vendor/github.com/jjti/go-spancheck/doc.go deleted file mode 100644 index f9dec043f..000000000 --- a/vendor/github.com/jjti/go-spancheck/doc.go +++ /dev/null @@ -1,37 +0,0 @@ -// Package spancheck defines a linter that checks for mistakes with OTEL trace spans. -// -// # Analyzer spancheck -// -// spancheck: check for mistakes with OpenTelemetry trace spans. -// -// Common mistakes with OTEL trace spans include forgetting to call End: -// -// func(ctx context.Context) { -// ctx, span := otel.Tracer("app").Start(ctx, "span") -// // defer span.End() should be here -// -// // do stuff -// } -// -// Forgetting to set an Error status: -// -// ctx, span := otel.Tracer("app").Start(ctx, "span") -// defer span.End() -// -// if err := task(); err != nil { -// // span.SetStatus(codes.Error, err.Error()) should be here -// span.RecordError(err) -// return fmt.Errorf("failed to run task: %w", err) -// } -// -// Forgetting to record the Error: -// -// ctx, span := otel.Tracer("app").Start(ctx, "span") -// defer span.End() -// -// if err := task(); err != nil { -// span.SetStatus(codes.Error, err.Error()) -// // span.RecordError(err) should be here -// return fmt.Errorf("failed to run task: %w", err) -// } -package spancheck diff --git a/vendor/github.com/jjti/go-spancheck/go.work b/vendor/github.com/jjti/go-spancheck/go.work deleted file mode 100644 index 7d0a87b9e..000000000 --- a/vendor/github.com/jjti/go-spancheck/go.work +++ /dev/null @@ -1,8 +0,0 @@ -go 1.20 - -use ( - . 
- ./testdata/base - ./testdata/disableerrorchecks - ./testdata/enableall -) diff --git a/vendor/github.com/jjti/go-spancheck/go.work.sum b/vendor/github.com/jjti/go-spancheck/go.work.sum deleted file mode 100644 index f3cdef790..000000000 --- a/vendor/github.com/jjti/go-spancheck/go.work.sum +++ /dev/null @@ -1,3 +0,0 @@ -github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= -golang.org/x/net v0.19.0/go.mod h1:CfAk/cbD4CthTvqiEl8NpboMuiuOYsAr/7NOjZJtv1U= -golang.org/x/sync v0.5.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= diff --git a/vendor/github.com/jjti/go-spancheck/spancheck.go b/vendor/github.com/jjti/go-spancheck/spancheck.go deleted file mode 100644 index 6f069a033..000000000 --- a/vendor/github.com/jjti/go-spancheck/spancheck.go +++ /dev/null @@ -1,441 +0,0 @@ -package spancheck - -import ( - "go/ast" - "go/types" - "regexp" - - "golang.org/x/tools/go/analysis" - "golang.org/x/tools/go/analysis/passes/ctrlflow" - "golang.org/x/tools/go/analysis/passes/inspect" - "golang.org/x/tools/go/ast/inspector" - "golang.org/x/tools/go/cfg" -) - -const stackLen = 32 - -// spanType differentiates span types. -type spanType int - -const ( - spanUnset spanType = iota // not a span - spanOpenTelemetry // from go.opentelemetry.io/otel - spanOpenCensus // from go.opencensus.io/trace -) - -var ( - // this approach stolen from errcheck - // https://github.com/kisielk/errcheck/blob/7f94c385d0116ccc421fbb4709e4a484d98325ee/errcheck/errcheck.go#L22 - errorType = types.Universe.Lookup("error").Type().Underlying().(*types.Interface) -) - -// NewAnalyzerWithConfig returns a new analyzer configured with the Config passed in. -// Its config can be set for testing. -func NewAnalyzerWithConfig(config *Config) *analysis.Analyzer { - return newAnalyzer(config) -} - -func newAnalyzer(config *Config) *analysis.Analyzer { - config.finalize() - - return &analysis.Analyzer{ - Name: "spancheck", - Doc: "Checks for mistakes with OpenTelemetry/Census spans.", - Flags: config.fs, - Run: run(config), - Requires: []*analysis.Analyzer{ - ctrlflow.Analyzer, - inspect.Analyzer, - }, - } -} - -func run(config *Config) func(*analysis.Pass) (interface{}, error) { - return func(pass *analysis.Pass) (interface{}, error) { - inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) - - nodeFilter := []ast.Node{ - (*ast.FuncLit)(nil), // f := func() {} - (*ast.FuncDecl)(nil), // func foo() {} - } - inspect.Preorder(nodeFilter, func(n ast.Node) { - runFunc(pass, n, config) - }) - - return nil, nil - } -} - -type spanVar struct { - stmt ast.Node - id *ast.Ident - vr *types.Var - spanType spanType -} - -// runFunc checks if the node is a function, has a span, and the span never has SetStatus set. -func runFunc(pass *analysis.Pass, node ast.Node, config *Config) { - // copying https://cs.opensource.google/go/x/tools/+/master:go/analysis/passes/lostcancel/lostcancel.go - - // Find scope of function node - var funcScope *types.Scope - switch v := node.(type) { - case *ast.FuncLit: - funcScope = pass.TypesInfo.Scopes[v.Type] - case *ast.FuncDecl: - funcScope = pass.TypesInfo.Scopes[v.Type] - } - - // Maps each span variable to its defining ValueSpec/AssignStmt. - spanVars := make(map[*ast.Ident]spanVar) - - // Find the set of span vars to analyze. 
- stack := make([]ast.Node, 0, stackLen) - ast.Inspect(node, func(n ast.Node) bool { - switch n.(type) { - case *ast.FuncLit: - if len(stack) > 0 { - return false // don't stray into nested functions - } - case nil: - stack = stack[:len(stack)-1] // pop - return true - } - stack = append(stack, n) // push - - // Look for [{AssignStmt,ValueSpec} CallExpr SelectorExpr]: - // - // ctx, span := otel.Tracer("app").Start(...) - // ctx, span = otel.Tracer("app").Start(...) - // var ctx, span = otel.Tracer("app").Start(...) - sType, sStart := isSpanStart(pass.TypesInfo, n) - if !sStart || !isCall(stack[len(stack)-2]) { - return true - } - - stmt := stack[len(stack)-3] - id := getID(stmt) - if id == nil { - pass.ReportRangef(n, "span is unassigned, probable memory leak") - return true - } - - if id.Name == "_" { - pass.ReportRangef(id, "span is unassigned, probable memory leak") - } else if v, ok := pass.TypesInfo.Uses[id].(*types.Var); ok { - // If the span variable is defined outside function scope, - // do not analyze it. - if funcScope.Contains(v.Pos()) { - spanVars[id] = spanVar{ - vr: v, - stmt: stmt, - id: id, - spanType: sType, - } - } - } else if v, ok := pass.TypesInfo.Defs[id].(*types.Var); ok { - spanVars[id] = spanVar{ - vr: v, - stmt: stmt, - id: id, - spanType: sType, - } - } - - return true - }) - - if len(spanVars) == 0 { - return // no need to inspect CFG - } - - // Obtain the CFG. - cfgs := pass.ResultOf[ctrlflow.Analyzer].(*ctrlflow.CFGs) - var g *cfg.CFG - var sig *types.Signature - switch node := node.(type) { - case *ast.FuncDecl: - sig, _ = pass.TypesInfo.Defs[node.Name].Type().(*types.Signature) - g = cfgs.FuncDecl(node) - case *ast.FuncLit: - sig, _ = pass.TypesInfo.Types[node.Type].Type.(*types.Signature) - g = cfgs.FuncLit(node) - } - if sig == nil { - return // missing type information - } - - // Check for missing calls. - for _, sv := range spanVars { - if config.endCheckEnabled { - // Check if there's no End to the span. - if ret := getMissingSpanCalls(pass, g, sv, "End", func(pass *analysis.Pass, ret *ast.ReturnStmt) *ast.ReturnStmt { return ret }, nil); ret != nil { - pass.ReportRangef(sv.stmt, "%s.End is not called on all paths, possible memory leak", sv.vr.Name()) - pass.ReportRangef(ret, "return can be reached without calling %s.End", sv.vr.Name()) - } - } - - if config.setStatusEnabled { - // Check if there's no SetStatus to the span setting an error. - if ret := getMissingSpanCalls(pass, g, sv, "SetStatus", getErrorReturn, config.ignoreChecksSignatures); ret != nil { - pass.ReportRangef(sv.stmt, "%s.SetStatus is not called on all paths", sv.vr.Name()) - pass.ReportRangef(ret, "return can be reached without calling %s.SetStatus", sv.vr.Name()) - } - } - - if config.recordErrorEnabled && sv.spanType == spanOpenTelemetry { // RecordError only exists in OpenTelemetry - // Check if there's no RecordError to the span setting an error. 
- if ret := getMissingSpanCalls(pass, g, sv, "RecordError", getErrorReturn, config.ignoreChecksSignatures); ret != nil { - pass.ReportRangef(sv.stmt, "%s.RecordError is not called on all paths", sv.vr.Name()) - pass.ReportRangef(ret, "return can be reached without calling %s.RecordError", sv.vr.Name()) - } - } - } -} - -// isSpanStart reports whether n is tracer.Start() -func isSpanStart(info *types.Info, n ast.Node) (spanType, bool) { - sel, ok := n.(*ast.SelectorExpr) - if !ok { - return spanUnset, false - } - - switch sel.Sel.Name { - case "Start": // https://github.com/open-telemetry/opentelemetry-go/blob/98b32a6c3a87fbee5d34c063b9096f416b250897/trace/trace.go#L523 - obj, ok := info.Uses[sel.Sel] - return spanOpenTelemetry, ok && obj.Pkg().Path() == "go.opentelemetry.io/otel/trace" - case "StartSpan": // https://pkg.go.dev/go.opencensus.io/trace#StartSpan - obj, ok := info.Uses[sel.Sel] - return spanOpenCensus, ok && obj.Pkg().Path() == "go.opencensus.io/trace" - case "StartSpanWithRemoteParent": // https://github.com/census-instrumentation/opencensus-go/blob/v0.24.0/trace/trace_api.go#L66 - obj, ok := info.Uses[sel.Sel] - return spanOpenCensus, ok && obj.Pkg().Path() == "go.opencensus.io/trace" - default: - return spanUnset, false - } -} - -func isCall(n ast.Node) bool { - _, ok := n.(*ast.CallExpr) - return ok -} - -func getID(node ast.Node) *ast.Ident { - switch stmt := node.(type) { - case *ast.ValueSpec: - if len(stmt.Names) > 1 { - return stmt.Names[1] - } - case *ast.AssignStmt: - if len(stmt.Lhs) > 1 { - id, _ := stmt.Lhs[1].(*ast.Ident) - return id - } - } - return nil -} - -// getMissingSpanCalls finds a path through the CFG, from stmt (which defines -// the 'span' variable v) to a return statement, that doesn't call the passed selector on the span. -func getMissingSpanCalls( - pass *analysis.Pass, - g *cfg.CFG, - sv spanVar, - selName string, - checkErr func(pass *analysis.Pass, ret *ast.ReturnStmt) *ast.ReturnStmt, - ignoreCheckSig *regexp.Regexp, -) *ast.ReturnStmt { - // blockUses computes "uses" for each block, caching the result. - memo := make(map[*cfg.Block]bool) - blockUses := func(pass *analysis.Pass, b *cfg.Block) bool { - res, ok := memo[b] - if !ok { - res = usesCall(pass, b.Nodes, sv, selName, ignoreCheckSig, 0) - memo[b] = res - } - return res - } - - // Find the var's defining block in the CFG, - // plus the rest of the statements of that block. - var defBlock *cfg.Block - var rest []ast.Node -outer: - for _, b := range g.Blocks { - for i, n := range b.Nodes { - if n == sv.stmt { - defBlock = b - rest = b.Nodes[i+1:] - break outer - } - } - } - - // Is the call "used" in the remainder of its defining block? - if usesCall(pass, rest, sv, selName, ignoreCheckSig, 0) { - return nil - } - - // Does the defining block return without making the call? - if ret := defBlock.Return(); ret != nil { - return checkErr(pass, ret) - } - - // Search the CFG depth-first for a path, from defblock to a - // return block, in which v is never "used". - seen := make(map[*cfg.Block]bool) - var search func(blocks []*cfg.Block) *ast.ReturnStmt - search = func(blocks []*cfg.Block) *ast.ReturnStmt { - for _, b := range blocks { - if seen[b] { - continue - } - seen[b] = true - - // Prune the search if the block uses v. - if blockUses(pass, b) { - continue - } - - // Found path to return statement? 
- if ret := getErrorReturn(pass, b.Return()); ret != nil { - return ret // found - } - - // Recur - if ret := getErrorReturn(pass, search(b.Succs)); ret != nil { - return ret - } - } - return nil - } - - return search(defBlock.Succs) -} - -// usesCall reports whether stmts contain a use of the selName call on variable v. -func usesCall(pass *analysis.Pass, stmts []ast.Node, sv spanVar, selName string, ignoreCheckSig *regexp.Regexp, depth int) bool { - if depth > 1 { // for perf reasons, do not dive too deep thru func literals, just one level deep check. - return false - } - - found, reAssigned := false, false - for _, subStmt := range stmts { - stack := []ast.Node{} - ast.Inspect(subStmt, func(n ast.Node) bool { - switch n := n.(type) { - case *ast.FuncLit: - if len(stack) > 0 { - cfgs := pass.ResultOf[ctrlflow.Analyzer].(*ctrlflow.CFGs) - g := cfgs.FuncLit(n) - if g != nil && len(g.Blocks) > 0 { - return usesCall(pass, g.Blocks[0].Nodes, sv, selName, ignoreCheckSig, depth+1) - } - - return false - } - case *ast.CallExpr: - if ident, ok := n.Fun.(*ast.Ident); ok { - fnSig := pass.TypesInfo.ObjectOf(ident).String() - if ignoreCheckSig != nil && ignoreCheckSig.MatchString(fnSig) { - found = true - return false - } - } - case nil: - if len(stack) > 0 { - stack = stack[:len(stack)-1] // pop - return true - } - return false - } - stack = append(stack, n) // push - - // Check whether the span was assigned over top of its old value. - _, spanStart := isSpanStart(pass.TypesInfo, n) - if spanStart { - if id := getID(stack[len(stack)-3]); id != nil && id.Obj.Decl == sv.id.Obj.Decl { - reAssigned = true - return false - } - } - - if n, ok := n.(*ast.SelectorExpr); ok { - // Selector (End, SetStatus, RecordError) hit. - if n.Sel.Name == selName { - id, ok := n.X.(*ast.Ident) - found = ok && id.Obj.Decl == sv.id.Obj.Decl - } - - // Check if an ignore signature matches. 
- fnSig := pass.TypesInfo.ObjectOf(n.Sel).String() - if ignoreCheckSig != nil && ignoreCheckSig.MatchString(fnSig) { - found = true - } - } - - return !found - }) - } - - return found && !reAssigned -} - -func getErrorReturn(pass *analysis.Pass, ret *ast.ReturnStmt) *ast.ReturnStmt { - if ret == nil { - return nil - } - - for _, r := range ret.Results { - if isErrorType(pass.TypesInfo.TypeOf(r)) { - return ret - } - - if r, ok := r.(*ast.CallExpr); ok { - for _, err := range errorsByArg(pass, r) { - if err { - return ret - } - } - } - } - - return nil -} - -// errorsByArg returns a slice s such that -// len(s) == number of return types of call -// s[i] == true iff return type at position i from left is an error type -// -// copied from https://github.com/kisielk/errcheck/blob/master/errcheck/errcheck.go -func errorsByArg(pass *analysis.Pass, call *ast.CallExpr) []bool { - switch t := pass.TypesInfo.Types[call].Type.(type) { - case *types.Named: - // Single return - return []bool{isErrorType(t)} - case *types.Pointer: - // Single return via pointer - return []bool{isErrorType(t)} - case *types.Tuple: - // Multiple returns - s := make([]bool, t.Len()) - for i := 0; i < t.Len(); i++ { - switch et := t.At(i).Type().(type) { - case *types.Named: - // Single return - s[i] = isErrorType(et) - case *types.Pointer: - // Single return via pointer - s[i] = isErrorType(et) - default: - s[i] = false - } - } - return s - } - return []bool{false} -} - -func isErrorType(t types.Type) bool { - return types.Implements(t, errorType) -} diff --git a/vendor/github.com/julz/importas/.gitignore b/vendor/github.com/julz/importas/.gitignore deleted file mode 100644 index c264e642f..000000000 --- a/vendor/github.com/julz/importas/.gitignore +++ /dev/null @@ -1,2 +0,0 @@ -.idea/ -importas diff --git a/vendor/github.com/julz/importas/LICENSE b/vendor/github.com/julz/importas/LICENSE deleted file mode 100644 index 261eeb9e9..000000000 --- a/vendor/github.com/julz/importas/LICENSE +++ /dev/null @@ -1,201 +0,0 @@ - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. 
- - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. 
You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. 
In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright [yyyy] [name of copyright owner] - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. diff --git a/vendor/github.com/julz/importas/README.md b/vendor/github.com/julz/importas/README.md deleted file mode 100644 index 1ea7b4fb2..000000000 --- a/vendor/github.com/julz/importas/README.md +++ /dev/null @@ -1,59 +0,0 @@ -A linter to enforce importing certain packages consistently. - -## What is this for? - -Ideally, go imports should avoid aliasing. Sometimes though, especially with -Kubernetes API code, it becomes unavoidable, because many packages are imported -as e.g. "[package]/v1alpha1" and you end up with lots of collisions if you use -"v1alpha1". - -This linter lets you enforce that whenever (for example) -"pkg/apis/serving/v1alpha1" is aliased, it is aliased as "servingv1alpha1". - -## Usage - -~~~~ -importas \ - -alias knative.dev/serving/pkg/apis/autoscaling/v1alpha1:autoscalingv1alpha1 \ - -alias knative.dev/serving/pkg/apis/serving/v1:servingv1 \ - ./... 
-~~~~ - -### `-no-unaliased` option - -By default, importas allows non-aliased imports, even when the package is specified by `-alias` flag. -With `-no-unaliased` option, importas does not allow this. - -~~~~ -importas -no-unaliased \ - -alias knative.dev/serving/pkg/apis/autoscaling/v1alpha1:autoscalingv1alpha1 \ - -alias knative.dev/serving/pkg/apis/serving/v1:servingv1 \ - ./... -~~~~ - -### `-no-extra-aliases` option - -By default, importas allows aliases which are not specified by `-alias` flags. -With `-no-extra-aliases` option, importas does not allow any unspecified aliases. - -~~~~ -importas -no-extra-aliases \ - -alias knative.dev/serving/pkg/apis/autoscaling/v1alpha1:autoscalingv1alpha1 \ - -alias knative.dev/serving/pkg/apis/serving/v1:servingv1 \ - ./... -~~~~ - -### Use regular expression - -You can specify the package path by regular expression, and alias by regular expression replacement syntax like following snippet. - -~~~~ -importas -alias 'knative.dev/serving/pkg/apis/(\w+)/(v[\w\d]+):$1$2' -~~~~ - -`$1` represents the text of the first submatch. See [detail](https://golang.org/pkg/regexp/#Regexp.Expand). - -So it will enforce that - -"knative.dev/serving/pkg/apis/autoscaling/v1alpha1" is aliased by "autoscalingv1alpha1", and -"knative.dev/serving/pkg/apis/serving/v1" is aliased by "servingv1" diff --git a/vendor/github.com/julz/importas/analyzer.go b/vendor/github.com/julz/importas/analyzer.go deleted file mode 100644 index f19653478..000000000 --- a/vendor/github.com/julz/importas/analyzer.go +++ /dev/null @@ -1,141 +0,0 @@ -package importas - -import ( - "fmt" - "go/ast" - "go/types" - "strconv" - "strings" - - "golang.org/x/tools/go/analysis" - "golang.org/x/tools/go/analysis/passes/inspect" - "golang.org/x/tools/go/ast/inspector" -) - -var config = &Config{ - RequiredAlias: make(map[string]string), -} - -var Analyzer = &analysis.Analyzer{ - Name: "importas", - Doc: "Enforces consistent import aliases", - Run: run, - - Flags: flags(config), - - Requires: []*analysis.Analyzer{inspect.Analyzer}, -} - -func run(pass *analysis.Pass) (interface{}, error) { - return runWithConfig(config, pass) -} - -func runWithConfig(config *Config, pass *analysis.Pass) (interface{}, error) { - if err := config.CompileRegexp(); err != nil { - return nil, err - } - - inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) - inspect.Preorder([]ast.Node{(*ast.ImportSpec)(nil)}, func(n ast.Node) { - visitImportSpecNode(config, n.(*ast.ImportSpec), pass) - }) - - return nil, nil -} - -func visitImportSpecNode(config *Config, node *ast.ImportSpec, pass *analysis.Pass) { - if !config.DisallowUnaliased && node.Name == nil { - return - } - - alias := "" - if node.Name != nil { - alias = node.Name.String() - } - - if alias == "." { - return // Dot aliases are generally used in tests, so ignore. - } - - if strings.HasPrefix(alias, "_") { - return // Used by go test and for auto-includes, not a conflict. 
- } - - path, err := strconv.Unquote(node.Path.Value) - if err != nil { - pass.Reportf(node.Pos(), "import not quoted") - } - - if required, exists := config.AliasFor(path); exists && required != alias { - message := fmt.Sprintf("import %q imported as %q but must be %q according to config", path, alias, required) - if alias == "" { - message = fmt.Sprintf("import %q imported without alias but must be with alias %q according to config", path, required) - } - - pass.Report(analysis.Diagnostic{ - Pos: node.Pos(), - End: node.End(), - Message: message, - SuggestedFixes: []analysis.SuggestedFix{{ - Message: "Use correct alias", - TextEdits: findEdits(node, pass.TypesInfo.Uses, path, alias, required), - }}, - }) - } else if !exists && config.DisallowExtraAliases { - pass.Report(analysis.Diagnostic{ - Pos: node.Pos(), - End: node.End(), - Message: fmt.Sprintf("import %q has alias %q which is not part of config", path, alias), - SuggestedFixes: []analysis.SuggestedFix{{ - Message: "remove alias", - TextEdits: findEdits(node, pass.TypesInfo.Uses, path, alias, ""), - }}, - }) - } -} - -func findEdits(node ast.Node, uses map[*ast.Ident]types.Object, importPath, original, required string) []analysis.TextEdit { - // Edit the actual import line. - importLine := strconv.Quote(importPath) - if required != "" { - importLine = required + " " + importLine - } - result := []analysis.TextEdit{{ - Pos: node.Pos(), - End: node.End(), - NewText: []byte(importLine), - }} - - packageReplacement := required - if required == "" { - packageParts := strings.Split(importPath, "/") - if len(packageParts) != 0 { - packageReplacement = packageParts[len(packageParts)-1] - } else { - // fall back to original - packageReplacement = original - } - } - - // Edit all the uses of the alias in the code. - for use, pkg := range uses { - pkgName, ok := pkg.(*types.PkgName) - if !ok { - // skip identifiers that aren't pointing at a PkgName. - continue - } - - if pkgName.Pos() != node.Pos() { - // skip identifiers pointing to a different import statement. 
- continue - } - - result = append(result, analysis.TextEdit{ - Pos: use.Pos(), - End: use.End(), - NewText: []byte(packageReplacement), - }) - } - - return result -} diff --git a/vendor/github.com/julz/importas/config.go b/vendor/github.com/julz/importas/config.go deleted file mode 100644 index 8c9c76d91..000000000 --- a/vendor/github.com/julz/importas/config.go +++ /dev/null @@ -1,70 +0,0 @@ -package importas - -import ( - "errors" - "fmt" - "regexp" -) - -type Config struct { - RequiredAlias map[string]string - Rules []*Rule - DisallowUnaliased bool - DisallowExtraAliases bool -} - -func (c *Config) CompileRegexp() error { - rules := make([]*Rule, 0, len(c.RequiredAlias)) - for path, alias := range c.RequiredAlias { - reg, err := regexp.Compile(fmt.Sprintf("^%s$", path)) - if err != nil { - return err - } - - rules = append(rules, &Rule{ - Regexp: reg, - Alias: alias, - }) - } - - c.Rules = rules - return nil -} - -func (c *Config) findRule(path string) *Rule { - for _, rule := range c.Rules { - if rule.Regexp.MatchString(path) { - return rule - } - } - - return nil -} - -func (c *Config) AliasFor(path string) (string, bool) { - rule := c.findRule(path) - if rule == nil { - return "", false - } - - alias, err := rule.aliasFor(path) - if err != nil { - return "", false - } - - return alias, true -} - -type Rule struct { - Alias string - Regexp *regexp.Regexp -} - -func (r *Rule) aliasFor(path string) (string, error) { - str := r.Regexp.FindString(path) - if len(str) > 0 { - return r.Regexp.ReplaceAllString(str, r.Alias), nil - } - - return "", errors.New("mismatch rule") -} diff --git a/vendor/github.com/julz/importas/flags.go b/vendor/github.com/julz/importas/flags.go deleted file mode 100644 index f8107104a..000000000 --- a/vendor/github.com/julz/importas/flags.go +++ /dev/null @@ -1,32 +0,0 @@ -package importas - -import ( - "errors" - "flag" - "fmt" - "strings" -) - -func flags(config *Config) flag.FlagSet { - fs := flag.FlagSet{} - fs.Var(stringMap(config.RequiredAlias), "alias", "required import alias in form path:alias") - fs.BoolVar(&config.DisallowUnaliased, "no-unaliased", false, "do not allow unaliased imports of aliased packages") - fs.BoolVar(&config.DisallowExtraAliases, "no-extra-aliases", false, "do not allow non-required aliases") - return fs -} - -type stringMap map[string]string - -func (v stringMap) Set(val string) error { - spl := strings.SplitN(val, ":", 2) - if len(spl) != 2 { - return errors.New("import flag must be of form path:alias") - } - - v[spl[0]] = spl[1] - return nil -} - -func (v stringMap) String() string { - return fmt.Sprintf("%v", (map[string]string)(v)) -} diff --git a/vendor/github.com/kisielk/errcheck/LICENSE b/vendor/github.com/kisielk/errcheck/LICENSE deleted file mode 100644 index a2b16b5bd..000000000 --- a/vendor/github.com/kisielk/errcheck/LICENSE +++ /dev/null @@ -1,22 +0,0 @@ -Copyright (c) 2013 Kamil Kisiel - -Permission is hereby granted, free of charge, to any person -obtaining a copy of this software and associated documentation -files (the "Software"), to deal in the Software without -restriction, including without limitation the rights to use, -copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the -Software is furnished to do so, subject to the following -conditions: - -The above copyright notice and this permission notice shall be -included in all copies or substantial portions of the Software. 
- -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES -OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT -HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, -WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING -FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR -OTHER DEALINGS IN THE SOFTWARE. diff --git a/vendor/github.com/kisielk/errcheck/errcheck/analyzer.go b/vendor/github.com/kisielk/errcheck/errcheck/analyzer.go deleted file mode 100644 index 82ab6298a..000000000 --- a/vendor/github.com/kisielk/errcheck/errcheck/analyzer.go +++ /dev/null @@ -1,77 +0,0 @@ -package errcheck - -import ( - "fmt" - "go/ast" - "reflect" - "regexp" - - "golang.org/x/tools/go/analysis" -) - -var Analyzer = &analysis.Analyzer{ - Name: "errcheck", - Doc: "check for unchecked errors", - Run: runAnalyzer, - ResultType: reflect.TypeOf(Result{}), -} - -var ( - argBlank bool - argAsserts bool - argExcludeFile string - argExcludeOnly bool -) - -func init() { - Analyzer.Flags.BoolVar(&argBlank, "blank", false, "if true, check for errors assigned to blank identifier") - Analyzer.Flags.BoolVar(&argAsserts, "assert", false, "if true, check for ignored type assertion results") - Analyzer.Flags.StringVar(&argExcludeFile, "exclude", "", "Path to a file containing a list of functions to exclude from checking") - Analyzer.Flags.BoolVar(&argExcludeOnly, "excludeonly", false, "Use only excludes from exclude file") -} - -func runAnalyzer(pass *analysis.Pass) (interface{}, error) { - exclude := map[string]bool{} - if !argExcludeOnly { - for _, name := range DefaultExcludedSymbols { - exclude[name] = true - } - } - if argExcludeFile != "" { - excludes, err := ReadExcludes(argExcludeFile) - if err != nil { - return nil, fmt.Errorf("Could not read exclude file: %v\n", err) - } - for _, name := range excludes { - exclude[name] = true - } - } - - var allErrors []UncheckedError - for _, f := range pass.Files { - v := &visitor{ - typesInfo: pass.TypesInfo, - fset: pass.Fset, - blank: argBlank, - asserts: argAsserts, - exclude: exclude, - ignore: map[string]*regexp.Regexp{}, // deprecated & not used - lines: make(map[string][]string), - errors: nil, - } - - ast.Walk(v, f) - - for _, err := range v.errors { - pass.Report(analysis.Diagnostic{ - Pos: pass.Fset.File(f.Pos()).Pos(err.Pos.Offset), - Message: "unchecked error", - Category: "errcheck", - }) - } - - allErrors = append(allErrors, v.errors...) - } - - return Result{UncheckedErrors: allErrors}, nil -} diff --git a/vendor/github.com/kisielk/errcheck/errcheck/embedded_walker.go b/vendor/github.com/kisielk/errcheck/errcheck/embedded_walker.go deleted file mode 100644 index dff391797..000000000 --- a/vendor/github.com/kisielk/errcheck/errcheck/embedded_walker.go +++ /dev/null @@ -1,143 +0,0 @@ -package errcheck - -import ( - "fmt" - "go/types" -) - -// walkThroughEmbeddedInterfaces returns a slice of Interfaces that -// we need to walk through in order to reach the actual definition, -// in an Interface, of the method selected by the given selection. -// -// false will be returned in the second return value if: -// - the right side of the selection is not a function -// - the actual definition of the function is not in an Interface -// -// The returned slice will contain all the interface types that need -// to be walked through to reach the actual definition. 
-// -// For example, say we have: -// -// type Inner interface {Method()} -// type Middle interface {Inner} -// type Outer interface {Middle} -// type T struct {Outer} -// type U struct {T} -// type V struct {U} -// -// And then the selector: -// -// V.Method -// -// We'll return [Outer, Middle, Inner] by first walking through the embedded structs -// until we reach the Outer interface, then descending through the embedded interfaces -// until we find the one that actually explicitly defines Method. -func walkThroughEmbeddedInterfaces(sel *types.Selection) ([]types.Type, bool) { - fn, ok := sel.Obj().(*types.Func) - if !ok { - return nil, false - } - - // Start off at the receiver. - currentT := sel.Recv() - - // First, we can walk through any Struct fields provided - // by the selection Index() method. We ignore the last - // index because it would give the method itself. - indexes := sel.Index() - for _, fieldIndex := range indexes[:len(indexes)-1] { - currentT = getTypeAtFieldIndex(currentT, fieldIndex) - } - - // Now currentT is either a type implementing the actual function, - // an Invalid type (if the receiver is a package), or an interface. - // - // If it's not an Interface, then we're done, as this function - // only cares about Interface-defined functions. - // - // If it is an Interface, we potentially need to continue digging until - // we find the Interface that actually explicitly defines the function. - interfaceT, ok := maybeUnname(currentT).(*types.Interface) - if !ok { - return nil, false - } - - // The first interface we pass through is this one we've found. We return the possibly - // wrapping types.Named because it is more useful to work with for callers. - result := []types.Type{currentT} - - // If this interface itself explicitly defines the given method - // then we're done digging. - for !explicitlyDefinesMethod(interfaceT, fn) { - // Otherwise, we find which of the embedded interfaces _does_ - // define the method, add it to our list, and loop. - namedInterfaceT, ok := getEmbeddedInterfaceDefiningMethod(interfaceT, fn) - if !ok { - // Returned a nil interface, we are done. - break - } - result = append(result, namedInterfaceT) - interfaceT = namedInterfaceT.Underlying().(*types.Interface) - } - - return result, true -} - -func getTypeAtFieldIndex(startingAt types.Type, fieldIndex int) types.Type { - t := maybeUnname(maybeDereference(startingAt)) - s, ok := t.(*types.Struct) - if !ok { - panic(fmt.Sprintf("cannot get Field of a type that is not a struct, got a %T", t)) - } - - return s.Field(fieldIndex).Type() -} - -// getEmbeddedInterfaceDefiningMethod searches through any embedded interfaces of the -// passed interface searching for one that defines the given function. If found, the -// types.Named wrapping that interface will be returned along with true in the second value. -// -// If no such embedded interface is found, nil and false are returned. 
-func getEmbeddedInterfaceDefiningMethod(interfaceT *types.Interface, fn *types.Func) (*types.Named, bool) { - for i := 0; i < interfaceT.NumEmbeddeds(); i++ { - embedded := interfaceT.Embedded(i) - if embedded != nil && definesMethod(embedded.Underlying().(*types.Interface), fn) { - return embedded, true - } - } - return nil, false -} - -func explicitlyDefinesMethod(interfaceT *types.Interface, fn *types.Func) bool { - for i := 0; i < interfaceT.NumExplicitMethods(); i++ { - if interfaceT.ExplicitMethod(i) == fn { - return true - } - } - return false -} - -func definesMethod(interfaceT *types.Interface, fn *types.Func) bool { - for i := 0; i < interfaceT.NumMethods(); i++ { - if interfaceT.Method(i) == fn { - return true - } - } - return false -} - -func maybeDereference(t types.Type) types.Type { - p, ok := t.(*types.Pointer) - if ok { - return p.Elem() - } - return t -} - -func maybeUnname(t types.Type) types.Type { - n, ok := t.(*types.Named) - if ok { - return n.Underlying() - } - return t -} diff --git a/vendor/github.com/kisielk/errcheck/errcheck/errcheck.go b/vendor/github.com/kisielk/errcheck/errcheck/errcheck.go deleted file mode 100644 index d61d348f7..000000000 --- a/vendor/github.com/kisielk/errcheck/errcheck/errcheck.go +++ /dev/null @@ -1,668 +0,0 @@ -// Package errcheck is the library used to implement the errcheck command-line tool. -package errcheck - -import ( - "bufio" - "errors" - "fmt" - "go/ast" - "go/token" - "go/types" - "os" - "regexp" - "sort" - "strings" - - "golang.org/x/tools/go/packages" -) - -var errorType *types.Interface - -func init() { - errorType = types.Universe.Lookup("error").Type().Underlying().(*types.Interface) -} - -var ( - // ErrNoGoFiles is returned when CheckPackage is run on a package with no Go source files - ErrNoGoFiles = errors.New("package contains no go source files") -) - -// UncheckedError indicates the position of an unchecked error return. -type UncheckedError struct { - Pos token.Position - Line string - FuncName string - SelectorName string -} - -// Result is returned from the CheckPackage function, and holds all the errors -// that were found to be unchecked in a package. -// -// Aggregation can be done using the Append method for users that want to -// combine results from multiple packages. -type Result struct { - // UncheckedErrors is a list of all the unchecked errors in the package. - // Printing an error reports its position within the file and the contents of the line. - UncheckedErrors []UncheckedError -} - -type byName []UncheckedError - -// Less reports whether the element with index i should sort before the element with index j. -func (b byName) Less(i, j int) bool { - ei, ej := b[i], b[j] - - pi, pj := ei.Pos, ej.Pos - - if pi.Filename != pj.Filename { - return pi.Filename < pj.Filename - } - if pi.Line != pj.Line { - return pi.Line < pj.Line - } - if pi.Column != pj.Column { - return pi.Column < pj.Column - } - - return ei.Line < ej.Line -} - -func (b byName) Swap(i, j int) { - b[i], b[j] = b[j], b[i] -} - -func (b byName) Len() int { - return len(b) -} - -// Append appends errors to e. Append does not do any duplicate checking. -func (r *Result) Append(other Result) { - r.UncheckedErrors = append(r.UncheckedErrors, other.UncheckedErrors...) -} - -// Returns the unique errors that have been accumulated. Duplicates may occur -// when a file containing an unchecked error belongs to > 1 package. -// -// The method receiver remains unmodified after the call to Unique. 
-func (r Result) Unique() Result { - result := make([]UncheckedError, len(r.UncheckedErrors)) - copy(result, r.UncheckedErrors) - sort.Sort((byName)(result)) - uniq := result[:0] // compact in-place - for i, err := range result { - if i == 0 || err != result[i-1] { - uniq = append(uniq, err) - } - } - return Result{UncheckedErrors: uniq} -} - -// Exclusions define symbols and language elements that will be not checked -type Exclusions struct { - - // Packages lists paths of excluded packages. - Packages []string - - // SymbolRegexpsByPackage maps individual package paths to regular - // expressions that match symbols to be excluded. - // - // Packages whose paths appear both here and in Packages list will - // be excluded entirely. - // - // This is a legacy input that will be deprecated in errcheck version 2 and - // should not be used. - SymbolRegexpsByPackage map[string]*regexp.Regexp - - // Symbols lists patterns that exclude individual package symbols. - // - // For example: - // - // "fmt.Errorf" // function - // "fmt.Fprintf(os.Stderr)" // function with set argument value - // "(hash.Hash).Write" // method - // - Symbols []string - - // TestFiles excludes _test.go files. - TestFiles bool - - // GeneratedFiles excludes generated source files. - // - // Source file is assumed to be generated if its contents - // match the following regular expression: - // - // ^// Code generated .* DO NOT EDIT\\.$ - // - GeneratedFiles bool - - // BlankAssignments ignores assignments to blank identifier. - BlankAssignments bool - - // TypeAssertions ignores unchecked type assertions. - TypeAssertions bool -} - -// Checker checks that you checked errors. -type Checker struct { - // Exclusions defines code packages, symbols, and other elements that will not be checked. - Exclusions Exclusions - - // Tags are a list of build tags to use. - Tags []string - - // The mod flag for go build. - Mod string -} - -// loadPackages is used for testing. -var loadPackages = func(cfg *packages.Config, paths ...string) ([]*packages.Package, error) { - return packages.Load(cfg, paths...) -} - -// LoadPackages loads all the packages in all the paths provided. It uses the -// exclusions and build tags provided to by the user when loading the packages. -func (c *Checker) LoadPackages(paths ...string) ([]*packages.Package, error) { - buildFlags := []string{fmtTags(c.Tags)} - if c.Mod != "" { - buildFlags = append(buildFlags, fmt.Sprintf("-mod=%s", c.Mod)) - } - cfg := &packages.Config{ - Mode: packages.NeedSyntax | packages.NeedTypes | packages.NeedTypesInfo, - Tests: !c.Exclusions.TestFiles, - BuildFlags: buildFlags, - } - return loadPackages(cfg, paths...) -} - -var generatedCodeRegexp = regexp.MustCompile("^// Code generated .* DO NOT EDIT\\.$") -var dotStar = regexp.MustCompile(".*") - -func (c *Checker) shouldSkipFile(file *ast.File) bool { - if !c.Exclusions.GeneratedFiles { - return false - } - - for _, cg := range file.Comments { - for _, comment := range cg.List { - if generatedCodeRegexp.MatchString(comment.Text) { - return true - } - } - } - - return false -} - -// CheckPackage checks packages for errors that have not been checked. -// -// It will exclude specific errors from analysis if the user has configured -// exclusions. 
-func (c *Checker) CheckPackage(pkg *packages.Package) Result { - excludedSymbols := map[string]bool{} - for _, sym := range c.Exclusions.Symbols { - excludedSymbols[sym] = true - } - - ignore := map[string]*regexp.Regexp{} - // Apply SymbolRegexpsByPackage first so that if the same path appears in - // Packages, a more narrow regexp will be superseded by dotStar below. - if regexps := c.Exclusions.SymbolRegexpsByPackage; regexps != nil { - for pkg, re := range regexps { - // TODO warn if previous entry overwritten? - ignore[nonVendoredPkgPath(pkg)] = re - } - } - for _, pkg := range c.Exclusions.Packages { - // TODO warn if previous entry overwritten? - ignore[nonVendoredPkgPath(pkg)] = dotStar - } - - v := &visitor{ - typesInfo: pkg.TypesInfo, - fset: pkg.Fset, - ignore: ignore, - blank: !c.Exclusions.BlankAssignments, - asserts: !c.Exclusions.TypeAssertions, - lines: make(map[string][]string), - exclude: excludedSymbols, - errors: []UncheckedError{}, - } - - for _, astFile := range pkg.Syntax { - if c.shouldSkipFile(astFile) { - continue - } - ast.Walk(v, astFile) - } - return Result{UncheckedErrors: v.errors} -} - -// visitor implements the errcheck algorithm -type visitor struct { - typesInfo *types.Info - fset *token.FileSet - ignore map[string]*regexp.Regexp - blank bool - asserts bool - lines map[string][]string - exclude map[string]bool - - errors []UncheckedError -} - -// selectorAndFunc tries to get the selector and function from call expression. -// For example, given the call expression representing "a.b()", the selector -// is "a.b" and the function is "b" itself. -// -// The final return value will be true if it is able to do extract a selector -// from the call and look up the function object it refers to. -// -// If the call does not include a selector (like if it is a plain "f()" function call) -// then the final return value will be false. -func (v *visitor) selectorAndFunc(call *ast.CallExpr) (*ast.SelectorExpr, *types.Func, bool) { - sel, ok := call.Fun.(*ast.SelectorExpr) - if !ok { - return nil, nil, false - } - - fn, ok := v.typesInfo.ObjectOf(sel.Sel).(*types.Func) - if !ok { - // Shouldn't happen, but be paranoid - return nil, nil, false - } - - return sel, fn, true - -} - -// fullName will return a package / receiver-type qualified name for a called function -// if the function is the result of a selector. Otherwise it will return -// the empty string. -// -// The name is fully qualified by the import path, possible type, -// function/method name and pointer receiver. -// -// For example, -// - for "fmt.Printf(...)" it will return "fmt.Printf" -// - for "base64.StdEncoding.Decode(...)" it will return "(*encoding/base64.Encoding).Decode" -// - for "myFunc()" it will return "" -func (v *visitor) fullName(call *ast.CallExpr) string { - _, fn, ok := v.selectorAndFunc(call) - if !ok { - return "" - } - - // TODO(dh): vendored packages will have /vendor/ in their name, - // thus not matching vendored standard library packages. If we - // want to support vendored stdlib packages, we need to implement - // FullName with our own logic. - return fn.FullName() -} - -func getSelectorName(sel *ast.SelectorExpr) string { - if ident, ok := sel.X.(*ast.Ident); ok { - return fmt.Sprintf("%s.%s", ident.Name, sel.Sel.Name) - } - if s, ok := sel.X.(*ast.SelectorExpr); ok { - return fmt.Sprintf("%s.%s", getSelectorName(s), sel.Sel.Name) - } - - return "" -} - -// selectorName will return a name for a called function -// if the function is the result of a selector. 
Otherwise it will return -// the empty string. -// -// The name is fully qualified by the import path, possible type, -// function/method name and pointer receiver. -// -// For example, -// - for "fmt.Printf(...)" it will return "fmt.Printf" -// - for "base64.StdEncoding.Decode(...)" it will return "base64.StdEncoding.Decode" -// - for "myFunc()" it will return "" -func (v *visitor) selectorName(call *ast.CallExpr) string { - sel, _, ok := v.selectorAndFunc(call) - if !ok { - return "" - } - - return getSelectorName(sel) -} - -// namesForExcludeCheck will return a list of fully-qualified function names -// from a function call that can be used to check against the exclusion list. -// -// If a function call is against a local function (like "myFunc()") then no -// names are returned. If the function is package-qualified (like "fmt.Printf()") -// then just that function's fullName is returned. -// -// Otherwise, we walk through all the potentially embedded interfaces of the receiver -// the collect a list of type-qualified function names that we will check. -func (v *visitor) namesForExcludeCheck(call *ast.CallExpr) []string { - sel, fn, ok := v.selectorAndFunc(call) - if !ok { - return nil - } - - name := v.fullName(call) - if name == "" { - return nil - } - - // This will be missing for functions without a receiver (like fmt.Printf), - // so just fall back to the the function's fullName in that case. - selection, ok := v.typesInfo.Selections[sel] - if !ok { - return []string{name} - } - - // This will return with ok false if the function isn't defined - // on an interface, so just fall back to the fullName. - ts, ok := walkThroughEmbeddedInterfaces(selection) - if !ok { - return []string{name} - } - - result := make([]string, len(ts)) - for i, t := range ts { - // Like in fullName, vendored packages will have /vendor/ in their name, - // thus not matching vendored standard library packages. If we - // want to support vendored stdlib packages, we need to implement - // additional logic here. - result[i] = fmt.Sprintf("(%s).%s", t.String(), fn.Name()) - } - return result -} - -// isBufferType checks if the expression type is a known in-memory buffer type. -func (v *visitor) argName(expr ast.Expr) string { - // Special-case literal "os.Stdout" and "os.Stderr" - if sel, ok := expr.(*ast.SelectorExpr); ok { - if obj := v.typesInfo.ObjectOf(sel.Sel); obj != nil { - vr, ok := obj.(*types.Var) - if ok && vr.Pkg() != nil && vr.Pkg().Name() == "os" && (vr.Name() == "Stderr" || vr.Name() == "Stdout") { - return "os." + vr.Name() - } - } - } - t := v.typesInfo.TypeOf(expr) - if t == nil { - return "" - } - return t.String() -} - -func (v *visitor) excludeCall(call *ast.CallExpr) bool { - var arg0 string - if len(call.Args) > 0 { - arg0 = v.argName(call.Args[0]) - } - for _, name := range v.namesForExcludeCheck(call) { - if v.exclude[name] { - return true - } - if arg0 != "" && v.exclude[name+"("+arg0+")"] { - return true - } - } - return false -} - -func (v *visitor) ignoreCall(call *ast.CallExpr) bool { - if v.excludeCall(call) { - return true - } - - // Try to get an identifier. - // Currently only supports simple expressions: - // 1. f() - // 2. 
x.y.f() - var id *ast.Ident - switch exp := call.Fun.(type) { - case (*ast.Ident): - id = exp - case (*ast.SelectorExpr): - id = exp.Sel - default: - // eg: *ast.SliceExpr, *ast.IndexExpr - } - - if id == nil { - return false - } - - // If we got an identifier for the function, see if it is ignored - if re, ok := v.ignore[""]; ok && re.MatchString(id.Name) { - return true - } - - if obj := v.typesInfo.Uses[id]; obj != nil { - if pkg := obj.Pkg(); pkg != nil { - if re, ok := v.ignore[nonVendoredPkgPath(pkg.Path())]; ok { - return re.MatchString(id.Name) - } - } - } - - return false -} - -// nonVendoredPkgPath returns the unvendored version of the provided package -// path (or returns the provided path if it does not represent a vendored -// path). -func nonVendoredPkgPath(pkgPath string) string { - lastVendorIndex := strings.LastIndex(pkgPath, "/vendor/") - if lastVendorIndex == -1 { - return pkgPath - } - return pkgPath[lastVendorIndex+len("/vendor/"):] -} - -// errorsByArg returns a slice s such that -// len(s) == number of return types of call -// s[i] == true iff return type at position i from left is an error type -func (v *visitor) errorsByArg(call *ast.CallExpr) []bool { - switch t := v.typesInfo.Types[call].Type.(type) { - case *types.Named: - // Single return - return []bool{isErrorType(t)} - case *types.Pointer: - // Single return via pointer - return []bool{isErrorType(t)} - case *types.Tuple: - // Multiple returns - s := make([]bool, t.Len()) - for i := 0; i < t.Len(); i++ { - switch et := t.At(i).Type().(type) { - case *types.Named: - // Single return - s[i] = isErrorType(et) - case *types.Pointer: - // Single return via pointer - s[i] = isErrorType(et) - default: - s[i] = false - } - } - return s - } - return []bool{false} -} - -func (v *visitor) callReturnsError(call *ast.CallExpr) bool { - if v.isRecover(call) { - return true - } - for _, isError := range v.errorsByArg(call) { - if isError { - return true - } - } - return false -} - -// isRecover returns true if the given CallExpr is a call to the built-in recover() function. -func (v *visitor) isRecover(call *ast.CallExpr) bool { - if fun, ok := call.Fun.(*ast.Ident); ok { - if _, ok := v.typesInfo.Uses[fun].(*types.Builtin); ok { - return fun.Name == "recover" - } - } - return false -} - -// TODO (dtcaciuc) collect token.Pos and then convert them to UncheckedErrors -// after visitor is done running. This will allow to integrate more cleanly -// with analyzer so that we don't have to convert Position back to Pos. -func (v *visitor) addErrorAtPosition(position token.Pos, call *ast.CallExpr) { - pos := v.fset.Position(position) - lines, ok := v.lines[pos.Filename] - if !ok { - lines = readfile(pos.Filename) - v.lines[pos.Filename] = lines - } - - line := "??" 
- if pos.Line-1 < len(lines) { - line = strings.TrimSpace(lines[pos.Line-1]) - } - - var name string - var sel string - if call != nil { - name = v.fullName(call) - sel = v.selectorName(call) - } - - v.errors = append(v.errors, UncheckedError{pos, line, name, sel}) -} - -func readfile(filename string) []string { - var f, err = os.Open(filename) - if err != nil { - return nil - } - defer f.Close() - - var lines []string - var scanner = bufio.NewScanner(f) - for scanner.Scan() { - lines = append(lines, scanner.Text()) - } - return lines -} - -func (v *visitor) Visit(node ast.Node) ast.Visitor { - switch stmt := node.(type) { - case *ast.ExprStmt: - if call, ok := stmt.X.(*ast.CallExpr); ok { - if !v.ignoreCall(call) && v.callReturnsError(call) { - v.addErrorAtPosition(call.Lparen, call) - } - } - case *ast.GoStmt: - if !v.ignoreCall(stmt.Call) && v.callReturnsError(stmt.Call) { - v.addErrorAtPosition(stmt.Call.Lparen, stmt.Call) - } - case *ast.DeferStmt: - if !v.ignoreCall(stmt.Call) && v.callReturnsError(stmt.Call) { - v.addErrorAtPosition(stmt.Call.Lparen, stmt.Call) - } - case *ast.GenDecl: - if stmt.Tok != token.VAR { - break - } - - for _, spec := range stmt.Specs { - vspec := spec.(*ast.ValueSpec) - - if len(vspec.Values) == 0 { - // ignore declarations w/o assignments - continue - } - - var lhs []ast.Expr - for _, name := range vspec.Names { - lhs = append(lhs, ast.Expr(name)) - } - v.checkAssignment(lhs, vspec.Values) - } - - case *ast.AssignStmt: - v.checkAssignment(stmt.Lhs, stmt.Rhs) - - default: - } - return v -} - -func (v *visitor) checkAssignment(lhs, rhs []ast.Expr) { - if len(rhs) == 1 { - // single value on rhs; check against lhs identifiers - if call, ok := rhs[0].(*ast.CallExpr); ok { - if !v.blank { - return - } - if v.ignoreCall(call) { - return - } - isError := v.errorsByArg(call) - for i := 0; i < len(lhs); i++ { - if id, ok := lhs[i].(*ast.Ident); ok { - // We shortcut calls to recover() because errorsByArg can't - // check its return types for errors since it returns interface{}. - if id.Name == "_" && (v.isRecover(call) || isError[i]) { - v.addErrorAtPosition(id.NamePos, call) - } - } - } - } else if assert, ok := rhs[0].(*ast.TypeAssertExpr); ok { - if !v.asserts { - return - } - if assert.Type == nil { - // type switch - return - } - if len(lhs) < 2 { - // assertion result not read - v.addErrorAtPosition(rhs[0].Pos(), nil) - } else if id, ok := lhs[1].(*ast.Ident); ok && v.blank && id.Name == "_" { - // assertion result ignored - v.addErrorAtPosition(id.NamePos, nil) - } - } - } else { - // multiple value on rhs; in this case a call can't return - // multiple values. 
Assume len(lhs) == len(rhs) - for i := 0; i < len(lhs); i++ { - if id, ok := lhs[i].(*ast.Ident); ok { - if call, ok := rhs[i].(*ast.CallExpr); ok { - if !v.blank { - continue - } - if v.ignoreCall(call) { - continue - } - if id.Name == "_" && v.callReturnsError(call) { - v.addErrorAtPosition(id.NamePos, call) - } - } else if assert, ok := rhs[i].(*ast.TypeAssertExpr); ok { - if !v.asserts { - continue - } - if assert.Type == nil { - // Shouldn't happen anyway, no multi assignment in type switches - continue - } - v.addErrorAtPosition(id.NamePos, nil) - } - } - } - } -} - -func isErrorType(t types.Type) bool { - return types.Implements(t, errorType) -} diff --git a/vendor/github.com/kisielk/errcheck/errcheck/excludes.go b/vendor/github.com/kisielk/errcheck/errcheck/excludes.go deleted file mode 100644 index a783b5a76..000000000 --- a/vendor/github.com/kisielk/errcheck/errcheck/excludes.go +++ /dev/null @@ -1,79 +0,0 @@ -package errcheck - -import ( - "bufio" - "bytes" - "os" - "strings" -) - -// DefaultExcludedSymbols is a list of symbol names that are usually excluded from checks by default. -// -// Note, that they still need to be explicitly copied to Checker.Exclusions.Symbols -var DefaultExcludedSymbols = []string{ - // bytes - "(*bytes.Buffer).Write", - "(*bytes.Buffer).WriteByte", - "(*bytes.Buffer).WriteRune", - "(*bytes.Buffer).WriteString", - - // fmt - "fmt.Print", - "fmt.Printf", - "fmt.Println", - "fmt.Fprint(*bytes.Buffer)", - "fmt.Fprintf(*bytes.Buffer)", - "fmt.Fprintln(*bytes.Buffer)", - "fmt.Fprint(*strings.Builder)", - "fmt.Fprintf(*strings.Builder)", - "fmt.Fprintln(*strings.Builder)", - "fmt.Fprint(os.Stderr)", - "fmt.Fprintf(os.Stderr)", - "fmt.Fprintln(os.Stderr)", - - // io - "(*io.PipeReader).CloseWithError", - "(*io.PipeWriter).CloseWithError", - - // math/rand - "math/rand.Read", - "(*math/rand.Rand).Read", - - // strings - "(*strings.Builder).Write", - "(*strings.Builder).WriteByte", - "(*strings.Builder).WriteRune", - "(*strings.Builder).WriteString", - - // hash - "(hash.Hash).Write", -} - -// ReadExcludes reads an excludes file, a newline delimited file that lists -// patterns for which to allow unchecked errors. -// -// Lines that start with two forward slashes are considered comments and are ignored. -func ReadExcludes(path string) ([]string, error) { - var excludes []string - - buf, err := os.ReadFile(path) - if err != nil { - return nil, err - } - - scanner := bufio.NewScanner(bytes.NewReader(buf)) - - for scanner.Scan() { - name := scanner.Text() - // Skip comments and empty lines. 
- if strings.HasPrefix(name, "//") || name == "" { - continue - } - excludes = append(excludes, name) - } - if err := scanner.Err(); err != nil { - return nil, err - } - - return excludes, nil -} diff --git a/vendor/github.com/kisielk/errcheck/errcheck/tags.go b/vendor/github.com/kisielk/errcheck/errcheck/tags.go deleted file mode 100644 index 7b423ca69..000000000 --- a/vendor/github.com/kisielk/errcheck/errcheck/tags.go +++ /dev/null @@ -1,12 +0,0 @@ -// +build go1.13 - -package errcheck - -import ( - "fmt" - "strings" -) - -func fmtTags(tags []string) string { - return fmt.Sprintf("-tags=%s", strings.Join(tags, ",")) -} diff --git a/vendor/github.com/kisielk/errcheck/errcheck/tags_compat.go b/vendor/github.com/kisielk/errcheck/errcheck/tags_compat.go deleted file mode 100644 index 2f534f40a..000000000 --- a/vendor/github.com/kisielk/errcheck/errcheck/tags_compat.go +++ /dev/null @@ -1,13 +0,0 @@ -// +build go1.11 -// +build !go1.13 - -package errcheck - -import ( - "fmt" - "strings" -) - -func fmtTags(tags []string) string { - return fmt.Sprintf("-tags=%s", strings.Join(tags, " ")) -} diff --git a/vendor/github.com/kisielk/gotool/.travis.yml b/vendor/github.com/kisielk/gotool/.travis.yml deleted file mode 100644 index d1784e1e2..000000000 --- a/vendor/github.com/kisielk/gotool/.travis.yml +++ /dev/null @@ -1,23 +0,0 @@ -sudo: false -language: go -go: - - 1.2 - - 1.3 - - 1.4 - - 1.5 - - 1.6 - - 1.7 - - 1.8 - - 1.9 - - master -matrix: - allow_failures: - - go: master - fast_finish: true -install: - - # Skip. -script: - - go get -t -v ./... - - diff -u <(echo -n) <(gofmt -d .) - - go tool vet . - - go test -v -race ./... diff --git a/vendor/github.com/kisielk/gotool/LEGAL b/vendor/github.com/kisielk/gotool/LEGAL deleted file mode 100644 index 72b859cd6..000000000 --- a/vendor/github.com/kisielk/gotool/LEGAL +++ /dev/null @@ -1,32 +0,0 @@ -All the files in this distribution are covered under either the MIT -license (see the file LICENSE) except some files mentioned below. - -match.go, match_test.go: - - Copyright (c) 2009 The Go Authors. All rights reserved. - - Redistribution and use in source and binary forms, with or without - modification, are permitted provided that the following conditions are - met: - - * Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. - * Redistributions in binary form must reproduce the above - copyright notice, this list of conditions and the following disclaimer - in the documentation and/or other materials provided with the - distribution. - * Neither the name of Google Inc. nor the names of its - contributors may be used to endorse or promote products derived from - this software without specific prior written permission. - - THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS - "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT - LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR - A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT - OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, - SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT - LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, - DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY - THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT - (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE - OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/vendor/github.com/kisielk/gotool/LICENSE b/vendor/github.com/kisielk/gotool/LICENSE deleted file mode 100644 index 1cbf651e2..000000000 --- a/vendor/github.com/kisielk/gotool/LICENSE +++ /dev/null @@ -1,20 +0,0 @@ -Copyright (c) 2013 Kamil Kisiel - -Permission is hereby granted, free of charge, to any person obtaining -a copy of this software and associated documentation files (the -"Software"), to deal in the Software without restriction, including -without limitation the rights to use, copy, modify, merge, publish, -distribute, sublicense, and/or sell copies of the Software, and to -permit persons to whom the Software is furnished to do so, subject to -the following conditions: - -The above copyright notice and this permission notice shall be -included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/vendor/github.com/kisielk/gotool/README.md b/vendor/github.com/kisielk/gotool/README.md deleted file mode 100644 index 6e4e92b2f..000000000 --- a/vendor/github.com/kisielk/gotool/README.md +++ /dev/null @@ -1,6 +0,0 @@ -gotool -====== -[![GoDoc](https://godoc.org/github.com/kisielk/gotool?status.svg)](https://godoc.org/github.com/kisielk/gotool) -[![Build Status](https://travis-ci.org/kisielk/gotool.svg?branch=master)](https://travis-ci.org/kisielk/gotool) - -Package gotool contains utility functions used to implement the standard "cmd/go" tool, provided as a convenience to developers who want to write tools with similar semantics. 
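For context on what this removal drops: the gotool README deleted just above describes a package whose job is expanding command-line package patterns the same way cmd/go does, and it was only pulled in here as a transitive dependency of the errcheck/golangci-lint vendoring that this diff prunes. Below is a minimal sketch of how such a tool typically used it, based on the exported API shown later in this diff (tool.go's ImportPaths); the actual packages printed depend on the module you run it in, so the behaviour described in the comments is illustrative only.

```go
// Minimal sketch: expand "go build"-style package patterns with gotool.
// Assumes github.com/kisielk/gotool is available (this diff removes the
// vendored copy, so the module would need to require it directly).
package main

import (
	"fmt"

	"github.com/kisielk/gotool"
)

func main() {
	// "./..." expands to every package under the current directory,
	// "std" to the standard library, and "all" to everything in
	// GOPATH/GOROOT. An empty argument list is treated as []string{"."}.
	for _, pkg := range gotool.ImportPaths([]string{"./..."}) {
		fmt.Println(pkg)
	}
}
```

The empty-arguments behaviour matches the documentation on ImportPaths in the tool.go deletion further down in this diff.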
diff --git a/vendor/github.com/kisielk/gotool/go13.go b/vendor/github.com/kisielk/gotool/go13.go deleted file mode 100644 index 2dd9b3fdf..000000000 --- a/vendor/github.com/kisielk/gotool/go13.go +++ /dev/null @@ -1,15 +0,0 @@ -// +build !go1.4 - -package gotool - -import ( - "go/build" - "path/filepath" - "runtime" -) - -var gorootSrc = filepath.Join(runtime.GOROOT(), "src", "pkg") - -func shouldIgnoreImport(p *build.Package) bool { - return true -} diff --git a/vendor/github.com/kisielk/gotool/go14-15.go b/vendor/github.com/kisielk/gotool/go14-15.go deleted file mode 100644 index aa99a3227..000000000 --- a/vendor/github.com/kisielk/gotool/go14-15.go +++ /dev/null @@ -1,15 +0,0 @@ -// +build go1.4,!go1.6 - -package gotool - -import ( - "go/build" - "path/filepath" - "runtime" -) - -var gorootSrc = filepath.Join(runtime.GOROOT(), "src") - -func shouldIgnoreImport(p *build.Package) bool { - return true -} diff --git a/vendor/github.com/kisielk/gotool/go16-18.go b/vendor/github.com/kisielk/gotool/go16-18.go deleted file mode 100644 index f25cec14a..000000000 --- a/vendor/github.com/kisielk/gotool/go16-18.go +++ /dev/null @@ -1,15 +0,0 @@ -// +build go1.6,!go1.9 - -package gotool - -import ( - "go/build" - "path/filepath" - "runtime" -) - -var gorootSrc = filepath.Join(runtime.GOROOT(), "src") - -func shouldIgnoreImport(p *build.Package) bool { - return p == nil || len(p.InvalidGoFiles) == 0 -} diff --git a/vendor/github.com/kisielk/gotool/internal/load/path.go b/vendor/github.com/kisielk/gotool/internal/load/path.go deleted file mode 100644 index 74e15b9d3..000000000 --- a/vendor/github.com/kisielk/gotool/internal/load/path.go +++ /dev/null @@ -1,27 +0,0 @@ -// Copyright 2017 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// +build go1.9 - -package load - -import ( - "strings" -) - -// hasPathPrefix reports whether the path s begins with the -// elements in prefix. -func hasPathPrefix(s, prefix string) bool { - switch { - default: - return false - case len(s) == len(prefix): - return s == prefix - case len(s) > len(prefix): - if prefix != "" && prefix[len(prefix)-1] == '/' { - return strings.HasPrefix(s, prefix) - } - return s[len(prefix)] == '/' && s[:len(prefix)] == prefix - } -} diff --git a/vendor/github.com/kisielk/gotool/internal/load/pkg.go b/vendor/github.com/kisielk/gotool/internal/load/pkg.go deleted file mode 100644 index b937ede75..000000000 --- a/vendor/github.com/kisielk/gotool/internal/load/pkg.go +++ /dev/null @@ -1,25 +0,0 @@ -// Copyright 2011 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// +build go1.9 - -// Package load loads packages. -package load - -import ( - "strings" -) - -// isStandardImportPath reports whether $GOROOT/src/path should be considered -// part of the standard distribution. For historical reasons we allow people to add -// their own code to $GOROOT instead of using $GOPATH, but we assume that -// code will start with a domain name (dot in the first element). 
-func isStandardImportPath(path string) bool { - i := strings.Index(path, "/") - if i < 0 { - i = len(path) - } - elem := path[:i] - return !strings.Contains(elem, ".") -} diff --git a/vendor/github.com/kisielk/gotool/internal/load/search.go b/vendor/github.com/kisielk/gotool/internal/load/search.go deleted file mode 100644 index 17ed62dda..000000000 --- a/vendor/github.com/kisielk/gotool/internal/load/search.go +++ /dev/null @@ -1,354 +0,0 @@ -// Copyright 2017 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// +build go1.9 - -package load - -import ( - "fmt" - "go/build" - "log" - "os" - "path" - "path/filepath" - "regexp" - "strings" -) - -// Context specifies values for operation of ImportPaths that would -// otherwise come from cmd/go/internal/cfg package. -// -// This is a construct added for gotool purposes and doesn't have -// an equivalent upstream in cmd/go. -type Context struct { - // BuildContext is the build context to use. - BuildContext build.Context - - // GOROOTsrc is the location of the src directory in GOROOT. - // At this time, it's used only in MatchPackages to skip - // GOOROOT/src entry from BuildContext.SrcDirs output. - GOROOTsrc string -} - -// allPackages returns all the packages that can be found -// under the $GOPATH directories and $GOROOT matching pattern. -// The pattern is either "all" (all packages), "std" (standard packages), -// "cmd" (standard commands), or a path including "...". -func (c *Context) allPackages(pattern string) []string { - pkgs := c.MatchPackages(pattern) - if len(pkgs) == 0 { - fmt.Fprintf(os.Stderr, "warning: %q matched no packages\n", pattern) - } - return pkgs -} - -// allPackagesInFS is like allPackages but is passed a pattern -// beginning ./ or ../, meaning it should scan the tree rooted -// at the given directory. There are ... in the pattern too. -func (c *Context) allPackagesInFS(pattern string) []string { - pkgs := c.MatchPackagesInFS(pattern) - if len(pkgs) == 0 { - fmt.Fprintf(os.Stderr, "warning: %q matched no packages\n", pattern) - } - return pkgs -} - -// MatchPackages returns a list of package paths matching pattern -// (see go help packages for pattern syntax). -func (c *Context) MatchPackages(pattern string) []string { - match := func(string) bool { return true } - treeCanMatch := func(string) bool { return true } - if !IsMetaPackage(pattern) { - match = matchPattern(pattern) - treeCanMatch = treeCanMatchPattern(pattern) - } - - have := map[string]bool{ - "builtin": true, // ignore pseudo-package that exists only for documentation - } - if !c.BuildContext.CgoEnabled { - have["runtime/cgo"] = true // ignore during walk - } - var pkgs []string - - for _, src := range c.BuildContext.SrcDirs() { - if (pattern == "std" || pattern == "cmd") && src != c.GOROOTsrc { - continue - } - src = filepath.Clean(src) + string(filepath.Separator) - root := src - if pattern == "cmd" { - root += "cmd" + string(filepath.Separator) - } - filepath.Walk(root, func(path string, fi os.FileInfo, err error) error { - if err != nil || path == src { - return nil - } - - want := true - // Avoid .foo, _foo, and testdata directory trees. - _, elem := filepath.Split(path) - if strings.HasPrefix(elem, ".") || strings.HasPrefix(elem, "_") || elem == "testdata" { - want = false - } - - name := filepath.ToSlash(path[len(src):]) - if pattern == "std" && (!isStandardImportPath(name) || name == "cmd") { - // The name "std" is only the standard library. 
- // If the name is cmd, it's the root of the command tree. - want = false - } - if !treeCanMatch(name) { - want = false - } - - if !fi.IsDir() { - if fi.Mode()&os.ModeSymlink != 0 && want { - if target, err := os.Stat(path); err == nil && target.IsDir() { - fmt.Fprintf(os.Stderr, "warning: ignoring symlink %s\n", path) - } - } - return nil - } - if !want { - return filepath.SkipDir - } - - if have[name] { - return nil - } - have[name] = true - if !match(name) { - return nil - } - pkg, err := c.BuildContext.ImportDir(path, 0) - if err != nil { - if _, noGo := err.(*build.NoGoError); noGo { - return nil - } - } - - // If we are expanding "cmd", skip main - // packages under cmd/vendor. At least as of - // March, 2017, there is one there for the - // vendored pprof tool. - if pattern == "cmd" && strings.HasPrefix(pkg.ImportPath, "cmd/vendor") && pkg.Name == "main" { - return nil - } - - pkgs = append(pkgs, name) - return nil - }) - } - return pkgs -} - -// MatchPackagesInFS returns a list of package paths matching pattern, -// which must begin with ./ or ../ -// (see go help packages for pattern syntax). -func (c *Context) MatchPackagesInFS(pattern string) []string { - // Find directory to begin the scan. - // Could be smarter but this one optimization - // is enough for now, since ... is usually at the - // end of a path. - i := strings.Index(pattern, "...") - dir, _ := path.Split(pattern[:i]) - - // pattern begins with ./ or ../. - // path.Clean will discard the ./ but not the ../. - // We need to preserve the ./ for pattern matching - // and in the returned import paths. - prefix := "" - if strings.HasPrefix(pattern, "./") { - prefix = "./" - } - match := matchPattern(pattern) - - var pkgs []string - filepath.Walk(dir, func(path string, fi os.FileInfo, err error) error { - if err != nil || !fi.IsDir() { - return nil - } - if path == dir { - // filepath.Walk starts at dir and recurses. For the recursive case, - // the path is the result of filepath.Join, which calls filepath.Clean. - // The initial case is not Cleaned, though, so we do this explicitly. - // - // This converts a path like "./io/" to "io". Without this step, running - // "cd $GOROOT/src; go list ./io/..." would incorrectly skip the io - // package, because prepending the prefix "./" to the unclean path would - // result in "././io", and match("././io") returns false. - path = filepath.Clean(path) - } - - // Avoid .foo, _foo, and testdata directory trees, but do not avoid "." or "..". - _, elem := filepath.Split(path) - dot := strings.HasPrefix(elem, ".") && elem != "." && elem != ".." - if dot || strings.HasPrefix(elem, "_") || elem == "testdata" { - return filepath.SkipDir - } - - name := prefix + filepath.ToSlash(path) - if !match(name) { - return nil - } - - // We keep the directory if we can import it, or if we can't import it - // due to invalid Go source files. This means that directories containing - // parse errors will be built (and fail) instead of being silently skipped - // as not matching the pattern. Go 1.5 and earlier skipped, but that - // behavior means people miss serious mistakes. - // See golang.org/issue/11407. - if p, err := c.BuildContext.ImportDir(path, 0); err != nil && (p == nil || len(p.InvalidGoFiles) == 0) { - if _, noGo := err.(*build.NoGoError); !noGo { - log.Print(err) - } - return nil - } - pkgs = append(pkgs, name) - return nil - }) - return pkgs -} - -// treeCanMatchPattern(pattern)(name) reports whether -// name or children of name can possibly match pattern. 
-// Pattern is the same limited glob accepted by matchPattern. -func treeCanMatchPattern(pattern string) func(name string) bool { - wildCard := false - if i := strings.Index(pattern, "..."); i >= 0 { - wildCard = true - pattern = pattern[:i] - } - return func(name string) bool { - return len(name) <= len(pattern) && hasPathPrefix(pattern, name) || - wildCard && strings.HasPrefix(name, pattern) - } -} - -// matchPattern(pattern)(name) reports whether -// name matches pattern. Pattern is a limited glob -// pattern in which '...' means 'any string' and there -// is no other special syntax. -// Unfortunately, there are two special cases. Quoting "go help packages": -// -// First, /... at the end of the pattern can match an empty string, -// so that net/... matches both net and packages in its subdirectories, like net/http. -// Second, any slash-separted pattern element containing a wildcard never -// participates in a match of the "vendor" element in the path of a vendored -// package, so that ./... does not match packages in subdirectories of -// ./vendor or ./mycode/vendor, but ./vendor/... and ./mycode/vendor/... do. -// Note, however, that a directory named vendor that itself contains code -// is not a vendored package: cmd/vendor would be a command named vendor, -// and the pattern cmd/... matches it. -func matchPattern(pattern string) func(name string) bool { - // Convert pattern to regular expression. - // The strategy for the trailing /... is to nest it in an explicit ? expression. - // The strategy for the vendor exclusion is to change the unmatchable - // vendor strings to a disallowed code point (vendorChar) and to use - // "(anything but that codepoint)*" as the implementation of the ... wildcard. - // This is a bit complicated but the obvious alternative, - // namely a hand-written search like in most shell glob matchers, - // is too easy to make accidentally exponential. - // Using package regexp guarantees linear-time matching. - - const vendorChar = "\x00" - - if strings.Contains(pattern, vendorChar) { - return func(name string) bool { return false } - } - - re := regexp.QuoteMeta(pattern) - re = replaceVendor(re, vendorChar) - switch { - case strings.HasSuffix(re, `/`+vendorChar+`/\.\.\.`): - re = strings.TrimSuffix(re, `/`+vendorChar+`/\.\.\.`) + `(/vendor|/` + vendorChar + `/\.\.\.)` - case re == vendorChar+`/\.\.\.`: - re = `(/vendor|/` + vendorChar + `/\.\.\.)` - case strings.HasSuffix(re, `/\.\.\.`): - re = strings.TrimSuffix(re, `/\.\.\.`) + `(/\.\.\.)?` - } - re = strings.Replace(re, `\.\.\.`, `[^`+vendorChar+`]*`, -1) - - reg := regexp.MustCompile(`^` + re + `$`) - - return func(name string) bool { - if strings.Contains(name, vendorChar) { - return false - } - return reg.MatchString(replaceVendor(name, vendorChar)) - } -} - -// replaceVendor returns the result of replacing -// non-trailing vendor path elements in x with repl. -func replaceVendor(x, repl string) string { - if !strings.Contains(x, "vendor") { - return x - } - elem := strings.Split(x, "/") - for i := 0; i < len(elem)-1; i++ { - if elem[i] == "vendor" { - elem[i] = repl - } - } - return strings.Join(elem, "/") -} - -// ImportPaths returns the import paths to use for the given command line. -func (c *Context) ImportPaths(args []string) []string { - args = c.ImportPathsNoDotExpansion(args) - var out []string - for _, a := range args { - if strings.Contains(a, "...") { - if build.IsLocalImport(a) { - out = append(out, c.allPackagesInFS(a)...) - } else { - out = append(out, c.allPackages(a)...) 
- } - continue - } - out = append(out, a) - } - return out -} - -// ImportPathsNoDotExpansion returns the import paths to use for the given -// command line, but it does no ... expansion. -func (c *Context) ImportPathsNoDotExpansion(args []string) []string { - if len(args) == 0 { - return []string{"."} - } - var out []string - for _, a := range args { - // Arguments are supposed to be import paths, but - // as a courtesy to Windows developers, rewrite \ to / - // in command-line arguments. Handles .\... and so on. - if filepath.Separator == '\\' { - a = strings.Replace(a, `\`, `/`, -1) - } - - // Put argument in canonical form, but preserve leading ./. - if strings.HasPrefix(a, "./") { - a = "./" + path.Clean(a) - if a == "./." { - a = "." - } - } else { - a = path.Clean(a) - } - if IsMetaPackage(a) { - out = append(out, c.allPackages(a)...) - continue - } - out = append(out, a) - } - return out -} - -// IsMetaPackage checks if name is a reserved package name that expands to multiple packages. -func IsMetaPackage(name string) bool { - return name == "std" || name == "cmd" || name == "all" -} diff --git a/vendor/github.com/kisielk/gotool/match.go b/vendor/github.com/kisielk/gotool/match.go deleted file mode 100644 index 4dbdbff47..000000000 --- a/vendor/github.com/kisielk/gotool/match.go +++ /dev/null @@ -1,56 +0,0 @@ -// Copyright (c) 2009 The Go Authors. All rights reserved. -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following disclaimer -// in the documentation and/or other materials provided with the -// distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived from -// this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -// +build go1.9 - -package gotool - -import ( - "path/filepath" - - "github.com/kisielk/gotool/internal/load" -) - -// importPaths returns the import paths to use for the given command line. -func (c *Context) importPaths(args []string) []string { - lctx := load.Context{ - BuildContext: c.BuildContext, - GOROOTsrc: c.joinPath(c.BuildContext.GOROOT, "src"), - } - return lctx.ImportPaths(args) -} - -// joinPath calls c.BuildContext.JoinPath (if not nil) or else filepath.Join. -// -// It's a copy of the unexported build.Context.joinPath helper. 
-func (c *Context) joinPath(elem ...string) string { - if f := c.BuildContext.JoinPath; f != nil { - return f(elem...) - } - return filepath.Join(elem...) -} diff --git a/vendor/github.com/kisielk/gotool/match18.go b/vendor/github.com/kisielk/gotool/match18.go deleted file mode 100644 index 6d6b1368c..000000000 --- a/vendor/github.com/kisielk/gotool/match18.go +++ /dev/null @@ -1,317 +0,0 @@ -// Copyright (c) 2009 The Go Authors. All rights reserved. -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following disclaimer -// in the documentation and/or other materials provided with the -// distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived from -// this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -// +build !go1.9 - -package gotool - -import ( - "fmt" - "go/build" - "log" - "os" - "path" - "path/filepath" - "regexp" - "strings" -) - -// This file contains code from the Go distribution. - -// matchPattern(pattern)(name) reports whether -// name matches pattern. Pattern is a limited glob -// pattern in which '...' means 'any string' and there -// is no other special syntax. -func matchPattern(pattern string) func(name string) bool { - re := regexp.QuoteMeta(pattern) - re = strings.Replace(re, `\.\.\.`, `.*`, -1) - // Special case: foo/... matches foo too. - if strings.HasSuffix(re, `/.*`) { - re = re[:len(re)-len(`/.*`)] + `(/.*)?` - } - reg := regexp.MustCompile(`^` + re + `$`) - return reg.MatchString -} - -// matchPackages returns a list of package paths matching pattern -// (see go help packages for pattern syntax). 
-func (c *Context) matchPackages(pattern string) []string { - match := func(string) bool { return true } - treeCanMatch := func(string) bool { return true } - if !isMetaPackage(pattern) { - match = matchPattern(pattern) - treeCanMatch = treeCanMatchPattern(pattern) - } - - have := map[string]bool{ - "builtin": true, // ignore pseudo-package that exists only for documentation - } - if !c.BuildContext.CgoEnabled { - have["runtime/cgo"] = true // ignore during walk - } - var pkgs []string - - for _, src := range c.BuildContext.SrcDirs() { - if (pattern == "std" || pattern == "cmd") && src != gorootSrc { - continue - } - src = filepath.Clean(src) + string(filepath.Separator) - root := src - if pattern == "cmd" { - root += "cmd" + string(filepath.Separator) - } - filepath.Walk(root, func(path string, fi os.FileInfo, err error) error { - if err != nil || !fi.IsDir() || path == src { - return nil - } - - // Avoid .foo, _foo, and testdata directory trees. - _, elem := filepath.Split(path) - if strings.HasPrefix(elem, ".") || strings.HasPrefix(elem, "_") || elem == "testdata" { - return filepath.SkipDir - } - - name := filepath.ToSlash(path[len(src):]) - if pattern == "std" && (!isStandardImportPath(name) || name == "cmd") { - // The name "std" is only the standard library. - // If the name is cmd, it's the root of the command tree. - return filepath.SkipDir - } - if !treeCanMatch(name) { - return filepath.SkipDir - } - if have[name] { - return nil - } - have[name] = true - if !match(name) { - return nil - } - _, err = c.BuildContext.ImportDir(path, 0) - if err != nil { - if _, noGo := err.(*build.NoGoError); noGo { - return nil - } - } - pkgs = append(pkgs, name) - return nil - }) - } - return pkgs -} - -// importPathsNoDotExpansion returns the import paths to use for the given -// command line, but it does no ... expansion. -func (c *Context) importPathsNoDotExpansion(args []string) []string { - if len(args) == 0 { - return []string{"."} - } - var out []string - for _, a := range args { - // Arguments are supposed to be import paths, but - // as a courtesy to Windows developers, rewrite \ to / - // in command-line arguments. Handles .\... and so on. - if filepath.Separator == '\\' { - a = strings.Replace(a, `\`, `/`, -1) - } - - // Put argument in canonical form, but preserve leading ./. - if strings.HasPrefix(a, "./") { - a = "./" + path.Clean(a) - if a == "./." { - a = "." - } - } else { - a = path.Clean(a) - } - if isMetaPackage(a) { - out = append(out, c.allPackages(a)...) - continue - } - out = append(out, a) - } - return out -} - -// importPaths returns the import paths to use for the given command line. -func (c *Context) importPaths(args []string) []string { - args = c.importPathsNoDotExpansion(args) - var out []string - for _, a := range args { - if strings.Contains(a, "...") { - if build.IsLocalImport(a) { - out = append(out, c.allPackagesInFS(a)...) - } else { - out = append(out, c.allPackages(a)...) - } - continue - } - out = append(out, a) - } - return out -} - -// allPackages returns all the packages that can be found -// under the $GOPATH directories and $GOROOT matching pattern. -// The pattern is either "all" (all packages), "std" (standard packages), -// "cmd" (standard commands), or a path including "...". 
-func (c *Context) allPackages(pattern string) []string { - pkgs := c.matchPackages(pattern) - if len(pkgs) == 0 { - fmt.Fprintf(os.Stderr, "warning: %q matched no packages\n", pattern) - } - return pkgs -} - -// allPackagesInFS is like allPackages but is passed a pattern -// beginning ./ or ../, meaning it should scan the tree rooted -// at the given directory. There are ... in the pattern too. -func (c *Context) allPackagesInFS(pattern string) []string { - pkgs := c.matchPackagesInFS(pattern) - if len(pkgs) == 0 { - fmt.Fprintf(os.Stderr, "warning: %q matched no packages\n", pattern) - } - return pkgs -} - -// matchPackagesInFS returns a list of package paths matching pattern, -// which must begin with ./ or ../ -// (see go help packages for pattern syntax). -func (c *Context) matchPackagesInFS(pattern string) []string { - // Find directory to begin the scan. - // Could be smarter but this one optimization - // is enough for now, since ... is usually at the - // end of a path. - i := strings.Index(pattern, "...") - dir, _ := path.Split(pattern[:i]) - - // pattern begins with ./ or ../. - // path.Clean will discard the ./ but not the ../. - // We need to preserve the ./ for pattern matching - // and in the returned import paths. - prefix := "" - if strings.HasPrefix(pattern, "./") { - prefix = "./" - } - match := matchPattern(pattern) - - var pkgs []string - filepath.Walk(dir, func(path string, fi os.FileInfo, err error) error { - if err != nil || !fi.IsDir() { - return nil - } - if path == dir { - // filepath.Walk starts at dir and recurses. For the recursive case, - // the path is the result of filepath.Join, which calls filepath.Clean. - // The initial case is not Cleaned, though, so we do this explicitly. - // - // This converts a path like "./io/" to "io". Without this step, running - // "cd $GOROOT/src; go list ./io/..." would incorrectly skip the io - // package, because prepending the prefix "./" to the unclean path would - // result in "././io", and match("././io") returns false. - path = filepath.Clean(path) - } - - // Avoid .foo, _foo, and testdata directory trees, but do not avoid "." or "..". - _, elem := filepath.Split(path) - dot := strings.HasPrefix(elem, ".") && elem != "." && elem != ".." - if dot || strings.HasPrefix(elem, "_") || elem == "testdata" { - return filepath.SkipDir - } - - name := prefix + filepath.ToSlash(path) - if !match(name) { - return nil - } - - // We keep the directory if we can import it, or if we can't import it - // due to invalid Go source files. This means that directories containing - // parse errors will be built (and fail) instead of being silently skipped - // as not matching the pattern. Go 1.5 and earlier skipped, but that - // behavior means people miss serious mistakes. - // See golang.org/issue/11407. - if p, err := c.BuildContext.ImportDir(path, 0); err != nil && shouldIgnoreImport(p) { - if _, noGo := err.(*build.NoGoError); !noGo { - log.Print(err) - } - return nil - } - pkgs = append(pkgs, name) - return nil - }) - return pkgs -} - -// isMetaPackage checks if name is a reserved package name that expands to multiple packages. -func isMetaPackage(name string) bool { - return name == "std" || name == "cmd" || name == "all" -} - -// isStandardImportPath reports whether $GOROOT/src/path should be considered -// part of the standard distribution. For historical reasons we allow people to add -// their own code to $GOROOT instead of using $GOPATH, but we assume that -// code will start with a domain name (dot in the first element). 
-func isStandardImportPath(path string) bool { - i := strings.Index(path, "/") - if i < 0 { - i = len(path) - } - elem := path[:i] - return !strings.Contains(elem, ".") -} - -// hasPathPrefix reports whether the path s begins with the -// elements in prefix. -func hasPathPrefix(s, prefix string) bool { - switch { - default: - return false - case len(s) == len(prefix): - return s == prefix - case len(s) > len(prefix): - if prefix != "" && prefix[len(prefix)-1] == '/' { - return strings.HasPrefix(s, prefix) - } - return s[len(prefix)] == '/' && s[:len(prefix)] == prefix - } -} - -// treeCanMatchPattern(pattern)(name) reports whether -// name or children of name can possibly match pattern. -// Pattern is the same limited glob accepted by matchPattern. -func treeCanMatchPattern(pattern string) func(name string) bool { - wildCard := false - if i := strings.Index(pattern, "..."); i >= 0 { - wildCard = true - pattern = pattern[:i] - } - return func(name string) bool { - return len(name) <= len(pattern) && hasPathPrefix(pattern, name) || - wildCard && strings.HasPrefix(name, pattern) - } -} diff --git a/vendor/github.com/kisielk/gotool/tool.go b/vendor/github.com/kisielk/gotool/tool.go deleted file mode 100644 index c7409e11e..000000000 --- a/vendor/github.com/kisielk/gotool/tool.go +++ /dev/null @@ -1,48 +0,0 @@ -// Package gotool contains utility functions used to implement the standard -// "cmd/go" tool, provided as a convenience to developers who want to write -// tools with similar semantics. -package gotool - -import "go/build" - -// Export functions here to make it easier to keep the implementations up to date with upstream. - -// DefaultContext is the default context that uses build.Default. -var DefaultContext = Context{ - BuildContext: build.Default, -} - -// A Context specifies the supporting context. -type Context struct { - // BuildContext is the build.Context that is used when computing import paths. - BuildContext build.Context -} - -// ImportPaths returns the import paths to use for the given command line. -// -// The path "all" is expanded to all packages in $GOPATH and $GOROOT. -// The path "std" is expanded to all packages in the Go standard library. -// The path "cmd" is expanded to all Go standard commands. -// The string "..." is treated as a wildcard within a path. -// When matching recursively, directories are ignored if they are prefixed with -// a dot or an underscore (such as ".foo" or "_foo"), or are named "testdata". -// Relative import paths are not converted to full import paths. -// If args is empty, a single element "." is returned. -func (c *Context) ImportPaths(args []string) []string { - return c.importPaths(args) -} - -// ImportPaths returns the import paths to use for the given command line -// using default context. -// -// The path "all" is expanded to all packages in $GOPATH and $GOROOT. -// The path "std" is expanded to all packages in the Go standard library. -// The path "cmd" is expanded to all Go standard commands. -// The string "..." is treated as a wildcard within a path. -// When matching recursively, directories are ignored if they are prefixed with -// a dot or an underscore (such as ".foo" or "_foo"), or are named "testdata". -// Relative import paths are not converted to full import paths. -// If args is empty, a single element "." is returned. 
-func ImportPaths(args []string) []string { - return DefaultContext.importPaths(args) -} diff --git a/vendor/github.com/kkHAIKE/contextcheck/.gitignore b/vendor/github.com/kkHAIKE/contextcheck/.gitignore deleted file mode 100644 index fc1b400c8..000000000 --- a/vendor/github.com/kkHAIKE/contextcheck/.gitignore +++ /dev/null @@ -1,18 +0,0 @@ -# Binaries for programs and plugins -*.exe -*.exe~ -*.dll -*.so -*.dylib - -# Test binary, built with `go test -c` -*.test - -# Output of the go coverage tool, specifically when used with LiteIDE -*.out - -# Dependency directories (remove the comment below to include it) -# vendor/ - -.idea -.DS_Store diff --git a/vendor/github.com/kkHAIKE/contextcheck/LICENSE b/vendor/github.com/kkHAIKE/contextcheck/LICENSE deleted file mode 100644 index 99e1c482a..000000000 --- a/vendor/github.com/kkHAIKE/contextcheck/LICENSE +++ /dev/null @@ -1,201 +0,0 @@ - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. 
For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. 
The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. 
- - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright 2021 sylvia.wang - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. diff --git a/vendor/github.com/kkHAIKE/contextcheck/Makefile b/vendor/github.com/kkHAIKE/contextcheck/Makefile deleted file mode 100644 index 9321e9de3..000000000 --- a/vendor/github.com/kkHAIKE/contextcheck/Makefile +++ /dev/null @@ -1,5 +0,0 @@ -build: - @GO111MODULE=on go build -ldflags '-s -w' -o contextcheck ./cmd/contextcheck/main.go - -install: - @GO111MODULE=on go install -ldflags '-s -w' ./cmd/contextcheck diff --git a/vendor/github.com/kkHAIKE/contextcheck/README.md b/vendor/github.com/kkHAIKE/contextcheck/README.md deleted file mode 100644 index 2cc7b2e48..000000000 --- a/vendor/github.com/kkHAIKE/contextcheck/README.md +++ /dev/null @@ -1,157 +0,0 @@ -[![CircleCI](https://circleci.com/gh/sylvia7788/contextcheck.svg?style=svg)](https://circleci.com/gh/sylvia7788/contextcheck) - - -# contextcheck - -`contextcheck` is a static analysis tool, it is used to check whether the function uses a non-inherited context, which will result in a broken call link. - -For example: - -```go -func call1(ctx context.Context) { - ... - - ctx = getNewCtx(ctx) - call2(ctx) // OK - - call2(context.Background()) // Non-inherited new context, use function like `context.WithXXX` instead - - call3() // Function `call3` should pass the context parameter - call4() // Function `call4->call3` should pass the context parameter - ... -} - -func call2(ctx context.Context) { - ... -} - -func call3() { - ctx := context.TODO() - call2(ctx) -} - -func call4() { - call3() -} - - -// if you want none-inherit ctx, use this function -func getNewCtx(ctx context.Context) (newCtx context.Context) { - ... 
- return -} - -/* ---------- check net/http.HandleFunc ---------- */ - -func call5(ctx context.Context, w http.ResponseWriter, r *http.Request) { -} - -func call6(w http.ResponseWriter, r *http.Request) { - ctx := r.Context() - call5(ctx, w, r) - call5(context.Background(), w, r) // Non-inherited new context, use function like `context.WithXXX` or `r.Context` instead -} - -func call7(in bool, w http.ResponseWriter, r *http.Request) { - call5(r.Context(), w, r) - call5(context.Background(), w, r) -} - -func call8() { - http.HandleFunc("/", func(w http.ResponseWriter, r *http.Request) { - call5(r.Context(), w, r) - call5(context.Background(), w, r) // Non-inherited new context, use function like `context.WithXXX` or `r.Context` instead - - call6(w, r) - - // call7 should be like `func call7(ctx context.Context, in bool, w http.ResponseWriter, r *http.Request)` - call7(true, w, r) // Function `call7` should pass the context parameter - }) -} -``` - -## Tips -### need break ctx inheritance -eg: [issue](https://github.com/kkHAIKE/contextcheck/issues/2). - -```go -func call1(ctx context.Context) { - ... - - newCtx, cancel := NoInheritCancel(ctx) - defer cancel() - - call2(newCtx) - ... -} - -func call2(ctx context.Context) { - ... -} - -func NoInheritCancel(_ context.Context) (context.Context,context.CancelFunc) { - return context.WithCancel(context.Background()) -} -``` - -### skip check specify function -You can add `// nolint: contextcheck` in function decl doc comment, to skip this linter in some false-positive case. - -```go -// nolint: contextcheck -func call1() { - doSomeThing(context.Background()) // add nolint will no issuss for that -} - -func call2(ctx context.Context) { - call1() -} - -func call3() { - call2(context.Background()) -} -``` - -### force mark specify function have server-side http.Request parameter -default behavior is mark http.HandlerFunc or a function use r.Context(). - -```go -// @contextcheck(req_has_ctx) -func writeErr(w http.ResponseWriter, r *http.Request, err error) { - doSomeThing(r.Context()) -} - -func handler(w http.ResponseWriter, r *http.Request) { - ... - if err != nil { - writeErr(w, r, err) - return - } - ... -} -``` - -## Installation - -You can get `contextcheck` by `go get` command. - -```bash -$ go get -u github.com/kkHAIKE/contextcheck -``` - -or build yourself. - -```bash -$ make build -$ make install -``` - -## Usage - -Invoke `contextcheck` with your package name - -```bash -$ contextcheck ./... -$ # or -$ go vet -vettool=`which contextcheck` ./... 
-``` diff --git a/vendor/github.com/kkHAIKE/contextcheck/contextcheck.go b/vendor/github.com/kkHAIKE/contextcheck/contextcheck.go deleted file mode 100644 index c9ad0101f..000000000 --- a/vendor/github.com/kkHAIKE/contextcheck/contextcheck.go +++ /dev/null @@ -1,835 +0,0 @@ -package contextcheck - -import ( - "go/ast" - "go/types" - "regexp" - "strconv" - "strings" - "sync" - - "github.com/gostaticanalysis/analysisutil" - "golang.org/x/tools/go/analysis" - "golang.org/x/tools/go/analysis/passes/buildssa" - "golang.org/x/tools/go/packages" - "golang.org/x/tools/go/ssa" -) - -type Configuration struct { - DisableFact bool -} - -var pkgprefix string - -func NewAnalyzer(cfg Configuration) *analysis.Analyzer { - analyzer := &analysis.Analyzer{ - Name: "contextcheck", - Doc: "check whether the function uses a non-inherited context", - Run: NewRun(nil, cfg.DisableFact), - Requires: []*analysis.Analyzer{ - buildssa.Analyzer, - }, - } - analyzer.Flags.StringVar(&pkgprefix, "pkgprefix", "", "filter init pkgs (only for cmd)") - - if !cfg.DisableFact { - analyzer.FactTypes = append(analyzer.FactTypes, (*ctxFact)(nil)) - } - - return analyzer -} - -const ( - ctxPkg = "context" - ctxName = "Context" - - httpPkg = "net/http" - httpRes = "ResponseWriter" - httpReq = "Request" -) - -const ( - CtxIn int = 1 << iota // ctx in function's param - CtxOut // ctx in function's results - CtxInField // ctx in function's field param -) - -type entryType int - -const ( - EntryNone entryType = iota - EntryNormal // without ctx in - EntryWithCtx // has ctx in - EntryWithHttpHandler // is http handler -) - -var ( - pkgFactMap = make(map[*types.Package]ctxFact) - pkgFactMu sync.RWMutex -) - -type resInfo struct { - Valid bool - Funcs []string - - // reuse for doc - ReqCtx bool - Skip bool - - EntryType entryType -} - -type ctxFact map[string]resInfo - -func (*ctxFact) String() string { return "ctxCheck" } -func (*ctxFact) AFact() {} - -type runner struct { - pass *analysis.Pass - ctxTyp *types.Named - ctxPTyp *types.Pointer - skipFile map[*ast.File]bool - - httpResTyps []types.Type - httpReqTyps []types.Type - - currentFact ctxFact - disableFact bool -} - -func getPkgRoot(pkg string) string { - arr := strings.Split(pkg, "/") - if len(arr) < 3 { - return arr[0] - } - if strings.IndexByte(arr[0], '.') == -1 { - return arr[0] - } - return strings.Join(arr[:3], "/") -} - -func NewRun(pkgs []*packages.Package, disableFact bool) func(pass *analysis.Pass) (interface{}, error) { - m := make(map[string]bool) - for _, pkg := range pkgs { - m[getPkgRoot(pkg.PkgPath)] = true - } - return func(pass *analysis.Pass) (interface{}, error) { - // skip different repo - if len(m) > 0 && !m[getPkgRoot(pass.Pkg.Path())] { - return nil, nil - } - if len(m) == 0 && pkgprefix != "" && !strings.HasPrefix(pass.Pkg.Path(), pkgprefix) { - return nil, nil - } - - r := &runner{disableFact: disableFact} - r.run(pass) - return nil, nil - } -} - -func (r *runner) run(pass *analysis.Pass) { - r.pass = pass - pssa := pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA) - funcs := pssa.SrcFuncs - - // collect ctx obj - var ok bool - r.ctxTyp, r.ctxPTyp, ok = r.getRequiedType(pssa, ctxPkg, ctxName) - if !ok { - return - } - - // collect http obj - r.collectHttpTyps(pssa) - - r.skipFile = make(map[*ast.File]bool) - r.currentFact = make(ctxFact) - - type entryInfo struct { - f *ssa.Function // entryfunc - tp entryType // entrytype - } - var tmpFuncs []entryInfo - for _, f := range funcs { - // skip checked function - key := f.RelString(nil) - if _, ok := 
r.currentFact[key]; ok { - continue - } - - if entryType := r.checkIsEntry(f); entryType == EntryNormal { - if _, ok := r.getValue(key, f); ok { - continue - } - // record the result of nomal function - checkingMap := make(map[string]bool) - checkingMap[key] = true - r.setFact(key, r.checkFuncWithoutCtx(f, checkingMap), f.Name()) - continue - } else if entryType == EntryWithCtx || entryType == EntryWithHttpHandler { - tmpFuncs = append(tmpFuncs, entryInfo{f: f, tp: entryType}) - } - } - - for _, v := range tmpFuncs { - r.checkFuncWithCtx(v.f, v.tp) - } - - if len(r.currentFact) > 0 { - if r.disableFact { - setPkgFact(pass.Pkg, r.currentFact) - } else { - pass.ExportPackageFact(&r.currentFact) - } - } -} - -func (r *runner) getRequiedType(pssa *buildssa.SSA, path, name string) (obj *types.Named, pobj *types.Pointer, ok bool) { - pkg := pssa.Pkg.Prog.ImportedPackage(path) - if pkg == nil { - return - } - - objTyp := pkg.Type(name) - if objTyp == nil { - return - } - obj, ok = objTyp.Object().Type().(*types.Named) - if !ok { - return - } - pobj = types.NewPointer(obj) - - return -} - -func (r *runner) collectHttpTyps(pssa *buildssa.SSA) { - objRes, _, ok := r.getRequiedType(pssa, httpPkg, httpRes) - if ok { - r.httpResTyps = append(r.httpResTyps, objRes) - } - - _, pobjReq, ok := r.getRequiedType(pssa, httpPkg, httpReq) - if ok { - r.httpReqTyps = append(r.httpReqTyps, pobjReq) - } -} - -func (r *runner) noImportedContextAndHttp(f *ssa.Function) (ret bool) { - if !f.Pos().IsValid() { - return false - } - - file := analysisutil.File(r.pass, f.Pos()) - if file == nil { - return false - } - - if skip, has := r.skipFile[file]; has { - return skip - } - defer func() { - r.skipFile[file] = ret - }() - - for _, impt := range file.Imports { - path, err := strconv.Unquote(impt.Path.Value) - if err != nil { - continue - } - path = analysisutil.RemoveVendor(path) - if path == ctxPkg || path == httpPkg { - return false - } - } - - return true -} - -func (r *runner) checkIsEntry(f *ssa.Function) (ret entryType) { - // if r.noImportedContextAndHttp(f) { - // return EntryNormal - // } - key := "entry:" + f.RelString(nil) - res, ok := r.getValue(key, f) - if ok { - return res.EntryType - } - defer func() { - r.currentFact[key] = resInfo{EntryType: ret} - }() - - ctxIn, ctxOut := r.checkIsCtx(f) - if ctxOut { - // skip the function which generate ctx - return EntryNone - } else if ctxIn { - // has ctx in, ignore *http.Request.Context() - return EntryWithCtx - } - - reqctx, skip := r.docFlag(f) - - // check is `func handler(w http.ResponseWriter, r *http.Request) {}` - // or use '// @contextcheck(req_has_ctx)' - if r.checkIsHttpHandler(f, reqctx) { - return EntryWithHttpHandler - } - - if skip { - return EntryNone - } - - return EntryNormal -} - -func (r *runner) docFlag(f *ssa.Function) (reqctx, skip bool) { - for _, v := range r.getDocFromFunc(f) { - if len(nolintRe.FindString(v.Text)) > 0 && strings.Contains(v.Text, "contextcheck") { - skip = true - } else if strings.HasPrefix(v.Text, "// @contextcheck(req_has_ctx)") { - reqctx = true - } - } - return -} - -var nolintRe = regexp.MustCompile(`^//\s?nolint:`) - -func (r *runner) getDocFromFunc(f *ssa.Function) []*ast.Comment { - file := analysisutil.File(r.pass, f.Pos()) - if file == nil { - return nil - } - - // only support FuncDecl comment - var fd *ast.FuncDecl - for _, v := range file.Decls { - if tmp, ok := v.(*ast.FuncDecl); ok && tmp.Name.Pos() == f.Pos() { - fd = tmp - break - } - } - if fd == nil || fd.Doc == nil || len(fd.Doc.List) == 0 { - return 
nil - } - return fd.Doc.List -} - -func (r *runner) checkIsCtx(f *ssa.Function) (in, out bool) { - // check params - tuple := f.Signature.Params() - for i := 0; i < tuple.Len(); i++ { - if r.isCtxType(tuple.At(i).Type()) { - in = true - break - } - } - - // check freevars - for _, param := range f.FreeVars { - if r.isCtxType(param.Type()) { - in = true - break - } - } - - // check results - tuple = f.Signature.Results() - for i := 0; i < tuple.Len(); i++ { - if r.isCtxType(tuple.At(i).Type()) { - out = true - break - } - } - return -} - -func (r *runner) checkIsHttpHandler(f *ssa.Function, reqctx bool) bool { - var hasReq bool - tuple := f.Signature.Params() - for i := 0; i < tuple.Len(); i++ { - if r.isHttpReqType(tuple.At(i).Type()) { - hasReq = true - break - } - } - if !hasReq { - return false - } - if reqctx { - return true - } - - // must be `func f(w http.ResponseWriter, r *http.Request) {}` - if f.Signature.Results().Len() == 0 && tuple.Len() == 2 && - r.isHttpResType(tuple.At(0).Type()) && r.isHttpReqType(tuple.At(1).Type()) { - return true - } - - // check if use r.Context() - return f.Blocks != nil && len(r.getHttpReqCtx(f, true)) > 0 -} - -func (r *runner) collectCtxRef(f *ssa.Function, isHttpHandler bool) (refMap map[ssa.Instruction]bool, ok bool) { - ok = true - refMap = make(map[ssa.Instruction]bool) - checkedRefMap := make(map[ssa.Value]bool) - storeInstrs := make(map[*ssa.Store]bool) - phiInstrs := make(map[*ssa.Phi]bool) - - var checkRefs func(val ssa.Value, fromAddr bool) - var checkInstr func(instr ssa.Instruction, fromAddr bool) - - checkRefs = func(val ssa.Value, fromAddr bool) { - if val == nil || val.Referrers() == nil { - return - } - - if checkedRefMap[val] { - return - } - checkedRefMap[val] = true - - for _, instr := range *val.Referrers() { - checkInstr(instr, fromAddr) - } - } - - checkInstr = func(instr ssa.Instruction, fromAddr bool) { - switch i := instr.(type) { - case ssa.CallInstruction: - refMap[i] = true - tp := r.getCallInstrCtxType(i) - if tp&CtxOut != 0 { - // collect referrers of the results - checkRefs(i.Value(), false) - return - } - case *ssa.Store: - if fromAddr { - // collect all store to judge whether it's right value is valid - storeInstrs[i] = true - } else { - checkRefs(i.Addr, true) - } - case *ssa.UnOp: - checkRefs(i, false) - case *ssa.MakeClosure: - for _, param := range i.Bindings { - if r.isCtxType(param.Type()) { - refMap[i] = true - break - } - } - case *ssa.Extract: - // only care about ctx - if r.isCtxType(i.Type()) { - checkRefs(i, false) - } - case *ssa.Phi: - phiInstrs[i] = true - checkRefs(i, false) - case *ssa.TypeAssert: - // ctx.(*bm.Context) - } - } - - if isHttpHandler { - for _, v := range r.getHttpReqCtx(f, false) { - checkRefs(v, false) - } - } else { - for _, param := range f.Params { - if r.isCtxType(param.Type()) { - checkRefs(param, false) - } - } - - for _, param := range f.FreeVars { - if r.isCtxType(param.Type()) { - checkRefs(param, false) - } - } - } - - for instr := range storeInstrs { - if !checkedRefMap[instr.Val] { - r.pass.Reportf(instr.Pos(), "Non-inherited new context, use function like `context.WithXXX` instead") - ok = false - } - } - - for instr := range phiInstrs { - for _, v := range instr.Edges { - if !checkedRefMap[v] { - r.pass.Reportf(instr.Pos(), "Non-inherited new context, use function like `context.WithXXX` instead") - ok = false - } - } - } - - return -} - -func (r *runner) getHttpReqCtx(f *ssa.Function, least1 bool) (rets []ssa.Value) { - checkedRefMap := make(map[ssa.Value]bool) - - var 
checkRefs func(val ssa.Value, fromAddr bool) - var checkInstr func(instr ssa.Instruction, fromAddr bool) - - checkRefs = func(val ssa.Value, fromAddr bool) { - if val == nil || val.Referrers() == nil { - return - } - - if checkedRefMap[val] { - return - } - checkedRefMap[val] = true - - for _, instr := range *val.Referrers() { - checkInstr(instr, fromAddr) - } - } - - checkInstr = func(instr ssa.Instruction, fromAddr bool) { - switch i := instr.(type) { - case ssa.CallInstruction: - // r.Context() only has one recv - if len(i.Common().Args) != 1 { - break - } - - // find r.Context() - if r.getCallInstrCtxType(i)&CtxOut != CtxOut { - break - } - - // check is r.Context - f := r.getFunction(instr) - if f == nil || f.Name() != ctxName { - break - } - if f.Signature.Recv() != nil { - // collect the return of r.Context - rets = append(rets, i.Value()) - if least1 { - return - } - } - case *ssa.Store: - if !fromAddr { - checkRefs(i.Addr, true) - } - case *ssa.UnOp: - checkRefs(i, false) - case *ssa.Phi: - checkRefs(i, false) - case *ssa.MakeClosure: - case *ssa.Extract: - // http.Request can only be input - } - } - - for _, param := range f.Params { - if r.isHttpReqType(param.Type()) { - checkRefs(param, false) - } - } - - return -} - -func (r *runner) checkFuncWithCtx(f *ssa.Function, tp entryType) { - isHttpHandler := tp == EntryWithHttpHandler - refMap, ok := r.collectCtxRef(f, isHttpHandler) - if !ok { - return - } - - for _, b := range f.Blocks { - for _, instr := range b.Instrs { - tp, ok := r.getCtxType(instr) - if !ok { - continue - } - - // checked in collectCtxRef, skipped - if tp&CtxOut != 0 { - continue - } - - if tp&CtxIn != 0 { - if !refMap[instr] { - if isHttpHandler { - r.pass.Reportf(instr.Pos(), "Non-inherited new context, use function like `context.WithXXX` or `r.Context` instead") - } else { - r.pass.Reportf(instr.Pos(), "Non-inherited new context, use function like `context.WithXXX` instead") - } - } - } - - ff := r.getFunction(instr) - if ff == nil { - continue - } - - key := ff.RelString(nil) - res, ok := r.getValue(key, ff) - if ok { - if !res.Valid { - r.pass.Reportf(instr.Pos(), "Function `%s` should pass the context parameter", strings.Join(reverse(res.Funcs), "->")) - } - } - } - } -} - -func (r *runner) checkFuncWithoutCtx(f *ssa.Function, checkingMap map[string]bool) (ret bool) { - ret = true - orgKey := f.RelString(nil) - var seted bool - for _, b := range f.Blocks { - for _, instr := range b.Instrs { - tp, ok := r.getCtxType(instr) - if !ok { - continue - } - - if tp&CtxOut != 0 { - continue - } - - // it is considered illegal as long as ctx is in the input and not in *struct X - if tp&CtxIn != 0 { - if tp&CtxInField == 0 { - ret = false - } - } - - ff := r.getFunction(instr) - if ff == nil { - continue - } - - key := ff.RelString(nil) - res, ok := r.getValue(key, ff) - if ok { - if !res.Valid { - ret = false - - // save the call link - if !seted { - seted = true - r.setFact(orgKey, res.Valid, res.Funcs...) 
- } - } - continue - } - - // check is thunk or bound - if strings.HasSuffix(key, "$thunk") || strings.HasSuffix(key, "$bound") { - continue - } - - if entryType := r.checkIsEntry(ff); entryType == EntryNormal { - // cannot get info from fact, skip - if ff.Blocks == nil { - continue - } - - // handler cycle call - if checkingMap[key] { - continue - } - checkingMap[key] = true - - valid := r.checkFuncWithoutCtx(ff, checkingMap) - r.setFact(key, valid, ff.Name()) - if res, ok := r.getValue(key, ff); ok && !valid && !seted { - seted = true - r.setFact(orgKey, valid, res.Funcs...) - } - if !valid { - ret = false - } - } - } - } - return ret -} - -func (r *runner) getCtxType(instr ssa.Instruction) (tp int, ok bool) { - switch i := instr.(type) { - case ssa.CallInstruction: - tp = r.getCallInstrCtxType(i) - ok = true - case *ssa.MakeClosure: - tp = r.getMakeClosureCtxType(i) - ok = true - } - return -} - -func (r *runner) getCallInstrCtxType(c ssa.CallInstruction) (tp int) { - // check params - for _, v := range c.Common().Args { - if r.isCtxType(v.Type()) { - if vv, ok := v.(*ssa.UnOp); ok { - if _, ok := vv.X.(*ssa.FieldAddr); ok { - tp |= CtxInField - } - } - - tp |= CtxIn - break - } - } - - // check results - if v := c.Value(); v != nil { - if r.isCtxType(v.Type()) { - tp |= CtxOut - } else { - tuple, ok := v.Type().(*types.Tuple) - if !ok { - return - } - for i := 0; i < tuple.Len(); i++ { - if r.isCtxType(tuple.At(i).Type()) { - tp |= CtxOut - break - } - } - } - } - - return -} - -func (r *runner) getMakeClosureCtxType(c *ssa.MakeClosure) (tp int) { - for _, v := range c.Bindings { - if r.isCtxType(v.Type()) { - if vv, ok := v.(*ssa.UnOp); ok { - if _, ok := vv.X.(*ssa.FieldAddr); ok { - tp |= CtxInField - } - } - - tp |= CtxIn - break - } - } - return -} - -func (r *runner) getFunction(instr ssa.Instruction) (f *ssa.Function) { - switch i := instr.(type) { - case ssa.CallInstruction: - if i.Common().IsInvoke() { - return - } - - switch c := i.Common().Value.(type) { - case *ssa.Function: - f = c - case *ssa.MakeClosure: - // captured in the outer layer - case *ssa.Builtin, *ssa.UnOp, *ssa.Lookup, *ssa.Phi: - // skipped - case *ssa.Extract, *ssa.Call: - // function is a result of a call, skipped - case *ssa.Parameter: - // function is a param, skipped - } - case *ssa.MakeClosure: - f = i.Fn.(*ssa.Function) - } - return -} - -func (r *runner) isCtxType(tp types.Type) bool { - return types.Identical(tp, r.ctxTyp) || types.Identical(tp, r.ctxPTyp) -} - -func (r *runner) isHttpResType(tp types.Type) bool { - for _, v := range r.httpResTyps { - if ok := types.Identical(v, v); ok { - return true - } - } - return false -} - -func (r *runner) isHttpReqType(tp types.Type) bool { - for _, v := range r.httpReqTyps { - if ok := types.Identical(tp, v); ok { - return true - } - } - return false -} - -func (r *runner) getValue(key string, f *ssa.Function) (res resInfo, ok bool) { - res, ok = r.currentFact[key] - if ok { - return - } - - if f.Pkg == nil { - return - } - - var fact ctxFact - var got bool - if r.disableFact { - fact, got = getPkgFact(f.Pkg.Pkg) - } else { - got = r.pass.ImportPackageFact(f.Pkg.Pkg, &fact) - } - if got { - res, ok = fact[key] - } - return -} - -func (r *runner) setFact(key string, valid bool, funcs ...string) { - var names []string - if !valid { - names = append(r.currentFact[key].Funcs, funcs...) 
- } - r.currentFact[key] = resInfo{ - Valid: valid, - Funcs: names, - } -} - -// setPkgFact save fact to mem -func setPkgFact(pkg *types.Package, fact ctxFact) { - pkgFactMu.Lock() - pkgFactMap[pkg] = fact - pkgFactMu.Unlock() -} - -// getPkgFact get fact from mem -func getPkgFact(pkg *types.Package) (fact ctxFact, ok bool) { - pkgFactMu.RLock() - fact, ok = pkgFactMap[pkg] - pkgFactMu.RUnlock() - return -} - -func reverse(arr1 []string) (arr2 []string) { - l := len(arr1) - if l == 0 { - return - } - arr2 = make([]string, l) - for i := 0; i <= l/2; i++ { - arr2[i] = arr1[l-1-i] - arr2[l-1-i] = arr1[i] - } - return -} diff --git a/vendor/github.com/kulti/thelper/LICENSE b/vendor/github.com/kulti/thelper/LICENSE deleted file mode 100644 index e070215fe..000000000 --- a/vendor/github.com/kulti/thelper/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -MIT License - -Copyright (c) 2020 Aleksey Bakin - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. diff --git a/vendor/github.com/kulti/thelper/pkg/analyzer/analyzer.go b/vendor/github.com/kulti/thelper/pkg/analyzer/analyzer.go deleted file mode 100644 index a22fd6aca..000000000 --- a/vendor/github.com/kulti/thelper/pkg/analyzer/analyzer.go +++ /dev/null @@ -1,639 +0,0 @@ -package analyzer - -import ( - "flag" - "fmt" - "go/ast" - "go/token" - "go/types" - "sort" - "strings" - - "github.com/gostaticanalysis/analysisutil" - "golang.org/x/tools/go/analysis" - "golang.org/x/tools/go/analysis/passes/inspect" - "golang.org/x/tools/go/ast/inspector" -) - -const ( - doc = "thelper detects tests helpers which is not start with t.Helper() method." 
- checksDoc = `coma separated list of enabled checks - -Available checks - -` + checkTBegin + ` - check t.Helper() begins helper function -` + checkTFirst + ` - check *testing.T is first param of helper function -` + checkTName + ` - check *testing.T param has t name - -Also available similar checks for benchmark and TB helpers: ` + - checkFBegin + `, ` + checkFFirst + `, ` + checkFName + `,` + - checkBBegin + `, ` + checkBFirst + `, ` + checkBName + `,` + - checkTBBegin + `, ` + checkTBFirst + `, ` + checkTBName + ` - -` -) - -type enabledChecksValue map[string]struct{} - -func (m enabledChecksValue) Enabled(c string) bool { - _, ok := m[c] - return ok -} - -func (m enabledChecksValue) String() string { - ss := make([]string, 0, len(m)) - for s := range m { - ss = append(ss, s) - } - sort.Strings(ss) - return strings.Join(ss, ",") -} - -func (m enabledChecksValue) Set(s string) error { - ss := strings.FieldsFunc(s, func(c rune) bool { return c == ',' }) - if len(ss) == 0 { - return nil - } - - for k := range m { - delete(m, k) - } - for _, v := range ss { - switch v { - case checkTBegin, checkTFirst, checkTName, - checkFBegin, checkFFirst, checkFName, - checkBBegin, checkBFirst, checkBName, - checkTBBegin, checkTBFirst, checkTBName: - m[v] = struct{}{} - default: - return fmt.Errorf("unknown check name %q (see help for full list)", v) - } - } - return nil -} - -const ( - checkTBegin = "t_begin" - checkTFirst = "t_first" - checkTName = "t_name" - checkFBegin = "f_begin" - checkFFirst = "f_first" - checkFName = "f_name" - checkBBegin = "b_begin" - checkBFirst = "b_first" - checkBName = "b_name" - checkTBBegin = "tb_begin" - checkTBFirst = "tb_first" - checkTBName = "tb_name" -) - -type thelper struct { - enabledChecks enabledChecksValue -} - -// NewAnalyzer return a new thelper analyzer. -// thelper analyzes Go test codes how they use t.Helper() method. 
-func NewAnalyzer() *analysis.Analyzer { - thelper := thelper{} - thelper.enabledChecks = enabledChecksValue{ - checkTBegin: struct{}{}, - checkTFirst: struct{}{}, - checkTName: struct{}{}, - checkFBegin: struct{}{}, - checkFFirst: struct{}{}, - checkFName: struct{}{}, - checkBBegin: struct{}{}, - checkBFirst: struct{}{}, - checkBName: struct{}{}, - checkTBBegin: struct{}{}, - checkTBFirst: struct{}{}, - checkTBName: struct{}{}, - } - - a := &analysis.Analyzer{ - Name: "thelper", - Doc: doc, - Run: thelper.run, - Requires: []*analysis.Analyzer{ - inspect.Analyzer, - }, - } - - a.Flags.Init("thelper", flag.ExitOnError) - a.Flags.Var(&thelper.enabledChecks, "checks", checksDoc) - - return a -} - -func (t thelper) run(pass *analysis.Pass) (interface{}, error) { - tCheckOpts, fCheckOpts, bCheckOpts, tbCheckOpts, ok := t.buildCheckFuncOpts(pass) - if !ok { - return nil, nil - } - - inspect, ok := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) - if !ok { - return nil, nil - } - - var reports reports - nodeFilter := []ast.Node{ - (*ast.FuncDecl)(nil), - (*ast.FuncLit)(nil), - (*ast.CallExpr)(nil), - } - inspect.Preorder(nodeFilter, func(node ast.Node) { - var fd funcDecl - switch n := node.(type) { - case *ast.FuncLit: - fd.Pos = n.Pos() - fd.Type = n.Type - fd.Body = n.Body - fd.Name = ast.NewIdent("") - case *ast.FuncDecl: - fd.Pos = n.Name.NamePos - fd.Type = n.Type - fd.Body = n.Body - fd.Name = n.Name - case *ast.CallExpr: - runSubtestExprs := extractSubtestExp(pass, n, tCheckOpts.subRun, tCheckOpts.subTestFuncType) - if len(runSubtestExprs) == 0 { - runSubtestExprs = extractSubtestExp(pass, n, bCheckOpts.subRun, bCheckOpts.subTestFuncType) - } - if len(runSubtestExprs) == 0 { - runSubtestExprs = extractSubtestFuzzExp(pass, n, fCheckOpts.subRun) - } - - if len(runSubtestExprs) > 0 { - for _, expr := range runSubtestExprs { - reports.Filter(funcDefPosition(pass, expr)) - } - } else { - reports.NoFilter(funcDefPosition(pass, n.Fun)) - } - return - default: - return - } - - checkFunc(pass, &reports, fd, tCheckOpts) - checkFunc(pass, &reports, fd, fCheckOpts) - checkFunc(pass, &reports, fd, bCheckOpts) - checkFunc(pass, &reports, fd, tbCheckOpts) - }) - - reports.Flush(pass) - - return nil, nil -} - -type checkFuncOpts struct { - skipPrefix string - varName string - fnHelper types.Object - subRun types.Object - subTestFuncType types.Type - hpType types.Type - ctxType types.Type - checkBegin bool - checkFirst bool - checkName bool -} - -func (t thelper) buildCheckFuncOpts(pass *analysis.Pass) (checkFuncOpts, checkFuncOpts, checkFuncOpts, checkFuncOpts, bool) { - var ctxType types.Type - ctxObj := analysisutil.ObjectOf(pass, "context", "Context") - if ctxObj != nil { - ctxType = ctxObj.Type() - } - - tCheckOpts, ok := t.buildTestCheckFuncOpts(pass, ctxType) - if !ok { - return checkFuncOpts{}, checkFuncOpts{}, checkFuncOpts{}, checkFuncOpts{}, false - } - - fCheckOpts, ok := t.buildFuzzCheckFuncOpts(pass, ctxType) - if !ok { - return checkFuncOpts{}, checkFuncOpts{}, checkFuncOpts{}, checkFuncOpts{}, false - } - - bCheckOpts, ok := t.buildBenchmarkCheckFuncOpts(pass, ctxType) - if !ok { - return checkFuncOpts{}, checkFuncOpts{}, checkFuncOpts{}, checkFuncOpts{}, false - } - - tbCheckOpts, ok := t.buildTBCheckFuncOpts(pass, ctxType) - if !ok { - return checkFuncOpts{}, checkFuncOpts{}, checkFuncOpts{}, checkFuncOpts{}, false - } - - return tCheckOpts, fCheckOpts, bCheckOpts, tbCheckOpts, true -} - -func (t thelper) buildTestCheckFuncOpts(pass *analysis.Pass, ctxType types.Type) 
(checkFuncOpts, bool) { - tObj := analysisutil.ObjectOf(pass, "testing", "T") - if tObj == nil { - return checkFuncOpts{}, false - } - - tHelper, _, _ := types.LookupFieldOrMethod(tObj.Type(), true, tObj.Pkg(), "Helper") - if tHelper == nil { - return checkFuncOpts{}, false - } - - tRun, _, _ := types.LookupFieldOrMethod(tObj.Type(), true, tObj.Pkg(), "Run") - if tRun == nil { - return checkFuncOpts{}, false - } - - tType := types.NewPointer(tObj.Type()) - tVar := types.NewVar(token.NoPos, nil, "t", tType) - return checkFuncOpts{ - skipPrefix: "Test", - varName: "t", - fnHelper: tHelper, - subRun: tRun, - hpType: tType, - subTestFuncType: types.NewSignature(nil, types.NewTuple(tVar), nil, false), - ctxType: ctxType, - checkBegin: t.enabledChecks.Enabled(checkTBegin), - checkFirst: t.enabledChecks.Enabled(checkTFirst), - checkName: t.enabledChecks.Enabled(checkTName), - }, true -} - -func (t thelper) buildFuzzCheckFuncOpts(pass *analysis.Pass, ctxType types.Type) (checkFuncOpts, bool) { - fObj := analysisutil.ObjectOf(pass, "testing", "F") - if fObj == nil { - return checkFuncOpts{}, true // fuzzing supports since go1.18, it's ok, that testig.F is missed. - } - - fHelper, _, _ := types.LookupFieldOrMethod(fObj.Type(), true, fObj.Pkg(), "Helper") - if fHelper == nil { - return checkFuncOpts{}, false - } - - tFuzz, _, _ := types.LookupFieldOrMethod(fObj.Type(), true, fObj.Pkg(), "Fuzz") - if tFuzz == nil { - return checkFuncOpts{}, false - } - - return checkFuncOpts{ - skipPrefix: "Fuzz", - varName: "f", - fnHelper: fHelper, - subRun: tFuzz, - hpType: types.NewPointer(fObj.Type()), - ctxType: ctxType, - checkBegin: t.enabledChecks.Enabled(checkFBegin), - checkFirst: t.enabledChecks.Enabled(checkFFirst), - checkName: t.enabledChecks.Enabled(checkFName), - }, true -} - -func (t thelper) buildBenchmarkCheckFuncOpts(pass *analysis.Pass, ctxType types.Type) (checkFuncOpts, bool) { - bObj := analysisutil.ObjectOf(pass, "testing", "B") - if bObj == nil { - return checkFuncOpts{}, false - } - - bHelper, _, _ := types.LookupFieldOrMethod(bObj.Type(), true, bObj.Pkg(), "Helper") - if bHelper == nil { - return checkFuncOpts{}, false - } - - bRun, _, _ := types.LookupFieldOrMethod(bObj.Type(), true, bObj.Pkg(), "Run") - if bRun == nil { - return checkFuncOpts{}, false - } - - bType := types.NewPointer(bObj.Type()) - bVar := types.NewVar(token.NoPos, nil, "b", bType) - return checkFuncOpts{ - skipPrefix: "Benchmark", - varName: "b", - fnHelper: bHelper, - subRun: bRun, - hpType: types.NewPointer(bObj.Type()), - subTestFuncType: types.NewSignature(nil, types.NewTuple(bVar), nil, false), - ctxType: ctxType, - checkBegin: t.enabledChecks.Enabled(checkBBegin), - checkFirst: t.enabledChecks.Enabled(checkBFirst), - checkName: t.enabledChecks.Enabled(checkBName), - }, true -} - -func (t thelper) buildTBCheckFuncOpts(pass *analysis.Pass, ctxType types.Type) (checkFuncOpts, bool) { - tbObj := analysisutil.ObjectOf(pass, "testing", "TB") - if tbObj == nil { - return checkFuncOpts{}, false - } - - tbHelper, _, _ := types.LookupFieldOrMethod(tbObj.Type(), true, tbObj.Pkg(), "Helper") - if tbHelper == nil { - return checkFuncOpts{}, false - } - - return checkFuncOpts{ - skipPrefix: "", - varName: "tb", - fnHelper: tbHelper, - hpType: tbObj.Type(), - ctxType: ctxType, - checkBegin: t.enabledChecks.Enabled(checkTBBegin), - checkFirst: t.enabledChecks.Enabled(checkTBFirst), - checkName: t.enabledChecks.Enabled(checkTBName), - }, true -} - -type funcDecl struct { - Pos token.Pos - Name *ast.Ident - Type *ast.FuncType - 
Body *ast.BlockStmt -} - -func checkFunc(pass *analysis.Pass, reports *reports, funcDecl funcDecl, opts checkFuncOpts) { - if !opts.checkFirst && !opts.checkBegin && !opts.checkName { - return - } - - if opts.skipPrefix != "" && strings.HasPrefix(funcDecl.Name.Name, opts.skipPrefix) { - return - } - - p, pos, ok := searchFuncParam(pass, funcDecl, opts.hpType) - if !ok { - return - } - - if opts.checkFirst { - if pos != 0 { - checkFirstPassed := false - if pos == 1 && opts.ctxType != nil { - _, pos, ok := searchFuncParam(pass, funcDecl, opts.ctxType) - checkFirstPassed = ok && (pos == 0) - } - - if !checkFirstPassed { - reports.Reportf(funcDecl.Pos, "parameter %s should be the first or after context.Context", opts.hpType) - } - } - } - - if len(p.Names) > 0 && p.Names[0].Name != "_" { - if opts.checkName { - if p.Names[0].Name != opts.varName { - reports.Reportf(funcDecl.Pos, "parameter %s should have name %s", opts.hpType, opts.varName) - } - } - - if opts.checkBegin { - if len(funcDecl.Body.List) == 0 || !isTHelperCall(pass, funcDecl.Body.List[0], opts.fnHelper) { - reports.Reportf(funcDecl.Pos, "test helper function should start from %s.Helper()", opts.varName) - } - } - } -} - -// searchFuncParam search a function param with desired type. -// It returns the param field, its position, and true if something is found. -func searchFuncParam(pass *analysis.Pass, f funcDecl, p types.Type) (*ast.Field, int, bool) { - for i, f := range f.Type.Params.List { - if isExprHasType(pass, f.Type, p) { - return f, i, true - } - } - return nil, 0, false -} - -// isTHelperCall returns true if provided statement 's' is t.Helper() or b.Helper() call. -func isTHelperCall(pass *analysis.Pass, s ast.Stmt, tHelper types.Object) bool { - exprStmt, ok := s.(*ast.ExprStmt) - if !ok { - return false - } - - callExpr, ok := exprStmt.X.(*ast.CallExpr) - if !ok { - return false - } - - selExpr, ok := callExpr.Fun.(*ast.SelectorExpr) - if !ok { - return false - } - - return isSelectorCall(pass, selExpr, tHelper) -} - -// extractSubtestExp analyzes that call expresion 'e' is t.Run or b.Run -// and returns subtest function. -func extractSubtestExp( - pass *analysis.Pass, e *ast.CallExpr, tbRun types.Object, testFuncType types.Type, -) []ast.Expr { - selExpr, ok := e.Fun.(*ast.SelectorExpr) - if !ok { - return nil - } - - if !isSelectorCall(pass, selExpr, tbRun) { - return nil - } - - if len(e.Args) != 2 { - return nil - } - - if funcs := unwrapTestingFunctionBuilding(pass, e.Args[1], testFuncType); funcs != nil { - return funcs - } - - return []ast.Expr{e.Args[1]} -} - -// extractSubtestFuzzExp analyzes that call expresion 'e' is f.Fuzz -// and returns subtest function. -func extractSubtestFuzzExp( - pass *analysis.Pass, e *ast.CallExpr, fuzzRun types.Object, -) []ast.Expr { - selExpr, ok := e.Fun.(*ast.SelectorExpr) - if !ok { - return nil - } - - if !isSelectorCall(pass, selExpr, fuzzRun) { - return nil - } - - if len(e.Args) != 1 { - return nil - } - - return []ast.Expr{e.Args[0]} -} - -// unwrapTestingFunctionConstruction checks that expresion is build testing functions -// and returns the result of building. 
-func unwrapTestingFunctionBuilding(pass *analysis.Pass, expr ast.Expr, testFuncType types.Type) []ast.Expr { - callExpr, ok := expr.(*ast.CallExpr) - if !ok { - return nil - } - - var funcDecl funcDecl - switch f := callExpr.Fun.(type) { - case *ast.FuncLit: - funcDecl.Body = f.Body - funcDecl.Type = f.Type - case *ast.Ident: - funObjDecl := findFunctionDeclaration(pass, f) - if funObjDecl == nil { - return nil - } - - funcDecl.Body = funObjDecl.Body - funcDecl.Type = funObjDecl.Type - case *ast.SelectorExpr: - fd := findSelectorDeclaration(pass, f) - if fd == nil { - return nil - } - - funcDecl.Body = fd.Body - funcDecl.Type = fd.Type - default: - return nil - } - - results := funcDecl.Type.Results.List - if len(results) != 1 || !isExprHasType(pass, results[0].Type, testFuncType) { - return nil - } - - var funcs []ast.Expr - ast.Inspect(funcDecl.Body, func(n ast.Node) bool { - if n == nil { - return false - } - - if retStmt, ok := n.(*ast.ReturnStmt); ok { - if len(retStmt.Results) == 1 { - funcs = append(funcs, retStmt.Results[0]) - } - } - return true - }) - - return funcs -} - -// funcDefPosition returns a function's position. -// It works with anonymous functions as well with function names. -func funcDefPosition(pass *analysis.Pass, e ast.Expr) token.Pos { - anonFunLit, ok := e.(*ast.FuncLit) - if ok { - return anonFunLit.Pos() - } - - funIdent, ok := e.(*ast.Ident) - if !ok { - selExpr, ok := e.(*ast.SelectorExpr) - if !ok { - return token.NoPos - } - funIdent = selExpr.Sel - } - - funDef, ok := pass.TypesInfo.Uses[funIdent] - if !ok { - return token.NoPos - } - - return funDef.Pos() -} - -// isSelectorCall checks is selExpr is a call expresion on specific callObj. -// Useful to check Run() call for t.Run or b.Run. -func isSelectorCall(pass *analysis.Pass, selExpr *ast.SelectorExpr, callObj types.Object) bool { - sel, ok := pass.TypesInfo.Selections[selExpr] - if !ok { - return false - } - - return sel.Obj() == callObj -} - -// isExprHasType returns true if expr has expected type. -func isExprHasType(pass *analysis.Pass, expr ast.Expr, expType types.Type) bool { - typeInfo, ok := pass.TypesInfo.Types[expr] - if !ok { - return false - } - - return types.Identical(typeInfo.Type, expType) -} - -// findSelectorDeclaration returns function declaration called by selector expression. -func findSelectorDeclaration(pass *analysis.Pass, expr *ast.SelectorExpr) *ast.FuncDecl { - xsel, ok := pass.TypesInfo.Selections[expr] - if !ok { - return nil - } - - for _, file := range pass.Files { - for _, decl := range file.Decls { - fd, ok := decl.(*ast.FuncDecl) - if ok && fd.Recv != nil && len(fd.Recv.List) == 1 { - recvType, ok := fd.Recv.List[0].Type.(*ast.Ident) - if !ok { - continue - } - - recvObj, ok := pass.TypesInfo.Uses[recvType] - if !ok { - continue - } - - if !(types.Identical(recvObj.Type(), xsel.Recv())) { - continue - } - - if fd.Name.Name == expr.Sel.Name { - return fd - } - } - } - } - - return nil -} - -// findFunctionDeclaration returns function declaration called by identity. 
-func findFunctionDeclaration(pass *analysis.Pass, ident *ast.Ident) *ast.FuncDecl { - if ident.Obj != nil { - if funObjDecl, ok := ident.Obj.Decl.(*ast.FuncDecl); ok { - return funObjDecl - } - } - - obj := pass.TypesInfo.ObjectOf(ident) - if obj == nil { - return nil - } - - for _, file := range pass.Files { - for _, decl := range file.Decls { - funcDecl, ok := decl.(*ast.FuncDecl) - if !ok { - continue - } - - if funcDecl.Name.Pos() == obj.Pos() { - return funcDecl - } - } - } - - return nil -} diff --git a/vendor/github.com/kulti/thelper/pkg/analyzer/report.go b/vendor/github.com/kulti/thelper/pkg/analyzer/report.go deleted file mode 100644 index 4a23e36d5..000000000 --- a/vendor/github.com/kulti/thelper/pkg/analyzer/report.go +++ /dev/null @@ -1,56 +0,0 @@ -package analyzer - -import ( - "go/token" - - "golang.org/x/tools/go/analysis" -) - -type reports struct { - reports []report - filter map[token.Pos]struct{} - nofilter map[token.Pos]struct{} -} - -type report struct { - pos token.Pos - format string - args []interface{} -} - -func (rr *reports) Reportf(pos token.Pos, format string, args ...interface{}) { - rr.reports = append(rr.reports, report{ - pos: pos, - format: format, - args: args, - }) -} - -func (rr *reports) Filter(pos token.Pos) { - if pos.IsValid() { - if rr.filter == nil { - rr.filter = make(map[token.Pos]struct{}) - } - rr.filter[pos] = struct{}{} - } -} - -func (rr *reports) NoFilter(pos token.Pos) { - if pos.IsValid() { - if rr.nofilter == nil { - rr.nofilter = make(map[token.Pos]struct{}) - } - rr.nofilter[pos] = struct{}{} - } -} - -func (rr reports) Flush(pass *analysis.Pass) { - for _, r := range rr.reports { - if _, ok := rr.filter[r.pos]; ok { - if _, ok := rr.nofilter[r.pos]; !ok { - continue - } - } - pass.Reportf(r.pos, r.format, r.args...) - } -} diff --git a/vendor/github.com/kunwardeep/paralleltest/LICENSE b/vendor/github.com/kunwardeep/paralleltest/LICENSE deleted file mode 100644 index 77f0d26a6..000000000 --- a/vendor/github.com/kunwardeep/paralleltest/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -MIT License - -Copyright (c) 2020 Kunwardeep Bedi - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. 
diff --git a/vendor/github.com/kunwardeep/paralleltest/pkg/paralleltest/paralleltest.go b/vendor/github.com/kunwardeep/paralleltest/pkg/paralleltest/paralleltest.go deleted file mode 100644 index e9187d6fd..000000000 --- a/vendor/github.com/kunwardeep/paralleltest/pkg/paralleltest/paralleltest.go +++ /dev/null @@ -1,320 +0,0 @@ -package paralleltest - -import ( - "flag" - "go/ast" - "go/types" - "strings" - - "golang.org/x/tools/go/analysis" - "golang.org/x/tools/go/ast/inspector" -) - -const Doc = `check that tests use t.Parallel() method -It also checks that the t.Parallel is used if multiple tests cases are run as part of single test. -As part of ensuring parallel tests works as expected it checks for reinitialising of the range value -over the test cases.(https://tinyurl.com/y6555cy6)` - -func NewAnalyzer() *analysis.Analyzer { - return newParallelAnalyzer().analyzer -} - -// parallelAnalyzer is an internal analyzer that makes options available to a -// run pass. It wraps an `analysis.Analyzer` that should be returned for -// linters. -type parallelAnalyzer struct { - analyzer *analysis.Analyzer - ignoreMissing bool - ignoreMissingSubtests bool - ignoreLoopVar bool -} - -func newParallelAnalyzer() *parallelAnalyzer { - a := ¶llelAnalyzer{} - - var flags flag.FlagSet - flags.BoolVar(&a.ignoreMissing, "i", false, "ignore missing calls to t.Parallel") - flags.BoolVar(&a.ignoreMissingSubtests, "ignoremissingsubtests", false, "ignore missing calls to t.Parallel in subtests") - flags.BoolVar(&a.ignoreLoopVar, "ignoreloopVar", false, "ignore loop variable detection") - - a.analyzer = &analysis.Analyzer{ - Name: "paralleltest", - Doc: Doc, - Run: a.run, - Flags: flags, - } - return a -} - -func (a *parallelAnalyzer) run(pass *analysis.Pass) (interface{}, error) { - inspector := inspector.New(pass.Files) - - nodeFilter := []ast.Node{ - (*ast.FuncDecl)(nil), - } - - inspector.Preorder(nodeFilter, func(node ast.Node) { - funcDecl := node.(*ast.FuncDecl) - var funcHasParallelMethod, - funcCantParallelMethod, - rangeStatementOverTestCasesExists, - rangeStatementHasParallelMethod, - rangeStatementCantParallelMethod bool - var loopVariableUsedInRun *string - var numberOfTestRun int - var positionOfTestRunNode []ast.Node - var rangeNode ast.Node - - // Check runs for test functions only - isTest, testVar := isTestFunction(funcDecl) - if !isTest { - return - } - - for _, l := range funcDecl.Body.List { - switch v := l.(type) { - - case *ast.ExprStmt: - ast.Inspect(v, func(n ast.Node) bool { - // Check if the test method is calling t.Parallel - if !funcHasParallelMethod { - funcHasParallelMethod = methodParallelIsCalledInTestFunction(n, testVar) - } - - // Check if the test calls t.Setenv, cannot be used in parallel tests or tests with parallel ancestors - if !funcCantParallelMethod { - funcCantParallelMethod = methodSetenvIsCalledInTestFunction(n, testVar) - } - - // Check if the t.Run within the test function is calling t.Parallel - if methodRunIsCalledInTestFunction(n, testVar) { - // n is a call to t.Run; find out the name of the subtest's *testing.T parameter. 
- innerTestVar := getRunCallbackParameterName(n) - - hasParallel := false - cantParallel := false - numberOfTestRun++ - ast.Inspect(v, func(p ast.Node) bool { - if !hasParallel { - hasParallel = methodParallelIsCalledInTestFunction(p, innerTestVar) - } - if !cantParallel { - cantParallel = methodSetenvIsCalledInTestFunction(p, innerTestVar) - } - return true - }) - if !hasParallel && !cantParallel { - positionOfTestRunNode = append(positionOfTestRunNode, n) - } - } - return true - }) - - // Check if the range over testcases is calling t.Parallel - case *ast.RangeStmt: - rangeNode = v - - var loopVars []types.Object - for _, expr := range []ast.Expr{v.Key, v.Value} { - if id, ok := expr.(*ast.Ident); ok { - loopVars = append(loopVars, pass.TypesInfo.ObjectOf(id)) - } - } - - ast.Inspect(v, func(n ast.Node) bool { - // nolint: gocritic - switch r := n.(type) { - case *ast.ExprStmt: - if methodRunIsCalledInRangeStatement(r.X, testVar) { - // r.X is a call to t.Run; find out the name of the subtest's *testing.T parameter. - innerTestVar := getRunCallbackParameterName(r.X) - - rangeStatementOverTestCasesExists = true - - if !rangeStatementHasParallelMethod { - rangeStatementHasParallelMethod = methodParallelIsCalledInMethodRun(r.X, innerTestVar) - } - - if !rangeStatementCantParallelMethod { - rangeStatementCantParallelMethod = methodSetenvIsCalledInMethodRun(r.X, innerTestVar) - } - - if !a.ignoreLoopVar && loopVariableUsedInRun == nil { - if run, ok := r.X.(*ast.CallExpr); ok { - loopVariableUsedInRun = loopVarReferencedInRun(run, loopVars, pass.TypesInfo) - } - } - } - } - return true - }) - } - } - - // Descendents which call Setenv, also prevent tests from calling Parallel - if rangeStatementCantParallelMethod { - funcCantParallelMethod = true - } - - if !a.ignoreMissing && !funcHasParallelMethod && !funcCantParallelMethod { - pass.Reportf(node.Pos(), "Function %s missing the call to method parallel\n", funcDecl.Name.Name) - } - - if rangeStatementOverTestCasesExists && rangeNode != nil { - if !rangeStatementHasParallelMethod && !rangeStatementCantParallelMethod { - if !a.ignoreMissing && !a.ignoreMissingSubtests { - pass.Reportf(rangeNode.Pos(), "Range statement for test %s missing the call to method parallel in test Run\n", funcDecl.Name.Name) - } - } else if loopVariableUsedInRun != nil { - pass.Reportf(rangeNode.Pos(), "Range statement for test %s does not reinitialise the variable %s\n", funcDecl.Name.Name, *loopVariableUsedInRun) - } - } - - // Check if the t.Run is more than one as there is no point making one test parallel - if !a.ignoreMissing && !a.ignoreMissingSubtests { - if numberOfTestRun > 1 && len(positionOfTestRunNode) > 0 { - for _, n := range positionOfTestRunNode { - pass.Reportf(n.Pos(), "Function %s missing the call to method parallel in the test run\n", funcDecl.Name.Name) - } - } - } - }) - - return nil, nil -} - -func methodParallelIsCalledInMethodRun(node ast.Node, testVar string) bool { - return targetMethodIsCalledInMethodRun(node, testVar, "Parallel") -} - -func methodSetenvIsCalledInMethodRun(node ast.Node, testVar string) bool { - return targetMethodIsCalledInMethodRun(node, testVar, "Setenv") -} - -func targetMethodIsCalledInMethodRun(node ast.Node, testVar, targetMethod string) bool { - var called bool - // nolint: gocritic - switch callExp := node.(type) { - case *ast.CallExpr: - for _, arg := range callExp.Args { - if !called { - ast.Inspect(arg, func(n ast.Node) bool { - if !called { - called = exprCallHasMethod(n, testVar, targetMethod) - return true - 
} - return false - }) - } - } - } - return called -} - -func methodParallelIsCalledInTestFunction(node ast.Node, testVar string) bool { - return exprCallHasMethod(node, testVar, "Parallel") -} - -func methodRunIsCalledInRangeStatement(node ast.Node, testVar string) bool { - return exprCallHasMethod(node, testVar, "Run") -} - -func methodRunIsCalledInTestFunction(node ast.Node, testVar string) bool { - return exprCallHasMethod(node, testVar, "Run") -} - -func methodSetenvIsCalledInTestFunction(node ast.Node, testVar string) bool { - return exprCallHasMethod(node, testVar, "Setenv") -} - -func exprCallHasMethod(node ast.Node, receiverName, methodName string) bool { - // nolint: gocritic - switch n := node.(type) { - case *ast.CallExpr: - if fun, ok := n.Fun.(*ast.SelectorExpr); ok { - if receiver, ok := fun.X.(*ast.Ident); ok { - return receiver.Name == receiverName && fun.Sel.Name == methodName - } - } - } - return false -} - -// In an expression of the form t.Run(x, func(q *testing.T) {...}), return the -// value "q". In _most_ code, the name is probably t, but we shouldn't just -// assume. -func getRunCallbackParameterName(node ast.Node) string { - if n, ok := node.(*ast.CallExpr); ok { - if len(n.Args) < 2 { - // We want argument #2, but this call doesn't have two - // arguments. Maybe it's not really t.Run. - return "" - } - funcArg := n.Args[1] - if fun, ok := funcArg.(*ast.FuncLit); ok { - if len(fun.Type.Params.List) < 1 { - // Subtest function doesn't have any parameters. - return "" - } - firstArg := fun.Type.Params.List[0] - // We'll assume firstArg.Type is *testing.T. - if len(firstArg.Names) < 1 { - return "" - } - return firstArg.Names[0].Name - } - } - return "" -} - -// Checks if the function has the param type *testing.T; if it does, then the -// parameter name is returned, too. 
-func isTestFunction(funcDecl *ast.FuncDecl) (bool, string) { - testMethodPackageType := "testing" - testMethodStruct := "T" - testPrefix := "Test" - - if !strings.HasPrefix(funcDecl.Name.Name, testPrefix) { - return false, "" - } - - if funcDecl.Type.Params != nil && len(funcDecl.Type.Params.List) != 1 { - return false, "" - } - - param := funcDecl.Type.Params.List[0] - if starExp, ok := param.Type.(*ast.StarExpr); ok { - if selectExpr, ok := starExp.X.(*ast.SelectorExpr); ok { - if selectExpr.Sel.Name == testMethodStruct { - if s, ok := selectExpr.X.(*ast.Ident); ok { - if len(param.Names) > 0 { - return s.Name == testMethodPackageType, param.Names[0].Name - } - } - } - } - } - - return false, "" -} - -func loopVarReferencedInRun(call *ast.CallExpr, vars []types.Object, typeInfo *types.Info) (found *string) { - if len(call.Args) != 2 { - return - } - - ast.Inspect(call.Args[1], func(n ast.Node) bool { - ident, ok := n.(*ast.Ident) - if !ok { - return true - } - for _, o := range vars { - if typeInfo.ObjectOf(ident) == o { - found = &ident.Name - } - } - return true - }) - - return -} diff --git a/vendor/github.com/kyoh86/exportloopref/.golangci.yml b/vendor/github.com/kyoh86/exportloopref/.golangci.yml deleted file mode 100644 index e876057f3..000000000 --- a/vendor/github.com/kyoh86/exportloopref/.golangci.yml +++ /dev/null @@ -1,4 +0,0 @@ -linters: - enable: - - unparam - - exportloopref diff --git a/vendor/github.com/kyoh86/exportloopref/.goreleaser.yml b/vendor/github.com/kyoh86/exportloopref/.goreleaser.yml deleted file mode 100644 index 95d44aaac..000000000 --- a/vendor/github.com/kyoh86/exportloopref/.goreleaser.yml +++ /dev/null @@ -1,51 +0,0 @@ -# yaml-language-server: $schema=https://goreleaser.com/static/schema.json - -project_name: exportloopref -builds: - - id: default - goos: - - linux - - darwin - - windows - goarch: - - amd64 - - arm64 - - "386" - main: ./cmd/exportloopref - binary: exportloopref -brews: - - install: | - bin.install "exportloopref" - tap: - owner: kyoh86 - name: homebrew-tap - folder: Formula - homepage: https://github.com/kyoh86/exportloopref - description: An analyzer that finds exporting pointers for loop variables. - license: MIT -nfpms: - - builds: - - default - maintainer: kyoh86 - homepage: https://github.com/kyoh86/exportloopref - description: An analyzer that finds exporting pointers for loop variables. - license: MIT - formats: - - apk - - deb - - rpm -archives: - - id: gzip - format: tar.gz - format_overrides: - - goos: windows - format: zip - files: - - licence* - - LICENCE* - - license* - - LICENSE* - - readme* - - README* - - changelog* - - CHANGELOG* diff --git a/vendor/github.com/kyoh86/exportloopref/LICENSE b/vendor/github.com/kyoh86/exportloopref/LICENSE deleted file mode 100644 index 7ac9dba4a..000000000 --- a/vendor/github.com/kyoh86/exportloopref/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -MIT License - -Copyright (c) 2020 kyoh86 - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. 
- -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. -IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, -DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR -OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE -OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/vendor/github.com/kyoh86/exportloopref/Makefile b/vendor/github.com/kyoh86/exportloopref/Makefile deleted file mode 100644 index 4d3ef22f7..000000000 --- a/vendor/github.com/kyoh86/exportloopref/Makefile +++ /dev/null @@ -1,16 +0,0 @@ -.PHONY: gen lint test install man - -VERSION := `git vertag get` -COMMIT := `git rev-parse HEAD` - -gen: - go generate ./... - -lint: gen - golangci-lint run - -test: lint - go test -v --race ./... - -install: test - go install -a -ldflags "-X=main.version=$(VERSION) -X=main.commit=$(COMMIT)" ./... diff --git a/vendor/github.com/kyoh86/exportloopref/README.md b/vendor/github.com/kyoh86/exportloopref/README.md deleted file mode 100644 index 0f581ffce..000000000 --- a/vendor/github.com/kyoh86/exportloopref/README.md +++ /dev/null @@ -1,223 +0,0 @@ -# exportloopref - -An analyzer that finds exporting pointers for loop variables. -![](https://repository-images.githubusercontent.com/256768552/a1c5bb80-dd73-11eb-9453-e520f517e730) -Pin them all! - -[![PkgGoDev](https://pkg.go.dev/badge/kyoh86/exportloopref)](https://pkg.go.dev/kyoh86/exportloopref) -[![Go Report Card](https://goreportcard.com/badge/github.com/kyoh86/exportloopref)](https://goreportcard.com/report/github.com/kyoh86/exportloopref) -[![Coverage Status](https://img.shields.io/codecov/c/github/kyoh86/exportloopref.svg)](https://codecov.io/gh/kyoh86/exportloopref) -[![Release](https://github.com/kyoh86/exportloopref/workflows/Release/badge.svg)](https://github.com/kyoh86/exportloopref/releases) - -## What's this? - -Sample problem code from: https://github.com/kyoh86/exportloopref/blob/main/testdata/src/simple/simple.go - -```go -package main - -func main() { - var intArray [4]*int - var intSlice []*int - var intRef *int - var intStr struct{ x *int } - - println("loop expecting 10, 11, 12, 13") - for i, p := range []int{10, 11, 12, 13} { - printp(&p) // not a diagnostic - intSlice = append(intSlice, &p) // want "exporting a pointer for the loop variable p" - intArray[i] = &p // want "exporting a pointer for the loop variable p" - if i%2 == 0 { - intRef = &p // want "exporting a pointer for the loop variable p" - intStr.x = &p // want "exporting a pointer for the loop variable p" - } - var vStr struct{ x *int } - var vArray [4]*int - var v *int - if i%2 == 0 { - v = &p // not a diagnostic (x is local variable) - vArray[1] = &p // not a diagnostic (x is local variable) - vStr.x = &p - } - _ = v - } - - println(`slice expecting "10, 11, 12, 13" but "13, 13, 13, 13"`) - for _, p := range intSlice { - printp(p) - } - println(`array expecting "10, 11, 12, 13" but "13, 13, 13, 13"`) - for _, p := range intArray { - printp(p) - } - println(`captured value expecting "12" but "13"`) - printp(intRef) -} - -func printp(p *int) { - println(*p) -} -``` - -In Go, the `p` variable in the above loops is actually a single variable. -So in many case (like the above), using it makes for us annoying bugs. - -You can find them with `exportloopref`, and fix it. 
- -```go -package main - -func main() { - var intArray [4]*int - var intSlice []*int - var intRef *int - var intStr struct{ x *int } - - println("loop expecting 10, 11, 12, 13") - for i, p := range []int{10, 11, 12, 13} { - p := p // FIX variable into the local variable - printp(&p) - intSlice = append(intSlice, &p) - intArray[i] = &p - if i%2 == 0 { - intRef = &p - intStr.x = &p - } - var vStr struct{ x *int } - var vArray [4]*int - var v *int - if i%2 == 0 { - v = &p - vArray[1] = &p - vStr.x = &p - } - _ = v - } - - println(`slice expecting "10, 11, 12, 13"`) - for _, p := range intSlice { - printp(p) - } - println(`array expecting "10, 11, 12, 13"`) - for _, p := range intArray { - printp(p) - } - println(`captured value expecting "12"`) - printp(intRef) -} - -func printp(p *int) { - println(*p) -} -``` - -ref: https://github.com/kyoh86/exportloopref/blob/main/testdata/src/fixed/fixed.go - -## Sensing policy - -I want to make exportloopref as accurately as possible. -So some cases of lints will be false-negative. - -e.g. - -```go -var s Foo -for _, p := range []int{10, 11, 12, 13} { - s.Bar(&p) // If s stores the pointer, it will be bug. -} -``` - -If you want to report all of lints (with some false-positives), -you should use [looppointer](https://github.com/kyoh86/looppointer). - -### Known false negatives - -Case 1: pass the pointer to function to export. - -Case 2: pass the pointer to local variable, and export it. - -```go -package main - -type List []*int - -func (l *List) AppendP(p *int) { - *l = append(*l, p) -} - -func main() { - var slice []*int - list := List{} - - println("loop expect exporting 10, 11, 12, 13") - for _, v := range []int{10, 11, 12, 13} { - list.AppendP(&v) // Case 1: wanted "exporting a pointer for the loop variable v", but cannot be found - - p := &v // p is the local variable - slice = append(slice, p) // Case 2: wanted "exporting a pointer for the loop variable v", but cannot be found - } - - println(`slice expecting "10, 11, 12, 13" but "13, 13, 13, 13"`) - for _, p := range slice { - printp(p) - } - println(`array expecting "10, 11, 12, 13" but "13, 13, 13, 13"`) - for _, p := range ([]*int)(list) { - printp(p) - } -} - -func printp(p *int) { - println(*p) -} -``` - -## Install - -go: - -```console -$ go get github.com/kyoh86/exportloopref/cmd/exportloopref -``` - -[homebrew](https://brew.sh/): - -```console -$ brew install kyoh86/tap/exportloopref -``` - -[gordon](https://github.com/kyoh86/gordon): - -```console -$ gordon install kyoh86/exportloopref -``` - -## Usage - -``` -exportloopref [-flag] [package] -``` - -### Flags - -| Flag | Description | -| --- | --- | -| -V | print version and exit | -| -all | no effect (deprecated) | -| -c int | display offending line with this many lines of context (default -1) | -| -cpuprofile string | write CPU profile to this file | -| -debug string | debug flags, any subset of "fpstv" | -| -fix | apply all suggested fixes | -| -flags | print analyzer flags in JSON | -| -json | emit JSON output | -| -memprofile string | write memory profile to this file | -| -source | no effect (deprecated) | -| -tags string | no effect (deprecated) | -| -trace string | write trace log to this file | -| -v | no effect (deprecated) | - -# LICENSE - -[![MIT License](http://img.shields.io/badge/license-MIT-blue.svg)](http://www.opensource.org/licenses/MIT) - -This is distributed under the [MIT License](http://www.opensource.org/licenses/MIT). 
diff --git a/vendor/github.com/kyoh86/exportloopref/exportloopref.go b/vendor/github.com/kyoh86/exportloopref/exportloopref.go deleted file mode 100644 index d071d5c35..000000000 --- a/vendor/github.com/kyoh86/exportloopref/exportloopref.go +++ /dev/null @@ -1,334 +0,0 @@ -package exportloopref - -import ( - "fmt" - "go/ast" - "go/token" - "go/types" - - "golang.org/x/tools/go/analysis" - "golang.org/x/tools/go/analysis/passes/inspect" - "golang.org/x/tools/go/ast/inspector" -) - -var Analyzer = &analysis.Analyzer{ - Name: "exportloopref", - Doc: "checks for pointers to enclosing loop variables", - Run: run, - RunDespiteErrors: true, - Requires: []*analysis.Analyzer{inspect.Analyzer}, -} - -func run(pass *analysis.Pass) (interface{}, error) { - inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) - - search := &Searcher{ - LoopVars: map[token.Pos]struct{}{}, - LocalVars: map[token.Pos]map[token.Pos]struct{}{}, - Pass: pass, - } - - nodeFilter := []ast.Node{ - (*ast.RangeStmt)(nil), - (*ast.ForStmt)(nil), - (*ast.DeclStmt)(nil), - (*ast.AssignStmt)(nil), - (*ast.UnaryExpr)(nil), - } - - inspect.WithStack(nodeFilter, search.CheckAndReport) - - return nil, nil -} - -type Searcher struct { - // LoopVars is positions that loop-variables are declared like below. - // - for , := range ... - // - for := ; ; - LoopVars map[token.Pos]struct{} - // LocalVars is positions of loops and the variables declared in them. - // Use this to determine if a point assignment is an export outside the loop. - LocalVars map[token.Pos]map[token.Pos]struct{} - - Pass *analysis.Pass -} - -// CheckAndReport inspects each node with stack. -// It is implemented as the I/F of the "golang.org/x/tools/go/analysis/passes/inspect".Analysis.WithStack. -func (s *Searcher) CheckAndReport(n ast.Node, push bool, stack []ast.Node) bool { - id, insert, digg := s.Check(n, stack) - if id == nil { - // no prob. - return digg - } - - // suggests fix - var suggest []analysis.SuggestedFix - if insert != token.NoPos { - suggest = []analysis.SuggestedFix{{ - Message: fmt.Sprintf("loop variable %s should be pinned", id.Name), - TextEdits: []analysis.TextEdit{{ - Pos: insert, - End: insert, - NewText: []byte(fmt.Sprintf("%[1]s := %[1]s\n", id.Name)), - }}, - }} - } - - // report a diagnostic - d := analysis.Diagnostic{Pos: id.Pos(), - End: id.End(), - Message: fmt.Sprintf("exporting a pointer for the loop variable %s", id.Name), - Category: "exportloopref", - SuggestedFixes: suggest, - } - s.Pass.Report(d) - return digg -} - -// Check each node and stack, whether it exports loop variables or not. -// Finding export, report the *ast.Ident of exported loop variable, -// and token.Pos to insert assignment to fix the diagnostic. -func (s *Searcher) Check(n ast.Node, stack []ast.Node) (loopVar *ast.Ident, insertPos token.Pos, digg bool) { - switch typed := n.(type) { - case *ast.RangeStmt: - s.parseRangeStmt(typed) - case *ast.ForStmt: - s.parseForStmt(typed) - case *ast.DeclStmt: - s.parseDeclStmt(typed, stack) - case *ast.AssignStmt: - s.parseAssignStmt(typed, stack) - - case *ast.UnaryExpr: - return s.checkUnaryExpr(typed, stack) - } - return nil, token.NoPos, true -} - -// parseRangeStmt will check range statement (i.e. `for , := range ...`), -// and collect positions of and . -func (s *Searcher) parseRangeStmt(n *ast.RangeStmt) { - s.storeLoopVars(n.Key) - s.storeLoopVars(n.Value) -} - -// parseForStmt will check for statement (i.e. `for := ; ; `), -// and collect positions of . 
-func (s *Searcher) parseForStmt(n *ast.ForStmt) { - switch post := n.Post.(type) { - case *ast.AssignStmt: - // e.g. for p = head; p != nil; p = p.next - for _, lhs := range post.Lhs { - s.storeLoopVars(lhs) - } - case *ast.IncDecStmt: - // e.g. for i := 0; i < n; i++ - s.storeLoopVars(post.X) - } -} - -func (s *Searcher) storeLoopVars(expr ast.Expr) { - if id, ok := expr.(*ast.Ident); ok { - s.LoopVars[id.Pos()] = struct{}{} - } -} - -// parseDeclStmt will parse declaring statement (i.e. `var`, `type`, `const`), -// and store the position if it is "var" declaration and is in any loop. -func (s *Searcher) parseDeclStmt(n *ast.DeclStmt, stack []ast.Node) { - genDecl, ok := n.Decl.(*ast.GenDecl) - if !ok { - // (dead branch) - // if the Decl is not GenDecl (i.e. `var`, `type` or `const` statement), it is ignored - return - } - if genDecl.Tok != token.VAR { - // if the Decl is not `var` (may be `type` or `const`), it is ignored - return - } - - loop, _ := s.innermostLoop(stack) - if loop == nil { - return - } - - // Register declared variables - for _, spec := range genDecl.Specs { - for _, name := range spec.(*ast.ValueSpec).Names { - s.storeLocalVar(loop, name) - } - } -} - -// parseDeclStmt will parse assignment statement (i.e. ` = `), -// and store the position if it is . -func (s *Searcher) parseAssignStmt(n *ast.AssignStmt, stack []ast.Node) { - if n.Tok != token.DEFINE { - // if the statement is simple assignment (without definement), it is ignored - return - } - - loop, _ := s.innermostLoop(stack) - if loop == nil { - return - } - - // Find statements declaring local variable - for _, h := range n.Lhs { - s.storeLocalVar(loop, h) - } -} - -func (s *Searcher) storeLocalVar(loop ast.Node, expr ast.Expr) { - loopPos := loop.Pos() - id, ok := expr.(*ast.Ident) - if !ok { - return - } - vars, ok := s.LocalVars[loopPos] - if !ok { - vars = map[token.Pos]struct{}{} - } - vars[id.Obj.Pos()] = struct{}{} - s.LocalVars[loopPos] = vars -} - -func insertionPosition(block *ast.BlockStmt) token.Pos { - if len(block.List) > 0 { - return block.List[0].Pos() - } - return token.NoPos -} - -func (s *Searcher) innermostLoop(stack []ast.Node) (ast.Node, token.Pos) { - for i := len(stack) - 1; i >= 0; i-- { - switch typed := stack[i].(type) { - case *ast.RangeStmt: - return typed, insertionPosition(typed.Body) - case *ast.ForStmt: - return typed, insertionPosition(typed.Body) - } - } - return nil, token.NoPos -} - -// checkUnaryExpr check unary expression (i.e. like `-x`, `*p` or `&v`) and stack. -// THIS IS THE ESSENTIAL PART OF THIS PARSER. -func (s *Searcher) checkUnaryExpr(n *ast.UnaryExpr, stack []ast.Node) (*ast.Ident, token.Pos, bool) { - if n.Op != token.AND { - return nil, token.NoPos, true - } - - loop, insert := s.innermostLoop(stack) - if loop == nil { - return nil, token.NoPos, true - } - - // Get identity of the referred item - id := s.getIdentity(n.X) - if id == nil { - return nil, token.NoPos, true - } - - // If the identity is not the loop statement variable, - // it will not be reported. 
- if _, isDecl := s.LoopVars[id.Obj.Pos()]; !isDecl { - return nil, token.NoPos, true - } - - // check stack append(), []X{}, map[Type]X{}, Struct{}, &Struct{}, X.(Type), (X) - // in the = - var mayRHPos token.Pos - for i := len(stack) - 2; i >= 0; i-- { - switch typed := stack[i].(type) { - case (*ast.UnaryExpr): - // noop - case (*ast.CompositeLit): - // noop - case (*ast.KeyValueExpr): - // noop - case (*ast.CallExpr): - fun, ok := typed.Fun.(*ast.Ident) - if !ok { - return nil, token.NoPos, false // it's calling a function other of `append`. It cannot be checked - } - - if fun.Name != "append" { - return nil, token.NoPos, false // it's calling a function other of `append`. It cannot be checked - } - - case (*ast.AssignStmt): - if len(typed.Rhs) != len(typed.Lhs) { - return nil, token.NoPos, false // dead logic - } - - // search x where Rhs[x].Pos() == mayRHPos - var index int - for ri, rh := range typed.Rhs { - if rh.Pos() == mayRHPos { - index = ri - break - } - } - - // check Lhs[x] is not local variable - lh := typed.Lhs[index] - isVar := s.isVar(loop, lh) - if !isVar { - return id, insert, false - } - - return nil, token.NoPos, true - default: - // Other statement is not able to be checked. - return nil, token.NoPos, false - } - - // memory an expr that may be right-hand in the AssignStmt - mayRHPos = stack[i].Pos() - } - return nil, token.NoPos, true -} - -func (s *Searcher) isVar(loop ast.Node, expr ast.Expr) bool { - vars := s.LocalVars[loop.Pos()] // map[token.Pos]struct{} - if vars == nil { - return false - } - switch typed := expr.(type) { - case (*ast.Ident): - if typed.Obj == nil { - return false // global var in another file (ref: #13) - } - _, isVar := vars[typed.Obj.Pos()] - return isVar - case (*ast.IndexExpr): // like X[Y], check X - return s.isVar(loop, typed.X) - case (*ast.SelectorExpr): // like X.Y, check X - return s.isVar(loop, typed.X) - } - return false -} - -// Get variable identity -func (s *Searcher) getIdentity(expr ast.Expr) *ast.Ident { - switch typed := expr.(type) { - case *ast.SelectorExpr: - // Ignore if the parent is pointer ref (fix for #2) - if _, ok := s.Pass.TypesInfo.Types[typed.X].Type.(*types.Pointer); ok { - return nil - } - - // Get parent identity; i.e. `a.b` of the `a.b.c`. - return s.getIdentity(typed.X) - - case *ast.Ident: - // Get simple identity; i.e. `a` of the `a`. 
- if typed.Obj == nil { - return nil - } - return typed - } - return nil -} diff --git a/vendor/github.com/ldez/gomoddirectives/.gitignore b/vendor/github.com/ldez/gomoddirectives/.gitignore deleted file mode 100644 index 9da3e0da9..000000000 --- a/vendor/github.com/ldez/gomoddirectives/.gitignore +++ /dev/null @@ -1,2 +0,0 @@ -.idea/ -/gomoddirectives diff --git a/vendor/github.com/ldez/gomoddirectives/.golangci.yml b/vendor/github.com/ldez/gomoddirectives/.golangci.yml deleted file mode 100644 index a2483e95f..000000000 --- a/vendor/github.com/ldez/gomoddirectives/.golangci.yml +++ /dev/null @@ -1,88 +0,0 @@ -run: - deadline: 2m - skip-files: [] - skip-dirs: [] - -linters-settings: - govet: - enable-all: true - gocyclo: - min-complexity: 12 - goconst: - min-len: 3 - min-occurrences: 3 - misspell: - locale: US - gofumpt: - extra-rules: true - depguard: - list-type: blacklist - include-go-root: false - packages: - - github.com/pkg/errors - godox: - keywords: - - FIXME - gocritic: - enabled-tags: - - diagnostic - - style - - performance - disabled-checks: - - sloppyReassign - - rangeValCopy - - octalLiteral - - paramTypeCombine # already handle by gofumpt.extra-rules - settings: - hugeParam: - sizeThreshold: 100 - forbidigo: - forbid: - - '^print(ln)?$' - - '^fmt\.Print(f|ln)?$' - - '^panic$' - - '^spew\.Print(f|ln)?$' - - '^spew\.Dump$' - tagliatelle: - case: - rules: - json: pascal - -linters: - enable-all: true - disable: - - maligned # deprecated - - interfacer # deprecated - - golint # deprecated - - scopelint # deprecated - - sqlclosecheck # not relevant (SQL) - - rowserrcheck # not relevant (SQL) - - cyclop # duplicate of gocyclo - - lll - - dupl - - prealloc - - bodyclose - - wsl - - nlreturn - - gomnd - - testpackage - - paralleltest - - tparallel - - goerr113 - - wrapcheck - - exhaustive - - exhaustivestruct - - varnamelen - -issues: - exclude-use-default: false - max-per-linter: 0 - max-same-issues: 0 - exclude: [] - exclude-rules: - - path: "(.+)_test.go" - linters: - - funlen - - goconst - - path: cmd/gomoddirectives/gomoddirectives.go - text: 'use of `fmt.Println` forbidden' diff --git a/vendor/github.com/ldez/gomoddirectives/LICENSE b/vendor/github.com/ldez/gomoddirectives/LICENSE deleted file mode 100644 index caed523b4..000000000 --- a/vendor/github.com/ldez/gomoddirectives/LICENSE +++ /dev/null @@ -1,190 +0,0 @@ - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. 
- - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. 
If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. 
Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - Copyright 2021 Fernandez Ludovic - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. diff --git a/vendor/github.com/ldez/gomoddirectives/Makefile b/vendor/github.com/ldez/gomoddirectives/Makefile deleted file mode 100644 index dd3b335c7..000000000 --- a/vendor/github.com/ldez/gomoddirectives/Makefile +++ /dev/null @@ -1,15 +0,0 @@ -.PHONY: clean check test build - -default: clean check test build - -clean: - rm -rf dist/ cover.out - -test: clean - go test -v -cover ./... - -check: - golangci-lint run - -build: - go build -v -ldflags "-s -w" -trimpath ./cmd/gomoddirectives/ diff --git a/vendor/github.com/ldez/gomoddirectives/gomoddirectives.go b/vendor/github.com/ldez/gomoddirectives/gomoddirectives.go deleted file mode 100644 index 2a4c90474..000000000 --- a/vendor/github.com/ldez/gomoddirectives/gomoddirectives.go +++ /dev/null @@ -1,125 +0,0 @@ -// Package gomoddirectives a linter that handle `replace`, `retract`, `exclude` directives into `go.mod`. 
-package gomoddirectives - -import ( - "fmt" - "go/token" - "strings" - - "golang.org/x/mod/modfile" -) - -const ( - reasonRetract = "a comment is mandatory to explain why the version has been retracted" - reasonExclude = "exclude directive is not allowed" - reasonReplaceLocal = "local replacement are not allowed" - reasonReplace = "replacement are not allowed" - reasonReplaceIdentical = "the original module and the replacement are identical" - reasonReplaceDuplicate = "multiple replacement of the same module" -) - -// Result the analysis result. -type Result struct { - Reason string - Start token.Position - End token.Position -} - -// NewResult creates a new Result. -func NewResult(file *modfile.File, line *modfile.Line, reason string) Result { - return Result{ - Start: token.Position{Filename: file.Syntax.Name, Line: line.Start.Line, Column: line.Start.LineRune}, - End: token.Position{Filename: file.Syntax.Name, Line: line.End.Line, Column: line.End.LineRune}, - Reason: reason, - } -} - -func (r Result) String() string { - return fmt.Sprintf("%s: %s", r.Start, r.Reason) -} - -// Options the analyzer options. -type Options struct { - ReplaceAllowList []string - ReplaceAllowLocal bool - ExcludeForbidden bool - RetractAllowNoExplanation bool -} - -// Analyze analyzes a project. -func Analyze(opts Options) ([]Result, error) { - f, err := GetModuleFile() - if err != nil { - return nil, fmt.Errorf("failed to get module file: %w", err) - } - - return AnalyzeFile(f, opts), nil -} - -// AnalyzeFile analyzes a mod file. -func AnalyzeFile(file *modfile.File, opts Options) []Result { - var results []Result - - if !opts.RetractAllowNoExplanation { - for _, r := range file.Retract { - if r.Rationale != "" { - continue - } - - results = append(results, NewResult(file, r.Syntax, reasonRetract)) - } - } - - if opts.ExcludeForbidden { - for _, e := range file.Exclude { - results = append(results, NewResult(file, e.Syntax, reasonExclude)) - } - } - - uniqReplace := map[string]struct{}{} - - for _, r := range file.Replace { - reason := check(opts, r) - if reason != "" { - results = append(results, NewResult(file, r.Syntax, reason)) - continue - } - - if r.Old.Path == r.New.Path && r.Old.Version == r.New.Version { - results = append(results, NewResult(file, r.Syntax, reasonReplaceIdentical)) - continue - } - - if _, ok := uniqReplace[r.Old.Path+r.Old.Version]; ok { - results = append(results, NewResult(file, r.Syntax, reasonReplaceDuplicate)) - } - - uniqReplace[r.Old.Path+r.Old.Version] = struct{}{} - } - - return results -} - -func check(o Options, r *modfile.Replace) string { - if isLocal(r) { - if o.ReplaceAllowLocal { - return "" - } - - return fmt.Sprintf("%s: %s", reasonReplaceLocal, r.Old.Path) - } - - for _, v := range o.ReplaceAllowList { - if r.Old.Path == v { - return "" - } - } - - return fmt.Sprintf("%s: %s", reasonReplace, r.Old.Path) -} - -// Filesystem paths found in "replace" directives are represented by a path with an empty version. 
-// https://github.com/golang/mod/blob/bc388b264a244501debfb9caea700c6dcaff10e2/module/module.go#L122-L124 -func isLocal(r *modfile.Replace) bool { - return strings.TrimSpace(r.New.Version) == "" -} diff --git a/vendor/github.com/ldez/gomoddirectives/module.go b/vendor/github.com/ldez/gomoddirectives/module.go deleted file mode 100644 index 907be244f..000000000 --- a/vendor/github.com/ldez/gomoddirectives/module.go +++ /dev/null @@ -1,48 +0,0 @@ -package gomoddirectives - -import ( - "bytes" - "encoding/json" - "errors" - "fmt" - "os" - "os/exec" - - "golang.org/x/mod/modfile" -) - -type modInfo struct { - Path string `json:"Path"` - Dir string `json:"Dir"` - GoMod string `json:"GoMod"` - GoVersion string `json:"GoVersion"` - Main bool `json:"Main"` -} - -// GetModuleFile gets module file. -func GetModuleFile() (*modfile.File, error) { - // https://github.com/golang/go/issues/44753#issuecomment-790089020 - cmd := exec.Command("go", "list", "-m", "-json") - - raw, err := cmd.CombinedOutput() - if err != nil { - return nil, fmt.Errorf("command go list: %w: %s", err, string(raw)) - } - - var v modInfo - err = json.NewDecoder(bytes.NewBuffer(raw)).Decode(&v) - if err != nil { - return nil, fmt.Errorf("unmarshaling error: %w: %s", err, string(raw)) - } - - if v.GoMod == "" { - return nil, errors.New("working directory is not part of a module") - } - - raw, err = os.ReadFile(v.GoMod) - if err != nil { - return nil, fmt.Errorf("reading go.mod file: %w", err) - } - - return modfile.Parse("go.mod", raw, nil) -} diff --git a/vendor/github.com/ldez/gomoddirectives/readme.md b/vendor/github.com/ldez/gomoddirectives/readme.md deleted file mode 100644 index 510c8502e..000000000 --- a/vendor/github.com/ldez/gomoddirectives/readme.md +++ /dev/null @@ -1,16 +0,0 @@ -# gomoddirectives - -[![Sponsor](https://img.shields.io/badge/Sponsor%20me-%E2%9D%A4%EF%B8%8F-pink)](https://github.com/sponsors/ldez) -[![Build Status](https://github.com/ldez/gomoddirectives/workflows/Main/badge.svg?branch=master)](https://github.com/ldez/gomoddirectives/actions) - -A linter that handle [`replace`](https://golang.org/ref/mod#go-mod-file-replace), [`retract`](https://golang.org/ref/mod#go-mod-file-retract), [`exclude`](https://golang.org/ref/mod#go-mod-file-exclude) directives into `go.mod`. 
- -Features: - -- ban all [`replace`](https://golang.org/ref/mod#go-mod-file-replace) directives -- allow only local [`replace`](https://golang.org/ref/mod#go-mod-file-replace) directives -- allow only some [`replace`](https://golang.org/ref/mod#go-mod-file-replace) directives -- force explanation for [`retract`](https://golang.org/ref/mod#go-mod-file-retract) directives -- ban all [`exclude`](https://golang.org/ref/mod#go-mod-file-exclude) directives -- detect duplicated [`replace`](https://golang.org/ref/mod#go-mod-file-replace) directives -- detect identical [`replace`](https://golang.org/ref/mod#go-mod-file-replace) directives diff --git a/vendor/github.com/ldez/tagliatelle/.gitignore b/vendor/github.com/ldez/tagliatelle/.gitignore deleted file mode 100644 index 74c84ce62..000000000 --- a/vendor/github.com/ldez/tagliatelle/.gitignore +++ /dev/null @@ -1,3 +0,0 @@ -.idea/ -/tagliatelle -notes.md diff --git a/vendor/github.com/ldez/tagliatelle/.golangci.yml b/vendor/github.com/ldez/tagliatelle/.golangci.yml deleted file mode 100644 index ec5c5c766..000000000 --- a/vendor/github.com/ldez/tagliatelle/.golangci.yml +++ /dev/null @@ -1,88 +0,0 @@ -run: - timeout: 5m - skip-files: [ ] - skip-dirs: [ ] - -linters-settings: - govet: - enable-all: true - disable: - - fieldalignment - gocyclo: - min-complexity: 15 - goconst: - min-len: 5 - min-occurrences: 3 - misspell: - locale: US - funlen: - lines: -1 - statements: 40 - godox: - keywords: - - FIXME - gofumpt: - extra-rules: true - depguard: - list-type: denylist - include-go-root: false - packages: - - github.com/sirupsen/logrus - - github.com/pkg/errors - gocritic: - enabled-tags: - - diagnostic - - style - - performance - disabled-checks: - - sloppyReassign - - rangeValCopy - - octalLiteral - - paramTypeCombine # already handle by gofumpt.extra-rules - settings: - hugeParam: - sizeThreshold: 100 - -linters: - enable-all: true - disable: - - deadcode # deprecated - - exhaustivestruct # deprecated - - golint # deprecated - - ifshort # deprecated - - interfacer # deprecated - - maligned # deprecated - - nosnakecase # deprecated - - scopelint # deprecated - - structcheck # deprecated - - varcheck # deprecated - - sqlclosecheck # not relevant (SQL) - - rowserrcheck # not relevant (SQL) - - execinquery # not relevant (SQL) - - cyclop # duplicate of gocyclo - - lll - - dupl - - wsl - - nlreturn - - gomnd - - goerr113 - - wrapcheck - - exhaustive - - exhaustruct - - testpackage - - tparallel - - paralleltest - - prealloc - - ifshort - - forcetypeassert - - varnamelen - - nilnil - - errchkjson - - nonamedreturns - -issues: - exclude-use-default: false - max-per-linter: 0 - max-same-issues: 0 - exclude: - - 'package-comments: should have a package comment' diff --git a/vendor/github.com/ldez/tagliatelle/LICENSE b/vendor/github.com/ldez/tagliatelle/LICENSE deleted file mode 100644 index caed523b4..000000000 --- a/vendor/github.com/ldez/tagliatelle/LICENSE +++ /dev/null @@ -1,190 +0,0 @@ - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. 
- - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. 
Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. 
This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - Copyright 2021 Fernandez Ludovic - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. diff --git a/vendor/github.com/ldez/tagliatelle/Makefile b/vendor/github.com/ldez/tagliatelle/Makefile deleted file mode 100644 index 196f70c02..000000000 --- a/vendor/github.com/ldez/tagliatelle/Makefile +++ /dev/null @@ -1,15 +0,0 @@ -.PHONY: clean check test build - -default: clean check test build - -clean: - rm -rf dist/ cover.out - -test: clean - go test -v -cover ./... 
- -check: - golangci-lint run - -build: - go build -ldflags "-s -w" -trimpath ./cmd/tagliatelle/ diff --git a/vendor/github.com/ldez/tagliatelle/readme.md b/vendor/github.com/ldez/tagliatelle/readme.md deleted file mode 100644 index 55a544db8..000000000 --- a/vendor/github.com/ldez/tagliatelle/readme.md +++ /dev/null @@ -1,177 +0,0 @@ -# Tagliatelle - -[![Sponsor](https://img.shields.io/badge/Sponsor%20me-%E2%9D%A4%EF%B8%8F-pink)](https://github.com/sponsors/ldez) -[![Build Status](https://github.com/ldez/tagliatelle/workflows/Main/badge.svg?branch=master)](https://github.com/ldez/tagliatelle/actions) - -A linter that handles struct tags. - -Supported string casing: - -- `camel` -- `pascal` -- `kebab` -- `snake` -- `upperSnake` -- `goCamel` Respects [Go's common initialisms](https://github.com/golang/lint/blob/83fdc39ff7b56453e3793356bcff3070b9b96445/lint.go#L770-L809) (e.g. HttpResponse -> HTTPResponse). -- `goPascal` Respects [Go's common initialisms](https://github.com/golang/lint/blob/83fdc39ff7b56453e3793356bcff3070b9b96445/lint.go#L770-L809) (e.g. HttpResponse -> HTTPResponse). -- `goKebab` Respects [Go's common initialisms](https://github.com/golang/lint/blob/83fdc39ff7b56453e3793356bcff3070b9b96445/lint.go#L770-L809) (e.g. HttpResponse -> HTTPResponse). -- `goSnake` Respects [Go's common initialisms](https://github.com/golang/lint/blob/83fdc39ff7b56453e3793356bcff3070b9b96445/lint.go#L770-L809) (e.g. HttpResponse -> HTTPResponse). -- `header` -- `upper` -- `lower` - -| Source | Camel Case | Go Camel Case | -|----------------|----------------|----------------| -| GooID | gooId | gooID | -| HTTPStatusCode | httpStatusCode | httpStatusCode | -| FooBAR | fooBar | fooBar | -| URL | url | url | -| ID | id | id | -| hostIP | hostIp | hostIP | -| JSON | json | json | -| JSONName | jsonName | jsonName | -| NameJSON | nameJson | nameJSON | -| UneTête | uneTête | uneTête | - -| Source | Pascal Case | Go Pascal Case | -|----------------|----------------|----------------| -| GooID | GooId | GooID | -| HTTPStatusCode | HttpStatusCode | HTTPStatusCode | -| FooBAR | FooBar | FooBar | -| URL | Url | URL | -| ID | Id | ID | -| hostIP | HostIp | HostIP | -| JSON | Json | JSON | -| JSONName | JsonName | JSONName | -| NameJSON | NameJson | NameJSON | -| UneTête | UneTête | UneTête | - -| Source | Snake Case | Upper Snake Case | Go Snake Case | -|----------------|------------------|------------------|------------------| -| GooID | goo_id | GOO_ID | goo_ID | -| HTTPStatusCode | http_status_code | HTTP_STATUS_CODE | HTTP_status_code | -| FooBAR | foo_bar | FOO_BAR | foo_bar | -| URL | url | URL | URL | -| ID | id | ID | ID | -| hostIP | host_ip | HOST_IP | host_IP | -| JSON | json | JSON | JSON | -| JSONName | json_name | JSON_NAME | JSON_name | -| NameJSON | name_json | NAME_JSON | name_JSON | -| UneTête | une_tête | UNE_TÊTE | une_tête | - -| Source | Kebab Case | Go KebabCase | -|----------------|------------------|------------------| -| GooID | goo-id | goo-ID | -| HTTPStatusCode | http-status-code | HTTP-status-code | -| FooBAR | foo-bar | foo-bar | -| URL | url | URL | -| ID | id | ID | -| hostIP | host-ip | host-IP | -| JSON | json | JSON | -| JSONName | json-name | JSON-name | -| NameJSON | name-json | name-JSON | -| UneTête | une-tête | une-tête | - -| Source | Header Case | -|----------------|------------------| -| GooID | Goo-Id | -| HTTPStatusCode | Http-Status-Code | -| FooBAR | Foo-Bar | -| URL | Url | -| ID | Id | -| hostIP | Host-Ip | -| JSON | Json | -| JSONName | Json-Name | -| NameJSON 
| Name-Json | -| UneTête | Une-Tête | - -## Examples - -```go -// json and camel case -type Foo struct { - ID string `json:"ID"` // must be "id" - UserID string `json:"UserID"`// must be "userId" - Name string `json:"name"` - Value string `json:"val,omitempty"`// must be "value" -} -``` - -## What this tool is about - -This tool is about validating tags according to rules you define. -The tool also allows to fix tags according to the rules you defined. - -This tool is not intended to validate the fact a tag in valid or not. -To do that, you can use `go vet`, or use [golangci-lint](https://golangci-lint.run) ["go vet"](https://golangci-lint.run/usage/linters/#govet) linter. - -## How to use the tool - -### As a golangci-lint linter - -Define the rules, you want via your [golangci-lint](https://golangci-lint.run) configuration file: - -```yaml -linters-settings: - tagliatelle: - # Check the struck tag name case. - case: - # Use the struct field name to check the name of the struct tag. - # Default: false - use-field-name: true - rules: - # Any struct tag type can be used. - # Support string case: `camel`, `pascal`, `kebab`, `snake`, `upperSnake`, `goCamel`, `goPascal`, `goKebab`, `goSnake`, `upper`, `lower`, `header`. - json: camel - yaml: camel - xml: camel -``` - -More information here https://golangci-lint.run/usage/linters/#tagliatelle - -### Install and run it from the binary - -Not recommended. - -```shell -go install github.com/ldez/tagliatelle/cmd/tagliatelle@latest -``` - -then launch it manually. - -## Rules - -Here are the default rules for the well known and used tags, when using tagliatelle as a binary or [golangci-lint linter](https://golangci-lint.run/usage/linters/#tagliatelle): - -- `json`: `camel` -- `yaml`: `camel` -- `xml`: `camel` -- `bson`: `camel` -- `avro`: `snake` -- `header`: `header` -- `envconfig`: `upperSnake` - -### Custom Rules - -The tool is not limited to the tags used in example, you can use it to validate any tag. - -You can add your own tag, for example `whatever` and tells the tool you want to use `kebab`. - -This option is only available via [golangci-lint](https://golangci-lint.run). - -```yaml -linters-settings: - tagliatelle: - # Check the struck tag name case. - case: - # Use the struct field name to check the name of the struct tag. - # Default: false - use-field-name: true - rules: - # Any struct tag type can be used. - # Support string case: `camel`, `pascal`, `kebab`, `snake`, `goCamel`, `goPascal`, `goKebab`, `goSnake`, `upper`, `lower` - json: camel - yaml: camel - xml: camel - whatever: kebab -``` diff --git a/vendor/github.com/ldez/tagliatelle/tagliatelle.go b/vendor/github.com/ldez/tagliatelle/tagliatelle.go deleted file mode 100644 index 22c5feb3d..000000000 --- a/vendor/github.com/ldez/tagliatelle/tagliatelle.go +++ /dev/null @@ -1,218 +0,0 @@ -// Package tagliatelle a linter that handle struct tags. -package tagliatelle - -import ( - "encoding/json" - "errors" - "fmt" - "go/ast" - "reflect" - "strings" - - "github.com/ettle/strcase" - "golang.org/x/tools/go/analysis" - "golang.org/x/tools/go/analysis/passes/inspect" - "golang.org/x/tools/go/ast/inspector" -) - -// Config the tagliatelle configuration. -type Config struct { - Rules map[string]string - UseFieldName bool -} - -// New creates an analyzer. 
-func New(config Config) *analysis.Analyzer { - return &analysis.Analyzer{ - Name: "tagliatelle", - Doc: "Checks the struct tags.", - Run: func(pass *analysis.Pass) (interface{}, error) { - if len(config.Rules) == 0 { - return nil, nil - } - - return run(pass, config) - }, - Requires: []*analysis.Analyzer{ - inspect.Analyzer, - }, - } -} - -func run(pass *analysis.Pass, config Config) (interface{}, error) { - isp, ok := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) - if !ok { - return nil, errors.New("missing inspect analyser") - } - - nodeFilter := []ast.Node{ - (*ast.StructType)(nil), - } - - isp.Preorder(nodeFilter, func(n ast.Node) { - node, ok := n.(*ast.StructType) - if !ok { - return - } - - for _, field := range node.Fields.List { - analyze(pass, config, node, field) - } - }) - - return nil, nil -} - -func analyze(pass *analysis.Pass, config Config, n *ast.StructType, field *ast.Field) { - if n.Fields == nil || n.Fields.NumFields() < 1 { - // skip empty structs - return - } - - if field.Tag == nil { - // skip when no struct tag - return - } - - fieldName, err := getFieldName(field) - if err != nil { - pass.Reportf(n.Pos(), "unable to get field name: %v", err) - return - } - - for key, convName := range config.Rules { - if convName == "" { - continue - } - - value, flags, ok := lookupTagValue(field.Tag, key) - if !ok { - // skip when no struct tag for the key - continue - } - - if value == "-" { - // skip when skipped :) - continue - } - - // TODO(ldez): need to be rethink. - // This is an exception because of a bug. - // https://github.com/ldez/tagliatelle/issues/8 - // For now, tagliatelle should try to remain neutral in terms of format. - if hasTagFlag(flags, "inline") { - // skip for inline children (no name to lint) - continue - } - - if value == "" { - value = fieldName - } - - converter, err := getConverter(convName) - if err != nil { - pass.Reportf(n.Pos(), "%s(%s): %v", key, convName, err) - continue - } - - expected := value - if config.UseFieldName { - expected = fieldName - } - - if value != converter(expected) { - pass.Reportf(field.Tag.Pos(), "%s(%s): got '%s' want '%s'", key, convName, value, converter(expected)) - } - } -} - -func getFieldName(field *ast.Field) (string, error) { - var name string - for _, n := range field.Names { - if n.Name != "" { - name = n.Name - } - } - - if name != "" { - return name, nil - } - - return getTypeName(field.Type) -} - -func getTypeName(exp ast.Expr) (string, error) { - switch typ := exp.(type) { - case *ast.Ident: - return typ.Name, nil - case *ast.StarExpr: - return getTypeName(typ.X) - case *ast.SelectorExpr: - return getTypeName(typ.Sel) - default: - bytes, _ := json.Marshal(exp) - return "", fmt.Errorf("unexpected error: type %T: %s", typ, string(bytes)) - } -} - -func lookupTagValue(tag *ast.BasicLit, key string) (name string, flags []string, ok bool) { - raw := strings.Trim(tag.Value, "`") - - value, ok := reflect.StructTag(raw).Lookup(key) - if !ok { - return value, nil, ok - } - - values := strings.Split(value, ",") - - if len(values) < 1 { - return "", nil, true - } - - return values[0], values[1:], true -} - -func hasTagFlag(flags []string, query string) bool { - for _, flag := range flags { - if flag == query { - return true - } - } - - return false -} - -func getConverter(c string) (func(s string) string, error) { - switch c { - case "camel": - return strcase.ToCamel, nil - case "pascal": - return strcase.ToPascal, nil - case "kebab": - return strcase.ToKebab, nil - case "snake": - return strcase.ToSnake, nil 
- case "goCamel": - return strcase.ToGoCamel, nil - case "goPascal": - return strcase.ToGoPascal, nil - case "goKebab": - return strcase.ToGoKebab, nil - case "goSnake": - return strcase.ToGoSnake, nil - case "header": - return toHeader, nil - case "upper": - return strings.ToUpper, nil - case "upperSnake": - return strcase.ToSNAKE, nil - case "lower": - return strings.ToLower, nil - default: - return nil, fmt.Errorf("unsupported case: %s", c) - } -} - -func toHeader(s string) string { - return strcase.ToCase(s, strcase.TitleCase, '-') -} diff --git a/vendor/github.com/leonklingele/grouper/LICENSE b/vendor/github.com/leonklingele/grouper/LICENSE deleted file mode 100644 index f288702d2..000000000 --- a/vendor/github.com/leonklingele/grouper/LICENSE +++ /dev/null @@ -1,674 +0,0 @@ - GNU GENERAL PUBLIC LICENSE - Version 3, 29 June 2007 - - Copyright (C) 2007 Free Software Foundation, Inc. - Everyone is permitted to copy and distribute verbatim copies - of this license document, but changing it is not allowed. - - Preamble - - The GNU General Public License is a free, copyleft license for -software and other kinds of works. - - The licenses for most software and other practical works are designed -to take away your freedom to share and change the works. By contrast, -the GNU General Public License is intended to guarantee your freedom to -share and change all versions of a program--to make sure it remains free -software for all its users. We, the Free Software Foundation, use the -GNU General Public License for most of our software; it applies also to -any other work released this way by its authors. You can apply it to -your programs, too. - - When we speak of free software, we are referring to freedom, not -price. Our General Public Licenses are designed to make sure that you -have the freedom to distribute copies of free software (and charge for -them if you wish), that you receive source code or can get it if you -want it, that you can change the software or use pieces of it in new -free programs, and that you know you can do these things. - - To protect your rights, we need to prevent others from denying you -these rights or asking you to surrender the rights. Therefore, you have -certain responsibilities if you distribute copies of the software, or if -you modify it: responsibilities to respect the freedom of others. - - For example, if you distribute copies of such a program, whether -gratis or for a fee, you must pass on to the recipients the same -freedoms that you received. You must make sure that they, too, receive -or can get the source code. And you must show them these terms so they -know their rights. - - Developers that use the GNU GPL protect your rights with two steps: -(1) assert copyright on the software, and (2) offer you this License -giving you legal permission to copy, distribute and/or modify it. - - For the developers' and authors' protection, the GPL clearly explains -that there is no warranty for this free software. For both users' and -authors' sake, the GPL requires that modified versions be marked as -changed, so that their problems will not be attributed erroneously to -authors of previous versions. - - Some devices are designed to deny users access to install or run -modified versions of the software inside them, although the manufacturer -can do so. This is fundamentally incompatible with the aim of -protecting users' freedom to change the software. 
The systematic -pattern of such abuse occurs in the area of products for individuals to -use, which is precisely where it is most unacceptable. Therefore, we -have designed this version of the GPL to prohibit the practice for those -products. If such problems arise substantially in other domains, we -stand ready to extend this provision to those domains in future versions -of the GPL, as needed to protect the freedom of users. - - Finally, every program is threatened constantly by software patents. -States should not allow patents to restrict development and use of -software on general-purpose computers, but in those that do, we wish to -avoid the special danger that patents applied to a free program could -make it effectively proprietary. To prevent this, the GPL assures that -patents cannot be used to render the program non-free. - - The precise terms and conditions for copying, distribution and -modification follow. - - TERMS AND CONDITIONS - - 0. Definitions. - - "This License" refers to version 3 of the GNU General Public License. - - "Copyright" also means copyright-like laws that apply to other kinds of -works, such as semiconductor masks. - - "The Program" refers to any copyrightable work licensed under this -License. Each licensee is addressed as "you". "Licensees" and -"recipients" may be individuals or organizations. - - To "modify" a work means to copy from or adapt all or part of the work -in a fashion requiring copyright permission, other than the making of an -exact copy. The resulting work is called a "modified version" of the -earlier work or a work "based on" the earlier work. - - A "covered work" means either the unmodified Program or a work based -on the Program. - - To "propagate" a work means to do anything with it that, without -permission, would make you directly or secondarily liable for -infringement under applicable copyright law, except executing it on a -computer or modifying a private copy. Propagation includes copying, -distribution (with or without modification), making available to the -public, and in some countries other activities as well. - - To "convey" a work means any kind of propagation that enables other -parties to make or receive copies. Mere interaction with a user through -a computer network, with no transfer of a copy, is not conveying. - - An interactive user interface displays "Appropriate Legal Notices" -to the extent that it includes a convenient and prominently visible -feature that (1) displays an appropriate copyright notice, and (2) -tells the user that there is no warranty for the work (except to the -extent that warranties are provided), that licensees may convey the -work under this License, and how to view a copy of this License. If -the interface presents a list of user commands or options, such as a -menu, a prominent item in the list meets this criterion. - - 1. Source Code. - - The "source code" for a work means the preferred form of the work -for making modifications to it. "Object code" means any non-source -form of a work. - - A "Standard Interface" means an interface that either is an official -standard defined by a recognized standards body, or, in the case of -interfaces specified for a particular programming language, one that -is widely used among developers working in that language. 
- - The "System Libraries" of an executable work include anything, other -than the work as a whole, that (a) is included in the normal form of -packaging a Major Component, but which is not part of that Major -Component, and (b) serves only to enable use of the work with that -Major Component, or to implement a Standard Interface for which an -implementation is available to the public in source code form. A -"Major Component", in this context, means a major essential component -(kernel, window system, and so on) of the specific operating system -(if any) on which the executable work runs, or a compiler used to -produce the work, or an object code interpreter used to run it. - - The "Corresponding Source" for a work in object code form means all -the source code needed to generate, install, and (for an executable -work) run the object code and to modify the work, including scripts to -control those activities. However, it does not include the work's -System Libraries, or general-purpose tools or generally available free -programs which are used unmodified in performing those activities but -which are not part of the work. For example, Corresponding Source -includes interface definition files associated with source files for -the work, and the source code for shared libraries and dynamically -linked subprograms that the work is specifically designed to require, -such as by intimate data communication or control flow between those -subprograms and other parts of the work. - - The Corresponding Source need not include anything that users -can regenerate automatically from other parts of the Corresponding -Source. - - The Corresponding Source for a work in source code form is that -same work. - - 2. Basic Permissions. - - All rights granted under this License are granted for the term of -copyright on the Program, and are irrevocable provided the stated -conditions are met. This License explicitly affirms your unlimited -permission to run the unmodified Program. The output from running a -covered work is covered by this License only if the output, given its -content, constitutes a covered work. This License acknowledges your -rights of fair use or other equivalent, as provided by copyright law. - - You may make, run and propagate covered works that you do not -convey, without conditions so long as your license otherwise remains -in force. You may convey covered works to others for the sole purpose -of having them make modifications exclusively for you, or provide you -with facilities for running those works, provided that you comply with -the terms of this License in conveying all material for which you do -not control copyright. Those thus making or running the covered works -for you must do so exclusively on your behalf, under your direction -and control, on terms that prohibit them from making any copies of -your copyrighted material outside their relationship with you. - - Conveying under any other circumstances is permitted solely under -the conditions stated below. Sublicensing is not allowed; section 10 -makes it unnecessary. - - 3. Protecting Users' Legal Rights From Anti-Circumvention Law. - - No covered work shall be deemed part of an effective technological -measure under any applicable law fulfilling obligations under article -11 of the WIPO copyright treaty adopted on 20 December 1996, or -similar laws prohibiting or restricting circumvention of such -measures. 
- - When you convey a covered work, you waive any legal power to forbid -circumvention of technological measures to the extent such circumvention -is effected by exercising rights under this License with respect to -the covered work, and you disclaim any intention to limit operation or -modification of the work as a means of enforcing, against the work's -users, your or third parties' legal rights to forbid circumvention of -technological measures. - - 4. Conveying Verbatim Copies. - - You may convey verbatim copies of the Program's source code as you -receive it, in any medium, provided that you conspicuously and -appropriately publish on each copy an appropriate copyright notice; -keep intact all notices stating that this License and any -non-permissive terms added in accord with section 7 apply to the code; -keep intact all notices of the absence of any warranty; and give all -recipients a copy of this License along with the Program. - - You may charge any price or no price for each copy that you convey, -and you may offer support or warranty protection for a fee. - - 5. Conveying Modified Source Versions. - - You may convey a work based on the Program, or the modifications to -produce it from the Program, in the form of source code under the -terms of section 4, provided that you also meet all of these conditions: - - a) The work must carry prominent notices stating that you modified - it, and giving a relevant date. - - b) The work must carry prominent notices stating that it is - released under this License and any conditions added under section - 7. This requirement modifies the requirement in section 4 to - "keep intact all notices". - - c) You must license the entire work, as a whole, under this - License to anyone who comes into possession of a copy. This - License will therefore apply, along with any applicable section 7 - additional terms, to the whole of the work, and all its parts, - regardless of how they are packaged. This License gives no - permission to license the work in any other way, but it does not - invalidate such permission if you have separately received it. - - d) If the work has interactive user interfaces, each must display - Appropriate Legal Notices; however, if the Program has interactive - interfaces that do not display Appropriate Legal Notices, your - work need not make them do so. - - A compilation of a covered work with other separate and independent -works, which are not by their nature extensions of the covered work, -and which are not combined with it such as to form a larger program, -in or on a volume of a storage or distribution medium, is called an -"aggregate" if the compilation and its resulting copyright are not -used to limit the access or legal rights of the compilation's users -beyond what the individual works permit. Inclusion of a covered work -in an aggregate does not cause this License to apply to the other -parts of the aggregate. - - 6. Conveying Non-Source Forms. - - You may convey a covered work in object code form under the terms -of sections 4 and 5, provided that you also convey the -machine-readable Corresponding Source under the terms of this License, -in one of these ways: - - a) Convey the object code in, or embodied in, a physical product - (including a physical distribution medium), accompanied by the - Corresponding Source fixed on a durable physical medium - customarily used for software interchange. 
- - b) Convey the object code in, or embodied in, a physical product - (including a physical distribution medium), accompanied by a - written offer, valid for at least three years and valid for as - long as you offer spare parts or customer support for that product - model, to give anyone who possesses the object code either (1) a - copy of the Corresponding Source for all the software in the - product that is covered by this License, on a durable physical - medium customarily used for software interchange, for a price no - more than your reasonable cost of physically performing this - conveying of source, or (2) access to copy the - Corresponding Source from a network server at no charge. - - c) Convey individual copies of the object code with a copy of the - written offer to provide the Corresponding Source. This - alternative is allowed only occasionally and noncommercially, and - only if you received the object code with such an offer, in accord - with subsection 6b. - - d) Convey the object code by offering access from a designated - place (gratis or for a charge), and offer equivalent access to the - Corresponding Source in the same way through the same place at no - further charge. You need not require recipients to copy the - Corresponding Source along with the object code. If the place to - copy the object code is a network server, the Corresponding Source - may be on a different server (operated by you or a third party) - that supports equivalent copying facilities, provided you maintain - clear directions next to the object code saying where to find the - Corresponding Source. Regardless of what server hosts the - Corresponding Source, you remain obligated to ensure that it is - available for as long as needed to satisfy these requirements. - - e) Convey the object code using peer-to-peer transmission, provided - you inform other peers where the object code and Corresponding - Source of the work are being offered to the general public at no - charge under subsection 6d. - - A separable portion of the object code, whose source code is excluded -from the Corresponding Source as a System Library, need not be -included in conveying the object code work. - - A "User Product" is either (1) a "consumer product", which means any -tangible personal property which is normally used for personal, family, -or household purposes, or (2) anything designed or sold for incorporation -into a dwelling. In determining whether a product is a consumer product, -doubtful cases shall be resolved in favor of coverage. For a particular -product received by a particular user, "normally used" refers to a -typical or common use of that class of product, regardless of the status -of the particular user or of the way in which the particular user -actually uses, or expects or is expected to use, the product. A product -is a consumer product regardless of whether the product has substantial -commercial, industrial or non-consumer uses, unless such uses represent -the only significant mode of use of the product. - - "Installation Information" for a User Product means any methods, -procedures, authorization keys, or other information required to install -and execute modified versions of a covered work in that User Product from -a modified version of its Corresponding Source. The information must -suffice to ensure that the continued functioning of the modified object -code is in no case prevented or interfered with solely because -modification has been made. 
- - If you convey an object code work under this section in, or with, or -specifically for use in, a User Product, and the conveying occurs as -part of a transaction in which the right of possession and use of the -User Product is transferred to the recipient in perpetuity or for a -fixed term (regardless of how the transaction is characterized), the -Corresponding Source conveyed under this section must be accompanied -by the Installation Information. But this requirement does not apply -if neither you nor any third party retains the ability to install -modified object code on the User Product (for example, the work has -been installed in ROM). - - The requirement to provide Installation Information does not include a -requirement to continue to provide support service, warranty, or updates -for a work that has been modified or installed by the recipient, or for -the User Product in which it has been modified or installed. Access to a -network may be denied when the modification itself materially and -adversely affects the operation of the network or violates the rules and -protocols for communication across the network. - - Corresponding Source conveyed, and Installation Information provided, -in accord with this section must be in a format that is publicly -documented (and with an implementation available to the public in -source code form), and must require no special password or key for -unpacking, reading or copying. - - 7. Additional Terms. - - "Additional permissions" are terms that supplement the terms of this -License by making exceptions from one or more of its conditions. -Additional permissions that are applicable to the entire Program shall -be treated as though they were included in this License, to the extent -that they are valid under applicable law. If additional permissions -apply only to part of the Program, that part may be used separately -under those permissions, but the entire Program remains governed by -this License without regard to the additional permissions. - - When you convey a copy of a covered work, you may at your option -remove any additional permissions from that copy, or from any part of -it. (Additional permissions may be written to require their own -removal in certain cases when you modify the work.) You may place -additional permissions on material, added by you to a covered work, -for which you have or can give appropriate copyright permission. 
- - Notwithstanding any other provision of this License, for material you -add to a covered work, you may (if authorized by the copyright holders of -that material) supplement the terms of this License with terms: - - a) Disclaiming warranty or limiting liability differently from the - terms of sections 15 and 16 of this License; or - - b) Requiring preservation of specified reasonable legal notices or - author attributions in that material or in the Appropriate Legal - Notices displayed by works containing it; or - - c) Prohibiting misrepresentation of the origin of that material, or - requiring that modified versions of such material be marked in - reasonable ways as different from the original version; or - - d) Limiting the use for publicity purposes of names of licensors or - authors of the material; or - - e) Declining to grant rights under trademark law for use of some - trade names, trademarks, or service marks; or - - f) Requiring indemnification of licensors and authors of that - material by anyone who conveys the material (or modified versions of - it) with contractual assumptions of liability to the recipient, for - any liability that these contractual assumptions directly impose on - those licensors and authors. - - All other non-permissive additional terms are considered "further -restrictions" within the meaning of section 10. If the Program as you -received it, or any part of it, contains a notice stating that it is -governed by this License along with a term that is a further -restriction, you may remove that term. If a license document contains -a further restriction but permits relicensing or conveying under this -License, you may add to a covered work material governed by the terms -of that license document, provided that the further restriction does -not survive such relicensing or conveying. - - If you add terms to a covered work in accord with this section, you -must place, in the relevant source files, a statement of the -additional terms that apply to those files, or a notice indicating -where to find the applicable terms. - - Additional terms, permissive or non-permissive, may be stated in the -form of a separately written license, or stated as exceptions; -the above requirements apply either way. - - 8. Termination. - - You may not propagate or modify a covered work except as expressly -provided under this License. Any attempt otherwise to propagate or -modify it is void, and will automatically terminate your rights under -this License (including any patent licenses granted under the third -paragraph of section 11). - - However, if you cease all violation of this License, then your -license from a particular copyright holder is reinstated (a) -provisionally, unless and until the copyright holder explicitly and -finally terminates your license, and (b) permanently, if the copyright -holder fails to notify you of the violation by some reasonable means -prior to 60 days after the cessation. - - Moreover, your license from a particular copyright holder is -reinstated permanently if the copyright holder notifies you of the -violation by some reasonable means, this is the first time you have -received notice of violation of this License (for any work) from that -copyright holder, and you cure the violation prior to 30 days after -your receipt of the notice. - - Termination of your rights under this section does not terminate the -licenses of parties who have received copies or rights from you under -this License. 
If your rights have been terminated and not permanently -reinstated, you do not qualify to receive new licenses for the same -material under section 10. - - 9. Acceptance Not Required for Having Copies. - - You are not required to accept this License in order to receive or -run a copy of the Program. Ancillary propagation of a covered work -occurring solely as a consequence of using peer-to-peer transmission -to receive a copy likewise does not require acceptance. However, -nothing other than this License grants you permission to propagate or -modify any covered work. These actions infringe copyright if you do -not accept this License. Therefore, by modifying or propagating a -covered work, you indicate your acceptance of this License to do so. - - 10. Automatic Licensing of Downstream Recipients. - - Each time you convey a covered work, the recipient automatically -receives a license from the original licensors, to run, modify and -propagate that work, subject to this License. You are not responsible -for enforcing compliance by third parties with this License. - - An "entity transaction" is a transaction transferring control of an -organization, or substantially all assets of one, or subdividing an -organization, or merging organizations. If propagation of a covered -work results from an entity transaction, each party to that -transaction who receives a copy of the work also receives whatever -licenses to the work the party's predecessor in interest had or could -give under the previous paragraph, plus a right to possession of the -Corresponding Source of the work from the predecessor in interest, if -the predecessor has it or can get it with reasonable efforts. - - You may not impose any further restrictions on the exercise of the -rights granted or affirmed under this License. For example, you may -not impose a license fee, royalty, or other charge for exercise of -rights granted under this License, and you may not initiate litigation -(including a cross-claim or counterclaim in a lawsuit) alleging that -any patent claim is infringed by making, using, selling, offering for -sale, or importing the Program or any portion of it. - - 11. Patents. - - A "contributor" is a copyright holder who authorizes use under this -License of the Program or a work on which the Program is based. The -work thus licensed is called the contributor's "contributor version". - - A contributor's "essential patent claims" are all patent claims -owned or controlled by the contributor, whether already acquired or -hereafter acquired, that would be infringed by some manner, permitted -by this License, of making, using, or selling its contributor version, -but do not include claims that would be infringed only as a -consequence of further modification of the contributor version. For -purposes of this definition, "control" includes the right to grant -patent sublicenses in a manner consistent with the requirements of -this License. - - Each contributor grants you a non-exclusive, worldwide, royalty-free -patent license under the contributor's essential patent claims, to -make, use, sell, offer for sale, import and otherwise run, modify and -propagate the contents of its contributor version. - - In the following three paragraphs, a "patent license" is any express -agreement or commitment, however denominated, not to enforce a patent -(such as an express permission to practice a patent or covenant not to -sue for patent infringement). 
To "grant" such a patent license to a -party means to make such an agreement or commitment not to enforce a -patent against the party. - - If you convey a covered work, knowingly relying on a patent license, -and the Corresponding Source of the work is not available for anyone -to copy, free of charge and under the terms of this License, through a -publicly available network server or other readily accessible means, -then you must either (1) cause the Corresponding Source to be so -available, or (2) arrange to deprive yourself of the benefit of the -patent license for this particular work, or (3) arrange, in a manner -consistent with the requirements of this License, to extend the patent -license to downstream recipients. "Knowingly relying" means you have -actual knowledge that, but for the patent license, your conveying the -covered work in a country, or your recipient's use of the covered work -in a country, would infringe one or more identifiable patents in that -country that you have reason to believe are valid. - - If, pursuant to or in connection with a single transaction or -arrangement, you convey, or propagate by procuring conveyance of, a -covered work, and grant a patent license to some of the parties -receiving the covered work authorizing them to use, propagate, modify -or convey a specific copy of the covered work, then the patent license -you grant is automatically extended to all recipients of the covered -work and works based on it. - - A patent license is "discriminatory" if it does not include within -the scope of its coverage, prohibits the exercise of, or is -conditioned on the non-exercise of one or more of the rights that are -specifically granted under this License. You may not convey a covered -work if you are a party to an arrangement with a third party that is -in the business of distributing software, under which you make payment -to the third party based on the extent of your activity of conveying -the work, and under which the third party grants, to any of the -parties who would receive the covered work from you, a discriminatory -patent license (a) in connection with copies of the covered work -conveyed by you (or copies made from those copies), or (b) primarily -for and in connection with specific products or compilations that -contain the covered work, unless you entered into that arrangement, -or that patent license was granted, prior to 28 March 2007. - - Nothing in this License shall be construed as excluding or limiting -any implied license or other defenses to infringement that may -otherwise be available to you under applicable patent law. - - 12. No Surrender of Others' Freedom. - - If conditions are imposed on you (whether by court order, agreement or -otherwise) that contradict the conditions of this License, they do not -excuse you from the conditions of this License. If you cannot convey a -covered work so as to satisfy simultaneously your obligations under this -License and any other pertinent obligations, then as a consequence you may -not convey it at all. For example, if you agree to terms that obligate you -to collect a royalty for further conveying from those to whom you convey -the Program, the only way you could satisfy both those terms and this -License would be to refrain entirely from conveying the Program. - - 13. Use with the GNU Affero General Public License. 
- - Notwithstanding any other provision of this License, you have -permission to link or combine any covered work with a work licensed -under version 3 of the GNU Affero General Public License into a single -combined work, and to convey the resulting work. The terms of this -License will continue to apply to the part which is the covered work, -but the special requirements of the GNU Affero General Public License, -section 13, concerning interaction through a network will apply to the -combination as such. - - 14. Revised Versions of this License. - - The Free Software Foundation may publish revised and/or new versions of -the GNU General Public License from time to time. Such new versions will -be similar in spirit to the present version, but may differ in detail to -address new problems or concerns. - - Each version is given a distinguishing version number. If the -Program specifies that a certain numbered version of the GNU General -Public License "or any later version" applies to it, you have the -option of following the terms and conditions either of that numbered -version or of any later version published by the Free Software -Foundation. If the Program does not specify a version number of the -GNU General Public License, you may choose any version ever published -by the Free Software Foundation. - - If the Program specifies that a proxy can decide which future -versions of the GNU General Public License can be used, that proxy's -public statement of acceptance of a version permanently authorizes you -to choose that version for the Program. - - Later license versions may give you additional or different -permissions. However, no additional obligations are imposed on any -author or copyright holder as a result of your choosing to follow a -later version. - - 15. Disclaimer of Warranty. - - THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY -APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT -HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY -OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, -THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR -PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM -IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF -ALL NECESSARY SERVICING, REPAIR OR CORRECTION. - - 16. Limitation of Liability. - - IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING -WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS -THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY -GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE -USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF -DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD -PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), -EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF -SUCH DAMAGES. - - 17. Interpretation of Sections 15 and 16. - - If the disclaimer of warranty and limitation of liability provided -above cannot be given local legal effect according to their terms, -reviewing courts shall apply local law that most closely approximates -an absolute waiver of all civil liability in connection with the -Program, unless a warranty or assumption of liability accompanies a -copy of the Program in return for a fee. 
- - END OF TERMS AND CONDITIONS - - How to Apply These Terms to Your New Programs - - If you develop a new program, and you want it to be of the greatest -possible use to the public, the best way to achieve this is to make it -free software which everyone can redistribute and change under these terms. - - To do so, attach the following notices to the program. It is safest -to attach them to the start of each source file to most effectively -state the exclusion of warranty; and each file should have at least -the "copyright" line and a pointer to where the full notice is found. - - - Copyright (C) - - This program is free software: you can redistribute it and/or modify - it under the terms of the GNU General Public License as published by - the Free Software Foundation, either version 3 of the License, or - (at your option) any later version. - - This program is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - GNU General Public License for more details. - - You should have received a copy of the GNU General Public License - along with this program. If not, see . - -Also add information on how to contact you by electronic and paper mail. - - If the program does terminal interaction, make it output a short -notice like this when it starts in an interactive mode: - - Copyright (C) - This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'. - This is free software, and you are welcome to redistribute it - under certain conditions; type `show c' for details. - -The hypothetical commands `show w' and `show c' should show the appropriate -parts of the General Public License. Of course, your program's commands -might be different; for a GUI interface, you would use an "about box". - - You should also get your employer (if you work as a programmer) or school, -if any, to sign a "copyright disclaimer" for the program, if necessary. -For more information on this, and how to apply and follow the GNU GPL, see -. - - The GNU General Public License does not permit incorporating your program -into proprietary programs. If your program is a subroutine library, you -may consider it more useful to permit linking proprietary applications with -the library. If this is what you want to do, use the GNU Lesser General -Public License instead of this License. But first, please read -. 
diff --git a/vendor/github.com/leonklingele/grouper/pkg/analyzer/analyzer.go b/vendor/github.com/leonklingele/grouper/pkg/analyzer/analyzer.go deleted file mode 100644 index 9852c7838..000000000 --- a/vendor/github.com/leonklingele/grouper/pkg/analyzer/analyzer.go +++ /dev/null @@ -1,89 +0,0 @@ -package analyzer - -import ( - "fmt" - "go/ast" - - "github.com/leonklingele/grouper/pkg/analyzer/consts" - "github.com/leonklingele/grouper/pkg/analyzer/imports" - "github.com/leonklingele/grouper/pkg/analyzer/types" - "github.com/leonklingele/grouper/pkg/analyzer/vars" - - "golang.org/x/tools/go/analysis" - "golang.org/x/tools/go/analysis/passes/inspect" -) - -const ( - Name = "grouper" - Doc = `expression group analyzer: require 'import', 'const', 'var' and/or 'type' declaration groups` -) - -func New() *analysis.Analyzer { - return &analysis.Analyzer{ //nolint:exhaustivestruct // we do not need all fields - Name: Name, - Doc: Doc, - Flags: Flags(), - Run: run, - Requires: []*analysis.Analyzer{inspect.Analyzer}, - } -} - -func run(p *analysis.Pass) (interface{}, error) { - flagLookupBool := func(name string) bool { - return p.Analyzer.Flags.Lookup(name).Value.String() == "true" - } - - c := &Config{ - ConstsConfig: &consts.Config{ - RequireSingleConst: flagLookupBool(FlagNameConstRequireSingleConst), - RequireGrouping: flagLookupBool(FlagNameConstRequireGrouping), - }, - - ImportsConfig: &imports.Config{ - RequireSingleImport: flagLookupBool(FlagNameImportRequireSingleImport), - RequireGrouping: flagLookupBool(FlagNameImportRequireGrouping), - }, - - TypesConfig: &types.Config{ - RequireSingleType: flagLookupBool(FlagNameTypeRequireSingleType), - RequireGrouping: flagLookupBool(FlagNameTypeRequireGrouping), - }, - - VarsConfig: &vars.Config{ - RequireSingleVar: flagLookupBool(FlagNameVarRequireSingleVar), - RequireGrouping: flagLookupBool(FlagNameVarRequireGrouping), - }, - } - - return nil, pass(c, p) -} - -func pass(c *Config, p *analysis.Pass) error { - for _, f := range p.Files { - if err := filepass(c, p, f); err != nil { - return err - } - } - - return nil -} - -func filepass(c *Config, p *analysis.Pass, f *ast.File) error { - if err := consts.Filepass(c.ConstsConfig, p, f); err != nil { - return fmt.Errorf("failed to consts.Filepass: %w", err) - } - - if err := imports.Filepass(c.ImportsConfig, p, f); err != nil { - return fmt.Errorf("failed to imports.Filepass: %w", err) - } - - if err := types.Filepass(c.TypesConfig, p, f); err != nil { - return fmt.Errorf("failed to types.Filepass: %w", err) - } - - if err := vars.Filepass(c.VarsConfig, p, f); err != nil { - return fmt.Errorf("failed to vars.Filepass: %w", err) - } - - return nil -} diff --git a/vendor/github.com/leonklingele/grouper/pkg/analyzer/config.go b/vendor/github.com/leonklingele/grouper/pkg/analyzer/config.go deleted file mode 100644 index b00595f9a..000000000 --- a/vendor/github.com/leonklingele/grouper/pkg/analyzer/config.go +++ /dev/null @@ -1,15 +0,0 @@ -package analyzer - -import ( - "github.com/leonklingele/grouper/pkg/analyzer/consts" - "github.com/leonklingele/grouper/pkg/analyzer/imports" - "github.com/leonklingele/grouper/pkg/analyzer/types" - "github.com/leonklingele/grouper/pkg/analyzer/vars" -) - -type Config struct { - ConstsConfig *consts.Config - ImportsConfig *imports.Config - TypesConfig *types.Config - VarsConfig *vars.Config -} diff --git a/vendor/github.com/leonklingele/grouper/pkg/analyzer/consts/analyzer.go b/vendor/github.com/leonklingele/grouper/pkg/analyzer/consts/analyzer.go deleted file mode 100644 
index e4e04c127..000000000 --- a/vendor/github.com/leonklingele/grouper/pkg/analyzer/consts/analyzer.go +++ /dev/null @@ -1,19 +0,0 @@ -package consts - -import ( - "go/ast" - "go/token" - - "github.com/leonklingele/grouper/pkg/analyzer/globals" - - "golang.org/x/tools/go/analysis" -) - -// https://go.dev/ref/spec#Constant_declarations - -func Filepass(c *Config, p *analysis.Pass, f *ast.File) error { - return globals.Filepass( - p, f, - token.CONST, c.RequireSingleConst, c.RequireGrouping, - ) -} diff --git a/vendor/github.com/leonklingele/grouper/pkg/analyzer/consts/config.go b/vendor/github.com/leonklingele/grouper/pkg/analyzer/consts/config.go deleted file mode 100644 index aeeab40c7..000000000 --- a/vendor/github.com/leonklingele/grouper/pkg/analyzer/consts/config.go +++ /dev/null @@ -1,6 +0,0 @@ -package consts - -type Config struct { - RequireSingleConst bool // Require the use of a single global 'const' declaration only - RequireGrouping bool // Require the use of grouped global 'const' declarations -} diff --git a/vendor/github.com/leonklingele/grouper/pkg/analyzer/flags.go b/vendor/github.com/leonklingele/grouper/pkg/analyzer/flags.go deleted file mode 100644 index 42447cbef..000000000 --- a/vendor/github.com/leonklingele/grouper/pkg/analyzer/flags.go +++ /dev/null @@ -1,37 +0,0 @@ -package analyzer - -import ( - "flag" -) - -const ( - FlagNameConstRequireSingleConst = "const-require-single-const" - FlagNameConstRequireGrouping = "const-require-grouping" - - FlagNameImportRequireSingleImport = "import-require-single-import" - FlagNameImportRequireGrouping = "import-require-grouping" - - FlagNameTypeRequireSingleType = "type-require-single-type" - FlagNameTypeRequireGrouping = "type-require-grouping" - - FlagNameVarRequireSingleVar = "var-require-single-var" - FlagNameVarRequireGrouping = "var-require-grouping" -) - -func Flags() flag.FlagSet { - fs := flag.NewFlagSet(Name, flag.ExitOnError) - - fs.Bool(FlagNameConstRequireSingleConst, false, "require the use of a single global 'const' declaration only") - fs.Bool(FlagNameConstRequireGrouping, false, "require the use of grouped global 'const' declarations") - - fs.Bool(FlagNameImportRequireSingleImport, false, "require the use of a single 'import' declaration only") - fs.Bool(FlagNameImportRequireGrouping, false, "require the use of grouped 'import' declarations") - - fs.Bool(FlagNameTypeRequireSingleType, false, "require the use of a single global 'type' declaration only") - fs.Bool(FlagNameTypeRequireGrouping, false, "require the use of grouped global 'type' declarations") - - fs.Bool(FlagNameVarRequireSingleVar, false, "require the use of a single global 'var' declaration only") - fs.Bool(FlagNameVarRequireGrouping, false, "require the use of grouped global 'var' declarations") - - return *fs -} diff --git a/vendor/github.com/leonklingele/grouper/pkg/analyzer/globals/analyzer.go b/vendor/github.com/leonklingele/grouper/pkg/analyzer/globals/analyzer.go deleted file mode 100644 index 15940a480..000000000 --- a/vendor/github.com/leonklingele/grouper/pkg/analyzer/globals/analyzer.go +++ /dev/null @@ -1,105 +0,0 @@ -package globals - -import ( - "fmt" - "go/ast" - "go/token" - - "golang.org/x/tools/go/analysis" -) - -type Global struct { - Decl *ast.GenDecl - IsGroup bool -} - -func Filepass( - p *analysis.Pass, f *ast.File, - tkn token.Token, requireSingle, requireGrouping bool, -) error { - var globals []*Global - for _, decl := range f.Decls { - genDecl, ok := decl.(*ast.GenDecl) - if !ok { - continue - } - - if genDecl.Tok == 
tkn { - globals = append(globals, &Global{ - Decl: genDecl, - IsGroup: genDecl.Lparen != 0, - }) - } - } - - numGlobals := len(globals) - if numGlobals == 0 { - // Bail out early - return nil - } - - if requireSingle && numGlobals > 1 { - msg := fmt.Sprintf("should only use a single global '%s' declaration, %d found", tkn.String(), numGlobals) - dups := globals[1:] - firstdup := dups[0] - decl := firstdup.Decl - - report := analysis.Diagnostic{ //nolint:exhaustivestruct // we do not need all fields - Pos: decl.Pos(), - End: decl.End(), - Message: msg, - // TODO(leon): Suggest fix - } - - if len(dups) > 1 { - report.Related = toRelated(dups[1:]) - } - - p.Report(report) - } - - if requireGrouping { - var ungrouped []*Global - for _, g := range globals { - if !g.IsGroup { - ungrouped = append(ungrouped, g) - } - } - - if numUngrouped := len(ungrouped); numUngrouped != 0 { - msg := fmt.Sprintf("should only use grouped global '%s' declarations", tkn.String()) - firstmatch := ungrouped[0] - decl := firstmatch.Decl - - report := analysis.Diagnostic{ //nolint:exhaustivestruct // we do not need all fields - Pos: decl.Pos(), - End: decl.End(), - Message: msg, - // TODO(leon): Suggest fix - } - - if numUngrouped > 1 { - report.Related = toRelated(ungrouped[1:]) - } - - p.Report(report) - } - } - - return nil -} - -func toRelated(globals []*Global) []analysis.RelatedInformation { - related := make([]analysis.RelatedInformation, 0, len(globals)) - for _, g := range globals { - decl := g.Decl - - related = append(related, analysis.RelatedInformation{ - Pos: decl.Pos(), - End: decl.End(), - Message: "found here", - }) - } - - return related -} diff --git a/vendor/github.com/leonklingele/grouper/pkg/analyzer/imports/analyzer.go b/vendor/github.com/leonklingele/grouper/pkg/analyzer/imports/analyzer.go deleted file mode 100644 index b545f00c0..000000000 --- a/vendor/github.com/leonklingele/grouper/pkg/analyzer/imports/analyzer.go +++ /dev/null @@ -1,103 +0,0 @@ -package imports - -import ( - "fmt" - "go/ast" - "go/token" - - "golang.org/x/tools/go/analysis" -) - -// https://go.dev/ref/spec#Import_declarations - -type Import struct { - Decl *ast.GenDecl - IsGroup bool -} - -func Filepass(c *Config, p *analysis.Pass, f *ast.File) error { - var imports []*Import - ast.Inspect(f, func(n ast.Node) bool { - if decl, ok := n.(*ast.GenDecl); ok { - if decl.Tok == token.IMPORT { - imports = append(imports, &Import{ - Decl: decl, - IsGroup: decl.Lparen != 0, - }) - } - } - - return true - }) - - numImports := len(imports) - if numImports == 0 { - // Bail out early - return nil - } - - if c.RequireSingleImport && numImports > 1 { - msg := fmt.Sprintf("should only use a single 'import' declaration, %d found", numImports) - dups := imports[1:] - firstdup := dups[0] - decl := firstdup.Decl - - report := analysis.Diagnostic{ //nolint:exhaustivestruct // we do not need all fields - Pos: decl.Pos(), - End: decl.End(), - Message: msg, - // TODO(leon): Suggest fix - } - - if len(dups) > 1 { - report.Related = toRelated(dups[1:]) - } - - p.Report(report) - } - - if c.RequireGrouping { - var ungroupedImports []*Import - for _, imp := range imports { - if !imp.IsGroup { - ungroupedImports = append(ungroupedImports, imp) - } - } - - if numUngroupedImports := len(ungroupedImports); numUngroupedImports != 0 { - msg := "should only use grouped 'import' declarations" - firstmatch := ungroupedImports[0] - decl := firstmatch.Decl - - report := analysis.Diagnostic{ //nolint:exhaustivestruct // we do not need all fields - Pos: 
decl.Pos(), - End: decl.End(), - Message: msg, - // TODO(leon): Suggest fix - } - - if numUngroupedImports > 1 { - report.Related = toRelated(ungroupedImports[1:]) - } - - p.Report(report) - } - } - - return nil -} - -func toRelated(imports []*Import) []analysis.RelatedInformation { - related := make([]analysis.RelatedInformation, 0, len(imports)) - for _, imp := range imports { - decl := imp.Decl - - related = append(related, analysis.RelatedInformation{ - Pos: decl.Pos(), - End: decl.End(), - Message: "found here", - }) - } - - return related -} diff --git a/vendor/github.com/leonklingele/grouper/pkg/analyzer/imports/config.go b/vendor/github.com/leonklingele/grouper/pkg/analyzer/imports/config.go deleted file mode 100644 index 6a6971b4a..000000000 --- a/vendor/github.com/leonklingele/grouper/pkg/analyzer/imports/config.go +++ /dev/null @@ -1,6 +0,0 @@ -package imports - -type Config struct { - RequireSingleImport bool // Require the use of a single 'import' declaration only - RequireGrouping bool // Require the use of grouped 'import' declarations -} diff --git a/vendor/github.com/leonklingele/grouper/pkg/analyzer/types/analyzer.go b/vendor/github.com/leonklingele/grouper/pkg/analyzer/types/analyzer.go deleted file mode 100644 index 63bbab33b..000000000 --- a/vendor/github.com/leonklingele/grouper/pkg/analyzer/types/analyzer.go +++ /dev/null @@ -1,19 +0,0 @@ -package types - -import ( - "go/ast" - "go/token" - - "github.com/leonklingele/grouper/pkg/analyzer/globals" - - "golang.org/x/tools/go/analysis" -) - -// https://go.dev/ref/spec#Type_declarations - -func Filepass(c *Config, p *analysis.Pass, f *ast.File) error { - return globals.Filepass( - p, f, - token.TYPE, c.RequireSingleType, c.RequireGrouping, - ) -} diff --git a/vendor/github.com/leonklingele/grouper/pkg/analyzer/types/config.go b/vendor/github.com/leonklingele/grouper/pkg/analyzer/types/config.go deleted file mode 100644 index e24cef9da..000000000 --- a/vendor/github.com/leonklingele/grouper/pkg/analyzer/types/config.go +++ /dev/null @@ -1,6 +0,0 @@ -package types - -type Config struct { - RequireSingleType bool // Require the use of a single global 'type' declaration only - RequireGrouping bool // Require the use of grouped global 'type' declarations -} diff --git a/vendor/github.com/leonklingele/grouper/pkg/analyzer/vars/analyzer.go b/vendor/github.com/leonklingele/grouper/pkg/analyzer/vars/analyzer.go deleted file mode 100644 index 20c781223..000000000 --- a/vendor/github.com/leonklingele/grouper/pkg/analyzer/vars/analyzer.go +++ /dev/null @@ -1,19 +0,0 @@ -package vars - -import ( - "go/ast" - "go/token" - - "github.com/leonklingele/grouper/pkg/analyzer/globals" - - "golang.org/x/tools/go/analysis" -) - -// https://go.dev/ref/spec#Variable_declarations - -func Filepass(c *Config, p *analysis.Pass, f *ast.File) error { - return globals.Filepass( - p, f, - token.VAR, c.RequireSingleVar, c.RequireGrouping, - ) -} diff --git a/vendor/github.com/leonklingele/grouper/pkg/analyzer/vars/config.go b/vendor/github.com/leonklingele/grouper/pkg/analyzer/vars/config.go deleted file mode 100644 index 4c7c1d838..000000000 --- a/vendor/github.com/leonklingele/grouper/pkg/analyzer/vars/config.go +++ /dev/null @@ -1,6 +0,0 @@ -package vars - -type Config struct { - RequireSingleVar bool // Require the use of a single global 'var' declaration only - RequireGrouping bool // Require the use of grouped global 'var' declarations -} diff --git a/vendor/github.com/lufeee/execinquery/.gitignore b/vendor/github.com/lufeee/execinquery/.gitignore 
deleted file mode 100644 index 00e1abc31..000000000 --- a/vendor/github.com/lufeee/execinquery/.gitignore +++ /dev/null @@ -1 +0,0 @@ -execinquery diff --git a/vendor/github.com/lufeee/execinquery/LICENSE b/vendor/github.com/lufeee/execinquery/LICENSE deleted file mode 100644 index b6ab14aec..000000000 --- a/vendor/github.com/lufeee/execinquery/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -MIT License - -Copyright (c) 2022 lufe - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. diff --git a/vendor/github.com/lufeee/execinquery/README.md b/vendor/github.com/lufeee/execinquery/README.md deleted file mode 100644 index 38fa7c8b9..000000000 --- a/vendor/github.com/lufeee/execinquery/README.md +++ /dev/null @@ -1,76 +0,0 @@ -# execinquery - a simple query string checker in Query function -[![Go Matrix](https://github.com/lufeee/execinquery/actions/workflows/go-cross.yml/badge.svg?branch=main)](https://github.com/lufeee/execinquery/actions/workflows/go-cross.yml) -[![Go lint](https://github.com/lufeee/execinquery/actions/workflows/lint.yml/badge.svg?branch=main)](https://github.com/lufeee/execinquery/actions/workflows/lint.yml) -[![MIT License](http://img.shields.io/badge/license-MIT-blue.svg?style=flat)](LICENSE) -## About - -execinquery is a linter about query string checker in Query function which reads your Go src files and -warnings it finds. - -## Installation - -```sh -go install github.com/lufeee/execinquery/cmd/execinquery -``` - -## Usage -```go -package main - -import ( - "database/sql" - "log" -) - -func main() { - db, err := sql.Open("mysql", "test:test@tcp(test:3306)/test") - if err != nil { - log.Fatal("Database Connect Error: ", err) - } - defer db.Close() - - test := "a" - _, err = db.Query("Update * FROM hoge where id = ?", test) - if err != nil { - log.Fatal("Query Error: ", err) - } - -} -``` - -```console -go vet -vettool=$(which execinquery) ./... - -# command-line-arguments -./a.go:16:11: Use Exec instead of Query to execute `UPDATE` query -``` - -## CI - -### CircleCI - -```yaml -- run: - name: install execinquery - command: go install github.com/lufeee/execinquery - -- run: - name: run execinquery - command: go vet -vettool=`which execinquery` ./... -``` - -### GitHub Actions - -```yaml -- name: install execinquery - run: go install github.com/lufeee/execinquery - -- name: run execinquery - run: go vet -vettool=`which execinquery` ./... -``` - -### License - -MIT license. - -
diff --git a/vendor/github.com/lufeee/execinquery/execinquery.go b/vendor/github.com/lufeee/execinquery/execinquery.go deleted file mode 100644 index c37dc1701..000000000 --- a/vendor/github.com/lufeee/execinquery/execinquery.go +++ /dev/null @@ -1,135 +0,0 @@ -package execinquery - -import ( - "go/ast" - "regexp" - "strings" - - "golang.org/x/tools/go/analysis" - "golang.org/x/tools/go/analysis/passes/inspect" - "golang.org/x/tools/go/ast/inspector" -) - -const doc = "execinquery is a linter about query string checker in Query function which reads your Go src files and warning it finds" - -// Analyzer is checking database/sql pkg Query's function -var Analyzer = &analysis.Analyzer{ - Name: "execinquery", - Doc: doc, - Run: newLinter().run, - Requires: []*analysis.Analyzer{ - inspect.Analyzer, - }, -} - -type linter struct { - commentExp *regexp.Regexp - multilineCommentExp *regexp.Regexp -} - -func newLinter() *linter { - return &linter{ - commentExp: regexp.MustCompile(`--[^\n]*\n`), - multilineCommentExp: regexp.MustCompile(`(?s)/\*.*?\*/`), - } -} - -func (l linter) run(pass *analysis.Pass) (interface{}, error) { - result := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) - - nodeFilter := []ast.Node{ - (*ast.CallExpr)(nil), - } - - result.Preorder(nodeFilter, func(n ast.Node) { - switch n := n.(type) { - case *ast.CallExpr: - selector, ok := n.Fun.(*ast.SelectorExpr) - if !ok { - return - } - - if pass.TypesInfo == nil || pass.TypesInfo.Uses[selector.Sel] == nil || pass.TypesInfo.Uses[selector.Sel].Pkg() == nil { - return - } - - if "database/sql" != pass.TypesInfo.Uses[selector.Sel].Pkg().Path() { - return - } - - if !strings.Contains(selector.Sel.Name, "Query") { - return - } - - replacement := "Exec" - var i int // the index of the query argument - if strings.Contains(selector.Sel.Name, "Context") { - replacement += "Context" - i = 1 - } - - if len(n.Args) <= i { - return - } - - query := l.getQueryString(n.Args[i]) - if query == "" { - return - } - - query = strings.TrimSpace(l.cleanValue(query)) - parts := strings.SplitN(query, " ", 2) - cmd := strings.ToUpper(parts[0]) - - if strings.HasPrefix(cmd, "SELECT") { - return - } - - pass.Reportf(n.Fun.Pos(), "Use %s instead of %s to execute `%s` query", replacement, selector.Sel.Name, cmd) - } - }) - - return nil, nil -} - -func (l linter) cleanValue(s string) string { - v := strings.NewReplacer(`"`, "", "`", "").Replace(s) - - v = l.multilineCommentExp.ReplaceAllString(v, "") - - return l.commentExp.ReplaceAllString(v, "") -} - -func (l linter) getQueryString(exp interface{}) string { - switch e := exp.(type) { - case *ast.AssignStmt: - var v string - for _, stmt := range e.Rhs { - v += l.cleanValue(l.getQueryString(stmt)) - } - return v - - case *ast.BasicLit: - return e.Value - - case *ast.ValueSpec: - var v string - for _, value := range e.Values { - v += l.cleanValue(l.getQueryString(value)) - } - return v - - case *ast.Ident: - if e.Obj == nil { - return "" - } - return l.getQueryString(e.Obj.Decl) - - case *ast.BinaryExpr: - v := l.cleanValue(l.getQueryString(e.X)) - v += l.cleanValue(l.getQueryString(e.Y)) - return v - } - - return "" -} diff --git a/vendor/github.com/macabu/inamedparam/.gitignore b/vendor/github.com/macabu/inamedparam/.gitignore deleted file mode 100644 index f8d51e94c..000000000 --- a/vendor/github.com/macabu/inamedparam/.gitignore +++ /dev/null @@ -1,22 +0,0 @@ -# If you prefer the allow list template instead of the deny list, see community template: -# 
https://github.com/github/gitignore/blob/main/community/Golang/Go.AllowList.gitignore -# -# Binaries for programs and plugins -*.exe -*.exe~ -*.dll -*.so -*.dylib -inamedparam - -# Test binary, built with `go test -c` -*.test - -# Output of the go coverage tool, specifically when used with LiteIDE -*.out - -# Dependency directories (remove the comment below to include it) -# vendor/ - -# Go workspace file -go.work diff --git a/vendor/github.com/macabu/inamedparam/.golangci.yml b/vendor/github.com/macabu/inamedparam/.golangci.yml deleted file mode 100644 index f0efa1cb6..000000000 --- a/vendor/github.com/macabu/inamedparam/.golangci.yml +++ /dev/null @@ -1,33 +0,0 @@ -run: - deadline: 30s - -linters: - enable-all: true - disable: - - cyclop - - deadcode - - depguard - - exhaustivestruct - - exhaustruct - - forcetypeassert - - gochecknoglobals - - gocognit - - golint - - ifshort - - interfacer - - maligned - - nilnil - - nosnakecase - - paralleltest - - scopelint - - structcheck - - varcheck - -linters-settings: - gci: - sections: - - standard - - default - - prefix(github.com/macabu/inamedparam) - section-separators: - - newLine diff --git a/vendor/github.com/macabu/inamedparam/LICENSE-MIT b/vendor/github.com/macabu/inamedparam/LICENSE-MIT deleted file mode 100644 index b95f480ee..000000000 --- a/vendor/github.com/macabu/inamedparam/LICENSE-MIT +++ /dev/null @@ -1,21 +0,0 @@ -MIT License - -Copyright (c) 2023 Matheus Macabu - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. diff --git a/vendor/github.com/macabu/inamedparam/README.md b/vendor/github.com/macabu/inamedparam/README.md deleted file mode 100644 index 3336cb950..000000000 --- a/vendor/github.com/macabu/inamedparam/README.md +++ /dev/null @@ -1,38 +0,0 @@ -# inamedparam - -A linter that reports interfaces with unnamed method parameters. - -## Flags/Config -```sh --skip-single-param - skip interfaces with a single unnamed parameter -``` - -## Usage - -### Standalone -You can run it standalone through `go vet`. - -You must install the binary to your `$GOBIN` folder like so: -```sh -$ go install github.com/macabu/inamedparam/cmd/inamedparam -``` - -And then navigate to your Go project's root folder, where can run `go vet` in the following way: -```sh -$ go vet -vettool=$(which inamedparam) ./... -``` - -### golangci-lint -`inamedparam` was added as a linter to `golangci-lint` on version `v1.55.0`. It is disabled by default. 
- -To enable it, you can add it to your `.golangci.yml` file, as such: -```yaml -run: - deadline: 30s - -linters: - disable-all: true - enable: - - inamedparam -``` diff --git a/vendor/github.com/macabu/inamedparam/inamedparam.go b/vendor/github.com/macabu/inamedparam/inamedparam.go deleted file mode 100644 index 8ba7fe188..000000000 --- a/vendor/github.com/macabu/inamedparam/inamedparam.go +++ /dev/null @@ -1,94 +0,0 @@ -package inamedparam - -import ( - "flag" - "go/ast" - - "golang.org/x/tools/go/analysis" - "golang.org/x/tools/go/analysis/passes/inspect" - "golang.org/x/tools/go/ast/inspector" -) - -const ( - analyzerName = "inamedparam" - - flagSkipSingleParam = "skip-single-param" -) - -var Analyzer = &analysis.Analyzer{ - Name: analyzerName, - Doc: "reports interfaces with unnamed method parameters", - Run: run, - Flags: flags(), - Requires: []*analysis.Analyzer{ - inspect.Analyzer, - }, -} - -func flags() flag.FlagSet { - flags := flag.NewFlagSet(analyzerName, flag.ExitOnError) - - flags.Bool(flagSkipSingleParam, false, "skip interface methods with a single unnamed parameter") - - return *flags -} - -func run(pass *analysis.Pass) (interface{}, error) { - inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) - - types := []ast.Node{ - &ast.InterfaceType{}, - } - - skipSingleParam := pass.Analyzer.Flags.Lookup(flagSkipSingleParam).Value.(flag.Getter).Get().(bool) - - inspect.Preorder(types, func(n ast.Node) { - interfaceType, ok := n.(*ast.InterfaceType) - if !ok || interfaceType == nil || interfaceType.Methods == nil { - return - } - - for _, method := range interfaceType.Methods.List { - interfaceFunc, ok := method.Type.(*ast.FuncType) - if !ok || interfaceFunc == nil || interfaceFunc.Params == nil { - continue - } - - // Improvement: add test case to reproduce this. Help wanted. - if len(method.Names) == 0 { - continue - } - - methodName := method.Names[0].Name - - if skipSingleParam && len(interfaceFunc.Params.List) == 1 { - continue - } - - for _, param := range interfaceFunc.Params.List { - if param.Names == nil { - var builtParamType string - - switch paramType := param.Type.(type) { - case *ast.SelectorExpr: - if ident := paramType.X.(*ast.Ident); ident != nil { - builtParamType += ident.Name + "." - } - - builtParamType += paramType.Sel.Name - case *ast.Ident: - builtParamType = paramType.Name - } - - if builtParamType != "" { - pass.Reportf(param.Pos(), "interface method %v must have named param for type %v", methodName, builtParamType) - } else { - pass.Reportf(param.Pos(), "interface method %v must have all named params", methodName) - } - } - } - } - }) - - return nil, nil -} diff --git a/vendor/github.com/magiconair/properties/.gitignore b/vendor/github.com/magiconair/properties/.gitignore deleted file mode 100644 index e7081ff52..000000000 --- a/vendor/github.com/magiconair/properties/.gitignore +++ /dev/null @@ -1,6 +0,0 @@ -*.sublime-project -*.sublime-workspace -*.un~ -*.swp -.idea/ -*.iml diff --git a/vendor/github.com/magiconair/properties/CHANGELOG.md b/vendor/github.com/magiconair/properties/CHANGELOG.md deleted file mode 100644 index 842e8e24f..000000000 --- a/vendor/github.com/magiconair/properties/CHANGELOG.md +++ /dev/null @@ -1,205 +0,0 @@ -## Changelog - -### [1.8.7](https://github.com/magiconair/properties/tree/v1.8.7) - 08 Dec 2022 - - * [PR #65](https://github.com/magiconair/properties/pull/65): Speedup Merge - - Thanks to [@AdityaVallabh](https://github.com/AdityaVallabh) for the patch. 
- - * [PR #66](https://github.com/magiconair/properties/pull/66): use github actions - -### [1.8.6](https://github.com/magiconair/properties/tree/v1.8.6) - 23 Feb 2022 - - * [PR #57](https://github.com/magiconair/properties/pull/57):Fix "unreachable code" lint error - - Thanks to [@ellie](https://github.com/ellie) for the patch. - - * [PR #63](https://github.com/magiconair/properties/pull/63): Make TestMustGetParsedDuration backwards compatible - - This patch ensures that the `TestMustGetParsedDuration` still works with `go1.3` to make the - author happy until it affects real users. - - Thanks to [@maage](https://github.com/maage) for the patch. - -### [1.8.5](https://github.com/magiconair/properties/tree/v1.8.5) - 24 Mar 2021 - - * [PR #55](https://github.com/magiconair/properties/pull/55): Fix: Encoding Bug in Comments - - When reading comments \ are loaded correctly, but when writing they are then - replaced by \\. This leads to wrong comments when writing and reading multiple times. - - Thanks to [@doxsch](https://github.com/doxsch) for the patch. - -### [1.8.4](https://github.com/magiconair/properties/tree/v1.8.4) - 23 Sep 2020 - - * [PR #50](https://github.com/magiconair/properties/pull/50): enhance error message for circular references - - Thanks to [@sriv](https://github.com/sriv) for the patch. - -### [1.8.3](https://github.com/magiconair/properties/tree/v1.8.3) - 14 Sep 2020 - - * [PR #49](https://github.com/magiconair/properties/pull/49): Include the key in error message causing the circular reference - - The change is include the key in the error message which is causing the circular - reference when parsing/loading the properties files. - - Thanks to [@haroon-sheikh](https://github.com/haroon-sheikh) for the patch. - -### [1.8.2](https://github.com/magiconair/properties/tree/v1.8.2) - 25 Aug 2020 - - * [PR #36](https://github.com/magiconair/properties/pull/36): Escape backslash on write - - This patch ensures that backslashes are escaped on write. Existing applications which - rely on the old behavior may need to be updated. - - Thanks to [@apesternikov](https://github.com/apesternikov) for the patch. - - * [PR #42](https://github.com/magiconair/properties/pull/42): Made Content-Type check whitespace agnostic in LoadURL() - - Thanks to [@aliras1](https://github.com/aliras1) for the patch. - - * [PR #41](https://github.com/magiconair/properties/pull/41): Make key/value separator configurable on Write() - - Thanks to [@mkjor](https://github.com/mkjor) for the patch. - - * [PR #40](https://github.com/magiconair/properties/pull/40): Add method to return a sorted list of keys - - Thanks to [@mkjor](https://github.com/mkjor) for the patch. - -### [1.8.1](https://github.com/magiconair/properties/tree/v1.8.1) - 10 May 2019 - - * [PR #35](https://github.com/magiconair/properties/pull/35): Close body always after request - - This patch ensures that in `LoadURL` the response body is always closed. - - Thanks to [@liubog2008](https://github.com/liubog2008) for the patch. - -### [1.8](https://github.com/magiconair/properties/tree/v1.8) - 15 May 2018 - - * [PR #26](https://github.com/magiconair/properties/pull/26): Disable expansion during loading - - This adds the option to disable property expansion during loading. - - Thanks to [@kmala](https://github.com/kmala) for the patch. - -### [1.7.6](https://github.com/magiconair/properties/tree/v1.7.6) - 14 Feb 2018 - - * [PR #29](https://github.com/magiconair/properties/pull/29): Reworked expansion logic to handle more complex cases. 
- - See PR for an example. - - Thanks to [@yobert](https://github.com/yobert) for the fix. - -### [1.7.5](https://github.com/magiconair/properties/tree/v1.7.5) - 13 Feb 2018 - - * [PR #28](https://github.com/magiconair/properties/pull/28): Support duplicate expansions in the same value - - Values which expand the same key multiple times (e.g. `key=${a} ${a}`) will no longer fail - with a `circular reference error`. - - Thanks to [@yobert](https://github.com/yobert) for the fix. - -### [1.7.4](https://github.com/magiconair/properties/tree/v1.7.4) - 31 Oct 2017 - - * [Issue #23](https://github.com/magiconair/properties/issues/23): Ignore blank lines with whitespaces - - * [PR #24](https://github.com/magiconair/properties/pull/24): Update keys when DisableExpansion is enabled - - Thanks to [@mgurov](https://github.com/mgurov) for the fix. - -### [1.7.3](https://github.com/magiconair/properties/tree/v1.7.3) - 10 Jul 2017 - - * [Issue #17](https://github.com/magiconair/properties/issues/17): Add [SetValue()](http://godoc.org/github.com/magiconair/properties#Properties.SetValue) method to set values generically - * [Issue #22](https://github.com/magiconair/properties/issues/22): Add [LoadMap()](http://godoc.org/github.com/magiconair/properties#LoadMap) function to load properties from a string map - -### [1.7.2](https://github.com/magiconair/properties/tree/v1.7.2) - 20 Mar 2017 - - * [Issue #15](https://github.com/magiconair/properties/issues/15): Drop gocheck dependency - * [PR #21](https://github.com/magiconair/properties/pull/21): Add [Map()](http://godoc.org/github.com/magiconair/properties#Properties.Map) and [FilterFunc()](http://godoc.org/github.com/magiconair/properties#Properties.FilterFunc) - -### [1.7.1](https://github.com/magiconair/properties/tree/v1.7.1) - 13 Jan 2017 - - * [Issue #14](https://github.com/magiconair/properties/issues/14): Decouple TestLoadExpandedFile from `$USER` - * [PR #12](https://github.com/magiconair/properties/pull/12): Load from files and URLs - * [PR #16](https://github.com/magiconair/properties/pull/16): Keep gofmt happy - * [PR #18](https://github.com/magiconair/properties/pull/18): Fix Delete() function - -### [1.7.0](https://github.com/magiconair/properties/tree/v1.7.0) - 20 Mar 2016 - - * [Issue #10](https://github.com/magiconair/properties/issues/10): Add [LoadURL,LoadURLs,MustLoadURL,MustLoadURLs](http://godoc.org/github.com/magiconair/properties#LoadURL) method to load properties from a URL. - * [Issue #11](https://github.com/magiconair/properties/issues/11): Add [LoadString,MustLoadString](http://godoc.org/github.com/magiconair/properties#LoadString) method to load properties from an UTF8 string. - * [PR #8](https://github.com/magiconair/properties/pull/8): Add [MustFlag](http://godoc.org/github.com/magiconair/properties#Properties.MustFlag) method to provide overrides via command line flags. (@pascaldekloe) - -### [1.6.0](https://github.com/magiconair/properties/tree/v1.6.0) - 11 Dec 2015 - - * Add [Decode](http://godoc.org/github.com/magiconair/properties#Properties.Decode) method to populate struct from properties via tags. - -### [1.5.6](https://github.com/magiconair/properties/tree/v1.5.6) - 18 Oct 2015 - - * Vendored in gopkg.in/check.v1 - -### [1.5.5](https://github.com/magiconair/properties/tree/v1.5.5) - 31 Jul 2015 - - * [PR #6](https://github.com/magiconair/properties/pull/6): Add [Delete](http://godoc.org/github.com/magiconair/properties#Properties.Delete) method to remove keys including comments. 
(@gerbenjacobs) - -### [1.5.4](https://github.com/magiconair/properties/tree/v1.5.4) - 23 Jun 2015 - - * [Issue #5](https://github.com/magiconair/properties/issues/5): Allow disabling of property expansion [DisableExpansion](http://godoc.org/github.com/magiconair/properties#Properties.DisableExpansion). When property expansion is disabled Properties become a simple key/value store and don't check for circular references. - -### [1.5.3](https://github.com/magiconair/properties/tree/v1.5.3) - 02 Jun 2015 - - * [Issue #4](https://github.com/magiconair/properties/issues/4): Maintain key order in [Filter()](http://godoc.org/github.com/magiconair/properties#Properties.Filter), [FilterPrefix()](http://godoc.org/github.com/magiconair/properties#Properties.FilterPrefix) and [FilterRegexp()](http://godoc.org/github.com/magiconair/properties#Properties.FilterRegexp) - -### [1.5.2](https://github.com/magiconair/properties/tree/v1.5.2) - 10 Apr 2015 - - * [Issue #3](https://github.com/magiconair/properties/issues/3): Don't print comments in [WriteComment()](http://godoc.org/github.com/magiconair/properties#Properties.WriteComment) if they are all empty - * Add clickable links to README - -### [1.5.1](https://github.com/magiconair/properties/tree/v1.5.1) - 08 Dec 2014 - - * Added [GetParsedDuration()](http://godoc.org/github.com/magiconair/properties#Properties.GetParsedDuration) and [MustGetParsedDuration()](http://godoc.org/github.com/magiconair/properties#Properties.MustGetParsedDuration) for values specified compatible with - [time.ParseDuration()](http://golang.org/pkg/time/#ParseDuration). - -### [1.5.0](https://github.com/magiconair/properties/tree/v1.5.0) - 18 Nov 2014 - - * Added support for single and multi-line comments (reading, writing and updating) - * The order of keys is now preserved - * Calling [Set()](http://godoc.org/github.com/magiconair/properties#Properties.Set) with an empty key now silently ignores the call and does not create a new entry - * Added a [MustSet()](http://godoc.org/github.com/magiconair/properties#Properties.MustSet) method - * Migrated test library from launchpad.net/gocheck to [gopkg.in/check.v1](http://gopkg.in/check.v1) - -### [1.4.2](https://github.com/magiconair/properties/tree/v1.4.2) - 15 Nov 2014 - - * [Issue #2](https://github.com/magiconair/properties/issues/2): Fixed goroutine leak in parser which created two lexers but cleaned up only one - -### [1.4.1](https://github.com/magiconair/properties/tree/v1.4.1) - 13 Nov 2014 - - * [Issue #1](https://github.com/magiconair/properties/issues/1): Fixed bug in Keys() method which returned an empty string - -### [1.4.0](https://github.com/magiconair/properties/tree/v1.4.0) - 23 Sep 2014 - - * Added [Keys()](http://godoc.org/github.com/magiconair/properties#Properties.Keys) to get the keys - * Added [Filter()](http://godoc.org/github.com/magiconair/properties#Properties.Filter), [FilterRegexp()](http://godoc.org/github.com/magiconair/properties#Properties.FilterRegexp) and [FilterPrefix()](http://godoc.org/github.com/magiconair/properties#Properties.FilterPrefix) to get a subset of the properties - -### [1.3.0](https://github.com/magiconair/properties/tree/v1.3.0) - 18 Mar 2014 - -* Added support for time.Duration -* Made MustXXX() failure behavior configurable (log.Fatal, panic, custom) -* Changed default of MustXXX() failure from panic to log.Fatal - -### [1.2.0](https://github.com/magiconair/properties/tree/v1.2.0) - 05 Mar
2014 - -* Added MustGet... functions -* Added support for int and uint with range checks on 32 bit platforms - -### [1.1.0](https://github.com/magiconair/properties/tree/v1.1.0) - 20 Jan 2014 - -* Renamed from goproperties to properties -* Added support for expansion of environment vars in - filenames and value expressions -* Fixed bug where value expressions were not at the - start of the string - -### [1.0.0](https://github.com/magiconair/properties/tree/v1.0.0) - 7 Jan 2014 - -* Initial release diff --git a/vendor/github.com/magiconair/properties/LICENSE.md b/vendor/github.com/magiconair/properties/LICENSE.md deleted file mode 100644 index 79c87e3e6..000000000 --- a/vendor/github.com/magiconair/properties/LICENSE.md +++ /dev/null @@ -1,24 +0,0 @@ -Copyright (c) 2013-2020, Frank Schroeder - -All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are met: - - * Redistributions of source code must retain the above copyright notice, this - list of conditions and the following disclaimer. - - * Redistributions in binary form must reproduce the above copyright notice, - this list of conditions and the following disclaimer in the documentation - and/or other materials provided with the distribution. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND -ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED -WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR -ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES -(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND -ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS -SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/vendor/github.com/magiconair/properties/README.md b/vendor/github.com/magiconair/properties/README.md deleted file mode 100644 index e2edda025..000000000 --- a/vendor/github.com/magiconair/properties/README.md +++ /dev/null @@ -1,128 +0,0 @@ -[![](https://img.shields.io/github/tag/magiconair/properties.svg?style=flat-square&label=release)](https://github.com/magiconair/properties/releases) -[![Travis CI Status](https://img.shields.io/travis/magiconair/properties.svg?branch=master&style=flat-square&label=travis)](https://travis-ci.org/magiconair/properties) -[![License](https://img.shields.io/badge/License-BSD%202--Clause-orange.svg?style=flat-square)](https://raw.githubusercontent.com/magiconair/properties/master/LICENSE) -[![GoDoc](http://img.shields.io/badge/godoc-reference-5272B4.svg?style=flat-square)](http://godoc.org/github.com/magiconair/properties) - -# Overview - -#### Please run `git pull --tags` to update the tags. See [below](#updated-git-tags) why. - -properties is a Go library for reading and writing properties files. - -It supports reading from multiple files or URLs and Spring style recursive -property expansion of expressions like `${key}` to their corresponding value. -Value expressions can refer to other keys like in `${key}` or to environment -variables like in `${USER}`. Filenames can also contain environment variables -like in `/home/${USER}/myapp.properties`. 
- -Properties can be decoded into structs, maps, arrays and values through -struct tags. - -Comments and the order of keys are preserved. Comments can be modified -and can be written to the output. - -The properties library supports both ISO-8859-1 and UTF-8 encoded data. - -Starting from version 1.3.0 the behavior of the MustXXX() functions is -configurable by providing a custom `ErrorHandler` function. The default has -changed from `panic` to `log.Fatal` but this is configurable and custom -error handling functions can be provided. See the package documentation for -details. - -Read the full documentation on [![GoDoc](http://img.shields.io/badge/godoc-reference-5272B4.svg?style=flat-square)](http://godoc.org/github.com/magiconair/properties) - -## Getting Started - -```go -import ( - "flag" - "github.com/magiconair/properties" -) - -func main() { - // init from a file - p := properties.MustLoadFile("${HOME}/config.properties", properties.UTF8) - - // or multiple files - p = properties.MustLoadFiles([]string{ - "${HOME}/config.properties", - "${HOME}/config-${USER}.properties", - }, properties.UTF8, true) - - // or from a map - p = properties.LoadMap(map[string]string{"key": "value", "abc": "def"}) - - // or from a string - p = properties.MustLoadString("key=value\nabc=def") - - // or from a URL - p = properties.MustLoadURL("http://host/path") - - // or from multiple URLs - p = properties.MustLoadURL([]string{ - "http://host/config", - "http://host/config-${USER}", - }, true) - - // or from flags - p.MustFlag(flag.CommandLine) - - // get values through getters - host := p.MustGetString("host") - port := p.GetInt("port", 8080) - - // or through Decode - type Config struct { - Host string `properties:"host"` - Port int `properties:"port,default=9000"` - Accept []string `properties:"accept,default=image/png;image;gif"` - Timeout time.Duration `properties:"timeout,default=5s"` - } - var cfg Config - if err := p.Decode(&cfg); err != nil { - log.Fatal(err) - } -} - -``` - -## Installation and Upgrade - -``` -$ go get -u github.com/magiconair/properties -``` - -## License - -2 clause BSD license. See [LICENSE](https://github.com/magiconair/properties/blob/master/LICENSE) file for details. - -## ToDo - -* Dump contents with passwords and secrets obscured - -## Updated Git tags - -#### 13 Feb 2018 - -I realized that all of the git tags I had pushed before v1.7.5 were lightweight tags -and I've only recently learned that this doesn't play well with `git describe` 😞 - -I have replaced all lightweight tags with signed tags using this script which should -retain the commit date, name and email address. Please run `git pull --tags` to update them. - -Worst case you have to reclone the repo. - -```shell -#!/bin/bash -tag=$1 -echo "Updating $tag" -date=$(git show ${tag}^0 --format=%aD | head -1) -email=$(git show ${tag}^0 --format=%aE | head -1) -name=$(git show ${tag}^0 --format=%aN | head -1) -GIT_COMMITTER_DATE="$date" GIT_COMMITTER_NAME="$name" GIT_COMMITTER_EMAIL="$email" git tag -s -f ${tag} ${tag}^0 -m ${tag} -``` - -I apologize for the inconvenience. - -Frank - diff --git a/vendor/github.com/magiconair/properties/decode.go b/vendor/github.com/magiconair/properties/decode.go deleted file mode 100644 index 8e6aa441d..000000000 --- a/vendor/github.com/magiconair/properties/decode.go +++ /dev/null @@ -1,289 +0,0 @@ -// Copyright 2013-2022 Frank Schroeder. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
- -package properties - -import ( - "fmt" - "reflect" - "strconv" - "strings" - "time" -) - -// Decode assigns property values to exported fields of a struct. -// -// Decode traverses v recursively and returns an error if a value cannot be -// converted to the field type or a required value is missing for a field. -// -// The following type dependent decodings are used: -// -// String, boolean, numeric fields have the value of the property key assigned. -// The property key name is the name of the field. A different key and a default -// value can be set in the field's tag. Fields without default value are -// required. If the value cannot be converted to the field type an error is -// returned. -// -// time.Duration fields have the result of time.ParseDuration() assigned. -// -// time.Time fields have the vaule of time.Parse() assigned. The default layout -// is time.RFC3339 but can be set in the field's tag. -// -// Arrays and slices of string, boolean, numeric, time.Duration and time.Time -// fields have the value interpreted as a comma separated list of values. The -// individual values are trimmed of whitespace and empty values are ignored. A -// default value can be provided as a semicolon separated list in the field's -// tag. -// -// Struct fields are decoded recursively using the field name plus "." as -// prefix. The prefix (without dot) can be overridden in the field's tag. -// Default values are not supported in the field's tag. Specify them on the -// fields of the inner struct instead. -// -// Map fields must have a key of type string and are decoded recursively by -// using the field's name plus ".' as prefix and the next element of the key -// name as map key. The prefix (without dot) can be overridden in the field's -// tag. Default values are not supported. -// -// Examples: -// -// // Field is ignored. -// Field int `properties:"-"` -// -// // Field is assigned value of 'Field'. -// Field int -// -// // Field is assigned value of 'myName'. -// Field int `properties:"myName"` -// -// // Field is assigned value of key 'myName' and has a default -// // value 15 if the key does not exist. -// Field int `properties:"myName,default=15"` -// -// // Field is assigned value of key 'Field' and has a default -// // value 15 if the key does not exist. -// Field int `properties:",default=15"` -// -// // Field is assigned value of key 'date' and the date -// // is in format 2006-01-02 -// Field time.Time `properties:"date,layout=2006-01-02"` -// -// // Field is assigned the non-empty and whitespace trimmed -// // values of key 'Field' split by commas. -// Field []string -// -// // Field is assigned the non-empty and whitespace trimmed -// // values of key 'Field' split by commas and has a default -// // value ["a", "b", "c"] if the key does not exist. -// Field []string `properties:",default=a;b;c"` -// -// // Field is decoded recursively with "Field." as key prefix. -// Field SomeStruct -// -// // Field is decoded recursively with "myName." as key prefix. -// Field SomeStruct `properties:"myName"` -// -// // Field is decoded recursively with "Field." as key prefix -// // and the next dotted element of the key as map key. -// Field map[string]string -// -// // Field is decoded recursively with "myName." as key prefix -// // and the next dotted element of the key as map key. 
-// Field map[string]string `properties:"myName"` -func (p *Properties) Decode(x interface{}) error { - t, v := reflect.TypeOf(x), reflect.ValueOf(x) - if t.Kind() != reflect.Ptr || v.Elem().Type().Kind() != reflect.Struct { - return fmt.Errorf("not a pointer to struct: %s", t) - } - if err := dec(p, "", nil, nil, v); err != nil { - return err - } - return nil -} - -func dec(p *Properties, key string, def *string, opts map[string]string, v reflect.Value) error { - t := v.Type() - - // value returns the property value for key or the default if provided. - value := func() (string, error) { - if val, ok := p.Get(key); ok { - return val, nil - } - if def != nil { - return *def, nil - } - return "", fmt.Errorf("missing required key %s", key) - } - - // conv converts a string to a value of the given type. - conv := func(s string, t reflect.Type) (val reflect.Value, err error) { - var v interface{} - - switch { - case isDuration(t): - v, err = time.ParseDuration(s) - - case isTime(t): - layout := opts["layout"] - if layout == "" { - layout = time.RFC3339 - } - v, err = time.Parse(layout, s) - - case isBool(t): - v, err = boolVal(s), nil - - case isString(t): - v, err = s, nil - - case isFloat(t): - v, err = strconv.ParseFloat(s, 64) - - case isInt(t): - v, err = strconv.ParseInt(s, 10, 64) - - case isUint(t): - v, err = strconv.ParseUint(s, 10, 64) - - default: - return reflect.Zero(t), fmt.Errorf("unsupported type %s", t) - } - if err != nil { - return reflect.Zero(t), err - } - return reflect.ValueOf(v).Convert(t), nil - } - - // keydef returns the property key and the default value based on the - // name of the struct field and the options in the tag. - keydef := func(f reflect.StructField) (string, *string, map[string]string) { - _key, _opts := parseTag(f.Tag.Get("properties")) - - var _def *string - if d, ok := _opts["default"]; ok { - _def = &d - } - if _key != "" { - return _key, _def, _opts - } - return f.Name, _def, _opts - } - - switch { - case isDuration(t) || isTime(t) || isBool(t) || isString(t) || isFloat(t) || isInt(t) || isUint(t): - s, err := value() - if err != nil { - return err - } - val, err := conv(s, t) - if err != nil { - return err - } - v.Set(val) - - case isPtr(t): - return dec(p, key, def, opts, v.Elem()) - - case isStruct(t): - for i := 0; i < v.NumField(); i++ { - fv := v.Field(i) - fk, def, opts := keydef(t.Field(i)) - if !fv.CanSet() { - return fmt.Errorf("cannot set %s", t.Field(i).Name) - } - if fk == "-" { - continue - } - if key != "" { - fk = key + "." 
+ fk - } - if err := dec(p, fk, def, opts, fv); err != nil { - return err - } - } - return nil - - case isArray(t): - val, err := value() - if err != nil { - return err - } - vals := split(val, ";") - a := reflect.MakeSlice(t, 0, len(vals)) - for _, s := range vals { - val, err := conv(s, t.Elem()) - if err != nil { - return err - } - a = reflect.Append(a, val) - } - v.Set(a) - - case isMap(t): - valT := t.Elem() - m := reflect.MakeMap(t) - for postfix := range p.FilterStripPrefix(key + ".").m { - pp := strings.SplitN(postfix, ".", 2) - mk, mv := pp[0], reflect.New(valT) - if err := dec(p, key+"."+mk, nil, nil, mv); err != nil { - return err - } - m.SetMapIndex(reflect.ValueOf(mk), mv.Elem()) - } - v.Set(m) - - default: - return fmt.Errorf("unsupported type %s", t) - } - return nil -} - -// split splits a string on sep, trims whitespace of elements -// and omits empty elements -func split(s string, sep string) []string { - var a []string - for _, v := range strings.Split(s, sep) { - if v = strings.TrimSpace(v); v != "" { - a = append(a, v) - } - } - return a -} - -// parseTag parses a "key,k=v,k=v,..." -func parseTag(tag string) (key string, opts map[string]string) { - opts = map[string]string{} - for i, s := range strings.Split(tag, ",") { - if i == 0 { - key = s - continue - } - - pp := strings.SplitN(s, "=", 2) - if len(pp) == 1 { - opts[pp[0]] = "" - } else { - opts[pp[0]] = pp[1] - } - } - return key, opts -} - -func isArray(t reflect.Type) bool { return t.Kind() == reflect.Array || t.Kind() == reflect.Slice } -func isBool(t reflect.Type) bool { return t.Kind() == reflect.Bool } -func isDuration(t reflect.Type) bool { return t == reflect.TypeOf(time.Second) } -func isMap(t reflect.Type) bool { return t.Kind() == reflect.Map } -func isPtr(t reflect.Type) bool { return t.Kind() == reflect.Ptr } -func isString(t reflect.Type) bool { return t.Kind() == reflect.String } -func isStruct(t reflect.Type) bool { return t.Kind() == reflect.Struct } -func isTime(t reflect.Type) bool { return t == reflect.TypeOf(time.Time{}) } -func isFloat(t reflect.Type) bool { - return t.Kind() == reflect.Float32 || t.Kind() == reflect.Float64 -} -func isInt(t reflect.Type) bool { - return t.Kind() == reflect.Int || t.Kind() == reflect.Int8 || t.Kind() == reflect.Int16 || t.Kind() == reflect.Int32 || t.Kind() == reflect.Int64 -} -func isUint(t reflect.Type) bool { - return t.Kind() == reflect.Uint || t.Kind() == reflect.Uint8 || t.Kind() == reflect.Uint16 || t.Kind() == reflect.Uint32 || t.Kind() == reflect.Uint64 -} diff --git a/vendor/github.com/magiconair/properties/doc.go b/vendor/github.com/magiconair/properties/doc.go deleted file mode 100644 index 7c7979315..000000000 --- a/vendor/github.com/magiconair/properties/doc.go +++ /dev/null @@ -1,155 +0,0 @@ -// Copyright 2013-2022 Frank Schroeder. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Package properties provides functions for reading and writing -// ISO-8859-1 and UTF-8 encoded .properties files and has -// support for recursive property expansion. -// -// Java properties files are ISO-8859-1 encoded and use Unicode -// literals for characters outside the ISO character set. Unicode -// literals can be used in UTF-8 encoded properties files but -// aren't necessary. 
-// -// To load a single properties file use MustLoadFile(): -// -// p := properties.MustLoadFile(filename, properties.UTF8) -// -// To load multiple properties files use MustLoadFiles() -// which loads the files in the given order and merges the -// result. Missing properties files can be ignored if the -// 'ignoreMissing' flag is set to true. -// -// Filenames can contain environment variables which are expanded -// before loading. -// -// f1 := "/etc/myapp/myapp.conf" -// f2 := "/home/${USER}/myapp.conf" -// p := MustLoadFiles([]string{f1, f2}, properties.UTF8, true) -// -// All of the different key/value delimiters ' ', ':' and '=' are -// supported as well as the comment characters '!' and '#' and -// multi-line values. -// -// ! this is a comment -// # and so is this -// -// # the following expressions are equal -// key value -// key=value -// key:value -// key = value -// key : value -// key = val\ -// ue -// -// Properties stores all comments preceding a key and provides -// GetComments() and SetComments() methods to retrieve and -// update them. The convenience functions GetComment() and -// SetComment() allow access to the last comment. The -// WriteComment() method writes properties files including -// the comments and with the keys in the original order. -// This can be used for sanitizing properties files. -// -// Property expansion is recursive and circular references -// and malformed expressions are not allowed and cause an -// error. Expansion of environment variables is supported. -// -// # standard property -// key = value -// -// # property expansion: key2 = value -// key2 = ${key} -// -// # recursive expansion: key3 = value -// key3 = ${key2} -// -// # circular reference (error) -// key = ${key} -// -// # malformed expression (error) -// key = ${ke -// -// # refers to the users' home dir -// home = ${HOME} -// -// # local key takes precedence over env var: u = foo -// USER = foo -// u = ${USER} -// -// The default property expansion format is ${key} but can be -// changed by setting different pre- and postfix values on the -// Properties object. -// -// p := properties.NewProperties() -// p.Prefix = "#[" -// p.Postfix = "]#" -// -// Properties provides convenience functions for getting typed -// values with default values if the key does not exist or the -// type conversion failed. -// -// # Returns true if the value is either "1", "on", "yes" or "true" -// # Returns false for every other value and the default value if -// # the key does not exist. -// v = p.GetBool("key", false) -// -// # Returns the value if the key exists and the format conversion -// # was successful. Otherwise, the default value is returned. -// v = p.GetInt64("key", 999) -// v = p.GetUint64("key", 999) -// v = p.GetFloat64("key", 123.0) -// v = p.GetString("key", "def") -// v = p.GetDuration("key", 999) -// -// As an alternative properties may be applied with the standard -// library's flag implementation at any time. -// -// # Standard configuration -// v = flag.Int("key", 999, "help message") -// flag.Parse() -// -// # Merge p into the flag set -// p.MustFlag(flag.CommandLine) -// -// Properties provides several MustXXX() convenience functions -// which will terminate the app if an error occurs. The behavior -// of the failure is configurable and the default is to call -// log.Fatal(err). To have the MustXXX() functions panic instead -// of logging the error set a different ErrorHandler before -// you use the Properties package. 
-// -// properties.ErrorHandler = properties.PanicHandler -// -// # Will panic instead of logging an error -// p := properties.MustLoadFile("config.properties") -// -// You can also provide your own ErrorHandler function. The only requirement -// is that the error handler function must exit after handling the error. -// -// properties.ErrorHandler = func(err error) { -// fmt.Println(err) -// os.Exit(1) -// } -// -// # Will write to stdout and then exit -// p := properties.MustLoadFile("config.properties") -// -// Properties can also be loaded into a struct via the `Decode` -// method, e.g. -// -// type S struct { -// A string `properties:"a,default=foo"` -// D time.Duration `properties:"timeout,default=5s"` -// E time.Time `properties:"expires,layout=2006-01-02,default=2015-01-01"` -// } -// -// See `Decode()` method for the full documentation. -// -// The following documents provide a description of the properties -// file format. -// -// http://en.wikipedia.org/wiki/.properties -// -// http://docs.oracle.com/javase/7/docs/api/java/util/Properties.html#load%28java.io.Reader%29 -package properties diff --git a/vendor/github.com/magiconair/properties/integrate.go b/vendor/github.com/magiconair/properties/integrate.go deleted file mode 100644 index 35d0ae97b..000000000 --- a/vendor/github.com/magiconair/properties/integrate.go +++ /dev/null @@ -1,35 +0,0 @@ -// Copyright 2013-2022 Frank Schroeder. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package properties - -import "flag" - -// MustFlag sets flags that are skipped by dst.Parse when p contains -// the respective key for flag.Flag.Name. -// -// It's use is recommended with command line arguments as in: -// -// flag.Parse() -// p.MustFlag(flag.CommandLine) -func (p *Properties) MustFlag(dst *flag.FlagSet) { - m := make(map[string]*flag.Flag) - dst.VisitAll(func(f *flag.Flag) { - m[f.Name] = f - }) - dst.Visit(func(f *flag.Flag) { - delete(m, f.Name) // overridden - }) - - for name, f := range m { - v, ok := p.Get(name) - if !ok { - continue - } - - if err := f.Value.Set(v); err != nil { - ErrorHandler(err) - } - } -} diff --git a/vendor/github.com/magiconair/properties/lex.go b/vendor/github.com/magiconair/properties/lex.go deleted file mode 100644 index 3d15a1f6e..000000000 --- a/vendor/github.com/magiconair/properties/lex.go +++ /dev/null @@ -1,395 +0,0 @@ -// Copyright 2013-2022 Frank Schroeder. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. -// -// Parts of the lexer are from the template/text/parser package -// For these parts the following applies: -// -// Copyright 2011 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file of the go 1.2 -// distribution. - -package properties - -import ( - "fmt" - "strconv" - "strings" - "unicode/utf8" -) - -// item represents a token or text string returned from the scanner. -type item struct { - typ itemType // The type of this item. - pos int // The starting position, in bytes, of this item in the input string. - val string // The value of this item. -} - -func (i item) String() string { - switch { - case i.typ == itemEOF: - return "EOF" - case i.typ == itemError: - return i.val - case len(i.val) > 10: - return fmt.Sprintf("%.10q...", i.val) - } - return fmt.Sprintf("%q", i.val) -} - -// itemType identifies the type of lex items. 
-type itemType int - -const ( - itemError itemType = iota // error occurred; value is text of error - itemEOF - itemKey // a key - itemValue // a value - itemComment // a comment -) - -// defines a constant for EOF -const eof = -1 - -// permitted whitespace characters space, FF and TAB -const whitespace = " \f\t" - -// stateFn represents the state of the scanner as a function that returns the next state. -type stateFn func(*lexer) stateFn - -// lexer holds the state of the scanner. -type lexer struct { - input string // the string being scanned - state stateFn // the next lexing function to enter - pos int // current position in the input - start int // start position of this item - width int // width of last rune read from input - lastPos int // position of most recent item returned by nextItem - runes []rune // scanned runes for this item - items chan item // channel of scanned items -} - -// next returns the next rune in the input. -func (l *lexer) next() rune { - if l.pos >= len(l.input) { - l.width = 0 - return eof - } - r, w := utf8.DecodeRuneInString(l.input[l.pos:]) - l.width = w - l.pos += l.width - return r -} - -// peek returns but does not consume the next rune in the input. -func (l *lexer) peek() rune { - r := l.next() - l.backup() - return r -} - -// backup steps back one rune. Can only be called once per call of next. -func (l *lexer) backup() { - l.pos -= l.width -} - -// emit passes an item back to the client. -func (l *lexer) emit(t itemType) { - i := item{t, l.start, string(l.runes)} - l.items <- i - l.start = l.pos - l.runes = l.runes[:0] -} - -// ignore skips over the pending input before this point. -func (l *lexer) ignore() { - l.start = l.pos -} - -// appends the rune to the current value -func (l *lexer) appendRune(r rune) { - l.runes = append(l.runes, r) -} - -// accept consumes the next rune if it's from the valid set. -func (l *lexer) accept(valid string) bool { - if strings.ContainsRune(valid, l.next()) { - return true - } - l.backup() - return false -} - -// acceptRun consumes a run of runes from the valid set. -func (l *lexer) acceptRun(valid string) { - for strings.ContainsRune(valid, l.next()) { - } - l.backup() -} - -// lineNumber reports which line we're on, based on the position of -// the previous item returned by nextItem. Doing it this way -// means we don't have to worry about peek double counting. -func (l *lexer) lineNumber() int { - return 1 + strings.Count(l.input[:l.lastPos], "\n") -} - -// errorf returns an error token and terminates the scan by passing -// back a nil pointer that will be the next state, terminating l.nextItem. -func (l *lexer) errorf(format string, args ...interface{}) stateFn { - l.items <- item{itemError, l.start, fmt.Sprintf(format, args...)} - return nil -} - -// nextItem returns the next item from the input. -func (l *lexer) nextItem() item { - i := <-l.items - l.lastPos = i.pos - return i -} - -// lex creates a new scanner for the input string. -func lex(input string) *lexer { - l := &lexer{ - input: input, - items: make(chan item), - runes: make([]rune, 0, 32), - } - go l.run() - return l -} - -// run runs the state machine for the lexer. -func (l *lexer) run() { - for l.state = lexBeforeKey(l); l.state != nil; { - l.state = l.state(l) - } -} - -// state functions - -// lexBeforeKey scans until a key begins. 
-func lexBeforeKey(l *lexer) stateFn { - switch r := l.next(); { - case isEOF(r): - l.emit(itemEOF) - return nil - - case isEOL(r): - l.ignore() - return lexBeforeKey - - case isComment(r): - return lexComment - - case isWhitespace(r): - l.ignore() - return lexBeforeKey - - default: - l.backup() - return lexKey - } -} - -// lexComment scans a comment line. The comment character has already been scanned. -func lexComment(l *lexer) stateFn { - l.acceptRun(whitespace) - l.ignore() - for { - switch r := l.next(); { - case isEOF(r): - l.ignore() - l.emit(itemEOF) - return nil - case isEOL(r): - l.emit(itemComment) - return lexBeforeKey - default: - l.appendRune(r) - } - } -} - -// lexKey scans the key up to a delimiter -func lexKey(l *lexer) stateFn { - var r rune - -Loop: - for { - switch r = l.next(); { - - case isEscape(r): - err := l.scanEscapeSequence() - if err != nil { - return l.errorf(err.Error()) - } - - case isEndOfKey(r): - l.backup() - break Loop - - case isEOF(r): - break Loop - - default: - l.appendRune(r) - } - } - - if len(l.runes) > 0 { - l.emit(itemKey) - } - - if isEOF(r) { - l.emit(itemEOF) - return nil - } - - return lexBeforeValue -} - -// lexBeforeValue scans the delimiter between key and value. -// Leading and trailing whitespace is ignored. -// We expect to be just after the key. -func lexBeforeValue(l *lexer) stateFn { - l.acceptRun(whitespace) - l.accept(":=") - l.acceptRun(whitespace) - l.ignore() - return lexValue -} - -// lexValue scans text until the end of the line. We expect to be just after the delimiter. -func lexValue(l *lexer) stateFn { - for { - switch r := l.next(); { - case isEscape(r): - if isEOL(l.peek()) { - l.next() - l.acceptRun(whitespace) - } else { - err := l.scanEscapeSequence() - if err != nil { - return l.errorf(err.Error()) - } - } - - case isEOL(r): - l.emit(itemValue) - l.ignore() - return lexBeforeKey - - case isEOF(r): - l.emit(itemValue) - l.emit(itemEOF) - return nil - - default: - l.appendRune(r) - } - } -} - -// scanEscapeSequence scans either one of the escaped characters -// or a unicode literal. We expect to be after the escape character. -func (l *lexer) scanEscapeSequence() error { - switch r := l.next(); { - - case isEscapedCharacter(r): - l.appendRune(decodeEscapedCharacter(r)) - return nil - - case atUnicodeLiteral(r): - return l.scanUnicodeLiteral() - - case isEOF(r): - return fmt.Errorf("premature EOF") - - // silently drop the escape character and append the rune as is - default: - l.appendRune(r) - return nil - } -} - -// scans a unicode literal in the form \uXXXX. We expect to be after the \u. -func (l *lexer) scanUnicodeLiteral() error { - // scan the digits - d := make([]rune, 4) - for i := 0; i < 4; i++ { - d[i] = l.next() - if d[i] == eof || !strings.ContainsRune("0123456789abcdefABCDEF", d[i]) { - return fmt.Errorf("invalid unicode literal") - } - } - - // decode the digits into a rune - r, err := strconv.ParseInt(string(d), 16, 0) - if err != nil { - return err - } - - l.appendRune(rune(r)) - return nil -} - -// decodeEscapedCharacter returns the unescaped rune. We expect to be after the escape character. -func decodeEscapedCharacter(r rune) rune { - switch r { - case 'f': - return '\f' - case 'n': - return '\n' - case 'r': - return '\r' - case 't': - return '\t' - default: - return r - } -} - -// atUnicodeLiteral reports whether we are at a unicode literal. -// The escape character has already been consumed. 
-func atUnicodeLiteral(r rune) bool { - return r == 'u' -} - -// isComment reports whether we are at the start of a comment. -func isComment(r rune) bool { - return r == '#' || r == '!' -} - -// isEndOfKey reports whether the rune terminates the current key. -func isEndOfKey(r rune) bool { - return strings.ContainsRune(" \f\t\r\n:=", r) -} - -// isEOF reports whether we are at EOF. -func isEOF(r rune) bool { - return r == eof -} - -// isEOL reports whether we are at a new line character. -func isEOL(r rune) bool { - return r == '\n' || r == '\r' -} - -// isEscape reports whether the rune is the escape character which -// prefixes unicode literals and other escaped characters. -func isEscape(r rune) bool { - return r == '\\' -} - -// isEscapedCharacter reports whether we are at one of the characters that need escaping. -// The escape character has already been consumed. -func isEscapedCharacter(r rune) bool { - return strings.ContainsRune(" :=fnrt", r) -} - -// isWhitespace reports whether the rune is a whitespace character. -func isWhitespace(r rune) bool { - return strings.ContainsRune(whitespace, r) -} diff --git a/vendor/github.com/magiconair/properties/load.go b/vendor/github.com/magiconair/properties/load.go deleted file mode 100644 index 635368dc8..000000000 --- a/vendor/github.com/magiconair/properties/load.go +++ /dev/null @@ -1,293 +0,0 @@ -// Copyright 2013-2022 Frank Schroeder. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package properties - -import ( - "fmt" - "io/ioutil" - "net/http" - "os" - "strings" -) - -// Encoding specifies encoding of the input data. -type Encoding uint - -const ( - // utf8Default is a private placeholder for the zero value of Encoding to - // ensure that it has the correct meaning. UTF8 is the default encoding but - // was assigned a non-zero value which cannot be changed without breaking - // existing code. Clients should continue to use the public constants. - utf8Default Encoding = iota - - // UTF8 interprets the input data as UTF-8. - UTF8 - - // ISO_8859_1 interprets the input data as ISO-8859-1. - ISO_8859_1 -) - -type Loader struct { - // Encoding determines how the data from files and byte buffers - // is interpreted. For URLs the Content-Type header is used - // to determine the encoding of the data. - Encoding Encoding - - // DisableExpansion configures the property expansion of the - // returned property object. When set to true, the property values - // will not be expanded and the Property object will not be checked - // for invalid expansion expressions. - DisableExpansion bool - - // IgnoreMissing configures whether missing files or URLs which return - // 404 are reported as errors. When set to true, missing files and 404 - // status codes are not reported as errors. - IgnoreMissing bool -} - -// Load reads a buffer into a Properties struct. -func (l *Loader) LoadBytes(buf []byte) (*Properties, error) { - return l.loadBytes(buf, l.Encoding) -} - -// LoadAll reads the content of multiple URLs or files in the given order into -// a Properties struct. If IgnoreMissing is true then a 404 status code or -// missing file will not be reported as error. Encoding sets the encoding for -// files. For the URLs see LoadURL for the Content-Type header and the -// encoding. 
-func (l *Loader) LoadAll(names []string) (*Properties, error) { - all := NewProperties() - for _, name := range names { - n, err := expandName(name) - if err != nil { - return nil, err - } - - var p *Properties - switch { - case strings.HasPrefix(n, "http://"): - p, err = l.LoadURL(n) - case strings.HasPrefix(n, "https://"): - p, err = l.LoadURL(n) - default: - p, err = l.LoadFile(n) - } - if err != nil { - return nil, err - } - all.Merge(p) - } - - all.DisableExpansion = l.DisableExpansion - if all.DisableExpansion { - return all, nil - } - return all, all.check() -} - -// LoadFile reads a file into a Properties struct. -// If IgnoreMissing is true then a missing file will not be -// reported as error. -func (l *Loader) LoadFile(filename string) (*Properties, error) { - data, err := ioutil.ReadFile(filename) - if err != nil { - if l.IgnoreMissing && os.IsNotExist(err) { - LogPrintf("properties: %s not found. skipping", filename) - return NewProperties(), nil - } - return nil, err - } - return l.loadBytes(data, l.Encoding) -} - -// LoadURL reads the content of the URL into a Properties struct. -// -// The encoding is determined via the Content-Type header which -// should be set to 'text/plain'. If the 'charset' parameter is -// missing, 'iso-8859-1' or 'latin1' the encoding is set to -// ISO-8859-1. If the 'charset' parameter is set to 'utf-8' the -// encoding is set to UTF-8. A missing content type header is -// interpreted as 'text/plain; charset=utf-8'. -func (l *Loader) LoadURL(url string) (*Properties, error) { - resp, err := http.Get(url) - if err != nil { - return nil, fmt.Errorf("properties: error fetching %q. %s", url, err) - } - defer resp.Body.Close() - - if resp.StatusCode == 404 && l.IgnoreMissing { - LogPrintf("properties: %s returned %d. skipping", url, resp.StatusCode) - return NewProperties(), nil - } - - if resp.StatusCode != 200 { - return nil, fmt.Errorf("properties: %s returned %d", url, resp.StatusCode) - } - - body, err := ioutil.ReadAll(resp.Body) - if err != nil { - return nil, fmt.Errorf("properties: %s error reading response. %s", url, err) - } - - ct := resp.Header.Get("Content-Type") - ct = strings.Join(strings.Fields(ct), "") - var enc Encoding - switch strings.ToLower(ct) { - case "text/plain", "text/plain;charset=iso-8859-1", "text/plain;charset=latin1": - enc = ISO_8859_1 - case "", "text/plain;charset=utf-8": - enc = UTF8 - default: - return nil, fmt.Errorf("properties: invalid content type %s", ct) - } - - return l.loadBytes(body, enc) -} - -func (l *Loader) loadBytes(buf []byte, enc Encoding) (*Properties, error) { - p, err := parse(convert(buf, enc)) - if err != nil { - return nil, err - } - p.DisableExpansion = l.DisableExpansion - if p.DisableExpansion { - return p, nil - } - return p, p.check() -} - -// Load reads a buffer into a Properties struct. -func Load(buf []byte, enc Encoding) (*Properties, error) { - l := &Loader{Encoding: enc} - return l.LoadBytes(buf) -} - -// LoadString reads an UTF8 string into a properties struct. -func LoadString(s string) (*Properties, error) { - l := &Loader{Encoding: UTF8} - return l.LoadBytes([]byte(s)) -} - -// LoadMap creates a new Properties struct from a string map. -func LoadMap(m map[string]string) *Properties { - p := NewProperties() - for k, v := range m { - p.Set(k, v) - } - return p -} - -// LoadFile reads a file into a Properties struct. 
-func LoadFile(filename string, enc Encoding) (*Properties, error) { - l := &Loader{Encoding: enc} - return l.LoadAll([]string{filename}) -} - -// LoadFiles reads multiple files in the given order into -// a Properties struct. If 'ignoreMissing' is true then -// non-existent files will not be reported as error. -func LoadFiles(filenames []string, enc Encoding, ignoreMissing bool) (*Properties, error) { - l := &Loader{Encoding: enc, IgnoreMissing: ignoreMissing} - return l.LoadAll(filenames) -} - -// LoadURL reads the content of the URL into a Properties struct. -// See Loader#LoadURL for details. -func LoadURL(url string) (*Properties, error) { - l := &Loader{Encoding: UTF8} - return l.LoadAll([]string{url}) -} - -// LoadURLs reads the content of multiple URLs in the given order into a -// Properties struct. If IgnoreMissing is true then a 404 status code will -// not be reported as error. See Loader#LoadURL for the Content-Type header -// and the encoding. -func LoadURLs(urls []string, ignoreMissing bool) (*Properties, error) { - l := &Loader{Encoding: UTF8, IgnoreMissing: ignoreMissing} - return l.LoadAll(urls) -} - -// LoadAll reads the content of multiple URLs or files in the given order into a -// Properties struct. If 'ignoreMissing' is true then a 404 status code or missing file will -// not be reported as error. Encoding sets the encoding for files. For the URLs please see -// LoadURL for the Content-Type header and the encoding. -func LoadAll(names []string, enc Encoding, ignoreMissing bool) (*Properties, error) { - l := &Loader{Encoding: enc, IgnoreMissing: ignoreMissing} - return l.LoadAll(names) -} - -// MustLoadString reads an UTF8 string into a Properties struct and -// panics on error. -func MustLoadString(s string) *Properties { - return must(LoadString(s)) -} - -// MustLoadFile reads a file into a Properties struct and -// panics on error. -func MustLoadFile(filename string, enc Encoding) *Properties { - return must(LoadFile(filename, enc)) -} - -// MustLoadFiles reads multiple files in the given order into -// a Properties struct and panics on error. If 'ignoreMissing' -// is true then non-existent files will not be reported as error. -func MustLoadFiles(filenames []string, enc Encoding, ignoreMissing bool) *Properties { - return must(LoadFiles(filenames, enc, ignoreMissing)) -} - -// MustLoadURL reads the content of a URL into a Properties struct and -// panics on error. -func MustLoadURL(url string) *Properties { - return must(LoadURL(url)) -} - -// MustLoadURLs reads the content of multiple URLs in the given order into a -// Properties struct and panics on error. If 'ignoreMissing' is true then a 404 -// status code will not be reported as error. -func MustLoadURLs(urls []string, ignoreMissing bool) *Properties { - return must(LoadURLs(urls, ignoreMissing)) -} - -// MustLoadAll reads the content of multiple URLs or files in the given order into a -// Properties struct. If 'ignoreMissing' is true then a 404 status code or missing file will -// not be reported as error. Encoding sets the encoding for files. For the URLs please see -// LoadURL for the Content-Type header and the encoding. It panics on error. -func MustLoadAll(names []string, enc Encoding, ignoreMissing bool) *Properties { - return must(LoadAll(names, enc, ignoreMissing)) -} - -func must(p *Properties, err error) *Properties { - if err != nil { - ErrorHandler(err) - } - return p -} - -// expandName expands ${ENV_VAR} expressions in a name. 
-// If the environment variable does not exist then it will be replaced -// with an empty string. Malformed expressions like "${ENV_VAR" will -// be reported as error. -func expandName(name string) (string, error) { - return expand(name, []string{}, "${", "}", make(map[string]string)) -} - -// Interprets a byte buffer either as an ISO-8859-1 or UTF-8 encoded string. -// For ISO-8859-1 we can convert each byte straight into a rune since the -// first 256 unicode code points cover ISO-8859-1. -func convert(buf []byte, enc Encoding) string { - switch enc { - case utf8Default, UTF8: - return string(buf) - case ISO_8859_1: - runes := make([]rune, len(buf)) - for i, b := range buf { - runes[i] = rune(b) - } - return string(runes) - default: - ErrorHandler(fmt.Errorf("unsupported encoding %v", enc)) - } - panic("ErrorHandler should exit") -} diff --git a/vendor/github.com/magiconair/properties/parser.go b/vendor/github.com/magiconair/properties/parser.go deleted file mode 100644 index fccfd39f6..000000000 --- a/vendor/github.com/magiconair/properties/parser.go +++ /dev/null @@ -1,86 +0,0 @@ -// Copyright 2013-2022 Frank Schroeder. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package properties - -import ( - "fmt" - "runtime" -) - -type parser struct { - lex *lexer -} - -func parse(input string) (properties *Properties, err error) { - p := &parser{lex: lex(input)} - defer p.recover(&err) - - properties = NewProperties() - key := "" - comments := []string{} - - for { - token := p.expectOneOf(itemComment, itemKey, itemEOF) - switch token.typ { - case itemEOF: - goto done - case itemComment: - comments = append(comments, token.val) - continue - case itemKey: - key = token.val - if _, ok := properties.m[key]; !ok { - properties.k = append(properties.k, key) - } - } - - token = p.expectOneOf(itemValue, itemEOF) - if len(comments) > 0 { - properties.c[key] = comments - comments = []string{} - } - switch token.typ { - case itemEOF: - properties.m[key] = "" - goto done - case itemValue: - properties.m[key] = token.val - } - } - -done: - return properties, nil -} - -func (p *parser) errorf(format string, args ...interface{}) { - format = fmt.Sprintf("properties: Line %d: %s", p.lex.lineNumber(), format) - panic(fmt.Errorf(format, args...)) -} - -func (p *parser) expectOneOf(expected ...itemType) (token item) { - token = p.lex.nextItem() - for _, v := range expected { - if token.typ == v { - return token - } - } - p.unexpected(token) - panic("unexpected token") -} - -func (p *parser) unexpected(token item) { - p.errorf(token.String()) -} - -// recover is the handler that turns panics into returns from the top level of Parse. -func (p *parser) recover(errp *error) { - e := recover() - if e != nil { - if _, ok := e.(runtime.Error); ok { - panic(e) - } - *errp = e.(error) - } -} diff --git a/vendor/github.com/magiconair/properties/properties.go b/vendor/github.com/magiconair/properties/properties.go deleted file mode 100644 index fb2f7b404..000000000 --- a/vendor/github.com/magiconair/properties/properties.go +++ /dev/null @@ -1,848 +0,0 @@ -// Copyright 2013-2022 Frank Schroeder. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package properties - -// BUG(frank): Set() does not check for invalid unicode literals since this is currently handled by the lexer. -// BUG(frank): Write() does not allow to configure the newline character. 
Therefore, on Windows LF is used. - -import ( - "bytes" - "fmt" - "io" - "log" - "os" - "regexp" - "sort" - "strconv" - "strings" - "time" - "unicode/utf8" -) - -const maxExpansionDepth = 64 - -// ErrorHandlerFunc defines the type of function which handles failures -// of the MustXXX() functions. An error handler function must exit -// the application after handling the error. -type ErrorHandlerFunc func(error) - -// ErrorHandler is the function which handles failures of the MustXXX() -// functions. The default is LogFatalHandler. -var ErrorHandler ErrorHandlerFunc = LogFatalHandler - -// LogHandlerFunc defines the function prototype for logging errors. -type LogHandlerFunc func(fmt string, args ...interface{}) - -// LogPrintf defines a log handler which uses log.Printf. -var LogPrintf LogHandlerFunc = log.Printf - -// LogFatalHandler handles the error by logging a fatal error and exiting. -func LogFatalHandler(err error) { - log.Fatal(err) -} - -// PanicHandler handles the error by panicking. -func PanicHandler(err error) { - panic(err) -} - -// ----------------------------------------------------------------------------- - -// A Properties contains the key/value pairs from the properties input. -// All values are stored in unexpanded form and are expanded at runtime -type Properties struct { - // Pre-/Postfix for property expansion. - Prefix string - Postfix string - - // DisableExpansion controls the expansion of properties on Get() - // and the check for circular references on Set(). When set to - // true Properties behaves like a simple key/value store and does - // not check for circular references on Get() or on Set(). - DisableExpansion bool - - // Stores the key/value pairs - m map[string]string - - // Stores the comments per key. - c map[string][]string - - // Stores the keys in order of appearance. - k []string - - // WriteSeparator specifies the separator of key and value while writing the properties. - WriteSeparator string -} - -// NewProperties creates a new Properties struct with the default -// configuration for "${key}" expressions. -func NewProperties() *Properties { - return &Properties{ - Prefix: "${", - Postfix: "}", - m: map[string]string{}, - c: map[string][]string{}, - k: []string{}, - } -} - -// Load reads a buffer into the given Properties struct. -func (p *Properties) Load(buf []byte, enc Encoding) error { - l := &Loader{Encoding: enc, DisableExpansion: p.DisableExpansion} - newProperties, err := l.LoadBytes(buf) - if err != nil { - return err - } - p.Merge(newProperties) - return nil -} - -// Get returns the expanded value for the given key if exists. -// Otherwise, ok is false. -func (p *Properties) Get(key string) (value string, ok bool) { - v, ok := p.m[key] - if p.DisableExpansion { - return v, ok - } - if !ok { - return "", false - } - - expanded, err := p.expand(key, v) - - // we guarantee that the expanded value is free of - // circular references and malformed expressions - // so we panic if we still get an error here. - if err != nil { - ErrorHandler(err) - } - - return expanded, true -} - -// MustGet returns the expanded value for the given key if exists. -// Otherwise, it panics. -func (p *Properties) MustGet(key string) string { - if v, ok := p.Get(key); ok { - return v - } - ErrorHandler(invalidKeyError(key)) - panic("ErrorHandler should exit") -} - -// ---------------------------------------------------------------------------- - -// ClearComments removes the comments for all keys. 
-func (p *Properties) ClearComments() { - p.c = map[string][]string{} -} - -// ---------------------------------------------------------------------------- - -// GetComment returns the last comment before the given key or an empty string. -func (p *Properties) GetComment(key string) string { - comments, ok := p.c[key] - if !ok || len(comments) == 0 { - return "" - } - return comments[len(comments)-1] -} - -// ---------------------------------------------------------------------------- - -// GetComments returns all comments that appeared before the given key or nil. -func (p *Properties) GetComments(key string) []string { - if comments, ok := p.c[key]; ok { - return comments - } - return nil -} - -// ---------------------------------------------------------------------------- - -// SetComment sets the comment for the key. -func (p *Properties) SetComment(key, comment string) { - p.c[key] = []string{comment} -} - -// ---------------------------------------------------------------------------- - -// SetComments sets the comments for the key. If the comments are nil then -// all comments for this key are deleted. -func (p *Properties) SetComments(key string, comments []string) { - if comments == nil { - delete(p.c, key) - return - } - p.c[key] = comments -} - -// ---------------------------------------------------------------------------- - -// GetBool checks if the expanded value is one of '1', 'yes', -// 'true' or 'on' if the key exists. The comparison is case-insensitive. -// If the key does not exist the default value is returned. -func (p *Properties) GetBool(key string, def bool) bool { - v, err := p.getBool(key) - if err != nil { - return def - } - return v -} - -// MustGetBool checks if the expanded value is one of '1', 'yes', -// 'true' or 'on' if the key exists. The comparison is case-insensitive. -// If the key does not exist the function panics. -func (p *Properties) MustGetBool(key string) bool { - v, err := p.getBool(key) - if err != nil { - ErrorHandler(err) - } - return v -} - -func (p *Properties) getBool(key string) (value bool, err error) { - if v, ok := p.Get(key); ok { - return boolVal(v), nil - } - return false, invalidKeyError(key) -} - -func boolVal(v string) bool { - v = strings.ToLower(v) - return v == "1" || v == "true" || v == "yes" || v == "on" -} - -// ---------------------------------------------------------------------------- - -// GetDuration parses the expanded value as an time.Duration (in ns) if the -// key exists. If key does not exist or the value cannot be parsed the default -// value is returned. In almost all cases you want to use GetParsedDuration(). -func (p *Properties) GetDuration(key string, def time.Duration) time.Duration { - v, err := p.getInt64(key) - if err != nil { - return def - } - return time.Duration(v) -} - -// MustGetDuration parses the expanded value as an time.Duration (in ns) if -// the key exists. If key does not exist or the value cannot be parsed the -// function panics. In almost all cases you want to use MustGetParsedDuration(). -func (p *Properties) MustGetDuration(key string) time.Duration { - v, err := p.getInt64(key) - if err != nil { - ErrorHandler(err) - } - return time.Duration(v) -} - -// ---------------------------------------------------------------------------- - -// GetParsedDuration parses the expanded value with time.ParseDuration() if the key exists. -// If key does not exist or the value cannot be parsed the default -// value is returned. 
-func (p *Properties) GetParsedDuration(key string, def time.Duration) time.Duration { - s, ok := p.Get(key) - if !ok { - return def - } - v, err := time.ParseDuration(s) - if err != nil { - return def - } - return v -} - -// MustGetParsedDuration parses the expanded value with time.ParseDuration() if the key exists. -// If key does not exist or the value cannot be parsed the function panics. -func (p *Properties) MustGetParsedDuration(key string) time.Duration { - s, ok := p.Get(key) - if !ok { - ErrorHandler(invalidKeyError(key)) - } - v, err := time.ParseDuration(s) - if err != nil { - ErrorHandler(err) - } - return v -} - -// ---------------------------------------------------------------------------- - -// GetFloat64 parses the expanded value as a float64 if the key exists. -// If key does not exist or the value cannot be parsed the default -// value is returned. -func (p *Properties) GetFloat64(key string, def float64) float64 { - v, err := p.getFloat64(key) - if err != nil { - return def - } - return v -} - -// MustGetFloat64 parses the expanded value as a float64 if the key exists. -// If key does not exist or the value cannot be parsed the function panics. -func (p *Properties) MustGetFloat64(key string) float64 { - v, err := p.getFloat64(key) - if err != nil { - ErrorHandler(err) - } - return v -} - -func (p *Properties) getFloat64(key string) (value float64, err error) { - if v, ok := p.Get(key); ok { - value, err = strconv.ParseFloat(v, 64) - if err != nil { - return 0, err - } - return value, nil - } - return 0, invalidKeyError(key) -} - -// ---------------------------------------------------------------------------- - -// GetInt parses the expanded value as an int if the key exists. -// If key does not exist or the value cannot be parsed the default -// value is returned. If the value does not fit into an int the -// function panics with an out of range error. -func (p *Properties) GetInt(key string, def int) int { - v, err := p.getInt64(key) - if err != nil { - return def - } - return intRangeCheck(key, v) -} - -// MustGetInt parses the expanded value as an int if the key exists. -// If key does not exist or the value cannot be parsed the function panics. -// If the value does not fit into an int the function panics with -// an out of range error. -func (p *Properties) MustGetInt(key string) int { - v, err := p.getInt64(key) - if err != nil { - ErrorHandler(err) - } - return intRangeCheck(key, v) -} - -// ---------------------------------------------------------------------------- - -// GetInt64 parses the expanded value as an int64 if the key exists. -// If key does not exist or the value cannot be parsed the default -// value is returned. -func (p *Properties) GetInt64(key string, def int64) int64 { - v, err := p.getInt64(key) - if err != nil { - return def - } - return v -} - -// MustGetInt64 parses the expanded value as an int if the key exists. -// If key does not exist or the value cannot be parsed the function panics. -func (p *Properties) MustGetInt64(key string) int64 { - v, err := p.getInt64(key) - if err != nil { - ErrorHandler(err) - } - return v -} - -func (p *Properties) getInt64(key string) (value int64, err error) { - if v, ok := p.Get(key); ok { - value, err = strconv.ParseInt(v, 10, 64) - if err != nil { - return 0, err - } - return value, nil - } - return 0, invalidKeyError(key) -} - -// ---------------------------------------------------------------------------- - -// GetUint parses the expanded value as an uint if the key exists. 
-// If key does not exist or the value cannot be parsed the default -// value is returned. If the value does not fit into an int the -// function panics with an out of range error. -func (p *Properties) GetUint(key string, def uint) uint { - v, err := p.getUint64(key) - if err != nil { - return def - } - return uintRangeCheck(key, v) -} - -// MustGetUint parses the expanded value as an int if the key exists. -// If key does not exist or the value cannot be parsed the function panics. -// If the value does not fit into an int the function panics with -// an out of range error. -func (p *Properties) MustGetUint(key string) uint { - v, err := p.getUint64(key) - if err != nil { - ErrorHandler(err) - } - return uintRangeCheck(key, v) -} - -// ---------------------------------------------------------------------------- - -// GetUint64 parses the expanded value as an uint64 if the key exists. -// If key does not exist or the value cannot be parsed the default -// value is returned. -func (p *Properties) GetUint64(key string, def uint64) uint64 { - v, err := p.getUint64(key) - if err != nil { - return def - } - return v -} - -// MustGetUint64 parses the expanded value as an int if the key exists. -// If key does not exist or the value cannot be parsed the function panics. -func (p *Properties) MustGetUint64(key string) uint64 { - v, err := p.getUint64(key) - if err != nil { - ErrorHandler(err) - } - return v -} - -func (p *Properties) getUint64(key string) (value uint64, err error) { - if v, ok := p.Get(key); ok { - value, err = strconv.ParseUint(v, 10, 64) - if err != nil { - return 0, err - } - return value, nil - } - return 0, invalidKeyError(key) -} - -// ---------------------------------------------------------------------------- - -// GetString returns the expanded value for the given key if exists or -// the default value otherwise. -func (p *Properties) GetString(key, def string) string { - if v, ok := p.Get(key); ok { - return v - } - return def -} - -// MustGetString returns the expanded value for the given key if exists or -// panics otherwise. -func (p *Properties) MustGetString(key string) string { - if v, ok := p.Get(key); ok { - return v - } - ErrorHandler(invalidKeyError(key)) - panic("ErrorHandler should exit") -} - -// ---------------------------------------------------------------------------- - -// Filter returns a new properties object which contains all properties -// for which the key matches the pattern. -func (p *Properties) Filter(pattern string) (*Properties, error) { - re, err := regexp.Compile(pattern) - if err != nil { - return nil, err - } - - return p.FilterRegexp(re), nil -} - -// FilterRegexp returns a new properties object which contains all properties -// for which the key matches the regular expression. -func (p *Properties) FilterRegexp(re *regexp.Regexp) *Properties { - pp := NewProperties() - for _, k := range p.k { - if re.MatchString(k) { - // TODO(fs): we are ignoring the error which flags a circular reference. - // TODO(fs): since we are just copying a subset of keys this cannot happen (fingers crossed) - pp.Set(k, p.m[k]) - } - } - return pp -} - -// FilterPrefix returns a new properties object with a subset of all keys -// with the given prefix. -func (p *Properties) FilterPrefix(prefix string) *Properties { - pp := NewProperties() - for _, k := range p.k { - if strings.HasPrefix(k, prefix) { - // TODO(fs): we are ignoring the error which flags a circular reference. 
- // TODO(fs): since we are just copying a subset of keys this cannot happen (fingers crossed) - pp.Set(k, p.m[k]) - } - } - return pp -} - -// FilterStripPrefix returns a new properties object with a subset of all keys -// with the given prefix and the prefix removed from the keys. -func (p *Properties) FilterStripPrefix(prefix string) *Properties { - pp := NewProperties() - n := len(prefix) - for _, k := range p.k { - if len(k) > len(prefix) && strings.HasPrefix(k, prefix) { - // TODO(fs): we are ignoring the error which flags a circular reference. - // TODO(fs): since we are modifying keys I am not entirely sure whether we can create a circular reference - // TODO(fs): this function should probably return an error but the signature is fixed - pp.Set(k[n:], p.m[k]) - } - } - return pp -} - -// Len returns the number of keys. -func (p *Properties) Len() int { - return len(p.m) -} - -// Keys returns all keys in the same order as in the input. -func (p *Properties) Keys() []string { - keys := make([]string, len(p.k)) - copy(keys, p.k) - return keys -} - -// Set sets the property key to the corresponding value. -// If a value for key existed before then ok is true and prev -// contains the previous value. If the value contains a -// circular reference or a malformed expression then -// an error is returned. -// An empty key is silently ignored. -func (p *Properties) Set(key, value string) (prev string, ok bool, err error) { - if key == "" { - return "", false, nil - } - - // if expansion is disabled we allow circular references - if p.DisableExpansion { - prev, ok = p.Get(key) - p.m[key] = value - if !ok { - p.k = append(p.k, key) - } - return prev, ok, nil - } - - // to check for a circular reference we temporarily need - // to set the new value. If there is an error then revert - // to the previous state. Only if all tests are successful - // then we add the key to the p.k list. - prev, ok = p.Get(key) - p.m[key] = value - - // now check for a circular reference - _, err = p.expand(key, value) - if err != nil { - - // revert to the previous state - if ok { - p.m[key] = prev - } else { - delete(p.m, key) - } - - return "", false, err - } - - if !ok { - p.k = append(p.k, key) - } - - return prev, ok, nil -} - -// SetValue sets property key to the default string value -// as defined by fmt.Sprintf("%v"). -func (p *Properties) SetValue(key string, value interface{}) error { - _, _, err := p.Set(key, fmt.Sprintf("%v", value)) - return err -} - -// MustSet sets the property key to the corresponding value. -// If a value for key existed before then ok is true and prev -// contains the previous value. An empty key is silently ignored. -func (p *Properties) MustSet(key, value string) (prev string, ok bool) { - prev, ok, err := p.Set(key, value) - if err != nil { - ErrorHandler(err) - } - return prev, ok -} - -// String returns a string of all expanded 'key = value' pairs. -func (p *Properties) String() string { - var s string - for _, key := range p.k { - value, _ := p.Get(key) - s = fmt.Sprintf("%s%s = %s\n", s, key, value) - } - return s -} - -// Sort sorts the properties keys in alphabetical order. -// This is helpfully before writing the properties. -func (p *Properties) Sort() { - sort.Strings(p.k) -} - -// Write writes all unexpanded 'key = value' pairs to the given writer. -// Write returns the number of bytes written and any write error encountered. 
-func (p *Properties) Write(w io.Writer, enc Encoding) (n int, err error) { - return p.WriteComment(w, "", enc) -} - -// WriteComment writes all unexpanced 'key = value' pairs to the given writer. -// If prefix is not empty then comments are written with a blank line and the -// given prefix. The prefix should be either "# " or "! " to be compatible with -// the properties file format. Otherwise, the properties parser will not be -// able to read the file back in. It returns the number of bytes written and -// any write error encountered. -func (p *Properties) WriteComment(w io.Writer, prefix string, enc Encoding) (n int, err error) { - var x int - - for _, key := range p.k { - value := p.m[key] - - if prefix != "" { - if comments, ok := p.c[key]; ok { - // don't print comments if they are all empty - allEmpty := true - for _, c := range comments { - if c != "" { - allEmpty = false - break - } - } - - if !allEmpty { - // add a blank line between entries but not at the top - if len(comments) > 0 && n > 0 { - x, err = fmt.Fprintln(w) - if err != nil { - return - } - n += x - } - - for _, c := range comments { - x, err = fmt.Fprintf(w, "%s%s\n", prefix, c) - if err != nil { - return - } - n += x - } - } - } - } - sep := " = " - if p.WriteSeparator != "" { - sep = p.WriteSeparator - } - x, err = fmt.Fprintf(w, "%s%s%s\n", encode(key, " :", enc), sep, encode(value, "", enc)) - if err != nil { - return - } - n += x - } - return -} - -// Map returns a copy of the properties as a map. -func (p *Properties) Map() map[string]string { - m := make(map[string]string) - for k, v := range p.m { - m[k] = v - } - return m -} - -// FilterFunc returns a copy of the properties which includes the values which passed all filters. -func (p *Properties) FilterFunc(filters ...func(k, v string) bool) *Properties { - pp := NewProperties() -outer: - for k, v := range p.m { - for _, f := range filters { - if !f(k, v) { - continue outer - } - pp.Set(k, v) - } - } - return pp -} - -// ---------------------------------------------------------------------------- - -// Delete removes the key and its comments. -func (p *Properties) Delete(key string) { - delete(p.m, key) - delete(p.c, key) - newKeys := []string{} - for _, k := range p.k { - if k != key { - newKeys = append(newKeys, k) - } - } - p.k = newKeys -} - -// Merge merges properties, comments and keys from other *Properties into p -func (p *Properties) Merge(other *Properties) { - for _, k := range other.k { - if _, ok := p.m[k]; !ok { - p.k = append(p.k, k) - } - } - for k, v := range other.m { - p.m[k] = v - } - for k, v := range other.c { - p.c[k] = v - } -} - -// ---------------------------------------------------------------------------- - -// check expands all values and returns an error if a circular reference or -// a malformed expression was found. -func (p *Properties) check() error { - for key, value := range p.m { - if _, err := p.expand(key, value); err != nil { - return err - } - } - return nil -} - -func (p *Properties) expand(key, input string) (string, error) { - // no pre/postfix -> nothing to expand - if p.Prefix == "" && p.Postfix == "" { - return input, nil - } - - return expand(input, []string{key}, p.Prefix, p.Postfix, p.m) -} - -// expand recursively expands expressions of '(prefix)key(postfix)' to their corresponding values. -// The function keeps track of the keys that were already expanded and stops if it -// detects a circular reference or a malformed expression of the form '(prefix)key'. 
-func expand(s string, keys []string, prefix, postfix string, values map[string]string) (string, error) { - if len(keys) > maxExpansionDepth { - return "", fmt.Errorf("expansion too deep") - } - - for { - start := strings.Index(s, prefix) - if start == -1 { - return s, nil - } - - keyStart := start + len(prefix) - keyLen := strings.Index(s[keyStart:], postfix) - if keyLen == -1 { - return "", fmt.Errorf("malformed expression") - } - - end := keyStart + keyLen + len(postfix) - 1 - key := s[keyStart : keyStart+keyLen] - - // fmt.Printf("s:%q pp:%q start:%d end:%d keyStart:%d keyLen:%d key:%q\n", s, prefix + "..." + postfix, start, end, keyStart, keyLen, key) - - for _, k := range keys { - if key == k { - var b bytes.Buffer - b.WriteString("circular reference in:\n") - for _, k1 := range keys { - fmt.Fprintf(&b, "%s=%s\n", k1, values[k1]) - } - return "", fmt.Errorf(b.String()) - } - } - - val, ok := values[key] - if !ok { - val = os.Getenv(key) - } - new_val, err := expand(val, append(keys, key), prefix, postfix, values) - if err != nil { - return "", err - } - s = s[:start] + new_val + s[end+1:] - } -} - -// encode encodes a UTF-8 string to ISO-8859-1 and escapes some characters. -func encode(s string, special string, enc Encoding) string { - switch enc { - case UTF8: - return encodeUtf8(s, special) - case ISO_8859_1: - return encodeIso(s, special) - default: - panic(fmt.Sprintf("unsupported encoding %v", enc)) - } -} - -func encodeUtf8(s string, special string) string { - v := "" - for pos := 0; pos < len(s); { - r, w := utf8.DecodeRuneInString(s[pos:]) - pos += w - v += escape(r, special) - } - return v -} - -func encodeIso(s string, special string) string { - var r rune - var w int - var v string - for pos := 0; pos < len(s); { - switch r, w = utf8.DecodeRuneInString(s[pos:]); { - case r < 1<<8: // single byte rune -> escape special chars only - v += escape(r, special) - case r < 1<<16: // two byte rune -> unicode literal - v += fmt.Sprintf("\\u%04x", r) - default: // more than two bytes per rune -> can't encode - v += "?" - } - pos += w - } - return v -} - -func escape(r rune, special string) string { - switch r { - case '\f': - return "\\f" - case '\n': - return "\\n" - case '\r': - return "\\r" - case '\t': - return "\\t" - case '\\': - return "\\\\" - default: - if strings.ContainsRune(special, r) { - return "\\" + string(r) - } - return string(r) - } -} - -func invalidKeyError(key string) error { - return fmt.Errorf("unknown property: %s", key) -} diff --git a/vendor/github.com/magiconair/properties/rangecheck.go b/vendor/github.com/magiconair/properties/rangecheck.go deleted file mode 100644 index dbd60b36e..000000000 --- a/vendor/github.com/magiconair/properties/rangecheck.go +++ /dev/null @@ -1,31 +0,0 @@ -// Copyright 2013-2022 Frank Schroeder. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package properties - -import ( - "fmt" - "math" -) - -// make this a var to overwrite it in a test -var is32Bit = ^uint(0) == math.MaxUint32 - -// intRangeCheck checks if the value fits into the int type and -// panics if it does not. -func intRangeCheck(key string, v int64) int { - if is32Bit && (v < math.MinInt32 || v > math.MaxInt32) { - panic(fmt.Sprintf("Value %d for key %s out of range", v, key)) - } - return int(v) -} - -// uintRangeCheck checks if the value fits into the uint type and -// panics if it does not. 
-func uintRangeCheck(key string, v uint64) uint { - if is32Bit && v > math.MaxUint32 { - panic(fmt.Sprintf("Value %d for key %s out of range", v, key)) - } - return uint(v) -} diff --git a/vendor/github.com/maratori/testableexamples/LICENSE b/vendor/github.com/maratori/testableexamples/LICENSE deleted file mode 100644 index e8b68be3e..000000000 --- a/vendor/github.com/maratori/testableexamples/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -MIT License - -Copyright (c) 2022 Marat Reymers - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. diff --git a/vendor/github.com/maratori/testableexamples/pkg/testableexamples/testableexamples.go b/vendor/github.com/maratori/testableexamples/pkg/testableexamples/testableexamples.go deleted file mode 100644 index 26d22c703..000000000 --- a/vendor/github.com/maratori/testableexamples/pkg/testableexamples/testableexamples.go +++ /dev/null @@ -1,34 +0,0 @@ -package testableexamples - -import ( - "go/ast" - "go/doc" - "strings" - - "golang.org/x/tools/go/analysis" -) - -// NewAnalyzer returns Analyzer that checks if examples are testable. -func NewAnalyzer() *analysis.Analyzer { - return &analysis.Analyzer{ - Name: "testableexamples", - Doc: "linter checks if examples are testable (have an expected output)", - Run: func(pass *analysis.Pass) (interface{}, error) { - testFiles := make([]*ast.File, 0, len(pass.Files)) - for _, file := range pass.Files { - fileName := pass.Fset.File(file.Pos()).Name() - if strings.HasSuffix(fileName, "_test.go") { - testFiles = append(testFiles, file) - } - } - - for _, example := range doc.Examples(testFiles...) 
{ - if example.Output == "" && !example.EmptyOutput { - pass.Reportf(example.Code.Pos(), "missing output for example, go test can't validate it") - } - } - - return nil, nil - }, - } -} diff --git a/vendor/github.com/maratori/testpackage/LICENSE b/vendor/github.com/maratori/testpackage/LICENSE deleted file mode 100644 index 644d0b1c8..000000000 --- a/vendor/github.com/maratori/testpackage/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -MIT License - -Copyright (c) 2019 Marat Reymers - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. diff --git a/vendor/github.com/maratori/testpackage/pkg/testpackage/testpackage.go b/vendor/github.com/maratori/testpackage/pkg/testpackage/testpackage.go deleted file mode 100644 index 2e0572972..000000000 --- a/vendor/github.com/maratori/testpackage/pkg/testpackage/testpackage.go +++ /dev/null @@ -1,73 +0,0 @@ -package testpackage - -import ( - "flag" - "regexp" - "strings" - - "go/ast" - - "golang.org/x/tools/go/analysis" -) - -const ( - SkipRegexpFlagName = "skip-regexp" - SkipRegexpFlagUsage = `regexp pattern to skip file by name. To not skip files use -skip-regexp="^$"` - SkipRegexpFlagDefault = `(export|internal)_test\.go` -) - -const ( - AllowPackagesFlagName = "allow-packages" - AllowPackagesFlagUsage = `comma separated list of packages that don't end with _test that tests are allowed to be in` - AllowPackagesFlagDefault = `main` -) - -func processTestFile(pass *analysis.Pass, f *ast.File, allowedPackages []string) { - packageName := f.Name.Name - - for _, p := range allowedPackages { - if p == packageName { - return - } - } - - if !strings.HasSuffix(packageName, "_test") { - pass.Reportf(f.Name.Pos(), "package should be `%s_test` instead of `%s`", packageName, packageName) - } -} - -// NewAnalyzer returns Analyzer that makes you use a separate _test package. 
-func NewAnalyzer() *analysis.Analyzer { - var ( - skipFileRegexp = SkipRegexpFlagDefault - allowPackagesStr = AllowPackagesFlagDefault - fs flag.FlagSet - ) - - fs.StringVar(&skipFileRegexp, SkipRegexpFlagName, skipFileRegexp, SkipRegexpFlagUsage) - fs.StringVar(&allowPackagesStr, AllowPackagesFlagName, allowPackagesStr, AllowPackagesFlagUsage) - - return &analysis.Analyzer{ - Name: "testpackage", - Doc: "linter that makes you use a separate _test package", - Flags: fs, - Run: func(pass *analysis.Pass) (interface{}, error) { - allowedPackages := strings.Split(allowPackagesStr, ",") - skipFile, err := regexp.Compile(skipFileRegexp) - if err != nil { - return nil, err - } - - for _, f := range pass.Files { - fileName := pass.Fset.Position(f.Pos()).Filename - if !strings.HasSuffix(fileName, "_test.go") || skipFile.MatchString(fileName) { - continue - } - - processTestFile(pass, f, allowedPackages) - } - - return nil, nil - }, - } -} diff --git a/vendor/github.com/matoous/godox/.gitignore b/vendor/github.com/matoous/godox/.gitignore deleted file mode 100644 index 30d94b102..000000000 --- a/vendor/github.com/matoous/godox/.gitignore +++ /dev/null @@ -1,19 +0,0 @@ -# Binaries for programs and plugins -*.exe -*.dll -*.so -*.dylib -.idea - -# Test binary, build with `go test -c` -*.test - -# Output of the go coverage tool, specifically when used with LiteIDE -*.out - -# Project-local glide cache, RE: https://github.com/Masterminds/glide/issues/736 -.glide/ - -.vscode/ -debug -debug.test diff --git a/vendor/github.com/matoous/godox/.golangci.yml b/vendor/github.com/matoous/godox/.golangci.yml deleted file mode 100644 index 5fabb71fd..000000000 --- a/vendor/github.com/matoous/godox/.golangci.yml +++ /dev/null @@ -1,75 +0,0 @@ -linters-settings: - dupl: - threshold: 100 - gocritic: - # Enable multiple checks by tags, run `GL_DEBUG=gocritic golangci-lint` run to see all tags and checks. - # Empty list by default. See https://github.com/go-critic/go-critic#usage -> section "Tags". - enabled-tags: - - performance - - diagnostic - - style - disabled-checks: - - emptyStringTest - - unnamedResult # it is experimental currently and doesn't handle typed channels correctly - gocyclo: - min-complexity: 14 # TODO go lower - golint: - min-confidence: 0 - govet: - check-shadowing: true - goconst: - min-len: 2 - min-occurrences: 3 - goimports: - local-prefixes: gitlab.skypicker.com/search-team/gonuts/conveyance-store - lll: - line-length: 140 - maligned: - suggest-new: true - misspell: - locale: US - -linters: - enable-all: true - disable: - - depguard - # prealloc is not recommended by `golangci-lint` developers. 
- - prealloc - - gochecknoglobals - - # deprecated - - maligned - - exhaustivestruct - - nosnakecase - - scopelint - - structcheck - - ifshort - - varcheck - - deadcode - - golint - - interfacer - -issues: - exclude-rules: - - path: _test\.go - linters: - - exhaustruct - - goconst - - dupl - -run: - modules-download-mode: readonly - - skip-dirs: - - "fixtures" - -# output configuration options -output: - # colored-line-number|line-number|json|tab|checkstyle|code-climate, default is "colored-line-number" - format: tab - - # print lines of code with issue, default is true - print-issued-lines: true - - # print linter name in the end of issue text, default is true - print-linter-name: true diff --git a/vendor/github.com/matoous/godox/.revive.toml b/vendor/github.com/matoous/godox/.revive.toml deleted file mode 100644 index a4a30464d..000000000 --- a/vendor/github.com/matoous/godox/.revive.toml +++ /dev/null @@ -1,136 +0,0 @@ -ignoreGeneratedHeader = false -severity = "warning" -exclude = ["./fixtures/..."] - -# confidence <= 0.2 generate a lot of errors from package-comments rule. It marks files that do not contain -# package-level comments as a warning irrespective of existing package-level coment in one file. -confidence = 0.25 -errorCode = 1 -warningCode = 1 - -# Rules block. -# ⚠ Make sure to sort rules alpabetically for readability! ⚠ - -# argument-limit rule is setting up a maximum number of parameters that can be passed to the functions/methods. -[rule.argument-limit] - arguments = [5] - -# atomic rule checks for commonly mistaken usages of the sync/atomic package. -[rule.atomic] - -# blank-imports rule disallows blank imports. -[rule.blank-imports] - -# bool-literal-in-expr suggests removing boolean literals from logic expressions like `bar == true`, `arg == false`, -# `r != true`, `false && boolExpr` and `boolExpr || true`. -[rule.bool-literal-in-expr] - -# constant-logical-expr rule warns on constant logical expressions, like `name == name`. -[rule.constant-logical-expr] - -# context-as-argument rule makes sure that context.Context is the first argument of a function. -[rule.context-as-argument] - -# context-keys-type rule disallows the usage of basic types in context.WithValue -[rule.context-keys-type] - -# confusing-naming rule warns on methods with names that differ only by capitalization. -[rule.confusing-naming] - -# confusing-results rule suggests to name potentially confusing function results. -[rule.confusing-results] - -# cyclomatic rule sets restriction for maximum Cyclomatic complexity. -[rule.cyclomatic] - arguments = [15] - -# deep-exit rule looks for program exits in funcs other than `main()` or `init()`. -[rule.deep-exit] - -# dot-imports rule forbids `.` imports. -[rule.dot-imports] - -# empty-block warns on empty code blocks. -[rule.empty-block] - -# error-return rule ensure that the error return parameter is the last. -[rule.error-return] - -# error-strings rule ensure conventions around error strings. -[rule.error-strings] - -# error-naming rule ensure naming of error variables (has `Err` or `err` prefix). -[rule.error-naming] - -# errorf rule warns on usage errors.New(fmt.Sprintf()) instead of fmt.Errorf() -[rule.errorf] - -# exported rule ensure naming and commenting conventions on exported symbols. -[rule.exported] - -# flag-parameter rule warns on boolean parameters that create a control coupling. -[rule.flag-parameter] - -# get-return rule warns on getters that do not yield any result. 
-[rule.get-return] - -# if-return rule warns redundant if when returning an error. -[rule.if-return] - -# increment-decrement rule forces to use `i++` and `i--` instead of `i += 1` and `i -= 1`. -[rule.increment-decrement] - -# indent-error-flow rule prevents redundant else statements. -[rule.indent-error-flow] - -# modifies-value-receiver warns on assignments to value-passed method receivers. -[rule.modifies-value-receiver] - -# package-comments rule ensures package commenting conventions. -[rule.package-comments] - -# range rule prevents redundant variables when iterating over a collection. -[rule.range] - -# range-val-in-closure warns if range value is used in a closure dispatched as goroutine. -[rule.range-val-in-closure] - -# receiver-naming ensures conventions around the naming of receivers. -[rule.receiver-naming] - -# redefines-builtin-id warns on redefinitions of built-in (constants, variables, function and types) identifiers, -# like `true := "false"` etc. -[rule.redefines-builtin-id] - -# rule.superfluous-else prevents redundant else statements (extends indent-error-flow). Checks for `if-then-else`where -# the then block ends with branching statement like `continue`, `break`, or `goto`. -[rule.superfluous-else] - -# rule.struct-tag checks common struct tags like `json`, `xml`, `yaml`. -[rule.struct-tag] - -# time-naming rule conventions around the naming of time variables. Like not to use unit suffixes (sec, min etc.) in -# naming variables of type `time.Time` or `time.Duration`. -[rule.time-naming] - -# unexported-return rule warns when a public return is from unexported type. -[rule.unexported-return] - -# unnecessary-stmt suggests removing or simplifying unnecessary statements like breaks at the end of cases or return at -# the end of bodies of functions returning nothing. -[rule.unnecessary-stmt] - -# unreachable-code rule warns on the unreachable code. -[rule.unreachable-code] - -# unused-parameter rule suggests to rename or remove unused function parameters. -[rule.unused-parameter] - -# var-declaration rule reduces redundancies around variable declaration. -[rule.var-declaration] - -# var-naming checks naming rules. -[rule.var-naming] - -# waitgroup-by-value rule warns on functions taking `sync.WaitGroup` as a by-value parameter. -[rule.waitgroup-by-value] diff --git a/vendor/github.com/matoous/godox/LICENSE b/vendor/github.com/matoous/godox/LICENSE deleted file mode 100644 index 49e1b1e3a..000000000 --- a/vendor/github.com/matoous/godox/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -MIT License - -Copyright (c) 2018 Matous Dzivjak - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. diff --git a/vendor/github.com/matoous/godox/Makefile b/vendor/github.com/matoous/godox/Makefile deleted file mode 100644 index 694aa21d6..000000000 --- a/vendor/github.com/matoous/godox/Makefile +++ /dev/null @@ -1,20 +0,0 @@ -## Help display. -## Pulls comments from beside commands and prints a nicely formatted -## display with the commands and their usage information. - -.DEFAULT_GOAL := help - -help: ## Prints this help - @grep -h -E '^[a-zA-Z_-]+:.*?## .*$$' $(MAKEFILE_LIST) | sort | awk 'BEGIN {FS = ":.*?## "}; {printf "\033[36m%-30s\033[0m %s\n", $$1, $$2}' - -.PHONY: lint -lint: ## Lint the application - golangci-lint run --max-same-issues=0 --timeout=1m ./... - -.PHONY: test -test: ## Run unit tests - go test -race -shuffle=on ./... - -.PHONY: vet -vet: ## Run go vet - go vet ./... diff --git a/vendor/github.com/matoous/godox/README.md b/vendor/github.com/matoous/godox/README.md deleted file mode 100644 index 9c58e28ce..000000000 --- a/vendor/github.com/matoous/godox/README.md +++ /dev/null @@ -1,23 +0,0 @@ -GoDoX -=== - -[![Tests](https://github.com/matoous/godox/actions/workflows/test.yml/badge.svg)](https://github.com/matoous/godox/actions/workflows/test.yml) -[![Lint](https://github.com/matoous/godox/actions/workflows/lint.yml/badge.svg)](https://github.com/matoous/godox/actions/workflows/lint.yml) -[![GoDoc](https://godoc.org/github.com/matoous/godox?status.svg)](https://godoc.org/github.com/matoous/godox) -[![Go Report Card](https://goreportcard.com/badge/github.com/matoous/godox)](https://goreportcard.com/report/github.com/matoous/godox) -[![GitHub issues](https://img.shields.io/github/issues/matoous/godox.svg)](https://github.com/matoous/godox/issues) -[![License](https://img.shields.io/badge/license-MIT%20License-blue.svg)](https://github.com/matoous/godox/LICENSE) - -GoDoX extracts comments from Go code based on keywords. This repository is fork of https://github.com/766b/godox -but a lot of code has changed, this repository is updated and the code was adjusted for better integration with -https://github.com/golangci/golangci-lint. - -Installation ---- - - go get github.com/matoous/godox - -The main idea ---- - -The main idea of godox is the keywords like TODO, FIX, OPTIMIZE is temporary and for development purpose only. You should create tasks if some TODOs cannot be fixed in the current merge request. diff --git a/vendor/github.com/matoous/godox/godox.go b/vendor/github.com/matoous/godox/godox.go deleted file mode 100644 index ed17059e0..000000000 --- a/vendor/github.com/matoous/godox/godox.go +++ /dev/null @@ -1,121 +0,0 @@ -// Package godox is a linter that scans Go code for comments containing certain keywords -// (like TODO, BUG, FIXME) which typically indicate areas that require attention. -package godox - -import ( - "bufio" - "bytes" - "fmt" - "go/ast" - "go/token" - "path/filepath" - "strings" - "unicode" - "unicode/utf8" -) - -var defaultKeywords = []string{"TODO", "BUG", "FIXME"} - -// Message contains a message and position. 
-type Message struct { - Pos token.Position - Message string -} - -func getMessages(comment *ast.Comment, fset *token.FileSet, keywords []string) []Message { - commentText := extractComment(comment.Text) - - b := bufio.NewReader(bytes.NewBufferString(commentText)) - - var comments []Message - - for lineNum := 0; ; lineNum++ { - line, _, err := b.ReadLine() - if err != nil { - break - } - - const minimumSize = 4 - - sComment := bytes.TrimSpace(line) - if len(sComment) < minimumSize { - continue - } - - for _, kw := range keywords { - if lkw := len(kw); !(bytes.EqualFold([]byte(kw), sComment[0:lkw]) && - !hasAlphanumRuneAdjacent(sComment[lkw:])) { - continue - } - - pos := fset.Position(comment.Pos()) - // trim the comment - const commentLimit = 40 - if len(sComment) > commentLimit { - sComment = []byte(fmt.Sprintf("%.40s...", sComment)) - } - - comments = append(comments, Message{ - Pos: pos, - Message: fmt.Sprintf( - "%s:%d: Line contains %s: %q", - filepath.Clean(pos.Filename), - pos.Line+lineNum, - strings.Join(keywords, "/"), - sComment, - ), - }) - - break - } - } - - return comments -} - -func extractComment(commentText string) string { - switch commentText[1] { - case '/': - commentText = commentText[2:] - if len(commentText) > 0 && commentText[0] == ' ' { - commentText = commentText[1:] - } - case '*': - commentText = commentText[2 : len(commentText)-2] - } - - return commentText -} - -func hasAlphanumRuneAdjacent(rest []byte) bool { - if len(rest) == 0 { - return false - } - - switch rest[0] { // most common cases - case ':', ' ', '(': - return false - } - - r, _ := utf8.DecodeRune(rest) - - return unicode.IsLetter(r) || unicode.IsNumber(r) || unicode.IsDigit(r) -} - -// Run runs the godox linter on given file. -// Godox searches for comments starting with given keywords and reports them. -func Run(file *ast.File, fset *token.FileSet, keywords ...string) []Message { - if len(keywords) == 0 { - keywords = defaultKeywords - } - - var messages []Message - - for _, c := range file.Comments { - for _, ci := range c.List { - messages = append(messages, getMessages(ci, fset, keywords)...) - } - } - - return messages -} diff --git a/vendor/github.com/mbilski/exhaustivestruct/LICENSE b/vendor/github.com/mbilski/exhaustivestruct/LICENSE deleted file mode 100644 index 893eb73b9..000000000 --- a/vendor/github.com/mbilski/exhaustivestruct/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -MIT License - -Copyright (c) 2020 Mateusz Bilski - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. 
diff --git a/vendor/github.com/mbilski/exhaustivestruct/pkg/analyzer/analyzer.go b/vendor/github.com/mbilski/exhaustivestruct/pkg/analyzer/analyzer.go deleted file mode 100644 index 0dfb713c5..000000000 --- a/vendor/github.com/mbilski/exhaustivestruct/pkg/analyzer/analyzer.go +++ /dev/null @@ -1,187 +0,0 @@ -package analyzer - -import ( - "flag" - "fmt" - "go/ast" - "go/types" - "path" - "strings" - - "golang.org/x/tools/go/analysis/passes/inspect" - "golang.org/x/tools/go/ast/inspector" - - "golang.org/x/tools/go/analysis" -) - -// Analyzer that checks if all struct's fields are initialized -var Analyzer = &analysis.Analyzer{ - Name: "exhaustivestruct", - Doc: "Checks if all struct's fields are initialized", - Run: run, - Requires: []*analysis.Analyzer{inspect.Analyzer}, - Flags: newFlagSet(), -} - -// StructPatternList is a comma separated list of expressions to match struct packages and names -// The struct packages have the form example.com/package.ExampleStruct -// The matching patterns can use matching syntax from https://pkg.go.dev/path#Match -// If this list is empty, all structs are tested. -var StructPatternList string - -func newFlagSet() flag.FlagSet { - fs := flag.NewFlagSet("", flag.PanicOnError) - fs.StringVar(&StructPatternList, "struct_patterns", "", "This is a comma separated list of expressions to match struct packages and names") - return *fs -} - -func run(pass *analysis.Pass) (interface{}, error) { - splitFn := func(c rune) bool { return c == ',' } - inspector := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) - structPatterns := strings.FieldsFunc(StructPatternList, splitFn) - // validate the pattern syntax - for _, pattern := range structPatterns { - _, err := path.Match(pattern, "") - if err != nil { - return nil, fmt.Errorf("invalid struct pattern %s: %w", pattern, err) - } - } - - nodeFilter := []ast.Node{ - (*ast.CompositeLit)(nil), - (*ast.ReturnStmt)(nil), - } - - var returnStmt *ast.ReturnStmt - - inspector.Preorder(nodeFilter, func(node ast.Node) { - var name string - - compositeLit, ok := node.(*ast.CompositeLit) - if !ok { - // Keep track of the last return statement whilte iterating - retLit, ok := node.(*ast.ReturnStmt) - if ok { - returnStmt = retLit - } - return - } - - i, ok := compositeLit.Type.(*ast.Ident) - - if ok { - name = i.Name - } else { - s, ok := compositeLit.Type.(*ast.SelectorExpr) - - if !ok { - return - } - - name = s.Sel.Name - } - - if compositeLit.Type == nil { - return - } - - t := pass.TypesInfo.TypeOf(compositeLit.Type) - - if t == nil { - return - } - - if len(structPatterns) > 0 { - shouldLint := false - for _, pattern := range structPatterns { - // We check the patterns for vailidy ahead of time, so we don't need to check the error here - if match, _ := path.Match(pattern, t.String()); match { - shouldLint = true - break - } - } - if !shouldLint { - return - } - } - - str, ok := t.Underlying().(*types.Struct) - - if !ok { - return - } - - // Don't report an error if: - // 1. This composite literal contains no fields and - // 2. It's in a return statement and - // 3. 
The return statement contains a non-nil error - if len(compositeLit.Elts) == 0 { - // Check if this composite is one of the results the last return statement - isInResults := false - if returnStmt != nil { - for _, result := range returnStmt.Results { - compareComposite, ok := result.(*ast.CompositeLit) - if ok { - if compareComposite == compositeLit { - isInResults = true - } - } - } - } - nonNilError := false - if isInResults { - // Check if any of the results has an error type and if that error is set to non-nil (if it's set to nil, the type would be "untyped nil") - for _, result := range returnStmt.Results { - if pass.TypesInfo.TypeOf(result).String() == "error" { - nonNilError = true - } - } - } - - if nonNilError { - return - } - } - - samePackage := strings.HasPrefix(t.String(), pass.Pkg.Path()+".") - - missing := []string{} - - for i := 0; i < str.NumFields(); i++ { - fieldName := str.Field(i).Name() - exists := false - - if !samePackage && !str.Field(i).Exported() { - continue - } - - for eIndex, e := range compositeLit.Elts { - if k, ok := e.(*ast.KeyValueExpr); ok { - if i, ok := k.Key.(*ast.Ident); ok { - if i.Name == fieldName { - exists = true - break - } - } - } else { - if eIndex == i { - exists = true - break - } - } - } - - if !exists { - missing = append(missing, fieldName) - } - } - - if len(missing) == 1 { - pass.Reportf(node.Pos(), "%s is missing in %s", missing[0], name) - } else if len(missing) > 1 { - pass.Reportf(node.Pos(), "%s are missing in %s", strings.Join(missing, ", "), name) - } - }) - - return nil, nil -} diff --git a/vendor/github.com/mgechev/revive/LICENSE b/vendor/github.com/mgechev/revive/LICENSE deleted file mode 100644 index c617c7e01..000000000 --- a/vendor/github.com/mgechev/revive/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -MIT License - -Copyright (c) 2018 Minko Gechev - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. 
diff --git a/vendor/github.com/mgechev/revive/config/config.go b/vendor/github.com/mgechev/revive/config/config.go deleted file mode 100644 index 50a2b8966..000000000 --- a/vendor/github.com/mgechev/revive/config/config.go +++ /dev/null @@ -1,259 +0,0 @@ -// Package config implements revive's configuration data structures and related methods -package config - -import ( - "errors" - "fmt" - "os" - - "github.com/BurntSushi/toml" - - "github.com/mgechev/revive/formatter" - "github.com/mgechev/revive/lint" - "github.com/mgechev/revive/rule" -) - -var defaultRules = []lint.Rule{ - &rule.VarDeclarationsRule{}, - &rule.PackageCommentsRule{}, - &rule.DotImportsRule{}, - &rule.BlankImportsRule{}, - &rule.ExportedRule{}, - &rule.VarNamingRule{}, - &rule.IndentErrorFlowRule{}, - &rule.RangeRule{}, - &rule.ErrorfRule{}, - &rule.ErrorNamingRule{}, - &rule.ErrorStringsRule{}, - &rule.ReceiverNamingRule{}, - &rule.IncrementDecrementRule{}, - &rule.ErrorReturnRule{}, - &rule.UnexportedReturnRule{}, - &rule.TimeNamingRule{}, - &rule.ContextKeysType{}, - &rule.ContextAsArgumentRule{}, - &rule.EmptyBlockRule{}, - &rule.SuperfluousElseRule{}, - &rule.UnusedParamRule{}, - &rule.UnreachableCodeRule{}, - &rule.RedefinesBuiltinIDRule{}, -} - -var allRules = append([]lint.Rule{ - &rule.ArgumentsLimitRule{}, - &rule.CyclomaticRule{}, - &rule.FileHeaderRule{}, - &rule.ConfusingNamingRule{}, - &rule.GetReturnRule{}, - &rule.ModifiesParamRule{}, - &rule.ConfusingResultsRule{}, - &rule.DeepExitRule{}, - &rule.AddConstantRule{}, - &rule.FlagParamRule{}, - &rule.UnnecessaryStmtRule{}, - &rule.StructTagRule{}, - &rule.ModifiesValRecRule{}, - &rule.ConstantLogicalExprRule{}, - &rule.BoolLiteralRule{}, - &rule.ImportsBlocklistRule{}, - &rule.FunctionResultsLimitRule{}, - &rule.MaxPublicStructsRule{}, - &rule.RangeValInClosureRule{}, - &rule.RangeValAddress{}, - &rule.WaitGroupByValueRule{}, - &rule.AtomicRule{}, - &rule.EmptyLinesRule{}, - &rule.LineLengthLimitRule{}, - &rule.CallToGCRule{}, - &rule.DuplicatedImportsRule{}, - &rule.ImportShadowingRule{}, - &rule.BareReturnRule{}, - &rule.UnusedReceiverRule{}, - &rule.UnhandledErrorRule{}, - &rule.CognitiveComplexityRule{}, - &rule.StringOfIntRule{}, - &rule.StringFormatRule{}, - &rule.EarlyReturnRule{}, - &rule.UnconditionalRecursionRule{}, - &rule.IdenticalBranchesRule{}, - &rule.DeferRule{}, - &rule.UnexportedNamingRule{}, - &rule.FunctionLength{}, - &rule.NestedStructs{}, - &rule.UselessBreak{}, - &rule.UncheckedTypeAssertionRule{}, - &rule.TimeEqualRule{}, - &rule.BannedCharsRule{}, - &rule.OptimizeOperandsOrderRule{}, - &rule.UseAnyRule{}, - &rule.DataRaceRule{}, - &rule.CommentSpacingsRule{}, - &rule.IfReturnRule{}, - &rule.RedundantImportAlias{}, - &rule.ImportAliasNamingRule{}, - &rule.EnforceMapStyleRule{}, - &rule.EnforceRepeatedArgTypeStyleRule{}, - &rule.EnforceSliceStyleRule{}, - &rule.MaxControlNestingRule{}, -}, defaultRules...) 
- -var allFormatters = []lint.Formatter{ - &formatter.Stylish{}, - &formatter.Friendly{}, - &formatter.JSON{}, - &formatter.NDJSON{}, - &formatter.Default{}, - &formatter.Unix{}, - &formatter.Checkstyle{}, - &formatter.Plain{}, - &formatter.Sarif{}, -} - -func getFormatters() map[string]lint.Formatter { - result := map[string]lint.Formatter{} - for _, f := range allFormatters { - result[f.Name()] = f - } - return result -} - -// GetLintingRules yields the linting rules that must be applied by the linter -func GetLintingRules(config *lint.Config, extraRules []lint.Rule) ([]lint.Rule, error) { - rulesMap := map[string]lint.Rule{} - for _, r := range allRules { - rulesMap[r.Name()] = r - } - for _, r := range extraRules { - if _, ok := rulesMap[r.Name()]; ok { - continue - } - rulesMap[r.Name()] = r - } - - var lintingRules []lint.Rule - for name, ruleConfig := range config.Rules { - actualName := actualRuleName(name) - r, ok := rulesMap[actualName] - if !ok { - return nil, fmt.Errorf("cannot find rule: %s", name) - } - - if ruleConfig.Disabled { - continue // skip disabled rules - } - - lintingRules = append(lintingRules, r) - } - - return lintingRules, nil -} - -func actualRuleName(name string) string { - switch name { - case "imports-blacklist": - return "imports-blocklist" - default: - return name - } -} - -func parseConfig(path string, config *lint.Config) error { - file, err := os.ReadFile(path) - if err != nil { - return errors.New("cannot read the config file") - } - _, err = toml.Decode(string(file), config) - if err != nil { - return fmt.Errorf("cannot parse the config file: %v", err) - } - for k, r := range config.Rules { - err := r.Initialize() - if err != nil { - return fmt.Errorf("error in config of rule [%s] : [%v]", k, err) - } - config.Rules[k] = r - } - - return nil -} - -func normalizeConfig(config *lint.Config) { - if len(config.Rules) == 0 { - config.Rules = map[string]lint.RuleConfig{} - } - if config.EnableAllRules { - // Add to the configuration all rules not yet present in it - for _, r := range allRules { - ruleName := r.Name() - _, alreadyInConf := config.Rules[ruleName] - if alreadyInConf { - continue - } - // Add the rule with an empty conf for - config.Rules[ruleName] = lint.RuleConfig{} - } - } - - severity := config.Severity - if severity != "" { - for k, v := range config.Rules { - if v.Severity == "" { - v.Severity = severity - } - config.Rules[k] = v - } - for k, v := range config.Directives { - if v.Severity == "" { - v.Severity = severity - } - config.Directives[k] = v - } - } -} - -const defaultConfidence = 0.8 - -// GetConfig yields the configuration -func GetConfig(configPath string) (*lint.Config, error) { - config := &lint.Config{} - switch { - case configPath != "": - config.Confidence = defaultConfidence - err := parseConfig(configPath, config) - if err != nil { - return nil, err - } - - default: // no configuration provided - config = defaultConfig() - } - - normalizeConfig(config) - return config, nil -} - -// GetFormatter yields the formatter for lint failures -func GetFormatter(formatterName string) (lint.Formatter, error) { - formatters := getFormatters() - fmtr := formatters["default"] - if formatterName != "" { - f, ok := formatters[formatterName] - if !ok { - return nil, fmt.Errorf("unknown formatter %v", formatterName) - } - fmtr = f - } - return fmtr, nil -} - -func defaultConfig() *lint.Config { - defaultConfig := lint.Config{ - Confidence: defaultConfidence, - Severity: lint.SeverityWarning, - Rules: map[string]lint.RuleConfig{}, - } - 
for _, r := range defaultRules { - defaultConfig.Rules[r.Name()] = lint.RuleConfig{} - } - return &defaultConfig -} diff --git a/vendor/github.com/mgechev/revive/formatter/checkstyle.go b/vendor/github.com/mgechev/revive/formatter/checkstyle.go deleted file mode 100644 index f45b63c92..000000000 --- a/vendor/github.com/mgechev/revive/formatter/checkstyle.go +++ /dev/null @@ -1,77 +0,0 @@ -package formatter - -import ( - "bytes" - "encoding/xml" - plain "text/template" - - "github.com/mgechev/revive/lint" -) - -// Checkstyle is an implementation of the Formatter interface -// which formats the errors to Checkstyle-like format. -type Checkstyle struct { - Metadata lint.FormatterMetadata -} - -// Name returns the name of the formatter -func (*Checkstyle) Name() string { - return "checkstyle" -} - -type issue struct { - Line int - Col int - What string - Confidence float64 - Severity lint.Severity - RuleName string -} - -// Format formats the failures gotten from the lint. -func (*Checkstyle) Format(failures <-chan lint.Failure, config lint.Config) (string, error) { - issues := map[string][]issue{} - for failure := range failures { - buf := new(bytes.Buffer) - xml.Escape(buf, []byte(failure.Failure)) - what := buf.String() - iss := issue{ - Line: failure.Position.Start.Line, - Col: failure.Position.Start.Column, - What: what, - Confidence: failure.Confidence, - Severity: severity(config, failure), - RuleName: failure.RuleName, - } - fn := failure.GetFilename() - if issues[fn] == nil { - issues[fn] = make([]issue, 0) - } - issues[fn] = append(issues[fn], iss) - } - - t, err := plain.New("revive").Parse(checkstyleTemplate) - if err != nil { - return "", err - } - - buf := new(bytes.Buffer) - - err = t.Execute(buf, issues) - if err != nil { - return "", err - } - - return buf.String(), nil -} - -const checkstyleTemplate = ` - -{{- range $k, $v := . }} - - {{- range $i, $issue := $v }} - - {{- end }} - -{{- end }} -` diff --git a/vendor/github.com/mgechev/revive/formatter/default.go b/vendor/github.com/mgechev/revive/formatter/default.go deleted file mode 100644 index 2d5a04434..000000000 --- a/vendor/github.com/mgechev/revive/formatter/default.go +++ /dev/null @@ -1,28 +0,0 @@ -package formatter - -import ( - "bytes" - "fmt" - - "github.com/mgechev/revive/lint" -) - -// Default is an implementation of the Formatter interface -// which formats the errors to text. -type Default struct { - Metadata lint.FormatterMetadata -} - -// Name returns the name of the formatter -func (*Default) Name() string { - return "default" -} - -// Format formats the failures gotten from the lint. -func (*Default) Format(failures <-chan lint.Failure, _ lint.Config) (string, error) { - var buf bytes.Buffer - for failure := range failures { - fmt.Fprintf(&buf, "%v: %s\n", failure.Position.Start, failure.Failure) - } - return buf.String(), nil -} diff --git a/vendor/github.com/mgechev/revive/formatter/doc.go b/vendor/github.com/mgechev/revive/formatter/doc.go deleted file mode 100644 index bb89f20ea..000000000 --- a/vendor/github.com/mgechev/revive/formatter/doc.go +++ /dev/null @@ -1,2 +0,0 @@ -// Package formatter implements the linter output formatters. 
-package formatter diff --git a/vendor/github.com/mgechev/revive/formatter/friendly.go b/vendor/github.com/mgechev/revive/formatter/friendly.go deleted file mode 100644 index 5ff329a23..000000000 --- a/vendor/github.com/mgechev/revive/formatter/friendly.go +++ /dev/null @@ -1,141 +0,0 @@ -package formatter - -import ( - "bytes" - "fmt" - "io" - "sort" - - "github.com/fatih/color" - "github.com/mgechev/revive/lint" - "github.com/olekukonko/tablewriter" -) - -func getErrorEmoji() string { - return color.RedString("✘") -} - -func getWarningEmoji() string { - return color.YellowString("⚠") -} - -// Friendly is an implementation of the Formatter interface -// which formats the errors to JSON. -type Friendly struct { - Metadata lint.FormatterMetadata -} - -// Name returns the name of the formatter -func (*Friendly) Name() string { - return "friendly" -} - -// Format formats the failures gotten from the lint. -func (f *Friendly) Format(failures <-chan lint.Failure, config lint.Config) (string, error) { - var buf bytes.Buffer - errorMap := map[string]int{} - warningMap := map[string]int{} - totalErrors := 0 - totalWarnings := 0 - for failure := range failures { - sev := severity(config, failure) - f.printFriendlyFailure(&buf, failure, sev) - if sev == lint.SeverityWarning { - warningMap[failure.RuleName]++ - totalWarnings++ - } - if sev == lint.SeverityError { - errorMap[failure.RuleName]++ - totalErrors++ - } - } - f.printSummary(&buf, totalErrors, totalWarnings) - f.printStatistics(&buf, color.RedString("Errors:"), errorMap) - f.printStatistics(&buf, color.YellowString("Warnings:"), warningMap) - return buf.String(), nil -} - -func (f *Friendly) printFriendlyFailure(w io.Writer, failure lint.Failure, severity lint.Severity) { - f.printHeaderRow(w, failure, severity) - f.printFilePosition(w, failure) - fmt.Fprintln(w) - fmt.Fprintln(w) -} - -func (f *Friendly) printHeaderRow(w io.Writer, failure lint.Failure, severity lint.Severity) { - emoji := getWarningEmoji() - if severity == lint.SeverityError { - emoji = getErrorEmoji() - } - fmt.Fprint(w, f.table([][]string{{emoji, "https://revive.run/r#" + failure.RuleName, color.GreenString(failure.Failure)}})) -} - -func (*Friendly) printFilePosition(w io.Writer, failure lint.Failure) { - fmt.Fprintf(w, " %s:%d:%d", failure.GetFilename(), failure.Position.Start.Line, failure.Position.Start.Column) -} - -type statEntry struct { - name string - failures int -} - -func (*Friendly) printSummary(w io.Writer, errors, warnings int) { - emoji := getWarningEmoji() - if errors > 0 { - emoji = getErrorEmoji() - } - problemsLabel := "problems" - if errors+warnings == 1 { - problemsLabel = "problem" - } - warningsLabel := "warnings" - if warnings == 1 { - warningsLabel = "warning" - } - errorsLabel := "errors" - if errors == 1 { - errorsLabel = "error" - } - str := fmt.Sprintf("%d %s (%d %s, %d %s)", errors+warnings, problemsLabel, errors, errorsLabel, warnings, warningsLabel) - if errors > 0 { - fmt.Fprintf(w, "%s %s\n", emoji, color.RedString(str)) - fmt.Fprintln(w) - return - } - if warnings > 0 { - fmt.Fprintf(w, "%s %s\n", emoji, color.YellowString(str)) - fmt.Fprintln(w) - return - } -} - -func (f *Friendly) printStatistics(w io.Writer, header string, stats map[string]int) { - if len(stats) == 0 { - return - } - var data []statEntry - for name, total := range stats { - data = append(data, statEntry{name, total}) - } - sort.Slice(data, func(i, j int) bool { - return data[i].failures > data[j].failures - }) - formatted := [][]string{} - for _, entry := range 
data { - formatted = append(formatted, []string{color.GreenString(fmt.Sprintf("%d", entry.failures)), entry.name}) - } - fmt.Fprintln(w, header) - fmt.Fprintln(w, f.table(formatted)) -} - -func (*Friendly) table(rows [][]string) string { - buf := new(bytes.Buffer) - table := tablewriter.NewWriter(buf) - table.SetBorder(false) - table.SetColumnSeparator("") - table.SetRowSeparator("") - table.SetAutoWrapText(false) - table.AppendBulk(rows) - table.Render() - return buf.String() -} diff --git a/vendor/github.com/mgechev/revive/formatter/json.go b/vendor/github.com/mgechev/revive/formatter/json.go deleted file mode 100644 index 7cace89ec..000000000 --- a/vendor/github.com/mgechev/revive/formatter/json.go +++ /dev/null @@ -1,40 +0,0 @@ -package formatter - -import ( - "encoding/json" - - "github.com/mgechev/revive/lint" -) - -// JSON is an implementation of the Formatter interface -// which formats the errors to JSON. -type JSON struct { - Metadata lint.FormatterMetadata -} - -// Name returns the name of the formatter -func (*JSON) Name() string { - return "json" -} - -// jsonObject defines a JSON object of an failure -type jsonObject struct { - Severity lint.Severity - lint.Failure `json:",inline"` -} - -// Format formats the failures gotten from the lint. -func (*JSON) Format(failures <-chan lint.Failure, config lint.Config) (string, error) { - var slice []jsonObject - for failure := range failures { - obj := jsonObject{} - obj.Severity = severity(config, failure) - obj.Failure = failure - slice = append(slice, obj) - } - result, err := json.Marshal(slice) - if err != nil { - return "", err - } - return string(result), err -} diff --git a/vendor/github.com/mgechev/revive/formatter/ndjson.go b/vendor/github.com/mgechev/revive/formatter/ndjson.go deleted file mode 100644 index 58b35dc44..000000000 --- a/vendor/github.com/mgechev/revive/formatter/ndjson.go +++ /dev/null @@ -1,35 +0,0 @@ -package formatter - -import ( - "bytes" - "encoding/json" - - "github.com/mgechev/revive/lint" -) - -// NDJSON is an implementation of the Formatter interface -// which formats the errors to NDJSON stream. -type NDJSON struct { - Metadata lint.FormatterMetadata -} - -// Name returns the name of the formatter -func (*NDJSON) Name() string { - return "ndjson" -} - -// Format formats the failures gotten from the lint. -func (*NDJSON) Format(failures <-chan lint.Failure, config lint.Config) (string, error) { - var buf bytes.Buffer - enc := json.NewEncoder(&buf) - for failure := range failures { - obj := jsonObject{} - obj.Severity = severity(config, failure) - obj.Failure = failure - err := enc.Encode(obj) - if err != nil { - return "", err - } - } - return buf.String(), nil -} diff --git a/vendor/github.com/mgechev/revive/formatter/plain.go b/vendor/github.com/mgechev/revive/formatter/plain.go deleted file mode 100644 index 09ebf6cdc..000000000 --- a/vendor/github.com/mgechev/revive/formatter/plain.go +++ /dev/null @@ -1,28 +0,0 @@ -package formatter - -import ( - "bytes" - "fmt" - - "github.com/mgechev/revive/lint" -) - -// Plain is an implementation of the Formatter interface -// which formats the errors to JSON. -type Plain struct { - Metadata lint.FormatterMetadata -} - -// Name returns the name of the formatter -func (*Plain) Name() string { - return "plain" -} - -// Format formats the failures gotten from the lint. 
-func (*Plain) Format(failures <-chan lint.Failure, _ lint.Config) (string, error) { - var buf bytes.Buffer - for failure := range failures { - fmt.Fprintf(&buf, "%v: %s %s\n", failure.Position.Start, failure.Failure, "https://revive.run/r#"+failure.RuleName) - } - return buf.String(), nil -} diff --git a/vendor/github.com/mgechev/revive/formatter/sarif.go b/vendor/github.com/mgechev/revive/formatter/sarif.go deleted file mode 100644 index c42da73eb..000000000 --- a/vendor/github.com/mgechev/revive/formatter/sarif.go +++ /dev/null @@ -1,107 +0,0 @@ -package formatter - -import ( - "bytes" - "fmt" - "strings" - - "github.com/chavacava/garif" - "github.com/mgechev/revive/lint" -) - -// Sarif is an implementation of the Formatter interface -// which formats revive failures into SARIF format. -type Sarif struct { - Metadata lint.FormatterMetadata -} - -// Name returns the name of the formatter -func (*Sarif) Name() string { - return "sarif" -} - -const reviveSite = "https://revive.run" - -// Format formats the failures gotten from the lint. -func (*Sarif) Format(failures <-chan lint.Failure, cfg lint.Config) (string, error) { - sarifLog := newReviveRunLog(cfg) - - for failure := range failures { - sarifLog.AddResult(failure) - } - - buf := new(bytes.Buffer) - sarifLog.PrettyWrite(buf) - - return buf.String(), nil -} - -type reviveRunLog struct { - *garif.LogFile - run *garif.Run - rules map[string]lint.RuleConfig -} - -func newReviveRunLog(cfg lint.Config) *reviveRunLog { - run := garif.NewRun(garif.NewTool(garif.NewDriver("revive").WithInformationUri(reviveSite))) - log := garif.NewLogFile([]*garif.Run{run}, garif.Version210) - - reviveLog := &reviveRunLog{ - log, - run, - cfg.Rules, - } - - reviveLog.addRules(cfg.Rules) - - return reviveLog -} - -func (l *reviveRunLog) addRules(cfg map[string]lint.RuleConfig) { - for name, ruleCfg := range cfg { - rule := garif.NewRule(name).WithHelpUri(reviveSite + "/r#" + name) - setRuleProperties(rule, ruleCfg) - driver := l.run.Tool.Driver - - if driver.Rules == nil { - driver.Rules = []*garif.ReportingDescriptor{rule} - return - } - - driver.Rules = append(driver.Rules, rule) - } -} - -func (l *reviveRunLog) AddResult(failure lint.Failure) { - positiveOrZero := func(x int) int { - if x > 0 { - return x - } - return 0 - } - position := failure.Position - filename := position.Start.Filename - line := positiveOrZero(position.Start.Line) // https://docs.oasis-open.org/sarif/sarif/v2.1.0/csprd01/sarif-v2.1.0-csprd01.html#def_line - column := positiveOrZero(position.Start.Column) // https://docs.oasis-open.org/sarif/sarif/v2.1.0/csprd01/sarif-v2.1.0-csprd01.html#def_column - - result := garif.NewResult(garif.NewMessageFromText(failure.Failure)) - location := garif.NewLocation().WithURI(filename).WithLineColumn(line, column) - result.Locations = append(result.Locations, location) - result.RuleId = failure.RuleName - result.Level = garif.ResultLevel(l.rules[failure.RuleName].Severity) - - l.run.Results = append(l.run.Results, result) -} - -func setRuleProperties(sarifRule *garif.ReportingDescriptor, lintRule lint.RuleConfig) { - arguments := make([]string, len(lintRule.Arguments)) - for i, arg := range lintRule.Arguments { - arguments[i] = fmt.Sprintf("%+v", arg) - } - - if len(arguments) > 0 { - sarifRule.WithProperties("arguments", strings.Join(arguments, ",")) - } - - sarifRule.WithProperties("severity", string(lintRule.Severity)) -} diff --git a/vendor/github.com/mgechev/revive/formatter/severity.go b/vendor/github.com/mgechev/revive/formatter/severity.go 
deleted file mode 100644 index a43bf3192..000000000 --- a/vendor/github.com/mgechev/revive/formatter/severity.go +++ /dev/null @@ -1,13 +0,0 @@ -package formatter - -import "github.com/mgechev/revive/lint" - -func severity(config lint.Config, failure lint.Failure) lint.Severity { - if config, ok := config.Rules[failure.RuleName]; ok && config.Severity == lint.SeverityError { - return lint.SeverityError - } - if config, ok := config.Directives[failure.RuleName]; ok && config.Severity == lint.SeverityError { - return lint.SeverityError - } - return lint.SeverityWarning -} diff --git a/vendor/github.com/mgechev/revive/formatter/stylish.go b/vendor/github.com/mgechev/revive/formatter/stylish.go deleted file mode 100644 index 828228c72..000000000 --- a/vendor/github.com/mgechev/revive/formatter/stylish.go +++ /dev/null @@ -1,89 +0,0 @@ -package formatter - -import ( - "bytes" - "fmt" - - "github.com/fatih/color" - "github.com/mgechev/revive/lint" - "github.com/olekukonko/tablewriter" -) - -// Stylish is an implementation of the Formatter interface -// which formats the errors to JSON. -type Stylish struct { - Metadata lint.FormatterMetadata -} - -// Name returns the name of the formatter -func (*Stylish) Name() string { - return "stylish" -} - -func formatFailure(failure lint.Failure, severity lint.Severity) []string { - fString := color.CyanString(failure.Failure) - fName := color.RedString("https://revive.run/r#" + failure.RuleName) - lineColumn := failure.Position - pos := fmt.Sprintf("(%d, %d)", lineColumn.Start.Line, lineColumn.Start.Column) - if severity == lint.SeverityWarning { - fName = color.YellowString("https://revive.run/r#" + failure.RuleName) - } - return []string{failure.GetFilename(), pos, fName, fString} -} - -// Format formats the failures gotten from the lint. 
-func (*Stylish) Format(failures <-chan lint.Failure, config lint.Config) (string, error) { - var result [][]string - totalErrors := 0 - total := 0 - - for f := range failures { - total++ - currentType := severity(config, f) - if currentType == lint.SeverityError { - totalErrors++ - } - result = append(result, formatFailure(f, lint.Severity(currentType))) - } - ps := "problems" - if total == 1 { - ps = "problem" - } - - fileReport := make(map[string][][]string) - - for _, row := range result { - if _, ok := fileReport[row[0]]; !ok { - fileReport[row[0]] = [][]string{} - } - - fileReport[row[0]] = append(fileReport[row[0]], []string{row[1], row[2], row[3]}) - } - - output := "" - for filename, val := range fileReport { - buf := new(bytes.Buffer) - table := tablewriter.NewWriter(buf) - table.SetBorder(false) - table.SetColumnSeparator("") - table.SetRowSeparator("") - table.SetAutoWrapText(false) - table.AppendBulk(val) - table.Render() - c := color.New(color.Underline) - output += c.SprintfFunc()(filename + "\n") - output += buf.String() + "\n" - } - - suffix := fmt.Sprintf(" %d %s (%d errors) (%d warnings)", total, ps, totalErrors, total-totalErrors) - - if total > 0 && totalErrors > 0 { - suffix = color.RedString("\n ✖" + suffix) - } else if total > 0 && totalErrors == 0 { - suffix = color.YellowString("\n ✖" + suffix) - } else { - suffix, output = "", "" - } - - return output + suffix, nil -} diff --git a/vendor/github.com/mgechev/revive/formatter/unix.go b/vendor/github.com/mgechev/revive/formatter/unix.go deleted file mode 100644 index e46f3c275..000000000 --- a/vendor/github.com/mgechev/revive/formatter/unix.go +++ /dev/null @@ -1,30 +0,0 @@ -package formatter - -import ( - "bytes" - "fmt" - - "github.com/mgechev/revive/lint" -) - -// Unix is an implementation of the Formatter interface -// which formats the errors to a simple line based error format -// -// main.go:24:9: [errorf] should replace errors.New(fmt.Sprintf(...)) with fmt.Errorf(...) -type Unix struct { - Metadata lint.FormatterMetadata -} - -// Name returns the name of the formatter -func (*Unix) Name() string { - return "unix" -} - -// Format formats the failures gotten from the lint. -func (*Unix) Format(failures <-chan lint.Failure, _ lint.Config) (string, error) { - var buf bytes.Buffer - for failure := range failures { - fmt.Fprintf(&buf, "%v: [%s] %s\n", failure.Position.Start, failure.RuleName, failure.Failure) - } - return buf.String(), nil -} diff --git a/vendor/github.com/mgechev/revive/internal/ifelse/args.go b/vendor/github.com/mgechev/revive/internal/ifelse/args.go deleted file mode 100644 index c6e647e69..000000000 --- a/vendor/github.com/mgechev/revive/internal/ifelse/args.go +++ /dev/null @@ -1,11 +0,0 @@ -package ifelse - -// PreserveScope is a configuration argument that prevents suggestions -// that would enlarge variable scope -const PreserveScope = "preserveScope" - -// Args contains arguments common to the early-return, indent-error-flow -// and superfluous-else rules (currently just preserveScope) -type Args struct { - PreserveScope bool -} diff --git a/vendor/github.com/mgechev/revive/internal/ifelse/branch.go b/vendor/github.com/mgechev/revive/internal/ifelse/branch.go deleted file mode 100644 index 6e6036b89..000000000 --- a/vendor/github.com/mgechev/revive/internal/ifelse/branch.go +++ /dev/null @@ -1,93 +0,0 @@ -package ifelse - -import ( - "fmt" - "go/ast" - "go/token" -) - -// Branch contains information about a branch within an if-else chain. 
-type Branch struct { - BranchKind - Call // The function called at the end for kind Panic or Exit. - HasDecls bool // The branch has one or more declarations (at the top level block) -} - -// BlockBranch gets the Branch of an ast.BlockStmt. -func BlockBranch(block *ast.BlockStmt) Branch { - blockLen := len(block.List) - if blockLen == 0 { - return Empty.Branch() - } - - branch := StmtBranch(block.List[blockLen-1]) - branch.HasDecls = hasDecls(block) - return branch -} - -// StmtBranch gets the Branch of an ast.Stmt. -func StmtBranch(stmt ast.Stmt) Branch { - switch stmt := stmt.(type) { - case *ast.ReturnStmt: - return Return.Branch() - case *ast.BlockStmt: - return BlockBranch(stmt) - case *ast.BranchStmt: - switch stmt.Tok { - case token.BREAK: - return Break.Branch() - case token.CONTINUE: - return Continue.Branch() - case token.GOTO: - return Goto.Branch() - } - case *ast.ExprStmt: - fn, ok := ExprCall(stmt) - if !ok { - break - } - kind, ok := DeviatingFuncs[fn] - if ok { - return Branch{BranchKind: kind, Call: fn} - } - case *ast.EmptyStmt: - return Empty.Branch() - case *ast.LabeledStmt: - return StmtBranch(stmt.Stmt) - } - return Regular.Branch() -} - -// String returns a brief string representation -func (b Branch) String() string { - switch b.BranchKind { - case Panic, Exit: - return fmt.Sprintf("... %v()", b.Call) - default: - return b.BranchKind.String() - } -} - -// LongString returns a longer form string representation -func (b Branch) LongString() string { - switch b.BranchKind { - case Panic, Exit: - return fmt.Sprintf("call to %v function", b.Call) - default: - return b.BranchKind.LongString() - } -} - -func hasDecls(block *ast.BlockStmt) bool { - for _, stmt := range block.List { - switch stmt := stmt.(type) { - case *ast.DeclStmt: - return true - case *ast.AssignStmt: - if stmt.Tok == token.DEFINE { - return true - } - } - } - return false -} diff --git a/vendor/github.com/mgechev/revive/internal/ifelse/branch_kind.go b/vendor/github.com/mgechev/revive/internal/ifelse/branch_kind.go deleted file mode 100644 index 41601d1e1..000000000 --- a/vendor/github.com/mgechev/revive/internal/ifelse/branch_kind.go +++ /dev/null @@ -1,101 +0,0 @@ -package ifelse - -// BranchKind is a classifier for if-else branches. It says whether the branch is empty, -// and whether the branch ends with a statement that deviates control flow. -type BranchKind int - -const ( - // Empty branches do nothing - Empty BranchKind = iota - - // Return branches return from the current function - Return - - // Continue branches continue a surrounding "for" loop - Continue - - // Break branches break a surrounding "for" loop - Break - - // Goto branches conclude with a "goto" statement - Goto - - // Panic branches panic the current function - Panic - - // Exit branches end the program - Exit - - // Regular branches do not fit any category above - Regular -) - -// IsEmpty tests if the branch is empty -func (k BranchKind) IsEmpty() bool { return k == Empty } - -// Returns tests if the branch returns from the current function -func (k BranchKind) Returns() bool { return k == Return } - -// Deviates tests if the control does not flow to the first -// statement following the if-else chain. 
-func (k BranchKind) Deviates() bool { - switch k { - case Empty, Regular: - return false - case Return, Continue, Break, Goto, Panic, Exit: - return true - default: - panic("invalid kind") - } -} - -// Branch returns a Branch with the given kind -func (k BranchKind) Branch() Branch { return Branch{BranchKind: k} } - -// String returns a brief string representation -func (k BranchKind) String() string { - switch k { - case Empty: - return "" - case Regular: - return "..." - case Return: - return "... return" - case Continue: - return "... continue" - case Break: - return "... break" - case Goto: - return "... goto" - case Panic: - return "... panic()" - case Exit: - return "... os.Exit()" - default: - panic("invalid kind") - } -} - -// LongString returns a longer form string representation -func (k BranchKind) LongString() string { - switch k { - case Empty: - return "an empty block" - case Regular: - return "a regular statement" - case Return: - return "a return statement" - case Continue: - return "a continue statement" - case Break: - return "a break statement" - case Goto: - return "a goto statement" - case Panic: - return "a function call that panics" - case Exit: - return "a function call that exits the program" - default: - panic("invalid kind") - } -} diff --git a/vendor/github.com/mgechev/revive/internal/ifelse/chain.go b/vendor/github.com/mgechev/revive/internal/ifelse/chain.go deleted file mode 100644 index 9891635ee..000000000 --- a/vendor/github.com/mgechev/revive/internal/ifelse/chain.go +++ /dev/null @@ -1,10 +0,0 @@ -package ifelse - -// Chain contains information about an if-else chain. -type Chain struct { - If Branch // what happens at the end of the "if" block - Else Branch // what happens at the end of the "else" block - HasInitializer bool // is there an "if"-initializer somewhere in the chain? - HasPriorNonDeviating bool // is there a prior "if" block that does NOT deviate control flow? - AtBlockEnd bool // whether the chain is placed at the end of the surrounding block -} diff --git a/vendor/github.com/mgechev/revive/internal/ifelse/doc.go b/vendor/github.com/mgechev/revive/internal/ifelse/doc.go deleted file mode 100644 index 0aa2c9817..000000000 --- a/vendor/github.com/mgechev/revive/internal/ifelse/doc.go +++ /dev/null @@ -1,6 +0,0 @@ -// Package ifelse provides helpers for analysing the control flow in if-else chains, -// presently used by the following rules: -// - early-return -// - indent-error-flow -// - superfluous-else -package ifelse diff --git a/vendor/github.com/mgechev/revive/internal/ifelse/func.go b/vendor/github.com/mgechev/revive/internal/ifelse/func.go deleted file mode 100644 index 7ba351918..000000000 --- a/vendor/github.com/mgechev/revive/internal/ifelse/func.go +++ /dev/null @@ -1,51 +0,0 @@ -package ifelse - -import ( - "fmt" - "go/ast" -) - -// Call contains the name of a function that deviates control flow. -type Call struct { - Pkg string // The package qualifier of the function, if not built-in. - Name string // The function name. -} - -// DeviatingFuncs lists known control flow deviating function calls. -var DeviatingFuncs = map[Call]BranchKind{ - {"os", "Exit"}: Exit, - {"log", "Fatal"}: Exit, - {"log", "Fatalf"}: Exit, - {"log", "Fatalln"}: Exit, - {"", "panic"}: Panic, - {"log", "Panic"}: Panic, - {"log", "Panicf"}: Panic, - {"log", "Panicln"}: Panic, -} - -// ExprCall gets the Call of an ExprStmt, if any. 
-func ExprCall(expr *ast.ExprStmt) (Call, bool) { - call, ok := expr.X.(*ast.CallExpr) - if !ok { - return Call{}, false - } - switch v := call.Fun.(type) { - case *ast.Ident: - return Call{Name: v.Name}, true - case *ast.SelectorExpr: - if ident, ok := v.X.(*ast.Ident); ok { - return Call{Name: v.Sel.Name, Pkg: ident.Name}, true - } - } - return Call{}, false -} - -// String returns the function name with package qualifier (if any) -func (f Call) String() string { - switch { - case f.Pkg != "": - return fmt.Sprintf("%s.%s", f.Pkg, f.Name) - default: - return f.Name - } -} diff --git a/vendor/github.com/mgechev/revive/internal/ifelse/rule.go b/vendor/github.com/mgechev/revive/internal/ifelse/rule.go deleted file mode 100644 index 07ad456b6..000000000 --- a/vendor/github.com/mgechev/revive/internal/ifelse/rule.go +++ /dev/null @@ -1,105 +0,0 @@ -package ifelse - -import ( - "go/ast" - "go/token" - - "github.com/mgechev/revive/lint" -) - -// Rule is an interface for linters operating on if-else chains -type Rule interface { - CheckIfElse(chain Chain, args Args) (failMsg string) -} - -// Apply evaluates the given Rule on if-else chains found within the given AST, -// and returns the failures. -// -// Note that in if-else chain with multiple "if" blocks, only the *last* one is checked, -// that is to say, given: -// -// if foo { -// ... -// } else if bar { -// ... -// } else { -// ... -// } -// -// Only the block following "bar" is linted. This is because the rules that use this function -// do not presently have anything to say about earlier blocks in the chain. -func Apply(rule Rule, node ast.Node, target Target, args lint.Arguments) []lint.Failure { - v := &visitor{rule: rule, target: target} - for _, arg := range args { - if arg == PreserveScope { - v.args.PreserveScope = true - } - } - ast.Walk(v, node) - return v.failures -} - -type visitor struct { - failures []lint.Failure - target Target - rule Rule - args Args -} - -func (v *visitor) Visit(node ast.Node) ast.Visitor { - block, ok := node.(*ast.BlockStmt) - if !ok { - return v - } - - for i, stmt := range block.List { - if ifStmt, ok := stmt.(*ast.IfStmt); ok { - v.visitChain(ifStmt, Chain{AtBlockEnd: i == len(block.List)-1}) - continue - } - ast.Walk(v, stmt) - } - return nil -} - -func (v *visitor) visitChain(ifStmt *ast.IfStmt, chain Chain) { - // look for other if-else chains nested inside this if { } block - ast.Walk(v, ifStmt.Body) - - if ifStmt.Else == nil { - // no else branch - return - } - - if as, ok := ifStmt.Init.(*ast.AssignStmt); ok && as.Tok == token.DEFINE { - chain.HasInitializer = true - } - chain.If = BlockBranch(ifStmt.Body) - - switch elseBlock := ifStmt.Else.(type) { - case *ast.IfStmt: - if !chain.If.Deviates() { - chain.HasPriorNonDeviating = true - } - v.visitChain(elseBlock, chain) - case *ast.BlockStmt: - // look for other if-else chains nested inside this else { } block - ast.Walk(v, elseBlock) - - chain.Else = BlockBranch(elseBlock) - if failMsg := v.rule.CheckIfElse(chain, v.args); failMsg != "" { - if chain.HasInitializer { - // if statement has a := initializer, so we might need to move the assignment - // onto its own line in case the body references it - failMsg += " (move short variable declaration to its own line if necessary)" - } - v.failures = append(v.failures, lint.Failure{ - Confidence: 1, - Node: v.target.node(ifStmt), - Failure: failMsg, - }) - } - default: - panic("invalid node type for else") - } -} diff --git a/vendor/github.com/mgechev/revive/internal/ifelse/target.go 
b/vendor/github.com/mgechev/revive/internal/ifelse/target.go deleted file mode 100644 index 81ff1c303..000000000 --- a/vendor/github.com/mgechev/revive/internal/ifelse/target.go +++ /dev/null @@ -1,25 +0,0 @@ -package ifelse - -import "go/ast" - -// Target decides what line/column should be indicated by the rule in question. -type Target int - -const ( - // TargetIf means the text refers to the "if" - TargetIf Target = iota - - // TargetElse means the text refers to the "else" - TargetElse -) - -func (t Target) node(ifStmt *ast.IfStmt) ast.Node { - switch t { - case TargetIf: - return ifStmt - case TargetElse: - return ifStmt.Else - default: - panic("bad target") - } -} diff --git a/vendor/github.com/mgechev/revive/internal/typeparams/typeparams.go b/vendor/github.com/mgechev/revive/internal/typeparams/typeparams.go deleted file mode 100644 index 33c4f203e..000000000 --- a/vendor/github.com/mgechev/revive/internal/typeparams/typeparams.go +++ /dev/null @@ -1,29 +0,0 @@ -// Package typeparams provides utilities for working with Go ASTs with support -// for type parameters when built with Go 1.18 and higher. -package typeparams - -import ( - "go/ast" -) - -// Enabled reports whether type parameters are enabled in the current build -// environment. -func Enabled() bool { - return enabled -} - -// ReceiverType returns the named type of the method receiver, sans "*" and type -// parameters, or "invalid-type" if fn.Recv is ill formed. -func ReceiverType(fn *ast.FuncDecl) string { - e := fn.Recv.List[0].Type - if s, ok := e.(*ast.StarExpr); ok { - e = s.X - } - if enabled { - e = unpackIndexExpr(e) - } - if id, ok := e.(*ast.Ident); ok { - return id.Name - } - return "invalid-type" -} diff --git a/vendor/github.com/mgechev/revive/internal/typeparams/typeparams_go117.go b/vendor/github.com/mgechev/revive/internal/typeparams/typeparams_go117.go deleted file mode 100644 index 913a7316e..000000000 --- a/vendor/github.com/mgechev/revive/internal/typeparams/typeparams_go117.go +++ /dev/null @@ -1,12 +0,0 @@ -//go:build !go1.18 -// +build !go1.18 - -package typeparams - -import ( - "go/ast" -) - -const enabled = false - -func unpackIndexExpr(e ast.Expr) ast.Expr { return e } diff --git a/vendor/github.com/mgechev/revive/internal/typeparams/typeparams_go118.go b/vendor/github.com/mgechev/revive/internal/typeparams/typeparams_go118.go deleted file mode 100644 index 0f7fd88d3..000000000 --- a/vendor/github.com/mgechev/revive/internal/typeparams/typeparams_go118.go +++ /dev/null @@ -1,20 +0,0 @@ -//go:build go1.18 -// +build go1.18 - -package typeparams - -import ( - "go/ast" -) - -const enabled = true - -func unpackIndexExpr(e ast.Expr) ast.Expr { - switch e := e.(type) { - case *ast.IndexExpr: - return e.X - case *ast.IndexListExpr: - return e.X - } - return e -} diff --git a/vendor/github.com/mgechev/revive/lint/config.go b/vendor/github.com/mgechev/revive/lint/config.go deleted file mode 100644 index 7e51a93c2..000000000 --- a/vendor/github.com/mgechev/revive/lint/config.go +++ /dev/null @@ -1,64 +0,0 @@ -package lint - -// Arguments is type used for the arguments of a rule. -type Arguments = []interface{} - -// FileFilters is type used for modeling file filters to apply to rules. -type FileFilters = []*FileFilter - -// RuleConfig is type used for the rule configuration. 
-type RuleConfig struct { - Arguments Arguments - Severity Severity - Disabled bool - // Exclude - rule-level file excludes, TOML related (strings) - Exclude []string - // excludeFilters - regex-based file filters, initialized from Exclude - excludeFilters []*FileFilter -} - -// Initialize - should be called after reading from TOML file -func (rc *RuleConfig) Initialize() error { - for _, f := range rc.Exclude { - ff, err := ParseFileFilter(f) - if err != nil { - return err - } - rc.excludeFilters = append(rc.excludeFilters, ff) - } - return nil -} - -// RulesConfig defines the config for all rules. -type RulesConfig = map[string]RuleConfig - -// MustExclude - checks if given filename `name` must be excluded -func (rc *RuleConfig) MustExclude(name string) bool { - for _, exclude := range rc.excludeFilters { - if exclude.MatchFileName(name) { - return true - } - } - return false -} - -// DirectiveConfig is type used for the linter directive configuration. -type DirectiveConfig struct { - Severity Severity -} - -// DirectivesConfig defines the config for all directives. -type DirectivesConfig = map[string]DirectiveConfig - -// Config defines the config of the linter. -type Config struct { - IgnoreGeneratedHeader bool `toml:"ignoreGeneratedHeader"` - Confidence float64 - Severity Severity - EnableAllRules bool `toml:"enableAllRules"` - Rules RulesConfig `toml:"rule"` - ErrorCode int `toml:"errorCode"` - WarningCode int `toml:"warningCode"` - Directives DirectivesConfig `toml:"directive"` - Exclude []string `toml:"exclude"` -} diff --git a/vendor/github.com/mgechev/revive/lint/doc.go b/vendor/github.com/mgechev/revive/lint/doc.go deleted file mode 100644 index 7048adf4b..000000000 --- a/vendor/github.com/mgechev/revive/lint/doc.go +++ /dev/null @@ -1,2 +0,0 @@ -// Package lint implements the linting machinery. -package lint diff --git a/vendor/github.com/mgechev/revive/lint/failure.go b/vendor/github.com/mgechev/revive/lint/failure.go deleted file mode 100644 index 479b0cb48..000000000 --- a/vendor/github.com/mgechev/revive/lint/failure.go +++ /dev/null @@ -1,39 +0,0 @@ -package lint - -import ( - "go/ast" - "go/token" -) - -const ( - // SeverityWarning declares failures of type warning - SeverityWarning = "warning" - // SeverityError declares failures of type error. - SeverityError = "error" -) - -// Severity is the type for the failure types. -type Severity string - -// FailurePosition returns the failure position -type FailurePosition struct { - Start token.Position - End token.Position -} - -// Failure defines a struct for a linting failure. -type Failure struct { - Failure string - RuleName string - Category string - Position FailurePosition - Node ast.Node `json:"-"` - Confidence float64 - // For future use - ReplacementLine string -} - -// GetFilename returns the filename. -func (f *Failure) GetFilename() string { - return f.Position.Start.Filename -} diff --git a/vendor/github.com/mgechev/revive/lint/file.go b/vendor/github.com/mgechev/revive/lint/file.go deleted file mode 100644 index 23255304c..000000000 --- a/vendor/github.com/mgechev/revive/lint/file.go +++ /dev/null @@ -1,280 +0,0 @@ -package lint - -import ( - "bytes" - "go/ast" - "go/parser" - "go/printer" - "go/token" - "go/types" - "math" - "regexp" - "strings" -) - -// File abstraction used for representing files. -type File struct { - Name string - Pkg *Package - content []byte - AST *ast.File -} - -// IsTest returns if the file contains tests. 
-func (f *File) IsTest() bool { return strings.HasSuffix(f.Name, "_test.go") } - -// Content returns the file's content. -func (f *File) Content() []byte { - return f.content -} - -// NewFile creates a new file -func NewFile(name string, content []byte, pkg *Package) (*File, error) { - f, err := parser.ParseFile(pkg.fset, name, content, parser.ParseComments) - if err != nil { - return nil, err - } - return &File{ - Name: name, - content: content, - Pkg: pkg, - AST: f, - }, nil -} - -// ToPosition returns line and column for given position. -func (f *File) ToPosition(pos token.Pos) token.Position { - return f.Pkg.fset.Position(pos) -} - -// Render renders a node. -func (f *File) Render(x interface{}) string { - var buf bytes.Buffer - if err := printer.Fprint(&buf, f.Pkg.fset, x); err != nil { - panic(err) - } - return buf.String() -} - -// CommentMap builds a comment map for the file. -func (f *File) CommentMap() ast.CommentMap { - return ast.NewCommentMap(f.Pkg.fset, f.AST, f.AST.Comments) -} - -var basicTypeKinds = map[types.BasicKind]string{ - types.UntypedBool: "bool", - types.UntypedInt: "int", - types.UntypedRune: "rune", - types.UntypedFloat: "float64", - types.UntypedComplex: "complex128", - types.UntypedString: "string", -} - -// IsUntypedConst reports whether expr is an untyped constant, -// and indicates what its default type is. -// scope may be nil. -func (f *File) IsUntypedConst(expr ast.Expr) (defType string, ok bool) { - // Re-evaluate expr outside its context to see if it's untyped. - // (An expr evaluated within, for example, an assignment context will get the type of the LHS.) - exprStr := f.Render(expr) - tv, err := types.Eval(f.Pkg.fset, f.Pkg.TypesPkg(), expr.Pos(), exprStr) - if err != nil { - return "", false - } - if b, ok := tv.Type.(*types.Basic); ok { - if dt, ok := basicTypeKinds[b.Kind()]; ok { - return dt, true - } - } - - return "", false -} - -func (f *File) isMain() bool { - return f.AST.Name.Name == "main" -} - -const directiveSpecifyDisableReason = "specify-disable-reason" - -func (f *File) lint(rules []Rule, config Config, failures chan Failure) { - rulesConfig := config.Rules - _, mustSpecifyDisableReason := config.Directives[directiveSpecifyDisableReason] - disabledIntervals := f.disabledIntervals(rules, mustSpecifyDisableReason, failures) - for _, currentRule := range rules { - ruleConfig := rulesConfig[currentRule.Name()] - if ruleConfig.MustExclude(f.Name) { - continue - } - currentFailures := currentRule.Apply(f, ruleConfig.Arguments) - for idx, failure := range currentFailures { - if failure.RuleName == "" { - failure.RuleName = currentRule.Name() - } - if failure.Node != nil { - failure.Position = ToFailurePosition(failure.Node.Pos(), failure.Node.End(), f) - } - currentFailures[idx] = failure - } - currentFailures = f.filterFailures(currentFailures, disabledIntervals) - for _, failure := range currentFailures { - if failure.Confidence >= config.Confidence { - failures <- failure - } - } - } -} - -type enableDisableConfig struct { - enabled bool - position int -} - -const ( - directiveRE = `^//[\s]*revive:(enable|disable)(?:-(line|next-line))?(?::([^\s]+))?[\s]*(?: (.+))?$` - directivePos = 1 - modifierPos = 2 - rulesPos = 3 - reasonPos = 4 -) - -var re = regexp.MustCompile(directiveRE) - -func (f *File) disabledIntervals(rules []Rule, mustSpecifyDisableReason bool, failures chan Failure) disabledIntervalsMap { - enabledDisabledRulesMap := make(map[string][]enableDisableConfig) - - getEnabledDisabledIntervals := func() disabledIntervalsMap { - 
result := make(disabledIntervalsMap) - - for ruleName, disabledArr := range enabledDisabledRulesMap { - ruleResult := []DisabledInterval{} - for i := 0; i < len(disabledArr); i++ { - interval := DisabledInterval{ - RuleName: ruleName, - From: token.Position{ - Filename: f.Name, - Line: disabledArr[i].position, - }, - To: token.Position{ - Filename: f.Name, - Line: math.MaxInt32, - }, - } - if i%2 == 0 { - ruleResult = append(ruleResult, interval) - } else { - ruleResult[len(ruleResult)-1].To.Line = disabledArr[i].position - } - } - result[ruleName] = ruleResult - } - - return result - } - - handleConfig := func(isEnabled bool, line int, name string) { - existing, ok := enabledDisabledRulesMap[name] - if !ok { - existing = []enableDisableConfig{} - enabledDisabledRulesMap[name] = existing - } - if (len(existing) > 1 && existing[len(existing)-1].enabled == isEnabled) || - (len(existing) == 0 && isEnabled) { - return - } - existing = append(existing, enableDisableConfig{ - enabled: isEnabled, - position: line, - }) - enabledDisabledRulesMap[name] = existing - } - - handleRules := func(filename, modifier string, isEnabled bool, line int, ruleNames []string) []DisabledInterval { - var result []DisabledInterval - for _, name := range ruleNames { - if modifier == "line" { - handleConfig(isEnabled, line, name) - handleConfig(!isEnabled, line, name) - } else if modifier == "next-line" { - handleConfig(isEnabled, line+1, name) - handleConfig(!isEnabled, line+1, name) - } else { - handleConfig(isEnabled, line, name) - } - } - return result - } - - handleComment := func(filename string, c *ast.CommentGroup, line int) { - comments := c.List - for _, c := range comments { - match := re.FindStringSubmatch(c.Text) - if len(match) == 0 { - continue - } - ruleNames := []string{} - tempNames := strings.Split(match[rulesPos], ",") - - for _, name := range tempNames { - name = strings.Trim(name, "\n") - if len(name) > 0 { - ruleNames = append(ruleNames, name) - } - } - - mustCheckDisablingReason := mustSpecifyDisableReason && match[directivePos] == "disable" - if mustCheckDisablingReason && strings.Trim(match[reasonPos], " ") == "" { - failures <- Failure{ - Confidence: 1, - RuleName: directiveSpecifyDisableReason, - Failure: "reason of lint disabling not found", - Position: ToFailurePosition(c.Pos(), c.End(), f), - Node: c, - } - continue // skip this linter disabling directive - } - - // TODO: optimize - if len(ruleNames) == 0 { - for _, rule := range rules { - ruleNames = append(ruleNames, rule.Name()) - } - } - - handleRules(filename, match[modifierPos], match[directivePos] == "enable", line, ruleNames) - } - } - - comments := f.AST.Comments - for _, c := range comments { - handleComment(f.Name, c, f.ToPosition(c.End()).Line) - } - - return getEnabledDisabledIntervals() -} - -func (File) filterFailures(failures []Failure, disabledIntervals disabledIntervalsMap) []Failure { - result := []Failure{} - for _, failure := range failures { - fStart := failure.Position.Start.Line - fEnd := failure.Position.End.Line - intervals, ok := disabledIntervals[failure.RuleName] - if !ok { - result = append(result, failure) - } else { - include := true - for _, interval := range intervals { - intStart := interval.From.Line - intEnd := interval.To.Line - if (fStart >= intStart && fStart <= intEnd) || - (fEnd >= intStart && fEnd <= intEnd) { - include = false - break - } - } - if include { - result = append(result, failure) - } - } - } - return result -} diff --git a/vendor/github.com/mgechev/revive/lint/filefilter.go 
b/vendor/github.com/mgechev/revive/lint/filefilter.go deleted file mode 100644 index 8da090b9c..000000000 --- a/vendor/github.com/mgechev/revive/lint/filefilter.go +++ /dev/null @@ -1,128 +0,0 @@ -package lint - -import ( - "fmt" - "regexp" - "strings" -) - -// FileFilter - file filter to exclude some files for rule -// supports whole -// 1. file/dir names : pkg/mypkg/my.go, -// 2. globs: **/*.pb.go, -// 3. regexes (~ prefix) ~-tmp\.\d+\.go -// 4. special test marker `TEST` - treats as `~_test\.go` -type FileFilter struct { - // raw definition of filter inside config - raw string - // don't care what was at start, will use regexes inside - rx *regexp.Regexp - // marks filter as matching everything - matchesAll bool - // marks filter as matching nothing - matchesNothing bool -} - -// ParseFileFilter - creates [FileFilter] for given raw filter -// if empty string, it matches nothing -// if `*`, or `~`, it matches everything -// while regexp could be invalid, it could return it's compilation error -func ParseFileFilter(rawFilter string) (*FileFilter, error) { - rawFilter = strings.TrimSpace(rawFilter) - result := new(FileFilter) - result.raw = rawFilter - result.matchesNothing = len(result.raw) == 0 - result.matchesAll = result.raw == "*" || result.raw == "~" - if !result.matchesAll && !result.matchesNothing { - if err := result.prepareRegexp(); err != nil { - return nil, err - } - } - return result, nil -} - -func (ff *FileFilter) String() string { return ff.raw } - -// MatchFileName - checks if file name matches filter -func (ff *FileFilter) MatchFileName(name string) bool { - if ff.matchesAll { - return true - } - if ff.matchesNothing { - return false - } - name = strings.ReplaceAll(name, "\\", "/") - return ff.rx.MatchString(name) -} - -var fileFilterInvalidGlobRegexp = regexp.MustCompile(`[^/]\*\*[^/]`) -var escapeRegexSymbols = ".+{}()[]^$" - -func (ff *FileFilter) prepareRegexp() error { - var err error - var src = ff.raw - if src == "TEST" { - src = "~_test\\.go" - } - if strings.HasPrefix(src, "~") { - ff.rx, err = regexp.Compile(src[1:]) - if err != nil { - return fmt.Errorf("invalid file filter [%s], regexp compile error: [%v]", ff.raw, err) - } - return nil - } - /* globs */ - if strings.Contains(src, "*") { - if fileFilterInvalidGlobRegexp.MatchString(src) { - return fmt.Errorf("invalid file filter [%s], invalid glob pattern", ff.raw) - } - var rxBuild strings.Builder - rxBuild.WriteByte('^') - wasStar := false - justDirGlob := false - for _, c := range src { - if c == '*' { - if wasStar { - rxBuild.WriteString(`[\s\S]*`) - wasStar = false - justDirGlob = true - continue - } - wasStar = true - continue - } - if wasStar { - rxBuild.WriteString("[^/]*") - wasStar = false - } - if strings.ContainsRune(escapeRegexSymbols, c) { - rxBuild.WriteByte('\\') - } - rxBuild.WriteRune(c) - if c == '/' && justDirGlob { - rxBuild.WriteRune('?') - } - justDirGlob = false - } - if wasStar { - rxBuild.WriteString("[^/]*") - } - rxBuild.WriteByte('$') - ff.rx, err = regexp.Compile(rxBuild.String()) - if err != nil { - return fmt.Errorf("invalid file filter [%s], regexp compile error after glob expand: [%v]", ff.raw, err) - } - return nil - } - - // it's whole file mask, just escape dots and normilze separators - fillRx := src - fillRx = strings.ReplaceAll(fillRx, "\\", "/") - fillRx = strings.ReplaceAll(fillRx, ".", `\.`) - fillRx = "^" + fillRx + "$" - ff.rx, err = regexp.Compile(fillRx) - if err != nil { - return fmt.Errorf("invalid file filter [%s], regexp compile full path: [%v]", ff.raw, err) 
- } - return nil -} diff --git a/vendor/github.com/mgechev/revive/lint/formatter.go b/vendor/github.com/mgechev/revive/lint/formatter.go deleted file mode 100644 index 7c19af278..000000000 --- a/vendor/github.com/mgechev/revive/lint/formatter.go +++ /dev/null @@ -1,14 +0,0 @@ -package lint - -// FormatterMetadata configuration of a formatter -type FormatterMetadata struct { - Name string - Description string - Sample string -} - -// Formatter defines an interface for failure formatters -type Formatter interface { - Format(<-chan Failure, Config) (string, error) - Name() string -} diff --git a/vendor/github.com/mgechev/revive/lint/linter.go b/vendor/github.com/mgechev/revive/lint/linter.go deleted file mode 100644 index fb1ab6f28..000000000 --- a/vendor/github.com/mgechev/revive/lint/linter.go +++ /dev/null @@ -1,163 +0,0 @@ -package lint - -import ( - "bufio" - "bytes" - "fmt" - "go/token" - "os" - "regexp" - "strconv" - "sync" -) - -// ReadFile defines an abstraction for reading files. -type ReadFile func(path string) (result []byte, err error) - -type disabledIntervalsMap = map[string][]DisabledInterval - -// Linter is used for linting set of files. -type Linter struct { - reader ReadFile - fileReadTokens chan struct{} -} - -// New creates a new Linter -func New(reader ReadFile, maxOpenFiles int) Linter { - var fileReadTokens chan struct{} - if maxOpenFiles > 0 { - fileReadTokens = make(chan struct{}, maxOpenFiles) - } - return Linter{ - reader: reader, - fileReadTokens: fileReadTokens, - } -} - -func (l Linter) readFile(path string) (result []byte, err error) { - if l.fileReadTokens != nil { - // "take" a token by writing to the channel. - // It will block if no more space in the channel's buffer - l.fileReadTokens <- struct{}{} - defer func() { - // "free" a token by reading from the channel - <-l.fileReadTokens - }() - } - - return l.reader(path) -} - -var ( - genHdr = []byte("// Code generated ") - genFtr = []byte(" DO NOT EDIT.") -) - -// Lint lints a set of files with the specified rule. -func (l *Linter) Lint(packages [][]string, ruleSet []Rule, config Config) (<-chan Failure, error) { - failures := make(chan Failure) - - var wg sync.WaitGroup - for _, pkg := range packages { - wg.Add(1) - go func(pkg []string) { - if err := l.lintPackage(pkg, ruleSet, config, failures); err != nil { - fmt.Fprintln(os.Stderr, err) - os.Exit(1) - } - defer wg.Done() - }(pkg) - } - - go func() { - wg.Wait() - close(failures) - }() - - return failures, nil -} - -func (l *Linter) lintPackage(filenames []string, ruleSet []Rule, config Config, failures chan Failure) error { - pkg := &Package{ - fset: token.NewFileSet(), - files: map[string]*File{}, - } - for _, filename := range filenames { - content, err := l.readFile(filename) - if err != nil { - return err - } - if !config.IgnoreGeneratedHeader && isGenerated(content) { - continue - } - - file, err := NewFile(filename, content, pkg) - if err != nil { - addInvalidFileFailure(filename, err.Error(), failures) - continue - } - pkg.files[filename] = file - } - - if len(pkg.files) == 0 { - return nil - } - - pkg.lint(ruleSet, config, failures) - - return nil -} - -// isGenerated reports whether the source file is generated code -// according the rules from https://golang.org/s/generatedcode. -// This is inherited from the original go lint. 
-func isGenerated(src []byte) bool { - sc := bufio.NewScanner(bytes.NewReader(src)) - for sc.Scan() { - b := sc.Bytes() - if bytes.HasPrefix(b, genHdr) && bytes.HasSuffix(b, genFtr) && len(b) >= len(genHdr)+len(genFtr) { - return true - } - } - return false -} - -// addInvalidFileFailure adds a failure for an invalid formatted file -func addInvalidFileFailure(filename, errStr string, failures chan Failure) { - position := getPositionInvalidFile(filename, errStr) - failures <- Failure{ - Confidence: 1, - Failure: fmt.Sprintf("invalid file %s: %v", filename, errStr), - Category: "validity", - Position: position, - } -} - -// errPosRegexp matches with an NewFile error message -// i.e. : corrupted.go:10:4: expected '}', found 'EOF -// first group matches the line and the second group, the column -var errPosRegexp = regexp.MustCompile(`.*:(\d*):(\d*):.*$`) - -// getPositionInvalidFile gets the position of the error in an invalid file -func getPositionInvalidFile(filename, s string) FailurePosition { - pos := errPosRegexp.FindStringSubmatch(s) - if len(pos) < 3 { - return FailurePosition{} - } - line, err := strconv.Atoi(pos[1]) - if err != nil { - return FailurePosition{} - } - column, err := strconv.Atoi(pos[2]) - if err != nil { - return FailurePosition{} - } - - return FailurePosition{ - Start: token.Position{ - Filename: filename, - Line: line, - Column: column, - }, - } -} diff --git a/vendor/github.com/mgechev/revive/lint/package.go b/vendor/github.com/mgechev/revive/lint/package.go deleted file mode 100644 index 5976acf99..000000000 --- a/vendor/github.com/mgechev/revive/lint/package.go +++ /dev/null @@ -1,190 +0,0 @@ -package lint - -import ( - "go/ast" - "go/importer" - "go/token" - "go/types" - "sync" - - "github.com/mgechev/revive/internal/typeparams" -) - -// Package represents a package in the project. -type Package struct { - fset *token.FileSet - files map[string]*File - - typesPkg *types.Package - typesInfo *types.Info - - // sortable is the set of types in the package that implement sort.Interface. - sortable map[string]bool - // main is whether this is a "main" package. - main int - sync.RWMutex -} - -var ( - trueValue = 1 - falseValue = 2 - notSet = 3 -) - -// Files return package's files. -func (p *Package) Files() map[string]*File { - return p.files -} - -// IsMain returns if that's the main package. -func (p *Package) IsMain() bool { - p.Lock() - defer p.Unlock() - - if p.main == trueValue { - return true - } else if p.main == falseValue { - return false - } - for _, f := range p.files { - if f.isMain() { - p.main = trueValue - return true - } - } - p.main = falseValue - return false -} - -// TypesPkg yields information on this package -func (p *Package) TypesPkg() *types.Package { - p.RLock() - defer p.RUnlock() - return p.typesPkg -} - -// TypesInfo yields type information of this package identifiers -func (p *Package) TypesInfo() *types.Info { - p.RLock() - defer p.RUnlock() - return p.typesInfo -} - -// Sortable yields a map of sortable types in this package -func (p *Package) Sortable() map[string]bool { - p.RLock() - defer p.RUnlock() - return p.sortable -} - -// TypeCheck performs type checking for given package. -func (p *Package) TypeCheck() error { - p.Lock() - defer p.Unlock() - - // If type checking has already been performed - // skip it. - if p.typesInfo != nil || p.typesPkg != nil { - return nil - } - config := &types.Config{ - // By setting a no-op error reporter, the type checker does as much work as possible. 
- Error: func(error) {}, - Importer: importer.Default(), - } - info := &types.Info{ - Types: make(map[ast.Expr]types.TypeAndValue), - Defs: make(map[*ast.Ident]types.Object), - Uses: make(map[*ast.Ident]types.Object), - Scopes: make(map[ast.Node]*types.Scope), - } - var anyFile *File - var astFiles []*ast.File - for _, f := range p.files { - anyFile = f - astFiles = append(astFiles, f.AST) - } - - typesPkg, err := check(config, anyFile.AST.Name.Name, p.fset, astFiles, info) - - // Remember the typechecking info, even if config.Check failed, - // since we will get partial information. - p.typesPkg = typesPkg - p.typesInfo = info - - return err -} - -// check function encapsulates the call to go/types.Config.Check method and -// recovers if the called method panics (see issue #59) -func check(config *types.Config, n string, fset *token.FileSet, astFiles []*ast.File, info *types.Info) (p *types.Package, err error) { - defer func() { - if r := recover(); r != nil { - err, _ = r.(error) - p = nil - return - } - }() - - return config.Check(n, fset, astFiles, info) -} - -// TypeOf returns the type of an expression. -func (p *Package) TypeOf(expr ast.Expr) types.Type { - if p.typesInfo == nil { - return nil - } - return p.typesInfo.TypeOf(expr) -} - -type walker struct { - nmap map[string]int - has map[string]int -} - -func (w *walker) Visit(n ast.Node) ast.Visitor { - fn, ok := n.(*ast.FuncDecl) - if !ok || fn.Recv == nil || len(fn.Recv.List) == 0 { - return w - } - // TODO(dsymonds): We could check the signature to be more precise. - recv := typeparams.ReceiverType(fn) - if i, ok := w.nmap[fn.Name.Name]; ok { - w.has[recv] |= i - } - return w -} - -func (p *Package) scanSortable() { - p.sortable = make(map[string]bool) - - // bitfield for which methods exist on each type. - const ( - Len = 1 << iota - Less - Swap - ) - nmap := map[string]int{"Len": Len, "Less": Less, "Swap": Swap} - has := make(map[string]int) - for _, f := range p.files { - ast.Walk(&walker{nmap, has}, f.AST) - } - for typ, ms := range has { - if ms == Len|Less|Swap { - p.sortable[typ] = true - } - } -} - -func (p *Package) lint(rules []Rule, config Config, failures chan Failure) { - p.scanSortable() - var wg sync.WaitGroup - for _, file := range p.files { - wg.Add(1) - go (func(file *File) { - file.lint(rules, config, failures) - defer wg.Done() - })(file) - } - wg.Wait() -} diff --git a/vendor/github.com/mgechev/revive/lint/rule.go b/vendor/github.com/mgechev/revive/lint/rule.go deleted file mode 100644 index ccc66691c..000000000 --- a/vendor/github.com/mgechev/revive/lint/rule.go +++ /dev/null @@ -1,31 +0,0 @@ -package lint - -import ( - "go/token" -) - -// DisabledInterval contains a single disabled interval and the associated rule name. -type DisabledInterval struct { - From token.Position - To token.Position - RuleName string -} - -// Rule defines an abstract rule interface -type Rule interface { - Name() string - Apply(*File, Arguments) []Failure -} - -// AbstractRule defines an abstract rule. -type AbstractRule struct { - Failures []Failure -} - -// ToFailurePosition returns the failure position. 
-func ToFailurePosition(start, end token.Pos, file *File) FailurePosition { - return FailurePosition{ - Start: file.ToPosition(start), - End: file.ToPosition(end), - } -} diff --git a/vendor/github.com/mgechev/revive/lint/utils.go b/vendor/github.com/mgechev/revive/lint/utils.go deleted file mode 100644 index 6ccfb0ef2..000000000 --- a/vendor/github.com/mgechev/revive/lint/utils.go +++ /dev/null @@ -1,133 +0,0 @@ -package lint - -import ( - "strings" - "unicode" -) - -// Name returns a different name if it should be different. -func Name(name string, allowlist, blocklist []string) (should string) { - // Fast path for simple cases: "_" and all lowercase. - if name == "_" { - return name - } - allLower := true - for _, r := range name { - if !unicode.IsLower(r) { - allLower = false - break - } - } - if allLower { - return name - } - - // Split camelCase at any lower->upper transition, and split on underscores. - // Check each word for common initialisms. - runes := []rune(name) - w, i := 0, 0 // index of start of word, scan - for i+1 <= len(runes) { - eow := false // whether we hit the end of a word - if i+1 == len(runes) { - eow = true - } else if runes[i+1] == '_' { - // underscore; shift the remainder forward over any run of underscores - eow = true - n := 1 - for i+n+1 < len(runes) && runes[i+n+1] == '_' { - n++ - } - - // Leave at most one underscore if the underscore is between two digits - if i+n+1 < len(runes) && unicode.IsDigit(runes[i]) && unicode.IsDigit(runes[i+n+1]) { - n-- - } - - copy(runes[i+1:], runes[i+n+1:]) - runes = runes[:len(runes)-n] - } else if unicode.IsLower(runes[i]) && !unicode.IsLower(runes[i+1]) { - // lower->non-lower - eow = true - } - i++ - if !eow { - continue - } - - // [w,i) is a word. - word := string(runes[w:i]) - ignoreInitWarnings := map[string]bool{} - for _, i := range allowlist { - ignoreInitWarnings[i] = true - } - - extraInits := map[string]bool{} - for _, i := range blocklist { - extraInits[i] = true - } - - if u := strings.ToUpper(word); (commonInitialisms[u] || extraInits[u]) && !ignoreInitWarnings[u] { - // Keep consistent case, which is lowercase only at the start. - if w == 0 && unicode.IsLower(runes[w]) { - u = strings.ToLower(u) - } - // Keep lowercase s for IDs - if u == "IDS" { - u = "IDs" - } - // All the common initialisms are ASCII, - // so we can replace the bytes exactly. - copy(runes[w:], []rune(u)) - } else if w > 0 && strings.ToLower(word) == word { - // already all lowercase, and not the first word, so uppercase the first character. - runes[w] = unicode.ToUpper(runes[w]) - } - w = i - } - return string(runes) -} - -// commonInitialisms is a set of common initialisms. -// Only add entries that are highly unlikely to be non-initialisms. -// For instance, "ID" is fine (Freudian code is rare), but "AND" is not. 
-var commonInitialisms = map[string]bool{ - "ACL": true, - "API": true, - "ASCII": true, - "CPU": true, - "CSS": true, - "DNS": true, - "EOF": true, - "GUID": true, - "HTML": true, - "HTTP": true, - "HTTPS": true, - "ID": true, - "IDS": true, - "IP": true, - "JSON": true, - "LHS": true, - "QPS": true, - "RAM": true, - "RHS": true, - "RPC": true, - "SLA": true, - "SMTP": true, - "SQL": true, - "SSH": true, - "TCP": true, - "TLS": true, - "TTL": true, - "UDP": true, - "UI": true, - "UID": true, - "UUID": true, - "URI": true, - "URL": true, - "UTF8": true, - "VM": true, - "XML": true, - "XMPP": true, - "XSRF": true, - "XSS": true, -} diff --git a/vendor/github.com/mgechev/revive/rule/add-constant.go b/vendor/github.com/mgechev/revive/rule/add-constant.go deleted file mode 100644 index 86182623a..000000000 --- a/vendor/github.com/mgechev/revive/rule/add-constant.go +++ /dev/null @@ -1,261 +0,0 @@ -package rule - -import ( - "fmt" - "go/ast" - "regexp" - "strconv" - "strings" - "sync" - - "github.com/mgechev/revive/lint" -) - -const ( - defaultStrLitLimit = 2 - kindFLOAT = "FLOAT" - kindINT = "INT" - kindSTRING = "STRING" -) - -type allowList map[string]map[string]bool - -func newAllowList() allowList { - return map[string]map[string]bool{kindINT: {}, kindFLOAT: {}, kindSTRING: {}} -} - -func (wl allowList) add(kind, list string) { - elems := strings.Split(list, ",") - for _, e := range elems { - wl[kind][e] = true - } -} - -// AddConstantRule lints unused params in functions. -type AddConstantRule struct { - allowList allowList - ignoreFunctions []*regexp.Regexp - strLitLimit int - sync.Mutex -} - -// Apply applies the rule to given file. -func (r *AddConstantRule) Apply(file *lint.File, arguments lint.Arguments) []lint.Failure { - r.configure(arguments) - - var failures []lint.Failure - - onFailure := func(failure lint.Failure) { - failures = append(failures, failure) - } - - w := &lintAddConstantRule{ - onFailure: onFailure, - strLits: make(map[string]int), - strLitLimit: r.strLitLimit, - allowList: r.allowList, - ignoreFunctions: r.ignoreFunctions, - structTags: make(map[*ast.BasicLit]struct{}), - } - - ast.Walk(w, file.AST) - - return failures -} - -// Name returns the rule name. -func (*AddConstantRule) Name() string { - return "add-constant" -} - -type lintAddConstantRule struct { - onFailure func(lint.Failure) - strLits map[string]int - strLitLimit int - allowList allowList - ignoreFunctions []*regexp.Regexp - structTags map[*ast.BasicLit]struct{} -} - -func (w *lintAddConstantRule) Visit(node ast.Node) ast.Visitor { - if node == nil { - return nil - } - - switch n := node.(type) { - case *ast.CallExpr: - w.checkFunc(n) - return nil - case *ast.GenDecl: - return nil // skip declarations - case *ast.BasicLit: - if !w.isStructTag(n) { - w.checkLit(n) - } - case *ast.StructType: - if n.Fields != nil { - for _, field := range n.Fields.List { - if field.Tag != nil { - w.structTags[field.Tag] = struct{}{} - } - } - } - } - - return w -} - -func (w *lintAddConstantRule) checkFunc(expr *ast.CallExpr) { - fName := w.getFuncName(expr) - - for _, arg := range expr.Args { - switch t := arg.(type) { - case *ast.CallExpr: - w.checkFunc(t) - case *ast.BasicLit: - if w.isIgnoredFunc(fName) { - continue - } - w.checkLit(t) - } - } -} - -func (*lintAddConstantRule) getFuncName(expr *ast.CallExpr) string { - switch f := expr.Fun.(type) { - case *ast.SelectorExpr: - switch prefix := f.X.(type) { - case *ast.Ident: - return prefix.Name + "." 
+ f.Sel.Name - } - case *ast.Ident: - return f.Name - } - - return "" -} - -func (w *lintAddConstantRule) checkLit(n *ast.BasicLit) { - switch kind := n.Kind.String(); kind { - case kindFLOAT, kindINT: - w.checkNumLit(kind, n) - case kindSTRING: - w.checkStrLit(n) - } -} - -func (w *lintAddConstantRule) isIgnoredFunc(fName string) bool { - for _, pattern := range w.ignoreFunctions { - if pattern.MatchString(fName) { - return true - } - } - - return false -} - -func (w *lintAddConstantRule) checkStrLit(n *ast.BasicLit) { - if w.allowList[kindSTRING][n.Value] { - return - } - - count := w.strLits[n.Value] - if count >= 0 { - w.strLits[n.Value] = count + 1 - if w.strLits[n.Value] > w.strLitLimit { - w.onFailure(lint.Failure{ - Confidence: 1, - Node: n, - Category: "style", - Failure: fmt.Sprintf("string literal %s appears, at least, %d times, create a named constant for it", n.Value, w.strLits[n.Value]), - }) - w.strLits[n.Value] = -1 // mark it to avoid failing again on the same literal - } - } -} - -func (w *lintAddConstantRule) checkNumLit(kind string, n *ast.BasicLit) { - if w.allowList[kind][n.Value] { - return - } - - w.onFailure(lint.Failure{ - Confidence: 1, - Node: n, - Category: "style", - Failure: fmt.Sprintf("avoid magic numbers like '%s', create a named constant for it", n.Value), - }) -} - -func (w *lintAddConstantRule) isStructTag(n *ast.BasicLit) bool { - _, ok := w.structTags[n] - return ok -} - -func (r *AddConstantRule) configure(arguments lint.Arguments) { - r.Lock() - defer r.Unlock() - - if r.allowList == nil { - r.strLitLimit = defaultStrLitLimit - r.allowList = newAllowList() - if len(arguments) > 0 { - args, ok := arguments[0].(map[string]any) - if !ok { - panic(fmt.Sprintf("Invalid argument to the add-constant rule. Expecting a k,v map, got %T", arguments[0])) - } - for k, v := range args { - kind := "" - switch k { - case "allowFloats": - kind = kindFLOAT - fallthrough - case "allowInts": - if kind == "" { - kind = kindINT - } - fallthrough - case "allowStrs": - if kind == "" { - kind = kindSTRING - } - list, ok := v.(string) - if !ok { - panic(fmt.Sprintf("Invalid argument to the add-constant rule, string expected. Got '%v' (%T)", v, v)) - } - r.allowList.add(kind, list) - case "maxLitCount": - sl, ok := v.(string) - if !ok { - panic(fmt.Sprintf("Invalid argument to the add-constant rule, expecting string representation of an integer. Got '%v' (%T)", v, v)) - } - - limit, err := strconv.Atoi(sl) - if err != nil { - panic(fmt.Sprintf("Invalid argument to the add-constant rule, expecting string representation of an integer. Got '%v'", v)) - } - r.strLitLimit = limit - case "ignoreFuncs": - excludes, ok := v.(string) - if !ok { - panic(fmt.Sprintf("Invalid argument to the ignoreFuncs parameter of add-constant rule, string expected. 
Got '%v' (%T)", v, v)) - } - - for _, exclude := range strings.Split(excludes, ",") { - exclude = strings.Trim(exclude, " ") - if exclude == "" { - panic("Invalid argument to the ignoreFuncs parameter of add-constant rule, expected regular expression must not be empty.") - } - - exp, err := regexp.Compile(exclude) - if err != nil { - panic(fmt.Sprintf("Invalid argument to the ignoreFuncs parameter of add-constant rule: regexp %q does not compile: %v", exclude, err)) - } - - r.ignoreFunctions = append(r.ignoreFunctions, exp) - } - } - } - } - } -} diff --git a/vendor/github.com/mgechev/revive/rule/argument-limit.go b/vendor/github.com/mgechev/revive/rule/argument-limit.go deleted file mode 100644 index 8120288fd..000000000 --- a/vendor/github.com/mgechev/revive/rule/argument-limit.go +++ /dev/null @@ -1,84 +0,0 @@ -package rule - -import ( - "fmt" - "go/ast" - "sync" - - "github.com/mgechev/revive/lint" -) - -// ArgumentsLimitRule lints given else constructs. -type ArgumentsLimitRule struct { - total int - sync.Mutex -} - -const defaultArgumentsLimit = 8 - -func (r *ArgumentsLimitRule) configure(arguments lint.Arguments) { - r.Lock() - defer r.Unlock() - if r.total == 0 { - if len(arguments) < 1 { - r.total = defaultArgumentsLimit - return - } - - total, ok := arguments[0].(int64) // Alt. non panicking version - if !ok { - panic(`invalid value passed as argument number to the "argument-limit" rule`) - } - r.total = int(total) - } -} - -// Apply applies the rule to given file. -func (r *ArgumentsLimitRule) Apply(file *lint.File, arguments lint.Arguments) []lint.Failure { - r.configure(arguments) - - var failures []lint.Failure - onFailure := func(failure lint.Failure) { - failures = append(failures, failure) - } - - walker := lintArgsNum{ - total: r.total, - onFailure: onFailure, - } - - ast.Walk(walker, file.AST) - - return failures -} - -// Name returns the rule name. -func (*ArgumentsLimitRule) Name() string { - return "argument-limit" -} - -type lintArgsNum struct { - total int - onFailure func(lint.Failure) -} - -func (w lintArgsNum) Visit(n ast.Node) ast.Visitor { - node, ok := n.(*ast.FuncDecl) - if ok { - num := 0 - for _, l := range node.Type.Params.List { - for range l.Names { - num++ - } - } - if num > w.total { - w.onFailure(lint.Failure{ - Confidence: 1, - Failure: fmt.Sprintf("maximum number of arguments per function exceeded; max %d but got %d", w.total, num), - Node: node.Type, - }) - return w - } - } - return w -} diff --git a/vendor/github.com/mgechev/revive/rule/atomic.go b/vendor/github.com/mgechev/revive/rule/atomic.go deleted file mode 100644 index 287b28c21..000000000 --- a/vendor/github.com/mgechev/revive/rule/atomic.go +++ /dev/null @@ -1,94 +0,0 @@ -package rule - -import ( - "go/ast" - "go/token" - "go/types" - - "github.com/mgechev/revive/lint" -) - -// AtomicRule lints given else constructs. -type AtomicRule struct{} - -// Apply applies the rule to given file. -func (*AtomicRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure { - var failures []lint.Failure - walker := atomic{ - pkgTypesInfo: file.Pkg.TypesInfo(), - onFailure: func(failure lint.Failure) { - failures = append(failures, failure) - }, - } - - ast.Walk(walker, file.AST) - - return failures -} - -// Name returns the rule name. 
-func (*AtomicRule) Name() string { - return "atomic" -} - -type atomic struct { - pkgTypesInfo *types.Info - onFailure func(lint.Failure) -} - -func (w atomic) Visit(node ast.Node) ast.Visitor { - n, ok := node.(*ast.AssignStmt) - if !ok { - return w - } - - if len(n.Lhs) != len(n.Rhs) { - return nil // skip assignment sub-tree - } - if len(n.Lhs) == 1 && n.Tok == token.DEFINE { - return nil // skip assignment sub-tree - } - - for i, right := range n.Rhs { - call, ok := right.(*ast.CallExpr) - if !ok { - continue - } - sel, ok := call.Fun.(*ast.SelectorExpr) - if !ok { - continue - } - pkgIdent, _ := sel.X.(*ast.Ident) - if w.pkgTypesInfo != nil { - pkgName, ok := w.pkgTypesInfo.Uses[pkgIdent].(*types.PkgName) - if !ok || pkgName.Imported().Path() != "sync/atomic" { - continue - } - } - - switch sel.Sel.Name { - case "AddInt32", "AddInt64", "AddUint32", "AddUint64", "AddUintptr": - left := n.Lhs[i] - if len(call.Args) != 2 { - continue - } - arg := call.Args[0] - broken := false - - if uarg, ok := arg.(*ast.UnaryExpr); ok && uarg.Op == token.AND { - broken = gofmt(left) == gofmt(uarg.X) - } else if star, ok := left.(*ast.StarExpr); ok { - broken = gofmt(star.X) == gofmt(arg) - } - - if broken { - w.onFailure(lint.Failure{ - Confidence: 1, - Failure: "direct assignment to atomic value", - Node: n, - }) - } - } - } - return w -} diff --git a/vendor/github.com/mgechev/revive/rule/banned-characters.go b/vendor/github.com/mgechev/revive/rule/banned-characters.go deleted file mode 100644 index 12997bae1..000000000 --- a/vendor/github.com/mgechev/revive/rule/banned-characters.go +++ /dev/null @@ -1,90 +0,0 @@ -package rule - -import ( - "fmt" - "go/ast" - "strings" - "sync" - - "github.com/mgechev/revive/lint" -) - -// BannedCharsRule checks if a file contains banned characters. -type BannedCharsRule struct { - bannedCharList []string - sync.Mutex -} - -const bannedCharsRuleName = "banned-characters" - -func (r *BannedCharsRule) configure(arguments lint.Arguments) { - r.Lock() - defer r.Unlock() - if r.bannedCharList == nil && len(arguments) > 0 { - checkNumberOfArguments(1, arguments, bannedCharsRuleName) - r.bannedCharList = r.getBannedCharsList(arguments) - } -} - -// Apply applied the rule to the given file. 
-func (r *BannedCharsRule) Apply(file *lint.File, arguments lint.Arguments) []lint.Failure { - r.configure(arguments) - - var failures []lint.Failure - onFailure := func(failure lint.Failure) { - failures = append(failures, failure) - } - - w := lintBannedCharsRule{ - bannedChars: r.bannedCharList, - onFailure: onFailure, - } - - ast.Walk(w, file.AST) - return failures -} - -// Name returns the rule name -func (*BannedCharsRule) Name() string { - return bannedCharsRuleName -} - -// getBannedCharsList converts arguments into the banned characters list -func (r *BannedCharsRule) getBannedCharsList(args lint.Arguments) []string { - var bannedChars []string - for _, char := range args { - charStr, ok := char.(string) - if !ok { - panic(fmt.Sprintf("Invalid argument for the %s rule: expecting a string, got %T", r.Name(), char)) - } - bannedChars = append(bannedChars, charStr) - } - - return bannedChars -} - -type lintBannedCharsRule struct { - bannedChars []string - onFailure func(lint.Failure) -} - -// Visit checks for each node if an identifier contains banned characters -func (w lintBannedCharsRule) Visit(node ast.Node) ast.Visitor { - n, ok := node.(*ast.Ident) - if !ok { - return w - } - for _, c := range w.bannedChars { - ok := strings.Contains(n.Name, c) - if ok { - w.onFailure(lint.Failure{ - Confidence: 1, - Failure: fmt.Sprintf("banned character found: %s", c), - RuleName: bannedCharsRuleName, - Node: n, - }) - } - } - - return w -} diff --git a/vendor/github.com/mgechev/revive/rule/bare-return.go b/vendor/github.com/mgechev/revive/rule/bare-return.go deleted file mode 100644 index 147fa84db..000000000 --- a/vendor/github.com/mgechev/revive/rule/bare-return.go +++ /dev/null @@ -1,84 +0,0 @@ -package rule - -import ( - "go/ast" - - "github.com/mgechev/revive/lint" -) - -// BareReturnRule lints given else constructs. -type BareReturnRule struct{} - -// Apply applies the rule to given file. -func (*BareReturnRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure { - var failures []lint.Failure - - onFailure := func(failure lint.Failure) { - failures = append(failures, failure) - } - - w := lintBareReturnRule{onFailure: onFailure} - ast.Walk(w, file.AST) - return failures -} - -// Name returns the rule name. 
-func (*BareReturnRule) Name() string { - return "bare-return" -} - -type lintBareReturnRule struct { - onFailure func(lint.Failure) -} - -func (w lintBareReturnRule) Visit(node ast.Node) ast.Visitor { - switch n := node.(type) { - case *ast.FuncDecl: - w.checkFunc(n.Type.Results, n.Body) - case *ast.FuncLit: // to cope with deferred functions and go-routines - w.checkFunc(n.Type.Results, n.Body) - } - - return w -} - -// checkFunc will verify if the given function has named result and bare returns -func (w lintBareReturnRule) checkFunc(results *ast.FieldList, body *ast.BlockStmt) { - hasNamedResults := results != nil && len(results.List) > 0 && results.List[0].Names != nil - if !hasNamedResults || body == nil { - return // nothing to do - } - - brf := bareReturnFinder{w.onFailure} - ast.Walk(brf, body) -} - -type bareReturnFinder struct { - onFailure func(lint.Failure) -} - -func (w bareReturnFinder) Visit(node ast.Node) ast.Visitor { - _, ok := node.(*ast.FuncLit) - if ok { - // skip analysing function literals - // they will be analysed by the lintBareReturnRule.Visit method - return nil - } - - rs, ok := node.(*ast.ReturnStmt) - if !ok { - return w - } - - if len(rs.Results) > 0 { - return w - } - - w.onFailure(lint.Failure{ - Confidence: 1, - Node: rs, - Failure: "avoid using bare returns, please add return expressions", - }) - - return w -} diff --git a/vendor/github.com/mgechev/revive/rule/blank-imports.go b/vendor/github.com/mgechev/revive/rule/blank-imports.go deleted file mode 100644 index a3d50b4f7..000000000 --- a/vendor/github.com/mgechev/revive/rule/blank-imports.go +++ /dev/null @@ -1,75 +0,0 @@ -package rule - -import ( - "go/ast" - "strings" - - "github.com/mgechev/revive/lint" -) - -// BlankImportsRule lints given else constructs. -type BlankImportsRule struct{} - -// Name returns the rule name. -func (*BlankImportsRule) Name() string { - return "blank-imports" -} - -// Apply applies the rule to given file. -func (r *BlankImportsRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure { - if file.Pkg.IsMain() || file.IsTest() { - return nil - } - - const ( - message = "a blank import should be only in a main or test package, or have a comment justifying it" - category = "imports" - - embedImportPath = `"embed"` - ) - - var failures []lint.Failure - - // The first element of each contiguous group of blank imports should have - // an explanatory comment of some kind. - for i, imp := range file.AST.Imports { - pos := file.ToPosition(imp.Pos()) - - if !isBlank(imp.Name) { - continue // Ignore non-blank imports. - } - - if i > 0 { - prev := file.AST.Imports[i-1] - prevPos := file.ToPosition(prev.Pos()) - - isSubsequentBlancInAGroup := prevPos.Line+1 == pos.Line && prev.Path.Value != embedImportPath && isBlank(prev.Name) - if isSubsequentBlancInAGroup { - continue - } - } - - if imp.Path.Value == embedImportPath && r.fileHasValidEmbedComment(file.AST) { - continue - } - - // This is the first blank import of a group. 
- if imp.Doc == nil && imp.Comment == nil { - failures = append(failures, lint.Failure{Failure: message, Category: category, Node: imp, Confidence: 1}) - } - } - - return failures -} - -func (*BlankImportsRule) fileHasValidEmbedComment(fileAst *ast.File) bool { - for _, commentGroup := range fileAst.Comments { - for _, comment := range commentGroup.List { - if strings.HasPrefix(comment.Text, "//go:embed ") { - return true - } - } - } - - return false -} diff --git a/vendor/github.com/mgechev/revive/rule/bool-literal-in-expr.go b/vendor/github.com/mgechev/revive/rule/bool-literal-in-expr.go deleted file mode 100644 index d6150339b..000000000 --- a/vendor/github.com/mgechev/revive/rule/bool-literal-in-expr.go +++ /dev/null @@ -1,73 +0,0 @@ -package rule - -import ( - "go/ast" - "go/token" - - "github.com/mgechev/revive/lint" -) - -// BoolLiteralRule warns when logic expressions contains Boolean literals. -type BoolLiteralRule struct{} - -// Apply applies the rule to given file. -func (*BoolLiteralRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure { - var failures []lint.Failure - - onFailure := func(failure lint.Failure) { - failures = append(failures, failure) - } - - astFile := file.AST - w := &lintBoolLiteral{astFile, onFailure} - ast.Walk(w, astFile) - - return failures -} - -// Name returns the rule name. -func (*BoolLiteralRule) Name() string { - return "bool-literal-in-expr" -} - -type lintBoolLiteral struct { - file *ast.File - onFailure func(lint.Failure) -} - -func (w *lintBoolLiteral) Visit(node ast.Node) ast.Visitor { - switch n := node.(type) { - case *ast.BinaryExpr: - if !isBoolOp(n.Op) { - return w - } - - lexeme, ok := isExprABooleanLit(n.X) - if !ok { - lexeme, ok = isExprABooleanLit(n.Y) - - if !ok { - return w - } - } - - isConstant := (n.Op == token.LAND && lexeme == "false") || (n.Op == token.LOR && lexeme == "true") - - if isConstant { - w.addFailure(n, "Boolean expression seems to always evaluate to "+lexeme, "logic") - } else { - w.addFailure(n, "omit Boolean literal in expression", "style") - } - } - - return w -} - -func (w lintBoolLiteral) addFailure(node ast.Node, msg, cat string) { - w.onFailure(lint.Failure{ - Confidence: 1, - Node: node, - Category: cat, - Failure: msg, - }) -} diff --git a/vendor/github.com/mgechev/revive/rule/call-to-gc.go b/vendor/github.com/mgechev/revive/rule/call-to-gc.go deleted file mode 100644 index 9c68380a4..000000000 --- a/vendor/github.com/mgechev/revive/rule/call-to-gc.go +++ /dev/null @@ -1,70 +0,0 @@ -package rule - -import ( - "go/ast" - - "github.com/mgechev/revive/lint" -) - -// CallToGCRule lints calls to the garbage collector. -type CallToGCRule struct{} - -// Apply applies the rule to given file. -func (*CallToGCRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure { - var failures []lint.Failure - onFailure := func(failure lint.Failure) { - failures = append(failures, failure) - } - - gcTriggeringFunctions := map[string]map[string]bool{ - "runtime": {"GC": true}, - } - - w := lintCallToGC{onFailure, gcTriggeringFunctions} - ast.Walk(w, file.AST) - - return failures -} - -// Name returns the rule name. 
-func (*CallToGCRule) Name() string { - return "call-to-gc" -} - -type lintCallToGC struct { - onFailure func(lint.Failure) - gcTriggeringFunctions map[string]map[string]bool -} - -func (w lintCallToGC) Visit(node ast.Node) ast.Visitor { - ce, ok := node.(*ast.CallExpr) - if !ok { - return w // nothing to do, the node is not a call - } - - fc, ok := ce.Fun.(*ast.SelectorExpr) - if !ok { - return nil // nothing to do, the call is not of the form pkg.func(...) - } - - id, ok := fc.X.(*ast.Ident) - - if !ok { - return nil // in case X is not an id (it should be!) - } - - fn := fc.Sel.Name - pkg := id.Name - if !w.gcTriggeringFunctions[pkg][fn] { - return nil // it isn't a call to a GC triggering function - } - - w.onFailure(lint.Failure{ - Confidence: 1, - Node: node, - Category: "bad practice", - Failure: "explicit call to the garbage collector", - }) - - return w -} diff --git a/vendor/github.com/mgechev/revive/rule/cognitive-complexity.go b/vendor/github.com/mgechev/revive/rule/cognitive-complexity.go deleted file mode 100644 index 1973faef8..000000000 --- a/vendor/github.com/mgechev/revive/rule/cognitive-complexity.go +++ /dev/null @@ -1,212 +0,0 @@ -package rule - -import ( - "fmt" - "go/ast" - "go/token" - "sync" - - "github.com/mgechev/revive/lint" - "golang.org/x/tools/go/ast/astutil" -) - -// CognitiveComplexityRule lints given else constructs. -type CognitiveComplexityRule struct { - maxComplexity int - sync.Mutex -} - -const defaultMaxCognitiveComplexity = 7 - -func (r *CognitiveComplexityRule) configure(arguments lint.Arguments) { - r.Lock() - defer r.Unlock() - if r.maxComplexity == 0 { - - if len(arguments) < 1 { - r.maxComplexity = defaultMaxCognitiveComplexity - return - } - - complexity, ok := arguments[0].(int64) - if !ok { - panic(fmt.Sprintf("invalid argument type for cognitive-complexity, expected int64, got %T", arguments[0])) - } - r.maxComplexity = int(complexity) - } -} - -// Apply applies the rule to given file. -func (r *CognitiveComplexityRule) Apply(file *lint.File, arguments lint.Arguments) []lint.Failure { - r.configure(arguments) - - var failures []lint.Failure - - linter := cognitiveComplexityLinter{ - file: file, - maxComplexity: r.maxComplexity, - onFailure: func(failure lint.Failure) { - failures = append(failures, failure) - }, - } - - linter.lintCognitiveComplexity() - - return failures -} - -// Name returns the rule name. -func (*CognitiveComplexityRule) Name() string { - return "cognitive-complexity" -} - -type cognitiveComplexityLinter struct { - file *lint.File - maxComplexity int - onFailure func(lint.Failure) -} - -func (w cognitiveComplexityLinter) lintCognitiveComplexity() { - f := w.file - for _, decl := range f.AST.Decls { - if fn, ok := decl.(*ast.FuncDecl); ok && fn.Body != nil { - v := cognitiveComplexityVisitor{} - c := v.subTreeComplexity(fn.Body) - if c > w.maxComplexity { - w.onFailure(lint.Failure{ - Confidence: 1, - Category: "maintenance", - Failure: fmt.Sprintf("function %s has cognitive complexity %d (> max enabled %d)", funcName(fn), c, w.maxComplexity), - Node: fn, - }) - } - } - } -} - -type cognitiveComplexityVisitor struct { - complexity int - nestingLevel int -} - -// subTreeComplexity calculates the cognitive complexity of an AST-subtree. -func (v cognitiveComplexityVisitor) subTreeComplexity(n ast.Node) int { - ast.Walk(&v, n) - return v.complexity -} - -// Visit implements the ast.Visitor interface. 
-func (v *cognitiveComplexityVisitor) Visit(n ast.Node) ast.Visitor { - switch n := n.(type) { - case *ast.IfStmt: - targets := []ast.Node{n.Cond, n.Body, n.Else} - v.walk(1, targets...) - return nil - case *ast.ForStmt: - targets := []ast.Node{n.Cond, n.Body} - v.walk(1, targets...) - return nil - case *ast.RangeStmt: - v.walk(1, n.Body) - return nil - case *ast.SelectStmt: - v.walk(1, n.Body) - return nil - case *ast.SwitchStmt: - v.walk(1, n.Body) - return nil - case *ast.TypeSwitchStmt: - v.walk(1, n.Body) - return nil - case *ast.FuncLit: - v.walk(0, n.Body) // do not increment the complexity, just do the nesting - return nil - case *ast.BinaryExpr: - v.complexity += v.binExpComplexity(n) - return nil // skip visiting binexp sub-tree (already visited by binExpComplexity) - case *ast.BranchStmt: - if n.Label != nil { - v.complexity++ - } - } - // TODO handle (at least) direct recursion - - return v -} - -func (v *cognitiveComplexityVisitor) walk(complexityIncrement int, targets ...ast.Node) { - v.complexity += complexityIncrement + v.nestingLevel - nesting := v.nestingLevel - v.nestingLevel++ - - for _, t := range targets { - if t == nil { - continue - } - - ast.Walk(v, t) - } - - v.nestingLevel = nesting -} - -func (cognitiveComplexityVisitor) binExpComplexity(n *ast.BinaryExpr) int { - calculator := binExprComplexityCalculator{opsStack: []token.Token{}} - - astutil.Apply(n, calculator.pre, calculator.post) - - return calculator.complexity -} - -type binExprComplexityCalculator struct { - complexity int - opsStack []token.Token // stack of bool operators - subexpStarted bool -} - -func (becc *binExprComplexityCalculator) pre(c *astutil.Cursor) bool { - switch n := c.Node().(type) { - case *ast.BinaryExpr: - isBoolOp := n.Op == token.LAND || n.Op == token.LOR - if !isBoolOp { - break - } - - ops := len(becc.opsStack) - // if - // is the first boolop in the expression OR - // is the first boolop inside a subexpression (...) OR - // is not the same to the previous one - // then - // increment complexity - if ops == 0 || becc.subexpStarted || n.Op != becc.opsStack[ops-1] { - becc.complexity++ - becc.subexpStarted = false - } - - becc.opsStack = append(becc.opsStack, n.Op) - case *ast.ParenExpr: - becc.subexpStarted = true - } - - return true -} - -func (becc *binExprComplexityCalculator) post(c *astutil.Cursor) bool { - switch n := c.Node().(type) { - case *ast.BinaryExpr: - isBoolOp := n.Op == token.LAND || n.Op == token.LOR - if !isBoolOp { - break - } - - ops := len(becc.opsStack) - if ops > 0 { - becc.opsStack = becc.opsStack[:ops-1] - } - case *ast.ParenExpr: - becc.subexpStarted = false - } - - return true -} diff --git a/vendor/github.com/mgechev/revive/rule/comment-spacings.go b/vendor/github.com/mgechev/revive/rule/comment-spacings.go deleted file mode 100644 index 2b8240ca5..000000000 --- a/vendor/github.com/mgechev/revive/rule/comment-spacings.go +++ /dev/null @@ -1,91 +0,0 @@ -package rule - -import ( - "fmt" - "strings" - "sync" - - "github.com/mgechev/revive/lint" -) - -// CommentSpacingsRule check the whether there is a space between -// the comment symbol( // ) and the start of the comment text -type CommentSpacingsRule struct { - allowList []string - sync.Mutex -} - -func (r *CommentSpacingsRule) configure(arguments lint.Arguments) { - r.Lock() - defer r.Unlock() - - if r.allowList == nil { - r.allowList = []string{ - "//go:", - "//revive:", - "//nolint:", - } - - for _, arg := range arguments { - allow, ok := arg.(string) // Alt. 
non panicking version - if !ok { - panic(fmt.Sprintf("invalid argument %v for %s; expected string but got %T", arg, r.Name(), arg)) - } - r.allowList = append(r.allowList, `//`+allow+`:`) - } - } -} - -// Apply the rule. -func (r *CommentSpacingsRule) Apply(file *lint.File, args lint.Arguments) []lint.Failure { - r.configure(args) - - var failures []lint.Failure - - for _, cg := range file.AST.Comments { - for _, comment := range cg.List { - commentLine := comment.Text - if len(commentLine) < 3 { - continue // nothing to do - } - - isMultiLineComment := commentLine[1] == '*' - isOK := commentLine[2] == '\n' - if isMultiLineComment && isOK { - continue - } - - isOK = (commentLine[2] == ' ') || (commentLine[2] == '\t') - if isOK { - continue - } - - if r.isAllowed(commentLine) { - continue - } - - failures = append(failures, lint.Failure{ - Node: comment, - Confidence: 1, - Category: "style", - Failure: "no space between comment delimiter and comment text", - }) - } - } - return failures -} - -// Name yields this rule name. -func (*CommentSpacingsRule) Name() string { - return "comment-spacings" -} - -func (r *CommentSpacingsRule) isAllowed(line string) bool { - for _, allow := range r.allowList { - if strings.HasPrefix(line, allow) { - return true - } - } - - return false -} diff --git a/vendor/github.com/mgechev/revive/rule/confusing-naming.go b/vendor/github.com/mgechev/revive/rule/confusing-naming.go deleted file mode 100644 index febfd8824..000000000 --- a/vendor/github.com/mgechev/revive/rule/confusing-naming.go +++ /dev/null @@ -1,191 +0,0 @@ -package rule - -import ( - "fmt" - "go/ast" - "strings" - "sync" - - "github.com/mgechev/revive/lint" -) - -type referenceMethod struct { - fileName string - id *ast.Ident -} - -type pkgMethods struct { - pkg *lint.Package - methods map[string]map[string]*referenceMethod - mu *sync.Mutex -} - -type packages struct { - pkgs []pkgMethods - mu sync.Mutex -} - -func (ps *packages) methodNames(lp *lint.Package) pkgMethods { - ps.mu.Lock() - defer ps.mu.Unlock() - - for _, pkg := range ps.pkgs { - if pkg.pkg == lp { - return pkg - } - } - - pkgm := pkgMethods{pkg: lp, methods: make(map[string]map[string]*referenceMethod), mu: &sync.Mutex{}} - ps.pkgs = append(ps.pkgs, pkgm) - - return pkgm -} - -var allPkgs = packages{pkgs: make([]pkgMethods, 1)} - -// ConfusingNamingRule lints method names that differ only by capitalization -type ConfusingNamingRule struct{} - -// Apply applies the rule to given file. -func (*ConfusingNamingRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure { - var failures []lint.Failure - fileAst := file.AST - pkgm := allPkgs.methodNames(file.Pkg) - walker := lintConfusingNames{ - fileName: file.Name, - pkgm: pkgm, - onFailure: func(failure lint.Failure) { - failures = append(failures, failure) - }, - } - - ast.Walk(&walker, fileAst) - - return failures -} - -// Name returns the rule name. -func (*ConfusingNamingRule) Name() string { - return "confusing-naming" -} - -// checkMethodName checks if a given method/function name is similar (just case differences) to other method/function of the same struct/file. 
-func checkMethodName(holder string, id *ast.Ident, w *lintConfusingNames) { - if id.Name == "init" && holder == defaultStructName { - // ignore init functions - return - } - - pkgm := w.pkgm - name := strings.ToUpper(id.Name) - - pkgm.mu.Lock() - defer pkgm.mu.Unlock() - - if pkgm.methods[holder] != nil { - if pkgm.methods[holder][name] != nil { - refMethod := pkgm.methods[holder][name] - // confusing names - var kind string - if holder == defaultStructName { - kind = "function" - } else { - kind = "method" - } - var fileName string - if w.fileName == refMethod.fileName { - fileName = "the same source file" - } else { - fileName = refMethod.fileName - } - w.onFailure(lint.Failure{ - Failure: fmt.Sprintf("Method '%s' differs only by capitalization to %s '%s' in %s", id.Name, kind, refMethod.id.Name, fileName), - Confidence: 1, - Node: id, - Category: "naming", - }) - - return - } - } else { - pkgm.methods[holder] = make(map[string]*referenceMethod, 1) - } - - // update the block list - if pkgm.methods[holder] == nil { - println("no entry for '", holder, "'") - } - pkgm.methods[holder][name] = &referenceMethod{fileName: w.fileName, id: id} -} - -type lintConfusingNames struct { - fileName string - pkgm pkgMethods - onFailure func(lint.Failure) -} - -const defaultStructName = "_" // used to map functions - -// getStructName of a function receiver. Defaults to defaultStructName -func getStructName(r *ast.FieldList) string { - result := defaultStructName - - if r == nil || len(r.List) < 1 { - return result - } - - t := r.List[0].Type - - switch v := t.(type) { - case *ast.StarExpr: - t = v.X - case *ast.IndexExpr: - t = v.X - } - - if p, _ := t.(*ast.Ident); p != nil { - result = p.Name - } - - return result -} - -func checkStructFields(fields *ast.FieldList, structName string, w *lintConfusingNames) { - bl := make(map[string]bool, len(fields.List)) - for _, f := range fields.List { - for _, id := range f.Names { - normName := strings.ToUpper(id.Name) - if bl[normName] { - w.onFailure(lint.Failure{ - Failure: fmt.Sprintf("Field '%s' differs only by capitalization to other field in the struct type %s", id.Name, structName), - Confidence: 1, - Node: id, - Category: "naming", - }) - } else { - bl[normName] = true - } - } - } -} - -func (w *lintConfusingNames) Visit(n ast.Node) ast.Visitor { - switch v := n.(type) { - case *ast.FuncDecl: - // Exclude naming warnings for functions that are exported to C but - // not exported in the Go API. - // See https://github.com/golang/lint/issues/144. - if ast.IsExported(v.Name.Name) || !isCgoExported(v) { - checkMethodName(getStructName(v.Recv), v.Name, w) - } - case *ast.TypeSpec: - if s, ok := v.Type.(*ast.StructType); ok { - checkStructFields(s.Fields, v.Name.Name, w) - } - - default: - // will add other checks like field names, struct names, etc. - } - - return w -} diff --git a/vendor/github.com/mgechev/revive/rule/confusing-results.go b/vendor/github.com/mgechev/revive/rule/confusing-results.go deleted file mode 100644 index 1b79ada9c..000000000 --- a/vendor/github.com/mgechev/revive/rule/confusing-results.go +++ /dev/null @@ -1,66 +0,0 @@ -package rule - -import ( - "go/ast" - - "github.com/mgechev/revive/lint" -) - -// ConfusingResultsRule lints given function declarations -type ConfusingResultsRule struct{} - -// Apply applies the rule to given file. 
-func (*ConfusingResultsRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure { - var failures []lint.Failure - - fileAst := file.AST - walker := lintConfusingResults{ - onFailure: func(failure lint.Failure) { - failures = append(failures, failure) - }, - } - - ast.Walk(walker, fileAst) - - return failures -} - -// Name returns the rule name. -func (*ConfusingResultsRule) Name() string { - return "confusing-results" -} - -type lintConfusingResults struct { - onFailure func(lint.Failure) -} - -func (w lintConfusingResults) Visit(n ast.Node) ast.Visitor { - fn, ok := n.(*ast.FuncDecl) - if !ok || fn.Type.Results == nil || len(fn.Type.Results.List) < 2 { - return w - } - lastType := "" - for _, result := range fn.Type.Results.List { - if len(result.Names) > 0 { - return w - } - - t, ok := result.Type.(*ast.Ident) - if !ok { - return w - } - - if t.Name == lastType { - w.onFailure(lint.Failure{ - Node: n, - Confidence: 1, - Category: "naming", - Failure: "unnamed results of the same type may be confusing, consider using named results", - }) - break - } - lastType = t.Name - } - - return w -} diff --git a/vendor/github.com/mgechev/revive/rule/constant-logical-expr.go b/vendor/github.com/mgechev/revive/rule/constant-logical-expr.go deleted file mode 100644 index 36cd641f7..000000000 --- a/vendor/github.com/mgechev/revive/rule/constant-logical-expr.go +++ /dev/null @@ -1,100 +0,0 @@ -package rule - -import ( - "go/ast" - "go/token" - - "github.com/mgechev/revive/lint" -) - -// ConstantLogicalExprRule warns on constant logical expressions. -type ConstantLogicalExprRule struct{} - -// Apply applies the rule to given file. -func (*ConstantLogicalExprRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure { - var failures []lint.Failure - - onFailure := func(failure lint.Failure) { - failures = append(failures, failure) - } - - astFile := file.AST - w := &lintConstantLogicalExpr{astFile, onFailure} - ast.Walk(w, astFile) - return failures -} - -// Name returns the rule name. 
-func (*ConstantLogicalExprRule) Name() string { - return "constant-logical-expr" -} - -type lintConstantLogicalExpr struct { - file *ast.File - onFailure func(lint.Failure) -} - -func (w *lintConstantLogicalExpr) Visit(node ast.Node) ast.Visitor { - switch n := node.(type) { - case *ast.BinaryExpr: - if !w.isOperatorWithLogicalResult(n.Op) { - return w - } - - if gofmt(n.X) != gofmt(n.Y) { // check if subexpressions are the same - return w - } - - // Handles cases like: a <= a, a == a, a >= a - if w.isEqualityOperator(n.Op) { - w.newFailure(n, "expression always evaluates to true") - return w - } - - // Handles cases like: a < a, a > a, a != a - if w.isInequalityOperator(n.Op) { - w.newFailure(n, "expression always evaluates to false") - return w - } - - w.newFailure(n, "left and right hand-side sub-expressions are the same") - } - - return w -} - -func (*lintConstantLogicalExpr) isOperatorWithLogicalResult(t token.Token) bool { - switch t { - case token.LAND, token.LOR, token.EQL, token.LSS, token.GTR, token.NEQ, token.LEQ, token.GEQ: - return true - } - - return false -} - -func (*lintConstantLogicalExpr) isEqualityOperator(t token.Token) bool { - switch t { - case token.EQL, token.LEQ, token.GEQ: - return true - } - - return false -} - -func (*lintConstantLogicalExpr) isInequalityOperator(t token.Token) bool { - switch t { - case token.LSS, token.GTR, token.NEQ: - return true - } - - return false -} - -func (w lintConstantLogicalExpr) newFailure(node ast.Node, msg string) { - w.onFailure(lint.Failure{ - Confidence: 1, - Node: node, - Category: "logic", - Failure: msg, - }) -} diff --git a/vendor/github.com/mgechev/revive/rule/context-as-argument.go b/vendor/github.com/mgechev/revive/rule/context-as-argument.go deleted file mode 100644 index e0c8cfa5e..000000000 --- a/vendor/github.com/mgechev/revive/rule/context-as-argument.go +++ /dev/null @@ -1,110 +0,0 @@ -package rule - -import ( - "fmt" - "go/ast" - "strings" - "sync" - - "github.com/mgechev/revive/lint" -) - -// ContextAsArgumentRule lints given else constructs. -type ContextAsArgumentRule struct { - allowTypesLUT map[string]struct{} - sync.Mutex -} - -// Apply applies the rule to given file. -func (r *ContextAsArgumentRule) Apply(file *lint.File, args lint.Arguments) []lint.Failure { - r.Lock() - if r.allowTypesLUT == nil { - r.allowTypesLUT = getAllowTypesFromArguments(args) - } - r.Unlock() - - var failures []lint.Failure - r.Lock() - walker := lintContextArguments{ - allowTypesLUT: r.allowTypesLUT, - onFailure: func(failure lint.Failure) { - failures = append(failures, failure) - }, - } - r.Unlock() - - ast.Walk(walker, file.AST) - - return failures -} - -// Name returns the rule name. -func (*ContextAsArgumentRule) Name() string { - return "context-as-argument" -} - -type lintContextArguments struct { - allowTypesLUT map[string]struct{} - onFailure func(lint.Failure) -} - -func (w lintContextArguments) Visit(n ast.Node) ast.Visitor { - fn, ok := n.(*ast.FuncDecl) - if !ok || len(fn.Type.Params.List) <= 1 { - return w - } - - fnArgs := fn.Type.Params.List - - // A context.Context should be the first parameter of a function. - // Flag any that show up after the first. 
- isCtxStillAllowed := true - for _, arg := range fnArgs { - argIsCtx := isPkgDot(arg.Type, "context", "Context") - if argIsCtx && !isCtxStillAllowed { - w.onFailure(lint.Failure{ - Node: arg, - Category: "arg-order", - Failure: "context.Context should be the first parameter of a function", - Confidence: 0.9, - }) - break // only flag one - } - - typeName := gofmt(arg.Type) - // a parameter of type context.Context is still allowed if the current arg type is in the LUT - _, isCtxStillAllowed = w.allowTypesLUT[typeName] - } - - return nil // avoid visiting the function body -} - -func getAllowTypesFromArguments(args lint.Arguments) map[string]struct{} { - allowTypesBefore := []string{} - if len(args) >= 1 { - argKV, ok := args[0].(map[string]any) - if !ok { - panic(fmt.Sprintf("Invalid argument to the context-as-argument rule. Expecting a k,v map, got %T", args[0])) - } - for k, v := range argKV { - switch k { - case "allowTypesBefore": - typesBefore, ok := v.(string) - if !ok { - panic(fmt.Sprintf("Invalid argument to the context-as-argument.allowTypesBefore rule. Expecting a string, got %T", v)) - } - allowTypesBefore = append(allowTypesBefore, strings.Split(typesBefore, ",")...) - default: - panic(fmt.Sprintf("Invalid argument to the context-as-argument rule. Unrecognized key %s", k)) - } - } - } - - result := make(map[string]struct{}, len(allowTypesBefore)) - for _, v := range allowTypesBefore { - result[v] = struct{}{} - } - - result["context.Context"] = struct{}{} // context.Context is always allowed before another context.Context - return result -} diff --git a/vendor/github.com/mgechev/revive/rule/context-keys-type.go b/vendor/github.com/mgechev/revive/rule/context-keys-type.go deleted file mode 100644 index 60ccec560..000000000 --- a/vendor/github.com/mgechev/revive/rule/context-keys-type.go +++ /dev/null @@ -1,81 +0,0 @@ -package rule - -import ( - "fmt" - "go/ast" - "go/types" - - "github.com/mgechev/revive/lint" -) - -// ContextKeysType lints given else constructs. -type ContextKeysType struct{} - -// Apply applies the rule to given file. -func (*ContextKeysType) Apply(file *lint.File, _ lint.Arguments) []lint.Failure { - var failures []lint.Failure - - fileAst := file.AST - walker := lintContextKeyTypes{ - file: file, - fileAst: fileAst, - onFailure: func(failure lint.Failure) { - failures = append(failures, failure) - }, - } - - file.Pkg.TypeCheck() - ast.Walk(walker, fileAst) - - return failures -} - -// Name returns the rule name. 
-func (*ContextKeysType) Name() string { - return "context-keys-type" -} - -type lintContextKeyTypes struct { - file *lint.File - fileAst *ast.File - onFailure func(lint.Failure) -} - -func (w lintContextKeyTypes) Visit(n ast.Node) ast.Visitor { - switch n := n.(type) { - case *ast.CallExpr: - checkContextKeyType(w, n) - } - - return w -} - -func checkContextKeyType(w lintContextKeyTypes, x *ast.CallExpr) { - f := w.file - sel, ok := x.Fun.(*ast.SelectorExpr) - if !ok { - return - } - pkg, ok := sel.X.(*ast.Ident) - if !ok || pkg.Name != "context" { - return - } - if sel.Sel.Name != "WithValue" { - return - } - - // key is second argument to context.WithValue - if len(x.Args) != 3 { - return - } - key := f.Pkg.TypesInfo().Types[x.Args[1]] - - if ktyp, ok := key.Type.(*types.Basic); ok && ktyp.Kind() != types.Invalid { - w.onFailure(lint.Failure{ - Confidence: 1, - Node: x, - Category: "content", - Failure: fmt.Sprintf("should not use basic type %s as key in context.WithValue", key.Type), - }) - } -} diff --git a/vendor/github.com/mgechev/revive/rule/cyclomatic.go b/vendor/github.com/mgechev/revive/rule/cyclomatic.go deleted file mode 100644 index 9f6d50043..000000000 --- a/vendor/github.com/mgechev/revive/rule/cyclomatic.go +++ /dev/null @@ -1,136 +0,0 @@ -package rule - -import ( - "fmt" - "go/ast" - "go/token" - "sync" - - "github.com/mgechev/revive/lint" -) - -// Based on https://github.com/fzipp/gocyclo - -// CyclomaticRule lints given else constructs. -type CyclomaticRule struct { - maxComplexity int - sync.Mutex -} - -const defaultMaxCyclomaticComplexity = 10 - -func (r *CyclomaticRule) configure(arguments lint.Arguments) { - r.Lock() - defer r.Unlock() - if r.maxComplexity == 0 { - if len(arguments) < 1 { - r.maxComplexity = defaultMaxCyclomaticComplexity - return - } - - complexity, ok := arguments[0].(int64) // Alt. non panicking version - if !ok { - panic(fmt.Sprintf("invalid argument for cyclomatic complexity; expected int but got %T", arguments[0])) - } - r.maxComplexity = int(complexity) - } -} - -// Apply applies the rule to given file. -func (r *CyclomaticRule) Apply(file *lint.File, arguments lint.Arguments) []lint.Failure { - r.configure(arguments) - - var failures []lint.Failure - fileAst := file.AST - - walker := lintCyclomatic{ - file: file, - complexity: r.maxComplexity, - onFailure: func(failure lint.Failure) { - failures = append(failures, failure) - }, - } - - ast.Walk(walker, fileAst) - - return failures -} - -// Name returns the rule name. -func (*CyclomaticRule) Name() string { - return "cyclomatic" -} - -type lintCyclomatic struct { - file *lint.File - complexity int - onFailure func(lint.Failure) -} - -func (w lintCyclomatic) Visit(_ ast.Node) ast.Visitor { - f := w.file - for _, decl := range f.AST.Decls { - if fn, ok := decl.(*ast.FuncDecl); ok { - c := complexity(fn) - if c > w.complexity { - w.onFailure(lint.Failure{ - Confidence: 1, - Category: "maintenance", - Failure: fmt.Sprintf("function %s has cyclomatic complexity %d (> max enabled %d)", - funcName(fn), c, w.complexity), - Node: fn, - }) - } - } - } - return nil -} - -// funcName returns the name representation of a function or method: -// "(Type).Name" for methods or simply "Name" for functions. 
-func funcName(fn *ast.FuncDecl) string { - if fn.Recv != nil { - if fn.Recv.NumFields() > 0 { - typ := fn.Recv.List[0].Type - return fmt.Sprintf("(%s).%s", recvString(typ), fn.Name) - } - } - return fn.Name.Name -} - -// recvString returns a string representation of recv of the -// form "T", "*T", or "BADRECV" (if not a proper receiver type). -func recvString(recv ast.Expr) string { - switch t := recv.(type) { - case *ast.Ident: - return t.Name - case *ast.StarExpr: - return "*" + recvString(t.X) - } - return "BADRECV" -} - -// complexity calculates the cyclomatic complexity of a function. -func complexity(fn *ast.FuncDecl) int { - v := complexityVisitor{} - ast.Walk(&v, fn) - return v.Complexity -} - -type complexityVisitor struct { - // Complexity is the cyclomatic complexity - Complexity int -} - -// Visit implements the ast.Visitor interface. -func (v *complexityVisitor) Visit(n ast.Node) ast.Visitor { - switch n := n.(type) { - case *ast.FuncDecl, *ast.IfStmt, *ast.ForStmt, *ast.RangeStmt, *ast.CaseClause, *ast.CommClause: - v.Complexity++ - case *ast.BinaryExpr: - if n.Op == token.LAND || n.Op == token.LOR { - v.Complexity++ - } - } - return v -} diff --git a/vendor/github.com/mgechev/revive/rule/datarace.go b/vendor/github.com/mgechev/revive/rule/datarace.go deleted file mode 100644 index 39e96696a..000000000 --- a/vendor/github.com/mgechev/revive/rule/datarace.go +++ /dev/null @@ -1,142 +0,0 @@ -package rule - -import ( - "fmt" - "go/ast" - - "github.com/mgechev/revive/lint" -) - -// DataRaceRule lints assignments to value method-receivers. -type DataRaceRule struct{} - -// Apply applies the rule to given file. -func (*DataRaceRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure { - var failures []lint.Failure - onFailure := func(failure lint.Failure) { - failures = append(failures, failure) - } - w := lintDataRaces{onFailure: onFailure} - - ast.Walk(w, file.AST) - - return failures -} - -// Name returns the rule name. 
-func (*DataRaceRule) Name() string { - return "datarace" -} - -type lintDataRaces struct { - onFailure func(failure lint.Failure) -} - -func (w lintDataRaces) Visit(n ast.Node) ast.Visitor { - node, ok := n.(*ast.FuncDecl) - if !ok { - return w // not function declaration - } - if node.Body == nil { - return nil // empty body - } - - results := node.Type.Results - - returnIDs := map[*ast.Object]struct{}{} - if results != nil { - returnIDs = w.ExtractReturnIDs(results.List) - } - fl := &lintFunctionForDataRaces{onFailure: w.onFailure, returnIDs: returnIDs, rangeIDs: map[*ast.Object]struct{}{}} - ast.Walk(fl, node.Body) - - return nil -} - -func (lintDataRaces) ExtractReturnIDs(fields []*ast.Field) map[*ast.Object]struct{} { - r := map[*ast.Object]struct{}{} - for _, f := range fields { - for _, id := range f.Names { - r[id.Obj] = struct{}{} - } - } - - return r -} - -type lintFunctionForDataRaces struct { - _ struct{} - onFailure func(failure lint.Failure) - returnIDs map[*ast.Object]struct{} - rangeIDs map[*ast.Object]struct{} -} - -func (w lintFunctionForDataRaces) Visit(node ast.Node) ast.Visitor { - switch n := node.(type) { - case *ast.RangeStmt: - if n.Body == nil { - return nil - } - - getIds := func(exprs ...ast.Expr) []*ast.Ident { - r := []*ast.Ident{} - for _, expr := range exprs { - if id, ok := expr.(*ast.Ident); ok { - r = append(r, id) - } - } - return r - } - - ids := getIds(n.Key, n.Value) - for _, id := range ids { - w.rangeIDs[id.Obj] = struct{}{} - } - - ast.Walk(w, n.Body) - - for _, id := range ids { - delete(w.rangeIDs, id.Obj) - } - - return nil // do not visit the body of the range, it has been already visited - case *ast.GoStmt: - f := n.Call.Fun - funcLit, ok := f.(*ast.FuncLit) - if !ok { - return nil - } - selectIDs := func(n ast.Node) bool { - _, ok := n.(*ast.Ident) - return ok - } - - ids := pick(funcLit.Body, selectIDs) - for _, id := range ids { - id := id.(*ast.Ident) - _, isRangeID := w.rangeIDs[id.Obj] - _, isReturnID := w.returnIDs[id.Obj] - - switch { - case isRangeID: - w.onFailure(lint.Failure{ - Confidence: 1, - Node: id, - Category: "logic", - Failure: fmt.Sprintf("datarace: range value %s is captured (by-reference) in goroutine", id.Name), - }) - case isReturnID: - w.onFailure(lint.Failure{ - Confidence: 0.8, - Node: id, - Category: "logic", - Failure: fmt.Sprintf("potential datarace: return value %s is captured (by-reference) in goroutine", id.Name), - }) - } - } - - return nil - } - - return w -} diff --git a/vendor/github.com/mgechev/revive/rule/deep-exit.go b/vendor/github.com/mgechev/revive/rule/deep-exit.go deleted file mode 100644 index 918d4294a..000000000 --- a/vendor/github.com/mgechev/revive/rule/deep-exit.go +++ /dev/null @@ -1,94 +0,0 @@ -package rule - -import ( - "fmt" - "go/ast" - - "github.com/mgechev/revive/lint" -) - -// DeepExitRule lints program exit at functions other than main or init. -type DeepExitRule struct{} - -// Apply applies the rule to given file. -func (*DeepExitRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure { - var failures []lint.Failure - onFailure := func(failure lint.Failure) { - failures = append(failures, failure) - } - - exitFunctions := map[string]map[string]bool{ - "os": {"Exit": true}, - "syscall": {"Exit": true}, - "log": { - "Fatal": true, - "Fatalf": true, - "Fatalln": true, - "Panic": true, - "Panicf": true, - "Panicln": true, - }, - } - - w := lintDeepExit{onFailure, exitFunctions, file.IsTest()} - ast.Walk(w, file.AST) - return failures -} - -// Name returns the rule name. 
-func (*DeepExitRule) Name() string { - return "deep-exit" -} - -type lintDeepExit struct { - onFailure func(lint.Failure) - exitFunctions map[string]map[string]bool - isTestFile bool -} - -func (w lintDeepExit) Visit(node ast.Node) ast.Visitor { - if fd, ok := node.(*ast.FuncDecl); ok { - if w.mustIgnore(fd) { - return nil // skip analysis of this function - } - - return w - } - - se, ok := node.(*ast.ExprStmt) - if !ok { - return w - } - ce, ok := se.X.(*ast.CallExpr) - if !ok { - return w - } - - fc, ok := ce.Fun.(*ast.SelectorExpr) - if !ok { - return w - } - id, ok := fc.X.(*ast.Ident) - if !ok { - return w - } - - fn := fc.Sel.Name - pkg := id.Name - if w.exitFunctions[pkg] != nil && w.exitFunctions[pkg][fn] { // it's a call to an exit function - w.onFailure(lint.Failure{ - Confidence: 1, - Node: ce, - Category: "bad practice", - Failure: fmt.Sprintf("calls to %s.%s only in main() or init() functions", pkg, fn), - }) - } - - return w -} - -func (w *lintDeepExit) mustIgnore(fd *ast.FuncDecl) bool { - fn := fd.Name.Name - - return fn == "init" || fn == "main" || (w.isTestFile && fn == "TestMain") -} diff --git a/vendor/github.com/mgechev/revive/rule/defer.go b/vendor/github.com/mgechev/revive/rule/defer.go deleted file mode 100644 index adc6478ae..000000000 --- a/vendor/github.com/mgechev/revive/rule/defer.go +++ /dev/null @@ -1,177 +0,0 @@ -package rule - -import ( - "fmt" - "go/ast" - "sync" - - "github.com/mgechev/revive/lint" -) - -// DeferRule lints unused params in functions. -type DeferRule struct { - allow map[string]bool - sync.Mutex -} - -func (r *DeferRule) configure(arguments lint.Arguments) { - r.Lock() - if r.allow == nil { - r.allow = r.allowFromArgs(arguments) - } - r.Unlock() -} - -// Apply applies the rule to given file. -func (r *DeferRule) Apply(file *lint.File, arguments lint.Arguments) []lint.Failure { - r.configure(arguments) - - var failures []lint.Failure - onFailure := func(failure lint.Failure) { - failures = append(failures, failure) - } - w := lintDeferRule{onFailure: onFailure, allow: r.allow} - - ast.Walk(w, file.AST) - - return failures -} - -// Name returns the rule name. -func (*DeferRule) Name() string { - return "defer" -} - -func (*DeferRule) allowFromArgs(args lint.Arguments) map[string]bool { - if len(args) < 1 { - allow := map[string]bool{ - "loop": true, - "call-chain": true, - "method-call": true, - "return": true, - "recover": true, - "immediate-recover": true, - } - - return allow - } - - aa, ok := args[0].([]any) - if !ok { - panic(fmt.Sprintf("Invalid argument '%v' for 'defer' rule. Expecting []string, got %T", args[0], args[0])) - } - - allow := make(map[string]bool, len(aa)) - for _, subcase := range aa { - sc, ok := subcase.(string) - if !ok { - panic(fmt.Sprintf("Invalid argument '%v' for 'defer' rule. 
Expecting string, got %T", subcase, subcase)) - } - allow[sc] = true - } - - return allow -} - -type lintDeferRule struct { - onFailure func(lint.Failure) - inALoop bool - inADefer bool - inAFuncLit bool - allow map[string]bool -} - -func (w lintDeferRule) Visit(node ast.Node) ast.Visitor { - switch n := node.(type) { - case *ast.ForStmt: - w.visitSubtree(n.Body, w.inADefer, true, w.inAFuncLit) - return nil - case *ast.RangeStmt: - w.visitSubtree(n.Body, w.inADefer, true, w.inAFuncLit) - return nil - case *ast.FuncLit: - w.visitSubtree(n.Body, w.inADefer, false, true) - return nil - case *ast.ReturnStmt: - if len(n.Results) != 0 && w.inADefer && w.inAFuncLit { - w.newFailure("return in a defer function has no effect", n, 1.0, "logic", "return") - } - case *ast.CallExpr: - isCallToRecover := isIdent(n.Fun, "recover") - switch { - case !w.inADefer && isCallToRecover: - // func fn() { recover() } - // - // confidence is not 1 because recover can be in a function that is deferred elsewhere - w.newFailure("recover must be called inside a deferred function", n, 0.8, "logic", "recover") - case w.inADefer && !w.inAFuncLit && isCallToRecover: - // defer helper(recover()) - // - // confidence is not truly 1 because this could be in a correctly-deferred func, - // but it is very likely to be a misunderstanding of defer's behavior around arguments. - w.newFailure("recover must be called inside a deferred function, this is executing recover immediately", n, 1, "logic", "immediate-recover") - } - - case *ast.DeferStmt: - if isIdent(n.Call.Fun, "recover") { - // defer recover() - // - // confidence is not truly 1 because this could be in a correctly-deferred func, - // but normally this doesn't suppress a panic, and even if it did it would silently discard the value. 
- w.newFailure("recover must be called inside a deferred function, this is executing recover immediately", n, 1, "logic", "immediate-recover") - } - w.visitSubtree(n.Call.Fun, true, false, false) - for _, a := range n.Call.Args { - switch a.(type) { - case *ast.FuncLit: - continue // too hard to analyze deferred calls with func literals args - default: - w.visitSubtree(a, true, false, false) // check arguments, they should not contain recover() - } - } - - if w.inALoop { - w.newFailure("prefer not to defer inside loops", n, 1.0, "bad practice", "loop") - } - - switch fn := n.Call.Fun.(type) { - case *ast.CallExpr: - w.newFailure("prefer not to defer chains of function calls", fn, 1.0, "bad practice", "call-chain") - case *ast.SelectorExpr: - if id, ok := fn.X.(*ast.Ident); ok { - isMethodCall := id != nil && id.Obj != nil && id.Obj.Kind == ast.Typ - if isMethodCall { - w.newFailure("be careful when deferring calls to methods without pointer receiver", fn, 0.8, "bad practice", "method-call") - } - } - } - - return nil - } - - return w -} - -func (w lintDeferRule) visitSubtree(n ast.Node, inADefer, inALoop, inAFuncLit bool) { - nw := lintDeferRule{ - onFailure: w.onFailure, - inADefer: inADefer, - inALoop: inALoop, - inAFuncLit: inAFuncLit, - allow: w.allow, - } - ast.Walk(nw, n) -} - -func (w lintDeferRule) newFailure(msg string, node ast.Node, confidence float64, cat, subcase string) { - if !w.allow[subcase] { - return - } - - w.onFailure(lint.Failure{ - Confidence: confidence, - Node: node, - Category: cat, - Failure: msg, - }) -} diff --git a/vendor/github.com/mgechev/revive/rule/doc.go b/vendor/github.com/mgechev/revive/rule/doc.go deleted file mode 100644 index 55bf6caa6..000000000 --- a/vendor/github.com/mgechev/revive/rule/doc.go +++ /dev/null @@ -1,2 +0,0 @@ -// Package rule implements revive's linting rules. -package rule diff --git a/vendor/github.com/mgechev/revive/rule/dot-imports.go b/vendor/github.com/mgechev/revive/rule/dot-imports.go deleted file mode 100644 index 6b877677d..000000000 --- a/vendor/github.com/mgechev/revive/rule/dot-imports.go +++ /dev/null @@ -1,106 +0,0 @@ -package rule - -import ( - "fmt" - "go/ast" - "sync" - - "github.com/mgechev/revive/lint" -) - -// DotImportsRule lints given else constructs. -type DotImportsRule struct { - sync.Mutex - allowedPackages allowPackages -} - -// Apply applies the rule to given file. -func (r *DotImportsRule) Apply(file *lint.File, arguments lint.Arguments) []lint.Failure { - r.configure(arguments) - - var failures []lint.Failure - - fileAst := file.AST - walker := lintImports{ - file: file, - fileAst: fileAst, - onFailure: func(failure lint.Failure) { - failures = append(failures, failure) - }, - allowPackages: r.allowedPackages, - } - - ast.Walk(walker, fileAst) - - return failures -} - -// Name returns the rule name. -func (*DotImportsRule) Name() string { - return "dot-imports" -} - -func (r *DotImportsRule) configure(arguments lint.Arguments) { - r.Lock() - defer r.Unlock() - - if r.allowedPackages != nil { - return - } - - r.allowedPackages = make(allowPackages) - if len(arguments) == 0 { - return - } - - args, ok := arguments[0].(map[string]any) - if !ok { - panic(fmt.Sprintf("Invalid argument to the dot-imports rule. 
Expecting a k,v map, got %T", arguments[0])) - } - - if allowedPkgArg, ok := args["allowedPackages"]; ok { - if pkgs, ok := allowedPkgArg.([]any); ok { - for _, p := range pkgs { - if pkg, ok := p.(string); ok { - r.allowedPackages.add(pkg) - } else { - panic(fmt.Sprintf("Invalid argument to the dot-imports rule, string expected. Got '%v' (%T)", p, p)) - } - } - } else { - panic(fmt.Sprintf("Invalid argument to the dot-imports rule, []string expected. Got '%v' (%T)", allowedPkgArg, allowedPkgArg)) - } - } -} - -type lintImports struct { - file *lint.File - fileAst *ast.File - onFailure func(lint.Failure) - allowPackages allowPackages -} - -func (w lintImports) Visit(_ ast.Node) ast.Visitor { - for _, is := range w.fileAst.Imports { - if is.Name != nil && is.Name.Name == "." && !w.allowPackages.isAllowedPackage(is.Path.Value) { - w.onFailure(lint.Failure{ - Confidence: 1, - Failure: "should not use dot imports", - Node: is, - Category: "imports", - }) - } - } - return nil -} - -type allowPackages map[string]struct{} - -func (ap allowPackages) add(pkg string) { - ap[fmt.Sprintf(`"%s"`, pkg)] = struct{}{} // import path strings are with double quotes -} - -func (ap allowPackages) isAllowedPackage(pkg string) bool { - _, allowed := ap[pkg] - return allowed -} diff --git a/vendor/github.com/mgechev/revive/rule/duplicated-imports.go b/vendor/github.com/mgechev/revive/rule/duplicated-imports.go deleted file mode 100644 index 2b177fac6..000000000 --- a/vendor/github.com/mgechev/revive/rule/duplicated-imports.go +++ /dev/null @@ -1,39 +0,0 @@ -package rule - -import ( - "fmt" - - "github.com/mgechev/revive/lint" -) - -// DuplicatedImportsRule lints given else constructs. -type DuplicatedImportsRule struct{} - -// Apply applies the rule to given file. -func (*DuplicatedImportsRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure { - var failures []lint.Failure - - impPaths := map[string]struct{}{} - for _, imp := range file.AST.Imports { - path := imp.Path.Value - _, ok := impPaths[path] - if ok { - failures = append(failures, lint.Failure{ - Confidence: 1, - Failure: fmt.Sprintf("Package %s already imported", path), - Node: imp, - Category: "imports", - }) - continue - } - - impPaths[path] = struct{}{} - } - - return failures -} - -// Name returns the rule name. -func (*DuplicatedImportsRule) Name() string { - return "duplicated-imports" -} diff --git a/vendor/github.com/mgechev/revive/rule/early-return.go b/vendor/github.com/mgechev/revive/rule/early-return.go deleted file mode 100644 index 9c04a1dbe..000000000 --- a/vendor/github.com/mgechev/revive/rule/early-return.go +++ /dev/null @@ -1,51 +0,0 @@ -package rule - -import ( - "fmt" - - "github.com/mgechev/revive/internal/ifelse" - "github.com/mgechev/revive/lint" -) - -// EarlyReturnRule finds opportunities to reduce nesting by inverting -// the condition of an "if" block. -type EarlyReturnRule struct{} - -// Apply applies the rule to given file. -func (e *EarlyReturnRule) Apply(file *lint.File, args lint.Arguments) []lint.Failure { - return ifelse.Apply(e, file.AST, ifelse.TargetIf, args) -} - -// Name returns the rule name. -func (*EarlyReturnRule) Name() string { - return "early-return" -} - -// CheckIfElse evaluates the rule against an ifelse.Chain. 
-func (*EarlyReturnRule) CheckIfElse(chain ifelse.Chain, args ifelse.Args) (failMsg string) { - if !chain.Else.Deviates() { - // this rule only applies if the else-block deviates control flow - return - } - - if chain.HasPriorNonDeviating && !chain.If.IsEmpty() { - // if we de-indent this block then a previous branch - // might flow into it, affecting program behaviour - return - } - - if chain.If.Deviates() { - // avoid overlapping with superfluous-else - return - } - - if args.PreserveScope && !chain.AtBlockEnd && (chain.HasInitializer || chain.If.HasDecls) { - // avoid increasing variable scope - return - } - - if chain.If.IsEmpty() { - return fmt.Sprintf("if c { } else { %[1]v } can be simplified to if !c { %[1]v }", chain.Else) - } - return fmt.Sprintf("if c { ... } else { %[1]v } can be simplified to if !c { %[1]v } ...", chain.Else) -} diff --git a/vendor/github.com/mgechev/revive/rule/empty-block.go b/vendor/github.com/mgechev/revive/rule/empty-block.go deleted file mode 100644 index 25a052a0e..000000000 --- a/vendor/github.com/mgechev/revive/rule/empty-block.go +++ /dev/null @@ -1,75 +0,0 @@ -package rule - -import ( - "go/ast" - - "github.com/mgechev/revive/lint" -) - -// EmptyBlockRule lints given else constructs. -type EmptyBlockRule struct{} - -// Apply applies the rule to given file. -func (*EmptyBlockRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure { - var failures []lint.Failure - - onFailure := func(failure lint.Failure) { - failures = append(failures, failure) - } - - w := lintEmptyBlock{make(map[*ast.BlockStmt]bool), onFailure} - ast.Walk(w, file.AST) - return failures -} - -// Name returns the rule name. -func (*EmptyBlockRule) Name() string { - return "empty-block" -} - -type lintEmptyBlock struct { - ignore map[*ast.BlockStmt]bool - onFailure func(lint.Failure) -} - -func (w lintEmptyBlock) Visit(node ast.Node) ast.Visitor { - switch n := node.(type) { - case *ast.FuncDecl: - w.ignore[n.Body] = true - return w - case *ast.FuncLit: - w.ignore[n.Body] = true - return w - case *ast.SelectStmt: - w.ignore[n.Body] = true - return w - case *ast.ForStmt: - if len(n.Body.List) == 0 && n.Init == nil && n.Post == nil && n.Cond != nil { - if _, isCall := n.Cond.(*ast.CallExpr); isCall { - w.ignore[n.Body] = true - return w - } - } - case *ast.RangeStmt: - if len(n.Body.List) == 0 { - w.onFailure(lint.Failure{ - Confidence: 0.9, - Node: n, - Category: "logic", - Failure: "this block is empty, you can remove it", - }) - return nil // skip visiting the range subtree (it will produce a duplicated failure) - } - case *ast.BlockStmt: - if !w.ignore[n] && len(n.List) == 0 { - w.onFailure(lint.Failure{ - Confidence: 1, - Node: n, - Category: "logic", - Failure: "this block is empty, you can remove it", - }) - } - } - - return w -} diff --git a/vendor/github.com/mgechev/revive/rule/empty-lines.go b/vendor/github.com/mgechev/revive/rule/empty-lines.go deleted file mode 100644 index 2710a8979..000000000 --- a/vendor/github.com/mgechev/revive/rule/empty-lines.go +++ /dev/null @@ -1,105 +0,0 @@ -package rule - -import ( - "go/ast" - "go/token" - - "github.com/mgechev/revive/lint" -) - -// EmptyLinesRule lints empty lines in blocks. -type EmptyLinesRule struct{} - -// Apply applies the rule to given file. 
-func (r *EmptyLinesRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure { - var failures []lint.Failure - - onFailure := func(failure lint.Failure) { - failures = append(failures, failure) - } - - w := lintEmptyLines{file, r.commentLines(file.CommentMap(), file), onFailure} - ast.Walk(w, file.AST) - return failures -} - -// Name returns the rule name. -func (*EmptyLinesRule) Name() string { - return "empty-lines" -} - -type lintEmptyLines struct { - file *lint.File - cmap map[int]struct{} - onFailure func(lint.Failure) -} - -func (w lintEmptyLines) Visit(node ast.Node) ast.Visitor { - block, ok := node.(*ast.BlockStmt) - if !ok || len(block.List) == 0 { - return w - } - - w.checkStart(block) - w.checkEnd(block) - - return w -} - -func (w lintEmptyLines) checkStart(block *ast.BlockStmt) { - blockStart := w.position(block.Lbrace) - firstNode := block.List[0] - firstStmt := w.position(firstNode.Pos()) - - firstBlockLineIsStmt := firstStmt.Line-(blockStart.Line+1) <= 0 - _, firstBlockLineIsComment := w.cmap[blockStart.Line+1] - if firstBlockLineIsStmt || firstBlockLineIsComment { - return - } - - w.onFailure(lint.Failure{ - Confidence: 1, - Node: block, - Category: "style", - Failure: "extra empty line at the start of a block", - }) -} - -func (w lintEmptyLines) checkEnd(block *ast.BlockStmt) { - blockEnd := w.position(block.Rbrace) - lastNode := block.List[len(block.List)-1] - lastStmt := w.position(lastNode.End()) - - lastBlockLineIsStmt := (blockEnd.Line-1)-lastStmt.Line <= 0 - _, lastBlockLineIsComment := w.cmap[blockEnd.Line-1] - if lastBlockLineIsStmt || lastBlockLineIsComment { - return - } - - w.onFailure(lint.Failure{ - Confidence: 1, - Node: block, - Category: "style", - Failure: "extra empty line at the end of a block", - }) -} - -func (w lintEmptyLines) position(pos token.Pos) token.Position { - return w.file.ToPosition(pos) -} - -func (*EmptyLinesRule) commentLines(cmap ast.CommentMap, file *lint.File) map[int]struct{} { - result := map[int]struct{}{} - - for _, comments := range cmap { - for _, comment := range comments { - start := file.ToPosition(comment.Pos()) - end := file.ToPosition(comment.End()) - for i := start.Line; i <= end.Line; i++ { - result[i] = struct{}{} - } - } - } - - return result -} diff --git a/vendor/github.com/mgechev/revive/rule/enforce-map-style.go b/vendor/github.com/mgechev/revive/rule/enforce-map-style.go deleted file mode 100644 index 36ac2374c..000000000 --- a/vendor/github.com/mgechev/revive/rule/enforce-map-style.go +++ /dev/null @@ -1,164 +0,0 @@ -package rule - -import ( - "fmt" - "go/ast" - "sync" - - "github.com/mgechev/revive/lint" -) - -type enforceMapStyleType string - -const ( - enforceMapStyleTypeAny enforceMapStyleType = "any" - enforceMapStyleTypeMake enforceMapStyleType = "make" - enforceMapStyleTypeLiteral enforceMapStyleType = "literal" -) - -func mapStyleFromString(s string) (enforceMapStyleType, error) { - switch s { - case string(enforceMapStyleTypeAny), "": - return enforceMapStyleTypeAny, nil - case string(enforceMapStyleTypeMake): - return enforceMapStyleTypeMake, nil - case string(enforceMapStyleTypeLiteral): - return enforceMapStyleTypeLiteral, nil - default: - return enforceMapStyleTypeAny, fmt.Errorf( - "invalid map style: %s (expecting one of %v)", - s, - []enforceMapStyleType{ - enforceMapStyleTypeAny, - enforceMapStyleTypeMake, - enforceMapStyleTypeLiteral, - }, - ) - } -} - -// EnforceMapStyleRule implements a rule to enforce `make(map[type]type)` over `map[type]type{}`. 
-type EnforceMapStyleRule struct { - configured bool - enforceMapStyle enforceMapStyleType - sync.Mutex -} - -func (r *EnforceMapStyleRule) configure(arguments lint.Arguments) { - r.Lock() - defer r.Unlock() - - if r.configured { - return - } - r.configured = true - - if len(arguments) < 1 { - r.enforceMapStyle = enforceMapStyleTypeAny - return - } - - enforceMapStyle, ok := arguments[0].(string) - if !ok { - panic(fmt.Sprintf("Invalid argument '%v' for 'enforce-map-style' rule. Expecting string, got %T", arguments[0], arguments[0])) - } - - var err error - r.enforceMapStyle, err = mapStyleFromString(enforceMapStyle) - - if err != nil { - panic(fmt.Sprintf("Invalid argument to the enforce-map-style rule: %v", err)) - } -} - -// Apply applies the rule to given file. -func (r *EnforceMapStyleRule) Apply(file *lint.File, arguments lint.Arguments) []lint.Failure { - r.configure(arguments) - - if r.enforceMapStyle == enforceMapStyleTypeAny { - // this linter is not configured - return nil - } - - var failures []lint.Failure - - astFile := file.AST - ast.Inspect(astFile, func(n ast.Node) bool { - switch v := n.(type) { - case *ast.CompositeLit: - if r.enforceMapStyle != enforceMapStyleTypeMake { - return true - } - - if !r.isMapType(v.Type) { - return true - } - - if len(v.Elts) > 0 { - // not an empty map - return true - } - - failures = append(failures, lint.Failure{ - Confidence: 1, - Node: v, - Category: "style", - Failure: "use make(map[type]type) instead of map[type]type{}", - }) - case *ast.CallExpr: - if r.enforceMapStyle != enforceMapStyleTypeLiteral { - // skip any function calls, even if it's make(map[type]type) - // we don't want to report it if literals are not enforced - return true - } - - ident, ok := v.Fun.(*ast.Ident) - if !ok || ident.Name != "make" { - return true - } - - if len(v.Args) != 1 { - // skip make(map[type]type, size) and invalid empty declarations - return true - } - - if !r.isMapType(v.Args[0]) { - // not a map type - return true - } - - failures = append(failures, lint.Failure{ - Confidence: 1, - Node: v.Args[0], - Category: "style", - Failure: "use map[type]type{} instead of make(map[type]type)", - }) - } - return true - }) - - return failures -} - -// Name returns the rule name. 
-func (*EnforceMapStyleRule) Name() string { - return "enforce-map-style" -} - -func (r *EnforceMapStyleRule) isMapType(v ast.Expr) bool { - switch t := v.(type) { - case *ast.MapType: - return true - case *ast.Ident: - if t.Obj == nil { - return false - } - typeSpec, ok := t.Obj.Decl.(*ast.TypeSpec) - if !ok { - return false - } - return r.isMapType(typeSpec.Type) - default: - return false - } -} diff --git a/vendor/github.com/mgechev/revive/rule/enforce-repeated-arg-type-style.go b/vendor/github.com/mgechev/revive/rule/enforce-repeated-arg-type-style.go deleted file mode 100644 index 067082b1b..000000000 --- a/vendor/github.com/mgechev/revive/rule/enforce-repeated-arg-type-style.go +++ /dev/null @@ -1,191 +0,0 @@ -package rule - -import ( - "fmt" - "go/ast" - "go/types" - "sync" - - "github.com/mgechev/revive/lint" -) - -type enforceRepeatedArgTypeStyleType string - -const ( - enforceRepeatedArgTypeStyleTypeAny enforceRepeatedArgTypeStyleType = "any" - enforceRepeatedArgTypeStyleTypeShort enforceRepeatedArgTypeStyleType = "short" - enforceRepeatedArgTypeStyleTypeFull enforceRepeatedArgTypeStyleType = "full" -) - -func repeatedArgTypeStyleFromString(s string) enforceRepeatedArgTypeStyleType { - switch s { - case string(enforceRepeatedArgTypeStyleTypeAny), "": - return enforceRepeatedArgTypeStyleTypeAny - case string(enforceRepeatedArgTypeStyleTypeShort): - return enforceRepeatedArgTypeStyleTypeShort - case string(enforceRepeatedArgTypeStyleTypeFull): - return enforceRepeatedArgTypeStyleTypeFull - default: - err := fmt.Errorf( - "invalid repeated arg type style: %s (expecting one of %v)", - s, - []enforceRepeatedArgTypeStyleType{ - enforceRepeatedArgTypeStyleTypeAny, - enforceRepeatedArgTypeStyleTypeShort, - enforceRepeatedArgTypeStyleTypeFull, - }, - ) - - panic(fmt.Sprintf("Invalid argument to the enforce-repeated-arg-type-style rule: %v", err)) - } -} - -// EnforceRepeatedArgTypeStyleRule implements a rule to enforce repeated argument type style. -type EnforceRepeatedArgTypeStyleRule struct { - configured bool - funcArgStyle enforceRepeatedArgTypeStyleType - funcRetValStyle enforceRepeatedArgTypeStyleType - - sync.Mutex -} - -func (r *EnforceRepeatedArgTypeStyleRule) configure(arguments lint.Arguments) { - r.Lock() - defer r.Unlock() - - if r.configured { - return - } - r.configured = true - - r.funcArgStyle = enforceRepeatedArgTypeStyleTypeAny - r.funcRetValStyle = enforceRepeatedArgTypeStyleTypeAny - - if len(arguments) == 0 { - return - } - - switch funcArgStyle := arguments[0].(type) { - case string: - r.funcArgStyle = repeatedArgTypeStyleFromString(funcArgStyle) - r.funcRetValStyle = repeatedArgTypeStyleFromString(funcArgStyle) - case map[string]any: // expecting map[string]string - for k, v := range funcArgStyle { - switch k { - case "funcArgStyle": - val, ok := v.(string) - if !ok { - panic(fmt.Sprintf("Invalid map value type for 'enforce-repeated-arg-type-style' rule. Expecting string, got %T", v)) - } - r.funcArgStyle = repeatedArgTypeStyleFromString(val) - case "funcRetValStyle": - val, ok := v.(string) - if !ok { - panic(fmt.Sprintf("Invalid map value '%v' for 'enforce-repeated-arg-type-style' rule. Expecting string, got %T", v, v)) - } - r.funcRetValStyle = repeatedArgTypeStyleFromString(val) - default: - panic(fmt.Sprintf("Invalid map key for 'enforce-repeated-arg-type-style' rule. Expecting 'funcArgStyle' or 'funcRetValStyle', got %v", k)) - } - } - default: - panic(fmt.Sprintf("Invalid argument '%v' for 'import-alias-naming' rule. 
Expecting string or map[string]string, got %T", arguments[0], arguments[0])) - } -} - -// Apply applies the rule to a given file. -func (r *EnforceRepeatedArgTypeStyleRule) Apply(file *lint.File, arguments lint.Arguments) []lint.Failure { - r.configure(arguments) - - if r.funcArgStyle == enforceRepeatedArgTypeStyleTypeAny && r.funcRetValStyle == enforceRepeatedArgTypeStyleTypeAny { - // This linter is not configured, return no failures. - return nil - } - - var failures []lint.Failure - - err := file.Pkg.TypeCheck() - if err != nil { - // the file has other issues - return nil - } - typesInfo := file.Pkg.TypesInfo() - - astFile := file.AST - ast.Inspect(astFile, func(n ast.Node) bool { - switch fn := n.(type) { - case *ast.FuncDecl: - if r.funcArgStyle == enforceRepeatedArgTypeStyleTypeFull { - if fn.Type.Params != nil { - for _, field := range fn.Type.Params.List { - if len(field.Names) > 1 { - failures = append(failures, lint.Failure{ - Confidence: 1, - Node: field, - Category: "style", - Failure: "argument types should not be omitted", - }) - } - } - } - } - - if r.funcArgStyle == enforceRepeatedArgTypeStyleTypeShort { - var prevType ast.Expr - if fn.Type.Params != nil { - for _, field := range fn.Type.Params.List { - if types.Identical(typesInfo.Types[field.Type].Type, typesInfo.Types[prevType].Type) { - failures = append(failures, lint.Failure{ - Confidence: 1, - Node: field, - Category: "style", - Failure: "repeated argument type can be omitted", - }) - } - prevType = field.Type - } - } - } - - if r.funcRetValStyle == enforceRepeatedArgTypeStyleTypeFull { - if fn.Type.Results != nil { - for _, field := range fn.Type.Results.List { - if len(field.Names) > 1 { - failures = append(failures, lint.Failure{ - Confidence: 1, - Node: field, - Category: "style", - Failure: "return types should not be omitted", - }) - } - } - } - } - - if r.funcRetValStyle == enforceRepeatedArgTypeStyleTypeShort { - var prevType ast.Expr - if fn.Type.Results != nil { - for _, field := range fn.Type.Results.List { - if field.Names != nil && types.Identical(typesInfo.Types[field.Type].Type, typesInfo.Types[prevType].Type) { - failures = append(failures, lint.Failure{ - Confidence: 1, - Node: field, - Category: "style", - Failure: "repeated return type can be omitted", - }) - } - prevType = field.Type - } - } - } - } - return true - }) - - return failures -} - -// Name returns the name of the linter rule. 
-func (*EnforceRepeatedArgTypeStyleRule) Name() string { - return "enforce-repeated-arg-type-style" -} diff --git a/vendor/github.com/mgechev/revive/rule/enforce-slice-style.go b/vendor/github.com/mgechev/revive/rule/enforce-slice-style.go deleted file mode 100644 index abaf20be0..000000000 --- a/vendor/github.com/mgechev/revive/rule/enforce-slice-style.go +++ /dev/null @@ -1,193 +0,0 @@ -package rule - -import ( - "fmt" - "go/ast" - "sync" - - "github.com/mgechev/revive/lint" -) - -type enforceSliceStyleType string - -const ( - enforceSliceStyleTypeAny enforceSliceStyleType = "any" - enforceSliceStyleTypeMake enforceSliceStyleType = "make" - enforceSliceStyleTypeLiteral enforceSliceStyleType = "literal" -) - -func sliceStyleFromString(s string) (enforceSliceStyleType, error) { - switch s { - case string(enforceSliceStyleTypeAny), "": - return enforceSliceStyleTypeAny, nil - case string(enforceSliceStyleTypeMake): - return enforceSliceStyleTypeMake, nil - case string(enforceSliceStyleTypeLiteral): - return enforceSliceStyleTypeLiteral, nil - default: - return enforceSliceStyleTypeAny, fmt.Errorf( - "invalid slice style: %s (expecting one of %v)", - s, - []enforceSliceStyleType{ - enforceSliceStyleTypeAny, - enforceSliceStyleTypeMake, - enforceSliceStyleTypeLiteral, - }, - ) - } -} - -// EnforceSliceStyleRule implements a rule to enforce `make([]type)` over `[]type{}`. -type EnforceSliceStyleRule struct { - configured bool - enforceSliceStyle enforceSliceStyleType - sync.Mutex -} - -func (r *EnforceSliceStyleRule) configure(arguments lint.Arguments) { - r.Lock() - defer r.Unlock() - - if r.configured { - return - } - r.configured = true - - if len(arguments) < 1 { - r.enforceSliceStyle = enforceSliceStyleTypeAny - return - } - - enforceSliceStyle, ok := arguments[0].(string) - if !ok { - panic(fmt.Sprintf("Invalid argument '%v' for 'enforce-slice-style' rule. Expecting string, got %T", arguments[0], arguments[0])) - } - - var err error - r.enforceSliceStyle, err = sliceStyleFromString(enforceSliceStyle) - - if err != nil { - panic(fmt.Sprintf("Invalid argument to the enforce-slice-style rule: %v", err)) - } -} - -// Apply applies the rule to given file. 
-func (r *EnforceSliceStyleRule) Apply(file *lint.File, arguments lint.Arguments) []lint.Failure { - r.configure(arguments) - - if r.enforceSliceStyle == enforceSliceStyleTypeAny { - // this linter is not configured - return nil - } - - var failures []lint.Failure - - astFile := file.AST - ast.Inspect(astFile, func(n ast.Node) bool { - switch v := n.(type) { - case *ast.CompositeLit: - if r.enforceSliceStyle != enforceSliceStyleTypeMake { - return true - } - - if !r.isSliceType(v.Type) { - return true - } - - if len(v.Elts) > 0 { - // not an empty slice - return true - } - - failures = append(failures, lint.Failure{ - Confidence: 1, - Node: v, - Category: "style", - Failure: "use make([]type) instead of []type{} (or declare nil slice)", - }) - case *ast.CallExpr: - if r.enforceSliceStyle != enforceSliceStyleTypeLiteral { - // skip any function calls, even if it's make([]type) - // we don't want to report it if literals are not enforced - return true - } - - ident, ok := v.Fun.(*ast.Ident) - if !ok || ident.Name != "make" { - return true - } - - if len(v.Args) < 2 { - // skip invalid make declarations - return true - } - - if !r.isSliceType(v.Args[0]) { - // not a slice type - return true - } - - arg, ok := v.Args[1].(*ast.BasicLit) - if !ok { - // skip invalid make declarations - return true - } - - if arg.Value != "0" { - // skip slice with non-zero size - return true - } - - if len(v.Args) > 2 { - arg, ok := v.Args[2].(*ast.BasicLit) - if !ok { - // skip invalid make declarations - return true - } - - if arg.Value != "0" { - // skip non-zero capacity slice - return true - } - } - - failures = append(failures, lint.Failure{ - Confidence: 1, - Node: v.Args[0], - Category: "style", - Failure: "use []type{} instead of make([]type, 0) (or declare nil slice)", - }) - } - return true - }) - - return failures -} - -// Name returns the rule name. -func (*EnforceSliceStyleRule) Name() string { - return "enforce-slice-style" -} - -func (r *EnforceSliceStyleRule) isSliceType(v ast.Expr) bool { - switch t := v.(type) { - case *ast.ArrayType: - if t.Len != nil { - // array - return false - } - // slice - return true - case *ast.Ident: - if t.Obj == nil { - return false - } - typeSpec, ok := t.Obj.Decl.(*ast.TypeSpec) - if !ok { - return false - } - return r.isSliceType(typeSpec.Type) - default: - return false - } -} diff --git a/vendor/github.com/mgechev/revive/rule/error-naming.go b/vendor/github.com/mgechev/revive/rule/error-naming.go deleted file mode 100644 index a4f24f3f0..000000000 --- a/vendor/github.com/mgechev/revive/rule/error-naming.go +++ /dev/null @@ -1,79 +0,0 @@ -package rule - -import ( - "fmt" - "go/ast" - "go/token" - "strings" - - "github.com/mgechev/revive/lint" -) - -// ErrorNamingRule lints given else constructs. -type ErrorNamingRule struct{} - -// Apply applies the rule to given file. -func (*ErrorNamingRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure { - var failures []lint.Failure - - fileAst := file.AST - walker := lintErrors{ - file: file, - fileAst: fileAst, - onFailure: func(failure lint.Failure) { - failures = append(failures, failure) - }, - } - - ast.Walk(walker, fileAst) - - return failures -} - -// Name returns the rule name. 
-func (*ErrorNamingRule) Name() string { - return "error-naming" -} - -type lintErrors struct { - file *lint.File - fileAst *ast.File - onFailure func(lint.Failure) -} - -func (w lintErrors) Visit(_ ast.Node) ast.Visitor { - for _, decl := range w.fileAst.Decls { - gd, ok := decl.(*ast.GenDecl) - if !ok || gd.Tok != token.VAR { - continue - } - for _, spec := range gd.Specs { - spec := spec.(*ast.ValueSpec) - if len(spec.Names) != 1 || len(spec.Values) != 1 { - continue - } - ce, ok := spec.Values[0].(*ast.CallExpr) - if !ok { - continue - } - if !isPkgDot(ce.Fun, "errors", "New") && !isPkgDot(ce.Fun, "fmt", "Errorf") { - continue - } - - id := spec.Names[0] - prefix := "err" - if id.IsExported() { - prefix = "Err" - } - if !strings.HasPrefix(id.Name, prefix) { - w.onFailure(lint.Failure{ - Node: id, - Confidence: 0.9, - Category: "naming", - Failure: fmt.Sprintf("error var %s should have name of the form %sFoo", id.Name, prefix), - }) - } - } - } - return nil -} diff --git a/vendor/github.com/mgechev/revive/rule/error-return.go b/vendor/github.com/mgechev/revive/rule/error-return.go deleted file mode 100644 index a724e001c..000000000 --- a/vendor/github.com/mgechev/revive/rule/error-return.go +++ /dev/null @@ -1,67 +0,0 @@ -package rule - -import ( - "go/ast" - - "github.com/mgechev/revive/lint" -) - -// ErrorReturnRule lints given else constructs. -type ErrorReturnRule struct{} - -// Apply applies the rule to given file. -func (*ErrorReturnRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure { - var failures []lint.Failure - - fileAst := file.AST - walker := lintErrorReturn{ - file: file, - fileAst: fileAst, - onFailure: func(failure lint.Failure) { - failures = append(failures, failure) - }, - } - - ast.Walk(walker, fileAst) - - return failures -} - -// Name returns the rule name. -func (*ErrorReturnRule) Name() string { - return "error-return" -} - -type lintErrorReturn struct { - file *lint.File - fileAst *ast.File - onFailure func(lint.Failure) -} - -func (w lintErrorReturn) Visit(n ast.Node) ast.Visitor { - fn, ok := n.(*ast.FuncDecl) - if !ok || fn.Type.Results == nil { - return w - } - ret := fn.Type.Results.List - if len(ret) <= 1 { - return w - } - if isIdent(ret[len(ret)-1].Type, "error") { - return nil - } - // An error return parameter should be the last parameter. - // Flag any error parameters found before the last. - for _, r := range ret[:len(ret)-1] { - if isIdent(r.Type, "error") { - w.onFailure(lint.Failure{ - Category: "arg-order", - Confidence: 0.9, - Node: fn, - Failure: "error should be the last type when returning multiple items", - }) - break // only flag one - } - } - return w -} diff --git a/vendor/github.com/mgechev/revive/rule/error-strings.go b/vendor/github.com/mgechev/revive/rule/error-strings.go deleted file mode 100644 index 81ebda540..000000000 --- a/vendor/github.com/mgechev/revive/rule/error-strings.go +++ /dev/null @@ -1,199 +0,0 @@ -package rule - -import ( - "go/ast" - "go/token" - "strconv" - "strings" - "sync" - "unicode" - "unicode/utf8" - - "github.com/mgechev/revive/lint" -) - -// ErrorStringsRule lints given else constructs. 
-type ErrorStringsRule struct { - errorFunctions map[string]map[string]struct{} - sync.Mutex -} - -func (r *ErrorStringsRule) configure(arguments lint.Arguments) { - r.Lock() - defer r.Unlock() - - if r.errorFunctions != nil { - return - } - - r.errorFunctions = map[string]map[string]struct{}{ - "fmt": { - "Errorf": {}, - }, - "errors": { - "Errorf": {}, - "WithMessage": {}, - "Wrap": {}, - "New": {}, - "WithMessagef": {}, - "Wrapf": {}, - }, - } - - var invalidCustomFunctions []string - for _, argument := range arguments { - if functionName, ok := argument.(string); ok { - fields := strings.Split(strings.TrimSpace(functionName), ".") - if len(fields) != 2 || len(fields[0]) == 0 || len(fields[1]) == 0 { - invalidCustomFunctions = append(invalidCustomFunctions, functionName) - continue - } - r.errorFunctions[fields[0]] = map[string]struct{}{fields[1]: {}} - } - } - if len(invalidCustomFunctions) != 0 { - panic("found invalid custom function: " + strings.Join(invalidCustomFunctions, ",")) - } -} - -// Apply applies the rule to given file. -func (r *ErrorStringsRule) Apply(file *lint.File, arguments lint.Arguments) []lint.Failure { - var failures []lint.Failure - - r.configure(arguments) - - fileAst := file.AST - walker := lintErrorStrings{ - file: file, - fileAst: fileAst, - errorFunctions: r.errorFunctions, - onFailure: func(failure lint.Failure) { - failures = append(failures, failure) - }, - } - - ast.Walk(walker, fileAst) - - return failures -} - -// Name returns the rule name. -func (*ErrorStringsRule) Name() string { - return "error-strings" -} - -type lintErrorStrings struct { - file *lint.File - fileAst *ast.File - errorFunctions map[string]map[string]struct{} - onFailure func(lint.Failure) -} - -// Visit browses the AST -func (w lintErrorStrings) Visit(n ast.Node) ast.Visitor { - ce, ok := n.(*ast.CallExpr) - if !ok { - return w - } - - if len(ce.Args) < 1 { - return w - } - - // expression matches the known pkg.function - ok = w.match(ce) - if !ok { - return w - } - - str, ok := w.getMessage(ce) - if !ok { - return w - } - s, _ := strconv.Unquote(str.Value) // can assume well-formed Go - if s == "" { - return w - } - clean, conf := lintErrorString(s) - if clean { - return w - } - w.onFailure(lint.Failure{ - Node: str, - Confidence: conf, - Category: "errors", - Failure: "error strings should not be capitalized or end with punctuation or a newline", - }) - return w -} - -// match returns true if the expression corresponds to the known pkg.function -// i.e.: errors.Wrap -func (w lintErrorStrings) match(expr *ast.CallExpr) bool { - sel, ok := expr.Fun.(*ast.SelectorExpr) - if !ok { - return false - } - // retrieve the package - id, ok := sel.X.(*ast.Ident) - if !ok { - return false - } - functions, ok := w.errorFunctions[id.Name] - if !ok { - return false - } - // retrieve the function - _, ok = functions[sel.Sel.Name] - return ok -} - -// getMessage returns the message depending on its position -// returns false if the cast is unsuccessful -func (w lintErrorStrings) getMessage(expr *ast.CallExpr) (s *ast.BasicLit, success bool) { - str, ok := w.checkArg(expr, 0) - if ok { - return str, true - } - if len(expr.Args) < 2 { - return s, false - } - str, ok = w.checkArg(expr, 1) - if !ok { - return s, false - } - return str, true -} - -func (lintErrorStrings) checkArg(expr *ast.CallExpr, arg int) (s *ast.BasicLit, success bool) { - str, ok := expr.Args[arg].(*ast.BasicLit) - if !ok { - return s, false - } - if str.Kind != token.STRING { - return s, false - } - return str, true -} - -func 
lintErrorString(s string) (isClean bool, conf float64) { - const basicConfidence = 0.8 - const capConfidence = basicConfidence - 0.2 - first, firstN := utf8.DecodeRuneInString(s) - last, _ := utf8.DecodeLastRuneInString(s) - if last == '.' || last == ':' || last == '!' || last == '\n' { - return false, basicConfidence - } - if unicode.IsUpper(first) { - // People use proper nouns and exported Go identifiers in error strings, - // so decrease the confidence of warnings for capitalization. - if len(s) <= firstN { - return false, capConfidence - } - // Flag strings starting with something that doesn't look like an initialism. - if second, _ := utf8.DecodeRuneInString(s[firstN:]); !unicode.IsUpper(second) { - return false, capConfidence - } - } - return true, 0 -} diff --git a/vendor/github.com/mgechev/revive/rule/errorf.go b/vendor/github.com/mgechev/revive/rule/errorf.go deleted file mode 100644 index 1588a745d..000000000 --- a/vendor/github.com/mgechev/revive/rule/errorf.go +++ /dev/null @@ -1,93 +0,0 @@ -package rule - -import ( - "fmt" - "go/ast" - "regexp" - "strings" - - "github.com/mgechev/revive/lint" -) - -// ErrorfRule lints given else constructs. -type ErrorfRule struct{} - -// Apply applies the rule to given file. -func (*ErrorfRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure { - var failures []lint.Failure - - fileAst := file.AST - walker := lintErrorf{ - file: file, - fileAst: fileAst, - onFailure: func(failure lint.Failure) { - failures = append(failures, failure) - }, - } - - file.Pkg.TypeCheck() - ast.Walk(walker, fileAst) - - return failures -} - -// Name returns the rule name. -func (*ErrorfRule) Name() string { - return "errorf" -} - -type lintErrorf struct { - file *lint.File - fileAst *ast.File - onFailure func(lint.Failure) -} - -func (w lintErrorf) Visit(n ast.Node) ast.Visitor { - ce, ok := n.(*ast.CallExpr) - if !ok || len(ce.Args) != 1 { - return w - } - isErrorsNew := isPkgDot(ce.Fun, "errors", "New") - var isTestingError bool - se, ok := ce.Fun.(*ast.SelectorExpr) - if ok && se.Sel.Name == "Error" { - if typ := w.file.Pkg.TypeOf(se.X); typ != nil { - isTestingError = typ.String() == "*testing.T" - } - } - if !isErrorsNew && !isTestingError { - return w - } - arg := ce.Args[0] - ce, ok = arg.(*ast.CallExpr) - if !ok || !isPkgDot(ce.Fun, "fmt", "Sprintf") { - return w - } - errorfPrefix := "fmt" - if isTestingError { - errorfPrefix = w.file.Render(se.X) - } - - failure := lint.Failure{ - Category: "errors", - Node: n, - Confidence: 1, - Failure: fmt.Sprintf("should replace %s(fmt.Sprintf(...)) with %s.Errorf(...)", w.file.Render(se), errorfPrefix), - } - - m := srcLineWithMatch(w.file, ce, `^(.*)`+w.file.Render(se)+`\(fmt\.Sprintf\((.*)\)\)(.*)$`) - if m != nil { - failure.ReplacementLine = m[1] + errorfPrefix + ".Errorf(" + m[2] + ")" + m[3] - } - - w.onFailure(failure) - - return w -} - -func srcLineWithMatch(file *lint.File, node ast.Node, pattern string) (m []string) { - line := srcLine(file.Content(), file.ToPosition(node.Pos())) - line = strings.TrimSuffix(line, "\n") - rx := regexp.MustCompile(pattern) - return rx.FindStringSubmatch(line) -} diff --git a/vendor/github.com/mgechev/revive/rule/exported.go b/vendor/github.com/mgechev/revive/rule/exported.go deleted file mode 100644 index b8663c48c..000000000 --- a/vendor/github.com/mgechev/revive/rule/exported.go +++ /dev/null @@ -1,340 +0,0 @@ -package rule - -import ( - "fmt" - "go/ast" - "go/token" - "strings" - "sync" - "unicode" - "unicode/utf8" - - 
"github.com/mgechev/revive/internal/typeparams" - "github.com/mgechev/revive/lint" -) - -// ExportedRule lints given else constructs. -type ExportedRule struct { - configured bool - checkPrivateReceivers bool - disableStutteringCheck bool - stuttersMsg string - sync.Mutex -} - -func (r *ExportedRule) configure(arguments lint.Arguments) { - r.Lock() - if !r.configured { - var sayRepetitiveInsteadOfStutters bool - r.checkPrivateReceivers, r.disableStutteringCheck, sayRepetitiveInsteadOfStutters = r.getConf(arguments) - r.stuttersMsg = "stutters" - if sayRepetitiveInsteadOfStutters { - r.stuttersMsg = "is repetitive" - } - - r.configured = true - } - r.Unlock() -} - -// Apply applies the rule to given file. -func (r *ExportedRule) Apply(file *lint.File, args lint.Arguments) []lint.Failure { - r.configure(args) - - var failures []lint.Failure - if file.IsTest() { - return failures - } - - fileAst := file.AST - - walker := lintExported{ - file: file, - fileAst: fileAst, - onFailure: func(failure lint.Failure) { - failures = append(failures, failure) - }, - genDeclMissingComments: make(map[*ast.GenDecl]bool), - checkPrivateReceivers: r.checkPrivateReceivers, - disableStutteringCheck: r.disableStutteringCheck, - stuttersMsg: r.stuttersMsg, - } - - ast.Walk(&walker, fileAst) - - return failures -} - -// Name returns the rule name. -func (*ExportedRule) Name() string { - return "exported" -} - -func (r *ExportedRule) getConf(args lint.Arguments) (checkPrivateReceivers, disableStutteringCheck, sayRepetitiveInsteadOfStutters bool) { - // if any, we expect a slice of strings as configuration - if len(args) < 1 { - return - } - for _, flag := range args { - flagStr, ok := flag.(string) - if !ok { - panic(fmt.Sprintf("Invalid argument for the %s rule: expecting a string, got %T", r.Name(), flag)) - } - - switch flagStr { - case "checkPrivateReceivers": - checkPrivateReceivers = true - case "disableStutteringCheck": - disableStutteringCheck = true - case "sayRepetitiveInsteadOfStutters": - sayRepetitiveInsteadOfStutters = true - default: - panic(fmt.Sprintf("Unknown configuration flag %s for %s rule", flagStr, r.Name())) - } - } - - return -} - -type lintExported struct { - file *lint.File - fileAst *ast.File - lastGen *ast.GenDecl - genDeclMissingComments map[*ast.GenDecl]bool - onFailure func(lint.Failure) - checkPrivateReceivers bool - disableStutteringCheck bool - stuttersMsg string -} - -func (w *lintExported) lintFuncDoc(fn *ast.FuncDecl) { - if !ast.IsExported(fn.Name.Name) { - // func is unexported - return - } - kind := "function" - name := fn.Name.Name - if fn.Recv != nil && len(fn.Recv.List) > 0 { - // method - kind = "method" - recv := typeparams.ReceiverType(fn) - if !w.checkPrivateReceivers && !ast.IsExported(recv) { - // receiver is unexported - return - } - if commonMethods[name] { - return - } - switch name { - case "Len", "Less", "Swap": - sortables := w.file.Pkg.Sortable() - if sortables[recv] { - return - } - } - name = recv + "." 
+ name - } - if fn.Doc == nil { - w.onFailure(lint.Failure{ - Node: fn, - Confidence: 1, - Category: "comments", - Failure: fmt.Sprintf("exported %s %s should have comment or be unexported", kind, name), - }) - return - } - s := normalizeText(fn.Doc.Text()) - prefix := fn.Name.Name + " " - if !strings.HasPrefix(s, prefix) { - w.onFailure(lint.Failure{ - Node: fn.Doc, - Confidence: 0.8, - Category: "comments", - Failure: fmt.Sprintf(`comment on exported %s %s should be of the form "%s..."`, kind, name, prefix), - }) - } -} - -func (w *lintExported) checkStutter(id *ast.Ident, thing string) { - if w.disableStutteringCheck { - return - } - - pkg, name := w.fileAst.Name.Name, id.Name - if !ast.IsExported(name) { - // unexported name - return - } - // A name stutters if the package name is a strict prefix - // and the next character of the name starts a new word. - if len(name) <= len(pkg) { - // name is too short to stutter. - // This permits the name to be the same as the package name. - return - } - if !strings.EqualFold(pkg, name[:len(pkg)]) { - return - } - // We can assume the name is well-formed UTF-8. - // If the next rune after the package name is uppercase or an underscore - // the it's starting a new word and thus this name stutters. - rem := name[len(pkg):] - if next, _ := utf8.DecodeRuneInString(rem); next == '_' || unicode.IsUpper(next) { - w.onFailure(lint.Failure{ - Node: id, - Confidence: 0.8, - Category: "naming", - Failure: fmt.Sprintf("%s name will be used as %s.%s by other packages, and that %s; consider calling this %s", thing, pkg, name, w.stuttersMsg, rem), - }) - } -} - -func (w *lintExported) lintTypeDoc(t *ast.TypeSpec, doc *ast.CommentGroup) { - if !ast.IsExported(t.Name.Name) { - return - } - if doc == nil { - w.onFailure(lint.Failure{ - Node: t, - Confidence: 1, - Category: "comments", - Failure: fmt.Sprintf("exported type %v should have comment or be unexported", t.Name), - }) - return - } - - s := normalizeText(doc.Text()) - articles := [...]string{"A", "An", "The", "This"} - for _, a := range articles { - if t.Name.Name == a { - continue - } - if strings.HasPrefix(s, a+" ") { - s = s[len(a)+1:] - break - } - } - if !strings.HasPrefix(s, t.Name.Name+" ") { - w.onFailure(lint.Failure{ - Node: doc, - Confidence: 1, - Category: "comments", - Failure: fmt.Sprintf(`comment on exported type %v should be of the form "%v ..." (with optional leading article)`, t.Name, t.Name), - }) - } -} - -func (w *lintExported) lintValueSpecDoc(vs *ast.ValueSpec, gd *ast.GenDecl, genDeclMissingComments map[*ast.GenDecl]bool) { - kind := "var" - if gd.Tok == token.CONST { - kind = "const" - } - - if len(vs.Names) > 1 { - // Check that none are exported except for the first. - for _, n := range vs.Names[1:] { - if ast.IsExported(n.Name) { - w.onFailure(lint.Failure{ - Category: "comments", - Confidence: 1, - Failure: fmt.Sprintf("exported %s %s should have its own declaration", kind, n.Name), - Node: vs, - }) - return - } - } - } - - // Only one name. 
- name := vs.Names[0].Name - if !ast.IsExported(name) { - return - } - - if vs.Doc == nil && vs.Comment == nil && gd.Doc == nil { - if genDeclMissingComments[gd] { - return - } - block := "" - if kind == "const" && gd.Lparen.IsValid() { - block = " (or a comment on this block)" - } - w.onFailure(lint.Failure{ - Confidence: 1, - Node: vs, - Category: "comments", - Failure: fmt.Sprintf("exported %s %s should have comment%s or be unexported", kind, name, block), - }) - genDeclMissingComments[gd] = true - return - } - // If this GenDecl has parens and a comment, we don't check its comment form. - if gd.Doc != nil && gd.Lparen.IsValid() { - return - } - // The relevant text to check will be on either vs.Doc or gd.Doc. - // Use vs.Doc preferentially. - var doc *ast.CommentGroup - switch { - case vs.Doc != nil: - doc = vs.Doc - case vs.Comment != nil && gd.Doc == nil: - doc = vs.Comment - default: - doc = gd.Doc - } - - prefix := name + " " - s := normalizeText(doc.Text()) - if !strings.HasPrefix(s, prefix) { - w.onFailure(lint.Failure{ - Confidence: 1, - Node: doc, - Category: "comments", - Failure: fmt.Sprintf(`comment on exported %s %s should be of the form "%s..."`, kind, name, prefix), - }) - } -} - -// normalizeText is a helper function that normalizes comment strings by: -// * removing one leading space -// -// This function is needed because ast.CommentGroup.Text() does not handle //-style and /*-style comments uniformly -func normalizeText(t string) string { - return strings.TrimPrefix(t, " ") -} - -func (w *lintExported) Visit(n ast.Node) ast.Visitor { - switch v := n.(type) { - case *ast.GenDecl: - if v.Tok == token.IMPORT { - return nil - } - // token.CONST, token.TYPE or token.VAR - w.lastGen = v - return w - case *ast.FuncDecl: - w.lintFuncDoc(v) - if v.Recv == nil { - // Only check for stutter on functions, not methods. - // Method names are not used package-qualified. - w.checkStutter(v.Name, "func") - } - // Don't proceed inside funcs. - return nil - case *ast.TypeSpec: - // inside a GenDecl, which usually has the doc - doc := v.Doc - if doc == nil { - doc = w.lastGen.Doc - } - w.lintTypeDoc(v, doc) - w.checkStutter(v.Name, "type") - // Don't proceed inside types. - return nil - case *ast.ValueSpec: - w.lintValueSpecDoc(v, w.lastGen, w.genDeclMissingComments) - return nil - } - return w -} diff --git a/vendor/github.com/mgechev/revive/rule/file-header.go b/vendor/github.com/mgechev/revive/rule/file-header.go deleted file mode 100644 index a7d69ff2b..000000000 --- a/vendor/github.com/mgechev/revive/rule/file-header.go +++ /dev/null @@ -1,87 +0,0 @@ -package rule - -import ( - "fmt" - "regexp" - "sync" - - "github.com/mgechev/revive/lint" -) - -// FileHeaderRule lints given else constructs. -type FileHeaderRule struct { - header string - sync.Mutex -} - -var ( - multiRegexp = regexp.MustCompile(`^/\*`) - singleRegexp = regexp.MustCompile("^//") -) - -func (r *FileHeaderRule) configure(arguments lint.Arguments) { - r.Lock() - defer r.Unlock() - if r.header == "" { - if len(arguments) < 1 { - return - } - - var ok bool - r.header, ok = arguments[0].(string) - if !ok { - panic(fmt.Sprintf("invalid argument for \"file-header\" rule: argument should be a string, got %T", arguments[0])) - } - } -} - -// Apply applies the rule to given file. 
-func (r *FileHeaderRule) Apply(file *lint.File, arguments lint.Arguments) []lint.Failure { - r.configure(arguments) - - if r.header == "" { - return nil - } - - failure := []lint.Failure{ - { - Node: file.AST, - Confidence: 1, - Failure: "the file doesn't have an appropriate header", - }, - } - - if len(file.AST.Comments) == 0 { - return failure - } - - g := file.AST.Comments[0] - if g == nil { - return failure - } - comment := "" - for _, c := range g.List { - text := c.Text - if multiRegexp.MatchString(text) { - text = text[2 : len(text)-2] - } else if singleRegexp.MatchString(text) { - text = text[2:] - } - comment += text - } - - regex, err := regexp.Compile(r.header) - if err != nil { - panic(err.Error()) - } - - if !regex.MatchString(comment) { - return failure - } - return nil -} - -// Name returns the rule name. -func (*FileHeaderRule) Name() string { - return "file-header" -} diff --git a/vendor/github.com/mgechev/revive/rule/flag-param.go b/vendor/github.com/mgechev/revive/rule/flag-param.go deleted file mode 100644 index f9bfb712c..000000000 --- a/vendor/github.com/mgechev/revive/rule/flag-param.go +++ /dev/null @@ -1,105 +0,0 @@ -package rule - -import ( - "fmt" - "go/ast" - - "github.com/mgechev/revive/lint" -) - -// FlagParamRule lints given else constructs. -type FlagParamRule struct{} - -// Apply applies the rule to given file. -func (*FlagParamRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure { - var failures []lint.Failure - - onFailure := func(failure lint.Failure) { - failures = append(failures, failure) - } - - w := lintFlagParamRule{onFailure: onFailure} - ast.Walk(w, file.AST) - return failures -} - -// Name returns the rule name. -func (*FlagParamRule) Name() string { - return "flag-parameter" -} - -type lintFlagParamRule struct { - onFailure func(lint.Failure) -} - -func (w lintFlagParamRule) Visit(node ast.Node) ast.Visitor { - fd, ok := node.(*ast.FuncDecl) - if !ok { - return w - } - - if fd.Body == nil { - return nil // skip whole function declaration - } - - for _, p := range fd.Type.Params.List { - t := p.Type - - id, ok := t.(*ast.Ident) - if !ok { - continue - } - - if id.Name != "bool" { - continue - } - - cv := conditionVisitor{p.Names, fd, w} - ast.Walk(cv, fd.Body) - } - - return w -} - -type conditionVisitor struct { - ids []*ast.Ident - fd *ast.FuncDecl - linter lintFlagParamRule -} - -func (w conditionVisitor) Visit(node ast.Node) ast.Visitor { - ifStmt, ok := node.(*ast.IfStmt) - if !ok { - return w - } - - fselect := func(n ast.Node) bool { - ident, ok := n.(*ast.Ident) - if !ok { - return false - } - - for _, id := range w.ids { - if ident.Name == id.Name { - return true - } - } - - return false - } - - uses := pick(ifStmt.Cond, fselect) - - if len(uses) < 1 { - return w - } - - w.linter.onFailure(lint.Failure{ - Confidence: 1, - Node: w.fd.Type.Params, - Category: "bad practice", - Failure: fmt.Sprintf("parameter '%s' seems to be a control flag, avoid control coupling", uses[0]), - }) - - return nil -} diff --git a/vendor/github.com/mgechev/revive/rule/function-length.go b/vendor/github.com/mgechev/revive/rule/function-length.go deleted file mode 100644 index fd65884e9..000000000 --- a/vendor/github.com/mgechev/revive/rule/function-length.go +++ /dev/null @@ -1,177 +0,0 @@ -package rule - -import ( - "fmt" - "go/ast" - "reflect" - "sync" - - "github.com/mgechev/revive/lint" -) - -// FunctionLength lint. 
-type FunctionLength struct { - maxStmt int - maxLines int - configured bool - sync.Mutex -} - -func (r *FunctionLength) configure(arguments lint.Arguments) { - r.Lock() - defer r.Unlock() - if !r.configured { - maxStmt, maxLines := r.parseArguments(arguments) - r.maxStmt = int(maxStmt) - r.maxLines = int(maxLines) - r.configured = true - } -} - -// Apply applies the rule to given file. -func (r *FunctionLength) Apply(file *lint.File, arguments lint.Arguments) []lint.Failure { - r.configure(arguments) - - var failures []lint.Failure - - walker := lintFuncLength{ - file: file, - maxStmt: r.maxStmt, - maxLines: r.maxLines, - onFailure: func(failure lint.Failure) { - failures = append(failures, failure) - }, - } - - ast.Walk(walker, file.AST) - - return failures -} - -// Name returns the rule name. -func (*FunctionLength) Name() string { - return "function-length" -} - -const defaultFuncStmtsLimit = 50 -const defaultFuncLinesLimit = 75 - -func (*FunctionLength) parseArguments(arguments lint.Arguments) (maxStmt, maxLines int64) { - if len(arguments) == 0 { - return defaultFuncStmtsLimit, defaultFuncLinesLimit - } - - if len(arguments) != 2 { - panic(fmt.Sprintf(`invalid configuration for "function-length" rule, expected 2 arguments but got %d`, len(arguments))) - } - - maxStmt, maxStmtOk := arguments[0].(int64) - if !maxStmtOk { - panic(fmt.Sprintf(`invalid configuration value for max statements in "function-length" rule; need int64 but got %T`, arguments[0])) - } - if maxStmt < 0 { - panic(fmt.Sprintf(`the configuration value for max statements in "function-length" rule cannot be negative, got %d`, maxStmt)) - } - - maxLines, maxLinesOk := arguments[1].(int64) - if !maxLinesOk { - panic(fmt.Sprintf(`invalid configuration value for max lines in "function-length" rule; need int64 but got %T`, arguments[1])) - } - if maxLines < 0 { - panic(fmt.Sprintf(`the configuration value for max statements in "function-length" rule cannot be negative, got %d`, maxLines)) - } - - return maxStmt, maxLines -} - -type lintFuncLength struct { - file *lint.File - maxStmt int - maxLines int - onFailure func(lint.Failure) -} - -func (w lintFuncLength) Visit(n ast.Node) ast.Visitor { - node, ok := n.(*ast.FuncDecl) - if !ok { - return w - } - - body := node.Body - if body == nil || len(node.Body.List) == 0 { - return nil - } - - if w.maxStmt > 0 { - stmtCount := w.countStmts(node.Body.List) - if stmtCount > w.maxStmt { - w.onFailure(lint.Failure{ - Confidence: 1, - Failure: fmt.Sprintf("maximum number of statements per function exceeded; max %d but got %d", w.maxStmt, stmtCount), - Node: node, - }) - } - } - - if w.maxLines > 0 { - lineCount := w.countLines(node.Body) - if lineCount > w.maxLines { - w.onFailure(lint.Failure{ - Confidence: 1, - Failure: fmt.Sprintf("maximum number of lines per function exceeded; max %d but got %d", w.maxLines, lineCount), - Node: node, - }) - } - } - - return nil -} - -func (w lintFuncLength) countLines(b *ast.BlockStmt) int { - return w.file.ToPosition(b.End()).Line - w.file.ToPosition(b.Pos()).Line - 1 -} - -func (w lintFuncLength) countStmts(b []ast.Stmt) int { - count := 0 - for _, s := range b { - switch stmt := s.(type) { - case *ast.BlockStmt: - count += w.countStmts(stmt.List) - case *ast.IfStmt: - count += 1 + w.countBodyListStmts(stmt) - if stmt.Else != nil { - elseBody, ok := stmt.Else.(*ast.BlockStmt) - if ok { - count += w.countStmts(elseBody.List) - } - } - case *ast.ForStmt, *ast.RangeStmt, - *ast.SwitchStmt, *ast.TypeSwitchStmt, *ast.SelectStmt: - count += 1 + 
w.countBodyListStmts(stmt) - case *ast.CaseClause: - count += w.countStmts(stmt.Body) - case *ast.AssignStmt: - count += 1 + w.countFuncLitStmts(stmt.Rhs[0]) - case *ast.GoStmt: - count += 1 + w.countFuncLitStmts(stmt.Call.Fun) - case *ast.DeferStmt: - count += 1 + w.countFuncLitStmts(stmt.Call.Fun) - default: - count++ - } - } - - return count -} - -func (w lintFuncLength) countFuncLitStmts(stmt ast.Expr) int { - if block, ok := stmt.(*ast.FuncLit); ok { - return w.countStmts(block.Body.List) - } - return 0 -} - -func (w lintFuncLength) countBodyListStmts(t any) int { - i := reflect.ValueOf(t).Elem().FieldByName(`Body`).Elem().FieldByName(`List`).Interface() - return w.countStmts(i.([]ast.Stmt)) -} diff --git a/vendor/github.com/mgechev/revive/rule/function-result-limit.go b/vendor/github.com/mgechev/revive/rule/function-result-limit.go deleted file mode 100644 index 6a0748011..000000000 --- a/vendor/github.com/mgechev/revive/rule/function-result-limit.go +++ /dev/null @@ -1,83 +0,0 @@ -package rule - -import ( - "fmt" - "go/ast" - "sync" - - "github.com/mgechev/revive/lint" -) - -// FunctionResultsLimitRule lints given else constructs. -type FunctionResultsLimitRule struct { - max int - sync.Mutex -} - -const defaultResultsLimit = 3 - -func (r *FunctionResultsLimitRule) configure(arguments lint.Arguments) { - r.Lock() - defer r.Unlock() - if r.max == 0 { - if len(arguments) < 1 { - r.max = defaultResultsLimit - return - } - max, ok := arguments[0].(int64) // Alt. non panicking version - if !ok { - panic(fmt.Sprintf(`invalid value passed as return results number to the "function-result-limit" rule; need int64 but got %T`, arguments[0])) - } - if max < 0 { - panic(`the value passed as return results number to the "function-result-limit" rule cannot be negative`) - } - r.max = int(max) - } -} - -// Apply applies the rule to given file. -func (r *FunctionResultsLimitRule) Apply(file *lint.File, arguments lint.Arguments) []lint.Failure { - r.configure(arguments) - - var failures []lint.Failure - - walker := lintFunctionResultsNum{ - max: r.max, - onFailure: func(failure lint.Failure) { - failures = append(failures, failure) - }, - } - - ast.Walk(walker, file.AST) - - return failures -} - -// Name returns the rule name. -func (*FunctionResultsLimitRule) Name() string { - return "function-result-limit" -} - -type lintFunctionResultsNum struct { - max int - onFailure func(lint.Failure) -} - -func (w lintFunctionResultsNum) Visit(n ast.Node) ast.Visitor { - node, ok := n.(*ast.FuncDecl) - if ok { - num := 0 - if node.Type.Results != nil { - num = node.Type.Results.NumFields() - } - if num > w.max { - w.onFailure(lint.Failure{ - Confidence: 1, - Failure: fmt.Sprintf("maximum number of return results per function exceeded; max %d but got %d", w.max, num), - Node: node.Type, - }) - return w - } - } - return w -} diff --git a/vendor/github.com/mgechev/revive/rule/get-return.go b/vendor/github.com/mgechev/revive/rule/get-return.go deleted file mode 100644 index 600a40fac..000000000 --- a/vendor/github.com/mgechev/revive/rule/get-return.go +++ /dev/null @@ -1,70 +0,0 @@ -package rule - -import ( - "fmt" - "go/ast" - "strings" - - "github.com/mgechev/revive/lint" -) - -// GetReturnRule lints given else constructs. -type GetReturnRule struct{} - -// Apply applies the rule to given file. 
-func (*GetReturnRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure { - var failures []lint.Failure - - onFailure := func(failure lint.Failure) { - failures = append(failures, failure) - } - - w := lintReturnRule{onFailure} - ast.Walk(w, file.AST) - return failures -} - -// Name returns the rule name. -func (*GetReturnRule) Name() string { - return "get-return" -} - -type lintReturnRule struct { - onFailure func(lint.Failure) -} - -func isGetter(name string) bool { - if strings.HasPrefix(strings.ToUpper(name), "GET") { - if len(name) > 3 { - c := name[3] - return !(c >= 'a' && c <= 'z') - } - } - - return false -} - -func hasResults(rs *ast.FieldList) bool { - return rs != nil && len(rs.List) > 0 -} - -func (w lintReturnRule) Visit(node ast.Node) ast.Visitor { - fd, ok := node.(*ast.FuncDecl) - if !ok { - return w - } - - if !isGetter(fd.Name.Name) { - return w - } - if !hasResults(fd.Type.Results) { - w.onFailure(lint.Failure{ - Confidence: 0.8, - Node: fd, - Category: "logic", - Failure: fmt.Sprintf("function '%s' seems to be a getter but it does not return any result", fd.Name.Name), - }) - } - - return w -} diff --git a/vendor/github.com/mgechev/revive/rule/identical-branches.go b/vendor/github.com/mgechev/revive/rule/identical-branches.go deleted file mode 100644 index 9222c8a9c..000000000 --- a/vendor/github.com/mgechev/revive/rule/identical-branches.go +++ /dev/null @@ -1,84 +0,0 @@ -package rule - -import ( - "go/ast" - - "github.com/mgechev/revive/lint" -) - -// IdenticalBranchesRule warns on constant logical expressions. -type IdenticalBranchesRule struct{} - -// Apply applies the rule to given file. -func (*IdenticalBranchesRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure { - var failures []lint.Failure - - onFailure := func(failure lint.Failure) { - failures = append(failures, failure) - } - - astFile := file.AST - w := &lintIdenticalBranches{astFile, onFailure} - ast.Walk(w, astFile) - return failures -} - -// Name returns the rule name. 
-func (*IdenticalBranchesRule) Name() string { - return "identical-branches" -} - -type lintIdenticalBranches struct { - file *ast.File - onFailure func(lint.Failure) -} - -func (w *lintIdenticalBranches) Visit(node ast.Node) ast.Visitor { - n, ok := node.(*ast.IfStmt) - if !ok { - return w - } - - if n.Else == nil { - return w - } - branches := []*ast.BlockStmt{n.Body} - - elseBranch, ok := n.Else.(*ast.BlockStmt) - if !ok { // if-else-if construction - return w - } - branches = append(branches, elseBranch) - - if w.identicalBranches(branches) { - w.newFailure(n, "both branches of the if are identical") - } - - return w -} - -func (lintIdenticalBranches) identicalBranches(branches []*ast.BlockStmt) bool { - if len(branches) < 2 { - return false - } - - ref := gofmt(branches[0]) - refSize := len(branches[0].List) - for i := 1; i < len(branches); i++ { - currentSize := len(branches[i].List) - if currentSize != refSize || gofmt(branches[i]) != ref { - return false - } - } - - return true -} - -func (w lintIdenticalBranches) newFailure(node ast.Node, msg string) { - w.onFailure(lint.Failure{ - Confidence: 1, - Node: node, - Category: "logic", - Failure: msg, - }) -} diff --git a/vendor/github.com/mgechev/revive/rule/if-return.go b/vendor/github.com/mgechev/revive/rule/if-return.go deleted file mode 100644 index a6a3113ad..000000000 --- a/vendor/github.com/mgechev/revive/rule/if-return.go +++ /dev/null @@ -1,115 +0,0 @@ -package rule - -import ( - "go/ast" - "go/token" - "strings" - - "github.com/mgechev/revive/lint" -) - -// IfReturnRule lints given else constructs. -type IfReturnRule struct{} - -// Apply applies the rule to given file. -func (*IfReturnRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure { - var failures []lint.Failure - - onFailure := func(failure lint.Failure) { - failures = append(failures, failure) - } - - astFile := file.AST - w := &lintElseError{astFile, onFailure} - ast.Walk(w, astFile) - return failures -} - -// Name returns the rule name. -func (*IfReturnRule) Name() string { - return "if-return" -} - -type lintElseError struct { - file *ast.File - onFailure func(lint.Failure) -} - -func (w *lintElseError) Visit(node ast.Node) ast.Visitor { - switch v := node.(type) { - case *ast.BlockStmt: - for i := 0; i < len(v.List)-1; i++ { - // if var := whatever; var != nil { return var } - s, ok := v.List[i].(*ast.IfStmt) - if !ok || s.Body == nil || len(s.Body.List) != 1 || s.Else != nil { - continue - } - assign, ok := s.Init.(*ast.AssignStmt) - if !ok || len(assign.Lhs) != 1 || !(assign.Tok == token.DEFINE || assign.Tok == token.ASSIGN) { - continue - } - id, ok := assign.Lhs[0].(*ast.Ident) - if !ok { - continue - } - expr, ok := s.Cond.(*ast.BinaryExpr) - if !ok || expr.Op != token.NEQ { - continue - } - if lhs, ok := expr.X.(*ast.Ident); !ok || lhs.Name != id.Name { - continue - } - if rhs, ok := expr.Y.(*ast.Ident); !ok || rhs.Name != "nil" { - continue - } - r, ok := s.Body.List[0].(*ast.ReturnStmt) - if !ok || len(r.Results) != 1 { - continue - } - if r, ok := r.Results[0].(*ast.Ident); !ok || r.Name != id.Name { - continue - } - - // return nil - r, ok = v.List[i+1].(*ast.ReturnStmt) - if !ok || len(r.Results) != 1 { - continue - } - if r, ok := r.Results[0].(*ast.Ident); !ok || r.Name != "nil" { - continue - } - - // check if there are any comments explaining the construct, don't emit an error if there are some. 
- if containsComments(s.Pos(), r.Pos(), w.file) { - continue - } - - w.onFailure(lint.Failure{ - Confidence: .9, - Node: v.List[i], - Failure: "redundant if ...; err != nil check, just return error instead.", - }) - } - } - return w -} - -func containsComments(start, end token.Pos, f *ast.File) bool { - for _, cgroup := range f.Comments { - comments := cgroup.List - if comments[0].Slash >= end { - // All comments starting with this group are after end pos. - return false - } - if comments[len(comments)-1].Slash < start { - // Comments group ends before start pos. - continue - } - for _, c := range comments { - if start <= c.Slash && c.Slash < end && !strings.HasPrefix(c.Text, "// MATCH ") { - return true - } - } - } - return false -} diff --git a/vendor/github.com/mgechev/revive/rule/import-alias-naming.go b/vendor/github.com/mgechev/revive/rule/import-alias-naming.go deleted file mode 100644 index a6d096c8b..000000000 --- a/vendor/github.com/mgechev/revive/rule/import-alias-naming.go +++ /dev/null @@ -1,126 +0,0 @@ -package rule - -import ( - "fmt" - "regexp" - "sync" - - "github.com/mgechev/revive/lint" -) - -// ImportAliasNamingRule lints import alias naming. -type ImportAliasNamingRule struct { - configured bool - allowRegexp *regexp.Regexp - denyRegexp *regexp.Regexp - sync.Mutex -} - -const defaultImportAliasNamingAllowRule = "^[a-z][a-z0-9]{0,}$" - -var defaultImportAliasNamingAllowRegexp = regexp.MustCompile(defaultImportAliasNamingAllowRule) - -func (r *ImportAliasNamingRule) configure(arguments lint.Arguments) { - r.Lock() - defer r.Unlock() - if r.configured { - return - } - - if len(arguments) == 0 { - r.allowRegexp = defaultImportAliasNamingAllowRegexp - return - } - - switch namingRule := arguments[0].(type) { - case string: - r.setAllowRule(namingRule) - case map[string]any: // expecting map[string]string - for k, v := range namingRule { - switch k { - case "allowRegex": - r.setAllowRule(v) - case "denyRegex": - r.setDenyRule(v) - default: - panic(fmt.Sprintf("Invalid map key for 'import-alias-naming' rule. Expecting 'allowRegex' or 'denyRegex', got %v", k)) - } - } - default: - panic(fmt.Sprintf("Invalid argument '%v' for 'import-alias-naming' rule. Expecting string or map[string]string, got %T", arguments[0], arguments[0])) - } - - if r.allowRegexp == nil && r.denyRegexp == nil { - r.allowRegexp = defaultImportAliasNamingAllowRegexp - } -} - -// Apply applies the rule to given file. -func (r *ImportAliasNamingRule) Apply(file *lint.File, arguments lint.Arguments) []lint.Failure { - r.configure(arguments) - - var failures []lint.Failure - - for _, is := range file.AST.Imports { - path := is.Path - if path == nil { - continue - } - - alias := is.Name - if alias == nil || alias.Name == "_" || alias.Name == "." { // "_" and "." are special types of import aiases and should be processed by another linter rule - continue - } - - if r.allowRegexp != nil && !r.allowRegexp.MatchString(alias.Name) { - failures = append(failures, lint.Failure{ - Confidence: 1, - Failure: fmt.Sprintf("import name (%s) must match the regular expression: %s", alias.Name, r.allowRegexp.String()), - Node: alias, - Category: "imports", - }) - } - - if r.denyRegexp != nil && r.denyRegexp.MatchString(alias.Name) { - failures = append(failures, lint.Failure{ - Confidence: 1, - Failure: fmt.Sprintf("import name (%s) must NOT match the regular expression: %s", alias.Name, r.denyRegexp.String()), - Node: alias, - Category: "imports", - }) - } - } - - return failures -} - -// Name returns the rule name. 
-func (*ImportAliasNamingRule) Name() string { - return "import-alias-naming" -} - -func (r *ImportAliasNamingRule) setAllowRule(value any) { - namingRule, ok := value.(string) - if !ok { - panic(fmt.Sprintf("Invalid argument '%v' for import-alias-naming allowRegexp rule. Expecting string, got %T", value, value)) - } - - namingRuleRegexp, err := regexp.Compile(namingRule) - if err != nil { - panic(fmt.Sprintf("Invalid argument to the import-alias-naming allowRegexp rule. Expecting %q to be a valid regular expression, got: %v", namingRule, err)) - } - r.allowRegexp = namingRuleRegexp -} - -func (r *ImportAliasNamingRule) setDenyRule(value any) { - namingRule, ok := value.(string) - if !ok { - panic(fmt.Sprintf("Invalid argument '%v' for import-alias-naming denyRegexp rule. Expecting string, got %T", value, value)) - } - - namingRuleRegexp, err := regexp.Compile(namingRule) - if err != nil { - panic(fmt.Sprintf("Invalid argument to the import-alias-naming denyRegexp rule. Expecting %q to be a valid regular expression, got: %v", namingRule, err)) - } - r.denyRegexp = namingRuleRegexp -} diff --git a/vendor/github.com/mgechev/revive/rule/import-shadowing.go b/vendor/github.com/mgechev/revive/rule/import-shadowing.go deleted file mode 100644 index 046aeb688..000000000 --- a/vendor/github.com/mgechev/revive/rule/import-shadowing.go +++ /dev/null @@ -1,115 +0,0 @@ -package rule - -import ( - "fmt" - "go/ast" - "go/token" - "strings" - - "github.com/mgechev/revive/lint" -) - -// ImportShadowingRule lints given else constructs. -type ImportShadowingRule struct{} - -// Apply applies the rule to given file. -func (*ImportShadowingRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure { - var failures []lint.Failure - - importNames := map[string]struct{}{} - for _, imp := range file.AST.Imports { - importNames[getName(imp)] = struct{}{} - } - - fileAst := file.AST - walker := importShadowing{ - packageNameIdent: fileAst.Name, - importNames: importNames, - onFailure: func(failure lint.Failure) { - failures = append(failures, failure) - }, - alreadySeen: map[*ast.Object]struct{}{}, - skipIdents: map[*ast.Ident]struct{}{}, - } - - ast.Walk(walker, fileAst) - - return failures -} - -// Name returns the rule name. 
-func (*ImportShadowingRule) Name() string { - return "import-shadowing" -} - -func getName(imp *ast.ImportSpec) string { - const pathSep = "/" - const strDelim = `"` - if imp.Name != nil { - return imp.Name.Name - } - - path := imp.Path.Value - i := strings.LastIndex(path, pathSep) - if i == -1 { - return strings.Trim(path, strDelim) - } - - return strings.Trim(path[i+1:], strDelim) -} - -type importShadowing struct { - packageNameIdent *ast.Ident - importNames map[string]struct{} - onFailure func(lint.Failure) - alreadySeen map[*ast.Object]struct{} - skipIdents map[*ast.Ident]struct{} -} - -// Visit visits AST nodes and checks if id nodes (ast.Ident) shadow an import name -func (w importShadowing) Visit(n ast.Node) ast.Visitor { - switch n := n.(type) { - case *ast.AssignStmt: - if n.Tok == token.DEFINE { - return w // analyze variable declarations of the form id := expr - } - - return nil // skip assigns of the form id = expr (not an id declaration) - case *ast.CallExpr, // skip call expressions (not an id declaration) - *ast.ImportSpec, // skip import section subtree because we already have the list of imports - *ast.KeyValueExpr, // skip analysis of key-val expressions ({key:value}): ids of such expressions, even the same of an import name, do not shadow the import name - *ast.ReturnStmt, // skip skipping analysis of returns, ids in expression were already analyzed - *ast.SelectorExpr, // skip analysis of selector expressions (anId.otherId): because if anId shadows an import name, it was already detected, and otherId does not shadows the import name - *ast.StructType: // skip analysis of struct type because struct fields can not shadow an import name - return nil - case *ast.FuncDecl: - if n.Recv != nil { - w.skipIdents[n.Name] = struct{}{} - } - case *ast.Ident: - if n == w.packageNameIdent { - return nil // skip the ident corresponding to the package name of this file - } - - id := n.Name - if id == "_" { - return w // skip _ id - } - - _, isImportName := w.importNames[id] - _, alreadySeen := w.alreadySeen[n.Obj] - _, skipIdent := w.skipIdents[n] - if isImportName && !alreadySeen && !skipIdent { - w.onFailure(lint.Failure{ - Confidence: 1, - Node: n, - Category: "naming", - Failure: fmt.Sprintf("The name '%s' shadows an import name", id), - }) - - w.alreadySeen[n.Obj] = struct{}{} - } - } - - return w -} diff --git a/vendor/github.com/mgechev/revive/rule/imports-blocklist.go b/vendor/github.com/mgechev/revive/rule/imports-blocklist.go deleted file mode 100644 index 431066403..000000000 --- a/vendor/github.com/mgechev/revive/rule/imports-blocklist.go +++ /dev/null @@ -1,73 +0,0 @@ -package rule - -import ( - "fmt" - "regexp" - "sync" - - "github.com/mgechev/revive/lint" -) - -// ImportsBlocklistRule lints given else constructs. -type ImportsBlocklistRule struct { - blocklist []*regexp.Regexp - sync.Mutex -} - -var replaceImportRegexp = regexp.MustCompile(`/?\*\*/?`) - -func (r *ImportsBlocklistRule) configure(arguments lint.Arguments) { - r.Lock() - defer r.Unlock() - - if r.blocklist == nil { - r.blocklist = make([]*regexp.Regexp, 0) - - for _, arg := range arguments { - argStr, ok := arg.(string) - if !ok { - panic(fmt.Sprintf("Invalid argument to the imports-blocklist rule. Expecting a string, got %T", arg)) - } - regStr, err := regexp.Compile(fmt.Sprintf(`(?m)"%s"$`, replaceImportRegexp.ReplaceAllString(argStr, `(\W|\w)*`))) - if err != nil { - panic(fmt.Sprintf("Invalid argument to the imports-blocklist rule. 
Expecting %q to be a valid regular expression, got: %v", argStr, err)) - } - r.blocklist = append(r.blocklist, regStr) - } - } -} - -func (r *ImportsBlocklistRule) isBlocklisted(path string) bool { - for _, regex := range r.blocklist { - if regex.MatchString(path) { - return true - } - } - return false -} - -// Apply applies the rule to given file. -func (r *ImportsBlocklistRule) Apply(file *lint.File, arguments lint.Arguments) []lint.Failure { - r.configure(arguments) - - var failures []lint.Failure - - for _, is := range file.AST.Imports { - path := is.Path - if path != nil && r.isBlocklisted(path.Value) { - failures = append(failures, lint.Failure{ - Confidence: 1, - Failure: "should not use the following blocklisted import: " + path.Value, - Node: is, - Category: "imports", - }) - } - } - - return failures -} - -// Name returns the rule name. -func (*ImportsBlocklistRule) Name() string { - return "imports-blocklist" -} diff --git a/vendor/github.com/mgechev/revive/rule/increment-decrement.go b/vendor/github.com/mgechev/revive/rule/increment-decrement.go deleted file mode 100644 index 34a8e1ec5..000000000 --- a/vendor/github.com/mgechev/revive/rule/increment-decrement.go +++ /dev/null @@ -1,73 +0,0 @@ -package rule - -import ( - "fmt" - "go/ast" - "go/token" - - "github.com/mgechev/revive/lint" -) - -// IncrementDecrementRule lints given else constructs. -type IncrementDecrementRule struct{} - -// Apply applies the rule to given file. -func (*IncrementDecrementRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure { - var failures []lint.Failure - - fileAst := file.AST - walker := lintIncrementDecrement{ - file: file, - onFailure: func(failure lint.Failure) { - failures = append(failures, failure) - }, - } - - ast.Walk(walker, fileAst) - - return failures -} - -// Name returns the rule name. -func (*IncrementDecrementRule) Name() string { - return "increment-decrement" -} - -type lintIncrementDecrement struct { - file *lint.File - onFailure func(lint.Failure) -} - -func (w lintIncrementDecrement) Visit(n ast.Node) ast.Visitor { - as, ok := n.(*ast.AssignStmt) - if !ok { - return w - } - if len(as.Lhs) != 1 { - return w - } - if !isOne(as.Rhs[0]) { - return w - } - var suffix string - switch as.Tok { - case token.ADD_ASSIGN: - suffix = "++" - case token.SUB_ASSIGN: - suffix = "--" - default: - return w - } - w.onFailure(lint.Failure{ - Confidence: 0.8, - Node: as, - Category: "unary-op", - Failure: fmt.Sprintf("should replace %s with %s%s", w.file.Render(as), w.file.Render(as.Lhs[0]), suffix), - }) - return w -} - -func isOne(expr ast.Expr) bool { - lit, ok := expr.(*ast.BasicLit) - return ok && lit.Kind == token.INT && lit.Value == "1" -} diff --git a/vendor/github.com/mgechev/revive/rule/indent-error-flow.go b/vendor/github.com/mgechev/revive/rule/indent-error-flow.go deleted file mode 100644 index 294ceef84..000000000 --- a/vendor/github.com/mgechev/revive/rule/indent-error-flow.go +++ /dev/null @@ -1,45 +0,0 @@ -package rule - -import ( - "github.com/mgechev/revive/internal/ifelse" - "github.com/mgechev/revive/lint" -) - -// IndentErrorFlowRule lints given else constructs. -type IndentErrorFlowRule struct{} - -// Apply applies the rule to given file. -func (e *IndentErrorFlowRule) Apply(file *lint.File, args lint.Arguments) []lint.Failure { - return ifelse.Apply(e, file.AST, ifelse.TargetElse, args) -} - -// Name returns the rule name. -func (*IndentErrorFlowRule) Name() string { - return "indent-error-flow" -} - -// CheckIfElse evaluates the rule against an ifelse.Chain. 
-func (*IndentErrorFlowRule) CheckIfElse(chain ifelse.Chain, args ifelse.Args) (failMsg string) { - if !chain.If.Deviates() { - // this rule only applies if the if-block deviates control flow - return - } - - if chain.HasPriorNonDeviating { - // if we de-indent the "else" block then a previous branch - // might flow into it, affecting program behaviour - return - } - - if !chain.If.Returns() { - // avoid overlapping with superfluous-else - return - } - - if args.PreserveScope && !chain.AtBlockEnd && (chain.HasInitializer || chain.Else.HasDecls) { - // avoid increasing variable scope - return - } - - return "if block ends with a return statement, so drop this else and outdent its block" -} diff --git a/vendor/github.com/mgechev/revive/rule/line-length-limit.go b/vendor/github.com/mgechev/revive/rule/line-length-limit.go deleted file mode 100644 index 1a414f691..000000000 --- a/vendor/github.com/mgechev/revive/rule/line-length-limit.go +++ /dev/null @@ -1,102 +0,0 @@ -package rule - -import ( - "bufio" - "bytes" - "fmt" - "go/token" - "strings" - "sync" - "unicode/utf8" - - "github.com/mgechev/revive/lint" -) - -// LineLengthLimitRule lints given else constructs. -type LineLengthLimitRule struct { - max int - sync.Mutex -} - -const defaultLineLengthLimit = 80 - -func (r *LineLengthLimitRule) configure(arguments lint.Arguments) { - r.Lock() - defer r.Unlock() - if r.max == 0 { - if len(arguments) < 1 { - r.max = defaultLineLengthLimit - return - } - - max, ok := arguments[0].(int64) // Alt. non panicking version - if !ok || max < 0 { - panic(`invalid value passed as argument number to the "line-length-limit" rule`) - } - - r.max = int(max) - } -} - -// Apply applies the rule to given file. -func (r *LineLengthLimitRule) Apply(file *lint.File, arguments lint.Arguments) []lint.Failure { - r.configure(arguments) - - var failures []lint.Failure - - checker := lintLineLengthNum{ - max: r.max, - file: file, - onFailure: func(failure lint.Failure) { - failures = append(failures, failure) - }, - } - - checker.check() - - return failures -} - -// Name returns the rule name. -func (*LineLengthLimitRule) Name() string { - return "line-length-limit" -} - -type lintLineLengthNum struct { - max int - file *lint.File - onFailure func(lint.Failure) -} - -func (r lintLineLengthNum) check() { - f := bytes.NewReader(r.file.Content()) - spaces := strings.Repeat(" ", 4) // tab width = 4 - l := 1 - s := bufio.NewScanner(f) - for s.Scan() { - t := s.Text() - t = strings.ReplaceAll(t, "\t", spaces) - c := utf8.RuneCountInString(t) - if c > r.max { - r.onFailure(lint.Failure{ - Category: "code-style", - Position: lint.FailurePosition{ - // Offset not set; it is non-trivial, and doesn't appear to be needed. - Start: token.Position{ - Filename: r.file.Name, - Line: l, - Column: 0, - }, - End: token.Position{ - Filename: r.file.Name, - Line: l, - Column: c, - }, - }, - Confidence: 1, - Failure: fmt.Sprintf("line is %d characters, out of limit %d", c, r.max), - }) - } - l++ - } -} diff --git a/vendor/github.com/mgechev/revive/rule/max-control-nesting.go b/vendor/github.com/mgechev/revive/rule/max-control-nesting.go deleted file mode 100644 index c4eb36193..000000000 --- a/vendor/github.com/mgechev/revive/rule/max-control-nesting.go +++ /dev/null @@ -1,128 +0,0 @@ -package rule - -import ( - "fmt" - "go/ast" - "sync" - - "github.com/mgechev/revive/lint" -) - -// MaxControlNestingRule lints given else constructs. 
-type MaxControlNestingRule struct { - max int64 - sync.Mutex -} - -const defaultMaxControlNesting = 5 - -// Apply applies the rule to given file. -func (r *MaxControlNestingRule) Apply(file *lint.File, arguments lint.Arguments) []lint.Failure { - r.configure(arguments) - - var failures []lint.Failure - - fileAst := file.AST - - walker := &lintMaxControlNesting{ - onFailure: func(failure lint.Failure) { - failures = append(failures, failure) - }, - max: int(r.max), - } - - ast.Walk(walker, fileAst) - - return failures -} - -// Name returns the rule name. -func (*MaxControlNestingRule) Name() string { - return "max-control-nesting" -} - -type lintMaxControlNesting struct { - max int - onFailure func(lint.Failure) - nestingLevelAcc int - lastCtrlStmt ast.Node -} - -func (w *lintMaxControlNesting) Visit(n ast.Node) ast.Visitor { - if w.nestingLevelAcc > w.max { // we are visiting a node beyond the max nesting level - w.onFailure(lint.Failure{ - Failure: fmt.Sprintf("control flow nesting exceeds %d", w.max), - Confidence: 1, - Node: w.lastCtrlStmt, - Category: "complexity", - }) - return nil // stop visiting deeper - } - - switch v := n.(type) { - case *ast.IfStmt: - w.lastCtrlStmt = v - w.walkControlledBlock(v.Body) // "then" branch block - if v.Else != nil { - w.walkControlledBlock(v.Else) // "else" branch block - } - return nil // stop re-visiting nesting blocks (already visited by w.walkControlledBlock) - - case *ast.ForStmt: - w.lastCtrlStmt = v - w.walkControlledBlock(v.Body) - return nil // stop re-visiting nesting blocks (already visited by w.walkControlledBlock) - - case *ast.CaseClause: // switch case - w.lastCtrlStmt = v - for _, s := range v.Body { // visit each statement in the case clause - w.walkControlledBlock(s) - } - return nil // stop re-visiting nesting blocks (already visited by w.walkControlledBlock) - - case *ast.CommClause: // select case - w.lastCtrlStmt = v - for _, s := range v.Body { // visit each statement in the select case clause - w.walkControlledBlock(s) - } - return nil // stop re-visiting nesting blocks (already visited by w.walkControlledBlock) - - case *ast.FuncLit: - walker := &lintMaxControlNesting{ - onFailure: w.onFailure, - max: w.max, - } - ast.Walk(walker, v.Body) - return nil - } - - return w -} - -func (w *lintMaxControlNesting) walkControlledBlock(b ast.Node) { - oldNestingLevel := w.nestingLevelAcc - w.nestingLevelAcc++ - ast.Walk(w, b) - w.nestingLevelAcc = oldNestingLevel -} - -func (r *MaxControlNestingRule) configure(arguments lint.Arguments) { - r.Lock() - defer r.Unlock() - if !(r.max < 1) { - return // max already set - } - - if len(arguments) < 1 { - r.max = defaultMaxControlNesting - return - } - - checkNumberOfArguments(1, arguments, r.Name()) - - max, ok := arguments[0].(int64) // Alt. non panicking version - if !ok { - panic(`invalid value passed as argument number to the "max-control-nesting" rule`) - } - r.max = max -} diff --git a/vendor/github.com/mgechev/revive/rule/max-public-structs.go b/vendor/github.com/mgechev/revive/rule/max-public-structs.go deleted file mode 100644 index 25be3e676..000000000 --- a/vendor/github.com/mgechev/revive/rule/max-public-structs.go +++ /dev/null @@ -1,88 +0,0 @@ -package rule - -import ( - "go/ast" - "strings" - "sync" - - "github.com/mgechev/revive/lint" -) - -// MaxPublicStructsRule lints given else constructs. 
-type MaxPublicStructsRule struct { - max int64 - sync.Mutex -} - -const defaultMaxPublicStructs = 5 - -func (r *MaxPublicStructsRule) configure(arguments lint.Arguments) { - r.Lock() - defer r.Unlock() - if r.max < 1 { - if len(arguments) < 1 { - r.max = defaultMaxPublicStructs - return - } - - checkNumberOfArguments(1, arguments, r.Name()) - - max, ok := arguments[0].(int64) // Alt. non panicking version - if !ok { - panic(`invalid value passed as argument number to the "max-public-structs" rule`) - } - r.max = max - } -} - -// Apply applies the rule to given file. -func (r *MaxPublicStructsRule) Apply(file *lint.File, arguments lint.Arguments) []lint.Failure { - r.configure(arguments) - - var failures []lint.Failure - - fileAst := file.AST - - walker := &lintMaxPublicStructs{ - fileAst: fileAst, - onFailure: func(failure lint.Failure) { - failures = append(failures, failure) - }, - } - - ast.Walk(walker, fileAst) - - if walker.current > r.max { - walker.onFailure(lint.Failure{ - Failure: "you have exceeded the maximum number of public struct declarations", - Confidence: 1, - Node: fileAst, - Category: "style", - }) - } - - return failures -} - -// Name returns the rule name. -func (*MaxPublicStructsRule) Name() string { - return "max-public-structs" -} - -type lintMaxPublicStructs struct { - current int64 - fileAst *ast.File - onFailure func(lint.Failure) -} - -func (w *lintMaxPublicStructs) Visit(n ast.Node) ast.Visitor { - switch v := n.(type) { - case *ast.TypeSpec: - name := v.Name.Name - first := string(name[0]) - if strings.ToUpper(first) == first { - w.current++ - } - } - return w -} diff --git a/vendor/github.com/mgechev/revive/rule/modifies-param.go b/vendor/github.com/mgechev/revive/rule/modifies-param.go deleted file mode 100644 index a68ae2501..000000000 --- a/vendor/github.com/mgechev/revive/rule/modifies-param.go +++ /dev/null @@ -1,80 +0,0 @@ -package rule - -import ( - "fmt" - "go/ast" - - "github.com/mgechev/revive/lint" -) - -// ModifiesParamRule lints given else constructs. -type ModifiesParamRule struct{} - -// Apply applies the rule to given file. -func (*ModifiesParamRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure { - var failures []lint.Failure - - onFailure := func(failure lint.Failure) { - failures = append(failures, failure) - } - - w := lintModifiesParamRule{onFailure: onFailure} - ast.Walk(w, file.AST) - return failures -} - -// Name returns the rule name. 
-func (*ModifiesParamRule) Name() string { - return "modifies-parameter" -} - -type lintModifiesParamRule struct { - params map[string]bool - onFailure func(lint.Failure) -} - -func retrieveParamNames(pl []*ast.Field) map[string]bool { - result := make(map[string]bool, len(pl)) - for _, p := range pl { - for _, n := range p.Names { - if n.Name == "_" { - continue - } - - result[n.Name] = true - } - } - return result -} - -func (w lintModifiesParamRule) Visit(node ast.Node) ast.Visitor { - switch v := node.(type) { - case *ast.FuncDecl: - w.params = retrieveParamNames(v.Type.Params.List) - case *ast.IncDecStmt: - if id, ok := v.X.(*ast.Ident); ok { - checkParam(id, &w) - } - case *ast.AssignStmt: - lhs := v.Lhs - for _, e := range lhs { - id, ok := e.(*ast.Ident) - if ok { - checkParam(id, &w) - } - } - } - - return w -} - -func checkParam(id *ast.Ident, w *lintModifiesParamRule) { - if w.params[id.Name] { - w.onFailure(lint.Failure{ - Confidence: 0.5, // confidence is low because of shadow variables - Node: id, - Category: "bad practice", - Failure: fmt.Sprintf("parameter '%s' seems to be modified", id), - }) - } -} diff --git a/vendor/github.com/mgechev/revive/rule/modifies-value-receiver.go b/vendor/github.com/mgechev/revive/rule/modifies-value-receiver.go deleted file mode 100644 index e9e64b9a6..000000000 --- a/vendor/github.com/mgechev/revive/rule/modifies-value-receiver.go +++ /dev/null @@ -1,129 +0,0 @@ -package rule - -import ( - "go/ast" - "strings" - - "github.com/mgechev/revive/lint" -) - -// ModifiesValRecRule lints assignments to value method-receivers. -type ModifiesValRecRule struct{} - -// Apply applies the rule to given file. -func (*ModifiesValRecRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure { - var failures []lint.Failure - - onFailure := func(failure lint.Failure) { - failures = append(failures, failure) - } - - w := lintModifiesValRecRule{file: file, onFailure: onFailure} - file.Pkg.TypeCheck() - ast.Walk(w, file.AST) - - return failures -} - -// Name returns the rule name. -func (*ModifiesValRecRule) Name() string { - return "modifies-value-receiver" -} - -type lintModifiesValRecRule struct { - file *lint.File - onFailure func(lint.Failure) -} - -func (w lintModifiesValRecRule) Visit(node ast.Node) ast.Visitor { - switch n := node.(type) { - case *ast.FuncDecl: - if n.Recv == nil { - return nil // skip, not a method - } - - receiver := n.Recv.List[0] - if _, ok := receiver.Type.(*ast.StarExpr); ok { - return nil // skip, method with pointer receiver - } - - if w.skipType(receiver.Type) { - return nil // skip, receiver is a map or array - } - - if len(receiver.Names) < 1 { - return nil // skip, anonymous receiver - } - - receiverName := receiver.Names[0].Name - if receiverName == "_" { - return nil // skip, anonymous receiver - } - - fselect := func(n ast.Node) bool { - // look for assignments with the receiver in the right hand - asgmt, ok := n.(*ast.AssignStmt) - if !ok { - return false - } - - for _, exp := range asgmt.Lhs { - switch e := exp.(type) { - case *ast.IndexExpr: // receiver...[] = ... - continue - case *ast.StarExpr: // *receiver = ... - continue - case *ast.SelectorExpr: // receiver.field = ... - name := w.getNameFromExpr(e.X) - if name == "" || name != receiverName { - continue - } - case *ast.Ident: // receiver := ... 
- if e.Name != receiverName { - continue - } - default: - continue - } - - return true - } - - return false - } - - assignmentsToReceiver := pick(n.Body, fselect) - - for _, assignment := range assignmentsToReceiver { - w.onFailure(lint.Failure{ - Node: assignment, - Confidence: 1, - Failure: "suspicious assignment to a by-value method receiver", - }) - } - } - - return w -} - -func (w lintModifiesValRecRule) skipType(t ast.Expr) bool { - rt := w.file.Pkg.TypeOf(t) - if rt == nil { - return false - } - - rt = rt.Underlying() - rtName := rt.String() - - // skip when receiver is a map or array - return strings.HasPrefix(rtName, "[]") || strings.HasPrefix(rtName, "map[") -} - -func (lintModifiesValRecRule) getNameFromExpr(ie ast.Expr) string { - ident, ok := ie.(*ast.Ident) - if !ok { - return "" - } - - return ident.Name -} diff --git a/vendor/github.com/mgechev/revive/rule/nested-structs.go b/vendor/github.com/mgechev/revive/rule/nested-structs.go deleted file mode 100644 index 147bd482b..000000000 --- a/vendor/github.com/mgechev/revive/rule/nested-structs.go +++ /dev/null @@ -1,75 +0,0 @@ -package rule - -import ( - "go/ast" - - "github.com/mgechev/revive/lint" -) - -// NestedStructs lints nested structs. -type NestedStructs struct{} - -// Apply applies the rule to given file. -func (*NestedStructs) Apply(file *lint.File, _ lint.Arguments) []lint.Failure { - var failures []lint.Failure - - walker := &lintNestedStructs{ - onFailure: func(failure lint.Failure) { - failures = append(failures, failure) - }, - } - - ast.Walk(walker, file.AST) - - return failures -} - -// Name returns the rule name. -func (*NestedStructs) Name() string { - return "nested-structs" -} - -type lintNestedStructs struct { - onFailure func(lint.Failure) -} - -func (l *lintNestedStructs) Visit(n ast.Node) ast.Visitor { - if v, ok := n.(*ast.StructType); ok { - ls := &lintStruct{l.onFailure} - ast.Walk(ls, v.Fields) - } - - return l -} - -type lintStruct struct { - onFailure func(lint.Failure) -} - -func (l *lintStruct) Visit(n ast.Node) ast.Visitor { - switch s := n.(type) { - case *ast.StructType: - l.fail(s) - return nil - case *ast.ArrayType: - if _, ok := s.Elt.(*ast.StructType); ok { - l.fail(s) - } - return nil - case *ast.ChanType: - return nil - case *ast.MapType: - return nil - default: - return l - } -} - -func (l *lintStruct) fail(n ast.Node) { - l.onFailure(lint.Failure{ - Failure: "no nested structs are allowed", - Category: "style", - Node: n, - Confidence: 1, - }) -} diff --git a/vendor/github.com/mgechev/revive/rule/optimize-operands-order.go b/vendor/github.com/mgechev/revive/rule/optimize-operands-order.go deleted file mode 100644 index 841bde56c..000000000 --- a/vendor/github.com/mgechev/revive/rule/optimize-operands-order.go +++ /dev/null @@ -1,77 +0,0 @@ -package rule - -import ( - "fmt" - "go/ast" - "go/token" - - "github.com/mgechev/revive/lint" -) - -// OptimizeOperandsOrderRule lints given else constructs. -type OptimizeOperandsOrderRule struct{} - -// Apply applies the rule to given file. -func (*OptimizeOperandsOrderRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure { - var failures []lint.Failure - - onFailure := func(failure lint.Failure) { - failures = append(failures, failure) - } - w := lintOptimizeOperandsOrderlExpr{ - onFailure: onFailure, - } - ast.Walk(w, file.AST) - return failures -} - -// Name returns the rule name. 
-func (*OptimizeOperandsOrderRule) Name() string { - return "optimize-operands-order" -} - -type lintOptimizeOperandsOrderlExpr struct { - onFailure func(failure lint.Failure) -} - -// Visit checks boolean AND and OR expressions to determine -// if swapping their operands may result in an execution speedup. -func (w lintOptimizeOperandsOrderlExpr) Visit(node ast.Node) ast.Visitor { - binExpr, ok := node.(*ast.BinaryExpr) - if !ok { - return w - } - - switch binExpr.Op { - case token.LAND, token.LOR: - default: - return w - } - - isCaller := func(n ast.Node) bool { - _, ok := n.(*ast.CallExpr) - return ok - } - - // check if the left sub-expression contains a function call - nodes := pick(binExpr.X, isCaller) - if len(nodes) < 1 { - return w - } - - // check if the right sub-expression does not contain a function call - nodes = pick(binExpr.Y, isCaller) - if len(nodes) > 0 { - return w - } - - newExpr := ast.BinaryExpr{X: binExpr.Y, Y: binExpr.X, Op: binExpr.Op} - w.onFailure(lint.Failure{ - Failure: fmt.Sprintf("for better performance '%v' might be rewritten as '%v'", gofmt(binExpr), gofmt(&newExpr)), - Node: node, - Category: "optimization", - Confidence: 0.3, - }) - - return w -} diff --git a/vendor/github.com/mgechev/revive/rule/package-comments.go b/vendor/github.com/mgechev/revive/rule/package-comments.go deleted file mode 100644 index 02f246be0..000000000 --- a/vendor/github.com/mgechev/revive/rule/package-comments.go +++ /dev/null @@ -1,172 +0,0 @@ -package rule - -import ( - "fmt" - "go/ast" - "go/token" - "strings" - "sync" - - "github.com/mgechev/revive/lint" -) - -// PackageCommentsRule lints the package comments. It complains if -// there is no package comment, or if it is not of the right form. -// This has a notable false positive in that a package comment -// could rightfully appear in a different file of the same package, -// but that's not easy to fix since this linter is file-oriented. -type PackageCommentsRule struct { - checkPackageCommentCache sync.Map -} - -// Apply applies the rule to given file. -func (r *PackageCommentsRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure { - var failures []lint.Failure - - if file.IsTest() { - return failures - } - - onFailure := func(failure lint.Failure) { - failures = append(failures, failure) - } - - fileAst := file.AST - w := &lintPackageComments{fileAst, file, onFailure, r} - ast.Walk(w, fileAst) - return failures -} - -// Name returns the rule name. 
-func (*PackageCommentsRule) Name() string { - return "package-comments" -} - -type lintPackageComments struct { - fileAst *ast.File - file *lint.File - onFailure func(lint.Failure) - rule *PackageCommentsRule -} - -func (l *lintPackageComments) checkPackageComment() []lint.Failure { - // deduplicate warnings in package - if _, exists := l.rule.checkPackageCommentCache.LoadOrStore(l.file.Pkg, struct{}{}); exists { - return nil - } - var docFile *ast.File // which name is doc.go - var packageFile *ast.File // which name is $package.go - var firstFile *ast.File - var firstFileName string - var fileSource string - for name, file := range l.file.Pkg.Files() { - if file.AST.Doc != nil { - return nil - } - if name == "doc.go" { - docFile = file.AST - fileSource = "doc.go" - } - if name == file.AST.Name.String()+".go" { - packageFile = file.AST - } - if firstFileName == "" || firstFileName > name { - firstFile = file.AST - firstFileName = name - } - } - // prefer warning on doc.go, $package.go over first file - if docFile == nil { - docFile = packageFile - fileSource = l.fileAst.Name.String() + ".go" - } - if docFile == nil { - docFile = firstFile - fileSource = firstFileName - } - - if docFile != nil { - pkgFile := l.file.Pkg.Files()[fileSource] - return []lint.Failure{{ - Category: "comments", - Position: lint.FailurePosition{ - Start: pkgFile.ToPosition(docFile.Pos()), - End: pkgFile.ToPosition(docFile.Name.End()), - }, - Confidence: 1, - Failure: "should have a package comment", - }} - } - return nil -} - -func (l *lintPackageComments) Visit(_ ast.Node) ast.Visitor { - if l.file.IsTest() { - return nil - } - - prefix := "Package " + l.fileAst.Name.Name + " " - - // Look for a detached package comment. - // First, scan for the last comment that occurs before the "package" keyword. - var lastCG *ast.CommentGroup - for _, cg := range l.fileAst.Comments { - if cg.Pos() > l.fileAst.Package { - // Gone past "package" keyword. - break - } - lastCG = cg - } - if lastCG != nil && strings.HasPrefix(lastCG.Text(), prefix) { - endPos := l.file.ToPosition(lastCG.End()) - pkgPos := l.file.ToPosition(l.fileAst.Package) - if endPos.Line+1 < pkgPos.Line { - // There isn't a great place to anchor this error; - // the start of the blank lines between the doc and the package statement - // is at least pointing at the location of the problem. - pos := token.Position{ - Filename: endPos.Filename, - // Offset not set; it is non-trivial, and doesn't appear to be needed. - Line: endPos.Line + 1, - Column: 1, - } - l.onFailure(lint.Failure{ - Category: "comments", - Position: lint.FailurePosition{ - Start: pos, - End: pos, - }, - Confidence: 0.9, - Failure: "package comment is detached; there should be no blank lines between it and the package statement", - }) - return nil - } - } - - if l.fileAst.Doc == nil { - for _, failure := range l.checkPackageComment() { - l.onFailure(failure) - } - return nil - } - s := l.fileAst.Doc.Text() - if ts := strings.TrimLeft(s, " \t"); ts != s { - l.onFailure(lint.Failure{ - Category: "comments", - Node: l.fileAst.Doc, - Confidence: 1, - Failure: "package comment should not have leading space", - }) - s = ts - } - // Only non-main packages need to keep to this form. 
- if !l.file.Pkg.IsMain() && !strings.HasPrefix(s, prefix) { - l.onFailure(lint.Failure{ - Category: "comments", - Node: l.fileAst.Doc, - Confidence: 1, - Failure: fmt.Sprintf(`package comment should be of the form "%s..."`, prefix), - }) - } - return nil -} diff --git a/vendor/github.com/mgechev/revive/rule/range-val-address.go b/vendor/github.com/mgechev/revive/rule/range-val-address.go deleted file mode 100644 index 51ad8e108..000000000 --- a/vendor/github.com/mgechev/revive/rule/range-val-address.go +++ /dev/null @@ -1,162 +0,0 @@ -package rule - -import ( - "fmt" - "go/ast" - "go/token" - "strings" - - "github.com/mgechev/revive/lint" -) - -// RangeValAddress lints -type RangeValAddress struct{} - -// Apply applies the rule to given file. -func (*RangeValAddress) Apply(file *lint.File, _ lint.Arguments) []lint.Failure { - var failures []lint.Failure - - walker := rangeValAddress{ - file: file, - onFailure: func(failure lint.Failure) { - failures = append(failures, failure) - }, - } - - file.Pkg.TypeCheck() - ast.Walk(walker, file.AST) - - return failures -} - -// Name returns the rule name. -func (*RangeValAddress) Name() string { - return "range-val-address" -} - -type rangeValAddress struct { - file *lint.File - onFailure func(lint.Failure) -} - -func (w rangeValAddress) Visit(node ast.Node) ast.Visitor { - n, ok := node.(*ast.RangeStmt) - if !ok { - return w - } - - value, ok := n.Value.(*ast.Ident) - if !ok { - return w - } - - valueIsStarExpr := false - if t := w.file.Pkg.TypeOf(value); t != nil { - valueIsStarExpr = strings.HasPrefix(t.String(), "*") - } - - ast.Walk(rangeBodyVisitor{ - valueIsStarExpr: valueIsStarExpr, - valueID: value.Obj, - onFailure: w.onFailure, - }, n.Body) - - return w -} - -type rangeBodyVisitor struct { - valueIsStarExpr bool - valueID *ast.Object - onFailure func(lint.Failure) -} - -func (bw rangeBodyVisitor) Visit(node ast.Node) ast.Visitor { - asgmt, ok := node.(*ast.AssignStmt) - if !ok { - return bw - } - - for _, exp := range asgmt.Lhs { - e, ok := exp.(*ast.IndexExpr) - if !ok { - continue - } - if bw.isAccessingRangeValueAddress(e.Index) { // e.g. a[&value]... - bw.onFailure(bw.newFailure(e.Index)) - } - } - - for _, exp := range asgmt.Rhs { - switch e := exp.(type) { - case *ast.UnaryExpr: // e.g. ...&value, ...&value.id - if bw.isAccessingRangeValueAddress(e) { - bw.onFailure(bw.newFailure(e)) - } - case *ast.CallExpr: - if fun, ok := e.Fun.(*ast.Ident); ok && fun.Name == "append" { // e.g. ...append(arr, &value) - for _, v := range e.Args { - if lit, ok := v.(*ast.CompositeLit); ok { // e.g. ...append(arr, v{id:&value}) - bw.checkCompositeLit(lit) - continue - } - if bw.isAccessingRangeValueAddress(v) { // e.g. ...append(arr, &value) - bw.onFailure(bw.newFailure(v)) - } - } - } - case *ast.CompositeLit: // e.g. 
...v{id:&value} - bw.checkCompositeLit(e) - } - } - return bw -} - -func (bw rangeBodyVisitor) checkCompositeLit(comp *ast.CompositeLit) { - for _, exp := range comp.Elts { - e, ok := exp.(*ast.KeyValueExpr) - if !ok { - continue - } - if bw.isAccessingRangeValueAddress(e.Value) { - bw.onFailure(bw.newFailure(e.Value)) - } - } -} - -func (bw rangeBodyVisitor) isAccessingRangeValueAddress(exp ast.Expr) bool { - u, ok := exp.(*ast.UnaryExpr) - if !ok { - return false - } - - if u.Op != token.AND { - return false - } - - v, ok := u.X.(*ast.Ident) - if !ok { - var s *ast.SelectorExpr - s, ok = u.X.(*ast.SelectorExpr) - if !ok { - return false - } - v, ok = s.X.(*ast.Ident) - if !ok { - return false - } - - if bw.valueIsStarExpr { // check type of value - return false - } - } - - return ok && v.Obj == bw.valueID -} - -func (bw rangeBodyVisitor) newFailure(node ast.Node) lint.Failure { - return lint.Failure{ - Node: node, - Confidence: 1, - Failure: fmt.Sprintf("suspicious assignment of '%s'. range-loop variables always have the same address", bw.valueID.Name), - } -} diff --git a/vendor/github.com/mgechev/revive/rule/range-val-in-closure.go b/vendor/github.com/mgechev/revive/rule/range-val-in-closure.go deleted file mode 100644 index 1e85d0d0d..000000000 --- a/vendor/github.com/mgechev/revive/rule/range-val-in-closure.go +++ /dev/null @@ -1,121 +0,0 @@ -package rule - -import ( - "fmt" - "go/ast" - - "github.com/mgechev/revive/lint" -) - -// RangeValInClosureRule lints given else constructs. -type RangeValInClosureRule struct{} - -// Apply applies the rule to given file. -func (*RangeValInClosureRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure { - var failures []lint.Failure - - walker := rangeValInClosure{ - onFailure: func(failure lint.Failure) { - failures = append(failures, failure) - }, - } - - ast.Walk(walker, file.AST) - - return failures -} - -// Name returns the rule name. -func (*RangeValInClosureRule) Name() string { - return "range-val-in-closure" -} - -type rangeValInClosure struct { - onFailure func(lint.Failure) -} - -func (w rangeValInClosure) Visit(node ast.Node) ast.Visitor { - // Find the variables updated by the loop statement. - var vars []*ast.Ident - addVar := func(expr ast.Expr) { - if id, ok := expr.(*ast.Ident); ok { - vars = append(vars, id) - } - } - var body *ast.BlockStmt - switch n := node.(type) { - case *ast.RangeStmt: - body = n.Body - addVar(n.Key) - addVar(n.Value) - case *ast.ForStmt: - body = n.Body - switch post := n.Post.(type) { - case *ast.AssignStmt: - // e.g. for p = head; p != nil; p = p.next - for _, lhs := range post.Lhs { - addVar(lhs) - } - case *ast.IncDecStmt: - // e.g. for i := 0; i < n; i++ - addVar(post.X) - } - } - if vars == nil { - return w - } - - // Inspect a go or defer statement - // if it's the last one in the loop body. - // (We give up if there are following statements, - // because it's hard to prove go isn't followed by wait, - // or defer by return.) - if len(body.List) == 0 { - return w - } - var last *ast.CallExpr - switch s := body.List[len(body.List)-1].(type) { - case *ast.GoStmt: - last = s.Call - case *ast.DeferStmt: - last = s.Call - default: - return w - } - lit, ok := last.Fun.(*ast.FuncLit) - if !ok { - return w - } - - if lit.Type == nil { - // Not referring to a variable (e.g. 
struct field name) - return w - } - - var inspector func(n ast.Node) bool - inspector = func(n ast.Node) bool { - kv, ok := n.(*ast.KeyValueExpr) - if ok { - // do not check identifiers acting as key in key-value expressions (see issue #637) - ast.Inspect(kv.Value, inspector) - return false - } - id, ok := n.(*ast.Ident) - if !ok || id.Obj == nil { - return true - } - - for _, v := range vars { - if v.Obj == id.Obj { - w.onFailure(lint.Failure{ - Confidence: 1, - Failure: fmt.Sprintf("loop variable %v captured by func literal", id.Name), - Node: n, - }) - } - } - return true - } - ast.Inspect(lit.Body, inspector) - return w -} diff --git a/vendor/github.com/mgechev/revive/rule/range.go b/vendor/github.com/mgechev/revive/rule/range.go deleted file mode 100644 index 9d483a673..000000000 --- a/vendor/github.com/mgechev/revive/rule/range.go +++ /dev/null @@ -1,82 +0,0 @@ -package rule - -import ( - "fmt" - "go/ast" - "strings" - - "github.com/mgechev/revive/lint" -) - -// RangeRule lints given else constructs. -type RangeRule struct{} - -// Apply applies the rule to given file. -func (*RangeRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure { - var failures []lint.Failure - - onFailure := func(failure lint.Failure) { - failures = append(failures, failure) - } - - w := &lintRanges{file, onFailure} - ast.Walk(w, file.AST) - return failures -} - -// Name returns the rule name. -func (*RangeRule) Name() string { - return "range" -} - -type lintRanges struct { - file *lint.File - onFailure func(lint.Failure) -} - -func (w *lintRanges) Visit(node ast.Node) ast.Visitor { - rs, ok := node.(*ast.RangeStmt) - if !ok { - return w - } - if rs.Value == nil { - // for x = range m { ... } - return w // single var form - } - if !isIdent(rs.Value, "_") { - // for ?, y = range m { ... } - return w - } - - newRS := *rs // shallow copy - newRS.Value = nil - - w.onFailure(lint.Failure{ - Failure: fmt.Sprintf("should omit 2nd value from range; this loop is equivalent to `for %s %s range ...`", w.file.Render(rs.Key), rs.Tok), - Confidence: 1, - Node: rs.Value, - ReplacementLine: firstLineOf(w.file, &newRS, rs), - }) - - return w -} - -func firstLineOf(f *lint.File, node, match ast.Node) string { - line := f.Render(node) - if i := strings.Index(line, "\n"); i >= 0 { - line = line[:i] - } - return indentOf(f, match) + line -} - -func indentOf(f *lint.File, node ast.Node) string { - line := srcLine(f.Content(), f.ToPosition(node.Pos())) - for i, r := range line { - switch r { - case ' ', '\t': - default: - return line[:i] - } - } - return line // unusual or empty line -} diff --git a/vendor/github.com/mgechev/revive/rule/receiver-naming.go b/vendor/github.com/mgechev/revive/rule/receiver-naming.go deleted file mode 100644 index d79bb9fe8..000000000 --- a/vendor/github.com/mgechev/revive/rule/receiver-naming.go +++ /dev/null @@ -1,81 +0,0 @@ -package rule - -import ( - "fmt" - "go/ast" - - "github.com/mgechev/revive/internal/typeparams" - "github.com/mgechev/revive/lint" -) - -// ReceiverNamingRule lints given else constructs. -type ReceiverNamingRule struct{} - -// Apply applies the rule to given file. -func (*ReceiverNamingRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure { - var failures []lint.Failure - - fileAst := file.AST - walker := lintReceiverName{ - onFailure: func(failure lint.Failure) { - failures = append(failures, failure) - }, - typeReceiver: map[string]string{}, - } - - ast.Walk(walker, fileAst) - - return failures -} - -// Name returns the rule name. 
-func (*ReceiverNamingRule) Name() string { - return "receiver-naming" -} - -type lintReceiverName struct { - onFailure func(lint.Failure) - typeReceiver map[string]string -} - -func (w lintReceiverName) Visit(n ast.Node) ast.Visitor { - fn, ok := n.(*ast.FuncDecl) - if !ok || fn.Recv == nil || len(fn.Recv.List) == 0 { - return w - } - names := fn.Recv.List[0].Names - if len(names) < 1 { - return w - } - name := names[0].Name - if name == "_" { - w.onFailure(lint.Failure{ - Node: n, - Confidence: 1, - Category: "naming", - Failure: "receiver name should not be an underscore, omit the name if it is unused", - }) - return w - } - if name == "this" || name == "self" { - w.onFailure(lint.Failure{ - Node: n, - Confidence: 1, - Category: "naming", - Failure: `receiver name should be a reflection of its identity; don't use generic names such as "this" or "self"`, - }) - return w - } - recv := typeparams.ReceiverType(fn) - if prev, ok := w.typeReceiver[recv]; ok && prev != name { - w.onFailure(lint.Failure{ - Node: n, - Confidence: 1, - Category: "naming", - Failure: fmt.Sprintf("receiver name %s should be consistent with previous receiver name %s for %s", name, prev, recv), - }) - return w - } - w.typeReceiver[recv] = name - return w -} diff --git a/vendor/github.com/mgechev/revive/rule/redefines-builtin-id.go b/vendor/github.com/mgechev/revive/rule/redefines-builtin-id.go deleted file mode 100644 index b3ff08456..000000000 --- a/vendor/github.com/mgechev/revive/rule/redefines-builtin-id.go +++ /dev/null @@ -1,179 +0,0 @@ -package rule - -import ( - "fmt" - "go/ast" - "go/token" - - "github.com/mgechev/revive/lint" -) - -var builtInConstAndVars = map[string]bool{ - "true": true, - "false": true, - "iota": true, - "nil": true, -} - -var builtFunctions = map[string]bool{ - "append": true, - "cap": true, - "close": true, - "complex": true, - "copy": true, - "delete": true, - "imag": true, - "len": true, - "make": true, - "new": true, - "panic": true, - "print": true, - "println": true, - "real": true, - "recover": true, -} - -var builtInTypes = map[string]bool{ - "bool": true, - "byte": true, - "complex128": true, - "complex64": true, - "error": true, - "float32": true, - "float64": true, - "int": true, - "int16": true, - "int32": true, - "int64": true, - "int8": true, - "rune": true, - "string": true, - "uint": true, - "uint16": true, - "uint32": true, - "uint64": true, - "uint8": true, - "uintptr": true, - "any": true, -} - -// RedefinesBuiltinIDRule warns when a builtin identifier is shadowed. -type RedefinesBuiltinIDRule struct{} - -// Apply applies the rule to given file. -func (*RedefinesBuiltinIDRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure { - var failures []lint.Failure - - onFailure := func(failure lint.Failure) { - failures = append(failures, failure) - } - - astFile := file.AST - w := &lintRedefinesBuiltinID{onFailure} - ast.Walk(w, astFile) - - return failures -} - -// Name returns the rule name. 
-func (*RedefinesBuiltinIDRule) Name() string { - return "redefines-builtin-id" -} - -type lintRedefinesBuiltinID struct { - onFailure func(lint.Failure) -} - -func (w *lintRedefinesBuiltinID) Visit(node ast.Node) ast.Visitor { - switch n := node.(type) { - case *ast.GenDecl: - switch n.Tok { - case token.TYPE: - if len(n.Specs) < 1 { - return nil - } - typeSpec, ok := n.Specs[0].(*ast.TypeSpec) - if !ok { - return nil - } - id := typeSpec.Name.Name - if ok, bt := w.isBuiltIn(id); ok { - w.addFailure(n, fmt.Sprintf("redefinition of the built-in %s %s", bt, id)) - } - case token.VAR, token.CONST: - for _, vs := range n.Specs { - valSpec, ok := vs.(*ast.ValueSpec) - if !ok { - continue - } - for _, name := range valSpec.Names { - if ok, bt := w.isBuiltIn(name.Name); ok { - w.addFailure(n, fmt.Sprintf("redefinition of the built-in %s %s", bt, name)) - } - } - } - default: - return nil // skip if not type/var/const declaration - } - - case *ast.FuncDecl: - if n.Recv != nil { - return w // skip methods - } - - id := n.Name.Name - if ok, bt := w.isBuiltIn(id); ok { - w.addFailure(n, fmt.Sprintf("redefinition of the built-in %s %s", bt, id)) - } - case *ast.AssignStmt: - for _, e := range n.Lhs { - id, ok := e.(*ast.Ident) - if !ok { - continue - } - - if ok, bt := w.isBuiltIn(id.Name); ok { - var msg string - switch bt { - case "constant or variable": - if n.Tok == token.DEFINE { - msg = fmt.Sprintf("assignment creates a shadow of built-in identifier %s", id.Name) - } else { - msg = fmt.Sprintf("assignment modifies built-in identifier %s", id.Name) - } - default: - msg = fmt.Sprintf("redefinition of the built-in %s %s", bt, id) - } - - w.addFailure(n, msg) - } - } - } - - return w -} - -func (w lintRedefinesBuiltinID) addFailure(node ast.Node, msg string) { - w.onFailure(lint.Failure{ - Confidence: 1, - Node: node, - Category: "logic", - Failure: msg, - }) -} - -func (lintRedefinesBuiltinID) isBuiltIn(id string) (r bool, builtInKind string) { - if builtFunctions[id] { - return true, "function" - } - - if builtInConstAndVars[id] { - return true, "constant or variable" - } - - if builtInTypes[id] { - return true, "type" - } - - return false, "" -} diff --git a/vendor/github.com/mgechev/revive/rule/redundant-import-alias.go b/vendor/github.com/mgechev/revive/rule/redundant-import-alias.go deleted file mode 100644 index fa5281f24..000000000 --- a/vendor/github.com/mgechev/revive/rule/redundant-import-alias.go +++ /dev/null @@ -1,52 +0,0 @@ -package rule - -import ( - "fmt" - "go/ast" - "strings" - - "github.com/mgechev/revive/lint" -) - -// RedundantImportAlias lints given else constructs. -type RedundantImportAlias struct{} - -// Apply applies the rule to given file. -func (*RedundantImportAlias) Apply(file *lint.File, _ lint.Arguments) []lint.Failure { - var failures []lint.Failure - - for _, imp := range file.AST.Imports { - if imp.Name == nil { - continue - } - - if getImportPackageName(imp) == imp.Name.Name { - failures = append(failures, lint.Failure{ - Confidence: 1, - Failure: fmt.Sprintf("Import alias \"%s\" is redundant", imp.Name.Name), - Node: imp, - Category: "imports", - }) - } - } - - return failures -} - -// Name returns the rule name. 
-func (*RedundantImportAlias) Name() string { - return "redundant-import-alias" -} - -func getImportPackageName(imp *ast.ImportSpec) string { - const pathSep = "/" - const strDelim = `"` - - path := imp.Path.Value - i := strings.LastIndex(path, pathSep) - if i == -1 { - return strings.Trim(path, strDelim) - } - - return strings.Trim(path[i+1:], strDelim) -} diff --git a/vendor/github.com/mgechev/revive/rule/string-format.go b/vendor/github.com/mgechev/revive/rule/string-format.go deleted file mode 100644 index 70edf7387..000000000 --- a/vendor/github.com/mgechev/revive/rule/string-format.go +++ /dev/null @@ -1,311 +0,0 @@ -package rule - -import ( - "fmt" - "go/ast" - "go/token" - "regexp" - "strconv" - - "github.com/mgechev/revive/lint" -) - -// #region Revive API - -// StringFormatRule lints strings and/or comments according to a set of regular expressions given as Arguments -type StringFormatRule struct{} - -// Apply applies the rule to the given file. -func (*StringFormatRule) Apply(file *lint.File, arguments lint.Arguments) []lint.Failure { - var failures []lint.Failure - - onFailure := func(failure lint.Failure) { - failures = append(failures, failure) - } - - w := lintStringFormatRule{onFailure: onFailure} - w.parseArguments(arguments) - ast.Walk(w, file.AST) - - return failures -} - -// Name returns the rule name. -func (*StringFormatRule) Name() string { - return "string-format" -} - -// ParseArgumentsTest is a public wrapper around w.parseArguments used for testing. Returns the error message provided to panic, or nil if no error was encountered -func (StringFormatRule) ParseArgumentsTest(arguments lint.Arguments) *string { - w := lintStringFormatRule{} - c := make(chan any) - // Parse the arguments in a goroutine, defer a recover() call, return the error encountered (or nil if there was no error) - go func() { - defer func() { - err := recover() - c <- err - }() - w.parseArguments(arguments) - }() - err := <-c - if err != nil { - e := fmt.Sprintf("%s", err) - return &e - } - return nil -} - -// #endregion - -// #region Internal structure - -type lintStringFormatRule struct { - onFailure func(lint.Failure) - rules []stringFormatSubrule -} - -type stringFormatSubrule struct { - parent *lintStringFormatRule - scope stringFormatSubruleScope - regexp *regexp.Regexp - negated bool - errorMessage string -} - -type stringFormatSubruleScope struct { - funcName string // Function name the rule is scoped to - argument int // (optional) Which argument in calls to the function is checked against the rule (the first argument is checked by default) - field string // (optional) If the argument to be checked is a struct, which member of the struct is checked against the rule (top level members only) -} - -// Regex inserted to match valid function/struct field identifiers -const identRegex = "[_A-Za-z][_A-Za-z0-9]*" - -var parseStringFormatScope = regexp.MustCompile( - fmt.Sprintf("^(%s(?:\\.%s)?)(?:\\[([0-9]+)\\](?:\\.(%s))?)?$", identRegex, identRegex, identRegex)) - -// #endregion - -// #region Argument parsing - -func (w *lintStringFormatRule) parseArguments(arguments lint.Arguments) { - for i, argument := range arguments { - scope, regex, negated, errorMessage := w.parseArgument(argument, i) - w.rules = append(w.rules, stringFormatSubrule{ - parent: w, - scope: scope, - regexp: regex, - negated: negated, - errorMessage: errorMessage, - }) - } -} - -func (w lintStringFormatRule) parseArgument(argument any, ruleNum int) (scope stringFormatSubruleScope, regex *regexp.Regexp, negated bool, 
errorMessage string) { - g, ok := argument.([]any) // Cast to generic slice first - if !ok { - w.configError("argument is not a slice", ruleNum, 0) - } - if len(g) < 2 { - w.configError("less than two slices found in argument, scope and regex are required", ruleNum, len(g)-1) - } - rule := make([]string, len(g)) - for i, obj := range g { - val, ok := obj.(string) - if !ok { - w.configError("unexpected value, string was expected", ruleNum, i) - } - rule[i] = val - } - - // Validate scope and regex length - if rule[0] == "" { - w.configError("empty scope provided", ruleNum, 0) - } else if len(rule[1]) < 2 { - w.configError("regex is too small (regexes should begin and end with '/')", ruleNum, 1) - } - - // Parse rule scope - scope = stringFormatSubruleScope{} - matches := parseStringFormatScope.FindStringSubmatch(rule[0]) - if matches == nil { - // The rule's scope didn't match the parsing regex at all, probably a configuration error - w.parseError("unable to parse rule scope", ruleNum, 0) - } else if len(matches) != 4 { - // The rule's scope matched the parsing regex, but an unexpected number of submatches was returned, probably a bug - w.parseError(fmt.Sprintf("unexpected number of submatches when parsing scope: %d, expected 4", len(matches)), ruleNum, 0) - } - scope.funcName = matches[1] - if len(matches[2]) > 0 { - var err error - scope.argument, err = strconv.Atoi(matches[2]) - if err != nil { - w.parseError("unable to parse argument number in rule scope", ruleNum, 0) - } - } - if len(matches[3]) > 0 { - scope.field = matches[3] - } - - // Strip / characters from the beginning and end of rule[1] before compiling - negated = rule[1][0] == '!' - offset := 1 - if negated { - offset++ - } - regex, err := regexp.Compile(rule[1][offset : len(rule[1])-1]) - if err != nil { - w.parseError(fmt.Sprintf("unable to compile %s as regexp", rule[1]), ruleNum, 1) - } - - // Use custom error message if provided - if len(rule) == 3 { - errorMessage = rule[2] - } - return scope, regex, negated, errorMessage -} - -// Report an invalid config, this is specifically the user's fault -func (lintStringFormatRule) configError(msg string, ruleNum, option int) { - panic(fmt.Sprintf("invalid configuration for string-format: %s [argument %d, option %d]", msg, ruleNum, option)) -} - -// Report a general config parsing failure, this may be the user's fault, but it isn't known for certain -func (lintStringFormatRule) parseError(msg string, ruleNum, option int) { - panic(fmt.Sprintf("failed to parse configuration for string-format: %s [argument %d, option %d]", msg, ruleNum, option)) -} - -// #endregion - -// #region Node traversal - -func (w lintStringFormatRule) Visit(node ast.Node) ast.Visitor { - // First, check if node is a call expression - call, ok := node.(*ast.CallExpr) - if !ok { - return w - } - - // Get the name of the call expression to check against rule scope - callName, ok := w.getCallName(call) - if !ok { - return w - } - - for _, rule := range w.rules { - if rule.scope.funcName == callName { - rule.Apply(call) - } - } - - return w -} - -// Return the name of a call expression in the form of package.Func or Func -func (lintStringFormatRule) getCallName(call *ast.CallExpr) (callName string, ok bool) { - if ident, ok := call.Fun.(*ast.Ident); ok { - // Local function call - return ident.Name, true - } - - if selector, ok := call.Fun.(*ast.SelectorExpr); ok { - // Scoped function call - scope, ok := selector.X.(*ast.Ident) - if ok { - return scope.Name + "." 
+ selector.Sel.Name, true - } - // Scoped function call inside structure - recv, ok := selector.X.(*ast.SelectorExpr) - if ok { - return recv.Sel.Name + "." + selector.Sel.Name, true - } - } - - return "", false -} - -// #endregion - -// #region Linting logic - -// Apply a single format rule to a call expression (should be done after verifying the that the call expression matches the rule's scope) -func (r *stringFormatSubrule) Apply(call *ast.CallExpr) { - if len(call.Args) <= r.scope.argument { - return - } - - arg := call.Args[r.scope.argument] - var lit *ast.BasicLit - if len(r.scope.field) > 0 { - // Try finding the scope's Field, treating arg as a composite literal - composite, ok := arg.(*ast.CompositeLit) - if !ok { - return - } - for _, el := range composite.Elts { - kv, ok := el.(*ast.KeyValueExpr) - if !ok { - continue - } - key, ok := kv.Key.(*ast.Ident) - if !ok || key.Name != r.scope.field { - continue - } - - // We're now dealing with the exact field in the rule's scope, so if anything fails, we can safely return instead of continuing the loop - lit, ok = kv.Value.(*ast.BasicLit) - if !ok || lit.Kind != token.STRING { - return - } - } - } else { - var ok bool - // Treat arg as a string literal - lit, ok = arg.(*ast.BasicLit) - if !ok || lit.Kind != token.STRING { - return - } - } - // Unquote the string literal before linting - unquoted := lit.Value[1 : len(lit.Value)-1] - r.lintMessage(unquoted, lit) -} - -func (r *stringFormatSubrule) lintMessage(s string, node ast.Node) { - if r.negated { - if !r.regexp.MatchString(s) { - return - } - // Fail if the string does match the user's regex - var failure string - if len(r.errorMessage) > 0 { - failure = r.errorMessage - } else { - failure = fmt.Sprintf("string literal matches user defined regex /%s/", r.regexp.String()) - } - r.parent.onFailure(lint.Failure{ - Confidence: 1, - Failure: failure, - Node: node, - }) - return - } - - // Fail if the string does NOT match the user's regex - if r.regexp.MatchString(s) { - return - } - var failure string - if len(r.errorMessage) > 0 { - failure = r.errorMessage - } else { - failure = fmt.Sprintf("string literal doesn't match user defined regex /%s/", r.regexp.String()) - } - r.parent.onFailure(lint.Failure{ - Confidence: 1, - Failure: failure, - Node: node, - }) -} - -// #endregion diff --git a/vendor/github.com/mgechev/revive/rule/string-of-int.go b/vendor/github.com/mgechev/revive/rule/string-of-int.go deleted file mode 100644 index 3bec1d6ac..000000000 --- a/vendor/github.com/mgechev/revive/rule/string-of-int.go +++ /dev/null @@ -1,95 +0,0 @@ -package rule - -import ( - "go/ast" - "go/types" - - "github.com/mgechev/revive/lint" -) - -// StringOfIntRule warns when logic expressions contains Boolean literals. -type StringOfIntRule struct{} - -// Apply applies the rule to given file. -func (*StringOfIntRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure { - var failures []lint.Failure - - onFailure := func(failure lint.Failure) { - failures = append(failures, failure) - } - - astFile := file.AST - file.Pkg.TypeCheck() - - w := &lintStringInt{file, onFailure} - ast.Walk(w, astFile) - - return failures -} - -// Name returns the rule name. 
-func (*StringOfIntRule) Name() string { - return "string-of-int" -} - -type lintStringInt struct { - file *lint.File - onFailure func(lint.Failure) -} - -func (w *lintStringInt) Visit(node ast.Node) ast.Visitor { - ce, ok := node.(*ast.CallExpr) - if !ok { - return w - } - - if !w.isCallStringCast(ce.Fun) { - return w - } - - if !w.isIntExpression(ce.Args) { - return w - } - - w.onFailure(lint.Failure{ - Confidence: 1, - Node: ce, - Failure: "dubious conversion of an integer into a string, use strconv.Itoa", - }) - - return w -} - -func (w *lintStringInt) isCallStringCast(e ast.Expr) bool { - t := w.file.Pkg.TypeOf(e) - if t == nil { - return false - } - - tb, _ := t.Underlying().(*types.Basic) - - return tb != nil && tb.Kind() == types.String -} - -func (w *lintStringInt) isIntExpression(es []ast.Expr) bool { - if len(es) != 1 { - return false - } - - t := w.file.Pkg.TypeOf(es[0]) - if t == nil { - return false - } - - ut, _ := t.Underlying().(*types.Basic) - if ut == nil || ut.Info()&types.IsInteger == 0 { - return false - } - - switch ut.Kind() { - case types.Byte, types.Rune, types.UntypedRune: - return false - } - - return true -} diff --git a/vendor/github.com/mgechev/revive/rule/struct-tag.go b/vendor/github.com/mgechev/revive/rule/struct-tag.go deleted file mode 100644 index f6ee47a73..000000000 --- a/vendor/github.com/mgechev/revive/rule/struct-tag.go +++ /dev/null @@ -1,421 +0,0 @@ -package rule - -import ( - "fmt" - "go/ast" - "strconv" - "strings" - "sync" - - "github.com/fatih/structtag" - "github.com/mgechev/revive/lint" -) - -// StructTagRule lints struct tags. -type StructTagRule struct { - userDefined map[string][]string // map: key -> []option - sync.Mutex -} - -func (r *StructTagRule) configure(arguments lint.Arguments) { - r.Lock() - defer r.Unlock() - if r.userDefined == nil && len(arguments) > 0 { - checkNumberOfArguments(1, arguments, r.Name()) - r.userDefined = make(map[string][]string, len(arguments)) - for _, arg := range arguments { - item, ok := arg.(string) - if !ok { - panic(fmt.Sprintf("Invalid argument to the %s rule. Expecting a string, got %v (of type %T)", r.Name(), arg, arg)) - } - parts := strings.Split(item, ",") - if len(parts) < 2 { - panic(fmt.Sprintf("Invalid argument to the %s rule. Expecting a string of the form key[,option]+, got %s", r.Name(), item)) - } - key := strings.TrimSpace(parts[0]) - for i := 1; i < len(parts); i++ { - option := strings.TrimSpace(parts[i]) - r.userDefined[key] = append(r.userDefined[key], option) - } - } - } -} - -// Apply applies the rule to given file. -func (r *StructTagRule) Apply(file *lint.File, args lint.Arguments) []lint.Failure { - r.configure(args) - - var failures []lint.Failure - onFailure := func(failure lint.Failure) { - failures = append(failures, failure) - } - - w := lintStructTagRule{ - onFailure: onFailure, - userDefined: r.userDefined, - } - - ast.Walk(w, file.AST) - - return failures -} - -// Name returns the rule name. 
-func (*StructTagRule) Name() string { - return "struct-tag" -} - -type lintStructTagRule struct { - onFailure func(lint.Failure) - userDefined map[string][]string // map: key -> []option - usedTagNbr map[int]bool // list of used tag numbers - usedTagName map[string]bool // list of used tag keys -} - -func (w lintStructTagRule) Visit(node ast.Node) ast.Visitor { - switch n := node.(type) { - case *ast.StructType: - if n.Fields == nil || n.Fields.NumFields() < 1 { - return nil // skip empty structs - } - w.usedTagNbr = map[int]bool{} // init - w.usedTagName = map[string]bool{} // init - for _, f := range n.Fields.List { - if f.Tag != nil { - w.checkTaggedField(f) - } - } - } - - return w -} - -const keyASN1 = "asn1" -const keyBSON = "bson" -const keyDefault = "default" -const keyJSON = "json" -const keyProtobuf = "protobuf" -const keyRequired = "required" -const keyXML = "xml" -const keyYAML = "yaml" - -func (w lintStructTagRule) checkTagNameIfNeed(tag *structtag.Tag) (string, bool) { - isUnnamedTag := tag.Name == "" || tag.Name == "-" - if isUnnamedTag { - return "", true - } - - needsToCheckTagName := tag.Key == keyBSON || - tag.Key == keyJSON || - tag.Key == keyXML || - tag.Key == keyYAML || - tag.Key == keyProtobuf - - if !needsToCheckTagName { - return "", true - } - - tagName := w.getTagName(tag) - if tagName == "" { - return "", true // No tag name found - } - - // We concat the key and name as the mapping key here - // to allow the same tag name in different tag type. - key := tag.Key + ":" + tagName - if _, ok := w.usedTagName[key]; ok { - return fmt.Sprintf("duplicate tag name: '%s'", tagName), false - } - - w.usedTagName[key] = true - - return "", true -} - -func (lintStructTagRule) getTagName(tag *structtag.Tag) string { - switch tag.Key { - case keyProtobuf: - for _, option := range tag.Options { - if strings.HasPrefix(option, "name=") { - return strings.TrimPrefix(option, "name=") - } - } - return "" // protobuf tag lacks 'name' option - default: - return tag.Name - } -} - -// checkTaggedField checks the tag of the given field. 
-// precondition: the field has a tag -func (w lintStructTagRule) checkTaggedField(f *ast.Field) { - if len(f.Names) > 0 && !f.Names[0].IsExported() { - w.addFailure(f, "tag on not-exported field "+f.Names[0].Name) - } - - tags, err := structtag.Parse(strings.Trim(f.Tag.Value, "`")) - if err != nil || tags == nil { - w.addFailure(f.Tag, "malformed tag") - return - } - - for _, tag := range tags.Tags() { - if msg, ok := w.checkTagNameIfNeed(tag); !ok { - w.addFailure(f.Tag, msg) - } - - switch key := tag.Key; key { - case keyASN1: - msg, ok := w.checkASN1Tag(f.Type, tag) - if !ok { - w.addFailure(f.Tag, msg) - } - case keyBSON: - msg, ok := w.checkBSONTag(tag.Options) - if !ok { - w.addFailure(f.Tag, msg) - } - case keyDefault: - if !w.typeValueMatch(f.Type, tag.Name) { - w.addFailure(f.Tag, "field's type and default value's type mismatch") - } - case keyJSON: - msg, ok := w.checkJSONTag(tag.Name, tag.Options) - if !ok { - w.addFailure(f.Tag, msg) - } - case keyProtobuf: - msg, ok := w.checkProtobufTag(tag) - if !ok { - w.addFailure(f.Tag, msg) - } - case keyRequired: - if tag.Name != "true" && tag.Name != "false" { - w.addFailure(f.Tag, "required should be 'true' or 'false'") - } - case keyXML: - msg, ok := w.checkXMLTag(tag.Options) - if !ok { - w.addFailure(f.Tag, msg) - } - case keyYAML: - msg, ok := w.checkYAMLTag(tag.Options) - if !ok { - w.addFailure(f.Tag, msg) - } - default: - // unknown key - } - } -} - -func (w lintStructTagRule) checkASN1Tag(t ast.Expr, tag *structtag.Tag) (string, bool) { - checkList := append(tag.Options, tag.Name) - for _, opt := range checkList { - switch opt { - case "application", "explicit", "generalized", "ia5", "omitempty", "optional", "set", "utf8": - - default: - if strings.HasPrefix(opt, "tag:") { - parts := strings.Split(opt, ":") - tagNumber := parts[1] - number, err := strconv.Atoi(tagNumber) - if err != nil { - return fmt.Sprintf("ASN1 tag must be a number, got '%s'", tagNumber), false - } - if w.usedTagNbr[number] { - return fmt.Sprintf("duplicated tag number %v", number), false - } - w.usedTagNbr[number] = true - - continue - } - - if strings.HasPrefix(opt, "default:") { - parts := strings.Split(opt, ":") - if len(parts) < 2 { - return "malformed default for ASN1 tag", false - } - if !w.typeValueMatch(t, parts[1]) { - return "field's type and default value's type mismatch", false - } - - continue - } - - if w.isUserDefined(keyASN1, opt) { - continue - } - - return fmt.Sprintf("unknown option '%s' in ASN1 tag", opt), false - } - } - - return "", true -} - -func (w lintStructTagRule) checkBSONTag(options []string) (string, bool) { - for _, opt := range options { - switch opt { - case "inline", "minsize", "omitempty": - default: - if w.isUserDefined(keyBSON, opt) { - continue - } - return fmt.Sprintf("unknown option '%s' in BSON tag", opt), false - } - } - - return "", true -} - -func (w lintStructTagRule) checkJSONTag(name string, options []string) (string, bool) { - for _, opt := range options { - switch opt { - case "omitempty", "string": - case "": - // special case for JSON key "-" - if name != "-" { - return "option can not be empty in JSON tag", false - } - default: - if w.isUserDefined(keyJSON, opt) { - continue - } - return fmt.Sprintf("unknown option '%s' in JSON tag", opt), false - } - } - - return "", true -} - -func (w lintStructTagRule) checkXMLTag(options []string) (string, bool) { - for _, opt := range options { - switch opt { - case "any", "attr", "cdata", "chardata", "comment", "innerxml", "omitempty", "typeattr": - default: - if 
w.isUserDefined(keyXML, opt) { - continue - } - return fmt.Sprintf("unknown option '%s' in XML tag", opt), false - } - } - - return "", true -} - -func (w lintStructTagRule) checkYAMLTag(options []string) (string, bool) { - for _, opt := range options { - switch opt { - case "flow", "inline", "omitempty": - default: - if w.isUserDefined(keyYAML, opt) { - continue - } - return fmt.Sprintf("unknown option '%s' in YAML tag", opt), false - } - } - - return "", true -} - -func (lintStructTagRule) typeValueMatch(t ast.Expr, val string) bool { - tID, ok := t.(*ast.Ident) - if !ok { - return true - } - - typeMatches := true - switch tID.Name { - case "bool": - typeMatches = val == "true" || val == "false" - case "float64": - _, err := strconv.ParseFloat(val, 64) - typeMatches = err == nil - case "int": - _, err := strconv.ParseInt(val, 10, 64) - typeMatches = err == nil - case "string": - case "nil": - default: - // unchecked type - } - - return typeMatches -} - -func (w lintStructTagRule) checkProtobufTag(tag *structtag.Tag) (string, bool) { - // check name - switch tag.Name { - case "bytes", "fixed32", "fixed64", "group", "varint", "zigzag32", "zigzag64": - // do nothing - default: - return fmt.Sprintf("invalid protobuf tag name '%s'", tag.Name), false - } - - // check options - seenOptions := map[string]bool{} - for _, opt := range tag.Options { - if number, err := strconv.Atoi(opt); err == nil { - _, alreadySeen := w.usedTagNbr[number] - if alreadySeen { - return fmt.Sprintf("duplicated tag number %v", number), false - } - w.usedTagNbr[number] = true - continue // option is an integer - } - - switch { - case opt == "opt" || opt == "proto3" || opt == "rep" || opt == "req": - // do nothing - case strings.Contains(opt, "="): - o := strings.Split(opt, "=")[0] - _, alreadySeen := seenOptions[o] - if alreadySeen { - return fmt.Sprintf("protobuf tag has duplicated option '%s'", o), false - } - seenOptions[o] = true - continue - } - } - _, hasName := seenOptions["name"] - if !hasName { - return "protobuf tag lacks mandatory option 'name'", false - } - - for k := range seenOptions { - switch k { - case "name", "json": - // do nothing - default: - if w.isUserDefined(keyProtobuf, k) { - continue - } - return fmt.Sprintf("unknown option '%s' in protobuf tag", k), false - } - } - - return "", true -} - -func (w lintStructTagRule) addFailure(n ast.Node, msg string) { - w.onFailure(lint.Failure{ - Node: n, - Failure: msg, - Confidence: 1, - }) -} - -func (w lintStructTagRule) isUserDefined(key, opt string) bool { - if w.userDefined == nil { - return false - } - - options := w.userDefined[key] - for _, o := range options { - if opt == o { - return true - } - } - return false -} diff --git a/vendor/github.com/mgechev/revive/rule/superfluous-else.go b/vendor/github.com/mgechev/revive/rule/superfluous-else.go deleted file mode 100644 index 2aa1b6b2c..000000000 --- a/vendor/github.com/mgechev/revive/rule/superfluous-else.go +++ /dev/null @@ -1,46 +0,0 @@ -package rule - -import ( - "fmt" - "github.com/mgechev/revive/internal/ifelse" - "github.com/mgechev/revive/lint" -) - -// SuperfluousElseRule lints given else constructs. -type SuperfluousElseRule struct{} - -// Apply applies the rule to given file. -func (e *SuperfluousElseRule) Apply(file *lint.File, args lint.Arguments) []lint.Failure { - return ifelse.Apply(e, file.AST, ifelse.TargetElse, args) -} - -// Name returns the rule name. 
-func (*SuperfluousElseRule) Name() string { - return "superfluous-else" -} - -// CheckIfElse evaluates the rule against an ifelse.Chain. -func (*SuperfluousElseRule) CheckIfElse(chain ifelse.Chain, args ifelse.Args) (failMsg string) { - if !chain.If.Deviates() { - // this rule only applies if the if-block deviates control flow - return - } - - if chain.HasPriorNonDeviating { - // if we de-indent the "else" block then a previous branch - // might flow into it, affecting program behaviour - return - } - - if chain.If.Returns() { - // avoid overlapping with indent-error-flow - return - } - - if args.PreserveScope && !chain.AtBlockEnd && (chain.HasInitializer || chain.Else.HasDecls) { - // avoid increasing variable scope - return - } - - return fmt.Sprintf("if block ends with %v, so drop this else and outdent its block", chain.If.LongString()) -} diff --git a/vendor/github.com/mgechev/revive/rule/time-equal.go b/vendor/github.com/mgechev/revive/rule/time-equal.go deleted file mode 100644 index 3b85e18a8..000000000 --- a/vendor/github.com/mgechev/revive/rule/time-equal.go +++ /dev/null @@ -1,76 +0,0 @@ -package rule - -import ( - "fmt" - "go/ast" - "go/token" - - "github.com/mgechev/revive/lint" -) - -// TimeEqualRule shows where "==" and "!=" used for equality check time.Time -type TimeEqualRule struct{} - -// Apply applies the rule to given file. -func (*TimeEqualRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure { - var failures []lint.Failure - - onFailure := func(failure lint.Failure) { - failures = append(failures, failure) - } - - w := &lintTimeEqual{file, onFailure} - if w.file.Pkg.TypeCheck() != nil { - return nil - } - - ast.Walk(w, file.AST) - return failures -} - -// Name returns the rule name. -func (*TimeEqualRule) Name() string { - return "time-equal" -} - -type lintTimeEqual struct { - file *lint.File - onFailure func(lint.Failure) -} - -func (l *lintTimeEqual) Visit(node ast.Node) ast.Visitor { - expr, ok := node.(*ast.BinaryExpr) - if !ok { - return l - } - - switch expr.Op { - case token.EQL, token.NEQ: - default: - return l - } - - xtyp := l.file.Pkg.TypeOf(expr.X) - ytyp := l.file.Pkg.TypeOf(expr.Y) - - if !isNamedType(xtyp, "time", "Time") || !isNamedType(ytyp, "time", "Time") { - return l - } - - var failure string - switch expr.Op { - case token.EQL: - failure = fmt.Sprintf("use %s.Equal(%s) instead of %q operator", gofmt(expr.X), gofmt(expr.Y), expr.Op) - case token.NEQ: - failure = fmt.Sprintf("use !%s.Equal(%s) instead of %q operator", gofmt(expr.X), gofmt(expr.Y), expr.Op) - } - - l.onFailure(lint.Failure{ - Category: "time", - Confidence: 1, - Node: node, - Failure: failure, - }) - - return l -} diff --git a/vendor/github.com/mgechev/revive/rule/time-naming.go b/vendor/github.com/mgechev/revive/rule/time-naming.go deleted file mode 100644 index cea452e61..000000000 --- a/vendor/github.com/mgechev/revive/rule/time-naming.go +++ /dev/null @@ -1,95 +0,0 @@ -package rule - -import ( - "fmt" - "go/ast" - "go/types" - "strings" - - "github.com/mgechev/revive/lint" -) - -// TimeNamingRule lints given else constructs. -type TimeNamingRule struct{} - -// Apply applies the rule to given file. -func (*TimeNamingRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure { - var failures []lint.Failure - - onFailure := func(failure lint.Failure) { - failures = append(failures, failure) - } - - w := &lintTimeNames{file, onFailure} - - file.Pkg.TypeCheck() - ast.Walk(w, file.AST) - return failures -} - -// Name returns the rule name. 
-func (*TimeNamingRule) Name() string { - return "time-naming" -} - -type lintTimeNames struct { - file *lint.File - onFailure func(lint.Failure) -} - -func (w *lintTimeNames) Visit(node ast.Node) ast.Visitor { - v, ok := node.(*ast.ValueSpec) - if !ok { - return w - } - for _, name := range v.Names { - origTyp := w.file.Pkg.TypeOf(name) - // Look for time.Duration or *time.Duration; - // the latter is common when using flag.Duration. - typ := origTyp - if pt, ok := typ.(*types.Pointer); ok { - typ = pt.Elem() - } - if !isNamedType(typ, "time", "Duration") { - continue - } - suffix := "" - for _, suf := range timeSuffixes { - if strings.HasSuffix(name.Name, suf) { - suffix = suf - break - } - } - if suffix == "" { - continue - } - w.onFailure(lint.Failure{ - Category: "time", - Confidence: 0.9, - Node: v, - Failure: fmt.Sprintf("var %s is of type %v; don't use unit-specific suffix %q", name.Name, origTyp, suffix), - }) - } - return w -} - -// timeSuffixes is a list of name suffixes that imply a time unit. -// This is not an exhaustive list. -var timeSuffixes = []string{ - "Hour", "Hours", - "Min", "Mins", "Minutes", "Minute", - "Sec", "Secs", "Seconds", "Second", - "Msec", "Msecs", - "Milli", "Millis", "Milliseconds", "Millisecond", - "Usec", "Usecs", "Microseconds", "Microsecond", - "MS", "Ms", -} - -func isNamedType(typ types.Type, importPath, name string) bool { - n, ok := typ.(*types.Named) - if !ok { - return false - } - tn := n.Obj() - return tn != nil && tn.Pkg() != nil && tn.Pkg().Path() == importPath && tn.Name() == name -} diff --git a/vendor/github.com/mgechev/revive/rule/unchecked-type-assertion.go b/vendor/github.com/mgechev/revive/rule/unchecked-type-assertion.go deleted file mode 100644 index df27743cb..000000000 --- a/vendor/github.com/mgechev/revive/rule/unchecked-type-assertion.go +++ /dev/null @@ -1,194 +0,0 @@ -package rule - -import ( - "fmt" - "go/ast" - "sync" - - "github.com/mgechev/revive/lint" -) - -const ( - ruleUTAMessagePanic = "type assertion will panic if not matched" - ruleUTAMessageIgnored = "type assertion result ignored" -) - -// UncheckedTypeAssertionRule lints missing or ignored `ok`-value in danymic type casts. -type UncheckedTypeAssertionRule struct { - sync.Mutex - acceptIgnoredAssertionResult bool - configured bool -} - -func (u *UncheckedTypeAssertionRule) configure(arguments lint.Arguments) { - u.Lock() - defer u.Unlock() - - if len(arguments) == 0 || u.configured { - return - } - - u.configured = true - - args, ok := arguments[0].(map[string]any) - if !ok { - panic("Unable to get arguments. Expected object of key-value-pairs.") - } - - for k, v := range args { - switch k { - case "acceptIgnoredAssertionResult": - u.acceptIgnoredAssertionResult, ok = v.(bool) - if !ok { - panic(fmt.Sprintf("Unable to parse argument '%s'. Expected boolean.", k)) - } - default: - panic(fmt.Sprintf("Unknown argument: %s", k)) - } - } -} - -// Apply applies the rule to given file. -func (u *UncheckedTypeAssertionRule) Apply(file *lint.File, args lint.Arguments) []lint.Failure { - u.configure(args) - - var failures []lint.Failure - - walker := &lintUnchekedTypeAssertion{ - onFailure: func(failure lint.Failure) { - failures = append(failures, failure) - }, - acceptIgnoredTypeAssertionResult: u.acceptIgnoredAssertionResult, - } - - ast.Walk(walker, file.AST) - - return failures -} - -// Name returns the rule name. 
-func (*UncheckedTypeAssertionRule) Name() string { - return "unchecked-type-assertion" -} - -type lintUnchekedTypeAssertion struct { - onFailure func(lint.Failure) - acceptIgnoredTypeAssertionResult bool -} - -func isIgnored(e ast.Expr) bool { - ident, ok := e.(*ast.Ident) - if !ok { - return false - } - - return ident.Name == "_" -} - -func isTypeSwitch(e *ast.TypeAssertExpr) bool { - return e.Type == nil -} - -func (w *lintUnchekedTypeAssertion) requireNoTypeAssert(expr ast.Expr) { - e, ok := expr.(*ast.TypeAssertExpr) - if ok && !isTypeSwitch(e) { - w.addFailure(e, ruleUTAMessagePanic) - } -} - -func (w *lintUnchekedTypeAssertion) handleIfStmt(n *ast.IfStmt) { - ifCondition, ok := n.Cond.(*ast.BinaryExpr) - if ok { - w.requireNoTypeAssert(ifCondition.X) - w.requireNoTypeAssert(ifCondition.Y) - } -} - -func (w *lintUnchekedTypeAssertion) requireBinaryExpressionWithoutTypeAssertion(expr ast.Expr) { - binaryExpr, ok := expr.(*ast.BinaryExpr) - if ok { - w.requireNoTypeAssert(binaryExpr.X) - w.requireNoTypeAssert(binaryExpr.Y) - } -} - -func (w *lintUnchekedTypeAssertion) handleCaseClause(n *ast.CaseClause) { - for _, expr := range n.List { - w.requireNoTypeAssert(expr) - w.requireBinaryExpressionWithoutTypeAssertion(expr) - } -} - -func (w *lintUnchekedTypeAssertion) handleSwitch(n *ast.SwitchStmt) { - w.requireNoTypeAssert(n.Tag) - w.requireBinaryExpressionWithoutTypeAssertion(n.Tag) -} - -func (w *lintUnchekedTypeAssertion) handleAssignment(n *ast.AssignStmt) { - if len(n.Rhs) == 0 { - return - } - - e, ok := n.Rhs[0].(*ast.TypeAssertExpr) - if !ok || e == nil { - return - } - - if isTypeSwitch(e) { - return - } - - if len(n.Lhs) == 1 { - w.addFailure(e, ruleUTAMessagePanic) - } - - if !w.acceptIgnoredTypeAssertionResult && len(n.Lhs) == 2 && isIgnored(n.Lhs[1]) { - w.addFailure(e, ruleUTAMessageIgnored) - } -} - -// handles "return foo(.*bar)" - one of them is enough to fail as golang does not forward the type cast tuples in return statements -func (w *lintUnchekedTypeAssertion) handleReturn(n *ast.ReturnStmt) { - for _, r := range n.Results { - w.requireNoTypeAssert(r) - } -} - -func (w *lintUnchekedTypeAssertion) handleRange(n *ast.RangeStmt) { - w.requireNoTypeAssert(n.X) -} - -func (w *lintUnchekedTypeAssertion) handleChannelSend(n *ast.SendStmt) { - w.requireNoTypeAssert(n.Value) -} - -func (w *lintUnchekedTypeAssertion) Visit(node ast.Node) ast.Visitor { - switch n := node.(type) { - case *ast.RangeStmt: - w.handleRange(n) - case *ast.SwitchStmt: - w.handleSwitch(n) - case *ast.ReturnStmt: - w.handleReturn(n) - case *ast.AssignStmt: - w.handleAssignment(n) - case *ast.IfStmt: - w.handleIfStmt(n) - case *ast.CaseClause: - w.handleCaseClause(n) - case *ast.SendStmt: - w.handleChannelSend(n) - } - - return w -} - -func (w *lintUnchekedTypeAssertion) addFailure(n *ast.TypeAssertExpr, why string) { - s := fmt.Sprintf("type cast result is unchecked in %v - %s", gofmt(n), why) - w.onFailure(lint.Failure{ - Category: "bad practice", - Confidence: 1, - Node: n, - Failure: s, - }) -} diff --git a/vendor/github.com/mgechev/revive/rule/unconditional-recursion.go b/vendor/github.com/mgechev/revive/rule/unconditional-recursion.go deleted file mode 100644 index 9ac2648cd..000000000 --- a/vendor/github.com/mgechev/revive/rule/unconditional-recursion.go +++ /dev/null @@ -1,199 +0,0 @@ -package rule - -import ( - "go/ast" - - "github.com/mgechev/revive/lint" -) - -// UnconditionalRecursionRule lints given else constructs. 
-type UnconditionalRecursionRule struct{} - -// Apply applies the rule to given file. -func (*UnconditionalRecursionRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure { - var failures []lint.Failure - - onFailure := func(failure lint.Failure) { - failures = append(failures, failure) - } - - w := lintUnconditionalRecursionRule{onFailure: onFailure} - ast.Walk(w, file.AST) - return failures -} - -// Name returns the rule name. -func (*UnconditionalRecursionRule) Name() string { - return "unconditional-recursion" -} - -type funcDesc struct { - receiverID *ast.Ident - id *ast.Ident -} - -func (fd *funcDesc) equal(other *funcDesc) bool { - receiversAreEqual := (fd.receiverID == nil && other.receiverID == nil) || fd.receiverID != nil && other.receiverID != nil && fd.receiverID.Name == other.receiverID.Name - idsAreEqual := (fd.id == nil && other.id == nil) || fd.id.Name == other.id.Name - - return receiversAreEqual && idsAreEqual -} - -type funcStatus struct { - funcDesc *funcDesc - seenConditionalExit bool -} - -type lintUnconditionalRecursionRule struct { - onFailure func(lint.Failure) - currentFunc *funcStatus - inGoStatement bool -} - -// Visit will traverse the file AST. -// The rule is based in the following algorithm: inside each function body we search for calls to the function itself. -// We do not search inside conditional control structures (if, for, switch, ...) because any recursive call inside them is conditioned -// We do search inside conditional control structures are statements that will take the control out of the function (return, exit, panic) -// If we find conditional control exits, it means the function is NOT unconditionally-recursive -// If we find a recursive call before finding any conditional exit, a failure is generated -// In resume: if we found a recursive call control-dependant from the entry point of the function then we raise a failure. 
-func (w lintUnconditionalRecursionRule) Visit(node ast.Node) ast.Visitor { - switch n := node.(type) { - case *ast.FuncDecl: - var rec *ast.Ident - switch { - case n.Recv == nil: - rec = nil - case n.Recv.NumFields() < 1 || len(n.Recv.List[0].Names) < 1: - rec = &ast.Ident{Name: "_"} - default: - rec = n.Recv.List[0].Names[0] - } - w.currentFunc = &funcStatus{&funcDesc{rec, n.Name}, false} - case *ast.CallExpr: - // check if call arguments has a recursive call - for _, arg := range n.Args { - ast.Walk(w, arg) - } - - var funcID *ast.Ident - var selector *ast.Ident - switch c := n.Fun.(type) { - case *ast.Ident: - selector = nil - funcID = c - case *ast.SelectorExpr: - var ok bool - selector, ok = c.X.(*ast.Ident) - if !ok { // a.b....Foo() - return nil - } - funcID = c.Sel - case *ast.FuncLit: - ast.Walk(w, c.Body) // analyze the body of the function literal - return nil - default: - return w - } - - if w.currentFunc != nil && // not in a func body - !w.currentFunc.seenConditionalExit && // there is a conditional exit in the function - w.currentFunc.funcDesc.equal(&funcDesc{selector, funcID}) { - w.onFailure(lint.Failure{ - Category: "logic", - Confidence: 0.8, - Node: n, - Failure: "unconditional recursive call", - }) - } - return nil - case *ast.IfStmt: - w.updateFuncStatus(n.Body) - w.updateFuncStatus(n.Else) - return nil - case *ast.SelectStmt: - w.updateFuncStatus(n.Body) - return nil - case *ast.RangeStmt: - w.updateFuncStatus(n.Body) - return nil - case *ast.TypeSwitchStmt: - w.updateFuncStatus(n.Body) - return nil - case *ast.SwitchStmt: - w.updateFuncStatus(n.Body) - return nil - case *ast.GoStmt: - w.inGoStatement = true - ast.Walk(w, n.Call) - w.inGoStatement = false - return nil - case *ast.ForStmt: - if n.Cond != nil { - return nil - } - // unconditional loop - return w - case *ast.FuncLit: - if w.inGoStatement { - return w - } - return nil // literal call (closure) is not necessarily an issue - } - - return w -} - -func (w *lintUnconditionalRecursionRule) updateFuncStatus(node ast.Node) { - if node == nil || w.currentFunc == nil || w.currentFunc.seenConditionalExit { - return - } - - w.currentFunc.seenConditionalExit = w.hasControlExit(node) -} - -var exitFunctions = map[string]map[string]bool{ - "os": {"Exit": true}, - "syscall": {"Exit": true}, - "log": { - "Fatal": true, - "Fatalf": true, - "Fatalln": true, - "Panic": true, - "Panicf": true, - "Panicln": true, - }, -} - -func (lintUnconditionalRecursionRule) hasControlExit(node ast.Node) bool { - // isExit returns true if the given node makes control exit the function - isExit := func(node ast.Node) bool { - switch n := node.(type) { - case *ast.ReturnStmt: - return true - case *ast.CallExpr: - if isIdent(n.Fun, "panic") { - return true - } - se, ok := n.Fun.(*ast.SelectorExpr) - if !ok { - return false - } - - id, ok := se.X.(*ast.Ident) - if !ok { - return false - } - - fn := se.Sel.Name - pkg := id.Name - if exitFunctions[pkg] != nil && exitFunctions[pkg][fn] { // it's a call to an exit function - return true - } - } - - return false - } - - return len(pick(node, isExit)) != 0 -} diff --git a/vendor/github.com/mgechev/revive/rule/unexported-naming.go b/vendor/github.com/mgechev/revive/rule/unexported-naming.go deleted file mode 100644 index 0c2b39d41..000000000 --- a/vendor/github.com/mgechev/revive/rule/unexported-naming.go +++ /dev/null @@ -1,115 +0,0 @@ -package rule - -import ( - "fmt" - "go/ast" - "go/token" - - "github.com/mgechev/revive/lint" -) - -// UnexportedNamingRule lints wrongly named unexported symbols. 
-type UnexportedNamingRule struct{} - -// Apply applies the rule to given file. -func (*UnexportedNamingRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure { - var failures []lint.Failure - onFailure := func(failure lint.Failure) { - failures = append(failures, failure) - } - - ba := &unexportablenamingLinter{onFailure} - ast.Walk(ba, file.AST) - - return failures -} - -// Name returns the rule name. -func (*UnexportedNamingRule) Name() string { - return "unexported-naming" -} - -type unexportablenamingLinter struct { - onFailure func(lint.Failure) -} - -func (unl unexportablenamingLinter) Visit(node ast.Node) ast.Visitor { - switch n := node.(type) { - case *ast.FuncDecl: - unl.lintFunction(n.Type, n.Body) - return nil - case *ast.FuncLit: - unl.lintFunction(n.Type, n.Body) - - return nil - case *ast.AssignStmt: - if n.Tok != token.DEFINE { - return nil - } - - ids := []*ast.Ident{} - for _, e := range n.Lhs { - id, ok := e.(*ast.Ident) - if !ok { - continue - } - ids = append(ids, id) - } - - unl.lintIDs(ids) - - case *ast.DeclStmt: - gd, ok := n.Decl.(*ast.GenDecl) - if !ok { - return nil - } - - if len(gd.Specs) < 1 { - return nil - } - - vs, ok := gd.Specs[0].(*ast.ValueSpec) - if !ok { - return nil - } - - unl.lintIDs(vs.Names) - } - - return unl -} - -func (unl unexportablenamingLinter) lintFunction(ft *ast.FuncType, body *ast.BlockStmt) { - unl.lintFields(ft.Params) - unl.lintFields(ft.Results) - - if body != nil { - ast.Walk(unl, body) - } -} - -func (unl unexportablenamingLinter) lintFields(fields *ast.FieldList) { - if fields == nil { - return - } - - ids := []*ast.Ident{} - for _, field := range fields.List { - ids = append(ids, field.Names...) - } - - unl.lintIDs(ids) -} - -func (unl unexportablenamingLinter) lintIDs(ids []*ast.Ident) { - for _, id := range ids { - if id.IsExported() { - unl.onFailure(lint.Failure{ - Node: id, - Confidence: 1, - Category: "naming", - Failure: fmt.Sprintf("the symbol %s is local, its name should start with a lowercase letter", id.String()), - }) - } - } -} diff --git a/vendor/github.com/mgechev/revive/rule/unexported-return.go b/vendor/github.com/mgechev/revive/rule/unexported-return.go deleted file mode 100644 index 10f8e3fbe..000000000 --- a/vendor/github.com/mgechev/revive/rule/unexported-return.go +++ /dev/null @@ -1,107 +0,0 @@ -package rule - -import ( - "fmt" - "go/ast" - "go/types" - - "github.com/mgechev/revive/internal/typeparams" - "github.com/mgechev/revive/lint" -) - -// UnexportedReturnRule lints given else constructs. -type UnexportedReturnRule struct{} - -// Apply applies the rule to given file. -func (*UnexportedReturnRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure { - var failures []lint.Failure - - fileAst := file.AST - walker := lintUnexportedReturn{ - file: file, - fileAst: fileAst, - onFailure: func(failure lint.Failure) { - failures = append(failures, failure) - }, - } - - file.Pkg.TypeCheck() - ast.Walk(walker, fileAst) - - return failures -} - -// Name returns the rule name. 
-func (*UnexportedReturnRule) Name() string { - return "unexported-return" -} - -type lintUnexportedReturn struct { - file *lint.File - fileAst *ast.File - onFailure func(lint.Failure) -} - -func (w lintUnexportedReturn) Visit(n ast.Node) ast.Visitor { - fn, ok := n.(*ast.FuncDecl) - if !ok { - return w - } - if fn.Type.Results == nil { - return nil - } - if !fn.Name.IsExported() { - return nil - } - thing := "func" - if fn.Recv != nil && len(fn.Recv.List) > 0 { - thing = "method" - if !ast.IsExported(typeparams.ReceiverType(fn)) { - // Don't report exported methods of unexported types, - // such as private implementations of sort.Interface. - return nil - } - } - for _, ret := range fn.Type.Results.List { - typ := w.file.Pkg.TypeOf(ret.Type) - if exportedType(typ) { - continue - } - w.onFailure(lint.Failure{ - Category: "unexported-type-in-api", - Node: ret.Type, - Confidence: 0.8, - Failure: fmt.Sprintf("exported %s %s returns unexported type %s, which can be annoying to use", - thing, fn.Name.Name, typ), - }) - break // only flag one - } - return nil -} - -// exportedType reports whether typ is an exported type. -// It is imprecise, and will err on the side of returning true, -// such as for composite types. -func exportedType(typ types.Type) bool { - switch t := typ.(type) { - case *types.Named: - obj := t.Obj() - switch { - // Builtin types have no package. - case obj.Pkg() == nil: - case obj.Exported(): - default: - _, ok := t.Underlying().(*types.Interface) - return ok - } - return true - case *types.Map: - return exportedType(t.Key()) && exportedType(t.Elem()) - case interface { - Elem() types.Type - }: // array, slice, pointer, chan - return exportedType(t.Elem()) - } - // Be conservative about other types, such as struct, interface, etc. - return true -} diff --git a/vendor/github.com/mgechev/revive/rule/unhandled-error.go b/vendor/github.com/mgechev/revive/rule/unhandled-error.go deleted file mode 100644 index ce6fa3864..000000000 --- a/vendor/github.com/mgechev/revive/rule/unhandled-error.go +++ /dev/null @@ -1,178 +0,0 @@ -package rule - -import ( - "fmt" - "go/ast" - "go/types" - "regexp" - "strings" - "sync" - - "github.com/mgechev/revive/lint" -) - -// UnhandledErrorRule lints given else constructs. -type UnhandledErrorRule struct { - ignoreList []*regexp.Regexp - sync.Mutex -} - -func (r *UnhandledErrorRule) configure(arguments lint.Arguments) { - r.Lock() - if r.ignoreList == nil { - for _, arg := range arguments { - argStr, ok := arg.(string) - if !ok { - panic(fmt.Sprintf("Invalid argument to the unhandled-error rule. Expecting a string, got %T", arg)) - } - - argStr = strings.Trim(argStr, " ") - if argStr == "" { - panic("Invalid argument to the unhandled-error rule, expected regular expression must not be empty.") - } - - exp, err := regexp.Compile(argStr) - if err != nil { - panic(fmt.Sprintf("Invalid argument to the unhandled-error rule: regexp %q does not compile: %v", argStr, err)) - } - - r.ignoreList = append(r.ignoreList, exp) - } - } - r.Unlock() -} - -// Apply applies the rule to given file. -func (r *UnhandledErrorRule) Apply(file *lint.File, args lint.Arguments) []lint.Failure { - r.configure(args) - - var failures []lint.Failure - - walker := &lintUnhandledErrors{ - ignoreList: r.ignoreList, - pkg: file.Pkg, - onFailure: func(failure lint.Failure) { - failures = append(failures, failure) - }, - } - - file.Pkg.TypeCheck() - ast.Walk(walker, file.AST) - - return failures -} - -// Name returns the rule name. 
-func (*UnhandledErrorRule) Name() string { - return "unhandled-error" -} - -type lintUnhandledErrors struct { - ignoreList []*regexp.Regexp - pkg *lint.Package - onFailure func(lint.Failure) -} - -// Visit looks for statements that are function calls. -// If the called function returns a value of type error a failure will be created. -func (w *lintUnhandledErrors) Visit(node ast.Node) ast.Visitor { - switch n := node.(type) { - case *ast.ExprStmt: - fCall, ok := n.X.(*ast.CallExpr) - if !ok { - return nil // not a function call - } - - funcType := w.pkg.TypeOf(fCall) - if funcType == nil { - return nil // skip, type info not available - } - - switch t := funcType.(type) { - case *types.Named: - if !w.isTypeError(t) { - return nil // func call does not return an error - } - - w.addFailure(fCall) - default: - retTypes, ok := funcType.Underlying().(*types.Tuple) - if !ok { - return nil // skip, unable to retrieve return type of the called function - } - - if w.returnsAnError(retTypes) { - w.addFailure(fCall) - } - } - } - return w -} - -func (w *lintUnhandledErrors) addFailure(n *ast.CallExpr) { - name := w.funcName(n) - if w.isIgnoredFunc(name) { - return - } - - w.onFailure(lint.Failure{ - Category: "bad practice", - Confidence: 1, - Node: n, - Failure: fmt.Sprintf("Unhandled error in call to function %v", name), - }) -} - -func (w *lintUnhandledErrors) funcName(call *ast.CallExpr) string { - fn, ok := w.getFunc(call) - if !ok { - return gofmt(call.Fun) - } - - name := fn.FullName() - name = strings.Replace(name, "(", "", -1) - name = strings.Replace(name, ")", "", -1) - name = strings.Replace(name, "*", "", -1) - - return name -} - -func (w *lintUnhandledErrors) isIgnoredFunc(funcName string) bool { - for _, pattern := range w.ignoreList { - if len(pattern.FindString(funcName)) == len(funcName) { - return true - } - } - - return false -} - -func (*lintUnhandledErrors) isTypeError(t *types.Named) bool { - const errorTypeName = "_.error" - - return t.Obj().Id() == errorTypeName -} - -func (w *lintUnhandledErrors) returnsAnError(tt *types.Tuple) bool { - for i := 0; i < tt.Len(); i++ { - nt, ok := tt.At(i).Type().(*types.Named) - if ok && w.isTypeError(nt) { - return true - } - } - return false -} - -func (w *lintUnhandledErrors) getFunc(call *ast.CallExpr) (*types.Func, bool) { - sel, ok := call.Fun.(*ast.SelectorExpr) - if !ok { - return nil, false - } - - fn, ok := w.pkg.TypesInfo().ObjectOf(sel.Sel).(*types.Func) - if !ok { - return nil, false - } - - return fn, true -} diff --git a/vendor/github.com/mgechev/revive/rule/unnecessary-stmt.go b/vendor/github.com/mgechev/revive/rule/unnecessary-stmt.go deleted file mode 100644 index 8e0784ba4..000000000 --- a/vendor/github.com/mgechev/revive/rule/unnecessary-stmt.go +++ /dev/null @@ -1,107 +0,0 @@ -package rule - -import ( - "go/ast" - "go/token" - - "github.com/mgechev/revive/lint" -) - -// UnnecessaryStmtRule warns on unnecessary statements. -type UnnecessaryStmtRule struct{} - -// Apply applies the rule to given file. -func (*UnnecessaryStmtRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure { - var failures []lint.Failure - onFailure := func(failure lint.Failure) { - failures = append(failures, failure) - } - - w := lintUnnecessaryStmtRule{onFailure} - ast.Walk(w, file.AST) - return failures -} - -// Name returns the rule name. 
-func (*UnnecessaryStmtRule) Name() string { - return "unnecessary-stmt" -} - -type lintUnnecessaryStmtRule struct { - onFailure func(lint.Failure) -} - -func (w lintUnnecessaryStmtRule) Visit(node ast.Node) ast.Visitor { - switch n := node.(type) { - case *ast.FuncDecl: - if n.Body == nil || n.Type.Results != nil { - return w - } - stmts := n.Body.List - if len(stmts) == 0 { - return w - } - - lastStmt := stmts[len(stmts)-1] - rs, ok := lastStmt.(*ast.ReturnStmt) - if !ok { - return w - } - - if len(rs.Results) == 0 { - w.newFailure(lastStmt, "omit unnecessary return statement") - } - - case *ast.SwitchStmt: - w.checkSwitchBody(n.Body) - case *ast.TypeSwitchStmt: - w.checkSwitchBody(n.Body) - case *ast.CaseClause: - if n.Body == nil { - return w - } - stmts := n.Body - if len(stmts) == 0 { - return w - } - - lastStmt := stmts[len(stmts)-1] - rs, ok := lastStmt.(*ast.BranchStmt) - if !ok { - return w - } - - if rs.Tok == token.BREAK && rs.Label == nil { - w.newFailure(lastStmt, "omit unnecessary break at the end of case clause") - } - } - - return w -} - -func (w lintUnnecessaryStmtRule) checkSwitchBody(b *ast.BlockStmt) { - cases := b.List - if len(cases) != 1 { - return - } - - cc, ok := cases[0].(*ast.CaseClause) - if !ok { - return - } - - if len(cc.List) > 1 { // skip cases with multiple expressions - return - } - - w.newFailure(b, "switch with only one case can be replaced by an if-then") -} - -func (w lintUnnecessaryStmtRule) newFailure(node ast.Node, msg string) { - w.onFailure(lint.Failure{ - Confidence: 1, - Node: node, - Category: "style", - Failure: msg, - }) -} diff --git a/vendor/github.com/mgechev/revive/rule/unreachable-code.go b/vendor/github.com/mgechev/revive/rule/unreachable-code.go deleted file mode 100644 index dcc5b7905..000000000 --- a/vendor/github.com/mgechev/revive/rule/unreachable-code.go +++ /dev/null @@ -1,122 +0,0 @@ -package rule - -import ( - "go/ast" - - "github.com/mgechev/revive/lint" -) - -// UnreachableCodeRule lints unreachable code. -type UnreachableCodeRule struct{} - -// Apply applies the rule to given file. -func (*UnreachableCodeRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure { - var failures []lint.Failure - onFailure := func(failure lint.Failure) { - failures = append(failures, failure) - } - - testingFunctions := map[string]bool{ - "Fatal": true, - "Fatalf": true, - "FailNow": true, - } - branchingFunctions := map[string]map[string]bool{ - "os": {"Exit": true}, - "log": { - "Fatal": true, - "Fatalf": true, - "Fatalln": true, - "Panic": true, - "Panicf": true, - "Panicln": true, - }, - "t": testingFunctions, - "b": testingFunctions, - "f": testingFunctions, - } - - w := lintUnreachableCode{onFailure, branchingFunctions} - ast.Walk(w, file.AST) - return failures -} - -// Name returns the rule name. 
-func (*UnreachableCodeRule) Name() string { - return "unreachable-code" -} - -type lintUnreachableCode struct { - onFailure func(lint.Failure) - branchingFunctions map[string]map[string]bool -} - -func (w lintUnreachableCode) Visit(node ast.Node) ast.Visitor { - blk, ok := node.(*ast.BlockStmt) - if !ok { - return w - } - - if len(blk.List) < 2 { - return w - } -loop: - for i, stmt := range blk.List[:len(blk.List)-1] { - // println("iterating ", len(blk.List)) - next := blk.List[i+1] - if _, ok := next.(*ast.LabeledStmt); ok { - continue // skip if next statement is labeled - } - - switch s := stmt.(type) { - case *ast.ReturnStmt: - w.onFailure(newUnreachableCodeFailure(s)) - break loop - case *ast.BranchStmt: - token := s.Tok.String() - if token != "fallthrough" { - w.onFailure(newUnreachableCodeFailure(s)) - break loop - } - case *ast.ExprStmt: - ce, ok := s.X.(*ast.CallExpr) - if !ok { - continue - } - // it's a function call - fc, ok := ce.Fun.(*ast.SelectorExpr) - if !ok { - continue - } - - id, ok := fc.X.(*ast.Ident) - - if !ok { - continue - } - fn := fc.Sel.Name - pkg := id.Name - if !w.branchingFunctions[pkg][fn] { // it isn't a call to a branching function - continue - } - - if _, ok := next.(*ast.ReturnStmt); ok { // return statement needed to satisfy function signature - continue - } - - w.onFailure(newUnreachableCodeFailure(s)) - break loop - } - } - - return w -} - -func newUnreachableCodeFailure(node ast.Node) lint.Failure { - return lint.Failure{ - Confidence: 1, - Node: node, - Category: "logic", - Failure: "unreachable code after this statement", - } -} diff --git a/vendor/github.com/mgechev/revive/rule/unused-param.go b/vendor/github.com/mgechev/revive/rule/unused-param.go deleted file mode 100644 index 4b04ee916..000000000 --- a/vendor/github.com/mgechev/revive/rule/unused-param.go +++ /dev/null @@ -1,167 +0,0 @@ -package rule - -import ( - "fmt" - "go/ast" - "regexp" - "sync" - - "github.com/mgechev/revive/lint" -) - -// UnusedParamRule lints unused params in functions. -type UnusedParamRule struct { - configured bool - // regex to check if some name is valid for unused parameter, "^_$" by default - allowRegex *regexp.Regexp - failureMsg string - sync.Mutex -} - -func (r *UnusedParamRule) configure(args lint.Arguments) { - r.Lock() - defer r.Unlock() - - if r.configured { - return - } - r.configured = true - - // while by default args is an array, i think it's good to provide structures inside it by default, not arrays or primitives - // it's more compatible to JSON nature of configurations - var allowedRegexStr string - if len(args) == 0 { - allowedRegexStr = "^_$" - r.failureMsg = "parameter '%s' seems to be unused, consider removing or renaming it as _" - } else { - // Arguments = [{}] - options := args[0].(map[string]any) - // Arguments = [{allowedRegex="^_"}] - - if allowedRegexParam, ok := options["allowRegex"]; ok { - allowedRegexStr, ok = allowedRegexParam.(string) - if !ok { - panic(fmt.Errorf("error configuring %s rule: allowedRegex is not string but [%T]", r.Name(), allowedRegexParam)) - } - } - } - var err error - r.allowRegex, err = regexp.Compile(allowedRegexStr) - if err != nil { - panic(fmt.Errorf("error configuring %s rule: allowedRegex is not valid regex [%s]: %v", r.Name(), allowedRegexStr, err)) - } - - if r.failureMsg == "" { - r.failureMsg = "parameter '%s' seems to be unused, consider removing or renaming it to match " + r.allowRegex.String() - } -} - -// Apply applies the rule to given file. 
-func (r *UnusedParamRule) Apply(file *lint.File, args lint.Arguments) []lint.Failure { - r.configure(args) - var failures []lint.Failure - - onFailure := func(failure lint.Failure) { - failures = append(failures, failure) - } - w := lintUnusedParamRule{ - onFailure: onFailure, - allowRegex: r.allowRegex, - failureMsg: r.failureMsg, - } - - ast.Walk(w, file.AST) - - return failures -} - -// Name returns the rule name. -func (*UnusedParamRule) Name() string { - return "unused-parameter" -} - -type lintUnusedParamRule struct { - onFailure func(lint.Failure) - allowRegex *regexp.Regexp - failureMsg string -} - -func (w lintUnusedParamRule) Visit(node ast.Node) ast.Visitor { - var ( - funcType *ast.FuncType - funcBody *ast.BlockStmt - ) - switch n := node.(type) { - case *ast.FuncLit: - funcType = n.Type - funcBody = n.Body - case *ast.FuncDecl: - if n.Body == nil { - return nil // skip, is a function prototype - } - - funcType = n.Type - funcBody = n.Body - default: - return w // skip, not a function - } - - params := retrieveNamedParams(funcType.Params) - if len(params) < 1 { - return w // skip, func without parameters - } - - // inspect the func body looking for references to parameters - fselect := func(n ast.Node) bool { - ident, isAnID := n.(*ast.Ident) - - if !isAnID { - return false - } - - _, isAParam := params[ident.Obj] - if isAParam { - params[ident.Obj] = false // mark as used - } - - return false - } - _ = pick(funcBody, fselect) - - for _, p := range funcType.Params.List { - for _, n := range p.Names { - if w.allowRegex.FindStringIndex(n.Name) != nil { - continue - } - if params[n.Obj] { - w.onFailure(lint.Failure{ - Confidence: 1, - Node: n, - Category: "bad practice", - Failure: fmt.Sprintf(w.failureMsg, n.Name), - }) - } - } - } - - return w // full method body was inspected -} - -func retrieveNamedParams(params *ast.FieldList) map[*ast.Object]bool { - result := map[*ast.Object]bool{} - if params.List == nil { - return result - } - - for _, p := range params.List { - for _, n := range p.Names { - if n.Name == "_" { - continue - } - - result[n.Obj] = true - } - } - - return result -} diff --git a/vendor/github.com/mgechev/revive/rule/unused-receiver.go b/vendor/github.com/mgechev/revive/rule/unused-receiver.go deleted file mode 100644 index 715dba338..000000000 --- a/vendor/github.com/mgechev/revive/rule/unused-receiver.go +++ /dev/null @@ -1,133 +0,0 @@ -package rule - -import ( - "fmt" - "go/ast" - "regexp" - "sync" - - "github.com/mgechev/revive/lint" -) - -// UnusedReceiverRule lints unused params in functions. 
-type UnusedReceiverRule struct { - configured bool - // regex to check if some name is valid for unused parameter, "^_$" by default - allowRegex *regexp.Regexp - failureMsg string - sync.Mutex -} - -func (r *UnusedReceiverRule) configure(args lint.Arguments) { - r.Lock() - defer r.Unlock() - - if r.configured { - return - } - r.configured = true - - // while by default args is an array, i think it's good to provide structures inside it by default, not arrays or primitives - // it's more compatible to JSON nature of configurations - var allowedRegexStr string - if len(args) == 0 { - allowedRegexStr = "^_$" - r.failureMsg = "method receiver '%s' is not referenced in method's body, consider removing or renaming it as _" - } else { - // Arguments = [{}] - options := args[0].(map[string]any) - // Arguments = [{allowedRegex="^_"}] - - if allowedRegexParam, ok := options["allowRegex"]; ok { - allowedRegexStr, ok = allowedRegexParam.(string) - if !ok { - panic(fmt.Errorf("error configuring [unused-receiver] rule: allowedRegex is not string but [%T]", allowedRegexParam)) - } - } - } - var err error - r.allowRegex, err = regexp.Compile(allowedRegexStr) - if err != nil { - panic(fmt.Errorf("error configuring [unused-receiver] rule: allowedRegex is not valid regex [%s]: %v", allowedRegexStr, err)) - } - if r.failureMsg == "" { - r.failureMsg = "method receiver '%s' is not referenced in method's body, consider removing or renaming it to match " + r.allowRegex.String() - } -} - -// Apply applies the rule to given file. -func (r *UnusedReceiverRule) Apply(file *lint.File, args lint.Arguments) []lint.Failure { - r.configure(args) - var failures []lint.Failure - - onFailure := func(failure lint.Failure) { - failures = append(failures, failure) - } - - w := lintUnusedReceiverRule{ - onFailure: onFailure, - allowRegex: r.allowRegex, - failureMsg: r.failureMsg, - } - - ast.Walk(w, file.AST) - - return failures -} - -// Name returns the rule name. -func (*UnusedReceiverRule) Name() string { - return "unused-receiver" -} - -type lintUnusedReceiverRule struct { - onFailure func(lint.Failure) - allowRegex *regexp.Regexp - failureMsg string -} - -func (w lintUnusedReceiverRule) Visit(node ast.Node) ast.Visitor { - switch n := node.(type) { - case *ast.FuncDecl: - if n.Recv == nil { - return nil // skip this func decl, not a method - } - - rec := n.Recv.List[0] // safe to access only the first (unique) element of the list - if len(rec.Names) < 1 { - return nil // the receiver is anonymous: func (aType) Foo(...) ... 
- } - - recID := rec.Names[0] - if recID.Name == "_" { - return nil // the receiver is already named _ - } - - if w.allowRegex != nil && w.allowRegex.FindStringIndex(recID.Name) != nil { - return nil - } - - // inspect the func body looking for references to the receiver id - fselect := func(n ast.Node) bool { - ident, isAnID := n.(*ast.Ident) - - return isAnID && ident.Obj == recID.Obj - } - refs2recID := pick(n.Body, fselect) - - if len(refs2recID) > 0 { - return nil // the receiver is referenced in the func body - } - - w.onFailure(lint.Failure{ - Confidence: 1, - Node: recID, - Category: "bad practice", - Failure: fmt.Sprintf(w.failureMsg, recID.Name), - }) - - return nil // full method body already inspected - } - - return w -} diff --git a/vendor/github.com/mgechev/revive/rule/use-any.go b/vendor/github.com/mgechev/revive/rule/use-any.go deleted file mode 100644 index bdf3c936d..000000000 --- a/vendor/github.com/mgechev/revive/rule/use-any.go +++ /dev/null @@ -1,54 +0,0 @@ -package rule - -import ( - "go/ast" - - "github.com/mgechev/revive/lint" -) - -// UseAnyRule lints given else constructs. -type UseAnyRule struct{} - -// Apply applies the rule to given file. -func (*UseAnyRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure { - var failures []lint.Failure - - walker := lintUseAny{ - onFailure: func(failure lint.Failure) { - failures = append(failures, failure) - }, - } - fileAst := file.AST - ast.Walk(walker, fileAst) - - return failures -} - -// Name returns the rule name. -func (*UseAnyRule) Name() string { - return "use-any" -} - -type lintUseAny struct { - onFailure func(lint.Failure) -} - -func (w lintUseAny) Visit(n ast.Node) ast.Visitor { - it, ok := n.(*ast.InterfaceType) - if !ok { - return w - } - - if len(it.Methods.List) != 0 { - return w // it is not and empty interface - } - - w.onFailure(lint.Failure{ - Node: n, - Confidence: 1, - Category: "naming", - Failure: "since GO 1.18 'interface{}' can be replaced by 'any'", - }) - - return w -} diff --git a/vendor/github.com/mgechev/revive/rule/useless-break.go b/vendor/github.com/mgechev/revive/rule/useless-break.go deleted file mode 100644 index 8db20c9b8..000000000 --- a/vendor/github.com/mgechev/revive/rule/useless-break.go +++ /dev/null @@ -1,82 +0,0 @@ -package rule - -import ( - "go/ast" - "go/token" - - "github.com/mgechev/revive/lint" -) - -// UselessBreak lint rule. -type UselessBreak struct{} - -// Apply applies the rule to given file. -func (*UselessBreak) Apply(file *lint.File, _ lint.Arguments) []lint.Failure { - var failures []lint.Failure - - onFailure := func(failure lint.Failure) { - failures = append(failures, failure) - } - - astFile := file.AST - w := &lintUselessBreak{onFailure, false} - ast.Walk(w, astFile) - return failures -} - -// Name returns the rule name. 
-func (*UselessBreak) Name() string { - return "useless-break" -} - -type lintUselessBreak struct { - onFailure func(lint.Failure) - inLoopBody bool -} - -func (w *lintUselessBreak) Visit(node ast.Node) ast.Visitor { - switch v := node.(type) { - case *ast.ForStmt: - w.inLoopBody = true - ast.Walk(w, v.Body) - w.inLoopBody = false - return nil - case *ast.RangeStmt: - w.inLoopBody = true - ast.Walk(w, v.Body) - w.inLoopBody = false - return nil - case *ast.CommClause: - for _, n := range v.Body { - w.inspectCaseStatement(n) - } - return nil - case *ast.CaseClause: - for _, n := range v.Body { - w.inspectCaseStatement(n) - } - return nil - } - return w -} - -func (w *lintUselessBreak) inspectCaseStatement(n ast.Stmt) { - switch s := n.(type) { - case *ast.BranchStmt: - if s.Tok != token.BREAK { - return // not a break statement - } - if s.Label != nil { - return // labeled break statement, usually affects a nesting loop - } - msg := "useless break in case clause" - if w.inLoopBody { - msg += " (WARN: this break statement affects this switch or select statement and not the loop enclosing it)" - } - w.onFailure(lint.Failure{ - Confidence: 1, - Node: s, - Failure: msg, - }) - } -} diff --git a/vendor/github.com/mgechev/revive/rule/utils.go b/vendor/github.com/mgechev/revive/rule/utils.go deleted file mode 100644 index 5778e7696..000000000 --- a/vendor/github.com/mgechev/revive/rule/utils.go +++ /dev/null @@ -1,167 +0,0 @@ -package rule - -import ( - "bytes" - "fmt" - "go/ast" - "go/printer" - "go/token" - "go/types" - "regexp" - "strings" - - "github.com/mgechev/revive/lint" -) - -// isBlank returns whether id is the blank identifier "_". -// If id == nil, the answer is false. -func isBlank(id *ast.Ident) bool { return id != nil && id.Name == "_" } - -var commonMethods = map[string]bool{ - "Error": true, - "Read": true, - "ServeHTTP": true, - "String": true, - "Write": true, - "Unwrap": true, -} - -var knownNameExceptions = map[string]bool{ - "LastInsertId": true, // must match database/sql - "kWh": true, -} - -func isCgoExported(f *ast.FuncDecl) bool { - if f.Recv != nil || f.Doc == nil { - return false - } - - cgoExport := regexp.MustCompile(fmt.Sprintf("(?m)^//export %s$", regexp.QuoteMeta(f.Name.Name))) - for _, c := range f.Doc.List { - if cgoExport.MatchString(c.Text) { - return true - } - } - return false -} - -var allCapsRE = regexp.MustCompile(`^[A-Z0-9_]+$`) - -func isIdent(expr ast.Expr, ident string) bool { - id, ok := expr.(*ast.Ident) - return ok && id.Name == ident -} - -var zeroLiteral = map[string]bool{ - "false": true, // bool - // runes - `'\x00'`: true, - `'\000'`: true, - // strings - `""`: true, - "``": true, - // numerics - "0": true, - "0.": true, - "0.0": true, - "0i": true, -} - -func validType(t types.Type) bool { - return t != nil && - t != types.Typ[types.Invalid] && - !strings.Contains(t.String(), "invalid type") // good but not foolproof -} - -// isPkgDot checks if the expression is . -func isPkgDot(expr ast.Expr, pkg, name string) bool { - sel, ok := expr.(*ast.SelectorExpr) - return ok && isIdent(sel.X, pkg) && isIdent(sel.Sel, name) -} - -func srcLine(src []byte, p token.Position) string { - // Run to end of line in both directions if not at line start/end. - lo, hi := p.Offset, p.Offset+1 - for lo > 0 && src[lo-1] != '\n' { - lo-- - } - for hi < len(src) && src[hi-1] != '\n' { - hi++ - } - return string(src[lo:hi]) -} - -// pick yields a list of nodes by picking them from a sub-ast with root node n. 
-// Nodes are selected by applying the fselect function -func pick(n ast.Node, fselect func(n ast.Node) bool) []ast.Node { - var result []ast.Node - - if n == nil { - return result - } - - onSelect := func(n ast.Node) { - result = append(result, n) - } - p := picker{fselect: fselect, onSelect: onSelect} - ast.Walk(p, n) - return result -} - -type picker struct { - fselect func(n ast.Node) bool - onSelect func(n ast.Node) -} - -func (p picker) Visit(node ast.Node) ast.Visitor { - if p.fselect == nil { - return nil - } - - if p.fselect(node) { - p.onSelect(node) - } - - return p -} - -// isBoolOp returns true if the given token corresponds to -// a bool operator -func isBoolOp(t token.Token) bool { - switch t { - case token.LAND, token.LOR, token.EQL, token.NEQ: - return true - } - - return false -} - -const ( - trueName = "true" - falseName = "false" -) - -func isExprABooleanLit(n ast.Node) (lexeme string, ok bool) { - oper, ok := n.(*ast.Ident) - - if !ok { - return "", false - } - - return oper.Name, (oper.Name == trueName || oper.Name == falseName) -} - -// gofmt returns a string representation of an AST subtree. -func gofmt(x any) string { - buf := bytes.Buffer{} - fs := token.NewFileSet() - printer.Fprint(&buf, fs, x) - return buf.String() -} - -// checkNumberOfArguments fails if the given number of arguments is not, at least, the expected one -func checkNumberOfArguments(expected int, args lint.Arguments, ruleName string) { - if len(args) < expected { - panic(fmt.Sprintf("not enough arguments for %s rule, expected %d, got %d. Please check the rule's documentation", ruleName, expected, len(args))) - } -} diff --git a/vendor/github.com/mgechev/revive/rule/var-declarations.go b/vendor/github.com/mgechev/revive/rule/var-declarations.go deleted file mode 100644 index a15ff1eb4..000000000 --- a/vendor/github.com/mgechev/revive/rule/var-declarations.go +++ /dev/null @@ -1,120 +0,0 @@ -package rule - -import ( - "fmt" - "go/ast" - "go/token" - "go/types" - - "github.com/mgechev/revive/lint" -) - -// VarDeclarationsRule lints given else constructs. -type VarDeclarationsRule struct{} - -// Apply applies the rule to given file. -func (*VarDeclarationsRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure { - var failures []lint.Failure - - fileAst := file.AST - walker := &lintVarDeclarations{ - file: file, - fileAst: fileAst, - onFailure: func(failure lint.Failure) { - failures = append(failures, failure) - }, - } - - file.Pkg.TypeCheck() - ast.Walk(walker, fileAst) - - return failures -} - -// Name returns the rule name. -func (*VarDeclarationsRule) Name() string { - return "var-declaration" -} - -type lintVarDeclarations struct { - fileAst *ast.File - file *lint.File - lastGen *ast.GenDecl - onFailure func(lint.Failure) -} - -func (w *lintVarDeclarations) Visit(node ast.Node) ast.Visitor { - switch v := node.(type) { - case *ast.GenDecl: - if v.Tok != token.CONST && v.Tok != token.VAR { - return nil - } - w.lastGen = v - return w - case *ast.ValueSpec: - if w.lastGen.Tok == token.CONST { - return nil - } - if len(v.Names) > 1 || v.Type == nil || len(v.Values) == 0 { - return nil - } - rhs := v.Values[0] - // An underscore var appears in a common idiom for compile-time interface satisfaction, - // as in "var _ Interface = (*Concrete)(nil)". - if isIdent(v.Names[0], "_") { - return nil - } - // If the RHS is a zero value, suggest dropping it. 
- zero := false - if lit, ok := rhs.(*ast.BasicLit); ok { - zero = zeroLiteral[lit.Value] - } else if isIdent(rhs, "nil") { - zero = true - } - if zero { - w.onFailure(lint.Failure{ - Confidence: 0.9, - Node: rhs, - Category: "zero-value", - Failure: fmt.Sprintf("should drop = %s from declaration of var %s; it is the zero value", w.file.Render(rhs), v.Names[0]), - }) - return nil - } - lhsTyp := w.file.Pkg.TypeOf(v.Type) - rhsTyp := w.file.Pkg.TypeOf(rhs) - - if !validType(lhsTyp) || !validType(rhsTyp) { - // Type checking failed (often due to missing imports). - return nil - } - - if !types.Identical(lhsTyp, rhsTyp) { - // Assignment to a different type is not redundant. - return nil - } - - // The next three conditions are for suppressing the warning in situations - // where we were unable to typecheck. - - // If the LHS type is an interface, don't warn, since it is probably a - // concrete type on the RHS. Note that our feeble lexical check here - // will only pick up interface{} and other literal interface types; - // that covers most of the cases we care to exclude right now. - if _, ok := v.Type.(*ast.InterfaceType); ok { - return nil - } - // If the RHS is an untyped const, only warn if the LHS type is its default type. - if defType, ok := w.file.IsUntypedConst(rhs); ok && !isIdent(v.Type, defType) { - return nil - } - - w.onFailure(lint.Failure{ - Category: "type-inference", - Confidence: 0.8, - Node: v.Type, - Failure: fmt.Sprintf("should omit type %s from declaration of var %s; it will be inferred from the right-hand side", w.file.Render(v.Type), v.Names[0]), - }) - return nil - } - return w -} diff --git a/vendor/github.com/mgechev/revive/rule/var-naming.go b/vendor/github.com/mgechev/revive/rule/var-naming.go deleted file mode 100644 index e91c22dc2..000000000 --- a/vendor/github.com/mgechev/revive/rule/var-naming.go +++ /dev/null @@ -1,281 +0,0 @@ -package rule - -import ( - "fmt" - "go/ast" - "go/token" - "regexp" - "strings" - "sync" - - "github.com/mgechev/revive/lint" -) - -var anyCapsRE = regexp.MustCompile(`[A-Z]`) - -// regexp for constant names like `SOME_CONST`, `SOME_CONST_2`, `X123_3`, `_SOME_PRIVATE_CONST` (#851, #865) -var upperCaseConstRE = regexp.MustCompile(`^_?[A-Z][A-Z\d]*(_[A-Z\d]+)*$`) - -// VarNamingRule lints given else constructs. -type VarNamingRule struct { - configured bool - allowlist []string - blocklist []string - upperCaseConst bool // if true - allows to use UPPER_SOME_NAMES for constants - skipPackageNameChecks bool - sync.Mutex -} - -func (r *VarNamingRule) configure(arguments lint.Arguments) { - r.Lock() - defer r.Unlock() - if r.configured { - return - } - - r.configured = true - if len(arguments) >= 1 { - r.allowlist = getList(arguments[0], "allowlist") - } - - if len(arguments) >= 2 { - r.blocklist = getList(arguments[1], "blocklist") - } - - if len(arguments) >= 3 { - // not pretty code because should keep compatibility with TOML (no mixed array types) and new map parameters - thirdArgument := arguments[2] - asSlice, ok := thirdArgument.([]any) - if !ok { - panic(fmt.Sprintf("Invalid third argument to the var-naming rule. Expecting a %s of type slice, got %T", "options", arguments[2])) - } - if len(asSlice) != 1 { - panic(fmt.Sprintf("Invalid third argument to the var-naming rule. Expecting a %s of type slice, of len==1, but %d", "options", len(asSlice))) - } - args, ok := asSlice[0].(map[string]any) - if !ok { - panic(fmt.Sprintf("Invalid third argument to the var-naming rule. 
Expecting a %s of type slice, of len==1, with map, but %T", "options", asSlice[0])) - } - r.upperCaseConst = fmt.Sprint(args["upperCaseConst"]) == "true" - r.skipPackageNameChecks = fmt.Sprint(args["skipPackageNameChecks"]) == "true" - } -} - -func (r *VarNamingRule) applyPackageCheckRules(walker *lintNames) { - // Package names need slightly different handling than other names. - if strings.Contains(walker.fileAst.Name.Name, "_") && !strings.HasSuffix(walker.fileAst.Name.Name, "_test") { - walker.onFailure(lint.Failure{ - Failure: "don't use an underscore in package name", - Confidence: 1, - Node: walker.fileAst.Name, - Category: "naming", - }) - } - if anyCapsRE.MatchString(walker.fileAst.Name.Name) { - walker.onFailure(lint.Failure{ - Failure: fmt.Sprintf("don't use MixedCaps in package name; %s should be %s", walker.fileAst.Name.Name, strings.ToLower(walker.fileAst.Name.Name)), - Confidence: 1, - Node: walker.fileAst.Name, - Category: "naming", - }) - } - -} - -// Apply applies the rule to given file. -func (r *VarNamingRule) Apply(file *lint.File, arguments lint.Arguments) []lint.Failure { - r.configure(arguments) - - var failures []lint.Failure - - fileAst := file.AST - - walker := lintNames{ - file: file, - fileAst: fileAst, - allowlist: r.allowlist, - blocklist: r.blocklist, - onFailure: func(failure lint.Failure) { - failures = append(failures, failure) - }, - upperCaseConst: r.upperCaseConst, - } - - if !r.skipPackageNameChecks { - r.applyPackageCheckRules(&walker) - } - - ast.Walk(&walker, fileAst) - - return failures -} - -// Name returns the rule name. -func (*VarNamingRule) Name() string { - return "var-naming" -} - -func (w *lintNames) checkList(fl *ast.FieldList, thing string) { - if fl == nil { - return - } - for _, f := range fl.List { - for _, id := range f.Names { - w.check(id, thing) - } - } -} - -func (w *lintNames) check(id *ast.Ident, thing string) { - if id.Name == "_" { - return - } - if knownNameExceptions[id.Name] { - return - } - - // #851 upperCaseConst support - // if it's const - if thing == token.CONST.String() && w.upperCaseConst && upperCaseConstRE.MatchString(id.Name) { - return - } - - // Handle two common styles from other languages that don't belong in Go. 
- if len(id.Name) >= 5 && allCapsRE.MatchString(id.Name) && strings.Contains(id.Name, "_") { - w.onFailure(lint.Failure{ - Failure: "don't use ALL_CAPS in Go names; use CamelCase", - Confidence: 0.8, - Node: id, - Category: "naming", - }) - return - } - - should := lint.Name(id.Name, w.allowlist, w.blocklist) - if id.Name == should { - return - } - - if len(id.Name) > 2 && strings.Contains(id.Name[1:], "_") { - w.onFailure(lint.Failure{ - Failure: fmt.Sprintf("don't use underscores in Go names; %s %s should be %s", thing, id.Name, should), - Confidence: 0.9, - Node: id, - Category: "naming", - }) - return - } - w.onFailure(lint.Failure{ - Failure: fmt.Sprintf("%s %s should be %s", thing, id.Name, should), - Confidence: 0.8, - Node: id, - Category: "naming", - }) -} - -type lintNames struct { - file *lint.File - fileAst *ast.File - onFailure func(lint.Failure) - allowlist []string - blocklist []string - upperCaseConst bool -} - -func (w *lintNames) Visit(n ast.Node) ast.Visitor { - switch v := n.(type) { - case *ast.AssignStmt: - if v.Tok == token.ASSIGN { - return w - } - for _, exp := range v.Lhs { - if id, ok := exp.(*ast.Ident); ok { - w.check(id, "var") - } - } - case *ast.FuncDecl: - funcName := v.Name.Name - if w.file.IsTest() && - (strings.HasPrefix(funcName, "Example") || - strings.HasPrefix(funcName, "Test") || - strings.HasPrefix(funcName, "Benchmark") || - strings.HasPrefix(funcName, "Fuzz")) { - return w - } - - thing := "func" - if v.Recv != nil { - thing = "method" - } - - // Exclude naming warnings for functions that are exported to C but - // not exported in the Go API. - // See https://github.com/golang/lint/issues/144. - if ast.IsExported(v.Name.Name) || !isCgoExported(v) { - w.check(v.Name, thing) - } - - w.checkList(v.Type.Params, thing+" parameter") - w.checkList(v.Type.Results, thing+" result") - case *ast.GenDecl: - if v.Tok == token.IMPORT { - return w - } - - thing := v.Tok.String() - for _, spec := range v.Specs { - switch s := spec.(type) { - case *ast.TypeSpec: - w.check(s.Name, thing) - case *ast.ValueSpec: - for _, id := range s.Names { - w.check(id, thing) - } - } - } - case *ast.InterfaceType: - // Do not check interface method names. - // They are often constrained by the method names of concrete types. - for _, x := range v.Methods.List { - ft, ok := x.Type.(*ast.FuncType) - if !ok { // might be an embedded interface name - continue - } - w.checkList(ft.Params, "interface method parameter") - w.checkList(ft.Results, "interface method result") - } - case *ast.RangeStmt: - if v.Tok == token.ASSIGN { - return w - } - if id, ok := v.Key.(*ast.Ident); ok { - w.check(id, "range var") - } - if id, ok := v.Value.(*ast.Ident); ok { - w.check(id, "range var") - } - case *ast.StructType: - for _, f := range v.Fields.List { - for _, id := range f.Names { - w.check(id, "struct field") - } - } - } - return w -} - -func getList(arg any, argName string) []string { - temp, ok := arg.([]any) - if !ok { - panic(fmt.Sprintf("Invalid argument to the var-naming rule. Expecting a %s of type slice with initialisms, got %T", argName, arg)) - } - var list []string - for _, v := range temp { - if val, ok := v.(string); ok { - list = append(list, val) - } else { - panic(fmt.Sprintf("Invalid %s values of the var-naming rule. 
Expecting slice of strings but got element of type %T", val, arg)) - } - } - return list -} diff --git a/vendor/github.com/mgechev/revive/rule/waitgroup-by-value.go b/vendor/github.com/mgechev/revive/rule/waitgroup-by-value.go deleted file mode 100644 index 98644f41c..000000000 --- a/vendor/github.com/mgechev/revive/rule/waitgroup-by-value.go +++ /dev/null @@ -1,66 +0,0 @@ -package rule - -import ( - "go/ast" - - "github.com/mgechev/revive/lint" -) - -// WaitGroupByValueRule lints sync.WaitGroup passed by copy in functions. -type WaitGroupByValueRule struct{} - -// Apply applies the rule to given file. -func (*WaitGroupByValueRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure { - var failures []lint.Failure - - onFailure := func(failure lint.Failure) { - failures = append(failures, failure) - } - - w := lintWaitGroupByValueRule{onFailure: onFailure} - ast.Walk(w, file.AST) - return failures -} - -// Name returns the rule name. -func (*WaitGroupByValueRule) Name() string { - return "waitgroup-by-value" -} - -type lintWaitGroupByValueRule struct { - onFailure func(lint.Failure) -} - -func (w lintWaitGroupByValueRule) Visit(node ast.Node) ast.Visitor { - // look for function declarations - fd, ok := node.(*ast.FuncDecl) - if !ok { - return w - } - - // Check all function's parameters - for _, field := range fd.Type.Params.List { - if !w.isWaitGroup(field.Type) { - continue - } - - w.onFailure(lint.Failure{ - Confidence: 1, - Node: field, - Failure: "sync.WaitGroup passed by value, the function will get a copy of the original one", - }) - } - - return nil -} - -func (lintWaitGroupByValueRule) isWaitGroup(ft ast.Expr) bool { - se, ok := ft.(*ast.SelectorExpr) - if !ok { - return false - } - - x, _ := se.X.(*ast.Ident) - sel := se.Sel.Name - return x.Name == "sync" && sel == "WaitGroup" -} diff --git a/vendor/github.com/mitchellh/go-homedir/LICENSE b/vendor/github.com/mitchellh/go-homedir/LICENSE deleted file mode 100644 index f9c841a51..000000000 --- a/vendor/github.com/mitchellh/go-homedir/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -The MIT License (MIT) - -Copyright (c) 2013 Mitchell Hashimoto - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. 
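Editorial aside (not part of the vendored diff): the waitgroup-by-value rule deleted above reports any parameter of type sync.WaitGroup that is passed by value, because the callee then works on a copy and its Done() calls never release the caller's Wait(). A minimal sketch of the pattern the rule enforces; the worker/main names are illustrative only:

```go
package main

import "sync"

// Passing *sync.WaitGroup keeps caller and callee on the same counter.
// Taking sync.WaitGroup by value instead would give this goroutine a copy,
// so Done() on the copy would never unblock the Wait() in main -- the exact
// situation the removed waitgroup-by-value rule flags.
func worker(wg *sync.WaitGroup, id int) {
	defer wg.Done()
	_ = id // placeholder for real work
}

func main() {
	var wg sync.WaitGroup
	for i := 0; i < 3; i++ {
		wg.Add(1)
		go worker(&wg, i)
	}
	wg.Wait()
}
```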
diff --git a/vendor/github.com/mitchellh/go-homedir/README.md b/vendor/github.com/mitchellh/go-homedir/README.md deleted file mode 100644 index d70706d5b..000000000 --- a/vendor/github.com/mitchellh/go-homedir/README.md +++ /dev/null @@ -1,14 +0,0 @@ -# go-homedir - -This is a Go library for detecting the user's home directory without -the use of cgo, so the library can be used in cross-compilation environments. - -Usage is incredibly simple, just call `homedir.Dir()` to get the home directory -for a user, and `homedir.Expand()` to expand the `~` in a path to the home -directory. - -**Why not just use `os/user`?** The built-in `os/user` package requires -cgo on Darwin systems. This means that any Go code that uses that package -cannot cross compile. But 99% of the time the use for `os/user` is just to -retrieve the home directory, which we can do for the current user without -cgo. This library does that, enabling cross-compilation. diff --git a/vendor/github.com/mitchellh/go-homedir/homedir.go b/vendor/github.com/mitchellh/go-homedir/homedir.go deleted file mode 100644 index 25378537e..000000000 --- a/vendor/github.com/mitchellh/go-homedir/homedir.go +++ /dev/null @@ -1,167 +0,0 @@ -package homedir - -import ( - "bytes" - "errors" - "os" - "os/exec" - "path/filepath" - "runtime" - "strconv" - "strings" - "sync" -) - -// DisableCache will disable caching of the home directory. Caching is enabled -// by default. -var DisableCache bool - -var homedirCache string -var cacheLock sync.RWMutex - -// Dir returns the home directory for the executing user. -// -// This uses an OS-specific method for discovering the home directory. -// An error is returned if a home directory cannot be detected. -func Dir() (string, error) { - if !DisableCache { - cacheLock.RLock() - cached := homedirCache - cacheLock.RUnlock() - if cached != "" { - return cached, nil - } - } - - cacheLock.Lock() - defer cacheLock.Unlock() - - var result string - var err error - if runtime.GOOS == "windows" { - result, err = dirWindows() - } else { - // Unix-like system, so just assume Unix - result, err = dirUnix() - } - - if err != nil { - return "", err - } - homedirCache = result - return result, nil -} - -// Expand expands the path to include the home directory if the path -// is prefixed with `~`. If it isn't prefixed with `~`, the path is -// returned as-is. -func Expand(path string) (string, error) { - if len(path) == 0 { - return path, nil - } - - if path[0] != '~' { - return path, nil - } - - if len(path) > 1 && path[1] != '/' && path[1] != '\\' { - return "", errors.New("cannot expand user-specific home dir") - } - - dir, err := Dir() - if err != nil { - return "", err - } - - return filepath.Join(dir, path[1:]), nil -} - -// Reset clears the cache, forcing the next call to Dir to re-detect -// the home directory. This generally never has to be called, but can be -// useful in tests if you're modifying the home directory via the HOME -// env var or something. -func Reset() { - cacheLock.Lock() - defer cacheLock.Unlock() - homedirCache = "" -} - -func dirUnix() (string, error) { - homeEnv := "HOME" - if runtime.GOOS == "plan9" { - // On plan9, env vars are lowercase. - homeEnv = "home" - } - - // First prefer the HOME environmental variable - if home := os.Getenv(homeEnv); home != "" { - return home, nil - } - - var stdout bytes.Buffer - - // If that fails, try OS specific commands - if runtime.GOOS == "darwin" { - cmd := exec.Command("sh", "-c", `dscl -q . 
-read /Users/"$(whoami)" NFSHomeDirectory | sed 's/^[^ ]*: //'`) - cmd.Stdout = &stdout - if err := cmd.Run(); err == nil { - result := strings.TrimSpace(stdout.String()) - if result != "" { - return result, nil - } - } - } else { - cmd := exec.Command("getent", "passwd", strconv.Itoa(os.Getuid())) - cmd.Stdout = &stdout - if err := cmd.Run(); err != nil { - // If the error is ErrNotFound, we ignore it. Otherwise, return it. - if err != exec.ErrNotFound { - return "", err - } - } else { - if passwd := strings.TrimSpace(stdout.String()); passwd != "" { - // username:password:uid:gid:gecos:home:shell - passwdParts := strings.SplitN(passwd, ":", 7) - if len(passwdParts) > 5 { - return passwdParts[5], nil - } - } - } - } - - // If all else fails, try the shell - stdout.Reset() - cmd := exec.Command("sh", "-c", "cd && pwd") - cmd.Stdout = &stdout - if err := cmd.Run(); err != nil { - return "", err - } - - result := strings.TrimSpace(stdout.String()) - if result == "" { - return "", errors.New("blank output when reading home directory") - } - - return result, nil -} - -func dirWindows() (string, error) { - // First prefer the HOME environmental variable - if home := os.Getenv("HOME"); home != "" { - return home, nil - } - - // Prefer standard environment variable USERPROFILE - if home := os.Getenv("USERPROFILE"); home != "" { - return home, nil - } - - drive := os.Getenv("HOMEDRIVE") - path := os.Getenv("HOMEPATH") - home := drive + path - if drive == "" || path == "" { - return "", errors.New("HOMEDRIVE, HOMEPATH, or USERPROFILE are blank") - } - - return home, nil -} diff --git a/vendor/github.com/mitchellh/mapstructure/CHANGELOG.md b/vendor/github.com/mitchellh/mapstructure/CHANGELOG.md deleted file mode 100644 index c75823490..000000000 --- a/vendor/github.com/mitchellh/mapstructure/CHANGELOG.md +++ /dev/null @@ -1,96 +0,0 @@ -## 1.5.0 - -* New option `IgnoreUntaggedFields` to ignore decoding to any fields - without `mapstructure` (or the configured tag name) set [GH-277] -* New option `ErrorUnset` which makes it an error if any fields - in a target struct are not set by the decoding process. [GH-225] -* New function `OrComposeDecodeHookFunc` to help compose decode hooks. [GH-240] -* Decoding to slice from array no longer crashes [GH-265] -* Decode nested struct pointers to map [GH-271] -* Fix issue where `,squash` was ignored if `Squash` option was set. [GH-280] -* Fix issue where fields with `,omitempty` would sometimes decode - into a map with an empty string key [GH-281] - -## 1.4.3 - -* Fix cases where `json.Number` didn't decode properly [GH-261] - -## 1.4.2 - -* Custom name matchers to support any sort of casing, formatting, etc. for - field names. [GH-250] -* Fix possible panic in ComposeDecodeHookFunc [GH-251] - -## 1.4.1 - -* Fix regression where `*time.Time` value would be set to empty and not be sent - to decode hooks properly [GH-232] - -## 1.4.0 - -* A new decode hook type `DecodeHookFuncValue` has been added that has - access to the full values. [GH-183] -* Squash is now supported with embedded fields that are struct pointers [GH-205] -* Empty strings will convert to 0 for all numeric types when weakly decoding [GH-206] - -## 1.3.3 - -* Decoding maps from maps creates a settable value for decode hooks [GH-203] - -## 1.3.2 - -* Decode into interface type with a struct value is supported [GH-187] - -## 1.3.1 - -* Squash should only squash embedded structs. [GH-194] - -## 1.3.0 - -* Added `",omitempty"` support. 
This will ignore zero values in the source - structure when encoding. [GH-145] - -## 1.2.3 - -* Fix duplicate entries in Keys list with pointer values. [GH-185] - -## 1.2.2 - -* Do not add unsettable (unexported) values to the unused metadata key - or "remain" value. [GH-150] - -## 1.2.1 - -* Go modules checksum mismatch fix - -## 1.2.0 - -* Added support to capture unused values in a field using the `",remain"` value - in the mapstructure tag. There is an example to showcase usage. -* Added `DecoderConfig` option to always squash embedded structs -* `json.Number` can decode into `uint` types -* Empty slices are preserved and not replaced with nil slices -* Fix panic that can occur in when decoding a map into a nil slice of structs -* Improved package documentation for godoc - -## 1.1.2 - -* Fix error when decode hook decodes interface implementation into interface - type. [GH-140] - -## 1.1.1 - -* Fix panic that can happen in `decodePtr` - -## 1.1.0 - -* Added `StringToIPHookFunc` to convert `string` to `net.IP` and `net.IPNet` [GH-133] -* Support struct to struct decoding [GH-137] -* If source map value is nil, then destination map value is nil (instead of empty) -* If source slice value is nil, then destination slice value is nil (instead of empty) -* If source pointer is nil, then destination pointer is set to nil (instead of - allocated zero value of type) - -## 1.0.0 - -* Initial tagged stable release. diff --git a/vendor/github.com/mitchellh/mapstructure/LICENSE b/vendor/github.com/mitchellh/mapstructure/LICENSE deleted file mode 100644 index f9c841a51..000000000 --- a/vendor/github.com/mitchellh/mapstructure/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -The MIT License (MIT) - -Copyright (c) 2013 Mitchell Hashimoto - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. diff --git a/vendor/github.com/mitchellh/mapstructure/README.md b/vendor/github.com/mitchellh/mapstructure/README.md deleted file mode 100644 index 0018dc7d9..000000000 --- a/vendor/github.com/mitchellh/mapstructure/README.md +++ /dev/null @@ -1,46 +0,0 @@ -# mapstructure [![Godoc](https://godoc.org/github.com/mitchellh/mapstructure?status.svg)](https://godoc.org/github.com/mitchellh/mapstructure) - -mapstructure is a Go library for decoding generic map values to structures -and vice versa, while providing helpful error handling. - -This library is most useful when decoding values from some data stream (JSON, -Gob, etc.) where you don't _quite_ know the structure of the underlying data -until you read a part of it. 
You can therefore read a `map[string]interface{}` -and use this library to decode it into the proper underlying native Go -structure. - -## Installation - -Standard `go get`: - -``` -$ go get github.com/mitchellh/mapstructure -``` - -## Usage & Example - -For usage and examples see the [Godoc](http://godoc.org/github.com/mitchellh/mapstructure). - -The `Decode` function has examples associated with it there. - -## But Why?! - -Go offers fantastic standard libraries for decoding formats such as JSON. -The standard method is to have a struct pre-created, and populate that struct -from the bytes of the encoded format. This is great, but the problem is if -you have configuration or an encoding that changes slightly depending on -specific fields. For example, consider this JSON: - -```json -{ - "type": "person", - "name": "Mitchell" -} -``` - -Perhaps we can't populate a specific structure without first reading -the "type" field from the JSON. We could always do two passes over the -decoding of the JSON (reading the "type" first, and the rest later). -However, it is much simpler to just decode this into a `map[string]interface{}` -structure, read the "type" key, then use something like this library -to decode it into the proper structure. diff --git a/vendor/github.com/mitchellh/mapstructure/decode_hooks.go b/vendor/github.com/mitchellh/mapstructure/decode_hooks.go deleted file mode 100644 index 3a754ca72..000000000 --- a/vendor/github.com/mitchellh/mapstructure/decode_hooks.go +++ /dev/null @@ -1,279 +0,0 @@ -package mapstructure - -import ( - "encoding" - "errors" - "fmt" - "net" - "reflect" - "strconv" - "strings" - "time" -) - -// typedDecodeHook takes a raw DecodeHookFunc (an interface{}) and turns -// it into the proper DecodeHookFunc type, such as DecodeHookFuncType. -func typedDecodeHook(h DecodeHookFunc) DecodeHookFunc { - // Create variables here so we can reference them with the reflect pkg - var f1 DecodeHookFuncType - var f2 DecodeHookFuncKind - var f3 DecodeHookFuncValue - - // Fill in the variables into this interface and the rest is done - // automatically using the reflect package. - potential := []interface{}{f1, f2, f3} - - v := reflect.ValueOf(h) - vt := v.Type() - for _, raw := range potential { - pt := reflect.ValueOf(raw).Type() - if vt.ConvertibleTo(pt) { - return v.Convert(pt).Interface() - } - } - - return nil -} - -// DecodeHookExec executes the given decode hook. This should be used -// since it'll naturally degrade to the older backwards compatible DecodeHookFunc -// that took reflect.Kind instead of reflect.Type. -func DecodeHookExec( - raw DecodeHookFunc, - from reflect.Value, to reflect.Value) (interface{}, error) { - - switch f := typedDecodeHook(raw).(type) { - case DecodeHookFuncType: - return f(from.Type(), to.Type(), from.Interface()) - case DecodeHookFuncKind: - return f(from.Kind(), to.Kind(), from.Interface()) - case DecodeHookFuncValue: - return f(from, to) - default: - return nil, errors.New("invalid decode hook signature") - } -} - -// ComposeDecodeHookFunc creates a single DecodeHookFunc that -// automatically composes multiple DecodeHookFuncs. -// -// The composed funcs are called in order, with the result of the -// previous transformation. 
-func ComposeDecodeHookFunc(fs ...DecodeHookFunc) DecodeHookFunc { - return func(f reflect.Value, t reflect.Value) (interface{}, error) { - var err error - data := f.Interface() - - newFrom := f - for _, f1 := range fs { - data, err = DecodeHookExec(f1, newFrom, t) - if err != nil { - return nil, err - } - newFrom = reflect.ValueOf(data) - } - - return data, nil - } -} - -// OrComposeDecodeHookFunc executes all input hook functions until one of them returns no error. In that case its value is returned. -// If all hooks return an error, OrComposeDecodeHookFunc returns an error concatenating all error messages. -func OrComposeDecodeHookFunc(ff ...DecodeHookFunc) DecodeHookFunc { - return func(a, b reflect.Value) (interface{}, error) { - var allErrs string - var out interface{} - var err error - - for _, f := range ff { - out, err = DecodeHookExec(f, a, b) - if err != nil { - allErrs += err.Error() + "\n" - continue - } - - return out, nil - } - - return nil, errors.New(allErrs) - } -} - -// StringToSliceHookFunc returns a DecodeHookFunc that converts -// string to []string by splitting on the given sep. -func StringToSliceHookFunc(sep string) DecodeHookFunc { - return func( - f reflect.Kind, - t reflect.Kind, - data interface{}) (interface{}, error) { - if f != reflect.String || t != reflect.Slice { - return data, nil - } - - raw := data.(string) - if raw == "" { - return []string{}, nil - } - - return strings.Split(raw, sep), nil - } -} - -// StringToTimeDurationHookFunc returns a DecodeHookFunc that converts -// strings to time.Duration. -func StringToTimeDurationHookFunc() DecodeHookFunc { - return func( - f reflect.Type, - t reflect.Type, - data interface{}) (interface{}, error) { - if f.Kind() != reflect.String { - return data, nil - } - if t != reflect.TypeOf(time.Duration(5)) { - return data, nil - } - - // Convert it by parsing - return time.ParseDuration(data.(string)) - } -} - -// StringToIPHookFunc returns a DecodeHookFunc that converts -// strings to net.IP -func StringToIPHookFunc() DecodeHookFunc { - return func( - f reflect.Type, - t reflect.Type, - data interface{}) (interface{}, error) { - if f.Kind() != reflect.String { - return data, nil - } - if t != reflect.TypeOf(net.IP{}) { - return data, nil - } - - // Convert it by parsing - ip := net.ParseIP(data.(string)) - if ip == nil { - return net.IP{}, fmt.Errorf("failed parsing ip %v", data) - } - - return ip, nil - } -} - -// StringToIPNetHookFunc returns a DecodeHookFunc that converts -// strings to net.IPNet -func StringToIPNetHookFunc() DecodeHookFunc { - return func( - f reflect.Type, - t reflect.Type, - data interface{}) (interface{}, error) { - if f.Kind() != reflect.String { - return data, nil - } - if t != reflect.TypeOf(net.IPNet{}) { - return data, nil - } - - // Convert it by parsing - _, net, err := net.ParseCIDR(data.(string)) - return net, err - } -} - -// StringToTimeHookFunc returns a DecodeHookFunc that converts -// strings to time.Time. -func StringToTimeHookFunc(layout string) DecodeHookFunc { - return func( - f reflect.Type, - t reflect.Type, - data interface{}) (interface{}, error) { - if f.Kind() != reflect.String { - return data, nil - } - if t != reflect.TypeOf(time.Time{}) { - return data, nil - } - - // Convert it by parsing - return time.Parse(layout, data.(string)) - } -} - -// WeaklyTypedHook is a DecodeHookFunc which adds support for weak typing to -// the decoder. -// -// Note that this is significantly different from the WeaklyTypedInput option -// of the DecoderConfig. 
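Editorial aside (not part of the vendored diff): the decode_hooks.go removed above exposes ComposeDecodeHookFunc plus the StringTo*HookFunc constructors, which are meant to be plugged into DecoderConfig.DecodeHook. A small hedged sketch of that wiring, using only identifiers that appear in the deleted source; the Config type, its field names, and the input map are hypothetical:

```go
package main

import (
	"fmt"
	"time"

	"github.com/mitchellh/mapstructure"
)

// Config is a hypothetical target type for this sketch.
type Config struct {
	Timeout time.Duration `mapstructure:"timeout"`
	Tags    []string      `mapstructure:"tags"`
}

func main() {
	input := map[string]interface{}{
		"timeout": "1m30s",
		"tags":    "a,b,c",
	}

	var out Config
	decoder, err := mapstructure.NewDecoder(&mapstructure.DecoderConfig{
		// Hooks run in order; each one sees the output of the previous hook.
		DecodeHook: mapstructure.ComposeDecodeHookFunc(
			mapstructure.StringToTimeDurationHookFunc(),
			mapstructure.StringToSliceHookFunc(","),
		),
		Result: &out,
	})
	if err != nil {
		panic(err)
	}
	if err := decoder.Decode(input); err != nil {
		panic(err)
	}
	fmt.Println(out.Timeout, out.Tags) // 1m30s [a b c]
}
```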
-func WeaklyTypedHook( - f reflect.Kind, - t reflect.Kind, - data interface{}) (interface{}, error) { - dataVal := reflect.ValueOf(data) - switch t { - case reflect.String: - switch f { - case reflect.Bool: - if dataVal.Bool() { - return "1", nil - } - return "0", nil - case reflect.Float32: - return strconv.FormatFloat(dataVal.Float(), 'f', -1, 64), nil - case reflect.Int: - return strconv.FormatInt(dataVal.Int(), 10), nil - case reflect.Slice: - dataType := dataVal.Type() - elemKind := dataType.Elem().Kind() - if elemKind == reflect.Uint8 { - return string(dataVal.Interface().([]uint8)), nil - } - case reflect.Uint: - return strconv.FormatUint(dataVal.Uint(), 10), nil - } - } - - return data, nil -} - -func RecursiveStructToMapHookFunc() DecodeHookFunc { - return func(f reflect.Value, t reflect.Value) (interface{}, error) { - if f.Kind() != reflect.Struct { - return f.Interface(), nil - } - - var i interface{} = struct{}{} - if t.Type() != reflect.TypeOf(&i).Elem() { - return f.Interface(), nil - } - - m := make(map[string]interface{}) - t.Set(reflect.ValueOf(m)) - - return f.Interface(), nil - } -} - -// TextUnmarshallerHookFunc returns a DecodeHookFunc that applies -// strings to the UnmarshalText function, when the target type -// implements the encoding.TextUnmarshaler interface -func TextUnmarshallerHookFunc() DecodeHookFuncType { - return func( - f reflect.Type, - t reflect.Type, - data interface{}) (interface{}, error) { - if f.Kind() != reflect.String { - return data, nil - } - result := reflect.New(t).Interface() - unmarshaller, ok := result.(encoding.TextUnmarshaler) - if !ok { - return data, nil - } - if err := unmarshaller.UnmarshalText([]byte(data.(string))); err != nil { - return nil, err - } - return result, nil - } -} diff --git a/vendor/github.com/mitchellh/mapstructure/error.go b/vendor/github.com/mitchellh/mapstructure/error.go deleted file mode 100644 index 47a99e5af..000000000 --- a/vendor/github.com/mitchellh/mapstructure/error.go +++ /dev/null @@ -1,50 +0,0 @@ -package mapstructure - -import ( - "errors" - "fmt" - "sort" - "strings" -) - -// Error implements the error interface and can represents multiple -// errors that occur in the course of a single decode. -type Error struct { - Errors []string -} - -func (e *Error) Error() string { - points := make([]string, len(e.Errors)) - for i, err := range e.Errors { - points[i] = fmt.Sprintf("* %s", err) - } - - sort.Strings(points) - return fmt.Sprintf( - "%d error(s) decoding:\n\n%s", - len(e.Errors), strings.Join(points, "\n")) -} - -// WrappedErrors implements the errwrap.Wrapper interface to make this -// return value more useful with the errwrap and go-multierror libraries. -func (e *Error) WrappedErrors() []error { - if e == nil { - return nil - } - - result := make([]error, len(e.Errors)) - for i, e := range e.Errors { - result[i] = errors.New(e) - } - - return result -} - -func appendErrors(errors []string, err error) []string { - switch e := err.(type) { - case *Error: - return append(errors, e.Errors...) - default: - return append(errors, e.Error()) - } -} diff --git a/vendor/github.com/mitchellh/mapstructure/mapstructure.go b/vendor/github.com/mitchellh/mapstructure/mapstructure.go deleted file mode 100644 index 1efb22ac3..000000000 --- a/vendor/github.com/mitchellh/mapstructure/mapstructure.go +++ /dev/null @@ -1,1540 +0,0 @@ -// Package mapstructure exposes functionality to convert one arbitrary -// Go type into another, typically to convert a map[string]interface{} -// into a native Go structure. 
-// -// The Go structure can be arbitrarily complex, containing slices, -// other structs, etc. and the decoder will properly decode nested -// maps and so on into the proper structures in the native Go struct. -// See the examples to see what the decoder is capable of. -// -// The simplest function to start with is Decode. -// -// Field Tags -// -// When decoding to a struct, mapstructure will use the field name by -// default to perform the mapping. For example, if a struct has a field -// "Username" then mapstructure will look for a key in the source value -// of "username" (case insensitive). -// -// type User struct { -// Username string -// } -// -// You can change the behavior of mapstructure by using struct tags. -// The default struct tag that mapstructure looks for is "mapstructure" -// but you can customize it using DecoderConfig. -// -// Renaming Fields -// -// To rename the key that mapstructure looks for, use the "mapstructure" -// tag and set a value directly. For example, to change the "username" example -// above to "user": -// -// type User struct { -// Username string `mapstructure:"user"` -// } -// -// Embedded Structs and Squashing -// -// Embedded structs are treated as if they're another field with that name. -// By default, the two structs below are equivalent when decoding with -// mapstructure: -// -// type Person struct { -// Name string -// } -// -// type Friend struct { -// Person -// } -// -// type Friend struct { -// Person Person -// } -// -// This would require an input that looks like below: -// -// map[string]interface{}{ -// "person": map[string]interface{}{"name": "alice"}, -// } -// -// If your "person" value is NOT nested, then you can append ",squash" to -// your tag value and mapstructure will treat it as if the embedded struct -// were part of the struct directly. Example: -// -// type Friend struct { -// Person `mapstructure:",squash"` -// } -// -// Now the following input would be accepted: -// -// map[string]interface{}{ -// "name": "alice", -// } -// -// When decoding from a struct to a map, the squash tag squashes the struct -// fields into a single map. Using the example structs from above: -// -// Friend{Person: Person{Name: "alice"}} -// -// Will be decoded into a map: -// -// map[string]interface{}{ -// "name": "alice", -// } -// -// DecoderConfig has a field that changes the behavior of mapstructure -// to always squash embedded structs. -// -// Remainder Values -// -// If there are any unmapped keys in the source value, mapstructure by -// default will silently ignore them. You can error by setting ErrorUnused -// in DecoderConfig. If you're using Metadata you can also maintain a slice -// of the unused keys. -// -// You can also use the ",remain" suffix on your tag to collect all unused -// values in a map. The field with this tag MUST be a map type and should -// probably be a "map[string]interface{}" or "map[interface{}]interface{}". -// See example below: -// -// type Friend struct { -// Name string -// Other map[string]interface{} `mapstructure:",remain"` -// } -// -// Given the input below, Other would be populated with the other -// values that weren't used (everything but "name"): -// -// map[string]interface{}{ -// "name": "bob", -// "address": "123 Maple St.", -// } -// -// Omit Empty Values -// -// When decoding from a struct to any other value, you may use the -// ",omitempty" suffix on your tag to omit that value if it equates to -// the zero value. The zero value of all types is specified in the Go -// specification. 
-// -// For example, the zero type of a numeric type is zero ("0"). If the struct -// field value is zero and a numeric type, the field is empty, and it won't -// be encoded into the destination type. -// -// type Source struct { -// Age int `mapstructure:",omitempty"` -// } -// -// Unexported fields -// -// Since unexported (private) struct fields cannot be set outside the package -// where they are defined, the decoder will simply skip them. -// -// For this output type definition: -// -// type Exported struct { -// private string // this unexported field will be skipped -// Public string -// } -// -// Using this map as input: -// -// map[string]interface{}{ -// "private": "I will be ignored", -// "Public": "I made it through!", -// } -// -// The following struct will be decoded: -// -// type Exported struct { -// private: "" // field is left with an empty string (zero value) -// Public: "I made it through!" -// } -// -// Other Configuration -// -// mapstructure is highly configurable. See the DecoderConfig struct -// for other features and options that are supported. -package mapstructure - -import ( - "encoding/json" - "errors" - "fmt" - "reflect" - "sort" - "strconv" - "strings" -) - -// DecodeHookFunc is the callback function that can be used for -// data transformations. See "DecodeHook" in the DecoderConfig -// struct. -// -// The type must be one of DecodeHookFuncType, DecodeHookFuncKind, or -// DecodeHookFuncValue. -// Values are a superset of Types (Values can return types), and Types are a -// superset of Kinds (Types can return Kinds) and are generally a richer thing -// to use, but Kinds are simpler if you only need those. -// -// The reason DecodeHookFunc is multi-typed is for backwards compatibility: -// we started with Kinds and then realized Types were the better solution, -// but have a promise to not break backwards compat so we now support -// both. -type DecodeHookFunc interface{} - -// DecodeHookFuncType is a DecodeHookFunc which has complete information about -// the source and target types. -type DecodeHookFuncType func(reflect.Type, reflect.Type, interface{}) (interface{}, error) - -// DecodeHookFuncKind is a DecodeHookFunc which knows only the Kinds of the -// source and target types. -type DecodeHookFuncKind func(reflect.Kind, reflect.Kind, interface{}) (interface{}, error) - -// DecodeHookFuncValue is a DecodeHookFunc which has complete access to both the source and target -// values. -type DecodeHookFuncValue func(from reflect.Value, to reflect.Value) (interface{}, error) - -// DecoderConfig is the configuration that is used to create a new decoder -// and allows customization of various aspects of decoding. -type DecoderConfig struct { - // DecodeHook, if set, will be called before any decoding and any - // type conversion (if WeaklyTypedInput is on). This lets you modify - // the values before they're set down onto the resulting struct. The - // DecodeHook is called for every map and value in the input. This means - // that if a struct has embedded fields with squash tags the decode hook - // is called only once with all of the input data, not once for each - // embedded struct. - // - // If an error is returned, the entire decode will fail with that error. - DecodeHook DecodeHookFunc - - // If ErrorUnused is true, then it is an error for there to exist - // keys in the original map that were unused in the decoding process - // (extra keys). 
- ErrorUnused bool - - // If ErrorUnset is true, then it is an error for there to exist - // fields in the result that were not set in the decoding process - // (extra fields). This only applies to decoding to a struct. This - // will affect all nested structs as well. - ErrorUnset bool - - // ZeroFields, if set to true, will zero fields before writing them. - // For example, a map will be emptied before decoded values are put in - // it. If this is false, a map will be merged. - ZeroFields bool - - // If WeaklyTypedInput is true, the decoder will make the following - // "weak" conversions: - // - // - bools to string (true = "1", false = "0") - // - numbers to string (base 10) - // - bools to int/uint (true = 1, false = 0) - // - strings to int/uint (base implied by prefix) - // - int to bool (true if value != 0) - // - string to bool (accepts: 1, t, T, TRUE, true, True, 0, f, F, - // FALSE, false, False. Anything else is an error) - // - empty array = empty map and vice versa - // - negative numbers to overflowed uint values (base 10) - // - slice of maps to a merged map - // - single values are converted to slices if required. Each - // element is weakly decoded. For example: "4" can become []int{4} - // if the target type is an int slice. - // - WeaklyTypedInput bool - - // Squash will squash embedded structs. A squash tag may also be - // added to an individual struct field using a tag. For example: - // - // type Parent struct { - // Child `mapstructure:",squash"` - // } - Squash bool - - // Metadata is the struct that will contain extra metadata about - // the decoding. If this is nil, then no metadata will be tracked. - Metadata *Metadata - - // Result is a pointer to the struct that will contain the decoded - // value. - Result interface{} - - // The tag name that mapstructure reads for field names. This - // defaults to "mapstructure" - TagName string - - // IgnoreUntaggedFields ignores all struct fields without explicit - // TagName, comparable to `mapstructure:"-"` as default behaviour. - IgnoreUntaggedFields bool - - // MatchName is the function used to match the map key to the struct - // field name or tag. Defaults to `strings.EqualFold`. This can be used - // to implement case-sensitive tag values, support snake casing, etc. - MatchName func(mapKey, fieldName string) bool -} - -// A Decoder takes a raw interface value and turns it into structured -// data, keeping track of rich error information along the way in case -// anything goes wrong. Unlike the basic top-level Decode method, you can -// more finely control how the Decoder behaves using the DecoderConfig -// structure. The top-level Decode method is just a convenience that sets -// up the most basic Decoder. -type Decoder struct { - config *DecoderConfig -} - -// Metadata contains information about decoding a structure that -// is tedious or difficult to get otherwise. -type Metadata struct { - // Keys are the keys of the structure which were successfully decoded - Keys []string - - // Unused is a slice of keys that were found in the raw value but - // weren't decoded since there was no matching field in the result interface - Unused []string - - // Unset is a slice of field names that were found in the result interface - // but weren't set in the decoding process since there was no matching value - // in the input - Unset []string -} - -// Decode takes an input structure and uses reflection to translate it to -// the output structure. output must be a pointer to a map or struct. 
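Editorial aside (not part of the vendored diff): the package documentation removed above describes the ",squash" and ",remain" tag options together with the top-level Decode helper. A hedged sketch combining them, reusing the sample keys ("name", "address") from the deleted doc comment; the Friend and Person types mirror its examples:

```go
package main

import (
	"fmt"

	"github.com/mitchellh/mapstructure"
)

type Person struct {
	Name string
}

type Friend struct {
	// ",squash" promotes Person's fields, so the "name" key maps to Name.
	Person `mapstructure:",squash"`
	// ",remain" collects every key that no other field matched.
	Other map[string]interface{} `mapstructure:",remain"`
}

func main() {
	input := map[string]interface{}{
		"name":    "alice",
		"address": "123 Maple St.",
	}

	var f Friend
	if err := mapstructure.Decode(input, &f); err != nil {
		panic(err)
	}
	fmt.Println(f.Name, f.Other) // alice map[address:123 Maple St.]
}
```

WeakDecode, shown a little further down in the deleted file, behaves the same way but additionally enables WeaklyTypedInput, which turns on the string/number/bool coercions listed in the removed DecoderConfig comment.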
-func Decode(input interface{}, output interface{}) error { - config := &DecoderConfig{ - Metadata: nil, - Result: output, - } - - decoder, err := NewDecoder(config) - if err != nil { - return err - } - - return decoder.Decode(input) -} - -// WeakDecode is the same as Decode but is shorthand to enable -// WeaklyTypedInput. See DecoderConfig for more info. -func WeakDecode(input, output interface{}) error { - config := &DecoderConfig{ - Metadata: nil, - Result: output, - WeaklyTypedInput: true, - } - - decoder, err := NewDecoder(config) - if err != nil { - return err - } - - return decoder.Decode(input) -} - -// DecodeMetadata is the same as Decode, but is shorthand to -// enable metadata collection. See DecoderConfig for more info. -func DecodeMetadata(input interface{}, output interface{}, metadata *Metadata) error { - config := &DecoderConfig{ - Metadata: metadata, - Result: output, - } - - decoder, err := NewDecoder(config) - if err != nil { - return err - } - - return decoder.Decode(input) -} - -// WeakDecodeMetadata is the same as Decode, but is shorthand to -// enable both WeaklyTypedInput and metadata collection. See -// DecoderConfig for more info. -func WeakDecodeMetadata(input interface{}, output interface{}, metadata *Metadata) error { - config := &DecoderConfig{ - Metadata: metadata, - Result: output, - WeaklyTypedInput: true, - } - - decoder, err := NewDecoder(config) - if err != nil { - return err - } - - return decoder.Decode(input) -} - -// NewDecoder returns a new decoder for the given configuration. Once -// a decoder has been returned, the same configuration must not be used -// again. -func NewDecoder(config *DecoderConfig) (*Decoder, error) { - val := reflect.ValueOf(config.Result) - if val.Kind() != reflect.Ptr { - return nil, errors.New("result must be a pointer") - } - - val = val.Elem() - if !val.CanAddr() { - return nil, errors.New("result must be addressable (a pointer)") - } - - if config.Metadata != nil { - if config.Metadata.Keys == nil { - config.Metadata.Keys = make([]string, 0) - } - - if config.Metadata.Unused == nil { - config.Metadata.Unused = make([]string, 0) - } - - if config.Metadata.Unset == nil { - config.Metadata.Unset = make([]string, 0) - } - } - - if config.TagName == "" { - config.TagName = "mapstructure" - } - - if config.MatchName == nil { - config.MatchName = strings.EqualFold - } - - result := &Decoder{ - config: config, - } - - return result, nil -} - -// Decode decodes the given raw interface to the target pointer specified -// by the configuration. -func (d *Decoder) Decode(input interface{}) error { - return d.decode("", input, reflect.ValueOf(d.config.Result).Elem()) -} - -// Decodes an unknown data type into a specific reflection value. -func (d *Decoder) decode(name string, input interface{}, outVal reflect.Value) error { - var inputVal reflect.Value - if input != nil { - inputVal = reflect.ValueOf(input) - - // We need to check here if input is a typed nil. Typed nils won't - // match the "input == nil" below so we check that here. - if inputVal.Kind() == reflect.Ptr && inputVal.IsNil() { - input = nil - } - } - - if input == nil { - // If the data is nil, then we don't set anything, unless ZeroFields is set - // to true. 
- if d.config.ZeroFields { - outVal.Set(reflect.Zero(outVal.Type())) - - if d.config.Metadata != nil && name != "" { - d.config.Metadata.Keys = append(d.config.Metadata.Keys, name) - } - } - return nil - } - - if !inputVal.IsValid() { - // If the input value is invalid, then we just set the value - // to be the zero value. - outVal.Set(reflect.Zero(outVal.Type())) - if d.config.Metadata != nil && name != "" { - d.config.Metadata.Keys = append(d.config.Metadata.Keys, name) - } - return nil - } - - if d.config.DecodeHook != nil { - // We have a DecodeHook, so let's pre-process the input. - var err error - input, err = DecodeHookExec(d.config.DecodeHook, inputVal, outVal) - if err != nil { - return fmt.Errorf("error decoding '%s': %s", name, err) - } - } - - var err error - outputKind := getKind(outVal) - addMetaKey := true - switch outputKind { - case reflect.Bool: - err = d.decodeBool(name, input, outVal) - case reflect.Interface: - err = d.decodeBasic(name, input, outVal) - case reflect.String: - err = d.decodeString(name, input, outVal) - case reflect.Int: - err = d.decodeInt(name, input, outVal) - case reflect.Uint: - err = d.decodeUint(name, input, outVal) - case reflect.Float32: - err = d.decodeFloat(name, input, outVal) - case reflect.Struct: - err = d.decodeStruct(name, input, outVal) - case reflect.Map: - err = d.decodeMap(name, input, outVal) - case reflect.Ptr: - addMetaKey, err = d.decodePtr(name, input, outVal) - case reflect.Slice: - err = d.decodeSlice(name, input, outVal) - case reflect.Array: - err = d.decodeArray(name, input, outVal) - case reflect.Func: - err = d.decodeFunc(name, input, outVal) - default: - // If we reached this point then we weren't able to decode it - return fmt.Errorf("%s: unsupported type: %s", name, outputKind) - } - - // If we reached here, then we successfully decoded SOMETHING, so - // mark the key as used if we're tracking metainput. - if addMetaKey && d.config.Metadata != nil && name != "" { - d.config.Metadata.Keys = append(d.config.Metadata.Keys, name) - } - - return err -} - -// This decodes a basic type (bool, int, string, etc.) and sets the -// value to "data" of that type. -func (d *Decoder) decodeBasic(name string, data interface{}, val reflect.Value) error { - if val.IsValid() && val.Elem().IsValid() { - elem := val.Elem() - - // If we can't address this element, then its not writable. Instead, - // we make a copy of the value (which is a pointer and therefore - // writable), decode into that, and replace the whole value. - copied := false - if !elem.CanAddr() { - copied = true - - // Make *T - copy := reflect.New(elem.Type()) - - // *T = elem - copy.Elem().Set(elem) - - // Set elem so we decode into it - elem = copy - } - - // Decode. If we have an error then return. We also return right - // away if we're not a copy because that means we decoded directly. - if err := d.decode(name, data, elem); err != nil || !copied { - return err - } - - // If we're a copy, we need to set te final result - val.Set(elem.Elem()) - return nil - } - - dataVal := reflect.ValueOf(data) - - // If the input data is a pointer, and the assigned type is the dereference - // of that exact pointer, then indirect it so that we can assign it. 
- // Example: *string to string - if dataVal.Kind() == reflect.Ptr && dataVal.Type().Elem() == val.Type() { - dataVal = reflect.Indirect(dataVal) - } - - if !dataVal.IsValid() { - dataVal = reflect.Zero(val.Type()) - } - - dataValType := dataVal.Type() - if !dataValType.AssignableTo(val.Type()) { - return fmt.Errorf( - "'%s' expected type '%s', got '%s'", - name, val.Type(), dataValType) - } - - val.Set(dataVal) - return nil -} - -func (d *Decoder) decodeString(name string, data interface{}, val reflect.Value) error { - dataVal := reflect.Indirect(reflect.ValueOf(data)) - dataKind := getKind(dataVal) - - converted := true - switch { - case dataKind == reflect.String: - val.SetString(dataVal.String()) - case dataKind == reflect.Bool && d.config.WeaklyTypedInput: - if dataVal.Bool() { - val.SetString("1") - } else { - val.SetString("0") - } - case dataKind == reflect.Int && d.config.WeaklyTypedInput: - val.SetString(strconv.FormatInt(dataVal.Int(), 10)) - case dataKind == reflect.Uint && d.config.WeaklyTypedInput: - val.SetString(strconv.FormatUint(dataVal.Uint(), 10)) - case dataKind == reflect.Float32 && d.config.WeaklyTypedInput: - val.SetString(strconv.FormatFloat(dataVal.Float(), 'f', -1, 64)) - case dataKind == reflect.Slice && d.config.WeaklyTypedInput, - dataKind == reflect.Array && d.config.WeaklyTypedInput: - dataType := dataVal.Type() - elemKind := dataType.Elem().Kind() - switch elemKind { - case reflect.Uint8: - var uints []uint8 - if dataKind == reflect.Array { - uints = make([]uint8, dataVal.Len(), dataVal.Len()) - for i := range uints { - uints[i] = dataVal.Index(i).Interface().(uint8) - } - } else { - uints = dataVal.Interface().([]uint8) - } - val.SetString(string(uints)) - default: - converted = false - } - default: - converted = false - } - - if !converted { - return fmt.Errorf( - "'%s' expected type '%s', got unconvertible type '%s', value: '%v'", - name, val.Type(), dataVal.Type(), data) - } - - return nil -} - -func (d *Decoder) decodeInt(name string, data interface{}, val reflect.Value) error { - dataVal := reflect.Indirect(reflect.ValueOf(data)) - dataKind := getKind(dataVal) - dataType := dataVal.Type() - - switch { - case dataKind == reflect.Int: - val.SetInt(dataVal.Int()) - case dataKind == reflect.Uint: - val.SetInt(int64(dataVal.Uint())) - case dataKind == reflect.Float32: - val.SetInt(int64(dataVal.Float())) - case dataKind == reflect.Bool && d.config.WeaklyTypedInput: - if dataVal.Bool() { - val.SetInt(1) - } else { - val.SetInt(0) - } - case dataKind == reflect.String && d.config.WeaklyTypedInput: - str := dataVal.String() - if str == "" { - str = "0" - } - - i, err := strconv.ParseInt(str, 0, val.Type().Bits()) - if err == nil { - val.SetInt(i) - } else { - return fmt.Errorf("cannot parse '%s' as int: %s", name, err) - } - case dataType.PkgPath() == "encoding/json" && dataType.Name() == "Number": - jn := data.(json.Number) - i, err := jn.Int64() - if err != nil { - return fmt.Errorf( - "error decoding json.Number into %s: %s", name, err) - } - val.SetInt(i) - default: - return fmt.Errorf( - "'%s' expected type '%s', got unconvertible type '%s', value: '%v'", - name, val.Type(), dataVal.Type(), data) - } - - return nil -} - -func (d *Decoder) decodeUint(name string, data interface{}, val reflect.Value) error { - dataVal := reflect.Indirect(reflect.ValueOf(data)) - dataKind := getKind(dataVal) - dataType := dataVal.Type() - - switch { - case dataKind == reflect.Int: - i := dataVal.Int() - if i < 0 && !d.config.WeaklyTypedInput { - return fmt.Errorf("cannot 
parse '%s', %d overflows uint", - name, i) - } - val.SetUint(uint64(i)) - case dataKind == reflect.Uint: - val.SetUint(dataVal.Uint()) - case dataKind == reflect.Float32: - f := dataVal.Float() - if f < 0 && !d.config.WeaklyTypedInput { - return fmt.Errorf("cannot parse '%s', %f overflows uint", - name, f) - } - val.SetUint(uint64(f)) - case dataKind == reflect.Bool && d.config.WeaklyTypedInput: - if dataVal.Bool() { - val.SetUint(1) - } else { - val.SetUint(0) - } - case dataKind == reflect.String && d.config.WeaklyTypedInput: - str := dataVal.String() - if str == "" { - str = "0" - } - - i, err := strconv.ParseUint(str, 0, val.Type().Bits()) - if err == nil { - val.SetUint(i) - } else { - return fmt.Errorf("cannot parse '%s' as uint: %s", name, err) - } - case dataType.PkgPath() == "encoding/json" && dataType.Name() == "Number": - jn := data.(json.Number) - i, err := strconv.ParseUint(string(jn), 0, 64) - if err != nil { - return fmt.Errorf( - "error decoding json.Number into %s: %s", name, err) - } - val.SetUint(i) - default: - return fmt.Errorf( - "'%s' expected type '%s', got unconvertible type '%s', value: '%v'", - name, val.Type(), dataVal.Type(), data) - } - - return nil -} - -func (d *Decoder) decodeBool(name string, data interface{}, val reflect.Value) error { - dataVal := reflect.Indirect(reflect.ValueOf(data)) - dataKind := getKind(dataVal) - - switch { - case dataKind == reflect.Bool: - val.SetBool(dataVal.Bool()) - case dataKind == reflect.Int && d.config.WeaklyTypedInput: - val.SetBool(dataVal.Int() != 0) - case dataKind == reflect.Uint && d.config.WeaklyTypedInput: - val.SetBool(dataVal.Uint() != 0) - case dataKind == reflect.Float32 && d.config.WeaklyTypedInput: - val.SetBool(dataVal.Float() != 0) - case dataKind == reflect.String && d.config.WeaklyTypedInput: - b, err := strconv.ParseBool(dataVal.String()) - if err == nil { - val.SetBool(b) - } else if dataVal.String() == "" { - val.SetBool(false) - } else { - return fmt.Errorf("cannot parse '%s' as bool: %s", name, err) - } - default: - return fmt.Errorf( - "'%s' expected type '%s', got unconvertible type '%s', value: '%v'", - name, val.Type(), dataVal.Type(), data) - } - - return nil -} - -func (d *Decoder) decodeFloat(name string, data interface{}, val reflect.Value) error { - dataVal := reflect.Indirect(reflect.ValueOf(data)) - dataKind := getKind(dataVal) - dataType := dataVal.Type() - - switch { - case dataKind == reflect.Int: - val.SetFloat(float64(dataVal.Int())) - case dataKind == reflect.Uint: - val.SetFloat(float64(dataVal.Uint())) - case dataKind == reflect.Float32: - val.SetFloat(dataVal.Float()) - case dataKind == reflect.Bool && d.config.WeaklyTypedInput: - if dataVal.Bool() { - val.SetFloat(1) - } else { - val.SetFloat(0) - } - case dataKind == reflect.String && d.config.WeaklyTypedInput: - str := dataVal.String() - if str == "" { - str = "0" - } - - f, err := strconv.ParseFloat(str, val.Type().Bits()) - if err == nil { - val.SetFloat(f) - } else { - return fmt.Errorf("cannot parse '%s' as float: %s", name, err) - } - case dataType.PkgPath() == "encoding/json" && dataType.Name() == "Number": - jn := data.(json.Number) - i, err := jn.Float64() - if err != nil { - return fmt.Errorf( - "error decoding json.Number into %s: %s", name, err) - } - val.SetFloat(i) - default: - return fmt.Errorf( - "'%s' expected type '%s', got unconvertible type '%s', value: '%v'", - name, val.Type(), dataVal.Type(), data) - } - - return nil -} - -func (d *Decoder) decodeMap(name string, data interface{}, val reflect.Value) error 
{ - valType := val.Type() - valKeyType := valType.Key() - valElemType := valType.Elem() - - // By default we overwrite keys in the current map - valMap := val - - // If the map is nil or we're purposely zeroing fields, make a new map - if valMap.IsNil() || d.config.ZeroFields { - // Make a new map to hold our result - mapType := reflect.MapOf(valKeyType, valElemType) - valMap = reflect.MakeMap(mapType) - } - - // Check input type and based on the input type jump to the proper func - dataVal := reflect.Indirect(reflect.ValueOf(data)) - switch dataVal.Kind() { - case reflect.Map: - return d.decodeMapFromMap(name, dataVal, val, valMap) - - case reflect.Struct: - return d.decodeMapFromStruct(name, dataVal, val, valMap) - - case reflect.Array, reflect.Slice: - if d.config.WeaklyTypedInput { - return d.decodeMapFromSlice(name, dataVal, val, valMap) - } - - fallthrough - - default: - return fmt.Errorf("'%s' expected a map, got '%s'", name, dataVal.Kind()) - } -} - -func (d *Decoder) decodeMapFromSlice(name string, dataVal reflect.Value, val reflect.Value, valMap reflect.Value) error { - // Special case for BC reasons (covered by tests) - if dataVal.Len() == 0 { - val.Set(valMap) - return nil - } - - for i := 0; i < dataVal.Len(); i++ { - err := d.decode( - name+"["+strconv.Itoa(i)+"]", - dataVal.Index(i).Interface(), val) - if err != nil { - return err - } - } - - return nil -} - -func (d *Decoder) decodeMapFromMap(name string, dataVal reflect.Value, val reflect.Value, valMap reflect.Value) error { - valType := val.Type() - valKeyType := valType.Key() - valElemType := valType.Elem() - - // Accumulate errors - errors := make([]string, 0) - - // If the input data is empty, then we just match what the input data is. - if dataVal.Len() == 0 { - if dataVal.IsNil() { - if !val.IsNil() { - val.Set(dataVal) - } - } else { - // Set to empty allocated value - val.Set(valMap) - } - - return nil - } - - for _, k := range dataVal.MapKeys() { - fieldName := name + "[" + k.String() + "]" - - // First decode the key into the proper type - currentKey := reflect.Indirect(reflect.New(valKeyType)) - if err := d.decode(fieldName, k.Interface(), currentKey); err != nil { - errors = appendErrors(errors, err) - continue - } - - // Next decode the data into the proper type - v := dataVal.MapIndex(k).Interface() - currentVal := reflect.Indirect(reflect.New(valElemType)) - if err := d.decode(fieldName, v, currentVal); err != nil { - errors = appendErrors(errors, err) - continue - } - - valMap.SetMapIndex(currentKey, currentVal) - } - - // Set the built up map to the value - val.Set(valMap) - - // If we had errors, return those - if len(errors) > 0 { - return &Error{errors} - } - - return nil -} - -func (d *Decoder) decodeMapFromStruct(name string, dataVal reflect.Value, val reflect.Value, valMap reflect.Value) error { - typ := dataVal.Type() - for i := 0; i < typ.NumField(); i++ { - // Get the StructField first since this is a cheap operation. If the - // field is unexported, then ignore it. - f := typ.Field(i) - if f.PkgPath != "" { - continue - } - - // Next get the actual value of this field and verify it is assignable - // to the map value. 
- v := dataVal.Field(i) - if !v.Type().AssignableTo(valMap.Type().Elem()) { - return fmt.Errorf("cannot assign type '%s' to map value field of type '%s'", v.Type(), valMap.Type().Elem()) - } - - tagValue := f.Tag.Get(d.config.TagName) - keyName := f.Name - - if tagValue == "" && d.config.IgnoreUntaggedFields { - continue - } - - // If Squash is set in the config, we squash the field down. - squash := d.config.Squash && v.Kind() == reflect.Struct && f.Anonymous - - v = dereferencePtrToStructIfNeeded(v, d.config.TagName) - - // Determine the name of the key in the map - if index := strings.Index(tagValue, ","); index != -1 { - if tagValue[:index] == "-" { - continue - } - // If "omitempty" is specified in the tag, it ignores empty values. - if strings.Index(tagValue[index+1:], "omitempty") != -1 && isEmptyValue(v) { - continue - } - - // If "squash" is specified in the tag, we squash the field down. - squash = squash || strings.Index(tagValue[index+1:], "squash") != -1 - if squash { - // When squashing, the embedded type can be a pointer to a struct. - if v.Kind() == reflect.Ptr && v.Elem().Kind() == reflect.Struct { - v = v.Elem() - } - - // The final type must be a struct - if v.Kind() != reflect.Struct { - return fmt.Errorf("cannot squash non-struct type '%s'", v.Type()) - } - } - if keyNameTagValue := tagValue[:index]; keyNameTagValue != "" { - keyName = keyNameTagValue - } - } else if len(tagValue) > 0 { - if tagValue == "-" { - continue - } - keyName = tagValue - } - - switch v.Kind() { - // this is an embedded struct, so handle it differently - case reflect.Struct: - x := reflect.New(v.Type()) - x.Elem().Set(v) - - vType := valMap.Type() - vKeyType := vType.Key() - vElemType := vType.Elem() - mType := reflect.MapOf(vKeyType, vElemType) - vMap := reflect.MakeMap(mType) - - // Creating a pointer to a map so that other methods can completely - // overwrite the map if need be (looking at you decodeMapFromMap). The - // indirection allows the underlying map to be settable (CanSet() == true) - // where as reflect.MakeMap returns an unsettable map. - addrVal := reflect.New(vMap.Type()) - reflect.Indirect(addrVal).Set(vMap) - - err := d.decode(keyName, x.Interface(), reflect.Indirect(addrVal)) - if err != nil { - return err - } - - // the underlying map may have been completely overwritten so pull - // it indirectly out of the enclosing value. - vMap = reflect.Indirect(addrVal) - - if squash { - for _, k := range vMap.MapKeys() { - valMap.SetMapIndex(k, vMap.MapIndex(k)) - } - } else { - valMap.SetMapIndex(reflect.ValueOf(keyName), vMap) - } - - default: - valMap.SetMapIndex(reflect.ValueOf(keyName), v) - } - } - - if val.CanAddr() { - val.Set(valMap) - } - - return nil -} - -func (d *Decoder) decodePtr(name string, data interface{}, val reflect.Value) (bool, error) { - // If the input data is nil, then we want to just set the output - // pointer to be nil as well. - isNil := data == nil - if !isNil { - switch v := reflect.Indirect(reflect.ValueOf(data)); v.Kind() { - case reflect.Chan, - reflect.Func, - reflect.Interface, - reflect.Map, - reflect.Ptr, - reflect.Slice: - isNil = v.IsNil() - } - } - if isNil { - if !val.IsNil() && val.CanSet() { - nilValue := reflect.New(val.Type()).Elem() - val.Set(nilValue) - } - - return true, nil - } - - // Create an element of the concrete (non pointer) type and decode - // into that. Then set the value of the pointer to this type. 
- valType := val.Type() - valElemType := valType.Elem() - if val.CanSet() { - realVal := val - if realVal.IsNil() || d.config.ZeroFields { - realVal = reflect.New(valElemType) - } - - if err := d.decode(name, data, reflect.Indirect(realVal)); err != nil { - return false, err - } - - val.Set(realVal) - } else { - if err := d.decode(name, data, reflect.Indirect(val)); err != nil { - return false, err - } - } - return false, nil -} - -func (d *Decoder) decodeFunc(name string, data interface{}, val reflect.Value) error { - // Create an element of the concrete (non pointer) type and decode - // into that. Then set the value of the pointer to this type. - dataVal := reflect.Indirect(reflect.ValueOf(data)) - if val.Type() != dataVal.Type() { - return fmt.Errorf( - "'%s' expected type '%s', got unconvertible type '%s', value: '%v'", - name, val.Type(), dataVal.Type(), data) - } - val.Set(dataVal) - return nil -} - -func (d *Decoder) decodeSlice(name string, data interface{}, val reflect.Value) error { - dataVal := reflect.Indirect(reflect.ValueOf(data)) - dataValKind := dataVal.Kind() - valType := val.Type() - valElemType := valType.Elem() - sliceType := reflect.SliceOf(valElemType) - - // If we have a non array/slice type then we first attempt to convert. - if dataValKind != reflect.Array && dataValKind != reflect.Slice { - if d.config.WeaklyTypedInput { - switch { - // Slice and array we use the normal logic - case dataValKind == reflect.Slice, dataValKind == reflect.Array: - break - - // Empty maps turn into empty slices - case dataValKind == reflect.Map: - if dataVal.Len() == 0 { - val.Set(reflect.MakeSlice(sliceType, 0, 0)) - return nil - } - // Create slice of maps of other sizes - return d.decodeSlice(name, []interface{}{data}, val) - - case dataValKind == reflect.String && valElemType.Kind() == reflect.Uint8: - return d.decodeSlice(name, []byte(dataVal.String()), val) - - // All other types we try to convert to the slice type - // and "lift" it into it. i.e. a string becomes a string slice. - default: - // Just re-try this function with data as a slice. - return d.decodeSlice(name, []interface{}{data}, val) - } - } - - return fmt.Errorf( - "'%s': source data must be an array or slice, got %s", name, dataValKind) - } - - // If the input value is nil, then don't allocate since empty != nil - if dataValKind != reflect.Array && dataVal.IsNil() { - return nil - } - - valSlice := val - if valSlice.IsNil() || d.config.ZeroFields { - // Make a new slice to hold our result, same size as the original data. 
- valSlice = reflect.MakeSlice(sliceType, dataVal.Len(), dataVal.Len()) - } - - // Accumulate any errors - errors := make([]string, 0) - - for i := 0; i < dataVal.Len(); i++ { - currentData := dataVal.Index(i).Interface() - for valSlice.Len() <= i { - valSlice = reflect.Append(valSlice, reflect.Zero(valElemType)) - } - currentField := valSlice.Index(i) - - fieldName := name + "[" + strconv.Itoa(i) + "]" - if err := d.decode(fieldName, currentData, currentField); err != nil { - errors = appendErrors(errors, err) - } - } - - // Finally, set the value to the slice we built up - val.Set(valSlice) - - // If there were errors, we return those - if len(errors) > 0 { - return &Error{errors} - } - - return nil -} - -func (d *Decoder) decodeArray(name string, data interface{}, val reflect.Value) error { - dataVal := reflect.Indirect(reflect.ValueOf(data)) - dataValKind := dataVal.Kind() - valType := val.Type() - valElemType := valType.Elem() - arrayType := reflect.ArrayOf(valType.Len(), valElemType) - - valArray := val - - if valArray.Interface() == reflect.Zero(valArray.Type()).Interface() || d.config.ZeroFields { - // Check input type - if dataValKind != reflect.Array && dataValKind != reflect.Slice { - if d.config.WeaklyTypedInput { - switch { - // Empty maps turn into empty arrays - case dataValKind == reflect.Map: - if dataVal.Len() == 0 { - val.Set(reflect.Zero(arrayType)) - return nil - } - - // All other types we try to convert to the array type - // and "lift" it into it. i.e. a string becomes a string array. - default: - // Just re-try this function with data as a slice. - return d.decodeArray(name, []interface{}{data}, val) - } - } - - return fmt.Errorf( - "'%s': source data must be an array or slice, got %s", name, dataValKind) - - } - if dataVal.Len() > arrayType.Len() { - return fmt.Errorf( - "'%s': expected source data to have length less or equal to %d, got %d", name, arrayType.Len(), dataVal.Len()) - - } - - // Make a new array to hold our result, same size as the original data. - valArray = reflect.New(arrayType).Elem() - } - - // Accumulate any errors - errors := make([]string, 0) - - for i := 0; i < dataVal.Len(); i++ { - currentData := dataVal.Index(i).Interface() - currentField := valArray.Index(i) - - fieldName := name + "[" + strconv.Itoa(i) + "]" - if err := d.decode(fieldName, currentData, currentField); err != nil { - errors = appendErrors(errors, err) - } - } - - // Finally, set the value to the array we built up - val.Set(valArray) - - // If there were errors, we return those - if len(errors) > 0 { - return &Error{errors} - } - - return nil -} - -func (d *Decoder) decodeStruct(name string, data interface{}, val reflect.Value) error { - dataVal := reflect.Indirect(reflect.ValueOf(data)) - - // If the type of the value to write to and the data match directly, - // then we just set it directly instead of recursing into the structure. - if dataVal.Type() == val.Type() { - val.Set(dataVal) - return nil - } - - dataValKind := dataVal.Kind() - switch dataValKind { - case reflect.Map: - return d.decodeStructFromMap(name, dataVal, val) - - case reflect.Struct: - // Not the most efficient way to do this but we can optimize later if - // we want to. To convert from struct to struct we go to map first - // as an intermediary. 
- - // Make a new map to hold our result - mapType := reflect.TypeOf((map[string]interface{})(nil)) - mval := reflect.MakeMap(mapType) - - // Creating a pointer to a map so that other methods can completely - // overwrite the map if need be (looking at you decodeMapFromMap). The - // indirection allows the underlying map to be settable (CanSet() == true) - // where as reflect.MakeMap returns an unsettable map. - addrVal := reflect.New(mval.Type()) - - reflect.Indirect(addrVal).Set(mval) - if err := d.decodeMapFromStruct(name, dataVal, reflect.Indirect(addrVal), mval); err != nil { - return err - } - - result := d.decodeStructFromMap(name, reflect.Indirect(addrVal), val) - return result - - default: - return fmt.Errorf("'%s' expected a map, got '%s'", name, dataVal.Kind()) - } -} - -func (d *Decoder) decodeStructFromMap(name string, dataVal, val reflect.Value) error { - dataValType := dataVal.Type() - if kind := dataValType.Key().Kind(); kind != reflect.String && kind != reflect.Interface { - return fmt.Errorf( - "'%s' needs a map with string keys, has '%s' keys", - name, dataValType.Key().Kind()) - } - - dataValKeys := make(map[reflect.Value]struct{}) - dataValKeysUnused := make(map[interface{}]struct{}) - for _, dataValKey := range dataVal.MapKeys() { - dataValKeys[dataValKey] = struct{}{} - dataValKeysUnused[dataValKey.Interface()] = struct{}{} - } - - targetValKeysUnused := make(map[interface{}]struct{}) - errors := make([]string, 0) - - // This slice will keep track of all the structs we'll be decoding. - // There can be more than one struct if there are embedded structs - // that are squashed. - structs := make([]reflect.Value, 1, 5) - structs[0] = val - - // Compile the list of all the fields that we're going to be decoding - // from all the structs. - type field struct { - field reflect.StructField - val reflect.Value - } - - // remainField is set to a valid field set with the "remain" tag if - // we are keeping track of remaining values. - var remainField *field - - fields := []field{} - for len(structs) > 0 { - structVal := structs[0] - structs = structs[1:] - - structType := structVal.Type() - - for i := 0; i < structType.NumField(); i++ { - fieldType := structType.Field(i) - fieldVal := structVal.Field(i) - if fieldVal.Kind() == reflect.Ptr && fieldVal.Elem().Kind() == reflect.Struct { - // Handle embedded struct pointers as embedded structs. - fieldVal = fieldVal.Elem() - } - - // If "squash" is specified in the tag, we squash the field down. 
- squash := d.config.Squash && fieldVal.Kind() == reflect.Struct && fieldType.Anonymous - remain := false - - // We always parse the tags cause we're looking for other tags too - tagParts := strings.Split(fieldType.Tag.Get(d.config.TagName), ",") - for _, tag := range tagParts[1:] { - if tag == "squash" { - squash = true - break - } - - if tag == "remain" { - remain = true - break - } - } - - if squash { - if fieldVal.Kind() != reflect.Struct { - errors = appendErrors(errors, - fmt.Errorf("%s: unsupported type for squash: %s", fieldType.Name, fieldVal.Kind())) - } else { - structs = append(structs, fieldVal) - } - continue - } - - // Build our field - if remain { - remainField = &field{fieldType, fieldVal} - } else { - // Normal struct field, store it away - fields = append(fields, field{fieldType, fieldVal}) - } - } - } - - // for fieldType, field := range fields { - for _, f := range fields { - field, fieldValue := f.field, f.val - fieldName := field.Name - - tagValue := field.Tag.Get(d.config.TagName) - tagValue = strings.SplitN(tagValue, ",", 2)[0] - if tagValue != "" { - fieldName = tagValue - } - - rawMapKey := reflect.ValueOf(fieldName) - rawMapVal := dataVal.MapIndex(rawMapKey) - if !rawMapVal.IsValid() { - // Do a slower search by iterating over each key and - // doing case-insensitive search. - for dataValKey := range dataValKeys { - mK, ok := dataValKey.Interface().(string) - if !ok { - // Not a string key - continue - } - - if d.config.MatchName(mK, fieldName) { - rawMapKey = dataValKey - rawMapVal = dataVal.MapIndex(dataValKey) - break - } - } - - if !rawMapVal.IsValid() { - // There was no matching key in the map for the value in - // the struct. Remember it for potential errors and metadata. - targetValKeysUnused[fieldName] = struct{}{} - continue - } - } - - if !fieldValue.IsValid() { - // This should never happen - panic("field is not valid") - } - - // If we can't set the field, then it is unexported or something, - // and we just continue onwards. - if !fieldValue.CanSet() { - continue - } - - // Delete the key we're using from the unused map so we stop tracking - delete(dataValKeysUnused, rawMapKey.Interface()) - - // If the name is empty string, then we're at the root, and we - // don't dot-join the fields. - if name != "" { - fieldName = name + "." + fieldName - } - - if err := d.decode(fieldName, rawMapVal.Interface(), fieldValue); err != nil { - errors = appendErrors(errors, err) - } - } - - // If we have a "remain"-tagged field and we have unused keys then - // we put the unused keys directly into the remain field. - if remainField != nil && len(dataValKeysUnused) > 0 { - // Build a map of only the unused values - remain := map[interface{}]interface{}{} - for key := range dataValKeysUnused { - remain[key] = dataVal.MapIndex(reflect.ValueOf(key)).Interface() - } - - // Decode it as-if we were just decoding this map onto our map. 
- if err := d.decodeMap(name, remain, remainField.val); err != nil { - errors = appendErrors(errors, err) - } - - // Set the map to nil so we have none so that the next check will - // not error (ErrorUnused) - dataValKeysUnused = nil - } - - if d.config.ErrorUnused && len(dataValKeysUnused) > 0 { - keys := make([]string, 0, len(dataValKeysUnused)) - for rawKey := range dataValKeysUnused { - keys = append(keys, rawKey.(string)) - } - sort.Strings(keys) - - err := fmt.Errorf("'%s' has invalid keys: %s", name, strings.Join(keys, ", ")) - errors = appendErrors(errors, err) - } - - if d.config.ErrorUnset && len(targetValKeysUnused) > 0 { - keys := make([]string, 0, len(targetValKeysUnused)) - for rawKey := range targetValKeysUnused { - keys = append(keys, rawKey.(string)) - } - sort.Strings(keys) - - err := fmt.Errorf("'%s' has unset fields: %s", name, strings.Join(keys, ", ")) - errors = appendErrors(errors, err) - } - - if len(errors) > 0 { - return &Error{errors} - } - - // Add the unused keys to the list of unused keys if we're tracking metadata - if d.config.Metadata != nil { - for rawKey := range dataValKeysUnused { - key := rawKey.(string) - if name != "" { - key = name + "." + key - } - - d.config.Metadata.Unused = append(d.config.Metadata.Unused, key) - } - for rawKey := range targetValKeysUnused { - key := rawKey.(string) - if name != "" { - key = name + "." + key - } - - d.config.Metadata.Unset = append(d.config.Metadata.Unset, key) - } - } - - return nil -} - -func isEmptyValue(v reflect.Value) bool { - switch getKind(v) { - case reflect.Array, reflect.Map, reflect.Slice, reflect.String: - return v.Len() == 0 - case reflect.Bool: - return !v.Bool() - case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: - return v.Int() == 0 - case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr: - return v.Uint() == 0 - case reflect.Float32, reflect.Float64: - return v.Float() == 0 - case reflect.Interface, reflect.Ptr: - return v.IsNil() - } - return false -} - -func getKind(val reflect.Value) reflect.Kind { - kind := val.Kind() - - switch { - case kind >= reflect.Int && kind <= reflect.Int64: - return reflect.Int - case kind >= reflect.Uint && kind <= reflect.Uint64: - return reflect.Uint - case kind >= reflect.Float32 && kind <= reflect.Float64: - return reflect.Float32 - default: - return kind - } -} - -func isStructTypeConvertibleToMap(typ reflect.Type, checkMapstructureTags bool, tagName string) bool { - for i := 0; i < typ.NumField(); i++ { - f := typ.Field(i) - if f.PkgPath == "" && !checkMapstructureTags { // check for unexported fields - return true - } - if checkMapstructureTags && f.Tag.Get(tagName) != "" { // check for mapstructure tags inside - return true - } - } - return false -} - -func dereferencePtrToStructIfNeeded(v reflect.Value, tagName string) reflect.Value { - if v.Kind() != reflect.Ptr || v.Elem().Kind() != reflect.Struct { - return v - } - deref := v.Elem() - derefT := deref.Type() - if isStructTypeConvertibleToMap(derefT, true, tagName) { - return deref - } - return v -} diff --git a/vendor/github.com/moricho/tparallel/.gitignore b/vendor/github.com/moricho/tparallel/.gitignore deleted file mode 100644 index 71342280e..000000000 --- a/vendor/github.com/moricho/tparallel/.gitignore +++ /dev/null @@ -1,3 +0,0 @@ -/tparallel -.envrc -/dist diff --git a/vendor/github.com/moricho/tparallel/.goreleaser.yaml b/vendor/github.com/moricho/tparallel/.goreleaser.yaml deleted file mode 100644 index 
4a04fe25b..000000000 --- a/vendor/github.com/moricho/tparallel/.goreleaser.yaml +++ /dev/null @@ -1,52 +0,0 @@ -project_name: tparallel -before: - hooks: - - go mod tidy -builds: - - main: ./cmd/tparallel - binary: tparallel - ldflags: - - -s -w - - -X main.Version={{.Version}} - - -X main.Revision={{.ShortCommit}} - env: - - CGO_ENABLED=0 - goos: - - linux - - windows - - darwin - -archives: - - format: tar.gz - name_template: >- - {{ .ProjectName }}_ - {{- title .Os }}_ - {{- if eq .Arch "amd64" }}x86_64 - {{- else if eq .Arch "386" }}i386 - {{- else }}{{ .Arch }}{{ end }} - {{- if .Arm }}v{{ .Arm }}{{ end }} - format_overrides: - - goos: windows - format: zip -checksum: - name_template: 'checksums.txt' -snapshot: - name_template: "{{ incpatch .Version }}-next" -changelog: - sort: asc - filters: - exclude: - - '^docs:' - - '^test:' -release: - prerelease: auto -brews: - - tap: - owner: moricho - name: homebrew-tparallel - homepage: https://github.com/moricho/tparallel - description: tparallel detects inappropriate usage of t.Parallel() method in your Go test codes - install: | - bin.install "tparallel" - test: | - system "#{bin}/goreleaser -v" diff --git a/vendor/github.com/moricho/tparallel/LICENSE b/vendor/github.com/moricho/tparallel/LICENSE deleted file mode 100644 index 4f029982f..000000000 --- a/vendor/github.com/moricho/tparallel/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -MIT License - -Copyright (c) 2020 moricho - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. diff --git a/vendor/github.com/moricho/tparallel/Makefile b/vendor/github.com/moricho/tparallel/Makefile deleted file mode 100644 index fb3588069..000000000 --- a/vendor/github.com/moricho/tparallel/Makefile +++ /dev/null @@ -1,13 +0,0 @@ -all: build - -.PHONY: build -build: - go build -o tparallel ./cmd/tparallel - -.PHONY: build_race -build_race: - go build -race -o tparallel ./cmd/tparallel - -.PHONY: test -test: build_race - go test -v ./... 
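For context on the tparallel linter whose vendored copy is being dropped in this diff: per its own README (deleted below), it flags table-driven tests that call `t.Parallel()` at only one level, and tests that pair parallel subtests with `defer`-based teardown instead of `t.Cleanup()`. A minimal sketch of a test that satisfies both checks follows; the `setup` helper and names are illustrative only and are not part of the vendored code.

```go
package sample

import "testing"

// setup is a hypothetical fixture helper, shown only to illustrate how
// teardown should be registered when subtests run in parallel.
func setup(t *testing.T) (teardown func()) {
	t.Helper()
	return func() {}
}

func TestTable(t *testing.T) {
	t.Parallel()        // parallel at the top level...
	t.Cleanup(setup(t)) // ...so teardown goes through t.Cleanup, not defer

	tests := []struct{ name string }{{name: "Sub1"}, {name: "Sub2"}}
	for _, tt := range tests {
		tt := tt // capture the range variable for parallel subtests (pre-Go 1.22)
		t.Run(tt.name, func(t *testing.T) {
			t.Parallel() // ...and in every subtest
			_ = tt.name
		})
	}
}
```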
diff --git a/vendor/github.com/moricho/tparallel/README.md b/vendor/github.com/moricho/tparallel/README.md deleted file mode 100644 index 65ed46c42..000000000 --- a/vendor/github.com/moricho/tparallel/README.md +++ /dev/null @@ -1,112 +0,0 @@ -# tparallel - -[![tparallel](https://github.com/moricho/tparallel/workflows/tparallel/badge.svg?branch=master)](https://github.com/moricho/tparallel/actions) -[![Go Report Card](https://goreportcard.com/badge/github.com/moricho/tparallel)](https://goreportcard.com/report/github.com/moricho/tparallel) -[![MIT License](http://img.shields.io/badge/license-MIT-blue.svg?style=flat)](LICENSE) - -`tparallel` finds inappropriate usage of `t.Parallel()` method in your Go test codes. -It detects the following: - -- `t.Parallel()` is called in either a top-level test function or a sub-test function only -- Although `t.Parallel()` is called in the sub-test function, it is post-processed by `defer` instead of `t.Cleanup()` - -This tool was inspired by this blog: [Go 言語でのテストの並列化 〜t.Parallel()メソッドを理解する〜](https://engineering.mercari.com/blog/entry/how_to_use_t_parallel/) - -## Installation - -### From GitHub Releases - -Please see [GitHub Releases](https://github.com/moricho/tparallel/releases). -Available binaries are: - -- macOS -- Linux -- Windows - -### macOS - -```sh -$ brew tap moricho/tparallel -$ brew install tparallel -``` - -### go get - -```sh -$ go get -u github.com/moricho/tparallel/cmd/tparallel -``` - -## Usage - -### golangci-lint - -[golangci-lint](https://github.com/golangci/golangci-lint) now supports `tparallel`, so you can enable this linter and use in it. - -### shell - -```sh -$ go vet -vettool=`which tparallel` -``` - -## Example - -```go -package sample - -import ( - "testing" -) - -func Test_Table1(t *testing.T) { - teardown := setup("Test_Table1") - defer teardown() - - tests := []struct { - name string - }{ - { - name: "Table1_Sub1", - }, - { - name: "Table1_Sub2", - }, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - t.Parallel() - call(tt.name) - }) - } -} - -func Test_Table2(t *testing.T) { - teardown := setup("Test_Table2") - t.Cleanup(teardown) - t.Parallel() - - tests := []struct { - name string - }{ - { - name: "Table2_Sub1", - }, - { - name: "Table2_Sub2", - }, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - call(tt.name) - }) - } -} -``` - -```console -# github.com/moricho/tparallel/testdata/src/sample -testdata/src/sample/table_test.go:7:6: Test_Table1 should use t.Cleanup -testdata/src/sample/table_test.go:7:6: Test_Table1 should call t.Parallel on the top level as well as its subtests -testdata/src/sample/table_test.go:30:6: Test_Table2's subtests should call t.Parallel -``` diff --git a/vendor/github.com/moricho/tparallel/pkg/ssafunc/ssafunc.go b/vendor/github.com/moricho/tparallel/pkg/ssafunc/ssafunc.go deleted file mode 100644 index 5a8e637bd..000000000 --- a/vendor/github.com/moricho/tparallel/pkg/ssafunc/ssafunc.go +++ /dev/null @@ -1,34 +0,0 @@ -package ssafunc - -import ( - "go/types" - - "github.com/gostaticanalysis/analysisutil" - "github.com/moricho/tparallel/pkg/ssainstr" - "golang.org/x/tools/go/ssa" -) - -// IsDeferCalled returns whether the given ssa.Function calls `defer` -func IsDeferCalled(f *ssa.Function) bool { - for _, block := range f.Blocks { - for _, instr := range block.Instrs { - switch instr.(type) { - case *ssa.Defer: - return true - } - } - } - return false -} - -// IsCalled returns whether the given ssa.Function calls `fn` func -func IsCalled(f 
*ssa.Function, fn *types.Func) bool { - block := f.Blocks[0] - for _, instr := range block.Instrs { - called := analysisutil.Called(instr, nil, fn) - if _, ok := ssainstr.LookupCalled(instr, fn); ok || called { - return true - } - } - return false -} diff --git a/vendor/github.com/moricho/tparallel/pkg/ssainstr/ssainstr.go b/vendor/github.com/moricho/tparallel/pkg/ssainstr/ssainstr.go deleted file mode 100644 index 374553f5e..000000000 --- a/vendor/github.com/moricho/tparallel/pkg/ssainstr/ssainstr.go +++ /dev/null @@ -1,63 +0,0 @@ -package ssainstr - -import ( - "go/types" - - "github.com/gostaticanalysis/analysisutil" - "golang.org/x/tools/go/ssa" -) - -// LookupCalled looks up ssa.Instruction that call the `fn` func in the given instr -func LookupCalled(instr ssa.Instruction, fn *types.Func) ([]ssa.Instruction, bool) { - instrs := []ssa.Instruction{} - - call, ok := instr.(ssa.CallInstruction) - if !ok { - return instrs, false - } - - ssaCall := call.Value() - if ssaCall == nil { - return instrs, false - } - common := ssaCall.Common() - if common == nil { - return instrs, false - } - val := common.Value - - called := false - switch fnval := val.(type) { - case *ssa.Function: - for _, block := range fnval.Blocks { - for _, instr := range block.Instrs { - if analysisutil.Called(instr, nil, fn) { - called = true - instrs = append(instrs, instr) - } - } - } - } - - return instrs, called -} - -// HasArgs returns whether the given ssa.Instruction has `typ` type args -func HasArgs(instr ssa.Instruction, typ types.Type) bool { - call, ok := instr.(ssa.CallInstruction) - if !ok { - return false - } - - ssaCall := call.Value() - if ssaCall == nil { - return false - } - - for _, arg := range ssaCall.Call.Args { - if types.Identical(arg.Type(), typ) { - return true - } - } - return false -} diff --git a/vendor/github.com/moricho/tparallel/testmap.go b/vendor/github.com/moricho/tparallel/testmap.go deleted file mode 100644 index fa9bed708..000000000 --- a/vendor/github.com/moricho/tparallel/testmap.go +++ /dev/null @@ -1,63 +0,0 @@ -package tparallel - -import ( - "go/types" - "strings" - - "github.com/gostaticanalysis/analysisutil" - "golang.org/x/tools/go/analysis/passes/buildssa" - "golang.org/x/tools/go/ssa" - - "github.com/moricho/tparallel/pkg/ssainstr" -) - -// getTestMap gets a set of a top-level test and its sub-tests -func getTestMap(ssaanalyzer *buildssa.SSA, testTyp types.Type) map[*ssa.Function][]*ssa.Function { - testMap := map[*ssa.Function][]*ssa.Function{} - - trun := analysisutil.MethodOf(testTyp, "Run") - for _, f := range ssaanalyzer.SrcFuncs { - if !strings.HasPrefix(f.Name(), "Test") || !(f.Parent() == (*ssa.Function)(nil)) { - continue - } - testMap[f] = []*ssa.Function{} - for _, block := range f.Blocks { - for _, instr := range block.Instrs { - called := analysisutil.Called(instr, nil, trun) - - if !called && ssainstr.HasArgs(instr, types.NewPointer(testTyp)) { - if instrs, ok := ssainstr.LookupCalled(instr, trun); ok { - for _, v := range instrs { - testMap[f] = appendTestMap(testMap[f], v) - } - } - } else if called { - testMap[f] = appendTestMap(testMap[f], instr) - } - } - } - } - - return testMap -} - -// appendTestMap converts ssa.Instruction to ssa.Function and append it to a given sub-test slice -func appendTestMap(subtests []*ssa.Function, instr ssa.Instruction) []*ssa.Function { - call, ok := instr.(ssa.CallInstruction) - if !ok { - return subtests - } - - ssaCall := call.Value() - for _, arg := range ssaCall.Call.Args { - switch arg := arg.(type) { - case 
*ssa.Function: - subtests = append(subtests, arg) - case *ssa.MakeClosure: - fn, _ := arg.Fn.(*ssa.Function) - subtests = append(subtests, fn) - } - } - - return subtests -} diff --git a/vendor/github.com/moricho/tparallel/tparallel.go b/vendor/github.com/moricho/tparallel/tparallel.go deleted file mode 100644 index 3139e0425..000000000 --- a/vendor/github.com/moricho/tparallel/tparallel.go +++ /dev/null @@ -1,72 +0,0 @@ -package tparallel - -import ( - "go/types" - - "github.com/gostaticanalysis/analysisutil" - "golang.org/x/tools/go/analysis" - "golang.org/x/tools/go/analysis/passes/buildssa" - - "github.com/moricho/tparallel/pkg/ssafunc" -) - -const doc = "tparallel detects inappropriate usage of t.Parallel() method in your Go test codes." - -// Analyzer analyzes Go test codes whether they use t.Parallel() appropriately -// by using SSA (Single Static Assignment) -var Analyzer = &analysis.Analyzer{ - Name: "tparallel", - Doc: doc, - Run: run, - Requires: []*analysis.Analyzer{ - buildssa.Analyzer, - }, -} - -func run(pass *analysis.Pass) (interface{}, error) { - ssaanalyzer := pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA) - - obj := analysisutil.ObjectOf(pass, "testing", "T") - if obj == nil { - // skip checking - return nil, nil - } - testTyp, testPkg := obj.Type(), obj.Pkg() - - p, _, _ := types.LookupFieldOrMethod(testTyp, true, testPkg, "Parallel") - parallel, _ := p.(*types.Func) - c, _, _ := types.LookupFieldOrMethod(testTyp, true, testPkg, "Cleanup") - cleanup, _ := c.(*types.Func) - - testMap := getTestMap(ssaanalyzer, testTyp) // ex. {Test1: [TestSub1, TestSub2], Test2: [TestSub1, TestSub2, TestSub3], ...} - for top, subs := range testMap { - if len(subs) == 0 { - continue - } - isParallelTop := ssafunc.IsCalled(top, parallel) - isPararellSub := false - for _, sub := range subs { - isPararellSub = ssafunc.IsCalled(sub, parallel) - if isPararellSub { - break - } - } - - if ssafunc.IsDeferCalled(top) { - useCleanup := ssafunc.IsCalled(top, cleanup) - if isPararellSub && !useCleanup { - pass.Reportf(top.Pos(), "%s should use t.Cleanup instead of defer", top.Name()) - } - } - - if isParallelTop == isPararellSub { - continue - } else if isPararellSub { - pass.Reportf(top.Pos(), "%s should call t.Parallel on the top level as well as its subtests", top.Name()) - } else if isParallelTop { - pass.Reportf(top.Pos(), "%s's subtests should call t.Parallel", top.Name()) - } - } - - return nil, nil -} diff --git a/vendor/github.com/nakabonne/nestif/.gitignore b/vendor/github.com/nakabonne/nestif/.gitignore deleted file mode 100644 index df71a2ac7..000000000 --- a/vendor/github.com/nakabonne/nestif/.gitignore +++ /dev/null @@ -1,16 +0,0 @@ -# Binaries for programs and plugins -*.exe -*.exe~ -*.dll -*.so -*.dylib -/nestif - -# Test binary, built with `go test -c` -*.test - -# Output of the go coverage tool, specifically when used with LiteIDE -*.out - -# Dependency directories (remove the comment below to include it) -# vendor/ diff --git a/vendor/github.com/nakabonne/nestif/LICENSE b/vendor/github.com/nakabonne/nestif/LICENSE deleted file mode 100644 index ddf4d71ed..000000000 --- a/vendor/github.com/nakabonne/nestif/LICENSE +++ /dev/null @@ -1,25 +0,0 @@ -BSD 2-Clause License - -Copyright (c) 2020, Ryo Nakao -All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are met: - -1. 
Redistributions of source code must retain the above copyright notice, this - list of conditions and the following disclaimer. - -2. Redistributions in binary form must reproduce the above copyright notice, - this list of conditions and the following disclaimer in the documentation - and/or other materials provided with the distribution. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" -AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE -IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE -FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL -DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR -SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, -OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/vendor/github.com/nakabonne/nestif/README.md b/vendor/github.com/nakabonne/nestif/README.md deleted file mode 100644 index 37d370175..000000000 --- a/vendor/github.com/nakabonne/nestif/README.md +++ /dev/null @@ -1,128 +0,0 @@ -# nestif - -[![Go Doc](https://img.shields.io/badge/godoc-reference-blue.svg?style=flat-square)](http://godoc.org/github.com/nakabonne/nestif) - -Reports complex nested if statements in Go code, by calculating its complexities based on the rules defined by the [Cognitive Complexity white paper by G. Ann Campbell](https://www.sonarsource.com/docs/CognitiveComplexity.pdf). - -It helps you find if statements that make your code hard to read, and clarifies which parts to refactor. - -## Installation - -### By go get - -``` -go get github.com/nakabonne/nestif/cmd/nestif -``` - -### By golangci-lint - -`nestif` is already integrated with [golangci-lint](https://github.com/golangci/golangci-lint). Please refer to the instructions there and enable it. - -## Usage - -### Quick Start - -```bash -nestif -``` - -The `...` glob operator is supported, and the above is an equivalent of: - -```bash -nestif ./... -``` - -One or more files and directories can be specified in a single command: - -```bash -nestif dir/foo.go dir2 dir3/... -``` - -Packages can be specified as well: - -```bash -nestif github.com/foo/bar example.com/bar/baz -``` - -### Options - -``` -usage: nestif [ ...] ... - -e, --exclude-dirs strings regexps of directories to be excluded for checking; comma-separated list - --json emit json format - --min int minimum complexity to show (default 1) - --top int show only the top N most complex if statements (default 10) - -v, --verbose verbose output -``` - -### Example - -Let's say you write: - -```go -package main - -func _() { - if foo { - if bar { - } - } - - if baz == "baz" { - if qux { - if quux { - } - } - } -} -``` - -And give it to nestif: - -```console -$ nestif foo.go -foo.go:9:2: `if baz == "baz"` is nested (complexity: 3) -foo.go:4:2: `if foo` is nested (complexity: 1) -``` - -Note that the results are sorted in descending order of complexity. In addition, it shows only the top 10 most complex if statements by default, and you can specify how many to show with `-top` flag. - -### Rules - -It calculates the complexities of if statements according to the nesting rules of Cognitive Complexity. 
-Since the more deeply-nested your code gets, the harder it can be to reason about, it assesses a nesting increment for it: - -```go -if condition1 { - if condition2 { // +1 - if condition3 { // +2 - if condition4 { // +3 - } - } - } -} -``` - -`else` and `else if` increase complexity by one wherever they are because the mental cost has already been paid when reading the if: - -```go -if condition1 { - if condition2 { // +1 - if condition3 { // +2 - } else if condition4 { // +1 - } else { // +1 - if condition5 { // +3 - } - } - } -} -``` - -## Inspired by - -- [uudashr/gocognit](https://github.com/uudashr/gocognit) -- [fzipp/gocyclo](https://github.com/fzipp/gocyclo) - -## Further reading - -Please see the [Cognitive Complexity: A new way of measuring understandability](https://www.sonarsource.com/docs/CognitiveComplexity.pdf) white paper by G. Ann Campbell for more detail on Cognitive Complexity. diff --git a/vendor/github.com/nakabonne/nestif/nestif.go b/vendor/github.com/nakabonne/nestif/nestif.go deleted file mode 100644 index c4bad7f2d..000000000 --- a/vendor/github.com/nakabonne/nestif/nestif.go +++ /dev/null @@ -1,148 +0,0 @@ -// Copyright 2020 Ryo Nakao . -// -// All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Package nestif provides an API to detect complex nested if statements. -package nestif - -import ( - "bytes" - "fmt" - "go/ast" - "go/printer" - "go/token" - "io" -) - -// Issue represents an issue of root if statement that has nested ifs. -type Issue struct { - Pos token.Position - Complexity int - Message string -} - -// Checker represents a checker that finds nested if statements. -type Checker struct { - // Minimum complexity to report. - MinComplexity int - - // For debug mode. - debugWriter io.Writer - issues []Issue -} - -// Check inspects a single file and returns found issues. -func (c *Checker) Check(f *ast.File, fset *token.FileSet) []Issue { - c.issues = []Issue{} // refresh - ast.Inspect(f, func(n ast.Node) bool { - fn, ok := n.(*ast.FuncDecl) - if !ok || fn.Body == nil { - return true - } - for _, stmt := range fn.Body.List { - c.checkFunc(&stmt, fset) - } - return true - }) - - return c.issues -} - -// checkFunc inspects a function and sets a list of issues if there are. -func (c *Checker) checkFunc(stmt *ast.Stmt, fset *token.FileSet) { - ast.Inspect(*stmt, func(n ast.Node) bool { - ifStmt, ok := n.(*ast.IfStmt) - if !ok { - return true - } - - c.checkIf(ifStmt, fset) - return false - }) -} - -// checkIf inspects a if statement and sets an issue if there is. -func (c *Checker) checkIf(stmt *ast.IfStmt, fset *token.FileSet) { - v := newVisitor() - ast.Walk(v, stmt) - if v.complexity < c.MinComplexity { - return - } - pos := fset.Position(stmt.Pos()) - c.issues = append(c.issues, Issue{ - Pos: pos, - Complexity: v.complexity, - Message: c.makeMessage(v.complexity, stmt.Cond, fset), - }) -} - -type visitor struct { - complexity int - nesting int - // To avoid adding complexity including nesting level to `else if`. - elseifs map[*ast.IfStmt]bool -} - -func newVisitor() *visitor { - return &visitor{ - elseifs: make(map[*ast.IfStmt]bool), - } -} - -// Visit traverses an AST in depth-first order by calling itself -// recursively, and calculates the complexities of if statements. 
-func (v *visitor) Visit(n ast.Node) ast.Visitor { - ifStmt, ok := n.(*ast.IfStmt) - if !ok { - return v - } - - v.incComplexity(ifStmt) - v.nesting++ - ast.Walk(v, ifStmt.Body) - v.nesting-- - - switch t := ifStmt.Else.(type) { - case *ast.BlockStmt: - v.complexity++ - v.nesting++ - ast.Walk(v, t) - v.nesting-- - case *ast.IfStmt: - v.elseifs[t] = true - ast.Walk(v, t) - } - - return nil -} - -func (v *visitor) incComplexity(n *ast.IfStmt) { - // In case of `else if`, increase by 1. - if v.elseifs[n] { - v.complexity++ - } else { - v.complexity += v.nesting - } -} - -func (c *Checker) makeMessage(complexity int, cond ast.Expr, fset *token.FileSet) string { - p := &printer.Config{} - b := new(bytes.Buffer) - if err := p.Fprint(b, fset, cond); err != nil { - c.debug("failed to convert condition into string: %v", err) - } - return fmt.Sprintf("`if %s` has complex nested blocks (complexity: %d)", b.String(), complexity) -} - -// DebugMode makes it possible to emit debug logs. -func (c *Checker) DebugMode(w io.Writer) { - c.debugWriter = w -} - -func (c *Checker) debug(format string, a ...interface{}) { - if c.debugWriter != nil { - fmt.Fprintf(c.debugWriter, format, a...) - } -} diff --git a/vendor/github.com/nishanths/exhaustive/.gitignore b/vendor/github.com/nishanths/exhaustive/.gitignore deleted file mode 100644 index 94c13223b..000000000 --- a/vendor/github.com/nishanths/exhaustive/.gitignore +++ /dev/null @@ -1,12 +0,0 @@ -.DS_Store -*.swp -tags - -# binary -/cmd/exhaustive/exhaustive -/exhaustive - -# testing artifacts -/coverage.out - -/CHANGELOG.md diff --git a/vendor/github.com/nishanths/exhaustive/LICENSE b/vendor/github.com/nishanths/exhaustive/LICENSE deleted file mode 100644 index 32befa68f..000000000 --- a/vendor/github.com/nishanths/exhaustive/LICENSE +++ /dev/null @@ -1,25 +0,0 @@ -BSD 2-Clause License - -Copyright (c) 2020, Nishanth Shanmugham -All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are met: - -1. Redistributions of source code must retain the above copyright notice, this - list of conditions and the following disclaimer. - -2. Redistributions in binary form must reproduce the above copyright notice, - this list of conditions and the following disclaimer in the documentation - and/or other materials provided with the distribution. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" -AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE -IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE -FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL -DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR -SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, -OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/vendor/github.com/nishanths/exhaustive/Makefile b/vendor/github.com/nishanths/exhaustive/Makefile deleted file mode 100644 index 868f7fce2..000000000 --- a/vendor/github.com/nishanths/exhaustive/Makefile +++ /dev/null @@ -1,43 +0,0 @@ -.PHONY: default -default: build - -.PHONY: all -all: build vet test - -.PHONY: build -build: - go build ./... 
- go build ./cmd/exhaustive - -.PHONY: test -test: - go test -count=1 ./... - -.PHONY: testshort -testshort: - go test -short -count=1 ./... - -.PHONY: cover -cover: - go test -cover -coverprofile=coverage.out ./... - go tool cover -html=coverage.out - -.PHONY: install-vet -install-vet: - go install github.com/nishanths/exhaustive/cmd/exhaustive@latest - go install github.com/gordonklaus/ineffassign@latest - go install github.com/kisielk/errcheck@latest - go install honnef.co/go/tools/cmd/staticcheck@latest - -.PHONY: vet -vet: - go vet ./... - exhaustive ./... - ineffassign ./... - errcheck ./... - staticcheck -checks="inherit,-S1034" ./... - -.PHONY: upgrade-deps -upgrade-deps: - go get golang.org/x/tools - go mod tidy diff --git a/vendor/github.com/nishanths/exhaustive/README.md b/vendor/github.com/nishanths/exhaustive/README.md deleted file mode 100644 index dbb41ab9d..000000000 --- a/vendor/github.com/nishanths/exhaustive/README.md +++ /dev/null @@ -1,104 +0,0 @@ -# exhaustive - -[![Godoc][godoc-svg]][godoc] - -`exhaustive` checks exhaustiveness of enum switch statements in Go source code. - -For the definition of enum and the definition of exhaustiveness used by this -program, see [godoc][godoc-doc]. For the changelog, see [CHANGELOG][changelog] -in the GitHub wiki. The program can be configured to additionally check -exhaustiveness of keys in map literals whose key type is an enum. - -## Usage - -Command: - -``` -go install github.com/nishanths/exhaustive/cmd/exhaustive@latest - -exhaustive [flags] [packages] -``` - -For available flags, refer to the [Flags][godoc-flags] section in godoc or run -`exhaustive -h`. - -Package: - -``` -go get github.com/nishanths/exhaustive - -import "github.com/nishanths/exhaustive" -``` - -The `exhaustive.Analyzer` variable follows guidelines in the -[`golang.org/x/tools/go/analysis`][xanalysis] package. This should make it -possible to integrate `exhaustive` with your own analysis driver program. - -## Example - -Given an enum: - -``` -package token // import "example.org/token" - -type Token int - -const ( - Add Token = iota - Subtract - Multiply - Quotient - Remainder -) -``` - -and code that switches on the enum: - -``` -package calc - -import "example.org/token" - -func x(t token.Token) { - switch t { - case token.Add: - case token.Subtract: - case token.Remainder: - default: - } -} -``` - -running `exhaustive` with default flags will produce: - -``` -calc.go:6:2: missing cases in switch of type token.Token: token.Multiply, token.Quotient -``` - -Specify flag `-check=switch,map` to additionally check exhaustiveness of keys -in map literals. For example: - -``` -var m = map[token.Token]rune{ - token.Add: '+', - token.Subtract: '-', - token.Multiply: '*', - token.Quotient: '/', -} -``` - -``` -calc.go:14:9: missing keys in map of key type token.Token: token.Remainder -``` - -## Contributing - -Issues and changes are welcome. Please discuss substantial changes in an issue -first. 
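The exhaustive README above notes that `exhaustive.Analyzer` follows the `golang.org/x/tools/go/analysis` conventions and can be integrated into a custom analysis driver. A minimal sketch of such a driver is shown below; it assumes the module remains on the build list, which is no longer the case once this diff removes it from vendor.

```go
package main

import (
	"github.com/nishanths/exhaustive"
	"golang.org/x/tools/go/analysis/singlechecker"
)

func main() {
	// singlechecker wraps a single analyzer as a standalone, vet-style
	// command, e.g. `exhaustive ./...` after building this file.
	singlechecker.Main(exhaustive.Analyzer)
}
```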
- -[godoc]: https://pkg.go.dev/github.com/nishanths/exhaustive -[godoc-svg]: https://pkg.go.dev/badge/github.com/nishanths/exhaustive.svg -[godoc-doc]: https://pkg.go.dev/github.com/nishanths/exhaustive#section-documentation -[godoc-flags]: https://pkg.go.dev/github.com/nishanths/exhaustive#hdr-Flags -[xanalysis]: https://pkg.go.dev/golang.org/x/tools/go/analysis -[changelog]: https://github.com/nishanths/exhaustive/wiki/CHANGELOG diff --git a/vendor/github.com/nishanths/exhaustive/comment.go b/vendor/github.com/nishanths/exhaustive/comment.go deleted file mode 100644 index 123e0181b..000000000 --- a/vendor/github.com/nishanths/exhaustive/comment.go +++ /dev/null @@ -1,29 +0,0 @@ -package exhaustive - -import ( - "go/ast" - "go/token" - "strings" -) - -const ( - ignoreComment = "//exhaustive:ignore" - enforceComment = "//exhaustive:enforce" - ignoreDefaultCaseRequiredComment = "//exhaustive:ignore-default-case-required" - enforceDefaultCaseRequiredComment = "//exhaustive:enforce-default-case-required" -) - -func hasCommentPrefix(comments []*ast.CommentGroup, comment string) bool { - for _, c := range comments { - for _, cc := range c.List { - if strings.HasPrefix(cc.Text, comment) { - return true - } - } - } - return false -} - -func fileCommentMap(fset *token.FileSet, file *ast.File) ast.CommentMap { - return ast.NewCommentMap(fset, file, file.Comments) -} diff --git a/vendor/github.com/nishanths/exhaustive/comment_go121.go b/vendor/github.com/nishanths/exhaustive/comment_go121.go deleted file mode 100644 index a7bbc8881..000000000 --- a/vendor/github.com/nishanths/exhaustive/comment_go121.go +++ /dev/null @@ -1,11 +0,0 @@ -//go:build go1.21 - -package exhaustive - -import ( - "go/ast" -) - -func isGeneratedFile(file *ast.File) bool { - return ast.IsGenerated(file) -} diff --git a/vendor/github.com/nishanths/exhaustive/comment_pre_go121.go b/vendor/github.com/nishanths/exhaustive/comment_pre_go121.go deleted file mode 100644 index 28d2ed493..000000000 --- a/vendor/github.com/nishanths/exhaustive/comment_pre_go121.go +++ /dev/null @@ -1,27 +0,0 @@ -//go:build !go1.21 - -package exhaustive - -import ( - "go/ast" - "regexp" -) - -// For definition of generated file see: -// http://golang.org/s/generatedcode - -var generatedCodeRe = regexp.MustCompile(`^// Code generated .* DO NOT EDIT\.$`) - -func isGeneratedFile(file *ast.File) bool { - for _, c := range file.Comments { - for _, cc := range c.List { - if cc.Pos() > file.Package { - break - } - if generatedCodeRe.MatchString(cc.Text) { - return true - } - } - } - return false -} diff --git a/vendor/github.com/nishanths/exhaustive/common.go b/vendor/github.com/nishanths/exhaustive/common.go deleted file mode 100644 index f22b0e147..000000000 --- a/vendor/github.com/nishanths/exhaustive/common.go +++ /dev/null @@ -1,474 +0,0 @@ -package exhaustive - -import ( - "fmt" - "go/ast" - "go/token" - "go/types" - "regexp" - "sort" - "strings" - - "golang.org/x/tools/go/analysis" - "golang.org/x/tools/go/ast/astutil" -) - -// enumTypeAndMembers combines an enumType and its members set. -type enumTypeAndMembers struct { - typ enumType - members enumMembers -} - -func fromNamed(pass *analysis.Pass, t *types.Named, typeparam bool) (result []enumTypeAndMembers, ok bool) { - if tpkg := t.Obj().Pkg(); tpkg == nil { - // go/types documentation says: nil for labels and - // objects in the Universe scope. This happens for the built-in - // error type for example. 
- return nil, false // not a valid enum type, so ok == false - } - - et := enumType{t.Obj()} - if em, ok := importFact(pass, et); ok { - return []enumTypeAndMembers{{et, em}}, true - } - - if typeparam { - // is it a named interface? - if intf, ok := t.Underlying().(*types.Interface); ok { - return fromInterface(pass, intf, typeparam) - } - } - - return nil, false // not a valid enum type, so ok == false -} - -func fromInterface(pass *analysis.Pass, intf *types.Interface, typeparam bool) (result []enumTypeAndMembers, ok bool) { - allOk := true - for i := 0; i < intf.NumEmbeddeds(); i++ { - r, ok := fromType(pass, intf.EmbeddedType(i), typeparam) - result = append(result, r...) - allOk = allOk && ok - } - return result, allOk -} - -func fromUnion(pass *analysis.Pass, union *types.Union, typeparam bool) (result []enumTypeAndMembers, ok bool) { - allOk := true - // gather from each term in the union. - for i := 0; i < union.Len(); i++ { - r, ok := fromType(pass, union.Term(i).Type(), typeparam) - result = append(result, r...) - allOk = allOk && ok - } - return result, allOk -} - -func fromTypeParam(pass *analysis.Pass, tp *types.TypeParam, typeparam bool) (result []enumTypeAndMembers, ok bool) { - // Does not appear to be explicitly documented, but based on Go language - // spec (see section Type constraints) and Go standard library source code, - // we can expect constraints to have underlying type *types.Interface - // Regardless it will be handled in fromType. - return fromType(pass, tp.Constraint().Underlying(), typeparam) -} - -func fromType(pass *analysis.Pass, t types.Type, typeparam bool) (result []enumTypeAndMembers, ok bool) { - switch t := t.(type) { - case *types.Named: - return fromNamed(pass, t, typeparam) - - case *types.Union: - return fromUnion(pass, t, typeparam) - - case *types.TypeParam: - return fromTypeParam(pass, t, typeparam) - - case *types.Interface: - if !typeparam { - return nil, true - } - // anonymous interface. - // e.g. func foo[T interface { M } | interface { N }](v T) {} - return fromInterface(pass, t, typeparam) - - default: - // ignore these. - return nil, true - } -} - -func composingEnumTypes(pass *analysis.Pass, t types.Type) (result []enumTypeAndMembers, ok bool) { - _, typeparam := t.(*types.TypeParam) - result, ok = fromType(pass, t, typeparam) - - if typeparam { - var kind types.BasicKind - var kindSet bool - - // sameBasicKind reports whether each type t that the function is called - // with has the same underlying basic kind. - sameBasicKind := func(t types.Type) (ok bool) { - basic, ok := t.Underlying().(*types.Basic) - if !ok { - return false - } - if kindSet && kind != basic.Kind() { - return false - } - kind = basic.Kind() - kindSet = true - return true - } - - for _, rr := range result { - if !sameBasicKind(rr.typ.TypeName.Type()) { - ok = false - break - } - } - } - - return result, ok -} - -func denotesPackage(ident *ast.Ident, info *types.Info) bool { - obj := info.ObjectOf(ident) - if obj == nil { - return false - } - _, ok := obj.(*types.PkgName) - return ok -} - -// exprConstVal returns the constantValue for an expression if the -// expression is a constant value and if the expression is considered -// valid to satisfy exhaustiveness as defined by this program. -// Otherwise it returns (_, false). 
-func exprConstVal(e ast.Expr, info *types.Info) (constantValue, bool) { - handleIdent := func(ident *ast.Ident) (constantValue, bool) { - obj := info.Uses[ident] - if obj == nil { - return "", false - } - if _, ok := obj.(*types.Const); !ok { - return "", false - } - // There are two scenarios. - // See related test cases in typealias/quux/quux.go. - // - // # Scenario 1 - // - // Tag package and constant package are the same. This is - // simple; we just use fs.ModeDir's value. - // Example: - // - // var mode fs.FileMode - // switch mode { - // case fs.ModeDir: - // } - // - // # Scenario 2 - // - // Tag package and constant package are different. In this - // scenario, too, we accept the case clause expr constant value, - // as is. If the Go type checker is okay with the name being - // listed in the case clause, we don't care much further. - // - // Example: - // - // var mode fs.FileMode - // switch mode { - // case os.ModeDir: - // } - // - // Or equivalently: - // - // // The type of mode is effectively fs.FileMode, - // // due to type alias. - // var mode os.FileMode - // switch mode { - // case os.ModeDir: - // } - return determineConstVal(ident, info), true - } - - e = stripTypeConversions(astutil.Unparen(e), info) - - switch e := e.(type) { - case *ast.Ident: - return handleIdent(e) - - case *ast.SelectorExpr: - x := astutil.Unparen(e.X) - // Ensure we only see the form pkg.Const, and not e.g. - // structVal.f or structVal.inner.f. - // - // For this purpose, first we check that X, which is everything - // except the rightmost field selector *ast.Ident (the Sel - // field), is also an *ast.Ident. - xIdent, ok := x.(*ast.Ident) - if !ok { - return "", false - } - // Second, check that it's a package. It doesn't matter which - // package, just that it denotes some package. - if !denotesPackage(xIdent, info) { - return "", false - } - return handleIdent(e.Sel) - - default: - // e.g. literal - // these aren't considered towards satisfying exhaustiveness. - return "", false - } -} - -// stripTypeConversions removing type conversions from the experession. -func stripTypeConversions(e ast.Expr, info *types.Info) ast.Expr { - c, ok := e.(*ast.CallExpr) - if !ok { - return e - } - typ := info.TypeOf(c.Fun) - if typ == nil { - // can happen for built-ins. - return e - } - // do not allow function calls. - if _, ok := typ.Underlying().(*types.Signature); ok { - return e - } - // type conversions have exactly one arg. - if len(c.Args) != 1 { - return e - } - return stripTypeConversions(astutil.Unparen(c.Args[0]), info) -} - -// member is a single member of an enum type. -type member struct { - pos token.Pos - typ enumType - name string - val constantValue -} - -type checklist struct { - info map[enumType]enumMembers - checkl map[member]struct{} - ignoreConstantRe *regexp.Regexp - ignoreTypeRe *regexp.Regexp -} - -func (c *checklist) ignoreConstant(pattern *regexp.Regexp) { - c.ignoreConstantRe = pattern -} - -func (c *checklist) ignoreType(pattern *regexp.Regexp) { - c.ignoreTypeRe = pattern -} - -func (*checklist) reMatch(re *regexp.Regexp, s string) bool { - if re == nil { - return false - } - return re.MatchString(s) -} - -func (c *checklist) add(et enumType, em enumMembers, includeUnexported bool) { - addOne := func(name string) { - if isBlankIdentifier(name) { - // Blank identifier is often used to skip entries in iota - // lists. Also, it can't be referenced anywhere (e.g. 
can't - // be referenced in switch statement cases) It doesn't make - // sense to include it as required member to satisfy - // exhaustiveness. - return - } - if !ast.IsExported(name) && !includeUnexported { - return - } - if c.reMatch(c.ignoreConstantRe, fmt.Sprintf("%s.%s", et.Pkg().Path(), name)) { - return - } - if c.reMatch(c.ignoreTypeRe, fmt.Sprintf("%s.%s", et.Pkg().Path(), et.TypeName.Name())) { - return - } - mem := member{ - em.NameToPos[name], - et, - name, - em.NameToValue[name], - } - if c.checkl == nil { - c.checkl = make(map[member]struct{}) - } - c.checkl[mem] = struct{}{} - } - - if c.info == nil { - c.info = make(map[enumType]enumMembers) - } - c.info[et] = em - - for _, name := range em.Names { - addOne(name) - } -} - -func (c *checklist) found(val constantValue) { - // delete all same-valued items. - for et, em := range c.info { - for _, name := range em.ValueToNames[val] { - delete(c.checkl, member{ - em.NameToPos[name], - et, - name, - em.NameToValue[name], - }) - } - } -} - -func (c *checklist) remaining() map[member]struct{} { - return c.checkl -} - -// group is a collection of same-valued members, possibly from -// different enum types. -type group []member - -func groupify(items map[member]struct{}, types []enumType) []group { - // indices maps each element in the input slice to its index. - indices := func(vs []enumType) map[enumType]int { - ret := make(map[enumType]int, len(vs)) - for i, v := range vs { - ret[v] = i - } - return ret - } - - typesOrder := indices(types) // for quick lookup - astBefore := func(x, y member) bool { - if typesOrder[x.typ] < typesOrder[y.typ] { - return true - } - if typesOrder[x.typ] > typesOrder[y.typ] { - return false - } - return x.pos < y.pos - } - - // byConstVal groups member names by constant value. - byConstVal := func(items map[member]struct{}) map[constantValue][]member { - ret := make(map[constantValue][]member) - for m := range items { - ret[m.val] = append(ret[m.val], m) - } - return ret - } - - var groups []group - for _, ms := range byConstVal(items) { - groups = append(groups, group(ms)) - } - - // sort members within each group in AST order. - for i := range groups { - g := groups[i] - sort.Slice(g, func(i, j int) bool { return astBefore(g[i], g[j]) }) - groups[i] = g - } - // sort groups themselves in AST order. - // the index [0] access is safe, because there will be at least one - // element per group. - sort.Slice(groups, func(i, j int) bool { return astBefore(groups[i][0], groups[j][0]) }) - - return groups -} - -func diagnosticEnumType(enumType *types.TypeName) string { - return enumType.Pkg().Name() + "." + enumType.Name() -} - -func diagnosticEnumTypes(types []enumType) string { - var buf strings.Builder - for i := range types { - buf.WriteString(diagnosticEnumType(types[i].TypeName)) - if i != len(types)-1 { - buf.WriteByte('|') - } - } - return buf.String() -} - -func diagnosticMember(m member) string { - return m.typ.Pkg().Name() + "." 
+ m.name -} - -func diagnosticGroups(gs []group) string { - out := make([]string, len(gs)) - for i := range gs { - var buf strings.Builder - for j := range gs[i] { - buf.WriteString(diagnosticMember(gs[i][j])) - if j != len(gs[i])-1 { - buf.WriteByte('|') - } - } - out[i] = buf.String() - } - return strings.Join(out, ", ") -} - -func toEnumTypes(es []enumTypeAndMembers) []enumType { - out := make([]enumType, len(es)) - for i := range es { - out[i] = es[i].typ - } - return out -} - -func dedupEnumTypes(types []enumType) []enumType { - m := make(map[enumType]struct{}) - var ret []enumType - for _, t := range types { - _, ok := m[t] - if ok { - continue - } - m[t] = struct{}{} - ret = append(ret, t) - } - return ret -} - -type boolCache struct { - m map[*ast.File]bool - compute func(*ast.File) bool -} - -func (c *boolCache) get(file *ast.File) bool { - if _, ok := c.m[file]; !ok { - if c.m == nil { - c.m = make(map[*ast.File]bool) - } - c.m[file] = c.compute(file) - } - return c.m[file] -} - -type commentCache struct { - m map[*ast.File]ast.CommentMap - compute func(*token.FileSet, *ast.File) ast.CommentMap -} - -func (c *commentCache) get(fset *token.FileSet, file *ast.File) ast.CommentMap { - if _, ok := c.m[file]; !ok { - if c.m == nil { - c.m = make(map[*ast.File]ast.CommentMap) - } - c.m[file] = c.compute(fset, file) - } - return c.m[file] -} diff --git a/vendor/github.com/nishanths/exhaustive/doc.go b/vendor/github.com/nishanths/exhaustive/doc.go deleted file mode 100644 index a745247db..000000000 --- a/vendor/github.com/nishanths/exhaustive/doc.go +++ /dev/null @@ -1,216 +0,0 @@ -/* -Package exhaustive defines an analyzer that checks exhaustiveness of switch -statements of enum-like constants in Go source code. The analyzer can -optionally also check exhaustiveness of keys in map literals whose key type -is enum-like. - -# Definition of enum - -The Go [language spec] does not have an explicit definition for enums. For -the purpose of this analyzer, and by convention, an enum type is any named -type that: - - - has an [underlying type] of float, string, or integer (includes byte - and rune); and - - has at least one constant of its type defined in the same [block]. - -In the example below, Biome is an enum type. The three constants are its -enum members. - - package eco - - type Biome int - - const ( - Tundra Biome = 1 - Savanna Biome = 2 - Desert Biome = 3 - ) - -Enum member constants for an enum type must be declared in the same block as -the type. The constant values may be specified using iota, literal values, or -any valid means for declaring a Go constant. It is allowed for multiple enum -member constants for an enum type to have the same constant value. - -# Definition of exhaustiveness - -A switch statement that switches on a value of an enum type is exhaustive if -all enum members are listed in the switch statement's cases. If multiple enum -members have the same constant value, it is sufficient for any one of these -same-valued members to be listed. - -For an enum type defined in the same package as the switch statement, both -exported and unexported enum members must be listed to satisfy exhaustiveness. -For an enum type defined in an external package, it is sufficient that only -exported enum members are listed. Only constant identifiers (e.g. 
Tundra, -eco.Desert) listed in a switch statement's case clause can contribute towards -satisfying exhaustiveness; other expressions, such as literal values and -function calls, listed in case clauses do not contribute towards satisfying -exhaustiveness. - -By default, the existence of a default case in a switch statement does not -unconditionally make a switch statement exhaustive. Use the --default-signifies-exhaustive flag to adjust this behavior. - -For a map literal whose key type is an enum type, a similar definition of -exhaustiveness applies. The map literal is considered exhaustive if all enum -members are be listed in its keys. Empty map literals are never checked for -exhaustiveness. - -# Type parameters - -A switch statement that switches on a value whose type is a type parameter is -checked for exhaustiveness if and only if each type element in the type -constraint is an enum type and the type elements share the same underlying -[BasicKind]. - -For example, the switch statement below will be checked because each type -element (i.e. M and N) in the type constraint is an enum type and the type -elements share the same underlying BasicKind, namely int8. To satisfy -exhaustiveness, the enum members collectively belonging to the enum types M -and N (i.e. A, B, and C) must be listed in the switch statement's cases. - - func bar[T M | I](v T) { - switch v { - case T(A): - case T(B): - case T(C): - } - } - - type I interface{ N } - - type M int8 - const A M = 1 - - type N int8 - const B N = 2 - const C N = 3 - -# Type aliases - -The analyzer handles type aliases as shown in the example below. newpkg.M is -an enum type. oldpkg.M is an alias for newpkg.M. Note that oldpkg.M isn't -itself an enum type; oldpkg.M is simply an alias for the actual enum type -newpkg.M. - - package oldpkg - type M = newpkg.M - const ( - A = newpkg.A - B = newpkg.B - ) - - package newpkg - type M int - const ( - A M = 1 - B M = 2 - ) - -A switch statement that switches either on a value of type newpkg.M or of type -oldpkg.M (which, being an alias, is just an alternative spelling for newpkg.M) -is exhaustive if all of newpkg.M's enum members are listed in the switch -statement's cases. The following switch statement is exhaustive. - - func f(v newpkg.M) { - switch v { - case newpkg.A: // or equivalently oldpkg.A - case newpkg.B: // or equivalently oldpkg.B - } - } - -The analyzer guarantees that introducing a type alias (such as type M = -newpkg.M) will not result in new diagnostics if the set of enum member -constant values of the RHS type is a subset of the set of enum member constant -values of the LHS type. - -# Flags - -Summary: - - flag type default value - ---- ---- ------------- - -check comma-separated strings switch - -explicit-exhaustive-switch bool false - -explicit-exhaustive-map bool false - -check-generated bool false - -default-signifies-exhaustive bool false - -ignore-enum-members regexp pattern (none) - -ignore-enum-types regexp pattern (none) - -package-scope-only bool false - -Descriptions: - - -check - Comma-separated list of program elements to check for - exhaustiveness. Supported program element values are - "switch" and "map". The default value is "switch", which - means that only switch statements are checked. - - -explicit-exhaustive-switch - Check a switch statement only if it is associated with a - "//exhaustive:enforce" comment. By default the analyzer - checks every switch statement that isn't associated with a - "//exhaustive:ignore" comment. 
- - -explicit-exhaustive-map - Similar to -explicit-exhaustive-switch but for map literals. - - -check-generated - Check generated files. For the definition of a generated - file, see https://golang.org/s/generatedcode. - - -default-signifies-exhaustive - Consider a switch statement to be exhaustive - unconditionally if it has a default case. (In other words, - all enum members do not have to be listed in its cases if a - default case is present.) Setting this flag usually is - counter to the purpose of exhaustiveness checks, so it is - not recommended to set this flag. - - -ignore-enum-members - Constants that match the specified regular expression (in - package regexp syntax) are not considered enum members and - hence do not have to be listed to satisfy exhaustiveness. - The specified regular expression is matched against the - constant name inclusive of import path. For example, if the - import path for the constant is "example.org/eco" and the - constant name is "Tundra", then the specified regular - expression is matched against the string - "example.org/eco.Tundra". - - -ignore-enum-types - Similar to -ignore-enum-members but for types. - - -package-scope-only - Only discover enums declared in file-level blocks. By - default, the analyzer discovers enums defined in all - blocks. - -# Skip analysis - -To skip analysis of a switch statement or a map literal, associate it with a -comment that begins with "//exhaustive:ignore". For example: - - //exhaustive:ignore ... an optional explanation goes here ... - switch v { - case A: - case B: - } - -To ignore specific constants in exhaustiveness checks, specify the --ignore-enum-members flag: - - exhaustive -ignore-enum-members '^example\.org/eco\.Tundra$' - -To ignore specific types, specify the -ignore-enum-types flag: - - exhaustive -ignore-enum-types '^time\.Duration$|^example\.org/measure\.Unit$' - -[language spec]: https://golang.org/ref/spec -[underlying type]: https://golang.org/ref/spec#Underlying_types -[block]: https://golang.org/ref/spec#Blocks -[BasicKind]: https://pkg.go.dev/go/types#BasicKind -*/ -package exhaustive diff --git a/vendor/github.com/nishanths/exhaustive/enum.go b/vendor/github.com/nishanths/exhaustive/enum.go deleted file mode 100644 index cabf1d880..000000000 --- a/vendor/github.com/nishanths/exhaustive/enum.go +++ /dev/null @@ -1,177 +0,0 @@ -package exhaustive - -import ( - "fmt" - "go/ast" - "go/token" - "go/types" - "strings" - - "golang.org/x/tools/go/ast/inspector" -) - -// constantValue is a (constant.Value).ExactString value. -type constantValue string - -// enumType represents an enum type as defined by this program, which -// effectively is a defined (named) type. -type enumType struct{ *types.TypeName } - -func (et enumType) String() string { return et.TypeName.String() } // for debugging -func (et enumType) scope() *types.Scope { return et.TypeName.Parent() } // scope that the type is declared in -func (et enumType) factObject() types.Object { return et.TypeName } // types.Object for fact export - -// enumMembers is the set of enum members for a single enum type. -// The zero value is ready to use. -type enumMembers struct { - Names []string // enum member names - NameToPos map[string]token.Pos // enum member name -> AST position - NameToValue map[string]constantValue // enum member name -> constant value - ValueToNames map[constantValue][]string // constant value -> enum member names -} - -// add adds an enum member to the set. 
-func (em *enumMembers) add(name string, val constantValue, pos token.Pos) { - if em.NameToPos == nil { - em.NameToPos = make(map[string]token.Pos) - } - if em.NameToValue == nil { - em.NameToValue = make(map[string]constantValue) - } - if em.ValueToNames == nil { - em.ValueToNames = make(map[constantValue][]string) - } - em.Names = append(em.Names, name) - em.NameToPos[name] = pos - em.NameToValue[name] = val - em.ValueToNames[val] = append(em.ValueToNames[val], name) -} - -func (em *enumMembers) String() string { - return em.factString() -} - -func (em *enumMembers) factString() string { - var buf strings.Builder - for j, vv := range em.Names { - buf.WriteString(vv) - // add comma separator between each enum member - if j != len(em.Names)-1 { - buf.WriteString(",") - } - } - return buf.String() -} - -func findEnums(pkgScopeOnly bool, pkg *types.Package, inspect *inspector.Inspector, info *types.Info) map[enumType]enumMembers { - result := make(map[enumType]enumMembers) - - inspect.Preorder([]ast.Node{&ast.GenDecl{}}, func(n ast.Node) { - gen := n.(*ast.GenDecl) - if gen.Tok != token.CONST { - return - } - for _, s := range gen.Specs { - for _, name := range s.(*ast.ValueSpec).Names { - enumTyp, memberName, val, ok := possibleEnumMember(name, info) - if !ok { - continue - } - if pkgScopeOnly && enumTyp.scope() != pkg.Scope() { - continue - } - v := result[enumTyp] - v.add(memberName, val, name.Pos()) - result[enumTyp] = v - } - } - }) - - return result -} - -func possibleEnumMember(constName *ast.Ident, info *types.Info) (et enumType, name string, val constantValue, ok bool) { - // Notes - // - // type T int - // const A T = iota // obj.Type() is T - // - // type R T - // const B R = iota // obj.Type() is R - // - // type T2 int - // type T1 = T2 - // const C T1 = iota // obj.Type() is T2 - // - // type T3 = T4 - // type T4 int - // type T5 = T3 - // const D T5 = iota // obj.Type() is T4 - // - // In all these cases, validNamedBasic(obj.Type()) == true. - - obj := info.Defs[constName] - if obj == nil { - panic(fmt.Sprintf("info.Defs[%s] == nil", constName)) - } - if _, ok = obj.(*types.Const); !ok { - panic(fmt.Sprintf("obj must be *types.Const, got %T", obj)) - } - if isBlankIdentifier(obj.Name()) { - // These objects have a nil parent scope. - // Also, we have no real purpose to record them. - return enumType{}, "", "", false - } - if !validNamedBasic(obj.Type()) { - return enumType{}, "", "", false - } - - named := obj.Type().(*types.Named) // guaranteed by validNamedBasic - tn := named.Obj() - - // By definition, enum type's scope and enum member's scope must be the - // same. If they're not, don't consider the const a member. Additionally, - // the enum type and the enum member must be in the same package (the - // scope check accounts for this, too). 
- if tn.Parent() != obj.Parent() { - return enumType{}, "", "", false - } - - return enumType{tn}, obj.Name(), determineConstVal(constName, info), true -} - -func determineConstVal(name *ast.Ident, info *types.Info) constantValue { - c := info.ObjectOf(name).(*types.Const) - return constantValue(c.Val().ExactString()) -} - -func isBlankIdentifier(name string) bool { - return name == "_" // NOTE: go/types/decl.go does a direct comparison like this -} - -func validBasic(basic *types.Basic) bool { - switch i := basic.Info(); { - case i&types.IsInteger != 0, i&types.IsFloat != 0, i&types.IsString != 0: - return true - } - return false -} - -// validNamedBasic returns whether the type t is a named type whose underlying -// type is a valid basic type to form an enum. A type that passes this check -// meets the definition of an enum type. -// -// The following is guaranteed: -// -// validNamedBasic(t) == true => t.(*types.Named) -func validNamedBasic(t types.Type) bool { - named, ok := t.(*types.Named) - if !ok { - return false - } - basic, ok := named.Underlying().(*types.Basic) - if !ok || !validBasic(basic) { - return false - } - return true -} diff --git a/vendor/github.com/nishanths/exhaustive/exhaustive.go b/vendor/github.com/nishanths/exhaustive/exhaustive.go deleted file mode 100644 index 013ac47bb..000000000 --- a/vendor/github.com/nishanths/exhaustive/exhaustive.go +++ /dev/null @@ -1,152 +0,0 @@ -package exhaustive - -import ( - "fmt" - "go/ast" - - "golang.org/x/tools/go/analysis" - "golang.org/x/tools/go/analysis/passes/inspect" - "golang.org/x/tools/go/ast/inspector" -) - -func init() { - registerFlags() -} - -func registerFlags() { - Analyzer.Flags.Var(&fCheck, CheckFlag, "comma-separated list of program `elements` to check for exhaustiveness; supported element values: switch, map") - Analyzer.Flags.BoolVar(&fExplicitExhaustiveSwitch, ExplicitExhaustiveSwitchFlag, false, `check switch statement only if associated with "//exhaustive:enforce" comment`) - Analyzer.Flags.BoolVar(&fExplicitExhaustiveMap, ExplicitExhaustiveMapFlag, false, `check map literal only if associated with "//exhaustive:enforce" comment`) - Analyzer.Flags.BoolVar(&fCheckGenerated, CheckGeneratedFlag, false, "check generated files") - Analyzer.Flags.BoolVar(&fDefaultSignifiesExhaustive, DefaultSignifiesExhaustiveFlag, false, "switch statement is unconditionally exhaustive if it has a default case") - Analyzer.Flags.BoolVar(&fDefaultCaseRequired, DefaultCaseRequiredFlag, false, "switch statement requires default case even if exhaustive") - Analyzer.Flags.Var(&fIgnoreEnumMembers, IgnoreEnumMembersFlag, "ignore constants matching `regexp`") - Analyzer.Flags.Var(&fIgnoreEnumTypes, IgnoreEnumTypesFlag, "ignore types matching `regexp`") - Analyzer.Flags.BoolVar(&fPackageScopeOnly, PackageScopeOnlyFlag, false, "only discover enums declared in file-level blocks") - - var unused string - Analyzer.Flags.StringVar(&unused, IgnorePatternFlag, "", "no effect (deprecated); use -"+IgnoreEnumMembersFlag) - Analyzer.Flags.StringVar(&unused, CheckingStrategyFlag, "", "no effect (deprecated)") -} - -// Flag names used by the analyzer. These are exported for use by analyzer -// driver programs. 
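A minimal sketch of such a driver program, assuming the standard golang.org/x/tools singlechecker helper (the driver layout below is illustrative and is not part of this package):

```go
package main

import (
	"github.com/nishanths/exhaustive"
	"golang.org/x/tools/go/analysis/singlechecker"
)

func main() {
	// Flags registered on exhaustive.Analyzer.Flags (e.g. -check,
	// -ignore-enum-members) become command-line flags of this driver.
	singlechecker.Main(exhaustive.Analyzer)
}
```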
-const ( - CheckFlag = "check" - ExplicitExhaustiveSwitchFlag = "explicit-exhaustive-switch" - ExplicitExhaustiveMapFlag = "explicit-exhaustive-map" - CheckGeneratedFlag = "check-generated" - DefaultSignifiesExhaustiveFlag = "default-signifies-exhaustive" - DefaultCaseRequiredFlag = "default-case-required" - IgnoreEnumMembersFlag = "ignore-enum-members" - IgnoreEnumTypesFlag = "ignore-enum-types" - PackageScopeOnlyFlag = "package-scope-only" - - // Deprecated flag names. - IgnorePatternFlag = "ignore-pattern" // Deprecated: use IgnoreEnumMembersFlag. - CheckingStrategyFlag = "checking-strategy" // Deprecated: no longer applicable. -) - -// Flag values. -var ( - fCheck = stringsFlag{elements: defaultCheckElements, filter: validCheckElement} - fExplicitExhaustiveSwitch bool - fExplicitExhaustiveMap bool - fCheckGenerated bool - fDefaultSignifiesExhaustive bool - fDefaultCaseRequired bool - fIgnoreEnumMembers regexpFlag - fIgnoreEnumTypes regexpFlag - fPackageScopeOnly bool -) - -// resetFlags resets the flag variables to default values. -// Useful in tests. -func resetFlags() { - fCheck = stringsFlag{elements: defaultCheckElements, filter: validCheckElement} - fExplicitExhaustiveSwitch = false - fExplicitExhaustiveMap = false - fCheckGenerated = false - fDefaultSignifiesExhaustive = false - fDefaultCaseRequired = false - fIgnoreEnumMembers = regexpFlag{} - fIgnoreEnumTypes = regexpFlag{} - fPackageScopeOnly = false -} - -// checkElement is a program element supported by the -check flag. -type checkElement string - -const ( - elementSwitch checkElement = "switch" - elementMap checkElement = "map" -) - -func validCheckElement(s string) error { - switch checkElement(s) { - case elementSwitch: - return nil - case elementMap: - return nil - default: - return fmt.Errorf("invalid program element %q", s) - } -} - -var defaultCheckElements = []string{ - string(elementSwitch), -} - -var Analyzer = &analysis.Analyzer{ - Name: "exhaustive", - Doc: "check exhaustiveness of enum switch statements", - Run: run, - Requires: []*analysis.Analyzer{inspect.Analyzer}, - FactTypes: []analysis.Fact{&enumMembersFact{}}, -} - -func run(pass *analysis.Pass) (interface{}, error) { - inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) - - for typ, members := range findEnums(fPackageScopeOnly, pass.Pkg, inspect, pass.TypesInfo) { - exportFact(pass, typ, members) - } - - generated := boolCache{compute: isGeneratedFile} - comments := commentCache{compute: fileCommentMap} - - // NOTE: should not share the same inspect.WithStack call for different - // program elements: the visitor function for a program element may - // exit traversal early, but this shouldn't affect traversal for - // other program elements. 
- for _, e := range fCheck.elements { - switch checkElement(e) { - case elementSwitch: - conf := switchConfig{ - explicit: fExplicitExhaustiveSwitch, - defaultSignifiesExhaustive: fDefaultSignifiesExhaustive, - defaultCaseRequired: fDefaultCaseRequired, - checkGenerated: fCheckGenerated, - ignoreConstant: fIgnoreEnumMembers.re, - ignoreType: fIgnoreEnumTypes.re, - } - checker := switchChecker(pass, conf, generated, comments) - inspect.WithStack([]ast.Node{&ast.SwitchStmt{}}, toVisitor(checker)) - - case elementMap: - conf := mapConfig{ - explicit: fExplicitExhaustiveMap, - checkGenerated: fCheckGenerated, - ignoreConstant: fIgnoreEnumMembers.re, - ignoreType: fIgnoreEnumTypes.re, - } - checker := mapChecker(pass, conf, generated, comments) - inspect.WithStack([]ast.Node{&ast.CompositeLit{}}, toVisitor(checker)) - - default: - panic(fmt.Sprintf("unknown checkElement %v", e)) - } - } - - return nil, nil -} diff --git a/vendor/github.com/nishanths/exhaustive/fact.go b/vendor/github.com/nishanths/exhaustive/fact.go deleted file mode 100644 index ecf0907b2..000000000 --- a/vendor/github.com/nishanths/exhaustive/fact.go +++ /dev/null @@ -1,28 +0,0 @@ -package exhaustive - -import "golang.org/x/tools/go/analysis" - -// NOTE: Fact types must remain gob-coding compatible. -// See TestFactsGob. - -var _ analysis.Fact = (*enumMembersFact)(nil) - -type enumMembersFact struct{ Members enumMembers } - -func (f *enumMembersFact) AFact() {} -func (f *enumMembersFact) String() string { return f.Members.factString() } - -// exportFact exports the enum members for the given enum type. -func exportFact(pass *analysis.Pass, enumTyp enumType, members enumMembers) { - pass.ExportObjectFact(enumTyp.factObject(), &enumMembersFact{members}) -} - -// importFact imports the enum members for the given possible enum type. -// An (_, false) return indicates that the enum type is not a known one. -func importFact(pass *analysis.Pass, possibleEnumType enumType) (enumMembers, bool) { - var f enumMembersFact - if !pass.ImportObjectFact(possibleEnumType.factObject(), &f) { - return enumMembers{}, false - } - return f.Members, true -} diff --git a/vendor/github.com/nishanths/exhaustive/flag.go b/vendor/github.com/nishanths/exhaustive/flag.go deleted file mode 100644 index 49d3d3c6c..000000000 --- a/vendor/github.com/nishanths/exhaustive/flag.go +++ /dev/null @@ -1,75 +0,0 @@ -package exhaustive - -import ( - "flag" - "regexp" - "strings" -) - -var _ flag.Value = (*regexpFlag)(nil) -var _ flag.Value = (*stringsFlag)(nil) - -// regexpFlag implements flag.Value for parsing -// regular expression flag inputs. -type regexpFlag struct{ re *regexp.Regexp } - -func (f *regexpFlag) String() string { - if f == nil || f.re == nil { - return "" - } - return f.re.String() -} - -func (f *regexpFlag) Set(expr string) error { - if expr == "" { - f.re = nil - return nil - } - - re, err := regexp.Compile(expr) - if err != nil { - return err - } - - f.re = re - return nil -} - -// stringsFlag implements flag.Value for parsing a comma-separated string -// list. Surrounding whitespace is stripped from the input and from each -// element. If filter is non-nil it is called for each element in the input. 
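A short sketch of the parsing behaviour described above, driving the flag programmatically through the exported Analyzer (equivalent to passing -check="switch, map" to a driver; error handling omitted):

```go
package main

import "github.com/nishanths/exhaustive"

func main() {
	// The comma-separated value is split and each element is
	// whitespace-trimmed, yielding the elements "switch" and "map".
	_ = exhaustive.Analyzer.Flags.Set(exhaustive.CheckFlag, "switch, map")
}
```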
-type stringsFlag struct { - elements []string - filter func(string) error -} - -func (f *stringsFlag) String() string { - if f == nil { - return "" - } - return strings.Join(f.elements, ",") -} - -func (f *stringsFlag) filterFunc() func(string) error { - if f.filter != nil { - return f.filter - } - return func(_ string) error { return nil } -} - -func (f *stringsFlag) Set(input string) error { - input = strings.TrimSpace(input) - if input == "" { - f.elements = nil - return nil - } - - for _, el := range strings.Split(input, ",") { - el = strings.TrimSpace(el) - if err := f.filterFunc()(el); err != nil { - return err - } - f.elements = append(f.elements, el) - } - return nil -} diff --git a/vendor/github.com/nishanths/exhaustive/map.go b/vendor/github.com/nishanths/exhaustive/map.go deleted file mode 100644 index 23b4bef1c..000000000 --- a/vendor/github.com/nishanths/exhaustive/map.go +++ /dev/null @@ -1,134 +0,0 @@ -package exhaustive - -import ( - "fmt" - "go/ast" - "go/types" - "regexp" - - "golang.org/x/tools/go/analysis" -) - -// mapConfig is configuration for mapChecker. -type mapConfig struct { - explicit bool - checkGenerated bool - ignoreConstant *regexp.Regexp // can be nil - ignoreType *regexp.Regexp // can be nil -} - -// mapChecker returns a node visitor that checks for exhaustiveness of -// map literals for the supplied pass, and reports diagnostics. The -// node visitor expects only *ast.CompositeLit nodes. -func mapChecker(pass *analysis.Pass, cfg mapConfig, generated boolCache, comments commentCache) nodeVisitor { - return func(n ast.Node, push bool, stack []ast.Node) (bool, string) { - if !push { - return true, resultNotPush - } - - file := stack[0].(*ast.File) - - if !cfg.checkGenerated && generated.get(file) { - return false, resultGeneratedFile - } - - lit := n.(*ast.CompositeLit) - - mapType, ok := pass.TypesInfo.Types[lit.Type].Type.(*types.Map) - if !ok { - namedType, ok2 := pass.TypesInfo.Types[lit.Type].Type.(*types.Named) - if !ok2 { - return true, resultNotMapLiteral - } - mapType, ok = namedType.Underlying().(*types.Map) - if !ok { - return true, resultNotMapLiteral - } - } - - if len(lit.Elts) == 0 { - return false, resultEmptyMapLiteral - } - - fileComments := comments.get(pass.Fset, file) - var relatedComments []*ast.CommentGroup - for i := range stack { - // iterate over stack in the reverse order (from inner - // node to outer node) - node := stack[len(stack)-1-i] - switch node.(type) { - // need to check comments associated with following nodes, - // because logic of ast package doesn't associate comment - // with *ast.CompositeLit as required. - case *ast.CompositeLit, // stack[len(stack)-1] - *ast.ReturnStmt, // return ... - *ast.IndexExpr, // map[enum]...{...}[key] - *ast.CallExpr, // myfunc(map...) - *ast.UnaryExpr, // &map... - *ast.AssignStmt, // variable assignment (without var keyword) - *ast.DeclStmt, // var declaration, parent of *ast.GenDecl - *ast.GenDecl, // var declaration, parent of *ast.ValueSpec - *ast.ValueSpec: // var declaration - relatedComments = append(relatedComments, fileComments[node]...) - continue - default: - // stop iteration on the first inappropriate node - break - } - } - - if !cfg.explicit && hasCommentPrefix(relatedComments, ignoreComment) { - // Skip checking of this map literal due to ignore - // comment. Still return true because there may be nested - // map literals that are not to be ignored. 
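For illustration, a map literal that this path skips because of the ignore directive (the variable name is made up, reusing the Biome enum from the package documentation):

```go
//exhaustive:ignore -- deliberately partial mapping
var biomeNames = map[Biome]string{
	Tundra: "tundra",
}
```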
- return true, resultIgnoreComment - } - if cfg.explicit && !hasCommentPrefix(relatedComments, enforceComment) { - return true, resultNoEnforceComment - } - - es, ok := composingEnumTypes(pass, mapType.Key()) - if !ok || len(es) == 0 { - return true, resultEnumTypes - } - - var checkl checklist - checkl.ignoreConstant(cfg.ignoreConstant) - checkl.ignoreType(cfg.ignoreType) - - for _, e := range es { - checkl.add(e.typ, e.members, pass.Pkg == e.typ.Pkg()) - } - - analyzeMapLiteral(lit, pass.TypesInfo, checkl.found) - if len(checkl.remaining()) == 0 { - return true, resultEnumMembersAccounted - } - pass.Report(makeMapDiagnostic(lit, dedupEnumTypes(toEnumTypes(es)), checkl.remaining())) - return true, resultReportedDiagnostic - } -} - -func analyzeMapLiteral(lit *ast.CompositeLit, info *types.Info, each func(constantValue)) { - for _, e := range lit.Elts { - expr, ok := e.(*ast.KeyValueExpr) - if !ok { - continue - } - if val, ok := exprConstVal(expr.Key, info); ok { - each(val) - } - } -} - -func makeMapDiagnostic(lit *ast.CompositeLit, enumTypes []enumType, missing map[member]struct{}) analysis.Diagnostic { - return analysis.Diagnostic{ - Pos: lit.Pos(), - End: lit.End(), - Message: fmt.Sprintf( - "missing keys in map of key type %s: %s", - diagnosticEnumTypes(enumTypes), - diagnosticGroups(groupify(missing, enumTypes)), - ), - } -} diff --git a/vendor/github.com/nishanths/exhaustive/switch.go b/vendor/github.com/nishanths/exhaustive/switch.go deleted file mode 100644 index d235fbec4..000000000 --- a/vendor/github.com/nishanths/exhaustive/switch.go +++ /dev/null @@ -1,236 +0,0 @@ -package exhaustive - -import ( - "fmt" - "go/ast" - "go/types" - "regexp" - "strings" - - "golang.org/x/tools/go/analysis" -) - -// nodeVisitor is like the visitor function used by inspector.WithStack, -// except that it returns an additional value: a short description of -// the result of this node visit. -// -// The result value is typically useful in debugging or in unit tests to check -// that the nodeVisitor function took the expected code path. -type nodeVisitor func(n ast.Node, push bool, stack []ast.Node) (proceed bool, result string) - -// toVisitor converts a nodeVisitor to a function suitable for use -// with inspector.WithStack. -func toVisitor(v nodeVisitor) func(ast.Node, bool, []ast.Node) bool { - return func(node ast.Node, push bool, stack []ast.Node) bool { - proceed, _ := v(node, push, stack) - return proceed - } -} - -// Result values returned by node visitors. -const ( - resultEmptyMapLiteral = "empty map literal" - resultNotMapLiteral = "not map literal" - resultKeyNilPkg = "nil map key package" - resultKeyNotEnum = "not all map key type terms are known enum types" - - resultNoSwitchTag = "no switch tag" - resultTagNotValue = "switch tag not value type" - resultTagNilPkg = "nil switch tag package" - resultTagNotEnum = "not all switch tag terms are known enum types" - - resultNotPush = "not push" - resultGeneratedFile = "generated file" - resultIgnoreComment = "has ignore comment" - resultNoEnforceComment = "has no enforce comment" - resultEnumMembersAccounted = "required enum members accounted for" - resultDefaultCaseSuffices = "default case satisfies exhaustiveness" - resultMissingDefaultCase = "missing required default case" - resultReportedDiagnostic = "reported diagnostic" - resultEnumTypes = "invalid or empty composing enum types" -) - -// switchConfig is configuration for switchChecker. 
-type switchConfig struct { - explicit bool - defaultSignifiesExhaustive bool - defaultCaseRequired bool - checkGenerated bool - ignoreConstant *regexp.Regexp // can be nil - ignoreType *regexp.Regexp // can be nil -} - -// There are few possibilities, and often none, so we use a possibly-nil slice -func userDirectives(comments []*ast.CommentGroup) []string { - var directives []string - for _, c := range comments { - for _, cc := range c.List { - // The order matters here: we always want to check the longest first. - for _, d := range []string{ - enforceDefaultCaseRequiredComment, - ignoreDefaultCaseRequiredComment, - enforceComment, - ignoreComment, - } { - if strings.HasPrefix(cc.Text, d) { - directives = append(directives, d) - // The break here is important: once we associate a comment - // with a particular (longest-possible) directive, we don't want - // to map to another! - break - } - } - } - } - return directives -} - -// Can be replaced with slices.Contains with go1.21 -func directivesIncludes(directives []string, d string) bool { - for _, ud := range directives { - if ud == d { - return true - } - } - return false -} - -// switchChecker returns a node visitor that checks exhaustiveness of -// enum switch statements for the supplied pass, and reports -// diagnostics. The node visitor expects only *ast.SwitchStmt nodes. -func switchChecker(pass *analysis.Pass, cfg switchConfig, generated boolCache, comments commentCache) nodeVisitor { - return func(n ast.Node, push bool, stack []ast.Node) (bool, string) { - if !push { - // The proceed return value should not matter; it is ignored by - // inspector package for pop calls. - // Nevertheless, return true to be on the safe side for the future. - return true, resultNotPush - } - - file := stack[0].(*ast.File) - - if !cfg.checkGenerated && generated.get(file) { - // Don't check this file. - // Return false because the children nodes of node `n` don't have to be checked. - return false, resultGeneratedFile - } - - sw := n.(*ast.SwitchStmt) - - switchComments := comments.get(pass.Fset, file)[sw] - uDirectives := userDirectives(switchComments) - if !cfg.explicit && directivesIncludes(uDirectives, ignoreComment) { - // Skip checking of this switch statement due to ignore - // comment. Still return true because there may be nested - // switch statements that are not to be ignored. - return true, resultIgnoreComment - } - if cfg.explicit && !directivesIncludes(uDirectives, enforceComment) { - // Skip checking of this switch statement due to missing - // enforce comment. - return true, resultNoEnforceComment - } - requireDefaultCase := cfg.defaultCaseRequired - if directivesIncludes(uDirectives, ignoreDefaultCaseRequiredComment) { - requireDefaultCase = false - } - if directivesIncludes(uDirectives, enforceDefaultCaseRequiredComment) { - // We have "if" instead of "else if" here in case of conflicting ignore/enforce directives. - // In that case, because this is second, we will default to enforcing. - requireDefaultCase = true - } - - if sw.Tag == nil { - return true, resultNoSwitchTag // never possible for valid Go program? 
- } - - t := pass.TypesInfo.Types[sw.Tag] - if !t.IsValue() { - return true, resultTagNotValue - } - - es, ok := composingEnumTypes(pass, t.Type) - if !ok || len(es) == 0 { - return true, resultEnumTypes - } - - var checkl checklist - checkl.ignoreConstant(cfg.ignoreConstant) - checkl.ignoreType(cfg.ignoreType) - - for _, e := range es { - checkl.add(e.typ, e.members, pass.Pkg == e.typ.Pkg()) - } - - defaultCaseExists := analyzeSwitchClauses(sw, pass.TypesInfo, checkl.found) - if !defaultCaseExists && requireDefaultCase { - // Even if the switch explicitly enumerates all the - // enum values, the user has still required all switches - // to have a default case. We check this first to avoid - // early-outs - pass.Report(makeMissingDefaultDiagnostic(sw, dedupEnumTypes(toEnumTypes(es)))) - return true, resultMissingDefaultCase - } - if len(checkl.remaining()) == 0 { - // All enum members accounted for. - // Nothing to report. - return true, resultEnumMembersAccounted - } - if defaultCaseExists && cfg.defaultSignifiesExhaustive { - // Though enum members are not accounted for, the - // existence of the default case signifies - // exhaustiveness. So don't report. - return true, resultDefaultCaseSuffices - } - pass.Report(makeSwitchDiagnostic(sw, dedupEnumTypes(toEnumTypes(es)), checkl.remaining())) - return true, resultReportedDiagnostic - } -} - -func isDefaultCase(c *ast.CaseClause) bool { - return c.List == nil // see doc comment on List field -} - -// analyzeSwitchClauses analyzes the clauses in the supplied switch -// statement. The info param typically is pass.TypesInfo. The each -// function is called for each enum member name found in the switch -// statement. The hasDefaultCase return value indicates whether the -// switch statement has a default clause. -func analyzeSwitchClauses(sw *ast.SwitchStmt, info *types.Info, each func(val constantValue)) (hasDefaultCase bool) { - for _, stmt := range sw.Body.List { - caseCl := stmt.(*ast.CaseClause) - if isDefaultCase(caseCl) { - hasDefaultCase = true - continue - } - for _, expr := range caseCl.List { - if val, ok := exprConstVal(expr, info); ok { - each(val) - } - } - } - return hasDefaultCase -} - -func makeSwitchDiagnostic(sw *ast.SwitchStmt, enumTypes []enumType, missing map[member]struct{}) analysis.Diagnostic { - return analysis.Diagnostic{ - Pos: sw.Pos(), - End: sw.End(), - Message: fmt.Sprintf( - "missing cases in switch of type %s: %s", - diagnosticEnumTypes(enumTypes), - diagnosticGroups(groupify(missing, enumTypes)), - ), - } -} - -func makeMissingDefaultDiagnostic(sw *ast.SwitchStmt, enumTypes []enumType) analysis.Diagnostic { - return analysis.Diagnostic{ - Pos: sw.Pos(), - End: sw.End(), - Message: fmt.Sprintf( - "missing default case in switch of type %s", - diagnosticEnumTypes(enumTypes), - ), - } -} diff --git a/vendor/github.com/nishanths/predeclared/LICENSE b/vendor/github.com/nishanths/predeclared/LICENSE deleted file mode 100644 index 946212315..000000000 --- a/vendor/github.com/nishanths/predeclared/LICENSE +++ /dev/null @@ -1,29 +0,0 @@ -BSD 3-Clause License - -Copyright (c) 2017, Nishanth Shanmugham -All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are met: - -* Redistributions of source code must retain the above copyright notice, this - list of conditions and the following disclaimer. 
- -* Redistributions in binary form must reproduce the above copyright notice, - this list of conditions and the following disclaimer in the documentation - and/or other materials provided with the distribution. - -* Neither the name of the copyright holder nor the names of its - contributors may be used to endorse or promote products derived from - this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" -AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE -IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE -FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL -DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR -SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, -OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/vendor/github.com/nishanths/predeclared/passes/predeclared/go18.go b/vendor/github.com/nishanths/predeclared/passes/predeclared/go18.go deleted file mode 100644 index 4083efc74..000000000 --- a/vendor/github.com/nishanths/predeclared/passes/predeclared/go18.go +++ /dev/null @@ -1,9 +0,0 @@ -// +build go1.8 - -package predeclared - -import "go/doc" - -func isPredeclaredIdent(name string) bool { - return doc.IsPredeclared(name) -} diff --git a/vendor/github.com/nishanths/predeclared/passes/predeclared/pre_go18.go b/vendor/github.com/nishanths/predeclared/passes/predeclared/pre_go18.go deleted file mode 100644 index 5780e0b56..000000000 --- a/vendor/github.com/nishanths/predeclared/passes/predeclared/pre_go18.go +++ /dev/null @@ -1,53 +0,0 @@ -// +build !go1.8 - -package predeclared - -func isPredeclaredIdent(name string) bool { - return predeclaredIdents[name] -} - -// Keep in sync with https://golang.org/ref/spec#Predeclared_identifiers -var predeclaredIdents = map[string]bool{ - "bool": true, - "byte": true, - "complex64": true, - "complex128": true, - "error": true, - "float32": true, - "float64": true, - "int": true, - "int8": true, - "int16": true, - "int32": true, - "int64": true, - "rune": true, - "string": true, - "uint": true, - "uint8": true, - "uint16": true, - "uint32": true, - "uint64": true, - "uintptr": true, - - "true": true, - "false": true, - "iota": true, - - "nil": true, - - "append": true, - "cap": true, - "close": true, - "complex": true, - "copy": true, - "delete": true, - "imag": true, - "len": true, - "make": true, - "new": true, - "panic": true, - "print": true, - "println": true, - "real": true, - "recover": true, -} diff --git a/vendor/github.com/nishanths/predeclared/passes/predeclared/predeclared.go b/vendor/github.com/nishanths/predeclared/passes/predeclared/predeclared.go deleted file mode 100644 index 67c0e0a00..000000000 --- a/vendor/github.com/nishanths/predeclared/passes/predeclared/predeclared.go +++ /dev/null @@ -1,202 +0,0 @@ -// Package predeclared provides a static analysis (used by the predeclared command) -// that can detect declarations in Go code that shadow one of Go's predeclared identifiers. -package predeclared - -import ( - "fmt" - "go/ast" - "go/token" - "strings" - - "golang.org/x/tools/go/analysis" -) - -// Flag names used by the analyzer. 
They are exported for use by analyzer -// driver programs. -const ( - IgnoreFlag = "ignore" - QualifiedFlag = "q" -) - -var ( - fIgnore string - fQualified bool -) - -func init() { - Analyzer.Flags.StringVar(&fIgnore, IgnoreFlag, "", "comma-separated list of predeclared identifiers to not report on") - Analyzer.Flags.BoolVar(&fQualified, QualifiedFlag, false, "include method names and field names (i.e., qualified names) in checks") -} - -var Analyzer = &analysis.Analyzer{ - Name: "predeclared", - Doc: "find code that shadows one of Go's predeclared identifiers", - Run: run, -} - -func run(pass *analysis.Pass) (interface{}, error) { - cfg := newConfig(fIgnore, fQualified) - for _, file := range pass.Files { - processFile(pass.Report, cfg, pass.Fset, file) - } - return nil, nil -} - -type config struct { - qualified bool - ignoredIdents map[string]struct{} -} - -func newConfig(ignore string, qualified bool) *config { - cfg := &config{ - qualified: qualified, - ignoredIdents: map[string]struct{}{}, - } - for _, s := range strings.Split(ignore, ",") { - ident := strings.TrimSpace(s) - if ident == "" { - continue - } - cfg.ignoredIdents[ident] = struct{}{} - } - return cfg -} - -type issue struct { - ident *ast.Ident - kind string - fset *token.FileSet -} - -func (i issue) String() string { - pos := i.fset.Position(i.ident.Pos()) - return fmt.Sprintf("%s: %s %s has same name as predeclared identifier", pos, i.kind, i.ident.Name) -} - -func processFile(report func(analysis.Diagnostic), cfg *config, fset *token.FileSet, file *ast.File) []issue { // nolint: gocyclo - var issues []issue - - maybeReport := func(x *ast.Ident, kind string) { - if _, isIgnored := cfg.ignoredIdents[x.Name]; !isIgnored && isPredeclaredIdent(x.Name) { - report(analysis.Diagnostic{ - Pos: x.Pos(), - End: x.End(), - Message: fmt.Sprintf("%s %s has same name as predeclared identifier", kind, x.Name), - }) - issues = append(issues, issue{x, kind, fset}) - } - } - - seenValueSpecs := make(map[*ast.ValueSpec]bool) - - // TODO: consider deduping package name issues for files in the - // same directory. - maybeReport(file.Name, "package name") - - for _, spec := range file.Imports { - if spec.Name == nil { - continue - } - maybeReport(spec.Name, "import name") - } - - // Handle declarations and fields. 
- // https://golang.org/ref/spec#Declarations_and_scope - ast.Inspect(file, func(n ast.Node) bool { - switch x := n.(type) { - case *ast.GenDecl: - var kind string - switch x.Tok { - case token.CONST: - kind = "const" - case token.VAR: - kind = "variable" - default: - return true - } - for _, spec := range x.Specs { - if vspec, ok := spec.(*ast.ValueSpec); ok && !seenValueSpecs[vspec] { - seenValueSpecs[vspec] = true - for _, name := range vspec.Names { - maybeReport(name, kind) - } - } - } - return true - case *ast.TypeSpec: - maybeReport(x.Name, "type") - return true - case *ast.StructType: - if cfg.qualified && x.Fields != nil { - for _, field := range x.Fields.List { - for _, name := range field.Names { - maybeReport(name, "field") - } - } - } - return true - case *ast.InterfaceType: - if cfg.qualified && x.Methods != nil { - for _, meth := range x.Methods.List { - for _, name := range meth.Names { - maybeReport(name, "method") - } - } - } - return true - case *ast.FuncDecl: - if x.Recv == nil { - // it's a function - maybeReport(x.Name, "function") - } else { - // it's a method - if cfg.qualified { - maybeReport(x.Name, "method") - } - } - // add receivers idents - if x.Recv != nil { - for _, field := range x.Recv.List { - for _, name := range field.Names { - maybeReport(name, "receiver") - } - } - } - // Params and Results will be checked in the *ast.FuncType case. - return true - case *ast.FuncType: - // add params idents - for _, field := range x.Params.List { - for _, name := range field.Names { - maybeReport(name, "param") - } - } - // add returns idents - if x.Results != nil { - for _, field := range x.Results.List { - for _, name := range field.Names { - maybeReport(name, "named return") - } - } - } - return true - case *ast.LabeledStmt: - maybeReport(x.Label, "label") - return true - case *ast.AssignStmt: - // We only care about short variable declarations, which use token.DEFINE. 
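A small illustrative example of what this case reports (the identifiers are invented):

```go
package example

func runeCount(s string) int {
	// predeclared reports: "variable len has same name as predeclared identifier"
	len := 0
	for range s {
		len++
	}
	return len
}
```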
- if x.Tok == token.DEFINE { - for _, expr := range x.Lhs { - if ident, ok := expr.(*ast.Ident); ok { - maybeReport(ident, "variable") - } - } - } - return true - default: - return true - } - }) - - return issues -} diff --git a/vendor/github.com/nunnatsa/ginkgolinter/.gitignore b/vendor/github.com/nunnatsa/ginkgolinter/.gitignore deleted file mode 100644 index 7d7f8b10c..000000000 --- a/vendor/github.com/nunnatsa/ginkgolinter/.gitignore +++ /dev/null @@ -1,2 +0,0 @@ -ginkgolinter -bin/ diff --git a/vendor/github.com/nunnatsa/ginkgolinter/.golangci.yml b/vendor/github.com/nunnatsa/ginkgolinter/.golangci.yml deleted file mode 100644 index 71ff0d034..000000000 --- a/vendor/github.com/nunnatsa/ginkgolinter/.golangci.yml +++ /dev/null @@ -1,3 +0,0 @@ -linters: - enable: - - revive diff --git a/vendor/github.com/nunnatsa/ginkgolinter/LICENSE b/vendor/github.com/nunnatsa/ginkgolinter/LICENSE deleted file mode 100644 index 11096c5c8..000000000 --- a/vendor/github.com/nunnatsa/ginkgolinter/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -MIT License - -Copyright (c) 2022 Nahshon Unna Tsameret - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. diff --git a/vendor/github.com/nunnatsa/ginkgolinter/Makefile b/vendor/github.com/nunnatsa/ginkgolinter/Makefile deleted file mode 100644 index 586633006..000000000 --- a/vendor/github.com/nunnatsa/ginkgolinter/Makefile +++ /dev/null @@ -1,27 +0,0 @@ -VERSION ?= "unknown" -VERSION_FLAG := -X github.com/nunnatsa/ginkgolinter/version.version=$(VERSION) -COMMIT_HASH := $(shell git rev-parse HEAD) -HASH_FLAG := -X github.com/nunnatsa/ginkgolinter/version.gitHash=$(COMMIT_HASH) - -BUILD_ARGS := -ldflags "$(VERSION_FLAG) $(HASH_FLAG)" - -build: unit-test - go build $(BUILD_ARGS) -o ginkgolinter ./cmd/ginkgolinter - -unit-test: - go test ./... 
- -build-for-windows: - GOOS=windows GOARCH=amd64 go build $(BUILD_ARGS) -o bin/ginkgolinter-amd64.exe ./cmd/ginkgolinter - -build-for-mac: - GOOS=darwin GOARCH=amd64 go build $(BUILD_ARGS) -o bin/ginkgolinter-amd64-darwin ./cmd/ginkgolinter - -build-for-linux: - GOOS=linux GOARCH=amd64 go build $(BUILD_ARGS) -o bin/ginkgolinter-amd64-linux ./cmd/ginkgolinter - GOOS=linux GOARCH=386 go build $(BUILD_ARGS) -o bin/ginkgolinter-386-linux ./cmd/ginkgolinter - -build-all: build build-for-linux build-for-mac build-for-windows - -test: build - ./tests/e2e.sh diff --git a/vendor/github.com/nunnatsa/ginkgolinter/README.md b/vendor/github.com/nunnatsa/ginkgolinter/README.md deleted file mode 100644 index 3832f610d..000000000 --- a/vendor/github.com/nunnatsa/ginkgolinter/README.md +++ /dev/null @@ -1,433 +0,0 @@ -[![Go Report Card](https://goreportcard.com/badge/github.com/nunnatsa/ginkgolinter)](https://goreportcard.com/report/github.com/nunnatsa/ginkgolinter) -[![Coverage Status](https://coveralls.io/repos/github/nunnatsa/ginkgolinter/badge.svg?branch=main)](https://coveralls.io/github/nunnatsa/ginkgolinter?branch=main) -![Build Status](https://github.com/nunnatsa/ginkgolinter/workflows/CI/badge.svg) -[![License](https://img.shields.io/github/license/nunnatsa/ginkgolinter)](/LICENSE) -[![Release](https://img.shields.io/github/release/nunnatsa/ginkgolinter.svg)](https://github.com/nunnatsa/ginkgolinter/releases/latest) -[![GitHub Releases Stats of ginkgolinter](https://img.shields.io/github/downloads/nunnatsa/ginkgolinter/total.svg?logo=github)](https://somsubhra.github.io/github-release-stats/?username=nunnatsa&repository=ginkgolinter) - -# ginkgo-linter -[ginkgo](https://onsi.github.io/ginkgo/) is a popular testing framework and [gomega](https://onsi.github.io/gomega/) is its assertion package. - -This is a golang linter to enforce some standards while using the ginkgo and gomega packages. - -## Install the CLI -Download the right executable from the latest release, according to your OS. - -Another option is to use go: -```shell -go install github.com/nunnatsa/ginkgolinter/cmd/ginkgolinter@latest -``` -Then add the new executable to your PATH. - -## usage -```shell -ginkgolinter [-fix] ./... -``` - -Use the `-fix` flag to apply the fix suggestions to the source code. - -### Use ginkgolinter with golangci-lint -The ginkgolinter is now part of the popular [golangci-lint](https://golangci-lint.run/), starting from version `v1.51.1`. - -It is not enabled by default, though. There are two ways to run ginkgolinter with golangci-lint: - -* From command line: - ```shell - golangci-lint run -E ginkgolinter ./... - ``` -* From configuration: - - Add ginkgolinter to the enabled linters list in .golangci.reference.yml file in your project. For more details, see - the [golangci-lint documentation](https://golangci-lint.run/usage/configuration/); e.g. - ```yaml - linters: - enable: - - ginkgolinter - ``` -## Linter Rules -The linter checks the ginkgo and gomega assertions in golang test code. 
Gomega may be used together with ginkgo tests, -For example: -```go -It("should test something", func() { // It is ginkgo test case function - Expect("abcd").To(HaveLen(4), "the string should have a length of 4") // Expect is the gomega assertion -}) -``` -or within a classic golang test code, like this: -```go -func TestWithGomega(t *testing.T) { - g := NewWithT(t) - g.Expect("abcd").To(HaveLen(4), "the string should have a length of 4") -} -``` - -In some cases, the gomega will be passed as a variable to function by ginkgo, for example: -```go -Eventually(func(g Gomega) error { - g.Expect("abcd").To(HaveLen(4), "the string should have a length of 4") - return nil -}).Should(Succeed()) -``` - -The linter checks the `Expect`, `ExpectWithOffset` and the `Ω` "actual" functions, with the `Should`, `ShouldNot`, `To`, `ToNot` and `NotTo` assertion functions. - -It also supports the embedded `Not()` matcher - -Some checks find actual bugs, and some are more for style. - -### Using a function call in async assertion [BUG] -This rule finds an actual bug in tests, where asserting a function call in an async function; e.g. `Eventually`. For -example: -```go -func slowInt(int val) int { - time.Sleep(time.Second) - return val -} - -... - -It("should test that slowInt returns 42, eventually", func() { - Eventually(slowInt(42)).WithPolling(time.Millisecond * 100).WithTimeout(time.Second * 2).Equal(42) -}) -``` -The problem with the above code is that it **should** poll - call the function - until it returns 42, but what actually -happens is that first the function is called, and we pass `42` to `Eventually` - not the function. This is not what we -tried to do here. - -The linter will suggest replacing this code by: -```go -It("should test that slowInt returns 42, eventually", func() { - Eventually(slowInt).WithArguments(42).WithPolling(time.Millisecond * 100).WithTimeout(time.Second * 2).Equal(42) -}) -``` - -The linter suggested replacing the function call by the function name. - -If function arguments are used, the linter will add the `WithArguments()` method to pass them. - -Please notice that `WithArguments()` is only supported from gomenga v1.22.0. - -When using an older version of gomega, change the code manually. For example: - -```go -It("should test that slowInt returns 42, eventually", func() { - Eventually(func() int { - slowint(42) - }).WithPolling(time.Millisecond * 100).WithTimeout(time.Second * 2).Equal(42) -}) -``` - -### Comparing a pointer with a value [BUG] -The linter warns when comparing a pointer with a value. -These comparisons are always wrong and will always fail. - -In case of a positive assertion (`To()` or `Should()`), the test will just fail. - -But the main concern is for false positive tests, when using a negative assertion (`NotTo()`, `ToNot()`, `ShouldNot()`, -`Should(Not())` etc.); e.g. -```go -num := 5 -... -pNum := &num -... -Expect(pNum).ShouldNot(Equal(6)) -``` -This assertion will pass, but for the wrong reasons: pNum is not equal 6, not because num == 5, but because pNum is -a pointer, while `6` is an `int`. - -In the case above, the linter will suggest `Expect(pNum).ShouldNot(HaveValue(Equal(6)))` - -This is also right for additional matchers: `BeTrue()` and `BeFalse()`, `BeIdenticalTo()`, `BeEquivalentTo()` -and `BeNumerically`. - -### Missing Assertion Method [BUG] -The linter warns when calling an "actual" method (e.g. `Expect()`, `Eventually()` etc.), without an assertion method (e.g -`Should()`, `NotTo()` etc.) 
- -For example: -```go -// no assertion for the result -Eventually(doSomething).WithTimeout(time.Seconds * 5).WithPolling(time.Milliseconds * 100) -``` - -The linter will not suggest a fix for this warning. - -This rule cannot be suppressed. - -### Focus Container / Focus individual spec found [BUG] -This rule finds ginkgo focus containers, or the `Focus` individual spec in the code. - -ginkgo supports the `FDescribe`, `FContext`, `FWhen`, `FIt`, `FDescribeTable` and `FEntry` -containers to allow the developer to focus -on a specific test or set of tests during test development or debug. - -For example: -```go -var _ = Describe("checking something", func() { - FIt("this test is the only one that will run", func(){ - ... - }) -}) -``` -Alternatively, the `Focus` individual spec may be used for the same purpose, e.g. -```go -var _ = Describe("checking something", Focus, func() { - It("this test is the only one that will run", func(){ - ... - }) -}) -``` - -These container, or the `Focus` spec, must not be part of the final source code, and should only be used locally by the -developer. - -***This rule is disabled by default***. Use the `--forbid-focus-container=true` command line flag to enable it. - -### Comparing values from different types [BUG] - -The `Equal` and the `BeIdentical` matchers also check the type, not only the value. - -The following code will fail in runtime: -```go -x := 5 // x is int -Expect(x).Should(Eqaul(uint(5)) // x and uint(5) are with different -``` -When using negative checks, it's even worse, because we get a false positive: -``` -x := 5 -Expect(x).ShouldNot(Equal(uint(5)) -``` - -The linter suggests two options to solve this warning: either compare with the same type, e.g. -using casting, or use the `BeEquivalentTo` matcher. - -The linter can't guess what is the best solution in each case, and so it won't auto-fix this warning. - -To suppress this warning entirely, use the `--suppress-type-compare-assertion=true` command line parameter. - -To suppress a specific file or line, use the `// ginkgo-linter:ignore-type-compare-warning` comment (see [below](#suppress-warning-from-the-code)) - -### Wrong Usage of the `MatchError` gomega Matcher [BUG] -The `MatchError` gomega matcher asserts an error value (and if it's not nil). -There are four valid formats for using this Matcher: -* error value; e.g. `Expect(err).To(MatchError(anotherErr))` -* string, to be equal to the output of the `Error()` method; e.g. `Expect(err).To(MatchError("Not Found"))` -* A gomega matcher that asserts strings; e.g. `Expect(err).To(MatchError(ContainSubstring("Found")))` -* [from v0.29.0] a function that receive a single error parameter and returns a single boolean value. - In this format, an additional single string parameter, with the function description, is also required; e.g. - `Expect(err).To(MatchError(isNotFound, "is the error is a not found error"))` - -These four format are checked on runtime, but sometimes it's too late. ginkgolinter performs a static analysis and so it -will find these issues on build time. - -ginkgolinter checks the following: -* Is the first parameter is one of the four options above. -* That there are no additional parameters passed to the matcher; e.g. - `MatchError(isNotFoundFunc, "a valid description" , "not used string")`. In this case, the matcher won't fail on run - time, but the additional parameters are not in use and ignored. 
-* If the first parameter is a function with the format of `func(error)bool`, ginkgolinter makes sure that the second - parameter exists and its type is string. - -### Wrong Length Assertion [STYLE] -The linter finds assertion of the golang built-in `len` function, with all kind of matchers, while there are already -gomega matchers for these usecases; We want to assert the item, rather than its length. - -There are several wrong patterns: -```go -Expect(len(x)).To(Equal(0)) // should be: Expect(x).To(BeEmpty()) -Expect(len(x)).To(BeZero()) // should be: Expect(x).To(BeEmpty()) -Expect(len(x)).To(BeNumeric(">", 0)) // should be: Expect(x).ToNot(BeEmpty()) -Expect(len(x)).To(BeNumeric(">=", 1)) // should be: Expect(x).ToNot(BeEmpty()) -Expect(len(x)).To(BeNumeric("==", 0)) // should be: Expect(x).To(BeEmpty()) -Expect(len(x)).To(BeNumeric("!=", 0)) // should be: Expect(x).ToNot(BeEmpty()) - -Expect(len(x)).To(Equal(1)) // should be: Expect(x).To(HaveLen(1)) -Expect(len(x)).To(BeNumeric("==", 2)) // should be: Expect(x).To(HaveLen(2)) -Expect(len(x)).To(BeNumeric("!=", 3)) // should be: Expect(x).ToNot(HaveLen(3)) -``` - -It also supports the embedded `Not()` matcher; e.g. - -`Ω(len(x)).Should(Not(Equal(4)))` => `Ω(x).ShouldNot(HaveLen(4))` - -Or even (double negative): - -`Ω(len(x)).To(Not(BeNumeric(">", 0)))` => `Ω(x).To(BeEmpty())` - -The output of the linter,when finding issues, looks like this: -``` -./testdata/src/a/a.go:14:5: ginkgo-linter: wrong length assertion; consider using `Expect("abcd").Should(HaveLen(4))` instead -./testdata/src/a/a.go:18:5: ginkgo-linter: wrong length assertion; consider using `Expect("").Should(BeEmpty())` instead -./testdata/src/a/a.go:22:5: ginkgo-linter: wrong length assertion; consider using `Expect("").Should(BeEmpty())` instead -``` -#### use the `HaveLen(0)` matcher. [STYLE] -The linter will also warn about the `HaveLen(0)` matcher, and will suggest to replace it with `BeEmpty()` - -### Wrong `nil` Assertion [STYLE] -The linter finds assertion of the comparison to nil, with all kind of matchers, instead of using the existing `BeNil()` -matcher; We want to assert the item, rather than a comparison result. - -There are several wrong patterns: - -```go -Expect(x == nil).To(Equal(true)) // should be: Expect(x).To(BeNil()) -Expect(nil == x).To(Equal(true)) // should be: Expect(x).To(BeNil()) -Expect(x != nil).To(Equal(true)) // should be: Expect(x).ToNot(BeNil()) -Expect(nil != nil).To(Equal(true)) // should be: Expect(x).ToNot(BeNil()) - -Expect(x == nil).To(BeTrue()) // should be: Expect(x).To(BeNil()) -Expect(x == nil).To(BeFalse()) // should be: Expect(x).ToNot(BeNil()) -``` -It also supports the embedded `Not()` matcher; e.g. - -`Ω(x == nil).Should(Not(BeTrue()))` => `Ω(x).ShouldNot(BeNil())` - -Or even (double negative): - -`Ω(x != nil).Should(Not(BeTrue()))` => `Ω(x).Should(BeNil())` - -### Wrong boolean Assertion [STYLE] -The linter finds assertion using the `Equal` method, with the values of to `true` or `false`, instead -of using the existing `BeTrue()` or `BeFalse()` matcher. - -There are several wrong patterns: - -```go -Expect(x).To(Equal(true)) // should be: Expect(x).To(BeTrue()) -Expect(x).To(Equal(false)) // should be: Expect(x).To(BeFalse()) -``` -It also supports the embedded `Not()` matcher; e.g. - -`Ω(x).Should(Not(Equal(True)))` => `Ω(x).ShouldNot(BeTrue())` - -### Wrong Error Assertion [STYLE] -The linter finds assertion of errors compared with nil, or to be equal nil, or to be nil. 
The linter suggests to use `Succeed` for functions or `HaveOccurred` for error values.. - -There are several wrong patterns: - -```go -Expect(err).To(BeNil()) // should be: Expect(err).ToNot(HaveOccurred()) -Expect(err == nil).To(Equal(true)) // should be: Expect(err).ToNot(HaveOccurred()) -Expect(err == nil).To(BeFalse()) // should be: Expect(err).To(HaveOccurred()) -Expect(err != nil).To(BeTrue()) // should be: Expect(err).To(HaveOccurred()) -Expect(funcReturnsError()).To(BeNil()) // should be: Expect(funcReturnsError()).To(Succeed()) - -and so on -``` -It also supports the embedded `Not()` matcher; e.g. - -`Ω(err == nil).Should(Not(BeTrue()))` => `Ω(x).Should(HaveOccurred())` - -### Wrong Comparison Assertion [STYLE] -The linter finds assertion of boolean comparisons, which are already supported by existing gomega matchers. - -The linter assumes that when compared something to literals or constants, these values should be used for the assertion, -and it will do its best to suggest the right assertion expression accordingly. - -There are several wrong patterns: -```go -var x = 10 -var s = "abcd" - -... - -Expect(x == 10).Should(BeTrue()) // should be Expect(x).Should(Equal(10)) -Expect(10 == x).Should(BeTrue()) // should be Expect(x).Should(Equal(10)) -Expect(x != 5).Should(Equal(true)) // should be Expect(x).ShouldNot(Equal(5)) -Expect(x != 0).Should(Equal(true)) // should be Expect(x).ShouldNot(BeZero()) - -Expect(s != "abcd").Should(BeFalse()) // should be Expect(s).Should(Equal("abcd")) -Expect("abcd" != s).Should(BeFalse()) // should be Expect(s).Should(Equal("abcd")) -``` -Or non-equal comparisons: -```go -Expect(x > 10).To(BeTrue()) // ==> Expect(x).To(BeNumerically(">", 10)) -Expect(x >= 15).To(BeTrue()) // ==> Expect(x).To(BeNumerically(">=", 15)) -Expect(3 > y).To(BeTrue()) // ==> Expect(y).To(BeNumerically("<", 3)) -// and so on ... -``` - -This check included a limited support in constant values. For example: -```go -const c1 = 5 - -... - -Expect(x1 == c1).Should(BeTrue()) // ==> Expect(x1).Should(Equal(c1)) -Expect(c1 == x1).Should(BeTrue()) // ==> Expect(x1).Should(Equal(c1)) -``` - -## Suppress the linter -### Suppress warning from command line -* Use the `--suppress-len-assertion=true` flag to suppress the wrong length assertion warning -* Use the `--suppress-nil-assertion=true` flag to suppress the wrong nil assertion warning -* Use the `--suppress-err-assertion=true` flag to suppress the wrong error assertion warning -* Use the `--suppress-compare-assertion=true` flag to suppress the wrong comparison assertion warning -* Use the `--suppress-async-assertion=true` flag to suppress the function call in async assertion warning -* Use the `--forbid-focus-container=true` flag to activate the focused container assertion (deactivated by default) -* Use the `--suppress-type-compare-assertion=true` to suppress the type compare assertion warning -* Use the `--allow-havelen-0=true` flag to avoid warnings about `HaveLen(0)`; Note: this parameter is only supported from - command line, and not from a comment. - -### Suppress warning from the code -To suppress the wrong length assertion warning, add a comment with (only) - -`ginkgo-linter:ignore-len-assert-warning`. - -To suppress the wrong nil assertion warning, add a comment with (only) - -`ginkgo-linter:ignore-nil-assert-warning`. - -To suppress the wrong error assertion warning, add a comment with (only) - -`ginkgo-linter:ignore-err-assert-warning`. 
- -To suppress the wrong comparison assertion warning, add a comment with (only) - -`ginkgo-linter:ignore-compare-assert-warning`. - -To suppress the wrong async assertion warning, add a comment with (only) - -`ginkgo-linter:ignore-async-assert-warning`. - -To supress the focus container warning, add a comment with (only) - -`ginkgo-linter:ignore-focus-container-warning` - -To suppress the different type comparison, add a comment with (only) - -`ginkgo-linter:ignore-type-compare-warning` - -Notice that this comment will not work for an anonymous variable container like -```go -// ginkgo-linter:ignore-focus-container-warning (not working!!) -var _ = FDescribe(...) -``` -In this case, use the file comment (see bellow). - -There are two options to use these comments: -1. If the comment is at the top of the file, supress the warning for the whole file; e.g.: - ```go - package mypackage - - // ginkgo-linter:ignore-len-assert-warning - - import ( - . "github.com/onsi/ginkgo/v2" - . "github.com/onsi/gomega" - ) - - var _ = Describe("my test", func() { - It("should do something", func() { - Expect(len("abc")).Should(Equal(3)) // nothing in this file will trigger the warning - }) - }) - ``` - -2. If the comment is before a wrong length check expression, the warning is suppressed for this expression only; for example: - ```golang - It("should test something", func() { - // ginkgo-linter:ignore-nil-assert-warning - Expect(x == nil).Should(BeTrue()) // this line will not trigger the warning - Expect(x == nil).Should(BeTrue()) // this line will trigger the warning - } - ``` diff --git a/vendor/github.com/nunnatsa/ginkgolinter/ginkgo_linter.go b/vendor/github.com/nunnatsa/ginkgolinter/ginkgo_linter.go deleted file mode 100644 index a0aff1612..000000000 --- a/vendor/github.com/nunnatsa/ginkgolinter/ginkgo_linter.go +++ /dev/null @@ -1,1499 +0,0 @@ -package ginkgolinter - -import ( - "bytes" - "flag" - "fmt" - "go/ast" - "go/constant" - "go/printer" - "go/token" - gotypes "go/types" - "reflect" - - "github.com/go-toolsmith/astcopy" - "golang.org/x/tools/go/analysis" - - "github.com/nunnatsa/ginkgolinter/ginkgohandler" - "github.com/nunnatsa/ginkgolinter/gomegahandler" - "github.com/nunnatsa/ginkgolinter/interfaces" - "github.com/nunnatsa/ginkgolinter/reverseassertion" - "github.com/nunnatsa/ginkgolinter/types" - "github.com/nunnatsa/ginkgolinter/version" -) - -// The ginkgolinter enforces standards of using ginkgo and gomega. -// -// For more details, look at the README.md file - -const ( - linterName = "ginkgo-linter" - wrongLengthWarningTemplate = linterName + ": wrong length assertion; consider using `%s` instead" - wrongNilWarningTemplate = linterName + ": wrong nil assertion; consider using `%s` instead" - wrongBoolWarningTemplate = linterName + ": wrong boolean assertion; consider using `%s` instead" - wrongErrWarningTemplate = linterName + ": wrong error assertion; consider using `%s` instead" - wrongCompareWarningTemplate = linterName + ": wrong comparison assertion; consider using `%s` instead" - doubleNegativeWarningTemplate = linterName + ": avoid double negative assertion; consider using `%s` instead" - valueInEventually = linterName + ": use a function call in %s. This actually checks nothing, because %s receives the function returned value, instead of function itself, and this value is never changed" - comparePointerToValue = linterName + ": comparing a pointer to a value will always fail. consider using `%s` instead" - missingAssertionMessage = linterName + `: %q: missing assertion method. 
Expected "Should()", "To()", "ShouldNot()", "ToNot()" or "NotTo()"` - missingAsyncAssertionMessage = linterName + `: %q: missing assertion method. Expected "Should()" or "ShouldNot()"` - focusContainerFound = linterName + ": Focus container found. This is used only for local debug and should not be part of the actual source code, consider to replace with %q" - focusSpecFound = linterName + ": Focus spec found. This is used only for local debug and should not be part of the actual source code, consider to remove it" - compareDifferentTypes = linterName + ": use %[1]s with different types: Comparing %[2]s with %[3]s; either change the expected value type if possible, or use the BeEquivalentTo() matcher, instead of %[1]s()" - matchErrorArgWrongType = linterName + ": the MatchError matcher used to assert a non error type (%s)" - matchErrorWrongTypeAssertion = linterName + ": MatchError first parameter (%s) must be error, string, GomegaMatcher or func(error)bool are allowed" - matchErrorMissingDescription = linterName + ": missing function description as second parameter of MatchError" - matchErrorRedundantArg = linterName + ": redundant MatchError arguments; consider removing them; consider using `%s` instead" - matchErrorNoFuncDescription = linterName + ": The second parameter of MatchError must be the function description (string)" -) - -const ( // gomega matchers - beEmpty = "BeEmpty" - beEquivalentTo = "BeEquivalentTo" - beFalse = "BeFalse" - beIdenticalTo = "BeIdenticalTo" - beNil = "BeNil" - beNumerically = "BeNumerically" - beTrue = "BeTrue" - beZero = "BeZero" - equal = "Equal" - haveLen = "HaveLen" - haveOccurred = "HaveOccurred" - haveValue = "HaveValue" - not = "Not" - omega = "Ω" - succeed = "Succeed" - and = "And" - or = "Or" - withTransform = "WithTransform" - matchError = "MatchError" -) - -const ( // gomega actuals - expect = "Expect" - expectWithOffset = "ExpectWithOffset" - eventually = "Eventually" - eventuallyWithOffset = "EventuallyWithOffset" - consistently = "Consistently" - consistentlyWithOffset = "ConsistentlyWithOffset" -) - -// Analyzer is the interface to go_vet -var Analyzer = NewAnalyzer() - -type ginkgoLinter struct { - config *types.Config -} - -// NewAnalyzer returns an Analyzer - the package interface with nogo -func NewAnalyzer() *analysis.Analyzer { - linter := ginkgoLinter{ - config: &types.Config{ - SuppressLen: false, - SuppressNil: false, - SuppressErr: false, - SuppressCompare: false, - ForbidFocus: false, - AllowHaveLen0: false, - }, - } - - a := &analysis.Analyzer{ - Name: "ginkgolinter", - Doc: fmt.Sprintf(doc, version.Version()), - Run: linter.run, - } - - var ignored bool - a.Flags.Init("ginkgolinter", flag.ExitOnError) - a.Flags.Var(&linter.config.SuppressLen, "suppress-len-assertion", "Suppress warning for wrong length assertions") - a.Flags.Var(&linter.config.SuppressNil, "suppress-nil-assertion", "Suppress warning for wrong nil assertions") - a.Flags.Var(&linter.config.SuppressErr, "suppress-err-assertion", "Suppress warning for wrong error assertions") - a.Flags.Var(&linter.config.SuppressCompare, "suppress-compare-assertion", "Suppress warning for wrong comparison assertions") - a.Flags.Var(&linter.config.SuppressAsync, "suppress-async-assertion", "Suppress warning for function call in async assertion, like Eventually") - a.Flags.Var(&linter.config.SuppressTypeCompare, "suppress-type-compare-assertion", "Suppress warning for comparing values from different types, like int32 and uint32") - a.Flags.Var(&linter.config.AllowHaveLen0, 
"allow-havelen-0", "Do not warn for HaveLen(0); default = false") - - a.Flags.BoolVar(&ignored, "suppress-focus-container", true, "Suppress warning for ginkgo focus containers like FDescribe, FContext, FWhen or FIt. Deprecated and ignored: use --forbid-focus-container instead") - a.Flags.Var(&linter.config.ForbidFocus, "forbid-focus-container", "trigger a warning for ginkgo focus containers like FDescribe, FContext, FWhen or FIt; default = false.") - - return a -} - -const doc = `enforces standards of using ginkgo and gomega - -or - ginkgolinter version - -version: %s - -currently, the linter searches for following: -* trigger a warning when using Eventually or Constantly with a function call. This is in order to prevent the case when - using a function call instead of a function. Function call returns a value only once, and so the original value - is tested again and again and is never changed. [Bug] - -* trigger a warning when comparing a pointer to a value. [Bug] - -* trigger a warning for missing assertion method: [Bug] - Eventually(checkSomething) - -* trigger a warning when a ginkgo focus container (FDescribe, FContext, FWhen or FIt) is found. [Bug] - -* validate the MatchError gomega matcher [Bug] - -* trigger a warning when using the Equal or the BeIdentical matcher with two different types, as these matchers will - fail in runtime. - -* wrong length assertions. We want to assert the item rather than its length. [Style] -For example: - Expect(len(x)).Should(Equal(1)) -This should be replaced with: - Expect(x)).Should(HavelLen(1)) - -* wrong nil assertions. We want to assert the item rather than a comparison result. [Style] -For example: - Expect(x == nil).Should(BeTrue()) -This should be replaced with: - Expect(x).Should(BeNil()) - -* wrong error assertions. For example: [Style] - Expect(err == nil).Should(BeTrue()) -This should be replaced with: - Expect(err).ShouldNot(HaveOccurred()) - -* wrong boolean comparison, for example: [Style] - Expect(x == 8).Should(BeTrue()) -This should be replaced with: - Expect(x).Should(BeEqual(8)) - -* replaces Equal(true/false) with BeTrue()/BeFalse() [Style] - -* replaces HaveLen(0) with BeEmpty() [Style] -` - -// main assertion function -func (l *ginkgoLinter) run(pass *analysis.Pass) (interface{}, error) { - for _, file := range pass.Files { - fileConfig := l.config.Clone() - - cm := ast.NewCommentMap(pass.Fset, file, file.Comments) - - fileConfig.UpdateFromFile(cm) - - gomegaHndlr := gomegahandler.GetGomegaHandler(file) - ginkgoHndlr := ginkgohandler.GetGinkgoHandler(file) - - if gomegaHndlr == nil && ginkgoHndlr == nil { // no gomega or ginkgo imports => no use in gomega in this file; nothing to do here - continue - } - - //gomegaMatcher = getMatcherInterface(pass, file) - - ast.Inspect(file, func(n ast.Node) bool { - if ginkgoHndlr != nil && fileConfig.ForbidFocus { - spec, ok := n.(*ast.ValueSpec) - if ok { - for _, val := range spec.Values { - if exp, ok := val.(*ast.CallExpr); ok { - if checkFocusContainer(pass, ginkgoHndlr, exp) { - return true - } - } - } - } - } - - stmt, ok := n.(*ast.ExprStmt) - if !ok { - return true - } - - config := fileConfig.Clone() - - if comments, ok := cm[stmt]; ok { - config.UpdateFromComment(comments) - } - - // search for function calls - assertionExp, ok := stmt.X.(*ast.CallExpr) - if !ok { - return true - } - - if ginkgoHndlr != nil && bool(config.ForbidFocus) && checkFocusContainer(pass, ginkgoHndlr, assertionExp) { - return true - } - - // no more ginkgo checks. From here it's only gomega. 
So if there is no gomega handler, exit here. This is - // mostly to prevent nil pointer error. - if gomegaHndlr == nil { - return true - } - - assertionFunc, ok := assertionExp.Fun.(*ast.SelectorExpr) - if !ok { - checkNoAssertion(pass, assertionExp, gomegaHndlr) - return true - } - - if !isAssertionFunc(assertionFunc.Sel.Name) { - checkNoAssertion(pass, assertionExp, gomegaHndlr) - return true - } - - actualExpr := gomegaHndlr.GetActualExpr(assertionFunc) - if actualExpr == nil { - return true - } - - return checkExpression(pass, config, assertionExp, actualExpr, gomegaHndlr) - }) - } - return nil, nil -} - -func checkFocusContainer(pass *analysis.Pass, ginkgoHndlr ginkgohandler.Handler, exp *ast.CallExpr) bool { - foundFocus := false - isFocus, id := ginkgoHndlr.GetFocusContainerName(exp) - if isFocus { - reportNewName(pass, id, id.Name[1:], focusContainerFound, id.Name) - foundFocus = true - } - - if id != nil && ginkgohandler.IsContainer(id) { - for _, arg := range exp.Args { - if ginkgoHndlr.IsFocusSpec(arg) { - reportNoFix(pass, arg.Pos(), focusSpecFound) - foundFocus = true - } else if callExp, ok := arg.(*ast.CallExpr); ok { - if checkFocusContainer(pass, ginkgoHndlr, callExp) { // handle table entries - foundFocus = true - } - } - } - } - - return foundFocus -} - -func checkExpression(pass *analysis.Pass, config types.Config, assertionExp *ast.CallExpr, actualExpr *ast.CallExpr, handler gomegahandler.Handler) bool { - expr := astcopy.CallExpr(assertionExp) - oldExpr := goFmt(pass.Fset, expr) - - if checkAsyncAssertion(pass, config, expr, actualExpr, handler, oldExpr) { - return true - } - - actualArg := getActualArg(actualExpr, handler) - if actualArg == nil { - return true - } - - if !bool(config.SuppressLen) && isActualIsLenFunc(actualArg) { - return checkLengthMatcher(expr, pass, handler, oldExpr) - } else if nilable, compOp := getNilableFromComparison(actualArg); nilable != nil { - if isExprError(pass, nilable) { - if config.SuppressErr { - return true - } - } else if config.SuppressNil { - return true - } - - return checkNilMatcher(expr, pass, nilable, handler, compOp == token.NEQ, oldExpr) - - } else if first, second, op, ok := isComparison(pass, actualArg); ok { - matcher, shouldContinue := startCheckComparison(expr, handler) - if !shouldContinue { - return false - } - if !bool(config.SuppressLen) && isActualIsLenFunc(first) { - if handleLenComparison(pass, expr, matcher, first, second, op, handler, oldExpr) { - return false - } - } - return bool(config.SuppressCompare) || checkComparison(expr, pass, matcher, handler, first, second, op, oldExpr) - - } else if checkMatchError(pass, assertionExp, actualArg, handler, oldExpr) { - return false - } else if isExprError(pass, actualArg) { - return bool(config.SuppressErr) || checkNilError(pass, expr, handler, actualArg, oldExpr) - - } else if checkPointerComparison(pass, config, assertionExp, expr, actualArg, handler, oldExpr) { - return false - } else if !handleAssertionOnly(pass, config, expr, handler, actualArg, oldExpr, true) { - return false - } else if !config.SuppressTypeCompare { - return !checkEqualWrongType(pass, assertionExp, actualArg, handler, oldExpr) - } - - return true -} - -func checkMatchError(pass *analysis.Pass, origExp *ast.CallExpr, actualArg ast.Expr, handler gomegahandler.Handler, oldExpr string) bool { - matcher, ok := origExp.Args[0].(*ast.CallExpr) - if !ok { - return false - } - - return doCheckMatchError(pass, origExp, matcher, actualArg, handler, oldExpr) -} - -func doCheckMatchError(pass 
*analysis.Pass, origExp *ast.CallExpr, matcher *ast.CallExpr, actualArg ast.Expr, handler gomegahandler.Handler, oldExpr string) bool { - name, ok := handler.GetActualFuncName(matcher) - if !ok { - return false - } - switch name { - case matchError: - case not: - nested, ok := matcher.Args[0].(*ast.CallExpr) - if !ok { - return false - } - - return doCheckMatchError(pass, origExp, nested, actualArg, handler, oldExpr) - case and, or: - res := true - for _, arg := range matcher.Args { - if nested, ok := arg.(*ast.CallExpr); ok { - if !doCheckMatchError(pass, origExp, nested, actualArg, handler, oldExpr) { - res = false - } - } - } - return res - default: - return false - } - - if !isExprError(pass, actualArg) { - reportNoFix(pass, origExp.Pos(), matchErrorArgWrongType, goFmt(pass.Fset, actualArg)) - } - - expr := astcopy.CallExpr(matcher) - - validAssertion, requiredParams := checkMatchErrorAssertion(pass, matcher) - if !validAssertion { - reportNoFix(pass, expr.Pos(), matchErrorWrongTypeAssertion, goFmt(pass.Fset, matcher.Args[0])) - return false - } - - numParams := len(matcher.Args) - if numParams == requiredParams { - if numParams == 2 { - t := pass.TypesInfo.TypeOf(matcher.Args[1]) - if !gotypes.Identical(t, gotypes.Typ[gotypes.String]) { - pass.Reportf(expr.Pos(), matchErrorNoFuncDescription) - return false - } - } - return true - } - - if requiredParams == 2 && numParams == 1 { - reportNoFix(pass, expr.Pos(), matchErrorMissingDescription) - return false - } - - var newArgsSuggestion = []ast.Expr{expr.Args[0]} - if requiredParams == 2 { - newArgsSuggestion = append(newArgsSuggestion, expr.Args[1]) - } - expr.Args = newArgsSuggestion - report(pass, expr, matchErrorRedundantArg, oldExpr) - return false -} - -func checkMatchErrorAssertion(pass *analysis.Pass, matcher *ast.CallExpr) (bool, int) { - if isErrorMatcherValidArg(pass, matcher.Args[0]) { - return true, 1 - } - - t1 := pass.TypesInfo.TypeOf(matcher.Args[0]) - if isFuncErrBool(t1) { - return true, 2 - } - - return false, 0 -} - -// isFuncErrBool checks if a function is with the signature `func(error) bool` -func isFuncErrBool(t gotypes.Type) bool { - sig, ok := t.(*gotypes.Signature) - if !ok { - return false - } - if sig.Params().Len() != 1 || sig.Results().Len() != 1 { - return false - } - - if !interfaces.ImplementsError(sig.Params().At(0).Type()) { - return false - } - - b, ok := sig.Results().At(0).Type().(*gotypes.Basic) - if ok && b.Name() == "bool" && b.Info() == gotypes.IsBoolean && b.Kind() == gotypes.Bool { - return true - } - - return false -} - -func isErrorMatcherValidArg(pass *analysis.Pass, arg ast.Expr) bool { - if isExprError(pass, arg) { - return true - } - - if t, ok := pass.TypesInfo.TypeOf(arg).(*gotypes.Basic); ok && t.Kind() == gotypes.String { - return true - } - - t := pass.TypesInfo.TypeOf(arg) - - return interfaces.ImplementsGomegaMatcher(t) -} - -func checkEqualWrongType(pass *analysis.Pass, origExp *ast.CallExpr, actualArg ast.Expr, handler gomegahandler.Handler, old string) bool { - matcher, ok := origExp.Args[0].(*ast.CallExpr) - if !ok { - return false - } - - return checkEqualDifferentTypes(pass, matcher, actualArg, handler, old, false) -} - -func checkEqualDifferentTypes(pass *analysis.Pass, matcher *ast.CallExpr, actualArg ast.Expr, handler gomegahandler.Handler, old string, parentPointer bool) bool { - matcherFuncName, ok := handler.GetActualFuncName(matcher) - if !ok { - return false - } - - actualType := pass.TypesInfo.TypeOf(actualArg) - - switch matcherFuncName { - case equal, 
beIdenticalTo: // continue - case and, or: - foundIssue := false - for _, nestedExp := range matcher.Args { - nested, ok := nestedExp.(*ast.CallExpr) - if !ok { - continue - } - if checkEqualDifferentTypes(pass, nested, actualArg, handler, old, parentPointer) { - foundIssue = true - } - } - - return foundIssue - case withTransform: - nested, ok := matcher.Args[1].(*ast.CallExpr) - if !ok { - return false - } - - matcherFuncName, ok = handler.GetActualFuncName(nested) - switch matcherFuncName { - case equal, beIdenticalTo: - case not: - return checkEqualDifferentTypes(pass, nested, actualArg, handler, old, parentPointer) - default: - return false - } - - if t := getFuncType(pass, matcher.Args[0]); t != nil { - actualType = t - matcher = nested - - if !ok { - return false - } - } else { - return checkEqualDifferentTypes(pass, nested, actualArg, handler, old, parentPointer) - } - - case not: - nested, ok := matcher.Args[0].(*ast.CallExpr) - if !ok { - return false - } - - return checkEqualDifferentTypes(pass, nested, actualArg, handler, old, parentPointer) - - case haveValue: - nested, ok := matcher.Args[0].(*ast.CallExpr) - if !ok { - return false - } - - return checkEqualDifferentTypes(pass, nested, actualArg, handler, old, true) - default: - return false - } - - matcherValue := matcher.Args[0] - - switch act := actualType.(type) { - case *gotypes.Tuple: - actualType = act.At(0).Type() - case *gotypes.Pointer: - if parentPointer { - actualType = act.Elem() - } - } - - matcherType := pass.TypesInfo.TypeOf(matcherValue) - - if !reflect.DeepEqual(matcherType, actualType) { - // Equal can handle comparison of interface and a value that implements it - if isImplementing(matcherType, actualType) || isImplementing(actualType, matcherType) { - return false - } - - reportNoFix(pass, matcher.Pos(), compareDifferentTypes, matcherFuncName, actualType, matcherType) - return true - } - - return false -} - -func getFuncType(pass *analysis.Pass, expr ast.Expr) gotypes.Type { - switch f := expr.(type) { - case *ast.FuncLit: - if f.Type != nil && f.Type.Results != nil && len(f.Type.Results.List) > 0 { - return pass.TypesInfo.TypeOf(f.Type.Results.List[0].Type) - } - case *ast.Ident: - a := pass.TypesInfo.TypeOf(f) - if sig, ok := a.(*gotypes.Signature); ok && sig.Results().Len() > 0 { - return sig.Results().At(0).Type() - } - } - - return nil -} - -func isImplementing(ifs, impl gotypes.Type) bool { - if gotypes.IsInterface(ifs) { - - var ( - theIfs *gotypes.Interface - ok bool - ) - - for { - theIfs, ok = ifs.(*gotypes.Interface) - if ok { - break - } - ifs = ifs.Underlying() - } - - return gotypes.Implements(impl, theIfs) - } - return false -} - -// be careful - never change origExp!!! only modify its clone, expr!!! -func checkPointerComparison(pass *analysis.Pass, config types.Config, origExp *ast.CallExpr, expr *ast.CallExpr, actualArg ast.Expr, handler gomegahandler.Handler, oldExpr string) bool { - if !isPointer(pass, actualArg) { - return false - } - matcher, ok := origExp.Args[0].(*ast.CallExpr) - if !ok { - return false - } - - matcherFuncName, ok := handler.GetActualFuncName(matcher) - if !ok { - return false - } - - // not using recurse here, since we need the original expression, in order to get the TypeInfo, while we should not - // modify it. 
- for matcherFuncName == not { - reverseAssertionFuncLogic(expr) - expr.Args[0] = expr.Args[0].(*ast.CallExpr).Args[0] - matcher, ok = matcher.Args[0].(*ast.CallExpr) - if !ok { - return false - } - - matcherFuncName, ok = handler.GetActualFuncName(matcher) - if !ok { - return false - } - } - - switch matcherFuncName { - case equal, beIdenticalTo, beEquivalentTo: - arg := matcher.Args[0] - if isPointer(pass, arg) { - return false - } - if isNil(arg) { - return false - } - if isInterface(pass, arg) { - return false - } - case beFalse, beTrue, beNumerically: - default: - return false - } - - handleAssertionOnly(pass, config, expr, handler, actualArg, oldExpr, false) - - args := []ast.Expr{astcopy.CallExpr(expr.Args[0].(*ast.CallExpr))} - handler.ReplaceFunction(expr.Args[0].(*ast.CallExpr), ast.NewIdent(haveValue)) - expr.Args[0].(*ast.CallExpr).Args = args - report(pass, expr, comparePointerToValue, oldExpr) - - return true - -} - -// check async assertion does not assert function call. This is a real bug in the test. In this case, the assertion is -// done on the returned value, instead of polling the result of a function, for instance. -func checkAsyncAssertion(pass *analysis.Pass, config types.Config, expr *ast.CallExpr, actualExpr *ast.CallExpr, handler gomegahandler.Handler, oldExpr string) bool { - funcName, ok := handler.GetActualFuncName(actualExpr) - if !ok { - return false - } - - var funcIndex int - switch funcName { - case eventually, consistently: - funcIndex = 0 - case eventuallyWithOffset, consistentlyWithOffset: - funcIndex = 1 - default: - return false - } - - if !config.SuppressAsync && len(actualExpr.Args) > funcIndex { - t := pass.TypesInfo.TypeOf(actualExpr.Args[funcIndex]) - - // skip context variable, if used as first argument - if "context.Context" == t.String() { - funcIndex++ - } - - if len(actualExpr.Args) > funcIndex { - if fun, funcCall := actualExpr.Args[funcIndex].(*ast.CallExpr); funcCall { - t = pass.TypesInfo.TypeOf(fun) - if !isValidAsyncValueType(t) { - actualExpr = handler.GetActualExpr(expr.Fun.(*ast.SelectorExpr)) - - if len(fun.Args) > 0 { - origArgs := actualExpr.Args - origFunc := actualExpr.Fun - actualExpr.Args = fun.Args - - origArgs[funcIndex] = fun.Fun - call := &ast.SelectorExpr{ - Sel: ast.NewIdent("WithArguments"), - X: &ast.CallExpr{ - Fun: origFunc, - Args: origArgs, - }, - } - - actualExpr.Fun = call - actualExpr.Args = fun.Args - } else { - actualExpr.Args[funcIndex] = fun.Fun - } - - handleAssertionOnly(pass, config, expr, handler, actualExpr, oldExpr, false) - report(pass, expr, fmt.Sprintf(valueInEventually, funcName, funcName)+"; consider using `%s` instead", oldExpr) - return true - } - } - } - } - - handleAssertionOnly(pass, config, expr, handler, actualExpr, oldExpr, true) - return true -} - -func isValidAsyncValueType(t gotypes.Type) bool { - switch t.(type) { - // allow functions that return function or channel. 
- case *gotypes.Signature, *gotypes.Chan, *gotypes.Pointer: - return true - case *gotypes.Named: - return isValidAsyncValueType(t.Underlying()) - } - - return false -} - -func startCheckComparison(exp *ast.CallExpr, handler gomegahandler.Handler) (*ast.CallExpr, bool) { - matcher, ok := exp.Args[0].(*ast.CallExpr) - if !ok { - return nil, false - } - - matcherFuncName, ok := handler.GetActualFuncName(matcher) - if !ok { - return nil, false - } - - switch matcherFuncName { - case beTrue: - case beFalse: - reverseAssertionFuncLogic(exp) - case equal: - boolean, found := matcher.Args[0].(*ast.Ident) - if !found { - return nil, false - } - - if boolean.Name == "false" { - reverseAssertionFuncLogic(exp) - } else if boolean.Name != "true" { - return nil, false - } - - case not: - reverseAssertionFuncLogic(exp) - exp.Args[0] = exp.Args[0].(*ast.CallExpr).Args[0] - return startCheckComparison(exp, handler) - - default: - return nil, false - } - - return matcher, true -} - -func checkComparison(exp *ast.CallExpr, pass *analysis.Pass, matcher *ast.CallExpr, handler gomegahandler.Handler, first ast.Expr, second ast.Expr, op token.Token, oldExp string) bool { - fun, ok := exp.Fun.(*ast.SelectorExpr) - if !ok { - return true - } - - call := handler.GetActualExpr(fun) - if call == nil { - return true - } - - switch op { - case token.EQL: - handleEqualComparison(pass, matcher, first, second, handler) - - case token.NEQ: - reverseAssertionFuncLogic(exp) - handleEqualComparison(pass, matcher, first, second, handler) - case token.GTR, token.GEQ, token.LSS, token.LEQ: - if !isNumeric(pass, first) { - return true - } - handler.ReplaceFunction(matcher, ast.NewIdent(beNumerically)) - matcher.Args = []ast.Expr{ - &ast.BasicLit{Kind: token.STRING, Value: fmt.Sprintf(`"%s"`, op.String())}, - second, - } - default: - return true - } - - call.Args = []ast.Expr{first} - report(pass, exp, wrongCompareWarningTemplate, oldExp) - return false -} - -func handleEqualComparison(pass *analysis.Pass, matcher *ast.CallExpr, first ast.Expr, second ast.Expr, handler gomegahandler.Handler) { - if isZero(pass, second) { - handler.ReplaceFunction(matcher, ast.NewIdent(beZero)) - matcher.Args = nil - } else { - t := pass.TypesInfo.TypeOf(first) - if gotypes.IsInterface(t) { - handler.ReplaceFunction(matcher, ast.NewIdent(beIdenticalTo)) - } else if _, ok := t.(*gotypes.Pointer); ok { - handler.ReplaceFunction(matcher, ast.NewIdent(beIdenticalTo)) - } else { - handler.ReplaceFunction(matcher, ast.NewIdent(equal)) - } - - matcher.Args = []ast.Expr{second} - } -} - -func handleLenComparison(pass *analysis.Pass, exp *ast.CallExpr, matcher *ast.CallExpr, first ast.Expr, second ast.Expr, op token.Token, handler gomegahandler.Handler, oldExpr string) bool { - switch op { - case token.EQL: - case token.NEQ: - reverseAssertionFuncLogic(exp) - default: - return false - } - - var eql *ast.Ident - if isZero(pass, second) { - eql = ast.NewIdent(beEmpty) - } else { - eql = ast.NewIdent(haveLen) - matcher.Args = []ast.Expr{second} - } - - handler.ReplaceFunction(matcher, eql) - firstLen, ok := first.(*ast.CallExpr) // assuming it's len() - if !ok { - return false // should never happen - } - - val := firstLen.Args[0] - fun := handler.GetActualExpr(exp.Fun.(*ast.SelectorExpr)) - fun.Args = []ast.Expr{val} - - report(pass, exp, wrongLengthWarningTemplate, oldExpr) - return true -} - -// Check if the "actual" argument is a call to the golang built-in len() function -func isActualIsLenFunc(actualArg ast.Expr) bool { - lenArgExp, ok := 
actualArg.(*ast.CallExpr) - if !ok { - return false - } - - lenFunc, ok := lenArgExp.Fun.(*ast.Ident) - return ok && lenFunc.Name == "len" -} - -// Check if matcher function is in one of the patterns we want to avoid -func checkLengthMatcher(exp *ast.CallExpr, pass *analysis.Pass, handler gomegahandler.Handler, oldExp string) bool { - matcher, ok := exp.Args[0].(*ast.CallExpr) - if !ok { - return true - } - - matcherFuncName, ok := handler.GetActualFuncName(matcher) - if !ok { - return true - } - - switch matcherFuncName { - case equal: - handleEqualMatcher(matcher, pass, exp, handler, oldExp) - return false - - case beZero: - handleBeZero(pass, exp, handler, oldExp) - return false - - case beNumerically: - return handleBeNumerically(matcher, pass, exp, handler, oldExp) - - case not: - reverseAssertionFuncLogic(exp) - exp.Args[0] = exp.Args[0].(*ast.CallExpr).Args[0] - return checkLengthMatcher(exp, pass, handler, oldExp) - - default: - return true - } -} - -// Check if matcher function is in one of the patterns we want to avoid -func checkNilMatcher(exp *ast.CallExpr, pass *analysis.Pass, nilable ast.Expr, handler gomegahandler.Handler, notEqual bool, oldExp string) bool { - matcher, ok := exp.Args[0].(*ast.CallExpr) - if !ok { - return true - } - - matcherFuncName, ok := handler.GetActualFuncName(matcher) - if !ok { - return true - } - - switch matcherFuncName { - case equal: - handleEqualNilMatcher(matcher, pass, exp, handler, nilable, notEqual, oldExp) - - case beTrue: - handleNilBeBoolMatcher(pass, exp, handler, nilable, notEqual, oldExp) - - case beFalse: - reverseAssertionFuncLogic(exp) - handleNilBeBoolMatcher(pass, exp, handler, nilable, notEqual, oldExp) - - case not: - reverseAssertionFuncLogic(exp) - exp.Args[0] = exp.Args[0].(*ast.CallExpr).Args[0] - return checkNilMatcher(exp, pass, nilable, handler, notEqual, oldExp) - - default: - return true - } - return false -} - -func checkNilError(pass *analysis.Pass, assertionExp *ast.CallExpr, handler gomegahandler.Handler, actualArg ast.Expr, oldExpr string) bool { - if len(assertionExp.Args) == 0 { - return true - } - - equalFuncExpr, ok := assertionExp.Args[0].(*ast.CallExpr) - if !ok { - return true - } - - funcName, ok := handler.GetActualFuncName(equalFuncExpr) - if !ok { - return true - } - - switch funcName { - case beNil: // no additional processing needed. 
- case equal: - - if len(equalFuncExpr.Args) == 0 { - return true - } - - nilable, ok := equalFuncExpr.Args[0].(*ast.Ident) - if !ok || nilable.Name != "nil" { - return true - } - - case not: - reverseAssertionFuncLogic(assertionExp) - assertionExp.Args[0] = assertionExp.Args[0].(*ast.CallExpr).Args[0] - return checkNilError(pass, assertionExp, handler, actualArg, oldExpr) - default: - return true - } - - var newFuncName string - if _, ok := actualArg.(*ast.CallExpr); ok { - newFuncName = succeed - } else { - reverseAssertionFuncLogic(assertionExp) - newFuncName = haveOccurred - } - - handler.ReplaceFunction(equalFuncExpr, ast.NewIdent(newFuncName)) - equalFuncExpr.Args = nil - - report(pass, assertionExp, wrongErrWarningTemplate, oldExpr) - return false -} - -// handleAssertionOnly checks use-cases when the actual value is valid, but only the assertion should be fixed -// it handles: -// -// Equal(nil) => BeNil() -// Equal(true) => BeTrue() -// Equal(false) => BeFalse() -// HaveLen(0) => BeEmpty() -func handleAssertionOnly(pass *analysis.Pass, config types.Config, expr *ast.CallExpr, handler gomegahandler.Handler, actualArg ast.Expr, oldExpr string, shouldReport bool) bool { - if len(expr.Args) == 0 { - return true - } - - equalFuncExpr, ok := expr.Args[0].(*ast.CallExpr) - if !ok { - return true - } - - funcName, ok := handler.GetActualFuncName(equalFuncExpr) - if !ok { - return true - } - - switch funcName { - case equal: - if len(equalFuncExpr.Args) == 0 { - return true - } - - tkn, ok := equalFuncExpr.Args[0].(*ast.Ident) - if !ok { - return true - } - - var replacement string - var template string - switch tkn.Name { - case "nil": - if config.SuppressNil { - return true - } - replacement = beNil - template = wrongNilWarningTemplate - case "true": - replacement = beTrue - template = wrongBoolWarningTemplate - case "false": - if isNegativeAssertion(expr) { - reverseAssertionFuncLogic(expr) - replacement = beTrue - } else { - replacement = beFalse - } - template = wrongBoolWarningTemplate - default: - return true - } - - handler.ReplaceFunction(equalFuncExpr, ast.NewIdent(replacement)) - equalFuncExpr.Args = nil - - if shouldReport { - report(pass, expr, template, oldExpr) - } - - return false - - case beFalse: - if isNegativeAssertion(expr) { - reverseAssertionFuncLogic(expr) - handler.ReplaceFunction(equalFuncExpr, ast.NewIdent(beTrue)) - if shouldReport { - report(pass, expr, doubleNegativeWarningTemplate, oldExpr) - } - } - return false - - case haveLen: - if config.AllowHaveLen0 { - return true - } - - if len(equalFuncExpr.Args) > 0 { - if isZero(pass, equalFuncExpr.Args[0]) { - handler.ReplaceFunction(equalFuncExpr, ast.NewIdent(beEmpty)) - equalFuncExpr.Args = nil - if shouldReport { - report(pass, expr, wrongLengthWarningTemplate, oldExpr) - } - return false - } - } - - return true - - case not: - reverseAssertionFuncLogic(expr) - expr.Args[0] = expr.Args[0].(*ast.CallExpr).Args[0] - return handleAssertionOnly(pass, config, expr, handler, actualArg, oldExpr, shouldReport) - default: - return true - } -} - -func isZero(pass *analysis.Pass, arg ast.Expr) bool { - if val, ok := arg.(*ast.BasicLit); ok && val.Kind == token.INT && val.Value == "0" { - return true - } - info, ok := pass.TypesInfo.Types[arg] - if ok { - if t, ok := info.Type.(*gotypes.Basic); ok && t.Kind() == gotypes.Int && info.Value != nil { - if i, ok := constant.Int64Val(info.Value); ok && i == 0 { - return true - } - } - } else if val, ok := arg.(*ast.Ident); ok && val.Obj != nil && val.Obj.Kind == ast.Con { - if 
spec, ok := val.Obj.Decl.(*ast.ValueSpec); ok { - if len(spec.Values) == 1 { - if value, ok := spec.Values[0].(*ast.BasicLit); ok && value.Kind == token.INT && value.Value == "0" { - return true - } - } - } - } - - return false -} - -// getActualArg checks that the function is an assertion's actual function and return the "actual" parameter. If the -// function is not assertion's actual function, return nil. -func getActualArg(actualExpr *ast.CallExpr, handler gomegahandler.Handler) ast.Expr { - funcName, ok := handler.GetActualFuncName(actualExpr) - if !ok { - return nil - } - - switch funcName { - case expect, omega: - return actualExpr.Args[0] - case expectWithOffset: - return actualExpr.Args[1] - default: - return nil - } -} - -// Replace the len function call by its parameter, to create a fix suggestion -func replaceLenActualArg(actualExpr *ast.CallExpr, handler gomegahandler.Handler) { - name, ok := handler.GetActualFuncName(actualExpr) - if !ok { - return - } - - switch name { - case expect, omega: - arg := actualExpr.Args[0] - if isActualIsLenFunc(arg) { - // replace the len function call by its parameter, to create a fix suggestion - actualExpr.Args[0] = arg.(*ast.CallExpr).Args[0] - } - case expectWithOffset: - arg := actualExpr.Args[1] - if isActualIsLenFunc(arg) { - // replace the len function call by its parameter, to create a fix suggestion - actualExpr.Args[1] = arg.(*ast.CallExpr).Args[0] - } - } -} - -// Replace the nil comparison with the compared object, to create a fix suggestion -func replaceNilActualArg(actualExpr *ast.CallExpr, handler gomegahandler.Handler, nilable ast.Expr) bool { - actualFuncName, ok := handler.GetActualFuncName(actualExpr) - if !ok { - return false - } - - switch actualFuncName { - case expect, omega: - actualExpr.Args[0] = nilable - return true - - case expectWithOffset: - actualExpr.Args[1] = nilable - return true - - default: - return false - } -} - -// For the BeNumerically matcher, we want to avoid the assertion of length to be > 0 or >= 1, or just == number -func handleBeNumerically(matcher *ast.CallExpr, pass *analysis.Pass, exp *ast.CallExpr, handler gomegahandler.Handler, oldExp string) bool { - opExp, ok1 := matcher.Args[0].(*ast.BasicLit) - valExp, ok2 := matcher.Args[1].(*ast.BasicLit) - - if ok1 && ok2 { - op := opExp.Value - val := valExp.Value - - if (op == `">"` && val == "0") || (op == `">="` && val == "1") { - reverseAssertionFuncLogic(exp) - handler.ReplaceFunction(exp.Args[0].(*ast.CallExpr), ast.NewIdent(beEmpty)) - exp.Args[0].(*ast.CallExpr).Args = nil - reportLengthAssertion(pass, exp, handler, oldExp) - return false - } else if op == `"=="` { - chooseNumericMatcher(pass, exp, handler, valExp) - reportLengthAssertion(pass, exp, handler, oldExp) - - return false - } else if op == `"!="` { - reverseAssertionFuncLogic(exp) - chooseNumericMatcher(pass, exp, handler, valExp) - reportLengthAssertion(pass, exp, handler, oldExp) - - return false - } - } - return true -} - -func chooseNumericMatcher(pass *analysis.Pass, exp *ast.CallExpr, handler gomegahandler.Handler, valExp ast.Expr) { - caller := exp.Args[0].(*ast.CallExpr) - if isZero(pass, valExp) { - handler.ReplaceFunction(caller, ast.NewIdent(beEmpty)) - exp.Args[0].(*ast.CallExpr).Args = nil - } else { - handler.ReplaceFunction(caller, ast.NewIdent(haveLen)) - exp.Args[0].(*ast.CallExpr).Args = []ast.Expr{valExp} - } -} - -func reverseAssertionFuncLogic(exp *ast.CallExpr) { - assertionFunc := exp.Fun.(*ast.SelectorExpr).Sel - assertionFunc.Name = 
reverseassertion.ChangeAssertionLogic(assertionFunc.Name) -} - -func isNegativeAssertion(exp *ast.CallExpr) bool { - assertionFunc := exp.Fun.(*ast.SelectorExpr).Sel - return reverseassertion.IsNegativeLogic(assertionFunc.Name) -} - -func handleEqualMatcher(matcher *ast.CallExpr, pass *analysis.Pass, exp *ast.CallExpr, handler gomegahandler.Handler, oldExp string) { - equalTo, ok := matcher.Args[0].(*ast.BasicLit) - if ok { - chooseNumericMatcher(pass, exp, handler, equalTo) - } else { - handler.ReplaceFunction(exp.Args[0].(*ast.CallExpr), ast.NewIdent(haveLen)) - exp.Args[0].(*ast.CallExpr).Args = []ast.Expr{matcher.Args[0]} - } - reportLengthAssertion(pass, exp, handler, oldExp) -} - -func handleBeZero(pass *analysis.Pass, exp *ast.CallExpr, handler gomegahandler.Handler, oldExp string) { - exp.Args[0].(*ast.CallExpr).Args = nil - handler.ReplaceFunction(exp.Args[0].(*ast.CallExpr), ast.NewIdent(beEmpty)) - reportLengthAssertion(pass, exp, handler, oldExp) -} - -func handleEqualNilMatcher(matcher *ast.CallExpr, pass *analysis.Pass, exp *ast.CallExpr, handler gomegahandler.Handler, nilable ast.Expr, notEqual bool, oldExp string) { - equalTo, ok := matcher.Args[0].(*ast.Ident) - if !ok { - return - } - - if equalTo.Name == "false" { - reverseAssertionFuncLogic(exp) - } else if equalTo.Name != "true" { - return - } - - newFuncName, isItError := handleNilComparisonErr(pass, exp, nilable) - - handler.ReplaceFunction(exp.Args[0].(*ast.CallExpr), ast.NewIdent(newFuncName)) - exp.Args[0].(*ast.CallExpr).Args = nil - - reportNilAssertion(pass, exp, handler, nilable, notEqual, oldExp, isItError) -} - -func handleNilBeBoolMatcher(pass *analysis.Pass, exp *ast.CallExpr, handler gomegahandler.Handler, nilable ast.Expr, notEqual bool, oldExp string) { - newFuncName, isItError := handleNilComparisonErr(pass, exp, nilable) - handler.ReplaceFunction(exp.Args[0].(*ast.CallExpr), ast.NewIdent(newFuncName)) - exp.Args[0].(*ast.CallExpr).Args = nil - - reportNilAssertion(pass, exp, handler, nilable, notEqual, oldExp, isItError) -} - -func handleNilComparisonErr(pass *analysis.Pass, exp *ast.CallExpr, nilable ast.Expr) (string, bool) { - newFuncName := beNil - isItError := isExprError(pass, nilable) - if isItError { - if _, ok := nilable.(*ast.CallExpr); ok { - newFuncName = succeed - } else { - reverseAssertionFuncLogic(exp) - newFuncName = haveOccurred - } - } - - return newFuncName, isItError -} - -func isAssertionFunc(name string) bool { - switch name { - case "To", "ToNot", "NotTo", "Should", "ShouldNot": - return true - } - return false -} - -func reportLengthAssertion(pass *analysis.Pass, expr *ast.CallExpr, handler gomegahandler.Handler, oldExpr string) { - actualExpr := handler.GetActualExpr(expr.Fun.(*ast.SelectorExpr)) - replaceLenActualArg(actualExpr, handler) - - report(pass, expr, wrongLengthWarningTemplate, oldExpr) -} - -func reportNilAssertion(pass *analysis.Pass, expr *ast.CallExpr, handler gomegahandler.Handler, nilable ast.Expr, notEqual bool, oldExpr string, isItError bool) { - actualExpr := handler.GetActualExpr(expr.Fun.(*ast.SelectorExpr)) - changed := replaceNilActualArg(actualExpr, handler, nilable) - if !changed { - return - } - - if notEqual { - reverseAssertionFuncLogic(expr) - } - template := wrongNilWarningTemplate - if isItError { - template = wrongErrWarningTemplate - } - - report(pass, expr, template, oldExpr) -} - -func report(pass *analysis.Pass, expr ast.Expr, messageTemplate, oldExpr string) { - newExp := goFmt(pass.Fset, expr) - pass.Report(analysis.Diagnostic{ - Pos: 
expr.Pos(), - Message: fmt.Sprintf(messageTemplate, newExp), - SuggestedFixes: []analysis.SuggestedFix{ - { - Message: fmt.Sprintf("should replace %s with %s", oldExpr, newExp), - TextEdits: []analysis.TextEdit{ - { - Pos: expr.Pos(), - End: expr.End(), - NewText: []byte(newExp), - }, - }, - }, - }, - }) -} - -func reportNewName(pass *analysis.Pass, id *ast.Ident, newName string, messageTemplate, oldExpr string) { - pass.Report(analysis.Diagnostic{ - Pos: id.Pos(), - Message: fmt.Sprintf(messageTemplate, newName), - SuggestedFixes: []analysis.SuggestedFix{ - { - Message: fmt.Sprintf("should replace %s with %s", oldExpr, newName), - TextEdits: []analysis.TextEdit{ - { - Pos: id.Pos(), - End: id.End(), - NewText: []byte(newName), - }, - }, - }, - }, - }) -} - -func reportNoFix(pass *analysis.Pass, pos token.Pos, message string, args ...any) { - if len(args) > 0 { - message = fmt.Sprintf(message, args...) - } - - pass.Report(analysis.Diagnostic{ - Pos: pos, - Message: message, - }) -} - -func getNilableFromComparison(actualArg ast.Expr) (ast.Expr, token.Token) { - bin, ok := actualArg.(*ast.BinaryExpr) - if !ok { - return nil, token.ILLEGAL - } - - if bin.Op == token.EQL || bin.Op == token.NEQ { - if isNil(bin.Y) { - return bin.X, bin.Op - } else if isNil(bin.X) { - return bin.Y, bin.Op - } - } - - return nil, token.ILLEGAL -} - -func isNil(expr ast.Expr) bool { - nilObject, ok := expr.(*ast.Ident) - return ok && nilObject.Name == "nil" && nilObject.Obj == nil -} - -func isComparison(pass *analysis.Pass, actualArg ast.Expr) (ast.Expr, ast.Expr, token.Token, bool) { - bin, ok := actualArg.(*ast.BinaryExpr) - if !ok { - return nil, nil, token.ILLEGAL, false - } - - first, second, op := bin.X, bin.Y, bin.Op - replace := false - switch realFirst := first.(type) { - case *ast.Ident: // check if const - info, ok := pass.TypesInfo.Types[realFirst] - if ok { - if _, ok := info.Type.(*gotypes.Basic); ok && info.Value != nil { - replace = true - } - } - - case *ast.BasicLit: - replace = true - } - - if replace { - first, second = second, first - } - - switch op { - case token.EQL: - case token.NEQ: - case token.GTR, token.GEQ, token.LSS, token.LEQ: - if replace { - op = reverseassertion.ChangeCompareOperator(op) - } - default: - return nil, nil, token.ILLEGAL, false - } - return first, second, op, true -} - -func goFmt(fset *token.FileSet, x ast.Expr) string { - var b bytes.Buffer - _ = printer.Fprint(&b, fset, x) - return b.String() -} - -func isExprError(pass *analysis.Pass, expr ast.Expr) bool { - actualArgType := pass.TypesInfo.TypeOf(expr) - switch t := actualArgType.(type) { - case *gotypes.Named: - if interfaces.ImplementsError(actualArgType) { - return true - } - case *gotypes.Tuple: - if t.Len() > 0 { - switch t0 := t.At(0).Type().(type) { - case *gotypes.Named, *gotypes.Pointer: - if interfaces.ImplementsError(t0) { - return true - } - } - } - } - return false -} - -func isPointer(pass *analysis.Pass, expr ast.Expr) bool { - t := pass.TypesInfo.TypeOf(expr) - _, ok := t.(*gotypes.Pointer) - return ok -} - -func isInterface(pass *analysis.Pass, expr ast.Expr) bool { - t := pass.TypesInfo.TypeOf(expr) - return gotypes.IsInterface(t) -} - -func isNumeric(pass *analysis.Pass, node ast.Expr) bool { - t := pass.TypesInfo.TypeOf(node) - - switch t.String() { - case "int", "uint", "int8", "uint8", "int16", "uint16", "int32", "uint32", "int64", "uint64", "float32", "float64": - return true - } - return false -} - -func checkNoAssertion(pass *analysis.Pass, expr *ast.CallExpr, handler 
gomegahandler.Handler) { - funcName, ok := handler.GetActualFuncName(expr) - if ok { - if isActualFunc(funcName) { - reportNoFix(pass, expr.Pos(), missingAssertionMessage, funcName) - } else if isActualAsyncFunc(funcName) { - reportNoFix(pass, expr.Pos(), missingAsyncAssertionMessage, funcName) - } - } -} - -func isActualFunc(name string) bool { - return name == expect || name == expectWithOffset -} - -func isActualAsyncFunc(name string) bool { - switch name { - case eventually, eventuallyWithOffset, consistently, consistentlyWithOffset: - return true - } - return false -} diff --git a/vendor/github.com/nunnatsa/ginkgolinter/ginkgohandler/handler.go b/vendor/github.com/nunnatsa/ginkgolinter/ginkgohandler/handler.go deleted file mode 100644 index c0829c469..000000000 --- a/vendor/github.com/nunnatsa/ginkgolinter/ginkgohandler/handler.go +++ /dev/null @@ -1,97 +0,0 @@ -package ginkgohandler - -import ( - "go/ast" -) - -const ( - importPath = `"github.com/onsi/ginkgo"` - importPathV2 = `"github.com/onsi/ginkgo/v2"` - - focusSpec = "Focus" -) - -// Handler provide different handling, depend on the way ginkgo was imported, whether -// in imported with "." name, custom name or without any name. -type Handler interface { - GetFocusContainerName(*ast.CallExpr) (bool, *ast.Ident) - IsFocusSpec(ident ast.Expr) bool -} - -// GetGinkgoHandler returns a ginkgor handler according to the way ginkgo was imported in the specific file -func GetGinkgoHandler(file *ast.File) Handler { - for _, imp := range file.Imports { - if imp.Path.Value != importPath && imp.Path.Value != importPathV2 { - continue - } - - switch name := imp.Name.String(); { - case name == ".": - return dotHandler{} - case name == "": // import with no local name - return nameHandler("ginkgo") - default: - return nameHandler(name) - } - } - - return nil // no ginkgo import; this file does not use ginkgo -} - -// dotHandler is used when importing ginkgo with dot; i.e. -// import . "github.com/onsi/ginkgo" -type dotHandler struct{} - -func (h dotHandler) GetFocusContainerName(exp *ast.CallExpr) (bool, *ast.Ident) { - if fun, ok := exp.Fun.(*ast.Ident); ok { - return isFocusContainer(fun.Name), fun - } - return false, nil -} - -func (h dotHandler) IsFocusSpec(exp ast.Expr) bool { - id, ok := exp.(*ast.Ident) - return ok && id.Name == focusSpec -} - -// nameHandler is used when importing ginkgo without name; i.e. -// import "github.com/onsi/ginkgo" -// -// or with a custom name; e.g. 
-// import customname "github.com/onsi/ginkgo" -type nameHandler string - -func (h nameHandler) GetFocusContainerName(exp *ast.CallExpr) (bool, *ast.Ident) { - if sel, ok := exp.Fun.(*ast.SelectorExpr); ok { - if id, ok := sel.X.(*ast.Ident); ok && id.Name == string(h) { - return isFocusContainer(sel.Sel.Name), sel.Sel - } - } - return false, nil -} - -func (h nameHandler) IsFocusSpec(exp ast.Expr) bool { - if selExp, ok := exp.(*ast.SelectorExpr); ok { - if x, ok := selExp.X.(*ast.Ident); ok && x.Name == string(h) { - return selExp.Sel.Name == focusSpec - } - } - - return false -} - -func isFocusContainer(name string) bool { - switch name { - case "FDescribe", "FContext", "FWhen", "FIt", "FDescribeTable", "FEntry": - return true - } - return false -} - -func IsContainer(id *ast.Ident) bool { - switch id.Name { - case "It", "When", "Context", "Describe", "DescribeTable", "Entry": - return true - } - return isFocusContainer(id.Name) -} diff --git a/vendor/github.com/nunnatsa/ginkgolinter/gomegahandler/handler.go b/vendor/github.com/nunnatsa/ginkgolinter/gomegahandler/handler.go deleted file mode 100644 index 419145b75..000000000 --- a/vendor/github.com/nunnatsa/ginkgolinter/gomegahandler/handler.go +++ /dev/null @@ -1,236 +0,0 @@ -package gomegahandler - -import ( - "go/ast" - "go/token" -) - -const ( - importPath = `"github.com/onsi/gomega"` -) - -// Handler provide different handling, depend on the way gomega was imported, whether -// in imported with "." name, custom name or without any name. -type Handler interface { - // GetActualFuncName returns the name of the gomega function, e.g. `Expect` - GetActualFuncName(*ast.CallExpr) (string, bool) - // ReplaceFunction replaces the function with another one, for fix suggestions - ReplaceFunction(*ast.CallExpr, *ast.Ident) - - getDefFuncName(expr *ast.CallExpr) string - - getFieldType(field *ast.Field) string - - GetActualExpr(assertionFunc *ast.SelectorExpr) *ast.CallExpr -} - -// GetGomegaHandler returns a gomegar handler according to the way gomega was imported in the specific file -func GetGomegaHandler(file *ast.File) Handler { - for _, imp := range file.Imports { - if imp.Path.Value != importPath { - continue - } - - switch name := imp.Name.String(); { - case name == ".": - return dotHandler{} - case name == "": // import with no local name - return nameHandler("gomega") - default: - return nameHandler(name) - } - } - - return nil // no gomega import; this file does not use gomega -} - -// dotHandler is used when importing gomega with dot; i.e. -// import . "github.com/onsi/gomega" -type dotHandler struct{} - -// GetActualFuncName returns the name of the gomega function, e.g. 
`Expect` -func (h dotHandler) GetActualFuncName(expr *ast.CallExpr) (string, bool) { - switch actualFunc := expr.Fun.(type) { - case *ast.Ident: - return actualFunc.Name, true - case *ast.SelectorExpr: - if isGomegaVar(actualFunc.X, h) { - return actualFunc.Sel.Name, true - } - - if x, ok := actualFunc.X.(*ast.CallExpr); ok { - return h.GetActualFuncName(x) - } - - case *ast.CallExpr: - return h.GetActualFuncName(actualFunc) - } - return "", false -} - -// ReplaceFunction replaces the function with another one, for fix suggestions -func (dotHandler) ReplaceFunction(caller *ast.CallExpr, newExpr *ast.Ident) { - caller.Fun = newExpr -} - -func (dotHandler) getDefFuncName(expr *ast.CallExpr) string { - if f, ok := expr.Fun.(*ast.Ident); ok { - return f.Name - } - return "" -} - -func (dotHandler) getFieldType(field *ast.Field) string { - switch t := field.Type.(type) { - case *ast.Ident: - return t.Name - case *ast.StarExpr: - if name, ok := t.X.(*ast.Ident); ok { - return name.Name - } - } - return "" -} - -// nameHandler is used when importing gomega without name; i.e. -// import "github.com/onsi/gomega" -// -// or with a custom name; e.g. -// import customname "github.com/onsi/gomega" -type nameHandler string - -// GetActualFuncName returns the name of the gomega function, e.g. `Expect` -func (g nameHandler) GetActualFuncName(expr *ast.CallExpr) (string, bool) { - selector, ok := expr.Fun.(*ast.SelectorExpr) - if !ok { - return "", false - } - - switch x := selector.X.(type) { - case *ast.Ident: - if x.Name != string(g) { - if !isGomegaVar(x, g) { - return "", false - } - } - - return selector.Sel.Name, true - - case *ast.CallExpr: - return g.GetActualFuncName(x) - } - - return "", false -} - -// ReplaceFunction replaces the function with another one, for fix suggestions -func (nameHandler) ReplaceFunction(caller *ast.CallExpr, newExpr *ast.Ident) { - caller.Fun.(*ast.SelectorExpr).Sel = newExpr -} - -func (g nameHandler) getDefFuncName(expr *ast.CallExpr) string { - if sel, ok := expr.Fun.(*ast.SelectorExpr); ok { - if f, ok := sel.X.(*ast.Ident); ok && f.Name == string(g) { - return sel.Sel.Name - } - } - return "" -} - -func (g nameHandler) getFieldType(field *ast.Field) string { - switch t := field.Type.(type) { - case *ast.SelectorExpr: - if id, ok := t.X.(*ast.Ident); ok { - if id.Name == string(g) { - return t.Sel.Name - } - } - case *ast.StarExpr: - if sel, ok := t.X.(*ast.SelectorExpr); ok { - if x, ok := sel.X.(*ast.Ident); ok && x.Name == string(g) { - return sel.Sel.Name - } - } - - } - return "" -} - -func isGomegaVar(x ast.Expr, handler Handler) bool { - if i, ok := x.(*ast.Ident); ok { - if i.Obj != nil && i.Obj.Kind == ast.Var { - switch decl := i.Obj.Decl.(type) { - case *ast.AssignStmt: - if decl.Tok == token.DEFINE { - if defFunc, ok := decl.Rhs[0].(*ast.CallExpr); ok { - fName := handler.getDefFuncName(defFunc) - switch fName { - case "NewGomega", "NewWithT", "NewGomegaWithT": - return true - } - } - } - case *ast.Field: - name := handler.getFieldType(decl) - switch name { - case "Gomega", "WithT", "GomegaWithT": - return true - } - } - } - } - return false -} - -func (h dotHandler) GetActualExpr(assertionFunc *ast.SelectorExpr) *ast.CallExpr { - actualExpr, ok := assertionFunc.X.(*ast.CallExpr) - if !ok { - return nil - } - - switch fun := actualExpr.Fun.(type) { - case *ast.Ident: - return actualExpr - case *ast.SelectorExpr: - if isHelperMethods(fun.Sel.Name) { - return h.GetActualExpr(fun) - } - if isGomegaVar(fun.X, h) { - return actualExpr - } - } - return nil -} 
- -func (g nameHandler) GetActualExpr(assertionFunc *ast.SelectorExpr) *ast.CallExpr { - actualExpr, ok := assertionFunc.X.(*ast.CallExpr) - if !ok { - return nil - } - - switch fun := actualExpr.Fun.(type) { - case *ast.Ident: - return actualExpr - case *ast.SelectorExpr: - if x, ok := fun.X.(*ast.Ident); ok && x.Name == string(g) { - return actualExpr - } - if isHelperMethods(fun.Sel.Name) { - return g.GetActualExpr(fun) - } - - if isGomegaVar(fun.X, g) { - return actualExpr - } - } - return nil -} - -func isHelperMethods(funcName string) bool { - switch funcName { - case "WithOffset", "WithTimeout", "WithPolling", "Within", "ProbeEvery", "WithContext", "WithArguments", "MustPassRepeatedly": - return true - } - - return false -} diff --git a/vendor/github.com/nunnatsa/ginkgolinter/interfaces/interfaces.go b/vendor/github.com/nunnatsa/ginkgolinter/interfaces/interfaces.go deleted file mode 100644 index dafeacd4f..000000000 --- a/vendor/github.com/nunnatsa/ginkgolinter/interfaces/interfaces.go +++ /dev/null @@ -1,76 +0,0 @@ -package interfaces - -import ( - "go/token" - gotypes "go/types" -) - -var ( - errorType *gotypes.Interface - gomegaMatcherType *gotypes.Interface -) - -func init() { - errorType = gotypes.Universe.Lookup("error").Type().Underlying().(*gotypes.Interface) - gomegaMatcherType = generateTheGomegaMatcherInfType() -} - -// generateTheGomegaMatcherInfType generates a types.Interface instance that represents the -// GomegaMatcher interface. -// The original code is (copied from https://github.com/nunnatsa/ginkgolinter/blob/8fdd05eee922578d4699f49d267001c01e0b9f1e/testdata/src/a/vendor/github.com/onsi/gomega/types/types.go) -// -// type GomegaMatcher interface { -// Match(actual interface{}) (success bool, err error) -// FailureMessage(actual interface{}) (message string) -// NegatedFailureMessage(actual interface{}) (message string) -// } -func generateTheGomegaMatcherInfType() *gotypes.Interface { - err := gotypes.Universe.Lookup("error").Type() - bl := gotypes.Typ[gotypes.Bool] - str := gotypes.Typ[gotypes.String] - anyType := gotypes.Universe.Lookup("any").Type() - - return gotypes.NewInterfaceType([]*gotypes.Func{ - // Match(actual interface{}) (success bool, err error) - gotypes.NewFunc(token.NoPos, nil, "Match", gotypes.NewSignatureType( - nil, nil, nil, - gotypes.NewTuple( - gotypes.NewVar(token.NoPos, nil, "actual", anyType), - ), - gotypes.NewTuple( - gotypes.NewVar(token.NoPos, nil, "", bl), - gotypes.NewVar(token.NoPos, nil, "", err), - ), false), - ), - // FailureMessage(actual interface{}) (message string) - gotypes.NewFunc(token.NoPos, nil, "FailureMessage", gotypes.NewSignatureType( - nil, nil, nil, - gotypes.NewTuple( - gotypes.NewVar(token.NoPos, nil, "", anyType), - ), - gotypes.NewTuple( - gotypes.NewVar(token.NoPos, nil, "", str), - ), - false), - ), - //NegatedFailureMessage(actual interface{}) (message string) - gotypes.NewFunc(token.NoPos, nil, "NegatedFailureMessage", gotypes.NewSignatureType( - nil, nil, nil, - gotypes.NewTuple( - gotypes.NewVar(token.NoPos, nil, "", anyType), - ), - gotypes.NewTuple( - gotypes.NewVar(token.NoPos, nil, "", str), - ), - false), - ), - }, nil) -} - -func ImplementsError(t gotypes.Type) bool { - return gotypes.Implements(t, errorType) -} - -func ImplementsGomegaMatcher(t gotypes.Type) bool { - return gotypes.Implements(t, gomegaMatcherType) -} diff --git a/vendor/github.com/nunnatsa/ginkgolinter/reverseassertion/reverse_assertion.go b/vendor/github.com/nunnatsa/ginkgolinter/reverseassertion/reverse_assertion.go deleted 
file mode 100644 index 1dbd89810..000000000 --- a/vendor/github.com/nunnatsa/ginkgolinter/reverseassertion/reverse_assertion.go +++ /dev/null @@ -1,42 +0,0 @@ -package reverseassertion - -import "go/token" - -var reverseLogicAssertions = map[string]string{ - "To": "ToNot", - "ToNot": "To", - "NotTo": "To", - "Should": "ShouldNot", - "ShouldNot": "Should", -} - -// ChangeAssertionLogic get gomega assertion function name, and returns the reverse logic function name -func ChangeAssertionLogic(funcName string) string { - if revFunc, ok := reverseLogicAssertions[funcName]; ok { - return revFunc - } - return funcName -} - -func IsNegativeLogic(funcName string) bool { - switch funcName { - case "ToNot", "NotTo", "ShouldNot": - return true - } - return false -} - -var reverseCompareOperators = map[token.Token]token.Token{ - token.LSS: token.GTR, - token.GTR: token.LSS, - token.LEQ: token.GEQ, - token.GEQ: token.LEQ, -} - -// ChangeCompareOperator return the reversed comparison operator -func ChangeCompareOperator(op token.Token) token.Token { - if revOp, ok := reverseCompareOperators[op]; ok { - return revOp - } - return op -} diff --git a/vendor/github.com/nunnatsa/ginkgolinter/types/boolean.go b/vendor/github.com/nunnatsa/ginkgolinter/types/boolean.go deleted file mode 100644 index be510c4e9..000000000 --- a/vendor/github.com/nunnatsa/ginkgolinter/types/boolean.go +++ /dev/null @@ -1,32 +0,0 @@ -package types - -import ( - "errors" - "strings" -) - -// Boolean is a bool, implementing the flag.Value interface, to be used as a flag var. -type Boolean bool - -func (b *Boolean) Set(value string) error { - if b == nil { - return errors.New("trying to set nil parameter") - } - switch strings.ToLower(value) { - case "true": - *b = true - case "false": - *b = false - default: - return errors.New(value + " is not a Boolean value") - - } - return nil -} - -func (b Boolean) String() string { - if b { - return "true" - } - return "false" -} diff --git a/vendor/github.com/nunnatsa/ginkgolinter/types/config.go b/vendor/github.com/nunnatsa/ginkgolinter/types/config.go deleted file mode 100644 index 6de22738d..000000000 --- a/vendor/github.com/nunnatsa/ginkgolinter/types/config.go +++ /dev/null @@ -1,94 +0,0 @@ -package types - -import ( - "strings" - - "go/ast" -) - -const ( - suppressPrefix = "ginkgo-linter:" - suppressLengthAssertionWarning = suppressPrefix + "ignore-len-assert-warning" - suppressNilAssertionWarning = suppressPrefix + "ignore-nil-assert-warning" - suppressErrAssertionWarning = suppressPrefix + "ignore-err-assert-warning" - suppressCompareAssertionWarning = suppressPrefix + "ignore-compare-assert-warning" - suppressAsyncAsertWarning = suppressPrefix + "ignore-async-assert-warning" - suppressFocusContainerWarning = suppressPrefix + "ignore-focus-container-warning" - suppressTypeCompareWarning = suppressPrefix + "ignore-type-compare-warning" -) - -type Config struct { - SuppressLen Boolean - SuppressNil Boolean - SuppressErr Boolean - SuppressCompare Boolean - SuppressAsync Boolean - ForbidFocus Boolean - SuppressTypeCompare Boolean - AllowHaveLen0 Boolean -} - -func (s *Config) AllTrue() bool { - return bool(s.SuppressLen && s.SuppressNil && s.SuppressErr && s.SuppressCompare && s.SuppressAsync && !s.ForbidFocus) -} - -func (s *Config) Clone() Config { - return Config{ - SuppressLen: s.SuppressLen, - SuppressNil: s.SuppressNil, - SuppressErr: s.SuppressErr, - SuppressCompare: s.SuppressCompare, - SuppressAsync: s.SuppressAsync, - ForbidFocus: s.ForbidFocus, - SuppressTypeCompare: 
s.SuppressTypeCompare, - AllowHaveLen0: s.AllowHaveLen0, - } -} - -func (s *Config) UpdateFromComment(commentGroup []*ast.CommentGroup) { - for _, cmntList := range commentGroup { - if s.AllTrue() { - break - } - - for _, cmnt := range cmntList.List { - commentLines := strings.Split(cmnt.Text, "\n") - for _, comment := range commentLines { - comment = strings.TrimPrefix(comment, "//") - comment = strings.TrimPrefix(comment, "/*") - comment = strings.TrimSuffix(comment, "*/") - comment = strings.TrimSpace(comment) - - switch comment { - case suppressLengthAssertionWarning: - s.SuppressLen = true - case suppressNilAssertionWarning: - s.SuppressNil = true - case suppressErrAssertionWarning: - s.SuppressErr = true - case suppressCompareAssertionWarning: - s.SuppressCompare = true - case suppressAsyncAsertWarning: - s.SuppressAsync = true - case suppressFocusContainerWarning: - s.ForbidFocus = false - case suppressTypeCompareWarning: - s.SuppressTypeCompare = true - } - } - } - } -} - -func (s *Config) UpdateFromFile(cm ast.CommentMap) { - - for key, commentGroup := range cm { - if s.AllTrue() { - break - } - - if _, ok := key.(*ast.GenDecl); ok { - s.UpdateFromComment(commentGroup) - } - } -} diff --git a/vendor/github.com/nunnatsa/ginkgolinter/version/version.go b/vendor/github.com/nunnatsa/ginkgolinter/version/version.go deleted file mode 100644 index 7bf181a8e..000000000 --- a/vendor/github.com/nunnatsa/ginkgolinter/version/version.go +++ /dev/null @@ -1,14 +0,0 @@ -package version - -var ( - version = "unknown" - gitHash = "unknown" -) - -func Version() string { - return version -} - -func GitHash() string { - return gitHash -} diff --git a/vendor/github.com/olekukonko/tablewriter/.gitignore b/vendor/github.com/olekukonko/tablewriter/.gitignore deleted file mode 100644 index b66cec635..000000000 --- a/vendor/github.com/olekukonko/tablewriter/.gitignore +++ /dev/null @@ -1,15 +0,0 @@ -# Created by .ignore support plugin (hsz.mobi) -### Go template -# Binaries for programs and plugins -*.exe -*.exe~ -*.dll -*.so -*.dylib - -# Test binary, build with `go test -c` -*.test - -# Output of the go coverage tool, specifically when used with LiteIDE -*.out - diff --git a/vendor/github.com/olekukonko/tablewriter/.travis.yml b/vendor/github.com/olekukonko/tablewriter/.travis.yml deleted file mode 100644 index 366d48a35..000000000 --- a/vendor/github.com/olekukonko/tablewriter/.travis.yml +++ /dev/null @@ -1,22 +0,0 @@ -language: go -arch: - - ppc64le - - amd64 -go: - - 1.3 - - 1.4 - - 1.5 - - 1.6 - - 1.7 - - 1.8 - - 1.9 - - "1.10" - - tip -jobs: - exclude : - - arch : ppc64le - go : - - 1.3 - - arch : ppc64le - go : - - 1.4 diff --git a/vendor/github.com/olekukonko/tablewriter/LICENSE.md b/vendor/github.com/olekukonko/tablewriter/LICENSE.md deleted file mode 100644 index a0769b5c1..000000000 --- a/vendor/github.com/olekukonko/tablewriter/LICENSE.md +++ /dev/null @@ -1,19 +0,0 @@ -Copyright (C) 2014 by Oleku Konko - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. 
- -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. diff --git a/vendor/github.com/olekukonko/tablewriter/README.md b/vendor/github.com/olekukonko/tablewriter/README.md deleted file mode 100644 index f06530d75..000000000 --- a/vendor/github.com/olekukonko/tablewriter/README.md +++ /dev/null @@ -1,431 +0,0 @@ -ASCII Table Writer -========= - -[![Build Status](https://travis-ci.org/olekukonko/tablewriter.png?branch=master)](https://travis-ci.org/olekukonko/tablewriter) -[![Total views](https://img.shields.io/sourcegraph/rrc/github.com/olekukonko/tablewriter.svg)](https://sourcegraph.com/github.com/olekukonko/tablewriter) -[![Godoc](https://godoc.org/github.com/olekukonko/tablewriter?status.svg)](https://godoc.org/github.com/olekukonko/tablewriter) - -Generate ASCII table on the fly ... Installation is simple as - - go get github.com/olekukonko/tablewriter - - -#### Features -- Automatic Padding -- Support Multiple Lines -- Supports Alignment -- Support Custom Separators -- Automatic Alignment of numbers & percentage -- Write directly to http , file etc via `io.Writer` -- Read directly from CSV file -- Optional row line via `SetRowLine` -- Normalise table header -- Make CSV Headers optional -- Enable or disable table border -- Set custom footer support -- Optional identical cells merging -- Set custom caption -- Optional reflowing of paragraphs in multi-line cells. 
- -#### Example 1 - Basic -```go -data := [][]string{ - []string{"A", "The Good", "500"}, - []string{"B", "The Very very Bad Man", "288"}, - []string{"C", "The Ugly", "120"}, - []string{"D", "The Gopher", "800"}, -} - -table := tablewriter.NewWriter(os.Stdout) -table.SetHeader([]string{"Name", "Sign", "Rating"}) - -for _, v := range data { - table.Append(v) -} -table.Render() // Send output -``` - -##### Output 1 -``` -+------+-----------------------+--------+ -| NAME | SIGN | RATING | -+------+-----------------------+--------+ -| A | The Good | 500 | -| B | The Very very Bad Man | 288 | -| C | The Ugly | 120 | -| D | The Gopher | 800 | -+------+-----------------------+--------+ -``` - -#### Example 2 - Without Border / Footer / Bulk Append -```go -data := [][]string{ - []string{"1/1/2014", "Domain name", "2233", "$10.98"}, - []string{"1/1/2014", "January Hosting", "2233", "$54.95"}, - []string{"1/4/2014", "February Hosting", "2233", "$51.00"}, - []string{"1/4/2014", "February Extra Bandwidth", "2233", "$30.00"}, -} - -table := tablewriter.NewWriter(os.Stdout) -table.SetHeader([]string{"Date", "Description", "CV2", "Amount"}) -table.SetFooter([]string{"", "", "Total", "$146.93"}) // Add Footer -table.SetBorder(false) // Set Border to false -table.AppendBulk(data) // Add Bulk Data -table.Render() -``` - -##### Output 2 -``` - - DATE | DESCRIPTION | CV2 | AMOUNT ------------+--------------------------+-------+---------- - 1/1/2014 | Domain name | 2233 | $10.98 - 1/1/2014 | January Hosting | 2233 | $54.95 - 1/4/2014 | February Hosting | 2233 | $51.00 - 1/4/2014 | February Extra Bandwidth | 2233 | $30.00 ------------+--------------------------+-------+---------- - TOTAL | $146 93 - --------+---------- - -``` - - -#### Example 3 - CSV -```go -table, _ := tablewriter.NewCSV(os.Stdout, "testdata/test_info.csv", true) -table.SetAlignment(tablewriter.ALIGN_LEFT) // Set Alignment -table.Render() -``` - -##### Output 3 -``` -+----------+--------------+------+-----+---------+----------------+ -| FIELD | TYPE | NULL | KEY | DEFAULT | EXTRA | -+----------+--------------+------+-----+---------+----------------+ -| user_id | smallint(5) | NO | PRI | NULL | auto_increment | -| username | varchar(10) | NO | | NULL | | -| password | varchar(100) | NO | | NULL | | -+----------+--------------+------+-----+---------+----------------+ -``` - -#### Example 4 - Custom Separator -```go -table, _ := tablewriter.NewCSV(os.Stdout, "testdata/test.csv", true) -table.SetRowLine(true) // Enable row line - -// Change table lines -table.SetCenterSeparator("*") -table.SetColumnSeparator("╪") -table.SetRowSeparator("-") - -table.SetAlignment(tablewriter.ALIGN_LEFT) -table.Render() -``` - -##### Output 4 -``` -*------------*-----------*---------* -╪ FIRST NAME ╪ LAST NAME ╪ SSN ╪ -*------------*-----------*---------* -╪ John ╪ Barry ╪ 123456 ╪ -*------------*-----------*---------* -╪ Kathy ╪ Smith ╪ 687987 ╪ -*------------*-----------*---------* -╪ Bob ╪ McCornick ╪ 3979870 ╪ -*------------*-----------*---------* -``` - -#### Example 5 - Markdown Format -```go -data := [][]string{ - []string{"1/1/2014", "Domain name", "2233", "$10.98"}, - []string{"1/1/2014", "January Hosting", "2233", "$54.95"}, - []string{"1/4/2014", "February Hosting", "2233", "$51.00"}, - []string{"1/4/2014", "February Extra Bandwidth", "2233", "$30.00"}, -} - -table := tablewriter.NewWriter(os.Stdout) -table.SetHeader([]string{"Date", "Description", "CV2", "Amount"}) -table.SetBorders(tablewriter.Border{Left: true, Top: false, Right: true, Bottom: false}) 
-table.SetCenterSeparator("|") -table.AppendBulk(data) // Add Bulk Data -table.Render() -``` - -##### Output 5 -``` -| DATE | DESCRIPTION | CV2 | AMOUNT | -|----------|--------------------------|------|--------| -| 1/1/2014 | Domain name | 2233 | $10.98 | -| 1/1/2014 | January Hosting | 2233 | $54.95 | -| 1/4/2014 | February Hosting | 2233 | $51.00 | -| 1/4/2014 | February Extra Bandwidth | 2233 | $30.00 | -``` - -#### Example 6 - Identical cells merging -```go -data := [][]string{ - []string{"1/1/2014", "Domain name", "1234", "$10.98"}, - []string{"1/1/2014", "January Hosting", "2345", "$54.95"}, - []string{"1/4/2014", "February Hosting", "3456", "$51.00"}, - []string{"1/4/2014", "February Extra Bandwidth", "4567", "$30.00"}, -} - -table := tablewriter.NewWriter(os.Stdout) -table.SetHeader([]string{"Date", "Description", "CV2", "Amount"}) -table.SetFooter([]string{"", "", "Total", "$146.93"}) -table.SetAutoMergeCells(true) -table.SetRowLine(true) -table.AppendBulk(data) -table.Render() -``` - -##### Output 6 -``` -+----------+--------------------------+-------+---------+ -| DATE | DESCRIPTION | CV2 | AMOUNT | -+----------+--------------------------+-------+---------+ -| 1/1/2014 | Domain name | 1234 | $10.98 | -+ +--------------------------+-------+---------+ -| | January Hosting | 2345 | $54.95 | -+----------+--------------------------+-------+---------+ -| 1/4/2014 | February Hosting | 3456 | $51.00 | -+ +--------------------------+-------+---------+ -| | February Extra Bandwidth | 4567 | $30.00 | -+----------+--------------------------+-------+---------+ -| TOTAL | $146 93 | -+----------+--------------------------+-------+---------+ -``` - -#### Example 7 - Identical cells merging (specify the column index to merge) -```go -data := [][]string{ - []string{"1/1/2014", "Domain name", "1234", "$10.98"}, - []string{"1/1/2014", "January Hosting", "1234", "$10.98"}, - []string{"1/4/2014", "February Hosting", "3456", "$51.00"}, - []string{"1/4/2014", "February Extra Bandwidth", "4567", "$30.00"}, -} - -table := tablewriter.NewWriter(os.Stdout) -table.SetHeader([]string{"Date", "Description", "CV2", "Amount"}) -table.SetFooter([]string{"", "", "Total", "$146.93"}) -table.SetAutoMergeCellsByColumnIndex([]int{2, 3}) -table.SetRowLine(true) -table.AppendBulk(data) -table.Render() -``` - -##### Output 7 -``` -+----------+--------------------------+-------+---------+ -| DATE | DESCRIPTION | CV2 | AMOUNT | -+----------+--------------------------+-------+---------+ -| 1/1/2014 | Domain name | 1234 | $10.98 | -+----------+--------------------------+ + + -| 1/1/2014 | January Hosting | | | -+----------+--------------------------+-------+---------+ -| 1/4/2014 | February Hosting | 3456 | $51.00 | -+----------+--------------------------+-------+---------+ -| 1/4/2014 | February Extra Bandwidth | 4567 | $30.00 | -+----------+--------------------------+-------+---------+ -| TOTAL | $146.93 | -+----------+--------------------------+-------+---------+ -``` - - -#### Table with color -```go -data := [][]string{ - []string{"1/1/2014", "Domain name", "2233", "$10.98"}, - []string{"1/1/2014", "January Hosting", "2233", "$54.95"}, - []string{"1/4/2014", "February Hosting", "2233", "$51.00"}, - []string{"1/4/2014", "February Extra Bandwidth", "2233", "$30.00"}, -} - -table := tablewriter.NewWriter(os.Stdout) -table.SetHeader([]string{"Date", "Description", "CV2", "Amount"}) -table.SetFooter([]string{"", "", "Total", "$146.93"}) // Add Footer -table.SetBorder(false) // Set Border to false - 
-table.SetHeaderColor(tablewriter.Colors{tablewriter.Bold, tablewriter.BgGreenColor}, - tablewriter.Colors{tablewriter.FgHiRedColor, tablewriter.Bold, tablewriter.BgBlackColor}, - tablewriter.Colors{tablewriter.BgRedColor, tablewriter.FgWhiteColor}, - tablewriter.Colors{tablewriter.BgCyanColor, tablewriter.FgWhiteColor}) - -table.SetColumnColor(tablewriter.Colors{tablewriter.Bold, tablewriter.FgHiBlackColor}, - tablewriter.Colors{tablewriter.Bold, tablewriter.FgHiRedColor}, - tablewriter.Colors{tablewriter.Bold, tablewriter.FgHiBlackColor}, - tablewriter.Colors{tablewriter.Bold, tablewriter.FgBlackColor}) - -table.SetFooterColor(tablewriter.Colors{}, tablewriter.Colors{}, - tablewriter.Colors{tablewriter.Bold}, - tablewriter.Colors{tablewriter.FgHiRedColor}) - -table.AppendBulk(data) -table.Render() -``` - -#### Table with color Output -![Table with Color](https://cloud.githubusercontent.com/assets/6460392/21101956/bbc7b356-c0a1-11e6-9f36-dba694746efc.png) - -#### Example - 8 Table Cells with Color - -Individual Cell Colors from `func Rich` take precedence over Column Colors - -```go -data := [][]string{ - []string{"Test1Merge", "HelloCol2 - 1", "HelloCol3 - 1", "HelloCol4 - 1"}, - []string{"Test1Merge", "HelloCol2 - 2", "HelloCol3 - 2", "HelloCol4 - 2"}, - []string{"Test1Merge", "HelloCol2 - 3", "HelloCol3 - 3", "HelloCol4 - 3"}, - []string{"Test2Merge", "HelloCol2 - 4", "HelloCol3 - 4", "HelloCol4 - 4"}, - []string{"Test2Merge", "HelloCol2 - 5", "HelloCol3 - 5", "HelloCol4 - 5"}, - []string{"Test2Merge", "HelloCol2 - 6", "HelloCol3 - 6", "HelloCol4 - 6"}, - []string{"Test2Merge", "HelloCol2 - 7", "HelloCol3 - 7", "HelloCol4 - 7"}, - []string{"Test3Merge", "HelloCol2 - 8", "HelloCol3 - 8", "HelloCol4 - 8"}, - []string{"Test3Merge", "HelloCol2 - 9", "HelloCol3 - 9", "HelloCol4 - 9"}, - []string{"Test3Merge", "HelloCol2 - 10", "HelloCol3 -10", "HelloCol4 - 10"}, -} - -table := tablewriter.NewWriter(os.Stdout) -table.SetHeader([]string{"Col1", "Col2", "Col3", "Col4"}) -table.SetFooter([]string{"", "", "Footer3", "Footer4"}) -table.SetBorder(false) - -table.SetHeaderColor(tablewriter.Colors{tablewriter.Bold, tablewriter.BgGreenColor}, - tablewriter.Colors{tablewriter.FgHiRedColor, tablewriter.Bold, tablewriter.BgBlackColor}, - tablewriter.Colors{tablewriter.BgRedColor, tablewriter.FgWhiteColor}, - tablewriter.Colors{tablewriter.BgCyanColor, tablewriter.FgWhiteColor}) - -table.SetColumnColor(tablewriter.Colors{tablewriter.Bold, tablewriter.FgHiBlackColor}, - tablewriter.Colors{tablewriter.Bold, tablewriter.FgHiRedColor}, - tablewriter.Colors{tablewriter.Bold, tablewriter.FgHiBlackColor}, - tablewriter.Colors{tablewriter.Bold, tablewriter.FgBlackColor}) - -table.SetFooterColor(tablewriter.Colors{}, tablewriter.Colors{}, - tablewriter.Colors{tablewriter.Bold}, - tablewriter.Colors{tablewriter.FgHiRedColor}) - -colorData1 := []string{"TestCOLOR1Merge", "HelloCol2 - COLOR1", "HelloCol3 - COLOR1", "HelloCol4 - COLOR1"} -colorData2 := []string{"TestCOLOR2Merge", "HelloCol2 - COLOR2", "HelloCol3 - COLOR2", "HelloCol4 - COLOR2"} - -for i, row := range data { - if i == 4 { - table.Rich(colorData1, []tablewriter.Colors{tablewriter.Colors{}, tablewriter.Colors{tablewriter.Normal, tablewriter.FgCyanColor}, tablewriter.Colors{tablewriter.Bold, tablewriter.FgWhiteColor}, tablewriter.Colors{}}) - table.Rich(colorData2, []tablewriter.Colors{tablewriter.Colors{tablewriter.Normal, tablewriter.FgMagentaColor}, tablewriter.Colors{}, tablewriter.Colors{tablewriter.Bold, tablewriter.BgRedColor}, 
tablewriter.Colors{tablewriter.FgHiGreenColor, tablewriter.Italic, tablewriter.BgHiCyanColor}}) - } - table.Append(row) -} - -table.SetAutoMergeCells(true) -table.Render() - -``` - -##### Table cells with color Output -![Table cells with Color](https://user-images.githubusercontent.com/9064687/63969376-bcd88d80-ca6f-11e9-9466-c3d954700b25.png) - -#### Example 9 - Set table caption -```go -data := [][]string{ - []string{"A", "The Good", "500"}, - []string{"B", "The Very very Bad Man", "288"}, - []string{"C", "The Ugly", "120"}, - []string{"D", "The Gopher", "800"}, -} - -table := tablewriter.NewWriter(os.Stdout) -table.SetHeader([]string{"Name", "Sign", "Rating"}) -table.SetCaption(true, "Movie ratings.") - -for _, v := range data { - table.Append(v) -} -table.Render() // Send output -``` - -Note: Caption text will wrap with total width of rendered table. - -##### Output 9 -``` -+------+-----------------------+--------+ -| NAME | SIGN | RATING | -+------+-----------------------+--------+ -| A | The Good | 500 | -| B | The Very very Bad Man | 288 | -| C | The Ugly | 120 | -| D | The Gopher | 800 | -+------+-----------------------+--------+ -Movie ratings. -``` - -#### Example 10 - Set NoWhiteSpace and TablePadding option -```go -data := [][]string{ - {"node1.example.com", "Ready", "compute", "1.11"}, - {"node2.example.com", "Ready", "compute", "1.11"}, - {"node3.example.com", "Ready", "compute", "1.11"}, - {"node4.example.com", "NotReady", "compute", "1.11"}, -} - -table := tablewriter.NewWriter(os.Stdout) -table.SetHeader([]string{"Name", "Status", "Role", "Version"}) -table.SetAutoWrapText(false) -table.SetAutoFormatHeaders(true) -table.SetHeaderAlignment(ALIGN_LEFT) -table.SetAlignment(ALIGN_LEFT) -table.SetCenterSeparator("") -table.SetColumnSeparator("") -table.SetRowSeparator("") -table.SetHeaderLine(false) -table.SetBorder(false) -table.SetTablePadding("\t") // pad with tabs -table.SetNoWhiteSpace(true) -table.AppendBulk(data) // Add Bulk Data -table.Render() -``` - -##### Output 10 -``` -NAME STATUS ROLE VERSION -node1.example.com Ready compute 1.11 -node2.example.com Ready compute 1.11 -node3.example.com Ready compute 1.11 -node4.example.com NotReady compute 1.11 -``` - -#### Render table into a string - -Instead of rendering the table to `io.Stdout` you can also render it into a string. Go 1.10 introduced the `strings.Builder` type which implements the `io.Writer` interface and can therefore be used for this task. Example: - -```go -package main - -import ( - "strings" - "fmt" - - "github.com/olekukonko/tablewriter" -) - -func main() { - tableString := &strings.Builder{} - table := tablewriter.NewWriter(tableString) - - /* - * Code to fill the table - */ - - table.Render() - - fmt.Println(tableString.String()) -} -``` - -#### TODO -- ~~Import Directly from CSV~~ - `done` -- ~~Support for `SetFooter`~~ - `done` -- ~~Support for `SetBorder`~~ - `done` -- ~~Support table with uneven rows~~ - `done` -- ~~Support custom alignment~~ -- General Improvement & Optimisation -- `NewHTML` Parse table from HTML diff --git a/vendor/github.com/olekukonko/tablewriter/csv.go b/vendor/github.com/olekukonko/tablewriter/csv.go deleted file mode 100644 index 98878303b..000000000 --- a/vendor/github.com/olekukonko/tablewriter/csv.go +++ /dev/null @@ -1,52 +0,0 @@ -// Copyright 2014 Oleku Konko All rights reserved. -// Use of this source code is governed by a MIT -// license that can be found in the LICENSE file. - -// This module is a Table Writer API for the Go Programming Language. 
-// The protocols were written in pure Go and works on windows and unix systems - -package tablewriter - -import ( - "encoding/csv" - "io" - "os" -) - -// Start A new table by importing from a CSV file -// Takes io.Writer and csv File name -func NewCSV(writer io.Writer, fileName string, hasHeader bool) (*Table, error) { - file, err := os.Open(fileName) - if err != nil { - return &Table{}, err - } - defer file.Close() - csvReader := csv.NewReader(file) - t, err := NewCSVReader(writer, csvReader, hasHeader) - return t, err -} - -// Start a New Table Writer with csv.Reader -// This enables customisation such as reader.Comma = ';' -// See http://golang.org/src/pkg/encoding/csv/reader.go?s=3213:3671#L94 -func NewCSVReader(writer io.Writer, csvReader *csv.Reader, hasHeader bool) (*Table, error) { - t := NewWriter(writer) - if hasHeader { - // Read the first row - headers, err := csvReader.Read() - if err != nil { - return &Table{}, err - } - t.SetHeader(headers) - } - for { - record, err := csvReader.Read() - if err == io.EOF { - break - } else if err != nil { - return &Table{}, err - } - t.Append(record) - } - return t, nil -} diff --git a/vendor/github.com/olekukonko/tablewriter/table.go b/vendor/github.com/olekukonko/tablewriter/table.go deleted file mode 100644 index f913149c6..000000000 --- a/vendor/github.com/olekukonko/tablewriter/table.go +++ /dev/null @@ -1,967 +0,0 @@ -// Copyright 2014 Oleku Konko All rights reserved. -// Use of this source code is governed by a MIT -// license that can be found in the LICENSE file. - -// This module is a Table Writer API for the Go Programming Language. -// The protocols were written in pure Go and works on windows and unix systems - -// Create & Generate text based table -package tablewriter - -import ( - "bytes" - "fmt" - "io" - "regexp" - "strings" -) - -const ( - MAX_ROW_WIDTH = 30 -) - -const ( - CENTER = "+" - ROW = "-" - COLUMN = "|" - SPACE = " " - NEWLINE = "\n" -) - -const ( - ALIGN_DEFAULT = iota - ALIGN_CENTER - ALIGN_RIGHT - ALIGN_LEFT -) - -var ( - decimal = regexp.MustCompile(`^-?(?:\d{1,3}(?:,\d{3})*|\d+)(?:\.\d+)?$`) - percent = regexp.MustCompile(`^-?\d+\.?\d*$%$`) -) - -type Border struct { - Left bool - Right bool - Top bool - Bottom bool -} - -type Table struct { - out io.Writer - rows [][]string - lines [][][]string - cs map[int]int - rs map[int]int - headers [][]string - footers [][]string - caption bool - captionText string - autoFmt bool - autoWrap bool - reflowText bool - mW int - pCenter string - pRow string - pColumn string - tColumn int - tRow int - hAlign int - fAlign int - align int - newLine string - rowLine bool - autoMergeCells bool - columnsToAutoMergeCells map[int]bool - noWhiteSpace bool - tablePadding string - hdrLine bool - borders Border - colSize int - headerParams []string - columnsParams []string - footerParams []string - columnsAlign []int -} - -// Start New Table -// Take io.Writer Directly -func NewWriter(writer io.Writer) *Table { - t := &Table{ - out: writer, - rows: [][]string{}, - lines: [][][]string{}, - cs: make(map[int]int), - rs: make(map[int]int), - headers: [][]string{}, - footers: [][]string{}, - caption: false, - captionText: "Table caption.", - autoFmt: true, - autoWrap: true, - reflowText: true, - mW: MAX_ROW_WIDTH, - pCenter: CENTER, - pRow: ROW, - pColumn: COLUMN, - tColumn: -1, - tRow: -1, - hAlign: ALIGN_DEFAULT, - fAlign: ALIGN_DEFAULT, - align: ALIGN_DEFAULT, - newLine: NEWLINE, - rowLine: false, - hdrLine: true, - borders: Border{Left: true, Right: true, Bottom: true, Top: true}, - 
colSize: -1, - headerParams: []string{}, - columnsParams: []string{}, - footerParams: []string{}, - columnsAlign: []int{}} - return t -} - -// Render table output -func (t *Table) Render() { - if t.borders.Top { - t.printLine(true) - } - t.printHeading() - if t.autoMergeCells { - t.printRowsMergeCells() - } else { - t.printRows() - } - if !t.rowLine && t.borders.Bottom { - t.printLine(true) - } - t.printFooter() - - if t.caption { - t.printCaption() - } -} - -const ( - headerRowIdx = -1 - footerRowIdx = -2 -) - -// Set table header -func (t *Table) SetHeader(keys []string) { - t.colSize = len(keys) - for i, v := range keys { - lines := t.parseDimension(v, i, headerRowIdx) - t.headers = append(t.headers, lines) - } -} - -// Set table Footer -func (t *Table) SetFooter(keys []string) { - //t.colSize = len(keys) - for i, v := range keys { - lines := t.parseDimension(v, i, footerRowIdx) - t.footers = append(t.footers, lines) - } -} - -// Set table Caption -func (t *Table) SetCaption(caption bool, captionText ...string) { - t.caption = caption - if len(captionText) == 1 { - t.captionText = captionText[0] - } -} - -// Turn header autoformatting on/off. Default is on (true). -func (t *Table) SetAutoFormatHeaders(auto bool) { - t.autoFmt = auto -} - -// Turn automatic multiline text adjustment on/off. Default is on (true). -func (t *Table) SetAutoWrapText(auto bool) { - t.autoWrap = auto -} - -// Turn automatic reflowing of multiline text when rewrapping. Default is on (true). -func (t *Table) SetReflowDuringAutoWrap(auto bool) { - t.reflowText = auto -} - -// Set the Default column width -func (t *Table) SetColWidth(width int) { - t.mW = width -} - -// Set the minimal width for a column -func (t *Table) SetColMinWidth(column int, width int) { - t.cs[column] = width -} - -// Set the Column Separator -func (t *Table) SetColumnSeparator(sep string) { - t.pColumn = sep -} - -// Set the Row Separator -func (t *Table) SetRowSeparator(sep string) { - t.pRow = sep -} - -// Set the center Separator -func (t *Table) SetCenterSeparator(sep string) { - t.pCenter = sep -} - -// Set Header Alignment -func (t *Table) SetHeaderAlignment(hAlign int) { - t.hAlign = hAlign -} - -// Set Footer Alignment -func (t *Table) SetFooterAlignment(fAlign int) { - t.fAlign = fAlign -} - -// Set Table Alignment -func (t *Table) SetAlignment(align int) { - t.align = align -} - -// Set No White Space -func (t *Table) SetNoWhiteSpace(allow bool) { - t.noWhiteSpace = allow -} - -// Set Table Padding -func (t *Table) SetTablePadding(padding string) { - t.tablePadding = padding -} - -func (t *Table) SetColumnAlignment(keys []int) { - for _, v := range keys { - switch v { - case ALIGN_CENTER: - break - case ALIGN_LEFT: - break - case ALIGN_RIGHT: - break - default: - v = ALIGN_DEFAULT - } - t.columnsAlign = append(t.columnsAlign, v) - } -} - -// Set New Line -func (t *Table) SetNewLine(nl string) { - t.newLine = nl -} - -// Set Header Line -// This would enable / disable a line after the header -func (t *Table) SetHeaderLine(line bool) { - t.hdrLine = line -} - -// Set Row Line -// This would enable / disable a line on each row of the table -func (t *Table) SetRowLine(line bool) { - t.rowLine = line -} - -// Set Auto Merge Cells -// This would enable / disable the merge of cells with identical values -func (t *Table) SetAutoMergeCells(auto bool) { - t.autoMergeCells = auto -} - -// Set Auto Merge Cells By Column Index -// This would enable / disable the merge of cells with identical values for specific columns -// If cols is empty, it 
is the same as `SetAutoMergeCells(true)`. -func (t *Table) SetAutoMergeCellsByColumnIndex(cols []int) { - t.autoMergeCells = true - - if len(cols) > 0 { - m := make(map[int]bool) - for _, col := range cols { - m[col] = true - } - t.columnsToAutoMergeCells = m - } -} - -// Set Table Border -// This would enable / disable line around the table -func (t *Table) SetBorder(border bool) { - t.SetBorders(Border{border, border, border, border}) -} - -func (t *Table) SetBorders(border Border) { - t.borders = border -} - -// Append row to table -func (t *Table) Append(row []string) { - rowSize := len(t.headers) - if rowSize > t.colSize { - t.colSize = rowSize - } - - n := len(t.lines) - line := [][]string{} - for i, v := range row { - - // Detect string width - // Detect String height - // Break strings into words - out := t.parseDimension(v, i, n) - - // Append broken words - line = append(line, out) - } - t.lines = append(t.lines, line) -} - -// Append row to table with color attributes -func (t *Table) Rich(row []string, colors []Colors) { - rowSize := len(t.headers) - if rowSize > t.colSize { - t.colSize = rowSize - } - - n := len(t.lines) - line := [][]string{} - for i, v := range row { - - // Detect string width - // Detect String height - // Break strings into words - out := t.parseDimension(v, i, n) - - if len(colors) > i { - color := colors[i] - out[0] = format(out[0], color) - } - - // Append broken words - line = append(line, out) - } - t.lines = append(t.lines, line) -} - -// Allow Support for Bulk Append -// Eliminates repeated for loops -func (t *Table) AppendBulk(rows [][]string) { - for _, row := range rows { - t.Append(row) - } -} - -// NumLines to get the number of lines -func (t *Table) NumLines() int { - return len(t.lines) -} - -// Clear rows -func (t *Table) ClearRows() { - t.lines = [][][]string{} -} - -// Clear footer -func (t *Table) ClearFooter() { - t.footers = [][]string{} -} - -// Center based on position and border. 
-func (t *Table) center(i int) string { - if i == -1 && !t.borders.Left { - return t.pRow - } - - if i == len(t.cs)-1 && !t.borders.Right { - return t.pRow - } - - return t.pCenter -} - -// Print line based on row width -func (t *Table) printLine(nl bool) { - fmt.Fprint(t.out, t.center(-1)) - for i := 0; i < len(t.cs); i++ { - v := t.cs[i] - fmt.Fprintf(t.out, "%s%s%s%s", - t.pRow, - strings.Repeat(string(t.pRow), v), - t.pRow, - t.center(i)) - } - if nl { - fmt.Fprint(t.out, t.newLine) - } -} - -// Print line based on row width with our without cell separator -func (t *Table) printLineOptionalCellSeparators(nl bool, displayCellSeparator []bool) { - fmt.Fprint(t.out, t.pCenter) - for i := 0; i < len(t.cs); i++ { - v := t.cs[i] - if i > len(displayCellSeparator) || displayCellSeparator[i] { - // Display the cell separator - fmt.Fprintf(t.out, "%s%s%s%s", - t.pRow, - strings.Repeat(string(t.pRow), v), - t.pRow, - t.pCenter) - } else { - // Don't display the cell separator for this cell - fmt.Fprintf(t.out, "%s%s", - strings.Repeat(" ", v+2), - t.pCenter) - } - } - if nl { - fmt.Fprint(t.out, t.newLine) - } -} - -// Return the PadRight function if align is left, PadLeft if align is right, -// and Pad by default -func pad(align int) func(string, string, int) string { - padFunc := Pad - switch align { - case ALIGN_LEFT: - padFunc = PadRight - case ALIGN_RIGHT: - padFunc = PadLeft - } - return padFunc -} - -// Print heading information -func (t *Table) printHeading() { - // Check if headers is available - if len(t.headers) < 1 { - return - } - - // Identify last column - end := len(t.cs) - 1 - - // Get pad function - padFunc := pad(t.hAlign) - - // Checking for ANSI escape sequences for header - is_esc_seq := false - if len(t.headerParams) > 0 { - is_esc_seq = true - } - - // Maximum height. - max := t.rs[headerRowIdx] - - // Print Heading - for x := 0; x < max; x++ { - // Check if border is set - // Replace with space if not set - if !t.noWhiteSpace { - fmt.Fprint(t.out, ConditionString(t.borders.Left, t.pColumn, SPACE)) - } - - for y := 0; y <= end; y++ { - v := t.cs[y] - h := "" - - if y < len(t.headers) && x < len(t.headers[y]) { - h = t.headers[y][x] - } - if t.autoFmt { - h = Title(h) - } - pad := ConditionString((y == end && !t.borders.Left), SPACE, t.pColumn) - if t.noWhiteSpace { - pad = ConditionString((y == end && !t.borders.Left), SPACE, t.tablePadding) - } - if is_esc_seq { - if !t.noWhiteSpace { - fmt.Fprintf(t.out, " %s %s", - format(padFunc(h, SPACE, v), - t.headerParams[y]), pad) - } else { - fmt.Fprintf(t.out, "%s %s", - format(padFunc(h, SPACE, v), - t.headerParams[y]), pad) - } - } else { - if !t.noWhiteSpace { - fmt.Fprintf(t.out, " %s %s", - padFunc(h, SPACE, v), - pad) - } else { - // the spaces between breaks the kube formatting - fmt.Fprintf(t.out, "%s%s", - padFunc(h, SPACE, v), - pad) - } - } - } - // Next line - fmt.Fprint(t.out, t.newLine) - } - if t.hdrLine { - t.printLine(true) - } -} - -// Print heading information -func (t *Table) printFooter() { - // Check if headers is available - if len(t.footers) < 1 { - return - } - - // Only print line if border is not set - if !t.borders.Bottom { - t.printLine(true) - } - - // Identify last column - end := len(t.cs) - 1 - - // Get pad function - padFunc := pad(t.fAlign) - - // Checking for ANSI escape sequences for header - is_esc_seq := false - if len(t.footerParams) > 0 { - is_esc_seq = true - } - - // Maximum height. 
- max := t.rs[footerRowIdx] - - // Print Footer - erasePad := make([]bool, len(t.footers)) - for x := 0; x < max; x++ { - // Check if border is set - // Replace with space if not set - fmt.Fprint(t.out, ConditionString(t.borders.Bottom, t.pColumn, SPACE)) - - for y := 0; y <= end; y++ { - v := t.cs[y] - f := "" - if y < len(t.footers) && x < len(t.footers[y]) { - f = t.footers[y][x] - } - if t.autoFmt { - f = Title(f) - } - pad := ConditionString((y == end && !t.borders.Top), SPACE, t.pColumn) - - if erasePad[y] || (x == 0 && len(f) == 0) { - pad = SPACE - erasePad[y] = true - } - - if is_esc_seq { - fmt.Fprintf(t.out, " %s %s", - format(padFunc(f, SPACE, v), - t.footerParams[y]), pad) - } else { - fmt.Fprintf(t.out, " %s %s", - padFunc(f, SPACE, v), - pad) - } - - //fmt.Fprintf(t.out, " %s %s", - // padFunc(f, SPACE, v), - // pad) - } - // Next line - fmt.Fprint(t.out, t.newLine) - //t.printLine(true) - } - - hasPrinted := false - - for i := 0; i <= end; i++ { - v := t.cs[i] - pad := t.pRow - center := t.pCenter - length := len(t.footers[i][0]) - - if length > 0 { - hasPrinted = true - } - - // Set center to be space if length is 0 - if length == 0 && !t.borders.Right { - center = SPACE - } - - // Print first junction - if i == 0 { - if length > 0 && !t.borders.Left { - center = t.pRow - } - fmt.Fprint(t.out, center) - } - - // Pad With space of length is 0 - if length == 0 { - pad = SPACE - } - // Ignore left space as it has printed before - if hasPrinted || t.borders.Left { - pad = t.pRow - center = t.pCenter - } - - // Change Center end position - if center != SPACE { - if i == end && !t.borders.Right { - center = t.pRow - } - } - - // Change Center start position - if center == SPACE { - if i < end && len(t.footers[i+1][0]) != 0 { - if !t.borders.Left { - center = t.pRow - } else { - center = t.pCenter - } - } - } - - // Print the footer - fmt.Fprintf(t.out, "%s%s%s%s", - pad, - strings.Repeat(string(pad), v), - pad, - center) - - } - - fmt.Fprint(t.out, t.newLine) -} - -// Print caption text -func (t Table) printCaption() { - width := t.getTableWidth() - paragraph, _ := WrapString(t.captionText, width) - for linecount := 0; linecount < len(paragraph); linecount++ { - fmt.Fprintln(t.out, paragraph[linecount]) - } -} - -// Calculate the total number of characters in a row -func (t Table) getTableWidth() int { - var chars int - for _, v := range t.cs { - chars += v - } - - // Add chars, spaces, seperators to calculate the total width of the table. 
- // ncols := t.colSize - // spaces := ncols * 2 - // seps := ncols + 1 - - return (chars + (3 * t.colSize) + 2) -} - -func (t Table) printRows() { - for i, lines := range t.lines { - t.printRow(lines, i) - } -} - -func (t *Table) fillAlignment(num int) { - if len(t.columnsAlign) < num { - t.columnsAlign = make([]int, num) - for i := range t.columnsAlign { - t.columnsAlign[i] = t.align - } - } -} - -// Print Row Information -// Adjust column alignment based on type - -func (t *Table) printRow(columns [][]string, rowIdx int) { - // Get Maximum Height - max := t.rs[rowIdx] - total := len(columns) - - // TODO Fix uneven col size - // if total < t.colSize { - // for n := t.colSize - total; n < t.colSize ; n++ { - // columns = append(columns, []string{SPACE}) - // t.cs[n] = t.mW - // } - //} - - // Pad Each Height - pads := []int{} - - // Checking for ANSI escape sequences for columns - is_esc_seq := false - if len(t.columnsParams) > 0 { - is_esc_seq = true - } - t.fillAlignment(total) - - for i, line := range columns { - length := len(line) - pad := max - length - pads = append(pads, pad) - for n := 0; n < pad; n++ { - columns[i] = append(columns[i], " ") - } - } - //fmt.Println(max, "\n") - for x := 0; x < max; x++ { - for y := 0; y < total; y++ { - - // Check if border is set - if !t.noWhiteSpace { - fmt.Fprint(t.out, ConditionString((!t.borders.Left && y == 0), SPACE, t.pColumn)) - fmt.Fprintf(t.out, SPACE) - } - - str := columns[y][x] - - // Embedding escape sequence with column value - if is_esc_seq { - str = format(str, t.columnsParams[y]) - } - - // This would print alignment - // Default alignment would use multiple configuration - switch t.columnsAlign[y] { - case ALIGN_CENTER: // - fmt.Fprintf(t.out, "%s", Pad(str, SPACE, t.cs[y])) - case ALIGN_RIGHT: - fmt.Fprintf(t.out, "%s", PadLeft(str, SPACE, t.cs[y])) - case ALIGN_LEFT: - fmt.Fprintf(t.out, "%s", PadRight(str, SPACE, t.cs[y])) - default: - if decimal.MatchString(strings.TrimSpace(str)) || percent.MatchString(strings.TrimSpace(str)) { - fmt.Fprintf(t.out, "%s", PadLeft(str, SPACE, t.cs[y])) - } else { - fmt.Fprintf(t.out, "%s", PadRight(str, SPACE, t.cs[y])) - - // TODO Custom alignment per column - //if max == 1 || pads[y] > 0 { - // fmt.Fprintf(t.out, "%s", Pad(str, SPACE, t.cs[y])) - //} else { - // fmt.Fprintf(t.out, "%s", PadRight(str, SPACE, t.cs[y])) - //} - - } - } - if !t.noWhiteSpace { - fmt.Fprintf(t.out, SPACE) - } else { - fmt.Fprintf(t.out, t.tablePadding) - } - } - // Check if border is set - // Replace with space if not set - if !t.noWhiteSpace { - fmt.Fprint(t.out, ConditionString(t.borders.Left, t.pColumn, SPACE)) - } - fmt.Fprint(t.out, t.newLine) - } - - if t.rowLine { - t.printLine(true) - } -} - -// Print the rows of the table and merge the cells that are identical -func (t *Table) printRowsMergeCells() { - var previousLine []string - var displayCellBorder []bool - var tmpWriter bytes.Buffer - for i, lines := range t.lines { - // We store the display of the current line in a tmp writer, as we need to know which border needs to be print above - previousLine, displayCellBorder = t.printRowMergeCells(&tmpWriter, lines, i, previousLine) - if i > 0 { //We don't need to print borders above first line - if t.rowLine { - t.printLineOptionalCellSeparators(true, displayCellBorder) - } - } - tmpWriter.WriteTo(t.out) - } - //Print the end of the table - if t.rowLine { - t.printLine(true) - } -} - -// Print Row Information to a writer and merge identical cells. 
-// Adjust column alignment based on type - -func (t *Table) printRowMergeCells(writer io.Writer, columns [][]string, rowIdx int, previousLine []string) ([]string, []bool) { - // Get Maximum Height - max := t.rs[rowIdx] - total := len(columns) - - // Pad Each Height - pads := []int{} - - // Checking for ANSI escape sequences for columns - is_esc_seq := false - if len(t.columnsParams) > 0 { - is_esc_seq = true - } - for i, line := range columns { - length := len(line) - pad := max - length - pads = append(pads, pad) - for n := 0; n < pad; n++ { - columns[i] = append(columns[i], " ") - } - } - - var displayCellBorder []bool - t.fillAlignment(total) - for x := 0; x < max; x++ { - for y := 0; y < total; y++ { - - // Check if border is set - fmt.Fprint(writer, ConditionString((!t.borders.Left && y == 0), SPACE, t.pColumn)) - - fmt.Fprintf(writer, SPACE) - - str := columns[y][x] - - // Embedding escape sequence with column value - if is_esc_seq { - str = format(str, t.columnsParams[y]) - } - - if t.autoMergeCells { - var mergeCell bool - if t.columnsToAutoMergeCells != nil { - // Check to see if the column index is in columnsToAutoMergeCells. - if t.columnsToAutoMergeCells[y] { - mergeCell = true - } - } else { - // columnsToAutoMergeCells was not set. - mergeCell = true - } - //Store the full line to merge mutli-lines cells - fullLine := strings.TrimRight(strings.Join(columns[y], " "), " ") - if len(previousLine) > y && fullLine == previousLine[y] && fullLine != "" && mergeCell { - // If this cell is identical to the one above but not empty, we don't display the border and keep the cell empty. - displayCellBorder = append(displayCellBorder, false) - str = "" - } else { - // First line or different content, keep the content and print the cell border - displayCellBorder = append(displayCellBorder, true) - } - } - - // This would print alignment - // Default alignment would use multiple configuration - switch t.columnsAlign[y] { - case ALIGN_CENTER: // - fmt.Fprintf(writer, "%s", Pad(str, SPACE, t.cs[y])) - case ALIGN_RIGHT: - fmt.Fprintf(writer, "%s", PadLeft(str, SPACE, t.cs[y])) - case ALIGN_LEFT: - fmt.Fprintf(writer, "%s", PadRight(str, SPACE, t.cs[y])) - default: - if decimal.MatchString(strings.TrimSpace(str)) || percent.MatchString(strings.TrimSpace(str)) { - fmt.Fprintf(writer, "%s", PadLeft(str, SPACE, t.cs[y])) - } else { - fmt.Fprintf(writer, "%s", PadRight(str, SPACE, t.cs[y])) - } - } - fmt.Fprintf(writer, SPACE) - } - // Check if border is set - // Replace with space if not set - fmt.Fprint(writer, ConditionString(t.borders.Left, t.pColumn, SPACE)) - fmt.Fprint(writer, t.newLine) - } - - //The new previous line is the current one - previousLine = make([]string, total) - for y := 0; y < total; y++ { - previousLine[y] = strings.TrimRight(strings.Join(columns[y], " "), " ") //Store the full line for multi-lines cells - } - //Returns the newly added line and wether or not a border should be displayed above. - return previousLine, displayCellBorder -} - -func (t *Table) parseDimension(str string, colKey, rowKey int) []string { - var ( - raw []string - maxWidth int - ) - - raw = getLines(str) - maxWidth = 0 - for _, line := range raw { - if w := DisplayWidth(line); w > maxWidth { - maxWidth = w - } - } - - // If wrapping, ensure that all paragraphs in the cell fit in the - // specified width. - if t.autoWrap { - // If there's a maximum allowed width for wrapping, use that. - if maxWidth > t.mW { - maxWidth = t.mW - } - - // In the process of doing so, we need to recompute maxWidth. 
This - // is because perhaps a word in the cell is longer than the - // allowed maximum width in t.mW. - newMaxWidth := maxWidth - newRaw := make([]string, 0, len(raw)) - - if t.reflowText { - // Make a single paragraph of everything. - raw = []string{strings.Join(raw, " ")} - } - for i, para := range raw { - paraLines, _ := WrapString(para, maxWidth) - for _, line := range paraLines { - if w := DisplayWidth(line); w > newMaxWidth { - newMaxWidth = w - } - } - if i > 0 { - newRaw = append(newRaw, " ") - } - newRaw = append(newRaw, paraLines...) - } - raw = newRaw - maxWidth = newMaxWidth - } - - // Store the new known maximum width. - v, ok := t.cs[colKey] - if !ok || v < maxWidth || v == 0 { - t.cs[colKey] = maxWidth - } - - // Remember the number of lines for the row printer. - h := len(raw) - v, ok = t.rs[rowKey] - - if !ok || v < h || v == 0 { - t.rs[rowKey] = h - } - //fmt.Printf("Raw %+v %d\n", raw, len(raw)) - return raw -} diff --git a/vendor/github.com/olekukonko/tablewriter/table_with_color.go b/vendor/github.com/olekukonko/tablewriter/table_with_color.go deleted file mode 100644 index ae7a364ae..000000000 --- a/vendor/github.com/olekukonko/tablewriter/table_with_color.go +++ /dev/null @@ -1,136 +0,0 @@ -package tablewriter - -import ( - "fmt" - "strconv" - "strings" -) - -const ESC = "\033" -const SEP = ";" - -const ( - BgBlackColor int = iota + 40 - BgRedColor - BgGreenColor - BgYellowColor - BgBlueColor - BgMagentaColor - BgCyanColor - BgWhiteColor -) - -const ( - FgBlackColor int = iota + 30 - FgRedColor - FgGreenColor - FgYellowColor - FgBlueColor - FgMagentaColor - FgCyanColor - FgWhiteColor -) - -const ( - BgHiBlackColor int = iota + 100 - BgHiRedColor - BgHiGreenColor - BgHiYellowColor - BgHiBlueColor - BgHiMagentaColor - BgHiCyanColor - BgHiWhiteColor -) - -const ( - FgHiBlackColor int = iota + 90 - FgHiRedColor - FgHiGreenColor - FgHiYellowColor - FgHiBlueColor - FgHiMagentaColor - FgHiCyanColor - FgHiWhiteColor -) - -const ( - Normal = 0 - Bold = 1 - UnderlineSingle = 4 - Italic -) - -type Colors []int - -func startFormat(seq string) string { - return fmt.Sprintf("%s[%sm", ESC, seq) -} - -func stopFormat() string { - return fmt.Sprintf("%s[%dm", ESC, Normal) -} - -// Making the SGR (Select Graphic Rendition) sequence. 
-func makeSequence(codes []int) string { - codesInString := []string{} - for _, code := range codes { - codesInString = append(codesInString, strconv.Itoa(code)) - } - return strings.Join(codesInString, SEP) -} - -// Adding ANSI escape sequences before and after string -func format(s string, codes interface{}) string { - var seq string - - switch v := codes.(type) { - - case string: - seq = v - case []int: - seq = makeSequence(v) - case Colors: - seq = makeSequence(v) - default: - return s - } - - if len(seq) == 0 { - return s - } - return startFormat(seq) + s + stopFormat() -} - -// Adding header colors (ANSI codes) -func (t *Table) SetHeaderColor(colors ...Colors) { - if t.colSize != len(colors) { - panic("Number of header colors must be equal to number of headers.") - } - for i := 0; i < len(colors); i++ { - t.headerParams = append(t.headerParams, makeSequence(colors[i])) - } -} - -// Adding column colors (ANSI codes) -func (t *Table) SetColumnColor(colors ...Colors) { - if t.colSize != len(colors) { - panic("Number of column colors must be equal to number of headers.") - } - for i := 0; i < len(colors); i++ { - t.columnsParams = append(t.columnsParams, makeSequence(colors[i])) - } -} - -// Adding column colors (ANSI codes) -func (t *Table) SetFooterColor(colors ...Colors) { - if len(t.footers) != len(colors) { - panic("Number of footer colors must be equal to number of footer.") - } - for i := 0; i < len(colors); i++ { - t.footerParams = append(t.footerParams, makeSequence(colors[i])) - } -} - -func Color(colors ...int) []int { - return colors -} diff --git a/vendor/github.com/olekukonko/tablewriter/util.go b/vendor/github.com/olekukonko/tablewriter/util.go deleted file mode 100644 index 380e7ab35..000000000 --- a/vendor/github.com/olekukonko/tablewriter/util.go +++ /dev/null @@ -1,93 +0,0 @@ -// Copyright 2014 Oleku Konko All rights reserved. -// Use of this source code is governed by a MIT -// license that can be found in the LICENSE file. - -// This module is a Table Writer API for the Go Programming Language. -// The protocols were written in pure Go and works on windows and unix systems - -package tablewriter - -import ( - "math" - "regexp" - "strings" - - "github.com/mattn/go-runewidth" -) - -var ansi = regexp.MustCompile("\033\\[(?:[0-9]{1,3}(?:;[0-9]{1,3})*)?[m|K]") - -func DisplayWidth(str string) int { - return runewidth.StringWidth(ansi.ReplaceAllLiteralString(str, "")) -} - -// Simple Condition for string -// Returns value based on condition -func ConditionString(cond bool, valid, inValid string) string { - if cond { - return valid - } - return inValid -} - -func isNumOrSpace(r rune) bool { - return ('0' <= r && r <= '9') || r == ' ' -} - -// Format Table Header -// Replace _ , . and spaces -func Title(name string) string { - origLen := len(name) - rs := []rune(name) - for i, r := range rs { - switch r { - case '_': - rs[i] = ' ' - case '.': - // ignore floating number 0.0 - if (i != 0 && !isNumOrSpace(rs[i-1])) || (i != len(rs)-1 && !isNumOrSpace(rs[i+1])) { - rs[i] = ' ' - } - } - } - name = string(rs) - name = strings.TrimSpace(name) - if len(name) == 0 && origLen > 0 { - // Keep at least one character. This is important to preserve - // empty lines in multi-line headers/footers. 
- name = " " - } - return strings.ToUpper(name) -} - -// Pad String -// Attempts to place string in the center -func Pad(s, pad string, width int) string { - gap := width - DisplayWidth(s) - if gap > 0 { - gapLeft := int(math.Ceil(float64(gap / 2))) - gapRight := gap - gapLeft - return strings.Repeat(string(pad), gapLeft) + s + strings.Repeat(string(pad), gapRight) - } - return s -} - -// Pad String Right position -// This would place string at the left side of the screen -func PadRight(s, pad string, width int) string { - gap := width - DisplayWidth(s) - if gap > 0 { - return s + strings.Repeat(string(pad), gap) - } - return s -} - -// Pad String Left position -// This would place string at the right side of the screen -func PadLeft(s, pad string, width int) string { - gap := width - DisplayWidth(s) - if gap > 0 { - return strings.Repeat(string(pad), gap) + s - } - return s -} diff --git a/vendor/github.com/olekukonko/tablewriter/wrap.go b/vendor/github.com/olekukonko/tablewriter/wrap.go deleted file mode 100644 index a092ee1f7..000000000 --- a/vendor/github.com/olekukonko/tablewriter/wrap.go +++ /dev/null @@ -1,99 +0,0 @@ -// Copyright 2014 Oleku Konko All rights reserved. -// Use of this source code is governed by a MIT -// license that can be found in the LICENSE file. - -// This module is a Table Writer API for the Go Programming Language. -// The protocols were written in pure Go and works on windows and unix systems - -package tablewriter - -import ( - "math" - "strings" - - "github.com/mattn/go-runewidth" -) - -var ( - nl = "\n" - sp = " " -) - -const defaultPenalty = 1e5 - -// Wrap wraps s into a paragraph of lines of length lim, with minimal -// raggedness. -func WrapString(s string, lim int) ([]string, int) { - words := strings.Split(strings.Replace(s, nl, sp, -1), sp) - var lines []string - max := 0 - for _, v := range words { - max = runewidth.StringWidth(v) - if max > lim { - lim = max - } - } - for _, line := range WrapWords(words, 1, lim, defaultPenalty) { - lines = append(lines, strings.Join(line, sp)) - } - return lines, lim -} - -// WrapWords is the low-level line-breaking algorithm, useful if you need more -// control over the details of the text wrapping process. For most uses, -// WrapString will be sufficient and more convenient. -// -// WrapWords splits a list of words into lines with minimal "raggedness", -// treating each rune as one unit, accounting for spc units between adjacent -// words on each line, and attempting to limit lines to lim units. Raggedness -// is the total error over all lines, where error is the square of the -// difference of the length of the line and lim. Too-long lines (which only -// happen when a single word is longer than lim units) have pen penalty units -// added to the error. 
-func WrapWords(words []string, spc, lim, pen int) [][]string { - n := len(words) - - length := make([][]int, n) - for i := 0; i < n; i++ { - length[i] = make([]int, n) - length[i][i] = runewidth.StringWidth(words[i]) - for j := i + 1; j < n; j++ { - length[i][j] = length[i][j-1] + spc + runewidth.StringWidth(words[j]) - } - } - nbrk := make([]int, n) - cost := make([]int, n) - for i := range cost { - cost[i] = math.MaxInt32 - } - for i := n - 1; i >= 0; i-- { - if length[i][n-1] <= lim { - cost[i] = 0 - nbrk[i] = n - } else { - for j := i + 1; j < n; j++ { - d := lim - length[i][j-1] - c := d*d + cost[j] - if length[i][j-1] > lim { - c += pen // too-long lines get a worse penalty - } - if c < cost[i] { - cost[i] = c - nbrk[i] = j - } - } - } - } - var lines [][]string - i := 0 - for i < n { - lines = append(lines, words[i:nbrk[i]]) - i = nbrk[i] - } - return lines -} - -// getLines decomposes a multiline string into a slice of strings. -func getLines(s string) []string { - return strings.Split(s, nl) -} diff --git a/vendor/github.com/pelletier/go-toml/v2/.dockerignore b/vendor/github.com/pelletier/go-toml/v2/.dockerignore deleted file mode 100644 index 7b5883475..000000000 --- a/vendor/github.com/pelletier/go-toml/v2/.dockerignore +++ /dev/null @@ -1,2 +0,0 @@ -cmd/tomll/tomll -cmd/tomljson/tomljson diff --git a/vendor/github.com/pelletier/go-toml/v2/.gitattributes b/vendor/github.com/pelletier/go-toml/v2/.gitattributes deleted file mode 100644 index 34a0a21a3..000000000 --- a/vendor/github.com/pelletier/go-toml/v2/.gitattributes +++ /dev/null @@ -1,4 +0,0 @@ -* text=auto - -benchmark/benchmark.toml text eol=lf -testdata/** text eol=lf diff --git a/vendor/github.com/pelletier/go-toml/v2/.gitignore b/vendor/github.com/pelletier/go-toml/v2/.gitignore deleted file mode 100644 index 4b7c4eda3..000000000 --- a/vendor/github.com/pelletier/go-toml/v2/.gitignore +++ /dev/null @@ -1,7 +0,0 @@ -test_program/test_program_bin -fuzz/ -cmd/tomll/tomll -cmd/tomljson/tomljson -cmd/tomltestgen/tomltestgen -dist -tests/ diff --git a/vendor/github.com/pelletier/go-toml/v2/.golangci.toml b/vendor/github.com/pelletier/go-toml/v2/.golangci.toml deleted file mode 100644 index 067db5517..000000000 --- a/vendor/github.com/pelletier/go-toml/v2/.golangci.toml +++ /dev/null @@ -1,84 +0,0 @@ -[service] -golangci-lint-version = "1.39.0" - -[linters-settings.wsl] -allow-assign-and-anything = true - -[linters-settings.exhaustive] -default-signifies-exhaustive = true - -[linters] -disable-all = true -enable = [ - "asciicheck", - "bodyclose", - "cyclop", - "deadcode", - "depguard", - "dogsled", - "dupl", - "durationcheck", - "errcheck", - "errorlint", - "exhaustive", - # "exhaustivestruct", - "exportloopref", - "forbidigo", - # "forcetypeassert", - "funlen", - "gci", - # "gochecknoglobals", - "gochecknoinits", - "gocognit", - "goconst", - "gocritic", - "gocyclo", - "godot", - "godox", - # "goerr113", - "gofmt", - "gofumpt", - "goheader", - "goimports", - "golint", - "gomnd", - # "gomoddirectives", - "gomodguard", - "goprintffuncname", - "gosec", - "gosimple", - "govet", - # "ifshort", - "importas", - "ineffassign", - "lll", - "makezero", - "misspell", - "nakedret", - "nestif", - "nilerr", - # "nlreturn", - "noctx", - "nolintlint", - #"paralleltest", - "prealloc", - "predeclared", - "revive", - "rowserrcheck", - "sqlclosecheck", - "staticcheck", - "structcheck", - "stylecheck", - # "testpackage", - "thelper", - "tparallel", - "typecheck", - "unconvert", - "unparam", - "unused", - "varcheck", - "wastedassign", - 
"whitespace", - # "wrapcheck", - # "wsl" -] diff --git a/vendor/github.com/pelletier/go-toml/v2/.goreleaser.yaml b/vendor/github.com/pelletier/go-toml/v2/.goreleaser.yaml deleted file mode 100644 index 1d8b69e65..000000000 --- a/vendor/github.com/pelletier/go-toml/v2/.goreleaser.yaml +++ /dev/null @@ -1,126 +0,0 @@ -before: - hooks: - - go mod tidy - - go fmt ./... - - go test ./... -builds: - - id: tomll - main: ./cmd/tomll - binary: tomll - env: - - CGO_ENABLED=0 - flags: - - -trimpath - ldflags: - - -X main.version={{.Version}} -X main.commit={{.Commit}} -X main.date={{.CommitDate}} - mod_timestamp: '{{ .CommitTimestamp }}' - targets: - - linux_amd64 - - linux_arm64 - - linux_arm - - linux_riscv64 - - windows_amd64 - - windows_arm64 - - windows_arm - - darwin_amd64 - - darwin_arm64 - - id: tomljson - main: ./cmd/tomljson - binary: tomljson - env: - - CGO_ENABLED=0 - flags: - - -trimpath - ldflags: - - -X main.version={{.Version}} -X main.commit={{.Commit}} -X main.date={{.CommitDate}} - mod_timestamp: '{{ .CommitTimestamp }}' - targets: - - linux_amd64 - - linux_arm64 - - linux_arm - - linux_riscv64 - - windows_amd64 - - windows_arm64 - - windows_arm - - darwin_amd64 - - darwin_arm64 - - id: jsontoml - main: ./cmd/jsontoml - binary: jsontoml - env: - - CGO_ENABLED=0 - flags: - - -trimpath - ldflags: - - -X main.version={{.Version}} -X main.commit={{.Commit}} -X main.date={{.CommitDate}} - mod_timestamp: '{{ .CommitTimestamp }}' - targets: - - linux_amd64 - - linux_arm64 - - linux_riscv64 - - linux_arm - - windows_amd64 - - windows_arm64 - - windows_arm - - darwin_amd64 - - darwin_arm64 -universal_binaries: - - id: tomll - replace: true - name_template: tomll - - id: tomljson - replace: true - name_template: tomljson - - id: jsontoml - replace: true - name_template: jsontoml -archives: -- id: jsontoml - format: tar.xz - builds: - - jsontoml - files: - - none* - name_template: "{{ .Binary }}_{{.Version}}_{{ .Os }}_{{ .Arch }}" -- id: tomljson - format: tar.xz - builds: - - tomljson - files: - - none* - name_template: "{{ .Binary }}_{{.Version}}_{{ .Os }}_{{ .Arch }}" -- id: tomll - format: tar.xz - builds: - - tomll - files: - - none* - name_template: "{{ .Binary }}_{{.Version}}_{{ .Os }}_{{ .Arch }}" -dockers: - - id: tools - goos: linux - goarch: amd64 - ids: - - jsontoml - - tomljson - - tomll - image_templates: - - "ghcr.io/pelletier/go-toml:latest" - - "ghcr.io/pelletier/go-toml:{{ .Tag }}" - - "ghcr.io/pelletier/go-toml:v{{ .Major }}" - skip_push: false -checksum: - name_template: 'sha256sums.txt' -snapshot: - name_template: "{{ incpatch .Version }}-next" -release: - github: - owner: pelletier - name: go-toml - draft: true - prerelease: auto - mode: replace -changelog: - use: github-native -announce: - skip: true diff --git a/vendor/github.com/pelletier/go-toml/v2/CONTRIBUTING.md b/vendor/github.com/pelletier/go-toml/v2/CONTRIBUTING.md deleted file mode 100644 index 04dd12bcb..000000000 --- a/vendor/github.com/pelletier/go-toml/v2/CONTRIBUTING.md +++ /dev/null @@ -1,196 +0,0 @@ -# Contributing - -Thank you for your interest in go-toml! We appreciate you considering -contributing to go-toml! - -The main goal is the project is to provide an easy-to-use and efficient TOML -implementation for Go that gets the job done and gets out of your way – dealing -with TOML is probably not the central piece of your project. - -As the single maintainer of go-toml, time is scarce. All help, big or small, is -more than welcomed! 
- -## Ask questions - -Any question you may have, somebody else might have it too. Always feel free to -ask them on the [discussion board][discussions]. We will try to answer them as -clearly and quickly as possible, time permitting. - -Asking questions also helps us identify areas where the documentation needs -improvement, or new features that weren't envisioned before. Sometimes, a -seemingly innocent question leads to the fix of a bug. Don't hesitate and ask -away! - -[discussions]: https://github.com/pelletier/go-toml/discussions - -## Improve the documentation - -The best way to share your knowledge and experience with go-toml is to improve -the documentation. Fix a typo, clarify an interface, add an example, anything -goes! - -The documentation is present in the [README][readme] and thorough the source -code. On release, it gets updated on [pkg.go.dev][pkg.go.dev]. To make a change -to the documentation, create a pull request with your proposed changes. For -simple changes like that, the easiest way to go is probably the "Fork this -project and edit the file" button on Github, displayed at the top right of the -file. Unless it's a trivial change (for example a typo), provide a little bit of -context in your pull request description or commit message. - -## Report a bug - -Found a bug! Sorry to hear that :(. Help us and other track them down and fix by -reporting it. [File a new bug report][bug-report] on the [issues -tracker][issues-tracker]. The template should provide enough guidance on what to -include. When in doubt: add more details! By reducing ambiguity and providing -more information, it decreases back and forth and saves everyone time. - -## Code changes - -Want to contribute a patch? Very happy to hear that! - -First, some high-level rules: - -- A short proposal with some POC code is better than a lengthy piece of text - with no code. Code speaks louder than words. That being said, bigger changes - should probably start with a [discussion][discussions]. -- No backward-incompatible patch will be accepted unless discussed. Sometimes - it's hard, but we try not to break people's programs unless we absolutely have - to. -- If you are writing a new feature or extending an existing one, make sure to - write some documentation. -- Bug fixes need to be accompanied with regression tests. -- New code needs to be tested. -- Your commit messages need to explain why the change is needed, even if already - included in the PR description. - -It does sound like a lot, but those best practices are here to save time overall -and continuously improve the quality of the project, which is something everyone -benefits from. - -### Get started - -The fairly standard code contribution process looks like that: - -1. [Fork the project][fork]. -2. Make your changes, commit on any branch you like. -3. [Open up a pull request][pull-request] -4. Review, potential ask for changes. -5. Merge. - -Feel free to ask for help! You can create draft pull requests to gather -some early feedback! - -### Run the tests - -You can run tests for go-toml using Go's test tool: `go test -race ./...`. - -During the pull request process, all tests will be ran on Linux, Windows, and -MacOS on the last two versions of Go. - -However, given GitHub's new policy to _not_ run Actions on pull requests until a -maintainer clicks on button, it is highly recommended that you run them locally -as you make changes. - -### Check coverage - -We use `go tool cover` to compute test coverage. 
Most code editors have a way to -run and display code coverage, but at the end of the day, we do this: - -``` -go test -covermode=atomic -coverprofile=coverage.out -go tool cover -func=coverage.out -``` - -and verify that the overall percentage of tested code does not go down. This is -a requirement. As a rule of thumb, all lines of code touched by your changes -should be covered. On Unix you can use `./ci.sh coverage -d v2` to check if your -code lowers the coverage. - -### Verify performance - -Go-toml aims to stay efficient. We rely on a set of scenarios executed with Go's -builtin benchmark systems. Because of their noisy nature, containers provided by -Github Actions cannot be reliably used for benchmarking. As a result, you are -responsible for checking that your changes do not incur a performance penalty. -You can run their following to execute benchmarks: - -``` -go test ./... -bench=. -count=10 -``` - -Benchmark results should be compared against each other with -[benchstat][benchstat]. Typical flow looks like this: - -1. On the `v2` branch, run `go test ./... -bench=. -count 10` and save output to - a file (for example `old.txt`). -2. Make some code changes. -3. Run `go test ....` again, and save the output to an other file (for example - `new.txt`). -4. Run `benchstat old.txt new.txt` to check that time/op does not go up in any - test. - -On Unix you can use `./ci.sh benchmark -d v2` to verify how your code impacts -performance. - -It is highly encouraged to add the benchstat results to your pull request -description. Pull requests that lower performance will receive more scrutiny. - -[benchstat]: https://pkg.go.dev/golang.org/x/perf/cmd/benchstat - -### Style - -Try to look around and follow the same format and structure as the rest of the -code. We enforce using `go fmt` on the whole code base. - ---- - -## Maintainers-only - -### Merge pull request - -Checklist: - -- Passing CI. -- Does not introduce backward-incompatible changes (unless discussed). -- Has relevant doc changes. -- Benchstat does not show performance regression. -- Pull request is [labeled appropriately][pr-labels]. -- Title will be understandable in the changelog. - -1. Merge using "squash and merge". -2. Make sure to edit the commit message to keep all the useful information - nice and clean. -3. Make sure the commit title is clear and contains the PR number (#123). - -### New release - -1. Decide on the next version number. Use semver. -2. Generate release notes using [`gh`][gh]. Example: -``` -$ gh api -X POST \ - -F tag_name='v2.0.0-beta.5' \ - -F target_commitish='v2' \ - -F previous_tag_name='v2.0.0-beta.4' \ - --jq '.body' \ - repos/pelletier/go-toml/releases/generate-notes -``` -3. Look for "Other changes". That would indicate a pull request not labeled - properly. Tweak labels and pull request titles until changelog looks good for - users. -4. [Draft new release][new-release]. -5. Fill tag and target with the same value used to generate the changelog. -6. Set title to the new tag value. -7. Paste the generated changelog. -8. Check "create discussion", in the "Releases" category. -9. Check pre-release if new version is an alpha or beta. 
- -[issues-tracker]: https://github.com/pelletier/go-toml/issues -[bug-report]: https://github.com/pelletier/go-toml/issues/new?template=bug_report.md -[pkg.go.dev]: https://pkg.go.dev/github.com/pelletier/go-toml -[readme]: ./README.md -[fork]: https://help.github.com/articles/fork-a-repo -[pull-request]: https://help.github.com/en/articles/creating-a-pull-request -[new-release]: https://github.com/pelletier/go-toml/releases/new -[gh]: https://github.com/cli/cli -[pr-labels]: https://github.com/pelletier/go-toml/blob/v2/.github/release.yml diff --git a/vendor/github.com/pelletier/go-toml/v2/Dockerfile b/vendor/github.com/pelletier/go-toml/v2/Dockerfile deleted file mode 100644 index b9e933237..000000000 --- a/vendor/github.com/pelletier/go-toml/v2/Dockerfile +++ /dev/null @@ -1,5 +0,0 @@ -FROM scratch -ENV PATH "$PATH:/bin" -COPY tomll /bin/tomll -COPY tomljson /bin/tomljson -COPY jsontoml /bin/jsontoml diff --git a/vendor/github.com/pelletier/go-toml/v2/LICENSE b/vendor/github.com/pelletier/go-toml/v2/LICENSE deleted file mode 100644 index 991e2ae96..000000000 --- a/vendor/github.com/pelletier/go-toml/v2/LICENSE +++ /dev/null @@ -1,22 +0,0 @@ -The MIT License (MIT) - -go-toml v2 -Copyright (c) 2021 - 2023 Thomas Pelletier - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. diff --git a/vendor/github.com/pelletier/go-toml/v2/README.md b/vendor/github.com/pelletier/go-toml/v2/README.md deleted file mode 100644 index b10f97f0b..000000000 --- a/vendor/github.com/pelletier/go-toml/v2/README.md +++ /dev/null @@ -1,575 +0,0 @@ -# go-toml v2 - -Go library for the [TOML](https://toml.io/en/) format. - -This library supports [TOML v1.0.0](https://toml.io/en/v1.0.0). - -[🐞 Bug Reports](https://github.com/pelletier/go-toml/issues) - -[💬 Anything else](https://github.com/pelletier/go-toml/discussions) - -## Documentation - -Full API, examples, and implementation notes are available in the Go -documentation. - -[![Go Reference](https://pkg.go.dev/badge/github.com/pelletier/go-toml/v2.svg)](https://pkg.go.dev/github.com/pelletier/go-toml/v2) - -## Import - -```go -import "github.com/pelletier/go-toml/v2" -``` - -See [Modules](#Modules). - -## Features - -### Stdlib behavior - -As much as possible, this library is designed to behave similarly as the -standard library's `encoding/json`. - -### Performance - -While go-toml favors usability, it is written with performance in mind. Most -operations should not be shockingly slow. See [benchmarks](#benchmarks). 
- -### Strict mode - -`Decoder` can be set to "strict mode", which makes it error when some parts of -the TOML document was not present in the target structure. This is a great way -to check for typos. [See example in the documentation][strict]. - -[strict]: https://pkg.go.dev/github.com/pelletier/go-toml/v2#example-Decoder.DisallowUnknownFields - -### Contextualized errors - -When most decoding errors occur, go-toml returns [`DecodeError`][decode-err], -which contains a human readable contextualized version of the error. For -example: - -``` -1| [server] -2| path = 100 - | ~~~ cannot decode TOML integer into struct field toml_test.Server.Path of type string -3| port = 50 -``` - -[decode-err]: https://pkg.go.dev/github.com/pelletier/go-toml/v2#DecodeError - -### Local date and time support - -TOML supports native [local date/times][ldt]. It allows to represent a given -date, time, or date-time without relation to a timezone or offset. To support -this use-case, go-toml provides [`LocalDate`][tld], [`LocalTime`][tlt], and -[`LocalDateTime`][tldt]. Those types can be transformed to and from `time.Time`, -making them convenient yet unambiguous structures for their respective TOML -representation. - -[ldt]: https://toml.io/en/v1.0.0#local-date-time -[tld]: https://pkg.go.dev/github.com/pelletier/go-toml/v2#LocalDate -[tlt]: https://pkg.go.dev/github.com/pelletier/go-toml/v2#LocalTime -[tldt]: https://pkg.go.dev/github.com/pelletier/go-toml/v2#LocalDateTime - -### Commented config - -Since TOML is often used for configuration files, go-toml can emit documents -annotated with [comments and commented-out values][comments-example]. For -example, it can generate the following file: - -```toml -# Host IP to connect to. -host = '127.0.0.1' -# Port of the remote server. -port = 4242 - -# Encryption parameters (optional) -# [TLS] -# cipher = 'AEAD-AES128-GCM-SHA256' -# version = 'TLS 1.3' -``` - -[comments-example]: https://pkg.go.dev/github.com/pelletier/go-toml/v2#example-Marshal-Commented - -## Getting started - -Given the following struct, let's see how to read it and write it as TOML: - -```go -type MyConfig struct { - Version int - Name string - Tags []string -} -``` - -### Unmarshaling - -[`Unmarshal`][unmarshal] reads a TOML document and fills a Go structure with its -content. For example: - -```go -doc := ` -version = 2 -name = "go-toml" -tags = ["go", "toml"] -` - -var cfg MyConfig -err := toml.Unmarshal([]byte(doc), &cfg) -if err != nil { - panic(err) -} -fmt.Println("version:", cfg.Version) -fmt.Println("name:", cfg.Name) -fmt.Println("tags:", cfg.Tags) - -// Output: -// version: 2 -// name: go-toml -// tags: [go toml] -``` - -[unmarshal]: https://pkg.go.dev/github.com/pelletier/go-toml/v2#Unmarshal - -### Marshaling - -[`Marshal`][marshal] is the opposite of Unmarshal: it represents a Go structure -as a TOML document: - -```go -cfg := MyConfig{ - Version: 2, - Name: "go-toml", - Tags: []string{"go", "toml"}, -} - -b, err := toml.Marshal(cfg) -if err != nil { - panic(err) -} -fmt.Println(string(b)) - -// Output: -// Version = 2 -// Name = 'go-toml' -// Tags = ['go', 'toml'] -``` - -[marshal]: https://pkg.go.dev/github.com/pelletier/go-toml/v2#Marshal - -## Unstable API - -This API does not yet follow the backward compatibility guarantees of this -library. They provide early access to features that may have rough edges or an -API subject to change. - -### Parser - -Parser is the unstable API that allows iterative parsing of a TOML document at -the AST level. 
See https://pkg.go.dev/github.com/pelletier/go-toml/v2/unstable. - -## Benchmarks - -Execution time speedup compared to other Go TOML libraries: - - - - - - - - - - - - - -
-| Benchmark                        | go-toml v1 | BurntSushi/toml |
-|----------------------------------|------------|-----------------|
-| Marshal/HugoFrontMatter-2        | 1.9x       | 2.2x            |
-| Marshal/ReferenceFile/map-2      | 1.7x       | 2.1x            |
-| Marshal/ReferenceFile/struct-2   | 2.2x       | 3.0x            |
-| Unmarshal/HugoFrontMatter-2      | 2.9x       | 2.7x            |
-| Unmarshal/ReferenceFile/map-2    | 2.6x       | 2.7x            |
-| Unmarshal/ReferenceFile/struct-2 | 4.6x       | 5.1x            |
-
-<details><summary>See more</summary>
-
-The table above has the results of the most common use-cases. The table below
-contains the results of all benchmarks, including unrealistic ones. It is
-provided for completeness.
-
-| Benchmark                         | go-toml v1 | BurntSushi/toml |
-|-----------------------------------|------------|-----------------|
-| Marshal/SimpleDocument/map-2      | 1.8x       | 2.7x            |
-| Marshal/SimpleDocument/struct-2   | 2.7x       | 3.8x            |
-| Unmarshal/SimpleDocument/map-2    | 3.8x       | 3.0x            |
-| Unmarshal/SimpleDocument/struct-2 | 5.6x       | 4.1x            |
-| UnmarshalDataset/example-2        | 3.0x       | 3.2x            |
-| UnmarshalDataset/code-2           | 2.3x       | 2.9x            |
-| UnmarshalDataset/twitter-2        | 2.6x       | 2.7x            |
-| UnmarshalDataset/citm_catalog-2   | 2.2x       | 2.3x            |
-| UnmarshalDataset/canada-2         | 1.8x       | 1.5x            |
-| UnmarshalDataset/config-2         | 4.1x       | 2.9x            |
-| geomean                           | 2.7x       | 2.8x            |
-
-This table can be generated with `./ci.sh benchmark -a -html`.
-
-</details>
- -## Modules - -go-toml uses Go's standard modules system. - -Installation instructions: - -- Go ≥ 1.16: Nothing to do. Use the import in your code. The `go` command deals - with it automatically. -- Go ≥ 1.13: `GO111MODULE=on go get github.com/pelletier/go-toml/v2`. - -In case of trouble: [Go Modules FAQ][mod-faq]. - -[mod-faq]: https://github.com/golang/go/wiki/Modules#why-does-installing-a-tool-via-go-get-fail-with-error-cannot-find-main-module - -## Tools - -Go-toml provides three handy command line tools: - - * `tomljson`: Reads a TOML file and outputs its JSON representation. - - ``` - $ go install github.com/pelletier/go-toml/v2/cmd/tomljson@latest - $ tomljson --help - ``` - - * `jsontoml`: Reads a JSON file and outputs a TOML representation. - - ``` - $ go install github.com/pelletier/go-toml/v2/cmd/jsontoml@latest - $ jsontoml --help - ``` - - * `tomll`: Lints and reformats a TOML file. - - ``` - $ go install github.com/pelletier/go-toml/v2/cmd/tomll@latest - $ tomll --help - ``` - -### Docker image - -Those tools are also available as a [Docker image][docker]. For example, to use -`tomljson`: - -``` -docker run -i ghcr.io/pelletier/go-toml:v2 tomljson < example.toml -``` - -Multiple versions are availble on [ghcr.io][docker]. - -[docker]: https://github.com/pelletier/go-toml/pkgs/container/go-toml - -## Migrating from v1 - -This section describes the differences between v1 and v2, with some pointers on -how to get the original behavior when possible. - -### Decoding / Unmarshal - -#### Automatic field name guessing - -When unmarshaling to a struct, if a key in the TOML document does not exactly -match the name of a struct field or any of the `toml`-tagged field, v1 tries -multiple variations of the key ([code][v1-keys]). - -V2 instead does a case-insensitive matching, like `encoding/json`. - -This could impact you if you are relying on casing to differentiate two fields, -and one of them is a not using the `toml` struct tag. The recommended solution -is to be specific about tag names for those fields using the `toml` struct tag. - -[v1-keys]: https://github.com/pelletier/go-toml/blob/a2e52561804c6cd9392ebf0048ca64fe4af67a43/marshal.go#L775-L781 - -#### Ignore preexisting value in interface - -When decoding into a non-nil `interface{}`, go-toml v1 uses the type of the -element in the interface to decode the object. For example: - -```go -type inner struct { - B interface{} -} -type doc struct { - A interface{} -} - -d := doc{ - A: inner{ - B: "Before", - }, -} - -data := ` -[A] -B = "After" -` - -toml.Unmarshal([]byte(data), &d) -fmt.Printf("toml v1: %#v\n", d) - -// toml v1: main.doc{A:main.inner{B:"After"}} -``` - -In this case, field `A` is of type `interface{}`, containing a `inner` struct. -V1 sees that type and uses it when decoding the object. - -When decoding an object into an `interface{}`, V2 instead disregards whatever -value the `interface{}` may contain and replaces it with a -`map[string]interface{}`. With the same data structure as above, here is what -the result looks like: - -```go -toml.Unmarshal([]byte(data), &d) -fmt.Printf("toml v2: %#v\n", d) - -// toml v2: main.doc{A:map[string]interface {}{"B":"After"}} -``` - -This is to match `encoding/json`'s behavior. There is no way to make the v2 -decoder behave like v1. - -#### Values out of array bounds ignored - -When decoding into an array, v1 returns an error when the number of elements -contained in the doc is superior to the capacity of the array. 
For example: - -```go -type doc struct { - A [2]string -} -d := doc{} -err := toml.Unmarshal([]byte(`A = ["one", "two", "many"]`), &d) -fmt.Println(err) - -// (1, 1): unmarshal: TOML array length (3) exceeds destination array length (2) -``` - -In the same situation, v2 ignores the last value: - -```go -err := toml.Unmarshal([]byte(`A = ["one", "two", "many"]`), &d) -fmt.Println("err:", err, "d:", d) -// err: d: {[one two]} -``` - -This is to match `encoding/json`'s behavior. There is no way to make the v2 -decoder behave like v1. - -#### Support for `toml.Unmarshaler` has been dropped - -This method was not widely used, poorly defined, and added a lot of complexity. -A similar effect can be achieved by implementing the `encoding.TextUnmarshaler` -interface and use strings. - -#### Support for `default` struct tag has been dropped - -This feature adds complexity and a poorly defined API for an effect that can be -accomplished outside of the library. - -It does not seem like other format parsers in Go support that feature (the -project referenced in the original ticket #202 has not been updated since 2017). -Given that go-toml v2 should not touch values not in the document, the same -effect can be achieved by pre-filling the struct with defaults (libraries like -[go-defaults][go-defaults] can help). Also, string representation is not well -defined for all types: it creates issues like #278. - -The recommended replacement is pre-filling the struct before unmarshaling. - -[go-defaults]: https://github.com/mcuadros/go-defaults - -#### `toml.Tree` replacement - -This structure was the initial attempt at providing a document model for -go-toml. It allows manipulating the structure of any document, encoding and -decoding from their TOML representation. While a more robust feature was -initially planned in go-toml v2, this has been ultimately [removed from -scope][nodoc] of this library, with no plan to add it back at the moment. The -closest equivalent at the moment would be to unmarshal into an `interface{}` and -use type assertions and/or reflection to manipulate the arbitrary -structure. However this would fall short of providing all of the TOML features -such as adding comments and be specific about whitespace. - - -#### `toml.Position` are not retrievable anymore - -The API for retrieving the position (line, column) of a specific TOML element do -not exist anymore. This was done to minimize the amount of concepts introduced -by the library (query path), and avoid the performance hit related to storing -positions in the absence of a document model, for a feature that seemed to have -little use. Errors however have gained more detailed position -information. Position retrieval seems better fitted for a document model, which -has been [removed from the scope][nodoc] of go-toml v2 at the moment. - -### Encoding / Marshal - -#### Default struct fields order - -V1 emits struct fields order alphabetically by default. V2 struct fields are -emitted in order they are defined. For example: - -```go -type S struct { - B string - A string -} - -data := S{ - B: "B", - A: "A", -} - -b, _ := tomlv1.Marshal(data) -fmt.Println("v1:\n" + string(b)) - -b, _ = tomlv2.Marshal(data) -fmt.Println("v2:\n" + string(b)) - -// Output: -// v1: -// A = "A" -// B = "B" - -// v2: -// B = 'B' -// A = 'A' -``` - -There is no way to make v2 encoder behave like v1. A workaround could be to -manually sort the fields alphabetically in the struct definition, or generate -struct types using `reflect.StructOf`. 
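
As an illustration only (this example is not part of the original README), a minimal sketch of that `reflect.StructOf` workaround could look like the following. It assumes every field of the struct is exported, and simply rebuilds the value with its fields sorted by name before handing it to the v2 encoder:

```go
package main

import (
	"fmt"
	"reflect"
	"sort"

	"github.com/pelletier/go-toml/v2"
)

// marshalSorted emits the fields of a struct in alphabetical order,
// approximating go-toml v1's default ordering with the v2 encoder.
func marshalSorted(v interface{}) ([]byte, error) {
	rv := reflect.ValueOf(v)
	rt := rv.Type()

	// Collect the (exported) fields and sort them by name.
	fields := make([]reflect.StructField, rt.NumField())
	for i := range fields {
		fields[i] = rt.Field(i)
	}
	sort.Slice(fields, func(i, j int) bool { return fields[i].Name < fields[j].Name })

	// Build a struct type with the sorted layout and copy the values over.
	sorted := reflect.New(reflect.StructOf(fields)).Elem()
	for _, f := range fields {
		sorted.FieldByName(f.Name).Set(rv.FieldByName(f.Name))
	}

	return toml.Marshal(sorted.Interface())
}

func main() {
	type S struct {
		B string
		A string
	}

	b, err := marshalSorted(S{B: "B", A: "A"})
	if err != nil {
		panic(err)
	}
	fmt.Print(string(b))

	// A = 'A'
	// B = 'B'
}
```

Struct tags are carried over by `reflect.StructOf`, so `toml` tags keep working; unexported fields would have to be filtered out first, since `reflect.StructOf` panics on them.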
- -#### No indentation by default - -V1 automatically indents content of tables by default. V2 does not. However the -same behavior can be obtained using [`Encoder.SetIndentTables`][sit]. For example: - -```go -data := map[string]interface{}{ - "table": map[string]string{ - "key": "value", - }, -} - -b, _ := tomlv1.Marshal(data) -fmt.Println("v1:\n" + string(b)) - -b, _ = tomlv2.Marshal(data) -fmt.Println("v2:\n" + string(b)) - -buf := bytes.Buffer{} -enc := tomlv2.NewEncoder(&buf) -enc.SetIndentTables(true) -enc.Encode(data) -fmt.Println("v2 Encoder:\n" + string(buf.Bytes())) - -// Output: -// v1: -// -// [table] -// key = "value" -// -// v2: -// [table] -// key = 'value' -// -// -// v2 Encoder: -// [table] -// key = 'value' -``` - -[sit]: https://pkg.go.dev/github.com/pelletier/go-toml/v2#Encoder.SetIndentTables - -#### Keys and strings are single quoted - -V1 always uses double quotes (`"`) around strings and keys that cannot be -represented bare (unquoted). V2 uses single quotes instead by default (`'`), -unless a character cannot be represented, then falls back to double quotes. As a -result of this change, `Encoder.QuoteMapKeys` has been removed, as it is not -useful anymore. - -There is no way to make v2 encoder behave like v1. - -#### `TextMarshaler` emits as a string, not TOML - -Types that implement [`encoding.TextMarshaler`][tm] can emit arbitrary TOML in -v1. The encoder would append the result to the output directly. In v2 the result -is wrapped in a string. As a result, this interface cannot be implemented by the -root object. - -There is no way to make v2 encoder behave like v1. - -[tm]: https://golang.org/pkg/encoding/#TextMarshaler - -#### `Encoder.CompactComments` has been removed - -Emitting compact comments is now the default behavior of go-toml. This option -is not necessary anymore. - -#### Struct tags have been merged - -V1 used to provide multiple struct tags: `comment`, `commented`, `multiline`, -`toml`, and `omitempty`. To behave more like the standard library, v2 has merged -`toml`, `multiline`, `commented`, and `omitempty`. For example: - -```go -type doc struct { - // v1 - F string `toml:"field" multiline:"true" omitempty:"true" commented:"true"` - // v2 - F string `toml:"field,multiline,omitempty,commented"` -} -``` - -Has a result, the `Encoder.SetTag*` methods have been removed, as there is just -one tag now. - -#### `Encoder.ArraysWithOneElementPerLine` has been renamed - -The new name is `Encoder.SetArraysMultiline`. The behavior should be the same. - -#### `Encoder.Indentation` has been renamed - -The new name is `Encoder.SetIndentSymbol`. The behavior should be the same. - - -#### Embedded structs behave like stdlib - -V1 defaults to merging embedded struct fields into the embedding struct. This -behavior was unexpected because it does not follow the standard library. To -avoid breaking backward compatibility, the `Encoder.PromoteAnonymous` method was -added to make the encoder behave correctly. Given backward compatibility is not -a problem anymore, v2 does the right thing by default: it follows the behavior -of `encoding/json`. `Encoder.PromoteAnonymous` has been removed. - -[nodoc]: https://github.com/pelletier/go-toml/discussions/506#discussioncomment-1526038 - -### `query` - -go-toml v1 provided the [`go-toml/query`][query] package. It allowed to run -JSONPath-style queries on TOML files. This feature is not available in v2. For a -replacement, check out [dasel][dasel]. 
- -This package has been removed because it was essentially not supported anymore -(last commit May 2020), increased the complexity of the code base, and more -complete solutions exist out there. - -[query]: https://github.com/pelletier/go-toml/tree/f99d6bbca119636aeafcf351ee52b3d202782627/query -[dasel]: https://github.com/TomWright/dasel - -## Versioning - -Go-toml follows [Semantic Versioning](https://semver.org). The supported version -of [TOML](https://github.com/toml-lang/toml) is indicated at the beginning of -this document. The last two major versions of Go are supported -(see [Go Release Policy](https://golang.org/doc/devel/release.html#policy)). - -## License - -The MIT License (MIT). Read [LICENSE](LICENSE). diff --git a/vendor/github.com/pelletier/go-toml/v2/SECURITY.md b/vendor/github.com/pelletier/go-toml/v2/SECURITY.md deleted file mode 100644 index d4d554fda..000000000 --- a/vendor/github.com/pelletier/go-toml/v2/SECURITY.md +++ /dev/null @@ -1,16 +0,0 @@ -# Security Policy - -## Supported Versions - -| Version | Supported | -| ---------- | ------------------ | -| Latest 2.x | :white_check_mark: | -| All 1.x | :x: | -| All 0.x | :x: | - -## Reporting a Vulnerability - -Email a vulnerability report to `security@pelletier.codes`. Make sure to include -as many details as possible to reproduce the vulnerability. This is a -side-project: I will try to get back to you as quickly as possible, time -permitting in my personal life. Providing a working patch helps very much! diff --git a/vendor/github.com/pelletier/go-toml/v2/ci.sh b/vendor/github.com/pelletier/go-toml/v2/ci.sh deleted file mode 100644 index 86217a9b0..000000000 --- a/vendor/github.com/pelletier/go-toml/v2/ci.sh +++ /dev/null @@ -1,284 +0,0 @@ -#!/usr/bin/env bash - - -stderr() { - echo "$@" 1>&2 -} - -usage() { - b=$(basename "$0") - echo $b: ERROR: "$@" 1>&2 - - cat 1>&2 < coverage.out - go tool cover -func=coverage.out - echo "Coverage profile for ${branch}: ${dir}/coverage.out" >&2 - popd - - if [ "${branch}" != "HEAD" ]; then - git worktree remove --force "$dir" - fi -} - -coverage() { - case "$1" in - -d) - shift - target="${1?Need to provide a target branch argument}" - - output_dir="$(mktemp -d)" - target_out="${output_dir}/target.txt" - head_out="${output_dir}/head.txt" - - cover "${target}" > "${target_out}" - cover "HEAD" > "${head_out}" - - cat "${target_out}" - cat "${head_out}" - - echo "" - - target_pct="$(tail -n2 ${target_out} | head -n1 | sed -E 's/.*total.*\t([0-9.]+)%.*/\1/')" - head_pct="$(tail -n2 ${head_out} | head -n1 | sed -E 's/.*total.*\t([0-9.]+)%/\1/')" - echo "Results: ${target} ${target_pct}% HEAD ${head_pct}%" - - delta_pct=$(echo "$head_pct - $target_pct" | bc -l) - echo "Delta: ${delta_pct}" - - if [[ $delta_pct = \-* ]]; then - echo "Regression!"; - - target_diff="${output_dir}/target.diff.txt" - head_diff="${output_dir}/head.diff.txt" - cat "${target_out}" | grep -E '^github.com/pelletier/go-toml' | tr -s "\t " | cut -f 2,3 | sort > "${target_diff}" - cat "${head_out}" | grep -E '^github.com/pelletier/go-toml' | tr -s "\t " | cut -f 2,3 | sort > "${head_diff}" - - diff --side-by-side --suppress-common-lines "${target_diff}" "${head_diff}" - return 1 - fi - return 0 - ;; - esac - - cover "${1-HEAD}" -} - -bench() { - branch="${1}" - out="${2}" - replace="${3}" - dir="$(mktemp -d)" - - stderr "Executing benchmark for ${branch} at ${dir}" - - if [ "${branch}" = "HEAD" ]; then - cp -r . 
"${dir}/" - else - git worktree add "$dir" "$branch" - fi - - pushd "$dir" - - if [ "${replace}" != "" ]; then - find ./benchmark/ -iname '*.go' -exec sed -i -E "s|github.com/pelletier/go-toml/v2|${replace}|g" {} \; - go get "${replace}" - fi - - export GOMAXPROCS=2 - go test '-bench=^Benchmark(Un)?[mM]arshal' -count=10 -run=Nothing ./... | tee "${out}" - popd - - if [ "${branch}" != "HEAD" ]; then - git worktree remove --force "$dir" - fi -} - -fmktemp() { - if mktemp --version &> /dev/null; then - # GNU - mktemp --suffix=-$1 - else - # BSD - mktemp -t $1 - fi -} - -benchstathtml() { -python3 - $1 <<'EOF' -import sys - -lines = [] -stop = False - -with open(sys.argv[1]) as f: - for line in f.readlines(): - line = line.strip() - if line == "": - stop = True - if not stop: - lines.append(line.split(',')) - -results = [] -for line in reversed(lines[2:]): - if len(line) < 8 or line[0] == "": - continue - v2 = float(line[1]) - results.append([ - line[0].replace("-32", ""), - "%.1fx" % (float(line[3])/v2), # v1 - "%.1fx" % (float(line[7])/v2), # bs - ]) -# move geomean to the end -results.append(results[0]) -del results[0] - - -def printtable(data): - print(""" - - - - - """) - - for r in data: - print(" ".format(*r)) - - print(""" -
-        </tbody>
-</table>""")
-
-
-def match(x):
-    return "ReferenceFile" in x[0] or "HugoFrontMatter" in x[0]
-
-above = [x for x in results if match(x)]
-below = [x for x in results if not match(x)]
-
-printtable(above)
-print("<details><summary>See more</summary>")
-print("""
-<p>The table above has the results of the most common use-cases. The table below
-contains the results of all benchmarks, including unrealistic ones. It is
-provided for completeness.</p>
-""")
-printtable(below)
-print('<p>This table can be generated with ./ci.sh benchmark -a -html.</p>')
-print("</details>
") - -EOF -} - -benchmark() { - case "$1" in - -d) - shift - target="${1?Need to provide a target branch argument}" - - old=`fmktemp ${target}` - bench "${target}" "${old}" - - new=`fmktemp HEAD` - bench HEAD "${new}" - - benchstat "${old}" "${new}" - return 0 - ;; - -a) - shift - - v2stats=`fmktemp go-toml-v2` - bench HEAD "${v2stats}" "github.com/pelletier/go-toml/v2" - v1stats=`fmktemp go-toml-v1` - bench HEAD "${v1stats}" "github.com/pelletier/go-toml" - bsstats=`fmktemp bs-toml` - bench HEAD "${bsstats}" "github.com/BurntSushi/toml" - - cp "${v2stats}" go-toml-v2.txt - cp "${v1stats}" go-toml-v1.txt - cp "${bsstats}" bs-toml.txt - - if [ "$1" = "-html" ]; then - tmpcsv=`fmktemp csv` - benchstat -format csv go-toml-v2.txt go-toml-v1.txt bs-toml.txt > $tmpcsv - benchstathtml $tmpcsv - else - benchstat go-toml-v2.txt go-toml-v1.txt bs-toml.txt - fi - - rm -f go-toml-v2.txt go-toml-v1.txt bs-toml.txt - return $? - esac - - bench "${1-HEAD}" `mktemp` -} - -case "$1" in - coverage) shift; coverage $@;; - benchmark) shift; benchmark $@;; - *) usage "bad argument $1";; -esac diff --git a/vendor/github.com/pelletier/go-toml/v2/decode.go b/vendor/github.com/pelletier/go-toml/v2/decode.go deleted file mode 100644 index f0ec3b170..000000000 --- a/vendor/github.com/pelletier/go-toml/v2/decode.go +++ /dev/null @@ -1,550 +0,0 @@ -package toml - -import ( - "fmt" - "math" - "strconv" - "time" - - "github.com/pelletier/go-toml/v2/unstable" -) - -func parseInteger(b []byte) (int64, error) { - if len(b) > 2 && b[0] == '0' { - switch b[1] { - case 'x': - return parseIntHex(b) - case 'b': - return parseIntBin(b) - case 'o': - return parseIntOct(b) - default: - panic(fmt.Errorf("invalid base '%c', should have been checked by scanIntOrFloat", b[1])) - } - } - - return parseIntDec(b) -} - -func parseLocalDate(b []byte) (LocalDate, error) { - // full-date = date-fullyear "-" date-month "-" date-mday - // date-fullyear = 4DIGIT - // date-month = 2DIGIT ; 01-12 - // date-mday = 2DIGIT ; 01-28, 01-29, 01-30, 01-31 based on month/year - var date LocalDate - - if len(b) != 10 || b[4] != '-' || b[7] != '-' { - return date, unstable.NewParserError(b, "dates are expected to have the format YYYY-MM-DD") - } - - var err error - - date.Year, err = parseDecimalDigits(b[0:4]) - if err != nil { - return LocalDate{}, err - } - - date.Month, err = parseDecimalDigits(b[5:7]) - if err != nil { - return LocalDate{}, err - } - - date.Day, err = parseDecimalDigits(b[8:10]) - if err != nil { - return LocalDate{}, err - } - - if !isValidDate(date.Year, date.Month, date.Day) { - return LocalDate{}, unstable.NewParserError(b, "impossible date") - } - - return date, nil -} - -func parseDecimalDigits(b []byte) (int, error) { - v := 0 - - for i, c := range b { - if c < '0' || c > '9' { - return 0, unstable.NewParserError(b[i:i+1], "expected digit (0-9)") - } - v *= 10 - v += int(c - '0') - } - - return v, nil -} - -func parseDateTime(b []byte) (time.Time, error) { - // offset-date-time = full-date time-delim full-time - // full-time = partial-time time-offset - // time-offset = "Z" / time-numoffset - // time-numoffset = ( "+" / "-" ) time-hour ":" time-minute - - dt, b, err := parseLocalDateTime(b) - if err != nil { - return time.Time{}, err - } - - var zone *time.Location - - if len(b) == 0 { - // parser should have checked that when assigning the date time node - panic("date time should have a timezone") - } - - if b[0] == 'Z' || b[0] == 'z' { - b = b[1:] - zone = time.UTC - } else { - const dateTimeByteLen = 6 - if len(b) != 
dateTimeByteLen { - return time.Time{}, unstable.NewParserError(b, "invalid date-time timezone") - } - var direction int - switch b[0] { - case '-': - direction = -1 - case '+': - direction = +1 - default: - return time.Time{}, unstable.NewParserError(b[:1], "invalid timezone offset character") - } - - if b[3] != ':' { - return time.Time{}, unstable.NewParserError(b[3:4], "expected a : separator") - } - - hours, err := parseDecimalDigits(b[1:3]) - if err != nil { - return time.Time{}, err - } - if hours > 23 { - return time.Time{}, unstable.NewParserError(b[:1], "invalid timezone offset hours") - } - - minutes, err := parseDecimalDigits(b[4:6]) - if err != nil { - return time.Time{}, err - } - if minutes > 59 { - return time.Time{}, unstable.NewParserError(b[:1], "invalid timezone offset minutes") - } - - seconds := direction * (hours*3600 + minutes*60) - if seconds == 0 { - zone = time.UTC - } else { - zone = time.FixedZone("", seconds) - } - b = b[dateTimeByteLen:] - } - - if len(b) > 0 { - return time.Time{}, unstable.NewParserError(b, "extra bytes at the end of the timezone") - } - - t := time.Date( - dt.Year, - time.Month(dt.Month), - dt.Day, - dt.Hour, - dt.Minute, - dt.Second, - dt.Nanosecond, - zone) - - return t, nil -} - -func parseLocalDateTime(b []byte) (LocalDateTime, []byte, error) { - var dt LocalDateTime - - const localDateTimeByteMinLen = 11 - if len(b) < localDateTimeByteMinLen { - return dt, nil, unstable.NewParserError(b, "local datetimes are expected to have the format YYYY-MM-DDTHH:MM:SS[.NNNNNNNNN]") - } - - date, err := parseLocalDate(b[:10]) - if err != nil { - return dt, nil, err - } - dt.LocalDate = date - - sep := b[10] - if sep != 'T' && sep != ' ' && sep != 't' { - return dt, nil, unstable.NewParserError(b[10:11], "datetime separator is expected to be T or a space") - } - - t, rest, err := parseLocalTime(b[11:]) - if err != nil { - return dt, nil, err - } - dt.LocalTime = t - - return dt, rest, nil -} - -// parseLocalTime is a bit different because it also returns the remaining -// []byte that is didn't need. This is to allow parseDateTime to parse those -// remaining bytes as a timezone. -func parseLocalTime(b []byte) (LocalTime, []byte, error) { - var ( - nspow = [10]int{0, 1e8, 1e7, 1e6, 1e5, 1e4, 1e3, 1e2, 1e1, 1e0} - t LocalTime - ) - - // check if b matches to have expected format HH:MM:SS[.NNNNNN] - const localTimeByteLen = 8 - if len(b) < localTimeByteLen { - return t, nil, unstable.NewParserError(b, "times are expected to have the format HH:MM:SS[.NNNNNN]") - } - - var err error - - t.Hour, err = parseDecimalDigits(b[0:2]) - if err != nil { - return t, nil, err - } - - if t.Hour > 23 { - return t, nil, unstable.NewParserError(b[0:2], "hour cannot be greater 23") - } - if b[2] != ':' { - return t, nil, unstable.NewParserError(b[2:3], "expecting colon between hours and minutes") - } - - t.Minute, err = parseDecimalDigits(b[3:5]) - if err != nil { - return t, nil, err - } - if t.Minute > 59 { - return t, nil, unstable.NewParserError(b[3:5], "minutes cannot be greater 59") - } - if b[5] != ':' { - return t, nil, unstable.NewParserError(b[5:6], "expecting colon between minutes and seconds") - } - - t.Second, err = parseDecimalDigits(b[6:8]) - if err != nil { - return t, nil, err - } - - if t.Second > 60 { - return t, nil, unstable.NewParserError(b[6:8], "seconds cannot be greater 60") - } - - b = b[8:] - - if len(b) >= 1 && b[0] == '.' 
{ - frac := 0 - precision := 0 - digits := 0 - - for i, c := range b[1:] { - if !isDigit(c) { - if i == 0 { - return t, nil, unstable.NewParserError(b[0:1], "need at least one digit after fraction point") - } - break - } - digits++ - - const maxFracPrecision = 9 - if i >= maxFracPrecision { - // go-toml allows decoding fractional seconds - // beyond the supported precision of 9 - // digits. It truncates the fractional component - // to the supported precision and ignores the - // remaining digits. - // - // https://github.com/pelletier/go-toml/discussions/707 - continue - } - - frac *= 10 - frac += int(c - '0') - precision++ - } - - if precision == 0 { - return t, nil, unstable.NewParserError(b[:1], "nanoseconds need at least one digit") - } - - t.Nanosecond = frac * nspow[precision] - t.Precision = precision - - return t, b[1+digits:], nil - } - return t, b, nil -} - -//nolint:cyclop -func parseFloat(b []byte) (float64, error) { - if len(b) == 4 && (b[0] == '+' || b[0] == '-') && b[1] == 'n' && b[2] == 'a' && b[3] == 'n' { - return math.NaN(), nil - } - - cleaned, err := checkAndRemoveUnderscoresFloats(b) - if err != nil { - return 0, err - } - - if cleaned[0] == '.' { - return 0, unstable.NewParserError(b, "float cannot start with a dot") - } - - if cleaned[len(cleaned)-1] == '.' { - return 0, unstable.NewParserError(b, "float cannot end with a dot") - } - - dotAlreadySeen := false - for i, c := range cleaned { - if c == '.' { - if dotAlreadySeen { - return 0, unstable.NewParserError(b[i:i+1], "float can have at most one decimal point") - } - if !isDigit(cleaned[i-1]) { - return 0, unstable.NewParserError(b[i-1:i+1], "float decimal point must be preceded by a digit") - } - if !isDigit(cleaned[i+1]) { - return 0, unstable.NewParserError(b[i:i+2], "float decimal point must be followed by a digit") - } - dotAlreadySeen = true - } - } - - start := 0 - if cleaned[0] == '+' || cleaned[0] == '-' { - start = 1 - } - if cleaned[start] == '0' && len(cleaned) > start+1 && isDigit(cleaned[start+1]) { - return 0, unstable.NewParserError(b, "float integer part cannot have leading zeroes") - } - - f, err := strconv.ParseFloat(string(cleaned), 64) - if err != nil { - return 0, unstable.NewParserError(b, "unable to parse float: %w", err) - } - - return f, nil -} - -func parseIntHex(b []byte) (int64, error) { - cleaned, err := checkAndRemoveUnderscoresIntegers(b[2:]) - if err != nil { - return 0, err - } - - i, err := strconv.ParseInt(string(cleaned), 16, 64) - if err != nil { - return 0, unstable.NewParserError(b, "couldn't parse hexadecimal number: %w", err) - } - - return i, nil -} - -func parseIntOct(b []byte) (int64, error) { - cleaned, err := checkAndRemoveUnderscoresIntegers(b[2:]) - if err != nil { - return 0, err - } - - i, err := strconv.ParseInt(string(cleaned), 8, 64) - if err != nil { - return 0, unstable.NewParserError(b, "couldn't parse octal number: %w", err) - } - - return i, nil -} - -func parseIntBin(b []byte) (int64, error) { - cleaned, err := checkAndRemoveUnderscoresIntegers(b[2:]) - if err != nil { - return 0, err - } - - i, err := strconv.ParseInt(string(cleaned), 2, 64) - if err != nil { - return 0, unstable.NewParserError(b, "couldn't parse binary number: %w", err) - } - - return i, nil -} - -func isSign(b byte) bool { - return b == '+' || b == '-' -} - -func parseIntDec(b []byte) (int64, error) { - cleaned, err := checkAndRemoveUnderscoresIntegers(b) - if err != nil { - return 0, err - } - - startIdx := 0 - - if isSign(cleaned[0]) { - startIdx++ - } - - if len(cleaned) > 
startIdx+1 && cleaned[startIdx] == '0' { - return 0, unstable.NewParserError(b, "leading zero not allowed on decimal number") - } - - i, err := strconv.ParseInt(string(cleaned), 10, 64) - if err != nil { - return 0, unstable.NewParserError(b, "couldn't parse decimal number: %w", err) - } - - return i, nil -} - -func checkAndRemoveUnderscoresIntegers(b []byte) ([]byte, error) { - start := 0 - if b[start] == '+' || b[start] == '-' { - start++ - } - - if len(b) == start { - return b, nil - } - - if b[start] == '_' { - return nil, unstable.NewParserError(b[start:start+1], "number cannot start with underscore") - } - - if b[len(b)-1] == '_' { - return nil, unstable.NewParserError(b[len(b)-1:], "number cannot end with underscore") - } - - // fast path - i := 0 - for ; i < len(b); i++ { - if b[i] == '_' { - break - } - } - if i == len(b) { - return b, nil - } - - before := false - cleaned := make([]byte, i, len(b)) - copy(cleaned, b) - - for i++; i < len(b); i++ { - c := b[i] - if c == '_' { - if !before { - return nil, unstable.NewParserError(b[i-1:i+1], "number must have at least one digit between underscores") - } - before = false - } else { - before = true - cleaned = append(cleaned, c) - } - } - - return cleaned, nil -} - -func checkAndRemoveUnderscoresFloats(b []byte) ([]byte, error) { - if b[0] == '_' { - return nil, unstable.NewParserError(b[0:1], "number cannot start with underscore") - } - - if b[len(b)-1] == '_' { - return nil, unstable.NewParserError(b[len(b)-1:], "number cannot end with underscore") - } - - // fast path - i := 0 - for ; i < len(b); i++ { - if b[i] == '_' { - break - } - } - if i == len(b) { - return b, nil - } - - before := false - cleaned := make([]byte, 0, len(b)) - - for i := 0; i < len(b); i++ { - c := b[i] - - switch c { - case '_': - if !before { - return nil, unstable.NewParserError(b[i-1:i+1], "number must have at least one digit between underscores") - } - if i < len(b)-1 && (b[i+1] == 'e' || b[i+1] == 'E') { - return nil, unstable.NewParserError(b[i+1:i+2], "cannot have underscore before exponent") - } - before = false - case '+', '-': - // signed exponents - cleaned = append(cleaned, c) - before = false - case 'e', 'E': - if i < len(b)-1 && b[i+1] == '_' { - return nil, unstable.NewParserError(b[i+1:i+2], "cannot have underscore after exponent") - } - cleaned = append(cleaned, c) - case '.': - if i < len(b)-1 && b[i+1] == '_' { - return nil, unstable.NewParserError(b[i+1:i+2], "cannot have underscore after decimal point") - } - if i > 0 && b[i-1] == '_' { - return nil, unstable.NewParserError(b[i-1:i], "cannot have underscore before decimal point") - } - cleaned = append(cleaned, c) - default: - before = true - cleaned = append(cleaned, c) - } - } - - return cleaned, nil -} - -// isValidDate checks if a provided date is a date that exists. -func isValidDate(year int, month int, day int) bool { - return month > 0 && month < 13 && day > 0 && day <= daysIn(month, year) -} - -// daysBefore[m] counts the number of days in a non-leap year -// before month m begins. There is an entry for m=12, counting -// the number of days before January of next year (365). 
-var daysBefore = [...]int32{ - 0, - 31, - 31 + 28, - 31 + 28 + 31, - 31 + 28 + 31 + 30, - 31 + 28 + 31 + 30 + 31, - 31 + 28 + 31 + 30 + 31 + 30, - 31 + 28 + 31 + 30 + 31 + 30 + 31, - 31 + 28 + 31 + 30 + 31 + 30 + 31 + 31, - 31 + 28 + 31 + 30 + 31 + 30 + 31 + 31 + 30, - 31 + 28 + 31 + 30 + 31 + 30 + 31 + 31 + 30 + 31, - 31 + 28 + 31 + 30 + 31 + 30 + 31 + 31 + 30 + 31 + 30, - 31 + 28 + 31 + 30 + 31 + 30 + 31 + 31 + 30 + 31 + 30 + 31, -} - -func daysIn(m int, year int) int { - if m == 2 && isLeap(year) { - return 29 - } - return int(daysBefore[m] - daysBefore[m-1]) -} - -func isLeap(year int) bool { - return year%4 == 0 && (year%100 != 0 || year%400 == 0) -} - -func isDigit(r byte) bool { - return r >= '0' && r <= '9' -} diff --git a/vendor/github.com/pelletier/go-toml/v2/doc.go b/vendor/github.com/pelletier/go-toml/v2/doc.go deleted file mode 100644 index b7bc599bd..000000000 --- a/vendor/github.com/pelletier/go-toml/v2/doc.go +++ /dev/null @@ -1,2 +0,0 @@ -// Package toml is a library to read and write TOML documents. -package toml diff --git a/vendor/github.com/pelletier/go-toml/v2/errors.go b/vendor/github.com/pelletier/go-toml/v2/errors.go deleted file mode 100644 index 309733f1f..000000000 --- a/vendor/github.com/pelletier/go-toml/v2/errors.go +++ /dev/null @@ -1,252 +0,0 @@ -package toml - -import ( - "fmt" - "strconv" - "strings" - - "github.com/pelletier/go-toml/v2/internal/danger" - "github.com/pelletier/go-toml/v2/unstable" -) - -// DecodeError represents an error encountered during the parsing or decoding -// of a TOML document. -// -// In addition to the error message, it contains the position in the document -// where it happened, as well as a human-readable representation that shows -// where the error occurred in the document. -type DecodeError struct { - message string - line int - column int - key Key - - human string -} - -// StrictMissingError occurs in a TOML document that does not have a -// corresponding field in the target value. It contains all the missing fields -// in Errors. -// -// Emitted by Decoder when DisallowUnknownFields() was called. -type StrictMissingError struct { - // One error per field that could not be found. - Errors []DecodeError -} - -// Error returns the canonical string for this error. -func (s *StrictMissingError) Error() string { - return "strict mode: fields in the document are missing in the target struct" -} - -// String returns a human readable description of all errors. -func (s *StrictMissingError) String() string { - var buf strings.Builder - - for i, e := range s.Errors { - if i > 0 { - buf.WriteString("\n---\n") - } - - buf.WriteString(e.String()) - } - - return buf.String() -} - -type Key []string - -// Error returns the error message contained in the DecodeError. -func (e *DecodeError) Error() string { - return "toml: " + e.message -} - -// String returns the human-readable contextualized error. This string is multi-line. -func (e *DecodeError) String() string { - return e.human -} - -// Position returns the (line, column) pair indicating where the error -// occurred in the document. Positions are 1-indexed. -func (e *DecodeError) Position() (row int, column int) { - return e.line, e.column -} - -// Key that was being processed when the error occurred. The key is present only -// if this DecodeError is part of a StrictMissingError. -func (e *DecodeError) Key() Key { - return e.key -} - -// decodeErrorFromHighlight creates a DecodeError referencing a highlighted -// range of bytes from document. 
-// -// highlight needs to be a sub-slice of document, or this function panics. -// -// The function copies all bytes used in DecodeError, so that document and -// highlight can be freely deallocated. -// -//nolint:funlen -func wrapDecodeError(document []byte, de *unstable.ParserError) *DecodeError { - offset := danger.SubsliceOffset(document, de.Highlight) - - errMessage := de.Error() - errLine, errColumn := positionAtEnd(document[:offset]) - before, after := linesOfContext(document, de.Highlight, offset, 3) - - var buf strings.Builder - - maxLine := errLine + len(after) - 1 - lineColumnWidth := len(strconv.Itoa(maxLine)) - - // Write the lines of context strictly before the error. - for i := len(before) - 1; i > 0; i-- { - line := errLine - i - buf.WriteString(formatLineNumber(line, lineColumnWidth)) - buf.WriteString("|") - - if len(before[i]) > 0 { - buf.WriteString(" ") - buf.Write(before[i]) - } - - buf.WriteRune('\n') - } - - // Write the document line that contains the error. - - buf.WriteString(formatLineNumber(errLine, lineColumnWidth)) - buf.WriteString("| ") - - if len(before) > 0 { - buf.Write(before[0]) - } - - buf.Write(de.Highlight) - - if len(after) > 0 { - buf.Write(after[0]) - } - - buf.WriteRune('\n') - - // Write the line with the error message itself (so it does not have a line - // number). - - buf.WriteString(strings.Repeat(" ", lineColumnWidth)) - buf.WriteString("| ") - - if len(before) > 0 { - buf.WriteString(strings.Repeat(" ", len(before[0]))) - } - - buf.WriteString(strings.Repeat("~", len(de.Highlight))) - - if len(errMessage) > 0 { - buf.WriteString(" ") - buf.WriteString(errMessage) - } - - // Write the lines of context strictly after the error. - - for i := 1; i < len(after); i++ { - buf.WriteRune('\n') - line := errLine + i - buf.WriteString(formatLineNumber(line, lineColumnWidth)) - buf.WriteString("|") - - if len(after[i]) > 0 { - buf.WriteString(" ") - buf.Write(after[i]) - } - } - - return &DecodeError{ - message: errMessage, - line: errLine, - column: errColumn, - key: de.Key, - human: buf.String(), - } -} - -func formatLineNumber(line int, width int) string { - format := "%" + strconv.Itoa(width) + "d" - - return fmt.Sprintf(format, line) -} - -func linesOfContext(document []byte, highlight []byte, offset int, linesAround int) ([][]byte, [][]byte) { - return beforeLines(document, offset, linesAround), afterLines(document, highlight, offset, linesAround) -} - -func beforeLines(document []byte, offset int, linesAround int) [][]byte { - var beforeLines [][]byte - - // Walk the document backward from the highlight to find previous lines - // of context. - rest := document[:offset] -backward: - for o := len(rest) - 1; o >= 0 && len(beforeLines) <= linesAround && len(rest) > 0; { - switch { - case rest[o] == '\n': - // handle individual lines - beforeLines = append(beforeLines, rest[o+1:]) - rest = rest[:o] - o = len(rest) - 1 - case o == 0: - // add the first line only if it's non-empty - beforeLines = append(beforeLines, rest) - - break backward - default: - o-- - } - } - - return beforeLines -} - -func afterLines(document []byte, highlight []byte, offset int, linesAround int) [][]byte { - var afterLines [][]byte - - // Walk the document forward from the highlight to find the following - // lines of context. 
- rest := document[offset+len(highlight):] -forward: - for o := 0; o < len(rest) && len(afterLines) <= linesAround; { - switch { - case rest[o] == '\n': - // handle individual lines - afterLines = append(afterLines, rest[:o]) - rest = rest[o+1:] - o = 0 - - case o == len(rest)-1: - // add last line only if it's non-empty - afterLines = append(afterLines, rest) - - break forward - default: - o++ - } - } - - return afterLines -} - -func positionAtEnd(b []byte) (row int, column int) { - row = 1 - column = 1 - - for _, c := range b { - if c == '\n' { - row++ - column = 1 - } else { - column++ - } - } - - return -} diff --git a/vendor/github.com/pelletier/go-toml/v2/internal/characters/ascii.go b/vendor/github.com/pelletier/go-toml/v2/internal/characters/ascii.go deleted file mode 100644 index 80f698db4..000000000 --- a/vendor/github.com/pelletier/go-toml/v2/internal/characters/ascii.go +++ /dev/null @@ -1,42 +0,0 @@ -package characters - -var invalidAsciiTable = [256]bool{ - 0x00: true, - 0x01: true, - 0x02: true, - 0x03: true, - 0x04: true, - 0x05: true, - 0x06: true, - 0x07: true, - 0x08: true, - // 0x09 TAB - // 0x0A LF - 0x0B: true, - 0x0C: true, - // 0x0D CR - 0x0E: true, - 0x0F: true, - 0x10: true, - 0x11: true, - 0x12: true, - 0x13: true, - 0x14: true, - 0x15: true, - 0x16: true, - 0x17: true, - 0x18: true, - 0x19: true, - 0x1A: true, - 0x1B: true, - 0x1C: true, - 0x1D: true, - 0x1E: true, - 0x1F: true, - // 0x20 - 0x7E Printable ASCII characters - 0x7F: true, -} - -func InvalidAscii(b byte) bool { - return invalidAsciiTable[b] -} diff --git a/vendor/github.com/pelletier/go-toml/v2/internal/characters/utf8.go b/vendor/github.com/pelletier/go-toml/v2/internal/characters/utf8.go deleted file mode 100644 index db4f45acb..000000000 --- a/vendor/github.com/pelletier/go-toml/v2/internal/characters/utf8.go +++ /dev/null @@ -1,199 +0,0 @@ -package characters - -import ( - "unicode/utf8" -) - -type utf8Err struct { - Index int - Size int -} - -func (u utf8Err) Zero() bool { - return u.Size == 0 -} - -// Verified that a given string is only made of valid UTF-8 characters allowed -// by the TOML spec: -// -// Any Unicode character may be used except those that must be escaped: -// quotation mark, backslash, and the control characters other than tab (U+0000 -// to U+0008, U+000A to U+001F, U+007F). -// -// It is a copy of the Go 1.17 utf8.Valid implementation, tweaked to exit early -// when a character is not allowed. -// -// The returned utf8Err is Zero() if the string is valid, or contains the byte -// index and size of the invalid character. -// -// quotation mark => already checked -// backslash => already checked -// 0-0x8 => invalid -// 0x9 => tab, ok -// 0xA - 0x1F => invalid -// 0x7F => invalid -func Utf8TomlValidAlreadyEscaped(p []byte) (err utf8Err) { - // Fast path. Check for and skip 8 bytes of ASCII characters per iteration. - offset := 0 - for len(p) >= 8 { - // Combining two 32 bit loads allows the same code to be used - // for 32 and 64 bit platforms. - // The compiler can generate a 32bit load for first32 and second32 - // on many platforms. See test/codegen/memcombine.go. - first32 := uint32(p[0]) | uint32(p[1])<<8 | uint32(p[2])<<16 | uint32(p[3])<<24 - second32 := uint32(p[4]) | uint32(p[5])<<8 | uint32(p[6])<<16 | uint32(p[7])<<24 - if (first32|second32)&0x80808080 != 0 { - // Found a non ASCII byte (>= RuneSelf). 
- break - } - - for i, b := range p[:8] { - if InvalidAscii(b) { - err.Index = offset + i - err.Size = 1 - return - } - } - - p = p[8:] - offset += 8 - } - n := len(p) - for i := 0; i < n; { - pi := p[i] - if pi < utf8.RuneSelf { - if InvalidAscii(pi) { - err.Index = offset + i - err.Size = 1 - return - } - i++ - continue - } - x := first[pi] - if x == xx { - // Illegal starter byte. - err.Index = offset + i - err.Size = 1 - return - } - size := int(x & 7) - if i+size > n { - // Short or invalid. - err.Index = offset + i - err.Size = n - i - return - } - accept := acceptRanges[x>>4] - if c := p[i+1]; c < accept.lo || accept.hi < c { - err.Index = offset + i - err.Size = 2 - return - } else if size == 2 { - } else if c := p[i+2]; c < locb || hicb < c { - err.Index = offset + i - err.Size = 3 - return - } else if size == 3 { - } else if c := p[i+3]; c < locb || hicb < c { - err.Index = offset + i - err.Size = 4 - return - } - i += size - } - return -} - -// Return the size of the next rune if valid, 0 otherwise. -func Utf8ValidNext(p []byte) int { - c := p[0] - - if c < utf8.RuneSelf { - if InvalidAscii(c) { - return 0 - } - return 1 - } - - x := first[c] - if x == xx { - // Illegal starter byte. - return 0 - } - size := int(x & 7) - if size > len(p) { - // Short or invalid. - return 0 - } - accept := acceptRanges[x>>4] - if c := p[1]; c < accept.lo || accept.hi < c { - return 0 - } else if size == 2 { - } else if c := p[2]; c < locb || hicb < c { - return 0 - } else if size == 3 { - } else if c := p[3]; c < locb || hicb < c { - return 0 - } - - return size -} - -// acceptRange gives the range of valid values for the second byte in a UTF-8 -// sequence. -type acceptRange struct { - lo uint8 // lowest value for second byte. - hi uint8 // highest value for second byte. -} - -// acceptRanges has size 16 to avoid bounds checks in the code that uses it. -var acceptRanges = [16]acceptRange{ - 0: {locb, hicb}, - 1: {0xA0, hicb}, - 2: {locb, 0x9F}, - 3: {0x90, hicb}, - 4: {locb, 0x8F}, -} - -// first is information about the first byte in a UTF-8 sequence. -var first = [256]uint8{ - // 1 2 3 4 5 6 7 8 9 A B C D E F - as, as, as, as, as, as, as, as, as, as, as, as, as, as, as, as, // 0x00-0x0F - as, as, as, as, as, as, as, as, as, as, as, as, as, as, as, as, // 0x10-0x1F - as, as, as, as, as, as, as, as, as, as, as, as, as, as, as, as, // 0x20-0x2F - as, as, as, as, as, as, as, as, as, as, as, as, as, as, as, as, // 0x30-0x3F - as, as, as, as, as, as, as, as, as, as, as, as, as, as, as, as, // 0x40-0x4F - as, as, as, as, as, as, as, as, as, as, as, as, as, as, as, as, // 0x50-0x5F - as, as, as, as, as, as, as, as, as, as, as, as, as, as, as, as, // 0x60-0x6F - as, as, as, as, as, as, as, as, as, as, as, as, as, as, as, as, // 0x70-0x7F - // 1 2 3 4 5 6 7 8 9 A B C D E F - xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, // 0x80-0x8F - xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, // 0x90-0x9F - xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, // 0xA0-0xAF - xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, // 0xB0-0xBF - xx, xx, s1, s1, s1, s1, s1, s1, s1, s1, s1, s1, s1, s1, s1, s1, // 0xC0-0xCF - s1, s1, s1, s1, s1, s1, s1, s1, s1, s1, s1, s1, s1, s1, s1, s1, // 0xD0-0xDF - s2, s3, s3, s3, s3, s3, s3, s3, s3, s3, s3, s3, s3, s4, s3, s3, // 0xE0-0xEF - s5, s6, s6, s6, s7, xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, // 0xF0-0xFF -} - -const ( - // The default lowest and highest continuation byte. 
- locb = 0b10000000 - hicb = 0b10111111 - - // These names of these constants are chosen to give nice alignment in the - // table below. The first nibble is an index into acceptRanges or F for - // special one-byte cases. The second nibble is the Rune length or the - // Status for the special one-byte case. - xx = 0xF1 // invalid: size 1 - as = 0xF0 // ASCII: size 1 - s1 = 0x02 // accept 0, size 2 - s2 = 0x13 // accept 1, size 3 - s3 = 0x03 // accept 0, size 3 - s4 = 0x23 // accept 2, size 3 - s5 = 0x34 // accept 3, size 4 - s6 = 0x04 // accept 0, size 4 - s7 = 0x44 // accept 4, size 4 -) diff --git a/vendor/github.com/pelletier/go-toml/v2/internal/danger/danger.go b/vendor/github.com/pelletier/go-toml/v2/internal/danger/danger.go deleted file mode 100644 index e38e1131b..000000000 --- a/vendor/github.com/pelletier/go-toml/v2/internal/danger/danger.go +++ /dev/null @@ -1,65 +0,0 @@ -package danger - -import ( - "fmt" - "reflect" - "unsafe" -) - -const maxInt = uintptr(int(^uint(0) >> 1)) - -func SubsliceOffset(data []byte, subslice []byte) int { - datap := (*reflect.SliceHeader)(unsafe.Pointer(&data)) - hlp := (*reflect.SliceHeader)(unsafe.Pointer(&subslice)) - - if hlp.Data < datap.Data { - panic(fmt.Errorf("subslice address (%d) is before data address (%d)", hlp.Data, datap.Data)) - } - offset := hlp.Data - datap.Data - - if offset > maxInt { - panic(fmt.Errorf("slice offset larger than int (%d)", offset)) - } - - intoffset := int(offset) - - if intoffset > datap.Len { - panic(fmt.Errorf("slice offset (%d) is farther than data length (%d)", intoffset, datap.Len)) - } - - if intoffset+hlp.Len > datap.Len { - panic(fmt.Errorf("slice ends (%d+%d) is farther than data length (%d)", intoffset, hlp.Len, datap.Len)) - } - - return intoffset -} - -func BytesRange(start []byte, end []byte) []byte { - if start == nil || end == nil { - panic("cannot call BytesRange with nil") - } - startp := (*reflect.SliceHeader)(unsafe.Pointer(&start)) - endp := (*reflect.SliceHeader)(unsafe.Pointer(&end)) - - if startp.Data > endp.Data { - panic(fmt.Errorf("start pointer address (%d) is after end pointer address (%d)", startp.Data, endp.Data)) - } - - l := startp.Len - endLen := int(endp.Data-startp.Data) + endp.Len - if endLen > l { - l = endLen - } - - if l > startp.Cap { - panic(fmt.Errorf("range length is larger than capacity")) - } - - return start[:l] -} - -func Stride(ptr unsafe.Pointer, size uintptr, offset int) unsafe.Pointer { - // TODO: replace with unsafe.Add when Go 1.17 is released - // https://github.com/golang/go/issues/40481 - return unsafe.Pointer(uintptr(ptr) + uintptr(int(size)*offset)) -} diff --git a/vendor/github.com/pelletier/go-toml/v2/internal/danger/typeid.go b/vendor/github.com/pelletier/go-toml/v2/internal/danger/typeid.go deleted file mode 100644 index 9d41c28a2..000000000 --- a/vendor/github.com/pelletier/go-toml/v2/internal/danger/typeid.go +++ /dev/null @@ -1,23 +0,0 @@ -package danger - -import ( - "reflect" - "unsafe" -) - -// typeID is used as key in encoder and decoder caches to enable using -// the optimize runtime.mapaccess2_fast64 function instead of the more -// expensive lookup if we were to use reflect.Type as map key. -// -// typeID holds the pointer to the reflect.Type value, which is unique -// in the program. 
-// -// https://github.com/segmentio/encoding/blob/master/json/codec.go#L59-L61 -type TypeID unsafe.Pointer - -func MakeTypeID(t reflect.Type) TypeID { - // reflect.Type has the fields: - // typ unsafe.Pointer - // ptr unsafe.Pointer - return TypeID((*[2]unsafe.Pointer)(unsafe.Pointer(&t))[1]) -} diff --git a/vendor/github.com/pelletier/go-toml/v2/internal/tracker/key.go b/vendor/github.com/pelletier/go-toml/v2/internal/tracker/key.go deleted file mode 100644 index 149b17f53..000000000 --- a/vendor/github.com/pelletier/go-toml/v2/internal/tracker/key.go +++ /dev/null @@ -1,48 +0,0 @@ -package tracker - -import "github.com/pelletier/go-toml/v2/unstable" - -// KeyTracker is a tracker that keeps track of the current Key as the AST is -// walked. -type KeyTracker struct { - k []string -} - -// UpdateTable sets the state of the tracker with the AST table node. -func (t *KeyTracker) UpdateTable(node *unstable.Node) { - t.reset() - t.Push(node) -} - -// UpdateArrayTable sets the state of the tracker with the AST array table node. -func (t *KeyTracker) UpdateArrayTable(node *unstable.Node) { - t.reset() - t.Push(node) -} - -// Push the given key on the stack. -func (t *KeyTracker) Push(node *unstable.Node) { - it := node.Key() - for it.Next() { - t.k = append(t.k, string(it.Node().Data)) - } -} - -// Pop key from stack. -func (t *KeyTracker) Pop(node *unstable.Node) { - it := node.Key() - for it.Next() { - t.k = t.k[:len(t.k)-1] - } -} - -// Key returns the current key -func (t *KeyTracker) Key() []string { - k := make([]string, len(t.k)) - copy(k, t.k) - return k -} - -func (t *KeyTracker) reset() { - t.k = t.k[:0] -} diff --git a/vendor/github.com/pelletier/go-toml/v2/internal/tracker/seen.go b/vendor/github.com/pelletier/go-toml/v2/internal/tracker/seen.go deleted file mode 100644 index 40e23f830..000000000 --- a/vendor/github.com/pelletier/go-toml/v2/internal/tracker/seen.go +++ /dev/null @@ -1,356 +0,0 @@ -package tracker - -import ( - "bytes" - "fmt" - "sync" - - "github.com/pelletier/go-toml/v2/unstable" -) - -type keyKind uint8 - -const ( - invalidKind keyKind = iota - valueKind - tableKind - arrayTableKind -) - -func (k keyKind) String() string { - switch k { - case invalidKind: - return "invalid" - case valueKind: - return "value" - case tableKind: - return "table" - case arrayTableKind: - return "array table" - } - panic("missing keyKind string mapping") -} - -// SeenTracker tracks which keys have been seen with which TOML type to flag -// duplicates and mismatches according to the spec. -// -// Each node in the visited tree is represented by an entry. Each entry has an -// identifier, which is provided by a counter. Entries are stored in the array -// entries. As new nodes are discovered (referenced for the first time in the -// TOML document), entries are created and appended to the array. An entry -// points to its parent using its id. -// -// To find whether a given key (sequence of []byte) has already been visited, -// the entries are linearly searched, looking for one with the right name and -// parent id. -// -// Given that all keys appear in the document after their parent, it is -// guaranteed that all descendants of a node are stored after the node, this -// speeds up the search process. -// -// When encountering [[array tables]], the descendants of that node are removed -// to allow that branch of the tree to be "rediscovered". To maintain the -// invariant above, the deletion process needs to keep the order of entries. -// This results in more copies in that case. 
-type SeenTracker struct { - entries []entry - currentIdx int -} - -var pool sync.Pool - -func (s *SeenTracker) reset() { - // Always contains a root element at index 0. - s.currentIdx = 0 - if len(s.entries) == 0 { - s.entries = make([]entry, 1, 2) - } else { - s.entries = s.entries[:1] - } - s.entries[0].child = -1 - s.entries[0].next = -1 -} - -type entry struct { - // Use -1 to indicate no child or no sibling. - child int - next int - - name []byte - kind keyKind - explicit bool - kv bool -} - -// Find the index of the child of parentIdx with key k. Returns -1 if -// it does not exist. -func (s *SeenTracker) find(parentIdx int, k []byte) int { - for i := s.entries[parentIdx].child; i >= 0; i = s.entries[i].next { - if bytes.Equal(s.entries[i].name, k) { - return i - } - } - return -1 -} - -// Remove all descendants of node at position idx. -func (s *SeenTracker) clear(idx int) { - if idx >= len(s.entries) { - return - } - - for i := s.entries[idx].child; i >= 0; { - next := s.entries[i].next - n := s.entries[0].next - s.entries[0].next = i - s.entries[i].next = n - s.entries[i].name = nil - s.clear(i) - i = next - } - - s.entries[idx].child = -1 -} - -func (s *SeenTracker) create(parentIdx int, name []byte, kind keyKind, explicit bool, kv bool) int { - e := entry{ - child: -1, - next: s.entries[parentIdx].child, - - name: name, - kind: kind, - explicit: explicit, - kv: kv, - } - var idx int - if s.entries[0].next >= 0 { - idx = s.entries[0].next - s.entries[0].next = s.entries[idx].next - s.entries[idx] = e - } else { - idx = len(s.entries) - s.entries = append(s.entries, e) - } - - s.entries[parentIdx].child = idx - - return idx -} - -func (s *SeenTracker) setExplicitFlag(parentIdx int) { - for i := s.entries[parentIdx].child; i >= 0; i = s.entries[i].next { - if s.entries[i].kv { - s.entries[i].explicit = true - s.entries[i].kv = false - } - s.setExplicitFlag(i) - } -} - -// CheckExpression takes a top-level node and checks that it does not contain -// keys that have been seen in previous calls, and validates that types are -// consistent. -func (s *SeenTracker) CheckExpression(node *unstable.Node) error { - if s.entries == nil { - s.reset() - } - switch node.Kind { - case unstable.KeyValue: - return s.checkKeyValue(node) - case unstable.Table: - return s.checkTable(node) - case unstable.ArrayTable: - return s.checkArrayTable(node) - default: - panic(fmt.Errorf("this should not be a top level node type: %s", node.Kind)) - } -} - -func (s *SeenTracker) checkTable(node *unstable.Node) error { - if s.currentIdx >= 0 { - s.setExplicitFlag(s.currentIdx) - } - - it := node.Key() - - parentIdx := 0 - - // This code is duplicated in checkArrayTable. This is because factoring - // it in a function requires to copy the iterator, or allocate it to the - // heap, which is not cheap. 
- for it.Next() { - if it.IsLast() { - break - } - - k := it.Node().Data - - idx := s.find(parentIdx, k) - - if idx < 0 { - idx = s.create(parentIdx, k, tableKind, false, false) - } else { - entry := s.entries[idx] - if entry.kind == valueKind { - return fmt.Errorf("toml: expected %s to be a table, not a %s", string(k), entry.kind) - } - } - parentIdx = idx - } - - k := it.Node().Data - idx := s.find(parentIdx, k) - - if idx >= 0 { - kind := s.entries[idx].kind - if kind != tableKind { - return fmt.Errorf("toml: key %s should be a table, not a %s", string(k), kind) - } - if s.entries[idx].explicit { - return fmt.Errorf("toml: table %s already exists", string(k)) - } - s.entries[idx].explicit = true - } else { - idx = s.create(parentIdx, k, tableKind, true, false) - } - - s.currentIdx = idx - - return nil -} - -func (s *SeenTracker) checkArrayTable(node *unstable.Node) error { - if s.currentIdx >= 0 { - s.setExplicitFlag(s.currentIdx) - } - - it := node.Key() - - parentIdx := 0 - - for it.Next() { - if it.IsLast() { - break - } - - k := it.Node().Data - - idx := s.find(parentIdx, k) - - if idx < 0 { - idx = s.create(parentIdx, k, tableKind, false, false) - } else { - entry := s.entries[idx] - if entry.kind == valueKind { - return fmt.Errorf("toml: expected %s to be a table, not a %s", string(k), entry.kind) - } - } - - parentIdx = idx - } - - k := it.Node().Data - idx := s.find(parentIdx, k) - - if idx >= 0 { - kind := s.entries[idx].kind - if kind != arrayTableKind { - return fmt.Errorf("toml: key %s already exists as a %s, but should be an array table", kind, string(k)) - } - s.clear(idx) - } else { - idx = s.create(parentIdx, k, arrayTableKind, true, false) - } - - s.currentIdx = idx - - return nil -} - -func (s *SeenTracker) checkKeyValue(node *unstable.Node) error { - parentIdx := s.currentIdx - it := node.Key() - - for it.Next() { - k := it.Node().Data - - idx := s.find(parentIdx, k) - - if idx < 0 { - idx = s.create(parentIdx, k, tableKind, false, true) - } else { - entry := s.entries[idx] - if it.IsLast() { - return fmt.Errorf("toml: key %s is already defined", string(k)) - } else if entry.kind != tableKind { - return fmt.Errorf("toml: expected %s to be a table, not a %s", string(k), entry.kind) - } else if entry.explicit { - return fmt.Errorf("toml: cannot redefine table %s that has already been explicitly defined", string(k)) - } - } - - parentIdx = idx - } - - s.entries[parentIdx].kind = valueKind - - value := node.Value() - - switch value.Kind { - case unstable.InlineTable: - return s.checkInlineTable(value) - case unstable.Array: - return s.checkArray(value) - } - - return nil -} - -func (s *SeenTracker) checkArray(node *unstable.Node) error { - it := node.Children() - for it.Next() { - n := it.Node() - switch n.Kind { - case unstable.InlineTable: - err := s.checkInlineTable(n) - if err != nil { - return err - } - case unstable.Array: - err := s.checkArray(n) - if err != nil { - return err - } - } - } - return nil -} - -func (s *SeenTracker) checkInlineTable(node *unstable.Node) error { - if pool.New == nil { - pool.New = func() interface{} { - return &SeenTracker{} - } - } - - s = pool.Get().(*SeenTracker) - s.reset() - - it := node.Children() - for it.Next() { - n := it.Node() - err := s.checkKeyValue(n) - if err != nil { - return err - } - } - - // As inline tables are self-contained, the tracker does not - // need to retain the details of what they contain. 
The - // keyValue element that creates the inline table is kept to - // mark the presence of the inline table and prevent - // redefinition of its keys: check* functions cannot walk into - // a value. - pool.Put(s) - return nil -} diff --git a/vendor/github.com/pelletier/go-toml/v2/internal/tracker/tracker.go b/vendor/github.com/pelletier/go-toml/v2/internal/tracker/tracker.go deleted file mode 100644 index bf0317392..000000000 --- a/vendor/github.com/pelletier/go-toml/v2/internal/tracker/tracker.go +++ /dev/null @@ -1 +0,0 @@ -package tracker diff --git a/vendor/github.com/pelletier/go-toml/v2/localtime.go b/vendor/github.com/pelletier/go-toml/v2/localtime.go deleted file mode 100644 index a856bfdb0..000000000 --- a/vendor/github.com/pelletier/go-toml/v2/localtime.go +++ /dev/null @@ -1,122 +0,0 @@ -package toml - -import ( - "fmt" - "strings" - "time" - - "github.com/pelletier/go-toml/v2/unstable" -) - -// LocalDate represents a calendar day in no specific timezone. -type LocalDate struct { - Year int - Month int - Day int -} - -// AsTime converts d into a specific time instance at midnight in zone. -func (d LocalDate) AsTime(zone *time.Location) time.Time { - return time.Date(d.Year, time.Month(d.Month), d.Day, 0, 0, 0, 0, zone) -} - -// String returns RFC 3339 representation of d. -func (d LocalDate) String() string { - return fmt.Sprintf("%04d-%02d-%02d", d.Year, d.Month, d.Day) -} - -// MarshalText returns RFC 3339 representation of d. -func (d LocalDate) MarshalText() ([]byte, error) { - return []byte(d.String()), nil -} - -// UnmarshalText parses b using RFC 3339 to fill d. -func (d *LocalDate) UnmarshalText(b []byte) error { - res, err := parseLocalDate(b) - if err != nil { - return err - } - *d = res - return nil -} - -// LocalTime represents a time of day of no specific day in no specific -// timezone. -type LocalTime struct { - Hour int // Hour of the day: [0; 24[ - Minute int // Minute of the hour: [0; 60[ - Second int // Second of the minute: [0; 60[ - Nanosecond int // Nanoseconds within the second: [0, 1000000000[ - Precision int // Number of digits to display for Nanosecond. -} - -// String returns RFC 3339 representation of d. -// If d.Nanosecond and d.Precision are zero, the time won't have a nanosecond -// component. If d.Nanosecond > 0 but d.Precision = 0, then the minimum number -// of digits for nanoseconds is provided. -func (d LocalTime) String() string { - s := fmt.Sprintf("%02d:%02d:%02d", d.Hour, d.Minute, d.Second) - - if d.Precision > 0 { - s += fmt.Sprintf(".%09d", d.Nanosecond)[:d.Precision+1] - } else if d.Nanosecond > 0 { - // Nanoseconds are specified, but precision is not provided. Use the - // minimum. - s += strings.Trim(fmt.Sprintf(".%09d", d.Nanosecond), "0") - } - - return s -} - -// MarshalText returns RFC 3339 representation of d. -func (d LocalTime) MarshalText() ([]byte, error) { - return []byte(d.String()), nil -} - -// UnmarshalText parses b using RFC 3339 to fill d. -func (d *LocalTime) UnmarshalText(b []byte) error { - res, left, err := parseLocalTime(b) - if err == nil && len(left) != 0 { - err = unstable.NewParserError(left, "extra characters") - } - if err != nil { - return err - } - *d = res - return nil -} - -// LocalDateTime represents a time of a specific day in no specific timezone. -type LocalDateTime struct { - LocalDate - LocalTime -} - -// AsTime converts d into a specific time instance in zone. 
-func (d LocalDateTime) AsTime(zone *time.Location) time.Time { - return time.Date(d.Year, time.Month(d.Month), d.Day, d.Hour, d.Minute, d.Second, d.Nanosecond, zone) -} - -// String returns RFC 3339 representation of d. -func (d LocalDateTime) String() string { - return d.LocalDate.String() + "T" + d.LocalTime.String() -} - -// MarshalText returns RFC 3339 representation of d. -func (d LocalDateTime) MarshalText() ([]byte, error) { - return []byte(d.String()), nil -} - -// UnmarshalText parses b using RFC 3339 to fill d. -func (d *LocalDateTime) UnmarshalText(data []byte) error { - res, left, err := parseLocalDateTime(data) - if err == nil && len(left) != 0 { - err = unstable.NewParserError(left, "extra characters") - } - if err != nil { - return err - } - - *d = res - return nil -} diff --git a/vendor/github.com/pelletier/go-toml/v2/marshaler.go b/vendor/github.com/pelletier/go-toml/v2/marshaler.go deleted file mode 100644 index 6fe78533c..000000000 --- a/vendor/github.com/pelletier/go-toml/v2/marshaler.go +++ /dev/null @@ -1,1090 +0,0 @@ -package toml - -import ( - "bytes" - "encoding" - "fmt" - "io" - "math" - "reflect" - "sort" - "strconv" - "strings" - "time" - "unicode" - - "github.com/pelletier/go-toml/v2/internal/characters" -) - -// Marshal serializes a Go value as a TOML document. -// -// It is a shortcut for Encoder.Encode() with the default options. -func Marshal(v interface{}) ([]byte, error) { - var buf bytes.Buffer - enc := NewEncoder(&buf) - - err := enc.Encode(v) - if err != nil { - return nil, err - } - - return buf.Bytes(), nil -} - -// Encoder writes a TOML document to an output stream. -type Encoder struct { - // output - w io.Writer - - // global settings - tablesInline bool - arraysMultiline bool - indentSymbol string - indentTables bool -} - -// NewEncoder returns a new Encoder that writes to w. -func NewEncoder(w io.Writer) *Encoder { - return &Encoder{ - w: w, - indentSymbol: " ", - } -} - -// SetTablesInline forces the encoder to emit all tables inline. -// -// This behavior can be controlled on an individual struct field basis with the -// inline tag: -// -// MyField `toml:",inline"` -func (enc *Encoder) SetTablesInline(inline bool) *Encoder { - enc.tablesInline = inline - return enc -} - -// SetArraysMultiline forces the encoder to emit all arrays with one element per -// line. -// -// This behavior can be controlled on an individual struct field basis with the multiline tag: -// -// MyField `multiline:"true"` -func (enc *Encoder) SetArraysMultiline(multiline bool) *Encoder { - enc.arraysMultiline = multiline - return enc -} - -// SetIndentSymbol defines the string that should be used for indentation. The -// provided string is repeated for each indentation level. Defaults to two -// spaces. -func (enc *Encoder) SetIndentSymbol(s string) *Encoder { - enc.indentSymbol = s - return enc -} - -// SetIndentTables forces the encoder to intent tables and array tables. -func (enc *Encoder) SetIndentTables(indent bool) *Encoder { - enc.indentTables = indent - return enc -} - -// Encode writes a TOML representation of v to the stream. -// -// If v cannot be represented to TOML it returns an error. -// -// # Encoding rules -// -// A top level slice containing only maps or structs is encoded as [[table -// array]]. -// -// All slices not matching rule 1 are encoded as [array]. As a result, any map -// or struct they contain is encoded as an {inline table}. -// -// Nil interfaces and nil pointers are not supported. -// -// Keys in key-values always have one part. 
-// -// Intermediate tables are always printed. -// -// By default, strings are encoded as literal string, unless they contain either -// a newline character or a single quote. In that case they are emitted as -// quoted strings. -// -// Unsigned integers larger than math.MaxInt64 cannot be encoded. Doing so -// results in an error. This rule exists because the TOML specification only -// requires parsers to support at least the 64 bits integer range. Allowing -// larger numbers would create non-standard TOML documents, which may not be -// readable (at best) by other implementations. To encode such numbers, a -// solution is a custom type that implements encoding.TextMarshaler. -// -// When encoding structs, fields are encoded in order of definition, with their -// exact name. -// -// Tables and array tables are separated by empty lines. However, consecutive -// subtables definitions are not. For example: -// -// [top1] -// -// [top2] -// [top2.child1] -// -// [[array]] -// -// [[array]] -// [array.child2] -// -// # Struct tags -// -// The encoding of each public struct field can be customized by the format -// string in the "toml" key of the struct field's tag. This follows -// encoding/json's convention. The format string starts with the name of the -// field, optionally followed by a comma-separated list of options. The name may -// be empty in order to provide options without overriding the default name. -// -// The "multiline" option emits strings as quoted multi-line TOML strings. It -// has no effect on fields that would not be encoded as strings. -// -// The "inline" option turns fields that would be emitted as tables into inline -// tables instead. It has no effect on other fields. -// -// The "omitempty" option prevents empty values or groups from being emitted. -// -// The "commented" option prefixes the value and all its children with a comment -// symbol. -// -// In addition to the "toml" tag struct tag, a "comment" tag can be used to emit -// a TOML comment before the value being annotated. Comments are ignored inside -// inline tables. For array tables, the comment is only present before the first -// element of the array. -func (enc *Encoder) Encode(v interface{}) error { - var ( - b []byte - ctx encoderCtx - ) - - ctx.inline = enc.tablesInline - - if v == nil { - return fmt.Errorf("toml: cannot encode a nil interface") - } - - b, err := enc.encode(b, ctx, reflect.ValueOf(v)) - if err != nil { - return err - } - - _, err = enc.w.Write(b) - if err != nil { - return fmt.Errorf("toml: cannot write: %w", err) - } - - return nil -} - -type valueOptions struct { - multiline bool - omitempty bool - commented bool - comment string -} - -type encoderCtx struct { - // Current top-level key. - parentKey []string - - // Key that should be used for a KV. - key string - // Extra flag to account for the empty string - hasKey bool - - // Set to true to indicate that the encoder is inside a KV, so that all - // tables need to be inlined. - insideKv bool - - // Set to true to skip the first table header in an array table. - skipTableHeader bool - - // Should the next table be encoded as inline - inline bool - - // Indentation level - indent int - - // Prefix the current value with a comment. 
- commented bool - - // Options coming from struct tags - options valueOptions -} - -func (ctx *encoderCtx) shiftKey() { - if ctx.hasKey { - ctx.parentKey = append(ctx.parentKey, ctx.key) - ctx.clearKey() - } -} - -func (ctx *encoderCtx) setKey(k string) { - ctx.key = k - ctx.hasKey = true -} - -func (ctx *encoderCtx) clearKey() { - ctx.key = "" - ctx.hasKey = false -} - -func (ctx *encoderCtx) isRoot() bool { - return len(ctx.parentKey) == 0 && !ctx.hasKey -} - -func (enc *Encoder) encode(b []byte, ctx encoderCtx, v reflect.Value) ([]byte, error) { - i := v.Interface() - - switch x := i.(type) { - case time.Time: - if x.Nanosecond() > 0 { - return x.AppendFormat(b, time.RFC3339Nano), nil - } - return x.AppendFormat(b, time.RFC3339), nil - case LocalTime: - return append(b, x.String()...), nil - case LocalDate: - return append(b, x.String()...), nil - case LocalDateTime: - return append(b, x.String()...), nil - } - - hasTextMarshaler := v.Type().Implements(textMarshalerType) - if hasTextMarshaler || (v.CanAddr() && reflect.PtrTo(v.Type()).Implements(textMarshalerType)) { - if !hasTextMarshaler { - v = v.Addr() - } - - if ctx.isRoot() { - return nil, fmt.Errorf("toml: type %s implementing the TextMarshaler interface cannot be a root element", v.Type()) - } - - text, err := v.Interface().(encoding.TextMarshaler).MarshalText() - if err != nil { - return nil, err - } - - b = enc.encodeString(b, string(text), ctx.options) - - return b, nil - } - - switch v.Kind() { - // containers - case reflect.Map: - return enc.encodeMap(b, ctx, v) - case reflect.Struct: - return enc.encodeStruct(b, ctx, v) - case reflect.Slice, reflect.Array: - return enc.encodeSlice(b, ctx, v) - case reflect.Interface: - if v.IsNil() { - return nil, fmt.Errorf("toml: encoding a nil interface is not supported") - } - - return enc.encode(b, ctx, v.Elem()) - case reflect.Ptr: - if v.IsNil() { - return enc.encode(b, ctx, reflect.Zero(v.Type().Elem())) - } - - return enc.encode(b, ctx, v.Elem()) - - // values - case reflect.String: - b = enc.encodeString(b, v.String(), ctx.options) - case reflect.Float32: - f := v.Float() - - if math.IsNaN(f) { - b = append(b, "nan"...) - } else if f > math.MaxFloat32 { - b = append(b, "inf"...) - } else if f < -math.MaxFloat32 { - b = append(b, "-inf"...) - } else if math.Trunc(f) == f { - b = strconv.AppendFloat(b, f, 'f', 1, 32) - } else { - b = strconv.AppendFloat(b, f, 'f', -1, 32) - } - case reflect.Float64: - f := v.Float() - if math.IsNaN(f) { - b = append(b, "nan"...) - } else if f > math.MaxFloat64 { - b = append(b, "inf"...) - } else if f < -math.MaxFloat64 { - b = append(b, "-inf"...) - } else if math.Trunc(f) == f { - b = strconv.AppendFloat(b, f, 'f', 1, 64) - } else { - b = strconv.AppendFloat(b, f, 'f', -1, 64) - } - case reflect.Bool: - if v.Bool() { - b = append(b, "true"...) - } else { - b = append(b, "false"...) 
- } - case reflect.Uint64, reflect.Uint32, reflect.Uint16, reflect.Uint8, reflect.Uint: - x := v.Uint() - if x > uint64(math.MaxInt64) { - return nil, fmt.Errorf("toml: not encoding uint (%d) greater than max int64 (%d)", x, int64(math.MaxInt64)) - } - b = strconv.AppendUint(b, x, 10) - case reflect.Int64, reflect.Int32, reflect.Int16, reflect.Int8, reflect.Int: - b = strconv.AppendInt(b, v.Int(), 10) - default: - return nil, fmt.Errorf("toml: cannot encode value of type %s", v.Kind()) - } - - return b, nil -} - -func isNil(v reflect.Value) bool { - switch v.Kind() { - case reflect.Ptr, reflect.Interface, reflect.Map: - return v.IsNil() - default: - return false - } -} - -func shouldOmitEmpty(options valueOptions, v reflect.Value) bool { - return options.omitempty && isEmptyValue(v) -} - -func (enc *Encoder) encodeKv(b []byte, ctx encoderCtx, options valueOptions, v reflect.Value) ([]byte, error) { - var err error - - if !ctx.inline { - b = enc.encodeComment(ctx.indent, options.comment, b) - b = enc.commented(ctx.commented, b) - b = enc.indent(ctx.indent, b) - } - - b = enc.encodeKey(b, ctx.key) - b = append(b, " = "...) - - // create a copy of the context because the value of a KV shouldn't - // modify the global context. - subctx := ctx - subctx.insideKv = true - subctx.shiftKey() - subctx.options = options - - b, err = enc.encode(b, subctx, v) - if err != nil { - return nil, err - } - - return b, nil -} - -func (enc *Encoder) commented(commented bool, b []byte) []byte { - if commented { - return append(b, "# "...) - } - return b -} - -func isEmptyValue(v reflect.Value) bool { - switch v.Kind() { - case reflect.Struct: - return isEmptyStruct(v) - case reflect.Array, reflect.Map, reflect.Slice, reflect.String: - return v.Len() == 0 - case reflect.Bool: - return !v.Bool() - case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: - return v.Int() == 0 - case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr: - return v.Uint() == 0 - case reflect.Float32, reflect.Float64: - return v.Float() == 0 - case reflect.Interface, reflect.Ptr: - return v.IsNil() - } - return false -} - -func isEmptyStruct(v reflect.Value) bool { - // TODO: merge with walkStruct and cache. - typ := v.Type() - for i := 0; i < typ.NumField(); i++ { - fieldType := typ.Field(i) - - // only consider exported fields - if fieldType.PkgPath != "" { - continue - } - - tag := fieldType.Tag.Get("toml") - - // special field name to skip field - if tag == "-" { - continue - } - - f := v.Field(i) - - if !isEmptyValue(f) { - return false - } - } - - return true -} - -const literalQuote = '\'' - -func (enc *Encoder) encodeString(b []byte, v string, options valueOptions) []byte { - if needsQuoting(v) { - return enc.encodeQuotedString(options.multiline, b, v) - } - - return enc.encodeLiteralString(b, v) -} - -func needsQuoting(v string) bool { - // TODO: vectorize - for _, b := range []byte(v) { - if b == '\'' || b == '\r' || b == '\n' || characters.InvalidAscii(b) { - return true - } - } - return false -} - -// caller should have checked that the string does not contain new lines or ' . -func (enc *Encoder) encodeLiteralString(b []byte, v string) []byte { - b = append(b, literalQuote) - b = append(b, v...) - b = append(b, literalQuote) - - return b -} - -func (enc *Encoder) encodeQuotedString(multiline bool, b []byte, v string) []byte { - stringQuote := `"` - - if multiline { - stringQuote = `"""` - } - - b = append(b, stringQuote...) 
- if multiline { - b = append(b, '\n') - } - - const ( - hextable = "0123456789ABCDEF" - // U+0000 to U+0008, U+000A to U+001F, U+007F - nul = 0x0 - bs = 0x8 - lf = 0xa - us = 0x1f - del = 0x7f - ) - - for _, r := range []byte(v) { - switch r { - case '\\': - b = append(b, `\\`...) - case '"': - b = append(b, `\"`...) - case '\b': - b = append(b, `\b`...) - case '\f': - b = append(b, `\f`...) - case '\n': - if multiline { - b = append(b, r) - } else { - b = append(b, `\n`...) - } - case '\r': - b = append(b, `\r`...) - case '\t': - b = append(b, `\t`...) - default: - switch { - case r >= nul && r <= bs, r >= lf && r <= us, r == del: - b = append(b, `\u00`...) - b = append(b, hextable[r>>4]) - b = append(b, hextable[r&0x0f]) - default: - b = append(b, r) - } - } - } - - b = append(b, stringQuote...) - - return b -} - -// caller should have checked that the string is in A-Z / a-z / 0-9 / - / _ . -func (enc *Encoder) encodeUnquotedKey(b []byte, v string) []byte { - return append(b, v...) -} - -func (enc *Encoder) encodeTableHeader(ctx encoderCtx, b []byte) ([]byte, error) { - if len(ctx.parentKey) == 0 { - return b, nil - } - - b = enc.encodeComment(ctx.indent, ctx.options.comment, b) - - b = enc.commented(ctx.commented, b) - - b = enc.indent(ctx.indent, b) - - b = append(b, '[') - - b = enc.encodeKey(b, ctx.parentKey[0]) - - for _, k := range ctx.parentKey[1:] { - b = append(b, '.') - b = enc.encodeKey(b, k) - } - - b = append(b, "]\n"...) - - return b, nil -} - -//nolint:cyclop -func (enc *Encoder) encodeKey(b []byte, k string) []byte { - needsQuotation := false - cannotUseLiteral := false - - if len(k) == 0 { - return append(b, "''"...) - } - - for _, c := range k { - if (c >= 'A' && c <= 'Z') || (c >= 'a' && c <= 'z') || (c >= '0' && c <= '9') || c == '-' || c == '_' { - continue - } - - if c == literalQuote { - cannotUseLiteral = true - } - - needsQuotation = true - } - - if needsQuotation && needsQuoting(k) { - cannotUseLiteral = true - } - - switch { - case cannotUseLiteral: - return enc.encodeQuotedString(false, b, k) - case needsQuotation: - return enc.encodeLiteralString(b, k) - default: - return enc.encodeUnquotedKey(b, k) - } -} - -func (enc *Encoder) keyToString(k reflect.Value) (string, error) { - keyType := k.Type() - switch { - case keyType.Kind() == reflect.String: - return k.String(), nil - - case keyType.Implements(textMarshalerType): - keyB, err := k.Interface().(encoding.TextMarshaler).MarshalText() - if err != nil { - return "", fmt.Errorf("toml: error marshalling key %v from text: %w", k, err) - } - return string(keyB), nil - } - return "", fmt.Errorf("toml: type %s is not supported as a map key", keyType.Kind()) -} - -func (enc *Encoder) encodeMap(b []byte, ctx encoderCtx, v reflect.Value) ([]byte, error) { - var ( - t table - emptyValueOptions valueOptions - ) - - iter := v.MapRange() - for iter.Next() { - v := iter.Value() - - if isNil(v) { - continue - } - - k, err := enc.keyToString(iter.Key()) - if err != nil { - return nil, err - } - - if willConvertToTableOrArrayTable(ctx, v) { - t.pushTable(k, v, emptyValueOptions) - } else { - t.pushKV(k, v, emptyValueOptions) - } - } - - sortEntriesByKey(t.kvs) - sortEntriesByKey(t.tables) - - return enc.encodeTable(b, ctx, t) -} - -func sortEntriesByKey(e []entry) { - sort.Slice(e, func(i, j int) bool { - return e[i].Key < e[j].Key - }) -} - -type entry struct { - Key string - Value reflect.Value - Options valueOptions -} - -type table struct { - kvs []entry - tables []entry -} - -func (t *table) pushKV(k string, v 
reflect.Value, options valueOptions) { - for _, e := range t.kvs { - if e.Key == k { - return - } - } - - t.kvs = append(t.kvs, entry{Key: k, Value: v, Options: options}) -} - -func (t *table) pushTable(k string, v reflect.Value, options valueOptions) { - for _, e := range t.tables { - if e.Key == k { - return - } - } - t.tables = append(t.tables, entry{Key: k, Value: v, Options: options}) -} - -func walkStruct(ctx encoderCtx, t *table, v reflect.Value) { - // TODO: cache this - typ := v.Type() - for i := 0; i < typ.NumField(); i++ { - fieldType := typ.Field(i) - - // only consider exported fields - if fieldType.PkgPath != "" { - continue - } - - tag := fieldType.Tag.Get("toml") - - // special field name to skip field - if tag == "-" { - continue - } - - k, opts := parseTag(tag) - if !isValidName(k) { - k = "" - } - - f := v.Field(i) - - if k == "" { - if fieldType.Anonymous { - if fieldType.Type.Kind() == reflect.Struct { - walkStruct(ctx, t, f) - } - continue - } else { - k = fieldType.Name - } - } - - if isNil(f) { - continue - } - - options := valueOptions{ - multiline: opts.multiline, - omitempty: opts.omitempty, - commented: opts.commented, - comment: fieldType.Tag.Get("comment"), - } - - if opts.inline || !willConvertToTableOrArrayTable(ctx, f) { - t.pushKV(k, f, options) - } else { - t.pushTable(k, f, options) - } - } -} - -func (enc *Encoder) encodeStruct(b []byte, ctx encoderCtx, v reflect.Value) ([]byte, error) { - var t table - - walkStruct(ctx, &t, v) - - return enc.encodeTable(b, ctx, t) -} - -func (enc *Encoder) encodeComment(indent int, comment string, b []byte) []byte { - for len(comment) > 0 { - var line string - idx := strings.IndexByte(comment, '\n') - if idx >= 0 { - line = comment[:idx] - comment = comment[idx+1:] - } else { - line = comment - comment = "" - } - b = enc.indent(indent, b) - b = append(b, "# "...) - b = append(b, line...) - b = append(b, '\n') - } - return b -} - -func isValidName(s string) bool { - if s == "" { - return false - } - for _, c := range s { - switch { - case strings.ContainsRune("!#$%&()*+-./:;<=>?@[]^_{|}~ ", c): - // Backslash and quote chars are reserved, but - // otherwise any punctuation chars are allowed - // in a tag name. 
- case !unicode.IsLetter(c) && !unicode.IsDigit(c): - return false - } - } - return true -} - -type tagOptions struct { - multiline bool - inline bool - omitempty bool - commented bool -} - -func parseTag(tag string) (string, tagOptions) { - opts := tagOptions{} - - idx := strings.Index(tag, ",") - if idx == -1 { - return tag, opts - } - - raw := tag[idx+1:] - tag = string(tag[:idx]) - for raw != "" { - var o string - i := strings.Index(raw, ",") - if i >= 0 { - o, raw = raw[:i], raw[i+1:] - } else { - o, raw = raw, "" - } - switch o { - case "multiline": - opts.multiline = true - case "inline": - opts.inline = true - case "omitempty": - opts.omitempty = true - case "commented": - opts.commented = true - } - } - - return tag, opts -} - -func (enc *Encoder) encodeTable(b []byte, ctx encoderCtx, t table) ([]byte, error) { - var err error - - ctx.shiftKey() - - if ctx.insideKv || (ctx.inline && !ctx.isRoot()) { - return enc.encodeTableInline(b, ctx, t) - } - - if !ctx.skipTableHeader { - b, err = enc.encodeTableHeader(ctx, b) - if err != nil { - return nil, err - } - - if enc.indentTables && len(ctx.parentKey) > 0 { - ctx.indent++ - } - } - ctx.skipTableHeader = false - - hasNonEmptyKV := false - for _, kv := range t.kvs { - if shouldOmitEmpty(kv.Options, kv.Value) { - continue - } - hasNonEmptyKV = true - - ctx.setKey(kv.Key) - ctx2 := ctx - ctx2.commented = kv.Options.commented || ctx2.commented - - b, err = enc.encodeKv(b, ctx2, kv.Options, kv.Value) - if err != nil { - return nil, err - } - - b = append(b, '\n') - } - - first := true - for _, table := range t.tables { - if shouldOmitEmpty(table.Options, table.Value) { - continue - } - if first { - first = false - if hasNonEmptyKV { - b = append(b, '\n') - } - } else { - b = append(b, "\n"...) - } - - ctx.setKey(table.Key) - - ctx.options = table.Options - ctx2 := ctx - ctx2.commented = ctx2.commented || ctx.options.commented - - b, err = enc.encode(b, ctx2, table.Value) - if err != nil { - return nil, err - } - } - - return b, nil -} - -func (enc *Encoder) encodeTableInline(b []byte, ctx encoderCtx, t table) ([]byte, error) { - var err error - - b = append(b, '{') - - first := true - for _, kv := range t.kvs { - if shouldOmitEmpty(kv.Options, kv.Value) { - continue - } - - if first { - first = false - } else { - b = append(b, `, `...) - } - - ctx.setKey(kv.Key) - - b, err = enc.encodeKv(b, ctx, kv.Options, kv.Value) - if err != nil { - return nil, err - } - } - - if len(t.tables) > 0 { - panic("inline table cannot contain nested tables, only key-values") - } - - b = append(b, "}"...) - - return b, nil -} - -func willConvertToTable(ctx encoderCtx, v reflect.Value) bool { - if !v.IsValid() { - return false - } - if v.Type() == timeType || v.Type().Implements(textMarshalerType) || (v.Kind() != reflect.Ptr && v.CanAddr() && reflect.PtrTo(v.Type()).Implements(textMarshalerType)) { - return false - } - - t := v.Type() - switch t.Kind() { - case reflect.Map, reflect.Struct: - return !ctx.inline - case reflect.Interface: - return willConvertToTable(ctx, v.Elem()) - case reflect.Ptr: - if v.IsNil() { - return false - } - - return willConvertToTable(ctx, v.Elem()) - default: - return false - } -} - -func willConvertToTableOrArrayTable(ctx encoderCtx, v reflect.Value) bool { - if ctx.insideKv { - return false - } - t := v.Type() - - if t.Kind() == reflect.Interface { - return willConvertToTableOrArrayTable(ctx, v.Elem()) - } - - if t.Kind() == reflect.Slice || t.Kind() == reflect.Array { - if v.Len() == 0 { - // An empty slice should be a kv = []. 
- return false - } - - for i := 0; i < v.Len(); i++ { - t := willConvertToTable(ctx, v.Index(i)) - - if !t { - return false - } - } - - return true - } - - return willConvertToTable(ctx, v) -} - -func (enc *Encoder) encodeSlice(b []byte, ctx encoderCtx, v reflect.Value) ([]byte, error) { - if v.Len() == 0 { - b = append(b, "[]"...) - - return b, nil - } - - if willConvertToTableOrArrayTable(ctx, v) { - return enc.encodeSliceAsArrayTable(b, ctx, v) - } - - return enc.encodeSliceAsArray(b, ctx, v) -} - -// caller should have checked that v is a slice that only contains values that -// encode into tables. -func (enc *Encoder) encodeSliceAsArrayTable(b []byte, ctx encoderCtx, v reflect.Value) ([]byte, error) { - ctx.shiftKey() - - scratch := make([]byte, 0, 64) - - scratch = enc.commented(ctx.commented, scratch) - - scratch = append(scratch, "[["...) - - for i, k := range ctx.parentKey { - if i > 0 { - scratch = append(scratch, '.') - } - - scratch = enc.encodeKey(scratch, k) - } - - scratch = append(scratch, "]]\n"...) - ctx.skipTableHeader = true - - b = enc.encodeComment(ctx.indent, ctx.options.comment, b) - - if enc.indentTables { - ctx.indent++ - } - - for i := 0; i < v.Len(); i++ { - if i != 0 { - b = append(b, "\n"...) - } - - b = append(b, scratch...) - - var err error - b, err = enc.encode(b, ctx, v.Index(i)) - if err != nil { - return nil, err - } - } - - return b, nil -} - -func (enc *Encoder) encodeSliceAsArray(b []byte, ctx encoderCtx, v reflect.Value) ([]byte, error) { - multiline := ctx.options.multiline || enc.arraysMultiline - separator := ", " - - b = append(b, '[') - - subCtx := ctx - subCtx.options = valueOptions{} - - if multiline { - separator = ",\n" - - b = append(b, '\n') - - subCtx.indent++ - } - - var err error - first := true - - for i := 0; i < v.Len(); i++ { - if first { - first = false - } else { - b = append(b, separator...) - } - - if multiline { - b = enc.indent(subCtx.indent, b) - } - - b, err = enc.encode(b, subCtx, v.Index(i)) - if err != nil { - return nil, err - } - } - - if multiline { - b = append(b, '\n') - b = enc.indent(ctx.indent, b) - } - - b = append(b, ']') - - return b, nil -} - -func (enc *Encoder) indent(level int, b []byte) []byte { - for i := 0; i < level; i++ { - b = append(b, enc.indentSymbol...) - } - - return b -} diff --git a/vendor/github.com/pelletier/go-toml/v2/strict.go b/vendor/github.com/pelletier/go-toml/v2/strict.go deleted file mode 100644 index 802e7e4d1..000000000 --- a/vendor/github.com/pelletier/go-toml/v2/strict.go +++ /dev/null @@ -1,107 +0,0 @@ -package toml - -import ( - "github.com/pelletier/go-toml/v2/internal/danger" - "github.com/pelletier/go-toml/v2/internal/tracker" - "github.com/pelletier/go-toml/v2/unstable" -) - -type strict struct { - Enabled bool - - // Tracks the current key being processed. 
- key tracker.KeyTracker - - missing []unstable.ParserError -} - -func (s *strict) EnterTable(node *unstable.Node) { - if !s.Enabled { - return - } - - s.key.UpdateTable(node) -} - -func (s *strict) EnterArrayTable(node *unstable.Node) { - if !s.Enabled { - return - } - - s.key.UpdateArrayTable(node) -} - -func (s *strict) EnterKeyValue(node *unstable.Node) { - if !s.Enabled { - return - } - - s.key.Push(node) -} - -func (s *strict) ExitKeyValue(node *unstable.Node) { - if !s.Enabled { - return - } - - s.key.Pop(node) -} - -func (s *strict) MissingTable(node *unstable.Node) { - if !s.Enabled { - return - } - - s.missing = append(s.missing, unstable.ParserError{ - Highlight: keyLocation(node), - Message: "missing table", - Key: s.key.Key(), - }) -} - -func (s *strict) MissingField(node *unstable.Node) { - if !s.Enabled { - return - } - - s.missing = append(s.missing, unstable.ParserError{ - Highlight: keyLocation(node), - Message: "missing field", - Key: s.key.Key(), - }) -} - -func (s *strict) Error(doc []byte) error { - if !s.Enabled || len(s.missing) == 0 { - return nil - } - - err := &StrictMissingError{ - Errors: make([]DecodeError, 0, len(s.missing)), - } - - for _, derr := range s.missing { - derr := derr - err.Errors = append(err.Errors, *wrapDecodeError(doc, &derr)) - } - - return err -} - -func keyLocation(node *unstable.Node) []byte { - k := node.Key() - - hasOne := k.Next() - if !hasOne { - panic("should not be called with empty key") - } - - start := k.Node().Data - end := k.Node().Data - - for k.Next() { - end = k.Node().Data - } - - return danger.BytesRange(start, end) -} diff --git a/vendor/github.com/pelletier/go-toml/v2/toml.abnf b/vendor/github.com/pelletier/go-toml/v2/toml.abnf deleted file mode 100644 index 473f3749e..000000000 --- a/vendor/github.com/pelletier/go-toml/v2/toml.abnf +++ /dev/null @@ -1,243 +0,0 @@ -;; This document describes TOML's syntax, using the ABNF format (defined in -;; RFC 5234 -- https://www.ietf.org/rfc/rfc5234.txt). -;; -;; All valid TOML documents will match this description, however certain -;; invalid documents would need to be rejected as per the semantics described -;; in the supporting text description. - -;; It is possible to try this grammar interactively, using instaparse. -;; http://instaparse.mojombo.com/ -;; -;; To do so, in the lower right, click on Options and change `:input-format` to -;; ':abnf'. Then paste this entire ABNF document into the grammar entry box -;; (above the options). Then you can type or paste a sample TOML document into -;; the beige box on the left. Tada! - -;; Overall Structure - -toml = expression *( newline expression ) - -expression = ws [ comment ] -expression =/ ws keyval ws [ comment ] -expression =/ ws table ws [ comment ] - -;; Whitespace - -ws = *wschar -wschar = %x20 ; Space -wschar =/ %x09 ; Horizontal tab - -;; Newline - -newline = %x0A ; LF -newline =/ %x0D.0A ; CRLF - -;; Comment - -comment-start-symbol = %x23 ; # -non-ascii = %x80-D7FF / %xE000-10FFFF -non-eol = %x09 / %x20-7F / non-ascii - -comment = comment-start-symbol *non-eol - -;; Key-Value pairs - -keyval = key keyval-sep val - -key = simple-key / dotted-key -simple-key = quoted-key / unquoted-key - -unquoted-key = 1*( ALPHA / DIGIT / %x2D / %x5F ) ; A-Z / a-z / 0-9 / - / _ -quoted-key = basic-string / literal-string -dotted-key = simple-key 1*( dot-sep simple-key ) - -dot-sep = ws %x2E ws ; . 
Period -keyval-sep = ws %x3D ws ; = - -val = string / boolean / array / inline-table / date-time / float / integer - -;; String - -string = ml-basic-string / basic-string / ml-literal-string / literal-string - -;; Basic String - -basic-string = quotation-mark *basic-char quotation-mark - -quotation-mark = %x22 ; " - -basic-char = basic-unescaped / escaped -basic-unescaped = wschar / %x21 / %x23-5B / %x5D-7E / non-ascii -escaped = escape escape-seq-char - -escape = %x5C ; \ -escape-seq-char = %x22 ; " quotation mark U+0022 -escape-seq-char =/ %x5C ; \ reverse solidus U+005C -escape-seq-char =/ %x62 ; b backspace U+0008 -escape-seq-char =/ %x66 ; f form feed U+000C -escape-seq-char =/ %x6E ; n line feed U+000A -escape-seq-char =/ %x72 ; r carriage return U+000D -escape-seq-char =/ %x74 ; t tab U+0009 -escape-seq-char =/ %x75 4HEXDIG ; uXXXX U+XXXX -escape-seq-char =/ %x55 8HEXDIG ; UXXXXXXXX U+XXXXXXXX - -;; Multiline Basic String - -ml-basic-string = ml-basic-string-delim [ newline ] ml-basic-body - ml-basic-string-delim -ml-basic-string-delim = 3quotation-mark -ml-basic-body = *mlb-content *( mlb-quotes 1*mlb-content ) [ mlb-quotes ] - -mlb-content = mlb-char / newline / mlb-escaped-nl -mlb-char = mlb-unescaped / escaped -mlb-quotes = 1*2quotation-mark -mlb-unescaped = wschar / %x21 / %x23-5B / %x5D-7E / non-ascii -mlb-escaped-nl = escape ws newline *( wschar / newline ) - -;; Literal String - -literal-string = apostrophe *literal-char apostrophe - -apostrophe = %x27 ; ' apostrophe - -literal-char = %x09 / %x20-26 / %x28-7E / non-ascii - -;; Multiline Literal String - -ml-literal-string = ml-literal-string-delim [ newline ] ml-literal-body - ml-literal-string-delim -ml-literal-string-delim = 3apostrophe -ml-literal-body = *mll-content *( mll-quotes 1*mll-content ) [ mll-quotes ] - -mll-content = mll-char / newline -mll-char = %x09 / %x20-26 / %x28-7E / non-ascii -mll-quotes = 1*2apostrophe - -;; Integer - -integer = dec-int / hex-int / oct-int / bin-int - -minus = %x2D ; - -plus = %x2B ; + -underscore = %x5F ; _ -digit1-9 = %x31-39 ; 1-9 -digit0-7 = %x30-37 ; 0-7 -digit0-1 = %x30-31 ; 0-1 - -hex-prefix = %x30.78 ; 0x -oct-prefix = %x30.6F ; 0o -bin-prefix = %x30.62 ; 0b - -dec-int = [ minus / plus ] unsigned-dec-int -unsigned-dec-int = DIGIT / digit1-9 1*( DIGIT / underscore DIGIT ) - -hex-int = hex-prefix HEXDIG *( HEXDIG / underscore HEXDIG ) -oct-int = oct-prefix digit0-7 *( digit0-7 / underscore digit0-7 ) -bin-int = bin-prefix digit0-1 *( digit0-1 / underscore digit0-1 ) - -;; Float - -float = float-int-part ( exp / frac [ exp ] ) -float =/ special-float - -float-int-part = dec-int -frac = decimal-point zero-prefixable-int -decimal-point = %x2E ; . -zero-prefixable-int = DIGIT *( DIGIT / underscore DIGIT ) - -exp = "e" float-exp-part -float-exp-part = [ minus / plus ] zero-prefixable-int - -special-float = [ minus / plus ] ( inf / nan ) -inf = %x69.6e.66 ; inf -nan = %x6e.61.6e ; nan - -;; Boolean - -boolean = true / false - -true = %x74.72.75.65 ; true -false = %x66.61.6C.73.65 ; false - -;; Date and Time (as defined in RFC 3339) - -date-time = offset-date-time / local-date-time / local-date / local-time - -date-fullyear = 4DIGIT -date-month = 2DIGIT ; 01-12 -date-mday = 2DIGIT ; 01-28, 01-29, 01-30, 01-31 based on month/year -time-delim = "T" / %x20 ; T, t, or space -time-hour = 2DIGIT ; 00-23 -time-minute = 2DIGIT ; 00-59 -time-second = 2DIGIT ; 00-58, 00-59, 00-60 based on leap second rules -time-secfrac = "." 
1*DIGIT -time-numoffset = ( "+" / "-" ) time-hour ":" time-minute -time-offset = "Z" / time-numoffset - -partial-time = time-hour ":" time-minute ":" time-second [ time-secfrac ] -full-date = date-fullyear "-" date-month "-" date-mday -full-time = partial-time time-offset - -;; Offset Date-Time - -offset-date-time = full-date time-delim full-time - -;; Local Date-Time - -local-date-time = full-date time-delim partial-time - -;; Local Date - -local-date = full-date - -;; Local Time - -local-time = partial-time - -;; Array - -array = array-open [ array-values ] ws-comment-newline array-close - -array-open = %x5B ; [ -array-close = %x5D ; ] - -array-values = ws-comment-newline val ws-comment-newline array-sep array-values -array-values =/ ws-comment-newline val ws-comment-newline [ array-sep ] - -array-sep = %x2C ; , Comma - -ws-comment-newline = *( wschar / [ comment ] newline ) - -;; Table - -table = std-table / array-table - -;; Standard Table - -std-table = std-table-open key std-table-close - -std-table-open = %x5B ws ; [ Left square bracket -std-table-close = ws %x5D ; ] Right square bracket - -;; Inline Table - -inline-table = inline-table-open [ inline-table-keyvals ] inline-table-close - -inline-table-open = %x7B ws ; { -inline-table-close = ws %x7D ; } -inline-table-sep = ws %x2C ws ; , Comma - -inline-table-keyvals = keyval [ inline-table-sep inline-table-keyvals ] - -;; Array Table - -array-table = array-table-open key array-table-close - -array-table-open = %x5B.5B ws ; [[ Double left square bracket -array-table-close = ws %x5D.5D ; ]] Double right square bracket - -;; Built-in ABNF terms, reproduced here for clarity - -ALPHA = %x41-5A / %x61-7A ; A-Z / a-z -DIGIT = %x30-39 ; 0-9 -HEXDIG = DIGIT / "A" / "B" / "C" / "D" / "E" / "F" diff --git a/vendor/github.com/pelletier/go-toml/v2/types.go b/vendor/github.com/pelletier/go-toml/v2/types.go deleted file mode 100644 index 3c6b8fe57..000000000 --- a/vendor/github.com/pelletier/go-toml/v2/types.go +++ /dev/null @@ -1,14 +0,0 @@ -package toml - -import ( - "encoding" - "reflect" - "time" -) - -var timeType = reflect.TypeOf((*time.Time)(nil)).Elem() -var textMarshalerType = reflect.TypeOf((*encoding.TextMarshaler)(nil)).Elem() -var textUnmarshalerType = reflect.TypeOf((*encoding.TextUnmarshaler)(nil)).Elem() -var mapStringInterfaceType = reflect.TypeOf(map[string]interface{}(nil)) -var sliceInterfaceType = reflect.TypeOf([]interface{}(nil)) -var stringType = reflect.TypeOf("") diff --git a/vendor/github.com/pelletier/go-toml/v2/unmarshaler.go b/vendor/github.com/pelletier/go-toml/v2/unmarshaler.go deleted file mode 100644 index c5e5f3390..000000000 --- a/vendor/github.com/pelletier/go-toml/v2/unmarshaler.go +++ /dev/null @@ -1,1264 +0,0 @@ -package toml - -import ( - "encoding" - "errors" - "fmt" - "io" - "io/ioutil" - "math" - "reflect" - "strings" - "sync/atomic" - "time" - - "github.com/pelletier/go-toml/v2/internal/danger" - "github.com/pelletier/go-toml/v2/internal/tracker" - "github.com/pelletier/go-toml/v2/unstable" -) - -// Unmarshal deserializes a TOML document into a Go value. -// -// It is a shortcut for Decoder.Decode() with the default options. -func Unmarshal(data []byte, v interface{}) error { - p := unstable.Parser{} - p.Reset(data) - d := decoder{p: &p} - - return d.FromParser(v) -} - -// Decoder reads and decode a TOML document from an input stream. -type Decoder struct { - // input - r io.Reader - - // global settings - strict bool -} - -// NewDecoder creates a new Decoder that will read from r. 
-func NewDecoder(r io.Reader) *Decoder { - return &Decoder{r: r} -} - -// DisallowUnknownFields causes the Decoder to return an error when the -// destination is a struct and the input contains a key that does not match a -// non-ignored field. -// -// In that case, the Decoder returns a StrictMissingError that can be used to -// retrieve the individual errors as well as generate a human readable -// description of the missing fields. -func (d *Decoder) DisallowUnknownFields() *Decoder { - d.strict = true - return d -} - -// Decode the whole content of r into v. -// -// By default, values in the document that don't exist in the target Go value -// are ignored. See Decoder.DisallowUnknownFields() to change this behavior. -// -// When a TOML local date, time, or date-time is decoded into a time.Time, its -// value is represented in time.Local timezone. Otherwise the appropriate Local* -// structure is used. For time values, precision up to the nanosecond is -// supported by truncating extra digits. -// -// Empty tables decoded in an interface{} create an empty initialized -// map[string]interface{}. -// -// Types implementing the encoding.TextUnmarshaler interface are decoded from a -// TOML string. -// -// When decoding a number, go-toml will return an error if the number is out of -// bounds for the target type (which includes negative numbers when decoding -// into an unsigned int). -// -// If an error occurs while decoding the content of the document, this function -// returns a toml.DecodeError, providing context about the issue. When using -// strict mode and a field is missing, a `toml.StrictMissingError` is -// returned. In any other case, this function returns a standard Go error. -// -// # Type mapping -// -// List of supported TOML types and their associated accepted Go types: -// -// String -> string -// Integer -> uint*, int*, depending on size -// Float -> float*, depending on size -// Boolean -> bool -// Offset Date-Time -> time.Time -// Local Date-time -> LocalDateTime, time.Time -// Local Date -> LocalDate, time.Time -// Local Time -> LocalTime, time.Time -// Array -> slice and array, depending on elements types -// Table -> map and struct -// Inline Table -> same as Table -// Array of Tables -> same as Array and Table -func (d *Decoder) Decode(v interface{}) error { - b, err := ioutil.ReadAll(d.r) - if err != nil { - return fmt.Errorf("toml: %w", err) - } - - p := unstable.Parser{} - p.Reset(b) - dec := decoder{ - p: &p, - strict: strict{ - Enabled: d.strict, - }, - } - - return dec.FromParser(v) -} - -type decoder struct { - // Which parser instance in use for this decoding session. - p *unstable.Parser - - // Flag indicating that the current expression is stashed. - // If set to true, calling nextExpr will not actually pull a new expression - // but turn off the flag instead. - stashedExpr bool - - // Skip expressions until a table is found. This is set to true when a - // table could not be created (missing field in map), so all KV expressions - // need to be skipped. - skipUntilTable bool - - // Tracks position in Go arrays. - // This is used when decoding [[array tables]] into Go arrays. Given array - // tables are separate TOML expression, we need to keep track of where we - // are at in the Go array, as we can't just introspect its size. - arrayIndexes map[reflect.Value]int - - // Tracks keys that have been seen, with which type. - seen tracker.SeenTracker - - // Strict mode - strict strict - - // Current context for the error. 
- errorContext *errorContext -} - -type errorContext struct { - Struct reflect.Type - Field []int -} - -func (d *decoder) typeMismatchError(toml string, target reflect.Type) error { - return fmt.Errorf("toml: %s", d.typeMismatchString(toml, target)) -} - -func (d *decoder) typeMismatchString(toml string, target reflect.Type) string { - if d.errorContext != nil && d.errorContext.Struct != nil { - ctx := d.errorContext - f := ctx.Struct.FieldByIndex(ctx.Field) - return fmt.Sprintf("cannot decode TOML %s into struct field %s.%s of type %s", toml, ctx.Struct, f.Name, f.Type) - } - return fmt.Sprintf("cannot decode TOML %s into a Go value of type %s", toml, target) -} - -func (d *decoder) expr() *unstable.Node { - return d.p.Expression() -} - -func (d *decoder) nextExpr() bool { - if d.stashedExpr { - d.stashedExpr = false - return true - } - return d.p.NextExpression() -} - -func (d *decoder) stashExpr() { - d.stashedExpr = true -} - -func (d *decoder) arrayIndex(shouldAppend bool, v reflect.Value) int { - if d.arrayIndexes == nil { - d.arrayIndexes = make(map[reflect.Value]int, 1) - } - - idx, ok := d.arrayIndexes[v] - - if !ok { - d.arrayIndexes[v] = 0 - } else if shouldAppend { - idx++ - d.arrayIndexes[v] = idx - } - - return idx -} - -func (d *decoder) FromParser(v interface{}) error { - r := reflect.ValueOf(v) - if r.Kind() != reflect.Ptr { - return fmt.Errorf("toml: decoding can only be performed into a pointer, not %s", r.Kind()) - } - - if r.IsNil() { - return fmt.Errorf("toml: decoding pointer target cannot be nil") - } - - r = r.Elem() - if r.Kind() == reflect.Interface && r.IsNil() { - newMap := map[string]interface{}{} - r.Set(reflect.ValueOf(newMap)) - } - - err := d.fromParser(r) - if err == nil { - return d.strict.Error(d.p.Data()) - } - - var e *unstable.ParserError - if errors.As(err, &e) { - return wrapDecodeError(d.p.Data(), e) - } - - return err -} - -func (d *decoder) fromParser(root reflect.Value) error { - for d.nextExpr() { - err := d.handleRootExpression(d.expr(), root) - if err != nil { - return err - } - } - - return d.p.Error() -} - -/* -Rules for the unmarshal code: - -- The stack is used to keep track of which values need to be set where. -- handle* functions <=> switch on a given unstable.Kind. -- unmarshalX* functions need to unmarshal a node of kind X. -- An "object" is either a struct or a map. 
-*/ - -func (d *decoder) handleRootExpression(expr *unstable.Node, v reflect.Value) error { - var x reflect.Value - var err error - - if !(d.skipUntilTable && expr.Kind == unstable.KeyValue) { - err = d.seen.CheckExpression(expr) - if err != nil { - return err - } - } - - switch expr.Kind { - case unstable.KeyValue: - if d.skipUntilTable { - return nil - } - x, err = d.handleKeyValue(expr, v) - case unstable.Table: - d.skipUntilTable = false - d.strict.EnterTable(expr) - x, err = d.handleTable(expr.Key(), v) - case unstable.ArrayTable: - d.skipUntilTable = false - d.strict.EnterArrayTable(expr) - x, err = d.handleArrayTable(expr.Key(), v) - default: - panic(fmt.Errorf("parser should not permit expression of kind %s at document root", expr.Kind)) - } - - if d.skipUntilTable { - if expr.Kind == unstable.Table || expr.Kind == unstable.ArrayTable { - d.strict.MissingTable(expr) - } - } else if err == nil && x.IsValid() { - v.Set(x) - } - - return err -} - -func (d *decoder) handleArrayTable(key unstable.Iterator, v reflect.Value) (reflect.Value, error) { - if key.Next() { - return d.handleArrayTablePart(key, v) - } - return d.handleKeyValues(v) -} - -func (d *decoder) handleArrayTableCollectionLast(key unstable.Iterator, v reflect.Value) (reflect.Value, error) { - switch v.Kind() { - case reflect.Interface: - elem := v.Elem() - if !elem.IsValid() { - elem = reflect.New(sliceInterfaceType).Elem() - elem.Set(reflect.MakeSlice(sliceInterfaceType, 0, 16)) - } else if elem.Kind() == reflect.Slice { - if elem.Type() != sliceInterfaceType { - elem = reflect.New(sliceInterfaceType).Elem() - elem.Set(reflect.MakeSlice(sliceInterfaceType, 0, 16)) - } else if !elem.CanSet() { - nelem := reflect.New(sliceInterfaceType).Elem() - nelem.Set(reflect.MakeSlice(sliceInterfaceType, elem.Len(), elem.Cap())) - reflect.Copy(nelem, elem) - elem = nelem - } - } - return d.handleArrayTableCollectionLast(key, elem) - case reflect.Ptr: - elem := v.Elem() - if !elem.IsValid() { - ptr := reflect.New(v.Type().Elem()) - v.Set(ptr) - elem = ptr.Elem() - } - - elem, err := d.handleArrayTableCollectionLast(key, elem) - if err != nil { - return reflect.Value{}, err - } - v.Elem().Set(elem) - - return v, nil - case reflect.Slice: - elemType := v.Type().Elem() - var elem reflect.Value - if elemType.Kind() == reflect.Interface { - elem = makeMapStringInterface() - } else { - elem = reflect.New(elemType).Elem() - } - elem2, err := d.handleArrayTable(key, elem) - if err != nil { - return reflect.Value{}, err - } - if elem2.IsValid() { - elem = elem2 - } - return reflect.Append(v, elem), nil - case reflect.Array: - idx := d.arrayIndex(true, v) - if idx >= v.Len() { - return v, fmt.Errorf("%s at position %d", d.typeMismatchError("array table", v.Type()), idx) - } - elem := v.Index(idx) - _, err := d.handleArrayTable(key, elem) - return v, err - default: - return reflect.Value{}, d.typeMismatchError("array table", v.Type()) - } -} - -// When parsing an array table expression, each part of the key needs to be -// evaluated like a normal key, but if it returns a collection, it also needs to -// point to the last element of the collection. Unless it is the last part of -// the key, then it needs to create a new element at the end. 
-func (d *decoder) handleArrayTableCollection(key unstable.Iterator, v reflect.Value) (reflect.Value, error) { - if key.IsLast() { - return d.handleArrayTableCollectionLast(key, v) - } - - switch v.Kind() { - case reflect.Ptr: - elem := v.Elem() - if !elem.IsValid() { - ptr := reflect.New(v.Type().Elem()) - v.Set(ptr) - elem = ptr.Elem() - } - - elem, err := d.handleArrayTableCollection(key, elem) - if err != nil { - return reflect.Value{}, err - } - if elem.IsValid() { - v.Elem().Set(elem) - } - - return v, nil - case reflect.Slice: - elem := v.Index(v.Len() - 1) - x, err := d.handleArrayTable(key, elem) - if err != nil || d.skipUntilTable { - return reflect.Value{}, err - } - if x.IsValid() { - elem.Set(x) - } - - return v, err - case reflect.Array: - idx := d.arrayIndex(false, v) - if idx >= v.Len() { - return v, fmt.Errorf("%s at position %d", d.typeMismatchError("array table", v.Type()), idx) - } - elem := v.Index(idx) - _, err := d.handleArrayTable(key, elem) - return v, err - } - - return d.handleArrayTable(key, v) -} - -func (d *decoder) handleKeyPart(key unstable.Iterator, v reflect.Value, nextFn handlerFn, makeFn valueMakerFn) (reflect.Value, error) { - var rv reflect.Value - - // First, dispatch over v to make sure it is a valid object. - // There is no guarantee over what it could be. - switch v.Kind() { - case reflect.Ptr: - elem := v.Elem() - if !elem.IsValid() { - v.Set(reflect.New(v.Type().Elem())) - } - elem = v.Elem() - return d.handleKeyPart(key, elem, nextFn, makeFn) - case reflect.Map: - vt := v.Type() - - // Create the key for the map element. Convert to key type. - mk, err := d.keyFromData(vt.Key(), key.Node().Data) - if err != nil { - return reflect.Value{}, err - } - - // If the map does not exist, create it. - if v.IsNil() { - vt := v.Type() - v = reflect.MakeMap(vt) - rv = v - } - - mv := v.MapIndex(mk) - set := false - if !mv.IsValid() { - // If there is no value in the map, create a new one according to - // the map type. If the element type is interface, create either a - // map[string]interface{} or a []interface{} depending on whether - // this is the last part of the array table key. 
- - t := vt.Elem() - if t.Kind() == reflect.Interface { - mv = makeFn() - } else { - mv = reflect.New(t).Elem() - } - set = true - } else if mv.Kind() == reflect.Interface { - mv = mv.Elem() - if !mv.IsValid() { - mv = makeFn() - } - set = true - } else if !mv.CanAddr() { - vt := v.Type() - t := vt.Elem() - oldmv := mv - mv = reflect.New(t).Elem() - mv.Set(oldmv) - set = true - } - - x, err := nextFn(key, mv) - if err != nil { - return reflect.Value{}, err - } - - if x.IsValid() { - mv = x - set = true - } - - if set { - v.SetMapIndex(mk, mv) - } - case reflect.Struct: - path, found := structFieldPath(v, string(key.Node().Data)) - if !found { - d.skipUntilTable = true - return reflect.Value{}, nil - } - - if d.errorContext == nil { - d.errorContext = new(errorContext) - } - t := v.Type() - d.errorContext.Struct = t - d.errorContext.Field = path - - f := fieldByIndex(v, path) - x, err := nextFn(key, f) - if err != nil || d.skipUntilTable { - return reflect.Value{}, err - } - if x.IsValid() { - f.Set(x) - } - d.errorContext.Field = nil - d.errorContext.Struct = nil - case reflect.Interface: - if v.Elem().IsValid() { - v = v.Elem() - } else { - v = makeMapStringInterface() - } - - x, err := d.handleKeyPart(key, v, nextFn, makeFn) - if err != nil { - return reflect.Value{}, err - } - if x.IsValid() { - v = x - } - rv = v - default: - panic(fmt.Errorf("unhandled part: %s", v.Kind())) - } - - return rv, nil -} - -// HandleArrayTablePart navigates the Go structure v using the key v. It is -// only used for the prefix (non-last) parts of an array-table. When -// encountering a collection, it should go to the last element. -func (d *decoder) handleArrayTablePart(key unstable.Iterator, v reflect.Value) (reflect.Value, error) { - var makeFn valueMakerFn - if key.IsLast() { - makeFn = makeSliceInterface - } else { - makeFn = makeMapStringInterface - } - return d.handleKeyPart(key, v, d.handleArrayTableCollection, makeFn) -} - -// HandleTable returns a reference when it has checked the next expression but -// cannot handle it. -func (d *decoder) handleTable(key unstable.Iterator, v reflect.Value) (reflect.Value, error) { - if v.Kind() == reflect.Slice { - if v.Len() == 0 { - return reflect.Value{}, unstable.NewParserError(key.Node().Data, "cannot store a table in a slice") - } - elem := v.Index(v.Len() - 1) - x, err := d.handleTable(key, elem) - if err != nil { - return reflect.Value{}, err - } - if x.IsValid() { - elem.Set(x) - } - return reflect.Value{}, nil - } - if key.Next() { - // Still scoping the key - return d.handleTablePart(key, v) - } - // Done scoping the key. - // Now handle all the key-value expressions in this table. - return d.handleKeyValues(v) -} - -// Handle root expressions until the end of the document or the next -// non-key-value. -func (d *decoder) handleKeyValues(v reflect.Value) (reflect.Value, error) { - var rv reflect.Value - for d.nextExpr() { - expr := d.expr() - if expr.Kind != unstable.KeyValue { - // Stash the expression so that fromParser can just loop and use - // the right handler. - // We could just recurse ourselves here, but at least this gives a - // chance to pop the stack a bit. 
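When the decode target is an interface{}, the branches above fall back to map[string]interface{} (mirroring encoding/json). A small sketch, assuming the upstream module, of what that looks like from the public API:

package main

import (
	"fmt"

	"github.com/pelletier/go-toml/v2"
)

func main() {
	var v interface{}
	if err := toml.Unmarshal([]byte("[a.b]\nc = 1\n"), &v); err != nil {
		panic(err)
	}
	// Nested tables land in map[string]interface{}; integers come back as int64.
	fmt.Printf("%#v\n", v)
}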
- d.stashExpr() - break - } - - err := d.seen.CheckExpression(expr) - if err != nil { - return reflect.Value{}, err - } - - x, err := d.handleKeyValue(expr, v) - if err != nil { - return reflect.Value{}, err - } - if x.IsValid() { - v = x - rv = x - } - } - return rv, nil -} - -type ( - handlerFn func(key unstable.Iterator, v reflect.Value) (reflect.Value, error) - valueMakerFn func() reflect.Value -) - -func makeMapStringInterface() reflect.Value { - return reflect.MakeMap(mapStringInterfaceType) -} - -func makeSliceInterface() reflect.Value { - return reflect.MakeSlice(sliceInterfaceType, 0, 16) -} - -func (d *decoder) handleTablePart(key unstable.Iterator, v reflect.Value) (reflect.Value, error) { - return d.handleKeyPart(key, v, d.handleTable, makeMapStringInterface) -} - -func (d *decoder) tryTextUnmarshaler(node *unstable.Node, v reflect.Value) (bool, error) { - // Special case for time, because we allow to unmarshal to it from - // different kind of AST nodes. - if v.Type() == timeType { - return false, nil - } - - if v.CanAddr() && v.Addr().Type().Implements(textUnmarshalerType) { - err := v.Addr().Interface().(encoding.TextUnmarshaler).UnmarshalText(node.Data) - if err != nil { - return false, unstable.NewParserError(d.p.Raw(node.Raw), "%w", err) - } - - return true, nil - } - - return false, nil -} - -func (d *decoder) handleValue(value *unstable.Node, v reflect.Value) error { - for v.Kind() == reflect.Ptr { - v = initAndDereferencePointer(v) - } - - ok, err := d.tryTextUnmarshaler(value, v) - if ok || err != nil { - return err - } - - switch value.Kind { - case unstable.String: - return d.unmarshalString(value, v) - case unstable.Integer: - return d.unmarshalInteger(value, v) - case unstable.Float: - return d.unmarshalFloat(value, v) - case unstable.Bool: - return d.unmarshalBool(value, v) - case unstable.DateTime: - return d.unmarshalDateTime(value, v) - case unstable.LocalDate: - return d.unmarshalLocalDate(value, v) - case unstable.LocalTime: - return d.unmarshalLocalTime(value, v) - case unstable.LocalDateTime: - return d.unmarshalLocalDateTime(value, v) - case unstable.InlineTable: - return d.unmarshalInlineTable(value, v) - case unstable.Array: - return d.unmarshalArray(value, v) - default: - panic(fmt.Errorf("handleValue not implemented for %s", value.Kind)) - } -} - -func (d *decoder) unmarshalArray(array *unstable.Node, v reflect.Value) error { - switch v.Kind() { - case reflect.Slice: - if v.IsNil() { - v.Set(reflect.MakeSlice(v.Type(), 0, 16)) - } else { - v.SetLen(0) - } - case reflect.Array: - // arrays are always initialized - case reflect.Interface: - elem := v.Elem() - if !elem.IsValid() { - elem = reflect.New(sliceInterfaceType).Elem() - elem.Set(reflect.MakeSlice(sliceInterfaceType, 0, 16)) - } else if elem.Kind() == reflect.Slice { - if elem.Type() != sliceInterfaceType { - elem = reflect.New(sliceInterfaceType).Elem() - elem.Set(reflect.MakeSlice(sliceInterfaceType, 0, 16)) - } else if !elem.CanSet() { - nelem := reflect.New(sliceInterfaceType).Elem() - nelem.Set(reflect.MakeSlice(sliceInterfaceType, elem.Len(), elem.Cap())) - reflect.Copy(nelem, elem) - elem = nelem - } - } - err := d.unmarshalArray(array, elem) - if err != nil { - return err - } - v.Set(elem) - return nil - default: - // TODO: use newDecodeError, but first the parser needs to fill - // array.Data. 
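handleValue first probes for encoding.TextUnmarshaler (except for time.Time), so custom types can take over decoding of string values. A hedged sketch, assuming the upstream module; the level type here is purely illustrative:

package main

import (
	"fmt"
	"strings"

	"github.com/pelletier/go-toml/v2"
)

// level implements encoding.TextUnmarshaler, so the decoder hands it the raw
// string bytes instead of requiring a plain string field.
type level int

func (l *level) UnmarshalText(text []byte) error {
	switch strings.ToLower(string(text)) {
	case "debug":
		*l = 0
	case "info":
		*l = 1
	default:
		return fmt.Errorf("unknown level %q", text)
	}
	return nil
}

func main() {
	var cfg struct {
		Level level `toml:"level"`
	}
	if err := toml.Unmarshal([]byte("level = \"info\"\n"), &cfg); err != nil {
		panic(err)
	}
	fmt.Println(cfg.Level) // 1
}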
- return d.typeMismatchError("array", v.Type()) - } - - elemType := v.Type().Elem() - - it := array.Children() - idx := 0 - for it.Next() { - n := it.Node() - - // TODO: optimize - if v.Kind() == reflect.Slice { - elem := reflect.New(elemType).Elem() - - err := d.handleValue(n, elem) - if err != nil { - return err - } - - v.Set(reflect.Append(v, elem)) - } else { // array - if idx >= v.Len() { - return nil - } - elem := v.Index(idx) - err := d.handleValue(n, elem) - if err != nil { - return err - } - idx++ - } - } - - return nil -} - -func (d *decoder) unmarshalInlineTable(itable *unstable.Node, v reflect.Value) error { - // Make sure v is an initialized object. - switch v.Kind() { - case reflect.Map: - if v.IsNil() { - v.Set(reflect.MakeMap(v.Type())) - } - case reflect.Struct: - // structs are always initialized. - case reflect.Interface: - elem := v.Elem() - if !elem.IsValid() { - elem = makeMapStringInterface() - v.Set(elem) - } - return d.unmarshalInlineTable(itable, elem) - default: - return unstable.NewParserError(d.p.Raw(itable.Raw), "cannot store inline table in Go type %s", v.Kind()) - } - - it := itable.Children() - for it.Next() { - n := it.Node() - - x, err := d.handleKeyValue(n, v) - if err != nil { - return err - } - if x.IsValid() { - v = x - } - } - - return nil -} - -func (d *decoder) unmarshalDateTime(value *unstable.Node, v reflect.Value) error { - dt, err := parseDateTime(value.Data) - if err != nil { - return err - } - - v.Set(reflect.ValueOf(dt)) - return nil -} - -func (d *decoder) unmarshalLocalDate(value *unstable.Node, v reflect.Value) error { - ld, err := parseLocalDate(value.Data) - if err != nil { - return err - } - - if v.Type() == timeType { - cast := ld.AsTime(time.Local) - v.Set(reflect.ValueOf(cast)) - return nil - } - - v.Set(reflect.ValueOf(ld)) - - return nil -} - -func (d *decoder) unmarshalLocalTime(value *unstable.Node, v reflect.Value) error { - lt, rest, err := parseLocalTime(value.Data) - if err != nil { - return err - } - - if len(rest) > 0 { - return unstable.NewParserError(rest, "extra characters at the end of a local time") - } - - v.Set(reflect.ValueOf(lt)) - return nil -} - -func (d *decoder) unmarshalLocalDateTime(value *unstable.Node, v reflect.Value) error { - ldt, rest, err := parseLocalDateTime(value.Data) - if err != nil { - return err - } - - if len(rest) > 0 { - return unstable.NewParserError(rest, "extra characters at the end of a local date time") - } - - if v.Type() == timeType { - cast := ldt.AsTime(time.Local) - - v.Set(reflect.ValueOf(cast)) - return nil - } - - v.Set(reflect.ValueOf(ldt)) - - return nil -} - -func (d *decoder) unmarshalBool(value *unstable.Node, v reflect.Value) error { - b := value.Data[0] == 't' - - switch v.Kind() { - case reflect.Bool: - v.SetBool(b) - case reflect.Interface: - v.Set(reflect.ValueOf(b)) - default: - return unstable.NewParserError(value.Data, "cannot assign boolean to a %t", b) - } - - return nil -} - -func (d *decoder) unmarshalFloat(value *unstable.Node, v reflect.Value) error { - f, err := parseFloat(value.Data) - if err != nil { - return err - } - - switch v.Kind() { - case reflect.Float64: - v.SetFloat(f) - case reflect.Float32: - if f > math.MaxFloat32 { - return unstable.NewParserError(value.Data, "number %f does not fit in a float32", f) - } - v.SetFloat(f) - case reflect.Interface: - v.Set(reflect.ValueOf(f)) - default: - return unstable.NewParserError(value.Data, "float cannot be assigned to %s", v.Kind()) - } - - return nil -} - -const ( - maxInt = int64(^uint(0) >> 1) - minInt 
= -maxInt - 1 -) - -// Maximum value of uint for decoding. Currently the decoder parses the integer -// into an int64. As a result, on architectures where uint is 64 bits, the -// effective maximum uint we can decode is the maximum of int64. On -// architectures where uint is 32 bits, the maximum value we can decode is -// lower: the maximum of uint32. I didn't find a way to figure out this value at -// compile time, so it is computed during initialization. -var maxUint int64 = math.MaxInt64 - -func init() { - m := uint64(^uint(0)) - if m < uint64(maxUint) { - maxUint = int64(m) - } -} - -func (d *decoder) unmarshalInteger(value *unstable.Node, v reflect.Value) error { - kind := v.Kind() - if kind == reflect.Float32 || kind == reflect.Float64 { - return d.unmarshalFloat(value, v) - } - - i, err := parseInteger(value.Data) - if err != nil { - return err - } - - var r reflect.Value - - switch kind { - case reflect.Int64: - v.SetInt(i) - return nil - case reflect.Int32: - if i < math.MinInt32 || i > math.MaxInt32 { - return fmt.Errorf("toml: number %d does not fit in an int32", i) - } - - r = reflect.ValueOf(int32(i)) - case reflect.Int16: - if i < math.MinInt16 || i > math.MaxInt16 { - return fmt.Errorf("toml: number %d does not fit in an int16", i) - } - - r = reflect.ValueOf(int16(i)) - case reflect.Int8: - if i < math.MinInt8 || i > math.MaxInt8 { - return fmt.Errorf("toml: number %d does not fit in an int8", i) - } - - r = reflect.ValueOf(int8(i)) - case reflect.Int: - if i < minInt || i > maxInt { - return fmt.Errorf("toml: number %d does not fit in an int", i) - } - - r = reflect.ValueOf(int(i)) - case reflect.Uint64: - if i < 0 { - return fmt.Errorf("toml: negative number %d does not fit in an uint64", i) - } - - r = reflect.ValueOf(uint64(i)) - case reflect.Uint32: - if i < 0 || i > math.MaxUint32 { - return fmt.Errorf("toml: negative number %d does not fit in an uint32", i) - } - - r = reflect.ValueOf(uint32(i)) - case reflect.Uint16: - if i < 0 || i > math.MaxUint16 { - return fmt.Errorf("toml: negative number %d does not fit in an uint16", i) - } - - r = reflect.ValueOf(uint16(i)) - case reflect.Uint8: - if i < 0 || i > math.MaxUint8 { - return fmt.Errorf("toml: negative number %d does not fit in an uint8", i) - } - - r = reflect.ValueOf(uint8(i)) - case reflect.Uint: - if i < 0 || i > maxUint { - return fmt.Errorf("toml: negative number %d does not fit in an uint", i) - } - - r = reflect.ValueOf(uint(i)) - case reflect.Interface: - r = reflect.ValueOf(i) - default: - return unstable.NewParserError(d.p.Raw(value.Raw), d.typeMismatchString("integer", v.Type())) - } - - if !r.Type().AssignableTo(v.Type()) { - r = r.Convert(v.Type()) - } - - v.Set(r) - - return nil -} - -func (d *decoder) unmarshalString(value *unstable.Node, v reflect.Value) error { - switch v.Kind() { - case reflect.String: - v.SetString(string(value.Data)) - case reflect.Interface: - v.Set(reflect.ValueOf(string(value.Data))) - default: - return unstable.NewParserError(d.p.Raw(value.Raw), d.typeMismatchString("string", v.Type())) - } - - return nil -} - -func (d *decoder) handleKeyValue(expr *unstable.Node, v reflect.Value) (reflect.Value, error) { - d.strict.EnterKeyValue(expr) - - v, err := d.handleKeyValueInner(expr.Key(), expr.Value(), v) - if d.skipUntilTable { - d.strict.MissingField(expr) - d.skipUntilTable = false - } - - d.strict.ExitKeyValue(expr) - - return v, err -} - -func (d *decoder) handleKeyValueInner(key unstable.Iterator, value *unstable.Node, v reflect.Value) (reflect.Value, error) { - if 
key.Next() { - // Still scoping the key - return d.handleKeyValuePart(key, value, v) - } - // Done scoping the key. - // v is whatever Go value we need to fill. - return reflect.Value{}, d.handleValue(value, v) -} - -func (d *decoder) keyFromData(keyType reflect.Type, data []byte) (reflect.Value, error) { - switch { - case stringType.AssignableTo(keyType): - return reflect.ValueOf(string(data)), nil - - case stringType.ConvertibleTo(keyType): - return reflect.ValueOf(string(data)).Convert(keyType), nil - - case keyType.Implements(textUnmarshalerType): - mk := reflect.New(keyType.Elem()) - if err := mk.Interface().(encoding.TextUnmarshaler).UnmarshalText(data); err != nil { - return reflect.Value{}, fmt.Errorf("toml: error unmarshalling key type %s from text: %w", stringType, err) - } - return mk, nil - - case reflect.PtrTo(keyType).Implements(textUnmarshalerType): - mk := reflect.New(keyType) - if err := mk.Interface().(encoding.TextUnmarshaler).UnmarshalText(data); err != nil { - return reflect.Value{}, fmt.Errorf("toml: error unmarshalling key type %s from text: %w", stringType, err) - } - return mk.Elem(), nil - } - return reflect.Value{}, fmt.Errorf("toml: cannot convert map key of type %s to expected type %s", stringType, keyType) -} - -func (d *decoder) handleKeyValuePart(key unstable.Iterator, value *unstable.Node, v reflect.Value) (reflect.Value, error) { - // contains the replacement for v - var rv reflect.Value - - // First, dispatch over v to make sure it is a valid object. - // There is no guarantee over what it could be. - switch v.Kind() { - case reflect.Map: - vt := v.Type() - - mk, err := d.keyFromData(vt.Key(), key.Node().Data) - if err != nil { - return reflect.Value{}, err - } - - // If the map does not exist, create it. - if v.IsNil() { - v = reflect.MakeMap(vt) - rv = v - } - - mv := v.MapIndex(mk) - set := false - if !mv.IsValid() || key.IsLast() { - set = true - mv = reflect.New(v.Type().Elem()).Elem() - } - - nv, err := d.handleKeyValueInner(key, value, mv) - if err != nil { - return reflect.Value{}, err - } - if nv.IsValid() { - mv = nv - set = true - } - - if set { - v.SetMapIndex(mk, mv) - } - case reflect.Struct: - path, found := structFieldPath(v, string(key.Node().Data)) - if !found { - d.skipUntilTable = true - break - } - - if d.errorContext == nil { - d.errorContext = new(errorContext) - } - t := v.Type() - d.errorContext.Struct = t - d.errorContext.Field = path - - f := fieldByIndex(v, path) - - if !f.CanAddr() { - // If the field is not addressable, need to take a slower path and - // make a copy of the struct itself to a new location. - nvp := reflect.New(v.Type()) - nvp.Elem().Set(v) - v = nvp.Elem() - _, err := d.handleKeyValuePart(key, value, v) - if err != nil { - return reflect.Value{}, err - } - return nvp.Elem(), nil - } - x, err := d.handleKeyValueInner(key, value, f) - if err != nil { - return reflect.Value{}, err - } - - if x.IsValid() { - f.Set(x) - } - d.errorContext.Struct = nil - d.errorContext.Field = nil - case reflect.Interface: - v = v.Elem() - - // Following encoding/json: decoding an object into an - // interface{}, it needs to always hold a - // map[string]interface{}. This is for the types to be - // consistent whether a previous value was set or not. 
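The integer path above parses into an int64 and then range-checks before narrowing, and the usable ceiling for uint targets is computed at init time because it depends on the platform's word size. A standalone sketch of those checks; the fitsInt8 helper is illustrative, not part of the library:

package main

import (
	"fmt"
	"math"
)

// Platform-dependent ceiling for uint targets: the parser produces an int64,
// so on 32-bit platforms the usable maximum is math.MaxUint32, otherwise
// math.MaxInt64.
var maxUint int64 = math.MaxInt64

func init() {
	if m := uint64(^uint(0)); m < uint64(maxUint) {
		maxUint = int64(m)
	}
}

// fitsInt8 reproduces the kind of range check applied before storing a parsed
// int64 into a narrower Go integer field.
func fitsInt8(i int64) error {
	if i < math.MinInt8 || i > math.MaxInt8 {
		return fmt.Errorf("toml: number %d does not fit in an int8", i)
	}
	return nil
}

func main() {
	fmt.Println(maxUint)
	fmt.Println(fitsInt8(300)) // toml: number 300 does not fit in an int8
}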
- if !v.IsValid() || v.Type() != mapStringInterfaceType { - v = makeMapStringInterface() - } - - x, err := d.handleKeyValuePart(key, value, v) - if err != nil { - return reflect.Value{}, err - } - if x.IsValid() { - v = x - } - rv = v - case reflect.Ptr: - elem := v.Elem() - if !elem.IsValid() { - ptr := reflect.New(v.Type().Elem()) - v.Set(ptr) - rv = v - elem = ptr.Elem() - } - - elem2, err := d.handleKeyValuePart(key, value, elem) - if err != nil { - return reflect.Value{}, err - } - if elem2.IsValid() { - elem = elem2 - } - v.Elem().Set(elem) - default: - return reflect.Value{}, fmt.Errorf("unhandled kv part: %s", v.Kind()) - } - - return rv, nil -} - -func initAndDereferencePointer(v reflect.Value) reflect.Value { - var elem reflect.Value - if v.IsNil() { - ptr := reflect.New(v.Type().Elem()) - v.Set(ptr) - } - elem = v.Elem() - return elem -} - -// Same as reflect.Value.FieldByIndex, but creates pointers if needed. -func fieldByIndex(v reflect.Value, path []int) reflect.Value { - for _, x := range path { - v = v.Field(x) - - if v.Kind() == reflect.Ptr { - if v.IsNil() { - v.Set(reflect.New(v.Type().Elem())) - } - v = v.Elem() - } - } - return v -} - -type fieldPathsMap = map[string][]int - -var globalFieldPathsCache atomic.Value // map[danger.TypeID]fieldPathsMap - -func structFieldPath(v reflect.Value, name string) ([]int, bool) { - t := v.Type() - - cache, _ := globalFieldPathsCache.Load().(map[danger.TypeID]fieldPathsMap) - fieldPaths, ok := cache[danger.MakeTypeID(t)] - - if !ok { - fieldPaths = map[string][]int{} - - forEachField(t, nil, func(name string, path []int) { - fieldPaths[name] = path - // extra copy for the case-insensitive match - fieldPaths[strings.ToLower(name)] = path - }) - - newCache := make(map[danger.TypeID]fieldPathsMap, len(cache)+1) - newCache[danger.MakeTypeID(t)] = fieldPaths - for k, v := range cache { - newCache[k] = v - } - globalFieldPathsCache.Store(newCache) - } - - path, ok := fieldPaths[name] - if !ok { - path, ok = fieldPaths[strings.ToLower(name)] - } - return path, ok -} - -func forEachField(t reflect.Type, path []int, do func(name string, path []int)) { - n := t.NumField() - for i := 0; i < n; i++ { - f := t.Field(i) - - if !f.Anonymous && f.PkgPath != "" { - // only consider exported fields. - continue - } - - fieldPath := append(path, i) - fieldPath = fieldPath[:len(fieldPath):len(fieldPath)] - - name := f.Tag.Get("toml") - if name == "-" { - continue - } - - if i := strings.IndexByte(name, ','); i >= 0 { - name = name[:i] - } - - if f.Anonymous && name == "" { - t2 := f.Type - if t2.Kind() == reflect.Ptr { - t2 = t2.Elem() - } - - if t2.Kind() == reflect.Struct { - forEachField(t2, fieldPath, do) - } - continue - } - - if name == "" { - name = f.Name - } - - do(name, fieldPath) - } -} diff --git a/vendor/github.com/pelletier/go-toml/v2/unstable/ast.go b/vendor/github.com/pelletier/go-toml/v2/unstable/ast.go deleted file mode 100644 index f526bf2c0..000000000 --- a/vendor/github.com/pelletier/go-toml/v2/unstable/ast.go +++ /dev/null @@ -1,136 +0,0 @@ -package unstable - -import ( - "fmt" - "unsafe" - - "github.com/pelletier/go-toml/v2/internal/danger" -) - -// Iterator over a sequence of nodes. -// -// Starts uninitialized, you need to call Next() first. -// -// For example: -// -// it := n.Children() -// for it.Next() { -// n := it.Node() -// // do something with n -// } -type Iterator struct { - started bool - node *Node -} - -// Next moves the iterator forward and returns true if points to a -// node, false otherwise. 
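structFieldPath above keeps a copy-on-write cache in an atomic.Value: readers never lock, and a cache miss builds a new map containing the old entries and swaps it in whole. A simplified sketch of the same idea; it keys by reflect.Type instead of danger.TypeID and skips the embedded-struct and "-"/",option" tag handling:

package main

import (
	"fmt"
	"reflect"
	"strings"
	"sync/atomic"
)

var cache atomic.Value // map[reflect.Type]map[string][]int

func fieldPaths(t reflect.Type) map[string][]int {
	m, _ := cache.Load().(map[reflect.Type]map[string][]int)
	if paths, ok := m[t]; ok {
		return paths
	}
	paths := map[string][]int{}
	for i := 0; i < t.NumField(); i++ {
		f := t.Field(i)
		name := f.Tag.Get("toml")
		if name == "" {
			name = f.Name
		}
		paths[name] = []int{i}
		// Extra lowercase entry for the case-insensitive fallback match.
		paths[strings.ToLower(name)] = []int{i}
	}
	// Copy-on-write: concurrent writers may duplicate work, readers never block.
	next := make(map[reflect.Type]map[string][]int, len(m)+1)
	for k, v := range m {
		next[k] = v
	}
	next[t] = paths
	cache.Store(next)
	return paths
}

func main() {
	type S struct {
		Name string `toml:"name"`
	}
	fmt.Println(fieldPaths(reflect.TypeOf(S{}))["name"]) // [0]
}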
-func (c *Iterator) Next() bool { - if !c.started { - c.started = true - } else if c.node.Valid() { - c.node = c.node.Next() - } - return c.node.Valid() -} - -// IsLast returns true if the current node of the iterator is the last -// one. Subsequent calls to Next() will return false. -func (c *Iterator) IsLast() bool { - return c.node.next == 0 -} - -// Node returns a pointer to the node pointed at by the iterator. -func (c *Iterator) Node() *Node { - return c.node -} - -// Node in a TOML expression AST. -// -// Depending on Kind, its sequence of children should be interpreted -// differently. -// -// - Array have one child per element in the array. -// - InlineTable have one child per key-value in the table (each of kind -// InlineTable). -// - KeyValue have at least two children. The first one is the value. The rest -// make a potentially dotted key. -// - Table and ArrayTable's children represent a dotted key (same as -// KeyValue, but without the first node being the value). -// -// When relevant, Raw describes the range of bytes this node is referring to in -// the input document. Use Parser.Raw() to retrieve the actual bytes. -type Node struct { - Kind Kind - Raw Range // Raw bytes from the input. - Data []byte // Node value (either allocated or referencing the input). - - // References to other nodes, as offsets in the backing array - // from this node. References can go backward, so those can be - // negative. - next int // 0 if last element - child int // 0 if no child -} - -// Range of bytes in the document. -type Range struct { - Offset uint32 - Length uint32 -} - -// Next returns a pointer to the next node, or nil if there is no next node. -func (n *Node) Next() *Node { - if n.next == 0 { - return nil - } - ptr := unsafe.Pointer(n) - size := unsafe.Sizeof(Node{}) - return (*Node)(danger.Stride(ptr, size, n.next)) -} - -// Child returns a pointer to the first child node of this node. Other children -// can be accessed calling Next on the first child. Returns an nil if this Node -// has no child. -func (n *Node) Child() *Node { - if n.child == 0 { - return nil - } - ptr := unsafe.Pointer(n) - size := unsafe.Sizeof(Node{}) - return (*Node)(danger.Stride(ptr, size, n.child)) -} - -// Valid returns true if the node's kind is set (not to Invalid). -func (n *Node) Valid() bool { - return n != nil -} - -// Key returns the children nodes making the Key on a supported node. Panics -// otherwise. They are guaranteed to be all be of the Kind Key. A simple key -// would return just one element. -func (n *Node) Key() Iterator { - switch n.Kind { - case KeyValue: - value := n.Child() - if !value.Valid() { - panic(fmt.Errorf("KeyValue should have at least two children")) - } - return Iterator{node: value.Next()} - case Table, ArrayTable: - return Iterator{node: n.Child()} - default: - panic(fmt.Errorf("Key() is not supported on a %s", n.Kind)) - } -} - -// Value returns a pointer to the value node of a KeyValue. -// Guaranteed to be non-nil. Panics if not called on a KeyValue node, -// or if the Children are malformed. -func (n *Node) Value() *Node { - return n.Child() -} - -// Children returns an iterator over a node's children. 
-func (n *Node) Children() Iterator { - return Iterator{node: n.Child()} -} diff --git a/vendor/github.com/pelletier/go-toml/v2/unstable/builder.go b/vendor/github.com/pelletier/go-toml/v2/unstable/builder.go deleted file mode 100644 index 9538e30df..000000000 --- a/vendor/github.com/pelletier/go-toml/v2/unstable/builder.go +++ /dev/null @@ -1,71 +0,0 @@ -package unstable - -// root contains a full AST. -// -// It is immutable once constructed with Builder. -type root struct { - nodes []Node -} - -// Iterator over the top level nodes. -func (r *root) Iterator() Iterator { - it := Iterator{} - if len(r.nodes) > 0 { - it.node = &r.nodes[0] - } - return it -} - -func (r *root) at(idx reference) *Node { - return &r.nodes[idx] -} - -type reference int - -const invalidReference reference = -1 - -func (r reference) Valid() bool { - return r != invalidReference -} - -type builder struct { - tree root - lastIdx int -} - -func (b *builder) Tree() *root { - return &b.tree -} - -func (b *builder) NodeAt(ref reference) *Node { - return b.tree.at(ref) -} - -func (b *builder) Reset() { - b.tree.nodes = b.tree.nodes[:0] - b.lastIdx = 0 -} - -func (b *builder) Push(n Node) reference { - b.lastIdx = len(b.tree.nodes) - b.tree.nodes = append(b.tree.nodes, n) - return reference(b.lastIdx) -} - -func (b *builder) PushAndChain(n Node) reference { - newIdx := len(b.tree.nodes) - b.tree.nodes = append(b.tree.nodes, n) - if b.lastIdx >= 0 { - b.tree.nodes[b.lastIdx].next = newIdx - b.lastIdx - } - b.lastIdx = newIdx - return reference(b.lastIdx) -} - -func (b *builder) AttachChild(parent reference, child reference) { - b.tree.nodes[parent].child = int(child) - int(parent) -} - -func (b *builder) Chain(from reference, to reference) { - b.tree.nodes[from].next = int(to) - int(from) -} diff --git a/vendor/github.com/pelletier/go-toml/v2/unstable/doc.go b/vendor/github.com/pelletier/go-toml/v2/unstable/doc.go deleted file mode 100644 index 7ff26c53c..000000000 --- a/vendor/github.com/pelletier/go-toml/v2/unstable/doc.go +++ /dev/null @@ -1,3 +0,0 @@ -// Package unstable provides APIs that do not meet the backward compatibility -// guarantees yet. -package unstable diff --git a/vendor/github.com/pelletier/go-toml/v2/unstable/kind.go b/vendor/github.com/pelletier/go-toml/v2/unstable/kind.go deleted file mode 100644 index ff9df1bef..000000000 --- a/vendor/github.com/pelletier/go-toml/v2/unstable/kind.go +++ /dev/null @@ -1,71 +0,0 @@ -package unstable - -import "fmt" - -// Kind represents the type of TOML structure contained in a given Node. -type Kind int - -const ( - // Meta - Invalid Kind = iota - Comment - Key - - // Top level structures - Table - ArrayTable - KeyValue - - // Containers values - Array - InlineTable - - // Values - String - Bool - Float - Integer - LocalDate - LocalTime - LocalDateTime - DateTime -) - -// String implementation of fmt.Stringer. 
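The builder above stores every Node in one backing slice and records sibling/child links as offsets relative to the node, so appending never invalidates existing links. A safe, index-based sketch of that arena layout; the vendored code walks the offsets with unsafe pointer strides via danger.Stride instead:

package main

import "fmt"

type node struct {
	data  string
	next  int // relative offset to the next sibling, 0 if none
	child int // relative offset to the first child, 0 if none
}

type arena struct{ nodes []node }

// push appends a node and returns its index in the backing slice.
func (a *arena) push(data string) int {
	a.nodes = append(a.nodes, node{data: data})
	return len(a.nodes) - 1
}

// chain links two nodes as siblings using a relative offset.
func (a *arena) chain(from, to int) { a.nodes[from].next = to - from }

// walk visits a node and its siblings by following the relative offsets.
func (a *arena) walk(i int, visit func(string)) {
	for {
		visit(a.nodes[i].data)
		if a.nodes[i].next == 0 {
			return
		}
		i += a.nodes[i].next
	}
}

func main() {
	var a arena
	first := a.push("key")
	second := a.push("value")
	a.chain(first, second)
	a.walk(first, func(s string) { fmt.Println(s) })
}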
-func (k Kind) String() string { - switch k { - case Invalid: - return "Invalid" - case Comment: - return "Comment" - case Key: - return "Key" - case Table: - return "Table" - case ArrayTable: - return "ArrayTable" - case KeyValue: - return "KeyValue" - case Array: - return "Array" - case InlineTable: - return "InlineTable" - case String: - return "String" - case Bool: - return "Bool" - case Float: - return "Float" - case Integer: - return "Integer" - case LocalDate: - return "LocalDate" - case LocalTime: - return "LocalTime" - case LocalDateTime: - return "LocalDateTime" - case DateTime: - return "DateTime" - } - panic(fmt.Errorf("Kind.String() not implemented for '%d'", k)) -} diff --git a/vendor/github.com/pelletier/go-toml/v2/unstable/parser.go b/vendor/github.com/pelletier/go-toml/v2/unstable/parser.go deleted file mode 100644 index 50358a44f..000000000 --- a/vendor/github.com/pelletier/go-toml/v2/unstable/parser.go +++ /dev/null @@ -1,1245 +0,0 @@ -package unstable - -import ( - "bytes" - "fmt" - "unicode" - - "github.com/pelletier/go-toml/v2/internal/characters" - "github.com/pelletier/go-toml/v2/internal/danger" -) - -// ParserError describes an error relative to the content of the document. -// -// It cannot outlive the instance of Parser it refers to, and may cause panics -// if the parser is reset. -type ParserError struct { - Highlight []byte - Message string - Key []string // optional -} - -// Error is the implementation of the error interface. -func (e *ParserError) Error() string { - return e.Message -} - -// NewParserError is a convenience function to create a ParserError -// -// Warning: Highlight needs to be a subslice of Parser.data, so only slices -// returned by Parser.Raw are valid candidates. -func NewParserError(highlight []byte, format string, args ...interface{}) error { - return &ParserError{ - Highlight: highlight, - Message: fmt.Errorf(format, args...).Error(), - } -} - -// Parser scans over a TOML-encoded document and generates an iterative AST. -// -// To prime the Parser, first reset it with the contents of a TOML document. -// Then, process all top-level expressions sequentially. See Example. -// -// Don't forget to check Error() after you're done parsing. -// -// Each top-level expression needs to be fully processed before calling -// NextExpression() again. Otherwise, calls to various Node methods may panic if -// the parser has moved on the next expression. -// -// For performance reasons, go-toml doesn't make a copy of the input bytes to -// the parser. Make sure to copy all the bytes you need to outlive the slice -// given to the parser. -type Parser struct { - data []byte - builder builder - ref reference - left []byte - err error - first bool - - KeepComments bool -} - -// Data returns the slice provided to the last call to Reset. -func (p *Parser) Data() []byte { - return p.data -} - -// Range returns a range description that corresponds to a given slice of the -// input. If the argument is not a subslice of the parser input, this function -// panics. -func (p *Parser) Range(b []byte) Range { - return Range{ - Offset: uint32(danger.SubsliceOffset(p.data, b)), - Length: uint32(len(b)), - } -} - -// Raw returns the slice corresponding to the bytes in the given range. -func (p *Parser) Raw(raw Range) []byte { - return p.data[raw.Offset : raw.Offset+raw.Length] -} - -// Reset brings the parser to its initial state for a given input. It wipes an -// reuses internal storage to reduce allocation. 
-func (p *Parser) Reset(b []byte) { - p.builder.Reset() - p.ref = invalidReference - p.data = b - p.left = b - p.err = nil - p.first = true -} - -// NextExpression parses the next top-level expression. If an expression was -// successfully parsed, it returns true. If the parser is at the end of the -// document or an error occurred, it returns false. -// -// Retrieve the parsed expression with Expression(). -func (p *Parser) NextExpression() bool { - if len(p.left) == 0 || p.err != nil { - return false - } - - p.builder.Reset() - p.ref = invalidReference - - for { - if len(p.left) == 0 || p.err != nil { - return false - } - - if !p.first { - p.left, p.err = p.parseNewline(p.left) - } - - if len(p.left) == 0 || p.err != nil { - return false - } - - p.ref, p.left, p.err = p.parseExpression(p.left) - - if p.err != nil { - return false - } - - p.first = false - - if p.ref.Valid() { - return true - } - } -} - -// Expression returns a pointer to the node representing the last successfully -// parsed expression. -func (p *Parser) Expression() *Node { - return p.builder.NodeAt(p.ref) -} - -// Error returns any error that has occurred during parsing. -func (p *Parser) Error() error { - return p.err -} - -// Position describes a position in the input. -type Position struct { - // Number of bytes from the beginning of the input. - Offset int - // Line number, starting at 1. - Line int - // Column number, starting at 1. - Column int -} - -// Shape describes the position of a range in the input. -type Shape struct { - Start Position - End Position -} - -func (p *Parser) position(b []byte) Position { - offset := danger.SubsliceOffset(p.data, b) - - lead := p.data[:offset] - - return Position{ - Offset: offset, - Line: bytes.Count(lead, []byte{'\n'}) + 1, - Column: len(lead) - bytes.LastIndex(lead, []byte{'\n'}), - } -} - -// Shape returns the shape of the given range in the input. Will -// panic if the range is not a subslice of the input. 
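The Parser documentation above describes the intended loop: Reset with the document bytes, call NextExpression until it returns false, consume each Expression fully before the next call, then check Error. A sketch of that loop, assuming the upstream unstable package:

package main

import (
	"fmt"

	"github.com/pelletier/go-toml/v2/unstable"
)

func main() {
	doc := []byte("title = \"demo\"\n\n[owner]\nname = \"x\"\n")

	var p unstable.Parser
	p.Reset(doc)

	// Each iteration yields one top-level expression (KeyValue, Table, ...).
	for p.NextExpression() {
		e := p.Expression()
		fmt.Println(e.Kind)
	}
	if err := p.Error(); err != nil {
		fmt.Println("parse error:", err)
	}
}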
-func (p *Parser) Shape(r Range) Shape { - raw := p.Raw(r) - return Shape{ - Start: p.position(raw), - End: p.position(raw[r.Length:]), - } -} - -func (p *Parser) parseNewline(b []byte) ([]byte, error) { - if b[0] == '\n' { - return b[1:], nil - } - - if b[0] == '\r' { - _, rest, err := scanWindowsNewline(b) - return rest, err - } - - return nil, NewParserError(b[0:1], "expected newline but got %#U", b[0]) -} - -func (p *Parser) parseComment(b []byte) (reference, []byte, error) { - ref := invalidReference - data, rest, err := scanComment(b) - if p.KeepComments && err == nil { - ref = p.builder.Push(Node{ - Kind: Comment, - Raw: p.Range(data), - Data: data, - }) - } - return ref, rest, err -} - -func (p *Parser) parseExpression(b []byte) (reference, []byte, error) { - // expression = ws [ comment ] - // expression =/ ws keyval ws [ comment ] - // expression =/ ws table ws [ comment ] - ref := invalidReference - - b = p.parseWhitespace(b) - - if len(b) == 0 { - return ref, b, nil - } - - if b[0] == '#' { - ref, rest, err := p.parseComment(b) - return ref, rest, err - } - - if b[0] == '\n' || b[0] == '\r' { - return ref, b, nil - } - - var err error - if b[0] == '[' { - ref, b, err = p.parseTable(b) - } else { - ref, b, err = p.parseKeyval(b) - } - - if err != nil { - return ref, nil, err - } - - b = p.parseWhitespace(b) - - if len(b) > 0 && b[0] == '#' { - cref, rest, err := p.parseComment(b) - if cref != invalidReference { - p.builder.Chain(ref, cref) - } - return ref, rest, err - } - - return ref, b, nil -} - -func (p *Parser) parseTable(b []byte) (reference, []byte, error) { - // table = std-table / array-table - if len(b) > 1 && b[1] == '[' { - return p.parseArrayTable(b) - } - - return p.parseStdTable(b) -} - -func (p *Parser) parseArrayTable(b []byte) (reference, []byte, error) { - // array-table = array-table-open key array-table-close - // array-table-open = %x5B.5B ws ; [[ Double left square bracket - // array-table-close = ws %x5D.5D ; ]] Double right square bracket - ref := p.builder.Push(Node{ - Kind: ArrayTable, - }) - - b = b[2:] - b = p.parseWhitespace(b) - - k, b, err := p.parseKey(b) - if err != nil { - return ref, nil, err - } - - p.builder.AttachChild(ref, k) - b = p.parseWhitespace(b) - - b, err = expect(']', b) - if err != nil { - return ref, nil, err - } - - b, err = expect(']', b) - - return ref, b, err -} - -func (p *Parser) parseStdTable(b []byte) (reference, []byte, error) { - // std-table = std-table-open key std-table-close - // std-table-open = %x5B ws ; [ Left square bracket - // std-table-close = ws %x5D ; ] Right square bracket - ref := p.builder.Push(Node{ - Kind: Table, - }) - - b = b[1:] - b = p.parseWhitespace(b) - - key, b, err := p.parseKey(b) - if err != nil { - return ref, nil, err - } - - p.builder.AttachChild(ref, key) - - b = p.parseWhitespace(b) - - b, err = expect(']', b) - - return ref, b, err -} - -func (p *Parser) parseKeyval(b []byte) (reference, []byte, error) { - // keyval = key keyval-sep val - ref := p.builder.Push(Node{ - Kind: KeyValue, - }) - - key, b, err := p.parseKey(b) - if err != nil { - return invalidReference, nil, err - } - - // keyval-sep = ws %x3D ws ; = - - b = p.parseWhitespace(b) - - if len(b) == 0 { - return invalidReference, nil, NewParserError(b, "expected = after a key, but the document ends there") - } - - b, err = expect('=', b) - if err != nil { - return invalidReference, nil, err - } - - b = p.parseWhitespace(b) - - valRef, b, err := p.parseVal(b) - if err != nil { - return ref, b, err - } - - 
p.builder.Chain(valRef, key) - p.builder.AttachChild(ref, valRef) - - return ref, b, err -} - -//nolint:cyclop,funlen -func (p *Parser) parseVal(b []byte) (reference, []byte, error) { - // val = string / boolean / array / inline-table / date-time / float / integer - ref := invalidReference - - if len(b) == 0 { - return ref, nil, NewParserError(b, "expected value, not eof") - } - - var err error - c := b[0] - - switch c { - case '"': - var raw []byte - var v []byte - if scanFollowsMultilineBasicStringDelimiter(b) { - raw, v, b, err = p.parseMultilineBasicString(b) - } else { - raw, v, b, err = p.parseBasicString(b) - } - - if err == nil { - ref = p.builder.Push(Node{ - Kind: String, - Raw: p.Range(raw), - Data: v, - }) - } - - return ref, b, err - case '\'': - var raw []byte - var v []byte - if scanFollowsMultilineLiteralStringDelimiter(b) { - raw, v, b, err = p.parseMultilineLiteralString(b) - } else { - raw, v, b, err = p.parseLiteralString(b) - } - - if err == nil { - ref = p.builder.Push(Node{ - Kind: String, - Raw: p.Range(raw), - Data: v, - }) - } - - return ref, b, err - case 't': - if !scanFollowsTrue(b) { - return ref, nil, NewParserError(atmost(b, 4), "expected 'true'") - } - - ref = p.builder.Push(Node{ - Kind: Bool, - Data: b[:4], - }) - - return ref, b[4:], nil - case 'f': - if !scanFollowsFalse(b) { - return ref, nil, NewParserError(atmost(b, 5), "expected 'false'") - } - - ref = p.builder.Push(Node{ - Kind: Bool, - Data: b[:5], - }) - - return ref, b[5:], nil - case '[': - return p.parseValArray(b) - case '{': - return p.parseInlineTable(b) - default: - return p.parseIntOrFloatOrDateTime(b) - } -} - -func atmost(b []byte, n int) []byte { - if n >= len(b) { - return b - } - - return b[:n] -} - -func (p *Parser) parseLiteralString(b []byte) ([]byte, []byte, []byte, error) { - v, rest, err := scanLiteralString(b) - if err != nil { - return nil, nil, nil, err - } - - return v, v[1 : len(v)-1], rest, nil -} - -func (p *Parser) parseInlineTable(b []byte) (reference, []byte, error) { - // inline-table = inline-table-open [ inline-table-keyvals ] inline-table-close - // inline-table-open = %x7B ws ; { - // inline-table-close = ws %x7D ; } - // inline-table-sep = ws %x2C ws ; , Comma - // inline-table-keyvals = keyval [ inline-table-sep inline-table-keyvals ] - parent := p.builder.Push(Node{ - Kind: InlineTable, - Raw: p.Range(b[:1]), - }) - - first := true - - var child reference - - b = b[1:] - - var err error - - for len(b) > 0 { - previousB := b - b = p.parseWhitespace(b) - - if len(b) == 0 { - return parent, nil, NewParserError(previousB[:1], "inline table is incomplete") - } - - if b[0] == '}' { - break - } - - if !first { - b, err = expect(',', b) - if err != nil { - return parent, nil, err - } - b = p.parseWhitespace(b) - } - - var kv reference - - kv, b, err = p.parseKeyval(b) - if err != nil { - return parent, nil, err - } - - if first { - p.builder.AttachChild(parent, kv) - } else { - p.builder.Chain(child, kv) - } - child = kv - - first = false - } - - rest, err := expect('}', b) - - return parent, rest, err -} - -//nolint:funlen,cyclop -func (p *Parser) parseValArray(b []byte) (reference, []byte, error) { - // array = array-open [ array-values ] ws-comment-newline array-close - // array-open = %x5B ; [ - // array-close = %x5D ; ] - // array-values = ws-comment-newline val ws-comment-newline array-sep array-values - // array-values =/ ws-comment-newline val ws-comment-newline [ array-sep ] - // array-sep = %x2C ; , Comma - // ws-comment-newline = *( wschar / [ comment ] 
newline ) - arrayStart := b - b = b[1:] - - parent := p.builder.Push(Node{ - Kind: Array, - }) - - // First indicates whether the parser is looking for the first element - // (non-comment) of the array. - first := true - - lastChild := invalidReference - - addChild := func(valueRef reference) { - if lastChild == invalidReference { - p.builder.AttachChild(parent, valueRef) - } else { - p.builder.Chain(lastChild, valueRef) - } - lastChild = valueRef - } - - var err error - for len(b) > 0 { - cref := invalidReference - cref, b, err = p.parseOptionalWhitespaceCommentNewline(b) - if err != nil { - return parent, nil, err - } - - if cref != invalidReference { - addChild(cref) - } - - if len(b) == 0 { - return parent, nil, NewParserError(arrayStart[:1], "array is incomplete") - } - - if b[0] == ']' { - break - } - - if b[0] == ',' { - if first { - return parent, nil, NewParserError(b[0:1], "array cannot start with comma") - } - b = b[1:] - - cref, b, err = p.parseOptionalWhitespaceCommentNewline(b) - if err != nil { - return parent, nil, err - } - if cref != invalidReference { - addChild(cref) - } - } else if !first { - return parent, nil, NewParserError(b[0:1], "array elements must be separated by commas") - } - - // TOML allows trailing commas in arrays. - if len(b) > 0 && b[0] == ']' { - break - } - - var valueRef reference - valueRef, b, err = p.parseVal(b) - if err != nil { - return parent, nil, err - } - - addChild(valueRef) - - cref, b, err = p.parseOptionalWhitespaceCommentNewline(b) - if err != nil { - return parent, nil, err - } - if cref != invalidReference { - addChild(cref) - } - - first = false - } - - rest, err := expect(']', b) - - return parent, rest, err -} - -func (p *Parser) parseOptionalWhitespaceCommentNewline(b []byte) (reference, []byte, error) { - rootCommentRef := invalidReference - latestCommentRef := invalidReference - - addComment := func(ref reference) { - if rootCommentRef == invalidReference { - rootCommentRef = ref - } else if latestCommentRef == invalidReference { - p.builder.AttachChild(rootCommentRef, ref) - latestCommentRef = ref - } else { - p.builder.Chain(latestCommentRef, ref) - latestCommentRef = ref - } - } - - for len(b) > 0 { - var err error - b = p.parseWhitespace(b) - - if len(b) > 0 && b[0] == '#' { - var ref reference - ref, b, err = p.parseComment(b) - if err != nil { - return invalidReference, nil, err - } - if ref != invalidReference { - addComment(ref) - } - } - - if len(b) == 0 { - break - } - - if b[0] == '\n' || b[0] == '\r' { - b, err = p.parseNewline(b) - if err != nil { - return invalidReference, nil, err - } - } else { - break - } - } - - return rootCommentRef, b, nil -} - -func (p *Parser) parseMultilineLiteralString(b []byte) ([]byte, []byte, []byte, error) { - token, rest, err := scanMultilineLiteralString(b) - if err != nil { - return nil, nil, nil, err - } - - i := 3 - - // skip the immediate new line - if token[i] == '\n' { - i++ - } else if token[i] == '\r' && token[i+1] == '\n' { - i += 2 - } - - return token, token[i : len(token)-3], rest, err -} - -//nolint:funlen,gocognit,cyclop -func (p *Parser) parseMultilineBasicString(b []byte) ([]byte, []byte, []byte, error) { - // ml-basic-string = ml-basic-string-delim [ newline ] ml-basic-body - // ml-basic-string-delim - // ml-basic-string-delim = 3quotation-mark - // ml-basic-body = *mlb-content *( mlb-quotes 1*mlb-content ) [ mlb-quotes ] - // - // mlb-content = mlb-char / newline / mlb-escaped-nl - // mlb-char = mlb-unescaped / escaped - // mlb-quotes = 1*2quotation-mark - // 
mlb-unescaped = wschar / %x21 / %x23-5B / %x5D-7E / non-ascii - // mlb-escaped-nl = escape ws newline *( wschar / newline ) - token, escaped, rest, err := scanMultilineBasicString(b) - if err != nil { - return nil, nil, nil, err - } - - i := 3 - - // skip the immediate new line - if token[i] == '\n' { - i++ - } else if token[i] == '\r' && token[i+1] == '\n' { - i += 2 - } - - // fast path - startIdx := i - endIdx := len(token) - len(`"""`) - - if !escaped { - str := token[startIdx:endIdx] - verr := characters.Utf8TomlValidAlreadyEscaped(str) - if verr.Zero() { - return token, str, rest, nil - } - return nil, nil, nil, NewParserError(str[verr.Index:verr.Index+verr.Size], "invalid UTF-8") - } - - var builder bytes.Buffer - - // The scanner ensures that the token starts and ends with quotes and that - // escapes are balanced. - for i < len(token)-3 { - c := token[i] - - //nolint:nestif - if c == '\\' { - // When the last non-whitespace character on a line is an unescaped \, - // it will be trimmed along with all whitespace (including newlines) up - // to the next non-whitespace character or closing delimiter. - - isLastNonWhitespaceOnLine := false - j := 1 - findEOLLoop: - for ; j < len(token)-3-i; j++ { - switch token[i+j] { - case ' ', '\t': - continue - case '\r': - if token[i+j+1] == '\n' { - continue - } - case '\n': - isLastNonWhitespaceOnLine = true - } - break findEOLLoop - } - if isLastNonWhitespaceOnLine { - i += j - for ; i < len(token)-3; i++ { - c := token[i] - if !(c == '\n' || c == '\r' || c == ' ' || c == '\t') { - i-- - break - } - } - i++ - continue - } - - // handle escaping - i++ - c = token[i] - - switch c { - case '"', '\\': - builder.WriteByte(c) - case 'b': - builder.WriteByte('\b') - case 'f': - builder.WriteByte('\f') - case 'n': - builder.WriteByte('\n') - case 'r': - builder.WriteByte('\r') - case 't': - builder.WriteByte('\t') - case 'e': - builder.WriteByte(0x1B) - case 'u': - x, err := hexToRune(atmost(token[i+1:], 4), 4) - if err != nil { - return nil, nil, nil, err - } - builder.WriteRune(x) - i += 4 - case 'U': - x, err := hexToRune(atmost(token[i+1:], 8), 8) - if err != nil { - return nil, nil, nil, err - } - - builder.WriteRune(x) - i += 8 - default: - return nil, nil, nil, NewParserError(token[i:i+1], "invalid escaped character %#U", c) - } - i++ - } else { - size := characters.Utf8ValidNext(token[i:]) - if size == 0 { - return nil, nil, nil, NewParserError(token[i:i+1], "invalid character %#U", c) - } - builder.Write(token[i : i+size]) - i += size - } - } - - return token, builder.Bytes(), rest, nil -} - -func (p *Parser) parseKey(b []byte) (reference, []byte, error) { - // key = simple-key / dotted-key - // simple-key = quoted-key / unquoted-key - // - // unquoted-key = 1*( ALPHA / DIGIT / %x2D / %x5F ) ; A-Z / a-z / 0-9 / - / _ - // quoted-key = basic-string / literal-string - // dotted-key = simple-key 1*( dot-sep simple-key ) - // - // dot-sep = ws %x2E ws ; . Period - raw, key, b, err := p.parseSimpleKey(b) - if err != nil { - return invalidReference, nil, err - } - - ref := p.builder.Push(Node{ - Kind: Key, - Raw: p.Range(raw), - Data: key, - }) - - for { - b = p.parseWhitespace(b) - if len(b) > 0 && b[0] == '.' 
{ - b = p.parseWhitespace(b[1:]) - - raw, key, b, err = p.parseSimpleKey(b) - if err != nil { - return ref, nil, err - } - - p.builder.PushAndChain(Node{ - Kind: Key, - Raw: p.Range(raw), - Data: key, - }) - } else { - break - } - } - - return ref, b, nil -} - -func (p *Parser) parseSimpleKey(b []byte) (raw, key, rest []byte, err error) { - if len(b) == 0 { - return nil, nil, nil, NewParserError(b, "expected key but found none") - } - - // simple-key = quoted-key / unquoted-key - // unquoted-key = 1*( ALPHA / DIGIT / %x2D / %x5F ) ; A-Z / a-z / 0-9 / - / _ - // quoted-key = basic-string / literal-string - switch { - case b[0] == '\'': - return p.parseLiteralString(b) - case b[0] == '"': - return p.parseBasicString(b) - case isUnquotedKeyChar(b[0]): - key, rest = scanUnquotedKey(b) - return key, key, rest, nil - default: - return nil, nil, nil, NewParserError(b[0:1], "invalid character at start of key: %c", b[0]) - } -} - -//nolint:funlen,cyclop -func (p *Parser) parseBasicString(b []byte) ([]byte, []byte, []byte, error) { - // basic-string = quotation-mark *basic-char quotation-mark - // quotation-mark = %x22 ; " - // basic-char = basic-unescaped / escaped - // basic-unescaped = wschar / %x21 / %x23-5B / %x5D-7E / non-ascii - // escaped = escape escape-seq-char - // escape-seq-char = %x22 ; " quotation mark U+0022 - // escape-seq-char =/ %x5C ; \ reverse solidus U+005C - // escape-seq-char =/ %x62 ; b backspace U+0008 - // escape-seq-char =/ %x66 ; f form feed U+000C - // escape-seq-char =/ %x6E ; n line feed U+000A - // escape-seq-char =/ %x72 ; r carriage return U+000D - // escape-seq-char =/ %x74 ; t tab U+0009 - // escape-seq-char =/ %x75 4HEXDIG ; uXXXX U+XXXX - // escape-seq-char =/ %x55 8HEXDIG ; UXXXXXXXX U+XXXXXXXX - token, escaped, rest, err := scanBasicString(b) - if err != nil { - return nil, nil, nil, err - } - - startIdx := len(`"`) - endIdx := len(token) - len(`"`) - - // Fast path. If there is no escape sequence, the string should just be - // an UTF-8 encoded string, which is the same as Go. In that case, - // validate the string and return a direct reference to the buffer. - if !escaped { - str := token[startIdx:endIdx] - verr := characters.Utf8TomlValidAlreadyEscaped(str) - if verr.Zero() { - return token, str, rest, nil - } - return nil, nil, nil, NewParserError(str[verr.Index:verr.Index+verr.Size], "invalid UTF-8") - } - - i := startIdx - - var builder bytes.Buffer - - // The scanner ensures that the token starts and ends with quotes and that - // escapes are balanced. 
- for i < len(token)-1 { - c := token[i] - if c == '\\' { - i++ - c = token[i] - - switch c { - case '"', '\\': - builder.WriteByte(c) - case 'b': - builder.WriteByte('\b') - case 'f': - builder.WriteByte('\f') - case 'n': - builder.WriteByte('\n') - case 'r': - builder.WriteByte('\r') - case 't': - builder.WriteByte('\t') - case 'e': - builder.WriteByte(0x1B) - case 'u': - x, err := hexToRune(token[i+1:len(token)-1], 4) - if err != nil { - return nil, nil, nil, err - } - - builder.WriteRune(x) - i += 4 - case 'U': - x, err := hexToRune(token[i+1:len(token)-1], 8) - if err != nil { - return nil, nil, nil, err - } - - builder.WriteRune(x) - i += 8 - default: - return nil, nil, nil, NewParserError(token[i:i+1], "invalid escaped character %#U", c) - } - i++ - } else { - size := characters.Utf8ValidNext(token[i:]) - if size == 0 { - return nil, nil, nil, NewParserError(token[i:i+1], "invalid character %#U", c) - } - builder.Write(token[i : i+size]) - i += size - } - } - - return token, builder.Bytes(), rest, nil -} - -func hexToRune(b []byte, length int) (rune, error) { - if len(b) < length { - return -1, NewParserError(b, "unicode point needs %d character, not %d", length, len(b)) - } - b = b[:length] - - var r uint32 - for i, c := range b { - d := uint32(0) - switch { - case '0' <= c && c <= '9': - d = uint32(c - '0') - case 'a' <= c && c <= 'f': - d = uint32(c - 'a' + 10) - case 'A' <= c && c <= 'F': - d = uint32(c - 'A' + 10) - default: - return -1, NewParserError(b[i:i+1], "non-hex character") - } - r = r*16 + d - } - - if r > unicode.MaxRune || 0xD800 <= r && r < 0xE000 { - return -1, NewParserError(b, "escape sequence is invalid Unicode code point") - } - - return rune(r), nil -} - -func (p *Parser) parseWhitespace(b []byte) []byte { - // ws = *wschar - // wschar = %x20 ; Space - // wschar =/ %x09 ; Horizontal tab - _, rest := scanWhitespace(b) - - return rest -} - -//nolint:cyclop -func (p *Parser) parseIntOrFloatOrDateTime(b []byte) (reference, []byte, error) { - switch b[0] { - case 'i': - if !scanFollowsInf(b) { - return invalidReference, nil, NewParserError(atmost(b, 3), "expected 'inf'") - } - - return p.builder.Push(Node{ - Kind: Float, - Data: b[:3], - Raw: p.Range(b[:3]), - }), b[3:], nil - case 'n': - if !scanFollowsNan(b) { - return invalidReference, nil, NewParserError(atmost(b, 3), "expected 'nan'") - } - - return p.builder.Push(Node{ - Kind: Float, - Data: b[:3], - Raw: p.Range(b[:3]), - }), b[3:], nil - case '+', '-': - return p.scanIntOrFloat(b) - } - - if len(b) < 3 { - return p.scanIntOrFloat(b) - } - - s := 5 - if len(b) < s { - s = len(b) - } - - for idx, c := range b[:s] { - if isDigit(c) { - continue - } - - if idx == 2 && c == ':' || (idx == 4 && c == '-') { - return p.scanDateTime(b) - } - - break - } - - return p.scanIntOrFloat(b) -} - -func (p *Parser) scanDateTime(b []byte) (reference, []byte, error) { - // scans for contiguous characters in [0-9T:Z.+-], and up to one space if - // followed by a digit. 
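hexToRune above folds the hex digits of a \uXXXX or \UXXXXXXXX escape into a code point and rejects surrogate halves and values past unicode.MaxRune. A standalone sketch of the same conversion:

package main

import (
	"fmt"
	"unicode"
)

func hexToRune(b []byte, length int) (rune, error) {
	if len(b) < length {
		return -1, fmt.Errorf("unicode point needs %d characters, not %d", length, len(b))
	}
	var r uint32
	for _, c := range b[:length] {
		var d uint32
		switch {
		case '0' <= c && c <= '9':
			d = uint32(c - '0')
		case 'a' <= c && c <= 'f':
			d = uint32(c - 'a' + 10)
		case 'A' <= c && c <= 'F':
			d = uint32(c - 'A' + 10)
		default:
			return -1, fmt.Errorf("non-hex character %q", c)
		}
		r = r*16 + d
	}
	// Reject surrogate halves and anything beyond the Unicode range.
	if r > unicode.MaxRune || (0xD800 <= r && r < 0xE000) {
		return -1, fmt.Errorf("escape sequence is an invalid Unicode code point")
	}
	return rune(r), nil
}

func main() {
	fmt.Println(hexToRune([]byte("00e9"), 4)) // 233 <nil> (é)
}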
- hasDate := false - hasTime := false - hasTz := false - seenSpace := false - - i := 0 -byteLoop: - for ; i < len(b); i++ { - c := b[i] - - switch { - case isDigit(c): - case c == '-': - hasDate = true - const minOffsetOfTz = 8 - if i >= minOffsetOfTz { - hasTz = true - } - case c == 'T' || c == 't' || c == ':' || c == '.': - hasTime = true - case c == '+' || c == '-' || c == 'Z' || c == 'z': - hasTz = true - case c == ' ': - if !seenSpace && i+1 < len(b) && isDigit(b[i+1]) { - i += 2 - // Avoid reaching past the end of the document in case the time - // is malformed. See TestIssue585. - if i >= len(b) { - i-- - } - seenSpace = true - hasTime = true - } else { - break byteLoop - } - default: - break byteLoop - } - } - - var kind Kind - - if hasTime { - if hasDate { - if hasTz { - kind = DateTime - } else { - kind = LocalDateTime - } - } else { - kind = LocalTime - } - } else { - kind = LocalDate - } - - return p.builder.Push(Node{ - Kind: kind, - Data: b[:i], - }), b[i:], nil -} - -//nolint:funlen,gocognit,cyclop -func (p *Parser) scanIntOrFloat(b []byte) (reference, []byte, error) { - i := 0 - - if len(b) > 2 && b[0] == '0' && b[1] != '.' && b[1] != 'e' && b[1] != 'E' { - var isValidRune validRuneFn - - switch b[1] { - case 'x': - isValidRune = isValidHexRune - case 'o': - isValidRune = isValidOctalRune - case 'b': - isValidRune = isValidBinaryRune - default: - i++ - } - - if isValidRune != nil { - i += 2 - for ; i < len(b); i++ { - if !isValidRune(b[i]) { - break - } - } - } - - return p.builder.Push(Node{ - Kind: Integer, - Data: b[:i], - Raw: p.Range(b[:i]), - }), b[i:], nil - } - - isFloat := false - - for ; i < len(b); i++ { - c := b[i] - - if c >= '0' && c <= '9' || c == '+' || c == '-' || c == '_' { - continue - } - - if c == '.' || c == 'e' || c == 'E' { - isFloat = true - - continue - } - - if c == 'i' { - if scanFollowsInf(b[i:]) { - return p.builder.Push(Node{ - Kind: Float, - Data: b[:i+3], - Raw: p.Range(b[:i+3]), - }), b[i+3:], nil - } - - return invalidReference, nil, NewParserError(b[i:i+1], "unexpected character 'i' while scanning for a number") - } - - if c == 'n' { - if scanFollowsNan(b[i:]) { - return p.builder.Push(Node{ - Kind: Float, - Data: b[:i+3], - Raw: p.Range(b[:i+3]), - }), b[i+3:], nil - } - - return invalidReference, nil, NewParserError(b[i:i+1], "unexpected character 'n' while scanning for a number") - } - - break - } - - if i == 0 { - return invalidReference, b, NewParserError(b, "incomplete number") - } - - kind := Integer - - if isFloat { - kind = Float - } - - return p.builder.Push(Node{ - Kind: kind, - Data: b[:i], - Raw: p.Range(b[:i]), - }), b[i:], nil -} - -func isDigit(r byte) bool { - return r >= '0' && r <= '9' -} - -type validRuneFn func(r byte) bool - -func isValidHexRune(r byte) bool { - return r >= 'a' && r <= 'f' || - r >= 'A' && r <= 'F' || - r >= '0' && r <= '9' || - r == '_' -} - -func isValidOctalRune(r byte) bool { - return r >= '0' && r <= '7' || r == '_' -} - -func isValidBinaryRune(r byte) bool { - return r == '0' || r == '1' || r == '_' -} - -func expect(x byte, b []byte) ([]byte, error) { - if len(b) == 0 { - return nil, NewParserError(b, "expected character %c but the document ended here", x) - } - - if b[0] != x { - return nil, NewParserError(b[0:1], "expected character %c", x) - } - - return b[1:], nil -} diff --git a/vendor/github.com/pelletier/go-toml/v2/unstable/scanner.go b/vendor/github.com/pelletier/go-toml/v2/unstable/scanner.go deleted file mode 100644 index 0512181d2..000000000 --- 
a/vendor/github.com/pelletier/go-toml/v2/unstable/scanner.go +++ /dev/null @@ -1,270 +0,0 @@ -package unstable - -import "github.com/pelletier/go-toml/v2/internal/characters" - -func scanFollows(b []byte, pattern string) bool { - n := len(pattern) - - return len(b) >= n && string(b[:n]) == pattern -} - -func scanFollowsMultilineBasicStringDelimiter(b []byte) bool { - return scanFollows(b, `"""`) -} - -func scanFollowsMultilineLiteralStringDelimiter(b []byte) bool { - return scanFollows(b, `'''`) -} - -func scanFollowsTrue(b []byte) bool { - return scanFollows(b, `true`) -} - -func scanFollowsFalse(b []byte) bool { - return scanFollows(b, `false`) -} - -func scanFollowsInf(b []byte) bool { - return scanFollows(b, `inf`) -} - -func scanFollowsNan(b []byte) bool { - return scanFollows(b, `nan`) -} - -func scanUnquotedKey(b []byte) ([]byte, []byte) { - // unquoted-key = 1*( ALPHA / DIGIT / %x2D / %x5F ) ; A-Z / a-z / 0-9 / - / _ - for i := 0; i < len(b); i++ { - if !isUnquotedKeyChar(b[i]) { - return b[:i], b[i:] - } - } - - return b, b[len(b):] -} - -func isUnquotedKeyChar(r byte) bool { - return (r >= 'A' && r <= 'Z') || (r >= 'a' && r <= 'z') || (r >= '0' && r <= '9') || r == '-' || r == '_' -} - -func scanLiteralString(b []byte) ([]byte, []byte, error) { - // literal-string = apostrophe *literal-char apostrophe - // apostrophe = %x27 ; ' apostrophe - // literal-char = %x09 / %x20-26 / %x28-7E / non-ascii - for i := 1; i < len(b); { - switch b[i] { - case '\'': - return b[:i+1], b[i+1:], nil - case '\n', '\r': - return nil, nil, NewParserError(b[i:i+1], "literal strings cannot have new lines") - } - size := characters.Utf8ValidNext(b[i:]) - if size == 0 { - return nil, nil, NewParserError(b[i:i+1], "invalid character") - } - i += size - } - - return nil, nil, NewParserError(b[len(b):], "unterminated literal string") -} - -func scanMultilineLiteralString(b []byte) ([]byte, []byte, error) { - // ml-literal-string = ml-literal-string-delim [ newline ] ml-literal-body - // ml-literal-string-delim - // ml-literal-string-delim = 3apostrophe - // ml-literal-body = *mll-content *( mll-quotes 1*mll-content ) [ mll-quotes ] - // - // mll-content = mll-char / newline - // mll-char = %x09 / %x20-26 / %x28-7E / non-ascii - // mll-quotes = 1*2apostrophe - for i := 3; i < len(b); { - switch b[i] { - case '\'': - if scanFollowsMultilineLiteralStringDelimiter(b[i:]) { - i += 3 - - // At that point we found 3 apostrophe, and i is the - // index of the byte after the third one. The scanner - // needs to be eager, because there can be an extra 2 - // apostrophe that can be accepted at the end of the - // string. 
- - if i >= len(b) || b[i] != '\'' { - return b[:i], b[i:], nil - } - i++ - - if i >= len(b) || b[i] != '\'' { - return b[:i], b[i:], nil - } - i++ - - if i < len(b) && b[i] == '\'' { - return nil, nil, NewParserError(b[i-3:i+1], "''' not allowed in multiline literal string") - } - - return b[:i], b[i:], nil - } - case '\r': - if len(b) < i+2 { - return nil, nil, NewParserError(b[len(b):], `need a \n after \r`) - } - if b[i+1] != '\n' { - return nil, nil, NewParserError(b[i:i+2], `need a \n after \r`) - } - i += 2 // skip the \n - continue - } - size := characters.Utf8ValidNext(b[i:]) - if size == 0 { - return nil, nil, NewParserError(b[i:i+1], "invalid character") - } - i += size - } - - return nil, nil, NewParserError(b[len(b):], `multiline literal string not terminated by '''`) -} - -func scanWindowsNewline(b []byte) ([]byte, []byte, error) { - const lenCRLF = 2 - if len(b) < lenCRLF { - return nil, nil, NewParserError(b, "windows new line expected") - } - - if b[1] != '\n' { - return nil, nil, NewParserError(b, `windows new line should be \r\n`) - } - - return b[:lenCRLF], b[lenCRLF:], nil -} - -func scanWhitespace(b []byte) ([]byte, []byte) { - for i := 0; i < len(b); i++ { - switch b[i] { - case ' ', '\t': - continue - default: - return b[:i], b[i:] - } - } - - return b, b[len(b):] -} - -func scanComment(b []byte) ([]byte, []byte, error) { - // comment-start-symbol = %x23 ; # - // non-ascii = %x80-D7FF / %xE000-10FFFF - // non-eol = %x09 / %x20-7F / non-ascii - // - // comment = comment-start-symbol *non-eol - - for i := 1; i < len(b); { - if b[i] == '\n' { - return b[:i], b[i:], nil - } - if b[i] == '\r' { - if i+1 < len(b) && b[i+1] == '\n' { - return b[:i+1], b[i+1:], nil - } - return nil, nil, NewParserError(b[i:i+1], "invalid character in comment") - } - size := characters.Utf8ValidNext(b[i:]) - if size == 0 { - return nil, nil, NewParserError(b[i:i+1], "invalid character in comment") - } - - i += size - } - - return b, b[len(b):], nil -} - -func scanBasicString(b []byte) ([]byte, bool, []byte, error) { - // basic-string = quotation-mark *basic-char quotation-mark - // quotation-mark = %x22 ; " - // basic-char = basic-unescaped / escaped - // basic-unescaped = wschar / %x21 / %x23-5B / %x5D-7E / non-ascii - // escaped = escape escape-seq-char - escaped := false - i := 1 - - for ; i < len(b); i++ { - switch b[i] { - case '"': - return b[:i+1], escaped, b[i+1:], nil - case '\n', '\r': - return nil, escaped, nil, NewParserError(b[i:i+1], "basic strings cannot have new lines") - case '\\': - if len(b) < i+2 { - return nil, escaped, nil, NewParserError(b[i:i+1], "need a character after \\") - } - escaped = true - i++ // skip the next character - } - } - - return nil, escaped, nil, NewParserError(b[len(b):], `basic string not terminated by "`) -} - -func scanMultilineBasicString(b []byte) ([]byte, bool, []byte, error) { - // ml-basic-string = ml-basic-string-delim [ newline ] ml-basic-body - // ml-basic-string-delim - // ml-basic-string-delim = 3quotation-mark - // ml-basic-body = *mlb-content *( mlb-quotes 1*mlb-content ) [ mlb-quotes ] - // - // mlb-content = mlb-char / newline / mlb-escaped-nl - // mlb-char = mlb-unescaped / escaped - // mlb-quotes = 1*2quotation-mark - // mlb-unescaped = wschar / %x21 / %x23-5B / %x5D-7E / non-ascii - // mlb-escaped-nl = escape ws newline *( wschar / newline ) - - escaped := false - i := 3 - - for ; i < len(b); i++ { - switch b[i] { - case '"': - if scanFollowsMultilineBasicStringDelimiter(b[i:]) { - i += 3 - - // At that point we found 3 
apostrophe, and i is the - // index of the byte after the third one. The scanner - // needs to be eager, because there can be an extra 2 - // apostrophe that can be accepted at the end of the - // string. - - if i >= len(b) || b[i] != '"' { - return b[:i], escaped, b[i:], nil - } - i++ - - if i >= len(b) || b[i] != '"' { - return b[:i], escaped, b[i:], nil - } - i++ - - if i < len(b) && b[i] == '"' { - return nil, escaped, nil, NewParserError(b[i-3:i+1], `""" not allowed in multiline basic string`) - } - - return b[:i], escaped, b[i:], nil - } - case '\\': - if len(b) < i+2 { - return nil, escaped, nil, NewParserError(b[len(b):], "need a character after \\") - } - escaped = true - i++ // skip the next character - case '\r': - if len(b) < i+2 { - return nil, escaped, nil, NewParserError(b[len(b):], `need a \n after \r`) - } - if b[i+1] != '\n' { - return nil, escaped, nil, NewParserError(b[i:i+2], `need a \n after \r`) - } - i++ // skip the \n - } - } - - return nil, escaped, nil, NewParserError(b[len(b):], `multiline basic string not terminated by """`) -} diff --git a/vendor/github.com/pmezard/go-difflib/LICENSE b/vendor/github.com/pmezard/go-difflib/LICENSE deleted file mode 100644 index c67dad612..000000000 --- a/vendor/github.com/pmezard/go-difflib/LICENSE +++ /dev/null @@ -1,27 +0,0 @@ -Copyright (c) 2013, Patrick Mezard -All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are -met: - - Redistributions of source code must retain the above copyright -notice, this list of conditions and the following disclaimer. - Redistributions in binary form must reproduce the above copyright -notice, this list of conditions and the following disclaimer in the -documentation and/or other materials provided with the distribution. - The names of its contributors may not be used to endorse or promote -products derived from this software without specific prior written -permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS -IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED -TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A -PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED -TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR -PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF -LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING -NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS -SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/vendor/github.com/pmezard/go-difflib/difflib/difflib.go b/vendor/github.com/pmezard/go-difflib/difflib/difflib.go deleted file mode 100644 index 003e99fad..000000000 --- a/vendor/github.com/pmezard/go-difflib/difflib/difflib.go +++ /dev/null @@ -1,772 +0,0 @@ -// Package difflib is a partial port of Python difflib module. -// -// It provides tools to compare sequences of strings and generate textual diffs. -// -// The following class and functions have been ported: -// -// - SequenceMatcher -// -// - unified_diff -// -// - context_diff -// -// Getting unified diffs was the main goal of the port. 
Keep in mind this code -// is mostly suitable to output text differences in a human friendly way, there -// are no guarantees generated diffs are consumable by patch(1). -package difflib - -import ( - "bufio" - "bytes" - "fmt" - "io" - "strings" -) - -func min(a, b int) int { - if a < b { - return a - } - return b -} - -func max(a, b int) int { - if a > b { - return a - } - return b -} - -func calculateRatio(matches, length int) float64 { - if length > 0 { - return 2.0 * float64(matches) / float64(length) - } - return 1.0 -} - -type Match struct { - A int - B int - Size int -} - -type OpCode struct { - Tag byte - I1 int - I2 int - J1 int - J2 int -} - -// SequenceMatcher compares sequence of strings. The basic -// algorithm predates, and is a little fancier than, an algorithm -// published in the late 1980's by Ratcliff and Obershelp under the -// hyperbolic name "gestalt pattern matching". The basic idea is to find -// the longest contiguous matching subsequence that contains no "junk" -// elements (R-O doesn't address junk). The same idea is then applied -// recursively to the pieces of the sequences to the left and to the right -// of the matching subsequence. This does not yield minimal edit -// sequences, but does tend to yield matches that "look right" to people. -// -// SequenceMatcher tries to compute a "human-friendly diff" between two -// sequences. Unlike e.g. UNIX(tm) diff, the fundamental notion is the -// longest *contiguous* & junk-free matching subsequence. That's what -// catches peoples' eyes. The Windows(tm) windiff has another interesting -// notion, pairing up elements that appear uniquely in each sequence. -// That, and the method here, appear to yield more intuitive difference -// reports than does diff. This method appears to be the least vulnerable -// to synching up on blocks of "junk lines", though (like blank lines in -// ordinary text files, or maybe "
<P>
" lines in HTML files). That may be -// because this is the only method of the 3 that has a *concept* of -// "junk" . -// -// Timing: Basic R-O is cubic time worst case and quadratic time expected -// case. SequenceMatcher is quadratic time for the worst case and has -// expected-case behavior dependent in a complicated way on how many -// elements the sequences have in common; best case time is linear. -type SequenceMatcher struct { - a []string - b []string - b2j map[string][]int - IsJunk func(string) bool - autoJunk bool - bJunk map[string]struct{} - matchingBlocks []Match - fullBCount map[string]int - bPopular map[string]struct{} - opCodes []OpCode -} - -func NewMatcher(a, b []string) *SequenceMatcher { - m := SequenceMatcher{autoJunk: true} - m.SetSeqs(a, b) - return &m -} - -func NewMatcherWithJunk(a, b []string, autoJunk bool, - isJunk func(string) bool) *SequenceMatcher { - - m := SequenceMatcher{IsJunk: isJunk, autoJunk: autoJunk} - m.SetSeqs(a, b) - return &m -} - -// Set two sequences to be compared. -func (m *SequenceMatcher) SetSeqs(a, b []string) { - m.SetSeq1(a) - m.SetSeq2(b) -} - -// Set the first sequence to be compared. The second sequence to be compared is -// not changed. -// -// SequenceMatcher computes and caches detailed information about the second -// sequence, so if you want to compare one sequence S against many sequences, -// use .SetSeq2(s) once and call .SetSeq1(x) repeatedly for each of the other -// sequences. -// -// See also SetSeqs() and SetSeq2(). -func (m *SequenceMatcher) SetSeq1(a []string) { - if &a == &m.a { - return - } - m.a = a - m.matchingBlocks = nil - m.opCodes = nil -} - -// Set the second sequence to be compared. The first sequence to be compared is -// not changed. -func (m *SequenceMatcher) SetSeq2(b []string) { - if &b == &m.b { - return - } - m.b = b - m.matchingBlocks = nil - m.opCodes = nil - m.fullBCount = nil - m.chainB() -} - -func (m *SequenceMatcher) chainB() { - // Populate line -> index mapping - b2j := map[string][]int{} - for i, s := range m.b { - indices := b2j[s] - indices = append(indices, i) - b2j[s] = indices - } - - // Purge junk elements - m.bJunk = map[string]struct{}{} - if m.IsJunk != nil { - junk := m.bJunk - for s, _ := range b2j { - if m.IsJunk(s) { - junk[s] = struct{}{} - } - } - for s, _ := range junk { - delete(b2j, s) - } - } - - // Purge remaining popular elements - popular := map[string]struct{}{} - n := len(m.b) - if m.autoJunk && n >= 200 { - ntest := n/100 + 1 - for s, indices := range b2j { - if len(indices) > ntest { - popular[s] = struct{}{} - } - } - for s, _ := range popular { - delete(b2j, s) - } - } - m.bPopular = popular - m.b2j = b2j -} - -func (m *SequenceMatcher) isBJunk(s string) bool { - _, ok := m.bJunk[s] - return ok -} - -// Find longest matching block in a[alo:ahi] and b[blo:bhi]. -// -// If IsJunk is not defined: -// -// Return (i,j,k) such that a[i:i+k] is equal to b[j:j+k], where -// alo <= i <= i+k <= ahi -// blo <= j <= j+k <= bhi -// and for all (i',j',k') meeting those conditions, -// k >= k' -// i <= i' -// and if i == i', j <= j' -// -// In other words, of all maximal matching blocks, return one that -// starts earliest in a, and of all those maximal matching blocks that -// start earliest in a, return the one that starts earliest in b. -// -// If IsJunk is defined, first the longest matching block is -// determined as above, but with the additional restriction that no -// junk element appears in the block. 
Then that block is extended as -// far as possible by matching (only) junk elements on both sides. So -// the resulting block never matches on junk except as identical junk -// happens to be adjacent to an "interesting" match. -// -// If no blocks match, return (alo, blo, 0). -func (m *SequenceMatcher) findLongestMatch(alo, ahi, blo, bhi int) Match { - // CAUTION: stripping common prefix or suffix would be incorrect. - // E.g., - // ab - // acab - // Longest matching block is "ab", but if common prefix is - // stripped, it's "a" (tied with "b"). UNIX(tm) diff does so - // strip, so ends up claiming that ab is changed to acab by - // inserting "ca" in the middle. That's minimal but unintuitive: - // "it's obvious" that someone inserted "ac" at the front. - // Windiff ends up at the same place as diff, but by pairing up - // the unique 'b's and then matching the first two 'a's. - besti, bestj, bestsize := alo, blo, 0 - - // find longest junk-free match - // during an iteration of the loop, j2len[j] = length of longest - // junk-free match ending with a[i-1] and b[j] - j2len := map[int]int{} - for i := alo; i != ahi; i++ { - // look at all instances of a[i] in b; note that because - // b2j has no junk keys, the loop is skipped if a[i] is junk - newj2len := map[int]int{} - for _, j := range m.b2j[m.a[i]] { - // a[i] matches b[j] - if j < blo { - continue - } - if j >= bhi { - break - } - k := j2len[j-1] + 1 - newj2len[j] = k - if k > bestsize { - besti, bestj, bestsize = i-k+1, j-k+1, k - } - } - j2len = newj2len - } - - // Extend the best by non-junk elements on each end. In particular, - // "popular" non-junk elements aren't in b2j, which greatly speeds - // the inner loop above, but also means "the best" match so far - // doesn't contain any junk *or* popular non-junk elements. - for besti > alo && bestj > blo && !m.isBJunk(m.b[bestj-1]) && - m.a[besti-1] == m.b[bestj-1] { - besti, bestj, bestsize = besti-1, bestj-1, bestsize+1 - } - for besti+bestsize < ahi && bestj+bestsize < bhi && - !m.isBJunk(m.b[bestj+bestsize]) && - m.a[besti+bestsize] == m.b[bestj+bestsize] { - bestsize += 1 - } - - // Now that we have a wholly interesting match (albeit possibly - // empty!), we may as well suck up the matching junk on each - // side of it too. Can't think of a good reason not to, and it - // saves post-processing the (possibly considerable) expense of - // figuring out what to do with it. In the case of an empty - // interesting match, this is clearly the right thing to do, - // because no other kind of match is possible in the regions. - for besti > alo && bestj > blo && m.isBJunk(m.b[bestj-1]) && - m.a[besti-1] == m.b[bestj-1] { - besti, bestj, bestsize = besti-1, bestj-1, bestsize+1 - } - for besti+bestsize < ahi && bestj+bestsize < bhi && - m.isBJunk(m.b[bestj+bestsize]) && - m.a[besti+bestsize] == m.b[bestj+bestsize] { - bestsize += 1 - } - - return Match{A: besti, B: bestj, Size: bestsize} -} - -// Return list of triples describing matching subsequences. -// -// Each triple is of the form (i, j, n), and means that -// a[i:i+n] == b[j:j+n]. The triples are monotonically increasing in -// i and in j. It's also guaranteed that if (i, j, n) and (i', j', n') are -// adjacent triples in the list, and the second is not the last triple in the -// list, then i+n != i' or j+n != j'. IOW, adjacent triples never describe -// adjacent equal blocks. -// -// The last triple is a dummy, (len(a), len(b), 0), and is the only -// triple with n==0. 
-func (m *SequenceMatcher) GetMatchingBlocks() []Match { - if m.matchingBlocks != nil { - return m.matchingBlocks - } - - var matchBlocks func(alo, ahi, blo, bhi int, matched []Match) []Match - matchBlocks = func(alo, ahi, blo, bhi int, matched []Match) []Match { - match := m.findLongestMatch(alo, ahi, blo, bhi) - i, j, k := match.A, match.B, match.Size - if match.Size > 0 { - if alo < i && blo < j { - matched = matchBlocks(alo, i, blo, j, matched) - } - matched = append(matched, match) - if i+k < ahi && j+k < bhi { - matched = matchBlocks(i+k, ahi, j+k, bhi, matched) - } - } - return matched - } - matched := matchBlocks(0, len(m.a), 0, len(m.b), nil) - - // It's possible that we have adjacent equal blocks in the - // matching_blocks list now. - nonAdjacent := []Match{} - i1, j1, k1 := 0, 0, 0 - for _, b := range matched { - // Is this block adjacent to i1, j1, k1? - i2, j2, k2 := b.A, b.B, b.Size - if i1+k1 == i2 && j1+k1 == j2 { - // Yes, so collapse them -- this just increases the length of - // the first block by the length of the second, and the first - // block so lengthened remains the block to compare against. - k1 += k2 - } else { - // Not adjacent. Remember the first block (k1==0 means it's - // the dummy we started with), and make the second block the - // new block to compare against. - if k1 > 0 { - nonAdjacent = append(nonAdjacent, Match{i1, j1, k1}) - } - i1, j1, k1 = i2, j2, k2 - } - } - if k1 > 0 { - nonAdjacent = append(nonAdjacent, Match{i1, j1, k1}) - } - - nonAdjacent = append(nonAdjacent, Match{len(m.a), len(m.b), 0}) - m.matchingBlocks = nonAdjacent - return m.matchingBlocks -} - -// Return list of 5-tuples describing how to turn a into b. -// -// Each tuple is of the form (tag, i1, i2, j1, j2). The first tuple -// has i1 == j1 == 0, and remaining tuples have i1 == the i2 from the -// tuple preceding it, and likewise for j1 == the previous j2. -// -// The tags are characters, with these meanings: -// -// 'r' (replace): a[i1:i2] should be replaced by b[j1:j2] -// -// 'd' (delete): a[i1:i2] should be deleted, j1==j2 in this case. -// -// 'i' (insert): b[j1:j2] should be inserted at a[i1:i1], i1==i2 in this case. -// -// 'e' (equal): a[i1:i2] == b[j1:j2] -func (m *SequenceMatcher) GetOpCodes() []OpCode { - if m.opCodes != nil { - return m.opCodes - } - i, j := 0, 0 - matching := m.GetMatchingBlocks() - opCodes := make([]OpCode, 0, len(matching)) - for _, m := range matching { - // invariant: we've pumped out correct diffs to change - // a[:i] into b[:j], and the next matching block is - // a[ai:ai+size] == b[bj:bj+size]. So we need to pump - // out a diff to change a[i:ai] into b[j:bj], pump out - // the matching block, and move (i,j) beyond the match - ai, bj, size := m.A, m.B, m.Size - tag := byte(0) - if i < ai && j < bj { - tag = 'r' - } else if i < ai { - tag = 'd' - } else if j < bj { - tag = 'i' - } - if tag > 0 { - opCodes = append(opCodes, OpCode{tag, i, ai, j, bj}) - } - i, j = ai+size, bj+size - // the list of matching blocks is terminated by a - // sentinel with size 0 - if size > 0 { - opCodes = append(opCodes, OpCode{'e', ai, i, bj, j}) - } - } - m.opCodes = opCodes - return m.opCodes -} - -// Isolate change clusters by eliminating ranges with no changes. -// -// Return a generator of groups with up to n lines of context. -// Each group is in the same format as returned by GetOpCodes(). 
-func (m *SequenceMatcher) GetGroupedOpCodes(n int) [][]OpCode { - if n < 0 { - n = 3 - } - codes := m.GetOpCodes() - if len(codes) == 0 { - codes = []OpCode{OpCode{'e', 0, 1, 0, 1}} - } - // Fixup leading and trailing groups if they show no changes. - if codes[0].Tag == 'e' { - c := codes[0] - i1, i2, j1, j2 := c.I1, c.I2, c.J1, c.J2 - codes[0] = OpCode{c.Tag, max(i1, i2-n), i2, max(j1, j2-n), j2} - } - if codes[len(codes)-1].Tag == 'e' { - c := codes[len(codes)-1] - i1, i2, j1, j2 := c.I1, c.I2, c.J1, c.J2 - codes[len(codes)-1] = OpCode{c.Tag, i1, min(i2, i1+n), j1, min(j2, j1+n)} - } - nn := n + n - groups := [][]OpCode{} - group := []OpCode{} - for _, c := range codes { - i1, i2, j1, j2 := c.I1, c.I2, c.J1, c.J2 - // End the current group and start a new one whenever - // there is a large range with no changes. - if c.Tag == 'e' && i2-i1 > nn { - group = append(group, OpCode{c.Tag, i1, min(i2, i1+n), - j1, min(j2, j1+n)}) - groups = append(groups, group) - group = []OpCode{} - i1, j1 = max(i1, i2-n), max(j1, j2-n) - } - group = append(group, OpCode{c.Tag, i1, i2, j1, j2}) - } - if len(group) > 0 && !(len(group) == 1 && group[0].Tag == 'e') { - groups = append(groups, group) - } - return groups -} - -// Return a measure of the sequences' similarity (float in [0,1]). -// -// Where T is the total number of elements in both sequences, and -// M is the number of matches, this is 2.0*M / T. -// Note that this is 1 if the sequences are identical, and 0 if -// they have nothing in common. -// -// .Ratio() is expensive to compute if you haven't already computed -// .GetMatchingBlocks() or .GetOpCodes(), in which case you may -// want to try .QuickRatio() or .RealQuickRation() first to get an -// upper bound. -func (m *SequenceMatcher) Ratio() float64 { - matches := 0 - for _, m := range m.GetMatchingBlocks() { - matches += m.Size - } - return calculateRatio(matches, len(m.a)+len(m.b)) -} - -// Return an upper bound on ratio() relatively quickly. -// -// This isn't defined beyond that it is an upper bound on .Ratio(), and -// is faster to compute. -func (m *SequenceMatcher) QuickRatio() float64 { - // viewing a and b as multisets, set matches to the cardinality - // of their intersection; this counts the number of matches - // without regard to order, so is clearly an upper bound - if m.fullBCount == nil { - m.fullBCount = map[string]int{} - for _, s := range m.b { - m.fullBCount[s] = m.fullBCount[s] + 1 - } - } - - // avail[x] is the number of times x appears in 'b' less the - // number of times we've seen it in 'a' so far ... kinda - avail := map[string]int{} - matches := 0 - for _, s := range m.a { - n, ok := avail[s] - if !ok { - n = m.fullBCount[s] - } - avail[s] = n - 1 - if n > 0 { - matches += 1 - } - } - return calculateRatio(matches, len(m.a)+len(m.b)) -} - -// Return an upper bound on ratio() very quickly. -// -// This isn't defined beyond that it is an upper bound on .Ratio(), and -// is faster to compute than either .Ratio() or .QuickRatio(). 
-func (m *SequenceMatcher) RealQuickRatio() float64 { - la, lb := len(m.a), len(m.b) - return calculateRatio(min(la, lb), la+lb) -} - -// Convert range to the "ed" format -func formatRangeUnified(start, stop int) string { - // Per the diff spec at http://www.unix.org/single_unix_specification/ - beginning := start + 1 // lines start numbering with one - length := stop - start - if length == 1 { - return fmt.Sprintf("%d", beginning) - } - if length == 0 { - beginning -= 1 // empty ranges begin at line just before the range - } - return fmt.Sprintf("%d,%d", beginning, length) -} - -// Unified diff parameters -type UnifiedDiff struct { - A []string // First sequence lines - FromFile string // First file name - FromDate string // First file time - B []string // Second sequence lines - ToFile string // Second file name - ToDate string // Second file time - Eol string // Headers end of line, defaults to LF - Context int // Number of context lines -} - -// Compare two sequences of lines; generate the delta as a unified diff. -// -// Unified diffs are a compact way of showing line changes and a few -// lines of context. The number of context lines is set by 'n' which -// defaults to three. -// -// By default, the diff control lines (those with ---, +++, or @@) are -// created with a trailing newline. This is helpful so that inputs -// created from file.readlines() result in diffs that are suitable for -// file.writelines() since both the inputs and outputs have trailing -// newlines. -// -// For inputs that do not have trailing newlines, set the lineterm -// argument to "" so that the output will be uniformly newline free. -// -// The unidiff format normally has a header for filenames and modification -// times. Any or all of these may be specified using strings for -// 'fromfile', 'tofile', 'fromfiledate', and 'tofiledate'. -// The modification times are normally expressed in the ISO 8601 format. 
-func WriteUnifiedDiff(writer io.Writer, diff UnifiedDiff) error { - buf := bufio.NewWriter(writer) - defer buf.Flush() - wf := func(format string, args ...interface{}) error { - _, err := buf.WriteString(fmt.Sprintf(format, args...)) - return err - } - ws := func(s string) error { - _, err := buf.WriteString(s) - return err - } - - if len(diff.Eol) == 0 { - diff.Eol = "\n" - } - - started := false - m := NewMatcher(diff.A, diff.B) - for _, g := range m.GetGroupedOpCodes(diff.Context) { - if !started { - started = true - fromDate := "" - if len(diff.FromDate) > 0 { - fromDate = "\t" + diff.FromDate - } - toDate := "" - if len(diff.ToDate) > 0 { - toDate = "\t" + diff.ToDate - } - if diff.FromFile != "" || diff.ToFile != "" { - err := wf("--- %s%s%s", diff.FromFile, fromDate, diff.Eol) - if err != nil { - return err - } - err = wf("+++ %s%s%s", diff.ToFile, toDate, diff.Eol) - if err != nil { - return err - } - } - } - first, last := g[0], g[len(g)-1] - range1 := formatRangeUnified(first.I1, last.I2) - range2 := formatRangeUnified(first.J1, last.J2) - if err := wf("@@ -%s +%s @@%s", range1, range2, diff.Eol); err != nil { - return err - } - for _, c := range g { - i1, i2, j1, j2 := c.I1, c.I2, c.J1, c.J2 - if c.Tag == 'e' { - for _, line := range diff.A[i1:i2] { - if err := ws(" " + line); err != nil { - return err - } - } - continue - } - if c.Tag == 'r' || c.Tag == 'd' { - for _, line := range diff.A[i1:i2] { - if err := ws("-" + line); err != nil { - return err - } - } - } - if c.Tag == 'r' || c.Tag == 'i' { - for _, line := range diff.B[j1:j2] { - if err := ws("+" + line); err != nil { - return err - } - } - } - } - } - return nil -} - -// Like WriteUnifiedDiff but returns the diff a string. -func GetUnifiedDiffString(diff UnifiedDiff) (string, error) { - w := &bytes.Buffer{} - err := WriteUnifiedDiff(w, diff) - return string(w.Bytes()), err -} - -// Convert range to the "ed" format. -func formatRangeContext(start, stop int) string { - // Per the diff spec at http://www.unix.org/single_unix_specification/ - beginning := start + 1 // lines start numbering with one - length := stop - start - if length == 0 { - beginning -= 1 // empty ranges begin at line just before the range - } - if length <= 1 { - return fmt.Sprintf("%d", beginning) - } - return fmt.Sprintf("%d,%d", beginning, beginning+length-1) -} - -type ContextDiff UnifiedDiff - -// Compare two sequences of lines; generate the delta as a context diff. -// -// Context diffs are a compact way of showing line changes and a few -// lines of context. The number of context lines is set by diff.Context -// which defaults to three. -// -// By default, the diff control lines (those with *** or ---) are -// created with a trailing newline. -// -// For inputs that do not have trailing newlines, set the diff.Eol -// argument to "" so that the output will be uniformly newline free. -// -// The context diff format normally has a header for filenames and -// modification times. Any or all of these may be specified using -// strings for diff.FromFile, diff.ToFile, diff.FromDate, diff.ToDate. -// The modification times are normally expressed in the ISO 8601 format. -// If not specified, the strings default to blanks. 
-func WriteContextDiff(writer io.Writer, diff ContextDiff) error { - buf := bufio.NewWriter(writer) - defer buf.Flush() - var diffErr error - wf := func(format string, args ...interface{}) { - _, err := buf.WriteString(fmt.Sprintf(format, args...)) - if diffErr == nil && err != nil { - diffErr = err - } - } - ws := func(s string) { - _, err := buf.WriteString(s) - if diffErr == nil && err != nil { - diffErr = err - } - } - - if len(diff.Eol) == 0 { - diff.Eol = "\n" - } - - prefix := map[byte]string{ - 'i': "+ ", - 'd': "- ", - 'r': "! ", - 'e': " ", - } - - started := false - m := NewMatcher(diff.A, diff.B) - for _, g := range m.GetGroupedOpCodes(diff.Context) { - if !started { - started = true - fromDate := "" - if len(diff.FromDate) > 0 { - fromDate = "\t" + diff.FromDate - } - toDate := "" - if len(diff.ToDate) > 0 { - toDate = "\t" + diff.ToDate - } - if diff.FromFile != "" || diff.ToFile != "" { - wf("*** %s%s%s", diff.FromFile, fromDate, diff.Eol) - wf("--- %s%s%s", diff.ToFile, toDate, diff.Eol) - } - } - - first, last := g[0], g[len(g)-1] - ws("***************" + diff.Eol) - - range1 := formatRangeContext(first.I1, last.I2) - wf("*** %s ****%s", range1, diff.Eol) - for _, c := range g { - if c.Tag == 'r' || c.Tag == 'd' { - for _, cc := range g { - if cc.Tag == 'i' { - continue - } - for _, line := range diff.A[cc.I1:cc.I2] { - ws(prefix[cc.Tag] + line) - } - } - break - } - } - - range2 := formatRangeContext(first.J1, last.J2) - wf("--- %s ----%s", range2, diff.Eol) - for _, c := range g { - if c.Tag == 'r' || c.Tag == 'i' { - for _, cc := range g { - if cc.Tag == 'd' { - continue - } - for _, line := range diff.B[cc.J1:cc.J2] { - ws(prefix[cc.Tag] + line) - } - } - break - } - } - } - return diffErr -} - -// Like WriteContextDiff but returns the diff a string. -func GetContextDiffString(diff ContextDiff) (string, error) { - w := &bytes.Buffer{} - err := WriteContextDiff(w, diff) - return string(w.Bytes()), err -} - -// Split a string on "\n" while preserving them. The output can be used -// as input for UnifiedDiff and ContextDiff structures. -func SplitLines(s string) []string { - lines := strings.SplitAfter(s, "\n") - lines[len(lines)-1] += "\n" - return lines -} diff --git a/vendor/github.com/polyfloyd/go-errorlint/LICENSE b/vendor/github.com/polyfloyd/go-errorlint/LICENSE deleted file mode 100644 index b7f88cf1c..000000000 --- a/vendor/github.com/polyfloyd/go-errorlint/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -The MIT License (MIT) - -Copyright (c) 2019 polyfloyd - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. diff --git a/vendor/github.com/polyfloyd/go-errorlint/errorlint/allowed.go b/vendor/github.com/polyfloyd/go-errorlint/errorlint/allowed.go deleted file mode 100644 index 8bfb4c9b2..000000000 --- a/vendor/github.com/polyfloyd/go-errorlint/errorlint/allowed.go +++ /dev/null @@ -1,222 +0,0 @@ -package errorlint - -import ( - "fmt" - "go/ast" - "go/types" - "strings" -) - -var allowedErrors = []struct { - err string - fun string -}{ - // pkg/archive/tar - {err: "io.EOF", fun: "(*archive/tar.Reader).Next"}, - {err: "io.EOF", fun: "(*archive/tar.Reader).Read"}, - // pkg/bufio - {err: "io.EOF", fun: "(*bufio.Reader).Discard"}, - {err: "io.EOF", fun: "(*bufio.Reader).Peek"}, - {err: "io.EOF", fun: "(*bufio.Reader).Read"}, - {err: "io.EOF", fun: "(*bufio.Reader).ReadByte"}, - {err: "io.EOF", fun: "(*bufio.Reader).ReadBytes"}, - {err: "io.EOF", fun: "(*bufio.Reader).ReadLine"}, - {err: "io.EOF", fun: "(*bufio.Reader).ReadSlice"}, - {err: "io.EOF", fun: "(*bufio.Reader).ReadString"}, - {err: "io.EOF", fun: "(*bufio.Scanner).Scan"}, - // pkg/bytes - {err: "io.EOF", fun: "(*bytes.Buffer).Read"}, - {err: "io.EOF", fun: "(*bytes.Buffer).ReadByte"}, - {err: "io.EOF", fun: "(*bytes.Buffer).ReadBytes"}, - {err: "io.EOF", fun: "(*bytes.Buffer).ReadRune"}, - {err: "io.EOF", fun: "(*bytes.Buffer).ReadString"}, - {err: "io.EOF", fun: "(*bytes.Reader).Read"}, - {err: "io.EOF", fun: "(*bytes.Reader).ReadAt"}, - {err: "io.EOF", fun: "(*bytes.Reader).ReadByte"}, - {err: "io.EOF", fun: "(*bytes.Reader).ReadRune"}, - {err: "io.EOF", fun: "(*bytes.Reader).ReadString"}, - // pkg/database/sql - {err: "database/sql.ErrNoRows", fun: "(*database/sql.Row).Scan"}, - // pkg/debug/elf - {err: "io.EOF", fun: "debug/elf.Open"}, - {err: "io.EOF", fun: "debug/elf.NewFile"}, - // pkg/io - {err: "io.EOF", fun: "(io.ReadCloser).Read"}, - {err: "io.EOF", fun: "(io.Reader).Read"}, - {err: "io.EOF", fun: "(io.ReaderAt).ReadAt"}, - {err: "io.EOF", fun: "(*io.LimitedReader).Read"}, - {err: "io.EOF", fun: "(*io.SectionReader).Read"}, - {err: "io.EOF", fun: "(*io.SectionReader).ReadAt"}, - {err: "io.ErrClosedPipe", fun: "(*io.PipeWriter).Write"}, - {err: "io.ErrShortBuffer", fun: "io.ReadAtLeast"}, - {err: "io.ErrUnexpectedEOF", fun: "io.ReadAtLeast"}, - {err: "io.EOF", fun: "io.ReadFull"}, - {err: "io.ErrUnexpectedEOF", fun: "io.ReadFull"}, - // pkg/net/http - {err: "net/http.ErrServerClosed", fun: "(*net/http.Server).ListenAndServe"}, - {err: "net/http.ErrServerClosed", fun: "(*net/http.Server).ListenAndServeTLS"}, - {err: "net/http.ErrServerClosed", fun: "(*net/http.Server).Serve"}, - {err: "net/http.ErrServerClosed", fun: "(*net/http.Server).ServeTLS"}, - {err: "net/http.ErrServerClosed", fun: "net/http.ListenAndServe"}, - {err: "net/http.ErrServerClosed", fun: "net/http.ListenAndServeTLS"}, - {err: "net/http.ErrServerClosed", fun: "net/http.Serve"}, - {err: "net/http.ErrServerClosed", fun: "net/http.ServeTLS"}, - // pkg/os - {err: "io.EOF", fun: "(*os.File).Read"}, - {err: "io.EOF", fun: "(*os.File).ReadAt"}, - {err: "io.EOF", fun: "(*os.File).ReadDir"}, - {err: "io.EOF", fun: "(*os.File).Readdir"}, - {err: "io.EOF", fun: "(*os.File).Readdirnames"}, - // pkg/strings - {err: "io.EOF", fun: "(*strings.Reader).Read"}, - {err: "io.EOF", fun: "(*strings.Reader).ReadAt"}, - {err: 
"io.EOF", fun: "(*strings.Reader).ReadByte"}, - {err: "io.EOF", fun: "(*strings.Reader).ReadRune"}, - // pkg/context - {err: "context.DeadlineExceeded", fun: "(context.Context).Err"}, - {err: "context.Canceled", fun: "(context.Context).Err"}, -} - -var allowedErrorWildcards = []struct { - err string - fun string -}{ - // golang.org/x/sys/unix - {err: "golang.org/x/sys/unix.E", fun: "golang.org/x/sys/unix."}, -} - -func isAllowedErrAndFunc(err, fun string) bool { - for _, allow := range allowedErrorWildcards { - if strings.HasPrefix(fun, allow.fun) && strings.HasPrefix(err, allow.err) { - return true - } - } - - for _, allow := range allowedErrors { - if allow.fun == fun && allow.err == err { - return true - } - } - return false -} - -func isAllowedErrorComparison(pass *TypesInfoExt, binExpr *ast.BinaryExpr) bool { - var errName string // `.`, e.g. `io.EOF` - var callExprs []*ast.CallExpr - - // Figure out which half of the expression is the returned error and which - // half is the presumed error declaration. - for _, expr := range []ast.Expr{binExpr.X, binExpr.Y} { - switch t := expr.(type) { - case *ast.SelectorExpr: - // A selector which we assume refers to a staticaly declared error - // in a package. - errName = selectorToString(pass, t) - case *ast.Ident: - // Identifier, most likely to be the `err` variable or whatever - // produces it. - callExprs = assigningCallExprs(pass, t, map[types.Object]bool{}) - case *ast.CallExpr: - callExprs = append(callExprs, t) - } - } - - // Unimplemented or not sure, disallow the expression. - if errName == "" || len(callExprs) == 0 { - return false - } - - // Map call expressions to the function name format of the allow list. - functionNames := make([]string, len(callExprs)) - for i, callExpr := range callExprs { - functionSelector, ok := callExpr.Fun.(*ast.SelectorExpr) - if !ok { - // If the function is not a selector it is not an Std function that is - // allowed. - return false - } - if sel, ok := pass.TypesInfo.Selections[functionSelector]; ok { - functionNames[i] = fmt.Sprintf("(%s).%s", sel.Recv(), sel.Obj().Name()) - } else { - // If there is no selection, assume it is a package. - functionNames[i] = selectorToString(pass, callExpr.Fun.(*ast.SelectorExpr)) - } - } - - // All assignments done must be allowed. - for _, funcName := range functionNames { - if !isAllowedErrAndFunc(errName, funcName) { - return false - } - } - return true -} - -// assigningCallExprs finds all *ast.CallExpr nodes that are part of an -// *ast.AssignStmt that assign to the subject identifier. -func assigningCallExprs(pass *TypesInfoExt, subject *ast.Ident, visitedObjects map[types.Object]bool) []*ast.CallExpr { - if subject.Obj == nil { - return nil - } - - // Find other identifiers that reference this same object. - sobj := pass.TypesInfo.ObjectOf(subject) - - if visitedObjects[sobj] { - return nil - } - visitedObjects[sobj] = true - - // Make sure to exclude the subject identifier as it will cause an infinite recursion and is - // being used in a read operation anyway. - identifiers := []*ast.Ident{} - for _, ident := range pass.IdentifiersForObject[sobj] { - if subject.Pos() != ident.Pos() { - identifiers = append(identifiers, ident) - } - } - - // Find out whether the identifiers are part of an assignment statement. - var callExprs []*ast.CallExpr - for _, ident := range identifiers { - parent := pass.NodeParent[ident] - switch declT := parent.(type) { - case *ast.AssignStmt: - // The identifier is LHS of an assignment. 
- assignment := declT - - assigningExpr := assignment.Rhs[0] - // If the assignment is comprised of multiple expressions, find out - // which RHS expression we should use by finding its index in the LHS. - if len(assignment.Lhs) == len(assignment.Rhs) { - for i, lhs := range assignment.Lhs { - if ident, ok := lhs.(*ast.Ident); ok && subject.Name == ident.Name { - assigningExpr = assignment.Rhs[i] - break - } - } - } - - switch assignT := assigningExpr.(type) { - case *ast.CallExpr: - // Found the function call. - callExprs = append(callExprs, assignT) - case *ast.Ident: - // Skip assignments here the RHS points to the same object as the subject. - if assignT.Obj == subject.Obj { - continue - } - // The subject was the result of assigning from another identifier. - callExprs = append(callExprs, assigningCallExprs(pass, assignT, visitedObjects)...) - default: - // TODO: inconclusive? - } - } - } - return callExprs -} - -func selectorToString(pass *TypesInfoExt, selExpr *ast.SelectorExpr) string { - o := pass.TypesInfo.Uses[selExpr.Sel] - return fmt.Sprintf("%s.%s", o.Pkg().Path(), o.Name()) -} diff --git a/vendor/github.com/polyfloyd/go-errorlint/errorlint/analysis.go b/vendor/github.com/polyfloyd/go-errorlint/errorlint/analysis.go deleted file mode 100644 index f034913ea..000000000 --- a/vendor/github.com/polyfloyd/go-errorlint/errorlint/analysis.go +++ /dev/null @@ -1,113 +0,0 @@ -package errorlint - -import ( - "flag" - "go/ast" - "go/types" - "sort" - - "golang.org/x/tools/go/analysis" -) - -func NewAnalyzer() *analysis.Analyzer { - return &analysis.Analyzer{ - Name: "errorlint", - Doc: "Source code linter for Go software that can be used to find code that will cause problems with the error wrapping scheme introduced in Go 1.13.", - Run: run, - Flags: flagSet, - } -} - -var ( - flagSet flag.FlagSet - checkComparison bool - checkAsserts bool - checkErrorf bool - checkErrorfMulti bool -) - -func init() { - flagSet.BoolVar(&checkComparison, "comparison", true, "Check for plain error comparisons") - flagSet.BoolVar(&checkAsserts, "asserts", true, "Check for plain type assertions and type switches") - flagSet.BoolVar(&checkErrorf, "errorf", false, "Check whether fmt.Errorf uses the %w verb for formatting errors. See the readme for caveats") - flagSet.BoolVar(&checkErrorfMulti, "errorf-multi", true, "Permit more than 1 %w verb, valid per Go 1.20 (Requires -errorf=true)") -} - -func run(pass *analysis.Pass) (interface{}, error) { - lints := []analysis.Diagnostic{} - extInfo := newTypesInfoExt(pass) - if checkComparison { - l := LintErrorComparisons(extInfo) - lints = append(lints, l...) - } - if checkAsserts { - l := LintErrorTypeAssertions(pass.Fset, extInfo) - lints = append(lints, l...) - } - if checkErrorf { - l := LintFmtErrorfCalls(pass.Fset, *pass.TypesInfo, checkErrorfMulti) - lints = append(lints, l...) - } - sort.Sort(ByPosition(lints)) - - for _, l := range lints { - pass.Report(l) - } - return nil, nil -} - -type TypesInfoExt struct { - *analysis.Pass - - // Maps AST nodes back to the node they are contained within. - NodeParent map[ast.Node]ast.Node - - // Maps an object back to all identifiers to refer to it. 
- IdentifiersForObject map[types.Object][]*ast.Ident -} - -func newTypesInfoExt(pass *analysis.Pass) *TypesInfoExt { - nodeParent := map[ast.Node]ast.Node{} - for node := range pass.TypesInfo.Scopes { - file, ok := node.(*ast.File) - if !ok { - continue - } - stack := []ast.Node{file} - ast.Inspect(file, func(n ast.Node) bool { - nodeParent[n] = stack[len(stack)-1] - if n == nil { - stack = stack[:len(stack)-1] - } else { - stack = append(stack, n) - } - return true - }) - } - - identifiersForObject := map[types.Object][]*ast.Ident{} - for node, obj := range pass.TypesInfo.Defs { - identifiersForObject[obj] = append(identifiersForObject[obj], node) - } - for node, obj := range pass.TypesInfo.Uses { - identifiersForObject[obj] = append(identifiersForObject[obj], node) - } - - return &TypesInfoExt{ - Pass: pass, - NodeParent: nodeParent, - IdentifiersForObject: identifiersForObject, - } -} - -func (info *TypesInfoExt) ContainingFuncDecl(node ast.Node) *ast.FuncDecl { - for parent := info.NodeParent[node]; ; parent = info.NodeParent[parent] { - if _, ok := parent.(*ast.File); ok { - break - } - if fun, ok := parent.(*ast.FuncDecl); ok { - return fun - } - } - return nil -} diff --git a/vendor/github.com/polyfloyd/go-errorlint/errorlint/lint.go b/vendor/github.com/polyfloyd/go-errorlint/errorlint/lint.go deleted file mode 100644 index 572a3816d..000000000 --- a/vendor/github.com/polyfloyd/go-errorlint/errorlint/lint.go +++ /dev/null @@ -1,375 +0,0 @@ -package errorlint - -import ( - "fmt" - "go/ast" - "go/constant" - "go/token" - "go/types" - - "golang.org/x/tools/go/analysis" -) - -type ByPosition []analysis.Diagnostic - -func (l ByPosition) Len() int { return len(l) } -func (l ByPosition) Swap(i, j int) { l[i], l[j] = l[j], l[i] } - -func (l ByPosition) Less(i, j int) bool { - return l[i].Pos < l[j].Pos -} - -func LintFmtErrorfCalls(fset *token.FileSet, info types.Info, multipleWraps bool) []analysis.Diagnostic { - lints := []analysis.Diagnostic{} - for expr, t := range info.Types { - // Search for error expressions that are the result of fmt.Errorf - // invocations. - if t.Type.String() != "error" { - continue - } - call, ok := isFmtErrorfCallExpr(info, expr) - if !ok { - continue - } - - // Find all % fields in the format string. - formatVerbs, ok := printfFormatStringVerbs(info, call) - if !ok { - continue - } - - // For any arguments that are errors, check whether the wrapping verb is used. %w may occur - // for multiple errors in one Errorf invocation, unless multipleWraps is true. We raise an - // issue if at least one error does not have a corresponding wrapping verb. - args := call.Args[1:] - if !multipleWraps { - wrapCount := 0 - for i := 0; i < len(args) && i < len(formatVerbs); i++ { - arg := args[i] - if !implementsError(info.Types[arg].Type) { - continue - } - verb := formatVerbs[i] - - if verb.format == "w" { - wrapCount++ - if wrapCount > 1 { - lints = append(lints, analysis.Diagnostic{ - Message: "only one %w verb is permitted per format string", - Pos: arg.Pos(), - }) - break - } - } - - if wrapCount == 0 { - lints = append(lints, analysis.Diagnostic{ - Message: "non-wrapping format verb for fmt.Errorf. 
Use `%w` to format errors", - Pos: args[i].Pos(), - }) - break - } - } - - } else { - var lint *analysis.Diagnostic - argIndex := 0 - for _, verb := range formatVerbs { - if verb.index != -1 { - argIndex = verb.index - } else { - argIndex++ - } - - if verb.format == "w" { - continue - } - if argIndex-1 >= len(args) { - continue - } - arg := args[argIndex-1] - if !implementsError(info.Types[arg].Type) { - continue - } - - strStart := call.Args[0].Pos() - if lint == nil { - lint = &analysis.Diagnostic{ - Message: "non-wrapping format verb for fmt.Errorf. Use `%w` to format errors", - Pos: arg.Pos(), - } - } - lint.SuggestedFixes = append(lint.SuggestedFixes, analysis.SuggestedFix{ - Message: "Use `%w` to format errors", - TextEdits: []analysis.TextEdit{{ - Pos: strStart + token.Pos(verb.formatOffset) + 1, - End: strStart + token.Pos(verb.formatOffset) + 2, - NewText: []byte("w"), - }}, - }) - } - if lint != nil { - lints = append(lints, *lint) - } - } - } - return lints -} - -// printfFormatStringVerbs returns a normalized list of all the verbs that are used per argument to -// the printf function. The index of each returned element corresponds to the index of the -// respective argument. -func printfFormatStringVerbs(info types.Info, call *ast.CallExpr) ([]verb, bool) { - if len(call.Args) <= 1 { - return nil, false - } - strLit, ok := call.Args[0].(*ast.BasicLit) - if !ok { - // Ignore format strings that are not literals. - return nil, false - } - formatString := constant.StringVal(info.Types[strLit].Value) - - pp := printfParser{str: formatString} - verbs, err := pp.ParseAllVerbs() - if err != nil { - return nil, false - } - - return verbs, true -} - -func isFmtErrorfCallExpr(info types.Info, expr ast.Expr) (*ast.CallExpr, bool) { - call, ok := expr.(*ast.CallExpr) - if !ok { - return nil, false - } - fn, ok := call.Fun.(*ast.SelectorExpr) - if !ok { - // TODO: Support fmt.Errorf variable aliases? - return nil, false - } - obj := info.Uses[fn.Sel] - - pkg := obj.Pkg() - if pkg != nil && pkg.Name() == "fmt" && obj.Name() == "Errorf" { - return call, true - } - return nil, false -} - -func LintErrorComparisons(info *TypesInfoExt) []analysis.Diagnostic { - lints := []analysis.Diagnostic{} - - for expr := range info.TypesInfo.Types { - // Find == and != operations. - binExpr, ok := expr.(*ast.BinaryExpr) - if !ok { - continue - } - if binExpr.Op != token.EQL && binExpr.Op != token.NEQ { - continue - } - // Comparing errors with nil is okay. - if isNilComparison(binExpr) { - continue - } - // Find comparisons of which one side is a of type error. - if !isErrorComparison(info.TypesInfo, binExpr) { - continue - } - // Some errors that are returned from some functions are exempt. - if isAllowedErrorComparison(info, binExpr) { - continue - } - // Comparisons that happen in `func (type) Is(error) bool` are okay. - if isNodeInErrorIsFunc(info, binExpr) { - continue - } - - lints = append(lints, analysis.Diagnostic{ - Message: fmt.Sprintf("comparing with %s will fail on wrapped errors. Use errors.Is to check for a specific error", binExpr.Op), - Pos: binExpr.Pos(), - }) - } - - for scope := range info.TypesInfo.Scopes { - // Find value switch blocks. - switchStmt, ok := scope.(*ast.SwitchStmt) - if !ok { - continue - } - // Check whether the switch operates on an error type. 
- if switchStmt.Tag == nil { - continue - } - tagType := info.TypesInfo.Types[switchStmt.Tag] - if tagType.Type.String() != "error" { - continue - } - if isNodeInErrorIsFunc(info, switchStmt) { - continue - } - - if switchComparesNonNil(switchStmt) { - lints = append(lints, analysis.Diagnostic{ - Message: "switch on an error will fail on wrapped errors. Use errors.Is to check for specific errors", - Pos: switchStmt.Pos(), - }) - } - - } - - return lints -} - -func isNilComparison(binExpr *ast.BinaryExpr) bool { - if ident, ok := binExpr.X.(*ast.Ident); ok && ident.Name == "nil" { - return true - } - if ident, ok := binExpr.Y.(*ast.Ident); ok && ident.Name == "nil" { - return true - } - return false -} - -func isErrorComparison(info *types.Info, binExpr *ast.BinaryExpr) bool { - tx := info.Types[binExpr.X] - ty := info.Types[binExpr.Y] - return tx.Type.String() == "error" || ty.Type.String() == "error" -} - -func isNodeInErrorIsFunc(info *TypesInfoExt, node ast.Node) bool { - funcDecl := info.ContainingFuncDecl(node) - if funcDecl == nil { - return false - } - - if funcDecl.Name.Name != "Is" { - return false - } - if funcDecl.Recv == nil { - return false - } - // There should be 1 argument of type error. - if ii := funcDecl.Type.Params.List; len(ii) != 1 || info.TypesInfo.Types[ii[0].Type].Type.String() != "error" { - return false - } - // The return type should be bool. - if ii := funcDecl.Type.Results.List; len(ii) != 1 || info.TypesInfo.Types[ii[0].Type].Type.String() != "bool" { - return false - } - - return true -} - -// switchComparesNonNil returns true if one of its clauses compares by value. -func switchComparesNonNil(switchStmt *ast.SwitchStmt) bool { - for _, caseBlock := range switchStmt.Body.List { - caseClause, ok := caseBlock.(*ast.CaseClause) - if !ok { - continue - } - for _, clause := range caseClause.List { - switch clause := clause.(type) { - case nil: - // default label is safe - continue - case *ast.Ident: - // `case nil` is safe - if clause.Name == "nil" { - continue - } - } - // anything else (including an Ident other than nil) isn't safe - return true - } - } - return false -} - -func LintErrorTypeAssertions(fset *token.FileSet, info *TypesInfoExt) []analysis.Diagnostic { - lints := []analysis.Diagnostic{} - - for expr := range info.TypesInfo.Types { - // Find type assertions. - typeAssert, ok := expr.(*ast.TypeAssertExpr) - if !ok { - continue - } - - // Find type assertions that operate on values of type error. - if !isErrorTypeAssertion(*info.TypesInfo, typeAssert) { - continue - } - - if isNodeInErrorIsFunc(info, typeAssert) { - continue - } - - // If the asserted type is not an error, allow the expression. - if !implementsError(info.TypesInfo.Types[typeAssert.Type].Type) { - continue - } - - lints = append(lints, analysis.Diagnostic{ - Message: "type assertion on error will fail on wrapped errors. Use errors.As to check for specific errors", - Pos: typeAssert.Pos(), - }) - } - - for scope := range info.TypesInfo.Scopes { - // Find type switches. - typeSwitch, ok := scope.(*ast.TypeSwitchStmt) - if !ok { - continue - } - - // Find the type assertion in the type switch. - var typeAssert *ast.TypeAssertExpr - switch t := typeSwitch.Assign.(type) { - case *ast.ExprStmt: - typeAssert = t.X.(*ast.TypeAssertExpr) - case *ast.AssignStmt: - typeAssert = t.Rhs[0].(*ast.TypeAssertExpr) - } - - // Check whether the type switch is on a value of type error. 
- if !isErrorTypeAssertion(*info.TypesInfo, typeAssert) { - continue - } - - if isNodeInErrorIsFunc(info, typeSwitch) { - continue - } - - lints = append(lints, analysis.Diagnostic{ - Message: "type switch on error will fail on wrapped errors. Use errors.As to check for specific errors", - Pos: typeAssert.Pos(), - }) - } - - return lints -} - -func isErrorTypeAssertion(info types.Info, typeAssert *ast.TypeAssertExpr) bool { - t := info.Types[typeAssert.X] - return t.Type.String() == "error" -} - -func implementsError(t types.Type) bool { - mset := types.NewMethodSet(t) - - for i := 0; i < mset.Len(); i++ { - if mset.At(i).Kind() != types.MethodVal { - continue - } - - obj := mset.At(i).Obj() - if obj.Name() == "Error" && obj.Type().String() == "func() string" { - return true - } - } - - return false -} diff --git a/vendor/github.com/polyfloyd/go-errorlint/errorlint/printf.go b/vendor/github.com/polyfloyd/go-errorlint/errorlint/printf.go deleted file mode 100644 index 4c0e12525..000000000 --- a/vendor/github.com/polyfloyd/go-errorlint/errorlint/printf.go +++ /dev/null @@ -1,122 +0,0 @@ -package errorlint - -import ( - "fmt" - "io" - "strconv" - "strings" -) - -type verb struct { - format string - formatOffset int - index int -} - -type printfParser struct { - str string - at int -} - -func (pp *printfParser) ParseAllVerbs() ([]verb, error) { - verbs := []verb{} - for { - verb, err := pp.parseVerb() - if err == io.EOF { - break - } else if err != nil { - return nil, err - } - verbs = append(verbs, *verb) - } - return verbs, nil -} - -func (pp *printfParser) parseVerb() (*verb, error) { - if err := pp.skipToPercent(); err != nil { - return nil, err - } - if pp.next() != '%' { - return nil, fmt.Errorf("expected '%%'") - } - - index := -1 - for { - switch pp.peek() { - case '%': - pp.next() - return pp.parseVerb() - case '+', '#': - pp.next() - continue - case '[': - var err error - index, err = pp.parseIndex() - if err != nil { - return nil, err - } - case '0', '1', '2', '3', '4', '5', '6', '7', '8', '9', '.': - pp.parsePrecision() - case 0: - return nil, io.EOF - } - break - } - - format := pp.next() - - return &verb{format: string(format), formatOffset: pp.at - 1, index: index}, nil -} - -func (pp *printfParser) parseIndex() (int, error) { - if pp.next() != '[' { - return -1, fmt.Errorf("expected '['") - } - end := strings.Index(pp.str, "]") - if end == -1 { - return -1, fmt.Errorf("unterminated indexed verb") - } - index, err := strconv.Atoi(pp.str[:end]) - if err != nil { - return -1, err - } - pp.str = pp.str[end+1:] - pp.at += end + 1 - return index, nil -} - -func (pp *printfParser) parsePrecision() { - for { - if r := pp.peek(); (r < '0' || '9' < r) && r != '.' 
{ - break - } - pp.next() - } -} - -func (pp *printfParser) skipToPercent() error { - i := strings.Index(pp.str, "%") - if i == -1 { - return io.EOF - } - pp.str = pp.str[i:] - pp.at += i - return nil -} - -func (pp *printfParser) peek() rune { - if len(pp.str) == 0 { - return 0 - } - return rune(pp.str[0]) -} - -func (pp *printfParser) next() rune { - if len(pp.str) == 0 { - return 0 - } - r := rune(pp.str[0]) - pp.str = pp.str[1:] - pp.at++ - return r -} diff --git a/vendor/github.com/prometheus/client_golang/LICENSE b/vendor/github.com/prometheus/client_golang/LICENSE deleted file mode 100644 index 261eeb9e9..000000000 --- a/vendor/github.com/prometheus/client_golang/LICENSE +++ /dev/null @@ -1,201 +0,0 @@ - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. 
For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. 
The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. 
- - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright [yyyy] [name of copyright owner] - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. diff --git a/vendor/github.com/prometheus/client_golang/NOTICE b/vendor/github.com/prometheus/client_golang/NOTICE deleted file mode 100644 index dd878a30e..000000000 --- a/vendor/github.com/prometheus/client_golang/NOTICE +++ /dev/null @@ -1,23 +0,0 @@ -Prometheus instrumentation library for Go applications -Copyright 2012-2015 The Prometheus Authors - -This product includes software developed at -SoundCloud Ltd. (http://soundcloud.com/). - - -The following components are included in this product: - -perks - a fork of https://github.com/bmizerany/perks -https://github.com/beorn7/perks -Copyright 2013-2015 Blake Mizerany, Björn Rabenstein -See https://github.com/beorn7/perks/blob/master/README.md for license details. - -Go support for Protocol Buffers - Google's data interchange format -http://github.com/golang/protobuf/ -Copyright 2010 The Go Authors -See source code for license details. - -Support for streaming Protocol Buffer messages for the Go language (golang). -https://github.com/matttproud/golang_protobuf_extensions -Copyright 2013 Matt T. Proud -Licensed under the Apache License, Version 2.0 diff --git a/vendor/github.com/prometheus/client_golang/prometheus/.gitignore b/vendor/github.com/prometheus/client_golang/prometheus/.gitignore deleted file mode 100644 index 3460f0346..000000000 --- a/vendor/github.com/prometheus/client_golang/prometheus/.gitignore +++ /dev/null @@ -1 +0,0 @@ -command-line-arguments.test diff --git a/vendor/github.com/prometheus/client_golang/prometheus/README.md b/vendor/github.com/prometheus/client_golang/prometheus/README.md deleted file mode 100644 index c67ff1b7f..000000000 --- a/vendor/github.com/prometheus/client_golang/prometheus/README.md +++ /dev/null @@ -1 +0,0 @@ -See [![Go Reference](https://pkg.go.dev/badge/github.com/prometheus/client_golang/prometheus.svg)](https://pkg.go.dev/github.com/prometheus/client_golang/prometheus). 
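The client_golang sources removed below (collector.go, counter.go, desc.go, gauge.go, and related files) implement the Counter and Gauge metric types and their labelled vector forms. As a rough orientation only, here is a minimal sketch of that API, assuming nothing beyond the public github.com/prometheus/client_golang/prometheus package and using hypothetical metric and label names:

    package main

    import (
        "fmt"

        "github.com/prometheus/client_golang/prometheus"
    )

    func main() {
        // A counter vector partitioned by HTTP method and response code
        // (hypothetical metric name, for illustration only).
        requests := prometheus.NewCounterVec(
            prometheus.CounterOpts{
                Name: "example_http_requests_total",
                Help: "HTTP requests, partitioned by method and code.",
            },
            []string{"method", "code"},
        )

        // Register with a dedicated (non-global) registry.
        reg := prometheus.NewRegistry()
        reg.MustRegister(requests)

        // Label values are passed in the same order as the label names above.
        requests.WithLabelValues("GET", "200").Inc()
        requests.With(prometheus.Labels{"method": "POST", "code": "404"}).Add(2)

        // Currying pre-sets a label for all later operations on the vector.
        gets := requests.MustCurryWith(prometheus.Labels{"method": "GET"})
        gets.WithLabelValues("500").Inc()

        fmt.Println("example counters registered and incremented")
    }

This is a sketch of typical usage, not a substitute for the removed sources; consumers of this repository no longer get these packages from the vendor tree once this change lands.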
diff --git a/vendor/github.com/prometheus/client_golang/prometheus/build_info_collector.go b/vendor/github.com/prometheus/client_golang/prometheus/build_info_collector.go deleted file mode 100644 index 450189f35..000000000 --- a/vendor/github.com/prometheus/client_golang/prometheus/build_info_collector.go +++ /dev/null @@ -1,38 +0,0 @@ -// Copyright 2021 The Prometheus Authors -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package prometheus - -import "runtime/debug" - -// NewBuildInfoCollector is the obsolete version of collectors.NewBuildInfoCollector. -// See there for documentation. -// -// Deprecated: Use collectors.NewBuildInfoCollector instead. -func NewBuildInfoCollector() Collector { - path, version, sum := "unknown", "unknown", "unknown" - if bi, ok := debug.ReadBuildInfo(); ok { - path = bi.Main.Path - version = bi.Main.Version - sum = bi.Main.Sum - } - c := &selfCollector{MustNewConstMetric( - NewDesc( - "go_build_info", - "Build information about the main Go module.", - nil, Labels{"path": path, "version": version, "checksum": sum}, - ), - GaugeValue, 1)} - c.init(c.self) - return c -} diff --git a/vendor/github.com/prometheus/client_golang/prometheus/collector.go b/vendor/github.com/prometheus/client_golang/prometheus/collector.go deleted file mode 100644 index cf05079fb..000000000 --- a/vendor/github.com/prometheus/client_golang/prometheus/collector.go +++ /dev/null @@ -1,128 +0,0 @@ -// Copyright 2014 The Prometheus Authors -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package prometheus - -// Collector is the interface implemented by anything that can be used by -// Prometheus to collect metrics. A Collector has to be registered for -// collection. See Registerer.Register. -// -// The stock metrics provided by this package (Gauge, Counter, Summary, -// Histogram, Untyped) are also Collectors (which only ever collect one metric, -// namely itself). An implementer of Collector may, however, collect multiple -// metrics in a coordinated fashion and/or create metrics on the fly. Examples -// for collectors already implemented in this library are the metric vectors -// (i.e. collection of multiple instances of the same Metric but with different -// label values) like GaugeVec or SummaryVec, and the ExpvarCollector. -type Collector interface { - // Describe sends the super-set of all possible descriptors of metrics - // collected by this Collector to the provided channel and returns once - // the last descriptor has been sent. 
The sent descriptors fulfill the - // consistency and uniqueness requirements described in the Desc - // documentation. - // - // It is valid if one and the same Collector sends duplicate - // descriptors. Those duplicates are simply ignored. However, two - // different Collectors must not send duplicate descriptors. - // - // Sending no descriptor at all marks the Collector as “unchecked”, - // i.e. no checks will be performed at registration time, and the - // Collector may yield any Metric it sees fit in its Collect method. - // - // This method idempotently sends the same descriptors throughout the - // lifetime of the Collector. It may be called concurrently and - // therefore must be implemented in a concurrency safe way. - // - // If a Collector encounters an error while executing this method, it - // must send an invalid descriptor (created with NewInvalidDesc) to - // signal the error to the registry. - Describe(chan<- *Desc) - // Collect is called by the Prometheus registry when collecting - // metrics. The implementation sends each collected metric via the - // provided channel and returns once the last metric has been sent. The - // descriptor of each sent metric is one of those returned by Describe - // (unless the Collector is unchecked, see above). Returned metrics that - // share the same descriptor must differ in their variable label - // values. - // - // This method may be called concurrently and must therefore be - // implemented in a concurrency safe way. Blocking occurs at the expense - // of total performance of rendering all registered metrics. Ideally, - // Collector implementations support concurrent readers. - Collect(chan<- Metric) -} - -// DescribeByCollect is a helper to implement the Describe method of a custom -// Collector. It collects the metrics from the provided Collector and sends -// their descriptors to the provided channel. -// -// If a Collector collects the same metrics throughout its lifetime, its -// Describe method can simply be implemented as: -// -// func (c customCollector) Describe(ch chan<- *Desc) { -// DescribeByCollect(c, ch) -// } -// -// However, this will not work if the metrics collected change dynamically over -// the lifetime of the Collector in a way that their combined set of descriptors -// changes as well. The shortcut implementation will then violate the contract -// of the Describe method. If a Collector sometimes collects no metrics at all -// (for example vectors like CounterVec, GaugeVec, etc., which only collect -// metrics after a metric with a fully specified label set has been accessed), -// it might even get registered as an unchecked Collector (cf. the Register -// method of the Registerer interface). Hence, only use this shortcut -// implementation of Describe if you are certain to fulfill the contract. -// -// The Collector example demonstrates a use of DescribeByCollect. -func DescribeByCollect(c Collector, descs chan<- *Desc) { - metrics := make(chan Metric) - go func() { - c.Collect(metrics) - close(metrics) - }() - for m := range metrics { - descs <- m.Desc() - } -} - -// selfCollector implements Collector for a single Metric so that the Metric -// collects itself. Add it as an anonymous field to a struct that implements -// Metric, and call init with the Metric itself as an argument. -type selfCollector struct { - self Metric -} - -// init provides the selfCollector with a reference to the metric it is supposed -// to collect. It is usually called within the factory function to create a -// metric. See example. 
-func (c *selfCollector) init(self Metric) { - c.self = self -} - -// Describe implements Collector. -func (c *selfCollector) Describe(ch chan<- *Desc) { - ch <- c.self.Desc() -} - -// Collect implements Collector. -func (c *selfCollector) Collect(ch chan<- Metric) { - ch <- c.self -} - -// collectorMetric is a metric that is also a collector. -// Because of selfCollector, most (if not all) Metrics in -// this package are also collectors. -type collectorMetric interface { - Metric - Collector -} diff --git a/vendor/github.com/prometheus/client_golang/prometheus/counter.go b/vendor/github.com/prometheus/client_golang/prometheus/counter.go deleted file mode 100644 index 4ce84e7a8..000000000 --- a/vendor/github.com/prometheus/client_golang/prometheus/counter.go +++ /dev/null @@ -1,358 +0,0 @@ -// Copyright 2014 The Prometheus Authors -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package prometheus - -import ( - "errors" - "math" - "sync/atomic" - "time" - - dto "github.com/prometheus/client_model/go" - "google.golang.org/protobuf/types/known/timestamppb" -) - -// Counter is a Metric that represents a single numerical value that only ever -// goes up. That implies that it cannot be used to count items whose number can -// also go down, e.g. the number of currently running goroutines. Those -// "counters" are represented by Gauges. -// -// A Counter is typically used to count requests served, tasks completed, errors -// occurred, etc. -// -// To create Counter instances, use NewCounter. -type Counter interface { - Metric - Collector - - // Inc increments the counter by 1. Use Add to increment it by arbitrary - // non-negative values. - Inc() - // Add adds the given value to the counter. It panics if the value is < - // 0. - Add(float64) -} - -// ExemplarAdder is implemented by Counters that offer the option of adding a -// value to the Counter together with an exemplar. Its AddWithExemplar method -// works like the Add method of the Counter interface but also replaces the -// currently saved exemplar (if any) with a new one, created from the provided -// value, the current time as timestamp, and the provided labels. Empty Labels -// will lead to a valid (label-less) exemplar. But if Labels is nil, the current -// exemplar is left in place. AddWithExemplar panics if the value is < 0, if any -// of the provided labels are invalid, or if the provided labels contain more -// than 128 runes in total. -type ExemplarAdder interface { - AddWithExemplar(value float64, exemplar Labels) -} - -// CounterOpts is an alias for Opts. See there for doc comments. -type CounterOpts Opts - -// CounterVecOpts bundles the options to create a CounterVec metric. -// It is mandatory to set CounterOpts, see there for mandatory fields. VariableLabels -// is optional and can safely be left to its default value. -type CounterVecOpts struct { - CounterOpts - - // VariableLabels are used to partition the metric vector by the given set - // of labels. 
Each label value will be constrained with the optional Constraint - // function, if provided. - VariableLabels ConstrainableLabels -} - -// NewCounter creates a new Counter based on the provided CounterOpts. -// -// The returned implementation also implements ExemplarAdder. It is safe to -// perform the corresponding type assertion. -// -// The returned implementation tracks the counter value in two separate -// variables, a float64 and a uint64. The latter is used to track calls of the -// Inc method and calls of the Add method with a value that can be represented -// as a uint64. This allows atomic increments of the counter with optimal -// performance. (It is common to have an Inc call in very hot execution paths.) -// Both internal tracking values are added up in the Write method. This has to -// be taken into account when it comes to precision and overflow behavior. -func NewCounter(opts CounterOpts) Counter { - desc := NewDesc( - BuildFQName(opts.Namespace, opts.Subsystem, opts.Name), - opts.Help, - nil, - opts.ConstLabels, - ) - if opts.now == nil { - opts.now = time.Now - } - result := &counter{desc: desc, labelPairs: desc.constLabelPairs, now: opts.now} - result.init(result) // Init self-collection. - result.createdTs = timestamppb.New(opts.now()) - return result -} - -type counter struct { - // valBits contains the bits of the represented float64 value, while - // valInt stores values that are exact integers. Both have to go first - // in the struct to guarantee alignment for atomic operations. - // http://golang.org/pkg/sync/atomic/#pkg-note-BUG - valBits uint64 - valInt uint64 - - selfCollector - desc *Desc - - createdTs *timestamppb.Timestamp - labelPairs []*dto.LabelPair - exemplar atomic.Value // Containing nil or a *dto.Exemplar. - - // now is for testing purposes, by default it's time.Now. - now func() time.Time -} - -func (c *counter) Desc() *Desc { - return c.desc -} - -func (c *counter) Add(v float64) { - if v < 0 { - panic(errors.New("counter cannot decrease in value")) - } - - ival := uint64(v) - if float64(ival) == v { - atomic.AddUint64(&c.valInt, ival) - return - } - - for { - oldBits := atomic.LoadUint64(&c.valBits) - newBits := math.Float64bits(math.Float64frombits(oldBits) + v) - if atomic.CompareAndSwapUint64(&c.valBits, oldBits, newBits) { - return - } - } -} - -func (c *counter) AddWithExemplar(v float64, e Labels) { - c.Add(v) - c.updateExemplar(v, e) -} - -func (c *counter) Inc() { - atomic.AddUint64(&c.valInt, 1) -} - -func (c *counter) get() float64 { - fval := math.Float64frombits(atomic.LoadUint64(&c.valBits)) - ival := atomic.LoadUint64(&c.valInt) - return fval + float64(ival) -} - -func (c *counter) Write(out *dto.Metric) error { - // Read the Exemplar first and the value second. This is to avoid a race condition - // where users see an exemplar for a not-yet-existing observation. - var exemplar *dto.Exemplar - if e := c.exemplar.Load(); e != nil { - exemplar = e.(*dto.Exemplar) - } - val := c.get() - return populateMetric(CounterValue, val, c.labelPairs, exemplar, out, c.createdTs) -} - -func (c *counter) updateExemplar(v float64, l Labels) { - if l == nil { - return - } - e, err := newExemplar(v, c.now(), l) - if err != nil { - panic(err) - } - c.exemplar.Store(e) -} - -// CounterVec is a Collector that bundles a set of Counters that all share the -// same Desc, but have different values for their variable labels. This is used -// if you want to count the same thing partitioned by various dimensions -// (e.g. 
number of HTTP requests, partitioned by response code and -// method). Create instances with NewCounterVec. -type CounterVec struct { - *MetricVec -} - -// NewCounterVec creates a new CounterVec based on the provided CounterOpts and -// partitioned by the given label names. -func NewCounterVec(opts CounterOpts, labelNames []string) *CounterVec { - return V2.NewCounterVec(CounterVecOpts{ - CounterOpts: opts, - VariableLabels: UnconstrainedLabels(labelNames), - }) -} - -// NewCounterVec creates a new CounterVec based on the provided CounterVecOpts. -func (v2) NewCounterVec(opts CounterVecOpts) *CounterVec { - desc := V2.NewDesc( - BuildFQName(opts.Namespace, opts.Subsystem, opts.Name), - opts.Help, - opts.VariableLabels, - opts.ConstLabels, - ) - if opts.now == nil { - opts.now = time.Now - } - return &CounterVec{ - MetricVec: NewMetricVec(desc, func(lvs ...string) Metric { - if len(lvs) != len(desc.variableLabels.names) { - panic(makeInconsistentCardinalityError(desc.fqName, desc.variableLabels.names, lvs)) - } - result := &counter{desc: desc, labelPairs: MakeLabelPairs(desc, lvs), now: opts.now} - result.init(result) // Init self-collection. - result.createdTs = timestamppb.New(opts.now()) - return result - }), - } -} - -// GetMetricWithLabelValues returns the Counter for the given slice of label -// values (same order as the variable labels in Desc). If that combination of -// label values is accessed for the first time, a new Counter is created. -// -// It is possible to call this method without using the returned Counter to only -// create the new Counter but leave it at its starting value 0. See also the -// SummaryVec example. -// -// Keeping the Counter for later use is possible (and should be considered if -// performance is critical), but keep in mind that Reset, DeleteLabelValues and -// Delete can be used to delete the Counter from the CounterVec. In that case, -// the Counter will still exist, but it will not be exported anymore, even if a -// Counter with the same label values is created later. -// -// An error is returned if the number of label values is not the same as the -// number of variable labels in Desc (minus any curried labels). -// -// Note that for more than one label value, this method is prone to mistakes -// caused by an incorrect order of arguments. Consider GetMetricWith(Labels) as -// an alternative to avoid that type of mistake. For higher label numbers, the -// latter has a much more readable (albeit more verbose) syntax, but it comes -// with a performance overhead (for creating and processing the Labels map). -// See also the GaugeVec example. -func (v *CounterVec) GetMetricWithLabelValues(lvs ...string) (Counter, error) { - metric, err := v.MetricVec.GetMetricWithLabelValues(lvs...) - if metric != nil { - return metric.(Counter), err - } - return nil, err -} - -// GetMetricWith returns the Counter for the given Labels map (the label names -// must match those of the variable labels in Desc). If that label map is -// accessed for the first time, a new Counter is created. Implications of -// creating a Counter without using it and keeping the Counter for later use are -// the same as for GetMetricWithLabelValues. -// -// An error is returned if the number and names of the Labels are inconsistent -// with those of the variable labels in Desc (minus any curried labels). -// -// This method is used for the same purpose as -// GetMetricWithLabelValues(...string). See there for pros and cons of the two -// methods. 
-func (v *CounterVec) GetMetricWith(labels Labels) (Counter, error) { - metric, err := v.MetricVec.GetMetricWith(labels) - if metric != nil { - return metric.(Counter), err - } - return nil, err -} - -// WithLabelValues works as GetMetricWithLabelValues, but panics where -// GetMetricWithLabelValues would have returned an error. Not returning an -// error allows shortcuts like -// -// myVec.WithLabelValues("404", "GET").Add(42) -func (v *CounterVec) WithLabelValues(lvs ...string) Counter { - c, err := v.GetMetricWithLabelValues(lvs...) - if err != nil { - panic(err) - } - return c -} - -// With works as GetMetricWith, but panics where GetMetricWithLabels would have -// returned an error. Not returning an error allows shortcuts like -// -// myVec.With(prometheus.Labels{"code": "404", "method": "GET"}).Add(42) -func (v *CounterVec) With(labels Labels) Counter { - c, err := v.GetMetricWith(labels) - if err != nil { - panic(err) - } - return c -} - -// CurryWith returns a vector curried with the provided labels, i.e. the -// returned vector has those labels pre-set for all labeled operations performed -// on it. The cardinality of the curried vector is reduced accordingly. The -// order of the remaining labels stays the same (just with the curried labels -// taken out of the sequence – which is relevant for the -// (GetMetric)WithLabelValues methods). It is possible to curry a curried -// vector, but only with labels not yet used for currying before. -// -// The metrics contained in the CounterVec are shared between the curried and -// uncurried vectors. They are just accessed differently. Curried and uncurried -// vectors behave identically in terms of collection. Only one must be -// registered with a given registry (usually the uncurried version). The Reset -// method deletes all metrics, even if called on a curried vector. -func (v *CounterVec) CurryWith(labels Labels) (*CounterVec, error) { - vec, err := v.MetricVec.CurryWith(labels) - if vec != nil { - return &CounterVec{vec}, err - } - return nil, err -} - -// MustCurryWith works as CurryWith but panics where CurryWith would have -// returned an error. -func (v *CounterVec) MustCurryWith(labels Labels) *CounterVec { - vec, err := v.CurryWith(labels) - if err != nil { - panic(err) - } - return vec -} - -// CounterFunc is a Counter whose value is determined at collect time by calling a -// provided function. -// -// To create CounterFunc instances, use NewCounterFunc. -type CounterFunc interface { - Metric - Collector -} - -// NewCounterFunc creates a new CounterFunc based on the provided -// CounterOpts. The value reported is determined by calling the given function -// from within the Write method. Take into account that metric collection may -// happen concurrently. If that results in concurrent calls to Write, like in -// the case where a CounterFunc is directly registered with Prometheus, the -// provided function must be concurrency-safe. The function should also honor -// the contract for a Counter (values only go up, not down), but compliance will -// not be checked. -// -// Check out the ExampleGaugeFunc examples for the similar GaugeFunc. 
-func NewCounterFunc(opts CounterOpts, function func() float64) CounterFunc { - return newValueFunc(NewDesc( - BuildFQName(opts.Namespace, opts.Subsystem, opts.Name), - opts.Help, - nil, - opts.ConstLabels, - ), CounterValue, function) -} diff --git a/vendor/github.com/prometheus/client_golang/prometheus/desc.go b/vendor/github.com/prometheus/client_golang/prometheus/desc.go deleted file mode 100644 index 68ffe3c24..000000000 --- a/vendor/github.com/prometheus/client_golang/prometheus/desc.go +++ /dev/null @@ -1,207 +0,0 @@ -// Copyright 2016 The Prometheus Authors -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package prometheus - -import ( - "fmt" - "sort" - "strings" - - "github.com/cespare/xxhash/v2" - dto "github.com/prometheus/client_model/go" - "github.com/prometheus/common/model" - "google.golang.org/protobuf/proto" - - "github.com/prometheus/client_golang/prometheus/internal" -) - -// Desc is the descriptor used by every Prometheus Metric. It is essentially -// the immutable meta-data of a Metric. The normal Metric implementations -// included in this package manage their Desc under the hood. Users only have to -// deal with Desc if they use advanced features like the ExpvarCollector or -// custom Collectors and Metrics. -// -// Descriptors registered with the same registry have to fulfill certain -// consistency and uniqueness criteria if they share the same fully-qualified -// name: They must have the same help string and the same label names (aka label -// dimensions) in each, constLabels and variableLabels, but they must differ in -// the values of the constLabels. -// -// Descriptors that share the same fully-qualified names and the same label -// values of their constLabels are considered equal. -// -// Use NewDesc to create new Desc instances. -type Desc struct { - // fqName has been built from Namespace, Subsystem, and Name. - fqName string - // help provides some helpful information about this metric. - help string - // constLabelPairs contains precalculated DTO label pairs based on - // the constant labels. - constLabelPairs []*dto.LabelPair - // variableLabels contains names of labels and normalization function for - // which the metric maintains variable values. - variableLabels *compiledLabels - // id is a hash of the values of the ConstLabels and fqName. This - // must be unique among all registered descriptors and can therefore be - // used as an identifier of the descriptor. - id uint64 - // dimHash is a hash of the label names (preset and variable) and the - // Help string. Each Desc with the same fqName must have the same - // dimHash. - dimHash uint64 - // err is an error that occurred during construction. It is reported on - // registration time. - err error -} - -// NewDesc allocates and initializes a new Desc. Errors are recorded in the Desc -// and will be reported on registration time. variableLabels and constLabels can -// be nil if no such labels should be set. fqName must not be empty. -// -// variableLabels only contain the label names. 
Their label values are variable -// and therefore not part of the Desc. (They are managed within the Metric.) -// -// For constLabels, the label values are constant. Therefore, they are fully -// specified in the Desc. See the Collector example for a usage pattern. -func NewDesc(fqName, help string, variableLabels []string, constLabels Labels) *Desc { - return V2.NewDesc(fqName, help, UnconstrainedLabels(variableLabels), constLabels) -} - -// NewDesc allocates and initializes a new Desc. Errors are recorded in the Desc -// and will be reported on registration time. variableLabels and constLabels can -// be nil if no such labels should be set. fqName must not be empty. -// -// variableLabels only contain the label names and normalization functions. Their -// label values are variable and therefore not part of the Desc. (They are managed -// within the Metric.) -// -// For constLabels, the label values are constant. Therefore, they are fully -// specified in the Desc. See the Collector example for a usage pattern. -func (v2) NewDesc(fqName, help string, variableLabels ConstrainableLabels, constLabels Labels) *Desc { - d := &Desc{ - fqName: fqName, - help: help, - variableLabels: variableLabels.compile(), - } - if !model.IsValidMetricName(model.LabelValue(fqName)) { - d.err = fmt.Errorf("%q is not a valid metric name", fqName) - return d - } - // labelValues contains the label values of const labels (in order of - // their sorted label names) plus the fqName (at position 0). - labelValues := make([]string, 1, len(constLabels)+1) - labelValues[0] = fqName - labelNames := make([]string, 0, len(constLabels)+len(d.variableLabels.names)) - labelNameSet := map[string]struct{}{} - // First add only the const label names and sort them... - for labelName := range constLabels { - if !checkLabelName(labelName) { - d.err = fmt.Errorf("%q is not a valid label name for metric %q", labelName, fqName) - return d - } - labelNames = append(labelNames, labelName) - labelNameSet[labelName] = struct{}{} - } - sort.Strings(labelNames) - // ... so that we can now add const label values in the order of their names. - for _, labelName := range labelNames { - labelValues = append(labelValues, constLabels[labelName]) - } - // Validate the const label values. They can't have a wrong cardinality, so - // use in len(labelValues) as expectedNumberOfValues. - if err := validateLabelValues(labelValues, len(labelValues)); err != nil { - d.err = err - return d - } - // Now add the variable label names, but prefix them with something that - // cannot be in a regular label name. That prevents matching the label - // dimension with a different mix between preset and variable labels. - for _, label := range d.variableLabels.names { - if !checkLabelName(label) { - d.err = fmt.Errorf("%q is not a valid label name for metric %q", label, fqName) - return d - } - labelNames = append(labelNames, "$"+label) - labelNameSet[label] = struct{}{} - } - if len(labelNames) != len(labelNameSet) { - d.err = fmt.Errorf("duplicate label names in constant and variable labels for metric %q", fqName) - return d - } - - xxh := xxhash.New() - for _, val := range labelValues { - xxh.WriteString(val) - xxh.Write(separatorByteSlice) - } - d.id = xxh.Sum64() - // Sort labelNames so that order doesn't matter for the hash. - sort.Strings(labelNames) - // Now hash together (in this order) the help string and the sorted - // label names. 
- xxh.Reset() - xxh.WriteString(help) - xxh.Write(separatorByteSlice) - for _, labelName := range labelNames { - xxh.WriteString(labelName) - xxh.Write(separatorByteSlice) - } - d.dimHash = xxh.Sum64() - - d.constLabelPairs = make([]*dto.LabelPair, 0, len(constLabels)) - for n, v := range constLabels { - d.constLabelPairs = append(d.constLabelPairs, &dto.LabelPair{ - Name: proto.String(n), - Value: proto.String(v), - }) - } - sort.Sort(internal.LabelPairSorter(d.constLabelPairs)) - return d -} - -// NewInvalidDesc returns an invalid descriptor, i.e. a descriptor with the -// provided error set. If a collector returning such a descriptor is registered, -// registration will fail with the provided error. NewInvalidDesc can be used by -// a Collector to signal inability to describe itself. -func NewInvalidDesc(err error) *Desc { - return &Desc{ - err: err, - } -} - -func (d *Desc) String() string { - lpStrings := make([]string, 0, len(d.constLabelPairs)) - for _, lp := range d.constLabelPairs { - lpStrings = append( - lpStrings, - fmt.Sprintf("%s=%q", lp.GetName(), lp.GetValue()), - ) - } - vlStrings := make([]string, 0, len(d.variableLabels.names)) - for _, vl := range d.variableLabels.names { - if fn, ok := d.variableLabels.labelConstraints[vl]; ok && fn != nil { - vlStrings = append(vlStrings, fmt.Sprintf("c(%s)", vl)) - } else { - vlStrings = append(vlStrings, vl) - } - } - return fmt.Sprintf( - "Desc{fqName: %q, help: %q, constLabels: {%s}, variableLabels: {%s}}", - d.fqName, - d.help, - strings.Join(lpStrings, ","), - strings.Join(vlStrings, ","), - ) -} diff --git a/vendor/github.com/prometheus/client_golang/prometheus/doc.go b/vendor/github.com/prometheus/client_golang/prometheus/doc.go deleted file mode 100644 index 962608f02..000000000 --- a/vendor/github.com/prometheus/client_golang/prometheus/doc.go +++ /dev/null @@ -1,210 +0,0 @@ -// Copyright 2014 The Prometheus Authors -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -// Package prometheus is the core instrumentation package. It provides metrics -// primitives to instrument code for monitoring. It also offers a registry for -// metrics. Sub-packages allow to expose the registered metrics via HTTP -// (package promhttp) or push them to a Pushgateway (package push). There is -// also a sub-package promauto, which provides metrics constructors with -// automatic registration. -// -// All exported functions and methods are safe to be used concurrently unless -// specified otherwise. 
-// -// # A Basic Example -// -// As a starting point, a very basic usage example: -// -// package main -// -// import ( -// "log" -// "net/http" -// -// "github.com/prometheus/client_golang/prometheus" -// "github.com/prometheus/client_golang/prometheus/promhttp" -// ) -// -// type metrics struct { -// cpuTemp prometheus.Gauge -// hdFailures *prometheus.CounterVec -// } -// -// func NewMetrics(reg prometheus.Registerer) *metrics { -// m := &metrics{ -// cpuTemp: prometheus.NewGauge(prometheus.GaugeOpts{ -// Name: "cpu_temperature_celsius", -// Help: "Current temperature of the CPU.", -// }), -// hdFailures: prometheus.NewCounterVec( -// prometheus.CounterOpts{ -// Name: "hd_errors_total", -// Help: "Number of hard-disk errors.", -// }, -// []string{"device"}, -// ), -// } -// reg.MustRegister(m.cpuTemp) -// reg.MustRegister(m.hdFailures) -// return m -// } -// -// func main() { -// // Create a non-global registry. -// reg := prometheus.NewRegistry() -// -// // Create new metrics and register them using the custom registry. -// m := NewMetrics(reg) -// // Set values for the new created metrics. -// m.cpuTemp.Set(65.3) -// m.hdFailures.With(prometheus.Labels{"device":"/dev/sda"}).Inc() -// -// // Expose metrics and custom registry via an HTTP server -// // using the HandleFor function. "/metrics" is the usual endpoint for that. -// http.Handle("/metrics", promhttp.HandlerFor(reg, promhttp.HandlerOpts{Registry: reg})) -// log.Fatal(http.ListenAndServe(":8080", nil)) -// } -// -// This is a complete program that exports two metrics, a Gauge and a Counter, -// the latter with a label attached to turn it into a (one-dimensional) vector. -// It register the metrics using a custom registry and exposes them via an HTTP server -// on the /metrics endpoint. -// -// # Metrics -// -// The number of exported identifiers in this package might appear a bit -// overwhelming. However, in addition to the basic plumbing shown in the example -// above, you only need to understand the different metric types and their -// vector versions for basic usage. Furthermore, if you are not concerned with -// fine-grained control of when and how to register metrics with the registry, -// have a look at the promauto package, which will effectively allow you to -// ignore registration altogether in simple cases. -// -// Above, you have already touched the Counter and the Gauge. There are two more -// advanced metric types: the Summary and Histogram. A more thorough description -// of those four metric types can be found in the Prometheus docs: -// https://prometheus.io/docs/concepts/metric_types/ -// -// In addition to the fundamental metric types Gauge, Counter, Summary, and -// Histogram, a very important part of the Prometheus data model is the -// partitioning of samples along dimensions called labels, which results in -// metric vectors. The fundamental types are GaugeVec, CounterVec, SummaryVec, -// and HistogramVec. -// -// While only the fundamental metric types implement the Metric interface, both -// the metrics and their vector versions implement the Collector interface. A -// Collector manages the collection of a number of Metrics, but for convenience, -// a Metric can also “collect itself”. Note that Gauge, Counter, Summary, and -// Histogram are interfaces themselves while GaugeVec, CounterVec, SummaryVec, -// and HistogramVec are not. -// -// To create instances of Metrics and their vector versions, you need a suitable -// …Opts struct, i.e. GaugeOpts, CounterOpts, SummaryOpts, or HistogramOpts. 
-// -// # Custom Collectors and constant Metrics -// -// While you could create your own implementations of Metric, most likely you -// will only ever implement the Collector interface on your own. At a first -// glance, a custom Collector seems handy to bundle Metrics for common -// registration (with the prime example of the different metric vectors above, -// which bundle all the metrics of the same name but with different labels). -// -// There is a more involved use case, too: If you already have metrics -// available, created outside of the Prometheus context, you don't need the -// interface of the various Metric types. You essentially want to mirror the -// existing numbers into Prometheus Metrics during collection. An own -// implementation of the Collector interface is perfect for that. You can create -// Metric instances “on the fly” using NewConstMetric, NewConstHistogram, and -// NewConstSummary (and their respective Must… versions). NewConstMetric is used -// for all metric types with just a float64 as their value: Counter, Gauge, and -// a special “type” called Untyped. Use the latter if you are not sure if the -// mirrored metric is a Counter or a Gauge. Creation of the Metric instance -// happens in the Collect method. The Describe method has to return separate -// Desc instances, representative of the “throw-away” metrics to be created -// later. NewDesc comes in handy to create those Desc instances. Alternatively, -// you could return no Desc at all, which will mark the Collector “unchecked”. -// No checks are performed at registration time, but metric consistency will -// still be ensured at scrape time, i.e. any inconsistencies will lead to scrape -// errors. Thus, with unchecked Collectors, the responsibility to not collect -// metrics that lead to inconsistencies in the total scrape result lies with the -// implementer of the Collector. While this is not a desirable state, it is -// sometimes necessary. The typical use case is a situation where the exact -// metrics to be returned by a Collector cannot be predicted at registration -// time, but the implementer has sufficient knowledge of the whole system to -// guarantee metric consistency. -// -// The Collector example illustrates the use case. You can also look at the -// source code of the processCollector (mirroring process metrics), the -// goCollector (mirroring Go metrics), or the expvarCollector (mirroring expvar -// metrics) as examples that are used in this package itself. -// -// If you just need to call a function to get a single float value to collect as -// a metric, GaugeFunc, CounterFunc, or UntypedFunc might be interesting -// shortcuts. -// -// # Advanced Uses of the Registry -// -// While MustRegister is the by far most common way of registering a Collector, -// sometimes you might want to handle the errors the registration might cause. -// As suggested by the name, MustRegister panics if an error occurs. With the -// Register function, the error is returned and can be handled. -// -// An error is returned if the registered Collector is incompatible or -// inconsistent with already registered metrics. The registry aims for -// consistency of the collected metrics according to the Prometheus data model. -// Inconsistencies are ideally detected at registration time, not at collect -// time. The former will usually be detected at start-up time of a program, -// while the latter will only happen at scrape time, possibly not even on the -// first scrape if the inconsistency only becomes relevant later. 
That is the -// main reason why a Collector and a Metric have to describe themselves to the -// registry. -// -// So far, everything we did operated on the so-called default registry, as it -// can be found in the global DefaultRegisterer variable. With NewRegistry, you -// can create a custom registry, or you can even implement the Registerer or -// Gatherer interfaces yourself. The methods Register and Unregister work in the -// same way on a custom registry as the global functions Register and Unregister -// on the default registry. -// -// There are a number of uses for custom registries: You can use registries with -// special properties, see NewPedanticRegistry. You can avoid global state, as -// it is imposed by the DefaultRegisterer. You can use multiple registries at -// the same time to expose different metrics in different ways. You can use -// separate registries for testing purposes. -// -// Also note that the DefaultRegisterer comes registered with a Collector for Go -// runtime metrics (via NewGoCollector) and a Collector for process metrics (via -// NewProcessCollector). With a custom registry, you are in control and decide -// yourself about the Collectors to register. -// -// # HTTP Exposition -// -// The Registry implements the Gatherer interface. The caller of the Gather -// method can then expose the gathered metrics in some way. Usually, the metrics -// are served via HTTP on the /metrics endpoint. That's happening in the example -// above. The tools to expose metrics via HTTP are in the promhttp sub-package. -// -// # Pushing to the Pushgateway -// -// Function for pushing to the Pushgateway can be found in the push sub-package. -// -// # Graphite Bridge -// -// Functions and examples to push metrics from a Gatherer to Graphite can be -// found in the graphite sub-package. -// -// # Other Means of Exposition -// -// More ways of exposing metrics can easily be added by following the approaches -// of the existing implementations. -package prometheus diff --git a/vendor/github.com/prometheus/client_golang/prometheus/expvar_collector.go b/vendor/github.com/prometheus/client_golang/prometheus/expvar_collector.go deleted file mode 100644 index de5a85629..000000000 --- a/vendor/github.com/prometheus/client_golang/prometheus/expvar_collector.go +++ /dev/null @@ -1,86 +0,0 @@ -// Copyright 2014 The Prometheus Authors -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package prometheus - -import ( - "encoding/json" - "expvar" -) - -type expvarCollector struct { - exports map[string]*Desc -} - -// NewExpvarCollector is the obsolete version of collectors.NewExpvarCollector. -// See there for documentation. -// -// Deprecated: Use collectors.NewExpvarCollector instead. -func NewExpvarCollector(exports map[string]*Desc) Collector { - return &expvarCollector{ - exports: exports, - } -} - -// Describe implements Collector. -func (e *expvarCollector) Describe(ch chan<- *Desc) { - for _, desc := range e.exports { - ch <- desc - } -} - -// Collect implements Collector. 
-func (e *expvarCollector) Collect(ch chan<- Metric) { - for name, desc := range e.exports { - var m Metric - expVar := expvar.Get(name) - if expVar == nil { - continue - } - var v interface{} - labels := make([]string, len(desc.variableLabels.names)) - if err := json.Unmarshal([]byte(expVar.String()), &v); err != nil { - ch <- NewInvalidMetric(desc, err) - continue - } - var processValue func(v interface{}, i int) - processValue = func(v interface{}, i int) { - if i >= len(labels) { - copiedLabels := append(make([]string, 0, len(labels)), labels...) - switch v := v.(type) { - case float64: - m = MustNewConstMetric(desc, UntypedValue, v, copiedLabels...) - case bool: - if v { - m = MustNewConstMetric(desc, UntypedValue, 1, copiedLabels...) - } else { - m = MustNewConstMetric(desc, UntypedValue, 0, copiedLabels...) - } - default: - return - } - ch <- m - return - } - vm, ok := v.(map[string]interface{}) - if !ok { - return - } - for lv, val := range vm { - labels[i] = lv - processValue(val, i+1) - } - } - processValue(v, 0) - } -} diff --git a/vendor/github.com/prometheus/client_golang/prometheus/fnv.go b/vendor/github.com/prometheus/client_golang/prometheus/fnv.go deleted file mode 100644 index 3d383a735..000000000 --- a/vendor/github.com/prometheus/client_golang/prometheus/fnv.go +++ /dev/null @@ -1,42 +0,0 @@ -// Copyright 2018 The Prometheus Authors -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package prometheus - -// Inline and byte-free variant of hash/fnv's fnv64a. - -const ( - offset64 = 14695981039346656037 - prime64 = 1099511628211 -) - -// hashNew initializies a new fnv64a hash value. -func hashNew() uint64 { - return offset64 -} - -// hashAdd adds a string to a fnv64a hash value, returning the updated hash. -func hashAdd(h uint64, s string) uint64 { - for i := 0; i < len(s); i++ { - h ^= uint64(s[i]) - h *= prime64 - } - return h -} - -// hashAddByte adds a byte to a fnv64a hash value, returning the updated hash. -func hashAddByte(h uint64, b byte) uint64 { - h ^= uint64(b) - h *= prime64 - return h -} diff --git a/vendor/github.com/prometheus/client_golang/prometheus/gauge.go b/vendor/github.com/prometheus/client_golang/prometheus/gauge.go deleted file mode 100644 index dd2eac940..000000000 --- a/vendor/github.com/prometheus/client_golang/prometheus/gauge.go +++ /dev/null @@ -1,311 +0,0 @@ -// Copyright 2014 The Prometheus Authors -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package prometheus - -import ( - "math" - "sync/atomic" - "time" - - dto "github.com/prometheus/client_model/go" -) - -// Gauge is a Metric that represents a single numerical value that can -// arbitrarily go up and down. -// -// A Gauge is typically used for measured values like temperatures or current -// memory usage, but also "counts" that can go up and down, like the number of -// running goroutines. -// -// To create Gauge instances, use NewGauge. -type Gauge interface { - Metric - Collector - - // Set sets the Gauge to an arbitrary value. - Set(float64) - // Inc increments the Gauge by 1. Use Add to increment it by arbitrary - // values. - Inc() - // Dec decrements the Gauge by 1. Use Sub to decrement it by arbitrary - // values. - Dec() - // Add adds the given value to the Gauge. (The value can be negative, - // resulting in a decrease of the Gauge.) - Add(float64) - // Sub subtracts the given value from the Gauge. (The value can be - // negative, resulting in an increase of the Gauge.) - Sub(float64) - - // SetToCurrentTime sets the Gauge to the current Unix time in seconds. - SetToCurrentTime() -} - -// GaugeOpts is an alias for Opts. See there for doc comments. -type GaugeOpts Opts - -// GaugeVecOpts bundles the options to create a GaugeVec metric. -// It is mandatory to set GaugeOpts, see there for mandatory fields. VariableLabels -// is optional and can safely be left to its default value. -type GaugeVecOpts struct { - GaugeOpts - - // VariableLabels are used to partition the metric vector by the given set - // of labels. Each label value will be constrained with the optional Constraint - // function, if provided. - VariableLabels ConstrainableLabels -} - -// NewGauge creates a new Gauge based on the provided GaugeOpts. -// -// The returned implementation is optimized for a fast Set method. If you have a -// choice for managing the value of a Gauge via Set vs. Inc/Dec/Add/Sub, pick -// the former. For example, the Inc method of the returned Gauge is slower than -// the Inc method of a Counter returned by NewCounter. This matches the typical -// scenarios for Gauges and Counters, where the former tends to be Set-heavy and -// the latter Inc-heavy. -func NewGauge(opts GaugeOpts) Gauge { - desc := NewDesc( - BuildFQName(opts.Namespace, opts.Subsystem, opts.Name), - opts.Help, - nil, - opts.ConstLabels, - ) - result := &gauge{desc: desc, labelPairs: desc.constLabelPairs} - result.init(result) // Init self-collection. - return result -} - -type gauge struct { - // valBits contains the bits of the represented float64 value. It has - // to go first in the struct to guarantee alignment for atomic - // operations. 
http://golang.org/pkg/sync/atomic/#pkg-note-BUG - valBits uint64 - - selfCollector - - desc *Desc - labelPairs []*dto.LabelPair -} - -func (g *gauge) Desc() *Desc { - return g.desc -} - -func (g *gauge) Set(val float64) { - atomic.StoreUint64(&g.valBits, math.Float64bits(val)) -} - -func (g *gauge) SetToCurrentTime() { - g.Set(float64(time.Now().UnixNano()) / 1e9) -} - -func (g *gauge) Inc() { - g.Add(1) -} - -func (g *gauge) Dec() { - g.Add(-1) -} - -func (g *gauge) Add(val float64) { - for { - oldBits := atomic.LoadUint64(&g.valBits) - newBits := math.Float64bits(math.Float64frombits(oldBits) + val) - if atomic.CompareAndSwapUint64(&g.valBits, oldBits, newBits) { - return - } - } -} - -func (g *gauge) Sub(val float64) { - g.Add(val * -1) -} - -func (g *gauge) Write(out *dto.Metric) error { - val := math.Float64frombits(atomic.LoadUint64(&g.valBits)) - return populateMetric(GaugeValue, val, g.labelPairs, nil, out, nil) -} - -// GaugeVec is a Collector that bundles a set of Gauges that all share the same -// Desc, but have different values for their variable labels. This is used if -// you want to count the same thing partitioned by various dimensions -// (e.g. number of operations queued, partitioned by user and operation -// type). Create instances with NewGaugeVec. -type GaugeVec struct { - *MetricVec -} - -// NewGaugeVec creates a new GaugeVec based on the provided GaugeOpts and -// partitioned by the given label names. -func NewGaugeVec(opts GaugeOpts, labelNames []string) *GaugeVec { - return V2.NewGaugeVec(GaugeVecOpts{ - GaugeOpts: opts, - VariableLabels: UnconstrainedLabels(labelNames), - }) -} - -// NewGaugeVec creates a new GaugeVec based on the provided GaugeVecOpts. -func (v2) NewGaugeVec(opts GaugeVecOpts) *GaugeVec { - desc := V2.NewDesc( - BuildFQName(opts.Namespace, opts.Subsystem, opts.Name), - opts.Help, - opts.VariableLabels, - opts.ConstLabels, - ) - return &GaugeVec{ - MetricVec: NewMetricVec(desc, func(lvs ...string) Metric { - if len(lvs) != len(desc.variableLabels.names) { - panic(makeInconsistentCardinalityError(desc.fqName, desc.variableLabels.names, lvs)) - } - result := &gauge{desc: desc, labelPairs: MakeLabelPairs(desc, lvs)} - result.init(result) // Init self-collection. - return result - }), - } -} - -// GetMetricWithLabelValues returns the Gauge for the given slice of label -// values (same order as the variable labels in Desc). If that combination of -// label values is accessed for the first time, a new Gauge is created. -// -// It is possible to call this method without using the returned Gauge to only -// create the new Gauge but leave it at its starting value 0. See also the -// SummaryVec example. -// -// Keeping the Gauge for later use is possible (and should be considered if -// performance is critical), but keep in mind that Reset, DeleteLabelValues and -// Delete can be used to delete the Gauge from the GaugeVec. In that case, the -// Gauge will still exist, but it will not be exported anymore, even if a -// Gauge with the same label values is created later. See also the CounterVec -// example. -// -// An error is returned if the number of label values is not the same as the -// number of variable labels in Desc (minus any curried labels). -// -// Note that for more than one label value, this method is prone to mistakes -// caused by an incorrect order of arguments. Consider GetMetricWith(Labels) as -// an alternative to avoid that type of mistake. 
For higher label numbers, the -// latter has a much more readable (albeit more verbose) syntax, but it comes -// with a performance overhead (for creating and processing the Labels map). -func (v *GaugeVec) GetMetricWithLabelValues(lvs ...string) (Gauge, error) { - metric, err := v.MetricVec.GetMetricWithLabelValues(lvs...) - if metric != nil { - return metric.(Gauge), err - } - return nil, err -} - -// GetMetricWith returns the Gauge for the given Labels map (the label names -// must match those of the variable labels in Desc). If that label map is -// accessed for the first time, a new Gauge is created. Implications of -// creating a Gauge without using it and keeping the Gauge for later use are -// the same as for GetMetricWithLabelValues. -// -// An error is returned if the number and names of the Labels are inconsistent -// with those of the variable labels in Desc (minus any curried labels). -// -// This method is used for the same purpose as -// GetMetricWithLabelValues(...string). See there for pros and cons of the two -// methods. -func (v *GaugeVec) GetMetricWith(labels Labels) (Gauge, error) { - metric, err := v.MetricVec.GetMetricWith(labels) - if metric != nil { - return metric.(Gauge), err - } - return nil, err -} - -// WithLabelValues works as GetMetricWithLabelValues, but panics where -// GetMetricWithLabelValues would have returned an error. Not returning an -// error allows shortcuts like -// -// myVec.WithLabelValues("404", "GET").Add(42) -func (v *GaugeVec) WithLabelValues(lvs ...string) Gauge { - g, err := v.GetMetricWithLabelValues(lvs...) - if err != nil { - panic(err) - } - return g -} - -// With works as GetMetricWith, but panics where GetMetricWithLabels would have -// returned an error. Not returning an error allows shortcuts like -// -// myVec.With(prometheus.Labels{"code": "404", "method": "GET"}).Add(42) -func (v *GaugeVec) With(labels Labels) Gauge { - g, err := v.GetMetricWith(labels) - if err != nil { - panic(err) - } - return g -} - -// CurryWith returns a vector curried with the provided labels, i.e. the -// returned vector has those labels pre-set for all labeled operations performed -// on it. The cardinality of the curried vector is reduced accordingly. The -// order of the remaining labels stays the same (just with the curried labels -// taken out of the sequence – which is relevant for the -// (GetMetric)WithLabelValues methods). It is possible to curry a curried -// vector, but only with labels not yet used for currying before. -// -// The metrics contained in the GaugeVec are shared between the curried and -// uncurried vectors. They are just accessed differently. Curried and uncurried -// vectors behave identically in terms of collection. Only one must be -// registered with a given registry (usually the uncurried version). The Reset -// method deletes all metrics, even if called on a curried vector. -func (v *GaugeVec) CurryWith(labels Labels) (*GaugeVec, error) { - vec, err := v.MetricVec.CurryWith(labels) - if vec != nil { - return &GaugeVec{vec}, err - } - return nil, err -} - -// MustCurryWith works as CurryWith but panics where CurryWith would have -// returned an error. -func (v *GaugeVec) MustCurryWith(labels Labels) *GaugeVec { - vec, err := v.CurryWith(labels) - if err != nil { - panic(err) - } - return vec -} - -// GaugeFunc is a Gauge whose value is determined at collect time by calling a -// provided function. -// -// To create GaugeFunc instances, use NewGaugeFunc. 
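(Editor's note, not part of the diff: the gauge.go doc comments above describe the public Gauge, GaugeVec, and GaugeFunc API that this vendored copy provided. A minimal usage sketch in Go, assuming the upstream github.com/prometheus/client_golang import paths; metric names, labels, and the listen address are hypothetical.)

	package main

	import (
		"net/http"

		"github.com/prometheus/client_golang/prometheus"
		"github.com/prometheus/client_golang/prometheus/promhttp"
	)

	func main() {
		// Plain Gauge: Set/Inc/Dec/Add/Sub as described in the Gauge interface above.
		queueDepth := prometheus.NewGauge(prometheus.GaugeOpts{
			Name: "example_queue_depth", // hypothetical metric name
			Help: "Current number of items waiting in the queue.",
		})

		// GaugeVec partitioned by one variable label. WithLabelValues panics on a
		// label-count mismatch; GetMetricWithLabelValues returns an error instead.
		temps := prometheus.NewGaugeVec(prometheus.GaugeOpts{
			Name: "example_temperature_celsius", // hypothetical metric name
			Help: "Last observed temperature, partitioned by sensor.",
		}, []string{"sensor"})

		// GaugeFunc: an "info"-style metric with a constant value of 1.
		buildInfo := prometheus.NewGaugeFunc(prometheus.GaugeOpts{
			Name:        "example_build_info", // hypothetical metric name
			Help:        "Constant 1, labeled with build metadata.",
			ConstLabels: prometheus.Labels{"version": "v0.0.0"}, // hypothetical version
		}, func() float64 { return 1 })

		prometheus.MustRegister(queueDepth, temps, buildInfo)

		queueDepth.Set(3)
		temps.WithLabelValues("outdoor").Set(21.5)

		http.Handle("/metrics", promhttp.Handler())
		http.ListenAndServe(":8080", nil) // hypothetical listen address
	}
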
-type GaugeFunc interface { - Metric - Collector -} - -// NewGaugeFunc creates a new GaugeFunc based on the provided GaugeOpts. The -// value reported is determined by calling the given function from within the -// Write method. Take into account that metric collection may happen -// concurrently. Therefore, it must be safe to call the provided function -// concurrently. -// -// NewGaugeFunc is a good way to create an “info” style metric with a constant -// value of 1. Example: -// https://github.com/prometheus/common/blob/8558a5b7db3c84fa38b4766966059a7bd5bfa2ee/version/info.go#L36-L56 -func NewGaugeFunc(opts GaugeOpts, function func() float64) GaugeFunc { - return newValueFunc(NewDesc( - BuildFQName(opts.Namespace, opts.Subsystem, opts.Name), - opts.Help, - nil, - opts.ConstLabels, - ), GaugeValue, function) -} diff --git a/vendor/github.com/prometheus/client_golang/prometheus/get_pid.go b/vendor/github.com/prometheus/client_golang/prometheus/get_pid.go deleted file mode 100644 index 614fd61be..000000000 --- a/vendor/github.com/prometheus/client_golang/prometheus/get_pid.go +++ /dev/null @@ -1,26 +0,0 @@ -// Copyright 2015 The Prometheus Authors -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -//go:build !js || wasm -// +build !js wasm - -package prometheus - -import "os" - -func getPIDFn() func() (int, error) { - pid := os.Getpid() - return func() (int, error) { - return pid, nil - } -} diff --git a/vendor/github.com/prometheus/client_golang/prometheus/get_pid_gopherjs.go b/vendor/github.com/prometheus/client_golang/prometheus/get_pid_gopherjs.go deleted file mode 100644 index eaf8059ee..000000000 --- a/vendor/github.com/prometheus/client_golang/prometheus/get_pid_gopherjs.go +++ /dev/null @@ -1,23 +0,0 @@ -// Copyright 2015 The Prometheus Authors -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -//go:build js && !wasm -// +build js,!wasm - -package prometheus - -func getPIDFn() func() (int, error) { - return func() (int, error) { - return 1, nil - } -} diff --git a/vendor/github.com/prometheus/client_golang/prometheus/go_collector.go b/vendor/github.com/prometheus/client_golang/prometheus/go_collector.go deleted file mode 100644 index ad9a71a5e..000000000 --- a/vendor/github.com/prometheus/client_golang/prometheus/go_collector.go +++ /dev/null @@ -1,281 +0,0 @@ -// Copyright 2018 The Prometheus Authors -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package prometheus - -import ( - "runtime" - "runtime/debug" - "time" -) - -// goRuntimeMemStats provides the metrics initially provided by runtime.ReadMemStats. -// From Go 1.17 those similar (and better) statistics are provided by runtime/metrics, so -// while eval closure works on runtime.MemStats, the struct from Go 1.17+ is -// populated using runtime/metrics. -func goRuntimeMemStats() memStatsMetrics { - return memStatsMetrics{ - { - desc: NewDesc( - memstatNamespace("alloc_bytes"), - "Number of bytes allocated and still in use.", - nil, nil, - ), - eval: func(ms *runtime.MemStats) float64 { return float64(ms.Alloc) }, - valType: GaugeValue, - }, { - desc: NewDesc( - memstatNamespace("alloc_bytes_total"), - "Total number of bytes allocated, even if freed.", - nil, nil, - ), - eval: func(ms *runtime.MemStats) float64 { return float64(ms.TotalAlloc) }, - valType: CounterValue, - }, { - desc: NewDesc( - memstatNamespace("sys_bytes"), - "Number of bytes obtained from system.", - nil, nil, - ), - eval: func(ms *runtime.MemStats) float64 { return float64(ms.Sys) }, - valType: GaugeValue, - }, { - desc: NewDesc( - memstatNamespace("lookups_total"), - "Total number of pointer lookups.", - nil, nil, - ), - eval: func(ms *runtime.MemStats) float64 { return float64(ms.Lookups) }, - valType: CounterValue, - }, { - desc: NewDesc( - memstatNamespace("mallocs_total"), - "Total number of mallocs.", - nil, nil, - ), - eval: func(ms *runtime.MemStats) float64 { return float64(ms.Mallocs) }, - valType: CounterValue, - }, { - desc: NewDesc( - memstatNamespace("frees_total"), - "Total number of frees.", - nil, nil, - ), - eval: func(ms *runtime.MemStats) float64 { return float64(ms.Frees) }, - valType: CounterValue, - }, { - desc: NewDesc( - memstatNamespace("heap_alloc_bytes"), - "Number of heap bytes allocated and still in use.", - nil, nil, - ), - eval: func(ms *runtime.MemStats) float64 { return float64(ms.HeapAlloc) }, - valType: GaugeValue, - }, { - desc: NewDesc( - memstatNamespace("heap_sys_bytes"), - "Number of heap bytes obtained from system.", - nil, nil, - ), - eval: func(ms *runtime.MemStats) float64 { return float64(ms.HeapSys) }, - valType: GaugeValue, - }, { - desc: NewDesc( - memstatNamespace("heap_idle_bytes"), - "Number of heap bytes waiting to be used.", - nil, nil, - ), - eval: func(ms *runtime.MemStats) float64 { return float64(ms.HeapIdle) }, - valType: GaugeValue, - }, { - desc: NewDesc( - memstatNamespace("heap_inuse_bytes"), - "Number of heap bytes that are in use.", - nil, nil, - ), - eval: func(ms *runtime.MemStats) float64 { return float64(ms.HeapInuse) }, - valType: GaugeValue, - }, { - desc: NewDesc( - memstatNamespace("heap_released_bytes"), - "Number of heap bytes released to OS.", - nil, nil, - ), - eval: func(ms *runtime.MemStats) float64 { return float64(ms.HeapReleased) }, - valType: GaugeValue, - }, { - desc: NewDesc( - memstatNamespace("heap_objects"), - "Number of allocated objects.", - nil, nil, - ), - eval: func(ms *runtime.MemStats) float64 { return float64(ms.HeapObjects) }, - valType: GaugeValue, - }, { - desc: NewDesc( - 
memstatNamespace("stack_inuse_bytes"), - "Number of bytes in use by the stack allocator.", - nil, nil, - ), - eval: func(ms *runtime.MemStats) float64 { return float64(ms.StackInuse) }, - valType: GaugeValue, - }, { - desc: NewDesc( - memstatNamespace("stack_sys_bytes"), - "Number of bytes obtained from system for stack allocator.", - nil, nil, - ), - eval: func(ms *runtime.MemStats) float64 { return float64(ms.StackSys) }, - valType: GaugeValue, - }, { - desc: NewDesc( - memstatNamespace("mspan_inuse_bytes"), - "Number of bytes in use by mspan structures.", - nil, nil, - ), - eval: func(ms *runtime.MemStats) float64 { return float64(ms.MSpanInuse) }, - valType: GaugeValue, - }, { - desc: NewDesc( - memstatNamespace("mspan_sys_bytes"), - "Number of bytes used for mspan structures obtained from system.", - nil, nil, - ), - eval: func(ms *runtime.MemStats) float64 { return float64(ms.MSpanSys) }, - valType: GaugeValue, - }, { - desc: NewDesc( - memstatNamespace("mcache_inuse_bytes"), - "Number of bytes in use by mcache structures.", - nil, nil, - ), - eval: func(ms *runtime.MemStats) float64 { return float64(ms.MCacheInuse) }, - valType: GaugeValue, - }, { - desc: NewDesc( - memstatNamespace("mcache_sys_bytes"), - "Number of bytes used for mcache structures obtained from system.", - nil, nil, - ), - eval: func(ms *runtime.MemStats) float64 { return float64(ms.MCacheSys) }, - valType: GaugeValue, - }, { - desc: NewDesc( - memstatNamespace("buck_hash_sys_bytes"), - "Number of bytes used by the profiling bucket hash table.", - nil, nil, - ), - eval: func(ms *runtime.MemStats) float64 { return float64(ms.BuckHashSys) }, - valType: GaugeValue, - }, { - desc: NewDesc( - memstatNamespace("gc_sys_bytes"), - "Number of bytes used for garbage collection system metadata.", - nil, nil, - ), - eval: func(ms *runtime.MemStats) float64 { return float64(ms.GCSys) }, - valType: GaugeValue, - }, { - desc: NewDesc( - memstatNamespace("other_sys_bytes"), - "Number of bytes used for other system allocations.", - nil, nil, - ), - eval: func(ms *runtime.MemStats) float64 { return float64(ms.OtherSys) }, - valType: GaugeValue, - }, { - desc: NewDesc( - memstatNamespace("next_gc_bytes"), - "Number of heap bytes when next garbage collection will take place.", - nil, nil, - ), - eval: func(ms *runtime.MemStats) float64 { return float64(ms.NextGC) }, - valType: GaugeValue, - }, - } -} - -type baseGoCollector struct { - goroutinesDesc *Desc - threadsDesc *Desc - gcDesc *Desc - gcLastTimeDesc *Desc - goInfoDesc *Desc -} - -func newBaseGoCollector() baseGoCollector { - return baseGoCollector{ - goroutinesDesc: NewDesc( - "go_goroutines", - "Number of goroutines that currently exist.", - nil, nil), - threadsDesc: NewDesc( - "go_threads", - "Number of OS threads created.", - nil, nil), - gcDesc: NewDesc( - "go_gc_duration_seconds", - "A summary of the pause duration of garbage collection cycles.", - nil, nil), - gcLastTimeDesc: NewDesc( - "go_memstats_last_gc_time_seconds", - "Number of seconds since 1970 of last garbage collection.", - nil, nil), - goInfoDesc: NewDesc( - "go_info", - "Information about the Go environment.", - nil, Labels{"version": runtime.Version()}), - } -} - -// Describe returns all descriptions of the collector. -func (c *baseGoCollector) Describe(ch chan<- *Desc) { - ch <- c.goroutinesDesc - ch <- c.threadsDesc - ch <- c.gcDesc - ch <- c.gcLastTimeDesc - ch <- c.goInfoDesc -} - -// Collect returns the current state of all metrics of the collector. 
-func (c *baseGoCollector) Collect(ch chan<- Metric) { - ch <- MustNewConstMetric(c.goroutinesDesc, GaugeValue, float64(runtime.NumGoroutine())) - - n := getRuntimeNumThreads() - ch <- MustNewConstMetric(c.threadsDesc, GaugeValue, n) - - var stats debug.GCStats - stats.PauseQuantiles = make([]time.Duration, 5) - debug.ReadGCStats(&stats) - - quantiles := make(map[float64]float64) - for idx, pq := range stats.PauseQuantiles[1:] { - quantiles[float64(idx+1)/float64(len(stats.PauseQuantiles)-1)] = pq.Seconds() - } - quantiles[0.0] = stats.PauseQuantiles[0].Seconds() - ch <- MustNewConstSummary(c.gcDesc, uint64(stats.NumGC), stats.PauseTotal.Seconds(), quantiles) - ch <- MustNewConstMetric(c.gcLastTimeDesc, GaugeValue, float64(stats.LastGC.UnixNano())/1e9) - ch <- MustNewConstMetric(c.goInfoDesc, GaugeValue, 1) -} - -func memstatNamespace(s string) string { - return "go_memstats_" + s -} - -// memStatsMetrics provide description, evaluator, runtime/metrics name, and -// value type for memstat metrics. -type memStatsMetrics []struct { - desc *Desc - eval func(*runtime.MemStats) float64 - valType ValueType -} diff --git a/vendor/github.com/prometheus/client_golang/prometheus/go_collector_go116.go b/vendor/github.com/prometheus/client_golang/prometheus/go_collector_go116.go deleted file mode 100644 index 897a6e906..000000000 --- a/vendor/github.com/prometheus/client_golang/prometheus/go_collector_go116.go +++ /dev/null @@ -1,122 +0,0 @@ -// Copyright 2021 The Prometheus Authors -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -//go:build !go1.17 -// +build !go1.17 - -package prometheus - -import ( - "runtime" - "sync" - "time" -) - -type goCollector struct { - base baseGoCollector - - // ms... are memstats related. - msLast *runtime.MemStats // Previously collected memstats. - msLastTimestamp time.Time - msMtx sync.Mutex // Protects msLast and msLastTimestamp. - msMetrics memStatsMetrics - msRead func(*runtime.MemStats) // For mocking in tests. - msMaxWait time.Duration // Wait time for fresh memstats. - msMaxAge time.Duration // Maximum allowed age of old memstats. -} - -// NewGoCollector is the obsolete version of collectors.NewGoCollector. -// See there for documentation. -// -// Deprecated: Use collectors.NewGoCollector instead. 
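(Editor's note, not part of the diff: the deprecation comment above points at collectors.NewGoCollector as the replacement for the in-package constructor being deleted here. A minimal sketch of that replacement, assuming the upstream prometheus/collectors and promhttp packages; the listen address is hypothetical.)

	package main

	import (
		"net/http"

		"github.com/prometheus/client_golang/prometheus"
		"github.com/prometheus/client_golang/prometheus/collectors"
		"github.com/prometheus/client_golang/prometheus/promhttp"
	)

	func main() {
		// A dedicated registry, so only what is registered here gets exposed.
		reg := prometheus.NewRegistry()
		reg.MustRegister(
			collectors.NewGoCollector(),
			collectors.NewProcessCollector(collectors.ProcessCollectorOpts{}),
		)

		http.Handle("/metrics", promhttp.HandlerFor(reg, promhttp.HandlerOpts{}))
		http.ListenAndServe(":2112", nil) // hypothetical listen address
	}
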
-func NewGoCollector() Collector { - msMetrics := goRuntimeMemStats() - msMetrics = append(msMetrics, struct { - desc *Desc - eval func(*runtime.MemStats) float64 - valType ValueType - }{ - // This metric is omitted in Go1.17+, see https://github.com/prometheus/client_golang/issues/842#issuecomment-861812034 - desc: NewDesc( - memstatNamespace("gc_cpu_fraction"), - "The fraction of this program's available CPU time used by the GC since the program started.", - nil, nil, - ), - eval: func(ms *runtime.MemStats) float64 { return ms.GCCPUFraction }, - valType: GaugeValue, - }) - return &goCollector{ - base: newBaseGoCollector(), - msLast: &runtime.MemStats{}, - msRead: runtime.ReadMemStats, - msMaxWait: time.Second, - msMaxAge: 5 * time.Minute, - msMetrics: msMetrics, - } -} - -// Describe returns all descriptions of the collector. -func (c *goCollector) Describe(ch chan<- *Desc) { - c.base.Describe(ch) - for _, i := range c.msMetrics { - ch <- i.desc - } -} - -// Collect returns the current state of all metrics of the collector. -func (c *goCollector) Collect(ch chan<- Metric) { - var ( - ms = &runtime.MemStats{} - done = make(chan struct{}) - ) - // Start reading memstats first as it might take a while. - go func() { - c.msRead(ms) - c.msMtx.Lock() - c.msLast = ms - c.msLastTimestamp = time.Now() - c.msMtx.Unlock() - close(done) - }() - - // Collect base non-memory metrics. - c.base.Collect(ch) - - timer := time.NewTimer(c.msMaxWait) - select { - case <-done: // Our own ReadMemStats succeeded in time. Use it. - timer.Stop() // Important for high collection frequencies to not pile up timers. - c.msCollect(ch, ms) - return - case <-timer.C: // Time out, use last memstats if possible. Continue below. - } - c.msMtx.Lock() - if time.Since(c.msLastTimestamp) < c.msMaxAge { - // Last memstats are recent enough. Collect from them under the lock. - c.msCollect(ch, c.msLast) - c.msMtx.Unlock() - return - } - // If we are here, the last memstats are too old or don't exist. We have - // to wait until our own ReadMemStats finally completes. For that to - // happen, we have to release the lock. - c.msMtx.Unlock() - <-done - c.msCollect(ch, ms) -} - -func (c *goCollector) msCollect(ch chan<- Metric, ms *runtime.MemStats) { - for _, i := range c.msMetrics { - ch <- MustNewConstMetric(i.desc, i.valType, i.eval(ms)) - } -} diff --git a/vendor/github.com/prometheus/client_golang/prometheus/go_collector_latest.go b/vendor/github.com/prometheus/client_golang/prometheus/go_collector_latest.go deleted file mode 100644 index 2d8d9f64f..000000000 --- a/vendor/github.com/prometheus/client_golang/prometheus/go_collector_latest.go +++ /dev/null @@ -1,567 +0,0 @@ -// Copyright 2021 The Prometheus Authors -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -//go:build go1.17 -// +build go1.17 - -package prometheus - -import ( - "math" - "runtime" - "runtime/metrics" - "strings" - "sync" - - "github.com/prometheus/client_golang/prometheus/internal" - - dto "github.com/prometheus/client_model/go" - "google.golang.org/protobuf/proto" -) - -const ( - // constants for strings referenced more than once. - goGCHeapTinyAllocsObjects = "/gc/heap/tiny/allocs:objects" - goGCHeapAllocsObjects = "/gc/heap/allocs:objects" - goGCHeapFreesObjects = "/gc/heap/frees:objects" - goGCHeapFreesBytes = "/gc/heap/frees:bytes" - goGCHeapAllocsBytes = "/gc/heap/allocs:bytes" - goGCHeapObjects = "/gc/heap/objects:objects" - goGCHeapGoalBytes = "/gc/heap/goal:bytes" - goMemoryClassesTotalBytes = "/memory/classes/total:bytes" - goMemoryClassesHeapObjectsBytes = "/memory/classes/heap/objects:bytes" - goMemoryClassesHeapUnusedBytes = "/memory/classes/heap/unused:bytes" - goMemoryClassesHeapReleasedBytes = "/memory/classes/heap/released:bytes" - goMemoryClassesHeapFreeBytes = "/memory/classes/heap/free:bytes" - goMemoryClassesHeapStacksBytes = "/memory/classes/heap/stacks:bytes" - goMemoryClassesOSStacksBytes = "/memory/classes/os-stacks:bytes" - goMemoryClassesMetadataMSpanInuseBytes = "/memory/classes/metadata/mspan/inuse:bytes" - goMemoryClassesMetadataMSPanFreeBytes = "/memory/classes/metadata/mspan/free:bytes" - goMemoryClassesMetadataMCacheInuseBytes = "/memory/classes/metadata/mcache/inuse:bytes" - goMemoryClassesMetadataMCacheFreeBytes = "/memory/classes/metadata/mcache/free:bytes" - goMemoryClassesProfilingBucketsBytes = "/memory/classes/profiling/buckets:bytes" - goMemoryClassesMetadataOtherBytes = "/memory/classes/metadata/other:bytes" - goMemoryClassesOtherBytes = "/memory/classes/other:bytes" -) - -// rmNamesForMemStatsMetrics represents runtime/metrics names required to populate goRuntimeMemStats from like logic. -var rmNamesForMemStatsMetrics = []string{ - goGCHeapTinyAllocsObjects, - goGCHeapAllocsObjects, - goGCHeapFreesObjects, - goGCHeapAllocsBytes, - goGCHeapObjects, - goGCHeapGoalBytes, - goMemoryClassesTotalBytes, - goMemoryClassesHeapObjectsBytes, - goMemoryClassesHeapUnusedBytes, - goMemoryClassesHeapReleasedBytes, - goMemoryClassesHeapFreeBytes, - goMemoryClassesHeapStacksBytes, - goMemoryClassesOSStacksBytes, - goMemoryClassesMetadataMSpanInuseBytes, - goMemoryClassesMetadataMSPanFreeBytes, - goMemoryClassesMetadataMCacheInuseBytes, - goMemoryClassesMetadataMCacheFreeBytes, - goMemoryClassesProfilingBucketsBytes, - goMemoryClassesMetadataOtherBytes, - goMemoryClassesOtherBytes, -} - -func bestEffortLookupRM(lookup []string) []metrics.Description { - ret := make([]metrics.Description, 0, len(lookup)) - for _, rm := range metrics.All() { - for _, m := range lookup { - if m == rm.Name { - ret = append(ret, rm) - } - } - } - return ret -} - -type goCollector struct { - base baseGoCollector - - // mu protects updates to all fields ensuring a consistent - // snapshot is always produced by Collect. - mu sync.Mutex - - // Contains all samples that has to retrieved from runtime/metrics (not all of them will be exposed). - sampleBuf []metrics.Sample - // sampleMap allows lookup for MemStats metrics and runtime/metrics histograms for exact sums. - sampleMap map[string]*metrics.Sample - - // rmExposedMetrics represents all runtime/metrics package metrics - // that were configured to be exposed. - rmExposedMetrics []collectorMetric - rmExactSumMapForHist map[string]string - - // With Go 1.17, the runtime/metrics package was introduced. 
- // From that point on, metric names produced by the runtime/metrics - // package could be generated from runtime/metrics names. However, - // these differ from the old names for the same values. - // - // This field exists to export the same values under the old names - // as well. - msMetrics memStatsMetrics - msMetricsEnabled bool -} - -type rmMetricDesc struct { - metrics.Description -} - -func matchRuntimeMetricsRules(rules []internal.GoCollectorRule) []rmMetricDesc { - var descs []rmMetricDesc - for _, d := range metrics.All() { - var ( - deny = true - desc rmMetricDesc - ) - - for _, r := range rules { - if !r.Matcher.MatchString(d.Name) { - continue - } - deny = r.Deny - } - if deny { - continue - } - - desc.Description = d - descs = append(descs, desc) - } - return descs -} - -func defaultGoCollectorOptions() internal.GoCollectorOptions { - return internal.GoCollectorOptions{ - RuntimeMetricSumForHist: map[string]string{ - "/gc/heap/allocs-by-size:bytes": goGCHeapAllocsBytes, - "/gc/heap/frees-by-size:bytes": goGCHeapFreesBytes, - }, - RuntimeMetricRules: []internal.GoCollectorRule{ - //{Matcher: regexp.MustCompile("")}, - }, - } -} - -// NewGoCollector is the obsolete version of collectors.NewGoCollector. -// See there for documentation. -// -// Deprecated: Use collectors.NewGoCollector instead. -func NewGoCollector(opts ...func(o *internal.GoCollectorOptions)) Collector { - opt := defaultGoCollectorOptions() - for _, o := range opts { - o(&opt) - } - - exposedDescriptions := matchRuntimeMetricsRules(opt.RuntimeMetricRules) - - // Collect all histogram samples so that we can get their buckets. - // The API guarantees that the buckets are always fixed for the lifetime - // of the process. - var histograms []metrics.Sample - for _, d := range exposedDescriptions { - if d.Kind == metrics.KindFloat64Histogram { - histograms = append(histograms, metrics.Sample{Name: d.Name}) - } - } - - if len(histograms) > 0 { - metrics.Read(histograms) - } - - bucketsMap := make(map[string][]float64) - for i := range histograms { - bucketsMap[histograms[i].Name] = histograms[i].Value.Float64Histogram().Buckets - } - - // Generate a collector for each exposed runtime/metrics metric. - metricSet := make([]collectorMetric, 0, len(exposedDescriptions)) - // SampleBuf is used for reading from runtime/metrics. - // We are assuming the largest case to have stable pointers for sampleMap purposes. - sampleBuf := make([]metrics.Sample, 0, len(exposedDescriptions)+len(opt.RuntimeMetricSumForHist)+len(rmNamesForMemStatsMetrics)) - sampleMap := make(map[string]*metrics.Sample, len(exposedDescriptions)) - for _, d := range exposedDescriptions { - namespace, subsystem, name, ok := internal.RuntimeMetricsToProm(&d.Description) - if !ok { - // Just ignore this metric; we can't do anything with it here. - // If a user decides to use the latest version of Go, we don't want - // to fail here. This condition is tested in TestExpectedRuntimeMetrics. 
- continue - } - - sampleBuf = append(sampleBuf, metrics.Sample{Name: d.Name}) - sampleMap[d.Name] = &sampleBuf[len(sampleBuf)-1] - - var m collectorMetric - if d.Kind == metrics.KindFloat64Histogram { - _, hasSum := opt.RuntimeMetricSumForHist[d.Name] - unit := d.Name[strings.IndexRune(d.Name, ':')+1:] - m = newBatchHistogram( - NewDesc( - BuildFQName(namespace, subsystem, name), - d.Description.Description, - nil, - nil, - ), - internal.RuntimeMetricsBucketsForUnit(bucketsMap[d.Name], unit), - hasSum, - ) - } else if d.Cumulative { - m = NewCounter(CounterOpts{ - Namespace: namespace, - Subsystem: subsystem, - Name: name, - Help: d.Description.Description, - }, - ) - } else { - m = NewGauge(GaugeOpts{ - Namespace: namespace, - Subsystem: subsystem, - Name: name, - Help: d.Description.Description, - }) - } - metricSet = append(metricSet, m) - } - - // Add exact sum metrics to sampleBuf if not added before. - for _, h := range histograms { - sumMetric, ok := opt.RuntimeMetricSumForHist[h.Name] - if !ok { - continue - } - - if _, ok := sampleMap[sumMetric]; ok { - continue - } - sampleBuf = append(sampleBuf, metrics.Sample{Name: sumMetric}) - sampleMap[sumMetric] = &sampleBuf[len(sampleBuf)-1] - } - - var ( - msMetrics memStatsMetrics - msDescriptions []metrics.Description - ) - - if !opt.DisableMemStatsLikeMetrics { - msMetrics = goRuntimeMemStats() - msDescriptions = bestEffortLookupRM(rmNamesForMemStatsMetrics) - - // Check if metric was not exposed before and if not, add to sampleBuf. - for _, mdDesc := range msDescriptions { - if _, ok := sampleMap[mdDesc.Name]; ok { - continue - } - sampleBuf = append(sampleBuf, metrics.Sample{Name: mdDesc.Name}) - sampleMap[mdDesc.Name] = &sampleBuf[len(sampleBuf)-1] - } - } - - return &goCollector{ - base: newBaseGoCollector(), - sampleBuf: sampleBuf, - sampleMap: sampleMap, - rmExposedMetrics: metricSet, - rmExactSumMapForHist: opt.RuntimeMetricSumForHist, - msMetrics: msMetrics, - msMetricsEnabled: !opt.DisableMemStatsLikeMetrics, - } -} - -// Describe returns all descriptions of the collector. -func (c *goCollector) Describe(ch chan<- *Desc) { - c.base.Describe(ch) - for _, i := range c.msMetrics { - ch <- i.desc - } - for _, m := range c.rmExposedMetrics { - ch <- m.Desc() - } -} - -// Collect returns the current state of all metrics of the collector. -func (c *goCollector) Collect(ch chan<- Metric) { - // Collect base non-memory metrics. - c.base.Collect(ch) - - if len(c.sampleBuf) == 0 { - return - } - - // Collect must be thread-safe, so prevent concurrent use of - // sampleBuf elements. Just read into sampleBuf but write all the data - // we get into our Metrics or MemStats. - // - // This lock also ensures that the Metrics we send out are all from - // the same updates, ensuring their mutual consistency insofar as - // is guaranteed by the runtime/metrics package. - // - // N.B. This locking is heavy-handed, but Collect is expected to be called - // relatively infrequently. Also the core operation here, metrics.Read, - // is fast (O(tens of microseconds)) so contention should certainly be - // low, though channel operations and any allocations may add to that. - c.mu.Lock() - defer c.mu.Unlock() - - // Populate runtime/metrics sample buffer. - metrics.Read(c.sampleBuf) - - // Collect all our runtime/metrics user chose to expose from sampleBuf (if any). - for i, metric := range c.rmExposedMetrics { - // We created samples for exposed metrics first in order, so indexes match. - sample := c.sampleBuf[i] - - // N.B. 
switch on concrete type because it's significantly more efficient - // than checking for the Counter and Gauge interface implementations. In - // this case, we control all the types here. - switch m := metric.(type) { - case *counter: - // Guard against decreases. This should never happen, but a failure - // to do so will result in a panic, which is a harsh consequence for - // a metrics collection bug. - v0, v1 := m.get(), unwrapScalarRMValue(sample.Value) - if v1 > v0 { - m.Add(unwrapScalarRMValue(sample.Value) - m.get()) - } - m.Collect(ch) - case *gauge: - m.Set(unwrapScalarRMValue(sample.Value)) - m.Collect(ch) - case *batchHistogram: - m.update(sample.Value.Float64Histogram(), c.exactSumFor(sample.Name)) - m.Collect(ch) - default: - panic("unexpected metric type") - } - } - - if c.msMetricsEnabled { - // ms is a dummy MemStats that we populate ourselves so that we can - // populate the old metrics from it if goMemStatsCollection is enabled. - var ms runtime.MemStats - memStatsFromRM(&ms, c.sampleMap) - for _, i := range c.msMetrics { - ch <- MustNewConstMetric(i.desc, i.valType, i.eval(&ms)) - } - } -} - -// unwrapScalarRMValue unwraps a runtime/metrics value that is assumed -// to be scalar and returns the equivalent float64 value. Panics if the -// value is not scalar. -func unwrapScalarRMValue(v metrics.Value) float64 { - switch v.Kind() { - case metrics.KindUint64: - return float64(v.Uint64()) - case metrics.KindFloat64: - return v.Float64() - case metrics.KindBad: - // Unsupported metric. - // - // This should never happen because we always populate our metric - // set from the runtime/metrics package. - panic("unexpected unsupported metric") - default: - // Unsupported metric kind. - // - // This should never happen because we check for this during initialization - // and flag and filter metrics whose kinds we don't understand. - panic("unexpected unsupported metric kind") - } -} - -// exactSumFor takes a runtime/metrics metric name (that is assumed to -// be of kind KindFloat64Histogram) and returns its exact sum and whether -// its exact sum exists. -// -// The runtime/metrics API for histograms doesn't currently expose exact -// sums, but some of the other metrics are in fact exact sums of histograms. -func (c *goCollector) exactSumFor(rmName string) float64 { - sumName, ok := c.rmExactSumMapForHist[rmName] - if !ok { - return 0 - } - s, ok := c.sampleMap[sumName] - if !ok { - return 0 - } - return unwrapScalarRMValue(s.Value) -} - -func memStatsFromRM(ms *runtime.MemStats, rm map[string]*metrics.Sample) { - lookupOrZero := func(name string) uint64 { - if s, ok := rm[name]; ok { - return s.Value.Uint64() - } - return 0 - } - - // Currently, MemStats adds tiny alloc count to both Mallocs AND Frees. - // The reason for this is because MemStats couldn't be extended at the time - // but there was a desire to have Mallocs at least be a little more representative, - // while having Mallocs - Frees still represent a live object count. - // Unfortunately, MemStats doesn't actually export a large allocation count, - // so it's impossible to pull this number out directly. - tinyAllocs := lookupOrZero(goGCHeapTinyAllocsObjects) - ms.Mallocs = lookupOrZero(goGCHeapAllocsObjects) + tinyAllocs - ms.Frees = lookupOrZero(goGCHeapFreesObjects) + tinyAllocs - - ms.TotalAlloc = lookupOrZero(goGCHeapAllocsBytes) - ms.Sys = lookupOrZero(goMemoryClassesTotalBytes) - ms.Lookups = 0 // Already always zero. 
- ms.HeapAlloc = lookupOrZero(goMemoryClassesHeapObjectsBytes) - ms.Alloc = ms.HeapAlloc - ms.HeapInuse = ms.HeapAlloc + lookupOrZero(goMemoryClassesHeapUnusedBytes) - ms.HeapReleased = lookupOrZero(goMemoryClassesHeapReleasedBytes) - ms.HeapIdle = ms.HeapReleased + lookupOrZero(goMemoryClassesHeapFreeBytes) - ms.HeapSys = ms.HeapInuse + ms.HeapIdle - ms.HeapObjects = lookupOrZero(goGCHeapObjects) - ms.StackInuse = lookupOrZero(goMemoryClassesHeapStacksBytes) - ms.StackSys = ms.StackInuse + lookupOrZero(goMemoryClassesOSStacksBytes) - ms.MSpanInuse = lookupOrZero(goMemoryClassesMetadataMSpanInuseBytes) - ms.MSpanSys = ms.MSpanInuse + lookupOrZero(goMemoryClassesMetadataMSPanFreeBytes) - ms.MCacheInuse = lookupOrZero(goMemoryClassesMetadataMCacheInuseBytes) - ms.MCacheSys = ms.MCacheInuse + lookupOrZero(goMemoryClassesMetadataMCacheFreeBytes) - ms.BuckHashSys = lookupOrZero(goMemoryClassesProfilingBucketsBytes) - ms.GCSys = lookupOrZero(goMemoryClassesMetadataOtherBytes) - ms.OtherSys = lookupOrZero(goMemoryClassesOtherBytes) - ms.NextGC = lookupOrZero(goGCHeapGoalBytes) - - // N.B. GCCPUFraction is intentionally omitted. This metric is not useful, - // and often misleading due to the fact that it's an average over the lifetime - // of the process. - // See https://github.com/prometheus/client_golang/issues/842#issuecomment-861812034 - // for more details. - ms.GCCPUFraction = 0 -} - -// batchHistogram is a mutable histogram that is updated -// in batches. -type batchHistogram struct { - selfCollector - - // Static fields updated only once. - desc *Desc - hasSum bool - - // Because this histogram operates in batches, it just uses a - // single mutex for everything. updates are always serialized - // but Write calls may operate concurrently with updates. - // Contention between these two sources should be rare. - mu sync.Mutex - buckets []float64 // Inclusive lower bounds, like runtime/metrics. - counts []uint64 - sum float64 // Used if hasSum is true. -} - -// newBatchHistogram creates a new batch histogram value with the given -// Desc, buckets, and whether or not it has an exact sum available. -// -// buckets must always be from the runtime/metrics package, following -// the same conventions. -func newBatchHistogram(desc *Desc, buckets []float64, hasSum bool) *batchHistogram { - // We need to remove -Inf values. runtime/metrics keeps them around. - // But -Inf bucket should not be allowed for prometheus histograms. - if buckets[0] == math.Inf(-1) { - buckets = buckets[1:] - } - h := &batchHistogram{ - desc: desc, - buckets: buckets, - // Because buckets follows runtime/metrics conventions, there's - // 1 more value in the buckets list than there are buckets represented, - // because in runtime/metrics, the bucket values represent *boundaries*, - // and non-Inf boundaries are inclusive lower bounds for that bucket. - counts: make([]uint64, len(buckets)-1), - hasSum: hasSum, - } - h.init(h) - return h -} - -// update updates the batchHistogram from a runtime/metrics histogram. -// -// sum must be provided if the batchHistogram was created to have an exact sum. -// h.buckets must be a strict subset of his.Buckets. -func (h *batchHistogram) update(his *metrics.Float64Histogram, sum float64) { - counts, buckets := his.Counts, his.Buckets - - h.mu.Lock() - defer h.mu.Unlock() - - // Clear buckets. - for i := range h.counts { - h.counts[i] = 0 - } - // Copy and reduce buckets. 
- var j int - for i, count := range counts { - h.counts[j] += count - if buckets[i+1] == h.buckets[j+1] { - j++ - } - } - if h.hasSum { - h.sum = sum - } -} - -func (h *batchHistogram) Desc() *Desc { - return h.desc -} - -func (h *batchHistogram) Write(out *dto.Metric) error { - h.mu.Lock() - defer h.mu.Unlock() - - sum := float64(0) - if h.hasSum { - sum = h.sum - } - dtoBuckets := make([]*dto.Bucket, 0, len(h.counts)) - totalCount := uint64(0) - for i, count := range h.counts { - totalCount += count - if !h.hasSum { - if count != 0 { - // N.B. This computed sum is an underestimate. - sum += h.buckets[i] * float64(count) - } - } - - // Skip the +Inf bucket, but only for the bucket list. - // It must still count for sum and totalCount. - if math.IsInf(h.buckets[i+1], 1) { - break - } - // Float64Histogram's upper bound is exclusive, so make it inclusive - // by obtaining the next float64 value down, in order. - upperBound := math.Nextafter(h.buckets[i+1], h.buckets[i]) - dtoBuckets = append(dtoBuckets, &dto.Bucket{ - CumulativeCount: proto.Uint64(totalCount), - UpperBound: proto.Float64(upperBound), - }) - } - out.Histogram = &dto.Histogram{ - Bucket: dtoBuckets, - SampleCount: proto.Uint64(totalCount), - SampleSum: proto.Float64(sum), - } - return nil -} diff --git a/vendor/github.com/prometheus/client_golang/prometheus/histogram.go b/vendor/github.com/prometheus/client_golang/prometheus/histogram.go deleted file mode 100644 index b5c8bcb39..000000000 --- a/vendor/github.com/prometheus/client_golang/prometheus/histogram.go +++ /dev/null @@ -1,1577 +0,0 @@ -// Copyright 2015 The Prometheus Authors -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package prometheus - -import ( - "fmt" - "math" - "runtime" - "sort" - "sync" - "sync/atomic" - "time" - - dto "github.com/prometheus/client_model/go" - - "google.golang.org/protobuf/proto" - "google.golang.org/protobuf/types/known/timestamppb" -) - -// nativeHistogramBounds for the frac of observed values. Only relevant for -// schema > 0. The position in the slice is the schema. (0 is never used, just -// here for convenience of using the schema directly as the index.) -// -// TODO(beorn7): Currently, we do a binary search into these slices. There are -// ways to turn it into a small number of simple array lookups. It probably only -// matters for schema 5 and beyond, but should be investigated. 
See this comment -// as a starting point: -// https://github.com/open-telemetry/opentelemetry-specification/issues/1776#issuecomment-870164310 -var nativeHistogramBounds = [][]float64{ - // Schema "0": - {0.5}, - // Schema 1: - {0.5, 0.7071067811865475}, - // Schema 2: - {0.5, 0.5946035575013605, 0.7071067811865475, 0.8408964152537144}, - // Schema 3: - { - 0.5, 0.5452538663326288, 0.5946035575013605, 0.6484197773255048, - 0.7071067811865475, 0.7711054127039704, 0.8408964152537144, 0.9170040432046711, - }, - // Schema 4: - { - 0.5, 0.5221368912137069, 0.5452538663326288, 0.5693943173783458, - 0.5946035575013605, 0.620928906036742, 0.6484197773255048, 0.6771277734684463, - 0.7071067811865475, 0.7384130729697496, 0.7711054127039704, 0.805245165974627, - 0.8408964152537144, 0.8781260801866495, 0.9170040432046711, 0.9576032806985735, - }, - // Schema 5: - { - 0.5, 0.5109485743270583, 0.5221368912137069, 0.5335702003384117, - 0.5452538663326288, 0.5571933712979462, 0.5693943173783458, 0.5818624293887887, - 0.5946035575013605, 0.6076236799902344, 0.620928906036742, 0.6345254785958666, - 0.6484197773255048, 0.6626183215798706, 0.6771277734684463, 0.6919549409819159, - 0.7071067811865475, 0.7225904034885232, 0.7384130729697496, 0.7545822137967112, - 0.7711054127039704, 0.7879904225539431, 0.805245165974627, 0.8228777390769823, - 0.8408964152537144, 0.8593096490612387, 0.8781260801866495, 0.8973545375015533, - 0.9170040432046711, 0.9370838170551498, 0.9576032806985735, 0.9785720620876999, - }, - // Schema 6: - { - 0.5, 0.5054446430258502, 0.5109485743270583, 0.5165124395106142, - 0.5221368912137069, 0.5278225891802786, 0.5335702003384117, 0.5393803988785598, - 0.5452538663326288, 0.5511912916539204, 0.5571933712979462, 0.5632608093041209, - 0.5693943173783458, 0.5755946149764913, 0.5818624293887887, 0.5881984958251406, - 0.5946035575013605, 0.6010783657263515, 0.6076236799902344, 0.6142402680534349, - 0.620928906036742, 0.6276903785123455, 0.6345254785958666, 0.6414350080393891, - 0.6484197773255048, 0.6554806057623822, 0.6626183215798706, 0.6698337620266515, - 0.6771277734684463, 0.6845012114872953, 0.6919549409819159, 0.6994898362691555, - 0.7071067811865475, 0.7148066691959849, 0.7225904034885232, 0.7304588970903234, - 0.7384130729697496, 0.7464538641456323, 0.7545822137967112, 0.762799075372269, - 0.7711054127039704, 0.7795022001189185, 0.7879904225539431, 0.7965710756711334, - 0.805245165974627, 0.8140137109286738, 0.8228777390769823, 0.8318382901633681, - 0.8408964152537144, 0.8500531768592616, 0.8593096490612387, 0.8686669176368529, - 0.8781260801866495, 0.8876882462632604, 0.8973545375015533, 0.9071260877501991, - 0.9170040432046711, 0.9269895625416926, 0.9370838170551498, 0.9472879907934827, - 0.9576032806985735, 0.9680308967461471, 0.9785720620876999, 0.9892280131939752, - }, - // Schema 7: - { - 0.5, 0.5027149505564014, 0.5054446430258502, 0.5081891574554764, - 0.5109485743270583, 0.5137229745593818, 0.5165124395106142, 0.5193170509806894, - 0.5221368912137069, 0.5249720429003435, 0.5278225891802786, 0.5306886136446309, - 0.5335702003384117, 0.5364674337629877, 0.5393803988785598, 0.5423091811066545, - 0.5452538663326288, 0.5482145409081883, 0.5511912916539204, 0.5541842058618393, - 0.5571933712979462, 0.5602188762048033, 0.5632608093041209, 0.5663192597993595, - 0.5693943173783458, 0.572486072215902, 0.5755946149764913, 0.5787200368168754, - 0.5818624293887887, 0.585021884841625, 0.5881984958251406, 0.5913923554921704, - 0.5946035575013605, 0.5978321960199137, 0.6010783657263515, 
0.6043421618132907, - 0.6076236799902344, 0.6109230164863786, 0.6142402680534349, 0.6175755319684665, - 0.620928906036742, 0.6243004885946023, 0.6276903785123455, 0.6310986751971253, - 0.6345254785958666, 0.637970889198196, 0.6414350080393891, 0.6449179367033329, - 0.6484197773255048, 0.6519406325959679, 0.6554806057623822, 0.659039800633032, - 0.6626183215798706, 0.6662162735415805, 0.6698337620266515, 0.6734708931164728, - 0.6771277734684463, 0.6808045103191123, 0.6845012114872953, 0.688217985377265, - 0.6919549409819159, 0.6957121878859629, 0.6994898362691555, 0.7032879969095076, - 0.7071067811865475, 0.7109463010845827, 0.7148066691959849, 0.718687998724491, - 0.7225904034885232, 0.7265139979245261, 0.7304588970903234, 0.7344252166684908, - 0.7384130729697496, 0.7424225829363761, 0.7464538641456323, 0.7505070348132126, - 0.7545822137967112, 0.7586795205991071, 0.762799075372269, 0.7669409989204777, - 0.7711054127039704, 0.7752924388424999, 0.7795022001189185, 0.7837348199827764, - 0.7879904225539431, 0.7922691326262467, 0.7965710756711334, 0.8008963778413465, - 0.805245165974627, 0.8096175675974316, 0.8140137109286738, 0.8184337248834821, - 0.8228777390769823, 0.8273458838280969, 0.8318382901633681, 0.8363550898207981, - 0.8408964152537144, 0.8454623996346523, 0.8500531768592616, 0.8546688815502312, - 0.8593096490612387, 0.8639756154809185, 0.8686669176368529, 0.8733836930995842, - 0.8781260801866495, 0.8828942179666361, 0.8876882462632604, 0.8925083056594671, - 0.8973545375015533, 0.9022270839033115, 0.9071260877501991, 0.9120516927035263, - 0.9170040432046711, 0.9219832844793128, 0.9269895625416926, 0.9320230241988943, - 0.9370838170551498, 0.9421720895161669, 0.9472879907934827, 0.9524316709088368, - 0.9576032806985735, 0.9628029718180622, 0.9680308967461471, 0.9732872087896164, - 0.9785720620876999, 0.9838856116165875, 0.9892280131939752, 0.9945994234836328, - }, - // Schema 8: - { - 0.5, 0.5013556375251013, 0.5027149505564014, 0.5040779490592088, - 0.5054446430258502, 0.5068150424757447, 0.5081891574554764, 0.509566998038869, - 0.5109485743270583, 0.5123338964485679, 0.5137229745593818, 0.5151158188430205, - 0.5165124395106142, 0.5179128468009786, 0.5193170509806894, 0.520725062344158, - 0.5221368912137069, 0.5235525479396449, 0.5249720429003435, 0.526395386502313, - 0.5278225891802786, 0.5292536613972564, 0.5306886136446309, 0.5321274564422321, - 0.5335702003384117, 0.5350168559101208, 0.5364674337629877, 0.5379219445313954, - 0.5393803988785598, 0.5408428074966075, 0.5423091811066545, 0.5437795304588847, - 0.5452538663326288, 0.5467321995364429, 0.5482145409081883, 0.549700901315111, - 0.5511912916539204, 0.5526857228508706, 0.5541842058618393, 0.5556867516724088, - 0.5571933712979462, 0.5587040757836845, 0.5602188762048033, 0.5617377836665098, - 0.5632608093041209, 0.564787964283144, 0.5663192597993595, 0.5678547070789026, - 0.5693943173783458, 0.5709381019847808, 0.572486072215902, 0.5740382394200894, - 0.5755946149764913, 0.5771552102951081, 0.5787200368168754, 0.5802891060137493, - 0.5818624293887887, 0.5834400184762408, 0.585021884841625, 0.5866080400818185, - 0.5881984958251406, 0.5897932637314379, 0.5913923554921704, 0.5929957828304968, - 0.5946035575013605, 0.5962156912915756, 0.5978321960199137, 0.5994530835371903, - 0.6010783657263515, 0.6027080545025619, 0.6043421618132907, 0.6059806996384005, - 0.6076236799902344, 0.6092711149137041, 0.6109230164863786, 0.6125793968185725, - 0.6142402680534349, 0.6159056423670379, 0.6175755319684665, 0.6192499490999082, - 
0.620928906036742, 0.622612415087629, 0.6243004885946023, 0.6259931389331581, - 0.6276903785123455, 0.6293922197748583, 0.6310986751971253, 0.6328097572894031, - 0.6345254785958666, 0.6362458516947014, 0.637970889198196, 0.6397006037528346, - 0.6414350080393891, 0.6431741147730128, 0.6449179367033329, 0.6466664866145447, - 0.6484197773255048, 0.6501778216898253, 0.6519406325959679, 0.6537082229673385, - 0.6554806057623822, 0.6572577939746774, 0.659039800633032, 0.6608266388015788, - 0.6626183215798706, 0.6644148621029772, 0.6662162735415805, 0.6680225691020727, - 0.6698337620266515, 0.6716498655934177, 0.6734708931164728, 0.6752968579460171, - 0.6771277734684463, 0.6789636531064505, 0.6808045103191123, 0.6826503586020058, - 0.6845012114872953, 0.6863570825438342, 0.688217985377265, 0.690083933630119, - 0.6919549409819159, 0.6938310211492645, 0.6957121878859629, 0.6975984549830999, - 0.6994898362691555, 0.7013863456101023, 0.7032879969095076, 0.7051948041086352, - 0.7071067811865475, 0.7090239421602076, 0.7109463010845827, 0.7128738720527471, - 0.7148066691959849, 0.7167447066838943, 0.718687998724491, 0.7206365595643126, - 0.7225904034885232, 0.7245495448210174, 0.7265139979245261, 0.7284837772007218, - 0.7304588970903234, 0.7324393720732029, 0.7344252166684908, 0.7364164454346837, - 0.7384130729697496, 0.7404151139112358, 0.7424225829363761, 0.7444354947621984, - 0.7464538641456323, 0.7484777058836176, 0.7505070348132126, 0.7525418658117031, - 0.7545822137967112, 0.7566280937263048, 0.7586795205991071, 0.7607365094544071, - 0.762799075372269, 0.7648672334736434, 0.7669409989204777, 0.7690203869158282, - 0.7711054127039704, 0.7731960915705107, 0.7752924388424999, 0.7773944698885442, - 0.7795022001189185, 0.7816156449856788, 0.7837348199827764, 0.7858597406461707, - 0.7879904225539431, 0.7901268813264122, 0.7922691326262467, 0.7944171921585818, - 0.7965710756711334, 0.7987307989543135, 0.8008963778413465, 0.8030678282083853, - 0.805245165974627, 0.8074284071024302, 0.8096175675974316, 0.8118126635086642, - 0.8140137109286738, 0.8162207259936375, 0.8184337248834821, 0.820652723822003, - 0.8228777390769823, 0.8251087869603088, 0.8273458838280969, 0.8295890460808079, - 0.8318382901633681, 0.8340936325652911, 0.8363550898207981, 0.8386226785089391, - 0.8408964152537144, 0.8431763167241966, 0.8454623996346523, 0.8477546807446661, - 0.8500531768592616, 0.8523579048290255, 0.8546688815502312, 0.8569861239649629, - 0.8593096490612387, 0.8616394738731368, 0.8639756154809185, 0.8663180910111553, - 0.8686669176368529, 0.871022112577578, 0.8733836930995842, 0.8757516765159389, - 0.8781260801866495, 0.8805069215187917, 0.8828942179666361, 0.8852879870317771, - 0.8876882462632604, 0.890095013257712, 0.8925083056594671, 0.8949281411607002, - 0.8973545375015533, 0.8997875124702672, 0.9022270839033115, 0.9046732696855155, - 0.9071260877501991, 0.909585556079304, 0.9120516927035263, 0.9145245157024483, - 0.9170040432046711, 0.9194902933879467, 0.9219832844793128, 0.9244830347552253, - 0.9269895625416926, 0.92950288621441, 0.9320230241988943, 0.9345499949706191, - 0.9370838170551498, 0.93962450902828, 0.9421720895161669, 0.9447265771954693, - 0.9472879907934827, 0.9498563490882775, 0.9524316709088368, 0.9550139751351947, - 0.9576032806985735, 0.9601996065815236, 0.9628029718180622, 0.9654133954938133, - 0.9680308967461471, 0.9706554947643201, 0.9732872087896164, 0.9759260581154889, - 0.9785720620876999, 0.9812252401044634, 0.9838856116165875, 0.9865531961276168, - 0.9892280131939752, 0.9919100824251095, 
0.9945994234836328, 0.9972960560854698, - }, -} - -// The nativeHistogramBounds above can be generated with the code below. -// -// TODO(beorn7): It's tempting to actually use `go generate` to generate the -// code above. However, this could lead to slightly different numbers on -// different architectures. We still need to come to terms if we are fine with -// that, or if we might prefer to specify precise numbers in the standard. -// -// var nativeHistogramBounds [][]float64 = make([][]float64, 9) -// -// func init() { -// // Populate nativeHistogramBounds. -// numBuckets := 1 -// for i := range nativeHistogramBounds { -// bounds := []float64{0.5} -// factor := math.Exp2(math.Exp2(float64(-i))) -// for j := 0; j < numBuckets-1; j++ { -// var bound float64 -// if (j+1)%2 == 0 { -// // Use previously calculated value for increased precision. -// bound = nativeHistogramBounds[i-1][j/2+1] -// } else { -// bound = bounds[j] * factor -// } -// bounds = append(bounds, bound) -// } -// numBuckets *= 2 -// nativeHistogramBounds[i] = bounds -// } -// } - -// A Histogram counts individual observations from an event or sample stream in -// configurable static buckets (or in dynamic sparse buckets as part of the -// experimental Native Histograms, see below for more details). Similar to a -// Summary, it also provides a sum of observations and an observation count. -// -// On the Prometheus server, quantiles can be calculated from a Histogram using -// the histogram_quantile PromQL function. -// -// Note that Histograms, in contrast to Summaries, can be aggregated in PromQL -// (see the documentation for detailed procedures). However, Histograms require -// the user to pre-define suitable buckets, and they are in general less -// accurate. (Both problems are addressed by the experimental Native -// Histograms. To use them, configure a NativeHistogramBucketFactor in the -// HistogramOpts. They also require a Prometheus server v2.40+ with the -// corresponding feature flag enabled.) -// -// The Observe method of a Histogram has a very low performance overhead in -// comparison with the Observe method of a Summary. -// -// To create Histogram instances, use NewHistogram. -type Histogram interface { - Metric - Collector - - // Observe adds a single observation to the histogram. Observations are - // usually positive or zero. Negative observations are accepted but - // prevent current versions of Prometheus from properly detecting - // counter resets in the sum of observations. (The experimental Native - // Histograms handle negative observations properly.) See - // https://prometheus.io/docs/practices/histograms/#count-and-sum-of-observations - // for details. - Observe(float64) -} - -// bucketLabel is used for the label that defines the upper bound of a -// bucket of a histogram ("le" -> "less or equal"). -const bucketLabel = "le" - -// DefBuckets are the default Histogram buckets. The default buckets are -// tailored to broadly measure the response time (in seconds) of a network -// service. Most likely, however, you will be required to define buckets -// customized to your use case. -var DefBuckets = []float64{.005, .01, .025, .05, .1, .25, .5, 1, 2.5, 5, 10} - -// DefNativeHistogramZeroThreshold is the default value for -// NativeHistogramZeroThreshold in the HistogramOpts. -// -// The value is 2^-128 (or 0.5*2^-127 in the actual IEEE 754 representation), -// which is a bucket boundary at all possible resolutions. 
-const DefNativeHistogramZeroThreshold = 2.938735877055719e-39 - -// NativeHistogramZeroThresholdZero can be used as NativeHistogramZeroThreshold -// in the HistogramOpts to create a zero bucket of width zero, i.e. a zero -// bucket that only receives observations of precisely zero. -const NativeHistogramZeroThresholdZero = -1 - -var errBucketLabelNotAllowed = fmt.Errorf( - "%q is not allowed as label name in histograms", bucketLabel, -) - -// LinearBuckets creates 'count' regular buckets, each 'width' wide, where the -// lowest bucket has an upper bound of 'start'. The final +Inf bucket is not -// counted and not included in the returned slice. The returned slice is meant -// to be used for the Buckets field of HistogramOpts. -// -// The function panics if 'count' is zero or negative. -func LinearBuckets(start, width float64, count int) []float64 { - if count < 1 { - panic("LinearBuckets needs a positive count") - } - buckets := make([]float64, count) - for i := range buckets { - buckets[i] = start - start += width - } - return buckets -} - -// ExponentialBuckets creates 'count' regular buckets, where the lowest bucket -// has an upper bound of 'start' and each following bucket's upper bound is -// 'factor' times the previous bucket's upper bound. The final +Inf bucket is -// not counted and not included in the returned slice. The returned slice is -// meant to be used for the Buckets field of HistogramOpts. -// -// The function panics if 'count' is 0 or negative, if 'start' is 0 or negative, -// or if 'factor' is less than or equal 1. -func ExponentialBuckets(start, factor float64, count int) []float64 { - if count < 1 { - panic("ExponentialBuckets needs a positive count") - } - if start <= 0 { - panic("ExponentialBuckets needs a positive start value") - } - if factor <= 1 { - panic("ExponentialBuckets needs a factor greater than 1") - } - buckets := make([]float64, count) - for i := range buckets { - buckets[i] = start - start *= factor - } - return buckets -} - -// ExponentialBucketsRange creates 'count' buckets, where the lowest bucket is -// 'min' and the highest bucket is 'max'. The final +Inf bucket is not counted -// and not included in the returned slice. The returned slice is meant to be -// used for the Buckets field of HistogramOpts. -// -// The function panics if 'count' is 0 or negative, if 'min' is 0 or negative. -func ExponentialBucketsRange(min, max float64, count int) []float64 { - if count < 1 { - panic("ExponentialBucketsRange count needs a positive count") - } - if min <= 0 { - panic("ExponentialBucketsRange min needs to be greater than 0") - } - - // Formula for exponential buckets. - // max = min*growthFactor^(bucketCount-1) - - // We know max/min and highest bucket. Solve for growthFactor. - growthFactor := math.Pow(max/min, 1.0/float64(count-1)) - - // Now that we know growthFactor, solve for each bucket. - buckets := make([]float64, count) - for i := 1; i <= count; i++ { - buckets[i-1] = min * math.Pow(growthFactor, float64(i-1)) - } - return buckets -} - -// HistogramOpts bundles the options for creating a Histogram metric. It is -// mandatory to set Name to a non-empty string. All other fields are optional -// and can safely be left at their zero value, although it is strongly -// encouraged to set a Help string. -type HistogramOpts struct { - // Namespace, Subsystem, and Name are components of the fully-qualified - // name of the Histogram (created by joining these components with - // "_"). 
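As a quick, self-contained illustration (not part of this diff) of what the bucket constructors above return, assuming the public client_golang import path:

package main

import (
	"fmt"

	"github.com/prometheus/client_golang/prometheus"
)

func main() {
	fmt.Println(prometheus.LinearBuckets(10, 5, 4))            // [10 15 20 25]
	fmt.Println(prometheus.ExponentialBuckets(1, 2, 5))        // [1 2 4 8 16]
	fmt.Println(prometheus.ExponentialBucketsRange(1, 100, 3)) // [1 10 100]
}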
Only Name is mandatory, the others merely help structuring the - // name. Note that the fully-qualified name of the Histogram must be a - // valid Prometheus metric name. - Namespace string - Subsystem string - Name string - - // Help provides information about this Histogram. - // - // Metrics with the same fully-qualified name must have the same Help - // string. - Help string - - // ConstLabels are used to attach fixed labels to this metric. Metrics - // with the same fully-qualified name must have the same label names in - // their ConstLabels. - // - // ConstLabels are only used rarely. In particular, do not use them to - // attach the same labels to all your metrics. Those use cases are - // better covered by target labels set by the scraping Prometheus - // server, or by one specific metric (e.g. a build_info or a - // machine_role metric). See also - // https://prometheus.io/docs/instrumenting/writing_exporters/#target-labels-not-static-scraped-labels - ConstLabels Labels - - // Buckets defines the buckets into which observations are counted. Each - // element in the slice is the upper inclusive bound of a bucket. The - // values must be sorted in strictly increasing order. There is no need - // to add a highest bucket with +Inf bound, it will be added - // implicitly. If Buckets is left as nil or set to a slice of length - // zero, it is replaced by default buckets. The default buckets are - // DefBuckets if no buckets for a native histogram (see below) are used, - // otherwise the default is no buckets. (In other words, if you want to - // use both regular buckets and buckets for a native histogram, you have - // to define the regular buckets here explicitly.) - Buckets []float64 - - // If NativeHistogramBucketFactor is greater than one, so-called sparse - // buckets are used (in addition to the regular buckets, if defined - // above). A Histogram with sparse buckets will be ingested as a Native - // Histogram by a Prometheus server with that feature enabled (requires - // Prometheus v2.40+). Sparse buckets are exponential buckets covering - // the whole float64 range (with the exception of the “zero” bucket, see - // NativeHistogramZeroThreshold below). From any one bucket to the next, - // the width of the bucket grows by a constant - // factor. NativeHistogramBucketFactor provides an upper bound for this - // factor (exception see below). The smaller - // NativeHistogramBucketFactor, the more buckets will be used and thus - // the more costly the histogram will become. A generally good trade-off - // between cost and accuracy is a value of 1.1 (each bucket is at most - // 10% wider than the previous one), which will result in each power of - // two divided into 8 buckets (e.g. there will be 8 buckets between 1 - // and 2, same as between 2 and 4, and 4 and 8, etc.). - // - // Details about the actually used factor: The factor is calculated as - // 2^(2^-n), where n is an integer number between (and including) -4 and - // 8. n is chosen so that the resulting factor is the largest that is - // still smaller or equal to NativeHistogramBucketFactor. Note that the - // smallest possible factor is therefore approx. 1.00271 (i.e. 2^(2^-8) - // ). If NativeHistogramBucketFactor is greater than 1 but smaller than - // 2^(2^-8), then the actually used factor is still 2^(2^-8) even though - // it is larger than the provided NativeHistogramBucketFactor. - // - // NOTE: Native Histograms are still an experimental feature. 
Their - // behavior might still change without a major version - // bump. Subsequently, all NativeHistogram... options here might still - // change their behavior or name (or might completely disappear) without - // a major version bump. - NativeHistogramBucketFactor float64 - // All observations with an absolute value of less or equal - // NativeHistogramZeroThreshold are accumulated into a “zero” bucket. - // For best results, this should be close to a bucket boundary. This is - // usually the case if picking a power of two. If - // NativeHistogramZeroThreshold is left at zero, - // DefNativeHistogramZeroThreshold is used as the threshold. To - // configure a zero bucket with an actual threshold of zero (i.e. only - // observations of precisely zero will go into the zero bucket), set - // NativeHistogramZeroThreshold to the NativeHistogramZeroThresholdZero - // constant (or any negative float value). - NativeHistogramZeroThreshold float64 - - // The remaining fields define a strategy to limit the number of - // populated sparse buckets. If NativeHistogramMaxBucketNumber is left - // at zero, the number of buckets is not limited. (Note that this might - // lead to unbounded memory consumption if the values observed by the - // Histogram are sufficiently wide-spread. In particular, this could be - // used as a DoS attack vector. Where the observed values depend on - // external inputs, it is highly recommended to set a - // NativeHistogramMaxBucketNumber.) Once the set - // NativeHistogramMaxBucketNumber is exceeded, the following strategy is - // enacted: - // - First, if the last reset (or the creation) of the histogram is at - // least NativeHistogramMinResetDuration ago, then the whole - // histogram is reset to its initial state (including regular - // buckets). - // - If less time has passed, or if NativeHistogramMinResetDuration is - // zero, no reset is performed. Instead, the zero threshold is - // increased sufficiently to reduce the number of buckets to or below - // NativeHistogramMaxBucketNumber, but not to more than - // NativeHistogramMaxZeroThreshold. Thus, if - // NativeHistogramMaxZeroThreshold is already at or below the current - // zero threshold, nothing happens at this step. - // - After that, if the number of buckets still exceeds - // NativeHistogramMaxBucketNumber, the resolution of the histogram is - // reduced by doubling the width of the sparse buckets (up to a - // growth factor between one bucket to the next of 2^(2^4) = 65536, - // see above). - // - Any increased zero threshold or reduced resolution is reset back - // to their original values once NativeHistogramMinResetDuration has - // passed (since the last reset or the creation of the histogram). - NativeHistogramMaxBucketNumber uint32 - NativeHistogramMinResetDuration time.Duration - NativeHistogramMaxZeroThreshold float64 - - // now is for testing purposes, by default it's time.Now. - now func() time.Time - - // afterFunc is for testing purposes, by default it's time.AfterFunc. - afterFunc func(time.Duration, func()) *time.Timer -} - -// HistogramVecOpts bundles the options to create a HistogramVec metric. -// It is mandatory to set HistogramOpts, see there for mandatory fields. VariableLabels -// is optional and can safely be left to its default value. -type HistogramVecOpts struct { - HistogramOpts - - // VariableLabels are used to partition the metric vector by the given set - // of labels. Each label value will be constrained with the optional Constraint - // function, if provided. 
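A minimal configuration sketch for the native-histogram options documented above (metric name and limits are illustrative, not prescriptive); it keeps explicit classic buckets, enables sparse buckets with at most roughly 10% growth per bucket, and bounds their number:

package main

import (
	"time"

	"github.com/prometheus/client_golang/prometheus"
)

func main() {
	h := prometheus.NewHistogram(prometheus.HistogramOpts{
		Name:    "request_duration_seconds",
		Help:    "Request latency in seconds.",
		Buckets: prometheus.DefBuckets, // explicit, because enabling native buckets disables the classic default

		NativeHistogramBucketFactor:     1.1,       // 8 sparse buckets per power of two
		NativeHistogramMaxBucketNumber:  160,       // bound memory use for wide-spread observations
		NativeHistogramMinResetDuration: time.Hour, // allow a full reset at most once per hour
	})
	prometheus.MustRegister(h)
	h.Observe(0.042)
}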
- VariableLabels ConstrainableLabels -} - -// NewHistogram creates a new Histogram based on the provided HistogramOpts. It -// panics if the buckets in HistogramOpts are not in strictly increasing order. -// -// The returned implementation also implements ExemplarObserver. It is safe to -// perform the corresponding type assertion. Exemplars are tracked separately -// for each bucket. -func NewHistogram(opts HistogramOpts) Histogram { - return newHistogram( - NewDesc( - BuildFQName(opts.Namespace, opts.Subsystem, opts.Name), - opts.Help, - nil, - opts.ConstLabels, - ), - opts, - ) -} - -func newHistogram(desc *Desc, opts HistogramOpts, labelValues ...string) Histogram { - if len(desc.variableLabels.names) != len(labelValues) { - panic(makeInconsistentCardinalityError(desc.fqName, desc.variableLabels.names, labelValues)) - } - - for _, n := range desc.variableLabels.names { - if n == bucketLabel { - panic(errBucketLabelNotAllowed) - } - } - for _, lp := range desc.constLabelPairs { - if lp.GetName() == bucketLabel { - panic(errBucketLabelNotAllowed) - } - } - - if opts.now == nil { - opts.now = time.Now - } - if opts.afterFunc == nil { - opts.afterFunc = time.AfterFunc - } - h := &histogram{ - desc: desc, - upperBounds: opts.Buckets, - labelPairs: MakeLabelPairs(desc, labelValues), - nativeHistogramMaxBuckets: opts.NativeHistogramMaxBucketNumber, - nativeHistogramMaxZeroThreshold: opts.NativeHistogramMaxZeroThreshold, - nativeHistogramMinResetDuration: opts.NativeHistogramMinResetDuration, - lastResetTime: opts.now(), - now: opts.now, - afterFunc: opts.afterFunc, - } - if len(h.upperBounds) == 0 && opts.NativeHistogramBucketFactor <= 1 { - h.upperBounds = DefBuckets - } - if opts.NativeHistogramBucketFactor <= 1 { - h.nativeHistogramSchema = math.MinInt32 // To mark that there are no sparse buckets. - } else { - switch { - case opts.NativeHistogramZeroThreshold > 0: - h.nativeHistogramZeroThreshold = opts.NativeHistogramZeroThreshold - case opts.NativeHistogramZeroThreshold == 0: - h.nativeHistogramZeroThreshold = DefNativeHistogramZeroThreshold - } // Leave h.nativeHistogramZeroThreshold at 0 otherwise. - h.nativeHistogramSchema = pickSchema(opts.NativeHistogramBucketFactor) - } - for i, upperBound := range h.upperBounds { - if i < len(h.upperBounds)-1 { - if upperBound >= h.upperBounds[i+1] { - panic(fmt.Errorf( - "histogram buckets must be in increasing order: %f >= %f", - upperBound, h.upperBounds[i+1], - )) - } - } else { - if math.IsInf(upperBound, +1) { - // The +Inf bucket is implicit. Remove it here. - h.upperBounds = h.upperBounds[:i] - } - } - } - // Finally we know the final length of h.upperBounds and can make buckets - // for both counts as well as exemplars: - h.counts[0] = &histogramCounts{buckets: make([]uint64, len(h.upperBounds))} - atomic.StoreUint64(&h.counts[0].nativeHistogramZeroThresholdBits, math.Float64bits(h.nativeHistogramZeroThreshold)) - atomic.StoreInt32(&h.counts[0].nativeHistogramSchema, h.nativeHistogramSchema) - h.counts[1] = &histogramCounts{buckets: make([]uint64, len(h.upperBounds))} - atomic.StoreUint64(&h.counts[1].nativeHistogramZeroThresholdBits, math.Float64bits(h.nativeHistogramZeroThreshold)) - atomic.StoreInt32(&h.counts[1].nativeHistogramSchema, h.nativeHistogramSchema) - h.exemplars = make([]atomic.Value, len(h.upperBounds)+1) - - h.init(h) // Init self-collection. 
- return h -} - -type histogramCounts struct { - // Order in this struct matters for the alignment required by atomic - // operations, see http://golang.org/pkg/sync/atomic/#pkg-note-BUG - - // sumBits contains the bits of the float64 representing the sum of all - // observations. - sumBits uint64 - count uint64 - - // nativeHistogramZeroBucket counts all (positive and negative) - // observations in the zero bucket (with an absolute value less or equal - // the current threshold, see next field. - nativeHistogramZeroBucket uint64 - // nativeHistogramZeroThresholdBits is the bit pattern of the current - // threshold for the zero bucket. It's initially equal to - // nativeHistogramZeroThreshold but may change according to the bucket - // count limitation strategy. - nativeHistogramZeroThresholdBits uint64 - // nativeHistogramSchema may change over time according to the bucket - // count limitation strategy and therefore has to be saved here. - nativeHistogramSchema int32 - // Number of (positive and negative) sparse buckets. - nativeHistogramBucketsNumber uint32 - - // Regular buckets. - buckets []uint64 - - // The sparse buckets for native histograms are implemented with a - // sync.Map for now. A dedicated data structure will likely be more - // efficient. There are separate maps for negative and positive - // observations. The map's value is an *int64, counting observations in - // that bucket. (Note that we don't use uint64 as an int64 won't - // overflow in practice, and working with signed numbers from the - // beginning simplifies the handling of deltas.) The map's key is the - // index of the bucket according to the used - // nativeHistogramSchema. Index 0 is for an upper bound of 1. - nativeHistogramBucketsPositive, nativeHistogramBucketsNegative sync.Map -} - -// observe manages the parts of observe that only affects -// histogramCounts. doSparse is true if sparse buckets should be done, -// too. -func (hc *histogramCounts) observe(v float64, bucket int, doSparse bool) { - if bucket < len(hc.buckets) { - atomic.AddUint64(&hc.buckets[bucket], 1) - } - atomicAddFloat(&hc.sumBits, v) - if doSparse && !math.IsNaN(v) { - var ( - key int - schema = atomic.LoadInt32(&hc.nativeHistogramSchema) - zeroThreshold = math.Float64frombits(atomic.LoadUint64(&hc.nativeHistogramZeroThresholdBits)) - bucketCreated, isInf bool - ) - if math.IsInf(v, 0) { - // Pretend v is MaxFloat64 but later increment key by one. - if math.IsInf(v, +1) { - v = math.MaxFloat64 - } else { - v = -math.MaxFloat64 - } - isInf = true - } - frac, exp := math.Frexp(math.Abs(v)) - if schema > 0 { - bounds := nativeHistogramBounds[schema] - key = sort.SearchFloat64s(bounds, frac) + (exp-1)*len(bounds) - } else { - key = exp - if frac == 0.5 { - key-- - } - offset := (1 << -schema) - 1 - key = (key + offset) >> -schema - } - if isInf { - key++ - } - switch { - case v > zeroThreshold: - bucketCreated = addToBucket(&hc.nativeHistogramBucketsPositive, key, 1) - case v < -zeroThreshold: - bucketCreated = addToBucket(&hc.nativeHistogramBucketsNegative, key, 1) - default: - atomic.AddUint64(&hc.nativeHistogramZeroBucket, 1) - } - if bucketCreated { - atomic.AddUint32(&hc.nativeHistogramBucketsNumber, 1) - } - } - // Increment count last as we take it as a signal that the observation - // is complete. - atomic.AddUint64(&hc.count, 1) -} - -type histogram struct { - // countAndHotIdx enables lock-free writes with use of atomic updates. - // The most significant bit is the hot index [0 or 1] of the count field - // below. 
Observe calls update the hot one. All remaining bits count the - // number of Observe calls. Observe starts by incrementing this counter, - // and finish by incrementing the count field in the respective - // histogramCounts, as a marker for completion. - // - // Calls of the Write method (which are non-mutating reads from the - // perspective of the histogram) swap the hot–cold under the writeMtx - // lock. A cooldown is awaited (while locked) by comparing the number of - // observations with the initiation count. Once they match, then the - // last observation on the now cool one has completed. All cold fields must - // be merged into the new hot before releasing writeMtx. - // - // Fields with atomic access first! See alignment constraint: - // http://golang.org/pkg/sync/atomic/#pkg-note-BUG - countAndHotIdx uint64 - - selfCollector - desc *Desc - - // Only used in the Write method and for sparse bucket management. - mtx sync.Mutex - - // Two counts, one is "hot" for lock-free observations, the other is - // "cold" for writing out a dto.Metric. It has to be an array of - // pointers to guarantee 64bit alignment of the histogramCounts, see - // http://golang.org/pkg/sync/atomic/#pkg-note-BUG. - counts [2]*histogramCounts - - upperBounds []float64 - labelPairs []*dto.LabelPair - exemplars []atomic.Value // One more than buckets (to include +Inf), each a *dto.Exemplar. - nativeHistogramSchema int32 // The initial schema. Set to math.MinInt32 if no sparse buckets are used. - nativeHistogramZeroThreshold float64 // The initial zero threshold. - nativeHistogramMaxZeroThreshold float64 - nativeHistogramMaxBuckets uint32 - nativeHistogramMinResetDuration time.Duration - // lastResetTime is protected by mtx. It is also used as created timestamp. - lastResetTime time.Time - // resetScheduled is protected by mtx. It is true if a reset is - // scheduled for a later time (when nativeHistogramMinResetDuration has - // passed). - resetScheduled bool - - // now is for testing purposes, by default it's time.Now. - now func() time.Time - - // afterFunc is for testing purposes, by default it's time.AfterFunc. - afterFunc func(time.Duration, func()) *time.Timer -} - -func (h *histogram) Desc() *Desc { - return h.desc -} - -func (h *histogram) Observe(v float64) { - h.observe(v, h.findBucket(v)) -} - -func (h *histogram) ObserveWithExemplar(v float64, e Labels) { - i := h.findBucket(v) - h.observe(v, i) - h.updateExemplar(v, i, e) -} - -func (h *histogram) Write(out *dto.Metric) error { - // For simplicity, we protect this whole method by a mutex. It is not in - // the hot path, i.e. Observe is called much more often than Write. The - // complication of making Write lock-free isn't worth it, if possible at - // all. - h.mtx.Lock() - defer h.mtx.Unlock() - - // Adding 1<<63 switches the hot index (from 0 to 1 or from 1 to 0) - // without touching the count bits. See the struct comments for a full - // description of the algorithm. - n := atomic.AddUint64(&h.countAndHotIdx, 1<<63) - // count is contained unchanged in the lower 63 bits. - count := n & ((1 << 63) - 1) - // The most significant bit tells us which counts is hot. The complement - // is thus the cold one. 
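A standalone sketch (not library code) of the countAndHotIdx packing described in the comments above: the top bit selects the hot counts slot, the lower 63 bits count started observations.

package main

import (
	"fmt"
	"sync/atomic"
)

func main() {
	var countAndHotIdx uint64

	// Observe path: one atomic add bumps the observation count and returns
	// the current hot index in the top bit.
	n := atomic.AddUint64(&countAndHotIdx, 1)
	fmt.Println("hot index:", n>>63, "observations:", n&((1<<63)-1)) // 0 1

	// Write path: adding 1<<63 flips the hot index without touching the count bits.
	n = atomic.AddUint64(&countAndHotIdx, 1<<63)
	fmt.Println("hot index:", n>>63, "observations:", n&((1<<63)-1)) // 1 1
}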
- hotCounts := h.counts[n>>63] - coldCounts := h.counts[(^n)>>63] - - waitForCooldown(count, coldCounts) - - his := &dto.Histogram{ - Bucket: make([]*dto.Bucket, len(h.upperBounds)), - SampleCount: proto.Uint64(count), - SampleSum: proto.Float64(math.Float64frombits(atomic.LoadUint64(&coldCounts.sumBits))), - CreatedTimestamp: timestamppb.New(h.lastResetTime), - } - out.Histogram = his - out.Label = h.labelPairs - - var cumCount uint64 - for i, upperBound := range h.upperBounds { - cumCount += atomic.LoadUint64(&coldCounts.buckets[i]) - his.Bucket[i] = &dto.Bucket{ - CumulativeCount: proto.Uint64(cumCount), - UpperBound: proto.Float64(upperBound), - } - if e := h.exemplars[i].Load(); e != nil { - his.Bucket[i].Exemplar = e.(*dto.Exemplar) - } - } - // If there is an exemplar for the +Inf bucket, we have to add that bucket explicitly. - if e := h.exemplars[len(h.upperBounds)].Load(); e != nil { - b := &dto.Bucket{ - CumulativeCount: proto.Uint64(count), - UpperBound: proto.Float64(math.Inf(1)), - Exemplar: e.(*dto.Exemplar), - } - his.Bucket = append(his.Bucket, b) - } - if h.nativeHistogramSchema > math.MinInt32 { - his.ZeroThreshold = proto.Float64(math.Float64frombits(atomic.LoadUint64(&coldCounts.nativeHistogramZeroThresholdBits))) - his.Schema = proto.Int32(atomic.LoadInt32(&coldCounts.nativeHistogramSchema)) - zeroBucket := atomic.LoadUint64(&coldCounts.nativeHistogramZeroBucket) - - defer func() { - coldCounts.nativeHistogramBucketsPositive.Range(addAndReset(&hotCounts.nativeHistogramBucketsPositive, &hotCounts.nativeHistogramBucketsNumber)) - coldCounts.nativeHistogramBucketsNegative.Range(addAndReset(&hotCounts.nativeHistogramBucketsNegative, &hotCounts.nativeHistogramBucketsNumber)) - }() - - his.ZeroCount = proto.Uint64(zeroBucket) - his.NegativeSpan, his.NegativeDelta = makeBuckets(&coldCounts.nativeHistogramBucketsNegative) - his.PositiveSpan, his.PositiveDelta = makeBuckets(&coldCounts.nativeHistogramBucketsPositive) - - // Add a no-op span to a histogram without observations and with - // a zero threshold of zero. Otherwise, a native histogram would - // look like a classic histogram to scrapers. - if *his.ZeroThreshold == 0 && *his.ZeroCount == 0 && len(his.PositiveSpan) == 0 && len(his.NegativeSpan) == 0 { - his.PositiveSpan = []*dto.BucketSpan{{ - Offset: proto.Int32(0), - Length: proto.Uint32(0), - }} - } - } - addAndResetCounts(hotCounts, coldCounts) - return nil -} - -// findBucket returns the index of the bucket for the provided value, or -// len(h.upperBounds) for the +Inf bucket. -func (h *histogram) findBucket(v float64) int { - // TODO(beorn7): For small numbers of buckets (<30), a linear search is - // slightly faster than the binary search. If we really care, we could - // switch from one search strategy to the other depending on the number - // of buckets. - // - // Microbenchmarks (BenchmarkHistogramNoLabels): - // 11 buckets: 38.3 ns/op linear - binary 48.7 ns/op - // 100 buckets: 78.1 ns/op linear - binary 54.9 ns/op - // 300 buckets: 154 ns/op linear - binary 61.6 ns/op - return sort.SearchFloat64s(h.upperBounds, v) -} - -// observe is the implementation for Observe without the findBucket part. -func (h *histogram) observe(v float64, bucket int) { - // Do not add to sparse buckets for NaN observations. - doSparse := h.nativeHistogramSchema > math.MinInt32 && !math.IsNaN(v) - // We increment h.countAndHotIdx so that the counter in the lower - // 63 bits gets incremented. 
At the same time, we get the new value - // back, which we can use to find the currently-hot counts. - n := atomic.AddUint64(&h.countAndHotIdx, 1) - hotCounts := h.counts[n>>63] - hotCounts.observe(v, bucket, doSparse) - if doSparse { - h.limitBuckets(hotCounts, v, bucket) - } -} - -// limitBuckets applies a strategy to limit the number of populated sparse -// buckets. It's generally best effort, and there are situations where the -// number can go higher (if even the lowest resolution isn't enough to reduce -// the number sufficiently, or if the provided counts aren't fully updated yet -// by a concurrently happening Write call). -func (h *histogram) limitBuckets(counts *histogramCounts, value float64, bucket int) { - if h.nativeHistogramMaxBuckets == 0 { - return // No limit configured. - } - if h.nativeHistogramMaxBuckets >= atomic.LoadUint32(&counts.nativeHistogramBucketsNumber) { - return // Bucket limit not exceeded yet. - } - - h.mtx.Lock() - defer h.mtx.Unlock() - - // The hot counts might have been swapped just before we acquired the - // lock. Re-fetch the hot counts first... - n := atomic.LoadUint64(&h.countAndHotIdx) - hotIdx := n >> 63 - coldIdx := (^n) >> 63 - hotCounts := h.counts[hotIdx] - coldCounts := h.counts[coldIdx] - // ...and then check again if we really have to reduce the bucket count. - if h.nativeHistogramMaxBuckets >= atomic.LoadUint32(&hotCounts.nativeHistogramBucketsNumber) { - return // Bucket limit not exceeded after all. - } - // Try the various strategies in order. - if h.maybeReset(hotCounts, coldCounts, coldIdx, value, bucket) { - return - } - // One of the other strategies will happen. To undo what they will do as - // soon as enough time has passed to satisfy - // h.nativeHistogramMinResetDuration, schedule a reset at the right time - // if we haven't done so already. - if h.nativeHistogramMinResetDuration > 0 && !h.resetScheduled { - h.resetScheduled = true - h.afterFunc(h.nativeHistogramMinResetDuration-h.now().Sub(h.lastResetTime), h.reset) - } - - if h.maybeWidenZeroBucket(hotCounts, coldCounts) { - return - } - h.doubleBucketWidth(hotCounts, coldCounts) -} - -// maybeReset resets the whole histogram if at least -// h.nativeHistogramMinResetDuration has been passed. It returns true if the -// histogram has been reset. The caller must have locked h.mtx. -func (h *histogram) maybeReset( - hot, cold *histogramCounts, coldIdx uint64, value float64, bucket int, -) bool { - // We are using the possibly mocked h.now() rather than - // time.Since(h.lastResetTime) to enable testing. - if h.nativeHistogramMinResetDuration == 0 || // No reset configured. - h.resetScheduled || // Do not interefere if a reset is already scheduled. - h.now().Sub(h.lastResetTime) < h.nativeHistogramMinResetDuration { - return false - } - // Completely reset coldCounts. - h.resetCounts(cold) - // Repeat the latest observation to not lose it completely. - cold.observe(value, bucket, true) - // Make coldCounts the new hot counts while resetting countAndHotIdx. - n := atomic.SwapUint64(&h.countAndHotIdx, (coldIdx<<63)+1) - count := n & ((1 << 63) - 1) - waitForCooldown(count, hot) - // Finally, reset the formerly hot counts, too. - h.resetCounts(hot) - h.lastResetTime = h.now() - return true -} - -// reset resets the whole histogram. It locks h.mtx itself, i.e. it has to be -// called without having locked h.mtx. 
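For the findBucket lookup above, a small sketch of how sort.SearchFloat64s maps an observation to a bucket index (using DefBuckets): the returned index is the first bucket whose upper bound is greater than or equal to the value, or len(buckets) for the implicit +Inf bucket.

package main

import (
	"fmt"
	"sort"
)

func main() {
	buckets := []float64{.005, .01, .025, .05, .1, .25, .5, 1, 2.5, 5, 10} // DefBuckets
	fmt.Println(sort.SearchFloat64s(buckets, 0.3))  // 6  -> bucket with upper bound 0.5
	fmt.Println(sort.SearchFloat64s(buckets, 0.25)) // 5  -> an exact bound counts in that bucket ("le")
	fmt.Println(sort.SearchFloat64s(buckets, 42))   // 11 -> the implicit +Inf bucket
}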
-func (h *histogram) reset() { - h.mtx.Lock() - defer h.mtx.Unlock() - - n := atomic.LoadUint64(&h.countAndHotIdx) - hotIdx := n >> 63 - coldIdx := (^n) >> 63 - hot := h.counts[hotIdx] - cold := h.counts[coldIdx] - // Completely reset coldCounts. - h.resetCounts(cold) - // Make coldCounts the new hot counts while resetting countAndHotIdx. - n = atomic.SwapUint64(&h.countAndHotIdx, coldIdx<<63) - count := n & ((1 << 63) - 1) - waitForCooldown(count, hot) - // Finally, reset the formerly hot counts, too. - h.resetCounts(hot) - h.lastResetTime = h.now() - h.resetScheduled = false -} - -// maybeWidenZeroBucket widens the zero bucket until it includes the existing -// buckets closest to the zero bucket (which could be two, if an equidistant -// negative and a positive bucket exists, but usually it's only one bucket to be -// merged into the new wider zero bucket). h.nativeHistogramMaxZeroThreshold -// limits how far the zero bucket can be extended, and if that's not enough to -// include an existing bucket, the method returns false. The caller must have -// locked h.mtx. -func (h *histogram) maybeWidenZeroBucket(hot, cold *histogramCounts) bool { - currentZeroThreshold := math.Float64frombits(atomic.LoadUint64(&hot.nativeHistogramZeroThresholdBits)) - if currentZeroThreshold >= h.nativeHistogramMaxZeroThreshold { - return false - } - // Find the key of the bucket closest to zero. - smallestKey := findSmallestKey(&hot.nativeHistogramBucketsPositive) - smallestNegativeKey := findSmallestKey(&hot.nativeHistogramBucketsNegative) - if smallestNegativeKey < smallestKey { - smallestKey = smallestNegativeKey - } - if smallestKey == math.MaxInt32 { - return false - } - newZeroThreshold := getLe(smallestKey, atomic.LoadInt32(&hot.nativeHistogramSchema)) - if newZeroThreshold > h.nativeHistogramMaxZeroThreshold { - return false // New threshold would exceed the max threshold. - } - atomic.StoreUint64(&cold.nativeHistogramZeroThresholdBits, math.Float64bits(newZeroThreshold)) - // Remove applicable buckets. - if _, loaded := cold.nativeHistogramBucketsNegative.LoadAndDelete(smallestKey); loaded { - atomicDecUint32(&cold.nativeHistogramBucketsNumber) - } - if _, loaded := cold.nativeHistogramBucketsPositive.LoadAndDelete(smallestKey); loaded { - atomicDecUint32(&cold.nativeHistogramBucketsNumber) - } - // Make cold counts the new hot counts. - n := atomic.AddUint64(&h.countAndHotIdx, 1<<63) - count := n & ((1 << 63) - 1) - // Swap the pointer names to represent the new roles and make - // the rest less confusing. - hot, cold = cold, hot - waitForCooldown(count, cold) - // Add all the now cold counts to the new hot counts... - addAndResetCounts(hot, cold) - // ...adjust the new zero threshold in the cold counts, too... - atomic.StoreUint64(&cold.nativeHistogramZeroThresholdBits, math.Float64bits(newZeroThreshold)) - // ...and then merge the newly deleted buckets into the wider zero - // bucket. - mergeAndDeleteOrAddAndReset := func(hotBuckets, coldBuckets *sync.Map) func(k, v interface{}) bool { - return func(k, v interface{}) bool { - key := k.(int) - bucket := v.(*int64) - if key == smallestKey { - // Merge into hot zero bucket... - atomic.AddUint64(&hot.nativeHistogramZeroBucket, uint64(atomic.LoadInt64(bucket))) - // ...and delete from cold counts. - coldBuckets.Delete(key) - atomicDecUint32(&cold.nativeHistogramBucketsNumber) - } else { - // Add to corresponding hot bucket... 
- if addToBucket(hotBuckets, key, atomic.LoadInt64(bucket)) { - atomic.AddUint32(&hot.nativeHistogramBucketsNumber, 1) - } - // ...and reset cold bucket. - atomic.StoreInt64(bucket, 0) - } - return true - } - } - - cold.nativeHistogramBucketsPositive.Range(mergeAndDeleteOrAddAndReset(&hot.nativeHistogramBucketsPositive, &cold.nativeHistogramBucketsPositive)) - cold.nativeHistogramBucketsNegative.Range(mergeAndDeleteOrAddAndReset(&hot.nativeHistogramBucketsNegative, &cold.nativeHistogramBucketsNegative)) - return true -} - -// doubleBucketWidth doubles the bucket width (by decrementing the schema -// number). Note that very sparse buckets could lead to a low reduction of the -// bucket count (or even no reduction at all). The method does nothing if the -// schema is already -4. -func (h *histogram) doubleBucketWidth(hot, cold *histogramCounts) { - coldSchema := atomic.LoadInt32(&cold.nativeHistogramSchema) - if coldSchema == -4 { - return // Already at lowest resolution. - } - coldSchema-- - atomic.StoreInt32(&cold.nativeHistogramSchema, coldSchema) - // Play it simple and just delete all cold buckets. - atomic.StoreUint32(&cold.nativeHistogramBucketsNumber, 0) - deleteSyncMap(&cold.nativeHistogramBucketsNegative) - deleteSyncMap(&cold.nativeHistogramBucketsPositive) - // Make coldCounts the new hot counts. - n := atomic.AddUint64(&h.countAndHotIdx, 1<<63) - count := n & ((1 << 63) - 1) - // Swap the pointer names to represent the new roles and make - // the rest less confusing. - hot, cold = cold, hot - waitForCooldown(count, cold) - // Add all the now cold counts to the new hot counts... - addAndResetCounts(hot, cold) - // ...adjust the schema in the cold counts, too... - atomic.StoreInt32(&cold.nativeHistogramSchema, coldSchema) - // ...and then merge the cold buckets into the wider hot buckets. - merge := func(hotBuckets *sync.Map) func(k, v interface{}) bool { - return func(k, v interface{}) bool { - key := k.(int) - bucket := v.(*int64) - // Adjust key to match the bucket to merge into. - if key > 0 { - key++ - } - key /= 2 - // Add to corresponding hot bucket. - if addToBucket(hotBuckets, key, atomic.LoadInt64(bucket)) { - atomic.AddUint32(&hot.nativeHistogramBucketsNumber, 1) - } - return true - } - } - - cold.nativeHistogramBucketsPositive.Range(merge(&hot.nativeHistogramBucketsPositive)) - cold.nativeHistogramBucketsNegative.Range(merge(&hot.nativeHistogramBucketsNegative)) - // Play it simple again and just delete all cold buckets. - atomic.StoreUint32(&cold.nativeHistogramBucketsNumber, 0) - deleteSyncMap(&cold.nativeHistogramBucketsNegative) - deleteSyncMap(&cold.nativeHistogramBucketsPositive) -} - -func (h *histogram) resetCounts(counts *histogramCounts) { - atomic.StoreUint64(&counts.sumBits, 0) - atomic.StoreUint64(&counts.count, 0) - atomic.StoreUint64(&counts.nativeHistogramZeroBucket, 0) - atomic.StoreUint64(&counts.nativeHistogramZeroThresholdBits, math.Float64bits(h.nativeHistogramZeroThreshold)) - atomic.StoreInt32(&counts.nativeHistogramSchema, h.nativeHistogramSchema) - atomic.StoreUint32(&counts.nativeHistogramBucketsNumber, 0) - for i := range h.upperBounds { - atomic.StoreUint64(&counts.buckets[i], 0) - } - deleteSyncMap(&counts.nativeHistogramBucketsNegative) - deleteSyncMap(&counts.nativeHistogramBucketsPositive) -} - -// updateExemplar replaces the exemplar for the provided bucket. With empty -// labels, it's a no-op. It panics if any of the labels is invalid. 
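For intuition about the key remapping in doubleBucketWidth above (a worked illustration, not from the source): when the schema is decremented, old keys 3 and 4 both map to new key 2, since (3+1)/2 and (4+1)/2 are both 2 under integer division, so two adjacent old buckets merge into one twice-as-wide bucket; non-positive keys skip the increment, so old keys -1 and 0 both land in new key 0.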
-func (h *histogram) updateExemplar(v float64, bucket int, l Labels) { - if l == nil { - return - } - e, err := newExemplar(v, h.now(), l) - if err != nil { - panic(err) - } - h.exemplars[bucket].Store(e) -} - -// HistogramVec is a Collector that bundles a set of Histograms that all share the -// same Desc, but have different values for their variable labels. This is used -// if you want to count the same thing partitioned by various dimensions -// (e.g. HTTP request latencies, partitioned by status code and method). Create -// instances with NewHistogramVec. -type HistogramVec struct { - *MetricVec -} - -// NewHistogramVec creates a new HistogramVec based on the provided HistogramOpts and -// partitioned by the given label names. -func NewHistogramVec(opts HistogramOpts, labelNames []string) *HistogramVec { - return V2.NewHistogramVec(HistogramVecOpts{ - HistogramOpts: opts, - VariableLabels: UnconstrainedLabels(labelNames), - }) -} - -// NewHistogramVec creates a new HistogramVec based on the provided HistogramVecOpts. -func (v2) NewHistogramVec(opts HistogramVecOpts) *HistogramVec { - desc := V2.NewDesc( - BuildFQName(opts.Namespace, opts.Subsystem, opts.Name), - opts.Help, - opts.VariableLabels, - opts.ConstLabels, - ) - return &HistogramVec{ - MetricVec: NewMetricVec(desc, func(lvs ...string) Metric { - return newHistogram(desc, opts.HistogramOpts, lvs...) - }), - } -} - -// GetMetricWithLabelValues returns the Histogram for the given slice of label -// values (same order as the variable labels in Desc). If that combination of -// label values is accessed for the first time, a new Histogram is created. -// -// It is possible to call this method without using the returned Histogram to only -// create the new Histogram but leave it at its starting value, a Histogram without -// any observations. -// -// Keeping the Histogram for later use is possible (and should be considered if -// performance is critical), but keep in mind that Reset, DeleteLabelValues and -// Delete can be used to delete the Histogram from the HistogramVec. In that case, the -// Histogram will still exist, but it will not be exported anymore, even if a -// Histogram with the same label values is created later. See also the CounterVec -// example. -// -// An error is returned if the number of label values is not the same as the -// number of variable labels in Desc (minus any curried labels). -// -// Note that for more than one label value, this method is prone to mistakes -// caused by an incorrect order of arguments. Consider GetMetricWith(Labels) as -// an alternative to avoid that type of mistake. For higher label numbers, the -// latter has a much more readable (albeit more verbose) syntax, but it comes -// with a performance overhead (for creating and processing the Labels map). -// See also the GaugeVec example. -func (v *HistogramVec) GetMetricWithLabelValues(lvs ...string) (Observer, error) { - metric, err := v.MetricVec.GetMetricWithLabelValues(lvs...) - if metric != nil { - return metric.(Observer), err - } - return nil, err -} - -// GetMetricWith returns the Histogram for the given Labels map (the label names -// must match those of the variable labels in Desc). If that label map is -// accessed for the first time, a new Histogram is created. Implications of -// creating a Histogram without using it and keeping the Histogram for later use -// are the same as for GetMetricWithLabelValues. 
-// -// An error is returned if the number and names of the Labels are inconsistent -// with those of the variable labels in Desc (minus any curried labels). -// -// This method is used for the same purpose as -// GetMetricWithLabelValues(...string). See there for pros and cons of the two -// methods. -func (v *HistogramVec) GetMetricWith(labels Labels) (Observer, error) { - metric, err := v.MetricVec.GetMetricWith(labels) - if metric != nil { - return metric.(Observer), err - } - return nil, err -} - -// WithLabelValues works as GetMetricWithLabelValues, but panics where -// GetMetricWithLabelValues would have returned an error. Not returning an -// error allows shortcuts like -// -// myVec.WithLabelValues("404", "GET").Observe(42.21) -func (v *HistogramVec) WithLabelValues(lvs ...string) Observer { - h, err := v.GetMetricWithLabelValues(lvs...) - if err != nil { - panic(err) - } - return h -} - -// With works as GetMetricWith but panics where GetMetricWithLabels would have -// returned an error. Not returning an error allows shortcuts like -// -// myVec.With(prometheus.Labels{"code": "404", "method": "GET"}).Observe(42.21) -func (v *HistogramVec) With(labels Labels) Observer { - h, err := v.GetMetricWith(labels) - if err != nil { - panic(err) - } - return h -} - -// CurryWith returns a vector curried with the provided labels, i.e. the -// returned vector has those labels pre-set for all labeled operations performed -// on it. The cardinality of the curried vector is reduced accordingly. The -// order of the remaining labels stays the same (just with the curried labels -// taken out of the sequence – which is relevant for the -// (GetMetric)WithLabelValues methods). It is possible to curry a curried -// vector, but only with labels not yet used for currying before. -// -// The metrics contained in the HistogramVec are shared between the curried and -// uncurried vectors. They are just accessed differently. Curried and uncurried -// vectors behave identically in terms of collection. Only one must be -// registered with a given registry (usually the uncurried version). The Reset -// method deletes all metrics, even if called on a curried vector. -func (v *HistogramVec) CurryWith(labels Labels) (ObserverVec, error) { - vec, err := v.MetricVec.CurryWith(labels) - if vec != nil { - return &HistogramVec{vec}, err - } - return nil, err -} - -// MustCurryWith works as CurryWith but panics where CurryWith would have -// returned an error. 
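A usage sketch for the vector and currying APIs described above (label names and values are illustrative):

package main

import "github.com/prometheus/client_golang/prometheus"

func main() {
	hv := prometheus.NewHistogramVec(prometheus.HistogramOpts{
		Name:    "http_request_duration_seconds",
		Help:    "HTTP request latency in seconds.",
		Buckets: prometheus.DefBuckets,
	}, []string{"handler", "method"})
	prometheus.MustRegister(hv)

	// WithLabelValues panics on a label-count mismatch; GetMetricWithLabelValues
	// returns the error instead.
	hv.WithLabelValues("/api", "GET").Observe(0.021)

	// Curry the handler label; only "method" remains variable on the returned vector.
	byHandler := hv.MustCurryWith(prometheus.Labels{"handler": "/api"})
	byHandler.WithLabelValues("POST").Observe(0.034)
}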
-func (v *HistogramVec) MustCurryWith(labels Labels) ObserverVec { - vec, err := v.CurryWith(labels) - if err != nil { - panic(err) - } - return vec -} - -type constHistogram struct { - desc *Desc - count uint64 - sum float64 - buckets map[float64]uint64 - labelPairs []*dto.LabelPair - createdTs *timestamppb.Timestamp -} - -func (h *constHistogram) Desc() *Desc { - return h.desc -} - -func (h *constHistogram) Write(out *dto.Metric) error { - his := &dto.Histogram{ - CreatedTimestamp: h.createdTs, - } - - buckets := make([]*dto.Bucket, 0, len(h.buckets)) - - his.SampleCount = proto.Uint64(h.count) - his.SampleSum = proto.Float64(h.sum) - for upperBound, count := range h.buckets { - buckets = append(buckets, &dto.Bucket{ - CumulativeCount: proto.Uint64(count), - UpperBound: proto.Float64(upperBound), - }) - } - - if len(buckets) > 0 { - sort.Sort(buckSort(buckets)) - } - his.Bucket = buckets - - out.Histogram = his - out.Label = h.labelPairs - - return nil -} - -// NewConstHistogram returns a metric representing a Prometheus histogram with -// fixed values for the count, sum, and bucket counts. As those parameters -// cannot be changed, the returned value does not implement the Histogram -// interface (but only the Metric interface). Users of this package will not -// have much use for it in regular operations. However, when implementing custom -// Collectors, it is useful as a throw-away metric that is generated on the fly -// to send it to Prometheus in the Collect method. -// -// buckets is a map of upper bounds to cumulative counts, excluding the +Inf -// bucket. The +Inf bucket is implicit, and its value is equal to the provided count. -// -// NewConstHistogram returns an error if the length of labelValues is not -// consistent with the variable labels in Desc or if Desc is invalid. -func NewConstHistogram( - desc *Desc, - count uint64, - sum float64, - buckets map[float64]uint64, - labelValues ...string, -) (Metric, error) { - if desc.err != nil { - return nil, desc.err - } - if err := validateLabelValues(labelValues, len(desc.variableLabels.names)); err != nil { - return nil, err - } - return &constHistogram{ - desc: desc, - count: count, - sum: sum, - buckets: buckets, - labelPairs: MakeLabelPairs(desc, labelValues), - }, nil -} - -// MustNewConstHistogram is a version of NewConstHistogram that panics where -// NewConstHistogram would have returned an error. -func MustNewConstHistogram( - desc *Desc, - count uint64, - sum float64, - buckets map[float64]uint64, - labelValues ...string, -) Metric { - m, err := NewConstHistogram(desc, count, sum, buckets, labelValues...) - if err != nil { - panic(err) - } - return m -} - -type buckSort []*dto.Bucket - -func (s buckSort) Len() int { - return len(s) -} - -func (s buckSort) Swap(i, j int) { - s[i], s[j] = s[j], s[i] -} - -func (s buckSort) Less(i, j int) bool { - return s[i].GetUpperBound() < s[j].GetUpperBound() -} - -// pickSchema returns the largest number n between -4 and 8 such that -// 2^(2^-n) is less or equal the provided bucketFactor. -// -// Special cases: -// - bucketFactor <= 1: panics. -// - bucketFactor < 2^(2^-8) (but > 1): still returns 8. 
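A sketch of the const-histogram constructor described above, as it might be used from a custom Collector's Collect method (descriptor name and numbers are made up for illustration):

package main

import "github.com/prometheus/client_golang/prometheus"

func main() {
	desc := prometheus.NewDesc("job_duration_seconds", "Duration of completed batch jobs.", nil, nil)
	m := prometheus.MustNewConstHistogram(
		desc,
		42,    // observation count; also the value of the implicit +Inf bucket
		127.5, // sum of observations
		map[float64]uint64{1: 10, 5: 30, 10: 40}, // cumulative counts per upper bound
	)
	_ = m // in a real Collector, this would be sent on the Collect channel: ch <- m
}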
-func pickSchema(bucketFactor float64) int32 { - if bucketFactor <= 1 { - panic(fmt.Errorf("bucketFactor %f is <=1", bucketFactor)) - } - floor := math.Floor(math.Log2(math.Log2(bucketFactor))) - switch { - case floor <= -8: - return 8 - case floor >= 4: - return -4 - default: - return -int32(floor) - } -} - -func makeBuckets(buckets *sync.Map) ([]*dto.BucketSpan, []int64) { - var ii []int - buckets.Range(func(k, v interface{}) bool { - ii = append(ii, k.(int)) - return true - }) - sort.Ints(ii) - - if len(ii) == 0 { - return nil, nil - } - - var ( - spans []*dto.BucketSpan - deltas []int64 - prevCount int64 - nextI int - ) - - appendDelta := func(count int64) { - *spans[len(spans)-1].Length++ - deltas = append(deltas, count-prevCount) - prevCount = count - } - - for n, i := range ii { - v, _ := buckets.Load(i) - count := atomic.LoadInt64(v.(*int64)) - // Multiple spans with only small gaps in between are probably - // encoded more efficiently as one larger span with a few empty - // buckets. Needs some research to find the sweet spot. For now, - // we assume that gaps of one or two buckets should not create - // a new span. - iDelta := int32(i - nextI) - if n == 0 || iDelta > 2 { - // We have to create a new span, either because we are - // at the very beginning, or because we have found a gap - // of more than two buckets. - spans = append(spans, &dto.BucketSpan{ - Offset: proto.Int32(iDelta), - Length: proto.Uint32(0), - }) - } else { - // We have found a small gap (or no gap at all). - // Insert empty buckets as needed. - for j := int32(0); j < iDelta; j++ { - appendDelta(0) - } - } - appendDelta(count) - nextI = i + 1 - } - return spans, deltas -} - -// addToBucket increments the sparse bucket at key by the provided amount. It -// returns true if a new sparse bucket had to be created for that. -func addToBucket(buckets *sync.Map, key int, increment int64) bool { - if existingBucket, ok := buckets.Load(key); ok { - // Fast path without allocation. - atomic.AddInt64(existingBucket.(*int64), increment) - return false - } - // Bucket doesn't exist yet. Slow path allocating new counter. - newBucket := increment // TODO(beorn7): Check if this is sufficient to not let increment escape. - if actualBucket, loaded := buckets.LoadOrStore(key, &newBucket); loaded { - // The bucket was created concurrently in another goroutine. - // Have to increment after all. - atomic.AddInt64(actualBucket.(*int64), increment) - return false - } - return true -} - -// addAndReset returns a function to be used with sync.Map.Range of spare -// buckets in coldCounts. It increments the buckets in the provided hotBuckets -// according to the buckets ranged through. It then resets all buckets ranged -// through to 0 (but leaves them in place so that they don't need to get -// recreated on the next scrape). 
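For intuition about the span/delta encoding produced by makeBuckets above (a worked illustration, not from the source): sparse buckets at keys 0, 1 and 4 with counts 5, 3 and 2 leave a gap of only two buckets at keys 2 and 3, so no new span is started; the result is a single span {Offset: 0, Length: 5} with deltas [5, -2, -3, 0, 2], which decode cumulatively back to the absolute counts 5, 3, 0, 0, 2.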
-func addAndReset(hotBuckets *sync.Map, bucketNumber *uint32) func(k, v interface{}) bool { - return func(k, v interface{}) bool { - bucket := v.(*int64) - if addToBucket(hotBuckets, k.(int), atomic.LoadInt64(bucket)) { - atomic.AddUint32(bucketNumber, 1) - } - atomic.StoreInt64(bucket, 0) - return true - } -} - -func deleteSyncMap(m *sync.Map) { - m.Range(func(k, v interface{}) bool { - m.Delete(k) - return true - }) -} - -func findSmallestKey(m *sync.Map) int { - result := math.MaxInt32 - m.Range(func(k, v interface{}) bool { - key := k.(int) - if key < result { - result = key - } - return true - }) - return result -} - -func getLe(key int, schema int32) float64 { - // Here a bit of context about the behavior for the last bucket counting - // regular numbers (called simply "last bucket" below) and the bucket - // counting observations of ±Inf (called "inf bucket" below, with a key - // one higher than that of the "last bucket"): - // - // If we apply the usual formula to the last bucket, its upper bound - // would be calculated as +Inf. The reason is that the max possible - // regular float64 number (math.MaxFloat64) doesn't coincide with one of - // the calculated bucket boundaries. So the calculated boundary has to - // be larger than math.MaxFloat64, and the only float64 larger than - // math.MaxFloat64 is +Inf. However, we want to count actual - // observations of ±Inf in the inf bucket. Therefore, we have to treat - // the upper bound of the last bucket specially and set it to - // math.MaxFloat64. (The upper bound of the inf bucket, with its key - // being one higher than that of the last bucket, naturally comes out as - // +Inf by the usual formula. So that's fine.) - // - // math.MaxFloat64 has a frac of 0.9999999999999999 and an exp of - // 1024. If there were a float64 number following math.MaxFloat64, it - // would have a frac of 1.0 and an exp of 1024, or equivalently a frac - // of 0.5 and an exp of 1025. However, since frac must be smaller than - // 1, and exp must be smaller than 1025, either representation overflows - // a float64. (Which, in turn, is the reason that math.MaxFloat64 is the - // largest possible float64. Q.E.D.) However, the formula for - // calculating the upper bound from the idx and schema of the last - // bucket results in precisely that. It is either frac=1.0 & exp=1024 - // (for schema < 0) or frac=0.5 & exp=1025 (for schema >=0). (This is, - // by the way, a power of two where the exponent itself is a power of - // two, 2¹⁰ in fact, which coinicides with a bucket boundary in all - // schemas.) So these are the special cases we have to catch below. - if schema < 0 { - exp := key << -schema - if exp == 1024 { - // This is the last bucket before the overflow bucket - // (for ±Inf observations). Return math.MaxFloat64 as - // explained above. - return math.MaxFloat64 - } - return math.Ldexp(1, exp) - } - - fracIdx := key & ((1 << schema) - 1) - frac := nativeHistogramBounds[schema][fracIdx] - exp := (key >> schema) + 1 - if frac == 0.5 && exp == 1025 { - // This is the last bucket before the overflow bucket (for ±Inf - // observations). Return math.MaxFloat64 as explained above. - return math.MaxFloat64 - } - return math.Ldexp(frac, exp) -} - -// waitForCooldown returns after the count field in the provided histogramCounts -// has reached the provided count value. -func waitForCooldown(count uint64, counts *histogramCounts) { - for count != atomic.LoadUint64(&counts.count) { - runtime.Gosched() // Let observations get work done. 
- } -} - -// atomicAddFloat adds the provided float atomically to another float -// represented by the bit pattern the bits pointer is pointing to. -func atomicAddFloat(bits *uint64, v float64) { - for { - loadedBits := atomic.LoadUint64(bits) - newBits := math.Float64bits(math.Float64frombits(loadedBits) + v) - if atomic.CompareAndSwapUint64(bits, loadedBits, newBits) { - break - } - } -} - -// atomicDecUint32 atomically decrements the uint32 p points to. See -// https://pkg.go.dev/sync/atomic#AddUint32 to understand how this is done. -func atomicDecUint32(p *uint32) { - atomic.AddUint32(p, ^uint32(0)) -} - -// addAndResetCounts adds certain fields (count, sum, conventional buckets, zero -// bucket) from the cold counts to the corresponding fields in the hot -// counts. Those fields are then reset to 0 in the cold counts. -func addAndResetCounts(hot, cold *histogramCounts) { - atomic.AddUint64(&hot.count, atomic.LoadUint64(&cold.count)) - atomic.StoreUint64(&cold.count, 0) - coldSum := math.Float64frombits(atomic.LoadUint64(&cold.sumBits)) - atomicAddFloat(&hot.sumBits, coldSum) - atomic.StoreUint64(&cold.sumBits, 0) - for i := range hot.buckets { - atomic.AddUint64(&hot.buckets[i], atomic.LoadUint64(&cold.buckets[i])) - atomic.StoreUint64(&cold.buckets[i], 0) - } - atomic.AddUint64(&hot.nativeHistogramZeroBucket, atomic.LoadUint64(&cold.nativeHistogramZeroBucket)) - atomic.StoreUint64(&cold.nativeHistogramZeroBucket, 0) -} diff --git a/vendor/github.com/prometheus/client_golang/prometheus/internal/almost_equal.go b/vendor/github.com/prometheus/client_golang/prometheus/internal/almost_equal.go deleted file mode 100644 index 1ed5abe74..000000000 --- a/vendor/github.com/prometheus/client_golang/prometheus/internal/almost_equal.go +++ /dev/null @@ -1,60 +0,0 @@ -// Copyright (c) 2015 Björn Rabenstein -// -// Permission is hereby granted, free of charge, to any person obtaining a copy -// of this software and associated documentation files (the "Software"), to deal -// in the Software without restriction, including without limitation the rights -// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -// copies of the Software, and to permit persons to whom the Software is -// furnished to do so, subject to the following conditions: -// -// The above copyright notice and this permission notice shall be included in all -// copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -// SOFTWARE. -// -// The code in this package is copy/paste to avoid a dependency. Hence this file -// carries the copyright of the original repo. -// https://github.com/beorn7/floats -package internal - -import ( - "math" -) - -// minNormalFloat64 is the smallest positive normal value of type float64. -var minNormalFloat64 = math.Float64frombits(0x0010000000000000) - -// AlmostEqualFloat64 returns true if a and b are equal within a relative error -// of epsilon. See http://floating-point-gui.de/errors/comparison/ for the -// details of the applied method. 
-func AlmostEqualFloat64(a, b, epsilon float64) bool { - if a == b { - return true - } - absA := math.Abs(a) - absB := math.Abs(b) - diff := math.Abs(a - b) - if a == 0 || b == 0 || absA+absB < minNormalFloat64 { - return diff < epsilon*minNormalFloat64 - } - return diff/math.Min(absA+absB, math.MaxFloat64) < epsilon -} - -// AlmostEqualFloat64s is the slice form of AlmostEqualFloat64. -func AlmostEqualFloat64s(a, b []float64, epsilon float64) bool { - if len(a) != len(b) { - return false - } - for i := range a { - if !AlmostEqualFloat64(a[i], b[i], epsilon) { - return false - } - } - return true -} diff --git a/vendor/github.com/prometheus/client_golang/prometheus/internal/difflib.go b/vendor/github.com/prometheus/client_golang/prometheus/internal/difflib.go deleted file mode 100644 index a595a2036..000000000 --- a/vendor/github.com/prometheus/client_golang/prometheus/internal/difflib.go +++ /dev/null @@ -1,654 +0,0 @@ -// Copyright 2022 The Prometheus Authors -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// It provides tools to compare sequences of strings and generate textual diffs. -// -// Maintaining `GetUnifiedDiffString` here because original repository -// (https://github.com/pmezard/go-difflib) is no longer maintained. -package internal - -import ( - "bufio" - "bytes" - "fmt" - "io" - "strings" -) - -func min(a, b int) int { - if a < b { - return a - } - return b -} - -func max(a, b int) int { - if a > b { - return a - } - return b -} - -func calculateRatio(matches, length int) float64 { - if length > 0 { - return 2.0 * float64(matches) / float64(length) - } - return 1.0 -} - -type Match struct { - A int - B int - Size int -} - -type OpCode struct { - Tag byte - I1 int - I2 int - J1 int - J2 int -} - -// SequenceMatcher compares sequence of strings. The basic -// algorithm predates, and is a little fancier than, an algorithm -// published in the late 1980's by Ratcliff and Obershelp under the -// hyperbolic name "gestalt pattern matching". The basic idea is to find -// the longest contiguous matching subsequence that contains no "junk" -// elements (R-O doesn't address junk). The same idea is then applied -// recursively to the pieces of the sequences to the left and to the right -// of the matching subsequence. This does not yield minimal edit -// sequences, but does tend to yield matches that "look right" to people. -// -// SequenceMatcher tries to compute a "human-friendly diff" between two -// sequences. Unlike e.g. UNIX(tm) diff, the fundamental notion is the -// longest *contiguous* & junk-free matching subsequence. That's what -// catches peoples' eyes. The Windows(tm) windiff has another interesting -// notion, pairing up elements that appear uniquely in each sequence. -// That, and the method here, appear to yield more intuitive difference -// reports than does diff. This method appears to be the least vulnerable -// to synching up on blocks of "junk lines", though (like blank lines in -// ordinary text files, or maybe "
<P>
" lines in HTML files). That may be -// because this is the only method of the 3 that has a *concept* of -// "junk" . -// -// Timing: Basic R-O is cubic time worst case and quadratic time expected -// case. SequenceMatcher is quadratic time for the worst case and has -// expected-case behavior dependent in a complicated way on how many -// elements the sequences have in common; best case time is linear. -type SequenceMatcher struct { - a []string - b []string - b2j map[string][]int - IsJunk func(string) bool - autoJunk bool - bJunk map[string]struct{} - matchingBlocks []Match - fullBCount map[string]int - bPopular map[string]struct{} - opCodes []OpCode -} - -func NewMatcher(a, b []string) *SequenceMatcher { - m := SequenceMatcher{autoJunk: true} - m.SetSeqs(a, b) - return &m -} - -func NewMatcherWithJunk(a, b []string, autoJunk bool, - isJunk func(string) bool, -) *SequenceMatcher { - m := SequenceMatcher{IsJunk: isJunk, autoJunk: autoJunk} - m.SetSeqs(a, b) - return &m -} - -// Set two sequences to be compared. -func (m *SequenceMatcher) SetSeqs(a, b []string) { - m.SetSeq1(a) - m.SetSeq2(b) -} - -// Set the first sequence to be compared. The second sequence to be compared is -// not changed. -// -// SequenceMatcher computes and caches detailed information about the second -// sequence, so if you want to compare one sequence S against many sequences, -// use .SetSeq2(s) once and call .SetSeq1(x) repeatedly for each of the other -// sequences. -// -// See also SetSeqs() and SetSeq2(). -func (m *SequenceMatcher) SetSeq1(a []string) { - if &a == &m.a { - return - } - m.a = a - m.matchingBlocks = nil - m.opCodes = nil -} - -// Set the second sequence to be compared. The first sequence to be compared is -// not changed. -func (m *SequenceMatcher) SetSeq2(b []string) { - if &b == &m.b { - return - } - m.b = b - m.matchingBlocks = nil - m.opCodes = nil - m.fullBCount = nil - m.chainB() -} - -func (m *SequenceMatcher) chainB() { - // Populate line -> index mapping - b2j := map[string][]int{} - for i, s := range m.b { - indices := b2j[s] - indices = append(indices, i) - b2j[s] = indices - } - - // Purge junk elements - m.bJunk = map[string]struct{}{} - if m.IsJunk != nil { - junk := m.bJunk - for s := range b2j { - if m.IsJunk(s) { - junk[s] = struct{}{} - } - } - for s := range junk { - delete(b2j, s) - } - } - - // Purge remaining popular elements - popular := map[string]struct{}{} - n := len(m.b) - if m.autoJunk && n >= 200 { - ntest := n/100 + 1 - for s, indices := range b2j { - if len(indices) > ntest { - popular[s] = struct{}{} - } - } - for s := range popular { - delete(b2j, s) - } - } - m.bPopular = popular - m.b2j = b2j -} - -func (m *SequenceMatcher) isBJunk(s string) bool { - _, ok := m.bJunk[s] - return ok -} - -// Find longest matching block in a[alo:ahi] and b[blo:bhi]. -// -// If IsJunk is not defined: -// -// Return (i,j,k) such that a[i:i+k] is equal to b[j:j+k], where -// -// alo <= i <= i+k <= ahi -// blo <= j <= j+k <= bhi -// -// and for all (i',j',k') meeting those conditions, -// -// k >= k' -// i <= i' -// and if i == i', j <= j' -// -// In other words, of all maximal matching blocks, return one that -// starts earliest in a, and of all those maximal matching blocks that -// start earliest in a, return the one that starts earliest in b. -// -// If IsJunk is defined, first the longest matching block is -// determined as above, but with the additional restriction that no -// junk element appears in the block. 
Then that block is extended as -// far as possible by matching (only) junk elements on both sides. So -// the resulting block never matches on junk except as identical junk -// happens to be adjacent to an "interesting" match. -// -// If no blocks match, return (alo, blo, 0). -func (m *SequenceMatcher) findLongestMatch(alo, ahi, blo, bhi int) Match { - // CAUTION: stripping common prefix or suffix would be incorrect. - // E.g., - // ab - // acab - // Longest matching block is "ab", but if common prefix is - // stripped, it's "a" (tied with "b"). UNIX(tm) diff does so - // strip, so ends up claiming that ab is changed to acab by - // inserting "ca" in the middle. That's minimal but unintuitive: - // "it's obvious" that someone inserted "ac" at the front. - // Windiff ends up at the same place as diff, but by pairing up - // the unique 'b's and then matching the first two 'a's. - besti, bestj, bestsize := alo, blo, 0 - - // find longest junk-free match - // during an iteration of the loop, j2len[j] = length of longest - // junk-free match ending with a[i-1] and b[j] - j2len := map[int]int{} - for i := alo; i != ahi; i++ { - // look at all instances of a[i] in b; note that because - // b2j has no junk keys, the loop is skipped if a[i] is junk - newj2len := map[int]int{} - for _, j := range m.b2j[m.a[i]] { - // a[i] matches b[j] - if j < blo { - continue - } - if j >= bhi { - break - } - k := j2len[j-1] + 1 - newj2len[j] = k - if k > bestsize { - besti, bestj, bestsize = i-k+1, j-k+1, k - } - } - j2len = newj2len - } - - // Extend the best by non-junk elements on each end. In particular, - // "popular" non-junk elements aren't in b2j, which greatly speeds - // the inner loop above, but also means "the best" match so far - // doesn't contain any junk *or* popular non-junk elements. - for besti > alo && bestj > blo && !m.isBJunk(m.b[bestj-1]) && - m.a[besti-1] == m.b[bestj-1] { - besti, bestj, bestsize = besti-1, bestj-1, bestsize+1 - } - for besti+bestsize < ahi && bestj+bestsize < bhi && - !m.isBJunk(m.b[bestj+bestsize]) && - m.a[besti+bestsize] == m.b[bestj+bestsize] { - bestsize++ - } - - // Now that we have a wholly interesting match (albeit possibly - // empty!), we may as well suck up the matching junk on each - // side of it too. Can't think of a good reason not to, and it - // saves post-processing the (possibly considerable) expense of - // figuring out what to do with it. In the case of an empty - // interesting match, this is clearly the right thing to do, - // because no other kind of match is possible in the regions. - for besti > alo && bestj > blo && m.isBJunk(m.b[bestj-1]) && - m.a[besti-1] == m.b[bestj-1] { - besti, bestj, bestsize = besti-1, bestj-1, bestsize+1 - } - for besti+bestsize < ahi && bestj+bestsize < bhi && - m.isBJunk(m.b[bestj+bestsize]) && - m.a[besti+bestsize] == m.b[bestj+bestsize] { - bestsize++ - } - - return Match{A: besti, B: bestj, Size: bestsize} -} - -// Return list of triples describing matching subsequences. -// -// Each triple is of the form (i, j, n), and means that -// a[i:i+n] == b[j:j+n]. The triples are monotonically increasing in -// i and in j. It's also guaranteed that if (i, j, n) and (i', j', n') are -// adjacent triples in the list, and the second is not the last triple in the -// list, then i+n != i' or j+n != j'. IOW, adjacent triples never describe -// adjacent equal blocks. -// -// The last triple is a dummy, (len(a), len(b), 0), and is the only -// triple with n==0. 
-func (m *SequenceMatcher) GetMatchingBlocks() []Match { - if m.matchingBlocks != nil { - return m.matchingBlocks - } - - var matchBlocks func(alo, ahi, blo, bhi int, matched []Match) []Match - matchBlocks = func(alo, ahi, blo, bhi int, matched []Match) []Match { - match := m.findLongestMatch(alo, ahi, blo, bhi) - i, j, k := match.A, match.B, match.Size - if match.Size > 0 { - if alo < i && blo < j { - matched = matchBlocks(alo, i, blo, j, matched) - } - matched = append(matched, match) - if i+k < ahi && j+k < bhi { - matched = matchBlocks(i+k, ahi, j+k, bhi, matched) - } - } - return matched - } - matched := matchBlocks(0, len(m.a), 0, len(m.b), nil) - - // It's possible that we have adjacent equal blocks in the - // matching_blocks list now. - nonAdjacent := []Match{} - i1, j1, k1 := 0, 0, 0 - for _, b := range matched { - // Is this block adjacent to i1, j1, k1? - i2, j2, k2 := b.A, b.B, b.Size - if i1+k1 == i2 && j1+k1 == j2 { - // Yes, so collapse them -- this just increases the length of - // the first block by the length of the second, and the first - // block so lengthened remains the block to compare against. - k1 += k2 - } else { - // Not adjacent. Remember the first block (k1==0 means it's - // the dummy we started with), and make the second block the - // new block to compare against. - if k1 > 0 { - nonAdjacent = append(nonAdjacent, Match{i1, j1, k1}) - } - i1, j1, k1 = i2, j2, k2 - } - } - if k1 > 0 { - nonAdjacent = append(nonAdjacent, Match{i1, j1, k1}) - } - - nonAdjacent = append(nonAdjacent, Match{len(m.a), len(m.b), 0}) - m.matchingBlocks = nonAdjacent - return m.matchingBlocks -} - -// Return list of 5-tuples describing how to turn a into b. -// -// Each tuple is of the form (tag, i1, i2, j1, j2). The first tuple -// has i1 == j1 == 0, and remaining tuples have i1 == the i2 from the -// tuple preceding it, and likewise for j1 == the previous j2. -// -// The tags are characters, with these meanings: -// -// 'r' (replace): a[i1:i2] should be replaced by b[j1:j2] -// -// 'd' (delete): a[i1:i2] should be deleted, j1==j2 in this case. -// -// 'i' (insert): b[j1:j2] should be inserted at a[i1:i1], i1==i2 in this case. -// -// 'e' (equal): a[i1:i2] == b[j1:j2] -func (m *SequenceMatcher) GetOpCodes() []OpCode { - if m.opCodes != nil { - return m.opCodes - } - i, j := 0, 0 - matching := m.GetMatchingBlocks() - opCodes := make([]OpCode, 0, len(matching)) - for _, m := range matching { - // invariant: we've pumped out correct diffs to change - // a[:i] into b[:j], and the next matching block is - // a[ai:ai+size] == b[bj:bj+size]. So we need to pump - // out a diff to change a[i:ai] into b[j:bj], pump out - // the matching block, and move (i,j) beyond the match - ai, bj, size := m.A, m.B, m.Size - tag := byte(0) - if i < ai && j < bj { - tag = 'r' - } else if i < ai { - tag = 'd' - } else if j < bj { - tag = 'i' - } - if tag > 0 { - opCodes = append(opCodes, OpCode{tag, i, ai, j, bj}) - } - i, j = ai+size, bj+size - // the list of matching blocks is terminated by a - // sentinel with size 0 - if size > 0 { - opCodes = append(opCodes, OpCode{'e', ai, i, bj, j}) - } - } - m.opCodes = opCodes - return m.opCodes -} - -// Isolate change clusters by eliminating ranges with no changes. -// -// Return a generator of groups with up to n lines of context. -// Each group is in the same format as returned by GetOpCodes(). 
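For reference, a minimal sketch of how the opcode tags described above look in practice, using the upstream github.com/pmezard/go-difflib package that this vendored copy mirrors (the input lines are illustrative):

package main

import (
	"fmt"

	"github.com/pmezard/go-difflib/difflib"
)

func main() {
	a := difflib.SplitLines("one\ntwo\nthree\n")
	b := difflib.SplitLines("one\nthree\nfour\n")
	m := difflib.NewMatcher(a, b)
	// Each opcode tag is one of 'r' (replace), 'd' (delete), 'i' (insert), 'e' (equal).
	for _, op := range m.GetOpCodes() {
		fmt.Printf("%c a[%d:%d] b[%d:%d]\n", op.Tag, op.I1, op.I2, op.J1, op.J2)
	}
}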
-func (m *SequenceMatcher) GetGroupedOpCodes(n int) [][]OpCode { - if n < 0 { - n = 3 - } - codes := m.GetOpCodes() - if len(codes) == 0 { - codes = []OpCode{{'e', 0, 1, 0, 1}} - } - // Fixup leading and trailing groups if they show no changes. - if codes[0].Tag == 'e' { - c := codes[0] - i1, i2, j1, j2 := c.I1, c.I2, c.J1, c.J2 - codes[0] = OpCode{c.Tag, max(i1, i2-n), i2, max(j1, j2-n), j2} - } - if codes[len(codes)-1].Tag == 'e' { - c := codes[len(codes)-1] - i1, i2, j1, j2 := c.I1, c.I2, c.J1, c.J2 - codes[len(codes)-1] = OpCode{c.Tag, i1, min(i2, i1+n), j1, min(j2, j1+n)} - } - nn := n + n - groups := [][]OpCode{} - group := []OpCode{} - for _, c := range codes { - i1, i2, j1, j2 := c.I1, c.I2, c.J1, c.J2 - // End the current group and start a new one whenever - // there is a large range with no changes. - if c.Tag == 'e' && i2-i1 > nn { - group = append(group, OpCode{ - c.Tag, i1, min(i2, i1+n), - j1, min(j2, j1+n), - }) - groups = append(groups, group) - group = []OpCode{} - i1, j1 = max(i1, i2-n), max(j1, j2-n) - } - group = append(group, OpCode{c.Tag, i1, i2, j1, j2}) - } - if len(group) > 0 && !(len(group) == 1 && group[0].Tag == 'e') { - groups = append(groups, group) - } - return groups -} - -// Return a measure of the sequences' similarity (float in [0,1]). -// -// Where T is the total number of elements in both sequences, and -// M is the number of matches, this is 2.0*M / T. -// Note that this is 1 if the sequences are identical, and 0 if -// they have nothing in common. -// -// .Ratio() is expensive to compute if you haven't already computed -// .GetMatchingBlocks() or .GetOpCodes(), in which case you may -// want to try .QuickRatio() or .RealQuickRation() first to get an -// upper bound. -func (m *SequenceMatcher) Ratio() float64 { - matches := 0 - for _, m := range m.GetMatchingBlocks() { - matches += m.Size - } - return calculateRatio(matches, len(m.a)+len(m.b)) -} - -// Return an upper bound on ratio() relatively quickly. -// -// This isn't defined beyond that it is an upper bound on .Ratio(), and -// is faster to compute. -func (m *SequenceMatcher) QuickRatio() float64 { - // viewing a and b as multisets, set matches to the cardinality - // of their intersection; this counts the number of matches - // without regard to order, so is clearly an upper bound - if m.fullBCount == nil { - m.fullBCount = map[string]int{} - for _, s := range m.b { - m.fullBCount[s]++ - } - } - - // avail[x] is the number of times x appears in 'b' less the - // number of times we've seen it in 'a' so far ... kinda - avail := map[string]int{} - matches := 0 - for _, s := range m.a { - n, ok := avail[s] - if !ok { - n = m.fullBCount[s] - } - avail[s] = n - 1 - if n > 0 { - matches++ - } - } - return calculateRatio(matches, len(m.a)+len(m.b)) -} - -// Return an upper bound on ratio() very quickly. -// -// This isn't defined beyond that it is an upper bound on .Ratio(), and -// is faster to compute than either .Ratio() or .QuickRatio(). 
-func (m *SequenceMatcher) RealQuickRatio() float64 { - la, lb := len(m.a), len(m.b) - return calculateRatio(min(la, lb), la+lb) -} - -// Convert range to the "ed" format -func formatRangeUnified(start, stop int) string { - // Per the diff spec at http://www.unix.org/single_unix_specification/ - beginning := start + 1 // lines start numbering with one - length := stop - start - if length == 1 { - return fmt.Sprintf("%d", beginning) - } - if length == 0 { - beginning-- // empty ranges begin at line just before the range - } - return fmt.Sprintf("%d,%d", beginning, length) -} - -// Unified diff parameters -type UnifiedDiff struct { - A []string // First sequence lines - FromFile string // First file name - FromDate string // First file time - B []string // Second sequence lines - ToFile string // Second file name - ToDate string // Second file time - Eol string // Headers end of line, defaults to LF - Context int // Number of context lines -} - -// Compare two sequences of lines; generate the delta as a unified diff. -// -// Unified diffs are a compact way of showing line changes and a few -// lines of context. The number of context lines is set by 'n' which -// defaults to three. -// -// By default, the diff control lines (those with ---, +++, or @@) are -// created with a trailing newline. This is helpful so that inputs -// created from file.readlines() result in diffs that are suitable for -// file.writelines() since both the inputs and outputs have trailing -// newlines. -// -// For inputs that do not have trailing newlines, set the lineterm -// argument to "" so that the output will be uniformly newline free. -// -// The unidiff format normally has a header for filenames and modification -// times. Any or all of these may be specified using strings for -// 'fromfile', 'tofile', 'fromfiledate', and 'tofiledate'. -// The modification times are normally expressed in the ISO 8601 format. 
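A minimal usage sketch of the unified-diff helpers above, again via the upstream go-difflib package (the file names and inputs are made up):

package main

import (
	"fmt"

	"github.com/pmezard/go-difflib/difflib"
)

func main() {
	diff := difflib.UnifiedDiff{
		A:        difflib.SplitLines("one\ntwo\nthree\n"),
		B:        difflib.SplitLines("one\ntwo\nfour\n"),
		FromFile: "a.txt",
		ToFile:   "b.txt",
		Context:  3, // three lines of context, as in standard unified diffs
	}
	text, err := difflib.GetUnifiedDiffString(diff)
	if err != nil {
		panic(err)
	}
	fmt.Print(text)
}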
-func WriteUnifiedDiff(writer io.Writer, diff UnifiedDiff) error { - buf := bufio.NewWriter(writer) - defer buf.Flush() - wf := func(format string, args ...interface{}) error { - _, err := buf.WriteString(fmt.Sprintf(format, args...)) - return err - } - ws := func(s string) error { - _, err := buf.WriteString(s) - return err - } - - if len(diff.Eol) == 0 { - diff.Eol = "\n" - } - - started := false - m := NewMatcher(diff.A, diff.B) - for _, g := range m.GetGroupedOpCodes(diff.Context) { - if !started { - started = true - fromDate := "" - if len(diff.FromDate) > 0 { - fromDate = "\t" + diff.FromDate - } - toDate := "" - if len(diff.ToDate) > 0 { - toDate = "\t" + diff.ToDate - } - if diff.FromFile != "" || diff.ToFile != "" { - err := wf("--- %s%s%s", diff.FromFile, fromDate, diff.Eol) - if err != nil { - return err - } - err = wf("+++ %s%s%s", diff.ToFile, toDate, diff.Eol) - if err != nil { - return err - } - } - } - first, last := g[0], g[len(g)-1] - range1 := formatRangeUnified(first.I1, last.I2) - range2 := formatRangeUnified(first.J1, last.J2) - if err := wf("@@ -%s +%s @@%s", range1, range2, diff.Eol); err != nil { - return err - } - for _, c := range g { - i1, i2, j1, j2 := c.I1, c.I2, c.J1, c.J2 - if c.Tag == 'e' { - for _, line := range diff.A[i1:i2] { - if err := ws(" " + line); err != nil { - return err - } - } - continue - } - if c.Tag == 'r' || c.Tag == 'd' { - for _, line := range diff.A[i1:i2] { - if err := ws("-" + line); err != nil { - return err - } - } - } - if c.Tag == 'r' || c.Tag == 'i' { - for _, line := range diff.B[j1:j2] { - if err := ws("+" + line); err != nil { - return err - } - } - } - } - } - return nil -} - -// Like WriteUnifiedDiff but returns the diff a string. -func GetUnifiedDiffString(diff UnifiedDiff) (string, error) { - w := &bytes.Buffer{} - err := WriteUnifiedDiff(w, diff) - return w.String(), err -} - -// Split a string on "\n" while preserving them. The output can be used -// as input for UnifiedDiff and ContextDiff structures. -func SplitLines(s string) []string { - lines := strings.SplitAfter(s, "\n") - lines[len(lines)-1] += "\n" - return lines -} diff --git a/vendor/github.com/prometheus/client_golang/prometheus/internal/go_collector_options.go b/vendor/github.com/prometheus/client_golang/prometheus/internal/go_collector_options.go deleted file mode 100644 index 723b45d64..000000000 --- a/vendor/github.com/prometheus/client_golang/prometheus/internal/go_collector_options.go +++ /dev/null @@ -1,32 +0,0 @@ -// Copyright 2021 The Prometheus Authors -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package internal - -import "regexp" - -type GoCollectorRule struct { - Matcher *regexp.Regexp - Deny bool -} - -// GoCollectorOptions should not be used be directly by anything, except `collectors` package. -// Use it via collectors package instead. See issue -// https://github.com/prometheus/client_golang/issues/1030. 
-// -// This is internal, so external users only can use it via `collector.WithGoCollector*` methods -type GoCollectorOptions struct { - DisableMemStatsLikeMetrics bool - RuntimeMetricSumForHist map[string]string - RuntimeMetricRules []GoCollectorRule -} diff --git a/vendor/github.com/prometheus/client_golang/prometheus/internal/go_runtime_metrics.go b/vendor/github.com/prometheus/client_golang/prometheus/internal/go_runtime_metrics.go deleted file mode 100644 index 97d17d6cb..000000000 --- a/vendor/github.com/prometheus/client_golang/prometheus/internal/go_runtime_metrics.go +++ /dev/null @@ -1,142 +0,0 @@ -// Copyright 2021 The Prometheus Authors -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -//go:build go1.17 -// +build go1.17 - -package internal - -import ( - "math" - "path" - "runtime/metrics" - "strings" - - "github.com/prometheus/common/model" -) - -// RuntimeMetricsToProm produces a Prometheus metric name from a runtime/metrics -// metric description and validates whether the metric is suitable for integration -// with Prometheus. -// -// Returns false if a name could not be produced, or if Prometheus does not understand -// the runtime/metrics Kind. -// -// Note that the main reason a name couldn't be produced is if the runtime/metrics -// package exports a name with characters outside the valid Prometheus metric name -// character set. This is theoretically possible, but should never happen in practice. -// Still, don't rely on it. -func RuntimeMetricsToProm(d *metrics.Description) (string, string, string, bool) { - namespace := "go" - - comp := strings.SplitN(d.Name, ":", 2) - key := comp[0] - unit := comp[1] - - // The last path element in the key is the name, - // the rest is the subsystem. - subsystem := path.Dir(key[1:] /* remove leading / */) - name := path.Base(key) - - // subsystem is translated by replacing all / and - with _. - subsystem = strings.ReplaceAll(subsystem, "/", "_") - subsystem = strings.ReplaceAll(subsystem, "-", "_") - - // unit is translated assuming that the unit contains no - // non-ASCII characters. - unit = strings.ReplaceAll(unit, "-", "_") - unit = strings.ReplaceAll(unit, "*", "_") - unit = strings.ReplaceAll(unit, "/", "_per_") - - // name has - replaced with _ and is concatenated with the unit and - // other data. - name = strings.ReplaceAll(name, "-", "_") - name += "_" + unit - if d.Cumulative && d.Kind != metrics.KindFloat64Histogram { - name += "_total" - } - - valid := model.IsValidMetricName(model.LabelValue(namespace + "_" + subsystem + "_" + name)) - switch d.Kind { - case metrics.KindUint64: - case metrics.KindFloat64: - case metrics.KindFloat64Histogram: - default: - valid = false - } - return namespace, subsystem, name, valid -} - -// RuntimeMetricsBucketsForUnit takes a set of buckets obtained for a runtime/metrics histogram -// type (so, lower-bound inclusive) and a unit from a runtime/metrics name, and produces -// a reduced set of buckets. 
This function always removes any -Inf bucket as it's represented -// as the bottom-most upper-bound inclusive bucket in Prometheus. -func RuntimeMetricsBucketsForUnit(buckets []float64, unit string) []float64 { - switch unit { - case "bytes": - // Re-bucket as powers of 2. - return reBucketExp(buckets, 2) - case "seconds": - // Re-bucket as powers of 10 and then merge all buckets greater - // than 1 second into the +Inf bucket. - b := reBucketExp(buckets, 10) - for i := range b { - if b[i] <= 1 { - continue - } - b[i] = math.Inf(1) - b = b[:i+1] - break - } - return b - } - return buckets -} - -// reBucketExp takes a list of bucket boundaries (lower bound inclusive) and -// downsamples the buckets to those a multiple of base apart. The end result -// is a roughly exponential (in many cases, perfectly exponential) bucketing -// scheme. -func reBucketExp(buckets []float64, base float64) []float64 { - bucket := buckets[0] - var newBuckets []float64 - // We may see a -Inf here, in which case, add it and skip it - // since we risk producing NaNs otherwise. - // - // We need to preserve -Inf values to maintain runtime/metrics - // conventions. We'll strip it out later. - if bucket == math.Inf(-1) { - newBuckets = append(newBuckets, bucket) - buckets = buckets[1:] - bucket = buckets[0] - } - // From now on, bucket should always have a non-Inf value because - // Infs are only ever at the ends of the bucket lists, so - // arithmetic operations on it are non-NaN. - for i := 1; i < len(buckets); i++ { - if bucket >= 0 && buckets[i] < bucket*base { - // The next bucket we want to include is at least bucket*base. - continue - } else if bucket < 0 && buckets[i] < bucket/base { - // In this case the bucket we're targeting is negative, and since - // we're ascending through buckets here, we need to divide to get - // closer to zero exponentially. - continue - } - // The +Inf bucket will always be the last one, and we'll always - // end up including it here because bucket - newBuckets = append(newBuckets, bucket) - bucket = buckets[i] - } - return append(newBuckets, bucket) -} diff --git a/vendor/github.com/prometheus/client_golang/prometheus/internal/metric.go b/vendor/github.com/prometheus/client_golang/prometheus/internal/metric.go deleted file mode 100644 index 6515c1148..000000000 --- a/vendor/github.com/prometheus/client_golang/prometheus/internal/metric.go +++ /dev/null @@ -1,101 +0,0 @@ -// Copyright 2018 The Prometheus Authors -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package internal - -import ( - "sort" - - dto "github.com/prometheus/client_model/go" -) - -// LabelPairSorter implements sort.Interface. It is used to sort a slice of -// dto.LabelPair pointers. -type LabelPairSorter []*dto.LabelPair - -func (s LabelPairSorter) Len() int { - return len(s) -} - -func (s LabelPairSorter) Swap(i, j int) { - s[i], s[j] = s[j], s[i] -} - -func (s LabelPairSorter) Less(i, j int) bool { - return s[i].GetName() < s[j].GetName() -} - -// MetricSorter is a sortable slice of *dto.Metric. 
-type MetricSorter []*dto.Metric - -func (s MetricSorter) Len() int { - return len(s) -} - -func (s MetricSorter) Swap(i, j int) { - s[i], s[j] = s[j], s[i] -} - -func (s MetricSorter) Less(i, j int) bool { - if len(s[i].Label) != len(s[j].Label) { - // This should not happen. The metrics are - // inconsistent. However, we have to deal with the fact, as - // people might use custom collectors or metric family injection - // to create inconsistent metrics. So let's simply compare the - // number of labels in this case. That will still yield - // reproducible sorting. - return len(s[i].Label) < len(s[j].Label) - } - for n, lp := range s[i].Label { - vi := lp.GetValue() - vj := s[j].Label[n].GetValue() - if vi != vj { - return vi < vj - } - } - - // We should never arrive here. Multiple metrics with the same - // label set in the same scrape will lead to undefined ingestion - // behavior. However, as above, we have to provide stable sorting - // here, even for inconsistent metrics. So sort equal metrics - // by their timestamp, with missing timestamps (implying "now") - // coming last. - if s[i].TimestampMs == nil { - return false - } - if s[j].TimestampMs == nil { - return true - } - return s[i].GetTimestampMs() < s[j].GetTimestampMs() -} - -// NormalizeMetricFamilies returns a MetricFamily slice with empty -// MetricFamilies pruned and the remaining MetricFamilies sorted by name within -// the slice, with the contained Metrics sorted within each MetricFamily. -func NormalizeMetricFamilies(metricFamiliesByName map[string]*dto.MetricFamily) []*dto.MetricFamily { - for _, mf := range metricFamiliesByName { - sort.Sort(MetricSorter(mf.Metric)) - } - names := make([]string, 0, len(metricFamiliesByName)) - for name, mf := range metricFamiliesByName { - if len(mf.Metric) > 0 { - names = append(names, name) - } - } - sort.Strings(names) - result := make([]*dto.MetricFamily, 0, len(names)) - for _, name := range names { - result = append(result, metricFamiliesByName[name]) - } - return result -} diff --git a/vendor/github.com/prometheus/client_golang/prometheus/labels.go b/vendor/github.com/prometheus/client_golang/prometheus/labels.go deleted file mode 100644 index c21911f29..000000000 --- a/vendor/github.com/prometheus/client_golang/prometheus/labels.go +++ /dev/null @@ -1,188 +0,0 @@ -// Copyright 2018 The Prometheus Authors -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package prometheus - -import ( - "errors" - "fmt" - "strings" - "unicode/utf8" - - "github.com/prometheus/common/model" -) - -// Labels represents a collection of label name -> value mappings. This type is -// commonly used with the With(Labels) and GetMetricWith(Labels) methods of -// metric vector Collectors, e.g.: -// -// myVec.With(Labels{"code": "404", "method": "GET"}).Add(42) -// -// The other use-case is the specification of constant label pairs in Opts or to -// create a Desc. -type Labels map[string]string - -// LabelConstraint normalizes label values. 
-type LabelConstraint func(string) string - -// ConstrainedLabels represents a label name and its constrain function -// to normalize label values. This type is commonly used when constructing -// metric vector Collectors. -type ConstrainedLabel struct { - Name string - Constraint LabelConstraint -} - -// ConstrainableLabels is an interface that allows creating of labels that can -// be optionally constrained. -// -// prometheus.V2().NewCounterVec(CounterVecOpts{ -// CounterOpts: {...}, // Usual CounterOpts fields -// VariableLabels: []ConstrainedLabels{ -// {Name: "A"}, -// {Name: "B", Constraint: func(v string) string { ... }}, -// }, -// }) -type ConstrainableLabels interface { - compile() *compiledLabels - labelNames() []string -} - -// ConstrainedLabels represents a collection of label name -> constrain function -// to normalize label values. This type is commonly used when constructing -// metric vector Collectors. -type ConstrainedLabels []ConstrainedLabel - -func (cls ConstrainedLabels) compile() *compiledLabels { - compiled := &compiledLabels{ - names: make([]string, len(cls)), - labelConstraints: map[string]LabelConstraint{}, - } - - for i, label := range cls { - compiled.names[i] = label.Name - if label.Constraint != nil { - compiled.labelConstraints[label.Name] = label.Constraint - } - } - - return compiled -} - -func (cls ConstrainedLabels) labelNames() []string { - names := make([]string, len(cls)) - for i, label := range cls { - names[i] = label.Name - } - return names -} - -// UnconstrainedLabels represents collection of label without any constraint on -// their value. Thus, it is simply a collection of label names. -// -// UnconstrainedLabels([]string{ "A", "B" }) -// -// is equivalent to -// -// ConstrainedLabels { -// { Name: "A" }, -// { Name: "B" }, -// } -type UnconstrainedLabels []string - -func (uls UnconstrainedLabels) compile() *compiledLabels { - return &compiledLabels{ - names: uls, - } -} - -func (uls UnconstrainedLabels) labelNames() []string { - return uls -} - -type compiledLabels struct { - names []string - labelConstraints map[string]LabelConstraint -} - -func (cls *compiledLabels) compile() *compiledLabels { - return cls -} - -func (cls *compiledLabels) labelNames() []string { - return cls.names -} - -func (cls *compiledLabels) constrain(labelName, value string) string { - if fn, ok := cls.labelConstraints[labelName]; ok && fn != nil { - return fn(value) - } - return value -} - -// reservedLabelPrefix is a prefix which is not legal in user-supplied -// label names. 
-const reservedLabelPrefix = "__" - -var errInconsistentCardinality = errors.New("inconsistent label cardinality") - -func makeInconsistentCardinalityError(fqName string, labels, labelValues []string) error { - return fmt.Errorf( - "%w: %q has %d variable labels named %q but %d values %q were provided", - errInconsistentCardinality, fqName, - len(labels), labels, - len(labelValues), labelValues, - ) -} - -func validateValuesInLabels(labels Labels, expectedNumberOfValues int) error { - if len(labels) != expectedNumberOfValues { - return fmt.Errorf( - "%w: expected %d label values but got %d in %#v", - errInconsistentCardinality, expectedNumberOfValues, - len(labels), labels, - ) - } - - for name, val := range labels { - if !utf8.ValidString(val) { - return fmt.Errorf("label %s: value %q is not valid UTF-8", name, val) - } - } - - return nil -} - -func validateLabelValues(vals []string, expectedNumberOfValues int) error { - if len(vals) != expectedNumberOfValues { - // The call below makes vals escape, copy them to avoid that. - vals := append([]string(nil), vals...) - return fmt.Errorf( - "%w: expected %d label values but got %d in %#v", - errInconsistentCardinality, expectedNumberOfValues, - len(vals), vals, - ) - } - - for _, val := range vals { - if !utf8.ValidString(val) { - return fmt.Errorf("label value %q is not valid UTF-8", val) - } - } - - return nil -} - -func checkLabelName(l string) bool { - return model.LabelName(l).IsValid() && !strings.HasPrefix(l, reservedLabelPrefix) -} diff --git a/vendor/github.com/prometheus/client_golang/prometheus/metric.go b/vendor/github.com/prometheus/client_golang/prometheus/metric.go deleted file mode 100644 index f018e5723..000000000 --- a/vendor/github.com/prometheus/client_golang/prometheus/metric.go +++ /dev/null @@ -1,257 +0,0 @@ -// Copyright 2014 The Prometheus Authors -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package prometheus - -import ( - "errors" - "math" - "sort" - "strings" - "time" - - dto "github.com/prometheus/client_model/go" - "github.com/prometheus/common/model" - "google.golang.org/protobuf/proto" -) - -var separatorByteSlice = []byte{model.SeparatorByte} // For convenient use with xxhash. - -// A Metric models a single sample value with its meta data being exported to -// Prometheus. Implementations of Metric in this package are Gauge, Counter, -// Histogram, Summary, and Untyped. -type Metric interface { - // Desc returns the descriptor for the Metric. This method idempotently - // returns the same descriptor throughout the lifetime of the - // Metric. The returned descriptor is immutable by contract. A Metric - // unable to describe itself must return an invalid descriptor (created - // with NewInvalidDesc). - Desc() *Desc - // Write encodes the Metric into a "Metric" Protocol Buffer data - // transmission object. 
- // - // Metric implementations must observe concurrency safety as reads of - // this metric may occur at any time, and any blocking occurs at the - // expense of total performance of rendering all registered - // metrics. Ideally, Metric implementations should support concurrent - // readers. - // - // While populating dto.Metric, it is the responsibility of the - // implementation to ensure validity of the Metric protobuf (like valid - // UTF-8 strings or syntactically valid metric and label names). It is - // recommended to sort labels lexicographically. Callers of Write should - // still make sure of sorting if they depend on it. - Write(*dto.Metric) error - // TODO(beorn7): The original rationale of passing in a pre-allocated - // dto.Metric protobuf to save allocations has disappeared. The - // signature of this method should be changed to "Write() (*dto.Metric, - // error)". -} - -// Opts bundles the options for creating most Metric types. Each metric -// implementation XXX has its own XXXOpts type, but in most cases, it is just -// an alias of this type (which might change when the requirement arises.) -// -// It is mandatory to set Name to a non-empty string. All other fields are -// optional and can safely be left at their zero value, although it is strongly -// encouraged to set a Help string. -type Opts struct { - // Namespace, Subsystem, and Name are components of the fully-qualified - // name of the Metric (created by joining these components with - // "_"). Only Name is mandatory, the others merely help structuring the - // name. Note that the fully-qualified name of the metric must be a - // valid Prometheus metric name. - Namespace string - Subsystem string - Name string - - // Help provides information about this metric. - // - // Metrics with the same fully-qualified name must have the same Help - // string. - Help string - - // ConstLabels are used to attach fixed labels to this metric. Metrics - // with the same fully-qualified name must have the same label names in - // their ConstLabels. - // - // ConstLabels are only used rarely. In particular, do not use them to - // attach the same labels to all your metrics. Those use cases are - // better covered by target labels set by the scraping Prometheus - // server, or by one specific metric (e.g. a build_info or a - // machine_role metric). See also - // https://prometheus.io/docs/instrumenting/writing_exporters/#target-labels-not-static-scraped-labels - ConstLabels Labels - - // now is for testing purposes, by default it's time.Now. - now func() time.Time -} - -// BuildFQName joins the given three name components by "_". Empty name -// components are ignored. If the name parameter itself is empty, an empty -// string is returned, no matter what. Metric implementations included in this -// library use this function internally to generate the fully-qualified metric -// name from the name component in their Opts. Users of the library will only -// need this function if they implement their own Metric or instantiate a Desc -// (with NewDesc) directly. 
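A quick sketch of the joining rules (this mirrors the function below; the metric name components are illustrative):

package main

import (
	"fmt"

	"github.com/prometheus/client_golang/prometheus"
)

func main() {
	// Non-empty components are joined with "_"; an empty name always yields "".
	fmt.Println(prometheus.BuildFQName("http", "server", "requests_total")) // http_server_requests_total
	fmt.Println(prometheus.BuildFQName("", "server", "requests_total"))     // server_requests_total
	fmt.Println(prometheus.BuildFQName("http", "", "requests_total"))       // http_requests_total
	fmt.Println(prometheus.BuildFQName("", "", ""))                         // (empty)
}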
-func BuildFQName(namespace, subsystem, name string) string { - if name == "" { - return "" - } - switch { - case namespace != "" && subsystem != "": - return strings.Join([]string{namespace, subsystem, name}, "_") - case namespace != "": - return strings.Join([]string{namespace, name}, "_") - case subsystem != "": - return strings.Join([]string{subsystem, name}, "_") - } - return name -} - -type invalidMetric struct { - desc *Desc - err error -} - -// NewInvalidMetric returns a metric whose Write method always returns the -// provided error. It is useful if a Collector finds itself unable to collect -// a metric and wishes to report an error to the registry. -func NewInvalidMetric(desc *Desc, err error) Metric { - return &invalidMetric{desc, err} -} - -func (m *invalidMetric) Desc() *Desc { return m.desc } - -func (m *invalidMetric) Write(*dto.Metric) error { return m.err } - -type timestampedMetric struct { - Metric - t time.Time -} - -func (m timestampedMetric) Write(pb *dto.Metric) error { - e := m.Metric.Write(pb) - pb.TimestampMs = proto.Int64(m.t.Unix()*1000 + int64(m.t.Nanosecond()/1000000)) - return e -} - -// NewMetricWithTimestamp returns a new Metric wrapping the provided Metric in a -// way that it has an explicit timestamp set to the provided Time. This is only -// useful in rare cases as the timestamp of a Prometheus metric should usually -// be set by the Prometheus server during scraping. Exceptions include mirroring -// metrics with given timestamps from other metric -// sources. -// -// NewMetricWithTimestamp works best with MustNewConstMetric, -// MustNewConstHistogram, and MustNewConstSummary, see example. -// -// Currently, the exposition formats used by Prometheus are limited to -// millisecond resolution. Thus, the provided time will be rounded down to the -// next full millisecond value. -func NewMetricWithTimestamp(t time.Time, m Metric) Metric { - return timestampedMetric{Metric: m, t: t} -} - -type withExemplarsMetric struct { - Metric - - exemplars []*dto.Exemplar -} - -func (m *withExemplarsMetric) Write(pb *dto.Metric) error { - if err := m.Metric.Write(pb); err != nil { - return err - } - - switch { - case pb.Counter != nil: - pb.Counter.Exemplar = m.exemplars[len(m.exemplars)-1] - case pb.Histogram != nil: - for _, e := range m.exemplars { - // pb.Histogram.Bucket are sorted by UpperBound. - i := sort.Search(len(pb.Histogram.Bucket), func(i int) bool { - return pb.Histogram.Bucket[i].GetUpperBound() >= e.GetValue() - }) - if i < len(pb.Histogram.Bucket) { - pb.Histogram.Bucket[i].Exemplar = e - } else { - // The +Inf bucket should be explicitly added if there is an exemplar for it, similar to non-const histogram logic in https://github.com/prometheus/client_golang/blob/main/prometheus/histogram.go#L357-L365. - b := &dto.Bucket{ - CumulativeCount: proto.Uint64(pb.Histogram.GetSampleCount()), - UpperBound: proto.Float64(math.Inf(1)), - Exemplar: e, - } - pb.Histogram.Bucket = append(pb.Histogram.Bucket, b) - } - } - default: - // TODO(bwplotka): Implement Gauge? - return errors.New("cannot inject exemplar into Gauge, Summary or Untyped") - } - - return nil -} - -// Exemplar is easier to use, user-facing representation of *dto.Exemplar. -type Exemplar struct { - Value float64 - Labels Labels - // Optional. - // Default value (time.Time{}) indicates its empty, which should be - // understood as time.Now() time at the moment of creation of metric. 
- Timestamp time.Time -} - -// NewMetricWithExemplars returns a new Metric wrapping the provided Metric with given -// exemplars. Exemplars are validated. -// -// Only last applicable exemplar is injected from the list. -// For example for Counter it means last exemplar is injected. -// For Histogram, it means last applicable exemplar for each bucket is injected. -// -// NewMetricWithExemplars works best with MustNewConstMetric and -// MustNewConstHistogram, see example. -func NewMetricWithExemplars(m Metric, exemplars ...Exemplar) (Metric, error) { - if len(exemplars) == 0 { - return nil, errors.New("no exemplar was passed for NewMetricWithExemplars") - } - - var ( - now = time.Now() - exs = make([]*dto.Exemplar, len(exemplars)) - err error - ) - for i, e := range exemplars { - ts := e.Timestamp - if ts == (time.Time{}) { - ts = now - } - exs[i], err = newExemplar(e.Value, ts, e.Labels) - if err != nil { - return nil, err - } - } - - return &withExemplarsMetric{Metric: m, exemplars: exs}, nil -} - -// MustNewMetricWithExemplars is a version of NewMetricWithExemplars that panics where -// NewMetricWithExemplars would have returned an error. -func MustNewMetricWithExemplars(m Metric, exemplars ...Exemplar) Metric { - ret, err := NewMetricWithExemplars(m, exemplars...) - if err != nil { - panic(err) - } - return ret -} diff --git a/vendor/github.com/prometheus/client_golang/prometheus/num_threads.go b/vendor/github.com/prometheus/client_golang/prometheus/num_threads.go deleted file mode 100644 index 7c12b2108..000000000 --- a/vendor/github.com/prometheus/client_golang/prometheus/num_threads.go +++ /dev/null @@ -1,25 +0,0 @@ -// Copyright 2018 The Prometheus Authors -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -//go:build !js || wasm -// +build !js wasm - -package prometheus - -import "runtime" - -// getRuntimeNumThreads returns the number of open OS threads. -func getRuntimeNumThreads() float64 { - n, _ := runtime.ThreadCreateProfile(nil) - return float64(n) -} diff --git a/vendor/github.com/prometheus/client_golang/prometheus/num_threads_gopherjs.go b/vendor/github.com/prometheus/client_golang/prometheus/num_threads_gopherjs.go deleted file mode 100644 index 7348df01d..000000000 --- a/vendor/github.com/prometheus/client_golang/prometheus/num_threads_gopherjs.go +++ /dev/null @@ -1,22 +0,0 @@ -// Copyright 2018 The Prometheus Authors -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -//go:build js && !wasm -// +build js,!wasm - -package prometheus - -// getRuntimeNumThreads returns the number of open OS threads. -func getRuntimeNumThreads() float64 { - return 1 -} diff --git a/vendor/github.com/prometheus/client_golang/prometheus/observer.go b/vendor/github.com/prometheus/client_golang/prometheus/observer.go deleted file mode 100644 index 03773b21f..000000000 --- a/vendor/github.com/prometheus/client_golang/prometheus/observer.go +++ /dev/null @@ -1,64 +0,0 @@ -// Copyright 2017 The Prometheus Authors -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package prometheus - -// Observer is the interface that wraps the Observe method, which is used by -// Histogram and Summary to add observations. -type Observer interface { - Observe(float64) -} - -// The ObserverFunc type is an adapter to allow the use of ordinary -// functions as Observers. If f is a function with the appropriate -// signature, ObserverFunc(f) is an Observer that calls f. -// -// This adapter is usually used in connection with the Timer type, and there are -// two general use cases: -// -// The most common one is to use a Gauge as the Observer for a Timer. -// See the "Gauge" Timer example. -// -// The more advanced use case is to create a function that dynamically decides -// which Observer to use for observing the duration. See the "Complex" Timer -// example. -type ObserverFunc func(float64) - -// Observe calls f(value). It implements Observer. -func (f ObserverFunc) Observe(value float64) { - f(value) -} - -// ObserverVec is an interface implemented by `HistogramVec` and `SummaryVec`. -type ObserverVec interface { - GetMetricWith(Labels) (Observer, error) - GetMetricWithLabelValues(lvs ...string) (Observer, error) - With(Labels) Observer - WithLabelValues(...string) Observer - CurryWith(Labels) (ObserverVec, error) - MustCurryWith(Labels) ObserverVec - - Collector -} - -// ExemplarObserver is implemented by Observers that offer the option of -// observing a value together with an exemplar. Its ObserveWithExemplar method -// works like the Observe method of an Observer but also replaces the currently -// saved exemplar (if any) with a new one, created from the provided value, the -// current time as timestamp, and the provided Labels. Empty Labels will lead to -// a valid (label-less) exemplar. But if Labels is nil, the current exemplar is -// left in place. ObserveWithExemplar panics if any of the provided labels are -// invalid or if the provided labels contain more than 128 runes in total. 
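A minimal sketch tying the Observer/ObserverFunc and ExemplarObserver docs above together (metric and label names are illustrative; the histogram is assumed to support exemplars, as the built-in one does):

package main

import (
	"time"

	"github.com/prometheus/client_golang/prometheus"
)

func main() {
	// ObserverFunc adapts a plain func(float64) into an Observer, e.g. to
	// record a duration into a Gauge via a Timer.
	g := prometheus.NewGauge(prometheus.GaugeOpts{Name: "last_run_duration_seconds"})
	timer := prometheus.NewTimer(prometheus.ObserverFunc(g.Set))
	time.Sleep(10 * time.Millisecond)
	timer.ObserveDuration()

	// ExemplarObserver: attach an exemplar (e.g. a trace ID) to a histogram observation.
	h := prometheus.NewHistogramVec(prometheus.HistogramOpts{Name: "request_duration_seconds"}, []string{"method"})
	h.WithLabelValues("GET").(prometheus.ExemplarObserver).
		ObserveWithExemplar(0.042, prometheus.Labels{"trace_id": "abc123"})
}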
-type ExemplarObserver interface { - ObserveWithExemplar(value float64, exemplar Labels) -} diff --git a/vendor/github.com/prometheus/client_golang/prometheus/process_collector.go b/vendor/github.com/prometheus/client_golang/prometheus/process_collector.go deleted file mode 100644 index 8548dd18e..000000000 --- a/vendor/github.com/prometheus/client_golang/prometheus/process_collector.go +++ /dev/null @@ -1,164 +0,0 @@ -// Copyright 2015 The Prometheus Authors -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package prometheus - -import ( - "errors" - "fmt" - "os" - "strconv" - "strings" -) - -type processCollector struct { - collectFn func(chan<- Metric) - pidFn func() (int, error) - reportErrors bool - cpuTotal *Desc - openFDs, maxFDs *Desc - vsize, maxVsize *Desc - rss *Desc - startTime *Desc -} - -// ProcessCollectorOpts defines the behavior of a process metrics collector -// created with NewProcessCollector. -type ProcessCollectorOpts struct { - // PidFn returns the PID of the process the collector collects metrics - // for. It is called upon each collection. By default, the PID of the - // current process is used, as determined on construction time by - // calling os.Getpid(). - PidFn func() (int, error) - // If non-empty, each of the collected metrics is prefixed by the - // provided string and an underscore ("_"). - Namespace string - // If true, any error encountered during collection is reported as an - // invalid metric (see NewInvalidMetric). Otherwise, errors are ignored - // and the collected metrics will be incomplete. (Possibly, no metrics - // will be collected at all.) While that's usually not desired, it is - // appropriate for the common "mix-in" of process metrics, where process - // metrics are nice to have, but failing to collect them should not - // disrupt the collection of the remaining metrics. - ReportErrors bool -} - -// NewProcessCollector is the obsolete version of collectors.NewProcessCollector. -// See there for documentation. -// -// Deprecated: Use collectors.NewProcessCollector instead. 
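A minimal sketch of the non-deprecated path referenced above, assuming the collectors subpackage of client_golang:

package main

import (
	"github.com/prometheus/client_golang/prometheus"
	"github.com/prometheus/client_golang/prometheus/collectors"
)

func main() {
	// Register the process collector from the collectors package on a
	// custom registry instead of calling the deprecated constructor.
	reg := prometheus.NewRegistry()
	reg.MustRegister(collectors.NewProcessCollector(collectors.ProcessCollectorOpts{}))
}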
-func NewProcessCollector(opts ProcessCollectorOpts) Collector { - ns := "" - if len(opts.Namespace) > 0 { - ns = opts.Namespace + "_" - } - - c := &processCollector{ - reportErrors: opts.ReportErrors, - cpuTotal: NewDesc( - ns+"process_cpu_seconds_total", - "Total user and system CPU time spent in seconds.", - nil, nil, - ), - openFDs: NewDesc( - ns+"process_open_fds", - "Number of open file descriptors.", - nil, nil, - ), - maxFDs: NewDesc( - ns+"process_max_fds", - "Maximum number of open file descriptors.", - nil, nil, - ), - vsize: NewDesc( - ns+"process_virtual_memory_bytes", - "Virtual memory size in bytes.", - nil, nil, - ), - maxVsize: NewDesc( - ns+"process_virtual_memory_max_bytes", - "Maximum amount of virtual memory available in bytes.", - nil, nil, - ), - rss: NewDesc( - ns+"process_resident_memory_bytes", - "Resident memory size in bytes.", - nil, nil, - ), - startTime: NewDesc( - ns+"process_start_time_seconds", - "Start time of the process since unix epoch in seconds.", - nil, nil, - ), - } - - if opts.PidFn == nil { - c.pidFn = getPIDFn() - } else { - c.pidFn = opts.PidFn - } - - // Set up process metric collection if supported by the runtime. - if canCollectProcess() { - c.collectFn = c.processCollect - } else { - c.collectFn = func(ch chan<- Metric) { - c.reportError(ch, nil, errors.New("process metrics not supported on this platform")) - } - } - - return c -} - -// Describe returns all descriptions of the collector. -func (c *processCollector) Describe(ch chan<- *Desc) { - ch <- c.cpuTotal - ch <- c.openFDs - ch <- c.maxFDs - ch <- c.vsize - ch <- c.maxVsize - ch <- c.rss - ch <- c.startTime -} - -// Collect returns the current state of all metrics of the collector. -func (c *processCollector) Collect(ch chan<- Metric) { - c.collectFn(ch) -} - -func (c *processCollector) reportError(ch chan<- Metric, desc *Desc, err error) { - if !c.reportErrors { - return - } - if desc == nil { - desc = NewInvalidDesc(err) - } - ch <- NewInvalidMetric(desc, err) -} - -// NewPidFileFn returns a function that retrieves a pid from the specified file. -// It is meant to be used for the PidFn field in ProcessCollectorOpts. -func NewPidFileFn(pidFilePath string) func() (int, error) { - return func() (int, error) { - content, err := os.ReadFile(pidFilePath) - if err != nil { - return 0, fmt.Errorf("can't read pid file %q: %w", pidFilePath, err) - } - pid, err := strconv.Atoi(strings.TrimSpace(string(content))) - if err != nil { - return 0, fmt.Errorf("can't parse pid file %q: %w", pidFilePath, err) - } - - return pid, nil - } -} diff --git a/vendor/github.com/prometheus/client_golang/prometheus/process_collector_js.go b/vendor/github.com/prometheus/client_golang/prometheus/process_collector_js.go deleted file mode 100644 index b1e363d6c..000000000 --- a/vendor/github.com/prometheus/client_golang/prometheus/process_collector_js.go +++ /dev/null @@ -1,26 +0,0 @@ -// Copyright 2019 The Prometheus Authors -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -//go:build js -// +build js - -package prometheus - -func canCollectProcess() bool { - return false -} - -func (c *processCollector) processCollect(ch chan<- Metric) { - // noop on this platform - return -} diff --git a/vendor/github.com/prometheus/client_golang/prometheus/process_collector_other.go b/vendor/github.com/prometheus/client_golang/prometheus/process_collector_other.go deleted file mode 100644 index 8c1136cee..000000000 --- a/vendor/github.com/prometheus/client_golang/prometheus/process_collector_other.go +++ /dev/null @@ -1,66 +0,0 @@ -// Copyright 2019 The Prometheus Authors -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -//go:build !windows && !js && !wasip1 -// +build !windows,!js,!wasip1 - -package prometheus - -import ( - "github.com/prometheus/procfs" -) - -func canCollectProcess() bool { - _, err := procfs.NewDefaultFS() - return err == nil -} - -func (c *processCollector) processCollect(ch chan<- Metric) { - pid, err := c.pidFn() - if err != nil { - c.reportError(ch, nil, err) - return - } - - p, err := procfs.NewProc(pid) - if err != nil { - c.reportError(ch, nil, err) - return - } - - if stat, err := p.Stat(); err == nil { - ch <- MustNewConstMetric(c.cpuTotal, CounterValue, stat.CPUTime()) - ch <- MustNewConstMetric(c.vsize, GaugeValue, float64(stat.VirtualMemory())) - ch <- MustNewConstMetric(c.rss, GaugeValue, float64(stat.ResidentMemory())) - if startTime, err := stat.StartTime(); err == nil { - ch <- MustNewConstMetric(c.startTime, GaugeValue, startTime) - } else { - c.reportError(ch, c.startTime, err) - } - } else { - c.reportError(ch, nil, err) - } - - if fds, err := p.FileDescriptorsLen(); err == nil { - ch <- MustNewConstMetric(c.openFDs, GaugeValue, float64(fds)) - } else { - c.reportError(ch, c.openFDs, err) - } - - if limits, err := p.Limits(); err == nil { - ch <- MustNewConstMetric(c.maxFDs, GaugeValue, float64(limits.OpenFiles)) - ch <- MustNewConstMetric(c.maxVsize, GaugeValue, float64(limits.AddressSpace)) - } else { - c.reportError(ch, nil, err) - } -} diff --git a/vendor/github.com/prometheus/client_golang/prometheus/process_collector_wasip1.go b/vendor/github.com/prometheus/client_golang/prometheus/process_collector_wasip1.go deleted file mode 100644 index d8d9a6d7a..000000000 --- a/vendor/github.com/prometheus/client_golang/prometheus/process_collector_wasip1.go +++ /dev/null @@ -1,26 +0,0 @@ -// Copyright 2023 The Prometheus Authors -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -//go:build wasip1 -// +build wasip1 - -package prometheus - -func canCollectProcess() bool { - return false -} - -func (*processCollector) processCollect(chan<- Metric) { - // noop on this platform - return -} diff --git a/vendor/github.com/prometheus/client_golang/prometheus/process_collector_windows.go b/vendor/github.com/prometheus/client_golang/prometheus/process_collector_windows.go deleted file mode 100644 index f973398df..000000000 --- a/vendor/github.com/prometheus/client_golang/prometheus/process_collector_windows.go +++ /dev/null @@ -1,116 +0,0 @@ -// Copyright 2019 The Prometheus Authors -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package prometheus - -import ( - "syscall" - "unsafe" - - "golang.org/x/sys/windows" -) - -func canCollectProcess() bool { - return true -} - -var ( - modpsapi = syscall.NewLazyDLL("psapi.dll") - modkernel32 = syscall.NewLazyDLL("kernel32.dll") - - procGetProcessMemoryInfo = modpsapi.NewProc("GetProcessMemoryInfo") - procGetProcessHandleCount = modkernel32.NewProc("GetProcessHandleCount") -) - -type processMemoryCounters struct { - // System interface description - // https://docs.microsoft.com/en-us/windows/desktop/api/psapi/ns-psapi-process_memory_counters_ex - - // Refer to the Golang internal implementation - // https://golang.org/src/internal/syscall/windows/psapi_windows.go - _ uint32 - PageFaultCount uint32 - PeakWorkingSetSize uintptr - WorkingSetSize uintptr - QuotaPeakPagedPoolUsage uintptr - QuotaPagedPoolUsage uintptr - QuotaPeakNonPagedPoolUsage uintptr - QuotaNonPagedPoolUsage uintptr - PagefileUsage uintptr - PeakPagefileUsage uintptr - PrivateUsage uintptr -} - -func getProcessMemoryInfo(handle windows.Handle) (processMemoryCounters, error) { - mem := processMemoryCounters{} - r1, _, err := procGetProcessMemoryInfo.Call( - uintptr(handle), - uintptr(unsafe.Pointer(&mem)), - uintptr(unsafe.Sizeof(mem)), - ) - if r1 != 1 { - return mem, err - } else { - return mem, nil - } -} - -func getProcessHandleCount(handle windows.Handle) (uint32, error) { - var count uint32 - r1, _, err := procGetProcessHandleCount.Call( - uintptr(handle), - uintptr(unsafe.Pointer(&count)), - ) - if r1 != 1 { - return 0, err - } else { - return count, nil - } -} - -func (c *processCollector) processCollect(ch chan<- Metric) { - h, err := windows.GetCurrentProcess() - if err != nil { - c.reportError(ch, nil, err) - return - } - - var startTime, exitTime, kernelTime, userTime windows.Filetime - err = windows.GetProcessTimes(h, &startTime, &exitTime, &kernelTime, &userTime) - if err != nil { - c.reportError(ch, nil, err) - return - } - ch <- MustNewConstMetric(c.startTime, GaugeValue, float64(startTime.Nanoseconds()/1e9)) - ch <- MustNewConstMetric(c.cpuTotal, CounterValue, fileTimeToSeconds(kernelTime)+fileTimeToSeconds(userTime)) - - mem, err := getProcessMemoryInfo(h) - if err != nil { - c.reportError(ch, nil, err) - return - } - ch <- MustNewConstMetric(c.vsize, GaugeValue, float64(mem.PrivateUsage)) - ch <- MustNewConstMetric(c.rss, GaugeValue, 
float64(mem.WorkingSetSize)) - - handles, err := getProcessHandleCount(h) - if err != nil { - c.reportError(ch, nil, err) - return - } - ch <- MustNewConstMetric(c.openFDs, GaugeValue, float64(handles)) - ch <- MustNewConstMetric(c.maxFDs, GaugeValue, float64(16*1024*1024)) // Windows has a hard-coded max limit, not per-process. -} - -func fileTimeToSeconds(ft windows.Filetime) float64 { - return float64(uint64(ft.HighDateTime)<<32+uint64(ft.LowDateTime)) / 1e7 -} diff --git a/vendor/github.com/prometheus/client_golang/prometheus/registry.go b/vendor/github.com/prometheus/client_golang/prometheus/registry.go deleted file mode 100644 index 5e2ced25a..000000000 --- a/vendor/github.com/prometheus/client_golang/prometheus/registry.go +++ /dev/null @@ -1,1075 +0,0 @@ -// Copyright 2014 The Prometheus Authors -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package prometheus - -import ( - "bytes" - "errors" - "fmt" - "os" - "path/filepath" - "runtime" - "sort" - "strconv" - "strings" - "sync" - "unicode/utf8" - - "github.com/prometheus/client_golang/prometheus/internal" - - "github.com/cespare/xxhash/v2" - dto "github.com/prometheus/client_model/go" - "github.com/prometheus/common/expfmt" - "google.golang.org/protobuf/proto" -) - -const ( - // Capacity for the channel to collect metrics and descriptors. - capMetricChan = 1000 - capDescChan = 10 -) - -// DefaultRegisterer and DefaultGatherer are the implementations of the -// Registerer and Gatherer interface a number of convenience functions in this -// package act on. Initially, both variables point to the same Registry, which -// has a process collector (currently on Linux only, see NewProcessCollector) -// and a Go collector (see NewGoCollector, in particular the note about -// stop-the-world implication with Go versions older than 1.9) already -// registered. This approach to keep default instances as global state mirrors -// the approach of other packages in the Go standard library. Note that there -// are caveats. Change the variables with caution and only if you understand the -// consequences. Users who want to avoid global state altogether should not use -// the convenience functions and act on custom instances instead. -var ( - defaultRegistry = NewRegistry() - DefaultRegisterer Registerer = defaultRegistry - DefaultGatherer Gatherer = defaultRegistry -) - -func init() { - MustRegister(NewProcessCollector(ProcessCollectorOpts{})) - MustRegister(NewGoCollector()) -} - -// NewRegistry creates a new vanilla Registry without any Collectors -// pre-registered. -func NewRegistry() *Registry { - return &Registry{ - collectorsByID: map[uint64]Collector{}, - descIDs: map[uint64]struct{}{}, - dimHashesByName: map[string]uint64{}, - } -} - -// NewPedanticRegistry returns a registry that checks during collection if each -// collected Metric is consistent with its reported Desc, and if the Desc has -// actually been registered with the registry. 
Unchecked Collectors (those whose -// Describe method does not yield any descriptors) are excluded from the check. -// -// Usually, a Registry will be happy as long as the union of all collected -// Metrics is consistent and valid even if some metrics are not consistent with -// their own Desc or a Desc provided by their registered Collector. Well-behaved -// Collectors and Metrics will only provide consistent Descs. This Registry is -// useful to test the implementation of Collectors and Metrics. -func NewPedanticRegistry() *Registry { - r := NewRegistry() - r.pedanticChecksEnabled = true - return r -} - -// Registerer is the interface for the part of a registry in charge of -// registering and unregistering. Users of custom registries should use -// Registerer as type for registration purposes (rather than the Registry type -// directly). In that way, they are free to use custom Registerer implementation -// (e.g. for testing purposes). -type Registerer interface { - // Register registers a new Collector to be included in metrics - // collection. It returns an error if the descriptors provided by the - // Collector are invalid or if they — in combination with descriptors of - // already registered Collectors — do not fulfill the consistency and - // uniqueness criteria described in the documentation of metric.Desc. - // - // If the provided Collector is equal to a Collector already registered - // (which includes the case of re-registering the same Collector), the - // returned error is an instance of AlreadyRegisteredError, which - // contains the previously registered Collector. - // - // A Collector whose Describe method does not yield any Desc is treated - // as unchecked. Registration will always succeed. No check for - // re-registering (see previous paragraph) is performed. Thus, the - // caller is responsible for not double-registering the same unchecked - // Collector, and for providing a Collector that will not cause - // inconsistent metrics on collection. (This would lead to scrape - // errors.) - Register(Collector) error - // MustRegister works like Register but registers any number of - // Collectors and panics upon the first registration that causes an - // error. - MustRegister(...Collector) - // Unregister unregisters the Collector that equals the Collector passed - // in as an argument. (Two Collectors are considered equal if their - // Describe method yields the same set of descriptors.) The function - // returns whether a Collector was unregistered. Note that an unchecked - // Collector cannot be unregistered (as its Describe method does not - // yield any descriptor). - // - // Note that even after unregistering, it will not be possible to - // register a new Collector that is inconsistent with the unregistered - // Collector, e.g. a Collector collecting metrics with the same name but - // a different help string. The rationale here is that the same registry - // instance must only collect consistent metrics throughout its - // lifetime. - Unregister(Collector) bool -} - -// Gatherer is the interface for the part of a registry in charge of gathering -// the collected metrics into a number of MetricFamilies. The Gatherer interface -// comes with the same general implication as described for the Registerer -// interface. -type Gatherer interface { - // Gather calls the Collect method of the registered Collectors and then - // gathers the collected metrics into a lexicographically sorted slice - // of uniquely named MetricFamily protobufs. 
Gather ensures that the - // returned slice is valid and self-consistent so that it can be used - // for valid exposition. As an exception to the strict consistency - // requirements described for metric.Desc, Gather will tolerate - // different sets of label names for metrics of the same metric family. - // - // Even if an error occurs, Gather attempts to gather as many metrics as - // possible. Hence, if a non-nil error is returned, the returned - // MetricFamily slice could be nil (in case of a fatal error that - // prevented any meaningful metric collection) or contain a number of - // MetricFamily protobufs, some of which might be incomplete, and some - // might be missing altogether. The returned error (which might be a - // MultiError) explains the details. Note that this is mostly useful for - // debugging purposes. If the gathered protobufs are to be used for - // exposition in actual monitoring, it is almost always better to not - // expose an incomplete result and instead disregard the returned - // MetricFamily protobufs in case the returned error is non-nil. - Gather() ([]*dto.MetricFamily, error) -} - -// Register registers the provided Collector with the DefaultRegisterer. -// -// Register is a shortcut for DefaultRegisterer.Register(c). See there for more -// details. -func Register(c Collector) error { - return DefaultRegisterer.Register(c) -} - -// MustRegister registers the provided Collectors with the DefaultRegisterer and -// panics if any error occurs. -// -// MustRegister is a shortcut for DefaultRegisterer.MustRegister(cs...). See -// there for more details. -func MustRegister(cs ...Collector) { - DefaultRegisterer.MustRegister(cs...) -} - -// Unregister removes the registration of the provided Collector from the -// DefaultRegisterer. -// -// Unregister is a shortcut for DefaultRegisterer.Unregister(c). See there for -// more details. -func Unregister(c Collector) bool { - return DefaultRegisterer.Unregister(c) -} - -// GathererFunc turns a function into a Gatherer. -type GathererFunc func() ([]*dto.MetricFamily, error) - -// Gather implements Gatherer. -func (gf GathererFunc) Gather() ([]*dto.MetricFamily, error) { - return gf() -} - -// AlreadyRegisteredError is returned by the Register method if the Collector to -// be registered has already been registered before, or a different Collector -// that collects the same metrics has been registered before. Registration fails -// in that case, but you can detect from the kind of error what has -// happened. The error contains fields for the existing Collector and the -// (rejected) new Collector that equals the existing one. This can be used to -// find out if an equal Collector has been registered before and switch over to -// using the old one, as demonstrated in the example. -type AlreadyRegisteredError struct { - ExistingCollector, NewCollector Collector -} - -func (err AlreadyRegisteredError) Error() string { - return "duplicate metrics collector registration attempted" -} - -// MultiError is a slice of errors implementing the error interface. It is used -// by a Gatherer to report multiple errors during MetricFamily gathering. -type MultiError []error - -// Error formats the contained errors as a bullet point list, preceded by the -// total number of errors. Note that this results in a multi-line string. 
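Aside (illustrative sketch, not part of the vendored file): the AlreadyRegisteredError contract described above supports a small "register or reuse" pattern. The counter name and helper function here are hypothetical; NewCounter/CounterOpts are assumed from the same package.

    import (
        "errors"
        "log"

        "github.com/prometheus/client_golang/prometheus"
    )

    // mustCounter registers a counter, or reuses an equal collector that was
    // registered earlier, following the AlreadyRegisteredError contract.
    func mustCounter() prometheus.Counter {
        c := prometheus.NewCounter(prometheus.CounterOpts{
            Name: "myapp_requests_total",
            Help: "Total requests handled.",
        })
        err := prometheus.Register(c)
        var are prometheus.AlreadyRegisteredError
        if errors.As(err, &are) {
            // An equal collector exists; switch over to the old one.
            return are.ExistingCollector.(prometheus.Counter)
        }
        if err != nil {
            log.Fatal(err)
        }
        return c
    }
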
-func (errs MultiError) Error() string { - if len(errs) == 0 { - return "" - } - buf := &bytes.Buffer{} - fmt.Fprintf(buf, "%d error(s) occurred:", len(errs)) - for _, err := range errs { - fmt.Fprintf(buf, "\n* %s", err) - } - return buf.String() -} - -// Append appends the provided error if it is not nil. -func (errs *MultiError) Append(err error) { - if err != nil { - *errs = append(*errs, err) - } -} - -// MaybeUnwrap returns nil if len(errs) is 0. It returns the first and only -// contained error as error if len(errs is 1). In all other cases, it returns -// the MultiError directly. This is helpful for returning a MultiError in a way -// that only uses the MultiError if needed. -func (errs MultiError) MaybeUnwrap() error { - switch len(errs) { - case 0: - return nil - case 1: - return errs[0] - default: - return errs - } -} - -// Registry registers Prometheus collectors, collects their metrics, and gathers -// them into MetricFamilies for exposition. It implements Registerer, Gatherer, -// and Collector. The zero value is not usable. Create instances with -// NewRegistry or NewPedanticRegistry. -// -// Registry implements Collector to allow it to be used for creating groups of -// metrics. See the Grouping example for how this can be done. -type Registry struct { - mtx sync.RWMutex - collectorsByID map[uint64]Collector // ID is a hash of the descIDs. - descIDs map[uint64]struct{} - dimHashesByName map[string]uint64 - uncheckedCollectors []Collector - pedanticChecksEnabled bool -} - -// Register implements Registerer. -func (r *Registry) Register(c Collector) error { - var ( - descChan = make(chan *Desc, capDescChan) - newDescIDs = map[uint64]struct{}{} - newDimHashesByName = map[string]uint64{} - collectorID uint64 // All desc IDs XOR'd together. - duplicateDescErr error - ) - go func() { - c.Describe(descChan) - close(descChan) - }() - r.mtx.Lock() - defer func() { - // Drain channel in case of premature return to not leak a goroutine. - for range descChan { - } - r.mtx.Unlock() - }() - // Conduct various tests... - for desc := range descChan { - - // Is the descriptor valid at all? - if desc.err != nil { - return fmt.Errorf("descriptor %s is invalid: %w", desc, desc.err) - } - - // Is the descID unique? - // (In other words: Is the fqName + constLabel combination unique?) - if _, exists := r.descIDs[desc.id]; exists { - duplicateDescErr = fmt.Errorf("descriptor %s already exists with the same fully-qualified name and const label values", desc) - } - // If it is not a duplicate desc in this collector, XOR it to - // the collectorID. (We allow duplicate descs within the same - // collector, but their existence must be a no-op.) - if _, exists := newDescIDs[desc.id]; !exists { - newDescIDs[desc.id] = struct{}{} - collectorID ^= desc.id - } - - // Are all the label names and the help string consistent with - // previous descriptors of the same name? - // First check existing descriptors... - if dimHash, exists := r.dimHashesByName[desc.fqName]; exists { - if dimHash != desc.dimHash { - return fmt.Errorf("a previously registered descriptor with the same fully-qualified name as %s has different label names or a different help string", desc) - } - } else { - // ...then check the new descriptors already seen. 
- if dimHash, exists := newDimHashesByName[desc.fqName]; exists { - if dimHash != desc.dimHash { - return fmt.Errorf("descriptors reported by collector have inconsistent label names or help strings for the same fully-qualified name, offender is %s", desc) - } - } else { - newDimHashesByName[desc.fqName] = desc.dimHash - } - } - } - // A Collector yielding no Desc at all is considered unchecked. - if len(newDescIDs) == 0 { - r.uncheckedCollectors = append(r.uncheckedCollectors, c) - return nil - } - if existing, exists := r.collectorsByID[collectorID]; exists { - switch e := existing.(type) { - case *wrappingCollector: - return AlreadyRegisteredError{ - ExistingCollector: e.unwrapRecursively(), - NewCollector: c, - } - default: - return AlreadyRegisteredError{ - ExistingCollector: e, - NewCollector: c, - } - } - } - // If the collectorID is new, but at least one of the descs existed - // before, we are in trouble. - if duplicateDescErr != nil { - return duplicateDescErr - } - - // Only after all tests have passed, actually register. - r.collectorsByID[collectorID] = c - for hash := range newDescIDs { - r.descIDs[hash] = struct{}{} - } - for name, dimHash := range newDimHashesByName { - r.dimHashesByName[name] = dimHash - } - return nil -} - -// Unregister implements Registerer. -func (r *Registry) Unregister(c Collector) bool { - var ( - descChan = make(chan *Desc, capDescChan) - descIDs = map[uint64]struct{}{} - collectorID uint64 // All desc IDs XOR'd together. - ) - go func() { - c.Describe(descChan) - close(descChan) - }() - for desc := range descChan { - if _, exists := descIDs[desc.id]; !exists { - collectorID ^= desc.id - descIDs[desc.id] = struct{}{} - } - } - - r.mtx.RLock() - if _, exists := r.collectorsByID[collectorID]; !exists { - r.mtx.RUnlock() - return false - } - r.mtx.RUnlock() - - r.mtx.Lock() - defer r.mtx.Unlock() - - delete(r.collectorsByID, collectorID) - for id := range descIDs { - delete(r.descIDs, id) - } - // dimHashesByName is left untouched as those must be consistent - // throughout the lifetime of a program. - return true -} - -// MustRegister implements Registerer. -func (r *Registry) MustRegister(cs ...Collector) { - for _, c := range cs { - if err := r.Register(c); err != nil { - panic(err) - } - } -} - -// Gather implements Gatherer. -func (r *Registry) Gather() ([]*dto.MetricFamily, error) { - r.mtx.RLock() - - if len(r.collectorsByID) == 0 && len(r.uncheckedCollectors) == 0 { - // Fast path. - r.mtx.RUnlock() - return nil, nil - } - - var ( - checkedMetricChan = make(chan Metric, capMetricChan) - uncheckedMetricChan = make(chan Metric, capMetricChan) - metricHashes = map[uint64]struct{}{} - wg sync.WaitGroup - errs MultiError // The collected errors to return in the end. - registeredDescIDs map[uint64]struct{} // Only used for pedantic checks - ) - - goroutineBudget := len(r.collectorsByID) + len(r.uncheckedCollectors) - metricFamiliesByName := make(map[string]*dto.MetricFamily, len(r.dimHashesByName)) - checkedCollectors := make(chan Collector, len(r.collectorsByID)) - uncheckedCollectors := make(chan Collector, len(r.uncheckedCollectors)) - for _, collector := range r.collectorsByID { - checkedCollectors <- collector - } - for _, collector := range r.uncheckedCollectors { - uncheckedCollectors <- collector - } - // In case pedantic checks are enabled, we have to copy the map before - // giving up the RLock. 
- if r.pedanticChecksEnabled { - registeredDescIDs = make(map[uint64]struct{}, len(r.descIDs)) - for id := range r.descIDs { - registeredDescIDs[id] = struct{}{} - } - } - r.mtx.RUnlock() - - wg.Add(goroutineBudget) - - collectWorker := func() { - for { - select { - case collector := <-checkedCollectors: - collector.Collect(checkedMetricChan) - case collector := <-uncheckedCollectors: - collector.Collect(uncheckedMetricChan) - default: - return - } - wg.Done() - } - } - - // Start the first worker now to make sure at least one is running. - go collectWorker() - goroutineBudget-- - - // Close checkedMetricChan and uncheckedMetricChan once all collectors - // are collected. - go func() { - wg.Wait() - close(checkedMetricChan) - close(uncheckedMetricChan) - }() - - // Drain checkedMetricChan and uncheckedMetricChan in case of premature return. - defer func() { - if checkedMetricChan != nil { - for range checkedMetricChan { - } - } - if uncheckedMetricChan != nil { - for range uncheckedMetricChan { - } - } - }() - - // Copy the channel references so we can nil them out later to remove - // them from the select statements below. - cmc := checkedMetricChan - umc := uncheckedMetricChan - - for { - select { - case metric, ok := <-cmc: - if !ok { - cmc = nil - break - } - errs.Append(processMetric( - metric, metricFamiliesByName, - metricHashes, - registeredDescIDs, - )) - case metric, ok := <-umc: - if !ok { - umc = nil - break - } - errs.Append(processMetric( - metric, metricFamiliesByName, - metricHashes, - nil, - )) - default: - if goroutineBudget <= 0 || len(checkedCollectors)+len(uncheckedCollectors) == 0 { - // All collectors are already being worked on or - // we have already as many goroutines started as - // there are collectors. Do the same as above, - // just without the default. - select { - case metric, ok := <-cmc: - if !ok { - cmc = nil - break - } - errs.Append(processMetric( - metric, metricFamiliesByName, - metricHashes, - registeredDescIDs, - )) - case metric, ok := <-umc: - if !ok { - umc = nil - break - } - errs.Append(processMetric( - metric, metricFamiliesByName, - metricHashes, - nil, - )) - } - break - } - // Start more workers. - go collectWorker() - goroutineBudget-- - runtime.Gosched() - } - // Once both checkedMetricChan and uncheckedMetricChan are closed - // and drained, the contraption above will nil out cmc and umc, - // and then we can leave the collect loop here. - if cmc == nil && umc == nil { - break - } - } - return internal.NormalizeMetricFamilies(metricFamiliesByName), errs.MaybeUnwrap() -} - -// Describe implements Collector. -func (r *Registry) Describe(ch chan<- *Desc) { - r.mtx.RLock() - defer r.mtx.RUnlock() - - // Only report the checked Collectors; unchecked collectors don't report any - // Desc. - for _, c := range r.collectorsByID { - c.Describe(ch) - } -} - -// Collect implements Collector. -func (r *Registry) Collect(ch chan<- Metric) { - r.mtx.RLock() - defer r.mtx.RUnlock() - - for _, c := range r.collectorsByID { - c.Collect(ch) - } - for _, c := range r.uncheckedCollectors { - c.Collect(ch) - } -} - -// WriteToTextfile calls Gather on the provided Gatherer, encodes the result in the -// Prometheus text format, and writes it to a temporary file. Upon success, the -// temporary file is renamed to the provided filename. -// -// This is intended for use with the textfile collector of the node exporter. -// Note that the node exporter expects the filename to be suffixed with ".prom". 
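Usage-level aside (illustrative, separate from the vendored implementation that follows): WriteToTextfile, documented just above, is typically pointed at the default gatherer. The output path is an assumption; the standard library logger is assumed for error handling.

    // Write the current metrics snapshot where the node exporter's
    // textfile collector will pick it up (note the ".prom" suffix).
    if err := prometheus.WriteToTextfile(
        "/var/lib/node_exporter/textfile/myapp.prom",
        prometheus.DefaultGatherer,
    ); err != nil {
        log.Printf("writing textfile metrics: %v", err)
    }
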
-func WriteToTextfile(filename string, g Gatherer) error { - tmp, err := os.CreateTemp(filepath.Dir(filename), filepath.Base(filename)) - if err != nil { - return err - } - defer os.Remove(tmp.Name()) - - mfs, err := g.Gather() - if err != nil { - return err - } - for _, mf := range mfs { - if _, err := expfmt.MetricFamilyToText(tmp, mf); err != nil { - return err - } - } - if err := tmp.Close(); err != nil { - return err - } - - if err := os.Chmod(tmp.Name(), 0o644); err != nil { - return err - } - return os.Rename(tmp.Name(), filename) -} - -// processMetric is an internal helper method only used by the Gather method. -func processMetric( - metric Metric, - metricFamiliesByName map[string]*dto.MetricFamily, - metricHashes map[uint64]struct{}, - registeredDescIDs map[uint64]struct{}, -) error { - desc := metric.Desc() - // Wrapped metrics collected by an unchecked Collector can have an - // invalid Desc. - if desc.err != nil { - return desc.err - } - dtoMetric := &dto.Metric{} - if err := metric.Write(dtoMetric); err != nil { - return fmt.Errorf("error collecting metric %v: %w", desc, err) - } - metricFamily, ok := metricFamiliesByName[desc.fqName] - if ok { // Existing name. - if metricFamily.GetHelp() != desc.help { - return fmt.Errorf( - "collected metric %s %s has help %q but should have %q", - desc.fqName, dtoMetric, desc.help, metricFamily.GetHelp(), - ) - } - // TODO(beorn7): Simplify switch once Desc has type. - switch metricFamily.GetType() { - case dto.MetricType_COUNTER: - if dtoMetric.Counter == nil { - return fmt.Errorf( - "collected metric %s %s should be a Counter", - desc.fqName, dtoMetric, - ) - } - case dto.MetricType_GAUGE: - if dtoMetric.Gauge == nil { - return fmt.Errorf( - "collected metric %s %s should be a Gauge", - desc.fqName, dtoMetric, - ) - } - case dto.MetricType_SUMMARY: - if dtoMetric.Summary == nil { - return fmt.Errorf( - "collected metric %s %s should be a Summary", - desc.fqName, dtoMetric, - ) - } - case dto.MetricType_UNTYPED: - if dtoMetric.Untyped == nil { - return fmt.Errorf( - "collected metric %s %s should be Untyped", - desc.fqName, dtoMetric, - ) - } - case dto.MetricType_HISTOGRAM: - if dtoMetric.Histogram == nil { - return fmt.Errorf( - "collected metric %s %s should be a Histogram", - desc.fqName, dtoMetric, - ) - } - default: - panic("encountered MetricFamily with invalid type") - } - } else { // New name. - metricFamily = &dto.MetricFamily{} - metricFamily.Name = proto.String(desc.fqName) - metricFamily.Help = proto.String(desc.help) - // TODO(beorn7): Simplify switch once Desc has type. - switch { - case dtoMetric.Gauge != nil: - metricFamily.Type = dto.MetricType_GAUGE.Enum() - case dtoMetric.Counter != nil: - metricFamily.Type = dto.MetricType_COUNTER.Enum() - case dtoMetric.Summary != nil: - metricFamily.Type = dto.MetricType_SUMMARY.Enum() - case dtoMetric.Untyped != nil: - metricFamily.Type = dto.MetricType_UNTYPED.Enum() - case dtoMetric.Histogram != nil: - metricFamily.Type = dto.MetricType_HISTOGRAM.Enum() - default: - return fmt.Errorf("empty metric collected: %s", dtoMetric) - } - if err := checkSuffixCollisions(metricFamily, metricFamiliesByName); err != nil { - return err - } - metricFamiliesByName[desc.fqName] = metricFamily - } - if err := checkMetricConsistency(metricFamily, dtoMetric, metricHashes); err != nil { - return err - } - if registeredDescIDs != nil { - // Is the desc registered at all? 
- if _, exist := registeredDescIDs[desc.id]; !exist { - return fmt.Errorf( - "collected metric %s %s with unregistered descriptor %s", - metricFamily.GetName(), dtoMetric, desc, - ) - } - if err := checkDescConsistency(metricFamily, dtoMetric, desc); err != nil { - return err - } - } - metricFamily.Metric = append(metricFamily.Metric, dtoMetric) - return nil -} - -// Gatherers is a slice of Gatherer instances that implements the Gatherer -// interface itself. Its Gather method calls Gather on all Gatherers in the -// slice in order and returns the merged results. Errors returned from the -// Gather calls are all returned in a flattened MultiError. Duplicate and -// inconsistent Metrics are skipped (first occurrence in slice order wins) and -// reported in the returned error. -// -// Gatherers can be used to merge the Gather results from multiple -// Registries. It also provides a way to directly inject existing MetricFamily -// protobufs into the gathering by creating a custom Gatherer with a Gather -// method that simply returns the existing MetricFamily protobufs. Note that no -// registration is involved (in contrast to Collector registration), so -// obviously registration-time checks cannot happen. Any inconsistencies between -// the gathered MetricFamilies are reported as errors by the Gather method, and -// inconsistent Metrics are dropped. Invalid parts of the MetricFamilies -// (e.g. syntactically invalid metric or label names) will go undetected. -type Gatherers []Gatherer - -// Gather implements Gatherer. -func (gs Gatherers) Gather() ([]*dto.MetricFamily, error) { - var ( - metricFamiliesByName = map[string]*dto.MetricFamily{} - metricHashes = map[uint64]struct{}{} - errs MultiError // The collected errors to return in the end. - ) - - for i, g := range gs { - mfs, err := g.Gather() - if err != nil { - multiErr := MultiError{} - if errors.As(err, &multiErr) { - for _, err := range multiErr { - errs = append(errs, fmt.Errorf("[from Gatherer #%d] %w", i+1, err)) - } - } else { - errs = append(errs, fmt.Errorf("[from Gatherer #%d] %w", i+1, err)) - } - } - for _, mf := range mfs { - existingMF, exists := metricFamiliesByName[mf.GetName()] - if exists { - if existingMF.GetHelp() != mf.GetHelp() { - errs = append(errs, fmt.Errorf( - "gathered metric family %s has help %q but should have %q", - mf.GetName(), mf.GetHelp(), existingMF.GetHelp(), - )) - continue - } - if existingMF.GetType() != mf.GetType() { - errs = append(errs, fmt.Errorf( - "gathered metric family %s has type %s but should have %s", - mf.GetName(), mf.GetType(), existingMF.GetType(), - )) - continue - } - } else { - existingMF = &dto.MetricFamily{} - existingMF.Name = mf.Name - existingMF.Help = mf.Help - existingMF.Type = mf.Type - if err := checkSuffixCollisions(existingMF, metricFamiliesByName); err != nil { - errs = append(errs, err) - continue - } - metricFamiliesByName[mf.GetName()] = existingMF - } - for _, m := range mf.Metric { - if err := checkMetricConsistency(existingMF, m, metricHashes); err != nil { - errs = append(errs, err) - continue - } - existingMF.Metric = append(existingMF.Metric, m) - } - } - } - return internal.NormalizeMetricFamilies(metricFamiliesByName), errs.MaybeUnwrap() -} - -// checkSuffixCollisions checks for collisions with the “magic” suffixes the -// Prometheus text format and the internal metric representation of the -// Prometheus server add while flattening Summaries and Histograms. 
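Aside (illustrative sketch, not from the vendored file): the Gatherers type above merges gather results in slice order, which is how metrics from multiple registries can be exposed together. The registry names are assumptions.

    appReg := prometheus.NewRegistry()
    debugReg := prometheus.NewRegistry()

    // Gather calls each gatherer in slice order; duplicate or inconsistent
    // metric families are skipped (first occurrence wins) and reported in
    // the returned (possibly Multi-) error.
    merged := prometheus.Gatherers{appReg, debugReg}
    mfs, err := merged.Gather()
    if err != nil {
        log.Printf("gathering: %v", err)
    }
    _ = mfs
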
-func checkSuffixCollisions(mf *dto.MetricFamily, mfs map[string]*dto.MetricFamily) error { - var ( - newName = mf.GetName() - newType = mf.GetType() - newNameWithoutSuffix = "" - ) - switch { - case strings.HasSuffix(newName, "_count"): - newNameWithoutSuffix = newName[:len(newName)-6] - case strings.HasSuffix(newName, "_sum"): - newNameWithoutSuffix = newName[:len(newName)-4] - case strings.HasSuffix(newName, "_bucket"): - newNameWithoutSuffix = newName[:len(newName)-7] - } - if newNameWithoutSuffix != "" { - if existingMF, ok := mfs[newNameWithoutSuffix]; ok { - switch existingMF.GetType() { - case dto.MetricType_SUMMARY: - if !strings.HasSuffix(newName, "_bucket") { - return fmt.Errorf( - "collected metric named %q collides with previously collected summary named %q", - newName, newNameWithoutSuffix, - ) - } - case dto.MetricType_HISTOGRAM: - return fmt.Errorf( - "collected metric named %q collides with previously collected histogram named %q", - newName, newNameWithoutSuffix, - ) - } - } - } - if newType == dto.MetricType_SUMMARY || newType == dto.MetricType_HISTOGRAM { - if _, ok := mfs[newName+"_count"]; ok { - return fmt.Errorf( - "collected histogram or summary named %q collides with previously collected metric named %q", - newName, newName+"_count", - ) - } - if _, ok := mfs[newName+"_sum"]; ok { - return fmt.Errorf( - "collected histogram or summary named %q collides with previously collected metric named %q", - newName, newName+"_sum", - ) - } - } - if newType == dto.MetricType_HISTOGRAM { - if _, ok := mfs[newName+"_bucket"]; ok { - return fmt.Errorf( - "collected histogram named %q collides with previously collected metric named %q", - newName, newName+"_bucket", - ) - } - } - return nil -} - -// checkMetricConsistency checks if the provided Metric is consistent with the -// provided MetricFamily. It also hashes the Metric labels and the MetricFamily -// name. If the resulting hash is already in the provided metricHashes, an error -// is returned. If not, it is added to metricHashes. -func checkMetricConsistency( - metricFamily *dto.MetricFamily, - dtoMetric *dto.Metric, - metricHashes map[uint64]struct{}, -) error { - name := metricFamily.GetName() - - // Type consistency with metric family. 
- if metricFamily.GetType() == dto.MetricType_GAUGE && dtoMetric.Gauge == nil || - metricFamily.GetType() == dto.MetricType_COUNTER && dtoMetric.Counter == nil || - metricFamily.GetType() == dto.MetricType_SUMMARY && dtoMetric.Summary == nil || - metricFamily.GetType() == dto.MetricType_HISTOGRAM && dtoMetric.Histogram == nil || - metricFamily.GetType() == dto.MetricType_UNTYPED && dtoMetric.Untyped == nil { - return fmt.Errorf( - "collected metric %q { %s} is not a %s", - name, dtoMetric, metricFamily.GetType(), - ) - } - - previousLabelName := "" - for _, labelPair := range dtoMetric.GetLabel() { - labelName := labelPair.GetName() - if labelName == previousLabelName { - return fmt.Errorf( - "collected metric %q { %s} has two or more labels with the same name: %s", - name, dtoMetric, labelName, - ) - } - if !checkLabelName(labelName) { - return fmt.Errorf( - "collected metric %q { %s} has a label with an invalid name: %s", - name, dtoMetric, labelName, - ) - } - if dtoMetric.Summary != nil && labelName == quantileLabel { - return fmt.Errorf( - "collected metric %q { %s} must not have an explicit %q label", - name, dtoMetric, quantileLabel, - ) - } - if !utf8.ValidString(labelPair.GetValue()) { - return fmt.Errorf( - "collected metric %q { %s} has a label named %q whose value is not utf8: %#v", - name, dtoMetric, labelName, labelPair.GetValue()) - } - previousLabelName = labelName - } - - // Is the metric unique (i.e. no other metric with the same name and the same labels)? - h := xxhash.New() - h.WriteString(name) - h.Write(separatorByteSlice) - // Make sure label pairs are sorted. We depend on it for the consistency - // check. - if !sort.IsSorted(internal.LabelPairSorter(dtoMetric.Label)) { - // We cannot sort dtoMetric.Label in place as it is immutable by contract. - copiedLabels := make([]*dto.LabelPair, len(dtoMetric.Label)) - copy(copiedLabels, dtoMetric.Label) - sort.Sort(internal.LabelPairSorter(copiedLabels)) - dtoMetric.Label = copiedLabels - } - for _, lp := range dtoMetric.Label { - h.WriteString(lp.GetName()) - h.Write(separatorByteSlice) - h.WriteString(lp.GetValue()) - h.Write(separatorByteSlice) - } - if dtoMetric.TimestampMs != nil { - h.WriteString(strconv.FormatInt(*(dtoMetric.TimestampMs), 10)) - h.Write(separatorByteSlice) - } - hSum := h.Sum64() - if _, exists := metricHashes[hSum]; exists { - return fmt.Errorf( - "collected metric %q { %s} was collected before with the same name and label values", - name, dtoMetric, - ) - } - metricHashes[hSum] = struct{}{} - return nil -} - -func checkDescConsistency( - metricFamily *dto.MetricFamily, - dtoMetric *dto.Metric, - desc *Desc, -) error { - // Desc help consistency with metric family help. - if metricFamily.GetHelp() != desc.help { - return fmt.Errorf( - "collected metric %s %s has help %q but should have %q", - metricFamily.GetName(), dtoMetric, metricFamily.GetHelp(), desc.help, - ) - } - - // Is the desc consistent with the content of the metric? 
- lpsFromDesc := make([]*dto.LabelPair, len(desc.constLabelPairs), len(dtoMetric.Label)) - copy(lpsFromDesc, desc.constLabelPairs) - for _, l := range desc.variableLabels.names { - lpsFromDesc = append(lpsFromDesc, &dto.LabelPair{ - Name: proto.String(l), - }) - } - if len(lpsFromDesc) != len(dtoMetric.Label) { - return fmt.Errorf( - "labels in collected metric %s %s are inconsistent with descriptor %s", - metricFamily.GetName(), dtoMetric, desc, - ) - } - sort.Sort(internal.LabelPairSorter(lpsFromDesc)) - for i, lpFromDesc := range lpsFromDesc { - lpFromMetric := dtoMetric.Label[i] - if lpFromDesc.GetName() != lpFromMetric.GetName() || - lpFromDesc.Value != nil && lpFromDesc.GetValue() != lpFromMetric.GetValue() { - return fmt.Errorf( - "labels in collected metric %s %s are inconsistent with descriptor %s", - metricFamily.GetName(), dtoMetric, desc, - ) - } - } - return nil -} - -var _ TransactionalGatherer = &MultiTRegistry{} - -// MultiTRegistry is a TransactionalGatherer that joins gathered metrics from multiple -// transactional gatherers. -// -// It is caller responsibility to ensure two registries have mutually exclusive metric families, -// no deduplication will happen. -type MultiTRegistry struct { - tGatherers []TransactionalGatherer -} - -// NewMultiTRegistry creates MultiTRegistry. -func NewMultiTRegistry(tGatherers ...TransactionalGatherer) *MultiTRegistry { - return &MultiTRegistry{ - tGatherers: tGatherers, - } -} - -// Gather implements TransactionalGatherer interface. -func (r *MultiTRegistry) Gather() (mfs []*dto.MetricFamily, done func(), err error) { - errs := MultiError{} - - dFns := make([]func(), 0, len(r.tGatherers)) - // TODO(bwplotka): Implement concurrency for those? - for _, g := range r.tGatherers { - // TODO(bwplotka): Check for duplicates? - m, d, err := g.Gather() - errs.Append(err) - - mfs = append(mfs, m...) - dFns = append(dFns, d) - } - - // TODO(bwplotka): Consider sort in place, given metric family in gather is sorted already. - sort.Slice(mfs, func(i, j int) bool { - return *mfs[i].Name < *mfs[j].Name - }) - return mfs, func() { - for _, d := range dFns { - d() - } - }, errs.MaybeUnwrap() -} - -// TransactionalGatherer represents transactional gatherer that can be triggered to notify gatherer that memory -// used by metric family is no longer used by a caller. This allows implementations with cache. -type TransactionalGatherer interface { - // Gather returns metrics in a lexicographically sorted slice - // of uniquely named MetricFamily protobufs. Gather ensures that the - // returned slice is valid and self-consistent so that it can be used - // for valid exposition. As an exception to the strict consistency - // requirements described for metric.Desc, Gather will tolerate - // different sets of label names for metrics of the same metric family. - // - // Even if an error occurs, Gather attempts to gather as many metrics as - // possible. Hence, if a non-nil error is returned, the returned - // MetricFamily slice could be nil (in case of a fatal error that - // prevented any meaningful metric collection) or contain a number of - // MetricFamily protobufs, some of which might be incomplete, and some - // might be missing altogether. The returned error (which might be a - // MultiError) explains the details. Note that this is mostly useful for - // debugging purposes. 
If the gathered protobufs are to be used for - // exposition in actual monitoring, it is almost always better to not - // expose an incomplete result and instead disregard the returned - // MetricFamily protobufs in case the returned error is non-nil. - // - // Important: done is expected to be triggered (even if the error occurs!) - // once caller does not need returned slice of dto.MetricFamily. - Gather() (_ []*dto.MetricFamily, done func(), err error) -} - -// ToTransactionalGatherer transforms Gatherer to transactional one with noop as done function. -func ToTransactionalGatherer(g Gatherer) TransactionalGatherer { - return &noTransactionGatherer{g: g} -} - -type noTransactionGatherer struct { - g Gatherer -} - -// Gather implements TransactionalGatherer interface. -func (g *noTransactionGatherer) Gather() (_ []*dto.MetricFamily, done func(), err error) { - mfs, err := g.g.Gather() - return mfs, func() {}, err -} diff --git a/vendor/github.com/prometheus/client_golang/prometheus/summary.go b/vendor/github.com/prometheus/client_golang/prometheus/summary.go deleted file mode 100644 index 146270444..000000000 --- a/vendor/github.com/prometheus/client_golang/prometheus/summary.go +++ /dev/null @@ -1,785 +0,0 @@ -// Copyright 2014 The Prometheus Authors -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package prometheus - -import ( - "fmt" - "math" - "runtime" - "sort" - "sync" - "sync/atomic" - "time" - - dto "github.com/prometheus/client_model/go" - - "github.com/beorn7/perks/quantile" - "google.golang.org/protobuf/proto" - "google.golang.org/protobuf/types/known/timestamppb" -) - -// quantileLabel is used for the label that defines the quantile in a -// summary. -const quantileLabel = "quantile" - -// A Summary captures individual observations from an event or sample stream and -// summarizes them in a manner similar to traditional summary statistics: 1. sum -// of observations, 2. observation count, 3. rank estimations. -// -// A typical use-case is the observation of request latencies. By default, a -// Summary provides the median, the 90th and the 99th percentile of the latency -// as rank estimations. However, the default behavior will change in the -// upcoming v1.0.0 of the library. There will be no rank estimations at all by -// default. For a sane transition, it is recommended to set the desired rank -// estimations explicitly. -// -// Note that the rank estimations cannot be aggregated in a meaningful way with -// the Prometheus query language (i.e. you cannot average or add them). If you -// need aggregatable quantiles (e.g. you want the 99th percentile latency of all -// queries served across all instances of a service), consider the Histogram -// metric type. See the Prometheus documentation for more details. -// -// To create Summary instances, use NewSummary. -type Summary interface { - Metric - Collector - - // Observe adds a single observation to the summary. Observations are - // usually positive or zero. 
Negative observations are accepted but - // prevent current versions of Prometheus from properly detecting - // counter resets in the sum of observations. See - // https://prometheus.io/docs/practices/histograms/#count-and-sum-of-observations - // for details. - Observe(float64) -} - -var errQuantileLabelNotAllowed = fmt.Errorf( - "%q is not allowed as label name in summaries", quantileLabel, -) - -// Default values for SummaryOpts. -const ( - // DefMaxAge is the default duration for which observations stay - // relevant. - DefMaxAge time.Duration = 10 * time.Minute - // DefAgeBuckets is the default number of buckets used to calculate the - // age of observations. - DefAgeBuckets = 5 - // DefBufCap is the standard buffer size for collecting Summary observations. - DefBufCap = 500 -) - -// SummaryOpts bundles the options for creating a Summary metric. It is -// mandatory to set Name to a non-empty string. While all other fields are -// optional and can safely be left at their zero value, it is recommended to set -// a help string and to explicitly set the Objectives field to the desired value -// as the default value will change in the upcoming v1.0.0 of the library. -type SummaryOpts struct { - // Namespace, Subsystem, and Name are components of the fully-qualified - // name of the Summary (created by joining these components with - // "_"). Only Name is mandatory, the others merely help structuring the - // name. Note that the fully-qualified name of the Summary must be a - // valid Prometheus metric name. - Namespace string - Subsystem string - Name string - - // Help provides information about this Summary. - // - // Metrics with the same fully-qualified name must have the same Help - // string. - Help string - - // ConstLabels are used to attach fixed labels to this metric. Metrics - // with the same fully-qualified name must have the same label names in - // their ConstLabels. - // - // Due to the way a Summary is represented in the Prometheus text format - // and how it is handled by the Prometheus server internally, “quantile” - // is an illegal label name. Construction of a Summary or SummaryVec - // will panic if this label name is used in ConstLabels. - // - // ConstLabels are only used rarely. In particular, do not use them to - // attach the same labels to all your metrics. Those use cases are - // better covered by target labels set by the scraping Prometheus - // server, or by one specific metric (e.g. a build_info or a - // machine_role metric). See also - // https://prometheus.io/docs/instrumenting/writing_exporters/#target-labels-not-static-scraped-labels - ConstLabels Labels - - // Objectives defines the quantile rank estimates with their respective - // absolute error. If Objectives[q] = e, then the value reported for q - // will be the φ-quantile value for some φ between q-e and q+e. The - // default value is an empty map, resulting in a summary without - // quantiles. - Objectives map[float64]float64 - - // MaxAge defines the duration for which an observation stays relevant - // for the summary. Only applies to pre-calculated quantiles, does not - // apply to _sum and _count. Must be positive. The default value is - // DefMaxAge. - MaxAge time.Duration - - // AgeBuckets is the number of buckets used to exclude observations that - // are older than MaxAge from the summary. A higher number has a - // resource penalty, so only increase it if the higher resolution is - // really required. 
For very high observation rates, you might want to - // reduce the number of age buckets. With only one age bucket, you will - // effectively see a complete reset of the summary each time MaxAge has - // passed. The default value is DefAgeBuckets. - AgeBuckets uint32 - - // BufCap defines the default sample stream buffer size. The default - // value of DefBufCap should suffice for most uses. If there is a need - // to increase the value, a multiple of 500 is recommended (because that - // is the internal buffer size of the underlying package - // "github.com/bmizerany/perks/quantile"). - BufCap uint32 - - // now is for testing purposes, by default it's time.Now. - now func() time.Time -} - -// SummaryVecOpts bundles the options to create a SummaryVec metric. -// It is mandatory to set SummaryOpts, see there for mandatory fields. VariableLabels -// is optional and can safely be left to its default value. -type SummaryVecOpts struct { - SummaryOpts - - // VariableLabels are used to partition the metric vector by the given set - // of labels. Each label value will be constrained with the optional Constraint - // function, if provided. - VariableLabels ConstrainableLabels -} - -// Problem with the sliding-window decay algorithm... The Merge method of -// perk/quantile is actually not working as advertised - and it might be -// unfixable, as the underlying algorithm is apparently not capable of merging -// summaries in the first place. To avoid using Merge, we are currently adding -// observations to _each_ age bucket, i.e. the effort to add a sample is -// essentially multiplied by the number of age buckets. When rotating age -// buckets, we empty the previous head stream. On scrape time, we simply take -// the quantiles from the head stream (no merging required). Result: More effort -// on observation time, less effort on scrape time, which is exactly the -// opposite of what we try to accomplish, but at least the results are correct. -// -// The quite elegant previous contraption to merge the age buckets efficiently -// on scrape time (see code up commit 6b9530d72ea715f0ba612c0120e6e09fbf1d49d0) -// can't be used anymore. - -// NewSummary creates a new Summary based on the provided SummaryOpts. -func NewSummary(opts SummaryOpts) Summary { - return newSummary( - NewDesc( - BuildFQName(opts.Namespace, opts.Subsystem, opts.Name), - opts.Help, - nil, - opts.ConstLabels, - ), - opts, - ) -} - -func newSummary(desc *Desc, opts SummaryOpts, labelValues ...string) Summary { - if len(desc.variableLabels.names) != len(labelValues) { - panic(makeInconsistentCardinalityError(desc.fqName, desc.variableLabels.names, labelValues)) - } - - for _, n := range desc.variableLabels.names { - if n == quantileLabel { - panic(errQuantileLabelNotAllowed) - } - } - for _, lp := range desc.constLabelPairs { - if lp.GetName() == quantileLabel { - panic(errQuantileLabelNotAllowed) - } - } - - if opts.Objectives == nil { - opts.Objectives = map[float64]float64{} - } - - if opts.MaxAge < 0 { - panic(fmt.Errorf("illegal max age MaxAge=%v", opts.MaxAge)) - } - if opts.MaxAge == 0 { - opts.MaxAge = DefMaxAge - } - - if opts.AgeBuckets == 0 { - opts.AgeBuckets = DefAgeBuckets - } - - if opts.BufCap == 0 { - opts.BufCap = DefBufCap - } - - if opts.now == nil { - opts.now = time.Now - } - if len(opts.Objectives) == 0 { - // Use the lock-free implementation of a Summary without objectives. 
- s := &noObjectivesSummary{ - desc: desc, - labelPairs: MakeLabelPairs(desc, labelValues), - counts: [2]*summaryCounts{{}, {}}, - } - s.init(s) // Init self-collection. - s.createdTs = timestamppb.New(opts.now()) - return s - } - - s := &summary{ - desc: desc, - - objectives: opts.Objectives, - sortedObjectives: make([]float64, 0, len(opts.Objectives)), - - labelPairs: MakeLabelPairs(desc, labelValues), - - hotBuf: make([]float64, 0, opts.BufCap), - coldBuf: make([]float64, 0, opts.BufCap), - streamDuration: opts.MaxAge / time.Duration(opts.AgeBuckets), - } - s.headStreamExpTime = opts.now().Add(s.streamDuration) - s.hotBufExpTime = s.headStreamExpTime - - for i := uint32(0); i < opts.AgeBuckets; i++ { - s.streams = append(s.streams, s.newStream()) - } - s.headStream = s.streams[0] - - for qu := range s.objectives { - s.sortedObjectives = append(s.sortedObjectives, qu) - } - sort.Float64s(s.sortedObjectives) - - s.init(s) // Init self-collection. - s.createdTs = timestamppb.New(opts.now()) - return s -} - -type summary struct { - selfCollector - - bufMtx sync.Mutex // Protects hotBuf and hotBufExpTime. - mtx sync.Mutex // Protects every other moving part. - // Lock bufMtx before mtx if both are needed. - - desc *Desc - - objectives map[float64]float64 - sortedObjectives []float64 - - labelPairs []*dto.LabelPair - - sum float64 - cnt uint64 - - hotBuf, coldBuf []float64 - - streams []*quantile.Stream - streamDuration time.Duration - headStream *quantile.Stream - headStreamIdx int - headStreamExpTime, hotBufExpTime time.Time - - createdTs *timestamppb.Timestamp -} - -func (s *summary) Desc() *Desc { - return s.desc -} - -func (s *summary) Observe(v float64) { - s.bufMtx.Lock() - defer s.bufMtx.Unlock() - - now := time.Now() - if now.After(s.hotBufExpTime) { - s.asyncFlush(now) - } - s.hotBuf = append(s.hotBuf, v) - if len(s.hotBuf) == cap(s.hotBuf) { - s.asyncFlush(now) - } -} - -func (s *summary) Write(out *dto.Metric) error { - sum := &dto.Summary{ - CreatedTimestamp: s.createdTs, - } - qs := make([]*dto.Quantile, 0, len(s.objectives)) - - s.bufMtx.Lock() - s.mtx.Lock() - // Swap bufs even if hotBuf is empty to set new hotBufExpTime. - s.swapBufs(time.Now()) - s.bufMtx.Unlock() - - s.flushColdBuf() - sum.SampleCount = proto.Uint64(s.cnt) - sum.SampleSum = proto.Float64(s.sum) - - for _, rank := range s.sortedObjectives { - var q float64 - if s.headStream.Count() == 0 { - q = math.NaN() - } else { - q = s.headStream.Query(rank) - } - qs = append(qs, &dto.Quantile{ - Quantile: proto.Float64(rank), - Value: proto.Float64(q), - }) - } - - s.mtx.Unlock() - - if len(qs) > 0 { - sort.Sort(quantSort(qs)) - } - sum.Quantile = qs - - out.Summary = sum - out.Label = s.labelPairs - return nil -} - -func (s *summary) newStream() *quantile.Stream { - return quantile.NewTargeted(s.objectives) -} - -// asyncFlush needs bufMtx locked. -func (s *summary) asyncFlush(now time.Time) { - s.mtx.Lock() - s.swapBufs(now) - - // Unblock the original goroutine that was responsible for the mutation - // that triggered the compaction. But hold onto the global non-buffer - // state mutex until the operation finishes. - go func() { - s.flushColdBuf() - s.mtx.Unlock() - }() -} - -// rotateStreams needs mtx AND bufMtx locked. 
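Aside (illustrative, not part of the vendored file): because the default Objectives map is empty, a summary reports no quantiles unless they are set explicitly, as the SummaryOpts documentation above notes. The metric name, objectives, and observed value below are assumptions for illustration.

    latency := prometheus.NewSummary(prometheus.SummaryOpts{
        Name:       "myapp_request_duration_seconds",
        Help:       "Request latency in seconds.",
        Objectives: map[float64]float64{0.5: 0.05, 0.9: 0.01, 0.99: 0.001},
        MaxAge:     prometheus.DefMaxAge, // the 10-minute default from the constants above
    })
    prometheus.MustRegister(latency)

    latency.Observe(0.042)
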
-func (s *summary) maybeRotateStreams() { - for !s.hotBufExpTime.Equal(s.headStreamExpTime) { - s.headStream.Reset() - s.headStreamIdx++ - if s.headStreamIdx >= len(s.streams) { - s.headStreamIdx = 0 - } - s.headStream = s.streams[s.headStreamIdx] - s.headStreamExpTime = s.headStreamExpTime.Add(s.streamDuration) - } -} - -// flushColdBuf needs mtx locked. -func (s *summary) flushColdBuf() { - for _, v := range s.coldBuf { - for _, stream := range s.streams { - stream.Insert(v) - } - s.cnt++ - s.sum += v - } - s.coldBuf = s.coldBuf[0:0] - s.maybeRotateStreams() -} - -// swapBufs needs mtx AND bufMtx locked, coldBuf must be empty. -func (s *summary) swapBufs(now time.Time) { - if len(s.coldBuf) != 0 { - panic("coldBuf is not empty") - } - s.hotBuf, s.coldBuf = s.coldBuf, s.hotBuf - // hotBuf is now empty and gets new expiration set. - for now.After(s.hotBufExpTime) { - s.hotBufExpTime = s.hotBufExpTime.Add(s.streamDuration) - } -} - -type summaryCounts struct { - // sumBits contains the bits of the float64 representing the sum of all - // observations. sumBits and count have to go first in the struct to - // guarantee alignment for atomic operations. - // http://golang.org/pkg/sync/atomic/#pkg-note-BUG - sumBits uint64 - count uint64 -} - -type noObjectivesSummary struct { - // countAndHotIdx enables lock-free writes with use of atomic updates. - // The most significant bit is the hot index [0 or 1] of the count field - // below. Observe calls update the hot one. All remaining bits count the - // number of Observe calls. Observe starts by incrementing this counter, - // and finish by incrementing the count field in the respective - // summaryCounts, as a marker for completion. - // - // Calls of the Write method (which are non-mutating reads from the - // perspective of the summary) swap the hot–cold under the writeMtx - // lock. A cooldown is awaited (while locked) by comparing the number of - // observations with the initiation count. Once they match, then the - // last observation on the now cool one has completed. All cool fields must - // be merged into the new hot before releasing writeMtx. - - // Fields with atomic access first! See alignment constraint: - // http://golang.org/pkg/sync/atomic/#pkg-note-BUG - countAndHotIdx uint64 - - selfCollector - desc *Desc - writeMtx sync.Mutex // Only used in the Write method. - - // Two counts, one is "hot" for lock-free observations, the other is - // "cold" for writing out a dto.Metric. It has to be an array of - // pointers to guarantee 64bit alignment of the histogramCounts, see - // http://golang.org/pkg/sync/atomic/#pkg-note-BUG. - counts [2]*summaryCounts - - labelPairs []*dto.LabelPair - - createdTs *timestamppb.Timestamp -} - -func (s *noObjectivesSummary) Desc() *Desc { - return s.desc -} - -func (s *noObjectivesSummary) Observe(v float64) { - // We increment h.countAndHotIdx so that the counter in the lower - // 63 bits gets incremented. At the same time, we get the new value - // back, which we can use to find the currently-hot counts. - n := atomic.AddUint64(&s.countAndHotIdx, 1) - hotCounts := s.counts[n>>63] - - for { - oldBits := atomic.LoadUint64(&hotCounts.sumBits) - newBits := math.Float64bits(math.Float64frombits(oldBits) + v) - if atomic.CompareAndSwapUint64(&hotCounts.sumBits, oldBits, newBits) { - break - } - } - // Increment count last as we take it as a signal that the observation - // is complete. 
- atomic.AddUint64(&hotCounts.count, 1) -} - -func (s *noObjectivesSummary) Write(out *dto.Metric) error { - // For simplicity, we protect this whole method by a mutex. It is not in - // the hot path, i.e. Observe is called much more often than Write. The - // complication of making Write lock-free isn't worth it, if possible at - // all. - s.writeMtx.Lock() - defer s.writeMtx.Unlock() - - // Adding 1<<63 switches the hot index (from 0 to 1 or from 1 to 0) - // without touching the count bits. See the struct comments for a full - // description of the algorithm. - n := atomic.AddUint64(&s.countAndHotIdx, 1<<63) - // count is contained unchanged in the lower 63 bits. - count := n & ((1 << 63) - 1) - // The most significant bit tells us which counts is hot. The complement - // is thus the cold one. - hotCounts := s.counts[n>>63] - coldCounts := s.counts[(^n)>>63] - - // Await cooldown. - for count != atomic.LoadUint64(&coldCounts.count) { - runtime.Gosched() // Let observations get work done. - } - - sum := &dto.Summary{ - SampleCount: proto.Uint64(count), - SampleSum: proto.Float64(math.Float64frombits(atomic.LoadUint64(&coldCounts.sumBits))), - CreatedTimestamp: s.createdTs, - } - - out.Summary = sum - out.Label = s.labelPairs - - // Finally add all the cold counts to the new hot counts and reset the cold counts. - atomic.AddUint64(&hotCounts.count, count) - atomic.StoreUint64(&coldCounts.count, 0) - for { - oldBits := atomic.LoadUint64(&hotCounts.sumBits) - newBits := math.Float64bits(math.Float64frombits(oldBits) + sum.GetSampleSum()) - if atomic.CompareAndSwapUint64(&hotCounts.sumBits, oldBits, newBits) { - atomic.StoreUint64(&coldCounts.sumBits, 0) - break - } - } - return nil -} - -type quantSort []*dto.Quantile - -func (s quantSort) Len() int { - return len(s) -} - -func (s quantSort) Swap(i, j int) { - s[i], s[j] = s[j], s[i] -} - -func (s quantSort) Less(i, j int) bool { - return s[i].GetQuantile() < s[j].GetQuantile() -} - -// SummaryVec is a Collector that bundles a set of Summaries that all share the -// same Desc, but have different values for their variable labels. This is used -// if you want to count the same thing partitioned by various dimensions -// (e.g. HTTP request latencies, partitioned by status code and method). Create -// instances with NewSummaryVec. -type SummaryVec struct { - *MetricVec -} - -// NewSummaryVec creates a new SummaryVec based on the provided SummaryOpts and -// partitioned by the given label names. -// -// Due to the way a Summary is represented in the Prometheus text format and how -// it is handled by the Prometheus server internally, “quantile” is an illegal -// label name. NewSummaryVec will panic if this label name is used. -func NewSummaryVec(opts SummaryOpts, labelNames []string) *SummaryVec { - return V2.NewSummaryVec(SummaryVecOpts{ - SummaryOpts: opts, - VariableLabels: UnconstrainedLabels(labelNames), - }) -} - -// NewSummaryVec creates a new SummaryVec based on the provided SummaryVecOpts. -func (v2) NewSummaryVec(opts SummaryVecOpts) *SummaryVec { - for _, ln := range opts.VariableLabels.labelNames() { - if ln == quantileLabel { - panic(errQuantileLabelNotAllowed) - } - } - desc := V2.NewDesc( - BuildFQName(opts.Namespace, opts.Subsystem, opts.Name), - opts.Help, - opts.VariableLabels, - opts.ConstLabels, - ) - return &SummaryVec{ - MetricVec: NewMetricVec(desc, func(lvs ...string) Metric { - return newSummary(desc, opts.SummaryOpts, lvs...) 
- }), - } -} - -// GetMetricWithLabelValues returns the Summary for the given slice of label -// values (same order as the variable labels in Desc). If that combination of -// label values is accessed for the first time, a new Summary is created. -// -// It is possible to call this method without using the returned Summary to only -// create the new Summary but leave it at its starting value, a Summary without -// any observations. -// -// Keeping the Summary for later use is possible (and should be considered if -// performance is critical), but keep in mind that Reset, DeleteLabelValues and -// Delete can be used to delete the Summary from the SummaryVec. In that case, -// the Summary will still exist, but it will not be exported anymore, even if a -// Summary with the same label values is created later. See also the CounterVec -// example. -// -// An error is returned if the number of label values is not the same as the -// number of variable labels in Desc (minus any curried labels). -// -// Note that for more than one label value, this method is prone to mistakes -// caused by an incorrect order of arguments. Consider GetMetricWith(Labels) as -// an alternative to avoid that type of mistake. For higher label numbers, the -// latter has a much more readable (albeit more verbose) syntax, but it comes -// with a performance overhead (for creating and processing the Labels map). -// See also the GaugeVec example. -func (v *SummaryVec) GetMetricWithLabelValues(lvs ...string) (Observer, error) { - metric, err := v.MetricVec.GetMetricWithLabelValues(lvs...) - if metric != nil { - return metric.(Observer), err - } - return nil, err -} - -// GetMetricWith returns the Summary for the given Labels map (the label names -// must match those of the variable labels in Desc). If that label map is -// accessed for the first time, a new Summary is created. Implications of -// creating a Summary without using it and keeping the Summary for later use are -// the same as for GetMetricWithLabelValues. -// -// An error is returned if the number and names of the Labels are inconsistent -// with those of the variable labels in Desc (minus any curried labels). -// -// This method is used for the same purpose as -// GetMetricWithLabelValues(...string). See there for pros and cons of the two -// methods. -func (v *SummaryVec) GetMetricWith(labels Labels) (Observer, error) { - metric, err := v.MetricVec.GetMetricWith(labels) - if metric != nil { - return metric.(Observer), err - } - return nil, err -} - -// WithLabelValues works as GetMetricWithLabelValues, but panics where -// GetMetricWithLabelValues would have returned an error. Not returning an -// error allows shortcuts like -// -// myVec.WithLabelValues("404", "GET").Observe(42.21) -func (v *SummaryVec) WithLabelValues(lvs ...string) Observer { - s, err := v.GetMetricWithLabelValues(lvs...) - if err != nil { - panic(err) - } - return s -} - -// With works as GetMetricWith, but panics where GetMetricWithLabels would have -// returned an error. Not returning an error allows shortcuts like -// -// myVec.With(prometheus.Labels{"code": "404", "method": "GET"}).Observe(42.21) -func (v *SummaryVec) With(labels Labels) Observer { - s, err := v.GetMetricWith(labels) - if err != nil { - panic(err) - } - return s -} - -// CurryWith returns a vector curried with the provided labels, i.e. the -// returned vector has those labels pre-set for all labeled operations performed -// on it. The cardinality of the curried vector is reduced accordingly. 
The -// order of the remaining labels stays the same (just with the curried labels -// taken out of the sequence – which is relevant for the -// (GetMetric)WithLabelValues methods). It is possible to curry a curried -// vector, but only with labels not yet used for currying before. -// -// The metrics contained in the SummaryVec are shared between the curried and -// uncurried vectors. They are just accessed differently. Curried and uncurried -// vectors behave identically in terms of collection. Only one must be -// registered with a given registry (usually the uncurried version). The Reset -// method deletes all metrics, even if called on a curried vector. -func (v *SummaryVec) CurryWith(labels Labels) (ObserverVec, error) { - vec, err := v.MetricVec.CurryWith(labels) - if vec != nil { - return &SummaryVec{vec}, err - } - return nil, err -} - -// MustCurryWith works as CurryWith but panics where CurryWith would have -// returned an error. -func (v *SummaryVec) MustCurryWith(labels Labels) ObserverVec { - vec, err := v.CurryWith(labels) - if err != nil { - panic(err) - } - return vec -} - -type constSummary struct { - desc *Desc - count uint64 - sum float64 - quantiles map[float64]float64 - labelPairs []*dto.LabelPair - createdTs *timestamppb.Timestamp -} - -func (s *constSummary) Desc() *Desc { - return s.desc -} - -func (s *constSummary) Write(out *dto.Metric) error { - sum := &dto.Summary{ - CreatedTimestamp: s.createdTs, - } - qs := make([]*dto.Quantile, 0, len(s.quantiles)) - - sum.SampleCount = proto.Uint64(s.count) - sum.SampleSum = proto.Float64(s.sum) - - for rank, q := range s.quantiles { - qs = append(qs, &dto.Quantile{ - Quantile: proto.Float64(rank), - Value: proto.Float64(q), - }) - } - - if len(qs) > 0 { - sort.Sort(quantSort(qs)) - } - sum.Quantile = qs - - out.Summary = sum - out.Label = s.labelPairs - - return nil -} - -// NewConstSummary returns a metric representing a Prometheus summary with fixed -// values for the count, sum, and quantiles. As those parameters cannot be -// changed, the returned value does not implement the Summary interface (but -// only the Metric interface). Users of this package will not have much use for -// it in regular operations. However, when implementing custom Collectors, it is -// useful as a throw-away metric that is generated on the fly to send it to -// Prometheus in the Collect method. -// -// quantiles maps ranks to quantile values. For example, a median latency of -// 0.23s and a 99th percentile latency of 0.56s would be expressed as: -// -// map[float64]float64{0.5: 0.23, 0.99: 0.56} -// -// NewConstSummary returns an error if the length of labelValues is not -// consistent with the variable labels in Desc or if Desc is invalid. -func NewConstSummary( - desc *Desc, - count uint64, - sum float64, - quantiles map[float64]float64, - labelValues ...string, -) (Metric, error) { - if desc.err != nil { - return nil, desc.err - } - if err := validateLabelValues(labelValues, len(desc.variableLabels.names)); err != nil { - return nil, err - } - return &constSummary{ - desc: desc, - count: count, - sum: sum, - quantiles: quantiles, - labelPairs: MakeLabelPairs(desc, labelValues), - }, nil -} - -// MustNewConstSummary is a version of NewConstSummary that panics where -// NewConstMetric would have returned an error. -func MustNewConstSummary( - desc *Desc, - count uint64, - sum float64, - quantiles map[float64]float64, - labelValues ...string, -) Metric { - m, err := NewConstSummary(desc, count, sum, quantiles, labelValues...) 
- if err != nil { - panic(err) - } - return m -} diff --git a/vendor/github.com/prometheus/client_golang/prometheus/testutil/promlint/problem.go b/vendor/github.com/prometheus/client_golang/prometheus/testutil/promlint/problem.go deleted file mode 100644 index 9ba42826a..000000000 --- a/vendor/github.com/prometheus/client_golang/prometheus/testutil/promlint/problem.go +++ /dev/null @@ -1,33 +0,0 @@ -// Copyright 2020 The Prometheus Authors -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package promlint - -import dto "github.com/prometheus/client_model/go" - -// A Problem is an issue detected by a linter. -type Problem struct { - // The name of the metric indicated by this Problem. - Metric string - - // A description of the issue for this Problem. - Text string -} - -// newProblem is helper function to create a Problem. -func newProblem(mf *dto.MetricFamily, text string) Problem { - return Problem{ - Metric: mf.GetName(), - Text: text, - } -} diff --git a/vendor/github.com/prometheus/client_golang/prometheus/testutil/promlint/promlint.go b/vendor/github.com/prometheus/client_golang/prometheus/testutil/promlint/promlint.go deleted file mode 100644 index ea46f38ec..000000000 --- a/vendor/github.com/prometheus/client_golang/prometheus/testutil/promlint/promlint.go +++ /dev/null @@ -1,123 +0,0 @@ -// Copyright 2020 The Prometheus Authors -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -// Package promlint provides a linter for Prometheus metrics. -package promlint - -import ( - "errors" - "io" - "sort" - - dto "github.com/prometheus/client_model/go" - "github.com/prometheus/common/expfmt" -) - -// A Linter is a Prometheus metrics linter. It identifies issues with metric -// names, types, and metadata, and reports them to the caller. -type Linter struct { - // The linter will read metrics in the Prometheus text format from r and - // then lint it, _and_ it will lint the metrics provided directly as - // MetricFamily proto messages in mfs. Note, however, that the current - // constructor functions New and NewWithMetricFamilies only ever set one - // of them. - r io.Reader - mfs []*dto.MetricFamily - - customValidations []Validation -} - -// New creates a new Linter that reads an input stream of Prometheus metrics in -// the Prometheus text exposition format. -func New(r io.Reader) *Linter { - return &Linter{ - r: r, - } -} - -// NewWithMetricFamilies creates a new Linter that reads from a slice of -// MetricFamily protobuf messages. 
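A minimal sketch of driving the promlint Linter shown in this file, assuming the metrics are supplied as a text exposition format string; the input and the way problems are printed are illustrative:

	package main

	import (
		"fmt"
		"strings"

		"github.com/prometheus/client_golang/prometheus/testutil/promlint"
	)

	func main() {
		// Deliberately questionable metric: camelCase name, counter without _total.
		const metrics = "# HELP requestDuration Duration of requests.\n" +
			"# TYPE requestDuration counter\n" +
			"requestDuration 42\n"

		l := promlint.New(strings.NewReader(metrics))
		problems, err := l.Lint()
		if err != nil {
			panic(err)
		}
		for _, p := range problems {
			fmt.Printf("%s: %s\n", p.Metric, p.Text)
		}
	}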
-func NewWithMetricFamilies(mfs []*dto.MetricFamily) *Linter { - return &Linter{ - mfs: mfs, - } -} - -// AddCustomValidations adds custom validations to the linter. -func (l *Linter) AddCustomValidations(vs ...Validation) { - if l.customValidations == nil { - l.customValidations = make([]Validation, 0, len(vs)) - } - l.customValidations = append(l.customValidations, vs...) -} - -// Lint performs a linting pass, returning a slice of Problems indicating any -// issues found in the metrics stream. The slice is sorted by metric name -// and issue description. -func (l *Linter) Lint() ([]Problem, error) { - var problems []Problem - - if l.r != nil { - d := expfmt.NewDecoder(l.r, expfmt.NewFormat(expfmt.TypeTextPlain)) - - mf := &dto.MetricFamily{} - for { - if err := d.Decode(mf); err != nil { - if errors.Is(err, io.EOF) { - break - } - - return nil, err - } - - problems = append(problems, l.lint(mf)...) - } - } - for _, mf := range l.mfs { - problems = append(problems, l.lint(mf)...) - } - - // Ensure deterministic output. - sort.SliceStable(problems, func(i, j int) bool { - if problems[i].Metric == problems[j].Metric { - return problems[i].Text < problems[j].Text - } - return problems[i].Metric < problems[j].Metric - }) - - return problems, nil -} - -// lint is the entry point for linting a single metric. -func (l *Linter) lint(mf *dto.MetricFamily) []Problem { - var problems []Problem - - for _, fn := range defaultValidations { - errs := fn(mf) - for _, err := range errs { - problems = append(problems, newProblem(mf, err.Error())) - } - } - - if l.customValidations != nil { - for _, fn := range l.customValidations { - errs := fn(mf) - for _, err := range errs { - problems = append(problems, newProblem(mf, err.Error())) - } - } - } - - // TODO(mdlayher): lint rules for specific metrics types. - return problems -} diff --git a/vendor/github.com/prometheus/client_golang/prometheus/testutil/promlint/validation.go b/vendor/github.com/prometheus/client_golang/prometheus/testutil/promlint/validation.go deleted file mode 100644 index f52ad9eab..000000000 --- a/vendor/github.com/prometheus/client_golang/prometheus/testutil/promlint/validation.go +++ /dev/null @@ -1,33 +0,0 @@ -// Copyright 2020 The Prometheus Authors -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package promlint - -import ( - dto "github.com/prometheus/client_model/go" - - "github.com/prometheus/client_golang/prometheus/testutil/promlint/validations" -) - -type Validation = func(mf *dto.MetricFamily) []error - -var defaultValidations = []Validation{ - validations.LintHelp, - validations.LintMetricUnits, - validations.LintCounter, - validations.LintHistogramSummaryReserved, - validations.LintMetricTypeInName, - validations.LintReservedChars, - validations.LintCamelCase, - validations.LintUnitAbbreviations, -} diff --git a/vendor/github.com/prometheus/client_golang/prometheus/testutil/promlint/validations/counter_validations.go b/vendor/github.com/prometheus/client_golang/prometheus/testutil/promlint/validations/counter_validations.go deleted file mode 100644 index f2c2c3905..000000000 --- a/vendor/github.com/prometheus/client_golang/prometheus/testutil/promlint/validations/counter_validations.go +++ /dev/null @@ -1,40 +0,0 @@ -// Copyright 2020 The Prometheus Authors -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package validations - -import ( - "errors" - "strings" - - dto "github.com/prometheus/client_model/go" -) - -// LintCounter detects issues specific to counters, as well as patterns that should -// only be used with counters. -func LintCounter(mf *dto.MetricFamily) []error { - var problems []error - - isCounter := mf.GetType() == dto.MetricType_COUNTER - isUntyped := mf.GetType() == dto.MetricType_UNTYPED - hasTotalSuffix := strings.HasSuffix(mf.GetName(), "_total") - - switch { - case isCounter && !hasTotalSuffix: - problems = append(problems, errors.New(`counter metrics should have "_total" suffix`)) - case !isUntyped && !isCounter && hasTotalSuffix: - problems = append(problems, errors.New(`non-counter metrics should not have "_total" suffix`)) - } - - return problems -} diff --git a/vendor/github.com/prometheus/client_golang/prometheus/testutil/promlint/validations/generic_name_validations.go b/vendor/github.com/prometheus/client_golang/prometheus/testutil/promlint/validations/generic_name_validations.go deleted file mode 100644 index bc8dbd1e1..000000000 --- a/vendor/github.com/prometheus/client_golang/prometheus/testutil/promlint/validations/generic_name_validations.go +++ /dev/null @@ -1,101 +0,0 @@ -// Copyright 2020 The Prometheus Authors -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package validations - -import ( - "errors" - "fmt" - "regexp" - "strings" - - dto "github.com/prometheus/client_model/go" -) - -var camelCase = regexp.MustCompile(`[a-z][A-Z]`) - -// LintMetricUnits detects issues with metric unit names. -func LintMetricUnits(mf *dto.MetricFamily) []error { - var problems []error - - unit, base, ok := metricUnits(*mf.Name) - if !ok { - // No known units detected. - return nil - } - - // Unit is already a base unit. - if unit == base { - return nil - } - - problems = append(problems, fmt.Errorf("use base unit %q instead of %q", base, unit)) - - return problems -} - -// LintMetricTypeInName detects when metric types are included in the metric name. -func LintMetricTypeInName(mf *dto.MetricFamily) []error { - var problems []error - n := strings.ToLower(mf.GetName()) - - for i, t := range dto.MetricType_name { - if i == int32(dto.MetricType_UNTYPED) { - continue - } - - typename := strings.ToLower(t) - if strings.Contains(n, "_"+typename+"_") || strings.HasSuffix(n, "_"+typename) { - problems = append(problems, fmt.Errorf(`metric name should not include type '%s'`, typename)) - } - } - return problems -} - -// LintReservedChars detects colons in metric names. -func LintReservedChars(mf *dto.MetricFamily) []error { - var problems []error - if strings.Contains(mf.GetName(), ":") { - problems = append(problems, errors.New("metric names should not contain ':'")) - } - return problems -} - -// LintCamelCase detects metric names and label names written in camelCase. -func LintCamelCase(mf *dto.MetricFamily) []error { - var problems []error - if camelCase.FindString(mf.GetName()) != "" { - problems = append(problems, errors.New("metric names should be written in 'snake_case' not 'camelCase'")) - } - - for _, m := range mf.GetMetric() { - for _, l := range m.GetLabel() { - if camelCase.FindString(l.GetName()) != "" { - problems = append(problems, errors.New("label names should be written in 'snake_case' not 'camelCase'")) - } - } - } - return problems -} - -// LintUnitAbbreviations detects abbreviated units in the metric name. -func LintUnitAbbreviations(mf *dto.MetricFamily) []error { - var problems []error - n := strings.ToLower(mf.GetName()) - for _, s := range unitAbbreviations { - if strings.Contains(n, "_"+s+"_") || strings.HasSuffix(n, "_"+s) { - problems = append(problems, errors.New("metric names should not contain abbreviated units")) - } - } - return problems -} diff --git a/vendor/github.com/prometheus/client_golang/prometheus/testutil/promlint/validations/help_validations.go b/vendor/github.com/prometheus/client_golang/prometheus/testutil/promlint/validations/help_validations.go deleted file mode 100644 index 1df294468..000000000 --- a/vendor/github.com/prometheus/client_golang/prometheus/testutil/promlint/validations/help_validations.go +++ /dev/null @@ -1,32 +0,0 @@ -// Copyright 2020 The Prometheus Authors -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package validations - -import ( - "errors" - - dto "github.com/prometheus/client_model/go" -) - -// LintHelp detects issues related to the help text for a metric. -func LintHelp(mf *dto.MetricFamily) []error { - var problems []error - - // Expect all metrics to have help text available. - if mf.Help == nil { - problems = append(problems, errors.New("no help text")) - } - - return problems -} diff --git a/vendor/github.com/prometheus/client_golang/prometheus/testutil/promlint/validations/histogram_validations.go b/vendor/github.com/prometheus/client_golang/prometheus/testutil/promlint/validations/histogram_validations.go deleted file mode 100644 index 6564bdf36..000000000 --- a/vendor/github.com/prometheus/client_golang/prometheus/testutil/promlint/validations/histogram_validations.go +++ /dev/null @@ -1,63 +0,0 @@ -// Copyright 2020 The Prometheus Authors -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package validations - -import ( - "errors" - "strings" - - dto "github.com/prometheus/client_model/go" -) - -// LintHistogramSummaryReserved detects when other types of metrics use names or labels -// reserved for use by histograms and/or summaries. -func LintHistogramSummaryReserved(mf *dto.MetricFamily) []error { - // These rules do not apply to untyped metrics. - t := mf.GetType() - if t == dto.MetricType_UNTYPED { - return nil - } - - var problems []error - - isHistogram := t == dto.MetricType_HISTOGRAM - isSummary := t == dto.MetricType_SUMMARY - - n := mf.GetName() - - if !isHistogram && strings.HasSuffix(n, "_bucket") { - problems = append(problems, errors.New(`non-histogram metrics should not have "_bucket" suffix`)) - } - if !isHistogram && !isSummary && strings.HasSuffix(n, "_count") { - problems = append(problems, errors.New(`non-histogram and non-summary metrics should not have "_count" suffix`)) - } - if !isHistogram && !isSummary && strings.HasSuffix(n, "_sum") { - problems = append(problems, errors.New(`non-histogram and non-summary metrics should not have "_sum" suffix`)) - } - - for _, m := range mf.GetMetric() { - for _, l := range m.GetLabel() { - ln := l.GetName() - - if !isHistogram && ln == "le" { - problems = append(problems, errors.New(`non-histogram metrics should not have "le" label`)) - } - if !isSummary && ln == "quantile" { - problems = append(problems, errors.New(`non-summary metrics should not have "quantile" label`)) - } - } - } - - return problems -} diff --git a/vendor/github.com/prometheus/client_golang/prometheus/testutil/promlint/validations/units.go b/vendor/github.com/prometheus/client_golang/prometheus/testutil/promlint/validations/units.go deleted file mode 100644 index 967977d2b..000000000 --- a/vendor/github.com/prometheus/client_golang/prometheus/testutil/promlint/validations/units.go +++ /dev/null @@ -1,118 +0,0 @@ -// Copyright 2020 The Prometheus Authors -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package validations - -import "strings" - -// Units and their possible prefixes recognized by this library. More can be -// added over time as needed. -var ( - // map a unit to the appropriate base unit. - units = map[string]string{ - // Base units. - "amperes": "amperes", - "bytes": "bytes", - "celsius": "celsius", // Also allow Celsius because it is common in typical Prometheus use cases. - "grams": "grams", - "joules": "joules", - "kelvin": "kelvin", // SI base unit, used in special cases (e.g. color temperature, scientific measurements). - "meters": "meters", // Both American and international spelling permitted. - "metres": "metres", - "seconds": "seconds", - "volts": "volts", - - // Non base units. - // Time. - "minutes": "seconds", - "hours": "seconds", - "days": "seconds", - "weeks": "seconds", - // Temperature. - "kelvins": "kelvin", - "fahrenheit": "celsius", - "rankine": "celsius", - // Length. - "inches": "meters", - "yards": "meters", - "miles": "meters", - // Bytes. - "bits": "bytes", - // Energy. - "calories": "joules", - // Mass. - "pounds": "grams", - "ounces": "grams", - } - - unitPrefixes = []string{ - "pico", - "nano", - "micro", - "milli", - "centi", - "deci", - "deca", - "hecto", - "kilo", - "kibi", - "mega", - "mibi", - "giga", - "gibi", - "tera", - "tebi", - "peta", - "pebi", - } - - // Common abbreviations that we'd like to discourage. - unitAbbreviations = []string{ - "s", - "ms", - "us", - "ns", - "sec", - "b", - "kb", - "mb", - "gb", - "tb", - "pb", - "m", - "h", - "d", - } -) - -// metricUnits attempts to detect known unit types used as part of a metric name, -// e.g. "foo_bytes_total" or "bar_baz_milligrams". -func metricUnits(m string) (unit, base string, ok bool) { - ss := strings.Split(m, "_") - - for _, s := range ss { - if base, found := units[s]; found { - return s, base, true - } - - for _, p := range unitPrefixes { - if strings.HasPrefix(s, p) { - if base, found := units[s[len(p):]]; found { - return s, base, true - } - } - } - } - - return "", "", false -} diff --git a/vendor/github.com/prometheus/client_golang/prometheus/timer.go b/vendor/github.com/prometheus/client_golang/prometheus/timer.go deleted file mode 100644 index 52344fef5..000000000 --- a/vendor/github.com/prometheus/client_golang/prometheus/timer.go +++ /dev/null @@ -1,81 +0,0 @@ -// Copyright 2016 The Prometheus Authors -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package prometheus - -import "time" - -// Timer is a helper type to time functions. Use NewTimer to create new -// instances. 
-type Timer struct { - begin time.Time - observer Observer -} - -// NewTimer creates a new Timer. The provided Observer is used to observe a -// duration in seconds. If the Observer implements ExemplarObserver, passing exemplar -// later on will be also supported. -// Timer is usually used to time a function call in the -// following way: -// -// func TimeMe() { -// timer := NewTimer(myHistogram) -// defer timer.ObserveDuration() -// // Do actual work. -// } -// -// or -// -// func TimeMeWithExemplar() { -// timer := NewTimer(myHistogram) -// defer timer.ObserveDurationWithExemplar(exemplar) -// // Do actual work. -// } -func NewTimer(o Observer) *Timer { - return &Timer{ - begin: time.Now(), - observer: o, - } -} - -// ObserveDuration records the duration passed since the Timer was created with -// NewTimer. It calls the Observe method of the Observer provided during -// construction with the duration in seconds as an argument. The observed -// duration is also returned. ObserveDuration is usually called with a defer -// statement. -// -// Note that this method is only guaranteed to never observe negative durations -// if used with Go1.9+. -func (t *Timer) ObserveDuration() time.Duration { - d := time.Since(t.begin) - if t.observer != nil { - t.observer.Observe(d.Seconds()) - } - return d -} - -// ObserveDurationWithExemplar is like ObserveDuration, but it will also -// observe exemplar with the duration unless exemplar is nil or provided Observer can't -// be casted to ExemplarObserver. -func (t *Timer) ObserveDurationWithExemplar(exemplar Labels) time.Duration { - d := time.Since(t.begin) - eo, ok := t.observer.(ExemplarObserver) - if ok && exemplar != nil { - eo.ObserveWithExemplar(d.Seconds(), exemplar) - return d - } - if t.observer != nil { - t.observer.Observe(d.Seconds()) - } - return d -} diff --git a/vendor/github.com/prometheus/client_golang/prometheus/untyped.go b/vendor/github.com/prometheus/client_golang/prometheus/untyped.go deleted file mode 100644 index 0f9ce63f4..000000000 --- a/vendor/github.com/prometheus/client_golang/prometheus/untyped.go +++ /dev/null @@ -1,42 +0,0 @@ -// Copyright 2014 The Prometheus Authors -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package prometheus - -// UntypedOpts is an alias for Opts. See there for doc comments. -type UntypedOpts Opts - -// UntypedFunc works like GaugeFunc but the collected metric is of type -// "Untyped". UntypedFunc is useful to mirror an external metric of unknown -// type. -// -// To create UntypedFunc instances, use NewUntypedFunc. -type UntypedFunc interface { - Metric - Collector -} - -// NewUntypedFunc creates a new UntypedFunc based on the provided -// UntypedOpts. The value reported is determined by calling the given function -// from within the Write method. Take into account that metric collection may -// happen concurrently. 
If that results in concurrent calls to Write, like in -// the case where an UntypedFunc is directly registered with Prometheus, the -// provided function must be concurrency-safe. -func NewUntypedFunc(opts UntypedOpts, function func() float64) UntypedFunc { - return newValueFunc(NewDesc( - BuildFQName(opts.Namespace, opts.Subsystem, opts.Name), - opts.Help, - nil, - opts.ConstLabels, - ), UntypedValue, function) -} diff --git a/vendor/github.com/prometheus/client_golang/prometheus/value.go b/vendor/github.com/prometheus/client_golang/prometheus/value.go deleted file mode 100644 index cc23011fa..000000000 --- a/vendor/github.com/prometheus/client_golang/prometheus/value.go +++ /dev/null @@ -1,274 +0,0 @@ -// Copyright 2014 The Prometheus Authors -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package prometheus - -import ( - "errors" - "fmt" - "sort" - "time" - "unicode/utf8" - - "github.com/prometheus/client_golang/prometheus/internal" - - dto "github.com/prometheus/client_model/go" - "google.golang.org/protobuf/proto" - "google.golang.org/protobuf/types/known/timestamppb" -) - -// ValueType is an enumeration of metric types that represent a simple value. -type ValueType int - -// Possible values for the ValueType enum. Use UntypedValue to mark a metric -// with an unknown type. -const ( - _ ValueType = iota - CounterValue - GaugeValue - UntypedValue -) - -var ( - CounterMetricTypePtr = func() *dto.MetricType { d := dto.MetricType_COUNTER; return &d }() - GaugeMetricTypePtr = func() *dto.MetricType { d := dto.MetricType_GAUGE; return &d }() - UntypedMetricTypePtr = func() *dto.MetricType { d := dto.MetricType_UNTYPED; return &d }() -) - -func (v ValueType) ToDTO() *dto.MetricType { - switch v { - case CounterValue: - return CounterMetricTypePtr - case GaugeValue: - return GaugeMetricTypePtr - default: - return UntypedMetricTypePtr - } -} - -// valueFunc is a generic metric for simple values retrieved on collect time -// from a function. It implements Metric and Collector. Its effective type is -// determined by ValueType. This is a low-level building block used by the -// library to back the implementations of CounterFunc, GaugeFunc, and -// UntypedFunc. -type valueFunc struct { - selfCollector - - desc *Desc - valType ValueType - function func() float64 - labelPairs []*dto.LabelPair -} - -// newValueFunc returns a newly allocated valueFunc with the given Desc and -// ValueType. The value reported is determined by calling the given function -// from within the Write method. Take into account that metric collection may -// happen concurrently. If that results in concurrent calls to Write, like in -// the case where a valueFunc is directly registered with Prometheus, the -// provided function must be concurrency-safe. 
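A minimal sketch of NewUntypedFunc from above, mirroring an externally owned value; the metric name and the atomic counter it reads are illustrative assumptions:

	package main

	import (
		"sync/atomic"

		"github.com/prometheus/client_golang/prometheus"
	)

	// externalValue stands in for a value owned by some other subsystem.
	var externalValue atomic.Int64

	func main() {
		f := prometheus.NewUntypedFunc(prometheus.UntypedOpts{
			Name: "external_subsystem_items",
			Help: "Current number of items reported by an external subsystem.",
		}, func() float64 {
			// Called on every collection, possibly concurrently, so it must be safe.
			return float64(externalValue.Load())
		})
		prometheus.MustRegister(f)
	}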
-func newValueFunc(desc *Desc, valueType ValueType, function func() float64) *valueFunc { - result := &valueFunc{ - desc: desc, - valType: valueType, - function: function, - labelPairs: MakeLabelPairs(desc, nil), - } - result.init(result) - return result -} - -func (v *valueFunc) Desc() *Desc { - return v.desc -} - -func (v *valueFunc) Write(out *dto.Metric) error { - return populateMetric(v.valType, v.function(), v.labelPairs, nil, out, nil) -} - -// NewConstMetric returns a metric with one fixed value that cannot be -// changed. Users of this package will not have much use for it in regular -// operations. However, when implementing custom Collectors, it is useful as a -// throw-away metric that is generated on the fly to send it to Prometheus in -// the Collect method. NewConstMetric returns an error if the length of -// labelValues is not consistent with the variable labels in Desc or if Desc is -// invalid. -func NewConstMetric(desc *Desc, valueType ValueType, value float64, labelValues ...string) (Metric, error) { - if desc.err != nil { - return nil, desc.err - } - if err := validateLabelValues(labelValues, len(desc.variableLabels.names)); err != nil { - return nil, err - } - - metric := &dto.Metric{} - if err := populateMetric(valueType, value, MakeLabelPairs(desc, labelValues), nil, metric, nil); err != nil { - return nil, err - } - - return &constMetric{ - desc: desc, - metric: metric, - }, nil -} - -// MustNewConstMetric is a version of NewConstMetric that panics where -// NewConstMetric would have returned an error. -func MustNewConstMetric(desc *Desc, valueType ValueType, value float64, labelValues ...string) Metric { - m, err := NewConstMetric(desc, valueType, value, labelValues...) - if err != nil { - panic(err) - } - return m -} - -// NewConstMetricWithCreatedTimestamp does the same thing as NewConstMetric, but generates Counters -// with created timestamp set and returns an error for other metric types. -func NewConstMetricWithCreatedTimestamp(desc *Desc, valueType ValueType, value float64, ct time.Time, labelValues ...string) (Metric, error) { - if desc.err != nil { - return nil, desc.err - } - if err := validateLabelValues(labelValues, len(desc.variableLabels.names)); err != nil { - return nil, err - } - switch valueType { - case CounterValue: - break - default: - return nil, errors.New("created timestamps are only supported for counters") - } - - metric := &dto.Metric{} - if err := populateMetric(valueType, value, MakeLabelPairs(desc, labelValues), nil, metric, timestamppb.New(ct)); err != nil { - return nil, err - } - - return &constMetric{ - desc: desc, - metric: metric, - }, nil -} - -// MustNewConstMetricWithCreatedTimestamp is a version of NewConstMetricWithCreatedTimestamp that panics where -// NewConstMetricWithCreatedTimestamp would have returned an error. -func MustNewConstMetricWithCreatedTimestamp(desc *Desc, valueType ValueType, value float64, ct time.Time, labelValues ...string) Metric { - m, err := NewConstMetricWithCreatedTimestamp(desc, valueType, value, ct, labelValues...) 
- if err != nil { - panic(err) - } - return m -} - -type constMetric struct { - desc *Desc - metric *dto.Metric -} - -func (m *constMetric) Desc() *Desc { - return m.desc -} - -func (m *constMetric) Write(out *dto.Metric) error { - out.Label = m.metric.Label - out.Counter = m.metric.Counter - out.Gauge = m.metric.Gauge - out.Untyped = m.metric.Untyped - return nil -} - -func populateMetric( - t ValueType, - v float64, - labelPairs []*dto.LabelPair, - e *dto.Exemplar, - m *dto.Metric, - ct *timestamppb.Timestamp, -) error { - m.Label = labelPairs - switch t { - case CounterValue: - m.Counter = &dto.Counter{Value: proto.Float64(v), Exemplar: e, CreatedTimestamp: ct} - case GaugeValue: - m.Gauge = &dto.Gauge{Value: proto.Float64(v)} - case UntypedValue: - m.Untyped = &dto.Untyped{Value: proto.Float64(v)} - default: - return fmt.Errorf("encountered unknown type %v", t) - } - return nil -} - -// MakeLabelPairs is a helper function to create protobuf LabelPairs from the -// variable and constant labels in the provided Desc. The values for the -// variable labels are defined by the labelValues slice, which must be in the -// same order as the corresponding variable labels in the Desc. -// -// This function is only needed for custom Metric implementations. See MetricVec -// example. -func MakeLabelPairs(desc *Desc, labelValues []string) []*dto.LabelPair { - totalLen := len(desc.variableLabels.names) + len(desc.constLabelPairs) - if totalLen == 0 { - // Super fast path. - return nil - } - if len(desc.variableLabels.names) == 0 { - // Moderately fast path. - return desc.constLabelPairs - } - labelPairs := make([]*dto.LabelPair, 0, totalLen) - for i, l := range desc.variableLabels.names { - labelPairs = append(labelPairs, &dto.LabelPair{ - Name: proto.String(l), - Value: proto.String(labelValues[i]), - }) - } - labelPairs = append(labelPairs, desc.constLabelPairs...) - sort.Sort(internal.LabelPairSorter(labelPairs)) - return labelPairs -} - -// ExemplarMaxRunes is the max total number of runes allowed in exemplar labels. -const ExemplarMaxRunes = 128 - -// newExemplar creates a new dto.Exemplar from the provided values. An error is -// returned if any of the label names or values are invalid or if the total -// number of runes in the label names and values exceeds ExemplarMaxRunes. 
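A minimal sketch of the custom Collector pattern that the NewConstMetric/MustNewConstMetric comments above describe, emitting a throw-away gauge on every scrape; the queue abstraction and metric name are illustrative assumptions:

	package main

	import "github.com/prometheus/client_golang/prometheus"

	// queueCollector exposes the depth of a hypothetical queue as a gauge
	// that is generated on the fly during each collection.
	type queueCollector struct {
		depth func() float64 // supplied by the owning subsystem
		desc  *prometheus.Desc
	}

	func newQueueCollector(depth func() float64) *queueCollector {
		return &queueCollector{
			depth: depth,
			desc: prometheus.NewDesc(
				"work_queue_depth",
				"Current number of items in the work queue.",
				nil, nil,
			),
		}
	}

	func (c *queueCollector) Describe(ch chan<- *prometheus.Desc) { ch <- c.desc }

	func (c *queueCollector) Collect(ch chan<- prometheus.Metric) {
		ch <- prometheus.MustNewConstMetric(c.desc, prometheus.GaugeValue, c.depth())
	}

	func main() {
		prometheus.MustRegister(newQueueCollector(func() float64 { return 7 }))
	}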
-func newExemplar(value float64, ts time.Time, l Labels) (*dto.Exemplar, error) { - e := &dto.Exemplar{} - e.Value = proto.Float64(value) - tsProto := timestamppb.New(ts) - if err := tsProto.CheckValid(); err != nil { - return nil, err - } - e.Timestamp = tsProto - labelPairs := make([]*dto.LabelPair, 0, len(l)) - var runes int - for name, value := range l { - if !checkLabelName(name) { - return nil, fmt.Errorf("exemplar label name %q is invalid", name) - } - runes += utf8.RuneCountInString(name) - if !utf8.ValidString(value) { - return nil, fmt.Errorf("exemplar label value %q is not valid UTF-8", value) - } - runes += utf8.RuneCountInString(value) - labelPairs = append(labelPairs, &dto.LabelPair{ - Name: proto.String(name), - Value: proto.String(value), - }) - } - if runes > ExemplarMaxRunes { - return nil, fmt.Errorf("exemplar labels have %d runes, exceeding the limit of %d", runes, ExemplarMaxRunes) - } - e.Label = labelPairs - return e, nil -} diff --git a/vendor/github.com/prometheus/client_golang/prometheus/vec.go b/vendor/github.com/prometheus/client_golang/prometheus/vec.go deleted file mode 100644 index 955cfd59f..000000000 --- a/vendor/github.com/prometheus/client_golang/prometheus/vec.go +++ /dev/null @@ -1,709 +0,0 @@ -// Copyright 2014 The Prometheus Authors -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package prometheus - -import ( - "fmt" - "sync" - - "github.com/prometheus/common/model" -) - -// MetricVec is a Collector to bundle metrics of the same name that differ in -// their label values. MetricVec is not used directly but as a building block -// for implementations of vectors of a given metric type, like GaugeVec, -// CounterVec, SummaryVec, and HistogramVec. It is exported so that it can be -// used for custom Metric implementations. -// -// To create a FooVec for custom Metric Foo, embed a pointer to MetricVec in -// FooVec and initialize it with NewMetricVec. Implement wrappers for -// GetMetricWithLabelValues and GetMetricWith that return (Foo, error) rather -// than (Metric, error). Similarly, create a wrapper for CurryWith that returns -// (*FooVec, error) rather than (*MetricVec, error). It is recommended to also -// add the convenience methods WithLabelValues, With, and MustCurryWith, which -// panic instead of returning errors. See also the MetricVec example. -type MetricVec struct { - *metricMap - - curry []curriedLabelValue - - // hashAdd and hashAddByte can be replaced for testing collision handling. - hashAdd func(h uint64, s string) uint64 - hashAddByte func(h uint64, b byte) uint64 -} - -// NewMetricVec returns an initialized metricVec. 
-func NewMetricVec(desc *Desc, newMetric func(lvs ...string) Metric) *MetricVec { - return &MetricVec{ - metricMap: &metricMap{ - metrics: map[uint64][]metricWithLabelValues{}, - desc: desc, - newMetric: newMetric, - }, - hashAdd: hashAdd, - hashAddByte: hashAddByte, - } -} - -// DeleteLabelValues removes the metric where the variable labels are the same -// as those passed in as labels (same order as the VariableLabels in Desc). It -// returns true if a metric was deleted. -// -// It is not an error if the number of label values is not the same as the -// number of VariableLabels in Desc. However, such inconsistent label count can -// never match an actual metric, so the method will always return false in that -// case. -// -// Note that for more than one label value, this method is prone to mistakes -// caused by an incorrect order of arguments. Consider Delete(Labels) as an -// alternative to avoid that type of mistake. For higher label numbers, the -// latter has a much more readable (albeit more verbose) syntax, but it comes -// with a performance overhead (for creating and processing the Labels map). -// See also the CounterVec example. -func (m *MetricVec) DeleteLabelValues(lvs ...string) bool { - lvs = constrainLabelValues(m.desc, lvs, m.curry) - - h, err := m.hashLabelValues(lvs) - if err != nil { - return false - } - - return m.metricMap.deleteByHashWithLabelValues(h, lvs, m.curry) -} - -// Delete deletes the metric where the variable labels are the same as those -// passed in as labels. It returns true if a metric was deleted. -// -// It is not an error if the number and names of the Labels are inconsistent -// with those of the VariableLabels in Desc. However, such inconsistent Labels -// can never match an actual metric, so the method will always return false in -// that case. -// -// This method is used for the same purpose as DeleteLabelValues(...string). See -// there for pros and cons of the two methods. -func (m *MetricVec) Delete(labels Labels) bool { - labels, closer := constrainLabels(m.desc, labels) - defer closer() - - h, err := m.hashLabels(labels) - if err != nil { - return false - } - - return m.metricMap.deleteByHashWithLabels(h, labels, m.curry) -} - -// DeletePartialMatch deletes all metrics where the variable labels contain all of those -// passed in as labels. The order of the labels does not matter. -// It returns the number of metrics deleted. -// -// Note that curried labels will never be matched if deleting from the curried vector. -// To match curried labels with DeletePartialMatch, it must be called on the base vector. -func (m *MetricVec) DeletePartialMatch(labels Labels) int { - labels, closer := constrainLabels(m.desc, labels) - defer closer() - - return m.metricMap.deleteByLabels(labels, m.curry) -} - -// Without explicit forwarding of Describe, Collect, Reset, those methods won't -// show up in GoDoc. - -// Describe implements Collector. -func (m *MetricVec) Describe(ch chan<- *Desc) { m.metricMap.Describe(ch) } - -// Collect implements Collector. -func (m *MetricVec) Collect(ch chan<- Metric) { m.metricMap.Collect(ch) } - -// Reset deletes all metrics in this vector. -func (m *MetricVec) Reset() { m.metricMap.Reset() } - -// CurryWith returns a vector curried with the provided labels, i.e. the -// returned vector has those labels pre-set for all labeled operations performed -// on it. The cardinality of the curried vector is reduced accordingly. 
The -// order of the remaining labels stays the same (just with the curried labels -// taken out of the sequence – which is relevant for the -// (GetMetric)WithLabelValues methods). It is possible to curry a curried -// vector, but only with labels not yet used for currying before. -// -// The metrics contained in the MetricVec are shared between the curried and -// uncurried vectors. They are just accessed differently. Curried and uncurried -// vectors behave identically in terms of collection. Only one must be -// registered with a given registry (usually the uncurried version). The Reset -// method deletes all metrics, even if called on a curried vector. -// -// Note that CurryWith is usually not called directly but through a wrapper -// around MetricVec, implementing a vector for a specific Metric -// implementation, for example GaugeVec. -func (m *MetricVec) CurryWith(labels Labels) (*MetricVec, error) { - var ( - newCurry []curriedLabelValue - oldCurry = m.curry - iCurry int - ) - for i, labelName := range m.desc.variableLabels.names { - val, ok := labels[labelName] - if iCurry < len(oldCurry) && oldCurry[iCurry].index == i { - if ok { - return nil, fmt.Errorf("label name %q is already curried", labelName) - } - newCurry = append(newCurry, oldCurry[iCurry]) - iCurry++ - } else { - if !ok { - continue // Label stays uncurried. - } - newCurry = append(newCurry, curriedLabelValue{ - i, - m.desc.variableLabels.constrain(labelName, val), - }) - } - } - if l := len(oldCurry) + len(labels) - len(newCurry); l > 0 { - return nil, fmt.Errorf("%d unknown label(s) found during currying", l) - } - - return &MetricVec{ - metricMap: m.metricMap, - curry: newCurry, - hashAdd: m.hashAdd, - hashAddByte: m.hashAddByte, - }, nil -} - -// GetMetricWithLabelValues returns the Metric for the given slice of label -// values (same order as the variable labels in Desc). If that combination of -// label values is accessed for the first time, a new Metric is created (by -// calling the newMetric function provided during construction of the -// MetricVec). -// -// It is possible to call this method without using the returned Metric to only -// create the new Metric but leave it in its initial state. -// -// Keeping the Metric for later use is possible (and should be considered if -// performance is critical), but keep in mind that Reset, DeleteLabelValues and -// Delete can be used to delete the Metric from the MetricVec. In that case, the -// Metric will still exist, but it will not be exported anymore, even if a -// Metric with the same label values is created later. -// -// An error is returned if the number of label values is not the same as the -// number of variable labels in Desc (minus any curried labels). -// -// Note that for more than one label value, this method is prone to mistakes -// caused by an incorrect order of arguments. Consider GetMetricWith(Labels) as -// an alternative to avoid that type of mistake. For higher label numbers, the -// latter has a much more readable (albeit more verbose) syntax, but it comes -// with a performance overhead (for creating and processing the Labels map). -// -// Note that GetMetricWithLabelValues is usually not called directly but through -// a wrapper around MetricVec, implementing a vector for a specific Metric -// implementation, for example GaugeVec. 
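A minimal sketch of the currying behaviour documented for CurryWith above, shown through the SummaryVec wrapper from earlier in this diff; all names and values are illustrative:

	package main

	import "github.com/prometheus/client_golang/prometheus"

	func main() {
		latency := prometheus.NewSummaryVec(
			prometheus.SummaryOpts{
				Name: "handler_latency_seconds",
				Help: "Latency per handler and method.",
			},
			[]string{"handler", "method"},
		)
		prometheus.MustRegister(latency)

		// Pre-set the "handler" label; the curried vector only needs "method".
		search := latency.MustCurryWith(prometheus.Labels{"handler": "/search"})
		search.WithLabelValues("GET").Observe(0.042)
		search.With(prometheus.Labels{"method": "POST"}).Observe(0.108)
	}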
-func (m *MetricVec) GetMetricWithLabelValues(lvs ...string) (Metric, error) { - lvs = constrainLabelValues(m.desc, lvs, m.curry) - h, err := m.hashLabelValues(lvs) - if err != nil { - return nil, err - } - - return m.metricMap.getOrCreateMetricWithLabelValues(h, lvs, m.curry), nil -} - -// GetMetricWith returns the Metric for the given Labels map (the label names -// must match those of the variable labels in Desc). If that label map is -// accessed for the first time, a new Metric is created. Implications of -// creating a Metric without using it and keeping the Metric for later use -// are the same as for GetMetricWithLabelValues. -// -// An error is returned if the number and names of the Labels are inconsistent -// with those of the variable labels in Desc (minus any curried labels). -// -// This method is used for the same purpose as -// GetMetricWithLabelValues(...string). See there for pros and cons of the two -// methods. -// -// Note that GetMetricWith is usually not called directly but through a wrapper -// around MetricVec, implementing a vector for a specific Metric implementation, -// for example GaugeVec. -func (m *MetricVec) GetMetricWith(labels Labels) (Metric, error) { - labels, closer := constrainLabels(m.desc, labels) - defer closer() - - h, err := m.hashLabels(labels) - if err != nil { - return nil, err - } - - return m.metricMap.getOrCreateMetricWithLabels(h, labels, m.curry), nil -} - -func (m *MetricVec) hashLabelValues(vals []string) (uint64, error) { - if err := validateLabelValues(vals, len(m.desc.variableLabels.names)-len(m.curry)); err != nil { - return 0, err - } - - var ( - h = hashNew() - curry = m.curry - iVals, iCurry int - ) - for i := 0; i < len(m.desc.variableLabels.names); i++ { - if iCurry < len(curry) && curry[iCurry].index == i { - h = m.hashAdd(h, curry[iCurry].value) - iCurry++ - } else { - h = m.hashAdd(h, vals[iVals]) - iVals++ - } - h = m.hashAddByte(h, model.SeparatorByte) - } - return h, nil -} - -func (m *MetricVec) hashLabels(labels Labels) (uint64, error) { - if err := validateValuesInLabels(labels, len(m.desc.variableLabels.names)-len(m.curry)); err != nil { - return 0, err - } - - var ( - h = hashNew() - curry = m.curry - iCurry int - ) - for i, labelName := range m.desc.variableLabels.names { - val, ok := labels[labelName] - if iCurry < len(curry) && curry[iCurry].index == i { - if ok { - return 0, fmt.Errorf("label name %q is already curried", labelName) - } - h = m.hashAdd(h, curry[iCurry].value) - iCurry++ - } else { - if !ok { - return 0, fmt.Errorf("label name %q missing in label map", labelName) - } - h = m.hashAdd(h, val) - } - h = m.hashAddByte(h, model.SeparatorByte) - } - return h, nil -} - -// metricWithLabelValues provides the metric and its label values for -// disambiguation on hash collision. -type metricWithLabelValues struct { - values []string - metric Metric -} - -// curriedLabelValue sets the curried value for a label at the given index. -type curriedLabelValue struct { - index int - value string -} - -// metricMap is a helper for metricVec and shared between differently curried -// metricVecs. -type metricMap struct { - mtx sync.RWMutex // Protects metrics. - metrics map[uint64][]metricWithLabelValues - desc *Desc - newMetric func(labelValues ...string) Metric -} - -// Describe implements Collector. It will send exactly one Desc to the provided -// channel. -func (m *metricMap) Describe(ch chan<- *Desc) { - ch <- m.desc -} - -// Collect implements Collector. 
-func (m *metricMap) Collect(ch chan<- Metric) { - m.mtx.RLock() - defer m.mtx.RUnlock() - - for _, metrics := range m.metrics { - for _, metric := range metrics { - ch <- metric.metric - } - } -} - -// Reset deletes all metrics in this vector. -func (m *metricMap) Reset() { - m.mtx.Lock() - defer m.mtx.Unlock() - - for h := range m.metrics { - delete(m.metrics, h) - } -} - -// deleteByHashWithLabelValues removes the metric from the hash bucket h. If -// there are multiple matches in the bucket, use lvs to select a metric and -// remove only that metric. -func (m *metricMap) deleteByHashWithLabelValues( - h uint64, lvs []string, curry []curriedLabelValue, -) bool { - m.mtx.Lock() - defer m.mtx.Unlock() - - metrics, ok := m.metrics[h] - if !ok { - return false - } - - i := findMetricWithLabelValues(metrics, lvs, curry) - if i >= len(metrics) { - return false - } - - if len(metrics) > 1 { - old := metrics - m.metrics[h] = append(metrics[:i], metrics[i+1:]...) - old[len(old)-1] = metricWithLabelValues{} - } else { - delete(m.metrics, h) - } - return true -} - -// deleteByHashWithLabels removes the metric from the hash bucket h. If there -// are multiple matches in the bucket, use lvs to select a metric and remove -// only that metric. -func (m *metricMap) deleteByHashWithLabels( - h uint64, labels Labels, curry []curriedLabelValue, -) bool { - m.mtx.Lock() - defer m.mtx.Unlock() - - metrics, ok := m.metrics[h] - if !ok { - return false - } - i := findMetricWithLabels(m.desc, metrics, labels, curry) - if i >= len(metrics) { - return false - } - - if len(metrics) > 1 { - old := metrics - m.metrics[h] = append(metrics[:i], metrics[i+1:]...) - old[len(old)-1] = metricWithLabelValues{} - } else { - delete(m.metrics, h) - } - return true -} - -// deleteByLabels deletes a metric if the given labels are present in the metric. -func (m *metricMap) deleteByLabels(labels Labels, curry []curriedLabelValue) int { - m.mtx.Lock() - defer m.mtx.Unlock() - - var numDeleted int - - for h, metrics := range m.metrics { - i := findMetricWithPartialLabels(m.desc, metrics, labels, curry) - if i >= len(metrics) { - // Didn't find matching labels in this metric slice. - continue - } - delete(m.metrics, h) - numDeleted++ - } - - return numDeleted -} - -// findMetricWithPartialLabel returns the index of the matching metric or -// len(metrics) if not found. -func findMetricWithPartialLabels( - desc *Desc, metrics []metricWithLabelValues, labels Labels, curry []curriedLabelValue, -) int { - for i, metric := range metrics { - if matchPartialLabels(desc, metric.values, labels, curry) { - return i - } - } - return len(metrics) -} - -// indexOf searches the given slice of strings for the target string and returns -// the index or len(items) as well as a boolean whether the search succeeded. -func indexOf(target string, items []string) (int, bool) { - for i, l := range items { - if l == target { - return i, true - } - } - return len(items), false -} - -// valueMatchesVariableOrCurriedValue determines if a value was previously curried, -// and returns whether it matches either the "base" value or the curried value accordingly. -// It also indicates whether the match is against a curried or uncurried value. -func valueMatchesVariableOrCurriedValue(targetValue string, index int, values []string, curry []curriedLabelValue) (bool, bool) { - for _, curriedValue := range curry { - if curriedValue.index == index { - // This label was curried. See if the curried value matches our target. 
- return curriedValue.value == targetValue, true - } - } - // This label was not curried. See if the current value matches our target label. - return values[index] == targetValue, false -} - -// matchPartialLabels searches the current metric and returns whether all of the target label:value pairs are present. -func matchPartialLabels(desc *Desc, values []string, labels Labels, curry []curriedLabelValue) bool { - for l, v := range labels { - // Check if the target label exists in our metrics and get the index. - varLabelIndex, validLabel := indexOf(l, desc.variableLabels.names) - if validLabel { - // Check the value of that label against the target value. - // We don't consider curried values in partial matches. - matches, curried := valueMatchesVariableOrCurriedValue(v, varLabelIndex, values, curry) - if matches && !curried { - continue - } - } - return false - } - return true -} - -// getOrCreateMetricWithLabelValues retrieves the metric by hash and label value -// or creates it and returns the new one. -// -// This function holds the mutex. -func (m *metricMap) getOrCreateMetricWithLabelValues( - hash uint64, lvs []string, curry []curriedLabelValue, -) Metric { - m.mtx.RLock() - metric, ok := m.getMetricWithHashAndLabelValues(hash, lvs, curry) - m.mtx.RUnlock() - if ok { - return metric - } - - m.mtx.Lock() - defer m.mtx.Unlock() - metric, ok = m.getMetricWithHashAndLabelValues(hash, lvs, curry) - if !ok { - inlinedLVs := inlineLabelValues(lvs, curry) - metric = m.newMetric(inlinedLVs...) - m.metrics[hash] = append(m.metrics[hash], metricWithLabelValues{values: inlinedLVs, metric: metric}) - } - return metric -} - -// getOrCreateMetricWithLabelValues retrieves the metric by hash and label value -// or creates it and returns the new one. -// -// This function holds the mutex. -func (m *metricMap) getOrCreateMetricWithLabels( - hash uint64, labels Labels, curry []curriedLabelValue, -) Metric { - m.mtx.RLock() - metric, ok := m.getMetricWithHashAndLabels(hash, labels, curry) - m.mtx.RUnlock() - if ok { - return metric - } - - m.mtx.Lock() - defer m.mtx.Unlock() - metric, ok = m.getMetricWithHashAndLabels(hash, labels, curry) - if !ok { - lvs := extractLabelValues(m.desc, labels, curry) - metric = m.newMetric(lvs...) - m.metrics[hash] = append(m.metrics[hash], metricWithLabelValues{values: lvs, metric: metric}) - } - return metric -} - -// getMetricWithHashAndLabelValues gets a metric while handling possible -// collisions in the hash space. Must be called while holding the read mutex. -func (m *metricMap) getMetricWithHashAndLabelValues( - h uint64, lvs []string, curry []curriedLabelValue, -) (Metric, bool) { - metrics, ok := m.metrics[h] - if ok { - if i := findMetricWithLabelValues(metrics, lvs, curry); i < len(metrics) { - return metrics[i].metric, true - } - } - return nil, false -} - -// getMetricWithHashAndLabels gets a metric while handling possible collisions in -// the hash space. Must be called while holding read mutex. -func (m *metricMap) getMetricWithHashAndLabels( - h uint64, labels Labels, curry []curriedLabelValue, -) (Metric, bool) { - metrics, ok := m.metrics[h] - if ok { - if i := findMetricWithLabels(m.desc, metrics, labels, curry); i < len(metrics) { - return metrics[i].metric, true - } - } - return nil, false -} - -// findMetricWithLabelValues returns the index of the matching metric or -// len(metrics) if not found. 
-func findMetricWithLabelValues( - metrics []metricWithLabelValues, lvs []string, curry []curriedLabelValue, -) int { - for i, metric := range metrics { - if matchLabelValues(metric.values, lvs, curry) { - return i - } - } - return len(metrics) -} - -// findMetricWithLabels returns the index of the matching metric or len(metrics) -// if not found. -func findMetricWithLabels( - desc *Desc, metrics []metricWithLabelValues, labels Labels, curry []curriedLabelValue, -) int { - for i, metric := range metrics { - if matchLabels(desc, metric.values, labels, curry) { - return i - } - } - return len(metrics) -} - -func matchLabelValues(values, lvs []string, curry []curriedLabelValue) bool { - if len(values) != len(lvs)+len(curry) { - return false - } - var iLVs, iCurry int - for i, v := range values { - if iCurry < len(curry) && curry[iCurry].index == i { - if v != curry[iCurry].value { - return false - } - iCurry++ - continue - } - if v != lvs[iLVs] { - return false - } - iLVs++ - } - return true -} - -func matchLabels(desc *Desc, values []string, labels Labels, curry []curriedLabelValue) bool { - if len(values) != len(labels)+len(curry) { - return false - } - iCurry := 0 - for i, k := range desc.variableLabels.names { - if iCurry < len(curry) && curry[iCurry].index == i { - if values[i] != curry[iCurry].value { - return false - } - iCurry++ - continue - } - if values[i] != labels[k] { - return false - } - } - return true -} - -func extractLabelValues(desc *Desc, labels Labels, curry []curriedLabelValue) []string { - labelValues := make([]string, len(labels)+len(curry)) - iCurry := 0 - for i, k := range desc.variableLabels.names { - if iCurry < len(curry) && curry[iCurry].index == i { - labelValues[i] = curry[iCurry].value - iCurry++ - continue - } - labelValues[i] = labels[k] - } - return labelValues -} - -func inlineLabelValues(lvs []string, curry []curriedLabelValue) []string { - labelValues := make([]string, len(lvs)+len(curry)) - var iCurry, iLVs int - for i := range labelValues { - if iCurry < len(curry) && curry[iCurry].index == i { - labelValues[i] = curry[iCurry].value - iCurry++ - continue - } - labelValues[i] = lvs[iLVs] - iLVs++ - } - return labelValues -} - -var labelsPool = &sync.Pool{ - New: func() interface{} { - return make(Labels) - }, -} - -func constrainLabels(desc *Desc, labels Labels) (Labels, func()) { - if len(desc.variableLabels.labelConstraints) == 0 { - // Fast path when there's no constraints - return labels, func() {} - } - - constrainedLabels := labelsPool.Get().(Labels) - for l, v := range labels { - constrainedLabels[l] = desc.variableLabels.constrain(l, v) - } - - return constrainedLabels, func() { - for k := range constrainedLabels { - delete(constrainedLabels, k) - } - labelsPool.Put(constrainedLabels) - } -} - -func constrainLabelValues(desc *Desc, lvs []string, curry []curriedLabelValue) []string { - if len(desc.variableLabels.labelConstraints) == 0 { - // Fast path when there's no constraints - return lvs - } - - constrainedValues := make([]string, len(lvs)) - var iCurry, iLVs int - for i := 0; i < len(lvs)+len(curry); i++ { - if iCurry < len(curry) && curry[iCurry].index == i { - iCurry++ - continue - } - - if i < len(desc.variableLabels.names) { - constrainedValues[iLVs] = desc.variableLabels.constrain( - desc.variableLabels.names[i], - lvs[iLVs], - ) - } else { - constrainedValues[iLVs] = lvs[iLVs] - } - iLVs++ - } - return constrainedValues -} diff --git a/vendor/github.com/prometheus/client_golang/prometheus/vnext.go 
b/vendor/github.com/prometheus/client_golang/prometheus/vnext.go deleted file mode 100644 index 42bc3a8f0..000000000 --- a/vendor/github.com/prometheus/client_golang/prometheus/vnext.go +++ /dev/null @@ -1,23 +0,0 @@ -// Copyright 2022 The Prometheus Authors -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package prometheus - -type v2 struct{} - -// V2 is a struct that can be referenced to access experimental API that might -// be present in v2 of client golang someday. It offers extended functionality -// of v1 with slightly changed API. It is acceptable to use some pieces from v1 -// and e.g `prometheus.NewGauge` and some from v2 e.g. `prometheus.V2.NewDesc` -// in the same codebase. -var V2 = v2{} diff --git a/vendor/github.com/prometheus/client_golang/prometheus/wrap.go b/vendor/github.com/prometheus/client_golang/prometheus/wrap.go deleted file mode 100644 index 25da157f1..000000000 --- a/vendor/github.com/prometheus/client_golang/prometheus/wrap.go +++ /dev/null @@ -1,214 +0,0 @@ -// Copyright 2018 The Prometheus Authors -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package prometheus - -import ( - "fmt" - "sort" - - "github.com/prometheus/client_golang/prometheus/internal" - - dto "github.com/prometheus/client_model/go" - "google.golang.org/protobuf/proto" -) - -// WrapRegistererWith returns a Registerer wrapping the provided -// Registerer. Collectors registered with the returned Registerer will be -// registered with the wrapped Registerer in a modified way. The modified -// Collector adds the provided Labels to all Metrics it collects (as -// ConstLabels). The Metrics collected by the unmodified Collector must not -// duplicate any of those labels. Wrapping a nil value is valid, resulting -// in a no-op Registerer. -// -// WrapRegistererWith provides a way to add fixed labels to a subset of -// Collectors. It should not be used to add fixed labels to all metrics -// exposed. See also -// https://prometheus.io/docs/instrumenting/writing_exporters/#target-labels-not-static-scraped-labels -// -// Conflicts between Collectors registered through the original Registerer with -// Collectors registered through the wrapping Registerer will still be -// detected. Any AlreadyRegisteredError returned by the Register method of -// either Registerer will contain the ExistingCollector in the form it was -// provided to the respective registry. -// -// The Collector example demonstrates a use of WrapRegistererWith. 
-func WrapRegistererWith(labels Labels, reg Registerer) Registerer { - return &wrappingRegisterer{ - wrappedRegisterer: reg, - labels: labels, - } -} - -// WrapRegistererWithPrefix returns a Registerer wrapping the provided -// Registerer. Collectors registered with the returned Registerer will be -// registered with the wrapped Registerer in a modified way. The modified -// Collector adds the provided prefix to the name of all Metrics it collects. -// Wrapping a nil value is valid, resulting in a no-op Registerer. -// -// WrapRegistererWithPrefix is useful to have one place to prefix all metrics of -// a sub-system. To make this work, register metrics of the sub-system with the -// wrapping Registerer returned by WrapRegistererWithPrefix. It is rarely useful -// to use the same prefix for all metrics exposed. In particular, do not prefix -// metric names that are standardized across applications, as that would break -// horizontal monitoring, for example the metrics provided by the Go collector -// (see NewGoCollector) and the process collector (see NewProcessCollector). (In -// fact, those metrics are already prefixed with “go_” or “process_”, -// respectively.) -// -// Conflicts between Collectors registered through the original Registerer with -// Collectors registered through the wrapping Registerer will still be -// detected. Any AlreadyRegisteredError returned by the Register method of -// either Registerer will contain the ExistingCollector in the form it was -// provided to the respective registry. -func WrapRegistererWithPrefix(prefix string, reg Registerer) Registerer { - return &wrappingRegisterer{ - wrappedRegisterer: reg, - prefix: prefix, - } -} - -type wrappingRegisterer struct { - wrappedRegisterer Registerer - prefix string - labels Labels -} - -func (r *wrappingRegisterer) Register(c Collector) error { - if r.wrappedRegisterer == nil { - return nil - } - return r.wrappedRegisterer.Register(&wrappingCollector{ - wrappedCollector: c, - prefix: r.prefix, - labels: r.labels, - }) -} - -func (r *wrappingRegisterer) MustRegister(cs ...Collector) { - if r.wrappedRegisterer == nil { - return - } - for _, c := range cs { - if err := r.Register(c); err != nil { - panic(err) - } - } -} - -func (r *wrappingRegisterer) Unregister(c Collector) bool { - if r.wrappedRegisterer == nil { - return false - } - return r.wrappedRegisterer.Unregister(&wrappingCollector{ - wrappedCollector: c, - prefix: r.prefix, - labels: r.labels, - }) -} - -type wrappingCollector struct { - wrappedCollector Collector - prefix string - labels Labels -} - -func (c *wrappingCollector) Collect(ch chan<- Metric) { - wrappedCh := make(chan Metric) - go func() { - c.wrappedCollector.Collect(wrappedCh) - close(wrappedCh) - }() - for m := range wrappedCh { - ch <- &wrappingMetric{ - wrappedMetric: m, - prefix: c.prefix, - labels: c.labels, - } - } -} - -func (c *wrappingCollector) Describe(ch chan<- *Desc) { - wrappedCh := make(chan *Desc) - go func() { - c.wrappedCollector.Describe(wrappedCh) - close(wrappedCh) - }() - for desc := range wrappedCh { - ch <- wrapDesc(desc, c.prefix, c.labels) - } -} - -func (c *wrappingCollector) unwrapRecursively() Collector { - switch wc := c.wrappedCollector.(type) { - case *wrappingCollector: - return wc.unwrapRecursively() - default: - return wc - } -} - -type wrappingMetric struct { - wrappedMetric Metric - prefix string - labels Labels -} - -func (m *wrappingMetric) Desc() *Desc { - return wrapDesc(m.wrappedMetric.Desc(), m.prefix, m.labels) -} - -func (m *wrappingMetric) 
Write(out *dto.Metric) error { - if err := m.wrappedMetric.Write(out); err != nil { - return err - } - if len(m.labels) == 0 { - // No wrapping labels. - return nil - } - for ln, lv := range m.labels { - out.Label = append(out.Label, &dto.LabelPair{ - Name: proto.String(ln), - Value: proto.String(lv), - }) - } - sort.Sort(internal.LabelPairSorter(out.Label)) - return nil -} - -func wrapDesc(desc *Desc, prefix string, labels Labels) *Desc { - constLabels := Labels{} - for _, lp := range desc.constLabelPairs { - constLabels[*lp.Name] = *lp.Value - } - for ln, lv := range labels { - if _, alreadyUsed := constLabels[ln]; alreadyUsed { - return &Desc{ - fqName: desc.fqName, - help: desc.help, - variableLabels: desc.variableLabels, - constLabelPairs: desc.constLabelPairs, - err: fmt.Errorf("attempted wrapping with already existing label name %q", ln), - } - } - constLabels[ln] = lv - } - // NewDesc will do remaining validations. - newDesc := V2.NewDesc(prefix+desc.fqName, desc.help, desc.variableLabels, constLabels) - // Propagate errors if there was any. This will override any errer - // created by NewDesc above, i.e. earlier errors get precedence. - if desc.err != nil { - newDesc.err = desc.err - } - return newDesc -} diff --git a/vendor/github.com/prometheus/client_model/LICENSE b/vendor/github.com/prometheus/client_model/LICENSE deleted file mode 100644 index 261eeb9e9..000000000 --- a/vendor/github.com/prometheus/client_model/LICENSE +++ /dev/null @@ -1,201 +0,0 @@ - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. 
For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. 
You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. 
In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright [yyyy] [name of copyright owner] - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. diff --git a/vendor/github.com/prometheus/client_model/NOTICE b/vendor/github.com/prometheus/client_model/NOTICE deleted file mode 100644 index 20110e410..000000000 --- a/vendor/github.com/prometheus/client_model/NOTICE +++ /dev/null @@ -1,5 +0,0 @@ -Data model artifacts for Prometheus. -Copyright 2012-2015 The Prometheus Authors - -This product includes software developed at -SoundCloud Ltd. (http://soundcloud.com/). diff --git a/vendor/github.com/prometheus/client_model/go/metrics.pb.go b/vendor/github.com/prometheus/client_model/go/metrics.pb.go deleted file mode 100644 index 2f1549075..000000000 --- a/vendor/github.com/prometheus/client_model/go/metrics.pb.go +++ /dev/null @@ -1,1399 +0,0 @@ -// Copyright 2013 Prometheus Team -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -// Code generated by protoc-gen-go. DO NOT EDIT. -// versions: -// protoc-gen-go v1.30.0 -// protoc v3.20.3 -// source: io/prometheus/client/metrics.proto - -package io_prometheus_client - -import ( - protoreflect "google.golang.org/protobuf/reflect/protoreflect" - protoimpl "google.golang.org/protobuf/runtime/protoimpl" - timestamppb "google.golang.org/protobuf/types/known/timestamppb" - reflect "reflect" - sync "sync" -) - -const ( - // Verify that this generated code is sufficiently up-to-date. - _ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion) - // Verify that runtime/protoimpl is sufficiently up-to-date. - _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20) -) - -type MetricType int32 - -const ( - // COUNTER must use the Metric field "counter". - MetricType_COUNTER MetricType = 0 - // GAUGE must use the Metric field "gauge". - MetricType_GAUGE MetricType = 1 - // SUMMARY must use the Metric field "summary". - MetricType_SUMMARY MetricType = 2 - // UNTYPED must use the Metric field "untyped". - MetricType_UNTYPED MetricType = 3 - // HISTOGRAM must use the Metric field "histogram". - MetricType_HISTOGRAM MetricType = 4 - // GAUGE_HISTOGRAM must use the Metric field "histogram". - MetricType_GAUGE_HISTOGRAM MetricType = 5 -) - -// Enum value maps for MetricType. -var ( - MetricType_name = map[int32]string{ - 0: "COUNTER", - 1: "GAUGE", - 2: "SUMMARY", - 3: "UNTYPED", - 4: "HISTOGRAM", - 5: "GAUGE_HISTOGRAM", - } - MetricType_value = map[string]int32{ - "COUNTER": 0, - "GAUGE": 1, - "SUMMARY": 2, - "UNTYPED": 3, - "HISTOGRAM": 4, - "GAUGE_HISTOGRAM": 5, - } -) - -func (x MetricType) Enum() *MetricType { - p := new(MetricType) - *p = x - return p -} - -func (x MetricType) String() string { - return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x)) -} - -func (MetricType) Descriptor() protoreflect.EnumDescriptor { - return file_io_prometheus_client_metrics_proto_enumTypes[0].Descriptor() -} - -func (MetricType) Type() protoreflect.EnumType { - return &file_io_prometheus_client_metrics_proto_enumTypes[0] -} - -func (x MetricType) Number() protoreflect.EnumNumber { - return protoreflect.EnumNumber(x) -} - -// Deprecated: Do not use. -func (x *MetricType) UnmarshalJSON(b []byte) error { - num, err := protoimpl.X.UnmarshalJSONEnum(x.Descriptor(), b) - if err != nil { - return err - } - *x = MetricType(num) - return nil -} - -// Deprecated: Use MetricType.Descriptor instead. 
-func (MetricType) EnumDescriptor() ([]byte, []int) { - return file_io_prometheus_client_metrics_proto_rawDescGZIP(), []int{0} -} - -type LabelPair struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - Name *string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` - Value *string `protobuf:"bytes,2,opt,name=value" json:"value,omitempty"` -} - -func (x *LabelPair) Reset() { - *x = LabelPair{} - if protoimpl.UnsafeEnabled { - mi := &file_io_prometheus_client_metrics_proto_msgTypes[0] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *LabelPair) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*LabelPair) ProtoMessage() {} - -func (x *LabelPair) ProtoReflect() protoreflect.Message { - mi := &file_io_prometheus_client_metrics_proto_msgTypes[0] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use LabelPair.ProtoReflect.Descriptor instead. -func (*LabelPair) Descriptor() ([]byte, []int) { - return file_io_prometheus_client_metrics_proto_rawDescGZIP(), []int{0} -} - -func (x *LabelPair) GetName() string { - if x != nil && x.Name != nil { - return *x.Name - } - return "" -} - -func (x *LabelPair) GetValue() string { - if x != nil && x.Value != nil { - return *x.Value - } - return "" -} - -type Gauge struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - Value *float64 `protobuf:"fixed64,1,opt,name=value" json:"value,omitempty"` -} - -func (x *Gauge) Reset() { - *x = Gauge{} - if protoimpl.UnsafeEnabled { - mi := &file_io_prometheus_client_metrics_proto_msgTypes[1] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *Gauge) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*Gauge) ProtoMessage() {} - -func (x *Gauge) ProtoReflect() protoreflect.Message { - mi := &file_io_prometheus_client_metrics_proto_msgTypes[1] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use Gauge.ProtoReflect.Descriptor instead. 
-func (*Gauge) Descriptor() ([]byte, []int) { - return file_io_prometheus_client_metrics_proto_rawDescGZIP(), []int{1} -} - -func (x *Gauge) GetValue() float64 { - if x != nil && x.Value != nil { - return *x.Value - } - return 0 -} - -type Counter struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - Value *float64 `protobuf:"fixed64,1,opt,name=value" json:"value,omitempty"` - Exemplar *Exemplar `protobuf:"bytes,2,opt,name=exemplar" json:"exemplar,omitempty"` - CreatedTimestamp *timestamppb.Timestamp `protobuf:"bytes,3,opt,name=created_timestamp,json=createdTimestamp" json:"created_timestamp,omitempty"` -} - -func (x *Counter) Reset() { - *x = Counter{} - if protoimpl.UnsafeEnabled { - mi := &file_io_prometheus_client_metrics_proto_msgTypes[2] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *Counter) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*Counter) ProtoMessage() {} - -func (x *Counter) ProtoReflect() protoreflect.Message { - mi := &file_io_prometheus_client_metrics_proto_msgTypes[2] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use Counter.ProtoReflect.Descriptor instead. -func (*Counter) Descriptor() ([]byte, []int) { - return file_io_prometheus_client_metrics_proto_rawDescGZIP(), []int{2} -} - -func (x *Counter) GetValue() float64 { - if x != nil && x.Value != nil { - return *x.Value - } - return 0 -} - -func (x *Counter) GetExemplar() *Exemplar { - if x != nil { - return x.Exemplar - } - return nil -} - -func (x *Counter) GetCreatedTimestamp() *timestamppb.Timestamp { - if x != nil { - return x.CreatedTimestamp - } - return nil -} - -type Quantile struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - Quantile *float64 `protobuf:"fixed64,1,opt,name=quantile" json:"quantile,omitempty"` - Value *float64 `protobuf:"fixed64,2,opt,name=value" json:"value,omitempty"` -} - -func (x *Quantile) Reset() { - *x = Quantile{} - if protoimpl.UnsafeEnabled { - mi := &file_io_prometheus_client_metrics_proto_msgTypes[3] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *Quantile) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*Quantile) ProtoMessage() {} - -func (x *Quantile) ProtoReflect() protoreflect.Message { - mi := &file_io_prometheus_client_metrics_proto_msgTypes[3] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use Quantile.ProtoReflect.Descriptor instead. 
-func (*Quantile) Descriptor() ([]byte, []int) { - return file_io_prometheus_client_metrics_proto_rawDescGZIP(), []int{3} -} - -func (x *Quantile) GetQuantile() float64 { - if x != nil && x.Quantile != nil { - return *x.Quantile - } - return 0 -} - -func (x *Quantile) GetValue() float64 { - if x != nil && x.Value != nil { - return *x.Value - } - return 0 -} - -type Summary struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - SampleCount *uint64 `protobuf:"varint,1,opt,name=sample_count,json=sampleCount" json:"sample_count,omitempty"` - SampleSum *float64 `protobuf:"fixed64,2,opt,name=sample_sum,json=sampleSum" json:"sample_sum,omitempty"` - Quantile []*Quantile `protobuf:"bytes,3,rep,name=quantile" json:"quantile,omitempty"` - CreatedTimestamp *timestamppb.Timestamp `protobuf:"bytes,4,opt,name=created_timestamp,json=createdTimestamp" json:"created_timestamp,omitempty"` -} - -func (x *Summary) Reset() { - *x = Summary{} - if protoimpl.UnsafeEnabled { - mi := &file_io_prometheus_client_metrics_proto_msgTypes[4] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *Summary) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*Summary) ProtoMessage() {} - -func (x *Summary) ProtoReflect() protoreflect.Message { - mi := &file_io_prometheus_client_metrics_proto_msgTypes[4] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use Summary.ProtoReflect.Descriptor instead. -func (*Summary) Descriptor() ([]byte, []int) { - return file_io_prometheus_client_metrics_proto_rawDescGZIP(), []int{4} -} - -func (x *Summary) GetSampleCount() uint64 { - if x != nil && x.SampleCount != nil { - return *x.SampleCount - } - return 0 -} - -func (x *Summary) GetSampleSum() float64 { - if x != nil && x.SampleSum != nil { - return *x.SampleSum - } - return 0 -} - -func (x *Summary) GetQuantile() []*Quantile { - if x != nil { - return x.Quantile - } - return nil -} - -func (x *Summary) GetCreatedTimestamp() *timestamppb.Timestamp { - if x != nil { - return x.CreatedTimestamp - } - return nil -} - -type Untyped struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - Value *float64 `protobuf:"fixed64,1,opt,name=value" json:"value,omitempty"` -} - -func (x *Untyped) Reset() { - *x = Untyped{} - if protoimpl.UnsafeEnabled { - mi := &file_io_prometheus_client_metrics_proto_msgTypes[5] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *Untyped) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*Untyped) ProtoMessage() {} - -func (x *Untyped) ProtoReflect() protoreflect.Message { - mi := &file_io_prometheus_client_metrics_proto_msgTypes[5] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use Untyped.ProtoReflect.Descriptor instead. 
-func (*Untyped) Descriptor() ([]byte, []int) { - return file_io_prometheus_client_metrics_proto_rawDescGZIP(), []int{5} -} - -func (x *Untyped) GetValue() float64 { - if x != nil && x.Value != nil { - return *x.Value - } - return 0 -} - -type Histogram struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - SampleCount *uint64 `protobuf:"varint,1,opt,name=sample_count,json=sampleCount" json:"sample_count,omitempty"` - SampleCountFloat *float64 `protobuf:"fixed64,4,opt,name=sample_count_float,json=sampleCountFloat" json:"sample_count_float,omitempty"` // Overrides sample_count if > 0. - SampleSum *float64 `protobuf:"fixed64,2,opt,name=sample_sum,json=sampleSum" json:"sample_sum,omitempty"` - // Buckets for the conventional histogram. - Bucket []*Bucket `protobuf:"bytes,3,rep,name=bucket" json:"bucket,omitempty"` // Ordered in increasing order of upper_bound, +Inf bucket is optional. - CreatedTimestamp *timestamppb.Timestamp `protobuf:"bytes,15,opt,name=created_timestamp,json=createdTimestamp" json:"created_timestamp,omitempty"` - // schema defines the bucket schema. Currently, valid numbers are -4 <= n <= 8. - // They are all for base-2 bucket schemas, where 1 is a bucket boundary in each case, and - // then each power of two is divided into 2^n logarithmic buckets. - // Or in other words, each bucket boundary is the previous boundary times 2^(2^-n). - // In the future, more bucket schemas may be added using numbers < -4 or > 8. - Schema *int32 `protobuf:"zigzag32,5,opt,name=schema" json:"schema,omitempty"` - ZeroThreshold *float64 `protobuf:"fixed64,6,opt,name=zero_threshold,json=zeroThreshold" json:"zero_threshold,omitempty"` // Breadth of the zero bucket. - ZeroCount *uint64 `protobuf:"varint,7,opt,name=zero_count,json=zeroCount" json:"zero_count,omitempty"` // Count in zero bucket. - ZeroCountFloat *float64 `protobuf:"fixed64,8,opt,name=zero_count_float,json=zeroCountFloat" json:"zero_count_float,omitempty"` // Overrides sb_zero_count if > 0. - // Negative buckets for the native histogram. - NegativeSpan []*BucketSpan `protobuf:"bytes,9,rep,name=negative_span,json=negativeSpan" json:"negative_span,omitempty"` - // Use either "negative_delta" or "negative_count", the former for - // regular histograms with integer counts, the latter for float - // histograms. - NegativeDelta []int64 `protobuf:"zigzag64,10,rep,name=negative_delta,json=negativeDelta" json:"negative_delta,omitempty"` // Count delta of each bucket compared to previous one (or to zero for 1st bucket). - NegativeCount []float64 `protobuf:"fixed64,11,rep,name=negative_count,json=negativeCount" json:"negative_count,omitempty"` // Absolute count of each bucket. - // Positive buckets for the native histogram. - // Use a no-op span (offset 0, length 0) for a native histogram without any - // observations yet and with a zero_threshold of 0. Otherwise, it would be - // indistinguishable from a classic histogram. - PositiveSpan []*BucketSpan `protobuf:"bytes,12,rep,name=positive_span,json=positiveSpan" json:"positive_span,omitempty"` - // Use either "positive_delta" or "positive_count", the former for - // regular histograms with integer counts, the latter for float - // histograms. - PositiveDelta []int64 `protobuf:"zigzag64,13,rep,name=positive_delta,json=positiveDelta" json:"positive_delta,omitempty"` // Count delta of each bucket compared to previous one (or to zero for 1st bucket). 
- PositiveCount []float64 `protobuf:"fixed64,14,rep,name=positive_count,json=positiveCount" json:"positive_count,omitempty"` // Absolute count of each bucket. - // Only used for native histograms. These exemplars MUST have a timestamp. - Exemplars []*Exemplar `protobuf:"bytes,16,rep,name=exemplars" json:"exemplars,omitempty"` -} - -func (x *Histogram) Reset() { - *x = Histogram{} - if protoimpl.UnsafeEnabled { - mi := &file_io_prometheus_client_metrics_proto_msgTypes[6] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *Histogram) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*Histogram) ProtoMessage() {} - -func (x *Histogram) ProtoReflect() protoreflect.Message { - mi := &file_io_prometheus_client_metrics_proto_msgTypes[6] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use Histogram.ProtoReflect.Descriptor instead. -func (*Histogram) Descriptor() ([]byte, []int) { - return file_io_prometheus_client_metrics_proto_rawDescGZIP(), []int{6} -} - -func (x *Histogram) GetSampleCount() uint64 { - if x != nil && x.SampleCount != nil { - return *x.SampleCount - } - return 0 -} - -func (x *Histogram) GetSampleCountFloat() float64 { - if x != nil && x.SampleCountFloat != nil { - return *x.SampleCountFloat - } - return 0 -} - -func (x *Histogram) GetSampleSum() float64 { - if x != nil && x.SampleSum != nil { - return *x.SampleSum - } - return 0 -} - -func (x *Histogram) GetBucket() []*Bucket { - if x != nil { - return x.Bucket - } - return nil -} - -func (x *Histogram) GetCreatedTimestamp() *timestamppb.Timestamp { - if x != nil { - return x.CreatedTimestamp - } - return nil -} - -func (x *Histogram) GetSchema() int32 { - if x != nil && x.Schema != nil { - return *x.Schema - } - return 0 -} - -func (x *Histogram) GetZeroThreshold() float64 { - if x != nil && x.ZeroThreshold != nil { - return *x.ZeroThreshold - } - return 0 -} - -func (x *Histogram) GetZeroCount() uint64 { - if x != nil && x.ZeroCount != nil { - return *x.ZeroCount - } - return 0 -} - -func (x *Histogram) GetZeroCountFloat() float64 { - if x != nil && x.ZeroCountFloat != nil { - return *x.ZeroCountFloat - } - return 0 -} - -func (x *Histogram) GetNegativeSpan() []*BucketSpan { - if x != nil { - return x.NegativeSpan - } - return nil -} - -func (x *Histogram) GetNegativeDelta() []int64 { - if x != nil { - return x.NegativeDelta - } - return nil -} - -func (x *Histogram) GetNegativeCount() []float64 { - if x != nil { - return x.NegativeCount - } - return nil -} - -func (x *Histogram) GetPositiveSpan() []*BucketSpan { - if x != nil { - return x.PositiveSpan - } - return nil -} - -func (x *Histogram) GetPositiveDelta() []int64 { - if x != nil { - return x.PositiveDelta - } - return nil -} - -func (x *Histogram) GetPositiveCount() []float64 { - if x != nil { - return x.PositiveCount - } - return nil -} - -func (x *Histogram) GetExemplars() []*Exemplar { - if x != nil { - return x.Exemplars - } - return nil -} - -// A Bucket of a conventional histogram, each of which is treated as -// an individual counter-like time series by Prometheus. 
-type Bucket struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - CumulativeCount *uint64 `protobuf:"varint,1,opt,name=cumulative_count,json=cumulativeCount" json:"cumulative_count,omitempty"` // Cumulative in increasing order. - CumulativeCountFloat *float64 `protobuf:"fixed64,4,opt,name=cumulative_count_float,json=cumulativeCountFloat" json:"cumulative_count_float,omitempty"` // Overrides cumulative_count if > 0. - UpperBound *float64 `protobuf:"fixed64,2,opt,name=upper_bound,json=upperBound" json:"upper_bound,omitempty"` // Inclusive. - Exemplar *Exemplar `protobuf:"bytes,3,opt,name=exemplar" json:"exemplar,omitempty"` -} - -func (x *Bucket) Reset() { - *x = Bucket{} - if protoimpl.UnsafeEnabled { - mi := &file_io_prometheus_client_metrics_proto_msgTypes[7] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *Bucket) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*Bucket) ProtoMessage() {} - -func (x *Bucket) ProtoReflect() protoreflect.Message { - mi := &file_io_prometheus_client_metrics_proto_msgTypes[7] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use Bucket.ProtoReflect.Descriptor instead. -func (*Bucket) Descriptor() ([]byte, []int) { - return file_io_prometheus_client_metrics_proto_rawDescGZIP(), []int{7} -} - -func (x *Bucket) GetCumulativeCount() uint64 { - if x != nil && x.CumulativeCount != nil { - return *x.CumulativeCount - } - return 0 -} - -func (x *Bucket) GetCumulativeCountFloat() float64 { - if x != nil && x.CumulativeCountFloat != nil { - return *x.CumulativeCountFloat - } - return 0 -} - -func (x *Bucket) GetUpperBound() float64 { - if x != nil && x.UpperBound != nil { - return *x.UpperBound - } - return 0 -} - -func (x *Bucket) GetExemplar() *Exemplar { - if x != nil { - return x.Exemplar - } - return nil -} - -// A BucketSpan defines a number of consecutive buckets in a native -// histogram with their offset. Logically, it would be more -// straightforward to include the bucket counts in the Span. However, -// the protobuf representation is more compact in the way the data is -// structured here (with all the buckets in a single array separate -// from the Spans). -type BucketSpan struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - Offset *int32 `protobuf:"zigzag32,1,opt,name=offset" json:"offset,omitempty"` // Gap to previous span, or starting point for 1st span (which can be negative). - Length *uint32 `protobuf:"varint,2,opt,name=length" json:"length,omitempty"` // Length of consecutive buckets. 
-} - -func (x *BucketSpan) Reset() { - *x = BucketSpan{} - if protoimpl.UnsafeEnabled { - mi := &file_io_prometheus_client_metrics_proto_msgTypes[8] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *BucketSpan) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*BucketSpan) ProtoMessage() {} - -func (x *BucketSpan) ProtoReflect() protoreflect.Message { - mi := &file_io_prometheus_client_metrics_proto_msgTypes[8] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use BucketSpan.ProtoReflect.Descriptor instead. -func (*BucketSpan) Descriptor() ([]byte, []int) { - return file_io_prometheus_client_metrics_proto_rawDescGZIP(), []int{8} -} - -func (x *BucketSpan) GetOffset() int32 { - if x != nil && x.Offset != nil { - return *x.Offset - } - return 0 -} - -func (x *BucketSpan) GetLength() uint32 { - if x != nil && x.Length != nil { - return *x.Length - } - return 0 -} - -type Exemplar struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - Label []*LabelPair `protobuf:"bytes,1,rep,name=label" json:"label,omitempty"` - Value *float64 `protobuf:"fixed64,2,opt,name=value" json:"value,omitempty"` - Timestamp *timestamppb.Timestamp `protobuf:"bytes,3,opt,name=timestamp" json:"timestamp,omitempty"` // OpenMetrics-style. -} - -func (x *Exemplar) Reset() { - *x = Exemplar{} - if protoimpl.UnsafeEnabled { - mi := &file_io_prometheus_client_metrics_proto_msgTypes[9] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *Exemplar) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*Exemplar) ProtoMessage() {} - -func (x *Exemplar) ProtoReflect() protoreflect.Message { - mi := &file_io_prometheus_client_metrics_proto_msgTypes[9] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use Exemplar.ProtoReflect.Descriptor instead. 
-func (*Exemplar) Descriptor() ([]byte, []int) { - return file_io_prometheus_client_metrics_proto_rawDescGZIP(), []int{9} -} - -func (x *Exemplar) GetLabel() []*LabelPair { - if x != nil { - return x.Label - } - return nil -} - -func (x *Exemplar) GetValue() float64 { - if x != nil && x.Value != nil { - return *x.Value - } - return 0 -} - -func (x *Exemplar) GetTimestamp() *timestamppb.Timestamp { - if x != nil { - return x.Timestamp - } - return nil -} - -type Metric struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - Label []*LabelPair `protobuf:"bytes,1,rep,name=label" json:"label,omitempty"` - Gauge *Gauge `protobuf:"bytes,2,opt,name=gauge" json:"gauge,omitempty"` - Counter *Counter `protobuf:"bytes,3,opt,name=counter" json:"counter,omitempty"` - Summary *Summary `protobuf:"bytes,4,opt,name=summary" json:"summary,omitempty"` - Untyped *Untyped `protobuf:"bytes,5,opt,name=untyped" json:"untyped,omitempty"` - Histogram *Histogram `protobuf:"bytes,7,opt,name=histogram" json:"histogram,omitempty"` - TimestampMs *int64 `protobuf:"varint,6,opt,name=timestamp_ms,json=timestampMs" json:"timestamp_ms,omitempty"` -} - -func (x *Metric) Reset() { - *x = Metric{} - if protoimpl.UnsafeEnabled { - mi := &file_io_prometheus_client_metrics_proto_msgTypes[10] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *Metric) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*Metric) ProtoMessage() {} - -func (x *Metric) ProtoReflect() protoreflect.Message { - mi := &file_io_prometheus_client_metrics_proto_msgTypes[10] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use Metric.ProtoReflect.Descriptor instead. 
-func (*Metric) Descriptor() ([]byte, []int) { - return file_io_prometheus_client_metrics_proto_rawDescGZIP(), []int{10} -} - -func (x *Metric) GetLabel() []*LabelPair { - if x != nil { - return x.Label - } - return nil -} - -func (x *Metric) GetGauge() *Gauge { - if x != nil { - return x.Gauge - } - return nil -} - -func (x *Metric) GetCounter() *Counter { - if x != nil { - return x.Counter - } - return nil -} - -func (x *Metric) GetSummary() *Summary { - if x != nil { - return x.Summary - } - return nil -} - -func (x *Metric) GetUntyped() *Untyped { - if x != nil { - return x.Untyped - } - return nil -} - -func (x *Metric) GetHistogram() *Histogram { - if x != nil { - return x.Histogram - } - return nil -} - -func (x *Metric) GetTimestampMs() int64 { - if x != nil && x.TimestampMs != nil { - return *x.TimestampMs - } - return 0 -} - -type MetricFamily struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - Name *string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` - Help *string `protobuf:"bytes,2,opt,name=help" json:"help,omitempty"` - Type *MetricType `protobuf:"varint,3,opt,name=type,enum=io.prometheus.client.MetricType" json:"type,omitempty"` - Metric []*Metric `protobuf:"bytes,4,rep,name=metric" json:"metric,omitempty"` - Unit *string `protobuf:"bytes,5,opt,name=unit" json:"unit,omitempty"` -} - -func (x *MetricFamily) Reset() { - *x = MetricFamily{} - if protoimpl.UnsafeEnabled { - mi := &file_io_prometheus_client_metrics_proto_msgTypes[11] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *MetricFamily) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*MetricFamily) ProtoMessage() {} - -func (x *MetricFamily) ProtoReflect() protoreflect.Message { - mi := &file_io_prometheus_client_metrics_proto_msgTypes[11] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use MetricFamily.ProtoReflect.Descriptor instead. 
-func (*MetricFamily) Descriptor() ([]byte, []int) { - return file_io_prometheus_client_metrics_proto_rawDescGZIP(), []int{11} -} - -func (x *MetricFamily) GetName() string { - if x != nil && x.Name != nil { - return *x.Name - } - return "" -} - -func (x *MetricFamily) GetHelp() string { - if x != nil && x.Help != nil { - return *x.Help - } - return "" -} - -func (x *MetricFamily) GetType() MetricType { - if x != nil && x.Type != nil { - return *x.Type - } - return MetricType_COUNTER -} - -func (x *MetricFamily) GetMetric() []*Metric { - if x != nil { - return x.Metric - } - return nil -} - -func (x *MetricFamily) GetUnit() string { - if x != nil && x.Unit != nil { - return *x.Unit - } - return "" -} - -var File_io_prometheus_client_metrics_proto protoreflect.FileDescriptor - -var file_io_prometheus_client_metrics_proto_rawDesc = []byte{ - 0x0a, 0x22, 0x69, 0x6f, 0x2f, 0x70, 0x72, 0x6f, 0x6d, 0x65, 0x74, 0x68, 0x65, 0x75, 0x73, 0x2f, - 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x2f, 0x6d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x2e, 0x70, - 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x14, 0x69, 0x6f, 0x2e, 0x70, 0x72, 0x6f, 0x6d, 0x65, 0x74, 0x68, - 0x65, 0x75, 0x73, 0x2e, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x1a, 0x1f, 0x67, 0x6f, 0x6f, 0x67, - 0x6c, 0x65, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2f, 0x74, 0x69, 0x6d, 0x65, - 0x73, 0x74, 0x61, 0x6d, 0x70, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x22, 0x35, 0x0a, 0x09, 0x4c, - 0x61, 0x62, 0x65, 0x6c, 0x50, 0x61, 0x69, 0x72, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, - 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x14, 0x0a, 0x05, - 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x76, 0x61, 0x6c, - 0x75, 0x65, 0x22, 0x1d, 0x0a, 0x05, 0x47, 0x61, 0x75, 0x67, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x76, - 0x61, 0x6c, 0x75, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x01, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, - 0x65, 0x22, 0xa4, 0x01, 0x0a, 0x07, 0x43, 0x6f, 0x75, 0x6e, 0x74, 0x65, 0x72, 0x12, 0x14, 0x0a, - 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x01, 0x52, 0x05, 0x76, 0x61, - 0x6c, 0x75, 0x65, 0x12, 0x3a, 0x0a, 0x08, 0x65, 0x78, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x72, 0x18, - 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1e, 0x2e, 0x69, 0x6f, 0x2e, 0x70, 0x72, 0x6f, 0x6d, 0x65, - 0x74, 0x68, 0x65, 0x75, 0x73, 0x2e, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x2e, 0x45, 0x78, 0x65, - 0x6d, 0x70, 0x6c, 0x61, 0x72, 0x52, 0x08, 0x65, 0x78, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x72, 0x12, - 0x47, 0x0a, 0x11, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x64, 0x5f, 0x74, 0x69, 0x6d, 0x65, 0x73, - 0x74, 0x61, 0x6d, 0x70, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, - 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, 0x6d, - 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, 0x10, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x64, 0x54, - 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x22, 0x3c, 0x0a, 0x08, 0x51, 0x75, 0x61, 0x6e, - 0x74, 0x69, 0x6c, 0x65, 0x12, 0x1a, 0x0a, 0x08, 0x71, 0x75, 0x61, 0x6e, 0x74, 0x69, 0x6c, 0x65, - 0x18, 0x01, 0x20, 0x01, 0x28, 0x01, 0x52, 0x08, 0x71, 0x75, 0x61, 0x6e, 0x74, 0x69, 0x6c, 0x65, - 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x01, 0x52, - 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x22, 0xd0, 0x01, 0x0a, 0x07, 0x53, 0x75, 0x6d, 0x6d, 0x61, - 0x72, 0x79, 0x12, 0x21, 0x0a, 0x0c, 0x73, 0x61, 0x6d, 0x70, 0x6c, 0x65, 0x5f, 0x63, 0x6f, 0x75, - 0x6e, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, 0x52, 
0x0b, 0x73, 0x61, 0x6d, 0x70, 0x6c, 0x65, - 0x43, 0x6f, 0x75, 0x6e, 0x74, 0x12, 0x1d, 0x0a, 0x0a, 0x73, 0x61, 0x6d, 0x70, 0x6c, 0x65, 0x5f, - 0x73, 0x75, 0x6d, 0x18, 0x02, 0x20, 0x01, 0x28, 0x01, 0x52, 0x09, 0x73, 0x61, 0x6d, 0x70, 0x6c, - 0x65, 0x53, 0x75, 0x6d, 0x12, 0x3a, 0x0a, 0x08, 0x71, 0x75, 0x61, 0x6e, 0x74, 0x69, 0x6c, 0x65, - 0x18, 0x03, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1e, 0x2e, 0x69, 0x6f, 0x2e, 0x70, 0x72, 0x6f, 0x6d, - 0x65, 0x74, 0x68, 0x65, 0x75, 0x73, 0x2e, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x2e, 0x51, 0x75, - 0x61, 0x6e, 0x74, 0x69, 0x6c, 0x65, 0x52, 0x08, 0x71, 0x75, 0x61, 0x6e, 0x74, 0x69, 0x6c, 0x65, - 0x12, 0x47, 0x0a, 0x11, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x64, 0x5f, 0x74, 0x69, 0x6d, 0x65, - 0x73, 0x74, 0x61, 0x6d, 0x70, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, - 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, - 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, 0x10, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x64, - 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x22, 0x1f, 0x0a, 0x07, 0x55, 0x6e, 0x74, - 0x79, 0x70, 0x65, 0x64, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x01, 0x20, - 0x01, 0x28, 0x01, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x22, 0xea, 0x05, 0x0a, 0x09, 0x48, - 0x69, 0x73, 0x74, 0x6f, 0x67, 0x72, 0x61, 0x6d, 0x12, 0x21, 0x0a, 0x0c, 0x73, 0x61, 0x6d, 0x70, - 0x6c, 0x65, 0x5f, 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, 0x52, 0x0b, - 0x73, 0x61, 0x6d, 0x70, 0x6c, 0x65, 0x43, 0x6f, 0x75, 0x6e, 0x74, 0x12, 0x2c, 0x0a, 0x12, 0x73, - 0x61, 0x6d, 0x70, 0x6c, 0x65, 0x5f, 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x5f, 0x66, 0x6c, 0x6f, 0x61, - 0x74, 0x18, 0x04, 0x20, 0x01, 0x28, 0x01, 0x52, 0x10, 0x73, 0x61, 0x6d, 0x70, 0x6c, 0x65, 0x43, - 0x6f, 0x75, 0x6e, 0x74, 0x46, 0x6c, 0x6f, 0x61, 0x74, 0x12, 0x1d, 0x0a, 0x0a, 0x73, 0x61, 0x6d, - 0x70, 0x6c, 0x65, 0x5f, 0x73, 0x75, 0x6d, 0x18, 0x02, 0x20, 0x01, 0x28, 0x01, 0x52, 0x09, 0x73, - 0x61, 0x6d, 0x70, 0x6c, 0x65, 0x53, 0x75, 0x6d, 0x12, 0x34, 0x0a, 0x06, 0x62, 0x75, 0x63, 0x6b, - 0x65, 0x74, 0x18, 0x03, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1c, 0x2e, 0x69, 0x6f, 0x2e, 0x70, 0x72, - 0x6f, 0x6d, 0x65, 0x74, 0x68, 0x65, 0x75, 0x73, 0x2e, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x2e, - 0x42, 0x75, 0x63, 0x6b, 0x65, 0x74, 0x52, 0x06, 0x62, 0x75, 0x63, 0x6b, 0x65, 0x74, 0x12, 0x47, - 0x0a, 0x11, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x64, 0x5f, 0x74, 0x69, 0x6d, 0x65, 0x73, 0x74, - 0x61, 0x6d, 0x70, 0x18, 0x0f, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, - 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, 0x6d, 0x65, - 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, 0x10, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x64, 0x54, 0x69, - 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x12, 0x16, 0x0a, 0x06, 0x73, 0x63, 0x68, 0x65, 0x6d, - 0x61, 0x18, 0x05, 0x20, 0x01, 0x28, 0x11, 0x52, 0x06, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x12, - 0x25, 0x0a, 0x0e, 0x7a, 0x65, 0x72, 0x6f, 0x5f, 0x74, 0x68, 0x72, 0x65, 0x73, 0x68, 0x6f, 0x6c, - 0x64, 0x18, 0x06, 0x20, 0x01, 0x28, 0x01, 0x52, 0x0d, 0x7a, 0x65, 0x72, 0x6f, 0x54, 0x68, 0x72, - 0x65, 0x73, 0x68, 0x6f, 0x6c, 0x64, 0x12, 0x1d, 0x0a, 0x0a, 0x7a, 0x65, 0x72, 0x6f, 0x5f, 0x63, - 0x6f, 0x75, 0x6e, 0x74, 0x18, 0x07, 0x20, 0x01, 0x28, 0x04, 0x52, 0x09, 0x7a, 0x65, 0x72, 0x6f, - 0x43, 0x6f, 0x75, 0x6e, 0x74, 0x12, 0x28, 0x0a, 0x10, 0x7a, 0x65, 0x72, 0x6f, 0x5f, 0x63, 0x6f, - 0x75, 0x6e, 0x74, 0x5f, 0x66, 0x6c, 0x6f, 0x61, 0x74, 0x18, 0x08, 0x20, 0x01, 
0x28, 0x01, 0x52, - 0x0e, 0x7a, 0x65, 0x72, 0x6f, 0x43, 0x6f, 0x75, 0x6e, 0x74, 0x46, 0x6c, 0x6f, 0x61, 0x74, 0x12, - 0x45, 0x0a, 0x0d, 0x6e, 0x65, 0x67, 0x61, 0x74, 0x69, 0x76, 0x65, 0x5f, 0x73, 0x70, 0x61, 0x6e, - 0x18, 0x09, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x20, 0x2e, 0x69, 0x6f, 0x2e, 0x70, 0x72, 0x6f, 0x6d, - 0x65, 0x74, 0x68, 0x65, 0x75, 0x73, 0x2e, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x2e, 0x42, 0x75, - 0x63, 0x6b, 0x65, 0x74, 0x53, 0x70, 0x61, 0x6e, 0x52, 0x0c, 0x6e, 0x65, 0x67, 0x61, 0x74, 0x69, - 0x76, 0x65, 0x53, 0x70, 0x61, 0x6e, 0x12, 0x25, 0x0a, 0x0e, 0x6e, 0x65, 0x67, 0x61, 0x74, 0x69, - 0x76, 0x65, 0x5f, 0x64, 0x65, 0x6c, 0x74, 0x61, 0x18, 0x0a, 0x20, 0x03, 0x28, 0x12, 0x52, 0x0d, - 0x6e, 0x65, 0x67, 0x61, 0x74, 0x69, 0x76, 0x65, 0x44, 0x65, 0x6c, 0x74, 0x61, 0x12, 0x25, 0x0a, - 0x0e, 0x6e, 0x65, 0x67, 0x61, 0x74, 0x69, 0x76, 0x65, 0x5f, 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x18, - 0x0b, 0x20, 0x03, 0x28, 0x01, 0x52, 0x0d, 0x6e, 0x65, 0x67, 0x61, 0x74, 0x69, 0x76, 0x65, 0x43, - 0x6f, 0x75, 0x6e, 0x74, 0x12, 0x45, 0x0a, 0x0d, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x69, 0x76, 0x65, - 0x5f, 0x73, 0x70, 0x61, 0x6e, 0x18, 0x0c, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x20, 0x2e, 0x69, 0x6f, - 0x2e, 0x70, 0x72, 0x6f, 0x6d, 0x65, 0x74, 0x68, 0x65, 0x75, 0x73, 0x2e, 0x63, 0x6c, 0x69, 0x65, - 0x6e, 0x74, 0x2e, 0x42, 0x75, 0x63, 0x6b, 0x65, 0x74, 0x53, 0x70, 0x61, 0x6e, 0x52, 0x0c, 0x70, - 0x6f, 0x73, 0x69, 0x74, 0x69, 0x76, 0x65, 0x53, 0x70, 0x61, 0x6e, 0x12, 0x25, 0x0a, 0x0e, 0x70, - 0x6f, 0x73, 0x69, 0x74, 0x69, 0x76, 0x65, 0x5f, 0x64, 0x65, 0x6c, 0x74, 0x61, 0x18, 0x0d, 0x20, - 0x03, 0x28, 0x12, 0x52, 0x0d, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x69, 0x76, 0x65, 0x44, 0x65, 0x6c, - 0x74, 0x61, 0x12, 0x25, 0x0a, 0x0e, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x69, 0x76, 0x65, 0x5f, 0x63, - 0x6f, 0x75, 0x6e, 0x74, 0x18, 0x0e, 0x20, 0x03, 0x28, 0x01, 0x52, 0x0d, 0x70, 0x6f, 0x73, 0x69, - 0x74, 0x69, 0x76, 0x65, 0x43, 0x6f, 0x75, 0x6e, 0x74, 0x12, 0x3c, 0x0a, 0x09, 0x65, 0x78, 0x65, - 0x6d, 0x70, 0x6c, 0x61, 0x72, 0x73, 0x18, 0x10, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1e, 0x2e, 0x69, - 0x6f, 0x2e, 0x70, 0x72, 0x6f, 0x6d, 0x65, 0x74, 0x68, 0x65, 0x75, 0x73, 0x2e, 0x63, 0x6c, 0x69, - 0x65, 0x6e, 0x74, 0x2e, 0x45, 0x78, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x72, 0x52, 0x09, 0x65, 0x78, - 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x72, 0x73, 0x22, 0xc6, 0x01, 0x0a, 0x06, 0x42, 0x75, 0x63, 0x6b, - 0x65, 0x74, 0x12, 0x29, 0x0a, 0x10, 0x63, 0x75, 0x6d, 0x75, 0x6c, 0x61, 0x74, 0x69, 0x76, 0x65, - 0x5f, 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, 0x52, 0x0f, 0x63, 0x75, - 0x6d, 0x75, 0x6c, 0x61, 0x74, 0x69, 0x76, 0x65, 0x43, 0x6f, 0x75, 0x6e, 0x74, 0x12, 0x34, 0x0a, - 0x16, 0x63, 0x75, 0x6d, 0x75, 0x6c, 0x61, 0x74, 0x69, 0x76, 0x65, 0x5f, 0x63, 0x6f, 0x75, 0x6e, - 0x74, 0x5f, 0x66, 0x6c, 0x6f, 0x61, 0x74, 0x18, 0x04, 0x20, 0x01, 0x28, 0x01, 0x52, 0x14, 0x63, - 0x75, 0x6d, 0x75, 0x6c, 0x61, 0x74, 0x69, 0x76, 0x65, 0x43, 0x6f, 0x75, 0x6e, 0x74, 0x46, 0x6c, - 0x6f, 0x61, 0x74, 0x12, 0x1f, 0x0a, 0x0b, 0x75, 0x70, 0x70, 0x65, 0x72, 0x5f, 0x62, 0x6f, 0x75, - 0x6e, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x01, 0x52, 0x0a, 0x75, 0x70, 0x70, 0x65, 0x72, 0x42, - 0x6f, 0x75, 0x6e, 0x64, 0x12, 0x3a, 0x0a, 0x08, 0x65, 0x78, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x72, - 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1e, 0x2e, 0x69, 0x6f, 0x2e, 0x70, 0x72, 0x6f, 0x6d, - 0x65, 0x74, 0x68, 0x65, 0x75, 0x73, 0x2e, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x2e, 0x45, 0x78, - 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x72, 0x52, 0x08, 0x65, 0x78, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x72, - 0x22, 
0x3c, 0x0a, 0x0a, 0x42, 0x75, 0x63, 0x6b, 0x65, 0x74, 0x53, 0x70, 0x61, 0x6e, 0x12, 0x16, - 0x0a, 0x06, 0x6f, 0x66, 0x66, 0x73, 0x65, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x11, 0x52, 0x06, - 0x6f, 0x66, 0x66, 0x73, 0x65, 0x74, 0x12, 0x16, 0x0a, 0x06, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, - 0x18, 0x02, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x06, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x22, 0x91, - 0x01, 0x0a, 0x08, 0x45, 0x78, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x72, 0x12, 0x35, 0x0a, 0x05, 0x6c, - 0x61, 0x62, 0x65, 0x6c, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1f, 0x2e, 0x69, 0x6f, 0x2e, - 0x70, 0x72, 0x6f, 0x6d, 0x65, 0x74, 0x68, 0x65, 0x75, 0x73, 0x2e, 0x63, 0x6c, 0x69, 0x65, 0x6e, - 0x74, 0x2e, 0x4c, 0x61, 0x62, 0x65, 0x6c, 0x50, 0x61, 0x69, 0x72, 0x52, 0x05, 0x6c, 0x61, 0x62, - 0x65, 0x6c, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, - 0x01, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x12, 0x38, 0x0a, 0x09, 0x74, 0x69, 0x6d, 0x65, - 0x73, 0x74, 0x61, 0x6d, 0x70, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, - 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, - 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, 0x09, 0x74, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, - 0x6d, 0x70, 0x22, 0xff, 0x02, 0x0a, 0x06, 0x4d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x12, 0x35, 0x0a, - 0x05, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1f, 0x2e, 0x69, - 0x6f, 0x2e, 0x70, 0x72, 0x6f, 0x6d, 0x65, 0x74, 0x68, 0x65, 0x75, 0x73, 0x2e, 0x63, 0x6c, 0x69, - 0x65, 0x6e, 0x74, 0x2e, 0x4c, 0x61, 0x62, 0x65, 0x6c, 0x50, 0x61, 0x69, 0x72, 0x52, 0x05, 0x6c, - 0x61, 0x62, 0x65, 0x6c, 0x12, 0x31, 0x0a, 0x05, 0x67, 0x61, 0x75, 0x67, 0x65, 0x18, 0x02, 0x20, - 0x01, 0x28, 0x0b, 0x32, 0x1b, 0x2e, 0x69, 0x6f, 0x2e, 0x70, 0x72, 0x6f, 0x6d, 0x65, 0x74, 0x68, - 0x65, 0x75, 0x73, 0x2e, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x2e, 0x47, 0x61, 0x75, 0x67, 0x65, - 0x52, 0x05, 0x67, 0x61, 0x75, 0x67, 0x65, 0x12, 0x37, 0x0a, 0x07, 0x63, 0x6f, 0x75, 0x6e, 0x74, - 0x65, 0x72, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1d, 0x2e, 0x69, 0x6f, 0x2e, 0x70, 0x72, - 0x6f, 0x6d, 0x65, 0x74, 0x68, 0x65, 0x75, 0x73, 0x2e, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x2e, - 0x43, 0x6f, 0x75, 0x6e, 0x74, 0x65, 0x72, 0x52, 0x07, 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x65, 0x72, - 0x12, 0x37, 0x0a, 0x07, 0x73, 0x75, 0x6d, 0x6d, 0x61, 0x72, 0x79, 0x18, 0x04, 0x20, 0x01, 0x28, - 0x0b, 0x32, 0x1d, 0x2e, 0x69, 0x6f, 0x2e, 0x70, 0x72, 0x6f, 0x6d, 0x65, 0x74, 0x68, 0x65, 0x75, - 0x73, 0x2e, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x2e, 0x53, 0x75, 0x6d, 0x6d, 0x61, 0x72, 0x79, - 0x52, 0x07, 0x73, 0x75, 0x6d, 0x6d, 0x61, 0x72, 0x79, 0x12, 0x37, 0x0a, 0x07, 0x75, 0x6e, 0x74, - 0x79, 0x70, 0x65, 0x64, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1d, 0x2e, 0x69, 0x6f, 0x2e, - 0x70, 0x72, 0x6f, 0x6d, 0x65, 0x74, 0x68, 0x65, 0x75, 0x73, 0x2e, 0x63, 0x6c, 0x69, 0x65, 0x6e, - 0x74, 0x2e, 0x55, 0x6e, 0x74, 0x79, 0x70, 0x65, 0x64, 0x52, 0x07, 0x75, 0x6e, 0x74, 0x79, 0x70, - 0x65, 0x64, 0x12, 0x3d, 0x0a, 0x09, 0x68, 0x69, 0x73, 0x74, 0x6f, 0x67, 0x72, 0x61, 0x6d, 0x18, - 0x07, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1f, 0x2e, 0x69, 0x6f, 0x2e, 0x70, 0x72, 0x6f, 0x6d, 0x65, - 0x74, 0x68, 0x65, 0x75, 0x73, 0x2e, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x2e, 0x48, 0x69, 0x73, - 0x74, 0x6f, 0x67, 0x72, 0x61, 0x6d, 0x52, 0x09, 0x68, 0x69, 0x73, 0x74, 0x6f, 0x67, 0x72, 0x61, - 0x6d, 0x12, 0x21, 0x0a, 0x0c, 0x74, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x5f, 0x6d, - 0x73, 0x18, 0x06, 0x20, 0x01, 
0x28, 0x03, 0x52, 0x0b, 0x74, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, - 0x6d, 0x70, 0x4d, 0x73, 0x22, 0xb6, 0x01, 0x0a, 0x0c, 0x4d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x46, - 0x61, 0x6d, 0x69, 0x6c, 0x79, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, - 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x68, 0x65, 0x6c, - 0x70, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x68, 0x65, 0x6c, 0x70, 0x12, 0x34, 0x0a, - 0x04, 0x74, 0x79, 0x70, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x20, 0x2e, 0x69, 0x6f, - 0x2e, 0x70, 0x72, 0x6f, 0x6d, 0x65, 0x74, 0x68, 0x65, 0x75, 0x73, 0x2e, 0x63, 0x6c, 0x69, 0x65, - 0x6e, 0x74, 0x2e, 0x4d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x54, 0x79, 0x70, 0x65, 0x52, 0x04, 0x74, - 0x79, 0x70, 0x65, 0x12, 0x34, 0x0a, 0x06, 0x6d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x18, 0x04, 0x20, - 0x03, 0x28, 0x0b, 0x32, 0x1c, 0x2e, 0x69, 0x6f, 0x2e, 0x70, 0x72, 0x6f, 0x6d, 0x65, 0x74, 0x68, - 0x65, 0x75, 0x73, 0x2e, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x2e, 0x4d, 0x65, 0x74, 0x72, 0x69, - 0x63, 0x52, 0x06, 0x6d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x12, 0x12, 0x0a, 0x04, 0x75, 0x6e, 0x69, - 0x74, 0x18, 0x05, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x75, 0x6e, 0x69, 0x74, 0x2a, 0x62, 0x0a, - 0x0a, 0x4d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x54, 0x79, 0x70, 0x65, 0x12, 0x0b, 0x0a, 0x07, 0x43, - 0x4f, 0x55, 0x4e, 0x54, 0x45, 0x52, 0x10, 0x00, 0x12, 0x09, 0x0a, 0x05, 0x47, 0x41, 0x55, 0x47, - 0x45, 0x10, 0x01, 0x12, 0x0b, 0x0a, 0x07, 0x53, 0x55, 0x4d, 0x4d, 0x41, 0x52, 0x59, 0x10, 0x02, - 0x12, 0x0b, 0x0a, 0x07, 0x55, 0x4e, 0x54, 0x59, 0x50, 0x45, 0x44, 0x10, 0x03, 0x12, 0x0d, 0x0a, - 0x09, 0x48, 0x49, 0x53, 0x54, 0x4f, 0x47, 0x52, 0x41, 0x4d, 0x10, 0x04, 0x12, 0x13, 0x0a, 0x0f, - 0x47, 0x41, 0x55, 0x47, 0x45, 0x5f, 0x48, 0x49, 0x53, 0x54, 0x4f, 0x47, 0x52, 0x41, 0x4d, 0x10, - 0x05, 0x42, 0x52, 0x0a, 0x14, 0x69, 0x6f, 0x2e, 0x70, 0x72, 0x6f, 0x6d, 0x65, 0x74, 0x68, 0x65, - 0x75, 0x73, 0x2e, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x5a, 0x3a, 0x67, 0x69, 0x74, 0x68, 0x75, - 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x70, 0x72, 0x6f, 0x6d, 0x65, 0x74, 0x68, 0x65, 0x75, 0x73, - 0x2f, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x5f, 0x6d, 0x6f, 0x64, 0x65, 0x6c, 0x2f, 0x67, 0x6f, - 0x3b, 0x69, 0x6f, 0x5f, 0x70, 0x72, 0x6f, 0x6d, 0x65, 0x74, 0x68, 0x65, 0x75, 0x73, 0x5f, 0x63, - 0x6c, 0x69, 0x65, 0x6e, 0x74, -} - -var ( - file_io_prometheus_client_metrics_proto_rawDescOnce sync.Once - file_io_prometheus_client_metrics_proto_rawDescData = file_io_prometheus_client_metrics_proto_rawDesc -) - -func file_io_prometheus_client_metrics_proto_rawDescGZIP() []byte { - file_io_prometheus_client_metrics_proto_rawDescOnce.Do(func() { - file_io_prometheus_client_metrics_proto_rawDescData = protoimpl.X.CompressGZIP(file_io_prometheus_client_metrics_proto_rawDescData) - }) - return file_io_prometheus_client_metrics_proto_rawDescData -} - -var file_io_prometheus_client_metrics_proto_enumTypes = make([]protoimpl.EnumInfo, 1) -var file_io_prometheus_client_metrics_proto_msgTypes = make([]protoimpl.MessageInfo, 12) -var file_io_prometheus_client_metrics_proto_goTypes = []interface{}{ - (MetricType)(0), // 0: io.prometheus.client.MetricType - (*LabelPair)(nil), // 1: io.prometheus.client.LabelPair - (*Gauge)(nil), // 2: io.prometheus.client.Gauge - (*Counter)(nil), // 3: io.prometheus.client.Counter - (*Quantile)(nil), // 4: io.prometheus.client.Quantile - (*Summary)(nil), // 5: io.prometheus.client.Summary - (*Untyped)(nil), // 6: io.prometheus.client.Untyped - (*Histogram)(nil), // 7: 
io.prometheus.client.Histogram - (*Bucket)(nil), // 8: io.prometheus.client.Bucket - (*BucketSpan)(nil), // 9: io.prometheus.client.BucketSpan - (*Exemplar)(nil), // 10: io.prometheus.client.Exemplar - (*Metric)(nil), // 11: io.prometheus.client.Metric - (*MetricFamily)(nil), // 12: io.prometheus.client.MetricFamily - (*timestamppb.Timestamp)(nil), // 13: google.protobuf.Timestamp -} -var file_io_prometheus_client_metrics_proto_depIdxs = []int32{ - 10, // 0: io.prometheus.client.Counter.exemplar:type_name -> io.prometheus.client.Exemplar - 13, // 1: io.prometheus.client.Counter.created_timestamp:type_name -> google.protobuf.Timestamp - 4, // 2: io.prometheus.client.Summary.quantile:type_name -> io.prometheus.client.Quantile - 13, // 3: io.prometheus.client.Summary.created_timestamp:type_name -> google.protobuf.Timestamp - 8, // 4: io.prometheus.client.Histogram.bucket:type_name -> io.prometheus.client.Bucket - 13, // 5: io.prometheus.client.Histogram.created_timestamp:type_name -> google.protobuf.Timestamp - 9, // 6: io.prometheus.client.Histogram.negative_span:type_name -> io.prometheus.client.BucketSpan - 9, // 7: io.prometheus.client.Histogram.positive_span:type_name -> io.prometheus.client.BucketSpan - 10, // 8: io.prometheus.client.Histogram.exemplars:type_name -> io.prometheus.client.Exemplar - 10, // 9: io.prometheus.client.Bucket.exemplar:type_name -> io.prometheus.client.Exemplar - 1, // 10: io.prometheus.client.Exemplar.label:type_name -> io.prometheus.client.LabelPair - 13, // 11: io.prometheus.client.Exemplar.timestamp:type_name -> google.protobuf.Timestamp - 1, // 12: io.prometheus.client.Metric.label:type_name -> io.prometheus.client.LabelPair - 2, // 13: io.prometheus.client.Metric.gauge:type_name -> io.prometheus.client.Gauge - 3, // 14: io.prometheus.client.Metric.counter:type_name -> io.prometheus.client.Counter - 5, // 15: io.prometheus.client.Metric.summary:type_name -> io.prometheus.client.Summary - 6, // 16: io.prometheus.client.Metric.untyped:type_name -> io.prometheus.client.Untyped - 7, // 17: io.prometheus.client.Metric.histogram:type_name -> io.prometheus.client.Histogram - 0, // 18: io.prometheus.client.MetricFamily.type:type_name -> io.prometheus.client.MetricType - 11, // 19: io.prometheus.client.MetricFamily.metric:type_name -> io.prometheus.client.Metric - 20, // [20:20] is the sub-list for method output_type - 20, // [20:20] is the sub-list for method input_type - 20, // [20:20] is the sub-list for extension type_name - 20, // [20:20] is the sub-list for extension extendee - 0, // [0:20] is the sub-list for field type_name -} - -func init() { file_io_prometheus_client_metrics_proto_init() } -func file_io_prometheus_client_metrics_proto_init() { - if File_io_prometheus_client_metrics_proto != nil { - return - } - if !protoimpl.UnsafeEnabled { - file_io_prometheus_client_metrics_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*LabelPair); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_io_prometheus_client_metrics_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*Gauge); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_io_prometheus_client_metrics_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*Counter); i { - case 0: - return &v.state - case 1: - return &v.sizeCache 
- case 2: - return &v.unknownFields - default: - return nil - } - } - file_io_prometheus_client_metrics_proto_msgTypes[3].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*Quantile); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_io_prometheus_client_metrics_proto_msgTypes[4].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*Summary); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_io_prometheus_client_metrics_proto_msgTypes[5].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*Untyped); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_io_prometheus_client_metrics_proto_msgTypes[6].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*Histogram); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_io_prometheus_client_metrics_proto_msgTypes[7].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*Bucket); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_io_prometheus_client_metrics_proto_msgTypes[8].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*BucketSpan); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_io_prometheus_client_metrics_proto_msgTypes[9].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*Exemplar); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_io_prometheus_client_metrics_proto_msgTypes[10].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*Metric); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_io_prometheus_client_metrics_proto_msgTypes[11].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*MetricFamily); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - } - type x struct{} - out := protoimpl.TypeBuilder{ - File: protoimpl.DescBuilder{ - GoPackagePath: reflect.TypeOf(x{}).PkgPath(), - RawDescriptor: file_io_prometheus_client_metrics_proto_rawDesc, - NumEnums: 1, - NumMessages: 12, - NumExtensions: 0, - NumServices: 0, - }, - GoTypes: file_io_prometheus_client_metrics_proto_goTypes, - DependencyIndexes: file_io_prometheus_client_metrics_proto_depIdxs, - EnumInfos: file_io_prometheus_client_metrics_proto_enumTypes, - MessageInfos: file_io_prometheus_client_metrics_proto_msgTypes, - }.Build() - File_io_prometheus_client_metrics_proto = out.File - file_io_prometheus_client_metrics_proto_rawDesc = nil - file_io_prometheus_client_metrics_proto_goTypes = nil - file_io_prometheus_client_metrics_proto_depIdxs = nil -} diff --git a/vendor/github.com/prometheus/common/LICENSE b/vendor/github.com/prometheus/common/LICENSE deleted file mode 100644 index 261eeb9e9..000000000 --- a/vendor/github.com/prometheus/common/LICENSE +++ /dev/null @@ -1,201 +0,0 @@ - Apache License - Version 2.0, January 2004 - 
http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. 
Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. 
Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright [yyyy] [name of copyright owner] - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. 
- You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. diff --git a/vendor/github.com/prometheus/common/NOTICE b/vendor/github.com/prometheus/common/NOTICE deleted file mode 100644 index 636a2c1a5..000000000 --- a/vendor/github.com/prometheus/common/NOTICE +++ /dev/null @@ -1,5 +0,0 @@ -Common libraries shared by Prometheus Go components. -Copyright 2015 The Prometheus Authors - -This product includes software developed at -SoundCloud Ltd. (http://soundcloud.com/). diff --git a/vendor/github.com/prometheus/common/expfmt/decode.go b/vendor/github.com/prometheus/common/expfmt/decode.go deleted file mode 100644 index b2b89b017..000000000 --- a/vendor/github.com/prometheus/common/expfmt/decode.go +++ /dev/null @@ -1,431 +0,0 @@ -// Copyright 2015 The Prometheus Authors -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package expfmt - -import ( - "bufio" - "fmt" - "io" - "math" - "mime" - "net/http" - - dto "github.com/prometheus/client_model/go" - "google.golang.org/protobuf/encoding/protodelim" - - "github.com/prometheus/common/model" -) - -// Decoder types decode an input stream into metric families. -type Decoder interface { - Decode(*dto.MetricFamily) error -} - -// DecodeOptions contains options used by the Decoder and in sample extraction. -type DecodeOptions struct { - // Timestamp is added to each value from the stream that has no explicit timestamp set. - Timestamp model.Time -} - -// ResponseFormat extracts the correct format from a HTTP response header. -// If no matching format can be found FormatUnknown is returned. -func ResponseFormat(h http.Header) Format { - ct := h.Get(hdrContentType) - - mediatype, params, err := mime.ParseMediaType(ct) - if err != nil { - return fmtUnknown - } - - const textType = "text/plain" - - switch mediatype { - case ProtoType: - if p, ok := params["proto"]; ok && p != ProtoProtocol { - return fmtUnknown - } - if e, ok := params["encoding"]; ok && e != "delimited" { - return fmtUnknown - } - return fmtProtoDelim - - case textType: - if v, ok := params["version"]; ok && v != TextVersion { - return fmtUnknown - } - return fmtText - } - - return fmtUnknown -} - -// NewDecoder returns a new decoder based on the given input format. -// If the input format does not imply otherwise, a text format decoder is returned. -func NewDecoder(r io.Reader, format Format) Decoder { - switch format.FormatType() { - case TypeProtoDelim: - return &protoDecoder{r: r} - } - return &textDecoder{r: r} -} - -// protoDecoder implements the Decoder interface for protocol buffers. -type protoDecoder struct { - r io.Reader -} - -// Decode implements the Decoder interface. 
-func (d *protoDecoder) Decode(v *dto.MetricFamily) error { - opts := protodelim.UnmarshalOptions{ - MaxSize: -1, - } - if err := opts.UnmarshalFrom(bufio.NewReader(d.r), v); err != nil { - return err - } - if !model.IsValidMetricName(model.LabelValue(v.GetName())) { - return fmt.Errorf("invalid metric name %q", v.GetName()) - } - for _, m := range v.GetMetric() { - if m == nil { - continue - } - for _, l := range m.GetLabel() { - if l == nil { - continue - } - if !model.LabelValue(l.GetValue()).IsValid() { - return fmt.Errorf("invalid label value %q", l.GetValue()) - } - if !model.LabelName(l.GetName()).IsValid() { - return fmt.Errorf("invalid label name %q", l.GetName()) - } - } - } - return nil -} - -// textDecoder implements the Decoder interface for the text protocol. -type textDecoder struct { - r io.Reader - fams map[string]*dto.MetricFamily - err error -} - -// Decode implements the Decoder interface. -func (d *textDecoder) Decode(v *dto.MetricFamily) error { - if d.err == nil { - // Read all metrics in one shot. - var p TextParser - d.fams, d.err = p.TextToMetricFamilies(d.r) - // If we don't get an error, store io.EOF for the end. - if d.err == nil { - d.err = io.EOF - } - } - // Pick off one MetricFamily per Decode until there's nothing left. - for key, fam := range d.fams { - v.Name = fam.Name - v.Help = fam.Help - v.Type = fam.Type - v.Metric = fam.Metric - delete(d.fams, key) - return nil - } - return d.err -} - -// SampleDecoder wraps a Decoder to extract samples from the metric families -// decoded by the wrapped Decoder. -type SampleDecoder struct { - Dec Decoder - Opts *DecodeOptions - - f dto.MetricFamily -} - -// Decode calls the Decode method of the wrapped Decoder and then extracts the -// samples from the decoded MetricFamily into the provided model.Vector. -func (sd *SampleDecoder) Decode(s *model.Vector) error { - err := sd.Dec.Decode(&sd.f) - if err != nil { - return err - } - *s, err = extractSamples(&sd.f, sd.Opts) - return err -} - -// ExtractSamples builds a slice of samples from the provided metric -// families. If an error occurs during sample extraction, it continues to -// extract from the remaining metric families. The returned error is the last -// error that has occurred. -func ExtractSamples(o *DecodeOptions, fams ...*dto.MetricFamily) (model.Vector, error) { - var ( - all model.Vector - lastErr error - ) - for _, f := range fams { - some, err := extractSamples(f, o) - if err != nil { - lastErr = err - continue - } - all = append(all, some...) 
- } - return all, lastErr -} - -func extractSamples(f *dto.MetricFamily, o *DecodeOptions) (model.Vector, error) { - switch f.GetType() { - case dto.MetricType_COUNTER: - return extractCounter(o, f), nil - case dto.MetricType_GAUGE: - return extractGauge(o, f), nil - case dto.MetricType_SUMMARY: - return extractSummary(o, f), nil - case dto.MetricType_UNTYPED: - return extractUntyped(o, f), nil - case dto.MetricType_HISTOGRAM: - return extractHistogram(o, f), nil - } - return nil, fmt.Errorf("expfmt.extractSamples: unknown metric family type %v", f.GetType()) -} - -func extractCounter(o *DecodeOptions, f *dto.MetricFamily) model.Vector { - samples := make(model.Vector, 0, len(f.Metric)) - - for _, m := range f.Metric { - if m.Counter == nil { - continue - } - - lset := make(model.LabelSet, len(m.Label)+1) - for _, p := range m.Label { - lset[model.LabelName(p.GetName())] = model.LabelValue(p.GetValue()) - } - lset[model.MetricNameLabel] = model.LabelValue(f.GetName()) - - smpl := &model.Sample{ - Metric: model.Metric(lset), - Value: model.SampleValue(m.Counter.GetValue()), - } - - if m.TimestampMs != nil { - smpl.Timestamp = model.TimeFromUnixNano(*m.TimestampMs * 1000000) - } else { - smpl.Timestamp = o.Timestamp - } - - samples = append(samples, smpl) - } - - return samples -} - -func extractGauge(o *DecodeOptions, f *dto.MetricFamily) model.Vector { - samples := make(model.Vector, 0, len(f.Metric)) - - for _, m := range f.Metric { - if m.Gauge == nil { - continue - } - - lset := make(model.LabelSet, len(m.Label)+1) - for _, p := range m.Label { - lset[model.LabelName(p.GetName())] = model.LabelValue(p.GetValue()) - } - lset[model.MetricNameLabel] = model.LabelValue(f.GetName()) - - smpl := &model.Sample{ - Metric: model.Metric(lset), - Value: model.SampleValue(m.Gauge.GetValue()), - } - - if m.TimestampMs != nil { - smpl.Timestamp = model.TimeFromUnixNano(*m.TimestampMs * 1000000) - } else { - smpl.Timestamp = o.Timestamp - } - - samples = append(samples, smpl) - } - - return samples -} - -func extractUntyped(o *DecodeOptions, f *dto.MetricFamily) model.Vector { - samples := make(model.Vector, 0, len(f.Metric)) - - for _, m := range f.Metric { - if m.Untyped == nil { - continue - } - - lset := make(model.LabelSet, len(m.Label)+1) - for _, p := range m.Label { - lset[model.LabelName(p.GetName())] = model.LabelValue(p.GetValue()) - } - lset[model.MetricNameLabel] = model.LabelValue(f.GetName()) - - smpl := &model.Sample{ - Metric: model.Metric(lset), - Value: model.SampleValue(m.Untyped.GetValue()), - } - - if m.TimestampMs != nil { - smpl.Timestamp = model.TimeFromUnixNano(*m.TimestampMs * 1000000) - } else { - smpl.Timestamp = o.Timestamp - } - - samples = append(samples, smpl) - } - - return samples -} - -func extractSummary(o *DecodeOptions, f *dto.MetricFamily) model.Vector { - samples := make(model.Vector, 0, len(f.Metric)) - - for _, m := range f.Metric { - if m.Summary == nil { - continue - } - - timestamp := o.Timestamp - if m.TimestampMs != nil { - timestamp = model.TimeFromUnixNano(*m.TimestampMs * 1000000) - } - - for _, q := range m.Summary.Quantile { - lset := make(model.LabelSet, len(m.Label)+2) - for _, p := range m.Label { - lset[model.LabelName(p.GetName())] = model.LabelValue(p.GetValue()) - } - // BUG(matt): Update other names to "quantile". 
- lset[model.LabelName(model.QuantileLabel)] = model.LabelValue(fmt.Sprint(q.GetQuantile())) - lset[model.MetricNameLabel] = model.LabelValue(f.GetName()) - - samples = append(samples, &model.Sample{ - Metric: model.Metric(lset), - Value: model.SampleValue(q.GetValue()), - Timestamp: timestamp, - }) - } - - lset := make(model.LabelSet, len(m.Label)+1) - for _, p := range m.Label { - lset[model.LabelName(p.GetName())] = model.LabelValue(p.GetValue()) - } - lset[model.MetricNameLabel] = model.LabelValue(f.GetName() + "_sum") - - samples = append(samples, &model.Sample{ - Metric: model.Metric(lset), - Value: model.SampleValue(m.Summary.GetSampleSum()), - Timestamp: timestamp, - }) - - lset = make(model.LabelSet, len(m.Label)+1) - for _, p := range m.Label { - lset[model.LabelName(p.GetName())] = model.LabelValue(p.GetValue()) - } - lset[model.MetricNameLabel] = model.LabelValue(f.GetName() + "_count") - - samples = append(samples, &model.Sample{ - Metric: model.Metric(lset), - Value: model.SampleValue(m.Summary.GetSampleCount()), - Timestamp: timestamp, - }) - } - - return samples -} - -func extractHistogram(o *DecodeOptions, f *dto.MetricFamily) model.Vector { - samples := make(model.Vector, 0, len(f.Metric)) - - for _, m := range f.Metric { - if m.Histogram == nil { - continue - } - - timestamp := o.Timestamp - if m.TimestampMs != nil { - timestamp = model.TimeFromUnixNano(*m.TimestampMs * 1000000) - } - - infSeen := false - - for _, q := range m.Histogram.Bucket { - lset := make(model.LabelSet, len(m.Label)+2) - for _, p := range m.Label { - lset[model.LabelName(p.GetName())] = model.LabelValue(p.GetValue()) - } - lset[model.LabelName(model.BucketLabel)] = model.LabelValue(fmt.Sprint(q.GetUpperBound())) - lset[model.MetricNameLabel] = model.LabelValue(f.GetName() + "_bucket") - - if math.IsInf(q.GetUpperBound(), +1) { - infSeen = true - } - - samples = append(samples, &model.Sample{ - Metric: model.Metric(lset), - Value: model.SampleValue(q.GetCumulativeCount()), - Timestamp: timestamp, - }) - } - - lset := make(model.LabelSet, len(m.Label)+1) - for _, p := range m.Label { - lset[model.LabelName(p.GetName())] = model.LabelValue(p.GetValue()) - } - lset[model.MetricNameLabel] = model.LabelValue(f.GetName() + "_sum") - - samples = append(samples, &model.Sample{ - Metric: model.Metric(lset), - Value: model.SampleValue(m.Histogram.GetSampleSum()), - Timestamp: timestamp, - }) - - lset = make(model.LabelSet, len(m.Label)+1) - for _, p := range m.Label { - lset[model.LabelName(p.GetName())] = model.LabelValue(p.GetValue()) - } - lset[model.MetricNameLabel] = model.LabelValue(f.GetName() + "_count") - - count := &model.Sample{ - Metric: model.Metric(lset), - Value: model.SampleValue(m.Histogram.GetSampleCount()), - Timestamp: timestamp, - } - samples = append(samples, count) - - if !infSeen { - // Append an infinity bucket sample. 
- lset := make(model.LabelSet, len(m.Label)+2) - for _, p := range m.Label { - lset[model.LabelName(p.GetName())] = model.LabelValue(p.GetValue()) - } - lset[model.LabelName(model.BucketLabel)] = model.LabelValue("+Inf") - lset[model.MetricNameLabel] = model.LabelValue(f.GetName() + "_bucket") - - samples = append(samples, &model.Sample{ - Metric: model.Metric(lset), - Value: count.Value, - Timestamp: timestamp, - }) - } - } - - return samples -} diff --git a/vendor/github.com/prometheus/common/expfmt/encode.go b/vendor/github.com/prometheus/common/expfmt/encode.go deleted file mode 100644 index 7f6cbe7d2..000000000 --- a/vendor/github.com/prometheus/common/expfmt/encode.go +++ /dev/null @@ -1,197 +0,0 @@ -// Copyright 2015 The Prometheus Authors -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package expfmt - -import ( - "fmt" - "io" - "net/http" - - "google.golang.org/protobuf/encoding/protodelim" - "google.golang.org/protobuf/encoding/prototext" - - "github.com/prometheus/common/internal/bitbucket.org/ww/goautoneg" - "github.com/prometheus/common/model" - - dto "github.com/prometheus/client_model/go" -) - -// Encoder types encode metric families into an underlying wire protocol. -type Encoder interface { - Encode(*dto.MetricFamily) error -} - -// Closer is implemented by Encoders that need to be closed to finalize -// encoding. (For example, OpenMetrics needs a final `# EOF` line.) -// -// Note that all Encoder implementations returned from this package implement -// Closer, too, even if the Close call is a no-op. This happens in preparation -// for adding a Close method to the Encoder interface directly in a (mildly -// breaking) release in the future. -type Closer interface { - Close() error -} - -type encoderCloser struct { - encode func(*dto.MetricFamily) error - close func() error -} - -func (ec encoderCloser) Encode(v *dto.MetricFamily) error { - return ec.encode(v) -} - -func (ec encoderCloser) Close() error { - return ec.close() -} - -// Negotiate returns the Content-Type based on the given Accept header. If no -// appropriate accepted type is found, FmtText is returned (which is the -// Prometheus text format). This function will never negotiate FmtOpenMetrics, -// as the support is still experimental. To include the option to negotiate -// FmtOpenMetrics, use NegotiateOpenMetrics. -func Negotiate(h http.Header) Format { - escapingScheme := Format(fmt.Sprintf("; escaping=%s", Format(model.NameEscapingScheme.String()))) - for _, ac := range goautoneg.ParseAccept(h.Get(hdrAccept)) { - if escapeParam := ac.Params[model.EscapingKey]; escapeParam != "" { - switch Format(escapeParam) { - case model.AllowUTF8, model.EscapeUnderscores, model.EscapeDots, model.EscapeValues: - escapingScheme = Format(fmt.Sprintf("; escaping=%s", escapeParam)) - default: - // If the escaping parameter is unknown, ignore it. 
- } - } - ver := ac.Params["version"] - if ac.Type+"/"+ac.SubType == ProtoType && ac.Params["proto"] == ProtoProtocol { - switch ac.Params["encoding"] { - case "delimited": - return fmtProtoDelim + escapingScheme - case "text": - return fmtProtoText + escapingScheme - case "compact-text": - return fmtProtoCompact + escapingScheme - } - } - if ac.Type == "text" && ac.SubType == "plain" && (ver == TextVersion || ver == "") { - return fmtText + escapingScheme - } - } - return fmtText + escapingScheme -} - -// NegotiateIncludingOpenMetrics works like Negotiate but includes -// FmtOpenMetrics as an option for the result. Note that this function is -// temporary and will disappear once FmtOpenMetrics is fully supported and as -// such may be negotiated by the normal Negotiate function. -func NegotiateIncludingOpenMetrics(h http.Header) Format { - escapingScheme := Format(fmt.Sprintf("; escaping=%s", Format(model.NameEscapingScheme.String()))) - for _, ac := range goautoneg.ParseAccept(h.Get(hdrAccept)) { - if escapeParam := ac.Params[model.EscapingKey]; escapeParam != "" { - switch Format(escapeParam) { - case model.AllowUTF8, model.EscapeUnderscores, model.EscapeDots, model.EscapeValues: - escapingScheme = Format(fmt.Sprintf("; escaping=%s", escapeParam)) - default: - // If the escaping parameter is unknown, ignore it. - } - } - ver := ac.Params["version"] - if ac.Type+"/"+ac.SubType == ProtoType && ac.Params["proto"] == ProtoProtocol { - switch ac.Params["encoding"] { - case "delimited": - return fmtProtoDelim + escapingScheme - case "text": - return fmtProtoText + escapingScheme - case "compact-text": - return fmtProtoCompact + escapingScheme - } - } - if ac.Type == "text" && ac.SubType == "plain" && (ver == TextVersion || ver == "") { - return fmtText + escapingScheme - } - if ac.Type+"/"+ac.SubType == OpenMetricsType && (ver == OpenMetricsVersion_0_0_1 || ver == OpenMetricsVersion_1_0_0 || ver == "") { - switch ver { - case OpenMetricsVersion_1_0_0: - return fmtOpenMetrics_1_0_0 + escapingScheme - default: - return fmtOpenMetrics_0_0_1 + escapingScheme - } - } - } - return fmtText + escapingScheme -} - -// NewEncoder returns a new encoder based on content type negotiation. All -// Encoder implementations returned by NewEncoder also implement Closer, and -// callers should always call the Close method. It is currently only required -// for FmtOpenMetrics, but a future (breaking) release will add the Close method -// to the Encoder interface directly. The current version of the Encoder -// interface is kept for backwards compatibility. -// In cases where the Format does not allow for UTF-8 names, the global -// NameEscapingScheme will be applied. -// -// NewEncoder can be called with additional options to customize the OpenMetrics text output. -// For example: -// NewEncoder(w, FmtOpenMetrics_1_0_0, WithCreatedLines()) -// -// Extra options are ignored for all other formats. 
-func NewEncoder(w io.Writer, format Format, options ...EncoderOption) Encoder { - escapingScheme := format.ToEscapingScheme() - - switch format.FormatType() { - case TypeProtoDelim: - return encoderCloser{ - encode: func(v *dto.MetricFamily) error { - _, err := protodelim.MarshalTo(w, v) - return err - }, - close: func() error { return nil }, - } - case TypeProtoCompact: - return encoderCloser{ - encode: func(v *dto.MetricFamily) error { - _, err := fmt.Fprintln(w, model.EscapeMetricFamily(v, escapingScheme).String()) - return err - }, - close: func() error { return nil }, - } - case TypeProtoText: - return encoderCloser{ - encode: func(v *dto.MetricFamily) error { - _, err := fmt.Fprintln(w, prototext.Format(model.EscapeMetricFamily(v, escapingScheme))) - return err - }, - close: func() error { return nil }, - } - case TypeTextPlain: - return encoderCloser{ - encode: func(v *dto.MetricFamily) error { - _, err := MetricFamilyToText(w, model.EscapeMetricFamily(v, escapingScheme)) - return err - }, - close: func() error { return nil }, - } - case TypeOpenMetrics: - return encoderCloser{ - encode: func(v *dto.MetricFamily) error { - _, err := MetricFamilyToOpenMetrics(w, model.EscapeMetricFamily(v, escapingScheme), options...) - return err - }, - close: func() error { - _, err := FinalizeOpenMetrics(w) - return err - }, - } - } - panic(fmt.Errorf("expfmt.NewEncoder: unknown format %q", format)) -} diff --git a/vendor/github.com/prometheus/common/expfmt/expfmt.go b/vendor/github.com/prometheus/common/expfmt/expfmt.go deleted file mode 100644 index 6fc9555e3..000000000 --- a/vendor/github.com/prometheus/common/expfmt/expfmt.go +++ /dev/null @@ -1,167 +0,0 @@ -// Copyright 2015 The Prometheus Authors -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -// Package expfmt contains tools for reading and writing Prometheus metrics. -package expfmt - -import ( - "strings" - - "github.com/prometheus/common/model" -) - -// Format specifies the HTTP content type of the different wire protocols. -type Format string - -// Constants to assemble the Content-Type values for the different wire -// protocols. The Content-Type strings here are all for the legacy exposition -// formats, where valid characters for metric names and label names are limited. -// Support for arbitrary UTF-8 characters in those names is already partially -// implemented in this module (see model.ValidationScheme), but to actually use -// it on the wire, new content-type strings will have to be agreed upon and -// added here. -const ( - TextVersion = "0.0.4" - ProtoType = `application/vnd.google.protobuf` - ProtoProtocol = `io.prometheus.client.MetricFamily` - protoFmt = ProtoType + "; proto=" + ProtoProtocol + ";" - OpenMetricsType = `application/openmetrics-text` - OpenMetricsVersion_0_0_1 = "0.0.1" - OpenMetricsVersion_1_0_0 = "1.0.0" - - // The Content-Type values for the different wire protocols. Note that these - // values are now unexported. If code was relying on comparisons to these - // constants, instead use FormatType(). 
- fmtUnknown Format = `` - fmtText Format = `text/plain; version=` + TextVersion + `; charset=utf-8` - fmtProtoDelim Format = protoFmt + ` encoding=delimited` - fmtProtoText Format = protoFmt + ` encoding=text` - fmtProtoCompact Format = protoFmt + ` encoding=compact-text` - fmtOpenMetrics_1_0_0 Format = OpenMetricsType + `; version=` + OpenMetricsVersion_1_0_0 + `; charset=utf-8` - fmtOpenMetrics_0_0_1 Format = OpenMetricsType + `; version=` + OpenMetricsVersion_0_0_1 + `; charset=utf-8` -) - -const ( - hdrContentType = "Content-Type" - hdrAccept = "Accept" -) - -// FormatType is a Go enum representing the overall category for the given -// Format. As the number of Format permutations increases, doing basic string -// comparisons are not feasible, so this enum captures the most useful -// high-level attribute of the Format string. -type FormatType int - -const ( - TypeUnknown = iota - TypeProtoCompact - TypeProtoDelim - TypeProtoText - TypeTextPlain - TypeOpenMetrics -) - -// NewFormat generates a new Format from the type provided. Mostly used for -// tests, most Formats should be generated as part of content negotiation in -// encode.go. -func NewFormat(t FormatType) Format { - switch t { - case TypeProtoCompact: - return fmtProtoCompact - case TypeProtoDelim: - return fmtProtoDelim - case TypeProtoText: - return fmtProtoText - case TypeTextPlain: - return fmtText - case TypeOpenMetrics: - return fmtOpenMetrics_1_0_0 - default: - return fmtUnknown - } -} - -// FormatType deduces an overall FormatType for the given format. -func (f Format) FormatType() FormatType { - toks := strings.Split(string(f), ";") - if len(toks) < 2 { - return TypeUnknown - } - - params := make(map[string]string) - for i, t := range toks { - if i == 0 { - continue - } - args := strings.Split(t, "=") - if len(args) != 2 { - continue - } - params[strings.TrimSpace(args[0])] = strings.TrimSpace(args[1]) - } - - switch strings.TrimSpace(toks[0]) { - case ProtoType: - if params["proto"] != ProtoProtocol { - return TypeUnknown - } - switch params["encoding"] { - case "delimited": - return TypeProtoDelim - case "text": - return TypeProtoText - case "compact-text": - return TypeProtoCompact - default: - return TypeUnknown - } - case OpenMetricsType: - if params["charset"] != "utf-8" { - return TypeUnknown - } - return TypeOpenMetrics - case "text/plain": - v, ok := params["version"] - if !ok { - return TypeTextPlain - } - if v == TextVersion { - return TypeTextPlain - } - return TypeUnknown - default: - return TypeUnknown - } -} - -// ToEscapingScheme returns an EscapingScheme depending on the Format. Iff the -// Format contains a escaping=allow-utf-8 term, it will select NoEscaping. If a valid -// "escaping" term exists, that will be used. Otherwise, the global default will -// be returned. 
-func (format Format) ToEscapingScheme() model.EscapingScheme { - for _, p := range strings.Split(string(format), ";") { - toks := strings.Split(p, "=") - if len(toks) != 2 { - continue - } - key, value := strings.TrimSpace(toks[0]), strings.TrimSpace(toks[1]) - if key == model.EscapingKey { - scheme, err := model.ToEscapingScheme(value) - if err != nil { - return model.NameEscapingScheme - } - return scheme - } - } - return model.NameEscapingScheme -} diff --git a/vendor/github.com/prometheus/common/expfmt/fuzz.go b/vendor/github.com/prometheus/common/expfmt/fuzz.go deleted file mode 100644 index dfac962a4..000000000 --- a/vendor/github.com/prometheus/common/expfmt/fuzz.go +++ /dev/null @@ -1,37 +0,0 @@ -// Copyright 2014 The Prometheus Authors -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -// Build only when actually fuzzing -//go:build gofuzz -// +build gofuzz - -package expfmt - -import "bytes" - -// Fuzz text metric parser with with github.com/dvyukov/go-fuzz: -// -// go-fuzz-build github.com/prometheus/common/expfmt -// go-fuzz -bin expfmt-fuzz.zip -workdir fuzz -// -// Further input samples should go in the folder fuzz/corpus. -func Fuzz(in []byte) int { - parser := TextParser{} - _, err := parser.TextToMetricFamilies(bytes.NewReader(in)) - - if err != nil { - return 0 - } - - return 1 -} diff --git a/vendor/github.com/prometheus/common/expfmt/openmetrics_create.go b/vendor/github.com/prometheus/common/expfmt/openmetrics_create.go deleted file mode 100644 index 432843da3..000000000 --- a/vendor/github.com/prometheus/common/expfmt/openmetrics_create.go +++ /dev/null @@ -1,696 +0,0 @@ -// Copyright 2020 The Prometheus Authors -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package expfmt - -import ( - "bufio" - "bytes" - "fmt" - "io" - "math" - "strconv" - "strings" - - "google.golang.org/protobuf/types/known/timestamppb" - - "github.com/prometheus/common/model" - - dto "github.com/prometheus/client_model/go" -) - -type encoderOption struct { - withCreatedLines bool - withUnit bool -} - -type EncoderOption func(*encoderOption) - -// WithCreatedLines is an EncoderOption that configures the OpenMetrics encoder -// to include _created lines (See -// https://github.com/OpenObservability/OpenMetrics/blob/main/specification/OpenMetrics.md#counter-1). -// Created timestamps can improve the accuracy of series reset detection, but -// come with a bandwidth cost. 
-// -// At the time of writing, created timestamp ingestion is still experimental in -// Prometheus and need to be enabled with the feature-flag -// `--feature-flag=created-timestamp-zero-ingestion`, and breaking changes are -// still possible. Therefore, it is recommended to use this feature with caution. -func WithCreatedLines() EncoderOption { - return func(t *encoderOption) { - t.withCreatedLines = true - } -} - -// WithUnit is an EncoderOption enabling a set unit to be written to the output -// and to be added to the metric name, if it's not there already, as a suffix. -// Without opting in this way, the unit will not be added to the metric name and, -// on top of that, the unit will not be passed onto the output, even if it -// were declared in the *dto.MetricFamily struct, i.e. even if in.Unit !=nil. -func WithUnit() EncoderOption { - return func(t *encoderOption) { - t.withUnit = true - } -} - -// MetricFamilyToOpenMetrics converts a MetricFamily proto message into the -// OpenMetrics text format and writes the resulting lines to 'out'. It returns -// the number of bytes written and any error encountered. The output will have -// the same order as the input, no further sorting is performed. Furthermore, -// this function assumes the input is already sanitized and does not perform any -// sanity checks. If the input contains duplicate metrics or invalid metric or -// label names, the conversion will result in invalid text format output. -// -// If metric names conform to the legacy validation pattern, they will be placed -// outside the brackets in the traditional way, like `foo{}`. If the metric name -// fails the legacy validation check, it will be placed quoted inside the -// brackets: `{"foo"}`. As stated above, the input is assumed to be santized and -// no error will be thrown in this case. -// -// Similar to metric names, if label names conform to the legacy validation -// pattern, they will be unquoted as normal, like `foo{bar="baz"}`. If the label -// name fails the legacy validation check, it will be quoted: -// `foo{"bar"="baz"}`. As stated above, the input is assumed to be santized and -// no error will be thrown in this case. -// -// This function fulfills the type 'expfmt.encoder'. -// -// Note that OpenMetrics requires a final `# EOF` line. Since this function acts -// on individual metric families, it is the responsibility of the caller to -// append this line to 'out' once all metric families have been written. -// Conveniently, this can be done by calling FinalizeOpenMetrics. -// -// The output should be fully OpenMetrics compliant. However, there are a few -// missing features and peculiarities to avoid complications when switching from -// Prometheus to OpenMetrics or vice versa: -// -// - Counters are expected to have the `_total` suffix in their metric name. In -// the output, the suffix will be truncated from the `# TYPE`, `# HELP` and `# UNIT` -// lines. A counter with a missing `_total` suffix is not an error. However, -// its type will be set to `unknown` in that case to avoid invalid OpenMetrics -// output. -// -// - According to the OM specs, the `# UNIT` line is optional, but if populated, -// the unit has to be present in the metric name as its suffix: -// (see https://github.com/OpenObservability/OpenMetrics/blob/main/specification/OpenMetrics.md#unit). 
-// However, in order to accommodate any potential scenario where such a change in the -// metric name is not desirable, the users are here given the choice of either explicitly -// opt in, in case they wish for the unit to be included in the output AND in the metric name -// as a suffix (see the description of the WithUnit function above), -// or not to opt in, in case they don't want for any of that to happen. -// -// - No support for the following (optional) features: info type, -// stateset type, gaugehistogram type. -// -// - The size of exemplar labels is not checked (i.e. it's possible to create -// exemplars that are larger than allowed by the OpenMetrics specification). -// -// - The value of Counters is not checked. (OpenMetrics doesn't allow counters -// with a `NaN` value.) -func MetricFamilyToOpenMetrics(out io.Writer, in *dto.MetricFamily, options ...EncoderOption) (written int, err error) { - toOM := encoderOption{} - for _, option := range options { - option(&toOM) - } - - name := in.GetName() - if name == "" { - return 0, fmt.Errorf("MetricFamily has no name: %s", in) - } - - // Try the interface upgrade. If it doesn't work, we'll use a - // bufio.Writer from the sync.Pool. - w, ok := out.(enhancedWriter) - if !ok { - b := bufPool.Get().(*bufio.Writer) - b.Reset(out) - w = b - defer func() { - bErr := b.Flush() - if err == nil { - err = bErr - } - bufPool.Put(b) - }() - } - - var ( - n int - metricType = in.GetType() - compliantName = name - ) - if metricType == dto.MetricType_COUNTER && strings.HasSuffix(compliantName, "_total") { - compliantName = name[:len(name)-6] - } - if toOM.withUnit && in.Unit != nil && !strings.HasSuffix(compliantName, fmt.Sprintf("_%s", *in.Unit)) { - compliantName = compliantName + fmt.Sprintf("_%s", *in.Unit) - } - - // Comments, first HELP, then TYPE. - if in.Help != nil { - n, err = w.WriteString("# HELP ") - written += n - if err != nil { - return - } - n, err = writeName(w, compliantName) - written += n - if err != nil { - return - } - err = w.WriteByte(' ') - written++ - if err != nil { - return - } - n, err = writeEscapedString(w, *in.Help, true) - written += n - if err != nil { - return - } - err = w.WriteByte('\n') - written++ - if err != nil { - return - } - } - n, err = w.WriteString("# TYPE ") - written += n - if err != nil { - return - } - n, err = writeName(w, compliantName) - written += n - if err != nil { - return - } - switch metricType { - case dto.MetricType_COUNTER: - if strings.HasSuffix(name, "_total") { - n, err = w.WriteString(" counter\n") - } else { - n, err = w.WriteString(" unknown\n") - } - case dto.MetricType_GAUGE: - n, err = w.WriteString(" gauge\n") - case dto.MetricType_SUMMARY: - n, err = w.WriteString(" summary\n") - case dto.MetricType_UNTYPED: - n, err = w.WriteString(" unknown\n") - case dto.MetricType_HISTOGRAM: - n, err = w.WriteString(" histogram\n") - default: - return written, fmt.Errorf("unknown metric type %s", metricType.String()) - } - written += n - if err != nil { - return - } - if toOM.withUnit && in.Unit != nil { - n, err = w.WriteString("# UNIT ") - written += n - if err != nil { - return - } - n, err = writeName(w, compliantName) - written += n - if err != nil { - return - } - - err = w.WriteByte(' ') - written++ - if err != nil { - return - } - n, err = writeEscapedString(w, *in.Unit, true) - written += n - if err != nil { - return - } - err = w.WriteByte('\n') - written++ - if err != nil { - return - } - } - - var createdTsBytesWritten int - - // Finally the samples, one line for each. 
- for _, metric := range in.Metric { - switch metricType { - case dto.MetricType_COUNTER: - if strings.HasSuffix(name, "_total") { - compliantName = compliantName + "_total" - } - if metric.Counter == nil { - return written, fmt.Errorf( - "expected counter in metric %s %s", compliantName, metric, - ) - } - n, err = writeOpenMetricsSample( - w, compliantName, "", metric, "", 0, - metric.Counter.GetValue(), 0, false, - metric.Counter.Exemplar, - ) - if toOM.withCreatedLines && metric.Counter.CreatedTimestamp != nil { - createdTsBytesWritten, err = writeOpenMetricsCreated(w, compliantName, "_total", metric, "", 0, metric.Counter.GetCreatedTimestamp()) - n += createdTsBytesWritten - } - case dto.MetricType_GAUGE: - if metric.Gauge == nil { - return written, fmt.Errorf( - "expected gauge in metric %s %s", compliantName, metric, - ) - } - n, err = writeOpenMetricsSample( - w, compliantName, "", metric, "", 0, - metric.Gauge.GetValue(), 0, false, - nil, - ) - case dto.MetricType_UNTYPED: - if metric.Untyped == nil { - return written, fmt.Errorf( - "expected untyped in metric %s %s", compliantName, metric, - ) - } - n, err = writeOpenMetricsSample( - w, compliantName, "", metric, "", 0, - metric.Untyped.GetValue(), 0, false, - nil, - ) - case dto.MetricType_SUMMARY: - if metric.Summary == nil { - return written, fmt.Errorf( - "expected summary in metric %s %s", compliantName, metric, - ) - } - for _, q := range metric.Summary.Quantile { - n, err = writeOpenMetricsSample( - w, compliantName, "", metric, - model.QuantileLabel, q.GetQuantile(), - q.GetValue(), 0, false, - nil, - ) - written += n - if err != nil { - return - } - } - n, err = writeOpenMetricsSample( - w, compliantName, "_sum", metric, "", 0, - metric.Summary.GetSampleSum(), 0, false, - nil, - ) - written += n - if err != nil { - return - } - n, err = writeOpenMetricsSample( - w, compliantName, "_count", metric, "", 0, - 0, metric.Summary.GetSampleCount(), true, - nil, - ) - if toOM.withCreatedLines && metric.Summary.CreatedTimestamp != nil { - createdTsBytesWritten, err = writeOpenMetricsCreated(w, compliantName, "", metric, "", 0, metric.Summary.GetCreatedTimestamp()) - n += createdTsBytesWritten - } - case dto.MetricType_HISTOGRAM: - if metric.Histogram == nil { - return written, fmt.Errorf( - "expected histogram in metric %s %s", compliantName, metric, - ) - } - infSeen := false - for _, b := range metric.Histogram.Bucket { - n, err = writeOpenMetricsSample( - w, compliantName, "_bucket", metric, - model.BucketLabel, b.GetUpperBound(), - 0, b.GetCumulativeCount(), true, - b.Exemplar, - ) - written += n - if err != nil { - return - } - if math.IsInf(b.GetUpperBound(), +1) { - infSeen = true - } - } - if !infSeen { - n, err = writeOpenMetricsSample( - w, compliantName, "_bucket", metric, - model.BucketLabel, math.Inf(+1), - 0, metric.Histogram.GetSampleCount(), true, - nil, - ) - written += n - if err != nil { - return - } - } - n, err = writeOpenMetricsSample( - w, compliantName, "_sum", metric, "", 0, - metric.Histogram.GetSampleSum(), 0, false, - nil, - ) - written += n - if err != nil { - return - } - n, err = writeOpenMetricsSample( - w, compliantName, "_count", metric, "", 0, - 0, metric.Histogram.GetSampleCount(), true, - nil, - ) - if toOM.withCreatedLines && metric.Histogram.CreatedTimestamp != nil { - createdTsBytesWritten, err = writeOpenMetricsCreated(w, compliantName, "", metric, "", 0, metric.Histogram.GetCreatedTimestamp()) - n += createdTsBytesWritten - } - default: - return written, fmt.Errorf( - "unexpected type in 
metric %s %s", compliantName, metric, - ) - } - written += n - if err != nil { - return - } - } - return -} - -// FinalizeOpenMetrics writes the final `# EOF\n` line required by OpenMetrics. -func FinalizeOpenMetrics(w io.Writer) (written int, err error) { - return w.Write([]byte("# EOF\n")) -} - -// writeOpenMetricsSample writes a single sample in OpenMetrics text format to -// w, given the metric name, the metric proto message itself, optionally an -// additional label name with a float64 value (use empty string as label name if -// not required), the value (optionally as float64 or uint64, determined by -// useIntValue), and optionally an exemplar (use nil if not required). The -// function returns the number of bytes written and any error encountered. -func writeOpenMetricsSample( - w enhancedWriter, - name, suffix string, - metric *dto.Metric, - additionalLabelName string, additionalLabelValue float64, - floatValue float64, intValue uint64, useIntValue bool, - exemplar *dto.Exemplar, -) (int, error) { - written := 0 - n, err := writeOpenMetricsNameAndLabelPairs( - w, name+suffix, metric.Label, additionalLabelName, additionalLabelValue, - ) - written += n - if err != nil { - return written, err - } - err = w.WriteByte(' ') - written++ - if err != nil { - return written, err - } - if useIntValue { - n, err = writeUint(w, intValue) - } else { - n, err = writeOpenMetricsFloat(w, floatValue) - } - written += n - if err != nil { - return written, err - } - if metric.TimestampMs != nil { - err = w.WriteByte(' ') - written++ - if err != nil { - return written, err - } - // TODO(beorn7): Format this directly without converting to a float first. - n, err = writeOpenMetricsFloat(w, float64(*metric.TimestampMs)/1000) - written += n - if err != nil { - return written, err - } - } - if exemplar != nil && len(exemplar.Label) > 0 { - n, err = writeExemplar(w, exemplar) - written += n - if err != nil { - return written, err - } - } - err = w.WriteByte('\n') - written++ - if err != nil { - return written, err - } - return written, nil -} - -// writeOpenMetricsNameAndLabelPairs works like writeOpenMetricsSample but -// formats the float in OpenMetrics style. -func writeOpenMetricsNameAndLabelPairs( - w enhancedWriter, - name string, - in []*dto.LabelPair, - additionalLabelName string, additionalLabelValue float64, -) (int, error) { - var ( - written int - separator byte = '{' - metricInsideBraces = false - ) - - if name != "" { - // If the name does not pass the legacy validity check, we must put the - // metric name inside the braces, quoted. 
- if !model.IsValidLegacyMetricName(model.LabelValue(name)) { - metricInsideBraces = true - err := w.WriteByte(separator) - written++ - if err != nil { - return written, err - } - separator = ',' - } - - n, err := writeName(w, name) - written += n - if err != nil { - return written, err - } - } - - if len(in) == 0 && additionalLabelName == "" { - if metricInsideBraces { - err := w.WriteByte('}') - written++ - if err != nil { - return written, err - } - } - return written, nil - } - - for _, lp := range in { - err := w.WriteByte(separator) - written++ - if err != nil { - return written, err - } - n, err := writeName(w, lp.GetName()) - written += n - if err != nil { - return written, err - } - n, err = w.WriteString(`="`) - written += n - if err != nil { - return written, err - } - n, err = writeEscapedString(w, lp.GetValue(), true) - written += n - if err != nil { - return written, err - } - err = w.WriteByte('"') - written++ - if err != nil { - return written, err - } - separator = ',' - } - if additionalLabelName != "" { - err := w.WriteByte(separator) - written++ - if err != nil { - return written, err - } - n, err := w.WriteString(additionalLabelName) - written += n - if err != nil { - return written, err - } - n, err = w.WriteString(`="`) - written += n - if err != nil { - return written, err - } - n, err = writeOpenMetricsFloat(w, additionalLabelValue) - written += n - if err != nil { - return written, err - } - err = w.WriteByte('"') - written++ - if err != nil { - return written, err - } - } - err := w.WriteByte('}') - written++ - if err != nil { - return written, err - } - return written, nil -} - -// writeOpenMetricsCreated writes the created timestamp for a single time series -// following OpenMetrics text format to w, given the metric name, the metric proto -// message itself, optionally a suffix to be removed, e.g. '_total' for counters, -// an additional label name with a float64 value (use empty string as label name if -// not required) and the timestamp that represents the created timestamp. -// The function returns the number of bytes written and any error encountered. -func writeOpenMetricsCreated(w enhancedWriter, - name, suffixToTrim string, metric *dto.Metric, - additionalLabelName string, additionalLabelValue float64, - createdTimestamp *timestamppb.Timestamp, -) (int, error) { - written := 0 - n, err := writeOpenMetricsNameAndLabelPairs( - w, strings.TrimSuffix(name, suffixToTrim)+"_created", metric.Label, additionalLabelName, additionalLabelValue, - ) - written += n - if err != nil { - return written, err - } - - err = w.WriteByte(' ') - written++ - if err != nil { - return written, err - } - - // TODO(beorn7): Format this directly from components of ts to - // avoid overflow/underflow and precision issues of the float - // conversion. - n, err = writeOpenMetricsFloat(w, float64(createdTimestamp.AsTime().UnixNano())/1e9) - written += n - if err != nil { - return written, err - } - - err = w.WriteByte('\n') - written++ - if err != nil { - return written, err - } - return written, nil -} - -// writeExemplar writes the provided exemplar in OpenMetrics format to w. The -// function returns the number of bytes written and any error encountered. 
-func writeExemplar(w enhancedWriter, e *dto.Exemplar) (int, error) { - written := 0 - n, err := w.WriteString(" # ") - written += n - if err != nil { - return written, err - } - n, err = writeOpenMetricsNameAndLabelPairs(w, "", e.Label, "", 0) - written += n - if err != nil { - return written, err - } - err = w.WriteByte(' ') - written++ - if err != nil { - return written, err - } - n, err = writeOpenMetricsFloat(w, e.GetValue()) - written += n - if err != nil { - return written, err - } - if e.Timestamp != nil { - err = w.WriteByte(' ') - written++ - if err != nil { - return written, err - } - err = (*e).Timestamp.CheckValid() - if err != nil { - return written, err - } - ts := (*e).Timestamp.AsTime() - // TODO(beorn7): Format this directly from components of ts to - // avoid overflow/underflow and precision issues of the float - // conversion. - n, err = writeOpenMetricsFloat(w, float64(ts.UnixNano())/1e9) - written += n - if err != nil { - return written, err - } - } - return written, nil -} - -// writeOpenMetricsFloat works like writeFloat but appends ".0" if the resulting -// number would otherwise contain neither a "." nor an "e". -func writeOpenMetricsFloat(w enhancedWriter, f float64) (int, error) { - switch { - case f == 1: - return w.WriteString("1.0") - case f == 0: - return w.WriteString("0.0") - case f == -1: - return w.WriteString("-1.0") - case math.IsNaN(f): - return w.WriteString("NaN") - case math.IsInf(f, +1): - return w.WriteString("+Inf") - case math.IsInf(f, -1): - return w.WriteString("-Inf") - default: - bp := numBufPool.Get().(*[]byte) - *bp = strconv.AppendFloat((*bp)[:0], f, 'g', -1, 64) - if !bytes.ContainsAny(*bp, "e.") { - *bp = append(*bp, '.', '0') - } - written, err := w.Write(*bp) - numBufPool.Put(bp) - return written, err - } -} - -// writeUint is like writeInt just for uint64. -func writeUint(w enhancedWriter, u uint64) (int, error) { - bp := numBufPool.Get().(*[]byte) - *bp = strconv.AppendUint((*bp)[:0], u, 10) - written, err := w.Write(*bp) - numBufPool.Put(bp) - return written, err -} diff --git a/vendor/github.com/prometheus/common/expfmt/text_create.go b/vendor/github.com/prometheus/common/expfmt/text_create.go deleted file mode 100644 index f9b8265a9..000000000 --- a/vendor/github.com/prometheus/common/expfmt/text_create.go +++ /dev/null @@ -1,520 +0,0 @@ -// Copyright 2014 The Prometheus Authors -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package expfmt - -import ( - "bufio" - "fmt" - "io" - "math" - "strconv" - "strings" - "sync" - - "github.com/prometheus/common/model" - - dto "github.com/prometheus/client_model/go" -) - -// enhancedWriter has all the enhanced write functions needed here. bufio.Writer -// implements it. 
-type enhancedWriter interface { - io.Writer - WriteRune(r rune) (n int, err error) - WriteString(s string) (n int, err error) - WriteByte(c byte) error -} - -const ( - initialNumBufSize = 24 -) - -var ( - bufPool = sync.Pool{ - New: func() interface{} { - return bufio.NewWriter(io.Discard) - }, - } - numBufPool = sync.Pool{ - New: func() interface{} { - b := make([]byte, 0, initialNumBufSize) - return &b - }, - } -) - -// MetricFamilyToText converts a MetricFamily proto message into text format and -// writes the resulting lines to 'out'. It returns the number of bytes written -// and any error encountered. The output will have the same order as the input, -// no further sorting is performed. Furthermore, this function assumes the input -// is already sanitized and does not perform any sanity checks. If the input -// contains duplicate metrics or invalid metric or label names, the conversion -// will result in invalid text format output. -// -// If metric names conform to the legacy validation pattern, they will be placed -// outside the brackets in the traditional way, like `foo{}`. If the metric name -// fails the legacy validation check, it will be placed quoted inside the -// brackets: `{"foo"}`. As stated above, the input is assumed to be santized and -// no error will be thrown in this case. -// -// Similar to metric names, if label names conform to the legacy validation -// pattern, they will be unquoted as normal, like `foo{bar="baz"}`. If the label -// name fails the legacy validation check, it will be quoted: -// `foo{"bar"="baz"}`. As stated above, the input is assumed to be santized and -// no error will be thrown in this case. -// -// This method fulfills the type 'prometheus.encoder'. -func MetricFamilyToText(out io.Writer, in *dto.MetricFamily) (written int, err error) { - // Fail-fast checks. - if len(in.Metric) == 0 { - return 0, fmt.Errorf("MetricFamily has no metrics: %s", in) - } - name := in.GetName() - if name == "" { - return 0, fmt.Errorf("MetricFamily has no name: %s", in) - } - - // Try the interface upgrade. If it doesn't work, we'll use a - // bufio.Writer from the sync.Pool. - w, ok := out.(enhancedWriter) - if !ok { - b := bufPool.Get().(*bufio.Writer) - b.Reset(out) - w = b - defer func() { - bErr := b.Flush() - if err == nil { - err = bErr - } - bufPool.Put(b) - }() - } - - var n int - - // Comments, first HELP, then TYPE. 
- if in.Help != nil { - n, err = w.WriteString("# HELP ") - written += n - if err != nil { - return - } - n, err = writeName(w, name) - written += n - if err != nil { - return - } - err = w.WriteByte(' ') - written++ - if err != nil { - return - } - n, err = writeEscapedString(w, *in.Help, false) - written += n - if err != nil { - return - } - err = w.WriteByte('\n') - written++ - if err != nil { - return - } - } - n, err = w.WriteString("# TYPE ") - written += n - if err != nil { - return - } - n, err = writeName(w, name) - written += n - if err != nil { - return - } - metricType := in.GetType() - switch metricType { - case dto.MetricType_COUNTER: - n, err = w.WriteString(" counter\n") - case dto.MetricType_GAUGE: - n, err = w.WriteString(" gauge\n") - case dto.MetricType_SUMMARY: - n, err = w.WriteString(" summary\n") - case dto.MetricType_UNTYPED: - n, err = w.WriteString(" untyped\n") - case dto.MetricType_HISTOGRAM: - n, err = w.WriteString(" histogram\n") - default: - return written, fmt.Errorf("unknown metric type %s", metricType.String()) - } - written += n - if err != nil { - return - } - - // Finally the samples, one line for each. - for _, metric := range in.Metric { - switch metricType { - case dto.MetricType_COUNTER: - if metric.Counter == nil { - return written, fmt.Errorf( - "expected counter in metric %s %s", name, metric, - ) - } - n, err = writeSample( - w, name, "", metric, "", 0, - metric.Counter.GetValue(), - ) - case dto.MetricType_GAUGE: - if metric.Gauge == nil { - return written, fmt.Errorf( - "expected gauge in metric %s %s", name, metric, - ) - } - n, err = writeSample( - w, name, "", metric, "", 0, - metric.Gauge.GetValue(), - ) - case dto.MetricType_UNTYPED: - if metric.Untyped == nil { - return written, fmt.Errorf( - "expected untyped in metric %s %s", name, metric, - ) - } - n, err = writeSample( - w, name, "", metric, "", 0, - metric.Untyped.GetValue(), - ) - case dto.MetricType_SUMMARY: - if metric.Summary == nil { - return written, fmt.Errorf( - "expected summary in metric %s %s", name, metric, - ) - } - for _, q := range metric.Summary.Quantile { - n, err = writeSample( - w, name, "", metric, - model.QuantileLabel, q.GetQuantile(), - q.GetValue(), - ) - written += n - if err != nil { - return - } - } - n, err = writeSample( - w, name, "_sum", metric, "", 0, - metric.Summary.GetSampleSum(), - ) - written += n - if err != nil { - return - } - n, err = writeSample( - w, name, "_count", metric, "", 0, - float64(metric.Summary.GetSampleCount()), - ) - case dto.MetricType_HISTOGRAM: - if metric.Histogram == nil { - return written, fmt.Errorf( - "expected histogram in metric %s %s", name, metric, - ) - } - infSeen := false - for _, b := range metric.Histogram.Bucket { - n, err = writeSample( - w, name, "_bucket", metric, - model.BucketLabel, b.GetUpperBound(), - float64(b.GetCumulativeCount()), - ) - written += n - if err != nil { - return - } - if math.IsInf(b.GetUpperBound(), +1) { - infSeen = true - } - } - if !infSeen { - n, err = writeSample( - w, name, "_bucket", metric, - model.BucketLabel, math.Inf(+1), - float64(metric.Histogram.GetSampleCount()), - ) - written += n - if err != nil { - return - } - } - n, err = writeSample( - w, name, "_sum", metric, "", 0, - metric.Histogram.GetSampleSum(), - ) - written += n - if err != nil { - return - } - n, err = writeSample( - w, name, "_count", metric, "", 0, - float64(metric.Histogram.GetSampleCount()), - ) - default: - return written, fmt.Errorf( - "unexpected type in metric %s %s", name, metric, - ) - } - 
written += n - if err != nil { - return - } - } - return -} - -// writeSample writes a single sample in text format to w, given the metric -// name, the metric proto message itself, optionally an additional label name -// with a float64 value (use empty string as label name if not required), and -// the value. The function returns the number of bytes written and any error -// encountered. -func writeSample( - w enhancedWriter, - name, suffix string, - metric *dto.Metric, - additionalLabelName string, additionalLabelValue float64, - value float64, -) (int, error) { - written := 0 - n, err := writeNameAndLabelPairs( - w, name+suffix, metric.Label, additionalLabelName, additionalLabelValue, - ) - written += n - if err != nil { - return written, err - } - err = w.WriteByte(' ') - written++ - if err != nil { - return written, err - } - n, err = writeFloat(w, value) - written += n - if err != nil { - return written, err - } - if metric.TimestampMs != nil { - err = w.WriteByte(' ') - written++ - if err != nil { - return written, err - } - n, err = writeInt(w, *metric.TimestampMs) - written += n - if err != nil { - return written, err - } - } - err = w.WriteByte('\n') - written++ - if err != nil { - return written, err - } - return written, nil -} - -// writeNameAndLabelPairs converts a slice of LabelPair proto messages plus the -// explicitly given metric name and additional label pair into text formatted as -// required by the text format and writes it to 'w'. An empty slice in -// combination with an empty string 'additionalLabelName' results in nothing -// being written. Otherwise, the label pairs are written, escaped as required by -// the text format, and enclosed in '{...}'. The function returns the number of -// bytes written and any error encountered. If the metric name is not -// legacy-valid, it will be put inside the brackets as well. Legacy-invalid -// label names will also be quoted. -func writeNameAndLabelPairs( - w enhancedWriter, - name string, - in []*dto.LabelPair, - additionalLabelName string, additionalLabelValue float64, -) (int, error) { - var ( - written int - separator byte = '{' - metricInsideBraces = false - ) - - if name != "" { - // If the name does not pass the legacy validity check, we must put the - // metric name inside the braces. 
- if !model.IsValidLegacyMetricName(model.LabelValue(name)) { - metricInsideBraces = true - err := w.WriteByte(separator) - written++ - if err != nil { - return written, err - } - separator = ',' - } - n, err := writeName(w, name) - written += n - if err != nil { - return written, err - } - } - - if len(in) == 0 && additionalLabelName == "" { - if metricInsideBraces { - err := w.WriteByte('}') - written++ - if err != nil { - return written, err - } - } - return written, nil - } - - for _, lp := range in { - err := w.WriteByte(separator) - written++ - if err != nil { - return written, err - } - n, err := writeName(w, lp.GetName()) - written += n - if err != nil { - return written, err - } - n, err = w.WriteString(`="`) - written += n - if err != nil { - return written, err - } - n, err = writeEscapedString(w, lp.GetValue(), true) - written += n - if err != nil { - return written, err - } - err = w.WriteByte('"') - written++ - if err != nil { - return written, err - } - separator = ',' - } - if additionalLabelName != "" { - err := w.WriteByte(separator) - written++ - if err != nil { - return written, err - } - n, err := w.WriteString(additionalLabelName) - written += n - if err != nil { - return written, err - } - n, err = w.WriteString(`="`) - written += n - if err != nil { - return written, err - } - n, err = writeFloat(w, additionalLabelValue) - written += n - if err != nil { - return written, err - } - err = w.WriteByte('"') - written++ - if err != nil { - return written, err - } - } - err := w.WriteByte('}') - written++ - if err != nil { - return written, err - } - return written, nil -} - -// writeEscapedString replaces '\' by '\\', new line character by '\n', and - if -// includeDoubleQuote is true - '"' by '\"'. -var ( - escaper = strings.NewReplacer("\\", `\\`, "\n", `\n`) - quotedEscaper = strings.NewReplacer("\\", `\\`, "\n", `\n`, "\"", `\"`) -) - -func writeEscapedString(w enhancedWriter, v string, includeDoubleQuote bool) (int, error) { - if includeDoubleQuote { - return quotedEscaper.WriteString(w, v) - } - return escaper.WriteString(w, v) -} - -// writeFloat is equivalent to fmt.Fprint with a float64 argument but hardcodes -// a few common cases for increased efficiency. For non-hardcoded cases, it uses -// strconv.AppendFloat to avoid allocations, similar to writeInt. -func writeFloat(w enhancedWriter, f float64) (int, error) { - switch { - case f == 1: - return 1, w.WriteByte('1') - case f == 0: - return 1, w.WriteByte('0') - case f == -1: - return w.WriteString("-1") - case math.IsNaN(f): - return w.WriteString("NaN") - case math.IsInf(f, +1): - return w.WriteString("+Inf") - case math.IsInf(f, -1): - return w.WriteString("-Inf") - default: - bp := numBufPool.Get().(*[]byte) - *bp = strconv.AppendFloat((*bp)[:0], f, 'g', -1, 64) - written, err := w.Write(*bp) - numBufPool.Put(bp) - return written, err - } -} - -// writeInt is equivalent to fmt.Fprint with an int64 argument but uses -// strconv.AppendInt with a byte slice taken from a sync.Pool to avoid -// allocations. -func writeInt(w enhancedWriter, i int64) (int, error) { - bp := numBufPool.Get().(*[]byte) - *bp = strconv.AppendInt((*bp)[:0], i, 10) - written, err := w.Write(*bp) - numBufPool.Put(bp) - return written, err -} - -// writeName writes a string as-is if it complies with the legacy naming -// scheme, or escapes it in double quotes if not. 
-func writeName(w enhancedWriter, name string) (int, error) { - if model.IsValidLegacyMetricName(model.LabelValue(name)) { - return w.WriteString(name) - } - var written int - var err error - err = w.WriteByte('"') - written++ - if err != nil { - return written, err - } - var n int - n, err = writeEscapedString(w, name, true) - written += n - if err != nil { - return written, err - } - err = w.WriteByte('"') - written++ - return written, err -} diff --git a/vendor/github.com/prometheus/common/expfmt/text_parse.go b/vendor/github.com/prometheus/common/expfmt/text_parse.go deleted file mode 100644 index 26490211a..000000000 --- a/vendor/github.com/prometheus/common/expfmt/text_parse.go +++ /dev/null @@ -1,781 +0,0 @@ -// Copyright 2014 The Prometheus Authors -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package expfmt - -import ( - "bufio" - "bytes" - "errors" - "fmt" - "io" - "math" - "strconv" - "strings" - - dto "github.com/prometheus/client_model/go" - - "google.golang.org/protobuf/proto" - - "github.com/prometheus/common/model" -) - -// A stateFn is a function that represents a state in a state machine. By -// executing it, the state is progressed to the next state. The stateFn returns -// another stateFn, which represents the new state. The end state is represented -// by nil. -type stateFn func() stateFn - -// ParseError signals errors while parsing the simple and flat text-based -// exchange format. -type ParseError struct { - Line int - Msg string -} - -// Error implements the error interface. -func (e ParseError) Error() string { - return fmt.Sprintf("text format parsing error in line %d: %s", e.Line, e.Msg) -} - -// TextParser is used to parse the simple and flat text-based exchange format. Its -// zero value is ready to use. -type TextParser struct { - metricFamiliesByName map[string]*dto.MetricFamily - buf *bufio.Reader // Where the parsed input is read through. - err error // Most recent error. - lineCount int // Tracks the line count for error messages. - currentByte byte // The most recent byte read. - currentToken bytes.Buffer // Re-used each time a token has to be gathered from multiple bytes. - currentMF *dto.MetricFamily - currentMetric *dto.Metric - currentLabelPair *dto.LabelPair - - // The remaining member variables are only used for summaries/histograms. - currentLabels map[string]string // All labels including '__name__' but excluding 'quantile'/'le' - // Summary specific. - summaries map[uint64]*dto.Metric // Key is created with LabelsToSignature. - currentQuantile float64 - // Histogram specific. - histograms map[uint64]*dto.Metric // Key is created with LabelsToSignature. - currentBucket float64 - // These tell us if the currently processed line ends on '_count' or - // '_sum' respectively and belong to a summary/histogram, representing the sample - // count and sum of that summary/histogram. 
- currentIsSummaryCount, currentIsSummarySum bool - currentIsHistogramCount, currentIsHistogramSum bool -} - -// TextToMetricFamilies reads 'in' as the simple and flat text-based exchange -// format and creates MetricFamily proto messages. It returns the MetricFamily -// proto messages in a map where the metric names are the keys, along with any -// error encountered. -// -// If the input contains duplicate metrics (i.e. lines with the same metric name -// and exactly the same label set), the resulting MetricFamily will contain -// duplicate Metric proto messages. Similar is true for duplicate label -// names. Checks for duplicates have to be performed separately, if required. -// Also note that neither the metrics within each MetricFamily are sorted nor -// the label pairs within each Metric. Sorting is not required for the most -// frequent use of this method, which is sample ingestion in the Prometheus -// server. However, for presentation purposes, you might want to sort the -// metrics, and in some cases, you must sort the labels, e.g. for consumption by -// the metric family injection hook of the Prometheus registry. -// -// Summaries and histograms are rather special beasts. You would probably not -// use them in the simple text format anyway. This method can deal with -// summaries and histograms if they are presented in exactly the way the -// text.Create function creates them. -// -// This method must not be called concurrently. If you want to parse different -// input concurrently, instantiate a separate Parser for each goroutine. -func (p *TextParser) TextToMetricFamilies(in io.Reader) (map[string]*dto.MetricFamily, error) { - p.reset(in) - for nextState := p.startOfLine; nextState != nil; nextState = nextState() { - // Magic happens here... - } - // Get rid of empty metric families. - for k, mf := range p.metricFamiliesByName { - if len(mf.GetMetric()) == 0 { - delete(p.metricFamiliesByName, k) - } - } - // If p.err is io.EOF now, we have run into a premature end of the input - // stream. Turn this error into something nicer and more - // meaningful. (io.EOF is often used as a signal for the legitimate end - // of an input stream.) - if p.err != nil && errors.Is(p.err, io.EOF) { - p.parseError("unexpected end of input stream") - } - return p.metricFamiliesByName, p.err -} - -func (p *TextParser) reset(in io.Reader) { - p.metricFamiliesByName = map[string]*dto.MetricFamily{} - if p.buf == nil { - p.buf = bufio.NewReader(in) - } else { - p.buf.Reset(in) - } - p.err = nil - p.lineCount = 0 - if p.summaries == nil || len(p.summaries) > 0 { - p.summaries = map[uint64]*dto.Metric{} - } - if p.histograms == nil || len(p.histograms) > 0 { - p.histograms = map[uint64]*dto.Metric{} - } - p.currentQuantile = math.NaN() - p.currentBucket = math.NaN() -} - -// startOfLine represents the state where the next byte read from p.buf is the -// start of a line (or whitespace leading up to it). -func (p *TextParser) startOfLine() stateFn { - p.lineCount++ - if p.skipBlankTab(); p.err != nil { - // This is the only place that we expect to see io.EOF, - // which is not an error but the signal that we are done. - // Any other error that happens to align with the start of - // a line is still an error. - if errors.Is(p.err, io.EOF) { - p.err = nil - } - return nil - } - switch p.currentByte { - case '#': - return p.startComment - case '\n': - return p.startOfLine // Empty line, start the next one. 
- } - return p.readingMetricName -} - -// startComment represents the state where the next byte read from p.buf is the -// start of a comment (or whitespace leading up to it). -func (p *TextParser) startComment() stateFn { - if p.skipBlankTab(); p.err != nil { - return nil // Unexpected end of input. - } - if p.currentByte == '\n' { - return p.startOfLine - } - if p.readTokenUntilWhitespace(); p.err != nil { - return nil // Unexpected end of input. - } - // If we have hit the end of line already, there is nothing left - // to do. This is not considered a syntax error. - if p.currentByte == '\n' { - return p.startOfLine - } - keyword := p.currentToken.String() - if keyword != "HELP" && keyword != "TYPE" { - // Generic comment, ignore by fast forwarding to end of line. - for p.currentByte != '\n' { - if p.currentByte, p.err = p.buf.ReadByte(); p.err != nil { - return nil // Unexpected end of input. - } - } - return p.startOfLine - } - // There is something. Next has to be a metric name. - if p.skipBlankTab(); p.err != nil { - return nil // Unexpected end of input. - } - if p.readTokenAsMetricName(); p.err != nil { - return nil // Unexpected end of input. - } - if p.currentByte == '\n' { - // At the end of the line already. - // Again, this is not considered a syntax error. - return p.startOfLine - } - if !isBlankOrTab(p.currentByte) { - p.parseError("invalid metric name in comment") - return nil - } - p.setOrCreateCurrentMF() - if p.skipBlankTab(); p.err != nil { - return nil // Unexpected end of input. - } - if p.currentByte == '\n' { - // At the end of the line already. - // Again, this is not considered a syntax error. - return p.startOfLine - } - switch keyword { - case "HELP": - return p.readingHelp - case "TYPE": - return p.readingType - } - panic(fmt.Sprintf("code error: unexpected keyword %q", keyword)) -} - -// readingMetricName represents the state where the last byte read (now in -// p.currentByte) is the first byte of a metric name. -func (p *TextParser) readingMetricName() stateFn { - if p.readTokenAsMetricName(); p.err != nil { - return nil - } - if p.currentToken.Len() == 0 { - p.parseError("invalid metric name") - return nil - } - p.setOrCreateCurrentMF() - // Now is the time to fix the type if it hasn't happened yet. - if p.currentMF.Type == nil { - p.currentMF.Type = dto.MetricType_UNTYPED.Enum() - } - p.currentMetric = &dto.Metric{} - // Do not append the newly created currentMetric to - // currentMF.Metric right now. First wait if this is a summary, - // and the metric exists already, which we can only know after - // having read all the labels. - if p.skipBlankTabIfCurrentBlankTab(); p.err != nil { - return nil // Unexpected end of input. - } - return p.readingLabels -} - -// readingLabels represents the state where the last byte read (now in -// p.currentByte) is either the first byte of the label set (i.e. a '{'), or the -// first byte of the value (otherwise). -func (p *TextParser) readingLabels() stateFn { - // Summaries/histograms are special. We have to reset the - // currentLabels map, currentQuantile and currentBucket before starting to - // read labels. 
- if p.currentMF.GetType() == dto.MetricType_SUMMARY || p.currentMF.GetType() == dto.MetricType_HISTOGRAM { - p.currentLabels = map[string]string{} - p.currentLabels[string(model.MetricNameLabel)] = p.currentMF.GetName() - p.currentQuantile = math.NaN() - p.currentBucket = math.NaN() - } - if p.currentByte != '{' { - return p.readingValue - } - return p.startLabelName -} - -// startLabelName represents the state where the next byte read from p.buf is -// the start of a label name (or whitespace leading up to it). -func (p *TextParser) startLabelName() stateFn { - if p.skipBlankTab(); p.err != nil { - return nil // Unexpected end of input. - } - if p.currentByte == '}' { - if p.skipBlankTab(); p.err != nil { - return nil // Unexpected end of input. - } - return p.readingValue - } - if p.readTokenAsLabelName(); p.err != nil { - return nil // Unexpected end of input. - } - if p.currentToken.Len() == 0 { - p.parseError(fmt.Sprintf("invalid label name for metric %q", p.currentMF.GetName())) - return nil - } - p.currentLabelPair = &dto.LabelPair{Name: proto.String(p.currentToken.String())} - if p.currentLabelPair.GetName() == string(model.MetricNameLabel) { - p.parseError(fmt.Sprintf("label name %q is reserved", model.MetricNameLabel)) - return nil - } - // Special summary/histogram treatment. Don't add 'quantile' and 'le' - // labels to 'real' labels. - if !(p.currentMF.GetType() == dto.MetricType_SUMMARY && p.currentLabelPair.GetName() == model.QuantileLabel) && - !(p.currentMF.GetType() == dto.MetricType_HISTOGRAM && p.currentLabelPair.GetName() == model.BucketLabel) { - p.currentMetric.Label = append(p.currentMetric.Label, p.currentLabelPair) - } - if p.skipBlankTabIfCurrentBlankTab(); p.err != nil { - return nil // Unexpected end of input. - } - if p.currentByte != '=' { - p.parseError(fmt.Sprintf("expected '=' after label name, found %q", p.currentByte)) - return nil - } - // Check for duplicate label names. - labels := make(map[string]struct{}) - for _, l := range p.currentMetric.Label { - lName := l.GetName() - if _, exists := labels[lName]; !exists { - labels[lName] = struct{}{} - } else { - p.parseError(fmt.Sprintf("duplicate label names for metric %q", p.currentMF.GetName())) - return nil - } - } - return p.startLabelValue -} - -// startLabelValue represents the state where the next byte read from p.buf is -// the start of a (quoted) label value (or whitespace leading up to it). -func (p *TextParser) startLabelValue() stateFn { - if p.skipBlankTab(); p.err != nil { - return nil // Unexpected end of input. - } - if p.currentByte != '"' { - p.parseError(fmt.Sprintf("expected '\"' at start of label value, found %q", p.currentByte)) - return nil - } - if p.readTokenAsLabelValue(); p.err != nil { - return nil - } - if !model.LabelValue(p.currentToken.String()).IsValid() { - p.parseError(fmt.Sprintf("invalid label value %q", p.currentToken.String())) - return nil - } - p.currentLabelPair.Value = proto.String(p.currentToken.String()) - // Special treatment of summaries: - // - Quantile labels are special, will result in dto.Quantile later. - // - Other labels have to be added to currentLabels for signature calculation. - if p.currentMF.GetType() == dto.MetricType_SUMMARY { - if p.currentLabelPair.GetName() == model.QuantileLabel { - if p.currentQuantile, p.err = parseFloat(p.currentLabelPair.GetValue()); p.err != nil { - // Create a more helpful error message. 
- p.parseError(fmt.Sprintf("expected float as value for 'quantile' label, got %q", p.currentLabelPair.GetValue())) - return nil - } - } else { - p.currentLabels[p.currentLabelPair.GetName()] = p.currentLabelPair.GetValue() - } - } - // Similar special treatment of histograms. - if p.currentMF.GetType() == dto.MetricType_HISTOGRAM { - if p.currentLabelPair.GetName() == model.BucketLabel { - if p.currentBucket, p.err = parseFloat(p.currentLabelPair.GetValue()); p.err != nil { - // Create a more helpful error message. - p.parseError(fmt.Sprintf("expected float as value for 'le' label, got %q", p.currentLabelPair.GetValue())) - return nil - } - } else { - p.currentLabels[p.currentLabelPair.GetName()] = p.currentLabelPair.GetValue() - } - } - if p.skipBlankTab(); p.err != nil { - return nil // Unexpected end of input. - } - switch p.currentByte { - case ',': - return p.startLabelName - - case '}': - if p.skipBlankTab(); p.err != nil { - return nil // Unexpected end of input. - } - return p.readingValue - default: - p.parseError(fmt.Sprintf("unexpected end of label value %q", p.currentLabelPair.GetValue())) - return nil - } -} - -// readingValue represents the state where the last byte read (now in -// p.currentByte) is the first byte of the sample value (i.e. a float). -func (p *TextParser) readingValue() stateFn { - // When we are here, we have read all the labels, so for the - // special case of a summary/histogram, we can finally find out - // if the metric already exists. - if p.currentMF.GetType() == dto.MetricType_SUMMARY { - signature := model.LabelsToSignature(p.currentLabels) - if summary := p.summaries[signature]; summary != nil { - p.currentMetric = summary - } else { - p.summaries[signature] = p.currentMetric - p.currentMF.Metric = append(p.currentMF.Metric, p.currentMetric) - } - } else if p.currentMF.GetType() == dto.MetricType_HISTOGRAM { - signature := model.LabelsToSignature(p.currentLabels) - if histogram := p.histograms[signature]; histogram != nil { - p.currentMetric = histogram - } else { - p.histograms[signature] = p.currentMetric - p.currentMF.Metric = append(p.currentMF.Metric, p.currentMetric) - } - } else { - p.currentMF.Metric = append(p.currentMF.Metric, p.currentMetric) - } - if p.readTokenUntilWhitespace(); p.err != nil { - return nil // Unexpected end of input. - } - value, err := parseFloat(p.currentToken.String()) - if err != nil { - // Create a more helpful error message. 
- p.parseError(fmt.Sprintf("expected float as value, got %q", p.currentToken.String())) - return nil - } - switch p.currentMF.GetType() { - case dto.MetricType_COUNTER: - p.currentMetric.Counter = &dto.Counter{Value: proto.Float64(value)} - case dto.MetricType_GAUGE: - p.currentMetric.Gauge = &dto.Gauge{Value: proto.Float64(value)} - case dto.MetricType_UNTYPED: - p.currentMetric.Untyped = &dto.Untyped{Value: proto.Float64(value)} - case dto.MetricType_SUMMARY: - // *sigh* - if p.currentMetric.Summary == nil { - p.currentMetric.Summary = &dto.Summary{} - } - switch { - case p.currentIsSummaryCount: - p.currentMetric.Summary.SampleCount = proto.Uint64(uint64(value)) - case p.currentIsSummarySum: - p.currentMetric.Summary.SampleSum = proto.Float64(value) - case !math.IsNaN(p.currentQuantile): - p.currentMetric.Summary.Quantile = append( - p.currentMetric.Summary.Quantile, - &dto.Quantile{ - Quantile: proto.Float64(p.currentQuantile), - Value: proto.Float64(value), - }, - ) - } - case dto.MetricType_HISTOGRAM: - // *sigh* - if p.currentMetric.Histogram == nil { - p.currentMetric.Histogram = &dto.Histogram{} - } - switch { - case p.currentIsHistogramCount: - p.currentMetric.Histogram.SampleCount = proto.Uint64(uint64(value)) - case p.currentIsHistogramSum: - p.currentMetric.Histogram.SampleSum = proto.Float64(value) - case !math.IsNaN(p.currentBucket): - p.currentMetric.Histogram.Bucket = append( - p.currentMetric.Histogram.Bucket, - &dto.Bucket{ - UpperBound: proto.Float64(p.currentBucket), - CumulativeCount: proto.Uint64(uint64(value)), - }, - ) - } - default: - p.err = fmt.Errorf("unexpected type for metric name %q", p.currentMF.GetName()) - } - if p.currentByte == '\n' { - return p.startOfLine - } - return p.startTimestamp -} - -// startTimestamp represents the state where the next byte read from p.buf is -// the start of the timestamp (or whitespace leading up to it). -func (p *TextParser) startTimestamp() stateFn { - if p.skipBlankTab(); p.err != nil { - return nil // Unexpected end of input. - } - if p.readTokenUntilWhitespace(); p.err != nil { - return nil // Unexpected end of input. - } - timestamp, err := strconv.ParseInt(p.currentToken.String(), 10, 64) - if err != nil { - // Create a more helpful error message. - p.parseError(fmt.Sprintf("expected integer as timestamp, got %q", p.currentToken.String())) - return nil - } - p.currentMetric.TimestampMs = proto.Int64(timestamp) - if p.readTokenUntilNewline(false); p.err != nil { - return nil // Unexpected end of input. - } - if p.currentToken.Len() > 0 { - p.parseError(fmt.Sprintf("spurious string after timestamp: %q", p.currentToken.String())) - return nil - } - return p.startOfLine -} - -// readingHelp represents the state where the last byte read (now in -// p.currentByte) is the first byte of the docstring after 'HELP'. -func (p *TextParser) readingHelp() stateFn { - if p.currentMF.Help != nil { - p.parseError(fmt.Sprintf("second HELP line for metric name %q", p.currentMF.GetName())) - return nil - } - // Rest of line is the docstring. - if p.readTokenUntilNewline(true); p.err != nil { - return nil // Unexpected end of input. - } - p.currentMF.Help = proto.String(p.currentToken.String()) - return p.startOfLine -} - -// readingType represents the state where the last byte read (now in -// p.currentByte) is the first byte of the type hint after 'HELP'. 
-func (p *TextParser) readingType() stateFn { - if p.currentMF.Type != nil { - p.parseError(fmt.Sprintf("second TYPE line for metric name %q, or TYPE reported after samples", p.currentMF.GetName())) - return nil - } - // Rest of line is the type. - if p.readTokenUntilNewline(false); p.err != nil { - return nil // Unexpected end of input. - } - metricType, ok := dto.MetricType_value[strings.ToUpper(p.currentToken.String())] - if !ok { - p.parseError(fmt.Sprintf("unknown metric type %q", p.currentToken.String())) - return nil - } - p.currentMF.Type = dto.MetricType(metricType).Enum() - return p.startOfLine -} - -// parseError sets p.err to a ParseError at the current line with the given -// message. -func (p *TextParser) parseError(msg string) { - p.err = ParseError{ - Line: p.lineCount, - Msg: msg, - } -} - -// skipBlankTab reads (and discards) bytes from p.buf until it encounters a byte -// that is neither ' ' nor '\t'. That byte is left in p.currentByte. -func (p *TextParser) skipBlankTab() { - for { - if p.currentByte, p.err = p.buf.ReadByte(); p.err != nil || !isBlankOrTab(p.currentByte) { - return - } - } -} - -// skipBlankTabIfCurrentBlankTab works exactly as skipBlankTab but doesn't do -// anything if p.currentByte is neither ' ' nor '\t'. -func (p *TextParser) skipBlankTabIfCurrentBlankTab() { - if isBlankOrTab(p.currentByte) { - p.skipBlankTab() - } -} - -// readTokenUntilWhitespace copies bytes from p.buf into p.currentToken. The -// first byte considered is the byte already read (now in p.currentByte). The -// first whitespace byte encountered is still copied into p.currentByte, but not -// into p.currentToken. -func (p *TextParser) readTokenUntilWhitespace() { - p.currentToken.Reset() - for p.err == nil && !isBlankOrTab(p.currentByte) && p.currentByte != '\n' { - p.currentToken.WriteByte(p.currentByte) - p.currentByte, p.err = p.buf.ReadByte() - } -} - -// readTokenUntilNewline copies bytes from p.buf into p.currentToken. The first -// byte considered is the byte already read (now in p.currentByte). The first -// newline byte encountered is still copied into p.currentByte, but not into -// p.currentToken. If recognizeEscapeSequence is true, two escape sequences are -// recognized: '\\' translates into '\', and '\n' into a line-feed character. -// All other escape sequences are invalid and cause an error. -func (p *TextParser) readTokenUntilNewline(recognizeEscapeSequence bool) { - p.currentToken.Reset() - escaped := false - for p.err == nil { - if recognizeEscapeSequence && escaped { - switch p.currentByte { - case '\\': - p.currentToken.WriteByte(p.currentByte) - case 'n': - p.currentToken.WriteByte('\n') - default: - p.parseError(fmt.Sprintf("invalid escape sequence '\\%c'", p.currentByte)) - return - } - escaped = false - } else { - switch p.currentByte { - case '\n': - return - case '\\': - escaped = true - default: - p.currentToken.WriteByte(p.currentByte) - } - } - p.currentByte, p.err = p.buf.ReadByte() - } -} - -// readTokenAsMetricName copies a metric name from p.buf into p.currentToken. -// The first byte considered is the byte already read (now in p.currentByte). -// The first byte not part of a metric name is still copied into p.currentByte, -// but not into p.currentToken. 
-func (p *TextParser) readTokenAsMetricName() { - p.currentToken.Reset() - if !isValidMetricNameStart(p.currentByte) { - return - } - for { - p.currentToken.WriteByte(p.currentByte) - p.currentByte, p.err = p.buf.ReadByte() - if p.err != nil || !isValidMetricNameContinuation(p.currentByte) { - return - } - } -} - -// readTokenAsLabelName copies a label name from p.buf into p.currentToken. -// The first byte considered is the byte already read (now in p.currentByte). -// The first byte not part of a label name is still copied into p.currentByte, -// but not into p.currentToken. -func (p *TextParser) readTokenAsLabelName() { - p.currentToken.Reset() - if !isValidLabelNameStart(p.currentByte) { - return - } - for { - p.currentToken.WriteByte(p.currentByte) - p.currentByte, p.err = p.buf.ReadByte() - if p.err != nil || !isValidLabelNameContinuation(p.currentByte) { - return - } - } -} - -// readTokenAsLabelValue copies a label value from p.buf into p.currentToken. -// In contrast to the other 'readTokenAs...' functions, which start with the -// last read byte in p.currentByte, this method ignores p.currentByte and starts -// with reading a new byte from p.buf. The first byte not part of a label value -// is still copied into p.currentByte, but not into p.currentToken. -func (p *TextParser) readTokenAsLabelValue() { - p.currentToken.Reset() - escaped := false - for { - if p.currentByte, p.err = p.buf.ReadByte(); p.err != nil { - return - } - if escaped { - switch p.currentByte { - case '"', '\\': - p.currentToken.WriteByte(p.currentByte) - case 'n': - p.currentToken.WriteByte('\n') - default: - p.parseError(fmt.Sprintf("invalid escape sequence '\\%c'", p.currentByte)) - return - } - escaped = false - continue - } - switch p.currentByte { - case '"': - return - case '\n': - p.parseError(fmt.Sprintf("label value %q contains unescaped new-line", p.currentToken.String())) - return - case '\\': - escaped = true - default: - p.currentToken.WriteByte(p.currentByte) - } - } -} - -func (p *TextParser) setOrCreateCurrentMF() { - p.currentIsSummaryCount = false - p.currentIsSummarySum = false - p.currentIsHistogramCount = false - p.currentIsHistogramSum = false - name := p.currentToken.String() - if p.currentMF = p.metricFamiliesByName[name]; p.currentMF != nil { - return - } - // Try out if this is a _sum or _count for a summary/histogram. 
- summaryName := summaryMetricName(name) - if p.currentMF = p.metricFamiliesByName[summaryName]; p.currentMF != nil { - if p.currentMF.GetType() == dto.MetricType_SUMMARY { - if isCount(name) { - p.currentIsSummaryCount = true - } - if isSum(name) { - p.currentIsSummarySum = true - } - return - } - } - histogramName := histogramMetricName(name) - if p.currentMF = p.metricFamiliesByName[histogramName]; p.currentMF != nil { - if p.currentMF.GetType() == dto.MetricType_HISTOGRAM { - if isCount(name) { - p.currentIsHistogramCount = true - } - if isSum(name) { - p.currentIsHistogramSum = true - } - return - } - } - p.currentMF = &dto.MetricFamily{Name: proto.String(name)} - p.metricFamiliesByName[name] = p.currentMF -} - -func isValidLabelNameStart(b byte) bool { - return (b >= 'a' && b <= 'z') || (b >= 'A' && b <= 'Z') || b == '_' -} - -func isValidLabelNameContinuation(b byte) bool { - return isValidLabelNameStart(b) || (b >= '0' && b <= '9') -} - -func isValidMetricNameStart(b byte) bool { - return isValidLabelNameStart(b) || b == ':' -} - -func isValidMetricNameContinuation(b byte) bool { - return isValidLabelNameContinuation(b) || b == ':' -} - -func isBlankOrTab(b byte) bool { - return b == ' ' || b == '\t' -} - -func isCount(name string) bool { - return len(name) > 6 && name[len(name)-6:] == "_count" -} - -func isSum(name string) bool { - return len(name) > 4 && name[len(name)-4:] == "_sum" -} - -func isBucket(name string) bool { - return len(name) > 7 && name[len(name)-7:] == "_bucket" -} - -func summaryMetricName(name string) string { - switch { - case isCount(name): - return name[:len(name)-6] - case isSum(name): - return name[:len(name)-4] - default: - return name - } -} - -func histogramMetricName(name string) string { - switch { - case isCount(name): - return name[:len(name)-6] - case isSum(name): - return name[:len(name)-4] - case isBucket(name): - return name[:len(name)-7] - default: - return name - } -} - -func parseFloat(s string) (float64, error) { - if strings.ContainsAny(s, "pP_") { - return 0, fmt.Errorf("unsupported character in float") - } - return strconv.ParseFloat(s, 64) -} diff --git a/vendor/github.com/prometheus/common/internal/bitbucket.org/ww/goautoneg/README.txt b/vendor/github.com/prometheus/common/internal/bitbucket.org/ww/goautoneg/README.txt deleted file mode 100644 index 7723656d5..000000000 --- a/vendor/github.com/prometheus/common/internal/bitbucket.org/ww/goautoneg/README.txt +++ /dev/null @@ -1,67 +0,0 @@ -PACKAGE - -package goautoneg -import "bitbucket.org/ww/goautoneg" - -HTTP Content-Type Autonegotiation. - -The functions in this package implement the behaviour specified in -http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html - -Copyright (c) 2011, Open Knowledge Foundation Ltd. -All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are -met: - - Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. - - Redistributions in binary form must reproduce the above copyright - notice, this list of conditions and the following disclaimer in - the documentation and/or other materials provided with the - distribution. - - Neither the name of the Open Knowledge Foundation Ltd. nor the - names of its contributors may be used to endorse or promote - products derived from this software without specific prior written - permission. 
- -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - - -FUNCTIONS - -func Negotiate(header string, alternatives []string) (content_type string) -Negotiate the most appropriate content_type given the accept header -and a list of alternatives. - -func ParseAccept(header string) (accept []Accept) -Parse an Accept Header string returning a sorted list -of clauses - - -TYPES - -type Accept struct { - Type, SubType string - Q float32 - Params map[string]string -} -Structure to represent a clause in an HTTP Accept Header - - -SUBDIRECTORIES - - .hg diff --git a/vendor/github.com/prometheus/common/internal/bitbucket.org/ww/goautoneg/autoneg.go b/vendor/github.com/prometheus/common/internal/bitbucket.org/ww/goautoneg/autoneg.go deleted file mode 100644 index a21b9d15d..000000000 --- a/vendor/github.com/prometheus/common/internal/bitbucket.org/ww/goautoneg/autoneg.go +++ /dev/null @@ -1,160 +0,0 @@ -/* -Copyright (c) 2011, Open Knowledge Foundation Ltd. -All rights reserved. - -HTTP Content-Type Autonegotiation. - -The functions in this package implement the behaviour specified in -http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are -met: - - Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. - - Redistributions in binary form must reproduce the above copyright - notice, this list of conditions and the following disclaimer in - the documentation and/or other materials provided with the - distribution. - - Neither the name of the Open Knowledge Foundation Ltd. nor the - names of its contributors may be used to endorse or promote - products derived from this software without specific prior written - permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
-*/ -package goautoneg - -import ( - "sort" - "strconv" - "strings" -) - -// Structure to represent a clause in an HTTP Accept Header -type Accept struct { - Type, SubType string - Q float64 - Params map[string]string -} - -// For internal use, so that we can use the sort interface -type accept_slice []Accept - -func (accept accept_slice) Len() int { - slice := []Accept(accept) - return len(slice) -} - -func (accept accept_slice) Less(i, j int) bool { - slice := []Accept(accept) - ai, aj := slice[i], slice[j] - if ai.Q > aj.Q { - return true - } - if ai.Type != "*" && aj.Type == "*" { - return true - } - if ai.SubType != "*" && aj.SubType == "*" { - return true - } - return false -} - -func (accept accept_slice) Swap(i, j int) { - slice := []Accept(accept) - slice[i], slice[j] = slice[j], slice[i] -} - -// Parse an Accept Header string returning a sorted list -// of clauses -func ParseAccept(header string) (accept []Accept) { - parts := strings.Split(header, ",") - accept = make([]Accept, 0, len(parts)) - for _, part := range parts { - part := strings.Trim(part, " ") - - a := Accept{} - a.Params = make(map[string]string) - a.Q = 1.0 - - mrp := strings.Split(part, ";") - - media_range := mrp[0] - sp := strings.Split(media_range, "/") - a.Type = strings.Trim(sp[0], " ") - - switch { - case len(sp) == 1 && a.Type == "*": - a.SubType = "*" - case len(sp) == 2: - a.SubType = strings.Trim(sp[1], " ") - default: - continue - } - - if len(mrp) == 1 { - accept = append(accept, a) - continue - } - - for _, param := range mrp[1:] { - sp := strings.SplitN(param, "=", 2) - if len(sp) != 2 { - continue - } - token := strings.Trim(sp[0], " ") - if token == "q" { - a.Q, _ = strconv.ParseFloat(sp[1], 32) - } else { - a.Params[token] = strings.Trim(sp[1], " ") - } - } - - accept = append(accept, a) - } - - slice := accept_slice(accept) - sort.Sort(slice) - - return -} - -// Negotiate the most appropriate content_type given the accept header -// and a list of alternatives. -func Negotiate(header string, alternatives []string) (content_type string) { - asp := make([][]string, 0, len(alternatives)) - for _, ctype := range alternatives { - asp = append(asp, strings.SplitN(ctype, "/", 2)) - } - for _, clause := range ParseAccept(header) { - for i, ctsp := range asp { - if clause.Type == ctsp[0] && clause.SubType == ctsp[1] { - content_type = alternatives[i] - return - } - if clause.Type == ctsp[0] && clause.SubType == "*" { - content_type = alternatives[i] - return - } - if clause.Type == "*" && clause.SubType == "*" { - content_type = alternatives[i] - return - } - } - } - return -} diff --git a/vendor/github.com/prometheus/common/model/alert.go b/vendor/github.com/prometheus/common/model/alert.go deleted file mode 100644 index 178fdbaf6..000000000 --- a/vendor/github.com/prometheus/common/model/alert.go +++ /dev/null @@ -1,136 +0,0 @@ -// Copyright 2013 The Prometheus Authors -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package model - -import ( - "fmt" - "time" -) - -type AlertStatus string - -const ( - AlertFiring AlertStatus = "firing" - AlertResolved AlertStatus = "resolved" -) - -// Alert is a generic representation of an alert in the Prometheus eco-system. -type Alert struct { - // Label value pairs for purpose of aggregation, matching, and disposition - // dispatching. This must minimally include an "alertname" label. - Labels LabelSet `json:"labels"` - - // Extra key/value information which does not define alert identity. - Annotations LabelSet `json:"annotations"` - - // The known time range for this alert. Both ends are optional. - StartsAt time.Time `json:"startsAt,omitempty"` - EndsAt time.Time `json:"endsAt,omitempty"` - GeneratorURL string `json:"generatorURL"` -} - -// Name returns the name of the alert. It is equivalent to the "alertname" label. -func (a *Alert) Name() string { - return string(a.Labels[AlertNameLabel]) -} - -// Fingerprint returns a unique hash for the alert. It is equivalent to -// the fingerprint of the alert's label set. -func (a *Alert) Fingerprint() Fingerprint { - return a.Labels.Fingerprint() -} - -func (a *Alert) String() string { - s := fmt.Sprintf("%s[%s]", a.Name(), a.Fingerprint().String()[:7]) - if a.Resolved() { - return s + "[resolved]" - } - return s + "[active]" -} - -// Resolved returns true iff the activity interval ended in the past. -func (a *Alert) Resolved() bool { - return a.ResolvedAt(time.Now()) -} - -// ResolvedAt returns true off the activity interval ended before -// the given timestamp. -func (a *Alert) ResolvedAt(ts time.Time) bool { - if a.EndsAt.IsZero() { - return false - } - return !a.EndsAt.After(ts) -} - -// Status returns the status of the alert. -func (a *Alert) Status() AlertStatus { - if a.Resolved() { - return AlertResolved - } - return AlertFiring -} - -// Validate checks whether the alert data is inconsistent. -func (a *Alert) Validate() error { - if a.StartsAt.IsZero() { - return fmt.Errorf("start time missing") - } - if !a.EndsAt.IsZero() && a.EndsAt.Before(a.StartsAt) { - return fmt.Errorf("start time must be before end time") - } - if err := a.Labels.Validate(); err != nil { - return fmt.Errorf("invalid label set: %w", err) - } - if len(a.Labels) == 0 { - return fmt.Errorf("at least one label pair required") - } - if err := a.Annotations.Validate(); err != nil { - return fmt.Errorf("invalid annotations: %w", err) - } - return nil -} - -// Alert is a list of alerts that can be sorted in chronological order. -type Alerts []*Alert - -func (as Alerts) Len() int { return len(as) } -func (as Alerts) Swap(i, j int) { as[i], as[j] = as[j], as[i] } - -func (as Alerts) Less(i, j int) bool { - if as[i].StartsAt.Before(as[j].StartsAt) { - return true - } - if as[i].EndsAt.Before(as[j].EndsAt) { - return true - } - return as[i].Fingerprint() < as[j].Fingerprint() -} - -// HasFiring returns true iff one of the alerts is not resolved. -func (as Alerts) HasFiring() bool { - for _, a := range as { - if !a.Resolved() { - return true - } - } - return false -} - -// Status returns StatusFiring iff at least one of the alerts is firing. 
-func (as Alerts) Status() AlertStatus { - if as.HasFiring() { - return AlertFiring - } - return AlertResolved -} diff --git a/vendor/github.com/prometheus/common/model/fingerprinting.go b/vendor/github.com/prometheus/common/model/fingerprinting.go deleted file mode 100644 index fc4de4106..000000000 --- a/vendor/github.com/prometheus/common/model/fingerprinting.go +++ /dev/null @@ -1,105 +0,0 @@ -// Copyright 2013 The Prometheus Authors -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package model - -import ( - "fmt" - "strconv" -) - -// Fingerprint provides a hash-capable representation of a Metric. -// For our purposes, FNV-1A 64-bit is used. -type Fingerprint uint64 - -// FingerprintFromString transforms a string representation into a Fingerprint. -func FingerprintFromString(s string) (Fingerprint, error) { - num, err := strconv.ParseUint(s, 16, 64) - return Fingerprint(num), err -} - -// ParseFingerprint parses the input string into a fingerprint. -func ParseFingerprint(s string) (Fingerprint, error) { - num, err := strconv.ParseUint(s, 16, 64) - if err != nil { - return 0, err - } - return Fingerprint(num), nil -} - -func (f Fingerprint) String() string { - return fmt.Sprintf("%016x", uint64(f)) -} - -// Fingerprints represents a collection of Fingerprint subject to a given -// natural sorting scheme. It implements sort.Interface. -type Fingerprints []Fingerprint - -// Len implements sort.Interface. -func (f Fingerprints) Len() int { - return len(f) -} - -// Less implements sort.Interface. -func (f Fingerprints) Less(i, j int) bool { - return f[i] < f[j] -} - -// Swap implements sort.Interface. -func (f Fingerprints) Swap(i, j int) { - f[i], f[j] = f[j], f[i] -} - -// FingerprintSet is a set of Fingerprints. -type FingerprintSet map[Fingerprint]struct{} - -// Equal returns true if both sets contain the same elements (and not more). -func (s FingerprintSet) Equal(o FingerprintSet) bool { - if len(s) != len(o) { - return false - } - - for k := range s { - if _, ok := o[k]; !ok { - return false - } - } - - return true -} - -// Intersection returns the elements contained in both sets. -func (s FingerprintSet) Intersection(o FingerprintSet) FingerprintSet { - myLength, otherLength := len(s), len(o) - if myLength == 0 || otherLength == 0 { - return FingerprintSet{} - } - - subSet := s - superSet := o - - if otherLength < myLength { - subSet = o - superSet = s - } - - out := FingerprintSet{} - - for k := range subSet { - if _, ok := superSet[k]; ok { - out[k] = struct{}{} - } - } - - return out -} diff --git a/vendor/github.com/prometheus/common/model/fnv.go b/vendor/github.com/prometheus/common/model/fnv.go deleted file mode 100644 index 367afecd3..000000000 --- a/vendor/github.com/prometheus/common/model/fnv.go +++ /dev/null @@ -1,42 +0,0 @@ -// Copyright 2015 The Prometheus Authors -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package model - -// Inline and byte-free variant of hash/fnv's fnv64a. - -const ( - offset64 = 14695981039346656037 - prime64 = 1099511628211 -) - -// hashNew initializes a new fnv64a hash value. -func hashNew() uint64 { - return offset64 -} - -// hashAdd adds a string to a fnv64a hash value, returning the updated hash. -func hashAdd(h uint64, s string) uint64 { - for i := 0; i < len(s); i++ { - h ^= uint64(s[i]) - h *= prime64 - } - return h -} - -// hashAddByte adds a byte to a fnv64a hash value, returning the updated hash. -func hashAddByte(h uint64, b byte) uint64 { - h ^= uint64(b) - h *= prime64 - return h -} diff --git a/vendor/github.com/prometheus/common/model/labels.go b/vendor/github.com/prometheus/common/model/labels.go deleted file mode 100644 index 3317ce22f..000000000 --- a/vendor/github.com/prometheus/common/model/labels.go +++ /dev/null @@ -1,226 +0,0 @@ -// Copyright 2013 The Prometheus Authors -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package model - -import ( - "encoding/json" - "fmt" - "regexp" - "strings" - "unicode/utf8" -) - -const ( - // AlertNameLabel is the name of the label containing the an alert's name. - AlertNameLabel = "alertname" - - // ExportedLabelPrefix is the prefix to prepend to the label names present in - // exported metrics if a label of the same name is added by the server. - ExportedLabelPrefix = "exported_" - - // MetricNameLabel is the label name indicating the metric name of a - // timeseries. - MetricNameLabel = "__name__" - - // SchemeLabel is the name of the label that holds the scheme on which to - // scrape a target. - SchemeLabel = "__scheme__" - - // AddressLabel is the name of the label that holds the address of - // a scrape target. - AddressLabel = "__address__" - - // MetricsPathLabel is the name of the label that holds the path on which to - // scrape a target. - MetricsPathLabel = "__metrics_path__" - - // ScrapeIntervalLabel is the name of the label that holds the scrape interval - // used to scrape a target. - ScrapeIntervalLabel = "__scrape_interval__" - - // ScrapeTimeoutLabel is the name of the label that holds the scrape - // timeout used to scrape a target. - ScrapeTimeoutLabel = "__scrape_timeout__" - - // ReservedLabelPrefix is a prefix which is not legal in user-supplied - // label names. - ReservedLabelPrefix = "__" - - // MetaLabelPrefix is a prefix for labels that provide meta information. - // Labels with this prefix are used for intermediate label processing and - // will not be attached to time series. 
- MetaLabelPrefix = "__meta_" - - // TmpLabelPrefix is a prefix for temporary labels as part of relabelling. - // Labels with this prefix are used for intermediate label processing and - // will not be attached to time series. This is reserved for use in - // Prometheus configuration files by users. - TmpLabelPrefix = "__tmp_" - - // ParamLabelPrefix is a prefix for labels that provide URL parameters - // used to scrape a target. - ParamLabelPrefix = "__param_" - - // JobLabel is the label name indicating the job from which a timeseries - // was scraped. - JobLabel = "job" - - // InstanceLabel is the label name used for the instance label. - InstanceLabel = "instance" - - // BucketLabel is used for the label that defines the upper bound of a - // bucket of a histogram ("le" -> "less or equal"). - BucketLabel = "le" - - // QuantileLabel is used for the label that defines the quantile in a - // summary. - QuantileLabel = "quantile" -) - -// LabelNameRE is a regular expression matching valid label names. Note that the -// IsValid method of LabelName performs the same check but faster than a match -// with this regular expression. -var LabelNameRE = regexp.MustCompile("^[a-zA-Z_][a-zA-Z0-9_]*$") - -// A LabelName is a key for a LabelSet or Metric. It has a value associated -// therewith. -type LabelName string - -// IsValid returns true iff name matches the pattern of LabelNameRE for legacy -// names, and iff it's valid UTF-8 if NameValidationScheme is set to -// UTF8Validation. For the legacy matching, it does not use LabelNameRE for the -// check but a much faster hardcoded implementation. -func (ln LabelName) IsValid() bool { - if len(ln) == 0 { - return false - } - switch NameValidationScheme { - case LegacyValidation: - for i, b := range ln { - if !((b >= 'a' && b <= 'z') || (b >= 'A' && b <= 'Z') || b == '_' || (b >= '0' && b <= '9' && i > 0)) { - return false - } - } - case UTF8Validation: - return utf8.ValidString(string(ln)) - default: - panic(fmt.Sprintf("Invalid name validation scheme requested: %d", NameValidationScheme)) - } - return true -} - -// UnmarshalYAML implements the yaml.Unmarshaler interface. -func (ln *LabelName) UnmarshalYAML(unmarshal func(interface{}) error) error { - var s string - if err := unmarshal(&s); err != nil { - return err - } - if !LabelName(s).IsValid() { - return fmt.Errorf("%q is not a valid label name", s) - } - *ln = LabelName(s) - return nil -} - -// UnmarshalJSON implements the json.Unmarshaler interface. -func (ln *LabelName) UnmarshalJSON(b []byte) error { - var s string - if err := json.Unmarshal(b, &s); err != nil { - return err - } - if !LabelName(s).IsValid() { - return fmt.Errorf("%q is not a valid label name", s) - } - *ln = LabelName(s) - return nil -} - -// LabelNames is a sortable LabelName slice. In implements sort.Interface. -type LabelNames []LabelName - -func (l LabelNames) Len() int { - return len(l) -} - -func (l LabelNames) Less(i, j int) bool { - return l[i] < l[j] -} - -func (l LabelNames) Swap(i, j int) { - l[i], l[j] = l[j], l[i] -} - -func (l LabelNames) String() string { - labelStrings := make([]string, 0, len(l)) - for _, label := range l { - labelStrings = append(labelStrings, string(label)) - } - return strings.Join(labelStrings, ", ") -} - -// A LabelValue is an associated value for a LabelName. -type LabelValue string - -// IsValid returns true iff the string is a valid UTF-8. -func (lv LabelValue) IsValid() bool { - return utf8.ValidString(string(lv)) -} - -// LabelValues is a sortable LabelValue slice. 
It implements sort.Interface. -type LabelValues []LabelValue - -func (l LabelValues) Len() int { - return len(l) -} - -func (l LabelValues) Less(i, j int) bool { - return string(l[i]) < string(l[j]) -} - -func (l LabelValues) Swap(i, j int) { - l[i], l[j] = l[j], l[i] -} - -// LabelPair pairs a name with a value. -type LabelPair struct { - Name LabelName - Value LabelValue -} - -// LabelPairs is a sortable slice of LabelPair pointers. It implements -// sort.Interface. -type LabelPairs []*LabelPair - -func (l LabelPairs) Len() int { - return len(l) -} - -func (l LabelPairs) Less(i, j int) bool { - switch { - case l[i].Name > l[j].Name: - return false - case l[i].Name < l[j].Name: - return true - case l[i].Value > l[j].Value: - return false - case l[i].Value < l[j].Value: - return true - default: - return false - } -} - -func (l LabelPairs) Swap(i, j int) { - l[i], l[j] = l[j], l[i] -} diff --git a/vendor/github.com/prometheus/common/model/labelset.go b/vendor/github.com/prometheus/common/model/labelset.go deleted file mode 100644 index ec738e624..000000000 --- a/vendor/github.com/prometheus/common/model/labelset.go +++ /dev/null @@ -1,184 +0,0 @@ -// Copyright 2013 The Prometheus Authors -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package model - -import ( - "bytes" - "encoding/json" - "fmt" - "slices" - "sort" - "strconv" -) - -// A LabelSet is a collection of LabelName and LabelValue pairs. The LabelSet -// may be fully-qualified down to the point where it may resolve to a single -// Metric in the data store or not. All operations that occur within the realm -// of a LabelSet can emit a vector of Metric entities to which the LabelSet may -// match. -type LabelSet map[LabelName]LabelValue - -// Validate checks whether all names and values in the label set -// are valid. -func (ls LabelSet) Validate() error { - for ln, lv := range ls { - if !ln.IsValid() { - return fmt.Errorf("invalid name %q", ln) - } - if !lv.IsValid() { - return fmt.Errorf("invalid value %q", lv) - } - } - return nil -} - -// Equal returns true iff both label sets have exactly the same key/value pairs. -func (ls LabelSet) Equal(o LabelSet) bool { - if len(ls) != len(o) { - return false - } - for ln, lv := range ls { - olv, ok := o[ln] - if !ok { - return false - } - if olv != lv { - return false - } - } - return true -} - -// Before compares the metrics, using the following criteria: -// -// If m has fewer labels than o, it is before o. If it has more, it is not. -// -// If the number of labels is the same, the superset of all label names is -// sorted alphanumerically. The first differing label pair found in that order -// determines the outcome: If the label does not exist at all in m, then m is -// before o, and vice versa. Otherwise the label value is compared -// alphanumerically. -// -// If m and o are equal, the method returns false. 
-func (ls LabelSet) Before(o LabelSet) bool { - if len(ls) < len(o) { - return true - } - if len(ls) > len(o) { - return false - } - - lns := make(LabelNames, 0, len(ls)+len(o)) - for ln := range ls { - lns = append(lns, ln) - } - for ln := range o { - lns = append(lns, ln) - } - // It's probably not worth it to de-dup lns. - sort.Sort(lns) - for _, ln := range lns { - mlv, ok := ls[ln] - if !ok { - return true - } - olv, ok := o[ln] - if !ok { - return false - } - if mlv < olv { - return true - } - if mlv > olv { - return false - } - } - return false -} - -// Clone returns a copy of the label set. -func (ls LabelSet) Clone() LabelSet { - lsn := make(LabelSet, len(ls)) - for ln, lv := range ls { - lsn[ln] = lv - } - return lsn -} - -// Merge is a helper function to non-destructively merge two label sets. -func (l LabelSet) Merge(other LabelSet) LabelSet { - result := make(LabelSet, len(l)) - - for k, v := range l { - result[k] = v - } - - for k, v := range other { - result[k] = v - } - - return result -} - -// String will look like `{foo="bar", more="less"}`. Names are sorted alphabetically. -func (l LabelSet) String() string { - var lna [32]LabelName // On stack to avoid memory allocation for sorting names. - labelNames := lna[:0] - for name := range l { - labelNames = append(labelNames, name) - } - slices.Sort(labelNames) - var bytea [1024]byte // On stack to avoid memory allocation while building the output. - b := bytes.NewBuffer(bytea[:0]) - b.WriteByte('{') - for i, name := range labelNames { - if i > 0 { - b.WriteString(", ") - } - b.WriteString(string(name)) - b.WriteByte('=') - b.Write(strconv.AppendQuote(b.AvailableBuffer(), string(l[name]))) - } - b.WriteByte('}') - return b.String() -} - -// Fingerprint returns the LabelSet's fingerprint. -func (ls LabelSet) Fingerprint() Fingerprint { - return labelSetToFingerprint(ls) -} - -// FastFingerprint returns the LabelSet's Fingerprint calculated by a faster hashing -// algorithm, which is, however, more susceptible to hash collisions. -func (ls LabelSet) FastFingerprint() Fingerprint { - return labelSetToFastFingerprint(ls) -} - -// UnmarshalJSON implements the json.Unmarshaler interface. -func (l *LabelSet) UnmarshalJSON(b []byte) error { - var m map[LabelName]LabelValue - if err := json.Unmarshal(b, &m); err != nil { - return err - } - // encoding/json only unmarshals maps of the form map[string]T. It treats - // LabelName as a string and does not call its UnmarshalJSON method. - // Thus, we have to replicate the behavior here. - for ln := range m { - if !ln.IsValid() { - return fmt.Errorf("%q is not a valid label name", ln) - } - } - *l = LabelSet(m) - return nil -} diff --git a/vendor/github.com/prometheus/common/model/metadata.go b/vendor/github.com/prometheus/common/model/metadata.go deleted file mode 100644 index 447ab8ad6..000000000 --- a/vendor/github.com/prometheus/common/model/metadata.go +++ /dev/null @@ -1,28 +0,0 @@ -// Copyright 2023 The Prometheus Authors -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package model - -// MetricType represents metric type values. -type MetricType string - -const ( - MetricTypeCounter = MetricType("counter") - MetricTypeGauge = MetricType("gauge") - MetricTypeHistogram = MetricType("histogram") - MetricTypeGaugeHistogram = MetricType("gaugehistogram") - MetricTypeSummary = MetricType("summary") - MetricTypeInfo = MetricType("info") - MetricTypeStateset = MetricType("stateset") - MetricTypeUnknown = MetricType("unknown") -) diff --git a/vendor/github.com/prometheus/common/model/metric.go b/vendor/github.com/prometheus/common/model/metric.go deleted file mode 100644 index eb865e5a5..000000000 --- a/vendor/github.com/prometheus/common/model/metric.go +++ /dev/null @@ -1,457 +0,0 @@ -// Copyright 2013 The Prometheus Authors -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package model - -import ( - "fmt" - "regexp" - "sort" - "strings" - "unicode/utf8" - - dto "github.com/prometheus/client_model/go" - "google.golang.org/protobuf/proto" -) - -var ( - // NameValidationScheme determines the method of name validation to be used by - // all calls to IsValidMetricName() and LabelName IsValid(). Setting UTF-8 mode - // in isolation from other components that don't support UTF-8 may result in - // bugs or other undefined behavior. This value is intended to be set by - // UTF-8-aware binaries as part of their startup. To avoid need for locking, - // this value should be set once, ideally in an init(), before multiple - // goroutines are started. - NameValidationScheme = LegacyValidation - - // NameEscapingScheme defines the default way that names will be - // escaped when presented to systems that do not support UTF-8 names. If the - // Content-Type "escaping" term is specified, that will override this value. - NameEscapingScheme = ValueEncodingEscaping -) - -// ValidationScheme is a Go enum for determining how metric and label names will -// be validated by this library. -type ValidationScheme int - -const ( - // LegacyValidation is a setting that requirets that metric and label names - // conform to the original Prometheus character requirements described by - // MetricNameRE and LabelNameRE. - LegacyValidation ValidationScheme = iota - - // UTF8Validation only requires that metric and label names be valid UTF-8 - // strings. - UTF8Validation -) - -type EscapingScheme int - -const ( - // NoEscaping indicates that a name will not be escaped. Unescaped names that - // do not conform to the legacy validity check will use a new exposition - // format syntax that will be officially standardized in future versions. - NoEscaping EscapingScheme = iota - - // UnderscoreEscaping replaces all legacy-invalid characters with underscores. - UnderscoreEscaping - - // DotsEscaping is similar to UnderscoreEscaping, except that dots are - // converted to `_dot_` and pre-existing underscores are converted to `__`. 
- DotsEscaping - - // ValueEncodingEscaping prepends the name with `U__` and replaces all invalid - // characters with the unicode value, surrounded by underscores. Single - // underscores are replaced with double underscores. - ValueEncodingEscaping -) - -const ( - // EscapingKey is the key in an Accept or Content-Type header that defines how - // metric and label names that do not conform to the legacy character - // requirements should be escaped when being scraped by a legacy prometheus - // system. If a system does not explicitly pass an escaping parameter in the - // Accept header, the default NameEscapingScheme will be used. - EscapingKey = "escaping" - - // Possible values for Escaping Key: - AllowUTF8 = "allow-utf-8" // No escaping required. - EscapeUnderscores = "underscores" - EscapeDots = "dots" - EscapeValues = "values" -) - -// MetricNameRE is a regular expression matching valid metric -// names. Note that the IsValidMetricName function performs the same -// check but faster than a match with this regular expression. -var MetricNameRE = regexp.MustCompile(`^[a-zA-Z_:][a-zA-Z0-9_:]*$`) - -// A Metric is similar to a LabelSet, but the key difference is that a Metric is -// a singleton and refers to one and only one stream of samples. -type Metric LabelSet - -// Equal compares the metrics. -func (m Metric) Equal(o Metric) bool { - return LabelSet(m).Equal(LabelSet(o)) -} - -// Before compares the metrics' underlying label sets. -func (m Metric) Before(o Metric) bool { - return LabelSet(m).Before(LabelSet(o)) -} - -// Clone returns a copy of the Metric. -func (m Metric) Clone() Metric { - clone := make(Metric, len(m)) - for k, v := range m { - clone[k] = v - } - return clone -} - -func (m Metric) String() string { - metricName, hasName := m[MetricNameLabel] - numLabels := len(m) - 1 - if !hasName { - numLabels = len(m) - } - labelStrings := make([]string, 0, numLabels) - for label, value := range m { - if label != MetricNameLabel { - labelStrings = append(labelStrings, fmt.Sprintf("%s=%q", label, value)) - } - } - - switch numLabels { - case 0: - if hasName { - return string(metricName) - } - return "{}" - default: - sort.Strings(labelStrings) - return fmt.Sprintf("%s{%s}", metricName, strings.Join(labelStrings, ", ")) - } -} - -// Fingerprint returns a Metric's Fingerprint. -func (m Metric) Fingerprint() Fingerprint { - return LabelSet(m).Fingerprint() -} - -// FastFingerprint returns a Metric's Fingerprint calculated by a faster hashing -// algorithm, which is, however, more susceptible to hash collisions. -func (m Metric) FastFingerprint() Fingerprint { - return LabelSet(m).FastFingerprint() -} - -// IsValidMetricName returns true iff name matches the pattern of MetricNameRE -// for legacy names, and iff it's valid UTF-8 if the UTF8Validation scheme is -// selected. -func IsValidMetricName(n LabelValue) bool { - switch NameValidationScheme { - case LegacyValidation: - return IsValidLegacyMetricName(n) - case UTF8Validation: - if len(n) == 0 { - return false - } - return utf8.ValidString(string(n)) - default: - panic(fmt.Sprintf("Invalid name validation scheme requested: %d", NameValidationScheme)) - } -} - -// IsValidLegacyMetricName is similar to IsValidMetricName but always uses the -// legacy validation scheme regardless of the value of NameValidationScheme. -// This function, however, does not use MetricNameRE for the check but a much -// faster hardcoded implementation. 
-func IsValidLegacyMetricName(n LabelValue) bool { - if len(n) == 0 { - return false - } - for i, b := range n { - if !isValidLegacyRune(b, i) { - return false - } - } - return true -} - -// EscapeMetricFamily escapes the given metric names and labels with the given -// escaping scheme. Returns a new object that uses the same pointers to fields -// when possible and creates new escaped versions so as not to mutate the -// input. -func EscapeMetricFamily(v *dto.MetricFamily, scheme EscapingScheme) *dto.MetricFamily { - if v == nil { - return nil - } - - if scheme == NoEscaping { - return v - } - - out := &dto.MetricFamily{ - Help: v.Help, - Type: v.Type, - Unit: v.Unit, - } - - // If the name is nil, copy as-is, don't try to escape. - if v.Name == nil || IsValidLegacyMetricName(LabelValue(v.GetName())) { - out.Name = v.Name - } else { - out.Name = proto.String(EscapeName(v.GetName(), scheme)) - } - for _, m := range v.Metric { - if !metricNeedsEscaping(m) { - out.Metric = append(out.Metric, m) - continue - } - - escaped := &dto.Metric{ - Gauge: m.Gauge, - Counter: m.Counter, - Summary: m.Summary, - Untyped: m.Untyped, - Histogram: m.Histogram, - TimestampMs: m.TimestampMs, - } - - for _, l := range m.Label { - if l.GetName() == MetricNameLabel { - if l.Value == nil || IsValidLegacyMetricName(LabelValue(l.GetValue())) { - escaped.Label = append(escaped.Label, l) - continue - } - escaped.Label = append(escaped.Label, &dto.LabelPair{ - Name: proto.String(MetricNameLabel), - Value: proto.String(EscapeName(l.GetValue(), scheme)), - }) - continue - } - if l.Name == nil || IsValidLegacyMetricName(LabelValue(l.GetName())) { - escaped.Label = append(escaped.Label, l) - continue - } - escaped.Label = append(escaped.Label, &dto.LabelPair{ - Name: proto.String(EscapeName(l.GetName(), scheme)), - Value: l.Value, - }) - } - out.Metric = append(out.Metric, escaped) - } - return out -} - -func metricNeedsEscaping(m *dto.Metric) bool { - for _, l := range m.Label { - if l.GetName() == MetricNameLabel && !IsValidLegacyMetricName(LabelValue(l.GetValue())) { - return true - } - if !IsValidLegacyMetricName(LabelValue(l.GetName())) { - return true - } - } - return false -} - -const ( - lowerhex = "0123456789abcdef" -) - -// EscapeName escapes the incoming name according to the provided escaping -// scheme. Depending on the rules of escaping, this may cause no change in the -// string that is returned. (Especially NoEscaping, which by definition is a -// noop). This function does not do any validation of the name. -func EscapeName(name string, scheme EscapingScheme) string { - if len(name) == 0 { - return name - } - var escaped strings.Builder - switch scheme { - case NoEscaping: - return name - case UnderscoreEscaping: - if IsValidLegacyMetricName(LabelValue(name)) { - return name - } - for i, b := range name { - if isValidLegacyRune(b, i) { - escaped.WriteRune(b) - } else { - escaped.WriteRune('_') - } - } - return escaped.String() - case DotsEscaping: - // Do not early return for legacy valid names, we still escape underscores. - for i, b := range name { - if b == '_' { - escaped.WriteString("__") - } else if b == '.' 
{ - escaped.WriteString("_dot_") - } else if isValidLegacyRune(b, i) { - escaped.WriteRune(b) - } else { - escaped.WriteRune('_') - } - } - return escaped.String() - case ValueEncodingEscaping: - if IsValidLegacyMetricName(LabelValue(name)) { - return name - } - escaped.WriteString("U__") - for i, b := range name { - if isValidLegacyRune(b, i) { - escaped.WriteRune(b) - } else if !utf8.ValidRune(b) { - escaped.WriteString("_FFFD_") - } else if b < 0x100 { - escaped.WriteRune('_') - for s := 4; s >= 0; s -= 4 { - escaped.WriteByte(lowerhex[b>>uint(s)&0xF]) - } - escaped.WriteRune('_') - } else if b < 0x10000 { - escaped.WriteRune('_') - for s := 12; s >= 0; s -= 4 { - escaped.WriteByte(lowerhex[b>>uint(s)&0xF]) - } - escaped.WriteRune('_') - } - } - return escaped.String() - default: - panic(fmt.Sprintf("invalid escaping scheme %d", scheme)) - } -} - -// lower function taken from strconv.atoi -func lower(c byte) byte { - return c | ('x' - 'X') -} - -// UnescapeName unescapes the incoming name according to the provided escaping -// scheme if possible. Some schemes are partially or totally non-roundtripable. -// If any error is enountered, returns the original input. -func UnescapeName(name string, scheme EscapingScheme) string { - if len(name) == 0 { - return name - } - switch scheme { - case NoEscaping: - return name - case UnderscoreEscaping: - // It is not possible to unescape from underscore replacement. - return name - case DotsEscaping: - name = strings.ReplaceAll(name, "_dot_", ".") - name = strings.ReplaceAll(name, "__", "_") - return name - case ValueEncodingEscaping: - escapedName, found := strings.CutPrefix(name, "U__") - if !found { - return name - } - - var unescaped strings.Builder - TOP: - for i := 0; i < len(escapedName); i++ { - // All non-underscores are treated normally. - if escapedName[i] != '_' { - unescaped.WriteByte(escapedName[i]) - continue - } - i++ - if i >= len(escapedName) { - return name - } - // A double underscore is a single underscore. - if escapedName[i] == '_' { - unescaped.WriteByte('_') - continue - } - // We think we are in a UTF-8 code, process it. - var utf8Val uint - for j := 0; i < len(escapedName); j++ { - // This is too many characters for a utf8 value. - if j > 4 { - return name - } - // Found a closing underscore, convert to a rune, check validity, and append. - if escapedName[i] == '_' { - utf8Rune := rune(utf8Val) - if !utf8.ValidRune(utf8Rune) { - return name - } - unescaped.WriteRune(utf8Rune) - continue TOP - } - r := lower(escapedName[i]) - utf8Val *= 16 - if r >= '0' && r <= '9' { - utf8Val += uint(r) - '0' - } else if r >= 'a' && r <= 'f' { - utf8Val += uint(r) - 'a' + 10 - } else { - return name - } - i++ - } - // Didn't find closing underscore, invalid. 
- return name - } - return unescaped.String() - default: - panic(fmt.Sprintf("invalid escaping scheme %d", scheme)) - } -} - -func isValidLegacyRune(b rune, i int) bool { - return (b >= 'a' && b <= 'z') || (b >= 'A' && b <= 'Z') || b == '_' || b == ':' || (b >= '0' && b <= '9' && i > 0) -} - -func (e EscapingScheme) String() string { - switch e { - case NoEscaping: - return AllowUTF8 - case UnderscoreEscaping: - return EscapeUnderscores - case DotsEscaping: - return EscapeDots - case ValueEncodingEscaping: - return EscapeValues - default: - panic(fmt.Sprintf("unknown format scheme %d", e)) - } -} - -func ToEscapingScheme(s string) (EscapingScheme, error) { - if s == "" { - return NoEscaping, fmt.Errorf("got empty string instead of escaping scheme") - } - switch s { - case AllowUTF8: - return NoEscaping, nil - case EscapeUnderscores: - return UnderscoreEscaping, nil - case EscapeDots: - return DotsEscaping, nil - case EscapeValues: - return ValueEncodingEscaping, nil - default: - return NoEscaping, fmt.Errorf("unknown format scheme " + s) - } -} diff --git a/vendor/github.com/prometheus/common/model/model.go b/vendor/github.com/prometheus/common/model/model.go deleted file mode 100644 index a7b969170..000000000 --- a/vendor/github.com/prometheus/common/model/model.go +++ /dev/null @@ -1,16 +0,0 @@ -// Copyright 2013 The Prometheus Authors -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -// Package model contains common data structures that are shared across -// Prometheus components and libraries. -package model diff --git a/vendor/github.com/prometheus/common/model/signature.go b/vendor/github.com/prometheus/common/model/signature.go deleted file mode 100644 index dc8a0026c..000000000 --- a/vendor/github.com/prometheus/common/model/signature.go +++ /dev/null @@ -1,142 +0,0 @@ -// Copyright 2014 The Prometheus Authors -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package model - -import ( - "sort" -) - -// SeparatorByte is a byte that cannot occur in valid UTF-8 sequences and is -// used to separate label names, label values, and other strings from each other -// when calculating their combined hash value (aka signature aka fingerprint). -const SeparatorByte byte = 255 - -// cache the signature of an empty label set. -var emptyLabelSignature = hashNew() - -// LabelsToSignature returns a quasi-unique signature (i.e., fingerprint) for a -// given label set. (Collisions are possible but unlikely if the number of label -// sets the function is applied to is small.) 
-func LabelsToSignature(labels map[string]string) uint64 { - if len(labels) == 0 { - return emptyLabelSignature - } - - labelNames := make([]string, 0, len(labels)) - for labelName := range labels { - labelNames = append(labelNames, labelName) - } - sort.Strings(labelNames) - - sum := hashNew() - for _, labelName := range labelNames { - sum = hashAdd(sum, labelName) - sum = hashAddByte(sum, SeparatorByte) - sum = hashAdd(sum, labels[labelName]) - sum = hashAddByte(sum, SeparatorByte) - } - return sum -} - -// labelSetToFingerprint works exactly as LabelsToSignature but takes a LabelSet as -// parameter (rather than a label map) and returns a Fingerprint. -func labelSetToFingerprint(ls LabelSet) Fingerprint { - if len(ls) == 0 { - return Fingerprint(emptyLabelSignature) - } - - labelNames := make(LabelNames, 0, len(ls)) - for labelName := range ls { - labelNames = append(labelNames, labelName) - } - sort.Sort(labelNames) - - sum := hashNew() - for _, labelName := range labelNames { - sum = hashAdd(sum, string(labelName)) - sum = hashAddByte(sum, SeparatorByte) - sum = hashAdd(sum, string(ls[labelName])) - sum = hashAddByte(sum, SeparatorByte) - } - return Fingerprint(sum) -} - -// labelSetToFastFingerprint works similar to labelSetToFingerprint but uses a -// faster and less allocation-heavy hash function, which is more susceptible to -// create hash collisions. Therefore, collision detection should be applied. -func labelSetToFastFingerprint(ls LabelSet) Fingerprint { - if len(ls) == 0 { - return Fingerprint(emptyLabelSignature) - } - - var result uint64 - for labelName, labelValue := range ls { - sum := hashNew() - sum = hashAdd(sum, string(labelName)) - sum = hashAddByte(sum, SeparatorByte) - sum = hashAdd(sum, string(labelValue)) - result ^= sum - } - return Fingerprint(result) -} - -// SignatureForLabels works like LabelsToSignature but takes a Metric as -// parameter (rather than a label map) and only includes the labels with the -// specified LabelNames into the signature calculation. The labels passed in -// will be sorted by this function. -func SignatureForLabels(m Metric, labels ...LabelName) uint64 { - if len(labels) == 0 { - return emptyLabelSignature - } - - sort.Sort(LabelNames(labels)) - - sum := hashNew() - for _, label := range labels { - sum = hashAdd(sum, string(label)) - sum = hashAddByte(sum, SeparatorByte) - sum = hashAdd(sum, string(m[label])) - sum = hashAddByte(sum, SeparatorByte) - } - return sum -} - -// SignatureWithoutLabels works like LabelsToSignature but takes a Metric as -// parameter (rather than a label map) and excludes the labels with any of the -// specified LabelNames from the signature calculation. 
-func SignatureWithoutLabels(m Metric, labels map[LabelName]struct{}) uint64 { - if len(m) == 0 { - return emptyLabelSignature - } - - labelNames := make(LabelNames, 0, len(m)) - for labelName := range m { - if _, exclude := labels[labelName]; !exclude { - labelNames = append(labelNames, labelName) - } - } - if len(labelNames) == 0 { - return emptyLabelSignature - } - sort.Sort(labelNames) - - sum := hashNew() - for _, labelName := range labelNames { - sum = hashAdd(sum, string(labelName)) - sum = hashAddByte(sum, SeparatorByte) - sum = hashAdd(sum, string(m[labelName])) - sum = hashAddByte(sum, SeparatorByte) - } - return sum -} diff --git a/vendor/github.com/prometheus/common/model/silence.go b/vendor/github.com/prometheus/common/model/silence.go deleted file mode 100644 index 910b0b71f..000000000 --- a/vendor/github.com/prometheus/common/model/silence.go +++ /dev/null @@ -1,106 +0,0 @@ -// Copyright 2015 The Prometheus Authors -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package model - -import ( - "encoding/json" - "fmt" - "regexp" - "time" -) - -// Matcher describes a matches the value of a given label. -type Matcher struct { - Name LabelName `json:"name"` - Value string `json:"value"` - IsRegex bool `json:"isRegex"` -} - -func (m *Matcher) UnmarshalJSON(b []byte) error { - type plain Matcher - if err := json.Unmarshal(b, (*plain)(m)); err != nil { - return err - } - - if len(m.Name) == 0 { - return fmt.Errorf("label name in matcher must not be empty") - } - if m.IsRegex { - if _, err := regexp.Compile(m.Value); err != nil { - return err - } - } - return nil -} - -// Validate returns true iff all fields of the matcher have valid values. -func (m *Matcher) Validate() error { - if !m.Name.IsValid() { - return fmt.Errorf("invalid name %q", m.Name) - } - if m.IsRegex { - if _, err := regexp.Compile(m.Value); err != nil { - return fmt.Errorf("invalid regular expression %q", m.Value) - } - } else if !LabelValue(m.Value).IsValid() || len(m.Value) == 0 { - return fmt.Errorf("invalid value %q", m.Value) - } - return nil -} - -// Silence defines the representation of a silence definition in the Prometheus -// eco-system. -type Silence struct { - ID uint64 `json:"id,omitempty"` - - Matchers []*Matcher `json:"matchers"` - - StartsAt time.Time `json:"startsAt"` - EndsAt time.Time `json:"endsAt"` - - CreatedAt time.Time `json:"createdAt,omitempty"` - CreatedBy string `json:"createdBy"` - Comment string `json:"comment,omitempty"` -} - -// Validate returns true iff all fields of the silence have valid values. 
-func (s *Silence) Validate() error { - if len(s.Matchers) == 0 { - return fmt.Errorf("at least one matcher required") - } - for _, m := range s.Matchers { - if err := m.Validate(); err != nil { - return fmt.Errorf("invalid matcher: %w", err) - } - } - if s.StartsAt.IsZero() { - return fmt.Errorf("start time missing") - } - if s.EndsAt.IsZero() { - return fmt.Errorf("end time missing") - } - if s.EndsAt.Before(s.StartsAt) { - return fmt.Errorf("start time must be before end time") - } - if s.CreatedBy == "" { - return fmt.Errorf("creator information missing") - } - if s.Comment == "" { - return fmt.Errorf("comment missing") - } - if s.CreatedAt.IsZero() { - return fmt.Errorf("creation timestamp missing") - } - return nil -} diff --git a/vendor/github.com/prometheus/common/model/time.go b/vendor/github.com/prometheus/common/model/time.go deleted file mode 100644 index 5727452c1..000000000 --- a/vendor/github.com/prometheus/common/model/time.go +++ /dev/null @@ -1,340 +0,0 @@ -// Copyright 2013 The Prometheus Authors -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package model - -import ( - "encoding/json" - "errors" - "fmt" - "math" - "strconv" - "strings" - "time" -) - -const ( - // MinimumTick is the minimum supported time resolution. This has to be - // at least time.Second in order for the code below to work. - minimumTick = time.Millisecond - // second is the Time duration equivalent to one second. - second = int64(time.Second / minimumTick) - // The number of nanoseconds per minimum tick. - nanosPerTick = int64(minimumTick / time.Nanosecond) - - // Earliest is the earliest Time representable. Handy for - // initializing a high watermark. - Earliest = Time(math.MinInt64) - // Latest is the latest Time representable. Handy for initializing - // a low watermark. - Latest = Time(math.MaxInt64) -) - -// Time is the number of milliseconds since the epoch -// (1970-01-01 00:00 UTC) excluding leap seconds. -type Time int64 - -// Interval describes an interval between two timestamps. -type Interval struct { - Start, End Time -} - -// Now returns the current time as a Time. -func Now() Time { - return TimeFromUnixNano(time.Now().UnixNano()) -} - -// TimeFromUnix returns the Time equivalent to the Unix Time t -// provided in seconds. -func TimeFromUnix(t int64) Time { - return Time(t * second) -} - -// TimeFromUnixNano returns the Time equivalent to the Unix Time -// t provided in nanoseconds. -func TimeFromUnixNano(t int64) Time { - return Time(t / nanosPerTick) -} - -// Equal reports whether two Times represent the same instant. -func (t Time) Equal(o Time) bool { - return t == o -} - -// Before reports whether the Time t is before o. -func (t Time) Before(o Time) bool { - return t < o -} - -// After reports whether the Time t is after o. -func (t Time) After(o Time) bool { - return t > o -} - -// Add returns the Time t + d. -func (t Time) Add(d time.Duration) Time { - return t + Time(d/minimumTick) -} - -// Sub returns the Duration t - o. 
-func (t Time) Sub(o Time) time.Duration { - return time.Duration(t-o) * minimumTick -} - -// Time returns the time.Time representation of t. -func (t Time) Time() time.Time { - return time.Unix(int64(t)/second, (int64(t)%second)*nanosPerTick) -} - -// Unix returns t as a Unix time, the number of seconds elapsed -// since January 1, 1970 UTC. -func (t Time) Unix() int64 { - return int64(t) / second -} - -// UnixNano returns t as a Unix time, the number of nanoseconds elapsed -// since January 1, 1970 UTC. -func (t Time) UnixNano() int64 { - return int64(t) * nanosPerTick -} - -// The number of digits after the dot. -var dotPrecision = int(math.Log10(float64(second))) - -// String returns a string representation of the Time. -func (t Time) String() string { - return strconv.FormatFloat(float64(t)/float64(second), 'f', -1, 64) -} - -// MarshalJSON implements the json.Marshaler interface. -func (t Time) MarshalJSON() ([]byte, error) { - return []byte(t.String()), nil -} - -// UnmarshalJSON implements the json.Unmarshaler interface. -func (t *Time) UnmarshalJSON(b []byte) error { - p := strings.Split(string(b), ".") - switch len(p) { - case 1: - v, err := strconv.ParseInt(string(p[0]), 10, 64) - if err != nil { - return err - } - *t = Time(v * second) - - case 2: - v, err := strconv.ParseInt(string(p[0]), 10, 64) - if err != nil { - return err - } - v *= second - - prec := dotPrecision - len(p[1]) - if prec < 0 { - p[1] = p[1][:dotPrecision] - } else if prec > 0 { - p[1] = p[1] + strings.Repeat("0", prec) - } - - va, err := strconv.ParseInt(p[1], 10, 32) - if err != nil { - return err - } - - // If the value was something like -0.1 the negative is lost in the - // parsing because of the leading zero, this ensures that we capture it. - if len(p[0]) > 0 && p[0][0] == '-' && v+va > 0 { - *t = Time(v+va) * -1 - } else { - *t = Time(v + va) - } - - default: - return fmt.Errorf("invalid time %q", string(b)) - } - return nil -} - -// Duration wraps time.Duration. It is used to parse the custom duration format -// from YAML. -// This type should not propagate beyond the scope of input/output processing. -type Duration time.Duration - -// Set implements pflag/flag.Value -func (d *Duration) Set(s string) error { - var err error - *d, err = ParseDuration(s) - return err -} - -// Type implements pflag.Value -func (d *Duration) Type() string { - return "duration" -} - -func isdigit(c byte) bool { return c >= '0' && c <= '9' } - -// Units are required to go in order from biggest to smallest. -// This guards against confusion from "1m1d" being 1 minute + 1 day, not 1 month + 1 day. -var unitMap = map[string]struct { - pos int - mult uint64 -}{ - "ms": {7, uint64(time.Millisecond)}, - "s": {6, uint64(time.Second)}, - "m": {5, uint64(time.Minute)}, - "h": {4, uint64(time.Hour)}, - "d": {3, uint64(24 * time.Hour)}, - "w": {2, uint64(7 * 24 * time.Hour)}, - "y": {1, uint64(365 * 24 * time.Hour)}, -} - -// ParseDuration parses a string into a time.Duration, assuming that a year -// always has 365d, a week always has 7d, and a day always has 24h. -func ParseDuration(s string) (Duration, error) { - switch s { - case "0": - // Allow 0 without a unit. 
- return 0, nil - case "": - return 0, errors.New("empty duration string") - } - - orig := s - var dur uint64 - lastUnitPos := 0 - - for s != "" { - if !isdigit(s[0]) { - return 0, fmt.Errorf("not a valid duration string: %q", orig) - } - // Consume [0-9]* - i := 0 - for ; i < len(s) && isdigit(s[i]); i++ { - } - v, err := strconv.ParseUint(s[:i], 10, 0) - if err != nil { - return 0, fmt.Errorf("not a valid duration string: %q", orig) - } - s = s[i:] - - // Consume unit. - for i = 0; i < len(s) && !isdigit(s[i]); i++ { - } - if i == 0 { - return 0, fmt.Errorf("not a valid duration string: %q", orig) - } - u := s[:i] - s = s[i:] - unit, ok := unitMap[u] - if !ok { - return 0, fmt.Errorf("unknown unit %q in duration %q", u, orig) - } - if unit.pos <= lastUnitPos { // Units must go in order from biggest to smallest. - return 0, fmt.Errorf("not a valid duration string: %q", orig) - } - lastUnitPos = unit.pos - // Check if the provided duration overflows time.Duration (> ~ 290years). - if v > 1<<63/unit.mult { - return 0, errors.New("duration out of range") - } - dur += v * unit.mult - if dur > 1<<63-1 { - return 0, errors.New("duration out of range") - } - } - return Duration(dur), nil -} - -func (d Duration) String() string { - var ( - ms = int64(time.Duration(d) / time.Millisecond) - r = "" - ) - if ms == 0 { - return "0s" - } - - f := func(unit string, mult int64, exact bool) { - if exact && ms%mult != 0 { - return - } - if v := ms / mult; v > 0 { - r += fmt.Sprintf("%d%s", v, unit) - ms -= v * mult - } - } - - // Only format years and weeks if the remainder is zero, as it is often - // easier to read 90d than 12w6d. - f("y", 1000*60*60*24*365, true) - f("w", 1000*60*60*24*7, true) - - f("d", 1000*60*60*24, false) - f("h", 1000*60*60, false) - f("m", 1000*60, false) - f("s", 1000, false) - f("ms", 1, false) - - return r -} - -// MarshalJSON implements the json.Marshaler interface. -func (d Duration) MarshalJSON() ([]byte, error) { - return json.Marshal(d.String()) -} - -// UnmarshalJSON implements the json.Unmarshaler interface. -func (d *Duration) UnmarshalJSON(bytes []byte) error { - var s string - if err := json.Unmarshal(bytes, &s); err != nil { - return err - } - dur, err := ParseDuration(s) - if err != nil { - return err - } - *d = dur - return nil -} - -// MarshalText implements the encoding.TextMarshaler interface. -func (d *Duration) MarshalText() ([]byte, error) { - return []byte(d.String()), nil -} - -// UnmarshalText implements the encoding.TextUnmarshaler interface. -func (d *Duration) UnmarshalText(text []byte) error { - var err error - *d, err = ParseDuration(string(text)) - return err -} - -// MarshalYAML implements the yaml.Marshaler interface. -func (d Duration) MarshalYAML() (interface{}, error) { - return d.String(), nil -} - -// UnmarshalYAML implements the yaml.Unmarshaler interface. -func (d *Duration) UnmarshalYAML(unmarshal func(interface{}) error) error { - var s string - if err := unmarshal(&s); err != nil { - return err - } - dur, err := ParseDuration(s) - if err != nil { - return err - } - *d = dur - return nil -} diff --git a/vendor/github.com/prometheus/common/model/value.go b/vendor/github.com/prometheus/common/model/value.go deleted file mode 100644 index 8050637d8..000000000 --- a/vendor/github.com/prometheus/common/model/value.go +++ /dev/null @@ -1,364 +0,0 @@ -// Copyright 2013 The Prometheus Authors -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package model - -import ( - "encoding/json" - "fmt" - "sort" - "strconv" - "strings" -) - -// ZeroSample is the pseudo zero-value of Sample used to signal a -// non-existing sample. It is a Sample with timestamp Earliest, value 0.0, -// and metric nil. Note that the natural zero value of Sample has a timestamp -// of 0, which is possible to appear in a real Sample and thus not suitable -// to signal a non-existing Sample. -var ZeroSample = Sample{Timestamp: Earliest} - -// Sample is a sample pair associated with a metric. A single sample must either -// define Value or Histogram but not both. Histogram == nil implies the Value -// field is used, otherwise it should be ignored. -type Sample struct { - Metric Metric `json:"metric"` - Value SampleValue `json:"value"` - Timestamp Time `json:"timestamp"` - Histogram *SampleHistogram `json:"histogram"` -} - -// Equal compares first the metrics, then the timestamp, then the value. The -// semantics of value equality is defined by SampleValue.Equal. -func (s *Sample) Equal(o *Sample) bool { - if s == o { - return true - } - - if !s.Metric.Equal(o.Metric) { - return false - } - if !s.Timestamp.Equal(o.Timestamp) { - return false - } - if s.Histogram != nil { - return s.Histogram.Equal(o.Histogram) - } - return s.Value.Equal(o.Value) -} - -func (s Sample) String() string { - if s.Histogram != nil { - return fmt.Sprintf("%s => %s", s.Metric, SampleHistogramPair{ - Timestamp: s.Timestamp, - Histogram: s.Histogram, - }) - } - return fmt.Sprintf("%s => %s", s.Metric, SamplePair{ - Timestamp: s.Timestamp, - Value: s.Value, - }) -} - -// MarshalJSON implements json.Marshaler. -func (s Sample) MarshalJSON() ([]byte, error) { - if s.Histogram != nil { - v := struct { - Metric Metric `json:"metric"` - Histogram SampleHistogramPair `json:"histogram"` - }{ - Metric: s.Metric, - Histogram: SampleHistogramPair{ - Timestamp: s.Timestamp, - Histogram: s.Histogram, - }, - } - return json.Marshal(&v) - } - v := struct { - Metric Metric `json:"metric"` - Value SamplePair `json:"value"` - }{ - Metric: s.Metric, - Value: SamplePair{ - Timestamp: s.Timestamp, - Value: s.Value, - }, - } - return json.Marshal(&v) -} - -// UnmarshalJSON implements json.Unmarshaler. -func (s *Sample) UnmarshalJSON(b []byte) error { - v := struct { - Metric Metric `json:"metric"` - Value SamplePair `json:"value"` - Histogram SampleHistogramPair `json:"histogram"` - }{ - Metric: s.Metric, - Value: SamplePair{ - Timestamp: s.Timestamp, - Value: s.Value, - }, - Histogram: SampleHistogramPair{ - Timestamp: s.Timestamp, - Histogram: s.Histogram, - }, - } - - if err := json.Unmarshal(b, &v); err != nil { - return err - } - - s.Metric = v.Metric - if v.Histogram.Histogram != nil { - s.Timestamp = v.Histogram.Timestamp - s.Histogram = v.Histogram.Histogram - } else { - s.Timestamp = v.Value.Timestamp - s.Value = v.Value.Value - } - - return nil -} - -// Samples is a sortable Sample slice. It implements sort.Interface. -type Samples []*Sample - -func (s Samples) Len() int { - return len(s) -} - -// Less compares first the metrics, then the timestamp. 
-func (s Samples) Less(i, j int) bool { - switch { - case s[i].Metric.Before(s[j].Metric): - return true - case s[j].Metric.Before(s[i].Metric): - return false - case s[i].Timestamp.Before(s[j].Timestamp): - return true - default: - return false - } -} - -func (s Samples) Swap(i, j int) { - s[i], s[j] = s[j], s[i] -} - -// Equal compares two sets of samples and returns true if they are equal. -func (s Samples) Equal(o Samples) bool { - if len(s) != len(o) { - return false - } - - for i, sample := range s { - if !sample.Equal(o[i]) { - return false - } - } - return true -} - -// SampleStream is a stream of Values belonging to an attached COWMetric. -type SampleStream struct { - Metric Metric `json:"metric"` - Values []SamplePair `json:"values"` - Histograms []SampleHistogramPair `json:"histograms"` -} - -func (ss SampleStream) String() string { - valuesLength := len(ss.Values) - vals := make([]string, valuesLength+len(ss.Histograms)) - for i, v := range ss.Values { - vals[i] = v.String() - } - for i, v := range ss.Histograms { - vals[i+valuesLength] = v.String() - } - return fmt.Sprintf("%s =>\n%s", ss.Metric, strings.Join(vals, "\n")) -} - -func (ss SampleStream) MarshalJSON() ([]byte, error) { - if len(ss.Histograms) > 0 && len(ss.Values) > 0 { - v := struct { - Metric Metric `json:"metric"` - Values []SamplePair `json:"values"` - Histograms []SampleHistogramPair `json:"histograms"` - }{ - Metric: ss.Metric, - Values: ss.Values, - Histograms: ss.Histograms, - } - return json.Marshal(&v) - } else if len(ss.Histograms) > 0 { - v := struct { - Metric Metric `json:"metric"` - Histograms []SampleHistogramPair `json:"histograms"` - }{ - Metric: ss.Metric, - Histograms: ss.Histograms, - } - return json.Marshal(&v) - } else { - v := struct { - Metric Metric `json:"metric"` - Values []SamplePair `json:"values"` - }{ - Metric: ss.Metric, - Values: ss.Values, - } - return json.Marshal(&v) - } -} - -func (ss *SampleStream) UnmarshalJSON(b []byte) error { - v := struct { - Metric Metric `json:"metric"` - Values []SamplePair `json:"values"` - Histograms []SampleHistogramPair `json:"histograms"` - }{ - Metric: ss.Metric, - Values: ss.Values, - Histograms: ss.Histograms, - } - - if err := json.Unmarshal(b, &v); err != nil { - return err - } - - ss.Metric = v.Metric - ss.Values = v.Values - ss.Histograms = v.Histograms - - return nil -} - -// Scalar is a scalar value evaluated at the set timestamp. -type Scalar struct { - Value SampleValue `json:"value"` - Timestamp Time `json:"timestamp"` -} - -func (s Scalar) String() string { - return fmt.Sprintf("scalar: %v @[%v]", s.Value, s.Timestamp) -} - -// MarshalJSON implements json.Marshaler. -func (s Scalar) MarshalJSON() ([]byte, error) { - v := strconv.FormatFloat(float64(s.Value), 'f', -1, 64) - return json.Marshal([...]interface{}{s.Timestamp, string(v)}) -} - -// UnmarshalJSON implements json.Unmarshaler. -func (s *Scalar) UnmarshalJSON(b []byte) error { - var f string - v := [...]interface{}{&s.Timestamp, &f} - - if err := json.Unmarshal(b, &v); err != nil { - return err - } - - value, err := strconv.ParseFloat(f, 64) - if err != nil { - return fmt.Errorf("error parsing sample value: %w", err) - } - s.Value = SampleValue(value) - return nil -} - -// String is a string value evaluated at the set timestamp. -type String struct { - Value string `json:"value"` - Timestamp Time `json:"timestamp"` -} - -func (s *String) String() string { - return s.Value -} - -// MarshalJSON implements json.Marshaler. 
-func (s String) MarshalJSON() ([]byte, error) { - return json.Marshal([]interface{}{s.Timestamp, s.Value}) -} - -// UnmarshalJSON implements json.Unmarshaler. -func (s *String) UnmarshalJSON(b []byte) error { - v := [...]interface{}{&s.Timestamp, &s.Value} - return json.Unmarshal(b, &v) -} - -// Vector is basically only an alias for Samples, but the -// contract is that in a Vector, all Samples have the same timestamp. -type Vector []*Sample - -func (vec Vector) String() string { - entries := make([]string, len(vec)) - for i, s := range vec { - entries[i] = s.String() - } - return strings.Join(entries, "\n") -} - -func (vec Vector) Len() int { return len(vec) } -func (vec Vector) Swap(i, j int) { vec[i], vec[j] = vec[j], vec[i] } - -// Less compares first the metrics, then the timestamp. -func (vec Vector) Less(i, j int) bool { - switch { - case vec[i].Metric.Before(vec[j].Metric): - return true - case vec[j].Metric.Before(vec[i].Metric): - return false - case vec[i].Timestamp.Before(vec[j].Timestamp): - return true - default: - return false - } -} - -// Equal compares two sets of samples and returns true if they are equal. -func (vec Vector) Equal(o Vector) bool { - if len(vec) != len(o) { - return false - } - - for i, sample := range vec { - if !sample.Equal(o[i]) { - return false - } - } - return true -} - -// Matrix is a list of time series. -type Matrix []*SampleStream - -func (m Matrix) Len() int { return len(m) } -func (m Matrix) Less(i, j int) bool { return m[i].Metric.Before(m[j].Metric) } -func (m Matrix) Swap(i, j int) { m[i], m[j] = m[j], m[i] } - -func (mat Matrix) String() string { - matCp := make(Matrix, len(mat)) - copy(matCp, mat) - sort.Sort(matCp) - - strs := make([]string, len(matCp)) - - for i, ss := range matCp { - strs[i] = ss.String() - } - - return strings.Join(strs, "\n") -} diff --git a/vendor/github.com/prometheus/common/model/value_float.go b/vendor/github.com/prometheus/common/model/value_float.go deleted file mode 100644 index ae35cc2ab..000000000 --- a/vendor/github.com/prometheus/common/model/value_float.go +++ /dev/null @@ -1,98 +0,0 @@ -// Copyright 2013 The Prometheus Authors -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package model - -import ( - "encoding/json" - "fmt" - "math" - "strconv" -) - -// ZeroSamplePair is the pseudo zero-value of SamplePair used to signal a -// non-existing sample pair. It is a SamplePair with timestamp Earliest and -// value 0.0. Note that the natural zero value of SamplePair has a timestamp -// of 0, which is possible to appear in a real SamplePair and thus not -// suitable to signal a non-existing SamplePair. -var ZeroSamplePair = SamplePair{Timestamp: Earliest} - -// A SampleValue is a representation of a value for a given sample at a given -// time. -type SampleValue float64 - -// MarshalJSON implements json.Marshaler. -func (v SampleValue) MarshalJSON() ([]byte, error) { - return json.Marshal(v.String()) -} - -// UnmarshalJSON implements json.Unmarshaler. 
-func (v *SampleValue) UnmarshalJSON(b []byte) error { - if len(b) < 2 || b[0] != '"' || b[len(b)-1] != '"' { - return fmt.Errorf("sample value must be a quoted string") - } - f, err := strconv.ParseFloat(string(b[1:len(b)-1]), 64) - if err != nil { - return err - } - *v = SampleValue(f) - return nil -} - -// Equal returns true if the value of v and o is equal or if both are NaN. Note -// that v==o is false if both are NaN. If you want the conventional float -// behavior, use == to compare two SampleValues. -func (v SampleValue) Equal(o SampleValue) bool { - if v == o { - return true - } - return math.IsNaN(float64(v)) && math.IsNaN(float64(o)) -} - -func (v SampleValue) String() string { - return strconv.FormatFloat(float64(v), 'f', -1, 64) -} - -// SamplePair pairs a SampleValue with a Timestamp. -type SamplePair struct { - Timestamp Time - Value SampleValue -} - -func (s SamplePair) MarshalJSON() ([]byte, error) { - t, err := json.Marshal(s.Timestamp) - if err != nil { - return nil, err - } - v, err := json.Marshal(s.Value) - if err != nil { - return nil, err - } - return []byte(fmt.Sprintf("[%s,%s]", t, v)), nil -} - -// UnmarshalJSON implements json.Unmarshaler. -func (s *SamplePair) UnmarshalJSON(b []byte) error { - v := [...]json.Unmarshaler{&s.Timestamp, &s.Value} - return json.Unmarshal(b, &v) -} - -// Equal returns true if this SamplePair and o have equal Values and equal -// Timestamps. The semantics of Value equality is defined by SampleValue.Equal. -func (s *SamplePair) Equal(o *SamplePair) bool { - return s == o || (s.Value.Equal(o.Value) && s.Timestamp.Equal(o.Timestamp)) -} - -func (s SamplePair) String() string { - return fmt.Sprintf("%s @[%s]", s.Value, s.Timestamp) -} diff --git a/vendor/github.com/prometheus/common/model/value_histogram.go b/vendor/github.com/prometheus/common/model/value_histogram.go deleted file mode 100644 index 54bb038cf..000000000 --- a/vendor/github.com/prometheus/common/model/value_histogram.go +++ /dev/null @@ -1,178 +0,0 @@ -// Copyright 2013 The Prometheus Authors -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package model - -import ( - "encoding/json" - "fmt" - "strconv" - "strings" -) - -type FloatString float64 - -func (v FloatString) String() string { - return strconv.FormatFloat(float64(v), 'f', -1, 64) -} - -func (v FloatString) MarshalJSON() ([]byte, error) { - return json.Marshal(v.String()) -} - -func (v *FloatString) UnmarshalJSON(b []byte) error { - if len(b) < 2 || b[0] != '"' || b[len(b)-1] != '"' { - return fmt.Errorf("float value must be a quoted string") - } - f, err := strconv.ParseFloat(string(b[1:len(b)-1]), 64) - if err != nil { - return err - } - *v = FloatString(f) - return nil -} - -type HistogramBucket struct { - Boundaries int32 - Lower FloatString - Upper FloatString - Count FloatString -} - -func (s HistogramBucket) MarshalJSON() ([]byte, error) { - b, err := json.Marshal(s.Boundaries) - if err != nil { - return nil, err - } - l, err := json.Marshal(s.Lower) - if err != nil { - return nil, err - } - u, err := json.Marshal(s.Upper) - if err != nil { - return nil, err - } - c, err := json.Marshal(s.Count) - if err != nil { - return nil, err - } - return []byte(fmt.Sprintf("[%s,%s,%s,%s]", b, l, u, c)), nil -} - -func (s *HistogramBucket) UnmarshalJSON(buf []byte) error { - tmp := []interface{}{&s.Boundaries, &s.Lower, &s.Upper, &s.Count} - wantLen := len(tmp) - if err := json.Unmarshal(buf, &tmp); err != nil { - return err - } - if gotLen := len(tmp); gotLen != wantLen { - return fmt.Errorf("wrong number of fields: %d != %d", gotLen, wantLen) - } - return nil -} - -func (s *HistogramBucket) Equal(o *HistogramBucket) bool { - return s == o || (s.Boundaries == o.Boundaries && s.Lower == o.Lower && s.Upper == o.Upper && s.Count == o.Count) -} - -func (b HistogramBucket) String() string { - var sb strings.Builder - lowerInclusive := b.Boundaries == 1 || b.Boundaries == 3 - upperInclusive := b.Boundaries == 0 || b.Boundaries == 3 - if lowerInclusive { - sb.WriteRune('[') - } else { - sb.WriteRune('(') - } - fmt.Fprintf(&sb, "%g,%g", b.Lower, b.Upper) - if upperInclusive { - sb.WriteRune(']') - } else { - sb.WriteRune(')') - } - fmt.Fprintf(&sb, ":%v", b.Count) - return sb.String() -} - -type HistogramBuckets []*HistogramBucket - -func (s HistogramBuckets) Equal(o HistogramBuckets) bool { - if len(s) != len(o) { - return false - } - - for i, bucket := range s { - if !bucket.Equal(o[i]) { - return false - } - } - return true -} - -type SampleHistogram struct { - Count FloatString `json:"count"` - Sum FloatString `json:"sum"` - Buckets HistogramBuckets `json:"buckets"` -} - -func (s SampleHistogram) String() string { - return fmt.Sprintf("Count: %f, Sum: %f, Buckets: %v", s.Count, s.Sum, s.Buckets) -} - -func (s *SampleHistogram) Equal(o *SampleHistogram) bool { - return s == o || (s.Count == o.Count && s.Sum == o.Sum && s.Buckets.Equal(o.Buckets)) -} - -type SampleHistogramPair struct { - Timestamp Time - // Histogram should never be nil, it's only stored as pointer for efficiency. 
- Histogram *SampleHistogram -} - -func (s SampleHistogramPair) MarshalJSON() ([]byte, error) { - if s.Histogram == nil { - return nil, fmt.Errorf("histogram is nil") - } - t, err := json.Marshal(s.Timestamp) - if err != nil { - return nil, err - } - v, err := json.Marshal(s.Histogram) - if err != nil { - return nil, err - } - return []byte(fmt.Sprintf("[%s,%s]", t, v)), nil -} - -func (s *SampleHistogramPair) UnmarshalJSON(buf []byte) error { - tmp := []interface{}{&s.Timestamp, &s.Histogram} - wantLen := len(tmp) - if err := json.Unmarshal(buf, &tmp); err != nil { - return err - } - if gotLen := len(tmp); gotLen != wantLen { - return fmt.Errorf("wrong number of fields: %d != %d", gotLen, wantLen) - } - if s.Histogram == nil { - return fmt.Errorf("histogram is null") - } - return nil -} - -func (s SampleHistogramPair) String() string { - return fmt.Sprintf("%s @[%s]", s.Histogram, s.Timestamp) -} - -func (s *SampleHistogramPair) Equal(o *SampleHistogramPair) bool { - return s == o || (s.Histogram.Equal(o.Histogram) && s.Timestamp.Equal(o.Timestamp)) -} diff --git a/vendor/github.com/prometheus/common/model/value_type.go b/vendor/github.com/prometheus/common/model/value_type.go deleted file mode 100644 index 726c50ee6..000000000 --- a/vendor/github.com/prometheus/common/model/value_type.go +++ /dev/null @@ -1,83 +0,0 @@ -// Copyright 2013 The Prometheus Authors -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package model - -import ( - "encoding/json" - "fmt" -) - -// Value is a generic interface for values resulting from a query evaluation. -type Value interface { - Type() ValueType - String() string -} - -func (Matrix) Type() ValueType { return ValMatrix } -func (Vector) Type() ValueType { return ValVector } -func (*Scalar) Type() ValueType { return ValScalar } -func (*String) Type() ValueType { return ValString } - -type ValueType int - -const ( - ValNone ValueType = iota - ValScalar - ValVector - ValMatrix - ValString -) - -// MarshalJSON implements json.Marshaler. 
-func (et ValueType) MarshalJSON() ([]byte, error) { - return json.Marshal(et.String()) -} - -func (et *ValueType) UnmarshalJSON(b []byte) error { - var s string - if err := json.Unmarshal(b, &s); err != nil { - return err - } - switch s { - case "": - *et = ValNone - case "scalar": - *et = ValScalar - case "vector": - *et = ValVector - case "matrix": - *et = ValMatrix - case "string": - *et = ValString - default: - return fmt.Errorf("unknown value type %q", s) - } - return nil -} - -func (e ValueType) String() string { - switch e { - case ValNone: - return "" - case ValScalar: - return "scalar" - case ValVector: - return "vector" - case ValMatrix: - return "matrix" - case ValString: - return "string" - } - panic("ValueType.String: unhandled value type") -} diff --git a/vendor/github.com/prometheus/procfs/.gitignore b/vendor/github.com/prometheus/procfs/.gitignore deleted file mode 100644 index 7cc33ae4a..000000000 --- a/vendor/github.com/prometheus/procfs/.gitignore +++ /dev/null @@ -1,2 +0,0 @@ -/testdata/fixtures/ -/fixtures diff --git a/vendor/github.com/prometheus/procfs/.golangci.yml b/vendor/github.com/prometheus/procfs/.golangci.yml deleted file mode 100644 index c24864a92..000000000 --- a/vendor/github.com/prometheus/procfs/.golangci.yml +++ /dev/null @@ -1,15 +0,0 @@ ---- -linters: - enable: - - godot - - misspell - - revive - -linter-settings: - godot: - capital: true - exclude: - # Ignore "See: URL" - - 'See:' - misspell: - locale: US diff --git a/vendor/github.com/prometheus/procfs/CODE_OF_CONDUCT.md b/vendor/github.com/prometheus/procfs/CODE_OF_CONDUCT.md deleted file mode 100644 index d325872bd..000000000 --- a/vendor/github.com/prometheus/procfs/CODE_OF_CONDUCT.md +++ /dev/null @@ -1,3 +0,0 @@ -# Prometheus Community Code of Conduct - -Prometheus follows the [CNCF Code of Conduct](https://github.com/cncf/foundation/blob/main/code-of-conduct.md). diff --git a/vendor/github.com/prometheus/procfs/CONTRIBUTING.md b/vendor/github.com/prometheus/procfs/CONTRIBUTING.md deleted file mode 100644 index 853eb9d49..000000000 --- a/vendor/github.com/prometheus/procfs/CONTRIBUTING.md +++ /dev/null @@ -1,121 +0,0 @@ -# Contributing - -Prometheus uses GitHub to manage reviews of pull requests. - -* If you are a new contributor see: [Steps to Contribute](#steps-to-contribute) - -* If you have a trivial fix or improvement, go ahead and create a pull request, - addressing (with `@...`) a suitable maintainer of this repository (see - [MAINTAINERS.md](MAINTAINERS.md)) in the description of the pull request. - -* If you plan to do something more involved, first discuss your ideas - on our [mailing list](https://groups.google.com/forum/?fromgroups#!forum/prometheus-developers). - This will avoid unnecessary work and surely give you and us a good deal - of inspiration. Also please see our [non-goals issue](https://github.com/prometheus/docs/issues/149) on areas that the Prometheus community doesn't plan to work on. - -* Relevant coding style guidelines are the [Go Code Review - Comments](https://code.google.com/p/go-wiki/wiki/CodeReviewComments) - and the _Formatting and style_ section of Peter Bourgon's [Go: Best - Practices for Production - Environments](https://peter.bourgon.org/go-in-production/#formatting-and-style). - -* Be sure to sign off on the [DCO](https://github.com/probot/dco#how-it-works) - -## Steps to Contribute - -Should you wish to work on an issue, please claim it first by commenting on the GitHub issue that you want to work on it. 
This is to prevent duplicated efforts from contributors on the same issue. - -Please check the [`help-wanted`](https://github.com/prometheus/procfs/issues?q=is%3Aissue+is%3Aopen+label%3A%22help+wanted%22) label to find issues that are good for getting started. If you have questions about one of the issues, with or without the tag, please comment on them and one of the maintainers will clarify it. For a quicker response, contact us over [IRC](https://prometheus.io/community). - -For quickly compiling and testing your changes do: -``` -make test # Make sure all the tests pass before you commit and push :) -``` - -We use [`golangci-lint`](https://github.com/golangci/golangci-lint) for linting the code. If it reports an issue and you think that the warning needs to be disregarded or is a false-positive, you can add a special comment `//nolint:linter1[,linter2,...]` before the offending line. Use this sparingly though, fixing the code to comply with the linter's recommendation is in general the preferred course of action. - -## Pull Request Checklist - -* Branch from the master branch and, if needed, rebase to the current master branch before submitting your pull request. If it doesn't merge cleanly with master you may be asked to rebase your changes. - -* Commits should be as small as possible, while ensuring that each commit is correct independently (i.e., each commit should compile and pass tests). - -* If your patch is not getting reviewed or you need a specific person to review it, you can @-reply a reviewer asking for a review in the pull request or a comment, or you can ask for a review on IRC channel [#prometheus](https://webchat.freenode.net/?channels=#prometheus) on irc.freenode.net (for the easiest start, [join via Riot](https://riot.im/app/#/room/#prometheus:matrix.org)). - -* Add tests relevant to the fixed bug or new feature. - -## Dependency management - -The Prometheus project uses [Go modules](https://golang.org/cmd/go/#hdr-Modules__module_versions__and_more) to manage dependencies on external packages. This requires a working Go environment with version 1.12 or greater installed. - -All dependencies are vendored in the `vendor/` directory. - -To add or update a new dependency, use the `go get` command: - -```bash -# Pick the latest tagged release. -go get example.com/some/module/pkg - -# Pick a specific version. -go get example.com/some/module/pkg@vX.Y.Z -``` - -Tidy up the `go.mod` and `go.sum` files and copy the new/updated dependency to the `vendor/` directory: - - -```bash -# The GO111MODULE variable can be omitted when the code isn't located in GOPATH. -GO111MODULE=on go mod tidy - -GO111MODULE=on go mod vendor -``` - -You have to commit the changes to `go.mod`, `go.sum` and the `vendor/` directory before submitting the pull request. - - -## API Implementation Guidelines - -### Naming and Documentation - -Public functions and structs should normally be named according to the file(s) being read and parsed. For example, -the `fs.BuddyInfo()` function reads the file `/proc/buddyinfo`. In addition, the godoc for each public function -should contain the path to the file(s) being read and a URL of the linux kernel documentation describing the file(s). - -### Reading vs. Parsing - -Most functionality in this library consists of reading files and then parsing the text into structured data. In most -cases reading and parsing should be separated into different functions/methods with a public `fs.Thing()` method and -a private `parseThing(r Reader)` function. 
This provides a logical separation and allows parsing to be tested -directly without the need to read from the filesystem. Using a `Reader` argument is preferred over other data types -such as `string` or `*File` because it provides the most flexibility regarding the data source. When a set of files -in a directory needs to be parsed, then a `path` string parameter to the parse function can be used instead. - -### /proc and /sys filesystem I/O - -The `proc` and `sys` filesystems are pseudo file systems and work a bit differently from standard disk I/O. -Many of the files are changing continuously and the data being read can in some cases change between subsequent -reads in the same file. Also, most of the files are relatively small (less than a few KBs), and system calls -to the `stat` function will often return the wrong size. Therefore, for most files it's recommended to read the -full file in a single operation using an internal utility function called `util.ReadFileNoStat`. -This function is similar to `os.ReadFile`, but it avoids the system call to `stat` to get the current size of -the file. - -Note that parsing the file's contents can still be performed one line at a time. This is done by first reading -the full file, and then using a scanner on the `[]byte` or `string` containing the data. - -``` - data, err := util.ReadFileNoStat("/proc/cpuinfo") - if err != nil { - return err - } - reader := bytes.NewReader(data) - scanner := bufio.NewScanner(reader) -``` - -The `/sys` filesystem contains many very small files which contain only a single numeric or text value. These files -can be read using an internal function called `util.SysReadFile` which is similar to `os.ReadFile` but does -not bother to check the size of the file before reading. -``` - data, err := util.SysReadFile("/sys/class/power_supply/BAT0/capacity") -``` - diff --git a/vendor/github.com/prometheus/procfs/LICENSE b/vendor/github.com/prometheus/procfs/LICENSE deleted file mode 100644 index 261eeb9e9..000000000 --- a/vendor/github.com/prometheus/procfs/LICENSE +++ /dev/null @@ -1,201 +0,0 @@ - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. 
- - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. 
You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. 
In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright [yyyy] [name of copyright owner] - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. diff --git a/vendor/github.com/prometheus/procfs/MAINTAINERS.md b/vendor/github.com/prometheus/procfs/MAINTAINERS.md deleted file mode 100644 index 56ba67d3e..000000000 --- a/vendor/github.com/prometheus/procfs/MAINTAINERS.md +++ /dev/null @@ -1,2 +0,0 @@ -* Johannes 'fish' Ziemke @discordianfish -* Paul Gier @pgier diff --git a/vendor/github.com/prometheus/procfs/Makefile b/vendor/github.com/prometheus/procfs/Makefile deleted file mode 100644 index 7edfe4d09..000000000 --- a/vendor/github.com/prometheus/procfs/Makefile +++ /dev/null @@ -1,31 +0,0 @@ -# Copyright 2018 The Prometheus Authors -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -include Makefile.common - -%/.unpacked: %.ttar - @echo ">> extracting fixtures $*" - ./ttar -C $(dir $*) -x -f $*.ttar - touch $@ - -fixtures: testdata/fixtures/.unpacked - -update_fixtures: - rm -vf testdata/fixtures/.unpacked - ./ttar -c -f testdata/fixtures.ttar -C testdata/ fixtures/ - -.PHONY: build -build: - -.PHONY: test -test: testdata/fixtures/.unpacked common-test diff --git a/vendor/github.com/prometheus/procfs/Makefile.common b/vendor/github.com/prometheus/procfs/Makefile.common deleted file mode 100644 index 92558151e..000000000 --- a/vendor/github.com/prometheus/procfs/Makefile.common +++ /dev/null @@ -1,273 +0,0 @@ -# Copyright 2018 The Prometheus Authors -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -# A common Makefile that includes rules to be reused in different prometheus projects. -# !!! Open PRs only against the prometheus/prometheus/Makefile.common repository! - -# Example usage : -# Create the main Makefile in the root project directory. -# include Makefile.common -# customTarget: -# @echo ">> Running customTarget" -# - -# Ensure GOBIN is not set during build so that promu is installed to the correct path -unexport GOBIN - -GO ?= go -GOFMT ?= $(GO)fmt -FIRST_GOPATH := $(firstword $(subst :, ,$(shell $(GO) env GOPATH))) -GOOPTS ?= -GOHOSTOS ?= $(shell $(GO) env GOHOSTOS) -GOHOSTARCH ?= $(shell $(GO) env GOHOSTARCH) - -GO_VERSION ?= $(shell $(GO) version) -GO_VERSION_NUMBER ?= $(word 3, $(GO_VERSION)) -PRE_GO_111 ?= $(shell echo $(GO_VERSION_NUMBER) | grep -E 'go1\.(10|[0-9])\.') - -PROMU := $(FIRST_GOPATH)/bin/promu -pkgs = ./... - -ifeq (arm, $(GOHOSTARCH)) - GOHOSTARM ?= $(shell GOARM= $(GO) env GOARM) - GO_BUILD_PLATFORM ?= $(GOHOSTOS)-$(GOHOSTARCH)v$(GOHOSTARM) -else - GO_BUILD_PLATFORM ?= $(GOHOSTOS)-$(GOHOSTARCH) -endif - -GOTEST := $(GO) test -GOTEST_DIR := -ifneq ($(CIRCLE_JOB),) -ifneq ($(shell command -v gotestsum > /dev/null),) - GOTEST_DIR := test-results - GOTEST := gotestsum --junitfile $(GOTEST_DIR)/unit-tests.xml -- -endif -endif - -PROMU_VERSION ?= 0.15.0 -PROMU_URL := https://github.com/prometheus/promu/releases/download/v$(PROMU_VERSION)/promu-$(PROMU_VERSION).$(GO_BUILD_PLATFORM).tar.gz - -SKIP_GOLANGCI_LINT := -GOLANGCI_LINT := -GOLANGCI_LINT_OPTS ?= -GOLANGCI_LINT_VERSION ?= v1.55.2 -# golangci-lint only supports linux, darwin and windows platforms on i386/amd64/arm64. -# windows isn't included here because of the path separator being different. 
-ifeq ($(GOHOSTOS),$(filter $(GOHOSTOS),linux darwin)) - ifeq ($(GOHOSTARCH),$(filter $(GOHOSTARCH),amd64 i386 arm64)) - # If we're in CI and there is an Actions file, that means the linter - # is being run in Actions, so we don't need to run it here. - ifneq (,$(SKIP_GOLANGCI_LINT)) - GOLANGCI_LINT := - else ifeq (,$(CIRCLE_JOB)) - GOLANGCI_LINT := $(FIRST_GOPATH)/bin/golangci-lint - else ifeq (,$(wildcard .github/workflows/golangci-lint.yml)) - GOLANGCI_LINT := $(FIRST_GOPATH)/bin/golangci-lint - endif - endif -endif - -PREFIX ?= $(shell pwd) -BIN_DIR ?= $(shell pwd) -DOCKER_IMAGE_TAG ?= $(subst /,-,$(shell git rev-parse --abbrev-ref HEAD)) -DOCKERFILE_PATH ?= ./Dockerfile -DOCKERBUILD_CONTEXT ?= ./ -DOCKER_REPO ?= prom - -DOCKER_ARCHS ?= amd64 - -BUILD_DOCKER_ARCHS = $(addprefix common-docker-,$(DOCKER_ARCHS)) -PUBLISH_DOCKER_ARCHS = $(addprefix common-docker-publish-,$(DOCKER_ARCHS)) -TAG_DOCKER_ARCHS = $(addprefix common-docker-tag-latest-,$(DOCKER_ARCHS)) - -SANITIZED_DOCKER_IMAGE_TAG := $(subst +,-,$(DOCKER_IMAGE_TAG)) - -ifeq ($(GOHOSTARCH),amd64) - ifeq ($(GOHOSTOS),$(filter $(GOHOSTOS),linux freebsd darwin windows)) - # Only supported on amd64 - test-flags := -race - endif -endif - -# This rule is used to forward a target like "build" to "common-build". This -# allows a new "build" target to be defined in a Makefile which includes this -# one and override "common-build" without override warnings. -%: common-% ; - -.PHONY: common-all -common-all: precheck style check_license lint yamllint unused build test - -.PHONY: common-style -common-style: - @echo ">> checking code style" - @fmtRes=$$($(GOFMT) -d $$(find . -path ./vendor -prune -o -name '*.go' -print)); \ - if [ -n "$${fmtRes}" ]; then \ - echo "gofmt checking failed!"; echo "$${fmtRes}"; echo; \ - echo "Please ensure you are using $$($(GO) version) for formatting code."; \ - exit 1; \ - fi - -.PHONY: common-check_license -common-check_license: - @echo ">> checking license header" - @licRes=$$(for file in $$(find . -type f -iname '*.go' ! 
-path './vendor/*') ; do \ - awk 'NR<=3' $$file | grep -Eq "(Copyright|generated|GENERATED)" || echo $$file; \ - done); \ - if [ -n "$${licRes}" ]; then \ - echo "license header checking failed:"; echo "$${licRes}"; \ - exit 1; \ - fi - -.PHONY: common-deps -common-deps: - @echo ">> getting dependencies" - $(GO) mod download - -.PHONY: update-go-deps -update-go-deps: - @echo ">> updating Go dependencies" - @for m in $$($(GO) list -mod=readonly -m -f '{{ if and (not .Indirect) (not .Main)}}{{.Path}}{{end}}' all); do \ - $(GO) get -d $$m; \ - done - $(GO) mod tidy - -.PHONY: common-test-short -common-test-short: $(GOTEST_DIR) - @echo ">> running short tests" - $(GOTEST) -short $(GOOPTS) $(pkgs) - -.PHONY: common-test -common-test: $(GOTEST_DIR) - @echo ">> running all tests" - $(GOTEST) $(test-flags) $(GOOPTS) $(pkgs) - -$(GOTEST_DIR): - @mkdir -p $@ - -.PHONY: common-format -common-format: - @echo ">> formatting code" - $(GO) fmt $(pkgs) - -.PHONY: common-vet -common-vet: - @echo ">> vetting code" - $(GO) vet $(GOOPTS) $(pkgs) - -.PHONY: common-lint -common-lint: $(GOLANGCI_LINT) -ifdef GOLANGCI_LINT - @echo ">> running golangci-lint" - $(GOLANGCI_LINT) run $(GOLANGCI_LINT_OPTS) $(pkgs) -endif - -.PHONY: common-lint-fix -common-lint-fix: $(GOLANGCI_LINT) -ifdef GOLANGCI_LINT - @echo ">> running golangci-lint fix" - $(GOLANGCI_LINT) run --fix $(GOLANGCI_LINT_OPTS) $(pkgs) -endif - -.PHONY: common-yamllint -common-yamllint: - @echo ">> running yamllint on all YAML files in the repository" -ifeq (, $(shell command -v yamllint > /dev/null)) - @echo "yamllint not installed so skipping" -else - yamllint . -endif - -# For backward-compatibility. -.PHONY: common-staticcheck -common-staticcheck: lint - -.PHONY: common-unused -common-unused: - @echo ">> running check for unused/missing packages in go.mod" - $(GO) mod tidy - @git diff --exit-code -- go.sum go.mod - -.PHONY: common-build -common-build: promu - @echo ">> building binaries" - $(PROMU) build --prefix $(PREFIX) $(PROMU_BINARIES) - -.PHONY: common-tarball -common-tarball: promu - @echo ">> building release tarball" - $(PROMU) tarball --prefix $(PREFIX) $(BIN_DIR) - -.PHONY: common-docker $(BUILD_DOCKER_ARCHS) -common-docker: $(BUILD_DOCKER_ARCHS) -$(BUILD_DOCKER_ARCHS): common-docker-%: - docker build -t "$(DOCKER_REPO)/$(DOCKER_IMAGE_NAME)-linux-$*:$(SANITIZED_DOCKER_IMAGE_TAG)" \ - -f $(DOCKERFILE_PATH) \ - --build-arg ARCH="$*" \ - --build-arg OS="linux" \ - $(DOCKERBUILD_CONTEXT) - -.PHONY: common-docker-publish $(PUBLISH_DOCKER_ARCHS) -common-docker-publish: $(PUBLISH_DOCKER_ARCHS) -$(PUBLISH_DOCKER_ARCHS): common-docker-publish-%: - docker push "$(DOCKER_REPO)/$(DOCKER_IMAGE_NAME)-linux-$*:$(SANITIZED_DOCKER_IMAGE_TAG)" - -DOCKER_MAJOR_VERSION_TAG = $(firstword $(subst ., ,$(shell cat VERSION))) -.PHONY: common-docker-tag-latest $(TAG_DOCKER_ARCHS) -common-docker-tag-latest: $(TAG_DOCKER_ARCHS) -$(TAG_DOCKER_ARCHS): common-docker-tag-latest-%: - docker tag "$(DOCKER_REPO)/$(DOCKER_IMAGE_NAME)-linux-$*:$(SANITIZED_DOCKER_IMAGE_TAG)" "$(DOCKER_REPO)/$(DOCKER_IMAGE_NAME)-linux-$*:latest" - docker tag "$(DOCKER_REPO)/$(DOCKER_IMAGE_NAME)-linux-$*:$(SANITIZED_DOCKER_IMAGE_TAG)" "$(DOCKER_REPO)/$(DOCKER_IMAGE_NAME)-linux-$*:v$(DOCKER_MAJOR_VERSION_TAG)" - -.PHONY: common-docker-manifest -common-docker-manifest: - DOCKER_CLI_EXPERIMENTAL=enabled docker manifest create -a "$(DOCKER_REPO)/$(DOCKER_IMAGE_NAME):$(SANITIZED_DOCKER_IMAGE_TAG)" $(foreach 
ARCH,$(DOCKER_ARCHS),$(DOCKER_REPO)/$(DOCKER_IMAGE_NAME)-linux-$(ARCH):$(SANITIZED_DOCKER_IMAGE_TAG)) - DOCKER_CLI_EXPERIMENTAL=enabled docker manifest push "$(DOCKER_REPO)/$(DOCKER_IMAGE_NAME):$(SANITIZED_DOCKER_IMAGE_TAG)" - -.PHONY: promu -promu: $(PROMU) - -$(PROMU): - $(eval PROMU_TMP := $(shell mktemp -d)) - curl -s -L $(PROMU_URL) | tar -xvzf - -C $(PROMU_TMP) - mkdir -p $(FIRST_GOPATH)/bin - cp $(PROMU_TMP)/promu-$(PROMU_VERSION).$(GO_BUILD_PLATFORM)/promu $(FIRST_GOPATH)/bin/promu - rm -r $(PROMU_TMP) - -.PHONY: proto -proto: - @echo ">> generating code from proto files" - @./scripts/genproto.sh - -ifdef GOLANGCI_LINT -$(GOLANGCI_LINT): - mkdir -p $(FIRST_GOPATH)/bin - curl -sfL https://raw.githubusercontent.com/golangci/golangci-lint/$(GOLANGCI_LINT_VERSION)/install.sh \ - | sed -e '/install -d/d' \ - | sh -s -- -b $(FIRST_GOPATH)/bin $(GOLANGCI_LINT_VERSION) -endif - -.PHONY: precheck -precheck:: - -define PRECHECK_COMMAND_template = -precheck:: $(1)_precheck - -PRECHECK_COMMAND_$(1) ?= $(1) $$(strip $$(PRECHECK_OPTIONS_$(1))) -.PHONY: $(1)_precheck -$(1)_precheck: - @if ! $$(PRECHECK_COMMAND_$(1)) 1>/dev/null 2>&1; then \ - echo "Execution of '$$(PRECHECK_COMMAND_$(1))' command failed. Is $(1) installed?"; \ - exit 1; \ - fi -endef diff --git a/vendor/github.com/prometheus/procfs/NOTICE b/vendor/github.com/prometheus/procfs/NOTICE deleted file mode 100644 index 53c5e9aa1..000000000 --- a/vendor/github.com/prometheus/procfs/NOTICE +++ /dev/null @@ -1,7 +0,0 @@ -procfs provides functions to retrieve system, kernel and process -metrics from the pseudo-filesystem proc. - -Copyright 2014-2015 The Prometheus Authors - -This product includes software developed at -SoundCloud Ltd. (http://soundcloud.com/). diff --git a/vendor/github.com/prometheus/procfs/README.md b/vendor/github.com/prometheus/procfs/README.md deleted file mode 100644 index 1224816c2..000000000 --- a/vendor/github.com/prometheus/procfs/README.md +++ /dev/null @@ -1,61 +0,0 @@ -# procfs - -This package provides functions to retrieve system, kernel, and process -metrics from the pseudo-filesystems /proc and /sys. - -*WARNING*: This package is a work in progress. Its API may still break in -backwards-incompatible ways without warnings. Use it at your own risk. - -[![Go Reference](https://pkg.go.dev/badge/github.com/prometheus/procfs.svg)](https://pkg.go.dev/github.com/prometheus/procfs) -[![CircleCI](https://circleci.com/gh/prometheus/procfs/tree/master.svg?style=svg)](https://circleci.com/gh/prometheus/procfs/tree/master) -[![Go Report Card](https://goreportcard.com/badge/github.com/prometheus/procfs)](https://goreportcard.com/report/github.com/prometheus/procfs) - -## Usage - -The procfs library is organized by packages based on whether the gathered data is coming from -/proc, /sys, or both. Each package contains an `FS` type which represents the path to either /proc, -/sys, or both. For example, cpu statistics are gathered from -`/proc/stat` and are available via the root procfs package. First, the proc filesystem mount -point is initialized, and then the stat information is read. - -```go -fs, err := procfs.NewFS("/proc") -stats, err := fs.Stat() -``` - -Some sub-packages such as `blockdevice`, require access to both the proc and sys filesystems. 
- -```go - fs, err := blockdevice.NewFS("/proc", "/sys") - stats, err := fs.ProcDiskstats() -``` - -## Package Organization - -The packages in this project are organized according to (1) whether the data comes from the `/proc` or -`/sys` filesystem and (2) the type of information being retrieved. For example, most process information -can be gathered from the functions in the root `procfs` package. Information about block devices such as disk drives -is available in the `blockdevices` sub-package. - -## Building and Testing - -The procfs library is intended to be built as part of another application, so there are no distributable binaries. -However, most of the API includes unit tests which can be run with `make test`. - -### Updating Test Fixtures - -The procfs library includes a set of test fixtures which include many example files from -the `/proc` and `/sys` filesystems. These fixtures are included as a [ttar](https://github.com/ideaship/ttar) file -which is extracted automatically during testing. To add/update the test fixtures, first -ensure the `fixtures` directory is up to date by removing the existing directory and then -extracting the ttar file using `make fixtures/.unpacked` or just `make test`. - -```bash -rm -rf testdata/fixtures -make test -``` - -Next, make the required changes to the extracted files in the `fixtures` directory. When -the changes are complete, run `make update_fixtures` to create a new `fixtures.ttar` file -based on the updated `fixtures` directory. And finally, verify the changes using -`git diff testdata/fixtures.ttar`. diff --git a/vendor/github.com/prometheus/procfs/SECURITY.md b/vendor/github.com/prometheus/procfs/SECURITY.md deleted file mode 100644 index fed02d85c..000000000 --- a/vendor/github.com/prometheus/procfs/SECURITY.md +++ /dev/null @@ -1,6 +0,0 @@ -# Reporting a security issue - -The Prometheus security policy, including how to report vulnerabilities, can be -found here: - - diff --git a/vendor/github.com/prometheus/procfs/arp.go b/vendor/github.com/prometheus/procfs/arp.go deleted file mode 100644 index 28783e2dd..000000000 --- a/vendor/github.com/prometheus/procfs/arp.go +++ /dev/null @@ -1,116 +0,0 @@ -// Copyright 2019 The Prometheus Authors -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package procfs - -import ( - "fmt" - "net" - "os" - "strconv" - "strings" -) - -// Learned from include/uapi/linux/if_arp.h. -const ( - // completed entry (ha valid). - ATFComplete = 0x02 - // permanent entry. - ATFPermanent = 0x04 - // Publish entry. - ATFPublish = 0x08 - // Has requested trailers. - ATFUseTrailers = 0x10 - // Obsoleted: Want to use a netmask (only for proxy entries). - ATFNetmask = 0x20 - // Don't answer this addresses. - ATFDontPublish = 0x40 -) - -// ARPEntry contains a single row of the columnar data represented in -// /proc/net/arp. 
-type ARPEntry struct { - // IP address - IPAddr net.IP - // MAC address - HWAddr net.HardwareAddr - // Name of the device - Device string - // Flags - Flags byte -} - -// GatherARPEntries retrieves all the ARP entries, parse the relevant columns, -// and then return a slice of ARPEntry's. -func (fs FS) GatherARPEntries() ([]ARPEntry, error) { - data, err := os.ReadFile(fs.proc.Path("net/arp")) - if err != nil { - return nil, fmt.Errorf("%s: error reading arp %s: %w", ErrFileRead, fs.proc.Path("net/arp"), err) - } - - return parseARPEntries(data) -} - -func parseARPEntries(data []byte) ([]ARPEntry, error) { - lines := strings.Split(string(data), "\n") - entries := make([]ARPEntry, 0) - var err error - const ( - expectedDataWidth = 6 - expectedHeaderWidth = 9 - ) - for _, line := range lines { - columns := strings.Fields(line) - width := len(columns) - - if width == expectedHeaderWidth || width == 0 { - continue - } else if width == expectedDataWidth { - entry, err := parseARPEntry(columns) - if err != nil { - return []ARPEntry{}, fmt.Errorf("%s: Failed to parse ARP entry: %v: %w", ErrFileParse, entry, err) - } - entries = append(entries, entry) - } else { - return []ARPEntry{}, fmt.Errorf("%s: %d columns found, but expected %d: %w", ErrFileParse, width, expectedDataWidth, err) - } - - } - - return entries, err -} - -func parseARPEntry(columns []string) (ARPEntry, error) { - entry := ARPEntry{Device: columns[5]} - ip := net.ParseIP(columns[0]) - entry.IPAddr = ip - - if mac, err := net.ParseMAC(columns[3]); err == nil { - entry.HWAddr = mac - } else { - return ARPEntry{}, err - } - - if flags, err := strconv.ParseUint(columns[2], 0, 8); err == nil { - entry.Flags = byte(flags) - } else { - return ARPEntry{}, err - } - - return entry, nil -} - -// IsComplete returns true if ARP entry is marked with complete flag. -func (entry *ARPEntry) IsComplete() bool { - return entry.Flags&ATFComplete != 0 -} diff --git a/vendor/github.com/prometheus/procfs/buddyinfo.go b/vendor/github.com/prometheus/procfs/buddyinfo.go deleted file mode 100644 index 4a173636c..000000000 --- a/vendor/github.com/prometheus/procfs/buddyinfo.go +++ /dev/null @@ -1,85 +0,0 @@ -// Copyright 2017 The Prometheus Authors -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package procfs - -import ( - "bufio" - "fmt" - "io" - "os" - "strconv" - "strings" -) - -// A BuddyInfo is the details parsed from /proc/buddyinfo. -// The data is comprised of an array of free fragments of each size. -// The sizes are 2^n*PAGE_SIZE, where n is the array index. -type BuddyInfo struct { - Node string - Zone string - Sizes []float64 -} - -// BuddyInfo reads the buddyinfo statistics from the specified `proc` filesystem. 
-func (fs FS) BuddyInfo() ([]BuddyInfo, error) { - file, err := os.Open(fs.proc.Path("buddyinfo")) - if err != nil { - return nil, err - } - defer file.Close() - - return parseBuddyInfo(file) -} - -func parseBuddyInfo(r io.Reader) ([]BuddyInfo, error) { - var ( - buddyInfo = []BuddyInfo{} - scanner = bufio.NewScanner(r) - bucketCount = -1 - ) - - for scanner.Scan() { - var err error - line := scanner.Text() - parts := strings.Fields(line) - - if len(parts) < 4 { - return nil, fmt.Errorf("%w: Invalid number of fields, found: %v", ErrFileParse, parts) - } - - node := strings.TrimRight(parts[1], ",") - zone := strings.TrimRight(parts[3], ",") - arraySize := len(parts[4:]) - - if bucketCount == -1 { - bucketCount = arraySize - } else { - if bucketCount != arraySize { - return nil, fmt.Errorf("%w: mismatch in number of buddyinfo buckets, previous count %d, new count %d", ErrFileParse, bucketCount, arraySize) - } - } - - sizes := make([]float64, arraySize) - for i := 0; i < arraySize; i++ { - sizes[i], err = strconv.ParseFloat(parts[i+4], 64) - if err != nil { - return nil, fmt.Errorf("%s: Invalid valid in buddyinfo: %f: %w", ErrFileParse, sizes[i], err) - } - } - - buddyInfo = append(buddyInfo, BuddyInfo{node, zone, sizes}) - } - - return buddyInfo, scanner.Err() -} diff --git a/vendor/github.com/prometheus/procfs/cmdline.go b/vendor/github.com/prometheus/procfs/cmdline.go deleted file mode 100644 index bf4f3b48c..000000000 --- a/vendor/github.com/prometheus/procfs/cmdline.go +++ /dev/null @@ -1,30 +0,0 @@ -// Copyright 2021 The Prometheus Authors -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package procfs - -import ( - "strings" - - "github.com/prometheus/procfs/internal/util" -) - -// CmdLine returns the command line of the kernel. -func (fs FS) CmdLine() ([]string, error) { - data, err := util.ReadFileNoStat(fs.proc.Path("cmdline")) - if err != nil { - return nil, err - } - - return strings.Fields(string(data)), nil -} diff --git a/vendor/github.com/prometheus/procfs/cpuinfo.go b/vendor/github.com/prometheus/procfs/cpuinfo.go deleted file mode 100644 index f4f5501c6..000000000 --- a/vendor/github.com/prometheus/procfs/cpuinfo.go +++ /dev/null @@ -1,519 +0,0 @@ -// Copyright 2019 The Prometheus Authors -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -//go:build linux -// +build linux - -package procfs - -import ( - "bufio" - "bytes" - "errors" - "fmt" - "regexp" - "strconv" - "strings" - - "github.com/prometheus/procfs/internal/util" -) - -// CPUInfo contains general information about a system CPU found in /proc/cpuinfo. -type CPUInfo struct { - Processor uint - VendorID string - CPUFamily string - Model string - ModelName string - Stepping string - Microcode string - CPUMHz float64 - CacheSize string - PhysicalID string - Siblings uint - CoreID string - CPUCores uint - APICID string - InitialAPICID string - FPU string - FPUException string - CPUIDLevel uint - WP string - Flags []string - Bugs []string - BogoMips float64 - CLFlushSize uint - CacheAlignment uint - AddressSizes string - PowerManagement string -} - -var ( - cpuinfoClockRegexp = regexp.MustCompile(`([\d.]+)`) - cpuinfoS390XProcessorRegexp = regexp.MustCompile(`^processor\s+(\d+):.*`) -) - -// CPUInfo returns information about current system CPUs. -// See https://www.kernel.org/doc/Documentation/filesystems/proc.txt -func (fs FS) CPUInfo() ([]CPUInfo, error) { - data, err := util.ReadFileNoStat(fs.proc.Path("cpuinfo")) - if err != nil { - return nil, err - } - return parseCPUInfo(data) -} - -func parseCPUInfoX86(info []byte) ([]CPUInfo, error) { - scanner := bufio.NewScanner(bytes.NewReader(info)) - - // find the first "processor" line - firstLine := firstNonEmptyLine(scanner) - if !strings.HasPrefix(firstLine, "processor") || !strings.Contains(firstLine, ":") { - return nil, fmt.Errorf("%w: Cannot parse line: %q", ErrFileParse, firstLine) - } - field := strings.SplitN(firstLine, ": ", 2) - v, err := strconv.ParseUint(field[1], 0, 32) - if err != nil { - return nil, err - } - firstcpu := CPUInfo{Processor: uint(v)} - cpuinfo := []CPUInfo{firstcpu} - i := 0 - - for scanner.Scan() { - line := scanner.Text() - if !strings.Contains(line, ":") { - continue - } - field := strings.SplitN(line, ": ", 2) - switch strings.TrimSpace(field[0]) { - case "processor": - cpuinfo = append(cpuinfo, CPUInfo{}) // start of the next processor - i++ - v, err := strconv.ParseUint(field[1], 0, 32) - if err != nil { - return nil, err - } - cpuinfo[i].Processor = uint(v) - case "vendor", "vendor_id": - cpuinfo[i].VendorID = field[1] - case "cpu family": - cpuinfo[i].CPUFamily = field[1] - case "model": - cpuinfo[i].Model = field[1] - case "model name": - cpuinfo[i].ModelName = field[1] - case "stepping": - cpuinfo[i].Stepping = field[1] - case "microcode": - cpuinfo[i].Microcode = field[1] - case "cpu MHz": - v, err := strconv.ParseFloat(field[1], 64) - if err != nil { - return nil, err - } - cpuinfo[i].CPUMHz = v - case "cache size": - cpuinfo[i].CacheSize = field[1] - case "physical id": - cpuinfo[i].PhysicalID = field[1] - case "siblings": - v, err := strconv.ParseUint(field[1], 0, 32) - if err != nil { - return nil, err - } - cpuinfo[i].Siblings = uint(v) - case "core id": - cpuinfo[i].CoreID = field[1] - case "cpu cores": - v, err := strconv.ParseUint(field[1], 0, 32) - if err != nil { - return nil, err - } - cpuinfo[i].CPUCores = uint(v) - case "apicid": - cpuinfo[i].APICID = field[1] - case "initial apicid": - cpuinfo[i].InitialAPICID = field[1] - case "fpu": - cpuinfo[i].FPU = field[1] - case "fpu_exception": - cpuinfo[i].FPUException = field[1] - case "cpuid level": - v, err := strconv.ParseUint(field[1], 0, 32) - if err != nil { - return nil, err - } - cpuinfo[i].CPUIDLevel = uint(v) - case "wp": - cpuinfo[i].WP = field[1] - case "flags": - cpuinfo[i].Flags = strings.Fields(field[1]) - 
case "bugs": - cpuinfo[i].Bugs = strings.Fields(field[1]) - case "bogomips": - v, err := strconv.ParseFloat(field[1], 64) - if err != nil { - return nil, err - } - cpuinfo[i].BogoMips = v - case "clflush size": - v, err := strconv.ParseUint(field[1], 0, 32) - if err != nil { - return nil, err - } - cpuinfo[i].CLFlushSize = uint(v) - case "cache_alignment": - v, err := strconv.ParseUint(field[1], 0, 32) - if err != nil { - return nil, err - } - cpuinfo[i].CacheAlignment = uint(v) - case "address sizes": - cpuinfo[i].AddressSizes = field[1] - case "power management": - cpuinfo[i].PowerManagement = field[1] - } - } - return cpuinfo, nil -} - -func parseCPUInfoARM(info []byte) ([]CPUInfo, error) { - scanner := bufio.NewScanner(bytes.NewReader(info)) - - firstLine := firstNonEmptyLine(scanner) - match, err := regexp.MatchString("^[Pp]rocessor", firstLine) - if !match || !strings.Contains(firstLine, ":") { - return nil, fmt.Errorf("%s: Cannot parse line: %q: %w", ErrFileParse, firstLine, err) - - } - field := strings.SplitN(firstLine, ": ", 2) - cpuinfo := []CPUInfo{} - featuresLine := "" - commonCPUInfo := CPUInfo{} - i := 0 - if strings.TrimSpace(field[0]) == "Processor" { - commonCPUInfo = CPUInfo{ModelName: field[1]} - i = -1 - } else { - v, err := strconv.ParseUint(field[1], 0, 32) - if err != nil { - return nil, err - } - firstcpu := CPUInfo{Processor: uint(v)} - cpuinfo = []CPUInfo{firstcpu} - } - - for scanner.Scan() { - line := scanner.Text() - if !strings.Contains(line, ":") { - continue - } - field := strings.SplitN(line, ": ", 2) - switch strings.TrimSpace(field[0]) { - case "processor": - cpuinfo = append(cpuinfo, commonCPUInfo) // start of the next processor - i++ - v, err := strconv.ParseUint(field[1], 0, 32) - if err != nil { - return nil, err - } - cpuinfo[i].Processor = uint(v) - case "BogoMIPS": - if i == -1 { - cpuinfo = append(cpuinfo, commonCPUInfo) // There is only one processor - i++ - cpuinfo[i].Processor = 0 - } - v, err := strconv.ParseFloat(field[1], 64) - if err != nil { - return nil, err - } - cpuinfo[i].BogoMips = v - case "Features": - featuresLine = line - case "model name": - cpuinfo[i].ModelName = field[1] - } - } - fields := strings.SplitN(featuresLine, ": ", 2) - for i := range cpuinfo { - cpuinfo[i].Flags = strings.Fields(fields[1]) - } - return cpuinfo, nil - -} - -func parseCPUInfoS390X(info []byte) ([]CPUInfo, error) { - scanner := bufio.NewScanner(bytes.NewReader(info)) - - firstLine := firstNonEmptyLine(scanner) - if !strings.HasPrefix(firstLine, "vendor_id") || !strings.Contains(firstLine, ":") { - return nil, fmt.Errorf("%w: Cannot parse line: %q", ErrFileParse, firstLine) - } - field := strings.SplitN(firstLine, ": ", 2) - cpuinfo := []CPUInfo{} - commonCPUInfo := CPUInfo{VendorID: field[1]} - - for scanner.Scan() { - line := scanner.Text() - if !strings.Contains(line, ":") { - continue - } - field := strings.SplitN(line, ": ", 2) - switch strings.TrimSpace(field[0]) { - case "bogomips per cpu": - v, err := strconv.ParseFloat(field[1], 64) - if err != nil { - return nil, err - } - commonCPUInfo.BogoMips = v - case "features": - commonCPUInfo.Flags = strings.Fields(field[1]) - } - if strings.HasPrefix(line, "processor") { - match := cpuinfoS390XProcessorRegexp.FindStringSubmatch(line) - if len(match) < 2 { - return nil, fmt.Errorf("%w: %q", ErrFileParse, firstLine) - } - cpu := commonCPUInfo - v, err := strconv.ParseUint(match[1], 0, 32) - if err != nil { - return nil, err - } - cpu.Processor = uint(v) - cpuinfo = append(cpuinfo, cpu) - } - if 
strings.HasPrefix(line, "cpu number") { - break - } - } - - i := 0 - for scanner.Scan() { - line := scanner.Text() - if !strings.Contains(line, ":") { - continue - } - field := strings.SplitN(line, ": ", 2) - switch strings.TrimSpace(field[0]) { - case "cpu number": - i++ - case "cpu MHz dynamic": - clock := cpuinfoClockRegexp.FindString(strings.TrimSpace(field[1])) - v, err := strconv.ParseFloat(clock, 64) - if err != nil { - return nil, err - } - cpuinfo[i].CPUMHz = v - case "physical id": - cpuinfo[i].PhysicalID = field[1] - case "core id": - cpuinfo[i].CoreID = field[1] - case "cpu cores": - v, err := strconv.ParseUint(field[1], 0, 32) - if err != nil { - return nil, err - } - cpuinfo[i].CPUCores = uint(v) - case "siblings": - v, err := strconv.ParseUint(field[1], 0, 32) - if err != nil { - return nil, err - } - cpuinfo[i].Siblings = uint(v) - } - } - - return cpuinfo, nil -} - -func parseCPUInfoMips(info []byte) ([]CPUInfo, error) { - scanner := bufio.NewScanner(bytes.NewReader(info)) - - // find the first "processor" line - firstLine := firstNonEmptyLine(scanner) - if !strings.HasPrefix(firstLine, "system type") || !strings.Contains(firstLine, ":") { - return nil, fmt.Errorf("%w: %q", ErrFileParse, firstLine) - } - field := strings.SplitN(firstLine, ": ", 2) - cpuinfo := []CPUInfo{} - systemType := field[1] - - i := 0 - - for scanner.Scan() { - line := scanner.Text() - if !strings.Contains(line, ":") { - continue - } - field := strings.SplitN(line, ": ", 2) - switch strings.TrimSpace(field[0]) { - case "processor": - v, err := strconv.ParseUint(field[1], 0, 32) - if err != nil { - return nil, err - } - i = int(v) - cpuinfo = append(cpuinfo, CPUInfo{}) // start of the next processor - cpuinfo[i].Processor = uint(v) - cpuinfo[i].VendorID = systemType - case "cpu model": - cpuinfo[i].ModelName = field[1] - case "BogoMIPS": - v, err := strconv.ParseFloat(field[1], 64) - if err != nil { - return nil, err - } - cpuinfo[i].BogoMips = v - } - } - return cpuinfo, nil -} - -func parseCPUInfoLoong(info []byte) ([]CPUInfo, error) { - scanner := bufio.NewScanner(bytes.NewReader(info)) - // find the first "processor" line - firstLine := firstNonEmptyLine(scanner) - if !strings.HasPrefix(firstLine, "system type") || !strings.Contains(firstLine, ":") { - return nil, errors.New("invalid cpuinfo file: " + firstLine) - } - field := strings.SplitN(firstLine, ": ", 2) - cpuinfo := []CPUInfo{} - systemType := field[1] - i := 0 - for scanner.Scan() { - line := scanner.Text() - if !strings.Contains(line, ":") { - continue - } - field := strings.SplitN(line, ": ", 2) - switch strings.TrimSpace(field[0]) { - case "processor": - v, err := strconv.ParseUint(field[1], 0, 32) - if err != nil { - return nil, err - } - i = int(v) - cpuinfo = append(cpuinfo, CPUInfo{}) // start of the next processor - cpuinfo[i].Processor = uint(v) - cpuinfo[i].VendorID = systemType - case "CPU Family": - cpuinfo[i].CPUFamily = field[1] - case "Model Name": - cpuinfo[i].ModelName = field[1] - } - } - return cpuinfo, nil -} - -func parseCPUInfoPPC(info []byte) ([]CPUInfo, error) { - scanner := bufio.NewScanner(bytes.NewReader(info)) - - firstLine := firstNonEmptyLine(scanner) - if !strings.HasPrefix(firstLine, "processor") || !strings.Contains(firstLine, ":") { - return nil, fmt.Errorf("%w: %q", ErrFileParse, firstLine) - } - field := strings.SplitN(firstLine, ": ", 2) - v, err := strconv.ParseUint(field[1], 0, 32) - if err != nil { - return nil, err - } - firstcpu := CPUInfo{Processor: uint(v)} - cpuinfo := []CPUInfo{firstcpu} - i 
:= 0 - - for scanner.Scan() { - line := scanner.Text() - if !strings.Contains(line, ":") { - continue - } - field := strings.SplitN(line, ": ", 2) - switch strings.TrimSpace(field[0]) { - case "processor": - cpuinfo = append(cpuinfo, CPUInfo{}) // start of the next processor - i++ - v, err := strconv.ParseUint(field[1], 0, 32) - if err != nil { - return nil, err - } - cpuinfo[i].Processor = uint(v) - case "cpu": - cpuinfo[i].VendorID = field[1] - case "clock": - clock := cpuinfoClockRegexp.FindString(strings.TrimSpace(field[1])) - v, err := strconv.ParseFloat(clock, 64) - if err != nil { - return nil, err - } - cpuinfo[i].CPUMHz = v - } - } - return cpuinfo, nil -} - -func parseCPUInfoRISCV(info []byte) ([]CPUInfo, error) { - scanner := bufio.NewScanner(bytes.NewReader(info)) - - firstLine := firstNonEmptyLine(scanner) - if !strings.HasPrefix(firstLine, "processor") || !strings.Contains(firstLine, ":") { - return nil, fmt.Errorf("%w: %q", ErrFileParse, firstLine) - } - field := strings.SplitN(firstLine, ": ", 2) - v, err := strconv.ParseUint(field[1], 0, 32) - if err != nil { - return nil, err - } - firstcpu := CPUInfo{Processor: uint(v)} - cpuinfo := []CPUInfo{firstcpu} - i := 0 - - for scanner.Scan() { - line := scanner.Text() - if !strings.Contains(line, ":") { - continue - } - field := strings.SplitN(line, ": ", 2) - switch strings.TrimSpace(field[0]) { - case "processor": - v, err := strconv.ParseUint(field[1], 0, 32) - if err != nil { - return nil, err - } - i = int(v) - cpuinfo = append(cpuinfo, CPUInfo{}) // start of the next processor - cpuinfo[i].Processor = uint(v) - case "hart": - cpuinfo[i].CoreID = field[1] - case "isa": - cpuinfo[i].ModelName = field[1] - } - } - return cpuinfo, nil -} - -func parseCPUInfoDummy(_ []byte) ([]CPUInfo, error) { // nolint:unused,deadcode - return nil, errors.New("not implemented") -} - -// firstNonEmptyLine advances the scanner to the first non-empty line -// and returns the contents of that line. -func firstNonEmptyLine(scanner *bufio.Scanner) string { - for scanner.Scan() { - line := scanner.Text() - if strings.TrimSpace(line) != "" { - return line - } - } - return "" -} diff --git a/vendor/github.com/prometheus/procfs/cpuinfo_armx.go b/vendor/github.com/prometheus/procfs/cpuinfo_armx.go deleted file mode 100644 index 64cfd534c..000000000 --- a/vendor/github.com/prometheus/procfs/cpuinfo_armx.go +++ /dev/null @@ -1,20 +0,0 @@ -// Copyright 2020 The Prometheus Authors -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -//go:build linux && (arm || arm64) -// +build linux -// +build arm arm64 - -package procfs - -var parseCPUInfo = parseCPUInfoARM diff --git a/vendor/github.com/prometheus/procfs/cpuinfo_loong64.go b/vendor/github.com/prometheus/procfs/cpuinfo_loong64.go deleted file mode 100644 index d88442f0e..000000000 --- a/vendor/github.com/prometheus/procfs/cpuinfo_loong64.go +++ /dev/null @@ -1,19 +0,0 @@ -// Copyright 2022 The Prometheus Authors -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -//go:build linux -// +build linux - -package procfs - -var parseCPUInfo = parseCPUInfoLoong diff --git a/vendor/github.com/prometheus/procfs/cpuinfo_mipsx.go b/vendor/github.com/prometheus/procfs/cpuinfo_mipsx.go deleted file mode 100644 index c11207f3a..000000000 --- a/vendor/github.com/prometheus/procfs/cpuinfo_mipsx.go +++ /dev/null @@ -1,20 +0,0 @@ -// Copyright 2020 The Prometheus Authors -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -//go:build linux && (mips || mipsle || mips64 || mips64le) -// +build linux -// +build mips mipsle mips64 mips64le - -package procfs - -var parseCPUInfo = parseCPUInfoMips diff --git a/vendor/github.com/prometheus/procfs/cpuinfo_others.go b/vendor/github.com/prometheus/procfs/cpuinfo_others.go deleted file mode 100644 index a6b2b3127..000000000 --- a/vendor/github.com/prometheus/procfs/cpuinfo_others.go +++ /dev/null @@ -1,19 +0,0 @@ -// Copyright 2020 The Prometheus Authors -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -//go:build linux && !386 && !amd64 && !arm && !arm64 && !loong64 && !mips && !mips64 && !mips64le && !mipsle && !ppc64 && !ppc64le && !riscv64 && !s390x -// +build linux,!386,!amd64,!arm,!arm64,!loong64,!mips,!mips64,!mips64le,!mipsle,!ppc64,!ppc64le,!riscv64,!s390x - -package procfs - -var parseCPUInfo = parseCPUInfoDummy diff --git a/vendor/github.com/prometheus/procfs/cpuinfo_ppcx.go b/vendor/github.com/prometheus/procfs/cpuinfo_ppcx.go deleted file mode 100644 index 003bc2ad4..000000000 --- a/vendor/github.com/prometheus/procfs/cpuinfo_ppcx.go +++ /dev/null @@ -1,20 +0,0 @@ -// Copyright 2020 The Prometheus Authors -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -//go:build linux && (ppc64 || ppc64le) -// +build linux -// +build ppc64 ppc64le - -package procfs - -var parseCPUInfo = parseCPUInfoPPC diff --git a/vendor/github.com/prometheus/procfs/cpuinfo_riscvx.go b/vendor/github.com/prometheus/procfs/cpuinfo_riscvx.go deleted file mode 100644 index 1c9b7313b..000000000 --- a/vendor/github.com/prometheus/procfs/cpuinfo_riscvx.go +++ /dev/null @@ -1,20 +0,0 @@ -// Copyright 2020 The Prometheus Authors -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -//go:build linux && (riscv || riscv64) -// +build linux -// +build riscv riscv64 - -package procfs - -var parseCPUInfo = parseCPUInfoRISCV diff --git a/vendor/github.com/prometheus/procfs/cpuinfo_s390x.go b/vendor/github.com/prometheus/procfs/cpuinfo_s390x.go deleted file mode 100644 index fa3686bc0..000000000 --- a/vendor/github.com/prometheus/procfs/cpuinfo_s390x.go +++ /dev/null @@ -1,19 +0,0 @@ -// Copyright 2020 The Prometheus Authors -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -//go:build linux -// +build linux - -package procfs - -var parseCPUInfo = parseCPUInfoS390X diff --git a/vendor/github.com/prometheus/procfs/cpuinfo_x86.go b/vendor/github.com/prometheus/procfs/cpuinfo_x86.go deleted file mode 100644 index a0ef55562..000000000 --- a/vendor/github.com/prometheus/procfs/cpuinfo_x86.go +++ /dev/null @@ -1,20 +0,0 @@ -// Copyright 2020 The Prometheus Authors -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -//go:build linux && (386 || amd64) -// +build linux -// +build 386 amd64 - -package procfs - -var parseCPUInfo = parseCPUInfoX86 diff --git a/vendor/github.com/prometheus/procfs/crypto.go b/vendor/github.com/prometheus/procfs/crypto.go deleted file mode 100644 index 13ee66f12..000000000 --- a/vendor/github.com/prometheus/procfs/crypto.go +++ /dev/null @@ -1,154 +0,0 @@ -// Copyright 2019 The Prometheus Authors -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package procfs - -import ( - "bufio" - "bytes" - "fmt" - "io" - "strings" - - "github.com/prometheus/procfs/internal/util" -) - -// Crypto holds info parsed from /proc/crypto. -type Crypto struct { - Alignmask *uint64 - Async bool - Blocksize *uint64 - Chunksize *uint64 - Ctxsize *uint64 - Digestsize *uint64 - Driver string - Geniv string - Internal string - Ivsize *uint64 - Maxauthsize *uint64 - MaxKeysize *uint64 - MinKeysize *uint64 - Module string - Name string - Priority *int64 - Refcnt *int64 - Seedsize *uint64 - Selftest string - Type string - Walksize *uint64 -} - -// Crypto parses an crypto-file (/proc/crypto) and returns a slice of -// structs containing the relevant info. More information available here: -// https://kernel.readthedocs.io/en/sphinx-samples/crypto-API.html -func (fs FS) Crypto() ([]Crypto, error) { - path := fs.proc.Path("crypto") - b, err := util.ReadFileNoStat(path) - if err != nil { - return nil, fmt.Errorf("%s: Cannot read file %v: %w", ErrFileRead, b, err) - - } - - crypto, err := parseCrypto(bytes.NewReader(b)) - if err != nil { - return nil, fmt.Errorf("%s: Cannot parse %v: %w", ErrFileParse, crypto, err) - } - - return crypto, nil -} - -// parseCrypto parses a /proc/crypto stream into Crypto elements. -func parseCrypto(r io.Reader) ([]Crypto, error) { - var out []Crypto - - s := bufio.NewScanner(r) - for s.Scan() { - text := s.Text() - switch { - case strings.HasPrefix(text, "name"): - // Each crypto element begins with its name. 
- out = append(out, Crypto{}) - case text == "": - continue - } - - kv := strings.Split(text, ":") - if len(kv) != 2 { - return nil, fmt.Errorf("%w: Cannot parse line: %q", ErrFileParse, text) - } - - k := strings.TrimSpace(kv[0]) - v := strings.TrimSpace(kv[1]) - - // Parse the key/value pair into the currently focused element. - c := &out[len(out)-1] - if err := c.parseKV(k, v); err != nil { - return nil, err - } - } - - if err := s.Err(); err != nil { - return nil, err - } - - return out, nil -} - -// parseKV parses a key/value pair into the appropriate field of c. -func (c *Crypto) parseKV(k, v string) error { - vp := util.NewValueParser(v) - - switch k { - case "async": - // Interpret literal yes as true. - c.Async = v == "yes" - case "blocksize": - c.Blocksize = vp.PUInt64() - case "chunksize": - c.Chunksize = vp.PUInt64() - case "digestsize": - c.Digestsize = vp.PUInt64() - case "driver": - c.Driver = v - case "geniv": - c.Geniv = v - case "internal": - c.Internal = v - case "ivsize": - c.Ivsize = vp.PUInt64() - case "maxauthsize": - c.Maxauthsize = vp.PUInt64() - case "max keysize": - c.MaxKeysize = vp.PUInt64() - case "min keysize": - c.MinKeysize = vp.PUInt64() - case "module": - c.Module = v - case "name": - c.Name = v - case "priority": - c.Priority = vp.PInt64() - case "refcnt": - c.Refcnt = vp.PInt64() - case "seedsize": - c.Seedsize = vp.PUInt64() - case "selftest": - c.Selftest = v - case "type": - c.Type = v - case "walksize": - c.Walksize = vp.PUInt64() - } - - return vp.Err() -} diff --git a/vendor/github.com/prometheus/procfs/doc.go b/vendor/github.com/prometheus/procfs/doc.go deleted file mode 100644 index f9d961e44..000000000 --- a/vendor/github.com/prometheus/procfs/doc.go +++ /dev/null @@ -1,44 +0,0 @@ -// Copyright 2014 Prometheus Team -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -// Package procfs provides functions to retrieve system, kernel and process -// metrics from the pseudo-filesystem proc. -// -// Example: -// -// package main -// -// import ( -// "fmt" -// "log" -// -// "github.com/prometheus/procfs" -// ) -// -// func main() { -// p, err := procfs.Self() -// if err != nil { -// log.Fatalf("could not get process: %s", err) -// } -// -// stat, err := p.Stat() -// if err != nil { -// log.Fatalf("could not get process stat: %s", err) -// } -// -// fmt.Printf("command: %s\n", stat.Comm) -// fmt.Printf("cpu time: %fs\n", stat.CPUTime()) -// fmt.Printf("vsize: %dB\n", stat.VirtualMemory()) -// fmt.Printf("rss: %dB\n", stat.ResidentMemory()) -// } -package procfs diff --git a/vendor/github.com/prometheus/procfs/fs.go b/vendor/github.com/prometheus/procfs/fs.go deleted file mode 100644 index 4980c875b..000000000 --- a/vendor/github.com/prometheus/procfs/fs.go +++ /dev/null @@ -1,50 +0,0 @@ -// Copyright 2018 The Prometheus Authors -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package procfs - -import ( - "github.com/prometheus/procfs/internal/fs" -) - -// FS represents the pseudo-filesystem sys, which provides an interface to -// kernel data structures. -type FS struct { - proc fs.FS - isReal bool -} - -// DefaultMountPoint is the common mount point of the proc filesystem. -const DefaultMountPoint = fs.DefaultProcMountPoint - -// NewDefaultFS returns a new proc FS mounted under the default proc mountPoint. -// It will error if the mount point directory can't be read or is a file. -func NewDefaultFS() (FS, error) { - return NewFS(DefaultMountPoint) -} - -// NewFS returns a new proc FS mounted under the given proc mountPoint. It will error -// if the mount point directory can't be read or is a file. -func NewFS(mountPoint string) (FS, error) { - fs, err := fs.NewFS(mountPoint) - if err != nil { - return FS{}, err - } - - isReal, err := isRealProc(mountPoint) - if err != nil { - return FS{}, err - } - - return FS{fs, isReal}, nil -} diff --git a/vendor/github.com/prometheus/procfs/fs_statfs_notype.go b/vendor/github.com/prometheus/procfs/fs_statfs_notype.go deleted file mode 100644 index 134767d69..000000000 --- a/vendor/github.com/prometheus/procfs/fs_statfs_notype.go +++ /dev/null @@ -1,23 +0,0 @@ -// Copyright 2018 The Prometheus Authors -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -//go:build !freebsd && !linux -// +build !freebsd,!linux - -package procfs - -// isRealProc returns true on architectures that don't have a Type argument -// in their Statfs_t struct -func isRealProc(mountPoint string) (bool, error) { - return true, nil -} diff --git a/vendor/github.com/prometheus/procfs/fs_statfs_type.go b/vendor/github.com/prometheus/procfs/fs_statfs_type.go deleted file mode 100644 index 80df79c31..000000000 --- a/vendor/github.com/prometheus/procfs/fs_statfs_type.go +++ /dev/null @@ -1,33 +0,0 @@ -// Copyright 2018 The Prometheus Authors -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -//go:build freebsd || linux -// +build freebsd linux - -package procfs - -import ( - "syscall" -) - -// isRealProc determines whether supplied mountpoint is really a proc filesystem. -func isRealProc(mountPoint string) (bool, error) { - stat := syscall.Statfs_t{} - err := syscall.Statfs(mountPoint, &stat) - if err != nil { - return false, err - } - - // 0x9fa0 is PROC_SUPER_MAGIC: https://elixir.bootlin.com/linux/v6.1/source/include/uapi/linux/magic.h#L87 - return stat.Type == 0x9fa0, nil -} diff --git a/vendor/github.com/prometheus/procfs/fscache.go b/vendor/github.com/prometheus/procfs/fscache.go deleted file mode 100644 index f560a8db3..000000000 --- a/vendor/github.com/prometheus/procfs/fscache.go +++ /dev/null @@ -1,422 +0,0 @@ -// Copyright 2019 The Prometheus Authors -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package procfs - -import ( - "bufio" - "bytes" - "fmt" - "io" - "strconv" - "strings" - - "github.com/prometheus/procfs/internal/util" -) - -// Fscacheinfo represents fscache statistics. -type Fscacheinfo struct { - // Number of index cookies allocated - IndexCookiesAllocated uint64 - // data storage cookies allocated - DataStorageCookiesAllocated uint64 - // Number of special cookies allocated - SpecialCookiesAllocated uint64 - // Number of objects allocated - ObjectsAllocated uint64 - // Number of object allocation failures - ObjectAllocationsFailure uint64 - // Number of objects that reached the available state - ObjectsAvailable uint64 - // Number of objects that reached the dead state - ObjectsDead uint64 - // Number of objects that didn't have a coherency check - ObjectsWithoutCoherencyCheck uint64 - // Number of objects that passed a coherency check - ObjectsWithCoherencyCheck uint64 - // Number of objects that needed a coherency data update - ObjectsNeedCoherencyCheckUpdate uint64 - // Number of objects that were declared obsolete - ObjectsDeclaredObsolete uint64 - // Number of pages marked as being cached - PagesMarkedAsBeingCached uint64 - // Number of uncache page requests seen - UncachePagesRequestSeen uint64 - // Number of acquire cookie requests seen - AcquireCookiesRequestSeen uint64 - // Number of acq reqs given a NULL parent - AcquireRequestsWithNullParent uint64 - // Number of acq reqs rejected due to no cache available - AcquireRequestsRejectedNoCacheAvailable uint64 - // Number of acq reqs succeeded - AcquireRequestsSucceeded uint64 - // Number of acq reqs rejected due to error - AcquireRequestsRejectedDueToError uint64 - // Number of acq reqs failed on ENOMEM - AcquireRequestsFailedDueToEnomem uint64 - // Number of lookup calls made on cache backends - LookupsNumber uint64 - // Number of negative lookups made - LookupsNegative uint64 - // Number of positive lookups made - LookupsPositive uint64 - // Number of objects created by lookup - ObjectsCreatedByLookup uint64 - // Number of lookups timed out and requeued - LookupsTimedOutAndRequed uint64 - InvalidationsNumber uint64 - InvalidationsRunning uint64 - // Number of update cookie requests 
seen - UpdateCookieRequestSeen uint64 - // Number of upd reqs given a NULL parent - UpdateRequestsWithNullParent uint64 - // Number of upd reqs granted CPU time - UpdateRequestsRunning uint64 - // Number of relinquish cookie requests seen - RelinquishCookiesRequestSeen uint64 - // Number of rlq reqs given a NULL parent - RelinquishCookiesWithNullParent uint64 - // Number of rlq reqs waited on completion of creation - RelinquishRequestsWaitingCompleteCreation uint64 - // Relinqs rtr - RelinquishRetries uint64 - // Number of attribute changed requests seen - AttributeChangedRequestsSeen uint64 - // Number of attr changed requests queued - AttributeChangedRequestsQueued uint64 - // Number of attr changed rejected -ENOBUFS - AttributeChangedRejectDueToEnobufs uint64 - // Number of attr changed failed -ENOMEM - AttributeChangedFailedDueToEnomem uint64 - // Number of attr changed ops given CPU time - AttributeChangedOps uint64 - // Number of allocation requests seen - AllocationRequestsSeen uint64 - // Number of successful alloc reqs - AllocationOkRequests uint64 - // Number of alloc reqs that waited on lookup completion - AllocationWaitingOnLookup uint64 - // Number of alloc reqs rejected -ENOBUFS - AllocationsRejectedDueToEnobufs uint64 - // Number of alloc reqs aborted -ERESTARTSYS - AllocationsAbortedDueToErestartsys uint64 - // Number of alloc reqs submitted - AllocationOperationsSubmitted uint64 - // Number of alloc reqs waited for CPU time - AllocationsWaitedForCPU uint64 - // Number of alloc reqs aborted due to object death - AllocationsAbortedDueToObjectDeath uint64 - // Number of retrieval (read) requests seen - RetrievalsReadRequests uint64 - // Number of successful retr reqs - RetrievalsOk uint64 - // Number of retr reqs that waited on lookup completion - RetrievalsWaitingLookupCompletion uint64 - // Number of retr reqs returned -ENODATA - RetrievalsReturnedEnodata uint64 - // Number of retr reqs rejected -ENOBUFS - RetrievalsRejectedDueToEnobufs uint64 - // Number of retr reqs aborted -ERESTARTSYS - RetrievalsAbortedDueToErestartsys uint64 - // Number of retr reqs failed -ENOMEM - RetrievalsFailedDueToEnomem uint64 - // Number of retr reqs submitted - RetrievalsRequests uint64 - // Number of retr reqs waited for CPU time - RetrievalsWaitingCPU uint64 - // Number of retr reqs aborted due to object death - RetrievalsAbortedDueToObjectDeath uint64 - // Number of storage (write) requests seen - StoreWriteRequests uint64 - // Number of successful store reqs - StoreSuccessfulRequests uint64 - // Number of store reqs on a page already pending storage - StoreRequestsOnPendingStorage uint64 - // Number of store reqs rejected -ENOBUFS - StoreRequestsRejectedDueToEnobufs uint64 - // Number of store reqs failed -ENOMEM - StoreRequestsFailedDueToEnomem uint64 - // Number of store reqs submitted - StoreRequestsSubmitted uint64 - // Number of store reqs granted CPU time - StoreRequestsRunning uint64 - // Number of pages given store req processing time - StorePagesWithRequestsProcessing uint64 - // Number of store reqs deleted from tracking tree - StoreRequestsDeleted uint64 - // Number of store reqs over store limit - StoreRequestsOverStoreLimit uint64 - // Number of release reqs against pages with no pending store - ReleaseRequestsAgainstPagesWithNoPendingStorage uint64 - // Number of release reqs against pages stored by time lock granted - ReleaseRequestsAgainstPagesStoredByTimeLockGranted uint64 - // Number of release reqs ignored due to in-progress store - 
ReleaseRequestsIgnoredDueToInProgressStore uint64 - // Number of page stores cancelled due to release req - PageStoresCancelledByReleaseRequests uint64 - VmscanWaiting uint64 - // Number of times async ops added to pending queues - OpsPending uint64 - // Number of times async ops given CPU time - OpsRunning uint64 - // Number of times async ops queued for processing - OpsEnqueued uint64 - // Number of async ops cancelled - OpsCancelled uint64 - // Number of async ops rejected due to object lookup/create failure - OpsRejected uint64 - // Number of async ops initialised - OpsInitialised uint64 - // Number of async ops queued for deferred release - OpsDeferred uint64 - // Number of async ops released (should equal ini=N when idle) - OpsReleased uint64 - // Number of deferred-release async ops garbage collected - OpsGarbageCollected uint64 - // Number of in-progress alloc_object() cache ops - CacheopAllocationsinProgress uint64 - // Number of in-progress lookup_object() cache ops - CacheopLookupObjectInProgress uint64 - // Number of in-progress lookup_complete() cache ops - CacheopLookupCompleteInPorgress uint64 - // Number of in-progress grab_object() cache ops - CacheopGrabObjectInProgress uint64 - CacheopInvalidations uint64 - // Number of in-progress update_object() cache ops - CacheopUpdateObjectInProgress uint64 - // Number of in-progress drop_object() cache ops - CacheopDropObjectInProgress uint64 - // Number of in-progress put_object() cache ops - CacheopPutObjectInProgress uint64 - // Number of in-progress attr_changed() cache ops - CacheopAttributeChangeInProgress uint64 - // Number of in-progress sync_cache() cache ops - CacheopSyncCacheInProgress uint64 - // Number of in-progress read_or_alloc_page() cache ops - CacheopReadOrAllocPageInProgress uint64 - // Number of in-progress read_or_alloc_pages() cache ops - CacheopReadOrAllocPagesInProgress uint64 - // Number of in-progress allocate_page() cache ops - CacheopAllocatePageInProgress uint64 - // Number of in-progress allocate_pages() cache ops - CacheopAllocatePagesInProgress uint64 - // Number of in-progress write_page() cache ops - CacheopWritePagesInProgress uint64 - // Number of in-progress uncache_page() cache ops - CacheopUncachePagesInProgress uint64 - // Number of in-progress dissociate_pages() cache ops - CacheopDissociatePagesInProgress uint64 - // Number of object lookups/creations rejected due to lack of space - CacheevLookupsAndCreationsRejectedLackSpace uint64 - // Number of stale objects deleted - CacheevStaleObjectsDeleted uint64 - // Number of objects retired when relinquished - CacheevRetiredWhenReliquished uint64 - // Number of objects culled - CacheevObjectsCulled uint64 -} - -// Fscacheinfo returns information about current fscache statistics. 
-// See https://www.kernel.org/doc/Documentation/filesystems/caching/fscache.txt -func (fs FS) Fscacheinfo() (Fscacheinfo, error) { - b, err := util.ReadFileNoStat(fs.proc.Path("fs/fscache/stats")) - if err != nil { - return Fscacheinfo{}, err - } - - m, err := parseFscacheinfo(bytes.NewReader(b)) - if err != nil { - return Fscacheinfo{}, fmt.Errorf("%s: Cannot parse %v: %w", ErrFileParse, m, err) - } - - return *m, nil -} - -func setFSCacheFields(fields []string, setFields ...*uint64) error { - var err error - if len(fields) < len(setFields) { - return fmt.Errorf("%s: Expected %d, but got %d: %w", ErrFileParse, len(setFields), len(fields), err) - } - - for i := range setFields { - *setFields[i], err = strconv.ParseUint(strings.Split(fields[i], "=")[1], 0, 64) - if err != nil { - return err - } - } - return nil -} - -func parseFscacheinfo(r io.Reader) (*Fscacheinfo, error) { - var m Fscacheinfo - s := bufio.NewScanner(r) - for s.Scan() { - fields := strings.Fields(s.Text()) - if len(fields) < 2 { - return nil, fmt.Errorf("%w: malformed Fscacheinfo line: %q", ErrFileParse, s.Text()) - } - - switch fields[0] { - case "Cookies:": - err := setFSCacheFields(fields[1:], &m.IndexCookiesAllocated, &m.DataStorageCookiesAllocated, - &m.SpecialCookiesAllocated) - if err != nil { - return &m, err - } - case "Objects:": - err := setFSCacheFields(fields[1:], &m.ObjectsAllocated, &m.ObjectAllocationsFailure, - &m.ObjectsAvailable, &m.ObjectsDead) - if err != nil { - return &m, err - } - case "ChkAux": - err := setFSCacheFields(fields[2:], &m.ObjectsWithoutCoherencyCheck, &m.ObjectsWithCoherencyCheck, - &m.ObjectsNeedCoherencyCheckUpdate, &m.ObjectsDeclaredObsolete) - if err != nil { - return &m, err - } - case "Pages": - err := setFSCacheFields(fields[2:], &m.PagesMarkedAsBeingCached, &m.UncachePagesRequestSeen) - if err != nil { - return &m, err - } - case "Acquire:": - err := setFSCacheFields(fields[1:], &m.AcquireCookiesRequestSeen, &m.AcquireRequestsWithNullParent, - &m.AcquireRequestsRejectedNoCacheAvailable, &m.AcquireRequestsSucceeded, &m.AcquireRequestsRejectedDueToError, - &m.AcquireRequestsFailedDueToEnomem) - if err != nil { - return &m, err - } - case "Lookups:": - err := setFSCacheFields(fields[1:], &m.LookupsNumber, &m.LookupsNegative, &m.LookupsPositive, - &m.ObjectsCreatedByLookup, &m.LookupsTimedOutAndRequed) - if err != nil { - return &m, err - } - case "Invals": - err := setFSCacheFields(fields[2:], &m.InvalidationsNumber, &m.InvalidationsRunning) - if err != nil { - return &m, err - } - case "Updates:": - err := setFSCacheFields(fields[1:], &m.UpdateCookieRequestSeen, &m.UpdateRequestsWithNullParent, - &m.UpdateRequestsRunning) - if err != nil { - return &m, err - } - case "Relinqs:": - err := setFSCacheFields(fields[1:], &m.RelinquishCookiesRequestSeen, &m.RelinquishCookiesWithNullParent, - &m.RelinquishRequestsWaitingCompleteCreation, &m.RelinquishRetries) - if err != nil { - return &m, err - } - case "AttrChg:": - err := setFSCacheFields(fields[1:], &m.AttributeChangedRequestsSeen, &m.AttributeChangedRequestsQueued, - &m.AttributeChangedRejectDueToEnobufs, &m.AttributeChangedFailedDueToEnomem, &m.AttributeChangedOps) - if err != nil { - return &m, err - } - case "Allocs": - if strings.Split(fields[2], "=")[0] == "n" { - err := setFSCacheFields(fields[2:], &m.AllocationRequestsSeen, &m.AllocationOkRequests, - &m.AllocationWaitingOnLookup, &m.AllocationsRejectedDueToEnobufs, &m.AllocationsAbortedDueToErestartsys) - if err != nil { - return &m, err - } - } else { - err := 
setFSCacheFields(fields[2:], &m.AllocationOperationsSubmitted, &m.AllocationsWaitedForCPU, - &m.AllocationsAbortedDueToObjectDeath) - if err != nil { - return &m, err - } - } - case "Retrvls:": - if strings.Split(fields[1], "=")[0] == "n" { - err := setFSCacheFields(fields[1:], &m.RetrievalsReadRequests, &m.RetrievalsOk, &m.RetrievalsWaitingLookupCompletion, - &m.RetrievalsReturnedEnodata, &m.RetrievalsRejectedDueToEnobufs, &m.RetrievalsAbortedDueToErestartsys, - &m.RetrievalsFailedDueToEnomem) - if err != nil { - return &m, err - } - } else { - err := setFSCacheFields(fields[1:], &m.RetrievalsRequests, &m.RetrievalsWaitingCPU, &m.RetrievalsAbortedDueToObjectDeath) - if err != nil { - return &m, err - } - } - case "Stores": - if strings.Split(fields[2], "=")[0] == "n" { - err := setFSCacheFields(fields[2:], &m.StoreWriteRequests, &m.StoreSuccessfulRequests, - &m.StoreRequestsOnPendingStorage, &m.StoreRequestsRejectedDueToEnobufs, &m.StoreRequestsFailedDueToEnomem) - if err != nil { - return &m, err - } - } else { - err := setFSCacheFields(fields[2:], &m.StoreRequestsSubmitted, &m.StoreRequestsRunning, - &m.StorePagesWithRequestsProcessing, &m.StoreRequestsDeleted, &m.StoreRequestsOverStoreLimit) - if err != nil { - return &m, err - } - } - case "VmScan": - err := setFSCacheFields(fields[2:], &m.ReleaseRequestsAgainstPagesWithNoPendingStorage, - &m.ReleaseRequestsAgainstPagesStoredByTimeLockGranted, &m.ReleaseRequestsIgnoredDueToInProgressStore, - &m.PageStoresCancelledByReleaseRequests, &m.VmscanWaiting) - if err != nil { - return &m, err - } - case "Ops": - if strings.Split(fields[2], "=")[0] == "pend" { - err := setFSCacheFields(fields[2:], &m.OpsPending, &m.OpsRunning, &m.OpsEnqueued, &m.OpsCancelled, &m.OpsRejected) - if err != nil { - return &m, err - } - } else { - err := setFSCacheFields(fields[2:], &m.OpsInitialised, &m.OpsDeferred, &m.OpsReleased, &m.OpsGarbageCollected) - if err != nil { - return &m, err - } - } - case "CacheOp:": - if strings.Split(fields[1], "=")[0] == "alo" { - err := setFSCacheFields(fields[1:], &m.CacheopAllocationsinProgress, &m.CacheopLookupObjectInProgress, - &m.CacheopLookupCompleteInPorgress, &m.CacheopGrabObjectInProgress) - if err != nil { - return &m, err - } - } else if strings.Split(fields[1], "=")[0] == "inv" { - err := setFSCacheFields(fields[1:], &m.CacheopInvalidations, &m.CacheopUpdateObjectInProgress, - &m.CacheopDropObjectInProgress, &m.CacheopPutObjectInProgress, &m.CacheopAttributeChangeInProgress, - &m.CacheopSyncCacheInProgress) - if err != nil { - return &m, err - } - } else { - err := setFSCacheFields(fields[1:], &m.CacheopReadOrAllocPageInProgress, &m.CacheopReadOrAllocPagesInProgress, - &m.CacheopAllocatePageInProgress, &m.CacheopAllocatePagesInProgress, &m.CacheopWritePagesInProgress, - &m.CacheopUncachePagesInProgress, &m.CacheopDissociatePagesInProgress) - if err != nil { - return &m, err - } - } - case "CacheEv:": - err := setFSCacheFields(fields[1:], &m.CacheevLookupsAndCreationsRejectedLackSpace, &m.CacheevStaleObjectsDeleted, - &m.CacheevRetiredWhenReliquished, &m.CacheevObjectsCulled) - if err != nil { - return &m, err - } - } - } - - return &m, nil -} diff --git a/vendor/github.com/prometheus/procfs/internal/fs/fs.go b/vendor/github.com/prometheus/procfs/internal/fs/fs.go deleted file mode 100644 index 3c18c7610..000000000 --- a/vendor/github.com/prometheus/procfs/internal/fs/fs.go +++ /dev/null @@ -1,55 +0,0 @@ -// Copyright 2019 The Prometheus Authors -// Licensed under the Apache License, Version 2.0 (the "License"); -// 
you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package fs - -import ( - "fmt" - "os" - "path/filepath" -) - -const ( - // DefaultProcMountPoint is the common mount point of the proc filesystem. - DefaultProcMountPoint = "/proc" - - // DefaultSysMountPoint is the common mount point of the sys filesystem. - DefaultSysMountPoint = "/sys" - - // DefaultConfigfsMountPoint is the common mount point of the configfs. - DefaultConfigfsMountPoint = "/sys/kernel/config" -) - -// FS represents a pseudo-filesystem, normally /proc or /sys, which provides an -// interface to kernel data structures. -type FS string - -// NewFS returns a new FS mounted under the given mountPoint. It will error -// if the mount point can't be read. -func NewFS(mountPoint string) (FS, error) { - info, err := os.Stat(mountPoint) - if err != nil { - return "", fmt.Errorf("could not read %q: %w", mountPoint, err) - } - if !info.IsDir() { - return "", fmt.Errorf("mount point %q is not a directory", mountPoint) - } - - return FS(mountPoint), nil -} - -// Path appends the given path elements to the filesystem path, adding separators -// as necessary. -func (fs FS) Path(p ...string) string { - return filepath.Join(append([]string{string(fs)}, p...)...) -} diff --git a/vendor/github.com/prometheus/procfs/internal/util/parse.go b/vendor/github.com/prometheus/procfs/internal/util/parse.go deleted file mode 100644 index 14272dc78..000000000 --- a/vendor/github.com/prometheus/procfs/internal/util/parse.go +++ /dev/null @@ -1,112 +0,0 @@ -// Copyright 2018 The Prometheus Authors -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package util - -import ( - "os" - "strconv" - "strings" -) - -// ParseUint32s parses a slice of strings into a slice of uint32s. -func ParseUint32s(ss []string) ([]uint32, error) { - us := make([]uint32, 0, len(ss)) - for _, s := range ss { - u, err := strconv.ParseUint(s, 10, 32) - if err != nil { - return nil, err - } - - us = append(us, uint32(u)) - } - - return us, nil -} - -// ParseUint64s parses a slice of strings into a slice of uint64s. -func ParseUint64s(ss []string) ([]uint64, error) { - us := make([]uint64, 0, len(ss)) - for _, s := range ss { - u, err := strconv.ParseUint(s, 10, 64) - if err != nil { - return nil, err - } - - us = append(us, u) - } - - return us, nil -} - -// ParsePInt64s parses a slice of strings into a slice of int64 pointers. 
-func ParsePInt64s(ss []string) ([]*int64, error) { - us := make([]*int64, 0, len(ss)) - for _, s := range ss { - u, err := strconv.ParseInt(s, 10, 64) - if err != nil { - return nil, err - } - - us = append(us, &u) - } - - return us, nil -} - -// Parses a uint64 from given hex in string. -func ParseHexUint64s(ss []string) ([]*uint64, error) { - us := make([]*uint64, 0, len(ss)) - for _, s := range ss { - u, err := strconv.ParseUint(s, 16, 64) - if err != nil { - return nil, err - } - - us = append(us, &u) - } - - return us, nil -} - -// ReadUintFromFile reads a file and attempts to parse a uint64 from it. -func ReadUintFromFile(path string) (uint64, error) { - data, err := os.ReadFile(path) - if err != nil { - return 0, err - } - return strconv.ParseUint(strings.TrimSpace(string(data)), 10, 64) -} - -// ReadIntFromFile reads a file and attempts to parse a int64 from it. -func ReadIntFromFile(path string) (int64, error) { - data, err := os.ReadFile(path) - if err != nil { - return 0, err - } - return strconv.ParseInt(strings.TrimSpace(string(data)), 10, 64) -} - -// ParseBool parses a string into a boolean pointer. -func ParseBool(b string) *bool { - var truth bool - switch b { - case "enabled": - truth = true - case "disabled": - truth = false - default: - return nil - } - return &truth -} diff --git a/vendor/github.com/prometheus/procfs/internal/util/readfile.go b/vendor/github.com/prometheus/procfs/internal/util/readfile.go deleted file mode 100644 index 71b7a70eb..000000000 --- a/vendor/github.com/prometheus/procfs/internal/util/readfile.go +++ /dev/null @@ -1,37 +0,0 @@ -// Copyright 2019 The Prometheus Authors -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package util - -import ( - "io" - "os" -) - -// ReadFileNoStat uses io.ReadAll to read contents of entire file. -// This is similar to os.ReadFile but without the call to os.Stat, because -// many files in /proc and /sys report incorrect file sizes (either 0 or 4096). -// Reads a max file size of 1024kB. For files larger than this, a scanner -// should be used. -func ReadFileNoStat(filename string) ([]byte, error) { - const maxBufferSize = 1024 * 1024 - - f, err := os.Open(filename) - if err != nil { - return nil, err - } - defer f.Close() - - reader := io.LimitReader(f, maxBufferSize) - return io.ReadAll(reader) -} diff --git a/vendor/github.com/prometheus/procfs/internal/util/sysreadfile.go b/vendor/github.com/prometheus/procfs/internal/util/sysreadfile.go deleted file mode 100644 index 1ab875cee..000000000 --- a/vendor/github.com/prometheus/procfs/internal/util/sysreadfile.go +++ /dev/null @@ -1,50 +0,0 @@ -// Copyright 2018 The Prometheus Authors -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -//go:build (linux || darwin) && !appengine -// +build linux darwin -// +build !appengine - -package util - -import ( - "bytes" - "os" - "syscall" -) - -// SysReadFile is a simplified os.ReadFile that invokes syscall.Read directly. -// https://github.com/prometheus/node_exporter/pull/728/files -// -// Note that this function will not read files larger than 128 bytes. -func SysReadFile(file string) (string, error) { - f, err := os.Open(file) - if err != nil { - return "", err - } - defer f.Close() - - // On some machines, hwmon drivers are broken and return EAGAIN. This causes - // Go's os.ReadFile implementation to poll forever. - // - // Since we either want to read data or bail immediately, do the simplest - // possible read using syscall directly. - const sysFileBufferSize = 128 - b := make([]byte, sysFileBufferSize) - n, err := syscall.Read(int(f.Fd()), b) - if err != nil { - return "", err - } - - return string(bytes.TrimSpace(b[:n])), nil -} diff --git a/vendor/github.com/prometheus/procfs/internal/util/sysreadfile_compat.go b/vendor/github.com/prometheus/procfs/internal/util/sysreadfile_compat.go deleted file mode 100644 index 1d86f5e63..000000000 --- a/vendor/github.com/prometheus/procfs/internal/util/sysreadfile_compat.go +++ /dev/null @@ -1,27 +0,0 @@ -// Copyright 2019 The Prometheus Authors -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -//go:build (linux && appengine) || (!linux && !darwin) -// +build linux,appengine !linux,!darwin - -package util - -import ( - "fmt" -) - -// SysReadFile is here implemented as a noop for builds that do not support -// the read syscall. For example Windows, or Linux on Google App Engine. -func SysReadFile(file string) (string, error) { - return "", fmt.Errorf("not supported on this platform") -} diff --git a/vendor/github.com/prometheus/procfs/internal/util/valueparser.go b/vendor/github.com/prometheus/procfs/internal/util/valueparser.go deleted file mode 100644 index fe2355d3c..000000000 --- a/vendor/github.com/prometheus/procfs/internal/util/valueparser.go +++ /dev/null @@ -1,91 +0,0 @@ -// Copyright 2019 The Prometheus Authors -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-// See the License for the specific language governing permissions and -// limitations under the License. - -package util - -import ( - "strconv" -) - -// TODO(mdlayher): util packages are an anti-pattern and this should be moved -// somewhere else that is more focused in the future. - -// A ValueParser enables parsing a single string into a variety of data types -// in a concise and safe way. The Err method must be invoked after invoking -// any other methods to ensure a value was successfully parsed. -type ValueParser struct { - v string - err error -} - -// NewValueParser creates a ValueParser using the input string. -func NewValueParser(v string) *ValueParser { - return &ValueParser{v: v} -} - -// Int interprets the underlying value as an int and returns that value. -func (vp *ValueParser) Int() int { return int(vp.int64()) } - -// PInt64 interprets the underlying value as an int64 and returns a pointer to -// that value. -func (vp *ValueParser) PInt64() *int64 { - if vp.err != nil { - return nil - } - - v := vp.int64() - return &v -} - -// int64 interprets the underlying value as an int64 and returns that value. -// TODO: export if/when necessary. -func (vp *ValueParser) int64() int64 { - if vp.err != nil { - return 0 - } - - // A base value of zero makes ParseInt infer the correct base using the - // string's prefix, if any. - const base = 0 - v, err := strconv.ParseInt(vp.v, base, 64) - if err != nil { - vp.err = err - return 0 - } - - return v -} - -// PUInt64 interprets the underlying value as an uint64 and returns a pointer to -// that value. -func (vp *ValueParser) PUInt64() *uint64 { - if vp.err != nil { - return nil - } - - // A base value of zero makes ParseInt infer the correct base using the - // string's prefix, if any. - const base = 0 - v, err := strconv.ParseUint(vp.v, base, 64) - if err != nil { - vp.err = err - return nil - } - - return &v -} - -// Err returns the last error, if any, encountered by the ValueParser. -func (vp *ValueParser) Err() error { - return vp.err -} diff --git a/vendor/github.com/prometheus/procfs/ipvs.go b/vendor/github.com/prometheus/procfs/ipvs.go deleted file mode 100644 index 5a145bbfe..000000000 --- a/vendor/github.com/prometheus/procfs/ipvs.go +++ /dev/null @@ -1,241 +0,0 @@ -// Copyright 2018 The Prometheus Authors -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package procfs - -import ( - "bufio" - "bytes" - "encoding/hex" - "errors" - "fmt" - "io" - "net" - "os" - "strconv" - "strings" - - "github.com/prometheus/procfs/internal/util" -) - -// IPVSStats holds IPVS statistics, as exposed by the kernel in `/proc/net/ip_vs_stats`. -type IPVSStats struct { - // Total count of connections. - Connections uint64 - // Total incoming packages processed. - IncomingPackets uint64 - // Total outgoing packages processed. - OutgoingPackets uint64 - // Total incoming traffic. - IncomingBytes uint64 - // Total outgoing traffic. - OutgoingBytes uint64 -} - -// IPVSBackendStatus holds current metrics of one virtual / real address pair. 
-type IPVSBackendStatus struct { - // The local (virtual) IP address. - LocalAddress net.IP - // The remote (real) IP address. - RemoteAddress net.IP - // The local (virtual) port. - LocalPort uint16 - // The remote (real) port. - RemotePort uint16 - // The local firewall mark - LocalMark string - // The transport protocol (TCP, UDP). - Proto string - // The current number of active connections for this virtual/real address pair. - ActiveConn uint64 - // The current number of inactive connections for this virtual/real address pair. - InactConn uint64 - // The current weight of this virtual/real address pair. - Weight uint64 -} - -// IPVSStats reads the IPVS statistics from the specified `proc` filesystem. -func (fs FS) IPVSStats() (IPVSStats, error) { - data, err := util.ReadFileNoStat(fs.proc.Path("net/ip_vs_stats")) - if err != nil { - return IPVSStats{}, err - } - - return parseIPVSStats(bytes.NewReader(data)) -} - -// parseIPVSStats performs the actual parsing of `ip_vs_stats`. -func parseIPVSStats(r io.Reader) (IPVSStats, error) { - var ( - statContent []byte - statLines []string - statFields []string - stats IPVSStats - ) - - statContent, err := io.ReadAll(r) - if err != nil { - return IPVSStats{}, err - } - - statLines = strings.SplitN(string(statContent), "\n", 4) - if len(statLines) != 4 { - return IPVSStats{}, errors.New("ip_vs_stats corrupt: too short") - } - - statFields = strings.Fields(statLines[2]) - if len(statFields) != 5 { - return IPVSStats{}, errors.New("ip_vs_stats corrupt: unexpected number of fields") - } - - stats.Connections, err = strconv.ParseUint(statFields[0], 16, 64) - if err != nil { - return IPVSStats{}, err - } - stats.IncomingPackets, err = strconv.ParseUint(statFields[1], 16, 64) - if err != nil { - return IPVSStats{}, err - } - stats.OutgoingPackets, err = strconv.ParseUint(statFields[2], 16, 64) - if err != nil { - return IPVSStats{}, err - } - stats.IncomingBytes, err = strconv.ParseUint(statFields[3], 16, 64) - if err != nil { - return IPVSStats{}, err - } - stats.OutgoingBytes, err = strconv.ParseUint(statFields[4], 16, 64) - if err != nil { - return IPVSStats{}, err - } - - return stats, nil -} - -// IPVSBackendStatus reads and returns the status of all (virtual,real) server pairs from the specified `proc` filesystem. 
-func (fs FS) IPVSBackendStatus() ([]IPVSBackendStatus, error) { - file, err := os.Open(fs.proc.Path("net/ip_vs")) - if err != nil { - return nil, err - } - defer file.Close() - - return parseIPVSBackendStatus(file) -} - -func parseIPVSBackendStatus(file io.Reader) ([]IPVSBackendStatus, error) { - var ( - status []IPVSBackendStatus - scanner = bufio.NewScanner(file) - proto string - localMark string - localAddress net.IP - localPort uint16 - err error - ) - - for scanner.Scan() { - fields := strings.Fields(scanner.Text()) - if len(fields) == 0 { - continue - } - switch { - case fields[0] == "IP" || fields[0] == "Prot" || fields[1] == "RemoteAddress:Port": - continue - case fields[0] == "TCP" || fields[0] == "UDP": - if len(fields) < 2 { - continue - } - proto = fields[0] - localMark = "" - localAddress, localPort, err = parseIPPort(fields[1]) - if err != nil { - return nil, err - } - case fields[0] == "FWM": - if len(fields) < 2 { - continue - } - proto = fields[0] - localMark = fields[1] - localAddress = nil - localPort = 0 - case fields[0] == "->": - if len(fields) < 6 { - continue - } - remoteAddress, remotePort, err := parseIPPort(fields[1]) - if err != nil { - return nil, err - } - weight, err := strconv.ParseUint(fields[3], 10, 64) - if err != nil { - return nil, err - } - activeConn, err := strconv.ParseUint(fields[4], 10, 64) - if err != nil { - return nil, err - } - inactConn, err := strconv.ParseUint(fields[5], 10, 64) - if err != nil { - return nil, err - } - status = append(status, IPVSBackendStatus{ - LocalAddress: localAddress, - LocalPort: localPort, - LocalMark: localMark, - RemoteAddress: remoteAddress, - RemotePort: remotePort, - Proto: proto, - Weight: weight, - ActiveConn: activeConn, - InactConn: inactConn, - }) - } - } - return status, nil -} - -func parseIPPort(s string) (net.IP, uint16, error) { - var ( - ip net.IP - err error - ) - - switch len(s) { - case 13: - ip, err = hex.DecodeString(s[0:8]) - if err != nil { - return nil, 0, err - } - case 46: - ip = net.ParseIP(s[1:40]) - if ip == nil { - return nil, 0, fmt.Errorf("%s: Invalid IPv6 addr %s: %w", ErrFileParse, s[1:40], err) - } - default: - return nil, 0, fmt.Errorf("%s: Unexpected IP:Port %s: %w", ErrFileParse, s, err) - } - - portString := s[len(s)-4:] - if len(portString) != 4 { - return nil, 0, - fmt.Errorf("%s: Unexpected port string format %s: %w", ErrFileParse, portString, err) - } - port, err := strconv.ParseUint(portString, 16, 16) - if err != nil { - return nil, 0, err - } - - return ip, uint16(port), nil -} diff --git a/vendor/github.com/prometheus/procfs/kernel_random.go b/vendor/github.com/prometheus/procfs/kernel_random.go deleted file mode 100644 index db88566bd..000000000 --- a/vendor/github.com/prometheus/procfs/kernel_random.go +++ /dev/null @@ -1,63 +0,0 @@ -// Copyright 2020 The Prometheus Authors -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -//go:build !windows -// +build !windows - -package procfs - -import ( - "os" - - "github.com/prometheus/procfs/internal/util" -) - -// KernelRandom contains information about to the kernel's random number generator. -type KernelRandom struct { - // EntropyAvaliable gives the available entropy, in bits. - EntropyAvaliable *uint64 - // PoolSize gives the size of the entropy pool, in bits. - PoolSize *uint64 - // URandomMinReseedSeconds is the number of seconds after which the DRNG will be reseeded. - URandomMinReseedSeconds *uint64 - // WriteWakeupThreshold the number of bits of entropy below which we wake up processes - // that do a select(2) or poll(2) for write access to /dev/random. - WriteWakeupThreshold *uint64 - // ReadWakeupThreshold is the number of bits of entropy required for waking up processes that sleep - // waiting for entropy from /dev/random. - ReadWakeupThreshold *uint64 -} - -// KernelRandom returns values from /proc/sys/kernel/random. -func (fs FS) KernelRandom() (KernelRandom, error) { - random := KernelRandom{} - - for file, p := range map[string]**uint64{ - "entropy_avail": &random.EntropyAvaliable, - "poolsize": &random.PoolSize, - "urandom_min_reseed_secs": &random.URandomMinReseedSeconds, - "write_wakeup_threshold": &random.WriteWakeupThreshold, - "read_wakeup_threshold": &random.ReadWakeupThreshold, - } { - val, err := util.ReadUintFromFile(fs.proc.Path("sys", "kernel", "random", file)) - if os.IsNotExist(err) { - continue - } - if err != nil { - return random, err - } - *p = &val - } - - return random, nil -} diff --git a/vendor/github.com/prometheus/procfs/loadavg.go b/vendor/github.com/prometheus/procfs/loadavg.go deleted file mode 100644 index 59465c5bb..000000000 --- a/vendor/github.com/prometheus/procfs/loadavg.go +++ /dev/null @@ -1,62 +0,0 @@ -// Copyright 2019 The Prometheus Authors -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package procfs - -import ( - "fmt" - "strconv" - "strings" - - "github.com/prometheus/procfs/internal/util" -) - -// LoadAvg represents an entry in /proc/loadavg. -type LoadAvg struct { - Load1 float64 - Load5 float64 - Load15 float64 -} - -// LoadAvg returns loadavg from /proc. -func (fs FS) LoadAvg() (*LoadAvg, error) { - path := fs.proc.Path("loadavg") - - data, err := util.ReadFileNoStat(path) - if err != nil { - return nil, err - } - return parseLoad(data) -} - -// Parse /proc loadavg and return 1m, 5m and 15m. 
-func parseLoad(loadavgBytes []byte) (*LoadAvg, error) { - loads := make([]float64, 3) - parts := strings.Fields(string(loadavgBytes)) - if len(parts) < 3 { - return nil, fmt.Errorf("%w: Malformed line %q", ErrFileParse, string(loadavgBytes)) - } - - var err error - for i, load := range parts[0:3] { - loads[i], err = strconv.ParseFloat(load, 64) - if err != nil { - return nil, fmt.Errorf("%s: Cannot parse load: %f: %w", ErrFileParse, loads[i], err) - } - } - return &LoadAvg{ - Load1: loads[0], - Load5: loads[1], - Load15: loads[2], - }, nil -} diff --git a/vendor/github.com/prometheus/procfs/mdstat.go b/vendor/github.com/prometheus/procfs/mdstat.go deleted file mode 100644 index fdd4b9544..000000000 --- a/vendor/github.com/prometheus/procfs/mdstat.go +++ /dev/null @@ -1,266 +0,0 @@ -// Copyright 2018 The Prometheus Authors -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package procfs - -import ( - "fmt" - "os" - "regexp" - "strconv" - "strings" -) - -var ( - statusLineRE = regexp.MustCompile(`(\d+) blocks .*\[(\d+)/(\d+)\] \[([U_]+)\]`) - recoveryLineBlocksRE = regexp.MustCompile(`\((\d+)/\d+\)`) - recoveryLinePctRE = regexp.MustCompile(`= (.+)%`) - recoveryLineFinishRE = regexp.MustCompile(`finish=(.+)min`) - recoveryLineSpeedRE = regexp.MustCompile(`speed=(.+)[A-Z]`) - componentDeviceRE = regexp.MustCompile(`(.*)\[\d+\]`) -) - -// MDStat holds info parsed from /proc/mdstat. -type MDStat struct { - // Name of the device. - Name string - // activity-state of the device. - ActivityState string - // Number of active disks. - DisksActive int64 - // Total number of disks the device requires. - DisksTotal int64 - // Number of failed disks. - DisksFailed int64 - // Number of "down" disks. (the _ indicator in the status line) - DisksDown int64 - // Spare disks in the device. - DisksSpare int64 - // Number of blocks the device holds. - BlocksTotal int64 - // Number of blocks on the device that are in sync. - BlocksSynced int64 - // progress percentage of current sync - BlocksSyncedPct float64 - // estimated finishing time for current sync (in minutes) - BlocksSyncedFinishTime float64 - // current sync speed (in Kilobytes/sec) - BlocksSyncedSpeed float64 - // Name of md component devices - Devices []string -} - -// MDStat parses an mdstat-file (/proc/mdstat) and returns a slice of -// structs containing the relevant info. More information available here: -// https://raid.wiki.kernel.org/index.php/Mdstat -func (fs FS) MDStat() ([]MDStat, error) { - data, err := os.ReadFile(fs.proc.Path("mdstat")) - if err != nil { - return nil, err - } - mdstat, err := parseMDStat(data) - if err != nil { - return nil, fmt.Errorf("%s: Cannot parse %v: %w", ErrFileParse, fs.proc.Path("mdstat"), err) - } - return mdstat, nil -} - -// parseMDStat parses data from mdstat file (/proc/mdstat) and returns a slice of -// structs containing the relevant info. 
-func parseMDStat(mdStatData []byte) ([]MDStat, error) { - mdStats := []MDStat{} - lines := strings.Split(string(mdStatData), "\n") - - for i, line := range lines { - if strings.TrimSpace(line) == "" || line[0] == ' ' || - strings.HasPrefix(line, "Personalities") || - strings.HasPrefix(line, "unused") { - continue - } - - deviceFields := strings.Fields(line) - if len(deviceFields) < 3 { - return nil, fmt.Errorf("%s: Expected 3+ lines, got %q", ErrFileParse, line) - } - mdName := deviceFields[0] // mdx - state := deviceFields[2] // active or inactive - - if len(lines) <= i+3 { - return nil, fmt.Errorf("%w: Too few lines for md device: %q", ErrFileParse, mdName) - } - - // Failed disks have the suffix (F) & Spare disks have the suffix (S). - fail := int64(strings.Count(line, "(F)")) - spare := int64(strings.Count(line, "(S)")) - active, total, down, size, err := evalStatusLine(lines[i], lines[i+1]) - - if err != nil { - return nil, fmt.Errorf("%s: Cannot parse md device lines: %v: %w", ErrFileParse, active, err) - } - - syncLineIdx := i + 2 - if strings.Contains(lines[i+2], "bitmap") { // skip bitmap line - syncLineIdx++ - } - - // If device is syncing at the moment, get the number of currently - // synced bytes, otherwise that number equals the size of the device. - syncedBlocks := size - speed := float64(0) - finish := float64(0) - pct := float64(0) - recovering := strings.Contains(lines[syncLineIdx], "recovery") - resyncing := strings.Contains(lines[syncLineIdx], "resync") - checking := strings.Contains(lines[syncLineIdx], "check") - - // Append recovery and resyncing state info. - if recovering || resyncing || checking { - if recovering { - state = "recovering" - } else if checking { - state = "checking" - } else { - state = "resyncing" - } - - // Handle case when resync=PENDING or resync=DELAYED. - if strings.Contains(lines[syncLineIdx], "PENDING") || - strings.Contains(lines[syncLineIdx], "DELAYED") { - syncedBlocks = 0 - } else { - syncedBlocks, pct, finish, speed, err = evalRecoveryLine(lines[syncLineIdx]) - if err != nil { - return nil, fmt.Errorf("%s: Cannot parse sync line in md device: %q: %w", ErrFileParse, mdName, err) - } - } - } - - mdStats = append(mdStats, MDStat{ - Name: mdName, - ActivityState: state, - DisksActive: active, - DisksFailed: fail, - DisksDown: down, - DisksSpare: spare, - DisksTotal: total, - BlocksTotal: size, - BlocksSynced: syncedBlocks, - BlocksSyncedPct: pct, - BlocksSyncedFinishTime: finish, - BlocksSyncedSpeed: speed, - Devices: evalComponentDevices(deviceFields), - }) - } - - return mdStats, nil -} - -func evalStatusLine(deviceLine, statusLine string) (active, total, down, size int64, err error) { - statusFields := strings.Fields(statusLine) - if len(statusFields) < 1 { - return 0, 0, 0, 0, fmt.Errorf("%s: Unexpected statusline %q: %w", ErrFileParse, statusLine, err) - } - - sizeStr := statusFields[0] - size, err = strconv.ParseInt(sizeStr, 10, 64) - if err != nil { - return 0, 0, 0, 0, fmt.Errorf("%s: Unexpected statusline %q: %w", ErrFileParse, statusLine, err) - } - - if strings.Contains(deviceLine, "raid0") || strings.Contains(deviceLine, "linear") { - // In the device deviceLine, only disks have a number associated with them in []. 
- total = int64(strings.Count(deviceLine, "[")) - return total, total, 0, size, nil - } - - if strings.Contains(deviceLine, "inactive") { - return 0, 0, 0, size, nil - } - - matches := statusLineRE.FindStringSubmatch(statusLine) - if len(matches) != 5 { - return 0, 0, 0, 0, fmt.Errorf("%s: Could not fild all substring matches %s: %w", ErrFileParse, statusLine, err) - } - - total, err = strconv.ParseInt(matches[2], 10, 64) - if err != nil { - return 0, 0, 0, 0, fmt.Errorf("%s: Unexpected statusline %q: %w", ErrFileParse, statusLine, err) - } - - active, err = strconv.ParseInt(matches[3], 10, 64) - if err != nil { - return 0, 0, 0, 0, fmt.Errorf("%s: Unexpected active %d: %w", ErrFileParse, active, err) - } - down = int64(strings.Count(matches[4], "_")) - - return active, total, down, size, nil -} - -func evalRecoveryLine(recoveryLine string) (syncedBlocks int64, pct float64, finish float64, speed float64, err error) { - matches := recoveryLineBlocksRE.FindStringSubmatch(recoveryLine) - if len(matches) != 2 { - return 0, 0, 0, 0, fmt.Errorf("%s: Unexpected recoveryLine %s: %w", ErrFileParse, recoveryLine, err) - } - - syncedBlocks, err = strconv.ParseInt(matches[1], 10, 64) - if err != nil { - return 0, 0, 0, 0, fmt.Errorf("%s: Unexpected parsing of recoveryLine %q: %w", ErrFileParse, recoveryLine, err) - } - - // Get percentage complete - matches = recoveryLinePctRE.FindStringSubmatch(recoveryLine) - if len(matches) != 2 { - return syncedBlocks, 0, 0, 0, fmt.Errorf("%w: Unexpected recoveryLine matching percentage %s", ErrFileParse, recoveryLine) - } - pct, err = strconv.ParseFloat(strings.TrimSpace(matches[1]), 64) - if err != nil { - return syncedBlocks, 0, 0, 0, fmt.Errorf("%w: Error parsing float from recoveryLine %q", ErrFileParse, recoveryLine) - } - - // Get time expected left to complete - matches = recoveryLineFinishRE.FindStringSubmatch(recoveryLine) - if len(matches) != 2 { - return syncedBlocks, pct, 0, 0, fmt.Errorf("%w: Unexpected recoveryLine matching est. finish time: %s", ErrFileParse, recoveryLine) - } - finish, err = strconv.ParseFloat(matches[1], 64) - if err != nil { - return syncedBlocks, pct, 0, 0, fmt.Errorf("%w: Unable to parse float from recoveryLine: %q", ErrFileParse, recoveryLine) - } - - // Get recovery speed - matches = recoveryLineSpeedRE.FindStringSubmatch(recoveryLine) - if len(matches) != 2 { - return syncedBlocks, pct, finish, 0, fmt.Errorf("%w: Unexpected recoveryLine value: %s", ErrFileParse, recoveryLine) - } - speed, err = strconv.ParseFloat(matches[1], 64) - if err != nil { - return syncedBlocks, pct, finish, 0, fmt.Errorf("%s: Error parsing float from recoveryLine: %q: %w", ErrFileParse, recoveryLine, err) - } - - return syncedBlocks, pct, finish, speed, nil -} - -func evalComponentDevices(deviceFields []string) []string { - mdComponentDevices := make([]string, 0) - if len(deviceFields) > 3 { - for _, field := range deviceFields[4:] { - match := componentDeviceRE.FindStringSubmatch(field) - if match == nil { - continue - } - mdComponentDevices = append(mdComponentDevices, match[1]) - } - } - - return mdComponentDevices -} diff --git a/vendor/github.com/prometheus/procfs/meminfo.go b/vendor/github.com/prometheus/procfs/meminfo.go deleted file mode 100644 index 73a03e8a3..000000000 --- a/vendor/github.com/prometheus/procfs/meminfo.go +++ /dev/null @@ -1,389 +0,0 @@ -// Copyright 2019 The Prometheus Authors -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package procfs - -import ( - "bufio" - "bytes" - "fmt" - "io" - "strconv" - "strings" - - "github.com/prometheus/procfs/internal/util" -) - -// Meminfo represents memory statistics. -type Meminfo struct { - // Total usable ram (i.e. physical ram minus a few reserved - // bits and the kernel binary code) - MemTotal *uint64 - // The sum of LowFree+HighFree - MemFree *uint64 - // An estimate of how much memory is available for starting - // new applications, without swapping. Calculated from - // MemFree, SReclaimable, the size of the file LRU lists, and - // the low watermarks in each zone. The estimate takes into - // account that the system needs some page cache to function - // well, and that not all reclaimable slab will be - // reclaimable, due to items being in use. The impact of those - // factors will vary from system to system. - MemAvailable *uint64 - // Relatively temporary storage for raw disk blocks shouldn't - // get tremendously large (20MB or so) - Buffers *uint64 - Cached *uint64 - // Memory that once was swapped out, is swapped back in but - // still also is in the swapfile (if memory is needed it - // doesn't need to be swapped out AGAIN because it is already - // in the swapfile. This saves I/O) - SwapCached *uint64 - // Memory that has been used more recently and usually not - // reclaimed unless absolutely necessary. - Active *uint64 - // Memory which has been less recently used. It is more - // eligible to be reclaimed for other purposes - Inactive *uint64 - ActiveAnon *uint64 - InactiveAnon *uint64 - ActiveFile *uint64 - InactiveFile *uint64 - Unevictable *uint64 - Mlocked *uint64 - // total amount of swap space available - SwapTotal *uint64 - // Memory which has been evicted from RAM, and is temporarily - // on the disk - SwapFree *uint64 - // Memory which is waiting to get written back to the disk - Dirty *uint64 - // Memory which is actively being written back to the disk - Writeback *uint64 - // Non-file backed pages mapped into userspace page tables - AnonPages *uint64 - // files which have been mapped, such as libraries - Mapped *uint64 - Shmem *uint64 - // in-kernel data structures cache - Slab *uint64 - // Part of Slab, that might be reclaimed, such as caches - SReclaimable *uint64 - // Part of Slab, that cannot be reclaimed on memory pressure - SUnreclaim *uint64 - KernelStack *uint64 - // amount of memory dedicated to the lowest level of page - // tables. - PageTables *uint64 - // NFS pages sent to the server, but not yet committed to - // stable storage - NFSUnstable *uint64 - // Memory used for block device "bounce buffers" - Bounce *uint64 - // Memory used by FUSE for temporary writeback buffers - WritebackTmp *uint64 - // Based on the overcommit ratio ('vm.overcommit_ratio'), - // this is the total amount of memory currently available to - // be allocated on the system. This limit is only adhered to - // if strict overcommit accounting is enabled (mode 2 in - // 'vm.overcommit_memory'). 
- // The CommitLimit is calculated with the following formula: - // CommitLimit = ([total RAM pages] - [total huge TLB pages]) * - // overcommit_ratio / 100 + [total swap pages] - // For example, on a system with 1G of physical RAM and 7G - // of swap with a `vm.overcommit_ratio` of 30 it would - // yield a CommitLimit of 7.3G. - // For more details, see the memory overcommit documentation - // in vm/overcommit-accounting. - CommitLimit *uint64 - // The amount of memory presently allocated on the system. - // The committed memory is a sum of all of the memory which - // has been allocated by processes, even if it has not been - // "used" by them as of yet. A process which malloc()'s 1G - // of memory, but only touches 300M of it will show up as - // using 1G. This 1G is memory which has been "committed" to - // by the VM and can be used at any time by the allocating - // application. With strict overcommit enabled on the system - // (mode 2 in 'vm.overcommit_memory'),allocations which would - // exceed the CommitLimit (detailed above) will not be permitted. - // This is useful if one needs to guarantee that processes will - // not fail due to lack of memory once that memory has been - // successfully allocated. - CommittedAS *uint64 - // total size of vmalloc memory area - VmallocTotal *uint64 - // amount of vmalloc area which is used - VmallocUsed *uint64 - // largest contiguous block of vmalloc area which is free - VmallocChunk *uint64 - Percpu *uint64 - HardwareCorrupted *uint64 - AnonHugePages *uint64 - ShmemHugePages *uint64 - ShmemPmdMapped *uint64 - CmaTotal *uint64 - CmaFree *uint64 - HugePagesTotal *uint64 - HugePagesFree *uint64 - HugePagesRsvd *uint64 - HugePagesSurp *uint64 - Hugepagesize *uint64 - DirectMap4k *uint64 - DirectMap2M *uint64 - DirectMap1G *uint64 - - // The struct fields below are the byte-normalized counterparts to the - // existing struct fields. Values are normalized using the optional - // unit field in the meminfo line. - MemTotalBytes *uint64 - MemFreeBytes *uint64 - MemAvailableBytes *uint64 - BuffersBytes *uint64 - CachedBytes *uint64 - SwapCachedBytes *uint64 - ActiveBytes *uint64 - InactiveBytes *uint64 - ActiveAnonBytes *uint64 - InactiveAnonBytes *uint64 - ActiveFileBytes *uint64 - InactiveFileBytes *uint64 - UnevictableBytes *uint64 - MlockedBytes *uint64 - SwapTotalBytes *uint64 - SwapFreeBytes *uint64 - DirtyBytes *uint64 - WritebackBytes *uint64 - AnonPagesBytes *uint64 - MappedBytes *uint64 - ShmemBytes *uint64 - SlabBytes *uint64 - SReclaimableBytes *uint64 - SUnreclaimBytes *uint64 - KernelStackBytes *uint64 - PageTablesBytes *uint64 - NFSUnstableBytes *uint64 - BounceBytes *uint64 - WritebackTmpBytes *uint64 - CommitLimitBytes *uint64 - CommittedASBytes *uint64 - VmallocTotalBytes *uint64 - VmallocUsedBytes *uint64 - VmallocChunkBytes *uint64 - PercpuBytes *uint64 - HardwareCorruptedBytes *uint64 - AnonHugePagesBytes *uint64 - ShmemHugePagesBytes *uint64 - ShmemPmdMappedBytes *uint64 - CmaTotalBytes *uint64 - CmaFreeBytes *uint64 - HugepagesizeBytes *uint64 - DirectMap4kBytes *uint64 - DirectMap2MBytes *uint64 - DirectMap1GBytes *uint64 -} - -// Meminfo returns an information about current kernel/system memory statistics. 
-// See https://www.kernel.org/doc/Documentation/filesystems/proc.txt -func (fs FS) Meminfo() (Meminfo, error) { - b, err := util.ReadFileNoStat(fs.proc.Path("meminfo")) - if err != nil { - return Meminfo{}, err - } - - m, err := parseMemInfo(bytes.NewReader(b)) - if err != nil { - return Meminfo{}, fmt.Errorf("%s: %w", ErrFileParse, err) - } - - return *m, nil -} - -func parseMemInfo(r io.Reader) (*Meminfo, error) { - var m Meminfo - s := bufio.NewScanner(r) - for s.Scan() { - fields := strings.Fields(s.Text()) - var val, valBytes uint64 - - val, err := strconv.ParseUint(fields[1], 0, 64) - if err != nil { - return nil, err - } - - switch len(fields) { - case 2: - // No unit present, use the parsed the value as bytes directly. - valBytes = val - case 3: - // Unit present in optional 3rd field, convert it to - // bytes. The only unit supported within the Linux - // kernel is `kB`. - if fields[2] != "kB" { - return nil, fmt.Errorf("%w: Unsupported unit in optional 3rd field %q", ErrFileParse, fields[2]) - } - - valBytes = 1024 * val - - default: - return nil, fmt.Errorf("%w: Malformed line %q", ErrFileParse, s.Text()) - } - - switch fields[0] { - case "MemTotal:": - m.MemTotal = &val - m.MemTotalBytes = &valBytes - case "MemFree:": - m.MemFree = &val - m.MemFreeBytes = &valBytes - case "MemAvailable:": - m.MemAvailable = &val - m.MemAvailableBytes = &valBytes - case "Buffers:": - m.Buffers = &val - m.BuffersBytes = &valBytes - case "Cached:": - m.Cached = &val - m.CachedBytes = &valBytes - case "SwapCached:": - m.SwapCached = &val - m.SwapCachedBytes = &valBytes - case "Active:": - m.Active = &val - m.ActiveBytes = &valBytes - case "Inactive:": - m.Inactive = &val - m.InactiveBytes = &valBytes - case "Active(anon):": - m.ActiveAnon = &val - m.ActiveAnonBytes = &valBytes - case "Inactive(anon):": - m.InactiveAnon = &val - m.InactiveAnonBytes = &valBytes - case "Active(file):": - m.ActiveFile = &val - m.ActiveFileBytes = &valBytes - case "Inactive(file):": - m.InactiveFile = &val - m.InactiveFileBytes = &valBytes - case "Unevictable:": - m.Unevictable = &val - m.UnevictableBytes = &valBytes - case "Mlocked:": - m.Mlocked = &val - m.MlockedBytes = &valBytes - case "SwapTotal:": - m.SwapTotal = &val - m.SwapTotalBytes = &valBytes - case "SwapFree:": - m.SwapFree = &val - m.SwapFreeBytes = &valBytes - case "Dirty:": - m.Dirty = &val - m.DirtyBytes = &valBytes - case "Writeback:": - m.Writeback = &val - m.WritebackBytes = &valBytes - case "AnonPages:": - m.AnonPages = &val - m.AnonPagesBytes = &valBytes - case "Mapped:": - m.Mapped = &val - m.MappedBytes = &valBytes - case "Shmem:": - m.Shmem = &val - m.ShmemBytes = &valBytes - case "Slab:": - m.Slab = &val - m.SlabBytes = &valBytes - case "SReclaimable:": - m.SReclaimable = &val - m.SReclaimableBytes = &valBytes - case "SUnreclaim:": - m.SUnreclaim = &val - m.SUnreclaimBytes = &valBytes - case "KernelStack:": - m.KernelStack = &val - m.KernelStackBytes = &valBytes - case "PageTables:": - m.PageTables = &val - m.PageTablesBytes = &valBytes - case "NFS_Unstable:": - m.NFSUnstable = &val - m.NFSUnstableBytes = &valBytes - case "Bounce:": - m.Bounce = &val - m.BounceBytes = &valBytes - case "WritebackTmp:": - m.WritebackTmp = &val - m.WritebackTmpBytes = &valBytes - case "CommitLimit:": - m.CommitLimit = &val - m.CommitLimitBytes = &valBytes - case "Committed_AS:": - m.CommittedAS = &val - m.CommittedASBytes = &valBytes - case "VmallocTotal:": - m.VmallocTotal = &val - m.VmallocTotalBytes = &valBytes - case "VmallocUsed:": - m.VmallocUsed = &val - 
m.VmallocUsedBytes = &valBytes - case "VmallocChunk:": - m.VmallocChunk = &val - m.VmallocChunkBytes = &valBytes - case "Percpu:": - m.Percpu = &val - m.PercpuBytes = &valBytes - case "HardwareCorrupted:": - m.HardwareCorrupted = &val - m.HardwareCorruptedBytes = &valBytes - case "AnonHugePages:": - m.AnonHugePages = &val - m.AnonHugePagesBytes = &valBytes - case "ShmemHugePages:": - m.ShmemHugePages = &val - m.ShmemHugePagesBytes = &valBytes - case "ShmemPmdMapped:": - m.ShmemPmdMapped = &val - m.ShmemPmdMappedBytes = &valBytes - case "CmaTotal:": - m.CmaTotal = &val - m.CmaTotalBytes = &valBytes - case "CmaFree:": - m.CmaFree = &val - m.CmaFreeBytes = &valBytes - case "HugePages_Total:": - m.HugePagesTotal = &val - case "HugePages_Free:": - m.HugePagesFree = &val - case "HugePages_Rsvd:": - m.HugePagesRsvd = &val - case "HugePages_Surp:": - m.HugePagesSurp = &val - case "Hugepagesize:": - m.Hugepagesize = &val - m.HugepagesizeBytes = &valBytes - case "DirectMap4k:": - m.DirectMap4k = &val - m.DirectMap4kBytes = &valBytes - case "DirectMap2M:": - m.DirectMap2M = &val - m.DirectMap2MBytes = &valBytes - case "DirectMap1G:": - m.DirectMap1G = &val - m.DirectMap1GBytes = &valBytes - } - } - - return &m, nil -} diff --git a/vendor/github.com/prometheus/procfs/mountinfo.go b/vendor/github.com/prometheus/procfs/mountinfo.go deleted file mode 100644 index 388ebf396..000000000 --- a/vendor/github.com/prometheus/procfs/mountinfo.go +++ /dev/null @@ -1,180 +0,0 @@ -// Copyright 2019 The Prometheus Authors -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package procfs - -import ( - "bufio" - "bytes" - "fmt" - "strconv" - "strings" - - "github.com/prometheus/procfs/internal/util" -) - -// A MountInfo is a type that describes the details, options -// for each mount, parsed from /proc/self/mountinfo. -// The fields described in each entry of /proc/self/mountinfo -// is described in the following man page. -// http://man7.org/linux/man-pages/man5/proc.5.html -type MountInfo struct { - // Unique ID for the mount - MountID int - // The ID of the parent mount - ParentID int - // The value of `st_dev` for the files on this FS - MajorMinorVer string - // The pathname of the directory in the FS that forms - // the root for this mount - Root string - // The pathname of the mount point relative to the root - MountPoint string - // Mount options - Options map[string]string - // Zero or more optional fields - OptionalFields map[string]string - // The Filesystem type - FSType string - // FS specific information or "none" - Source string - // Superblock options - SuperOptions map[string]string -} - -// Reads each line of the mountinfo file, and returns a list of formatted MountInfo structs. 
-func parseMountInfo(info []byte) ([]*MountInfo, error) { - mounts := []*MountInfo{} - scanner := bufio.NewScanner(bytes.NewReader(info)) - for scanner.Scan() { - mountString := scanner.Text() - parsedMounts, err := parseMountInfoString(mountString) - if err != nil { - return nil, err - } - mounts = append(mounts, parsedMounts) - } - - err := scanner.Err() - return mounts, err -} - -// Parses a mountinfo file line, and converts it to a MountInfo struct. -// An important check here is to see if the hyphen separator, as if it does not exist, -// it means that the line is malformed. -func parseMountInfoString(mountString string) (*MountInfo, error) { - var err error - - mountInfo := strings.Split(mountString, " ") - mountInfoLength := len(mountInfo) - if mountInfoLength < 10 { - return nil, fmt.Errorf("%w: Too few fields in mount string: %s", ErrFileParse, mountString) - } - - if mountInfo[mountInfoLength-4] != "-" { - return nil, fmt.Errorf("%w: couldn't find separator in expected field: %s", ErrFileParse, mountInfo[mountInfoLength-4]) - } - - mount := &MountInfo{ - MajorMinorVer: mountInfo[2], - Root: mountInfo[3], - MountPoint: mountInfo[4], - Options: mountOptionsParser(mountInfo[5]), - OptionalFields: nil, - FSType: mountInfo[mountInfoLength-3], - Source: mountInfo[mountInfoLength-2], - SuperOptions: mountOptionsParser(mountInfo[mountInfoLength-1]), - } - - mount.MountID, err = strconv.Atoi(mountInfo[0]) - if err != nil { - return nil, fmt.Errorf("%w: mount ID: %q", ErrFileParse, mount.MountID) - } - mount.ParentID, err = strconv.Atoi(mountInfo[1]) - if err != nil { - return nil, fmt.Errorf("%w: parent ID: %q", ErrFileParse, mount.ParentID) - } - // Has optional fields, which is a space separated list of values. - // Example: shared:2 master:7 - if mountInfo[6] != "" { - mount.OptionalFields, err = mountOptionsParseOptionalFields(mountInfo[6 : mountInfoLength-4]) - if err != nil { - return nil, fmt.Errorf("%s: %w", ErrFileParse, err) - } - } - return mount, nil -} - -// mountOptionsIsValidField checks a string against a valid list of optional fields keys. -func mountOptionsIsValidField(s string) bool { - switch s { - case - "shared", - "master", - "propagate_from", - "unbindable": - return true - } - return false -} - -// mountOptionsParseOptionalFields parses a list of optional fields strings into a double map of strings. -func mountOptionsParseOptionalFields(o []string) (map[string]string, error) { - optionalFields := make(map[string]string) - for _, field := range o { - optionSplit := strings.SplitN(field, ":", 2) - value := "" - if len(optionSplit) == 2 { - value = optionSplit[1] - } - if mountOptionsIsValidField(optionSplit[0]) { - optionalFields[optionSplit[0]] = value - } - } - return optionalFields, nil -} - -// mountOptionsParser parses the mount options, superblock options. -func mountOptionsParser(mountOptions string) map[string]string { - opts := make(map[string]string) - options := strings.Split(mountOptions, ",") - for _, opt := range options { - splitOption := strings.Split(opt, "=") - if len(splitOption) < 2 { - key := splitOption[0] - opts[key] = "" - } else { - key, value := splitOption[0], splitOption[1] - opts[key] = value - } - } - return opts -} - -// GetMounts retrieves mountinfo information from `/proc/self/mountinfo`. 
-func GetMounts() ([]*MountInfo, error) { - data, err := util.ReadFileNoStat("/proc/self/mountinfo") - if err != nil { - return nil, err - } - return parseMountInfo(data) -} - -// GetProcMounts retrieves mountinfo information from a processes' `/proc//mountinfo`. -func GetProcMounts(pid int) ([]*MountInfo, error) { - data, err := util.ReadFileNoStat(fmt.Sprintf("/proc/%d/mountinfo", pid)) - if err != nil { - return nil, err - } - return parseMountInfo(data) -} diff --git a/vendor/github.com/prometheus/procfs/mountstats.go b/vendor/github.com/prometheus/procfs/mountstats.go deleted file mode 100644 index 9d8af6db7..000000000 --- a/vendor/github.com/prometheus/procfs/mountstats.go +++ /dev/null @@ -1,712 +0,0 @@ -// Copyright 2018 The Prometheus Authors -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package procfs - -// While implementing parsing of /proc/[pid]/mountstats, this blog was used -// heavily as a reference: -// https://utcc.utoronto.ca/~cks/space/blog/linux/NFSMountstatsIndex -// -// Special thanks to Chris Siebenmann for all of his posts explaining the -// various statistics available for NFS. - -import ( - "bufio" - "fmt" - "io" - "strconv" - "strings" - "time" -) - -// Constants shared between multiple functions. -const ( - deviceEntryLen = 8 - - fieldBytesLen = 8 - fieldEventsLen = 27 - - statVersion10 = "1.0" - statVersion11 = "1.1" - - fieldTransport10TCPLen = 10 - fieldTransport10UDPLen = 7 - - fieldTransport11TCPLen = 13 - fieldTransport11UDPLen = 10 - - // kernel version >= 4.14 MaxLen - // See: https://elixir.bootlin.com/linux/v6.4.8/source/net/sunrpc/xprtrdma/xprt_rdma.h#L393 - fieldTransport11RDMAMaxLen = 28 - - // kernel version <= 4.2 MinLen - // See: https://elixir.bootlin.com/linux/v4.2.8/source/net/sunrpc/xprtrdma/xprt_rdma.h#L331 - fieldTransport11RDMAMinLen = 20 -) - -// A Mount is a device mount parsed from /proc/[pid]/mountstats. -type Mount struct { - // Name of the device. - Device string - // The mount point of the device. - Mount string - // The filesystem type used by the device. - Type string - // If available additional statistics related to this Mount. - // Use a type assertion to determine if additional statistics are available. - Stats MountStats -} - -// A MountStats is a type which contains detailed statistics for a specific -// type of Mount. -type MountStats interface { - mountStats() -} - -// A MountStatsNFS is a MountStats implementation for NFSv3 and v4 mounts. -type MountStatsNFS struct { - // The version of statistics provided. - StatVersion string - // The mount options of the NFS mount. - Opts map[string]string - // The age of the NFS mount. - Age time.Duration - // Statistics related to byte counters for various operations. - Bytes NFSBytesStats - // Statistics related to various NFS event occurrences. - Events NFSEventsStats - // Statistics broken down by filesystem operation. - Operations []NFSOperationStats - // Statistics about the NFS RPC transport. - Transport NFSTransportStats -} - -// mountStats implements MountStats. 
-func (m MountStatsNFS) mountStats() {} - -// A NFSBytesStats contains statistics about the number of bytes read and written -// by an NFS client to and from an NFS server. -type NFSBytesStats struct { - // Number of bytes read using the read() syscall. - Read uint64 - // Number of bytes written using the write() syscall. - Write uint64 - // Number of bytes read using the read() syscall in O_DIRECT mode. - DirectRead uint64 - // Number of bytes written using the write() syscall in O_DIRECT mode. - DirectWrite uint64 - // Number of bytes read from the NFS server, in total. - ReadTotal uint64 - // Number of bytes written to the NFS server, in total. - WriteTotal uint64 - // Number of pages read directly via mmap()'d files. - ReadPages uint64 - // Number of pages written directly via mmap()'d files. - WritePages uint64 -} - -// A NFSEventsStats contains statistics about NFS event occurrences. -type NFSEventsStats struct { - // Number of times cached inode attributes are re-validated from the server. - InodeRevalidate uint64 - // Number of times cached dentry nodes are re-validated from the server. - DnodeRevalidate uint64 - // Number of times an inode cache is cleared. - DataInvalidate uint64 - // Number of times cached inode attributes are invalidated. - AttributeInvalidate uint64 - // Number of times files or directories have been open()'d. - VFSOpen uint64 - // Number of times a directory lookup has occurred. - VFSLookup uint64 - // Number of times permissions have been checked. - VFSAccess uint64 - // Number of updates (and potential writes) to pages. - VFSUpdatePage uint64 - // Number of pages read directly via mmap()'d files. - VFSReadPage uint64 - // Number of times a group of pages have been read. - VFSReadPages uint64 - // Number of pages written directly via mmap()'d files. - VFSWritePage uint64 - // Number of times a group of pages have been written. - VFSWritePages uint64 - // Number of times directory entries have been read with getdents(). - VFSGetdents uint64 - // Number of times attributes have been set on inodes. - VFSSetattr uint64 - // Number of pending writes that have been forcefully flushed to the server. - VFSFlush uint64 - // Number of times fsync() has been called on directories and files. - VFSFsync uint64 - // Number of times locking has been attempted on a file. - VFSLock uint64 - // Number of times files have been closed and released. - VFSFileRelease uint64 - // Unknown. Possibly unused. - CongestionWait uint64 - // Number of times files have been truncated. - Truncation uint64 - // Number of times a file has been grown due to writes beyond its existing end. - WriteExtension uint64 - // Number of times a file was removed while still open by another process. - SillyRename uint64 - // Number of times the NFS server gave less data than expected while reading. - ShortRead uint64 - // Number of times the NFS server wrote less data than expected while writing. - ShortWrite uint64 - // Number of times the NFS server indicated EJUKEBOX; retrieving data from - // offline storage. - JukeboxDelay uint64 - // Number of NFS v4.1+ pNFS reads. - PNFSRead uint64 - // Number of NFS v4.1+ pNFS writes. - PNFSWrite uint64 -} - -// A NFSOperationStats contains statistics for a single operation. -type NFSOperationStats struct { - // The name of the operation. - Operation string - // Number of requests performed for this operation. - Requests uint64 - // Number of times an actual RPC request has been transmitted for this operation. 
- Transmissions uint64 - // Number of times a request has had a major timeout. - MajorTimeouts uint64 - // Number of bytes sent for this operation, including RPC headers and payload. - BytesSent uint64 - // Number of bytes received for this operation, including RPC headers and payload. - BytesReceived uint64 - // Duration all requests spent queued for transmission before they were sent. - CumulativeQueueMilliseconds uint64 - // Duration it took to get a reply back after the request was transmitted. - CumulativeTotalResponseMilliseconds uint64 - // Duration from when a request was enqueued to when it was completely handled. - CumulativeTotalRequestMilliseconds uint64 - // The average time from the point the client sends RPC requests until it receives the response. - AverageRTTMilliseconds float64 - // The count of operations that complete with tk_status < 0. These statuses usually indicate error conditions. - Errors uint64 -} - -// A NFSTransportStats contains statistics for the NFS mount RPC requests and -// responses. -type NFSTransportStats struct { - // The transport protocol used for the NFS mount. - Protocol string - // The local port used for the NFS mount. - Port uint64 - // Number of times the client has had to establish a connection from scratch - // to the NFS server. - Bind uint64 - // Number of times the client has made a TCP connection to the NFS server. - Connect uint64 - // Duration (in jiffies, a kernel internal unit of time) the NFS mount has - // spent waiting for connections to the server to be established. - ConnectIdleTime uint64 - // Duration since the NFS mount last saw any RPC traffic. - IdleTimeSeconds uint64 - // Number of RPC requests for this mount sent to the NFS server. - Sends uint64 - // Number of RPC responses for this mount received from the NFS server. - Receives uint64 - // Number of times the NFS server sent a response with a transaction ID - // unknown to this client. - BadTransactionIDs uint64 - // A running counter, incremented on each request as the current difference - // ebetween sends and receives. - CumulativeActiveRequests uint64 - // A running counter, incremented on each request by the current backlog - // queue size. - CumulativeBacklog uint64 - - // Stats below only available with stat version 1.1. - - // Maximum number of simultaneously active RPC requests ever used. - MaximumRPCSlotsUsed uint64 - // A running counter, incremented on each request as the current size of the - // sending queue. - CumulativeSendingQueue uint64 - // A running counter, incremented on each request as the current size of the - // pending queue. - CumulativePendingQueue uint64 - - // Stats below only available with stat version 1.1. - // Transport over RDMA - - // accessed when sending a call - ReadChunkCount uint64 - WriteChunkCount uint64 - ReplyChunkCount uint64 - TotalRdmaRequest uint64 - - // rarely accessed error counters - PullupCopyCount uint64 - HardwayRegisterCount uint64 - FailedMarshalCount uint64 - BadReplyCount uint64 - MrsRecovered uint64 - MrsOrphaned uint64 - MrsAllocated uint64 - EmptySendctxQ uint64 - - // accessed when receiving a reply - TotalRdmaReply uint64 - FixupCopyCount uint64 - ReplyWaitsForSend uint64 - LocalInvNeeded uint64 - NomsgCallCount uint64 - BcallCount uint64 -} - -// parseMountStats parses a /proc/[pid]/mountstats file and returns a slice -// of Mount structures containing detailed information about each mount. -// If available, statistics for each mount are parsed as well. 
-func parseMountStats(r io.Reader) ([]*Mount, error) { - const ( - device = "device" - statVersionPrefix = "statvers=" - - nfs3Type = "nfs" - nfs4Type = "nfs4" - ) - - var mounts []*Mount - - s := bufio.NewScanner(r) - for s.Scan() { - // Only look for device entries in this function - ss := strings.Fields(string(s.Bytes())) - if len(ss) == 0 || ss[0] != device { - continue - } - - m, err := parseMount(ss) - if err != nil { - return nil, err - } - - // Does this mount also possess statistics information? - if len(ss) > deviceEntryLen { - // Only NFSv3 and v4 are supported for parsing statistics - if m.Type != nfs3Type && m.Type != nfs4Type { - return nil, fmt.Errorf("%w: Cannot parse MountStats for %q", ErrFileParse, m.Type) - } - - statVersion := strings.TrimPrefix(ss[8], statVersionPrefix) - - stats, err := parseMountStatsNFS(s, statVersion) - if err != nil { - return nil, err - } - - m.Stats = stats - } - - mounts = append(mounts, m) - } - - return mounts, s.Err() -} - -// parseMount parses an entry in /proc/[pid]/mountstats in the format: -// -// device [device] mounted on [mount] with fstype [type] -func parseMount(ss []string) (*Mount, error) { - if len(ss) < deviceEntryLen { - return nil, fmt.Errorf("%w: Invalid device %q", ErrFileParse, ss) - } - - // Check for specific words appearing at specific indices to ensure - // the format is consistent with what we expect - format := []struct { - i int - s string - }{ - {i: 0, s: "device"}, - {i: 2, s: "mounted"}, - {i: 3, s: "on"}, - {i: 5, s: "with"}, - {i: 6, s: "fstype"}, - } - - for _, f := range format { - if ss[f.i] != f.s { - return nil, fmt.Errorf("%w: Invalid device %q", ErrFileParse, ss) - } - } - - return &Mount{ - Device: ss[1], - Mount: ss[4], - Type: ss[7], - }, nil -} - -// parseMountStatsNFS parses a MountStatsNFS by scanning additional information -// related to NFS statistics. 
-func parseMountStatsNFS(s *bufio.Scanner, statVersion string) (*MountStatsNFS, error) { - // Field indicators for parsing specific types of data - const ( - fieldOpts = "opts:" - fieldAge = "age:" - fieldBytes = "bytes:" - fieldEvents = "events:" - fieldPerOpStats = "per-op" - fieldTransport = "xprt:" - ) - - stats := &MountStatsNFS{ - StatVersion: statVersion, - } - - for s.Scan() { - ss := strings.Fields(string(s.Bytes())) - if len(ss) == 0 { - break - } - - switch ss[0] { - case fieldOpts: - if len(ss) < 2 { - return nil, fmt.Errorf("%w: Incomplete information for NFS stats: %v", ErrFileParse, ss) - } - if stats.Opts == nil { - stats.Opts = map[string]string{} - } - for _, opt := range strings.Split(ss[1], ",") { - split := strings.Split(opt, "=") - if len(split) == 2 { - stats.Opts[split[0]] = split[1] - } else { - stats.Opts[opt] = "" - } - } - case fieldAge: - if len(ss) < 2 { - return nil, fmt.Errorf("%w: Incomplete information for NFS stats: %v", ErrFileParse, ss) - } - // Age integer is in seconds - d, err := time.ParseDuration(ss[1] + "s") - if err != nil { - return nil, err - } - - stats.Age = d - case fieldBytes: - if len(ss) < 2 { - return nil, fmt.Errorf("%w: Incomplete information for NFS stats: %v", ErrFileParse, ss) - } - bstats, err := parseNFSBytesStats(ss[1:]) - if err != nil { - return nil, err - } - - stats.Bytes = *bstats - case fieldEvents: - if len(ss) < 2 { - return nil, fmt.Errorf("%w: Incomplete information for NFS events: %v", ErrFileParse, ss) - } - estats, err := parseNFSEventsStats(ss[1:]) - if err != nil { - return nil, err - } - - stats.Events = *estats - case fieldTransport: - if len(ss) < 3 { - return nil, fmt.Errorf("%w: Incomplete information for NFS transport stats: %v", ErrFileParse, ss) - } - - tstats, err := parseNFSTransportStats(ss[1:], statVersion) - if err != nil { - return nil, err - } - - stats.Transport = *tstats - } - - // When encountering "per-operation statistics", we must break this - // loop and parse them separately to ensure we can terminate parsing - // before reaching another device entry; hence why this 'if' statement - // is not just another switch case - if ss[0] == fieldPerOpStats { - break - } - } - - if err := s.Err(); err != nil { - return nil, err - } - - // NFS per-operation stats appear last before the next device entry - perOpStats, err := parseNFSOperationStats(s) - if err != nil { - return nil, err - } - - stats.Operations = perOpStats - - return stats, nil -} - -// parseNFSBytesStats parses a NFSBytesStats line using an input set of -// integer fields. -func parseNFSBytesStats(ss []string) (*NFSBytesStats, error) { - if len(ss) != fieldBytesLen { - return nil, fmt.Errorf("%w: Invalid NFS bytes stats: %v", ErrFileParse, ss) - } - - ns := make([]uint64, 0, fieldBytesLen) - for _, s := range ss { - n, err := strconv.ParseUint(s, 10, 64) - if err != nil { - return nil, err - } - - ns = append(ns, n) - } - - return &NFSBytesStats{ - Read: ns[0], - Write: ns[1], - DirectRead: ns[2], - DirectWrite: ns[3], - ReadTotal: ns[4], - WriteTotal: ns[5], - ReadPages: ns[6], - WritePages: ns[7], - }, nil -} - -// parseNFSEventsStats parses a NFSEventsStats line using an input set of -// integer fields. 
-func parseNFSEventsStats(ss []string) (*NFSEventsStats, error) { - if len(ss) != fieldEventsLen { - return nil, fmt.Errorf("%w: invalid NFS events stats: %v", ErrFileParse, ss) - } - - ns := make([]uint64, 0, fieldEventsLen) - for _, s := range ss { - n, err := strconv.ParseUint(s, 10, 64) - if err != nil { - return nil, err - } - - ns = append(ns, n) - } - - return &NFSEventsStats{ - InodeRevalidate: ns[0], - DnodeRevalidate: ns[1], - DataInvalidate: ns[2], - AttributeInvalidate: ns[3], - VFSOpen: ns[4], - VFSLookup: ns[5], - VFSAccess: ns[6], - VFSUpdatePage: ns[7], - VFSReadPage: ns[8], - VFSReadPages: ns[9], - VFSWritePage: ns[10], - VFSWritePages: ns[11], - VFSGetdents: ns[12], - VFSSetattr: ns[13], - VFSFlush: ns[14], - VFSFsync: ns[15], - VFSLock: ns[16], - VFSFileRelease: ns[17], - CongestionWait: ns[18], - Truncation: ns[19], - WriteExtension: ns[20], - SillyRename: ns[21], - ShortRead: ns[22], - ShortWrite: ns[23], - JukeboxDelay: ns[24], - PNFSRead: ns[25], - PNFSWrite: ns[26], - }, nil -} - -// parseNFSOperationStats parses a slice of NFSOperationStats by scanning -// additional information about per-operation statistics until an empty -// line is reached. -func parseNFSOperationStats(s *bufio.Scanner) ([]NFSOperationStats, error) { - const ( - // Minimum number of expected fields in each per-operation statistics set - minFields = 9 - ) - - var ops []NFSOperationStats - - for s.Scan() { - ss := strings.Fields(string(s.Bytes())) - if len(ss) == 0 { - // Must break when reading a blank line after per-operation stats to - // enable top-level function to parse the next device entry - break - } - - if len(ss) < minFields { - return nil, fmt.Errorf("%w: invalid NFS per-operations stats: %v", ErrFileParse, ss) - } - - // Skip string operation name for integers - ns := make([]uint64, 0, minFields-1) - for _, st := range ss[1:] { - n, err := strconv.ParseUint(st, 10, 64) - if err != nil { - return nil, err - } - - ns = append(ns, n) - } - opStats := NFSOperationStats{ - Operation: strings.TrimSuffix(ss[0], ":"), - Requests: ns[0], - Transmissions: ns[1], - MajorTimeouts: ns[2], - BytesSent: ns[3], - BytesReceived: ns[4], - CumulativeQueueMilliseconds: ns[5], - CumulativeTotalResponseMilliseconds: ns[6], - CumulativeTotalRequestMilliseconds: ns[7], - } - if ns[0] != 0 { - opStats.AverageRTTMilliseconds = float64(ns[6]) / float64(ns[0]) - } - - if len(ns) > 8 { - opStats.Errors = ns[8] - } - - ops = append(ops, opStats) - } - - return ops, s.Err() -} - -// parseNFSTransportStats parses a NFSTransportStats line using an input set of -// integer fields matched to a specific stats version. -func parseNFSTransportStats(ss []string, statVersion string) (*NFSTransportStats, error) { - // Extract the protocol field. 
It is the only string value in the line - protocol := ss[0] - ss = ss[1:] - - switch statVersion { - case statVersion10: - var expectedLength int - if protocol == "tcp" { - expectedLength = fieldTransport10TCPLen - } else if protocol == "udp" { - expectedLength = fieldTransport10UDPLen - } else { - return nil, fmt.Errorf("%w: Invalid NFS protocol \"%s\" in stats 1.0 statement: %v", ErrFileParse, protocol, ss) - } - if len(ss) != expectedLength { - return nil, fmt.Errorf("%w: Invalid NFS transport stats 1.0 statement: %v", ErrFileParse, ss) - } - case statVersion11: - var expectedLength int - if protocol == "tcp" { - expectedLength = fieldTransport11TCPLen - } else if protocol == "udp" { - expectedLength = fieldTransport11UDPLen - } else if protocol == "rdma" { - expectedLength = fieldTransport11RDMAMinLen - } else { - return nil, fmt.Errorf("%w: invalid NFS protocol \"%s\" in stats 1.1 statement: %v", ErrFileParse, protocol, ss) - } - if (len(ss) != expectedLength && (protocol == "tcp" || protocol == "udp")) || - (protocol == "rdma" && len(ss) < expectedLength) { - return nil, fmt.Errorf("%w: invalid NFS transport stats 1.1 statement: %v, protocol: %v", ErrFileParse, ss, protocol) - } - default: - return nil, fmt.Errorf("%s: Unrecognized NFS transport stats version: %q, protocol: %v", ErrFileParse, statVersion, protocol) - } - - // Allocate enough for v1.1 stats since zero value for v1.1 stats will be okay - // in a v1.0 response. Since the stat length is bigger for TCP stats, we use - // the TCP length here. - // - // Note: slice length must be set to length of v1.1 stats to avoid a panic when - // only v1.0 stats are present. - // See: https://github.com/prometheus/node_exporter/issues/571. - // - // Note: NFS Over RDMA slice length is fieldTransport11RDMAMaxLen - ns := make([]uint64, fieldTransport11RDMAMaxLen+3) - for i, s := range ss { - n, err := strconv.ParseUint(s, 10, 64) - if err != nil { - return nil, err - } - - ns[i] = n - } - - // The fields differ depending on the transport protocol (TCP or UDP) - // From https://utcc.utoronto.ca/%7Ecks/space/blog/linux/NFSMountstatsXprt - // - // For the udp RPC transport there is no connection count, connect idle time, - // or idle time (fields #3, #4, and #5); all other fields are the same. So - // we set them to 0 here. - if protocol == "udp" { - ns = append(ns[:2], append(make([]uint64, 3), ns[2:]...)...) - } else if protocol == "tcp" { - ns = append(ns[:fieldTransport11TCPLen], make([]uint64, fieldTransport11RDMAMaxLen-fieldTransport11TCPLen+3)...) - } else if protocol == "rdma" { - ns = append(ns[:fieldTransport10TCPLen], append(make([]uint64, 3), ns[fieldTransport10TCPLen:]...)...) 
- } - - return &NFSTransportStats{ - // NFS xprt over tcp or udp - Protocol: protocol, - Port: ns[0], - Bind: ns[1], - Connect: ns[2], - ConnectIdleTime: ns[3], - IdleTimeSeconds: ns[4], - Sends: ns[5], - Receives: ns[6], - BadTransactionIDs: ns[7], - CumulativeActiveRequests: ns[8], - CumulativeBacklog: ns[9], - - // NFS xprt over tcp or udp - // And statVersion 1.1 - MaximumRPCSlotsUsed: ns[10], - CumulativeSendingQueue: ns[11], - CumulativePendingQueue: ns[12], - - // NFS xprt over rdma - // And stat Version 1.1 - ReadChunkCount: ns[13], - WriteChunkCount: ns[14], - ReplyChunkCount: ns[15], - TotalRdmaRequest: ns[16], - PullupCopyCount: ns[17], - HardwayRegisterCount: ns[18], - FailedMarshalCount: ns[19], - BadReplyCount: ns[20], - MrsRecovered: ns[21], - MrsOrphaned: ns[22], - MrsAllocated: ns[23], - EmptySendctxQ: ns[24], - TotalRdmaReply: ns[25], - FixupCopyCount: ns[26], - ReplyWaitsForSend: ns[27], - LocalInvNeeded: ns[28], - NomsgCallCount: ns[29], - BcallCount: ns[30], - }, nil -} diff --git a/vendor/github.com/prometheus/procfs/net_conntrackstat.go b/vendor/github.com/prometheus/procfs/net_conntrackstat.go deleted file mode 100644 index fdfa45611..000000000 --- a/vendor/github.com/prometheus/procfs/net_conntrackstat.go +++ /dev/null @@ -1,118 +0,0 @@ -// Copyright 2020 The Prometheus Authors -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package procfs - -import ( - "bufio" - "bytes" - "fmt" - "io" - "strings" - - "github.com/prometheus/procfs/internal/util" -) - -// A ConntrackStatEntry represents one line from net/stat/nf_conntrack -// and contains netfilter conntrack statistics at one CPU core. -type ConntrackStatEntry struct { - Entries uint64 - Searched uint64 - Found uint64 - New uint64 - Invalid uint64 - Ignore uint64 - Delete uint64 - DeleteList uint64 - Insert uint64 - InsertFailed uint64 - Drop uint64 - EarlyDrop uint64 - SearchRestart uint64 -} - -// ConntrackStat retrieves netfilter's conntrack statistics, split by CPU cores. -func (fs FS) ConntrackStat() ([]ConntrackStatEntry, error) { - return readConntrackStat(fs.proc.Path("net", "stat", "nf_conntrack")) -} - -// Parses a slice of ConntrackStatEntries from the given filepath. -func readConntrackStat(path string) ([]ConntrackStatEntry, error) { - // This file is small and can be read with one syscall. - b, err := util.ReadFileNoStat(path) - if err != nil { - // Do not wrap this error so the caller can detect os.IsNotExist and - // similar conditions. - return nil, err - } - - stat, err := parseConntrackStat(bytes.NewReader(b)) - if err != nil { - return nil, fmt.Errorf("%s: Cannot read file: %v: %w", ErrFileRead, path, err) - } - - return stat, nil -} - -// Reads the contents of a conntrack statistics file and parses a slice of ConntrackStatEntries. 
-func parseConntrackStat(r io.Reader) ([]ConntrackStatEntry, error) { - var entries []ConntrackStatEntry - - scanner := bufio.NewScanner(r) - scanner.Scan() - for scanner.Scan() { - fields := strings.Fields(scanner.Text()) - conntrackEntry, err := parseConntrackStatEntry(fields) - if err != nil { - return nil, err - } - entries = append(entries, *conntrackEntry) - } - - return entries, nil -} - -// Parses a ConntrackStatEntry from given array of fields. -func parseConntrackStatEntry(fields []string) (*ConntrackStatEntry, error) { - entries, err := util.ParseHexUint64s(fields) - if err != nil { - return nil, fmt.Errorf("%s: Cannot parse entry: %d: %w", ErrFileParse, entries, err) - } - numEntries := len(entries) - if numEntries < 16 || numEntries > 17 { - return nil, - fmt.Errorf("%w: invalid conntrackstat entry, invalid number of fields: %d", ErrFileParse, numEntries) - } - - stats := &ConntrackStatEntry{ - Entries: *entries[0], - Searched: *entries[1], - Found: *entries[2], - New: *entries[3], - Invalid: *entries[4], - Ignore: *entries[5], - Delete: *entries[6], - DeleteList: *entries[7], - Insert: *entries[8], - InsertFailed: *entries[9], - Drop: *entries[10], - EarlyDrop: *entries[11], - } - - // Ignore missing search_restart on Linux < 2.6.35. - if numEntries == 17 { - stats.SearchRestart = *entries[16] - } - - return stats, nil -} diff --git a/vendor/github.com/prometheus/procfs/net_dev.go b/vendor/github.com/prometheus/procfs/net_dev.go deleted file mode 100644 index e66208aa0..000000000 --- a/vendor/github.com/prometheus/procfs/net_dev.go +++ /dev/null @@ -1,205 +0,0 @@ -// Copyright 2018 The Prometheus Authors -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package procfs - -import ( - "bufio" - "errors" - "os" - "sort" - "strconv" - "strings" -) - -// NetDevLine is single line parsed from /proc/net/dev or /proc/[pid]/net/dev. -type NetDevLine struct { - Name string `json:"name"` // The name of the interface. - RxBytes uint64 `json:"rx_bytes"` // Cumulative count of bytes received. - RxPackets uint64 `json:"rx_packets"` // Cumulative count of packets received. - RxErrors uint64 `json:"rx_errors"` // Cumulative count of receive errors encountered. - RxDropped uint64 `json:"rx_dropped"` // Cumulative count of packets dropped while receiving. - RxFIFO uint64 `json:"rx_fifo"` // Cumulative count of FIFO buffer errors. - RxFrame uint64 `json:"rx_frame"` // Cumulative count of packet framing errors. - RxCompressed uint64 `json:"rx_compressed"` // Cumulative count of compressed packets received by the device driver. - RxMulticast uint64 `json:"rx_multicast"` // Cumulative count of multicast frames received by the device driver. - TxBytes uint64 `json:"tx_bytes"` // Cumulative count of bytes transmitted. - TxPackets uint64 `json:"tx_packets"` // Cumulative count of packets transmitted. - TxErrors uint64 `json:"tx_errors"` // Cumulative count of transmit errors encountered. - TxDropped uint64 `json:"tx_dropped"` // Cumulative count of packets dropped while transmitting. 
- TxFIFO uint64 `json:"tx_fifo"` // Cumulative count of FIFO buffer errors. - TxCollisions uint64 `json:"tx_collisions"` // Cumulative count of collisions detected on the interface. - TxCarrier uint64 `json:"tx_carrier"` // Cumulative count of carrier losses detected by the device driver. - TxCompressed uint64 `json:"tx_compressed"` // Cumulative count of compressed packets transmitted by the device driver. -} - -// NetDev is parsed from /proc/net/dev or /proc/[pid]/net/dev. The map keys -// are interface names. -type NetDev map[string]NetDevLine - -// NetDev returns kernel/system statistics read from /proc/net/dev. -func (fs FS) NetDev() (NetDev, error) { - return newNetDev(fs.proc.Path("net/dev")) -} - -// NetDev returns kernel/system statistics read from /proc/[pid]/net/dev. -func (p Proc) NetDev() (NetDev, error) { - return newNetDev(p.path("net/dev")) -} - -// newNetDev creates a new NetDev from the contents of the given file. -func newNetDev(file string) (NetDev, error) { - f, err := os.Open(file) - if err != nil { - return NetDev{}, err - } - defer f.Close() - - netDev := NetDev{} - s := bufio.NewScanner(f) - for n := 0; s.Scan(); n++ { - // Skip the 2 header lines. - if n < 2 { - continue - } - - line, err := netDev.parseLine(s.Text()) - if err != nil { - return netDev, err - } - - netDev[line.Name] = *line - } - - return netDev, s.Err() -} - -// parseLine parses a single line from the /proc/net/dev file. Header lines -// must be filtered prior to calling this method. -func (netDev NetDev) parseLine(rawLine string) (*NetDevLine, error) { - idx := strings.LastIndex(rawLine, ":") - if idx == -1 { - return nil, errors.New("invalid net/dev line, missing colon") - } - fields := strings.Fields(strings.TrimSpace(rawLine[idx+1:])) - - var err error - line := &NetDevLine{} - - // Interface Name - line.Name = strings.TrimSpace(rawLine[:idx]) - if line.Name == "" { - return nil, errors.New("invalid net/dev line, empty interface name") - } - - // RX - line.RxBytes, err = strconv.ParseUint(fields[0], 10, 64) - if err != nil { - return nil, err - } - line.RxPackets, err = strconv.ParseUint(fields[1], 10, 64) - if err != nil { - return nil, err - } - line.RxErrors, err = strconv.ParseUint(fields[2], 10, 64) - if err != nil { - return nil, err - } - line.RxDropped, err = strconv.ParseUint(fields[3], 10, 64) - if err != nil { - return nil, err - } - line.RxFIFO, err = strconv.ParseUint(fields[4], 10, 64) - if err != nil { - return nil, err - } - line.RxFrame, err = strconv.ParseUint(fields[5], 10, 64) - if err != nil { - return nil, err - } - line.RxCompressed, err = strconv.ParseUint(fields[6], 10, 64) - if err != nil { - return nil, err - } - line.RxMulticast, err = strconv.ParseUint(fields[7], 10, 64) - if err != nil { - return nil, err - } - - // TX - line.TxBytes, err = strconv.ParseUint(fields[8], 10, 64) - if err != nil { - return nil, err - } - line.TxPackets, err = strconv.ParseUint(fields[9], 10, 64) - if err != nil { - return nil, err - } - line.TxErrors, err = strconv.ParseUint(fields[10], 10, 64) - if err != nil { - return nil, err - } - line.TxDropped, err = strconv.ParseUint(fields[11], 10, 64) - if err != nil { - return nil, err - } - line.TxFIFO, err = strconv.ParseUint(fields[12], 10, 64) - if err != nil { - return nil, err - } - line.TxCollisions, err = strconv.ParseUint(fields[13], 10, 64) - if err != nil { - return nil, err - } - line.TxCarrier, err = strconv.ParseUint(fields[14], 10, 64) - if err != nil { - return nil, err - } - line.TxCompressed, err = 
strconv.ParseUint(fields[15], 10, 64) - if err != nil { - return nil, err - } - - return line, nil -} - -// Total aggregates the values across interfaces and returns a new NetDevLine. -// The Name field will be a sorted comma separated list of interface names. -func (netDev NetDev) Total() NetDevLine { - total := NetDevLine{} - - names := make([]string, 0, len(netDev)) - for _, ifc := range netDev { - names = append(names, ifc.Name) - total.RxBytes += ifc.RxBytes - total.RxPackets += ifc.RxPackets - total.RxErrors += ifc.RxErrors - total.RxDropped += ifc.RxDropped - total.RxFIFO += ifc.RxFIFO - total.RxFrame += ifc.RxFrame - total.RxCompressed += ifc.RxCompressed - total.RxMulticast += ifc.RxMulticast - total.TxBytes += ifc.TxBytes - total.TxPackets += ifc.TxPackets - total.TxErrors += ifc.TxErrors - total.TxDropped += ifc.TxDropped - total.TxFIFO += ifc.TxFIFO - total.TxCollisions += ifc.TxCollisions - total.TxCarrier += ifc.TxCarrier - total.TxCompressed += ifc.TxCompressed - } - sort.Strings(names) - total.Name = strings.Join(names, ", ") - - return total -} diff --git a/vendor/github.com/prometheus/procfs/net_ip_socket.go b/vendor/github.com/prometheus/procfs/net_ip_socket.go deleted file mode 100644 index ba7d9caa0..000000000 --- a/vendor/github.com/prometheus/procfs/net_ip_socket.go +++ /dev/null @@ -1,248 +0,0 @@ -// Copyright 2020 The Prometheus Authors -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package procfs - -import ( - "bufio" - "encoding/hex" - "fmt" - "io" - "net" - "os" - "strconv" - "strings" -) - -const ( - // readLimit is used by io.LimitReader while reading the content of the - // /proc/net/udp{,6} files. The number of lines inside such a file is dynamic - // as each line represents a single used socket. - // In theory, the number of available sockets is 65535 (2^16 - 1) per IP. - // With e.g. 150 Byte per line and the maximum number of 65535, - // the reader needs to handle 150 Byte * 65535 =~ 10 MB for a single IP. - readLimit = 4294967296 // Byte -> 4 GiB -) - -// This contains generic data structures for both udp and tcp sockets. -type ( - // NetIPSocket represents the contents of /proc/net/{t,u}dp{,6} file without the header. - NetIPSocket []*netIPSocketLine - - // NetIPSocketSummary provides already computed values like the total queue lengths or - // the total number of used sockets. In contrast to NetIPSocket it does not collect - // the parsed lines into a slice. - NetIPSocketSummary struct { - // TxQueueLength shows the total queue length of all parsed tx_queue lengths. - TxQueueLength uint64 - // RxQueueLength shows the total queue length of all parsed rx_queue lengths. - RxQueueLength uint64 - // UsedSockets shows the total number of parsed lines representing the - // number of used sockets. - UsedSockets uint64 - // Drops shows the total number of dropped packets of all UPD sockets. - Drops *uint64 - } - - // netIPSocketLine represents the fields parsed from a single line - // in /proc/net/{t,u}dp{,6}. 
Fields which are not used by IPSocket are skipped. - // Drops is non-nil for udp{,6}, but nil for tcp{,6}. - // For the proc file format details, see https://linux.die.net/man/5/proc. - netIPSocketLine struct { - Sl uint64 - LocalAddr net.IP - LocalPort uint64 - RemAddr net.IP - RemPort uint64 - St uint64 - TxQueue uint64 - RxQueue uint64 - UID uint64 - Inode uint64 - Drops *uint64 - } -) - -func newNetIPSocket(file string) (NetIPSocket, error) { - f, err := os.Open(file) - if err != nil { - return nil, err - } - defer f.Close() - - var netIPSocket NetIPSocket - isUDP := strings.Contains(file, "udp") - - lr := io.LimitReader(f, readLimit) - s := bufio.NewScanner(lr) - s.Scan() // skip first line with headers - for s.Scan() { - fields := strings.Fields(s.Text()) - line, err := parseNetIPSocketLine(fields, isUDP) - if err != nil { - return nil, err - } - netIPSocket = append(netIPSocket, line) - } - if err := s.Err(); err != nil { - return nil, err - } - return netIPSocket, nil -} - -// newNetIPSocketSummary creates a new NetIPSocket{,6} from the contents of the given file. -func newNetIPSocketSummary(file string) (*NetIPSocketSummary, error) { - f, err := os.Open(file) - if err != nil { - return nil, err - } - defer f.Close() - - var netIPSocketSummary NetIPSocketSummary - var udpPacketDrops uint64 - isUDP := strings.Contains(file, "udp") - - lr := io.LimitReader(f, readLimit) - s := bufio.NewScanner(lr) - s.Scan() // skip first line with headers - for s.Scan() { - fields := strings.Fields(s.Text()) - line, err := parseNetIPSocketLine(fields, isUDP) - if err != nil { - return nil, err - } - netIPSocketSummary.TxQueueLength += line.TxQueue - netIPSocketSummary.RxQueueLength += line.RxQueue - netIPSocketSummary.UsedSockets++ - if isUDP { - udpPacketDrops += *line.Drops - netIPSocketSummary.Drops = &udpPacketDrops - } - } - if err := s.Err(); err != nil { - return nil, err - } - return &netIPSocketSummary, nil -} - -// the /proc/net/{t,u}dp{,6} files are network byte order for ipv4 and for ipv6 the address is four words consisting of four bytes each. In each of those four words the four bytes are written in reverse order. - -func parseIP(hexIP string) (net.IP, error) { - var byteIP []byte - byteIP, err := hex.DecodeString(hexIP) - if err != nil { - return nil, fmt.Errorf("%s: Cannot parse socket field in %q: %w", ErrFileParse, hexIP, err) - } - switch len(byteIP) { - case 4: - return net.IP{byteIP[3], byteIP[2], byteIP[1], byteIP[0]}, nil - case 16: - i := net.IP{ - byteIP[3], byteIP[2], byteIP[1], byteIP[0], - byteIP[7], byteIP[6], byteIP[5], byteIP[4], - byteIP[11], byteIP[10], byteIP[9], byteIP[8], - byteIP[15], byteIP[14], byteIP[13], byteIP[12], - } - return i, nil - default: - return nil, fmt.Errorf("%s: Unable to parse IP %s: %w", ErrFileParse, hexIP, nil) - } -} - -// parseNetIPSocketLine parses a single line, represented by a list of fields. 
-func parseNetIPSocketLine(fields []string, isUDP bool) (*netIPSocketLine, error) { - line := &netIPSocketLine{} - if len(fields) < 10 { - return nil, fmt.Errorf( - "%w: Less than 10 columns found %q", - ErrFileParse, - strings.Join(fields, " "), - ) - } - var err error // parse error - - // sl - s := strings.Split(fields[0], ":") - if len(s) != 2 { - return nil, fmt.Errorf("%w: Unable to parse sl field in line %q", ErrFileParse, fields[0]) - } - - if line.Sl, err = strconv.ParseUint(s[0], 0, 64); err != nil { - return nil, fmt.Errorf("%s: Unable to parse sl field in %q: %w", ErrFileParse, line.Sl, err) - } - // local_address - l := strings.Split(fields[1], ":") - if len(l) != 2 { - return nil, fmt.Errorf("%w: Unable to parse local_address field in %q", ErrFileParse, fields[1]) - } - if line.LocalAddr, err = parseIP(l[0]); err != nil { - return nil, err - } - if line.LocalPort, err = strconv.ParseUint(l[1], 16, 64); err != nil { - return nil, fmt.Errorf("%s: Unable to parse local_address port value line %q: %w", ErrFileParse, line.LocalPort, err) - } - - // remote_address - r := strings.Split(fields[2], ":") - if len(r) != 2 { - return nil, fmt.Errorf("%w: Unable to parse rem_address field in %q", ErrFileParse, fields[1]) - } - if line.RemAddr, err = parseIP(r[0]); err != nil { - return nil, err - } - if line.RemPort, err = strconv.ParseUint(r[1], 16, 64); err != nil { - return nil, fmt.Errorf("%s: Cannot parse rem_address port value in %q: %w", ErrFileParse, line.RemPort, err) - } - - // st - if line.St, err = strconv.ParseUint(fields[3], 16, 64); err != nil { - return nil, fmt.Errorf("%s: Cannot parse st value in %q: %w", ErrFileParse, line.St, err) - } - - // tx_queue and rx_queue - q := strings.Split(fields[4], ":") - if len(q) != 2 { - return nil, fmt.Errorf( - "%w: Missing colon for tx/rx queues in socket line %q", - ErrFileParse, - fields[4], - ) - } - if line.TxQueue, err = strconv.ParseUint(q[0], 16, 64); err != nil { - return nil, fmt.Errorf("%s: Cannot parse tx_queue value in %q: %w", ErrFileParse, line.TxQueue, err) - } - if line.RxQueue, err = strconv.ParseUint(q[1], 16, 64); err != nil { - return nil, fmt.Errorf("%s: Cannot parse trx_queue value in %q: %w", ErrFileParse, line.RxQueue, err) - } - - // uid - if line.UID, err = strconv.ParseUint(fields[7], 0, 64); err != nil { - return nil, fmt.Errorf("%s: Cannot parse UID value in %q: %w", ErrFileParse, line.UID, err) - } - - // inode - if line.Inode, err = strconv.ParseUint(fields[9], 0, 64); err != nil { - return nil, fmt.Errorf("%s: Cannot parse inode value in %q: %w", ErrFileParse, line.Inode, err) - } - - // drops - if isUDP { - drops, err := strconv.ParseUint(fields[12], 0, 64) - if err != nil { - return nil, fmt.Errorf("%s: Cannot parse drops value in %q: %w", ErrFileParse, drops, err) - } - line.Drops = &drops - } - - return line, nil -} diff --git a/vendor/github.com/prometheus/procfs/net_protocols.go b/vendor/github.com/prometheus/procfs/net_protocols.go deleted file mode 100644 index b6c77b709..000000000 --- a/vendor/github.com/prometheus/procfs/net_protocols.go +++ /dev/null @@ -1,180 +0,0 @@ -// Copyright 2020 The Prometheus Authors -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package procfs - -import ( - "bufio" - "bytes" - "fmt" - "strconv" - "strings" - - "github.com/prometheus/procfs/internal/util" -) - -// NetProtocolStats stores the contents from /proc/net/protocols. -type NetProtocolStats map[string]NetProtocolStatLine - -// NetProtocolStatLine contains a single line parsed from /proc/net/protocols. We -// only care about the first six columns as the rest are not likely to change -// and only serve to provide a set of capabilities for each protocol. -type NetProtocolStatLine struct { - Name string // 0 The name of the protocol - Size uint64 // 1 The size, in bytes, of a given protocol structure. e.g. sizeof(struct tcp_sock) or sizeof(struct unix_sock) - Sockets int64 // 2 Number of sockets in use by this protocol - Memory int64 // 3 Number of 4KB pages allocated by all sockets of this protocol - Pressure int // 4 This is either yes, no, or NI (not implemented). For the sake of simplicity we treat NI as not experiencing memory pressure. - MaxHeader uint64 // 5 Protocol specific max header size - Slab bool // 6 Indicates whether or not memory is allocated from the SLAB - ModuleName string // 7 The name of the module that implemented this protocol or "kernel" if not from a module - Capabilities NetProtocolCapabilities -} - -// NetProtocolCapabilities contains a list of capabilities for each protocol. -type NetProtocolCapabilities struct { - Close bool // 8 - Connect bool // 9 - Disconnect bool // 10 - Accept bool // 11 - IoCtl bool // 12 - Init bool // 13 - Destroy bool // 14 - Shutdown bool // 15 - SetSockOpt bool // 16 - GetSockOpt bool // 17 - SendMsg bool // 18 - RecvMsg bool // 19 - SendPage bool // 20 - Bind bool // 21 - BacklogRcv bool // 22 - Hash bool // 23 - UnHash bool // 24 - GetPort bool // 25 - EnterMemoryPressure bool // 26 -} - -// NetProtocols reads stats from /proc/net/protocols and returns a map of -// PortocolStatLine entries. 
As of this writing no official Linux Documentation -// exists, however the source is fairly self-explanatory and the format seems -// stable since its introduction in 2.6.12-rc2 -// Linux 2.6.12-rc2 - https://elixir.bootlin.com/linux/v2.6.12-rc2/source/net/core/sock.c#L1452 -// Linux 5.10 - https://elixir.bootlin.com/linux/v5.10.4/source/net/core/sock.c#L3586 -func (fs FS) NetProtocols() (NetProtocolStats, error) { - data, err := util.ReadFileNoStat(fs.proc.Path("net/protocols")) - if err != nil { - return NetProtocolStats{}, err - } - return parseNetProtocols(bufio.NewScanner(bytes.NewReader(data))) -} - -func parseNetProtocols(s *bufio.Scanner) (NetProtocolStats, error) { - nps := NetProtocolStats{} - - // Skip the header line - s.Scan() - - for s.Scan() { - line, err := nps.parseLine(s.Text()) - if err != nil { - return NetProtocolStats{}, err - } - - nps[line.Name] = *line - } - return nps, nil -} - -func (ps NetProtocolStats) parseLine(rawLine string) (*NetProtocolStatLine, error) { - line := &NetProtocolStatLine{Capabilities: NetProtocolCapabilities{}} - var err error - const enabled = "yes" - const disabled = "no" - - fields := strings.Fields(rawLine) - line.Name = fields[0] - line.Size, err = strconv.ParseUint(fields[1], 10, 64) - if err != nil { - return nil, err - } - line.Sockets, err = strconv.ParseInt(fields[2], 10, 64) - if err != nil { - return nil, err - } - line.Memory, err = strconv.ParseInt(fields[3], 10, 64) - if err != nil { - return nil, err - } - if fields[4] == enabled { - line.Pressure = 1 - } else if fields[4] == disabled { - line.Pressure = 0 - } else { - line.Pressure = -1 - } - line.MaxHeader, err = strconv.ParseUint(fields[5], 10, 64) - if err != nil { - return nil, err - } - if fields[6] == enabled { - line.Slab = true - } else if fields[6] == disabled { - line.Slab = false - } else { - return nil, fmt.Errorf("%w: capability for protocol: %s", ErrFileParse, line.Name) - } - line.ModuleName = fields[7] - - err = line.Capabilities.parseCapabilities(fields[8:]) - if err != nil { - return nil, err - } - - return line, nil -} - -func (pc *NetProtocolCapabilities) parseCapabilities(capabilities []string) error { - // The capabilities are all bools so we can loop over to map them - capabilityFields := [...]*bool{ - &pc.Close, - &pc.Connect, - &pc.Disconnect, - &pc.Accept, - &pc.IoCtl, - &pc.Init, - &pc.Destroy, - &pc.Shutdown, - &pc.SetSockOpt, - &pc.GetSockOpt, - &pc.SendMsg, - &pc.RecvMsg, - &pc.SendPage, - &pc.Bind, - &pc.BacklogRcv, - &pc.Hash, - &pc.UnHash, - &pc.GetPort, - &pc.EnterMemoryPressure, - } - - for i := 0; i < len(capabilities); i++ { - if capabilities[i] == "y" { - *capabilityFields[i] = true - } else if capabilities[i] == "n" { - *capabilityFields[i] = false - } else { - return fmt.Errorf("%w: capability block for protocol: position %d", ErrFileParse, i) - } - } - return nil -} diff --git a/vendor/github.com/prometheus/procfs/net_route.go b/vendor/github.com/prometheus/procfs/net_route.go deleted file mode 100644 index deb7029fe..000000000 --- a/vendor/github.com/prometheus/procfs/net_route.go +++ /dev/null @@ -1,143 +0,0 @@ -// Copyright 2023 The Prometheus Authors -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package procfs - -import ( - "bufio" - "bytes" - "fmt" - "io" - "strconv" - "strings" - - "github.com/prometheus/procfs/internal/util" -) - -const ( - blackholeRepresentation string = "*" - blackholeIfaceName string = "blackhole" - routeLineColumns int = 11 -) - -// A NetRouteLine represents one line from net/route. -type NetRouteLine struct { - Iface string - Destination uint32 - Gateway uint32 - Flags uint32 - RefCnt uint32 - Use uint32 - Metric uint32 - Mask uint32 - MTU uint32 - Window uint32 - IRTT uint32 -} - -func (fs FS) NetRoute() ([]NetRouteLine, error) { - return readNetRoute(fs.proc.Path("net", "route")) -} - -func readNetRoute(path string) ([]NetRouteLine, error) { - b, err := util.ReadFileNoStat(path) - if err != nil { - return nil, err - } - - routelines, err := parseNetRoute(bytes.NewReader(b)) - if err != nil { - return nil, fmt.Errorf("failed to read net route from %s: %w", path, err) - } - return routelines, nil -} - -func parseNetRoute(r io.Reader) ([]NetRouteLine, error) { - var routelines []NetRouteLine - - scanner := bufio.NewScanner(r) - scanner.Scan() - for scanner.Scan() { - fields := strings.Fields(scanner.Text()) - routeline, err := parseNetRouteLine(fields) - if err != nil { - return nil, err - } - routelines = append(routelines, *routeline) - } - return routelines, nil -} - -func parseNetRouteLine(fields []string) (*NetRouteLine, error) { - if len(fields) != routeLineColumns { - return nil, fmt.Errorf("invalid routeline, num of digits: %d", len(fields)) - } - iface := fields[0] - if iface == blackholeRepresentation { - iface = blackholeIfaceName - } - destination, err := strconv.ParseUint(fields[1], 16, 32) - if err != nil { - return nil, err - } - gateway, err := strconv.ParseUint(fields[2], 16, 32) - if err != nil { - return nil, err - } - flags, err := strconv.ParseUint(fields[3], 10, 32) - if err != nil { - return nil, err - } - refcnt, err := strconv.ParseUint(fields[4], 10, 32) - if err != nil { - return nil, err - } - use, err := strconv.ParseUint(fields[5], 10, 32) - if err != nil { - return nil, err - } - metric, err := strconv.ParseUint(fields[6], 10, 32) - if err != nil { - return nil, err - } - mask, err := strconv.ParseUint(fields[7], 16, 32) - if err != nil { - return nil, err - } - mtu, err := strconv.ParseUint(fields[8], 10, 32) - if err != nil { - return nil, err - } - window, err := strconv.ParseUint(fields[9], 10, 32) - if err != nil { - return nil, err - } - irtt, err := strconv.ParseUint(fields[10], 10, 32) - if err != nil { - return nil, err - } - routeline := &NetRouteLine{ - Iface: iface, - Destination: uint32(destination), - Gateway: uint32(gateway), - Flags: uint32(flags), - RefCnt: uint32(refcnt), - Use: uint32(use), - Metric: uint32(metric), - Mask: uint32(mask), - MTU: uint32(mtu), - Window: uint32(window), - IRTT: uint32(irtt), - } - return routeline, nil -} diff --git a/vendor/github.com/prometheus/procfs/net_sockstat.go b/vendor/github.com/prometheus/procfs/net_sockstat.go deleted file mode 100644 index 360e36af7..000000000 --- a/vendor/github.com/prometheus/procfs/net_sockstat.go +++ /dev/null @@ -1,162 +0,0 
@@ -// Copyright 2019 The Prometheus Authors -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package procfs - -import ( - "bufio" - "bytes" - "fmt" - "io" - "strings" - - "github.com/prometheus/procfs/internal/util" -) - -// A NetSockstat contains the output of /proc/net/sockstat{,6} for IPv4 or IPv6, -// respectively. -type NetSockstat struct { - // Used is non-nil for IPv4 sockstat results, but nil for IPv6. - Used *int - Protocols []NetSockstatProtocol -} - -// A NetSockstatProtocol contains statistics about a given socket protocol. -// Pointer fields indicate that the value may or may not be present on any -// given protocol. -type NetSockstatProtocol struct { - Protocol string - InUse int - Orphan *int - TW *int - Alloc *int - Mem *int - Memory *int -} - -// NetSockstat retrieves IPv4 socket statistics. -func (fs FS) NetSockstat() (*NetSockstat, error) { - return readSockstat(fs.proc.Path("net", "sockstat")) -} - -// NetSockstat6 retrieves IPv6 socket statistics. -// -// If IPv6 is disabled on this kernel, the returned error can be checked with -// os.IsNotExist. -func (fs FS) NetSockstat6() (*NetSockstat, error) { - return readSockstat(fs.proc.Path("net", "sockstat6")) -} - -// readSockstat opens and parses a NetSockstat from the input file. -func readSockstat(name string) (*NetSockstat, error) { - // This file is small and can be read with one syscall. - b, err := util.ReadFileNoStat(name) - if err != nil { - // Do not wrap this error so the caller can detect os.IsNotExist and - // similar conditions. - return nil, err - } - - stat, err := parseSockstat(bytes.NewReader(b)) - if err != nil { - return nil, fmt.Errorf("%s: sockstats from %q: %w", ErrFileRead, name, err) - } - - return stat, nil -} - -// parseSockstat reads the contents of a sockstat file and parses a NetSockstat. -func parseSockstat(r io.Reader) (*NetSockstat, error) { - var stat NetSockstat - s := bufio.NewScanner(r) - for s.Scan() { - // Expect a minimum of a protocol and one key/value pair. - fields := strings.Split(s.Text(), " ") - if len(fields) < 3 { - return nil, fmt.Errorf("%w: Malformed sockstat line: %q", ErrFileParse, s.Text()) - } - - // The remaining fields are key/value pairs. - kvs, err := parseSockstatKVs(fields[1:]) - if err != nil { - return nil, fmt.Errorf("%s: sockstat key/value pairs from %q: %w", ErrFileParse, s.Text(), err) - } - - // The first field is the protocol. We must trim its colon suffix. - proto := strings.TrimSuffix(fields[0], ":") - switch proto { - case "sockets": - // Special case: IPv4 has a sockets "used" key/value pair that we - // embed at the top level of the structure. - used := kvs["used"] - stat.Used = &used - default: - // Parse all other lines as individual protocols. - nsp := parseSockstatProtocol(kvs) - nsp.Protocol = proto - stat.Protocols = append(stat.Protocols, nsp) - } - } - - if err := s.Err(); err != nil { - return nil, err - } - - return &stat, nil -} - -// parseSockstatKVs parses a string slice into a map of key/value pairs. 
-func parseSockstatKVs(kvs []string) (map[string]int, error) { - if len(kvs)%2 != 0 { - return nil, fmt.Errorf("%w:: Odd number of fields in key/value pairs %q", ErrFileParse, kvs) - } - - // Iterate two values at a time to gather key/value pairs. - out := make(map[string]int, len(kvs)/2) - for i := 0; i < len(kvs); i += 2 { - vp := util.NewValueParser(kvs[i+1]) - out[kvs[i]] = vp.Int() - - if err := vp.Err(); err != nil { - return nil, err - } - } - - return out, nil -} - -// parseSockstatProtocol parses a NetSockstatProtocol from the input kvs map. -func parseSockstatProtocol(kvs map[string]int) NetSockstatProtocol { - var nsp NetSockstatProtocol - for k, v := range kvs { - // Capture the range variable to ensure we get unique pointers for - // each of the optional fields. - v := v - switch k { - case "inuse": - nsp.InUse = v - case "orphan": - nsp.Orphan = &v - case "tw": - nsp.TW = &v - case "alloc": - nsp.Alloc = &v - case "mem": - nsp.Mem = &v - case "memory": - nsp.Memory = &v - } - } - - return nsp -} diff --git a/vendor/github.com/prometheus/procfs/net_softnet.go b/vendor/github.com/prometheus/procfs/net_softnet.go deleted file mode 100644 index c77085291..000000000 --- a/vendor/github.com/prometheus/procfs/net_softnet.go +++ /dev/null @@ -1,155 +0,0 @@ -// Copyright 2019 The Prometheus Authors -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package procfs - -import ( - "bufio" - "bytes" - "fmt" - "io" - "strconv" - "strings" - - "github.com/prometheus/procfs/internal/util" -) - -// For the proc file format details, -// See: -// * Linux 2.6.23 https://elixir.bootlin.com/linux/v2.6.23/source/net/core/dev.c#L2343 -// * Linux 2.6.39 https://elixir.bootlin.com/linux/v2.6.39/source/net/core/dev.c#L4086 -// * Linux 4.18 https://elixir.bootlin.com/linux/v4.18/source/net/core/net-procfs.c#L162 -// * Linux 5.14 https://elixir.bootlin.com/linux/v5.14/source/net/core/net-procfs.c#L169 - -// SoftnetStat contains a single row of data from /proc/net/softnet_stat. -type SoftnetStat struct { - // Number of processed packets. - Processed uint32 - // Number of dropped packets. - Dropped uint32 - // Number of times processing packets ran out of quota. - TimeSqueezed uint32 - // Number of collision occur while obtaining device lock while transmitting. - CPUCollision uint32 - // Number of times cpu woken up received_rps. - ReceivedRps uint32 - // number of times flow limit has been reached. - FlowLimitCount uint32 - // Softnet backlog status. - SoftnetBacklogLen uint32 - // CPU id owning this softnet_data. - Index uint32 - // softnet_data's Width. - Width int -} - -var softNetProcFile = "net/softnet_stat" - -// NetSoftnetStat reads data from /proc/net/softnet_stat. 
-func (fs FS) NetSoftnetStat() ([]SoftnetStat, error) { - b, err := util.ReadFileNoStat(fs.proc.Path(softNetProcFile)) - if err != nil { - return nil, err - } - - entries, err := parseSoftnet(bytes.NewReader(b)) - if err != nil { - return nil, fmt.Errorf("%s: /proc/net/softnet_stat: %w", ErrFileParse, err) - } - - return entries, nil -} - -func parseSoftnet(r io.Reader) ([]SoftnetStat, error) { - const minColumns = 9 - - s := bufio.NewScanner(r) - - var stats []SoftnetStat - cpuIndex := 0 - for s.Scan() { - columns := strings.Fields(s.Text()) - width := len(columns) - softnetStat := SoftnetStat{} - - if width < minColumns { - return nil, fmt.Errorf("%w: detected %d columns, but expected at least %d", ErrFileParse, width, minColumns) - } - - // Linux 2.6.23 https://elixir.bootlin.com/linux/v2.6.23/source/net/core/dev.c#L2347 - if width >= minColumns { - us, err := parseHexUint32s(columns[0:9]) - if err != nil { - return nil, err - } - - softnetStat.Processed = us[0] - softnetStat.Dropped = us[1] - softnetStat.TimeSqueezed = us[2] - softnetStat.CPUCollision = us[8] - } - - // Linux 2.6.39 https://elixir.bootlin.com/linux/v2.6.39/source/net/core/dev.c#L4086 - if width >= 10 { - us, err := parseHexUint32s(columns[9:10]) - if err != nil { - return nil, err - } - - softnetStat.ReceivedRps = us[0] - } - - // Linux 4.18 https://elixir.bootlin.com/linux/v4.18/source/net/core/net-procfs.c#L162 - if width >= 11 { - us, err := parseHexUint32s(columns[10:11]) - if err != nil { - return nil, err - } - - softnetStat.FlowLimitCount = us[0] - } - - // Linux 5.14 https://elixir.bootlin.com/linux/v5.14/source/net/core/net-procfs.c#L169 - if width >= 13 { - us, err := parseHexUint32s(columns[11:13]) - if err != nil { - return nil, err - } - - softnetStat.SoftnetBacklogLen = us[0] - softnetStat.Index = us[1] - } else { - // For older kernels, create the Index based on the scan line number. - softnetStat.Index = uint32(cpuIndex) - } - softnetStat.Width = width - stats = append(stats, softnetStat) - cpuIndex++ - } - - return stats, nil -} - -func parseHexUint32s(ss []string) ([]uint32, error) { - us := make([]uint32, 0, len(ss)) - for _, s := range ss { - u, err := strconv.ParseUint(s, 16, 32) - if err != nil { - return nil, err - } - - us = append(us, uint32(u)) - } - - return us, nil -} diff --git a/vendor/github.com/prometheus/procfs/net_tcp.go b/vendor/github.com/prometheus/procfs/net_tcp.go deleted file mode 100644 index 527762955..000000000 --- a/vendor/github.com/prometheus/procfs/net_tcp.go +++ /dev/null @@ -1,64 +0,0 @@ -// Copyright 2020 The Prometheus Authors -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package procfs - -type ( - // NetTCP represents the contents of /proc/net/tcp{,6} file without the header. - NetTCP []*netIPSocketLine - - // NetTCPSummary provides already computed values like the total queue lengths or - // the total number of used sockets. In contrast to NetTCP it does not collect - // the parsed lines into a slice. 
- NetTCPSummary NetIPSocketSummary -) - -// NetTCP returns the IPv4 kernel/networking statistics for TCP datagrams -// read from /proc/net/tcp. -func (fs FS) NetTCP() (NetTCP, error) { - return newNetTCP(fs.proc.Path("net/tcp")) -} - -// NetTCP6 returns the IPv6 kernel/networking statistics for TCP datagrams -// read from /proc/net/tcp6. -func (fs FS) NetTCP6() (NetTCP, error) { - return newNetTCP(fs.proc.Path("net/tcp6")) -} - -// NetTCPSummary returns already computed statistics like the total queue lengths -// for TCP datagrams read from /proc/net/tcp. -func (fs FS) NetTCPSummary() (*NetTCPSummary, error) { - return newNetTCPSummary(fs.proc.Path("net/tcp")) -} - -// NetTCP6Summary returns already computed statistics like the total queue lengths -// for TCP datagrams read from /proc/net/tcp6. -func (fs FS) NetTCP6Summary() (*NetTCPSummary, error) { - return newNetTCPSummary(fs.proc.Path("net/tcp6")) -} - -// newNetTCP creates a new NetTCP{,6} from the contents of the given file. -func newNetTCP(file string) (NetTCP, error) { - n, err := newNetIPSocket(file) - n1 := NetTCP(n) - return n1, err -} - -func newNetTCPSummary(file string) (*NetTCPSummary, error) { - n, err := newNetIPSocketSummary(file) - if n == nil { - return nil, err - } - n1 := NetTCPSummary(*n) - return &n1, err -} diff --git a/vendor/github.com/prometheus/procfs/net_tls_stat.go b/vendor/github.com/prometheus/procfs/net_tls_stat.go deleted file mode 100644 index 13994c178..000000000 --- a/vendor/github.com/prometheus/procfs/net_tls_stat.go +++ /dev/null @@ -1,119 +0,0 @@ -// Copyright 2023 Prometheus Team -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package procfs - -import ( - "bufio" - "fmt" - "os" - "strconv" - "strings" -) - -// TLSStat struct represents data in /proc/net/tls_stat. -// See https://docs.kernel.org/networking/tls.html#statistics -type TLSStat struct { - // number of TX sessions currently installed where host handles cryptography - TLSCurrTxSw int - // number of RX sessions currently installed where host handles cryptography - TLSCurrRxSw int - // number of TX sessions currently installed where NIC handles cryptography - TLSCurrTxDevice int - // number of RX sessions currently installed where NIC handles cryptography - TLSCurrRxDevice int - //number of TX sessions opened with host cryptography - TLSTxSw int - //number of RX sessions opened with host cryptography - TLSRxSw int - // number of TX sessions opened with NIC cryptography - TLSTxDevice int - // number of RX sessions opened with NIC cryptography - TLSRxDevice int - // record decryption failed (e.g. due to incorrect authentication tag) - TLSDecryptError int - // number of RX resyncs sent to NICs handling cryptography - TLSRxDeviceResync int - // number of RX records which had to be re-decrypted due to TLS_RX_EXPECT_NO_PAD mis-prediction. Note that this counter will also increment for non-data records. - TLSDecryptRetry int - // number of data RX records which had to be re-decrypted due to TLS_RX_EXPECT_NO_PAD mis-prediction. 
- TLSRxNoPadViolation int -} - -// NewTLSStat reads the tls_stat statistics. -func NewTLSStat() (TLSStat, error) { - fs, err := NewFS(DefaultMountPoint) - if err != nil { - return TLSStat{}, err - } - - return fs.NewTLSStat() -} - -// NewTLSStat reads the tls_stat statistics. -func (fs FS) NewTLSStat() (TLSStat, error) { - file, err := os.Open(fs.proc.Path("net/tls_stat")) - if err != nil { - return TLSStat{}, err - } - defer file.Close() - - var ( - tlsstat = TLSStat{} - s = bufio.NewScanner(file) - ) - - for s.Scan() { - fields := strings.Fields(s.Text()) - - if len(fields) != 2 { - return TLSStat{}, fmt.Errorf("%w: %q line %q", ErrFileParse, file.Name(), s.Text()) - } - - name := fields[0] - value, err := strconv.Atoi(fields[1]) - if err != nil { - return TLSStat{}, err - } - - switch name { - case "TlsCurrTxSw": - tlsstat.TLSCurrTxSw = value - case "TlsCurrRxSw": - tlsstat.TLSCurrRxSw = value - case "TlsCurrTxDevice": - tlsstat.TLSCurrTxDevice = value - case "TlsCurrRxDevice": - tlsstat.TLSCurrRxDevice = value - case "TlsTxSw": - tlsstat.TLSTxSw = value - case "TlsRxSw": - tlsstat.TLSRxSw = value - case "TlsTxDevice": - tlsstat.TLSTxDevice = value - case "TlsRxDevice": - tlsstat.TLSRxDevice = value - case "TlsDecryptError": - tlsstat.TLSDecryptError = value - case "TlsRxDeviceResync": - tlsstat.TLSRxDeviceResync = value - case "TlsDecryptRetry": - tlsstat.TLSDecryptRetry = value - case "TlsRxNoPadViolation": - tlsstat.TLSRxNoPadViolation = value - } - - } - - return tlsstat, s.Err() -} diff --git a/vendor/github.com/prometheus/procfs/net_udp.go b/vendor/github.com/prometheus/procfs/net_udp.go deleted file mode 100644 index 9ac3daf2d..000000000 --- a/vendor/github.com/prometheus/procfs/net_udp.go +++ /dev/null @@ -1,64 +0,0 @@ -// Copyright 2020 The Prometheus Authors -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package procfs - -type ( - // NetUDP represents the contents of /proc/net/udp{,6} file without the header. - NetUDP []*netIPSocketLine - - // NetUDPSummary provides already computed values like the total queue lengths or - // the total number of used sockets. In contrast to NetUDP it does not collect - // the parsed lines into a slice. - NetUDPSummary NetIPSocketSummary -) - -// NetUDP returns the IPv4 kernel/networking statistics for UDP datagrams -// read from /proc/net/udp. -func (fs FS) NetUDP() (NetUDP, error) { - return newNetUDP(fs.proc.Path("net/udp")) -} - -// NetUDP6 returns the IPv6 kernel/networking statistics for UDP datagrams -// read from /proc/net/udp6. -func (fs FS) NetUDP6() (NetUDP, error) { - return newNetUDP(fs.proc.Path("net/udp6")) -} - -// NetUDPSummary returns already computed statistics like the total queue lengths -// for UDP datagrams read from /proc/net/udp. -func (fs FS) NetUDPSummary() (*NetUDPSummary, error) { - return newNetUDPSummary(fs.proc.Path("net/udp")) -} - -// NetUDP6Summary returns already computed statistics like the total queue lengths -// for UDP datagrams read from /proc/net/udp6. 
-func (fs FS) NetUDP6Summary() (*NetUDPSummary, error) { - return newNetUDPSummary(fs.proc.Path("net/udp6")) -} - -// newNetUDP creates a new NetUDP{,6} from the contents of the given file. -func newNetUDP(file string) (NetUDP, error) { - n, err := newNetIPSocket(file) - n1 := NetUDP(n) - return n1, err -} - -func newNetUDPSummary(file string) (*NetUDPSummary, error) { - n, err := newNetIPSocketSummary(file) - if n == nil { - return nil, err - } - n1 := NetUDPSummary(*n) - return &n1, err -} diff --git a/vendor/github.com/prometheus/procfs/net_unix.go b/vendor/github.com/prometheus/procfs/net_unix.go deleted file mode 100644 index acbbc57ea..000000000 --- a/vendor/github.com/prometheus/procfs/net_unix.go +++ /dev/null @@ -1,257 +0,0 @@ -// Copyright 2018 The Prometheus Authors -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package procfs - -import ( - "bufio" - "fmt" - "io" - "os" - "strconv" - "strings" -) - -// For the proc file format details, -// see https://elixir.bootlin.com/linux/v4.17/source/net/unix/af_unix.c#L2815 -// and https://elixir.bootlin.com/linux/latest/source/include/uapi/linux/net.h#L48. - -// Constants for the various /proc/net/unix enumerations. -// TODO: match against x/sys/unix or similar? -const ( - netUnixTypeStream = 1 - netUnixTypeDgram = 2 - netUnixTypeSeqpacket = 5 - - netUnixFlagDefault = 0 - netUnixFlagListen = 1 << 16 - - netUnixStateUnconnected = 1 - netUnixStateConnecting = 2 - netUnixStateConnected = 3 - netUnixStateDisconnected = 4 -) - -// NetUNIXType is the type of the type field. -type NetUNIXType uint64 - -// NetUNIXFlags is the type of the flags field. -type NetUNIXFlags uint64 - -// NetUNIXState is the type of the state field. -type NetUNIXState uint64 - -// NetUNIXLine represents a line of /proc/net/unix. -type NetUNIXLine struct { - KernelPtr string - RefCount uint64 - Protocol uint64 - Flags NetUNIXFlags - Type NetUNIXType - State NetUNIXState - Inode uint64 - Path string -} - -// NetUNIX holds the data read from /proc/net/unix. -type NetUNIX struct { - Rows []*NetUNIXLine -} - -// NetUNIX returns data read from /proc/net/unix. -func (fs FS) NetUNIX() (*NetUNIX, error) { - return readNetUNIX(fs.proc.Path("net/unix")) -} - -// readNetUNIX reads data in /proc/net/unix format from the specified file. -func readNetUNIX(file string) (*NetUNIX, error) { - // This file could be quite large and a streaming read is desirable versus - // reading the entire contents at once. - f, err := os.Open(file) - if err != nil { - return nil, err - } - defer f.Close() - - return parseNetUNIX(f) -} - -// parseNetUNIX creates a NetUnix structure from the incoming stream. -func parseNetUNIX(r io.Reader) (*NetUNIX, error) { - // Begin scanning by checking for the existence of Inode. - s := bufio.NewScanner(r) - s.Scan() - - // From the man page of proc(5), it does not contain an Inode field, - // but in actually it exists. This code works for both cases. 
- hasInode := strings.Contains(s.Text(), "Inode") - - // Expect a minimum number of fields, but Inode and Path are optional: - // Num RefCount Protocol Flags Type St Inode Path - minFields := 6 - if hasInode { - minFields++ - } - - var nu NetUNIX - for s.Scan() { - line := s.Text() - item, err := nu.parseLine(line, hasInode, minFields) - if err != nil { - return nil, fmt.Errorf("%s: /proc/net/unix encountered data %q: %w", ErrFileParse, line, err) - } - - nu.Rows = append(nu.Rows, item) - } - - if err := s.Err(); err != nil { - return nil, fmt.Errorf("%s: /proc/net/unix encountered data: %w", ErrFileParse, err) - } - - return &nu, nil -} - -func (u *NetUNIX) parseLine(line string, hasInode bool, min int) (*NetUNIXLine, error) { - fields := strings.Fields(line) - - l := len(fields) - if l < min { - return nil, fmt.Errorf("%w: expected at least %d fields but got %d", ErrFileParse, min, l) - } - - // Field offsets are as follows: - // Num RefCount Protocol Flags Type St Inode Path - - kernelPtr := strings.TrimSuffix(fields[0], ":") - - users, err := u.parseUsers(fields[1]) - if err != nil { - return nil, fmt.Errorf("%s: ref count %q: %w", ErrFileParse, fields[1], err) - } - - flags, err := u.parseFlags(fields[3]) - if err != nil { - return nil, fmt.Errorf("%s: Unable to parse flags %q: %w", ErrFileParse, fields[3], err) - } - - typ, err := u.parseType(fields[4]) - if err != nil { - return nil, fmt.Errorf("%s: Failed to parse type %q: %w", ErrFileParse, fields[4], err) - } - - state, err := u.parseState(fields[5]) - if err != nil { - return nil, fmt.Errorf("%s: Failed to parse state %q: %w", ErrFileParse, fields[5], err) - } - - var inode uint64 - if hasInode { - inode, err = u.parseInode(fields[6]) - if err != nil { - return nil, fmt.Errorf("%s failed to parse inode %q: %w", ErrFileParse, fields[6], err) - } - } - - n := &NetUNIXLine{ - KernelPtr: kernelPtr, - RefCount: users, - Type: typ, - Flags: flags, - State: state, - Inode: inode, - } - - // Path field is optional. - if l > min { - // Path occurs at either index 6 or 7 depending on whether inode is - // already present. 
- pathIdx := 7 - if !hasInode { - pathIdx-- - } - - n.Path = fields[pathIdx] - } - - return n, nil -} - -func (u NetUNIX) parseUsers(s string) (uint64, error) { - return strconv.ParseUint(s, 16, 32) -} - -func (u NetUNIX) parseType(s string) (NetUNIXType, error) { - typ, err := strconv.ParseUint(s, 16, 16) - if err != nil { - return 0, err - } - - return NetUNIXType(typ), nil -} - -func (u NetUNIX) parseFlags(s string) (NetUNIXFlags, error) { - flags, err := strconv.ParseUint(s, 16, 32) - if err != nil { - return 0, err - } - - return NetUNIXFlags(flags), nil -} - -func (u NetUNIX) parseState(s string) (NetUNIXState, error) { - st, err := strconv.ParseInt(s, 16, 8) - if err != nil { - return 0, err - } - - return NetUNIXState(st), nil -} - -func (u NetUNIX) parseInode(s string) (uint64, error) { - return strconv.ParseUint(s, 10, 64) -} - -func (t NetUNIXType) String() string { - switch t { - case netUnixTypeStream: - return "stream" - case netUnixTypeDgram: - return "dgram" - case netUnixTypeSeqpacket: - return "seqpacket" - } - return "unknown" -} - -func (f NetUNIXFlags) String() string { - switch f { - case netUnixFlagListen: - return "listen" - default: - return "default" - } -} - -func (s NetUNIXState) String() string { - switch s { - case netUnixStateUnconnected: - return "unconnected" - case netUnixStateConnecting: - return "connecting" - case netUnixStateConnected: - return "connected" - case netUnixStateDisconnected: - return "disconnected" - } - return "unknown" -} diff --git a/vendor/github.com/prometheus/procfs/net_wireless.go b/vendor/github.com/prometheus/procfs/net_wireless.go deleted file mode 100644 index 7443edca9..000000000 --- a/vendor/github.com/prometheus/procfs/net_wireless.go +++ /dev/null @@ -1,182 +0,0 @@ -// Copyright 2023 The Prometheus Authors -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package procfs - -import ( - "bufio" - "bytes" - "fmt" - "io" - "strconv" - "strings" - - "github.com/prometheus/procfs/internal/util" -) - -// Wireless models the content of /proc/net/wireless. -type Wireless struct { - Name string - - // Status is the current 4-digit hex value status of the interface. - Status uint64 - - // QualityLink is the link quality. - QualityLink int - - // QualityLevel is the signal gain (dBm). - QualityLevel int - - // QualityNoise is the signal noise baseline (dBm). - QualityNoise int - - // DiscardedNwid is the number of discarded packets with wrong nwid/essid. - DiscardedNwid int - - // DiscardedCrypt is the number of discarded packets with wrong code/decode (WEP). - DiscardedCrypt int - - // DiscardedFrag is the number of discarded packets that can't perform MAC reassembly. - DiscardedFrag int - - // DiscardedRetry is the number of discarded packets that reached max MAC retries. - DiscardedRetry int - - // DiscardedMisc is the number of discarded packets for other reasons. - DiscardedMisc int - - // MissedBeacon is the number of missed beacons/superframe. - MissedBeacon int -} - -// Wireless returns kernel wireless statistics. 
-func (fs FS) Wireless() ([]*Wireless, error) { - b, err := util.ReadFileNoStat(fs.proc.Path("net/wireless")) - if err != nil { - return nil, err - } - - m, err := parseWireless(bytes.NewReader(b)) - if err != nil { - return nil, fmt.Errorf("%s: wireless: %w", ErrFileParse, err) - } - - return m, nil -} - -// parseWireless parses the contents of /proc/net/wireless. -/* -Inter-| sta-| Quality | Discarded packets | Missed | WE -face | tus | link level noise | nwid crypt frag retry misc | beacon | 22 - eth1: 0000 5. -256. -10. 0 1 0 3 0 0 - eth2: 0000 5. -256. -20. 0 2 0 4 0 0 -*/ -func parseWireless(r io.Reader) ([]*Wireless, error) { - var ( - interfaces []*Wireless - scanner = bufio.NewScanner(r) - ) - - for n := 0; scanner.Scan(); n++ { - // Skip the 2 header lines. - if n < 2 { - continue - } - - line := scanner.Text() - - parts := strings.Split(line, ":") - if len(parts) != 2 { - return nil, fmt.Errorf("%w: expected 2 parts after splitting line by ':', got %d for line %q", ErrFileParse, len(parts), line) - } - - name := strings.TrimSpace(parts[0]) - stats := strings.Fields(parts[1]) - - if len(stats) < 10 { - return nil, fmt.Errorf("%w: invalid number of fields in line %d, expected 10+, got %d: %q", ErrFileParse, n, len(stats), line) - } - - status, err := strconv.ParseUint(stats[0], 16, 16) - if err != nil { - return nil, fmt.Errorf("%w: invalid status in line %d: %q", ErrFileParse, n, line) - } - - qlink, err := strconv.Atoi(strings.TrimSuffix(stats[1], ".")) - if err != nil { - return nil, fmt.Errorf("%s: parse Quality:link as integer %q: %w", ErrFileParse, qlink, err) - } - - qlevel, err := strconv.Atoi(strings.TrimSuffix(stats[2], ".")) - if err != nil { - return nil, fmt.Errorf("%s: Quality:level as integer %q: %w", ErrFileParse, qlevel, err) - } - - qnoise, err := strconv.Atoi(strings.TrimSuffix(stats[3], ".")) - if err != nil { - return nil, fmt.Errorf("%s: Quality:noise as integer %q: %w", ErrFileParse, qnoise, err) - } - - dnwid, err := strconv.Atoi(stats[4]) - if err != nil { - return nil, fmt.Errorf("%s: Discarded:nwid as integer %q: %w", ErrFileParse, dnwid, err) - } - - dcrypt, err := strconv.Atoi(stats[5]) - if err != nil { - return nil, fmt.Errorf("%s: Discarded:crypt as integer %q: %w", ErrFileParse, dcrypt, err) - } - - dfrag, err := strconv.Atoi(stats[6]) - if err != nil { - return nil, fmt.Errorf("%s: Discarded:frag as integer %q: %w", ErrFileParse, dfrag, err) - } - - dretry, err := strconv.Atoi(stats[7]) - if err != nil { - return nil, fmt.Errorf("%s: Discarded:retry as integer %q: %w", ErrFileParse, dretry, err) - } - - dmisc, err := strconv.Atoi(stats[8]) - if err != nil { - return nil, fmt.Errorf("%s: Discarded:misc as integer %q: %w", ErrFileParse, dmisc, err) - } - - mbeacon, err := strconv.Atoi(stats[9]) - if err != nil { - return nil, fmt.Errorf("%s: Missed:beacon as integer %q: %w", ErrFileParse, mbeacon, err) - } - - w := &Wireless{ - Name: name, - Status: status, - QualityLink: qlink, - QualityLevel: qlevel, - QualityNoise: qnoise, - DiscardedNwid: dnwid, - DiscardedCrypt: dcrypt, - DiscardedFrag: dfrag, - DiscardedRetry: dretry, - DiscardedMisc: dmisc, - MissedBeacon: mbeacon, - } - - interfaces = append(interfaces, w) - } - - if err := scanner.Err(); err != nil { - return nil, fmt.Errorf("%s: Failed to scan /proc/net/wireless: %w", ErrFileRead, err) - } - - return interfaces, nil -} diff --git a/vendor/github.com/prometheus/procfs/net_xfrm.go b/vendor/github.com/prometheus/procfs/net_xfrm.go deleted file mode 100644 index 932ef2046..000000000 --- 
a/vendor/github.com/prometheus/procfs/net_xfrm.go +++ /dev/null @@ -1,189 +0,0 @@ -// Copyright 2017 Prometheus Team -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package procfs - -import ( - "bufio" - "fmt" - "os" - "strconv" - "strings" -) - -// XfrmStat models the contents of /proc/net/xfrm_stat. -type XfrmStat struct { - // All errors which are not matched by other - XfrmInError int - // No buffer is left - XfrmInBufferError int - // Header Error - XfrmInHdrError int - // No state found - // i.e. either inbound SPI, address, or IPSEC protocol at SA is wrong - XfrmInNoStates int - // Transformation protocol specific error - // e.g. SA Key is wrong - XfrmInStateProtoError int - // Transformation mode specific error - XfrmInStateModeError int - // Sequence error - // e.g. sequence number is out of window - XfrmInStateSeqError int - // State is expired - XfrmInStateExpired int - // State has mismatch option - // e.g. UDP encapsulation type is mismatched - XfrmInStateMismatch int - // State is invalid - XfrmInStateInvalid int - // No matching template for states - // e.g. Inbound SAs are correct but SP rule is wrong - XfrmInTmplMismatch int - // No policy is found for states - // e.g. Inbound SAs are correct but no SP is found - XfrmInNoPols int - // Policy discards - XfrmInPolBlock int - // Policy error - XfrmInPolError int - // All errors which are not matched by others - XfrmOutError int - // Bundle generation error - XfrmOutBundleGenError int - // Bundle check error - XfrmOutBundleCheckError int - // No state was found - XfrmOutNoStates int - // Transformation protocol specific error - XfrmOutStateProtoError int - // Transportation mode specific error - XfrmOutStateModeError int - // Sequence error - // i.e sequence number overflow - XfrmOutStateSeqError int - // State is expired - XfrmOutStateExpired int - // Policy discads - XfrmOutPolBlock int - // Policy is dead - XfrmOutPolDead int - // Policy Error - XfrmOutPolError int - // Forward routing of a packet is not allowed - XfrmFwdHdrError int - // State is invalid, perhaps expired - XfrmOutStateInvalid int - // State hasn’t been fully acquired before use - XfrmAcquireError int -} - -// NewXfrmStat reads the xfrm_stat statistics. -func NewXfrmStat() (XfrmStat, error) { - fs, err := NewFS(DefaultMountPoint) - if err != nil { - return XfrmStat{}, err - } - - return fs.NewXfrmStat() -} - -// NewXfrmStat reads the xfrm_stat statistics from the 'proc' filesystem. 
-func (fs FS) NewXfrmStat() (XfrmStat, error) { - file, err := os.Open(fs.proc.Path("net/xfrm_stat")) - if err != nil { - return XfrmStat{}, err - } - defer file.Close() - - var ( - x = XfrmStat{} - s = bufio.NewScanner(file) - ) - - for s.Scan() { - fields := strings.Fields(s.Text()) - - if len(fields) != 2 { - return XfrmStat{}, fmt.Errorf("%w: %q line %q", ErrFileParse, file.Name(), s.Text()) - } - - name := fields[0] - value, err := strconv.Atoi(fields[1]) - if err != nil { - return XfrmStat{}, err - } - - switch name { - case "XfrmInError": - x.XfrmInError = value - case "XfrmInBufferError": - x.XfrmInBufferError = value - case "XfrmInHdrError": - x.XfrmInHdrError = value - case "XfrmInNoStates": - x.XfrmInNoStates = value - case "XfrmInStateProtoError": - x.XfrmInStateProtoError = value - case "XfrmInStateModeError": - x.XfrmInStateModeError = value - case "XfrmInStateSeqError": - x.XfrmInStateSeqError = value - case "XfrmInStateExpired": - x.XfrmInStateExpired = value - case "XfrmInStateInvalid": - x.XfrmInStateInvalid = value - case "XfrmInTmplMismatch": - x.XfrmInTmplMismatch = value - case "XfrmInNoPols": - x.XfrmInNoPols = value - case "XfrmInPolBlock": - x.XfrmInPolBlock = value - case "XfrmInPolError": - x.XfrmInPolError = value - case "XfrmOutError": - x.XfrmOutError = value - case "XfrmInStateMismatch": - x.XfrmInStateMismatch = value - case "XfrmOutBundleGenError": - x.XfrmOutBundleGenError = value - case "XfrmOutBundleCheckError": - x.XfrmOutBundleCheckError = value - case "XfrmOutNoStates": - x.XfrmOutNoStates = value - case "XfrmOutStateProtoError": - x.XfrmOutStateProtoError = value - case "XfrmOutStateModeError": - x.XfrmOutStateModeError = value - case "XfrmOutStateSeqError": - x.XfrmOutStateSeqError = value - case "XfrmOutStateExpired": - x.XfrmOutStateExpired = value - case "XfrmOutPolBlock": - x.XfrmOutPolBlock = value - case "XfrmOutPolDead": - x.XfrmOutPolDead = value - case "XfrmOutPolError": - x.XfrmOutPolError = value - case "XfrmFwdHdrError": - x.XfrmFwdHdrError = value - case "XfrmOutStateInvalid": - x.XfrmOutStateInvalid = value - case "XfrmAcquireError": - x.XfrmAcquireError = value - } - - } - - return x, s.Err() -} diff --git a/vendor/github.com/prometheus/procfs/netstat.go b/vendor/github.com/prometheus/procfs/netstat.go deleted file mode 100644 index 742dff453..000000000 --- a/vendor/github.com/prometheus/procfs/netstat.go +++ /dev/null @@ -1,82 +0,0 @@ -// Copyright 2020 The Prometheus Authors -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package procfs - -import ( - "bufio" - "os" - "path/filepath" - "strconv" - "strings" -) - -// NetStat contains statistics for all the counters from one file. -type NetStat struct { - Stats map[string][]uint64 - Filename string -} - -// NetStat retrieves stats from `/proc/net/stat/`. 
-func (fs FS) NetStat() ([]NetStat, error) { - statFiles, err := filepath.Glob(fs.proc.Path("net/stat/*")) - if err != nil { - return nil, err - } - - var netStatsTotal []NetStat - - for _, filePath := range statFiles { - procNetstat, err := parseNetstat(filePath) - if err != nil { - return nil, err - } - procNetstat.Filename = filepath.Base(filePath) - - netStatsTotal = append(netStatsTotal, procNetstat) - } - return netStatsTotal, nil -} - -// parseNetstat parses the metrics from `/proc/net/stat/` file -// and returns a NetStat structure. -func parseNetstat(filePath string) (NetStat, error) { - netStat := NetStat{ - Stats: make(map[string][]uint64), - } - file, err := os.Open(filePath) - if err != nil { - return netStat, err - } - defer file.Close() - - scanner := bufio.NewScanner(file) - scanner.Scan() - - // First string is always a header for stats - var headers []string - headers = append(headers, strings.Fields(scanner.Text())...) - - // Other strings represent per-CPU counters - for scanner.Scan() { - for num, counter := range strings.Fields(scanner.Text()) { - value, err := strconv.ParseUint(counter, 16, 64) - if err != nil { - return NetStat{}, err - } - netStat.Stats[headers[num]] = append(netStat.Stats[headers[num]], value) - } - } - - return netStat, nil -} diff --git a/vendor/github.com/prometheus/procfs/proc.go b/vendor/github.com/prometheus/procfs/proc.go deleted file mode 100644 index d1f71caa5..000000000 --- a/vendor/github.com/prometheus/procfs/proc.go +++ /dev/null @@ -1,338 +0,0 @@ -// Copyright 2018 The Prometheus Authors -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package procfs - -import ( - "bytes" - "errors" - "fmt" - "io" - "os" - "strconv" - "strings" - - "github.com/prometheus/procfs/internal/util" -) - -// Proc provides information about a running process. -type Proc struct { - // The process ID. - PID int - - fs FS -} - -// Procs represents a list of Proc structs. -type Procs []Proc - -var ( - ErrFileParse = errors.New("Error Parsing File") - ErrFileRead = errors.New("Error Reading File") - ErrMountPoint = errors.New("Error Accessing Mount point") -) - -func (p Procs) Len() int { return len(p) } -func (p Procs) Swap(i, j int) { p[i], p[j] = p[j], p[i] } -func (p Procs) Less(i, j int) bool { return p[i].PID < p[j].PID } - -// Self returns a process for the current process read via /proc/self. -func Self() (Proc, error) { - fs, err := NewFS(DefaultMountPoint) - if err != nil || errors.Unwrap(err) == ErrMountPoint { - return Proc{}, err - } - return fs.Self() -} - -// NewProc returns a process for the given pid under /proc. -func NewProc(pid int) (Proc, error) { - fs, err := NewFS(DefaultMountPoint) - if err != nil { - return Proc{}, err - } - return fs.Proc(pid) -} - -// AllProcs returns a list of all currently available processes under /proc. -func AllProcs() (Procs, error) { - fs, err := NewFS(DefaultMountPoint) - if err != nil { - return Procs{}, err - } - return fs.AllProcs() -} - -// Self returns a process for the current process. 
-func (fs FS) Self() (Proc, error) { - p, err := os.Readlink(fs.proc.Path("self")) - if err != nil { - return Proc{}, err - } - pid, err := strconv.Atoi(strings.Replace(p, string(fs.proc), "", -1)) - if err != nil { - return Proc{}, err - } - return fs.Proc(pid) -} - -// NewProc returns a process for the given pid. -// -// Deprecated: Use fs.Proc() instead. -func (fs FS) NewProc(pid int) (Proc, error) { - return fs.Proc(pid) -} - -// Proc returns a process for the given pid. -func (fs FS) Proc(pid int) (Proc, error) { - if _, err := os.Stat(fs.proc.Path(strconv.Itoa(pid))); err != nil { - return Proc{}, err - } - return Proc{PID: pid, fs: fs}, nil -} - -// AllProcs returns a list of all currently available processes. -func (fs FS) AllProcs() (Procs, error) { - d, err := os.Open(fs.proc.Path()) - if err != nil { - return Procs{}, err - } - defer d.Close() - - names, err := d.Readdirnames(-1) - if err != nil { - return Procs{}, fmt.Errorf("%s: Cannot read file: %v: %w", ErrFileRead, names, err) - } - - p := Procs{} - for _, n := range names { - pid, err := strconv.ParseInt(n, 10, 64) - if err != nil { - continue - } - p = append(p, Proc{PID: int(pid), fs: fs}) - } - - return p, nil -} - -// CmdLine returns the command line of a process. -func (p Proc) CmdLine() ([]string, error) { - data, err := util.ReadFileNoStat(p.path("cmdline")) - if err != nil { - return nil, err - } - - if len(data) < 1 { - return []string{}, nil - } - - return strings.Split(string(bytes.TrimRight(data, string("\x00"))), string(byte(0))), nil -} - -// Wchan returns the wchan (wait channel) of a process. -func (p Proc) Wchan() (string, error) { - f, err := os.Open(p.path("wchan")) - if err != nil { - return "", err - } - defer f.Close() - - data, err := io.ReadAll(f) - if err != nil { - return "", err - } - - wchan := string(data) - if wchan == "" || wchan == "0" { - return "", nil - } - - return wchan, nil -} - -// Comm returns the command name of a process. -func (p Proc) Comm() (string, error) { - data, err := util.ReadFileNoStat(p.path("comm")) - if err != nil { - return "", err - } - - return strings.TrimSpace(string(data)), nil -} - -// Executable returns the absolute path of the executable command of a process. -func (p Proc) Executable() (string, error) { - exe, err := os.Readlink(p.path("exe")) - if os.IsNotExist(err) { - return "", nil - } - - return exe, err -} - -// Cwd returns the absolute path to the current working directory of the process. -func (p Proc) Cwd() (string, error) { - wd, err := os.Readlink(p.path("cwd")) - if os.IsNotExist(err) { - return "", nil - } - - return wd, err -} - -// RootDir returns the absolute path to the process's root directory (as set by chroot). -func (p Proc) RootDir() (string, error) { - rdir, err := os.Readlink(p.path("root")) - if os.IsNotExist(err) { - return "", nil - } - - return rdir, err -} - -// FileDescriptors returns the currently open file descriptors of a process. -func (p Proc) FileDescriptors() ([]uintptr, error) { - names, err := p.fileDescriptors() - if err != nil { - return nil, err - } - - fds := make([]uintptr, len(names)) - for i, n := range names { - fd, err := strconv.ParseInt(n, 10, 32) - if err != nil { - return nil, fmt.Errorf("%s: Cannot parse line: %v: %w", ErrFileParse, i, err) - } - fds[i] = uintptr(fd) - } - - return fds, nil -} - -// FileDescriptorTargets returns the targets of all file descriptors of a process. -// If a file descriptor is not a symlink to a file (like a socket), that value will be the empty string. 
-func (p Proc) FileDescriptorTargets() ([]string, error) { - names, err := p.fileDescriptors() - if err != nil { - return nil, err - } - - targets := make([]string, len(names)) - - for i, name := range names { - target, err := os.Readlink(p.path("fd", name)) - if err == nil { - targets[i] = target - } - } - - return targets, nil -} - -// FileDescriptorsLen returns the number of currently open file descriptors of -// a process. -func (p Proc) FileDescriptorsLen() (int, error) { - // Use fast path if available (Linux v6.2): https://github.com/torvalds/linux/commit/f1f1f2569901 - if p.fs.isReal { - stat, err := os.Stat(p.path("fd")) - if err != nil { - return 0, err - } - - size := stat.Size() - if size > 0 { - return int(size), nil - } - } - - fds, err := p.fileDescriptors() - if err != nil { - return 0, err - } - - return len(fds), nil -} - -// MountStats retrieves statistics and configuration for mount points in a -// process's namespace. -func (p Proc) MountStats() ([]*Mount, error) { - f, err := os.Open(p.path("mountstats")) - if err != nil { - return nil, err - } - defer f.Close() - - return parseMountStats(f) -} - -// MountInfo retrieves mount information for mount points in a -// process's namespace. -// It supplies information missing in `/proc/self/mounts` and -// fixes various other problems with that file too. -func (p Proc) MountInfo() ([]*MountInfo, error) { - data, err := util.ReadFileNoStat(p.path("mountinfo")) - if err != nil { - return nil, err - } - return parseMountInfo(data) -} - -func (p Proc) fileDescriptors() ([]string, error) { - d, err := os.Open(p.path("fd")) - if err != nil { - return nil, err - } - defer d.Close() - - names, err := d.Readdirnames(-1) - if err != nil { - return nil, fmt.Errorf("%s: Cannot read file: %v: %w", ErrFileRead, names, err) - } - - return names, nil -} - -func (p Proc) path(pa ...string) string { - return p.fs.proc.Path(append([]string{strconv.Itoa(p.PID)}, pa...)...) -} - -// FileDescriptorsInfo retrieves information about all file descriptors of -// the process. -func (p Proc) FileDescriptorsInfo() (ProcFDInfos, error) { - names, err := p.fileDescriptors() - if err != nil { - return nil, err - } - - var fdinfos ProcFDInfos - - for _, n := range names { - fdinfo, err := p.FDInfo(n) - if err != nil { - continue - } - fdinfos = append(fdinfos, *fdinfo) - } - - return fdinfos, nil -} - -// Schedstat returns task scheduling information for the process. -func (p Proc) Schedstat() (ProcSchedstat, error) { - contents, err := os.ReadFile(p.path("schedstat")) - if err != nil { - return ProcSchedstat{}, err - } - return parseProcSchedstat(string(contents)) -} diff --git a/vendor/github.com/prometheus/procfs/proc_cgroup.go b/vendor/github.com/prometheus/procfs/proc_cgroup.go deleted file mode 100644 index daeed7f57..000000000 --- a/vendor/github.com/prometheus/procfs/proc_cgroup.go +++ /dev/null @@ -1,98 +0,0 @@ -// Copyright 2020 The Prometheus Authors -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package procfs - -import ( - "bufio" - "bytes" - "fmt" - "strconv" - "strings" - - "github.com/prometheus/procfs/internal/util" -) - -// Cgroup models one line from /proc/[pid]/cgroup. Each Cgroup struct describes the placement of a PID inside a -// specific control hierarchy. The kernel has two cgroup APIs, v1 and v2. v1 has one hierarchy per available resource -// controller, while v2 has one unified hierarchy shared by all controllers. Regardless of v1 or v2, all hierarchies -// contain all running processes, so the question answerable with a Cgroup struct is 'where is this process in -// this hierarchy' (where==what path on the specific cgroupfs). By prefixing this path with the mount point of -// *this specific* hierarchy, you can locate the relevant pseudo-files needed to read/set the data for this PID -// in this hierarchy -// -// Also see http://man7.org/linux/man-pages/man7/cgroups.7.html -type Cgroup struct { - // HierarchyID that can be matched to a named hierarchy using /proc/cgroups. Cgroups V2 only has one - // hierarchy, so HierarchyID is always 0. For cgroups v1 this is a unique ID number - HierarchyID int - // Controllers using this hierarchy of processes. Controllers are also known as subsystems. For - // Cgroups V2 this may be empty, as all active controllers use the same hierarchy - Controllers []string - // Path of this control group, relative to the mount point of the cgroupfs representing this specific - // hierarchy - Path string -} - -// parseCgroupString parses each line of the /proc/[pid]/cgroup file -// Line format is hierarchyID:[controller1,controller2]:path. -func parseCgroupString(cgroupStr string) (*Cgroup, error) { - var err error - - fields := strings.SplitN(cgroupStr, ":", 3) - if len(fields) < 3 { - return nil, fmt.Errorf("%w: 3+ fields required, found %d fields in cgroup string: %s", ErrFileParse, len(fields), cgroupStr) - } - - cgroup := &Cgroup{ - Path: fields[2], - Controllers: nil, - } - cgroup.HierarchyID, err = strconv.Atoi(fields[0]) - if err != nil { - return nil, fmt.Errorf("%w: hierarchy ID: %q", ErrFileParse, cgroup.HierarchyID) - } - if fields[1] != "" { - ssNames := strings.Split(fields[1], ",") - cgroup.Controllers = append(cgroup.Controllers, ssNames...) - } - return cgroup, nil -} - -// parseCgroups reads each line of the /proc/[pid]/cgroup file. -func parseCgroups(data []byte) ([]Cgroup, error) { - var cgroups []Cgroup - scanner := bufio.NewScanner(bytes.NewReader(data)) - for scanner.Scan() { - mountString := scanner.Text() - parsedMounts, err := parseCgroupString(mountString) - if err != nil { - return nil, err - } - cgroups = append(cgroups, *parsedMounts) - } - - err := scanner.Err() - return cgroups, err -} - -// Cgroups reads from /proc//cgroups and returns a []*Cgroup struct locating this PID in each process -// control hierarchy running on this system. On every system (v1 and v2), all hierarchies contain all processes, -// so the len of the returned struct is equal to the number of active hierarchies on this system. 
-func (p Proc) Cgroups() ([]Cgroup, error) { - data, err := util.ReadFileNoStat(p.path("cgroup")) - if err != nil { - return nil, err - } - return parseCgroups(data) -} diff --git a/vendor/github.com/prometheus/procfs/proc_cgroups.go b/vendor/github.com/prometheus/procfs/proc_cgroups.go deleted file mode 100644 index 5dd493899..000000000 --- a/vendor/github.com/prometheus/procfs/proc_cgroups.go +++ /dev/null @@ -1,98 +0,0 @@ -// Copyright 2021 The Prometheus Authors -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package procfs - -import ( - "bufio" - "bytes" - "fmt" - "strconv" - "strings" - - "github.com/prometheus/procfs/internal/util" -) - -// CgroupSummary models one line from /proc/cgroups. -// This file contains information about the controllers that are compiled into the kernel. -// -// Also see http://man7.org/linux/man-pages/man7/cgroups.7.html -type CgroupSummary struct { - // The name of the controller. controller is also known as subsystem. - SubsysName string - // The unique ID of the cgroup hierarchy on which this controller is mounted. - Hierarchy int - // The number of control groups in this hierarchy using this controller. - Cgroups int - // This field contains the value 1 if this controller is enabled, or 0 if it has been disabled - Enabled int -} - -// parseCgroupSummary parses each line of the /proc/cgroup file -// Line format is `subsys_name hierarchy num_cgroups enabled`. -func parseCgroupSummaryString(CgroupSummaryStr string) (*CgroupSummary, error) { - var err error - - fields := strings.Fields(CgroupSummaryStr) - // require at least 4 fields - if len(fields) < 4 { - return nil, fmt.Errorf("%w: 4+ fields required, found %d fields in cgroup info string: %s", ErrFileParse, len(fields), CgroupSummaryStr) - } - - CgroupSummary := &CgroupSummary{ - SubsysName: fields[0], - } - CgroupSummary.Hierarchy, err = strconv.Atoi(fields[1]) - if err != nil { - return nil, fmt.Errorf("%w: Unable to parse hierarchy ID from %q", ErrFileParse, fields[1]) - } - CgroupSummary.Cgroups, err = strconv.Atoi(fields[2]) - if err != nil { - return nil, fmt.Errorf("%w: Unable to parse Cgroup Num from %q", ErrFileParse, fields[2]) - } - CgroupSummary.Enabled, err = strconv.Atoi(fields[3]) - if err != nil { - return nil, fmt.Errorf("%w: Unable to parse Enabled from %q", ErrFileParse, fields[3]) - } - return CgroupSummary, nil -} - -// parseCgroupSummary reads each line of the /proc/cgroup file. 
-func parseCgroupSummary(data []byte) ([]CgroupSummary, error) { - var CgroupSummarys []CgroupSummary - scanner := bufio.NewScanner(bytes.NewReader(data)) - for scanner.Scan() { - CgroupSummaryString := scanner.Text() - // ignore comment lines - if strings.HasPrefix(CgroupSummaryString, "#") { - continue - } - CgroupSummary, err := parseCgroupSummaryString(CgroupSummaryString) - if err != nil { - return nil, err - } - CgroupSummarys = append(CgroupSummarys, *CgroupSummary) - } - - err := scanner.Err() - return CgroupSummarys, err -} - -// CgroupSummarys returns information about current /proc/cgroups. -func (fs FS) CgroupSummarys() ([]CgroupSummary, error) { - data, err := util.ReadFileNoStat(fs.proc.Path("cgroups")) - if err != nil { - return nil, err - } - return parseCgroupSummary(data) -} diff --git a/vendor/github.com/prometheus/procfs/proc_environ.go b/vendor/github.com/prometheus/procfs/proc_environ.go deleted file mode 100644 index 57a89895d..000000000 --- a/vendor/github.com/prometheus/procfs/proc_environ.go +++ /dev/null @@ -1,37 +0,0 @@ -// Copyright 2019 The Prometheus Authors -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package procfs - -import ( - "strings" - - "github.com/prometheus/procfs/internal/util" -) - -// Environ reads process environments from `/proc//environ`. -func (p Proc) Environ() ([]string, error) { - environments := make([]string, 0) - - data, err := util.ReadFileNoStat(p.path("environ")) - if err != nil { - return environments, err - } - - environments = strings.Split(string(data), "\000") - if len(environments) > 0 { - environments = environments[:len(environments)-1] - } - - return environments, nil -} diff --git a/vendor/github.com/prometheus/procfs/proc_fdinfo.go b/vendor/github.com/prometheus/procfs/proc_fdinfo.go deleted file mode 100644 index fa761b352..000000000 --- a/vendor/github.com/prometheus/procfs/proc_fdinfo.go +++ /dev/null @@ -1,138 +0,0 @@ -// Copyright 2019 The Prometheus Authors -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package procfs - -import ( - "bufio" - "bytes" - "fmt" - "regexp" - - "github.com/prometheus/procfs/internal/util" -) - -var ( - rPos = regexp.MustCompile(`^pos:\s+(\d+)$`) - rFlags = regexp.MustCompile(`^flags:\s+(\d+)$`) - rMntID = regexp.MustCompile(`^mnt_id:\s+(\d+)$`) - rIno = regexp.MustCompile(`^ino:\s+(\d+)$`) - rInotify = regexp.MustCompile(`^inotify`) - rInotifyParts = regexp.MustCompile(`^inotify\s+wd:([0-9a-f]+)\s+ino:([0-9a-f]+)\s+sdev:([0-9a-f]+)(?:\s+mask:([0-9a-f]+))?`) -) - -// ProcFDInfo contains represents file descriptor information. -type ProcFDInfo struct { - // File descriptor - FD string - // File offset - Pos string - // File access mode and status flags - Flags string - // Mount point ID - MntID string - // Inode number - Ino string - // List of inotify lines (structured) in the fdinfo file (kernel 3.8+ only) - InotifyInfos []InotifyInfo -} - -// FDInfo constructor. On kernels older than 3.8, InotifyInfos will always be empty. -func (p Proc) FDInfo(fd string) (*ProcFDInfo, error) { - data, err := util.ReadFileNoStat(p.path("fdinfo", fd)) - if err != nil { - return nil, err - } - - var text, pos, flags, mntid, ino string - var inotify []InotifyInfo - - scanner := bufio.NewScanner(bytes.NewReader(data)) - for scanner.Scan() { - text = scanner.Text() - if rPos.MatchString(text) { - pos = rPos.FindStringSubmatch(text)[1] - } else if rFlags.MatchString(text) { - flags = rFlags.FindStringSubmatch(text)[1] - } else if rMntID.MatchString(text) { - mntid = rMntID.FindStringSubmatch(text)[1] - } else if rIno.MatchString(text) { - ino = rIno.FindStringSubmatch(text)[1] - } else if rInotify.MatchString(text) { - newInotify, err := parseInotifyInfo(text) - if err != nil { - return nil, err - } - inotify = append(inotify, *newInotify) - } - } - - i := &ProcFDInfo{ - FD: fd, - Pos: pos, - Flags: flags, - MntID: mntid, - Ino: ino, - InotifyInfos: inotify, - } - - return i, nil -} - -// InotifyInfo represents a single inotify line in the fdinfo file. -type InotifyInfo struct { - // Watch descriptor number - WD string - // Inode number - Ino string - // Device ID - Sdev string - // Mask of events being monitored - Mask string -} - -// InotifyInfo constructor. Only available on kernel 3.8+. -func parseInotifyInfo(line string) (*InotifyInfo, error) { - m := rInotifyParts.FindStringSubmatch(line) - if len(m) >= 4 { - var mask string - if len(m) == 5 { - mask = m[4] - } - i := &InotifyInfo{ - WD: m[1], - Ino: m[2], - Sdev: m[3], - Mask: mask, - } - return i, nil - } - return nil, fmt.Errorf("%w: invalid inode entry: %q", ErrFileParse, line) -} - -// ProcFDInfos represents a list of ProcFDInfo structs. -type ProcFDInfos []ProcFDInfo - -func (p ProcFDInfos) Len() int { return len(p) } -func (p ProcFDInfos) Swap(i, j int) { p[i], p[j] = p[j], p[i] } -func (p ProcFDInfos) Less(i, j int) bool { return p[i].FD < p[j].FD } - -// InotifyWatchLen returns the total number of inotify watches. -func (p ProcFDInfos) InotifyWatchLen() (int, error) { - length := 0 - for _, f := range p { - length += len(f.InotifyInfos) - } - - return length, nil -} diff --git a/vendor/github.com/prometheus/procfs/proc_interrupts.go b/vendor/github.com/prometheus/procfs/proc_interrupts.go deleted file mode 100644 index 86b4b4524..000000000 --- a/vendor/github.com/prometheus/procfs/proc_interrupts.go +++ /dev/null @@ -1,98 +0,0 @@ -// Copyright 2022 The Prometheus Authors -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package procfs - -import ( - "bufio" - "bytes" - "errors" - "fmt" - "io" - "strconv" - "strings" - - "github.com/prometheus/procfs/internal/util" -) - -// Interrupt represents a single interrupt line. -type Interrupt struct { - // Info is the type of interrupt. - Info string - // Devices is the name of the device that is located at that IRQ - Devices string - // Values is the number of interrupts per CPU. - Values []string -} - -// Interrupts models the content of /proc/interrupts. Key is the IRQ number. -// - https://access.redhat.com/documentation/en-us/red_hat_enterprise_linux/6/html/deployment_guide/s2-proc-interrupts -// - https://raspberrypi.stackexchange.com/questions/105802/explanation-of-proc-interrupts-output -type Interrupts map[string]Interrupt - -// Interrupts creates a new instance from a given Proc instance. -func (p Proc) Interrupts() (Interrupts, error) { - data, err := util.ReadFileNoStat(p.path("interrupts")) - if err != nil { - return nil, err - } - return parseInterrupts(bytes.NewReader(data)) -} - -func parseInterrupts(r io.Reader) (Interrupts, error) { - var ( - interrupts = Interrupts{} - scanner = bufio.NewScanner(r) - ) - - if !scanner.Scan() { - return nil, errors.New("interrupts empty") - } - cpuNum := len(strings.Fields(scanner.Text())) // one header per cpu - - for scanner.Scan() { - parts := strings.Fields(scanner.Text()) - if len(parts) == 0 { // skip empty lines - continue - } - if len(parts) < 2 { - return nil, fmt.Errorf("%w: Not enough fields in interrupts (expected 2+ fields but got %d): %s", ErrFileParse, len(parts), parts) - } - intName := parts[0][:len(parts[0])-1] // remove trailing : - - if len(parts) == 2 { - interrupts[intName] = Interrupt{ - Info: "", - Devices: "", - Values: []string{ - parts[1], - }, - } - continue - } - - intr := Interrupt{ - Values: parts[1 : cpuNum+1], - } - - if _, err := strconv.Atoi(intName); err == nil { // numeral interrupt - intr.Info = parts[cpuNum+1] - intr.Devices = strings.Join(parts[cpuNum+2:], " ") - } else { - intr.Info = strings.Join(parts[cpuNum+1:], " ") - } - interrupts[intName] = intr - } - - return interrupts, scanner.Err() -} diff --git a/vendor/github.com/prometheus/procfs/proc_io.go b/vendor/github.com/prometheus/procfs/proc_io.go deleted file mode 100644 index 776f34971..000000000 --- a/vendor/github.com/prometheus/procfs/proc_io.go +++ /dev/null @@ -1,59 +0,0 @@ -// Copyright 2018 The Prometheus Authors -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package procfs - -import ( - "fmt" - - "github.com/prometheus/procfs/internal/util" -) - -// ProcIO models the content of /proc//io. -type ProcIO struct { - // Chars read. - RChar uint64 - // Chars written. - WChar uint64 - // Read syscalls. - SyscR uint64 - // Write syscalls. - SyscW uint64 - // Bytes read. - ReadBytes uint64 - // Bytes written. - WriteBytes uint64 - // Bytes written, but taking into account truncation. See - // Documentation/filesystems/proc.txt in the kernel sources for - // detailed explanation. - CancelledWriteBytes int64 -} - -// IO creates a new ProcIO instance from a given Proc instance. -func (p Proc) IO() (ProcIO, error) { - pio := ProcIO{} - - data, err := util.ReadFileNoStat(p.path("io")) - if err != nil { - return pio, err - } - - ioFormat := "rchar: %d\nwchar: %d\nsyscr: %d\nsyscw: %d\n" + - "read_bytes: %d\nwrite_bytes: %d\n" + - "cancelled_write_bytes: %d\n" - - _, err = fmt.Sscanf(string(data), ioFormat, &pio.RChar, &pio.WChar, &pio.SyscR, - &pio.SyscW, &pio.ReadBytes, &pio.WriteBytes, &pio.CancelledWriteBytes) - - return pio, err -} diff --git a/vendor/github.com/prometheus/procfs/proc_limits.go b/vendor/github.com/prometheus/procfs/proc_limits.go deleted file mode 100644 index c86d815d7..000000000 --- a/vendor/github.com/prometheus/procfs/proc_limits.go +++ /dev/null @@ -1,160 +0,0 @@ -// Copyright 2018 The Prometheus Authors -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package procfs - -import ( - "bufio" - "fmt" - "os" - "regexp" - "strconv" -) - -// ProcLimits represents the soft limits for each of the process's resource -// limits. For more information see getrlimit(2): -// http://man7.org/linux/man-pages/man2/getrlimit.2.html. -type ProcLimits struct { - // CPU time limit in seconds. - CPUTime uint64 - // Maximum size of files that the process may create. - FileSize uint64 - // Maximum size of the process's data segment (initialized data, - // uninitialized data, and heap). - DataSize uint64 - // Maximum size of the process stack in bytes. - StackSize uint64 - // Maximum size of a core file. - CoreFileSize uint64 - // Limit of the process's resident set in pages. - ResidentSet uint64 - // Maximum number of processes that can be created for the real user ID of - // the calling process. - Processes uint64 - // Value one greater than the maximum file descriptor number that can be - // opened by this process. - OpenFiles uint64 - // Maximum number of bytes of memory that may be locked into RAM. - LockedMemory uint64 - // Maximum size of the process's virtual memory address space in bytes. - AddressSpace uint64 - // Limit on the combined number of flock(2) locks and fcntl(2) leases that - // this process may establish. - FileLocks uint64 - // Limit of signals that may be queued for the real user ID of the calling - // process. - PendingSignals uint64 - // Limit on the number of bytes that can be allocated for POSIX message - // queues for the real user ID of the calling process. 
- MsqqueueSize uint64 - // Limit of the nice priority set using setpriority(2) or nice(2). - NicePriority uint64 - // Limit of the real-time priority set using sched_setscheduler(2) or - // sched_setparam(2). - RealtimePriority uint64 - // Limit (in microseconds) on the amount of CPU time that a process - // scheduled under a real-time scheduling policy may consume without making - // a blocking system call. - RealtimeTimeout uint64 -} - -const ( - limitsFields = 4 - limitsUnlimited = "unlimited" -) - -var ( - limitsMatch = regexp.MustCompile(`(Max \w+\s{0,1}?\w*\s{0,1}\w*)\s{2,}(\w+)\s+(\w+)`) -) - -// NewLimits returns the current soft limits of the process. -// -// Deprecated: Use p.Limits() instead. -func (p Proc) NewLimits() (ProcLimits, error) { - return p.Limits() -} - -// Limits returns the current soft limits of the process. -func (p Proc) Limits() (ProcLimits, error) { - f, err := os.Open(p.path("limits")) - if err != nil { - return ProcLimits{}, err - } - defer f.Close() - - var ( - l = ProcLimits{} - s = bufio.NewScanner(f) - ) - - s.Scan() // Skip limits header - - for s.Scan() { - //fields := limitsMatch.Split(s.Text(), limitsFields) - fields := limitsMatch.FindStringSubmatch(s.Text()) - if len(fields) != limitsFields { - return ProcLimits{}, fmt.Errorf("%w: couldn't parse %q line %q", ErrFileParse, f.Name(), s.Text()) - } - - switch fields[1] { - case "Max cpu time": - l.CPUTime, err = parseUint(fields[2]) - case "Max file size": - l.FileSize, err = parseUint(fields[2]) - case "Max data size": - l.DataSize, err = parseUint(fields[2]) - case "Max stack size": - l.StackSize, err = parseUint(fields[2]) - case "Max core file size": - l.CoreFileSize, err = parseUint(fields[2]) - case "Max resident set": - l.ResidentSet, err = parseUint(fields[2]) - case "Max processes": - l.Processes, err = parseUint(fields[2]) - case "Max open files": - l.OpenFiles, err = parseUint(fields[2]) - case "Max locked memory": - l.LockedMemory, err = parseUint(fields[2]) - case "Max address space": - l.AddressSpace, err = parseUint(fields[2]) - case "Max file locks": - l.FileLocks, err = parseUint(fields[2]) - case "Max pending signals": - l.PendingSignals, err = parseUint(fields[2]) - case "Max msgqueue size": - l.MsqqueueSize, err = parseUint(fields[2]) - case "Max nice priority": - l.NicePriority, err = parseUint(fields[2]) - case "Max realtime priority": - l.RealtimePriority, err = parseUint(fields[2]) - case "Max realtime timeout": - l.RealtimeTimeout, err = parseUint(fields[2]) - } - if err != nil { - return ProcLimits{}, err - } - } - - return l, s.Err() -} - -func parseUint(s string) (uint64, error) { - if s == limitsUnlimited { - return 18446744073709551615, nil - } - i, err := strconv.ParseUint(s, 10, 64) - if err != nil { - return 0, fmt.Errorf("%s: couldn't parse value %q: %w", ErrFileParse, s, err) - } - return i, nil -} diff --git a/vendor/github.com/prometheus/procfs/proc_maps.go b/vendor/github.com/prometheus/procfs/proc_maps.go deleted file mode 100644 index 7e75c286b..000000000 --- a/vendor/github.com/prometheus/procfs/proc_maps.go +++ /dev/null @@ -1,211 +0,0 @@ -// Copyright 2019 The Prometheus Authors -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -//go:build (aix || darwin || dragonfly || freebsd || linux || netbsd || openbsd || solaris) && !js -// +build aix darwin dragonfly freebsd linux netbsd openbsd solaris -// +build !js - -package procfs - -import ( - "bufio" - "fmt" - "os" - "strconv" - "strings" - - "golang.org/x/sys/unix" -) - -// ProcMapPermissions contains permission settings read from `/proc/[pid]/maps`. -type ProcMapPermissions struct { - // mapping has the [R]ead flag set - Read bool - // mapping has the [W]rite flag set - Write bool - // mapping has the [X]ecutable flag set - Execute bool - // mapping has the [S]hared flag set - Shared bool - // mapping is marked as [P]rivate (copy on write) - Private bool -} - -// ProcMap contains the process memory-mappings of the process -// read from `/proc/[pid]/maps`. -type ProcMap struct { - // The start address of current mapping. - StartAddr uintptr - // The end address of the current mapping - EndAddr uintptr - // The permissions for this mapping - Perms *ProcMapPermissions - // The current offset into the file/fd (e.g., shared libs) - Offset int64 - // Device owner of this mapping (major:minor) in Mkdev format. - Dev uint64 - // The inode of the device above - Inode uint64 - // The file or psuedofile (or empty==anonymous) - Pathname string -} - -// parseDevice parses the device token of a line and converts it to a dev_t -// (mkdev) like structure. -func parseDevice(s string) (uint64, error) { - i := strings.Index(s, ":") - if i == -1 { - return 0, fmt.Errorf("%w: expected separator `:` in %s", ErrFileParse, s) - } - - major, err := strconv.ParseUint(s[0:i], 16, 0) - if err != nil { - return 0, err - } - - minor, err := strconv.ParseUint(s[i+1:], 16, 0) - if err != nil { - return 0, err - } - - return unix.Mkdev(uint32(major), uint32(minor)), nil -} - -// parseAddress converts a hex-string to a uintptr. -func parseAddress(s string) (uintptr, error) { - a, err := strconv.ParseUint(s, 16, 0) - if err != nil { - return 0, err - } - - return uintptr(a), nil -} - -// parseAddresses parses the start-end address. -func parseAddresses(s string) (uintptr, uintptr, error) { - idx := strings.Index(s, "-") - if idx == -1 { - return 0, 0, fmt.Errorf("%w: expected separator `-` in %s", ErrFileParse, s) - } - - saddr, err := parseAddress(s[0:idx]) - if err != nil { - return 0, 0, err - } - - eaddr, err := parseAddress(s[idx+1:]) - if err != nil { - return 0, 0, err - } - - return saddr, eaddr, nil -} - -// parsePermissions parses a token and returns any that are set. -func parsePermissions(s string) (*ProcMapPermissions, error) { - if len(s) < 4 { - return nil, fmt.Errorf("%w: invalid permissions token", ErrFileParse) - } - - perms := ProcMapPermissions{} - for _, ch := range s { - switch ch { - case 'r': - perms.Read = true - case 'w': - perms.Write = true - case 'x': - perms.Execute = true - case 'p': - perms.Private = true - case 's': - perms.Shared = true - } - } - - return &perms, nil -} - -// parseProcMap will attempt to parse a single line within a proc/[pid]/maps -// buffer. 
-func parseProcMap(text string) (*ProcMap, error) { - fields := strings.Fields(text) - if len(fields) < 5 { - return nil, fmt.Errorf("%w: truncated procmap entry", ErrFileParse) - } - - saddr, eaddr, err := parseAddresses(fields[0]) - if err != nil { - return nil, err - } - - perms, err := parsePermissions(fields[1]) - if err != nil { - return nil, err - } - - offset, err := strconv.ParseInt(fields[2], 16, 0) - if err != nil { - return nil, err - } - - device, err := parseDevice(fields[3]) - if err != nil { - return nil, err - } - - inode, err := strconv.ParseUint(fields[4], 10, 0) - if err != nil { - return nil, err - } - - pathname := "" - - if len(fields) >= 5 { - pathname = strings.Join(fields[5:], " ") - } - - return &ProcMap{ - StartAddr: saddr, - EndAddr: eaddr, - Perms: perms, - Offset: offset, - Dev: device, - Inode: inode, - Pathname: pathname, - }, nil -} - -// ProcMaps reads from /proc/[pid]/maps to get the memory-mappings of the -// process. -func (p Proc) ProcMaps() ([]*ProcMap, error) { - file, err := os.Open(p.path("maps")) - if err != nil { - return nil, err - } - defer file.Close() - - maps := []*ProcMap{} - scan := bufio.NewScanner(file) - - for scan.Scan() { - m, err := parseProcMap(scan.Text()) - if err != nil { - return nil, err - } - - maps = append(maps, m) - } - - return maps, nil -} diff --git a/vendor/github.com/prometheus/procfs/proc_netstat.go b/vendor/github.com/prometheus/procfs/proc_netstat.go deleted file mode 100644 index 8e3ff4d79..000000000 --- a/vendor/github.com/prometheus/procfs/proc_netstat.go +++ /dev/null @@ -1,443 +0,0 @@ -// Copyright 2022 The Prometheus Authors -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package procfs - -import ( - "bufio" - "bytes" - "fmt" - "io" - "strconv" - "strings" - - "github.com/prometheus/procfs/internal/util" -) - -// ProcNetstat models the content of /proc//net/netstat. -type ProcNetstat struct { - // The process ID. 
- PID int - TcpExt - IpExt -} - -type TcpExt struct { // nolint:revive - SyncookiesSent *float64 - SyncookiesRecv *float64 - SyncookiesFailed *float64 - EmbryonicRsts *float64 - PruneCalled *float64 - RcvPruned *float64 - OfoPruned *float64 - OutOfWindowIcmps *float64 - LockDroppedIcmps *float64 - ArpFilter *float64 - TW *float64 - TWRecycled *float64 - TWKilled *float64 - PAWSActive *float64 - PAWSEstab *float64 - DelayedACKs *float64 - DelayedACKLocked *float64 - DelayedACKLost *float64 - ListenOverflows *float64 - ListenDrops *float64 - TCPHPHits *float64 - TCPPureAcks *float64 - TCPHPAcks *float64 - TCPRenoRecovery *float64 - TCPSackRecovery *float64 - TCPSACKReneging *float64 - TCPSACKReorder *float64 - TCPRenoReorder *float64 - TCPTSReorder *float64 - TCPFullUndo *float64 - TCPPartialUndo *float64 - TCPDSACKUndo *float64 - TCPLossUndo *float64 - TCPLostRetransmit *float64 - TCPRenoFailures *float64 - TCPSackFailures *float64 - TCPLossFailures *float64 - TCPFastRetrans *float64 - TCPSlowStartRetrans *float64 - TCPTimeouts *float64 - TCPLossProbes *float64 - TCPLossProbeRecovery *float64 - TCPRenoRecoveryFail *float64 - TCPSackRecoveryFail *float64 - TCPRcvCollapsed *float64 - TCPDSACKOldSent *float64 - TCPDSACKOfoSent *float64 - TCPDSACKRecv *float64 - TCPDSACKOfoRecv *float64 - TCPAbortOnData *float64 - TCPAbortOnClose *float64 - TCPAbortOnMemory *float64 - TCPAbortOnTimeout *float64 - TCPAbortOnLinger *float64 - TCPAbortFailed *float64 - TCPMemoryPressures *float64 - TCPMemoryPressuresChrono *float64 - TCPSACKDiscard *float64 - TCPDSACKIgnoredOld *float64 - TCPDSACKIgnoredNoUndo *float64 - TCPSpuriousRTOs *float64 - TCPMD5NotFound *float64 - TCPMD5Unexpected *float64 - TCPMD5Failure *float64 - TCPSackShifted *float64 - TCPSackMerged *float64 - TCPSackShiftFallback *float64 - TCPBacklogDrop *float64 - PFMemallocDrop *float64 - TCPMinTTLDrop *float64 - TCPDeferAcceptDrop *float64 - IPReversePathFilter *float64 - TCPTimeWaitOverflow *float64 - TCPReqQFullDoCookies *float64 - TCPReqQFullDrop *float64 - TCPRetransFail *float64 - TCPRcvCoalesce *float64 - TCPRcvQDrop *float64 - TCPOFOQueue *float64 - TCPOFODrop *float64 - TCPOFOMerge *float64 - TCPChallengeACK *float64 - TCPSYNChallenge *float64 - TCPFastOpenActive *float64 - TCPFastOpenActiveFail *float64 - TCPFastOpenPassive *float64 - TCPFastOpenPassiveFail *float64 - TCPFastOpenListenOverflow *float64 - TCPFastOpenCookieReqd *float64 - TCPFastOpenBlackhole *float64 - TCPSpuriousRtxHostQueues *float64 - BusyPollRxPackets *float64 - TCPAutoCorking *float64 - TCPFromZeroWindowAdv *float64 - TCPToZeroWindowAdv *float64 - TCPWantZeroWindowAdv *float64 - TCPSynRetrans *float64 - TCPOrigDataSent *float64 - TCPHystartTrainDetect *float64 - TCPHystartTrainCwnd *float64 - TCPHystartDelayDetect *float64 - TCPHystartDelayCwnd *float64 - TCPACKSkippedSynRecv *float64 - TCPACKSkippedPAWS *float64 - TCPACKSkippedSeq *float64 - TCPACKSkippedFinWait2 *float64 - TCPACKSkippedTimeWait *float64 - TCPACKSkippedChallenge *float64 - TCPWinProbe *float64 - TCPKeepAlive *float64 - TCPMTUPFail *float64 - TCPMTUPSuccess *float64 - TCPWqueueTooBig *float64 -} - -type IpExt struct { // nolint:revive - InNoRoutes *float64 - InTruncatedPkts *float64 - InMcastPkts *float64 - OutMcastPkts *float64 - InBcastPkts *float64 - OutBcastPkts *float64 - InOctets *float64 - OutOctets *float64 - InMcastOctets *float64 - OutMcastOctets *float64 - InBcastOctets *float64 - OutBcastOctets *float64 - InCsumErrors *float64 - InNoECTPkts *float64 - InECT1Pkts *float64 - InECT0Pkts 
*float64 - InCEPkts *float64 - ReasmOverlaps *float64 -} - -func (p Proc) Netstat() (ProcNetstat, error) { - filename := p.path("net/netstat") - data, err := util.ReadFileNoStat(filename) - if err != nil { - return ProcNetstat{PID: p.PID}, err - } - procNetstat, err := parseProcNetstat(bytes.NewReader(data), filename) - procNetstat.PID = p.PID - return procNetstat, err -} - -// parseProcNetstat parses the metrics from proc//net/netstat file -// and returns a ProcNetstat structure. -func parseProcNetstat(r io.Reader, fileName string) (ProcNetstat, error) { - var ( - scanner = bufio.NewScanner(r) - procNetstat = ProcNetstat{} - ) - - for scanner.Scan() { - nameParts := strings.Split(scanner.Text(), " ") - scanner.Scan() - valueParts := strings.Split(scanner.Text(), " ") - // Remove trailing :. - protocol := strings.TrimSuffix(nameParts[0], ":") - if len(nameParts) != len(valueParts) { - return procNetstat, fmt.Errorf("%w: mismatch field count mismatch in %s: %s", - ErrFileParse, fileName, protocol) - } - for i := 1; i < len(nameParts); i++ { - value, err := strconv.ParseFloat(valueParts[i], 64) - if err != nil { - return procNetstat, err - } - key := nameParts[i] - - switch protocol { - case "TcpExt": - switch key { - case "SyncookiesSent": - procNetstat.TcpExt.SyncookiesSent = &value - case "SyncookiesRecv": - procNetstat.TcpExt.SyncookiesRecv = &value - case "SyncookiesFailed": - procNetstat.TcpExt.SyncookiesFailed = &value - case "EmbryonicRsts": - procNetstat.TcpExt.EmbryonicRsts = &value - case "PruneCalled": - procNetstat.TcpExt.PruneCalled = &value - case "RcvPruned": - procNetstat.TcpExt.RcvPruned = &value - case "OfoPruned": - procNetstat.TcpExt.OfoPruned = &value - case "OutOfWindowIcmps": - procNetstat.TcpExt.OutOfWindowIcmps = &value - case "LockDroppedIcmps": - procNetstat.TcpExt.LockDroppedIcmps = &value - case "ArpFilter": - procNetstat.TcpExt.ArpFilter = &value - case "TW": - procNetstat.TcpExt.TW = &value - case "TWRecycled": - procNetstat.TcpExt.TWRecycled = &value - case "TWKilled": - procNetstat.TcpExt.TWKilled = &value - case "PAWSActive": - procNetstat.TcpExt.PAWSActive = &value - case "PAWSEstab": - procNetstat.TcpExt.PAWSEstab = &value - case "DelayedACKs": - procNetstat.TcpExt.DelayedACKs = &value - case "DelayedACKLocked": - procNetstat.TcpExt.DelayedACKLocked = &value - case "DelayedACKLost": - procNetstat.TcpExt.DelayedACKLost = &value - case "ListenOverflows": - procNetstat.TcpExt.ListenOverflows = &value - case "ListenDrops": - procNetstat.TcpExt.ListenDrops = &value - case "TCPHPHits": - procNetstat.TcpExt.TCPHPHits = &value - case "TCPPureAcks": - procNetstat.TcpExt.TCPPureAcks = &value - case "TCPHPAcks": - procNetstat.TcpExt.TCPHPAcks = &value - case "TCPRenoRecovery": - procNetstat.TcpExt.TCPRenoRecovery = &value - case "TCPSackRecovery": - procNetstat.TcpExt.TCPSackRecovery = &value - case "TCPSACKReneging": - procNetstat.TcpExt.TCPSACKReneging = &value - case "TCPSACKReorder": - procNetstat.TcpExt.TCPSACKReorder = &value - case "TCPRenoReorder": - procNetstat.TcpExt.TCPRenoReorder = &value - case "TCPTSReorder": - procNetstat.TcpExt.TCPTSReorder = &value - case "TCPFullUndo": - procNetstat.TcpExt.TCPFullUndo = &value - case "TCPPartialUndo": - procNetstat.TcpExt.TCPPartialUndo = &value - case "TCPDSACKUndo": - procNetstat.TcpExt.TCPDSACKUndo = &value - case "TCPLossUndo": - procNetstat.TcpExt.TCPLossUndo = &value - case "TCPLostRetransmit": - procNetstat.TcpExt.TCPLostRetransmit = &value - case "TCPRenoFailures": - procNetstat.TcpExt.TCPRenoFailures = 
&value - case "TCPSackFailures": - procNetstat.TcpExt.TCPSackFailures = &value - case "TCPLossFailures": - procNetstat.TcpExt.TCPLossFailures = &value - case "TCPFastRetrans": - procNetstat.TcpExt.TCPFastRetrans = &value - case "TCPSlowStartRetrans": - procNetstat.TcpExt.TCPSlowStartRetrans = &value - case "TCPTimeouts": - procNetstat.TcpExt.TCPTimeouts = &value - case "TCPLossProbes": - procNetstat.TcpExt.TCPLossProbes = &value - case "TCPLossProbeRecovery": - procNetstat.TcpExt.TCPLossProbeRecovery = &value - case "TCPRenoRecoveryFail": - procNetstat.TcpExt.TCPRenoRecoveryFail = &value - case "TCPSackRecoveryFail": - procNetstat.TcpExt.TCPSackRecoveryFail = &value - case "TCPRcvCollapsed": - procNetstat.TcpExt.TCPRcvCollapsed = &value - case "TCPDSACKOldSent": - procNetstat.TcpExt.TCPDSACKOldSent = &value - case "TCPDSACKOfoSent": - procNetstat.TcpExt.TCPDSACKOfoSent = &value - case "TCPDSACKRecv": - procNetstat.TcpExt.TCPDSACKRecv = &value - case "TCPDSACKOfoRecv": - procNetstat.TcpExt.TCPDSACKOfoRecv = &value - case "TCPAbortOnData": - procNetstat.TcpExt.TCPAbortOnData = &value - case "TCPAbortOnClose": - procNetstat.TcpExt.TCPAbortOnClose = &value - case "TCPDeferAcceptDrop": - procNetstat.TcpExt.TCPDeferAcceptDrop = &value - case "IPReversePathFilter": - procNetstat.TcpExt.IPReversePathFilter = &value - case "TCPTimeWaitOverflow": - procNetstat.TcpExt.TCPTimeWaitOverflow = &value - case "TCPReqQFullDoCookies": - procNetstat.TcpExt.TCPReqQFullDoCookies = &value - case "TCPReqQFullDrop": - procNetstat.TcpExt.TCPReqQFullDrop = &value - case "TCPRetransFail": - procNetstat.TcpExt.TCPRetransFail = &value - case "TCPRcvCoalesce": - procNetstat.TcpExt.TCPRcvCoalesce = &value - case "TCPRcvQDrop": - procNetstat.TcpExt.TCPRcvQDrop = &value - case "TCPOFOQueue": - procNetstat.TcpExt.TCPOFOQueue = &value - case "TCPOFODrop": - procNetstat.TcpExt.TCPOFODrop = &value - case "TCPOFOMerge": - procNetstat.TcpExt.TCPOFOMerge = &value - case "TCPChallengeACK": - procNetstat.TcpExt.TCPChallengeACK = &value - case "TCPSYNChallenge": - procNetstat.TcpExt.TCPSYNChallenge = &value - case "TCPFastOpenActive": - procNetstat.TcpExt.TCPFastOpenActive = &value - case "TCPFastOpenActiveFail": - procNetstat.TcpExt.TCPFastOpenActiveFail = &value - case "TCPFastOpenPassive": - procNetstat.TcpExt.TCPFastOpenPassive = &value - case "TCPFastOpenPassiveFail": - procNetstat.TcpExt.TCPFastOpenPassiveFail = &value - case "TCPFastOpenListenOverflow": - procNetstat.TcpExt.TCPFastOpenListenOverflow = &value - case "TCPFastOpenCookieReqd": - procNetstat.TcpExt.TCPFastOpenCookieReqd = &value - case "TCPFastOpenBlackhole": - procNetstat.TcpExt.TCPFastOpenBlackhole = &value - case "TCPSpuriousRtxHostQueues": - procNetstat.TcpExt.TCPSpuriousRtxHostQueues = &value - case "BusyPollRxPackets": - procNetstat.TcpExt.BusyPollRxPackets = &value - case "TCPAutoCorking": - procNetstat.TcpExt.TCPAutoCorking = &value - case "TCPFromZeroWindowAdv": - procNetstat.TcpExt.TCPFromZeroWindowAdv = &value - case "TCPToZeroWindowAdv": - procNetstat.TcpExt.TCPToZeroWindowAdv = &value - case "TCPWantZeroWindowAdv": - procNetstat.TcpExt.TCPWantZeroWindowAdv = &value - case "TCPSynRetrans": - procNetstat.TcpExt.TCPSynRetrans = &value - case "TCPOrigDataSent": - procNetstat.TcpExt.TCPOrigDataSent = &value - case "TCPHystartTrainDetect": - procNetstat.TcpExt.TCPHystartTrainDetect = &value - case "TCPHystartTrainCwnd": - procNetstat.TcpExt.TCPHystartTrainCwnd = &value - case "TCPHystartDelayDetect": - procNetstat.TcpExt.TCPHystartDelayDetect = &value - 
case "TCPHystartDelayCwnd": - procNetstat.TcpExt.TCPHystartDelayCwnd = &value - case "TCPACKSkippedSynRecv": - procNetstat.TcpExt.TCPACKSkippedSynRecv = &value - case "TCPACKSkippedPAWS": - procNetstat.TcpExt.TCPACKSkippedPAWS = &value - case "TCPACKSkippedSeq": - procNetstat.TcpExt.TCPACKSkippedSeq = &value - case "TCPACKSkippedFinWait2": - procNetstat.TcpExt.TCPACKSkippedFinWait2 = &value - case "TCPACKSkippedTimeWait": - procNetstat.TcpExt.TCPACKSkippedTimeWait = &value - case "TCPACKSkippedChallenge": - procNetstat.TcpExt.TCPACKSkippedChallenge = &value - case "TCPWinProbe": - procNetstat.TcpExt.TCPWinProbe = &value - case "TCPKeepAlive": - procNetstat.TcpExt.TCPKeepAlive = &value - case "TCPMTUPFail": - procNetstat.TcpExt.TCPMTUPFail = &value - case "TCPMTUPSuccess": - procNetstat.TcpExt.TCPMTUPSuccess = &value - case "TCPWqueueTooBig": - procNetstat.TcpExt.TCPWqueueTooBig = &value - } - case "IpExt": - switch key { - case "InNoRoutes": - procNetstat.IpExt.InNoRoutes = &value - case "InTruncatedPkts": - procNetstat.IpExt.InTruncatedPkts = &value - case "InMcastPkts": - procNetstat.IpExt.InMcastPkts = &value - case "OutMcastPkts": - procNetstat.IpExt.OutMcastPkts = &value - case "InBcastPkts": - procNetstat.IpExt.InBcastPkts = &value - case "OutBcastPkts": - procNetstat.IpExt.OutBcastPkts = &value - case "InOctets": - procNetstat.IpExt.InOctets = &value - case "OutOctets": - procNetstat.IpExt.OutOctets = &value - case "InMcastOctets": - procNetstat.IpExt.InMcastOctets = &value - case "OutMcastOctets": - procNetstat.IpExt.OutMcastOctets = &value - case "InBcastOctets": - procNetstat.IpExt.InBcastOctets = &value - case "OutBcastOctets": - procNetstat.IpExt.OutBcastOctets = &value - case "InCsumErrors": - procNetstat.IpExt.InCsumErrors = &value - case "InNoECTPkts": - procNetstat.IpExt.InNoECTPkts = &value - case "InECT1Pkts": - procNetstat.IpExt.InECT1Pkts = &value - case "InECT0Pkts": - procNetstat.IpExt.InECT0Pkts = &value - case "InCEPkts": - procNetstat.IpExt.InCEPkts = &value - case "ReasmOverlaps": - procNetstat.IpExt.ReasmOverlaps = &value - } - } - } - } - return procNetstat, scanner.Err() -} diff --git a/vendor/github.com/prometheus/procfs/proc_ns.go b/vendor/github.com/prometheus/procfs/proc_ns.go deleted file mode 100644 index c22666750..000000000 --- a/vendor/github.com/prometheus/procfs/proc_ns.go +++ /dev/null @@ -1,68 +0,0 @@ -// Copyright 2018 The Prometheus Authors -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package procfs - -import ( - "fmt" - "os" - "strconv" - "strings" -) - -// Namespace represents a single namespace of a process. -type Namespace struct { - Type string // Namespace type. - Inode uint32 // Inode number of the namespace. If two processes are in the same namespace their inodes will match. -} - -// Namespaces contains all of the namespaces that the process is contained in. -type Namespaces map[string]Namespace - -// Namespaces reads from /proc//ns/* to get the namespaces of which the -// process is a member. 
-func (p Proc) Namespaces() (Namespaces, error) { - d, err := os.Open(p.path("ns")) - if err != nil { - return nil, err - } - defer d.Close() - - names, err := d.Readdirnames(-1) - if err != nil { - return nil, fmt.Errorf("%s: failed to read contents of ns dir: %w", ErrFileRead, err) - } - - ns := make(Namespaces, len(names)) - for _, name := range names { - target, err := os.Readlink(p.path("ns", name)) - if err != nil { - return nil, err - } - - fields := strings.SplitN(target, ":", 2) - if len(fields) != 2 { - return nil, fmt.Errorf("%w: namespace type and inode from %q", ErrFileParse, target) - } - - typ := fields[0] - inode, err := strconv.ParseUint(strings.Trim(fields[1], "[]"), 10, 32) - if err != nil { - return nil, fmt.Errorf("%s: inode from %q: %w", ErrFileParse, fields[1], err) - } - - ns[name] = Namespace{typ, uint32(inode)} - } - - return ns, nil -} diff --git a/vendor/github.com/prometheus/procfs/proc_psi.go b/vendor/github.com/prometheus/procfs/proc_psi.go deleted file mode 100644 index fe9dbb425..000000000 --- a/vendor/github.com/prometheus/procfs/proc_psi.go +++ /dev/null @@ -1,102 +0,0 @@ -// Copyright 2019 The Prometheus Authors -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package procfs - -// The PSI / pressure interface is described at -// https://git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git/tree/Documentation/accounting/psi.txt -// Each resource (cpu, io, memory, ...) is exposed as a single file. -// Each file may contain up to two lines, one for "some" pressure and one for "full" pressure. -// Each line contains several averages (over n seconds) and a total in µs. -// -// Example io pressure file: -// > some avg10=0.06 avg60=0.21 avg300=0.99 total=8537362 -// > full avg10=0.00 avg60=0.13 avg300=0.96 total=8183134 - -import ( - "bufio" - "bytes" - "fmt" - "io" - "strings" - - "github.com/prometheus/procfs/internal/util" -) - -const lineFormat = "avg10=%f avg60=%f avg300=%f total=%d" - -// PSILine is a single line of values as returned by `/proc/pressure/*`. -// -// The Avg entries are averages over n seconds, as a percentage. -// The Total line is in microseconds. -type PSILine struct { - Avg10 float64 - Avg60 float64 - Avg300 float64 - Total uint64 -} - -// PSIStats represent pressure stall information from /proc/pressure/* -// -// "Some" indicates the share of time in which at least some tasks are stalled. -// "Full" indicates the share of time in which all non-idle tasks are stalled simultaneously. -type PSIStats struct { - Some *PSILine - Full *PSILine -} - -// PSIStatsForResource reads pressure stall information for the specified -// resource from /proc/pressure/. At time of writing this can be -// either "cpu", "memory" or "io". 
-func (fs FS) PSIStatsForResource(resource string) (PSIStats, error) { - data, err := util.ReadFileNoStat(fs.proc.Path(fmt.Sprintf("%s/%s", "pressure", resource))) - if err != nil { - return PSIStats{}, fmt.Errorf("%s: psi_stats: unavailable for %q: %w", ErrFileRead, resource, err) - } - - return parsePSIStats(bytes.NewReader(data)) -} - -// parsePSIStats parses the specified file for pressure stall information. -func parsePSIStats(r io.Reader) (PSIStats, error) { - psiStats := PSIStats{} - - scanner := bufio.NewScanner(r) - for scanner.Scan() { - l := scanner.Text() - prefix := strings.Split(l, " ")[0] - switch prefix { - case "some": - psi := PSILine{} - _, err := fmt.Sscanf(l, fmt.Sprintf("some %s", lineFormat), &psi.Avg10, &psi.Avg60, &psi.Avg300, &psi.Total) - if err != nil { - return PSIStats{}, err - } - psiStats.Some = &psi - case "full": - psi := PSILine{} - _, err := fmt.Sscanf(l, fmt.Sprintf("full %s", lineFormat), &psi.Avg10, &psi.Avg60, &psi.Avg300, &psi.Total) - if err != nil { - return PSIStats{}, err - } - psiStats.Full = &psi - default: - // If we encounter a line with an unknown prefix, ignore it and move on - // Should new measurement types be added in the future we'll simply ignore them instead - // of erroring on retrieval - continue - } - } - - return psiStats, nil -} diff --git a/vendor/github.com/prometheus/procfs/proc_smaps.go b/vendor/github.com/prometheus/procfs/proc_smaps.go deleted file mode 100644 index ad8785a40..000000000 --- a/vendor/github.com/prometheus/procfs/proc_smaps.go +++ /dev/null @@ -1,166 +0,0 @@ -// Copyright 2020 The Prometheus Authors -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -//go:build !windows -// +build !windows - -package procfs - -import ( - "bufio" - "errors" - "fmt" - "os" - "regexp" - "strconv" - "strings" - - "github.com/prometheus/procfs/internal/util" -) - -var ( - // match the header line before each mapped zone in `/proc/pid/smaps`. - procSMapsHeaderLine = regexp.MustCompile(`^[a-f0-9].*$`) -) - -type ProcSMapsRollup struct { - // Amount of the mapping that is currently resident in RAM. - Rss uint64 - // Process's proportional share of this mapping. - Pss uint64 - // Size in bytes of clean shared pages. - SharedClean uint64 - // Size in bytes of dirty shared pages. - SharedDirty uint64 - // Size in bytes of clean private pages. - PrivateClean uint64 - // Size in bytes of dirty private pages. - PrivateDirty uint64 - // Amount of memory currently marked as referenced or accessed. - Referenced uint64 - // Amount of memory that does not belong to any file. - Anonymous uint64 - // Amount would-be-anonymous memory currently on swap. - Swap uint64 - // Process's proportional memory on swap. - SwapPss uint64 -} - -// ProcSMapsRollup reads from /proc/[pid]/smaps_rollup to get summed memory information of the -// process. -// -// If smaps_rollup does not exists (require kernel >= 4.15), the content of /proc/pid/smaps will -// we read and summed. 
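A hedged usage sketch for ProcSMapsRollup; as before, fs.Proc and the PID are placeholders for illustration only:

package main

import (
	"fmt"
	"log"

	"github.com/prometheus/procfs"
)

func main() {
	fs, err := procfs.NewFS(procfs.DefaultMountPoint)
	if err != nil {
		log.Fatal(err)
	}
	p, err := fs.Proc(1) // example PID only
	if err != nil {
		log.Fatal(err)
	}
	// Falls back to summing /proc/<pid>/smaps on kernels without smaps_rollup.
	rollup, err := p.ProcSMapsRollup()
	if err != nil {
		log.Fatal(err)
	}
	fmt.Printf("RSS=%d bytes PSS=%d bytes Swap=%d bytes\n", rollup.Rss, rollup.Pss, rollup.Swap)
}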
-func (p Proc) ProcSMapsRollup() (ProcSMapsRollup, error) { - data, err := util.ReadFileNoStat(p.path("smaps_rollup")) - if err != nil && os.IsNotExist(err) { - return p.procSMapsRollupManual() - } - if err != nil { - return ProcSMapsRollup{}, err - } - - lines := strings.Split(string(data), "\n") - smaps := ProcSMapsRollup{} - - // skip first line which don't contains information we need - lines = lines[1:] - for _, line := range lines { - if line == "" { - continue - } - - if err := smaps.parseLine(line); err != nil { - return ProcSMapsRollup{}, err - } - } - - return smaps, nil -} - -// Read /proc/pid/smaps and do the roll-up in Go code. -func (p Proc) procSMapsRollupManual() (ProcSMapsRollup, error) { - file, err := os.Open(p.path("smaps")) - if err != nil { - return ProcSMapsRollup{}, err - } - defer file.Close() - - smaps := ProcSMapsRollup{} - scan := bufio.NewScanner(file) - - for scan.Scan() { - line := scan.Text() - - if procSMapsHeaderLine.MatchString(line) { - continue - } - - if err := smaps.parseLine(line); err != nil { - return ProcSMapsRollup{}, err - } - } - - return smaps, nil -} - -func (s *ProcSMapsRollup) parseLine(line string) error { - kv := strings.SplitN(line, ":", 2) - if len(kv) != 2 { - fmt.Println(line) - return errors.New("invalid net/dev line, missing colon") - } - - k := kv[0] - if k == "VmFlags" { - return nil - } - - v := strings.TrimSpace(kv[1]) - v = strings.TrimRight(v, " kB") - - vKBytes, err := strconv.ParseUint(v, 10, 64) - if err != nil { - return err - } - vBytes := vKBytes * 1024 - - s.addValue(k, vBytes) - - return nil -} - -func (s *ProcSMapsRollup) addValue(k string, vUintBytes uint64) { - switch k { - case "Rss": - s.Rss += vUintBytes - case "Pss": - s.Pss += vUintBytes - case "Shared_Clean": - s.SharedClean += vUintBytes - case "Shared_Dirty": - s.SharedDirty += vUintBytes - case "Private_Clean": - s.PrivateClean += vUintBytes - case "Private_Dirty": - s.PrivateDirty += vUintBytes - case "Referenced": - s.Referenced += vUintBytes - case "Anonymous": - s.Anonymous += vUintBytes - case "Swap": - s.Swap += vUintBytes - case "SwapPss": - s.SwapPss += vUintBytes - } -} diff --git a/vendor/github.com/prometheus/procfs/proc_snmp.go b/vendor/github.com/prometheus/procfs/proc_snmp.go deleted file mode 100644 index b9d2cf642..000000000 --- a/vendor/github.com/prometheus/procfs/proc_snmp.go +++ /dev/null @@ -1,353 +0,0 @@ -// Copyright 2022 The Prometheus Authors -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package procfs - -import ( - "bufio" - "bytes" - "fmt" - "io" - "strconv" - "strings" - - "github.com/prometheus/procfs/internal/util" -) - -// ProcSnmp models the content of /proc//net/snmp. -type ProcSnmp struct { - // The process ID. 
- PID int - Ip - Icmp - IcmpMsg - Tcp - Udp - UdpLite -} - -type Ip struct { // nolint:revive - Forwarding *float64 - DefaultTTL *float64 - InReceives *float64 - InHdrErrors *float64 - InAddrErrors *float64 - ForwDatagrams *float64 - InUnknownProtos *float64 - InDiscards *float64 - InDelivers *float64 - OutRequests *float64 - OutDiscards *float64 - OutNoRoutes *float64 - ReasmTimeout *float64 - ReasmReqds *float64 - ReasmOKs *float64 - ReasmFails *float64 - FragOKs *float64 - FragFails *float64 - FragCreates *float64 -} - -type Icmp struct { // nolint:revive - InMsgs *float64 - InErrors *float64 - InCsumErrors *float64 - InDestUnreachs *float64 - InTimeExcds *float64 - InParmProbs *float64 - InSrcQuenchs *float64 - InRedirects *float64 - InEchos *float64 - InEchoReps *float64 - InTimestamps *float64 - InTimestampReps *float64 - InAddrMasks *float64 - InAddrMaskReps *float64 - OutMsgs *float64 - OutErrors *float64 - OutDestUnreachs *float64 - OutTimeExcds *float64 - OutParmProbs *float64 - OutSrcQuenchs *float64 - OutRedirects *float64 - OutEchos *float64 - OutEchoReps *float64 - OutTimestamps *float64 - OutTimestampReps *float64 - OutAddrMasks *float64 - OutAddrMaskReps *float64 -} - -type IcmpMsg struct { - InType3 *float64 - OutType3 *float64 -} - -type Tcp struct { // nolint:revive - RtoAlgorithm *float64 - RtoMin *float64 - RtoMax *float64 - MaxConn *float64 - ActiveOpens *float64 - PassiveOpens *float64 - AttemptFails *float64 - EstabResets *float64 - CurrEstab *float64 - InSegs *float64 - OutSegs *float64 - RetransSegs *float64 - InErrs *float64 - OutRsts *float64 - InCsumErrors *float64 -} - -type Udp struct { // nolint:revive - InDatagrams *float64 - NoPorts *float64 - InErrors *float64 - OutDatagrams *float64 - RcvbufErrors *float64 - SndbufErrors *float64 - InCsumErrors *float64 - IgnoredMulti *float64 -} - -type UdpLite struct { // nolint:revive - InDatagrams *float64 - NoPorts *float64 - InErrors *float64 - OutDatagrams *float64 - RcvbufErrors *float64 - SndbufErrors *float64 - InCsumErrors *float64 - IgnoredMulti *float64 -} - -func (p Proc) Snmp() (ProcSnmp, error) { - filename := p.path("net/snmp") - data, err := util.ReadFileNoStat(filename) - if err != nil { - return ProcSnmp{PID: p.PID}, err - } - procSnmp, err := parseSnmp(bytes.NewReader(data), filename) - procSnmp.PID = p.PID - return procSnmp, err -} - -// parseSnmp parses the metrics from proc//net/snmp file -// and returns a map contains those metrics (e.g. {"Ip": {"Forwarding": 2}}). -func parseSnmp(r io.Reader, fileName string) (ProcSnmp, error) { - var ( - scanner = bufio.NewScanner(r) - procSnmp = ProcSnmp{} - ) - - for scanner.Scan() { - nameParts := strings.Split(scanner.Text(), " ") - scanner.Scan() - valueParts := strings.Split(scanner.Text(), " ") - // Remove trailing :. 
- protocol := strings.TrimSuffix(nameParts[0], ":") - if len(nameParts) != len(valueParts) { - return procSnmp, fmt.Errorf("%w: mismatch field count mismatch in %s: %s", - ErrFileParse, fileName, protocol) - } - for i := 1; i < len(nameParts); i++ { - value, err := strconv.ParseFloat(valueParts[i], 64) - if err != nil { - return procSnmp, err - } - key := nameParts[i] - - switch protocol { - case "Ip": - switch key { - case "Forwarding": - procSnmp.Ip.Forwarding = &value - case "DefaultTTL": - procSnmp.Ip.DefaultTTL = &value - case "InReceives": - procSnmp.Ip.InReceives = &value - case "InHdrErrors": - procSnmp.Ip.InHdrErrors = &value - case "InAddrErrors": - procSnmp.Ip.InAddrErrors = &value - case "ForwDatagrams": - procSnmp.Ip.ForwDatagrams = &value - case "InUnknownProtos": - procSnmp.Ip.InUnknownProtos = &value - case "InDiscards": - procSnmp.Ip.InDiscards = &value - case "InDelivers": - procSnmp.Ip.InDelivers = &value - case "OutRequests": - procSnmp.Ip.OutRequests = &value - case "OutDiscards": - procSnmp.Ip.OutDiscards = &value - case "OutNoRoutes": - procSnmp.Ip.OutNoRoutes = &value - case "ReasmTimeout": - procSnmp.Ip.ReasmTimeout = &value - case "ReasmReqds": - procSnmp.Ip.ReasmReqds = &value - case "ReasmOKs": - procSnmp.Ip.ReasmOKs = &value - case "ReasmFails": - procSnmp.Ip.ReasmFails = &value - case "FragOKs": - procSnmp.Ip.FragOKs = &value - case "FragFails": - procSnmp.Ip.FragFails = &value - case "FragCreates": - procSnmp.Ip.FragCreates = &value - } - case "Icmp": - switch key { - case "InMsgs": - procSnmp.Icmp.InMsgs = &value - case "InErrors": - procSnmp.Icmp.InErrors = &value - case "InCsumErrors": - procSnmp.Icmp.InCsumErrors = &value - case "InDestUnreachs": - procSnmp.Icmp.InDestUnreachs = &value - case "InTimeExcds": - procSnmp.Icmp.InTimeExcds = &value - case "InParmProbs": - procSnmp.Icmp.InParmProbs = &value - case "InSrcQuenchs": - procSnmp.Icmp.InSrcQuenchs = &value - case "InRedirects": - procSnmp.Icmp.InRedirects = &value - case "InEchos": - procSnmp.Icmp.InEchos = &value - case "InEchoReps": - procSnmp.Icmp.InEchoReps = &value - case "InTimestamps": - procSnmp.Icmp.InTimestamps = &value - case "InTimestampReps": - procSnmp.Icmp.InTimestampReps = &value - case "InAddrMasks": - procSnmp.Icmp.InAddrMasks = &value - case "InAddrMaskReps": - procSnmp.Icmp.InAddrMaskReps = &value - case "OutMsgs": - procSnmp.Icmp.OutMsgs = &value - case "OutErrors": - procSnmp.Icmp.OutErrors = &value - case "OutDestUnreachs": - procSnmp.Icmp.OutDestUnreachs = &value - case "OutTimeExcds": - procSnmp.Icmp.OutTimeExcds = &value - case "OutParmProbs": - procSnmp.Icmp.OutParmProbs = &value - case "OutSrcQuenchs": - procSnmp.Icmp.OutSrcQuenchs = &value - case "OutRedirects": - procSnmp.Icmp.OutRedirects = &value - case "OutEchos": - procSnmp.Icmp.OutEchos = &value - case "OutEchoReps": - procSnmp.Icmp.OutEchoReps = &value - case "OutTimestamps": - procSnmp.Icmp.OutTimestamps = &value - case "OutTimestampReps": - procSnmp.Icmp.OutTimestampReps = &value - case "OutAddrMasks": - procSnmp.Icmp.OutAddrMasks = &value - case "OutAddrMaskReps": - procSnmp.Icmp.OutAddrMaskReps = &value - } - case "IcmpMsg": - switch key { - case "InType3": - procSnmp.IcmpMsg.InType3 = &value - case "OutType3": - procSnmp.IcmpMsg.OutType3 = &value - } - case "Tcp": - switch key { - case "RtoAlgorithm": - procSnmp.Tcp.RtoAlgorithm = &value - case "RtoMin": - procSnmp.Tcp.RtoMin = &value - case "RtoMax": - procSnmp.Tcp.RtoMax = &value - case "MaxConn": - procSnmp.Tcp.MaxConn = &value - case "ActiveOpens": - 
procSnmp.Tcp.ActiveOpens = &value - case "PassiveOpens": - procSnmp.Tcp.PassiveOpens = &value - case "AttemptFails": - procSnmp.Tcp.AttemptFails = &value - case "EstabResets": - procSnmp.Tcp.EstabResets = &value - case "CurrEstab": - procSnmp.Tcp.CurrEstab = &value - case "InSegs": - procSnmp.Tcp.InSegs = &value - case "OutSegs": - procSnmp.Tcp.OutSegs = &value - case "RetransSegs": - procSnmp.Tcp.RetransSegs = &value - case "InErrs": - procSnmp.Tcp.InErrs = &value - case "OutRsts": - procSnmp.Tcp.OutRsts = &value - case "InCsumErrors": - procSnmp.Tcp.InCsumErrors = &value - } - case "Udp": - switch key { - case "InDatagrams": - procSnmp.Udp.InDatagrams = &value - case "NoPorts": - procSnmp.Udp.NoPorts = &value - case "InErrors": - procSnmp.Udp.InErrors = &value - case "OutDatagrams": - procSnmp.Udp.OutDatagrams = &value - case "RcvbufErrors": - procSnmp.Udp.RcvbufErrors = &value - case "SndbufErrors": - procSnmp.Udp.SndbufErrors = &value - case "InCsumErrors": - procSnmp.Udp.InCsumErrors = &value - case "IgnoredMulti": - procSnmp.Udp.IgnoredMulti = &value - } - case "UdpLite": - switch key { - case "InDatagrams": - procSnmp.UdpLite.InDatagrams = &value - case "NoPorts": - procSnmp.UdpLite.NoPorts = &value - case "InErrors": - procSnmp.UdpLite.InErrors = &value - case "OutDatagrams": - procSnmp.UdpLite.OutDatagrams = &value - case "RcvbufErrors": - procSnmp.UdpLite.RcvbufErrors = &value - case "SndbufErrors": - procSnmp.UdpLite.SndbufErrors = &value - case "InCsumErrors": - procSnmp.UdpLite.InCsumErrors = &value - case "IgnoredMulti": - procSnmp.UdpLite.IgnoredMulti = &value - } - } - } - } - return procSnmp, scanner.Err() -} diff --git a/vendor/github.com/prometheus/procfs/proc_snmp6.go b/vendor/github.com/prometheus/procfs/proc_snmp6.go deleted file mode 100644 index 3059cc6a1..000000000 --- a/vendor/github.com/prometheus/procfs/proc_snmp6.go +++ /dev/null @@ -1,381 +0,0 @@ -// Copyright 2022 The Prometheus Authors -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package procfs - -import ( - "bufio" - "bytes" - "errors" - "io" - "os" - "strconv" - "strings" - - "github.com/prometheus/procfs/internal/util" -) - -// ProcSnmp6 models the content of /proc//net/snmp6. -type ProcSnmp6 struct { - // The process ID. 
- PID int - Ip6 - Icmp6 - Udp6 - UdpLite6 -} - -type Ip6 struct { // nolint:revive - InReceives *float64 - InHdrErrors *float64 - InTooBigErrors *float64 - InNoRoutes *float64 - InAddrErrors *float64 - InUnknownProtos *float64 - InTruncatedPkts *float64 - InDiscards *float64 - InDelivers *float64 - OutForwDatagrams *float64 - OutRequests *float64 - OutDiscards *float64 - OutNoRoutes *float64 - ReasmTimeout *float64 - ReasmReqds *float64 - ReasmOKs *float64 - ReasmFails *float64 - FragOKs *float64 - FragFails *float64 - FragCreates *float64 - InMcastPkts *float64 - OutMcastPkts *float64 - InOctets *float64 - OutOctets *float64 - InMcastOctets *float64 - OutMcastOctets *float64 - InBcastOctets *float64 - OutBcastOctets *float64 - InNoECTPkts *float64 - InECT1Pkts *float64 - InECT0Pkts *float64 - InCEPkts *float64 -} - -type Icmp6 struct { - InMsgs *float64 - InErrors *float64 - OutMsgs *float64 - OutErrors *float64 - InCsumErrors *float64 - InDestUnreachs *float64 - InPktTooBigs *float64 - InTimeExcds *float64 - InParmProblems *float64 - InEchos *float64 - InEchoReplies *float64 - InGroupMembQueries *float64 - InGroupMembResponses *float64 - InGroupMembReductions *float64 - InRouterSolicits *float64 - InRouterAdvertisements *float64 - InNeighborSolicits *float64 - InNeighborAdvertisements *float64 - InRedirects *float64 - InMLDv2Reports *float64 - OutDestUnreachs *float64 - OutPktTooBigs *float64 - OutTimeExcds *float64 - OutParmProblems *float64 - OutEchos *float64 - OutEchoReplies *float64 - OutGroupMembQueries *float64 - OutGroupMembResponses *float64 - OutGroupMembReductions *float64 - OutRouterSolicits *float64 - OutRouterAdvertisements *float64 - OutNeighborSolicits *float64 - OutNeighborAdvertisements *float64 - OutRedirects *float64 - OutMLDv2Reports *float64 - InType1 *float64 - InType134 *float64 - InType135 *float64 - InType136 *float64 - InType143 *float64 - OutType133 *float64 - OutType135 *float64 - OutType136 *float64 - OutType143 *float64 -} - -type Udp6 struct { // nolint:revive - InDatagrams *float64 - NoPorts *float64 - InErrors *float64 - OutDatagrams *float64 - RcvbufErrors *float64 - SndbufErrors *float64 - InCsumErrors *float64 - IgnoredMulti *float64 -} - -type UdpLite6 struct { // nolint:revive - InDatagrams *float64 - NoPorts *float64 - InErrors *float64 - OutDatagrams *float64 - RcvbufErrors *float64 - SndbufErrors *float64 - InCsumErrors *float64 -} - -func (p Proc) Snmp6() (ProcSnmp6, error) { - filename := p.path("net/snmp6") - data, err := util.ReadFileNoStat(filename) - if err != nil { - // On systems with IPv6 disabled, this file won't exist. - // Do nothing. - if errors.Is(err, os.ErrNotExist) { - return ProcSnmp6{PID: p.PID}, nil - } - - return ProcSnmp6{PID: p.PID}, err - } - - procSnmp6, err := parseSNMP6Stats(bytes.NewReader(data)) - procSnmp6.PID = p.PID - return procSnmp6, err -} - -// parseSnmp6 parses the metrics from proc//net/snmp6 file -// and returns a map contains those metrics. 
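Roughly how a caller might read the per-process snmp6 counters parsed here (placeholder PID; the pointer-typed fields let absent counters be told apart from zero):

package main

import (
	"fmt"
	"log"

	"github.com/prometheus/procfs"
)

func main() {
	fs, err := procfs.NewFS(procfs.DefaultMountPoint)
	if err != nil {
		log.Fatal(err)
	}
	p, err := fs.Proc(1) // example PID only
	if err != nil {
		log.Fatal(err)
	}
	snmp6, err := p.Snmp6()
	if err != nil {
		log.Fatal(err)
	}
	// Nil means the counter was not present (e.g. IPv6 disabled).
	if snmp6.Ip6.InReceives != nil {
		fmt.Printf("Ip6 InReceives: %.0f\n", *snmp6.Ip6.InReceives)
	}
}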
-func parseSNMP6Stats(r io.Reader) (ProcSnmp6, error) { - var ( - scanner = bufio.NewScanner(r) - procSnmp6 = ProcSnmp6{} - ) - - for scanner.Scan() { - stat := strings.Fields(scanner.Text()) - if len(stat) < 2 { - continue - } - // Expect to have "6" in metric name, skip line otherwise - if sixIndex := strings.Index(stat[0], "6"); sixIndex != -1 { - protocol := stat[0][:sixIndex+1] - key := stat[0][sixIndex+1:] - value, err := strconv.ParseFloat(stat[1], 64) - if err != nil { - return procSnmp6, err - } - - switch protocol { - case "Ip6": - switch key { - case "InReceives": - procSnmp6.Ip6.InReceives = &value - case "InHdrErrors": - procSnmp6.Ip6.InHdrErrors = &value - case "InTooBigErrors": - procSnmp6.Ip6.InTooBigErrors = &value - case "InNoRoutes": - procSnmp6.Ip6.InNoRoutes = &value - case "InAddrErrors": - procSnmp6.Ip6.InAddrErrors = &value - case "InUnknownProtos": - procSnmp6.Ip6.InUnknownProtos = &value - case "InTruncatedPkts": - procSnmp6.Ip6.InTruncatedPkts = &value - case "InDiscards": - procSnmp6.Ip6.InDiscards = &value - case "InDelivers": - procSnmp6.Ip6.InDelivers = &value - case "OutForwDatagrams": - procSnmp6.Ip6.OutForwDatagrams = &value - case "OutRequests": - procSnmp6.Ip6.OutRequests = &value - case "OutDiscards": - procSnmp6.Ip6.OutDiscards = &value - case "OutNoRoutes": - procSnmp6.Ip6.OutNoRoutes = &value - case "ReasmTimeout": - procSnmp6.Ip6.ReasmTimeout = &value - case "ReasmReqds": - procSnmp6.Ip6.ReasmReqds = &value - case "ReasmOKs": - procSnmp6.Ip6.ReasmOKs = &value - case "ReasmFails": - procSnmp6.Ip6.ReasmFails = &value - case "FragOKs": - procSnmp6.Ip6.FragOKs = &value - case "FragFails": - procSnmp6.Ip6.FragFails = &value - case "FragCreates": - procSnmp6.Ip6.FragCreates = &value - case "InMcastPkts": - procSnmp6.Ip6.InMcastPkts = &value - case "OutMcastPkts": - procSnmp6.Ip6.OutMcastPkts = &value - case "InOctets": - procSnmp6.Ip6.InOctets = &value - case "OutOctets": - procSnmp6.Ip6.OutOctets = &value - case "InMcastOctets": - procSnmp6.Ip6.InMcastOctets = &value - case "OutMcastOctets": - procSnmp6.Ip6.OutMcastOctets = &value - case "InBcastOctets": - procSnmp6.Ip6.InBcastOctets = &value - case "OutBcastOctets": - procSnmp6.Ip6.OutBcastOctets = &value - case "InNoECTPkts": - procSnmp6.Ip6.InNoECTPkts = &value - case "InECT1Pkts": - procSnmp6.Ip6.InECT1Pkts = &value - case "InECT0Pkts": - procSnmp6.Ip6.InECT0Pkts = &value - case "InCEPkts": - procSnmp6.Ip6.InCEPkts = &value - - } - case "Icmp6": - switch key { - case "InMsgs": - procSnmp6.Icmp6.InMsgs = &value - case "InErrors": - procSnmp6.Icmp6.InErrors = &value - case "OutMsgs": - procSnmp6.Icmp6.OutMsgs = &value - case "OutErrors": - procSnmp6.Icmp6.OutErrors = &value - case "InCsumErrors": - procSnmp6.Icmp6.InCsumErrors = &value - case "InDestUnreachs": - procSnmp6.Icmp6.InDestUnreachs = &value - case "InPktTooBigs": - procSnmp6.Icmp6.InPktTooBigs = &value - case "InTimeExcds": - procSnmp6.Icmp6.InTimeExcds = &value - case "InParmProblems": - procSnmp6.Icmp6.InParmProblems = &value - case "InEchos": - procSnmp6.Icmp6.InEchos = &value - case "InEchoReplies": - procSnmp6.Icmp6.InEchoReplies = &value - case "InGroupMembQueries": - procSnmp6.Icmp6.InGroupMembQueries = &value - case "InGroupMembResponses": - procSnmp6.Icmp6.InGroupMembResponses = &value - case "InGroupMembReductions": - procSnmp6.Icmp6.InGroupMembReductions = &value - case "InRouterSolicits": - procSnmp6.Icmp6.InRouterSolicits = &value - case "InRouterAdvertisements": - procSnmp6.Icmp6.InRouterAdvertisements = &value - case 
"InNeighborSolicits": - procSnmp6.Icmp6.InNeighborSolicits = &value - case "InNeighborAdvertisements": - procSnmp6.Icmp6.InNeighborAdvertisements = &value - case "InRedirects": - procSnmp6.Icmp6.InRedirects = &value - case "InMLDv2Reports": - procSnmp6.Icmp6.InMLDv2Reports = &value - case "OutDestUnreachs": - procSnmp6.Icmp6.OutDestUnreachs = &value - case "OutPktTooBigs": - procSnmp6.Icmp6.OutPktTooBigs = &value - case "OutTimeExcds": - procSnmp6.Icmp6.OutTimeExcds = &value - case "OutParmProblems": - procSnmp6.Icmp6.OutParmProblems = &value - case "OutEchos": - procSnmp6.Icmp6.OutEchos = &value - case "OutEchoReplies": - procSnmp6.Icmp6.OutEchoReplies = &value - case "OutGroupMembQueries": - procSnmp6.Icmp6.OutGroupMembQueries = &value - case "OutGroupMembResponses": - procSnmp6.Icmp6.OutGroupMembResponses = &value - case "OutGroupMembReductions": - procSnmp6.Icmp6.OutGroupMembReductions = &value - case "OutRouterSolicits": - procSnmp6.Icmp6.OutRouterSolicits = &value - case "OutRouterAdvertisements": - procSnmp6.Icmp6.OutRouterAdvertisements = &value - case "OutNeighborSolicits": - procSnmp6.Icmp6.OutNeighborSolicits = &value - case "OutNeighborAdvertisements": - procSnmp6.Icmp6.OutNeighborAdvertisements = &value - case "OutRedirects": - procSnmp6.Icmp6.OutRedirects = &value - case "OutMLDv2Reports": - procSnmp6.Icmp6.OutMLDv2Reports = &value - case "InType1": - procSnmp6.Icmp6.InType1 = &value - case "InType134": - procSnmp6.Icmp6.InType134 = &value - case "InType135": - procSnmp6.Icmp6.InType135 = &value - case "InType136": - procSnmp6.Icmp6.InType136 = &value - case "InType143": - procSnmp6.Icmp6.InType143 = &value - case "OutType133": - procSnmp6.Icmp6.OutType133 = &value - case "OutType135": - procSnmp6.Icmp6.OutType135 = &value - case "OutType136": - procSnmp6.Icmp6.OutType136 = &value - case "OutType143": - procSnmp6.Icmp6.OutType143 = &value - } - case "Udp6": - switch key { - case "InDatagrams": - procSnmp6.Udp6.InDatagrams = &value - case "NoPorts": - procSnmp6.Udp6.NoPorts = &value - case "InErrors": - procSnmp6.Udp6.InErrors = &value - case "OutDatagrams": - procSnmp6.Udp6.OutDatagrams = &value - case "RcvbufErrors": - procSnmp6.Udp6.RcvbufErrors = &value - case "SndbufErrors": - procSnmp6.Udp6.SndbufErrors = &value - case "InCsumErrors": - procSnmp6.Udp6.InCsumErrors = &value - case "IgnoredMulti": - procSnmp6.Udp6.IgnoredMulti = &value - } - case "UdpLite6": - switch key { - case "InDatagrams": - procSnmp6.UdpLite6.InDatagrams = &value - case "NoPorts": - procSnmp6.UdpLite6.NoPorts = &value - case "InErrors": - procSnmp6.UdpLite6.InErrors = &value - case "OutDatagrams": - procSnmp6.UdpLite6.OutDatagrams = &value - case "RcvbufErrors": - procSnmp6.UdpLite6.RcvbufErrors = &value - case "SndbufErrors": - procSnmp6.UdpLite6.SndbufErrors = &value - case "InCsumErrors": - procSnmp6.UdpLite6.InCsumErrors = &value - } - } - } - } - return procSnmp6, scanner.Err() -} diff --git a/vendor/github.com/prometheus/procfs/proc_stat.go b/vendor/github.com/prometheus/procfs/proc_stat.go deleted file mode 100644 index 06a8d931c..000000000 --- a/vendor/github.com/prometheus/procfs/proc_stat.go +++ /dev/null @@ -1,229 +0,0 @@ -// Copyright 2018 The Prometheus Authors -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package procfs - -import ( - "bytes" - "fmt" - "os" - - "github.com/prometheus/procfs/internal/util" -) - -// Originally, this USER_HZ value was dynamically retrieved via a sysconf call -// which required cgo. However, that caused a lot of problems regarding -// cross-compilation. Alternatives such as running a binary to determine the -// value, or trying to derive it in some other way were all problematic. After -// much research it was determined that USER_HZ is actually hardcoded to 100 on -// all Go-supported platforms as of the time of this writing. This is why we -// decided to hardcode it here as well. It is not impossible that there could -// be systems with exceptions, but they should be very exotic edge cases, and -// in that case, the worst outcome will be two misreported metrics. -// -// See also the following discussions: -// -// - https://github.com/prometheus/node_exporter/issues/52 -// - https://github.com/prometheus/procfs/pull/2 -// - http://stackoverflow.com/questions/17410841/how-does-user-hz-solve-the-jiffy-scaling-issue -const userHZ = 100 - -// ProcStat provides status information about the process, -// read from /proc/[pid]/stat. -type ProcStat struct { - // The process ID. - PID int - // The filename of the executable. - Comm string - // The process state. - State string - // The PID of the parent of this process. - PPID int - // The process group ID of the process. - PGRP int - // The session ID of the process. - Session int - // The controlling terminal of the process. - TTY int - // The ID of the foreground process group of the controlling terminal of - // the process. - TPGID int - // The kernel flags word of the process. - Flags uint - // The number of minor faults the process has made which have not required - // loading a memory page from disk. - MinFlt uint - // The number of minor faults that the process's waited-for children have - // made. - CMinFlt uint - // The number of major faults the process has made which have required - // loading a memory page from disk. - MajFlt uint - // The number of major faults that the process's waited-for children have - // made. - CMajFlt uint - // Amount of time that this process has been scheduled in user mode, - // measured in clock ticks. - UTime uint - // Amount of time that this process has been scheduled in kernel mode, - // measured in clock ticks. - STime uint - // Amount of time that this process's waited-for children have been - // scheduled in user mode, measured in clock ticks. - CUTime int - // Amount of time that this process's waited-for children have been - // scheduled in kernel mode, measured in clock ticks. - CSTime int - // For processes running a real-time scheduling policy, this is the negated - // scheduling priority, minus one. - Priority int - // The nice value, a value in the range 19 (low priority) to -20 (high - // priority). - Nice int - // Number of threads in this process. - NumThreads int - // The time the process started after system boot, the value is expressed - // in clock ticks. - Starttime uint64 - // Virtual memory size in bytes. 
- VSize uint - // Resident set size in pages. - RSS int - // Soft limit in bytes on the rss of the process. - RSSLimit uint64 - // CPU number last executed on. - Processor uint - // Real-time scheduling priority, a number in the range 1 to 99 for processes - // scheduled under a real-time policy, or 0, for non-real-time processes. - RTPriority uint - // Scheduling policy. - Policy uint - // Aggregated block I/O delays, measured in clock ticks (centiseconds). - DelayAcctBlkIOTicks uint64 - // Guest time of the process (time spent running a virtual CPU for a guest - // operating system), measured in clock ticks. - GuestTime int - // Guest time of the process's children, measured in clock ticks. - CGuestTime int - - proc FS -} - -// NewStat returns the current status information of the process. -// -// Deprecated: Use p.Stat() instead. -func (p Proc) NewStat() (ProcStat, error) { - return p.Stat() -} - -// Stat returns the current status information of the process. -func (p Proc) Stat() (ProcStat, error) { - data, err := util.ReadFileNoStat(p.path("stat")) - if err != nil { - return ProcStat{}, err - } - - var ( - ignoreInt64 int64 - ignoreUint64 uint64 - - s = ProcStat{PID: p.PID, proc: p.fs} - l = bytes.Index(data, []byte("(")) - r = bytes.LastIndex(data, []byte(")")) - ) - - if l < 0 || r < 0 { - return ProcStat{}, fmt.Errorf("%w: unexpected format, couldn't extract comm %q", ErrFileParse, data) - } - - s.Comm = string(data[l+1 : r]) - - // Check the following resources for the details about the particular stat - // fields and their data types: - // * https://man7.org/linux/man-pages/man5/proc.5.html - // * https://man7.org/linux/man-pages/man3/scanf.3.html - _, err = fmt.Fscan( - bytes.NewBuffer(data[r+2:]), - &s.State, - &s.PPID, - &s.PGRP, - &s.Session, - &s.TTY, - &s.TPGID, - &s.Flags, - &s.MinFlt, - &s.CMinFlt, - &s.MajFlt, - &s.CMajFlt, - &s.UTime, - &s.STime, - &s.CUTime, - &s.CSTime, - &s.Priority, - &s.Nice, - &s.NumThreads, - &ignoreInt64, - &s.Starttime, - &s.VSize, - &s.RSS, - &s.RSSLimit, - &ignoreUint64, - &ignoreUint64, - &ignoreUint64, - &ignoreUint64, - &ignoreUint64, - &ignoreUint64, - &ignoreUint64, - &ignoreUint64, - &ignoreUint64, - &ignoreUint64, - &ignoreUint64, - &ignoreUint64, - &ignoreInt64, - &s.Processor, - &s.RTPriority, - &s.Policy, - &s.DelayAcctBlkIOTicks, - &s.GuestTime, - &s.CGuestTime, - ) - if err != nil { - return ProcStat{}, err - } - - return s, nil -} - -// VirtualMemory returns the virtual memory size in bytes. -func (s ProcStat) VirtualMemory() uint { - return s.VSize -} - -// ResidentMemory returns the resident memory size in bytes. -func (s ProcStat) ResidentMemory() int { - return s.RSS * os.Getpagesize() -} - -// StartTime returns the unix timestamp of the process in seconds. -func (s ProcStat) StartTime() (float64, error) { - stat, err := s.proc.Stat() - if err != nil { - return 0, err - } - return float64(stat.BootTime) + (float64(s.Starttime) / userHZ), nil -} - -// CPUTime returns the total CPU user and system time in seconds. -func (s ProcStat) CPUTime() float64 { - return float64(s.UTime+s.STime) / userHZ -} diff --git a/vendor/github.com/prometheus/procfs/proc_status.go b/vendor/github.com/prometheus/procfs/proc_status.go deleted file mode 100644 index 46307f572..000000000 --- a/vendor/github.com/prometheus/procfs/proc_status.go +++ /dev/null @@ -1,221 +0,0 @@ -// Copyright 2018 The Prometheus Authors -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package procfs - -import ( - "bytes" - "sort" - "strconv" - "strings" - - "github.com/prometheus/procfs/internal/util" -) - -// ProcStatus provides status information about the process, -// read from /proc/[pid]/status. -type ProcStatus struct { - // The process ID. - PID int - // The process name. - Name string - - // Thread group ID. - TGID int - // List of Pid namespace. - NSpids []uint64 - - // Peak virtual memory size. - VmPeak uint64 // nolint:revive - // Virtual memory size. - VmSize uint64 // nolint:revive - // Locked memory size. - VmLck uint64 // nolint:revive - // Pinned memory size. - VmPin uint64 // nolint:revive - // Peak resident set size. - VmHWM uint64 // nolint:revive - // Resident set size (sum of RssAnnon RssFile and RssShmem). - VmRSS uint64 // nolint:revive - // Size of resident anonymous memory. - RssAnon uint64 // nolint:revive - // Size of resident file mappings. - RssFile uint64 // nolint:revive - // Size of resident shared memory. - RssShmem uint64 // nolint:revive - // Size of data segments. - VmData uint64 // nolint:revive - // Size of stack segments. - VmStk uint64 // nolint:revive - // Size of text segments. - VmExe uint64 // nolint:revive - // Shared library code size. - VmLib uint64 // nolint:revive - // Page table entries size. - VmPTE uint64 // nolint:revive - // Size of second-level page tables. - VmPMD uint64 // nolint:revive - // Swapped-out virtual memory size by anonymous private. - VmSwap uint64 // nolint:revive - // Size of hugetlb memory portions - HugetlbPages uint64 - - // Number of voluntary context switches. - VoluntaryCtxtSwitches uint64 - // Number of involuntary context switches. - NonVoluntaryCtxtSwitches uint64 - - // UIDs of the process (Real, effective, saved set, and filesystem UIDs) - UIDs [4]string - // GIDs of the process (Real, effective, saved set, and filesystem GIDs) - GIDs [4]string - - // CpusAllowedList: List of cpu cores processes are allowed to run on. - CpusAllowedList []uint64 -} - -// NewStatus returns the current status information of the process. 
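A small sketch of reading /proc/[pid]/status through this API, using NewStatus and TotalCtxtSwitches as defined above and a placeholder PID:

package main

import (
	"fmt"
	"log"

	"github.com/prometheus/procfs"
)

func main() {
	fs, err := procfs.NewFS(procfs.DefaultMountPoint)
	if err != nil {
		log.Fatal(err)
	}
	p, err := fs.Proc(1) // example PID only
	if err != nil {
		log.Fatal(err)
	}
	status, err := p.NewStatus()
	if err != nil {
		log.Fatal(err)
	}
	// Memory values have already been converted from kB to bytes by the parser.
	fmt.Printf("%s (tgid %d): VmRSS=%d bytes, ctxt switches=%d\n",
		status.Name, status.TGID, status.VmRSS, status.TotalCtxtSwitches())
}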
-func (p Proc) NewStatus() (ProcStatus, error) { - data, err := util.ReadFileNoStat(p.path("status")) - if err != nil { - return ProcStatus{}, err - } - - s := ProcStatus{PID: p.PID} - - lines := strings.Split(string(data), "\n") - for _, line := range lines { - if !bytes.Contains([]byte(line), []byte(":")) { - continue - } - - kv := strings.SplitN(line, ":", 2) - - // removes spaces - k := strings.TrimSpace(kv[0]) - v := strings.TrimSpace(kv[1]) - // removes "kB" - v = strings.TrimSuffix(v, " kB") - - // value to int when possible - // we can skip error check here, 'cause vKBytes is not used when value is a string - vKBytes, _ := strconv.ParseUint(v, 10, 64) - // convert kB to B - vBytes := vKBytes * 1024 - - s.fillStatus(k, v, vKBytes, vBytes) - } - - return s, nil -} - -func (s *ProcStatus) fillStatus(k string, vString string, vUint uint64, vUintBytes uint64) { - switch k { - case "Tgid": - s.TGID = int(vUint) - case "Name": - s.Name = vString - case "Uid": - copy(s.UIDs[:], strings.Split(vString, "\t")) - case "Gid": - copy(s.GIDs[:], strings.Split(vString, "\t")) - case "NSpid": - s.NSpids = calcNSPidsList(vString) - case "VmPeak": - s.VmPeak = vUintBytes - case "VmSize": - s.VmSize = vUintBytes - case "VmLck": - s.VmLck = vUintBytes - case "VmPin": - s.VmPin = vUintBytes - case "VmHWM": - s.VmHWM = vUintBytes - case "VmRSS": - s.VmRSS = vUintBytes - case "RssAnon": - s.RssAnon = vUintBytes - case "RssFile": - s.RssFile = vUintBytes - case "RssShmem": - s.RssShmem = vUintBytes - case "VmData": - s.VmData = vUintBytes - case "VmStk": - s.VmStk = vUintBytes - case "VmExe": - s.VmExe = vUintBytes - case "VmLib": - s.VmLib = vUintBytes - case "VmPTE": - s.VmPTE = vUintBytes - case "VmPMD": - s.VmPMD = vUintBytes - case "VmSwap": - s.VmSwap = vUintBytes - case "HugetlbPages": - s.HugetlbPages = vUintBytes - case "voluntary_ctxt_switches": - s.VoluntaryCtxtSwitches = vUint - case "nonvoluntary_ctxt_switches": - s.NonVoluntaryCtxtSwitches = vUint - case "Cpus_allowed_list": - s.CpusAllowedList = calcCpusAllowedList(vString) - } - -} - -// TotalCtxtSwitches returns the total context switch. -func (s ProcStatus) TotalCtxtSwitches() uint64 { - return s.VoluntaryCtxtSwitches + s.NonVoluntaryCtxtSwitches -} - -func calcCpusAllowedList(cpuString string) []uint64 { - s := strings.Split(cpuString, ",") - - var g []uint64 - - for _, cpu := range s { - // parse cpu ranges, example: 1-3=[1,2,3] - if l := strings.Split(strings.TrimSpace(cpu), "-"); len(l) > 1 { - startCPU, _ := strconv.ParseUint(l[0], 10, 64) - endCPU, _ := strconv.ParseUint(l[1], 10, 64) - - for i := startCPU; i <= endCPU; i++ { - g = append(g, i) - } - } else if len(l) == 1 { - cpu, _ := strconv.ParseUint(l[0], 10, 64) - g = append(g, cpu) - } - - } - - sort.Slice(g, func(i, j int) bool { return g[i] < g[j] }) - return g -} - -func calcNSPidsList(nspidsString string) []uint64 { - s := strings.Split(nspidsString, " ") - var nspids []uint64 - - for _, nspid := range s { - nspid, _ := strconv.ParseUint(nspid, 10, 64) - if nspid == 0 { - continue - } - nspids = append(nspids, nspid) - } - - return nspids -} diff --git a/vendor/github.com/prometheus/procfs/proc_sys.go b/vendor/github.com/prometheus/procfs/proc_sys.go deleted file mode 100644 index 12c5bf05b..000000000 --- a/vendor/github.com/prometheus/procfs/proc_sys.go +++ /dev/null @@ -1,51 +0,0 @@ -// Copyright 2022 The Prometheus Authors -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package procfs - -import ( - "fmt" - "strings" - - "github.com/prometheus/procfs/internal/util" -) - -func sysctlToPath(sysctl string) string { - return strings.Replace(sysctl, ".", "/", -1) -} - -func (fs FS) SysctlStrings(sysctl string) ([]string, error) { - value, err := util.SysReadFile(fs.proc.Path("sys", sysctlToPath(sysctl))) - if err != nil { - return nil, err - } - return strings.Fields(value), nil - -} - -func (fs FS) SysctlInts(sysctl string) ([]int, error) { - fields, err := fs.SysctlStrings(sysctl) - if err != nil { - return nil, err - } - - values := make([]int, len(fields)) - for i, f := range fields { - vp := util.NewValueParser(f) - values[i] = vp.Int() - if err := vp.Err(); err != nil { - return nil, fmt.Errorf("%s: field %d in sysctl %s is not a valid int: %w", ErrFileParse, i, sysctl, err) - } - } - return values, nil -} diff --git a/vendor/github.com/prometheus/procfs/schedstat.go b/vendor/github.com/prometheus/procfs/schedstat.go deleted file mode 100644 index 5f7f32dc8..000000000 --- a/vendor/github.com/prometheus/procfs/schedstat.go +++ /dev/null @@ -1,121 +0,0 @@ -// Copyright 2019 The Prometheus Authors -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package procfs - -import ( - "bufio" - "errors" - "os" - "regexp" - "strconv" -) - -var ( - cpuLineRE = regexp.MustCompile(`cpu(\d+) (\d+) (\d+) (\d+) (\d+) (\d+) (\d+) (\d+) (\d+) (\d+)`) - procLineRE = regexp.MustCompile(`(\d+) (\d+) (\d+)`) -) - -// Schedstat contains scheduler statistics from /proc/schedstat -// -// See -// https://www.kernel.org/doc/Documentation/scheduler/sched-stats.txt -// for a detailed description of what these numbers mean. -// -// Note the current kernel documentation claims some of the time units are in -// jiffies when they are actually in nanoseconds since 2.6.23 with the -// introduction of CFS. A fix to the documentation is pending. See -// https://lore.kernel.org/patchwork/project/lkml/list/?series=403473 -type Schedstat struct { - CPUs []*SchedstatCPU -} - -// SchedstatCPU contains the values from one "cpu" line. -type SchedstatCPU struct { - CPUNum string - - RunningNanoseconds uint64 - WaitingNanoseconds uint64 - RunTimeslices uint64 -} - -// ProcSchedstat contains the values from `/proc//schedstat`. -type ProcSchedstat struct { - RunningNanoseconds uint64 - WaitingNanoseconds uint64 - RunTimeslices uint64 -} - -// Schedstat reads data from `/proc/schedstat`. 
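Reading the scheduler statistics described above might look like the following sketch, assuming the upstream module:

package main

import (
	"fmt"
	"log"

	"github.com/prometheus/procfs"
)

func main() {
	fs, err := procfs.NewFS(procfs.DefaultMountPoint)
	if err != nil {
		log.Fatal(err)
	}
	stats, err := fs.Schedstat()
	if err != nil {
		log.Fatal(err)
	}
	// Per the comment above, times are nanoseconds on kernels >= 2.6.23.
	for _, cpu := range stats.CPUs {
		fmt.Printf("cpu%s: running=%dns waiting=%dns timeslices=%d\n",
			cpu.CPUNum, cpu.RunningNanoseconds, cpu.WaitingNanoseconds, cpu.RunTimeslices)
	}
}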
-func (fs FS) Schedstat() (*Schedstat, error) { - file, err := os.Open(fs.proc.Path("schedstat")) - if err != nil { - return nil, err - } - defer file.Close() - - stats := &Schedstat{} - scanner := bufio.NewScanner(file) - - for scanner.Scan() { - match := cpuLineRE.FindStringSubmatch(scanner.Text()) - if match != nil { - cpu := &SchedstatCPU{} - cpu.CPUNum = match[1] - - cpu.RunningNanoseconds, err = strconv.ParseUint(match[8], 10, 64) - if err != nil { - continue - } - - cpu.WaitingNanoseconds, err = strconv.ParseUint(match[9], 10, 64) - if err != nil { - continue - } - - cpu.RunTimeslices, err = strconv.ParseUint(match[10], 10, 64) - if err != nil { - continue - } - - stats.CPUs = append(stats.CPUs, cpu) - } - } - - return stats, nil -} - -func parseProcSchedstat(contents string) (ProcSchedstat, error) { - var ( - stats ProcSchedstat - err error - ) - match := procLineRE.FindStringSubmatch(contents) - - if match != nil { - stats.RunningNanoseconds, err = strconv.ParseUint(match[1], 10, 64) - if err != nil { - return stats, err - } - - stats.WaitingNanoseconds, err = strconv.ParseUint(match[2], 10, 64) - if err != nil { - return stats, err - } - - stats.RunTimeslices, err = strconv.ParseUint(match[3], 10, 64) - return stats, err - } - - return stats, errors.New("could not parse schedstat") -} diff --git a/vendor/github.com/prometheus/procfs/slab.go b/vendor/github.com/prometheus/procfs/slab.go deleted file mode 100644 index 8611c9017..000000000 --- a/vendor/github.com/prometheus/procfs/slab.go +++ /dev/null @@ -1,151 +0,0 @@ -// Copyright 2020 The Prometheus Authors -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package procfs - -import ( - "bufio" - "bytes" - "fmt" - "regexp" - "strconv" - "strings" - - "github.com/prometheus/procfs/internal/util" -) - -var ( - slabSpace = regexp.MustCompile(`\s+`) - slabVer = regexp.MustCompile(`slabinfo -`) - slabHeader = regexp.MustCompile(`# name`) -) - -// Slab represents a slab pool in the kernel. -type Slab struct { - Name string - ObjActive int64 - ObjNum int64 - ObjSize int64 - ObjPerSlab int64 - PagesPerSlab int64 - // tunables - Limit int64 - Batch int64 - SharedFactor int64 - SlabActive int64 - SlabNum int64 - SharedAvail int64 -} - -// SlabInfo represents info for all slabs. -type SlabInfo struct { - Slabs []*Slab -} - -func shouldParseSlab(line string) bool { - if slabVer.MatchString(line) { - return false - } - if slabHeader.MatchString(line) { - return false - } - return true -} - -// parseV21SlabEntry is used to parse a line from /proc/slabinfo version 2.1. -func parseV21SlabEntry(line string) (*Slab, error) { - // First cleanup whitespace. 
- l := slabSpace.ReplaceAllString(line, " ") - s := strings.Split(l, " ") - if len(s) != 16 { - return nil, fmt.Errorf("%w: unable to parse: %q", ErrFileParse, line) - } - var err error - i := &Slab{Name: s[0]} - i.ObjActive, err = strconv.ParseInt(s[1], 10, 64) - if err != nil { - return nil, err - } - i.ObjNum, err = strconv.ParseInt(s[2], 10, 64) - if err != nil { - return nil, err - } - i.ObjSize, err = strconv.ParseInt(s[3], 10, 64) - if err != nil { - return nil, err - } - i.ObjPerSlab, err = strconv.ParseInt(s[4], 10, 64) - if err != nil { - return nil, err - } - i.PagesPerSlab, err = strconv.ParseInt(s[5], 10, 64) - if err != nil { - return nil, err - } - i.Limit, err = strconv.ParseInt(s[8], 10, 64) - if err != nil { - return nil, err - } - i.Batch, err = strconv.ParseInt(s[9], 10, 64) - if err != nil { - return nil, err - } - i.SharedFactor, err = strconv.ParseInt(s[10], 10, 64) - if err != nil { - return nil, err - } - i.SlabActive, err = strconv.ParseInt(s[13], 10, 64) - if err != nil { - return nil, err - } - i.SlabNum, err = strconv.ParseInt(s[14], 10, 64) - if err != nil { - return nil, err - } - i.SharedAvail, err = strconv.ParseInt(s[15], 10, 64) - if err != nil { - return nil, err - } - return i, nil -} - -// parseSlabInfo21 is used to parse a slabinfo 2.1 file. -func parseSlabInfo21(r *bytes.Reader) (SlabInfo, error) { - scanner := bufio.NewScanner(r) - s := SlabInfo{Slabs: []*Slab{}} - for scanner.Scan() { - line := scanner.Text() - if !shouldParseSlab(line) { - continue - } - slab, err := parseV21SlabEntry(line) - if err != nil { - return s, err - } - s.Slabs = append(s.Slabs, slab) - } - return s, nil -} - -// SlabInfo reads data from `/proc/slabinfo`. -func (fs FS) SlabInfo() (SlabInfo, error) { - // TODO: Consider passing options to allow for parsing different - // slabinfo versions. However, slabinfo 2.1 has been stable since - // kernel 2.6.10 and later. - data, err := util.ReadFileNoStat(fs.proc.Path("slabinfo")) - if err != nil { - return SlabInfo{}, err - } - - return parseSlabInfo21(bytes.NewReader(data)) -} diff --git a/vendor/github.com/prometheus/procfs/softirqs.go b/vendor/github.com/prometheus/procfs/softirqs.go deleted file mode 100644 index b8fad677d..000000000 --- a/vendor/github.com/prometheus/procfs/softirqs.go +++ /dev/null @@ -1,160 +0,0 @@ -// Copyright 2022 The Prometheus Authors -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package procfs - -import ( - "bufio" - "bytes" - "fmt" - "io" - "strconv" - "strings" - - "github.com/prometheus/procfs/internal/util" -) - -// Softirqs represents the softirq statistics. 
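A usage sketch for the per-CPU softirq counters exposed by the type below, again only as an illustration of the removed API:

package main

import (
	"fmt"
	"log"

	"github.com/prometheus/procfs"
)

func main() {
	fs, err := procfs.NewFS(procfs.DefaultMountPoint)
	if err != nil {
		log.Fatal(err)
	}
	s, err := fs.Softirqs()
	if err != nil {
		log.Fatal(err)
	}
	// Each slice holds one counter per CPU, in CPU order.
	for cpu, n := range s.NetRx {
		fmt.Printf("cpu%d NET_RX: %d\n", cpu, n)
	}
}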
-type Softirqs struct { - Hi []uint64 - Timer []uint64 - NetTx []uint64 - NetRx []uint64 - Block []uint64 - IRQPoll []uint64 - Tasklet []uint64 - Sched []uint64 - HRTimer []uint64 - RCU []uint64 -} - -func (fs FS) Softirqs() (Softirqs, error) { - fileName := fs.proc.Path("softirqs") - data, err := util.ReadFileNoStat(fileName) - if err != nil { - return Softirqs{}, err - } - - reader := bytes.NewReader(data) - - return parseSoftirqs(reader) -} - -func parseSoftirqs(r io.Reader) (Softirqs, error) { - var ( - softirqs = Softirqs{} - scanner = bufio.NewScanner(r) - ) - - if !scanner.Scan() { - return Softirqs{}, fmt.Errorf("%w: softirqs empty", ErrFileRead) - } - - for scanner.Scan() { - parts := strings.Fields(scanner.Text()) - var err error - - // require at least one cpu - if len(parts) < 2 { - continue - } - switch { - case parts[0] == "HI:": - perCPU := parts[1:] - softirqs.Hi = make([]uint64, len(perCPU)) - for i, count := range perCPU { - if softirqs.Hi[i], err = strconv.ParseUint(count, 10, 64); err != nil { - return Softirqs{}, fmt.Errorf("%s: couldn't parse %q (HI%d): %w", ErrFileParse, count, i, err) - } - } - case parts[0] == "TIMER:": - perCPU := parts[1:] - softirqs.Timer = make([]uint64, len(perCPU)) - for i, count := range perCPU { - if softirqs.Timer[i], err = strconv.ParseUint(count, 10, 64); err != nil { - return Softirqs{}, fmt.Errorf("%s: couldn't parse %q (TIMER%d): %w", ErrFileParse, count, i, err) - } - } - case parts[0] == "NET_TX:": - perCPU := parts[1:] - softirqs.NetTx = make([]uint64, len(perCPU)) - for i, count := range perCPU { - if softirqs.NetTx[i], err = strconv.ParseUint(count, 10, 64); err != nil { - return Softirqs{}, fmt.Errorf("%s: couldn't parse %q (NET_TX%d): %w", ErrFileParse, count, i, err) - } - } - case parts[0] == "NET_RX:": - perCPU := parts[1:] - softirqs.NetRx = make([]uint64, len(perCPU)) - for i, count := range perCPU { - if softirqs.NetRx[i], err = strconv.ParseUint(count, 10, 64); err != nil { - return Softirqs{}, fmt.Errorf("%s: couldn't parse %q (NET_RX%d): %w", ErrFileParse, count, i, err) - } - } - case parts[0] == "BLOCK:": - perCPU := parts[1:] - softirqs.Block = make([]uint64, len(perCPU)) - for i, count := range perCPU { - if softirqs.Block[i], err = strconv.ParseUint(count, 10, 64); err != nil { - return Softirqs{}, fmt.Errorf("%s: couldn't parse %q (BLOCK%d): %w", ErrFileParse, count, i, err) - } - } - case parts[0] == "IRQ_POLL:": - perCPU := parts[1:] - softirqs.IRQPoll = make([]uint64, len(perCPU)) - for i, count := range perCPU { - if softirqs.IRQPoll[i], err = strconv.ParseUint(count, 10, 64); err != nil { - return Softirqs{}, fmt.Errorf("%s: couldn't parse %q (IRQ_POLL%d): %w", ErrFileParse, count, i, err) - } - } - case parts[0] == "TASKLET:": - perCPU := parts[1:] - softirqs.Tasklet = make([]uint64, len(perCPU)) - for i, count := range perCPU { - if softirqs.Tasklet[i], err = strconv.ParseUint(count, 10, 64); err != nil { - return Softirqs{}, fmt.Errorf("%s: couldn't parse %q (TASKLET%d): %w", ErrFileParse, count, i, err) - } - } - case parts[0] == "SCHED:": - perCPU := parts[1:] - softirqs.Sched = make([]uint64, len(perCPU)) - for i, count := range perCPU { - if softirqs.Sched[i], err = strconv.ParseUint(count, 10, 64); err != nil { - return Softirqs{}, fmt.Errorf("%s: couldn't parse %q (SCHED%d): %w", ErrFileParse, count, i, err) - } - } - case parts[0] == "HRTIMER:": - perCPU := parts[1:] - softirqs.HRTimer = make([]uint64, len(perCPU)) - for i, count := range perCPU { - if softirqs.HRTimer[i], err = 
strconv.ParseUint(count, 10, 64); err != nil { - return Softirqs{}, fmt.Errorf("%s: couldn't parse %q (HRTIMER%d): %w", ErrFileParse, count, i, err) - } - } - case parts[0] == "RCU:": - perCPU := parts[1:] - softirqs.RCU = make([]uint64, len(perCPU)) - for i, count := range perCPU { - if softirqs.RCU[i], err = strconv.ParseUint(count, 10, 64); err != nil { - return Softirqs{}, fmt.Errorf("%s: couldn't parse %q (RCU%d): %w", ErrFileParse, count, i, err) - } - } - } - } - - if err := scanner.Err(); err != nil { - return Softirqs{}, fmt.Errorf("%s: couldn't parse softirqs: %w", ErrFileParse, err) - } - - return softirqs, scanner.Err() -} diff --git a/vendor/github.com/prometheus/procfs/stat.go b/vendor/github.com/prometheus/procfs/stat.go deleted file mode 100644 index 34fc3ee21..000000000 --- a/vendor/github.com/prometheus/procfs/stat.go +++ /dev/null @@ -1,258 +0,0 @@ -// Copyright 2018 The Prometheus Authors -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package procfs - -import ( - "bufio" - "bytes" - "fmt" - "io" - "strconv" - "strings" - - "github.com/prometheus/procfs/internal/fs" - "github.com/prometheus/procfs/internal/util" -) - -// CPUStat shows how much time the cpu spend in various stages. -type CPUStat struct { - User float64 - Nice float64 - System float64 - Idle float64 - Iowait float64 - IRQ float64 - SoftIRQ float64 - Steal float64 - Guest float64 - GuestNice float64 -} - -// SoftIRQStat represent the softirq statistics as exported in the procfs stat file. -// A nice introduction can be found at https://0xax.gitbooks.io/linux-insides/content/interrupts/interrupts-9.html -// It is possible to get per-cpu stats by reading `/proc/softirqs`. -type SoftIRQStat struct { - Hi uint64 - Timer uint64 - NetTx uint64 - NetRx uint64 - Block uint64 - BlockIoPoll uint64 - Tasklet uint64 - Sched uint64 - Hrtimer uint64 - Rcu uint64 -} - -// Stat represents kernel/system statistics. -type Stat struct { - // Boot time in seconds since the Epoch. - BootTime uint64 - // Summed up cpu statistics. - CPUTotal CPUStat - // Per-CPU statistics. - CPU map[int64]CPUStat - // Number of times interrupts were handled, which contains numbered and unnumbered IRQs. - IRQTotal uint64 - // Number of times a numbered IRQ was triggered. - IRQ []uint64 - // Number of times a context switch happened. - ContextSwitches uint64 - // Number of times a process was created. - ProcessCreated uint64 - // Number of processes currently running. - ProcessesRunning uint64 - // Number of processes currently blocked (waiting for IO). - ProcessesBlocked uint64 - // Number of times a softirq was scheduled. - SoftIRQTotal uint64 - // Detailed softirq statistics. - SoftIRQ SoftIRQStat -} - -// Parse a cpu statistics line and returns the CPUStat struct plus the cpu id (or -1 for the overall sum). 
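The /proc/stat parser below is normally reached through FS.Stat; a minimal sketch of that call, with the CPU times already scaled to seconds by userHZ:

package main

import (
	"fmt"
	"log"

	"github.com/prometheus/procfs"
)

func main() {
	fs, err := procfs.NewFS(procfs.DefaultMountPoint)
	if err != nil {
		log.Fatal(err)
	}
	stat, err := fs.Stat()
	if err != nil {
		log.Fatal(err)
	}
	fmt.Printf("boot time: %d, context switches: %d\n", stat.BootTime, stat.ContextSwitches)
	fmt.Printf("total cpu: user=%.2fs system=%.2fs idle=%.2fs\n",
		stat.CPUTotal.User, stat.CPUTotal.System, stat.CPUTotal.Idle)
}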
-func parseCPUStat(line string) (CPUStat, int64, error) { - cpuStat := CPUStat{} - var cpu string - - count, err := fmt.Sscanf(line, "%s %f %f %f %f %f %f %f %f %f %f", - &cpu, - &cpuStat.User, &cpuStat.Nice, &cpuStat.System, &cpuStat.Idle, - &cpuStat.Iowait, &cpuStat.IRQ, &cpuStat.SoftIRQ, &cpuStat.Steal, - &cpuStat.Guest, &cpuStat.GuestNice) - - if err != nil && err != io.EOF { - return CPUStat{}, -1, fmt.Errorf("%s: couldn't parse %q (cpu): %w", ErrFileParse, line, err) - } - if count == 0 { - return CPUStat{}, -1, fmt.Errorf("%w: couldn't parse %q (cpu): 0 elements parsed", ErrFileParse, line) - } - - cpuStat.User /= userHZ - cpuStat.Nice /= userHZ - cpuStat.System /= userHZ - cpuStat.Idle /= userHZ - cpuStat.Iowait /= userHZ - cpuStat.IRQ /= userHZ - cpuStat.SoftIRQ /= userHZ - cpuStat.Steal /= userHZ - cpuStat.Guest /= userHZ - cpuStat.GuestNice /= userHZ - - if cpu == "cpu" { - return cpuStat, -1, nil - } - - cpuID, err := strconv.ParseInt(cpu[3:], 10, 64) - if err != nil { - return CPUStat{}, -1, fmt.Errorf("%s: couldn't parse %q (cpu/cpuid): %w", ErrFileParse, line, err) - } - - return cpuStat, cpuID, nil -} - -// Parse a softirq line. -func parseSoftIRQStat(line string) (SoftIRQStat, uint64, error) { - softIRQStat := SoftIRQStat{} - var total uint64 - var prefix string - - _, err := fmt.Sscanf(line, "%s %d %d %d %d %d %d %d %d %d %d %d", - &prefix, &total, - &softIRQStat.Hi, &softIRQStat.Timer, &softIRQStat.NetTx, &softIRQStat.NetRx, - &softIRQStat.Block, &softIRQStat.BlockIoPoll, - &softIRQStat.Tasklet, &softIRQStat.Sched, - &softIRQStat.Hrtimer, &softIRQStat.Rcu) - - if err != nil { - return SoftIRQStat{}, 0, fmt.Errorf("%s: couldn't parse %q (softirq): %w", ErrFileParse, line, err) - } - - return softIRQStat, total, nil -} - -// NewStat returns information about current cpu/process statistics. -// See https://www.kernel.org/doc/Documentation/filesystems/proc.txt -// -// Deprecated: Use fs.Stat() instead. -func NewStat() (Stat, error) { - fs, err := NewFS(fs.DefaultProcMountPoint) - if err != nil { - return Stat{}, err - } - return fs.Stat() -} - -// NewStat returns information about current cpu/process statistics. -// See: https://www.kernel.org/doc/Documentation/filesystems/proc.txt -// -// Deprecated: Use fs.Stat() instead. -func (fs FS) NewStat() (Stat, error) { - return fs.Stat() -} - -// Stat returns information about current cpu/process statistics. -// See: https://www.kernel.org/doc/Documentation/filesystems/proc.txt -func (fs FS) Stat() (Stat, error) { - fileName := fs.proc.Path("stat") - data, err := util.ReadFileNoStat(fileName) - if err != nil { - return Stat{}, err - } - procStat, err := parseStat(bytes.NewReader(data), fileName) - if err != nil { - return Stat{}, err - } - return procStat, nil -} - -// parseStat parses the metrics from /proc/[pid]/stat. -func parseStat(r io.Reader, fileName string) (Stat, error) { - var ( - scanner = bufio.NewScanner(r) - stat = Stat{ - CPU: make(map[int64]CPUStat), - } - err error - ) - - // Increase default scanner buffer to handle very long `intr` lines. 
- buf := make([]byte, 0, 8*1024) - scanner.Buffer(buf, 1024*1024) - - for scanner.Scan() { - line := scanner.Text() - parts := strings.Fields(scanner.Text()) - // require at least - if len(parts) < 2 { - continue - } - switch { - case parts[0] == "btime": - if stat.BootTime, err = strconv.ParseUint(parts[1], 10, 64); err != nil { - return Stat{}, fmt.Errorf("%s: couldn't parse %q (btime): %w", ErrFileParse, parts[1], err) - } - case parts[0] == "intr": - if stat.IRQTotal, err = strconv.ParseUint(parts[1], 10, 64); err != nil { - return Stat{}, fmt.Errorf("%s: couldn't parse %q (intr): %w", ErrFileParse, parts[1], err) - } - numberedIRQs := parts[2:] - stat.IRQ = make([]uint64, len(numberedIRQs)) - for i, count := range numberedIRQs { - if stat.IRQ[i], err = strconv.ParseUint(count, 10, 64); err != nil { - return Stat{}, fmt.Errorf("%s: couldn't parse %q (intr%d): %w", ErrFileParse, count, i, err) - } - } - case parts[0] == "ctxt": - if stat.ContextSwitches, err = strconv.ParseUint(parts[1], 10, 64); err != nil { - return Stat{}, fmt.Errorf("%s: couldn't parse %q (ctxt): %w", ErrFileParse, parts[1], err) - } - case parts[0] == "processes": - if stat.ProcessCreated, err = strconv.ParseUint(parts[1], 10, 64); err != nil { - return Stat{}, fmt.Errorf("%s: couldn't parse %q (processes): %w", ErrFileParse, parts[1], err) - } - case parts[0] == "procs_running": - if stat.ProcessesRunning, err = strconv.ParseUint(parts[1], 10, 64); err != nil { - return Stat{}, fmt.Errorf("%s: couldn't parse %q (procs_running): %w", ErrFileParse, parts[1], err) - } - case parts[0] == "procs_blocked": - if stat.ProcessesBlocked, err = strconv.ParseUint(parts[1], 10, 64); err != nil { - return Stat{}, fmt.Errorf("%s: couldn't parse %q (procs_blocked): %w", ErrFileParse, parts[1], err) - } - case parts[0] == "softirq": - softIRQStats, total, err := parseSoftIRQStat(line) - if err != nil { - return Stat{}, err - } - stat.SoftIRQTotal = total - stat.SoftIRQ = softIRQStats - case strings.HasPrefix(parts[0], "cpu"): - cpuStat, cpuID, err := parseCPUStat(line) - if err != nil { - return Stat{}, err - } - if cpuID == -1 { - stat.CPUTotal = cpuStat - } else { - stat.CPU[cpuID] = cpuStat - } - } - } - - if err := scanner.Err(); err != nil { - return Stat{}, fmt.Errorf("%s: couldn't parse %q: %w", ErrFileParse, fileName, err) - } - - return stat, nil -} diff --git a/vendor/github.com/prometheus/procfs/swaps.go b/vendor/github.com/prometheus/procfs/swaps.go deleted file mode 100644 index fa00f555d..000000000 --- a/vendor/github.com/prometheus/procfs/swaps.go +++ /dev/null @@ -1,89 +0,0 @@ -// Copyright 2019 The Prometheus Authors -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package procfs - -import ( - "bufio" - "bytes" - "fmt" - "strconv" - "strings" - - "github.com/prometheus/procfs/internal/util" -) - -// Swap represents an entry in /proc/swaps. -type Swap struct { - Filename string - Type string - Size int - Used int - Priority int -} - -// Swaps returns a slice of all configured swap devices on the system. 
-func (fs FS) Swaps() ([]*Swap, error) { - data, err := util.ReadFileNoStat(fs.proc.Path("swaps")) - if err != nil { - return nil, err - } - return parseSwaps(data) -} - -func parseSwaps(info []byte) ([]*Swap, error) { - swaps := []*Swap{} - scanner := bufio.NewScanner(bytes.NewReader(info)) - scanner.Scan() // ignore header line - for scanner.Scan() { - swapString := scanner.Text() - parsedSwap, err := parseSwapString(swapString) - if err != nil { - return nil, err - } - swaps = append(swaps, parsedSwap) - } - - err := scanner.Err() - return swaps, err -} - -func parseSwapString(swapString string) (*Swap, error) { - var err error - - swapFields := strings.Fields(swapString) - swapLength := len(swapFields) - if swapLength < 5 { - return nil, fmt.Errorf("%w: too few fields in swap string: %s", ErrFileParse, swapString) - } - - swap := &Swap{ - Filename: swapFields[0], - Type: swapFields[1], - } - - swap.Size, err = strconv.Atoi(swapFields[2]) - if err != nil { - return nil, fmt.Errorf("%s: invalid swap size: %s: %w", ErrFileParse, swapFields[2], err) - } - swap.Used, err = strconv.Atoi(swapFields[3]) - if err != nil { - return nil, fmt.Errorf("%s: invalid swap used: %s: %w", ErrFileParse, swapFields[3], err) - } - swap.Priority, err = strconv.Atoi(swapFields[4]) - if err != nil { - return nil, fmt.Errorf("%s: invalid swap priority: %s: %w", ErrFileParse, swapFields[4], err) - } - - return swap, nil -} diff --git a/vendor/github.com/prometheus/procfs/thread.go b/vendor/github.com/prometheus/procfs/thread.go deleted file mode 100644 index df2215ece..000000000 --- a/vendor/github.com/prometheus/procfs/thread.go +++ /dev/null @@ -1,80 +0,0 @@ -// Copyright 2022 The Prometheus Authors -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package procfs - -import ( - "fmt" - "os" - "strconv" - - fsi "github.com/prometheus/procfs/internal/fs" -) - -// Provide access to /proc/PID/task/TID files, for thread specific values. Since -// such files have the same structure as /proc/PID/ ones, the data structures -// and the parsers for the latter may be reused. - -// AllThreads returns a list of all currently available threads under /proc/PID. -func AllThreads(pid int) (Procs, error) { - fs, err := NewFS(DefaultMountPoint) - if err != nil { - return Procs{}, err - } - return fs.AllThreads(pid) -} - -// AllThreads returns a list of all currently available threads for PID. -func (fs FS) AllThreads(pid int) (Procs, error) { - taskPath := fs.proc.Path(strconv.Itoa(pid), "task") - d, err := os.Open(taskPath) - if err != nil { - return Procs{}, err - } - defer d.Close() - - names, err := d.Readdirnames(-1) - if err != nil { - return Procs{}, fmt.Errorf("%s: could not read %q: %w", ErrFileRead, d.Name(), err) - } - - t := Procs{} - for _, n := range names { - tid, err := strconv.ParseInt(n, 10, 64) - if err != nil { - continue - } - - t = append(t, Proc{PID: int(tid), fs: FS{fsi.FS(taskPath), fs.isReal}}) - } - - return t, nil -} - -// Thread returns a process for a given PID, TID. 
-func (fs FS) Thread(pid, tid int) (Proc, error) { - taskPath := fs.proc.Path(strconv.Itoa(pid), "task") - if _, err := os.Stat(taskPath); err != nil { - return Proc{}, err - } - return Proc{PID: tid, fs: FS{fsi.FS(taskPath), fs.isReal}}, nil -} - -// Thread returns a process for a given TID of Proc. -func (proc Proc) Thread(tid int) (Proc, error) { - tfs := FS{fsi.FS(proc.path("task")), proc.fs.isReal} - if _, err := os.Stat(tfs.proc.Path(strconv.Itoa(tid))); err != nil { - return Proc{}, err - } - return Proc{PID: tid, fs: tfs}, nil -} diff --git a/vendor/github.com/prometheus/procfs/ttar b/vendor/github.com/prometheus/procfs/ttar deleted file mode 100644 index 19ef02b8d..000000000 --- a/vendor/github.com/prometheus/procfs/ttar +++ /dev/null @@ -1,413 +0,0 @@ -#!/usr/bin/env bash - -# Purpose: plain text tar format -# Limitations: - only suitable for text files, directories, and symlinks -# - stores only filename, content, and mode -# - not designed for untrusted input -# -# Note: must work with bash version 3.2 (macOS) - -# Copyright 2017 Roger Luethi -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -set -o errexit -o nounset - -# Sanitize environment (for instance, standard sorting of glob matches) -export LC_ALL=C - -path="" -CMD="" -ARG_STRING="$*" - -#------------------------------------------------------------------------------ -# Not all sed implementations can work on null bytes. In order to make ttar -# work out of the box on macOS, use Python as a stream editor. - -USE_PYTHON=0 - -PYTHON_CREATE_FILTER=$(cat << 'PCF' -#!/usr/bin/env python - -import re -import sys - -for line in sys.stdin: - line = re.sub(r'EOF', r'\EOF', line) - line = re.sub(r'NULLBYTE', r'\NULLBYTE', line) - line = re.sub('\x00', r'NULLBYTE', line) - sys.stdout.write(line) -PCF -) - -PYTHON_EXTRACT_FILTER=$(cat << 'PEF' -#!/usr/bin/env python - -import re -import sys - -for line in sys.stdin: - line = re.sub(r'(?/dev/null; then - echo "ERROR Python not found. Aborting." - exit 2 - fi - USE_PYTHON=1 - fi -} - -#------------------------------------------------------------------------------ - -function usage { - bname=$(basename "$0") - cat << USAGE -Usage: $bname [-C

] -c -f (create archive) - $bname -t -f (list archive contents) - $bname [-C ] -x -f (extract archive) - -Options: - -C (change directory) - -v (verbose) - --recursive-unlink (recursively delete existing directory if path - collides with file or directory to extract) - -Example: Change to sysfs directory, create ttar file from fixtures directory - $bname -C sysfs -c -f sysfs/fixtures.ttar fixtures/ -USAGE -exit "$1" -} - -function vecho { - if [ "${VERBOSE:-}" == "yes" ]; then - echo >&7 "$@" - fi -} - -function set_cmd { - if [ -n "$CMD" ]; then - echo "ERROR: more than one command given" - echo - usage 2 - fi - CMD=$1 -} - -unset VERBOSE -unset RECURSIVE_UNLINK - -while getopts :cf:-:htxvC: opt; do - case $opt in - c) - set_cmd "create" - ;; - f) - ARCHIVE=$OPTARG - ;; - h) - usage 0 - ;; - t) - set_cmd "list" - ;; - x) - set_cmd "extract" - ;; - v) - VERBOSE=yes - exec 7>&1 - ;; - C) - CDIR=$OPTARG - ;; - -) - case $OPTARG in - recursive-unlink) - RECURSIVE_UNLINK="yes" - ;; - *) - echo -e "Error: invalid option -$OPTARG" - echo - usage 1 - ;; - esac - ;; - *) - echo >&2 "ERROR: invalid option -$OPTARG" - echo - usage 1 - ;; - esac -done - -# Remove processed options from arguments -shift $(( OPTIND - 1 )); - -if [ "${CMD:-}" == "" ]; then - echo >&2 "ERROR: no command given" - echo - usage 1 -elif [ "${ARCHIVE:-}" == "" ]; then - echo >&2 "ERROR: no archive name given" - echo - usage 1 -fi - -function list { - local path="" - local size=0 - local line_no=0 - local ttar_file=$1 - if [ -n "${2:-}" ]; then - echo >&2 "ERROR: too many arguments." - echo - usage 1 - fi - if [ ! -e "$ttar_file" ]; then - echo >&2 "ERROR: file not found ($ttar_file)" - echo - usage 1 - fi - while read -r line; do - line_no=$(( line_no + 1 )) - if [ $size -gt 0 ]; then - size=$(( size - 1 )) - continue - fi - if [[ $line =~ ^Path:\ (.*)$ ]]; then - path=${BASH_REMATCH[1]} - elif [[ $line =~ ^Lines:\ (.*)$ ]]; then - size=${BASH_REMATCH[1]} - echo "$path" - elif [[ $line =~ ^Directory:\ (.*)$ ]]; then - path=${BASH_REMATCH[1]} - echo "$path/" - elif [[ $line =~ ^SymlinkTo:\ (.*)$ ]]; then - echo "$path -> ${BASH_REMATCH[1]}" - fi - done < "$ttar_file" -} - -function extract { - local path="" - local size=0 - local line_no=0 - local ttar_file=$1 - if [ -n "${2:-}" ]; then - echo >&2 "ERROR: too many arguments." - echo - usage 1 - fi - if [ ! -e "$ttar_file" ]; then - echo >&2 "ERROR: file not found ($ttar_file)" - echo - usage 1 - fi - while IFS= read -r line; do - line_no=$(( line_no + 1 )) - local eof_without_newline - if [ "$size" -gt 0 ]; then - if [[ "$line" =~ [^\\]EOF ]]; then - # An EOF not preceded by a backslash indicates that the line - # does not end with a newline - eof_without_newline=1 - else - eof_without_newline=0 - fi - # Replace NULLBYTE with null byte if at beginning of line - # Replace NULLBYTE with null byte unless preceded by backslash - # Remove one backslash in front of NULLBYTE (if any) - # Remove EOF unless preceded by backslash - # Remove one backslash in front of EOF - if [ $USE_PYTHON -eq 1 ]; then - echo -n "$line" | python -c "$PYTHON_EXTRACT_FILTER" >> "$path" - else - # The repeated pattern makes up for sed's lack of negative - # lookbehind assertions (for consecutive null bytes). 
- echo -n "$line" | \ - sed -e 's/^NULLBYTE/\x0/g; - s/\([^\\]\)NULLBYTE/\1\x0/g; - s/\([^\\]\)NULLBYTE/\1\x0/g; - s/\\NULLBYTE/NULLBYTE/g; - s/\([^\\]\)EOF/\1/g; - s/\\EOF/EOF/g; - ' >> "$path" - fi - if [[ "$eof_without_newline" -eq 0 ]]; then - echo >> "$path" - fi - size=$(( size - 1 )) - continue - fi - if [[ $line =~ ^Path:\ (.*)$ ]]; then - path=${BASH_REMATCH[1]} - if [ -L "$path" ]; then - rm "$path" - elif [ -d "$path" ]; then - if [ "${RECURSIVE_UNLINK:-}" == "yes" ]; then - rm -r "$path" - else - # Safe because symlinks to directories are dealt with above - rmdir "$path" - fi - elif [ -e "$path" ]; then - rm "$path" - fi - elif [[ $line =~ ^Lines:\ (.*)$ ]]; then - size=${BASH_REMATCH[1]} - # Create file even if it is zero-length. - touch "$path" - vecho " $path" - elif [[ $line =~ ^Mode:\ (.*)$ ]]; then - mode=${BASH_REMATCH[1]} - chmod "$mode" "$path" - vecho "$mode" - elif [[ $line =~ ^Directory:\ (.*)$ ]]; then - path=${BASH_REMATCH[1]} - mkdir -p "$path" - vecho " $path/" - elif [[ $line =~ ^SymlinkTo:\ (.*)$ ]]; then - ln -s "${BASH_REMATCH[1]}" "$path" - vecho " $path -> ${BASH_REMATCH[1]}" - elif [[ $line =~ ^# ]]; then - # Ignore comments between files - continue - else - echo >&2 "ERROR: Unknown keyword on line $line_no: $line" - exit 1 - fi - done < "$ttar_file" -} - -function div { - echo "# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -" \ - "- - - - - -" -} - -function get_mode { - local mfile=$1 - if [ -z "${STAT_OPTION:-}" ]; then - if stat -c '%a' "$mfile" >/dev/null 2>&1; then - # GNU stat - STAT_OPTION='-c' - STAT_FORMAT='%a' - else - # BSD stat - STAT_OPTION='-f' - # Octal output, user/group/other (omit file type, sticky bit) - STAT_FORMAT='%OLp' - fi - fi - stat "${STAT_OPTION}" "${STAT_FORMAT}" "$mfile" -} - -function _create { - shopt -s nullglob - local mode - local eof_without_newline - while (( "$#" )); do - file=$1 - if [ -L "$file" ]; then - echo "Path: $file" - symlinkTo=$(readlink "$file") - echo "SymlinkTo: $symlinkTo" - vecho " $file -> $symlinkTo" - div - elif [ -d "$file" ]; then - # Strip trailing slash (if there is one) - file=${file%/} - echo "Directory: $file" - mode=$(get_mode "$file") - echo "Mode: $mode" - vecho "$mode $file/" - div - # Find all files and dirs, including hidden/dot files - for x in "$file/"{*,.[^.]*}; do - _create "$x" - done - elif [ -f "$file" ]; then - echo "Path: $file" - lines=$(wc -l "$file"|awk '{print $1}') - eof_without_newline=0 - if [[ "$(wc -c "$file"|awk '{print $1}')" -gt 0 ]] && \ - [[ "$(tail -c 1 "$file" | wc -l)" -eq 0 ]]; then - eof_without_newline=1 - lines=$((lines+1)) - fi - echo "Lines: $lines" - # Add backslash in front of EOF - # Add backslash in front of NULLBYTE - # Replace null byte with NULLBYTE - if [ $USE_PYTHON -eq 1 ]; then - < "$file" python -c "$PYTHON_CREATE_FILTER" - else - < "$file" \ - sed 's/EOF/\\EOF/g; - s/NULLBYTE/\\NULLBYTE/g; - s/\x0/NULLBYTE/g; - ' - fi - if [[ "$eof_without_newline" -eq 1 ]]; then - # Finish line with EOF to indicate that the original line did - # not end with a linefeed - echo "EOF" - fi - mode=$(get_mode "$file") - echo "Mode: $mode" - vecho "$mode $file" - div - else - echo >&2 "ERROR: file not found ($file in $(pwd))" - exit 2 - fi - shift - done -} - -function create { - ttar_file=$1 - shift - if [ -z "${1:-}" ]; then - echo >&2 "ERROR: missing arguments." 
- echo - usage 1 - fi - if [ -e "$ttar_file" ]; then - rm "$ttar_file" - fi - exec > "$ttar_file" - echo "# Archive created by ttar $ARG_STRING" - _create "$@" -} - -test_environment - -if [ -n "${CDIR:-}" ]; then - if [[ "$ARCHIVE" != /* ]]; then - # Relative path: preserve the archive's location before changing - # directory - ARCHIVE="$(pwd)/$ARCHIVE" - fi - cd "$CDIR" -fi - -"$CMD" "$ARCHIVE" "$@" diff --git a/vendor/github.com/prometheus/procfs/vm.go b/vendor/github.com/prometheus/procfs/vm.go deleted file mode 100644 index 51c49d89e..000000000 --- a/vendor/github.com/prometheus/procfs/vm.go +++ /dev/null @@ -1,212 +0,0 @@ -// Copyright 2019 The Prometheus Authors -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -//go:build !windows -// +build !windows - -package procfs - -import ( - "fmt" - "os" - "path/filepath" - "strings" - - "github.com/prometheus/procfs/internal/util" -) - -// The VM interface is described at -// -// https://www.kernel.org/doc/Documentation/sysctl/vm.txt -// -// Each setting is exposed as a single file. -// Each file contains one line with a single numerical value, except lowmem_reserve_ratio which holds an array -// and numa_zonelist_order (deprecated) which is a string. -type VM struct { - AdminReserveKbytes *int64 // /proc/sys/vm/admin_reserve_kbytes - BlockDump *int64 // /proc/sys/vm/block_dump - CompactUnevictableAllowed *int64 // /proc/sys/vm/compact_unevictable_allowed - DirtyBackgroundBytes *int64 // /proc/sys/vm/dirty_background_bytes - DirtyBackgroundRatio *int64 // /proc/sys/vm/dirty_background_ratio - DirtyBytes *int64 // /proc/sys/vm/dirty_bytes - DirtyExpireCentisecs *int64 // /proc/sys/vm/dirty_expire_centisecs - DirtyRatio *int64 // /proc/sys/vm/dirty_ratio - DirtytimeExpireSeconds *int64 // /proc/sys/vm/dirtytime_expire_seconds - DirtyWritebackCentisecs *int64 // /proc/sys/vm/dirty_writeback_centisecs - DropCaches *int64 // /proc/sys/vm/drop_caches - ExtfragThreshold *int64 // /proc/sys/vm/extfrag_threshold - HugetlbShmGroup *int64 // /proc/sys/vm/hugetlb_shm_group - LaptopMode *int64 // /proc/sys/vm/laptop_mode - LegacyVaLayout *int64 // /proc/sys/vm/legacy_va_layout - LowmemReserveRatio []*int64 // /proc/sys/vm/lowmem_reserve_ratio - MaxMapCount *int64 // /proc/sys/vm/max_map_count - MemoryFailureEarlyKill *int64 // /proc/sys/vm/memory_failure_early_kill - MemoryFailureRecovery *int64 // /proc/sys/vm/memory_failure_recovery - MinFreeKbytes *int64 // /proc/sys/vm/min_free_kbytes - MinSlabRatio *int64 // /proc/sys/vm/min_slab_ratio - MinUnmappedRatio *int64 // /proc/sys/vm/min_unmapped_ratio - MmapMinAddr *int64 // /proc/sys/vm/mmap_min_addr - NrHugepages *int64 // /proc/sys/vm/nr_hugepages - NrHugepagesMempolicy *int64 // /proc/sys/vm/nr_hugepages_mempolicy - NrOvercommitHugepages *int64 // /proc/sys/vm/nr_overcommit_hugepages - NumaStat *int64 // /proc/sys/vm/numa_stat - NumaZonelistOrder string // /proc/sys/vm/numa_zonelist_order - OomDumpTasks *int64 // /proc/sys/vm/oom_dump_tasks - OomKillAllocatingTask *int64 // 
/proc/sys/vm/oom_kill_allocating_task - OvercommitKbytes *int64 // /proc/sys/vm/overcommit_kbytes - OvercommitMemory *int64 // /proc/sys/vm/overcommit_memory - OvercommitRatio *int64 // /proc/sys/vm/overcommit_ratio - PageCluster *int64 // /proc/sys/vm/page-cluster - PanicOnOom *int64 // /proc/sys/vm/panic_on_oom - PercpuPagelistFraction *int64 // /proc/sys/vm/percpu_pagelist_fraction - StatInterval *int64 // /proc/sys/vm/stat_interval - Swappiness *int64 // /proc/sys/vm/swappiness - UserReserveKbytes *int64 // /proc/sys/vm/user_reserve_kbytes - VfsCachePressure *int64 // /proc/sys/vm/vfs_cache_pressure - WatermarkBoostFactor *int64 // /proc/sys/vm/watermark_boost_factor - WatermarkScaleFactor *int64 // /proc/sys/vm/watermark_scale_factor - ZoneReclaimMode *int64 // /proc/sys/vm/zone_reclaim_mode -} - -// VM reads the VM statistics from the specified `proc` filesystem. -func (fs FS) VM() (*VM, error) { - path := fs.proc.Path("sys/vm") - file, err := os.Stat(path) - if err != nil { - return nil, err - } - if !file.Mode().IsDir() { - return nil, fmt.Errorf("%w: %s is not a directory", ErrFileRead, path) - } - - files, err := os.ReadDir(path) - if err != nil { - return nil, err - } - - var vm VM - for _, f := range files { - if f.IsDir() { - continue - } - - name := filepath.Join(path, f.Name()) - // ignore errors on read, as there are some write only - // in /proc/sys/vm - value, err := util.SysReadFile(name) - if err != nil { - continue - } - vp := util.NewValueParser(value) - - switch f.Name() { - case "admin_reserve_kbytes": - vm.AdminReserveKbytes = vp.PInt64() - case "block_dump": - vm.BlockDump = vp.PInt64() - case "compact_unevictable_allowed": - vm.CompactUnevictableAllowed = vp.PInt64() - case "dirty_background_bytes": - vm.DirtyBackgroundBytes = vp.PInt64() - case "dirty_background_ratio": - vm.DirtyBackgroundRatio = vp.PInt64() - case "dirty_bytes": - vm.DirtyBytes = vp.PInt64() - case "dirty_expire_centisecs": - vm.DirtyExpireCentisecs = vp.PInt64() - case "dirty_ratio": - vm.DirtyRatio = vp.PInt64() - case "dirtytime_expire_seconds": - vm.DirtytimeExpireSeconds = vp.PInt64() - case "dirty_writeback_centisecs": - vm.DirtyWritebackCentisecs = vp.PInt64() - case "drop_caches": - vm.DropCaches = vp.PInt64() - case "extfrag_threshold": - vm.ExtfragThreshold = vp.PInt64() - case "hugetlb_shm_group": - vm.HugetlbShmGroup = vp.PInt64() - case "laptop_mode": - vm.LaptopMode = vp.PInt64() - case "legacy_va_layout": - vm.LegacyVaLayout = vp.PInt64() - case "lowmem_reserve_ratio": - stringSlice := strings.Fields(value) - pint64Slice := make([]*int64, 0, len(stringSlice)) - for _, value := range stringSlice { - vp := util.NewValueParser(value) - pint64Slice = append(pint64Slice, vp.PInt64()) - } - vm.LowmemReserveRatio = pint64Slice - case "max_map_count": - vm.MaxMapCount = vp.PInt64() - case "memory_failure_early_kill": - vm.MemoryFailureEarlyKill = vp.PInt64() - case "memory_failure_recovery": - vm.MemoryFailureRecovery = vp.PInt64() - case "min_free_kbytes": - vm.MinFreeKbytes = vp.PInt64() - case "min_slab_ratio": - vm.MinSlabRatio = vp.PInt64() - case "min_unmapped_ratio": - vm.MinUnmappedRatio = vp.PInt64() - case "mmap_min_addr": - vm.MmapMinAddr = vp.PInt64() - case "nr_hugepages": - vm.NrHugepages = vp.PInt64() - case "nr_hugepages_mempolicy": - vm.NrHugepagesMempolicy = vp.PInt64() - case "nr_overcommit_hugepages": - vm.NrOvercommitHugepages = vp.PInt64() - case "numa_stat": - vm.NumaStat = vp.PInt64() - case "numa_zonelist_order": - vm.NumaZonelistOrder = value - case 
"oom_dump_tasks": - vm.OomDumpTasks = vp.PInt64() - case "oom_kill_allocating_task": - vm.OomKillAllocatingTask = vp.PInt64() - case "overcommit_kbytes": - vm.OvercommitKbytes = vp.PInt64() - case "overcommit_memory": - vm.OvercommitMemory = vp.PInt64() - case "overcommit_ratio": - vm.OvercommitRatio = vp.PInt64() - case "page-cluster": - vm.PageCluster = vp.PInt64() - case "panic_on_oom": - vm.PanicOnOom = vp.PInt64() - case "percpu_pagelist_fraction": - vm.PercpuPagelistFraction = vp.PInt64() - case "stat_interval": - vm.StatInterval = vp.PInt64() - case "swappiness": - vm.Swappiness = vp.PInt64() - case "user_reserve_kbytes": - vm.UserReserveKbytes = vp.PInt64() - case "vfs_cache_pressure": - vm.VfsCachePressure = vp.PInt64() - case "watermark_boost_factor": - vm.WatermarkBoostFactor = vp.PInt64() - case "watermark_scale_factor": - vm.WatermarkScaleFactor = vp.PInt64() - case "zone_reclaim_mode": - vm.ZoneReclaimMode = vp.PInt64() - } - if err := vp.Err(); err != nil { - return nil, err - } - } - - return &vm, nil -} diff --git a/vendor/github.com/prometheus/procfs/zoneinfo.go b/vendor/github.com/prometheus/procfs/zoneinfo.go deleted file mode 100644 index ce5fefa5b..000000000 --- a/vendor/github.com/prometheus/procfs/zoneinfo.go +++ /dev/null @@ -1,196 +0,0 @@ -// Copyright 2019 The Prometheus Authors -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -//go:build !windows -// +build !windows - -package procfs - -import ( - "bytes" - "fmt" - "os" - "regexp" - "strings" - - "github.com/prometheus/procfs/internal/util" -) - -// Zoneinfo holds info parsed from /proc/zoneinfo. -type Zoneinfo struct { - Node string - Zone string - NrFreePages *int64 - Min *int64 - Low *int64 - High *int64 - Scanned *int64 - Spanned *int64 - Present *int64 - Managed *int64 - NrActiveAnon *int64 - NrInactiveAnon *int64 - NrIsolatedAnon *int64 - NrAnonPages *int64 - NrAnonTransparentHugepages *int64 - NrActiveFile *int64 - NrInactiveFile *int64 - NrIsolatedFile *int64 - NrFilePages *int64 - NrSlabReclaimable *int64 - NrSlabUnreclaimable *int64 - NrMlockStack *int64 - NrKernelStack *int64 - NrMapped *int64 - NrDirty *int64 - NrWriteback *int64 - NrUnevictable *int64 - NrShmem *int64 - NrDirtied *int64 - NrWritten *int64 - NumaHit *int64 - NumaMiss *int64 - NumaForeign *int64 - NumaInterleave *int64 - NumaLocal *int64 - NumaOther *int64 - Protection []*int64 -} - -var nodeZoneRE = regexp.MustCompile(`(\d+), zone\s+(\w+)`) - -// Zoneinfo parses an zoneinfo-file (/proc/zoneinfo) and returns a slice of -// structs containing the relevant info. 
More information available here: -// https://www.kernel.org/doc/Documentation/sysctl/vm.txt -func (fs FS) Zoneinfo() ([]Zoneinfo, error) { - data, err := os.ReadFile(fs.proc.Path("zoneinfo")) - if err != nil { - return nil, fmt.Errorf("%s: error reading zoneinfo %q: %w", ErrFileRead, fs.proc.Path("zoneinfo"), err) - } - zoneinfo, err := parseZoneinfo(data) - if err != nil { - return nil, fmt.Errorf("%s: error parsing zoneinfo %q: %w", ErrFileParse, fs.proc.Path("zoneinfo"), err) - } - return zoneinfo, nil -} - -func parseZoneinfo(zoneinfoData []byte) ([]Zoneinfo, error) { - - zoneinfo := []Zoneinfo{} - - zoneinfoBlocks := bytes.Split(zoneinfoData, []byte("\nNode")) - for _, block := range zoneinfoBlocks { - var zoneinfoElement Zoneinfo - lines := strings.Split(string(block), "\n") - for _, line := range lines { - - if nodeZone := nodeZoneRE.FindStringSubmatch(line); nodeZone != nil { - zoneinfoElement.Node = nodeZone[1] - zoneinfoElement.Zone = nodeZone[2] - continue - } - if strings.HasPrefix(strings.TrimSpace(line), "per-node stats") { - continue - } - parts := strings.Fields(strings.TrimSpace(line)) - if len(parts) < 2 { - continue - } - vp := util.NewValueParser(parts[1]) - switch parts[0] { - case "nr_free_pages": - zoneinfoElement.NrFreePages = vp.PInt64() - case "min": - zoneinfoElement.Min = vp.PInt64() - case "low": - zoneinfoElement.Low = vp.PInt64() - case "high": - zoneinfoElement.High = vp.PInt64() - case "scanned": - zoneinfoElement.Scanned = vp.PInt64() - case "spanned": - zoneinfoElement.Spanned = vp.PInt64() - case "present": - zoneinfoElement.Present = vp.PInt64() - case "managed": - zoneinfoElement.Managed = vp.PInt64() - case "nr_active_anon": - zoneinfoElement.NrActiveAnon = vp.PInt64() - case "nr_inactive_anon": - zoneinfoElement.NrInactiveAnon = vp.PInt64() - case "nr_isolated_anon": - zoneinfoElement.NrIsolatedAnon = vp.PInt64() - case "nr_anon_pages": - zoneinfoElement.NrAnonPages = vp.PInt64() - case "nr_anon_transparent_hugepages": - zoneinfoElement.NrAnonTransparentHugepages = vp.PInt64() - case "nr_active_file": - zoneinfoElement.NrActiveFile = vp.PInt64() - case "nr_inactive_file": - zoneinfoElement.NrInactiveFile = vp.PInt64() - case "nr_isolated_file": - zoneinfoElement.NrIsolatedFile = vp.PInt64() - case "nr_file_pages": - zoneinfoElement.NrFilePages = vp.PInt64() - case "nr_slab_reclaimable": - zoneinfoElement.NrSlabReclaimable = vp.PInt64() - case "nr_slab_unreclaimable": - zoneinfoElement.NrSlabUnreclaimable = vp.PInt64() - case "nr_mlock_stack": - zoneinfoElement.NrMlockStack = vp.PInt64() - case "nr_kernel_stack": - zoneinfoElement.NrKernelStack = vp.PInt64() - case "nr_mapped": - zoneinfoElement.NrMapped = vp.PInt64() - case "nr_dirty": - zoneinfoElement.NrDirty = vp.PInt64() - case "nr_writeback": - zoneinfoElement.NrWriteback = vp.PInt64() - case "nr_unevictable": - zoneinfoElement.NrUnevictable = vp.PInt64() - case "nr_shmem": - zoneinfoElement.NrShmem = vp.PInt64() - case "nr_dirtied": - zoneinfoElement.NrDirtied = vp.PInt64() - case "nr_written": - zoneinfoElement.NrWritten = vp.PInt64() - case "numa_hit": - zoneinfoElement.NumaHit = vp.PInt64() - case "numa_miss": - zoneinfoElement.NumaMiss = vp.PInt64() - case "numa_foreign": - zoneinfoElement.NumaForeign = vp.PInt64() - case "numa_interleave": - zoneinfoElement.NumaInterleave = vp.PInt64() - case "numa_local": - zoneinfoElement.NumaLocal = vp.PInt64() - case "numa_other": - zoneinfoElement.NumaOther = vp.PInt64() - case "protection:": - protectionParts := strings.Split(line, ":") - 
protectionValues := strings.Replace(protectionParts[1], "(", "", 1) - protectionValues = strings.Replace(protectionValues, ")", "", 1) - protectionValues = strings.TrimSpace(protectionValues) - protectionStringMap := strings.Split(protectionValues, ", ") - val, err := util.ParsePInt64s(protectionStringMap) - if err == nil { - zoneinfoElement.Protection = val - } - } - - } - - zoneinfo = append(zoneinfo, zoneinfoElement) - } - return zoneinfo, nil -} diff --git a/vendor/github.com/quasilyte/go-ruleguard/LICENSE b/vendor/github.com/quasilyte/go-ruleguard/LICENSE deleted file mode 100644 index 558f81ff2..000000000 --- a/vendor/github.com/quasilyte/go-ruleguard/LICENSE +++ /dev/null @@ -1,29 +0,0 @@ -BSD 3-Clause License - -Copyright (c) 2022, Iskander (Alex) Sharipov / quasilyte -All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are met: - -1. Redistributions of source code must retain the above copyright notice, this - list of conditions and the following disclaimer. - -2. Redistributions in binary form must reproduce the above copyright notice, - this list of conditions and the following disclaimer in the documentation - and/or other materials provided with the distribution. - -3. Neither the name of the copyright holder nor the names of its - contributors may be used to endorse or promote products derived from - this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" -AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE -IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE -FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL -DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR -SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, -OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
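Note (not part of the diff): the procfs helpers deleted above (stat.go, swaps.go, thread.go) only disappear from vendor/; the upstream module remains available. A minimal, hedged sketch of how that API is normally consumed by importing github.com/prometheus/procfs directly — the constructor, field names, and DefaultMountPoint constant are taken from the deleted files above, while the program itself is illustrative only and assumes a Linux /proc.

// sketch.go — illustrative only, not part of this change.
package main

import (
	"fmt"
	"log"

	"github.com/prometheus/procfs"
)

func main() {
	// DefaultMountPoint is the usual "/proc"; the same constructor call
	// appears in the deleted thread.go above.
	fs, err := procfs.NewFS(procfs.DefaultMountPoint)
	if err != nil {
		log.Fatal(err)
	}

	// Stat parses /proc/stat (see the deleted stat.go above).
	stat, err := fs.Stat()
	if err != nil {
		log.Fatal(err)
	}
	fmt.Printf("boot time: %d, context switches: %d\n", stat.BootTime, stat.ContextSwitches)

	// Swaps parses /proc/swaps (see the deleted swaps.go above).
	swaps, err := fs.Swaps()
	if err != nil {
		log.Fatal(err)
	}
	for _, s := range swaps {
		fmt.Printf("swap %s: used %d of %d (values as reported by /proc/swaps)\n", s.Filename, s.Used, s.Size)
	}
}

The sketch compiles and runs only on systems that expose /proc; callers that need a non-standard mount point can pass it to NewFS instead of DefaultMountPoint.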
diff --git a/vendor/github.com/quasilyte/go-ruleguard/internal/goenv/goenv.go b/vendor/github.com/quasilyte/go-ruleguard/internal/goenv/goenv.go deleted file mode 100644 index 52d0f5204..000000000 --- a/vendor/github.com/quasilyte/go-ruleguard/internal/goenv/goenv.go +++ /dev/null @@ -1,34 +0,0 @@ -package goenv - -import ( - "errors" - "os/exec" - "strings" -) - -func Read() (map[string]string, error) { - // pass in a fixed set of var names to avoid needing to unescape output - // pass in literals here instead of a variable list to avoid security linter warnings about command injection - out, err := exec.Command("go", "env", "GOROOT", "GOPATH", "GOARCH", "GOOS", "CGO_ENABLED").CombinedOutput() - if err != nil { - return nil, err - } - return parseGoEnv([]string{"GOROOT", "GOPATH", "GOARCH", "GOOS", "CGO_ENABLED"}, out) -} - -func parseGoEnv(varNames []string, data []byte) (map[string]string, error) { - vars := make(map[string]string) - - lines := strings.Split(strings.ReplaceAll(string(data), "\r\n", "\n"), "\n") - for i, varName := range varNames { - if i < len(lines) && len(lines[i]) > 0 { - vars[varName] = lines[i] - } - } - - if len(vars) == 0 { - return nil, errors.New("empty env set") - } - - return vars, nil -} diff --git a/vendor/github.com/quasilyte/go-ruleguard/internal/golist/golist.go b/vendor/github.com/quasilyte/go-ruleguard/internal/golist/golist.go deleted file mode 100644 index 50f9cca0b..000000000 --- a/vendor/github.com/quasilyte/go-ruleguard/internal/golist/golist.go +++ /dev/null @@ -1,30 +0,0 @@ -package golist - -import ( - "bytes" - "encoding/json" - "fmt" - "io" - "os/exec" -) - -// Package is `go list --json` output structure. -type Package struct { - Dir string // directory containing package sources - ImportPath string // import path of package in dir - GoFiles []string // .go source files (excluding CgoFiles, TestGoFiles, XTestGoFiles) -} - -// JSON runs `go list --json` for the specified pkgName and returns the parsed JSON. 
-func JSON(pkgPath string) (*Package, error) { - out, err := exec.Command("go", "list", "--json", pkgPath).CombinedOutput() - if err != nil { - return nil, fmt.Errorf("go list error (%v): %s", err, out) - } - - var pkg Package - if err := json.NewDecoder(bytes.NewReader(out)).Decode(&pkg); err != io.EOF && err != nil { - return nil, err - } - return &pkg, nil -} diff --git a/vendor/github.com/quasilyte/go-ruleguard/internal/xsrcimporter/xsrcimporter.go b/vendor/github.com/quasilyte/go-ruleguard/internal/xsrcimporter/xsrcimporter.go deleted file mode 100644 index 3b58f97c7..000000000 --- a/vendor/github.com/quasilyte/go-ruleguard/internal/xsrcimporter/xsrcimporter.go +++ /dev/null @@ -1,29 +0,0 @@ -package xsrcimporter - -import ( - "go/build" - "go/importer" - "go/token" - "go/types" - "unsafe" -) - -func New(ctxt *build.Context, fset *token.FileSet) types.Importer { - imp := importer.ForCompiler(fset, "source", nil) - ifaceVal := *(*iface)(unsafe.Pointer(&imp)) - srcImp := (*srcImporter)(ifaceVal.data) - srcImp.ctxt = ctxt - return imp -} - -type iface struct { - _ *byte - data unsafe.Pointer -} - -type srcImporter struct { - ctxt *build.Context - _ *token.FileSet - _ types.Sizes - _ map[string]*types.Package -} diff --git a/vendor/github.com/quasilyte/go-ruleguard/internal/xtypes/xtypes.go b/vendor/github.com/quasilyte/go-ruleguard/internal/xtypes/xtypes.go deleted file mode 100644 index 4c5c2a2ab..000000000 --- a/vendor/github.com/quasilyte/go-ruleguard/internal/xtypes/xtypes.go +++ /dev/null @@ -1,266 +0,0 @@ -package xtypes - -import ( - "go/types" - - "golang.org/x/exp/typeparams" -) - -// Implements reports whether type v implements iface. -// -// Unlike types.Implements(), it permits X and Y named types -// to be considered identical even if their addresses are different. -func Implements(v types.Type, iface *types.Interface) bool { - if iface.Empty() { - return true - } - - if v, _ := v.Underlying().(*types.Interface); v != nil { - for i := 0; i < iface.NumMethods(); i++ { - m := iface.Method(i) - obj, _, _ := types.LookupFieldOrMethod(v, false, m.Pkg(), m.Name()) - switch { - case obj == nil: - return false - case !Identical(obj.Type(), m.Type()): - return false - } - } - return true - } - - // A concrete type v implements iface if it implements all methods of iface. - for i := 0; i < iface.NumMethods(); i++ { - m := iface.Method(i) - - obj, _, _ := types.LookupFieldOrMethod(v, false, m.Pkg(), m.Name()) - if obj == nil { - return false - } - - f, ok := obj.(*types.Func) - if !ok { - return false - } - - if !Identical(f.Type(), m.Type()) { - return false - } - } - - return true -} - -// Identical reports whether x and y are identical types. -// -// Unlike types.Identical(), it permits X and Y named types -// to be considered identical even if their addresses are different. -func Identical(x, y types.Type) bool { - return typeIdentical(x, y, nil) -} - -func typeIdentical(x, y types.Type, p *ifacePair) bool { - if x == y { - return true - } - - switch x := x.(type) { - case *types.Basic: - // Basic types are singletons except for the rune and byte - // aliases, thus we cannot solely rely on the x == y check - // above. See also comment in TypeName.IsAlias. - if y, ok := y.(*types.Basic); ok { - return x.Kind() == y.Kind() - } - - case *types.Array: - // Two array types are identical if they have identical element types - // and the same array length. 
- if y, ok := y.(*types.Array); ok { - // If one or both array lengths are unknown (< 0) due to some error, - // assume they are the same to avoid spurious follow-on errors. - return (x.Len() < 0 || y.Len() < 0 || x.Len() == y.Len()) && typeIdentical(x.Elem(), y.Elem(), p) - } - - case *types.Slice: - // Two slice types are identical if they have identical element types. - if y, ok := y.(*types.Slice); ok { - return typeIdentical(x.Elem(), y.Elem(), p) - } - - case *types.Struct: - // Two struct types are identical if they have the same sequence of fields, - // and if corresponding fields have the same names, and identical types, - // and identical tags. Two embedded fields are considered to have the same - // name. Lower-case field names from different packages are always different. - if y, ok := y.(*types.Struct); ok { - if x.NumFields() == y.NumFields() { - for i := 0; i < x.NumFields(); i++ { - f := x.Field(i) - g := y.Field(i) - if f.Embedded() != g.Embedded() || !sameID(f, g.Pkg(), g.Name()) || !typeIdentical(f.Type(), g.Type(), p) { - return false - } - } - return true - } - } - - case *types.Pointer: - // Two pointer types are identical if they have identical base types. - if y, ok := y.(*types.Pointer); ok { - return typeIdentical(x.Elem(), y.Elem(), p) - } - - case *types.Tuple: - // Two tuples types are identical if they have the same number of elements - // and corresponding elements have identical types. - if y, ok := y.(*types.Tuple); ok { - if x.Len() == y.Len() { - if x != nil { - for i := 0; i < x.Len(); i++ { - v := x.At(i) - w := y.At(i) - if !typeIdentical(v.Type(), w.Type(), p) { - return false - } - } - } - return true - } - } - - case *types.Signature: - // Two function types are identical if they have the same number of parameters - // and result values, corresponding parameter and result types are identical, - // and either both functions are variadic or neither is. Parameter and result - // names are not required to match. - if y, ok := y.(*types.Signature); ok { - return x.Variadic() == y.Variadic() && - typeIdentical(x.Params(), y.Params(), p) && - typeIdentical(x.Results(), y.Results(), p) - } - - case *typeparams.Union: - // TODO(quasilyte): do we want to match generic union types too? - // It would require copying a lot of code from the go/types. - return false - - case *types.Interface: - // Two interface types are identical if they have the same set of methods with - // the same names and identical function types. Lower-case method names from - // different packages are always different. The order of the methods is irrelevant. - if y, ok := y.(*types.Interface); ok { - if x.NumMethods() != y.NumMethods() { - return false - } - // Interface types are the only types where cycles can occur - // that are not "terminated" via named types; and such cycles - // can only be created via method parameter types that are - // anonymous interfaces (directly or indirectly) embedding - // the current interface. Example: - // - // type T interface { - // m() interface{T} - // } - // - // If two such (differently named) interfaces are compared, - // endless recursion occurs if the cycle is not detected. - // - // If x and y were compared before, they must be equal - // (if they were not, the recursion would have stopped); - // search the ifacePair stack for the same pair. 
- // - // This is a quadratic algorithm, but in practice these stacks - // are extremely short (bounded by the nesting depth of interface - // type declarations that recur via parameter types, an extremely - // rare occurrence). An alternative implementation might use a - // "visited" map, but that is probably less efficient overall. - q := &ifacePair{x, y, p} - for p != nil { - if p.identical(q) { - return true // same pair was compared before - } - p = p.prev - } - for i := 0; i < x.NumMethods(); i++ { - f := x.Method(i) - g := y.Method(i) - if f.Id() != g.Id() || !typeIdentical(f.Type(), g.Type(), q) { - return false - } - } - return true - } - - case *types.Map: - // Two map types are identical if they have identical key and value types. - if y, ok := y.(*types.Map); ok { - return typeIdentical(x.Key(), y.Key(), p) && typeIdentical(x.Elem(), y.Elem(), p) - } - - case *types.Chan: - // Two channel types are identical if they have identical value types - // and the same direction. - if y, ok := y.(*types.Chan); ok { - return x.Dir() == y.Dir() && typeIdentical(x.Elem(), y.Elem(), p) - } - - case *types.Named: - // Two named types are identical if their type names originate - // in the same type declaration. - y, ok := y.(*types.Named) - if !ok { - return false - } - if x.Obj() == y.Obj() { - return true - } - return sameID(x.Obj(), y.Obj().Pkg(), y.Obj().Name()) - - case *typeparams.TypeParam: - // nothing to do (x and y being equal is caught in the very beginning of this function) - - case nil: - // avoid a crash in case of nil type - - default: - panic("unreachable") - } - - return false -} - -// An ifacePair is a node in a stack of interface type pairs compared for identity. -type ifacePair struct { - x *types.Interface - y *types.Interface - prev *ifacePair -} - -func (p *ifacePair) identical(q *ifacePair) bool { - return (p.x == q.x && p.y == q.y) || - (p.x == q.y && p.y == q.x) -} - -func sameID(obj types.Object, pkg *types.Package, name string) bool { - // spec: - // "Two identifiers are different if they are spelled differently, - // or if they appear in different packages and are not exported. - // Otherwise, they are the same." 
- if name != obj.Name() { - return false - } - // obj.Name == name - if obj.Exported() { - return true - } - // not exported, so packages must be the same (pkg == nil for - // fields in Universe scope; this can only happen for types - // introduced via Eval) - if pkg == nil || obj.Pkg() == nil { - return pkg == obj.Pkg() - } - // pkg != nil && obj.pkg != nil - return pkg.Path() == obj.Pkg().Path() -} diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/ast_walker.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/ast_walker.go deleted file mode 100644 index c52a5a822..000000000 --- a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/ast_walker.go +++ /dev/null @@ -1,369 +0,0 @@ -package ruleguard - -import ( - "go/ast" - "go/constant" - - "github.com/quasilyte/gogrep/nodetag" -) - -type astWalker struct { - nodePath *nodePath - - filterParams *filterParams - - visit func(ast.Node, nodetag.Value) -} - -func (w *astWalker) Walk(root ast.Node, visit func(ast.Node, nodetag.Value)) { - w.visit = visit - w.walk(root) -} - -func (w *astWalker) walkIdentList(list []*ast.Ident) { - for _, x := range list { - w.walk(x) - } -} - -func (w *astWalker) walkExprList(list []ast.Expr) { - for _, x := range list { - w.walk(x) - } -} - -func (w *astWalker) walkStmtList(list []ast.Stmt) { - for _, x := range list { - w.walk(x) - } -} - -func (w *astWalker) walkDeclList(list []ast.Decl) { - for _, x := range list { - w.walk(x) - } -} - -func (w *astWalker) walk(n ast.Node) { - w.nodePath.Push(n) - defer w.nodePath.Pop() - - switch n := n.(type) { - case *ast.Field: - // TODO: handle field types. - // See #252 - w.walkIdentList(n.Names) - w.walk(n.Type) - - case *ast.FieldList: - for _, f := range n.List { - w.walk(f) - } - - case *ast.Ellipsis: - w.visit(n, nodetag.Ellipsis) - if n.Elt != nil { - w.walk(n.Elt) - } - - case *ast.FuncLit: - w.visit(n, nodetag.FuncLit) - w.walk(n.Type) - w.walk(n.Body) - - case *ast.CompositeLit: - w.visit(n, nodetag.CompositeLit) - if n.Type != nil { - w.walk(n.Type) - } - w.walkExprList(n.Elts) - - case *ast.ParenExpr: - w.visit(n, nodetag.ParenExpr) - w.walk(n.X) - - case *ast.SelectorExpr: - w.visit(n, nodetag.SelectorExpr) - w.walk(n.X) - w.walk(n.Sel) - - case *ast.IndexExpr: - w.visit(n, nodetag.IndexExpr) - w.walk(n.X) - w.walk(n.Index) - - case *ast.SliceExpr: - w.visit(n, nodetag.SliceExpr) - w.walk(n.X) - if n.Low != nil { - w.walk(n.Low) - } - if n.High != nil { - w.walk(n.High) - } - if n.Max != nil { - w.walk(n.Max) - } - - case *ast.TypeAssertExpr: - w.visit(n, nodetag.TypeAssertExpr) - w.walk(n.X) - if n.Type != nil { - w.walk(n.Type) - } - - case *ast.CallExpr: - w.visit(n, nodetag.CallExpr) - w.walk(n.Fun) - w.walkExprList(n.Args) - - case *ast.StarExpr: - w.visit(n, nodetag.StarExpr) - w.walk(n.X) - - case *ast.UnaryExpr: - w.visit(n, nodetag.UnaryExpr) - w.walk(n.X) - - case *ast.BinaryExpr: - w.visit(n, nodetag.BinaryExpr) - w.walk(n.X) - w.walk(n.Y) - - case *ast.KeyValueExpr: - w.visit(n, nodetag.KeyValueExpr) - w.walk(n.Key) - w.walk(n.Value) - - case *ast.ArrayType: - w.visit(n, nodetag.ArrayType) - if n.Len != nil { - w.walk(n.Len) - } - w.walk(n.Elt) - - case *ast.StructType: - w.visit(n, nodetag.StructType) - w.walk(n.Fields) - - case *ast.FuncType: - w.visit(n, nodetag.FuncType) - if n.Params != nil { - w.walk(n.Params) - } - if n.Results != nil { - w.walk(n.Results) - } - - case *ast.InterfaceType: - w.visit(n, nodetag.InterfaceType) - w.walk(n.Methods) - - case *ast.MapType: - w.visit(n, nodetag.MapType) - w.walk(n.Key) - 
w.walk(n.Value) - - case *ast.ChanType: - w.visit(n, nodetag.ChanType) - w.walk(n.Value) - - case *ast.DeclStmt: - w.visit(n, nodetag.DeclStmt) - w.walk(n.Decl) - - case *ast.LabeledStmt: - w.visit(n, nodetag.LabeledStmt) - w.walk(n.Label) - w.walk(n.Stmt) - - case *ast.ExprStmt: - w.visit(n, nodetag.ExprStmt) - w.walk(n.X) - - case *ast.SendStmt: - w.visit(n, nodetag.SendStmt) - w.walk(n.Chan) - w.walk(n.Value) - - case *ast.IncDecStmt: - w.visit(n, nodetag.IncDecStmt) - w.walk(n.X) - - case *ast.AssignStmt: - w.visit(n, nodetag.AssignStmt) - w.walkExprList(n.Lhs) - w.walkExprList(n.Rhs) - - case *ast.GoStmt: - w.visit(n, nodetag.GoStmt) - w.walk(n.Call) - - case *ast.DeferStmt: - w.visit(n, nodetag.DeferStmt) - w.walk(n.Call) - - case *ast.ReturnStmt: - w.visit(n, nodetag.ReturnStmt) - w.walkExprList(n.Results) - - case *ast.BranchStmt: - w.visit(n, nodetag.BranchStmt) - if n.Label != nil { - w.walk(n.Label) - } - - case *ast.BlockStmt: - w.visit(n, nodetag.BlockStmt) - w.walkStmtList(n.List) - - case *ast.IfStmt: - w.visit(n, nodetag.IfStmt) - if n.Init != nil { - w.walk(n.Init) - } - w.walk(n.Cond) - deadcode := w.filterParams.deadcode - if !deadcode { - cv := w.filterParams.ctx.Types.Types[n.Cond].Value - if cv != nil { - w.filterParams.deadcode = !deadcode && !constant.BoolVal(cv) - w.walk(n.Body) - w.filterParams.deadcode = !w.filterParams.deadcode - if n.Else != nil { - w.walk(n.Else) - } - w.filterParams.deadcode = deadcode - return - } - } - w.walk(n.Body) - if n.Else != nil { - w.walk(n.Else) - } - - case *ast.CaseClause: - w.visit(n, nodetag.CaseClause) - w.walkExprList(n.List) - w.walkStmtList(n.Body) - - case *ast.SwitchStmt: - w.visit(n, nodetag.SwitchStmt) - if n.Init != nil { - w.walk(n.Init) - } - if n.Tag != nil { - w.walk(n.Tag) - } - w.walk(n.Body) - - case *ast.TypeSwitchStmt: - w.visit(n, nodetag.TypeSwitchStmt) - if n.Init != nil { - w.walk(n.Init) - } - w.walk(n.Assign) - w.walk(n.Body) - - case *ast.CommClause: - w.visit(n, nodetag.CommClause) - if n.Comm != nil { - w.walk(n.Comm) - } - w.walkStmtList(n.Body) - - case *ast.SelectStmt: - w.visit(n, nodetag.SelectStmt) - w.walk(n.Body) - - case *ast.ForStmt: - w.visit(n, nodetag.ForStmt) - if n.Init != nil { - w.walk(n.Init) - } - if n.Cond != nil { - w.walk(n.Cond) - } - if n.Post != nil { - w.walk(n.Post) - } - w.walk(n.Body) - - case *ast.RangeStmt: - w.visit(n, nodetag.RangeStmt) - if n.Key != nil { - w.walk(n.Key) - } - if n.Value != nil { - w.walk(n.Value) - } - w.walk(n.X) - w.walk(n.Body) - - case *ast.ImportSpec: - w.visit(n, nodetag.ImportSpec) - if n.Name != nil { - w.walk(n.Name) - } - w.walk(n.Path) - if n.Comment != nil { - w.walk(n.Comment) - } - - case *ast.ValueSpec: - w.visit(n, nodetag.ValueSpec) - if n.Doc != nil { - w.walk(n.Doc) - } - w.walkIdentList(n.Names) - if n.Type != nil { - w.walk(n.Type) - } - w.walkExprList(n.Values) - if n.Comment != nil { - w.walk(n.Comment) - } - - case *ast.TypeSpec: - w.visit(n, nodetag.TypeSpec) - if n.Doc != nil { - w.walk(n.Doc) - } - w.walk(n.Name) - w.walk(n.Type) - if n.Comment != nil { - w.walk(n.Comment) - } - - case *ast.GenDecl: - w.visit(n, nodetag.GenDecl) - if n.Doc != nil { - w.walk(n.Doc) - } - for _, s := range n.Specs { - w.walk(s) - } - - case *ast.FuncDecl: - w.visit(n, nodetag.FuncDecl) - prevFunc := w.filterParams.currentFunc - w.filterParams.currentFunc = n - if n.Doc != nil { - w.walk(n.Doc) - } - if n.Recv != nil { - w.walk(n.Recv) - } - w.walk(n.Name) - w.walk(n.Type) - if n.Body != nil { - w.walk(n.Body) - } - w.filterParams.currentFunc 
= prevFunc - - case *ast.File: - w.visit(n, nodetag.File) - w.walk(n.Name) - w.walkDeclList(n.Decls) - } -} diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/bundle.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/bundle.go deleted file mode 100644 index 72a334b9c..000000000 --- a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/bundle.go +++ /dev/null @@ -1,19 +0,0 @@ -package ruleguard - -import ( - "path/filepath" - - "github.com/quasilyte/go-ruleguard/internal/golist" -) - -func findBundleFiles(pkgPath string) ([]string, error) { // nolint - pkg, err := golist.JSON(pkgPath) - if err != nil { - return nil, err - } - files := make([]string, 0, len(pkg.GoFiles)) - for _, f := range pkg.GoFiles { - files = append(files, filepath.Join(pkg.Dir, f)) - } - return files, nil -} diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/engine.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/engine.go deleted file mode 100644 index e4cf954ff..000000000 --- a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/engine.go +++ /dev/null @@ -1,270 +0,0 @@ -package ruleguard - -import ( - "errors" - "fmt" - "go/ast" - "go/build" - "go/token" - "go/types" - "io" - "os" - "sort" - "strings" - "sync" - - "github.com/quasilyte/go-ruleguard/internal/goenv" - "github.com/quasilyte/go-ruleguard/ruleguard/ir" - "github.com/quasilyte/go-ruleguard/ruleguard/quasigo" - "github.com/quasilyte/go-ruleguard/ruleguard/quasigo/stdlib/qfmt" - "github.com/quasilyte/go-ruleguard/ruleguard/quasigo/stdlib/qstrconv" - "github.com/quasilyte/go-ruleguard/ruleguard/quasigo/stdlib/qstrings" - "github.com/quasilyte/go-ruleguard/ruleguard/typematch" - "github.com/quasilyte/stdinfo" -) - -type engine struct { - state *engineState - - ruleSet *goRuleSet -} - -func newEngine() *engine { - return &engine{ - state: newEngineState(), - } -} - -func (e *engine) LoadedGroups() []GoRuleGroup { - result := make([]GoRuleGroup, 0, len(e.ruleSet.groups)) - for _, g := range e.ruleSet.groups { - result = append(result, *g) - } - sort.Slice(result, func(i, j int) bool { - return result[i].Name < result[j].Name - }) - return result -} - -func (e *engine) Load(ctx *LoadContext, buildContext *build.Context, filename string, r io.Reader) error { - data, err := io.ReadAll(r) - if err != nil { - return err - } - imp := newGoImporter(e.state, goImporterConfig{ - fset: ctx.Fset, - debugImports: ctx.DebugImports, - debugPrint: ctx.DebugPrint, - buildContext: buildContext, - }) - irfile, pkg, err := convertAST(ctx, imp, filename, data) - if err != nil { - return err - } - config := irLoaderConfig{ - state: e.state, - pkg: pkg, - ctx: ctx, - importer: imp, - itab: typematch.NewImportsTab(stdinfo.PathByName), - gogrepFset: token.NewFileSet(), - } - l := newIRLoader(config) - rset, err := l.LoadFile(filename, irfile) - if err != nil { - return err - } - - if e.ruleSet == nil { - e.ruleSet = rset - } else { - combinedRuleSet, err := mergeRuleSets([]*goRuleSet{e.ruleSet, rset}) - if err != nil { - return err - } - e.ruleSet = combinedRuleSet - } - - return nil -} - -func (e *engine) LoadFromIR(ctx *LoadContext, buildContext *build.Context, filename string, f *ir.File) error { - imp := newGoImporter(e.state, goImporterConfig{ - fset: ctx.Fset, - debugImports: ctx.DebugImports, - debugPrint: ctx.DebugPrint, - buildContext: buildContext, - }) - config := irLoaderConfig{ - state: e.state, - ctx: ctx, - importer: imp, - itab: typematch.NewImportsTab(stdinfo.PathByName), - gogrepFset: token.NewFileSet(), - } - l := 
newIRLoader(config) - rset, err := l.LoadFile(filename, f) - if err != nil { - return err - } - - if e.ruleSet == nil { - e.ruleSet = rset - } else { - combinedRuleSet, err := mergeRuleSets([]*goRuleSet{e.ruleSet, rset}) - if err != nil { - return err - } - e.ruleSet = combinedRuleSet - } - - return nil -} - -func (e *engine) Run(ctx *RunContext, buildContext *build.Context, f *ast.File) error { - if e.ruleSet == nil { - return errors.New("used Run() with an empty rule set; forgot to call Load() first?") - } - rset := e.ruleSet - return newRulesRunner(ctx, buildContext, e.state, rset).run(f) -} - -// engineState is a shared state inside the engine. -// Its access is synchronized, unlike the RunnerState which should be thread-local. -type engineState struct { - env *quasigo.Env - - typeByFQNMu sync.RWMutex - typeByFQN map[string]types.Type - - pkgCacheMu sync.RWMutex - // pkgCache contains all imported packages, from any importer. - pkgCache map[string]*types.Package -} - -func newEngineState() *engineState { - env := quasigo.NewEnv() - qstrings.ImportAll(env) - qstrconv.ImportAll(env) - qfmt.ImportAll(env) - state := &engineState{ - env: env, - pkgCache: make(map[string]*types.Package), - typeByFQN: map[string]types.Type{}, - } - for key, typ := range typeByName { - state.typeByFQN[key] = typ - } - initEnv(state, env) - return state -} - -func (state *engineState) GetCachedPackage(pkgPath string) *types.Package { - state.pkgCacheMu.RLock() - pkg := state.pkgCache[pkgPath] - state.pkgCacheMu.RUnlock() - return pkg -} - -func (state *engineState) AddCachedPackage(pkgPath string, pkg *types.Package) { - state.pkgCacheMu.Lock() - state.addCachedPackage(pkgPath, pkg) - state.pkgCacheMu.Unlock() -} - -func (state *engineState) addCachedPackage(pkgPath string, pkg *types.Package) { - state.pkgCache[pkgPath] = pkg - - // Also add all complete packages that are dependencies of the pkg. - // This way we cache more and avoid duplicated package loading - // which can lead to typechecking issues. - // - // Note that it does not increase our memory consumption - // as these packages are reachable via pkg, so they'll - // not be freed by GC anyway. - for _, imported := range pkg.Imports() { - if imported.Complete() { - state.addCachedPackage(imported.Path(), imported) - } - } -} - -func (state *engineState) FindType(importer *goImporter, currentPkg *types.Package, fqn string) (types.Type, error) { - // TODO(quasilyte): we can pre-populate the cache during the Load() phase. - // If we inspect the AST of a user function, all constant FQN can be preloaded. - // It could be a good thing as Load() is not expected to be executed in - // concurrent environment, so write-locking is not a big deal there. - - state.typeByFQNMu.RLock() - cachedType, ok := state.typeByFQN[fqn] - state.typeByFQNMu.RUnlock() - if ok { - return cachedType, nil - } - - // Code below is under a write critical section. 
- state.typeByFQNMu.Lock() - defer state.typeByFQNMu.Unlock() - - typ, err := state.findTypeNoCache(importer, currentPkg, fqn) - if err != nil { - return nil, err - } - state.typeByFQN[fqn] = typ - return typ, nil -} - -func (state *engineState) findTypeNoCache(importer *goImporter, currentPkg *types.Package, fqn string) (types.Type, error) { - pos := strings.LastIndexByte(fqn, '.') - if pos == -1 { - return nil, fmt.Errorf("%s is not a valid FQN", fqn) - } - pkgPath := fqn[:pos] - objectName := fqn[pos+1:] - var pkg *types.Package - if currentPkg != nil { - if directDep := findDependency(currentPkg, pkgPath); directDep != nil { - pkg = directDep - } - } - if pkg == nil { - loadedPkg, err := importer.Import(pkgPath) - if err != nil { - return nil, err - } - pkg = loadedPkg - } - obj := pkg.Scope().Lookup(objectName) - if obj == nil { - return nil, fmt.Errorf("%s is not found in %s", objectName, pkgPath) - } - typ := obj.Type() - state.typeByFQN[fqn] = typ - return typ, nil -} - -func inferBuildContext() *build.Context { - // Inherit most fields from the build.Default. - ctx := build.Default - - env, err := goenv.Read() - if err != nil { - return &ctx - } - - ctx.GOROOT = env["GOROOT"] - ctx.GOPATH = env["GOPATH"] - ctx.GOARCH = env["GOARCH"] - ctx.GOOS = env["GOOS"] - - switch os.Getenv("CGO_ENABLED") { - case "0": - ctx.CgoEnabled = false - case "1": - ctx.CgoEnabled = true - default: - ctx.CgoEnabled = env["CGO_ENABLED"] == "1" - } - - return &ctx -} diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/filters.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/filters.go deleted file mode 100644 index 7320ab7fb..000000000 --- a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/filters.go +++ /dev/null @@ -1,835 +0,0 @@ -package ruleguard - -import ( - "go/ast" - "go/constant" - "go/token" - "go/types" - "path/filepath" - - "github.com/quasilyte/gogrep" - "github.com/quasilyte/gogrep/nodetag" - "golang.org/x/tools/go/ast/astutil" - - "github.com/quasilyte/go-ruleguard/internal/xtypes" - "github.com/quasilyte/go-ruleguard/ruleguard/quasigo" - "github.com/quasilyte/go-ruleguard/ruleguard/textmatch" - "github.com/quasilyte/go-ruleguard/ruleguard/typematch" -) - -const filterSuccess = matchFilterResult("") - -func filterFailure(reason string) matchFilterResult { - return matchFilterResult(reason) -} - -func asExprSlice(x ast.Node) *gogrep.NodeSlice { - if x, ok := x.(*gogrep.NodeSlice); ok && x.Kind == gogrep.ExprNodeSlice { - return x - } - return nil -} - -func exprListFilterApply(src string, list []ast.Expr, fn func(ast.Expr) bool) matchFilterResult { - for _, e := range list { - if !fn(e) { - return filterFailure(src) - } - } - return filterSuccess -} - -func makeNotFilter(src string, x matchFilter) filterFunc { - return func(params *filterParams) matchFilterResult { - if x.fn(params).Matched() { - return matchFilterResult(src) - } - return "" - } -} - -func makeAndFilter(lhs, rhs matchFilter) filterFunc { - return func(params *filterParams) matchFilterResult { - if lhsResult := lhs.fn(params); !lhsResult.Matched() { - return lhsResult - } - return rhs.fn(params) - } -} - -func makeOrFilter(lhs, rhs matchFilter) filterFunc { - return func(params *filterParams) matchFilterResult { - if lhsResult := lhs.fn(params); lhsResult.Matched() { - return filterSuccess - } - return rhs.fn(params) - } -} - -func makeDeadcodeFilter(src string) filterFunc { - return func(params *filterParams) matchFilterResult { - if params.deadcode { - return filterSuccess - } - return 
filterFailure(src) - } -} - -func makeFileImportsFilter(src, pkgPath string) filterFunc { - return func(params *filterParams) matchFilterResult { - _, imported := params.imports[pkgPath] - if imported { - return filterSuccess - } - return filterFailure(src) - } -} - -func makeFilePkgPathMatchesFilter(src string, re textmatch.Pattern) filterFunc { - return func(params *filterParams) matchFilterResult { - pkgPath := params.ctx.Pkg.Path() - if re.MatchString(pkgPath) { - return filterSuccess - } - return filterFailure(src) - } -} - -func makeFileNameMatchesFilter(src string, re textmatch.Pattern) filterFunc { - return func(params *filterParams) matchFilterResult { - if re.MatchString(filepath.Base(params.filename)) { - return filterSuccess - } - return filterFailure(src) - } -} - -func makePureFilter(src, varname string) filterFunc { - return func(params *filterParams) matchFilterResult { - if list := asExprSlice(params.subNode(varname)); list != nil { - return exprListFilterApply(src, list.GetExprSlice(), func(x ast.Expr) bool { - return isPure(params.ctx.Types, x) - }) - } - n := params.subExpr(varname) - if isPure(params.ctx.Types, n) { - return filterSuccess - } - return filterFailure(src) - } -} - -func makeConstFilter(src, varname string) filterFunc { - return func(params *filterParams) matchFilterResult { - if list := asExprSlice(params.subNode(varname)); list != nil { - return exprListFilterApply(src, list.GetExprSlice(), func(x ast.Expr) bool { - return isConstant(params.ctx.Types, x) - }) - } - - n := params.subExpr(varname) - if isConstant(params.ctx.Types, n) { - return filterSuccess - } - return filterFailure(src) - } -} - -func makeConstSliceFilter(src, varname string) filterFunc { - return func(params *filterParams) matchFilterResult { - if list := asExprSlice(params.subNode(varname)); list != nil { - return exprListFilterApply(src, list.GetExprSlice(), func(x ast.Expr) bool { - return isConstantSlice(params.ctx.Types, x) - }) - } - - n := params.subExpr(varname) - if isConstantSlice(params.ctx.Types, n) { - return filterSuccess - } - return filterFailure(src) - } -} - -func makeAddressableFilter(src, varname string) filterFunc { - return func(params *filterParams) matchFilterResult { - if list := asExprSlice(params.subNode(varname)); list != nil { - return exprListFilterApply(src, list.GetExprSlice(), func(x ast.Expr) bool { - return isAddressable(params.ctx.Types, x) - }) - } - - n := params.subExpr(varname) - if isAddressable(params.ctx.Types, n) { - return filterSuccess - } - return filterFailure(src) - } -} - -func makeComparableFilter(src, varname string) filterFunc { - return func(params *filterParams) matchFilterResult { - if list := asExprSlice(params.subNode(varname)); list != nil { - return exprListFilterApply(src, list.GetExprSlice(), func(x ast.Expr) bool { - return types.Comparable(params.typeofNode(x)) - }) - } - - if types.Comparable(params.typeofNode(params.subNode(varname))) { - return filterSuccess - } - return filterFailure(src) - } -} - -func makeVarContainsFilter(src, varname string, pat *gogrep.Pattern) filterFunc { - return func(params *filterParams) matchFilterResult { - params.gogrepSubState.CapturePreset = params.match.CaptureList() - matched := false - gogrep.Walk(params.subNode(varname), func(n ast.Node) bool { - if matched { - return false - } - pat.MatchNode(params.gogrepSubState, n, func(m gogrep.MatchData) { - matched = true - }) - return true - }) - if matched { - return filterSuccess - } - return filterFailure(src) - } -} - -func 
makeCustomVarFilter(src, varname string, fn *quasigo.Func) filterFunc { - return func(params *filterParams) matchFilterResult { - // TODO(quasilyte): what if bytecode function panics due to the programming error? - // We should probably catch the panic here, print trace and return "false" - // from the filter (or even propagate that panic to let it crash). - params.varname = varname - result := quasigo.Call(params.env, fn) - if result.Value().(bool) { - return filterSuccess - } - return filterFailure(src) - } -} - -func makeTypeImplementsFilter(src, varname string, iface *types.Interface) filterFunc { - return func(params *filterParams) matchFilterResult { - if list := asExprSlice(params.subNode(varname)); list != nil { - return exprListFilterApply(src, list.GetExprSlice(), func(x ast.Expr) bool { - return xtypes.Implements(params.typeofNode(x), iface) - }) - } - - typ := params.typeofNode(params.subExpr(varname)) - if xtypes.Implements(typ, iface) { - return filterSuccess - } - return filterFailure(src) - } -} - -func makeTypeHasMethodFilter(src, varname string, fn *types.Func) filterFunc { - return func(params *filterParams) matchFilterResult { - typ := params.typeofNode(params.subNode(varname)) - if typeHasMethod(typ, fn) { - return filterSuccess - } - return filterFailure(src) - } -} - -func makeTypeHasPointersFilter(src, varname string) filterFunc { - return func(params *filterParams) matchFilterResult { - typ := params.typeofNode(params.subExpr(varname)) - if typeHasPointers(typ) { - return filterSuccess - } - return filterFailure(src) - } -} - -func makeTypeIsIntUintFilter(src, varname string, underlying bool, kind types.BasicKind) filterFunc { - return func(params *filterParams) matchFilterResult { - typ := params.typeofNode(params.subExpr(varname)) - if underlying { - typ = typ.Underlying() - } - if basicType, ok := typ.(*types.Basic); ok { - first := kind - last := kind + 4 - if basicType.Kind() >= first && basicType.Kind() <= last { - return filterSuccess - } - } - return filterFailure(src) - } -} - -func makeTypeIsSignedFilter(src, varname string, underlying bool) filterFunc { - return func(params *filterParams) matchFilterResult { - typ := params.typeofNode(params.subExpr(varname)) - if underlying { - typ = typ.Underlying() - } - if basicType, ok := typ.(*types.Basic); ok { - if basicType.Info()&types.IsInteger != 0 && basicType.Info()&types.IsUnsigned == 0 { - return filterSuccess - } - } - return filterFailure(src) - } -} - -func makeTypeOfKindFilter(src, varname string, underlying bool, kind types.BasicInfo) filterFunc { - return func(params *filterParams) matchFilterResult { - typ := params.typeofNode(params.subExpr(varname)) - if underlying { - typ = typ.Underlying() - } - if basicType, ok := typ.(*types.Basic); ok { - if basicType.Info()&kind != 0 { - return filterSuccess - } - } - return filterFailure(src) - } -} - -func makeTypesIdenticalFilter(src, lhsVarname, rhsVarname string) filterFunc { - return func(params *filterParams) matchFilterResult { - lhsType := params.typeofNode(params.subNode(lhsVarname)) - rhsType := params.typeofNode(params.subNode(rhsVarname)) - if xtypes.Identical(lhsType, rhsType) { - return filterSuccess - } - return filterFailure(src) - } -} - -func makeRootSinkTypeIsFilter(src string, pat *typematch.Pattern) filterFunc { - return func(params *filterParams) matchFilterResult { - // TODO(quasilyte): add variadic support? 
- e, ok := params.match.Node().(ast.Expr) - if ok { - parent, kv := findSinkRoot(params) - typ := findSinkType(params, parent, kv, e) - if pat.MatchIdentical(params.typematchState, typ) { - return filterSuccess - } - } - return filterFailure(src) - } -} - -func makeTypeIsFilter(src, varname string, underlying bool, pat *typematch.Pattern) filterFunc { - if underlying { - return func(params *filterParams) matchFilterResult { - if list := asExprSlice(params.subNode(varname)); list != nil { - return exprListFilterApply(src, list.GetExprSlice(), func(x ast.Expr) bool { - return pat.MatchIdentical(params.typematchState, params.typeofNode(x).Underlying()) - }) - } - typ := params.typeofNode(params.subNode(varname)).Underlying() - if pat.MatchIdentical(params.typematchState, typ) { - return filterSuccess - } - return filterFailure(src) - } - } - - return func(params *filterParams) matchFilterResult { - if list := asExprSlice(params.subNode(varname)); list != nil { - return exprListFilterApply(src, list.GetExprSlice(), func(x ast.Expr) bool { - return pat.MatchIdentical(params.typematchState, params.typeofNode(x)) - }) - } - typ := params.typeofNode(params.subNode(varname)) - if pat.MatchIdentical(params.typematchState, typ) { - return filterSuccess - } - return filterFailure(src) - } -} - -func makeTypeConvertibleToFilter(src, varname string, dstType types.Type) filterFunc { - return func(params *filterParams) matchFilterResult { - if list := asExprSlice(params.subNode(varname)); list != nil { - return exprListFilterApply(src, list.GetExprSlice(), func(x ast.Expr) bool { - return types.ConvertibleTo(params.typeofNode(x), dstType) - }) - } - - typ := params.typeofNode(params.subExpr(varname)) - if types.ConvertibleTo(typ, dstType) { - return filterSuccess - } - return filterFailure(src) - } -} - -func makeTypeAssignableToFilter(src, varname string, dstType types.Type) filterFunc { - return func(params *filterParams) matchFilterResult { - if list := asExprSlice(params.subNode(varname)); list != nil { - return exprListFilterApply(src, list.GetExprSlice(), func(x ast.Expr) bool { - return types.AssignableTo(params.typeofNode(x), dstType) - }) - } - - typ := params.typeofNode(params.subExpr(varname)) - if types.AssignableTo(typ, dstType) { - return filterSuccess - } - return filterFailure(src) - } -} - -func makeLineFilter(src, varname string, op token.Token, rhsVarname string) filterFunc { - // TODO(quasilyte): add variadic support. 
- return func(params *filterParams) matchFilterResult { - line1 := params.ctx.Fset.Position(params.subNode(varname).Pos()).Line - line2 := params.ctx.Fset.Position(params.subNode(rhsVarname).Pos()).Line - lhsValue := constant.MakeInt64(int64(line1)) - rhsValue := constant.MakeInt64(int64(line2)) - if constant.Compare(lhsValue, op, rhsValue) { - return filterSuccess - } - return filterFailure(src) - } -} - -func makeObjectIsVariadicParamFilter(src, varname string) filterFunc { - return func(params *filterParams) matchFilterResult { - if params.currentFunc == nil { - return filterFailure(src) - } - funcObj, ok := params.ctx.Types.ObjectOf(params.currentFunc.Name).(*types.Func) - if !ok { - return filterFailure(src) - } - funcSig := funcObj.Type().(*types.Signature) - if !funcSig.Variadic() { - return filterFailure(src) - } - paramObj := funcSig.Params().At(funcSig.Params().Len() - 1) - obj := params.ctx.Types.ObjectOf(identOf(params.subExpr(varname))) - if paramObj != obj { - return filterFailure(src) - } - return filterSuccess - } -} - -func makeObjectIsGlobalFilter(src, varname string) filterFunc { - return func(params *filterParams) matchFilterResult { - obj := params.ctx.Types.ObjectOf(identOf(params.subExpr(varname))) - globalScope := params.ctx.Pkg.Scope() - if obj.Parent() == globalScope { - return filterSuccess - } - - return filterFailure(src) - } -} - -func makeGoVersionFilter(src string, op token.Token, version GoVersion) filterFunc { - return func(params *filterParams) matchFilterResult { - if params.ctx.GoVersion.IsAny() { - return filterSuccess - } - if versionCompare(params.ctx.GoVersion, op, version) { - return filterSuccess - } - return filterFailure(src) - } -} - -func makeLineConstFilter(src, varname string, op token.Token, rhsValue constant.Value) filterFunc { - // TODO(quasilyte): add variadic support. 
- return func(params *filterParams) matchFilterResult { - n := params.subNode(varname) - lhsValue := constant.MakeInt64(int64(params.ctx.Fset.Position(n.Pos()).Line)) - if constant.Compare(lhsValue, op, rhsValue) { - return filterSuccess - } - return filterFailure(src) - } -} - -func makeTypeSizeConstFilter(src, varname string, op token.Token, rhsValue constant.Value) filterFunc { - return func(params *filterParams) matchFilterResult { - if list := asExprSlice(params.subNode(varname)); list != nil { - return exprListFilterApply(src, list.GetExprSlice(), func(x ast.Expr) bool { - typ := params.typeofNode(x) - if isTypeParam(typ) { - return false - } - lhsValue := constant.MakeInt64(params.ctx.Sizes.Sizeof(typ)) - return constant.Compare(lhsValue, op, rhsValue) - }) - } - - typ := params.typeofNode(params.subExpr(varname)) - if isTypeParam(typ) { - return filterFailure(src) - } - lhsValue := constant.MakeInt64(params.ctx.Sizes.Sizeof(typ)) - if constant.Compare(lhsValue, op, rhsValue) { - return filterSuccess - } - return filterFailure(src) - } -} - -func makeTypeSizeFilter(src, varname string, op token.Token, rhsVarname string) filterFunc { - return func(params *filterParams) matchFilterResult { - lhsTyp := params.typeofNode(params.subExpr(varname)) - rhsTyp := params.typeofNode(params.subExpr(rhsVarname)) - if isTypeParam(lhsTyp) || isTypeParam(rhsTyp) { - return filterFailure(src) - } - lhsValue := constant.MakeInt64(params.ctx.Sizes.Sizeof(lhsTyp)) - rhsValue := constant.MakeInt64(params.ctx.Sizes.Sizeof(rhsTyp)) - if constant.Compare(lhsValue, op, rhsValue) { - return filterSuccess - } - return filterFailure(src) - } -} - -func makeValueIntConstFilter(src, varname string, op token.Token, rhsValue constant.Value) filterFunc { - return func(params *filterParams) matchFilterResult { - if list := asExprSlice(params.subNode(varname)); list != nil { - return exprListFilterApply(src, list.GetExprSlice(), func(x ast.Expr) bool { - lhsValue := intValueOf(params.ctx.Types, x) - return lhsValue != nil && constant.Compare(lhsValue, op, rhsValue) - }) - } - - lhsValue := intValueOf(params.ctx.Types, params.subExpr(varname)) - if lhsValue == nil { - return filterFailure(src) // The value is unknown - } - if constant.Compare(lhsValue, op, rhsValue) { - return filterSuccess - } - return filterFailure(src) - } -} - -func makeValueIntFilter(src, varname string, op token.Token, rhsVarname string) filterFunc { - // TODO(quasilyte): add variadic support. - return func(params *filterParams) matchFilterResult { - lhsValue := intValueOf(params.ctx.Types, params.subExpr(varname)) - if lhsValue == nil { - return filterFailure(src) - } - rhsValue := intValueOf(params.ctx.Types, params.subExpr(rhsVarname)) - if rhsValue == nil { - return filterFailure(src) - } - if constant.Compare(lhsValue, op, rhsValue) { - return filterSuccess - } - return filterFailure(src) - } -} - -func makeTextConstFilter(src, varname string, op token.Token, rhsValue constant.Value) filterFunc { - // TODO(quasilyte): add variadic support. - return func(params *filterParams) matchFilterResult { - s := params.nodeText(params.subNode(varname)) - lhsValue := constant.MakeString(string(s)) - if constant.Compare(lhsValue, op, rhsValue) { - return filterSuccess - } - return filterFailure(src) - } -} - -func makeTextFilter(src, varname string, op token.Token, rhsVarname string) filterFunc { - // TODO(quasilyte): add variadic support. 
- return func(params *filterParams) matchFilterResult { - s1 := params.nodeText(params.subNode(varname)) - lhsValue := constant.MakeString(string(s1)) - n, _ := params.match.CapturedByName(rhsVarname) - s2 := params.nodeText(n) - rhsValue := constant.MakeString(string(s2)) - if constant.Compare(lhsValue, op, rhsValue) { - return filterSuccess - } - return filterFailure(src) - } -} - -func makeTextMatchesFilter(src, varname string, re textmatch.Pattern) filterFunc { - // TODO(quasilyte): add variadic support. - return func(params *filterParams) matchFilterResult { - if re.Match(params.nodeText(params.subNode(varname))) { - return filterSuccess - } - return filterFailure(src) - } -} - -func makeRootParentNodeIsFilter(src string, tag nodetag.Value) filterFunc { - return func(params *filterParams) matchFilterResult { - parent := params.nodePath.Parent() - if nodeIs(parent, tag) { - return filterSuccess - } - return filterFailure(src) - } -} - -func makeNodeIsFilter(src, varname string, tag nodetag.Value) filterFunc { - // TODO(quasilyte): add comment nodes support? - // TODO(quasilyte): add variadic support. - return func(params *filterParams) matchFilterResult { - n := params.subNode(varname) - if nodeIs(n, tag) { - return filterSuccess - } - return filterFailure(src) - } -} - -func makeObjectIsFilter(src, varname, objectName string) filterFunc { - var predicate func(types.Object) bool - switch objectName { - case "Func": - predicate = func(x types.Object) bool { - _, ok := x.(*types.Func) - return ok - } - case "Var": - predicate = func(x types.Object) bool { - _, ok := x.(*types.Var) - return ok - } - case "Const": - predicate = func(x types.Object) bool { - _, ok := x.(*types.Const) - return ok - } - case "TypeName": - predicate = func(x types.Object) bool { - _, ok := x.(*types.TypeName) - return ok - } - case "Label": - predicate = func(x types.Object) bool { - _, ok := x.(*types.Label) - return ok - } - case "PkgName": - predicate = func(x types.Object) bool { - _, ok := x.(*types.PkgName) - return ok - } - case "Builtin": - predicate = func(x types.Object) bool { - _, ok := x.(*types.Builtin) - return ok - } - case "Nil": - predicate = func(x types.Object) bool { - _, ok := x.(*types.Nil) - return ok - } - } - - return func(params *filterParams) matchFilterResult { - if list := asExprSlice(params.subNode(varname)); list != nil { - return exprListFilterApply(src, list.GetExprSlice(), func(x ast.Expr) bool { - ident := identOf(x) - return ident != nil && predicate(params.ctx.Types.ObjectOf(ident)) - }) - } - - ident := identOf(params.subExpr(varname)) - if ident == nil { - return filterFailure(src) - } - object := params.ctx.Types.ObjectOf(ident) - if predicate(object) { - return filterSuccess - } - return filterFailure(src) - } -} - -func nodeIs(n ast.Node, tag nodetag.Value) bool { - var matched bool - switch tag { - case nodetag.Expr: - _, matched = n.(ast.Expr) - case nodetag.Stmt: - _, matched = n.(ast.Stmt) - case nodetag.Node: - matched = true - default: - matched = (tag == nodetag.FromNode(n)) - } - return matched -} - -func typeHasMethod(typ types.Type, fn *types.Func) bool { - obj, _, _ := types.LookupFieldOrMethod(typ, true, fn.Pkg(), fn.Name()) - fn2, ok := obj.(*types.Func) - if !ok { - return false - } - return xtypes.Identical(fn.Type(), fn2.Type()) -} - -func typeHasPointers(typ types.Type) bool { - switch typ := typ.(type) { - case *types.Basic: - switch typ.Kind() { - case types.UnsafePointer, types.String, types.UntypedNil, types.UntypedString: - return true - } - 
return false - - case *types.Named: - return typeHasPointers(typ.Underlying()) - - case *types.Struct: - for i := 0; i < typ.NumFields(); i++ { - if typeHasPointers(typ.Field(i).Type()) { - return true - } - } - return false - - case *types.Array: - return typeHasPointers(typ.Elem()) - - default: - return true - } -} - -func findSinkRoot(params *filterParams) (ast.Node, *ast.KeyValueExpr) { - for i := 1; i < params.nodePath.Len(); i++ { - switch n := params.nodePath.NthParent(i).(type) { - case *ast.ParenExpr: - // Skip and continue. - continue - case *ast.KeyValueExpr: - return params.nodePath.NthParent(i + 1).(ast.Expr), n - default: - return n, nil - } - } - return nil, nil -} - -func findContainingFunc(params *filterParams) *types.Signature { - for i := 2; i < params.nodePath.Len(); i++ { - switch n := params.nodePath.NthParent(i).(type) { - case *ast.FuncDecl: - fn, ok := params.ctx.Types.TypeOf(n.Name).(*types.Signature) - if ok { - return fn - } - case *ast.FuncLit: - fn, ok := params.ctx.Types.TypeOf(n.Type).(*types.Signature) - if ok { - return fn - } - } - } - return nil -} - -func findSinkType(params *filterParams, parent ast.Node, kv *ast.KeyValueExpr, e ast.Expr) types.Type { - switch parent := parent.(type) { - case *ast.ValueSpec: - return params.ctx.Types.TypeOf(parent.Type) - - case *ast.ReturnStmt: - for i, result := range parent.Results { - if astutil.Unparen(result) != e { - continue - } - sig := findContainingFunc(params) - if sig == nil { - break - } - return sig.Results().At(i).Type() - } - - case *ast.IndexExpr: - if astutil.Unparen(parent.Index) == e { - switch typ := params.ctx.Types.TypeOf(parent.X).Underlying().(type) { - case *types.Map: - return typ.Key() - case *types.Slice, *types.Array: - return nil // TODO: some untyped int type? - } - } - - case *ast.AssignStmt: - if parent.Tok != token.ASSIGN || len(parent.Lhs) != len(parent.Rhs) { - break - } - for i, rhs := range parent.Rhs { - if rhs == e { - return params.ctx.Types.TypeOf(parent.Lhs[i]) - } - } - - case *ast.CompositeLit: - switch typ := params.ctx.Types.TypeOf(parent).Underlying().(type) { - case *types.Slice: - return typ.Elem() - case *types.Array: - return typ.Elem() - case *types.Map: - if astutil.Unparen(kv.Key) == e { - return typ.Key() - } - return typ.Elem() - case *types.Struct: - fieldName, ok := kv.Key.(*ast.Ident) - if !ok { - break - } - for i := 0; i < typ.NumFields(); i++ { - field := typ.Field(i) - if field.Name() == fieldName.String() { - return field.Type() - } - } - } - - case *ast.CallExpr: - switch typ := params.ctx.Types.TypeOf(parent.Fun).(type) { - case *types.Signature: - // A function call argument. - for i, arg := range parent.Args { - if astutil.Unparen(arg) != e { - continue - } - isVariadicArg := (i >= typ.Params().Len()-1) && typ.Variadic() - if isVariadicArg && !parent.Ellipsis.IsValid() { - return typ.Params().At(typ.Params().Len() - 1).Type().(*types.Slice).Elem() - } - if i < typ.Params().Len() { - return typ.Params().At(i).Type() - } - break - } - default: - // Probably a type cast. 
-			return typ
-		}
-	}
-
-	return invalidType
-}
diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/go_version.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/go_version.go
deleted file mode 100644
index 39e4a492d..000000000
--- a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/go_version.go
+++ /dev/null
@@ -1,58 +0,0 @@
-package ruleguard
-
-import (
-	"fmt"
-	"go/token"
-	"strconv"
-	"strings"
-)
-
-type GoVersion struct {
-	Major int
-	Minor int
-}
-
-func (ver GoVersion) IsAny() bool { return ver.Major == 0 }
-
-func ParseGoVersion(version string) (GoVersion, error) {
-	var result GoVersion
-	if version == "" {
-		return GoVersion{}, nil
-	}
-	parts := strings.Split(version, ".")
-	if len(parts) != 2 {
-		return result, fmt.Errorf("invalid format: %s", version)
-	}
-	major, err := strconv.Atoi(parts[0])
-	if err != nil {
-		return result, fmt.Errorf("invalid major version part: %s: %s", parts[0], err)
-	}
-	minor, err := strconv.Atoi(parts[1])
-	if err != nil {
-		return result, fmt.Errorf("invalid minor version part: %s: %s", parts[1], err)
-	}
-	result.Major = major
-	result.Minor = minor
-	return result, nil
-}
-
-func versionCompare(x GoVersion, op token.Token, y GoVersion) bool {
-	switch op {
-	case token.EQL: // ==
-		return x.Major == y.Major && x.Minor == y.Minor
-	case token.NEQ: // !=
-		return !versionCompare(x, token.EQL, y)
-
-	case token.GTR: // >
-		return x.Major > y.Major || (x.Major == y.Major && x.Minor > y.Minor)
-	case token.GEQ: // >=
-		return x.Major > y.Major || (x.Major == y.Major && x.Minor >= y.Minor)
-	case token.LSS: // <
-		return x.Major < y.Major || (x.Major == y.Major && x.Minor < y.Minor)
-	case token.LEQ: // <=
-		return x.Major < y.Major || (x.Major == y.Major && x.Minor <= y.Minor)
-
-	default:
-		panic("unexpected version compare op")
-	}
-}
diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/gorule.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/gorule.go
deleted file mode 100644
index 655fc5b89..000000000
--- a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/gorule.go
+++ /dev/null
@@ -1,154 +0,0 @@
-package ruleguard
-
-import (
-	"fmt"
-	"go/ast"
-	"go/types"
-	"regexp"
-
-	"github.com/quasilyte/go-ruleguard/ruleguard/quasigo"
-	"github.com/quasilyte/go-ruleguard/ruleguard/typematch"
-	"github.com/quasilyte/gogrep"
-	"github.com/quasilyte/gogrep/nodetag"
-)
-
-type goRuleSet struct {
-	universal *scopedGoRuleSet
-
-	groups map[string]*GoRuleGroup // To handle redefinitions
-}
-
-type scopedGoRuleSet struct {
-	categorizedNum int
-	rulesByTag     [nodetag.NumBuckets][]goRule
-	commentRules   []goCommentRule
-}
-
-type goCommentRule struct {
-	base          goRule
-	pat           *regexp.Regexp
-	captureGroups bool
-}
-
-type goRule struct {
-	group      *GoRuleGroup
-	line       int
-	pat        *gogrep.Pattern
-	msg        string
-	location   string
-	suggestion string
-	filter     matchFilter
-	do         *quasigo.Func
-}
-
-type matchFilterResult string
-
-func (s matchFilterResult) Matched() bool { return s == "" }
-
-func (s matchFilterResult) RejectReason() string { return string(s) }
-
-type filterFunc func(*filterParams) matchFilterResult
-
-type matchFilter struct {
-	src string
-	fn  func(*filterParams) matchFilterResult
-}
-
-type filterParams struct {
-	ctx      *RunContext
-	filename string
-	imports  map[string]struct{}
-	env      *quasigo.EvalEnv
-
-	importer       *goImporter
-	gogrepSubState *gogrep.MatcherState
-	typematchState *typematch.MatcherState
-
-	match    matchData
-	nodePath *nodePath
-
-	nodeText   func(n ast.Node) []byte
-	nodeString func(n ast.Node) string
-
-	deadcode bool
-
-	currentFunc *ast.FuncDecl
-
-	// varname is set only for custom filters before bytecode function is called.
-	varname string
-
-	// Both of these are Do() function related fields.
-	reportString  string
-	suggestString string
-}
-
-func (params *filterParams) subNode(name string) ast.Node {
-	n, _ := params.match.CapturedByName(name)
-	return n
-}
-
-func (params *filterParams) subExpr(name string) ast.Expr {
-	n, _ := params.match.CapturedByName(name)
-	switch n := n.(type) {
-	case ast.Expr:
-		return n
-	case *ast.ExprStmt:
-		return n.X
-	default:
-		return nil
-	}
-}
-
-func (params *filterParams) typeofNode(n ast.Node) types.Type {
-	var e ast.Expr
-	switch n := n.(type) {
-	case ast.Expr:
-		e = n
-	case *ast.Field:
-		e = n.Type
-	}
-	if typ := params.ctx.Types.TypeOf(e); typ != nil {
-		return typ
-	}
-	return invalidType
-}
-
-func mergeRuleSets(toMerge []*goRuleSet) (*goRuleSet, error) {
-	out := &goRuleSet{
-		universal: &scopedGoRuleSet{},
-		groups:    make(map[string]*GoRuleGroup),
-	}
-
-	for _, x := range toMerge {
-		out.universal = appendScopedRuleSet(out.universal, x.universal)
-		for groupName, group := range x.groups {
-			if prevGroup, ok := out.groups[groupName]; ok {
-				newRef := fmt.Sprintf("%s:%d", group.Filename, group.Line)
-				oldRef := fmt.Sprintf("%s:%d", prevGroup.Filename, prevGroup.Line)
-				return nil, fmt.Errorf("%s: redefinition of %s(), previously defined at %s", newRef, groupName, oldRef)
-			}
-			out.groups[groupName] = group
-		}
-	}
-
-	return out, nil
-}
-
-func appendScopedRuleSet(dst, src *scopedGoRuleSet) *scopedGoRuleSet {
-	for tag, rules := range src.rulesByTag {
-		dst.rulesByTag[tag] = append(dst.rulesByTag[tag], cloneRuleSlice(rules)...)
-		dst.categorizedNum += len(rules)
-	}
-	dst.commentRules = append(dst.commentRules, src.commentRules...)
-	return dst
-}
-
-func cloneRuleSlice(slice []goRule) []goRule {
-	out := make([]goRule, len(slice))
-	for i, rule := range slice {
-		clone := rule
-		clone.pat = rule.pat.Clone()
-		out[i] = clone
-	}
-	return out
-}
diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/goutil/goutil.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/goutil/goutil.go
deleted file mode 100644
index 3f35b17e1..000000000
--- a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/goutil/goutil.go
+++ /dev/null
@@ -1,67 +0,0 @@
-package goutil
-
-import (
-	"fmt"
-	"go/ast"
-	"go/importer"
-	"go/parser"
-	"go/printer"
-	"go/token"
-	"go/types"
-	"strings"
-)
-
-// SprintNode returns the textual representation of n.
-// If fset is nil, freshly created file set will be used.
-func SprintNode(fset *token.FileSet, n ast.Node) string {
-	if fset == nil {
-		fset = token.NewFileSet()
-	}
-	var buf strings.Builder
-	if err := printer.Fprint(&buf, fset, n); err != nil {
-		return ""
-	}
-	return buf.String()
-}
-
-type LoadConfig struct {
-	Fset     *token.FileSet
-	Filename string
-	Data     interface{}
-	Importer types.Importer
-}
-
-type LoadResult struct {
-	Pkg    *types.Package
-	Types  *types.Info
-	Syntax *ast.File
-}
-
-func LoadGoFile(config LoadConfig) (*LoadResult, error) {
-	imp := config.Importer
-	if imp == nil {
-		imp = importer.ForCompiler(config.Fset, "source", nil)
-	}
-
-	parserFlags := parser.ParseComments
-	f, err := parser.ParseFile(config.Fset, config.Filename, config.Data, parserFlags)
-	if err != nil {
-		return nil, fmt.Errorf("parse file error: %w", err)
-	}
-	typechecker := types.Config{Importer: imp}
-	typesInfo := &types.Info{
-		Types: map[ast.Expr]types.TypeAndValue{},
-		Uses:  map[*ast.Ident]types.Object{},
-		Defs:  map[*ast.Ident]types.Object{},
-	}
-	pkg, err := typechecker.Check(f.Name.String(), config.Fset, []*ast.File{f}, typesInfo)
-	if err != nil {
-		return nil, fmt.Errorf("typechecker error: %w", err)
-	}
-	result := &LoadResult{
-		Pkg:    pkg,
-		Types:  typesInfo,
-		Syntax: f,
-	}
-	return result, nil
-}
diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/goutil/resolve.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/goutil/resolve.go
deleted file mode 100644
index 8705707ac..000000000
--- a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/goutil/resolve.go
+++ /dev/null
@@ -1,33 +0,0 @@
-package goutil
-
-import (
-	"go/ast"
-	"go/types"
-
-	"golang.org/x/tools/go/ast/astutil"
-)
-
-func ResolveFunc(info *types.Info, callable ast.Expr) (ast.Expr, *types.Func) {
-	switch callable := astutil.Unparen(callable).(type) {
-	case *ast.Ident:
-		sig, ok := info.ObjectOf(callable).(*types.Func)
-		if !ok {
-			return nil, nil
-		}
-		return nil, sig
-
-	case *ast.SelectorExpr:
-		sig, ok := info.ObjectOf(callable.Sel).(*types.Func)
-		if !ok {
-			return nil, nil
-		}
-		isMethod := sig.Type().(*types.Signature).Recv() != nil
-		if _, ok := callable.X.(*ast.Ident); ok && !isMethod {
-			return nil, sig
-		}
-		return callable.X, sig
-
-	default:
-		return nil, nil
-	}
-}
diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/importer.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/importer.go
deleted file mode 100644
index 19494db9c..000000000
--- a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/importer.go
+++ /dev/null
@@ -1,95 +0,0 @@
-package ruleguard
-
-import (
-	"fmt"
-	"go/build"
-	"go/importer"
-	"go/token"
-	"go/types"
-	"runtime"
-
-	"github.com/quasilyte/go-ruleguard/internal/xsrcimporter"
-)
-
-// goImporter is a `types.Importer` that tries to load a package no matter what.
-// It iterates through multiple import strategies and accepts whatever succeeds first.
-type goImporter struct {
-	// TODO(quasilyte): share importers with gogrep?
- - state *engineState - - defaultImporter types.Importer - srcImporter types.Importer - - fset *token.FileSet - buildContext *build.Context - - debugImports bool - debugPrint func(string) -} - -type goImporterConfig struct { - fset *token.FileSet - debugImports bool - debugPrint func(string) - buildContext *build.Context -} - -func newGoImporter(state *engineState, config goImporterConfig) *goImporter { - imp := &goImporter{ - state: state, - fset: config.fset, - debugImports: config.debugImports, - debugPrint: config.debugPrint, - defaultImporter: importer.Default(), - buildContext: config.buildContext, - } - imp.initSourceImporter() - return imp -} - -func (imp *goImporter) Import(path string) (*types.Package, error) { - if pkg := imp.state.GetCachedPackage(path); pkg != nil { - if imp.debugImports { - imp.debugPrint(fmt.Sprintf(`imported "%s" from importer cache`, path)) - } - return pkg, nil - } - - pkg, srcErr := imp.srcImporter.Import(path) - if srcErr == nil { - imp.state.AddCachedPackage(path, pkg) - if imp.debugImports { - imp.debugPrint(fmt.Sprintf(`imported "%s" from source importer`, path)) - } - return pkg, nil - } - - pkg, defaultErr := imp.defaultImporter.Import(path) - if defaultErr == nil { - imp.state.AddCachedPackage(path, pkg) - if imp.debugImports { - imp.debugPrint(fmt.Sprintf(`imported "%s" from %s importer`, path, runtime.Compiler)) - } - return pkg, nil - } - - if imp.debugImports { - imp.debugPrint(fmt.Sprintf(`failed to import "%s":`, path)) - imp.debugPrint(fmt.Sprintf(" %s importer: %v", runtime.Compiler, defaultErr)) - imp.debugPrint(fmt.Sprintf(" source importer: %v", srcErr)) - imp.debugPrint(fmt.Sprintf(" GOROOT=%q GOPATH=%q", imp.buildContext.GOROOT, imp.buildContext.GOPATH)) - } - - return nil, defaultErr -} - -func (imp *goImporter) initSourceImporter() { - if imp.buildContext == nil { - if imp.debugImports { - imp.debugPrint("using build.Default context") - } - imp.buildContext = &build.Default - } - imp.srcImporter = xsrcimporter.New(imp.buildContext, imp.fset) -} diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/ir/filter_op.gen.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/ir/filter_op.gen.go deleted file mode 100644 index bc2a5ee5b..000000000 --- a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/ir/filter_op.gen.go +++ /dev/null @@ -1,282 +0,0 @@ -// Code generated "gen_filter_op.go"; DO NOT EDIT. 
- -package ir - -const ( - FilterInvalidOp FilterOp = 0 - - // !$Args[0] - FilterNotOp FilterOp = 1 - - // $Args[0] && $Args[1] - FilterAndOp FilterOp = 2 - - // $Args[0] || $Args[1] - FilterOrOp FilterOp = 3 - - // $Args[0] == $Args[1] - FilterEqOp FilterOp = 4 - - // $Args[0] != $Args[1] - FilterNeqOp FilterOp = 5 - - // $Args[0] > $Args[1] - FilterGtOp FilterOp = 6 - - // $Args[0] < $Args[1] - FilterLtOp FilterOp = 7 - - // $Args[0] >= $Args[1] - FilterGtEqOp FilterOp = 8 - - // $Args[0] <= $Args[1] - FilterLtEqOp FilterOp = 9 - - // m[$Value].Addressable - // $Value type: string - FilterVarAddressableOp FilterOp = 10 - - // m[$Value].Comparable - // $Value type: string - FilterVarComparableOp FilterOp = 11 - - // m[$Value].Pure - // $Value type: string - FilterVarPureOp FilterOp = 12 - - // m[$Value].Const - // $Value type: string - FilterVarConstOp FilterOp = 13 - - // m[$Value].ConstSlice - // $Value type: string - FilterVarConstSliceOp FilterOp = 14 - - // m[$Value].Text - // $Value type: string - FilterVarTextOp FilterOp = 15 - - // m[$Value].Line - // $Value type: string - FilterVarLineOp FilterOp = 16 - - // m[$Value].Value.Int() - // $Value type: string - FilterVarValueIntOp FilterOp = 17 - - // m[$Value].Type.Size - // $Value type: string - FilterVarTypeSizeOp FilterOp = 18 - - // m[$Value].Type.HasPointers() - // $Value type: string - FilterVarTypeHasPointersOp FilterOp = 19 - - // m[$Value].Filter($Args[0]) - // $Value type: string - FilterVarFilterOp FilterOp = 20 - - // m[$Value].Node.Is($Args[0]) - // $Value type: string - FilterVarNodeIsOp FilterOp = 21 - - // m[$Value].Object.Is($Args[0]) - // $Value type: string - FilterVarObjectIsOp FilterOp = 22 - - // m[$Value].Object.IsGlobal() - // $Value type: string - FilterVarObjectIsGlobalOp FilterOp = 23 - - // m[$Value].Object.IsVariadicParam() - // $Value type: string - FilterVarObjectIsVariadicParamOp FilterOp = 24 - - // m[$Value].Type.Is($Args[0]) - // $Value type: string - FilterVarTypeIsOp FilterOp = 25 - - // m[$Value].Type.IdenticalTo($Args[0]) - // $Value type: string - FilterVarTypeIdenticalToOp FilterOp = 26 - - // m[$Value].Type.Underlying().Is($Args[0]) - // $Value type: string - FilterVarTypeUnderlyingIsOp FilterOp = 27 - - // m[$Value].Type.OfKind($Args[0]) - // $Value type: string - FilterVarTypeOfKindOp FilterOp = 28 - - // m[$Value].Type.Underlying().OfKind($Args[0]) - // $Value type: string - FilterVarTypeUnderlyingOfKindOp FilterOp = 29 - - // m[$Value].Type.ConvertibleTo($Args[0]) - // $Value type: string - FilterVarTypeConvertibleToOp FilterOp = 30 - - // m[$Value].Type.AssignableTo($Args[0]) - // $Value type: string - FilterVarTypeAssignableToOp FilterOp = 31 - - // m[$Value].Type.Implements($Args[0]) - // $Value type: string - FilterVarTypeImplementsOp FilterOp = 32 - - // m[$Value].Type.HasMethod($Args[0]) - // $Value type: string - FilterVarTypeHasMethodOp FilterOp = 33 - - // m[$Value].Text.Matches($Args[0]) - // $Value type: string - FilterVarTextMatchesOp FilterOp = 34 - - // m[$Value].Contains($Args[0]) - // $Value type: string - FilterVarContainsOp FilterOp = 35 - - // m.Deadcode() - FilterDeadcodeOp FilterOp = 36 - - // m.GoVersion().Eq($Value) - // $Value type: string - FilterGoVersionEqOp FilterOp = 37 - - // m.GoVersion().LessThan($Value) - // $Value type: string - FilterGoVersionLessThanOp FilterOp = 38 - - // m.GoVersion().GreaterThan($Value) - // $Value type: string - FilterGoVersionGreaterThanOp FilterOp = 39 - - // m.GoVersion().LessEqThan($Value) - // $Value type: string - 
FilterGoVersionLessEqThanOp FilterOp = 40 - - // m.GoVersion().GreaterEqThan($Value) - // $Value type: string - FilterGoVersionGreaterEqThanOp FilterOp = 41 - - // m.File.Imports($Value) - // $Value type: string - FilterFileImportsOp FilterOp = 42 - - // m.File.PkgPath.Matches($Value) - // $Value type: string - FilterFilePkgPathMatchesOp FilterOp = 43 - - // m.File.Name.Matches($Value) - // $Value type: string - FilterFileNameMatchesOp FilterOp = 44 - - // $Value holds a function name - // $Value type: string - FilterFilterFuncRefOp FilterOp = 45 - - // $Value holds a string constant - // $Value type: string - FilterStringOp FilterOp = 46 - - // $Value holds an int64 constant - // $Value type: int64 - FilterIntOp FilterOp = 47 - - // m[`$$`].Node.Parent().Is($Args[0]) - FilterRootNodeParentIsOp FilterOp = 48 - - // m[`$$`].SinkType.Is($Args[0]) - FilterRootSinkTypeIsOp FilterOp = 49 -) - -var filterOpNames = map[FilterOp]string{ - FilterInvalidOp: `Invalid`, - FilterNotOp: `Not`, - FilterAndOp: `And`, - FilterOrOp: `Or`, - FilterEqOp: `Eq`, - FilterNeqOp: `Neq`, - FilterGtOp: `Gt`, - FilterLtOp: `Lt`, - FilterGtEqOp: `GtEq`, - FilterLtEqOp: `LtEq`, - FilterVarAddressableOp: `VarAddressable`, - FilterVarComparableOp: `VarComparable`, - FilterVarPureOp: `VarPure`, - FilterVarConstOp: `VarConst`, - FilterVarConstSliceOp: `VarConstSlice`, - FilterVarTextOp: `VarText`, - FilterVarLineOp: `VarLine`, - FilterVarValueIntOp: `VarValueInt`, - FilterVarTypeSizeOp: `VarTypeSize`, - FilterVarTypeHasPointersOp: `VarTypeHasPointers`, - FilterVarFilterOp: `VarFilter`, - FilterVarNodeIsOp: `VarNodeIs`, - FilterVarObjectIsOp: `VarObjectIs`, - FilterVarObjectIsGlobalOp: `VarObjectIsGlobal`, - FilterVarObjectIsVariadicParamOp: `VarObjectIsVariadicParam`, - FilterVarTypeIsOp: `VarTypeIs`, - FilterVarTypeIdenticalToOp: `VarTypeIdenticalTo`, - FilterVarTypeUnderlyingIsOp: `VarTypeUnderlyingIs`, - FilterVarTypeOfKindOp: `VarTypeOfKind`, - FilterVarTypeUnderlyingOfKindOp: `VarTypeUnderlyingOfKind`, - FilterVarTypeConvertibleToOp: `VarTypeConvertibleTo`, - FilterVarTypeAssignableToOp: `VarTypeAssignableTo`, - FilterVarTypeImplementsOp: `VarTypeImplements`, - FilterVarTypeHasMethodOp: `VarTypeHasMethod`, - FilterVarTextMatchesOp: `VarTextMatches`, - FilterVarContainsOp: `VarContains`, - FilterDeadcodeOp: `Deadcode`, - FilterGoVersionEqOp: `GoVersionEq`, - FilterGoVersionLessThanOp: `GoVersionLessThan`, - FilterGoVersionGreaterThanOp: `GoVersionGreaterThan`, - FilterGoVersionLessEqThanOp: `GoVersionLessEqThan`, - FilterGoVersionGreaterEqThanOp: `GoVersionGreaterEqThan`, - FilterFileImportsOp: `FileImports`, - FilterFilePkgPathMatchesOp: `FilePkgPathMatches`, - FilterFileNameMatchesOp: `FileNameMatches`, - FilterFilterFuncRefOp: `FilterFuncRef`, - FilterStringOp: `String`, - FilterIntOp: `Int`, - FilterRootNodeParentIsOp: `RootNodeParentIs`, - FilterRootSinkTypeIsOp: `RootSinkTypeIs`, -} -var filterOpFlags = map[FilterOp]uint64{ - FilterAndOp: flagIsBinaryExpr, - FilterOrOp: flagIsBinaryExpr, - FilterEqOp: flagIsBinaryExpr, - FilterNeqOp: flagIsBinaryExpr, - FilterGtOp: flagIsBinaryExpr, - FilterLtOp: flagIsBinaryExpr, - FilterGtEqOp: flagIsBinaryExpr, - FilterLtEqOp: flagIsBinaryExpr, - FilterVarAddressableOp: flagHasVar, - FilterVarComparableOp: flagHasVar, - FilterVarPureOp: flagHasVar, - FilterVarConstOp: flagHasVar, - FilterVarConstSliceOp: flagHasVar, - FilterVarTextOp: flagHasVar, - FilterVarLineOp: flagHasVar, - FilterVarValueIntOp: flagHasVar, - FilterVarTypeSizeOp: flagHasVar, - FilterVarTypeHasPointersOp: 
flagHasVar, - FilterVarFilterOp: flagHasVar, - FilterVarNodeIsOp: flagHasVar, - FilterVarObjectIsOp: flagHasVar, - FilterVarObjectIsGlobalOp: flagHasVar, - FilterVarObjectIsVariadicParamOp: flagHasVar, - FilterVarTypeIsOp: flagHasVar, - FilterVarTypeIdenticalToOp: flagHasVar, - FilterVarTypeUnderlyingIsOp: flagHasVar, - FilterVarTypeOfKindOp: flagHasVar, - FilterVarTypeUnderlyingOfKindOp: flagHasVar, - FilterVarTypeConvertibleToOp: flagHasVar, - FilterVarTypeAssignableToOp: flagHasVar, - FilterVarTypeImplementsOp: flagHasVar, - FilterVarTypeHasMethodOp: flagHasVar, - FilterVarTextMatchesOp: flagHasVar, - FilterVarContainsOp: flagHasVar, - FilterStringOp: flagIsBasicLit, - FilterIntOp: flagIsBasicLit, -} diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/ir/gen_filter_op.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/ir/gen_filter_op.go deleted file mode 100644 index b1c819492..000000000 --- a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/ir/gen_filter_op.go +++ /dev/null @@ -1,148 +0,0 @@ -//go:build generate -// +build generate - -package main - -import ( - "bytes" - "fmt" - "go/format" - "os" - "strings" -) - -type opInfo struct { - name string - comment string - valueType string - flags uint64 -} - -const ( - flagIsBinaryExpr uint64 = 1 << iota - flagIsBasicLit - flagHasVar -) - -func main() { - ops := []opInfo{ - {name: "Invalid"}, - - {name: "Not", comment: "!$Args[0]"}, - - // Binary expressions. - {name: "And", comment: "$Args[0] && $Args[1]", flags: flagIsBinaryExpr}, - {name: "Or", comment: "$Args[0] || $Args[1]", flags: flagIsBinaryExpr}, - {name: "Eq", comment: "$Args[0] == $Args[1]", flags: flagIsBinaryExpr}, - {name: "Neq", comment: "$Args[0] != $Args[1]", flags: flagIsBinaryExpr}, - {name: "Gt", comment: "$Args[0] > $Args[1]", flags: flagIsBinaryExpr}, - {name: "Lt", comment: "$Args[0] < $Args[1]", flags: flagIsBinaryExpr}, - {name: "GtEq", comment: "$Args[0] >= $Args[1]", flags: flagIsBinaryExpr}, - {name: "LtEq", comment: "$Args[0] <= $Args[1]", flags: flagIsBinaryExpr}, - - {name: "VarAddressable", comment: "m[$Value].Addressable", valueType: "string", flags: flagHasVar}, - {name: "VarComparable", comment: "m[$Value].Comparable", valueType: "string", flags: flagHasVar}, - {name: "VarPure", comment: "m[$Value].Pure", valueType: "string", flags: flagHasVar}, - {name: "VarConst", comment: "m[$Value].Const", valueType: "string", flags: flagHasVar}, - {name: "VarConstSlice", comment: "m[$Value].ConstSlice", valueType: "string", flags: flagHasVar}, - {name: "VarText", comment: "m[$Value].Text", valueType: "string", flags: flagHasVar}, - {name: "VarLine", comment: "m[$Value].Line", valueType: "string", flags: flagHasVar}, - {name: "VarValueInt", comment: "m[$Value].Value.Int()", valueType: "string", flags: flagHasVar}, - {name: "VarTypeSize", comment: "m[$Value].Type.Size", valueType: "string", flags: flagHasVar}, - {name: "VarTypeHasPointers", comment: "m[$Value].Type.HasPointers()", valueType: "string", flags: flagHasVar}, - - {name: "VarFilter", comment: "m[$Value].Filter($Args[0])", valueType: "string", flags: flagHasVar}, - {name: "VarNodeIs", comment: "m[$Value].Node.Is($Args[0])", valueType: "string", flags: flagHasVar}, - {name: "VarObjectIs", comment: "m[$Value].Object.Is($Args[0])", valueType: "string", flags: flagHasVar}, - {name: "VarObjectIsGlobal", comment: "m[$Value].Object.IsGlobal()", valueType: "string", flags: flagHasVar}, - {name: "VarObjectIsVariadicParam", comment: "m[$Value].Object.IsVariadicParam()", valueType: "string", flags: 
flagHasVar}, - {name: "VarTypeIs", comment: "m[$Value].Type.Is($Args[0])", valueType: "string", flags: flagHasVar}, - {name: "VarTypeIdenticalTo", comment: "m[$Value].Type.IdenticalTo($Args[0])", valueType: "string", flags: flagHasVar}, - {name: "VarTypeUnderlyingIs", comment: "m[$Value].Type.Underlying().Is($Args[0])", valueType: "string", flags: flagHasVar}, - {name: "VarTypeOfKind", comment: "m[$Value].Type.OfKind($Args[0])", valueType: "string", flags: flagHasVar}, - {name: "VarTypeUnderlyingOfKind", comment: "m[$Value].Type.Underlying().OfKind($Args[0])", valueType: "string", flags: flagHasVar}, - {name: "VarTypeConvertibleTo", comment: "m[$Value].Type.ConvertibleTo($Args[0])", valueType: "string", flags: flagHasVar}, - {name: "VarTypeAssignableTo", comment: "m[$Value].Type.AssignableTo($Args[0])", valueType: "string", flags: flagHasVar}, - {name: "VarTypeImplements", comment: "m[$Value].Type.Implements($Args[0])", valueType: "string", flags: flagHasVar}, - {name: "VarTypeHasMethod", comment: "m[$Value].Type.HasMethod($Args[0])", valueType: "string", flags: flagHasVar}, - {name: "VarTextMatches", comment: "m[$Value].Text.Matches($Args[0])", valueType: "string", flags: flagHasVar}, - - {name: "VarContains", comment: "m[$Value].Contains($Args[0])", valueType: "string", flags: flagHasVar}, - - {name: "Deadcode", comment: "m.Deadcode()"}, - - {name: "GoVersionEq", comment: "m.GoVersion().Eq($Value)", valueType: "string"}, - {name: "GoVersionLessThan", comment: "m.GoVersion().LessThan($Value)", valueType: "string"}, - {name: "GoVersionGreaterThan", comment: "m.GoVersion().GreaterThan($Value)", valueType: "string"}, - {name: "GoVersionLessEqThan", comment: "m.GoVersion().LessEqThan($Value)", valueType: "string"}, - {name: "GoVersionGreaterEqThan", comment: "m.GoVersion().GreaterEqThan($Value)", valueType: "string"}, - - {name: "FileImports", comment: "m.File.Imports($Value)", valueType: "string"}, - {name: "FilePkgPathMatches", comment: "m.File.PkgPath.Matches($Value)", valueType: "string"}, - {name: "FileNameMatches", comment: "m.File.Name.Matches($Value)", valueType: "string"}, - - {name: "FilterFuncRef", comment: "$Value holds a function name", valueType: "string"}, - - {name: "String", comment: "$Value holds a string constant", valueType: "string", flags: flagIsBasicLit}, - {name: "Int", comment: "$Value holds an int64 constant", valueType: "int64", flags: flagIsBasicLit}, - - {name: "RootNodeParentIs", comment: "m[`$$`].Node.Parent().Is($Args[0])"}, - {name: "RootSinkTypeIs", comment: "m[`$$`].SinkType.Is($Args[0])"}, - } - - var buf bytes.Buffer - - buf.WriteString(`// Code generated "gen_filter_op.go"; DO NOT EDIT.` + "\n") - buf.WriteString("\n") - buf.WriteString("package ir\n") - buf.WriteString("const (\n") - - for i, op := range ops { - if strings.Contains(op.comment, "$Value") && op.valueType == "" { - fmt.Printf("missing %s valueType\n", op.name) - } - if op.comment != "" { - buf.WriteString("// " + op.comment + "\n") - } - if op.valueType != "" { - buf.WriteString("// $Value type: " + op.valueType + "\n") - } - fmt.Fprintf(&buf, "Filter%sOp FilterOp = %d\n", op.name, i) - buf.WriteString("\n") - } - buf.WriteString(")\n") - - buf.WriteString("var filterOpNames = map[FilterOp]string{\n") - for _, op := range ops { - fmt.Fprintf(&buf, "Filter%sOp: `%s`,\n", op.name, op.name) - } - buf.WriteString("}\n") - - buf.WriteString("var filterOpFlags = map[FilterOp]uint64{\n") - for _, op := range ops { - if op.flags == 0 { - continue - } - parts := make([]string, 0, 1) - if 
op.flags&flagIsBinaryExpr != 0 { - parts = append(parts, "flagIsBinaryExpr") - } - if op.flags&flagIsBasicLit != 0 { - parts = append(parts, "flagIsBasicLit") - } - if op.flags&flagHasVar != 0 { - parts = append(parts, "flagHasVar") - } - fmt.Fprintf(&buf, "Filter%sOp: %s,\n", op.name, strings.Join(parts, " | ")) - } - buf.WriteString("}\n") - - pretty, err := format.Source(buf.Bytes()) - if err != nil { - panic(err) - } - - if err := os.WriteFile("filter_op.gen.go", pretty, 0644); err != nil { - panic(err) - } -} diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/ir/ir.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/ir/ir.go deleted file mode 100644 index b89481168..000000000 --- a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/ir/ir.go +++ /dev/null @@ -1,113 +0,0 @@ -package ir - -import ( - "fmt" - "strings" -) - -type File struct { - PkgPath string - - RuleGroups []RuleGroup - - CustomDecls []string - - BundleImports []BundleImport -} - -type BundleImport struct { - Line int - - PkgPath string - Prefix string -} - -type RuleGroup struct { - Line int - Name string - MatcherName string - - DocTags []string - DocSummary string - DocBefore string - DocAfter string - DocNote string - - Imports []PackageImport - - Rules []Rule -} - -type PackageImport struct { - Path string - Name string -} - -type Rule struct { - Line int - - SyntaxPatterns []PatternString - CommentPatterns []PatternString - - ReportTemplate string - SuggestTemplate string - DoFuncName string - - WhereExpr FilterExpr - - LocationVar string -} - -type PatternString struct { - Line int - Value string -} - -// stringer -type=FilterOp -trimprefix=Filter - -//go:generate go run ./gen_filter_op.go -type FilterOp int - -func (op FilterOp) String() string { return filterOpNames[op] } - -type FilterExpr struct { - Line int - - Op FilterOp - Src string - Value interface{} - Args []FilterExpr -} - -func (e FilterExpr) IsValid() bool { return e.Op != FilterInvalidOp } - -func (e FilterExpr) IsBinaryExpr() bool { return filterOpFlags[e.Op]&flagIsBinaryExpr != 0 } -func (e FilterExpr) IsBasicLit() bool { return filterOpFlags[e.Op]&flagIsBasicLit != 0 } -func (e FilterExpr) HasVar() bool { return filterOpFlags[e.Op]&flagHasVar != 0 } - -func (e FilterExpr) String() string { - switch e.Op { - case FilterStringOp: - return `"` + e.Value.(string) + `"` - case FilterIntOp: - return fmt.Sprint(e.Value.(int64)) - } - parts := make([]string, 0, len(e.Args)+2) - parts = append(parts, e.Op.String()) - if e.Value != nil { - parts = append(parts, fmt.Sprintf("[%#v]", e.Value)) - } - for _, arg := range e.Args { - parts = append(parts, arg.String()) - } - if len(parts) == 1 { - return parts[0] - } - return "(" + strings.Join(parts, " ") + ")" -} - -const ( - flagIsBinaryExpr uint64 = 1 << iota - flagIsBasicLit - flagHasVar -) diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/ir_loader.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/ir_loader.go deleted file mode 100644 index 90dea56ac..000000000 --- a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/ir_loader.go +++ /dev/null @@ -1,890 +0,0 @@ -package ruleguard - -import ( - "bytes" - "fmt" - "go/ast" - "go/constant" - "go/parser" - "go/token" - "go/types" - "os" - "regexp" - - "github.com/quasilyte/gogrep" - "github.com/quasilyte/gogrep/nodetag" - - "github.com/quasilyte/go-ruleguard/ruleguard/goutil" - "github.com/quasilyte/go-ruleguard/ruleguard/ir" - "github.com/quasilyte/go-ruleguard/ruleguard/quasigo" - 
"github.com/quasilyte/go-ruleguard/ruleguard/textmatch" - "github.com/quasilyte/go-ruleguard/ruleguard/typematch" -) - -type irLoaderConfig struct { - ctx *LoadContext - - state *engineState - - importer *goImporter - - itab *typematch.ImportsTab - - pkg *types.Package - - gogrepFset *token.FileSet - - prefix string - importedPkg string -} - -type irLoader struct { - state *engineState - ctx *LoadContext - itab *typematch.ImportsTab - - pkg *types.Package - - file *ir.File - gogrepFset *token.FileSet - - filename string - res *goRuleSet - - importer *goImporter - - group *GoRuleGroup - - prefix string // For imported packages, a prefix that is added to a rule group name - importedPkg string // Package path; only for imported packages - - imported []*goRuleSet -} - -func newIRLoader(config irLoaderConfig) *irLoader { - return &irLoader{ - state: config.state, - ctx: config.ctx, - importer: config.importer, - itab: config.itab, - pkg: config.pkg, - prefix: config.prefix, - gogrepFset: config.gogrepFset, - } -} - -func (l *irLoader) LoadFile(filename string, f *ir.File) (*goRuleSet, error) { - l.filename = filename - l.file = f - l.res = &goRuleSet{ - universal: &scopedGoRuleSet{}, - groups: make(map[string]*GoRuleGroup), - } - - for _, imp := range f.BundleImports { - if l.importedPkg != "" { - return nil, l.errorf(imp.Line, nil, "imports from imported packages are not supported yet") - } - if err := l.loadBundle(imp); err != nil { - return nil, err - } - } - - if err := l.compileFilterFuncs(filename, f); err != nil { - return nil, err - } - - for i := range f.RuleGroups { - if err := l.loadRuleGroup(&f.RuleGroups[i]); err != nil { - return nil, err - } - } - - if len(l.imported) != 0 { - toMerge := []*goRuleSet{l.res} - toMerge = append(toMerge, l.imported...) 
- merged, err := mergeRuleSets(toMerge) - if err != nil { - return nil, err - } - l.res = merged - } - - return l.res, nil -} - -func (l *irLoader) importErrorf(line int, wrapped error, format string, args ...interface{}) error { - return &ImportError{ - msg: fmt.Sprintf("%s:%d: %s", l.filename, line, fmt.Sprintf(format, args...)), - err: wrapped, - } -} - -func (l *irLoader) errorf(line int, wrapped error, format string, args ...interface{}) error { - if wrapped == nil { - return fmt.Errorf("%s:%d: %s", l.filename, line, fmt.Sprintf(format, args...)) - } - return fmt.Errorf("%s:%d: %s: %w", l.filename, line, fmt.Sprintf(format, args...), wrapped) -} - -func (l *irLoader) loadBundle(bundle ir.BundleImport) error { - files, err := findBundleFiles(bundle.PkgPath) - if err != nil { - return l.errorf(bundle.Line, err, "can't find imported bundle files") - } - for _, filename := range files { - rset, err := l.loadExternFile(bundle.Prefix, bundle.PkgPath, filename) - if err != nil { - return l.errorf(bundle.Line, err, "error during bundle file loading") - } - l.imported = append(l.imported, rset) - } - - return nil -} - -func (l *irLoader) loadExternFile(prefix, pkgPath, filename string) (*goRuleSet, error) { - src, err := os.ReadFile(filename) - if err != nil { - return nil, err - } - irfile, pkg, err := convertAST(l.ctx, l.importer, filename, src) - if err != nil { - return nil, err - } - config := irLoaderConfig{ - state: l.state, - ctx: l.ctx, - importer: l.importer, - prefix: prefix, - pkg: pkg, - importedPkg: pkgPath, - itab: l.itab, - gogrepFset: l.gogrepFset, - } - rset, err := newIRLoader(config).LoadFile(filename, irfile) - if err != nil { - return nil, fmt.Errorf("%s: %w", l.importedPkg, err) - } - return rset, nil -} - -func (l *irLoader) compileFilterFuncs(filename string, irfile *ir.File) error { - if len(irfile.CustomDecls) == 0 { - return nil - } - - var buf bytes.Buffer - buf.WriteString("package gorules\n") - buf.WriteString("import \"github.com/quasilyte/go-ruleguard/dsl\"\n") - buf.WriteString("import \"github.com/quasilyte/go-ruleguard/dsl/types\"\n") - for _, src := range irfile.CustomDecls { - buf.WriteString(src) - buf.WriteString("\n") - } - buf.WriteString("type _ = dsl.Matcher\n") - buf.WriteString("type _ = types.Type\n") - - fset := token.NewFileSet() - f, err := goutil.LoadGoFile(goutil.LoadConfig{ - Fset: fset, - Filename: filename, - Data: &buf, - Importer: l.importer, - }) - if err != nil { - // If this ever happens, user will get unexpected error - // lines for it; but we should trust that 99.9% errors - // should be caught at irconv phase so we get a valid Go - // source here as well? 
- return fmt.Errorf("parse custom decls: %w", err) - } - - for _, decl := range f.Syntax.Decls { - decl, ok := decl.(*ast.FuncDecl) - if !ok { - continue - } - ctx := &quasigo.CompileContext{ - Env: l.state.env, - Package: f.Pkg, - Types: f.Types, - Fset: fset, - } - compiled, err := quasigo.Compile(ctx, decl) - if err != nil { - return err - } - if l.ctx.DebugFunc == decl.Name.String() { - l.ctx.DebugPrint(quasigo.Disasm(l.state.env, compiled)) - } - ctx.Env.AddFunc(f.Pkg.Path(), decl.Name.String(), compiled) - } - - return nil -} - -func (l *irLoader) loadRuleGroup(group *ir.RuleGroup) error { - l.group = &GoRuleGroup{ - Line: group.Line, - Filename: l.filename, - Name: group.Name, - DocSummary: group.DocSummary, - DocBefore: group.DocBefore, - DocAfter: group.DocAfter, - DocNote: group.DocNote, - DocTags: group.DocTags, - } - if l.prefix != "" { - l.group.Name = l.prefix + "/" + l.group.Name - } - - if l.ctx.GroupFilter != nil && !l.ctx.GroupFilter(l.group) { - return nil // Skip this group - } - if _, ok := l.res.groups[l.group.Name]; ok { - panic(fmt.Sprintf("duplicated function %s after the typecheck", l.group.Name)) // Should never happen - } - l.res.groups[l.group.Name] = l.group - - l.itab.EnterScope() - defer l.itab.LeaveScope() - - for _, imported := range group.Imports { - l.itab.Load(imported.Name, imported.Path) - } - - for i := range group.Rules { - rule := &group.Rules[i] - if err := l.loadRule(group, rule); err != nil { - return err - } - } - - return nil -} - -func (l *irLoader) loadRule(group *ir.RuleGroup, rule *ir.Rule) error { - proto := goRule{ - line: rule.Line, - group: l.group, - suggestion: rule.SuggestTemplate, - msg: rule.ReportTemplate, - location: rule.LocationVar, - } - - if rule.DoFuncName != "" { - doFn := l.state.env.GetFunc(l.file.PkgPath, rule.DoFuncName) - if doFn == nil { - return l.errorf(rule.Line, nil, "can't find a compiled version of %s", rule.DoFuncName) - } - proto.do = doFn - } - - info := filterInfo{ - Vars: make(map[string]struct{}), - group: group, - } - if rule.WhereExpr.IsValid() { - filter, err := l.newFilter(rule.WhereExpr, &info) - if err != nil { - return err - } - proto.filter = filter - } - - for _, pat := range rule.SyntaxPatterns { - if err := l.loadSyntaxRule(group, proto, info, rule, pat.Value, pat.Line); err != nil { - return err - } - } - for _, pat := range rule.CommentPatterns { - if err := l.loadCommentRule(proto, rule, pat.Value, pat.Line); err != nil { - return err - } - } - return nil -} - -func (l *irLoader) loadCommentRule(resultProto goRule, rule *ir.Rule, src string, line int) error { - dst := l.res.universal - pat, err := regexp.Compile(src) - if err != nil { - return l.errorf(rule.Line, err, "compile regexp") - } - resultBase := resultProto - resultBase.line = line - result := goCommentRule{ - base: resultProto, - pat: pat, - captureGroups: regexpHasCaptureGroups(src), - } - dst.commentRules = append(dst.commentRules, result) - - return nil -} - -func (l *irLoader) gogrepCompile(group *ir.RuleGroup, src string) (*gogrep.Pattern, gogrep.PatternInfo, error) { - var imports map[string]string - if len(group.Imports) != 0 { - imports = make(map[string]string) - for _, imported := range group.Imports { - imports[imported.Name] = imported.Path - } - } - - gogrepConfig := gogrep.CompileConfig{ - Fset: l.gogrepFset, - Src: src, - Strict: false, - WithTypes: true, - Imports: imports, - } - return gogrep.Compile(gogrepConfig) -} - -func (l *irLoader) loadSyntaxRule(group *ir.RuleGroup, resultProto goRule, filterInfo 
filterInfo, rule *ir.Rule, src string, line int) error { - result := resultProto - result.line = line - - pat, info, err := l.gogrepCompile(group, src) - if err != nil { - return l.errorf(rule.Line, err, "parse match pattern") - } - result.pat = pat - - for filterVar := range filterInfo.Vars { - if filterVar == "$$" { - continue // OK: a predefined var for the "entire match" - } - _, ok := info.Vars[filterVar] - if !ok { - return l.errorf(rule.Line, nil, "filter refers to a non-existing var %s", filterVar) - } - } - - dst := l.res.universal - var dstTags []nodetag.Value - switch tag := pat.NodeTag(); tag { - case nodetag.Unknown: - return l.errorf(rule.Line, nil, "can't infer a tag of %s", src) - case nodetag.Node: - return l.errorf(rule.Line, nil, "%s pattern is too general", src) - case nodetag.StmtList: - dstTags = []nodetag.Value{ - nodetag.BlockStmt, - nodetag.CaseClause, - nodetag.CommClause, - } - case nodetag.ExprList: - dstTags = []nodetag.Value{ - nodetag.CallExpr, - nodetag.CompositeLit, - nodetag.ReturnStmt, - } - default: - dstTags = []nodetag.Value{tag} - } - for _, tag := range dstTags { - dst.rulesByTag[tag] = append(dst.rulesByTag[tag], result) - } - dst.categorizedNum++ - - return nil -} - -func (l *irLoader) unwrapTypeExpr(filter ir.FilterExpr) (types.Type, error) { - typeString := l.unwrapStringExpr(filter) - if typeString == "" { - return nil, l.errorf(filter.Line, nil, "expected a non-empty type string") - } - typ, err := typeFromString(typeString) - if err != nil { - return nil, l.errorf(filter.Line, err, "parse type expr") - } - if typ == nil { - return nil, l.errorf(filter.Line, nil, "can't convert %s into a type constraint yet", typeString) - } - return typ, nil -} - -func (l *irLoader) unwrapFuncRefExpr(filter ir.FilterExpr) (*types.Func, error) { - s := l.unwrapStringExpr(filter) - if s == "" { - return nil, l.errorf(filter.Line, nil, "expected a non-empty func ref string") - } - - n, err := parser.ParseExpr(s) - if err != nil { - return nil, err - } - - switch n := n.(type) { - case *ast.CallExpr: - // TODO: implement this. - return nil, l.errorf(filter.Line, nil, "inline func signatures are not supported yet") - case *ast.SelectorExpr: - funcName := n.Sel.Name - pkgAndType, ok := n.X.(*ast.SelectorExpr) - if !ok { - return nil, l.errorf(filter.Line, nil, "invalid selector expression") - } - pkgID, ok := pkgAndType.X.(*ast.Ident) - if !ok { - return nil, l.errorf(filter.Line, nil, "invalid package name selector part") - } - pkgName := pkgID.Name - typeName := pkgAndType.Sel.Name - fqn := pkgName + "." 
+ typeName - typ, err := l.state.FindType(l.importer, l.pkg, fqn) - if err != nil { - return nil, l.errorf(filter.Line, nil, "can't find %s type", fqn) - } - switch typ := typ.Underlying().(type) { - case *types.Interface: - for i := 0; i < typ.NumMethods(); i++ { - fn := typ.Method(i) - if fn.Name() == funcName { - return fn, nil - } - } - default: - return nil, l.errorf(filter.Line, nil, "only interfaces are supported, but %s is %T", fqn, typ) - } - - default: - return nil, l.errorf(filter.Line, nil, "unexpected %T node", n) - } - - return nil, nil -} - -func (l *irLoader) unwrapInterfaceExpr(filter ir.FilterExpr) (*types.Interface, error) { - typeString := l.unwrapStringExpr(filter) - if typeString == "" { - return nil, l.errorf(filter.Line, nil, "expected a non-empty type name string") - } - - typ, err := l.state.FindType(l.importer, l.pkg, typeString) - if err == nil { - iface, ok := typ.Underlying().(*types.Interface) - if !ok { - return nil, l.errorf(filter.Line, nil, "%s is not an interface type", typeString) - } - return iface, nil - } - - n, err := parser.ParseExpr(typeString) - if err != nil { - return nil, l.errorf(filter.Line, err, "parse %s type expr", typeString) - } - qn, ok := n.(*ast.SelectorExpr) - if !ok { - return nil, l.errorf(filter.Line, nil, "can't resolve %s type; try a fully-qualified name", typeString) - } - pkgName, ok := qn.X.(*ast.Ident) - if !ok { - return nil, l.errorf(filter.Line, nil, "invalid package name") - } - pkgPath, ok := l.itab.Lookup(pkgName.Name) - if !ok { - return nil, l.errorf(filter.Line, nil, "package %s is not imported", pkgName.Name) - } - pkg, err := l.importer.Import(pkgPath) - if err != nil { - return nil, l.importErrorf(filter.Line, err, "can't load %s", pkgPath) - } - obj := pkg.Scope().Lookup(qn.Sel.Name) - if obj == nil { - return nil, l.errorf(filter.Line, nil, "%s is not found in %s", qn.Sel.Name, pkgPath) - } - iface, ok := obj.Type().Underlying().(*types.Interface) - if !ok { - return nil, l.errorf(filter.Line, nil, "%s is not an interface type", qn.Sel.Name) - } - return iface, nil -} - -func (l *irLoader) unwrapRegexpExpr(filter ir.FilterExpr) (textmatch.Pattern, error) { - patternString := l.unwrapStringExpr(filter) - if patternString == "" { - return nil, l.errorf(filter.Line, nil, "expected a non-empty regexp pattern argument") - } - re, err := textmatch.Compile(patternString) - if err != nil { - return nil, l.errorf(filter.Line, err, "compile regexp") - } - return re, nil -} - -func (l *irLoader) unwrapNodeTagExpr(filter ir.FilterExpr) (nodetag.Value, error) { - typeString := l.unwrapStringExpr(filter) - if typeString == "" { - return nodetag.Unknown, l.errorf(filter.Line, nil, "expected a non-empty string argument") - } - tag := nodetag.FromString(typeString) - if tag == nodetag.Unknown { - return tag, l.errorf(filter.Line, nil, "%s is not a valid go/ast type name", typeString) - } - return tag, nil -} - -func (l *irLoader) unwrapStringExpr(filter ir.FilterExpr) string { - if filter.Op == ir.FilterStringOp { - return filter.Value.(string) - } - return "" -} - -func (l *irLoader) stringToBasicKind(s string) types.BasicInfo { - switch s { - case "integer": - return types.IsInteger - case "unsigned": - return types.IsUnsigned - case "float": - return types.IsFloat - case "complex": - return types.IsComplex - case "untyped": - return types.IsUnsigned - case "numeric": - return types.IsNumeric - default: - return 0 - } -} - -func (l *irLoader) newFilter(filter ir.FilterExpr, info *filterInfo) (matchFilter, error) { - if 
filter.HasVar() { - info.Vars[filter.Value.(string)] = struct{}{} - } - - if filter.IsBinaryExpr() { - return l.newBinaryExprFilter(filter, info) - } - - result := matchFilter{src: filter.Src} - - switch filter.Op { - case ir.FilterNotOp: - x, err := l.newFilter(filter.Args[0], info) - if err != nil { - return result, err - } - result.fn = makeNotFilter(result.src, x) - - case ir.FilterVarTextMatchesOp: - re, err := l.unwrapRegexpExpr(filter.Args[0]) - if err != nil { - return result, err - } - result.fn = makeTextMatchesFilter(result.src, filter.Value.(string), re) - - case ir.FilterVarObjectIsOp: - typeString := l.unwrapStringExpr(filter.Args[0]) - if typeString == "" { - return result, l.errorf(filter.Line, nil, "expected a non-empty string argument") - } - switch typeString { - case "Func", "Var", "Const", "TypeName", "Label", "PkgName", "Builtin", "Nil": - // OK. - default: - return result, l.errorf(filter.Line, nil, "%s is not a valid go/types object name", typeString) - } - result.fn = makeObjectIsFilter(result.src, filter.Value.(string), typeString) - - case ir.FilterRootNodeParentIsOp: - tag, err := l.unwrapNodeTagExpr(filter.Args[0]) - if err != nil { - return result, err - } - result.fn = makeRootParentNodeIsFilter(result.src, tag) - - case ir.FilterVarNodeIsOp: - tag, err := l.unwrapNodeTagExpr(filter.Args[0]) - if err != nil { - return result, err - } - result.fn = makeNodeIsFilter(result.src, filter.Value.(string), tag) - - case ir.FilterRootSinkTypeIsOp: - typeString := l.unwrapStringExpr(filter.Args[0]) - if typeString == "" { - return result, l.errorf(filter.Line, nil, "expected a non-empty string argument") - } - ctx := typematch.Context{Itab: l.itab} - pat, err := typematch.Parse(&ctx, typeString) - if err != nil { - return result, l.errorf(filter.Line, err, "parse type expr") - } - result.fn = makeRootSinkTypeIsFilter(result.src, pat) - - case ir.FilterVarTypeHasPointersOp: - result.fn = makeTypeHasPointersFilter(result.src, filter.Value.(string)) - - case ir.FilterVarTypeOfKindOp, ir.FilterVarTypeUnderlyingOfKindOp: - kindString := l.unwrapStringExpr(filter.Args[0]) - if kindString == "" { - return result, l.errorf(filter.Line, nil, "expected a non-empty string argument") - } - underlying := filter.Op == ir.FilterVarTypeUnderlyingOfKindOp - switch kindString { - case "signed": - result.fn = makeTypeIsSignedFilter(result.src, filter.Value.(string), underlying) - case "int": - result.fn = makeTypeIsIntUintFilter(result.src, filter.Value.(string), underlying, types.Int) - case "uint": - result.fn = makeTypeIsIntUintFilter(result.src, filter.Value.(string), underlying, types.Uint) - default: - kind := l.stringToBasicKind(kindString) - if kind == 0 { - return result, l.errorf(filter.Line, nil, "unknown kind %s", kindString) - } - result.fn = makeTypeOfKindFilter(result.src, filter.Value.(string), underlying, kind) - } - - case ir.FilterVarTypeIdenticalToOp: - lhsVarname := filter.Value.(string) - rhsVarname := filter.Args[0].Value.(string) - result.fn = makeTypesIdenticalFilter(result.src, lhsVarname, rhsVarname) - - case ir.FilterVarTypeIsOp, ir.FilterVarTypeUnderlyingIsOp: - typeString := l.unwrapStringExpr(filter.Args[0]) - if typeString == "" { - return result, l.errorf(filter.Line, nil, "expected a non-empty string argument") - } - ctx := typematch.Context{Itab: l.itab} - pat, err := typematch.Parse(&ctx, typeString) - if err != nil { - return result, l.errorf(filter.Line, err, "parse type expr") - } - underlying := filter.Op == ir.FilterVarTypeUnderlyingIsOp - 
result.fn = makeTypeIsFilter(result.src, filter.Value.(string), underlying, pat) - - case ir.FilterVarTypeConvertibleToOp: - dstType, err := l.unwrapTypeExpr(filter.Args[0]) - if err != nil { - return result, err - } - result.fn = makeTypeConvertibleToFilter(result.src, filter.Value.(string), dstType) - - case ir.FilterVarTypeAssignableToOp: - dstType, err := l.unwrapTypeExpr(filter.Args[0]) - if err != nil { - return result, err - } - result.fn = makeTypeAssignableToFilter(result.src, filter.Value.(string), dstType) - - case ir.FilterVarTypeImplementsOp: - iface, err := l.unwrapInterfaceExpr(filter.Args[0]) - if err != nil { - return result, err - } - result.fn = makeTypeImplementsFilter(result.src, filter.Value.(string), iface) - - case ir.FilterVarTypeHasMethodOp: - fn, err := l.unwrapFuncRefExpr(filter.Args[0]) - if err != nil { - return result, err - } - if fn == nil { - return result, l.errorf(filter.Line, nil, "can't resolve HasMethod() argument") - } - result.fn = makeTypeHasMethodFilter(result.src, filter.Value.(string), fn) - - case ir.FilterVarPureOp: - result.fn = makePureFilter(result.src, filter.Value.(string)) - case ir.FilterVarConstOp: - result.fn = makeConstFilter(result.src, filter.Value.(string)) - case ir.FilterVarObjectIsGlobalOp: - result.fn = makeObjectIsGlobalFilter(result.src, filter.Value.(string)) - case ir.FilterVarObjectIsVariadicParamOp: - result.fn = makeObjectIsVariadicParamFilter(result.src, filter.Value.(string)) - case ir.FilterVarConstSliceOp: - result.fn = makeConstSliceFilter(result.src, filter.Value.(string)) - case ir.FilterVarAddressableOp: - result.fn = makeAddressableFilter(result.src, filter.Value.(string)) - case ir.FilterVarComparableOp: - result.fn = makeComparableFilter(result.src, filter.Value.(string)) - - case ir.FilterFileImportsOp: - result.fn = makeFileImportsFilter(result.src, filter.Value.(string)) - - case ir.FilterDeadcodeOp: - result.fn = makeDeadcodeFilter(result.src) - - case ir.FilterGoVersionEqOp: - version, err := ParseGoVersion(filter.Value.(string)) - if err != nil { - return result, l.errorf(filter.Line, err, "parse Go version") - } - result.fn = makeGoVersionFilter(result.src, token.EQL, version) - case ir.FilterGoVersionLessThanOp: - version, err := ParseGoVersion(filter.Value.(string)) - if err != nil { - return result, l.errorf(filter.Line, err, "parse Go version") - } - result.fn = makeGoVersionFilter(result.src, token.LSS, version) - case ir.FilterGoVersionGreaterThanOp: - version, err := ParseGoVersion(filter.Value.(string)) - if err != nil { - return result, l.errorf(filter.Line, err, "parse Go version") - } - result.fn = makeGoVersionFilter(result.src, token.GTR, version) - case ir.FilterGoVersionLessEqThanOp: - version, err := ParseGoVersion(filter.Value.(string)) - if err != nil { - return result, l.errorf(filter.Line, err, "parse Go version") - } - result.fn = makeGoVersionFilter(result.src, token.LEQ, version) - case ir.FilterGoVersionGreaterEqThanOp: - version, err := ParseGoVersion(filter.Value.(string)) - if err != nil { - return result, l.errorf(filter.Line, err, "parse Go version") - } - result.fn = makeGoVersionFilter(result.src, token.GEQ, version) - - case ir.FilterFilePkgPathMatchesOp: - re, err := regexp.Compile(filter.Value.(string)) - if err != nil { - return result, l.errorf(filter.Line, err, "compile regexp") - } - result.fn = makeFilePkgPathMatchesFilter(result.src, re) - - case ir.FilterFileNameMatchesOp: - re, err := regexp.Compile(filter.Value.(string)) - if err != nil { - return result, 
l.errorf(filter.Line, err, "compile regexp") - } - result.fn = makeFileNameMatchesFilter(result.src, re) - - case ir.FilterVarContainsOp: - src := filter.Args[0].Value.(string) - pat, _, err := l.gogrepCompile(info.group, src) - if err != nil { - return result, l.errorf(filter.Line, err, "parse contains pattern") - } - result.fn = makeVarContainsFilter(result.src, filter.Value.(string), pat) - - case ir.FilterVarFilterOp: - funcName := filter.Args[0].Value.(string) - userFn := l.state.env.GetFunc(l.file.PkgPath, funcName) - if userFn == nil { - return result, l.errorf(filter.Line, nil, "can't find a compiled version of %s", funcName) - } - result.fn = makeCustomVarFilter(result.src, filter.Value.(string), userFn) - } - - if result.fn == nil { - return result, l.errorf(filter.Line, nil, "unsupported expr: %s (%s)", result.src, filter.Op) - } - - return result, nil -} - -func (l *irLoader) newBinaryExprFilter(filter ir.FilterExpr, info *filterInfo) (matchFilter, error) { - if filter.Op == ir.FilterAndOp || filter.Op == ir.FilterOrOp { - result := matchFilter{src: filter.Src} - lhs, err := l.newFilter(filter.Args[0], info) - if err != nil { - return result, err - } - rhs, err := l.newFilter(filter.Args[1], info) - if err != nil { - return result, err - } - if filter.Op == ir.FilterAndOp { - result.fn = makeAndFilter(lhs, rhs) - } else { - result.fn = makeOrFilter(lhs, rhs) - } - return result, nil - } - - // If constexpr is on the LHS, move it to the right, so the code below - // can imply constants being on the RHS all the time. - if filter.Args[0].IsBasicLit() && !filter.Args[1].IsBasicLit() { - // Just a precaution: if we ever have a float values here, - // we may not want to rearrange anything. - switch filter.Args[0].Value.(type) { - case string, int64: - switch filter.Op { - case ir.FilterEqOp, ir.FilterNeqOp: - // Simple commutative ops. Just swap the args. 
- newFilter := filter - newFilter.Args = []ir.FilterExpr{filter.Args[1], filter.Args[0]} - return l.newBinaryExprFilter(newFilter, info) - } - } - } - - result := matchFilter{src: filter.Src} - - var tok token.Token - switch filter.Op { - case ir.FilterEqOp: - tok = token.EQL - case ir.FilterNeqOp: - tok = token.NEQ - case ir.FilterGtOp: - tok = token.GTR - case ir.FilterGtEqOp: - tok = token.GEQ - case ir.FilterLtOp: - tok = token.LSS - case ir.FilterLtEqOp: - tok = token.LEQ - default: - return result, l.errorf(filter.Line, nil, "unsupported operator in binary expr: %s", result.src) - } - - lhs := filter.Args[0] - rhs := filter.Args[1] - var rhsValue constant.Value - switch rhs.Op { - case ir.FilterStringOp: - rhsValue = constant.MakeString(rhs.Value.(string)) - case ir.FilterIntOp: - rhsValue = constant.MakeInt64(rhs.Value.(int64)) - } - - switch lhs.Op { - case ir.FilterVarLineOp: - if rhsValue != nil { - result.fn = makeLineConstFilter(result.src, lhs.Value.(string), tok, rhsValue) - } else if rhs.Op == lhs.Op { - result.fn = makeLineFilter(result.src, lhs.Value.(string), tok, rhs.Value.(string)) - } - case ir.FilterVarTypeSizeOp: - if rhsValue != nil { - result.fn = makeTypeSizeConstFilter(result.src, lhs.Value.(string), tok, rhsValue) - } else { - result.fn = makeTypeSizeFilter(result.src, lhs.Value.(string), tok, rhs.Value.(string)) - } - case ir.FilterVarValueIntOp: - if rhsValue != nil { - result.fn = makeValueIntConstFilter(result.src, lhs.Value.(string), tok, rhsValue) - } else if rhs.Op == lhs.Op { - result.fn = makeValueIntFilter(result.src, lhs.Value.(string), tok, rhs.Value.(string)) - } - case ir.FilterVarTextOp: - if rhsValue != nil { - result.fn = makeTextConstFilter(result.src, lhs.Value.(string), tok, rhsValue) - } else if rhs.Op == lhs.Op { - result.fn = makeTextFilter(result.src, lhs.Value.(string), tok, rhs.Value.(string)) - } - } - - if result.fn == nil { - return result, l.errorf(filter.Line, nil, "unsupported binary expr: %s", result.src) - } - - return result, nil -} - -type filterInfo struct { - Vars map[string]struct{} - - group *ir.RuleGroup -} diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/ir_utils.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/ir_utils.go deleted file mode 100644 index 62c24bf15..000000000 --- a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/ir_utils.go +++ /dev/null @@ -1,41 +0,0 @@ -package ruleguard - -import ( - "fmt" - "go/ast" - "go/parser" - "go/types" - - "github.com/quasilyte/go-ruleguard/ruleguard/ir" - "github.com/quasilyte/go-ruleguard/ruleguard/irconv" -) - -func convertAST(ctx *LoadContext, imp *goImporter, filename string, src []byte) (*ir.File, *types.Package, error) { - parserFlags := parser.ParseComments - f, err := parser.ParseFile(ctx.Fset, filename, src, parserFlags) - if err != nil { - return nil, nil, fmt.Errorf("parse file error: %w", err) - } - - typechecker := types.Config{Importer: imp} - typesInfo := &types.Info{ - Types: map[ast.Expr]types.TypeAndValue{}, - Uses: map[*ast.Ident]types.Object{}, - Defs: map[*ast.Ident]types.Object{}, - } - pkg, err := typechecker.Check("gorules", ctx.Fset, []*ast.File{f}, typesInfo) - if err != nil { - return nil, nil, fmt.Errorf("typechecker error: %w", err) - } - irconvCtx := &irconv.Context{ - Pkg: pkg, - Types: typesInfo, - Fset: ctx.Fset, - Src: src, - } - irfile, err := irconv.ConvertFile(irconvCtx, f) - if err != nil { - return nil, nil, fmt.Errorf("irconv error: %w", err) - } - return irfile, pkg, nil -} diff --git 
a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/irconv/irconv.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/irconv/irconv.go deleted file mode 100644 index 4eb90d51b..000000000 --- a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/irconv/irconv.go +++ /dev/null @@ -1,858 +0,0 @@ -package irconv - -import ( - "fmt" - "go/ast" - "go/constant" - "go/token" - "go/types" - "path" - "strconv" - "strings" - - "github.com/go-toolsmith/astcopy" - "golang.org/x/tools/go/ast/astutil" - - "github.com/quasilyte/go-ruleguard/ruleguard/goutil" - "github.com/quasilyte/go-ruleguard/ruleguard/ir" -) - -type Context struct { - Pkg *types.Package - Types *types.Info - Fset *token.FileSet - Src []byte -} - -func ConvertFile(ctx *Context, f *ast.File) (result *ir.File, err error) { - defer func() { - if err != nil { - return - } - rv := recover() - if rv == nil { - return - } - if convErr, ok := rv.(convError); ok { - err = convErr.err - return - } - panic(rv) // not our panic - }() - - conv := &converter{ - types: ctx.Types, - pkg: ctx.Pkg, - fset: ctx.Fset, - src: ctx.Src, - } - result = conv.ConvertFile(f) - return result, nil -} - -type convError struct { - err error -} - -type localMacroFunc struct { - name string - params []string - template ast.Expr -} - -type converter struct { - types *types.Info - pkg *types.Package - fset *token.FileSet - src []byte - - group *ir.RuleGroup - groupFuncs []localMacroFunc - - dslPkgname string // The local name of the "ruleguard/dsl" package (usually its just "dsl") -} - -func (conv *converter) errorf(n ast.Node, format string, args ...interface{}) convError { - loc := conv.fset.Position(n.Pos()) - msg := fmt.Sprintf(format, args...) - return convError{err: fmt.Errorf("%s:%d: %s", loc.Filename, loc.Line, msg)} -} - -func (conv *converter) ConvertFile(f *ast.File) *ir.File { - result := &ir.File{ - PkgPath: conv.pkg.Path(), - } - - conv.dslPkgname = "dsl" - - for _, imp := range f.Imports { - importPath, err := strconv.Unquote(imp.Path.Value) - if err != nil { - panic(conv.errorf(imp, "unquote %s import path: %s", imp.Path.Value, err)) - } - if importPath == "github.com/quasilyte/go-ruleguard/dsl" { - if imp.Name != nil { - conv.dslPkgname = imp.Name.Name - } - } - // Right now this list is hardcoded from the knowledge of which - // stdlib packages are supported inside the bytecode. 
- switch importPath { - case "fmt", "strings", "strconv": - conv.addCustomImport(result, importPath) - } - } - - for _, decl := range f.Decls { - funcDecl, ok := decl.(*ast.FuncDecl) - if !ok { - genDecl := decl.(*ast.GenDecl) - if genDecl.Tok != token.IMPORT { - conv.addCustomDecl(result, decl) - } - continue - } - - if funcDecl.Name.String() == "init" { - conv.convertInitFunc(result, funcDecl) - continue - } - - if conv.isMatcherFunc(funcDecl) { - result.RuleGroups = append(result.RuleGroups, *conv.convertRuleGroup(funcDecl)) - } else { - conv.addCustomDecl(result, funcDecl) - } - } - - return result -} - -func (conv *converter) convertInitFunc(dst *ir.File, decl *ast.FuncDecl) { - for _, stmt := range decl.Body.List { - exprStmt, ok := stmt.(*ast.ExprStmt) - if !ok { - panic(conv.errorf(stmt, "unsupported statement")) - } - call, ok := exprStmt.X.(*ast.CallExpr) - if !ok { - panic(conv.errorf(stmt, "unsupported expr")) - } - fn, ok := call.Fun.(*ast.SelectorExpr) - if !ok { - panic(conv.errorf(stmt, "unsupported call")) - } - pkg, ok := fn.X.(*ast.Ident) - if !ok || pkg.Name != conv.dslPkgname { - panic(conv.errorf(stmt, "unsupported call")) - } - - switch fn.Sel.Name { - case "ImportRules": - prefix := conv.parseStringArg(call.Args[0]) - bundleSelector, ok := call.Args[1].(*ast.SelectorExpr) - if !ok { - panic(conv.errorf(call.Args[1], "expected a `pkgname.Bundle` argument")) - } - bundleObj := conv.types.ObjectOf(bundleSelector.Sel) - dst.BundleImports = append(dst.BundleImports, ir.BundleImport{ - Prefix: prefix, - PkgPath: bundleObj.Pkg().Path(), - Line: conv.fset.Position(exprStmt.Pos()).Line, - }) - - default: - panic(conv.errorf(stmt, "unsupported %s call", fn.Sel.Name)) - } - } -} - -func (conv *converter) addCustomImport(dst *ir.File, pkgPath string) { - dst.CustomDecls = append(dst.CustomDecls, `import "`+pkgPath+`"`) -} - -func (conv *converter) addCustomDecl(dst *ir.File, decl ast.Decl) { - begin := conv.fset.Position(decl.Pos()) - end := conv.fset.Position(decl.End()) - src := conv.src[begin.Offset:end.Offset] - dst.CustomDecls = append(dst.CustomDecls, string(src)) -} - -func (conv *converter) isMatcherFunc(f *ast.FuncDecl) bool { - typ := conv.types.ObjectOf(f.Name).Type().(*types.Signature) - return typ.Results().Len() == 0 && - typ.Params().Len() == 1 && - typ.Params().At(0).Type().String() == "github.com/quasilyte/go-ruleguard/dsl.Matcher" -} - -func (conv *converter) convertRuleGroup(decl *ast.FuncDecl) *ir.RuleGroup { - result := &ir.RuleGroup{ - Line: conv.fset.Position(decl.Name.Pos()).Line, - } - conv.group = result - conv.groupFuncs = conv.groupFuncs[:0] - - result.Name = decl.Name.String() - result.MatcherName = decl.Type.Params.List[0].Names[0].String() - - if decl.Doc != nil { - conv.convertDocComments(decl.Doc) - } - - seenRules := false - for _, stmt := range decl.Body.List { - if assign, ok := stmt.(*ast.AssignStmt); ok && assign.Tok == token.DEFINE { - conv.localDefine(assign) - continue - } - - if _, ok := stmt.(*ast.DeclStmt); ok { - continue - } - stmtExpr, ok := stmt.(*ast.ExprStmt) - if !ok { - panic(conv.errorf(stmt, "expected a %s method call, found %s", result.MatcherName, goutil.SprintNode(conv.fset, stmt))) - } - call, ok := stmtExpr.X.(*ast.CallExpr) - if !ok { - panic(conv.errorf(stmt, "expected a %s method call, found %s", result.MatcherName, goutil.SprintNode(conv.fset, stmt))) - } - - switch conv.matcherMethodName(call) { - case "Import": - if seenRules { - panic(conv.errorf(call, "Import() should be used before any rules definitions")) - } 
- conv.doMatcherImport(call) - default: - seenRules = true - conv.convertRuleExpr(call) - } - } - - return result -} - -func (conv *converter) findLocalMacro(call *ast.CallExpr) *localMacroFunc { - fn, ok := call.Fun.(*ast.Ident) - if !ok { - return nil - } - for i := range conv.groupFuncs { - if conv.groupFuncs[i].name == fn.Name { - return &conv.groupFuncs[i] - } - } - return nil -} - -func (conv *converter) expandMacro(macro *localMacroFunc, call *ast.CallExpr) ir.FilterExpr { - // Check that call args are OK. - // Since "function calls" are implemented as a macro expansion here, - // we don't allow arguments that have a non-trivial evaluation. - isSafe := func(arg ast.Expr) bool { - switch arg := astutil.Unparen(arg).(type) { - case *ast.BasicLit, *ast.Ident: - return true - - case *ast.IndexExpr: - mapIdent, ok := astutil.Unparen(arg.X).(*ast.Ident) - if !ok { - return false - } - if mapIdent.Name != conv.group.MatcherName { - return false - } - key, ok := astutil.Unparen(arg.Index).(*ast.BasicLit) - if !ok || key.Kind != token.STRING { - return false - } - return true - - default: - return false - } - } - args := map[string]ast.Expr{} - for i, arg := range call.Args { - paramName := macro.params[i] - if !isSafe(arg) { - panic(conv.errorf(arg, "unsupported/too complex %s argument", paramName)) - } - args[paramName] = astutil.Unparen(arg) - } - - body := astcopy.Expr(macro.template) - expanded := astutil.Apply(body, nil, func(cur *astutil.Cursor) bool { - if ident, ok := cur.Node().(*ast.Ident); ok { - arg, ok := args[ident.Name] - if ok { - cur.Replace(arg) - return true - } - } - // astcopy above will copy the AST tree, but it won't update - // the associated types.Info map of const values. - // We'll try to solve that issue at least partially here. 
- if lit, ok := cur.Node().(*ast.BasicLit); ok { - switch lit.Kind { - case token.STRING: - val, err := strconv.Unquote(lit.Value) - if err == nil { - conv.types.Types[lit] = types.TypeAndValue{ - Type: types.Typ[types.UntypedString], - Value: constant.MakeString(val), - } - } - case token.INT: - val, err := strconv.ParseInt(lit.Value, 0, 64) - if err == nil { - conv.types.Types[lit] = types.TypeAndValue{ - Type: types.Typ[types.UntypedInt], - Value: constant.MakeInt64(val), - } - } - case token.FLOAT: - val, err := strconv.ParseFloat(lit.Value, 64) - if err == nil { - conv.types.Types[lit] = types.TypeAndValue{ - Type: types.Typ[types.UntypedFloat], - Value: constant.MakeFloat64(val), - } - } - } - } - return true - }) - - return conv.convertFilterExpr(expanded.(ast.Expr)) -} - -func (conv *converter) localDefine(assign *ast.AssignStmt) { - if len(assign.Lhs) != 1 || len(assign.Rhs) != 1 { - panic(conv.errorf(assign, "multi-value := is not supported")) - } - lhs, ok := assign.Lhs[0].(*ast.Ident) - if !ok { - panic(conv.errorf(assign.Lhs[0], "only simple ident lhs is supported")) - } - rhs := assign.Rhs[0] - fn, ok := rhs.(*ast.FuncLit) - if !ok { - panic(conv.errorf(rhs, "only func literals are supported on the rhs")) - } - typ := conv.types.TypeOf(fn).(*types.Signature) - isBoolResult := typ.Results() != nil && - typ.Results().Len() == 1 && - typ.Results().At(0).Type() == types.Typ[types.Bool] - if !isBoolResult { - var loc ast.Node = fn.Type - if fn.Type.Results != nil { - loc = fn.Type.Results - } - panic(conv.errorf(loc, "only funcs returning bool are supported")) - } - if len(fn.Body.List) != 1 { - panic(conv.errorf(fn.Body, "only simple 1 return statement funcs are supported")) - } - stmt, ok := fn.Body.List[0].(*ast.ReturnStmt) - if !ok { - panic(conv.errorf(fn.Body.List[0], "expected a return statement, found %T", fn.Body.List[0])) - } - var params []string - for _, field := range fn.Type.Params.List { - for _, id := range field.Names { - params = append(params, id.Name) - } - } - macro := localMacroFunc{ - name: lhs.Name, - params: params, - template: stmt.Results[0], - } - conv.groupFuncs = append(conv.groupFuncs, macro) -} - -func (conv *converter) doMatcherImport(call *ast.CallExpr) { - pkgPath := conv.parseStringArg(call.Args[0]) - pkgName := path.Base(pkgPath) - conv.group.Imports = append(conv.group.Imports, ir.PackageImport{ - Path: pkgPath, - Name: pkgName, - }) -} - -func (conv *converter) matcherMethodName(call *ast.CallExpr) string { - selector, ok := call.Fun.(*ast.SelectorExpr) - if !ok { - return "" - } - id, ok := selector.X.(*ast.Ident) - if !ok || id.Name != conv.group.MatcherName { - return "" - } - return selector.Sel.Name -} - -func (conv *converter) convertDocComments(comment *ast.CommentGroup) { - knownPragmas := []string{ - "tags", - "summary", - "before", - "after", - "note", - } - - for _, c := range comment.List { - if !strings.HasPrefix(c.Text, "//doc:") { - continue - } - s := strings.TrimPrefix(c.Text, "//doc:") - var pragma string - for i := range knownPragmas { - if strings.HasPrefix(s, knownPragmas[i]) { - pragma = knownPragmas[i] - break - } - } - if pragma == "" { - panic(conv.errorf(c, "unrecognized 'doc' pragma in comment")) - } - s = strings.TrimPrefix(s, pragma) - s = strings.TrimSpace(s) - switch pragma { - case "summary": - conv.group.DocSummary = s - case "before": - conv.group.DocBefore = s - case "after": - conv.group.DocAfter = s - case "note": - conv.group.DocNote = s - case "tags": - conv.group.DocTags = strings.Fields(s) - default: - 
panic("unhandled 'doc' pragma: " + pragma) // Should never happen - } - } -} - -func (conv *converter) convertRuleExpr(call *ast.CallExpr) { - origCall := call - var ( - matchArgs *[]ast.Expr - matchCommentArgs *[]ast.Expr - whereArgs *[]ast.Expr - suggestArgs *[]ast.Expr - reportArgs *[]ast.Expr - atArgs *[]ast.Expr - doArgs *[]ast.Expr - ) - - for { - chain, ok := call.Fun.(*ast.SelectorExpr) - if !ok { - break - } - switch chain.Sel.Name { - case "Match": - if matchArgs != nil { - panic(conv.errorf(chain.Sel, "Match() can't be repeated")) - } - if matchCommentArgs != nil { - panic(conv.errorf(chain.Sel, "Match() and MatchComment() can't be combined")) - } - matchArgs = &call.Args - case "MatchComment": - if matchCommentArgs != nil { - panic(conv.errorf(chain.Sel, "MatchComment() can't be repeated")) - } - if matchArgs != nil { - panic(conv.errorf(chain.Sel, "Match() and MatchComment() can't be combined")) - } - matchCommentArgs = &call.Args - case "Where": - if whereArgs != nil { - panic(conv.errorf(chain.Sel, "Where() can't be repeated")) - } - whereArgs = &call.Args - case "Suggest": - if suggestArgs != nil { - panic(conv.errorf(chain.Sel, "Suggest() can't be repeated")) - } - suggestArgs = &call.Args - case "Report": - if reportArgs != nil { - panic(conv.errorf(chain.Sel, "Report() can't be repeated")) - } - reportArgs = &call.Args - case "Do": - doArgs = &call.Args - case "At": - if atArgs != nil { - panic(conv.errorf(chain.Sel, "At() can't be repeated")) - } - atArgs = &call.Args - default: - panic(conv.errorf(chain.Sel, "unexpected %s method", chain.Sel.Name)) - } - call, ok = chain.X.(*ast.CallExpr) - if !ok { - break - } - } - - // AST patterns for Match() or regexp patterns for MatchComment(). - var alternatives []string - var alternativeLines []int - - if matchArgs == nil && matchCommentArgs == nil { - panic(conv.errorf(origCall, "missing Match() or MatchComment() call")) - } - - if matchArgs != nil { - for _, arg := range *matchArgs { - alternatives = append(alternatives, conv.parseStringArg(arg)) - alternativeLines = append(alternativeLines, conv.fset.Position(arg.Pos()).Line) - } - } else { - for _, arg := range *matchCommentArgs { - alternatives = append(alternatives, conv.parseStringArg(arg)) - alternativeLines = append(alternativeLines, conv.fset.Position(arg.Pos()).Line) - } - } - - rule := ir.Rule{Line: conv.fset.Position(origCall.Pos()).Line} - - if atArgs != nil { - index, ok := (*atArgs)[0].(*ast.IndexExpr) - if !ok { - panic(conv.errorf((*atArgs)[0], "expected %s[`varname`] expression", conv.group.MatcherName)) - } - rule.LocationVar = conv.parseStringArg(index.Index) - } - - if whereArgs != nil { - rule.WhereExpr = conv.convertFilterExpr((*whereArgs)[0]) - } - - if suggestArgs != nil { - rule.SuggestTemplate = conv.parseStringArg((*suggestArgs)[0]) - } - - if suggestArgs == nil && reportArgs == nil && doArgs == nil { - panic(conv.errorf(origCall, "missing Report(), Suggest() or Do() call")) - } - if doArgs != nil { - if suggestArgs != nil || reportArgs != nil { - panic(conv.errorf(origCall, "can't combine Report/Suggest with Do yet")) - } - if matchCommentArgs != nil { - panic(conv.errorf(origCall, "can't use Do() with MatchComment() yet")) - } - funcName, ok := (*doArgs)[0].(*ast.Ident) - if !ok { - panic(conv.errorf((*doArgs)[0], "only named function args are supported")) - } - rule.DoFuncName = funcName.String() - } else { - if reportArgs == nil { - rule.ReportTemplate = "suggestion: " + rule.SuggestTemplate - } else { - rule.ReportTemplate = 
conv.parseStringArg((*reportArgs)[0]) - } - } - - for i, alt := range alternatives { - pat := ir.PatternString{ - Line: alternativeLines[i], - Value: alt, - } - if matchArgs != nil { - rule.SyntaxPatterns = append(rule.SyntaxPatterns, pat) - } else { - rule.CommentPatterns = append(rule.CommentPatterns, pat) - } - } - conv.group.Rules = append(conv.group.Rules, rule) -} - -func (conv *converter) convertFilterExpr(e ast.Expr) ir.FilterExpr { - result := conv.convertFilterExprImpl(e) - result.Src = goutil.SprintNode(conv.fset, e) - result.Line = conv.fset.Position(e.Pos()).Line - if !result.IsValid() { - panic(conv.errorf(e, "unsupported expr: %s (%T)", result.Src, e)) - } - return result -} - -func (conv *converter) convertFilterExprImpl(e ast.Expr) ir.FilterExpr { - if cv := conv.types.Types[e].Value; cv != nil { - switch cv.Kind() { - case constant.String: - v := constant.StringVal(cv) - return ir.FilterExpr{Op: ir.FilterStringOp, Value: v} - case constant.Int: - v, ok := constant.Int64Val(cv) - if ok { - return ir.FilterExpr{Op: ir.FilterIntOp, Value: v} - } - } - } - convertExprList := func(list []ast.Expr) []ir.FilterExpr { - if len(list) == 0 { - return nil - } - result := make([]ir.FilterExpr, len(list)) - for i, e := range list { - result[i] = conv.convertFilterExpr(e) - } - return result - } - - switch e := e.(type) { - case *ast.ParenExpr: - return conv.convertFilterExpr(e.X) - - case *ast.UnaryExpr: - x := conv.convertFilterExpr(e.X) - args := []ir.FilterExpr{x} - switch e.Op { - case token.NOT: - return ir.FilterExpr{Op: ir.FilterNotOp, Args: args} - } - - case *ast.BinaryExpr: - x := conv.convertFilterExpr(e.X) - y := conv.convertFilterExpr(e.Y) - args := []ir.FilterExpr{x, y} - switch e.Op { - case token.LAND: - return ir.FilterExpr{Op: ir.FilterAndOp, Args: args} - case token.LOR: - return ir.FilterExpr{Op: ir.FilterOrOp, Args: args} - case token.NEQ: - return ir.FilterExpr{Op: ir.FilterNeqOp, Args: args} - case token.EQL: - return ir.FilterExpr{Op: ir.FilterEqOp, Args: args} - case token.GTR: - return ir.FilterExpr{Op: ir.FilterGtOp, Args: args} - case token.LSS: - return ir.FilterExpr{Op: ir.FilterLtOp, Args: args} - case token.GEQ: - return ir.FilterExpr{Op: ir.FilterGtEqOp, Args: args} - case token.LEQ: - return ir.FilterExpr{Op: ir.FilterLtEqOp, Args: args} - default: - panic(conv.errorf(e, "unexpected binary op: %s", e.Op.String())) - } - - case *ast.SelectorExpr: - op := conv.inspectFilterSelector(e) - switch op.path { - case "Text": - return ir.FilterExpr{Op: ir.FilterVarTextOp, Value: op.varName} - case "Line": - return ir.FilterExpr{Op: ir.FilterVarLineOp, Value: op.varName} - case "Pure": - return ir.FilterExpr{Op: ir.FilterVarPureOp, Value: op.varName} - case "Const": - return ir.FilterExpr{Op: ir.FilterVarConstOp, Value: op.varName} - case "ConstSlice": - return ir.FilterExpr{Op: ir.FilterVarConstSliceOp, Value: op.varName} - case "Addressable": - return ir.FilterExpr{Op: ir.FilterVarAddressableOp, Value: op.varName} - case "Comparable": - return ir.FilterExpr{Op: ir.FilterVarComparableOp, Value: op.varName} - case "Type.Size": - return ir.FilterExpr{Op: ir.FilterVarTypeSizeOp, Value: op.varName} - } - - case *ast.CallExpr: - op := conv.inspectFilterSelector(e) - switch op.path { - case "Deadcode": - return ir.FilterExpr{Op: ir.FilterDeadcodeOp} - case "GoVersion.Eq": - return ir.FilterExpr{Op: ir.FilterGoVersionEqOp, Value: conv.parseStringArg(e.Args[0])} - case "GoVersion.LessThan": - return ir.FilterExpr{Op: ir.FilterGoVersionLessThanOp, Value: 
conv.parseStringArg(e.Args[0])} - case "GoVersion.GreaterThan": - return ir.FilterExpr{Op: ir.FilterGoVersionGreaterThanOp, Value: conv.parseStringArg(e.Args[0])} - case "GoVersion.LessEqThan": - return ir.FilterExpr{Op: ir.FilterGoVersionLessEqThanOp, Value: conv.parseStringArg(e.Args[0])} - case "GoVersion.GreaterEqThan": - return ir.FilterExpr{Op: ir.FilterGoVersionGreaterEqThanOp, Value: conv.parseStringArg(e.Args[0])} - case "File.Imports": - return ir.FilterExpr{Op: ir.FilterFileImportsOp, Value: conv.parseStringArg(e.Args[0])} - case "File.PkgPath.Matches": - return ir.FilterExpr{Op: ir.FilterFilePkgPathMatchesOp, Value: conv.parseStringArg(e.Args[0])} - case "File.Name.Matches": - return ir.FilterExpr{Op: ir.FilterFileNameMatchesOp, Value: conv.parseStringArg(e.Args[0])} - - case "Contains": - pat := conv.parseStringArg(e.Args[0]) - return ir.FilterExpr{ - Op: ir.FilterVarContainsOp, - Value: op.varName, - Args: []ir.FilterExpr{ - {Op: ir.FilterStringOp, Value: pat}, - }, - } - - case "Type.IdenticalTo": - // TODO: reuse the code with parsing At() args? - index, ok := e.Args[0].(*ast.IndexExpr) - if !ok { - panic(conv.errorf(e.Args[0], "expected %s[`varname`] expression", conv.group.MatcherName)) - } - rhsVarname := conv.parseStringArg(index.Index) - args := []ir.FilterExpr{ - {Op: ir.FilterStringOp, Value: rhsVarname}, - } - return ir.FilterExpr{Op: ir.FilterVarTypeIdenticalToOp, Value: op.varName, Args: args} - - case "Filter": - funcName, ok := e.Args[0].(*ast.Ident) - if !ok { - panic(conv.errorf(e.Args[0], "only named function args are supported")) - } - args := []ir.FilterExpr{ - {Op: ir.FilterFilterFuncRefOp, Value: funcName.String()}, - } - return ir.FilterExpr{Op: ir.FilterVarFilterOp, Value: op.varName, Args: args} - } - - if macro := conv.findLocalMacro(e); macro != nil { - return conv.expandMacro(macro, e) - } - - args := convertExprList(e.Args) - switch op.path { - case "Value.Int": - return ir.FilterExpr{Op: ir.FilterVarValueIntOp, Value: op.varName, Args: args} - case "Text.Matches": - return ir.FilterExpr{Op: ir.FilterVarTextMatchesOp, Value: op.varName, Args: args} - case "Node.Is": - return ir.FilterExpr{Op: ir.FilterVarNodeIsOp, Value: op.varName, Args: args} - case "Node.Parent.Is": - if op.varName != "$$" { - // TODO: remove this restriction. - panic(conv.errorf(e.Args[0], "only $$ parent nodes are implemented")) - } - return ir.FilterExpr{Op: ir.FilterRootNodeParentIsOp, Args: args} - case "Object.Is": - return ir.FilterExpr{Op: ir.FilterVarObjectIsOp, Value: op.varName, Args: args} - case "Object.IsGlobal": - return ir.FilterExpr{Op: ir.FilterVarObjectIsGlobalOp, Value: op.varName} - case "Object.IsVariadicParam": - return ir.FilterExpr{Op: ir.FilterVarObjectIsVariadicParamOp, Value: op.varName} - case "SinkType.Is": - if op.varName != "$$" { - // TODO: remove this restriction. 
- panic(conv.errorf(e.Args[0], "sink type is only implemented for $$ var")) - } - return ir.FilterExpr{Op: ir.FilterRootSinkTypeIsOp, Value: op.varName, Args: args} - case "Type.HasPointers": - return ir.FilterExpr{Op: ir.FilterVarTypeHasPointersOp, Value: op.varName} - case "Type.Is": - return ir.FilterExpr{Op: ir.FilterVarTypeIsOp, Value: op.varName, Args: args} - case "Type.Underlying.Is": - return ir.FilterExpr{Op: ir.FilterVarTypeUnderlyingIsOp, Value: op.varName, Args: args} - case "Type.OfKind": - return ir.FilterExpr{Op: ir.FilterVarTypeOfKindOp, Value: op.varName, Args: args} - case "Type.Underlying.OfKind": - return ir.FilterExpr{Op: ir.FilterVarTypeUnderlyingOfKindOp, Value: op.varName, Args: args} - case "Type.ConvertibleTo": - return ir.FilterExpr{Op: ir.FilterVarTypeConvertibleToOp, Value: op.varName, Args: args} - case "Type.AssignableTo": - return ir.FilterExpr{Op: ir.FilterVarTypeAssignableToOp, Value: op.varName, Args: args} - case "Type.Implements": - return ir.FilterExpr{Op: ir.FilterVarTypeImplementsOp, Value: op.varName, Args: args} - case "Type.HasMethod": - return ir.FilterExpr{Op: ir.FilterVarTypeHasMethodOp, Value: op.varName, Args: args} - } - } - - return ir.FilterExpr{} -} - -func (conv *converter) parseStringArg(e ast.Expr) string { - s, ok := conv.toStringValue(e) - if !ok { - panic(conv.errorf(e, "expected a string literal argument")) - } - return s -} - -func (conv *converter) toStringValue(x ast.Node) (string, bool) { - switch x := x.(type) { - case *ast.BasicLit: - if x.Kind != token.STRING { - return "", false - } - s, err := strconv.Unquote(x.Value) - if err != nil { - return "", false - } - return s, true - case ast.Expr: - typ, ok := conv.types.Types[x] - if !ok || typ.Type.String() != "string" { - return "", false - } - str := constant.StringVal(typ.Value) - return str, true - } - return "", false -} - -func (conv *converter) inspectFilterSelector(e ast.Expr) filterExprSelector { - var o filterExprSelector - - if call, ok := e.(*ast.CallExpr); ok { - o.args = call.Args - e = call.Fun - } - var path string - for { - if call, ok := e.(*ast.CallExpr); ok { - e = call.Fun - continue - } - selector, ok := e.(*ast.SelectorExpr) - if !ok { - break - } - if path == "" { - path = selector.Sel.Name - } else { - path = selector.Sel.Name + "." + path - } - e = astutil.Unparen(selector.X) - } - - o.path = path - - indexing, ok := astutil.Unparen(e).(*ast.IndexExpr) - if !ok { - return o - } - mapIdent, ok := astutil.Unparen(indexing.X).(*ast.Ident) - if !ok { - return o - } - o.mapName = mapIdent.Name - indexString, _ := conv.toStringValue(indexing.Index) - o.varName = indexString - - return o -} - -type filterExprSelector struct { - mapName string - varName string - path string - args []ast.Expr -} diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/libdsl.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/libdsl.go deleted file mode 100644 index d0e161613..000000000 --- a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/libdsl.go +++ /dev/null @@ -1,401 +0,0 @@ -package ruleguard - -import ( - "fmt" - "go/types" - - "github.com/quasilyte/go-ruleguard/internal/xtypes" - "github.com/quasilyte/go-ruleguard/ruleguard/quasigo" -) - -// This file implements `dsl/*` packages as native functions in quasigo. -// -// Every function and method defined in any `dsl/*` package should have -// associated Go function that implements it. -// -// In quasigo, it's impossible to have a pointer to an interface and -// non-pointer struct type. 
All interface type methods have FQN without `*` prefix -// while all struct type methods always begin with `*`. -// -// Fields are readonly. -// Field access is compiled into a method call that have a name identical to the field. -// For example, `foo.Bar` field access will be compiled as `foo.Bar()`. -// This may change in the future; benchmarks are needed to figure out -// what is more efficient: reflect-based field access or a function call. -// -// To keep this code organized, every type and package functions are represented -// as structs with methods. Then we bind a method value to quasigo symbol. -// The naming scheme is `dsl{$name}Package` for packages and `dsl{$pkg}{$name}` for types. - -func initEnv(state *engineState, env *quasigo.Env) { - nativeTypes := map[string]quasigoNative{ - `*github.com/quasilyte/go-ruleguard/dsl.MatchedText`: dslMatchedText{}, - `*github.com/quasilyte/go-ruleguard/dsl.DoVar`: dslDoVar{}, - `*github.com/quasilyte/go-ruleguard/dsl.DoContext`: dslDoContext{}, - `*github.com/quasilyte/go-ruleguard/dsl.VarFilterContext`: dslVarFilterContext{state: state}, - `github.com/quasilyte/go-ruleguard/dsl/types.Type`: dslTypesType{}, - `*github.com/quasilyte/go-ruleguard/dsl/types.Interface`: dslTypesInterface{}, - `*github.com/quasilyte/go-ruleguard/dsl/types.Pointer`: dslTypesPointer{}, - `*github.com/quasilyte/go-ruleguard/dsl/types.Struct`: dslTypesStruct{}, - `*github.com/quasilyte/go-ruleguard/dsl/types.Array`: dslTypesArray{}, - `*github.com/quasilyte/go-ruleguard/dsl/types.Slice`: dslTypesSlice{}, - `*github.com/quasilyte/go-ruleguard/dsl/types.Var`: dslTypesVar{}, - } - - for qualifier, typ := range nativeTypes { - for methodName, fn := range typ.funcs() { - env.AddNativeMethod(qualifier, methodName, fn) - } - } - - nativePackages := map[string]quasigoNative{ - `github.com/quasilyte/go-ruleguard/dsl/types`: dslTypesPackage{}, - } - - for qualifier, pkg := range nativePackages { - for funcName, fn := range pkg.funcs() { - env.AddNativeMethod(qualifier, funcName, fn) - } - } -} - -type quasigoNative interface { - funcs() map[string]func(*quasigo.ValueStack) -} - -type dslTypesType struct{} - -func (native dslTypesType) funcs() map[string]func(*quasigo.ValueStack) { - return map[string]func(*quasigo.ValueStack){ - "Underlying": native.Underlying, - "String": native.String, - } -} - -func (dslTypesType) Underlying(stack *quasigo.ValueStack) { - stack.Push(stack.Pop().(types.Type).Underlying()) -} - -func (dslTypesType) String(stack *quasigo.ValueStack) { - stack.Push(stack.Pop().(types.Type).String()) -} - -type dslTypesInterface struct{} - -func (native dslTypesInterface) funcs() map[string]func(*quasigo.ValueStack) { - return map[string]func(*quasigo.ValueStack){ - "Underlying": native.Underlying, - "String": native.String, - } -} - -func (dslTypesInterface) Underlying(stack *quasigo.ValueStack) { - stack.Push(stack.Pop().(*types.Interface).Underlying()) -} - -func (dslTypesInterface) String(stack *quasigo.ValueStack) { - stack.Push(stack.Pop().(*types.Interface).String()) -} - -type dslTypesSlice struct{} - -func (native dslTypesSlice) funcs() map[string]func(*quasigo.ValueStack) { - return map[string]func(*quasigo.ValueStack){ - "Underlying": native.Underlying, - "String": native.String, - "Elem": native.Elem, - } -} - -func (dslTypesSlice) Underlying(stack *quasigo.ValueStack) { - stack.Push(stack.Pop().(*types.Slice).Underlying()) -} - -func (dslTypesSlice) String(stack *quasigo.ValueStack) { - stack.Push(stack.Pop().(*types.Slice).String()) -} - -func 
(dslTypesSlice) Elem(stack *quasigo.ValueStack) { - stack.Push(stack.Pop().(*types.Slice).Elem()) -} - -type dslTypesArray struct{} - -func (native dslTypesArray) funcs() map[string]func(*quasigo.ValueStack) { - return map[string]func(*quasigo.ValueStack){ - "Underlying": native.Underlying, - "String": native.String, - "Elem": native.Elem, - "Len": native.Len, - } -} - -func (dslTypesArray) Underlying(stack *quasigo.ValueStack) { - stack.Push(stack.Pop().(*types.Array).Underlying()) -} - -func (dslTypesArray) String(stack *quasigo.ValueStack) { - stack.Push(stack.Pop().(*types.Array).String()) -} - -func (dslTypesArray) Elem(stack *quasigo.ValueStack) { - stack.Push(stack.Pop().(*types.Array).Elem()) -} - -func (dslTypesArray) Len(stack *quasigo.ValueStack) { - stack.PushInt(int(stack.Pop().(*types.Array).Len())) -} - -type dslTypesPointer struct{} - -func (native dslTypesPointer) funcs() map[string]func(*quasigo.ValueStack) { - return map[string]func(*quasigo.ValueStack){ - "Underlying": native.Underlying, - "String": native.String, - "Elem": native.Elem, - } -} - -func (dslTypesPointer) Underlying(stack *quasigo.ValueStack) { - stack.Push(stack.Pop().(*types.Pointer).Underlying()) -} - -func (dslTypesPointer) String(stack *quasigo.ValueStack) { - stack.Push(stack.Pop().(*types.Pointer).String()) -} - -func (dslTypesPointer) Elem(stack *quasigo.ValueStack) { - stack.Push(stack.Pop().(*types.Pointer).Elem()) -} - -type dslTypesStruct struct{} - -func (native dslTypesStruct) funcs() map[string]func(*quasigo.ValueStack) { - return map[string]func(*quasigo.ValueStack){ - "Underlying": native.Underlying, - "String": native.String, - "NumFields": native.NumFields, - "Field": native.Field, - } -} - -func (dslTypesStruct) Underlying(stack *quasigo.ValueStack) { - stack.Push(stack.Pop().(*types.Struct).Underlying()) -} - -func (dslTypesStruct) String(stack *quasigo.ValueStack) { - stack.Push(stack.Pop().(*types.Struct).String()) -} - -func (dslTypesStruct) NumFields(stack *quasigo.ValueStack) { - stack.PushInt(stack.Pop().(*types.Struct).NumFields()) -} - -func (dslTypesStruct) Field(stack *quasigo.ValueStack) { - i := stack.PopInt() - typ := stack.Pop().(*types.Struct) - stack.Push(typ.Field(i)) -} - -type dslTypesPackage struct{} - -func (native dslTypesPackage) funcs() map[string]func(*quasigo.ValueStack) { - return map[string]func(*quasigo.ValueStack){ - "Implements": native.Implements, - "Identical": native.Identical, - "NewArray": native.NewArray, - "NewSlice": native.NewSlice, - "NewPointer": native.NewPointer, - "AsArray": native.AsArray, - "AsSlice": native.AsSlice, - "AsPointer": native.AsPointer, - "AsInterface": native.AsInterface, - "AsStruct": native.AsStruct, - } -} - -func (dslTypesPackage) Implements(stack *quasigo.ValueStack) { - iface := stack.Pop().(*types.Interface) - typ := stack.Pop().(types.Type) - stack.Push(xtypes.Implements(typ, iface)) -} - -func (dslTypesPackage) Identical(stack *quasigo.ValueStack) { - y := stack.Pop().(types.Type) - x := stack.Pop().(types.Type) - stack.Push(xtypes.Identical(x, y)) -} - -func (dslTypesPackage) NewArray(stack *quasigo.ValueStack) { - length := stack.PopInt() - typ := stack.Pop().(types.Type) - stack.Push(types.NewArray(typ, int64(length))) -} - -func (dslTypesPackage) NewSlice(stack *quasigo.ValueStack) { - typ := stack.Pop().(types.Type) - stack.Push(types.NewSlice(typ)) -} - -func (dslTypesPackage) NewPointer(stack *quasigo.ValueStack) { - typ := stack.Pop().(types.Type) - stack.Push(types.NewPointer(typ)) -} - -func 
(dslTypesPackage) AsArray(stack *quasigo.ValueStack) { - typ, _ := stack.Pop().(types.Type).(*types.Array) - stack.Push(typ) -} - -func (dslTypesPackage) AsSlice(stack *quasigo.ValueStack) { - typ, _ := stack.Pop().(types.Type).(*types.Slice) - stack.Push(typ) -} - -func (dslTypesPackage) AsPointer(stack *quasigo.ValueStack) { - typ, _ := stack.Pop().(types.Type).(*types.Pointer) - stack.Push(typ) -} - -func (dslTypesPackage) AsInterface(stack *quasigo.ValueStack) { - typ, _ := stack.Pop().(types.Type).(*types.Interface) - stack.Push(typ) -} - -func (dslTypesPackage) AsStruct(stack *quasigo.ValueStack) { - typ, _ := stack.Pop().(types.Type).(*types.Struct) - stack.Push(typ) -} - -type dslTypesVar struct{} - -func (native dslTypesVar) funcs() map[string]func(*quasigo.ValueStack) { - return map[string]func(*quasigo.ValueStack){ - "Embedded": native.Embedded, - "Type": native.Type, - } -} - -func (dslTypesVar) Embedded(stack *quasigo.ValueStack) { - stack.Push(stack.Pop().(*types.Var).Embedded()) -} - -func (dslTypesVar) Type(stack *quasigo.ValueStack) { - stack.Push(stack.Pop().(*types.Var).Type()) -} - -type dslDoContext struct{} - -func (native dslDoContext) funcs() map[string]func(*quasigo.ValueStack) { - return map[string]func(*quasigo.ValueStack){ - "SetReport": native.SetReport, - "SetSuggest": native.SetSuggest, - "Var": native.Var, - } -} - -func (native dslDoContext) Var(stack *quasigo.ValueStack) { - s := stack.Pop().(string) - params := stack.Pop().(*filterParams) - stack.Push(&dslDoVarRepr{params: params, name: s}) -} - -func (native dslDoContext) SetReport(stack *quasigo.ValueStack) { - s := stack.Pop().(string) - params := stack.Pop().(*filterParams) - params.reportString = s -} - -func (native dslDoContext) SetSuggest(stack *quasigo.ValueStack) { - s := stack.Pop().(string) - params := stack.Pop().(*filterParams) - params.suggestString = s -} - -type dslMatchedText struct{} - -func (native dslMatchedText) funcs() map[string]func(*quasigo.ValueStack) { - return map[string]func(*quasigo.ValueStack){ - "String": native.String, - } -} - -func (dslMatchedText) String(stack *quasigo.ValueStack) { - fmt.Printf("%T\n", stack.Pop()) - stack.Push("ok2") -} - -type dslDoVarRepr struct { - params *filterParams - name string -} - -type dslDoVar struct{} - -func (native dslDoVar) funcs() map[string]func(*quasigo.ValueStack) { - return map[string]func(*quasigo.ValueStack){ - "Text": native.Text, - "Type": native.Type, - } -} - -func (dslDoVar) Text(stack *quasigo.ValueStack) { - v := stack.Pop().(*dslDoVarRepr) - params := v.params - stack.Push(params.nodeString(params.subNode(v.name))) -} - -func (dslDoVar) Type(stack *quasigo.ValueStack) { - v := stack.Pop().(*dslDoVarRepr) - params := v.params - stack.Push(params.typeofNode(params.subNode(v.name))) -} - -type dslVarFilterContext struct { - state *engineState -} - -func (native dslVarFilterContext) funcs() map[string]func(*quasigo.ValueStack) { - return map[string]func(*quasigo.ValueStack){ - "Type": native.Type, - "SizeOf": native.SizeOf, - "GetType": native.GetType, - "GetInterface": native.GetInterface, - } -} - -func (dslVarFilterContext) Type(stack *quasigo.ValueStack) { - params := stack.Pop().(*filterParams) - typ := params.typeofNode(params.subExpr(params.varname)) - stack.Push(typ) -} - -func (native dslVarFilterContext) SizeOf(stack *quasigo.ValueStack) { - typ := stack.Pop().(types.Type) - params := stack.Pop().(*filterParams) - stack.PushInt(int(params.ctx.Sizes.Sizeof(typ))) -} - -func (native dslVarFilterContext) 
GetType(stack *quasigo.ValueStack) { - fqn := stack.Pop().(string) - params := stack.Pop().(*filterParams) - typ, err := native.state.FindType(params.importer, params.ctx.Pkg, fqn) - if err != nil { - panic(err) - } - stack.Push(typ) -} - -func (native dslVarFilterContext) GetInterface(stack *quasigo.ValueStack) { - fqn := stack.Pop().(string) - params := stack.Pop().(*filterParams) - typ, err := native.state.FindType(params.importer, params.ctx.Pkg, fqn) - if err != nil { - panic(err) - } - if ifaceType, ok := typ.Underlying().(*types.Interface); ok { - stack.Push(ifaceType) - return - } - stack.Push((*types.Interface)(nil)) // Not found or not an interface -} diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/match_data.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/match_data.go deleted file mode 100644 index b0909f75f..000000000 --- a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/match_data.go +++ /dev/null @@ -1,19 +0,0 @@ -package ruleguard - -import ( - "go/ast" - - "github.com/quasilyte/gogrep" -) - -type matchData struct { - match gogrep.MatchData -} - -func (m matchData) Node() ast.Node { return m.match.Node } - -func (m matchData) CaptureList() []gogrep.CapturedNode { return m.match.Capture } - -func (m matchData) CapturedByName(name string) (ast.Node, bool) { - return m.match.CapturedByName(name) -} diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/nodepath.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/nodepath.go deleted file mode 100644 index 4ba741ee2..000000000 --- a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/nodepath.go +++ /dev/null @@ -1,49 +0,0 @@ -package ruleguard - -import ( - "fmt" - "go/ast" - "strings" -) - -type nodePath struct { - stack []ast.Node -} - -func newNodePath() *nodePath { - return &nodePath{stack: make([]ast.Node, 0, 32)} -} - -func (p nodePath) String() string { - parts := make([]string, len(p.stack)) - for i, n := range p.stack { - parts[i] = fmt.Sprintf("%T", n) - } - return strings.Join(parts, "/") -} - -func (p *nodePath) Parent() ast.Node { - return p.NthParent(1) -} - -func (p *nodePath) Current() ast.Node { - return p.NthParent(0) -} - -func (p *nodePath) NthParent(n int) ast.Node { - index := uint(len(p.stack) - n - 1) - if index < uint(len(p.stack)) { - return p.stack[index] - } - return nil -} - -func (p *nodePath) Len() int { return len(p.stack) } - -func (p *nodePath) Push(n ast.Node) { - p.stack = append(p.stack, n) -} - -func (p *nodePath) Pop() { - p.stack = p.stack[:len(p.stack)-1] -} diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/profiling/no_labels.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/profiling/no_labels.go deleted file mode 100644 index c5b26e230..000000000 --- a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/profiling/no_labels.go +++ /dev/null @@ -1,16 +0,0 @@ -//go:build !pproflabels -// +build !pproflabels - -package profiling - -import ( - "context" -) - -const LabelsEnabled = false - -func EnterWithLabels(origContext context.Context, name string) { -} - -func Leave(origContext context.Context) { -} diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/profiling/with_labels.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/profiling/with_labels.go deleted file mode 100644 index 6a35a13ad..000000000 --- a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/profiling/with_labels.go +++ /dev/null @@ -1,21 +0,0 @@ -//go:build pproflabels -// +build pproflabels - -package profiling - -import ( - "context" - 
"runtime/pprof" -) - -const LabelsEnabled = true - -func EnterWithLabels(origContext context.Context, name string) { - labels := pprof.Labels("rules", name) - ctx := pprof.WithLabels(origContext, labels) - pprof.SetGoroutineLabels(ctx) -} - -func Leave(origContext context.Context) { - pprof.SetGoroutineLabels(origContext) -} diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/compile.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/compile.go deleted file mode 100644 index b81fb8f1d..000000000 --- a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/compile.go +++ /dev/null @@ -1,868 +0,0 @@ -package quasigo - -import ( - "fmt" - "go/ast" - "go/constant" - "go/token" - "go/types" - - "github.com/quasilyte/go-ruleguard/ruleguard/goutil" - "golang.org/x/tools/go/ast/astutil" -) - -var voidType = &types.Tuple{} - -func compile(ctx *CompileContext, fn *ast.FuncDecl) (compiled *Func, err error) { - defer func() { - if err != nil { - return - } - rv := recover() - if rv == nil { - return - } - if compileErr, ok := rv.(compileError); ok { - err = compileErr - return - } - panic(rv) // not our panic - }() - - return compileFunc(ctx, fn), nil -} - -func compileFunc(ctx *CompileContext, fn *ast.FuncDecl) *Func { - cl := compiler{ - ctx: ctx, - fnType: ctx.Types.ObjectOf(fn.Name).Type().(*types.Signature), - constantsPool: make(map[interface{}]int), - intConstantsPool: make(map[int]int), - locals: make(map[string]int), - } - return cl.compileFunc(fn) -} - -type compiler struct { - ctx *CompileContext - - fnType *types.Signature - retType types.Type - - lastOp opcode - - locals map[string]int - constantsPool map[interface{}]int - intConstantsPool map[int]int - - params map[string]int - intParams map[string]int - - code []byte - constants []interface{} - intConstants []int - - breakTarget *label - continueTarget *label - - labels []*label -} - -type label struct { - targetPos int - sources []int -} - -type compileError string - -func (e compileError) Error() string { return string(e) } - -func (cl *compiler) compileFunc(fn *ast.FuncDecl) *Func { - switch cl.fnType.Results().Len() { - case 0: - cl.retType = voidType - case 1: - cl.retType = cl.fnType.Results().At(0).Type() - default: - panic(cl.errorf(fn.Name, "multi-result functions are not supported")) - } - - if !cl.isSupportedType(cl.retType) { - panic(cl.errorUnsupportedType(fn.Name, cl.retType, "function result")) - } - - cl.params = make(map[string]int, cl.fnType.Params().Len()) - cl.intParams = make(map[string]int, cl.fnType.Params().Len()) - for i := 0; i < cl.fnType.Params().Len(); i++ { - p := cl.fnType.Params().At(i) - paramName := p.Name() - paramType := p.Type() - if !cl.isSupportedType(paramType) { - panic(cl.errorUnsupportedType(fn.Name, paramType, paramName+" param")) - } - if typeIsInt(paramType) { - cl.intParams[paramName] = len(cl.intParams) - } else { - cl.params[paramName] = len(cl.params) - } - } - - dbg := funcDebugInfo{ - paramNames: make([]string, len(cl.params)), - intParamNames: make([]string, len(cl.intParams)), - } - for paramName, i := range cl.params { - dbg.paramNames[i] = paramName - } - for paramName, i := range cl.intParams { - dbg.intParamNames[i] = paramName - } - - cl.compileStmt(fn.Body) - if cl.retType == voidType { - cl.emit(opReturn) - } - - compiled := &Func{ - code: cl.code, - constants: cl.constants, - intConstants: cl.intConstants, - numObjectParams: len(cl.params), - numIntParams: len(cl.intParams), - name: cl.ctx.Package.Path() + "." 
+ fn.Name.String(), - } - if len(cl.locals) != 0 { - dbg.localNames = make([]string, len(cl.locals)) - for localName, localIndex := range cl.locals { - dbg.localNames[localIndex] = localName - } - } - cl.ctx.Env.debug.funcs[compiled] = dbg - cl.linkJumps() - return compiled -} - -func (cl *compiler) compileStmt(stmt ast.Stmt) { - switch stmt := stmt.(type) { - case *ast.ReturnStmt: - cl.compileReturnStmt(stmt) - - case *ast.AssignStmt: - cl.compileAssignStmt(stmt) - - case *ast.IncDecStmt: - cl.compileIncDecStmt(stmt) - - case *ast.IfStmt: - cl.compileIfStmt(stmt) - - case *ast.ForStmt: - cl.compileForStmt(stmt) - - case *ast.BranchStmt: - cl.compileBranchStmt(stmt) - - case *ast.ExprStmt: - cl.compileExprStmt(stmt) - - case *ast.BlockStmt: - for i := range stmt.List { - cl.compileStmt(stmt.List[i]) - } - - default: - panic(cl.errorf(stmt, "can't compile %T yet", stmt)) - } -} - -func (cl *compiler) compileIncDecStmt(stmt *ast.IncDecStmt) { - varname, ok := stmt.X.(*ast.Ident) - if !ok { - panic(cl.errorf(stmt.X, "can assign only to simple variables")) - } - id := cl.getLocal(varname, varname.String()) - if stmt.Tok == token.INC { - cl.emit8(opIncLocal, id) - } else { - cl.emit8(opDecLocal, id) - } -} - -func (cl *compiler) compileBranchStmt(branch *ast.BranchStmt) { - if branch.Label != nil { - panic(cl.errorf(branch.Label, "can't compile %s with a label", branch.Tok)) - } - - switch branch.Tok { - case token.BREAK: - cl.emitJump(opJump, cl.breakTarget) - default: - panic(cl.errorf(branch, "can't compile %s yet", branch.Tok)) - } -} - -func (cl *compiler) compileExprStmt(stmt *ast.ExprStmt) { - if call, ok := stmt.X.(*ast.CallExpr); ok { - sig := cl.ctx.Types.TypeOf(call.Fun).(*types.Signature) - if sig.Results() != nil { - panic(cl.errorf(call, "only void funcs can be used in stmt context")) - } - cl.compileCallExpr(call) - return - } - - panic(cl.errorf(stmt.X, "can't compile this expr stmt yet: %T", stmt.X)) -} - -func (cl *compiler) compileForStmt(stmt *ast.ForStmt) { - labelBreak := cl.newLabel() - labelContinue := cl.newLabel() - prevBreakTarget := cl.breakTarget - prevContinueTarget := cl.continueTarget - cl.breakTarget = labelBreak - cl.continueTarget = labelContinue - - switch { - case stmt.Cond != nil && stmt.Init != nil && stmt.Post != nil: - // Will be implemented later; probably when the max number of locals will be lifted. - panic(cl.errorf(stmt, "can't compile C-style for loops yet")) - - case stmt.Cond != nil && stmt.Init == nil && stmt.Post == nil: - // `for { ... }` - labelBody := cl.newLabel() - cl.emitJump(opJump, labelContinue) - cl.bindLabel(labelBody) - cl.compileStmt(stmt.Body) - cl.bindLabel(labelContinue) - cl.compileExpr(stmt.Cond) - cl.emitJump(opJumpTrue, labelBody) - cl.bindLabel(labelBreak) - - default: - // `for { ... 
}` - cl.bindLabel(labelContinue) - cl.compileStmt(stmt.Body) - cl.emitJump(opJump, labelContinue) - cl.bindLabel(labelBreak) - } - - cl.breakTarget = prevBreakTarget - cl.continueTarget = prevContinueTarget -} - -func (cl *compiler) compileIfStmt(stmt *ast.IfStmt) { - if stmt.Else == nil { - labelEnd := cl.newLabel() - cl.compileExpr(stmt.Cond) - cl.emitJump(opJumpFalse, labelEnd) - cl.compileStmt(stmt.Body) - cl.bindLabel(labelEnd) - return - } - - labelEnd := cl.newLabel() - labelElse := cl.newLabel() - cl.compileExpr(stmt.Cond) - cl.emitJump(opJumpFalse, labelElse) - cl.compileStmt(stmt.Body) - if !cl.isUncondJump(cl.lastOp) { - cl.emitJump(opJump, labelEnd) - } - cl.bindLabel(labelElse) - cl.compileStmt(stmt.Else) - cl.bindLabel(labelEnd) -} - -func (cl *compiler) compileAssignStmt(assign *ast.AssignStmt) { - if len(assign.Rhs) != 1 { - panic(cl.errorf(assign, "only single right operand is allowed in assignments")) - } - for _, lhs := range assign.Lhs { - _, ok := lhs.(*ast.Ident) - if !ok { - panic(cl.errorf(lhs, "can assign only to simple variables")) - } - } - - rhs := assign.Rhs[0] - cl.compileExpr(rhs) - - if assign.Tok == token.DEFINE { - for i := len(assign.Lhs) - 1; i >= 0; i-- { - varname := assign.Lhs[i].(*ast.Ident) - typ := cl.ctx.Types.TypeOf(varname) - if _, ok := cl.locals[varname.String()]; ok { - panic(cl.errorf(varname, "%s variable shadowing is not allowed", varname)) - } - if !cl.isSupportedType(typ) { - panic(cl.errorUnsupportedType(varname, typ, varname.String()+" local variable")) - } - if len(cl.locals) == maxFuncLocals { - panic(cl.errorf(varname, "can't define %s: too many locals", varname)) - } - id := len(cl.locals) - cl.locals[varname.String()] = id - cl.emit8(pickOp(typeIsInt(typ), opSetIntLocal, opSetLocal), id) - } - } else { - for i := len(assign.Lhs) - 1; i >= 0; i-- { - varname := assign.Lhs[i].(*ast.Ident) - typ := cl.ctx.Types.TypeOf(varname) - id := cl.getLocal(varname, varname.String()) - cl.emit8(pickOp(typeIsInt(typ), opSetIntLocal, opSetLocal), id) - } - } -} - -func (cl *compiler) isParamName(varname string) bool { - if _, ok := cl.params[varname]; ok { - return true - } - if _, ok := cl.intParams[varname]; ok { - return true - } - return false -} - -func (cl *compiler) getLocal(v ast.Expr, varname string) int { - id, ok := cl.locals[varname] - if !ok { - if cl.isParamName(varname) { - panic(cl.errorf(v, "can't assign to %s, params are readonly", varname)) - } - panic(cl.errorf(v, "%s is not a writeable local variable", varname)) - } - return id -} - -func (cl *compiler) compileReturnStmt(ret *ast.ReturnStmt) { - if cl.retType == voidType { - cl.emit(opReturn) - return - } - - if ret.Results == nil { - panic(cl.errorf(ret, "'naked' return statements are not allowed")) - } - - switch { - case identName(ret.Results[0]) == "true": - cl.emit(opReturnTrue) - case identName(ret.Results[0]) == "false": - cl.emit(opReturnFalse) - default: - cl.compileExpr(ret.Results[0]) - typ := cl.ctx.Types.TypeOf(ret.Results[0]) - cl.emit(pickOp(typeIsInt(typ), opReturnIntTop, opReturnTop)) - } -} - -func (cl *compiler) compileExpr(e ast.Expr) { - cv := cl.ctx.Types.Types[e].Value - if cv != nil { - cl.compileConstantValue(e, cv) - return - } - - switch e := e.(type) { - case *ast.ParenExpr: - cl.compileExpr(e.X) - - case *ast.Ident: - cl.compileIdent(e) - - case *ast.SelectorExpr: - cl.compileSelectorExpr(e) - - case *ast.UnaryExpr: - switch e.Op { - case token.NOT: - cl.compileUnaryOp(opNot, e) - default: - panic(cl.errorf(e, "can't compile unary %s yet", e.Op)) 
- } - - case *ast.SliceExpr: - cl.compileSliceExpr(e) - - case *ast.BinaryExpr: - cl.compileBinaryExpr(e) - - case *ast.CallExpr: - cl.compileCallExpr(e) - - default: - panic(cl.errorf(e, "can't compile %T yet", e)) - } -} - -func (cl *compiler) compileSelectorExpr(e *ast.SelectorExpr) { - typ := cl.ctx.Types.TypeOf(e.X) - key := funcKey{ - name: e.Sel.String(), - qualifier: typ.String(), - } - - if funcID, ok := cl.ctx.Env.nameToNativeFuncID[key]; ok { - cl.compileExpr(e.X) - cl.emit16(opCallNative, int(funcID)) - return - } - - panic(cl.errorf(e, "can't compile %s field access", e.Sel)) -} - -func (cl *compiler) compileBinaryExpr(e *ast.BinaryExpr) { - typ := cl.ctx.Types.TypeOf(e.X) - - switch e.Op { - case token.LOR: - cl.compileOr(e) - case token.LAND: - cl.compileAnd(e) - - case token.NEQ: - switch { - case identName(e.X) == "nil": - cl.compileExpr(e.Y) - cl.emit(opIsNotNil) - case identName(e.Y) == "nil": - cl.compileExpr(e.X) - cl.emit(opIsNotNil) - case typeIsString(typ): - cl.compileBinaryOp(opNotEqString, e) - case typeIsInt(typ): - cl.compileBinaryOp(opNotEqInt, e) - default: - panic(cl.errorf(e, "!= is not implemented for %s operands", typ)) - } - case token.EQL: - switch { - case identName(e.X) == "nil": - cl.compileExpr(e.Y) - cl.emit(opIsNil) - case identName(e.Y) == "nil": - cl.compileExpr(e.X) - cl.emit(opIsNil) - case typeIsString(cl.ctx.Types.TypeOf(e.X)): - cl.compileBinaryOp(opEqString, e) - case typeIsInt(cl.ctx.Types.TypeOf(e.X)): - cl.compileBinaryOp(opEqInt, e) - default: - panic(cl.errorf(e, "== is not implemented for %s operands", typ)) - } - - case token.GTR: - cl.compileIntBinaryOp(e, opGtInt, typ) - case token.GEQ: - cl.compileIntBinaryOp(e, opGtEqInt, typ) - case token.LSS: - cl.compileIntBinaryOp(e, opLtInt, typ) - case token.LEQ: - cl.compileIntBinaryOp(e, opLtEqInt, typ) - - case token.ADD: - switch { - case typeIsString(typ): - cl.compileBinaryOp(opConcat, e) - case typeIsInt(typ): - cl.compileBinaryOp(opAdd, e) - default: - panic(cl.errorf(e, "+ is not implemented for %s operands", typ)) - } - - case token.SUB: - cl.compileIntBinaryOp(e, opSub, typ) - - default: - panic(cl.errorf(e, "can't compile binary %s yet", e.Op)) - } -} - -func (cl *compiler) compileIntBinaryOp(e *ast.BinaryExpr, op opcode, typ types.Type) { - switch { - case typeIsInt(typ): - cl.compileBinaryOp(op, e) - default: - panic(cl.errorf(e, "%s is not implemented for %s operands", e.Op, typ)) - } -} - -func (cl *compiler) compileSliceExpr(slice *ast.SliceExpr) { - if slice.Slice3 { - panic(cl.errorf(slice, "can't compile 3-index slicing")) - } - - // No need to do slicing, its no-op `s[:]`. 
- if slice.Low == nil && slice.High == nil { - cl.compileExpr(slice.X) - return - } - - sliceOp := opStringSlice - sliceFromOp := opStringSliceFrom - sliceToOp := opStringSliceTo - - if !typeIsString(cl.ctx.Types.TypeOf(slice.X)) { - panic(cl.errorf(slice.X, "can't compile slicing of something that is not a string")) - } - - switch { - case slice.Low == nil && slice.High != nil: - cl.compileExpr(slice.X) - cl.compileExpr(slice.High) - cl.emit(sliceToOp) - case slice.Low != nil && slice.High == nil: - cl.compileExpr(slice.X) - cl.compileExpr(slice.Low) - cl.emit(sliceFromOp) - default: - cl.compileExpr(slice.X) - cl.compileExpr(slice.Low) - cl.compileExpr(slice.High) - cl.emit(sliceOp) - } -} - -func (cl *compiler) compileBuiltinCall(fn *ast.Ident, call *ast.CallExpr) { - switch fn.Name { - case `len`: - s := call.Args[0] - cl.compileExpr(s) - if !typeIsString(cl.ctx.Types.TypeOf(s)) { - panic(cl.errorf(s, "can't compile len() with non-string argument yet")) - } - cl.emit(opStringLen) - - case `println`: - if len(call.Args) != 1 { - panic(cl.errorf(call, "only 1-arg form of println() is supported")) - } - funcName := "Print" - if typeIsInt(cl.ctx.Types.TypeOf(call.Args[0])) { - funcName = "PrintInt" - } - key := funcKey{qualifier: "builtin", name: funcName} - if !cl.compileNativeCall(key, 0, nil, call.Args) { - panic(cl.errorf(fn, "builtin.%s native func is not registered", funcName)) - } - - default: - panic(cl.errorf(fn, "can't compile %s() builtin function call yet", fn)) - } -} - -func (cl *compiler) compileCallExpr(call *ast.CallExpr) { - if id, ok := astutil.Unparen(call.Fun).(*ast.Ident); ok { - _, isBuiltin := cl.ctx.Types.ObjectOf(id).(*types.Builtin) - if isBuiltin { - cl.compileBuiltinCall(id, call) - return - } - } - - expr, fn := goutil.ResolveFunc(cl.ctx.Types, call.Fun) - if fn == nil { - panic(cl.errorf(call.Fun, "can't resolve the called function")) - } - - // TODO: just use Func.FullName as a key? - key := funcKey{name: fn.Name()} - sig := fn.Type().(*types.Signature) - if sig.Recv() != nil { - key.qualifier = sig.Recv().Type().String() - } else { - key.qualifier = fn.Pkg().Path() - } - variadic := 0 - if sig.Variadic() { - variadic = sig.Params().Len() - 1 - } - if expr != nil { - cl.compileExpr(expr) - } - if cl.compileNativeCall(key, variadic, expr, call.Args) { - return - } - if cl.compileCall(key, sig, call.Args) { - return - } - panic(cl.errorf(call.Fun, "can't compile a call to %s func", key)) -} - -func (cl *compiler) compileCall(key funcKey, sig *types.Signature, args []ast.Expr) bool { - if sig.Variadic() { - return false - } - - funcID, ok := cl.ctx.Env.nameToFuncID[key] - if !ok { - return false - } - - for _, arg := range args { - cl.compileExpr(arg) - } - - var op opcode - if sig.Results().Len() == 0 { - op = opVoidCall - } else if typeIsInt(sig.Results().At(0).Type()) { - op = opIntCall - } else { - op = opCall - } - - cl.emit16(op, int(funcID)) - return true -} - -func (cl *compiler) compileNativeCall(key funcKey, variadic int, funcExpr ast.Expr, args []ast.Expr) bool { - funcID, ok := cl.ctx.Env.nameToNativeFuncID[key] - if !ok { - return false - } - - if len(args) == 1 { - // Check that it's not a f(g()) call, where g() returns - // a multi-value result; we can't compile that yet. 
- if call, ok := args[0].(*ast.CallExpr); ok { - results := cl.ctx.Types.TypeOf(call.Fun).(*types.Signature).Results() - if results != nil && results.Len() > 1 { - panic(cl.errorf(args[0], "can't pass tuple as a func argument")) - } - } - } - - normalArgs := args - var variadicArgs []ast.Expr - if variadic != 0 { - normalArgs = args[:variadic] - variadicArgs = args[variadic:] - } - - for _, arg := range normalArgs { - cl.compileExpr(arg) - } - if variadic != 0 { - for _, arg := range variadicArgs { - cl.compileExpr(arg) - // int-typed values should appear in the interface{}-typed - // objects slice, so we get all variadic args placed in one place. - if typeIsInt(cl.ctx.Types.TypeOf(arg)) { - cl.emit(opConvIntToIface) - } - } - if len(variadicArgs) > 255 { - panic(cl.errorf(funcExpr, "too many variadic args")) - } - // Even if len(variadicArgs) is 0, we still need to overwrite - // the old variadicLen value, so the variadic func is not confused - // by some unrelated value. - cl.emit8(opSetVariadicLen, len(variadicArgs)) - } - - cl.emit16(opCallNative, int(funcID)) - return true -} - -func (cl *compiler) compileUnaryOp(op opcode, e *ast.UnaryExpr) { - cl.compileExpr(e.X) - cl.emit(op) -} - -func (cl *compiler) compileBinaryOp(op opcode, e *ast.BinaryExpr) { - cl.compileExpr(e.X) - cl.compileExpr(e.Y) - cl.emit(op) -} - -func (cl *compiler) compileOr(e *ast.BinaryExpr) { - labelEnd := cl.newLabel() - cl.compileExpr(e.X) - cl.emit(opDup) - cl.emitJump(opJumpTrue, labelEnd) - cl.compileExpr(e.Y) - cl.bindLabel(labelEnd) -} - -func (cl *compiler) compileAnd(e *ast.BinaryExpr) { - labelEnd := cl.newLabel() - cl.compileExpr(e.X) - cl.emit(opDup) - cl.emitJump(opJumpFalse, labelEnd) - cl.compileExpr(e.Y) - cl.bindLabel(labelEnd) -} - -func (cl *compiler) compileIdent(ident *ast.Ident) { - tv := cl.ctx.Types.Types[ident] - cv := tv.Value - if cv != nil { - cl.compileConstantValue(ident, cv) - return - } - if paramIndex, ok := cl.params[ident.String()]; ok { - cl.emit8(opPushParam, paramIndex) - return - } - if paramIndex, ok := cl.intParams[ident.String()]; ok { - cl.emit8(opPushIntParam, paramIndex) - return - } - if localIndex, ok := cl.locals[ident.String()]; ok { - cl.emit8(pickOp(typeIsInt(tv.Type), opPushIntLocal, opPushLocal), localIndex) - return - } - - panic(cl.errorf(ident, "can't compile a %s (type %s) variable read", ident.String(), tv.Type)) -} - -func (cl *compiler) compileConstantValue(source ast.Expr, cv constant.Value) { - switch cv.Kind() { - case constant.Bool: - v := constant.BoolVal(cv) - if v { - cl.emit(opPushTrue) - } else { - cl.emit(opPushFalse) - } - - case constant.String: - v := constant.StringVal(cv) - id := cl.internConstant(v) - cl.emit8(opPushConst, id) - - case constant.Int: - v, exact := constant.Int64Val(cv) - if !exact { - panic(cl.errorf(source, "non-exact int value")) - } - id := cl.internIntConstant(int(v)) - cl.emit8(opPushIntConst, id) - - case constant.Complex: - panic(cl.errorf(source, "can't compile complex number constants yet")) - - case constant.Float: - panic(cl.errorf(source, "can't compile float constants yet")) - - default: - panic(cl.errorf(source, "unexpected constant %v", cv)) - } -} - -func (cl *compiler) internIntConstant(v int) int { - if id, ok := cl.intConstantsPool[v]; ok { - return id - } - id := len(cl.intConstants) - cl.intConstants = append(cl.intConstants, v) - cl.intConstantsPool[v] = id - return id -} - -func (cl *compiler) internConstant(v interface{}) int { - if _, ok := v.(int); ok { - panic("compiler error: int constant 
interned as interface{}") - } - if id, ok := cl.constantsPool[v]; ok { - return id - } - id := len(cl.constants) - cl.constants = append(cl.constants, v) - cl.constantsPool[v] = id - return id -} - -func (cl *compiler) linkJumps() { - for _, l := range cl.labels { - for _, jumpPos := range l.sources { - offset := l.targetPos - jumpPos - patchPos := jumpPos + 1 - put16(cl.code, patchPos, offset) - } - } -} - -func (cl *compiler) newLabel() *label { - l := &label{} - cl.labels = append(cl.labels, l) - return l -} - -func (cl *compiler) bindLabel(l *label) { - l.targetPos = len(cl.code) -} - -func (cl *compiler) emit(op opcode) { - cl.lastOp = op - cl.code = append(cl.code, byte(op)) -} - -func (cl *compiler) emitJump(op opcode, l *label) { - l.sources = append(l.sources, len(cl.code)) - cl.emit(op) - cl.code = append(cl.code, 0, 0) -} - -func (cl *compiler) emit8(op opcode, arg8 int) { - cl.emit(op) - cl.code = append(cl.code, byte(arg8)) -} - -func (cl *compiler) emit16(op opcode, arg16 int) { - cl.emit(op) - buf := make([]byte, 2) - put16(buf, 0, arg16) - cl.code = append(cl.code, buf...) -} - -func (cl *compiler) errorUnsupportedType(e ast.Node, typ types.Type, where string) compileError { - return cl.errorf(e, "%s type: %s is not supported, try something simpler", where, typ) -} - -func (cl *compiler) errorf(n ast.Node, format string, args ...interface{}) compileError { - loc := cl.ctx.Fset.Position(n.Pos()) - message := fmt.Sprintf("%s:%d: %s", loc.Filename, loc.Line, fmt.Sprintf(format, args...)) - return compileError(message) -} - -func (cl *compiler) isUncondJump(op opcode) bool { - switch op { - case opJump, opReturnFalse, opReturnTrue, opReturnTop, opReturnIntTop: - return true - default: - return false - } -} - -func (cl *compiler) isSupportedType(typ types.Type) bool { - if typ == voidType { - return true - } - - switch typ := typ.Underlying().(type) { - case *types.Pointer: - // 1. Pointers to structs are supported. - _, isStruct := typ.Elem().Underlying().(*types.Struct) - return isStruct - - case *types.Basic: - // 2. Some of the basic types are supported. - // TODO: support byte/uint8 and maybe float64. - switch typ.Kind() { - case types.Bool, types.Int, types.String: - return true - default: - return false - } - - case *types.Interface: - // 3. Interfaces are supported. - return true - - default: - return false - } -} diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/debug_info.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/debug_info.go deleted file mode 100644 index 057c02bc1..000000000 --- a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/debug_info.go +++ /dev/null @@ -1,17 +0,0 @@ -package quasigo - -type debugInfo struct { - funcs map[*Func]funcDebugInfo -} - -type funcDebugInfo struct { - paramNames []string - intParamNames []string - localNames []string -} - -func newDebugInfo() *debugInfo { - return &debugInfo{ - funcs: make(map[*Func]funcDebugInfo), - } -} diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/disasm.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/disasm.go deleted file mode 100644 index cafc9ed5e..000000000 --- a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/disasm.go +++ /dev/null @@ -1,84 +0,0 @@ -package quasigo - -import ( - "fmt" - "strings" -) - -// TODO(quasilyte): generate extra opcode info so we can simplify disasm function? 
- -func disasm(env *Env, fn *Func) string { - var out strings.Builder - - dbg, ok := env.debug.funcs[fn] - if !ok { - return "\n" - } - - code := fn.code - labels := map[int]string{} - walkBytecode(code, func(pc int, op opcode) { - switch op { - case opJumpTrue, opJumpFalse, opJump: - offset := decode16(code, pc+1) - targetPC := pc + offset - if _, ok := labels[targetPC]; !ok { - labels[targetPC] = fmt.Sprintf("L%d", len(labels)) - } - } - }) - - walkBytecode(code, func(pc int, op opcode) { - if l := labels[pc]; l != "" { - fmt.Fprintf(&out, "%s:\n", l) - } - var arg interface{} - var comment string - switch op { - case opCallNative: - id := decode16(code, pc+1) - arg = id - comment = env.nativeFuncs[id].name - case opCall, opIntCall, opVoidCall: - id := decode16(code, pc+1) - arg = id - comment = env.userFuncs[id].name - case opPushParam: - index := int(code[pc+1]) - arg = index - comment = dbg.paramNames[index] - case opPushIntParam: - index := int(code[pc+1]) - arg = index - comment = dbg.intParamNames[index] - case opSetLocal, opSetIntLocal, opPushLocal, opPushIntLocal, opIncLocal, opDecLocal: - index := int(code[pc+1]) - arg = index - comment = dbg.localNames[index] - case opSetVariadicLen: - arg = int(code[pc+1]) - case opPushConst: - arg = int(code[pc+1]) - comment = fmt.Sprintf("value=%#v", fn.constants[code[pc+1]]) - case opPushIntConst: - arg = int(code[pc+1]) - comment = fmt.Sprintf("value=%#v", fn.intConstants[code[pc+1]]) - case opJumpTrue, opJumpFalse, opJump: - offset := decode16(code, pc+1) - targetPC := pc + offset - arg = offset - comment = labels[targetPC] - } - - if comment != "" { - comment = " # " + comment - } - if arg == nil { - fmt.Fprintf(&out, " %s%s\n", op, comment) - } else { - fmt.Fprintf(&out, " %s %#v%s\n", op, arg, comment) - } - }) - - return out.String() -} diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/env.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/env.go deleted file mode 100644 index 0e2a450b1..000000000 --- a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/env.go +++ /dev/null @@ -1,42 +0,0 @@ -package quasigo - -type funcKey struct { - qualifier string - name string -} - -func (k funcKey) String() string { - if k.qualifier != "" { - return k.qualifier + "." + k.name - } - return k.name -} - -type nativeFunc struct { - mappedFunc func(*ValueStack) - name string // Needed for the readable disasm -} - -func newEnv() *Env { - return &Env{ - nameToNativeFuncID: make(map[funcKey]uint16), - nameToFuncID: make(map[funcKey]uint16), - - debug: newDebugInfo(), - } -} - -func (env *Env) addNativeFunc(key funcKey, f func(*ValueStack)) { - id := len(env.nativeFuncs) - env.nativeFuncs = append(env.nativeFuncs, nativeFunc{ - mappedFunc: f, - name: key.String(), - }) - env.nameToNativeFuncID[key] = uint16(id) -} - -func (env *Env) addFunc(key funcKey, f *Func) { - id := len(env.userFuncs) - env.userFuncs = append(env.userFuncs, f) - env.nameToFuncID[key] = uint16(id) -} diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/eval.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/eval.go deleted file mode 100644 index 311da15ad..000000000 --- a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/eval.go +++ /dev/null @@ -1,265 +0,0 @@ -package quasigo - -import ( - "fmt" - "reflect" -) - -const maxFuncLocals = 8 - -// pop2 removes the two top stack elements and returns them. 
-// -// Note that it returns the popped elements in the reverse order -// to make it easier to map the order in which they were pushed. -func (s *ValueStack) pop2() (second, top interface{}) { - x := s.objects[len(s.objects)-2] - y := s.objects[len(s.objects)-1] - s.objects = s.objects[:len(s.objects)-2] - return x, y -} - -func (s *ValueStack) popInt2() (second, top int) { - x := s.ints[len(s.ints)-2] - y := s.ints[len(s.ints)-1] - s.ints = s.ints[:len(s.ints)-2] - return x, y -} - -// top returns top of the stack without popping it. -func (s *ValueStack) top() interface{} { return s.objects[len(s.objects)-1] } - -func (s *ValueStack) topInt() int { return s.ints[len(s.ints)-1] } - -// dup copies the top stack element. -// Identical to s.Push(s.Top()), but more concise. -func (s *ValueStack) dup() { s.objects = append(s.objects, s.objects[len(s.objects)-1]) } - -// discard drops the top stack element. -// Identical to s.Pop() without using the result. -func (s *ValueStack) discard() { s.objects = s.objects[:len(s.objects)-1] } - -func eval(env *EvalEnv, fn *Func, top, intTop int) CallResult { - pc := 0 - code := fn.code - stack := &env.Stack - var locals [maxFuncLocals]interface{} - var intLocals [maxFuncLocals]int - - for { - switch op := opcode(code[pc]); op { - case opPushParam: - index := int(code[pc+1]) - stack.Push(stack.objects[top+index]) - pc += 2 - case opPushIntParam: - index := int(code[pc+1]) - stack.PushInt(stack.ints[intTop+index]) - pc += 2 - - case opPushLocal: - index := code[pc+1] - stack.Push(locals[index]) - pc += 2 - case opPushIntLocal: - index := code[pc+1] - stack.PushInt(intLocals[index]) - pc += 2 - - case opSetLocal: - index := code[pc+1] - locals[index] = stack.Pop() - pc += 2 - case opSetIntLocal: - index := code[pc+1] - intLocals[index] = stack.PopInt() - pc += 2 - - case opIncLocal: - index := code[pc+1] - intLocals[index]++ - pc += 2 - case opDecLocal: - index := code[pc+1] - intLocals[index]-- - pc += 2 - - case opPop: - stack.discard() - pc++ - case opDup: - stack.dup() - pc++ - - case opPushConst: - id := code[pc+1] - stack.Push(fn.constants[id]) - pc += 2 - case opPushIntConst: - id := code[pc+1] - stack.PushInt(fn.intConstants[id]) - pc += 2 - - case opConvIntToIface: - stack.Push(stack.PopInt()) - pc++ - - case opPushTrue: - stack.Push(true) - pc++ - case opPushFalse: - stack.Push(false) - pc++ - - case opReturnTrue: - return CallResult{value: true} - case opReturnFalse: - return CallResult{value: false} - case opReturnTop: - return CallResult{value: stack.top()} - case opReturnIntTop: - return CallResult{scalarValue: uint64(stack.topInt())} - case opReturn: - return CallResult{} - - case opSetVariadicLen: - stack.variadicLen = int(code[pc+1]) - pc += 2 - case opCallNative: - id := decode16(code, pc+1) - fn := env.nativeFuncs[id].mappedFunc - fn(stack) - pc += 3 - case opCall: - id := decode16(code, pc+1) - fn := env.userFuncs[id] - result := eval(env, fn, len(stack.objects)-fn.numObjectParams, len(stack.ints)-fn.numIntParams) - stack.Push(result.Value()) - pc += 3 - case opIntCall: - id := decode16(code, pc+1) - fn := env.userFuncs[id] - result := eval(env, fn, len(stack.objects)-fn.numObjectParams, len(stack.ints)-fn.numIntParams) - stack.PushInt(result.IntValue()) - pc += 3 - case opVoidCall: - id := decode16(code, pc+1) - fn := env.userFuncs[id] - eval(env, fn, len(stack.objects)-fn.numObjectParams, len(stack.ints)-fn.numIntParams) - pc += 3 - - case opJump: - offset := decode16(code, pc+1) - pc += offset - - case opJumpFalse: - if 
!stack.Pop().(bool) { - offset := decode16(code, pc+1) - pc += offset - } else { - pc += 3 - } - case opJumpTrue: - if stack.Pop().(bool) { - offset := decode16(code, pc+1) - pc += offset - } else { - pc += 3 - } - - case opNot: - stack.Push(!stack.Pop().(bool)) - pc++ - - case opConcat: - x, y := stack.pop2() - stack.Push(x.(string) + y.(string)) - pc++ - - case opAdd: - x, y := stack.popInt2() - stack.PushInt(x + y) - pc++ - - case opSub: - x, y := stack.popInt2() - stack.PushInt(x - y) - pc++ - - case opEqInt: - x, y := stack.popInt2() - stack.Push(x == y) - pc++ - - case opNotEqInt: - x, y := stack.popInt2() - stack.Push(x != y) - pc++ - - case opGtInt: - x, y := stack.popInt2() - stack.Push(x > y) - pc++ - - case opGtEqInt: - x, y := stack.popInt2() - stack.Push(x >= y) - pc++ - - case opLtInt: - x, y := stack.popInt2() - stack.Push(x < y) - pc++ - - case opLtEqInt: - x, y := stack.popInt2() - stack.Push(x <= y) - pc++ - - case opEqString: - x, y := stack.pop2() - stack.Push(x.(string) == y.(string)) - pc++ - - case opNotEqString: - x, y := stack.pop2() - stack.Push(x.(string) != y.(string)) - pc++ - - case opIsNil: - x := stack.Pop() - stack.Push(x == nil || reflect.ValueOf(x).IsNil()) - pc++ - - case opIsNotNil: - x := stack.Pop() - stack.Push(x != nil && !reflect.ValueOf(x).IsNil()) - pc++ - - case opStringSlice: - to := stack.PopInt() - from := stack.PopInt() - s := stack.Pop().(string) - stack.Push(s[from:to]) - pc++ - - case opStringSliceFrom: - from := stack.PopInt() - s := stack.Pop().(string) - stack.Push(s[from:]) - pc++ - - case opStringSliceTo: - to := stack.PopInt() - s := stack.Pop().(string) - stack.Push(s[:to]) - pc++ - - case opStringLen: - stack.PushInt(len(stack.Pop().(string))) - pc++ - - default: - panic(fmt.Sprintf("malformed bytecode: unexpected %s found", op)) - } - } -} diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/gen_opcodes.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/gen_opcodes.go deleted file mode 100644 index 80386df28..000000000 --- a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/gen_opcodes.go +++ /dev/null @@ -1,192 +0,0 @@ -//go:build main -// +build main - -package main - -import ( - "bytes" - "fmt" - "go/format" - "log" - "os" - "strings" - "text/template" -) - -var opcodePrototypes = []opcodeProto{ - {"Pop", "op", "(value) -> ()"}, - {"Dup", "op", "(x) -> (x x)"}, - - {"PushParam", "op index:u8", "() -> (value)"}, - {"PushIntParam", "op index:u8", "() -> (value:int)"}, - {"PushLocal", "op index:u8", "() -> (value)"}, - {"PushIntLocal", "op index:u8", "() -> (value:int)"}, - {"PushFalse", "op", "() -> (false)"}, - {"PushTrue", "op", "() -> (true)"}, - {"PushConst", "op constid:u8", "() -> (const)"}, - {"PushIntConst", "op constid:u8", "() -> (const:int)"}, - - {"ConvIntToIface", "op", "(value:int) -> (value)"}, - - {"SetLocal", "op index:u8", "(value) -> ()"}, - {"SetIntLocal", "op index:u8", "(value:int) -> ()"}, - {"IncLocal", "op index:u8", stackUnchanged}, - {"DecLocal", "op index:u8", stackUnchanged}, - - {"ReturnTop", "op", "(value) -> (value)"}, - {"ReturnIntTop", "op", "(value) -> (value)"}, - {"ReturnFalse", "op", stackUnchanged}, - {"ReturnTrue", "op", stackUnchanged}, - {"Return", "op", stackUnchanged}, - - {"Jump", "op offset:i16", stackUnchanged}, - {"JumpFalse", "op offset:i16", "(cond:bool) -> ()"}, - {"JumpTrue", "op offset:i16", "(cond:bool) -> ()"}, - - {"SetVariadicLen", "op len:u8", stackUnchanged}, - {"CallNative", "op funcid:u16", "(args...) 
-> (results...)"}, - {"Call", "op funcid:u16", "(args...) -> (result)"}, - {"IntCall", "op funcid:u16", "(args...) -> (result:int)"}, - {"VoidCall", "op funcid:u16", "(args...) -> ()"}, - - {"IsNil", "op", "(value) -> (result:bool)"}, - {"IsNotNil", "op", "(value) -> (result:bool)"}, - - {"Not", "op", "(value:bool) -> (result:bool)"}, - - {"EqInt", "op", "(x:int y:int) -> (result:bool)"}, - {"NotEqInt", "op", "(x:int y:int) -> (result:bool)"}, - {"GtInt", "op", "(x:int y:int) -> (result:bool)"}, - {"GtEqInt", "op", "(x:int y:int) -> (result:bool)"}, - {"LtInt", "op", "(x:int y:int) -> (result:bool)"}, - {"LtEqInt", "op", "(x:int y:int) -> (result:bool)"}, - - {"EqString", "op", "(x:string y:string) -> (result:bool)"}, - {"NotEqString", "op", "(x:string y:string) -> (result:bool)"}, - - {"Concat", "op", "(x:string y:string) -> (result:string)"}, - {"Add", "op", "(x:int y:int) -> (result:int)"}, - {"Sub", "op", "(x:int y:int) -> (result:int)"}, - - {"StringSlice", "op", "(s:string from:int to:int) -> (result:string)"}, - {"StringSliceFrom", "op", "(s:string from:int) -> (result:string)"}, - {"StringSliceTo", "op", "(s:string to:int) -> (result:string)"}, - {"StringLen", "op", "(s:string) -> (result:int)"}, -} - -type opcodeProto struct { - name string - enc string - stack string -} - -type encodingInfo struct { - width int - parts int -} - -type opcodeInfo struct { - Opcode byte - Name string - Enc string - EncString string - Stack string - Width int -} - -const stackUnchanged = "" - -var fileTemplate = template.Must(template.New("opcodes.go").Parse(`// Code generated "gen_opcodes.go"; DO NOT EDIT. - -package quasigo - -//go:generate stringer -type=opcode -trimprefix=op -type opcode byte - -const ( - opInvalid opcode = 0 -{{ range .Opcodes }} - // Encoding: {{.EncString}} - // Stack effect: {{ if .Stack}}{{.Stack}}{{else}}unchanged{{end}} - op{{ .Name }} opcode = {{.Opcode}} -{{ end -}} -) - -type opcodeInfo struct { - width int -} - -var opcodeInfoTable = [256]opcodeInfo{ - opInvalid: {width: 1}, - -{{ range .Opcodes -}} - op{{.Name}}: {width: {{.Width}}}, -{{ end }} -} -`)) - -func main() { - opcodes := make([]opcodeInfo, len(opcodePrototypes)) - for i, proto := range opcodePrototypes { - opcode := byte(i + 1) - encInfo := decodeEnc(proto.enc) - var encString string - if encInfo.parts == 1 { - encString = fmt.Sprintf("0x%02x (width=%d)", opcode, encInfo.width) - } else { - encString = fmt.Sprintf("0x%02x %s (width=%d)", - opcode, strings.TrimPrefix(proto.enc, "op "), encInfo.width) - } - - opcodes[i] = opcodeInfo{ - Opcode: opcode, - Name: proto.name, - Enc: proto.enc, - EncString: encString, - Stack: proto.stack, - Width: encInfo.width, - } - } - - var buf bytes.Buffer - err := fileTemplate.Execute(&buf, map[string]interface{}{ - "Opcodes": opcodes, - }) - if err != nil { - log.Panicf("execute template: %v", err) - } - writeFile("opcodes.gen.go", buf.Bytes()) -} - -func decodeEnc(enc string) encodingInfo { - fields := strings.Fields(enc) - width := 0 - for _, f := range fields { - parts := strings.Split(f, ":") - var typ string - if len(parts) == 2 { - typ = parts[1] - } else { - typ = "u8" - } - switch typ { - case "i8", "u8": - width++ - case "i16", "u16": - width += 2 - default: - panic(fmt.Sprintf("unknown op argument type: %s", typ)) - } - } - return encodingInfo{width: width, parts: len(fields)} -} - -func writeFile(filename string, data []byte) { - pretty, err := format.Source(data) - if err != nil { - log.Panicf("gofmt: %v", err) - } - if err := os.WriteFile(filename, pretty, 
0666); err != nil { - log.Panicf("write %s: %v", filename, err) - } -} diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/opcode_string.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/opcode_string.go deleted file mode 100644 index 3136214bb..000000000 --- a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/opcode_string.go +++ /dev/null @@ -1,69 +0,0 @@ -// Code generated by "stringer -type=opcode -trimprefix=op"; DO NOT EDIT. - -package quasigo - -import "strconv" - -func _() { - // An "invalid array index" compiler error signifies that the constant values have changed. - // Re-run the stringer command to generate them again. - var x [1]struct{} - _ = x[opInvalid-0] - _ = x[opPop-1] - _ = x[opDup-2] - _ = x[opPushParam-3] - _ = x[opPushIntParam-4] - _ = x[opPushLocal-5] - _ = x[opPushIntLocal-6] - _ = x[opPushFalse-7] - _ = x[opPushTrue-8] - _ = x[opPushConst-9] - _ = x[opPushIntConst-10] - _ = x[opConvIntToIface-11] - _ = x[opSetLocal-12] - _ = x[opSetIntLocal-13] - _ = x[opIncLocal-14] - _ = x[opDecLocal-15] - _ = x[opReturnTop-16] - _ = x[opReturnIntTop-17] - _ = x[opReturnFalse-18] - _ = x[opReturnTrue-19] - _ = x[opReturn-20] - _ = x[opJump-21] - _ = x[opJumpFalse-22] - _ = x[opJumpTrue-23] - _ = x[opSetVariadicLen-24] - _ = x[opCallNative-25] - _ = x[opCall-26] - _ = x[opIntCall-27] - _ = x[opVoidCall-28] - _ = x[opIsNil-29] - _ = x[opIsNotNil-30] - _ = x[opNot-31] - _ = x[opEqInt-32] - _ = x[opNotEqInt-33] - _ = x[opGtInt-34] - _ = x[opGtEqInt-35] - _ = x[opLtInt-36] - _ = x[opLtEqInt-37] - _ = x[opEqString-38] - _ = x[opNotEqString-39] - _ = x[opConcat-40] - _ = x[opAdd-41] - _ = x[opSub-42] - _ = x[opStringSlice-43] - _ = x[opStringSliceFrom-44] - _ = x[opStringSliceTo-45] - _ = x[opStringLen-46] -} - -const _opcode_name = "InvalidPopDupPushParamPushIntParamPushLocalPushIntLocalPushFalsePushTruePushConstPushIntConstConvIntToIfaceSetLocalSetIntLocalIncLocalDecLocalReturnTopReturnIntTopReturnFalseReturnTrueReturnJumpJumpFalseJumpTrueSetVariadicLenCallNativeCallIntCallVoidCallIsNilIsNotNilNotEqIntNotEqIntGtIntGtEqIntLtIntLtEqIntEqStringNotEqStringConcatAddSubStringSliceStringSliceFromStringSliceToStringLen" - -var _opcode_index = [...]uint16{0, 7, 10, 13, 22, 34, 43, 55, 64, 72, 81, 93, 107, 115, 126, 134, 142, 151, 163, 174, 184, 190, 194, 203, 211, 225, 235, 239, 246, 254, 259, 267, 270, 275, 283, 288, 295, 300, 307, 315, 326, 332, 335, 338, 349, 364, 377, 386} - -func (i opcode) String() string { - if i >= opcode(len(_opcode_index)-1) { - return "opcode(" + strconv.FormatInt(int64(i), 10) + ")" - } - return _opcode_name[_opcode_index[i]:_opcode_index[i+1]] -} diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/opcodes.gen.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/opcodes.gen.go deleted file mode 100644 index a3ec270d4..000000000 --- a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/opcodes.gen.go +++ /dev/null @@ -1,249 +0,0 @@ -// Code generated "gen_opcodes.go"; DO NOT EDIT. 
- -package quasigo - -//go:generate stringer -type=opcode -trimprefix=op -type opcode byte - -const ( - opInvalid opcode = 0 - - // Encoding: 0x01 (width=1) - // Stack effect: (value) -> () - opPop opcode = 1 - - // Encoding: 0x02 (width=1) - // Stack effect: (x) -> (x x) - opDup opcode = 2 - - // Encoding: 0x03 index:u8 (width=2) - // Stack effect: () -> (value) - opPushParam opcode = 3 - - // Encoding: 0x04 index:u8 (width=2) - // Stack effect: () -> (value:int) - opPushIntParam opcode = 4 - - // Encoding: 0x05 index:u8 (width=2) - // Stack effect: () -> (value) - opPushLocal opcode = 5 - - // Encoding: 0x06 index:u8 (width=2) - // Stack effect: () -> (value:int) - opPushIntLocal opcode = 6 - - // Encoding: 0x07 (width=1) - // Stack effect: () -> (false) - opPushFalse opcode = 7 - - // Encoding: 0x08 (width=1) - // Stack effect: () -> (true) - opPushTrue opcode = 8 - - // Encoding: 0x09 constid:u8 (width=2) - // Stack effect: () -> (const) - opPushConst opcode = 9 - - // Encoding: 0x0a constid:u8 (width=2) - // Stack effect: () -> (const:int) - opPushIntConst opcode = 10 - - // Encoding: 0x0b (width=1) - // Stack effect: (value:int) -> (value) - opConvIntToIface opcode = 11 - - // Encoding: 0x0c index:u8 (width=2) - // Stack effect: (value) -> () - opSetLocal opcode = 12 - - // Encoding: 0x0d index:u8 (width=2) - // Stack effect: (value:int) -> () - opSetIntLocal opcode = 13 - - // Encoding: 0x0e index:u8 (width=2) - // Stack effect: unchanged - opIncLocal opcode = 14 - - // Encoding: 0x0f index:u8 (width=2) - // Stack effect: unchanged - opDecLocal opcode = 15 - - // Encoding: 0x10 (width=1) - // Stack effect: (value) -> (value) - opReturnTop opcode = 16 - - // Encoding: 0x11 (width=1) - // Stack effect: (value) -> (value) - opReturnIntTop opcode = 17 - - // Encoding: 0x12 (width=1) - // Stack effect: unchanged - opReturnFalse opcode = 18 - - // Encoding: 0x13 (width=1) - // Stack effect: unchanged - opReturnTrue opcode = 19 - - // Encoding: 0x14 (width=1) - // Stack effect: unchanged - opReturn opcode = 20 - - // Encoding: 0x15 offset:i16 (width=3) - // Stack effect: unchanged - opJump opcode = 21 - - // Encoding: 0x16 offset:i16 (width=3) - // Stack effect: (cond:bool) -> () - opJumpFalse opcode = 22 - - // Encoding: 0x17 offset:i16 (width=3) - // Stack effect: (cond:bool) -> () - opJumpTrue opcode = 23 - - // Encoding: 0x18 len:u8 (width=2) - // Stack effect: unchanged - opSetVariadicLen opcode = 24 - - // Encoding: 0x19 funcid:u16 (width=3) - // Stack effect: (args...) -> (results...) - opCallNative opcode = 25 - - // Encoding: 0x1a funcid:u16 (width=3) - // Stack effect: (args...) -> (result) - opCall opcode = 26 - - // Encoding: 0x1b funcid:u16 (width=3) - // Stack effect: (args...) -> (result:int) - opIntCall opcode = 27 - - // Encoding: 0x1c funcid:u16 (width=3) - // Stack effect: (args...) 
-> () - opVoidCall opcode = 28 - - // Encoding: 0x1d (width=1) - // Stack effect: (value) -> (result:bool) - opIsNil opcode = 29 - - // Encoding: 0x1e (width=1) - // Stack effect: (value) -> (result:bool) - opIsNotNil opcode = 30 - - // Encoding: 0x1f (width=1) - // Stack effect: (value:bool) -> (result:bool) - opNot opcode = 31 - - // Encoding: 0x20 (width=1) - // Stack effect: (x:int y:int) -> (result:bool) - opEqInt opcode = 32 - - // Encoding: 0x21 (width=1) - // Stack effect: (x:int y:int) -> (result:bool) - opNotEqInt opcode = 33 - - // Encoding: 0x22 (width=1) - // Stack effect: (x:int y:int) -> (result:bool) - opGtInt opcode = 34 - - // Encoding: 0x23 (width=1) - // Stack effect: (x:int y:int) -> (result:bool) - opGtEqInt opcode = 35 - - // Encoding: 0x24 (width=1) - // Stack effect: (x:int y:int) -> (result:bool) - opLtInt opcode = 36 - - // Encoding: 0x25 (width=1) - // Stack effect: (x:int y:int) -> (result:bool) - opLtEqInt opcode = 37 - - // Encoding: 0x26 (width=1) - // Stack effect: (x:string y:string) -> (result:bool) - opEqString opcode = 38 - - // Encoding: 0x27 (width=1) - // Stack effect: (x:string y:string) -> (result:bool) - opNotEqString opcode = 39 - - // Encoding: 0x28 (width=1) - // Stack effect: (x:string y:string) -> (result:string) - opConcat opcode = 40 - - // Encoding: 0x29 (width=1) - // Stack effect: (x:int y:int) -> (result:int) - opAdd opcode = 41 - - // Encoding: 0x2a (width=1) - // Stack effect: (x:int y:int) -> (result:int) - opSub opcode = 42 - - // Encoding: 0x2b (width=1) - // Stack effect: (s:string from:int to:int) -> (result:string) - opStringSlice opcode = 43 - - // Encoding: 0x2c (width=1) - // Stack effect: (s:string from:int) -> (result:string) - opStringSliceFrom opcode = 44 - - // Encoding: 0x2d (width=1) - // Stack effect: (s:string to:int) -> (result:string) - opStringSliceTo opcode = 45 - - // Encoding: 0x2e (width=1) - // Stack effect: (s:string) -> (result:int) - opStringLen opcode = 46 -) - -type opcodeInfo struct { - width int -} - -var opcodeInfoTable = [256]opcodeInfo{ - opInvalid: {width: 1}, - - opPop: {width: 1}, - opDup: {width: 1}, - opPushParam: {width: 2}, - opPushIntParam: {width: 2}, - opPushLocal: {width: 2}, - opPushIntLocal: {width: 2}, - opPushFalse: {width: 1}, - opPushTrue: {width: 1}, - opPushConst: {width: 2}, - opPushIntConst: {width: 2}, - opConvIntToIface: {width: 1}, - opSetLocal: {width: 2}, - opSetIntLocal: {width: 2}, - opIncLocal: {width: 2}, - opDecLocal: {width: 2}, - opReturnTop: {width: 1}, - opReturnIntTop: {width: 1}, - opReturnFalse: {width: 1}, - opReturnTrue: {width: 1}, - opReturn: {width: 1}, - opJump: {width: 3}, - opJumpFalse: {width: 3}, - opJumpTrue: {width: 3}, - opSetVariadicLen: {width: 2}, - opCallNative: {width: 3}, - opCall: {width: 3}, - opIntCall: {width: 3}, - opVoidCall: {width: 3}, - opIsNil: {width: 1}, - opIsNotNil: {width: 1}, - opNot: {width: 1}, - opEqInt: {width: 1}, - opNotEqInt: {width: 1}, - opGtInt: {width: 1}, - opGtEqInt: {width: 1}, - opLtInt: {width: 1}, - opLtEqInt: {width: 1}, - opEqString: {width: 1}, - opNotEqString: {width: 1}, - opConcat: {width: 1}, - opAdd: {width: 1}, - opSub: {width: 1}, - opStringSlice: {width: 1}, - opStringSliceFrom: {width: 1}, - opStringSliceTo: {width: 1}, - opStringLen: {width: 1}, -} diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/quasigo.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/quasigo.go deleted file mode 100644 index 8ac75771f..000000000 --- 
a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/quasigo.go +++ /dev/null @@ -1,199 +0,0 @@ -// Package quasigo implements a Go subset compiler and interpreter. -// -// The implementation details are not part of the contract of this package. -package quasigo - -import ( - "go/ast" - "go/token" - "go/types" -) - -// TODO(quasilyte): document what is thread-safe and what not. -// TODO(quasilyte): add a readme. - -// Env is used to hold both compilation and evaluation data. -type Env struct { - // TODO(quasilyte): store both native and user func ids in one map? - - nativeFuncs []nativeFunc - nameToNativeFuncID map[funcKey]uint16 - - userFuncs []*Func - nameToFuncID map[funcKey]uint16 - - // debug contains all information that is only needed - // for better debugging and compiled code introspection. - // Right now it's always enabled, but we may allow stripping it later. - debug *debugInfo -} - -// EvalEnv is a goroutine-local handle for Env. -// To get one, use Env.GetEvalEnv() method. -type EvalEnv struct { - nativeFuncs []nativeFunc - userFuncs []*Func - - Stack ValueStack -} - -// NewEnv creates a new empty environment. -func NewEnv() *Env { - return newEnv() -} - -// GetEvalEnv creates a new goroutine-local handle of env. -func (env *Env) GetEvalEnv() *EvalEnv { - return &EvalEnv{ - nativeFuncs: env.nativeFuncs, - userFuncs: env.userFuncs, - Stack: ValueStack{ - objects: make([]interface{}, 0, 32), - ints: make([]int, 0, 16), - }, - } -} - -// AddNativeMethod binds `$typeName.$methodName` symbol with f. -// A typeName should be fully qualified, like `github.com/user/pkgname.TypeName`. -// It method is defined only on pointer type, the typeName should start with `*`. -func (env *Env) AddNativeMethod(typeName, methodName string, f func(*ValueStack)) { - env.addNativeFunc(funcKey{qualifier: typeName, name: methodName}, f) -} - -// AddNativeFunc binds `$pkgPath.$funcName` symbol with f. -// A pkgPath should be a full package path in which funcName is defined. -func (env *Env) AddNativeFunc(pkgPath, funcName string, f func(*ValueStack)) { - env.addNativeFunc(funcKey{qualifier: pkgPath, name: funcName}, f) -} - -// AddFunc binds `$pkgPath.$funcName` symbol with f. -func (env *Env) AddFunc(pkgPath, funcName string, f *Func) { - env.addFunc(funcKey{qualifier: pkgPath, name: funcName}, f) -} - -// GetFunc finds previously bound function searching for the `$pkgPath.$funcName` symbol. -func (env *Env) GetFunc(pkgPath, funcName string) *Func { - id := env.nameToFuncID[funcKey{qualifier: pkgPath, name: funcName}] - return env.userFuncs[id] -} - -// CompileContext is used to provide necessary data to the compiler. -type CompileContext struct { - // Env is shared environment that should be used for all functions - // being compiled; then it should be used to execute these functions. - Env *Env - - Package *types.Package - Types *types.Info - Fset *token.FileSet -} - -// Compile prepares an executable version of fn. -func Compile(ctx *CompileContext, fn *ast.FuncDecl) (compiled *Func, err error) { - return compile(ctx, fn) -} - -// Call invokes a given function. -// All arguments should be pushed to env.Stack prior to this call. -// -// Note that arguments are not popped off the stack, -// so you can bind the args once and use Call multiple times. -// If you want to reset arguments, do env.Stack.Reset(). 
-func Call(env *EvalEnv, fn *Func) CallResult { - numObjectArgs := len(env.Stack.objects) - numIntArgs := len(env.Stack.ints) - result := eval(env, fn, 0, 0) - env.Stack.objects = env.Stack.objects[:numObjectArgs] - env.Stack.ints = env.Stack.ints[:numIntArgs] - return result -} - -// CallResult is a return value of Call function. -// For most functions, Value() should be called to get the actual result. -// For int-typed functions, IntValue() should be used instead. -type CallResult struct { - value interface{} - scalarValue uint64 -} - -// Value unboxes an actual call return value. -// For int results, use IntValue(). -func (res CallResult) Value() interface{} { return res.value } - -// IntValue unboxes an actual call return value. -func (res CallResult) IntValue() int { return int(res.scalarValue) } - -// Disasm returns the compiled function disassembly text. -// This output is not guaranteed to be stable between versions -// and should be used only for debugging purposes. -func Disasm(env *Env, fn *Func) string { - return disasm(env, fn) -} - -// Func is a compiled function that is ready to be executed. -type Func struct { - code []byte - - constants []interface{} - intConstants []int - - numObjectParams int - numIntParams int - - name string -} - -// ValueStack is used to manipulate runtime values during the evaluation. -// Function arguments are pushed to the stack. -// Function results are returned via stack as well. -// -// For the sake of efficiency, it stores different types separately. -// If int was pushed with PushInt(), it should be retrieved by PopInt(). -// It's a bad idea to do a Push() and then PopInt() and vice-versa. -type ValueStack struct { - objects []interface{} - ints []int - variadicLen int -} - -// Reset empties the stack. -func (s *ValueStack) Reset() { - s.objects = s.objects[:0] - s.ints = s.ints[:0] -} - -// Pop removes the top stack element and returns it. -// Important: for int-typed values, use PopInt. -func (s *ValueStack) Pop() interface{} { - x := s.objects[len(s.objects)-1] - s.objects = s.objects[:len(s.objects)-1] - return x -} - -// PopInt removes the top stack element and returns it. -func (s *ValueStack) PopInt() int { - x := s.ints[len(s.ints)-1] - s.ints = s.ints[:len(s.ints)-1] - return x -} - -// PopVariadic removes the `...` argument and returns it as a slice. -// -// Slice elements are in the order they were passed to the function, -// for example, a call Sprintf("%s:%d", filename, line) returns -// the slice []interface{filename, line}. -func (s *ValueStack) PopVariadic() []interface{} { - to := len(s.objects) - from := to - s.variadicLen - xs := s.objects[from:to] - s.objects = s.objects[:from] - return xs -} - -// Push adds x to the stack. -// Important: for int-typed values, use PushInt. -func (s *ValueStack) Push(x interface{}) { s.objects = append(s.objects, x) } - -// PushInt adds x to the stack. 
-func (s *ValueStack) PushInt(x int) { s.ints = append(s.ints, x) } diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/stdlib/qfmt/qfmt.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/stdlib/qfmt/qfmt.go deleted file mode 100644 index 249ac2563..000000000 --- a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/stdlib/qfmt/qfmt.go +++ /dev/null @@ -1,17 +0,0 @@ -package qfmt - -import ( - "fmt" - - "github.com/quasilyte/go-ruleguard/ruleguard/quasigo" -) - -func ImportAll(env *quasigo.Env) { - env.AddNativeFunc(`fmt`, `Sprintf`, Sprintf) -} - -func Sprintf(stack *quasigo.ValueStack) { - args := stack.PopVariadic() - format := stack.Pop().(string) - stack.Push(fmt.Sprintf(format, args...)) -} diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/stdlib/qstrconv/qstrconv.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/stdlib/qstrconv/qstrconv.go deleted file mode 100644 index 8bc2d943f..000000000 --- a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/stdlib/qstrconv/qstrconv.go +++ /dev/null @@ -1,24 +0,0 @@ -package qstrconv - -import ( - "strconv" - - "github.com/quasilyte/go-ruleguard/ruleguard/quasigo" -) - -func ImportAll(env *quasigo.Env) { - env.AddNativeFunc(`strconv`, `Atoi`, Atoi) - env.AddNativeFunc(`strconv`, `Itoa`, Itoa) -} - -func Atoi(stack *quasigo.ValueStack) { - s := stack.Pop().(string) - v, err := strconv.Atoi(s) - stack.PushInt(v) - stack.Push(err) -} - -func Itoa(stack *quasigo.ValueStack) { - i := stack.PopInt() - stack.Push(strconv.Itoa(i)) -} diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/stdlib/qstrings/qstrings.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/stdlib/qstrings/qstrings.go deleted file mode 100644 index 6b708ad9c..000000000 --- a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/stdlib/qstrings/qstrings.go +++ /dev/null @@ -1,62 +0,0 @@ -package qstrings - -import ( - "strings" - - "github.com/quasilyte/go-ruleguard/ruleguard/quasigo" -) - -func ImportAll(env *quasigo.Env) { - env.AddNativeFunc(`strings`, `Replace`, Replace) - env.AddNativeFunc(`strings`, `ReplaceAll`, ReplaceAll) - env.AddNativeFunc(`strings`, `TrimPrefix`, TrimPrefix) - env.AddNativeFunc(`strings`, `TrimSuffix`, TrimSuffix) - env.AddNativeFunc(`strings`, `HasPrefix`, HasPrefix) - env.AddNativeFunc(`strings`, `HasSuffix`, HasSuffix) - env.AddNativeFunc(`strings`, `Contains`, Contains) -} - -func Replace(stack *quasigo.ValueStack) { - n := stack.PopInt() - newPart := stack.Pop().(string) - oldPart := stack.Pop().(string) - s := stack.Pop().(string) - stack.Push(strings.Replace(s, oldPart, newPart, n)) -} - -func ReplaceAll(stack *quasigo.ValueStack) { - newPart := stack.Pop().(string) - oldPart := stack.Pop().(string) - s := stack.Pop().(string) - stack.Push(strings.ReplaceAll(s, oldPart, newPart)) -} - -func TrimPrefix(stack *quasigo.ValueStack) { - prefix := stack.Pop().(string) - s := stack.Pop().(string) - stack.Push(strings.TrimPrefix(s, prefix)) -} - -func TrimSuffix(stack *quasigo.ValueStack) { - prefix := stack.Pop().(string) - s := stack.Pop().(string) - stack.Push(strings.TrimSuffix(s, prefix)) -} - -func HasPrefix(stack *quasigo.ValueStack) { - prefix := stack.Pop().(string) - s := stack.Pop().(string) - stack.Push(strings.HasPrefix(s, prefix)) -} - -func HasSuffix(stack *quasigo.ValueStack) { - suffix := stack.Pop().(string) - s := stack.Pop().(string) - stack.Push(strings.HasSuffix(s, suffix)) -} - -func Contains(stack 
*quasigo.ValueStack) { - substr := stack.Pop().(string) - s := stack.Pop().(string) - stack.Push(strings.Contains(s, substr)) -} diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/utils.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/utils.go deleted file mode 100644 index a5c3676a4..000000000 --- a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/utils.go +++ /dev/null @@ -1,60 +0,0 @@ -package quasigo - -import ( - "encoding/binary" - "go/ast" - "go/types" -) - -func pickOp(cond bool, ifTrue, otherwise opcode) opcode { - if cond { - return ifTrue - } - return otherwise -} - -func put16(code []byte, pos, value int) { - binary.LittleEndian.PutUint16(code[pos:], uint16(value)) -} - -func decode16(code []byte, pos int) int { - return int(int16(binary.LittleEndian.Uint16(code[pos:]))) -} - -func typeIsInt(typ types.Type) bool { - basic, ok := typ.Underlying().(*types.Basic) - if !ok { - return false - } - switch basic.Kind() { - case types.Int, types.UntypedInt: - return true - default: - return false - } -} - -func typeIsString(typ types.Type) bool { - basic, ok := typ.Underlying().(*types.Basic) - if !ok { - return false - } - return basic.Info()&types.IsString != 0 -} - -func walkBytecode(code []byte, fn func(pc int, op opcode)) { - pc := 0 - for pc < len(code) { - op := opcode(code[pc]) - fn(pc, op) - pc += opcodeInfoTable[op].width - } -} - -func identName(n ast.Expr) string { - id, ok := n.(*ast.Ident) - if ok { - return id.Name - } - return "" -} diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/ruleguard.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/ruleguard.go deleted file mode 100644 index 41fbc8995..000000000 --- a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/ruleguard.go +++ /dev/null @@ -1,221 +0,0 @@ -package ruleguard - -import ( - "go/ast" - "go/build" - "go/token" - "go/types" - "io" - - "github.com/quasilyte/go-ruleguard/ruleguard/ir" - "github.com/quasilyte/go-ruleguard/ruleguard/quasigo" - "github.com/quasilyte/go-ruleguard/ruleguard/typematch" - "github.com/quasilyte/gogrep" -) - -// Engine is the main ruleguard package API object. -// -// First, load some ruleguard files with Load() to build a rule set. -// Then use Run() to execute the rules. -// -// It's advised to have only 1 engine per application as it does a lot of caching. -// The Run() method is synchronized, so it can be used concurrently. -// -// An Engine must be created with NewEngine() function. -type Engine struct { - impl *engine - - // BuildContext can be used as an override for build.Default context. - // Used during the Go packages resolving. - // - // Use Engine.InferBuildContext() to create a sensible default - // for this field that is better than build.Default. - // We're not using this by default to avoid the excessive work - // if you already have a properly initialized build.Context object. - // - // nil will result in build.Default usage. - BuildContext *build.Context -} - -// NewEngine creates an engine with empty rule set. -func NewEngine() *Engine { - return &Engine{impl: newEngine()} -} - -func (e *Engine) InferBuildContext() { - e.BuildContext = inferBuildContext() -} - -// Load reads a ruleguard file from r and adds it to the engine rule set. -// -// Load() is not thread-safe, especially if used concurrently with Run() method. -// It's advised to Load() all ruleguard files under a critical section (like sync.Once) -// and then use Run() to execute all of them. 
-func (e *Engine) Load(ctx *LoadContext, filename string, r io.Reader) error { - return e.impl.Load(ctx, e.BuildContext, filename, r) -} - -// LoadFromIR is like Load(), but it takes already parsed IR file as an input. -// -// This method can be useful if you're trying to embed a precompiled rules file -// into your binary. -func (e *Engine) LoadFromIR(ctx *LoadContext, filename string, f *ir.File) error { - return e.impl.LoadFromIR(ctx, e.BuildContext, filename, f) -} - -// LoadedGroups returns information about all currently loaded rule groups. -func (e *Engine) LoadedGroups() []GoRuleGroup { - return e.impl.LoadedGroups() -} - -// Run executes all loaded rules on a given file. -// Matched rules invoke `RunContext.Report()` method. -// -// Run() is thread-safe, unless used in parallel with Load(), -// which modifies the engine state. -func (e *Engine) Run(ctx *RunContext, f *ast.File) error { - return e.impl.Run(ctx, e.BuildContext, f) -} - -type LoadContext struct { - DebugFunc string - DebugImports bool - DebugPrint func(string) - - // GroupFilter is called for every rule group being parsed. - // If function returns false, that group will not be included - // in the resulting rules set. - // Nil filter accepts all rule groups. - GroupFilter func(*GoRuleGroup) bool - - Fset *token.FileSet -} - -type RunnerState struct { - gogrepState gogrep.MatcherState - gogrepSubState gogrep.MatcherState - nodePath *nodePath - evalEnv *quasigo.EvalEnv - typematchState *typematch.MatcherState - - object *rulesRunner -} - -// NewRunnerState creates a state object that can be used with RunContext. -func NewRunnerState(e *Engine) *RunnerState { - return newRunnerState(e.impl.state) -} - -type RunContext struct { - Debug string - DebugImports bool - DebugPrint func(string) - - Types *types.Info - Sizes types.Sizes - Fset *token.FileSet - Pkg *types.Package - - // Report is a function that is called for every successful ruleguard match. - // The pointer to ReportData is reused, it should not be kept. - // If you want to keep it after Report() returns, make a copy. - Report func(*ReportData) - - GoVersion GoVersion - - // TruncateLen is a length threshold (in bytes) for interpolated vars in Report() templates. - // - // Truncation removes the part of the string in the middle and replaces it with <...> - // so it meets the max length constraint. - // - // The default value is 60 (implied if value is 0). - // - // Note that this value is ignored for Suggest templates. - // Ruleguard doesn't truncate suggested replacement candidates. - TruncateLen int - - // State is an object that contains reusable resources needed for the rules to be executed. - // - // If nil, a new state will be allocated. - // - // The State object access is not synchronized. - // State should not be shared between multiple goroutines. - // There are 3 patterns that are safe: - // 1. For single-threaded programs, you can use a single state. - // 2. For controlled concurrency with workers, you can use a per-worker state. - // 3. For uncontrolled concurrency you can use a sync.Pool of states. - // - // Reusing the state properly can increase the performance significantly. - State *RunnerState -} - -type ReportData struct { - RuleInfo GoRuleInfo - Node ast.Node - Message string - Suggestion *Suggestion - - // Experimental: fields below are part of the experiment. - // They'll probably be removed or changed over time. 
- - Func *ast.FuncDecl -} - -type Suggestion struct { - From token.Pos - To token.Pos - Replacement []byte -} - -type GoRuleInfo struct { - // Line is a line inside a file that defined this rule. - Line int - - // Group is a function that contains this rule. - Group *GoRuleGroup -} - -type GoRuleGroup struct { - // Name is a function name associated with this rule group. - Name string - - // Pos is a location where this rule group was defined. - Pos token.Position - - // Line is a source code line number inside associated file. - // A pair of Filename:Line form a conventional location string. - Line int - - // Filename is a file that defined this rule group. - Filename string - - // DocTags contains a list of keys from the `gorules:tags` comment. - DocTags []string - - // DocSummary is a short one sentence description. - // Filled from the `doc:summary` pragma content. - DocSummary string - - // DocBefore is a code snippet of code that will violate rule. - // Filled from the `doc:before` pragma content. - DocBefore string - - // DocAfter is a code snippet of fixed code that complies to the rule. - // Filled from the `doc:after` pragma content. - DocAfter string - - // DocNote is an optional caution message or advice. - // Usually, it's used to reference some external resource, like - // issue on the GitHub. - // Filled from the `doc:note` pragma content. - DocNote string -} - -// ImportError is returned when a ruleguard file references a package that cannot be imported. -type ImportError struct { - msg string - err error -} - -func (e *ImportError) Error() string { return e.msg } -func (e *ImportError) Unwrap() error { return e.err } diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/runner.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/runner.go deleted file mode 100644 index fdc95ab5e..000000000 --- a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/runner.go +++ /dev/null @@ -1,562 +0,0 @@ -package ruleguard - -import ( - "bytes" - "context" - "fmt" - "go/ast" - "go/build" - "go/printer" - "go/token" - "os" - "path/filepath" - "reflect" - "sort" - "strconv" - "strings" - - "github.com/quasilyte/go-ruleguard/ruleguard/goutil" - "github.com/quasilyte/go-ruleguard/ruleguard/profiling" - "github.com/quasilyte/go-ruleguard/ruleguard/quasigo" - "github.com/quasilyte/go-ruleguard/ruleguard/typematch" - "github.com/quasilyte/gogrep" - "github.com/quasilyte/gogrep/nodetag" -) - -type rulesRunner struct { - state *engineState - - bgContext context.Context - - ctx *RunContext - rules *goRuleSet - - truncateLen int - - reportData ReportData - - gogrepState gogrep.MatcherState - gogrepSubState gogrep.MatcherState - - importer *goImporter - - filename string - src []byte - - // nodePath is a stack of ast.Nodes we visited to this point. - // When we enter a new node, it's placed on the top of the stack. - // When we leave that node, it's popped. - // The stack is a slice that is allocated only once and reused - // for the lifetime of the runner. - // The only overhead it has is a slice append and pop operations - // that are quire cheap. - // - // Note: we need this path to get a Node.Parent() for `$$` matches. - // So it's used to climb up the tree there. - // For named submatches we can't use it as the node can be located - // deeper into the tree than the current node. - // In those cases we need a more complicated algorithm. 
- nodePath *nodePath - - filterParams filterParams -} - -func newRunnerState(es *engineState) *RunnerState { - gogrepState := gogrep.NewMatcherState() - gogrepSubState := gogrep.NewMatcherState() - state := &RunnerState{ - gogrepState: gogrepState, - gogrepSubState: gogrepSubState, - nodePath: newNodePath(), - evalEnv: es.env.GetEvalEnv(), - typematchState: typematch.NewMatcherState(), - object: &rulesRunner{}, - } - return state -} - -func (state *RunnerState) Reset() { - state.nodePath.stack = state.nodePath.stack[:0] - state.evalEnv.Stack.Reset() -} - -func newRulesRunner(ctx *RunContext, buildContext *build.Context, state *engineState, rules *goRuleSet) *rulesRunner { - runnerState := ctx.State - if runnerState == nil { - runnerState = newRunnerState(state) - } else { - runnerState.Reset() - } - - importer := newGoImporter(state, goImporterConfig{ - fset: ctx.Fset, - debugImports: ctx.DebugImports, - debugPrint: ctx.DebugPrint, - buildContext: buildContext, - }) - gogrepState := runnerState.gogrepState - gogrepState.Types = ctx.Types - gogrepSubState := runnerState.gogrepSubState - gogrepSubState.Types = ctx.Types - evalEnv := runnerState.evalEnv - - rr := runnerState.object - *rr = rulesRunner{ - bgContext: context.Background(), - ctx: ctx, - importer: importer, - rules: rules, - gogrepState: gogrepState, - gogrepSubState: gogrepSubState, - nodePath: runnerState.nodePath, - truncateLen: ctx.TruncateLen, - filterParams: filterParams{ - typematchState: runnerState.typematchState, - env: evalEnv, - importer: importer, - ctx: ctx, - }, - } - - evalEnv.Stack.Push(&rr.filterParams) - if ctx.TruncateLen == 0 { - rr.truncateLen = 60 - } - rr.filterParams.nodeText = rr.nodeText - rr.filterParams.nodeString = rr.nodeString - rr.filterParams.nodePath = rr.nodePath - rr.filterParams.gogrepSubState = &rr.gogrepSubState - - return rr -} - -func (rr *rulesRunner) nodeString(n ast.Node) string { - b := rr.nodeText(n) - return string(b) -} - -func (rr *rulesRunner) nodeText(n ast.Node) []byte { - if gogrep.IsEmptyNodeSlice(n) { - return nil - } - - from := rr.ctx.Fset.Position(n.Pos()).Offset - to := rr.ctx.Fset.Position(n.End()).Offset - src := rr.fileBytes() - if (from >= 0 && from < len(src)) && (to >= 0 && to < len(src)) { - return src[from:to] - } - - // Go printer would panic on comments. - if n, ok := n.(*ast.Comment); ok { - return []byte(n.Text) - } - - // Fallback to the printer. - var buf bytes.Buffer - if err := printer.Fprint(&buf, rr.ctx.Fset, n); err != nil { - panic(err) - } - return buf.Bytes() -} - -func (rr *rulesRunner) fileBytes() []byte { - if rr.src != nil { - return rr.src - } - - // TODO(quasilyte): re-use src slice? - src, err := os.ReadFile(rr.filename) - if err != nil || src == nil { - // Assign a zero-length slice so rr.src - // is never nil during the second fileBytes call. - rr.src = make([]byte, 0) - } else { - rr.src = src - } - return rr.src -} - -func (rr *rulesRunner) run(f *ast.File) error { - // If it's not empty then we're leaking memory. - // For every Push() there should be a Pop() call. 
- if rr.nodePath.Len() != 0 { - panic("internal error: node path is not empty") - } - - rr.filename = rr.ctx.Fset.Position(f.Pos()).Filename - rr.filterParams.filename = rr.filename - rr.collectImports(f) - - if rr.rules.universal.categorizedNum != 0 { - var inspector astWalker - inspector.nodePath = rr.nodePath - inspector.filterParams = &rr.filterParams - inspector.Walk(f, func(n ast.Node, tag nodetag.Value) { - rr.runRules(n, tag) - }) - } - - if len(rr.rules.universal.commentRules) != 0 { - for _, commentGroup := range f.Comments { - for _, comment := range commentGroup.List { - rr.runCommentRules(comment) - } - } - } - - return nil -} - -func (rr *rulesRunner) runCommentRules(comment *ast.Comment) { - // We'll need that file to create a token.Pos from the artificial offset. - file := rr.ctx.Fset.File(comment.Pos()) - - for _, rule := range rr.rules.universal.commentRules { - var m matchData - if rule.captureGroups { - result := rule.pat.FindStringSubmatchIndex(comment.Text) - if result == nil { - continue - } - for i, name := range rule.pat.SubexpNames() { - if i == 0 || name == "" { - continue - } - resultIndex := i * 2 - beginPos := result[resultIndex+0] - endPos := result[resultIndex+1] - // Negative index a special case when named group captured nothing. - // Consider this pattern: `(?Pfoo)|(bar)`. - // If we have `bar` input string, will remain empty. - if beginPos < 0 || endPos < 0 { - m.match.Capture = append(m.match.Capture, gogrep.CapturedNode{ - Name: name, - Node: &ast.Comment{Slash: comment.Pos()}, - }) - continue - } - m.match.Capture = append(m.match.Capture, gogrep.CapturedNode{ - Name: name, - Node: &ast.Comment{ - Slash: file.Pos(beginPos + file.Offset(comment.Pos())), - Text: comment.Text[beginPos:endPos], - }, - }) - } - m.match.Node = &ast.Comment{ - Slash: file.Pos(result[0] + file.Offset(comment.Pos())), - Text: comment.Text[result[0]:result[1]], - } - } else { - // Fast path: no need to save any submatches. - result := rule.pat.FindStringIndex(comment.Text) - if result == nil { - continue - } - m.match.Node = &ast.Comment{ - Slash: file.Pos(result[0] + file.Offset(comment.Pos())), - Text: comment.Text[result[0]:result[1]], - } - } - - accept := rr.handleCommentMatch(rule, m) - if accept { - break - } - } -} - -func (rr *rulesRunner) runRules(n ast.Node, tag nodetag.Value) { - // profiling.LabelsEnabled is constant, so labels-related - // code should be a no-op inside normal build. - // To enable labels, use "-tags pproflabels" build tag. 
- - for _, rule := range rr.rules.universal.rulesByTag[tag] { - if profiling.LabelsEnabled { - profiling.EnterWithLabels(rr.bgContext, rule.group.Name) - } - - matched := false - rule.pat.MatchNode(&rr.gogrepState, n, func(m gogrep.MatchData) { - matched = rr.handleMatch(rule, m) - }) - - if profiling.LabelsEnabled { - profiling.Leave(rr.bgContext) - } - - if matched && !multiMatchTags[tag] { - break - } - } -} - -func (rr *rulesRunner) reject(rule goRule, reason string, m matchData) { - if rule.group.Name != rr.ctx.Debug { - return // This rule is not being debugged - } - - pos := rr.ctx.Fset.Position(m.Node().Pos()) - rr.ctx.DebugPrint(fmt.Sprintf("%s:%d: [%s:%d] rejected by %s", - pos.Filename, pos.Line, filepath.Base(rule.group.Filename), rule.line, reason)) - - values := make([]gogrep.CapturedNode, len(m.CaptureList())) - copy(values, m.CaptureList()) - sort.Slice(values, func(i, j int) bool { - return values[i].Name < values[j].Name - }) - - for _, v := range values { - name := v.Name - node := v.Node - - if comment, ok := node.(*ast.Comment); ok { - s := strings.ReplaceAll(comment.Text, "\n", `\n`) - rr.ctx.DebugPrint(fmt.Sprintf(" $%s: %s", name, s)) - continue - } - - var expr ast.Expr - switch node := node.(type) { - case ast.Expr: - expr = node - case *ast.ExprStmt: - expr = node.X - default: - continue - } - - typ := rr.ctx.Types.TypeOf(expr) - typeString := "" - if typ != nil { - typeString = typ.String() - } - s := strings.ReplaceAll(goutil.SprintNode(rr.ctx.Fset, expr), "\n", `\n`) - rr.ctx.DebugPrint(fmt.Sprintf(" $%s %s: %s", name, typeString, s)) - } -} - -func (rr *rulesRunner) handleCommentMatch(rule goCommentRule, m matchData) bool { - if rule.base.filter.fn != nil { - rr.filterParams.match = m - filterResult := rule.base.filter.fn(&rr.filterParams) - if !filterResult.Matched() { - rr.reject(rule.base, filterResult.RejectReason(), m) - return false - } - } - - message := rr.renderMessage(rule.base.msg, m, true) - node := m.Node() - if rule.base.location != "" { - node, _ = m.CapturedByName(rule.base.location) - } - var suggestion *Suggestion - if rule.base.suggestion != "" { - suggestion = &Suggestion{ - Replacement: []byte(rr.renderMessage(rule.base.suggestion, m, false)), - From: node.Pos(), - To: node.End(), - } - } - info := GoRuleInfo{ - Group: rule.base.group, - Line: rule.base.line, - } - rr.reportData.RuleInfo = info - rr.reportData.Node = node - rr.reportData.Message = message - rr.reportData.Suggestion = suggestion - - rr.ctx.Report(&rr.reportData) - return true -} - -func (rr *rulesRunner) handleMatch(rule goRule, m gogrep.MatchData) bool { - if rule.filter.fn != nil || rule.do != nil { - rr.filterParams.match = matchData{match: m} - } - - if rule.filter.fn != nil { - filterResult := rule.filter.fn(&rr.filterParams) - if !filterResult.Matched() { - rr.reject(rule, filterResult.RejectReason(), matchData{match: m}) - return false - } - } - - node := m.Node - if rule.location != "" { - node, _ = m.CapturedByName(rule.location) - } - - var messageText string - var suggestText string - if rule.do != nil { - rr.filterParams.reportString = "" - rr.filterParams.suggestString = "" - _ = quasigo.Call(rr.filterParams.env, rule.do) - messageText = rr.filterParams.reportString - if messageText == "" { - if rr.filterParams.suggestString != "" { - messageText = "suggestion: " + rr.filterParams.suggestString - } else { - messageText = "" - } - } - if rr.filterParams.suggestString != "" { - suggestText = rr.filterParams.suggestString - } - } else { - messageText = 
rr.renderMessage(rule.msg, matchData{match: m}, true) - if rule.suggestion != "" { - suggestText = rr.renderMessage(rule.suggestion, matchData{match: m}, false) - } - } - - var suggestion *Suggestion - if suggestText != "" { - suggestion = &Suggestion{ - Replacement: []byte(suggestText), - From: node.Pos(), - To: node.End(), - } - } - - info := GoRuleInfo{ - Group: rule.group, - Line: rule.line, - } - rr.reportData.RuleInfo = info - rr.reportData.Node = node - rr.reportData.Message = messageText - rr.reportData.Suggestion = suggestion - - rr.reportData.Func = rr.filterParams.currentFunc - - rr.ctx.Report(&rr.reportData) - return true -} - -func (rr *rulesRunner) collectImports(f *ast.File) { - rr.filterParams.imports = make(map[string]struct{}, len(f.Imports)) - for _, spec := range f.Imports { - s, err := strconv.Unquote(spec.Path.Value) - if err != nil { - continue - } - rr.filterParams.imports[s] = struct{}{} - } -} - -func (rr *rulesRunner) renderMessage(msg string, m matchData, truncate bool) string { - if !strings.Contains(msg, "$") { - return msg - } - - var capture []gogrep.CapturedNode - if len(m.CaptureList()) != 0 { - capture = make([]gogrep.CapturedNode, 0, len(m.CaptureList())) - for _, c := range m.CaptureList() { - n := c.Node - // Some captured nodes are typed, but nil. - // We can't really get their text, so skip them here. - // For example, pattern `func $_() $results { $*_ }` may - // match a nil *ast.FieldList for $results if executed - // against a function with no results. - if reflect.ValueOf(n).IsNil() && !gogrep.IsEmptyNodeSlice(n) { - continue - } - capture = append(capture, c) - } - if len(capture) > 1 { - sort.Slice(capture, func(i, j int) bool { - return len(capture[i].Name) > len(capture[j].Name) - }) - } - } - - result := make([]byte, 0, len(msg)*2) - i := 0 - for { - j := strings.IndexByte(msg[i:], '$') - if j == -1 { - result = append(result, msg[i:]...) - break - } - dollarPos := i + j - result = append(result, msg[i:dollarPos]...) - var n ast.Node - var nameLen int - if strings.HasPrefix(msg[dollarPos+1:], "$") { - n = m.Node() - nameLen = 1 - } else { - for _, c := range capture { - if strings.HasPrefix(msg[dollarPos+1:], c.Name) { - n = c.Node - nameLen = len(c.Name) - break - } - } - } - if n != nil { - text := rr.nodeText(n) - text = rr.fixedText(text, n, msg[dollarPos+1+nameLen:]) - if truncate { - text = truncateText(text, rr.truncateLen) - } - result = append(result, text...) - } else { - result = append(result, '$') - } - i = dollarPos + len("$") + nameLen - } - - return string(result) -} - -func (rr *rulesRunner) fixedText(text []byte, n ast.Node, following string) []byte { - // pattern=`$x.y` $x=`&buf` following=`.y` - // Insert $x as `buf`, so we get `buf.y` instead of incorrect `&buf.y`. - if n, ok := n.(*ast.UnaryExpr); ok && n.Op == token.AND { - shouldFix := false - switch n.X.(type) { - case *ast.Ident, *ast.IndexExpr, *ast.SelectorExpr: - shouldFix = true - } - if shouldFix && strings.HasPrefix(following, ".") { - return bytes.TrimPrefix(text, []byte("&")) - } - } - - return text -} - -var longTextPlaceholder = []byte("<...>") - -func truncateText(s []byte, maxLen int) []byte { - if len(s) <= maxLen-len(longTextPlaceholder) { - return s - } - maxLen -= len(longTextPlaceholder) - leftLen := maxLen / 2 - rightLen := (maxLen % 2) + leftLen - left := s[:leftLen] - right := s[len(s)-rightLen:] - - result := make([]byte, 0, len(left)+len(longTextPlaceholder)+len(right)) - result = append(result, left...) 
- result = append(result, longTextPlaceholder...) - result = append(result, right...) - - return result -} - -var multiMatchTags = [nodetag.NumBuckets]bool{ - nodetag.BlockStmt: true, - nodetag.CaseClause: true, - nodetag.CommClause: true, - nodetag.File: true, -} diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/textmatch/compile.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/textmatch/compile.go deleted file mode 100644 index d320bf880..000000000 --- a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/textmatch/compile.go +++ /dev/null @@ -1,84 +0,0 @@ -package textmatch - -import ( - "regexp" - "regexp/syntax" - "unicode" -) - -func compile(s string) (Pattern, error) { - reSyntax, err := syntax.Parse(s, syntax.Perl) - if err == nil { - if optimized := compileOptimized(s, reSyntax); optimized != nil { - return optimized, nil - } - } - return regexp.Compile(s) -} - -func compileOptimized(s string, re *syntax.Regexp) Pattern { - // .* - isAny := func(re *syntax.Regexp) bool { - return re.Op == syntax.OpStar && re.Sub[0].Op == syntax.OpAnyCharNotNL - } - // "literal" - isLit := func(re *syntax.Regexp) bool { - return re.Op == syntax.OpLiteral - } - // ^ - isBegin := func(re *syntax.Regexp) bool { - return re.Op == syntax.OpBeginText - } - // $ - isEnd := func(re *syntax.Regexp) bool { - return re.Op == syntax.OpEndText - } - - // TODO: analyze what kind of regexps people use in rules - // more often and optimize those as well. - - // lit => strings.Contains($input, lit) - if re.Op == syntax.OpLiteral { - return &containsLiteralMatcher{value: newInputValue(string(re.Rune))} - } - - // `.*` lit `.*` => strings.Contains($input, lit) - if re.Op == syntax.OpConcat && len(re.Sub) == 3 { - if isAny(re.Sub[0]) && isLit(re.Sub[1]) && isAny(re.Sub[2]) { - return &containsLiteralMatcher{value: newInputValue(string(re.Sub[1].Rune))} - } - } - - // `^` lit => strings.HasPrefix($input, lit) - if re.Op == syntax.OpConcat && len(re.Sub) == 2 { - if isBegin(re.Sub[0]) && isLit(re.Sub[1]) { - return &prefixLiteralMatcher{value: newInputValue(string(re.Sub[1].Rune))} - } - } - - // lit `$` => strings.HasSuffix($input, lit) - if re.Op == syntax.OpConcat && len(re.Sub) == 2 { - if isLit(re.Sub[0]) && isEnd(re.Sub[1]) { - return &suffixLiteralMatcher{value: newInputValue(string(re.Sub[0].Rune))} - } - } - - // `^` lit `$` => $input == lit - if re.Op == syntax.OpConcat && len(re.Sub) == 3 { - if isBegin(re.Sub[0]) && isLit(re.Sub[1]) && isEnd(re.Sub[2]) { - return &eqLiteralMatcher{value: newInputValue(string(re.Sub[1].Rune))} - } - } - - // `^\p{Lu}` => prefixRunePredMatcher:unicode.IsUpper - // `^\p{Ll}` => prefixRunePredMatcher:unicode.IsLower - switch s { - case `^\p{Lu}`: - return &prefixRunePredMatcher{pred: unicode.IsUpper} - case `^\p{Ll}`: - return &prefixRunePredMatcher{pred: unicode.IsLower} - } - - // Can't optimize. - return nil -} diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/textmatch/matchers.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/textmatch/matchers.go deleted file mode 100644 index 2f68c9aee..000000000 --- a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/textmatch/matchers.go +++ /dev/null @@ -1,72 +0,0 @@ -package textmatch - -import ( - "bytes" - "strings" - "unicode/utf8" -) - -// inputValue is a wrapper for string|[]byte. -// -// We hold both values to avoid string->[]byte and vice versa -// conversions when doing Match and MatchString. 
-type inputValue struct { - s string - b []byte -} - -func newInputValue(s string) inputValue { - return inputValue{s: s, b: []byte(s)} -} - -type containsLiteralMatcher struct{ value inputValue } - -func (m *containsLiteralMatcher) MatchString(s string) bool { - return strings.Contains(s, m.value.s) -} - -func (m *containsLiteralMatcher) Match(b []byte) bool { - return bytes.Contains(b, m.value.b) -} - -type prefixLiteralMatcher struct{ value inputValue } - -func (m *prefixLiteralMatcher) MatchString(s string) bool { - return strings.HasPrefix(s, m.value.s) -} - -func (m *prefixLiteralMatcher) Match(b []byte) bool { - return bytes.HasPrefix(b, m.value.b) -} - -type suffixLiteralMatcher struct{ value inputValue } - -func (m *suffixLiteralMatcher) MatchString(s string) bool { - return strings.HasSuffix(s, m.value.s) -} - -func (m *suffixLiteralMatcher) Match(b []byte) bool { - return bytes.HasSuffix(b, m.value.b) -} - -type eqLiteralMatcher struct{ value inputValue } - -func (m *eqLiteralMatcher) MatchString(s string) bool { - return m.value.s == s -} - -func (m *eqLiteralMatcher) Match(b []byte) bool { - return bytes.Equal(m.value.b, b) -} - -type prefixRunePredMatcher struct{ pred func(rune) bool } - -func (m *prefixRunePredMatcher) MatchString(s string) bool { - r, _ := utf8.DecodeRuneInString(s) - return m.pred(r) -} - -func (m *prefixRunePredMatcher) Match(b []byte) bool { - r, _ := utf8.DecodeRune(b) - return m.pred(r) -} diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/textmatch/textmatch.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/textmatch/textmatch.go deleted file mode 100644 index 135f95740..000000000 --- a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/textmatch/textmatch.go +++ /dev/null @@ -1,26 +0,0 @@ -package textmatch - -import "regexp" - -// Pattern is a compiled regular expression. -type Pattern interface { - MatchString(s string) bool - Match(b []byte) bool -} - -// Compile parses a regular expression and returns a compiled -// pattern that can match inputs described by the regexp. -// -// Semantically it's close to the regexp.Compile, but -// it does recognize some common patterns and creates -// a more optimized matcher for them. -func Compile(re string) (Pattern, error) { - return compile(re) -} - -// IsRegexp reports whether p is implemented using regexp. -// False means that the underlying matcher is something optimized. -func IsRegexp(p Pattern) bool { - _, ok := p.(*regexp.Regexp) - return ok -} diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/typematch/patternop_string.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/typematch/patternop_string.go deleted file mode 100644 index 672b6b45b..000000000 --- a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/typematch/patternop_string.go +++ /dev/null @@ -1,36 +0,0 @@ -// Code generated by "stringer -type=patternOp"; DO NOT EDIT. - -package typematch - -import "strconv" - -func _() { - // An "invalid array index" compiler error signifies that the constant values have changed. - // Re-run the stringer command to generate them again. 
- var x [1]struct{} - _ = x[opBuiltinType-0] - _ = x[opPointer-1] - _ = x[opVar-2] - _ = x[opVarSeq-3] - _ = x[opSlice-4] - _ = x[opArray-5] - _ = x[opMap-6] - _ = x[opChan-7] - _ = x[opFuncNoSeq-8] - _ = x[opFunc-9] - _ = x[opStructNoSeq-10] - _ = x[opStruct-11] - _ = x[opAnyInterface-12] - _ = x[opNamed-13] -} - -const _patternOp_name = "opBuiltinTypeopPointeropVaropVarSeqopSliceopArrayopMapopChanopFuncNoSeqopFuncopStructNoSeqopStructopAnyInterfaceopNamed" - -var _patternOp_index = [...]uint8{0, 13, 22, 27, 35, 42, 49, 54, 60, 71, 77, 90, 98, 112, 119} - -func (i patternOp) String() string { - if i < 0 || i >= patternOp(len(_patternOp_index)-1) { - return "patternOp(" + strconv.FormatInt(int64(i), 10) + ")" - } - return _patternOp_name[_patternOp_index[i]:_patternOp_index[i+1]] -} diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/typematch/typematch.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/typematch/typematch.go deleted file mode 100644 index 4b740b207..000000000 --- a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/typematch/typematch.go +++ /dev/null @@ -1,607 +0,0 @@ -package typematch - -import ( - "fmt" - "go/ast" - "go/parser" - "go/token" - "go/types" - "strconv" - "strings" - - "github.com/quasilyte/go-ruleguard/internal/xtypes" -) - -//go:generate stringer -type=patternOp -type patternOp int - -const ( - opBuiltinType patternOp = iota - opPointer - opVar - opVarSeq - opSlice - opArray - opMap - opChan - opFuncNoSeq - opFunc - opStructNoSeq - opStruct - opAnyInterface - opNamed -) - -type MatcherState struct { - typeMatches map[string]types.Type - int64Matches map[string]int64 -} - -func NewMatcherState() *MatcherState { - return &MatcherState{ - typeMatches: map[string]types.Type{}, - int64Matches: map[string]int64{}, - } -} - -func (state *MatcherState) reset() { - if len(state.int64Matches) != 0 { - for k := range state.int64Matches { - delete(state.int64Matches, k) - } - } - if len(state.typeMatches) != 0 { - for k := range state.typeMatches { - delete(state.typeMatches, k) - } - } -} - -type Pattern struct { - root *pattern -} - -type pattern struct { - value interface{} - op patternOp - subs []*pattern -} - -func (pat pattern) String() string { - if len(pat.subs) == 0 { - return fmt.Sprintf("<%s %#v>", pat.op, pat.value) - } - parts := make([]string, len(pat.subs)) - for i, sub := range pat.subs { - parts[i] = sub.String() - } - return fmt.Sprintf("<%s %#v (%s)>", pat.op, pat.value, strings.Join(parts, ", ")) -} - -type ImportsTab struct { - imports []map[string]string -} - -func NewImportsTab(initial map[string]string) *ImportsTab { - return &ImportsTab{imports: []map[string]string{initial}} -} - -func (itab *ImportsTab) Lookup(pkgName string) (string, bool) { - for i := len(itab.imports) - 1; i >= 0; i-- { - pkgPath, ok := itab.imports[i][pkgName] - if ok { - return pkgPath, true - } - } - return "", false -} - -func (itab *ImportsTab) Load(pkgName, pkgPath string) { - itab.imports[len(itab.imports)-1][pkgName] = pkgPath -} - -func (itab *ImportsTab) EnterScope() { - itab.imports = append(itab.imports, map[string]string{}) -} - -func (itab *ImportsTab) LeaveScope() { - itab.imports = itab.imports[:len(itab.imports)-1] -} - -type Context struct { - Itab *ImportsTab -} - -const ( - varPrefix = `ᐸvarᐳ` - varSeqPrefix = `ᐸvar_seqᐳ` -) - -func Parse(ctx *Context, s string) (*Pattern, error) { - noDollars := strings.ReplaceAll(s, "$*", varSeqPrefix) - noDollars = strings.ReplaceAll(noDollars, "$", varPrefix) - n, err := 
parser.ParseExpr(noDollars) - if err != nil { - return nil, err - } - root := parseExpr(ctx, n) - if root == nil { - return nil, fmt.Errorf("can't convert %s type expression", s) - } - p := &Pattern{ - root: root, - } - return p, nil -} - -var ( - builtinTypeByName = map[string]types.Type{ - "bool": types.Typ[types.Bool], - "int": types.Typ[types.Int], - "int8": types.Typ[types.Int8], - "int16": types.Typ[types.Int16], - "int32": types.Typ[types.Int32], - "int64": types.Typ[types.Int64], - "uint": types.Typ[types.Uint], - "uint8": types.Typ[types.Uint8], - "uint16": types.Typ[types.Uint16], - "uint32": types.Typ[types.Uint32], - "uint64": types.Typ[types.Uint64], - "uintptr": types.Typ[types.Uintptr], - "float32": types.Typ[types.Float32], - "float64": types.Typ[types.Float64], - "complex64": types.Typ[types.Complex64], - "complex128": types.Typ[types.Complex128], - "string": types.Typ[types.String], - - "error": types.Universe.Lookup("error").Type(), - - // Aliases. - "byte": types.Typ[types.Uint8], - "rune": types.Typ[types.Int32], - } - - efaceType = types.NewInterfaceType(nil, nil) -) - -func parseExpr(ctx *Context, e ast.Expr) *pattern { - switch e := e.(type) { - case *ast.Ident: - basic, ok := builtinTypeByName[e.Name] - if ok { - return &pattern{op: opBuiltinType, value: basic} - } - if strings.HasPrefix(e.Name, varPrefix) { - name := strings.TrimPrefix(e.Name, varPrefix) - return &pattern{op: opVar, value: name} - } - if strings.HasPrefix(e.Name, varSeqPrefix) { - name := strings.TrimPrefix(e.Name, varSeqPrefix) - // Only unnamed seq are supported right now. - if name == "_" { - return &pattern{op: opVarSeq, value: name} - } - } - - case *ast.SelectorExpr: - pkg, ok := e.X.(*ast.Ident) - if !ok { - return nil - } - if pkg.Name == "unsafe" && e.Sel.Name == "Pointer" { - return &pattern{op: opBuiltinType, value: types.Typ[types.UnsafePointer]} - } - pkgPath, ok := ctx.Itab.Lookup(pkg.Name) - if !ok { - return nil - } - return &pattern{op: opNamed, value: [2]string{pkgPath, e.Sel.Name}} - - case *ast.StarExpr: - elem := parseExpr(ctx, e.X) - if elem == nil { - return nil - } - return &pattern{op: opPointer, subs: []*pattern{elem}} - - case *ast.ArrayType: - elem := parseExpr(ctx, e.Elt) - if elem == nil { - return nil - } - if e.Len == nil { - return &pattern{ - op: opSlice, - subs: []*pattern{elem}, - } - } - if id, ok := e.Len.(*ast.Ident); ok && strings.HasPrefix(id.Name, varPrefix) { - name := strings.TrimPrefix(id.Name, varPrefix) - return &pattern{ - op: opArray, - value: name, - subs: []*pattern{elem}, - } - } - lit, ok := e.Len.(*ast.BasicLit) - if !ok || lit.Kind != token.INT { - return nil - } - length, err := strconv.ParseInt(lit.Value, 10, 64) - if err != nil { - return nil - } - return &pattern{ - op: opArray, - value: length, - subs: []*pattern{elem}, - } - - case *ast.MapType: - keyType := parseExpr(ctx, e.Key) - if keyType == nil { - return nil - } - valType := parseExpr(ctx, e.Value) - if valType == nil { - return nil - } - return &pattern{ - op: opMap, - subs: []*pattern{keyType, valType}, - } - - case *ast.ChanType: - valType := parseExpr(ctx, e.Value) - if valType == nil { - return nil - } - var dir types.ChanDir - switch { - case e.Dir&ast.SEND != 0 && e.Dir&ast.RECV != 0: - dir = types.SendRecv - case e.Dir&ast.SEND != 0: - dir = types.SendOnly - case e.Dir&ast.RECV != 0: - dir = types.RecvOnly - default: - return nil - } - return &pattern{ - op: opChan, - value: dir, - subs: []*pattern{valType}, - } - - case *ast.ParenExpr: - return parseExpr(ctx, e.X) - - case 
*ast.FuncType: - hasSeq := false - var params []*pattern - var results []*pattern - if e.Params != nil { - for _, field := range e.Params.List { - p := parseExpr(ctx, field.Type) - if p == nil { - return nil - } - if len(field.Names) != 0 { - return nil - } - if p.op == opVarSeq { - hasSeq = true - } - params = append(params, p) - } - } - if e.Results != nil { - for _, field := range e.Results.List { - p := parseExpr(ctx, field.Type) - if p == nil { - return nil - } - if len(field.Names) != 0 { - return nil - } - if p.op == opVarSeq { - hasSeq = true - } - results = append(results, p) - } - } - op := opFuncNoSeq - if hasSeq { - op = opFunc - } - return &pattern{ - op: op, - value: len(params), - subs: append(params, results...), - } - - case *ast.StructType: - hasSeq := false - members := make([]*pattern, 0, len(e.Fields.List)) - for _, field := range e.Fields.List { - p := parseExpr(ctx, field.Type) - if p == nil { - return nil - } - if len(field.Names) != 0 { - return nil - } - if p.op == opVarSeq { - hasSeq = true - } - members = append(members, p) - } - op := opStructNoSeq - if hasSeq { - op = opStruct - } - return &pattern{ - op: op, - subs: members, - } - - case *ast.InterfaceType: - if len(e.Methods.List) == 0 { - return &pattern{op: opBuiltinType, value: efaceType} - } - if len(e.Methods.List) == 1 { - p := parseExpr(ctx, e.Methods.List[0].Type) - if p == nil { - return nil - } - if p.op != opVarSeq { - return nil - } - return &pattern{op: opAnyInterface} - } - } - - return nil -} - -// MatchIdentical returns true if the go typ matches pattern p. -func (p *Pattern) MatchIdentical(state *MatcherState, typ types.Type) bool { - state.reset() - return p.matchIdentical(state, p.root, typ) -} - -func (p *Pattern) matchIdenticalFielder(state *MatcherState, subs []*pattern, f fielder) bool { - // TODO: do backtracking. - - numFields := f.NumFields() - fieldsMatched := 0 - - if len(subs) == 0 && numFields != 0 { - return false - } - - matchAny := false - - i := 0 - for i < len(subs) { - pat := subs[i] - - if pat.op == opVarSeq { - matchAny = true - } - - fieldsLeft := numFields - fieldsMatched - if matchAny { - switch { - // "Nothing left to match" stop condition. - case fieldsLeft == 0: - matchAny = false - i++ - // Lookahead for non-greedy matching. 
- case i+1 < len(subs) && p.matchIdentical(state, subs[i+1], f.Field(fieldsMatched).Type()): - matchAny = false - i += 2 - fieldsMatched++ - default: - fieldsMatched++ - } - continue - } - - if fieldsLeft == 0 || !p.matchIdentical(state, pat, f.Field(fieldsMatched).Type()) { - return false - } - i++ - fieldsMatched++ - } - - return numFields == fieldsMatched -} - -func (p *Pattern) matchIdentical(state *MatcherState, sub *pattern, typ types.Type) bool { - switch sub.op { - case opVar: - name := sub.value.(string) - if name == "_" { - return true - } - y, ok := state.typeMatches[name] - if !ok { - state.typeMatches[name] = typ - return true - } - if y == nil { - return typ == nil - } - return xtypes.Identical(typ, y) - - case opBuiltinType: - return xtypes.Identical(typ, sub.value.(types.Type)) - - case opPointer: - typ, ok := typ.(*types.Pointer) - if !ok { - return false - } - return p.matchIdentical(state, sub.subs[0], typ.Elem()) - - case opSlice: - typ, ok := typ.(*types.Slice) - if !ok { - return false - } - return p.matchIdentical(state, sub.subs[0], typ.Elem()) - - case opArray: - typ, ok := typ.(*types.Array) - if !ok { - return false - } - var wantLen int64 - switch v := sub.value.(type) { - case string: - if v == "_" { - wantLen = typ.Len() - break - } - length, ok := state.int64Matches[v] - if ok { - wantLen = length - } else { - state.int64Matches[v] = typ.Len() - wantLen = typ.Len() - } - case int64: - wantLen = v - } - return wantLen == typ.Len() && p.matchIdentical(state, sub.subs[0], typ.Elem()) - - case opMap: - typ, ok := typ.(*types.Map) - if !ok { - return false - } - return p.matchIdentical(state, sub.subs[0], typ.Key()) && - p.matchIdentical(state, sub.subs[1], typ.Elem()) - - case opChan: - typ, ok := typ.(*types.Chan) - if !ok { - return false - } - dir := sub.value.(types.ChanDir) - return dir == typ.Dir() && p.matchIdentical(state, sub.subs[0], typ.Elem()) - - case opNamed: - typ, ok := typ.(*types.Named) - if !ok { - return false - } - obj := typ.Obj() - pkg := obj.Pkg() - // pkg can be nil for builtin named types. - // There is no point in checking anything else as we never - // generate the opNamed for such types. 
- if pkg == nil { - return false - } - pkgPath := sub.value.([2]string)[0] - typeName := sub.value.([2]string)[1] - if typeName != obj.Name() { - return false - } - objPath := obj.Pkg().Path() - if vendorPos := strings.Index(objPath, "/vendor/"); vendorPos != -1 { - objPath = objPath[vendorPos+len("/vendor/"):] - } - return objPath == pkgPath - - case opFuncNoSeq: - typ, ok := typ.(*types.Signature) - if !ok { - return false - } - numParams := sub.value.(int) - params := sub.subs[:numParams] - results := sub.subs[numParams:] - if typ.Params().Len() != len(params) { - return false - } - if typ.Results().Len() != len(results) { - return false - } - for i := 0; i < typ.Params().Len(); i++ { - if !p.matchIdentical(state, params[i], typ.Params().At(i).Type()) { - return false - } - } - for i := 0; i < typ.Results().Len(); i++ { - if !p.matchIdentical(state, results[i], typ.Results().At(i).Type()) { - return false - } - } - return true - - case opFunc: - typ, ok := typ.(*types.Signature) - if !ok { - return false - } - numParams := sub.value.(int) - params := sub.subs[:numParams] - results := sub.subs[numParams:] - adapter := tupleFielder{x: typ.Params()} - if !p.matchIdenticalFielder(state, params, &adapter) { - return false - } - adapter.x = typ.Results() - if !p.matchIdenticalFielder(state, results, &adapter) { - return false - } - return true - - case opStructNoSeq: - typ, ok := typ.(*types.Struct) - if !ok { - return false - } - if typ.NumFields() != len(sub.subs) { - return false - } - for i, member := range sub.subs { - if !p.matchIdentical(state, member, typ.Field(i).Type()) { - return false - } - } - return true - - case opStruct: - typ, ok := typ.(*types.Struct) - if !ok { - return false - } - if !p.matchIdenticalFielder(state, sub.subs, typ) { - return false - } - return true - - case opAnyInterface: - _, ok := typ.(*types.Interface) - return ok - - default: - return false - } -} - -type fielder interface { - Field(i int) *types.Var - NumFields() int -} - -type tupleFielder struct { - x *types.Tuple -} - -func (tup *tupleFielder) Field(i int) *types.Var { return tup.x.At(i) } -func (tup *tupleFielder) NumFields() int { return tup.x.Len() } diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/utils.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/utils.go deleted file mode 100644 index 6403d91cd..000000000 --- a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/utils.go +++ /dev/null @@ -1,304 +0,0 @@ -package ruleguard - -import ( - "go/ast" - "go/constant" - "go/parser" - "go/token" - "go/types" - "regexp/syntax" - "strconv" - "strings" - - "golang.org/x/exp/typeparams" -) - -var invalidType = types.Typ[types.Invalid] - -func regexpHasCaptureGroups(pattern string) bool { - // regexp.Compile() uses syntax.Perl flags, so - // we use the same flags here. - re, err := syntax.Parse(pattern, syntax.Perl) - if err != nil { - return true // true is more conservative than false - } - - found := false - - var walkRegexp func(*syntax.Regexp) - walkRegexp = func(re *syntax.Regexp) { - if found { - return - } - // OpCapture handles both named and unnamed capture groups. - if re.Op == syntax.OpCapture { - found = true - return - } - for _, sub := range re.Sub { - walkRegexp(sub) - } - } - walkRegexp(re) - - return found -} - -func findDependency(pkg *types.Package, path string) *types.Package { - if pkg.Path() == path { - return pkg - } - // It looks like indirect dependencies are always incomplete? - // If it's true, then we don't have to recurse here. 
- for _, imported := range pkg.Imports() { - if dep := findDependency(imported, path); dep != nil && dep.Complete() { - return dep - } - } - return nil -} - -var typeByName = map[string]types.Type{ - // Predeclared types. - `error`: types.Universe.Lookup("error").Type(), - `bool`: types.Typ[types.Bool], - `int`: types.Typ[types.Int], - `int8`: types.Typ[types.Int8], - `int16`: types.Typ[types.Int16], - `int32`: types.Typ[types.Int32], - `int64`: types.Typ[types.Int64], - `uint`: types.Typ[types.Uint], - `uint8`: types.Typ[types.Uint8], - `uint16`: types.Typ[types.Uint16], - `uint32`: types.Typ[types.Uint32], - `uint64`: types.Typ[types.Uint64], - `uintptr`: types.Typ[types.Uintptr], - `string`: types.Typ[types.String], - `float32`: types.Typ[types.Float32], - `float64`: types.Typ[types.Float64], - `complex64`: types.Typ[types.Complex64], - `complex128`: types.Typ[types.Complex128], - - // Predeclared aliases (provided for convenience). - `byte`: types.Typ[types.Uint8], - `rune`: types.Typ[types.Int32], -} - -func typeFromString(s string) (types.Type, error) { - s = strings.ReplaceAll(s, "?", "__any") - - n, err := parser.ParseExpr(s) - if err != nil { - return nil, err - } - return typeFromNode(n), nil -} - -func typeFromNode(e ast.Expr) types.Type { - switch e := e.(type) { - case *ast.Ident: - typ, ok := typeByName[e.Name] - if ok { - return typ - } - - case *ast.ArrayType: - elem := typeFromNode(e.Elt) - if elem == nil { - return nil - } - if e.Len == nil { - return types.NewSlice(elem) - } - lit, ok := e.Len.(*ast.BasicLit) - if !ok || lit.Kind != token.INT { - return nil - } - length, err := strconv.Atoi(lit.Value) - if err != nil { - return nil - } - return types.NewArray(elem, int64(length)) - - case *ast.MapType: - keyType := typeFromNode(e.Key) - if keyType == nil { - return nil - } - valType := typeFromNode(e.Value) - if valType == nil { - return nil - } - return types.NewMap(keyType, valType) - - case *ast.StarExpr: - typ := typeFromNode(e.X) - if typ != nil { - return types.NewPointer(typ) - } - - case *ast.ParenExpr: - return typeFromNode(e.X) - - case *ast.InterfaceType: - if len(e.Methods.List) == 0 { - return types.NewInterfaceType(nil, nil) - } - } - - return nil -} - -func intValueOf(info *types.Info, expr ast.Expr) constant.Value { - tv := info.Types[expr] - if tv.Value == nil { - return nil - } - if tv.Value.Kind() != constant.Int { - return nil - } - return tv.Value -} - -// isPure reports whether expr is a softly safe expression and contains -// no significant side-effects. As opposed to strictly safe expressions, -// soft safe expressions permit some forms of side-effects, like -// panic possibility during indexing or nil pointer dereference. -// -// Uses types info to determine type conversion expressions that -// are the only permitted kinds of call expressions. -// Note that is does not check whether called function really -// has any side effects. The analysis is very conservative. -func isPure(info *types.Info, expr ast.Expr) bool { - // This list switch is not comprehensive and uses - // whitelist to be on the conservative side. - // Can be extended as needed. 
- - switch expr := expr.(type) { - case *ast.StarExpr: - return isPure(info, expr.X) - case *ast.BinaryExpr: - return isPure(info, expr.X) && - isPure(info, expr.Y) - case *ast.UnaryExpr: - return expr.Op != token.ARROW && - isPure(info, expr.X) - case *ast.BasicLit, *ast.Ident, *ast.FuncLit: - return true - case *ast.IndexExpr: - return isPure(info, expr.X) && - isPure(info, expr.Index) - case *ast.SelectorExpr: - return isPure(info, expr.X) - case *ast.ParenExpr: - return isPure(info, expr.X) - case *ast.CompositeLit: - return isPureList(info, expr.Elts) - case *ast.CallExpr: - return isTypeExpr(info, expr.Fun) && isPureList(info, expr.Args) - - default: - return false - } -} - -// isPureList reports whether every expr in list is safe. -// -// See isPure. -func isPureList(info *types.Info, list []ast.Expr) bool { - for _, expr := range list { - if !isPure(info, expr) { - return false - } - } - return true -} - -func isAddressable(info *types.Info, expr ast.Expr) bool { - tv, ok := info.Types[expr] - return ok && tv.Addressable() -} - -func isConstant(info *types.Info, expr ast.Expr) bool { - tv, ok := info.Types[expr] - return ok && tv.Value != nil -} - -func isConstantSlice(info *types.Info, expr ast.Expr) bool { - switch expr := expr.(type) { - case *ast.CallExpr: - // Matches []byte("string"). - if len(expr.Args) != 1 { - return false - } - lit, ok := expr.Args[0].(*ast.BasicLit) - if !ok || lit.Kind != token.STRING { - return false - } - typ, ok := info.TypeOf(expr.Fun).(*types.Slice) - if !ok { - return false - } - basicType, ok := typ.Elem().(*types.Basic) - return ok && basicType.Kind() == types.Uint8 - - case *ast.CompositeLit: - for _, elt := range expr.Elts { - if !isConstant(info, elt) { - return false - } - } - return true - - default: - return false - } -} - -// isTypeExpr reports whether x represents a type expression. -// -// Type expression does not evaluate to any run time value, -// but rather describes a type that is used inside Go expression. -// -// For example, (*T)(v) is a CallExpr that "calls" (*T). -// (*T) is a type expression that tells Go compiler type v should be converted to. -func isTypeExpr(info *types.Info, x ast.Expr) bool { - switch x := x.(type) { - case *ast.StarExpr: - return isTypeExpr(info, x.X) - case *ast.ParenExpr: - return isTypeExpr(info, x.X) - case *ast.SelectorExpr: - return isTypeExpr(info, x.Sel) - - case *ast.Ident: - // Identifier may be a type expression if object - // it refers to is a type name. 
- _, ok := info.ObjectOf(x).(*types.TypeName) - return ok - - case *ast.FuncType, *ast.StructType, *ast.InterfaceType, *ast.ArrayType, *ast.MapType, *ast.ChanType: - return true - - default: - return false - } -} - -func identOf(e ast.Expr) *ast.Ident { - switch e := e.(type) { - case *ast.ParenExpr: - return identOf(e.X) - case *ast.Ident: - return e - case *ast.SelectorExpr: - return e.Sel - default: - return nil - } -} - -func isTypeParam(typ types.Type) bool { - _, ok := typ.(*typeparams.TypeParam) - return ok -} diff --git a/vendor/github.com/quasilyte/gogrep/.gitignore b/vendor/github.com/quasilyte/gogrep/.gitignore deleted file mode 100644 index ec560f1c9..000000000 --- a/vendor/github.com/quasilyte/gogrep/.gitignore +++ /dev/null @@ -1,4 +0,0 @@ -.idea -.vscode -coverage.txt -bin diff --git a/vendor/github.com/quasilyte/gogrep/.golangci.yml b/vendor/github.com/quasilyte/gogrep/.golangci.yml deleted file mode 100644 index 16d03c54d..000000000 --- a/vendor/github.com/quasilyte/gogrep/.golangci.yml +++ /dev/null @@ -1,49 +0,0 @@ -{ - "run": { - # timeout for analysis, e.g. 30s, 5m, default is 1m - "deadline": "3m", - }, - "fast": false, - "linters": { - "enable": [ - "deadcode", - "errcheck", - "gas", - "gocritic", - "gofmt", - "goimports", - "revive", - "govet", - "gosimple", - "ineffassign", - "megacheck", - "misspell", - "nakedret", - "staticcheck", - "structcheck", - "typecheck", - "unconvert", - "unused", - "varcheck", - ], - }, - "disable": [ - "depguard", - "dupl", - "gocyclo", - "interfacer", - "lll", - "maligned", - "prealloc", - ], - "linters-settings": { - "gocritic": { - "enabled-tags": [ - "style", - "diagnostic", - "performance", - "experimental", - ], - }, - }, -} diff --git a/vendor/github.com/quasilyte/gogrep/LICENSE b/vendor/github.com/quasilyte/gogrep/LICENSE deleted file mode 100644 index 575b56ae1..000000000 --- a/vendor/github.com/quasilyte/gogrep/LICENSE +++ /dev/null @@ -1,33 +0,0 @@ -BSD 3-Clause License - -Copyright (c) 2021, Iskander (Alex) Sharipov - -Originally based on the Daniel Martí code | Copyright (c) 2017, Daniel Martí. All rights reserved. -See https://github.com/mvdan/gogrep - -All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are met: - -1. Redistributions of source code must retain the above copyright notice, this - list of conditions and the following disclaimer. - -2. Redistributions in binary form must reproduce the above copyright notice, - this list of conditions and the following disclaimer in the documentation - and/or other materials provided with the distribution. - -3. Neither the name of the copyright holder nor the names of its - contributors may be used to endorse or promote products derived from - this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" -AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE -IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE -FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL -DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR -SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, -OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/vendor/github.com/quasilyte/gogrep/Makefile b/vendor/github.com/quasilyte/gogrep/Makefile deleted file mode 100644 index 01dd2192e..000000000 --- a/vendor/github.com/quasilyte/gogrep/Makefile +++ /dev/null @@ -1,19 +0,0 @@ -GOPATH_DIR=`go env GOPATH` - -test: - go test -count 2 -coverpkg=./... -coverprofile=coverage.txt -covermode=atomic ./... - go test -bench=. ./... - @echo "everything is OK" - -ci-lint: - curl -sSfL https://raw.githubusercontent.com/golangci/golangci-lint/master/install.sh | sh -s -- -b $(GOPATH_DIR)/bin v1.45.2 - $(GOPATH_DIR)/bin/golangci-lint run ./... - go install github.com/quasilyte/go-consistent@master - $(GOPATH_DIR)/bin/go-consistent . ./internal/... ./nodetag/... ./filters/... - @echo "everything is OK" - -lint: - golangci-lint run ./... - @echo "everything is OK" - -.PHONY: ci-lint lint test diff --git a/vendor/github.com/quasilyte/gogrep/README.md b/vendor/github.com/quasilyte/gogrep/README.md deleted file mode 100644 index ecf0dc4c7..000000000 --- a/vendor/github.com/quasilyte/gogrep/README.md +++ /dev/null @@ -1,41 +0,0 @@ -![logo](https://github.com/quasilyte/vscode-gogrep/blob/master/docs/logo.png?raw=true) - -![Build Status](https://github.com/quasilyte/gogrep/workflows/Go/badge.svg) -[![PkgGoDev](https://pkg.go.dev/badge/mod/github.com/quasilyte/gogrep)](https://pkg.go.dev/github.com/quasilyte/gogrep) -[![Go Report Card](https://goreportcard.com/badge/github.com/quasilyte/gogrep)](https://goreportcard.com/report/github.com/quasilyte/gogrep) -![Code Coverage](https://codecov.io/gh/quasilyte/gogrep/branch/master/graph/badge.svg) - -# gogrep - -This is an attempt to move a modified [gogrep](https://github.com/mvdan/gogrep) from the [go-ruleguard](https://github.com/quasilyte/go-ruleguard) project, so it can be used independently. - -This repository contains two Go modules. One for the gogrep library and the second one for the command-line tool. - -## gogrep as a library - -To get a gogrep library module, install the root Go module. - -```bash -$ go get github.com/quasilyte/gogrep -``` - -## gogrep as a command-line utility - -To get a gogrep command-line tool, install the `cmd/gogrep` Go submodule. - -```bash -$ go install github.com/quasilyte/gogrep/cmd/gogrep@latest -``` - -See [docs/gogrep_cli.md](_docs/gogrep_cli.md) to learn how to use it. - -## Used by - -A gogrep library is used by: - -* [go-ruleguard](https://github.com/quasilyte/go-ruleguard) -* [gocorpus](https://github.com/quasilyte/gocorpus) - -## Acknowledgements - -The original gogrep is written by the [Daniel Martí](https://github.com/mvdan). 
diff --git a/vendor/github.com/quasilyte/gogrep/compile.go b/vendor/github.com/quasilyte/gogrep/compile.go deleted file mode 100644 index 31b60dfa5..000000000 --- a/vendor/github.com/quasilyte/gogrep/compile.go +++ /dev/null @@ -1,1264 +0,0 @@ -package gogrep - -import ( - "fmt" - "go/ast" - "go/token" - - "github.com/quasilyte/gogrep/internal/stdinfo" - "golang.org/x/exp/typeparams" -) - -type compileError string - -func (e compileError) Error() string { return string(e) } - -type compiler struct { - config CompileConfig - - prog *program - stringIndexes map[string]uint8 - ifaceIndexes map[interface{}]uint8 - - info *PatternInfo - - insideStmtList bool -} - -func (c *compiler) Compile(root ast.Node, info *PatternInfo) (p *program, err error) { - defer func() { - if err != nil { - return - } - rv := recover() - if rv == nil { - return - } - if parseErr, ok := rv.(compileError); ok { - err = parseErr - return - } - panic(rv) // Not our panic - }() - - c.info = info - c.prog = &program{ - insts: make([]instruction, 0, 8), - } - c.stringIndexes = make(map[string]uint8) - c.ifaceIndexes = make(map[interface{}]uint8) - - c.compileNode(root) - - if len(c.prog.insts) == 0 { - return nil, c.errorf(root, "0 instructions generated") - } - - return c.prog, nil -} - -func (c *compiler) errorf(n ast.Node, format string, args ...interface{}) compileError { - loc := c.config.Fset.Position(n.Pos()) - message := fmt.Sprintf("%s:%d: %s", loc.Filename, loc.Line, fmt.Sprintf(format, args...)) - return compileError(message) -} - -func (c *compiler) toUint8(n ast.Node, v int) uint8 { - if !fitsUint8(v) { - panic(c.errorf(n, "implementation error: %v can't be converted to uint8", v)) - } - return uint8(v) -} - -func (c *compiler) internVar(n ast.Node, s string) uint8 { - c.info.Vars[s] = struct{}{} - index := c.internString(n, s) - return index -} - -func (c *compiler) internString(n ast.Node, s string) uint8 { - if index, ok := c.stringIndexes[s]; ok { - return index - } - index := len(c.prog.strings) - if !fitsUint8(index) { - panic(c.errorf(n, "implementation limitation: too many string values")) - } - c.stringIndexes[s] = uint8(index) - c.prog.strings = append(c.prog.strings, s) - return uint8(index) -} - -func (c *compiler) internIface(n ast.Node, v interface{}) uint8 { - if index, ok := c.ifaceIndexes[v]; ok { - return index - } - index := len(c.prog.ifaces) - if !fitsUint8(index) { - panic(c.errorf(n, "implementation limitation: too many values")) - } - c.ifaceIndexes[v] = uint8(index) - c.prog.ifaces = append(c.prog.ifaces, v) - return uint8(index) -} - -func (c *compiler) emitInst(inst instruction) { - c.prog.insts = append(c.prog.insts, inst) -} - -func (c *compiler) emitInstOp(op operation) { - c.emitInst(instruction{op: op}) -} - -func (c *compiler) compileNode(n ast.Node) { - switch n := n.(type) { - case *ast.File: - c.compileFile(n) - case ast.Decl: - c.compileDecl(n) - case ast.Expr: - c.compileExpr(n) - case ast.Stmt: - c.compileStmt(n) - case *ast.ValueSpec: - c.compileValueSpec(n) - case *rangeClause: - c.compileRangeClause(n) - case *rangeHeader: - c.compileRangeHeader(n) - case *NodeSlice: - switch n.Kind { - case StmtNodeSlice: - c.compileStmtSlice(n.stmtSlice) - case DeclNodeSlice: - c.compileDeclSlice(n.declSlice) - case ExprNodeSlice: - c.compileExprSlice(n.exprSlice) - } - default: - panic(c.errorf(n, "compileNode: unexpected %T", n)) - } -} - -func (c *compiler) compileOptStmt(n ast.Stmt) { - if exprStmt, ok := n.(*ast.ExprStmt); ok { - if ident, ok := exprStmt.X.(*ast.Ident); ok && 
isWildName(ident.Name) { - c.compileWildIdent(ident, true) - return - } - } - c.compileStmt(n) -} - -func (c *compiler) compileOptExpr(n ast.Expr) { - if ident, ok := n.(*ast.Ident); ok && isWildName(ident.Name) { - c.compileWildIdent(ident, true) - return - } - c.compileExpr(n) -} - -func (c *compiler) compileOptFieldList(n *ast.FieldList) { - if len(n.List) == 1 { - if ident, ok := n.List[0].Type.(*ast.Ident); ok && isWildName(ident.Name) && len(n.List[0].Names) == 0 { - // `func (...) $*result` - result could be anything - // `func (...) $result` - result is a field list of 1 element - info := decodeWildName(ident.Name) - switch { - case info.Seq: - c.compileWildIdent(ident, true) - case info.Name == "_": - c.emitInstOp(opFieldNode) - default: - c.emitInst(instruction{ - op: opNamedFieldNode, - valueIndex: c.internVar(n, info.Name), - }) - } - return - } - } - c.compileFieldList(n) -} - -func (c *compiler) compileFieldList(n *ast.FieldList) { - c.emitInstOp(opFieldList) - for _, x := range n.List { - c.compileField(x) - } - c.emitInstOp(opEnd) -} - -func (c *compiler) compileField(n *ast.Field) { - switch { - case len(n.Names) == 0: - if ident, ok := n.Type.(*ast.Ident); ok && isWildName(ident.Name) { - c.compileWildIdent(ident, false) - return - } - c.emitInstOp(opUnnamedField) - case len(n.Names) == 1: - name := n.Names[0] - if isWildName(name.Name) { - c.emitInstOp(opField) - c.compileWildIdent(name, false) - } else { - c.emitInst(instruction{ - op: opSimpleField, - valueIndex: c.internString(name, name.Name), - }) - } - default: - c.emitInstOp(opMultiField) - for _, name := range n.Names { - c.compileIdent(name) - } - c.emitInstOp(opEnd) - } - c.compileTypeExpr(n.Type) -} - -func (c *compiler) compileValueSpec(spec *ast.ValueSpec) { - switch { - case spec.Type == nil && len(spec.Values) == 0: - if isWildName(spec.Names[0].String()) { - c.compileIdent(spec.Names[0]) - return - } - c.emitInstOp(opValueSpec) - case spec.Type == nil: - c.emitInstOp(opValueInitSpec) - case len(spec.Values) == 0: - c.emitInstOp(opTypedValueSpec) - default: - c.emitInstOp(opTypedValueInitSpec) - } - for _, name := range spec.Names { - c.compileIdent(name) - } - c.emitInstOp(opEnd) - if spec.Type != nil { - c.compileOptTypeExpr(spec.Type) - } - if len(spec.Values) != 0 { - for _, v := range spec.Values { - c.compileExpr(v) - } - c.emitInstOp(opEnd) - } -} - -func (c *compiler) compileTypeSpec(spec *ast.TypeSpec) { - // Generic types can't have aliases, so we use that fact here. 
- typeParams := typeparams.ForTypeSpec(spec) - if !spec.Assign.IsValid() && !isWildName(spec.Name.Name) && typeParams == nil { - c.emitInst(instruction{ - op: opSimpleTypeSpec, - valueIndex: c.internString(spec.Name, spec.Name.Name), - }) - c.compileTypeExpr(spec.Type) - return - } - if typeParams != nil { - c.emitInstOp(opGenericTypeSpec) - c.compileIdent(spec.Name) - c.compileFieldList(typeParams) - c.compileTypeExpr(spec.Type) - return - } - c.emitInstOp(pickOp(spec.Assign.IsValid(), opTypeAliasSpec, opTypeSpec)) - c.compileIdent(spec.Name) - c.compileTypeExpr(spec.Type) -} - -func (c *compiler) compileFile(n *ast.File) { - if len(n.Imports) == 0 && len(n.Decls) == 0 { - c.emitInstOp(opEmptyPackage) - c.compileIdent(n.Name) - return - } - - panic(c.errorf(n, "compileFile: unsupported file pattern")) -} - -func (c *compiler) compileDecl(n ast.Decl) { - switch n := n.(type) { - case *ast.FuncDecl: - c.compileFuncDecl(n) - case *ast.GenDecl: - c.compileGenDecl(n) - - default: - panic(c.errorf(n, "compileDecl: unexpected %T", n)) - } -} - -func (c *compiler) compileFuncDecl(n *ast.FuncDecl) { - if n.Recv == nil { - if !isWildName(n.Name.Name) && typeparams.ForFuncType(n.Type) == nil && n.Body != nil { - // Generic functions can't live without body, so there is no generic proto decls. - c.emitInst(instruction{ - op: opSimpleFuncDecl, - valueIndex: c.internString(n.Name, n.Name.Name), - }) - c.compileFuncType(n.Type) - c.compileBlockStmt(n.Body) - return - } - c.emitInstOp(pickOp(n.Body == nil, opFuncProtoDecl, opFuncDecl)) - } else { - c.emitInstOp(pickOp(n.Body == nil, opMethodProtoDecl, opMethodDecl)) - } - - if n.Recv != nil { - c.compileFieldList(n.Recv) - } - c.compileIdent(n.Name) - c.compileFuncType(n.Type) - if n.Body != nil { - c.compileBlockStmt(n.Body) - } -} - -func (c *compiler) compileGenDecl(n *ast.GenDecl) { - if c.insideStmtList { - c.emitInstOp(opDeclStmt) - } - - switch n.Tok { - case token.CONST, token.VAR: - c.emitInstOp(pickOp(n.Tok == token.CONST, opConstDecl, opVarDecl)) - for _, spec := range n.Specs { - c.compileValueSpec(spec.(*ast.ValueSpec)) - } - c.emitInstOp(opEnd) - case token.TYPE: - c.emitInstOp(opTypeDecl) - for _, spec := range n.Specs { - c.compileTypeSpec(spec.(*ast.TypeSpec)) - } - c.emitInstOp(opEnd) - - default: - panic(c.errorf(n, "unexpected gen decl")) - } -} - -func (c *compiler) compileTypeExpr(n ast.Expr) { - if ident, ok := n.(*ast.Ident); ok && ident.Name == "any" && !c.config.Strict && typeparams.Enabled() { - c.emitInstOp(opEfaceType) - return - } - c.compileExpr(n) -} - -func (c *compiler) compileOptTypeExpr(n ast.Expr) { - if ident, ok := n.(*ast.Ident); ok && isWildName(ident.Name) { - c.compileWildIdent(ident, true) - return - } - c.compileTypeExpr(n) -} - -func (c *compiler) compileExpr(n ast.Expr) { - switch n := n.(type) { - case *ast.BasicLit: - c.compileBasicLit(n) - case *ast.BinaryExpr: - c.compileBinaryExpr(n) - case *ast.IndexExpr: - c.compileIndexExpr(n) - case *typeparams.IndexListExpr: - c.compileIndexListExpr(n) - case *ast.Ident: - c.compileIdent(n) - case *ast.CallExpr: - c.compileCallExpr(n) - case *ast.UnaryExpr: - c.compileUnaryExpr(n) - case *ast.StarExpr: - c.compileStarExpr(n) - case *ast.ParenExpr: - c.compileParenExpr(n) - case *ast.SliceExpr: - c.compileSliceExpr(n) - case *ast.StructType: - c.compileStructType(n) - case *ast.InterfaceType: - c.compileInterfaceType(n) - case *ast.FuncType: - c.compileFuncType(n) - case *ast.ArrayType: - c.compileArrayType(n) - case *ast.MapType: - c.compileMapType(n) - case 
*ast.ChanType: - c.compileChanType(n) - case *ast.CompositeLit: - c.compileCompositeLit(n) - case *ast.FuncLit: - c.compileFuncLit(n) - case *ast.Ellipsis: - c.compileEllipsis(n) - case *ast.KeyValueExpr: - c.compileKeyValueExpr(n) - case *ast.SelectorExpr: - c.compileSelectorExpr(n) - case *ast.TypeAssertExpr: - c.compileTypeAssertExpr(n) - - default: - panic(c.errorf(n, "compileExpr: unexpected %T", n)) - } -} - -func (c *compiler) compileBasicLit(n *ast.BasicLit) { - if !c.config.Strict { - v := literalValue(n) - if v == nil { - panic(c.errorf(n, "can't convert %s (%s) value", n.Value, n.Kind)) - } - c.prog.insts = append(c.prog.insts, instruction{ - op: opBasicLit, - valueIndex: c.internIface(n, v), - }) - return - } - - var inst instruction - switch n.Kind { - case token.INT: - inst.op = opStrictIntLit - case token.FLOAT: - inst.op = opStrictFloatLit - case token.STRING: - inst.op = opStrictStringLit - case token.CHAR: - inst.op = opStrictCharLit - default: - inst.op = opStrictComplexLit - } - inst.valueIndex = c.internString(n, n.Value) - c.prog.insts = append(c.prog.insts, inst) -} - -func (c *compiler) compileBinaryExpr(n *ast.BinaryExpr) { - c.prog.insts = append(c.prog.insts, instruction{ - op: opBinaryExpr, - value: c.toUint8(n, int(n.Op)), - }) - c.compileExpr(n.X) - c.compileExpr(n.Y) -} - -func (c *compiler) compileIndexExpr(n *ast.IndexExpr) { - c.emitInstOp(opIndexExpr) - c.compileExpr(n.X) - c.compileExpr(n.Index) -} - -func (c *compiler) compileIndexListExpr(n *typeparams.IndexListExpr) { - c.emitInstOp(opIndexListExpr) - c.compileExpr(n.X) - for _, x := range n.Indices { - c.compileExpr(x) - } - c.emitInstOp(opEnd) -} - -func (c *compiler) compileWildIdent(n *ast.Ident, optional bool) { - info := decodeWildName(n.Name) - var inst instruction - switch { - case info.Name == "_" && !info.Seq: - inst.op = opNode - case info.Name == "_" && info.Seq: - inst.op = pickOp(optional, opOptNode, opNodeSeq) - case info.Name != "_" && !info.Seq: - inst.op = opNamedNode - inst.valueIndex = c.internVar(n, info.Name) - default: - inst.op = pickOp(optional, opNamedOptNode, opNamedNodeSeq) - inst.valueIndex = c.internVar(n, info.Name) - } - c.prog.insts = append(c.prog.insts, inst) -} - -func (c *compiler) compileIdent(n *ast.Ident) { - if isWildName(n.Name) { - c.compileWildIdent(n, false) - return - } - - c.prog.insts = append(c.prog.insts, instruction{ - op: opIdent, - valueIndex: c.internString(n, n.Name), - }) -} - -func (c *compiler) compileExprMembers(list []ast.Expr) { - isSimple := len(list) <= 255 - if isSimple { - for _, x := range list { - if decodeWildNode(x).Seq { - isSimple = false - break - } - } - } - - if isSimple { - c.emitInst(instruction{ - op: opSimpleArgList, - value: uint8(len(list)), - }) - for _, x := range list { - c.compileExpr(x) - } - } else { - c.emitInstOp(opArgList) - for _, x := range list { - c.compileExpr(x) - } - c.emitInstOp(opEnd) - } -} - -func (c *compiler) compileCallExpr(n *ast.CallExpr) { - variadicOperation := func(n *ast.CallExpr) (operation, uint8) { - if len(n.Args) == 0 { - return opNonVariadicCallExpr, 0 - } - lastArg, ok := n.Args[len(n.Args)-1].(*ast.Ident) - if !ok { - return opNonVariadicCallExpr, 0 - } - if !isWildName(lastArg.Name) || !decodeWildName(lastArg.Name).Seq { - return opNonVariadicCallExpr, 0 - } - if len(n.Args) == 1 { - return opCallExpr, 0 - } - // If there is any seq op before the lastArg, we emit opCallExpr too. 
- for i := 0; i < len(n.Args)-1; i++ { - if decodeWildNode(n.Args[i]).Seq { - return opCallExpr, 0 - } - } - return opMaybeVariadicCallExpr, c.toUint8(n, len(n.Args)-1) - } - - var value uint8 - var op operation - if n.Ellipsis.IsValid() { - op = opVariadicCallExpr - } else { - op, value = variadicOperation(n) - } - - c.prog.insts = append(c.prog.insts, instruction{ - op: op, - value: value, - }) - c.compileSymbol(n.Fun) - c.compileExprMembers(n.Args) -} - -// compileSymbol is mostly like a normal compileExpr, but it's used -// in places where we can find a type/function symbol. -// -// For example, in function call expressions a called function expression -// can look like `fmt.Sprint`. It will be compiled as a special -// selector expression that requires `fmt` to be a package as opposed -// to only check that it's an identifier with "fmt" value. -func (c *compiler) compileSymbol(sym ast.Expr) { - compilePkgSymbol := func(c *compiler, sym ast.Expr) bool { - e, ok := sym.(*ast.SelectorExpr) - if !ok { - return false - } - ident, ok := e.X.(*ast.Ident) - if !ok || isWildName(e.Sel.Name) { - return false - } - pkgPath := c.config.Imports[ident.Name] - if pkgPath == "" && stdinfo.Packages[ident.Name] != "" { - pkgPath = stdinfo.Packages[ident.Name] - } - if pkgPath == "" { - return false - } - c.emitInst(instruction{ - op: opSimpleSelectorExpr, - valueIndex: c.internString(e.Sel, e.Sel.String()), - }) - c.emitInst(instruction{ - op: opPkg, - valueIndex: c.internString(ident, pkgPath), - }) - return true - } - - if c.config.WithTypes { - if compilePkgSymbol(c, sym) { - return - } - } - - c.compileExpr(sym) -} - -func (c *compiler) compileUnaryExpr(n *ast.UnaryExpr) { - c.prog.insts = append(c.prog.insts, instruction{ - op: opUnaryExpr, - value: c.toUint8(n, int(n.Op)), - }) - c.compileExpr(n.X) -} - -func (c *compiler) compileStarExpr(n *ast.StarExpr) { - c.emitInstOp(opStarExpr) - c.compileExpr(n.X) -} - -func (c *compiler) compileParenExpr(n *ast.ParenExpr) { - c.emitInstOp(opParenExpr) - c.compileExpr(n.X) -} - -func (c *compiler) compileSliceExpr(n *ast.SliceExpr) { - switch { - case n.Low == nil && n.High == nil && !n.Slice3: - c.emitInstOp(opSliceExpr) - c.compileOptExpr(n.X) - case n.Low != nil && n.High == nil && !n.Slice3: - c.emitInstOp(opSliceFromExpr) - c.compileOptExpr(n.X) - c.compileOptExpr(n.Low) - case n.Low == nil && n.High != nil && !n.Slice3: - c.emitInstOp(opSliceToExpr) - c.compileOptExpr(n.X) - c.compileOptExpr(n.High) - case n.Low != nil && n.High != nil && !n.Slice3: - c.emitInstOp(opSliceFromToExpr) - c.compileOptExpr(n.X) - c.compileOptExpr(n.Low) - c.compileOptExpr(n.High) - case n.Low == nil && n.Slice3: - c.emitInstOp(opSliceToCapExpr) - c.compileOptExpr(n.X) - c.compileOptExpr(n.High) - c.compileOptExpr(n.Max) - case n.Low != nil && n.Slice3: - c.emitInstOp(opSliceFromToCapExpr) - c.compileOptExpr(n.X) - c.compileOptExpr(n.Low) - c.compileOptExpr(n.High) - c.compileOptExpr(n.Max) - default: - panic(c.errorf(n, "unexpected slice expr")) - } -} - -func (c *compiler) compileStructType(n *ast.StructType) { - c.emitInstOp(opStructType) - c.compileOptFieldList(n.Fields) -} - -func (c *compiler) compileInterfaceType(n *ast.InterfaceType) { - if len(n.Methods.List) == 0 && !c.config.Strict { - c.emitInstOp(opEfaceType) - return - } - c.emitInstOp(opInterfaceType) - c.compileOptFieldList(n.Methods) -} - -func (c *compiler) compileFuncType(n *ast.FuncType) { - void := n.Results == nil || len(n.Results.List) == 0 - typeParams := typeparams.ForFuncType(n) - if void { - if 
typeParams == nil { - c.emitInstOp(opVoidFuncType) - } else { - c.emitInstOp(opGenericVoidFuncType) - } - } else { - if typeParams == nil { - c.emitInstOp(opFuncType) - } else { - c.emitInstOp(opGenericFuncType) - } - } - if typeParams != nil { - c.compileOptFieldList(typeParams) - } - c.compileOptFieldList(n.Params) - if !void { - c.compileOptFieldList(n.Results) - } -} - -func (c *compiler) compileArrayType(n *ast.ArrayType) { - if n.Len == nil { - c.emitInstOp(opSliceType) - c.compileTypeExpr(n.Elt) - } else { - c.emitInstOp(opArrayType) - c.compileExpr(n.Len) - c.compileTypeExpr(n.Elt) - } -} - -func (c *compiler) compileMapType(n *ast.MapType) { - c.emitInstOp(opMapType) - c.compileTypeExpr(n.Key) - c.compileTypeExpr(n.Value) -} - -func (c *compiler) compileChanType(n *ast.ChanType) { - c.emitInst(instruction{ - op: opChanType, - value: c.toUint8(n, int(n.Dir)), - }) - c.compileTypeExpr(n.Value) -} - -func (c *compiler) compileCompositeLit(n *ast.CompositeLit) { - if n.Type == nil { - c.emitInstOp(opCompositeLit) - } else { - c.emitInstOp(opTypedCompositeLit) - c.compileTypeExpr(n.Type) - } - for _, elt := range n.Elts { - c.compileExpr(elt) - } - c.emitInstOp(opEnd) -} - -func (c *compiler) compileFuncLit(n *ast.FuncLit) { - c.emitInstOp(opFuncLit) - c.compileFuncType(n.Type) - c.compileBlockStmt(n.Body) -} - -func (c *compiler) compileEllipsis(n *ast.Ellipsis) { - if n.Elt == nil { - c.emitInstOp(opEllipsis) - } else { - c.emitInstOp(opTypedEllipsis) - c.compileExpr(n.Elt) - } -} - -func (c *compiler) compileKeyValueExpr(n *ast.KeyValueExpr) { - c.emitInstOp(opKeyValueExpr) - c.compileExpr(n.Key) - c.compileExpr(n.Value) -} - -func (c *compiler) compileSelectorExpr(n *ast.SelectorExpr) { - if isWildName(n.Sel.Name) { - c.emitInstOp(opSelectorExpr) - c.compileWildIdent(n.Sel, false) - c.compileExpr(n.X) - return - } - - c.prog.insts = append(c.prog.insts, instruction{ - op: opSimpleSelectorExpr, - valueIndex: c.internString(n.Sel, n.Sel.String()), - }) - c.compileExpr(n.X) -} - -func (c *compiler) compileTypeAssertExpr(n *ast.TypeAssertExpr) { - if n.Type != nil { - c.emitInstOp(opTypeAssertExpr) - c.compileExpr(n.X) - c.compileTypeExpr(n.Type) - } else { - c.emitInstOp(opTypeSwitchAssertExpr) - c.compileExpr(n.X) - } -} - -func (c *compiler) compileStmt(n ast.Stmt) { - switch n := n.(type) { - case *ast.AssignStmt: - c.compileAssignStmt(n) - case *ast.BlockStmt: - c.compileBlockStmt(n) - case *ast.ExprStmt: - c.compileExprStmt(n) - case *ast.IfStmt: - c.compileIfStmt(n) - case *ast.CaseClause: - c.compileCaseClause(n) - case *ast.SwitchStmt: - c.compileSwitchStmt(n) - case *ast.TypeSwitchStmt: - c.compileTypeSwitchStmt(n) - case *ast.SelectStmt: - c.compileSelectStmt(n) - case *ast.ForStmt: - c.compileForStmt(n) - case *ast.RangeStmt: - c.compileRangeStmt(n) - case *ast.IncDecStmt: - c.compileIncDecStmt(n) - case *ast.EmptyStmt: - c.compileEmptyStmt(n) - case *ast.ReturnStmt: - c.compileReturnStmt(n) - case *ast.BranchStmt: - c.compileBranchStmt(n) - case *ast.LabeledStmt: - c.compileLabeledStmt(n) - case *ast.GoStmt: - c.compileGoStmt(n) - case *ast.DeferStmt: - c.compileDeferStmt(n) - case *ast.SendStmt: - c.compileSendStmt(n) - case *ast.DeclStmt: - c.compileDecl(n.Decl) - - default: - panic(c.errorf(n, "compileStmt: unexpected %T", n)) - } -} - -func (c *compiler) compileAssignStmt(n *ast.AssignStmt) { - if len(n.Lhs) == 1 && len(n.Rhs) == 1 { - lhsInfo := decodeWildNode(n.Lhs[0]) - rhsInfo := decodeWildNode(n.Rhs[0]) - if !lhsInfo.Seq && !rhsInfo.Seq { - c.emitInst(instruction{ 
- op: opAssignStmt, - value: uint8(n.Tok), - }) - c.compileExpr(n.Lhs[0]) - c.compileExpr(n.Rhs[0]) - return - } - } - - c.emitInst(instruction{ - op: opMultiAssignStmt, - value: uint8(n.Tok), - }) - for _, x := range n.Lhs { - c.compileExpr(x) - } - c.emitInstOp(opEnd) - for _, x := range n.Rhs { - c.compileExpr(x) - } - c.emitInstOp(opEnd) -} - -func (c *compiler) compileBlockStmt(n *ast.BlockStmt) { - c.emitInstOp(opBlockStmt) - insideStmtList := c.insideStmtList - c.insideStmtList = true - for _, elt := range n.List { - c.compileStmt(elt) - } - c.insideStmtList = insideStmtList - c.emitInstOp(opEnd) -} - -func (c *compiler) compileExprStmt(n *ast.ExprStmt) { - if ident, ok := n.X.(*ast.Ident); ok && isWildName(ident.Name) { - c.compileIdent(ident) - } else { - c.emitInstOp(opExprStmt) - c.compileExpr(n.X) - } -} - -func (c *compiler) compileIfStmt(n *ast.IfStmt) { - // Check for the special case: `if $*_ ...` should match all if statements. - if ident, ok := n.Cond.(*ast.Ident); ok && n.Init == nil && isWildName(ident.Name) { - info := decodeWildName(ident.Name) - if info.Seq && info.Name == "_" { - // Set Init to Cond, change cond from $*_ to $_. - n.Init = &ast.ExprStmt{X: n.Cond} - cond := &ast.Ident{Name: encodeWildName(info.Name, false)} - n.Cond = cond - c.compileIfStmt(n) - return - } - // Named $* is harder and slower. - if info.Seq { - c.prog.insts = append(c.prog.insts, instruction{ - op: pickOp(n.Else == nil, opIfNamedOptStmt, opIfNamedOptElseStmt), - valueIndex: c.internVar(ident, info.Name), - }) - c.compileStmt(n.Body) - if n.Else != nil { - c.compileStmt(n.Else) - } - return - } - } - - switch { - case n.Init == nil && n.Else == nil: - c.emitInstOp(opIfStmt) - c.compileExpr(n.Cond) - c.compileStmt(n.Body) - case n.Init != nil && n.Else == nil: - c.emitInstOp(opIfInitStmt) - c.compileOptStmt(n.Init) - c.compileExpr(n.Cond) - c.compileStmt(n.Body) - case n.Init == nil && n.Else != nil: - c.emitInstOp(opIfElseStmt) - c.compileExpr(n.Cond) - c.compileStmt(n.Body) - c.compileStmt(n.Else) - case n.Init != nil && n.Else != nil: - c.emitInstOp(opIfInitElseStmt) - c.compileOptStmt(n.Init) - c.compileExpr(n.Cond) - c.compileStmt(n.Body) - c.compileStmt(n.Else) - - default: - panic(c.errorf(n, "unexpected if stmt")) - } -} - -func (c *compiler) compileCommClause(n *ast.CommClause) { - c.emitInstOp(pickOp(n.Comm == nil, opDefaultCommClause, opCommClause)) - if n.Comm != nil { - c.compileStmt(n.Comm) - } - for _, x := range n.Body { - c.compileStmt(x) - } - c.emitInstOp(opEnd) -} - -func (c *compiler) compileCaseClause(n *ast.CaseClause) { - c.emitInstOp(pickOp(n.List == nil, opDefaultCaseClause, opCaseClause)) - if n.List != nil { - for _, x := range n.List { - c.compileExpr(x) - } - c.emitInstOp(opEnd) - } - for _, x := range n.Body { - c.compileStmt(x) - } - c.emitInstOp(opEnd) -} - -func (c *compiler) compileSwitchBody(n *ast.BlockStmt) { - wildcardCase := func(cc *ast.CaseClause) *ast.Ident { - if len(cc.List) != 1 || len(cc.Body) != 1 { - return nil - } - v, ok := cc.List[0].(*ast.Ident) - if !ok || !isWildName(v.Name) { - return nil - } - bodyStmt, ok := cc.Body[0].(*ast.ExprStmt) - if !ok { - return nil - } - bodyIdent, ok := bodyStmt.X.(*ast.Ident) - if !ok || bodyIdent.Name != "gogrep_body" { - return nil - } - return v - } - for _, cc := range n.List { - cc := cc.(*ast.CaseClause) - wildcard := wildcardCase(cc) - if wildcard == nil { - c.compileCaseClause(cc) - continue - } - c.compileWildIdent(wildcard, false) - } - c.emitInstOp(opEnd) -} - -func (c *compiler) 
compileSwitchStmt(n *ast.SwitchStmt) { - var op operation - switch { - case n.Init == nil && n.Tag == nil: - op = opSwitchStmt - case n.Init == nil && n.Tag != nil: - op = opSwitchTagStmt - case n.Init != nil && n.Tag == nil: - op = opSwitchInitStmt - default: - op = opSwitchInitTagStmt - } - - c.emitInstOp(op) - if n.Init != nil { - c.compileOptStmt(n.Init) - } - if n.Tag != nil { - c.compileOptExpr(n.Tag) - } - c.compileSwitchBody(n.Body) -} - -func (c *compiler) compileTypeSwitchStmt(n *ast.TypeSwitchStmt) { - c.emitInstOp(pickOp(n.Init == nil, opTypeSwitchStmt, opTypeSwitchInitStmt)) - if n.Init != nil { - c.compileOptStmt(n.Init) - } - c.compileStmt(n.Assign) - c.compileSwitchBody(n.Body) -} - -func (c *compiler) compileSelectStmt(n *ast.SelectStmt) { - c.emitInstOp(opSelectStmt) - - wildcardCase := func(cc *ast.CommClause) *ast.Ident { - if cc.Comm == nil { - return nil - } - vStmt, ok := cc.Comm.(*ast.ExprStmt) - if !ok { - return nil - } - v, ok := vStmt.X.(*ast.Ident) - if !ok || !isWildName(v.Name) { - return nil - } - bodyStmt, ok := cc.Body[0].(*ast.ExprStmt) - if !ok { - return nil - } - bodyIdent, ok := bodyStmt.X.(*ast.Ident) - if !ok || bodyIdent.Name != "gogrep_body" { - return nil - } - return v - } - for _, cc := range n.Body.List { - cc := cc.(*ast.CommClause) - wildcard := wildcardCase(cc) - if wildcard == nil { - c.compileCommClause(cc) - continue - } - c.compileWildIdent(wildcard, false) - } - c.emitInstOp(opEnd) -} - -func (c *compiler) compileForStmt(n *ast.ForStmt) { - var op operation - switch { - case n.Init == nil && n.Cond == nil && n.Post == nil: - op = opForStmt - case n.Init == nil && n.Cond == nil && n.Post != nil: - op = opForPostStmt - case n.Init == nil && n.Cond != nil && n.Post == nil: - op = opForCondStmt - case n.Init == nil && n.Cond != nil && n.Post != nil: - op = opForCondPostStmt - case n.Init != nil && n.Cond == nil && n.Post == nil: - op = opForInitStmt - case n.Init != nil && n.Cond == nil && n.Post != nil: - op = opForInitPostStmt - case n.Init != nil && n.Cond != nil && n.Post == nil: - op = opForInitCondStmt - default: - op = opForInitCondPostStmt - } - - c.emitInstOp(op) - if n.Init != nil { - c.compileOptStmt(n.Init) - } - if n.Cond != nil { - c.compileOptExpr(n.Cond) - } - if n.Post != nil { - c.compileOptStmt(n.Post) - } - c.compileBlockStmt(n.Body) -} - -func (c *compiler) compileRangeStmt(n *ast.RangeStmt) { - switch { - case n.Key == nil && n.Value == nil: - c.emitInstOp(opRangeStmt) - c.compileExpr(n.X) - c.compileStmt(n.Body) - case n.Key != nil && n.Value == nil: - c.emitInst(instruction{ - op: opRangeKeyStmt, - value: c.toUint8(n, int(n.Tok)), - }) - c.compileExpr(n.Key) - c.compileExpr(n.X) - c.compileStmt(n.Body) - case n.Key != nil && n.Value != nil: - c.emitInst(instruction{ - op: opRangeKeyValueStmt, - value: c.toUint8(n, int(n.Tok)), - }) - c.compileExpr(n.Key) - c.compileExpr(n.Value) - c.compileExpr(n.X) - c.compileStmt(n.Body) - default: - panic(c.errorf(n, "unexpected range stmt")) - } -} - -func (c *compiler) compileIncDecStmt(n *ast.IncDecStmt) { - c.prog.insts = append(c.prog.insts, instruction{ - op: opIncDecStmt, - value: c.toUint8(n, int(n.Tok)), - }) - c.compileExpr(n.X) -} - -func (c *compiler) compileEmptyStmt(n *ast.EmptyStmt) { - _ = n // unused - c.emitInstOp(opEmptyStmt) -} - -func (c *compiler) compileReturnStmt(n *ast.ReturnStmt) { - c.emitInstOp(opReturnStmt) - for _, x := range n.Results { - c.compileExpr(x) - } - c.emitInstOp(opEnd) -} - -func (c *compiler) compileBranchStmt(n *ast.BranchStmt) { - if 
n.Label != nil { - if isWildName(n.Label.Name) { - c.prog.insts = append(c.prog.insts, instruction{ - op: opLabeledBranchStmt, - value: c.toUint8(n, int(n.Tok)), - }) - c.compileWildIdent(n.Label, false) - } else { - c.prog.insts = append(c.prog.insts, instruction{ - op: opSimpleLabeledBranchStmt, - value: c.toUint8(n, int(n.Tok)), - valueIndex: c.internString(n.Label, n.Label.Name), - }) - } - return - } - c.prog.insts = append(c.prog.insts, instruction{ - op: opBranchStmt, - value: c.toUint8(n, int(n.Tok)), - }) -} - -func (c *compiler) compileLabeledStmt(n *ast.LabeledStmt) { - if isWildName(n.Label.Name) { - c.emitInstOp(opLabeledStmt) - c.compileWildIdent(n.Label, false) - c.compileStmt(n.Stmt) - return - } - - c.prog.insts = append(c.prog.insts, instruction{ - op: opSimpleLabeledStmt, - valueIndex: c.internString(n.Label, n.Label.Name), - }) - c.compileStmt(n.Stmt) -} - -func (c *compiler) compileGoStmt(n *ast.GoStmt) { - c.emitInstOp(opGoStmt) - c.compileExpr(n.Call) -} - -func (c *compiler) compileDeferStmt(n *ast.DeferStmt) { - c.emitInstOp(opDeferStmt) - c.compileExpr(n.Call) -} - -func (c *compiler) compileSendStmt(n *ast.SendStmt) { - c.emitInstOp(opSendStmt) - c.compileExpr(n.Chan) - c.compileExpr(n.Value) -} - -func (c *compiler) compileDeclSlice(decls []ast.Decl) { - c.emitInstOp(opMultiDecl) - for _, n := range decls { - c.compileDecl(n) - } - c.emitInstOp(opEnd) -} - -func (c *compiler) compileStmtSlice(stmts []ast.Stmt) { - c.emitInstOp(opMultiStmt) - insideStmtList := c.insideStmtList - c.insideStmtList = true - for _, n := range stmts { - c.compileStmt(n) - } - c.insideStmtList = insideStmtList - c.emitInstOp(opEnd) -} - -func (c *compiler) compileExprSlice(exprs []ast.Expr) { - c.emitInstOp(opMultiExpr) - for _, n := range exprs { - c.compileExpr(n) - } - c.emitInstOp(opEnd) -} - -func (c *compiler) compileRangeClause(clause *rangeClause) { - c.emitInstOp(opRangeClause) - c.compileExpr(clause.X) -} - -func (c *compiler) compileRangeHeader(h *rangeHeader) { - n := h.Node - switch { - case n.Key == nil && n.Value == nil: - c.emitInstOp(opRangeHeader) - c.compileExpr(n.X) - case n.Key != nil && n.Value == nil: - c.emitInst(instruction{ - op: opRangeKeyHeader, - value: c.toUint8(n, int(n.Tok)), - }) - c.compileExpr(n.Key) - c.compileExpr(n.X) - case n.Key != nil && n.Value != nil: - c.emitInst(instruction{ - op: opRangeKeyValueHeader, - value: c.toUint8(n, int(n.Tok)), - }) - c.compileExpr(n.Key) - c.compileExpr(n.Value) - c.compileExpr(n.X) - default: - panic(c.errorf(n, "unexpected range header")) - } -} - -func pickOp(cond bool, ifTrue, ifFalse operation) operation { - if cond { - return ifTrue - } - return ifFalse -} - -func fitsUint8(v int) bool { - return v >= 0 && v <= 0xff -} diff --git a/vendor/github.com/quasilyte/gogrep/compile_import.go b/vendor/github.com/quasilyte/gogrep/compile_import.go deleted file mode 100644 index ab0dd12a7..000000000 --- a/vendor/github.com/quasilyte/gogrep/compile_import.go +++ /dev/null @@ -1,57 +0,0 @@ -package gogrep - -import ( - "errors" - "fmt" - "strings" - "unicode" - "unicode/utf8" -) - -func compileImportPattern(config CompileConfig) (*Pattern, PatternInfo, error) { - // TODO: figure out how to compile it as a part of a normal pattern compilation? - // This is an adhoc solution to a problem. 
- - readIdent := func(s string) (varname, rest string) { - first := true - var offset int - for _, ch := range s { - ok := unicode.IsLetter(ch) || - ch == '_' || - (!first && unicode.IsDigit(ch)) - if !ok { - break - } - offset += utf8.RuneLen(ch) - first = false - } - return s[:offset], s[offset:] - } - - info := newPatternInfo() - src := config.Src - src = src[len("import $"):] - if src == "" { - return nil, info, errors.New("expected ident after $, found EOF") - } - varname, rest := readIdent(src) - if strings.TrimSpace(rest) != "" { - return nil, info, fmt.Errorf("unexpected %s", rest) - } - var p program - if varname != "_" { - info.Vars[src] = struct{}{} - p.strings = []string{varname} - p.insts = []instruction{ - {op: opImportDecl}, - {op: opNamedNodeSeq, valueIndex: 0}, - {op: opEnd}, - } - } else { - p.insts = []instruction{ - {op: opAnyImportDecl}, - } - } - m := matcher{prog: &p, insts: p.insts} - return &Pattern{m: &m}, info, nil -} diff --git a/vendor/github.com/quasilyte/gogrep/gen_operations.go b/vendor/github.com/quasilyte/gogrep/gen_operations.go deleted file mode 100644 index fd8035774..000000000 --- a/vendor/github.com/quasilyte/gogrep/gen_operations.go +++ /dev/null @@ -1,366 +0,0 @@ -//go:build main -// +build main - -package main - -import ( - "bytes" - "fmt" - "go/format" - "io/ioutil" - "log" - "strings" - "text/template" -) - -var opPrototypes = []operationProto{ - {name: "Node", tag: "Node"}, - {name: "NamedNode", tag: "Node", valueIndex: "strings | wildcard name"}, - {name: "NodeSeq"}, - {name: "NamedNodeSeq", valueIndex: "strings | wildcard name"}, - {name: "OptNode"}, - {name: "NamedOptNode", valueIndex: "strings | wildcard name"}, - - {name: "FieldNode", tag: "Node"}, - {name: "NamedFieldNode", tag: "Node", valueIndex: "strings | wildcard name"}, - - {name: "MultiStmt", tag: "StmtList", args: "stmts...", example: "f(); g()"}, - {name: "MultiExpr", tag: "ExprList", args: "exprs...", example: "f(), g()"}, - {name: "MultiDecl", tag: "DeclList", args: "exprs...", example: "f(), g()"}, - - {name: "End"}, - - {name: "BasicLit", tag: "BasicLit", valueIndex: "ifaces | parsed literal value"}, - {name: "StrictIntLit", tag: "BasicLit", valueIndex: "strings | raw literal value"}, - {name: "StrictFloatLit", tag: "BasicLit", valueIndex: "strings | raw literal value"}, - {name: "StrictCharLit", tag: "BasicLit", valueIndex: "strings | raw literal value"}, - {name: "StrictStringLit", tag: "BasicLit", valueIndex: "strings | raw literal value"}, - {name: "StrictComplexLit", tag: "BasicLit", valueIndex: "strings | raw literal value"}, - - {name: "Ident", tag: "Ident", valueIndex: "strings | ident name"}, - {name: "Pkg", tag: "Ident", valueIndex: "strings | package path"}, - - {name: "IndexExpr", tag: "IndexExpr", args: "x expr"}, - - {name: "IndexListExpr", tag: "IndexListExpr", args: "x exprs..."}, - - {name: "SliceExpr", tag: "SliceExpr", args: "x"}, - {name: "SliceFromExpr", tag: "SliceExpr", args: "x from", example: "x[from:]"}, - {name: "SliceToExpr", tag: "SliceExpr", args: "x to", example: "x[:to]"}, - {name: "SliceFromToExpr", tag: "SliceExpr", args: "x from to", example: "x[from:to]"}, - {name: "SliceToCapExpr", tag: "SliceExpr", args: "x from cap", example: "x[:from:cap]"}, - {name: "SliceFromToCapExpr", tag: "SliceExpr", args: "x from to cap", example: "x[from:to:cap]"}, - - {name: "FuncLit", tag: "FuncLit", args: "type block"}, - - {name: "CompositeLit", tag: "CompositeLit", args: "elts...", example: "{elts...}"}, - {name: "TypedCompositeLit", tag: "CompositeLit", 
args: "typ elts...", example: "typ{elts...}"}, - - {name: "SimpleSelectorExpr", tag: "SelectorExpr", args: "x", valueIndex: "strings | selector name"}, - {name: "SelectorExpr", tag: "SelectorExpr", args: "x sel"}, - {name: "TypeAssertExpr", tag: "TypeAssertExpr", args: "x typ"}, - {name: "TypeSwitchAssertExpr", tag: "TypeAssertExpr", args: "x"}, - - {name: "StructType", tag: "StructType", args: "fields"}, - {name: "InterfaceType", tag: "InterfaceType", args: "fields"}, - {name: "EfaceType", tag: "InterfaceType"}, - {name: "VoidFuncType", tag: "FuncType", args: "params"}, - {name: "GenericVoidFuncType", tag: "FuncType", args: "typeparams params"}, - {name: "FuncType", tag: "FuncType", args: "params results"}, - {name: "GenericFuncType", tag: "FuncType", args: "typeparams params results"}, - {name: "ArrayType", tag: "ArrayType", args: "length elem"}, - {name: "SliceType", tag: "ArrayType", args: "elem"}, - {name: "MapType", tag: "MapType", args: "key value"}, - {name: "ChanType", tag: "ChanType", args: "value", value: "ast.ChanDir | channel direction"}, - {name: "KeyValueExpr", tag: "KeyValueExpr", args: "key value"}, - - {name: "Ellipsis", tag: "Ellipsis"}, - {name: "TypedEllipsis", tag: "Ellipsis", args: "type"}, - - {name: "StarExpr", tag: "StarExpr", args: "x"}, - {name: "UnaryExpr", tag: "UnaryExpr", args: "x", value: "token.Token | unary operator"}, - {name: "BinaryExpr", tag: "BinaryExpr", args: "x y", value: "token.Token | binary operator"}, - {name: "ParenExpr", tag: "ParenExpr", args: "x"}, - - { - name: "ArgList", - args: "exprs...", - example: "1, 2, 3", - }, - { - name: "SimpleArgList", - note: "Like ArgList, but pattern contains no $*", - args: "exprs[]", - value: "int | slice len", - example: "1, 2, 3", - }, - - {name: "VariadicCallExpr", tag: "CallExpr", args: "fn args", example: "f(1, xs...)"}, - {name: "NonVariadicCallExpr", tag: "CallExpr", args: "fn args", example: "f(1, xs)"}, - {name: "MaybeVariadicCallExpr", tag: "CallExpr", args: "fn args", value: "int | can be variadic if len(args)>value", example: "f(1, xs) or f(1, xs...)"}, - {name: "CallExpr", tag: "CallExpr", args: "fn args", example: "f(1, xs) or f(1, xs...)"}, - - {name: "AssignStmt", tag: "AssignStmt", args: "lhs rhs", value: "token.Token | ':=' or '='", example: "lhs := rhs()"}, - {name: "MultiAssignStmt", tag: "AssignStmt", args: "lhs... 
rhs...", value: "token.Token | ':=' or '='", example: "lhs1, lhs2 := rhs()"}, - - {name: "BranchStmt", tag: "BranchStmt", args: "x", value: "token.Token | branch kind"}, - {name: "SimpleLabeledBranchStmt", tag: "BranchStmt", args: "x", valueIndex: "strings | label name", value: "token.Token | branch kind"}, - {name: "LabeledBranchStmt", tag: "BranchStmt", args: "label x", value: "token.Token | branch kind"}, - {name: "SimpleLabeledStmt", tag: "LabeledStmt", args: "x", valueIndex: "strings | label name"}, - {name: "LabeledStmt", tag: "LabeledStmt", args: "label x"}, - - {name: "BlockStmt", tag: "BlockStmt", args: "body..."}, - {name: "ExprStmt", tag: "ExprStmt", args: "x"}, - - {name: "GoStmt", tag: "GoStmt", args: "x"}, - {name: "DeferStmt", tag: "DeferStmt", args: "x"}, - - {name: "SendStmt", tag: "SendStmt", args: "ch value"}, - - {name: "EmptyStmt", tag: "EmptyStmt"}, - {name: "IncDecStmt", tag: "IncDecStmt", args: "x", value: "token.Token | '++' or '--'"}, - {name: "ReturnStmt", tag: "ReturnStmt", args: "results..."}, - - {name: "IfStmt", tag: "IfStmt", args: "cond block", example: "if cond {}"}, - {name: "IfInitStmt", tag: "IfStmt", args: "init cond block", example: "if init; cond {}"}, - {name: "IfElseStmt", tag: "IfStmt", args: "cond block else", example: "if cond {} else ..."}, - {name: "IfInitElseStmt", tag: "IfStmt", args: "init cond block else", example: "if init; cond {} else ..."}, - {name: "IfNamedOptStmt", tag: "IfStmt", args: "block", valueIndex: "strings | wildcard name", example: "if $*x {}"}, - {name: "IfNamedOptElseStmt", tag: "IfStmt", args: "block else", valueIndex: "strings | wildcard name", example: "if $*x {} else ..."}, - - {name: "SwitchStmt", tag: "SwitchStmt", args: "body...", example: "switch {}"}, - {name: "SwitchTagStmt", tag: "SwitchStmt", args: "tag body...", example: "switch tag {}"}, - {name: "SwitchInitStmt", tag: "SwitchStmt", args: "init body...", example: "switch init; {}"}, - {name: "SwitchInitTagStmt", tag: "SwitchStmt", args: "init tag body...", example: "switch init; tag {}"}, - - {name: "SelectStmt", tag: "SelectStmt", args: "body..."}, - - {name: "TypeSwitchStmt", tag: "TypeSwitchStmt", args: "x block", example: "switch x.(type) {}"}, - {name: "TypeSwitchInitStmt", tag: "TypeSwitchStmt", args: "init x block", example: "switch init; x.(type) {}"}, - - {name: "CaseClause", tag: "CaseClause", args: "values... 
body..."}, - {name: "DefaultCaseClause", tag: "CaseClause", args: "body..."}, - - {name: "CommClause", tag: "CommClause", args: "comm body..."}, - {name: "DefaultCommClause", tag: "CommClause", args: "body..."}, - - {name: "ForStmt", tag: "ForStmt", args: "blocl", example: "for {}"}, - {name: "ForPostStmt", tag: "ForStmt", args: "post block", example: "for ; ; post {}"}, - {name: "ForCondStmt", tag: "ForStmt", args: "cond block", example: "for ; cond; {}"}, - {name: "ForCondPostStmt", tag: "ForStmt", args: "cond post block", example: "for ; cond; post {}"}, - {name: "ForInitStmt", tag: "ForStmt", args: "init block", example: "for init; ; {}"}, - {name: "ForInitPostStmt", tag: "ForStmt", args: "init post block", example: "for init; ; post {}"}, - {name: "ForInitCondStmt", tag: "ForStmt", args: "init cond block", example: "for init; cond; {}"}, - {name: "ForInitCondPostStmt", tag: "ForStmt", args: "init cond post block", example: "for init; cond; post {}"}, - - {name: "RangeStmt", tag: "RangeStmt", args: "x block", example: "for range x {}"}, - {name: "RangeKeyStmt", tag: "RangeStmt", args: "key x block", value: "token.Token | ':=' or '='", example: "for key := range x {}"}, - {name: "RangeKeyValueStmt", tag: "RangeStmt", args: "key value x block", value: "token.Token | ':=' or '='", example: "for key, value := range x {}"}, - - {name: "RangeClause", tag: "RangeStmt", args: "x", example: "range x"}, - {name: "RangeHeader", tag: "RangeStmt", args: "x", example: "for range x"}, - {name: "RangeKeyHeader", tag: "RangeStmt", args: "key x", value: "token.Token | ':=' or '='", example: "for key := range x"}, - {name: "RangeKeyValueHeader", tag: "RangeStmt", args: "key value x", value: "token.Token | ':=' or '='", example: "for key, value := range x"}, - - {name: "FieldList", args: "fields..."}, - {name: "UnnamedField", args: "typ", example: "type"}, - {name: "SimpleField", args: "typ", valueIndex: "strings | field name", example: "name type"}, - {name: "Field", args: "name typ", example: "$name type"}, - {name: "MultiField", args: "names... typ", example: "name1, name2 type"}, - - {name: "ValueSpec", tag: "ValueSpec", args: "value"}, - {name: "ValueInitSpec", tag: "ValueSpec", args: "lhs... rhs...", example: "lhs = rhs"}, - {name: "TypedValueInitSpec", tag: "ValueSpec", args: "lhs... type rhs...", example: "lhs typ = rhs"}, - {name: "TypedValueSpec", tag: "ValueSpec", args: "lhs... 
type", example: "lhs typ"}, - - {name: "SimpleTypeSpec", tag: "TypeSpec", args: "type", valueIndex: "strings | type name", example: "name type"}, - {name: "TypeSpec", tag: "TypeSpec", args: "name type", example: "name type"}, - {name: "GenericTypeSpec", tag: "TypeSpec", args: "name typeparasm type", example: "name[typeparams] type"}, - {name: "TypeAliasSpec", tag: "TypeSpec", args: "name type", example: "name = type"}, - - {name: "SimpleFuncDecl", tag: "FuncDecl", args: "type block", valueIndex: "strings | field name"}, - {name: "FuncDecl", tag: "FuncDecl", args: "name type block"}, - {name: "MethodDecl", tag: "FuncDecl", args: "recv name type block"}, - {name: "FuncProtoDecl", tag: "FuncDecl", args: "name type"}, - {name: "MethodProtoDecl", tag: "FuncDecl", args: "recv name type"}, - - {name: "DeclStmt", tag: "DeclStmt", args: "decl"}, - {name: "ConstDecl", tag: "GenDecl", args: "valuespecs..."}, - {name: "VarDecl", tag: "GenDecl", args: "valuespecs..."}, - {name: "TypeDecl", tag: "GenDecl", args: "typespecs..."}, - - {name: "AnyImportDecl", tag: "GenDecl"}, - {name: "ImportDecl", tag: "GenDecl", args: "importspecs..."}, - - {name: "EmptyPackage", tag: "File", args: "name"}, -} - -type operationProto struct { - name string - value string - valueIndex string - tag string - example string - args string - note string -} - -type operationInfo struct { - Example string - Note string - Args string - Enum uint8 - TagName string - Name string - ValueDoc string - ValueIndexDoc string - ExtraValueKindName string - ValueKindName string - VariadicMap uint64 - NumArgs int - SliceIndex int -} - -const stackUnchanged = "" - -var fileTemplate = template.Must(template.New("operations.go").Parse(`// Code generated "gen_operations.go"; DO NOT EDIT. - -package gogrep - -import ( - "github.com/quasilyte/gogrep/nodetag" -) - -//go:generate stringer -type=operation -trimprefix=op -type operation uint8 - -const ( - opInvalid operation = 0 -{{ range .Operations }} - // Tag: {{.TagName}} - {{- if .Note}}{{print "\n"}}// {{.Note}}{{end}} - {{- if .Args}}{{print "\n"}}// Args: {{.Args}}{{end}} - {{- if .Example}}{{print "\n"}}// Example: {{.Example}}{{end}} - {{- if .ValueDoc}}{{print "\n"}}// Value: {{.ValueDoc}}{{end}} - {{- if .ValueIndexDoc}}{{print "\n"}}// ValueIndex: {{.ValueIndexDoc}}{{end}} - op{{ .Name }} operation = {{.Enum}} -{{ end -}} -) - -type operationInfo struct { - Tag nodetag.Value - NumArgs int - ValueKind valueKind - ExtraValueKind valueKind - VariadicMap bitmap64 - SliceIndex int -} - -var operationInfoTable = [256]operationInfo{ - opInvalid: {}, - -{{ range .Operations -}} - op{{.Name}}: { - Tag: nodetag.{{.TagName}}, - NumArgs: {{.NumArgs}}, - ValueKind: {{.ValueKindName}}, - ExtraValueKind: {{.ExtraValueKindName}}, - VariadicMap: {{.VariadicMap}}, // {{printf "%b" .VariadicMap}} - SliceIndex: {{.SliceIndex}}, - }, -{{ end }} -} -`)) - -func main() { - operations := make([]operationInfo, len(opPrototypes)) - for i, proto := range opPrototypes { - enum := uint8(i + 1) - - tagName := proto.tag - if tagName == "" { - tagName = "Unknown" - } - - variadicMap := uint64(0) - numArgs := 0 - sliceLenIndex := -1 - if proto.args != "" { - args := strings.Split(proto.args, " ") - numArgs = len(args) - for i, arg := range args { - isVariadic := strings.HasSuffix(arg, "...") - if isVariadic { - variadicMap |= 1 << i - } - if strings.HasSuffix(arg, "[]") { - sliceLenIndex = i - } - } - } - - extraValueKindName := "emptyValue" - if proto.valueIndex != "" { - parts := strings.Split(proto.valueIndex, " | ") - 
typ := parts[0] - switch typ { - case "strings": - extraValueKindName = "stringValue" - case "ifaces": - extraValueKindName = "ifaceValue" - default: - panic(fmt.Sprintf("%s: unexpected %s type", proto.name, typ)) - } - } - valueKindName := "emptyValue" - if proto.value != "" { - parts := strings.Split(proto.value, " | ") - typ := parts[0] - switch typ { - case "token.Token": - valueKindName = "tokenValue" - case "ast.ChanDir": - valueKindName = "chandirValue" - case "int": - valueKindName = "intValue" - default: - panic(fmt.Sprintf("%s: unexpected %s type", proto.name, typ)) - } - } - - operations[i] = operationInfo{ - Example: proto.example, - Note: proto.note, - Args: proto.args, - Enum: enum, - TagName: tagName, - Name: proto.name, - ValueDoc: proto.value, - ValueIndexDoc: proto.valueIndex, - NumArgs: numArgs, - VariadicMap: variadicMap, - ExtraValueKindName: extraValueKindName, - ValueKindName: valueKindName, - SliceIndex: sliceLenIndex, - } - } - - var buf bytes.Buffer - err := fileTemplate.Execute(&buf, map[string]interface{}{ - "Operations": operations, - }) - if err != nil { - log.Panicf("execute template: %v", err) - } - writeFile("operations.gen.go", buf.Bytes()) -} - -func writeFile(filename string, data []byte) { - pretty, err := format.Source(data) - if err != nil { - log.Panicf("gofmt: %v", err) - } - if err := ioutil.WriteFile(filename, pretty, 0666); err != nil { - log.Panicf("write %s: %v", filename, err) - } -} diff --git a/vendor/github.com/quasilyte/gogrep/gogrep.go b/vendor/github.com/quasilyte/gogrep/gogrep.go deleted file mode 100644 index 47a03f9b4..000000000 --- a/vendor/github.com/quasilyte/gogrep/gogrep.go +++ /dev/null @@ -1,187 +0,0 @@ -package gogrep - -import ( - "errors" - "go/ast" - "go/token" - "go/types" - "strings" - - "github.com/quasilyte/gogrep/nodetag" -) - -func IsEmptyNodeSlice(n ast.Node) bool { - if list, ok := n.(*NodeSlice); ok { - return list.Len() == 0 - } - return false -} - -// MatchData describes a successful pattern match. -type MatchData struct { - Node ast.Node - Capture []CapturedNode -} - -type CapturedNode struct { - Name string - Node ast.Node -} - -func (data MatchData) CapturedByName(name string) (ast.Node, bool) { - if name == "$$" { - return data.Node, true - } - return findNamed(data.Capture, name) -} - -type PartialNode struct { - X ast.Node - - from token.Pos - to token.Pos -} - -func (p *PartialNode) Pos() token.Pos { return p.from } -func (p *PartialNode) End() token.Pos { return p.to } - -type MatcherState struct { - Types *types.Info - - // CapturePreset is a key-value pairs to use in the next match calls - // as predefined variables. - // For example, if the pattern is `$x = f()` and CapturePreset contains - // a pair with Name=x and value of `obj.x`, then the above mentioned - // pattern will only match `obj.x = f()` statements. - // - // If nil, the default behavior will be used. A first syntax element - // matching the matcher var will be captured. 
- CapturePreset []CapturedNode - - // node values recorded by name, excluding "_" (used only by the - // actual matching phase) - capture []CapturedNode - - nodeSlices []NodeSlice - nodeSlicesUsed int - - pc int - - partial PartialNode -} - -func NewMatcherState() MatcherState { - return MatcherState{ - capture: make([]CapturedNode, 0, 8), - nodeSlices: make([]NodeSlice, 16), - } -} - -type Pattern struct { - m *matcher -} - -type PatternInfo struct { - Vars map[string]struct{} -} - -func (p *Pattern) NodeTag() nodetag.Value { - return operationInfoTable[p.m.prog.insts[0].op].Tag -} - -// MatchNode calls cb if n matches a pattern. -func (p *Pattern) MatchNode(state *MatcherState, n ast.Node, cb func(MatchData)) { - p.m.MatchNode(state, n, cb) -} - -// Clone creates a pattern copy. -func (p *Pattern) Clone() *Pattern { - clone := *p - clone.m = &matcher{} - *clone.m = *p.m - return &clone -} - -type CompileConfig struct { - Fset *token.FileSet - - // Src is a gogrep pattern expression string. - Src string - - // When strict is false, gogrep may consider 0xA and 10 to be identical. - // If true, a compiled pattern will require a full syntax match. - Strict bool - - // WithTypes controls whether gogrep would have types.Info during the pattern execution. - // If set to true, it will compile a pattern to a potentially more precise form, where - // fmt.Printf maps to the stdlib function call but not Printf method call on some - // random fmt variable. - WithTypes bool - - // Imports specifies packages that should be recognized for the type-aware matching. - // It maps a package name to a package path. - // Only used if WithTypes is true. - Imports map[string]string -} - -func Compile(config CompileConfig) (*Pattern, PatternInfo, error) { - if strings.HasPrefix(config.Src, "import $") { - return compileImportPattern(config) - } - info := newPatternInfo() - n, err := parseExpr(config.Fset, config.Src) - if err != nil { - return nil, info, err - } - if n == nil { - return nil, info, errors.New("invalid pattern syntax") - } - var c compiler - c.config = config - prog, err := c.Compile(n, &info) - if err != nil { - return nil, info, err - } - m := newMatcher(prog) - return &Pattern{m: m}, info, nil -} - -func Walk(root ast.Node, fn func(n ast.Node) bool) { - if root, ok := root.(*NodeSlice); ok { - switch root.Kind { - case ExprNodeSlice: - for _, e := range root.exprSlice { - ast.Inspect(e, fn) - } - case StmtNodeSlice: - for _, e := range root.stmtSlice { - ast.Inspect(e, fn) - } - case FieldNodeSlice: - for _, e := range root.fieldSlice { - ast.Inspect(e, fn) - } - case IdentNodeSlice: - for _, e := range root.identSlice { - ast.Inspect(e, fn) - } - case SpecNodeSlice: - for _, e := range root.specSlice { - ast.Inspect(e, fn) - } - default: - for _, e := range root.declSlice { - ast.Inspect(e, fn) - } - } - return - } - - ast.Inspect(root, fn) -} - -func newPatternInfo() PatternInfo { - return PatternInfo{ - Vars: make(map[string]struct{}), - } -} diff --git a/vendor/github.com/quasilyte/gogrep/instructions.go b/vendor/github.com/quasilyte/gogrep/instructions.go deleted file mode 100644 index 9f4f72d88..000000000 --- a/vendor/github.com/quasilyte/gogrep/instructions.go +++ /dev/null @@ -1,116 +0,0 @@ -package gogrep - -import ( - "fmt" - "go/ast" - "go/token" - "strings" -) - -type bitmap64 uint64 - -func (m bitmap64) IsSet(pos int) bool { - return m&(1<= sliceLen { - break - } - } -} - -func (m *matcher) matchNamed(state *MatcherState, name string, n ast.Node) bool { - prev, ok := 
findNamed(state.capture, name) - if !ok { - // First occurrence, record value. - state.capture = append(state.capture, CapturedNode{Name: name, Node: n}) - return true - } - - return equalNodes(prev, n) -} - -func (m *matcher) matchNamedField(state *MatcherState, name string, n ast.Node) bool { - prev, ok := findNamed(state.capture, name) - if !ok { - // First occurrence, record value. - unwrapped := m.unwrapNode(n) - state.capture = append(state.capture, CapturedNode{Name: name, Node: unwrapped}) - return true - } - n = m.unwrapNode(n) - return equalNodes(prev, n) -} - -func (m *matcher) unwrapNode(x ast.Node) ast.Node { - switch x := x.(type) { - case *ast.Field: - if len(x.Names) == 0 { - return x.Type - } - case *ast.FieldList: - if x != nil && len(x.List) == 1 && len(x.List[0].Names) == 0 { - return x.List[0].Type - } - } - return x -} - -func (m *matcher) matchNodeWithInst(state *MatcherState, inst instruction, n ast.Node) bool { - switch inst.op { - case opNode: - return n != nil - case opOptNode: - return true - - case opNamedNode: - return n != nil && m.matchNamed(state, m.stringValue(inst), n) - case opNamedOptNode: - return m.matchNamed(state, m.stringValue(inst), n) - - case opFieldNode: - n, ok := n.(*ast.FieldList) - return ok && n != nil && len(n.List) == 1 && len(n.List[0].Names) == 0 - case opNamedFieldNode: - return n != nil && m.matchNamedField(state, m.stringValue(inst), n) - - case opBasicLit: - n, ok := n.(*ast.BasicLit) - return ok && m.ifaceValue(inst) == literalValue(n) - - case opStrictIntLit: - n, ok := n.(*ast.BasicLit) - return ok && n.Kind == token.INT && m.stringValue(inst) == n.Value - case opStrictFloatLit: - n, ok := n.(*ast.BasicLit) - return ok && n.Kind == token.FLOAT && m.stringValue(inst) == n.Value - case opStrictCharLit: - n, ok := n.(*ast.BasicLit) - return ok && n.Kind == token.CHAR && m.stringValue(inst) == n.Value - case opStrictStringLit: - n, ok := n.(*ast.BasicLit) - return ok && n.Kind == token.STRING && m.stringValue(inst) == n.Value - case opStrictComplexLit: - n, ok := n.(*ast.BasicLit) - return ok && n.Kind == token.IMAG && m.stringValue(inst) == n.Value - - case opIdent: - n, ok := n.(*ast.Ident) - return ok && m.stringValue(inst) == n.Name - - case opPkg: - n, ok := n.(*ast.Ident) - if !ok { - return false - } - obj := state.Types.ObjectOf(n) - if obj == nil { - return false - } - pkgName, ok := obj.(*types.PkgName) - return ok && pkgName.Imported().Path() == m.stringValue(inst) - - case opBinaryExpr: - n, ok := n.(*ast.BinaryExpr) - return ok && n.Op == token.Token(inst.value) && - m.matchNode(state, n.X) && m.matchNode(state, n.Y) - - case opUnaryExpr: - n, ok := n.(*ast.UnaryExpr) - return ok && n.Op == token.Token(inst.value) && m.matchNode(state, n.X) - - case opStarExpr: - n, ok := n.(*ast.StarExpr) - return ok && m.matchNode(state, n.X) - - case opVariadicCallExpr: - n, ok := n.(*ast.CallExpr) - return ok && n.Ellipsis.IsValid() && m.matchNode(state, n.Fun) && m.matchArgList(state, n.Args) - case opNonVariadicCallExpr: - n, ok := n.(*ast.CallExpr) - return ok && !n.Ellipsis.IsValid() && m.matchNode(state, n.Fun) && m.matchArgList(state, n.Args) - case opMaybeVariadicCallExpr: - n, ok := n.(*ast.CallExpr) - if !ok { - return false - } - if n.Ellipsis.IsValid() && len(n.Args) <= int(inst.value) { - return false - } - return m.matchNode(state, n.Fun) && m.matchArgList(state, n.Args) - case opCallExpr: - n, ok := n.(*ast.CallExpr) - return ok && m.matchNode(state, n.Fun) && m.matchArgList(state, n.Args) - - case 
opSimpleSelectorExpr: - n, ok := n.(*ast.SelectorExpr) - return ok && m.stringValue(inst) == n.Sel.Name && m.matchNode(state, n.X) - case opSelectorExpr: - n, ok := n.(*ast.SelectorExpr) - return ok && m.matchNode(state, n.Sel) && m.matchNode(state, n.X) - - case opTypeAssertExpr: - n, ok := n.(*ast.TypeAssertExpr) - return ok && m.matchNode(state, n.X) && m.matchNode(state, n.Type) - case opTypeSwitchAssertExpr: - n, ok := n.(*ast.TypeAssertExpr) - return ok && n.Type == nil && m.matchNode(state, n.X) - - case opSliceExpr: - n, ok := n.(*ast.SliceExpr) - return ok && n.Low == nil && n.High == nil && m.matchNode(state, n.X) - case opSliceFromExpr: - n, ok := n.(*ast.SliceExpr) - return ok && n.High == nil && !n.Slice3 && - m.matchNode(state, n.X) && m.matchNode(state, n.Low) - case opSliceToExpr: - n, ok := n.(*ast.SliceExpr) - return ok && n.Low == nil && !n.Slice3 && - m.matchNode(state, n.X) && m.matchNode(state, n.High) - case opSliceFromToExpr: - n, ok := n.(*ast.SliceExpr) - return ok && !n.Slice3 && - m.matchNode(state, n.X) && m.matchNode(state, n.Low) && m.matchNode(state, n.High) - case opSliceToCapExpr: - n, ok := n.(*ast.SliceExpr) - return ok && n.Low == nil && - m.matchNode(state, n.X) && m.matchNode(state, n.High) && m.matchNode(state, n.Max) - case opSliceFromToCapExpr: - n, ok := n.(*ast.SliceExpr) - return ok && m.matchNode(state, n.X) && m.matchNode(state, n.Low) && m.matchNode(state, n.High) && m.matchNode(state, n.Max) - - case opIndexExpr: - n, ok := n.(*ast.IndexExpr) - return ok && m.matchNode(state, n.X) && m.matchNode(state, n.Index) - - case opIndexListExpr: - n, ok := n.(*typeparams.IndexListExpr) - return ok && m.matchNode(state, n.X) && m.matchExprSlice(state, n.Indices) - - case opKeyValueExpr: - n, ok := n.(*ast.KeyValueExpr) - return ok && m.matchNode(state, n.Key) && m.matchNode(state, n.Value) - - case opParenExpr: - n, ok := n.(*ast.ParenExpr) - return ok && m.matchNode(state, n.X) - - case opEllipsis: - n, ok := n.(*ast.Ellipsis) - return ok && n.Elt == nil - case opTypedEllipsis: - n, ok := n.(*ast.Ellipsis) - return ok && n.Elt != nil && m.matchNode(state, n.Elt) - - case opSliceType: - n, ok := n.(*ast.ArrayType) - return ok && n.Len == nil && m.matchNode(state, n.Elt) - case opArrayType: - n, ok := n.(*ast.ArrayType) - return ok && n.Len != nil && m.matchNode(state, n.Len) && m.matchNode(state, n.Elt) - case opMapType: - n, ok := n.(*ast.MapType) - return ok && m.matchNode(state, n.Key) && m.matchNode(state, n.Value) - case opChanType: - n, ok := n.(*ast.ChanType) - return ok && ast.ChanDir(inst.value) == n.Dir && m.matchNode(state, n.Value) - case opVoidFuncType: - n, ok := n.(*ast.FuncType) - return ok && n.Results == nil && m.matchNode(state, n.Params) - case opGenericVoidFuncType: - n, ok := n.(*ast.FuncType) - return ok && n.Results == nil && m.matchNode(state, typeparams.ForFuncType(n)) && m.matchNode(state, n.Params) - case opFuncType: - n, ok := n.(*ast.FuncType) - return ok && m.matchNode(state, n.Params) && m.matchNode(state, n.Results) - case opGenericFuncType: - n, ok := n.(*ast.FuncType) - return ok && m.matchNode(state, typeparams.ForFuncType(n)) && m.matchNode(state, n.Params) && m.matchNode(state, n.Results) - case opStructType: - n, ok := n.(*ast.StructType) - return ok && m.matchNode(state, n.Fields) - case opInterfaceType: - n, ok := n.(*ast.InterfaceType) - return ok && m.matchNode(state, n.Methods) - case opEfaceType: - switch n := n.(type) { - case *ast.InterfaceType: - return len(n.Methods.List) == 0 - case *ast.Ident: - 
return n.Name == "any" - default: - return false - } - - case opCompositeLit: - n, ok := n.(*ast.CompositeLit) - return ok && n.Type == nil && m.matchExprSlice(state, n.Elts) - case opTypedCompositeLit: - n, ok := n.(*ast.CompositeLit) - return ok && n.Type != nil && m.matchNode(state, n.Type) && m.matchExprSlice(state, n.Elts) - - case opUnnamedField: - n, ok := n.(*ast.Field) - return ok && len(n.Names) == 0 && m.matchNode(state, n.Type) - case opSimpleField: - n, ok := n.(*ast.Field) - return ok && len(n.Names) == 1 && m.stringValue(inst) == n.Names[0].Name && m.matchNode(state, n.Type) - case opField: - n, ok := n.(*ast.Field) - return ok && len(n.Names) == 1 && m.matchNode(state, n.Names[0]) && m.matchNode(state, n.Type) - case opMultiField: - n, ok := n.(*ast.Field) - return ok && len(n.Names) >= 2 && m.matchIdentSlice(state, n.Names) && m.matchNode(state, n.Type) - case opFieldList: - // FieldList could be nil in places like function return types. - n, ok := n.(*ast.FieldList) - return ok && n != nil && m.matchFieldSlice(state, n.List) - - case opFuncLit: - n, ok := n.(*ast.FuncLit) - return ok && m.matchNode(state, n.Type) && m.matchNode(state, n.Body) - - case opAssignStmt: - n, ok := n.(*ast.AssignStmt) - return ok && token.Token(inst.value) == n.Tok && - len(n.Lhs) == 1 && m.matchNode(state, n.Lhs[0]) && - len(n.Rhs) == 1 && m.matchNode(state, n.Rhs[0]) - case opMultiAssignStmt: - n, ok := n.(*ast.AssignStmt) - return ok && token.Token(inst.value) == n.Tok && - m.matchExprSlice(state, n.Lhs) && m.matchExprSlice(state, n.Rhs) - - case opExprStmt: - n, ok := n.(*ast.ExprStmt) - return ok && m.matchNode(state, n.X) - - case opGoStmt: - n, ok := n.(*ast.GoStmt) - return ok && m.matchNode(state, n.Call) - case opDeferStmt: - n, ok := n.(*ast.DeferStmt) - return ok && m.matchNode(state, n.Call) - case opSendStmt: - n, ok := n.(*ast.SendStmt) - return ok && m.matchNode(state, n.Chan) && m.matchNode(state, n.Value) - - case opBlockStmt: - n, ok := n.(*ast.BlockStmt) - return ok && m.matchStmtSlice(state, n.List) - - case opIfStmt: - n, ok := n.(*ast.IfStmt) - return ok && n.Init == nil && n.Else == nil && - m.matchNode(state, n.Cond) && m.matchNode(state, n.Body) - case opIfElseStmt: - n, ok := n.(*ast.IfStmt) - return ok && n.Init == nil && n.Else != nil && - m.matchNode(state, n.Cond) && m.matchNode(state, n.Body) && m.matchNode(state, n.Else) - case opIfInitStmt: - n, ok := n.(*ast.IfStmt) - return ok && n.Else == nil && - m.matchNode(state, n.Init) && m.matchNode(state, n.Cond) && m.matchNode(state, n.Body) - case opIfInitElseStmt: - n, ok := n.(*ast.IfStmt) - return ok && n.Else != nil && - m.matchNode(state, n.Init) && m.matchNode(state, n.Cond) && m.matchNode(state, n.Body) && m.matchNode(state, n.Else) - - case opIfNamedOptStmt: - n, ok := n.(*ast.IfStmt) - return ok && n.Else == nil && m.matchNode(state, n.Body) && - m.matchNamed(state, m.stringValue(inst), m.toStmtSlice(state, n.Cond, n.Init)) - case opIfNamedOptElseStmt: - n, ok := n.(*ast.IfStmt) - return ok && n.Else != nil && m.matchNode(state, n.Body) && m.matchNode(state, n.Else) && - m.matchNamed(state, m.stringValue(inst), m.toStmtSlice(state, n.Cond, n.Init)) - - case opCaseClause: - n, ok := n.(*ast.CaseClause) - return ok && n.List != nil && m.matchExprSlice(state, n.List) && m.matchStmtSlice(state, n.Body) - case opDefaultCaseClause: - n, ok := n.(*ast.CaseClause) - return ok && n.List == nil && m.matchStmtSlice(state, n.Body) - - case opSwitchStmt: - n, ok := n.(*ast.SwitchStmt) - return ok && n.Init == nil && 
n.Tag == nil && m.matchStmtSlice(state, n.Body.List) - case opSwitchTagStmt: - n, ok := n.(*ast.SwitchStmt) - return ok && n.Init == nil && m.matchNode(state, n.Tag) && m.matchStmtSlice(state, n.Body.List) - case opSwitchInitStmt: - n, ok := n.(*ast.SwitchStmt) - return ok && n.Tag == nil && m.matchNode(state, n.Init) && m.matchStmtSlice(state, n.Body.List) - case opSwitchInitTagStmt: - n, ok := n.(*ast.SwitchStmt) - return ok && m.matchNode(state, n.Init) && m.matchNode(state, n.Tag) && m.matchStmtSlice(state, n.Body.List) - - case opTypeSwitchStmt: - n, ok := n.(*ast.TypeSwitchStmt) - return ok && n.Init == nil && m.matchNode(state, n.Assign) && m.matchStmtSlice(state, n.Body.List) - case opTypeSwitchInitStmt: - n, ok := n.(*ast.TypeSwitchStmt) - return ok && m.matchNode(state, n.Init) && - m.matchNode(state, n.Assign) && m.matchStmtSlice(state, n.Body.List) - - case opCommClause: - n, ok := n.(*ast.CommClause) - return ok && n.Comm != nil && m.matchNode(state, n.Comm) && m.matchStmtSlice(state, n.Body) - case opDefaultCommClause: - n, ok := n.(*ast.CommClause) - return ok && n.Comm == nil && m.matchStmtSlice(state, n.Body) - - case opSelectStmt: - n, ok := n.(*ast.SelectStmt) - return ok && m.matchStmtSlice(state, n.Body.List) - - case opRangeStmt: - n, ok := n.(*ast.RangeStmt) - return ok && n.Key == nil && n.Value == nil && m.matchNode(state, n.X) && m.matchNode(state, n.Body) - case opRangeKeyStmt: - n, ok := n.(*ast.RangeStmt) - return ok && n.Key != nil && n.Value == nil && token.Token(inst.value) == n.Tok && - m.matchNode(state, n.Key) && m.matchNode(state, n.X) && m.matchNode(state, n.Body) - case opRangeKeyValueStmt: - n, ok := n.(*ast.RangeStmt) - return ok && n.Key != nil && n.Value != nil && token.Token(inst.value) == n.Tok && - m.matchNode(state, n.Key) && m.matchNode(state, n.Value) && m.matchNode(state, n.X) && m.matchNode(state, n.Body) - - case opForStmt: - n, ok := n.(*ast.ForStmt) - return ok && n.Init == nil && n.Cond == nil && n.Post == nil && - m.matchNode(state, n.Body) - case opForPostStmt: - n, ok := n.(*ast.ForStmt) - return ok && n.Init == nil && n.Cond == nil && n.Post != nil && - m.matchNode(state, n.Post) && m.matchNode(state, n.Body) - case opForCondStmt: - n, ok := n.(*ast.ForStmt) - return ok && n.Init == nil && n.Cond != nil && n.Post == nil && - m.matchNode(state, n.Cond) && m.matchNode(state, n.Body) - case opForCondPostStmt: - n, ok := n.(*ast.ForStmt) - return ok && n.Init == nil && n.Cond != nil && n.Post != nil && - m.matchNode(state, n.Cond) && m.matchNode(state, n.Post) && m.matchNode(state, n.Body) - case opForInitStmt: - n, ok := n.(*ast.ForStmt) - return ok && n.Init != nil && n.Cond == nil && n.Post == nil && - m.matchNode(state, n.Init) && m.matchNode(state, n.Body) - case opForInitPostStmt: - n, ok := n.(*ast.ForStmt) - return ok && n.Init != nil && n.Cond == nil && n.Post != nil && - m.matchNode(state, n.Init) && m.matchNode(state, n.Post) && m.matchNode(state, n.Body) - case opForInitCondStmt: - n, ok := n.(*ast.ForStmt) - return ok && n.Init != nil && n.Cond != nil && n.Post == nil && - m.matchNode(state, n.Init) && m.matchNode(state, n.Cond) && m.matchNode(state, n.Body) - case opForInitCondPostStmt: - n, ok := n.(*ast.ForStmt) - return ok && m.matchNode(state, n.Init) && m.matchNode(state, n.Cond) && m.matchNode(state, n.Post) && m.matchNode(state, n.Body) - - case opIncDecStmt: - n, ok := n.(*ast.IncDecStmt) - return ok && token.Token(inst.value) == n.Tok && m.matchNode(state, n.X) - - case opReturnStmt: - n, ok := n.(*ast.ReturnStmt) 
- return ok && m.matchExprSlice(state, n.Results) - - case opLabeledStmt: - n, ok := n.(*ast.LabeledStmt) - return ok && m.matchNode(state, n.Label) && m.matchNode(state, n.Stmt) - case opSimpleLabeledStmt: - n, ok := n.(*ast.LabeledStmt) - return ok && m.stringValue(inst) == n.Label.Name && m.matchNode(state, n.Stmt) - - case opLabeledBranchStmt: - n, ok := n.(*ast.BranchStmt) - return ok && n.Label != nil && token.Token(inst.value) == n.Tok && m.matchNode(state, n.Label) - case opSimpleLabeledBranchStmt: - n, ok := n.(*ast.BranchStmt) - return ok && n.Label != nil && m.stringValue(inst) == n.Label.Name && token.Token(inst.value) == n.Tok - case opBranchStmt: - n, ok := n.(*ast.BranchStmt) - return ok && n.Label == nil && token.Token(inst.value) == n.Tok - - case opEmptyStmt: - _, ok := n.(*ast.EmptyStmt) - return ok - - case opSimpleFuncDecl: - n, ok := n.(*ast.FuncDecl) - return ok && n.Recv == nil && n.Body != nil && typeparams.ForFuncType(n.Type) == nil && - n.Name.Name == m.stringValue(inst) && m.matchNode(state, n.Type) && m.matchNode(state, n.Body) - case opFuncDecl: - n, ok := n.(*ast.FuncDecl) - return ok && n.Recv == nil && n.Body != nil && - m.matchNode(state, n.Name) && m.matchNode(state, n.Type) && m.matchNode(state, n.Body) - case opFuncProtoDecl: - n, ok := n.(*ast.FuncDecl) - return ok && n.Recv == nil && n.Body == nil && typeparams.ForFuncType(n.Type) == nil && - m.matchNode(state, n.Name) && m.matchNode(state, n.Type) - case opMethodDecl: - n, ok := n.(*ast.FuncDecl) - return ok && n.Recv != nil && n.Body != nil && - m.matchNode(state, n.Recv) && m.matchNode(state, n.Name) && m.matchNode(state, n.Type) && m.matchNode(state, n.Body) - case opMethodProtoDecl: - n, ok := n.(*ast.FuncDecl) - return ok && n.Recv != nil && n.Body == nil && - m.matchNode(state, n.Recv) && m.matchNode(state, n.Name) && m.matchNode(state, n.Type) - - case opValueSpec: - n, ok := n.(*ast.ValueSpec) - return ok && len(n.Values) == 0 && n.Type == nil && - len(n.Names) == 1 && m.matchNode(state, n.Names[0]) - case opValueInitSpec: - n, ok := n.(*ast.ValueSpec) - return ok && len(n.Values) != 0 && n.Type == nil && - m.matchIdentSlice(state, n.Names) && m.matchExprSlice(state, n.Values) - case opTypedValueSpec: - n, ok := n.(*ast.ValueSpec) - return ok && len(n.Values) == 0 && n.Type != nil && - m.matchIdentSlice(state, n.Names) && m.matchNode(state, n.Type) - case opTypedValueInitSpec: - n, ok := n.(*ast.ValueSpec) - return ok && len(n.Values) != 0 && - m.matchIdentSlice(state, n.Names) && m.matchNode(state, n.Type) && m.matchExprSlice(state, n.Values) - - case opSimpleTypeSpec: - n, ok := n.(*ast.TypeSpec) - return ok && !n.Assign.IsValid() && typeparams.ForTypeSpec(n) == nil && n.Name.Name == m.stringValue(inst) && m.matchNode(state, n.Type) - case opTypeSpec: - n, ok := n.(*ast.TypeSpec) - return ok && !n.Assign.IsValid() && m.matchNode(state, n.Name) && m.matchNode(state, n.Type) - case opGenericTypeSpec: - n, ok := n.(*ast.TypeSpec) - return ok && !n.Assign.IsValid() && m.matchNode(state, n.Name) && m.matchNode(state, typeparams.ForTypeSpec(n)) && m.matchNode(state, n.Type) - case opTypeAliasSpec: - n, ok := n.(*ast.TypeSpec) - return ok && n.Assign.IsValid() && m.matchNode(state, n.Name) && m.matchNode(state, n.Type) - - case opDeclStmt: - n, ok := n.(*ast.DeclStmt) - return ok && m.matchNode(state, n.Decl) - - case opConstDecl: - n, ok := n.(*ast.GenDecl) - return ok && n.Tok == token.CONST && m.matchSpecSlice(state, n.Specs) - case opVarDecl: - n, ok := n.(*ast.GenDecl) - return ok && n.Tok == 
token.VAR && m.matchSpecSlice(state, n.Specs) - case opTypeDecl: - n, ok := n.(*ast.GenDecl) - return ok && n.Tok == token.TYPE && m.matchSpecSlice(state, n.Specs) - case opAnyImportDecl: - n, ok := n.(*ast.GenDecl) - return ok && n.Tok == token.IMPORT - case opImportDecl: - n, ok := n.(*ast.GenDecl) - return ok && n.Tok == token.IMPORT && m.matchSpecSlice(state, n.Specs) - - case opEmptyPackage: - n, ok := n.(*ast.File) - return ok && len(n.Imports) == 0 && len(n.Decls) == 0 && m.matchNode(state, n.Name) - - default: - panic(fmt.Sprintf("unexpected op %s", inst.op)) - } -} - -func (m *matcher) matchNode(state *MatcherState, n ast.Node) bool { - return m.matchNodeWithInst(state, m.nextInst(state), n) -} - -func (m *matcher) matchArgList(state *MatcherState, exprs []ast.Expr) bool { - inst := m.nextInst(state) - if inst.op != opSimpleArgList { - return m.matchExprSlice(state, exprs) - } - if len(exprs) != int(inst.value) { - return false - } - for _, x := range exprs { - if !m.matchNode(state, x) { - return false - } - } - return true -} - -func (m *matcher) matchStmtSlice(state *MatcherState, stmts []ast.Stmt) bool { - slice := m.allocNodeSlice(state) - slice.assignStmtSlice(stmts) - matched, _ := m.matchNodeList(state, slice, false) - return matched != nil -} - -func (m *matcher) matchExprSlice(state *MatcherState, exprs []ast.Expr) bool { - slice := m.allocNodeSlice(state) - slice.assignExprSlice(exprs) - matched, _ := m.matchNodeList(state, slice, false) - return matched != nil -} - -func (m *matcher) matchFieldSlice(state *MatcherState, fields []*ast.Field) bool { - slice := m.allocNodeSlice(state) - slice.assignFieldSlice(fields) - matched, _ := m.matchNodeList(state, slice, false) - return matched != nil -} - -func (m *matcher) matchIdentSlice(state *MatcherState, idents []*ast.Ident) bool { - slice := m.allocNodeSlice(state) - slice.assignIdentSlice(idents) - matched, _ := m.matchNodeList(state, slice, false) - return matched != nil -} - -func (m *matcher) matchSpecSlice(state *MatcherState, specs []ast.Spec) bool { - slice := m.allocNodeSlice(state) - slice.assignSpecSlice(specs) - matched, _ := m.matchNodeList(state, slice, false) - return matched != nil -} - -// matchNodeList matches two lists of nodes. It uses a common algorithm to match -// wildcard patterns with any number of nodes without recursion. -func (m *matcher) matchNodeList(state *MatcherState, nodes *NodeSlice, partial bool) (matched ast.Node, offset int) { - sliceLen := nodes.Len() - inst := m.nextInst(state) - if inst.op == opEnd { - if sliceLen == 0 { - return nodes, 0 - } - return nil, -1 - } - pcBase := state.pc - pcNext := 0 - j := 0 - jNext := 0 - partialStart, partialEnd := 0, sliceLen - - type restart struct { - matches []CapturedNode - pc int - j int - wildStart int - wildName string - } - // We need to stack these because otherwise some edge cases - // would not match properly. Since we have various kinds of - // wildcards (nodes containing them, $_, and $*_), in some cases - // we may have to go back and do multiple restarts to get to the - // right starting position. 
- var stack []restart - wildName := "" - wildStart := 0 - push := func(next int) { - if next > sliceLen { - return // would be discarded anyway - } - pcNext = state.pc - 1 - jNext = next - stack = append(stack, restart{state.capture, pcNext, next, wildStart, wildName}) - } - pop := func() { - j = jNext - state.pc = pcNext - state.capture = stack[len(stack)-1].matches - wildName = stack[len(stack)-1].wildName - wildStart = stack[len(stack)-1].wildStart - stack = stack[:len(stack)-1] - pcNext = 0 - jNext = 0 - if len(stack) != 0 { - pcNext = stack[len(stack)-1].pc - jNext = stack[len(stack)-1].j - } - } - - // wouldMatch returns whether the current wildcard - if any - - // matches the nodes we are currently trying it on. - wouldMatch := func() bool { - switch wildName { - case "", "_": - return true - } - slice := m.allocNodeSlice(state) - nodes.SliceInto(slice, wildStart, j) - return m.matchNamed(state, wildName, slice) - } - for ; inst.op != opEnd || j < sliceLen; inst = m.nextInst(state) { - if inst.op != opEnd { - if inst.op == opNodeSeq || inst.op == opNamedNodeSeq { - // keep track of where this wildcard - // started (if name == wildName, - // we're trying the same wildcard - // matching one more node) - name := "_" - if inst.op == opNamedNodeSeq { - name = m.stringValue(inst) - } - if name != wildName { - wildStart = j - wildName = name - } - // try to match zero or more at j, - // restarting at j+1 if it fails - push(j + 1) - continue - } - if partial && state.pc == pcBase { - // let "b; c" match "a; b; c" - // (simulates a $*_ at the beginning) - partialStart = j - push(j + 1) - } - if j < sliceLen && wouldMatch() && m.matchNodeWithInst(state, inst, nodes.At(j)) { - // ordinary match - wildName = "" - j++ - continue - } - } - if partial && inst.op == opEnd && wildName == "" { - partialEnd = j - break // let "b; c" match "b; c; d" - } - // mismatch, try to restart - if 0 < jNext && jNext <= sliceLen && (state.pc != pcNext || j != jNext) { - pop() - continue - } - return nil, -1 - } - if !wouldMatch() { - return nil, -1 - } - slice := m.allocNodeSlice(state) - nodes.SliceInto(slice, partialStart, partialEnd) - return slice, partialEnd + 1 -} - -func (m *matcher) matchRangeClause(state *MatcherState, n ast.Node, accept func(MatchData)) { - rng, ok := n.(*ast.RangeStmt) - if !ok { - return - } - m.resetCapture(state) - if !m.matchNode(state, rng.X) { - return - } - - // Now the fun begins: there is no Range pos in RangeStmt, so we need - // to make our best guess to find it. - // See https://github.com/golang/go/issues/50429 - // - // In gogrep we don't have []byte sources available, and - // it would be cumbersome to walk bytes manually to find the "range" keyword. - // What we can do is to hope that code is: - // 1. Properly gofmt-ed. - // 2. There are no some freefloating artifacts between TokPos and "range". - var from int - if rng.TokPos != token.NoPos { - // Start from the end of the '=' or ':=' token. - from = int(rng.TokPos + 1) - if rng.Tok == token.DEFINE { - from++ // ':=' is 1 byte longer that '=' - } - // Now suppose we have 'for _, x := range xs {...}' - // If this is true, then `xs.Pos.Offset - len(" range ")` would - // lead us to the current 'from' value. - // It's syntactically correct to have `:=range`, so we don't - // unconditionally add a space here. - if int(rng.X.Pos())-len(" range ") == from { - // This means that there is exactly one space between Tok and "range". 
- // There are some afwul cases where this might break, but let's - // not think about them too much. - from += len(" ") - } - } else { - // `for range xs {...}` form. - // There should be at least 1 space between "for" and "range". - from = int(rng.For) + len("for ") - } - - state.partial.X = rng - state.partial.from = token.Pos(from) - state.partial.to = rng.X.End() - - accept(MatchData{ - Capture: state.capture, - Node: &state.partial, - }) -} - -func (m *matcher) matchRangeHeader(state *MatcherState, n ast.Node, accept func(MatchData)) { - rng, ok := n.(*ast.RangeStmt) - if ok && rng.Key == nil && rng.Value == nil && m.matchNode(state, rng.X) { - m.setRangeHeaderPos(state, rng) - accept(MatchData{ - Capture: state.capture, - Node: &state.partial, - }) - } -} - -func (m *matcher) matchRangeKeyHeader(state *MatcherState, inst instruction, n ast.Node, accept func(MatchData)) { - rng, ok := n.(*ast.RangeStmt) - if ok && rng.Key != nil && rng.Value == nil && token.Token(inst.value) == rng.Tok && m.matchNode(state, rng.Key) && m.matchNode(state, rng.X) { - m.setRangeHeaderPos(state, rng) - accept(MatchData{ - Capture: state.capture, - Node: &state.partial, - }) - } -} - -func (m *matcher) matchRangeKeyValueHeader(state *MatcherState, inst instruction, n ast.Node, accept func(MatchData)) { - rng, ok := n.(*ast.RangeStmt) - if ok && rng.Key != nil && rng.Value != nil && token.Token(inst.value) == rng.Tok && m.matchNode(state, rng.Key) && m.matchNode(state, rng.Value) && m.matchNode(state, rng.X) { - m.setRangeHeaderPos(state, rng) - accept(MatchData{ - Capture: state.capture, - Node: &state.partial, - }) - } -} - -func (m *matcher) setRangeHeaderPos(state *MatcherState, rng *ast.RangeStmt) { - state.partial.X = rng - state.partial.from = rng.Pos() - state.partial.to = rng.Body.Pos() - 1 -} - -func findNamed(capture []CapturedNode, name string) (ast.Node, bool) { - for _, c := range capture { - if c.Name == name { - return c.Node, true - } - } - return nil, false -} - -func literalValue(lit *ast.BasicLit) interface{} { - switch lit.Kind { - case token.INT: - v, err := strconv.ParseInt(lit.Value, 0, 64) - if err == nil { - return v - } - case token.CHAR: - s, err := strconv.Unquote(lit.Value) - if err != nil { - return nil - } - // Return the first rune. 
- for _, c := range s { - return c - } - case token.STRING: - s, err := strconv.Unquote(lit.Value) - if err == nil { - return s - } - case token.FLOAT: - v, err := strconv.ParseFloat(lit.Value, 64) - if err == nil { - return v - } - case token.IMAG: - v, err := strconv.ParseComplex(lit.Value, 128) - if err == nil { - return v - } - } - return nil -} - -func equalNodes(x, y ast.Node) bool { - if x == nil || y == nil { - return x == y - } - if x, ok := x.(*NodeSlice); ok { - y, ok := y.(*NodeSlice) - if !ok || x.Kind != y.Kind || x.Len() != y.Len() { - return false - } - switch x.Kind { - case ExprNodeSlice: - for i, n1 := range x.exprSlice { - n2 := y.exprSlice[i] - if !astequal.Expr(n1, n2) { - return false - } - } - case StmtNodeSlice: - for i, n1 := range x.stmtSlice { - n2 := y.stmtSlice[i] - if !astequal.Stmt(n1, n2) { - return false - } - } - case FieldNodeSlice: - for i, n1 := range x.fieldSlice { - n2 := y.fieldSlice[i] - if !astequal.Node(n1, n2) { - return false - } - } - case IdentNodeSlice: - for i, n1 := range x.identSlice { - n2 := y.identSlice[i] - if n1.Name != n2.Name { - return false - } - } - case SpecNodeSlice: - for i, n1 := range x.specSlice { - n2 := y.specSlice[i] - if !astequal.Node(n1, n2) { - return false - } - } - case DeclNodeSlice: - for i, n1 := range x.declSlice { - n2 := y.declSlice[i] - if !astequal.Decl(n1, n2) { - return false - } - } - } - return true - } - return astequal.Node(x, y) -} diff --git a/vendor/github.com/quasilyte/gogrep/nodetag/nodetag.go b/vendor/github.com/quasilyte/gogrep/nodetag/nodetag.go deleted file mode 100644 index 8060e0ece..000000000 --- a/vendor/github.com/quasilyte/gogrep/nodetag/nodetag.go +++ /dev/null @@ -1,287 +0,0 @@ -package nodetag - -import ( - "go/ast" - - "golang.org/x/exp/typeparams" -) - -type Value int - -const ( - Unknown Value = iota - - ArrayType - AssignStmt - BasicLit - BinaryExpr - BlockStmt - BranchStmt - CallExpr - CaseClause - ChanType - CommClause - CompositeLit - DeclStmt - DeferStmt - Ellipsis - EmptyStmt - ExprStmt - File - ForStmt - FuncDecl - FuncLit - FuncType - GenDecl - GoStmt - Ident - IfStmt - ImportSpec - IncDecStmt - IndexExpr - IndexListExpr - InterfaceType - KeyValueExpr - LabeledStmt - MapType - ParenExpr - RangeStmt - ReturnStmt - SelectStmt - SelectorExpr - SendStmt - SliceExpr - StarExpr - StructType - SwitchStmt - TypeAssertExpr - TypeSpec - TypeSwitchStmt - UnaryExpr - ValueSpec - - NumBuckets - - StmtList // gogrep stmt list - ExprList // gogrep expr list - DeclList // gogrep decl list - - Node // ast.Node - Expr // ast.Expr - Stmt // ast.Stmt -) - -func FromNode(n ast.Node) Value { - switch n.(type) { - case *ast.ArrayType: - return ArrayType - case *ast.AssignStmt: - return AssignStmt - case *ast.BasicLit: - return BasicLit - case *ast.BinaryExpr: - return BinaryExpr - case *ast.BlockStmt: - return BlockStmt - case *ast.BranchStmt: - return BranchStmt - case *ast.CallExpr: - return CallExpr - case *ast.CaseClause: - return CaseClause - case *ast.ChanType: - return ChanType - case *ast.CommClause: - return CommClause - case *ast.CompositeLit: - return CompositeLit - case *ast.DeclStmt: - return DeclStmt - case *ast.DeferStmt: - return DeferStmt - case *ast.Ellipsis: - return Ellipsis - case *ast.EmptyStmt: - return EmptyStmt - case *ast.ExprStmt: - return ExprStmt - case *ast.File: - return File - case *ast.ForStmt: - return ForStmt - case *ast.FuncDecl: - return FuncDecl - case *ast.FuncLit: - return FuncLit - case *ast.FuncType: - return FuncType - case *ast.GenDecl: - return 
GenDecl - case *ast.GoStmt: - return GoStmt - case *ast.Ident: - return Ident - case *ast.IfStmt: - return IfStmt - case *ast.ImportSpec: - return ImportSpec - case *ast.IncDecStmt: - return IncDecStmt - case *ast.IndexExpr: - return IndexExpr - case *typeparams.IndexListExpr: - return IndexListExpr - case *ast.InterfaceType: - return InterfaceType - case *ast.KeyValueExpr: - return KeyValueExpr - case *ast.LabeledStmt: - return LabeledStmt - case *ast.MapType: - return MapType - case *ast.ParenExpr: - return ParenExpr - case *ast.RangeStmt: - return RangeStmt - case *ast.ReturnStmt: - return ReturnStmt - case *ast.SelectStmt: - return SelectStmt - case *ast.SelectorExpr: - return SelectorExpr - case *ast.SendStmt: - return SendStmt - case *ast.SliceExpr: - return SliceExpr - case *ast.StarExpr: - return StarExpr - case *ast.StructType: - return StructType - case *ast.SwitchStmt: - return SwitchStmt - case *ast.TypeAssertExpr: - return TypeAssertExpr - case *ast.TypeSpec: - return TypeSpec - case *ast.TypeSwitchStmt: - return TypeSwitchStmt - case *ast.UnaryExpr: - return UnaryExpr - case *ast.ValueSpec: - return ValueSpec - default: - return Unknown - } -} - -func FromString(s string) Value { - switch s { - case "Expr": - return Expr - case "Stmt": - return Stmt - case "Node": - return Node - } - - switch s { - case "ArrayType": - return ArrayType - case "AssignStmt": - return AssignStmt - case "BasicLit": - return BasicLit - case "BinaryExpr": - return BinaryExpr - case "BlockStmt": - return BlockStmt - case "BranchStmt": - return BranchStmt - case "CallExpr": - return CallExpr - case "CaseClause": - return CaseClause - case "ChanType": - return ChanType - case "CommClause": - return CommClause - case "CompositeLit": - return CompositeLit - case "DeclStmt": - return DeclStmt - case "DeferStmt": - return DeferStmt - case "Ellipsis": - return Ellipsis - case "EmptyStmt": - return EmptyStmt - case "ExprStmt": - return ExprStmt - case "File": - return File - case "ForStmt": - return ForStmt - case "FuncDecl": - return FuncDecl - case "FuncLit": - return FuncLit - case "FuncType": - return FuncType - case "GenDecl": - return GenDecl - case "GoStmt": - return GoStmt - case "Ident": - return Ident - case "IfStmt": - return IfStmt - case "ImportSpec": - return ImportSpec - case "IncDecStmt": - return IncDecStmt - case "IndexExpr": - return IndexExpr - case "IndexListExpr": - return IndexListExpr - case "InterfaceType": - return InterfaceType - case "KeyValueExpr": - return KeyValueExpr - case "LabeledStmt": - return LabeledStmt - case "MapType": - return MapType - case "ParenExpr": - return ParenExpr - case "RangeStmt": - return RangeStmt - case "ReturnStmt": - return ReturnStmt - case "SelectStmt": - return SelectStmt - case "SelectorExpr": - return SelectorExpr - case "SendStmt": - return SendStmt - case "SliceExpr": - return SliceExpr - case "StarExpr": - return StarExpr - case "StructType": - return StructType - case "SwitchStmt": - return SwitchStmt - case "TypeAssertExpr": - return TypeAssertExpr - case "TypeSpec": - return TypeSpec - case "TypeSwitchStmt": - return TypeSwitchStmt - case "UnaryExpr": - return UnaryExpr - case "ValueSpec": - return ValueSpec - default: - return Unknown - } -} diff --git a/vendor/github.com/quasilyte/gogrep/operation_string.go b/vendor/github.com/quasilyte/gogrep/operation_string.go deleted file mode 100644 index 23eb20062..000000000 --- a/vendor/github.com/quasilyte/gogrep/operation_string.go +++ /dev/null @@ -1,154 +0,0 @@ -// Code generated by "stringer 
-type=operation -trimprefix=op"; DO NOT EDIT. - -package gogrep - -import "strconv" - -func _() { - // An "invalid array index" compiler error signifies that the constant values have changed. - // Re-run the stringer command to generate them again. - var x [1]struct{} - _ = x[opInvalid-0] - _ = x[opNode-1] - _ = x[opNamedNode-2] - _ = x[opNodeSeq-3] - _ = x[opNamedNodeSeq-4] - _ = x[opOptNode-5] - _ = x[opNamedOptNode-6] - _ = x[opFieldNode-7] - _ = x[opNamedFieldNode-8] - _ = x[opMultiStmt-9] - _ = x[opMultiExpr-10] - _ = x[opMultiDecl-11] - _ = x[opEnd-12] - _ = x[opBasicLit-13] - _ = x[opStrictIntLit-14] - _ = x[opStrictFloatLit-15] - _ = x[opStrictCharLit-16] - _ = x[opStrictStringLit-17] - _ = x[opStrictComplexLit-18] - _ = x[opIdent-19] - _ = x[opPkg-20] - _ = x[opIndexExpr-21] - _ = x[opIndexListExpr-22] - _ = x[opSliceExpr-23] - _ = x[opSliceFromExpr-24] - _ = x[opSliceToExpr-25] - _ = x[opSliceFromToExpr-26] - _ = x[opSliceToCapExpr-27] - _ = x[opSliceFromToCapExpr-28] - _ = x[opFuncLit-29] - _ = x[opCompositeLit-30] - _ = x[opTypedCompositeLit-31] - _ = x[opSimpleSelectorExpr-32] - _ = x[opSelectorExpr-33] - _ = x[opTypeAssertExpr-34] - _ = x[opTypeSwitchAssertExpr-35] - _ = x[opStructType-36] - _ = x[opInterfaceType-37] - _ = x[opEfaceType-38] - _ = x[opVoidFuncType-39] - _ = x[opGenericVoidFuncType-40] - _ = x[opFuncType-41] - _ = x[opGenericFuncType-42] - _ = x[opArrayType-43] - _ = x[opSliceType-44] - _ = x[opMapType-45] - _ = x[opChanType-46] - _ = x[opKeyValueExpr-47] - _ = x[opEllipsis-48] - _ = x[opTypedEllipsis-49] - _ = x[opStarExpr-50] - _ = x[opUnaryExpr-51] - _ = x[opBinaryExpr-52] - _ = x[opParenExpr-53] - _ = x[opArgList-54] - _ = x[opSimpleArgList-55] - _ = x[opVariadicCallExpr-56] - _ = x[opNonVariadicCallExpr-57] - _ = x[opMaybeVariadicCallExpr-58] - _ = x[opCallExpr-59] - _ = x[opAssignStmt-60] - _ = x[opMultiAssignStmt-61] - _ = x[opBranchStmt-62] - _ = x[opSimpleLabeledBranchStmt-63] - _ = x[opLabeledBranchStmt-64] - _ = x[opSimpleLabeledStmt-65] - _ = x[opLabeledStmt-66] - _ = x[opBlockStmt-67] - _ = x[opExprStmt-68] - _ = x[opGoStmt-69] - _ = x[opDeferStmt-70] - _ = x[opSendStmt-71] - _ = x[opEmptyStmt-72] - _ = x[opIncDecStmt-73] - _ = x[opReturnStmt-74] - _ = x[opIfStmt-75] - _ = x[opIfInitStmt-76] - _ = x[opIfElseStmt-77] - _ = x[opIfInitElseStmt-78] - _ = x[opIfNamedOptStmt-79] - _ = x[opIfNamedOptElseStmt-80] - _ = x[opSwitchStmt-81] - _ = x[opSwitchTagStmt-82] - _ = x[opSwitchInitStmt-83] - _ = x[opSwitchInitTagStmt-84] - _ = x[opSelectStmt-85] - _ = x[opTypeSwitchStmt-86] - _ = x[opTypeSwitchInitStmt-87] - _ = x[opCaseClause-88] - _ = x[opDefaultCaseClause-89] - _ = x[opCommClause-90] - _ = x[opDefaultCommClause-91] - _ = x[opForStmt-92] - _ = x[opForPostStmt-93] - _ = x[opForCondStmt-94] - _ = x[opForCondPostStmt-95] - _ = x[opForInitStmt-96] - _ = x[opForInitPostStmt-97] - _ = x[opForInitCondStmt-98] - _ = x[opForInitCondPostStmt-99] - _ = x[opRangeStmt-100] - _ = x[opRangeKeyStmt-101] - _ = x[opRangeKeyValueStmt-102] - _ = x[opRangeClause-103] - _ = x[opRangeHeader-104] - _ = x[opRangeKeyHeader-105] - _ = x[opRangeKeyValueHeader-106] - _ = x[opFieldList-107] - _ = x[opUnnamedField-108] - _ = x[opSimpleField-109] - _ = x[opField-110] - _ = x[opMultiField-111] - _ = x[opValueSpec-112] - _ = x[opValueInitSpec-113] - _ = x[opTypedValueInitSpec-114] - _ = x[opTypedValueSpec-115] - _ = x[opSimpleTypeSpec-116] - _ = x[opTypeSpec-117] - _ = x[opGenericTypeSpec-118] - _ = x[opTypeAliasSpec-119] - _ = x[opSimpleFuncDecl-120] - _ = x[opFuncDecl-121] - _ = 
x[opMethodDecl-122] - _ = x[opFuncProtoDecl-123] - _ = x[opMethodProtoDecl-124] - _ = x[opDeclStmt-125] - _ = x[opConstDecl-126] - _ = x[opVarDecl-127] - _ = x[opTypeDecl-128] - _ = x[opAnyImportDecl-129] - _ = x[opImportDecl-130] - _ = x[opEmptyPackage-131] -} - -const _operation_name = "InvalidNodeNamedNodeNodeSeqNamedNodeSeqOptNodeNamedOptNodeFieldNodeNamedFieldNodeMultiStmtMultiExprMultiDeclEndBasicLitStrictIntLitStrictFloatLitStrictCharLitStrictStringLitStrictComplexLitIdentPkgIndexExprIndexListExprSliceExprSliceFromExprSliceToExprSliceFromToExprSliceToCapExprSliceFromToCapExprFuncLitCompositeLitTypedCompositeLitSimpleSelectorExprSelectorExprTypeAssertExprTypeSwitchAssertExprStructTypeInterfaceTypeEfaceTypeVoidFuncTypeGenericVoidFuncTypeFuncTypeGenericFuncTypeArrayTypeSliceTypeMapTypeChanTypeKeyValueExprEllipsisTypedEllipsisStarExprUnaryExprBinaryExprParenExprArgListSimpleArgListVariadicCallExprNonVariadicCallExprMaybeVariadicCallExprCallExprAssignStmtMultiAssignStmtBranchStmtSimpleLabeledBranchStmtLabeledBranchStmtSimpleLabeledStmtLabeledStmtBlockStmtExprStmtGoStmtDeferStmtSendStmtEmptyStmtIncDecStmtReturnStmtIfStmtIfInitStmtIfElseStmtIfInitElseStmtIfNamedOptStmtIfNamedOptElseStmtSwitchStmtSwitchTagStmtSwitchInitStmtSwitchInitTagStmtSelectStmtTypeSwitchStmtTypeSwitchInitStmtCaseClauseDefaultCaseClauseCommClauseDefaultCommClauseForStmtForPostStmtForCondStmtForCondPostStmtForInitStmtForInitPostStmtForInitCondStmtForInitCondPostStmtRangeStmtRangeKeyStmtRangeKeyValueStmtRangeClauseRangeHeaderRangeKeyHeaderRangeKeyValueHeaderFieldListUnnamedFieldSimpleFieldFieldMultiFieldValueSpecValueInitSpecTypedValueInitSpecTypedValueSpecSimpleTypeSpecTypeSpecGenericTypeSpecTypeAliasSpecSimpleFuncDeclFuncDeclMethodDeclFuncProtoDeclMethodProtoDeclDeclStmtConstDeclVarDeclTypeDeclAnyImportDeclImportDeclEmptyPackage" - -var _operation_index = [...]uint16{0, 7, 11, 20, 27, 39, 46, 58, 67, 81, 90, 99, 108, 111, 119, 131, 145, 158, 173, 189, 194, 197, 206, 219, 228, 241, 252, 267, 281, 299, 306, 318, 335, 353, 365, 379, 399, 409, 422, 431, 443, 462, 470, 485, 494, 503, 510, 518, 530, 538, 551, 559, 568, 578, 587, 594, 607, 623, 642, 663, 671, 681, 696, 706, 729, 746, 763, 774, 783, 791, 797, 806, 814, 823, 833, 843, 849, 859, 869, 883, 897, 915, 925, 938, 952, 969, 979, 993, 1011, 1021, 1038, 1048, 1065, 1072, 1083, 1094, 1109, 1120, 1135, 1150, 1169, 1178, 1190, 1207, 1218, 1229, 1243, 1262, 1271, 1283, 1294, 1299, 1309, 1318, 1331, 1349, 1363, 1377, 1385, 1400, 1413, 1427, 1435, 1445, 1458, 1473, 1481, 1490, 1497, 1505, 1518, 1528, 1540} - -func (i operation) String() string { - if i >= operation(len(_operation_index)-1) { - return "operation(" + strconv.FormatInt(int64(i), 10) + ")" - } - return _operation_name[_operation_index[i]:_operation_index[i+1]] -} diff --git a/vendor/github.com/quasilyte/gogrep/operations.gen.go b/vendor/github.com/quasilyte/gogrep/operations.gen.go deleted file mode 100644 index 9af838ab8..000000000 --- a/vendor/github.com/quasilyte/gogrep/operations.gen.go +++ /dev/null @@ -1,1671 +0,0 @@ -// Code generated "gen_operations.go"; DO NOT EDIT. 
- -package gogrep - -import ( - "github.com/quasilyte/gogrep/nodetag" -) - -//go:generate stringer -type=operation -trimprefix=op -type operation uint8 - -const ( - opInvalid operation = 0 - - // Tag: Node - opNode operation = 1 - - // Tag: Node - // ValueIndex: strings | wildcard name - opNamedNode operation = 2 - - // Tag: Unknown - opNodeSeq operation = 3 - - // Tag: Unknown - // ValueIndex: strings | wildcard name - opNamedNodeSeq operation = 4 - - // Tag: Unknown - opOptNode operation = 5 - - // Tag: Unknown - // ValueIndex: strings | wildcard name - opNamedOptNode operation = 6 - - // Tag: Node - opFieldNode operation = 7 - - // Tag: Node - // ValueIndex: strings | wildcard name - opNamedFieldNode operation = 8 - - // Tag: StmtList - // Args: stmts... - // Example: f(); g() - opMultiStmt operation = 9 - - // Tag: ExprList - // Args: exprs... - // Example: f(), g() - opMultiExpr operation = 10 - - // Tag: DeclList - // Args: exprs... - // Example: f(), g() - opMultiDecl operation = 11 - - // Tag: Unknown - opEnd operation = 12 - - // Tag: BasicLit - // ValueIndex: ifaces | parsed literal value - opBasicLit operation = 13 - - // Tag: BasicLit - // ValueIndex: strings | raw literal value - opStrictIntLit operation = 14 - - // Tag: BasicLit - // ValueIndex: strings | raw literal value - opStrictFloatLit operation = 15 - - // Tag: BasicLit - // ValueIndex: strings | raw literal value - opStrictCharLit operation = 16 - - // Tag: BasicLit - // ValueIndex: strings | raw literal value - opStrictStringLit operation = 17 - - // Tag: BasicLit - // ValueIndex: strings | raw literal value - opStrictComplexLit operation = 18 - - // Tag: Ident - // ValueIndex: strings | ident name - opIdent operation = 19 - - // Tag: Ident - // ValueIndex: strings | package path - opPkg operation = 20 - - // Tag: IndexExpr - // Args: x expr - opIndexExpr operation = 21 - - // Tag: IndexListExpr - // Args: x exprs... - opIndexListExpr operation = 22 - - // Tag: SliceExpr - // Args: x - opSliceExpr operation = 23 - - // Tag: SliceExpr - // Args: x from - // Example: x[from:] - opSliceFromExpr operation = 24 - - // Tag: SliceExpr - // Args: x to - // Example: x[:to] - opSliceToExpr operation = 25 - - // Tag: SliceExpr - // Args: x from to - // Example: x[from:to] - opSliceFromToExpr operation = 26 - - // Tag: SliceExpr - // Args: x from cap - // Example: x[:from:cap] - opSliceToCapExpr operation = 27 - - // Tag: SliceExpr - // Args: x from to cap - // Example: x[from:to:cap] - opSliceFromToCapExpr operation = 28 - - // Tag: FuncLit - // Args: type block - opFuncLit operation = 29 - - // Tag: CompositeLit - // Args: elts... - // Example: {elts...} - opCompositeLit operation = 30 - - // Tag: CompositeLit - // Args: typ elts... 
- // Example: typ{elts...} - opTypedCompositeLit operation = 31 - - // Tag: SelectorExpr - // Args: x - // ValueIndex: strings | selector name - opSimpleSelectorExpr operation = 32 - - // Tag: SelectorExpr - // Args: x sel - opSelectorExpr operation = 33 - - // Tag: TypeAssertExpr - // Args: x typ - opTypeAssertExpr operation = 34 - - // Tag: TypeAssertExpr - // Args: x - opTypeSwitchAssertExpr operation = 35 - - // Tag: StructType - // Args: fields - opStructType operation = 36 - - // Tag: InterfaceType - // Args: fields - opInterfaceType operation = 37 - - // Tag: InterfaceType - opEfaceType operation = 38 - - // Tag: FuncType - // Args: params - opVoidFuncType operation = 39 - - // Tag: FuncType - // Args: typeparams params - opGenericVoidFuncType operation = 40 - - // Tag: FuncType - // Args: params results - opFuncType operation = 41 - - // Tag: FuncType - // Args: typeparams params results - opGenericFuncType operation = 42 - - // Tag: ArrayType - // Args: length elem - opArrayType operation = 43 - - // Tag: ArrayType - // Args: elem - opSliceType operation = 44 - - // Tag: MapType - // Args: key value - opMapType operation = 45 - - // Tag: ChanType - // Args: value - // Value: ast.ChanDir | channel direction - opChanType operation = 46 - - // Tag: KeyValueExpr - // Args: key value - opKeyValueExpr operation = 47 - - // Tag: Ellipsis - opEllipsis operation = 48 - - // Tag: Ellipsis - // Args: type - opTypedEllipsis operation = 49 - - // Tag: StarExpr - // Args: x - opStarExpr operation = 50 - - // Tag: UnaryExpr - // Args: x - // Value: token.Token | unary operator - opUnaryExpr operation = 51 - - // Tag: BinaryExpr - // Args: x y - // Value: token.Token | binary operator - opBinaryExpr operation = 52 - - // Tag: ParenExpr - // Args: x - opParenExpr operation = 53 - - // Tag: Unknown - // Args: exprs... - // Example: 1, 2, 3 - opArgList operation = 54 - - // Tag: Unknown - // Like ArgList, but pattern contains no $* - // Args: exprs[] - // Example: 1, 2, 3 - // Value: int | slice len - opSimpleArgList operation = 55 - - // Tag: CallExpr - // Args: fn args - // Example: f(1, xs...) - opVariadicCallExpr operation = 56 - - // Tag: CallExpr - // Args: fn args - // Example: f(1, xs) - opNonVariadicCallExpr operation = 57 - - // Tag: CallExpr - // Args: fn args - // Example: f(1, xs) or f(1, xs...) - // Value: int | can be variadic if len(args)>value - opMaybeVariadicCallExpr operation = 58 - - // Tag: CallExpr - // Args: fn args - // Example: f(1, xs) or f(1, xs...) - opCallExpr operation = 59 - - // Tag: AssignStmt - // Args: lhs rhs - // Example: lhs := rhs() - // Value: token.Token | ':=' or '=' - opAssignStmt operation = 60 - - // Tag: AssignStmt - // Args: lhs... rhs... - // Example: lhs1, lhs2 := rhs() - // Value: token.Token | ':=' or '=' - opMultiAssignStmt operation = 61 - - // Tag: BranchStmt - // Args: x - // Value: token.Token | branch kind - opBranchStmt operation = 62 - - // Tag: BranchStmt - // Args: x - // Value: token.Token | branch kind - // ValueIndex: strings | label name - opSimpleLabeledBranchStmt operation = 63 - - // Tag: BranchStmt - // Args: label x - // Value: token.Token | branch kind - opLabeledBranchStmt operation = 64 - - // Tag: LabeledStmt - // Args: x - // ValueIndex: strings | label name - opSimpleLabeledStmt operation = 65 - - // Tag: LabeledStmt - // Args: label x - opLabeledStmt operation = 66 - - // Tag: BlockStmt - // Args: body... 
- opBlockStmt operation = 67 - - // Tag: ExprStmt - // Args: x - opExprStmt operation = 68 - - // Tag: GoStmt - // Args: x - opGoStmt operation = 69 - - // Tag: DeferStmt - // Args: x - opDeferStmt operation = 70 - - // Tag: SendStmt - // Args: ch value - opSendStmt operation = 71 - - // Tag: EmptyStmt - opEmptyStmt operation = 72 - - // Tag: IncDecStmt - // Args: x - // Value: token.Token | '++' or '--' - opIncDecStmt operation = 73 - - // Tag: ReturnStmt - // Args: results... - opReturnStmt operation = 74 - - // Tag: IfStmt - // Args: cond block - // Example: if cond {} - opIfStmt operation = 75 - - // Tag: IfStmt - // Args: init cond block - // Example: if init; cond {} - opIfInitStmt operation = 76 - - // Tag: IfStmt - // Args: cond block else - // Example: if cond {} else ... - opIfElseStmt operation = 77 - - // Tag: IfStmt - // Args: init cond block else - // Example: if init; cond {} else ... - opIfInitElseStmt operation = 78 - - // Tag: IfStmt - // Args: block - // Example: if $*x {} - // ValueIndex: strings | wildcard name - opIfNamedOptStmt operation = 79 - - // Tag: IfStmt - // Args: block else - // Example: if $*x {} else ... - // ValueIndex: strings | wildcard name - opIfNamedOptElseStmt operation = 80 - - // Tag: SwitchStmt - // Args: body... - // Example: switch {} - opSwitchStmt operation = 81 - - // Tag: SwitchStmt - // Args: tag body... - // Example: switch tag {} - opSwitchTagStmt operation = 82 - - // Tag: SwitchStmt - // Args: init body... - // Example: switch init; {} - opSwitchInitStmt operation = 83 - - // Tag: SwitchStmt - // Args: init tag body... - // Example: switch init; tag {} - opSwitchInitTagStmt operation = 84 - - // Tag: SelectStmt - // Args: body... - opSelectStmt operation = 85 - - // Tag: TypeSwitchStmt - // Args: x block - // Example: switch x.(type) {} - opTypeSwitchStmt operation = 86 - - // Tag: TypeSwitchStmt - // Args: init x block - // Example: switch init; x.(type) {} - opTypeSwitchInitStmt operation = 87 - - // Tag: CaseClause - // Args: values... body... - opCaseClause operation = 88 - - // Tag: CaseClause - // Args: body... - opDefaultCaseClause operation = 89 - - // Tag: CommClause - // Args: comm body... - opCommClause operation = 90 - - // Tag: CommClause - // Args: body... 
- opDefaultCommClause operation = 91 - - // Tag: ForStmt - // Args: blocl - // Example: for {} - opForStmt operation = 92 - - // Tag: ForStmt - // Args: post block - // Example: for ; ; post {} - opForPostStmt operation = 93 - - // Tag: ForStmt - // Args: cond block - // Example: for ; cond; {} - opForCondStmt operation = 94 - - // Tag: ForStmt - // Args: cond post block - // Example: for ; cond; post {} - opForCondPostStmt operation = 95 - - // Tag: ForStmt - // Args: init block - // Example: for init; ; {} - opForInitStmt operation = 96 - - // Tag: ForStmt - // Args: init post block - // Example: for init; ; post {} - opForInitPostStmt operation = 97 - - // Tag: ForStmt - // Args: init cond block - // Example: for init; cond; {} - opForInitCondStmt operation = 98 - - // Tag: ForStmt - // Args: init cond post block - // Example: for init; cond; post {} - opForInitCondPostStmt operation = 99 - - // Tag: RangeStmt - // Args: x block - // Example: for range x {} - opRangeStmt operation = 100 - - // Tag: RangeStmt - // Args: key x block - // Example: for key := range x {} - // Value: token.Token | ':=' or '=' - opRangeKeyStmt operation = 101 - - // Tag: RangeStmt - // Args: key value x block - // Example: for key, value := range x {} - // Value: token.Token | ':=' or '=' - opRangeKeyValueStmt operation = 102 - - // Tag: RangeStmt - // Args: x - // Example: range x - opRangeClause operation = 103 - - // Tag: RangeStmt - // Args: x - // Example: for range x - opRangeHeader operation = 104 - - // Tag: RangeStmt - // Args: key x - // Example: for key := range x - // Value: token.Token | ':=' or '=' - opRangeKeyHeader operation = 105 - - // Tag: RangeStmt - // Args: key value x - // Example: for key, value := range x - // Value: token.Token | ':=' or '=' - opRangeKeyValueHeader operation = 106 - - // Tag: Unknown - // Args: fields... - opFieldList operation = 107 - - // Tag: Unknown - // Args: typ - // Example: type - opUnnamedField operation = 108 - - // Tag: Unknown - // Args: typ - // Example: name type - // ValueIndex: strings | field name - opSimpleField operation = 109 - - // Tag: Unknown - // Args: name typ - // Example: $name type - opField operation = 110 - - // Tag: Unknown - // Args: names... typ - // Example: name1, name2 type - opMultiField operation = 111 - - // Tag: ValueSpec - // Args: value - opValueSpec operation = 112 - - // Tag: ValueSpec - // Args: lhs... rhs... - // Example: lhs = rhs - opValueInitSpec operation = 113 - - // Tag: ValueSpec - // Args: lhs... type rhs... - // Example: lhs typ = rhs - opTypedValueInitSpec operation = 114 - - // Tag: ValueSpec - // Args: lhs... 
type - // Example: lhs typ - opTypedValueSpec operation = 115 - - // Tag: TypeSpec - // Args: type - // Example: name type - // ValueIndex: strings | type name - opSimpleTypeSpec operation = 116 - - // Tag: TypeSpec - // Args: name type - // Example: name type - opTypeSpec operation = 117 - - // Tag: TypeSpec - // Args: name typeparasm type - // Example: name[typeparams] type - opGenericTypeSpec operation = 118 - - // Tag: TypeSpec - // Args: name type - // Example: name = type - opTypeAliasSpec operation = 119 - - // Tag: FuncDecl - // Args: type block - // ValueIndex: strings | field name - opSimpleFuncDecl operation = 120 - - // Tag: FuncDecl - // Args: name type block - opFuncDecl operation = 121 - - // Tag: FuncDecl - // Args: recv name type block - opMethodDecl operation = 122 - - // Tag: FuncDecl - // Args: name type - opFuncProtoDecl operation = 123 - - // Tag: FuncDecl - // Args: recv name type - opMethodProtoDecl operation = 124 - - // Tag: DeclStmt - // Args: decl - opDeclStmt operation = 125 - - // Tag: GenDecl - // Args: valuespecs... - opConstDecl operation = 126 - - // Tag: GenDecl - // Args: valuespecs... - opVarDecl operation = 127 - - // Tag: GenDecl - // Args: typespecs... - opTypeDecl operation = 128 - - // Tag: GenDecl - opAnyImportDecl operation = 129 - - // Tag: GenDecl - // Args: importspecs... - opImportDecl operation = 130 - - // Tag: File - // Args: name - opEmptyPackage operation = 131 -) - -type operationInfo struct { - Tag nodetag.Value - NumArgs int - ValueKind valueKind - ExtraValueKind valueKind - VariadicMap bitmap64 - SliceIndex int -} - -var operationInfoTable = [256]operationInfo{ - opInvalid: {}, - - opNode: { - Tag: nodetag.Node, - NumArgs: 0, - ValueKind: emptyValue, - ExtraValueKind: emptyValue, - VariadicMap: 0, // 0 - SliceIndex: -1, - }, - opNamedNode: { - Tag: nodetag.Node, - NumArgs: 0, - ValueKind: emptyValue, - ExtraValueKind: stringValue, - VariadicMap: 0, // 0 - SliceIndex: -1, - }, - opNodeSeq: { - Tag: nodetag.Unknown, - NumArgs: 0, - ValueKind: emptyValue, - ExtraValueKind: emptyValue, - VariadicMap: 0, // 0 - SliceIndex: -1, - }, - opNamedNodeSeq: { - Tag: nodetag.Unknown, - NumArgs: 0, - ValueKind: emptyValue, - ExtraValueKind: stringValue, - VariadicMap: 0, // 0 - SliceIndex: -1, - }, - opOptNode: { - Tag: nodetag.Unknown, - NumArgs: 0, - ValueKind: emptyValue, - ExtraValueKind: emptyValue, - VariadicMap: 0, // 0 - SliceIndex: -1, - }, - opNamedOptNode: { - Tag: nodetag.Unknown, - NumArgs: 0, - ValueKind: emptyValue, - ExtraValueKind: stringValue, - VariadicMap: 0, // 0 - SliceIndex: -1, - }, - opFieldNode: { - Tag: nodetag.Node, - NumArgs: 0, - ValueKind: emptyValue, - ExtraValueKind: emptyValue, - VariadicMap: 0, // 0 - SliceIndex: -1, - }, - opNamedFieldNode: { - Tag: nodetag.Node, - NumArgs: 0, - ValueKind: emptyValue, - ExtraValueKind: stringValue, - VariadicMap: 0, // 0 - SliceIndex: -1, - }, - opMultiStmt: { - Tag: nodetag.StmtList, - NumArgs: 1, - ValueKind: emptyValue, - ExtraValueKind: emptyValue, - VariadicMap: 1, // 1 - SliceIndex: -1, - }, - opMultiExpr: { - Tag: nodetag.ExprList, - NumArgs: 1, - ValueKind: emptyValue, - ExtraValueKind: emptyValue, - VariadicMap: 1, // 1 - SliceIndex: -1, - }, - opMultiDecl: { - Tag: nodetag.DeclList, - NumArgs: 1, - ValueKind: emptyValue, - ExtraValueKind: emptyValue, - VariadicMap: 1, // 1 - SliceIndex: -1, - }, - opEnd: { - Tag: nodetag.Unknown, - NumArgs: 0, - ValueKind: emptyValue, - ExtraValueKind: emptyValue, - VariadicMap: 0, // 0 - SliceIndex: -1, - }, - opBasicLit: { - Tag: 
nodetag.BasicLit, - NumArgs: 0, - ValueKind: emptyValue, - ExtraValueKind: ifaceValue, - VariadicMap: 0, // 0 - SliceIndex: -1, - }, - opStrictIntLit: { - Tag: nodetag.BasicLit, - NumArgs: 0, - ValueKind: emptyValue, - ExtraValueKind: stringValue, - VariadicMap: 0, // 0 - SliceIndex: -1, - }, - opStrictFloatLit: { - Tag: nodetag.BasicLit, - NumArgs: 0, - ValueKind: emptyValue, - ExtraValueKind: stringValue, - VariadicMap: 0, // 0 - SliceIndex: -1, - }, - opStrictCharLit: { - Tag: nodetag.BasicLit, - NumArgs: 0, - ValueKind: emptyValue, - ExtraValueKind: stringValue, - VariadicMap: 0, // 0 - SliceIndex: -1, - }, - opStrictStringLit: { - Tag: nodetag.BasicLit, - NumArgs: 0, - ValueKind: emptyValue, - ExtraValueKind: stringValue, - VariadicMap: 0, // 0 - SliceIndex: -1, - }, - opStrictComplexLit: { - Tag: nodetag.BasicLit, - NumArgs: 0, - ValueKind: emptyValue, - ExtraValueKind: stringValue, - VariadicMap: 0, // 0 - SliceIndex: -1, - }, - opIdent: { - Tag: nodetag.Ident, - NumArgs: 0, - ValueKind: emptyValue, - ExtraValueKind: stringValue, - VariadicMap: 0, // 0 - SliceIndex: -1, - }, - opPkg: { - Tag: nodetag.Ident, - NumArgs: 0, - ValueKind: emptyValue, - ExtraValueKind: stringValue, - VariadicMap: 0, // 0 - SliceIndex: -1, - }, - opIndexExpr: { - Tag: nodetag.IndexExpr, - NumArgs: 2, - ValueKind: emptyValue, - ExtraValueKind: emptyValue, - VariadicMap: 0, // 0 - SliceIndex: -1, - }, - opIndexListExpr: { - Tag: nodetag.IndexListExpr, - NumArgs: 2, - ValueKind: emptyValue, - ExtraValueKind: emptyValue, - VariadicMap: 2, // 10 - SliceIndex: -1, - }, - opSliceExpr: { - Tag: nodetag.SliceExpr, - NumArgs: 1, - ValueKind: emptyValue, - ExtraValueKind: emptyValue, - VariadicMap: 0, // 0 - SliceIndex: -1, - }, - opSliceFromExpr: { - Tag: nodetag.SliceExpr, - NumArgs: 2, - ValueKind: emptyValue, - ExtraValueKind: emptyValue, - VariadicMap: 0, // 0 - SliceIndex: -1, - }, - opSliceToExpr: { - Tag: nodetag.SliceExpr, - NumArgs: 2, - ValueKind: emptyValue, - ExtraValueKind: emptyValue, - VariadicMap: 0, // 0 - SliceIndex: -1, - }, - opSliceFromToExpr: { - Tag: nodetag.SliceExpr, - NumArgs: 3, - ValueKind: emptyValue, - ExtraValueKind: emptyValue, - VariadicMap: 0, // 0 - SliceIndex: -1, - }, - opSliceToCapExpr: { - Tag: nodetag.SliceExpr, - NumArgs: 3, - ValueKind: emptyValue, - ExtraValueKind: emptyValue, - VariadicMap: 0, // 0 - SliceIndex: -1, - }, - opSliceFromToCapExpr: { - Tag: nodetag.SliceExpr, - NumArgs: 4, - ValueKind: emptyValue, - ExtraValueKind: emptyValue, - VariadicMap: 0, // 0 - SliceIndex: -1, - }, - opFuncLit: { - Tag: nodetag.FuncLit, - NumArgs: 2, - ValueKind: emptyValue, - ExtraValueKind: emptyValue, - VariadicMap: 0, // 0 - SliceIndex: -1, - }, - opCompositeLit: { - Tag: nodetag.CompositeLit, - NumArgs: 1, - ValueKind: emptyValue, - ExtraValueKind: emptyValue, - VariadicMap: 1, // 1 - SliceIndex: -1, - }, - opTypedCompositeLit: { - Tag: nodetag.CompositeLit, - NumArgs: 2, - ValueKind: emptyValue, - ExtraValueKind: emptyValue, - VariadicMap: 2, // 10 - SliceIndex: -1, - }, - opSimpleSelectorExpr: { - Tag: nodetag.SelectorExpr, - NumArgs: 1, - ValueKind: emptyValue, - ExtraValueKind: stringValue, - VariadicMap: 0, // 0 - SliceIndex: -1, - }, - opSelectorExpr: { - Tag: nodetag.SelectorExpr, - NumArgs: 2, - ValueKind: emptyValue, - ExtraValueKind: emptyValue, - VariadicMap: 0, // 0 - SliceIndex: -1, - }, - opTypeAssertExpr: { - Tag: nodetag.TypeAssertExpr, - NumArgs: 2, - ValueKind: emptyValue, - ExtraValueKind: emptyValue, - VariadicMap: 0, // 0 - SliceIndex: -1, - }, - 
opTypeSwitchAssertExpr: { - Tag: nodetag.TypeAssertExpr, - NumArgs: 1, - ValueKind: emptyValue, - ExtraValueKind: emptyValue, - VariadicMap: 0, // 0 - SliceIndex: -1, - }, - opStructType: { - Tag: nodetag.StructType, - NumArgs: 1, - ValueKind: emptyValue, - ExtraValueKind: emptyValue, - VariadicMap: 0, // 0 - SliceIndex: -1, - }, - opInterfaceType: { - Tag: nodetag.InterfaceType, - NumArgs: 1, - ValueKind: emptyValue, - ExtraValueKind: emptyValue, - VariadicMap: 0, // 0 - SliceIndex: -1, - }, - opEfaceType: { - Tag: nodetag.InterfaceType, - NumArgs: 0, - ValueKind: emptyValue, - ExtraValueKind: emptyValue, - VariadicMap: 0, // 0 - SliceIndex: -1, - }, - opVoidFuncType: { - Tag: nodetag.FuncType, - NumArgs: 1, - ValueKind: emptyValue, - ExtraValueKind: emptyValue, - VariadicMap: 0, // 0 - SliceIndex: -1, - }, - opGenericVoidFuncType: { - Tag: nodetag.FuncType, - NumArgs: 2, - ValueKind: emptyValue, - ExtraValueKind: emptyValue, - VariadicMap: 0, // 0 - SliceIndex: -1, - }, - opFuncType: { - Tag: nodetag.FuncType, - NumArgs: 2, - ValueKind: emptyValue, - ExtraValueKind: emptyValue, - VariadicMap: 0, // 0 - SliceIndex: -1, - }, - opGenericFuncType: { - Tag: nodetag.FuncType, - NumArgs: 3, - ValueKind: emptyValue, - ExtraValueKind: emptyValue, - VariadicMap: 0, // 0 - SliceIndex: -1, - }, - opArrayType: { - Tag: nodetag.ArrayType, - NumArgs: 2, - ValueKind: emptyValue, - ExtraValueKind: emptyValue, - VariadicMap: 0, // 0 - SliceIndex: -1, - }, - opSliceType: { - Tag: nodetag.ArrayType, - NumArgs: 1, - ValueKind: emptyValue, - ExtraValueKind: emptyValue, - VariadicMap: 0, // 0 - SliceIndex: -1, - }, - opMapType: { - Tag: nodetag.MapType, - NumArgs: 2, - ValueKind: emptyValue, - ExtraValueKind: emptyValue, - VariadicMap: 0, // 0 - SliceIndex: -1, - }, - opChanType: { - Tag: nodetag.ChanType, - NumArgs: 1, - ValueKind: chandirValue, - ExtraValueKind: emptyValue, - VariadicMap: 0, // 0 - SliceIndex: -1, - }, - opKeyValueExpr: { - Tag: nodetag.KeyValueExpr, - NumArgs: 2, - ValueKind: emptyValue, - ExtraValueKind: emptyValue, - VariadicMap: 0, // 0 - SliceIndex: -1, - }, - opEllipsis: { - Tag: nodetag.Ellipsis, - NumArgs: 0, - ValueKind: emptyValue, - ExtraValueKind: emptyValue, - VariadicMap: 0, // 0 - SliceIndex: -1, - }, - opTypedEllipsis: { - Tag: nodetag.Ellipsis, - NumArgs: 1, - ValueKind: emptyValue, - ExtraValueKind: emptyValue, - VariadicMap: 0, // 0 - SliceIndex: -1, - }, - opStarExpr: { - Tag: nodetag.StarExpr, - NumArgs: 1, - ValueKind: emptyValue, - ExtraValueKind: emptyValue, - VariadicMap: 0, // 0 - SliceIndex: -1, - }, - opUnaryExpr: { - Tag: nodetag.UnaryExpr, - NumArgs: 1, - ValueKind: tokenValue, - ExtraValueKind: emptyValue, - VariadicMap: 0, // 0 - SliceIndex: -1, - }, - opBinaryExpr: { - Tag: nodetag.BinaryExpr, - NumArgs: 2, - ValueKind: tokenValue, - ExtraValueKind: emptyValue, - VariadicMap: 0, // 0 - SliceIndex: -1, - }, - opParenExpr: { - Tag: nodetag.ParenExpr, - NumArgs: 1, - ValueKind: emptyValue, - ExtraValueKind: emptyValue, - VariadicMap: 0, // 0 - SliceIndex: -1, - }, - opArgList: { - Tag: nodetag.Unknown, - NumArgs: 1, - ValueKind: emptyValue, - ExtraValueKind: emptyValue, - VariadicMap: 1, // 1 - SliceIndex: -1, - }, - opSimpleArgList: { - Tag: nodetag.Unknown, - NumArgs: 1, - ValueKind: intValue, - ExtraValueKind: emptyValue, - VariadicMap: 0, // 0 - SliceIndex: 0, - }, - opVariadicCallExpr: { - Tag: nodetag.CallExpr, - NumArgs: 2, - ValueKind: emptyValue, - ExtraValueKind: emptyValue, - VariadicMap: 0, // 0 - SliceIndex: -1, - }, - opNonVariadicCallExpr: { - 
Tag: nodetag.CallExpr, - NumArgs: 2, - ValueKind: emptyValue, - ExtraValueKind: emptyValue, - VariadicMap: 0, // 0 - SliceIndex: -1, - }, - opMaybeVariadicCallExpr: { - Tag: nodetag.CallExpr, - NumArgs: 2, - ValueKind: intValue, - ExtraValueKind: emptyValue, - VariadicMap: 0, // 0 - SliceIndex: -1, - }, - opCallExpr: { - Tag: nodetag.CallExpr, - NumArgs: 2, - ValueKind: emptyValue, - ExtraValueKind: emptyValue, - VariadicMap: 0, // 0 - SliceIndex: -1, - }, - opAssignStmt: { - Tag: nodetag.AssignStmt, - NumArgs: 2, - ValueKind: tokenValue, - ExtraValueKind: emptyValue, - VariadicMap: 0, // 0 - SliceIndex: -1, - }, - opMultiAssignStmt: { - Tag: nodetag.AssignStmt, - NumArgs: 2, - ValueKind: tokenValue, - ExtraValueKind: emptyValue, - VariadicMap: 3, // 11 - SliceIndex: -1, - }, - opBranchStmt: { - Tag: nodetag.BranchStmt, - NumArgs: 1, - ValueKind: tokenValue, - ExtraValueKind: emptyValue, - VariadicMap: 0, // 0 - SliceIndex: -1, - }, - opSimpleLabeledBranchStmt: { - Tag: nodetag.BranchStmt, - NumArgs: 1, - ValueKind: tokenValue, - ExtraValueKind: stringValue, - VariadicMap: 0, // 0 - SliceIndex: -1, - }, - opLabeledBranchStmt: { - Tag: nodetag.BranchStmt, - NumArgs: 2, - ValueKind: tokenValue, - ExtraValueKind: emptyValue, - VariadicMap: 0, // 0 - SliceIndex: -1, - }, - opSimpleLabeledStmt: { - Tag: nodetag.LabeledStmt, - NumArgs: 1, - ValueKind: emptyValue, - ExtraValueKind: stringValue, - VariadicMap: 0, // 0 - SliceIndex: -1, - }, - opLabeledStmt: { - Tag: nodetag.LabeledStmt, - NumArgs: 2, - ValueKind: emptyValue, - ExtraValueKind: emptyValue, - VariadicMap: 0, // 0 - SliceIndex: -1, - }, - opBlockStmt: { - Tag: nodetag.BlockStmt, - NumArgs: 1, - ValueKind: emptyValue, - ExtraValueKind: emptyValue, - VariadicMap: 1, // 1 - SliceIndex: -1, - }, - opExprStmt: { - Tag: nodetag.ExprStmt, - NumArgs: 1, - ValueKind: emptyValue, - ExtraValueKind: emptyValue, - VariadicMap: 0, // 0 - SliceIndex: -1, - }, - opGoStmt: { - Tag: nodetag.GoStmt, - NumArgs: 1, - ValueKind: emptyValue, - ExtraValueKind: emptyValue, - VariadicMap: 0, // 0 - SliceIndex: -1, - }, - opDeferStmt: { - Tag: nodetag.DeferStmt, - NumArgs: 1, - ValueKind: emptyValue, - ExtraValueKind: emptyValue, - VariadicMap: 0, // 0 - SliceIndex: -1, - }, - opSendStmt: { - Tag: nodetag.SendStmt, - NumArgs: 2, - ValueKind: emptyValue, - ExtraValueKind: emptyValue, - VariadicMap: 0, // 0 - SliceIndex: -1, - }, - opEmptyStmt: { - Tag: nodetag.EmptyStmt, - NumArgs: 0, - ValueKind: emptyValue, - ExtraValueKind: emptyValue, - VariadicMap: 0, // 0 - SliceIndex: -1, - }, - opIncDecStmt: { - Tag: nodetag.IncDecStmt, - NumArgs: 1, - ValueKind: tokenValue, - ExtraValueKind: emptyValue, - VariadicMap: 0, // 0 - SliceIndex: -1, - }, - opReturnStmt: { - Tag: nodetag.ReturnStmt, - NumArgs: 1, - ValueKind: emptyValue, - ExtraValueKind: emptyValue, - VariadicMap: 1, // 1 - SliceIndex: -1, - }, - opIfStmt: { - Tag: nodetag.IfStmt, - NumArgs: 2, - ValueKind: emptyValue, - ExtraValueKind: emptyValue, - VariadicMap: 0, // 0 - SliceIndex: -1, - }, - opIfInitStmt: { - Tag: nodetag.IfStmt, - NumArgs: 3, - ValueKind: emptyValue, - ExtraValueKind: emptyValue, - VariadicMap: 0, // 0 - SliceIndex: -1, - }, - opIfElseStmt: { - Tag: nodetag.IfStmt, - NumArgs: 3, - ValueKind: emptyValue, - ExtraValueKind: emptyValue, - VariadicMap: 0, // 0 - SliceIndex: -1, - }, - opIfInitElseStmt: { - Tag: nodetag.IfStmt, - NumArgs: 4, - ValueKind: emptyValue, - ExtraValueKind: emptyValue, - VariadicMap: 0, // 0 - SliceIndex: -1, - }, - opIfNamedOptStmt: { - Tag: nodetag.IfStmt, - 
NumArgs: 1, - ValueKind: emptyValue, - ExtraValueKind: stringValue, - VariadicMap: 0, // 0 - SliceIndex: -1, - }, - opIfNamedOptElseStmt: { - Tag: nodetag.IfStmt, - NumArgs: 2, - ValueKind: emptyValue, - ExtraValueKind: stringValue, - VariadicMap: 0, // 0 - SliceIndex: -1, - }, - opSwitchStmt: { - Tag: nodetag.SwitchStmt, - NumArgs: 1, - ValueKind: emptyValue, - ExtraValueKind: emptyValue, - VariadicMap: 1, // 1 - SliceIndex: -1, - }, - opSwitchTagStmt: { - Tag: nodetag.SwitchStmt, - NumArgs: 2, - ValueKind: emptyValue, - ExtraValueKind: emptyValue, - VariadicMap: 2, // 10 - SliceIndex: -1, - }, - opSwitchInitStmt: { - Tag: nodetag.SwitchStmt, - NumArgs: 2, - ValueKind: emptyValue, - ExtraValueKind: emptyValue, - VariadicMap: 2, // 10 - SliceIndex: -1, - }, - opSwitchInitTagStmt: { - Tag: nodetag.SwitchStmt, - NumArgs: 3, - ValueKind: emptyValue, - ExtraValueKind: emptyValue, - VariadicMap: 4, // 100 - SliceIndex: -1, - }, - opSelectStmt: { - Tag: nodetag.SelectStmt, - NumArgs: 1, - ValueKind: emptyValue, - ExtraValueKind: emptyValue, - VariadicMap: 1, // 1 - SliceIndex: -1, - }, - opTypeSwitchStmt: { - Tag: nodetag.TypeSwitchStmt, - NumArgs: 2, - ValueKind: emptyValue, - ExtraValueKind: emptyValue, - VariadicMap: 0, // 0 - SliceIndex: -1, - }, - opTypeSwitchInitStmt: { - Tag: nodetag.TypeSwitchStmt, - NumArgs: 3, - ValueKind: emptyValue, - ExtraValueKind: emptyValue, - VariadicMap: 0, // 0 - SliceIndex: -1, - }, - opCaseClause: { - Tag: nodetag.CaseClause, - NumArgs: 2, - ValueKind: emptyValue, - ExtraValueKind: emptyValue, - VariadicMap: 3, // 11 - SliceIndex: -1, - }, - opDefaultCaseClause: { - Tag: nodetag.CaseClause, - NumArgs: 1, - ValueKind: emptyValue, - ExtraValueKind: emptyValue, - VariadicMap: 1, // 1 - SliceIndex: -1, - }, - opCommClause: { - Tag: nodetag.CommClause, - NumArgs: 2, - ValueKind: emptyValue, - ExtraValueKind: emptyValue, - VariadicMap: 2, // 10 - SliceIndex: -1, - }, - opDefaultCommClause: { - Tag: nodetag.CommClause, - NumArgs: 1, - ValueKind: emptyValue, - ExtraValueKind: emptyValue, - VariadicMap: 1, // 1 - SliceIndex: -1, - }, - opForStmt: { - Tag: nodetag.ForStmt, - NumArgs: 1, - ValueKind: emptyValue, - ExtraValueKind: emptyValue, - VariadicMap: 0, // 0 - SliceIndex: -1, - }, - opForPostStmt: { - Tag: nodetag.ForStmt, - NumArgs: 2, - ValueKind: emptyValue, - ExtraValueKind: emptyValue, - VariadicMap: 0, // 0 - SliceIndex: -1, - }, - opForCondStmt: { - Tag: nodetag.ForStmt, - NumArgs: 2, - ValueKind: emptyValue, - ExtraValueKind: emptyValue, - VariadicMap: 0, // 0 - SliceIndex: -1, - }, - opForCondPostStmt: { - Tag: nodetag.ForStmt, - NumArgs: 3, - ValueKind: emptyValue, - ExtraValueKind: emptyValue, - VariadicMap: 0, // 0 - SliceIndex: -1, - }, - opForInitStmt: { - Tag: nodetag.ForStmt, - NumArgs: 2, - ValueKind: emptyValue, - ExtraValueKind: emptyValue, - VariadicMap: 0, // 0 - SliceIndex: -1, - }, - opForInitPostStmt: { - Tag: nodetag.ForStmt, - NumArgs: 3, - ValueKind: emptyValue, - ExtraValueKind: emptyValue, - VariadicMap: 0, // 0 - SliceIndex: -1, - }, - opForInitCondStmt: { - Tag: nodetag.ForStmt, - NumArgs: 3, - ValueKind: emptyValue, - ExtraValueKind: emptyValue, - VariadicMap: 0, // 0 - SliceIndex: -1, - }, - opForInitCondPostStmt: { - Tag: nodetag.ForStmt, - NumArgs: 4, - ValueKind: emptyValue, - ExtraValueKind: emptyValue, - VariadicMap: 0, // 0 - SliceIndex: -1, - }, - opRangeStmt: { - Tag: nodetag.RangeStmt, - NumArgs: 2, - ValueKind: emptyValue, - ExtraValueKind: emptyValue, - VariadicMap: 0, // 0 - SliceIndex: -1, - }, - opRangeKeyStmt: { - 
Tag: nodetag.RangeStmt, - NumArgs: 3, - ValueKind: tokenValue, - ExtraValueKind: emptyValue, - VariadicMap: 0, // 0 - SliceIndex: -1, - }, - opRangeKeyValueStmt: { - Tag: nodetag.RangeStmt, - NumArgs: 4, - ValueKind: tokenValue, - ExtraValueKind: emptyValue, - VariadicMap: 0, // 0 - SliceIndex: -1, - }, - opRangeClause: { - Tag: nodetag.RangeStmt, - NumArgs: 1, - ValueKind: emptyValue, - ExtraValueKind: emptyValue, - VariadicMap: 0, // 0 - SliceIndex: -1, - }, - opRangeHeader: { - Tag: nodetag.RangeStmt, - NumArgs: 1, - ValueKind: emptyValue, - ExtraValueKind: emptyValue, - VariadicMap: 0, // 0 - SliceIndex: -1, - }, - opRangeKeyHeader: { - Tag: nodetag.RangeStmt, - NumArgs: 2, - ValueKind: tokenValue, - ExtraValueKind: emptyValue, - VariadicMap: 0, // 0 - SliceIndex: -1, - }, - opRangeKeyValueHeader: { - Tag: nodetag.RangeStmt, - NumArgs: 3, - ValueKind: tokenValue, - ExtraValueKind: emptyValue, - VariadicMap: 0, // 0 - SliceIndex: -1, - }, - opFieldList: { - Tag: nodetag.Unknown, - NumArgs: 1, - ValueKind: emptyValue, - ExtraValueKind: emptyValue, - VariadicMap: 1, // 1 - SliceIndex: -1, - }, - opUnnamedField: { - Tag: nodetag.Unknown, - NumArgs: 1, - ValueKind: emptyValue, - ExtraValueKind: emptyValue, - VariadicMap: 0, // 0 - SliceIndex: -1, - }, - opSimpleField: { - Tag: nodetag.Unknown, - NumArgs: 1, - ValueKind: emptyValue, - ExtraValueKind: stringValue, - VariadicMap: 0, // 0 - SliceIndex: -1, - }, - opField: { - Tag: nodetag.Unknown, - NumArgs: 2, - ValueKind: emptyValue, - ExtraValueKind: emptyValue, - VariadicMap: 0, // 0 - SliceIndex: -1, - }, - opMultiField: { - Tag: nodetag.Unknown, - NumArgs: 2, - ValueKind: emptyValue, - ExtraValueKind: emptyValue, - VariadicMap: 1, // 1 - SliceIndex: -1, - }, - opValueSpec: { - Tag: nodetag.ValueSpec, - NumArgs: 1, - ValueKind: emptyValue, - ExtraValueKind: emptyValue, - VariadicMap: 0, // 0 - SliceIndex: -1, - }, - opValueInitSpec: { - Tag: nodetag.ValueSpec, - NumArgs: 2, - ValueKind: emptyValue, - ExtraValueKind: emptyValue, - VariadicMap: 3, // 11 - SliceIndex: -1, - }, - opTypedValueInitSpec: { - Tag: nodetag.ValueSpec, - NumArgs: 3, - ValueKind: emptyValue, - ExtraValueKind: emptyValue, - VariadicMap: 5, // 101 - SliceIndex: -1, - }, - opTypedValueSpec: { - Tag: nodetag.ValueSpec, - NumArgs: 2, - ValueKind: emptyValue, - ExtraValueKind: emptyValue, - VariadicMap: 1, // 1 - SliceIndex: -1, - }, - opSimpleTypeSpec: { - Tag: nodetag.TypeSpec, - NumArgs: 1, - ValueKind: emptyValue, - ExtraValueKind: stringValue, - VariadicMap: 0, // 0 - SliceIndex: -1, - }, - opTypeSpec: { - Tag: nodetag.TypeSpec, - NumArgs: 2, - ValueKind: emptyValue, - ExtraValueKind: emptyValue, - VariadicMap: 0, // 0 - SliceIndex: -1, - }, - opGenericTypeSpec: { - Tag: nodetag.TypeSpec, - NumArgs: 3, - ValueKind: emptyValue, - ExtraValueKind: emptyValue, - VariadicMap: 0, // 0 - SliceIndex: -1, - }, - opTypeAliasSpec: { - Tag: nodetag.TypeSpec, - NumArgs: 2, - ValueKind: emptyValue, - ExtraValueKind: emptyValue, - VariadicMap: 0, // 0 - SliceIndex: -1, - }, - opSimpleFuncDecl: { - Tag: nodetag.FuncDecl, - NumArgs: 2, - ValueKind: emptyValue, - ExtraValueKind: stringValue, - VariadicMap: 0, // 0 - SliceIndex: -1, - }, - opFuncDecl: { - Tag: nodetag.FuncDecl, - NumArgs: 3, - ValueKind: emptyValue, - ExtraValueKind: emptyValue, - VariadicMap: 0, // 0 - SliceIndex: -1, - }, - opMethodDecl: { - Tag: nodetag.FuncDecl, - NumArgs: 4, - ValueKind: emptyValue, - ExtraValueKind: emptyValue, - VariadicMap: 0, // 0 - SliceIndex: -1, - }, - opFuncProtoDecl: { - Tag: 
nodetag.FuncDecl, - NumArgs: 2, - ValueKind: emptyValue, - ExtraValueKind: emptyValue, - VariadicMap: 0, // 0 - SliceIndex: -1, - }, - opMethodProtoDecl: { - Tag: nodetag.FuncDecl, - NumArgs: 3, - ValueKind: emptyValue, - ExtraValueKind: emptyValue, - VariadicMap: 0, // 0 - SliceIndex: -1, - }, - opDeclStmt: { - Tag: nodetag.DeclStmt, - NumArgs: 1, - ValueKind: emptyValue, - ExtraValueKind: emptyValue, - VariadicMap: 0, // 0 - SliceIndex: -1, - }, - opConstDecl: { - Tag: nodetag.GenDecl, - NumArgs: 1, - ValueKind: emptyValue, - ExtraValueKind: emptyValue, - VariadicMap: 1, // 1 - SliceIndex: -1, - }, - opVarDecl: { - Tag: nodetag.GenDecl, - NumArgs: 1, - ValueKind: emptyValue, - ExtraValueKind: emptyValue, - VariadicMap: 1, // 1 - SliceIndex: -1, - }, - opTypeDecl: { - Tag: nodetag.GenDecl, - NumArgs: 1, - ValueKind: emptyValue, - ExtraValueKind: emptyValue, - VariadicMap: 1, // 1 - SliceIndex: -1, - }, - opAnyImportDecl: { - Tag: nodetag.GenDecl, - NumArgs: 0, - ValueKind: emptyValue, - ExtraValueKind: emptyValue, - VariadicMap: 0, // 0 - SliceIndex: -1, - }, - opImportDecl: { - Tag: nodetag.GenDecl, - NumArgs: 1, - ValueKind: emptyValue, - ExtraValueKind: emptyValue, - VariadicMap: 1, // 1 - SliceIndex: -1, - }, - opEmptyPackage: { - Tag: nodetag.File, - NumArgs: 1, - ValueKind: emptyValue, - ExtraValueKind: emptyValue, - VariadicMap: 0, // 0 - SliceIndex: -1, - }, -} diff --git a/vendor/github.com/quasilyte/gogrep/parse.go b/vendor/github.com/quasilyte/gogrep/parse.go deleted file mode 100644 index 3c6854bda..000000000 --- a/vendor/github.com/quasilyte/gogrep/parse.go +++ /dev/null @@ -1,403 +0,0 @@ -// Copyright (c) 2017, Daniel Martí -// See LICENSE for licensing information - -package gogrep - -import ( - "bytes" - "fmt" - "go/ast" - "go/parser" - "go/scanner" - "go/token" - "strings" - "text/template" -) - -func transformSource(expr string) (string, []posOffset, error) { - toks, err := tokenize([]byte(expr)) - if err != nil { - return "", nil, fmt.Errorf("cannot tokenize expr: %v", err) - } - var offs []posOffset - lbuf := lineColBuffer{line: 1, col: 1} - lastLit := false - for _, t := range toks { - if lbuf.offs >= t.pos.Offset && lastLit && t.lit != "" { - _, _ = lbuf.WriteString(" ") - } - for lbuf.offs < t.pos.Offset { - _, _ = lbuf.WriteString(" ") - } - if t.lit == "" { - _, _ = lbuf.WriteString(t.tok.String()) - lastLit = false - continue - } - _, _ = lbuf.WriteString(t.lit) - lastLit = strings.TrimSpace(t.lit) != "" - } - // trailing newlines can cause issues with commas - return strings.TrimSpace(lbuf.String()), offs, nil -} - -func parseExpr(fset *token.FileSet, expr string) (ast.Node, error) { - exprStr, offs, err := transformSource(expr) - if err != nil { - return nil, err - } - node, err := parseDetectingNode(fset, exprStr) - if err != nil { - err = subPosOffsets(err, offs...) - return nil, fmt.Errorf("cannot parse expr: %v", err) - } - return node, nil -} - -type lineColBuffer struct { - bytes.Buffer - line, col, offs int -} - -func (l *lineColBuffer) WriteString(s string) (n int, err error) { - for _, r := range s { - if r == '\n' { - l.line++ - l.col = 1 - } else { - l.col++ - } - l.offs++ - } - return l.Buffer.WriteString(s) -} - -var tmplDecl = template.Must(template.New("").Parse(`` + - `package p; {{ . }}`)) - -var tmplBlock = template.Must(template.New("").Parse(`` + - `package p; func _() { if true {{ . }} else {} }`)) - -var tmplExprs = template.Must(template.New("").Parse(`` + - `package p; var _ = []interface{}{ {{ . 
}}, }`)) - -var tmplStmts = template.Must(template.New("").Parse(`` + - `package p; func _() { {{ . }} }`)) - -var tmplType = template.Must(template.New("").Parse(`` + - `package p; var _ {{ . }}`)) - -var tmplValSpec = template.Must(template.New("").Parse(`` + - `package p; var {{ . }}`)) - -func execTmpl(tmpl *template.Template, src string) string { - var buf bytes.Buffer - if err := tmpl.Execute(&buf, src); err != nil { - panic(err) - } - return buf.String() -} - -func noBadNodes(node ast.Node) bool { - any := false - ast.Inspect(node, func(n ast.Node) bool { - if any { - return false - } - switch n.(type) { - case *ast.BadExpr, *ast.BadDecl: - any = true - } - return true - }) - return !any -} - -func parseType(fset *token.FileSet, src string) (ast.Expr, *ast.File, error) { - asType := execTmpl(tmplType, src) - f, err := parser.ParseFile(fset, "", asType, 0) - if err != nil { - err = subPosOffsets(err, posOffset{1, 1, 17}) - return nil, nil, err - } - vs := f.Decls[0].(*ast.GenDecl).Specs[0].(*ast.ValueSpec) - return vs.Type, f, nil -} - -// parseDetectingNode tries its best to parse the ast.Node contained in src, as -// one of: *ast.File, ast.Decl, ast.Expr, ast.Stmt, *ast.ValueSpec. -// It also returns the *ast.File used for the parsing, so that the returned node -// can be easily type-checked. -func parseDetectingNode(fset *token.FileSet, src string) (ast.Node, error) { - file := fset.AddFile("", fset.Base(), len(src)) - scan := scanner.Scanner{} - scan.Init(file, []byte(src), nil, 0) - if _, tok, _ := scan.Scan(); tok == token.EOF { - return nil, fmt.Errorf("empty source code") - } - var mainErr error - - // Some adhoc patterns first. - if strings.HasPrefix(src, "range ") { - e, err := parser.ParseExpr(src[len("range "):]) - if err == nil && noBadNodes(e) { - return &rangeClause{X: e}, nil - } - } - if strings.HasPrefix(src, "for ") && !strings.HasSuffix(src, "}") { - asStmts := execTmpl(tmplStmts, src+"{}") - f, err := parser.ParseFile(fset, "", asStmts, parser.SkipObjectResolution) - if err == nil && noBadNodes(f) { - bl := f.Decls[0].(*ast.FuncDecl).Body - if len(bl.List) == 1 { - return &rangeHeader{Node: bl.List[0].(*ast.RangeStmt)}, nil - } - } - } - - // try as a block; otherwise blocks might be mistaken for composite - // literals further below\ - asBlock := execTmpl(tmplBlock, src) - if f, err := parser.ParseFile(fset, "", asBlock, parser.SkipObjectResolution); err == nil && noBadNodes(f) { - bl := f.Decls[0].(*ast.FuncDecl).Body - if len(bl.List) == 1 { - ifs := bl.List[0].(*ast.IfStmt) - return ifs.Body, nil - } - } - - // then as value expressions - asExprs := execTmpl(tmplExprs, src) - if f, err := parser.ParseFile(fset, "", asExprs, parser.SkipObjectResolution); err == nil && noBadNodes(f) { - vs := f.Decls[0].(*ast.GenDecl).Specs[0].(*ast.ValueSpec) - cl := vs.Values[0].(*ast.CompositeLit) - if len(cl.Elts) == 1 { - return cl.Elts[0], nil - } - slice := &NodeSlice{} - slice.assignExprSlice(cl.Elts) - return slice, nil - } - - // then try as statements - asStmts := execTmpl(tmplStmts, src) - f, err := parser.ParseFile(fset, "", asStmts, parser.SkipObjectResolution) - if err == nil && noBadNodes(f) { - bl := f.Decls[0].(*ast.FuncDecl).Body - if len(bl.List) == 1 { - return bl.List[0], nil - } - slice := &NodeSlice{} - slice.assignStmtSlice(bl.List) - return slice, nil - } - // Statements is what covers most cases, so it will give - // the best overall error message. Show positions - // relative to where the user's code is put in the - // template. 
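For orientation, the node-detection logic in this removed parse.go leans on wrapping the user's snippet in small source templates (tmplStmts, tmplBlock, and so on) so that go/parser, which only accepts whole files or single expressions, can parse statement-level input. Below is a minimal standalone sketch of that wrapping trick using only the standard library; the helper name parseStmts is illustrative and not part of gogrep's API.

package main

import (
	"fmt"
	"go/ast"
	"go/parser"
	"go/token"
)

// parseStmts wraps src in a throwaway package and function so that
// go/parser can handle statement-level snippets, then unwraps the
// parsed statements again.
func parseStmts(src string) ([]ast.Stmt, error) {
	wrapped := "package p; func _() { " + src + " }"
	fset := token.NewFileSet()
	f, err := parser.ParseFile(fset, "", wrapped, parser.SkipObjectResolution)
	if err != nil {
		return nil, err
	}
	return f.Decls[0].(*ast.FuncDecl).Body.List, nil
}

func main() {
	stmts, err := parseStmts("x := 1; y := x + 2")
	if err != nil {
		panic(err)
	}
	fmt.Println(len(stmts)) // prints 2
}

Error positions reported by such a sketch are relative to the wrapped source; the removed parse.go shifts them back to user coordinates with subPosOffsets, as seen above.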
- mainErr = subPosOffsets(err, posOffset{1, 1, 22}) - - // try as a single declaration, or many - asDecl := execTmpl(tmplDecl, src) - if f, err := parser.ParseFile(fset, "", asDecl, 0); err == nil && noBadNodes(f) { - if len(f.Decls) == 1 { - return f.Decls[0], nil - } - slice := &NodeSlice{} - slice.assignDeclSlice(f.Decls) - return slice, nil - } - - // try as a whole file - if f, err := parser.ParseFile(fset, "", src, 0); err == nil && noBadNodes(f) { - return f, nil - } - - // type expressions not yet picked up, for e.g. chans and interfaces - if typ, f, err := parseType(fset, src); err == nil && noBadNodes(f) { - return typ, nil - } - - // value specs - asValSpec := execTmpl(tmplValSpec, src) - if f, err := parser.ParseFile(fset, "", asValSpec, 0); err == nil && noBadNodes(f) { - decl := f.Decls[0].(*ast.GenDecl) - if len(decl.Specs) != 0 { - vs := f.Decls[0].(*ast.GenDecl).Specs[0].(*ast.ValueSpec) - return vs, nil - } - } - - return nil, mainErr -} - -type posOffset struct { - atLine, atCol int - offset int -} - -func subPosOffsets(err error, offs ...posOffset) error { - list, ok := err.(scanner.ErrorList) - if !ok { - return err - } - for i, err := range list { - for _, off := range offs { - if err.Pos.Line != off.atLine { - continue - } - if err.Pos.Column < off.atCol { - continue - } - err.Pos.Column -= off.offset - } - list[i] = err - } - return list -} - -type fullToken struct { - pos token.Position - tok token.Token - lit string -} - -type caseStatus uint - -const ( - caseNone caseStatus = iota - caseNeedBlock - caseHere -) - -func tokenize(src []byte) ([]fullToken, error) { - var s scanner.Scanner - fset := token.NewFileSet() - file := fset.AddFile("", fset.Base(), len(src)) - - var err error - onError := func(pos token.Position, msg string) { - switch msg { // allow certain extra chars - case `illegal character U+0024 '$'`: - case `illegal character U+007E '~'`: - default: - err = fmt.Errorf("%v: %s", pos, msg) - } - } - - // we will modify the input source under the scanner's nose to - // enable some features such as regexes. 
- s.Init(file, src, onError, scanner.ScanComments) - - next := func() fullToken { - pos, tok, lit := s.Scan() - return fullToken{fset.Position(pos), tok, lit} - } - - caseStat := caseNone - - var toks []fullToken - for t := next(); t.tok != token.EOF; t = next() { - switch t.lit { - case "$": // continues below - case "switch", "select", "case": - if t.lit == "case" { - caseStat = caseNone - } else { - caseStat = caseNeedBlock - } - fallthrough - default: // regular Go code - if t.tok == token.LBRACE && caseStat == caseNeedBlock { - caseStat = caseHere - } - toks = append(toks, t) - continue - } - wt, err := tokenizeWildcard(t.pos, next) - if err != nil { - return nil, err - } - if caseStat == caseHere { - toks = append(toks, fullToken{wt.pos, token.IDENT, "case"}) - } - toks = append(toks, wt) - if caseStat == caseHere { - toks = append(toks, - fullToken{wt.pos, token.COLON, ""}, - fullToken{wt.pos, token.IDENT, "gogrep_body"}) - } - } - return toks, err -} - -type varInfo struct { - Name string - Seq bool -} - -func tokenizeWildcard(pos token.Position, next func() fullToken) (fullToken, error) { - t := next() - any := false - if t.tok == token.MUL { - t = next() - any = true - } - wildName := encodeWildName(t.lit, any) - wt := fullToken{pos, token.IDENT, wildName} - if t.tok != token.IDENT { - return wt, fmt.Errorf("%v: $ must be followed by ident, got %v", - t.pos, t.tok) - } - return wt, nil -} - -const wildSeparator = "ᐸᐳ" - -func isWildName(s string) bool { - return strings.HasPrefix(s, wildSeparator) -} - -func encodeWildName(name string, any bool) string { - suffix := "v" - if any { - suffix = "a" - } - return wildSeparator + name + wildSeparator + suffix -} - -func decodeWildName(s string) varInfo { - s = s[len(wildSeparator):] - nameEnd := strings.Index(s, wildSeparator) - name := s[:nameEnd+0] - s = s[nameEnd:] - s = s[len(wildSeparator):] - kind := s - return varInfo{Name: name, Seq: kind == "a"} -} - -func decodeWildNode(n ast.Node) varInfo { - switch n := n.(type) { - case *ast.ExprStmt: - return decodeWildNode(n.X) - case *ast.Ident: - if isWildName(n.Name) { - return decodeWildName(n.Name) - } - } - return varInfo{} -} - -type rangeClause struct { - X ast.Expr -} - -type rangeHeader struct { - Node *ast.RangeStmt -} - -func (*rangeClause) Pos() token.Pos { return 0 } -func (*rangeClause) End() token.Pos { return 0 } - -func (*rangeHeader) Pos() token.Pos { return 0 } -func (*rangeHeader) End() token.Pos { return 0 } diff --git a/vendor/github.com/quasilyte/gogrep/slices.go b/vendor/github.com/quasilyte/gogrep/slices.go deleted file mode 100644 index fb969b51b..000000000 --- a/vendor/github.com/quasilyte/gogrep/slices.go +++ /dev/null @@ -1,150 +0,0 @@ -package gogrep - -import ( - "go/ast" - "go/token" -) - -type NodeSliceKind uint32 - -const ( - ExprNodeSlice NodeSliceKind = iota - StmtNodeSlice - FieldNodeSlice - IdentNodeSlice - SpecNodeSlice - DeclNodeSlice -) - -type NodeSlice struct { - Kind NodeSliceKind - - exprSlice []ast.Expr - stmtSlice []ast.Stmt - fieldSlice []*ast.Field - identSlice []*ast.Ident - specSlice []ast.Spec - declSlice []ast.Decl -} - -func (s *NodeSlice) GetExprSlice() []ast.Expr { return s.exprSlice } -func (s *NodeSlice) GetStmtSlice() []ast.Stmt { return s.stmtSlice } -func (s *NodeSlice) GetFieldSlice() []*ast.Field { return s.fieldSlice } -func (s *NodeSlice) GetIdentSlice() []*ast.Ident { return s.identSlice } -func (s *NodeSlice) GetSpecSlice() []ast.Spec { return s.specSlice } -func (s *NodeSlice) GetDeclSlice() []ast.Decl { return 
s.declSlice } - -func (s *NodeSlice) assignExprSlice(xs []ast.Expr) { - s.Kind = ExprNodeSlice - s.exprSlice = xs -} - -func (s *NodeSlice) assignStmtSlice(xs []ast.Stmt) { - s.Kind = StmtNodeSlice - s.stmtSlice = xs -} - -func (s *NodeSlice) assignFieldSlice(xs []*ast.Field) { - s.Kind = FieldNodeSlice - s.fieldSlice = xs -} - -func (s *NodeSlice) assignIdentSlice(xs []*ast.Ident) { - s.Kind = IdentNodeSlice - s.identSlice = xs -} - -func (s *NodeSlice) assignSpecSlice(xs []ast.Spec) { - s.Kind = SpecNodeSlice - s.specSlice = xs -} - -func (s *NodeSlice) assignDeclSlice(xs []ast.Decl) { - s.Kind = DeclNodeSlice - s.declSlice = xs -} - -func (s *NodeSlice) Len() int { - switch s.Kind { - case ExprNodeSlice: - return len(s.exprSlice) - case StmtNodeSlice: - return len(s.stmtSlice) - case FieldNodeSlice: - return len(s.fieldSlice) - case IdentNodeSlice: - return len(s.identSlice) - case SpecNodeSlice: - return len(s.specSlice) - default: - return len(s.declSlice) - } -} - -func (s *NodeSlice) At(i int) ast.Node { - switch s.Kind { - case ExprNodeSlice: - return s.exprSlice[i] - case StmtNodeSlice: - return s.stmtSlice[i] - case FieldNodeSlice: - return s.fieldSlice[i] - case IdentNodeSlice: - return s.identSlice[i] - case SpecNodeSlice: - return s.specSlice[i] - default: - return s.declSlice[i] - } -} - -func (s *NodeSlice) SliceInto(dst *NodeSlice, i, j int) { - switch s.Kind { - case ExprNodeSlice: - dst.assignExprSlice(s.exprSlice[i:j]) - case StmtNodeSlice: - dst.assignStmtSlice(s.stmtSlice[i:j]) - case FieldNodeSlice: - dst.assignFieldSlice(s.fieldSlice[i:j]) - case IdentNodeSlice: - dst.assignIdentSlice(s.identSlice[i:j]) - case SpecNodeSlice: - dst.assignSpecSlice(s.specSlice[i:j]) - default: - dst.assignDeclSlice(s.declSlice[i:j]) - } -} - -func (s *NodeSlice) Pos() token.Pos { - switch s.Kind { - case ExprNodeSlice: - return s.exprSlice[0].Pos() - case StmtNodeSlice: - return s.stmtSlice[0].Pos() - case FieldNodeSlice: - return s.fieldSlice[0].Pos() - case IdentNodeSlice: - return s.identSlice[0].Pos() - case SpecNodeSlice: - return s.specSlice[0].Pos() - default: - return s.declSlice[0].Pos() - } -} - -func (s *NodeSlice) End() token.Pos { - switch s.Kind { - case ExprNodeSlice: - return s.exprSlice[len(s.exprSlice)-1].End() - case StmtNodeSlice: - return s.stmtSlice[len(s.stmtSlice)-1].End() - case FieldNodeSlice: - return s.fieldSlice[len(s.fieldSlice)-1].End() - case IdentNodeSlice: - return s.identSlice[len(s.identSlice)-1].End() - case SpecNodeSlice: - return s.specSlice[len(s.specSlice)-1].End() - default: - return s.declSlice[len(s.declSlice)-1].End() - } -} diff --git a/vendor/github.com/quasilyte/regex/syntax/LICENSE b/vendor/github.com/quasilyte/regex/syntax/LICENSE deleted file mode 100644 index f0c81282b..000000000 --- a/vendor/github.com/quasilyte/regex/syntax/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -MIT License - -Copyright (c) 2020 Iskander (Alex) Sharipov / quasilyte - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. 
- -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. diff --git a/vendor/github.com/quasilyte/regex/syntax/README.md b/vendor/github.com/quasilyte/regex/syntax/README.md deleted file mode 100644 index b70e25ad9..000000000 --- a/vendor/github.com/quasilyte/regex/syntax/README.md +++ /dev/null @@ -1,29 +0,0 @@ -# Package `regex/syntax` - -Package `syntax` provides regular expressions parser as well as AST definitions. - -## Rationale - -The advantages of this package over stdlib [regexp/syntax](https://golang.org/pkg/regexp/syntax/): - -1. Does not transformations/optimizations during the parsing. - The produced parse tree is loseless. - -2. Simpler AST representation. - -3. Can parse most PCRE operations in addition to [re2](https://github.com/google/re2/wiki/Syntax) syntax. - It can also handle PHP/Perl style patterns with delimiters. - -4. This package is easier to extend than something from the standard library. - -This package does almost no assumptions about how generated AST is going to be used -so it preserves as much syntax information as possible. - -It's easy to write another intermediate representation on top of it. The main -function of this package is to convert a textual regexp pattern into a more -structured form that can be processed more easily. - -## Users - -* [go-critic](https://github.com/go-critic/go-critic) - Go static analyzer -* [NoVerify](https://github.com/VKCOM/noverify) - PHP static analyzer diff --git a/vendor/github.com/quasilyte/regex/syntax/ast.go b/vendor/github.com/quasilyte/regex/syntax/ast.go deleted file mode 100644 index 4d21a9432..000000000 --- a/vendor/github.com/quasilyte/regex/syntax/ast.go +++ /dev/null @@ -1,64 +0,0 @@ -package syntax - -import ( - "strings" -) - -type Regexp struct { - Pattern string - Expr Expr -} - -type RegexpPCRE struct { - Pattern string - Expr Expr - - Source string - Modifiers string - Delim [2]byte -} - -func (re *RegexpPCRE) HasModifier(mod byte) bool { - return strings.IndexByte(re.Modifiers, mod) >= 0 -} - -type Expr struct { - // The operations that this expression performs. See `operation.go`. - Op Operation - - Form Form - - _ [2]byte // Reserved - - // Pos describes a source location inside regexp pattern. - Pos Position - - // Args is a list of sub-expressions of this expression. - // - // See Operation constants documentation to learn how to - // interpret the particular expression args. - Args []Expr - - // Value holds expression textual value. - // - // Usually, that value is identical to src[Begin():End()], - // but this is not true for programmatically generated objects. - Value string -} - -// Begin returns expression leftmost offset. -func (e Expr) Begin() uint16 { return e.Pos.Begin } - -// End returns expression rightmost offset. -func (e Expr) End() uint16 { return e.Pos.End } - -// LastArg returns expression last argument. -// -// Should not be called on expressions that may have 0 arguments. 
-func (e Expr) LastArg() Expr { - return e.Args[len(e.Args)-1] -} - -type Operation byte - -type Form byte diff --git a/vendor/github.com/quasilyte/regex/syntax/errors.go b/vendor/github.com/quasilyte/regex/syntax/errors.go deleted file mode 100644 index beefba5f9..000000000 --- a/vendor/github.com/quasilyte/regex/syntax/errors.go +++ /dev/null @@ -1,27 +0,0 @@ -package syntax - -type ParseError struct { - Pos Position - Message string -} - -func (e ParseError) Error() string { return e.Message } - -func throw(pos Position, message string) { - panic(ParseError{Pos: pos, Message: message}) -} - -func throwExpectedFound(pos Position, expected, found string) { - throw(pos, "expected '"+expected+"', found '"+found+"'") -} - -func throwUnexpectedToken(pos Position, token string) { - throw(pos, "unexpected token: "+token) -} - -func newPos(begin, end int) Position { - return Position{ - Begin: uint16(begin), - End: uint16(end), - } -} diff --git a/vendor/github.com/quasilyte/regex/syntax/lexer.go b/vendor/github.com/quasilyte/regex/syntax/lexer.go deleted file mode 100644 index aae146c2e..000000000 --- a/vendor/github.com/quasilyte/regex/syntax/lexer.go +++ /dev/null @@ -1,454 +0,0 @@ -package syntax - -import ( - "strings" - "unicode/utf8" -) - -type token struct { - kind tokenKind - pos Position -} - -func (tok token) String() string { - return tok.kind.String() -} - -type tokenKind byte - -//go:generate stringer -type=tokenKind -trimprefix=tok -linecomment=true -const ( - tokNone tokenKind = iota - - tokChar - tokGroupFlags - tokPosixClass - tokConcat - tokRepeat - tokEscapeChar - tokEscapeMeta - tokEscapeOctal - tokEscapeUni - tokEscapeUniFull - tokEscapeHex - tokEscapeHexFull - tokComment - - tokQ // \Q - tokMinus // - - tokLbracket // [ - tokLbracketCaret // [^ - tokRbracket // ] - tokDollar // $ - tokCaret // ^ - tokQuestion // ? - tokDot // . - tokPlus // + - tokStar // * - tokPipe // | - tokLparen // ( - tokLparenName // (?P - tokLparenNameAngle // (? - tokLparenNameQuote // (?'name' - tokLparenFlags // (?flags - tokLparenAtomic // (?> - tokLparenPositiveLookahead // (?= - tokLparenPositiveLookbehind // (?<= - tokLparenNegativeLookahead // (?! - tokLparenNegativeLookbehind // (?= utf8.RuneSelf { - _, size := utf8.DecodeRuneInString(l.input[l.pos:]) - l.pushTok(tokChar, size) - l.maybeInsertConcat() - continue - } - switch ch { - case '\\': - l.scanEscape(false) - case '.': - l.pushTok(tokDot, 1) - case '+': - l.pushTok(tokPlus, 1) - case '*': - l.pushTok(tokStar, 1) - case '^': - l.pushTok(tokCaret, 1) - case '$': - l.pushTok(tokDollar, 1) - case '?': - l.pushTok(tokQuestion, 1) - case ')': - l.pushTok(tokRparen, 1) - case '|': - l.pushTok(tokPipe, 1) - case '[': - if l.byteAt(l.pos+1) == '^' { - l.pushTok(tokLbracketCaret, 2) - } else { - l.pushTok(tokLbracket, 1) - } - l.scanCharClass() - case '(': - if l.byteAt(l.pos+1) == '?' 
{ - switch { - case l.byteAt(l.pos+2) == '>': - l.pushTok(tokLparenAtomic, len("(?>")) - case l.byteAt(l.pos+2) == '=': - l.pushTok(tokLparenPositiveLookahead, len("(?=")) - case l.byteAt(l.pos+2) == '!': - l.pushTok(tokLparenNegativeLookahead, len("(?!")) - case l.byteAt(l.pos+2) == '<' && l.byteAt(l.pos+3) == '=': - l.pushTok(tokLparenPositiveLookbehind, len("(?<=")) - case l.byteAt(l.pos+2) == '<' && l.byteAt(l.pos+3) == '!': - l.pushTok(tokLparenNegativeLookbehind, len("(?= 0 { - l.pushTok(tokRepeat, len("{")+j) - } else { - l.pushTok(tokChar, 1) - } - default: - l.pushTok(tokChar, 1) - } - l.maybeInsertConcat() - } -} - -func (l *lexer) scanCharClass() { - l.maybeInsertConcat() - - // We need to handle first `]` in a special way. See #3. - if l.byteAt(l.pos) == ']' { - l.pushTok(tokChar, 1) - } - - for l.pos < len(l.input) { - ch := l.input[l.pos] - if ch >= utf8.RuneSelf { - _, size := utf8.DecodeRuneInString(l.input[l.pos:]) - l.pushTok(tokChar, size) - continue - } - switch ch { - case '\\': - l.scanEscape(true) - case '[': - isPosixClass := false - if l.byteAt(l.pos+1) == ':' { - j := l.stringIndex(l.pos+2, ":]") - if j >= 0 { - isPosixClass = true - l.pushTok(tokPosixClass, j+len("[::]")) - } - } - if !isPosixClass { - l.pushTok(tokChar, 1) - } - case '-': - l.pushTok(tokMinus, 1) - case ']': - l.pushTok(tokRbracket, 1) - return // Stop scanning in the char context - default: - l.pushTok(tokChar, 1) - } - } -} - -func (l *lexer) scanEscape(insideCharClass bool) { - s := l.input - if l.pos+1 >= len(s) { - throw(newPos(l.pos, l.pos+1), `unexpected end of pattern: trailing '\'`) - } - switch { - case s[l.pos+1] == 'p' || s[l.pos+1] == 'P': - if l.pos+2 >= len(s) { - throw(newPos(l.pos, l.pos+2), "unexpected end of pattern: expected uni-class-short or '{'") - } - if s[l.pos+2] == '{' { - j := strings.IndexByte(s[l.pos+2:], '}') - if j < 0 { - throw(newPos(l.pos, l.pos+2), "can't find closing '}'") - } - l.pushTok(tokEscapeUniFull, len(`\p{`)+j) - } else { - l.pushTok(tokEscapeUni, len(`\pL`)) - } - case s[l.pos+1] == 'x': - if l.pos+2 >= len(s) { - throw(newPos(l.pos, l.pos+2), "unexpected end of pattern: expected hex-digit or '{'") - } - if s[l.pos+2] == '{' { - j := strings.IndexByte(s[l.pos+2:], '}') - if j < 0 { - throw(newPos(l.pos, l.pos+2), "can't find closing '}'") - } - l.pushTok(tokEscapeHexFull, len(`\x{`)+j) - } else { - if isHexDigit(l.byteAt(l.pos + 3)) { - l.pushTok(tokEscapeHex, len(`\xFF`)) - } else { - l.pushTok(tokEscapeHex, len(`\xF`)) - } - } - case isOctalDigit(s[l.pos+1]): - digits := 1 - if isOctalDigit(l.byteAt(l.pos + 2)) { - if isOctalDigit(l.byteAt(l.pos + 3)) { - digits = 3 - } else { - digits = 2 - } - } - l.pushTok(tokEscapeOctal, len(`\`)+digits) - case s[l.pos+1] == 'Q': - size := len(s) - l.pos // Until the pattern ends - j := l.stringIndex(l.pos+2, `\E`) - if j >= 0 { - size = j + len(`\Q\E`) - } - l.pushTok(tokQ, size) - - default: - ch := l.byteAt(l.pos + 1) - if ch >= utf8.RuneSelf { - _, size := utf8.DecodeRuneInString(l.input[l.pos+1:]) - l.pushTok(tokEscapeChar, len(`\`)+size) - return - } - kind := tokEscapeChar - if insideCharClass { - if charClassMetachar[ch] { - kind = tokEscapeMeta - } - } else { - if reMetachar[ch] { - kind = tokEscapeMeta - } - } - l.pushTok(kind, 2) - } -} - -func (l *lexer) maybeInsertConcat() { - if l.isConcatPos() { - last := len(l.tokens) - 1 - tok := l.tokens[last] - l.tokens[last].kind = tokConcat - l.tokens = append(l.tokens, tok) - } -} - -func (l *lexer) Init(s string) { - l.pos = 0 - l.tokens = l.tokens[:0] - 
l.input = s - - l.scan() - - l.pos = 0 -} - -func (l *lexer) tryScanGroupName(pos int) bool { - tok := tokLparenName - endCh := byte('>') - offset := 1 - switch l.byteAt(pos) { - case '\'': - endCh = '\'' - tok = tokLparenNameQuote - case '<': - tok = tokLparenNameAngle - case 'P': - offset = 2 - default: - return false - } - if pos+offset >= len(l.input) { - return false - } - end := strings.IndexByte(l.input[pos+offset:], endCh) - if end < 0 { - return false - } - l.pushTok(tok, len("(?")+offset+end+1) - return true -} - -func (l *lexer) tryScanGroupFlags(pos int) bool { - colonPos := strings.IndexByte(l.input[pos:], ':') - parenPos := strings.IndexByte(l.input[pos:], ')') - if parenPos < 0 { - return false - } - end := parenPos - if colonPos >= 0 && colonPos < parenPos { - end = colonPos + len(":") - } - l.pushTok(tokLparenFlags, len("(?")+end) - return true -} - -func (l *lexer) tryScanComment(pos int) bool { - if l.byteAt(pos) != '#' { - return false - } - parenPos := strings.IndexByte(l.input[pos:], ')') - if parenPos < 0 { - return false - } - l.pushTok(tokComment, len("(?")+parenPos+len(")")) - return true -} - -func (l *lexer) repeatWidth(pos int) int { - j := pos - for isDigit(l.byteAt(j)) { - j++ - } - if j == pos { - return -1 - } - if l.byteAt(j) == '}' { - return (j + len("}")) - pos // {min} - } - if l.byteAt(j) != ',' { - return -1 - } - j += len(",") - for isDigit(l.byteAt(j)) { - j++ - } - if l.byteAt(j) == '}' { - return (j + len("}")) - pos // {min,} or {min,max} - } - return -1 -} - -func (l *lexer) stringIndex(offset int, s string) int { - if offset < len(l.input) { - return strings.Index(l.input[offset:], s) - } - return -1 -} - -func (l *lexer) byteAt(pos int) byte { - if pos >= 0 && pos < len(l.input) { - return l.input[pos] - } - return 0 -} - -func (l *lexer) pushTok(kind tokenKind, size int) { - l.tokens = append(l.tokens, token{ - kind: kind, - pos: Position{Begin: uint16(l.pos), End: uint16(l.pos + size)}, - }) - l.pos += size -} - -func (l *lexer) isConcatPos() bool { - if len(l.tokens) < 2 { - return false - } - x := l.tokens[len(l.tokens)-2].kind - if concatTable[x]&concatX != 0 { - return false - } - y := l.tokens[len(l.tokens)-1].kind - return concatTable[y]&concatY == 0 -} - -const ( - concatX byte = 1 << iota - concatY -) - -var concatTable = [256]byte{ - tokPipe: concatX | concatY, - - tokLparen: concatX, - tokLparenFlags: concatX, - tokLparenName: concatX, - tokLparenNameAngle: concatX, - tokLparenNameQuote: concatX, - tokLparenAtomic: concatX, - tokLbracket: concatX, - tokLbracketCaret: concatX, - tokLparenPositiveLookahead: concatX, - tokLparenPositiveLookbehind: concatX, - tokLparenNegativeLookahead: concatX, - tokLparenNegativeLookbehind: concatX, - - tokRparen: concatY, - tokRbracket: concatY, - tokPlus: concatY, - tokStar: concatY, - tokQuestion: concatY, - tokRepeat: concatY, -} diff --git a/vendor/github.com/quasilyte/regex/syntax/operation.go b/vendor/github.com/quasilyte/regex/syntax/operation.go deleted file mode 100644 index 0fc8fc521..000000000 --- a/vendor/github.com/quasilyte/regex/syntax/operation.go +++ /dev/null @@ -1,195 +0,0 @@ -package syntax - -//go:generate stringer -type=Operation -trimprefix=Op -const ( - OpNone Operation = iota - - // OpConcat is a concatenation of ops. - // Examples: `xy` `abc\d` `` - // Args - concatenated ops - // - // As a special case, OpConcat with 0 Args is used for "empty" - // set of operations. - OpConcat - - // OpDot is a '.' wildcard. - OpDot - - // OpAlt is x|y alternation of ops. 
- // Examples: `a|bc` `x(.*?)|y(.*?)` - // Args - union-connected regexp branches - OpAlt - - // OpStar is a shorthand for {0,} repetition. - // Examples: `x*` - // Args[0] - repeated expression - OpStar - - // OpPlus is a shorthand for {1,} repetition. - // Examples: `x+` - // Args[0] - repeated expression - OpPlus - - // OpQuestion is a shorthand for {0,1} repetition. - // Examples: `x?` - // Args[0] - repeated expression - OpQuestion - - // OpNonGreedy makes its operand quantifier non-greedy. - // Examples: `x??` `x*?` `x+?` - // Args[0] - quantified expression - OpNonGreedy - - // OpPossessive makes its operand quantifier possessive. - // Examples: `x?+` `x*+` `x++` - // Args[0] - quantified expression - OpPossessive - - // OpCaret is ^ anchor. - OpCaret - - // OpDollar is $ anchor. - OpDollar - - // OpLiteral is a collection of consecutive chars. - // Examples: `ab` `10x` - // Args - enclosed characters (OpChar) - OpLiteral - - // OpChar is a single literal pattern character. - // Examples: `a` `6` `ф` - OpChar - - // OpString is an artificial element that is used in other expressions. - OpString - - // OpQuote is a \Q...\E enclosed literal. - // Examples: `\Q.?\E` `\Q?q[]=1` - // FormQuoteUnclosed: `\Qabc` - // Args[0] - literal value (OpString) - OpQuote - - // OpEscapeChar is a single char escape. - // Examples: `\d` `\a` `\n` - // Args[0] - escaped value (OpString) - OpEscapeChar - - // OpEscapeMeta is an escaped meta char. - // Examples: `\(` `\[` `\+` - // Args[0] - escaped value (OpString) - OpEscapeMeta - - // OpEscapeOctal is an octal char code escape (up to 3 digits). - // Examples: `\123` `\12` - // Args[0] - escaped value (OpString) - OpEscapeOctal - - // OpEscapeHex is a hex char code escape. - // Examples: `\x7F` `\xF7` - // FormEscapeHexFull examples: `\x{10FFFF}` `\x{F}`. - // Args[0] - escaped value (OpString) - OpEscapeHex - - // OpEscapeUni is a Unicode char class escape. - // Examples: `\pS` `\pL` `\PL` - // FormEscapeUniFull examples: `\p{Greek}` `\p{Symbol}` `\p{^L}` - // Args[0] - escaped value (OpString) - OpEscapeUni - - // OpCharClass is a char class enclosed in []. - // Examples: `[abc]` `[a-z0-9\]]` - // Args - char class elements (can include OpCharRange and OpPosixClass) - OpCharClass - - // OpNegCharClass is a negated char class enclosed in []. - // Examples: `[^abc]` `[^a-z0-9\]]` - // Args - char class elements (can include OpCharRange and OpPosixClass) - OpNegCharClass - - // OpCharRange is an inclusive char range inside a char class. - // Examples: `0-9` `A-Z` - // Args[0] - range lower bound - // Args[1] - range upper bound - OpCharRange - - // OpPosixClass is a named ASCII char set inside a char class. - // Examples: `[:alpha:]` `[:blank:]` - OpPosixClass - - // OpRepeat is a {min,max} repetition quantifier. - // Examples: `x{5}` `x{min,max}` `x{min,}` - // Args[0] - repeated expression - // Args[1] - repeat count (OpString) - OpRepeat - - // OpCapture is `(re)` capturing group. - // Examples: `(abc)` `(x|y)` - // Args[0] - enclosed expression - OpCapture - - // OpNamedCapture is `(?Pre)` capturing group. - // Examples: `(?Pabc)` `(?Px|y)` - // FormNamedCaptureAngle examples: `(?abc)` `(?x|y)` - // FormNamedCaptureQuote examples: `(?'foo'abc)` `(?'name'x|y)` - // Args[0] - enclosed expression (OpConcat with 0 args for empty group) - // Args[1] - group name (OpString) - OpNamedCapture - - // OpGroup is `(?:re)` non-capturing group. 
- // Examples: `(?:abc)` `(?:x|y)` - // Args[0] - enclosed expression (OpConcat with 0 args for empty group) - OpGroup - - // OpGroupWithFlags is `(?flags:re)` non-capturing group. - // Examples: `(?i:abc)` `(?i:x|y)` - // Args[0] - enclosed expression (OpConcat with 0 args for empty group) - // Args[1] - flags (OpString) - OpGroupWithFlags - - // OpAtomicGroup is `(?>re)` non-capturing group without backtracking. - // Examples: `(?>foo)` `(?>)` - // Args[0] - enclosed expression (OpConcat with 0 args for empty group) - OpAtomicGroup - - // OpPositiveLookahead is `(?=re)` asserts that following text matches re. - // Examples: `(?=foo)` - // Args[0] - enclosed expression (OpConcat with 0 args for empty group) - OpPositiveLookahead - - // OpNegativeLookahead is `(?!re)` asserts that following text doesn't match re. - // Examples: `(?!foo)` - // Args[0] - enclosed expression (OpConcat with 0 args for empty group) - OpNegativeLookahead - - // OpPositiveLookbehind is `(?<=re)` asserts that preceding text matches re. - // Examples: `(?<=foo)` - // Args[0] - enclosed expression (OpConcat with 0 args for empty group) - OpPositiveLookbehind - - // OpNegativeLookbehind is `(?=re)` asserts that preceding text doesn't match re. - // Examples: `(?= Operation(len(_Operation_index)-1) { - return "Operation(" + strconv.FormatInt(int64(i), 10) + ")" - } - return _Operation_name[_Operation_index[i]:_Operation_index[i+1]] -} diff --git a/vendor/github.com/quasilyte/regex/syntax/parser.go b/vendor/github.com/quasilyte/regex/syntax/parser.go deleted file mode 100644 index f1c154f31..000000000 --- a/vendor/github.com/quasilyte/regex/syntax/parser.go +++ /dev/null @@ -1,503 +0,0 @@ -package syntax - -import ( - "errors" - "strings" -) - -type ParserOptions struct { - // NoLiterals disables OpChar merging into OpLiteral. - NoLiterals bool -} - -func NewParser(opts *ParserOptions) *Parser { - return newParser(opts) -} - -type Parser struct { - out Regexp - lexer lexer - exprPool []Expr - - prefixParselets [256]prefixParselet - infixParselets [256]infixParselet - - charClass []Expr - allocated uint - - opts ParserOptions -} - -// ParsePCRE parses PHP-style pattern with delimiters. -// An example of such pattern is `/foo/i`. 
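For reference, a small usage sketch of the parser defined in this removed file, assuming the upstream module github.com/quasilyte/regex/syntax (the same package being dropped from vendor/ here); the pattern and printed output are illustrative only.

package main

import (
	"fmt"
	"strings"

	"github.com/quasilyte/regex/syntax"
)

// dump prints an Expr tree using the Op/Args/Value fields documented above.
func dump(e syntax.Expr, depth int) {
	fmt.Printf("%s%v %q\n", strings.Repeat("  ", depth), e.Op, e.Value)
	for _, arg := range e.Args {
		dump(arg, depth+1)
	}
}

func main() {
	p := syntax.NewParser(nil)

	// ParsePCRE understands delimited, PHP-style patterns such as /foo+/i.
	pcre, err := p.ParsePCRE("/foo+/i")
	if err != nil {
		panic(err)
	}
	fmt.Println(pcre.Pattern, pcre.Modifiers, pcre.HasModifier('i')) // foo+ i true
	dump(pcre.Expr, 0)
}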
-func (p *Parser) ParsePCRE(pattern string) (*RegexpPCRE, error) { - pcre, err := p.newPCRE(pattern) - if err != nil { - return nil, err - } - if pcre.HasModifier('x') { - return nil, errors.New("'x' modifier is not supported") - } - re, err := p.Parse(pcre.Pattern) - if re != nil { - pcre.Expr = re.Expr - } - return pcre, err -} - -func (p *Parser) Parse(pattern string) (result *Regexp, err error) { - defer func() { - r := recover() - if r == nil { - return - } - if err2, ok := r.(ParseError); ok { - err = err2 - return - } - panic(r) - }() - - p.lexer.Init(pattern) - p.allocated = 0 - p.out.Pattern = pattern - if pattern == "" { - p.out.Expr = *p.newExpr(OpConcat, Position{}) - } else { - p.out.Expr = *p.parseExpr(0) - } - - if !p.opts.NoLiterals { - p.mergeChars(&p.out.Expr) - } - p.setValues(&p.out.Expr) - - return &p.out, nil -} - -type prefixParselet func(token) *Expr - -type infixParselet func(*Expr, token) *Expr - -func newParser(opts *ParserOptions) *Parser { - var p Parser - - if opts != nil { - p.opts = *opts - } - p.exprPool = make([]Expr, 256) - - for tok, op := range tok2op { - if op != 0 { - p.prefixParselets[tokenKind(tok)] = p.parsePrefixElementary - } - } - - p.prefixParselets[tokQ] = func(tok token) *Expr { - litPos := tok.pos - litPos.Begin += uint16(len(`\Q`)) - form := FormQuoteUnclosed - if strings.HasSuffix(p.tokenValue(tok), `\E`) { - litPos.End -= uint16(len(`\E`)) - form = FormDefault - } - lit := p.newExpr(OpString, litPos) - return p.newExprForm(OpQuote, form, tok.pos, lit) - } - - p.prefixParselets[tokEscapeHexFull] = func(tok token) *Expr { - litPos := tok.pos - litPos.Begin += uint16(len(`\x{`)) - litPos.End -= uint16(len(`}`)) - lit := p.newExpr(OpString, litPos) - return p.newExprForm(OpEscapeHex, FormEscapeHexFull, tok.pos, lit) - } - p.prefixParselets[tokEscapeUniFull] = func(tok token) *Expr { - litPos := tok.pos - litPos.Begin += uint16(len(`\p{`)) - litPos.End -= uint16(len(`}`)) - lit := p.newExpr(OpString, litPos) - return p.newExprForm(OpEscapeUni, FormEscapeUniFull, tok.pos, lit) - } - - p.prefixParselets[tokEscapeHex] = func(tok token) *Expr { return p.parseEscape(OpEscapeHex, `\x`, tok) } - p.prefixParselets[tokEscapeOctal] = func(tok token) *Expr { return p.parseEscape(OpEscapeOctal, `\`, tok) } - p.prefixParselets[tokEscapeChar] = func(tok token) *Expr { return p.parseEscape(OpEscapeChar, `\`, tok) } - p.prefixParselets[tokEscapeMeta] = func(tok token) *Expr { return p.parseEscape(OpEscapeMeta, `\`, tok) } - p.prefixParselets[tokEscapeUni] = func(tok token) *Expr { return p.parseEscape(OpEscapeUni, `\p`, tok) } - - p.prefixParselets[tokLparen] = func(tok token) *Expr { return p.parseGroup(OpCapture, tok) } - p.prefixParselets[tokLparenAtomic] = func(tok token) *Expr { return p.parseGroup(OpAtomicGroup, tok) } - p.prefixParselets[tokLparenPositiveLookahead] = func(tok token) *Expr { return p.parseGroup(OpPositiveLookahead, tok) } - p.prefixParselets[tokLparenNegativeLookahead] = func(tok token) *Expr { return p.parseGroup(OpNegativeLookahead, tok) } - p.prefixParselets[tokLparenPositiveLookbehind] = func(tok token) *Expr { return p.parseGroup(OpPositiveLookbehind, tok) } - p.prefixParselets[tokLparenNegativeLookbehind] = func(tok token) *Expr { return p.parseGroup(OpNegativeLookbehind, tok) } - - p.prefixParselets[tokLparenName] = func(tok token) *Expr { - return p.parseNamedCapture(FormDefault, tok) - } - p.prefixParselets[tokLparenNameAngle] = func(tok token) *Expr { - return p.parseNamedCapture(FormNamedCaptureAngle, tok) - } - 
p.prefixParselets[tokLparenNameQuote] = func(tok token) *Expr { - return p.parseNamedCapture(FormNamedCaptureQuote, tok) - } - - p.prefixParselets[tokLparenFlags] = p.parseGroupWithFlags - - p.prefixParselets[tokPipe] = func(tok token) *Expr { - // We need prefix pipe parselet to handle `(|x)` syntax. - right := p.parseExpr(1) - return p.newExpr(OpAlt, tok.pos, p.newEmpty(tok.pos), right) - } - p.prefixParselets[tokLbracket] = func(tok token) *Expr { - return p.parseCharClass(OpCharClass, tok) - } - p.prefixParselets[tokLbracketCaret] = func(tok token) *Expr { - return p.parseCharClass(OpNegCharClass, tok) - } - - p.infixParselets[tokRepeat] = func(left *Expr, tok token) *Expr { - repeatLit := p.newExpr(OpString, tok.pos) - return p.newExpr(OpRepeat, combinePos(left.Pos, tok.pos), left, repeatLit) - } - p.infixParselets[tokStar] = func(left *Expr, tok token) *Expr { - return p.newExpr(OpStar, combinePos(left.Pos, tok.pos), left) - } - p.infixParselets[tokConcat] = func(left *Expr, tok token) *Expr { - right := p.parseExpr(2) - if left.Op == OpConcat { - left.Args = append(left.Args, *right) - left.Pos.End = right.End() - return left - } - return p.newExpr(OpConcat, combinePos(left.Pos, right.Pos), left, right) - } - p.infixParselets[tokPipe] = p.parseAlt - p.infixParselets[tokMinus] = p.parseMinus - p.infixParselets[tokPlus] = p.parsePlus - p.infixParselets[tokQuestion] = p.parseQuestion - - return &p -} - -func (p *Parser) setValues(e *Expr) { - for i := range e.Args { - p.setValues(&e.Args[i]) - } - e.Value = p.exprValue(e) -} - -func (p *Parser) tokenValue(tok token) string { - return p.out.Pattern[tok.pos.Begin:tok.pos.End] -} - -func (p *Parser) exprValue(e *Expr) string { - return p.out.Pattern[e.Begin():e.End()] -} - -func (p *Parser) mergeChars(e *Expr) { - for i := range e.Args { - p.mergeChars(&e.Args[i]) - } - if e.Op != OpConcat || len(e.Args) < 2 { - return - } - - args := e.Args[:0] - i := 0 - for i < len(e.Args) { - first := i - chars := 0 - for j := i; j < len(e.Args) && e.Args[j].Op == OpChar; j++ { - chars++ - } - if chars > 1 { - c1 := e.Args[first] - c2 := e.Args[first+chars-1] - lit := p.newExpr(OpLiteral, combinePos(c1.Pos, c2.Pos)) - for j := 0; j < chars; j++ { - lit.Args = append(lit.Args, e.Args[first+j]) - } - args = append(args, *lit) - i += chars - } else { - args = append(args, e.Args[i]) - i++ - } - } - if len(args) == 1 { - *e = args[0] // Turn OpConcat into OpLiteral - } else { - e.Args = args - } -} - -func (p *Parser) newEmpty(pos Position) *Expr { - return p.newExpr(OpConcat, pos) -} - -func (p *Parser) newExprForm(op Operation, form Form, pos Position, args ...*Expr) *Expr { - e := p.newExpr(op, pos, args...) 
- e.Form = form - return e -} - -func (p *Parser) newExpr(op Operation, pos Position, args ...*Expr) *Expr { - e := p.allocExpr() - *e = Expr{ - Op: op, - Pos: pos, - Args: e.Args[:0], - } - for _, arg := range args { - e.Args = append(e.Args, *arg) - } - return e -} - -func (p *Parser) allocExpr() *Expr { - i := p.allocated - if i < uint(len(p.exprPool)) { - p.allocated++ - return &p.exprPool[i] - } - return &Expr{} -} - -func (p *Parser) expect(kind tokenKind) Position { - tok := p.lexer.NextToken() - if tok.kind != kind { - throwExpectedFound(tok.pos, kind.String(), tok.kind.String()) - } - return tok.pos -} - -func (p *Parser) parseExpr(precedence int) *Expr { - tok := p.lexer.NextToken() - prefix := p.prefixParselets[tok.kind] - if prefix == nil { - throwUnexpectedToken(tok.pos, tok.String()) - } - left := prefix(tok) - - for precedence < p.precedenceOf(p.lexer.Peek()) { - tok := p.lexer.NextToken() - infix := p.infixParselets[tok.kind] - left = infix(left, tok) - } - - return left -} - -func (p *Parser) parsePrefixElementary(tok token) *Expr { - return p.newExpr(tok2op[tok.kind], tok.pos) -} - -func (p *Parser) parseCharClass(op Operation, tok token) *Expr { - var endPos Position - p.charClass = p.charClass[:0] - for { - p.charClass = append(p.charClass, *p.parseExpr(0)) - next := p.lexer.Peek() - if next.kind == tokRbracket { - endPos = next.pos - p.lexer.NextToken() - break - } - if next.kind == tokNone { - throw(tok.pos, "unterminated '['") - } - } - - result := p.newExpr(op, combinePos(tok.pos, endPos)) - result.Args = append(result.Args, p.charClass...) - return result -} - -func (p *Parser) parseMinus(left *Expr, tok token) *Expr { - if p.isValidCharRangeOperand(left) { - if p.lexer.Peek().kind != tokRbracket { - right := p.parseExpr(2) - return p.newExpr(OpCharRange, combinePos(left.Pos, right.Pos), left, right) - } - } - p.charClass = append(p.charClass, *left) - return p.newExpr(OpChar, tok.pos) -} - -func (p *Parser) isValidCharRangeOperand(e *Expr) bool { - switch e.Op { - case OpEscapeHex, OpEscapeOctal, OpEscapeMeta, OpChar: - return true - case OpEscapeChar: - switch p.exprValue(e) { - case `\\`, `\|`, `\*`, `\+`, `\?`, `\.`, `\[`, `\^`, `\$`, `\(`, `\)`: - return true - } - } - return false -} - -func (p *Parser) parsePlus(left *Expr, tok token) *Expr { - op := OpPlus - switch left.Op { - case OpPlus, OpStar, OpQuestion, OpRepeat: - op = OpPossessive - } - return p.newExpr(op, combinePos(left.Pos, tok.pos), left) -} - -func (p *Parser) parseQuestion(left *Expr, tok token) *Expr { - op := OpQuestion - switch left.Op { - case OpPlus, OpStar, OpQuestion, OpRepeat: - op = OpNonGreedy - } - return p.newExpr(op, combinePos(left.Pos, tok.pos), left) -} - -func (p *Parser) parseAlt(left *Expr, tok token) *Expr { - var right *Expr - switch p.lexer.Peek().kind { - case tokRparen, tokNone: - // This is needed to handle `(x|)` syntax. 
- right = p.newEmpty(tok.pos) - default: - right = p.parseExpr(1) - } - if left.Op == OpAlt { - left.Args = append(left.Args, *right) - left.Pos.End = right.End() - return left - } - return p.newExpr(OpAlt, combinePos(left.Pos, right.Pos), left, right) -} - -func (p *Parser) parseGroupItem(tok token) *Expr { - if p.lexer.Peek().kind == tokRparen { - // This is needed to handle `() syntax.` - return p.newEmpty(tok.pos) - } - return p.parseExpr(0) -} - -func (p *Parser) parseGroup(op Operation, tok token) *Expr { - x := p.parseGroupItem(tok) - result := p.newExpr(op, tok.pos, x) - result.Pos.End = p.expect(tokRparen).End - return result -} - -func (p *Parser) parseNamedCapture(form Form, tok token) *Expr { - prefixLen := len("(?<") - if form == FormDefault { - prefixLen = len("(?P<") - } - name := p.newExpr(OpString, Position{ - Begin: tok.pos.Begin + uint16(prefixLen), - End: tok.pos.End - uint16(len(">")), - }) - x := p.parseGroupItem(tok) - result := p.newExprForm(OpNamedCapture, form, tok.pos, x, name) - result.Pos.End = p.expect(tokRparen).End - return result -} - -func (p *Parser) parseGroupWithFlags(tok token) *Expr { - var result *Expr - val := p.out.Pattern[tok.pos.Begin+1 : tok.pos.End] - switch { - case !strings.HasSuffix(val, ":"): - flags := p.newExpr(OpString, Position{ - Begin: tok.pos.Begin + uint16(len("(?")), - End: tok.pos.End, - }) - result = p.newExpr(OpFlagOnlyGroup, tok.pos, flags) - case val == "?:": - x := p.parseGroupItem(tok) - result = p.newExpr(OpGroup, tok.pos, x) - default: - flags := p.newExpr(OpString, Position{ - Begin: tok.pos.Begin + uint16(len("(?")), - End: tok.pos.End - uint16(len(":")), - }) - x := p.parseGroupItem(tok) - result = p.newExpr(OpGroupWithFlags, tok.pos, x, flags) - } - result.Pos.End = p.expect(tokRparen).End - return result -} - -func (p *Parser) parseEscape(op Operation, prefix string, tok token) *Expr { - litPos := tok.pos - litPos.Begin += uint16(len(prefix)) - lit := p.newExpr(OpString, litPos) - return p.newExpr(op, tok.pos, lit) -} - -func (p *Parser) precedenceOf(tok token) int { - switch tok.kind { - case tokPipe: - return 1 - case tokConcat, tokMinus: - return 2 - case tokPlus, tokStar, tokQuestion, tokRepeat: - return 3 - default: - return 0 - } -} - -func (p *Parser) newPCRE(source string) (*RegexpPCRE, error) { - if source == "" { - return nil, errors.New("empty pattern: can't find delimiters") - } - - delim := source[0] - endDelim := delim - switch delim { - case '(': - endDelim = ')' - case '{': - endDelim = '}' - case '[': - endDelim = ']' - case '<': - endDelim = '>' - case '\\': - return nil, errors.New("'\\' is not a valid delimiter") - default: - if isSpace(delim) { - return nil, errors.New("whitespace is not a valid delimiter") - } - if isAlphanumeric(delim) { - return nil, errors.New("'" + string(delim) + "' is not a valid delimiter") - } - } - - const delimLen = 1 - j := strings.LastIndexByte(source[delimLen:], endDelim) - if j == -1 { - return nil, errors.New("can't find '" + string(endDelim) + "' ending delimiter") - } - j += delimLen - - pcre := &RegexpPCRE{ - Pattern: source[delimLen:j], - Source: source, - Delim: [2]byte{delim, endDelim}, - Modifiers: source[j+delimLen:], - } - return pcre, nil -} - -var tok2op = [256]Operation{ - tokDollar: OpDollar, - tokCaret: OpCaret, - tokDot: OpDot, - tokChar: OpChar, - tokMinus: OpChar, - tokPosixClass: OpPosixClass, - tokComment: OpComment, -} diff --git a/vendor/github.com/quasilyte/regex/syntax/pos.go b/vendor/github.com/quasilyte/regex/syntax/pos.go deleted file mode 
100644 index 51bdbf87a..000000000 --- a/vendor/github.com/quasilyte/regex/syntax/pos.go +++ /dev/null @@ -1,10 +0,0 @@ -package syntax - -type Position struct { - Begin uint16 - End uint16 -} - -func combinePos(begin, end Position) Position { - return Position{Begin: begin.Begin, End: end.End} -} diff --git a/vendor/github.com/quasilyte/regex/syntax/tokenkind_string.go b/vendor/github.com/quasilyte/regex/syntax/tokenkind_string.go deleted file mode 100644 index 8800436bc..000000000 --- a/vendor/github.com/quasilyte/regex/syntax/tokenkind_string.go +++ /dev/null @@ -1,59 +0,0 @@ -// Code generated by "stringer -type=tokenKind -trimprefix=tok -linecomment=true"; DO NOT EDIT. - -package syntax - -import "strconv" - -func _() { - // An "invalid array index" compiler error signifies that the constant values have changed. - // Re-run the stringer command to generate them again. - var x [1]struct{} - _ = x[tokNone-0] - _ = x[tokChar-1] - _ = x[tokGroupFlags-2] - _ = x[tokPosixClass-3] - _ = x[tokConcat-4] - _ = x[tokRepeat-5] - _ = x[tokEscapeChar-6] - _ = x[tokEscapeMeta-7] - _ = x[tokEscapeOctal-8] - _ = x[tokEscapeUni-9] - _ = x[tokEscapeUniFull-10] - _ = x[tokEscapeHex-11] - _ = x[tokEscapeHexFull-12] - _ = x[tokComment-13] - _ = x[tokQ-14] - _ = x[tokMinus-15] - _ = x[tokLbracket-16] - _ = x[tokLbracketCaret-17] - _ = x[tokRbracket-18] - _ = x[tokDollar-19] - _ = x[tokCaret-20] - _ = x[tokQuestion-21] - _ = x[tokDot-22] - _ = x[tokPlus-23] - _ = x[tokStar-24] - _ = x[tokPipe-25] - _ = x[tokLparen-26] - _ = x[tokLparenName-27] - _ = x[tokLparenNameAngle-28] - _ = x[tokLparenNameQuote-29] - _ = x[tokLparenFlags-30] - _ = x[tokLparenAtomic-31] - _ = x[tokLparenPositiveLookahead-32] - _ = x[tokLparenPositiveLookbehind-33] - _ = x[tokLparenNegativeLookahead-34] - _ = x[tokLparenNegativeLookbehind-35] - _ = x[tokRparen-36] -} - -const _tokenKind_name = "NoneCharGroupFlagsPosixClassConcatRepeatEscapeCharEscapeMetaEscapeOctalEscapeUniEscapeUniFullEscapeHexEscapeHexFullComment\\Q-[[^]$^?.+*|((?P(?(?'name'(?flags(?>(?=(?<=(?!(?= tokenKind(len(_tokenKind_index)-1) { - return "tokenKind(" + strconv.FormatInt(int64(i), 10) + ")" - } - return _tokenKind_name[_tokenKind_index[i]:_tokenKind_index[i+1]] -} diff --git a/vendor/github.com/quasilyte/regex/syntax/utils.go b/vendor/github.com/quasilyte/regex/syntax/utils.go deleted file mode 100644 index e5b654825..000000000 --- a/vendor/github.com/quasilyte/regex/syntax/utils.go +++ /dev/null @@ -1,30 +0,0 @@ -package syntax - -func isSpace(ch byte) bool { - switch ch { - case '\r', '\n', '\t', '\f', '\v', ' ': - return true - default: - return false - } -} - -func isAlphanumeric(ch byte) bool { - return (ch >= 'a' && ch <= 'z') || - (ch >= 'A' && ch <= 'Z') || - (ch >= '0' && ch <= '9') -} - -func isDigit(ch byte) bool { - return ch >= '0' && ch <= '9' -} - -func isOctalDigit(ch byte) bool { - return ch >= '0' && ch <= '7' -} - -func isHexDigit(ch byte) bool { - return (ch >= '0' && ch <= '9') || - (ch >= 'a' && ch <= 'f') || - (ch >= 'A' && ch <= 'F') -} diff --git a/vendor/github.com/quasilyte/stdinfo/LICENSE b/vendor/github.com/quasilyte/stdinfo/LICENSE deleted file mode 100644 index 87a453862..000000000 --- a/vendor/github.com/quasilyte/stdinfo/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -MIT License - -Copyright (c) 2022 Iskander (Alex) Sharipov - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without 
limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. diff --git a/vendor/github.com/quasilyte/stdinfo/stdinfo.go b/vendor/github.com/quasilyte/stdinfo/stdinfo.go deleted file mode 100644 index 040f63445..000000000 --- a/vendor/github.com/quasilyte/stdinfo/stdinfo.go +++ /dev/null @@ -1,30 +0,0 @@ -package stdinfo - -type Package struct { - // Name is a package name. - // For "encoding/json" the package name is "json". - Name string - - // Path is a package path, like "encoding/json". - Path string - - // Freq is a package import frequency approximation. - // A value of -1 means "unknown". - Freq int -} - -// PathByName maps a std package name to its package path. -// -// For packages with multiple choices, like "template", -// only the more common one is accessible ("text/template" in this case). -// -// This map doesn't contain extremely rare packages either. -// Use PackageList variable if you want to construct a different mapping. -// -// It's exported as map to make it easier to re-use it in libraries -// without copying. -var PathByName = generatedPathByName - -// PackagesList is a list of std packages information. -// It's sorted by a package name. -var PackagesList = generatedPackagesList diff --git a/vendor/github.com/quasilyte/stdinfo/stdinfo_gen.go b/vendor/github.com/quasilyte/stdinfo/stdinfo_gen.go deleted file mode 100644 index ecfff9b6c..000000000 --- a/vendor/github.com/quasilyte/stdinfo/stdinfo_gen.go +++ /dev/null @@ -1,274 +0,0 @@ -// Code generated by "script/gen.go"; DO NOT EDIT. 
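A short sketch of how the PathByName table described above is typically consumed, assuming the upstream module github.com/quasilyte/stdinfo (the same package being removed from vendor/); the lookups shown match entries in the generated table below.

package main

import (
	"fmt"

	"github.com/quasilyte/stdinfo"
)

func main() {
	// Map a std package name to its import path, e.g. "json" -> "encoding/json".
	fmt.Println(stdinfo.PathByName["json"])
	// Names with multiple candidates resolve to the more common path,
	// e.g. "template" -> "text/template".
	fmt.Println(stdinfo.PathByName["template"])
}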
- -package stdinfo - -var generatedPathByName = map[string]string{ - "fmt": "fmt", // Freq=15795 - "testing": "testing", // Freq=12807 - "context": "context", // Freq=10797 - "time": "time", // Freq=8900 - "strings": "strings", // Freq=8852 - "os": "os", // Freq=5712 - "bytes": "bytes", // Freq=4129 - "io": "io", // Freq=3981 - "http": "net/http", // Freq=3691 - "sync": "sync", // Freq=3492 - "errors": "errors", // Freq=3107 - "strconv": "strconv", // Freq=3076 - "reflect": "reflect", // Freq=3025 - "filepath": "path/filepath", // Freq=2843 - "json": "encoding/json", // Freq=2537 - "sort": "sort", // Freq=2382 - "ioutil": "io/ioutil", // Freq=2164 - "net": "net", // Freq=2025 - "math": "math", // Freq=1746 - "url": "net/url", // Freq=1411 - "regexp": "regexp", // Freq=1320 - "runtime": "runtime", // Freq=1318 - "log": "log", // Freq=1149 - "flag": "flag", // Freq=1002 - "path": "path", // Freq=993 - "unsafe": "unsafe", // Freq=992 - "rand": "math/rand", // Freq=981 - "syscall": "syscall", // Freq=902 - "atomic": "sync/atomic", // Freq=804 - "bufio": "bufio", // Freq=695 - "httptest": "net/http/httptest", // Freq=676 - "exec": "os/exec", // Freq=676 - "binary": "encoding/binary", // Freq=476 - "tls": "crypto/tls", // Freq=475 - "token": "go/token", // Freq=471 - "utf8": "unicode/utf8", // Freq=404 - "base64": "encoding/base64", // Freq=383 - "ast": "go/ast", // Freq=373 - "x509": "crypto/x509", // Freq=357 - "hex": "encoding/hex", // Freq=340 - "unicode": "unicode", // Freq=309 - "types": "go/types", // Freq=309 - "big": "math/big", // Freq=230 - "sha256": "crypto/sha256", // Freq=227 - "template": "text/template", // Freq=211 - "fs": "io/fs", // Freq=162 - "parser": "go/parser", // Freq=160 - "sql": "database/sql", // Freq=157 - "gzip": "compress/gzip", // Freq=150 - "signal": "os/signal", // Freq=139 - "pem": "encoding/pem", // Freq=137 - "hash": "hash", // Freq=137 - "crypto": "crypto", // Freq=132 - "build": "go/build", // Freq=121 - "debug": "runtime/debug", // Freq=121 - "bits": "math/bits", // Freq=120 - "constant": "go/constant", // Freq=120 - "xml": "encoding/xml", // Freq=118 - "tabwriter": "text/tabwriter", // Freq=116 - "md5": "crypto/md5", // Freq=110 - "rsa": "crypto/rsa", // Freq=103 - "format": "go/format", // Freq=88 - "sha1": "crypto/sha1", // Freq=85 - "driver": "database/sql/driver", // Freq=81 - "pkix": "crypto/x509/pkix", // Freq=80 - "heap": "container/heap", // Freq=78 - "tar": "archive/tar", // Freq=77 - "ecdsa": "crypto/ecdsa", // Freq=75 - "cipher": "crypto/cipher", // Freq=74 - "crc32": "hash/crc32", // Freq=70 - "gob": "encoding/gob", // Freq=65 - "elliptic": "crypto/elliptic", // Freq=60 - "subtle": "crypto/subtle", // Freq=54 - "zip": "archive/zip", // Freq=54 - "aes": "crypto/aes", // Freq=53 - "mime": "mime", // Freq=51 - "pprof": "runtime/pprof", // Freq=47 - "textproto": "net/textproto", // Freq=47 - "image": "image", // Freq=45 - "fnv": "hash/fnv", // Freq=45 - "hmac": "crypto/hmac", // Freq=45 - "httputil": "net/http/httputil", // Freq=44 - "elf": "debug/elf", // Freq=44 - "encoding": "encoding", // Freq=41 - "sha512": "crypto/sha512", // Freq=41 - "cmplx": "math/cmplx", // Freq=40 - "color": "image/color", // Freq=38 - "html": "html", // Freq=37 - "expvar": "expvar", // Freq=34 - "embed": "embed", // Freq=32 - "csv": "encoding/csv", // Freq=31 - "importer": "go/importer", // Freq=31 - "multipart": "mime/multipart", // Freq=30 - "printer": "go/printer", // Freq=27 - "syslog": "log/syslog", // Freq=27 - "asn1": "encoding/asn1", // Freq=27 - "list": 
"container/list", // Freq=27 - "scanner": "go/scanner", // Freq=25 - "ed25519": "crypto/ed25519", // Freq=25 - "dwarf": "debug/dwarf", // Freq=23 - "flate": "compress/flate", // Freq=22 - "zlib": "compress/zlib", // Freq=21 - "png": "image/png", // Freq=20 - "trace": "runtime/trace", // Freq=20 - "httptrace": "net/http/httptrace", // Freq=19 - "utf16": "unicode/utf16", // Freq=19 - "rpc": "net/rpc", // Freq=19 - "macho": "debug/macho", // Freq=16 - "iotest": "testing/iotest", // Freq=15 - "dsa": "crypto/dsa", // Freq=13 - "parse": "text/template/parse", // Freq=13 - "cookiejar": "net/http/cookiejar", // Freq=12 - "fstest": "testing/fstest", // Freq=11 - "jpeg": "image/jpeg", // Freq=11 -} - -var generatedPackagesList = []Package{ - {Name: "adler32", Path: "hash/adler32", Freq: 7}, - {Name: "aes", Path: "crypto/aes", Freq: 53}, - {Name: "ascii85", Path: "encoding/ascii85", Freq: -1}, - {Name: "asn1", Path: "encoding/asn1", Freq: 27}, - {Name: "ast", Path: "go/ast", Freq: 373}, - {Name: "atomic", Path: "sync/atomic", Freq: 804}, - {Name: "base32", Path: "encoding/base32", Freq: 5}, - {Name: "base64", Path: "encoding/base64", Freq: 383}, - {Name: "big", Path: "math/big", Freq: 230}, - {Name: "binary", Path: "encoding/binary", Freq: 476}, - {Name: "bits", Path: "math/bits", Freq: 120}, - {Name: "bufio", Path: "bufio", Freq: 695}, - {Name: "build", Path: "go/build", Freq: 121}, - {Name: "bytes", Path: "bytes", Freq: 4129}, - {Name: "bzip2", Path: "compress/bzip2", Freq: 7}, - {Name: "cgi", Path: "net/http/cgi", Freq: 1}, - {Name: "cgo", Path: "runtime/cgo", Freq: -1}, - {Name: "cipher", Path: "crypto/cipher", Freq: 74}, - {Name: "cmplx", Path: "math/cmplx", Freq: 40}, - {Name: "color", Path: "image/color", Freq: 38}, - {Name: "constant", Path: "go/constant", Freq: 120}, - {Name: "constraint", Path: "go/build/constraint", Freq: 5}, - {Name: "context", Path: "context", Freq: 10797}, - {Name: "cookiejar", Path: "net/http/cookiejar", Freq: 12}, - {Name: "crc32", Path: "hash/crc32", Freq: 70}, - {Name: "crc64", Path: "hash/crc64", Freq: 3}, - {Name: "crypto", Path: "crypto", Freq: 132}, - {Name: "csv", Path: "encoding/csv", Freq: 31}, - {Name: "debug", Path: "runtime/debug", Freq: 121}, - {Name: "des", Path: "crypto/des", Freq: 8}, - {Name: "doc", Path: "go/doc", Freq: 15}, - {Name: "draw", Path: "image/draw", Freq: 7}, - {Name: "driver", Path: "database/sql/driver", Freq: 81}, - {Name: "dsa", Path: "crypto/dsa", Freq: 13}, - {Name: "dwarf", Path: "debug/dwarf", Freq: 23}, - {Name: "ecdsa", Path: "crypto/ecdsa", Freq: 75}, - {Name: "ed25519", Path: "crypto/ed25519", Freq: 25}, - {Name: "elf", Path: "debug/elf", Freq: 44}, - {Name: "elliptic", Path: "crypto/elliptic", Freq: 60}, - {Name: "embed", Path: "embed", Freq: 32}, - {Name: "encoding", Path: "encoding", Freq: 41}, - {Name: "errors", Path: "errors", Freq: 3107}, - {Name: "exec", Path: "os/exec", Freq: 676}, - {Name: "expvar", Path: "expvar", Freq: 34}, - {Name: "fcgi", Path: "net/http/fcgi", Freq: 2}, - {Name: "filepath", Path: "path/filepath", Freq: 2843}, - {Name: "flag", Path: "flag", Freq: 1002}, - {Name: "flate", Path: "compress/flate", Freq: 22}, - {Name: "fmt", Path: "fmt", Freq: 15795}, - {Name: "fnv", Path: "hash/fnv", Freq: 45}, - {Name: "format", Path: "go/format", Freq: 88}, - {Name: "fs", Path: "io/fs", Freq: 162}, - {Name: "fstest", Path: "testing/fstest", Freq: 11}, - {Name: "gif", Path: "image/gif", Freq: 5}, - {Name: "gob", Path: "encoding/gob", Freq: 65}, - {Name: "gosym", Path: "debug/gosym", Freq: 3}, - {Name: "gzip", Path: 
"compress/gzip", Freq: 150}, - {Name: "hash", Path: "hash", Freq: 137}, - {Name: "heap", Path: "container/heap", Freq: 78}, - {Name: "hex", Path: "encoding/hex", Freq: 340}, - {Name: "hmac", Path: "crypto/hmac", Freq: 45}, - {Name: "html", Path: "html", Freq: 37}, - {Name: "http", Path: "net/http", Freq: 3691}, - {Name: "httptest", Path: "net/http/httptest", Freq: 676}, - {Name: "httptrace", Path: "net/http/httptrace", Freq: 19}, - {Name: "httputil", Path: "net/http/httputil", Freq: 44}, - {Name: "image", Path: "image", Freq: 45}, - {Name: "importer", Path: "go/importer", Freq: 31}, - {Name: "io", Path: "io", Freq: 3981}, - {Name: "iotest", Path: "testing/iotest", Freq: 15}, - {Name: "ioutil", Path: "io/ioutil", Freq: 2164}, - {Name: "jpeg", Path: "image/jpeg", Freq: 11}, - {Name: "json", Path: "encoding/json", Freq: 2537}, - {Name: "jsonrpc", Path: "net/rpc/jsonrpc", Freq: -1}, - {Name: "list", Path: "container/list", Freq: 27}, - {Name: "log", Path: "log", Freq: 1149}, - {Name: "lzw", Path: "compress/lzw", Freq: 3}, - {Name: "macho", Path: "debug/macho", Freq: 16}, - {Name: "mail", Path: "net/mail", Freq: 7}, - {Name: "maphash", Path: "hash/maphash", Freq: 1}, - {Name: "math", Path: "math", Freq: 1746}, - {Name: "md5", Path: "crypto/md5", Freq: 110}, - {Name: "metrics", Path: "runtime/metrics", Freq: 3}, - {Name: "mime", Path: "mime", Freq: 51}, - {Name: "multipart", Path: "mime/multipart", Freq: 30}, - {Name: "net", Path: "net", Freq: 2025}, - {Name: "os", Path: "os", Freq: 5712}, - {Name: "palette", Path: "image/color/palette", Freq: 4}, - {Name: "parse", Path: "text/template/parse", Freq: 13}, - {Name: "parser", Path: "go/parser", Freq: 160}, - {Name: "path", Path: "path", Freq: 993}, - {Name: "pe", Path: "debug/pe", Freq: 12}, - {Name: "pem", Path: "encoding/pem", Freq: 137}, - {Name: "pkix", Path: "crypto/x509/pkix", Freq: 80}, - {Name: "plan9obj", Path: "debug/plan9obj", Freq: 1}, - {Name: "plugin", Path: "plugin", Freq: 4}, - {Name: "png", Path: "image/png", Freq: 20}, - {Name: "pprof", Path: "runtime/pprof", Freq: 47}, - {Name: "pprof", Path: "net/http/pprof", Freq: 33}, - {Name: "printer", Path: "go/printer", Freq: 27}, - {Name: "quick", Path: "testing/quick", Freq: 51}, - {Name: "quotedprintable", Path: "mime/quotedprintable", Freq: 2}, - {Name: "race", Path: "runtime/race", Freq: -1}, - {Name: "rand", Path: "math/rand", Freq: 981}, - {Name: "rand", Path: "crypto/rand", Freq: 256}, - {Name: "rc4", Path: "crypto/rc4", Freq: 3}, - {Name: "reflect", Path: "reflect", Freq: 3025}, - {Name: "regexp", Path: "regexp", Freq: 1320}, - {Name: "ring", Path: "container/ring", Freq: 2}, - {Name: "rpc", Path: "net/rpc", Freq: 19}, - {Name: "rsa", Path: "crypto/rsa", Freq: 103}, - {Name: "runtime", Path: "runtime", Freq: 1318}, - {Name: "scanner", Path: "text/scanner", Freq: 23}, - {Name: "scanner", Path: "go/scanner", Freq: 25}, - {Name: "sha1", Path: "crypto/sha1", Freq: 85}, - {Name: "sha256", Path: "crypto/sha256", Freq: 227}, - {Name: "sha512", Path: "crypto/sha512", Freq: 41}, - {Name: "signal", Path: "os/signal", Freq: 139}, - {Name: "smtp", Path: "net/smtp", Freq: 6}, - {Name: "sort", Path: "sort", Freq: 2382}, - {Name: "sql", Path: "database/sql", Freq: 157}, - {Name: "strconv", Path: "strconv", Freq: 3076}, - {Name: "strings", Path: "strings", Freq: 8852}, - {Name: "subtle", Path: "crypto/subtle", Freq: 54}, - {Name: "suffixarray", Path: "index/suffixarray", Freq: 2}, - {Name: "sync", Path: "sync", Freq: 3492}, - {Name: "syntax", Path: "regexp/syntax", Freq: 11}, - {Name: "syscall", 
Path: "syscall", Freq: 902}, - {Name: "syslog", Path: "log/syslog", Freq: 27}, - {Name: "tabwriter", Path: "text/tabwriter", Freq: 116}, - {Name: "tar", Path: "archive/tar", Freq: 77}, - {Name: "template", Path: "html/template", Freq: 173}, - {Name: "template", Path: "text/template", Freq: 211}, - {Name: "testing", Path: "testing", Freq: 12807}, - {Name: "textproto", Path: "net/textproto", Freq: 47}, - {Name: "time", Path: "time", Freq: 8900}, - {Name: "tls", Path: "crypto/tls", Freq: 475}, - {Name: "token", Path: "go/token", Freq: 471}, - {Name: "trace", Path: "runtime/trace", Freq: 20}, - {Name: "types", Path: "go/types", Freq: 309}, - {Name: "tzdata", Path: "time/tzdata", Freq: 6}, - {Name: "unicode", Path: "unicode", Freq: 309}, - {Name: "unsafe", Path: "unsafe", Freq: 992}, - {Name: "url", Path: "net/url", Freq: 1411}, - {Name: "user", Path: "os/user", Freq: 51}, - {Name: "utf16", Path: "unicode/utf16", Freq: 19}, - {Name: "utf8", Path: "unicode/utf8", Freq: 404}, - {Name: "x509", Path: "crypto/x509", Freq: 357}, - {Name: "xml", Path: "encoding/xml", Freq: 118}, - {Name: "zip", Path: "archive/zip", Freq: 54}, - {Name: "zlib", Path: "compress/zlib", Freq: 21}, -} diff --git a/vendor/github.com/ryancurrah/gomodguard/.dockerignore b/vendor/github.com/ryancurrah/gomodguard/.dockerignore deleted file mode 100644 index 77738287f..000000000 --- a/vendor/github.com/ryancurrah/gomodguard/.dockerignore +++ /dev/null @@ -1 +0,0 @@ -dist/ \ No newline at end of file diff --git a/vendor/github.com/ryancurrah/gomodguard/.gitignore b/vendor/github.com/ryancurrah/gomodguard/.gitignore deleted file mode 100644 index 4ebc79c5d..000000000 --- a/vendor/github.com/ryancurrah/gomodguard/.gitignore +++ /dev/null @@ -1,25 +0,0 @@ -# Binaries for programs and plugins -*.exe -*.exe~ -*.dll -*.so -*.dylib - -# Test binary, built with `go test -c` -*.test - -# Output of the go coverage tool, specifically when used with LiteIDE -*.out - -# Dependency directories (remove the comment below to include it) -# vendor/ - -/gomodguard - -*.xml - -dist/ - -coverage.* - -.idea/ diff --git a/vendor/github.com/ryancurrah/gomodguard/.golangci.yml b/vendor/github.com/ryancurrah/gomodguard/.golangci.yml deleted file mode 100644 index a0e6fd55e..000000000 --- a/vendor/github.com/ryancurrah/gomodguard/.golangci.yml +++ /dev/null @@ -1,111 +0,0 @@ -# See https://golangci-lint.run/usage/configuration/ - -linters-settings: - revive: - # see https://github.com/mgechev/revive#available-rules for details. 
- ignore-generated-header: true - severity: warning - rules: - - name: atomic - - name: blank-imports - - name: bool-literal-in-expr - - name: call-to-gc - - name: confusing-naming - - name: confusing-results - - name: constant-logical-expr - - name: context-as-argument - - name: context-keys-type - - name: deep-exit - - name: defer - - name: dot-imports - - name: duplicated-imports - - name: early-return - - name: empty-block - - name: empty-lines - - name: error-naming - - name: error-return - - name: error-strings - - name: errorf - - name: exported - - name: get-return - - name: identical-branches - - name: if-return - - name: import-shadowing - - name: increment-decrement - - name: indent-error-flow - - name: modifies-parameter - - name: modifies-value-receiver - - name: package-comments - - name: range - - name: range-val-address - - name: range-val-in-closure - - name: receiver-naming - - name: redefines-builtin-id - - name: string-of-int - - name: struct-tag - - name: superfluous-else - - name: time-naming - - name: unconditional-recursion - - name: unexported-naming - - name: unexported-return - - name: unnecessary-stmt - - name: unreachable-code - - name: unused-parameter - - name: var-declaration - - name: var-naming - - name: waitgroup-by-value - -linters: - disable-all: true - enable: - - asciicheck - - bodyclose - - dogsled - - dupl - - durationcheck - - errcheck - - errorlint - - exhaustive - - exportloopref - - forcetypeassert - - funlen - - gochecknoinits - - gocognit - - goconst - - gocritic - - gocyclo - - godot - - godox - - goimports - - gomoddirectives - - gomodguard - - goprintffuncname - - gosec - - gosimple - - govet - - importas - - ineffassign - - lll - - makezero - - misspell - - nakedret - - nestif - - nilerr - - noctx - - nolintlint - - prealloc - - predeclared - - revive - - rowserrcheck - - sqlclosecheck - - staticcheck - - stylecheck - - testpackage - - thelper - - tparallel - - typecheck - - unconvert - - unparam - - unused - - whitespace - - wsl diff --git a/vendor/github.com/ryancurrah/gomodguard/.goreleaser.yml b/vendor/github.com/ryancurrah/gomodguard/.goreleaser.yml deleted file mode 100644 index f3675a9c2..000000000 --- a/vendor/github.com/ryancurrah/gomodguard/.goreleaser.yml +++ /dev/null @@ -1,28 +0,0 @@ -builds: -- main: ./cmd/gomodguard/main.go - env: - - CGO_ENABLED=0 -archives: -- name_template: >- - {{ .ProjectName }}_ - {{- title .Os }}_ - {{- if eq .Arch "amd64" }}x86_64 - {{- else if eq .Arch "386" }}i386 - {{- else }}{{ .Arch }}{{ end }} -checksum: - name_template: 'checksums.txt' -dockers: -- goos: linux - goarch: amd64 - image_templates: - - "ryancurrah/gomodguard:latest" - - "ryancurrah/gomodguard:{{.Tag}}" - skip_push: false - dockerfile: Dockerfile.goreleaser - build_flag_templates: - - "--pull" - - "--label=org.opencontainers.image.created={{.Date}}" - - "--label=org.opencontainers.image.name={{.ProjectName}}" - - "--label=org.opencontainers.image.revision={{.FullCommit}}" - - "--label=org.opencontainers.image.version={{.Version}}" - - "--label=org.opencontainers.image.source={{.GitURL}}" diff --git a/vendor/github.com/ryancurrah/gomodguard/Dockerfile b/vendor/github.com/ryancurrah/gomodguard/Dockerfile deleted file mode 100644 index 2f1d3340c..000000000 --- a/vendor/github.com/ryancurrah/gomodguard/Dockerfile +++ /dev/null @@ -1,13 +0,0 @@ -# ---- Build container -FROM golang:alpine AS builder -WORKDIR /gomodguard -COPY . . 
-RUN apk add --no-cache git -RUN go build -o gomodguard cmd/gomodguard/main.go - -# ---- App container -FROM golang:alpine -WORKDIR / -RUN apk --no-cache add ca-certificates -COPY --from=builder gomodguard/gomodguard / -ENTRYPOINT ./gomodguard diff --git a/vendor/github.com/ryancurrah/gomodguard/Dockerfile.goreleaser b/vendor/github.com/ryancurrah/gomodguard/Dockerfile.goreleaser deleted file mode 100644 index ccaaa8959..000000000 --- a/vendor/github.com/ryancurrah/gomodguard/Dockerfile.goreleaser +++ /dev/null @@ -1,6 +0,0 @@ -# ---- App container -FROM golang:alpine -WORKDIR / -RUN apk --no-cache add ca-certificates -COPY gomodguard /gomodguard -ENTRYPOINT ./gomodguard diff --git a/vendor/github.com/ryancurrah/gomodguard/LICENSE b/vendor/github.com/ryancurrah/gomodguard/LICENSE deleted file mode 100644 index acd8a81e1..000000000 --- a/vendor/github.com/ryancurrah/gomodguard/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -MIT License - -Copyright (c) 2020 Ryan Currah - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. diff --git a/vendor/github.com/ryancurrah/gomodguard/Makefile b/vendor/github.com/ryancurrah/gomodguard/Makefile deleted file mode 100644 index 5235d5aad..000000000 --- a/vendor/github.com/ryancurrah/gomodguard/Makefile +++ /dev/null @@ -1,46 +0,0 @@ -current_dir = $(shell pwd) - -.PHONY: lint -lint: - golangci-lint run ./... 
- -.PHONY: build -build: - go build -o gomodguard cmd/gomodguard/main.go - -.PHONY: run -run: build - ./gomodguard - -.PHONY: test -test: - go test -v -coverprofile coverage.out - -.PHONY: cover -cover: - gocover-cobertura < coverage.out > coverage.xml - -.PHONY: dockerrun -dockerrun: dockerbuild - docker run -v "${current_dir}/.gomodguard.yaml:/.gomodguard.yaml" ryancurrah/gomodguard:latest - -.PHONY: snapshot -snapshot: - goreleaser --rm-dist --snapshot - -.PHONY: release -release: - goreleaser --rm-dist - -.PHONY: clean -clean: - rm -rf dist/ - rm -f gomodguard coverage.xml coverage.out - -.PHONY: install-mac-tools -install-tools-mac: - brew install goreleaser/tap/goreleaser - -.PHONY: install-go-tools -install-go-tools: - go install -v github.com/t-yuki/gocover-cobertura diff --git a/vendor/github.com/ryancurrah/gomodguard/README.md b/vendor/github.com/ryancurrah/gomodguard/README.md deleted file mode 100644 index 4945f0101..000000000 --- a/vendor/github.com/ryancurrah/gomodguard/README.md +++ /dev/null @@ -1,131 +0,0 @@ -# gomodguard -[![License](https://img.shields.io/github/license/ryancurrah/gomodguard?style=flat-square)](/LICENSE) -[![Codecov](https://img.shields.io/codecov/c/gh/ryancurrah/gomodguard?style=flat-square)](https://codecov.io/gh/ryancurrah/gomodguard) -[![GitHub Workflow Status](https://img.shields.io/github/workflow/status/ryancurrah/gomodguard/Go?logo=Go&style=flat-square)](https://github.com/ryancurrah/gomodguard/actions?query=workflow%3AGo) -[![GitHub release (latest SemVer)](https://img.shields.io/github/v/release/ryancurrah/gomodguard?style=flat-square)](https://github.com/ryancurrah/gomodguard/releases/latest) -[![Docker](https://img.shields.io/docker/pulls/ryancurrah/gomodguard?style=flat-square)](https://hub.docker.com/r/ryancurrah/gomodguard) -[![Github Releases Stats of golangci-lint](https://img.shields.io/github/downloads/ryancurrah/gomodguard/total.svg?logo=github&style=flat-square)](https://somsubhra.com/github-release-stats/?username=ryancurrah&repository=gomodguard) - - - -Allow and block list linter for direct Go module dependencies. This is useful for organizations where they want to standardize on the modules used and be able to recommend alternative modules. - -## Description - -Allowed and blocked modules are defined in a `./.gomodguard.yaml` or `~/.gomodguard.yaml` file. - -Modules can be allowed by module or domain name. When allowed modules are specified any modules not in the allowed configuration are blocked. - -If no allowed modules or domains are specified then all modules are allowed except for blocked ones. - -The linter looks for blocked modules in `go.mod` and searches for imported packages where the imported packages module is blocked. Indirect modules are not considered. - -Alternative modules can be optionally recommended in the blocked modules list. - -If the linted module imports a blocked module but the linted module is in the recommended modules list the blocked module is ignored. Usually, this means the linted module wraps that blocked module for use by other modules, therefore the import of the blocked module should not be blocked. - -Version constraints can be specified for modules as well which lets you block new or old versions of modules or specific versions. - -Results are printed to `stdout`. - -Logging statements are printed to `stderr`. - -Results can be exported to different report formats. Which can be imported into CI tools. See the help section for more information. 
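To make the allow/block rules described above concrete, here is a minimal, self-contained sketch of the decision order as the description states it: with no allow lists configured, every module is allowed; otherwise a module must be listed explicitly or fall under an allowed domain, and the blocked lists are consulted separately. This is an illustration only, not gomodguard's own code (its `allowed.go` and `processor.go` appear later in this diff).

```go
package main

import (
	"fmt"
	"strings"
)

// isAllowed mirrors the documented rule: when no allow lists are configured,
// every module is allowed; otherwise the module must be listed explicitly or
// start with an allowed domain. Blocked-module checks would run separately.
func isAllowed(module string, allowedModules, allowedDomains []string) bool {
	if len(allowedModules) == 0 && len(allowedDomains) == 0 {
		return true
	}
	for _, m := range allowedModules {
		if strings.TrimSpace(module) == strings.TrimSpace(m) {
			return true
		}
	}
	for _, d := range allowedDomains {
		if strings.HasPrefix(strings.ToLower(module), strings.ToLower(d)) {
			return true
		}
	}
	return false
}

func main() {
	mods := []string{"gopkg.in/yaml.v2"}
	domains := []string{"golang.org"}

	fmt.Println(isAllowed("golang.org/x/mod", mods, domains))      // true: matches an allowed domain
	fmt.Println(isAllowed("github.com/gofrs/uuid", mods, domains)) // false: in neither list
}
```

Running the sketch prints `true` then `false`, matching the behaviour the Example section below shows for `github.com/gofrs/uuid`.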
- -## Configuration - -```yaml -allowed: - modules: # List of allowed modules - - gopkg.in/yaml.v2 - - github.com/go-xmlfmt/xmlfmt - - github.com/phayes/checkstyle - - github.com/mitchellh/go-homedir - domains: # List of allowed module domains - - golang.org - -blocked: - modules: # List of blocked modules - - github.com/uudashr/go-module: # Blocked module - recommendations: # Recommended modules that should be used instead (Optional) - - golang.org/x/mod - reason: "`mod` is the official go.mod parser library." # Reason why the recommended module should be used (Optional) - versions: # List of blocked module version constraints. - - github.com/mitchellh/go-homedir: # Blocked module with version constraint. - version: "<= 1.1.0" # Version constraint, see https://github.com/Masterminds/semver#basic-comparisons. - reason: "testing if blocked version constraint works." # Reason why the version constraint exists. -``` - -## Usage - -``` -╰─ ./gomodguard -h -Usage: gomodguard [files...] -Also supports package syntax but will use it in relative path, i.e. ./pkg/... -Flags: - -f string - Report results to the specified file. A report type must also be specified - -file string - - -h Show this help text - -help - - -i int - Exit code when issues were found (default 2) - -issues-exit-code int - (default 2) - - -n Don't lint test files - -no-test - - -r string - Report results to one of the following formats: checkstyle. A report file destination must also be specified - -report string -``` - -## Example - -``` -╰─ ./gomodguard -r checkstyle -f gomodguard-checkstyle.xml ./... - -info: allowed modules, [gopkg.in/yaml.v2 github.com/go-xmlfmt/xmlfmt github.com/phayes/checkstyle github.com/mitchellh/go-homedir] -info: allowed module domains, [golang.org] -info: blocked modules, [github.com/uudashr/go-module] -info: found `2` blocked modules in the go.mod file, [github.com/gofrs/uuid github.com/uudashr/go-module] -blocked_example.go:6: import of package `github.com/gofrs/uuid` is blocked because the module is not in the allowed modules list. -blocked_example.go:7: import of package `github.com/uudashr/go-module` is blocked because the module is in the blocked modules list. `golang.org/x/mod` is a recommended module. `mod` is the official go.mod parser library. -``` - -Resulting checkstyle file - -``` -╰─ cat gomodguard-checkstyle.xml - - - - - - - - - - -``` - -## Install - -``` -go install github.com/ryancurrah/gomodguard/cmd/gomodguard -``` - -## Develop - -``` -git clone https://github.com/ryancurrah/gomodguard.git && cd gomodguard - -go build -o gomodguard cmd/gomodguard/main.go -``` - -## License - -**MIT** diff --git a/vendor/github.com/ryancurrah/gomodguard/allowed.go b/vendor/github.com/ryancurrah/gomodguard/allowed.go deleted file mode 100644 index 5b0d26f83..000000000 --- a/vendor/github.com/ryancurrah/gomodguard/allowed.go +++ /dev/null @@ -1,39 +0,0 @@ -package gomodguard - -import "strings" - -// Allowed is a list of modules and module -// domains that are allowed to be used. -type Allowed struct { - Modules []string `yaml:"modules"` - Domains []string `yaml:"domains"` -} - -// IsAllowedModule returns true if the given module -// name is in the allowed modules list. 
-func (a *Allowed) IsAllowedModule(moduleName string) bool { - allowedModules := a.Modules - - for i := range allowedModules { - if strings.TrimSpace(moduleName) == strings.TrimSpace(allowedModules[i]) { - return true - } - } - - return false -} - -// IsAllowedModuleDomain returns true if the given modules domain is -// in the allowed module domains list. -func (a *Allowed) IsAllowedModuleDomain(moduleName string) bool { - allowedDomains := a.Domains - - for i := range allowedDomains { - if strings.HasPrefix(strings.TrimSpace(strings.ToLower(moduleName)), - strings.TrimSpace(strings.ToLower(allowedDomains[i]))) { - return true - } - } - - return false -} diff --git a/vendor/github.com/ryancurrah/gomodguard/blocked.go b/vendor/github.com/ryancurrah/gomodguard/blocked.go deleted file mode 100644 index 2a6e5c215..000000000 --- a/vendor/github.com/ryancurrah/gomodguard/blocked.go +++ /dev/null @@ -1,189 +0,0 @@ -package gomodguard - -import ( - "fmt" - "strings" - - "github.com/Masterminds/semver" -) - -// Blocked is a list of modules that are -// blocked and not to be used. -type Blocked struct { - Modules BlockedModules `yaml:"modules"` - Versions BlockedVersions `yaml:"versions"` - LocalReplaceDirectives bool `yaml:"local_replace_directives"` -} - -// BlockedVersion has a version constraint a reason why the the module version is blocked. -type BlockedVersion struct { - Version string `yaml:"version"` - Reason string `yaml:"reason"` -} - -// IsLintedModuleVersionBlocked returns true if a version constraint is specified and the -// linted module version matches the constraint. -func (r *BlockedVersion) IsLintedModuleVersionBlocked(lintedModuleVersion string) bool { - if r.Version == "" { - return false - } - - constraint, err := semver.NewConstraint(r.Version) - if err != nil { - return false - } - - version, err := semver.NewVersion(lintedModuleVersion) - if err != nil { - return false - } - - meet := constraint.Check(version) - - return meet -} - -// Message returns the reason why the module version is blocked. -func (r *BlockedVersion) Message(lintedModuleVersion string) string { - var sb strings.Builder - - // Add version contraint to message. - _, _ = fmt.Fprintf(&sb, "version `%s` is blocked because it does not meet the version constraint `%s`.", - lintedModuleVersion, r.Version) - - if r.Reason == "" { - return sb.String() - } - - // Add reason to message. - _, _ = fmt.Fprintf(&sb, " %s.", strings.TrimRight(r.Reason, ".")) - - return sb.String() -} - -// BlockedModule has alternative modules to use and a reason why the module is blocked. -type BlockedModule struct { - Recommendations []string `yaml:"recommendations"` - Reason string `yaml:"reason"` -} - -// IsCurrentModuleARecommendation returns true if the current module is in the Recommendations list. -// -// If the current go.mod file being linted is a recommended module of a -// blocked module and it imports that blocked module, do not set as blocked. -// This could mean that the linted module is a wrapper for that blocked module. -func (r *BlockedModule) IsCurrentModuleARecommendation(currentModuleName string) bool { - if r == nil { - return false - } - - for n := range r.Recommendations { - if strings.TrimSpace(currentModuleName) == strings.TrimSpace(r.Recommendations[n]) { - return true - } - } - - return false -} - -// Message returns the reason why the module is blocked and a list of recommended modules if provided. 
-func (r *BlockedModule) Message() string { - var sb strings.Builder - - // Add recommendations to message - for i := range r.Recommendations { - switch { - case len(r.Recommendations) == 1: - _, _ = fmt.Fprintf(&sb, "`%s` is a recommended module.", r.Recommendations[i]) - case (i+1) != len(r.Recommendations) && (i+1) == (len(r.Recommendations)-1): - _, _ = fmt.Fprintf(&sb, "`%s` ", r.Recommendations[i]) - case (i + 1) != len(r.Recommendations): - _, _ = fmt.Fprintf(&sb, "`%s`, ", r.Recommendations[i]) - default: - _, _ = fmt.Fprintf(&sb, "and `%s` are recommended modules.", r.Recommendations[i]) - } - } - - if r.Reason == "" { - return sb.String() - } - - // Add reason to message - if sb.Len() == 0 { - _, _ = fmt.Fprintf(&sb, "%s.", strings.TrimRight(r.Reason, ".")) - } else { - _, _ = fmt.Fprintf(&sb, " %s.", strings.TrimRight(r.Reason, ".")) - } - - return sb.String() -} - -// HasRecommendations returns true if the blocked package has -// recommended modules. -func (r *BlockedModule) HasRecommendations() bool { - if r == nil { - return false - } - - return len(r.Recommendations) > 0 -} - -// BlockedVersions a list of blocked modules by a version constraint. -type BlockedVersions []map[string]BlockedVersion - -// Get returns the module names that are blocked. -func (b BlockedVersions) Get() []string { - modules := make([]string, len(b)) - - for n := range b { - for module := range b[n] { - modules[n] = module - break - } - } - - return modules -} - -// GetBlockReason returns a block version if one is set for the provided linted module name. -func (b BlockedVersions) GetBlockReason(lintedModuleName string) *BlockedVersion { - for _, blockedModule := range b { - for blockedModuleName, blockedVersion := range blockedModule { - if strings.TrimSpace(lintedModuleName) == strings.TrimSpace(blockedModuleName) { - return &blockedVersion - } - } - } - - return nil -} - -// BlockedModules a list of blocked modules. -type BlockedModules []map[string]BlockedModule - -// Get returns the module names that are blocked. -func (b BlockedModules) Get() []string { - modules := make([]string, len(b)) - - for n := range b { - for module := range b[n] { - modules[n] = module - break - } - } - - return modules -} - -// GetBlockReason returns a block module if one is set for the provided linted module name. -func (b BlockedModules) GetBlockReason(lintedModuleName string) *BlockedModule { - for _, blockedModule := range b { - for blockedModuleName, blockedModule := range blockedModule { - if strings.TrimSpace(lintedModuleName) == strings.TrimSpace(blockedModuleName) { - return &blockedModule - } - } - } - - return nil -} diff --git a/vendor/github.com/ryancurrah/gomodguard/issue.go b/vendor/github.com/ryancurrah/gomodguard/issue.go deleted file mode 100644 index d60fc3a86..000000000 --- a/vendor/github.com/ryancurrah/gomodguard/issue.go +++ /dev/null @@ -1,20 +0,0 @@ -package gomodguard - -import ( - "fmt" - "go/token" -) - -// Issue represents the result of one error. -type Issue struct { - FileName string - LineNumber int - Position token.Position - Reason string -} - -// String returns the filename, line -// number and reason of a Issue. 
-func (r *Issue) String() string { - return fmt.Sprintf("%s:%d:1 %s", r.FileName, r.LineNumber, r.Reason) -} diff --git a/vendor/github.com/ryancurrah/gomodguard/processor.go b/vendor/github.com/ryancurrah/gomodguard/processor.go deleted file mode 100644 index 8457e3b07..000000000 --- a/vendor/github.com/ryancurrah/gomodguard/processor.go +++ /dev/null @@ -1,291 +0,0 @@ -package gomodguard - -import ( - "bytes" - "encoding/json" - "errors" - "fmt" - "go/parser" - "go/token" - "os" - "os/exec" - "regexp" - "strings" - - "golang.org/x/mod/modfile" -) - -const ( - goModFilename = "go.mod" - errReadingGoModFile = "unable to read module file %s: %w" - errParsingGoModFile = "unable to parse module file %s: %w" -) - -var ( - blockReasonNotInAllowedList = "import of package `%s` is blocked because the module is not in the " + - "allowed modules list." - blockReasonInBlockedList = "import of package `%s` is blocked because the module is in the " + - "blocked modules list." - blockReasonHasLocalReplaceDirective = "import of package `%s` is blocked because the module has a " + - "local replace directive." - - // startsWithVersion is used to test when a string begins with the version identifier of a module, - // after having stripped the prefix base module name. IE "github.com/foo/bar/v2/baz" => "v2/baz" - // probably indicates that the module is actually github.com/foo/bar/v2, not github.com/foo/bar. - startsWithVersion = regexp.MustCompile(`^v[0-9]+`) -) - -// Configuration of gomodguard allow and block lists. -type Configuration struct { - Allowed Allowed `yaml:"allowed"` - Blocked Blocked `yaml:"blocked"` -} - -// Processor processes Go files. -type Processor struct { - Config *Configuration - Modfile *modfile.File - blockedModulesFromModFile map[string][]string -} - -// NewProcessor will create a Processor to lint blocked packages. -func NewProcessor(config *Configuration) (*Processor, error) { - goModFileBytes, err := loadGoModFile() - if err != nil { - return nil, fmt.Errorf(errReadingGoModFile, goModFilename, err) - } - - modFile, err := modfile.Parse(goModFilename, goModFileBytes, nil) - if err != nil { - return nil, fmt.Errorf(errParsingGoModFile, goModFilename, err) - } - - p := &Processor{ - Config: config, - Modfile: modFile, - } - - p.SetBlockedModules() - - return p, nil -} - -// ProcessFiles takes a string slice with file names (full paths) -// and lints them. -func (p *Processor) ProcessFiles(filenames []string) (issues []Issue) { - for _, filename := range filenames { - data, err := os.ReadFile(filename) - if err != nil { - issues = append(issues, Issue{ - FileName: filename, - LineNumber: 0, - Reason: fmt.Sprintf("unable to read file, file cannot be linted (%s)", err.Error()), - }) - - continue - } - - issues = append(issues, p.process(filename, data)...) - } - - return issues -} - -// process file imports and add lint error if blocked package is imported. 
-func (p *Processor) process(filename string, data []byte) (issues []Issue) { - fileSet := token.NewFileSet() - - file, err := parser.ParseFile(fileSet, filename, data, parser.ParseComments) - if err != nil { - issues = append(issues, Issue{ - FileName: filename, - LineNumber: 0, - Reason: fmt.Sprintf("invalid syntax, file cannot be linted (%s)", err.Error()), - }) - - return - } - - imports := file.Imports - for n := range imports { - importedPkg := strings.TrimSpace(strings.Trim(imports[n].Path.Value, "\"")) - - blockReasons := p.isBlockedPackageFromModFile(importedPkg) - if blockReasons == nil { - continue - } - - for _, blockReason := range blockReasons { - issues = append(issues, p.addError(fileSet, imports[n].Pos(), blockReason)) - } - } - - return issues -} - -// addError adds an error for the file and line number for the current token.Pos -// with the given reason. -func (p *Processor) addError(fileset *token.FileSet, pos token.Pos, reason string) Issue { - position := fileset.Position(pos) - - return Issue{ - FileName: position.Filename, - LineNumber: position.Line, - Position: position, - Reason: reason, - } -} - -// SetBlockedModules determines and sets which modules are blocked by reading -// the go.mod file of the module that is being linted. -// -// It works by iterating over the dependant modules specified in the require -// directive, checking if the module domain or full name is in the allowed list. -func (p *Processor) SetBlockedModules() { //nolint:gocognit,funlen - blockedModules := make(map[string][]string, len(p.Modfile.Require)) - currentModuleName := p.Modfile.Module.Mod.Path - lintedModules := p.Modfile.Require - replacedModules := p.Modfile.Replace - - for i := range lintedModules { - if lintedModules[i].Indirect { - continue // Do not lint indirect modules. - } - - lintedModuleName := strings.TrimSpace(lintedModules[i].Mod.Path) - lintedModuleVersion := strings.TrimSpace(lintedModules[i].Mod.Version) - - var isAllowed bool - - switch { - case len(p.Config.Allowed.Modules) == 0 && len(p.Config.Allowed.Domains) == 0: - isAllowed = true - case p.Config.Allowed.IsAllowedModuleDomain(lintedModuleName): - isAllowed = true - case p.Config.Allowed.IsAllowedModule(lintedModuleName): - isAllowed = true - default: - isAllowed = false - } - - blockModuleReason := p.Config.Blocked.Modules.GetBlockReason(lintedModuleName) - blockVersionReason := p.Config.Blocked.Versions.GetBlockReason(lintedModuleName) - - if !isAllowed && blockModuleReason == nil && blockVersionReason == nil { - blockedModules[lintedModuleName] = append(blockedModules[lintedModuleName], blockReasonNotInAllowedList) - continue - } - - if blockModuleReason != nil && !blockModuleReason.IsCurrentModuleARecommendation(currentModuleName) { - blockedModules[lintedModuleName] = append(blockedModules[lintedModuleName], - fmt.Sprintf("%s %s", blockReasonInBlockedList, blockModuleReason.Message())) - } - - if blockVersionReason != nil && blockVersionReason.IsLintedModuleVersionBlocked(lintedModuleVersion) { - blockedModules[lintedModuleName] = append(blockedModules[lintedModuleName], - fmt.Sprintf("%s %s", blockReasonInBlockedList, blockVersionReason.Message(lintedModuleVersion))) - } - } - - // Replace directives with local paths are blocked. - // Filesystem paths found in "replace" directives are represented by a path with an empty version. 
- // https://github.com/golang/mod/blob/bc388b264a244501debfb9caea700c6dcaff10e2/module/module.go#L122-L124 - if p.Config.Blocked.LocalReplaceDirectives { - for i := range replacedModules { - replacedModuleOldName := strings.TrimSpace(replacedModules[i].Old.Path) - replacedModuleNewName := strings.TrimSpace(replacedModules[i].New.Path) - replacedModuleNewVersion := strings.TrimSpace(replacedModules[i].New.Version) - - if replacedModuleNewName != "" && replacedModuleNewVersion == "" { - blockedModules[replacedModuleOldName] = append(blockedModules[replacedModuleOldName], - blockReasonHasLocalReplaceDirective) - } - } - } - - p.blockedModulesFromModFile = blockedModules -} - -// isBlockedPackageFromModFile returns the block reason if the package is blocked. -func (p *Processor) isBlockedPackageFromModFile(packageName string) []string { - for blockedModuleName, blockReasons := range p.blockedModulesFromModFile { - if isPackageInModule(packageName, blockedModuleName) { - formattedReasons := make([]string, 0, len(blockReasons)) - - for _, blockReason := range blockReasons { - formattedReasons = append(formattedReasons, fmt.Sprintf(blockReason, packageName)) - } - - return formattedReasons - } - } - - return nil -} - -func loadGoModFile() ([]byte, error) { - cmd := exec.Command("go", "env", "-json") - stdout, _ := cmd.StdoutPipe() - _ = cmd.Start() - - if stdout == nil { - return os.ReadFile(goModFilename) - } - - buf := new(bytes.Buffer) - _, _ = buf.ReadFrom(stdout) - - goEnv := make(map[string]string) - - err := json.Unmarshal(buf.Bytes(), &goEnv) - if err != nil { - return os.ReadFile(goModFilename) - } - - if _, ok := goEnv["GOMOD"]; !ok { - return os.ReadFile(goModFilename) - } - - if _, err = os.Stat(goEnv["GOMOD"]); os.IsNotExist(err) { - return os.ReadFile(goModFilename) - } - - if goEnv["GOMOD"] == "/dev/null" { - return nil, errors.New("current working directory must have a go.mod file") - } - - return os.ReadFile(goEnv["GOMOD"]) -} - -// isPackageInModule determines if a package is apart of the specified go module. -func isPackageInModule(pkg, mod string) bool { - // Split pkg and mod paths into parts - pkgPart := strings.Split(pkg, "/") - modPart := strings.Split(mod, "/") - - pkgPartMatches := 0 - - // Count number of times pkg path matches the mod path - for i, m := range modPart { - if len(pkgPart) > i && pkgPart[i] == m { - pkgPartMatches++ - } - } - - // If pkgPartMatches are not the same length as modPart - // than the package is not in this module - if pkgPartMatches != len(modPart) { - return false - } - - if len(pkgPart) > len(modPart) { - // If pkgPart path starts with a major version - // than the package is not in this module as - // major versions are completely different modules - if startsWithVersion.MatchString(pkgPart[len(modPart)]) { - return false - } - } - - return true -} diff --git a/vendor/github.com/ryancurrah/gomodguard/tools.go b/vendor/github.com/ryancurrah/gomodguard/tools.go deleted file mode 100644 index d56bcc747..000000000 --- a/vendor/github.com/ryancurrah/gomodguard/tools.go +++ /dev/null @@ -1,5 +0,0 @@ -//go:build tools - -package gomodguard - -import _ "github.com/t-yuki/gocover-cobertura" diff --git a/vendor/github.com/ryanrolds/sqlclosecheck/LICENSE b/vendor/github.com/ryanrolds/sqlclosecheck/LICENSE deleted file mode 100644 index 77b261d7a..000000000 --- a/vendor/github.com/ryanrolds/sqlclosecheck/LICENSE +++ /dev/null @@ -1,19 +0,0 @@ -Copyright (c) 2020 Ryan R. 
Olds - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. \ No newline at end of file diff --git a/vendor/github.com/ryanrolds/sqlclosecheck/pkg/analyzer/analyzer.go b/vendor/github.com/ryanrolds/sqlclosecheck/pkg/analyzer/analyzer.go deleted file mode 100644 index 55e931a89..000000000 --- a/vendor/github.com/ryanrolds/sqlclosecheck/pkg/analyzer/analyzer.go +++ /dev/null @@ -1,405 +0,0 @@ -package analyzer - -import ( - "go/types" - - "golang.org/x/tools/go/analysis" - "golang.org/x/tools/go/analysis/passes/buildssa" - "golang.org/x/tools/go/ssa" -) - -const ( - rowsName = "Rows" - stmtName = "Stmt" - namedStmtName = "NamedStmt" - closeMethod = "Close" -) - -type action uint8 - -const ( - actionUnhandled action = iota - actionHandled - actionReturned - actionPassed - actionClosed - actionUnvaluedCall - actionUnvaluedDefer - actionNoOp -) - -var ( - sqlPackages = []string{ - "database/sql", - "github.com/jmoiron/sqlx", - "github.com/jackc/pgx/v5", - "github.com/jackc/pgx/v5/pgxpool", - } -) - -func NewAnalyzer() *analysis.Analyzer { - return &analysis.Analyzer{ - Name: "sqlclosecheck", - Doc: "Checks that sql.Rows, sql.Stmt, sqlx.NamedStmt, pgx.Query are closed.", - Run: run, - Requires: []*analysis.Analyzer{ - buildssa.Analyzer, - }, - } -} - -func run(pass *analysis.Pass) (interface{}, error) { - pssa, ok := pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA) - if !ok { - return nil, nil - } - - // Build list of types we are looking for - targetTypes := getTargetTypes(pssa, sqlPackages) - - // If non of the types are found, skip - if len(targetTypes) == 0 { - return nil, nil - } - - funcs := pssa.SrcFuncs - for _, f := range funcs { - for _, b := range f.Blocks { - for i := range b.Instrs { - // Check if instruction is call that returns a target pointer type - targetValues := getTargetTypesValues(b, i, targetTypes) - if len(targetValues) == 0 { - continue - } - - // For each found target check if they are closed and deferred - for _, targetValue := range targetValues { - refs := (*targetValue.value).Referrers() - isClosed := checkClosed(refs, targetTypes) - if !isClosed { - pass.Reportf((targetValue.instr).Pos(), "Rows/Stmt/NamedStmt was not closed") - } - - checkDeferred(pass, refs, targetTypes, false) - } - } - } - } - - return nil, nil -} - -func getTargetTypes(pssa *buildssa.SSA, targetPackages []string) []any { - targets := []any{} - - for _, sqlPkg := range targetPackages { - pkg := pssa.Pkg.Prog.ImportedPackage(sqlPkg) - if pkg == nil { - // the SQL package being checked isn't imported - continue - } - - 
rowsPtrType := getTypePointerFromName(pkg, rowsName) - if rowsPtrType != nil { - targets = append(targets, rowsPtrType) - } - - rowsType := getTypeFromName(pkg, rowsName) - if rowsType != nil { - targets = append(targets, rowsType) - } - - stmtType := getTypePointerFromName(pkg, stmtName) - if stmtType != nil { - targets = append(targets, stmtType) - } - - namedStmtType := getTypePointerFromName(pkg, namedStmtName) - if namedStmtType != nil { - targets = append(targets, namedStmtType) - } - } - - return targets -} - -func getTypePointerFromName(pkg *ssa.Package, name string) *types.Pointer { - pkgType := pkg.Type(name) - if pkgType == nil { - // this package does not use Rows/Stmt/NamedStmt - return nil - } - - obj := pkgType.Object() - named, ok := obj.Type().(*types.Named) - if !ok { - return nil - } - - return types.NewPointer(named) -} - -func getTypeFromName(pkg *ssa.Package, name string) *types.Named { - pkgType := pkg.Type(name) - if pkgType == nil { - // this package does not use Rows/Stmt - return nil - } - - obj := pkgType.Object() - named, ok := obj.Type().(*types.Named) - if !ok { - return nil - } - - return named -} - -type targetValue struct { - value *ssa.Value - instr ssa.Instruction -} - -func getTargetTypesValues(b *ssa.BasicBlock, i int, targetTypes []any) []targetValue { - targetValues := []targetValue{} - - instr := b.Instrs[i] - call, ok := instr.(*ssa.Call) - if !ok { - return targetValues - } - - signature := call.Call.Signature() - results := signature.Results() - for i := 0; i < results.Len(); i++ { - v := results.At(i) - varType := v.Type() - - for _, targetType := range targetTypes { - var tt types.Type - - switch t := targetType.(type) { - case *types.Pointer: - tt = t - case *types.Named: - tt = t - default: - continue - } - - if !types.Identical(varType, tt) { - continue - } - - for _, cRef := range *call.Referrers() { - switch instr := cRef.(type) { - case *ssa.Call: - if len(instr.Call.Args) >= 1 && types.Identical(instr.Call.Args[0].Type(), tt) { - targetValues = append(targetValues, targetValue{ - value: &instr.Call.Args[0], - instr: call, - }) - } - case ssa.Value: - if types.Identical(instr.Type(), tt) { - targetValues = append(targetValues, targetValue{ - value: &instr, - instr: call, - }) - } - } - } - } - } - - return targetValues -} - -func checkClosed(refs *[]ssa.Instruction, targetTypes []any) bool { - numInstrs := len(*refs) - for idx, ref := range *refs { - action := getAction(ref, targetTypes) - switch action { - case actionClosed, actionReturned, actionHandled: - return true - case actionPassed: - // Passed and not used after - if numInstrs == idx+1 { - return true - } - } - } - - return false -} - -func getAction(instr ssa.Instruction, targetTypes []any) action { - switch instr := instr.(type) { - case *ssa.Defer: - if instr.Call.Value != nil { - name := instr.Call.Value.Name() - if name == closeMethod { - return actionClosed - } - } - - if instr.Call.Method != nil { - name := instr.Call.Method.Name() - if name == closeMethod { - return actionClosed - } - } - - return actionUnvaluedDefer - case *ssa.Call: - if instr.Call.Value == nil { - return actionUnvaluedCall - } - - isTarget := false - staticCallee := instr.Call.StaticCallee() - if staticCallee != nil { - receiver := instr.Call.StaticCallee().Signature.Recv() - if receiver != nil { - isTarget = isTargetType(receiver.Type(), targetTypes) - } - } - - name := instr.Call.Value.Name() - if isTarget && name == closeMethod { - return actionClosed - } - - if !isTarget { - return actionPassed - } 
- case *ssa.Phi: - return actionPassed - case *ssa.MakeInterface: - return actionPassed - case *ssa.Store: - // A Row/Stmt is stored in a struct, which may be closed later - // by a different flow. - if _, ok := instr.Addr.(*ssa.FieldAddr); ok { - return actionReturned - } - - if len(*instr.Addr.Referrers()) == 0 { - return actionNoOp - } - - for _, aRef := range *instr.Addr.Referrers() { - if c, ok := aRef.(*ssa.MakeClosure); ok { - if f, ok := c.Fn.(*ssa.Function); ok { - for _, b := range f.Blocks { - if checkClosed(&b.Instrs, targetTypes) { - return actionHandled - } - } - } - } - } - case *ssa.UnOp: - instrType := instr.Type() - for _, targetType := range targetTypes { - var tt types.Type - - switch t := targetType.(type) { - case *types.Pointer: - tt = t - case *types.Named: - tt = t - default: - continue - } - - if types.Identical(instrType, tt) { - if checkClosed(instr.Referrers(), targetTypes) { - return actionHandled - } - } - } - case *ssa.FieldAddr: - if checkClosed(instr.Referrers(), targetTypes) { - return actionHandled - } - case *ssa.Return: - return actionReturned - } - - return actionUnhandled -} - -func checkDeferred(pass *analysis.Pass, instrs *[]ssa.Instruction, targetTypes []any, inDefer bool) { - for _, instr := range *instrs { - switch instr := instr.(type) { - case *ssa.Defer: - if instr.Call.Value != nil && instr.Call.Value.Name() == closeMethod { - return - } - - if instr.Call.Method != nil && instr.Call.Method.Name() == closeMethod { - return - } - case *ssa.Call: - if instr.Call.Value != nil && instr.Call.Value.Name() == closeMethod { - if !inDefer { - pass.Reportf(instr.Pos(), "Close should use defer") - } - - return - } - case *ssa.Store: - if len(*instr.Addr.Referrers()) == 0 { - return - } - - for _, aRef := range *instr.Addr.Referrers() { - if c, ok := aRef.(*ssa.MakeClosure); ok { - if f, ok := c.Fn.(*ssa.Function); ok { - for _, b := range f.Blocks { - checkDeferred(pass, &b.Instrs, targetTypes, true) - } - } - } - } - case *ssa.UnOp: - instrType := instr.Type() - for _, targetType := range targetTypes { - var tt types.Type - - switch t := targetType.(type) { - case *types.Pointer: - tt = t - case *types.Named: - tt = t - default: - continue - } - - if types.Identical(instrType, tt) { - checkDeferred(pass, instr.Referrers(), targetTypes, inDefer) - } - } - case *ssa.FieldAddr: - checkDeferred(pass, instr.Referrers(), targetTypes, inDefer) - } - } -} - -func isTargetType(t types.Type, targetTypes []any) bool { - for _, targetType := range targetTypes { - switch tt := targetType.(type) { - case *types.Pointer: - if types.Identical(t, tt) { - return true - } - case *types.Named: - if types.Identical(t, tt) { - return true - } - } - } - - return false -} diff --git a/vendor/github.com/sagikazarmark/locafero/.editorconfig b/vendor/github.com/sagikazarmark/locafero/.editorconfig deleted file mode 100644 index 6f944f540..000000000 --- a/vendor/github.com/sagikazarmark/locafero/.editorconfig +++ /dev/null @@ -1,21 +0,0 @@ -root = true - -[*] -charset = utf-8 -end_of_line = lf -indent_size = 4 -indent_style = space -insert_final_newline = true -trim_trailing_whitespace = true - -[{Makefile,*.mk}] -indent_style = tab - -[*.nix] -indent_size = 2 - -[*.go] -indent_style = tab - -[{*.yml,*.yaml}] -indent_size = 2 diff --git a/vendor/github.com/sagikazarmark/locafero/.envrc b/vendor/github.com/sagikazarmark/locafero/.envrc deleted file mode 100644 index 3ce7171a3..000000000 --- a/vendor/github.com/sagikazarmark/locafero/.envrc +++ /dev/null @@ -1,4 +0,0 @@ -if ! 
has nix_direnv_version || ! nix_direnv_version 2.3.0; then - source_url "https://raw.githubusercontent.com/nix-community/nix-direnv/2.3.0/direnvrc" "sha256-Dmd+j63L84wuzgyjITIfSxSD57Tx7v51DMxVZOsiUD8=" -fi -use flake . --impure diff --git a/vendor/github.com/sagikazarmark/locafero/.gitignore b/vendor/github.com/sagikazarmark/locafero/.gitignore deleted file mode 100644 index 8f07e6016..000000000 --- a/vendor/github.com/sagikazarmark/locafero/.gitignore +++ /dev/null @@ -1,8 +0,0 @@ -/.devenv/ -/.direnv/ -/.task/ -/bin/ -/build/ -/tmp/ -/var/ -/vendor/ diff --git a/vendor/github.com/sagikazarmark/locafero/.golangci.yaml b/vendor/github.com/sagikazarmark/locafero/.golangci.yaml deleted file mode 100644 index 829de2a4a..000000000 --- a/vendor/github.com/sagikazarmark/locafero/.golangci.yaml +++ /dev/null @@ -1,27 +0,0 @@ -run: - timeout: 10m - -linters-settings: - gci: - sections: - - standard - - default - - prefix(github.com/sagikazarmark/locafero) - goimports: - local-prefixes: github.com/sagikazarmark/locafero - misspell: - locale: US - nolintlint: - allow-leading-space: false # require machine-readable nolint directives (with no leading space) - allow-unused: false # report any unused nolint directives - require-specific: false # don't require nolint directives to be specific about which linter is being skipped - revive: - confidence: 0 - -linters: - enable: - - gci - - goimports - - misspell - - nolintlint - - revive diff --git a/vendor/github.com/sagikazarmark/locafero/LICENSE b/vendor/github.com/sagikazarmark/locafero/LICENSE deleted file mode 100644 index a70b0f296..000000000 --- a/vendor/github.com/sagikazarmark/locafero/LICENSE +++ /dev/null @@ -1,19 +0,0 @@ -Copyright (c) 2023 Márk Sági-Kazár - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is furnished -to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. 
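The sqlclosecheck analyzer deleted a few files above reports `Rows`, `Stmt`, and `NamedStmt` values whose `Close` is never reached ("Rows/Stmt/NamedStmt was not closed") and asks for `Close` to be deferred. Below is a minimal sketch of the pattern it accepts, assuming the standard `database/sql` package; the SQLite driver import is only an illustrative choice, not something the analyzer requires.

```go
package main

import (
	"database/sql"
	"log"

	_ "github.com/mattn/go-sqlite3" // hypothetical driver choice for this example
)

func main() {
	db, err := sql.Open("sqlite3", ":memory:")
	if err != nil {
		log.Fatal(err)
	}
	defer db.Close()

	rows, err := db.Query("SELECT 1")
	if err != nil {
		log.Fatal(err)
	}
	// Deferring Close right after the error check is the pattern the analyzer
	// accepts; dropping rows without closing it would be reported.
	defer rows.Close()

	for rows.Next() {
		var n int
		if err := rows.Scan(&n); err != nil {
			log.Fatal(err)
		}
		log.Println(n)
	}
	if err := rows.Err(); err != nil {
		log.Fatal(err)
	}
}
```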
diff --git a/vendor/github.com/sagikazarmark/locafero/README.md b/vendor/github.com/sagikazarmark/locafero/README.md deleted file mode 100644 index a48e8e978..000000000 --- a/vendor/github.com/sagikazarmark/locafero/README.md +++ /dev/null @@ -1,37 +0,0 @@ -# Finder library for [Afero](https://github.com/spf13/afero) - -[![GitHub Workflow Status](https://img.shields.io/github/actions/workflow/status/sagikazarmark/locafero/ci.yaml?style=flat-square)](https://github.com/sagikazarmark/locafero/actions/workflows/ci.yaml) -[![go.dev reference](https://img.shields.io/badge/go.dev-reference-007d9c?logo=go&logoColor=white&style=flat-square)](https://pkg.go.dev/mod/github.com/sagikazarmark/locafero) -![Go Version](https://img.shields.io/badge/go%20version-%3E=1.20-61CFDD.svg?style=flat-square) -[![built with nix](https://img.shields.io/badge/builtwith-nix-7d81f7?style=flat-square)](https://builtwithnix.org) - -**Finder library for [Afero](https://github.com/spf13/afero) ported from [go-finder](https://github.com/sagikazarmark/go-finder).** - -> [!WARNING] -> This is an experimental library under development. -> -> **Backwards compatibility is not guaranteed, expect breaking changes.** - -## Installation - -```shell -go get github.com/sagikazarmark/locafero -``` - -## Usage - -Check out the [package example](https://pkg.go.dev/github.com/sagikazarmark/locafero#example-package) on go.dev. - -## Development - -**For an optimal developer experience, it is recommended to install [Nix](https://nixos.org/download.html) and [direnv](https://direnv.net/docs/installation.html).** - -Run the test suite: - -```shell -just test -``` - -## License - -The project is licensed under the [MIT License](LICENSE). diff --git a/vendor/github.com/sagikazarmark/locafero/file_type.go b/vendor/github.com/sagikazarmark/locafero/file_type.go deleted file mode 100644 index 9a9b14023..000000000 --- a/vendor/github.com/sagikazarmark/locafero/file_type.go +++ /dev/null @@ -1,28 +0,0 @@ -package locafero - -import "io/fs" - -// FileType represents the kind of entries [Finder] can return. -type FileType int - -const ( - FileTypeAll FileType = iota - FileTypeFile - FileTypeDir -) - -func (ft FileType) matchFileInfo(info fs.FileInfo) bool { - switch ft { - case FileTypeAll: - return true - - case FileTypeFile: - return !info.IsDir() - - case FileTypeDir: - return info.IsDir() - - default: - return false - } -} diff --git a/vendor/github.com/sagikazarmark/locafero/finder.go b/vendor/github.com/sagikazarmark/locafero/finder.go deleted file mode 100644 index 754c8b260..000000000 --- a/vendor/github.com/sagikazarmark/locafero/finder.go +++ /dev/null @@ -1,165 +0,0 @@ -// Package finder looks for files and directories in an {fs.Fs} filesystem. -package locafero - -import ( - "errors" - "io/fs" - "path/filepath" - "strings" - - "github.com/sourcegraph/conc/iter" - "github.com/spf13/afero" -) - -// Finder looks for files and directories in an [afero.Fs] filesystem. -type Finder struct { - // Paths represents a list of locations that the [Finder] will search in. - // - // They are essentially the root directories or starting points for the search. - // - // Examples: - // - home/user - // - etc - Paths []string - - // Names are specific entries that the [Finder] will look for within the given Paths. - // - // It provides the capability to search for entries with depth, - // meaning it can target deeper locations within the directory structure. 
- // - // It also supports glob syntax (as defined by [filepat.Match]), offering greater flexibility in search patterns. - // - // Examples: - // - config.yaml - // - home/*/config.yaml - // - home/*/config.* - Names []string - - // Type restricts the kind of entries returned by the [Finder]. - // - // This parameter helps in differentiating and filtering out files from directories or vice versa. - Type FileType -} - -// Find looks for files and directories in an [afero.Fs] filesystem. -func (f Finder) Find(fsys afero.Fs) ([]string, error) { - // Arbitrary go routine limit (TODO: make this a parameter) - // pool := pool.NewWithResults[[]string]().WithMaxGoroutines(5).WithErrors().WithFirstError() - - type searchItem struct { - path string - name string - } - - var searchItems []searchItem - - for _, searchPath := range f.Paths { - searchPath := searchPath - - for _, searchName := range f.Names { - searchName := searchName - - searchItems = append(searchItems, searchItem{searchPath, searchName}) - - // pool.Go(func() ([]string, error) { - // // If the name contains any glob character, perform a glob match - // if strings.ContainsAny(searchName, "*?[]\\^") { - // return globWalkSearch(fsys, searchPath, searchName, f.Type) - // } - // - // return statSearch(fsys, searchPath, searchName, f.Type) - // }) - } - } - - // allResults, err := pool.Wait() - // if err != nil { - // return nil, err - // } - - allResults, err := iter.MapErr(searchItems, func(item *searchItem) ([]string, error) { - // If the name contains any glob character, perform a glob match - if strings.ContainsAny(item.name, "*?[]\\^") { - return globWalkSearch(fsys, item.path, item.name, f.Type) - } - - return statSearch(fsys, item.path, item.name, f.Type) - }) - if err != nil { - return nil, err - } - - var results []string - - for _, r := range allResults { - results = append(results, r...) 
- } - - // Sort results in alphabetical order for now - // sort.Strings(results) - - return results, nil -} - -func globWalkSearch(fsys afero.Fs, searchPath string, searchName string, searchType FileType) ([]string, error) { - var results []string - - err := afero.Walk(fsys, searchPath, func(p string, fileInfo fs.FileInfo, err error) error { - if err != nil { - return err - } - - // Skip the root path - if p == searchPath { - return nil - } - - var result error - - // Stop reading subdirectories - // TODO: add depth detection here - if fileInfo.IsDir() && filepath.Dir(p) == searchPath { - result = fs.SkipDir - } - - // Skip unmatching type - if !searchType.matchFileInfo(fileInfo) { - return result - } - - match, err := filepath.Match(searchName, fileInfo.Name()) - if err != nil { - return err - } - - if match { - results = append(results, p) - } - - return result - }) - if err != nil { - return results, err - } - - return results, nil -} - -func statSearch(fsys afero.Fs, searchPath string, searchName string, searchType FileType) ([]string, error) { - filePath := filepath.Join(searchPath, searchName) - - fileInfo, err := fsys.Stat(filePath) - if errors.Is(err, fs.ErrNotExist) { - return nil, nil - } - if err != nil { - return nil, err - } - - // Skip unmatching type - if !searchType.matchFileInfo(fileInfo) { - return nil, nil - } - - return []string{filePath}, nil -} diff --git a/vendor/github.com/sagikazarmark/locafero/flake.lock b/vendor/github.com/sagikazarmark/locafero/flake.lock deleted file mode 100644 index 46d28f805..000000000 --- a/vendor/github.com/sagikazarmark/locafero/flake.lock +++ /dev/null @@ -1,273 +0,0 @@ -{ - "nodes": { - "devenv": { - "inputs": { - "flake-compat": "flake-compat", - "nix": "nix", - "nixpkgs": "nixpkgs", - "pre-commit-hooks": "pre-commit-hooks" - }, - "locked": { - "lastModified": 1694097209, - "narHash": "sha256-gQmBjjxeSyySjbh0yQVBKApo2KWIFqqbRUvG+Fa+QpM=", - "owner": "cachix", - "repo": "devenv", - "rev": "7a8e6a91510efe89d8dcb8e43233f93e86f6b189", - "type": "github" - }, - "original": { - "owner": "cachix", - "repo": "devenv", - "type": "github" - } - }, - "flake-compat": { - "flake": false, - "locked": { - "lastModified": 1673956053, - "narHash": "sha256-4gtG9iQuiKITOjNQQeQIpoIB6b16fm+504Ch3sNKLd8=", - "owner": "edolstra", - "repo": "flake-compat", - "rev": "35bb57c0c8d8b62bbfd284272c928ceb64ddbde9", - "type": "github" - }, - "original": { - "owner": "edolstra", - "repo": "flake-compat", - "type": "github" - } - }, - "flake-parts": { - "inputs": { - "nixpkgs-lib": "nixpkgs-lib" - }, - "locked": { - "lastModified": 1693611461, - "narHash": "sha256-aPODl8vAgGQ0ZYFIRisxYG5MOGSkIczvu2Cd8Gb9+1Y=", - "owner": "hercules-ci", - "repo": "flake-parts", - "rev": "7f53fdb7bdc5bb237da7fefef12d099e4fd611ca", - "type": "github" - }, - "original": { - "owner": "hercules-ci", - "repo": "flake-parts", - "type": "github" - } - }, - "flake-utils": { - "inputs": { - "systems": "systems" - }, - "locked": { - "lastModified": 1685518550, - "narHash": "sha256-o2d0KcvaXzTrPRIo0kOLV0/QXHhDQ5DTi+OxcjO8xqY=", - "owner": "numtide", - "repo": "flake-utils", - "rev": "a1720a10a6cfe8234c0e93907ffe81be440f4cef", - "type": "github" - }, - "original": { - "owner": "numtide", - "repo": "flake-utils", - "type": "github" - } - }, - "gitignore": { - "inputs": { - "nixpkgs": [ - "devenv", - "pre-commit-hooks", - "nixpkgs" - ] - }, - "locked": { - "lastModified": 1660459072, - "narHash": "sha256-8DFJjXG8zqoONA1vXtgeKXy68KdJL5UaXR8NtVMUbx8=", - "owner": "hercules-ci", - "repo": 
"gitignore.nix", - "rev": "a20de23b925fd8264fd7fad6454652e142fd7f73", - "type": "github" - }, - "original": { - "owner": "hercules-ci", - "repo": "gitignore.nix", - "type": "github" - } - }, - "lowdown-src": { - "flake": false, - "locked": { - "lastModified": 1633514407, - "narHash": "sha256-Dw32tiMjdK9t3ETl5fzGrutQTzh2rufgZV4A/BbxuD4=", - "owner": "kristapsdz", - "repo": "lowdown", - "rev": "d2c2b44ff6c27b936ec27358a2653caaef8f73b8", - "type": "github" - }, - "original": { - "owner": "kristapsdz", - "repo": "lowdown", - "type": "github" - } - }, - "nix": { - "inputs": { - "lowdown-src": "lowdown-src", - "nixpkgs": [ - "devenv", - "nixpkgs" - ], - "nixpkgs-regression": "nixpkgs-regression" - }, - "locked": { - "lastModified": 1676545802, - "narHash": "sha256-EK4rZ+Hd5hsvXnzSzk2ikhStJnD63odF7SzsQ8CuSPU=", - "owner": "domenkozar", - "repo": "nix", - "rev": "7c91803598ffbcfe4a55c44ac6d49b2cf07a527f", - "type": "github" - }, - "original": { - "owner": "domenkozar", - "ref": "relaxed-flakes", - "repo": "nix", - "type": "github" - } - }, - "nixpkgs": { - "locked": { - "lastModified": 1678875422, - "narHash": "sha256-T3o6NcQPwXjxJMn2shz86Chch4ljXgZn746c2caGxd8=", - "owner": "NixOS", - "repo": "nixpkgs", - "rev": "126f49a01de5b7e35a43fd43f891ecf6d3a51459", - "type": "github" - }, - "original": { - "owner": "NixOS", - "ref": "nixpkgs-unstable", - "repo": "nixpkgs", - "type": "github" - } - }, - "nixpkgs-lib": { - "locked": { - "dir": "lib", - "lastModified": 1693471703, - "narHash": "sha256-0l03ZBL8P1P6z8MaSDS/MvuU8E75rVxe5eE1N6gxeTo=", - "owner": "NixOS", - "repo": "nixpkgs", - "rev": "3e52e76b70d5508f3cec70b882a29199f4d1ee85", - "type": "github" - }, - "original": { - "dir": "lib", - "owner": "NixOS", - "ref": "nixos-unstable", - "repo": "nixpkgs", - "type": "github" - } - }, - "nixpkgs-regression": { - "locked": { - "lastModified": 1643052045, - "narHash": "sha256-uGJ0VXIhWKGXxkeNnq4TvV3CIOkUJ3PAoLZ3HMzNVMw=", - "owner": "NixOS", - "repo": "nixpkgs", - "rev": "215d4d0fd80ca5163643b03a33fde804a29cc1e2", - "type": "github" - }, - "original": { - "owner": "NixOS", - "repo": "nixpkgs", - "rev": "215d4d0fd80ca5163643b03a33fde804a29cc1e2", - "type": "github" - } - }, - "nixpkgs-stable": { - "locked": { - "lastModified": 1685801374, - "narHash": "sha256-otaSUoFEMM+LjBI1XL/xGB5ao6IwnZOXc47qhIgJe8U=", - "owner": "NixOS", - "repo": "nixpkgs", - "rev": "c37ca420157f4abc31e26f436c1145f8951ff373", - "type": "github" - }, - "original": { - "owner": "NixOS", - "ref": "nixos-23.05", - "repo": "nixpkgs", - "type": "github" - } - }, - "nixpkgs_2": { - "locked": { - "lastModified": 1694343207, - "narHash": "sha256-jWi7OwFxU5Owi4k2JmiL1sa/OuBCQtpaAesuj5LXC8w=", - "owner": "NixOS", - "repo": "nixpkgs", - "rev": "78058d810644f5ed276804ce7ea9e82d92bee293", - "type": "github" - }, - "original": { - "owner": "NixOS", - "ref": "nixpkgs-unstable", - "repo": "nixpkgs", - "type": "github" - } - }, - "pre-commit-hooks": { - "inputs": { - "flake-compat": [ - "devenv", - "flake-compat" - ], - "flake-utils": "flake-utils", - "gitignore": "gitignore", - "nixpkgs": [ - "devenv", - "nixpkgs" - ], - "nixpkgs-stable": "nixpkgs-stable" - }, - "locked": { - "lastModified": 1688056373, - "narHash": "sha256-2+SDlNRTKsgo3LBRiMUcoEUb6sDViRNQhzJquZ4koOI=", - "owner": "cachix", - "repo": "pre-commit-hooks.nix", - "rev": "5843cf069272d92b60c3ed9e55b7a8989c01d4c7", - "type": "github" - }, - "original": { - "owner": "cachix", - "repo": "pre-commit-hooks.nix", - "type": "github" - } - }, - "root": { - "inputs": { - "devenv": "devenv", - 
"flake-parts": "flake-parts", - "nixpkgs": "nixpkgs_2" - } - }, - "systems": { - "locked": { - "lastModified": 1681028828, - "narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=", - "owner": "nix-systems", - "repo": "default", - "rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e", - "type": "github" - }, - "original": { - "owner": "nix-systems", - "repo": "default", - "type": "github" - } - } - }, - "root": "root", - "version": 7 -} diff --git a/vendor/github.com/sagikazarmark/locafero/flake.nix b/vendor/github.com/sagikazarmark/locafero/flake.nix deleted file mode 100644 index 209ecf286..000000000 --- a/vendor/github.com/sagikazarmark/locafero/flake.nix +++ /dev/null @@ -1,47 +0,0 @@ -{ - description = "Finder library for Afero"; - - inputs = { - nixpkgs.url = "github:NixOS/nixpkgs/nixpkgs-unstable"; - flake-parts.url = "github:hercules-ci/flake-parts"; - devenv.url = "github:cachix/devenv"; - }; - - outputs = inputs@{ flake-parts, ... }: - flake-parts.lib.mkFlake { inherit inputs; } { - imports = [ - inputs.devenv.flakeModule - ]; - - systems = [ "x86_64-linux" "aarch64-darwin" ]; - - perSystem = { config, self', inputs', pkgs, system, ... }: rec { - devenv.shells = { - default = { - languages = { - go.enable = true; - }; - - packages = with pkgs; [ - just - - golangci-lint - ]; - - # https://github.com/cachix/devenv/issues/528#issuecomment-1556108767 - containers = pkgs.lib.mkForce { }; - }; - - ci = devenv.shells.default; - - ci_1_20 = { - imports = [ devenv.shells.ci ]; - - languages = { - go.package = pkgs.go_1_20; - }; - }; - }; - }; - }; -} diff --git a/vendor/github.com/sagikazarmark/locafero/helpers.go b/vendor/github.com/sagikazarmark/locafero/helpers.go deleted file mode 100644 index 05b434481..000000000 --- a/vendor/github.com/sagikazarmark/locafero/helpers.go +++ /dev/null @@ -1,41 +0,0 @@ -package locafero - -import "fmt" - -// NameWithExtensions creates a list of names from a base name and a list of extensions. -// -// TODO: find a better name for this function. -func NameWithExtensions(baseName string, extensions ...string) []string { - var names []string - - if baseName == "" { - return names - } - - for _, ext := range extensions { - if ext == "" { - continue - } - - names = append(names, fmt.Sprintf("%s.%s", baseName, ext)) - } - - return names -} - -// NameWithOptionalExtensions creates a list of names from a base name and a list of extensions, -// plus it adds the base name (without any extensions) to the end of the list. -// -// TODO: find a better name for this function. -func NameWithOptionalExtensions(baseName string, extensions ...string) []string { - var names []string - - if baseName == "" { - return names - } - - names = NameWithExtensions(baseName, extensions...) - names = append(names, baseName) - - return names -} diff --git a/vendor/github.com/sagikazarmark/locafero/justfile b/vendor/github.com/sagikazarmark/locafero/justfile deleted file mode 100644 index 00a88850c..000000000 --- a/vendor/github.com/sagikazarmark/locafero/justfile +++ /dev/null @@ -1,11 +0,0 @@ -default: - just --list - -test: - go test -race -v ./... 
- -lint: - golangci-lint run - -fmt: - golangci-lint run --fix diff --git a/vendor/github.com/sagikazarmark/slog-shim/.editorconfig b/vendor/github.com/sagikazarmark/slog-shim/.editorconfig deleted file mode 100644 index 1fb0e1bec..000000000 --- a/vendor/github.com/sagikazarmark/slog-shim/.editorconfig +++ /dev/null @@ -1,18 +0,0 @@ -root = true - -[*] -charset = utf-8 -end_of_line = lf -indent_size = 4 -indent_style = space -insert_final_newline = true -trim_trailing_whitespace = true - -[*.nix] -indent_size = 2 - -[{Makefile,*.mk}] -indent_style = tab - -[Taskfile.yaml] -indent_size = 2 diff --git a/vendor/github.com/sagikazarmark/slog-shim/.envrc b/vendor/github.com/sagikazarmark/slog-shim/.envrc deleted file mode 100644 index 3ce7171a3..000000000 --- a/vendor/github.com/sagikazarmark/slog-shim/.envrc +++ /dev/null @@ -1,4 +0,0 @@ -if ! has nix_direnv_version || ! nix_direnv_version 2.3.0; then - source_url "https://raw.githubusercontent.com/nix-community/nix-direnv/2.3.0/direnvrc" "sha256-Dmd+j63L84wuzgyjITIfSxSD57Tx7v51DMxVZOsiUD8=" -fi -use flake . --impure diff --git a/vendor/github.com/sagikazarmark/slog-shim/.gitignore b/vendor/github.com/sagikazarmark/slog-shim/.gitignore deleted file mode 100644 index dc6d8b587..000000000 --- a/vendor/github.com/sagikazarmark/slog-shim/.gitignore +++ /dev/null @@ -1,4 +0,0 @@ -/.devenv/ -/.direnv/ -/.task/ -/build/ diff --git a/vendor/github.com/sagikazarmark/slog-shim/LICENSE b/vendor/github.com/sagikazarmark/slog-shim/LICENSE deleted file mode 100644 index 6a66aea5e..000000000 --- a/vendor/github.com/sagikazarmark/slog-shim/LICENSE +++ /dev/null @@ -1,27 +0,0 @@ -Copyright (c) 2009 The Go Authors. All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are -met: - - * Redistributions of source code must retain the above copyright -notice, this list of conditions and the following disclaimer. - * Redistributions in binary form must reproduce the above -copyright notice, this list of conditions and the following disclaimer -in the documentation and/or other materials provided with the -distribution. - * Neither the name of Google Inc. nor the names of its -contributors may be used to endorse or promote products derived from -this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
diff --git a/vendor/github.com/sagikazarmark/slog-shim/README.md b/vendor/github.com/sagikazarmark/slog-shim/README.md deleted file mode 100644 index 1f5be85e1..000000000 --- a/vendor/github.com/sagikazarmark/slog-shim/README.md +++ /dev/null @@ -1,81 +0,0 @@ -# [slog](https://pkg.go.dev/log/slog) shim - -[![GitHub Workflow Status](https://img.shields.io/github/actions/workflow/status/sagikazarmark/slog-shim/ci.yaml?style=flat-square)](https://github.com/sagikazarmark/slog-shim/actions/workflows/ci.yaml) -[![go.dev reference](https://img.shields.io/badge/go.dev-reference-007d9c?logo=go&logoColor=white&style=flat-square)](https://pkg.go.dev/mod/github.com/sagikazarmark/slog-shim) -![Go Version](https://img.shields.io/badge/go%20version-%3E=1.20-61CFDD.svg?style=flat-square) -[![built with nix](https://img.shields.io/badge/builtwith-nix-7d81f7?style=flat-square)](https://builtwithnix.org) - -Go 1.21 introduced a [new structured logging package](https://golang.org/doc/go1.21#slog), `log/slog`, to the standard library. -Although it's been eagerly anticipated by many, widespread adoption isn't expected to occur immediately, -especially since updating to Go 1.21 is a decision that most libraries won't make overnight. - -Before this package was added to the standard library, there was an _experimental_ version available at [golang.org/x/exp/slog](https://pkg.go.dev/golang.org/x/exp/slog). -While it's generally advised against using experimental packages in production, -this one served as a sort of backport package for the last few years, -incorporating new features before they were added to the standard library (like `slices`, `maps` or `errors`). - -This package serves as a bridge, helping libraries integrate slog in a backward-compatible way without having to immediately update their Go version requirement to 1.21. On Go 1.21 (and above), it acts as a drop-in replacement for `log/slog`, while below 1.21 it falls back to `golang.org/x/exp/slog`. - -**How does it achieve backwards compatibility?** - -Although there's no consensus on whether dropping support for older Go versions is considered backward compatible, a majority seems to believe it is. -(I don't have scientific proof for this, but it's based on conversations with various individuals across different channels.) - -This package adheres to that interpretation of backward compatibility. On Go 1.21, the shim uses type aliases to offer the same API as `slog/log`. -Once a library upgrades its version requirement to Go 1.21, it should be able to discard this shim and use `log/slog` directly. - -For older Go versions, the library might become unstable after removing the shim. -However, since those older versions are no longer supported, the promise of backward compatibility remains intact. - -## Installation - -```shell -go get github.com/sagikazarmark/slog-shim -``` - -## Usage - -Import this package into your library and use it in your public API: - -```go -package mylib - -import slog "github.com/sagikazarmark/slog-shim" - -func New(logger *slog.Logger) MyLib { - // ... 
-} -``` - -When using the library, clients can either use `log/slog` (when on Go 1.21) or `golang.org/x/exp/slog` (below Go 1.21): - -```go -package main - -import "log/slog" - -// OR - -import "golang.org/x/exp/slog" - -mylib.New(slog.Default()) -``` - -**Make sure consumers are aware that your API behaves differently on different Go versions.** - -Once you bump your Go version requirement to Go 1.21, you can drop the shim entirely from your code: - -```diff -package mylib - -- import slog "github.com/sagikazarmark/slog-shim" -+ import "log/slog" - -func New(logger *slog.Logger) MyLib { - // ... -} -``` - -## License - -The project is licensed under a [BSD-style license](LICENSE). diff --git a/vendor/github.com/sagikazarmark/slog-shim/attr.go b/vendor/github.com/sagikazarmark/slog-shim/attr.go deleted file mode 100644 index 89608bf3a..000000000 --- a/vendor/github.com/sagikazarmark/slog-shim/attr.go +++ /dev/null @@ -1,74 +0,0 @@ -// Copyright 2022 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -//go:build go1.21 - -package slog - -import ( - "log/slog" - "time" -) - -// An Attr is a key-value pair. -type Attr = slog.Attr - -// String returns an Attr for a string value. -func String(key, value string) Attr { - return slog.String(key, value) -} - -// Int64 returns an Attr for an int64. -func Int64(key string, value int64) Attr { - return slog.Int64(key, value) -} - -// Int converts an int to an int64 and returns -// an Attr with that value. -func Int(key string, value int) Attr { - return slog.Int(key, value) -} - -// Uint64 returns an Attr for a uint64. -func Uint64(key string, v uint64) Attr { - return slog.Uint64(key, v) -} - -// Float64 returns an Attr for a floating-point number. -func Float64(key string, v float64) Attr { - return slog.Float64(key, v) -} - -// Bool returns an Attr for a bool. -func Bool(key string, v bool) Attr { - return slog.Bool(key, v) -} - -// Time returns an Attr for a time.Time. -// It discards the monotonic portion. -func Time(key string, v time.Time) Attr { - return slog.Time(key, v) -} - -// Duration returns an Attr for a time.Duration. -func Duration(key string, v time.Duration) Attr { - return slog.Duration(key, v) -} - -// Group returns an Attr for a Group Value. -// The first argument is the key; the remaining arguments -// are converted to Attrs as in [Logger.Log]. -// -// Use Group to collect several key-value pairs under a single -// key on a log line, or as the result of LogValue -// in order to log a single value as multiple Attrs. -func Group(key string, args ...any) Attr { - return slog.Group(key, args...) -} - -// Any returns an Attr for the supplied value. -// See [Value.AnyValue] for how values are treated. -func Any(key string, value any) Attr { - return slog.Any(key, value) -} diff --git a/vendor/github.com/sagikazarmark/slog-shim/attr_120.go b/vendor/github.com/sagikazarmark/slog-shim/attr_120.go deleted file mode 100644 index b66481333..000000000 --- a/vendor/github.com/sagikazarmark/slog-shim/attr_120.go +++ /dev/null @@ -1,75 +0,0 @@ -// Copyright 2022 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -//go:build !go1.21 - -package slog - -import ( - "time" - - "golang.org/x/exp/slog" -) - -// An Attr is a key-value pair. -type Attr = slog.Attr - -// String returns an Attr for a string value. 
-func String(key, value string) Attr { - return slog.String(key, value) -} - -// Int64 returns an Attr for an int64. -func Int64(key string, value int64) Attr { - return slog.Int64(key, value) -} - -// Int converts an int to an int64 and returns -// an Attr with that value. -func Int(key string, value int) Attr { - return slog.Int(key, value) -} - -// Uint64 returns an Attr for a uint64. -func Uint64(key string, v uint64) Attr { - return slog.Uint64(key, v) -} - -// Float64 returns an Attr for a floating-point number. -func Float64(key string, v float64) Attr { - return slog.Float64(key, v) -} - -// Bool returns an Attr for a bool. -func Bool(key string, v bool) Attr { - return slog.Bool(key, v) -} - -// Time returns an Attr for a time.Time. -// It discards the monotonic portion. -func Time(key string, v time.Time) Attr { - return slog.Time(key, v) -} - -// Duration returns an Attr for a time.Duration. -func Duration(key string, v time.Duration) Attr { - return slog.Duration(key, v) -} - -// Group returns an Attr for a Group Value. -// The first argument is the key; the remaining arguments -// are converted to Attrs as in [Logger.Log]. -// -// Use Group to collect several key-value pairs under a single -// key on a log line, or as the result of LogValue -// in order to log a single value as multiple Attrs. -func Group(key string, args ...any) Attr { - return slog.Group(key, args...) -} - -// Any returns an Attr for the supplied value. -// See [Value.AnyValue] for how values are treated. -func Any(key string, value any) Attr { - return slog.Any(key, value) -} diff --git a/vendor/github.com/sagikazarmark/slog-shim/flake.lock b/vendor/github.com/sagikazarmark/slog-shim/flake.lock deleted file mode 100644 index 7e8898e9e..000000000 --- a/vendor/github.com/sagikazarmark/slog-shim/flake.lock +++ /dev/null @@ -1,273 +0,0 @@ -{ - "nodes": { - "devenv": { - "inputs": { - "flake-compat": "flake-compat", - "nix": "nix", - "nixpkgs": "nixpkgs", - "pre-commit-hooks": "pre-commit-hooks" - }, - "locked": { - "lastModified": 1694097209, - "narHash": "sha256-gQmBjjxeSyySjbh0yQVBKApo2KWIFqqbRUvG+Fa+QpM=", - "owner": "cachix", - "repo": "devenv", - "rev": "7a8e6a91510efe89d8dcb8e43233f93e86f6b189", - "type": "github" - }, - "original": { - "owner": "cachix", - "repo": "devenv", - "type": "github" - } - }, - "flake-compat": { - "flake": false, - "locked": { - "lastModified": 1673956053, - "narHash": "sha256-4gtG9iQuiKITOjNQQeQIpoIB6b16fm+504Ch3sNKLd8=", - "owner": "edolstra", - "repo": "flake-compat", - "rev": "35bb57c0c8d8b62bbfd284272c928ceb64ddbde9", - "type": "github" - }, - "original": { - "owner": "edolstra", - "repo": "flake-compat", - "type": "github" - } - }, - "flake-parts": { - "inputs": { - "nixpkgs-lib": "nixpkgs-lib" - }, - "locked": { - "lastModified": 1693611461, - "narHash": "sha256-aPODl8vAgGQ0ZYFIRisxYG5MOGSkIczvu2Cd8Gb9+1Y=", - "owner": "hercules-ci", - "repo": "flake-parts", - "rev": "7f53fdb7bdc5bb237da7fefef12d099e4fd611ca", - "type": "github" - }, - "original": { - "owner": "hercules-ci", - "repo": "flake-parts", - "type": "github" - } - }, - "flake-utils": { - "inputs": { - "systems": "systems" - }, - "locked": { - "lastModified": 1685518550, - "narHash": "sha256-o2d0KcvaXzTrPRIo0kOLV0/QXHhDQ5DTi+OxcjO8xqY=", - "owner": "numtide", - "repo": "flake-utils", - "rev": "a1720a10a6cfe8234c0e93907ffe81be440f4cef", - "type": "github" - }, - "original": { - "owner": "numtide", - "repo": "flake-utils", - "type": "github" - } - }, - "gitignore": { - "inputs": { - "nixpkgs": [ - "devenv", - 
"pre-commit-hooks", - "nixpkgs" - ] - }, - "locked": { - "lastModified": 1660459072, - "narHash": "sha256-8DFJjXG8zqoONA1vXtgeKXy68KdJL5UaXR8NtVMUbx8=", - "owner": "hercules-ci", - "repo": "gitignore.nix", - "rev": "a20de23b925fd8264fd7fad6454652e142fd7f73", - "type": "github" - }, - "original": { - "owner": "hercules-ci", - "repo": "gitignore.nix", - "type": "github" - } - }, - "lowdown-src": { - "flake": false, - "locked": { - "lastModified": 1633514407, - "narHash": "sha256-Dw32tiMjdK9t3ETl5fzGrutQTzh2rufgZV4A/BbxuD4=", - "owner": "kristapsdz", - "repo": "lowdown", - "rev": "d2c2b44ff6c27b936ec27358a2653caaef8f73b8", - "type": "github" - }, - "original": { - "owner": "kristapsdz", - "repo": "lowdown", - "type": "github" - } - }, - "nix": { - "inputs": { - "lowdown-src": "lowdown-src", - "nixpkgs": [ - "devenv", - "nixpkgs" - ], - "nixpkgs-regression": "nixpkgs-regression" - }, - "locked": { - "lastModified": 1676545802, - "narHash": "sha256-EK4rZ+Hd5hsvXnzSzk2ikhStJnD63odF7SzsQ8CuSPU=", - "owner": "domenkozar", - "repo": "nix", - "rev": "7c91803598ffbcfe4a55c44ac6d49b2cf07a527f", - "type": "github" - }, - "original": { - "owner": "domenkozar", - "ref": "relaxed-flakes", - "repo": "nix", - "type": "github" - } - }, - "nixpkgs": { - "locked": { - "lastModified": 1678875422, - "narHash": "sha256-T3o6NcQPwXjxJMn2shz86Chch4ljXgZn746c2caGxd8=", - "owner": "NixOS", - "repo": "nixpkgs", - "rev": "126f49a01de5b7e35a43fd43f891ecf6d3a51459", - "type": "github" - }, - "original": { - "owner": "NixOS", - "ref": "nixpkgs-unstable", - "repo": "nixpkgs", - "type": "github" - } - }, - "nixpkgs-lib": { - "locked": { - "dir": "lib", - "lastModified": 1693471703, - "narHash": "sha256-0l03ZBL8P1P6z8MaSDS/MvuU8E75rVxe5eE1N6gxeTo=", - "owner": "NixOS", - "repo": "nixpkgs", - "rev": "3e52e76b70d5508f3cec70b882a29199f4d1ee85", - "type": "github" - }, - "original": { - "dir": "lib", - "owner": "NixOS", - "ref": "nixos-unstable", - "repo": "nixpkgs", - "type": "github" - } - }, - "nixpkgs-regression": { - "locked": { - "lastModified": 1643052045, - "narHash": "sha256-uGJ0VXIhWKGXxkeNnq4TvV3CIOkUJ3PAoLZ3HMzNVMw=", - "owner": "NixOS", - "repo": "nixpkgs", - "rev": "215d4d0fd80ca5163643b03a33fde804a29cc1e2", - "type": "github" - }, - "original": { - "owner": "NixOS", - "repo": "nixpkgs", - "rev": "215d4d0fd80ca5163643b03a33fde804a29cc1e2", - "type": "github" - } - }, - "nixpkgs-stable": { - "locked": { - "lastModified": 1685801374, - "narHash": "sha256-otaSUoFEMM+LjBI1XL/xGB5ao6IwnZOXc47qhIgJe8U=", - "owner": "NixOS", - "repo": "nixpkgs", - "rev": "c37ca420157f4abc31e26f436c1145f8951ff373", - "type": "github" - }, - "original": { - "owner": "NixOS", - "ref": "nixos-23.05", - "repo": "nixpkgs", - "type": "github" - } - }, - "nixpkgs_2": { - "locked": { - "lastModified": 1694345580, - "narHash": "sha256-BbG0NUxQTz1dN/Y87yPWZc/0Kp/coJ0vM3+7sNa5kUM=", - "owner": "NixOS", - "repo": "nixpkgs", - "rev": "f002de6834fdde9c864f33c1ec51da7df19cd832", - "type": "github" - }, - "original": { - "owner": "NixOS", - "ref": "master", - "repo": "nixpkgs", - "type": "github" - } - }, - "pre-commit-hooks": { - "inputs": { - "flake-compat": [ - "devenv", - "flake-compat" - ], - "flake-utils": "flake-utils", - "gitignore": "gitignore", - "nixpkgs": [ - "devenv", - "nixpkgs" - ], - "nixpkgs-stable": "nixpkgs-stable" - }, - "locked": { - "lastModified": 1688056373, - "narHash": "sha256-2+SDlNRTKsgo3LBRiMUcoEUb6sDViRNQhzJquZ4koOI=", - "owner": "cachix", - "repo": "pre-commit-hooks.nix", - "rev": "5843cf069272d92b60c3ed9e55b7a8989c01d4c7", - 
"type": "github" - }, - "original": { - "owner": "cachix", - "repo": "pre-commit-hooks.nix", - "type": "github" - } - }, - "root": { - "inputs": { - "devenv": "devenv", - "flake-parts": "flake-parts", - "nixpkgs": "nixpkgs_2" - } - }, - "systems": { - "locked": { - "lastModified": 1681028828, - "narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=", - "owner": "nix-systems", - "repo": "default", - "rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e", - "type": "github" - }, - "original": { - "owner": "nix-systems", - "repo": "default", - "type": "github" - } - } - }, - "root": "root", - "version": 7 -} diff --git a/vendor/github.com/sagikazarmark/slog-shim/flake.nix b/vendor/github.com/sagikazarmark/slog-shim/flake.nix deleted file mode 100644 index 7239bbc2e..000000000 --- a/vendor/github.com/sagikazarmark/slog-shim/flake.nix +++ /dev/null @@ -1,57 +0,0 @@ -{ - inputs = { - # nixpkgs.url = "github:NixOS/nixpkgs/nixpkgs-unstable"; - nixpkgs.url = "github:NixOS/nixpkgs/master"; - flake-parts.url = "github:hercules-ci/flake-parts"; - devenv.url = "github:cachix/devenv"; - }; - - outputs = inputs@{ flake-parts, ... }: - flake-parts.lib.mkFlake { inherit inputs; } { - imports = [ - inputs.devenv.flakeModule - ]; - - systems = [ "x86_64-linux" "x86_64-darwin" "aarch64-darwin" ]; - - perSystem = { config, self', inputs', pkgs, system, ... }: rec { - devenv.shells = { - default = { - languages = { - go.enable = true; - go.package = pkgs.lib.mkDefault pkgs.go_1_21; - }; - - # https://github.com/cachix/devenv/issues/528#issuecomment-1556108767 - containers = pkgs.lib.mkForce { }; - }; - - ci = devenv.shells.default; - - ci_1_19 = { - imports = [ devenv.shells.ci ]; - - languages = { - go.package = pkgs.go_1_19; - }; - }; - - ci_1_20 = { - imports = [ devenv.shells.ci ]; - - languages = { - go.package = pkgs.go_1_20; - }; - }; - - ci_1_21 = { - imports = [ devenv.shells.ci ]; - - languages = { - go.package = pkgs.go_1_21; - }; - }; - }; - }; - }; -} diff --git a/vendor/github.com/sagikazarmark/slog-shim/handler.go b/vendor/github.com/sagikazarmark/slog-shim/handler.go deleted file mode 100644 index f55556ae1..000000000 --- a/vendor/github.com/sagikazarmark/slog-shim/handler.go +++ /dev/null @@ -1,45 +0,0 @@ -// Copyright 2022 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -//go:build go1.21 - -package slog - -import ( - "log/slog" -) - -// A Handler handles log records produced by a Logger.. -// -// A typical handler may print log records to standard error, -// or write them to a file or database, or perhaps augment them -// with additional attributes and pass them on to another handler. -// -// Any of the Handler's methods may be called concurrently with itself -// or with other methods. It is the responsibility of the Handler to -// manage this concurrency. -// -// Users of the slog package should not invoke Handler methods directly. -// They should use the methods of [Logger] instead. -type Handler = slog.Handler - -// HandlerOptions are options for a TextHandler or JSONHandler. -// A zero HandlerOptions consists entirely of default values. -type HandlerOptions = slog.HandlerOptions - -// Keys for "built-in" attributes. -const ( - // TimeKey is the key used by the built-in handlers for the time - // when the log method is called. The associated Value is a [time.Time]. - TimeKey = slog.TimeKey - // LevelKey is the key used by the built-in handlers for the level - // of the log call. 
The associated value is a [Level]. - LevelKey = slog.LevelKey - // MessageKey is the key used by the built-in handlers for the - // message of the log call. The associated value is a string. - MessageKey = slog.MessageKey - // SourceKey is the key used by the built-in handlers for the source file - // and line of the log call. The associated value is a string. - SourceKey = slog.SourceKey -) diff --git a/vendor/github.com/sagikazarmark/slog-shim/handler_120.go b/vendor/github.com/sagikazarmark/slog-shim/handler_120.go deleted file mode 100644 index 670057573..000000000 --- a/vendor/github.com/sagikazarmark/slog-shim/handler_120.go +++ /dev/null @@ -1,45 +0,0 @@ -// Copyright 2022 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -//go:build !go1.21 - -package slog - -import ( - "golang.org/x/exp/slog" -) - -// A Handler handles log records produced by a Logger.. -// -// A typical handler may print log records to standard error, -// or write them to a file or database, or perhaps augment them -// with additional attributes and pass them on to another handler. -// -// Any of the Handler's methods may be called concurrently with itself -// or with other methods. It is the responsibility of the Handler to -// manage this concurrency. -// -// Users of the slog package should not invoke Handler methods directly. -// They should use the methods of [Logger] instead. -type Handler = slog.Handler - -// HandlerOptions are options for a TextHandler or JSONHandler. -// A zero HandlerOptions consists entirely of default values. -type HandlerOptions = slog.HandlerOptions - -// Keys for "built-in" attributes. -const ( - // TimeKey is the key used by the built-in handlers for the time - // when the log method is called. The associated Value is a [time.Time]. - TimeKey = slog.TimeKey - // LevelKey is the key used by the built-in handlers for the level - // of the log call. The associated value is a [Level]. - LevelKey = slog.LevelKey - // MessageKey is the key used by the built-in handlers for the - // message of the log call. The associated value is a string. - MessageKey = slog.MessageKey - // SourceKey is the key used by the built-in handlers for the source file - // and line of the log call. The associated value is a string. - SourceKey = slog.SourceKey -) diff --git a/vendor/github.com/sagikazarmark/slog-shim/json_handler.go b/vendor/github.com/sagikazarmark/slog-shim/json_handler.go deleted file mode 100644 index 7c22bd81e..000000000 --- a/vendor/github.com/sagikazarmark/slog-shim/json_handler.go +++ /dev/null @@ -1,23 +0,0 @@ -// Copyright 2022 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -//go:build go1.21 - -package slog - -import ( - "io" - "log/slog" -) - -// JSONHandler is a Handler that writes Records to an io.Writer as -// line-delimited JSON objects. -type JSONHandler = slog.JSONHandler - -// NewJSONHandler creates a JSONHandler that writes to w, -// using the given options. -// If opts is nil, the default options are used. 
-func NewJSONHandler(w io.Writer, opts *HandlerOptions) *JSONHandler { - return slog.NewJSONHandler(w, opts) -} diff --git a/vendor/github.com/sagikazarmark/slog-shim/json_handler_120.go b/vendor/github.com/sagikazarmark/slog-shim/json_handler_120.go deleted file mode 100644 index 7b14f10ba..000000000 --- a/vendor/github.com/sagikazarmark/slog-shim/json_handler_120.go +++ /dev/null @@ -1,24 +0,0 @@ -// Copyright 2022 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -//go:build !go1.21 - -package slog - -import ( - "io" - - "golang.org/x/exp/slog" -) - -// JSONHandler is a Handler that writes Records to an io.Writer as -// line-delimited JSON objects. -type JSONHandler = slog.JSONHandler - -// NewJSONHandler creates a JSONHandler that writes to w, -// using the given options. -// If opts is nil, the default options are used. -func NewJSONHandler(w io.Writer, opts *HandlerOptions) *JSONHandler { - return slog.NewJSONHandler(w, opts) -} diff --git a/vendor/github.com/sagikazarmark/slog-shim/level.go b/vendor/github.com/sagikazarmark/slog-shim/level.go deleted file mode 100644 index 07288cf89..000000000 --- a/vendor/github.com/sagikazarmark/slog-shim/level.go +++ /dev/null @@ -1,61 +0,0 @@ -// Copyright 2022 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -//go:build go1.21 - -package slog - -import ( - "log/slog" -) - -// A Level is the importance or severity of a log event. -// The higher the level, the more important or severe the event. -type Level = slog.Level - -// Level numbers are inherently arbitrary, -// but we picked them to satisfy three constraints. -// Any system can map them to another numbering scheme if it wishes. -// -// First, we wanted the default level to be Info, Since Levels are ints, Info is -// the default value for int, zero. -// -// Second, we wanted to make it easy to use levels to specify logger verbosity. -// Since a larger level means a more severe event, a logger that accepts events -// with smaller (or more negative) level means a more verbose logger. Logger -// verbosity is thus the negation of event severity, and the default verbosity -// of 0 accepts all events at least as severe as INFO. -// -// Third, we wanted some room between levels to accommodate schemes with named -// levels between ours. For example, Google Cloud Logging defines a Notice level -// between Info and Warn. Since there are only a few of these intermediate -// levels, the gap between the numbers need not be large. Our gap of 4 matches -// OpenTelemetry's mapping. Subtracting 9 from an OpenTelemetry level in the -// DEBUG, INFO, WARN and ERROR ranges converts it to the corresponding slog -// Level range. OpenTelemetry also has the names TRACE and FATAL, which slog -// does not. But those OpenTelemetry levels can still be represented as slog -// Levels by using the appropriate integers. -// -// Names for common levels. -const ( - LevelDebug Level = slog.LevelDebug - LevelInfo Level = slog.LevelInfo - LevelWarn Level = slog.LevelWarn - LevelError Level = slog.LevelError -) - -// A LevelVar is a Level variable, to allow a Handler level to change -// dynamically. -// It implements Leveler as well as a Set method, -// and it is safe for use by multiple goroutines. -// The zero LevelVar corresponds to LevelInfo. -type LevelVar = slog.LevelVar - -// A Leveler provides a Level value. 
-// -// As Level itself implements Leveler, clients typically supply -// a Level value wherever a Leveler is needed, such as in HandlerOptions. -// Clients who need to vary the level dynamically can provide a more complex -// Leveler implementation such as *LevelVar. -type Leveler = slog.Leveler diff --git a/vendor/github.com/sagikazarmark/slog-shim/level_120.go b/vendor/github.com/sagikazarmark/slog-shim/level_120.go deleted file mode 100644 index d3feb9420..000000000 --- a/vendor/github.com/sagikazarmark/slog-shim/level_120.go +++ /dev/null @@ -1,61 +0,0 @@ -// Copyright 2022 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -//go:build !go1.21 - -package slog - -import ( - "golang.org/x/exp/slog" -) - -// A Level is the importance or severity of a log event. -// The higher the level, the more important or severe the event. -type Level = slog.Level - -// Level numbers are inherently arbitrary, -// but we picked them to satisfy three constraints. -// Any system can map them to another numbering scheme if it wishes. -// -// First, we wanted the default level to be Info, Since Levels are ints, Info is -// the default value for int, zero. -// -// Second, we wanted to make it easy to use levels to specify logger verbosity. -// Since a larger level means a more severe event, a logger that accepts events -// with smaller (or more negative) level means a more verbose logger. Logger -// verbosity is thus the negation of event severity, and the default verbosity -// of 0 accepts all events at least as severe as INFO. -// -// Third, we wanted some room between levels to accommodate schemes with named -// levels between ours. For example, Google Cloud Logging defines a Notice level -// between Info and Warn. Since there are only a few of these intermediate -// levels, the gap between the numbers need not be large. Our gap of 4 matches -// OpenTelemetry's mapping. Subtracting 9 from an OpenTelemetry level in the -// DEBUG, INFO, WARN and ERROR ranges converts it to the corresponding slog -// Level range. OpenTelemetry also has the names TRACE and FATAL, which slog -// does not. But those OpenTelemetry levels can still be represented as slog -// Levels by using the appropriate integers. -// -// Names for common levels. -const ( - LevelDebug Level = slog.LevelDebug - LevelInfo Level = slog.LevelInfo - LevelWarn Level = slog.LevelWarn - LevelError Level = slog.LevelError -) - -// A LevelVar is a Level variable, to allow a Handler level to change -// dynamically. -// It implements Leveler as well as a Set method, -// and it is safe for use by multiple goroutines. -// The zero LevelVar corresponds to LevelInfo. -type LevelVar = slog.LevelVar - -// A Leveler provides a Level value. -// -// As Level itself implements Leveler, clients typically supply -// a Level value wherever a Leveler is needed, such as in HandlerOptions. -// Clients who need to vary the level dynamically can provide a more complex -// Leveler implementation such as *LevelVar. -type Leveler = slog.Leveler diff --git a/vendor/github.com/sagikazarmark/slog-shim/logger.go b/vendor/github.com/sagikazarmark/slog-shim/logger.go deleted file mode 100644 index e80036bec..000000000 --- a/vendor/github.com/sagikazarmark/slog-shim/logger.go +++ /dev/null @@ -1,98 +0,0 @@ -// Copyright 2022 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
- -//go:build go1.21 - -package slog - -import ( - "context" - "log" - "log/slog" -) - -// Default returns the default Logger. -func Default() *Logger { return slog.Default() } - -// SetDefault makes l the default Logger. -// After this call, output from the log package's default Logger -// (as with [log.Print], etc.) will be logged at LevelInfo using l's Handler. -func SetDefault(l *Logger) { - slog.SetDefault(l) -} - -// A Logger records structured information about each call to its -// Log, Debug, Info, Warn, and Error methods. -// For each call, it creates a Record and passes it to a Handler. -// -// To create a new Logger, call [New] or a Logger method -// that begins "With". -type Logger = slog.Logger - -// New creates a new Logger with the given non-nil Handler. -func New(h Handler) *Logger { - return slog.New(h) -} - -// With calls Logger.With on the default logger. -func With(args ...any) *Logger { - return slog.With(args...) -} - -// NewLogLogger returns a new log.Logger such that each call to its Output method -// dispatches a Record to the specified handler. The logger acts as a bridge from -// the older log API to newer structured logging handlers. -func NewLogLogger(h Handler, level Level) *log.Logger { - return slog.NewLogLogger(h, level) -} - -// Debug calls Logger.Debug on the default logger. -func Debug(msg string, args ...any) { - slog.Debug(msg, args...) -} - -// DebugContext calls Logger.DebugContext on the default logger. -func DebugContext(ctx context.Context, msg string, args ...any) { - slog.DebugContext(ctx, msg, args...) -} - -// Info calls Logger.Info on the default logger. -func Info(msg string, args ...any) { - slog.Info(msg, args...) -} - -// InfoContext calls Logger.InfoContext on the default logger. -func InfoContext(ctx context.Context, msg string, args ...any) { - slog.InfoContext(ctx, msg, args...) -} - -// Warn calls Logger.Warn on the default logger. -func Warn(msg string, args ...any) { - slog.Warn(msg, args...) -} - -// WarnContext calls Logger.WarnContext on the default logger. -func WarnContext(ctx context.Context, msg string, args ...any) { - slog.WarnContext(ctx, msg, args...) -} - -// Error calls Logger.Error on the default logger. -func Error(msg string, args ...any) { - slog.Error(msg, args...) -} - -// ErrorContext calls Logger.ErrorContext on the default logger. -func ErrorContext(ctx context.Context, msg string, args ...any) { - slog.ErrorContext(ctx, msg, args...) -} - -// Log calls Logger.Log on the default logger. -func Log(ctx context.Context, level Level, msg string, args ...any) { - slog.Log(ctx, level, msg, args...) -} - -// LogAttrs calls Logger.LogAttrs on the default logger. -func LogAttrs(ctx context.Context, level Level, msg string, attrs ...Attr) { - slog.LogAttrs(ctx, level, msg, attrs...) -} diff --git a/vendor/github.com/sagikazarmark/slog-shim/logger_120.go b/vendor/github.com/sagikazarmark/slog-shim/logger_120.go deleted file mode 100644 index 97ebdd5e1..000000000 --- a/vendor/github.com/sagikazarmark/slog-shim/logger_120.go +++ /dev/null @@ -1,99 +0,0 @@ -// Copyright 2022 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -//go:build !go1.21 - -package slog - -import ( - "context" - "log" - - "golang.org/x/exp/slog" -) - -// Default returns the default Logger. -func Default() *Logger { return slog.Default() } - -// SetDefault makes l the default Logger. 
-// After this call, output from the log package's default Logger -// (as with [log.Print], etc.) will be logged at LevelInfo using l's Handler. -func SetDefault(l *Logger) { - slog.SetDefault(l) -} - -// A Logger records structured information about each call to its -// Log, Debug, Info, Warn, and Error methods. -// For each call, it creates a Record and passes it to a Handler. -// -// To create a new Logger, call [New] or a Logger method -// that begins "With". -type Logger = slog.Logger - -// New creates a new Logger with the given non-nil Handler. -func New(h Handler) *Logger { - return slog.New(h) -} - -// With calls Logger.With on the default logger. -func With(args ...any) *Logger { - return slog.With(args...) -} - -// NewLogLogger returns a new log.Logger such that each call to its Output method -// dispatches a Record to the specified handler. The logger acts as a bridge from -// the older log API to newer structured logging handlers. -func NewLogLogger(h Handler, level Level) *log.Logger { - return slog.NewLogLogger(h, level) -} - -// Debug calls Logger.Debug on the default logger. -func Debug(msg string, args ...any) { - slog.Debug(msg, args...) -} - -// DebugContext calls Logger.DebugContext on the default logger. -func DebugContext(ctx context.Context, msg string, args ...any) { - slog.DebugContext(ctx, msg, args...) -} - -// Info calls Logger.Info on the default logger. -func Info(msg string, args ...any) { - slog.Info(msg, args...) -} - -// InfoContext calls Logger.InfoContext on the default logger. -func InfoContext(ctx context.Context, msg string, args ...any) { - slog.InfoContext(ctx, msg, args...) -} - -// Warn calls Logger.Warn on the default logger. -func Warn(msg string, args ...any) { - slog.Warn(msg, args...) -} - -// WarnContext calls Logger.WarnContext on the default logger. -func WarnContext(ctx context.Context, msg string, args ...any) { - slog.WarnContext(ctx, msg, args...) -} - -// Error calls Logger.Error on the default logger. -func Error(msg string, args ...any) { - slog.Error(msg, args...) -} - -// ErrorContext calls Logger.ErrorContext on the default logger. -func ErrorContext(ctx context.Context, msg string, args ...any) { - slog.ErrorContext(ctx, msg, args...) -} - -// Log calls Logger.Log on the default logger. -func Log(ctx context.Context, level Level, msg string, args ...any) { - slog.Log(ctx, level, msg, args...) -} - -// LogAttrs calls Logger.LogAttrs on the default logger. -func LogAttrs(ctx context.Context, level Level, msg string, attrs ...Attr) { - slog.LogAttrs(ctx, level, msg, attrs...) -} diff --git a/vendor/github.com/sagikazarmark/slog-shim/record.go b/vendor/github.com/sagikazarmark/slog-shim/record.go deleted file mode 100644 index 85ad1f784..000000000 --- a/vendor/github.com/sagikazarmark/slog-shim/record.go +++ /dev/null @@ -1,31 +0,0 @@ -// Copyright 2022 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -//go:build go1.21 - -package slog - -import ( - "log/slog" - "time" -) - -// A Record holds information about a log event. -// Copies of a Record share state. -// Do not modify a Record after handing out a copy to it. -// Call [NewRecord] to create a new Record. -// Use [Record.Clone] to create a copy with no shared state. -type Record = slog.Record - -// NewRecord creates a Record from the given arguments. -// Use [Record.AddAttrs] to add attributes to the Record. 
-// -// NewRecord is intended for logging APIs that want to support a [Handler] as -// a backend. -func NewRecord(t time.Time, level Level, msg string, pc uintptr) Record { - return slog.NewRecord(t, level, msg, pc) -} - -// Source describes the location of a line of source code. -type Source = slog.Source diff --git a/vendor/github.com/sagikazarmark/slog-shim/record_120.go b/vendor/github.com/sagikazarmark/slog-shim/record_120.go deleted file mode 100644 index c2eaf4e79..000000000 --- a/vendor/github.com/sagikazarmark/slog-shim/record_120.go +++ /dev/null @@ -1,32 +0,0 @@ -// Copyright 2022 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -//go:build !go1.21 - -package slog - -import ( - "time" - - "golang.org/x/exp/slog" -) - -// A Record holds information about a log event. -// Copies of a Record share state. -// Do not modify a Record after handing out a copy to it. -// Call [NewRecord] to create a new Record. -// Use [Record.Clone] to create a copy with no shared state. -type Record = slog.Record - -// NewRecord creates a Record from the given arguments. -// Use [Record.AddAttrs] to add attributes to the Record. -// -// NewRecord is intended for logging APIs that want to support a [Handler] as -// a backend. -func NewRecord(t time.Time, level Level, msg string, pc uintptr) Record { - return slog.NewRecord(t, level, msg, pc) -} - -// Source describes the location of a line of source code. -type Source = slog.Source diff --git a/vendor/github.com/sagikazarmark/slog-shim/text_handler.go b/vendor/github.com/sagikazarmark/slog-shim/text_handler.go deleted file mode 100644 index 45f6cfcba..000000000 --- a/vendor/github.com/sagikazarmark/slog-shim/text_handler.go +++ /dev/null @@ -1,23 +0,0 @@ -// Copyright 2022 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -//go:build go1.21 - -package slog - -import ( - "io" - "log/slog" -) - -// TextHandler is a Handler that writes Records to an io.Writer as a -// sequence of key=value pairs separated by spaces and followed by a newline. -type TextHandler = slog.TextHandler - -// NewTextHandler creates a TextHandler that writes to w, -// using the given options. -// If opts is nil, the default options are used. -func NewTextHandler(w io.Writer, opts *HandlerOptions) *TextHandler { - return slog.NewTextHandler(w, opts) -} diff --git a/vendor/github.com/sagikazarmark/slog-shim/text_handler_120.go b/vendor/github.com/sagikazarmark/slog-shim/text_handler_120.go deleted file mode 100644 index a69d63cce..000000000 --- a/vendor/github.com/sagikazarmark/slog-shim/text_handler_120.go +++ /dev/null @@ -1,24 +0,0 @@ -// Copyright 2022 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -//go:build !go1.21 - -package slog - -import ( - "io" - - "golang.org/x/exp/slog" -) - -// TextHandler is a Handler that writes Records to an io.Writer as a -// sequence of key=value pairs separated by spaces and followed by a newline. -type TextHandler = slog.TextHandler - -// NewTextHandler creates a TextHandler that writes to w, -// using the given options. -// If opts is nil, the default options are used. 
-func NewTextHandler(w io.Writer, opts *HandlerOptions) *TextHandler { - return slog.NewTextHandler(w, opts) -} diff --git a/vendor/github.com/sagikazarmark/slog-shim/value.go b/vendor/github.com/sagikazarmark/slog-shim/value.go deleted file mode 100644 index 61173eb94..000000000 --- a/vendor/github.com/sagikazarmark/slog-shim/value.go +++ /dev/null @@ -1,109 +0,0 @@ -// Copyright 2022 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -//go:build go1.21 - -package slog - -import ( - "log/slog" - "time" -) - -// A Value can represent any Go value, but unlike type any, -// it can represent most small values without an allocation. -// The zero Value corresponds to nil. -type Value = slog.Value - -// Kind is the kind of a Value. -type Kind = slog.Kind - -// The following list is sorted alphabetically, but it's also important that -// KindAny is 0 so that a zero Value represents nil. -const ( - KindAny = slog.KindAny - KindBool = slog.KindBool - KindDuration = slog.KindDuration - KindFloat64 = slog.KindFloat64 - KindInt64 = slog.KindInt64 - KindString = slog.KindString - KindTime = slog.KindTime - KindUint64 = slog.KindUint64 - KindGroup = slog.KindGroup - KindLogValuer = slog.KindLogValuer -) - -//////////////// Constructors - -// StringValue returns a new Value for a string. -func StringValue(value string) Value { - return slog.StringValue(value) -} - -// IntValue returns a Value for an int. -func IntValue(v int) Value { - return slog.IntValue(v) -} - -// Int64Value returns a Value for an int64. -func Int64Value(v int64) Value { - return slog.Int64Value(v) -} - -// Uint64Value returns a Value for a uint64. -func Uint64Value(v uint64) Value { - return slog.Uint64Value(v) -} - -// Float64Value returns a Value for a floating-point number. -func Float64Value(v float64) Value { - return slog.Float64Value(v) -} - -// BoolValue returns a Value for a bool. -func BoolValue(v bool) Value { - return slog.BoolValue(v) -} - -// TimeValue returns a Value for a time.Time. -// It discards the monotonic portion. -func TimeValue(v time.Time) Value { - return slog.TimeValue(v) -} - -// DurationValue returns a Value for a time.Duration. -func DurationValue(v time.Duration) Value { - return slog.DurationValue(v) -} - -// GroupValue returns a new Value for a list of Attrs. -// The caller must not subsequently mutate the argument slice. -func GroupValue(as ...Attr) Value { - return slog.GroupValue(as...) -} - -// AnyValue returns a Value for the supplied value. -// -// If the supplied value is of type Value, it is returned -// unmodified. -// -// Given a value of one of Go's predeclared string, bool, or -// (non-complex) numeric types, AnyValue returns a Value of kind -// String, Bool, Uint64, Int64, or Float64. The width of the -// original numeric type is not preserved. -// -// Given a time.Time or time.Duration value, AnyValue returns a Value of kind -// KindTime or KindDuration. The monotonic time is not preserved. -// -// For nil, or values of all other types, including named types whose -// underlying type is numeric, AnyValue returns a value of kind KindAny. -func AnyValue(v any) Value { - return slog.AnyValue(v) -} - -// A LogValuer is any Go value that can convert itself into a Value for logging. -// -// This mechanism may be used to defer expensive operations until they are -// needed, or to expand a single value into a sequence of components. 
-type LogValuer = slog.LogValuer diff --git a/vendor/github.com/sagikazarmark/slog-shim/value_120.go b/vendor/github.com/sagikazarmark/slog-shim/value_120.go deleted file mode 100644 index 0f9f871ee..000000000 --- a/vendor/github.com/sagikazarmark/slog-shim/value_120.go +++ /dev/null @@ -1,110 +0,0 @@ -// Copyright 2022 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -//go:build !go1.21 - -package slog - -import ( - "time" - - "golang.org/x/exp/slog" -) - -// A Value can represent any Go value, but unlike type any, -// it can represent most small values without an allocation. -// The zero Value corresponds to nil. -type Value = slog.Value - -// Kind is the kind of a Value. -type Kind = slog.Kind - -// The following list is sorted alphabetically, but it's also important that -// KindAny is 0 so that a zero Value represents nil. -const ( - KindAny = slog.KindAny - KindBool = slog.KindBool - KindDuration = slog.KindDuration - KindFloat64 = slog.KindFloat64 - KindInt64 = slog.KindInt64 - KindString = slog.KindString - KindTime = slog.KindTime - KindUint64 = slog.KindUint64 - KindGroup = slog.KindGroup - KindLogValuer = slog.KindLogValuer -) - -//////////////// Constructors - -// StringValue returns a new Value for a string. -func StringValue(value string) Value { - return slog.StringValue(value) -} - -// IntValue returns a Value for an int. -func IntValue(v int) Value { - return slog.IntValue(v) -} - -// Int64Value returns a Value for an int64. -func Int64Value(v int64) Value { - return slog.Int64Value(v) -} - -// Uint64Value returns a Value for a uint64. -func Uint64Value(v uint64) Value { - return slog.Uint64Value(v) -} - -// Float64Value returns a Value for a floating-point number. -func Float64Value(v float64) Value { - return slog.Float64Value(v) -} - -// BoolValue returns a Value for a bool. -func BoolValue(v bool) Value { - return slog.BoolValue(v) -} - -// TimeValue returns a Value for a time.Time. -// It discards the monotonic portion. -func TimeValue(v time.Time) Value { - return slog.TimeValue(v) -} - -// DurationValue returns a Value for a time.Duration. -func DurationValue(v time.Duration) Value { - return slog.DurationValue(v) -} - -// GroupValue returns a new Value for a list of Attrs. -// The caller must not subsequently mutate the argument slice. -func GroupValue(as ...Attr) Value { - return slog.GroupValue(as...) -} - -// AnyValue returns a Value for the supplied value. -// -// If the supplied value is of type Value, it is returned -// unmodified. -// -// Given a value of one of Go's predeclared string, bool, or -// (non-complex) numeric types, AnyValue returns a Value of kind -// String, Bool, Uint64, Int64, or Float64. The width of the -// original numeric type is not preserved. -// -// Given a time.Time or time.Duration value, AnyValue returns a Value of kind -// KindTime or KindDuration. The monotonic time is not preserved. -// -// For nil, or values of all other types, including named types whose -// underlying type is numeric, AnyValue returns a value of kind KindAny. -func AnyValue(v any) Value { - return slog.AnyValue(v) -} - -// A LogValuer is any Go value that can convert itself into a Value for logging. -// -// This mechanism may be used to defer expensive operations until they are -// needed, or to expand a single value into a sequence of components. 
-type LogValuer = slog.LogValuer diff --git a/vendor/github.com/sanposhiho/wastedassign/v2/LICENSE b/vendor/github.com/sanposhiho/wastedassign/v2/LICENSE deleted file mode 100644 index 4ed7724fe..000000000 --- a/vendor/github.com/sanposhiho/wastedassign/v2/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -MIT License - -Copyright (c) 2020 Kensei Nakada - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. diff --git a/vendor/github.com/sanposhiho/wastedassign/v2/README.md b/vendor/github.com/sanposhiho/wastedassign/v2/README.md deleted file mode 100644 index 6b736f7f1..000000000 --- a/vendor/github.com/sanposhiho/wastedassign/v2/README.md +++ /dev/null @@ -1,75 +0,0 @@ -# wastedassign -`wastedassign` finds wasted assignment statements - -found the value ... - -- reassigned, but never used afterward -- reassigned, but reassigned without using the value - -## Example - -```go -package main - -import "fmt" - -func f() int { - a := 0 - b := 0 - fmt.Print(a) - fmt.Print(b) - a = 1 // This reassignment is wasted, because never used afterwards. Wastedassign find this - - b = 1 // This reassignment is wasted, because reassigned without use this value. Wastedassign find this - b = 2 - fmt.Print(b) - - return 1 + 2 -} -``` - - -```bash -$ go vet -vettool=`which wastedassign` sample.go -# command-line-arguments -./sample.go:10:2: assigned to a, but never used afterwards -./sample.go:12:2: assigned to b, but reassigned without using the value -``` - - -## Installation - - -### Go version < 1.16 - -``` -go get -u github.com/sanposhiho/wastedassign/v2/cmd/wastedassign -``` - -### Go version 1.16+ - -``` -go install github.com/sanposhiho/wastedassign/v2/cmd/wastedassign@latest -``` - -## Usage - -``` -# in your project - -go vet -vettool=`which wastedassign` ./... -``` - -And, you can use wastedassign in [golangci-lint](https://github.com/golangci/golangci-lint). - -## Contribution - -I am waiting for your contribution :D - -Feel free to create an issue or a PR! 
- -### Run test - -``` -go test -``` diff --git a/vendor/github.com/sanposhiho/wastedassign/v2/wastedassign.go b/vendor/github.com/sanposhiho/wastedassign/v2/wastedassign.go deleted file mode 100644 index e0c0da616..000000000 --- a/vendor/github.com/sanposhiho/wastedassign/v2/wastedassign.go +++ /dev/null @@ -1,272 +0,0 @@ -package wastedassign - -import ( - "errors" - "fmt" - "go/ast" - "go/token" - "go/types" - - "golang.org/x/tools/go/analysis" - "golang.org/x/tools/go/analysis/passes/inspect" - "golang.org/x/tools/go/ast/inspector" - "golang.org/x/tools/go/ssa" -) - -const doc = "wastedassign finds wasted assignment statements." - -// Analyzer is the wastedassign analyzer. -var Analyzer = &analysis.Analyzer{ - Name: "wastedassign", - Doc: doc, - Run: run, - Requires: []*analysis.Analyzer{ - inspect.Analyzer, - }, -} - -type wastedAssignStruct struct { - pos token.Pos - reason string -} - -func run(pass *analysis.Pass) (interface{}, error) { - // Plundered from buildssa.Run. - prog := ssa.NewProgram(pass.Fset, ssa.NaiveForm) - - // Create SSA packages for all imports. - // Order is not significant. - created := make(map[*types.Package]bool) - var createAll func(pkgs []*types.Package) - createAll = func(pkgs []*types.Package) { - for _, p := range pkgs { - if !created[p] { - created[p] = true - prog.CreatePackage(p, nil, nil, true) - createAll(p.Imports()) - } - } - } - createAll(pass.Pkg.Imports()) - - // Create and build the primary package. - ssapkg := prog.CreatePackage(pass.Pkg, pass.Files, pass.TypesInfo, false) - ssapkg.Build() - - var srcFuncs []*ssa.Function - for _, f := range pass.Files { - for _, decl := range f.Decls { - if fdecl, ok := decl.(*ast.FuncDecl); ok { - - // SSA will not build a Function - // for a FuncDecl named blank. - // That's arguably too strict but - // relaxing it would break uniqueness of - // names of package members. - if fdecl.Name.Name == "_" { - continue - } - - // (init functions have distinct Func - // objects named "init" and distinct - // ssa.Functions named "init#1", ...) - - fn := pass.TypesInfo.Defs[fdecl.Name].(*types.Func) - if fn == nil { - return nil, errors.New("failed to get func's typesinfo") - } - - f := ssapkg.Prog.FuncValue(fn) - if f == nil { - return nil, errors.New("failed to get func's SSA-form intermediate representation") - } - - var addAnons func(f *ssa.Function) - addAnons = func(f *ssa.Function) { - srcFuncs = append(srcFuncs, f) - for _, anon := range f.AnonFuncs { - addAnons(anon) - } - } - addAnons(f) - } - } - } - - typeSwitchPos := map[int]bool{} - inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) - inspect.Preorder([]ast.Node{new(ast.TypeSwitchStmt)}, func(n ast.Node) { - if _, ok := n.(*ast.TypeSwitchStmt); ok { - typeSwitchPos[pass.Fset.Position(n.Pos()).Line] = true - } - }) - - var wastedAssignMap []wastedAssignStruct - - for _, sf := range srcFuncs { - for _, bl := range sf.Blocks { - blCopy := *bl - for _, ist := range bl.Instrs { - blCopy.Instrs = rmInstrFromInstrs(blCopy.Instrs, ist) - if _, ok := ist.(*ssa.Store); !ok { - continue - } - - var buf [10]*ssa.Value - for _, op := range ist.Operands(buf[:0]) { - if (*op) == nil || !opInLocals(sf.Locals, op) { - continue - } - - reason := isNextOperationToOpIsStore([]*ssa.BasicBlock{&blCopy}, op, nil) - if reason == notWasted { - continue - } - - if ist.Pos() == 0 || typeSwitchPos[pass.Fset.Position(ist.Pos()).Line] { - continue - } - - v, ok := (*op).(*ssa.Alloc) - if !ok { - // This block should never have been executed. 
- continue - } - wastedAssignMap = append(wastedAssignMap, wastedAssignStruct{ - pos: ist.Pos(), - reason: reason.String(v), - }) - } - } - } - } - - for _, was := range wastedAssignMap { - pass.Reportf(was.pos, was.reason) - } - - return nil, nil -} - -type wastedReason string - -const ( - noUseUntilReturn wastedReason = "assigned, but never used afterwards" - reassignedSoon wastedReason = "wasted assignment" - notWasted wastedReason = "" -) - -func (wr wastedReason) String(a *ssa.Alloc) string { - switch wr { - case noUseUntilReturn: - return fmt.Sprintf("assigned to %s, but never used afterwards", a.Comment) - case reassignedSoon: - return fmt.Sprintf("assigned to %s, but reassigned without using the value", a.Comment) - case notWasted: - return "" - default: - return "" - } -} - -func isNextOperationToOpIsStore(bls []*ssa.BasicBlock, currentOp *ssa.Value, haveCheckedMap map[int]int) wastedReason { - var wastedReasons []wastedReason - var wastedReasonsCurrentBls []wastedReason - - if haveCheckedMap == nil { - haveCheckedMap = map[int]int{} - } - - for _, bl := range bls { - if haveCheckedMap[bl.Index] == 2 { - continue - } - - haveCheckedMap[bl.Index]++ - breakFlag := false - for _, ist := range bl.Instrs { - if breakFlag { - break - } - - switch w := ist.(type) { - case *ssa.Store: - var buf [10]*ssa.Value - for _, op := range ist.Operands(buf[:0]) { - if *op == *currentOp { - if w.Addr.Name() == (*currentOp).Name() { - wastedReasonsCurrentBls = append(wastedReasonsCurrentBls, reassignedSoon) - breakFlag = true - break - } else { - return notWasted - } - } - } - default: - var buf [10]*ssa.Value - for _, op := range ist.Operands(buf[:0]) { - if *op == *currentOp { - // It wasn't a continuous store. - return notWasted - } - } - } - } - - if len(bl.Succs) != 0 && !breakFlag { - wastedReason := isNextOperationToOpIsStore(rmSameBlock(bl.Succs, bl), currentOp, haveCheckedMap) - if wastedReason == notWasted { - return notWasted - } - wastedReasons = append(wastedReasons, wastedReason) - } - } - - wastedReasons = append(wastedReasons, wastedReasonsCurrentBls...) 
- - if len(wastedReasons) != 0 && containReassignedSoon(wastedReasons) { - return reassignedSoon - } - - return noUseUntilReturn -} - -func rmSameBlock(bls []*ssa.BasicBlock, currentBl *ssa.BasicBlock) []*ssa.BasicBlock { - var rto []*ssa.BasicBlock - - for _, bl := range bls { - if bl != currentBl { - rto = append(rto, bl) - } - } - return rto -} - -func containReassignedSoon(ws []wastedReason) bool { - for _, w := range ws { - if w == reassignedSoon { - return true - } - } - return false -} - -func rmInstrFromInstrs(instrs []ssa.Instruction, instrToRm ssa.Instruction) []ssa.Instruction { - var rto []ssa.Instruction - for _, i := range instrs { - if i != instrToRm { - rto = append(rto, i) - } - } - return rto -} - -func opInLocals(locals []*ssa.Alloc, op *ssa.Value) bool { - for _, l := range locals { - if *op == ssa.Value(l) { - return true - } - } - return false -} diff --git a/vendor/github.com/sashamelentyev/interfacebloat/LICENSE b/vendor/github.com/sashamelentyev/interfacebloat/LICENSE deleted file mode 100644 index a52602b3d..000000000 --- a/vendor/github.com/sashamelentyev/interfacebloat/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -MIT License - -Copyright (c) 2022 Sasha Melentyev - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. diff --git a/vendor/github.com/sashamelentyev/interfacebloat/pkg/analyzer/analyzer.go b/vendor/github.com/sashamelentyev/interfacebloat/pkg/analyzer/analyzer.go deleted file mode 100644 index 4a6afdf8c..000000000 --- a/vendor/github.com/sashamelentyev/interfacebloat/pkg/analyzer/analyzer.go +++ /dev/null @@ -1,57 +0,0 @@ -package analyzer - -import ( - "flag" - "go/ast" - - "golang.org/x/tools/go/analysis" - "golang.org/x/tools/go/analysis/passes/inspect" - "golang.org/x/tools/go/ast/inspector" -) - -const InterfaceMaxMethodsFlag = "max" - -const defaultMaxMethods = 10 - -// New returns new interfacebloat analyzer. 
-func New() *analysis.Analyzer { - return &analysis.Analyzer{ - Name: "interfacebloat", - Doc: "A linter that checks the number of methods inside an interface.", - Run: run, - Flags: flags(), - Requires: []*analysis.Analyzer{inspect.Analyzer}, - } -} - -func flags() flag.FlagSet { - flags := flag.NewFlagSet("", flag.ExitOnError) - flags.Int(InterfaceMaxMethodsFlag, 10, "maximum number of methods") - return *flags -} - -func run(pass *analysis.Pass) (interface{}, error) { - maxMethods, ok := pass.Analyzer.Flags.Lookup(InterfaceMaxMethodsFlag).Value.(flag.Getter).Get().(int) - if !ok { - maxMethods = defaultMaxMethods - } - - insp := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) - - filter := []ast.Node{ - (*ast.InterfaceType)(nil), - } - - insp.Preorder(filter, func(node ast.Node) { - i, ok := node.(*ast.InterfaceType) - if !ok { - return - } - - if len(i.Methods.List) > maxMethods { - pass.Reportf(node.Pos(), `the interface has more than %d methods: %d`, maxMethods, len(i.Methods.List)) - } - }) - - return nil, nil -} diff --git a/vendor/github.com/sashamelentyev/usestdlibvars/LICENSE b/vendor/github.com/sashamelentyev/usestdlibvars/LICENSE deleted file mode 100644 index a52602b3d..000000000 --- a/vendor/github.com/sashamelentyev/usestdlibvars/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -MIT License - -Copyright (c) 2022 Sasha Melentyev - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. diff --git a/vendor/github.com/sashamelentyev/usestdlibvars/pkg/analyzer/analyzer.go b/vendor/github.com/sashamelentyev/usestdlibvars/pkg/analyzer/analyzer.go deleted file mode 100644 index 4d6ab3cca..000000000 --- a/vendor/github.com/sashamelentyev/usestdlibvars/pkg/analyzer/analyzer.go +++ /dev/null @@ -1,569 +0,0 @@ -package analyzer - -import ( - "flag" - "go/ast" - "go/token" - "strings" - - "golang.org/x/tools/go/analysis" - "golang.org/x/tools/go/analysis/passes/inspect" - "golang.org/x/tools/go/ast/inspector" - - "github.com/sashamelentyev/usestdlibvars/pkg/analyzer/internal/mapping" -) - -const ( - TimeWeekdayFlag = "time-weekday" - TimeMonthFlag = "time-month" - TimeLayoutFlag = "time-layout" - CryptoHashFlag = "crypto-hash" - HTTPMethodFlag = "http-method" - HTTPStatusCodeFlag = "http-status-code" - RPCDefaultPathFlag = "rpc-default-path" - OSDevNullFlag = "os-dev-null" - SQLIsolationLevelFlag = "sql-isolation-level" - TLSSignatureSchemeFlag = "tls-signature-scheme" - ConstantKindFlag = "constant-kind" - SyslogPriorityFlag = "syslog-priority" -) - -// New returns new usestdlibvars analyzer. 
-func New() *analysis.Analyzer { - return &analysis.Analyzer{ - Name: "usestdlibvars", - Doc: "A linter that detect the possibility to use variables/constants from the Go standard library.", - Run: run, - Flags: flags(), - Requires: []*analysis.Analyzer{inspect.Analyzer}, - } -} - -func flags() flag.FlagSet { - flags := flag.NewFlagSet("", flag.ExitOnError) - flags.Bool(HTTPMethodFlag, true, "suggest the use of http.MethodXX") - flags.Bool(HTTPStatusCodeFlag, true, "suggest the use of http.StatusXX") - flags.Bool(TimeWeekdayFlag, false, "suggest the use of time.Weekday.String()") - flags.Bool(TimeMonthFlag, false, "suggest the use of time.Month.String()") - flags.Bool(TimeLayoutFlag, false, "suggest the use of time.Layout") - flags.Bool(CryptoHashFlag, false, "suggest the use of crypto.Hash.String()") - flags.Bool(RPCDefaultPathFlag, false, "suggest the use of rpc.DefaultXXPath") - flags.Bool(OSDevNullFlag, false, "[DEPRECATED] suggest the use of os.DevNull") - flags.Bool(SQLIsolationLevelFlag, false, "suggest the use of sql.LevelXX.String()") - flags.Bool(TLSSignatureSchemeFlag, false, "suggest the use of tls.SignatureScheme.String()") - flags.Bool(ConstantKindFlag, false, "suggest the use of constant.Kind.String()") - flags.Bool(SyslogPriorityFlag, false, "[DEPRECATED] suggest the use of syslog.Priority") - return *flags -} - -func run(pass *analysis.Pass) (interface{}, error) { - insp := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) - - types := []ast.Node{ - (*ast.CallExpr)(nil), - (*ast.BasicLit)(nil), - (*ast.CompositeLit)(nil), - (*ast.IfStmt)(nil), - (*ast.SwitchStmt)(nil), - (*ast.ForStmt)(nil), - } - - insp.Preorder(types, func(node ast.Node) { - switch n := node.(type) { - case *ast.CallExpr: - fun, ok := n.Fun.(*ast.SelectorExpr) - if !ok { - return - } - - x, ok := fun.X.(*ast.Ident) - if !ok { - return - } - - funArgs(pass, x, fun, n.Args) - - case *ast.BasicLit: - for _, c := range []struct { - flag string - checkFunc func(pass *analysis.Pass, basicLit *ast.BasicLit) - }{ - {flag: TimeWeekdayFlag, checkFunc: checkTimeWeekday}, - {flag: TimeMonthFlag, checkFunc: checkTimeMonth}, - {flag: TimeLayoutFlag, checkFunc: checkTimeLayout}, - {flag: CryptoHashFlag, checkFunc: checkCryptoHash}, - {flag: RPCDefaultPathFlag, checkFunc: checkRPCDefaultPath}, - {flag: OSDevNullFlag, checkFunc: checkOSDevNull}, - {flag: SQLIsolationLevelFlag, checkFunc: checkSQLIsolationLevel}, - {flag: TLSSignatureSchemeFlag, checkFunc: checkTLSSignatureScheme}, - {flag: ConstantKindFlag, checkFunc: checkConstantKind}, - } { - if lookupFlag(pass, c.flag) { - c.checkFunc(pass, n) - } - } - - case *ast.CompositeLit: - typ, ok := n.Type.(*ast.SelectorExpr) - if !ok { - return - } - - x, ok := typ.X.(*ast.Ident) - if !ok { - return - } - - typeElts(pass, x, typ, n.Elts) - - case *ast.IfStmt: - cond, ok := n.Cond.(*ast.BinaryExpr) - if !ok { - return - } - - switch cond.Op { - case token.LSS, token.GTR, token.LEQ, token.GEQ: - return - } - - x, ok := cond.X.(*ast.SelectorExpr) - if !ok { - return - } - - y, ok := cond.Y.(*ast.BasicLit) - if !ok { - return - } - - ifElseStmt(pass, x, y) - - case *ast.SwitchStmt: - x, ok := n.Tag.(*ast.SelectorExpr) - if ok { - switchStmt(pass, x, n.Body.List) - } else { - switchStmtAsIfElseStmt(pass, n.Body.List) - } - - case *ast.ForStmt: - cond, ok := n.Cond.(*ast.BinaryExpr) - if !ok { - return - } - - x, ok := cond.X.(*ast.SelectorExpr) - if !ok { - return - } - - y, ok := cond.Y.(*ast.BasicLit) - if !ok { - return - } - - ifElseStmt(pass, x, y) - } - }) - - return 
nil, nil -} - -// funArgs checks arguments of function or method. -func funArgs(pass *analysis.Pass, x *ast.Ident, fun *ast.SelectorExpr, args []ast.Expr) { - switch x.Name { - case "http": - switch fun.Sel.Name { - // http.NewRequest(http.MethodGet, "localhost", http.NoBody) - case "NewRequest": - if !lookupFlag(pass, HTTPMethodFlag) { - return - } - - if basicLit := getBasicLitFromArgs(args, 3, 0, token.STRING); basicLit != nil { - checkHTTPMethod(pass, basicLit) - } - - // http.NewRequestWithContext(context.Background(), http.MethodGet, "localhost", http.NoBody) - case "NewRequestWithContext": - if !lookupFlag(pass, HTTPMethodFlag) { - return - } - - if basicLit := getBasicLitFromArgs(args, 4, 1, token.STRING); basicLit != nil { - checkHTTPMethod(pass, basicLit) - } - - // http.Error(w, err, http.StatusInternalServerError) - case "Error": - if !lookupFlag(pass, HTTPStatusCodeFlag) { - return - } - - if basicLit := getBasicLitFromArgs(args, 3, 2, token.INT); basicLit != nil { - checkHTTPStatusCode(pass, basicLit) - } - - // http.StatusText(http.StatusOK) - case "StatusText": - if !lookupFlag(pass, HTTPStatusCodeFlag) { - return - } - - if basicLit := getBasicLitFromArgs(args, 1, 0, token.INT); basicLit != nil { - checkHTTPStatusCode(pass, basicLit) - } - - // http.Redirect(w, r, "localhost", http.StatusMovedPermanently) - case "Redirect": - if !lookupFlag(pass, HTTPStatusCodeFlag) { - return - } - - if basicLit := getBasicLitFromArgs(args, 4, 3, token.INT); basicLit != nil { - checkHTTPStatusCode(pass, basicLit) - } - - // http.RedirectHandler("localhost", http.StatusMovedPermanently) - case "RedirectHandler": - if !lookupFlag(pass, HTTPStatusCodeFlag) { - return - } - - if basicLit := getBasicLitFromArgs(args, 2, 1, token.INT); basicLit != nil { - checkHTTPStatusCode(pass, basicLit) - } - } - case "httptest": - if fun.Sel.Name == "NewRequest" { - if !lookupFlag(pass, HTTPMethodFlag) { - return - } - - if basicLit := getBasicLitFromArgs(args, 3, 0, token.STRING); basicLit != nil { - checkHTTPMethod(pass, basicLit) - } - } - case "syslog": - if !lookupFlag(pass, SyslogPriorityFlag) { - return - } - - switch fun.Sel.Name { - case "New": - if basicLit := getBasicLitFromArgs(args, 2, 0, token.INT); basicLit != nil { - checkSyslogPriority(pass, basicLit) - } - - case "Dial": - if basicLit := getBasicLitFromArgs(args, 4, 2, token.INT); basicLit != nil { - checkSyslogPriority(pass, basicLit) - } - - case "NewLogger": - if basicLit := getBasicLitFromArgs(args, 2, 0, token.INT); basicLit != nil { - checkSyslogPriority(pass, basicLit) - } - } - default: - // w.WriteHeader(http.StatusOk) - if fun.Sel.Name == "WriteHeader" { - if !lookupFlag(pass, HTTPStatusCodeFlag) { - return - } - - if basicLit := getBasicLitFromArgs(args, 1, 0, token.INT); basicLit != nil { - checkHTTPStatusCode(pass, basicLit) - } - } - } -} - -// typeElts checks elements of type. 
-func typeElts(pass *analysis.Pass, x *ast.Ident, typ *ast.SelectorExpr, elts []ast.Expr) { - switch x.Name { - case "http": - switch typ.Sel.Name { - // http.Request{Method: http.MethodGet} - case "Request": - if !lookupFlag(pass, HTTPMethodFlag) { - return - } - - if basicLit := getBasicLitFromElts(elts, "Method"); basicLit != nil { - checkHTTPMethod(pass, basicLit) - } - - // http.Response{StatusCode: http.StatusOK} - case "Response": - if !lookupFlag(pass, HTTPStatusCodeFlag) { - return - } - - if basicLit := getBasicLitFromElts(elts, "StatusCode"); basicLit != nil { - checkHTTPStatusCode(pass, basicLit) - } - } - case "httptest": - if typ.Sel.Name == "ResponseRecorder" { - if !lookupFlag(pass, HTTPStatusCodeFlag) { - return - } - - if basicLit := getBasicLitFromElts(elts, "Code"); basicLit != nil { - checkHTTPStatusCode(pass, basicLit) - } - } - } -} - -// ifElseStmt checks X and Y in if-else-statement. -func ifElseStmt(pass *analysis.Pass, x *ast.SelectorExpr, y *ast.BasicLit) { - switch x.Sel.Name { - case "StatusCode": - if !lookupFlag(pass, HTTPStatusCodeFlag) { - return - } - - checkHTTPStatusCode(pass, y) - - case "Method": - if !lookupFlag(pass, HTTPMethodFlag) { - return - } - - checkHTTPMethod(pass, y) - } -} - -func switchStmt(pass *analysis.Pass, x *ast.SelectorExpr, cases []ast.Stmt) { - var checkFunc func(pass *analysis.Pass, basicLit *ast.BasicLit) - - switch x.Sel.Name { - case "StatusCode": - if !lookupFlag(pass, HTTPStatusCodeFlag) { - return - } - - checkFunc = checkHTTPStatusCode - - case "Method": - if !lookupFlag(pass, HTTPMethodFlag) { - return - } - - checkFunc = checkHTTPMethod - - default: - return - } - - for _, c := range cases { - caseClause, ok := c.(*ast.CaseClause) - if !ok { - continue - } - - for _, expr := range caseClause.List { - basicLit, ok := expr.(*ast.BasicLit) - if !ok { - continue - } - - checkFunc(pass, basicLit) - } - } -} - -func switchStmtAsIfElseStmt(pass *analysis.Pass, cases []ast.Stmt) { - for _, c := range cases { - caseClause, ok := c.(*ast.CaseClause) - if !ok { - continue - } - - for _, expr := range caseClause.List { - binaryExpr, ok := expr.(*ast.BinaryExpr) - if !ok { - continue - } - - x, ok := binaryExpr.X.(*ast.SelectorExpr) - if !ok { - continue - } - - y, ok := binaryExpr.Y.(*ast.BasicLit) - if !ok { - continue - } - - ifElseStmt(pass, x, y) - } - } -} - -func lookupFlag(pass *analysis.Pass, name string) bool { - return pass.Analyzer.Flags.Lookup(name).Value.(flag.Getter).Get().(bool) -} - -func checkHTTPMethod(pass *analysis.Pass, basicLit *ast.BasicLit) { - currentVal := getBasicLitValue(basicLit) - - key := strings.ToUpper(currentVal) - - if newVal, ok := mapping.HTTPMethod[key]; ok { - report(pass, basicLit.Pos(), currentVal, newVal) - } -} - -func checkHTTPStatusCode(pass *analysis.Pass, basicLit *ast.BasicLit) { - currentVal := getBasicLitValue(basicLit) - - if newVal, ok := mapping.HTTPStatusCode[currentVal]; ok { - report(pass, basicLit.Pos(), currentVal, newVal) - } -} - -func checkTimeWeekday(pass *analysis.Pass, basicLit *ast.BasicLit) { - currentVal := getBasicLitValue(basicLit) - - if newVal, ok := mapping.TimeWeekday[currentVal]; ok { - report(pass, basicLit.Pos(), currentVal, newVal) - } -} - -func checkTimeMonth(pass *analysis.Pass, basicLit *ast.BasicLit) { - currentVal := getBasicLitValue(basicLit) - - if newVal, ok := mapping.TimeMonth[currentVal]; ok { - report(pass, basicLit.Pos(), currentVal, newVal) - } -} - -func checkTimeLayout(pass *analysis.Pass, basicLit *ast.BasicLit) { - currentVal := 
getBasicLitValue(basicLit) - - if newVal, ok := mapping.TimeLayout[currentVal]; ok { - report(pass, basicLit.Pos(), currentVal, newVal) - } -} - -func checkCryptoHash(pass *analysis.Pass, basicLit *ast.BasicLit) { - currentVal := getBasicLitValue(basicLit) - - if newVal, ok := mapping.CryptoHash[currentVal]; ok { - report(pass, basicLit.Pos(), currentVal, newVal) - } -} - -func checkRPCDefaultPath(pass *analysis.Pass, basicLit *ast.BasicLit) { - currentVal := getBasicLitValue(basicLit) - - if newVal, ok := mapping.RPCDefaultPath[currentVal]; ok { - report(pass, basicLit.Pos(), currentVal, newVal) - } -} - -func checkOSDevNull(pass *analysis.Pass, basicLit *ast.BasicLit) {} - -func checkSQLIsolationLevel(pass *analysis.Pass, basicLit *ast.BasicLit) { - currentVal := getBasicLitValue(basicLit) - - if newVal, ok := mapping.SQLIsolationLevel[currentVal]; ok { - report(pass, basicLit.Pos(), currentVal, newVal) - } -} - -func checkTLSSignatureScheme(pass *analysis.Pass, basicLit *ast.BasicLit) { - currentVal := getBasicLitValue(basicLit) - - if newVal, ok := mapping.TLSSignatureScheme[currentVal]; ok { - report(pass, basicLit.Pos(), currentVal, newVal) - } -} - -func checkConstantKind(pass *analysis.Pass, basicLit *ast.BasicLit) { - currentVal := getBasicLitValue(basicLit) - - if newVal, ok := mapping.ConstantKind[currentVal]; ok { - report(pass, basicLit.Pos(), currentVal, newVal) - } -} - -func checkSyslogPriority(pass *analysis.Pass, basicLit *ast.BasicLit) {} - -// getBasicLitFromArgs gets the *ast.BasicLit of a function argument. -// -// Arguments: -// - args - slice of function arguments -// - count - expected number of argument in function -// - idx - index of the argument to get the *ast.BasicLit -// - typ - argument type -func getBasicLitFromArgs(args []ast.Expr, count, idx int, typ token.Token) *ast.BasicLit { - if len(args) != count { - return nil - } - - if idx > count-1 { - return nil - } - - basicLit, ok := args[idx].(*ast.BasicLit) - if !ok { - return nil - } - - if basicLit.Kind != typ { - return nil - } - - return basicLit -} - -// getBasicLitFromElts gets the *ast.BasicLit of a struct elements. -// -// Arguments: -// - elts - slice of a struct elements -// - key - name of key in struct -func getBasicLitFromElts(elts []ast.Expr, key string) *ast.BasicLit { - for _, e := range elts { - keyValueExpr, ok := e.(*ast.KeyValueExpr) - if !ok { - continue - } - - ident, ok := keyValueExpr.Key.(*ast.Ident) - if !ok { - continue - } - - if ident.Name != key { - continue - } - - if basicLit, ok := keyValueExpr.Value.(*ast.BasicLit); ok { - return basicLit - } - } - - return nil -} - -// getBasicLitValue returns BasicLit value as string without quotes. 
-func getBasicLitValue(basicLit *ast.BasicLit) string { - var val strings.Builder - for _, r := range basicLit.Value { - if r == '"' { - continue - } else { - val.WriteRune(r) - } - } - return val.String() -} - -func report(pass *analysis.Pass, pos token.Pos, currentVal, newVal string) { - pass.Reportf(pos, "%q can be replaced by %s", currentVal, newVal) -} diff --git a/vendor/github.com/sashamelentyev/usestdlibvars/pkg/analyzer/internal/mapping/mapping.go b/vendor/github.com/sashamelentyev/usestdlibvars/pkg/analyzer/internal/mapping/mapping.go deleted file mode 100644 index 5bad23d28..000000000 --- a/vendor/github.com/sashamelentyev/usestdlibvars/pkg/analyzer/internal/mapping/mapping.go +++ /dev/null @@ -1,202 +0,0 @@ -package mapping - -import ( - "crypto" - "crypto/tls" - "database/sql" - "go/constant" - "net/http" - "net/rpc" - "strconv" - "time" -) - -var CryptoHash = map[string]string{ - crypto.MD4.String(): "crypto.MD4.String()", - crypto.MD5.String(): "crypto.MD5.String()", - crypto.SHA1.String(): "crypto.SHA1.String()", - crypto.SHA224.String(): "crypto.SHA224.String()", - crypto.SHA256.String(): "crypto.SHA256.String()", - crypto.SHA384.String(): "crypto.SHA384.String()", - crypto.SHA512.String(): "crypto.SHA512.String()", - crypto.MD5SHA1.String(): "crypto.MD5SHA1.String()", - crypto.RIPEMD160.String(): "crypto.RIPEMD160.String()", - crypto.SHA3_224.String(): "crypto.SHA3_224.String()", - crypto.SHA3_256.String(): "crypto.SHA3_256.String()", - crypto.SHA3_384.String(): "crypto.SHA3_384.String()", - crypto.SHA3_512.String(): "crypto.SHA3_512.String()", - crypto.SHA512_224.String(): "crypto.SHA512_224.String()", - crypto.SHA512_256.String(): "crypto.SHA512_256.String()", - crypto.BLAKE2s_256.String(): "crypto.BLAKE2s_256.String()", - crypto.BLAKE2b_256.String(): "crypto.BLAKE2b_256.String()", - crypto.BLAKE2b_384.String(): "crypto.BLAKE2b_384.String()", - crypto.BLAKE2b_512.String(): "crypto.BLAKE2b_512.String()", -} - -var HTTPMethod = map[string]string{ - http.MethodGet: "http.MethodGet", - http.MethodHead: "http.MethodHead", - http.MethodPost: "http.MethodPost", - http.MethodPut: "http.MethodPut", - http.MethodPatch: "http.MethodPatch", - http.MethodDelete: "http.MethodDelete", - http.MethodConnect: "http.MethodConnect", - http.MethodOptions: "http.MethodOptions", - http.MethodTrace: "http.MethodTrace", -} - -var HTTPStatusCode = map[string]string{ - strconv.Itoa(http.StatusContinue): "http.StatusContinue", - strconv.Itoa(http.StatusSwitchingProtocols): "http.StatusSwitchingProtocols", - strconv.Itoa(http.StatusProcessing): "http.StatusProcessing", - strconv.Itoa(http.StatusEarlyHints): "http.StatusEarlyHints", - - strconv.Itoa(http.StatusOK): "http.StatusOK", - strconv.Itoa(http.StatusCreated): "http.StatusCreated", - strconv.Itoa(http.StatusAccepted): "http.StatusAccepted", - strconv.Itoa(http.StatusNonAuthoritativeInfo): "http.StatusNonAuthoritativeInfo", - strconv.Itoa(http.StatusNoContent): "http.StatusNoContent", - strconv.Itoa(http.StatusResetContent): "http.StatusResetContent", - strconv.Itoa(http.StatusPartialContent): "http.StatusPartialContent", - strconv.Itoa(http.StatusMultiStatus): "http.StatusMultiStatus", - strconv.Itoa(http.StatusAlreadyReported): "http.StatusAlreadyReported", - strconv.Itoa(http.StatusIMUsed): "http.StatusIMUsed", - - strconv.Itoa(http.StatusMultipleChoices): "http.StatusMultipleChoices", - strconv.Itoa(http.StatusMovedPermanently): "http.StatusMovedPermanently", - strconv.Itoa(http.StatusFound): "http.StatusFound", - 
strconv.Itoa(http.StatusSeeOther): "http.StatusSeeOther", - strconv.Itoa(http.StatusNotModified): "http.StatusNotModified", - strconv.Itoa(http.StatusUseProxy): "http.StatusUseProxy", - strconv.Itoa(http.StatusTemporaryRedirect): "http.StatusTemporaryRedirect", - strconv.Itoa(http.StatusPermanentRedirect): "http.StatusPermanentRedirect", - - strconv.Itoa(http.StatusBadRequest): "http.StatusBadRequest", - strconv.Itoa(http.StatusUnauthorized): "http.StatusUnauthorized", - strconv.Itoa(http.StatusPaymentRequired): "http.StatusPaymentRequired", - strconv.Itoa(http.StatusForbidden): "http.StatusForbidden", - strconv.Itoa(http.StatusNotFound): "http.StatusNotFound", - strconv.Itoa(http.StatusMethodNotAllowed): "http.StatusMethodNotAllowed", - strconv.Itoa(http.StatusNotAcceptable): "http.StatusNotAcceptable", - strconv.Itoa(http.StatusProxyAuthRequired): "http.StatusProxyAuthRequired", - strconv.Itoa(http.StatusRequestTimeout): "http.StatusRequestTimeout", - strconv.Itoa(http.StatusConflict): "http.StatusConflict", - strconv.Itoa(http.StatusGone): "http.StatusGone", - strconv.Itoa(http.StatusLengthRequired): "http.StatusLengthRequired", - strconv.Itoa(http.StatusPreconditionFailed): "http.StatusPreconditionFailed", - strconv.Itoa(http.StatusRequestEntityTooLarge): "http.StatusRequestEntityTooLarge", - strconv.Itoa(http.StatusRequestURITooLong): "http.StatusRequestURITooLong", - strconv.Itoa(http.StatusUnsupportedMediaType): "http.StatusUnsupportedMediaType", - strconv.Itoa(http.StatusRequestedRangeNotSatisfiable): "http.StatusRequestedRangeNotSatisfiable", - strconv.Itoa(http.StatusExpectationFailed): "http.StatusExpectationFailed", - strconv.Itoa(http.StatusTeapot): "http.StatusTeapot", - strconv.Itoa(http.StatusMisdirectedRequest): "http.StatusMisdirectedRequest", - strconv.Itoa(http.StatusUnprocessableEntity): "http.StatusUnprocessableEntity", - strconv.Itoa(http.StatusLocked): "http.StatusLocked", - strconv.Itoa(http.StatusFailedDependency): "http.StatusFailedDependency", - strconv.Itoa(http.StatusTooEarly): "http.StatusTooEarly", - strconv.Itoa(http.StatusUpgradeRequired): "http.StatusUpgradeRequired", - strconv.Itoa(http.StatusPreconditionRequired): "http.StatusPreconditionRequired", - strconv.Itoa(http.StatusTooManyRequests): "http.StatusTooManyRequests", - strconv.Itoa(http.StatusRequestHeaderFieldsTooLarge): "http.StatusRequestHeaderFieldsTooLarge", - strconv.Itoa(http.StatusUnavailableForLegalReasons): "http.StatusUnavailableForLegalReasons", - - strconv.Itoa(http.StatusInternalServerError): "http.StatusInternalServerError", - strconv.Itoa(http.StatusNotImplemented): "http.StatusNotImplemented", - strconv.Itoa(http.StatusBadGateway): "http.StatusBadGateway", - strconv.Itoa(http.StatusServiceUnavailable): "http.StatusServiceUnavailable", - strconv.Itoa(http.StatusGatewayTimeout): "http.StatusGatewayTimeout", - strconv.Itoa(http.StatusHTTPVersionNotSupported): "http.StatusHTTPVersionNotSupported", - strconv.Itoa(http.StatusVariantAlsoNegotiates): "http.StatusVariantAlsoNegotiates", - strconv.Itoa(http.StatusInsufficientStorage): "http.StatusInsufficientStorage", - strconv.Itoa(http.StatusLoopDetected): "http.StatusLoopDetected", - strconv.Itoa(http.StatusNotExtended): "http.StatusNotExtended", - strconv.Itoa(http.StatusNetworkAuthenticationRequired): "http.StatusNetworkAuthenticationRequired", -} - -var RPCDefaultPath = map[string]string{ - rpc.DefaultRPCPath: "rpc.DefaultRPCPath", - rpc.DefaultDebugPath: "rpc.DefaultDebugPath", -} - -var TimeWeekday = map[string]string{ - 
time.Sunday.String(): "time.Sunday.String()", - time.Monday.String(): "time.Monday.String()", - time.Tuesday.String(): "time.Tuesday.String()", - time.Wednesday.String(): "time.Wednesday.String()", - time.Thursday.String(): "time.Thursday.String()", - time.Friday.String(): "time.Friday.String()", - time.Saturday.String(): "time.Saturday.String()", -} - -var TimeMonth = map[string]string{ - time.January.String(): "time.January.String()", - time.February.String(): "time.February.String()", - time.March.String(): "time.March.String()", - time.April.String(): "time.April.String()", - time.May.String(): "time.May.String()", - time.June.String(): "time.June.String()", - time.July.String(): "time.July.String()", - time.August.String(): "time.August.String()", - time.September.String(): "time.September.String()", - time.October.String(): "time.October.String()", - time.November.String(): "time.November.String()", - time.December.String(): "time.December.String()", -} - -var TimeLayout = map[string]string{ - time.Layout: "time.Layout", - time.ANSIC: "time.ANSIC", - time.UnixDate: "time.UnixDate", - time.RubyDate: "time.RubyDate", - time.RFC822: "time.RFC822", - time.RFC822Z: "time.RFC822Z", - time.RFC850: "time.RFC850", - time.RFC1123: "time.RFC1123", - time.RFC1123Z: "time.RFC1123Z", - time.RFC3339: "time.RFC3339", - time.RFC3339Nano: "time.RFC3339Nano", - time.Kitchen: "time.Kitchen", - time.Stamp: "time.Stamp", - time.StampMilli: "time.StampMilli", - time.StampMicro: "time.StampMicro", - time.StampNano: "time.StampNano", - time.DateTime: "time.DateTime", - time.DateOnly: "time.DateOnly", - time.TimeOnly: "time.TimeOnly", -} - -var SQLIsolationLevel = map[string]string{ - // sql.LevelDefault.String(): "sql.LevelDefault.String()", - sql.LevelReadUncommitted.String(): "sql.LevelReadUncommitted.String()", - sql.LevelReadCommitted.String(): "sql.LevelReadCommitted.String()", - sql.LevelWriteCommitted.String(): "sql.LevelWriteCommitted.String()", - sql.LevelRepeatableRead.String(): "sql.LevelRepeatableRead.String()", - // sql.LevelSnapshot.String(): "sql.LevelSnapshot.String()", - // sql.LevelSerializable.String(): "sql.LevelSerializable.String()", - // sql.LevelLinearizable.String(): "sql.LevelLinearizable.String()", -} - -var TLSSignatureScheme = map[string]string{ - tls.PSSWithSHA256.String(): "tls.PSSWithSHA256.String()", - tls.ECDSAWithP256AndSHA256.String(): "tls.ECDSAWithP256AndSHA256.String()", - tls.Ed25519.String(): "tls.Ed25519.String()", - tls.PSSWithSHA384.String(): "tls.PSSWithSHA384.String()", - tls.PSSWithSHA512.String(): "tls.PSSWithSHA512.String()", - tls.PKCS1WithSHA256.String(): "tls.PKCS1WithSHA256.String()", - tls.PKCS1WithSHA384.String(): "tls.PKCS1WithSHA384.String()", - tls.PKCS1WithSHA512.String(): "tls.PKCS1WithSHA512.String()", - tls.ECDSAWithP384AndSHA384.String(): "tls.ECDSAWithP384AndSHA384.String()", - tls.ECDSAWithP521AndSHA512.String(): "tls.ECDSAWithP521AndSHA512.String()", - tls.PKCS1WithSHA1.String(): "tls.PKCS1WithSHA1.String()", - tls.ECDSAWithSHA1.String(): "tls.ECDSAWithSHA1.String()", -} - -var ConstantKind = map[string]string{ - // constant.Unknown.String(): "constant.Unknown.String()", - constant.Bool.String(): "constant.Bool.String()", - constant.String.String(): "constant.String.String()", - constant.Int.String(): "constant.Int.String()", - constant.Float.String(): "constant.Float.String()", - constant.Complex.String(): "constant.Complex.String()", -} diff --git a/vendor/github.com/securego/gosec/v2/.gitignore 
b/vendor/github.com/securego/gosec/v2/.gitignore deleted file mode 100644 index f6c8065b4..000000000 --- a/vendor/github.com/securego/gosec/v2/.gitignore +++ /dev/null @@ -1,39 +0,0 @@ -# transient files -/image - -# Compiled Object files, Static and Dynamic libs (Shared Objects) -*.o -*.a -*.so -*.swp -/gosec - -# Folders -_obj -_test -vendor -dist - -# Architecture specific extensions/prefixes -*.[568vq] -[568vq].out - -*.cgo1.go -*.cgo2.c -_cgo_defun.c -_cgo_gotypes.go -_cgo_export.* - -_testmain.go - -*.exe -*.test -*.prof - -.DS_Store - -.vscode -.idea - -# SBOMs generated during CI -/bom.json diff --git a/vendor/github.com/securego/gosec/v2/.golangci.yml b/vendor/github.com/securego/gosec/v2/.golangci.yml deleted file mode 100644 index d591dc249..000000000 --- a/vendor/github.com/securego/gosec/v2/.golangci.yml +++ /dev/null @@ -1,45 +0,0 @@ -linters: - enable: - - asciicheck - - bodyclose - - dogsled - - durationcheck - - errcheck - - errorlint - - exportloopref - - gci - - ginkgolinter - - gochecknoinits - - gofmt - - gofumpt - - goimports - - gosec - - gosimple - - govet - - importas - - ineffassign - - megacheck - - misspell - - nakedret - - nolintlint - - revive - - staticcheck - - typecheck - - unconvert - - unparam - - unused - - wastedassign - -linters-settings: - gci: - sections: - - standard - - default - - prefix(github.com/securego) - revive: - rules: - - name: dot-imports - disabled: true - -run: - timeout: 5m diff --git a/vendor/github.com/securego/gosec/v2/.goreleaser.yml b/vendor/github.com/securego/gosec/v2/.goreleaser.yml deleted file mode 100644 index bd85bab3a..000000000 --- a/vendor/github.com/securego/gosec/v2/.goreleaser.yml +++ /dev/null @@ -1,37 +0,0 @@ ---- -project_name: gosec - -release: - extra_files: - - glob: ./bom.json - github: - owner: securego - name: gosec - -builds: - - main: ./cmd/gosec/ - binary: gosec - goos: - - darwin - - linux - - windows - goarch: - - amd64 - - arm64 - - s390x - - ppc64le - ldflags: -X main.Version={{.Version}} -X main.GitTag={{.Tag}} -X main.BuildDate={{.Date}} - env: - - CGO_ENABLED=0 - -signs: -- cmd: cosign - stdin: '{{ .Env.COSIGN_PASSWORD}}' - args: - - "sign-blob" - - "--key=/tmp/cosign.key" - - "--output=${signature}" - - "${artifact}" - - "--yes" - artifacts: all - diff --git a/vendor/github.com/securego/gosec/v2/Dockerfile b/vendor/github.com/securego/gosec/v2/Dockerfile deleted file mode 100644 index 1bf94da7d..000000000 --- a/vendor/github.com/securego/gosec/v2/Dockerfile +++ /dev/null @@ -1,15 +0,0 @@ -ARG GO_VERSION -FROM golang:${GO_VERSION}-alpine AS builder -RUN apk add --no-cache ca-certificates make git curl gcc libc-dev \ - && mkdir -p /build -WORKDIR /build -COPY . /build/ -RUN go mod download \ - && make build-linux - -FROM golang:${GO_VERSION}-alpine -RUN apk add --no-cache ca-certificates bash git gcc libc-dev openssh -ENV GO111MODULE on -COPY --from=builder /build/gosec /bin/gosec -COPY entrypoint.sh /bin/entrypoint.sh -ENTRYPOINT ["/bin/entrypoint.sh"] diff --git a/vendor/github.com/securego/gosec/v2/LICENSE.txt b/vendor/github.com/securego/gosec/v2/LICENSE.txt deleted file mode 100644 index 1756c7821..000000000 --- a/vendor/github.com/securego/gosec/v2/LICENSE.txt +++ /dev/null @@ -1,154 +0,0 @@ -Apache License - -Version 2.0, January 2004 - -http://www.apache.org/licenses/ - -TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - -1. Definitions. - -"License" shall mean the terms and conditions for use, reproduction, and -distribution as defined by Sections 1 through 9 of this document. 
- -"Licensor" shall mean the copyright owner or entity authorized by the copyright -owner that is granting the License. - -"Legal Entity" shall mean the union of the acting entity and all other entities -that control, are controlled by, or are under common control with that entity. -For the purposes of this definition, "control" means (i) the power, direct or -indirect, to cause the direction or management of such entity, whether by -contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the -outstanding shares, or (iii) beneficial ownership of such entity. - -"You" (or "Your") shall mean an individual or Legal Entity exercising -permissions granted by this License. - -"Source" form shall mean the preferred form for making modifications, including -but not limited to software source code, documentation source, and configuration -files. - -"Object" form shall mean any form resulting from mechanical transformation or -translation of a Source form, including but not limited to compiled object code, -generated documentation, and conversions to other media types. - -"Work" shall mean the work of authorship, whether in Source or Object form, made -available under the License, as indicated by a copyright notice that is included -in or attached to the work (an example is provided in the Appendix below). - -"Derivative Works" shall mean any work, whether in Source or Object form, that -is based on (or derived from) the Work and for which the editorial revisions, -annotations, elaborations, or other modifications represent, as a whole, an -original work of authorship. For the purposes of this License, Derivative Works -shall not include works that remain separable from, or merely link (or bind by -name) to the interfaces of, the Work and Derivative Works thereof. - -"Contribution" shall mean any work of authorship, including the original version -of the Work and any modifications or additions to that Work or Derivative Works -thereof, that is intentionally submitted to Licensor for inclusion in the Work -by the copyright owner or by an individual or Legal Entity authorized to submit -on behalf of the copyright owner. For the purposes of this definition, -"submitted" means any form of electronic, verbal, or written communication sent -to the Licensor or its representatives, including but not limited to -communication on electronic mailing lists, source code control systems, and -issue tracking systems that are managed by, or on behalf of, the Licensor for -the purpose of discussing and improving the Work, but excluding communication -that is conspicuously marked or otherwise designated in writing by the copyright -owner as "Not a Contribution." - -"Contributor" shall mean Licensor and any individual or Legal Entity on behalf -of whom a Contribution has been received by Licensor and subsequently -incorporated within the Work. - -2. Grant of Copyright License. Subject to the terms and conditions of this -License, each Contributor hereby grants to You a perpetual, worldwide, -non-exclusive, no-charge, royalty-free, irrevocable copyright license to -reproduce, prepare Derivative Works of, publicly display, publicly perform, -sublicense, and distribute the Work and such Derivative Works in Source or -Object form. - -3. Grant of Patent License. 
Subject to the terms and conditions of this License, -each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, -no-charge, royalty-free, irrevocable (except as stated in this section) patent -license to make, have made, use, offer to sell, sell, import, and otherwise -transfer the Work, where such license applies only to those patent claims -licensable by such Contributor that are necessarily infringed by their -Contribution(s) alone or by combination of their Contribution(s) with the Work -to which such Contribution(s) was submitted. If You institute patent litigation -against any entity (including a cross-claim or counterclaim in a lawsuit) -alleging that the Work or a Contribution incorporated within the Work -constitutes direct or contributory patent infringement, then any patent licenses -granted to You under this License for that Work shall terminate as of the date -such litigation is filed. - -4. Redistribution. You may reproduce and distribute copies of the Work or -Derivative Works thereof in any medium, with or without modifications, and in -Source or Object form, provided that You meet the following conditions: - -You must give any other recipients of the Work or Derivative Works a copy of -this License; and You must cause any modified files to carry prominent notices -stating that You changed the files; and You must retain, in the Source form of -any Derivative Works that You distribute, all copyright, patent, trademark, and -attribution notices from the Source form of the Work, excluding those notices -that do not pertain to any part of the Derivative Works; and If the Work -includes a "NOTICE" text file as part of its distribution, then any Derivative -Works that You distribute must include a readable copy of the attribution -notices contained within such NOTICE file, excluding those notices that do not -pertain to any part of the Derivative Works, in at least one of the following -places: within a NOTICE text file distributed as part of the Derivative Works; -within the Source form or documentation, if provided along with the Derivative -Works; or, within a display generated by the Derivative Works, if and wherever -such third-party notices normally appear. The contents of the NOTICE file are -for informational purposes only and do not modify the License. You may add Your -own attribution notices within Derivative Works that You distribute, alongside -or as an addendum to the NOTICE text from the Work, provided that such -additional attribution notices cannot be construed as modifying the License. - -You may add Your own copyright statement to Your modifications and may provide -additional or different license terms and conditions for use, reproduction, or -distribution of Your modifications, or for any such Derivative Works as a whole, -provided Your use, reproduction, and distribution of the Work otherwise complies -with the conditions stated in this License. 5. Submission of Contributions. -Unless You explicitly state otherwise, any Contribution intentionally submitted -for inclusion in the Work by You to the Licensor shall be under the terms and -conditions of this License, without any additional terms or conditions. -Notwithstanding the above, nothing herein shall supersede or modify the terms of -any separate license agreement you may have executed with Licensor regarding -such Contributions. - -6. Trademarks. 
This License does not grant permission to use the trade names, -trademarks, service marks, or product names of the Licensor, except as required -for reasonable and customary use in describing the origin of the Work and -reproducing the content of the NOTICE file. - -7. Disclaimer of Warranty. Unless required by applicable law or agreed to in -writing, Licensor provides the Work (and each Contributor provides its -Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -KIND, either express or implied, including, without limitation, any warranties -or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A -PARTICULAR PURPOSE. You are solely responsible for determining the -appropriateness of using or redistributing the Work and assume any risks -associated with Your exercise of permissions under this License. - -8. Limitation of Liability. In no event and under no legal theory, whether in -tort (including negligence), contract, or otherwise, unless required by -applicable law (such as deliberate and grossly negligent acts) or agreed to in -writing, shall any Contributor be liable to You for damages, including any -direct, indirect, special, incidental, or consequential damages of any character -arising as a result of this License or out of the use or inability to use the -Work (including but not limited to damages for loss of goodwill, work stoppage, -computer failure or malfunction, or any and all other commercial damages or -losses), even if such Contributor has been advised of the possibility of such -damages. - -9. Accepting Warranty or Additional Liability. While redistributing the Work or -Derivative Works thereof, You may choose to offer, and charge a fee for, -acceptance of support, warranty, indemnity, or other liability obligations -and/or rights consistent with this License. However, in accepting such -obligations, You may act only on Your own behalf and on Your sole -responsibility, not on behalf of any other Contributor, and only if You agree to -indemnify, defend, and hold each Contributor harmless for any liability incurred -by, or claims asserted against, such Contributor by reason of your accepting any -such warranty or additional liability. - -END OF TERMS AND CONDITIONS diff --git a/vendor/github.com/securego/gosec/v2/Makefile b/vendor/github.com/securego/gosec/v2/Makefile deleted file mode 100644 index 4f6cce765..000000000 --- a/vendor/github.com/securego/gosec/v2/Makefile +++ /dev/null @@ -1,95 +0,0 @@ -GIT_TAG?= $(shell git describe --always --tags) -BIN = gosec -FMT_CMD = $(gofmt -s -l -w $(find . -type f -name '*.go' -not -path './vendor/*') | tee /dev/stderr) -IMAGE_REPO = securego -DATE_FMT=+%Y-%m-%d -ifdef SOURCE_DATE_EPOCH - BUILD_DATE ?= $(shell date -u -d "@$(SOURCE_DATE_EPOCH)" "$(DATE_FMT)" 2>/dev/null || date -u -r "$(SOURCE_DATE_EPOCH)" "$(DATE_FMT)" 2>/dev/null || date -u "$(DATE_FMT)") -else - BUILD_DATE ?= $(shell date "$(DATE_FMT)") -endif -BUILDFLAGS := "-w -s -X 'main.Version=$(GIT_TAG)' -X 'main.GitTag=$(GIT_TAG)' -X 'main.BuildDate=$(BUILD_DATE)'" -CGO_ENABLED = 0 -GO := GO111MODULE=on go -GOPATH ?= $(shell $(GO) env GOPATH) -GOBIN ?= $(GOPATH)/bin -GOSEC ?= $(GOBIN)/gosec -GINKGO ?= $(GOBIN)/ginkgo -GO_MINOR_VERSION = $(shell $(GO) version | cut -c 14- | cut -d' ' -f1 | cut -d'.' 
-f2) -GOVULN_MIN_VERSION = 17 -GO_VERSION = 1.22 - -default: - $(MAKE) build - -install-test-deps: - go install github.com/onsi/ginkgo/v2/ginkgo@latest - go install golang.org/x/crypto/...@latest - go install github.com/lib/pq/...@latest - -install-govulncheck: - @if [ $(GO_MINOR_VERSION) -gt $(GOVULN_MIN_VERSION) ]; then \ - go install golang.org/x/vuln/cmd/govulncheck@latest; \ - fi - -test: install-test-deps build-race fmt vet sec govulncheck - $(GINKGO) -v --fail-fast - -fmt: - @echo "FORMATTING" - @FORMATTED=`$(GO) fmt ./...` - @([ ! -z "$(FORMATTED)" ] && printf "Fixed unformatted files:\n$(FORMATTED)") || true - -vet: - @echo "VETTING" - $(GO) vet ./... - -golangci: - @echo "LINTING: golangci-lint" - golangci-lint run - -sec: - @echo "SECURITY SCANNING" - ./$(BIN) ./... - -govulncheck: install-govulncheck - @echo "CHECKING VULNERABILITIES" - @if [ $(GO_MINOR_VERSION) -gt $(GOVULN_MIN_VERSION) ]; then \ - govulncheck ./...; \ - fi - -test-coverage: install-test-deps - go test -race -v -count=1 -coverprofile=coverage.out ./... - -build: - go build -o $(BIN) ./cmd/gosec/ - -build-race: - go build -race -o $(BIN) ./cmd/gosec/ - -clean: - rm -rf build vendor dist coverage.txt - rm -f release image $(BIN) - -release: - @echo "Releasing the gosec binary..." - goreleaser release - -build-linux: - CGO_ENABLED=$(CGO_ENABLED) GOOS=linux go build -ldflags=$(BUILDFLAGS) -o $(BIN) ./cmd/gosec/ - -image: - @echo "Building the Docker image..." - docker build -t $(IMAGE_REPO)/$(BIN):$(GIT_TAG) --build-arg GO_VERSION=$(GO_VERSION) . - docker tag $(IMAGE_REPO)/$(BIN):$(GIT_TAG) $(IMAGE_REPO)/$(BIN):latest - touch image - -image-push: image - @echo "Pushing the Docker image..." - docker push $(IMAGE_REPO)/$(BIN):$(GIT_TAG) - docker push $(IMAGE_REPO)/$(BIN):latest - -tlsconfig: - go generate ./... - -.PHONY: test build clean release image image-push tlsconfig diff --git a/vendor/github.com/securego/gosec/v2/README.md b/vendor/github.com/securego/gosec/v2/README.md deleted file mode 100644 index f7b41df2e..000000000 --- a/vendor/github.com/securego/gosec/v2/README.md +++ /dev/null @@ -1,466 +0,0 @@ - -# gosec - Go Security Checker - -Inspects source code for security problems by scanning the Go AST and SSA code representation. - - - -## License - -Licensed under the Apache License, Version 2.0 (the "License"). -You may not use this file except in compliance with the License. -You may obtain a copy of the License [here](http://www.apache.org/licenses/LICENSE-2.0). 
- -## Project status - -[![CII Best Practices](https://bestpractices.coreinfrastructure.org/projects/3218/badge)](https://bestpractices.coreinfrastructure.org/projects/3218) -[![Build Status](https://github.com/securego/gosec/workflows/CI/badge.svg)](https://github.com/securego/gosec/actions?query=workflows%3ACI) -[![Coverage Status](https://codecov.io/gh/securego/gosec/branch/master/graph/badge.svg)](https://codecov.io/gh/securego/gosec) -[![GoReport](https://goreportcard.com/badge/github.com/securego/gosec)](https://goreportcard.com/report/github.com/securego/gosec) -[![GoDoc](https://pkg.go.dev/badge/github.com/securego/gosec/v2)](https://pkg.go.dev/github.com/securego/gosec/v2) -[![Docs](https://readthedocs.org/projects/docs/badge/?version=latest)](https://securego.io/) -[![Downloads](https://img.shields.io/github/downloads/securego/gosec/total.svg)](https://github.com/securego/gosec/releases) -[![Docker Pulls](https://img.shields.io/docker/pulls/securego/gosec.svg)](https://hub.docker.com/r/securego/gosec/tags) -[![Slack](https://img.shields.io/badge/Slack-4A154B?style=for-the-badge&logo=slack&logoColor=white)](http://securego.slack.com) - -## Install - -### CI Installation - -```bash -# binary will be $(go env GOPATH)/bin/gosec -curl -sfL https://raw.githubusercontent.com/securego/gosec/master/install.sh | sh -s -- -b $(go env GOPATH)/bin vX.Y.Z - -# or install it into ./bin/ -curl -sfL https://raw.githubusercontent.com/securego/gosec/master/install.sh | sh -s vX.Y.Z - -# In alpine linux (as it does not come with curl by default) -wget -O - -q https://raw.githubusercontent.com/securego/gosec/master/install.sh | sh -s vX.Y.Z - -# If you want to use the checksums provided on the "Releases" page -# then you will have to download a tar.gz file for your operating system instead of a binary file -wget https://github.com/securego/gosec/releases/download/vX.Y.Z/gosec_vX.Y.Z_OS.tar.gz - -# The file will be in the current folder where you run the command -# and you can check the checksum like this -echo " gosec_vX.Y.Z_OS.tar.gz" | sha256sum -c - - -gosec --help -``` - -### GitHub Action - -You can run `gosec` as a GitHub action as follows: - -```yaml -name: Run Gosec -on: - push: - branches: - - master - pull_request: - branches: - - master -jobs: - tests: - runs-on: ubuntu-latest - env: - GO111MODULE: on - steps: - - name: Checkout Source - uses: actions/checkout@v3 - - name: Run Gosec Security Scanner - uses: securego/gosec@master - with: - args: ./... -``` - -### Integrating with code scanning - -You can [integrate third-party code analysis tools](https://docs.github.com/en/github/finding-security-vulnerabilities-and-errors-in-your-code/integrating-with-code-scanning) with GitHub code scanning by uploading data as SARIF files. - -The workflow shows an example of running the `gosec` as a step in a GitHub action workflow which outputs the `results.sarif` file. The workflow then uploads the `results.sarif` file to GitHub using the `upload-sarif` action. - -```yaml -name: "Security Scan" - -# Run workflow each time code is pushed to your repository and on a schedule. -# The scheduled workflow runs every at 00:00 on Sunday UTC time. -on: - push: - schedule: - - cron: '0 0 * * 0' - -jobs: - tests: - runs-on: ubuntu-latest - env: - GO111MODULE: on - steps: - - name: Checkout Source - uses: actions/checkout@v3 - - name: Run Gosec Security Scanner - uses: securego/gosec@master - with: - # we let the report trigger content trigger a failure using the GitHub Security features. 
- args: '-no-fail -fmt sarif -out results.sarif ./...' - - name: Upload SARIF file - uses: github/codeql-action/upload-sarif@v2 - with: - # Path to SARIF file relative to the root of the repository - sarif_file: results.sarif -``` - -### Local Installation - -```bash -go install github.com/securego/gosec/v2/cmd/gosec@latest -``` - -## Usage - -Gosec can be configured to only run a subset of rules, to exclude certain file -paths, and produce reports in different formats. By default all rules will be -run against the supplied input files. To recursively scan from the current -directory you can supply `./...` as the input argument. - -### Available rules - -- G101: Look for hard coded credentials -- G102: Bind to all interfaces -- G103: Audit the use of unsafe block -- G104: Audit errors not checked -- G106: Audit the use of ssh.InsecureIgnoreHostKey -- G107: Url provided to HTTP request as taint input -- G108: Profiling endpoint automatically exposed on /debug/pprof -- G109: Potential Integer overflow made by strconv.Atoi result conversion to int16/32 -- G110: Potential DoS vulnerability via decompression bomb -- G111: Potential directory traversal -- G112: Potential slowloris attack -- G113: Usage of Rat.SetString in math/big with an overflow (CVE-2022-23772) -- G114: Use of net/http serve function that has no support for setting timeouts -- G201: SQL query construction using format string -- G202: SQL query construction using string concatenation -- G203: Use of unescaped data in HTML templates -- G204: Audit use of command execution -- G301: Poor file permissions used when creating a directory -- G302: Poor file permissions used with chmod -- G303: Creating tempfile using a predictable path -- G304: File path provided as taint input -- G305: File traversal when extracting zip/tar archive -- G306: Poor file permissions used when writing to a new file -- G307: Poor file permissions used when creating a file with os.Create -- G401: Detect the usage of DES, RC4, MD5 or SHA1 -- G402: Look for bad TLS connection settings -- G403: Ensure minimum RSA key length of 2048 bits -- G404: Insecure random number source (rand) -- G501: Import blocklist: crypto/md5 -- G502: Import blocklist: crypto/des -- G503: Import blocklist: crypto/rc4 -- G504: Import blocklist: net/http/cgi -- G505: Import blocklist: crypto/sha1 -- G601: Implicit memory aliasing of items from a range statement -- G602: Slice access out of bounds - -### Retired rules - -- G105: Audit the use of math/big.Int.Exp - [CVE is fixed](https://github.com/golang/go/issues/15184) -- G307: Deferring a method which returns an error - causing more inconvenience than fixing a security issue, despite the details from this [blog post](https://www.joeshaw.org/dont-defer-close-on-writable-files/) - -### Selecting rules - -By default, gosec will run all rules against the supplied file paths. It is however possible to select a subset of rules to run via the `-include=` flag, -or to specify a set of rules to explicitly exclude using the `-exclude=` flag. - -```bash -# Run a specific set of rules -$ gosec -include=G101,G203,G401 ./... - -# Run everything except for rule G303 -$ gosec -exclude=G303 ./... -``` - -### CWE Mapping - -Every issue detected by `gosec` is mapped to a [CWE (Common Weakness Enumeration)](http://cwe.mitre.org/data/index.html) which describes in more generic terms the vulnerability. The exact mapping can be found [here](https://github.com/securego/gosec/blob/master/issue/issue.go#L50). 
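The same mapping can also be inspected programmatically. The sketch below is illustrative only and assumes the vendored `cwe` and `issue` packages removed later in this patch (`issue.GetCweByRule`, `cwe.Get`, and the `Weakness` helpers `SprintID`/`SprintURL`); the G101-to-CWE-798 pairing shown in the comments is an assumption based on the rule and weakness names, and the authoritative table lives in `issue/issue.go`.

```go
package main

import (
	"fmt"

	"github.com/securego/gosec/v2/cwe"
	"github.com/securego/gosec/v2/issue"
)

func main() {
	// GetCweByRule resolves a gosec rule ID to its CWE weakness
	// (assumed to return nil for rules without a mapping).
	if w := issue.GetCweByRule("G101"); w != nil {
		fmt.Println(w.SprintID(), w.Name) // e.g. "CWE-798 Use of Hard-coded Credentials" (assumed pairing)
		fmt.Println(w.SprintURL())        // link into the MITRE CWE catalogue
	}

	// Weaknesses can also be looked up directly by their CWE identifier.
	if w := cwe.Get("798"); w != nil {
		fmt.Println(w.Description)
	}
}
```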
- -### Configuration - -A number of global settings can be provided in a configuration file as follows: - -```JSON -{ - "global": { - "nosec": "enabled", - "audit": "enabled" - } -} -``` - -- `nosec`: this setting will overwrite all `#nosec` directives defined throughout the code base -- `audit`: runs in audit mode which enables addition checks that for normal code analysis might be too nosy - -```bash -# Run with a global configuration file -$ gosec -conf config.json . -``` - -Also some rules accept configuration. For instance on rule `G104`, it is possible to define packages along with a list -of functions which will be skipped when auditing the not checked errors: - -```JSON -{ - "G104": { - "ioutil": ["WriteFile"] - } -} -``` - -You can also configure the hard-coded credentials rule `G101` with additional patterns, or adjust the entropy threshold: - -```JSON -{ - "G101": { - "pattern": "(?i)passwd|pass|password|pwd|secret|private_key|token", - "ignore_entropy": false, - "entropy_threshold": "80.0", - "per_char_threshold": "3.0", - "truncate": "32" - } -} -``` - -### Dependencies - -gosec will fetch automatically the dependencies of the code which is being analyzed when go module is turned on (e.g.`GO111MODULE=on`). If this is not the case, -the dependencies need to be explicitly downloaded by running the `go get -d` command before the scan. - -### Excluding test files and folders - -gosec will ignore test files across all packages and any dependencies in your vendor directory. - -The scanning of test files can be enabled with the following flag: - -```bash -gosec -tests ./... -``` - -Also additional folders can be excluded as follows: - -```bash - gosec -exclude-dir=rules -exclude-dir=cmd ./... -``` - -### Excluding generated files - -gosec can ignore generated go files with default generated code comment. - -``` -// Code generated by some generator DO NOT EDIT. -``` - -```bash -gosec -exclude-generated ./... -``` - - -### Annotating code - -As with all automated detection tools, there will be cases of false positives. -In cases where gosec reports a failure that has been manually verified as being safe, -it is possible to annotate the code with a comment that starts with `#nosec`. - -The `#nosec` comment should have the format `#nosec [RuleList] [-- Justification]`. - -The `#nosec` comment needs to be placed on the line where the warning is reported. - -```go -func main() { - tr := &http.Transport{ - TLSClientConfig: &tls.Config{ - InsecureSkipVerify: true, // #nosec G402 - }, - } - - client := &http.Client{Transport: tr} - _, err := client.Get("https://golang.org/") - if err != nil { - fmt.Println(err) - } -} -``` - -When a specific false positive has been identified and verified as safe, you may -wish to suppress only that single rule (or a specific set of rules) within a section of code, -while continuing to scan for other problems. To do this, you can list the rule(s) to be suppressed within -the `#nosec` annotation, e.g: `/* #nosec G401 */` or `//#nosec G201 G202 G203` - -You could put the description or justification text for the annotation. The -justification should be after the rule(s) to suppress and start with two or -more dashes, e.g: `//#nosec G101 G102 -- This is a false positive` - -In some cases you may also want to revisit places where `#nosec` annotations -have been used. To run the scanner and ignore any `#nosec` annotations you -can do the following: - -```bash -gosec -nosec=true ./... 
-``` - -### Tracking suppressions - -As described above, we could suppress violations externally (using `-include`/ -`-exclude`) or inline (using `#nosec` annotations) in gosec. This suppression -inflammation can be used to generate corresponding signals for auditing -purposes. - -We could track suppressions by the `-track-suppressions` flag as follows: - -```bash -gosec -track-suppressions -exclude=G101 -fmt=sarif -out=results.sarif ./... -``` - -- For external suppressions, gosec records suppression info where `kind` is -`external` and `justification` is a certain sentence "Globally suppressed". -- For inline suppressions, gosec records suppression info where `kind` is -`inSource` and `justification` is the text after two or more dashes in the -comment. - -**Note:** Only SARIF and JSON formats support tracking suppressions. - -### Build tags - -gosec is able to pass your [Go build tags](https://golang.org/pkg/go/build/) to the analyzer. -They can be provided as a comma separated list as follows: - -```bash -gosec -tags debug,ignore ./... -``` - -### Output formats - -gosec currently supports `text`, `json`, `yaml`, `csv`, `sonarqube`, `JUnit XML`, `html` and `golint` output formats. By default -results will be reported to stdout, but can also be written to an output -file. The output format is controlled by the `-fmt` flag, and the output file is controlled by the `-out` flag as follows: - -```bash -# Write output in json format to results.json -$ gosec -fmt=json -out=results.json *.go -``` - -Results will be reported to stdout as well as to the provided output file by `-stdout` flag. The `-verbose` flag overrides the -output format when stdout the results while saving them in the output file -```bash -# Write output in json format to results.json as well as stdout -$ gosec -fmt=json -out=results.json -stdout *.go - -# Overrides the output format to 'text' when stdout the results, while writing it to results.json -$ gosec -fmt=json -out=results.json -stdout -verbose=text *.go -``` - -**Note:** gosec generates the [generic issue import format](https://docs.sonarqube.org/latest/analysis/generic-issue/) for SonarQube, and a report has to be imported into SonarQube using `sonar.externalIssuesReportPaths=path/to/gosec-report.json`. - -## Development - -### Build - -You can build the binary with: - -```bash -make -``` - -### Note on Sarif Types Generation - -Install the tool with : - -```bash -go get -u github.com/a-h/generate/cmd/schema-generate -``` - -Then generate the types with : - -```bash -schema-generate -i sarif-schema-2.1.0.json -o mypath/types.go -``` - -Most of the MarshallJSON/UnmarshalJSON are removed except the one for PropertyBag which is handy to inline the additional properties. The rest can be removed. -The URI,ID, UUID, GUID were renamed so it fits the Go convention defined [here](https://github.com/golang/lint/blob/master/lint.go#L700) - -### Tests - -You can run all unit tests using: - -```bash -make test -``` - -### Release - -You can create a release by tagging the version as follows: - -``` bash -git tag v1.0.0 -m "Release version v1.0.0" -git push origin v1.0.0 -``` - -The GitHub [release workflow](.github/workflows/release.yml) triggers immediately after the tag is pushed upstream. This flow will -release the binaries using the [goreleaser](https://goreleaser.com/actions/) action and then it will build and publish the docker image into Docker Hub. - -The released artifacts are signed using [cosign](https://docs.sigstore.dev/). 
You can use the public key from [cosign.pub](cosign.pub) -file to verify the signature of docker image and binaries files. - -The docker image signature can be verified with the following command: -``` -cosign verify --key cosign.pub securego/gosec: -``` - -The binary files signature can be verified with the following command: -``` -cosign verify-blob --key cosign.pub --signature gosec__darwin_amd64.tar.gz.sig gosec__darwin_amd64.tar.gz -``` - -### Docker image - -You can also build locally the docker image by using the command: - -```bash -make image -``` - -You can run the `gosec` tool in a container against your local Go project. You only have to mount the project -into a volume as follows: - -```bash -docker run --rm -it -w // -v /:/ securego/gosec //... -``` - -**Note:** the current working directory needs to be set with `-w` option in order to get successfully resolved the dependencies from go module file - -### Generate TLS rule - -The configuration of TLS rule can be generated from [Mozilla's TLS ciphers recommendation](https://statics.tls.security.mozilla.org/server-side-tls-conf.json). - -First you need to install the generator tool: - -```bash -go get github.com/securego/gosec/v2/cmd/tlsconfig/... -``` - -You can invoke now the `go generate` in the root of the project: - -```bash -go generate ./... -``` - -This will generate the `rules/tls_config.go` file which will contain the current ciphers recommendation from Mozilla. - -## Who is using gosec? - -This is a [list](USERS.md) with some of the gosec's users. - -## Sponsors - -Support this project by becoming a sponsor. Your logo will show up here with a link to your website - - diff --git a/vendor/github.com/securego/gosec/v2/USERS.md b/vendor/github.com/securego/gosec/v2/USERS.md deleted file mode 100644 index 9b6e4eeee..000000000 --- a/vendor/github.com/securego/gosec/v2/USERS.md +++ /dev/null @@ -1,30 +0,0 @@ -# Users - -This is a list of gosec's users. Please send a pull request with your organisation or project name if you are using gosec. - -## Companies - -1. [Gitlab](https://docs.gitlab.com/ee/user/application_security/sast/) -2. [CloudBees](https://cloudbees.com) -3. [VMware](https://www.vmware.com) -4. [Codacy](https://support.codacy.com/hc/en-us/articles/213632009-Engines) -5. [Coinbase](https://github.com/coinbase/watchdog/blob/master/Makefile#L12) -6. [RedHat/OpenShift](https://github.com/openshift/openshift-azure) -7. [Guardalis](https://www.guardrails.io/) -8. [1Password](https://github.com/1Password/srp) -9. [PingCAP/tidb](https://github.com/pingcap/tidb) -10. [Checkmarx](https://www.checkmarx.com/) -11. [SeatGeek](https://www.seatgeek.com/) -12. [reMarkable](https://remarkable.com) - -## Projects - -1. [golangci-lint](https://github.com/golangci/golangci-lint) -2. [Kubernetes](https://github.com/kubernetes/kubernetes) (via golangci) -3. [caddy](https://github.com/caddyserver/caddy) (via golangci) -4. [Jenkins X](https://github.com/jenkins-x/jx/blob/bdc51840a41b75776159c1c7b7faa1cf477be473/hack/linter.sh#L25) -5. [HuskyCI](https://huskyci.opensource.globo.com/) -6. [GolangCI](https://golangci.com/) -7. [semgrep.live](https://semgrep.live/) -8. [gofiber](https://github.com/gofiber/fiber) -9. 
[KICS](https://github.com/Checkmarx/kics) diff --git a/vendor/github.com/securego/gosec/v2/action.yml b/vendor/github.com/securego/gosec/v2/action.yml deleted file mode 100644 index 3097075ce..000000000 --- a/vendor/github.com/securego/gosec/v2/action.yml +++ /dev/null @@ -1,19 +0,0 @@ -name: 'Gosec Security Checker' -description: 'Runs the gosec security checker' -author: '@ccojocar' - -inputs: - args: - description: 'Arguments for gosec' - required: true - default: '-h' - -runs: - using: 'docker' - image: 'docker://securego/gosec:2.18.2' - args: - - ${{ inputs.args }} - -branding: - icon: 'shield' - color: 'blue' diff --git a/vendor/github.com/securego/gosec/v2/analyzer.go b/vendor/github.com/securego/gosec/v2/analyzer.go deleted file mode 100644 index 0b1225b9b..000000000 --- a/vendor/github.com/securego/gosec/v2/analyzer.go +++ /dev/null @@ -1,721 +0,0 @@ -// (c) Copyright 2016 Hewlett Packard Enterprise Development LP -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -// Package gosec holds the central scanning logic used by gosec security scanner -package gosec - -import ( - "fmt" - "go/ast" - "go/build" - "go/token" - "go/types" - "log" - "os" - "path" - "path/filepath" - "reflect" - "regexp" - "strconv" - "strings" - "sync" - - "github.com/securego/gosec/v2/analyzers" - "github.com/securego/gosec/v2/issue" - "golang.org/x/tools/go/analysis" - "golang.org/x/tools/go/analysis/passes/buildssa" - "golang.org/x/tools/go/packages" -) - -// LoadMode controls the amount of details to return when loading the packages -const LoadMode = packages.NeedName | - packages.NeedFiles | - packages.NeedCompiledGoFiles | - packages.NeedImports | - packages.NeedTypes | - packages.NeedTypesSizes | - packages.NeedTypesInfo | - packages.NeedSyntax | - packages.NeedModule | - packages.NeedEmbedFiles | - packages.NeedEmbedPatterns - -const externalSuppressionJustification = "Globally suppressed." 
- -const aliasOfAllRules = "*" - -var generatedCodePattern = regexp.MustCompile(`^// Code generated .* DO NOT EDIT\.$`) - -type ignore struct { - start int - end int - suppressions map[string][]issue.SuppressionInfo -} - -type ignores map[string][]ignore - -func newIgnores() ignores { - return make(map[string][]ignore) -} - -func (i ignores) parseLine(line string) (int, int) { - parts := strings.Split(line, "-") - start, err := strconv.Atoi(parts[0]) - if err != nil { - start = 0 - } - end := start - if len(parts) > 1 { - if e, err := strconv.Atoi(parts[1]); err == nil { - end = e - } - } - return start, end -} - -func (i ignores) add(file string, line string, suppressions map[string]issue.SuppressionInfo) { - is := []ignore{} - if _, ok := i[file]; ok { - is = i[file] - } - found := false - start, end := i.parseLine(line) - for _, ig := range is { - if ig.start <= start && ig.end >= end { - found = true - for r, s := range suppressions { - ss, ok := ig.suppressions[r] - if !ok { - ss = []issue.SuppressionInfo{} - } - ss = append(ss, s) - ig.suppressions[r] = ss - } - break - } - } - if !found { - ig := ignore{ - start: start, - end: end, - suppressions: map[string][]issue.SuppressionInfo{}, - } - for r, s := range suppressions { - ig.suppressions[r] = []issue.SuppressionInfo{s} - } - is = append(is, ig) - } - i[file] = is -} - -func (i ignores) get(file string, line string) map[string][]issue.SuppressionInfo { - start, end := i.parseLine(line) - if is, ok := i[file]; ok { - for _, i := range is { - if start <= i.start && end >= i.end { - return i.suppressions - } - } - } - return map[string][]issue.SuppressionInfo{} -} - -// The Context is populated with data parsed from the source code as it is scanned. -// It is passed through to all rule functions as they are called. Rules may use -// this data in conjunction with the encountered AST node. -type Context struct { - FileSet *token.FileSet - Comments ast.CommentMap - Info *types.Info - Pkg *types.Package - PkgFiles []*ast.File - Root *ast.File - Imports *ImportTracker - Config Config - Ignores ignores - PassedValues map[string]interface{} -} - -// GetFileAtNodePos returns the file at the node position in the file set available in the context. -func (ctx *Context) GetFileAtNodePos(node ast.Node) *token.File { - return ctx.FileSet.File(node.Pos()) -} - -// NewIssue creates a new issue -func (ctx *Context) NewIssue(node ast.Node, ruleID, desc string, - severity, confidence issue.Score, -) *issue.Issue { - return issue.New(ctx.GetFileAtNodePos(node), node, ruleID, desc, severity, confidence) -} - -// Metrics used when reporting information about a scanning run. -type Metrics struct { - NumFiles int `json:"files"` - NumLines int `json:"lines"` - NumNosec int `json:"nosec"` - NumFound int `json:"found"` -} - -// Analyzer object is the main object of gosec. It has methods traverse an AST -// and invoke the correct checking rules as on each node as required. -type Analyzer struct { - ignoreNosec bool - ruleset RuleSet - context *Context - config Config - logger *log.Logger - issues []*issue.Issue - stats *Metrics - errors map[string][]Error // keys are file paths; values are the golang errors in those files - tests bool - excludeGenerated bool - showIgnored bool - trackSuppressions bool - concurrency int - analyzerList []*analysis.Analyzer - mu sync.Mutex -} - -// NewAnalyzer builds a new analyzer. 
-func NewAnalyzer(conf Config, tests bool, excludeGenerated bool, trackSuppressions bool, concurrency int, logger *log.Logger) *Analyzer { - ignoreNoSec := false - if enabled, err := conf.IsGlobalEnabled(Nosec); err == nil { - ignoreNoSec = enabled - } - showIgnored := false - if enabled, err := conf.IsGlobalEnabled(ShowIgnored); err == nil { - showIgnored = enabled - } - if logger == nil { - logger = log.New(os.Stderr, "[gosec]", log.LstdFlags) - } - return &Analyzer{ - ignoreNosec: ignoreNoSec, - showIgnored: showIgnored, - ruleset: NewRuleSet(), - context: &Context{}, - config: conf, - logger: logger, - issues: make([]*issue.Issue, 0, 16), - stats: &Metrics{}, - errors: make(map[string][]Error), - tests: tests, - concurrency: concurrency, - excludeGenerated: excludeGenerated, - trackSuppressions: trackSuppressions, - analyzerList: analyzers.BuildDefaultAnalyzers(), - } -} - -// SetConfig updates the analyzer configuration -func (gosec *Analyzer) SetConfig(conf Config) { - gosec.config = conf -} - -// Config returns the current configuration -func (gosec *Analyzer) Config() Config { - return gosec.config -} - -// LoadRules instantiates all the rules to be used when analyzing source -// packages -func (gosec *Analyzer) LoadRules(ruleDefinitions map[string]RuleBuilder, ruleSuppressed map[string]bool) { - for id, def := range ruleDefinitions { - r, nodes := def(id, gosec.config) - gosec.ruleset.Register(r, ruleSuppressed[id], nodes...) - } -} - -// Process kicks off the analysis process for a given package -func (gosec *Analyzer) Process(buildTags []string, packagePaths ...string) error { - config := &packages.Config{ - Mode: LoadMode, - BuildFlags: buildTags, - Tests: gosec.tests, - } - - type result struct { - pkgPath string - pkgs []*packages.Package - err error - } - - results := make(chan result) - jobs := make(chan string, len(packagePaths)) - quit := make(chan struct{}) - - var wg sync.WaitGroup - - worker := func(j chan string, r chan result, quit chan struct{}) { - for { - select { - case s := <-j: - pkgs, err := gosec.load(s, config) - select { - case r <- result{pkgPath: s, pkgs: pkgs, err: err}: - case <-quit: - // we've been told to stop, probably an error while - // processing a previous result. - wg.Done() - return - } - default: - // j is empty and there are no jobs left - wg.Done() - return - } - } - } - - // fill the buffer - for _, pkgPath := range packagePaths { - jobs <- pkgPath - } - - for i := 0; i < gosec.concurrency; i++ { - wg.Add(1) - go worker(jobs, results, quit) - } - - go func() { - wg.Wait() - close(results) - }() - - for r := range results { - if r.err != nil { - gosec.AppendError(r.pkgPath, r.err) - } - for _, pkg := range r.pkgs { - if pkg.Name != "" { - err := gosec.ParseErrors(pkg) - if err != nil { - close(quit) - wg.Wait() // wait for the goroutines to stop - return fmt.Errorf("parsing errors in pkg %q: %w", pkg.Name, err) - } - gosec.CheckRules(pkg) - gosec.CheckAnalyzers(pkg) - } - } - } - sortErrors(gosec.errors) - return nil -} - -func (gosec *Analyzer) load(pkgPath string, conf *packages.Config) ([]*packages.Package, error) { - abspath, err := GetPkgAbsPath(pkgPath) - if err != nil { - gosec.logger.Printf("Skipping: %s. Path doesn't exist.", abspath) - return []*packages.Package{}, nil - } - - gosec.logger.Println("Import directory:", abspath) - // step 1/3 create build context. - buildD := build.Default - // step 2/3: add build tags to get env dependent files into basePackage. 
- gosec.mu.Lock() - buildD.BuildTags = conf.BuildFlags - gosec.mu.Unlock() - basePackage, err := buildD.ImportDir(pkgPath, build.ImportComment) - if err != nil { - return []*packages.Package{}, fmt.Errorf("importing dir %q: %w", pkgPath, err) - } - - var packageFiles []string - for _, filename := range basePackage.GoFiles { - packageFiles = append(packageFiles, path.Join(pkgPath, filename)) - } - for _, filename := range basePackage.CgoFiles { - packageFiles = append(packageFiles, path.Join(pkgPath, filename)) - } - - if gosec.tests { - testsFiles := make([]string, 0) - testsFiles = append(testsFiles, basePackage.TestGoFiles...) - testsFiles = append(testsFiles, basePackage.XTestGoFiles...) - for _, filename := range testsFiles { - packageFiles = append(packageFiles, path.Join(pkgPath, filename)) - } - } - - // step 3/3 remove build tags from conf to proceed build correctly. - gosec.mu.Lock() - conf.BuildFlags = nil - defer gosec.mu.Unlock() - pkgs, err := packages.Load(conf, packageFiles...) - if err != nil { - return []*packages.Package{}, fmt.Errorf("loading files from package %q: %w", pkgPath, err) - } - return pkgs, nil -} - -// CheckRules runs analysis on the given package. -func (gosec *Analyzer) CheckRules(pkg *packages.Package) { - gosec.logger.Println("Checking package:", pkg.Name) - for _, file := range pkg.Syntax { - fp := pkg.Fset.File(file.Pos()) - if fp == nil { - // skip files which cannot be located - continue - } - checkedFile := fp.Name() - // Skip the no-Go file from analysis (e.g. a Cgo files is expanded in 3 different files - // stored in the cache which do not need to by analyzed) - if filepath.Ext(checkedFile) != ".go" { - continue - } - if gosec.excludeGenerated && isGeneratedFile(file) { - gosec.logger.Println("Ignoring generated file:", checkedFile) - continue - } - - gosec.logger.Println("Checking file:", checkedFile) - gosec.context.FileSet = pkg.Fset - gosec.context.Config = gosec.config - gosec.context.Comments = ast.NewCommentMap(gosec.context.FileSet, file, file.Comments) - gosec.context.Root = file - gosec.context.Info = pkg.TypesInfo - gosec.context.Pkg = pkg.Types - gosec.context.PkgFiles = pkg.Syntax - gosec.context.Imports = NewImportTracker() - gosec.context.PassedValues = make(map[string]interface{}) - gosec.context.Ignores = newIgnores() - gosec.updateIgnores() - ast.Walk(gosec, file) - gosec.stats.NumFiles++ - gosec.stats.NumLines += pkg.Fset.File(file.Pos()).LineCount() - } -} - -// CheckAnalyzers runs analyzers on a given package. 
-func (gosec *Analyzer) CheckAnalyzers(pkg *packages.Package) { - ssaResult, err := gosec.buildSSA(pkg) - if err != nil || ssaResult == nil { - gosec.logger.Printf("Error building the SSA representation of the package %q: %s", pkg.Name, err) - return - } - - resultMap := map[*analysis.Analyzer]interface{}{ - buildssa.Analyzer: &analyzers.SSAAnalyzerResult{ - Config: gosec.Config(), - Logger: gosec.logger, - SSA: ssaResult.(*buildssa.SSA), - }, - } - - generatedFiles := gosec.generatedFiles(pkg) - - for _, analyzer := range gosec.analyzerList { - pass := &analysis.Pass{ - Analyzer: analyzer, - Fset: pkg.Fset, - Files: pkg.Syntax, - OtherFiles: pkg.OtherFiles, - IgnoredFiles: pkg.IgnoredFiles, - Pkg: pkg.Types, - TypesInfo: pkg.TypesInfo, - TypesSizes: pkg.TypesSizes, - ResultOf: resultMap, - Report: func(d analysis.Diagnostic) {}, - ImportObjectFact: nil, - ExportObjectFact: nil, - ImportPackageFact: nil, - ExportPackageFact: nil, - AllObjectFacts: nil, - AllPackageFacts: nil, - } - result, err := pass.Analyzer.Run(pass) - if err != nil { - gosec.logger.Printf("Error running analyzer %s: %s\n", analyzer.Name, err) - continue - } - if result != nil { - if passIssues, ok := result.([]*issue.Issue); ok { - for _, iss := range passIssues { - if gosec.excludeGenerated { - if _, ok := generatedFiles[iss.File]; ok { - continue - } - } - gosec.updateIssues(iss) - } - } - } - } -} - -func (gosec *Analyzer) generatedFiles(pkg *packages.Package) map[string]bool { - generatedFiles := map[string]bool{} - for _, file := range pkg.Syntax { - if isGeneratedFile(file) { - fp := pkg.Fset.File(file.Pos()) - if fp == nil { - // skip files which cannot be located - continue - } - generatedFiles[fp.Name()] = true - } - } - return generatedFiles -} - -// buildSSA runs the SSA pass which builds the SSA representation of the package. It handles gracefully any panic. 
-func (gosec *Analyzer) buildSSA(pkg *packages.Package) (interface{}, error) { - defer func() { - if r := recover(); r != nil { - gosec.logger.Printf("Panic when running SSA analyser on package: %s", pkg.Name) - } - }() - ssaPass := &analysis.Pass{ - Analyzer: buildssa.Analyzer, - Fset: pkg.Fset, - Files: pkg.Syntax, - OtherFiles: pkg.OtherFiles, - IgnoredFiles: pkg.IgnoredFiles, - Pkg: pkg.Types, - TypesInfo: pkg.TypesInfo, - TypesSizes: pkg.TypesSizes, - ResultOf: nil, - Report: nil, - ImportObjectFact: nil, - ExportObjectFact: nil, - ImportPackageFact: nil, - ExportPackageFact: nil, - AllObjectFacts: nil, - AllPackageFacts: nil, - } - - return ssaPass.Analyzer.Run(ssaPass) -} - -func isGeneratedFile(file *ast.File) bool { - for _, comment := range file.Comments { - for _, row := range comment.List { - if generatedCodePattern.MatchString(row.Text) { - return true - } - } - } - return false -} - -// ParseErrors parses the errors from given package -func (gosec *Analyzer) ParseErrors(pkg *packages.Package) error { - if len(pkg.Errors) == 0 { - return nil - } - for _, pkgErr := range pkg.Errors { - parts := strings.Split(pkgErr.Pos, ":") - file := parts[0] - var err error - var line int - if len(parts) > 1 { - if line, err = strconv.Atoi(parts[1]); err != nil { - return fmt.Errorf("parsing line: %w", err) - } - } - var column int - if len(parts) > 2 { - if column, err = strconv.Atoi(parts[2]); err != nil { - return fmt.Errorf("parsing column: %w", err) - } - } - msg := strings.TrimSpace(pkgErr.Msg) - newErr := NewError(line, column, msg) - if errSlice, ok := gosec.errors[file]; ok { - gosec.errors[file] = append(errSlice, *newErr) - } else { - errSlice = []Error{} - gosec.errors[file] = append(errSlice, *newErr) - } - } - return nil -} - -// AppendError appends an error to the file errors -func (gosec *Analyzer) AppendError(file string, err error) { - // Do not report the error for empty packages (e.g. files excluded from build with a tag) - r := regexp.MustCompile(`no buildable Go source files in`) - if r.MatchString(err.Error()) { - return - } - errors := make([]Error, 0) - if ferrs, ok := gosec.errors[file]; ok { - errors = ferrs - } - ferr := NewError(0, 0, err.Error()) - errors = append(errors, *ferr) - gosec.errors[file] = errors -} - -// ignore a node (and sub-tree) if it is tagged with a nosec tag comment -func (gosec *Analyzer) ignore(n ast.Node) map[string]issue.SuppressionInfo { - if groups, ok := gosec.context.Comments[n]; ok && !gosec.ignoreNosec { - - // Checks if an alternative for #nosec is set and, if not, uses the default. - noSecDefaultTag, err := gosec.config.GetGlobal(Nosec) - if err != nil { - noSecDefaultTag = NoSecTag(string(Nosec)) - } else { - noSecDefaultTag = NoSecTag(noSecDefaultTag) - } - noSecAlternativeTag, err := gosec.config.GetGlobal(NoSecAlternative) - if err != nil { - noSecAlternativeTag = noSecDefaultTag - } else { - noSecAlternativeTag = NoSecTag(noSecAlternativeTag) - } - - for _, group := range groups { - comment := strings.TrimSpace(group.Text()) - foundDefaultTag := strings.HasPrefix(comment, noSecDefaultTag) || regexp.MustCompile("\n *"+noSecDefaultTag).MatchString(comment) - foundAlternativeTag := strings.HasPrefix(comment, noSecAlternativeTag) || regexp.MustCompile("\n *"+noSecAlternativeTag).MatchString(comment) - - if foundDefaultTag || foundAlternativeTag { - gosec.stats.NumNosec++ - - // Discard what's in front of the nosec tag. 
- if foundDefaultTag { - comment = strings.SplitN(comment, noSecDefaultTag, 2)[1] - } else { - comment = strings.SplitN(comment, noSecAlternativeTag, 2)[1] - } - - // Extract the directive and the justification. - justification := "" - commentParts := regexp.MustCompile(`-{2,}`).Split(comment, 2) - directive := commentParts[0] - if len(commentParts) > 1 { - justification = strings.TrimSpace(strings.TrimRight(commentParts[1], "\n")) - } - - // Pull out the specific rules that are listed to be ignored. - re := regexp.MustCompile(`(G\d{3})`) - matches := re.FindAllStringSubmatch(directive, -1) - - suppression := issue.SuppressionInfo{ - Kind: "inSource", - Justification: justification, - } - - // Find the rule IDs to ignore. - ignores := make(map[string]issue.SuppressionInfo) - for _, v := range matches { - ignores[v[1]] = suppression - } - - // If no specific rules were given, ignore everything. - if len(matches) == 0 { - ignores[aliasOfAllRules] = suppression - } - return ignores - } - } - } - return nil -} - -// Visit runs the gosec visitor logic over an AST created by parsing go code. -// Rule methods added with AddRule will be invoked as necessary. -func (gosec *Analyzer) Visit(n ast.Node) ast.Visitor { - // Using ast.File instead of ast.ImportSpec, so that we can track all imports at once. - switch i := n.(type) { - case *ast.File: - gosec.context.Imports.TrackFile(i) - } - - for _, rule := range gosec.ruleset.RegisteredFor(n) { - issue, err := rule.Match(n, gosec.context) - if err != nil { - file, line := GetLocation(n, gosec.context) - file = path.Base(file) - gosec.logger.Printf("Rule error: %v => %s (%s:%d)\n", reflect.TypeOf(rule), err, file, line) - } - gosec.updateIssues(issue) - } - return gosec -} - -func (gosec *Analyzer) updateIgnores() { - for n := range gosec.context.Comments { - gosec.updateIgnoredRulesForNode(n) - } -} - -func (gosec *Analyzer) updateIgnoredRulesForNode(n ast.Node) { - ignoredRules := gosec.ignore(n) - if len(ignoredRules) > 0 { - if gosec.context.Ignores == nil { - gosec.context.Ignores = newIgnores() - } - line := issue.GetLine(gosec.context.FileSet.File(n.Pos()), n) - gosec.context.Ignores.add( - gosec.context.FileSet.File(n.Pos()).Name(), - line, - ignoredRules, - ) - } -} - -func (gosec *Analyzer) getSuppressionsAtLineInFile(file string, line string, id string) ([]issue.SuppressionInfo, bool) { - ignoredRules := gosec.context.Ignores.get(file, line) - - // Check if the rule was specifically suppressed at this location. - generalSuppressions, generalIgnored := ignoredRules[aliasOfAllRules] - ruleSuppressions, ruleIgnored := ignoredRules[id] - ignored := generalIgnored || ruleIgnored - suppressions := append(generalSuppressions, ruleSuppressions...) - - // Track external suppressions of this rule. 
- if gosec.ruleset.IsRuleSuppressed(id) { - ignored = true - suppressions = append(suppressions, issue.SuppressionInfo{ - Kind: "external", - Justification: externalSuppressionJustification, - }) - } - return suppressions, ignored -} - -func (gosec *Analyzer) updateIssues(issue *issue.Issue) { - if issue != nil { - suppressions, ignored := gosec.getSuppressionsAtLineInFile(issue.File, issue.Line, issue.RuleID) - if gosec.showIgnored { - issue.NoSec = ignored - } - if !ignored || !gosec.showIgnored { - gosec.stats.NumFound++ - } - if ignored && gosec.trackSuppressions { - issue.WithSuppressions(suppressions) - gosec.issues = append(gosec.issues, issue) - } else if !ignored || gosec.showIgnored || gosec.ignoreNosec { - gosec.issues = append(gosec.issues, issue) - } - } -} - -// Report returns the current issues discovered and the metrics about the scan -func (gosec *Analyzer) Report() ([]*issue.Issue, *Metrics, map[string][]Error) { - return gosec.issues, gosec.stats, gosec.errors -} - -// Reset clears state such as context, issues and metrics from the configured analyzer -func (gosec *Analyzer) Reset() { - gosec.context = &Context{} - gosec.issues = make([]*issue.Issue, 0, 16) - gosec.stats = &Metrics{} - gosec.ruleset = NewRuleSet() -} diff --git a/vendor/github.com/securego/gosec/v2/analyzers/slice_bounds.go b/vendor/github.com/securego/gosec/v2/analyzers/slice_bounds.go deleted file mode 100644 index 08a55eb42..000000000 --- a/vendor/github.com/securego/gosec/v2/analyzers/slice_bounds.go +++ /dev/null @@ -1,386 +0,0 @@ -// (c) Copyright gosec's authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package analyzers - -import ( - "errors" - "fmt" - "go/token" - "regexp" - "strconv" - "strings" - - "golang.org/x/tools/go/analysis" - "golang.org/x/tools/go/analysis/passes/buildssa" - "golang.org/x/tools/go/ssa" - - "github.com/securego/gosec/v2/issue" -) - -type bound int - -const ( - lowerUnbounded bound = iota - upperUnbounded - unbounded - upperBounded -) - -const maxDepth = 20 - -func newSliceBoundsAnalyzer(id string, description string) *analysis.Analyzer { - return &analysis.Analyzer{ - Name: id, - Doc: description, - Run: runSliceBounds, - Requires: []*analysis.Analyzer{buildssa.Analyzer}, - } -} - -func runSliceBounds(pass *analysis.Pass) (interface{}, error) { - ssaResult, err := getSSAResult(pass) - if err != nil { - return nil, err - } - - issues := map[ssa.Instruction]*issue.Issue{} - ifs := map[ssa.If]*ssa.BinOp{} - for _, mcall := range ssaResult.SSA.SrcFuncs { - for _, block := range mcall.DomPreorder() { - for _, instr := range block.Instrs { - switch instr := instr.(type) { - case *ssa.Alloc: - sliceCap, err := extractSliceCapFromAlloc(instr.String()) - if err != nil { - break - } - allocRefs := instr.Referrers() - if allocRefs == nil { - break - } - for _, instr := range *allocRefs { - if slice, ok := instr.(*ssa.Slice); ok { - if _, ok := slice.X.(*ssa.Alloc); ok { - if slice.Parent() != nil { - l, h := extractSliceBounds(slice) - newCap := computeSliceNewCap(l, h, sliceCap) - violations := []ssa.Instruction{} - trackSliceBounds(0, newCap, slice, &violations, ifs) - for _, s := range violations { - switch s := s.(type) { - case *ssa.Slice: - issue := newIssue( - pass.Analyzer.Name, - "slice bounds out of range", - pass.Fset, - s.Pos(), - issue.Low, - issue.High) - issues[s] = issue - case *ssa.IndexAddr: - issue := newIssue( - pass.Analyzer.Name, - "slice index out of range", - pass.Fset, - s.Pos(), - issue.Low, - issue.High) - issues[s] = issue - } - } - } - } - } - } - } - } - } - } - - for ifref, binop := range ifs { - bound, value, err := extractBinOpBound(binop) - if err != nil { - continue - } - for i, block := range ifref.Block().Succs { - if i == 1 { - bound = invBound(bound) - } - for _, instr := range block.Instrs { - if _, ok := issues[instr]; ok { - switch bound { - case lowerUnbounded: - break - case upperUnbounded, unbounded: - delete(issues, instr) - case upperBounded: - switch tinstr := instr.(type) { - case *ssa.Slice: - lower, upper := extractSliceBounds(tinstr) - if isSliceInsideBounds(0, value, lower, upper) { - delete(issues, instr) - } - case *ssa.IndexAddr: - indexValue, err := extractIntValue(tinstr.Index.String()) - if err != nil { - break - } - if isSliceIndexInsideBounds(0, value, indexValue) { - delete(issues, instr) - } - } - } - } - } - } - } - - foundIssues := []*issue.Issue{} - for _, issue := range issues { - foundIssues = append(foundIssues, issue) - } - if len(foundIssues) > 0 { - return foundIssues, nil - } - return nil, nil -} - -func trackSliceBounds(depth int, sliceCap int, slice ssa.Node, violations *[]ssa.Instruction, ifs map[ssa.If]*ssa.BinOp) { - if depth == maxDepth { - return - } - depth++ - if violations == nil { - violations = &[]ssa.Instruction{} - } - referrers := slice.Referrers() - if referrers != nil { - for _, refinstr := range *referrers { - switch refinstr := refinstr.(type) { - case *ssa.Slice: - checkAllSlicesBounds(depth, sliceCap, refinstr, violations, ifs) - switch refinstr.X.(type) { - case *ssa.Alloc, *ssa.Parameter: - l, h := extractSliceBounds(refinstr) - newCap := computeSliceNewCap(l, h, sliceCap) - 
trackSliceBounds(depth, newCap, refinstr, violations, ifs) - } - case *ssa.IndexAddr: - indexValue, err := extractIntValue(refinstr.Index.String()) - if err == nil && !isSliceIndexInsideBounds(0, sliceCap, indexValue) { - *violations = append(*violations, refinstr) - } - case *ssa.Call: - if ifref, cond := extractSliceIfLenCondition(refinstr); ifref != nil && cond != nil { - ifs[*ifref] = cond - } else { - parPos := -1 - for pos, arg := range refinstr.Call.Args { - if a, ok := arg.(*ssa.Slice); ok && a == slice { - parPos = pos - } - } - if fn, ok := refinstr.Call.Value.(*ssa.Function); ok { - if len(fn.Params) > parPos && parPos > -1 { - param := fn.Params[parPos] - trackSliceBounds(depth, sliceCap, param, violations, ifs) - } - } - } - } - } - } -} - -func checkAllSlicesBounds(depth int, sliceCap int, slice *ssa.Slice, violations *[]ssa.Instruction, ifs map[ssa.If]*ssa.BinOp) { - if depth == maxDepth { - return - } - depth++ - if violations == nil { - violations = &[]ssa.Instruction{} - } - sliceLow, sliceHigh := extractSliceBounds(slice) - if !isSliceInsideBounds(0, sliceCap, sliceLow, sliceHigh) { - *violations = append(*violations, slice) - } - switch slice.X.(type) { - case *ssa.Alloc, *ssa.Parameter, *ssa.Slice: - l, h := extractSliceBounds(slice) - newCap := computeSliceNewCap(l, h, sliceCap) - trackSliceBounds(depth, newCap, slice, violations, ifs) - } - - references := slice.Referrers() - if references == nil { - return - } - for _, ref := range *references { - switch s := ref.(type) { - case *ssa.Slice: - checkAllSlicesBounds(depth, sliceCap, s, violations, ifs) - switch s.X.(type) { - case *ssa.Alloc, *ssa.Parameter: - l, h := extractSliceBounds(s) - newCap := computeSliceNewCap(l, h, sliceCap) - trackSliceBounds(depth, newCap, s, violations, ifs) - } - } - } -} - -func extractSliceIfLenCondition(call *ssa.Call) (*ssa.If, *ssa.BinOp) { - if builtInLen, ok := call.Call.Value.(*ssa.Builtin); ok { - if builtInLen.Name() == "len" { - refs := call.Referrers() - if refs != nil { - for _, ref := range *refs { - if binop, ok := ref.(*ssa.BinOp); ok { - binoprefs := binop.Referrers() - for _, ref := range *binoprefs { - if ifref, ok := ref.(*ssa.If); ok { - return ifref, binop - } - } - } - } - } - } - } - return nil, nil -} - -func computeSliceNewCap(l, h, oldCap int) int { - if l == 0 && h == 0 { - return oldCap - } - if l > 0 && h == 0 { - return oldCap - l - } - if l == 0 && h > 0 { - return h - } - return h - l -} - -func invBound(bound bound) bound { - switch bound { - case lowerUnbounded: - return upperUnbounded - case upperUnbounded: - return lowerUnbounded - case upperBounded: - return unbounded - case unbounded: - return upperBounded - default: - return unbounded - } -} - -func extractBinOpBound(binop *ssa.BinOp) (bound, int, error) { - if binop.X != nil { - if x, ok := binop.X.(*ssa.Const); ok { - value, err := strconv.Atoi(x.Value.String()) - if err != nil { - return lowerUnbounded, value, err - } - switch binop.Op { - case token.LSS, token.LEQ: - return upperUnbounded, value, nil - case token.GTR, token.GEQ: - return lowerUnbounded, value, nil - case token.EQL: - return upperBounded, value, nil - case token.NEQ: - return unbounded, value, nil - } - } - } - if binop.Y != nil { - if y, ok := binop.Y.(*ssa.Const); ok { - value, err := strconv.Atoi(y.Value.String()) - if err != nil { - return lowerUnbounded, value, err - } - switch binop.Op { - case token.LSS, token.LEQ: - return lowerUnbounded, value, nil - case token.GTR, token.GEQ: - return upperUnbounded, value, nil - case 
token.EQL: - return upperBounded, value, nil - case token.NEQ: - return unbounded, value, nil - } - } - } - return lowerUnbounded, 0, fmt.Errorf("unable to extract constant from binop") -} - -func isSliceIndexInsideBounds(l, h int, index int) bool { - return (l <= index && index < h) -} - -func isSliceInsideBounds(l, h int, cl, ch int) bool { - return (l <= cl && h >= ch) && (l <= ch && h >= cl) -} - -func extractSliceBounds(slice *ssa.Slice) (int, int) { - var low int - if slice.Low != nil { - l, err := extractIntValue(slice.Low.String()) - if err == nil { - low = l - } - } - var high int - if slice.High != nil { - h, err := extractIntValue(slice.High.String()) - if err == nil { - high = h - } - } - return low, high -} - -func extractIntValue(value string) (int, error) { - parts := strings.Split(value, ":") - if len(parts) != 2 { - return 0, fmt.Errorf("invalid value: %s", value) - } - if parts[1] != "int" { - return 0, fmt.Errorf("invalid value: %s", value) - } - return strconv.Atoi(parts[0]) -} - -func extractSliceCapFromAlloc(instr string) (int, error) { - re := regexp.MustCompile(`new \[(\d+)\]*`) - var sliceCap int - matches := re.FindAllStringSubmatch(instr, -1) - if matches == nil { - return sliceCap, errors.New("no slice cap found") - } - - if len(matches) > 0 { - m := matches[0] - if len(m) > 1 { - return strconv.Atoi(m[1]) - } - } - - return 0, errors.New("no slice cap found") -} diff --git a/vendor/github.com/securego/gosec/v2/analyzers/util.go b/vendor/github.com/securego/gosec/v2/analyzers/util.go deleted file mode 100644 index 5941184aa..000000000 --- a/vendor/github.com/securego/gosec/v2/analyzers/util.go +++ /dev/null @@ -1,98 +0,0 @@ -// (c) Copyright gosec's authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package analyzers - -import ( - "fmt" - "go/token" - "log" - "os" - "strconv" - - "golang.org/x/tools/go/analysis" - "golang.org/x/tools/go/analysis/passes/buildssa" - - "github.com/securego/gosec/v2/issue" -) - -// SSAAnalyzerResult contains various information returned by the -// SSA analysis along with some configuration -type SSAAnalyzerResult struct { - Config map[string]interface{} - Logger *log.Logger - SSA *buildssa.SSA -} - -// BuildDefaultAnalyzers returns the default list of analyzers -func BuildDefaultAnalyzers() []*analysis.Analyzer { - return []*analysis.Analyzer{ - newSliceBoundsAnalyzer("G602", "Possible slice bounds out of range"), - } -} - -// getSSAResult retrieves the SSA result from analysis pass -func getSSAResult(pass *analysis.Pass) (*SSAAnalyzerResult, error) { - result, ok := pass.ResultOf[buildssa.Analyzer] - if !ok { - return nil, fmt.Errorf("no SSA result found in the analysis pass") - } - ssaResult, ok := result.(*SSAAnalyzerResult) - if !ok { - return nil, fmt.Errorf("the analysis pass result is not of type SSA") - } - return ssaResult, nil -} - -// newIssue creates a new gosec issue -func newIssue(analyzerID string, desc string, fileSet *token.FileSet, - pos token.Pos, severity, confidence issue.Score, -) *issue.Issue { - file := fileSet.File(pos) - line := file.Line(pos) - col := file.Position(pos).Column - - return &issue.Issue{ - RuleID: analyzerID, - File: file.Name(), - Line: strconv.Itoa(line), - Col: strconv.Itoa(col), - Severity: severity, - Confidence: confidence, - What: desc, - Cwe: issue.GetCweByRule(analyzerID), - Code: issueCodeSnippet(fileSet, pos), - } -} - -func issueCodeSnippet(fileSet *token.FileSet, pos token.Pos) string { - file := fileSet.File(pos) - - start := (int64)(file.Line(pos)) - if start-issue.SnippetOffset > 0 { - start = start - issue.SnippetOffset - } - end := (int64)(file.Line(pos)) - end = end + issue.SnippetOffset - - var code string - if file, err := os.Open(file.Name()); err == nil { - defer file.Close() // #nosec - code, err = issue.CodeSnippet(file, start, end) - if err != nil { - return err.Error() - } - } - return code -} diff --git a/vendor/github.com/securego/gosec/v2/call_list.go b/vendor/github.com/securego/gosec/v2/call_list.go deleted file mode 100644 index 4f2d6c54e..000000000 --- a/vendor/github.com/securego/gosec/v2/call_list.go +++ /dev/null @@ -1,118 +0,0 @@ -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package gosec - -import ( - "go/ast" - "strings" -) - -const vendorPath = "vendor/" - -type set map[string]bool - -// CallList is used to check for usage of specific packages -// and functions. 
-type CallList map[string]set - -// NewCallList creates a new empty CallList -func NewCallList() CallList { - return make(CallList) -} - -// AddAll will add several calls to the call list at once -func (c CallList) AddAll(selector string, idents ...string) { - for _, ident := range idents { - c.Add(selector, ident) - } -} - -// Add a selector and call to the call list -func (c CallList) Add(selector, ident string) { - if _, ok := c[selector]; !ok { - c[selector] = make(set) - } - c[selector][ident] = true -} - -// Contains returns true if the package and function are -// members of this call list. -func (c CallList) Contains(selector, ident string) bool { - if idents, ok := c[selector]; ok { - _, found := idents[ident] - return found - } - return false -} - -// ContainsPointer returns true if a pointer to the selector type or the type -// itself is a members of this call list. -func (c CallList) ContainsPointer(selector, indent string) bool { - if strings.HasPrefix(selector, "*") { - if c.Contains(selector, indent) { - return true - } - s := strings.TrimPrefix(selector, "*") - return c.Contains(s, indent) - } - return false -} - -// ContainsPkgCallExpr resolves the call expression name and type, and then further looks -// up the package path for that type. Finally, it determines if the call exists within the call list -func (c CallList) ContainsPkgCallExpr(n ast.Node, ctx *Context, stripVendor bool) *ast.CallExpr { - selector, ident, err := GetCallInfo(n, ctx) - if err != nil { - return nil - } - - // Selector can have two forms: - // 1. A short name if a module function is called (expr.Name). - // E.g., "big" if called function from math/big. - // 2. A full name if a structure function is called (TypeOf(expr)). - // E.g., "math/big.Rat" if called function of Rat structure from math/big. - if !strings.ContainsRune(selector, '.') { - // Use only explicit path (optionally strip vendor path prefix) to reduce conflicts - path, ok := GetImportPath(selector, ctx) - if !ok { - return nil - } - selector = path - } - - if stripVendor { - if vendorIdx := strings.Index(selector, vendorPath); vendorIdx >= 0 { - selector = selector[vendorIdx+len(vendorPath):] - } - } - if !c.Contains(selector, ident) { - return nil - } - - return n.(*ast.CallExpr) -} - -// ContainsCallExpr resolves the call expression name and type, and then determines -// if the call exists with the call list -func (c CallList) ContainsCallExpr(n ast.Node, ctx *Context) *ast.CallExpr { - selector, ident, err := GetCallInfo(n, ctx) - if err != nil { - return nil - } - if !c.Contains(selector, ident) && !c.ContainsPointer(selector, ident) { - return nil - } - - return n.(*ast.CallExpr) -} diff --git a/vendor/github.com/securego/gosec/v2/config.go b/vendor/github.com/securego/gosec/v2/config.go deleted file mode 100644 index 9cbb7a713..000000000 --- a/vendor/github.com/securego/gosec/v2/config.go +++ /dev/null @@ -1,137 +0,0 @@ -package gosec - -import ( - "bytes" - "encoding/json" - "fmt" - "io" -) - -const ( - // Globals are applicable to all rules and used for general - // configuration settings for gosec. 
- Globals = "global" -) - -// GlobalOption defines the name of the global options -type GlobalOption string - -const ( - // Nosec global option for #nosec directive - Nosec GlobalOption = "nosec" - // ShowIgnored defines whether nosec issues are counted as finding or not - ShowIgnored GlobalOption = "show-ignored" - // Audit global option which indicates that gosec runs in audit mode - Audit GlobalOption = "audit" - // NoSecAlternative global option alternative for #nosec directive - NoSecAlternative GlobalOption = "#nosec" - // ExcludeRules global option for some rules should not be load - ExcludeRules GlobalOption = "exclude" - // IncludeRules global option for should be load - IncludeRules GlobalOption = "include" - // SSA global option to enable go analysis framework with SSA support - SSA GlobalOption = "ssa" -) - -// NoSecTag returns the tag used to disable gosec for a line of code. -func NoSecTag(tag string) string { - return fmt.Sprintf("%s%s", "#", tag) -} - -// Config is used to provide configuration and customization to each of the rules. -type Config map[string]interface{} - -// NewConfig initializes a new configuration instance. The configuration data then -// needs to be loaded via c.ReadFrom(strings.NewReader("config data")) -// or from a *os.File. -func NewConfig() Config { - cfg := make(Config) - cfg[Globals] = make(map[GlobalOption]string) - return cfg -} - -func (c Config) keyToGlobalOptions(key string) GlobalOption { - return GlobalOption(key) -} - -func (c Config) convertGlobals() { - if globals, ok := c[Globals]; ok { - if settings, ok := globals.(map[string]interface{}); ok { - validGlobals := map[GlobalOption]string{} - for k, v := range settings { - validGlobals[c.keyToGlobalOptions(k)] = fmt.Sprintf("%v", v) - } - c[Globals] = validGlobals - } - } -} - -// ReadFrom implements the io.ReaderFrom interface. This -// should be used with io.Reader to load configuration from -// file or from string etc. -func (c Config) ReadFrom(r io.Reader) (int64, error) { - data, err := io.ReadAll(r) - if err != nil { - return int64(len(data)), err - } - if err = json.Unmarshal(data, &c); err != nil { - return int64(len(data)), err - } - c.convertGlobals() - return int64(len(data)), nil -} - -// WriteTo implements the io.WriteTo interface. This should -// be used to save or print out the configuration information. 
-func (c Config) WriteTo(w io.Writer) (int64, error) { - data, err := json.Marshal(c) - if err != nil { - return int64(len(data)), err - } - return io.Copy(w, bytes.NewReader(data)) -} - -// Get returns the configuration section for the supplied key -func (c Config) Get(section string) (interface{}, error) { - settings, found := c[section] - if !found { - return nil, fmt.Errorf("Section %s not in configuration", section) - } - return settings, nil -} - -// Set section in the configuration to specified value -func (c Config) Set(section string, value interface{}) { - c[section] = value -} - -// GetGlobal returns value associated with global configuration option -func (c Config) GetGlobal(option GlobalOption) (string, error) { - if globals, ok := c[Globals]; ok { - if settings, ok := globals.(map[GlobalOption]string); ok { - if value, ok := settings[option]; ok { - return value, nil - } - return "", fmt.Errorf("global setting for %s not found", option) - } - } - return "", fmt.Errorf("no global config options found") -} - -// SetGlobal associates a value with a global configuration option -func (c Config) SetGlobal(option GlobalOption, value string) { - if globals, ok := c[Globals]; ok { - if settings, ok := globals.(map[GlobalOption]string); ok { - settings[option] = value - } - } -} - -// IsGlobalEnabled checks if a global option is enabled -func (c Config) IsGlobalEnabled(option GlobalOption) (bool, error) { - value, err := c.GetGlobal(option) - if err != nil { - return false, err - } - return (value == "true" || value == "enabled"), nil -} diff --git a/vendor/github.com/securego/gosec/v2/cosign.pub b/vendor/github.com/securego/gosec/v2/cosign.pub deleted file mode 100644 index c6fd55988..000000000 --- a/vendor/github.com/securego/gosec/v2/cosign.pub +++ /dev/null @@ -1,4 +0,0 @@ ------BEGIN PUBLIC KEY----- -MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEFphl7f2VuFRfsi4wqiLUCQ9xHQgV -O2VMDNcvh+kxiymLXa+GkPzSKExFYIlVwfg13URvCiB+kFvITmLzuLiGQg== ------END PUBLIC KEY----- diff --git a/vendor/github.com/securego/gosec/v2/cwe/data.go b/vendor/github.com/securego/gosec/v2/cwe/data.go deleted file mode 100644 index 79a6b9d23..000000000 --- a/vendor/github.com/securego/gosec/v2/cwe/data.go +++ /dev/null @@ -1,140 +0,0 @@ -package cwe - -const ( - // Acronym is the acronym of CWE - Acronym = "CWE" - // Version the CWE version - Version = "4.4" - // ReleaseDateUtc the release Date of CWE Version - ReleaseDateUtc = "2021-03-15" - // Organization MITRE - Organization = "MITRE" - // Description the description of CWE - Description = "The MITRE Common Weakness Enumeration" - // InformationURI link to the published CWE PDF - InformationURI = "https://cwe.mitre.org/data/published/cwe_v" + Version + ".pdf/" - // DownloadURI link to the zipped XML of the CWE list - DownloadURI = "https://cwe.mitre.org/data/xml/cwec_v" + Version + ".xml.zip" -) - -var idWeaknesses = map[string]*Weakness{ - "118": { - ID: "118", - Description: "The software does not restrict or incorrectly restricts operations within the boundaries of a resource that is accessed using an index or pointer, such as memory or files.", - Name: "Incorrect Access of Indexable Resource ('Range Error')", - }, - "190": { - ID: "190", - Description: "The software performs a calculation that can produce an integer overflow or wraparound, when the logic assumes that the resulting value will always be larger than the original value. 
This can introduce other weaknesses when the calculation is used for resource management or execution control.", - Name: "Integer Overflow or Wraparound", - }, - "200": { - ID: "200", - Description: "The product exposes sensitive information to an actor that is not explicitly authorized to have access to that information.", - Name: "Exposure of Sensitive Information to an Unauthorized Actor", - }, - "22": { - ID: "22", - Description: "The software uses external input to construct a pathname that is intended to identify a file or directory that is located underneath a restricted parent directory, but the software does not properly neutralize special elements within the pathname that can cause the pathname to resolve to a location that is outside of the restricted directory.", - Name: "Improper Limitation of a Pathname to a Restricted Directory ('Path Traversal')", - }, - "242": { - ID: "242", - Description: "The program calls a function that can never be guaranteed to work safely.", - Name: "Use of Inherently Dangerous Function", - }, - "276": { - ID: "276", - Description: "During installation, installed file permissions are set to allow anyone to modify those files.", - Name: "Incorrect Default Permissions", - }, - "295": { - ID: "295", - Description: "The software does not validate, or incorrectly validates, a certificate.", - Name: "Improper Certificate Validation", - }, - "310": { - ID: "310", - Description: "Weaknesses in this category are related to the design and implementation of data confidentiality and integrity. Frequently these deal with the use of encoding techniques, encryption libraries, and hashing algorithms. The weaknesses in this category could lead to a degradation of the quality data if they are not addressed.", - Name: "Cryptographic Issues", - }, - "322": { - ID: "322", - Description: "The software performs a key exchange with an actor without verifying the identity of that actor.", - Name: "Key Exchange without Entity Authentication", - }, - "326": { - ID: "326", - Description: "The software stores or transmits sensitive data using an encryption scheme that is theoretically sound, but is not strong enough for the level of protection required.", - Name: "Inadequate Encryption Strength", - }, - "327": { - ID: "327", - Description: "The use of a broken or risky cryptographic algorithm is an unnecessary risk that may result in the exposure of sensitive information.", - Name: "Use of a Broken or Risky Cryptographic Algorithm", - }, - "338": { - ID: "338", - Description: "The product uses a Pseudo-Random Number Generator (PRNG) in a security context, but the PRNG's algorithm is not cryptographically strong.", - Name: "Use of Cryptographically Weak Pseudo-Random Number Generator (PRNG)", - }, - "377": { - ID: "377", - Description: "Creating and using insecure temporary files can leave application and system data vulnerable to attack.", - Name: "Insecure Temporary File", - }, - "400": { - ID: "400", - Description: "The software does not properly control the allocation and maintenance of a limited resource, thereby enabling an actor to influence the amount of resources consumed, eventually leading to the exhaustion of available resources.", - Name: "Uncontrolled Resource Consumption", - }, - "409": { - ID: "409", - Description: "The software does not handle or incorrectly handles a compressed input with a very high compression ratio that produces a large output.", - Name: "Improper Handling of Highly Compressed Data (Data Amplification)", - }, - "703": { - ID: "703", - 
Description: "The software does not properly anticipate or handle exceptional conditions that rarely occur during normal operation of the software.", - Name: "Improper Check or Handling of Exceptional Conditions", - }, - "78": { - ID: "78", - Description: "The software constructs all or part of an OS command using externally-influenced input from an upstream component, but it does not neutralize or incorrectly neutralizes special elements that could modify the intended OS command when it is sent to a downstream component.", - Name: "Improper Neutralization of Special Elements used in an OS Command ('OS Command Injection')", - }, - "79": { - ID: "79", - Description: "The software does not neutralize or incorrectly neutralizes user-controllable input before it is placed in output that is used as a web page that is served to other users.", - Name: "Improper Neutralization of Input During Web Page Generation ('Cross-site Scripting')", - }, - "798": { - ID: "798", - Description: "The software contains hard-coded credentials, such as a password or cryptographic key, which it uses for its own inbound authentication, outbound communication to external components, or encryption of internal data.", - Name: "Use of Hard-coded Credentials", - }, - "88": { - ID: "88", - Description: "The software constructs a string for a command to executed by a separate component\nin another control sphere, but it does not properly delimit the\nintended arguments, options, or switches within that command string.", - Name: "Improper Neutralization of Argument Delimiters in a Command ('Argument Injection')", - }, - "89": { - ID: "89", - Description: "The software constructs all or part of an SQL command using externally-influenced input from an upstream component, but it does not neutralize or incorrectly neutralizes special elements that could modify the intended SQL command when it is sent to a downstream component.", - Name: "Improper Neutralization of Special Elements used in an SQL Command ('SQL Injection')", - }, - "676": { - ID: "676", - Description: "The program invokes a potentially dangerous function that could introduce a vulnerability if it is used incorrectly, but the function can also be used safely.", - Name: "Use of Potentially Dangerous Function", - }, -} - -// Get Retrieves a CWE weakness by it's id -func Get(id string) *Weakness { - weakness, ok := idWeaknesses[id] - if ok && weakness != nil { - return weakness - } - return nil -} diff --git a/vendor/github.com/securego/gosec/v2/cwe/types.go b/vendor/github.com/securego/gosec/v2/cwe/types.go deleted file mode 100644 index 562510a8b..000000000 --- a/vendor/github.com/securego/gosec/v2/cwe/types.go +++ /dev/null @@ -1,38 +0,0 @@ -package cwe - -import ( - "encoding/json" - "fmt" -) - -// Weakness defines a CWE weakness based on http://cwe.mitre.org/data/xsd/cwe_schema_v6.4.xsd -type Weakness struct { - ID string - Name string - Description string -} - -// SprintURL format the CWE URL -func (w *Weakness) SprintURL() string { - return fmt.Sprintf("https://cwe.mitre.org/data/definitions/%s.html", w.ID) -} - -// SprintID format the CWE ID -func (w *Weakness) SprintID() string { - id := "0000" - if w != nil { - id = w.ID - } - return fmt.Sprintf("%s-%s", Acronym, id) -} - -// MarshalJSON print only id and URL -func (w *Weakness) MarshalJSON() ([]byte, error) { - return json.Marshal(&struct { - ID string `json:"id"` - URL string `json:"url"` - }{ - ID: w.ID, - URL: w.SprintURL(), - }) -} diff --git a/vendor/github.com/securego/gosec/v2/entrypoint.sh 
b/vendor/github.com/securego/gosec/v2/entrypoint.sh deleted file mode 100644 index bc6ad6a24..000000000 --- a/vendor/github.com/securego/gosec/v2/entrypoint.sh +++ /dev/null @@ -1,11 +0,0 @@ -#!/usr/bin/env bash - -# Expand the arguments into an array of strings. This is required because the GitHub action -# provides all arguments concatenated as a single string. -ARGS=("$@") - -if [[ ! -z "${GITHUB_AUTHENTICATION_TOKEN}" ]]; then - git config --global --add url."https://x-access-token:${GITHUB_AUTHENTICATION_TOKEN}@github.com/".insteadOf "https://github.com/" -fi - -/bin/gosec ${ARGS[*]} diff --git a/vendor/github.com/securego/gosec/v2/errors.go b/vendor/github.com/securego/gosec/v2/errors.go deleted file mode 100644 index 2f6672704..000000000 --- a/vendor/github.com/securego/gosec/v2/errors.go +++ /dev/null @@ -1,33 +0,0 @@ -package gosec - -import ( - "sort" -) - -// Error is used when there are golang errors while parsing the AST -type Error struct { - Line int `json:"line"` - Column int `json:"column"` - Err string `json:"error"` -} - -// NewError creates Error object -func NewError(line, column int, err string) *Error { - return &Error{ - Line: line, - Column: column, - Err: err, - } -} - -// sortErrors sorts the golang errors by line -func sortErrors(allErrors map[string][]Error) { - for _, errors := range allErrors { - sort.Slice(errors, func(i, j int) bool { - if errors[i].Line == errors[j].Line { - return errors[i].Column <= errors[j].Column - } - return errors[i].Line < errors[j].Line - }) - } -} diff --git a/vendor/github.com/securego/gosec/v2/helpers.go b/vendor/github.com/securego/gosec/v2/helpers.go deleted file mode 100644 index 15b2b5f3a..000000000 --- a/vendor/github.com/securego/gosec/v2/helpers.go +++ /dev/null @@ -1,519 +0,0 @@ -// (c) Copyright 2016 Hewlett Packard Enterprise Development LP -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package gosec - -import ( - "errors" - "fmt" - "go/ast" - "go/token" - "go/types" - "os" - "os/user" - "path/filepath" - "regexp" - "runtime" - "strconv" - "strings" -) - -// MatchCallByPackage ensures that the specified package is imported, -// adjusts the name for any aliases and ignores cases that are -// initialization only imports. 
-// -// Usage: -// -// node, matched := MatchCallByPackage(n, ctx, "math/rand", "Read") -func MatchCallByPackage(n ast.Node, c *Context, pkg string, names ...string) (*ast.CallExpr, bool) { - importedNames, found := GetImportedNames(pkg, c) - if !found { - return nil, false - } - - if callExpr, ok := n.(*ast.CallExpr); ok { - packageName, callName, err := GetCallInfo(callExpr, c) - if err != nil { - return nil, false - } - for _, in := range importedNames { - if packageName != in { - continue - } - for _, name := range names { - if callName == name { - return callExpr, true - } - } - } - } - return nil, false -} - -// MatchCompLit will match an ast.CompositeLit based on the supplied type -func MatchCompLit(n ast.Node, ctx *Context, required string) *ast.CompositeLit { - if complit, ok := n.(*ast.CompositeLit); ok { - typeOf := ctx.Info.TypeOf(complit) - if typeOf.String() == required { - return complit - } - } - return nil -} - -// GetInt will read and return an integer value from an ast.BasicLit -func GetInt(n ast.Node) (int64, error) { - if node, ok := n.(*ast.BasicLit); ok && node.Kind == token.INT { - return strconv.ParseInt(node.Value, 0, 64) - } - return 0, fmt.Errorf("Unexpected AST node type: %T", n) -} - -// GetFloat will read and return a float value from an ast.BasicLit -func GetFloat(n ast.Node) (float64, error) { - if node, ok := n.(*ast.BasicLit); ok && node.Kind == token.FLOAT { - return strconv.ParseFloat(node.Value, 64) - } - return 0.0, fmt.Errorf("Unexpected AST node type: %T", n) -} - -// GetChar will read and return a char value from an ast.BasicLit -func GetChar(n ast.Node) (byte, error) { - if node, ok := n.(*ast.BasicLit); ok && node.Kind == token.CHAR { - return node.Value[0], nil - } - return 0, fmt.Errorf("Unexpected AST node type: %T", n) -} - -// GetStringRecursive will recursively walk down a tree of *ast.BinaryExpr. It will then concat the results, and return. -// Unlike the other getters, it does _not_ raise an error for unknown ast.Node types. At the base, the recursion will hit a non-BinaryExpr type, -// either BasicLit or other, so it's not an error case. It will only error if `strconv.Unquote` errors. This matters, because there's -// currently functionality that relies on error values being returned by GetString if and when it hits a non-basiclit string node type, -// hence for cases where recursion is needed, we use this separate function, so that we can still be backwards compatible. -// -// This was added to handle a SQL injection concatenation case where the injected value is infixed between two strings, not at the start or end. See example below -// -// Do note that this will omit non-string values. 
So for example, if you were to use this node: -// ```go -// q := "SELECT * FROM foo WHERE name = '" + os.Args[0] + "' AND 1=1" // will result in "SELECT * FROM foo WHERE ” AND 1=1" - -func GetStringRecursive(n ast.Node) (string, error) { - if node, ok := n.(*ast.BasicLit); ok && node.Kind == token.STRING { - return strconv.Unquote(node.Value) - } - - if expr, ok := n.(*ast.BinaryExpr); ok { - x, err := GetStringRecursive(expr.X) - if err != nil { - return "", err - } - - y, err := GetStringRecursive(expr.Y) - if err != nil { - return "", err - } - - return x + y, nil - } - - return "", nil -} - -// GetString will read and return a string value from an ast.BasicLit -func GetString(n ast.Node) (string, error) { - if node, ok := n.(*ast.BasicLit); ok && node.Kind == token.STRING { - return strconv.Unquote(node.Value) - } - - return "", fmt.Errorf("Unexpected AST node type: %T", n) -} - -// GetCallObject returns the object and call expression and associated -// object for a given AST node. nil, nil will be returned if the -// object cannot be resolved. -func GetCallObject(n ast.Node, ctx *Context) (*ast.CallExpr, types.Object) { - switch node := n.(type) { - case *ast.CallExpr: - switch fn := node.Fun.(type) { - case *ast.Ident: - return node, ctx.Info.Uses[fn] - case *ast.SelectorExpr: - return node, ctx.Info.Uses[fn.Sel] - } - } - return nil, nil -} - -// GetCallInfo returns the package or type and name associated with a -// call expression. -func GetCallInfo(n ast.Node, ctx *Context) (string, string, error) { - switch node := n.(type) { - case *ast.CallExpr: - switch fn := node.Fun.(type) { - case *ast.SelectorExpr: - switch expr := fn.X.(type) { - case *ast.Ident: - if expr.Obj != nil && expr.Obj.Kind == ast.Var { - t := ctx.Info.TypeOf(expr) - if t != nil { - return t.String(), fn.Sel.Name, nil - } - return "undefined", fn.Sel.Name, fmt.Errorf("missing type info") - } - return expr.Name, fn.Sel.Name, nil - case *ast.SelectorExpr: - if expr.Sel != nil { - t := ctx.Info.TypeOf(expr.Sel) - if t != nil { - return t.String(), fn.Sel.Name, nil - } - return "undefined", fn.Sel.Name, fmt.Errorf("missing type info") - } - case *ast.CallExpr: - switch call := expr.Fun.(type) { - case *ast.Ident: - if call.Name == "new" && len(expr.Args) > 0 { - t := ctx.Info.TypeOf(expr.Args[0]) - if t != nil { - return t.String(), fn.Sel.Name, nil - } - return "undefined", fn.Sel.Name, fmt.Errorf("missing type info") - } - if call.Obj != nil { - switch decl := call.Obj.Decl.(type) { - case *ast.FuncDecl: - ret := decl.Type.Results - if ret != nil && len(ret.List) > 0 { - ret1 := ret.List[0] - if ret1 != nil { - t := ctx.Info.TypeOf(ret1.Type) - if t != nil { - return t.String(), fn.Sel.Name, nil - } - return "undefined", fn.Sel.Name, fmt.Errorf("missing type info") - } - } - } - } - } - } - case *ast.Ident: - return ctx.Pkg.Name(), fn.Name, nil - } - } - - return "", "", fmt.Errorf("unable to determine call info") -} - -// GetCallStringArgsValues returns the values of strings arguments if they can be resolved -func GetCallStringArgsValues(n ast.Node, _ *Context) []string { - values := []string{} - switch node := n.(type) { - case *ast.CallExpr: - for _, arg := range node.Args { - switch param := arg.(type) { - case *ast.BasicLit: - value, err := GetString(param) - if err == nil { - values = append(values, value) - } - case *ast.Ident: - values = append(values, GetIdentStringValues(param)...) 
- } - } - } - return values -} - -func getIdentStringValues(ident *ast.Ident, stringFinder func(ast.Node) (string, error)) []string { - values := []string{} - obj := ident.Obj - if obj != nil { - switch decl := obj.Decl.(type) { - case *ast.ValueSpec: - for _, v := range decl.Values { - value, err := stringFinder(v) - if err == nil { - values = append(values, value) - } - } - case *ast.AssignStmt: - for _, v := range decl.Rhs { - value, err := stringFinder(v) - if err == nil { - values = append(values, value) - } - } - } - } - return values -} - -// getIdentStringRecursive returns the string of values of an Ident if they can be resolved -// The difference between this and GetIdentStringValues is that it will attempt to resolve the strings recursively, -// if it is passed a *ast.BinaryExpr. See GetStringRecursive for details -func GetIdentStringValuesRecursive(ident *ast.Ident) []string { - return getIdentStringValues(ident, GetStringRecursive) -} - -// GetIdentStringValues return the string values of an Ident if they can be resolved -func GetIdentStringValues(ident *ast.Ident) []string { - return getIdentStringValues(ident, GetString) -} - -// GetBinaryExprOperands returns all operands of a binary expression by traversing -// the expression tree -func GetBinaryExprOperands(be *ast.BinaryExpr) []ast.Node { - var traverse func(be *ast.BinaryExpr) - result := []ast.Node{} - traverse = func(be *ast.BinaryExpr) { - if lhs, ok := be.X.(*ast.BinaryExpr); ok { - traverse(lhs) - } else { - result = append(result, be.X) - } - if rhs, ok := be.Y.(*ast.BinaryExpr); ok { - traverse(rhs) - } else { - result = append(result, be.Y) - } - } - traverse(be) - return result -} - -// GetImportedNames returns the name(s)/alias(es) used for the package within -// the code. It ignores initialization-only imports. -func GetImportedNames(path string, ctx *Context) (names []string, found bool) { - importNames, imported := ctx.Imports.Imported[path] - return importNames, imported -} - -// GetImportPath resolves the full import path of an identifier based on -// the imports in the current context(including aliases). 
-func GetImportPath(name string, ctx *Context) (string, bool) { - for path := range ctx.Imports.Imported { - if imported, ok := GetImportedNames(path, ctx); ok { - for _, n := range imported { - if n == name { - return path, true - } - } - } - } - - return "", false -} - -// GetLocation returns the filename and line number of an ast.Node -func GetLocation(n ast.Node, ctx *Context) (string, int) { - fobj := ctx.FileSet.File(n.Pos()) - return fobj.Name(), fobj.Line(n.Pos()) -} - -// Gopath returns all GOPATHs -func Gopath() []string { - defaultGoPath := runtime.GOROOT() - if u, err := user.Current(); err == nil { - defaultGoPath = filepath.Join(u.HomeDir, "go") - } - path := Getenv("GOPATH", defaultGoPath) - paths := strings.Split(path, string(os.PathListSeparator)) - for idx, path := range paths { - if abs, err := filepath.Abs(path); err == nil { - paths[idx] = abs - } - } - return paths -} - -// Getenv returns the values of the environment variable, otherwise -// returns the default if variable is not set -func Getenv(key, userDefault string) string { - if val := os.Getenv(key); val != "" { - return val - } - return userDefault -} - -// GetPkgRelativePath returns the Go relative path derived -// form the given path -func GetPkgRelativePath(path string) (string, error) { - abspath, err := filepath.Abs(path) - if err != nil { - abspath = path - } - if strings.HasSuffix(abspath, ".go") { - abspath = filepath.Dir(abspath) - } - for _, base := range Gopath() { - projectRoot := filepath.FromSlash(fmt.Sprintf("%s/src/", base)) - if strings.HasPrefix(abspath, projectRoot) { - return strings.TrimPrefix(abspath, projectRoot), nil - } - } - return "", errors.New("no project relative path found") -} - -// GetPkgAbsPath returns the Go package absolute path derived from -// the given path -func GetPkgAbsPath(pkgPath string) (string, error) { - absPath, err := filepath.Abs(pkgPath) - if err != nil { - return "", err - } - if _, err := os.Stat(absPath); os.IsNotExist(err) { - return "", errors.New("no project absolute path found") - } - return absPath, nil -} - -// ConcatString recursively concatenates strings from a binary expression -func ConcatString(n *ast.BinaryExpr) (string, bool) { - var s string - // sub expressions are found in X object, Y object is always last BasicLit - if rightOperand, ok := n.Y.(*ast.BasicLit); ok { - if str, err := GetString(rightOperand); err == nil { - s = str + s - } - } else { - return "", false - } - if leftOperand, ok := n.X.(*ast.BinaryExpr); ok { - if recursion, ok := ConcatString(leftOperand); ok { - s = recursion + s - } - } else if leftOperand, ok := n.X.(*ast.BasicLit); ok { - if str, err := GetString(leftOperand); err == nil { - s = str + s - } - } else { - return "", false - } - return s, true -} - -// FindVarIdentities returns array of all variable identities in a given binary expression -func FindVarIdentities(n *ast.BinaryExpr, c *Context) ([]*ast.Ident, bool) { - identities := []*ast.Ident{} - // sub expressions are found in X object, Y object is always the last term - if rightOperand, ok := n.Y.(*ast.Ident); ok { - obj := c.Info.ObjectOf(rightOperand) - if _, ok := obj.(*types.Var); ok && !TryResolve(rightOperand, c) { - identities = append(identities, rightOperand) - } - } - if leftOperand, ok := n.X.(*ast.BinaryExpr); ok { - if leftIdentities, ok := FindVarIdentities(leftOperand, c); ok { - identities = append(identities, leftIdentities...) 
- } - } else { - if leftOperand, ok := n.X.(*ast.Ident); ok { - obj := c.Info.ObjectOf(leftOperand) - if _, ok := obj.(*types.Var); ok && !TryResolve(leftOperand, c) { - identities = append(identities, leftOperand) - } - } - } - - if len(identities) > 0 { - return identities, true - } - // if nil or error, return false - return nil, false -} - -// PackagePaths returns a slice with all packages path at given root directory -func PackagePaths(root string, excludes []*regexp.Regexp) ([]string, error) { - if strings.HasSuffix(root, "...") { - root = root[0 : len(root)-3] - } else { - return []string{root}, nil - } - paths := map[string]bool{} - err := filepath.Walk(root, func(path string, f os.FileInfo, err error) error { - if filepath.Ext(path) == ".go" { - path = filepath.Dir(path) - if isExcluded(filepath.ToSlash(path), excludes) { - return nil - } - paths[path] = true - } - return nil - }) - if err != nil { - return []string{}, err - } - - result := []string{} - for path := range paths { - result = append(result, path) - } - return result, nil -} - -// isExcluded checks if a string matches any of the exclusion regexps -func isExcluded(str string, excludes []*regexp.Regexp) bool { - if excludes == nil { - return false - } - for _, exclude := range excludes { - if exclude != nil && exclude.MatchString(str) { - return true - } - } - return false -} - -// ExcludedDirsRegExp builds the regexps for a list of excluded dirs provided as strings -func ExcludedDirsRegExp(excludedDirs []string) []*regexp.Regexp { - var exps []*regexp.Regexp - for _, excludedDir := range excludedDirs { - str := fmt.Sprintf(`([\\/])?%s([\\/])?`, strings.ReplaceAll(filepath.ToSlash(excludedDir), "/", `\/`)) - r := regexp.MustCompile(str) - exps = append(exps, r) - } - return exps -} - -// RootPath returns the absolute root path of a scan -func RootPath(root string) (string, error) { - root = strings.TrimSuffix(root, "...") - return filepath.Abs(root) -} - -// GoVersion returns parsed version of Go from runtime -func GoVersion() (int, int, int) { - return parseGoVersion(runtime.Version()) -} - -// parseGoVersion parses Go version. -// example: -// - go1.19rc2 -// - go1.19beta2 -// - go1.19.4 -// - go1.19 -func parseGoVersion(version string) (int, int, int) { - exp := regexp.MustCompile(`go(\d+).(\d+)(?:.(\d+))?.*`) - parts := exp.FindStringSubmatch(version) - if len(parts) <= 1 { - return 0, 0, 0 - } - - major, _ := strconv.Atoi(parts[1]) - minor, _ := strconv.Atoi(parts[2]) - build, _ := strconv.Atoi(parts[3]) - - return major, minor, build -} diff --git a/vendor/github.com/securego/gosec/v2/import_tracker.go b/vendor/github.com/securego/gosec/v2/import_tracker.go deleted file mode 100644 index 7984e99f4..000000000 --- a/vendor/github.com/securego/gosec/v2/import_tracker.go +++ /dev/null @@ -1,70 +0,0 @@ -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package gosec - -import ( - "go/ast" - "go/types" - "strings" -) - -// ImportTracker is used to normalize the packages that have been imported -// by a source file. 
It is able to differentiate between plain imports, aliased -// imports and init only imports. -type ImportTracker struct { - // Imported is a map of Imported with their associated names/aliases. - Imported map[string][]string -} - -// NewImportTracker creates an empty Import tracker instance -func NewImportTracker() *ImportTracker { - return &ImportTracker{ - Imported: make(map[string][]string), - } -} - -// TrackFile track all the imports used by the supplied file -func (t *ImportTracker) TrackFile(file *ast.File) { - for _, imp := range file.Imports { - t.TrackImport(imp) - } -} - -// TrackPackages tracks all the imports used by the supplied packages -func (t *ImportTracker) TrackPackages(pkgs ...*types.Package) { - for _, pkg := range pkgs { - t.Imported[pkg.Path()] = []string{pkg.Name()} - } -} - -// TrackImport tracks imports. -func (t *ImportTracker) TrackImport(imported *ast.ImportSpec) { - importPath := strings.Trim(imported.Path.Value, `"`) - if imported.Name != nil { - if imported.Name.Name != "_" { - // Aliased import - t.Imported[importPath] = append(t.Imported[importPath], imported.Name.String()) - } - } else { - t.Imported[importPath] = append(t.Imported[importPath], importName(importPath)) - } -} - -func importName(importPath string) string { - parts := strings.Split(importPath, "/") - name := importPath - if len(parts) > 0 { - name = parts[len(parts)-1] - } - return name -} diff --git a/vendor/github.com/securego/gosec/v2/install.sh b/vendor/github.com/securego/gosec/v2/install.sh deleted file mode 100644 index 2b6403cb2..000000000 --- a/vendor/github.com/securego/gosec/v2/install.sh +++ /dev/null @@ -1,377 +0,0 @@ -#!/bin/sh -set -e -# Code generated by godownloader. DO NOT EDIT. -# - -usage() { - this=$1 - cat </dev/null -} -echoerr() { - echo "$@" 1>&2 -} -log_prefix() { - echo "$0" -} -_logp=6 -log_set_priority() { - _logp="$1" -} -log_priority() { - if test -z "$1"; then - echo "$_logp" - return - fi - [ "$1" -le "$_logp" ] -} -log_tag() { - case $1 in - 0) echo "emerg" ;; - 1) echo "alert" ;; - 2) echo "crit" ;; - 3) echo "err" ;; - 4) echo "warning" ;; - 5) echo "notice" ;; - 6) echo "info" ;; - 7) echo "debug" ;; - *) echo "$1" ;; - esac -} -log_debug() { - log_priority 7 || return 0 - echoerr "$(log_prefix)" "$(log_tag 7)" "$@" -} -log_info() { - log_priority 6 || return 0 - echoerr "$(log_prefix)" "$(log_tag 6)" "$@" -} -log_err() { - log_priority 3 || return 0 - echoerr "$(log_prefix)" "$(log_tag 3)" "$@" -} -log_crit() { - log_priority 2 || return 0 - echoerr "$(log_prefix)" "$(log_tag 2)" "$@" -} -uname_os() { - os=$(uname -s | tr '[:upper:]' '[:lower:]') - case "$os" in - cygwin_nt*) os="windows" ;; - mingw*) os="windows" ;; - msys_nt*) os="windows" ;; - esac - echo "$os" -} -uname_arch() { - arch=$(uname -m) - case $arch in - x86_64) arch="amd64" ;; - x86) arch="386" ;; - i686) arch="386" ;; - i386) arch="386" ;; - aarch64) arch="arm64" ;; - armv5*) arch="armv5" ;; - armv6*) arch="armv6" ;; - armv7*) arch="armv7" ;; - esac - echo ${arch} -} -uname_os_check() { - os=$(uname_os) - case "$os" in - darwin) return 0 ;; - dragonfly) return 0 ;; - freebsd) return 0 ;; - linux) return 0 ;; - android) return 0 ;; - nacl) return 0 ;; - netbsd) return 0 ;; - openbsd) return 0 ;; - plan9) return 0 ;; - solaris) return 0 ;; - windows) return 0 ;; - esac - log_crit "uname_os_check '$(uname -s)' got converted to '$os' which is not a GOOS value. 
Please file bug at https://github.com/client9/shlib" - return 1 -} -uname_arch_check() { - arch=$(uname_arch) - case "$arch" in - 386) return 0 ;; - amd64) return 0 ;; - arm64) return 0 ;; - armv5) return 0 ;; - armv6) return 0 ;; - armv7) return 0 ;; - ppc64) return 0 ;; - ppc64le) return 0 ;; - mips) return 0 ;; - mipsle) return 0 ;; - mips64) return 0 ;; - mips64le) return 0 ;; - s390x) return 0 ;; - amd64p32) return 0 ;; - esac - log_crit "uname_arch_check '$(uname -m)' got converted to '$arch' which is not a GOARCH value. Please file bug report at https://github.com/client9/shlib" - return 1 -} -untar() { - tarball=$1 - case "${tarball}" in - *.tar.gz | *.tgz) tar --no-same-owner -xzf "${tarball}" ;; - *.tar) tar --no-same-owner -xf "${tarball}" ;; - *.zip) unzip "${tarball}" ;; - *) - log_err "untar unknown archive format for ${tarball}" - return 1 - ;; - esac -} -http_download_curl() { - local_file=$1 - source_url=$2 - header=$3 - if [ -z "$header" ]; then - code=$(curl -w '%{http_code}' -sL -o "$local_file" "$source_url") - else - code=$(curl -w '%{http_code}' -sL -H "$header" -o "$local_file" "$source_url") - fi - if [ "$code" != "200" ]; then - log_debug "http_download_curl received HTTP status $code" - return 1 - fi - return 0 -} -http_download_wget() { - local_file=$1 - source_url=$2 - header=$3 - if [ -z "$header" ]; then - wget -q -O "$local_file" "$source_url" - else - wget -q --header "$header" -O "$local_file" "$source_url" - fi -} -http_download() { - log_debug "http_download $2" - if is_command curl; then - http_download_curl "$@" - return - elif is_command wget; then - http_download_wget "$@" - return - fi - log_crit "http_download unable to find wget or curl" - return 1 -} -http_copy() { - tmp=$(mktemp) - http_download "${tmp}" "$1" "$2" || return 1 - body=$(cat "$tmp") - rm -f "${tmp}" - echo "$body" -} -github_release() { - owner_repo=$1 - version=$2 - giturl="https://api.github.com/repos/${owner_repo}/releases/tags/${version}" - if [ -z "${version}" ]; then - giturl="https://api.github.com/repos/${owner_repo}/releases/latest" - fi - json=$(http_copy "$giturl" "Accept:application/json") - test -z "$json" && return 1 - version=$(echo "$json" | tr -s '\n' ' ' | sed 's/.*"tag_name": *"//' | sed 's/".*//') - test -z "$version" && return 1 - echo "$version" -} -hash_sha256() { - TARGET=${1:-/dev/stdin} - if is_command gsha256sum; then - hash=$(gsha256sum "$TARGET") || return 1 - echo "$hash" | cut -d ' ' -f 1 - elif is_command sha256sum; then - hash=$(sha256sum "$TARGET") || return 1 - echo "$hash" | cut -d ' ' -f 1 - elif is_command shasum; then - hash=$(shasum -a 256 "$TARGET" 2>/dev/null) || return 1 - echo "$hash" | cut -d ' ' -f 1 - elif is_command openssl; then - hash=$(openssl -dst openssl dgst -sha256 "$TARGET") || return 1 - echo "$hash" | cut -d ' ' -f a - else - log_crit "hash_sha256 unable to find command to compute sha-256 hash" - return 1 - fi -} -hash_sha256_verify() { - TARGET=$1 - checksums=$2 - if [ -z "$checksums" ]; then - log_err "hash_sha256_verify checksum file not specified in arg2" - return 1 - fi - BASENAME=${TARGET##*/} - want=$(grep "${BASENAME}" "${checksums}" 2>/dev/null | tr '\t' ' ' | cut -d ' ' -f 1) - if [ -z "$want" ]; then - log_err "hash_sha256_verify unable to find checksum for '${TARGET}' in '${checksums}'" - return 1 - fi - got=$(hash_sha256 "$TARGET") - if [ "$want" != "$got" ]; then - log_err "hash_sha256_verify checksum for '$TARGET' did not verify ${want} vs $got" - return 1 - fi -} -cat /dev/null < end { - break - } else if pos 
>= start && pos <= end { - code := fmt.Sprintf("%d: %s\n", pos, scanner.Text()) - buf.WriteString(code) - } - } - return buf.String(), nil -} - -func codeSnippetStartLine(node ast.Node, fobj *token.File) int64 { - s := (int64)(fobj.Line(node.Pos())) - if s-SnippetOffset > 0 { - return s - SnippetOffset - } - return s -} - -func codeSnippetEndLine(node ast.Node, fobj *token.File) int64 { - e := (int64)(fobj.Line(node.End())) - return e + SnippetOffset -} - -// New creates a new Issue -func New(fobj *token.File, node ast.Node, ruleID, desc string, severity, confidence Score) *Issue { - name := fobj.Name() - line := GetLine(fobj, node) - col := strconv.Itoa(fobj.Position(node.Pos()).Column) - - var code string - if node == nil { - code = "invalid AST node provided" - } - if file, err := os.Open(fobj.Name()); err == nil && node != nil { - defer file.Close() // #nosec - s := codeSnippetStartLine(node, fobj) - e := codeSnippetEndLine(node, fobj) - code, err = CodeSnippet(file, s, e) - if err != nil { - code = err.Error() - } - } - - return &Issue{ - File: name, - Line: line, - Col: col, - RuleID: ruleID, - What: desc, - Confidence: confidence, - Severity: severity, - Code: code, - Cwe: GetCweByRule(ruleID), - } -} - -// WithSuppressions set the suppressions of the issue -func (i *Issue) WithSuppressions(suppressions []SuppressionInfo) *Issue { - i.Suppressions = suppressions - return i -} - -// GetLine returns the line number of a given ast.Node -func GetLine(fobj *token.File, node ast.Node) string { - start, end := fobj.Line(node.Pos()), fobj.Line(node.End()) - line := strconv.Itoa(start) - if start != end { - line = fmt.Sprintf("%d-%d", start, end) - } - return line -} diff --git a/vendor/github.com/securego/gosec/v2/renovate.json b/vendor/github.com/securego/gosec/v2/renovate.json deleted file mode 100644 index 58ee1e0ea..000000000 --- a/vendor/github.com/securego/gosec/v2/renovate.json +++ /dev/null @@ -1,25 +0,0 @@ -{ - "dependencyDashboard": true, - "dependencyDashboardTitle" : "Renovate(bot) : dependency dashboard", - "vulnerabilityAlerts": { - "enabled": true - }, - "extends": [ - ":preserveSemverRanges", - "group:all", - "schedule:weekly" - ], - "lockFileMaintenance": { - "commitMessageAction": "Update", - "enabled": true, - "extends": [ - "group:all", - "schedule:weekly" - ] - }, - "postUpdateOptions": [ - "gomodTidy", - "gomodUpdateImportPaths" - ], - "separateMajorMinor": false -} diff --git a/vendor/github.com/securego/gosec/v2/report.go b/vendor/github.com/securego/gosec/v2/report.go deleted file mode 100644 index 4fdeea520..000000000 --- a/vendor/github.com/securego/gosec/v2/report.go +++ /dev/null @@ -1,28 +0,0 @@ -package gosec - -import ( - "github.com/securego/gosec/v2/issue" -) - -// ReportInfo this is report information -type ReportInfo struct { - Errors map[string][]Error `json:"Golang errors"` - Issues []*issue.Issue - Stats *Metrics - GosecVersion string -} - -// NewReportInfo instantiate a ReportInfo -func NewReportInfo(issues []*issue.Issue, metrics *Metrics, errors map[string][]Error) *ReportInfo { - return &ReportInfo{ - Errors: errors, - Issues: issues, - Stats: metrics, - } -} - -// WithVersion defines the version of gosec used to generate the report -func (r *ReportInfo) WithVersion(version string) *ReportInfo { - r.GosecVersion = version - return r -} diff --git a/vendor/github.com/securego/gosec/v2/resolve.go b/vendor/github.com/securego/gosec/v2/resolve.go deleted file mode 100644 index a201b8d32..000000000 --- a/vendor/github.com/securego/gosec/v2/resolve.go 
+++ /dev/null @@ -1,95 +0,0 @@ -// (c) Copyright 2016 Hewlett Packard Enterprise Development LP -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package gosec - -import "go/ast" - -func resolveIdent(n *ast.Ident, c *Context) bool { - if n.Obj == nil || n.Obj.Kind != ast.Var { - return true - } - if node, ok := n.Obj.Decl.(ast.Node); ok { - return TryResolve(node, c) - } - return false -} - -func resolveValueSpec(n *ast.ValueSpec, c *Context) bool { - if len(n.Values) == 0 { - return false - } - for _, value := range n.Values { - if !TryResolve(value, c) { - return false - } - } - return true -} - -func resolveAssign(n *ast.AssignStmt, c *Context) bool { - if len(n.Rhs) == 0 { - return false - } - for _, arg := range n.Rhs { - if !TryResolve(arg, c) { - return false - } - } - return true -} - -func resolveCompLit(n *ast.CompositeLit, c *Context) bool { - if len(n.Elts) == 0 { - return false - } - for _, arg := range n.Elts { - if !TryResolve(arg, c) { - return false - } - } - return true -} - -func resolveBinExpr(n *ast.BinaryExpr, c *Context) bool { - return (TryResolve(n.X, c) && TryResolve(n.Y, c)) -} - -func resolveCallExpr(_ *ast.CallExpr, _ *Context) bool { - // TODO(tkelsey): next step, full function resolution - return false -} - -// TryResolve will attempt, given a subtree starting at some AST node, to resolve -// all values contained within to a known constant. It is used to check for any -// unknown values in compound expressions. -func TryResolve(n ast.Node, c *Context) bool { - switch node := n.(type) { - case *ast.BasicLit: - return true - case *ast.CompositeLit: - return resolveCompLit(node, c) - case *ast.Ident: - return resolveIdent(node, c) - case *ast.ValueSpec: - return resolveValueSpec(node, c) - case *ast.AssignStmt: - return resolveAssign(node, c) - case *ast.CallExpr: - return resolveCallExpr(node, c) - case *ast.BinaryExpr: - return resolveBinExpr(node, c) - } - return false -} diff --git a/vendor/github.com/securego/gosec/v2/rule.go b/vendor/github.com/securego/gosec/v2/rule.go deleted file mode 100644 index 490a25da0..000000000 --- a/vendor/github.com/securego/gosec/v2/rule.go +++ /dev/null @@ -1,72 +0,0 @@ -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package gosec - -import ( - "go/ast" - "reflect" - - "github.com/securego/gosec/v2/issue" -) - -// The Rule interface used by all rules supported by gosec. 
-type Rule interface { - ID() string - Match(ast.Node, *Context) (*issue.Issue, error) -} - -// RuleBuilder is used to register a rule definition with the analyzer -type RuleBuilder func(id string, c Config) (Rule, []ast.Node) - -// A RuleSet contains a mapping of lists of rules to the type of AST node they -// should be run on and a mapping of rule ID's to whether the rule are -// suppressed. -// The analyzer will only invoke rules contained in the list associated with the -// type of AST node it is currently visiting. -type RuleSet struct { - Rules map[reflect.Type][]Rule - RuleSuppressedMap map[string]bool -} - -// NewRuleSet constructs a new RuleSet -func NewRuleSet() RuleSet { - return RuleSet{make(map[reflect.Type][]Rule), make(map[string]bool)} -} - -// Register adds a trigger for the supplied rule for the -// specified ast nodes. -func (r RuleSet) Register(rule Rule, isSuppressed bool, nodes ...ast.Node) { - for _, n := range nodes { - t := reflect.TypeOf(n) - if rules, ok := r.Rules[t]; ok { - r.Rules[t] = append(rules, rule) - } else { - r.Rules[t] = []Rule{rule} - } - } - r.RuleSuppressedMap[rule.ID()] = isSuppressed -} - -// RegisteredFor will return all rules that are registered for a -// specified ast node. -func (r RuleSet) RegisteredFor(n ast.Node) []Rule { - if rules, found := r.Rules[reflect.TypeOf(n)]; found { - return rules - } - return []Rule{} -} - -// IsRuleSuppressed will return whether the rule is suppressed. -func (r RuleSet) IsRuleSuppressed(ruleID string) bool { - return r.RuleSuppressedMap[ruleID] -} diff --git a/vendor/github.com/securego/gosec/v2/rules/archive.go b/vendor/github.com/securego/gosec/v2/rules/archive.go deleted file mode 100644 index 987047435..000000000 --- a/vendor/github.com/securego/gosec/v2/rules/archive.go +++ /dev/null @@ -1,66 +0,0 @@ -package rules - -import ( - "go/ast" - "go/types" - - "github.com/securego/gosec/v2" - "github.com/securego/gosec/v2/issue" -) - -type archive struct { - issue.MetaData - calls gosec.CallList - argTypes []string -} - -func (a *archive) ID() string { - return a.MetaData.ID -} - -// Match inspects AST nodes to determine if the filepath.Joins uses any argument derived from type zip.File or tar.Header -func (a *archive) Match(n ast.Node, c *gosec.Context) (*issue.Issue, error) { - if node := a.calls.ContainsPkgCallExpr(n, c, false); node != nil { - for _, arg := range node.Args { - var argType types.Type - if selector, ok := arg.(*ast.SelectorExpr); ok { - argType = c.Info.TypeOf(selector.X) - } else if ident, ok := arg.(*ast.Ident); ok { - if ident.Obj != nil && ident.Obj.Kind == ast.Var { - decl := ident.Obj.Decl - if assign, ok := decl.(*ast.AssignStmt); ok { - if selector, ok := assign.Rhs[0].(*ast.SelectorExpr); ok { - argType = c.Info.TypeOf(selector.X) - } - } - } - } - - if argType != nil { - for _, t := range a.argTypes { - if argType.String() == t { - return c.NewIssue(n, a.ID(), a.What, a.Severity, a.Confidence), nil - } - } - } - } - } - return nil, nil -} - -// NewArchive creates a new rule which detects the file traversal when extracting zip/tar archives -func NewArchive(id string, _ gosec.Config) (gosec.Rule, []ast.Node) { - calls := gosec.NewCallList() - calls.Add("path/filepath", "Join") - calls.Add("path", "Join") - return &archive{ - calls: calls, - argTypes: []string{"*archive/zip.File", "*archive/tar.Header"}, - MetaData: issue.MetaData{ - ID: id, - Severity: issue.Medium, - Confidence: issue.High, - What: "File traversal when extracting zip/tar archive", - }, - }, 
[]ast.Node{(*ast.CallExpr)(nil)} -} diff --git a/vendor/github.com/securego/gosec/v2/rules/bind.go b/vendor/github.com/securego/gosec/v2/rules/bind.go deleted file mode 100644 index fef760c80..000000000 --- a/vendor/github.com/securego/gosec/v2/rules/bind.go +++ /dev/null @@ -1,84 +0,0 @@ -// (c) Copyright 2016 Hewlett Packard Enterprise Development LP -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package rules - -import ( - "go/ast" - "regexp" - - "github.com/securego/gosec/v2" - "github.com/securego/gosec/v2/issue" -) - -// Looks for net.Listen("0.0.0.0") or net.Listen(":8080") -type bindsToAllNetworkInterfaces struct { - issue.MetaData - calls gosec.CallList - pattern *regexp.Regexp -} - -func (r *bindsToAllNetworkInterfaces) ID() string { - return r.MetaData.ID -} - -func (r *bindsToAllNetworkInterfaces) Match(n ast.Node, c *gosec.Context) (*issue.Issue, error) { - callExpr := r.calls.ContainsPkgCallExpr(n, c, false) - if callExpr == nil { - return nil, nil - } - if len(callExpr.Args) > 1 { - arg := callExpr.Args[1] - if bl, ok := arg.(*ast.BasicLit); ok { - if arg, err := gosec.GetString(bl); err == nil { - if r.pattern.MatchString(arg) { - return c.NewIssue(n, r.ID(), r.What, r.Severity, r.Confidence), nil - } - } - } else if ident, ok := arg.(*ast.Ident); ok { - values := gosec.GetIdentStringValues(ident) - for _, value := range values { - if r.pattern.MatchString(value) { - return c.NewIssue(n, r.ID(), r.What, r.Severity, r.Confidence), nil - } - } - } - } else if len(callExpr.Args) > 0 { - values := gosec.GetCallStringArgsValues(callExpr.Args[0], c) - for _, value := range values { - if r.pattern.MatchString(value) { - return c.NewIssue(n, r.ID(), r.What, r.Severity, r.Confidence), nil - } - } - } - return nil, nil -} - -// NewBindsToAllNetworkInterfaces detects socket connections that are setup to -// listen on all network interfaces. -func NewBindsToAllNetworkInterfaces(id string, _ gosec.Config) (gosec.Rule, []ast.Node) { - calls := gosec.NewCallList() - calls.Add("net", "Listen") - calls.Add("crypto/tls", "Listen") - return &bindsToAllNetworkInterfaces{ - calls: calls, - pattern: regexp.MustCompile(`^(0.0.0.0|:).*$`), - MetaData: issue.MetaData{ - ID: id, - Severity: issue.Medium, - Confidence: issue.High, - What: "Binds to all network interfaces", - }, - }, []ast.Node{(*ast.CallExpr)(nil)} -} diff --git a/vendor/github.com/securego/gosec/v2/rules/blocklist.go b/vendor/github.com/securego/gosec/v2/rules/blocklist.go deleted file mode 100644 index 5e03cf7a0..000000000 --- a/vendor/github.com/securego/gosec/v2/rules/blocklist.go +++ /dev/null @@ -1,95 +0,0 @@ -// (c) Copyright 2016 Hewlett Packard Enterprise Development LP -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package rules - -import ( - "go/ast" - "strings" - - "github.com/securego/gosec/v2" - "github.com/securego/gosec/v2/issue" -) - -type blocklistedImport struct { - issue.MetaData - Blocklisted map[string]string -} - -func unquote(original string) string { - cleaned := strings.TrimSpace(original) - cleaned = strings.TrimLeft(cleaned, `"`) - return strings.TrimRight(cleaned, `"`) -} - -func (r *blocklistedImport) ID() string { - return r.MetaData.ID -} - -func (r *blocklistedImport) Match(n ast.Node, c *gosec.Context) (*issue.Issue, error) { - if node, ok := n.(*ast.ImportSpec); ok { - if description, ok := r.Blocklisted[unquote(node.Path.Value)]; ok { - return c.NewIssue(node, r.ID(), description, r.Severity, r.Confidence), nil - } - } - return nil, nil -} - -// NewBlocklistedImports reports when a blocklisted import is being used. -// Typically when a deprecated technology is being used. -func NewBlocklistedImports(id string, _ gosec.Config, blocklist map[string]string) (gosec.Rule, []ast.Node) { - return &blocklistedImport{ - MetaData: issue.MetaData{ - ID: id, - Severity: issue.Medium, - Confidence: issue.High, - }, - Blocklisted: blocklist, - }, []ast.Node{(*ast.ImportSpec)(nil)} -} - -// NewBlocklistedImportMD5 fails if MD5 is imported -func NewBlocklistedImportMD5(id string, conf gosec.Config) (gosec.Rule, []ast.Node) { - return NewBlocklistedImports(id, conf, map[string]string{ - "crypto/md5": "Blocklisted import crypto/md5: weak cryptographic primitive", - }) -} - -// NewBlocklistedImportDES fails if DES is imported -func NewBlocklistedImportDES(id string, conf gosec.Config) (gosec.Rule, []ast.Node) { - return NewBlocklistedImports(id, conf, map[string]string{ - "crypto/des": "Blocklisted import crypto/des: weak cryptographic primitive", - }) -} - -// NewBlocklistedImportRC4 fails if DES is imported -func NewBlocklistedImportRC4(id string, conf gosec.Config) (gosec.Rule, []ast.Node) { - return NewBlocklistedImports(id, conf, map[string]string{ - "crypto/rc4": "Blocklisted import crypto/rc4: weak cryptographic primitive", - }) -} - -// NewBlocklistedImportCGI fails if CGI is imported -func NewBlocklistedImportCGI(id string, conf gosec.Config) (gosec.Rule, []ast.Node) { - return NewBlocklistedImports(id, conf, map[string]string{ - "net/http/cgi": "Blocklisted import net/http/cgi: Go versions < 1.6.3 are vulnerable to Httpoxy attack: (CVE-2016-5386)", - }) -} - -// NewBlocklistedImportSHA1 fails if SHA1 is imported -func NewBlocklistedImportSHA1(id string, conf gosec.Config) (gosec.Rule, []ast.Node) { - return NewBlocklistedImports(id, conf, map[string]string{ - "crypto/sha1": "Blocklisted import crypto/sha1: weak cryptographic primitive", - }) -} diff --git a/vendor/github.com/securego/gosec/v2/rules/decompression-bomb.go b/vendor/github.com/securego/gosec/v2/rules/decompression-bomb.go deleted file mode 100644 index 7e57f1a5b..000000000 --- a/vendor/github.com/securego/gosec/v2/rules/decompression-bomb.go +++ /dev/null @@ -1,111 +0,0 @@ -// (c) Copyright 2016 Hewlett Packard Enterprise Development LP -// -// Licensed under the Apache License, Version 2.0 (the 
"License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package rules - -import ( - "fmt" - "go/ast" - - "github.com/securego/gosec/v2" - "github.com/securego/gosec/v2/issue" -) - -type decompressionBombCheck struct { - issue.MetaData - readerCalls gosec.CallList - copyCalls gosec.CallList -} - -func (d *decompressionBombCheck) ID() string { - return d.MetaData.ID -} - -func containsReaderCall(node ast.Node, ctx *gosec.Context, list gosec.CallList) bool { - if list.ContainsPkgCallExpr(node, ctx, false) != nil { - return true - } - // Resolve type info of ident (for *archive/zip.File.Open) - s, idt, _ := gosec.GetCallInfo(node, ctx) - return list.Contains(s, idt) -} - -func (d *decompressionBombCheck) Match(node ast.Node, ctx *gosec.Context) (*issue.Issue, error) { - var readerVarObj map[*ast.Object]struct{} - - // To check multiple lines, ctx.PassedValues is used to store temporary data. - if _, ok := ctx.PassedValues[d.ID()]; !ok { - readerVarObj = make(map[*ast.Object]struct{}) - ctx.PassedValues[d.ID()] = readerVarObj - } else if pv, ok := ctx.PassedValues[d.ID()].(map[*ast.Object]struct{}); ok { - readerVarObj = pv - } else { - return nil, fmt.Errorf("PassedValues[%s] of Context is not map[*ast.Object]struct{}, but %T", d.ID(), ctx.PassedValues[d.ID()]) - } - - // io.Copy is a common function. - // To reduce false positives, This rule detects code which is used for compressed data only. 
- switch n := node.(type) { - case *ast.AssignStmt: - for _, expr := range n.Rhs { - if callExpr, ok := expr.(*ast.CallExpr); ok && containsReaderCall(callExpr, ctx, d.readerCalls) { - if idt, ok := n.Lhs[0].(*ast.Ident); ok && idt.Name != "_" { - // Example: - // r, _ := zlib.NewReader(buf) - // Add r's Obj to readerVarObj map - readerVarObj[idt.Obj] = struct{}{} - } - } - } - case *ast.CallExpr: - if d.copyCalls.ContainsPkgCallExpr(n, ctx, false) != nil { - if idt, ok := n.Args[1].(*ast.Ident); ok { - if _, ok := readerVarObj[idt.Obj]; ok { - // Detect io.Copy(x, r) - return ctx.NewIssue(n, d.ID(), d.What, d.Severity, d.Confidence), nil - } - } - } - } - - return nil, nil -} - -// NewDecompressionBombCheck detects if there is potential DoS vulnerability via decompression bomb -func NewDecompressionBombCheck(id string, _ gosec.Config) (gosec.Rule, []ast.Node) { - readerCalls := gosec.NewCallList() - readerCalls.Add("compress/gzip", "NewReader") - readerCalls.AddAll("compress/zlib", "NewReader", "NewReaderDict") - readerCalls.Add("compress/bzip2", "NewReader") - readerCalls.AddAll("compress/flate", "NewReader", "NewReaderDict") - readerCalls.Add("compress/lzw", "NewReader") - readerCalls.Add("archive/tar", "NewReader") - readerCalls.Add("archive/zip", "NewReader") - readerCalls.Add("*archive/zip.File", "Open") - - copyCalls := gosec.NewCallList() - copyCalls.Add("io", "Copy") - copyCalls.Add("io", "CopyBuffer") - - return &decompressionBombCheck{ - MetaData: issue.MetaData{ - ID: id, - Severity: issue.Medium, - Confidence: issue.Medium, - What: "Potential DoS vulnerability via decompression bomb", - }, - readerCalls: readerCalls, - copyCalls: copyCalls, - }, []ast.Node{(*ast.FuncDecl)(nil), (*ast.AssignStmt)(nil), (*ast.CallExpr)(nil)} -} diff --git a/vendor/github.com/securego/gosec/v2/rules/directory-traversal.go b/vendor/github.com/securego/gosec/v2/rules/directory-traversal.go deleted file mode 100644 index 47bcb2dc4..000000000 --- a/vendor/github.com/securego/gosec/v2/rules/directory-traversal.go +++ /dev/null @@ -1,65 +0,0 @@ -package rules - -import ( - "go/ast" - "regexp" - - "github.com/securego/gosec/v2" - "github.com/securego/gosec/v2/issue" -) - -type traversal struct { - pattern *regexp.Regexp - issue.MetaData -} - -func (r *traversal) ID() string { - return r.MetaData.ID -} - -func (r *traversal) Match(n ast.Node, ctx *gosec.Context) (*issue.Issue, error) { - switch node := n.(type) { - case *ast.CallExpr: - return r.matchCallExpr(node, ctx) - } - return nil, nil -} - -func (r *traversal) matchCallExpr(assign *ast.CallExpr, ctx *gosec.Context) (*issue.Issue, error) { - for _, i := range assign.Args { - if basiclit, ok1 := i.(*ast.BasicLit); ok1 { - if fun, ok2 := assign.Fun.(*ast.SelectorExpr); ok2 { - if x, ok3 := fun.X.(*ast.Ident); ok3 { - str := x.Name + "." 
+ fun.Sel.Name + "(" + basiclit.Value + ")" - if r.pattern.MatchString(str) { - return ctx.NewIssue(assign, r.ID(), r.What, r.Severity, r.Confidence), nil - } - } - } - } - } - return nil, nil -} - -// NewDirectoryTraversal attempts to find the use of http.Dir("/") -func NewDirectoryTraversal(id string, conf gosec.Config) (gosec.Rule, []ast.Node) { - pattern := `http\.Dir\("\/"\)|http\.Dir\('\/'\)` - if val, ok := conf[id]; ok { - conf := val.(map[string]interface{}) - if configPattern, ok := conf["pattern"]; ok { - if cfgPattern, ok := configPattern.(string); ok { - pattern = cfgPattern - } - } - } - - return &traversal{ - pattern: regexp.MustCompile(pattern), - MetaData: issue.MetaData{ - ID: id, - What: "Potential directory traversal", - Confidence: issue.Medium, - Severity: issue.Medium, - }, - }, []ast.Node{(*ast.CallExpr)(nil)} -} diff --git a/vendor/github.com/securego/gosec/v2/rules/errors.go b/vendor/github.com/securego/gosec/v2/rules/errors.go deleted file mode 100644 index d31248ccb..000000000 --- a/vendor/github.com/securego/gosec/v2/rules/errors.go +++ /dev/null @@ -1,122 +0,0 @@ -// (c) Copyright 2016 Hewlett Packard Enterprise Development LP -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package rules - -import ( - "go/ast" - "go/types" - - "github.com/securego/gosec/v2" - "github.com/securego/gosec/v2/issue" -) - -type noErrorCheck struct { - issue.MetaData - whitelist gosec.CallList -} - -func (r *noErrorCheck) ID() string { - return r.MetaData.ID -} - -func returnsError(callExpr *ast.CallExpr, ctx *gosec.Context) int { - if tv := ctx.Info.TypeOf(callExpr); tv != nil { - switch t := tv.(type) { - case *types.Tuple: - for pos := 0; pos < t.Len(); pos++ { - variable := t.At(pos) - if variable != nil && variable.Type().String() == "error" { - return pos - } - } - case *types.Named: - if t.String() == "error" { - return 0 - } - } - } - return -1 -} - -func (r *noErrorCheck) Match(n ast.Node, ctx *gosec.Context) (*issue.Issue, error) { - switch stmt := n.(type) { - case *ast.AssignStmt: - cfg := ctx.Config - if enabled, err := cfg.IsGlobalEnabled(gosec.Audit); err == nil && enabled { - for _, expr := range stmt.Rhs { - if callExpr, ok := expr.(*ast.CallExpr); ok && r.whitelist.ContainsCallExpr(expr, ctx) == nil { - pos := returnsError(callExpr, ctx) - if pos < 0 || pos >= len(stmt.Lhs) { - return nil, nil - } - if id, ok := stmt.Lhs[pos].(*ast.Ident); ok && id.Name == "_" { - return ctx.NewIssue(n, r.ID(), r.What, r.Severity, r.Confidence), nil - } - } - } - } - case *ast.ExprStmt: - if callExpr, ok := stmt.X.(*ast.CallExpr); ok && r.whitelist.ContainsCallExpr(stmt.X, ctx) == nil { - pos := returnsError(callExpr, ctx) - if pos >= 0 { - return ctx.NewIssue(n, r.ID(), r.What, r.Severity, r.Confidence), nil - } - } - } - return nil, nil -} - -// NewNoErrorCheck detects if the returned error is unchecked -func NewNoErrorCheck(id string, conf gosec.Config) (gosec.Rule, []ast.Node) { - // TODO(gm) Come up with sensible defaults here. 
Or flip it to use a - // black list instead. - whitelist := gosec.NewCallList() - whitelist.AddAll("bytes.Buffer", "Write", "WriteByte", "WriteRune", "WriteString") - whitelist.AddAll("fmt", "Print", "Printf", "Println", "Fprint", "Fprintf", "Fprintln") - whitelist.AddAll("strings.Builder", "Write", "WriteByte", "WriteRune", "WriteString") - whitelist.Add("io.PipeWriter", "CloseWithError") - whitelist.Add("hash.Hash", "Write") - whitelist.Add("os", "Unsetenv") - - if configured, ok := conf[id]; ok { - if whitelisted, ok := configured.(map[string]interface{}); ok { - for pkg, funcs := range whitelisted { - if funcs, ok := funcs.([]interface{}); ok { - whitelist.AddAll(pkg, toStringSlice(funcs)...) - } - } - } - } - - return &noErrorCheck{ - MetaData: issue.MetaData{ - ID: id, - Severity: issue.Low, - Confidence: issue.High, - What: "Errors unhandled.", - }, - whitelist: whitelist, - }, []ast.Node{(*ast.AssignStmt)(nil), (*ast.ExprStmt)(nil)} -} - -func toStringSlice(values []interface{}) []string { - result := []string{} - for _, value := range values { - if value, ok := value.(string); ok { - result = append(result, value) - } - } - return result -} diff --git a/vendor/github.com/securego/gosec/v2/rules/fileperms.go b/vendor/github.com/securego/gosec/v2/rules/fileperms.go deleted file mode 100644 index 5311f74c6..000000000 --- a/vendor/github.com/securego/gosec/v2/rules/fileperms.go +++ /dev/null @@ -1,164 +0,0 @@ -// (c) Copyright 2016 Hewlett Packard Enterprise Development LP -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package rules - -import ( - "fmt" - "go/ast" - "strconv" - - "github.com/securego/gosec/v2" - "github.com/securego/gosec/v2/issue" -) - -type filePermissions struct { - issue.MetaData - mode int64 - pkgs []string - calls []string -} - -// ID returns the ID of the rule. -func (r *filePermissions) ID() string { - return r.MetaData.ID -} - -func getConfiguredMode(conf map[string]interface{}, configKey string, defaultMode int64) int64 { - mode := defaultMode - if value, ok := conf[configKey]; ok { - switch value := value.(type) { - case int64: - mode = value - case string: - if m, e := strconv.ParseInt(value, 0, 64); e != nil { - mode = defaultMode - } else { - mode = m - } - } - } - return mode -} - -func modeIsSubset(subset int64, superset int64) bool { - return (subset | superset) == superset -} - -// Match checks if the rule is matched. -func (r *filePermissions) Match(n ast.Node, c *gosec.Context) (*issue.Issue, error) { - for _, pkg := range r.pkgs { - if callexpr, matched := gosec.MatchCallByPackage(n, c, pkg, r.calls...); matched { - modeArg := callexpr.Args[len(callexpr.Args)-1] - if mode, err := gosec.GetInt(modeArg); err == nil && !modeIsSubset(mode, r.mode) { - return c.NewIssue(n, r.ID(), r.What, r.Severity, r.Confidence), nil - } - } - } - return nil, nil -} - -// NewWritePerms creates a rule to detect file Writes with bad permissions. 
-func NewWritePerms(id string, conf gosec.Config) (gosec.Rule, []ast.Node) { - mode := getConfiguredMode(conf, id, 0o600) - return &filePermissions{ - mode: mode, - pkgs: []string{"io/ioutil", "os"}, - calls: []string{"WriteFile"}, - MetaData: issue.MetaData{ - ID: id, - Severity: issue.Medium, - Confidence: issue.High, - What: fmt.Sprintf("Expect WriteFile permissions to be %#o or less", mode), - }, - }, []ast.Node{(*ast.CallExpr)(nil)} -} - -// NewFilePerms creates a rule to detect file creation with a more permissive than configured -// permission mask. -func NewFilePerms(id string, conf gosec.Config) (gosec.Rule, []ast.Node) { - mode := getConfiguredMode(conf, id, 0o600) - return &filePermissions{ - mode: mode, - pkgs: []string{"os"}, - calls: []string{"OpenFile", "Chmod"}, - MetaData: issue.MetaData{ - ID: id, - Severity: issue.Medium, - Confidence: issue.High, - What: fmt.Sprintf("Expect file permissions to be %#o or less", mode), - }, - }, []ast.Node{(*ast.CallExpr)(nil)} -} - -// NewMkdirPerms creates a rule to detect directory creation with more permissive than -// configured permission mask. -func NewMkdirPerms(id string, conf gosec.Config) (gosec.Rule, []ast.Node) { - mode := getConfiguredMode(conf, id, 0o750) - return &filePermissions{ - mode: mode, - pkgs: []string{"os"}, - calls: []string{"Mkdir", "MkdirAll"}, - MetaData: issue.MetaData{ - ID: id, - Severity: issue.Medium, - Confidence: issue.High, - What: fmt.Sprintf("Expect directory permissions to be %#o or less", mode), - }, - }, []ast.Node{(*ast.CallExpr)(nil)} -} - -type osCreatePermissions struct { - issue.MetaData - mode int64 - pkgs []string - calls []string -} - -const defaultOsCreateMode = 0o666 - -// ID returns the ID of the rule. -func (r *osCreatePermissions) ID() string { - return r.MetaData.ID -} - -// Match checks if the rule is matched. -func (r *osCreatePermissions) Match(n ast.Node, c *gosec.Context) (*issue.Issue, error) { - for _, pkg := range r.pkgs { - if _, matched := gosec.MatchCallByPackage(n, c, pkg, r.calls...); matched { - if !modeIsSubset(defaultOsCreateMode, r.mode) { - return c.NewIssue(n, r.ID(), r.What, r.Severity, r.Confidence), nil - } - } - } - return nil, nil -} - -// NewOsCreatePerms reates a rule to detect file creation with a more permissive than configured -// permission mask. -func NewOsCreatePerms(id string, conf gosec.Config) (gosec.Rule, []ast.Node) { - mode := getConfiguredMode(conf, id, 0o666) - return &osCreatePermissions{ - mode: mode, - pkgs: []string{"os"}, - calls: []string{"Create"}, - MetaData: issue.MetaData{ - ID: id, - Severity: issue.Medium, - Confidence: issue.High, - What: fmt.Sprintf("Expect file permissions to be %#o or less but os.Create used with default permissions %#o", - mode, defaultOsCreateMode), - }, - }, []ast.Node{(*ast.CallExpr)(nil)} -} diff --git a/vendor/github.com/securego/gosec/v2/rules/hardcoded_credentials.go b/vendor/github.com/securego/gosec/v2/rules/hardcoded_credentials.go deleted file mode 100644 index ed1fb947d..000000000 --- a/vendor/github.com/securego/gosec/v2/rules/hardcoded_credentials.go +++ /dev/null @@ -1,398 +0,0 @@ -// (c) Copyright 2016 Hewlett Packard Enterprise Development LP -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package rules - -import ( - "fmt" - "go/ast" - "go/token" - "regexp" - "strconv" - - zxcvbn "github.com/ccojocar/zxcvbn-go" - - "github.com/securego/gosec/v2" - "github.com/securego/gosec/v2/issue" -) - -type secretPattern struct { - name string - regexp *regexp.Regexp -} - -var secretsPatterns = [...]secretPattern{ - { - name: "RSA private key", - regexp: regexp.MustCompile(`-----BEGIN RSA PRIVATE KEY-----`), - }, - { - name: "SSH (DSA) private key", - regexp: regexp.MustCompile(`-----BEGIN DSA PRIVATE KEY-----`), - }, - { - name: "SSH (EC) private key", - regexp: regexp.MustCompile(`-----BEGIN EC PRIVATE KEY-----`), - }, - { - name: "PGP private key block", - regexp: regexp.MustCompile(`-----BEGIN PGP PRIVATE KEY BLOCK-----`), - }, - { - name: "Slack Token", - regexp: regexp.MustCompile(`xox[pborsa]-[0-9]{12}-[0-9]{12}-[0-9]{12}-[a-z0-9]{32}`), - }, - { - name: "AWS API Key", - regexp: regexp.MustCompile(`AKIA[0-9A-Z]{16}`), - }, - { - name: "Amazon MWS Auth Token", - regexp: regexp.MustCompile(`amzn\.mws\.[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}`), - }, - { - name: "AWS AppSync GraphQL Key", - regexp: regexp.MustCompile(`da2-[a-z0-9]{26}`), - }, - { - name: "GitHub personal access token", - regexp: regexp.MustCompile(`ghp_[a-zA-Z0-9]{36}`), - }, - { - name: "GitHub fine-grained access token", - regexp: regexp.MustCompile(`github_pat_[a-zA-Z0-9]{22}_[a-zA-Z0-9]{59}`), - }, - { - name: "GitHub action temporary token", - regexp: regexp.MustCompile(`ghs_[a-zA-Z0-9]{36}`), - }, - { - name: "Google API Key", - regexp: regexp.MustCompile(`AIza[0-9A-Za-z\-_]{35}`), - }, - { - name: "Google Cloud Platform API Key", - regexp: regexp.MustCompile(`AIza[0-9A-Za-z\-_]{35}`), - }, - { - name: "Google Cloud Platform OAuth", - regexp: regexp.MustCompile(`[0-9]+-[0-9A-Za-z_]{32}\.apps\.googleusercontent\.com`), - }, - { - name: "Google Drive API Key", - regexp: regexp.MustCompile(`AIza[0-9A-Za-z\-_]{35}`), - }, - { - name: "Google Drive OAuth", - regexp: regexp.MustCompile(`[0-9]+-[0-9A-Za-z_]{32}\.apps\.googleusercontent\.com`), - }, - { - name: "Google (GCP) Service-account", - regexp: regexp.MustCompile(`"type": "service_account"`), - }, - { - name: "Google Gmail API Key", - regexp: regexp.MustCompile(`AIza[0-9A-Za-z\-_]{35}`), - }, - { - name: "Google Gmail OAuth", - regexp: regexp.MustCompile(`[0-9]+-[0-9A-Za-z_]{32}\.apps\.googleusercontent\.com`), - }, - { - name: "Google OAuth Access Token", - regexp: regexp.MustCompile(`ya29\.[0-9A-Za-z\-_]+`), - }, - { - name: "Google YouTube API Key", - regexp: regexp.MustCompile(`AIza[0-9A-Za-z\-_]{35}`), - }, - { - name: "Google YouTube OAuth", - regexp: regexp.MustCompile(`[0-9]+-[0-9A-Za-z_]{32}\.apps\.googleusercontent\.com`), - }, - { - name: "Generic API Key", - regexp: regexp.MustCompile(`[aA][pP][iI]_?[kK][eE][yY].*[''|"][0-9a-zA-Z]{32,45}[''|"]`), - }, - { - name: "Generic Secret", - regexp: regexp.MustCompile(`[sS][eE][cC][rR][eE][tT].*[''|"][0-9a-zA-Z]{32,45}[''|"]`), - }, - { - name: "Heroku API Key", - regexp: 
regexp.MustCompile(`[hH][eE][rR][oO][kK][uU].*[0-9A-F]{8}-[0-9A-F]{4}-[0-9A-F]{4}-[0-9A-F]{4}-[0-9A-F]{12}`), - }, - { - name: "MailChimp API Key", - regexp: regexp.MustCompile(`[0-9a-f]{32}-us[0-9]{1,2}`), - }, - { - name: "Mailgun API Key", - regexp: regexp.MustCompile(`key-[0-9a-zA-Z]{32}`), - }, - { - name: "Password in URL", - regexp: regexp.MustCompile(`[a-zA-Z]{3,10}://[^/\\s:@]{3,20}:[^/\\s:@]{3,20}@.{1,100}["'\\s]`), - }, - { - name: "Slack Webhook", - regexp: regexp.MustCompile(`https://hooks\.slack\.com/services/T[a-zA-Z0-9_]{8}/B[a-zA-Z0-9_]{8}/[a-zA-Z0-9_]{24}`), - }, - { - name: "Stripe API Key", - regexp: regexp.MustCompile(`sk_live_[0-9a-zA-Z]{24}`), - }, - { - name: "Stripe API Key", - regexp: regexp.MustCompile(`sk_live_[0-9a-zA-Z]{24}`), - }, - { - name: "Stripe Restricted API Key", - regexp: regexp.MustCompile(`rk_live_[0-9a-zA-Z]{24}`), - }, - { - name: "Square Access Token", - regexp: regexp.MustCompile(`sq0atp-[0-9A-Za-z\-_]{22}`), - }, - { - name: "Square OAuth Secret", - regexp: regexp.MustCompile(`sq0csp-[0-9A-Za-z\-_]{43}`), - }, - { - name: "Telegram Bot API Key", - regexp: regexp.MustCompile(`[0-9]+:AA[0-9A-Za-z\-_]{33}`), - }, - { - name: "Twilio API Key", - regexp: regexp.MustCompile(`SK[0-9a-fA-F]{32}`), - }, - { - name: "Twitter Access Token", - regexp: regexp.MustCompile(`[tT][wW][iI][tT][tT][eE][rR].*[1-9][0-9]+-[0-9a-zA-Z]{40}`), - }, - { - name: "Twitter OAuth", - regexp: regexp.MustCompile(`[tT][wW][iI][tT][tT][eE][rR].*[''|"][0-9a-zA-Z]{35,44}[''|"]`), - }, -} - -type credentials struct { - issue.MetaData - pattern *regexp.Regexp - entropyThreshold float64 - perCharThreshold float64 - truncate int - ignoreEntropy bool -} - -func (r *credentials) ID() string { - return r.MetaData.ID -} - -func truncate(s string, n int) string { - if n > len(s) { - return s - } - return s[:n] -} - -func (r *credentials) isHighEntropyString(str string) bool { - s := truncate(str, r.truncate) - info := zxcvbn.PasswordStrength(s, []string{}) - entropyPerChar := info.Entropy / float64(len(s)) - return (info.Entropy >= r.entropyThreshold || - (info.Entropy >= (r.entropyThreshold/2) && - entropyPerChar >= r.perCharThreshold)) -} - -func (r *credentials) isSecretPattern(str string) (bool, string) { - for _, pattern := range secretsPatterns { - if pattern.regexp.MatchString(str) { - return true, pattern.name - } - } - return false, "" -} - -func (r *credentials) Match(n ast.Node, ctx *gosec.Context) (*issue.Issue, error) { - switch node := n.(type) { - case *ast.AssignStmt: - return r.matchAssign(node, ctx) - case *ast.ValueSpec: - return r.matchValueSpec(node, ctx) - case *ast.BinaryExpr: - return r.matchEqualityCheck(node, ctx) - } - return nil, nil -} - -func (r *credentials) matchAssign(assign *ast.AssignStmt, ctx *gosec.Context) (*issue.Issue, error) { - for _, i := range assign.Lhs { - if ident, ok := i.(*ast.Ident); ok { - // First check LHS to find anything being assigned to variables whose name appears to be a cred - if r.pattern.MatchString(ident.Name) { - for _, e := range assign.Rhs { - if val, err := gosec.GetString(e); err == nil { - if r.ignoreEntropy || (!r.ignoreEntropy && r.isHighEntropyString(val)) { - return ctx.NewIssue(assign, r.ID(), r.What, r.Severity, r.Confidence), nil - } - } - } - } - - // Now that no names were matched, match the RHS to see if the actual values being assigned are creds - for _, e := range assign.Rhs { - val, err := gosec.GetString(e) - if err != nil { - continue - } - - if r.ignoreEntropy || r.isHighEntropyString(val) { - if ok, 
patternName := r.isSecretPattern(val); ok { - return ctx.NewIssue(assign, r.ID(), fmt.Sprintf("%s: %s", r.What, patternName), r.Severity, r.Confidence), nil - } - } - } - } - } - return nil, nil -} - -func (r *credentials) matchValueSpec(valueSpec *ast.ValueSpec, ctx *gosec.Context) (*issue.Issue, error) { - // Running match against the variable name(s) first. Will catch any creds whose var name matches the pattern, - // then will go back over to check the values themselves. - for index, ident := range valueSpec.Names { - if r.pattern.MatchString(ident.Name) && valueSpec.Values != nil { - // const foo, bar = "same value" - if len(valueSpec.Values) <= index { - index = len(valueSpec.Values) - 1 - } - if val, err := gosec.GetString(valueSpec.Values[index]); err == nil { - if r.ignoreEntropy || (!r.ignoreEntropy && r.isHighEntropyString(val)) { - return ctx.NewIssue(valueSpec, r.ID(), r.What, r.Severity, r.Confidence), nil - } - } - } - } - - // Now that no variable names have been matched, match the actual values to find any creds - for _, ident := range valueSpec.Values { - if val, err := gosec.GetString(ident); err == nil { - if r.ignoreEntropy || r.isHighEntropyString(val) { - if ok, patternName := r.isSecretPattern(val); ok { - return ctx.NewIssue(valueSpec, r.ID(), fmt.Sprintf("%s: %s", r.What, patternName), r.Severity, r.Confidence), nil - } - } - } - } - - return nil, nil -} - -func (r *credentials) matchEqualityCheck(binaryExpr *ast.BinaryExpr, ctx *gosec.Context) (*issue.Issue, error) { - if binaryExpr.Op == token.EQL || binaryExpr.Op == token.NEQ { - ident, ok := binaryExpr.X.(*ast.Ident) - if !ok { - ident, _ = binaryExpr.Y.(*ast.Ident) - } - - if ident != nil && r.pattern.MatchString(ident.Name) { - valueNode := binaryExpr.Y - if !ok { - valueNode = binaryExpr.X - } - if val, err := gosec.GetString(valueNode); err == nil { - if r.ignoreEntropy || (!r.ignoreEntropy && r.isHighEntropyString(val)) { - return ctx.NewIssue(binaryExpr, r.ID(), r.What, r.Severity, r.Confidence), nil - } - } - } - - // Now that the variable names have been checked, and no matches were found, make sure that - // either the left or right operands is a string literal so we can match the value. - identStrConst, ok := binaryExpr.X.(*ast.BasicLit) - if !ok { - identStrConst, ok = binaryExpr.Y.(*ast.BasicLit) - } - - if ok && identStrConst.Kind == token.STRING { - s, _ := gosec.GetString(identStrConst) - if r.ignoreEntropy || r.isHighEntropyString(s) { - if ok, patternName := r.isSecretPattern(s); ok { - return ctx.NewIssue(binaryExpr, r.ID(), fmt.Sprintf("%s: %s", r.What, patternName), r.Severity, r.Confidence), nil - } - } - } - } - return nil, nil -} - -// NewHardcodedCredentials attempts to find high entropy string constants being -// assigned to variables that appear to be related to credentials. 
-func NewHardcodedCredentials(id string, conf gosec.Config) (gosec.Rule, []ast.Node) { - pattern := `(?i)passwd|pass|password|pwd|secret|token|pw|apiKey|bearer|cred` - entropyThreshold := 80.0 - perCharThreshold := 3.0 - ignoreEntropy := false - truncateString := 16 - if val, ok := conf[id]; ok { - conf := val.(map[string]interface{}) - if configPattern, ok := conf["pattern"]; ok { - if cfgPattern, ok := configPattern.(string); ok { - pattern = cfgPattern - } - } - - if configIgnoreEntropy, ok := conf["ignore_entropy"]; ok { - if cfgIgnoreEntropy, ok := configIgnoreEntropy.(bool); ok { - ignoreEntropy = cfgIgnoreEntropy - } - } - if configEntropyThreshold, ok := conf["entropy_threshold"]; ok { - if cfgEntropyThreshold, ok := configEntropyThreshold.(string); ok { - if parsedNum, err := strconv.ParseFloat(cfgEntropyThreshold, 64); err == nil { - entropyThreshold = parsedNum - } - } - } - if configCharThreshold, ok := conf["per_char_threshold"]; ok { - if cfgCharThreshold, ok := configCharThreshold.(string); ok { - if parsedNum, err := strconv.ParseFloat(cfgCharThreshold, 64); err == nil { - perCharThreshold = parsedNum - } - } - } - if configTruncate, ok := conf["truncate"]; ok { - if cfgTruncate, ok := configTruncate.(string); ok { - if parsedInt, err := strconv.Atoi(cfgTruncate); err == nil { - truncateString = parsedInt - } - } - } - } - - return &credentials{ - pattern: regexp.MustCompile(pattern), - entropyThreshold: entropyThreshold, - perCharThreshold: perCharThreshold, - ignoreEntropy: ignoreEntropy, - truncate: truncateString, - MetaData: issue.MetaData{ - ID: id, - What: "Potential hardcoded credentials", - Confidence: issue.Low, - Severity: issue.High, - }, - }, []ast.Node{(*ast.AssignStmt)(nil), (*ast.ValueSpec)(nil), (*ast.BinaryExpr)(nil)} -} diff --git a/vendor/github.com/securego/gosec/v2/rules/http_serve.go b/vendor/github.com/securego/gosec/v2/rules/http_serve.go deleted file mode 100644 index 525ed4ebc..000000000 --- a/vendor/github.com/securego/gosec/v2/rules/http_serve.go +++ /dev/null @@ -1,39 +0,0 @@ -package rules - -import ( - "go/ast" - - "github.com/securego/gosec/v2" - "github.com/securego/gosec/v2/issue" -) - -type httpServeWithoutTimeouts struct { - issue.MetaData - pkg string - calls []string -} - -func (r *httpServeWithoutTimeouts) ID() string { - return r.MetaData.ID -} - -func (r *httpServeWithoutTimeouts) Match(n ast.Node, c *gosec.Context) (gi *issue.Issue, err error) { - if _, matches := gosec.MatchCallByPackage(n, c, r.pkg, r.calls...); matches { - return c.NewIssue(n, r.ID(), r.What, r.Severity, r.Confidence), nil - } - return nil, nil -} - -// NewHTTPServeWithoutTimeouts detects use of net/http serve functions that have no support for setting timeouts. 
-func NewHTTPServeWithoutTimeouts(id string, _ gosec.Config) (gosec.Rule, []ast.Node) { - return &httpServeWithoutTimeouts{ - pkg: "net/http", - calls: []string{"ListenAndServe", "ListenAndServeTLS", "Serve", "ServeTLS"}, - MetaData: issue.MetaData{ - ID: id, - What: "Use of net/http serve function that has no support for setting timeouts", - Severity: issue.Medium, - Confidence: issue.High, - }, - }, []ast.Node{(*ast.CallExpr)(nil)} -} diff --git a/vendor/github.com/securego/gosec/v2/rules/implicit_aliasing.go b/vendor/github.com/securego/gosec/v2/rules/implicit_aliasing.go deleted file mode 100644 index a7eabb20b..000000000 --- a/vendor/github.com/securego/gosec/v2/rules/implicit_aliasing.go +++ /dev/null @@ -1,142 +0,0 @@ -package rules - -import ( - "go/ast" - "go/token" - "go/types" - - "github.com/securego/gosec/v2" - "github.com/securego/gosec/v2/issue" -) - -type implicitAliasing struct { - issue.MetaData - aliases map[*ast.Object]struct{} - rightBrace token.Pos - acceptableAlias []*ast.UnaryExpr -} - -func (r *implicitAliasing) ID() string { - return r.MetaData.ID -} - -func containsUnary(exprs []*ast.UnaryExpr, expr *ast.UnaryExpr) bool { - for _, e := range exprs { - if e == expr { - return true - } - } - return false -} - -func getIdentExpr(expr ast.Expr) (*ast.Ident, bool) { - return doGetIdentExpr(expr, false) -} - -func doGetIdentExpr(expr ast.Expr, hasSelector bool) (*ast.Ident, bool) { - switch node := expr.(type) { - case *ast.Ident: - return node, hasSelector - case *ast.SelectorExpr: - return doGetIdentExpr(node.X, true) - case *ast.UnaryExpr: - return doGetIdentExpr(node.X, hasSelector) - default: - return nil, false - } -} - -func (r *implicitAliasing) Match(n ast.Node, c *gosec.Context) (*issue.Issue, error) { - switch node := n.(type) { - case *ast.RangeStmt: - // When presented with a range statement, get the underlying Object bound to - // by assignment and add it to our set (r.aliases) of objects to check for. - if key, ok := node.Value.(*ast.Ident); ok { - if key.Obj != nil { - if assignment, ok := key.Obj.Decl.(*ast.AssignStmt); ok { - if len(assignment.Lhs) < 2 { - return nil, nil - } - - if object, ok := assignment.Lhs[1].(*ast.Ident); ok { - r.aliases[object.Obj] = struct{}{} - - if r.rightBrace < node.Body.Rbrace { - r.rightBrace = node.Body.Rbrace - } - } - } - } - } - - case *ast.UnaryExpr: - // If this unary expression is outside of the last range statement we were looking at - // then clear the list of objects we're concerned about because they're no longer in - // scope - if node.Pos() > r.rightBrace { - r.aliases = make(map[*ast.Object]struct{}) - r.acceptableAlias = make([]*ast.UnaryExpr, 0) - } - - // Short circuit logic to skip checking aliases if we have nothing to check against. - if len(r.aliases) == 0 { - return nil, nil - } - - // If this unary is at the top level of a return statement then it is okay-- - // see *ast.ReturnStmt comment below. - if containsUnary(r.acceptableAlias, node) { - return nil, nil - } - - // If we find a unary op of & (reference) of an object within r.aliases, complain. 
- if identExpr, hasSelector := getIdentExpr(node); identExpr != nil && node.Op.String() == "&" { - if _, contains := r.aliases[identExpr.Obj]; contains { - _, isPointer := c.Info.TypeOf(identExpr).(*types.Pointer) - - if !hasSelector || !isPointer { - return c.NewIssue(n, r.ID(), r.What, r.Severity, r.Confidence), nil - } - } - } - case *ast.ReturnStmt: - // Returning a rangeStmt yielded value is acceptable since only one value will be returned - for _, item := range node.Results { - if unary, ok := item.(*ast.UnaryExpr); ok && unary.Op.String() == "&" { - r.acceptableAlias = append(r.acceptableAlias, unary) - } - } - } - - return nil, nil -} - -// NewImplicitAliasing detects implicit memory aliasing of type: for blah := SomeCall() {... SomeOtherCall(&blah) ...} -func NewImplicitAliasing(id string, _ gosec.Config) (gosec.Rule, []ast.Node) { - return &implicitAliasing{ - aliases: make(map[*ast.Object]struct{}), - rightBrace: token.NoPos, - acceptableAlias: make([]*ast.UnaryExpr, 0), - MetaData: issue.MetaData{ - ID: id, - Severity: issue.Medium, - Confidence: issue.Medium, - What: "Implicit memory aliasing in for loop.", - }, - }, []ast.Node{(*ast.RangeStmt)(nil), (*ast.UnaryExpr)(nil), (*ast.ReturnStmt)(nil)} -} - -/* -This rule is prone to flag false positives. - -Within GoSec, the rule is just an AST match-- there are a handful of other -implementation strategies which might lend more nuance to the rule at the -cost of allowing false negatives. - -From a tooling side, I'd rather have this rule flag false positives than -potentially have some false negatives-- especially if the sentiment of this -rule (as I understand it, and Go) is that referencing a rangeStmt-yielded -value is kinda strange and does not have a strongly justified use case. - -Which is to say-- a false positive _should_ just be changed. -*/ diff --git a/vendor/github.com/securego/gosec/v2/rules/integer_overflow.go b/vendor/github.com/securego/gosec/v2/rules/integer_overflow.go deleted file mode 100644 index 1d5790664..000000000 --- a/vendor/github.com/securego/gosec/v2/rules/integer_overflow.go +++ /dev/null @@ -1,90 +0,0 @@ -// (c) Copyright 2016 Hewlett Packard Enterprise Development LP -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package rules - -import ( - "fmt" - "go/ast" - - "github.com/securego/gosec/v2" - "github.com/securego/gosec/v2/issue" -) - -type integerOverflowCheck struct { - issue.MetaData - calls gosec.CallList -} - -func (i *integerOverflowCheck) ID() string { - return i.MetaData.ID -} - -func (i *integerOverflowCheck) Match(node ast.Node, ctx *gosec.Context) (*issue.Issue, error) { - var atoiVarObj map[*ast.Object]ast.Node - - // To check multiple lines, ctx.PassedValues is used to store temporary data. 
- if _, ok := ctx.PassedValues[i.ID()]; !ok { - atoiVarObj = make(map[*ast.Object]ast.Node) - ctx.PassedValues[i.ID()] = atoiVarObj - } else if pv, ok := ctx.PassedValues[i.ID()].(map[*ast.Object]ast.Node); ok { - atoiVarObj = pv - } else { - return nil, fmt.Errorf("PassedValues[%s] of Context is not map[*ast.Object]ast.Node, but %T", i.ID(), ctx.PassedValues[i.ID()]) - } - - // strconv.Atoi is a common function. - // To reduce false positives, This rule detects code which is converted to int32/int16 only. - switch n := node.(type) { - case *ast.AssignStmt: - for _, expr := range n.Rhs { - if callExpr, ok := expr.(*ast.CallExpr); ok && i.calls.ContainsPkgCallExpr(callExpr, ctx, false) != nil { - if idt, ok := n.Lhs[0].(*ast.Ident); ok && idt.Name != "_" { - // Example: - // v, _ := strconv.Atoi("1111") - // Add v's Obj to atoiVarObj map - atoiVarObj[idt.Obj] = n - } - } - } - case *ast.CallExpr: - if fun, ok := n.Fun.(*ast.Ident); ok { - if fun.Name == "int32" || fun.Name == "int16" { - if idt, ok := n.Args[0].(*ast.Ident); ok { - if _, ok := atoiVarObj[idt.Obj]; ok { - // Detect int32(v) and int16(v) - return ctx.NewIssue(n, i.ID(), i.What, i.Severity, i.Confidence), nil - } - } - } - } - } - - return nil, nil -} - -// NewIntegerOverflowCheck detects if there is potential Integer OverFlow -func NewIntegerOverflowCheck(id string, _ gosec.Config) (gosec.Rule, []ast.Node) { - calls := gosec.NewCallList() - calls.Add("strconv", "Atoi") - return &integerOverflowCheck{ - MetaData: issue.MetaData{ - ID: id, - Severity: issue.High, - Confidence: issue.Medium, - What: "Potential Integer overflow made by strconv.Atoi result conversion to int16/32", - }, - calls: calls, - }, []ast.Node{(*ast.FuncDecl)(nil), (*ast.AssignStmt)(nil), (*ast.CallExpr)(nil)} -} diff --git a/vendor/github.com/securego/gosec/v2/rules/math_big_rat.go b/vendor/github.com/securego/gosec/v2/rules/math_big_rat.go deleted file mode 100644 index 1aac1fa20..000000000 --- a/vendor/github.com/securego/gosec/v2/rules/math_big_rat.go +++ /dev/null @@ -1,45 +0,0 @@ -package rules - -import ( - "go/ast" - - "github.com/securego/gosec/v2" - "github.com/securego/gosec/v2/issue" -) - -type usingOldMathBig struct { - issue.MetaData - calls gosec.CallList -} - -func (r *usingOldMathBig) ID() string { - return r.MetaData.ID -} - -func (r *usingOldMathBig) Match(node ast.Node, ctx *gosec.Context) (gi *issue.Issue, err error) { - if callExpr := r.calls.ContainsPkgCallExpr(node, ctx, false); callExpr == nil { - return nil, nil - } - - confidence := issue.Low - major, minor, build := gosec.GoVersion() - if major == 1 && (minor == 16 && build < 14 || minor == 17 && build < 7) { - confidence = issue.Medium - } - - return ctx.NewIssue(node, r.ID(), r.What, r.Severity, confidence), nil -} - -// NewUsingOldMathBig rule detects the use of Rat.SetString from math/big. 
-func NewUsingOldMathBig(id string, _ gosec.Config) (gosec.Rule, []ast.Node) { - calls := gosec.NewCallList() - calls.Add("math/big.Rat", "SetString") - return &usingOldMathBig{ - calls: calls, - MetaData: issue.MetaData{ - ID: id, - What: "Potential uncontrolled memory consumption in Rat.SetString (CVE-2022-23772)", - Severity: issue.High, - }, - }, []ast.Node{(*ast.CallExpr)(nil)} -} diff --git a/vendor/github.com/securego/gosec/v2/rules/pprof.go b/vendor/github.com/securego/gosec/v2/rules/pprof.go deleted file mode 100644 index 68498dd5e..000000000 --- a/vendor/github.com/securego/gosec/v2/rules/pprof.go +++ /dev/null @@ -1,43 +0,0 @@ -package rules - -import ( - "go/ast" - - "github.com/securego/gosec/v2" - "github.com/securego/gosec/v2/issue" -) - -type pprofCheck struct { - issue.MetaData - importPath string - importName string -} - -// ID returns the ID of the check -func (p *pprofCheck) ID() string { - return p.MetaData.ID -} - -// Match checks for pprof imports -func (p *pprofCheck) Match(n ast.Node, c *gosec.Context) (*issue.Issue, error) { - if node, ok := n.(*ast.ImportSpec); ok { - if p.importPath == unquote(node.Path.Value) && node.Name != nil && p.importName == node.Name.Name { - return c.NewIssue(node, p.ID(), p.What, p.Severity, p.Confidence), nil - } - } - return nil, nil -} - -// NewPprofCheck detects when the profiling endpoint is automatically exposed -func NewPprofCheck(id string, _ gosec.Config) (gosec.Rule, []ast.Node) { - return &pprofCheck{ - MetaData: issue.MetaData{ - ID: id, - Severity: issue.High, - Confidence: issue.High, - What: "Profiling endpoint is automatically exposed on /debug/pprof", - }, - importPath: "net/http/pprof", - importName: "_", - }, []ast.Node{(*ast.ImportSpec)(nil)} -} diff --git a/vendor/github.com/securego/gosec/v2/rules/rand.go b/vendor/github.com/securego/gosec/v2/rules/rand.go deleted file mode 100644 index 4491fd928..000000000 --- a/vendor/github.com/securego/gosec/v2/rules/rand.go +++ /dev/null @@ -1,59 +0,0 @@ -// (c) Copyright 2016 Hewlett Packard Enterprise Development LP -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package rules - -import ( - "go/ast" - - "github.com/securego/gosec/v2" - "github.com/securego/gosec/v2/issue" -) - -type weakRand struct { - issue.MetaData - funcNames []string - packagePath string -} - -func (w *weakRand) ID() string { - return w.MetaData.ID -} - -func (w *weakRand) Match(n ast.Node, c *gosec.Context) (*issue.Issue, error) { - for _, funcName := range w.funcNames { - if _, matched := gosec.MatchCallByPackage(n, c, w.packagePath, funcName); matched { - return c.NewIssue(n, w.ID(), w.What, w.Severity, w.Confidence), nil - } - } - - return nil, nil -} - -// NewWeakRandCheck detects the use of random number generator that isn't cryptographically secure -func NewWeakRandCheck(id string, _ gosec.Config) (gosec.Rule, []ast.Node) { - return &weakRand{ - funcNames: []string{ - "New", "Read", "Float32", "Float64", "Int", "Int31", - "Int31n", "Int63", "Int63n", "Intn", "NormalFloat64", "Uint32", "Uint64", - }, - packagePath: "math/rand", - MetaData: issue.MetaData{ - ID: id, - Severity: issue.High, - Confidence: issue.Medium, - What: "Use of weak random number generator (math/rand instead of crypto/rand)", - }, - }, []ast.Node{(*ast.CallExpr)(nil)} -} diff --git a/vendor/github.com/securego/gosec/v2/rules/readfile.go b/vendor/github.com/securego/gosec/v2/rules/readfile.go deleted file mode 100644 index 7ef4bbad1..000000000 --- a/vendor/github.com/securego/gosec/v2/rules/readfile.go +++ /dev/null @@ -1,152 +0,0 @@ -// (c) Copyright 2016 Hewlett Packard Enterprise Development LP -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package rules - -import ( - "go/ast" - "go/types" - - "github.com/securego/gosec/v2" - "github.com/securego/gosec/v2/issue" -) - -type readfile struct { - issue.MetaData - gosec.CallList - pathJoin gosec.CallList - clean gosec.CallList - cleanedVar map[any]ast.Node -} - -// ID returns the identifier for this rule -func (r *readfile) ID() string { - return r.MetaData.ID -} - -// isJoinFunc checks if there is a filepath.Join or other join function -func (r *readfile) isJoinFunc(n ast.Node, c *gosec.Context) bool { - if call := r.pathJoin.ContainsPkgCallExpr(n, c, false); call != nil { - for _, arg := range call.Args { - // edge case: check if one of the args is a BinaryExpr - if binExp, ok := arg.(*ast.BinaryExpr); ok { - // iterate and resolve all found identities from the BinaryExpr - if _, ok := gosec.FindVarIdentities(binExp, c); ok { - return true - } - } - - // try and resolve identity - if ident, ok := arg.(*ast.Ident); ok { - obj := c.Info.ObjectOf(ident) - if _, ok := obj.(*types.Var); ok && !gosec.TryResolve(ident, c) { - return true - } - } - } - } - return false -} - -// isFilepathClean checks if there is a filepath.Clean for given variable -func (r *readfile) isFilepathClean(n *ast.Ident, c *gosec.Context) bool { - if _, ok := r.cleanedVar[n.Obj.Decl]; ok { - return true - } - if n.Obj.Kind != ast.Var { - return false - } - if node, ok := n.Obj.Decl.(*ast.AssignStmt); ok { - if call, ok := node.Rhs[0].(*ast.CallExpr); ok { - if clean := r.clean.ContainsPkgCallExpr(call, c, false); clean != nil { - return true - } - } - } - return false -} - -// trackFilepathClean tracks back the declaration of variable from filepath.Clean argument -func (r *readfile) trackFilepathClean(n ast.Node) { - if clean, ok := n.(*ast.CallExpr); ok && len(clean.Args) > 0 { - if ident, ok := clean.Args[0].(*ast.Ident); ok { - // ident.Obj may be nil if the referenced declaration is in another file. It also may be incorrect. - // if it is nil, do not follow it. - if ident.Obj != nil { - r.cleanedVar[ident.Obj.Decl] = n - } - } - } -} - -// Match inspects AST nodes to determine if the match the methods `os.Open` or `ioutil.ReadFile` -func (r *readfile) Match(n ast.Node, c *gosec.Context) (*issue.Issue, error) { - if node := r.clean.ContainsPkgCallExpr(n, c, false); node != nil { - r.trackFilepathClean(n) - return nil, nil - } else if node := r.ContainsPkgCallExpr(n, c, false); node != nil { - for _, arg := range node.Args { - // handles path joining functions in Arg - // eg. os.Open(filepath.Join("/tmp/", file)) - if callExpr, ok := arg.(*ast.CallExpr); ok { - if r.isJoinFunc(callExpr, c) { - return c.NewIssue(n, r.ID(), r.What, r.Severity, r.Confidence), nil - } - } - // handles binary string concatenation eg. 
ioutil.Readfile("/tmp/" + file + "/blob") - if binExp, ok := arg.(*ast.BinaryExpr); ok { - // resolve all found identities from the BinaryExpr - if _, ok := gosec.FindVarIdentities(binExp, c); ok { - return c.NewIssue(n, r.ID(), r.What, r.Severity, r.Confidence), nil - } - } - - if ident, ok := arg.(*ast.Ident); ok { - obj := c.Info.ObjectOf(ident) - if _, ok := obj.(*types.Var); ok && - !gosec.TryResolve(ident, c) && - !r.isFilepathClean(ident, c) { - return c.NewIssue(n, r.ID(), r.What, r.Severity, r.Confidence), nil - } - } - } - } - return nil, nil -} - -// NewReadFile detects cases where we read files -func NewReadFile(id string, _ gosec.Config) (gosec.Rule, []ast.Node) { - rule := &readfile{ - pathJoin: gosec.NewCallList(), - clean: gosec.NewCallList(), - CallList: gosec.NewCallList(), - MetaData: issue.MetaData{ - ID: id, - What: "Potential file inclusion via variable", - Severity: issue.Medium, - Confidence: issue.High, - }, - cleanedVar: map[any]ast.Node{}, - } - rule.pathJoin.Add("path/filepath", "Join") - rule.pathJoin.Add("path", "Join") - rule.clean.Add("path/filepath", "Clean") - rule.clean.Add("path/filepath", "Rel") - rule.Add("io/ioutil", "ReadFile") - rule.Add("os", "ReadFile") - rule.Add("os", "Open") - rule.Add("os", "OpenFile") - rule.Add("os", "Create") - return rule, []ast.Node{(*ast.CallExpr)(nil)} -} diff --git a/vendor/github.com/securego/gosec/v2/rules/rsa.go b/vendor/github.com/securego/gosec/v2/rules/rsa.go deleted file mode 100644 index 331e7fc80..000000000 --- a/vendor/github.com/securego/gosec/v2/rules/rsa.go +++ /dev/null @@ -1,59 +0,0 @@ -// (c) Copyright 2016 Hewlett Packard Enterprise Development LP -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package rules - -import ( - "fmt" - "go/ast" - - "github.com/securego/gosec/v2" - "github.com/securego/gosec/v2/issue" -) - -type weakKeyStrength struct { - issue.MetaData - calls gosec.CallList - bits int -} - -func (w *weakKeyStrength) ID() string { - return w.MetaData.ID -} - -func (w *weakKeyStrength) Match(n ast.Node, c *gosec.Context) (*issue.Issue, error) { - if callExpr := w.calls.ContainsPkgCallExpr(n, c, false); callExpr != nil { - if bits, err := gosec.GetInt(callExpr.Args[1]); err == nil && bits < (int64)(w.bits) { - return c.NewIssue(n, w.ID(), w.What, w.Severity, w.Confidence), nil - } - } - return nil, nil -} - -// NewWeakKeyStrength builds a rule that detects RSA keys < 2048 bits -func NewWeakKeyStrength(id string, _ gosec.Config) (gosec.Rule, []ast.Node) { - calls := gosec.NewCallList() - calls.Add("crypto/rsa", "GenerateKey") - bits := 2048 - return &weakKeyStrength{ - calls: calls, - bits: bits, - MetaData: issue.MetaData{ - ID: id, - Severity: issue.Medium, - Confidence: issue.High, - What: fmt.Sprintf("RSA keys should be at least %d bits", bits), - }, - }, []ast.Node{(*ast.CallExpr)(nil)} -} diff --git a/vendor/github.com/securego/gosec/v2/rules/rulelist.go b/vendor/github.com/securego/gosec/v2/rules/rulelist.go deleted file mode 100644 index f9ca4f52c..000000000 --- a/vendor/github.com/securego/gosec/v2/rules/rulelist.go +++ /dev/null @@ -1,130 +0,0 @@ -// (c) Copyright 2016 Hewlett Packard Enterprise Development LP -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package rules - -import "github.com/securego/gosec/v2" - -// RuleDefinition contains the description of a rule and a mechanism to -// create it. -type RuleDefinition struct { - ID string - Description string - Create gosec.RuleBuilder -} - -// RuleList contains a mapping of rule ID's to rule definitions and a mapping -// of rule ID's to whether rules are suppressed. -type RuleList struct { - Rules map[string]RuleDefinition - RuleSuppressed map[string]bool -} - -// RulesInfo returns all the create methods and the rule suppressed map for a -// given list -func (rl RuleList) RulesInfo() (map[string]gosec.RuleBuilder, map[string]bool) { - builders := make(map[string]gosec.RuleBuilder) - for _, def := range rl.Rules { - builders[def.ID] = def.Create - } - return builders, rl.RuleSuppressed -} - -// RuleFilter can be used to include or exclude a rule depending on the return -// value of the function -type RuleFilter func(string) bool - -// NewRuleFilter is a closure that will include/exclude the rule ID's based on -// the supplied boolean value. 
-func NewRuleFilter(action bool, ruleIDs ...string) RuleFilter { - rulelist := make(map[string]bool) - for _, rule := range ruleIDs { - rulelist[rule] = true - } - return func(rule string) bool { - if _, found := rulelist[rule]; found { - return action - } - return !action - } -} - -// Generate the list of rules to use -func Generate(trackSuppressions bool, filters ...RuleFilter) RuleList { - rules := []RuleDefinition{ - // misc - {"G101", "Look for hardcoded credentials", NewHardcodedCredentials}, - {"G102", "Bind to all interfaces", NewBindsToAllNetworkInterfaces}, - {"G103", "Audit the use of unsafe block", NewUsingUnsafe}, - {"G104", "Audit errors not checked", NewNoErrorCheck}, - {"G106", "Audit the use of ssh.InsecureIgnoreHostKey function", NewSSHHostKey}, - {"G107", "Url provided to HTTP request as taint input", NewSSRFCheck}, - {"G108", "Profiling endpoint is automatically exposed", NewPprofCheck}, - {"G109", "Converting strconv.Atoi result to int32/int16", NewIntegerOverflowCheck}, - {"G110", "Detect io.Copy instead of io.CopyN when decompression", NewDecompressionBombCheck}, - {"G111", "Detect http.Dir('/') as a potential risk", NewDirectoryTraversal}, - {"G112", "Detect ReadHeaderTimeout not configured as a potential risk", NewSlowloris}, - {"G113", "Usage of Rat.SetString in math/big with an overflow", NewUsingOldMathBig}, - {"G114", "Use of net/http serve function that has no support for setting timeouts", NewHTTPServeWithoutTimeouts}, - - // injection - {"G201", "SQL query construction using format string", NewSQLStrFormat}, - {"G202", "SQL query construction using string concatenation", NewSQLStrConcat}, - {"G203", "Use of unescaped data in HTML templates", NewTemplateCheck}, - {"G204", "Audit use of command execution", NewSubproc}, - - // filesystem - {"G301", "Poor file permissions used when creating a directory", NewMkdirPerms}, - {"G302", "Poor file permissions used when creation file or using chmod", NewFilePerms}, - {"G303", "Creating tempfile using a predictable path", NewBadTempFile}, - {"G304", "File path provided as taint input", NewReadFile}, - {"G305", "File path traversal when extracting zip archive", NewArchive}, - {"G306", "Poor file permissions used when writing to a file", NewWritePerms}, - {"G307", "Poor file permissions used when creating a file with os.Create", NewOsCreatePerms}, - - // crypto - {"G401", "Detect the usage of DES, RC4, MD5 or SHA1", NewUsesWeakCryptography}, - {"G402", "Look for bad TLS connection settings", NewIntermediateTLSCheck}, - {"G403", "Ensure minimum RSA key length of 2048 bits", NewWeakKeyStrength}, - {"G404", "Insecure random number source (rand)", NewWeakRandCheck}, - - // blocklist - {"G501", "Import blocklist: crypto/md5", NewBlocklistedImportMD5}, - {"G502", "Import blocklist: crypto/des", NewBlocklistedImportDES}, - {"G503", "Import blocklist: crypto/rc4", NewBlocklistedImportRC4}, - {"G504", "Import blocklist: net/http/cgi", NewBlocklistedImportCGI}, - {"G505", "Import blocklist: crypto/sha1", NewBlocklistedImportSHA1}, - - // memory safety - {"G601", "Implicit memory aliasing in RangeStmt", NewImplicitAliasing}, - } - - ruleMap := make(map[string]RuleDefinition) - ruleSuppressedMap := make(map[string]bool) - -RULES: - for _, rule := range rules { - ruleSuppressedMap[rule.ID] = false - for _, filter := range filters { - if filter(rule.ID) { - ruleSuppressedMap[rule.ID] = true - if !trackSuppressions { - continue RULES - } - } - } - ruleMap[rule.ID] = rule - } - return RuleList{ruleMap, ruleSuppressedMap} -} diff --git 
a/vendor/github.com/securego/gosec/v2/rules/slowloris.go b/vendor/github.com/securego/gosec/v2/rules/slowloris.go deleted file mode 100644 index 70db73f5f..000000000 --- a/vendor/github.com/securego/gosec/v2/rules/slowloris.go +++ /dev/null @@ -1,71 +0,0 @@ -// (c) Copyright 2016 Hewlett Packard Enterprise Development LP -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package rules - -import ( - "go/ast" - - "github.com/securego/gosec/v2" - "github.com/securego/gosec/v2/issue" -) - -type slowloris struct { - issue.MetaData -} - -func (r *slowloris) ID() string { - return r.MetaData.ID -} - -func containsReadHeaderTimeout(node *ast.CompositeLit) bool { - if node == nil { - return false - } - for _, elt := range node.Elts { - if kv, ok := elt.(*ast.KeyValueExpr); ok { - if ident, ok := kv.Key.(*ast.Ident); ok { - if ident.Name == "ReadHeaderTimeout" || ident.Name == "ReadTimeout" { - return true - } - } - } - } - return false -} - -func (r *slowloris) Match(n ast.Node, ctx *gosec.Context) (*issue.Issue, error) { - switch node := n.(type) { - case *ast.CompositeLit: - actualType := ctx.Info.TypeOf(node.Type) - if actualType != nil && actualType.String() == "net/http.Server" { - if !containsReadHeaderTimeout(node) { - return ctx.NewIssue(node, r.ID(), r.What, r.Severity, r.Confidence), nil - } - } - } - return nil, nil -} - -// NewSlowloris attempts to find the http.Server struct and check if the ReadHeaderTimeout is configured. -func NewSlowloris(id string, _ gosec.Config) (gosec.Rule, []ast.Node) { - return &slowloris{ - MetaData: issue.MetaData{ - ID: id, - What: "Potential Slowloris Attack because ReadHeaderTimeout is not configured in the http.Server", - Confidence: issue.Low, - Severity: issue.Medium, - }, - }, []ast.Node{(*ast.CompositeLit)(nil)} -} diff --git a/vendor/github.com/securego/gosec/v2/rules/sql.go b/vendor/github.com/securego/gosec/v2/rules/sql.go deleted file mode 100644 index 61222bfdb..000000000 --- a/vendor/github.com/securego/gosec/v2/rules/sql.go +++ /dev/null @@ -1,405 +0,0 @@ -// (c) Copyright 2016 Hewlett Packard Enterprise Development LP -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package rules - -import ( - "fmt" - "go/ast" - "regexp" - - "github.com/securego/gosec/v2" - "github.com/securego/gosec/v2/issue" -) - -type sqlStatement struct { - issue.MetaData - gosec.CallList - - // Contains a list of patterns which must all match for the rule to match. 
- patterns []*regexp.Regexp -} - -var sqlCallIdents = map[string]map[string]int{ - "*database/sql.DB": { - "Exec": 0, - "ExecContext": 1, - "Query": 0, - "QueryContext": 1, - "QueryRow": 0, - "QueryRowContext": 1, - "Prepare": 0, - "PrepareContext": 1, - }, - "*database/sql.Tx": { - "Exec": 0, - "ExecContext": 1, - "Query": 0, - "QueryContext": 1, - "QueryRow": 0, - "QueryRowContext": 1, - "Prepare": 0, - "PrepareContext": 1, - }, -} - -// findQueryArg locates the argument taking raw SQL -func findQueryArg(call *ast.CallExpr, ctx *gosec.Context) (ast.Expr, error) { - typeName, fnName, err := gosec.GetCallInfo(call, ctx) - if err != nil { - return nil, err - } - i := -1 - if ni, ok := sqlCallIdents[typeName]; ok { - if i, ok = ni[fnName]; !ok { - i = -1 - } - } - if i == -1 { - return nil, fmt.Errorf("SQL argument index not found for %s.%s", typeName, fnName) - } - if i >= len(call.Args) { - return nil, nil - } - query := call.Args[i] - return query, nil -} - -func (s *sqlStatement) ID() string { - return s.MetaData.ID -} - -// See if the string matches the patterns for the statement. -func (s *sqlStatement) MatchPatterns(str string) bool { - for _, pattern := range s.patterns { - if !pattern.MatchString(str) { - return false - } - } - return true -} - -type sqlStrConcat struct { - sqlStatement -} - -func (s *sqlStrConcat) ID() string { - return s.MetaData.ID -} - -// findInjectionInBranch walks diwb a set if expressions, and will create new issues if it finds SQL injections -// This method assumes you've already verified that the branch contains SQL syntax -func (s *sqlStrConcat) findInjectionInBranch(ctx *gosec.Context, branch []ast.Expr) *ast.BinaryExpr { - for _, node := range branch { - be, ok := node.(*ast.BinaryExpr) - if !ok { - continue - } - - operands := gosec.GetBinaryExprOperands(be) - - for _, op := range operands { - if _, ok := op.(*ast.BasicLit); ok { - continue - } - - if ident, ok := op.(*ast.Ident); ok && s.checkObject(ident, ctx) { - continue - } - - return be - } - } - return nil -} - -// see if we can figure out what it is -func (s *sqlStrConcat) checkObject(n *ast.Ident, c *gosec.Context) bool { - if n.Obj != nil { - return n.Obj.Kind != ast.Var && n.Obj.Kind != ast.Fun - } - - // Try to resolve unresolved identifiers using other files in same package - for _, file := range c.PkgFiles { - if node, ok := file.Scope.Objects[n.String()]; ok { - return node.Kind != ast.Var && node.Kind != ast.Fun - } - } - return false -} - -// checkQuery verifies if the query parameters is a string concatenation -func (s *sqlStrConcat) checkQuery(call *ast.CallExpr, ctx *gosec.Context) (*issue.Issue, error) { - query, err := findQueryArg(call, ctx) - if err != nil { - return nil, err - } - - if be, ok := query.(*ast.BinaryExpr); ok { - operands := gosec.GetBinaryExprOperands(be) - if start, ok := operands[0].(*ast.BasicLit); ok { - if str, e := gosec.GetString(start); e == nil { - if !s.MatchPatterns(str) { - return nil, nil - } - } - for _, op := range operands[1:] { - if _, ok := op.(*ast.BasicLit); ok { - continue - } - if op, ok := op.(*ast.Ident); ok && s.checkObject(op, ctx) { - continue - } - return ctx.NewIssue(be, s.ID(), s.What, s.Severity, s.Confidence), nil - } - } - } - - // Handle the case where an injection occurs as an infixed string concatenation, ie "SELECT * FROM foo WHERE name = '" + os.Args[0] + "' AND 1=1" - if id, ok := query.(*ast.Ident); ok { - var match bool - for _, str := range gosec.GetIdentStringValuesRecursive(id) { - if s.MatchPatterns(str) { - match = 
true - break - } - } - - if !match { - return nil, nil - } - - switch decl := id.Obj.Decl.(type) { - case *ast.AssignStmt: - if injection := s.findInjectionInBranch(ctx, decl.Rhs); injection != nil { - return ctx.NewIssue(injection, s.ID(), s.What, s.Severity, s.Confidence), nil - } - } - } - - return nil, nil -} - -// Checks SQL query concatenation issues such as "SELECT * FROM table WHERE " + " ' OR 1=1" -func (s *sqlStrConcat) Match(n ast.Node, ctx *gosec.Context) (*issue.Issue, error) { - switch stmt := n.(type) { - case *ast.AssignStmt: - for _, expr := range stmt.Rhs { - if sqlQueryCall, ok := expr.(*ast.CallExpr); ok && s.ContainsCallExpr(expr, ctx) != nil { - return s.checkQuery(sqlQueryCall, ctx) - } - } - case *ast.ExprStmt: - if sqlQueryCall, ok := stmt.X.(*ast.CallExpr); ok && s.ContainsCallExpr(stmt.X, ctx) != nil { - return s.checkQuery(sqlQueryCall, ctx) - } - } - - return nil, nil -} - -// NewSQLStrConcat looks for cases where we are building SQL strings via concatenation -func NewSQLStrConcat(id string, _ gosec.Config) (gosec.Rule, []ast.Node) { - rule := &sqlStrConcat{ - sqlStatement: sqlStatement{ - patterns: []*regexp.Regexp{ - regexp.MustCompile("(?i)(SELECT|DELETE|INSERT|UPDATE|INTO|FROM|WHERE)( |\n|\r|\t)"), - }, - MetaData: issue.MetaData{ - ID: id, - Severity: issue.Medium, - Confidence: issue.High, - What: "SQL string concatenation", - }, - CallList: gosec.NewCallList(), - }, - } - - for s, si := range sqlCallIdents { - for i := range si { - rule.Add(s, i) - } - } - return rule, []ast.Node{(*ast.AssignStmt)(nil), (*ast.ExprStmt)(nil)} -} - -type sqlStrFormat struct { - gosec.CallList - sqlStatement - fmtCalls gosec.CallList - noIssue gosec.CallList - noIssueQuoted gosec.CallList -} - -// see if we can figure out what it is -func (s *sqlStrFormat) constObject(e ast.Expr, c *gosec.Context) bool { - n, ok := e.(*ast.Ident) - if !ok { - return false - } - - if n.Obj != nil { - return n.Obj.Kind == ast.Con - } - - // Try to resolve unresolved identifiers using other files in same package - for _, file := range c.PkgFiles { - if node, ok := file.Scope.Objects[n.String()]; ok { - return node.Kind == ast.Con - } - } - return false -} - -func (s *sqlStrFormat) checkQuery(call *ast.CallExpr, ctx *gosec.Context) (*issue.Issue, error) { - query, err := findQueryArg(call, ctx) - if err != nil { - return nil, err - } - - if ident, ok := query.(*ast.Ident); ok && ident.Obj != nil { - decl := ident.Obj.Decl - if assign, ok := decl.(*ast.AssignStmt); ok { - for _, expr := range assign.Rhs { - issue := s.checkFormatting(expr, ctx) - if issue != nil { - return issue, err - } - } - } - } - - return nil, nil -} - -func (s *sqlStrFormat) checkFormatting(n ast.Node, ctx *gosec.Context) *issue.Issue { - // argIndex changes the function argument which gets matched to the regex - argIndex := 0 - if node := s.fmtCalls.ContainsPkgCallExpr(n, ctx, false); node != nil { - // if the function is fmt.Fprintf, search for SQL statement in Args[1] instead - if sel, ok := node.Fun.(*ast.SelectorExpr); ok { - if sel.Sel.Name == "Fprintf" { - // if os.Stderr or os.Stdout is in Arg[0], mark as no issue - if arg, ok := node.Args[0].(*ast.SelectorExpr); ok { - if ident, ok := arg.X.(*ast.Ident); ok { - if s.noIssue.Contains(ident.Name, arg.Sel.Name) { - return nil - } - } - } - // the function is Fprintf so set argIndex = 1 - argIndex = 1 - } - } - - // no formatter - if len(node.Args) == 0 { - return nil - } - - var formatter string - - // concats callexpr arg strings together if needed before regex 
evaluation - if argExpr, ok := node.Args[argIndex].(*ast.BinaryExpr); ok { - if fullStr, ok := gosec.ConcatString(argExpr); ok { - formatter = fullStr - } - } else if arg, e := gosec.GetString(node.Args[argIndex]); e == nil { - formatter = arg - } - if len(formatter) <= 0 { - return nil - } - - // If all formatter args are quoted or constant, then the SQL construction is safe - if argIndex+1 < len(node.Args) { - allSafe := true - for _, arg := range node.Args[argIndex+1:] { - if n := s.noIssueQuoted.ContainsPkgCallExpr(arg, ctx, true); n == nil && !s.constObject(arg, ctx) { - allSafe = false - break - } - } - if allSafe { - return nil - } - } - if s.MatchPatterns(formatter) { - return ctx.NewIssue(n, s.ID(), s.What, s.Severity, s.Confidence) - } - } - return nil -} - -// Check SQL query formatting issues such as "fmt.Sprintf("SELECT * FROM foo where '%s', userInput)" -func (s *sqlStrFormat) Match(n ast.Node, ctx *gosec.Context) (*issue.Issue, error) { - switch stmt := n.(type) { - case *ast.AssignStmt: - for _, expr := range stmt.Rhs { - if call, ok := expr.(*ast.CallExpr); ok { - selector, ok := call.Fun.(*ast.SelectorExpr) - if !ok { - continue - } - sqlQueryCall, ok := selector.X.(*ast.CallExpr) - if ok && s.ContainsCallExpr(sqlQueryCall, ctx) != nil { - issue, err := s.checkQuery(sqlQueryCall, ctx) - if err == nil && issue != nil { - return issue, err - } - } - } - if sqlQueryCall, ok := expr.(*ast.CallExpr); ok && s.ContainsCallExpr(expr, ctx) != nil { - return s.checkQuery(sqlQueryCall, ctx) - } - } - case *ast.ExprStmt: - if sqlQueryCall, ok := stmt.X.(*ast.CallExpr); ok && s.ContainsCallExpr(stmt.X, ctx) != nil { - return s.checkQuery(sqlQueryCall, ctx) - } - } - return nil, nil -} - -// NewSQLStrFormat looks for cases where we're building SQL query strings using format strings -func NewSQLStrFormat(id string, _ gosec.Config) (gosec.Rule, []ast.Node) { - rule := &sqlStrFormat{ - CallList: gosec.NewCallList(), - fmtCalls: gosec.NewCallList(), - noIssue: gosec.NewCallList(), - noIssueQuoted: gosec.NewCallList(), - sqlStatement: sqlStatement{ - patterns: []*regexp.Regexp{ - regexp.MustCompile("(?i)(SELECT|DELETE|INSERT|UPDATE|INTO|FROM|WHERE)( |\n|\r|\t)"), - regexp.MustCompile("%[^bdoxXfFp]"), - }, - MetaData: issue.MetaData{ - ID: id, - Severity: issue.Medium, - Confidence: issue.High, - What: "SQL string formatting", - }, - }, - } - for s, si := range sqlCallIdents { - for i := range si { - rule.Add(s, i) - } - } - rule.fmtCalls.AddAll("fmt", "Sprint", "Sprintf", "Sprintln", "Fprintf") - rule.noIssue.AddAll("os", "Stdout", "Stderr") - rule.noIssueQuoted.Add("github.com/lib/pq", "QuoteIdentifier") - - return rule, []ast.Node{(*ast.AssignStmt)(nil), (*ast.ExprStmt)(nil)} -} diff --git a/vendor/github.com/securego/gosec/v2/rules/ssh.go b/vendor/github.com/securego/gosec/v2/rules/ssh.go deleted file mode 100644 index e2ba5a3f4..000000000 --- a/vendor/github.com/securego/gosec/v2/rules/ssh.go +++ /dev/null @@ -1,39 +0,0 @@ -package rules - -import ( - "go/ast" - - "github.com/securego/gosec/v2" - "github.com/securego/gosec/v2/issue" -) - -type sshHostKey struct { - issue.MetaData - pkg string - calls []string -} - -func (r *sshHostKey) ID() string { - return r.MetaData.ID -} - -func (r *sshHostKey) Match(n ast.Node, c *gosec.Context) (gi *issue.Issue, err error) { - if _, matches := gosec.MatchCallByPackage(n, c, r.pkg, r.calls...); matches { - return c.NewIssue(n, r.ID(), r.What, r.Severity, r.Confidence), nil - } - return nil, nil -} - -// NewSSHHostKey rule detects the use of 
insecure ssh HostKeyCallback. -func NewSSHHostKey(id string, _ gosec.Config) (gosec.Rule, []ast.Node) { - return &sshHostKey{ - pkg: "golang.org/x/crypto/ssh", - calls: []string{"InsecureIgnoreHostKey"}, - MetaData: issue.MetaData{ - ID: id, - What: "Use of ssh InsecureIgnoreHostKey should be audited", - Severity: issue.Medium, - Confidence: issue.High, - }, - }, []ast.Node{(*ast.CallExpr)(nil)} -} diff --git a/vendor/github.com/securego/gosec/v2/rules/ssrf.go b/vendor/github.com/securego/gosec/v2/rules/ssrf.go deleted file mode 100644 index dbf01081b..000000000 --- a/vendor/github.com/securego/gosec/v2/rules/ssrf.go +++ /dev/null @@ -1,67 +0,0 @@ -package rules - -import ( - "go/ast" - "go/types" - - "github.com/securego/gosec/v2" - "github.com/securego/gosec/v2/issue" -) - -type ssrf struct { - issue.MetaData - gosec.CallList -} - -// ID returns the identifier for this rule -func (r *ssrf) ID() string { - return r.MetaData.ID -} - -// ResolveVar tries to resolve the first argument of a call expression -// The first argument is the url -func (r *ssrf) ResolveVar(n *ast.CallExpr, c *gosec.Context) bool { - if len(n.Args) > 0 { - arg := n.Args[0] - if ident, ok := arg.(*ast.Ident); ok { - obj := c.Info.ObjectOf(ident) - if _, ok := obj.(*types.Var); ok { - scope := c.Pkg.Scope() - if scope != nil && scope.Lookup(ident.Name) != nil { - // a URL defined in a variable at package scope can be changed at any time - return true - } - if !gosec.TryResolve(ident, c) { - return true - } - } - } - } - return false -} - -// Match inspects AST nodes to determine if certain net/http methods are called with variable input -func (r *ssrf) Match(n ast.Node, c *gosec.Context) (*issue.Issue, error) { - // Call expression is using http package directly - if node := r.ContainsPkgCallExpr(n, c, false); node != nil { - if r.ResolveVar(node, c) { - return c.NewIssue(n, r.ID(), r.What, r.Severity, r.Confidence), nil - } - } - return nil, nil -} - -// NewSSRFCheck detects cases where HTTP requests are sent -func NewSSRFCheck(id string, _ gosec.Config) (gosec.Rule, []ast.Node) { - rule := &ssrf{ - CallList: gosec.NewCallList(), - MetaData: issue.MetaData{ - ID: id, - What: "Potential HTTP request made with variable url", - Severity: issue.Medium, - Confidence: issue.Medium, - }, - } - rule.AddAll("net/http", "Do", "Get", "Head", "Post", "PostForm", "RoundTrip") - return rule, []ast.Node{(*ast.CallExpr)(nil)} -} diff --git a/vendor/github.com/securego/gosec/v2/rules/subproc.go b/vendor/github.com/securego/gosec/v2/rules/subproc.go deleted file mode 100644 index 1e2cedaa5..000000000 --- a/vendor/github.com/securego/gosec/v2/rules/subproc.go +++ /dev/null @@ -1,123 +0,0 @@ -// (c) Copyright 2016 Hewlett Packard Enterprise Development LP -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package rules - -import ( - "go/ast" - "go/types" - - "github.com/securego/gosec/v2" - "github.com/securego/gosec/v2/issue" -) - -type subprocess struct { - issue.MetaData - gosec.CallList -} - -func (r *subprocess) ID() string { - return r.MetaData.ID -} - -// TODO(gm) The only real potential for command injection with a Go project -// is something like this: -// -// syscall.Exec("/bin/sh", []string{"-c", tainted}) -// -// E.g. Input is correctly escaped but the execution context being used -// is unsafe. For example: -// -// syscall.Exec("echo", "foobar" + tainted) -func (r *subprocess) Match(n ast.Node, c *gosec.Context) (*issue.Issue, error) { - if node := r.ContainsPkgCallExpr(n, c, false); node != nil { - args := node.Args - if r.isContext(n, c) { - args = args[1:] - } - for _, arg := range args { - if ident, ok := arg.(*ast.Ident); ok { - obj := c.Info.ObjectOf(ident) - - // need to cast and check whether it is for a variable ? - _, variable := obj.(*types.Var) - - // .. indeed it is a variable then processing is different than a normal - // field assignment - if variable { - // skip the check when the declaration is not available - if ident.Obj == nil { - continue - } - switch ident.Obj.Decl.(type) { - case *ast.AssignStmt: - _, assignment := ident.Obj.Decl.(*ast.AssignStmt) - if variable && assignment { - if !gosec.TryResolve(ident, c) { - return c.NewIssue(n, r.ID(), "Subprocess launched with variable", issue.Medium, issue.High), nil - } - } - case *ast.Field: - _, field := ident.Obj.Decl.(*ast.Field) - if variable && field { - // check if the variable exist in the scope - vv, vvok := obj.(*types.Var) - - if vvok && vv.Parent().Lookup(ident.Name) == nil { - return c.NewIssue(n, r.ID(), "Subprocess launched with variable", issue.Medium, issue.High), nil - } - } - case *ast.ValueSpec: - _, valueSpec := ident.Obj.Decl.(*ast.ValueSpec) - if variable && valueSpec { - if !gosec.TryResolve(ident, c) { - return c.NewIssue(n, r.ID(), "Subprocess launched with variable", issue.Medium, issue.High), nil - } - } - } - } - } else if !gosec.TryResolve(arg, c) { - // the arg is not a constant or a variable but instead a function call or os.Args[i] - return c.NewIssue(n, r.ID(), "Subprocess launched with a potential tainted input or cmd arguments", issue.Medium, issue.High), nil - } - } - } - return nil, nil -} - -// isContext checks whether or not the node is a CommandContext call or not -// This is required in order to skip the first argument from the check. 
-func (r *subprocess) isContext(n ast.Node, ctx *gosec.Context) bool { - selector, indent, err := gosec.GetCallInfo(n, ctx) - if err != nil { - return false - } - if selector == "exec" && indent == "CommandContext" { - return true - } - return false -} - -// NewSubproc detects cases where we are forking out to an external process -func NewSubproc(id string, _ gosec.Config) (gosec.Rule, []ast.Node) { - rule := &subprocess{issue.MetaData{ID: id}, gosec.NewCallList()} - rule.Add("os/exec", "Command") - rule.Add("os/exec", "CommandContext") - rule.Add("syscall", "Exec") - rule.Add("syscall", "ForkExec") - rule.Add("syscall", "StartProcess") - rule.Add("golang.org/x/sys/execabs", "Command") - rule.Add("golang.org/x/sys/execabs", "CommandContext") - return rule, []ast.Node{(*ast.CallExpr)(nil)} -} diff --git a/vendor/github.com/securego/gosec/v2/rules/tempfiles.go b/vendor/github.com/securego/gosec/v2/rules/tempfiles.go deleted file mode 100644 index 6fef52a2c..000000000 --- a/vendor/github.com/securego/gosec/v2/rules/tempfiles.go +++ /dev/null @@ -1,88 +0,0 @@ -// (c) Copyright 2016 Hewlett Packard Enterprise Development LP -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package rules - -import ( - "go/ast" - "regexp" - - "github.com/securego/gosec/v2" - "github.com/securego/gosec/v2/issue" -) - -type badTempFile struct { - issue.MetaData - calls gosec.CallList - args *regexp.Regexp - argCalls gosec.CallList - nestedCalls gosec.CallList -} - -func (t *badTempFile) ID() string { - return t.MetaData.ID -} - -func (t *badTempFile) findTempDirArgs(n ast.Node, c *gosec.Context, suspect ast.Node) *issue.Issue { - if s, e := gosec.GetString(suspect); e == nil { - if t.args.MatchString(s) { - return c.NewIssue(n, t.ID(), t.What, t.Severity, t.Confidence) - } - return nil - } - if ce := t.argCalls.ContainsPkgCallExpr(suspect, c, false); ce != nil { - return c.NewIssue(n, t.ID(), t.What, t.Severity, t.Confidence) - } - if be, ok := suspect.(*ast.BinaryExpr); ok { - if ops := gosec.GetBinaryExprOperands(be); len(ops) != 0 { - return t.findTempDirArgs(n, c, ops[0]) - } - return nil - } - if ce := t.nestedCalls.ContainsPkgCallExpr(suspect, c, false); ce != nil { - return t.findTempDirArgs(n, c, ce.Args[0]) - } - return nil -} - -func (t *badTempFile) Match(n ast.Node, c *gosec.Context) (gi *issue.Issue, err error) { - if node := t.calls.ContainsPkgCallExpr(n, c, false); node != nil { - return t.findTempDirArgs(n, c, node.Args[0]), nil - } - return nil, nil -} - -// NewBadTempFile detects direct writes to predictable path in temporary directory -func NewBadTempFile(id string, _ gosec.Config) (gosec.Rule, []ast.Node) { - calls := gosec.NewCallList() - calls.Add("io/ioutil", "WriteFile") - calls.AddAll("os", "Create", "WriteFile") - argCalls := gosec.NewCallList() - argCalls.Add("os", "TempDir") - nestedCalls := gosec.NewCallList() - nestedCalls.Add("path", "Join") - nestedCalls.Add("path/filepath", "Join") - return &badTempFile{ - calls: calls, - args: 
regexp.MustCompile(`^(/(usr|var))?/tmp(/.*)?$`), - argCalls: argCalls, - nestedCalls: nestedCalls, - MetaData: issue.MetaData{ - ID: id, - Severity: issue.Medium, - Confidence: issue.High, - What: "File creation in shared tmp directory without using ioutil.Tempfile", - }, - }, []ast.Node{(*ast.CallExpr)(nil)} -} diff --git a/vendor/github.com/securego/gosec/v2/rules/templates.go b/vendor/github.com/securego/gosec/v2/rules/templates.go deleted file mode 100644 index 728766f45..000000000 --- a/vendor/github.com/securego/gosec/v2/rules/templates.go +++ /dev/null @@ -1,61 +0,0 @@ -// (c) Copyright 2016 Hewlett Packard Enterprise Development LP -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package rules - -import ( - "go/ast" - - "github.com/securego/gosec/v2" - "github.com/securego/gosec/v2/issue" -) - -type templateCheck struct { - issue.MetaData - calls gosec.CallList -} - -func (t *templateCheck) ID() string { - return t.MetaData.ID -} - -func (t *templateCheck) Match(n ast.Node, c *gosec.Context) (*issue.Issue, error) { - if node := t.calls.ContainsPkgCallExpr(n, c, false); node != nil { - for _, arg := range node.Args { - if _, ok := arg.(*ast.BasicLit); !ok { // basic lits are safe - return c.NewIssue(n, t.ID(), t.What, t.Severity, t.Confidence), nil - } - } - } - return nil, nil -} - -// NewTemplateCheck constructs the template check rule. This rule is used to -// find use of templates where HTML/JS escaping is not being used -func NewTemplateCheck(id string, _ gosec.Config) (gosec.Rule, []ast.Node) { - calls := gosec.NewCallList() - calls.Add("html/template", "HTML") - calls.Add("html/template", "HTMLAttr") - calls.Add("html/template", "JS") - calls.Add("html/template", "URL") - return &templateCheck{ - calls: calls, - MetaData: issue.MetaData{ - ID: id, - Severity: issue.Medium, - Confidence: issue.Low, - What: "The used method does not auto-escape HTML. This can potentially lead to 'Cross-site Scripting' vulnerabilities, in case the attacker controls the input.", - }, - }, []ast.Node{(*ast.CallExpr)(nil)} -} diff --git a/vendor/github.com/securego/gosec/v2/rules/tls.go b/vendor/github.com/securego/gosec/v2/rules/tls.go deleted file mode 100644 index 65a0b5a33..000000000 --- a/vendor/github.com/securego/gosec/v2/rules/tls.go +++ /dev/null @@ -1,239 +0,0 @@ -// (c) Copyright 2016 Hewlett Packard Enterprise Development LP -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -//go:generate tlsconfig - -package rules - -import ( - "crypto/tls" - "fmt" - "go/ast" - "go/types" - "strconv" - - "github.com/securego/gosec/v2" - "github.com/securego/gosec/v2/issue" -) - -type insecureConfigTLS struct { - issue.MetaData - MinVersion int64 - MaxVersion int64 - requiredType string - goodCiphers []string - actualMinVersion int64 - actualMaxVersion int64 -} - -func (t *insecureConfigTLS) ID() string { - return t.MetaData.ID -} - -func stringInSlice(a string, list []string) bool { - for _, b := range list { - if b == a { - return true - } - } - return false -} - -func (t *insecureConfigTLS) processTLSCipherSuites(n ast.Node, c *gosec.Context) *issue.Issue { - if ciphers, ok := n.(*ast.CompositeLit); ok { - for _, cipher := range ciphers.Elts { - if ident, ok := cipher.(*ast.SelectorExpr); ok { - if !stringInSlice(ident.Sel.Name, t.goodCiphers) { - err := fmt.Sprintf("TLS Bad Cipher Suite: %s", ident.Sel.Name) - return c.NewIssue(ident, t.ID(), err, issue.High, issue.High) - } - } - } - } - return nil -} - -func (t *insecureConfigTLS) processTLSConf(n ast.Node, c *gosec.Context) *issue.Issue { - if kve, ok := n.(*ast.KeyValueExpr); ok { - issue := t.processTLSConfVal(kve.Key, kve.Value, c) - if issue != nil { - return issue - } - } else if assign, ok := n.(*ast.AssignStmt); ok { - if len(assign.Lhs) < 1 || len(assign.Rhs) < 1 { - return nil - } - if selector, ok := assign.Lhs[0].(*ast.SelectorExpr); ok { - issue := t.processTLSConfVal(selector.Sel, assign.Rhs[0], c) - if issue != nil { - return issue - } - } - } - return nil -} - -func (t *insecureConfigTLS) processTLSConfVal(key ast.Expr, value ast.Expr, c *gosec.Context) *issue.Issue { - if ident, ok := key.(*ast.Ident); ok { - switch ident.Name { - case "InsecureSkipVerify": - if node, ok := value.(*ast.Ident); ok { - if node.Name != "false" { - return c.NewIssue(value, t.ID(), "TLS InsecureSkipVerify set true.", issue.High, issue.High) - } - } else { - // TODO(tk): symbol tab look up to get the actual value - return c.NewIssue(value, t.ID(), "TLS InsecureSkipVerify may be true.", issue.High, issue.Low) - } - - case "PreferServerCipherSuites": - if node, ok := value.(*ast.Ident); ok { - if node.Name == "false" { - return c.NewIssue(value, t.ID(), "TLS PreferServerCipherSuites set false.", issue.Medium, issue.High) - } - } else { - // TODO(tk): symbol tab look up to get the actual value - return c.NewIssue(value, t.ID(), "TLS PreferServerCipherSuites may be false.", issue.Medium, issue.Low) - } - - case "MinVersion": - if d, ok := value.(*ast.Ident); ok { - obj := d.Obj - if obj == nil { - for _, f := range c.PkgFiles { - obj = f.Scope.Lookup(d.Name) - if obj != nil { - break - } - } - } - if vs, ok := obj.Decl.(*ast.ValueSpec); ok && len(vs.Values) > 0 { - if s, ok := vs.Values[0].(*ast.SelectorExpr); ok { - x := s.X.(*ast.Ident).Name - sel := s.Sel.Name - - for _, imp := range c.Pkg.Imports() { - if imp.Name() == x { - tObj := imp.Scope().Lookup(sel) - if cst, ok := tObj.(*types.Const); ok { - // ..got the value check if this can be translated - if minVersion, err := strconv.ParseInt(cst.Val().String(), 0, 64); err == nil { - t.actualMinVersion = minVersion - } - } - } - } - } - if ival, ierr := gosec.GetInt(vs.Values[0]); ierr == nil { - t.actualMinVersion = ival - } - } - } else if ival, ierr := gosec.GetInt(value); ierr == nil { - t.actualMinVersion = ival - } else { - if se, ok := value.(*ast.SelectorExpr); ok { - if pkg, ok := se.X.(*ast.Ident); ok { - if ip, ok := gosec.GetImportPath(pkg.Name, c); ok && ip == 
"crypto/tls" { - t.actualMinVersion = t.mapVersion(se.Sel.Name) - } - } - } - } - - case "MaxVersion": - if ival, ierr := gosec.GetInt(value); ierr == nil { - t.actualMaxVersion = ival - } else { - if se, ok := value.(*ast.SelectorExpr); ok { - if pkg, ok := se.X.(*ast.Ident); ok { - if ip, ok := gosec.GetImportPath(pkg.Name, c); ok && ip == "crypto/tls" { - t.actualMaxVersion = t.mapVersion(se.Sel.Name) - } - } - } - } - - case "CipherSuites": - if ret := t.processTLSCipherSuites(value, c); ret != nil { - return ret - } - - } - } - return nil -} - -func (t *insecureConfigTLS) mapVersion(version string) int64 { - var v int64 - switch version { - case "VersionTLS13": - v = tls.VersionTLS13 - case "VersionTLS12": - v = tls.VersionTLS12 - case "VersionTLS11": - v = tls.VersionTLS11 - case "VersionTLS10": - v = tls.VersionTLS10 - } - return v -} - -func (t *insecureConfigTLS) checkVersion(n ast.Node, c *gosec.Context) *issue.Issue { - if t.actualMaxVersion == 0 && t.actualMinVersion >= t.MinVersion { - // no warning is generated since the min version is greater than the secure min version - return nil - } - if t.actualMinVersion < t.MinVersion { - return c.NewIssue(n, t.ID(), "TLS MinVersion too low.", issue.High, issue.High) - } - if t.actualMaxVersion < t.MaxVersion { - return c.NewIssue(n, t.ID(), "TLS MaxVersion too low.", issue.High, issue.High) - } - return nil -} - -func (t *insecureConfigTLS) resetVersion() { - t.actualMaxVersion = 0 - t.actualMinVersion = 0 -} - -func (t *insecureConfigTLS) Match(n ast.Node, c *gosec.Context) (*issue.Issue, error) { - if complit, ok := n.(*ast.CompositeLit); ok && complit.Type != nil { - actualType := c.Info.TypeOf(complit.Type) - if actualType != nil && actualType.String() == t.requiredType { - for _, elt := range complit.Elts { - issue := t.processTLSConf(elt, c) - if issue != nil { - return issue, nil - } - } - issue := t.checkVersion(complit, c) - t.resetVersion() - return issue, nil - } - } else { - if assign, ok := n.(*ast.AssignStmt); ok && len(assign.Lhs) > 0 { - if selector, ok := assign.Lhs[0].(*ast.SelectorExpr); ok { - actualType := c.Info.TypeOf(selector.X) - if actualType != nil && actualType.String() == t.requiredType { - issue := t.processTLSConf(assign, c) - if issue != nil { - return issue, nil - } - } - } - } - } - return nil, nil -} diff --git a/vendor/github.com/securego/gosec/v2/rules/tls_config.go b/vendor/github.com/securego/gosec/v2/rules/tls_config.go deleted file mode 100644 index cbbdf7983..000000000 --- a/vendor/github.com/securego/gosec/v2/rules/tls_config.go +++ /dev/null @@ -1,93 +0,0 @@ -package rules - -import ( - "go/ast" - - "github.com/securego/gosec/v2" - "github.com/securego/gosec/v2/issue" -) - -// NewModernTLSCheck creates a check for Modern TLS ciphers -// DO NOT EDIT - generated by tlsconfig tool -func NewModernTLSCheck(id string, _ gosec.Config) (gosec.Rule, []ast.Node) { - return &insecureConfigTLS{ - MetaData: issue.MetaData{ID: id}, - requiredType: "crypto/tls.Config", - MinVersion: 0x0304, - MaxVersion: 0x0304, - goodCiphers: []string{ - "TLS_AES_128_GCM_SHA256", - "TLS_AES_256_GCM_SHA384", - "TLS_CHACHA20_POLY1305_SHA256", - }, - }, []ast.Node{(*ast.CompositeLit)(nil), (*ast.AssignStmt)(nil)} -} - -// NewIntermediateTLSCheck creates a check for Intermediate TLS ciphers -// DO NOT EDIT - generated by tlsconfig tool -func NewIntermediateTLSCheck(id string, _ gosec.Config) (gosec.Rule, []ast.Node) { - return &insecureConfigTLS{ - MetaData: issue.MetaData{ID: id}, - requiredType: "crypto/tls.Config", - 
MinVersion: 0x0303, - MaxVersion: 0x0304, - goodCiphers: []string{ - "TLS_AES_128_GCM_SHA256", - "TLS_AES_256_GCM_SHA384", - "TLS_CHACHA20_POLY1305_SHA256", - "TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256", - "TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256", - "TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384", - "TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384", - "TLS_ECDHE_ECDSA_WITH_CHACHA20_POLY1305", - "TLS_ECDHE_ECDSA_WITH_CHACHA20_POLY1305_SHA256", - "TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305", - "TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305_SHA256", - "TLS_DHE_RSA_WITH_AES_128_GCM_SHA256", - "TLS_DHE_RSA_WITH_AES_256_GCM_SHA384", - }, - }, []ast.Node{(*ast.CompositeLit)(nil), (*ast.AssignStmt)(nil)} -} - -// NewOldTLSCheck creates a check for Old TLS ciphers -// DO NOT EDIT - generated by tlsconfig tool -func NewOldTLSCheck(id string, _ gosec.Config) (gosec.Rule, []ast.Node) { - return &insecureConfigTLS{ - MetaData: issue.MetaData{ID: id}, - requiredType: "crypto/tls.Config", - MinVersion: 0x0301, - MaxVersion: 0x0304, - goodCiphers: []string{ - "TLS_AES_128_GCM_SHA256", - "TLS_AES_256_GCM_SHA384", - "TLS_CHACHA20_POLY1305_SHA256", - "TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256", - "TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256", - "TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384", - "TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384", - "TLS_ECDHE_ECDSA_WITH_CHACHA20_POLY1305", - "TLS_ECDHE_ECDSA_WITH_CHACHA20_POLY1305_SHA256", - "TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305", - "TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305_SHA256", - "TLS_DHE_RSA_WITH_AES_128_GCM_SHA256", - "TLS_DHE_RSA_WITH_AES_256_GCM_SHA384", - "TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256", - "TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256", - "TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA", - "TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA", - "TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA384", - "TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA384", - "TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA", - "TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA", - "TLS_DHE_RSA_WITH_AES_128_CBC_SHA256", - "TLS_DHE_RSA_WITH_AES_256_CBC_SHA256", - "TLS_RSA_WITH_AES_128_GCM_SHA256", - "TLS_RSA_WITH_AES_256_GCM_SHA384", - "TLS_RSA_WITH_AES_128_CBC_SHA256", - "TLS_RSA_WITH_AES_256_CBC_SHA256", - "TLS_RSA_WITH_AES_128_CBC_SHA", - "TLS_RSA_WITH_AES_256_CBC_SHA", - "TLS_RSA_WITH_3DES_EDE_CBC_SHA", - }, - }, []ast.Node{(*ast.CompositeLit)(nil), (*ast.AssignStmt)(nil)} -} diff --git a/vendor/github.com/securego/gosec/v2/rules/unsafe.go b/vendor/github.com/securego/gosec/v2/rules/unsafe.go deleted file mode 100644 index 2e2adca7c..000000000 --- a/vendor/github.com/securego/gosec/v2/rules/unsafe.go +++ /dev/null @@ -1,54 +0,0 @@ -// (c) Copyright 2016 Hewlett Packard Enterprise Development LP -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package rules - -import ( - "go/ast" - - "github.com/securego/gosec/v2" - "github.com/securego/gosec/v2/issue" -) - -type usingUnsafe struct { - issue.MetaData - pkg string - calls []string -} - -func (r *usingUnsafe) ID() string { - return r.MetaData.ID -} - -func (r *usingUnsafe) Match(n ast.Node, c *gosec.Context) (gi *issue.Issue, err error) { - if _, matches := gosec.MatchCallByPackage(n, c, r.pkg, r.calls...); matches { - return c.NewIssue(n, r.ID(), r.What, r.Severity, r.Confidence), nil - } - return nil, nil -} - -// NewUsingUnsafe rule detects the use of the unsafe package. This is only -// really useful for auditing purposes. -func NewUsingUnsafe(id string, _ gosec.Config) (gosec.Rule, []ast.Node) { - return &usingUnsafe{ - pkg: "unsafe", - calls: []string{"Pointer", "String", "StringData", "Slice", "SliceData"}, - MetaData: issue.MetaData{ - ID: id, - What: "Use of unsafe calls should be audited", - Severity: issue.Low, - Confidence: issue.High, - }, - }, []ast.Node{(*ast.CallExpr)(nil)} -} diff --git a/vendor/github.com/securego/gosec/v2/rules/weakcrypto.go b/vendor/github.com/securego/gosec/v2/rules/weakcrypto.go deleted file mode 100644 index 4f2ab11d1..000000000 --- a/vendor/github.com/securego/gosec/v2/rules/weakcrypto.go +++ /dev/null @@ -1,59 +0,0 @@ -// (c) Copyright 2016 Hewlett Packard Enterprise Development LP -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package rules - -import ( - "go/ast" - - "github.com/securego/gosec/v2" - "github.com/securego/gosec/v2/issue" -) - -type usesWeakCryptography struct { - issue.MetaData - blocklist map[string][]string -} - -func (r *usesWeakCryptography) ID() string { - return r.MetaData.ID -} - -func (r *usesWeakCryptography) Match(n ast.Node, c *gosec.Context) (*issue.Issue, error) { - for pkg, funcs := range r.blocklist { - if _, matched := gosec.MatchCallByPackage(n, c, pkg, funcs...); matched { - return c.NewIssue(n, r.ID(), r.What, r.Severity, r.Confidence), nil - } - } - return nil, nil -} - -// NewUsesWeakCryptography detects uses of des.* md5.* or rc4.* -func NewUsesWeakCryptography(id string, _ gosec.Config) (gosec.Rule, []ast.Node) { - calls := make(map[string][]string) - calls["crypto/des"] = []string{"NewCipher", "NewTripleDESCipher"} - calls["crypto/md5"] = []string{"New", "Sum"} - calls["crypto/sha1"] = []string{"New", "Sum"} - calls["crypto/rc4"] = []string{"NewCipher"} - rule := &usesWeakCryptography{ - blocklist: calls, - MetaData: issue.MetaData{ - ID: id, - Severity: issue.Medium, - Confidence: issue.High, - What: "Use of weak cryptographic primitive", - }, - } - return rule, []ast.Node{(*ast.CallExpr)(nil)} -} diff --git a/vendor/github.com/shazow/go-diff/LICENSE b/vendor/github.com/shazow/go-diff/LICENSE deleted file mode 100644 index 85e1e4b33..000000000 --- a/vendor/github.com/shazow/go-diff/LICENSE +++ /dev/null @@ -1,22 +0,0 @@ -The MIT License (MIT) - -Copyright (c) 2015 Andrey Petrov - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. - diff --git a/vendor/github.com/shazow/go-diff/difflib/differ.go b/vendor/github.com/shazow/go-diff/difflib/differ.go deleted file mode 100644 index 43dc84d9a..000000000 --- a/vendor/github.com/shazow/go-diff/difflib/differ.go +++ /dev/null @@ -1,39 +0,0 @@ -// This package implements the diff.Differ interface using github.com/mb0/diff as a backend. -package difflib - -import ( - "io" - "io/ioutil" - - "github.com/pmezard/go-difflib/difflib" -) - -type differ struct{} - -// New returns an implementation of diff.Differ using mb0diff as the backend. -func New() *differ { - return &differ{} -} - -// Diff consumes the entire reader streams into memory before generating a diff -// which then gets filled into the buffer. This implementation stores and -// manipulates all three values in memory. 
-func (diff *differ) Diff(out io.Writer, a io.ReadSeeker, b io.ReadSeeker) error { - var src, dst []byte - var err error - - if src, err = ioutil.ReadAll(a); err != nil { - return err - } - if dst, err = ioutil.ReadAll(b); err != nil { - return err - } - - d := difflib.UnifiedDiff{ - A: difflib.SplitLines(string(src)), - B: difflib.SplitLines(string(dst)), - Context: 3, - } - - return difflib.WriteUnifiedDiff(out, d) -} diff --git a/vendor/github.com/sirupsen/logrus/.gitignore b/vendor/github.com/sirupsen/logrus/.gitignore deleted file mode 100644 index 1fb13abeb..000000000 --- a/vendor/github.com/sirupsen/logrus/.gitignore +++ /dev/null @@ -1,4 +0,0 @@ -logrus -vendor - -.idea/ diff --git a/vendor/github.com/sirupsen/logrus/.golangci.yml b/vendor/github.com/sirupsen/logrus/.golangci.yml deleted file mode 100644 index 65dc28503..000000000 --- a/vendor/github.com/sirupsen/logrus/.golangci.yml +++ /dev/null @@ -1,40 +0,0 @@ -run: - # do not run on test files yet - tests: false - -# all available settings of specific linters -linters-settings: - errcheck: - # report about not checking of errors in type assetions: `a := b.(MyStruct)`; - # default is false: such cases aren't reported by default. - check-type-assertions: false - - # report about assignment of errors to blank identifier: `num, _ := strconv.Atoi(numStr)`; - # default is false: such cases aren't reported by default. - check-blank: false - - lll: - line-length: 100 - tab-width: 4 - - prealloc: - simple: false - range-loops: false - for-loops: false - - whitespace: - multi-if: false # Enforces newlines (or comments) after every multi-line if statement - multi-func: false # Enforces newlines (or comments) after every multi-line function signature - -linters: - enable: - - megacheck - - govet - disable: - - maligned - - prealloc - disable-all: false - presets: - - bugs - - unused - fast: false diff --git a/vendor/github.com/sirupsen/logrus/.travis.yml b/vendor/github.com/sirupsen/logrus/.travis.yml deleted file mode 100644 index c1dbd5a3a..000000000 --- a/vendor/github.com/sirupsen/logrus/.travis.yml +++ /dev/null @@ -1,15 +0,0 @@ -language: go -go_import_path: github.com/sirupsen/logrus -git: - depth: 1 -env: - - GO111MODULE=on -go: 1.15.x -os: linux -install: - - ./travis/install.sh -script: - - cd ci - - go run mage.go -v -w ../ crossBuild - - go run mage.go -v -w ../ lint - - go run mage.go -v -w ../ test diff --git a/vendor/github.com/sirupsen/logrus/CHANGELOG.md b/vendor/github.com/sirupsen/logrus/CHANGELOG.md deleted file mode 100644 index 7567f6128..000000000 --- a/vendor/github.com/sirupsen/logrus/CHANGELOG.md +++ /dev/null @@ -1,259 +0,0 @@ -# 1.8.1 -Code quality: - * move magefile in its own subdir/submodule to remove magefile dependency on logrus consumer - * improve timestamp format documentation - -Fixes: - * fix race condition on logger hooks - - -# 1.8.0 - -Correct versioning number replacing v1.7.1. - -# 1.7.1 - -Beware this release has introduced a new public API and its semver is therefore incorrect. 
- -Code quality: - * use go 1.15 in travis - * use magefile as task runner - -Fixes: - * small fixes about new go 1.13 error formatting system - * Fix for long time race condiction with mutating data hooks - -Features: - * build support for zos - -# 1.7.0 -Fixes: - * the dependency toward a windows terminal library has been removed - -Features: - * a new buffer pool management API has been added - * a set of `Fn()` functions have been added - -# 1.6.0 -Fixes: - * end of line cleanup - * revert the entry concurrency bug fix whic leads to deadlock under some circumstances - * update dependency on go-windows-terminal-sequences to fix a crash with go 1.14 - -Features: - * add an option to the `TextFormatter` to completely disable fields quoting - -# 1.5.0 -Code quality: - * add golangci linter run on travis - -Fixes: - * add mutex for hooks concurrent access on `Entry` data - * caller function field for go1.14 - * fix build issue for gopherjs target - -Feature: - * add an hooks/writer sub-package whose goal is to split output on different stream depending on the trace level - * add a `DisableHTMLEscape` option in the `JSONFormatter` - * add `ForceQuote` and `PadLevelText` options in the `TextFormatter` - -# 1.4.2 - * Fixes build break for plan9, nacl, solaris -# 1.4.1 -This new release introduces: - * Enhance TextFormatter to not print caller information when they are empty (#944) - * Remove dependency on golang.org/x/crypto (#932, #943) - -Fixes: - * Fix Entry.WithContext method to return a copy of the initial entry (#941) - -# 1.4.0 -This new release introduces: - * Add `DeferExitHandler`, similar to `RegisterExitHandler` but prepending the handler to the list of handlers (semantically like `defer`) (#848). - * Add `CallerPrettyfier` to `JSONFormatter` and `TextFormatter` (#909, #911) - * Add `Entry.WithContext()` and `Entry.Context`, to set a context on entries to be used e.g. in hooks (#919). - -Fixes: - * Fix wrong method calls `Logger.Print` and `Logger.Warningln` (#893). - * Update `Entry.Logf` to not do string formatting unless the log level is enabled (#903) - * Fix infinite recursion on unknown `Level.String()` (#907) - * Fix race condition in `getCaller` (#916). - - -# 1.3.0 -This new release introduces: - * Log, Logf, Logln functions for Logger and Entry that take a Level - -Fixes: - * Building prometheus node_exporter on AIX (#840) - * Race condition in TextFormatter (#468) - * Travis CI import path (#868) - * Remove coloured output on Windows (#862) - * Pointer to func as field in JSONFormatter (#870) - * Properly marshal Levels (#873) - -# 1.2.0 -This new release introduces: - * A new method `SetReportCaller` in the `Logger` to enable the file, line and calling function from which the trace has been issued - * A new trace level named `Trace` whose level is below `Debug` - * A configurable exit function to be called upon a Fatal trace - * The `Level` object now implements `encoding.TextUnmarshaler` interface - -# 1.1.1 -This is a bug fix release. 
- * fix the build break on Solaris - * don't drop a whole trace in JSONFormatter when a field param is a function pointer which can not be serialized - -# 1.1.0 -This new release introduces: - * several fixes: - * a fix for a race condition on entry formatting - * proper cleanup of previously used entries before putting them back in the pool - * the extra new line at the end of message in text formatter has been removed - * a new global public API to check if a level is activated: IsLevelEnabled - * the following methods have been added to the Logger object - * IsLevelEnabled - * SetFormatter - * SetOutput - * ReplaceHooks - * introduction of go module - * an indent configuration for the json formatter - * output colour support for windows - * the field sort function is now configurable for text formatter - * the CLICOLOR and CLICOLOR\_FORCE environment variable support in text formater - -# 1.0.6 - -This new release introduces: - * a new api WithTime which allows to easily force the time of the log entry - which is mostly useful for logger wrapper - * a fix reverting the immutability of the entry given as parameter to the hooks - a new configuration field of the json formatter in order to put all the fields - in a nested dictionnary - * a new SetOutput method in the Logger - * a new configuration of the textformatter to configure the name of the default keys - * a new configuration of the text formatter to disable the level truncation - -# 1.0.5 - -* Fix hooks race (#707) -* Fix panic deadlock (#695) - -# 1.0.4 - -* Fix race when adding hooks (#612) -* Fix terminal check in AppEngine (#635) - -# 1.0.3 - -* Replace example files with testable examples - -# 1.0.2 - -* bug: quote non-string values in text formatter (#583) -* Make (*Logger) SetLevel a public method - -# 1.0.1 - -* bug: fix escaping in text formatter (#575) - -# 1.0.0 - -* Officially changed name to lower-case -* bug: colors on Windows 10 (#541) -* bug: fix race in accessing level (#512) - -# 0.11.5 - -* feature: add writer and writerlevel to entry (#372) - -# 0.11.4 - -* bug: fix undefined variable on solaris (#493) - -# 0.11.3 - -* formatter: configure quoting of empty values (#484) -* formatter: configure quoting character (default is `"`) (#484) -* bug: fix not importing io correctly in non-linux environments (#481) - -# 0.11.2 - -* bug: fix windows terminal detection (#476) - -# 0.11.1 - -* bug: fix tty detection with custom out (#471) - -# 0.11.0 - -* performance: Use bufferpool to allocate (#370) -* terminal: terminal detection for app-engine (#343) -* feature: exit handler (#375) - -# 0.10.0 - -* feature: Add a test hook (#180) -* feature: `ParseLevel` is now case-insensitive (#326) -* feature: `FieldLogger` interface that generalizes `Logger` and `Entry` (#308) -* performance: avoid re-allocations on `WithFields` (#335) - -# 0.9.0 - -* logrus/text_formatter: don't emit empty msg -* logrus/hooks/airbrake: move out of main repository -* logrus/hooks/sentry: move out of main repository -* logrus/hooks/papertrail: move out of main repository -* logrus/hooks/bugsnag: move out of main repository -* logrus/core: run tests with `-race` -* logrus/core: detect TTY based on `stderr` -* logrus/core: support `WithError` on logger -* logrus/core: Solaris support - -# 0.8.7 - -* logrus/core: fix possible race (#216) -* logrus/doc: small typo fixes and doc improvements - - -# 0.8.6 - -* hooks/raven: allow passing an initialized client - -# 0.8.5 - -* logrus/core: revert #208 - -# 0.8.4 - -* formatter/text: fix data race (#218) - -# 
0.8.3 - -* logrus/core: fix entry log level (#208) -* logrus/core: improve performance of text formatter by 40% -* logrus/core: expose `LevelHooks` type -* logrus/core: add support for DragonflyBSD and NetBSD -* formatter/text: print structs more verbosely - -# 0.8.2 - -* logrus: fix more Fatal family functions - -# 0.8.1 - -* logrus: fix not exiting on `Fatalf` and `Fatalln` - -# 0.8.0 - -* logrus: defaults to stderr instead of stdout -* hooks/sentry: add special field for `*http.Request` -* formatter/text: ignore Windows for colors - -# 0.7.3 - -* formatter/\*: allow configuration of timestamp layout - -# 0.7.2 - -* formatter/text: Add configuration option for time format (#158) diff --git a/vendor/github.com/sirupsen/logrus/LICENSE b/vendor/github.com/sirupsen/logrus/LICENSE deleted file mode 100644 index f090cb42f..000000000 --- a/vendor/github.com/sirupsen/logrus/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -The MIT License (MIT) - -Copyright (c) 2014 Simon Eskildsen - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. diff --git a/vendor/github.com/sirupsen/logrus/README.md b/vendor/github.com/sirupsen/logrus/README.md deleted file mode 100644 index d1d4a85fd..000000000 --- a/vendor/github.com/sirupsen/logrus/README.md +++ /dev/null @@ -1,515 +0,0 @@ -# Logrus :walrus: [![Build Status](https://github.com/sirupsen/logrus/workflows/CI/badge.svg)](https://github.com/sirupsen/logrus/actions?query=workflow%3ACI) [![Build Status](https://travis-ci.org/sirupsen/logrus.svg?branch=master)](https://travis-ci.org/sirupsen/logrus) [![Go Reference](https://pkg.go.dev/badge/github.com/sirupsen/logrus.svg)](https://pkg.go.dev/github.com/sirupsen/logrus) - -Logrus is a structured logger for Go (golang), completely API compatible with -the standard library logger. - -**Logrus is in maintenance-mode.** We will not be introducing new features. It's -simply too hard to do in a way that won't break many people's projects, which is -the last thing you want from your Logging library (again...). - -This does not mean Logrus is dead. Logrus will continue to be maintained for -security, (backwards compatible) bug fixes, and performance (where we are -limited by the interface). - -I believe Logrus' biggest contribution is to have played a part in today's -widespread use of structured logging in Golang. There doesn't seem to be a -reason to do a major, breaking iteration into Logrus V2, since the fantastic Go -community has built those independently. Many fantastic alternatives have sprung -up. 
Logrus would look like those, had it been re-designed with what we know -about structured logging in Go today. Check out, for example, -[Zerolog][zerolog], [Zap][zap], and [Apex][apex]. - -[zerolog]: https://github.com/rs/zerolog -[zap]: https://github.com/uber-go/zap -[apex]: https://github.com/apex/log - -**Seeing weird case-sensitive problems?** It's in the past been possible to -import Logrus as both upper- and lower-case. Due to the Go package environment, -this caused issues in the community and we needed a standard. Some environments -experienced problems with the upper-case variant, so the lower-case was decided. -Everything using `logrus` will need to use the lower-case: -`github.com/sirupsen/logrus`. Any package that isn't, should be changed. - -To fix Glide, see [these -comments](https://github.com/sirupsen/logrus/issues/553#issuecomment-306591437). -For an in-depth explanation of the casing issue, see [this -comment](https://github.com/sirupsen/logrus/issues/570#issuecomment-313933276). - -Nicely color-coded in development (when a TTY is attached, otherwise just -plain text): - -![Colored](http://i.imgur.com/PY7qMwd.png) - -With `log.SetFormatter(&log.JSONFormatter{})`, for easy parsing by logstash -or Splunk: - -```text -{"animal":"walrus","level":"info","msg":"A group of walrus emerges from the -ocean","size":10,"time":"2014-03-10 19:57:38.562264131 -0400 EDT"} - -{"level":"warning","msg":"The group's number increased tremendously!", -"number":122,"omg":true,"time":"2014-03-10 19:57:38.562471297 -0400 EDT"} - -{"animal":"walrus","level":"info","msg":"A giant walrus appears!", -"size":10,"time":"2014-03-10 19:57:38.562500591 -0400 EDT"} - -{"animal":"walrus","level":"info","msg":"Tremendously sized cow enters the ocean.", -"size":9,"time":"2014-03-10 19:57:38.562527896 -0400 EDT"} - -{"level":"fatal","msg":"The ice breaks!","number":100,"omg":true, -"time":"2014-03-10 19:57:38.562543128 -0400 EDT"} -``` - -With the default `log.SetFormatter(&log.TextFormatter{})` when a TTY is not -attached, the output is compatible with the -[logfmt](http://godoc.org/github.com/kr/logfmt) format: - -```text -time="2015-03-26T01:27:38-04:00" level=debug msg="Started observing beach" animal=walrus number=8 -time="2015-03-26T01:27:38-04:00" level=info msg="A group of walrus emerges from the ocean" animal=walrus size=10 -time="2015-03-26T01:27:38-04:00" level=warning msg="The group's number increased tremendously!" number=122 omg=true -time="2015-03-26T01:27:38-04:00" level=debug msg="Temperature changes" temperature=-4 -time="2015-03-26T01:27:38-04:00" level=panic msg="It's over 9000!" animal=orca size=9009 -time="2015-03-26T01:27:38-04:00" level=fatal msg="The ice breaks!" 
err=&{0x2082280c0 map[animal:orca size:9009] 2015-03-26 01:27:38.441574009 -0400 EDT panic It's over 9000!} number=100 omg=true -``` -To ensure this behaviour even if a TTY is attached, set your formatter as follows: - -```go - log.SetFormatter(&log.TextFormatter{ - DisableColors: true, - FullTimestamp: true, - }) -``` - -#### Logging Method Name - -If you wish to add the calling method as a field, instruct the logger via: -```go -log.SetReportCaller(true) -``` -This adds the caller as 'method' like so: - -```json -{"animal":"penguin","level":"fatal","method":"github.com/sirupsen/arcticcreatures.migrate","msg":"a penguin swims by", -"time":"2014-03-10 19:57:38.562543129 -0400 EDT"} -``` - -```text -time="2015-03-26T01:27:38-04:00" level=fatal method=github.com/sirupsen/arcticcreatures.migrate msg="a penguin swims by" animal=penguin -``` -Note that this does add measurable overhead - the cost will depend on the version of Go, but is -between 20 and 40% in recent tests with 1.6 and 1.7. You can validate this in your -environment via benchmarks: -``` -go test -bench=.*CallerTracing -``` - - -#### Case-sensitivity - -The organization's name was changed to lower-case--and this will not be changed -back. If you are getting import conflicts due to case sensitivity, please use -the lower-case import: `github.com/sirupsen/logrus`. - -#### Example - -The simplest way to use Logrus is simply the package-level exported logger: - -```go -package main - -import ( - log "github.com/sirupsen/logrus" -) - -func main() { - log.WithFields(log.Fields{ - "animal": "walrus", - }).Info("A walrus appears") -} -``` - -Note that it's completely api-compatible with the stdlib logger, so you can -replace your `log` imports everywhere with `log "github.com/sirupsen/logrus"` -and you'll now have the flexibility of Logrus. You can customize it all you -want: - -```go -package main - -import ( - "os" - log "github.com/sirupsen/logrus" -) - -func init() { - // Log as JSON instead of the default ASCII formatter. - log.SetFormatter(&log.JSONFormatter{}) - - // Output to stdout instead of the default stderr - // Can be any io.Writer, see below for File example - log.SetOutput(os.Stdout) - - // Only log the warning severity or above. - log.SetLevel(log.WarnLevel) -} - -func main() { - log.WithFields(log.Fields{ - "animal": "walrus", - "size": 10, - }).Info("A group of walrus emerges from the ocean") - - log.WithFields(log.Fields{ - "omg": true, - "number": 122, - }).Warn("The group's number increased tremendously!") - - log.WithFields(log.Fields{ - "omg": true, - "number": 100, - }).Fatal("The ice breaks!") - - // A common pattern is to re-use fields between logging statements by re-using - // the logrus.Entry returned from WithFields() - contextLogger := log.WithFields(log.Fields{ - "common": "this is a common field", - "other": "I also should be logged always", - }) - - contextLogger.Info("I'll be logged with common and other field") - contextLogger.Info("Me too") -} -``` - -For more advanced usage such as logging to multiple locations from the same -application, you can also create an instance of the `logrus` Logger: - -```go -package main - -import ( - "os" - "github.com/sirupsen/logrus" -) - -// Create a new instance of the logger. You can have any number of instances. -var log = logrus.New() - -func main() { - // The API for setting attributes is a little different than the package level - // exported logger. See Godoc. 
- log.Out = os.Stdout - - // You could set this to any `io.Writer` such as a file - // file, err := os.OpenFile("logrus.log", os.O_CREATE|os.O_WRONLY|os.O_APPEND, 0666) - // if err == nil { - // log.Out = file - // } else { - // log.Info("Failed to log to file, using default stderr") - // } - - log.WithFields(logrus.Fields{ - "animal": "walrus", - "size": 10, - }).Info("A group of walrus emerges from the ocean") -} -``` - -#### Fields - -Logrus encourages careful, structured logging through logging fields instead of -long, unparseable error messages. For example, instead of: `log.Fatalf("Failed -to send event %s to topic %s with key %d")`, you should log the much more -discoverable: - -```go -log.WithFields(log.Fields{ - "event": event, - "topic": topic, - "key": key, -}).Fatal("Failed to send event") -``` - -We've found this API forces you to think about logging in a way that produces -much more useful logging messages. We've been in countless situations where just -a single added field to a log statement that was already there would've saved us -hours. The `WithFields` call is optional. - -In general, with Logrus using any of the `printf`-family functions should be -seen as a hint you should add a field, however, you can still use the -`printf`-family functions with Logrus. - -#### Default Fields - -Often it's helpful to have fields _always_ attached to log statements in an -application or parts of one. For example, you may want to always log the -`request_id` and `user_ip` in the context of a request. Instead of writing -`log.WithFields(log.Fields{"request_id": request_id, "user_ip": user_ip})` on -every line, you can create a `logrus.Entry` to pass around instead: - -```go -requestLogger := log.WithFields(log.Fields{"request_id": request_id, "user_ip": user_ip}) -requestLogger.Info("something happened on that request") # will log request_id and user_ip -requestLogger.Warn("something not great happened") -``` - -#### Hooks - -You can add hooks for logging levels. For example to send errors to an exception -tracking service on `Error`, `Fatal` and `Panic`, info to StatsD or log to -multiple places simultaneously, e.g. syslog. - -Logrus comes with [built-in hooks](hooks/). Add those, or your custom hook, in -`init`: - -```go -import ( - log "github.com/sirupsen/logrus" - "gopkg.in/gemnasium/logrus-airbrake-hook.v2" // the package is named "airbrake" - logrus_syslog "github.com/sirupsen/logrus/hooks/syslog" - "log/syslog" -) - -func init() { - - // Use the Airbrake hook to report errors that have Error severity or above to - // an exception tracker. You can create custom hooks, see the Hooks section. - log.AddHook(airbrake.NewHook(123, "xyz", "production")) - - hook, err := logrus_syslog.NewSyslogHook("udp", "localhost:514", syslog.LOG_INFO, "") - if err != nil { - log.Error("Unable to connect to local syslog daemon") - } else { - log.AddHook(hook) - } -} -``` -Note: Syslog hook also support connecting to local syslog (Ex. "/dev/log" or "/var/run/syslog" or "/var/run/log"). For the detail, please check the [syslog hook README](hooks/syslog/README.md). - -A list of currently known service hooks can be found in this wiki [page](https://github.com/sirupsen/logrus/wiki/Hooks) - - -#### Level logging - -Logrus has seven logging levels: Trace, Debug, Info, Warning, Error, Fatal and Panic. 
- -```go -log.Trace("Something very low level.") -log.Debug("Useful debugging information.") -log.Info("Something noteworthy happened!") -log.Warn("You should probably take a look at this.") -log.Error("Something failed but I'm not quitting.") -// Calls os.Exit(1) after logging -log.Fatal("Bye.") -// Calls panic() after logging -log.Panic("I'm bailing.") -``` - -You can set the logging level on a `Logger`, then it will only log entries with -that severity or anything above it: - -```go -// Will log anything that is info or above (warn, error, fatal, panic). Default. -log.SetLevel(log.InfoLevel) -``` - -It may be useful to set `log.Level = logrus.DebugLevel` in a debug or verbose -environment if your application has that. - -Note: If you want different log levels for global (`log.SetLevel(...)`) and syslog logging, please check the [syslog hook README](hooks/syslog/README.md#different-log-levels-for-local-and-remote-logging). - -#### Entries - -Besides the fields added with `WithField` or `WithFields` some fields are -automatically added to all logging events: - -1. `time`. The timestamp when the entry was created. -2. `msg`. The logging message passed to `{Info,Warn,Error,Fatal,Panic}` after - the `AddFields` call. E.g. `Failed to send event.` -3. `level`. The logging level. E.g. `info`. - -#### Environments - -Logrus has no notion of environment. - -If you wish for hooks and formatters to only be used in specific environments, -you should handle that yourself. For example, if your application has a global -variable `Environment`, which is a string representation of the environment you -could do: - -```go -import ( - log "github.com/sirupsen/logrus" -) - -func init() { - // do something here to set environment depending on an environment variable - // or command-line flag - if Environment == "production" { - log.SetFormatter(&log.JSONFormatter{}) - } else { - // The TextFormatter is default, you don't actually have to do this. - log.SetFormatter(&log.TextFormatter{}) - } -} -``` - -This configuration is how `logrus` was intended to be used, but JSON in -production is mostly only useful if you do log aggregation with tools like -Splunk or Logstash. - -#### Formatters - -The built-in logging formatters are: - -* `logrus.TextFormatter`. Logs the event in colors if stdout is a tty, otherwise - without colors. - * *Note:* to force colored output when there is no TTY, set the `ForceColors` - field to `true`. To force no colored output even if there is a TTY set the - `DisableColors` field to `true`. For Windows, see - [github.com/mattn/go-colorable](https://github.com/mattn/go-colorable). - * When colors are enabled, levels are truncated to 4 characters by default. To disable - truncation set the `DisableLevelTruncation` field to `true`. - * When outputting to a TTY, it's often helpful to visually scan down a column where all the levels are the same width. Setting the `PadLevelText` field to `true` enables this behavior, by adding padding to the level text. - * All options are listed in the [generated docs](https://godoc.org/github.com/sirupsen/logrus#TextFormatter). -* `logrus.JSONFormatter`. Logs fields as JSON. - * All options are listed in the [generated docs](https://godoc.org/github.com/sirupsen/logrus#JSONFormatter). - -Third party logging formatters: - -* [`FluentdFormatter`](https://github.com/joonix/log). Formats entries that can be parsed by Kubernetes and Google Container Engine. -* [`GELF`](https://github.com/fabienm/go-logrus-formatters). 
Formats entries so they comply to Graylog's [GELF 1.1 specification](http://docs.graylog.org/en/2.4/pages/gelf.html). -* [`logstash`](https://github.com/bshuster-repo/logrus-logstash-hook). Logs fields as [Logstash](http://logstash.net) Events. -* [`prefixed`](https://github.com/x-cray/logrus-prefixed-formatter). Displays log entry source along with alternative layout. -* [`zalgo`](https://github.com/aybabtme/logzalgo). Invoking the Power of Zalgo. -* [`nested-logrus-formatter`](https://github.com/antonfisher/nested-logrus-formatter). Converts logrus fields to a nested structure. -* [`powerful-logrus-formatter`](https://github.com/zput/zxcTool). get fileName, log's line number and the latest function's name when print log; Sava log to files. -* [`caption-json-formatter`](https://github.com/nolleh/caption_json_formatter). logrus's message json formatter with human-readable caption added. - -You can define your formatter by implementing the `Formatter` interface, -requiring a `Format` method. `Format` takes an `*Entry`. `entry.Data` is a -`Fields` type (`map[string]interface{}`) with all your fields as well as the -default ones (see Entries section above): - -```go -type MyJSONFormatter struct { -} - -log.SetFormatter(new(MyJSONFormatter)) - -func (f *MyJSONFormatter) Format(entry *Entry) ([]byte, error) { - // Note this doesn't include Time, Level and Message which are available on - // the Entry. Consult `godoc` on information about those fields or read the - // source of the official loggers. - serialized, err := json.Marshal(entry.Data) - if err != nil { - return nil, fmt.Errorf("Failed to marshal fields to JSON, %w", err) - } - return append(serialized, '\n'), nil -} -``` - -#### Logger as an `io.Writer` - -Logrus can be transformed into an `io.Writer`. That writer is the end of an `io.Pipe` and it is your responsibility to close it. - -```go -w := logger.Writer() -defer w.Close() - -srv := http.Server{ - // create a stdlib log.Logger that writes to - // logrus.Logger. - ErrorLog: log.New(w, "", 0), -} -``` - -Each line written to that writer will be printed the usual way, using formatters -and hooks. The level for those entries is `info`. - -This means that we can override the standard library logger easily: - -```go -logger := logrus.New() -logger.Formatter = &logrus.JSONFormatter{} - -// Use logrus for standard log output -// Note that `log` here references stdlib's log -// Not logrus imported under the name `log`. -log.SetOutput(logger.Writer()) -``` - -#### Rotation - -Log rotation is not provided with Logrus. Log rotation should be done by an -external program (like `logrotate(8)`) that can compress and delete old log -entries. It should not be a feature of the application-level logger. - -#### Tools - -| Tool | Description | -| ---- | ----------- | -|[Logrus Mate](https://github.com/gogap/logrus_mate)|Logrus mate is a tool for Logrus to manage loggers, you can initial logger's level, hook and formatter by config file, the logger will be generated with different configs in different environments.| -|[Logrus Viper Helper](https://github.com/heirko/go-contrib/tree/master/logrusHelper)|An Helper around Logrus to wrap with spf13/Viper to load configuration with fangs! And to simplify Logrus configuration use some behavior of [Logrus Mate](https://github.com/gogap/logrus_mate). [sample](https://github.com/heirko/iris-contrib/blob/master/middleware/logrus-logger/example) | - -#### Testing - -Logrus has a built in facility for asserting the presence of log messages. 
-
-Third party logging formatters:
-
-* [`FluentdFormatter`](https://github.com/joonix/log). Formats entries that can be parsed by Kubernetes and Google Container Engine.
-* [`GELF`](https://github.com/fabienm/go-logrus-formatters). Formats entries so they comply with Graylog's [GELF 1.1 specification](http://docs.graylog.org/en/2.4/pages/gelf.html).
-* [`logstash`](https://github.com/bshuster-repo/logrus-logstash-hook). Logs fields as [Logstash](http://logstash.net) Events.
-* [`prefixed`](https://github.com/x-cray/logrus-prefixed-formatter). Displays the log entry source along with an alternative layout.
-* [`zalgo`](https://github.com/aybabtme/logzalgo). Invoking the Power of Zalgo.
-* [`nested-logrus-formatter`](https://github.com/antonfisher/nested-logrus-formatter). Converts logrus fields to a nested structure.
-* [`powerful-logrus-formatter`](https://github.com/zput/zxcTool). Gets the file name, line number and calling function name when printing logs; saves logs to files.
-* [`caption-json-formatter`](https://github.com/nolleh/caption_json_formatter). Logrus message JSON formatter with a human-readable caption added.
-
-You can define your own formatter by implementing the `Formatter` interface,
-which requires a `Format` method. `Format` takes an `*Entry`. `entry.Data` is a
-`Fields` type (`map[string]interface{}`) with all your fields as well as the
-default ones (see the Entries section above):
-
-```go
-type MyJSONFormatter struct {
-}
-
-log.SetFormatter(new(MyJSONFormatter))
-
-func (f *MyJSONFormatter) Format(entry *Entry) ([]byte, error) {
-  // Note this doesn't include Time, Level and Message which are available on
-  // the Entry. Consult `godoc` on information about those fields or read the
-  // source of the official loggers.
-  serialized, err := json.Marshal(entry.Data)
-  if err != nil {
-    return nil, fmt.Errorf("failed to marshal fields to JSON, %w", err)
-  }
-  return append(serialized, '\n'), nil
-}
-```
-
-#### Logger as an `io.Writer`
-
-Logrus can be transformed into an `io.Writer`. That writer is the end of an `io.Pipe` and it is your responsibility to close it.
-
-```go
-w := logger.Writer()
-defer w.Close()
-
-srv := http.Server{
-    // create a stdlib log.Logger that writes to
-    // logrus.Logger.
-    ErrorLog: log.New(w, "", 0),
-}
-```
-
-Each line written to that writer will be printed the usual way, using formatters
-and hooks. The level for those entries is `info`.
-
-This means that we can override the standard library logger easily:
-
-```go
-logger := logrus.New()
-logger.Formatter = &logrus.JSONFormatter{}
-
-// Use logrus for standard log output
-// Note that `log` here references stdlib's log
-// Not logrus imported under the name `log`.
-log.SetOutput(logger.Writer())
-```
-
-#### Rotation
-
-Log rotation is not provided with Logrus. Log rotation should be done by an
-external program (like `logrotate(8)`) that can compress and delete old log
-entries. It should not be a feature of the application-level logger.
-
-#### Tools
-
-| Tool | Description |
-| ---- | ----------- |
-|[Logrus Mate](https://github.com/gogap/logrus_mate)|Logrus Mate is a tool for Logrus to manage loggers. You can initialise a logger's level, hooks and formatter from a config file, so the logger is generated with different configs in different environments.|
-|[Logrus Viper Helper](https://github.com/heirko/go-contrib/tree/master/logrusHelper)|A helper around Logrus that wraps spf13/Viper to load configuration with fangs, and simplifies Logrus configuration using some behavior of [Logrus Mate](https://github.com/gogap/logrus_mate). [sample](https://github.com/heirko/iris-contrib/blob/master/middleware/logrus-logger/example)|
-
-#### Testing
-
-Logrus has a built-in facility for asserting the presence of log messages. This is implemented through the `test` hook and provides:
-
-* decorators for an existing logger (`test.NewLocal` and `test.NewGlobal`) which basically just add the `test` hook
-* a test logger (`test.NewNullLogger`) that just records log messages (and does not output any):
-
-```go
-import (
-  "testing"
-
-  "github.com/sirupsen/logrus"
-  "github.com/sirupsen/logrus/hooks/test"
-  "github.com/stretchr/testify/assert"
-)
-
-func TestSomething(t *testing.T) {
-  logger, hook := test.NewNullLogger()
-  logger.Error("Hello error")
-
-  assert.Equal(t, 1, len(hook.Entries))
-  assert.Equal(t, logrus.ErrorLevel, hook.LastEntry().Level)
-  assert.Equal(t, "Hello error", hook.LastEntry().Message)
-
-  hook.Reset()
-  assert.Nil(t, hook.LastEntry())
-}
-```
-
-#### Fatal handlers
-
-Logrus can register one or more functions that will be called when any `fatal`
-level message is logged. The registered handlers will be executed before
-logrus performs an `os.Exit(1)`. This behavior may be helpful if callers need
-to shut down gracefully. Unlike a `panic("Something went wrong...")` call, which can be intercepted with a deferred `recover`, a call to `os.Exit(1)` cannot be intercepted.
-
-```go
-handler := func() {
-  // gracefully shut something down...
-}
-logrus.RegisterExitHandler(handler)
-```
-
-#### Thread safety
-
-By default, Logger is protected by a mutex for concurrent writes. The mutex is held when calling hooks and writing logs.
-If you are sure such locking is not needed, you can call logger.SetNoLock() to disable the locking.
-
-Situations when locking is not needed include:
-
-* You have no hooks registered, or calling the hooks is already thread-safe.
-
-* Writing to logger.Out is already thread-safe, for example:
-
-  1) logger.Out is protected by locks.
-
-  2) logger.Out is an os.File handle opened with the `O_APPEND` flag, and every write is smaller than 4k. (This allows multi-thread/multi-process writing.)
-
-  (Refer to http://www.notthewizard.com/2014/06/17/are-files-appends-really-atomic/)
diff --git a/vendor/github.com/sirupsen/logrus/alt_exit.go b/vendor/github.com/sirupsen/logrus/alt_exit.go
deleted file mode 100644
index 8fd189e1c..000000000
--- a/vendor/github.com/sirupsen/logrus/alt_exit.go
+++ /dev/null
@@ -1,76 +0,0 @@
-package logrus
-
-// The following code was sourced and modified from the
-// https://github.com/tebeka/atexit package governed by the following license:
-//
-// Copyright (c) 2012 Miki Tebeka .
-//
-// Permission is hereby granted, free of charge, to any person obtaining a copy of
-// this software and associated documentation files (the "Software"), to deal in
-// the Software without restriction, including without limitation the rights to
-// use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
-// the Software, and to permit persons to whom the Software is furnished to do so,
-// subject to the following conditions:
-//
-// The above copyright notice and this permission notice shall be included in all
-// copies or substantial portions of the Software.
-//
-// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
-// FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
-// COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
-// IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
-// CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
- -import ( - "fmt" - "os" -) - -var handlers = []func(){} - -func runHandler(handler func()) { - defer func() { - if err := recover(); err != nil { - fmt.Fprintln(os.Stderr, "Error: Logrus exit handler error:", err) - } - }() - - handler() -} - -func runHandlers() { - for _, handler := range handlers { - runHandler(handler) - } -} - -// Exit runs all the Logrus atexit handlers and then terminates the program using os.Exit(code) -func Exit(code int) { - runHandlers() - os.Exit(code) -} - -// RegisterExitHandler appends a Logrus Exit handler to the list of handlers, -// call logrus.Exit to invoke all handlers. The handlers will also be invoked when -// any Fatal log entry is made. -// -// This method is useful when a caller wishes to use logrus to log a fatal -// message but also needs to gracefully shutdown. An example usecase could be -// closing database connections, or sending a alert that the application is -// closing. -func RegisterExitHandler(handler func()) { - handlers = append(handlers, handler) -} - -// DeferExitHandler prepends a Logrus Exit handler to the list of handlers, -// call logrus.Exit to invoke all handlers. The handlers will also be invoked when -// any Fatal log entry is made. -// -// This method is useful when a caller wishes to use logrus to log a fatal -// message but also needs to gracefully shutdown. An example usecase could be -// closing database connections, or sending a alert that the application is -// closing. -func DeferExitHandler(handler func()) { - handlers = append([]func(){handler}, handlers...) -} diff --git a/vendor/github.com/sirupsen/logrus/appveyor.yml b/vendor/github.com/sirupsen/logrus/appveyor.yml deleted file mode 100644 index df9d65c3a..000000000 --- a/vendor/github.com/sirupsen/logrus/appveyor.yml +++ /dev/null @@ -1,14 +0,0 @@ -version: "{build}" -platform: x64 -clone_folder: c:\gopath\src\github.com\sirupsen\logrus -environment: - GOPATH: c:\gopath -branches: - only: - - master -install: - - set PATH=%GOPATH%\bin;c:\go\bin;%PATH% - - go version -build_script: - - go get -t - - go test diff --git a/vendor/github.com/sirupsen/logrus/buffer_pool.go b/vendor/github.com/sirupsen/logrus/buffer_pool.go deleted file mode 100644 index c7787f77c..000000000 --- a/vendor/github.com/sirupsen/logrus/buffer_pool.go +++ /dev/null @@ -1,43 +0,0 @@ -package logrus - -import ( - "bytes" - "sync" -) - -var ( - bufferPool BufferPool -) - -type BufferPool interface { - Put(*bytes.Buffer) - Get() *bytes.Buffer -} - -type defaultPool struct { - pool *sync.Pool -} - -func (p *defaultPool) Put(buf *bytes.Buffer) { - p.pool.Put(buf) -} - -func (p *defaultPool) Get() *bytes.Buffer { - return p.pool.Get().(*bytes.Buffer) -} - -// SetBufferPool allows to replace the default logrus buffer pool -// to better meets the specific needs of an application. -func SetBufferPool(bp BufferPool) { - bufferPool = bp -} - -func init() { - SetBufferPool(&defaultPool{ - pool: &sync.Pool{ - New: func() interface{} { - return new(bytes.Buffer) - }, - }, - }) -} diff --git a/vendor/github.com/sirupsen/logrus/doc.go b/vendor/github.com/sirupsen/logrus/doc.go deleted file mode 100644 index da67aba06..000000000 --- a/vendor/github.com/sirupsen/logrus/doc.go +++ /dev/null @@ -1,26 +0,0 @@ -/* -Package logrus is a structured logger for Go, completely API compatible with the standard library logger. 
- - -The simplest way to use Logrus is simply the package-level exported logger: - - package main - - import ( - log "github.com/sirupsen/logrus" - ) - - func main() { - log.WithFields(log.Fields{ - "animal": "walrus", - "number": 1, - "size": 10, - }).Info("A walrus appears") - } - -Output: - time="2015-09-07T08:48:33Z" level=info msg="A walrus appears" animal=walrus number=1 size=10 - -For a full guide visit https://github.com/sirupsen/logrus -*/ -package logrus diff --git a/vendor/github.com/sirupsen/logrus/entry.go b/vendor/github.com/sirupsen/logrus/entry.go deleted file mode 100644 index 71cdbbc35..000000000 --- a/vendor/github.com/sirupsen/logrus/entry.go +++ /dev/null @@ -1,442 +0,0 @@ -package logrus - -import ( - "bytes" - "context" - "fmt" - "os" - "reflect" - "runtime" - "strings" - "sync" - "time" -) - -var ( - - // qualified package name, cached at first use - logrusPackage string - - // Positions in the call stack when tracing to report the calling method - minimumCallerDepth int - - // Used for caller information initialisation - callerInitOnce sync.Once -) - -const ( - maximumCallerDepth int = 25 - knownLogrusFrames int = 4 -) - -func init() { - // start at the bottom of the stack before the package-name cache is primed - minimumCallerDepth = 1 -} - -// Defines the key when adding errors using WithError. -var ErrorKey = "error" - -// An entry is the final or intermediate Logrus logging entry. It contains all -// the fields passed with WithField{,s}. It's finally logged when Trace, Debug, -// Info, Warn, Error, Fatal or Panic is called on it. These objects can be -// reused and passed around as much as you wish to avoid field duplication. -type Entry struct { - Logger *Logger - - // Contains all the fields set by the user. - Data Fields - - // Time at which the log entry was created - Time time.Time - - // Level the log entry was logged at: Trace, Debug, Info, Warn, Error, Fatal or Panic - // This field will be set on entry firing and the value will be equal to the one in Logger struct field. - Level Level - - // Calling method, with package name - Caller *runtime.Frame - - // Message passed to Trace, Debug, Info, Warn, Error, Fatal or Panic - Message string - - // When formatter is called in entry.log(), a Buffer may be set to entry - Buffer *bytes.Buffer - - // Contains the context set by the user. Useful for hook processing etc. - Context context.Context - - // err may contain a field formatting error - err string -} - -func NewEntry(logger *Logger) *Entry { - return &Entry{ - Logger: logger, - // Default is three fields, plus one optional. Give a little extra room. - Data: make(Fields, 6), - } -} - -func (entry *Entry) Dup() *Entry { - data := make(Fields, len(entry.Data)) - for k, v := range entry.Data { - data[k] = v - } - return &Entry{Logger: entry.Logger, Data: data, Time: entry.Time, Context: entry.Context, err: entry.err} -} - -// Returns the bytes representation of this entry from the formatter. -func (entry *Entry) Bytes() ([]byte, error) { - return entry.Logger.Formatter.Format(entry) -} - -// Returns the string representation from the reader and ultimately the -// formatter. -func (entry *Entry) String() (string, error) { - serialized, err := entry.Bytes() - if err != nil { - return "", err - } - str := string(serialized) - return str, nil -} - -// Add an error as single field (using the key defined in ErrorKey) to the Entry. -func (entry *Entry) WithError(err error) *Entry { - return entry.WithField(ErrorKey, err) -} - -// Add a context to the Entry. 
-func (entry *Entry) WithContext(ctx context.Context) *Entry { - dataCopy := make(Fields, len(entry.Data)) - for k, v := range entry.Data { - dataCopy[k] = v - } - return &Entry{Logger: entry.Logger, Data: dataCopy, Time: entry.Time, err: entry.err, Context: ctx} -} - -// Add a single field to the Entry. -func (entry *Entry) WithField(key string, value interface{}) *Entry { - return entry.WithFields(Fields{key: value}) -} - -// Add a map of fields to the Entry. -func (entry *Entry) WithFields(fields Fields) *Entry { - data := make(Fields, len(entry.Data)+len(fields)) - for k, v := range entry.Data { - data[k] = v - } - fieldErr := entry.err - for k, v := range fields { - isErrField := false - if t := reflect.TypeOf(v); t != nil { - switch { - case t.Kind() == reflect.Func, t.Kind() == reflect.Ptr && t.Elem().Kind() == reflect.Func: - isErrField = true - } - } - if isErrField { - tmp := fmt.Sprintf("can not add field %q", k) - if fieldErr != "" { - fieldErr = entry.err + ", " + tmp - } else { - fieldErr = tmp - } - } else { - data[k] = v - } - } - return &Entry{Logger: entry.Logger, Data: data, Time: entry.Time, err: fieldErr, Context: entry.Context} -} - -// Overrides the time of the Entry. -func (entry *Entry) WithTime(t time.Time) *Entry { - dataCopy := make(Fields, len(entry.Data)) - for k, v := range entry.Data { - dataCopy[k] = v - } - return &Entry{Logger: entry.Logger, Data: dataCopy, Time: t, err: entry.err, Context: entry.Context} -} - -// getPackageName reduces a fully qualified function name to the package name -// There really ought to be to be a better way... -func getPackageName(f string) string { - for { - lastPeriod := strings.LastIndex(f, ".") - lastSlash := strings.LastIndex(f, "/") - if lastPeriod > lastSlash { - f = f[:lastPeriod] - } else { - break - } - } - - return f -} - -// getCaller retrieves the name of the first non-logrus calling function -func getCaller() *runtime.Frame { - // cache this package's fully-qualified name - callerInitOnce.Do(func() { - pcs := make([]uintptr, maximumCallerDepth) - _ = runtime.Callers(0, pcs) - - // dynamic get the package name and the minimum caller depth - for i := 0; i < maximumCallerDepth; i++ { - funcName := runtime.FuncForPC(pcs[i]).Name() - if strings.Contains(funcName, "getCaller") { - logrusPackage = getPackageName(funcName) - break - } - } - - minimumCallerDepth = knownLogrusFrames - }) - - // Restrict the lookback frames to avoid runaway lookups - pcs := make([]uintptr, maximumCallerDepth) - depth := runtime.Callers(minimumCallerDepth, pcs) - frames := runtime.CallersFrames(pcs[:depth]) - - for f, again := frames.Next(); again; f, again = frames.Next() { - pkg := getPackageName(f.Function) - - // If the caller isn't part of this package, we're done - if pkg != logrusPackage { - return &f //nolint:scopelint - } - } - - // if we got here, we failed to find the caller's context - return nil -} - -func (entry Entry) HasCaller() (has bool) { - return entry.Logger != nil && - entry.Logger.ReportCaller && - entry.Caller != nil -} - -func (entry *Entry) log(level Level, msg string) { - var buffer *bytes.Buffer - - newEntry := entry.Dup() - - if newEntry.Time.IsZero() { - newEntry.Time = time.Now() - } - - newEntry.Level = level - newEntry.Message = msg - - newEntry.Logger.mu.Lock() - reportCaller := newEntry.Logger.ReportCaller - bufPool := newEntry.getBufferPool() - newEntry.Logger.mu.Unlock() - - if reportCaller { - newEntry.Caller = getCaller() - } - - newEntry.fireHooks() - buffer = bufPool.Get() - defer func() { - 
newEntry.Buffer = nil - buffer.Reset() - bufPool.Put(buffer) - }() - buffer.Reset() - newEntry.Buffer = buffer - - newEntry.write() - - newEntry.Buffer = nil - - // To avoid Entry#log() returning a value that only would make sense for - // panic() to use in Entry#Panic(), we avoid the allocation by checking - // directly here. - if level <= PanicLevel { - panic(newEntry) - } -} - -func (entry *Entry) getBufferPool() (pool BufferPool) { - if entry.Logger.BufferPool != nil { - return entry.Logger.BufferPool - } - return bufferPool -} - -func (entry *Entry) fireHooks() { - var tmpHooks LevelHooks - entry.Logger.mu.Lock() - tmpHooks = make(LevelHooks, len(entry.Logger.Hooks)) - for k, v := range entry.Logger.Hooks { - tmpHooks[k] = v - } - entry.Logger.mu.Unlock() - - err := tmpHooks.Fire(entry.Level, entry) - if err != nil { - fmt.Fprintf(os.Stderr, "Failed to fire hook: %v\n", err) - } -} - -func (entry *Entry) write() { - entry.Logger.mu.Lock() - defer entry.Logger.mu.Unlock() - serialized, err := entry.Logger.Formatter.Format(entry) - if err != nil { - fmt.Fprintf(os.Stderr, "Failed to obtain reader, %v\n", err) - return - } - if _, err := entry.Logger.Out.Write(serialized); err != nil { - fmt.Fprintf(os.Stderr, "Failed to write to log, %v\n", err) - } -} - -// Log will log a message at the level given as parameter. -// Warning: using Log at Panic or Fatal level will not respectively Panic nor Exit. -// For this behaviour Entry.Panic or Entry.Fatal should be used instead. -func (entry *Entry) Log(level Level, args ...interface{}) { - if entry.Logger.IsLevelEnabled(level) { - entry.log(level, fmt.Sprint(args...)) - } -} - -func (entry *Entry) Trace(args ...interface{}) { - entry.Log(TraceLevel, args...) -} - -func (entry *Entry) Debug(args ...interface{}) { - entry.Log(DebugLevel, args...) -} - -func (entry *Entry) Print(args ...interface{}) { - entry.Info(args...) -} - -func (entry *Entry) Info(args ...interface{}) { - entry.Log(InfoLevel, args...) -} - -func (entry *Entry) Warn(args ...interface{}) { - entry.Log(WarnLevel, args...) -} - -func (entry *Entry) Warning(args ...interface{}) { - entry.Warn(args...) -} - -func (entry *Entry) Error(args ...interface{}) { - entry.Log(ErrorLevel, args...) -} - -func (entry *Entry) Fatal(args ...interface{}) { - entry.Log(FatalLevel, args...) - entry.Logger.Exit(1) -} - -func (entry *Entry) Panic(args ...interface{}) { - entry.Log(PanicLevel, args...) -} - -// Entry Printf family functions - -func (entry *Entry) Logf(level Level, format string, args ...interface{}) { - if entry.Logger.IsLevelEnabled(level) { - entry.Log(level, fmt.Sprintf(format, args...)) - } -} - -func (entry *Entry) Tracef(format string, args ...interface{}) { - entry.Logf(TraceLevel, format, args...) -} - -func (entry *Entry) Debugf(format string, args ...interface{}) { - entry.Logf(DebugLevel, format, args...) -} - -func (entry *Entry) Infof(format string, args ...interface{}) { - entry.Logf(InfoLevel, format, args...) -} - -func (entry *Entry) Printf(format string, args ...interface{}) { - entry.Infof(format, args...) -} - -func (entry *Entry) Warnf(format string, args ...interface{}) { - entry.Logf(WarnLevel, format, args...) -} - -func (entry *Entry) Warningf(format string, args ...interface{}) { - entry.Warnf(format, args...) -} - -func (entry *Entry) Errorf(format string, args ...interface{}) { - entry.Logf(ErrorLevel, format, args...) -} - -func (entry *Entry) Fatalf(format string, args ...interface{}) { - entry.Logf(FatalLevel, format, args...) 
- entry.Logger.Exit(1) -} - -func (entry *Entry) Panicf(format string, args ...interface{}) { - entry.Logf(PanicLevel, format, args...) -} - -// Entry Println family functions - -func (entry *Entry) Logln(level Level, args ...interface{}) { - if entry.Logger.IsLevelEnabled(level) { - entry.Log(level, entry.sprintlnn(args...)) - } -} - -func (entry *Entry) Traceln(args ...interface{}) { - entry.Logln(TraceLevel, args...) -} - -func (entry *Entry) Debugln(args ...interface{}) { - entry.Logln(DebugLevel, args...) -} - -func (entry *Entry) Infoln(args ...interface{}) { - entry.Logln(InfoLevel, args...) -} - -func (entry *Entry) Println(args ...interface{}) { - entry.Infoln(args...) -} - -func (entry *Entry) Warnln(args ...interface{}) { - entry.Logln(WarnLevel, args...) -} - -func (entry *Entry) Warningln(args ...interface{}) { - entry.Warnln(args...) -} - -func (entry *Entry) Errorln(args ...interface{}) { - entry.Logln(ErrorLevel, args...) -} - -func (entry *Entry) Fatalln(args ...interface{}) { - entry.Logln(FatalLevel, args...) - entry.Logger.Exit(1) -} - -func (entry *Entry) Panicln(args ...interface{}) { - entry.Logln(PanicLevel, args...) -} - -// Sprintlnn => Sprint no newline. This is to get the behavior of how -// fmt.Sprintln where spaces are always added between operands, regardless of -// their type. Instead of vendoring the Sprintln implementation to spare a -// string allocation, we do the simplest thing. -func (entry *Entry) sprintlnn(args ...interface{}) string { - msg := fmt.Sprintln(args...) - return msg[:len(msg)-1] -} diff --git a/vendor/github.com/sirupsen/logrus/exported.go b/vendor/github.com/sirupsen/logrus/exported.go deleted file mode 100644 index 017c30ce6..000000000 --- a/vendor/github.com/sirupsen/logrus/exported.go +++ /dev/null @@ -1,270 +0,0 @@ -package logrus - -import ( - "context" - "io" - "time" -) - -var ( - // std is the name of the standard logger in stdlib `log` - std = New() -) - -func StandardLogger() *Logger { - return std -} - -// SetOutput sets the standard logger output. -func SetOutput(out io.Writer) { - std.SetOutput(out) -} - -// SetFormatter sets the standard logger formatter. -func SetFormatter(formatter Formatter) { - std.SetFormatter(formatter) -} - -// SetReportCaller sets whether the standard logger will include the calling -// method as a field. -func SetReportCaller(include bool) { - std.SetReportCaller(include) -} - -// SetLevel sets the standard logger level. -func SetLevel(level Level) { - std.SetLevel(level) -} - -// GetLevel returns the standard logger level. -func GetLevel() Level { - return std.GetLevel() -} - -// IsLevelEnabled checks if the log level of the standard logger is greater than the level param -func IsLevelEnabled(level Level) bool { - return std.IsLevelEnabled(level) -} - -// AddHook adds a hook to the standard logger hooks. -func AddHook(hook Hook) { - std.AddHook(hook) -} - -// WithError creates an entry from the standard logger and adds an error to it, using the value defined in ErrorKey as key. -func WithError(err error) *Entry { - return std.WithField(ErrorKey, err) -} - -// WithContext creates an entry from the standard logger and adds a context to it. -func WithContext(ctx context.Context) *Entry { - return std.WithContext(ctx) -} - -// WithField creates an entry from the standard logger and adds a field to -// it. If you want multiple fields, use `WithFields`. -// -// Note that it doesn't log until you call Debug, Print, Info, Warn, Fatal -// or Panic on the Entry it returns. 
-func WithField(key string, value interface{}) *Entry { - return std.WithField(key, value) -} - -// WithFields creates an entry from the standard logger and adds multiple -// fields to it. This is simply a helper for `WithField`, invoking it -// once for each field. -// -// Note that it doesn't log until you call Debug, Print, Info, Warn, Fatal -// or Panic on the Entry it returns. -func WithFields(fields Fields) *Entry { - return std.WithFields(fields) -} - -// WithTime creates an entry from the standard logger and overrides the time of -// logs generated with it. -// -// Note that it doesn't log until you call Debug, Print, Info, Warn, Fatal -// or Panic on the Entry it returns. -func WithTime(t time.Time) *Entry { - return std.WithTime(t) -} - -// Trace logs a message at level Trace on the standard logger. -func Trace(args ...interface{}) { - std.Trace(args...) -} - -// Debug logs a message at level Debug on the standard logger. -func Debug(args ...interface{}) { - std.Debug(args...) -} - -// Print logs a message at level Info on the standard logger. -func Print(args ...interface{}) { - std.Print(args...) -} - -// Info logs a message at level Info on the standard logger. -func Info(args ...interface{}) { - std.Info(args...) -} - -// Warn logs a message at level Warn on the standard logger. -func Warn(args ...interface{}) { - std.Warn(args...) -} - -// Warning logs a message at level Warn on the standard logger. -func Warning(args ...interface{}) { - std.Warning(args...) -} - -// Error logs a message at level Error on the standard logger. -func Error(args ...interface{}) { - std.Error(args...) -} - -// Panic logs a message at level Panic on the standard logger. -func Panic(args ...interface{}) { - std.Panic(args...) -} - -// Fatal logs a message at level Fatal on the standard logger then the process will exit with status set to 1. -func Fatal(args ...interface{}) { - std.Fatal(args...) -} - -// TraceFn logs a message from a func at level Trace on the standard logger. -func TraceFn(fn LogFunction) { - std.TraceFn(fn) -} - -// DebugFn logs a message from a func at level Debug on the standard logger. -func DebugFn(fn LogFunction) { - std.DebugFn(fn) -} - -// PrintFn logs a message from a func at level Info on the standard logger. -func PrintFn(fn LogFunction) { - std.PrintFn(fn) -} - -// InfoFn logs a message from a func at level Info on the standard logger. -func InfoFn(fn LogFunction) { - std.InfoFn(fn) -} - -// WarnFn logs a message from a func at level Warn on the standard logger. -func WarnFn(fn LogFunction) { - std.WarnFn(fn) -} - -// WarningFn logs a message from a func at level Warn on the standard logger. -func WarningFn(fn LogFunction) { - std.WarningFn(fn) -} - -// ErrorFn logs a message from a func at level Error on the standard logger. -func ErrorFn(fn LogFunction) { - std.ErrorFn(fn) -} - -// PanicFn logs a message from a func at level Panic on the standard logger. -func PanicFn(fn LogFunction) { - std.PanicFn(fn) -} - -// FatalFn logs a message from a func at level Fatal on the standard logger then the process will exit with status set to 1. -func FatalFn(fn LogFunction) { - std.FatalFn(fn) -} - -// Tracef logs a message at level Trace on the standard logger. -func Tracef(format string, args ...interface{}) { - std.Tracef(format, args...) -} - -// Debugf logs a message at level Debug on the standard logger. -func Debugf(format string, args ...interface{}) { - std.Debugf(format, args...) -} - -// Printf logs a message at level Info on the standard logger. 
-func Printf(format string, args ...interface{}) { - std.Printf(format, args...) -} - -// Infof logs a message at level Info on the standard logger. -func Infof(format string, args ...interface{}) { - std.Infof(format, args...) -} - -// Warnf logs a message at level Warn on the standard logger. -func Warnf(format string, args ...interface{}) { - std.Warnf(format, args...) -} - -// Warningf logs a message at level Warn on the standard logger. -func Warningf(format string, args ...interface{}) { - std.Warningf(format, args...) -} - -// Errorf logs a message at level Error on the standard logger. -func Errorf(format string, args ...interface{}) { - std.Errorf(format, args...) -} - -// Panicf logs a message at level Panic on the standard logger. -func Panicf(format string, args ...interface{}) { - std.Panicf(format, args...) -} - -// Fatalf logs a message at level Fatal on the standard logger then the process will exit with status set to 1. -func Fatalf(format string, args ...interface{}) { - std.Fatalf(format, args...) -} - -// Traceln logs a message at level Trace on the standard logger. -func Traceln(args ...interface{}) { - std.Traceln(args...) -} - -// Debugln logs a message at level Debug on the standard logger. -func Debugln(args ...interface{}) { - std.Debugln(args...) -} - -// Println logs a message at level Info on the standard logger. -func Println(args ...interface{}) { - std.Println(args...) -} - -// Infoln logs a message at level Info on the standard logger. -func Infoln(args ...interface{}) { - std.Infoln(args...) -} - -// Warnln logs a message at level Warn on the standard logger. -func Warnln(args ...interface{}) { - std.Warnln(args...) -} - -// Warningln logs a message at level Warn on the standard logger. -func Warningln(args ...interface{}) { - std.Warningln(args...) -} - -// Errorln logs a message at level Error on the standard logger. -func Errorln(args ...interface{}) { - std.Errorln(args...) -} - -// Panicln logs a message at level Panic on the standard logger. -func Panicln(args ...interface{}) { - std.Panicln(args...) -} - -// Fatalln logs a message at level Fatal on the standard logger then the process will exit with status set to 1. -func Fatalln(args ...interface{}) { - std.Fatalln(args...) -} diff --git a/vendor/github.com/sirupsen/logrus/formatter.go b/vendor/github.com/sirupsen/logrus/formatter.go deleted file mode 100644 index 408883773..000000000 --- a/vendor/github.com/sirupsen/logrus/formatter.go +++ /dev/null @@ -1,78 +0,0 @@ -package logrus - -import "time" - -// Default key names for the default fields -const ( - defaultTimestampFormat = time.RFC3339 - FieldKeyMsg = "msg" - FieldKeyLevel = "level" - FieldKeyTime = "time" - FieldKeyLogrusError = "logrus_error" - FieldKeyFunc = "func" - FieldKeyFile = "file" -) - -// The Formatter interface is used to implement a custom Formatter. It takes an -// `Entry`. It exposes all the fields, including the default ones: -// -// * `entry.Data["msg"]`. The message passed from Info, Warn, Error .. -// * `entry.Data["time"]`. The timestamp. -// * `entry.Data["level"]. The level the entry was logged at. -// -// Any additional fields added with `WithField` or `WithFields` are also in -// `entry.Data`. Format is expected to return an array of bytes which are then -// logged to `logger.Out`. -type Formatter interface { - Format(*Entry) ([]byte, error) -} - -// This is to not silently overwrite `time`, `msg`, `func` and `level` fields when -// dumping it. 
If this code wasn't there doing: -// -// logrus.WithField("level", 1).Info("hello") -// -// Would just silently drop the user provided level. Instead with this code -// it'll logged as: -// -// {"level": "info", "fields.level": 1, "msg": "hello", "time": "..."} -// -// It's not exported because it's still using Data in an opinionated way. It's to -// avoid code duplication between the two default formatters. -func prefixFieldClashes(data Fields, fieldMap FieldMap, reportCaller bool) { - timeKey := fieldMap.resolve(FieldKeyTime) - if t, ok := data[timeKey]; ok { - data["fields."+timeKey] = t - delete(data, timeKey) - } - - msgKey := fieldMap.resolve(FieldKeyMsg) - if m, ok := data[msgKey]; ok { - data["fields."+msgKey] = m - delete(data, msgKey) - } - - levelKey := fieldMap.resolve(FieldKeyLevel) - if l, ok := data[levelKey]; ok { - data["fields."+levelKey] = l - delete(data, levelKey) - } - - logrusErrKey := fieldMap.resolve(FieldKeyLogrusError) - if l, ok := data[logrusErrKey]; ok { - data["fields."+logrusErrKey] = l - delete(data, logrusErrKey) - } - - // If reportCaller is not set, 'func' will not conflict. - if reportCaller { - funcKey := fieldMap.resolve(FieldKeyFunc) - if l, ok := data[funcKey]; ok { - data["fields."+funcKey] = l - } - fileKey := fieldMap.resolve(FieldKeyFile) - if l, ok := data[fileKey]; ok { - data["fields."+fileKey] = l - } - } -} diff --git a/vendor/github.com/sirupsen/logrus/hooks.go b/vendor/github.com/sirupsen/logrus/hooks.go deleted file mode 100644 index 3f151cdc3..000000000 --- a/vendor/github.com/sirupsen/logrus/hooks.go +++ /dev/null @@ -1,34 +0,0 @@ -package logrus - -// A hook to be fired when logging on the logging levels returned from -// `Levels()` on your implementation of the interface. Note that this is not -// fired in a goroutine or a channel with workers, you should handle such -// functionality yourself if your call is non-blocking and you don't wish for -// the logging calls for levels returned from `Levels()` to block. -type Hook interface { - Levels() []Level - Fire(*Entry) error -} - -// Internal type for storing the hooks on a logger instance. -type LevelHooks map[Level][]Hook - -// Add a hook to an instance of logger. This is called with -// `log.Hooks.Add(new(MyHook))` where `MyHook` implements the `Hook` interface. -func (hooks LevelHooks) Add(hook Hook) { - for _, level := range hook.Levels() { - hooks[level] = append(hooks[level], hook) - } -} - -// Fire all the hooks for the passed level. Used by `entry.log` to fire -// appropriate hooks for a log entry. -func (hooks LevelHooks) Fire(level Level, entry *Entry) error { - for _, hook := range hooks[level] { - if err := hook.Fire(entry); err != nil { - return err - } - } - - return nil -} diff --git a/vendor/github.com/sirupsen/logrus/json_formatter.go b/vendor/github.com/sirupsen/logrus/json_formatter.go deleted file mode 100644 index c96dc5636..000000000 --- a/vendor/github.com/sirupsen/logrus/json_formatter.go +++ /dev/null @@ -1,128 +0,0 @@ -package logrus - -import ( - "bytes" - "encoding/json" - "fmt" - "runtime" -) - -type fieldKey string - -// FieldMap allows customization of the key names for default fields. -type FieldMap map[fieldKey]string - -func (f FieldMap) resolve(key fieldKey) string { - if k, ok := f[key]; ok { - return k - } - - return string(key) -} - -// JSONFormatter formats logs into parsable json -type JSONFormatter struct { - // TimestampFormat sets the format used for marshaling timestamps. 
- // The format to use is the same than for time.Format or time.Parse from the standard - // library. - // The standard Library already provides a set of predefined format. - TimestampFormat string - - // DisableTimestamp allows disabling automatic timestamps in output - DisableTimestamp bool - - // DisableHTMLEscape allows disabling html escaping in output - DisableHTMLEscape bool - - // DataKey allows users to put all the log entry parameters into a nested dictionary at a given key. - DataKey string - - // FieldMap allows users to customize the names of keys for default fields. - // As an example: - // formatter := &JSONFormatter{ - // FieldMap: FieldMap{ - // FieldKeyTime: "@timestamp", - // FieldKeyLevel: "@level", - // FieldKeyMsg: "@message", - // FieldKeyFunc: "@caller", - // }, - // } - FieldMap FieldMap - - // CallerPrettyfier can be set by the user to modify the content - // of the function and file keys in the json data when ReportCaller is - // activated. If any of the returned value is the empty string the - // corresponding key will be removed from json fields. - CallerPrettyfier func(*runtime.Frame) (function string, file string) - - // PrettyPrint will indent all json logs - PrettyPrint bool -} - -// Format renders a single log entry -func (f *JSONFormatter) Format(entry *Entry) ([]byte, error) { - data := make(Fields, len(entry.Data)+4) - for k, v := range entry.Data { - switch v := v.(type) { - case error: - // Otherwise errors are ignored by `encoding/json` - // https://github.com/sirupsen/logrus/issues/137 - data[k] = v.Error() - default: - data[k] = v - } - } - - if f.DataKey != "" { - newData := make(Fields, 4) - newData[f.DataKey] = data - data = newData - } - - prefixFieldClashes(data, f.FieldMap, entry.HasCaller()) - - timestampFormat := f.TimestampFormat - if timestampFormat == "" { - timestampFormat = defaultTimestampFormat - } - - if entry.err != "" { - data[f.FieldMap.resolve(FieldKeyLogrusError)] = entry.err - } - if !f.DisableTimestamp { - data[f.FieldMap.resolve(FieldKeyTime)] = entry.Time.Format(timestampFormat) - } - data[f.FieldMap.resolve(FieldKeyMsg)] = entry.Message - data[f.FieldMap.resolve(FieldKeyLevel)] = entry.Level.String() - if entry.HasCaller() { - funcVal := entry.Caller.Function - fileVal := fmt.Sprintf("%s:%d", entry.Caller.File, entry.Caller.Line) - if f.CallerPrettyfier != nil { - funcVal, fileVal = f.CallerPrettyfier(entry.Caller) - } - if funcVal != "" { - data[f.FieldMap.resolve(FieldKeyFunc)] = funcVal - } - if fileVal != "" { - data[f.FieldMap.resolve(FieldKeyFile)] = fileVal - } - } - - var b *bytes.Buffer - if entry.Buffer != nil { - b = entry.Buffer - } else { - b = &bytes.Buffer{} - } - - encoder := json.NewEncoder(b) - encoder.SetEscapeHTML(!f.DisableHTMLEscape) - if f.PrettyPrint { - encoder.SetIndent("", " ") - } - if err := encoder.Encode(data); err != nil { - return nil, fmt.Errorf("failed to marshal fields to JSON, %w", err) - } - - return b.Bytes(), nil -} diff --git a/vendor/github.com/sirupsen/logrus/logger.go b/vendor/github.com/sirupsen/logrus/logger.go deleted file mode 100644 index 5ff0aef6d..000000000 --- a/vendor/github.com/sirupsen/logrus/logger.go +++ /dev/null @@ -1,417 +0,0 @@ -package logrus - -import ( - "context" - "io" - "os" - "sync" - "sync/atomic" - "time" -) - -// LogFunction For big messages, it can be more efficient to pass a function -// and only call it if the log level is actually enables rather than -// generating the log message and then checking if the level is enabled -type LogFunction func() 
[]interface{} - -type Logger struct { - // The logs are `io.Copy`'d to this in a mutex. It's common to set this to a - // file, or leave it default which is `os.Stderr`. You can also set this to - // something more adventurous, such as logging to Kafka. - Out io.Writer - // Hooks for the logger instance. These allow firing events based on logging - // levels and log entries. For example, to send errors to an error tracking - // service, log to StatsD or dump the core on fatal errors. - Hooks LevelHooks - // All log entries pass through the formatter before logged to Out. The - // included formatters are `TextFormatter` and `JSONFormatter` for which - // TextFormatter is the default. In development (when a TTY is attached) it - // logs with colors, but to a file it wouldn't. You can easily implement your - // own that implements the `Formatter` interface, see the `README` or included - // formatters for examples. - Formatter Formatter - - // Flag for whether to log caller info (off by default) - ReportCaller bool - - // The logging level the logger should log at. This is typically (and defaults - // to) `logrus.Info`, which allows Info(), Warn(), Error() and Fatal() to be - // logged. - Level Level - // Used to sync writing to the log. Locking is enabled by Default - mu MutexWrap - // Reusable empty entry - entryPool sync.Pool - // Function to exit the application, defaults to `os.Exit()` - ExitFunc exitFunc - // The buffer pool used to format the log. If it is nil, the default global - // buffer pool will be used. - BufferPool BufferPool -} - -type exitFunc func(int) - -type MutexWrap struct { - lock sync.Mutex - disabled bool -} - -func (mw *MutexWrap) Lock() { - if !mw.disabled { - mw.lock.Lock() - } -} - -func (mw *MutexWrap) Unlock() { - if !mw.disabled { - mw.lock.Unlock() - } -} - -func (mw *MutexWrap) Disable() { - mw.disabled = true -} - -// Creates a new logger. Configuration should be set by changing `Formatter`, -// `Out` and `Hooks` directly on the default logger instance. You can also just -// instantiate your own: -// -// var log = &logrus.Logger{ -// Out: os.Stderr, -// Formatter: new(logrus.TextFormatter), -// Hooks: make(logrus.LevelHooks), -// Level: logrus.DebugLevel, -// } -// -// It's recommended to make this a global instance called `log`. -func New() *Logger { - return &Logger{ - Out: os.Stderr, - Formatter: new(TextFormatter), - Hooks: make(LevelHooks), - Level: InfoLevel, - ExitFunc: os.Exit, - ReportCaller: false, - } -} - -func (logger *Logger) newEntry() *Entry { - entry, ok := logger.entryPool.Get().(*Entry) - if ok { - return entry - } - return NewEntry(logger) -} - -func (logger *Logger) releaseEntry(entry *Entry) { - entry.Data = map[string]interface{}{} - logger.entryPool.Put(entry) -} - -// WithField allocates a new entry and adds a field to it. -// Debug, Print, Info, Warn, Error, Fatal or Panic must be then applied to -// this new returned entry. -// If you want multiple fields, use `WithFields`. -func (logger *Logger) WithField(key string, value interface{}) *Entry { - entry := logger.newEntry() - defer logger.releaseEntry(entry) - return entry.WithField(key, value) -} - -// Adds a struct of fields to the log entry. All it does is call `WithField` for -// each `Field`. -func (logger *Logger) WithFields(fields Fields) *Entry { - entry := logger.newEntry() - defer logger.releaseEntry(entry) - return entry.WithFields(fields) -} - -// Add an error as single field to the log entry. All it does is call -// `WithError` for the given `error`. 
-func (logger *Logger) WithError(err error) *Entry { - entry := logger.newEntry() - defer logger.releaseEntry(entry) - return entry.WithError(err) -} - -// Add a context to the log entry. -func (logger *Logger) WithContext(ctx context.Context) *Entry { - entry := logger.newEntry() - defer logger.releaseEntry(entry) - return entry.WithContext(ctx) -} - -// Overrides the time of the log entry. -func (logger *Logger) WithTime(t time.Time) *Entry { - entry := logger.newEntry() - defer logger.releaseEntry(entry) - return entry.WithTime(t) -} - -func (logger *Logger) Logf(level Level, format string, args ...interface{}) { - if logger.IsLevelEnabled(level) { - entry := logger.newEntry() - entry.Logf(level, format, args...) - logger.releaseEntry(entry) - } -} - -func (logger *Logger) Tracef(format string, args ...interface{}) { - logger.Logf(TraceLevel, format, args...) -} - -func (logger *Logger) Debugf(format string, args ...interface{}) { - logger.Logf(DebugLevel, format, args...) -} - -func (logger *Logger) Infof(format string, args ...interface{}) { - logger.Logf(InfoLevel, format, args...) -} - -func (logger *Logger) Printf(format string, args ...interface{}) { - entry := logger.newEntry() - entry.Printf(format, args...) - logger.releaseEntry(entry) -} - -func (logger *Logger) Warnf(format string, args ...interface{}) { - logger.Logf(WarnLevel, format, args...) -} - -func (logger *Logger) Warningf(format string, args ...interface{}) { - logger.Warnf(format, args...) -} - -func (logger *Logger) Errorf(format string, args ...interface{}) { - logger.Logf(ErrorLevel, format, args...) -} - -func (logger *Logger) Fatalf(format string, args ...interface{}) { - logger.Logf(FatalLevel, format, args...) - logger.Exit(1) -} - -func (logger *Logger) Panicf(format string, args ...interface{}) { - logger.Logf(PanicLevel, format, args...) -} - -// Log will log a message at the level given as parameter. -// Warning: using Log at Panic or Fatal level will not respectively Panic nor Exit. -// For this behaviour Logger.Panic or Logger.Fatal should be used instead. -func (logger *Logger) Log(level Level, args ...interface{}) { - if logger.IsLevelEnabled(level) { - entry := logger.newEntry() - entry.Log(level, args...) - logger.releaseEntry(entry) - } -} - -func (logger *Logger) LogFn(level Level, fn LogFunction) { - if logger.IsLevelEnabled(level) { - entry := logger.newEntry() - entry.Log(level, fn()...) - logger.releaseEntry(entry) - } -} - -func (logger *Logger) Trace(args ...interface{}) { - logger.Log(TraceLevel, args...) -} - -func (logger *Logger) Debug(args ...interface{}) { - logger.Log(DebugLevel, args...) -} - -func (logger *Logger) Info(args ...interface{}) { - logger.Log(InfoLevel, args...) -} - -func (logger *Logger) Print(args ...interface{}) { - entry := logger.newEntry() - entry.Print(args...) - logger.releaseEntry(entry) -} - -func (logger *Logger) Warn(args ...interface{}) { - logger.Log(WarnLevel, args...) -} - -func (logger *Logger) Warning(args ...interface{}) { - logger.Warn(args...) -} - -func (logger *Logger) Error(args ...interface{}) { - logger.Log(ErrorLevel, args...) -} - -func (logger *Logger) Fatal(args ...interface{}) { - logger.Log(FatalLevel, args...) - logger.Exit(1) -} - -func (logger *Logger) Panic(args ...interface{}) { - logger.Log(PanicLevel, args...) 
-} - -func (logger *Logger) TraceFn(fn LogFunction) { - logger.LogFn(TraceLevel, fn) -} - -func (logger *Logger) DebugFn(fn LogFunction) { - logger.LogFn(DebugLevel, fn) -} - -func (logger *Logger) InfoFn(fn LogFunction) { - logger.LogFn(InfoLevel, fn) -} - -func (logger *Logger) PrintFn(fn LogFunction) { - entry := logger.newEntry() - entry.Print(fn()...) - logger.releaseEntry(entry) -} - -func (logger *Logger) WarnFn(fn LogFunction) { - logger.LogFn(WarnLevel, fn) -} - -func (logger *Logger) WarningFn(fn LogFunction) { - logger.WarnFn(fn) -} - -func (logger *Logger) ErrorFn(fn LogFunction) { - logger.LogFn(ErrorLevel, fn) -} - -func (logger *Logger) FatalFn(fn LogFunction) { - logger.LogFn(FatalLevel, fn) - logger.Exit(1) -} - -func (logger *Logger) PanicFn(fn LogFunction) { - logger.LogFn(PanicLevel, fn) -} - -func (logger *Logger) Logln(level Level, args ...interface{}) { - if logger.IsLevelEnabled(level) { - entry := logger.newEntry() - entry.Logln(level, args...) - logger.releaseEntry(entry) - } -} - -func (logger *Logger) Traceln(args ...interface{}) { - logger.Logln(TraceLevel, args...) -} - -func (logger *Logger) Debugln(args ...interface{}) { - logger.Logln(DebugLevel, args...) -} - -func (logger *Logger) Infoln(args ...interface{}) { - logger.Logln(InfoLevel, args...) -} - -func (logger *Logger) Println(args ...interface{}) { - entry := logger.newEntry() - entry.Println(args...) - logger.releaseEntry(entry) -} - -func (logger *Logger) Warnln(args ...interface{}) { - logger.Logln(WarnLevel, args...) -} - -func (logger *Logger) Warningln(args ...interface{}) { - logger.Warnln(args...) -} - -func (logger *Logger) Errorln(args ...interface{}) { - logger.Logln(ErrorLevel, args...) -} - -func (logger *Logger) Fatalln(args ...interface{}) { - logger.Logln(FatalLevel, args...) - logger.Exit(1) -} - -func (logger *Logger) Panicln(args ...interface{}) { - logger.Logln(PanicLevel, args...) -} - -func (logger *Logger) Exit(code int) { - runHandlers() - if logger.ExitFunc == nil { - logger.ExitFunc = os.Exit - } - logger.ExitFunc(code) -} - -//When file is opened with appending mode, it's safe to -//write concurrently to a file (within 4k message on Linux). -//In these cases user can choose to disable the lock. -func (logger *Logger) SetNoLock() { - logger.mu.Disable() -} - -func (logger *Logger) level() Level { - return Level(atomic.LoadUint32((*uint32)(&logger.Level))) -} - -// SetLevel sets the logger level. -func (logger *Logger) SetLevel(level Level) { - atomic.StoreUint32((*uint32)(&logger.Level), uint32(level)) -} - -// GetLevel returns the logger level. -func (logger *Logger) GetLevel() Level { - return logger.level() -} - -// AddHook adds a hook to the logger hooks. -func (logger *Logger) AddHook(hook Hook) { - logger.mu.Lock() - defer logger.mu.Unlock() - logger.Hooks.Add(hook) -} - -// IsLevelEnabled checks if the log level of the logger is greater than the level param -func (logger *Logger) IsLevelEnabled(level Level) bool { - return logger.level() >= level -} - -// SetFormatter sets the logger formatter. -func (logger *Logger) SetFormatter(formatter Formatter) { - logger.mu.Lock() - defer logger.mu.Unlock() - logger.Formatter = formatter -} - -// SetOutput sets the logger output. 
-func (logger *Logger) SetOutput(output io.Writer) { - logger.mu.Lock() - defer logger.mu.Unlock() - logger.Out = output -} - -func (logger *Logger) SetReportCaller(reportCaller bool) { - logger.mu.Lock() - defer logger.mu.Unlock() - logger.ReportCaller = reportCaller -} - -// ReplaceHooks replaces the logger hooks and returns the old ones -func (logger *Logger) ReplaceHooks(hooks LevelHooks) LevelHooks { - logger.mu.Lock() - oldHooks := logger.Hooks - logger.Hooks = hooks - logger.mu.Unlock() - return oldHooks -} - -// SetBufferPool sets the logger buffer pool. -func (logger *Logger) SetBufferPool(pool BufferPool) { - logger.mu.Lock() - defer logger.mu.Unlock() - logger.BufferPool = pool -} diff --git a/vendor/github.com/sirupsen/logrus/logrus.go b/vendor/github.com/sirupsen/logrus/logrus.go deleted file mode 100644 index 2f16224cb..000000000 --- a/vendor/github.com/sirupsen/logrus/logrus.go +++ /dev/null @@ -1,186 +0,0 @@ -package logrus - -import ( - "fmt" - "log" - "strings" -) - -// Fields type, used to pass to `WithFields`. -type Fields map[string]interface{} - -// Level type -type Level uint32 - -// Convert the Level to a string. E.g. PanicLevel becomes "panic". -func (level Level) String() string { - if b, err := level.MarshalText(); err == nil { - return string(b) - } else { - return "unknown" - } -} - -// ParseLevel takes a string level and returns the Logrus log level constant. -func ParseLevel(lvl string) (Level, error) { - switch strings.ToLower(lvl) { - case "panic": - return PanicLevel, nil - case "fatal": - return FatalLevel, nil - case "error": - return ErrorLevel, nil - case "warn", "warning": - return WarnLevel, nil - case "info": - return InfoLevel, nil - case "debug": - return DebugLevel, nil - case "trace": - return TraceLevel, nil - } - - var l Level - return l, fmt.Errorf("not a valid logrus Level: %q", lvl) -} - -// UnmarshalText implements encoding.TextUnmarshaler. -func (level *Level) UnmarshalText(text []byte) error { - l, err := ParseLevel(string(text)) - if err != nil { - return err - } - - *level = l - - return nil -} - -func (level Level) MarshalText() ([]byte, error) { - switch level { - case TraceLevel: - return []byte("trace"), nil - case DebugLevel: - return []byte("debug"), nil - case InfoLevel: - return []byte("info"), nil - case WarnLevel: - return []byte("warning"), nil - case ErrorLevel: - return []byte("error"), nil - case FatalLevel: - return []byte("fatal"), nil - case PanicLevel: - return []byte("panic"), nil - } - - return nil, fmt.Errorf("not a valid logrus level %d", level) -} - -// A constant exposing all logging levels -var AllLevels = []Level{ - PanicLevel, - FatalLevel, - ErrorLevel, - WarnLevel, - InfoLevel, - DebugLevel, - TraceLevel, -} - -// These are the different logging levels. You can set the logging level to log -// on your instance of logger, obtained with `logrus.New()`. -const ( - // PanicLevel level, highest level of severity. Logs and then calls panic with the - // message passed to Debug, Info, ... - PanicLevel Level = iota - // FatalLevel level. Logs and then calls `logger.Exit(1)`. It will exit even if the - // logging level is set to Panic. - FatalLevel - // ErrorLevel level. Logs. Used for errors that should definitely be noted. - // Commonly used for hooks to send errors to an error tracking service. - ErrorLevel - // WarnLevel level. Non-critical entries that deserve eyes. - WarnLevel - // InfoLevel level. General operational entries about what's going on inside the - // application. - InfoLevel - // DebugLevel level. 
Usually only enabled when debugging. Very verbose logging. - DebugLevel - // TraceLevel level. Designates finer-grained informational events than the Debug. - TraceLevel -) - -// Won't compile if StdLogger can't be realized by a log.Logger -var ( - _ StdLogger = &log.Logger{} - _ StdLogger = &Entry{} - _ StdLogger = &Logger{} -) - -// StdLogger is what your logrus-enabled library should take, that way -// it'll accept a stdlib logger and a logrus logger. There's no standard -// interface, this is the closest we get, unfortunately. -type StdLogger interface { - Print(...interface{}) - Printf(string, ...interface{}) - Println(...interface{}) - - Fatal(...interface{}) - Fatalf(string, ...interface{}) - Fatalln(...interface{}) - - Panic(...interface{}) - Panicf(string, ...interface{}) - Panicln(...interface{}) -} - -// The FieldLogger interface generalizes the Entry and Logger types -type FieldLogger interface { - WithField(key string, value interface{}) *Entry - WithFields(fields Fields) *Entry - WithError(err error) *Entry - - Debugf(format string, args ...interface{}) - Infof(format string, args ...interface{}) - Printf(format string, args ...interface{}) - Warnf(format string, args ...interface{}) - Warningf(format string, args ...interface{}) - Errorf(format string, args ...interface{}) - Fatalf(format string, args ...interface{}) - Panicf(format string, args ...interface{}) - - Debug(args ...interface{}) - Info(args ...interface{}) - Print(args ...interface{}) - Warn(args ...interface{}) - Warning(args ...interface{}) - Error(args ...interface{}) - Fatal(args ...interface{}) - Panic(args ...interface{}) - - Debugln(args ...interface{}) - Infoln(args ...interface{}) - Println(args ...interface{}) - Warnln(args ...interface{}) - Warningln(args ...interface{}) - Errorln(args ...interface{}) - Fatalln(args ...interface{}) - Panicln(args ...interface{}) - - // IsDebugEnabled() bool - // IsInfoEnabled() bool - // IsWarnEnabled() bool - // IsErrorEnabled() bool - // IsFatalEnabled() bool - // IsPanicEnabled() bool -} - -// Ext1FieldLogger (the first extension to FieldLogger) is superfluous, it is -// here for consistancy. Do not use. Use Logger or Entry instead. 
-type Ext1FieldLogger interface { - FieldLogger - Tracef(format string, args ...interface{}) - Trace(args ...interface{}) - Traceln(args ...interface{}) -} diff --git a/vendor/github.com/sirupsen/logrus/terminal_check_appengine.go b/vendor/github.com/sirupsen/logrus/terminal_check_appengine.go deleted file mode 100644 index 2403de981..000000000 --- a/vendor/github.com/sirupsen/logrus/terminal_check_appengine.go +++ /dev/null @@ -1,11 +0,0 @@ -// +build appengine - -package logrus - -import ( - "io" -) - -func checkIfTerminal(w io.Writer) bool { - return true -} diff --git a/vendor/github.com/sirupsen/logrus/terminal_check_bsd.go b/vendor/github.com/sirupsen/logrus/terminal_check_bsd.go deleted file mode 100644 index 499789984..000000000 --- a/vendor/github.com/sirupsen/logrus/terminal_check_bsd.go +++ /dev/null @@ -1,13 +0,0 @@ -// +build darwin dragonfly freebsd netbsd openbsd -// +build !js - -package logrus - -import "golang.org/x/sys/unix" - -const ioctlReadTermios = unix.TIOCGETA - -func isTerminal(fd int) bool { - _, err := unix.IoctlGetTermios(fd, ioctlReadTermios) - return err == nil -} diff --git a/vendor/github.com/sirupsen/logrus/terminal_check_js.go b/vendor/github.com/sirupsen/logrus/terminal_check_js.go deleted file mode 100644 index ebdae3ec6..000000000 --- a/vendor/github.com/sirupsen/logrus/terminal_check_js.go +++ /dev/null @@ -1,7 +0,0 @@ -// +build js - -package logrus - -func isTerminal(fd int) bool { - return false -} diff --git a/vendor/github.com/sirupsen/logrus/terminal_check_no_terminal.go b/vendor/github.com/sirupsen/logrus/terminal_check_no_terminal.go deleted file mode 100644 index 97af92c68..000000000 --- a/vendor/github.com/sirupsen/logrus/terminal_check_no_terminal.go +++ /dev/null @@ -1,11 +0,0 @@ -// +build js nacl plan9 - -package logrus - -import ( - "io" -) - -func checkIfTerminal(w io.Writer) bool { - return false -} diff --git a/vendor/github.com/sirupsen/logrus/terminal_check_notappengine.go b/vendor/github.com/sirupsen/logrus/terminal_check_notappengine.go deleted file mode 100644 index 3293fb3ca..000000000 --- a/vendor/github.com/sirupsen/logrus/terminal_check_notappengine.go +++ /dev/null @@ -1,17 +0,0 @@ -// +build !appengine,!js,!windows,!nacl,!plan9 - -package logrus - -import ( - "io" - "os" -) - -func checkIfTerminal(w io.Writer) bool { - switch v := w.(type) { - case *os.File: - return isTerminal(int(v.Fd())) - default: - return false - } -} diff --git a/vendor/github.com/sirupsen/logrus/terminal_check_solaris.go b/vendor/github.com/sirupsen/logrus/terminal_check_solaris.go deleted file mode 100644 index f6710b3bd..000000000 --- a/vendor/github.com/sirupsen/logrus/terminal_check_solaris.go +++ /dev/null @@ -1,11 +0,0 @@ -package logrus - -import ( - "golang.org/x/sys/unix" -) - -// IsTerminal returns true if the given file descriptor is a terminal. 
-func isTerminal(fd int) bool { - _, err := unix.IoctlGetTermio(fd, unix.TCGETA) - return err == nil -} diff --git a/vendor/github.com/sirupsen/logrus/terminal_check_unix.go b/vendor/github.com/sirupsen/logrus/terminal_check_unix.go deleted file mode 100644 index 04748b851..000000000 --- a/vendor/github.com/sirupsen/logrus/terminal_check_unix.go +++ /dev/null @@ -1,13 +0,0 @@ -// +build linux aix zos -// +build !js - -package logrus - -import "golang.org/x/sys/unix" - -const ioctlReadTermios = unix.TCGETS - -func isTerminal(fd int) bool { - _, err := unix.IoctlGetTermios(fd, ioctlReadTermios) - return err == nil -} diff --git a/vendor/github.com/sirupsen/logrus/terminal_check_windows.go b/vendor/github.com/sirupsen/logrus/terminal_check_windows.go deleted file mode 100644 index 2879eb50e..000000000 --- a/vendor/github.com/sirupsen/logrus/terminal_check_windows.go +++ /dev/null @@ -1,27 +0,0 @@ -// +build !appengine,!js,windows - -package logrus - -import ( - "io" - "os" - - "golang.org/x/sys/windows" -) - -func checkIfTerminal(w io.Writer) bool { - switch v := w.(type) { - case *os.File: - handle := windows.Handle(v.Fd()) - var mode uint32 - if err := windows.GetConsoleMode(handle, &mode); err != nil { - return false - } - mode |= windows.ENABLE_VIRTUAL_TERMINAL_PROCESSING - if err := windows.SetConsoleMode(handle, mode); err != nil { - return false - } - return true - } - return false -} diff --git a/vendor/github.com/sirupsen/logrus/text_formatter.go b/vendor/github.com/sirupsen/logrus/text_formatter.go deleted file mode 100644 index be2c6efe5..000000000 --- a/vendor/github.com/sirupsen/logrus/text_formatter.go +++ /dev/null @@ -1,339 +0,0 @@ -package logrus - -import ( - "bytes" - "fmt" - "os" - "runtime" - "sort" - "strconv" - "strings" - "sync" - "time" - "unicode/utf8" -) - -const ( - red = 31 - yellow = 33 - blue = 36 - gray = 37 -) - -var baseTimestamp time.Time - -func init() { - baseTimestamp = time.Now() -} - -// TextFormatter formats logs into text -type TextFormatter struct { - // Set to true to bypass checking for a TTY before outputting colors. - ForceColors bool - - // Force disabling colors. - DisableColors bool - - // Force quoting of all values - ForceQuote bool - - // DisableQuote disables quoting for all values. - // DisableQuote will have a lower priority than ForceQuote. - // If both of them are set to true, quote will be forced on all values. - DisableQuote bool - - // Override coloring based on CLICOLOR and CLICOLOR_FORCE. - https://bixense.com/clicolors/ - EnvironmentOverrideColors bool - - // Disable timestamp logging. useful when output is redirected to logging - // system that already adds timestamps. - DisableTimestamp bool - - // Enable logging the full timestamp when a TTY is attached instead of just - // the time passed since beginning of execution. - FullTimestamp bool - - // TimestampFormat to use for display when a full timestamp is printed. - // The format to use is the same than for time.Format or time.Parse from the standard - // library. - // The standard Library already provides a set of predefined format. - TimestampFormat string - - // The fields are sorted by default for a consistent output. For applications - // that log extremely frequently and don't use the JSON formatter this may not - // be desired. - DisableSorting bool - - // The keys sorting function, when uninitialized it uses sort.Strings. - SortingFunc func([]string) - - // Disables the truncation of the level text to 4 characters. 
- DisableLevelTruncation bool - - // PadLevelText Adds padding the level text so that all the levels output at the same length - // PadLevelText is a superset of the DisableLevelTruncation option - PadLevelText bool - - // QuoteEmptyFields will wrap empty fields in quotes if true - QuoteEmptyFields bool - - // Whether the logger's out is to a terminal - isTerminal bool - - // FieldMap allows users to customize the names of keys for default fields. - // As an example: - // formatter := &TextFormatter{ - // FieldMap: FieldMap{ - // FieldKeyTime: "@timestamp", - // FieldKeyLevel: "@level", - // FieldKeyMsg: "@message"}} - FieldMap FieldMap - - // CallerPrettyfier can be set by the user to modify the content - // of the function and file keys in the data when ReportCaller is - // activated. If any of the returned value is the empty string the - // corresponding key will be removed from fields. - CallerPrettyfier func(*runtime.Frame) (function string, file string) - - terminalInitOnce sync.Once - - // The max length of the level text, generated dynamically on init - levelTextMaxLength int -} - -func (f *TextFormatter) init(entry *Entry) { - if entry.Logger != nil { - f.isTerminal = checkIfTerminal(entry.Logger.Out) - } - // Get the max length of the level text - for _, level := range AllLevels { - levelTextLength := utf8.RuneCount([]byte(level.String())) - if levelTextLength > f.levelTextMaxLength { - f.levelTextMaxLength = levelTextLength - } - } -} - -func (f *TextFormatter) isColored() bool { - isColored := f.ForceColors || (f.isTerminal && (runtime.GOOS != "windows")) - - if f.EnvironmentOverrideColors { - switch force, ok := os.LookupEnv("CLICOLOR_FORCE"); { - case ok && force != "0": - isColored = true - case ok && force == "0", os.Getenv("CLICOLOR") == "0": - isColored = false - } - } - - return isColored && !f.DisableColors -} - -// Format renders a single log entry -func (f *TextFormatter) Format(entry *Entry) ([]byte, error) { - data := make(Fields) - for k, v := range entry.Data { - data[k] = v - } - prefixFieldClashes(data, f.FieldMap, entry.HasCaller()) - keys := make([]string, 0, len(data)) - for k := range data { - keys = append(keys, k) - } - - var funcVal, fileVal string - - fixedKeys := make([]string, 0, 4+len(data)) - if !f.DisableTimestamp { - fixedKeys = append(fixedKeys, f.FieldMap.resolve(FieldKeyTime)) - } - fixedKeys = append(fixedKeys, f.FieldMap.resolve(FieldKeyLevel)) - if entry.Message != "" { - fixedKeys = append(fixedKeys, f.FieldMap.resolve(FieldKeyMsg)) - } - if entry.err != "" { - fixedKeys = append(fixedKeys, f.FieldMap.resolve(FieldKeyLogrusError)) - } - if entry.HasCaller() { - if f.CallerPrettyfier != nil { - funcVal, fileVal = f.CallerPrettyfier(entry.Caller) - } else { - funcVal = entry.Caller.Function - fileVal = fmt.Sprintf("%s:%d", entry.Caller.File, entry.Caller.Line) - } - - if funcVal != "" { - fixedKeys = append(fixedKeys, f.FieldMap.resolve(FieldKeyFunc)) - } - if fileVal != "" { - fixedKeys = append(fixedKeys, f.FieldMap.resolve(FieldKeyFile)) - } - } - - if !f.DisableSorting { - if f.SortingFunc == nil { - sort.Strings(keys) - fixedKeys = append(fixedKeys, keys...) - } else { - if !f.isColored() { - fixedKeys = append(fixedKeys, keys...) - f.SortingFunc(fixedKeys) - } else { - f.SortingFunc(keys) - } - } - } else { - fixedKeys = append(fixedKeys, keys...) 
- } - - var b *bytes.Buffer - if entry.Buffer != nil { - b = entry.Buffer - } else { - b = &bytes.Buffer{} - } - - f.terminalInitOnce.Do(func() { f.init(entry) }) - - timestampFormat := f.TimestampFormat - if timestampFormat == "" { - timestampFormat = defaultTimestampFormat - } - if f.isColored() { - f.printColored(b, entry, keys, data, timestampFormat) - } else { - - for _, key := range fixedKeys { - var value interface{} - switch { - case key == f.FieldMap.resolve(FieldKeyTime): - value = entry.Time.Format(timestampFormat) - case key == f.FieldMap.resolve(FieldKeyLevel): - value = entry.Level.String() - case key == f.FieldMap.resolve(FieldKeyMsg): - value = entry.Message - case key == f.FieldMap.resolve(FieldKeyLogrusError): - value = entry.err - case key == f.FieldMap.resolve(FieldKeyFunc) && entry.HasCaller(): - value = funcVal - case key == f.FieldMap.resolve(FieldKeyFile) && entry.HasCaller(): - value = fileVal - default: - value = data[key] - } - f.appendKeyValue(b, key, value) - } - } - - b.WriteByte('\n') - return b.Bytes(), nil -} - -func (f *TextFormatter) printColored(b *bytes.Buffer, entry *Entry, keys []string, data Fields, timestampFormat string) { - var levelColor int - switch entry.Level { - case DebugLevel, TraceLevel: - levelColor = gray - case WarnLevel: - levelColor = yellow - case ErrorLevel, FatalLevel, PanicLevel: - levelColor = red - case InfoLevel: - levelColor = blue - default: - levelColor = blue - } - - levelText := strings.ToUpper(entry.Level.String()) - if !f.DisableLevelTruncation && !f.PadLevelText { - levelText = levelText[0:4] - } - if f.PadLevelText { - // Generates the format string used in the next line, for example "%-6s" or "%-7s". - // Based on the max level text length. - formatString := "%-" + strconv.Itoa(f.levelTextMaxLength) + "s" - // Formats the level text by appending spaces up to the max length, for example: - // - "INFO " - // - "WARNING" - levelText = fmt.Sprintf(formatString, levelText) - } - - // Remove a single newline if it already exists in the message to keep - // the behavior of logrus text_formatter the same as the stdlib log package - entry.Message = strings.TrimSuffix(entry.Message, "\n") - - caller := "" - if entry.HasCaller() { - funcVal := fmt.Sprintf("%s()", entry.Caller.Function) - fileVal := fmt.Sprintf("%s:%d", entry.Caller.File, entry.Caller.Line) - - if f.CallerPrettyfier != nil { - funcVal, fileVal = f.CallerPrettyfier(entry.Caller) - } - - if fileVal == "" { - caller = funcVal - } else if funcVal == "" { - caller = fileVal - } else { - caller = fileVal + " " + funcVal - } - } - - switch { - case f.DisableTimestamp: - fmt.Fprintf(b, "\x1b[%dm%s\x1b[0m%s %-44s ", levelColor, levelText, caller, entry.Message) - case !f.FullTimestamp: - fmt.Fprintf(b, "\x1b[%dm%s\x1b[0m[%04d]%s %-44s ", levelColor, levelText, int(entry.Time.Sub(baseTimestamp)/time.Second), caller, entry.Message) - default: - fmt.Fprintf(b, "\x1b[%dm%s\x1b[0m[%s]%s %-44s ", levelColor, levelText, entry.Time.Format(timestampFormat), caller, entry.Message) - } - for _, k := range keys { - v := data[k] - fmt.Fprintf(b, " \x1b[%dm%s\x1b[0m=", levelColor, k) - f.appendValue(b, v) - } -} - -func (f *TextFormatter) needsQuoting(text string) bool { - if f.ForceQuote { - return true - } - if f.QuoteEmptyFields && len(text) == 0 { - return true - } - if f.DisableQuote { - return false - } - for _, ch := range text { - if !((ch >= 'a' && ch <= 'z') || - (ch >= 'A' && ch <= 'Z') || - (ch >= '0' && ch <= '9') || - ch == '-' || ch == '.' 
|| ch == '_' || ch == '/' || ch == '@' || ch == '^' || ch == '+') { - return true - } - } - return false -} - -func (f *TextFormatter) appendKeyValue(b *bytes.Buffer, key string, value interface{}) { - if b.Len() > 0 { - b.WriteByte(' ') - } - b.WriteString(key) - b.WriteByte('=') - f.appendValue(b, value) -} - -func (f *TextFormatter) appendValue(b *bytes.Buffer, value interface{}) { - stringVal, ok := value.(string) - if !ok { - stringVal = fmt.Sprint(value) - } - - if !f.needsQuoting(stringVal) { - b.WriteString(stringVal) - } else { - b.WriteString(fmt.Sprintf("%q", stringVal)) - } -} diff --git a/vendor/github.com/sirupsen/logrus/writer.go b/vendor/github.com/sirupsen/logrus/writer.go deleted file mode 100644 index 074fd4b8b..000000000 --- a/vendor/github.com/sirupsen/logrus/writer.go +++ /dev/null @@ -1,102 +0,0 @@ -package logrus - -import ( - "bufio" - "io" - "runtime" - "strings" -) - -// Writer at INFO level. See WriterLevel for details. -func (logger *Logger) Writer() *io.PipeWriter { - return logger.WriterLevel(InfoLevel) -} - -// WriterLevel returns an io.Writer that can be used to write arbitrary text to -// the logger at the given log level. Each line written to the writer will be -// printed in the usual way using formatters and hooks. The writer is part of an -// io.Pipe and it is the callers responsibility to close the writer when done. -// This can be used to override the standard library logger easily. -func (logger *Logger) WriterLevel(level Level) *io.PipeWriter { - return NewEntry(logger).WriterLevel(level) -} - -// Writer returns an io.Writer that writes to the logger at the info log level -func (entry *Entry) Writer() *io.PipeWriter { - return entry.WriterLevel(InfoLevel) -} - -// WriterLevel returns an io.Writer that writes to the logger at the given log level -func (entry *Entry) WriterLevel(level Level) *io.PipeWriter { - reader, writer := io.Pipe() - - var printFunc func(args ...interface{}) - - // Determine which log function to use based on the specified log level - switch level { - case TraceLevel: - printFunc = entry.Trace - case DebugLevel: - printFunc = entry.Debug - case InfoLevel: - printFunc = entry.Info - case WarnLevel: - printFunc = entry.Warn - case ErrorLevel: - printFunc = entry.Error - case FatalLevel: - printFunc = entry.Fatal - case PanicLevel: - printFunc = entry.Panic - default: - printFunc = entry.Print - } - - // Start a new goroutine to scan the input and write it to the logger using the specified print function. - // It splits the input into chunks of up to 64KB to avoid buffer overflows. 
- go entry.writerScanner(reader, printFunc) - - // Set a finalizer function to close the writer when it is garbage collected - runtime.SetFinalizer(writer, writerFinalizer) - - return writer -} - -// writerScanner scans the input from the reader and writes it to the logger -func (entry *Entry) writerScanner(reader *io.PipeReader, printFunc func(args ...interface{})) { - scanner := bufio.NewScanner(reader) - - // Set the buffer size to the maximum token size to avoid buffer overflows - scanner.Buffer(make([]byte, bufio.MaxScanTokenSize), bufio.MaxScanTokenSize) - - // Define a split function to split the input into chunks of up to 64KB - chunkSize := bufio.MaxScanTokenSize // 64KB - splitFunc := func(data []byte, atEOF bool) (int, []byte, error) { - if len(data) >= chunkSize { - return chunkSize, data[:chunkSize], nil - } - - return bufio.ScanLines(data, atEOF) - } - - // Use the custom split function to split the input - scanner.Split(splitFunc) - - // Scan the input and write it to the logger using the specified print function - for scanner.Scan() { - printFunc(strings.TrimRight(scanner.Text(), "\r\n")) - } - - // If there was an error while scanning the input, log an error - if err := scanner.Err(); err != nil { - entry.Errorf("Error while reading from Writer: %s", err) - } - - // Close the reader when we are done - reader.Close() -} - -// WriterFinalizer is a finalizer function that closes then given writer when it is garbage collected -func writerFinalizer(writer *io.PipeWriter) { - writer.Close() -} diff --git a/vendor/github.com/sivchari/containedctx/.golangci.yml b/vendor/github.com/sivchari/containedctx/.golangci.yml deleted file mode 100644 index f687df836..000000000 --- a/vendor/github.com/sivchari/containedctx/.golangci.yml +++ /dev/null @@ -1,38 +0,0 @@ -run: - timeout: 5m - skip-files: [] - -linters-settings: - govet: - enable-all: true - disable: - - fieldalignment - gocyclo: - min-complexity: 12 - misspell: - locale: US - godox: - keywords: - - FIXME - gofumpt: - extra-rules: true - -linters: - disable-all: true - enable: - - govet - - revive - - goimports - - staticcheck - - gosimple - - unused - - godox - - gofumpt - - misspell - - gocyclo - -issues: - exclude-use-default: true - max-per-linter: 0 - max-same-issues: 0 - exclude: [] diff --git a/vendor/github.com/sivchari/containedctx/LICENCE b/vendor/github.com/sivchari/containedctx/LICENCE deleted file mode 100644 index 5185ec09a..000000000 --- a/vendor/github.com/sivchari/containedctx/LICENCE +++ /dev/null @@ -1,21 +0,0 @@ -MIT License - -Copyright (c) 2021 sivchari - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. diff --git a/vendor/github.com/sivchari/containedctx/README.md b/vendor/github.com/sivchari/containedctx/README.md deleted file mode 100644 index 0c2dd208d..000000000 --- a/vendor/github.com/sivchari/containedctx/README.md +++ /dev/null @@ -1,64 +0,0 @@ -# containedctx - -[![test_and_lint](https://github.com/sivchari/containedctx/actions/workflows/ci.yml/badge.svg?branch=main)](https://github.com/sivchari/containedctx/actions/workflows/ci.yml) - -containedctx is a linter that detects struct contained context.Context field. -This is discouraged technique in favour of passing context as first argument of method or function. -For rationale please read [Contexts and structs](https://go.dev/blog/context-and-structs) the Go blog post. - -## Instruction - -```sh -go install github.com/sivchari/containedctx/cmd/containedctx -``` - -## Usage - -```go -package main - -import "context" - -type ok struct { - i int - s string -} - -type ng struct { - ctx context.Context -} - -type empty struct{} -``` - -```console -go vet -vettool=(which containedctx) ./... - -# a -./main.go:11:2: found a struct that contains a context.Context field -``` - - -## CI - -### CircleCI - -```yaml -- run: - name: install containedctx - command: go install github.com/sivchari/containedctx/cmd/containedctx - -- run: - name: run containedctx - command: go vet -vettool=`which containedctx` ./... -``` - -### GitHub Actions - -```yaml -- name: install containedctx - run: go install github.com/sivchari/containedctx/cmd/containedctx - -- name: run containedctx - run: go vet -vettool=`which containedctx` ./... 
-``` diff --git a/vendor/github.com/sivchari/containedctx/containedctx.go b/vendor/github.com/sivchari/containedctx/containedctx.go deleted file mode 100644 index 0260d6a6e..000000000 --- a/vendor/github.com/sivchari/containedctx/containedctx.go +++ /dev/null @@ -1,45 +0,0 @@ -package containedctx - -import ( - "go/ast" - - "golang.org/x/tools/go/analysis" - "golang.org/x/tools/go/analysis/passes/inspect" - "golang.org/x/tools/go/ast/inspector" -) - -const doc = "containedctx is a linter that detects struct contained context.Context field" - -// Analyzer is the contanedctx analyzer -var Analyzer = &analysis.Analyzer{ - Name: "containedctx", - Doc: doc, - Run: run, - Requires: []*analysis.Analyzer{ - inspect.Analyzer, - }, -} - -func run(pass *analysis.Pass) (interface{}, error) { - inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) - - nodeFilter := []ast.Node{ - (*ast.StructType)(nil), - } - - inspect.Preorder(nodeFilter, func(n ast.Node) { - switch structTyp := n.(type) { - case *ast.StructType: - if structTyp.Fields.List == nil { - return - } - for _, field := range structTyp.Fields.List { - if pass.TypesInfo.TypeOf(field.Type).String() == "context.Context" { - pass.Reportf(field.Pos(), "found a struct that contains a context.Context field") - } - } - } - }) - - return nil, nil -} diff --git a/vendor/github.com/sivchari/nosnakecase/.gitignore b/vendor/github.com/sivchari/nosnakecase/.gitignore deleted file mode 100644 index 66fd13c90..000000000 --- a/vendor/github.com/sivchari/nosnakecase/.gitignore +++ /dev/null @@ -1,15 +0,0 @@ -# Binaries for programs and plugins -*.exe -*.exe~ -*.dll -*.so -*.dylib - -# Test binary, built with `go test -c` -*.test - -# Output of the go coverage tool, specifically when used with LiteIDE -*.out - -# Dependency directories (remove the comment below to include it) -# vendor/ diff --git a/vendor/github.com/sivchari/nosnakecase/.golangci.yml b/vendor/github.com/sivchari/nosnakecase/.golangci.yml deleted file mode 100644 index 31e05c4ee..000000000 --- a/vendor/github.com/sivchari/nosnakecase/.golangci.yml +++ /dev/null @@ -1,40 +0,0 @@ -run: - timeout: 5m - skip-files: [] - go: '1.17' - -linters-settings: - govet: - enable-all: true - disable: - - fieldalignment - gocyclo: - min-complexity: 18 - misspell: - locale: US - godox: - keywords: - - FIXME - gofumpt: - extra-rules: true - -linters: - disable-all: true - enable: - - govet - - revive - - goimports - - staticcheck - - gosimple - - unused - - godox - - gofumpt - - misspell - - gocyclo - -issues: - exclude-use-default: true - max-per-linter: 0 - max-same-issues: 0 - exclude: [] - diff --git a/vendor/github.com/sivchari/nosnakecase/LICENSE b/vendor/github.com/sivchari/nosnakecase/LICENSE deleted file mode 100644 index fb4127677..000000000 --- a/vendor/github.com/sivchari/nosnakecase/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -MIT License - -Copyright (c) 2022 sivchari - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. 
- -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. diff --git a/vendor/github.com/sivchari/nosnakecase/README.md b/vendor/github.com/sivchari/nosnakecase/README.md deleted file mode 100644 index 69bb66046..000000000 --- a/vendor/github.com/sivchari/nosnakecase/README.md +++ /dev/null @@ -1,224 +0,0 @@ -# nosnakecase -nosnakecase is a linter that detects snake case of variable naming and function name. - -## Instruction - -```sh -go install github.com/sivchari/nosnakecase/cmd/nosnakecase@latest -``` - -## Usage - -```go -package sandbox - -// global variable name with underscore. -var v_v = 0 // want "v_v is used under score. You should use mixedCap or MixedCap." - -// global constant name with underscore. -const c_c = 0 // want "c_c is used under score. You should use mixedCap or MixedCap." - -// struct name with underscore. -type S_a struct { // want "S_a is used under score. You should use mixedCap or MixedCap." - fi int -} - -// non-exported struct field name with underscore. -type Sa struct { - fi_a int // // want "fi_a is used under score. You should use mixedCap or MixedCap." -} - -// function as struct field, with parameter name with underscore. -type Sb struct { - fib func(p_a int) // want "p_a is used under score. You should use mixedCap or MixedCap." -} - -// exported struct field with underscore. -type Sc struct { - Fi_A int // want "Fi_A is used under score. You should use mixedCap or MixedCap." -} - -// function as struct field, with return name with underscore. -type Sd struct { - fib func(p int) (r_a int) // want "r_a is used under score. You should use mixedCap or MixedCap." -} - -// interface name with underscore. -type I_a interface { // want "I_a is used under score. You should use mixedCap or MixedCap." - fn(p int) -} - -// interface with parameter name with underscore. -type Ia interface { - fn(p_a int) // want "p_a is used under score. You should use mixedCap or MixedCap." -} - -// interface with parameter name with underscore. -type Ib interface { - Fn(p_a int) // want "p_a is used under score. You should use mixedCap or MixedCap." -} - -// function as struct field, with return name with underscore. -type Ic interface { - Fn_a() // want "Fn_a is used under score. You should use mixedCap or MixedCap." -} - -// interface with return name with underscore. -type Id interface { - Fn() (r_a int) // want "r_a is used under score. You should use mixedCap or MixedCap." -} - -// function name with underscore. -func f_a() {} // want "f_a is used under score. You should use mixedCap or MixedCap." - -// function's parameter name with underscore. -func fb(p_a int) {} // want "p_a is used under score. You should use mixedCap or MixedCap." - -// named return with underscore. -func fc() (r_b int) { // want "r_b is used under score. You should use mixedCap or MixedCap." - return 0 -} - -// local variable (short declaration) with underscore. -func fd(p int) int { - v_b := p * 2 // want "v_b is used under score. You should use mixedCap or MixedCap." - - return v_b // want "v_b is used under score. You should use mixedCap or MixedCap." -} - -// local constant with underscore. 
-func fe(p int) int { - const v_b = 2 // want "v_b is used under score. You should use mixedCap or MixedCap." - - return v_b * p // want "v_b is used under score. You should use mixedCap or MixedCap." -} - -// local variable with underscore. -func ff(p int) int { - var v_b = 2 // want "v_b is used under score. You should use mixedCap or MixedCap." - - return v_b * p // want "v_b is used under score. You should use mixedCap or MixedCap." -} - -// inner function, parameter name with underscore. -func fg() { - fgl := func(p_a int) {} // want "p_a is used under score. You should use mixedCap or MixedCap." - fgl(1) -} - -type Foo struct{} - -// method name with underscore. -func (f Foo) f_a() {} // want "f_a is used under score. You should use mixedCap or MixedCap." - -// method's parameter name with underscore. -func (f Foo) fb(p_a int) {} // want "p_a is used under score. You should use mixedCap or MixedCap." - -// named return with underscore. -func (f Foo) fc() (r_b int) { return 0 } // want "r_b is used under score. You should use mixedCap or MixedCap." - -// local variable (short declaration) with underscore. -func (f Foo) fd(p int) int { - v_b := p * 2 // want "v_b is used under score. You should use mixedCap or MixedCap." - - return v_b // want "v_b is used under score. You should use mixedCap or MixedCap." -} - -// local constant with underscore. -func (f Foo) fe(p int) int { - const v_b = 2 // want "v_b is used under score. You should use mixedCap or MixedCap." - - return v_b * p // want "v_b is used under score. You should use mixedCap or MixedCap." -} - -// local variable with underscore. -func (f Foo) ff(p int) int { - var v_b = 2 // want "v_b is used under score. You should use mixedCap or MixedCap." - - return v_b * p // want "v_b is used under score. You should use mixedCap or MixedCap." -} - -func fna(a, p_a int) {} // want "p_a is used under score. You should use mixedCap or MixedCap." - -func fna1(a string, p_a int) {} // want "p_a is used under score. You should use mixedCap or MixedCap." - -func fnb(a, b, p_a int) {} // want "p_a is used under score. You should use mixedCap or MixedCap." - -func fnb1(a, b string, p_a int) {} // want "p_a is used under score. You should use mixedCap or MixedCap." - -func fnd( - p_a int, // want "p_a is used under score. You should use mixedCap or MixedCap." - p_b int, // want "p_b is used under score. You should use mixedCap or MixedCap." - p_c int, // want "p_c is used under score. You should use mixedCap or MixedCap." -) { -} -``` - -```console -go vet -vettool=(which nosnakecase) ./... - -# command-line-arguments -# a -./a.go:4:5: v_v is used under score. You should use mixedCap or MixedCap. -./a.go:7:7: c_c is used under score. You should use mixedCap or MixedCap. -./a.go:10:6: S_a is used under score. You should use mixedCap or MixedCap. -./a.go:16:2: fi_a is used under score. You should use mixedCap or MixedCap. -./a.go:21:11: p_a is used under score. You should use mixedCap or MixedCap. -./a.go:26:2: Fi_A is used under score. You should use mixedCap or MixedCap. -./a.go:31:19: r_a is used under score. You should use mixedCap or MixedCap. -./a.go:35:6: I_a is used under score. You should use mixedCap or MixedCap. -./a.go:41:5: p_a is used under score. You should use mixedCap or MixedCap. -./a.go:46:5: p_a is used under score. You should use mixedCap or MixedCap. -./a.go:51:2: Fn_a is used under score. You should use mixedCap or MixedCap. -./a.go:56:8: r_a is used under score. You should use mixedCap or MixedCap. 
-./a.go:60:6: f_a is used under score. You should use mixedCap or MixedCap. -./a.go:63:9: p_a is used under score. You should use mixedCap or MixedCap. -./a.go:66:12: r_b is used under score. You should use mixedCap or MixedCap. -./a.go:72:2: v_b is used under score. You should use mixedCap or MixedCap. -./a.go:74:9: v_b is used under score. You should use mixedCap or MixedCap. -./a.go:79:8: v_b is used under score. You should use mixedCap or MixedCap. -./a.go:81:9: v_b is used under score. You should use mixedCap or MixedCap. -./a.go:86:6: v_b is used under score. You should use mixedCap or MixedCap. -./a.go:88:9: v_b is used under score. You should use mixedCap or MixedCap. -./a.go:93:14: p_a is used under score. You should use mixedCap or MixedCap. -./a.go:100:14: f_a is used under score. You should use mixedCap or MixedCap. -./a.go:103:17: p_a is used under score. You should use mixedCap or MixedCap. -./a.go:106:20: r_b is used under score. You should use mixedCap or MixedCap. -./a.go:110:2: v_b is used under score. You should use mixedCap or MixedCap. -./a.go:112:9: v_b is used under score. You should use mixedCap or MixedCap. -./a.go:117:8: v_b is used under score. You should use mixedCap or MixedCap. -./a.go:119:9: v_b is used under score. You should use mixedCap or MixedCap. -./a.go:124:6: v_b is used under score. You should use mixedCap or MixedCap. -./a.go:126:9: v_b is used under score. You should use mixedCap or MixedCap. -./a.go:129:13: p_a is used under score. You should use mixedCap or MixedCap. -./a.go:131:21: p_a is used under score. You should use mixedCap or MixedCap. -./a.go:133:16: p_a is used under score. You should use mixedCap or MixedCap. -./a.go:135:24: p_a is used under score. You should use mixedCap or MixedCap. -./a.go:138:2: p_a is used under score. You should use mixedCap or MixedCap. -./a.go:139:2: p_b is used under score. You should use mixedCap or MixedCap. -./a.go:140:2: p_c is used under score. You should use mixedCap or MixedCap. -``` - -## CI - -### CircleCI - -```yaml -- run: - name: install nosnakecase - command: go install github.com/sivchari/nosnakecase/cmd/nosnakecase@latest - -- run: - name: run nosnakecase - command: go vet -vettool=`which nosnakecase` ./... -``` - -### GitHub Actions - -```yaml -- name: install nosnakecase - run: go install github.com/sivchari/nosnakecase/cmd/nosnakecase@latest - -- name: run nosnakecase - run: go vet -vettool=`which nosnakecase` ./... -``` diff --git a/vendor/github.com/sivchari/nosnakecase/nosnakecase.go b/vendor/github.com/sivchari/nosnakecase/nosnakecase.go deleted file mode 100644 index 88cf70e3f..000000000 --- a/vendor/github.com/sivchari/nosnakecase/nosnakecase.go +++ /dev/null @@ -1,63 +0,0 @@ -package nosnakecase - -import ( - "go/ast" - "go/token" - "strings" - - "golang.org/x/tools/go/analysis" - "golang.org/x/tools/go/analysis/passes/inspect" - "golang.org/x/tools/go/ast/inspector" -) - -const doc = "nosnakecase is a linter that detects snake case of variable naming and function name." - -// Analyzer is a nosnakecase linter. 
-var Analyzer = &analysis.Analyzer{ - Name: "nosnakecase", - Doc: doc, - Run: run, - Requires: []*analysis.Analyzer{ - inspect.Analyzer, - }, -} - -func run(pass *analysis.Pass) (interface{}, error) { - result := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) - - nodeFilter := []ast.Node{ - (*ast.Ident)(nil), - } - - result.Preorder(nodeFilter, func(n ast.Node) { - switch n := n.(type) { - case *ast.Ident: - report(pass, n.Pos(), n.Name) - } - }) - - return nil, nil -} - -func report(pass *analysis.Pass, pos token.Pos, name string) { - // skip import _ "xxx" - if name == "_" { - return - } - - // skip package xxx_test - if strings.Contains(name, "_test") { - return - } - - // If prefix is Test or Benchmark, Fuzz, skip - // FYI https://go.dev/blog/examples - if strings.HasPrefix(name, "Test") || strings.HasPrefix(name, "Benchmark") || strings.HasPrefix(name, "Fuzz") { - return - } - - if strings.Contains(name, "_") { - pass.Reportf(pos, "%s contains underscore. You should use mixedCap or MixedCap.", name) - return - } -} diff --git a/vendor/github.com/sivchari/tenv/.gitignore b/vendor/github.com/sivchari/tenv/.gitignore deleted file mode 100644 index 83470100f..000000000 --- a/vendor/github.com/sivchari/tenv/.gitignore +++ /dev/null @@ -1,17 +0,0 @@ -# Binaries for programs and plugins -*.exe -*.exe~ -*.dll -*.so -*.dylib - -# Test binary, built with `go test -c` -*.test - -# Output of the go coverage tool, specifically when used with LiteIDE -*.out - -.idea - -# Dependency directories (remove the comment below to include it) -# vendor/ diff --git a/vendor/github.com/sivchari/tenv/.golangci.yml b/vendor/github.com/sivchari/tenv/.golangci.yml deleted file mode 100644 index f687df836..000000000 --- a/vendor/github.com/sivchari/tenv/.golangci.yml +++ /dev/null @@ -1,38 +0,0 @@ -run: - timeout: 5m - skip-files: [] - -linters-settings: - govet: - enable-all: true - disable: - - fieldalignment - gocyclo: - min-complexity: 12 - misspell: - locale: US - godox: - keywords: - - FIXME - gofumpt: - extra-rules: true - -linters: - disable-all: true - enable: - - govet - - revive - - goimports - - staticcheck - - gosimple - - unused - - godox - - gofumpt - - misspell - - gocyclo - -issues: - exclude-use-default: true - max-per-linter: 0 - max-same-issues: 0 - exclude: [] diff --git a/vendor/github.com/sivchari/tenv/LICENSE b/vendor/github.com/sivchari/tenv/LICENSE deleted file mode 100644 index 5185ec09a..000000000 --- a/vendor/github.com/sivchari/tenv/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -MIT License - -Copyright (c) 2021 sivchari - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. diff --git a/vendor/github.com/sivchari/tenv/README.md b/vendor/github.com/sivchari/tenv/README.md deleted file mode 100644 index c5d004773..000000000 --- a/vendor/github.com/sivchari/tenv/README.md +++ /dev/null @@ -1,107 +0,0 @@ -# tenv - -![tenv Gopher](./tenv.png "Gopher") - - -[![test_and_lint](https://github.com/sivchari/tenv/actions/workflows/workflows.yml/badge.svg?branch=main)](https://github.com/sivchari/tenv/actions/workflows/workflows.yml) - -tenv is analyzer that detects using os.Setenv instead of t.Setenv since Go1.17 - -## Instruction - -```sh -go install github.com/sivchari/tenv/cmd/tenv -``` - -## Usage - -```go -package main - -import ( - "fmt" - "os" - "testing" -) - -func TestMain(t *testing.T) { - fmt.Println(os.Getenv("GO")) - os.Setenv("GO", "HACKING GOPHER") -} - -func TestMain2(t *testing.T) { - fmt.Println(os.Getenv("GO")) -} - -func helper() { - os.Setenv("GO", "HACKING GOPHER") -} -``` - -```console -go vet -vettool=(which tenv) ./... - -# a -./main_test.go:11:2: os.Setenv() can be replaced by `t.Setenv()` in TestMain -``` - -### option - -The option `all` will run against whole test files (`_test.go`) regardless of method/function signatures. - -By default, only methods that take `*testing.T`, `*testing.B`, and `testing.TB` as arguments are checked. - -```go -package main - -import ( - "fmt" - "os" - "testing" -) - -func TestMain(t *testing.T) { - fmt.Println(os.Getenv("GO")) - os.Setenv("GO", "HACKING GOPHER") -} - -func TestMain2(t *testing.T) { - fmt.Println(os.Getenv("GO")) -} - -func helper() { - os.Setenv("GO", "HACKING GOPHER") -} -``` - -```console -go vet -vettool=(which tenv) -tenv.all ./... - -# a -./main_test.go:11:2: os.Setenv() can be replaced by `t.Setenv()` in TestMain -./main_test.go:19:2: os.Setenv() can be replaced by `testing.Setenv()` in helper -``` - -## CI - -### CircleCI - -```yaml -- run: - name: install tenv - command: go install github.com/sivchari/tenv - -- run: - name: run tenv - command: go vet -vettool=`which tenv` ./... -``` - -### GitHub Actions - -```yaml -- name: install tenv - run: go install github.com/sivchari/tenv - -- name: run tenv - run: go vet -vettool=`which tenv` ./... 
-``` diff --git a/vendor/github.com/sivchari/tenv/tenv.go b/vendor/github.com/sivchari/tenv/tenv.go deleted file mode 100644 index fcff98d05..000000000 --- a/vendor/github.com/sivchari/tenv/tenv.go +++ /dev/null @@ -1,213 +0,0 @@ -package tenv - -import ( - "go/ast" - "strings" - - "golang.org/x/tools/go/analysis" - "golang.org/x/tools/go/analysis/passes/inspect" - "golang.org/x/tools/go/ast/inspector" -) - -const doc = "tenv is analyzer that detects using os.Setenv instead of t.Setenv since Go1.17" - -// Analyzer is tenv analyzer -var Analyzer = &analysis.Analyzer{ - Name: "tenv", - Doc: doc, - Run: run, - Requires: []*analysis.Analyzer{ - inspect.Analyzer, - }, -} - -var ( - A = "all" - aflag bool -) - -func init() { - Analyzer.Flags.BoolVar(&aflag, A, false, "the all option will run against all method in test file") -} - -func run(pass *analysis.Pass) (interface{}, error) { - inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) - - nodeFilter := []ast.Node{ - (*ast.FuncDecl)(nil), - (*ast.FuncLit)(nil), - } - - inspect.Preorder(nodeFilter, func(n ast.Node) { - switch n := n.(type) { - case *ast.FuncDecl: - checkFuncDecl(pass, n, pass.Fset.File(n.Pos()).Name()) - case *ast.FuncLit: - checkFuncLit(pass, n, pass.Fset.File(n.Pos()).Name()) - } - }) - - return nil, nil -} - -func checkFuncDecl(pass *analysis.Pass, f *ast.FuncDecl, fileName string) { - argName, ok := targetRunner(f.Type.Params.List, fileName) - if !ok { - return - } - checkStmts(pass, f.Body.List, f.Name.Name, argName) -} - -func checkFuncLit(pass *analysis.Pass, f *ast.FuncLit, fileName string) { - argName, ok := targetRunner(f.Type.Params.List, fileName) - if !ok { - return - } - checkStmts(pass, f.Body.List, "anonymous function", argName) -} - -func checkStmts(pass *analysis.Pass, stmts []ast.Stmt, funcName, argName string) { - for _, stmt := range stmts { - switch stmt := stmt.(type) { - case *ast.ExprStmt: - if !checkExprStmt(pass, stmt, funcName, argName) { - continue - } - case *ast.IfStmt: - if !checkIfStmt(pass, stmt, funcName, argName) { - continue - } - case *ast.AssignStmt: - if !checkAssignStmt(pass, stmt, funcName, argName) { - continue - } - } - } -} - -func checkExprStmt(pass *analysis.Pass, stmt *ast.ExprStmt, funcName, argName string) bool { - callExpr, ok := stmt.X.(*ast.CallExpr) - if !ok { - return false - } - fun, ok := callExpr.Fun.(*ast.SelectorExpr) - if !ok { - return false - } - x, ok := fun.X.(*ast.Ident) - if !ok { - return false - } - targetName := x.Name + "." + fun.Sel.Name - if targetName == "os.Setenv" { - if argName == "" { - argName = "testing" - } - pass.Reportf(stmt.Pos(), "os.Setenv() can be replaced by `%s.Setenv()` in %s", argName, funcName) - } - return true -} - -func checkIfStmt(pass *analysis.Pass, stmt *ast.IfStmt, funcName, argName string) bool { - assignStmt, ok := stmt.Init.(*ast.AssignStmt) - if !ok { - return false - } - rhs, ok := assignStmt.Rhs[0].(*ast.CallExpr) - if !ok { - return false - } - fun, ok := rhs.Fun.(*ast.SelectorExpr) - if !ok { - return false - } - x, ok := fun.X.(*ast.Ident) - if !ok { - return false - } - targetName := x.Name + "." 
+ fun.Sel.Name - if targetName == "os.Setenv" { - if argName == "" { - argName = "testing" - } - pass.Reportf(stmt.Pos(), "os.Setenv() can be replaced by `%s.Setenv()` in %s", argName, funcName) - } - return true -} - -func checkAssignStmt(pass *analysis.Pass, stmt *ast.AssignStmt, funcName, argName string) bool { - rhs, ok := stmt.Rhs[0].(*ast.CallExpr) - if !ok { - return false - } - fun, ok := rhs.Fun.(*ast.SelectorExpr) - if !ok { - return false - } - x, ok := fun.X.(*ast.Ident) - if !ok { - return false - } - targetName := x.Name + "." + fun.Sel.Name - if targetName == "os.Setenv" { - if argName == "" { - argName = "testing" - } - pass.Reportf(stmt.Pos(), "os.Setenv() can be replaced by `%s.Setenv()` in %s", argName, funcName) - } - return true -} - -func targetRunner(params []*ast.Field, fileName string) (string, bool) { - for _, p := range params { - switch typ := p.Type.(type) { - case *ast.StarExpr: - if checkStarExprTarget(typ) { - if len(p.Names) == 0 { - return "", false - } - argName := p.Names[0].Name - return argName, true - } - case *ast.SelectorExpr: - if checkSelectorExprTarget(typ) { - if len(p.Names) == 0 { - return "", false - } - argName := p.Names[0].Name - return argName, true - } - } - } - if aflag && strings.HasSuffix(fileName, "_test.go") { - return "", true - } - return "", false -} - -func checkStarExprTarget(typ *ast.StarExpr) bool { - selector, ok := typ.X.(*ast.SelectorExpr) - if !ok { - return false - } - x, ok := selector.X.(*ast.Ident) - if !ok { - return false - } - targetName := x.Name + "." + selector.Sel.Name - switch targetName { - case "testing.T", "testing.B", "testing.F": - return true - default: - return false - } -} - -func checkSelectorExprTarget(typ *ast.SelectorExpr) bool { - x, ok := typ.X.(*ast.Ident) - if !ok { - return false - } - targetName := x.Name + "." 
+ typ.Sel.Name - return targetName == "testing.TB" -} diff --git a/vendor/github.com/sivchari/tenv/tenv.png b/vendor/github.com/sivchari/tenv/tenv.png deleted file mode 100644 index 96dc967e3..000000000 Binary files a/vendor/github.com/sivchari/tenv/tenv.png and /dev/null differ diff --git a/vendor/github.com/sonatard/noctx/.gitignore b/vendor/github.com/sonatard/noctx/.gitignore deleted file mode 100644 index 2d830686d..000000000 --- a/vendor/github.com/sonatard/noctx/.gitignore +++ /dev/null @@ -1 +0,0 @@ -coverage.out diff --git a/vendor/github.com/sonatard/noctx/.golangci.yml b/vendor/github.com/sonatard/noctx/.golangci.yml deleted file mode 100644 index 55ebeebdb..000000000 --- a/vendor/github.com/sonatard/noctx/.golangci.yml +++ /dev/null @@ -1,14 +0,0 @@ -run: - linters-settings: - govet: - enable-all: true - linters: - enable-all: true - disable: - - gochecknoglobals - - gomnd - - gocognit - - nestif - - nilnil - - paralleltest - - varnamelen \ No newline at end of file diff --git a/vendor/github.com/sonatard/noctx/LICENSE b/vendor/github.com/sonatard/noctx/LICENSE deleted file mode 100644 index a00d5727f..000000000 --- a/vendor/github.com/sonatard/noctx/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -MIT License - -Copyright (c) 2020 sonatard - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. diff --git a/vendor/github.com/sonatard/noctx/Makefile b/vendor/github.com/sonatard/noctx/Makefile deleted file mode 100644 index 1a27f6b59..000000000 --- a/vendor/github.com/sonatard/noctx/Makefile +++ /dev/null @@ -1,16 +0,0 @@ -.PHONY: all imports test lint - -all: imports test lint - -imports: - goimports -w ./ - -test: - go test -race ./... - -test_coverage: - go test -race -coverprofile=coverage.out -covermode=atomic ./... - -lint: - golangci-lint run ./... - diff --git a/vendor/github.com/sonatard/noctx/README.md b/vendor/github.com/sonatard/noctx/README.md deleted file mode 100644 index b3793fc96..000000000 --- a/vendor/github.com/sonatard/noctx/README.md +++ /dev/null @@ -1,174 +0,0 @@ -# noctx - -![](https://github.com/sonatard/noctx/workflows/CI/badge.svg) - -`noctx` finds sending http request without context.Context. - -You should use `noctx` if sending http request in your library. -Passing `context.Context` enables library user to cancel http request, getting trace information and so on. - -## Usage - - -### noctx with go vet - -go vet is a Go standard tool for analyzing source code. - -1. Install noctx. -```sh -$ go install github.com/sonatard/noctx/cmd/noctx@latest -``` - -2. 
noctx execute -```sh -$ go vet -vettool=`which noctx` main.go -./main.go:6:11: net/http.Get must not be called -``` - -### noctx with golangci-lint - -golangci-lint is a fast Go linters runner. - -1. Install golangci-lint. -[golangci-lint - Install](https://golangci-lint.run/usage/install/) - -2. Setup .golangci.yml -```yaml: -# Add noctx to enable linters. -linters: - enable: - - noctx - -# Or enable-all is true. -linters: - enable-all: true - disable: - - xxx # Add unused linter to disable linters. -``` - -3. noctx execute -```sh -# Use .golangci.yml -$ golangci-lint run - -# Only noctx execute -golangci-lint run --disable-all -E noctx -``` - -## Detection rules -- Executing following functions - - `net/http.Get` - - `net/http.Head` - - `net/http.Post` - - `net/http.PostForm` - - `(*net/http.Client).Get` - - `(*net/http.Client).Head` - - `(*net/http.Client).Post` - - `(*net/http.Client).PostForm` -- `http.Request` returned by `http.NewRequest` function and passes it to other function. - -## How to fix -- Send http request using `(*http.Client).Do(*http.Request)` method. -- In Go 1.13 and later, use `http.NewRequestWithContext` function instead of using `http.NewRequest` function. -- In Go 1.12 and earlier, call `(http.Request).WithContext(ctx)` after `http.NewRequest`. - -`(http.Request).WithContext(ctx)` has a disadvantage of performance because it returns a copy of `http.Request`. Use `http.NewRequestWithContext` function if you only support Go1.13 or later. - - -If your library already provides functions that don't accept context, you define a new function that accepts context and make the existing function a wrapper for a new function. - - -```go -// Before fix code -// Sending an HTTP request but not accepting context -func Send(body io.Reader) error { - req,err := http.NewRequest(http.MethodPost, "http://example.com", body) - if err != nil { - return err - } - _, err = http.DefaultClient.Do(req) - if err != nil{ - return err - } - - return nil -} -``` - -```go -// After fix code -func Send(body io.Reader) error { - // Pass context.Background() to SendWithContext - return SendWithContext(context.Background(), body) -} - -// Sending an HTTP request and accepting context -func SendWithContext(ctx context.Context, body io.Reader) error { - // Change NewRequest to NewRequestWithContext and pass context it - req, err := http.NewRequestWithContext(ctx, http.MethodPost, "http://example.com", body) - if err != nil { - return nil - } - _, err = http.DefaultClient.Do(req) - if err != nil { - return err - } - - return nil -} -``` - -## Detection sample - -```go -package main - -import ( - "context" - "net/http" -) - -func main() { - const url = "http://example.com" - http.Get(url) // want `net/http\.Get must not be called` - http.Head(url) // want `net/http\.Head must not be called` - http.Post(url, "", nil) // want `net/http\.Post must not be called` - http.PostForm(url, nil) // want `net/http\.PostForm must not be called` - - cli := &http.Client{} - cli.Get(url) // want `\(\*net/http\.Client\)\.Get must not be called` - cli.Head(url) // want `\(\*net/http\.Client\)\.Head must not be called` - cli.Post(url, "", nil) // want `\(\*net/http\.Client\)\.Post must not be called` - cli.PostForm(url, nil) // want `\(\*net/http\.Client\)\.PostForm must not be called` - - req, _ := http.NewRequest(http.MethodPost, url, nil) // want `should rewrite http.NewRequestWithContext or add \(\*Request\).WithContext` - cli.Do(req) - - ctx := context.Background() - req2, _ := http.NewRequestWithContext(ctx, 
http.MethodPost, url, nil) // OK - cli.Do(req2) - - req3, _ := http.NewRequest(http.MethodPost, url, nil) // OK - req3 = req3.WithContext(ctx) - cli.Do(req3) - - f2 := func(req *http.Request, ctx context.Context) *http.Request { - return req - } - req4, _ := http.NewRequest(http.MethodPost, url, nil) // want `should rewrite http.NewRequestWithContext or add \(\*Request\).WithContext` - req4 = f2(req4, ctx) - cli.Do(req4) - - req5, _ := func() (*http.Request, error) { - return http.NewRequest(http.MethodPost, url, nil) // want `should rewrite http.NewRequestWithContext or add \(\*Request\).WithContext` - }() - cli.Do(req5) - -} -``` - -## Reference -- [net/http - NewRequest](https://golang.org/pkg/net/http/#NewRequest) -- [net/http - NewRequestWithContext](https://golang.org/pkg/net/http/#NewRequestWithContext) -- [net/http - Request.WithContext](https://golang.org/pkg/net/http/#Request.WithContext) - diff --git a/vendor/github.com/sonatard/noctx/ngfunc/main.go b/vendor/github.com/sonatard/noctx/ngfunc/main.go deleted file mode 100644 index 46306218d..000000000 --- a/vendor/github.com/sonatard/noctx/ngfunc/main.go +++ /dev/null @@ -1,61 +0,0 @@ -package ngfunc - -import ( - "fmt" - "go/types" - - "github.com/gostaticanalysis/analysisutil" - "golang.org/x/tools/go/analysis" - "golang.org/x/tools/go/analysis/passes/buildssa" -) - -func Run(pass *analysis.Pass) (interface{}, error) { - ngFuncNames := []string{ - "net/http.Get", - "net/http.Head", - "net/http.Post", - "net/http.PostForm", - "(*net/http.Client).Get", - "(*net/http.Client).Head", - "(*net/http.Client).Post", - "(*net/http.Client).PostForm", - } - - ngFuncs := typeFuncs(pass, ngFuncNames) - if len(ngFuncs) == 0 { - return nil, nil - } - - reportFuncs := ngCalledFuncs(pass, ngFuncs) - report(pass, reportFuncs) - - return nil, nil -} - -func ngCalledFuncs(pass *analysis.Pass, ngFuncs []*types.Func) []*Report { - var reports []*Report - - ssa, ok := pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA) - if !ok { - panic(fmt.Sprintf("%T is not *buildssa.SSA", pass.ResultOf[buildssa.Analyzer])) - } - for _, sf := range ssa.SrcFuncs { - for _, b := range sf.Blocks { - for _, instr := range b.Instrs { - for _, ngFunc := range ngFuncs { - if analysisutil.Called(instr, nil, ngFunc) { - ngCalledFunc := &Report{ - Instruction: instr, - function: ngFunc, - } - reports = append(reports, ngCalledFunc) - - break - } - } - } - } - } - - return reports -} diff --git a/vendor/github.com/sonatard/noctx/ngfunc/report.go b/vendor/github.com/sonatard/noctx/ngfunc/report.go deleted file mode 100644 index e50051798..000000000 --- a/vendor/github.com/sonatard/noctx/ngfunc/report.go +++ /dev/null @@ -1,29 +0,0 @@ -package ngfunc - -import ( - "fmt" - "go/token" - "go/types" - - "golang.org/x/tools/go/analysis" - "golang.org/x/tools/go/ssa" -) - -type Report struct { - Instruction ssa.Instruction - function *types.Func -} - -func (n *Report) Pos() token.Pos { - return n.Instruction.Pos() -} - -func (n *Report) Message() string { - return fmt.Sprintf("%s must not be called", n.function.FullName()) -} - -func report(pass *analysis.Pass, reports []*Report) { - for _, report := range reports { - pass.Reportf(report.Pos(), report.Message()) - } -} diff --git a/vendor/github.com/sonatard/noctx/ngfunc/types.go b/vendor/github.com/sonatard/noctx/ngfunc/types.go deleted file mode 100644 index f1877386c..000000000 --- a/vendor/github.com/sonatard/noctx/ngfunc/types.go +++ /dev/null @@ -1,65 +0,0 @@ -package ngfunc - -import ( - "fmt" - "go/types" - "strings" - - 
"github.com/gostaticanalysis/analysisutil" - "golang.org/x/tools/go/analysis" -) - -var errNotFound = fmt.Errorf("function not found") - -func typeFuncs(pass *analysis.Pass, funcs []string) []*types.Func { - fs := make([]*types.Func, 0, len(funcs)) - - for _, fn := range funcs { - f, err := typeFunc(pass, fn) - if err != nil { - continue - } - - fs = append(fs, f) - } - - return fs -} - -func typeFunc(pass *analysis.Pass, funcName string) (*types.Func, error) { - ss := strings.Split(strings.TrimSpace(funcName), ".") - - switch len(ss) { - case 2: - // package function: pkgname.Func - f, ok := analysisutil.ObjectOf(pass, ss[0], ss[1]).(*types.Func) - if !ok || f == nil { - return nil, errNotFound - } - - return f, nil - case 3: - // method: (*pkgname.Type).Method - pkgname := strings.TrimLeft(ss[0], "(") - typename := strings.TrimRight(ss[1], ")") - - if pkgname != "" && pkgname[0] == '*' { - pkgname = pkgname[1:] - typename = "*" + typename - } - - typ := analysisutil.TypeOf(pass, pkgname, typename) - if typ == nil { - return nil, errNotFound - } - - m := analysisutil.MethodOf(typ, ss[2]) - if m == nil { - return nil, errNotFound - } - - return m, nil - } - - return nil, errNotFound -} diff --git a/vendor/github.com/sonatard/noctx/noctx.go b/vendor/github.com/sonatard/noctx/noctx.go deleted file mode 100644 index 89e0446ec..000000000 --- a/vendor/github.com/sonatard/noctx/noctx.go +++ /dev/null @@ -1,35 +0,0 @@ -package noctx - -import ( - "fmt" - "github.com/sonatard/noctx/ngfunc" - "github.com/sonatard/noctx/reqwithoutctx" - "golang.org/x/tools/go/analysis" - "golang.org/x/tools/go/analysis/passes/buildssa" -) - -var Analyzer = &analysis.Analyzer{ - Name: "noctx", - Doc: Doc, - Run: run, - RunDespiteErrors: false, - Requires: []*analysis.Analyzer{ - buildssa.Analyzer, - }, - ResultType: nil, - FactTypes: nil, -} - -const Doc = "noctx finds sending http request without context.Context" - -func run(pass *analysis.Pass) (interface{}, error) { - if _, err := ngfunc.Run(pass); err != nil { - return nil, fmt.Errorf("run: %w", err) - } - - if _, err := reqwithoutctx.Run(pass); err != nil { - return nil, fmt.Errorf("run: %w", err) - } - - return nil, nil -} diff --git a/vendor/github.com/sonatard/noctx/reqwithoutctx/main.go b/vendor/github.com/sonatard/noctx/reqwithoutctx/main.go deleted file mode 100644 index b09e1de1b..000000000 --- a/vendor/github.com/sonatard/noctx/reqwithoutctx/main.go +++ /dev/null @@ -1,14 +0,0 @@ -package reqwithoutctx - -import ( - "golang.org/x/tools/go/analysis" -) - -func Run(pass *analysis.Pass) (interface{}, error) { - analyzer := NewAnalyzer(pass) - reports := analyzer.Exec() - - report(pass, reports) - - return nil, nil -} diff --git a/vendor/github.com/sonatard/noctx/reqwithoutctx/report.go b/vendor/github.com/sonatard/noctx/reqwithoutctx/report.go deleted file mode 100644 index 1c94e3148..000000000 --- a/vendor/github.com/sonatard/noctx/reqwithoutctx/report.go +++ /dev/null @@ -1,26 +0,0 @@ -package reqwithoutctx - -import ( - "go/token" - - "golang.org/x/tools/go/analysis" - "golang.org/x/tools/go/ssa" -) - -type Report struct { - Instruction ssa.Instruction -} - -func (n *Report) Pos() token.Pos { - return n.Instruction.Pos() -} - -func (n *Report) Message() string { - return "should rewrite http.NewRequestWithContext or add (*Request).WithContext" -} - -func report(pass *analysis.Pass, reports []*Report) { - for _, report := range reports { - pass.Reportf(report.Pos(), report.Message()) - } -} diff --git 
a/vendor/github.com/sonatard/noctx/reqwithoutctx/ssa.go b/vendor/github.com/sonatard/noctx/reqwithoutctx/ssa.go deleted file mode 100644 index d7e0f5084..000000000 --- a/vendor/github.com/sonatard/noctx/reqwithoutctx/ssa.go +++ /dev/null @@ -1,185 +0,0 @@ -package reqwithoutctx - -import ( - "fmt" - "go/types" - - "github.com/gostaticanalysis/analysisutil" - - "golang.org/x/tools/go/analysis" - "golang.org/x/tools/go/analysis/passes/buildssa" - "golang.org/x/tools/go/ssa" -) - -//nolint:govet -type Analyzer struct { - Funcs []*ssa.Function - newRequestType types.Type - requestType types.Type -} - -func NewAnalyzer(pass *analysis.Pass) *Analyzer { - newRequestType := analysisutil.TypeOf(pass, "net/http", "NewRequest") - requestType := analysisutil.TypeOf(pass, "net/http", "*Request") - - ssa, ok := pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA) - if !ok { - panic(fmt.Sprintf("%T is not *buildssa.SSA", pass.ResultOf[buildssa.Analyzer])) - } - - return &Analyzer{ - Funcs: ssa.SrcFuncs, - newRequestType: newRequestType, - requestType: requestType, - } -} - -func (a *Analyzer) Exec() []*Report { - if a.newRequestType == nil || a.requestType == nil { - return []*Report{} - } - - usedReqs := a.usedReqs() - newReqs := a.requestsByNewRequest() - - return a.report(usedReqs, newReqs) -} - -func (a *Analyzer) report(usedReqs map[string]*ssa.Extract, newReqs map[*ssa.Call]*ssa.Extract) []*Report { - var reports []*Report - - for _, fReq := range usedReqs { - for newRequest, req := range newReqs { - if fReq == req { - reports = append(reports, &Report{Instruction: newRequest}) - } - } - } - - return reports -} - -func (a *Analyzer) usedReqs() map[string]*ssa.Extract { - reqExts := make(map[string]*ssa.Extract) - - for _, f := range a.Funcs { - for _, b := range f.Blocks { - for _, instr := range b.Instrs { - switch i := instr.(type) { - case *ssa.Call: - exts := a.usedReqByCall(i) - for _, ext := range exts { - key := i.String() + ext.String() - reqExts[key] = ext - } - case *ssa.UnOp: - ext := a.usedReqByUnOp(i) - if ext != nil { - key := i.String() + ext.String() - reqExts[key] = ext - } - case *ssa.Return: - exts := a.usedReqByReturn(i) - for _, ext := range exts { - key := i.String() + ext.String() - reqExts[key] = ext - } - } - } - } - } - - return reqExts -} - -func (a *Analyzer) usedReqByCall(call *ssa.Call) []*ssa.Extract { - args := call.Common().Args - exts := make([]*ssa.Extract, 0, len(args)) - - // skip net/http.Request method call - if call.Common().Signature().Recv() != nil && types.Identical(call.Value().Type(), a.requestType) { - return exts - } - - if len(args) == 0 { - return exts - } - - for _, arg := range args { - ext, ok := arg.(*ssa.Extract) - if !ok { - continue - } - - if !types.Identical(ext.Type(), a.requestType) { - continue - } - - exts = append(exts, ext) - } - - return exts -} - -func (a *Analyzer) usedReqByUnOp(op *ssa.UnOp) *ssa.Extract { - if ext, ok := op.X.(*ssa.Extract); ok && types.Identical(ext.Type(), a.requestType) { - return ext - } - - return nil -} - -func (a *Analyzer) usedReqByReturn(ret *ssa.Return) []*ssa.Extract { - rets := ret.Results - exts := make([]*ssa.Extract, 0, len(rets)) - - for _, ret := range rets { - ext, ok := ret.(*ssa.Extract) - if !ok { - continue - } - - if types.Identical(ext.Type(), a.requestType) { - exts = append(exts, ext) - } - } - - return exts -} - -func (a *Analyzer) requestsByNewRequest() map[*ssa.Call]*ssa.Extract { - reqs := make(map[*ssa.Call]*ssa.Extract) - - for _, f := range a.Funcs { - for _, b := range f.Blocks { - 
for _, instr := range b.Instrs { - ext, ok := instr.(*ssa.Extract) - if !ok { - continue - } - - if !types.Identical(ext.Type(), a.requestType) { - continue - } - - operands := ext.Operands([]*ssa.Value{}) - if len(operands) != 1 { - continue - } - - operand := *operands[0] - - f, ok := operand.(*ssa.Call) - if !ok { - continue - } - - if types.Identical(f.Call.Value.Type(), a.newRequestType) { - reqs[f] = ext - } - } - } - } - - return reqs -} diff --git a/vendor/github.com/sourcegraph/conc/.golangci.yml b/vendor/github.com/sourcegraph/conc/.golangci.yml deleted file mode 100644 index ae65a760a..000000000 --- a/vendor/github.com/sourcegraph/conc/.golangci.yml +++ /dev/null @@ -1,11 +0,0 @@ -linters: - disable-all: true - enable: - - errcheck - - godot - - gosimple - - govet - - ineffassign - - staticcheck - - typecheck - - unused diff --git a/vendor/github.com/sourcegraph/conc/LICENSE b/vendor/github.com/sourcegraph/conc/LICENSE deleted file mode 100644 index 1081f4ef4..000000000 --- a/vendor/github.com/sourcegraph/conc/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -MIT License - -Copyright (c) 2023 Sourcegraph - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. 
diff --git a/vendor/github.com/sourcegraph/conc/README.md b/vendor/github.com/sourcegraph/conc/README.md deleted file mode 100644 index 1c87c3c96..000000000 --- a/vendor/github.com/sourcegraph/conc/README.md +++ /dev/null @@ -1,464 +0,0 @@ -![conch](https://user-images.githubusercontent.com/12631702/210295964-785cc63d-d697-420c-99ff-f492eb81dec9.svg) - -# `conc`: better structured concurrency for go - -[![Go Reference](https://pkg.go.dev/badge/github.com/sourcegraph/conc.svg)](https://pkg.go.dev/github.com/sourcegraph/conc) -[![Sourcegraph](https://img.shields.io/badge/view%20on-sourcegraph-A112FE?logo=data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAADIAAAAyCAYAAAAeP4ixAAAEZklEQVRoQ+2aXWgUZxSG3292sxtNN43BhBakFPyhxSujRSxiU1pr7SaGXqgUxOIEW0IFkeYighYUxAuLUlq0lrq2iCDpjWtmFVtoG6QVNOCFVShVLyxIk0DVjZLMxt3xTGTccd2ZOd/8JBHci0CY9zvnPPN+/7sCIXwKavOwAcy2QgngQiIztDSE0OwQlDPYR1ebiaH6J5kZChyfW12gRG4QVgGTBfMchMbFP9Sn5nlZL2D0JjLD6710lc+z0NfqSGTXQRQ4bX07Mq423yoBL3OSyHSvUxirMuaEvgbJWrdcvkHMoJwxYuq4INUhyuWvQa1jvdMGxAvCxJlyEC9XOBCWL04wwRzpbDoDQ7wfZJzIQLi5Eggk6DiRhZgWIAbE3NrM4A3LPT8Q7UgqAqLqTmLSHLGPkyzG/qXEczhd0q6RH+zaSBfaUoc4iQx19pIClIscrTkNZzG6gd7qMY6eC2Hqyo705ZfTf+eqJmhMzcSbYtQpOXc92ZsZjLVAL4YNUQbJ5Ttg4CQrQdGYj44Xr9m1XJCzmZusFDJOWNpHjmh5x624a2ZFtOKDVL+uNo2TuXE3bZQQZUf8gtgqP31uI94Z/rMqix+IGiRfWw3xN9dCgVx+L3WrHm4Dju6PXz/EkjuXJ6R+IGgyOE1TbZqTq9y1eo0EZo7oMo1ktPu3xjHvuiLT5AFNszUyDULtWpzE2/fEsey8O5TbWuGWwxrs5rS7nFNMWJrNh2No74s9Ec4vRNmRRzPXMP19fBMSVsGcOJ98G8N3Wl2gXcbTjbX7vUBxLaeASDQCm5Cu/0E2tvtb0Ea+BowtskFD0wvlc6Rf2M+Jx7dTu7ubFr2dnKDRaMQe2v/tcIrNB7FH0O50AcrBaApmRDVwFO31ql3pD8QW4dP0feNwl/Q+kFEtRyIGyaWXnpy1OO0qNJWHo1y6iCmAGkBb/Ru+HenDWIF2mo4r8G+tRRzoniSn2uqFLxANhe9LKHVyTbz6egk9+x5w5fK6ulSNNMhZ/Feno+GebLZV6isTTa6k5qNl5RnZ5u56Ib6SBvFzaWBBVFZzvnERWlt/Cg4l27XChLCqFyLekjhy6xJyoytgjPf7opIB8QPx7sYFiMXHPGt76m741MhCKMZfng0nBOIjmoJPsLqWHwgFpe6V6qtfcopxveR2Oy+J0ntIN/zCWkf8QNAJ7y6d8Bq4lxLc2/qJl5K7t432XwcqX5CrI34gzATWuYILQtdQPyePDK3iuOekCR3Efjhig1B1Uq5UoXEEoZX7d1q535J5S9VOeFyYyEBku5XTMXXKQTToX5Rg7OI44nbW5oKYeYK4EniMeF0YFNSmb+grhc84LyRCEP1/OurOcipCQbKxDeK2V5FcVyIDMQvsgz5gwFhcWWwKyRlvQ3gv29RwWoDYAbIofNyBxI9eDlQ+n3YgsgCWnr4MStGXQXmv9pF2La/k3OccV54JEBM4yp9EsXa/3LfO0dGPcYq0Y7DfZB8nJzZw2rppHgKgVHs8L5wvRwAAAABJRU5ErkJggg==)](https://sourcegraph.com/github.com/sourcegraph/conc) -[![Go Report Card](https://goreportcard.com/badge/github.com/sourcegraph/conc)](https://goreportcard.com/report/github.com/sourcegraph/conc) -[![codecov](https://codecov.io/gh/sourcegraph/conc/branch/main/graph/badge.svg?token=MQZTEA1QWT)](https://codecov.io/gh/sourcegraph/conc) -[![Discord](https://img.shields.io/badge/discord-chat-%235765F2)](https://discord.gg/bvXQXmtRjN) - -`conc` is your toolbelt for structured concurrency in go, making common tasks -easier and safer. 
- -```sh -go get github.com/sourcegraph/conc -``` - -# At a glance - -- Use [`conc.WaitGroup`](https://pkg.go.dev/github.com/sourcegraph/conc#WaitGroup) if you just want a safer version of `sync.WaitGroup` -- Use [`pool.Pool`](https://pkg.go.dev/github.com/sourcegraph/conc/pool#Pool) if you want a concurrency-limited task runner -- Use [`pool.ResultPool`](https://pkg.go.dev/github.com/sourcegraph/conc/pool#ResultPool) if you want a concurrent task runner that collects task results -- Use [`pool.(Result)?ErrorPool`](https://pkg.go.dev/github.com/sourcegraph/conc/pool#ErrorPool) if your tasks are fallible -- Use [`pool.(Result)?ContextPool`](https://pkg.go.dev/github.com/sourcegraph/conc/pool#ContextPool) if your tasks should be canceled on failure -- Use [`stream.Stream`](https://pkg.go.dev/github.com/sourcegraph/conc/stream#Stream) if you want to process an ordered stream of tasks in parallel with serial callbacks -- Use [`iter.Map`](https://pkg.go.dev/github.com/sourcegraph/conc/iter#Map) if you want to concurrently map a slice -- Use [`iter.ForEach`](https://pkg.go.dev/github.com/sourcegraph/conc/iter#ForEach) if you want to concurrently iterate over a slice -- Use [`panics.Catcher`](https://pkg.go.dev/github.com/sourcegraph/conc/panics#Catcher) if you want to catch panics in your own goroutines - -All pools are created with -[`pool.New()`](https://pkg.go.dev/github.com/sourcegraph/conc/pool#New) -or -[`pool.NewWithResults[T]()`](https://pkg.go.dev/github.com/sourcegraph/conc/pool#NewWithResults), -then configured with methods: - -- [`p.WithMaxGoroutines()`](https://pkg.go.dev/github.com/sourcegraph/conc/pool#Pool.MaxGoroutines) configures the maximum number of goroutines in the pool -- [`p.WithErrors()`](https://pkg.go.dev/github.com/sourcegraph/conc/pool#Pool.WithErrors) configures the pool to run tasks that return errors -- [`p.WithContext(ctx)`](https://pkg.go.dev/github.com/sourcegraph/conc/pool#Pool.WithContext) configures the pool to run tasks that should be canceled on first error -- [`p.WithFirstError()`](https://pkg.go.dev/github.com/sourcegraph/conc/pool#ErrorPool.WithFirstError) configures error pools to only keep the first returned error rather than an aggregated error -- [`p.WithCollectErrored()`](https://pkg.go.dev/github.com/sourcegraph/conc/pool#ResultContextPool.WithCollectErrored) configures result pools to collect results even when the task errored - -# Goals - -The main goals of the package are: -1) Make it harder to leak goroutines -2) Handle panics gracefully -3) Make concurrent code easier to read - -## Goal #1: Make it harder to leak goroutines - -A common pain point when working with goroutines is cleaning them up. It's -really easy to fire off a `go` statement and fail to properly wait for it to -complete. - -`conc` takes the opinionated stance that all concurrency should be scoped. -That is, goroutines should have an owner and that owner should always -ensure that its owned goroutines exit properly. - -In `conc`, the owner of a goroutine is always a `conc.WaitGroup`. Goroutines -are spawned in a `WaitGroup` with `(*WaitGroup).Go()`, and -`(*WaitGroup).Wait()` should always be called before the `WaitGroup` goes out -of scope. - -In some cases, you might want a spawned goroutine to outlast the scope of the -caller. In that case, you could pass a `WaitGroup` into the spawning function. - -```go -func main() { - var wg conc.WaitGroup - defer wg.Wait() - - startTheThing(&wg) -} - -func startTheThing(wg *conc.WaitGroup) { - wg.Go(func() { ... 
}) -} -``` - -For some more discussion on why scoped concurrency is nice, check out [this -blog -post](https://vorpus.org/blog/notes-on-structured-concurrency-or-go-statement-considered-harmful/). - -## Goal #2: Handle panics gracefully - -A frequent problem with goroutines in long-running applications is handling -panics. A goroutine spawned without a panic handler will crash the whole process -on panic. This is usually undesirable. - -However, if you do add a panic handler to a goroutine, what do you do with the -panic once you catch it? Some options: -1) Ignore it -2) Log it -3) Turn it into an error and return that to the goroutine spawner -4) Propagate the panic to the goroutine spawner - -Ignoring panics is a bad idea since panics usually mean there is actually -something wrong and someone should fix it. - -Just logging panics isn't great either because then there is no indication to the spawner -that something bad happened, and it might just continue on as normal even though your -program is in a really bad state. - -Both (3) and (4) are reasonable options, but both require the goroutine to have -an owner that can actually receive the message that something went wrong. This -is generally not true with a goroutine spawned with `go`, but in the `conc` -package, all goroutines have an owner that must collect the spawned goroutine. -In the conc package, any call to `Wait()` will panic if any of the spawned goroutines -panicked. Additionally, it decorates the panic value with a stacktrace from the child -goroutine so that you don't lose information about what caused the panic. - -Doing this all correctly every time you spawn something with `go` is not -trivial and it requires a lot of boilerplate that makes the important parts of -the code more difficult to read, so `conc` does this for you. - - - - - - - - - - -
stdlib | conc
- -```go -type caughtPanicError struct { - val any - stack []byte -} - -func (e *caughtPanicError) Error() string { - return fmt.Sprintf( - "panic: %q\n%s", - e.val, - string(e.stack) - ) -} - -func main() { - done := make(chan error) - go func() { - defer func() { - if v := recover(); v != nil { - done <- &caughtPanicError{ - val: v, - stack: debug.Stack() - } - } else { - done <- nil - } - }() - doSomethingThatMightPanic() - }() - err := <-done - if err != nil { - panic(err) - } -} -``` - - -```go -func main() { - var wg conc.WaitGroup - wg.Go(doSomethingThatMightPanic) - // panics with a nice stacktrace - wg.Wait() -} -``` -
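One part of the removed API that the comparison above does not show: besides `Wait()`, the vendored `conc.WaitGroup` (see `waitgroup.go` later in this diff) also exposes `WaitAndRecover()`, which hands the first caught panic back as a `*panics.Recovered` instead of re-panicking. A minimal sketch of that variant, based on the vendored sources deleted below:

```go
package main

import (
	"fmt"

	"github.com/sourcegraph/conc"
)

func main() {
	var wg conc.WaitGroup
	wg.Go(func() { panic("boom") })

	// WaitAndRecover blocks like Wait, but returns the first caught panic
	// (or nil) rather than propagating it.
	if r := wg.WaitAndRecover(); r != nil {
		// AsError wraps the recovered value; its message includes the
		// child goroutine's stacktrace.
		fmt.Println(r.AsError())
	}
}
```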
- -## Goal #3: Make concurrent code easier to read - -Doing concurrency correctly is difficult. Doing it in a way that doesn't -obfuscate what the code is actually doing is more difficult. The `conc` package -attempts to make common operations easier by abstracting as much boilerplate -complexity as possible. - -Want to run a set of concurrent tasks with a bounded set of goroutines? Use -`pool.New()`. Want to process an ordered stream of results concurrently, but -still maintain order? Try `stream.New()`. What about a concurrent map over -a slice? Take a peek at `iter.Map()`. - -Browse some examples below for some comparisons with doing these by hand. - -# Examples - -Each of these examples forgoes propagating panics for simplicity. To see -what kind of complexity that would add, check out the "Goal #2" header above. - -Spawn a set of goroutines and waiting for them to finish: - - - - - - - - - - -
stdlib | conc
- -```go -func main() { - var wg sync.WaitGroup - for i := 0; i < 10; i++ { - wg.Add(1) - go func() { - defer wg.Done() - // crashes on panic! - doSomething() - }() - } - wg.Wait() -} -``` - - -```go -func main() { - var wg conc.WaitGroup - for i := 0; i < 10; i++ { - wg.Go(doSomething) - } - wg.Wait() -} -``` -
- -Process each element of a stream in a static pool of goroutines: - - - - - - - - - - -
stdlib | conc
- -```go -func process(stream chan int) { - var wg sync.WaitGroup - for i := 0; i < 10; i++ { - wg.Add(1) - go func() { - defer wg.Done() - for elem := range stream { - handle(elem) - } - }() - } - wg.Wait() -} -``` - - -```go -func process(stream chan int) { - p := pool.New().WithMaxGoroutines(10) - for elem := range stream { - elem := elem - p.Go(func() { - handle(elem) - }) - } - p.Wait() -} -``` -
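The configuration list near the top of this README mentions `WithErrors()`, but none of the side-by-side examples use it. The `pool` package sources are not part of this hunk, so the exact shapes below (an error pool whose `Go` takes `func() error` and whose `Wait` returns the aggregated error) are assumptions inferred from that list; a rough sketch rather than the vendored API verbatim:

```go
package main

import (
	"fmt"

	"github.com/sourcegraph/conc/pool"
)

func main() {
	// Assumed API: WithErrors switches the pool to error-returning tasks.
	p := pool.New().WithMaxGoroutines(4).WithErrors()
	for i := 0; i < 8; i++ {
		i := i
		p.Go(func() error {
			if i%3 == 0 {
				return fmt.Errorf("task %d failed", i)
			}
			return nil
		})
	}
	// Wait is assumed to return the collected errors joined together
	// (keeping only the first is opt-in via WithFirstError, per the list above).
	if err := p.Wait(); err != nil {
		fmt.Println(err)
	}
}
```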
- -Process each element of a slice in a static pool of goroutines: - - - - - - - - - - -
stdlib | conc
- -```go -func process(values []int) { - feeder := make(chan int, 8) - - var wg sync.WaitGroup - for i := 0; i < 10; i++ { - wg.Add(1) - go func() { - defer wg.Done() - for elem := range feeder { - handle(elem) - } - }() - } - - for _, value := range values { - feeder <- value - } - close(feeder) - wg.Wait() -} -``` - - -```go -func process(values []int) { - iter.ForEach(values, handle) -} -``` -
- -Concurrently map a slice: - - - - - - - - - - -
stdlib | conc
- -```go -func concMap( - input []int, - f func(int) int, -) []int { - res := make([]int, len(input)) - var idx atomic.Int64 - - var wg sync.WaitGroup - for i := 0; i < 10; i++ { - wg.Add(1) - go func() { - defer wg.Done() - - for { - i := int(idx.Add(1) - 1) - if i >= len(input) { - return - } - - res[i] = f(input[i]) - } - }() - } - wg.Wait() - return res -} -``` - - -```go -func concMap( - input []int, - f func(*int) int, -) []int { - return iter.Map(input, f) -} -``` -
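`iter.Map`, shown on the right above, collects only results; the vendored package (see `iter/map.go` later in this diff) also ships `MapErr`, which additionally joins any errors returned by the mapping function. A minimal sketch using that deleted API:

```go
package main

import (
	"fmt"
	"strconv"

	"github.com/sourcegraph/conc/iter"
)

func main() {
	inputs := []string{"1", "2", "not-a-number", "4"}

	// MapErr maps concurrently and returns the combined error of all
	// failed elements alongside the (partially filled) result slice.
	nums, err := iter.MapErr(inputs, func(s *string) (int, error) {
		return strconv.Atoi(*s)
	})
	fmt.Println(nums, err)
}
```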
- -Process an ordered stream concurrently: - - - - - - - - - - - -
stdlib | conc
- -```go -func mapStream( - in chan int, - out chan int, - f func(int) int, -) { - tasks := make(chan func()) - taskResults := make(chan chan int) - - // Worker goroutines - var workerWg sync.WaitGroup - for i := 0; i < 10; i++ { - workerWg.Add(1) - go func() { - defer workerWg.Done() - for task := range tasks { - task() - } - }() - } - - // Ordered reader goroutines - var readerWg sync.WaitGroup - readerWg.Add(1) - go func() { - defer readerWg.Done() - for result := range taskResults { - item := <-result - out <- item - } - }() - - // Feed the workers with tasks - for elem := range in { - resultCh := make(chan int, 1) - taskResults <- resultCh - tasks <- func() { - resultCh <- f(elem) - } - } - - // We've exhausted input. - // Wait for everything to finish - close(tasks) - workerWg.Wait() - close(taskResults) - readerWg.Wait() -} -``` - - -```go -func mapStream( - in chan int, - out chan int, - f func(int) int, -) { - s := stream.New().WithMaxGoroutines(10) - for elem := range in { - elem := elem - s.Go(func() stream.Callback { - res := f(elem) - return func() { out <- res } - }) - } - s.Wait() -} -``` -
- -# Status - -This package is currently pre-1.0. There are likely to be minor breaking -changes before a 1.0 release as we stabilize the APIs and tweak defaults. -Please open an issue if you have questions, concerns, or requests that you'd -like addressed before the 1.0 release. Currently, a 1.0 is targeted for -March 2023. diff --git a/vendor/github.com/sourcegraph/conc/internal/multierror/multierror_go119.go b/vendor/github.com/sourcegraph/conc/internal/multierror/multierror_go119.go deleted file mode 100644 index 7087e32a8..000000000 --- a/vendor/github.com/sourcegraph/conc/internal/multierror/multierror_go119.go +++ /dev/null @@ -1,10 +0,0 @@ -//go:build !go1.20 -// +build !go1.20 - -package multierror - -import "go.uber.org/multierr" - -var ( - Join = multierr.Combine -) diff --git a/vendor/github.com/sourcegraph/conc/internal/multierror/multierror_go120.go b/vendor/github.com/sourcegraph/conc/internal/multierror/multierror_go120.go deleted file mode 100644 index 39cff829a..000000000 --- a/vendor/github.com/sourcegraph/conc/internal/multierror/multierror_go120.go +++ /dev/null @@ -1,10 +0,0 @@ -//go:build go1.20 -// +build go1.20 - -package multierror - -import "errors" - -var ( - Join = errors.Join -) diff --git a/vendor/github.com/sourcegraph/conc/iter/iter.go b/vendor/github.com/sourcegraph/conc/iter/iter.go deleted file mode 100644 index 124b4f940..000000000 --- a/vendor/github.com/sourcegraph/conc/iter/iter.go +++ /dev/null @@ -1,85 +0,0 @@ -package iter - -import ( - "runtime" - "sync/atomic" - - "github.com/sourcegraph/conc" -) - -// defaultMaxGoroutines returns the default maximum number of -// goroutines to use within this package. -func defaultMaxGoroutines() int { return runtime.GOMAXPROCS(0) } - -// Iterator can be used to configure the behaviour of ForEach -// and ForEachIdx. The zero value is safe to use with reasonable -// defaults. -// -// Iterator is also safe for reuse and concurrent use. -type Iterator[T any] struct { - // MaxGoroutines controls the maximum number of goroutines - // to use on this Iterator's methods. - // - // If unset, MaxGoroutines defaults to runtime.GOMAXPROCS(0). - MaxGoroutines int -} - -// ForEach executes f in parallel over each element in input. -// -// It is safe to mutate the input parameter, which makes it -// possible to map in place. -// -// ForEach always uses at most runtime.GOMAXPROCS goroutines. -// It takes roughly 2µs to start up the goroutines and adds -// an overhead of roughly 50ns per element of input. For -// a configurable goroutine limit, use a custom Iterator. -func ForEach[T any](input []T, f func(*T)) { Iterator[T]{}.ForEach(input, f) } - -// ForEach executes f in parallel over each element in input, -// using up to the Iterator's configured maximum number of -// goroutines. -// -// It is safe to mutate the input parameter, which makes it -// possible to map in place. -// -// It takes roughly 2µs to start up the goroutines and adds -// an overhead of roughly 50ns per element of input. -func (iter Iterator[T]) ForEach(input []T, f func(*T)) { - iter.ForEachIdx(input, func(_ int, t *T) { - f(t) - }) -} - -// ForEachIdx is the same as ForEach except it also provides the -// index of the element to the callback. -func ForEachIdx[T any](input []T, f func(int, *T)) { Iterator[T]{}.ForEachIdx(input, f) } - -// ForEachIdx is the same as ForEach except it also provides the -// index of the element to the callback. 
-func (iter Iterator[T]) ForEachIdx(input []T, f func(int, *T)) { - if iter.MaxGoroutines == 0 { - // iter is a value receiver and is hence safe to mutate - iter.MaxGoroutines = defaultMaxGoroutines() - } - - numInput := len(input) - if iter.MaxGoroutines > numInput { - // No more concurrent tasks than the number of input items. - iter.MaxGoroutines = numInput - } - - var idx atomic.Int64 - // Create the task outside the loop to avoid extra closure allocations. - task := func() { - i := int(idx.Add(1) - 1) - for ; i < numInput; i = int(idx.Add(1) - 1) { - f(i, &input[i]) - } - } - - var wg conc.WaitGroup - for i := 0; i < iter.MaxGoroutines; i++ { - wg.Go(task) - } - wg.Wait() -} diff --git a/vendor/github.com/sourcegraph/conc/iter/map.go b/vendor/github.com/sourcegraph/conc/iter/map.go deleted file mode 100644 index efbe6bfaf..000000000 --- a/vendor/github.com/sourcegraph/conc/iter/map.go +++ /dev/null @@ -1,65 +0,0 @@ -package iter - -import ( - "sync" - - "github.com/sourcegraph/conc/internal/multierror" -) - -// Mapper is an Iterator with a result type R. It can be used to configure -// the behaviour of Map and MapErr. The zero value is safe to use with -// reasonable defaults. -// -// Mapper is also safe for reuse and concurrent use. -type Mapper[T, R any] Iterator[T] - -// Map applies f to each element of input, returning the mapped result. -// -// Map always uses at most runtime.GOMAXPROCS goroutines. For a configurable -// goroutine limit, use a custom Mapper. -func Map[T, R any](input []T, f func(*T) R) []R { - return Mapper[T, R]{}.Map(input, f) -} - -// Map applies f to each element of input, returning the mapped result. -// -// Map uses up to the configured Mapper's maximum number of goroutines. -func (m Mapper[T, R]) Map(input []T, f func(*T) R) []R { - res := make([]R, len(input)) - Iterator[T](m).ForEachIdx(input, func(i int, t *T) { - res[i] = f(t) - }) - return res -} - -// MapErr applies f to each element of the input, returning the mapped result -// and a combined error of all returned errors. -// -// Map always uses at most runtime.GOMAXPROCS goroutines. For a configurable -// goroutine limit, use a custom Mapper. -func MapErr[T, R any](input []T, f func(*T) (R, error)) ([]R, error) { - return Mapper[T, R]{}.MapErr(input, f) -} - -// MapErr applies f to each element of the input, returning the mapped result -// and a combined error of all returned errors. -// -// Map uses up to the configured Mapper's maximum number of goroutines. -func (m Mapper[T, R]) MapErr(input []T, f func(*T) (R, error)) ([]R, error) { - var ( - res = make([]R, len(input)) - errMux sync.Mutex - errs error - ) - Iterator[T](m).ForEachIdx(input, func(i int, t *T) { - var err error - res[i], err = f(t) - if err != nil { - errMux.Lock() - // TODO: use stdlib errors once multierrors land in go 1.20 - errs = multierror.Join(errs, err) - errMux.Unlock() - } - }) - return res, errs -} diff --git a/vendor/github.com/sourcegraph/conc/panics/panics.go b/vendor/github.com/sourcegraph/conc/panics/panics.go deleted file mode 100644 index abbed7fa0..000000000 --- a/vendor/github.com/sourcegraph/conc/panics/panics.go +++ /dev/null @@ -1,102 +0,0 @@ -package panics - -import ( - "fmt" - "runtime" - "runtime/debug" - "sync/atomic" -) - -// Catcher is used to catch panics. You can execute a function with Try, -// which will catch any spawned panic. Try can be called any number of times, -// from any number of goroutines. 
Once all calls to Try have completed, you can -// get the value of the first panic (if any) with Recovered(), or you can just -// propagate the panic (re-panic) with Repanic(). -type Catcher struct { - recovered atomic.Pointer[Recovered] -} - -// Try executes f, catching any panic it might spawn. It is safe -// to call from multiple goroutines simultaneously. -func (p *Catcher) Try(f func()) { - defer p.tryRecover() - f() -} - -func (p *Catcher) tryRecover() { - if val := recover(); val != nil { - rp := NewRecovered(1, val) - p.recovered.CompareAndSwap(nil, &rp) - } -} - -// Repanic panics if any calls to Try caught a panic. It will panic with the -// value of the first panic caught, wrapped in a panics.Recovered with caller -// information. -func (p *Catcher) Repanic() { - if val := p.Recovered(); val != nil { - panic(val) - } -} - -// Recovered returns the value of the first panic caught by Try, or nil if -// no calls to Try panicked. -func (p *Catcher) Recovered() *Recovered { - return p.recovered.Load() -} - -// NewRecovered creates a panics.Recovered from a panic value and a collected -// stacktrace. The skip parameter allows the caller to skip stack frames when -// collecting the stacktrace. Calling with a skip of 0 means include the call to -// NewRecovered in the stacktrace. -func NewRecovered(skip int, value any) Recovered { - // 64 frames should be plenty - var callers [64]uintptr - n := runtime.Callers(skip+1, callers[:]) - return Recovered{ - Value: value, - Callers: callers[:n], - Stack: debug.Stack(), - } -} - -// Recovered is a panic that was caught with recover(). -type Recovered struct { - // The original value of the panic. - Value any - // The caller list as returned by runtime.Callers when the panic was - // recovered. Can be used to produce a more detailed stack information with - // runtime.CallersFrames. - Callers []uintptr - // The formatted stacktrace from the goroutine where the panic was recovered. - // Easier to use than Callers. - Stack []byte -} - -// String renders a human-readable formatting of the panic. -func (p *Recovered) String() string { - return fmt.Sprintf("panic: %v\nstacktrace:\n%s\n", p.Value, p.Stack) -} - -// AsError casts the panic into an error implementation. The implementation -// is unwrappable with the cause of the panic, if the panic was provided one. -func (p *Recovered) AsError() error { - if p == nil { - return nil - } - return &ErrRecovered{*p} -} - -// ErrRecovered wraps a panics.Recovered in an error implementation. -type ErrRecovered struct{ Recovered } - -var _ error = (*ErrRecovered)(nil) - -func (p *ErrRecovered) Error() string { return p.String() } - -func (p *ErrRecovered) Unwrap() error { - if err, ok := p.Value.(error); ok { - return err - } - return nil -} diff --git a/vendor/github.com/sourcegraph/conc/panics/try.go b/vendor/github.com/sourcegraph/conc/panics/try.go deleted file mode 100644 index 4ded92a1c..000000000 --- a/vendor/github.com/sourcegraph/conc/panics/try.go +++ /dev/null @@ -1,11 +0,0 @@ -package panics - -// Try executes f, catching and returning any panic it might spawn. -// -// The recovered panic can be propagated with panic(), or handled as a normal error with -// (*panics.Recovered).AsError(). 
-func Try(f func()) *Recovered { - var c Catcher - c.Try(f) - return c.Recovered() -} diff --git a/vendor/github.com/sourcegraph/conc/waitgroup.go b/vendor/github.com/sourcegraph/conc/waitgroup.go deleted file mode 100644 index 47b1bc1a5..000000000 --- a/vendor/github.com/sourcegraph/conc/waitgroup.go +++ /dev/null @@ -1,52 +0,0 @@ -package conc - -import ( - "sync" - - "github.com/sourcegraph/conc/panics" -) - -// NewWaitGroup creates a new WaitGroup. -func NewWaitGroup() *WaitGroup { - return &WaitGroup{} -} - -// WaitGroup is the primary building block for scoped concurrency. -// Goroutines can be spawned in the WaitGroup with the Go method, -// and calling Wait() will ensure that each of those goroutines exits -// before continuing. Any panics in a child goroutine will be caught -// and propagated to the caller of Wait(). -// -// The zero value of WaitGroup is usable, just like sync.WaitGroup. -// Also like sync.WaitGroup, it must not be copied after first use. -type WaitGroup struct { - wg sync.WaitGroup - pc panics.Catcher -} - -// Go spawns a new goroutine in the WaitGroup. -func (h *WaitGroup) Go(f func()) { - h.wg.Add(1) - go func() { - defer h.wg.Done() - h.pc.Try(f) - }() -} - -// Wait will block until all goroutines spawned with Go exit and will -// propagate any panics spawned in a child goroutine. -func (h *WaitGroup) Wait() { - h.wg.Wait() - - // Propagate a panic if we caught one from a child goroutine. - h.pc.Repanic() -} - -// WaitAndRecover will block until all goroutines spawned with Go exit and -// will return a *panics.Recovered if one of the child goroutines panics. -func (h *WaitGroup) WaitAndRecover() *panics.Recovered { - h.wg.Wait() - - // Return a recovered panic if we caught one from a child goroutine. - return h.pc.Recovered() -} diff --git a/vendor/github.com/sourcegraph/go-diff/LICENSE b/vendor/github.com/sourcegraph/go-diff/LICENSE deleted file mode 100644 index 5ba1c4436..000000000 --- a/vendor/github.com/sourcegraph/go-diff/LICENSE +++ /dev/null @@ -1,46 +0,0 @@ -Copyright (c) 2014 Sourcegraph, Inc. - -Permission is hereby granted, free of charge, to any person -obtaining a copy of this software and associated documentation -files (the "Software"), to deal in the Software without -restriction, including without limitation the rights to use, -copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the -Software is furnished to do so, subject to the following -conditions: - -The above copyright notice and this permission notice shall be -included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES -OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT -HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, -WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING -FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR -OTHER DEALINGS IN THE SOFTWARE. 
- ------------------------------------------------------------------ - -Portions adapted from python-unidiff: - -Copyright (c) 2012 Matias Bordese - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. -IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, -DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR -OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE -OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/vendor/github.com/sourcegraph/go-diff/diff/diff.go b/vendor/github.com/sourcegraph/go-diff/diff/diff.go deleted file mode 100644 index 81aa65570..000000000 --- a/vendor/github.com/sourcegraph/go-diff/diff/diff.go +++ /dev/null @@ -1,136 +0,0 @@ -package diff - -import ( - "bytes" - "time" -) - -// A FileDiff represents a unified diff for a single file. -// -// A file unified diff has a header that resembles the following: -// -// --- oldname 2009-10-11 15:12:20.000000000 -0700 -// +++ newname 2009-10-11 15:12:30.000000000 -0700 -type FileDiff struct { - // the original name of the file - OrigName string - // the original timestamp (nil if not present) - OrigTime *time.Time - // the new name of the file (often same as OrigName) - NewName string - // the new timestamp (nil if not present) - NewTime *time.Time - // extended header lines (e.g., git's "new mode ", "rename from ", etc.) - Extended []string - // hunks that were changed from orig to new - Hunks []*Hunk -} - -// A Hunk represents a series of changes (additions or deletions) in a file's -// unified diff. -type Hunk struct { - // starting line number in original file - OrigStartLine int32 - // number of lines the hunk applies to in the original file - OrigLines int32 - // if > 0, then the original file had a 'No newline at end of file' mark at this offset - OrigNoNewlineAt int32 - // starting line number in new file - NewStartLine int32 - // number of lines the hunk applies to in the new file - NewLines int32 - // optional section heading - Section string - // 0-indexed line offset in unified file diff (including section headers); this is - // only set when Hunks are read from entire file diff (i.e., when ReadAllHunks is - // called) This accounts for hunk headers, too, so the StartPosition of the first - // hunk will be 1. - StartPosition int32 - // hunk body (lines prefixed with '-', '+', or ' ') - Body []byte -} - -// A Stat is a diff stat that represents the number of lines added/changed/deleted. -type Stat struct { - // number of lines added - Added int32 - // number of lines changed - Changed int32 - // number of lines deleted - Deleted int32 -} - -// Stat computes the number of lines added/changed/deleted in all -// hunks in this file's diff. 
-func (d *FileDiff) Stat() Stat { - total := Stat{} - for _, h := range d.Hunks { - total.add(h.Stat()) - } - return total -} - -// Stat computes the number of lines added/changed/deleted in this -// hunk. -func (h *Hunk) Stat() Stat { - lines := bytes.Split(h.Body, []byte{'\n'}) - var last byte - st := Stat{} - for _, line := range lines { - if len(line) == 0 { - last = 0 - continue - } - switch line[0] { - case '-': - if last == '+' { - st.Added-- - st.Changed++ - last = 0 // next line can't change this one since this is already a change - } else { - st.Deleted++ - last = line[0] - } - case '+': - if last == '-' { - st.Deleted-- - st.Changed++ - last = 0 // next line can't change this one since this is already a change - } else { - st.Added++ - last = line[0] - } - default: - last = 0 - } - } - return st -} - -var ( - hunkPrefix = []byte("@@ ") - onlyInMessagePrefix = []byte("Only in ") -) - -const hunkHeader = "@@ -%d,%d +%d,%d @@" -const onlyInMessage = "Only in %s: %s\n" - -// diffTimeParseLayout is the layout used to parse the time in unified diff file -// header timestamps. -// See https://www.gnu.org/software/diffutils/manual/html_node/Detailed-Unified.html. -const diffTimeParseLayout = "2006-01-02 15:04:05 -0700" - -// Apple's diff is based on freebsd diff, which uses a timestamp format that does -// not include the timezone offset. -const diffTimeParseWithoutTZLayout = "2006-01-02 15:04:05" - -// diffTimeFormatLayout is the layout used to format (i.e., print) the time in unified diff file -// header timestamps. -// See https://www.gnu.org/software/diffutils/manual/html_node/Detailed-Unified.html. -const diffTimeFormatLayout = "2006-01-02 15:04:05.000000000 -0700" - -func (s *Stat) add(o Stat) { - s.Added += o.Added - s.Changed += o.Changed - s.Deleted += o.Deleted -} diff --git a/vendor/github.com/sourcegraph/go-diff/diff/doc.go b/vendor/github.com/sourcegraph/go-diff/diff/doc.go deleted file mode 100644 index 12fe96a07..000000000 --- a/vendor/github.com/sourcegraph/go-diff/diff/doc.go +++ /dev/null @@ -1,2 +0,0 @@ -// Package diff provides a parser for unified diffs. -package diff diff --git a/vendor/github.com/sourcegraph/go-diff/diff/parse.go b/vendor/github.com/sourcegraph/go-diff/diff/parse.go deleted file mode 100644 index 48eeb9670..000000000 --- a/vendor/github.com/sourcegraph/go-diff/diff/parse.go +++ /dev/null @@ -1,865 +0,0 @@ -package diff - -import ( - "bufio" - "bytes" - "errors" - "fmt" - "io" - "path/filepath" - "strconv" - "strings" - "time" -) - -// ParseMultiFileDiff parses a multi-file unified diff. It returns an error if -// parsing failed as a whole, but does its best to parse as many files in the -// case of per-file errors. If it cannot detect when the diff of the next file -// begins, the hunks are added to the FileDiff of the previous file. -func ParseMultiFileDiff(diff []byte) ([]*FileDiff, error) { - return NewMultiFileDiffReader(bytes.NewReader(diff)).ReadAllFiles() -} - -// NewMultiFileDiffReader returns a new MultiFileDiffReader that reads -// a multi-file unified diff from r. -func NewMultiFileDiffReader(r io.Reader) *MultiFileDiffReader { - return &MultiFileDiffReader{reader: newLineReader(r)} -} - -// MultiFileDiffReader reads a multi-file unified diff. -type MultiFileDiffReader struct { - line int - offset int64 - reader *lineReader - - // TODO(sqs): line and offset tracking in multi-file diffs is broken; add tests and fix - - // nextFileFirstLine is a line that was read by a HunksReader that - // was how it determined the hunk was complete. 
But to determine - // that, it needed to read the first line of the next file. We - // store nextFileFirstLine so we can "give the first line back" to - // the next file. - nextFileFirstLine []byte -} - -// ReadFile reads the next file unified diff (including headers and -// all hunks) from r. If there are no more files in the diff, it -// returns error io.EOF. -func (r *MultiFileDiffReader) ReadFile() (*FileDiff, error) { - fd, _, err := r.ReadFileWithTrailingContent() - return fd, err -} - -// ReadFileWithTrailingContent reads the next file unified diff (including -// headers and all hunks) from r, also returning any trailing content. If there -// are no more files in the diff, it returns error io.EOF. -func (r *MultiFileDiffReader) ReadFileWithTrailingContent() (*FileDiff, string, error) { - fr := &FileDiffReader{ - line: r.line, - offset: r.offset, - reader: r.reader, - fileHeaderLine: r.nextFileFirstLine, - } - r.nextFileFirstLine = nil - - fd, err := fr.ReadAllHeaders() - if err != nil { - switch e := err.(type) { - case *ParseError: - if e.Err == ErrNoFileHeader || e.Err == ErrExtendedHeadersEOF { - // Any non-diff content preceding a valid diff is included in the - // extended headers of the following diff. In this way, mixed diff / - // non-diff content can be parsed. Trailing non-diff content is - // different: it doesn't make sense to return a FileDiff with only - // extended headers populated. Instead, we return any trailing content - // in case the caller needs it. - trailing := "" - if fd != nil { - trailing = strings.Join(fd.Extended, "\n") - } - return nil, trailing, io.EOF - } - return nil, "", err - - case OverflowError: - r.nextFileFirstLine = []byte(e) - return fd, "", nil - - default: - return nil, "", err - } - } - - // FileDiff is added/deleted file - // No further collection of hunks needed - if fd.NewName == "" { - return fd, "", nil - } - - // Before reading hunks, check to see if there are any. If there - // aren't any, and there's another file after this file in the - // diff, then the hunks reader will complain ErrNoHunkHeader. It's - // not easy for us to tell from that error alone if that was - // caused by the lack of any hunks, or a malformatted hunk, so we - // need to perform the check here. - hr := fr.HunksReader() - line, err := r.reader.readLine() - if err != nil && err != io.EOF { - return fd, "", err - } - line = bytes.TrimSuffix(line, []byte{'\n'}) - if bytes.HasPrefix(line, hunkPrefix) { - hr.nextHunkHeaderLine = line - fd.Hunks, err = hr.ReadAllHunks() - r.line = fr.line - r.offset = fr.offset - if err != nil { - if e0, ok := err.(*ParseError); ok { - if e, ok := e0.Err.(*ErrBadHunkLine); ok { - // This just means we finished reading the hunks for the - // current file. See the ErrBadHunkLine doc for more info. - r.nextFileFirstLine = e.Line - return fd, "", nil - } - } - return nil, "", err - } - } else { - // There weren't any hunks, so that line we peeked ahead at - // actually belongs to the next file. Put it back. - r.nextFileFirstLine = line - } - - return fd, "", nil -} - -// ReadAllFiles reads all file unified diffs (including headers and all -// hunks) remaining in r. -func (r *MultiFileDiffReader) ReadAllFiles() ([]*FileDiff, error) { - var ds []*FileDiff - for { - d, err := r.ReadFile() - if d != nil { - ds = append(ds, d) - } - if err == io.EOF { - return ds, nil - } - if err != nil { - return nil, err - } - } -} - -// ParseFileDiff parses a file unified diff. 
-func ParseFileDiff(diff []byte) (*FileDiff, error) { - return NewFileDiffReader(bytes.NewReader(diff)).Read() -} - -// NewFileDiffReader returns a new FileDiffReader that reads a file -// unified diff. -func NewFileDiffReader(r io.Reader) *FileDiffReader { - return &FileDiffReader{reader: &lineReader{reader: bufio.NewReader(r)}} -} - -// FileDiffReader reads a unified file diff. -type FileDiffReader struct { - line int - offset int64 - reader *lineReader - - // fileHeaderLine is the first file header line, set by: - // - // (1) ReadExtendedHeaders if it encroaches on a file header line - // (which it must to detect when extended headers are done); or - // (2) (*MultiFileDiffReader).ReadFile() if it encroaches on a - // file header line while reading the previous file's hunks (in a - // multi-file diff). - fileHeaderLine []byte -} - -// Read reads a file unified diff, including headers and hunks, from r. -func (r *FileDiffReader) Read() (*FileDiff, error) { - fd, err := r.ReadAllHeaders() - if err != nil { - return nil, err - } - - fd.Hunks, err = r.HunksReader().ReadAllHunks() - if err != nil { - return nil, err - } - - return fd, nil -} - -// ReadAllHeaders reads the file headers and extended headers (if any) -// from a file unified diff. It does not read hunks, and the returned -// FileDiff's Hunks field is nil. To read the hunks, call the -// (*FileDiffReader).HunksReader() method to get a HunksReader and -// read hunks from that. -func (r *FileDiffReader) ReadAllHeaders() (*FileDiff, error) { - var err error - fd := &FileDiff{} - - fd.Extended, err = r.ReadExtendedHeaders() - if pe, ok := err.(*ParseError); ok && pe.Err == ErrExtendedHeadersEOF { - wasEmpty := handleEmpty(fd) - if wasEmpty { - return fd, nil - } - return fd, err - } else if _, ok := err.(OverflowError); ok { - handleEmpty(fd) - return fd, err - } else if err != nil { - return fd, err - } - - var origTime, newTime *time.Time - fd.OrigName, fd.NewName, origTime, newTime, err = r.ReadFileHeaders() - if err != nil { - return nil, err - } - if origTime != nil { - fd.OrigTime = origTime - } - if newTime != nil { - fd.NewTime = newTime - } - - return fd, nil -} - -// HunksReader returns a new HunksReader that reads hunks from r. The -// HunksReader's line and offset (used in error messages) is set to -// start where the file diff header ended (which means errors have the -// correct position information). -func (r *FileDiffReader) HunksReader() *HunksReader { - return &HunksReader{ - line: r.line, - offset: r.offset, - reader: r.reader, - } -} - -// ReadFileHeaders reads the unified file diff header (the lines that -// start with "---" and "+++" with the orig/new file names and -// timestamps). Or which starts with "Only in " with dir path and filename. 
-// "Only in" message is supported in POSIX locale: https://pubs.opengroup.org/onlinepubs/9699919799/utilities/diff.html#tag_20_34_10 -func (r *FileDiffReader) ReadFileHeaders() (origName, newName string, origTimestamp, newTimestamp *time.Time, err error) { - if r.fileHeaderLine != nil { - if isOnlyMessage, source, filename := parseOnlyInMessage(r.fileHeaderLine); isOnlyMessage { - return filepath.Join(string(source), string(filename)), - "", nil, nil, nil - } - } - origName, origTimestamp, err = r.readOneFileHeader([]byte("--- ")) - if err != nil { - return "", "", nil, nil, err - } - - newName, newTimestamp, err = r.readOneFileHeader([]byte("+++ ")) - if err != nil { - return "", "", nil, nil, err - } - - unquotedOrigName, err := strconv.Unquote(origName) - if err == nil { - origName = unquotedOrigName - } - unquotedNewName, err := strconv.Unquote(newName) - if err == nil { - newName = unquotedNewName - } - - return origName, newName, origTimestamp, newTimestamp, nil -} - -// readOneFileHeader reads one of the file headers (prefix should be -// either "+++ " or "--- "). -func (r *FileDiffReader) readOneFileHeader(prefix []byte) (filename string, timestamp *time.Time, err error) { - var line []byte - - if r.fileHeaderLine == nil { - var err error - line, err = r.reader.readLine() - if err == io.EOF { - return "", nil, &ParseError{r.line, r.offset, ErrNoFileHeader} - } else if err != nil { - return "", nil, err - } - } else { - line = r.fileHeaderLine - r.fileHeaderLine = nil - } - - if !bytes.HasPrefix(line, prefix) { - return "", nil, &ParseError{r.line, r.offset, ErrBadFileHeader} - } - - r.offset += int64(len(line)) - r.line++ - line = line[len(prefix):] - - trimmedLine := strings.TrimSpace(string(line)) // filenames that contain spaces may be terminated by a tab - parts := strings.SplitN(trimmedLine, "\t", 2) - filename = parts[0] - if len(parts) == 2 { - var ts time.Time - // Timestamp is optional, but this header has it. - ts, err = time.Parse(diffTimeParseLayout, parts[1]) - if err != nil { - var err1 error - ts, err1 = time.Parse(diffTimeParseWithoutTZLayout, parts[1]) - if err1 != nil { - return "", nil, err - } - err = nil - } - timestamp = &ts - } - - return filename, timestamp, err -} - -// OverflowError is returned when we have overflowed into the start -// of the next file while reading extended headers. -type OverflowError string - -func (e OverflowError) Error() string { - return fmt.Sprintf("overflowed into next file: %s", string(e)) -} - -// ReadExtendedHeaders reads the extended header lines, if any, from a -// unified diff file (e.g., git's "diff --git a/foo.go b/foo.go", "new -// mode ", "rename from ", etc.). -func (r *FileDiffReader) ReadExtendedHeaders() ([]string, error) { - var xheaders []string - firstLine := true - for { - var line []byte - if r.fileHeaderLine == nil { - var err error - line, err = r.reader.readLine() - if err == io.EOF { - return xheaders, &ParseError{r.line, r.offset, ErrExtendedHeadersEOF} - } else if err != nil { - return xheaders, err - } - } else { - line = r.fileHeaderLine - r.fileHeaderLine = nil - } - - if bytes.HasPrefix(line, []byte("diff --git ")) { - if firstLine { - firstLine = false - } else { - return xheaders, OverflowError(line) - } - } - if bytes.HasPrefix(line, []byte("--- ")) { - // We've reached the file header. 
- r.fileHeaderLine = line // pass to readOneFileHeader (see fileHeaderLine field doc) - return xheaders, nil - } - - // Reached message that file is added/deleted - if isOnlyInMessage, _, _ := parseOnlyInMessage(line); isOnlyInMessage { - r.fileHeaderLine = line // pass to readOneFileHeader (see fileHeaderLine field doc) - return xheaders, nil - } - - r.line++ - r.offset += int64(len(line)) - xheaders = append(xheaders, string(line)) - } -} - -// readQuotedFilename extracts a quoted filename from the beginning of a string, -// returning the unquoted filename and any remaining text after the filename. -func readQuotedFilename(text string) (value string, remainder string, err error) { - if text == "" || text[0] != '"' { - return "", "", fmt.Errorf(`string must start with a '"': %s`, text) - } - - // The end quote is the first quote NOT preceeded by an uneven number of backslashes. - numberOfBackslashes := 0 - for i, c := range text { - if c == '"' && i > 0 && numberOfBackslashes%2 == 0 { - value, err = strconv.Unquote(text[:i+1]) - remainder = text[i+1:] - return - } else if c == '\\' { - numberOfBackslashes++ - } else { - numberOfBackslashes = 0 - } - } - return "", "", fmt.Errorf(`end of string found while searching for '"': %s`, text) -} - -// parseDiffGitArgs extracts the two filenames from a 'diff --git' line. -// Returns false on syntax error, true if syntax is valid. Even with a -// valid syntax, it may be impossible to extract filenames; if so, the -// function returns ("", "", true). -func parseDiffGitArgs(diffArgs string) (string, string, bool) { - length := len(diffArgs) - if length < 3 { - return "", "", false - } - - if diffArgs[0] != '"' && diffArgs[length-1] != '"' { - // Both filenames are unquoted. - firstSpace := strings.IndexByte(diffArgs, ' ') - if firstSpace <= 0 || firstSpace == length-1 { - return "", "", false - } - - secondSpace := strings.IndexByte(diffArgs[firstSpace+1:], ' ') - if secondSpace == -1 { - if diffArgs[firstSpace+1] == '"' { - // The second filename begins with '"', but doesn't end with one. - return "", "", false - } - return diffArgs[:firstSpace], diffArgs[firstSpace+1:], true - } - - // One or both filenames contain a space, but the names are - // unquoted. Here, the 'diff --git' syntax is ambiguous, and - // we have to obtain the filenames elsewhere (e.g. from the - // hunk headers or extended headers). HOWEVER, if the file - // is newly created and empty, there IS no other place to - // find the filename. In this case, the two filenames are - // identical (except for the leading 'a/' prefix), and we have - // to handle that case here. - first := diffArgs[:length/2] - second := diffArgs[length/2+1:] - - // If the two strings could be equal, based on length, proceed. - if length%2 == 1 { - // If the name minus the a/ b/ prefixes is equal, proceed. - if len(first) >= 3 && first[1] == '/' && first[1:] == second[1:] { - return first, second, true - } - // If the names don't have the a/ and b/ prefixes and they're equal, proceed. - if !(first[:2] == "a/" && second[:2] == "b/") && first == second { - return first, second, true - } - } - - // The syntax is (unfortunately) valid, but we could not extract - // the filenames. 
- return "", "", true - } - - if diffArgs[0] == '"' { - first, remainder, err := readQuotedFilename(diffArgs) - if err != nil || len(remainder) < 2 || remainder[0] != ' ' { - return "", "", false - } - if remainder[1] == '"' { - second, remainder, err := readQuotedFilename(remainder[1:]) - if remainder != "" || err != nil { - return "", "", false - } - return first, second, true - } - return first, remainder[1:], true - } - - // In this case, second argument MUST be quoted (or it's a syntax error) - i := strings.IndexByte(diffArgs, '"') - if i == -1 || i+2 >= length || diffArgs[i-1] != ' ' { - return "", "", false - } - - second, remainder, err := readQuotedFilename(diffArgs[i:]) - if remainder != "" || err != nil { - return "", "", false - } - return diffArgs[:i-1], second, true -} - -// handleEmpty detects when FileDiff was an empty diff and will not have any hunks -// that follow. It updates fd fields from the parsed extended headers. -func handleEmpty(fd *FileDiff) (wasEmpty bool) { - lineCount := len(fd.Extended) - if lineCount > 0 && !strings.HasPrefix(fd.Extended[0], "diff --git ") { - return false - } - - lineHasPrefix := func(idx int, prefix string) bool { - return strings.HasPrefix(fd.Extended[idx], prefix) - } - - linesHavePrefixes := func(idx1 int, prefix1 string, idx2 int, prefix2 string) bool { - return lineHasPrefix(idx1, prefix1) && lineHasPrefix(idx2, prefix2) - } - - isCopy := (lineCount == 4 && linesHavePrefixes(2, "copy from ", 3, "copy to ")) || - (lineCount == 6 && linesHavePrefixes(2, "copy from ", 3, "copy to ") && lineHasPrefix(5, "Binary files ")) || - (lineCount == 6 && linesHavePrefixes(1, "old mode ", 2, "new mode ") && linesHavePrefixes(4, "copy from ", 5, "copy to ")) - - isRename := (lineCount == 4 && linesHavePrefixes(2, "rename from ", 3, "rename to ")) || - (lineCount == 5 && linesHavePrefixes(2, "rename from ", 3, "rename to ") && lineHasPrefix(4, "Binary files ")) || - (lineCount == 6 && linesHavePrefixes(2, "rename from ", 3, "rename to ") && lineHasPrefix(5, "Binary files ")) || - (lineCount == 6 && linesHavePrefixes(1, "old mode ", 2, "new mode ") && linesHavePrefixes(4, "rename from ", 5, "rename to ")) - - isDeletedFile := (lineCount == 3 || lineCount == 4 && lineHasPrefix(3, "Binary files ") || lineCount > 4 && lineHasPrefix(3, "GIT binary patch")) && - lineHasPrefix(1, "deleted file mode ") - - isNewFile := (lineCount == 3 || lineCount == 4 && lineHasPrefix(3, "Binary files ") || lineCount > 4 && lineHasPrefix(3, "GIT binary patch")) && - lineHasPrefix(1, "new file mode ") - - isModeChange := lineCount == 3 && linesHavePrefixes(1, "old mode ", 2, "new mode ") - - isBinaryPatch := lineCount == 3 && lineHasPrefix(2, "Binary files ") || lineCount > 3 && lineHasPrefix(2, "GIT binary patch") - - if !isModeChange && !isCopy && !isRename && !isBinaryPatch && !isNewFile && !isDeletedFile { - return false - } - - var success bool - fd.OrigName, fd.NewName, success = parseDiffGitArgs(fd.Extended[0][len("diff --git "):]) - if isNewFile { - fd.OrigName = "/dev/null" - } - - if isDeletedFile { - fd.NewName = "/dev/null" - } - - // For ambiguous 'diff --git' lines, try to reconstruct filenames using extended headers. 
- if success && (isCopy || isRename) && fd.OrigName == "" && fd.NewName == "" { - diffArgs := fd.Extended[0][len("diff --git "):] - - tryReconstruct := func(header string, prefix string, whichFile int, result *string) { - if !strings.HasPrefix(header, prefix) { - return - } - rawFilename := header[len(prefix):] - - // extract the filename prefix (e.g. "a/") from the 'diff --git' line. - var prefixLetterIndex int - if whichFile == 1 { - prefixLetterIndex = 0 - } else if whichFile == 2 { - prefixLetterIndex = len(diffArgs) - len(rawFilename) - 2 - } - if prefixLetterIndex < 0 || diffArgs[prefixLetterIndex+1] != '/' { - return - } - - *result = diffArgs[prefixLetterIndex:prefixLetterIndex+2] + rawFilename - } - - for _, header := range fd.Extended { - tryReconstruct(header, "copy from ", 1, &fd.OrigName) - tryReconstruct(header, "copy to ", 2, &fd.NewName) - tryReconstruct(header, "rename from ", 1, &fd.OrigName) - tryReconstruct(header, "rename to ", 2, &fd.NewName) - } - } - return success -} - -var ( - // ErrNoFileHeader is when a file unified diff has no file header - // (i.e., the lines that begin with "---" and "+++"). - ErrNoFileHeader = errors.New("expected file header, got EOF") - - // ErrBadFileHeader is when a file unified diff has a malformed - // file header (i.e., the lines that begin with "---" and "+++"). - ErrBadFileHeader = errors.New("bad file header") - - // ErrExtendedHeadersEOF is when an EOF was encountered while reading extended file headers, which means that there were no ---/+++ headers encountered before hunks (if any) began. - ErrExtendedHeadersEOF = errors.New("expected file header while reading extended headers, got EOF") - - // ErrBadOnlyInMessage is when a file have a malformed `only in` message - // Should be in format `Only in {source}: {filename}` - ErrBadOnlyInMessage = errors.New("bad 'only in' message") -) - -// ParseHunks parses hunks from a unified diff. The diff must consist -// only of hunks and not include a file header; if it has a file -// header, use ParseFileDiff. -func ParseHunks(diff []byte) ([]*Hunk, error) { - r := NewHunksReader(bytes.NewReader(diff)) - hunks, err := r.ReadAllHunks() - if err != nil { - return nil, err - } - return hunks, nil -} - -// NewHunksReader returns a new HunksReader that reads unified diff hunks -// from r. -func NewHunksReader(r io.Reader) *HunksReader { - return &HunksReader{reader: &lineReader{reader: bufio.NewReader(r)}} -} - -// A HunksReader reads hunks from a unified diff. -type HunksReader struct { - line int - offset int64 - hunk *Hunk - reader *lineReader - - nextHunkHeaderLine []byte -} - -// ReadHunk reads one hunk from r. If there are no more hunks, it -// returns error io.EOF. -func (r *HunksReader) ReadHunk() (*Hunk, error) { - r.hunk = nil - lastLineFromOrig := true - var line []byte - var err error - for { - if r.nextHunkHeaderLine != nil { - // Use stored hunk header line that was scanned in at the - // completion of the previous hunk's ReadHunk. - line = r.nextHunkHeaderLine - r.nextHunkHeaderLine = nil - } else { - line, err = r.reader.readLine() - if err != nil { - if err == io.EOF && r.hunk != nil { - return r.hunk, nil - } - return nil, err - } - } - - // Record position. - r.line++ - r.offset += int64(len(line)) - - if r.hunk == nil { - // Check for presence of hunk header. - if !bytes.HasPrefix(line, hunkPrefix) { - return nil, &ParseError{r.line, r.offset, ErrNoHunkHeader} - } - - // Parse hunk header. 
- r.hunk = &Hunk{} - items := []interface{}{ - &r.hunk.OrigStartLine, &r.hunk.OrigLines, - &r.hunk.NewStartLine, &r.hunk.NewLines, - } - header, section, err := normalizeHeader(string(line)) - if err != nil { - return nil, &ParseError{r.line, r.offset, err} - } - n, err := fmt.Sscanf(header, hunkHeader, items...) - if err != nil { - return nil, err - } - if n < len(items) { - return nil, &ParseError{r.line, r.offset, &ErrBadHunkHeader{header: string(line)}} - } - - r.hunk.Section = section - } else { - // Read hunk body line. - - // If the line starts with `---` and the next one with `+++` we're - // looking at a non-extended file header and need to abort. - if bytes.HasPrefix(line, []byte("---")) { - ok, err := r.reader.nextLineStartsWith("+++") - if err != nil { - return r.hunk, err - } - if ok { - ok2, _ := r.reader.nextNextLineStartsWith(string(hunkPrefix)) - if ok2 { - return r.hunk, &ParseError{r.line, r.offset, &ErrBadHunkLine{Line: line}} - } - } - } - - // If the line starts with the hunk prefix, this hunk is complete. - if bytes.HasPrefix(line, hunkPrefix) { - // But we've already read in the next hunk's - // header, so we need to be sure that the next call to - // ReadHunk starts with that header. - r.nextHunkHeaderLine = line - - // Rewind position. - r.line-- - r.offset -= int64(len(line)) - - return r.hunk, nil - } - - if len(line) >= 1 && !linePrefix(line[0]) { - // Bad hunk header line. If we're reading a multi-file - // diff, this may be the end of the current - // file. Return a "rich" error that lets our caller - // handle that case. - return r.hunk, &ParseError{r.line, r.offset, &ErrBadHunkLine{Line: line}} - } - if bytes.Equal(line, []byte(noNewlineMessage)) { - if lastLineFromOrig { - // Retain the newline in the body (otherwise the - // diff line would be like "-a+b", where "+b" is - // the the next line of the new file, which is not - // validly formatted) but record that the orig had - // no newline. - r.hunk.OrigNoNewlineAt = int32(len(r.hunk.Body)) - } else { - // Remove previous line's newline. - if len(r.hunk.Body) != 0 { - r.hunk.Body = r.hunk.Body[:len(r.hunk.Body)-1] - } - } - continue - } - - if len(line) > 0 { - lastLineFromOrig = line[0] == '-' - } - - r.hunk.Body = append(r.hunk.Body, line...) - r.hunk.Body = append(r.hunk.Body, '\n') - } - } -} - -const noNewlineMessage = `\ No newline at end of file` - -// linePrefixes is the set of all characters a valid line in a diff -// hunk can start with. '\' can appear in diffs when no newline is -// present at the end of a file. -// See: 'http://www.gnu.org/software/diffutils/manual/diffutils.html#Incomplete-Lines' -var linePrefixes = []byte{' ', '-', '+', '\\'} - -// linePrefix returns true if 'c' is in 'linePrefixes'. -func linePrefix(c byte) bool { - for _, p := range linePrefixes { - if p == c { - return true - } - } - return false -} - -// normalizeHeader takes a header of the form: -// "@@ -linestart[,chunksize] +linestart[,chunksize] @@ section" -// and returns two strings, with the first in the form: -// "@@ -linestart,chunksize +linestart,chunksize @@". -// where linestart and chunksize are both integers. The second is the -// optional section header. chunksize may be omitted from the header -// if its value is 1. normalizeHeader returns an error if the header -// is not in the correct format. -func normalizeHeader(header string) (string, string, error) { - // Split the header into five parts: the first '@@', the two - // ranges, the last '@@', and the optional section. 
- pieces := strings.SplitN(header, " ", 5) - if len(pieces) < 4 { - return "", "", &ErrBadHunkHeader{header: header} - } - - if pieces[0] != "@@" { - return "", "", &ErrBadHunkHeader{header: header} - } - for i := 1; i < 3; i++ { - if !strings.ContainsRune(pieces[i], ',') { - pieces[i] = pieces[i] + ",1" - } - } - if pieces[3] != "@@" { - return "", "", &ErrBadHunkHeader{header: header} - } - - var section string - if len(pieces) == 5 { - section = pieces[4] - } - return strings.Join(pieces, " "), strings.TrimSpace(section), nil -} - -// ReadAllHunks reads all remaining hunks from r. A successful call -// returns err == nil, not err == EOF. Because ReadAllHunks is defined -// to read until EOF, it does not treat end of file as an error to be -// reported. -func (r *HunksReader) ReadAllHunks() ([]*Hunk, error) { - var hunks []*Hunk - linesRead := int32(0) - for { - hunk, err := r.ReadHunk() - if err == io.EOF { - return hunks, nil - } - if hunk != nil { - linesRead++ // account for the hunk header line - hunk.StartPosition = linesRead - hunks = append(hunks, hunk) - linesRead += int32(bytes.Count(hunk.Body, []byte{'\n'})) - } - if err != nil { - return hunks, err - } - } -} - -// parseOnlyInMessage checks if line is a "Only in {source}: {filename}" and returns source and filename -func parseOnlyInMessage(line []byte) (bool, []byte, []byte) { - if !bytes.HasPrefix(line, onlyInMessagePrefix) { - return false, nil, nil - } - line = line[len(onlyInMessagePrefix):] - idx := bytes.Index(line, []byte(": ")) - if idx < 0 { - return false, nil, nil - } - return true, line[:idx], line[idx+2:] -} - -// A ParseError is a description of a unified diff syntax error. -type ParseError struct { - Line int // Line where the error occurred - Offset int64 // Offset where the error occurred - Err error // The actual error -} - -func (e *ParseError) Error() string { - return fmt.Sprintf("line %d, char %d: %s", e.Line, e.Offset, e.Err) -} - -// ErrNoHunkHeader indicates that a unified diff hunk header was -// expected but not found during parsing. -var ErrNoHunkHeader = errors.New("no hunk header") - -// ErrBadHunkHeader indicates that a malformed unified diff hunk -// header was encountered during parsing. -type ErrBadHunkHeader struct { - header string -} - -func (e *ErrBadHunkHeader) Error() string { - if e.header == "" { - return "bad hunk header" - } - return "bad hunk header: " + e.header -} - -// ErrBadHunkLine is when a line not beginning with ' ', '-', '+', or -// '\' is encountered while reading a hunk. In the context of reading -// a single hunk or file, it is an unexpected error. In a multi-file -// diff, however, it indicates that the current file's diff is -// complete (and remaining diff data will describe another file -// unified diff). -type ErrBadHunkLine struct { - Line []byte -} - -func (e *ErrBadHunkLine) Error() string { - m := "bad hunk line (does not start with ' ', '-', '+', or '\\')" - if len(e.Line) == 0 { - return m - } - return m + ": " + string(e.Line) -} diff --git a/vendor/github.com/sourcegraph/go-diff/diff/print.go b/vendor/github.com/sourcegraph/go-diff/diff/print.go deleted file mode 100644 index 012651a33..000000000 --- a/vendor/github.com/sourcegraph/go-diff/diff/print.go +++ /dev/null @@ -1,141 +0,0 @@ -package diff - -import ( - "bytes" - "fmt" - "io" - "path/filepath" - "time" -) - -// PrintMultiFileDiff prints a multi-file diff in unified diff format. 
-func PrintMultiFileDiff(ds []*FileDiff) ([]byte, error) { - var buf bytes.Buffer - for _, d := range ds { - diff, err := PrintFileDiff(d) - if err != nil { - return nil, err - } - if _, err := buf.Write(diff); err != nil { - return nil, err - } - } - return buf.Bytes(), nil -} - -// PrintFileDiff prints a FileDiff in unified diff format. -// -// TODO(sqs): handle escaping whitespace/etc. chars in filenames -func PrintFileDiff(d *FileDiff) ([]byte, error) { - var buf bytes.Buffer - - for _, xheader := range d.Extended { - if _, err := fmt.Fprintln(&buf, xheader); err != nil { - return nil, err - } - } - - // FileDiff is added/deleted file - // No further hunks printing needed - if d.NewName == "" { - _, err := fmt.Fprintf(&buf, onlyInMessage, filepath.Dir(d.OrigName), filepath.Base(d.OrigName)) - if err != nil { - return nil, err - } - return buf.Bytes(), nil - } - - if d.Hunks == nil { - return buf.Bytes(), nil - } - - if err := printFileHeader(&buf, "--- ", d.OrigName, d.OrigTime); err != nil { - return nil, err - } - if err := printFileHeader(&buf, "+++ ", d.NewName, d.NewTime); err != nil { - return nil, err - } - - ph, err := PrintHunks(d.Hunks) - if err != nil { - return nil, err - } - - if _, err := buf.Write(ph); err != nil { - return nil, err - } - return buf.Bytes(), nil -} - -func printFileHeader(w io.Writer, prefix string, filename string, timestamp *time.Time) error { - if _, err := fmt.Fprint(w, prefix, filename); err != nil { - return err - } - if timestamp != nil { - if _, err := fmt.Fprint(w, "\t", timestamp.Format(diffTimeFormatLayout)); err != nil { - return err - } - } - if _, err := fmt.Fprintln(w); err != nil { - return err - } - return nil -} - -// PrintHunks prints diff hunks in unified diff format. -func PrintHunks(hunks []*Hunk) ([]byte, error) { - var buf bytes.Buffer - for _, hunk := range hunks { - _, err := fmt.Fprintf(&buf, - "@@ -%d,%d +%d,%d @@", hunk.OrigStartLine, hunk.OrigLines, hunk.NewStartLine, hunk.NewLines, - ) - if err != nil { - return nil, err - } - if hunk.Section != "" { - _, err := fmt.Fprint(&buf, " ", hunk.Section) - if err != nil { - return nil, err - } - } - if _, err := fmt.Fprintln(&buf); err != nil { - return nil, err - } - - if hunk.OrigNoNewlineAt == 0 { - if _, err := buf.Write(hunk.Body); err != nil { - return nil, err - } - } else { - if _, err := buf.Write(hunk.Body[:hunk.OrigNoNewlineAt]); err != nil { - return nil, err - } - if err := printNoNewlineMessage(&buf); err != nil { - return nil, err - } - if _, err := buf.Write(hunk.Body[hunk.OrigNoNewlineAt:]); err != nil { - return nil, err - } - } - - if !bytes.HasSuffix(hunk.Body, []byte{'\n'}) { - if _, err := fmt.Fprintln(&buf); err != nil { - return nil, err - } - if err := printNoNewlineMessage(&buf); err != nil { - return nil, err - } - } - } - return buf.Bytes(), nil -} - -func printNoNewlineMessage(w io.Writer) error { - if _, err := w.Write([]byte(noNewlineMessage)); err != nil { - return err - } - if _, err := fmt.Fprintln(w); err != nil { - return err - } - return nil -} diff --git a/vendor/github.com/sourcegraph/go-diff/diff/reader_util.go b/vendor/github.com/sourcegraph/go-diff/diff/reader_util.go deleted file mode 100644 index 45300252b..000000000 --- a/vendor/github.com/sourcegraph/go-diff/diff/reader_util.go +++ /dev/null @@ -1,120 +0,0 @@ -package diff - -import ( - "bufio" - "bytes" - "errors" - "io" -) - -var ErrLineReaderUninitialized = errors.New("line reader not initialized") - -func newLineReader(r io.Reader) *lineReader { - return &lineReader{reader: 
bufio.NewReader(r)} -} - -// lineReader is a wrapper around a bufio.Reader that caches the next line to -// provide lookahead functionality for the next two lines. -type lineReader struct { - reader *bufio.Reader - - cachedNextLine []byte - cachedNextLineErr error -} - -// readLine returns the next unconsumed line and advances the internal cache of -// the lineReader. -func (l *lineReader) readLine() ([]byte, error) { - if l.cachedNextLine == nil && l.cachedNextLineErr == nil { - l.cachedNextLine, l.cachedNextLineErr = readLine(l.reader) - } - - if l.cachedNextLineErr != nil { - return nil, l.cachedNextLineErr - } - - next := l.cachedNextLine - - l.cachedNextLine, l.cachedNextLineErr = readLine(l.reader) - - return next, nil -} - -// nextLineStartsWith looks at the line that would be returned by the next call -// to readLine to check whether it has the given prefix. -// -// io.EOF and bufio.ErrBufferFull errors are ignored so that the function can -// be used when at the end of the file. -func (l *lineReader) nextLineStartsWith(prefix string) (bool, error) { - if l.cachedNextLine == nil && l.cachedNextLineErr == nil { - l.cachedNextLine, l.cachedNextLineErr = readLine(l.reader) - } - - return l.lineHasPrefix(l.cachedNextLine, prefix, l.cachedNextLineErr) -} - -// nextNextLineStartsWith checks the prefix of the line *after* the line that -// would be returned by the next readLine. -// -// io.EOF and bufio.ErrBufferFull errors are ignored so that the function can -// be used when at the end of the file. -// -// The lineReader MUST be initialized by calling readLine at least once before -// calling nextLineStartsWith. Otherwise ErrLineReaderUninitialized will be -// returned. -func (l *lineReader) nextNextLineStartsWith(prefix string) (bool, error) { - if l.cachedNextLine == nil && l.cachedNextLineErr == nil { - l.cachedNextLine, l.cachedNextLineErr = readLine(l.reader) - } - - next, err := l.reader.Peek(len(prefix)) - return l.lineHasPrefix(next, prefix, err) -} - -// lineHasPrefix checks whether the given line has the given prefix with -// bytes.HasPrefix. -// -// The readErr should be the error that was returned when the line was read. -// lineHasPrefix checks the error to adjust its return value to, e.g., return -// false and ignore the error when readErr is io.EOF. -func (l *lineReader) lineHasPrefix(line []byte, prefix string, readErr error) (bool, error) { - if readErr != nil { - if readErr == io.EOF || readErr == bufio.ErrBufferFull { - return false, nil - } - return false, readErr - } - - return bytes.HasPrefix(line, []byte(prefix)), nil -} - -// readLine is a helper that mimics the functionality of calling bufio.Scanner.Scan() and -// bufio.Scanner.Bytes(), but without the token size limitation. It will read and return -// the next line in the Reader with the trailing newline stripped. It will return an -// io.EOF error when there is nothing left to read (at the start of the function call). It -// will return any other errors it receives from the underlying call to ReadBytes. -func readLine(r *bufio.Reader) ([]byte, error) { - line_, err := r.ReadBytes('\n') - if err == io.EOF { - if len(line_) == 0 { - return nil, io.EOF - } - - // ReadBytes returned io.EOF, because it didn't find another newline, but there is - // still the remainder of the file to return as a line. - line := line_ - return line, nil - } else if err != nil { - return nil, err - } - line := line_[0 : len(line_)-1] - return dropCR(line), nil -} - -// dropCR drops a terminal \r from the data. 
-func dropCR(data []byte) []byte { - if len(data) > 0 && data[len(data)-1] == '\r' { - return data[0 : len(data)-1] - } - return data -} diff --git a/vendor/github.com/spf13/afero/.gitignore b/vendor/github.com/spf13/afero/.gitignore deleted file mode 100644 index 9c1d98611..000000000 --- a/vendor/github.com/spf13/afero/.gitignore +++ /dev/null @@ -1,2 +0,0 @@ -sftpfs/file1 -sftpfs/test/ diff --git a/vendor/github.com/spf13/afero/LICENSE.txt b/vendor/github.com/spf13/afero/LICENSE.txt deleted file mode 100644 index 298f0e266..000000000 --- a/vendor/github.com/spf13/afero/LICENSE.txt +++ /dev/null @@ -1,174 +0,0 @@ - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. 
For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. 
The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. 
diff --git a/vendor/github.com/spf13/afero/README.md b/vendor/github.com/spf13/afero/README.md deleted file mode 100644 index 3bafbfdfc..000000000 --- a/vendor/github.com/spf13/afero/README.md +++ /dev/null @@ -1,442 +0,0 @@ -![afero logo-sm](https://cloud.githubusercontent.com/assets/173412/11490338/d50e16dc-97a5-11e5-8b12-019a300d0fcb.png) - -A FileSystem Abstraction System for Go - -[![Test](https://github.com/spf13/afero/actions/workflows/test.yml/badge.svg)](https://github.com/spf13/afero/actions/workflows/test.yml) [![GoDoc](https://godoc.org/github.com/spf13/afero?status.svg)](https://godoc.org/github.com/spf13/afero) [![Join the chat at https://gitter.im/spf13/afero](https://badges.gitter.im/Dev%20Chat.svg)](https://gitter.im/spf13/afero?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge) - -# Overview - -Afero is a filesystem framework providing a simple, uniform and universal API -interacting with any filesystem, as an abstraction layer providing interfaces, -types and methods. Afero has an exceptionally clean interface and simple design -without needless constructors or initialization methods. - -Afero is also a library providing a base set of interoperable backend -filesystems that make it easy to work with afero while retaining all the power -and benefit of the os and ioutil packages. - -Afero provides significant improvements over using the os package alone, most -notably the ability to create mock and testing filesystems without relying on the disk. - -It is suitable for use in any situation where you would consider using the OS -package as it provides an additional abstraction that makes it easy to use a -memory backed file system during testing. It also adds support for the http -filesystem for full interoperability. - - -## Afero Features - -* A single consistent API for accessing a variety of filesystems -* Interoperation between a variety of file system types -* A set of interfaces to encourage and enforce interoperability between backends -* An atomic cross platform memory backed file system -* Support for compositional (union) file systems by combining multiple file systems acting as one -* Specialized backends which modify existing filesystems (Read Only, Regexp filtered) -* A set of utility functions ported from io, ioutil & hugo to be afero aware -* Wrapper for go 1.16 filesystem abstraction `io/fs.FS` - -# Using Afero - -Afero is easy to use and easier to adopt. - -A few different ways you could use Afero: - -* Use the interfaces alone to define your own file system. -* Wrapper for the OS packages. -* Define different filesystems for different parts of your application. -* Use Afero for mock filesystems while testing - -## Step 1: Install Afero - -First use go get to install the latest version of the library. - - $ go get github.com/spf13/afero - -Next include Afero in your application. -```go -import "github.com/spf13/afero" -``` - -## Step 2: Declare a backend - -First define a package variable and set it to a pointer to a filesystem. -```go -var AppFs = afero.NewMemMapFs() - -or - -var AppFs = afero.NewOsFs() -``` -It is important to note that if you repeat the composite literal you -will be using a completely new and isolated filesystem. In the case of -OsFs it will still use the same underlying filesystem but will reduce -the ability to drop in other filesystems as desired. - -## Step 3: Use it like you would the OS package - -Throughout your application use any function and method like you normally -would. 
- -So if my application before had: -```go -os.Open("/tmp/foo") -``` -We would replace it with: -```go -AppFs.Open("/tmp/foo") -``` - -`AppFs` being the variable we defined above. - - -## List of all available functions - -File System Methods Available: -```go -Chmod(name string, mode os.FileMode) : error -Chown(name string, uid, gid int) : error -Chtimes(name string, atime time.Time, mtime time.Time) : error -Create(name string) : File, error -Mkdir(name string, perm os.FileMode) : error -MkdirAll(path string, perm os.FileMode) : error -Name() : string -Open(name string) : File, error -OpenFile(name string, flag int, perm os.FileMode) : File, error -Remove(name string) : error -RemoveAll(path string) : error -Rename(oldname, newname string) : error -Stat(name string) : os.FileInfo, error -``` -File Interfaces and Methods Available: -```go -io.Closer -io.Reader -io.ReaderAt -io.Seeker -io.Writer -io.WriterAt - -Name() : string -Readdir(count int) : []os.FileInfo, error -Readdirnames(n int) : []string, error -Stat() : os.FileInfo, error -Sync() : error -Truncate(size int64) : error -WriteString(s string) : ret int, err error -``` -In some applications it may make sense to define a new package that -simply exports the file system variable for easy access from anywhere. - -## Using Afero's utility functions - -Afero provides a set of functions to make it easier to use the underlying file systems. -These functions have been primarily ported from io & ioutil with some developed for Hugo. - -The afero utilities support all afero compatible backends. - -The list of utilities includes: - -```go -DirExists(path string) (bool, error) -Exists(path string) (bool, error) -FileContainsBytes(filename string, subslice []byte) (bool, error) -GetTempDir(subPath string) string -IsDir(path string) (bool, error) -IsEmpty(path string) (bool, error) -ReadDir(dirname string) ([]os.FileInfo, error) -ReadFile(filename string) ([]byte, error) -SafeWriteReader(path string, r io.Reader) (err error) -TempDir(dir, prefix string) (name string, err error) -TempFile(dir, prefix string) (f File, err error) -Walk(root string, walkFn filepath.WalkFunc) error -WriteFile(filename string, data []byte, perm os.FileMode) error -WriteReader(path string, r io.Reader) (err error) -``` -For a complete list see [Afero's GoDoc](https://godoc.org/github.com/spf13/afero) - -They are available under two different approaches to use. You can either call -them directly where the first parameter of each function will be the file -system, or you can declare a new `Afero`, a custom type used to bind these -functions as methods to a given filesystem. - -### Calling utilities directly - -```go -fs := new(afero.MemMapFs) -f, err := afero.TempFile(fs,"", "ioutil-test") - -``` - -### Calling via Afero - -```go -fs := afero.NewMemMapFs() -afs := &afero.Afero{Fs: fs} -f, err := afs.TempFile("", "ioutil-test") -``` - -## Using Afero for Testing - -There is a large benefit to using a mock filesystem for testing. It has a -completely blank state every time it is initialized and can be easily -reproducible regardless of OS. You could create files to your heart’s content -and the file access would be fast while also saving you from all the annoying -issues with deleting temporary files, Windows file locking, etc. The MemMapFs -backend is perfect for testing. - -* Much faster than performing I/O operations on disk -* Avoid security issues and permissions -* Far more control. 
'rm -rf /' with confidence -* Test setup is far more easier to do -* No test cleanup needed - -One way to accomplish this is to define a variable as mentioned above. -In your application this will be set to afero.NewOsFs() during testing you -can set it to afero.NewMemMapFs(). - -It wouldn't be uncommon to have each test initialize a blank slate memory -backend. To do this I would define my `appFS = afero.NewOsFs()` somewhere -appropriate in my application code. This approach ensures that Tests are order -independent, with no test relying on the state left by an earlier test. - -Then in my tests I would initialize a new MemMapFs for each test: -```go -func TestExist(t *testing.T) { - appFS := afero.NewMemMapFs() - // create test files and directories - appFS.MkdirAll("src/a", 0755) - afero.WriteFile(appFS, "src/a/b", []byte("file b"), 0644) - afero.WriteFile(appFS, "src/c", []byte("file c"), 0644) - name := "src/c" - _, err := appFS.Stat(name) - if os.IsNotExist(err) { - t.Errorf("file \"%s\" does not exist.\n", name) - } -} -``` - -# Available Backends - -## Operating System Native - -### OsFs - -The first is simply a wrapper around the native OS calls. This makes it -very easy to use as all of the calls are the same as the existing OS -calls. It also makes it trivial to have your code use the OS during -operation and a mock filesystem during testing or as needed. - -```go -appfs := afero.NewOsFs() -appfs.MkdirAll("src/a", 0755) -``` - -## Memory Backed Storage - -### MemMapFs - -Afero also provides a fully atomic memory backed filesystem perfect for use in -mocking and to speed up unnecessary disk io when persistence isn’t -necessary. It is fully concurrent and will work within go routines -safely. - -```go -mm := afero.NewMemMapFs() -mm.MkdirAll("src/a", 0755) -``` - -#### InMemoryFile - -As part of MemMapFs, Afero also provides an atomic, fully concurrent memory -backed file implementation. This can be used in other memory backed file -systems with ease. Plans are to add a radix tree memory stored file -system using InMemoryFile. - -## Network Interfaces - -### SftpFs - -Afero has experimental support for secure file transfer protocol (sftp). Which can -be used to perform file operations over a encrypted channel. - -### GCSFs - -Afero has experimental support for Google Cloud Storage (GCS). You can either set the -`GOOGLE_APPLICATION_CREDENTIALS_JSON` env variable to your JSON credentials or use `opts` in -`NewGcsFS` to configure access to your GCS bucket. - -Some known limitations of the existing implementation: -* No Chmod support - The GCS ACL could probably be mapped to *nix style permissions but that would add another level of complexity and is ignored in this version. -* No Chtimes support - Could be simulated with attributes (gcs a/m-times are set implicitly) but that's is left for another version. -* Not thread safe - Also assumes all file operations are done through the same instance of the GcsFs. File operations between different GcsFs instances are not guaranteed to be consistent. - - -## Filtering Backends - -### BasePathFs - -The BasePathFs restricts all operations to a given path within an Fs. -The given file name to the operations on this Fs will be prepended with -the base path before calling the source Fs. - -```go -bp := afero.NewBasePathFs(afero.NewOsFs(), "/base/path") -``` - -### ReadOnlyFs - -A thin wrapper around the source Fs providing a read only view. 
- -```go -fs := afero.NewReadOnlyFs(afero.NewOsFs()) -_, err := fs.Create("/file.txt") -// err = syscall.EPERM -``` - -# RegexpFs - -A filtered view on file names, any file NOT matching -the passed regexp will be treated as non-existing. -Files not matching the regexp provided will not be created. -Directories are not filtered. - -```go -fs := afero.NewRegexpFs(afero.NewMemMapFs(), regexp.MustCompile(`\.txt$`)) -_, err := fs.Create("/file.html") -// err = syscall.ENOENT -``` - -### HttpFs - -Afero provides an http compatible backend which can wrap any of the existing -backends. - -The Http package requires a slightly specific version of Open which -returns an http.File type. - -Afero provides an httpFs file system which satisfies this requirement. -Any Afero FileSystem can be used as an httpFs. - -```go -httpFs := afero.NewHttpFs() -fileserver := http.FileServer(httpFs.Dir()) -http.Handle("/", fileserver) -``` - -## Composite Backends - -Afero provides the ability have two filesystems (or more) act as a single -file system. - -### CacheOnReadFs - -The CacheOnReadFs will lazily make copies of any accessed files from the base -layer into the overlay. Subsequent reads will be pulled from the overlay -directly permitting the request is within the cache duration of when it was -created in the overlay. - -If the base filesystem is writeable, any changes to files will be -done first to the base, then to the overlay layer. Write calls to open file -handles like `Write()` or `Truncate()` to the overlay first. - -To writing files to the overlay only, you can use the overlay Fs directly (not -via the union Fs). - -Cache files in the layer for the given time.Duration, a cache duration of 0 -means "forever" meaning the file will not be re-requested from the base ever. - -A read-only base will make the overlay also read-only but still copy files -from the base to the overlay when they're not present (or outdated) in the -caching layer. - -```go -base := afero.NewOsFs() -layer := afero.NewMemMapFs() -ufs := afero.NewCacheOnReadFs(base, layer, 100 * time.Second) -``` - -### CopyOnWriteFs() - -The CopyOnWriteFs is a read only base file system with a potentially -writeable layer on top. - -Read operations will first look in the overlay and if not found there, will -serve the file from the base. - -Changes to the file system will only be made in the overlay. - -Any attempt to modify a file found only in the base will copy the file to the -overlay layer before modification (including opening a file with a writable -handle). - -Removing and Renaming files present only in the base layer is not currently -permitted. If a file is present in the base layer and the overlay, only the -overlay will be removed/renamed. - -```go - base := afero.NewOsFs() - roBase := afero.NewReadOnlyFs(base) - ufs := afero.NewCopyOnWriteFs(roBase, afero.NewMemMapFs()) - - fh, _ = ufs.Create("/home/test/file2.txt") - fh.WriteString("This is a test") - fh.Close() -``` - -In this example all write operations will only occur in memory (MemMapFs) -leaving the base filesystem (OsFs) untouched. - - -## Desired/possible backends - -The following is a short list of possible backends we hope someone will -implement: - -* SSH -* S3 - -# About the project - -## What's in the name - -Afero comes from the latin roots Ad-Facere. - -**"Ad"** is a prefix meaning "to". - -**"Facere"** is a form of the root "faciō" making "make or do". 
- -The literal meaning of afero is "to make" or "to do" which seems very fitting -for a library that allows one to make files and directories and do things with them. - -The English word that shares the same roots as Afero is "affair". Affair shares -the same concept but as a noun it means "something that is made or done" or "an -object of a particular type". - -It's also nice that unlike some of my other libraries (hugo, cobra, viper) it -Googles very well. - -## Release Notes - -See the [Releases Page](https://github.com/spf13/afero/releases). - -## Contributing - -1. Fork it -2. Create your feature branch (`git checkout -b my-new-feature`) -3. Commit your changes (`git commit -am 'Add some feature'`) -4. Push to the branch (`git push origin my-new-feature`) -5. Create new Pull Request - -## Contributors - -Names in no particular order: - -* [spf13](https://github.com/spf13) -* [jaqx0r](https://github.com/jaqx0r) -* [mbertschler](https://github.com/mbertschler) -* [xor-gate](https://github.com/xor-gate) - -## License - -Afero is released under the Apache 2.0 license. See -[LICENSE.txt](https://github.com/spf13/afero/blob/master/LICENSE.txt) diff --git a/vendor/github.com/spf13/afero/afero.go b/vendor/github.com/spf13/afero/afero.go deleted file mode 100644 index 39f658520..000000000 --- a/vendor/github.com/spf13/afero/afero.go +++ /dev/null @@ -1,111 +0,0 @@ -// Copyright © 2014 Steve Francia . -// Copyright 2013 tsuru authors. All rights reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -// Package afero provides types and methods for interacting with the filesystem, -// as an abstraction layer. - -// Afero also provides a few implementations that are mostly interoperable. One that -// uses the operating system filesystem, one that uses memory to store files -// (cross platform) and an interface that should be implemented if you want to -// provide your own filesystem. - -package afero - -import ( - "errors" - "io" - "os" - "time" -) - -type Afero struct { - Fs -} - -// File represents a file in the filesystem. -type File interface { - io.Closer - io.Reader - io.ReaderAt - io.Seeker - io.Writer - io.WriterAt - - Name() string - Readdir(count int) ([]os.FileInfo, error) - Readdirnames(n int) ([]string, error) - Stat() (os.FileInfo, error) - Sync() error - Truncate(size int64) error - WriteString(s string) (ret int, err error) -} - -// Fs is the filesystem interface. -// -// Any simulated or real filesystem should implement this interface. -type Fs interface { - // Create creates a file in the filesystem, returning the file and an - // error, if any happens. - Create(name string) (File, error) - - // Mkdir creates a directory in the filesystem, return an error if any - // happens. - Mkdir(name string, perm os.FileMode) error - - // MkdirAll creates a directory path and all parents that does not exist - // yet. - MkdirAll(path string, perm os.FileMode) error - - // Open opens a file, returning it or an error, if any happens. 
- Open(name string) (File, error) - - // OpenFile opens a file using the given flags and the given mode. - OpenFile(name string, flag int, perm os.FileMode) (File, error) - - // Remove removes a file identified by name, returning an error, if any - // happens. - Remove(name string) error - - // RemoveAll removes a directory path and any children it contains. It - // does not fail if the path does not exist (return nil). - RemoveAll(path string) error - - // Rename renames a file. - Rename(oldname, newname string) error - - // Stat returns a FileInfo describing the named file, or an error, if any - // happens. - Stat(name string) (os.FileInfo, error) - - // The name of this FileSystem - Name() string - - // Chmod changes the mode of the named file to mode. - Chmod(name string, mode os.FileMode) error - - // Chown changes the uid and gid of the named file. - Chown(name string, uid, gid int) error - - // Chtimes changes the access and modification times of the named file - Chtimes(name string, atime time.Time, mtime time.Time) error -} - -var ( - ErrFileClosed = errors.New("File is closed") - ErrOutOfRange = errors.New("out of range") - ErrTooLarge = errors.New("too large") - ErrFileNotFound = os.ErrNotExist - ErrFileExists = os.ErrExist - ErrDestinationExists = os.ErrExist -) diff --git a/vendor/github.com/spf13/afero/appveyor.yml b/vendor/github.com/spf13/afero/appveyor.yml deleted file mode 100644 index 65e20e8ca..000000000 --- a/vendor/github.com/spf13/afero/appveyor.yml +++ /dev/null @@ -1,10 +0,0 @@ -# This currently does nothing. We have moved to GitHub action, but this is kept -# until spf13 has disabled this project in AppVeyor. -version: '{build}' -clone_folder: C:\gopath\src\github.com\spf13\afero -environment: - GOPATH: C:\gopath -build_script: -- cmd: >- - go version - diff --git a/vendor/github.com/spf13/afero/basepath.go b/vendor/github.com/spf13/afero/basepath.go deleted file mode 100644 index 2e72793a3..000000000 --- a/vendor/github.com/spf13/afero/basepath.go +++ /dev/null @@ -1,222 +0,0 @@ -package afero - -import ( - "io/fs" - "os" - "path/filepath" - "runtime" - "strings" - "time" -) - -var ( - _ Lstater = (*BasePathFs)(nil) - _ fs.ReadDirFile = (*BasePathFile)(nil) -) - -// The BasePathFs restricts all operations to a given path within an Fs. -// The given file name to the operations on this Fs will be prepended with -// the base path before calling the base Fs. -// Any file name (after filepath.Clean()) outside this base path will be -// treated as non existing file. -// -// Note that it does not clean the error messages on return, so you may -// reveal the real path on errors. 
-type BasePathFs struct { - source Fs - path string -} - -type BasePathFile struct { - File - path string -} - -func (f *BasePathFile) Name() string { - sourcename := f.File.Name() - return strings.TrimPrefix(sourcename, filepath.Clean(f.path)) -} - -func (f *BasePathFile) ReadDir(n int) ([]fs.DirEntry, error) { - if rdf, ok := f.File.(fs.ReadDirFile); ok { - return rdf.ReadDir(n) - } - return readDirFile{f.File}.ReadDir(n) -} - -func NewBasePathFs(source Fs, path string) Fs { - return &BasePathFs{source: source, path: path} -} - -// on a file outside the base path it returns the given file name and an error, -// else the given file with the base path prepended -func (b *BasePathFs) RealPath(name string) (path string, err error) { - if err := validateBasePathName(name); err != nil { - return name, err - } - - bpath := filepath.Clean(b.path) - path = filepath.Clean(filepath.Join(bpath, name)) - if !strings.HasPrefix(path, bpath) { - return name, os.ErrNotExist - } - - return path, nil -} - -func validateBasePathName(name string) error { - if runtime.GOOS != "windows" { - // Not much to do here; - // the virtual file paths all look absolute on *nix. - return nil - } - - // On Windows a common mistake would be to provide an absolute OS path - // We could strip out the base part, but that would not be very portable. - if filepath.IsAbs(name) { - return os.ErrNotExist - } - - return nil -} - -func (b *BasePathFs) Chtimes(name string, atime, mtime time.Time) (err error) { - if name, err = b.RealPath(name); err != nil { - return &os.PathError{Op: "chtimes", Path: name, Err: err} - } - return b.source.Chtimes(name, atime, mtime) -} - -func (b *BasePathFs) Chmod(name string, mode os.FileMode) (err error) { - if name, err = b.RealPath(name); err != nil { - return &os.PathError{Op: "chmod", Path: name, Err: err} - } - return b.source.Chmod(name, mode) -} - -func (b *BasePathFs) Chown(name string, uid, gid int) (err error) { - if name, err = b.RealPath(name); err != nil { - return &os.PathError{Op: "chown", Path: name, Err: err} - } - return b.source.Chown(name, uid, gid) -} - -func (b *BasePathFs) Name() string { - return "BasePathFs" -} - -func (b *BasePathFs) Stat(name string) (fi os.FileInfo, err error) { - if name, err = b.RealPath(name); err != nil { - return nil, &os.PathError{Op: "stat", Path: name, Err: err} - } - return b.source.Stat(name) -} - -func (b *BasePathFs) Rename(oldname, newname string) (err error) { - if oldname, err = b.RealPath(oldname); err != nil { - return &os.PathError{Op: "rename", Path: oldname, Err: err} - } - if newname, err = b.RealPath(newname); err != nil { - return &os.PathError{Op: "rename", Path: newname, Err: err} - } - return b.source.Rename(oldname, newname) -} - -func (b *BasePathFs) RemoveAll(name string) (err error) { - if name, err = b.RealPath(name); err != nil { - return &os.PathError{Op: "remove_all", Path: name, Err: err} - } - return b.source.RemoveAll(name) -} - -func (b *BasePathFs) Remove(name string) (err error) { - if name, err = b.RealPath(name); err != nil { - return &os.PathError{Op: "remove", Path: name, Err: err} - } - return b.source.Remove(name) -} - -func (b *BasePathFs) OpenFile(name string, flag int, mode os.FileMode) (f File, err error) { - if name, err = b.RealPath(name); err != nil { - return nil, &os.PathError{Op: "openfile", Path: name, Err: err} - } - sourcef, err := b.source.OpenFile(name, flag, mode) - if err != nil { - return nil, err - } - return &BasePathFile{sourcef, b.path}, nil -} - -func (b *BasePathFs) Open(name string) 
(f File, err error) { - if name, err = b.RealPath(name); err != nil { - return nil, &os.PathError{Op: "open", Path: name, Err: err} - } - sourcef, err := b.source.Open(name) - if err != nil { - return nil, err - } - return &BasePathFile{File: sourcef, path: b.path}, nil -} - -func (b *BasePathFs) Mkdir(name string, mode os.FileMode) (err error) { - if name, err = b.RealPath(name); err != nil { - return &os.PathError{Op: "mkdir", Path: name, Err: err} - } - return b.source.Mkdir(name, mode) -} - -func (b *BasePathFs) MkdirAll(name string, mode os.FileMode) (err error) { - if name, err = b.RealPath(name); err != nil { - return &os.PathError{Op: "mkdir", Path: name, Err: err} - } - return b.source.MkdirAll(name, mode) -} - -func (b *BasePathFs) Create(name string) (f File, err error) { - if name, err = b.RealPath(name); err != nil { - return nil, &os.PathError{Op: "create", Path: name, Err: err} - } - sourcef, err := b.source.Create(name) - if err != nil { - return nil, err - } - return &BasePathFile{File: sourcef, path: b.path}, nil -} - -func (b *BasePathFs) LstatIfPossible(name string) (os.FileInfo, bool, error) { - name, err := b.RealPath(name) - if err != nil { - return nil, false, &os.PathError{Op: "lstat", Path: name, Err: err} - } - if lstater, ok := b.source.(Lstater); ok { - return lstater.LstatIfPossible(name) - } - fi, err := b.source.Stat(name) - return fi, false, err -} - -func (b *BasePathFs) SymlinkIfPossible(oldname, newname string) error { - oldname, err := b.RealPath(oldname) - if err != nil { - return &os.LinkError{Op: "symlink", Old: oldname, New: newname, Err: err} - } - newname, err = b.RealPath(newname) - if err != nil { - return &os.LinkError{Op: "symlink", Old: oldname, New: newname, Err: err} - } - if linker, ok := b.source.(Linker); ok { - return linker.SymlinkIfPossible(oldname, newname) - } - return &os.LinkError{Op: "symlink", Old: oldname, New: newname, Err: ErrNoSymlink} -} - -func (b *BasePathFs) ReadlinkIfPossible(name string) (string, error) { - name, err := b.RealPath(name) - if err != nil { - return "", &os.PathError{Op: "readlink", Path: name, Err: err} - } - if reader, ok := b.source.(LinkReader); ok { - return reader.ReadlinkIfPossible(name) - } - return "", &os.PathError{Op: "readlink", Path: name, Err: ErrNoReadlink} -} diff --git a/vendor/github.com/spf13/afero/cacheOnReadFs.go b/vendor/github.com/spf13/afero/cacheOnReadFs.go deleted file mode 100644 index 017d344fd..000000000 --- a/vendor/github.com/spf13/afero/cacheOnReadFs.go +++ /dev/null @@ -1,315 +0,0 @@ -package afero - -import ( - "os" - "syscall" - "time" -) - -// If the cache duration is 0, cache time will be unlimited, i.e. once -// a file is in the layer, the base will never be read again for this file. -// -// For cache times greater than 0, the modification time of a file is -// checked. Note that a lot of file system implementations only allow a -// resolution of a second for timestamps... or as the godoc for os.Chtimes() -// states: "The underlying filesystem may truncate or round the values to a -// less precise time unit." -// -// This caching union will forward all write calls also to the base file -// system first. To prevent writing to the base Fs, wrap it in a read-only -// filter - Note: this will also make the overlay read-only, for writing files -// in the overlay, use the overlay Fs directly, not via the union Fs. 
-type CacheOnReadFs struct { - base Fs - layer Fs - cacheTime time.Duration -} - -func NewCacheOnReadFs(base Fs, layer Fs, cacheTime time.Duration) Fs { - return &CacheOnReadFs{base: base, layer: layer, cacheTime: cacheTime} -} - -type cacheState int - -const ( - // not present in the overlay, unknown if it exists in the base: - cacheMiss cacheState = iota - // present in the overlay and in base, base file is newer: - cacheStale - // present in the overlay - with cache time == 0 it may exist in the base, - // with cacheTime > 0 it exists in the base and is same age or newer in the - // overlay - cacheHit - // happens if someone writes directly to the overlay without - // going through this union - cacheLocal -) - -func (u *CacheOnReadFs) cacheStatus(name string) (state cacheState, fi os.FileInfo, err error) { - var lfi, bfi os.FileInfo - lfi, err = u.layer.Stat(name) - if err == nil { - if u.cacheTime == 0 { - return cacheHit, lfi, nil - } - if lfi.ModTime().Add(u.cacheTime).Before(time.Now()) { - bfi, err = u.base.Stat(name) - if err != nil { - return cacheLocal, lfi, nil - } - if bfi.ModTime().After(lfi.ModTime()) { - return cacheStale, bfi, nil - } - } - return cacheHit, lfi, nil - } - - if err == syscall.ENOENT || os.IsNotExist(err) { - return cacheMiss, nil, nil - } - - return cacheMiss, nil, err -} - -func (u *CacheOnReadFs) copyToLayer(name string) error { - return copyToLayer(u.base, u.layer, name) -} - -func (u *CacheOnReadFs) copyFileToLayer(name string, flag int, perm os.FileMode) error { - return copyFileToLayer(u.base, u.layer, name, flag, perm) -} - -func (u *CacheOnReadFs) Chtimes(name string, atime, mtime time.Time) error { - st, _, err := u.cacheStatus(name) - if err != nil { - return err - } - switch st { - case cacheLocal: - case cacheHit: - err = u.base.Chtimes(name, atime, mtime) - case cacheStale, cacheMiss: - if err := u.copyToLayer(name); err != nil { - return err - } - err = u.base.Chtimes(name, atime, mtime) - } - if err != nil { - return err - } - return u.layer.Chtimes(name, atime, mtime) -} - -func (u *CacheOnReadFs) Chmod(name string, mode os.FileMode) error { - st, _, err := u.cacheStatus(name) - if err != nil { - return err - } - switch st { - case cacheLocal: - case cacheHit: - err = u.base.Chmod(name, mode) - case cacheStale, cacheMiss: - if err := u.copyToLayer(name); err != nil { - return err - } - err = u.base.Chmod(name, mode) - } - if err != nil { - return err - } - return u.layer.Chmod(name, mode) -} - -func (u *CacheOnReadFs) Chown(name string, uid, gid int) error { - st, _, err := u.cacheStatus(name) - if err != nil { - return err - } - switch st { - case cacheLocal: - case cacheHit: - err = u.base.Chown(name, uid, gid) - case cacheStale, cacheMiss: - if err := u.copyToLayer(name); err != nil { - return err - } - err = u.base.Chown(name, uid, gid) - } - if err != nil { - return err - } - return u.layer.Chown(name, uid, gid) -} - -func (u *CacheOnReadFs) Stat(name string) (os.FileInfo, error) { - st, fi, err := u.cacheStatus(name) - if err != nil { - return nil, err - } - switch st { - case cacheMiss: - return u.base.Stat(name) - default: // cacheStale has base, cacheHit and cacheLocal the layer os.FileInfo - return fi, nil - } -} - -func (u *CacheOnReadFs) Rename(oldname, newname string) error { - st, _, err := u.cacheStatus(oldname) - if err != nil { - return err - } - switch st { - case cacheLocal: - case cacheHit: - err = u.base.Rename(oldname, newname) - case cacheStale, cacheMiss: - if err := u.copyToLayer(oldname); err != nil { - return err - 
} - err = u.base.Rename(oldname, newname) - } - if err != nil { - return err - } - return u.layer.Rename(oldname, newname) -} - -func (u *CacheOnReadFs) Remove(name string) error { - st, _, err := u.cacheStatus(name) - if err != nil { - return err - } - switch st { - case cacheLocal: - case cacheHit, cacheStale, cacheMiss: - err = u.base.Remove(name) - } - if err != nil { - return err - } - return u.layer.Remove(name) -} - -func (u *CacheOnReadFs) RemoveAll(name string) error { - st, _, err := u.cacheStatus(name) - if err != nil { - return err - } - switch st { - case cacheLocal: - case cacheHit, cacheStale, cacheMiss: - err = u.base.RemoveAll(name) - } - if err != nil { - return err - } - return u.layer.RemoveAll(name) -} - -func (u *CacheOnReadFs) OpenFile(name string, flag int, perm os.FileMode) (File, error) { - st, _, err := u.cacheStatus(name) - if err != nil { - return nil, err - } - switch st { - case cacheLocal, cacheHit: - default: - if err := u.copyFileToLayer(name, flag, perm); err != nil { - return nil, err - } - } - if flag&(os.O_WRONLY|syscall.O_RDWR|os.O_APPEND|os.O_CREATE|os.O_TRUNC) != 0 { - bfi, err := u.base.OpenFile(name, flag, perm) - if err != nil { - return nil, err - } - lfi, err := u.layer.OpenFile(name, flag, perm) - if err != nil { - bfi.Close() // oops, what if O_TRUNC was set and file opening in the layer failed...? - return nil, err - } - return &UnionFile{Base: bfi, Layer: lfi}, nil - } - return u.layer.OpenFile(name, flag, perm) -} - -func (u *CacheOnReadFs) Open(name string) (File, error) { - st, fi, err := u.cacheStatus(name) - if err != nil { - return nil, err - } - - switch st { - case cacheLocal: - return u.layer.Open(name) - - case cacheMiss: - bfi, err := u.base.Stat(name) - if err != nil { - return nil, err - } - if bfi.IsDir() { - return u.base.Open(name) - } - if err := u.copyToLayer(name); err != nil { - return nil, err - } - return u.layer.Open(name) - - case cacheStale: - if !fi.IsDir() { - if err := u.copyToLayer(name); err != nil { - return nil, err - } - return u.layer.Open(name) - } - case cacheHit: - if !fi.IsDir() { - return u.layer.Open(name) - } - } - // the dirs from cacheHit, cacheStale fall down here: - bfile, _ := u.base.Open(name) - lfile, err := u.layer.Open(name) - if err != nil && bfile == nil { - return nil, err - } - return &UnionFile{Base: bfile, Layer: lfile}, nil -} - -func (u *CacheOnReadFs) Mkdir(name string, perm os.FileMode) error { - err := u.base.Mkdir(name, perm) - if err != nil { - return err - } - return u.layer.MkdirAll(name, perm) // yes, MkdirAll... we cannot assume it exists in the cache -} - -func (u *CacheOnReadFs) Name() string { - return "CacheOnReadFs" -} - -func (u *CacheOnReadFs) MkdirAll(name string, perm os.FileMode) error { - err := u.base.MkdirAll(name, perm) - if err != nil { - return err - } - return u.layer.MkdirAll(name, perm) -} - -func (u *CacheOnReadFs) Create(name string) (File, error) { - bfh, err := u.base.Create(name) - if err != nil { - return nil, err - } - lfh, err := u.layer.Create(name) - if err != nil { - // oops, see comment about OS_TRUNC above, should we remove? then we have to - // remember if the file did not exist before - bfh.Close() - return nil, err - } - return &UnionFile{Base: bfh, Layer: lfh}, nil -} diff --git a/vendor/github.com/spf13/afero/const_bsds.go b/vendor/github.com/spf13/afero/const_bsds.go deleted file mode 100644 index 30855de57..000000000 --- a/vendor/github.com/spf13/afero/const_bsds.go +++ /dev/null @@ -1,23 +0,0 @@ -// Copyright © 2016 Steve Francia . 
-// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -//go:build aix || darwin || openbsd || freebsd || netbsd || dragonfly || zos -// +build aix darwin openbsd freebsd netbsd dragonfly zos - -package afero - -import ( - "syscall" -) - -const BADFD = syscall.EBADF diff --git a/vendor/github.com/spf13/afero/const_win_unix.go b/vendor/github.com/spf13/afero/const_win_unix.go deleted file mode 100644 index 12792d21e..000000000 --- a/vendor/github.com/spf13/afero/const_win_unix.go +++ /dev/null @@ -1,22 +0,0 @@ -// Copyright © 2016 Steve Francia . -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -//go:build !darwin && !openbsd && !freebsd && !dragonfly && !netbsd && !aix && !zos -// +build !darwin,!openbsd,!freebsd,!dragonfly,!netbsd,!aix,!zos - -package afero - -import ( - "syscall" -) - -const BADFD = syscall.EBADFD diff --git a/vendor/github.com/spf13/afero/copyOnWriteFs.go b/vendor/github.com/spf13/afero/copyOnWriteFs.go deleted file mode 100644 index 184d6dd70..000000000 --- a/vendor/github.com/spf13/afero/copyOnWriteFs.go +++ /dev/null @@ -1,327 +0,0 @@ -package afero - -import ( - "fmt" - "os" - "path/filepath" - "syscall" - "time" -) - -var _ Lstater = (*CopyOnWriteFs)(nil) - -// The CopyOnWriteFs is a union filesystem: a read only base file system with -// a possibly writeable layer on top. Changes to the file system will only -// be made in the overlay: Changing an existing file in the base layer which -// is not present in the overlay will copy the file to the overlay ("changing" -// includes also calls to e.g. Chtimes(), Chmod() and Chown()). -// -// Reading directories is currently only supported via Open(), not OpenFile(). 
-type CopyOnWriteFs struct { - base Fs - layer Fs -} - -func NewCopyOnWriteFs(base Fs, layer Fs) Fs { - return &CopyOnWriteFs{base: base, layer: layer} -} - -// Returns true if the file is not in the overlay -func (u *CopyOnWriteFs) isBaseFile(name string) (bool, error) { - if _, err := u.layer.Stat(name); err == nil { - return false, nil - } - _, err := u.base.Stat(name) - if err != nil { - if oerr, ok := err.(*os.PathError); ok { - if oerr.Err == os.ErrNotExist || oerr.Err == syscall.ENOENT || oerr.Err == syscall.ENOTDIR { - return false, nil - } - } - if err == syscall.ENOENT { - return false, nil - } - } - return true, err -} - -func (u *CopyOnWriteFs) copyToLayer(name string) error { - return copyToLayer(u.base, u.layer, name) -} - -func (u *CopyOnWriteFs) Chtimes(name string, atime, mtime time.Time) error { - b, err := u.isBaseFile(name) - if err != nil { - return err - } - if b { - if err := u.copyToLayer(name); err != nil { - return err - } - } - return u.layer.Chtimes(name, atime, mtime) -} - -func (u *CopyOnWriteFs) Chmod(name string, mode os.FileMode) error { - b, err := u.isBaseFile(name) - if err != nil { - return err - } - if b { - if err := u.copyToLayer(name); err != nil { - return err - } - } - return u.layer.Chmod(name, mode) -} - -func (u *CopyOnWriteFs) Chown(name string, uid, gid int) error { - b, err := u.isBaseFile(name) - if err != nil { - return err - } - if b { - if err := u.copyToLayer(name); err != nil { - return err - } - } - return u.layer.Chown(name, uid, gid) -} - -func (u *CopyOnWriteFs) Stat(name string) (os.FileInfo, error) { - fi, err := u.layer.Stat(name) - if err != nil { - isNotExist := u.isNotExist(err) - if isNotExist { - return u.base.Stat(name) - } - return nil, err - } - return fi, nil -} - -func (u *CopyOnWriteFs) LstatIfPossible(name string) (os.FileInfo, bool, error) { - llayer, ok1 := u.layer.(Lstater) - lbase, ok2 := u.base.(Lstater) - - if ok1 { - fi, b, err := llayer.LstatIfPossible(name) - if err == nil { - return fi, b, nil - } - - if !u.isNotExist(err) { - return nil, b, err - } - } - - if ok2 { - fi, b, err := lbase.LstatIfPossible(name) - if err == nil { - return fi, b, nil - } - if !u.isNotExist(err) { - return nil, b, err - } - } - - fi, err := u.Stat(name) - - return fi, false, err -} - -func (u *CopyOnWriteFs) SymlinkIfPossible(oldname, newname string) error { - if slayer, ok := u.layer.(Linker); ok { - return slayer.SymlinkIfPossible(oldname, newname) - } - - return &os.LinkError{Op: "symlink", Old: oldname, New: newname, Err: ErrNoSymlink} -} - -func (u *CopyOnWriteFs) ReadlinkIfPossible(name string) (string, error) { - if rlayer, ok := u.layer.(LinkReader); ok { - return rlayer.ReadlinkIfPossible(name) - } - - if rbase, ok := u.base.(LinkReader); ok { - return rbase.ReadlinkIfPossible(name) - } - - return "", &os.PathError{Op: "readlink", Path: name, Err: ErrNoReadlink} -} - -func (u *CopyOnWriteFs) isNotExist(err error) bool { - if e, ok := err.(*os.PathError); ok { - err = e.Err - } - if err == os.ErrNotExist || err == syscall.ENOENT || err == syscall.ENOTDIR { - return true - } - return false -} - -// Renaming files present only in the base layer is not permitted -func (u *CopyOnWriteFs) Rename(oldname, newname string) error { - b, err := u.isBaseFile(oldname) - if err != nil { - return err - } - if b { - return syscall.EPERM - } - return u.layer.Rename(oldname, newname) -} - -// Removing files present only in the base layer is not permitted. 
If -// a file is present in the base layer and the overlay, only the overlay -// will be removed. -func (u *CopyOnWriteFs) Remove(name string) error { - err := u.layer.Remove(name) - switch err { - case syscall.ENOENT: - _, err = u.base.Stat(name) - if err == nil { - return syscall.EPERM - } - return syscall.ENOENT - default: - return err - } -} - -func (u *CopyOnWriteFs) RemoveAll(name string) error { - err := u.layer.RemoveAll(name) - switch err { - case syscall.ENOENT: - _, err = u.base.Stat(name) - if err == nil { - return syscall.EPERM - } - return syscall.ENOENT - default: - return err - } -} - -func (u *CopyOnWriteFs) OpenFile(name string, flag int, perm os.FileMode) (File, error) { - b, err := u.isBaseFile(name) - if err != nil { - return nil, err - } - - if flag&(os.O_WRONLY|os.O_RDWR|os.O_APPEND|os.O_CREATE|os.O_TRUNC) != 0 { - if b { - if err = u.copyToLayer(name); err != nil { - return nil, err - } - return u.layer.OpenFile(name, flag, perm) - } - - dir := filepath.Dir(name) - isaDir, err := IsDir(u.base, dir) - if err != nil && !os.IsNotExist(err) { - return nil, err - } - if isaDir { - if err = u.layer.MkdirAll(dir, 0o777); err != nil { - return nil, err - } - return u.layer.OpenFile(name, flag, perm) - } - - isaDir, err = IsDir(u.layer, dir) - if err != nil { - return nil, err - } - if isaDir { - return u.layer.OpenFile(name, flag, perm) - } - - return nil, &os.PathError{Op: "open", Path: name, Err: syscall.ENOTDIR} // ...or os.ErrNotExist? - } - if b { - return u.base.OpenFile(name, flag, perm) - } - return u.layer.OpenFile(name, flag, perm) -} - -// This function handles the 9 different possibilities caused -// by the union which are the intersection of the following... -// -// layer: doesn't exist, exists as a file, and exists as a directory -// base: doesn't exist, exists as a file, and exists as a directory -func (u *CopyOnWriteFs) Open(name string) (File, error) { - // Since the overlay overrides the base we check that first - b, err := u.isBaseFile(name) - if err != nil { - return nil, err - } - - // If overlay doesn't exist, return the base (base state irrelevant) - if b { - return u.base.Open(name) - } - - // If overlay is a file, return it (base state irrelevant) - dir, err := IsDir(u.layer, name) - if err != nil { - return nil, err - } - if !dir { - return u.layer.Open(name) - } - - // Overlay is a directory, base state now matters. - // Base state has 3 states to check but 2 outcomes: - // A. It's a file or non-readable in the base (return just the overlay) - // B. It's an accessible directory in the base (return a UnionFile) - - // If base is file or nonreadable, return overlay - dir, err = IsDir(u.base, name) - if !dir || err != nil { - return u.layer.Open(name) - } - - // Both base & layer are directories - // Return union file (if opens are without error) - bfile, bErr := u.base.Open(name) - lfile, lErr := u.layer.Open(name) - - // If either have errors at this point something is very wrong. 
Return nil and the errors - if bErr != nil || lErr != nil { - return nil, fmt.Errorf("BaseErr: %v\nOverlayErr: %v", bErr, lErr) - } - - return &UnionFile{Base: bfile, Layer: lfile}, nil -} - -func (u *CopyOnWriteFs) Mkdir(name string, perm os.FileMode) error { - dir, err := IsDir(u.base, name) - if err != nil { - return u.layer.MkdirAll(name, perm) - } - if dir { - return ErrFileExists - } - return u.layer.MkdirAll(name, perm) -} - -func (u *CopyOnWriteFs) Name() string { - return "CopyOnWriteFs" -} - -func (u *CopyOnWriteFs) MkdirAll(name string, perm os.FileMode) error { - dir, err := IsDir(u.base, name) - if err != nil { - return u.layer.MkdirAll(name, perm) - } - if dir { - // This is in line with how os.MkdirAll behaves. - return nil - } - return u.layer.MkdirAll(name, perm) -} - -func (u *CopyOnWriteFs) Create(name string) (File, error) { - return u.OpenFile(name, os.O_CREATE|os.O_TRUNC|os.O_RDWR, 0o666) -} diff --git a/vendor/github.com/spf13/afero/httpFs.go b/vendor/github.com/spf13/afero/httpFs.go deleted file mode 100644 index ac0de6d51..000000000 --- a/vendor/github.com/spf13/afero/httpFs.go +++ /dev/null @@ -1,114 +0,0 @@ -// Copyright © 2014 Steve Francia . -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package afero - -import ( - "errors" - "net/http" - "os" - "path" - "path/filepath" - "strings" - "time" -) - -type httpDir struct { - basePath string - fs HttpFs -} - -func (d httpDir) Open(name string) (http.File, error) { - if filepath.Separator != '/' && strings.ContainsRune(name, filepath.Separator) || - strings.Contains(name, "\x00") { - return nil, errors.New("http: invalid character in file path") - } - dir := string(d.basePath) - if dir == "" { - dir = "." 
- } - - f, err := d.fs.Open(filepath.Join(dir, filepath.FromSlash(path.Clean("/"+name)))) - if err != nil { - return nil, err - } - return f, nil -} - -type HttpFs struct { - source Fs -} - -func NewHttpFs(source Fs) *HttpFs { - return &HttpFs{source: source} -} - -func (h HttpFs) Dir(s string) *httpDir { - return &httpDir{basePath: s, fs: h} -} - -func (h HttpFs) Name() string { return "h HttpFs" } - -func (h HttpFs) Create(name string) (File, error) { - return h.source.Create(name) -} - -func (h HttpFs) Chmod(name string, mode os.FileMode) error { - return h.source.Chmod(name, mode) -} - -func (h HttpFs) Chown(name string, uid, gid int) error { - return h.source.Chown(name, uid, gid) -} - -func (h HttpFs) Chtimes(name string, atime time.Time, mtime time.Time) error { - return h.source.Chtimes(name, atime, mtime) -} - -func (h HttpFs) Mkdir(name string, perm os.FileMode) error { - return h.source.Mkdir(name, perm) -} - -func (h HttpFs) MkdirAll(path string, perm os.FileMode) error { - return h.source.MkdirAll(path, perm) -} - -func (h HttpFs) Open(name string) (http.File, error) { - f, err := h.source.Open(name) - if err == nil { - if httpfile, ok := f.(http.File); ok { - return httpfile, nil - } - } - return nil, err -} - -func (h HttpFs) OpenFile(name string, flag int, perm os.FileMode) (File, error) { - return h.source.OpenFile(name, flag, perm) -} - -func (h HttpFs) Remove(name string) error { - return h.source.Remove(name) -} - -func (h HttpFs) RemoveAll(path string) error { - return h.source.RemoveAll(path) -} - -func (h HttpFs) Rename(oldname, newname string) error { - return h.source.Rename(oldname, newname) -} - -func (h HttpFs) Stat(name string) (os.FileInfo, error) { - return h.source.Stat(name) -} diff --git a/vendor/github.com/spf13/afero/internal/common/adapters.go b/vendor/github.com/spf13/afero/internal/common/adapters.go deleted file mode 100644 index 60685caa5..000000000 --- a/vendor/github.com/spf13/afero/internal/common/adapters.go +++ /dev/null @@ -1,27 +0,0 @@ -// Copyright © 2022 Steve Francia . -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package common - -import "io/fs" - -// FileInfoDirEntry provides an adapter from os.FileInfo to fs.DirEntry -type FileInfoDirEntry struct { - fs.FileInfo -} - -var _ fs.DirEntry = FileInfoDirEntry{} - -func (d FileInfoDirEntry) Type() fs.FileMode { return d.FileInfo.Mode().Type() } - -func (d FileInfoDirEntry) Info() (fs.FileInfo, error) { return d.FileInfo, nil } diff --git a/vendor/github.com/spf13/afero/iofs.go b/vendor/github.com/spf13/afero/iofs.go deleted file mode 100644 index 938b9316e..000000000 --- a/vendor/github.com/spf13/afero/iofs.go +++ /dev/null @@ -1,298 +0,0 @@ -//go:build go1.16 -// +build go1.16 - -package afero - -import ( - "io" - "io/fs" - "os" - "path" - "sort" - "time" - - "github.com/spf13/afero/internal/common" -) - -// IOFS adopts afero.Fs to stdlib io/fs.FS -type IOFS struct { - Fs -} - -func NewIOFS(fs Fs) IOFS { - return IOFS{Fs: fs} -} - -var ( - _ fs.FS = IOFS{} - _ fs.GlobFS = IOFS{} - _ fs.ReadDirFS = IOFS{} - _ fs.ReadFileFS = IOFS{} - _ fs.StatFS = IOFS{} - _ fs.SubFS = IOFS{} -) - -func (iofs IOFS) Open(name string) (fs.File, error) { - const op = "open" - - // by convention for fs.FS implementations we should perform this check - if !fs.ValidPath(name) { - return nil, iofs.wrapError(op, name, fs.ErrInvalid) - } - - file, err := iofs.Fs.Open(name) - if err != nil { - return nil, iofs.wrapError(op, name, err) - } - - // file should implement fs.ReadDirFile - if _, ok := file.(fs.ReadDirFile); !ok { - file = readDirFile{file} - } - - return file, nil -} - -func (iofs IOFS) Glob(pattern string) ([]string, error) { - const op = "glob" - - // afero.Glob does not perform this check but it's required for implementations - if _, err := path.Match(pattern, ""); err != nil { - return nil, iofs.wrapError(op, pattern, err) - } - - items, err := Glob(iofs.Fs, pattern) - if err != nil { - return nil, iofs.wrapError(op, pattern, err) - } - - return items, nil -} - -func (iofs IOFS) ReadDir(name string) ([]fs.DirEntry, error) { - f, err := iofs.Fs.Open(name) - if err != nil { - return nil, iofs.wrapError("readdir", name, err) - } - - defer f.Close() - - if rdf, ok := f.(fs.ReadDirFile); ok { - items, err := rdf.ReadDir(-1) - if err != nil { - return nil, iofs.wrapError("readdir", name, err) - } - sort.Slice(items, func(i, j int) bool { return items[i].Name() < items[j].Name() }) - return items, nil - } - - items, err := f.Readdir(-1) - if err != nil { - return nil, iofs.wrapError("readdir", name, err) - } - sort.Sort(byName(items)) - - ret := make([]fs.DirEntry, len(items)) - for i := range items { - ret[i] = common.FileInfoDirEntry{FileInfo: items[i]} - } - - return ret, nil -} - -func (iofs IOFS) ReadFile(name string) ([]byte, error) { - const op = "readfile" - - if !fs.ValidPath(name) { - return nil, iofs.wrapError(op, name, fs.ErrInvalid) - } - - bytes, err := ReadFile(iofs.Fs, name) - if err != nil { - return nil, iofs.wrapError(op, name, err) - } - - return bytes, nil -} - -func (iofs IOFS) Sub(dir string) (fs.FS, error) { return IOFS{NewBasePathFs(iofs.Fs, dir)}, nil } - -func (IOFS) wrapError(op, path string, err error) error { - if _, ok := err.(*fs.PathError); ok { - return err // don't need to wrap again - } - - return &fs.PathError{ - Op: op, - Path: path, - Err: err, - } -} - -// readDirFile provides adapter from afero.File to fs.ReadDirFile needed for correct Open -type readDirFile struct { - File -} - -var _ fs.ReadDirFile = readDirFile{} - -func (r readDirFile) ReadDir(n int) ([]fs.DirEntry, error) { - items, err := r.File.Readdir(n) - if err != nil 
{ - return nil, err - } - - ret := make([]fs.DirEntry, len(items)) - for i := range items { - ret[i] = common.FileInfoDirEntry{FileInfo: items[i]} - } - - return ret, nil -} - -// FromIOFS adopts io/fs.FS to use it as afero.Fs -// Note that io/fs.FS is read-only so all mutating methods will return fs.PathError with fs.ErrPermission -// To store modifications you may use afero.CopyOnWriteFs -type FromIOFS struct { - fs.FS -} - -var _ Fs = FromIOFS{} - -func (f FromIOFS) Create(name string) (File, error) { return nil, notImplemented("create", name) } - -func (f FromIOFS) Mkdir(name string, perm os.FileMode) error { return notImplemented("mkdir", name) } - -func (f FromIOFS) MkdirAll(path string, perm os.FileMode) error { - return notImplemented("mkdirall", path) -} - -func (f FromIOFS) Open(name string) (File, error) { - file, err := f.FS.Open(name) - if err != nil { - return nil, err - } - - return fromIOFSFile{File: file, name: name}, nil -} - -func (f FromIOFS) OpenFile(name string, flag int, perm os.FileMode) (File, error) { - return f.Open(name) -} - -func (f FromIOFS) Remove(name string) error { - return notImplemented("remove", name) -} - -func (f FromIOFS) RemoveAll(path string) error { - return notImplemented("removeall", path) -} - -func (f FromIOFS) Rename(oldname, newname string) error { - return notImplemented("rename", oldname) -} - -func (f FromIOFS) Stat(name string) (os.FileInfo, error) { return fs.Stat(f.FS, name) } - -func (f FromIOFS) Name() string { return "fromiofs" } - -func (f FromIOFS) Chmod(name string, mode os.FileMode) error { - return notImplemented("chmod", name) -} - -func (f FromIOFS) Chown(name string, uid, gid int) error { - return notImplemented("chown", name) -} - -func (f FromIOFS) Chtimes(name string, atime time.Time, mtime time.Time) error { - return notImplemented("chtimes", name) -} - -type fromIOFSFile struct { - fs.File - name string -} - -func (f fromIOFSFile) ReadAt(p []byte, off int64) (n int, err error) { - readerAt, ok := f.File.(io.ReaderAt) - if !ok { - return -1, notImplemented("readat", f.name) - } - - return readerAt.ReadAt(p, off) -} - -func (f fromIOFSFile) Seek(offset int64, whence int) (int64, error) { - seeker, ok := f.File.(io.Seeker) - if !ok { - return -1, notImplemented("seek", f.name) - } - - return seeker.Seek(offset, whence) -} - -func (f fromIOFSFile) Write(p []byte) (n int, err error) { - return -1, notImplemented("write", f.name) -} - -func (f fromIOFSFile) WriteAt(p []byte, off int64) (n int, err error) { - return -1, notImplemented("writeat", f.name) -} - -func (f fromIOFSFile) Name() string { return f.name } - -func (f fromIOFSFile) Readdir(count int) ([]os.FileInfo, error) { - rdfile, ok := f.File.(fs.ReadDirFile) - if !ok { - return nil, notImplemented("readdir", f.name) - } - - entries, err := rdfile.ReadDir(count) - if err != nil { - return nil, err - } - - ret := make([]os.FileInfo, len(entries)) - for i := range entries { - ret[i], err = entries[i].Info() - - if err != nil { - return nil, err - } - } - - return ret, nil -} - -func (f fromIOFSFile) Readdirnames(n int) ([]string, error) { - rdfile, ok := f.File.(fs.ReadDirFile) - if !ok { - return nil, notImplemented("readdir", f.name) - } - - entries, err := rdfile.ReadDir(n) - if err != nil { - return nil, err - } - - ret := make([]string, len(entries)) - for i := range entries { - ret[i] = entries[i].Name() - } - - return ret, nil -} - -func (f fromIOFSFile) Sync() error { return nil } - -func (f fromIOFSFile) Truncate(size int64) error { - return 
notImplemented("truncate", f.name) -} - -func (f fromIOFSFile) WriteString(s string) (ret int, err error) { - return -1, notImplemented("writestring", f.name) -} - -func notImplemented(op, path string) error { - return &fs.PathError{Op: op, Path: path, Err: fs.ErrPermission} -} diff --git a/vendor/github.com/spf13/afero/ioutil.go b/vendor/github.com/spf13/afero/ioutil.go deleted file mode 100644 index fa6abe1ee..000000000 --- a/vendor/github.com/spf13/afero/ioutil.go +++ /dev/null @@ -1,243 +0,0 @@ -// Copyright ©2015 The Go Authors -// Copyright ©2015 Steve Francia -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package afero - -import ( - "bytes" - "io" - "os" - "path/filepath" - "sort" - "strconv" - "strings" - "sync" - "time" -) - -// byName implements sort.Interface. -type byName []os.FileInfo - -func (f byName) Len() int { return len(f) } -func (f byName) Less(i, j int) bool { return f[i].Name() < f[j].Name() } -func (f byName) Swap(i, j int) { f[i], f[j] = f[j], f[i] } - -// ReadDir reads the directory named by dirname and returns -// a list of sorted directory entries. -func (a Afero) ReadDir(dirname string) ([]os.FileInfo, error) { - return ReadDir(a.Fs, dirname) -} - -func ReadDir(fs Fs, dirname string) ([]os.FileInfo, error) { - f, err := fs.Open(dirname) - if err != nil { - return nil, err - } - list, err := f.Readdir(-1) - f.Close() - if err != nil { - return nil, err - } - sort.Sort(byName(list)) - return list, nil -} - -// ReadFile reads the file named by filename and returns the contents. -// A successful call returns err == nil, not err == EOF. Because ReadFile -// reads the whole file, it does not treat an EOF from Read as an error -// to be reported. -func (a Afero) ReadFile(filename string) ([]byte, error) { - return ReadFile(a.Fs, filename) -} - -func ReadFile(fs Fs, filename string) ([]byte, error) { - f, err := fs.Open(filename) - if err != nil { - return nil, err - } - defer f.Close() - // It's a good but not certain bet that FileInfo will tell us exactly how much to - // read, so let's try it but be prepared for the answer to be wrong. - var n int64 - - if fi, err := f.Stat(); err == nil { - // Don't preallocate a huge buffer, just in case. - if size := fi.Size(); size < 1e9 { - n = size - } - } - // As initial capacity for readAll, use n + a little extra in case Size is zero, - // and to avoid another allocation after Read has filled the buffer. The readAll - // call will read into its allocated internal buffer cheaply. If the size was - // wrong, we'll either waste some space off the end or reallocate as needed, but - // in the overwhelmingly common case we'll get it just right. - return readAll(f, n+bytes.MinRead) -} - -// readAll reads from r until an error or EOF and returns the data it read -// from the internal buffer allocated with a specified capacity. -func readAll(r io.Reader, capacity int64) (b []byte, err error) { - buf := bytes.NewBuffer(make([]byte, 0, capacity)) - // If the buffer overflows, we will get bytes.ErrTooLarge. 
- // Return that as an error. Any other panic remains. - defer func() { - e := recover() - if e == nil { - return - } - if panicErr, ok := e.(error); ok && panicErr == bytes.ErrTooLarge { - err = panicErr - } else { - panic(e) - } - }() - _, err = buf.ReadFrom(r) - return buf.Bytes(), err -} - -// ReadAll reads from r until an error or EOF and returns the data it read. -// A successful call returns err == nil, not err == EOF. Because ReadAll is -// defined to read from src until EOF, it does not treat an EOF from Read -// as an error to be reported. -func ReadAll(r io.Reader) ([]byte, error) { - return readAll(r, bytes.MinRead) -} - -// WriteFile writes data to a file named by filename. -// If the file does not exist, WriteFile creates it with permissions perm; -// otherwise WriteFile truncates it before writing. -func (a Afero) WriteFile(filename string, data []byte, perm os.FileMode) error { - return WriteFile(a.Fs, filename, data, perm) -} - -func WriteFile(fs Fs, filename string, data []byte, perm os.FileMode) error { - f, err := fs.OpenFile(filename, os.O_WRONLY|os.O_CREATE|os.O_TRUNC, perm) - if err != nil { - return err - } - n, err := f.Write(data) - if err == nil && n < len(data) { - err = io.ErrShortWrite - } - if err1 := f.Close(); err == nil { - err = err1 - } - return err -} - -// Random number state. -// We generate random temporary file names so that there's a good -// chance the file doesn't exist yet - keeps the number of tries in -// TempFile to a minimum. -var ( - randNum uint32 - randmu sync.Mutex -) - -func reseed() uint32 { - return uint32(time.Now().UnixNano() + int64(os.Getpid())) -} - -func nextRandom() string { - randmu.Lock() - r := randNum - if r == 0 { - r = reseed() - } - r = r*1664525 + 1013904223 // constants from Numerical Recipes - randNum = r - randmu.Unlock() - return strconv.Itoa(int(1e9 + r%1e9))[1:] -} - -// TempFile creates a new temporary file in the directory dir, -// opens the file for reading and writing, and returns the resulting *os.File. -// The filename is generated by taking pattern and adding a random -// string to the end. If pattern includes a "*", the random string -// replaces the last "*". -// If dir is the empty string, TempFile uses the default directory -// for temporary files (see os.TempDir). -// Multiple programs calling TempFile simultaneously -// will not choose the same file. The caller can use f.Name() -// to find the pathname of the file. It is the caller's responsibility -// to remove the file when no longer needed. -func (a Afero) TempFile(dir, pattern string) (f File, err error) { - return TempFile(a.Fs, dir, pattern) -} - -func TempFile(fs Fs, dir, pattern string) (f File, err error) { - if dir == "" { - dir = os.TempDir() - } - - var prefix, suffix string - if pos := strings.LastIndex(pattern, "*"); pos != -1 { - prefix, suffix = pattern[:pos], pattern[pos+1:] - } else { - prefix = pattern - } - - nconflict := 0 - for i := 0; i < 10000; i++ { - name := filepath.Join(dir, prefix+nextRandom()+suffix) - f, err = fs.OpenFile(name, os.O_RDWR|os.O_CREATE|os.O_EXCL, 0o600) - if os.IsExist(err) { - if nconflict++; nconflict > 10 { - randmu.Lock() - randNum = reseed() - randmu.Unlock() - } - continue - } - break - } - return -} - -// TempDir creates a new temporary directory in the directory dir -// with a name beginning with prefix and returns the path of the -// new directory. If dir is the empty string, TempDir uses the -// default directory for temporary files (see os.TempDir). 
-// Multiple programs calling TempDir simultaneously -// will not choose the same directory. It is the caller's responsibility -// to remove the directory when no longer needed. -func (a Afero) TempDir(dir, prefix string) (name string, err error) { - return TempDir(a.Fs, dir, prefix) -} - -func TempDir(fs Fs, dir, prefix string) (name string, err error) { - if dir == "" { - dir = os.TempDir() - } - - nconflict := 0 - for i := 0; i < 10000; i++ { - try := filepath.Join(dir, prefix+nextRandom()) - err = fs.Mkdir(try, 0o700) - if os.IsExist(err) { - if nconflict++; nconflict > 10 { - randmu.Lock() - randNum = reseed() - randmu.Unlock() - } - continue - } - if err == nil { - name = try - } - break - } - return -} diff --git a/vendor/github.com/spf13/afero/lstater.go b/vendor/github.com/spf13/afero/lstater.go deleted file mode 100644 index 89c1bfc0a..000000000 --- a/vendor/github.com/spf13/afero/lstater.go +++ /dev/null @@ -1,27 +0,0 @@ -// Copyright © 2018 Steve Francia . -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package afero - -import ( - "os" -) - -// Lstater is an optional interface in Afero. It is only implemented by the -// filesystems saying so. -// It will call Lstat if the filesystem iself is, or it delegates to, the os filesystem. -// Else it will call Stat. -// In addtion to the FileInfo, it will return a boolean telling whether Lstat was called or not. -type Lstater interface { - LstatIfPossible(name string) (os.FileInfo, bool, error) -} diff --git a/vendor/github.com/spf13/afero/match.go b/vendor/github.com/spf13/afero/match.go deleted file mode 100644 index 7db4b7de6..000000000 --- a/vendor/github.com/spf13/afero/match.go +++ /dev/null @@ -1,110 +0,0 @@ -// Copyright © 2014 Steve Francia . -// Copyright 2009 The Go Authors. All rights reserved. - -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package afero - -import ( - "path/filepath" - "sort" - "strings" -) - -// Glob returns the names of all files matching pattern or nil -// if there is no matching file. The syntax of patterns is the same -// as in Match. The pattern may describe hierarchical names such as -// /usr/*/bin/ed (assuming the Separator is '/'). -// -// Glob ignores file system errors such as I/O errors reading directories. -// The only possible returned error is ErrBadPattern, when pattern -// is malformed. -// -// This was adapted from (http://golang.org/pkg/path/filepath) and uses several -// built-ins from that package. 
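To illustrate the Glob helper documented above, a small sketch against an in-memory filesystem; the file names, pattern and expected matches are illustrative assumptions.

package main

import (
	"fmt"

	"github.com/spf13/afero"
)

func main() {
	fs := afero.NewMemMapFs()
	for _, name := range []string{"/logs/a.log", "/logs/b.log", "/logs/notes.txt"} {
		_ = afero.WriteFile(fs, name, []byte("x"), 0o644)
	}

	// Same pattern syntax as path/filepath.Match, evaluated against any afero.Fs.
	matches, err := afero.Glob(fs, "/logs/*.log")
	if err != nil {
		panic(err)
	}
	fmt.Println(matches) // expected: [/logs/a.log /logs/b.log]
}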
-func Glob(fs Fs, pattern string) (matches []string, err error) { - if !hasMeta(pattern) { - // Lstat not supported by a ll filesystems. - if _, err = lstatIfPossible(fs, pattern); err != nil { - return nil, nil - } - return []string{pattern}, nil - } - - dir, file := filepath.Split(pattern) - switch dir { - case "": - dir = "." - case string(filepath.Separator): - // nothing - default: - dir = dir[0 : len(dir)-1] // chop off trailing separator - } - - if !hasMeta(dir) { - return glob(fs, dir, file, nil) - } - - var m []string - m, err = Glob(fs, dir) - if err != nil { - return - } - for _, d := range m { - matches, err = glob(fs, d, file, matches) - if err != nil { - return - } - } - return -} - -// glob searches for files matching pattern in the directory dir -// and appends them to matches. If the directory cannot be -// opened, it returns the existing matches. New matches are -// added in lexicographical order. -func glob(fs Fs, dir, pattern string, matches []string) (m []string, e error) { - m = matches - fi, err := fs.Stat(dir) - if err != nil { - return - } - if !fi.IsDir() { - return - } - d, err := fs.Open(dir) - if err != nil { - return - } - defer d.Close() - - names, _ := d.Readdirnames(-1) - sort.Strings(names) - - for _, n := range names { - matched, err := filepath.Match(pattern, n) - if err != nil { - return m, err - } - if matched { - m = append(m, filepath.Join(dir, n)) - } - } - return -} - -// hasMeta reports whether path contains any of the magic characters -// recognized by Match. -func hasMeta(path string) bool { - // TODO(niemeyer): Should other magic characters be added here? - return strings.ContainsAny(path, "*?[") -} diff --git a/vendor/github.com/spf13/afero/mem/dir.go b/vendor/github.com/spf13/afero/mem/dir.go deleted file mode 100644 index e104013f4..000000000 --- a/vendor/github.com/spf13/afero/mem/dir.go +++ /dev/null @@ -1,37 +0,0 @@ -// Copyright © 2014 Steve Francia . -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package mem - -type Dir interface { - Len() int - Names() []string - Files() []*FileData - Add(*FileData) - Remove(*FileData) -} - -func RemoveFromMemDir(dir *FileData, f *FileData) { - dir.memDir.Remove(f) -} - -func AddToMemDir(dir *FileData, f *FileData) { - dir.memDir.Add(f) -} - -func InitializeDir(d *FileData) { - if d.memDir == nil { - d.dir = true - d.memDir = &DirMap{} - } -} diff --git a/vendor/github.com/spf13/afero/mem/dirmap.go b/vendor/github.com/spf13/afero/mem/dirmap.go deleted file mode 100644 index 03a57ee5b..000000000 --- a/vendor/github.com/spf13/afero/mem/dirmap.go +++ /dev/null @@ -1,43 +0,0 @@ -// Copyright © 2015 Steve Francia . -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package mem - -import "sort" - -type DirMap map[string]*FileData - -func (m DirMap) Len() int { return len(m) } -func (m DirMap) Add(f *FileData) { m[f.name] = f } -func (m DirMap) Remove(f *FileData) { delete(m, f.name) } -func (m DirMap) Files() (files []*FileData) { - for _, f := range m { - files = append(files, f) - } - sort.Sort(filesSorter(files)) - return files -} - -// implement sort.Interface for []*FileData -type filesSorter []*FileData - -func (s filesSorter) Len() int { return len(s) } -func (s filesSorter) Swap(i, j int) { s[i], s[j] = s[j], s[i] } -func (s filesSorter) Less(i, j int) bool { return s[i].name < s[j].name } - -func (m DirMap) Names() (names []string) { - for x := range m { - names = append(names, x) - } - return names -} diff --git a/vendor/github.com/spf13/afero/mem/file.go b/vendor/github.com/spf13/afero/mem/file.go deleted file mode 100644 index 62fe4498e..000000000 --- a/vendor/github.com/spf13/afero/mem/file.go +++ /dev/null @@ -1,359 +0,0 @@ -// Copyright © 2015 Steve Francia . -// Copyright 2013 tsuru authors. All rights reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package mem - -import ( - "bytes" - "errors" - "io" - "io/fs" - "os" - "path/filepath" - "sync" - "sync/atomic" - "time" - - "github.com/spf13/afero/internal/common" -) - -const FilePathSeparator = string(filepath.Separator) - -var _ fs.ReadDirFile = &File{} - -type File struct { - // atomic requires 64-bit alignment for struct field access - at int64 - readDirCount int64 - closed bool - readOnly bool - fileData *FileData -} - -func NewFileHandle(data *FileData) *File { - return &File{fileData: data} -} - -func NewReadOnlyFileHandle(data *FileData) *File { - return &File{fileData: data, readOnly: true} -} - -func (f File) Data() *FileData { - return f.fileData -} - -type FileData struct { - sync.Mutex - name string - data []byte - memDir Dir - dir bool - mode os.FileMode - modtime time.Time - uid int - gid int -} - -func (d *FileData) Name() string { - d.Lock() - defer d.Unlock() - return d.name -} - -func CreateFile(name string) *FileData { - return &FileData{name: name, mode: os.ModeTemporary, modtime: time.Now()} -} - -func CreateDir(name string) *FileData { - return &FileData{name: name, memDir: &DirMap{}, dir: true, modtime: time.Now()} -} - -func ChangeFileName(f *FileData, newname string) { - f.Lock() - f.name = newname - f.Unlock() -} - -func SetMode(f *FileData, mode os.FileMode) { - f.Lock() - f.mode = mode - f.Unlock() -} - -func SetModTime(f *FileData, mtime time.Time) { - f.Lock() - setModTime(f, mtime) - f.Unlock() -} - -func setModTime(f *FileData, mtime time.Time) { - f.modtime = mtime -} - -func SetUID(f *FileData, uid int) { - f.Lock() - f.uid = uid - f.Unlock() -} - -func SetGID(f *FileData, gid int) { - f.Lock() - f.gid = gid - f.Unlock() -} - -func GetFileInfo(f *FileData) *FileInfo { - return &FileInfo{f} -} - -func (f *File) Open() error { - atomic.StoreInt64(&f.at, 0) - atomic.StoreInt64(&f.readDirCount, 0) - f.fileData.Lock() - f.closed = false - f.fileData.Unlock() - return nil -} - -func (f *File) Close() error { - f.fileData.Lock() - f.closed = true - if !f.readOnly { - setModTime(f.fileData, time.Now()) - } - f.fileData.Unlock() - return nil -} - -func (f *File) Name() string { - return f.fileData.Name() -} - -func (f *File) Stat() (os.FileInfo, error) { - return &FileInfo{f.fileData}, nil -} - -func (f *File) Sync() error { - return nil -} - -func (f *File) Readdir(count int) (res []os.FileInfo, err error) { - if !f.fileData.dir { - return nil, &os.PathError{Op: "readdir", Path: f.fileData.name, Err: errors.New("not a dir")} - } - var outLength int64 - - f.fileData.Lock() - files := f.fileData.memDir.Files()[f.readDirCount:] - if count > 0 { - if len(files) < count { - outLength = int64(len(files)) - } else { - outLength = int64(count) - } - if len(files) == 0 { - err = io.EOF - } - } else { - outLength = int64(len(files)) - } - f.readDirCount += outLength - f.fileData.Unlock() - - res = make([]os.FileInfo, outLength) - for i := range res { - res[i] = &FileInfo{files[i]} - } - - return res, err -} - -func (f *File) Readdirnames(n int) (names []string, err error) { - fi, err := f.Readdir(n) - names = make([]string, len(fi)) - for i, f := range fi { - _, names[i] = filepath.Split(f.Name()) - } - return names, err -} - -// Implements fs.ReadDirFile -func (f *File) ReadDir(n int) ([]fs.DirEntry, error) { - fi, err := f.Readdir(n) - if err != nil { - return nil, err - } - di := make([]fs.DirEntry, len(fi)) - for i, f := range fi { - di[i] = common.FileInfoDirEntry{FileInfo: f} - } - return di, nil -} - -func (f *File) Read(b []byte) (n int, err error) { - 
f.fileData.Lock() - defer f.fileData.Unlock() - if f.closed { - return 0, ErrFileClosed - } - if len(b) > 0 && int(f.at) == len(f.fileData.data) { - return 0, io.EOF - } - if int(f.at) > len(f.fileData.data) { - return 0, io.ErrUnexpectedEOF - } - if len(f.fileData.data)-int(f.at) >= len(b) { - n = len(b) - } else { - n = len(f.fileData.data) - int(f.at) - } - copy(b, f.fileData.data[f.at:f.at+int64(n)]) - atomic.AddInt64(&f.at, int64(n)) - return -} - -func (f *File) ReadAt(b []byte, off int64) (n int, err error) { - prev := atomic.LoadInt64(&f.at) - atomic.StoreInt64(&f.at, off) - n, err = f.Read(b) - atomic.StoreInt64(&f.at, prev) - return -} - -func (f *File) Truncate(size int64) error { - if f.closed { - return ErrFileClosed - } - if f.readOnly { - return &os.PathError{Op: "truncate", Path: f.fileData.name, Err: errors.New("file handle is read only")} - } - if size < 0 { - return ErrOutOfRange - } - f.fileData.Lock() - defer f.fileData.Unlock() - if size > int64(len(f.fileData.data)) { - diff := size - int64(len(f.fileData.data)) - f.fileData.data = append(f.fileData.data, bytes.Repeat([]byte{0o0}, int(diff))...) - } else { - f.fileData.data = f.fileData.data[0:size] - } - setModTime(f.fileData, time.Now()) - return nil -} - -func (f *File) Seek(offset int64, whence int) (int64, error) { - if f.closed { - return 0, ErrFileClosed - } - switch whence { - case io.SeekStart: - atomic.StoreInt64(&f.at, offset) - case io.SeekCurrent: - atomic.AddInt64(&f.at, offset) - case io.SeekEnd: - atomic.StoreInt64(&f.at, int64(len(f.fileData.data))+offset) - } - return f.at, nil -} - -func (f *File) Write(b []byte) (n int, err error) { - if f.closed { - return 0, ErrFileClosed - } - if f.readOnly { - return 0, &os.PathError{Op: "write", Path: f.fileData.name, Err: errors.New("file handle is read only")} - } - n = len(b) - cur := atomic.LoadInt64(&f.at) - f.fileData.Lock() - defer f.fileData.Unlock() - diff := cur - int64(len(f.fileData.data)) - var tail []byte - if n+int(cur) < len(f.fileData.data) { - tail = f.fileData.data[n+int(cur):] - } - if diff > 0 { - f.fileData.data = append(f.fileData.data, append(bytes.Repeat([]byte{0o0}, int(diff)), b...)...) - f.fileData.data = append(f.fileData.data, tail...) - } else { - f.fileData.data = append(f.fileData.data[:cur], b...) - f.fileData.data = append(f.fileData.data, tail...) 
- } - setModTime(f.fileData, time.Now()) - - atomic.AddInt64(&f.at, int64(n)) - return -} - -func (f *File) WriteAt(b []byte, off int64) (n int, err error) { - atomic.StoreInt64(&f.at, off) - return f.Write(b) -} - -func (f *File) WriteString(s string) (ret int, err error) { - return f.Write([]byte(s)) -} - -func (f *File) Info() *FileInfo { - return &FileInfo{f.fileData} -} - -type FileInfo struct { - *FileData -} - -// Implements os.FileInfo -func (s *FileInfo) Name() string { - s.Lock() - _, name := filepath.Split(s.name) - s.Unlock() - return name -} - -func (s *FileInfo) Mode() os.FileMode { - s.Lock() - defer s.Unlock() - return s.mode -} - -func (s *FileInfo) ModTime() time.Time { - s.Lock() - defer s.Unlock() - return s.modtime -} - -func (s *FileInfo) IsDir() bool { - s.Lock() - defer s.Unlock() - return s.dir -} -func (s *FileInfo) Sys() interface{} { return nil } -func (s *FileInfo) Size() int64 { - if s.IsDir() { - return int64(42) - } - s.Lock() - defer s.Unlock() - return int64(len(s.data)) -} - -var ( - ErrFileClosed = errors.New("File is closed") - ErrOutOfRange = errors.New("out of range") - ErrTooLarge = errors.New("too large") - ErrFileNotFound = os.ErrNotExist - ErrFileExists = os.ErrExist - ErrDestinationExists = os.ErrExist -) diff --git a/vendor/github.com/spf13/afero/memmap.go b/vendor/github.com/spf13/afero/memmap.go deleted file mode 100644 index d6c744e8d..000000000 --- a/vendor/github.com/spf13/afero/memmap.go +++ /dev/null @@ -1,465 +0,0 @@ -// Copyright © 2014 Steve Francia . -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package afero - -import ( - "fmt" - "io" - - "log" - "os" - "path/filepath" - - "sort" - "strings" - "sync" - "time" - - "github.com/spf13/afero/mem" -) - -const chmodBits = os.ModePerm | os.ModeSetuid | os.ModeSetgid | os.ModeSticky // Only a subset of bits are allowed to be changed. Documented under os.Chmod() - -type MemMapFs struct { - mu sync.RWMutex - data map[string]*mem.FileData - init sync.Once -} - -func NewMemMapFs() Fs { - return &MemMapFs{} -} - -func (m *MemMapFs) getData() map[string]*mem.FileData { - m.init.Do(func() { - m.data = make(map[string]*mem.FileData) - // Root should always exist, right? - // TODO: what about windows? 
- root := mem.CreateDir(FilePathSeparator) - mem.SetMode(root, os.ModeDir|0o755) - m.data[FilePathSeparator] = root - }) - return m.data -} - -func (*MemMapFs) Name() string { return "MemMapFS" } - -func (m *MemMapFs) Create(name string) (File, error) { - name = normalizePath(name) - m.mu.Lock() - file := mem.CreateFile(name) - m.getData()[name] = file - m.registerWithParent(file, 0) - m.mu.Unlock() - return mem.NewFileHandle(file), nil -} - -func (m *MemMapFs) unRegisterWithParent(fileName string) error { - f, err := m.lockfreeOpen(fileName) - if err != nil { - return err - } - parent := m.findParent(f) - if parent == nil { - log.Panic("parent of ", f.Name(), " is nil") - } - - parent.Lock() - mem.RemoveFromMemDir(parent, f) - parent.Unlock() - return nil -} - -func (m *MemMapFs) findParent(f *mem.FileData) *mem.FileData { - pdir, _ := filepath.Split(f.Name()) - pdir = filepath.Clean(pdir) - pfile, err := m.lockfreeOpen(pdir) - if err != nil { - return nil - } - return pfile -} - -func (m *MemMapFs) findDescendants(name string) []*mem.FileData { - fData := m.getData() - descendants := make([]*mem.FileData, 0, len(fData)) - for p, dFile := range fData { - if strings.HasPrefix(p, name+FilePathSeparator) { - descendants = append(descendants, dFile) - } - } - - sort.Slice(descendants, func(i, j int) bool { - cur := len(strings.Split(descendants[i].Name(), FilePathSeparator)) - next := len(strings.Split(descendants[j].Name(), FilePathSeparator)) - return cur < next - }) - - return descendants -} - -func (m *MemMapFs) registerWithParent(f *mem.FileData, perm os.FileMode) { - if f == nil { - return - } - parent := m.findParent(f) - if parent == nil { - pdir := filepath.Dir(filepath.Clean(f.Name())) - err := m.lockfreeMkdir(pdir, perm) - if err != nil { - // log.Println("Mkdir error:", err) - return - } - parent, err = m.lockfreeOpen(pdir) - if err != nil { - // log.Println("Open after Mkdir error:", err) - return - } - } - - parent.Lock() - mem.InitializeDir(parent) - mem.AddToMemDir(parent, f) - parent.Unlock() -} - -func (m *MemMapFs) lockfreeMkdir(name string, perm os.FileMode) error { - name = normalizePath(name) - x, ok := m.getData()[name] - if ok { - // Only return ErrFileExists if it's a file, not a directory. - i := mem.FileInfo{FileData: x} - if !i.IsDir() { - return ErrFileExists - } - } else { - item := mem.CreateDir(name) - mem.SetMode(item, os.ModeDir|perm) - m.getData()[name] = item - m.registerWithParent(item, perm) - } - return nil -} - -func (m *MemMapFs) Mkdir(name string, perm os.FileMode) error { - perm &= chmodBits - name = normalizePath(name) - - m.mu.RLock() - _, ok := m.getData()[name] - m.mu.RUnlock() - if ok { - return &os.PathError{Op: "mkdir", Path: name, Err: ErrFileExists} - } - - m.mu.Lock() - // Dobule check that it doesn't exist. 
- if _, ok := m.getData()[name]; ok { - m.mu.Unlock() - return &os.PathError{Op: "mkdir", Path: name, Err: ErrFileExists} - } - item := mem.CreateDir(name) - mem.SetMode(item, os.ModeDir|perm) - m.getData()[name] = item - m.registerWithParent(item, perm) - m.mu.Unlock() - - return m.setFileMode(name, perm|os.ModeDir) -} - -func (m *MemMapFs) MkdirAll(path string, perm os.FileMode) error { - err := m.Mkdir(path, perm) - if err != nil { - if err.(*os.PathError).Err == ErrFileExists { - return nil - } - return err - } - return nil -} - -// Handle some relative paths -func normalizePath(path string) string { - path = filepath.Clean(path) - - switch path { - case ".": - return FilePathSeparator - case "..": - return FilePathSeparator - default: - return path - } -} - -func (m *MemMapFs) Open(name string) (File, error) { - f, err := m.open(name) - if f != nil { - return mem.NewReadOnlyFileHandle(f), err - } - return nil, err -} - -func (m *MemMapFs) openWrite(name string) (File, error) { - f, err := m.open(name) - if f != nil { - return mem.NewFileHandle(f), err - } - return nil, err -} - -func (m *MemMapFs) open(name string) (*mem.FileData, error) { - name = normalizePath(name) - - m.mu.RLock() - f, ok := m.getData()[name] - m.mu.RUnlock() - if !ok { - return nil, &os.PathError{Op: "open", Path: name, Err: ErrFileNotFound} - } - return f, nil -} - -func (m *MemMapFs) lockfreeOpen(name string) (*mem.FileData, error) { - name = normalizePath(name) - f, ok := m.getData()[name] - if ok { - return f, nil - } else { - return nil, ErrFileNotFound - } -} - -func (m *MemMapFs) OpenFile(name string, flag int, perm os.FileMode) (File, error) { - perm &= chmodBits - chmod := false - file, err := m.openWrite(name) - if err == nil && (flag&os.O_EXCL > 0) { - return nil, &os.PathError{Op: "open", Path: name, Err: ErrFileExists} - } - if os.IsNotExist(err) && (flag&os.O_CREATE > 0) { - file, err = m.Create(name) - chmod = true - } - if err != nil { - return nil, err - } - if flag == os.O_RDONLY { - file = mem.NewReadOnlyFileHandle(file.(*mem.File).Data()) - } - if flag&os.O_APPEND > 0 { - _, err = file.Seek(0, io.SeekEnd) - if err != nil { - file.Close() - return nil, err - } - } - if flag&os.O_TRUNC > 0 && flag&(os.O_RDWR|os.O_WRONLY) > 0 { - err = file.Truncate(0) - if err != nil { - file.Close() - return nil, err - } - } - if chmod { - return file, m.setFileMode(name, perm) - } - return file, nil -} - -func (m *MemMapFs) Remove(name string) error { - name = normalizePath(name) - - m.mu.Lock() - defer m.mu.Unlock() - - if _, ok := m.getData()[name]; ok { - err := m.unRegisterWithParent(name) - if err != nil { - return &os.PathError{Op: "remove", Path: name, Err: err} - } - delete(m.getData(), name) - } else { - return &os.PathError{Op: "remove", Path: name, Err: os.ErrNotExist} - } - return nil -} - -func (m *MemMapFs) RemoveAll(path string) error { - path = normalizePath(path) - m.mu.Lock() - m.unRegisterWithParent(path) - m.mu.Unlock() - - m.mu.RLock() - defer m.mu.RUnlock() - - for p := range m.getData() { - if p == path || strings.HasPrefix(p, path+FilePathSeparator) { - m.mu.RUnlock() - m.mu.Lock() - delete(m.getData(), p) - m.mu.Unlock() - m.mu.RLock() - } - } - return nil -} - -func (m *MemMapFs) Rename(oldname, newname string) error { - oldname = normalizePath(oldname) - newname = normalizePath(newname) - - if oldname == newname { - return nil - } - - m.mu.RLock() - defer m.mu.RUnlock() - if _, ok := m.getData()[oldname]; ok { - m.mu.RUnlock() - m.mu.Lock() - err := m.unRegisterWithParent(oldname) - 
if err != nil { - return err - } - - fileData := m.getData()[oldname] - mem.ChangeFileName(fileData, newname) - m.getData()[newname] = fileData - - err = m.renameDescendants(oldname, newname) - if err != nil { - return err - } - - delete(m.getData(), oldname) - - m.registerWithParent(fileData, 0) - m.mu.Unlock() - m.mu.RLock() - } else { - return &os.PathError{Op: "rename", Path: oldname, Err: ErrFileNotFound} - } - return nil -} - -func (m *MemMapFs) renameDescendants(oldname, newname string) error { - descendants := m.findDescendants(oldname) - removes := make([]string, 0, len(descendants)) - for _, desc := range descendants { - descNewName := strings.Replace(desc.Name(), oldname, newname, 1) - err := m.unRegisterWithParent(desc.Name()) - if err != nil { - return err - } - - removes = append(removes, desc.Name()) - mem.ChangeFileName(desc, descNewName) - m.getData()[descNewName] = desc - - m.registerWithParent(desc, 0) - } - for _, r := range removes { - delete(m.getData(), r) - } - - return nil -} - -func (m *MemMapFs) LstatIfPossible(name string) (os.FileInfo, bool, error) { - fileInfo, err := m.Stat(name) - return fileInfo, false, err -} - -func (m *MemMapFs) Stat(name string) (os.FileInfo, error) { - f, err := m.Open(name) - if err != nil { - return nil, err - } - fi := mem.GetFileInfo(f.(*mem.File).Data()) - return fi, nil -} - -func (m *MemMapFs) Chmod(name string, mode os.FileMode) error { - mode &= chmodBits - - m.mu.RLock() - f, ok := m.getData()[name] - m.mu.RUnlock() - if !ok { - return &os.PathError{Op: "chmod", Path: name, Err: ErrFileNotFound} - } - prevOtherBits := mem.GetFileInfo(f).Mode() & ^chmodBits - - mode = prevOtherBits | mode - return m.setFileMode(name, mode) -} - -func (m *MemMapFs) setFileMode(name string, mode os.FileMode) error { - name = normalizePath(name) - - m.mu.RLock() - f, ok := m.getData()[name] - m.mu.RUnlock() - if !ok { - return &os.PathError{Op: "chmod", Path: name, Err: ErrFileNotFound} - } - - m.mu.Lock() - mem.SetMode(f, mode) - m.mu.Unlock() - - return nil -} - -func (m *MemMapFs) Chown(name string, uid, gid int) error { - name = normalizePath(name) - - m.mu.RLock() - f, ok := m.getData()[name] - m.mu.RUnlock() - if !ok { - return &os.PathError{Op: "chown", Path: name, Err: ErrFileNotFound} - } - - mem.SetUID(f, uid) - mem.SetGID(f, gid) - - return nil -} - -func (m *MemMapFs) Chtimes(name string, atime time.Time, mtime time.Time) error { - name = normalizePath(name) - - m.mu.RLock() - f, ok := m.getData()[name] - m.mu.RUnlock() - if !ok { - return &os.PathError{Op: "chtimes", Path: name, Err: ErrFileNotFound} - } - - m.mu.Lock() - mem.SetModTime(f, mtime) - m.mu.Unlock() - - return nil -} - -func (m *MemMapFs) List() { - for _, x := range m.data { - y := mem.FileInfo{FileData: x} - fmt.Println(x.Name(), y.Size()) - } -} diff --git a/vendor/github.com/spf13/afero/os.go b/vendor/github.com/spf13/afero/os.go deleted file mode 100644 index f1366321e..000000000 --- a/vendor/github.com/spf13/afero/os.go +++ /dev/null @@ -1,113 +0,0 @@ -// Copyright © 2014 Steve Francia . -// Copyright 2013 tsuru authors. All rights reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package afero - -import ( - "os" - "time" -) - -var _ Lstater = (*OsFs)(nil) - -// OsFs is a Fs implementation that uses functions provided by the os package. -// -// For details in any method, check the documentation of the os package -// (http://golang.org/pkg/os/). -type OsFs struct{} - -func NewOsFs() Fs { - return &OsFs{} -} - -func (OsFs) Name() string { return "OsFs" } - -func (OsFs) Create(name string) (File, error) { - f, e := os.Create(name) - if f == nil { - // while this looks strange, we need to return a bare nil (of type nil) not - // a nil value of type *os.File or nil won't be nil - return nil, e - } - return f, e -} - -func (OsFs) Mkdir(name string, perm os.FileMode) error { - return os.Mkdir(name, perm) -} - -func (OsFs) MkdirAll(path string, perm os.FileMode) error { - return os.MkdirAll(path, perm) -} - -func (OsFs) Open(name string) (File, error) { - f, e := os.Open(name) - if f == nil { - // while this looks strange, we need to return a bare nil (of type nil) not - // a nil value of type *os.File or nil won't be nil - return nil, e - } - return f, e -} - -func (OsFs) OpenFile(name string, flag int, perm os.FileMode) (File, error) { - f, e := os.OpenFile(name, flag, perm) - if f == nil { - // while this looks strange, we need to return a bare nil (of type nil) not - // a nil value of type *os.File or nil won't be nil - return nil, e - } - return f, e -} - -func (OsFs) Remove(name string) error { - return os.Remove(name) -} - -func (OsFs) RemoveAll(path string) error { - return os.RemoveAll(path) -} - -func (OsFs) Rename(oldname, newname string) error { - return os.Rename(oldname, newname) -} - -func (OsFs) Stat(name string) (os.FileInfo, error) { - return os.Stat(name) -} - -func (OsFs) Chmod(name string, mode os.FileMode) error { - return os.Chmod(name, mode) -} - -func (OsFs) Chown(name string, uid, gid int) error { - return os.Chown(name, uid, gid) -} - -func (OsFs) Chtimes(name string, atime time.Time, mtime time.Time) error { - return os.Chtimes(name, atime, mtime) -} - -func (OsFs) LstatIfPossible(name string) (os.FileInfo, bool, error) { - fi, err := os.Lstat(name) - return fi, true, err -} - -func (OsFs) SymlinkIfPossible(oldname, newname string) error { - return os.Symlink(oldname, newname) -} - -func (OsFs) ReadlinkIfPossible(name string) (string, error) { - return os.Readlink(name) -} diff --git a/vendor/github.com/spf13/afero/path.go b/vendor/github.com/spf13/afero/path.go deleted file mode 100644 index 18f60a0f6..000000000 --- a/vendor/github.com/spf13/afero/path.go +++ /dev/null @@ -1,106 +0,0 @@ -// Copyright ©2015 The Go Authors -// Copyright ©2015 Steve Francia -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-// See the License for the specific language governing permissions and -// limitations under the License. - -package afero - -import ( - "os" - "path/filepath" - "sort" -) - -// readDirNames reads the directory named by dirname and returns -// a sorted list of directory entries. -// adapted from https://golang.org/src/path/filepath/path.go -func readDirNames(fs Fs, dirname string) ([]string, error) { - f, err := fs.Open(dirname) - if err != nil { - return nil, err - } - names, err := f.Readdirnames(-1) - f.Close() - if err != nil { - return nil, err - } - sort.Strings(names) - return names, nil -} - -// walk recursively descends path, calling walkFn -// adapted from https://golang.org/src/path/filepath/path.go -func walk(fs Fs, path string, info os.FileInfo, walkFn filepath.WalkFunc) error { - err := walkFn(path, info, nil) - if err != nil { - if info.IsDir() && err == filepath.SkipDir { - return nil - } - return err - } - - if !info.IsDir() { - return nil - } - - names, err := readDirNames(fs, path) - if err != nil { - return walkFn(path, info, err) - } - - for _, name := range names { - filename := filepath.Join(path, name) - fileInfo, err := lstatIfPossible(fs, filename) - if err != nil { - if err := walkFn(filename, fileInfo, err); err != nil && err != filepath.SkipDir { - return err - } - } else { - err = walk(fs, filename, fileInfo, walkFn) - if err != nil { - if !fileInfo.IsDir() || err != filepath.SkipDir { - return err - } - } - } - } - return nil -} - -// if the filesystem supports it, use Lstat, else use fs.Stat -func lstatIfPossible(fs Fs, path string) (os.FileInfo, error) { - if lfs, ok := fs.(Lstater); ok { - fi, _, err := lfs.LstatIfPossible(path) - return fi, err - } - return fs.Stat(path) -} - -// Walk walks the file tree rooted at root, calling walkFn for each file or -// directory in the tree, including root. All errors that arise visiting files -// and directories are filtered by walkFn. The files are walked in lexical -// order, which makes the output deterministic but means that for very -// large directories Walk can be inefficient. -// Walk does not follow symbolic links. 
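A short sketch of the Walk helper documented above, run against an in-memory tree; the paths are made up for illustration.

package main

import (
	"fmt"
	"os"

	"github.com/spf13/afero"
)

func main() {
	fs := afero.NewMemMapFs()
	_ = fs.MkdirAll("/srv/app", 0o755)
	_ = afero.WriteFile(fs, "/srv/app/main.go", []byte("package main"), 0o644)

	// Visits /srv, /srv/app and /srv/app/main.go in lexical order.
	err := afero.Walk(fs, "/srv", func(path string, info os.FileInfo, err error) error {
		if err != nil {
			return err
		}
		fmt.Println(path, info.IsDir())
		return nil
	})
	if err != nil {
		panic(err)
	}
}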
- -func (a Afero) Walk(root string, walkFn filepath.WalkFunc) error { - return Walk(a.Fs, root, walkFn) -} - -func Walk(fs Fs, root string, walkFn filepath.WalkFunc) error { - info, err := lstatIfPossible(fs, root) - if err != nil { - return walkFn(root, nil, err) - } - return walk(fs, root, info, walkFn) -} diff --git a/vendor/github.com/spf13/afero/readonlyfs.go b/vendor/github.com/spf13/afero/readonlyfs.go deleted file mode 100644 index bd8f9264d..000000000 --- a/vendor/github.com/spf13/afero/readonlyfs.go +++ /dev/null @@ -1,96 +0,0 @@ -package afero - -import ( - "os" - "syscall" - "time" -) - -var _ Lstater = (*ReadOnlyFs)(nil) - -type ReadOnlyFs struct { - source Fs -} - -func NewReadOnlyFs(source Fs) Fs { - return &ReadOnlyFs{source: source} -} - -func (r *ReadOnlyFs) ReadDir(name string) ([]os.FileInfo, error) { - return ReadDir(r.source, name) -} - -func (r *ReadOnlyFs) Chtimes(n string, a, m time.Time) error { - return syscall.EPERM -} - -func (r *ReadOnlyFs) Chmod(n string, m os.FileMode) error { - return syscall.EPERM -} - -func (r *ReadOnlyFs) Chown(n string, uid, gid int) error { - return syscall.EPERM -} - -func (r *ReadOnlyFs) Name() string { - return "ReadOnlyFilter" -} - -func (r *ReadOnlyFs) Stat(name string) (os.FileInfo, error) { - return r.source.Stat(name) -} - -func (r *ReadOnlyFs) LstatIfPossible(name string) (os.FileInfo, bool, error) { - if lsf, ok := r.source.(Lstater); ok { - return lsf.LstatIfPossible(name) - } - fi, err := r.Stat(name) - return fi, false, err -} - -func (r *ReadOnlyFs) SymlinkIfPossible(oldname, newname string) error { - return &os.LinkError{Op: "symlink", Old: oldname, New: newname, Err: ErrNoSymlink} -} - -func (r *ReadOnlyFs) ReadlinkIfPossible(name string) (string, error) { - if srdr, ok := r.source.(LinkReader); ok { - return srdr.ReadlinkIfPossible(name) - } - - return "", &os.PathError{Op: "readlink", Path: name, Err: ErrNoReadlink} -} - -func (r *ReadOnlyFs) Rename(o, n string) error { - return syscall.EPERM -} - -func (r *ReadOnlyFs) RemoveAll(p string) error { - return syscall.EPERM -} - -func (r *ReadOnlyFs) Remove(n string) error { - return syscall.EPERM -} - -func (r *ReadOnlyFs) OpenFile(name string, flag int, perm os.FileMode) (File, error) { - if flag&(os.O_WRONLY|syscall.O_RDWR|os.O_APPEND|os.O_CREATE|os.O_TRUNC) != 0 { - return nil, syscall.EPERM - } - return r.source.OpenFile(name, flag, perm) -} - -func (r *ReadOnlyFs) Open(n string) (File, error) { - return r.source.Open(n) -} - -func (r *ReadOnlyFs) Mkdir(n string, p os.FileMode) error { - return syscall.EPERM -} - -func (r *ReadOnlyFs) MkdirAll(n string, p os.FileMode) error { - return syscall.EPERM -} - -func (r *ReadOnlyFs) Create(n string) (File, error) { - return nil, syscall.EPERM -} diff --git a/vendor/github.com/spf13/afero/regexpfs.go b/vendor/github.com/spf13/afero/regexpfs.go deleted file mode 100644 index 218f3b235..000000000 --- a/vendor/github.com/spf13/afero/regexpfs.go +++ /dev/null @@ -1,223 +0,0 @@ -package afero - -import ( - "os" - "regexp" - "syscall" - "time" -) - -// The RegexpFs filters files (not directories) by regular expression. Only -// files matching the given regexp will be allowed, all others get a ENOENT error ( -// "No such file or directory"). 
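The filtering described above can be sketched as follows; the file names and the regular expression are illustrative assumptions.

package main

import (
	"fmt"
	"regexp"

	"github.com/spf13/afero"
)

func main() {
	backing := afero.NewMemMapFs()
	_ = afero.WriteFile(backing, "/data/report.csv", []byte("a,b"), 0o644)
	_ = afero.WriteFile(backing, "/data/secret.key", []byte("k"), 0o600)

	// Only file names matching the regexp are visible; others return ENOENT.
	filtered := afero.NewRegexpFs(backing, regexp.MustCompile(`\.csv$`))

	if _, err := afero.ReadFile(filtered, "/data/report.csv"); err == nil {
		fmt.Println("report.csv is visible")
	}
	if _, err := afero.ReadFile(filtered, "/data/secret.key"); err != nil {
		fmt.Println("secret.key is filtered out:", err)
	}
}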
-type RegexpFs struct { - re *regexp.Regexp - source Fs -} - -func NewRegexpFs(source Fs, re *regexp.Regexp) Fs { - return &RegexpFs{source: source, re: re} -} - -type RegexpFile struct { - f File - re *regexp.Regexp -} - -func (r *RegexpFs) matchesName(name string) error { - if r.re == nil { - return nil - } - if r.re.MatchString(name) { - return nil - } - return syscall.ENOENT -} - -func (r *RegexpFs) dirOrMatches(name string) error { - dir, err := IsDir(r.source, name) - if err != nil { - return err - } - if dir { - return nil - } - return r.matchesName(name) -} - -func (r *RegexpFs) Chtimes(name string, a, m time.Time) error { - if err := r.dirOrMatches(name); err != nil { - return err - } - return r.source.Chtimes(name, a, m) -} - -func (r *RegexpFs) Chmod(name string, mode os.FileMode) error { - if err := r.dirOrMatches(name); err != nil { - return err - } - return r.source.Chmod(name, mode) -} - -func (r *RegexpFs) Chown(name string, uid, gid int) error { - if err := r.dirOrMatches(name); err != nil { - return err - } - return r.source.Chown(name, uid, gid) -} - -func (r *RegexpFs) Name() string { - return "RegexpFs" -} - -func (r *RegexpFs) Stat(name string) (os.FileInfo, error) { - if err := r.dirOrMatches(name); err != nil { - return nil, err - } - return r.source.Stat(name) -} - -func (r *RegexpFs) Rename(oldname, newname string) error { - dir, err := IsDir(r.source, oldname) - if err != nil { - return err - } - if dir { - return nil - } - if err := r.matchesName(oldname); err != nil { - return err - } - if err := r.matchesName(newname); err != nil { - return err - } - return r.source.Rename(oldname, newname) -} - -func (r *RegexpFs) RemoveAll(p string) error { - dir, err := IsDir(r.source, p) - if err != nil { - return err - } - if !dir { - if err := r.matchesName(p); err != nil { - return err - } - } - return r.source.RemoveAll(p) -} - -func (r *RegexpFs) Remove(name string) error { - if err := r.dirOrMatches(name); err != nil { - return err - } - return r.source.Remove(name) -} - -func (r *RegexpFs) OpenFile(name string, flag int, perm os.FileMode) (File, error) { - if err := r.dirOrMatches(name); err != nil { - return nil, err - } - return r.source.OpenFile(name, flag, perm) -} - -func (r *RegexpFs) Open(name string) (File, error) { - dir, err := IsDir(r.source, name) - if err != nil { - return nil, err - } - if !dir { - if err := r.matchesName(name); err != nil { - return nil, err - } - } - f, err := r.source.Open(name) - if err != nil { - return nil, err - } - return &RegexpFile{f: f, re: r.re}, nil -} - -func (r *RegexpFs) Mkdir(n string, p os.FileMode) error { - return r.source.Mkdir(n, p) -} - -func (r *RegexpFs) MkdirAll(n string, p os.FileMode) error { - return r.source.MkdirAll(n, p) -} - -func (r *RegexpFs) Create(name string) (File, error) { - if err := r.matchesName(name); err != nil { - return nil, err - } - return r.source.Create(name) -} - -func (f *RegexpFile) Close() error { - return f.f.Close() -} - -func (f *RegexpFile) Read(s []byte) (int, error) { - return f.f.Read(s) -} - -func (f *RegexpFile) ReadAt(s []byte, o int64) (int, error) { - return f.f.ReadAt(s, o) -} - -func (f *RegexpFile) Seek(o int64, w int) (int64, error) { - return f.f.Seek(o, w) -} - -func (f *RegexpFile) Write(s []byte) (int, error) { - return f.f.Write(s) -} - -func (f *RegexpFile) WriteAt(s []byte, o int64) (int, error) { - return f.f.WriteAt(s, o) -} - -func (f *RegexpFile) Name() string { - return f.f.Name() -} - -func (f *RegexpFile) Readdir(c int) (fi []os.FileInfo, err error) { 
- var rfi []os.FileInfo - rfi, err = f.f.Readdir(c) - if err != nil { - return nil, err - } - for _, i := range rfi { - if i.IsDir() || f.re.MatchString(i.Name()) { - fi = append(fi, i) - } - } - return fi, nil -} - -func (f *RegexpFile) Readdirnames(c int) (n []string, err error) { - fi, err := f.Readdir(c) - if err != nil { - return nil, err - } - for _, s := range fi { - n = append(n, s.Name()) - } - return n, nil -} - -func (f *RegexpFile) Stat() (os.FileInfo, error) { - return f.f.Stat() -} - -func (f *RegexpFile) Sync() error { - return f.f.Sync() -} - -func (f *RegexpFile) Truncate(s int64) error { - return f.f.Truncate(s) -} - -func (f *RegexpFile) WriteString(s string) (int, error) { - return f.f.WriteString(s) -} diff --git a/vendor/github.com/spf13/afero/symlink.go b/vendor/github.com/spf13/afero/symlink.go deleted file mode 100644 index aa6ae125b..000000000 --- a/vendor/github.com/spf13/afero/symlink.go +++ /dev/null @@ -1,55 +0,0 @@ -// Copyright © 2018 Steve Francia . -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package afero - -import ( - "errors" -) - -// Symlinker is an optional interface in Afero. It is only implemented by the -// filesystems saying so. -// It indicates support for 3 symlink related interfaces that implement the -// behaviors of the os methods: -// - Lstat -// - Symlink, and -// - Readlink -type Symlinker interface { - Lstater - Linker - LinkReader -} - -// Linker is an optional interface in Afero. It is only implemented by the -// filesystems saying so. -// It will call Symlink if the filesystem itself is, or it delegates to, the os filesystem, -// or the filesystem otherwise supports Symlink's. -type Linker interface { - SymlinkIfPossible(oldname, newname string) error -} - -// ErrNoSymlink is the error that will be wrapped in an os.LinkError if a file system -// does not support Symlink's either directly or through its delegated filesystem. -// As expressed by support for the Linker interface. -var ErrNoSymlink = errors.New("symlink not supported") - -// LinkReader is an optional interface in Afero. It is only implemented by the -// filesystems saying so. -type LinkReader interface { - ReadlinkIfPossible(name string) (string, error) -} - -// ErrNoReadlink is the error that will be wrapped in an os.Path if a file system -// does not support the readlink operation either directly or through its delegated filesystem. -// As expressed by support for the LinkReader interface. -var ErrNoReadlink = errors.New("readlink not supported") diff --git a/vendor/github.com/spf13/afero/unionFile.go b/vendor/github.com/spf13/afero/unionFile.go deleted file mode 100644 index 62dd6c93c..000000000 --- a/vendor/github.com/spf13/afero/unionFile.go +++ /dev/null @@ -1,330 +0,0 @@ -package afero - -import ( - "io" - "os" - "path/filepath" - "syscall" -) - -// The UnionFile implements the afero.File interface and will be returned -// when reading a directory present at least in the overlay or opening a file -// for writing. 
-// -// The calls to -// Readdir() and Readdirnames() merge the file os.FileInfo / names from the -// base and the overlay - for files present in both layers, only those -// from the overlay will be used. -// -// When opening files for writing (Create() / OpenFile() with the right flags) -// the operations will be done in both layers, starting with the overlay. A -// successful read in the overlay will move the cursor position in the base layer -// by the number of bytes read. -type UnionFile struct { - Base File - Layer File - Merger DirsMerger - off int - files []os.FileInfo -} - -func (f *UnionFile) Close() error { - // first close base, so we have a newer timestamp in the overlay. If we'd close - // the overlay first, we'd get a cacheStale the next time we access this file - // -> cache would be useless ;-) - if f.Base != nil { - f.Base.Close() - } - if f.Layer != nil { - return f.Layer.Close() - } - return BADFD -} - -func (f *UnionFile) Read(s []byte) (int, error) { - if f.Layer != nil { - n, err := f.Layer.Read(s) - if (err == nil || err == io.EOF) && f.Base != nil { - // advance the file position also in the base file, the next - // call may be a write at this position (or a seek with SEEK_CUR) - if _, seekErr := f.Base.Seek(int64(n), io.SeekCurrent); seekErr != nil { - // only overwrite err in case the seek fails: we need to - // report an eventual io.EOF to the caller - err = seekErr - } - } - return n, err - } - if f.Base != nil { - return f.Base.Read(s) - } - return 0, BADFD -} - -func (f *UnionFile) ReadAt(s []byte, o int64) (int, error) { - if f.Layer != nil { - n, err := f.Layer.ReadAt(s, o) - if (err == nil || err == io.EOF) && f.Base != nil { - _, err = f.Base.Seek(o+int64(n), io.SeekStart) - } - return n, err - } - if f.Base != nil { - return f.Base.ReadAt(s, o) - } - return 0, BADFD -} - -func (f *UnionFile) Seek(o int64, w int) (pos int64, err error) { - if f.Layer != nil { - pos, err = f.Layer.Seek(o, w) - if (err == nil || err == io.EOF) && f.Base != nil { - _, err = f.Base.Seek(o, w) - } - return pos, err - } - if f.Base != nil { - return f.Base.Seek(o, w) - } - return 0, BADFD -} - -func (f *UnionFile) Write(s []byte) (n int, err error) { - if f.Layer != nil { - n, err = f.Layer.Write(s) - if err == nil && f.Base != nil { // hmm, do we have fixed size files where a write may hit the EOF mark? - _, err = f.Base.Write(s) - } - return n, err - } - if f.Base != nil { - return f.Base.Write(s) - } - return 0, BADFD -} - -func (f *UnionFile) WriteAt(s []byte, o int64) (n int, err error) { - if f.Layer != nil { - n, err = f.Layer.WriteAt(s, o) - if err == nil && f.Base != nil { - _, err = f.Base.WriteAt(s, o) - } - return n, err - } - if f.Base != nil { - return f.Base.WriteAt(s, o) - } - return 0, BADFD -} - -func (f *UnionFile) Name() string { - if f.Layer != nil { - return f.Layer.Name() - } - return f.Base.Name() -} - -// DirsMerger is how UnionFile weaves two directories together. -// It takes the FileInfo slices from the layer and the base and returns a -// single view. 
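A hypothetical custom merger matching the DirsMerger signature declared below; it applies the same policy as defaultUnionMergeDirsFn (overlay entries win) and would be assigned to a UnionFile's Merger field:

    package example

    import "os"

    // preferLayer keeps every overlay (layer) entry and only adds base entries
    // whose names are not already present in the overlay.
    func preferLayer(lofi, bofi []os.FileInfo) ([]os.FileInfo, error) {
        seen := make(map[string]bool, len(lofi))
        out := make([]os.FileInfo, 0, len(lofi)+len(bofi))
        for _, fi := range lofi {
            seen[fi.Name()] = true
            out = append(out, fi)
        }
        for _, fi := range bofi {
            if !seen[fi.Name()] {
                out = append(out, fi)
            }
        }
        return out, nil
    }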
-type DirsMerger func(lofi, bofi []os.FileInfo) ([]os.FileInfo, error) - -var defaultUnionMergeDirsFn = func(lofi, bofi []os.FileInfo) ([]os.FileInfo, error) { - files := make(map[string]os.FileInfo) - - for _, fi := range lofi { - files[fi.Name()] = fi - } - - for _, fi := range bofi { - if _, exists := files[fi.Name()]; !exists { - files[fi.Name()] = fi - } - } - - rfi := make([]os.FileInfo, len(files)) - - i := 0 - for _, fi := range files { - rfi[i] = fi - i++ - } - - return rfi, nil -} - -// Readdir will weave the two directories together and -// return a single view of the overlayed directories. -// At the end of the directory view, the error is io.EOF if c > 0. -func (f *UnionFile) Readdir(c int) (ofi []os.FileInfo, err error) { - var merge DirsMerger = f.Merger - if merge == nil { - merge = defaultUnionMergeDirsFn - } - - if f.off == 0 { - var lfi []os.FileInfo - if f.Layer != nil { - lfi, err = f.Layer.Readdir(-1) - if err != nil { - return nil, err - } - } - - var bfi []os.FileInfo - if f.Base != nil { - bfi, err = f.Base.Readdir(-1) - if err != nil { - return nil, err - } - - } - merged, err := merge(lfi, bfi) - if err != nil { - return nil, err - } - f.files = append(f.files, merged...) - } - files := f.files[f.off:] - - if c <= 0 { - return files, nil - } - - if len(files) == 0 { - return nil, io.EOF - } - - if c > len(files) { - c = len(files) - } - - defer func() { f.off += c }() - return files[:c], nil -} - -func (f *UnionFile) Readdirnames(c int) ([]string, error) { - rfi, err := f.Readdir(c) - if err != nil { - return nil, err - } - var names []string - for _, fi := range rfi { - names = append(names, fi.Name()) - } - return names, nil -} - -func (f *UnionFile) Stat() (os.FileInfo, error) { - if f.Layer != nil { - return f.Layer.Stat() - } - if f.Base != nil { - return f.Base.Stat() - } - return nil, BADFD -} - -func (f *UnionFile) Sync() (err error) { - if f.Layer != nil { - err = f.Layer.Sync() - if err == nil && f.Base != nil { - err = f.Base.Sync() - } - return err - } - if f.Base != nil { - return f.Base.Sync() - } - return BADFD -} - -func (f *UnionFile) Truncate(s int64) (err error) { - if f.Layer != nil { - err = f.Layer.Truncate(s) - if err == nil && f.Base != nil { - err = f.Base.Truncate(s) - } - return err - } - if f.Base != nil { - return f.Base.Truncate(s) - } - return BADFD -} - -func (f *UnionFile) WriteString(s string) (n int, err error) { - if f.Layer != nil { - n, err = f.Layer.WriteString(s) - if err == nil && f.Base != nil { - _, err = f.Base.WriteString(s) - } - return n, err - } - if f.Base != nil { - return f.Base.WriteString(s) - } - return 0, BADFD -} - -func copyFile(base Fs, layer Fs, name string, bfh File) error { - // First make sure the directory exists - exists, err := Exists(layer, filepath.Dir(name)) - if err != nil { - return err - } - if !exists { - err = layer.MkdirAll(filepath.Dir(name), 0o777) // FIXME? 
- if err != nil { - return err - } - } - - // Create the file on the overlay - lfh, err := layer.Create(name) - if err != nil { - return err - } - n, err := io.Copy(lfh, bfh) - if err != nil { - // If anything fails, clean up the file - layer.Remove(name) - lfh.Close() - return err - } - - bfi, err := bfh.Stat() - if err != nil || bfi.Size() != n { - layer.Remove(name) - lfh.Close() - return syscall.EIO - } - - err = lfh.Close() - if err != nil { - layer.Remove(name) - lfh.Close() - return err - } - return layer.Chtimes(name, bfi.ModTime(), bfi.ModTime()) -} - -func copyToLayer(base Fs, layer Fs, name string) error { - bfh, err := base.Open(name) - if err != nil { - return err - } - defer bfh.Close() - - return copyFile(base, layer, name, bfh) -} - -func copyFileToLayer(base Fs, layer Fs, name string, flag int, perm os.FileMode) error { - bfh, err := base.OpenFile(name, flag, perm) - if err != nil { - return err - } - defer bfh.Close() - - return copyFile(base, layer, name, bfh) -} diff --git a/vendor/github.com/spf13/afero/util.go b/vendor/github.com/spf13/afero/util.go deleted file mode 100644 index 9e4cba274..000000000 --- a/vendor/github.com/spf13/afero/util.go +++ /dev/null @@ -1,329 +0,0 @@ -// Copyright ©2015 Steve Francia -// Portions Copyright ©2015 The Hugo Authors -// Portions Copyright 2016-present Bjørn Erik Pedersen -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package afero - -import ( - "bytes" - "fmt" - "io" - "os" - "path/filepath" - "strings" - "unicode" - - "golang.org/x/text/runes" - "golang.org/x/text/transform" - "golang.org/x/text/unicode/norm" -) - -// Filepath separator defined by os.Separator. -const FilePathSeparator = string(filepath.Separator) - -// Takes a reader and a path and writes the content -func (a Afero) WriteReader(path string, r io.Reader) (err error) { - return WriteReader(a.Fs, path, r) -} - -func WriteReader(fs Fs, path string, r io.Reader) (err error) { - dir, _ := filepath.Split(path) - ospath := filepath.FromSlash(dir) - - if ospath != "" { - err = fs.MkdirAll(ospath, 0o777) // rwx, rw, r - if err != nil { - if err != os.ErrExist { - return err - } - } - } - - file, err := fs.Create(path) - if err != nil { - return - } - defer file.Close() - - _, err = io.Copy(file, r) - return -} - -// Same as WriteReader but checks to see if file/directory already exists. 
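A short sketch of the difference between the deleted WriteReader and SafeWriteReader helpers, assuming an in-memory filesystem for illustration:

    package main

    import (
        "fmt"
        "strings"

        "github.com/spf13/afero"
    )

    func main() {
        fs := afero.NewMemMapFs() // assumed in-memory filesystem

        // WriteReader creates missing parent directories and copies the reader.
        _ = afero.WriteReader(fs, "/data/out.txt", strings.NewReader("v1"))

        // SafeWriteReader additionally refuses to overwrite an existing file.
        err := afero.SafeWriteReader(fs, "/data/out.txt", strings.NewReader("v2"))
        fmt.Println(err) // /data/out.txt already exists
    }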
-func (a Afero) SafeWriteReader(path string, r io.Reader) (err error) { - return SafeWriteReader(a.Fs, path, r) -} - -func SafeWriteReader(fs Fs, path string, r io.Reader) (err error) { - dir, _ := filepath.Split(path) - ospath := filepath.FromSlash(dir) - - if ospath != "" { - err = fs.MkdirAll(ospath, 0o777) // rwx, rw, r - if err != nil { - return - } - } - - exists, err := Exists(fs, path) - if err != nil { - return - } - if exists { - return fmt.Errorf("%v already exists", path) - } - - file, err := fs.Create(path) - if err != nil { - return - } - defer file.Close() - - _, err = io.Copy(file, r) - return -} - -func (a Afero) GetTempDir(subPath string) string { - return GetTempDir(a.Fs, subPath) -} - -// GetTempDir returns the default temp directory with trailing slash -// if subPath is not empty then it will be created recursively with mode 777 rwx rwx rwx -func GetTempDir(fs Fs, subPath string) string { - addSlash := func(p string) string { - if FilePathSeparator != p[len(p)-1:] { - p = p + FilePathSeparator - } - return p - } - dir := addSlash(os.TempDir()) - - if subPath != "" { - // preserve windows backslash :-( - if FilePathSeparator == "\\" { - subPath = strings.Replace(subPath, "\\", "____", -1) - } - dir = dir + UnicodeSanitize((subPath)) - if FilePathSeparator == "\\" { - dir = strings.Replace(dir, "____", "\\", -1) - } - - if exists, _ := Exists(fs, dir); exists { - return addSlash(dir) - } - - err := fs.MkdirAll(dir, 0o777) - if err != nil { - panic(err) - } - dir = addSlash(dir) - } - return dir -} - -// Rewrite string to remove non-standard path characters -func UnicodeSanitize(s string) string { - source := []rune(s) - target := make([]rune, 0, len(source)) - - for _, r := range source { - if unicode.IsLetter(r) || - unicode.IsDigit(r) || - unicode.IsMark(r) || - r == '.' || - r == '/' || - r == '\\' || - r == '_' || - r == '-' || - r == '%' || - r == ' ' || - r == '#' { - target = append(target, r) - } - } - - return string(target) -} - -// Transform characters with accents into plain forms. -func NeuterAccents(s string) string { - t := transform.Chain(norm.NFD, runes.Remove(runes.In(unicode.Mn)), norm.NFC) - result, _, _ := transform.String(t, string(s)) - - return result -} - -func (a Afero) FileContainsBytes(filename string, subslice []byte) (bool, error) { - return FileContainsBytes(a.Fs, filename, subslice) -} - -// Check if a file contains a specified byte slice. -func FileContainsBytes(fs Fs, filename string, subslice []byte) (bool, error) { - f, err := fs.Open(filename) - if err != nil { - return false, err - } - defer f.Close() - - return readerContainsAny(f, subslice), nil -} - -func (a Afero) FileContainsAnyBytes(filename string, subslices [][]byte) (bool, error) { - return FileContainsAnyBytes(a.Fs, filename, subslices) -} - -// Check if a file contains any of the specified byte slices. -func FileContainsAnyBytes(fs Fs, filename string, subslices [][]byte) (bool, error) { - f, err := fs.Open(filename) - if err != nil { - return false, err - } - defer f.Close() - - return readerContainsAny(f, subslices...), nil -} - -// readerContains reports whether any of the subslices is within r. 
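A usage sketch for the deleted FileContainsBytes / FileContainsAnyBytes helpers, again assuming an in-memory filesystem:

    package main

    import (
        "fmt"

        "github.com/spf13/afero"
    )

    func main() {
        fs := afero.NewMemMapFs() // assumed in-memory filesystem
        _ = afero.WriteFile(fs, "/app.log", []byte("level=error msg=boom"), 0o644)

        one, _ := afero.FileContainsBytes(fs, "/app.log", []byte("error"))
        any, _ := afero.FileContainsAnyBytes(fs, "/app.log", [][]byte{[]byte("warn"), []byte("boom")})
        fmt.Println(one, any) // true true
    }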
-func readerContainsAny(r io.Reader, subslices ...[]byte) bool { - if r == nil || len(subslices) == 0 { - return false - } - - largestSlice := 0 - - for _, sl := range subslices { - if len(sl) > largestSlice { - largestSlice = len(sl) - } - } - - if largestSlice == 0 { - return false - } - - bufflen := largestSlice * 4 - halflen := bufflen / 2 - buff := make([]byte, bufflen) - var err error - var n, i int - - for { - i++ - if i == 1 { - n, err = io.ReadAtLeast(r, buff[:halflen], halflen) - } else { - if i != 2 { - // shift left to catch overlapping matches - copy(buff[:], buff[halflen:]) - } - n, err = io.ReadAtLeast(r, buff[halflen:], halflen) - } - - if n > 0 { - for _, sl := range subslices { - if bytes.Contains(buff, sl) { - return true - } - } - } - - if err != nil { - break - } - } - return false -} - -func (a Afero) DirExists(path string) (bool, error) { - return DirExists(a.Fs, path) -} - -// DirExists checks if a path exists and is a directory. -func DirExists(fs Fs, path string) (bool, error) { - fi, err := fs.Stat(path) - if err == nil && fi.IsDir() { - return true, nil - } - if os.IsNotExist(err) { - return false, nil - } - return false, err -} - -func (a Afero) IsDir(path string) (bool, error) { - return IsDir(a.Fs, path) -} - -// IsDir checks if a given path is a directory. -func IsDir(fs Fs, path string) (bool, error) { - fi, err := fs.Stat(path) - if err != nil { - return false, err - } - return fi.IsDir(), nil -} - -func (a Afero) IsEmpty(path string) (bool, error) { - return IsEmpty(a.Fs, path) -} - -// IsEmpty checks if a given file or directory is empty. -func IsEmpty(fs Fs, path string) (bool, error) { - if b, _ := Exists(fs, path); !b { - return false, fmt.Errorf("%q path does not exist", path) - } - fi, err := fs.Stat(path) - if err != nil { - return false, err - } - if fi.IsDir() { - f, err := fs.Open(path) - if err != nil { - return false, err - } - defer f.Close() - list, err := f.Readdir(-1) - if err != nil { - return false, err - } - return len(list) == 0, nil - } - return fi.Size() == 0, nil -} - -func (a Afero) Exists(path string) (bool, error) { - return Exists(a.Fs, path) -} - -// Check if a file or directory exists. 
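A sketch of the deleted existence and emptiness helpers (Exists, DirExists, IsEmpty), assuming an in-memory filesystem:

    package main

    import (
        "fmt"

        "github.com/spf13/afero"
    )

    func main() {
        fs := afero.NewMemMapFs() // assumed in-memory filesystem
        _ = fs.MkdirAll("/var/empty", 0o755)

        dir, _ := afero.DirExists(fs, "/var/empty")
        empty, _ := afero.IsEmpty(fs, "/var/empty")
        missing, _ := afero.Exists(fs, "/var/missing")
        fmt.Println(dir, empty, missing) // true true false
    }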
-func Exists(fs Fs, path string) (bool, error) { - _, err := fs.Stat(path) - if err == nil { - return true, nil - } - if os.IsNotExist(err) { - return false, nil - } - return false, err -} - -func FullBaseFsPath(basePathFs *BasePathFs, relativePath string) string { - combinedPath := filepath.Join(basePathFs.path, relativePath) - if parent, ok := basePathFs.source.(*BasePathFs); ok { - return FullBaseFsPath(parent, combinedPath) - } - - return combinedPath -} diff --git a/vendor/github.com/spf13/cast/.gitignore b/vendor/github.com/spf13/cast/.gitignore deleted file mode 100644 index 53053a8ac..000000000 --- a/vendor/github.com/spf13/cast/.gitignore +++ /dev/null @@ -1,25 +0,0 @@ -# Compiled Object files, Static and Dynamic libs (Shared Objects) -*.o -*.a -*.so - -# Folders -_obj -_test - -# Architecture specific extensions/prefixes -*.[568vq] -[568vq].out - -*.cgo1.go -*.cgo2.c -_cgo_defun.c -_cgo_gotypes.go -_cgo_export.* - -_testmain.go - -*.exe -*.test - -*.bench diff --git a/vendor/github.com/spf13/cast/LICENSE b/vendor/github.com/spf13/cast/LICENSE deleted file mode 100644 index 4527efb9c..000000000 --- a/vendor/github.com/spf13/cast/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -The MIT License (MIT) - -Copyright (c) 2014 Steve Francia - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. \ No newline at end of file diff --git a/vendor/github.com/spf13/cast/Makefile b/vendor/github.com/spf13/cast/Makefile deleted file mode 100644 index f01a5dbb6..000000000 --- a/vendor/github.com/spf13/cast/Makefile +++ /dev/null @@ -1,40 +0,0 @@ -GOVERSION := $(shell go version | cut -d ' ' -f 3 | cut -d '.' -f 2) - -.PHONY: check fmt lint test test-race vet test-cover-html help -.DEFAULT_GOAL := help - -check: test-race fmt vet lint ## Run tests and linters - -test: ## Run tests - go test ./... - -test-race: ## Run tests with race detector - go test -race ./... - -fmt: ## Run gofmt linter -ifeq "$(GOVERSION)" "12" - @for d in `go list` ; do \ - if [ "`gofmt -l -s $$GOPATH/src/$$d | tee /dev/stderr`" ]; then \ - echo "^ improperly formatted go files" && echo && exit 1; \ - fi \ - done -endif - -lint: ## Run golint linter - @for d in `go list` ; do \ - if [ "`golint $$d | tee /dev/stderr`" ]; then \ - echo "^ golint errors!" && echo && exit 1; \ - fi \ - done - -vet: ## Run go vet linter - @if [ "`go vet | tee /dev/stderr`" ]; then \ - echo "^ go vet errors!" 
&& echo && exit 1; \ - fi - -test-cover-html: ## Generate test coverage report - go test -coverprofile=coverage.out -covermode=count - go tool cover -func=coverage.out - -help: - @grep -E '^[a-zA-Z0-9_-]+:.*?## .*$$' $(MAKEFILE_LIST) | sort | awk 'BEGIN {FS = ":.*?## "}; {printf "\033[36m%-30s\033[0m %s\n", $$1, $$2}' diff --git a/vendor/github.com/spf13/cast/README.md b/vendor/github.com/spf13/cast/README.md deleted file mode 100644 index 0e9e14593..000000000 --- a/vendor/github.com/spf13/cast/README.md +++ /dev/null @@ -1,75 +0,0 @@ -# cast - -[![GitHub Workflow Status](https://img.shields.io/github/actions/workflow/status/spf13/cast/ci.yaml?branch=master&style=flat-square)](https://github.com/spf13/cast/actions/workflows/ci.yaml) -[![PkgGoDev](https://pkg.go.dev/badge/mod/github.com/spf13/cast)](https://pkg.go.dev/mod/github.com/spf13/cast) -![Go Version](https://img.shields.io/badge/go%20version-%3E=1.16-61CFDD.svg?style=flat-square) -[![Go Report Card](https://goreportcard.com/badge/github.com/spf13/cast?style=flat-square)](https://goreportcard.com/report/github.com/spf13/cast) - -Easy and safe casting from one type to another in Go - -Don’t Panic! ... Cast - -## What is Cast? - -Cast is a library to convert between different go types in a consistent and easy way. - -Cast provides simple functions to easily convert a number to a string, an -interface into a bool, etc. Cast does this intelligently when an obvious -conversion is possible. It doesn’t make any attempts to guess what you meant, -for example you can only convert a string to an int when it is a string -representation of an int such as “8”. Cast was developed for use in -[Hugo](https://gohugo.io), a website engine which uses YAML, TOML or JSON -for meta data. - -## Why use Cast? - -When working with dynamic data in Go you often need to cast or convert the data -from one type into another. Cast goes beyond just using type assertion (though -it uses that when possible) to provide a very straightforward and convenient -library. - -If you are working with interfaces to handle things like dynamic content -you’ll need an easy way to convert an interface into a given type. This -is the library for you. - -If you are taking in data from YAML, TOML or JSON or other formats which lack -full types, then Cast is the library for you. - -## Usage - -Cast provides a handful of To_____ methods. These methods will always return -the desired type. **If input is provided that will not convert to that type, the -0 or nil value for that type will be returned**. - -Cast also provides identical methods To_____E. These return the same result as -the To_____ methods, plus an additional error which tells you if it successfully -converted. Using these methods you can tell the difference between when the -input matched the zero value or when the conversion failed and the zero value -was returned. - -The following examples are merely a sample of what is available. Please review -the code for a complete set. 
- -### Example ‘ToString’: - - cast.ToString("mayonegg") // "mayonegg" - cast.ToString(8) // "8" - cast.ToString(8.31) // "8.31" - cast.ToString([]byte("one time")) // "one time" - cast.ToString(nil) // "" - - var foo interface{} = "one more time" - cast.ToString(foo) // "one more time" - - -### Example ‘ToInt’: - - cast.ToInt(8) // 8 - cast.ToInt(8.31) // 8 - cast.ToInt("8") // 8 - cast.ToInt(true) // 1 - cast.ToInt(false) // 0 - - var eight interface{} = 8 - cast.ToInt(eight) // 8 - cast.ToInt(nil) // 0 diff --git a/vendor/github.com/spf13/cast/cast.go b/vendor/github.com/spf13/cast/cast.go deleted file mode 100644 index 0cfe9418d..000000000 --- a/vendor/github.com/spf13/cast/cast.go +++ /dev/null @@ -1,176 +0,0 @@ -// Copyright © 2014 Steve Francia . -// -// Use of this source code is governed by an MIT-style -// license that can be found in the LICENSE file. - -// Package cast provides easy and safe casting in Go. -package cast - -import "time" - -// ToBool casts an interface to a bool type. -func ToBool(i interface{}) bool { - v, _ := ToBoolE(i) - return v -} - -// ToTime casts an interface to a time.Time type. -func ToTime(i interface{}) time.Time { - v, _ := ToTimeE(i) - return v -} - -func ToTimeInDefaultLocation(i interface{}, location *time.Location) time.Time { - v, _ := ToTimeInDefaultLocationE(i, location) - return v -} - -// ToDuration casts an interface to a time.Duration type. -func ToDuration(i interface{}) time.Duration { - v, _ := ToDurationE(i) - return v -} - -// ToFloat64 casts an interface to a float64 type. -func ToFloat64(i interface{}) float64 { - v, _ := ToFloat64E(i) - return v -} - -// ToFloat32 casts an interface to a float32 type. -func ToFloat32(i interface{}) float32 { - v, _ := ToFloat32E(i) - return v -} - -// ToInt64 casts an interface to an int64 type. -func ToInt64(i interface{}) int64 { - v, _ := ToInt64E(i) - return v -} - -// ToInt32 casts an interface to an int32 type. -func ToInt32(i interface{}) int32 { - v, _ := ToInt32E(i) - return v -} - -// ToInt16 casts an interface to an int16 type. -func ToInt16(i interface{}) int16 { - v, _ := ToInt16E(i) - return v -} - -// ToInt8 casts an interface to an int8 type. -func ToInt8(i interface{}) int8 { - v, _ := ToInt8E(i) - return v -} - -// ToInt casts an interface to an int type. -func ToInt(i interface{}) int { - v, _ := ToIntE(i) - return v -} - -// ToUint casts an interface to a uint type. -func ToUint(i interface{}) uint { - v, _ := ToUintE(i) - return v -} - -// ToUint64 casts an interface to a uint64 type. -func ToUint64(i interface{}) uint64 { - v, _ := ToUint64E(i) - return v -} - -// ToUint32 casts an interface to a uint32 type. -func ToUint32(i interface{}) uint32 { - v, _ := ToUint32E(i) - return v -} - -// ToUint16 casts an interface to a uint16 type. -func ToUint16(i interface{}) uint16 { - v, _ := ToUint16E(i) - return v -} - -// ToUint8 casts an interface to a uint8 type. -func ToUint8(i interface{}) uint8 { - v, _ := ToUint8E(i) - return v -} - -// ToString casts an interface to a string type. -func ToString(i interface{}) string { - v, _ := ToStringE(i) - return v -} - -// ToStringMapString casts an interface to a map[string]string type. -func ToStringMapString(i interface{}) map[string]string { - v, _ := ToStringMapStringE(i) - return v -} - -// ToStringMapStringSlice casts an interface to a map[string][]string type. 
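A minimal sketch of the To_____ versus To_____E split the README above describes (the plain wrappers in cast.go simply discard the error from their E counterparts):

    package main

    import (
        "fmt"

        "github.com/spf13/cast"
    )

    func main() {
        // The To___ helpers swallow conversion errors and return the zero value...
        fmt.Println(cast.ToInt("not a number")) // 0

        // ...while the To___E variants report whether the conversion succeeded.
        n, err := cast.ToIntE("not a number")
        fmt.Println(n, err != nil) // 0 true
    }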
-func ToStringMapStringSlice(i interface{}) map[string][]string { - v, _ := ToStringMapStringSliceE(i) - return v -} - -// ToStringMapBool casts an interface to a map[string]bool type. -func ToStringMapBool(i interface{}) map[string]bool { - v, _ := ToStringMapBoolE(i) - return v -} - -// ToStringMapInt casts an interface to a map[string]int type. -func ToStringMapInt(i interface{}) map[string]int { - v, _ := ToStringMapIntE(i) - return v -} - -// ToStringMapInt64 casts an interface to a map[string]int64 type. -func ToStringMapInt64(i interface{}) map[string]int64 { - v, _ := ToStringMapInt64E(i) - return v -} - -// ToStringMap casts an interface to a map[string]interface{} type. -func ToStringMap(i interface{}) map[string]interface{} { - v, _ := ToStringMapE(i) - return v -} - -// ToSlice casts an interface to a []interface{} type. -func ToSlice(i interface{}) []interface{} { - v, _ := ToSliceE(i) - return v -} - -// ToBoolSlice casts an interface to a []bool type. -func ToBoolSlice(i interface{}) []bool { - v, _ := ToBoolSliceE(i) - return v -} - -// ToStringSlice casts an interface to a []string type. -func ToStringSlice(i interface{}) []string { - v, _ := ToStringSliceE(i) - return v -} - -// ToIntSlice casts an interface to a []int type. -func ToIntSlice(i interface{}) []int { - v, _ := ToIntSliceE(i) - return v -} - -// ToDurationSlice casts an interface to a []time.Duration type. -func ToDurationSlice(i interface{}) []time.Duration { - v, _ := ToDurationSliceE(i) - return v -} diff --git a/vendor/github.com/spf13/cast/caste.go b/vendor/github.com/spf13/cast/caste.go deleted file mode 100644 index d49bbf83e..000000000 --- a/vendor/github.com/spf13/cast/caste.go +++ /dev/null @@ -1,1498 +0,0 @@ -// Copyright © 2014 Steve Francia . -// -// Use of this source code is governed by an MIT-style -// license that can be found in the LICENSE file. - -package cast - -import ( - "encoding/json" - "errors" - "fmt" - "html/template" - "reflect" - "strconv" - "strings" - "time" -) - -var errNegativeNotAllowed = errors.New("unable to cast negative value") - -// ToTimeE casts an interface to a time.Time type. -func ToTimeE(i interface{}) (tim time.Time, err error) { - return ToTimeInDefaultLocationE(i, time.UTC) -} - -// ToTimeInDefaultLocationE casts an empty interface to time.Time, -// interpreting inputs without a timezone to be in the given location, -// or the local timezone if nil. -func ToTimeInDefaultLocationE(i interface{}, location *time.Location) (tim time.Time, err error) { - i = indirect(i) - - switch v := i.(type) { - case time.Time: - return v, nil - case string: - return StringToDateInDefaultLocation(v, location) - case json.Number: - s, err1 := ToInt64E(v) - if err1 != nil { - return time.Time{}, fmt.Errorf("unable to cast %#v of type %T to Time", i, i) - } - return time.Unix(s, 0), nil - case int: - return time.Unix(int64(v), 0), nil - case int64: - return time.Unix(v, 0), nil - case int32: - return time.Unix(int64(v), 0), nil - case uint: - return time.Unix(int64(v), 0), nil - case uint64: - return time.Unix(int64(v), 0), nil - case uint32: - return time.Unix(int64(v), 0), nil - default: - return time.Time{}, fmt.Errorf("unable to cast %#v of type %T to Time", i, i) - } -} - -// ToDurationE casts an interface to a time.Duration type. 
-func ToDurationE(i interface{}) (d time.Duration, err error) { - i = indirect(i) - - switch s := i.(type) { - case time.Duration: - return s, nil - case int, int64, int32, int16, int8, uint, uint64, uint32, uint16, uint8: - d = time.Duration(ToInt64(s)) - return - case float32, float64: - d = time.Duration(ToFloat64(s)) - return - case string: - if strings.ContainsAny(s, "nsuµmh") { - d, err = time.ParseDuration(s) - } else { - d, err = time.ParseDuration(s + "ns") - } - return - case json.Number: - var v float64 - v, err = s.Float64() - d = time.Duration(v) - return - default: - err = fmt.Errorf("unable to cast %#v of type %T to Duration", i, i) - return - } -} - -// ToBoolE casts an interface to a bool type. -func ToBoolE(i interface{}) (bool, error) { - i = indirect(i) - - switch b := i.(type) { - case bool: - return b, nil - case nil: - return false, nil - case int: - return b != 0, nil - case int64: - return b != 0, nil - case int32: - return b != 0, nil - case int16: - return b != 0, nil - case int8: - return b != 0, nil - case uint: - return b != 0, nil - case uint64: - return b != 0, nil - case uint32: - return b != 0, nil - case uint16: - return b != 0, nil - case uint8: - return b != 0, nil - case float64: - return b != 0, nil - case float32: - return b != 0, nil - case time.Duration: - return b != 0, nil - case string: - return strconv.ParseBool(i.(string)) - case json.Number: - v, err := ToInt64E(b) - if err == nil { - return v != 0, nil - } - return false, fmt.Errorf("unable to cast %#v of type %T to bool", i, i) - default: - return false, fmt.Errorf("unable to cast %#v of type %T to bool", i, i) - } -} - -// ToFloat64E casts an interface to a float64 type. -func ToFloat64E(i interface{}) (float64, error) { - i = indirect(i) - - intv, ok := toInt(i) - if ok { - return float64(intv), nil - } - - switch s := i.(type) { - case float64: - return s, nil - case float32: - return float64(s), nil - case int64: - return float64(s), nil - case int32: - return float64(s), nil - case int16: - return float64(s), nil - case int8: - return float64(s), nil - case uint: - return float64(s), nil - case uint64: - return float64(s), nil - case uint32: - return float64(s), nil - case uint16: - return float64(s), nil - case uint8: - return float64(s), nil - case string: - v, err := strconv.ParseFloat(s, 64) - if err == nil { - return v, nil - } - return 0, fmt.Errorf("unable to cast %#v of type %T to float64", i, i) - case json.Number: - v, err := s.Float64() - if err == nil { - return v, nil - } - return 0, fmt.Errorf("unable to cast %#v of type %T to float64", i, i) - case bool: - if s { - return 1, nil - } - return 0, nil - case nil: - return 0, nil - default: - return 0, fmt.Errorf("unable to cast %#v of type %T to float64", i, i) - } -} - -// ToFloat32E casts an interface to a float32 type. 
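A sketch of the duration behaviour implemented by ToDurationE above: strings carrying a unit suffix go through time.ParseDuration, everything else is read as nanoseconds:

    package main

    import (
        "fmt"

        "github.com/spf13/cast"
    )

    func main() {
        // Strings containing a unit suffix are handed to time.ParseDuration...
        fmt.Println(cast.ToDuration("1500ms")) // 1.5s

        // ...while plain numbers (and numeric strings without a unit) are
        // treated as nanoseconds.
        fmt.Println(cast.ToDuration(1500)) // 1.5µs
    }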
-func ToFloat32E(i interface{}) (float32, error) { - i = indirect(i) - - intv, ok := toInt(i) - if ok { - return float32(intv), nil - } - - switch s := i.(type) { - case float64: - return float32(s), nil - case float32: - return s, nil - case int64: - return float32(s), nil - case int32: - return float32(s), nil - case int16: - return float32(s), nil - case int8: - return float32(s), nil - case uint: - return float32(s), nil - case uint64: - return float32(s), nil - case uint32: - return float32(s), nil - case uint16: - return float32(s), nil - case uint8: - return float32(s), nil - case string: - v, err := strconv.ParseFloat(s, 32) - if err == nil { - return float32(v), nil - } - return 0, fmt.Errorf("unable to cast %#v of type %T to float32", i, i) - case json.Number: - v, err := s.Float64() - if err == nil { - return float32(v), nil - } - return 0, fmt.Errorf("unable to cast %#v of type %T to float32", i, i) - case bool: - if s { - return 1, nil - } - return 0, nil - case nil: - return 0, nil - default: - return 0, fmt.Errorf("unable to cast %#v of type %T to float32", i, i) - } -} - -// ToInt64E casts an interface to an int64 type. -func ToInt64E(i interface{}) (int64, error) { - i = indirect(i) - - intv, ok := toInt(i) - if ok { - return int64(intv), nil - } - - switch s := i.(type) { - case int64: - return s, nil - case int32: - return int64(s), nil - case int16: - return int64(s), nil - case int8: - return int64(s), nil - case uint: - return int64(s), nil - case uint64: - return int64(s), nil - case uint32: - return int64(s), nil - case uint16: - return int64(s), nil - case uint8: - return int64(s), nil - case float64: - return int64(s), nil - case float32: - return int64(s), nil - case string: - v, err := strconv.ParseInt(trimZeroDecimal(s), 0, 0) - if err == nil { - return v, nil - } - return 0, fmt.Errorf("unable to cast %#v of type %T to int64", i, i) - case json.Number: - return ToInt64E(string(s)) - case bool: - if s { - return 1, nil - } - return 0, nil - case nil: - return 0, nil - default: - return 0, fmt.Errorf("unable to cast %#v of type %T to int64", i, i) - } -} - -// ToInt32E casts an interface to an int32 type. -func ToInt32E(i interface{}) (int32, error) { - i = indirect(i) - - intv, ok := toInt(i) - if ok { - return int32(intv), nil - } - - switch s := i.(type) { - case int64: - return int32(s), nil - case int32: - return s, nil - case int16: - return int32(s), nil - case int8: - return int32(s), nil - case uint: - return int32(s), nil - case uint64: - return int32(s), nil - case uint32: - return int32(s), nil - case uint16: - return int32(s), nil - case uint8: - return int32(s), nil - case float64: - return int32(s), nil - case float32: - return int32(s), nil - case string: - v, err := strconv.ParseInt(trimZeroDecimal(s), 0, 0) - if err == nil { - return int32(v), nil - } - return 0, fmt.Errorf("unable to cast %#v of type %T to int32", i, i) - case json.Number: - return ToInt32E(string(s)) - case bool: - if s { - return 1, nil - } - return 0, nil - case nil: - return 0, nil - default: - return 0, fmt.Errorf("unable to cast %#v of type %T to int32", i, i) - } -} - -// ToInt16E casts an interface to an int16 type. 
-func ToInt16E(i interface{}) (int16, error) { - i = indirect(i) - - intv, ok := toInt(i) - if ok { - return int16(intv), nil - } - - switch s := i.(type) { - case int64: - return int16(s), nil - case int32: - return int16(s), nil - case int16: - return s, nil - case int8: - return int16(s), nil - case uint: - return int16(s), nil - case uint64: - return int16(s), nil - case uint32: - return int16(s), nil - case uint16: - return int16(s), nil - case uint8: - return int16(s), nil - case float64: - return int16(s), nil - case float32: - return int16(s), nil - case string: - v, err := strconv.ParseInt(trimZeroDecimal(s), 0, 0) - if err == nil { - return int16(v), nil - } - return 0, fmt.Errorf("unable to cast %#v of type %T to int16", i, i) - case json.Number: - return ToInt16E(string(s)) - case bool: - if s { - return 1, nil - } - return 0, nil - case nil: - return 0, nil - default: - return 0, fmt.Errorf("unable to cast %#v of type %T to int16", i, i) - } -} - -// ToInt8E casts an interface to an int8 type. -func ToInt8E(i interface{}) (int8, error) { - i = indirect(i) - - intv, ok := toInt(i) - if ok { - return int8(intv), nil - } - - switch s := i.(type) { - case int64: - return int8(s), nil - case int32: - return int8(s), nil - case int16: - return int8(s), nil - case int8: - return s, nil - case uint: - return int8(s), nil - case uint64: - return int8(s), nil - case uint32: - return int8(s), nil - case uint16: - return int8(s), nil - case uint8: - return int8(s), nil - case float64: - return int8(s), nil - case float32: - return int8(s), nil - case string: - v, err := strconv.ParseInt(trimZeroDecimal(s), 0, 0) - if err == nil { - return int8(v), nil - } - return 0, fmt.Errorf("unable to cast %#v of type %T to int8", i, i) - case json.Number: - return ToInt8E(string(s)) - case bool: - if s { - return 1, nil - } - return 0, nil - case nil: - return 0, nil - default: - return 0, fmt.Errorf("unable to cast %#v of type %T to int8", i, i) - } -} - -// ToIntE casts an interface to an int type. -func ToIntE(i interface{}) (int, error) { - i = indirect(i) - - intv, ok := toInt(i) - if ok { - return intv, nil - } - - switch s := i.(type) { - case int64: - return int(s), nil - case int32: - return int(s), nil - case int16: - return int(s), nil - case int8: - return int(s), nil - case uint: - return int(s), nil - case uint64: - return int(s), nil - case uint32: - return int(s), nil - case uint16: - return int(s), nil - case uint8: - return int(s), nil - case float64: - return int(s), nil - case float32: - return int(s), nil - case string: - v, err := strconv.ParseInt(trimZeroDecimal(s), 0, 0) - if err == nil { - return int(v), nil - } - return 0, fmt.Errorf("unable to cast %#v of type %T to int64", i, i) - case json.Number: - return ToIntE(string(s)) - case bool: - if s { - return 1, nil - } - return 0, nil - case nil: - return 0, nil - default: - return 0, fmt.Errorf("unable to cast %#v of type %T to int", i, i) - } -} - -// ToUintE casts an interface to a uint type. 
-func ToUintE(i interface{}) (uint, error) { - i = indirect(i) - - intv, ok := toInt(i) - if ok { - if intv < 0 { - return 0, errNegativeNotAllowed - } - return uint(intv), nil - } - - switch s := i.(type) { - case string: - v, err := strconv.ParseInt(trimZeroDecimal(s), 0, 0) - if err == nil { - if v < 0 { - return 0, errNegativeNotAllowed - } - return uint(v), nil - } - return 0, fmt.Errorf("unable to cast %#v of type %T to uint", i, i) - case json.Number: - return ToUintE(string(s)) - case int64: - if s < 0 { - return 0, errNegativeNotAllowed - } - return uint(s), nil - case int32: - if s < 0 { - return 0, errNegativeNotAllowed - } - return uint(s), nil - case int16: - if s < 0 { - return 0, errNegativeNotAllowed - } - return uint(s), nil - case int8: - if s < 0 { - return 0, errNegativeNotAllowed - } - return uint(s), nil - case uint: - return s, nil - case uint64: - return uint(s), nil - case uint32: - return uint(s), nil - case uint16: - return uint(s), nil - case uint8: - return uint(s), nil - case float64: - if s < 0 { - return 0, errNegativeNotAllowed - } - return uint(s), nil - case float32: - if s < 0 { - return 0, errNegativeNotAllowed - } - return uint(s), nil - case bool: - if s { - return 1, nil - } - return 0, nil - case nil: - return 0, nil - default: - return 0, fmt.Errorf("unable to cast %#v of type %T to uint", i, i) - } -} - -// ToUint64E casts an interface to a uint64 type. -func ToUint64E(i interface{}) (uint64, error) { - i = indirect(i) - - intv, ok := toInt(i) - if ok { - if intv < 0 { - return 0, errNegativeNotAllowed - } - return uint64(intv), nil - } - - switch s := i.(type) { - case string: - v, err := strconv.ParseInt(trimZeroDecimal(s), 0, 0) - if err == nil { - if v < 0 { - return 0, errNegativeNotAllowed - } - return uint64(v), nil - } - return 0, fmt.Errorf("unable to cast %#v of type %T to uint64", i, i) - case json.Number: - return ToUint64E(string(s)) - case int64: - if s < 0 { - return 0, errNegativeNotAllowed - } - return uint64(s), nil - case int32: - if s < 0 { - return 0, errNegativeNotAllowed - } - return uint64(s), nil - case int16: - if s < 0 { - return 0, errNegativeNotAllowed - } - return uint64(s), nil - case int8: - if s < 0 { - return 0, errNegativeNotAllowed - } - return uint64(s), nil - case uint: - return uint64(s), nil - case uint64: - return s, nil - case uint32: - return uint64(s), nil - case uint16: - return uint64(s), nil - case uint8: - return uint64(s), nil - case float32: - if s < 0 { - return 0, errNegativeNotAllowed - } - return uint64(s), nil - case float64: - if s < 0 { - return 0, errNegativeNotAllowed - } - return uint64(s), nil - case bool: - if s { - return 1, nil - } - return 0, nil - case nil: - return 0, nil - default: - return 0, fmt.Errorf("unable to cast %#v of type %T to uint64", i, i) - } -} - -// ToUint32E casts an interface to a uint32 type. 
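A sketch of the unsigned conversions above, which reject negative inputs via errNegativeNotAllowed:

    package main

    import (
        "fmt"

        "github.com/spf13/cast"
    )

    func main() {
        // Negative inputs cannot be represented as unsigned integers, so the E
        // variants fail and the plain ones fall back to 0.
        u, err := cast.ToUintE(-1)
        fmt.Println(u, err) // 0 unable to cast negative value

        fmt.Println(cast.ToUint64("42")) // 42
    }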
-func ToUint32E(i interface{}) (uint32, error) { - i = indirect(i) - - intv, ok := toInt(i) - if ok { - if intv < 0 { - return 0, errNegativeNotAllowed - } - return uint32(intv), nil - } - - switch s := i.(type) { - case string: - v, err := strconv.ParseInt(trimZeroDecimal(s), 0, 0) - if err == nil { - if v < 0 { - return 0, errNegativeNotAllowed - } - return uint32(v), nil - } - return 0, fmt.Errorf("unable to cast %#v of type %T to uint32", i, i) - case json.Number: - return ToUint32E(string(s)) - case int64: - if s < 0 { - return 0, errNegativeNotAllowed - } - return uint32(s), nil - case int32: - if s < 0 { - return 0, errNegativeNotAllowed - } - return uint32(s), nil - case int16: - if s < 0 { - return 0, errNegativeNotAllowed - } - return uint32(s), nil - case int8: - if s < 0 { - return 0, errNegativeNotAllowed - } - return uint32(s), nil - case uint: - return uint32(s), nil - case uint64: - return uint32(s), nil - case uint32: - return s, nil - case uint16: - return uint32(s), nil - case uint8: - return uint32(s), nil - case float64: - if s < 0 { - return 0, errNegativeNotAllowed - } - return uint32(s), nil - case float32: - if s < 0 { - return 0, errNegativeNotAllowed - } - return uint32(s), nil - case bool: - if s { - return 1, nil - } - return 0, nil - case nil: - return 0, nil - default: - return 0, fmt.Errorf("unable to cast %#v of type %T to uint32", i, i) - } -} - -// ToUint16E casts an interface to a uint16 type. -func ToUint16E(i interface{}) (uint16, error) { - i = indirect(i) - - intv, ok := toInt(i) - if ok { - if intv < 0 { - return 0, errNegativeNotAllowed - } - return uint16(intv), nil - } - - switch s := i.(type) { - case string: - v, err := strconv.ParseInt(trimZeroDecimal(s), 0, 0) - if err == nil { - if v < 0 { - return 0, errNegativeNotAllowed - } - return uint16(v), nil - } - return 0, fmt.Errorf("unable to cast %#v of type %T to uint16", i, i) - case json.Number: - return ToUint16E(string(s)) - case int64: - if s < 0 { - return 0, errNegativeNotAllowed - } - return uint16(s), nil - case int32: - if s < 0 { - return 0, errNegativeNotAllowed - } - return uint16(s), nil - case int16: - if s < 0 { - return 0, errNegativeNotAllowed - } - return uint16(s), nil - case int8: - if s < 0 { - return 0, errNegativeNotAllowed - } - return uint16(s), nil - case uint: - return uint16(s), nil - case uint64: - return uint16(s), nil - case uint32: - return uint16(s), nil - case uint16: - return s, nil - case uint8: - return uint16(s), nil - case float64: - if s < 0 { - return 0, errNegativeNotAllowed - } - return uint16(s), nil - case float32: - if s < 0 { - return 0, errNegativeNotAllowed - } - return uint16(s), nil - case bool: - if s { - return 1, nil - } - return 0, nil - case nil: - return 0, nil - default: - return 0, fmt.Errorf("unable to cast %#v of type %T to uint16", i, i) - } -} - -// ToUint8E casts an interface to a uint type. 
-func ToUint8E(i interface{}) (uint8, error) { - i = indirect(i) - - intv, ok := toInt(i) - if ok { - if intv < 0 { - return 0, errNegativeNotAllowed - } - return uint8(intv), nil - } - - switch s := i.(type) { - case string: - v, err := strconv.ParseInt(trimZeroDecimal(s), 0, 0) - if err == nil { - if v < 0 { - return 0, errNegativeNotAllowed - } - return uint8(v), nil - } - return 0, fmt.Errorf("unable to cast %#v of type %T to uint8", i, i) - case json.Number: - return ToUint8E(string(s)) - case int64: - if s < 0 { - return 0, errNegativeNotAllowed - } - return uint8(s), nil - case int32: - if s < 0 { - return 0, errNegativeNotAllowed - } - return uint8(s), nil - case int16: - if s < 0 { - return 0, errNegativeNotAllowed - } - return uint8(s), nil - case int8: - if s < 0 { - return 0, errNegativeNotAllowed - } - return uint8(s), nil - case uint: - return uint8(s), nil - case uint64: - return uint8(s), nil - case uint32: - return uint8(s), nil - case uint16: - return uint8(s), nil - case uint8: - return s, nil - case float64: - if s < 0 { - return 0, errNegativeNotAllowed - } - return uint8(s), nil - case float32: - if s < 0 { - return 0, errNegativeNotAllowed - } - return uint8(s), nil - case bool: - if s { - return 1, nil - } - return 0, nil - case nil: - return 0, nil - default: - return 0, fmt.Errorf("unable to cast %#v of type %T to uint8", i, i) - } -} - -// From html/template/content.go -// Copyright 2011 The Go Authors. All rights reserved. -// indirect returns the value, after dereferencing as many times -// as necessary to reach the base type (or nil). -func indirect(a interface{}) interface{} { - if a == nil { - return nil - } - if t := reflect.TypeOf(a); t.Kind() != reflect.Ptr { - // Avoid creating a reflect.Value if it's not a pointer. - return a - } - v := reflect.ValueOf(a) - for v.Kind() == reflect.Ptr && !v.IsNil() { - v = v.Elem() - } - return v.Interface() -} - -// From html/template/content.go -// Copyright 2011 The Go Authors. All rights reserved. -// indirectToStringerOrError returns the value, after dereferencing as many times -// as necessary to reach the base type (or nil) or an implementation of fmt.Stringer -// or error, -func indirectToStringerOrError(a interface{}) interface{} { - if a == nil { - return nil - } - - var errorType = reflect.TypeOf((*error)(nil)).Elem() - var fmtStringerType = reflect.TypeOf((*fmt.Stringer)(nil)).Elem() - - v := reflect.ValueOf(a) - for !v.Type().Implements(fmtStringerType) && !v.Type().Implements(errorType) && v.Kind() == reflect.Ptr && !v.IsNil() { - v = v.Elem() - } - return v.Interface() -} - -// ToStringE casts an interface to a string type. 
-func ToStringE(i interface{}) (string, error) { - i = indirectToStringerOrError(i) - - switch s := i.(type) { - case string: - return s, nil - case bool: - return strconv.FormatBool(s), nil - case float64: - return strconv.FormatFloat(s, 'f', -1, 64), nil - case float32: - return strconv.FormatFloat(float64(s), 'f', -1, 32), nil - case int: - return strconv.Itoa(s), nil - case int64: - return strconv.FormatInt(s, 10), nil - case int32: - return strconv.Itoa(int(s)), nil - case int16: - return strconv.FormatInt(int64(s), 10), nil - case int8: - return strconv.FormatInt(int64(s), 10), nil - case uint: - return strconv.FormatUint(uint64(s), 10), nil - case uint64: - return strconv.FormatUint(uint64(s), 10), nil - case uint32: - return strconv.FormatUint(uint64(s), 10), nil - case uint16: - return strconv.FormatUint(uint64(s), 10), nil - case uint8: - return strconv.FormatUint(uint64(s), 10), nil - case json.Number: - return s.String(), nil - case []byte: - return string(s), nil - case template.HTML: - return string(s), nil - case template.URL: - return string(s), nil - case template.JS: - return string(s), nil - case template.CSS: - return string(s), nil - case template.HTMLAttr: - return string(s), nil - case nil: - return "", nil - case fmt.Stringer: - return s.String(), nil - case error: - return s.Error(), nil - default: - return "", fmt.Errorf("unable to cast %#v of type %T to string", i, i) - } -} - -// ToStringMapStringE casts an interface to a map[string]string type. -func ToStringMapStringE(i interface{}) (map[string]string, error) { - var m = map[string]string{} - - switch v := i.(type) { - case map[string]string: - return v, nil - case map[string]interface{}: - for k, val := range v { - m[ToString(k)] = ToString(val) - } - return m, nil - case map[interface{}]string: - for k, val := range v { - m[ToString(k)] = ToString(val) - } - return m, nil - case map[interface{}]interface{}: - for k, val := range v { - m[ToString(k)] = ToString(val) - } - return m, nil - case string: - err := jsonStringToObject(v, &m) - return m, err - default: - return m, fmt.Errorf("unable to cast %#v of type %T to map[string]string", i, i) - } -} - -// ToStringMapStringSliceE casts an interface to a map[string][]string type. 
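A sketch of the string conversion implemented by ToStringE above, which covers primitives first and then falls back to []byte, fmt.Stringer and error values:

    package main

    import (
        "errors"
        "fmt"

        "github.com/spf13/cast"
    )

    func main() {
        fmt.Println(cast.ToString(8.31))               // 8.31
        fmt.Println(cast.ToString([]byte("raw")))      // raw
        fmt.Println(cast.ToString(errors.New("boom"))) // boom
    }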
-func ToStringMapStringSliceE(i interface{}) (map[string][]string, error) { - var m = map[string][]string{} - - switch v := i.(type) { - case map[string][]string: - return v, nil - case map[string][]interface{}: - for k, val := range v { - m[ToString(k)] = ToStringSlice(val) - } - return m, nil - case map[string]string: - for k, val := range v { - m[ToString(k)] = []string{val} - } - case map[string]interface{}: - for k, val := range v { - switch vt := val.(type) { - case []interface{}: - m[ToString(k)] = ToStringSlice(vt) - case []string: - m[ToString(k)] = vt - default: - m[ToString(k)] = []string{ToString(val)} - } - } - return m, nil - case map[interface{}][]string: - for k, val := range v { - m[ToString(k)] = ToStringSlice(val) - } - return m, nil - case map[interface{}]string: - for k, val := range v { - m[ToString(k)] = ToStringSlice(val) - } - return m, nil - case map[interface{}][]interface{}: - for k, val := range v { - m[ToString(k)] = ToStringSlice(val) - } - return m, nil - case map[interface{}]interface{}: - for k, val := range v { - key, err := ToStringE(k) - if err != nil { - return m, fmt.Errorf("unable to cast %#v of type %T to map[string][]string", i, i) - } - value, err := ToStringSliceE(val) - if err != nil { - return m, fmt.Errorf("unable to cast %#v of type %T to map[string][]string", i, i) - } - m[key] = value - } - case string: - err := jsonStringToObject(v, &m) - return m, err - default: - return m, fmt.Errorf("unable to cast %#v of type %T to map[string][]string", i, i) - } - return m, nil -} - -// ToStringMapBoolE casts an interface to a map[string]bool type. -func ToStringMapBoolE(i interface{}) (map[string]bool, error) { - var m = map[string]bool{} - - switch v := i.(type) { - case map[interface{}]interface{}: - for k, val := range v { - m[ToString(k)] = ToBool(val) - } - return m, nil - case map[string]interface{}: - for k, val := range v { - m[ToString(k)] = ToBool(val) - } - return m, nil - case map[string]bool: - return v, nil - case string: - err := jsonStringToObject(v, &m) - return m, err - default: - return m, fmt.Errorf("unable to cast %#v of type %T to map[string]bool", i, i) - } -} - -// ToStringMapE casts an interface to a map[string]interface{} type. -func ToStringMapE(i interface{}) (map[string]interface{}, error) { - var m = map[string]interface{}{} - - switch v := i.(type) { - case map[interface{}]interface{}: - for k, val := range v { - m[ToString(k)] = val - } - return m, nil - case map[string]interface{}: - return v, nil - case string: - err := jsonStringToObject(v, &m) - return m, err - default: - return m, fmt.Errorf("unable to cast %#v of type %T to map[string]interface{}", i, i) - } -} - -// ToStringMapIntE casts an interface to a map[string]int{} type. 
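A sketch of the map conversions above, which accept maps with interface{} keys or values as well as a JSON string that is unmarshalled into the target map type:

    package main

    import (
        "fmt"

        "github.com/spf13/cast"
    )

    func main() {
        flags := cast.ToStringMapBool(`{"debug": true, "trace": false}`)
        fmt.Println(flags["debug"], flags["trace"]) // true false

        conf := cast.ToStringMapString(map[interface{}]interface{}{"port": 8080})
        fmt.Println(conf["port"]) // 8080
    }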
-func ToStringMapIntE(i interface{}) (map[string]int, error) { - var m = map[string]int{} - if i == nil { - return m, fmt.Errorf("unable to cast %#v of type %T to map[string]int", i, i) - } - - switch v := i.(type) { - case map[interface{}]interface{}: - for k, val := range v { - m[ToString(k)] = ToInt(val) - } - return m, nil - case map[string]interface{}: - for k, val := range v { - m[k] = ToInt(val) - } - return m, nil - case map[string]int: - return v, nil - case string: - err := jsonStringToObject(v, &m) - return m, err - } - - if reflect.TypeOf(i).Kind() != reflect.Map { - return m, fmt.Errorf("unable to cast %#v of type %T to map[string]int", i, i) - } - - mVal := reflect.ValueOf(m) - v := reflect.ValueOf(i) - for _, keyVal := range v.MapKeys() { - val, err := ToIntE(v.MapIndex(keyVal).Interface()) - if err != nil { - return m, fmt.Errorf("unable to cast %#v of type %T to map[string]int", i, i) - } - mVal.SetMapIndex(keyVal, reflect.ValueOf(val)) - } - return m, nil -} - -// ToStringMapInt64E casts an interface to a map[string]int64{} type. -func ToStringMapInt64E(i interface{}) (map[string]int64, error) { - var m = map[string]int64{} - if i == nil { - return m, fmt.Errorf("unable to cast %#v of type %T to map[string]int64", i, i) - } - - switch v := i.(type) { - case map[interface{}]interface{}: - for k, val := range v { - m[ToString(k)] = ToInt64(val) - } - return m, nil - case map[string]interface{}: - for k, val := range v { - m[k] = ToInt64(val) - } - return m, nil - case map[string]int64: - return v, nil - case string: - err := jsonStringToObject(v, &m) - return m, err - } - - if reflect.TypeOf(i).Kind() != reflect.Map { - return m, fmt.Errorf("unable to cast %#v of type %T to map[string]int64", i, i) - } - mVal := reflect.ValueOf(m) - v := reflect.ValueOf(i) - for _, keyVal := range v.MapKeys() { - val, err := ToInt64E(v.MapIndex(keyVal).Interface()) - if err != nil { - return m, fmt.Errorf("unable to cast %#v of type %T to map[string]int64", i, i) - } - mVal.SetMapIndex(keyVal, reflect.ValueOf(val)) - } - return m, nil -} - -// ToSliceE casts an interface to a []interface{} type. -func ToSliceE(i interface{}) ([]interface{}, error) { - var s []interface{} - - switch v := i.(type) { - case []interface{}: - return append(s, v...), nil - case []map[string]interface{}: - for _, u := range v { - s = append(s, u) - } - return s, nil - default: - return s, fmt.Errorf("unable to cast %#v of type %T to []interface{}", i, i) - } -} - -// ToBoolSliceE casts an interface to a []bool type. -func ToBoolSliceE(i interface{}) ([]bool, error) { - if i == nil { - return []bool{}, fmt.Errorf("unable to cast %#v of type %T to []bool", i, i) - } - - switch v := i.(type) { - case []bool: - return v, nil - } - - kind := reflect.TypeOf(i).Kind() - switch kind { - case reflect.Slice, reflect.Array: - s := reflect.ValueOf(i) - a := make([]bool, s.Len()) - for j := 0; j < s.Len(); j++ { - val, err := ToBoolE(s.Index(j).Interface()) - if err != nil { - return []bool{}, fmt.Errorf("unable to cast %#v of type %T to []bool", i, i) - } - a[j] = val - } - return a, nil - default: - return []bool{}, fmt.Errorf("unable to cast %#v of type %T to []bool", i, i) - } -} - -// ToStringSliceE casts an interface to a []string type. 
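A sketch of the slice conversions above: the concrete slice type is returned as-is, and other slice or array kinds are converted element by element via reflection:

    package main

    import (
        "fmt"

        "github.com/spf13/cast"
    )

    func main() {
        fmt.Println(cast.ToIntSlice([]string{"1", "2", "3"}))      // [1 2 3]
        fmt.Println(cast.ToBoolSlice([]interface{}{1, 0, "true"})) // [true false true]
    }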
-func ToStringSliceE(i interface{}) ([]string, error) { - var a []string - - switch v := i.(type) { - case []interface{}: - for _, u := range v { - a = append(a, ToString(u)) - } - return a, nil - case []string: - return v, nil - case []int8: - for _, u := range v { - a = append(a, ToString(u)) - } - return a, nil - case []int: - for _, u := range v { - a = append(a, ToString(u)) - } - return a, nil - case []int32: - for _, u := range v { - a = append(a, ToString(u)) - } - return a, nil - case []int64: - for _, u := range v { - a = append(a, ToString(u)) - } - return a, nil - case []float32: - for _, u := range v { - a = append(a, ToString(u)) - } - return a, nil - case []float64: - for _, u := range v { - a = append(a, ToString(u)) - } - return a, nil - case string: - return strings.Fields(v), nil - case []error: - for _, err := range i.([]error) { - a = append(a, err.Error()) - } - return a, nil - case interface{}: - str, err := ToStringE(v) - if err != nil { - return a, fmt.Errorf("unable to cast %#v of type %T to []string", i, i) - } - return []string{str}, nil - default: - return a, fmt.Errorf("unable to cast %#v of type %T to []string", i, i) - } -} - -// ToIntSliceE casts an interface to a []int type. -func ToIntSliceE(i interface{}) ([]int, error) { - if i == nil { - return []int{}, fmt.Errorf("unable to cast %#v of type %T to []int", i, i) - } - - switch v := i.(type) { - case []int: - return v, nil - } - - kind := reflect.TypeOf(i).Kind() - switch kind { - case reflect.Slice, reflect.Array: - s := reflect.ValueOf(i) - a := make([]int, s.Len()) - for j := 0; j < s.Len(); j++ { - val, err := ToIntE(s.Index(j).Interface()) - if err != nil { - return []int{}, fmt.Errorf("unable to cast %#v of type %T to []int", i, i) - } - a[j] = val - } - return a, nil - default: - return []int{}, fmt.Errorf("unable to cast %#v of type %T to []int", i, i) - } -} - -// ToDurationSliceE casts an interface to a []time.Duration type. -func ToDurationSliceE(i interface{}) ([]time.Duration, error) { - if i == nil { - return []time.Duration{}, fmt.Errorf("unable to cast %#v of type %T to []time.Duration", i, i) - } - - switch v := i.(type) { - case []time.Duration: - return v, nil - } - - kind := reflect.TypeOf(i).Kind() - switch kind { - case reflect.Slice, reflect.Array: - s := reflect.ValueOf(i) - a := make([]time.Duration, s.Len()) - for j := 0; j < s.Len(); j++ { - val, err := ToDurationE(s.Index(j).Interface()) - if err != nil { - return []time.Duration{}, fmt.Errorf("unable to cast %#v of type %T to []time.Duration", i, i) - } - a[j] = val - } - return a, nil - default: - return []time.Duration{}, fmt.Errorf("unable to cast %#v of type %T to []time.Duration", i, i) - } -} - -// StringToDate attempts to parse a string into a time.Time type using a -// predefined list of formats. If no suitable format is found, an error is -// returned. -func StringToDate(s string) (time.Time, error) { - return parseDateWith(s, time.UTC, timeFormats) -} - -// StringToDateInDefaultLocation casts an empty interface to a time.Time, -// interpreting inputs without a timezone to be in the given location, -// or the local timezone if nil. 
-func StringToDateInDefaultLocation(s string, location *time.Location) (time.Time, error) { - return parseDateWith(s, location, timeFormats) -} - -type timeFormatType int - -const ( - timeFormatNoTimezone timeFormatType = iota - timeFormatNamedTimezone - timeFormatNumericTimezone - timeFormatNumericAndNamedTimezone - timeFormatTimeOnly -) - -type timeFormat struct { - format string - typ timeFormatType -} - -func (f timeFormat) hasTimezone() bool { - // We don't include the formats with only named timezones, see - // https://github.com/golang/go/issues/19694#issuecomment-289103522 - return f.typ >= timeFormatNumericTimezone && f.typ <= timeFormatNumericAndNamedTimezone -} - -var ( - timeFormats = []timeFormat{ - // Keep common formats at the top. - {"2006-01-02", timeFormatNoTimezone}, - {time.RFC3339, timeFormatNumericTimezone}, - {"2006-01-02T15:04:05", timeFormatNoTimezone}, // iso8601 without timezone - {time.RFC1123Z, timeFormatNumericTimezone}, - {time.RFC1123, timeFormatNamedTimezone}, - {time.RFC822Z, timeFormatNumericTimezone}, - {time.RFC822, timeFormatNamedTimezone}, - {time.RFC850, timeFormatNamedTimezone}, - {"2006-01-02 15:04:05.999999999 -0700 MST", timeFormatNumericAndNamedTimezone}, // Time.String() - {"2006-01-02T15:04:05-0700", timeFormatNumericTimezone}, // RFC3339 without timezone hh:mm colon - {"2006-01-02 15:04:05Z0700", timeFormatNumericTimezone}, // RFC3339 without T or timezone hh:mm colon - {"2006-01-02 15:04:05", timeFormatNoTimezone}, - {time.ANSIC, timeFormatNoTimezone}, - {time.UnixDate, timeFormatNamedTimezone}, - {time.RubyDate, timeFormatNumericTimezone}, - {"2006-01-02 15:04:05Z07:00", timeFormatNumericTimezone}, - {"02 Jan 2006", timeFormatNoTimezone}, - {"2006-01-02 15:04:05 -07:00", timeFormatNumericTimezone}, - {"2006-01-02 15:04:05 -0700", timeFormatNumericTimezone}, - {time.Kitchen, timeFormatTimeOnly}, - {time.Stamp, timeFormatTimeOnly}, - {time.StampMilli, timeFormatTimeOnly}, - {time.StampMicro, timeFormatTimeOnly}, - {time.StampNano, timeFormatTimeOnly}, - } -) - -func parseDateWith(s string, location *time.Location, formats []timeFormat) (d time.Time, e error) { - - for _, format := range formats { - if d, e = time.Parse(format.format, s); e == nil { - - // Some time formats have a zone name, but no offset, so it gets - // put in that zone name (not the default one passed in to us), but - // without that zone's offset. So set the location manually. - if format.typ <= timeFormatNamedTimezone { - if location == nil { - location = time.Local - } - year, month, day := d.Date() - hour, min, sec := d.Clock() - d = time.Date(year, month, day, hour, min, sec, d.Nanosecond(), location) - } - - return - } - } - return d, fmt.Errorf("unable to parse date: %s", s) -} - -// jsonStringToObject attempts to unmarshall a string as JSON into -// the object passed as pointer. -func jsonStringToObject(s string, v interface{}) error { - data := []byte(s) - return json.Unmarshal(data, v) -} - -// toInt returns the int value of v if v or v's underlying type -// is an int. -// Note that this will return false for int64 etc. types. 
-func toInt(v interface{}) (int, bool) { - switch v := v.(type) { - case int: - return v, true - case time.Weekday: - return int(v), true - case time.Month: - return int(v), true - default: - return 0, false - } -} - -func trimZeroDecimal(s string) string { - var foundZero bool - for i := len(s); i > 0; i-- { - switch s[i-1] { - case '.': - if foundZero { - return s[:i-1] - } - case '0': - foundZero = true - default: - return s - } - } - return s -} diff --git a/vendor/github.com/spf13/cast/timeformattype_string.go b/vendor/github.com/spf13/cast/timeformattype_string.go deleted file mode 100644 index 1524fc82c..000000000 --- a/vendor/github.com/spf13/cast/timeformattype_string.go +++ /dev/null @@ -1,27 +0,0 @@ -// Code generated by "stringer -type timeFormatType"; DO NOT EDIT. - -package cast - -import "strconv" - -func _() { - // An "invalid array index" compiler error signifies that the constant values have changed. - // Re-run the stringer command to generate them again. - var x [1]struct{} - _ = x[timeFormatNoTimezone-0] - _ = x[timeFormatNamedTimezone-1] - _ = x[timeFormatNumericTimezone-2] - _ = x[timeFormatNumericAndNamedTimezone-3] - _ = x[timeFormatTimeOnly-4] -} - -const _timeFormatType_name = "timeFormatNoTimezonetimeFormatNamedTimezonetimeFormatNumericTimezonetimeFormatNumericAndNamedTimezonetimeFormatTimeOnly" - -var _timeFormatType_index = [...]uint8{0, 20, 43, 68, 101, 119} - -func (i timeFormatType) String() string { - if i < 0 || i >= timeFormatType(len(_timeFormatType_index)-1) { - return "timeFormatType(" + strconv.FormatInt(int64(i), 10) + ")" - } - return _timeFormatType_name[_timeFormatType_index[i]:_timeFormatType_index[i+1]] -} diff --git a/vendor/github.com/spf13/viper/.editorconfig b/vendor/github.com/spf13/viper/.editorconfig deleted file mode 100644 index 1f664d13a..000000000 --- a/vendor/github.com/spf13/viper/.editorconfig +++ /dev/null @@ -1,18 +0,0 @@ -root = true - -[*] -charset = utf-8 -end_of_line = lf -indent_size = 4 -indent_style = space -insert_final_newline = true -trim_trailing_whitespace = true - -[*.go] -indent_style = tab - -[{Makefile,*.mk}] -indent_style = tab - -[*.nix] -indent_size = 2 diff --git a/vendor/github.com/spf13/viper/.envrc b/vendor/github.com/spf13/viper/.envrc deleted file mode 100644 index 3ce7171a3..000000000 --- a/vendor/github.com/spf13/viper/.envrc +++ /dev/null @@ -1,4 +0,0 @@ -if ! has nix_direnv_version || ! nix_direnv_version 2.3.0; then - source_url "https://raw.githubusercontent.com/nix-community/nix-direnv/2.3.0/direnvrc" "sha256-Dmd+j63L84wuzgyjITIfSxSD57Tx7v51DMxVZOsiUD8=" -fi -use flake . --impure diff --git a/vendor/github.com/spf13/viper/.gitignore b/vendor/github.com/spf13/viper/.gitignore deleted file mode 100644 index f1bbd4280..000000000 --- a/vendor/github.com/spf13/viper/.gitignore +++ /dev/null @@ -1,8 +0,0 @@ -/.devenv/ -/.direnv/ -/.idea/ -/.pre-commit-config.yaml -/bin/ -/build/ -/var/ -/vendor/ diff --git a/vendor/github.com/spf13/viper/.golangci.yaml b/vendor/github.com/spf13/viper/.golangci.yaml deleted file mode 100644 index 1faeae42c..000000000 --- a/vendor/github.com/spf13/viper/.golangci.yaml +++ /dev/null @@ -1,108 +0,0 @@ -run: - timeout: 5m - -linters-settings: - gci: - sections: - - standard - - default - - prefix(github.com/spf13/viper) - gocritic: - # Enable multiple checks by tags. See "Tags" section in https://github.com/go-critic/go-critic#usage. 
- enabled-tags: - - diagnostic - - experimental - - opinionated - - style - disabled-checks: - - importShadow - - unnamedResult - golint: - min-confidence: 0 - goimports: - local-prefixes: github.com/spf13/viper - -linters: - disable-all: true - enable: - - bodyclose - - dogsled - - dupl - - durationcheck - - exhaustive - - exportloopref - - gci - - gocritic - - godot - - gofmt - - gofumpt - - goimports - - gomoddirectives - - goprintffuncname - - govet - - importas - - ineffassign - - makezero - - misspell - - nakedret - - nilerr - - noctx - - nolintlint - - prealloc - - predeclared - - revive - - rowserrcheck - - sqlclosecheck - - staticcheck - - stylecheck - - tparallel - - typecheck - - unconvert - - unparam - - unused - - wastedassign - - whitespace - - # fixme - # - cyclop - # - errcheck - # - errorlint - # - exhaustivestruct - # - forbidigo - # - forcetypeassert - # - gochecknoglobals - # - gochecknoinits - # - gocognit - # - goconst - # - gocyclo - # - gosec - # - gosimple - # - ifshort - # - lll - # - nlreturn - # - paralleltest - # - scopelint - # - thelper - # - wrapcheck - - # unused - # - depguard - # - goheader - # - gomodguard - - # deprecated - # - deadcode - # - structcheck - # - varcheck - - # don't enable: - # - asciicheck - # - funlen - # - godox - # - goerr113 - # - gomnd - # - interfacer - # - maligned - # - nestif - # - testpackage - # - wsl diff --git a/vendor/github.com/spf13/viper/.yamlignore b/vendor/github.com/spf13/viper/.yamlignore deleted file mode 100644 index c04c4dead..000000000 --- a/vendor/github.com/spf13/viper/.yamlignore +++ /dev/null @@ -1,2 +0,0 @@ -# TODO: FIXME -/.github/ diff --git a/vendor/github.com/spf13/viper/.yamllint.yaml b/vendor/github.com/spf13/viper/.yamllint.yaml deleted file mode 100644 index bac19ce18..000000000 --- a/vendor/github.com/spf13/viper/.yamllint.yaml +++ /dev/null @@ -1,6 +0,0 @@ -ignore-from-file: [.gitignore, .yamlignore] - -extends: default - -rules: - line-length: disable diff --git a/vendor/github.com/spf13/viper/LICENSE b/vendor/github.com/spf13/viper/LICENSE deleted file mode 100644 index 4527efb9c..000000000 --- a/vendor/github.com/spf13/viper/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -The MIT License (MIT) - -Copyright (c) 2014 Steve Francia - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. 
\ No newline at end of file diff --git a/vendor/github.com/spf13/viper/Makefile b/vendor/github.com/spf13/viper/Makefile deleted file mode 100644 index a77b9c81c..000000000 --- a/vendor/github.com/spf13/viper/Makefile +++ /dev/null @@ -1,87 +0,0 @@ -# A Self-Documenting Makefile: http://marmelab.com/blog/2016/02/29/auto-documented-makefile.html - -OS = $(shell uname | tr A-Z a-z) -export PATH := $(abspath bin/):${PATH} - -# Build variables -BUILD_DIR ?= build -export CGO_ENABLED ?= 0 -export GOOS = $(shell go env GOOS) -ifeq (${VERBOSE}, 1) -ifeq ($(filter -v,${GOARGS}),) - GOARGS += -v -endif -TEST_FORMAT = short-verbose -endif - -# Dependency versions -GOTESTSUM_VERSION = 1.9.0 -GOLANGCI_VERSION = 1.53.3 - -# Add the ability to override some variables -# Use with care --include override.mk - -.PHONY: clear -clear: ## Clear the working area and the project - rm -rf bin/ - -.PHONY: check -check: test lint ## Run tests and linters - - -TEST_PKGS ?= ./... -.PHONY: test -test: TEST_FORMAT ?= short -test: SHELL = /bin/bash -test: export CGO_ENABLED=1 -test: bin/gotestsum ## Run tests - @mkdir -p ${BUILD_DIR} - bin/gotestsum --no-summary=skipped --junitfile ${BUILD_DIR}/coverage.xml --format ${TEST_FORMAT} -- -race -coverprofile=${BUILD_DIR}/coverage.txt -covermode=atomic $(filter-out -v,${GOARGS}) $(if ${TEST_PKGS},${TEST_PKGS},./...) - -.PHONY: lint -lint: lint-go lint-yaml -lint: ## Run linters - -.PHONY: lint-go -lint-go: - golangci-lint run $(if ${CI},--out-format github-actions,) - -.PHONY: lint-yaml -lint-yaml: - yamllint $(if ${CI},-f github,) --no-warnings . - -.PHONY: fmt -fmt: ## Format code - golangci-lint run --fix - -deps: bin/golangci-lint bin/gotestsum yamllint -deps: ## Install dependencies - -bin/gotestsum: - @mkdir -p bin - curl -L https://github.com/gotestyourself/gotestsum/releases/download/v${GOTESTSUM_VERSION}/gotestsum_${GOTESTSUM_VERSION}_${OS}_amd64.tar.gz | tar -zOxf - gotestsum > ./bin/gotestsum && chmod +x ./bin/gotestsum - -bin/golangci-lint: - @mkdir -p bin - curl -sSfL https://raw.githubusercontent.com/golangci/golangci-lint/master/install.sh | bash -s -- v${GOLANGCI_VERSION} - -.PHONY: yamllint -yamllint: - pip3 install --user yamllint - -# Add custom targets here --include custom.mk - -.PHONY: list -list: ## List all make targets - @${MAKE} -pRrn : -f $(MAKEFILE_LIST) 2>/dev/null | awk -v RS= -F: '/^# File/,/^# Finished Make data base/ {if ($$1 !~ "^[#.]") {print $$1}}' | egrep -v -e '^[^[:alnum:]]' -e '^$@$$' | sort - -.PHONY: help -.DEFAULT_GOAL := help -help: - @grep -h -E '^[a-zA-Z_-]+:.*?## .*$$' $(MAKEFILE_LIST) | awk 'BEGIN {FS = ":.*?## "}; {printf "\033[36m%-30s\033[0m %s\n", $$1, $$2}' - -# Variable outputting/exporting rules -var-%: ; @echo $($*) -varexport-%: ; @echo $*=$($*) diff --git a/vendor/github.com/spf13/viper/README.md b/vendor/github.com/spf13/viper/README.md deleted file mode 100644 index b96180b3b..000000000 --- a/vendor/github.com/spf13/viper/README.md +++ /dev/null @@ -1,928 +0,0 @@ -> ## Viper v2 feedback -> Viper is heading towards v2 and we would love to hear what _**you**_ would like to see in it. 
Share your thoughts here: https://forms.gle/R6faU74qPRPAzchZ9 -> -> **Thank you!** - -![Viper](.github/logo.png?raw=true) - -[![Mentioned in Awesome Go](https://awesome.re/mentioned-badge-flat.svg)](https://github.com/avelino/awesome-go#configuration) -[![run on repl.it](https://repl.it/badge/github/sagikazarmark/Viper-example)](https://repl.it/@sagikazarmark/Viper-example#main.go) - -[![GitHub Workflow Status](https://img.shields.io/github/actions/workflow/status/spf13/viper/ci.yaml?branch=master&style=flat-square)](https://github.com/spf13/viper/actions?query=workflow%3ACI) -[![Join the chat at https://gitter.im/spf13/viper](https://badges.gitter.im/Join%20Chat.svg)](https://gitter.im/spf13/viper?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge) -[![Go Report Card](https://goreportcard.com/badge/github.com/spf13/viper?style=flat-square)](https://goreportcard.com/report/github.com/spf13/viper) -![Go Version](https://img.shields.io/badge/go%20version-%3E=1.19-61CFDD.svg?style=flat-square) -[![PkgGoDev](https://pkg.go.dev/badge/mod/github.com/spf13/viper)](https://pkg.go.dev/mod/github.com/spf13/viper) - -**Go configuration with fangs!** - -Many Go projects are built using Viper including: - -* [Hugo](http://gohugo.io) -* [EMC RexRay](http://rexray.readthedocs.org/en/stable/) -* [Imgur’s Incus](https://github.com/Imgur/incus) -* [Nanobox](https://github.com/nanobox-io/nanobox)/[Nanopack](https://github.com/nanopack) -* [Docker Notary](https://github.com/docker/Notary) -* [BloomApi](https://www.bloomapi.com/) -* [doctl](https://github.com/digitalocean/doctl) -* [Clairctl](https://github.com/jgsqware/clairctl) -* [Mercure](https://mercure.rocks) -* [Meshery](https://github.com/meshery/meshery) -* [Bearer](https://github.com/bearer/bearer) -* [Coder](https://github.com/coder/coder) -* [Vitess](https://vitess.io/) - - -## Install - -```shell -go get github.com/spf13/viper -``` - -**Note:** Viper uses [Go Modules](https://github.com/golang/go/wiki/Modules) to manage dependencies. - - -## What is Viper? - -Viper is a complete configuration solution for Go applications including [12-Factor apps](https://12factor.net/#the_twelve_factors). -It is designed to work within an application, and can handle all types of configuration needs -and formats. It supports: - -* setting defaults -* reading from JSON, TOML, YAML, HCL, envfile and Java properties config files -* live watching and re-reading of config files (optional) -* reading from environment variables -* reading from remote config systems (etcd or Consul), and watching changes -* reading from command line flags -* reading from buffer -* setting explicit values - -Viper can be thought of as a registry for all of your applications configuration needs. - - -## Why Viper? - -When building a modern application, you don’t want to worry about -configuration file formats; you want to focus on building awesome software. -Viper is here to help with that. - -Viper does the following for you: - -1. Find, load, and unmarshal a configuration file in JSON, TOML, YAML, HCL, INI, envfile or Java properties formats. -2. Provide a mechanism to set default values for your different configuration options. -3. Provide a mechanism to set override values for options specified through command line flags. -4. Provide an alias system to easily rename parameters without breaking existing code. -5. Make it easy to tell the difference between when a user has provided a command line or config file which is the same as the default. 
- -Viper uses the following precedence order. Each item takes precedence over the item below it: - - * explicit call to `Set` - * flag - * env - * config - * key/value store - * default - -**Important:** Viper configuration keys are case insensitive. -There are ongoing discussions about making that optional. - - -## Putting Values into Viper - -### Establishing Defaults - -A good configuration system will support default values. A default value is not -required for a key, but it’s useful in the event that a key hasn't been set via -config file, environment variable, remote configuration or flag. - -Examples: - -```go -viper.SetDefault("ContentDir", "content") -viper.SetDefault("LayoutDir", "layouts") -viper.SetDefault("Taxonomies", map[string]string{"tag": "tags", "category": "categories"}) -``` - -### Reading Config Files - -Viper requires minimal configuration so it knows where to look for config files. -Viper supports JSON, TOML, YAML, HCL, INI, envfile and Java Properties files. Viper can search multiple paths, but -currently a single Viper instance only supports a single configuration file. -Viper does not default to any configuration search paths leaving defaults decision -to an application. - -Here is an example of how to use Viper to search for and read a configuration file. -None of the specific paths are required, but at least one path should be provided -where a configuration file is expected. - -```go -viper.SetConfigName("config") // name of config file (without extension) -viper.SetConfigType("yaml") // REQUIRED if the config file does not have the extension in the name -viper.AddConfigPath("/etc/appname/") // path to look for the config file in -viper.AddConfigPath("$HOME/.appname") // call multiple times to add many search paths -viper.AddConfigPath(".") // optionally look for config in the working directory -err := viper.ReadInConfig() // Find and read the config file -if err != nil { // Handle errors reading the config file - panic(fmt.Errorf("fatal error config file: %w", err)) -} -``` - -You can handle the specific case where no config file is found like this: - -```go -if err := viper.ReadInConfig(); err != nil { - if _, ok := err.(viper.ConfigFileNotFoundError); ok { - // Config file not found; ignore error if desired - } else { - // Config file was found but another error was produced - } -} - -// Config file found and successfully parsed -``` - -*NOTE [since 1.6]:* You can also have a file without an extension and specify the format programmatically. For those configuration files that lie in the home of the user without any extension like `.bashrc` - -### Writing Config Files - -Reading from config files is useful, but at times you want to store all modifications made at run time. -For that, a bunch of commands are available, each with its own purpose: - -* WriteConfig - writes the current viper configuration to the predefined path, if exists. Errors if no predefined path. Will overwrite the current config file, if it exists. -* SafeWriteConfig - writes the current viper configuration to the predefined path. Errors if no predefined path. Will not overwrite the current config file, if it exists. -* WriteConfigAs - writes the current viper configuration to the given filepath. Will overwrite the given file, if it exists. -* SafeWriteConfigAs - writes the current viper configuration to the given filepath. Will not overwrite the given file, if it exists. 
- -As a rule of the thumb, everything marked with safe won't overwrite any file, but just create if not existent, whilst the default behavior is to create or truncate. - -A small examples section: - -```go -viper.WriteConfig() // writes current config to predefined path set by 'viper.AddConfigPath()' and 'viper.SetConfigName' -viper.SafeWriteConfig() -viper.WriteConfigAs("/path/to/my/.config") -viper.SafeWriteConfigAs("/path/to/my/.config") // will error since it has already been written -viper.SafeWriteConfigAs("/path/to/my/.other_config") -``` - -### Watching and re-reading config files - -Viper supports the ability to have your application live read a config file while running. - -Gone are the days of needing to restart a server to have a config take effect, -viper powered applications can read an update to a config file while running and -not miss a beat. - -Simply tell the viper instance to watchConfig. -Optionally you can provide a function for Viper to run each time a change occurs. - -**Make sure you add all of the configPaths prior to calling `WatchConfig()`** - -```go -viper.OnConfigChange(func(e fsnotify.Event) { - fmt.Println("Config file changed:", e.Name) -}) -viper.WatchConfig() -``` - -### Reading Config from io.Reader - -Viper predefines many configuration sources such as files, environment -variables, flags, and remote K/V store, but you are not bound to them. You can -also implement your own required configuration source and feed it to viper. - -```go -viper.SetConfigType("yaml") // or viper.SetConfigType("YAML") - -// any approach to require this configuration into your program. -var yamlExample = []byte(` -Hacker: true -name: steve -hobbies: -- skateboarding -- snowboarding -- go -clothing: - jacket: leather - trousers: denim -age: 35 -eyes : brown -beard: true -`) - -viper.ReadConfig(bytes.NewBuffer(yamlExample)) - -viper.Get("name") // this would be "steve" -``` - -### Setting Overrides - -These could be from a command line flag, or from your own application logic. - -```go -viper.Set("Verbose", true) -viper.Set("LogFile", LogFile) -viper.Set("host.port", 5899) // set subset -``` - -### Registering and Using Aliases - -Aliases permit a single value to be referenced by multiple keys - -```go -viper.RegisterAlias("loud", "Verbose") - -viper.Set("verbose", true) // same result as next line -viper.Set("loud", true) // same result as prior line - -viper.GetBool("loud") // true -viper.GetBool("verbose") // true -``` - -### Working with Environment Variables - -Viper has full support for environment variables. This enables 12 factor -applications out of the box. There are five methods that exist to aid working -with ENV: - - * `AutomaticEnv()` - * `BindEnv(string...) : error` - * `SetEnvPrefix(string)` - * `SetEnvKeyReplacer(string...) *strings.Replacer` - * `AllowEmptyEnv(bool)` - -_When working with ENV variables, it’s important to recognize that Viper -treats ENV variables as case sensitive._ - -Viper provides a mechanism to try to ensure that ENV variables are unique. By -using `SetEnvPrefix`, you can tell Viper to use a prefix while reading from -the environment variables. Both `BindEnv` and `AutomaticEnv` will use this -prefix. - -`BindEnv` takes one or more parameters. The first parameter is the key name, the -rest are the name of the environment variables to bind to this key. If more than -one are provided, they will take precedence in the specified order. The name of -the environment variable is case sensitive. 
If the ENV variable name is not provided, then -Viper will automatically assume that the ENV variable matches the following format: prefix + "_" + the key name in ALL CAPS. When you explicitly provide the ENV variable name (the second parameter), -it **does not** automatically add the prefix. For example if the second parameter is "id", -Viper will look for the ENV variable "ID". - -One important thing to recognize when working with ENV variables is that the -value will be read each time it is accessed. Viper does not fix the value when -the `BindEnv` is called. - -`AutomaticEnv` is a powerful helper especially when combined with -`SetEnvPrefix`. When called, Viper will check for an environment variable any -time a `viper.Get` request is made. It will apply the following rules. It will -check for an environment variable with a name matching the key uppercased and -prefixed with the `EnvPrefix` if set. - -`SetEnvKeyReplacer` allows you to use a `strings.Replacer` object to rewrite Env -keys to an extent. This is useful if you want to use `-` or something in your -`Get()` calls, but want your environmental variables to use `_` delimiters. An -example of using it can be found in `viper_test.go`. - -Alternatively, you can use `EnvKeyReplacer` with `NewWithOptions` factory function. -Unlike `SetEnvKeyReplacer`, it accepts a `StringReplacer` interface allowing you to write custom string replacing logic. - -By default empty environment variables are considered unset and will fall back to -the next configuration source. To treat empty environment variables as set, use -the `AllowEmptyEnv` method. - -#### Env example - -```go -SetEnvPrefix("spf") // will be uppercased automatically -BindEnv("id") - -os.Setenv("SPF_ID", "13") // typically done outside of the app - -id := Get("id") // 13 -``` - -### Working with Flags - -Viper has the ability to bind to flags. Specifically, Viper supports `Pflags` -as used in the [Cobra](https://github.com/spf13/cobra) library. - -Like `BindEnv`, the value is not set when the binding method is called, but when -it is accessed. This means you can bind as early as you want, even in an -`init()` function. - -For individual flags, the `BindPFlag()` method provides this functionality. - -Example: - -```go -serverCmd.Flags().Int("port", 1138, "Port to run Application server on") -viper.BindPFlag("port", serverCmd.Flags().Lookup("port")) -``` - -You can also bind an existing set of pflags (pflag.FlagSet): - -Example: - -```go -pflag.Int("flagname", 1234, "help message for flagname") - -pflag.Parse() -viper.BindPFlags(pflag.CommandLine) - -i := viper.GetInt("flagname") // retrieve values from viper instead of pflag -``` - -The use of [pflag](https://github.com/spf13/pflag/) in Viper does not preclude -the use of other packages that use the [flag](https://golang.org/pkg/flag/) -package from the standard library. The pflag package can handle the flags -defined for the flag package by importing these flags. This is accomplished -by a calling a convenience function provided by the pflag package called -AddGoFlagSet(). - -Example: - -```go -package main - -import ( - "flag" - "github.com/spf13/pflag" -) - -func main() { - - // using standard library "flag" package - flag.Int("flagname", 1234, "help message for flagname") - - pflag.CommandLine.AddGoFlagSet(flag.CommandLine) - pflag.Parse() - viper.BindPFlags(pflag.CommandLine) - - i := viper.GetInt("flagname") // retrieve value from viper - - // ... 
-} -``` - -#### Flag interfaces - -Viper provides two Go interfaces to bind other flag systems if you don’t use `Pflags`. - -`FlagValue` represents a single flag. This is a very simple example on how to implement this interface: - -```go -type myFlag struct {} -func (f myFlag) HasChanged() bool { return false } -func (f myFlag) Name() string { return "my-flag-name" } -func (f myFlag) ValueString() string { return "my-flag-value" } -func (f myFlag) ValueType() string { return "string" } -``` - -Once your flag implements this interface, you can simply tell Viper to bind it: - -```go -viper.BindFlagValue("my-flag-name", myFlag{}) -``` - -`FlagValueSet` represents a group of flags. This is a very simple example on how to implement this interface: - -```go -type myFlagSet struct { - flags []myFlag -} - -func (f myFlagSet) VisitAll(fn func(FlagValue)) { - for _, flag := range flags { - fn(flag) - } -} -``` - -Once your flag set implements this interface, you can simply tell Viper to bind it: - -```go -fSet := myFlagSet{ - flags: []myFlag{myFlag{}, myFlag{}}, -} -viper.BindFlagValues("my-flags", fSet) -``` - -### Remote Key/Value Store Support - -To enable remote support in Viper, do a blank import of the `viper/remote` -package: - -`import _ "github.com/spf13/viper/remote"` - -Viper will read a config string (as JSON, TOML, YAML, HCL or envfile) retrieved from a path -in a Key/Value store such as etcd or Consul. These values take precedence over -default values, but are overridden by configuration values retrieved from disk, -flags, or environment variables. - -Viper supports multiple hosts. To use, pass a list of endpoints separated by `;`. For example `http://127.0.0.1:4001;http://127.0.0.1:4002`. - -Viper uses [crypt](https://github.com/bketelsen/crypt) to retrieve -configuration from the K/V store, which means that you can store your -configuration values encrypted and have them automatically decrypted if you have -the correct gpg keyring. Encryption is optional. - -You can use remote configuration in conjunction with local configuration, or -independently of it. - -`crypt` has a command-line helper that you can use to put configurations in your -K/V store. `crypt` defaults to etcd on http://127.0.0.1:4001. - -```bash -$ go get github.com/bketelsen/crypt/bin/crypt -$ crypt set -plaintext /config/hugo.json /Users/hugo/settings/config.json -``` - -Confirm that your value was set: - -```bash -$ crypt get -plaintext /config/hugo.json -``` - -See the `crypt` documentation for examples of how to set encrypted values, or -how to use Consul. - -### Remote Key/Value Store Example - Unencrypted - -#### etcd -```go -viper.AddRemoteProvider("etcd", "http://127.0.0.1:4001","/config/hugo.json") -viper.SetConfigType("json") // because there is no file extension in a stream of bytes, supported extensions are "json", "toml", "yaml", "yml", "properties", "props", "prop", "env", "dotenv" -err := viper.ReadRemoteConfig() -``` - -#### etcd3 -```go -viper.AddRemoteProvider("etcd3", "http://127.0.0.1:4001","/config/hugo.json") -viper.SetConfigType("json") // because there is no file extension in a stream of bytes, supported extensions are "json", "toml", "yaml", "yml", "properties", "props", "prop", "env", "dotenv" -err := viper.ReadRemoteConfig() -``` - -#### Consul -You need to set a key to Consul key/value storage with JSON value containing your desired config. 
-For example, create a Consul key/value store key `MY_CONSUL_KEY` with value: - -```json -{ - "port": 8080, - "hostname": "myhostname.com" -} -``` - -```go -viper.AddRemoteProvider("consul", "localhost:8500", "MY_CONSUL_KEY") -viper.SetConfigType("json") // Need to explicitly set this to json -err := viper.ReadRemoteConfig() - -fmt.Println(viper.Get("port")) // 8080 -fmt.Println(viper.Get("hostname")) // myhostname.com -``` - -#### Firestore - -```go -viper.AddRemoteProvider("firestore", "google-cloud-project-id", "collection/document") -viper.SetConfigType("json") // Config's format: "json", "toml", "yaml", "yml" -err := viper.ReadRemoteConfig() -``` - -Of course, you're allowed to use `SecureRemoteProvider` also - - -#### NATS - -```go -viper.AddRemoteProvider("nats", "nats://127.0.0.1:4222", "myapp.config") -viper.SetConfigType("json") -err := viper.ReadRemoteConfig() -``` - -### Remote Key/Value Store Example - Encrypted - -```go -viper.AddSecureRemoteProvider("etcd","http://127.0.0.1:4001","/config/hugo.json","/etc/secrets/mykeyring.gpg") -viper.SetConfigType("json") // because there is no file extension in a stream of bytes, supported extensions are "json", "toml", "yaml", "yml", "properties", "props", "prop", "env", "dotenv" -err := viper.ReadRemoteConfig() -``` - -### Watching Changes in etcd - Unencrypted - -```go -// alternatively, you can create a new viper instance. -var runtime_viper = viper.New() - -runtime_viper.AddRemoteProvider("etcd", "http://127.0.0.1:4001", "/config/hugo.yml") -runtime_viper.SetConfigType("yaml") // because there is no file extension in a stream of bytes, supported extensions are "json", "toml", "yaml", "yml", "properties", "props", "prop", "env", "dotenv" - -// read from remote config the first time. -err := runtime_viper.ReadRemoteConfig() - -// unmarshal config -runtime_viper.Unmarshal(&runtime_conf) - -// open a goroutine to watch remote changes forever -go func(){ - for { - time.Sleep(time.Second * 5) // delay after each request - - // currently, only tested with etcd support - err := runtime_viper.WatchRemoteConfig() - if err != nil { - log.Errorf("unable to read remote config: %v", err) - continue - } - - // unmarshal new config into our runtime config struct. you can also use channel - // to implement a signal to notify the system of the changes - runtime_viper.Unmarshal(&runtime_conf) - } -}() -``` - -## Getting Values From Viper - -In Viper, there are a few ways to get a value depending on the value’s type. -The following functions and methods exist: - - * `Get(key string) : any` - * `GetBool(key string) : bool` - * `GetFloat64(key string) : float64` - * `GetInt(key string) : int` - * `GetIntSlice(key string) : []int` - * `GetString(key string) : string` - * `GetStringMap(key string) : map[string]any` - * `GetStringMapString(key string) : map[string]string` - * `GetStringSlice(key string) : []string` - * `GetTime(key string) : time.Time` - * `GetDuration(key string) : time.Duration` - * `IsSet(key string) : bool` - * `AllSettings() : map[string]any` - -One important thing to recognize is that each Get function will return a zero -value if it’s not found. To check if a given key exists, the `IsSet()` method -has been provided. - -The zero value will also be returned if the value is set, but fails to parse -as the requested type. 
- -Example: -```go -viper.GetString("logfile") // case-insensitive Setting & Getting -if viper.GetBool("verbose") { - fmt.Println("verbose enabled") -} -``` -### Accessing nested keys - -The accessor methods also accept formatted paths to deeply nested keys. For -example, if the following JSON file is loaded: - -```json -{ - "host": { - "address": "localhost", - "port": 5799 - }, - "datastore": { - "metric": { - "host": "127.0.0.1", - "port": 3099 - }, - "warehouse": { - "host": "198.0.0.1", - "port": 2112 - } - } -} - -``` - -Viper can access a nested field by passing a `.` delimited path of keys: - -```go -GetString("datastore.metric.host") // (returns "127.0.0.1") -``` - -This obeys the precedence rules established above; the search for the path -will cascade through the remaining configuration registries until found. - -For example, given this configuration file, both `datastore.metric.host` and -`datastore.metric.port` are already defined (and may be overridden). If in addition -`datastore.metric.protocol` was defined in the defaults, Viper would also find it. - -However, if `datastore.metric` was overridden (by a flag, an environment variable, -the `Set()` method, …) with an immediate value, then all sub-keys of -`datastore.metric` become undefined, they are “shadowed” by the higher-priority -configuration level. - -Viper can access array indices by using numbers in the path. For example: - -```jsonc -{ - "host": { - "address": "localhost", - "ports": [ - 5799, - 6029 - ] - }, - "datastore": { - "metric": { - "host": "127.0.0.1", - "port": 3099 - }, - "warehouse": { - "host": "198.0.0.1", - "port": 2112 - } - } -} - -GetInt("host.ports.1") // returns 6029 - -``` - -Lastly, if there exists a key that matches the delimited key path, its value -will be returned instead. E.g. - -```jsonc -{ - "datastore.metric.host": "0.0.0.0", - "host": { - "address": "localhost", - "port": 5799 - }, - "datastore": { - "metric": { - "host": "127.0.0.1", - "port": 3099 - }, - "warehouse": { - "host": "198.0.0.1", - "port": 2112 - } - } -} - -GetString("datastore.metric.host") // returns "0.0.0.0" -``` - -### Extracting a sub-tree - -When developing reusable modules, it's often useful to extract a subset of the configuration -and pass it to a module. This way the module can be instantiated more than once, with different configurations. - -For example, an application might use multiple different cache stores for different purposes: - -```yaml -cache: - cache1: - max-items: 100 - item-size: 64 - cache2: - max-items: 200 - item-size: 80 -``` - -We could pass the cache name to a module (eg. `NewCache("cache1")`), -but it would require weird concatenation for accessing config keys and would be less separated from the global config. - -So instead of doing that let's pass a Viper instance to the constructor that represents a subset of the configuration: - -```go -cache1Config := viper.Sub("cache.cache1") -if cache1Config == nil { // Sub returns nil if the key cannot be found - panic("cache configuration not found") -} - -cache1 := NewCache(cache1Config) -``` - -**Note:** Always check the return value of `Sub`. It returns `nil` if a key cannot be found. 
- -Internally, the `NewCache` function can address `max-items` and `item-size` keys directly: - -```go -func NewCache(v *Viper) *Cache { - return &Cache{ - MaxItems: v.GetInt("max-items"), - ItemSize: v.GetInt("item-size"), - } -} -``` - -The resulting code is easy to test, since it's decoupled from the main config structure, -and easier to reuse (for the same reason). - - -### Unmarshaling - -You also have the option of Unmarshaling all or a specific value to a struct, map, -etc. - -There are two methods to do this: - - * `Unmarshal(rawVal any) : error` - * `UnmarshalKey(key string, rawVal any) : error` - -Example: - -```go -type config struct { - Port int - Name string - PathMap string `mapstructure:"path_map"` -} - -var C config - -err := viper.Unmarshal(&C) -if err != nil { - t.Fatalf("unable to decode into struct, %v", err) -} -``` - -If you want to unmarshal configuration where the keys themselves contain dot (the default key delimiter), -you have to change the delimiter: - -```go -v := viper.NewWithOptions(viper.KeyDelimiter("::")) - -v.SetDefault("chart::values", map[string]any{ - "ingress": map[string]any{ - "annotations": map[string]any{ - "traefik.frontend.rule.type": "PathPrefix", - "traefik.ingress.kubernetes.io/ssl-redirect": "true", - }, - }, -}) - -type config struct { - Chart struct{ - Values map[string]any - } -} - -var C config - -v.Unmarshal(&C) -``` - -Viper also supports unmarshaling into embedded structs: - -```go -/* -Example config: - -module: - enabled: true - token: 89h3f98hbwf987h3f98wenf89ehf -*/ -type config struct { - Module struct { - Enabled bool - - moduleConfig `mapstructure:",squash"` - } -} - -// moduleConfig could be in a module specific package -type moduleConfig struct { - Token string -} - -var C config - -err := viper.Unmarshal(&C) -if err != nil { - t.Fatalf("unable to decode into struct, %v", err) -} -``` - -Viper uses [github.com/mitchellh/mapstructure](https://github.com/mitchellh/mapstructure) under the hood for unmarshaling values which uses `mapstructure` tags by default. - -### Decoding custom formats - -A frequently requested feature for Viper is adding more value formats and decoders. -For example, parsing character (dot, comma, semicolon, etc) separated strings into slices. - -This is already available in Viper using mapstructure decode hooks. - -Read more about the details in [this blog post](https://sagikazarmark.hu/blog/decoding-custom-formats-with-viper/). - -### Marshalling to string - -You may need to marshal all the settings held in viper into a string rather than write them to a file. -You can use your favorite format's marshaller with the config returned by `AllSettings()`. - -```go -import ( - yaml "gopkg.in/yaml.v2" - // ... -) - -func yamlStringSettings() string { - c := viper.AllSettings() - bs, err := yaml.Marshal(c) - if err != nil { - log.Fatalf("unable to marshal config to YAML: %v", err) - } - return string(bs) -} -``` - -## Viper or Vipers? - -Viper comes ready to use out of the box. There is no configuration or -initialization needed to begin using Viper. Since most applications will want -to use a single central repository for their configuration, the viper package -provides this. It is similar to a singleton. - -In all of the examples above, they demonstrate using viper in its singleton -style approach. - -### Working with multiple vipers - -You can also create many different vipers for use in your application. Each will -have its own unique set of configurations and values. 
Each can read from a -different config file, key value store, etc. All of the functions that viper -package supports are mirrored as methods on a viper. - -Example: - -```go -x := viper.New() -y := viper.New() - -x.SetDefault("ContentDir", "content") -y.SetDefault("ContentDir", "foobar") - -//... -``` - -When working with multiple vipers, it is up to the user to keep track of the -different vipers. - - -## Q & A - -### Why is it called “Viper”? - -A: Viper is designed to be a [companion](http://en.wikipedia.org/wiki/Viper_(G.I._Joe)) -to [Cobra](https://github.com/spf13/cobra). While both can operate completely -independently, together they make a powerful pair to handle much of your -application foundation needs. - -### Why is it called “Cobra”? - -Is there a better name for a [commander](http://en.wikipedia.org/wiki/Cobra_Commander)? - -### Does Viper support case sensitive keys? - -**tl;dr:** No. - -Viper merges configuration from various sources, many of which are either case insensitive or uses different casing than the rest of the sources (eg. env vars). -In order to provide the best experience when using multiple sources, the decision has been made to make all keys case insensitive. - -There has been several attempts to implement case sensitivity, but unfortunately it's not that trivial. We might take a stab at implementing it in [Viper v2](https://github.com/spf13/viper/issues/772), but despite the initial noise, it does not seem to be requested that much. - -You can vote for case sensitivity by filling out this feedback form: https://forms.gle/R6faU74qPRPAzchZ9 - -### Is it safe to concurrently read and write to a viper? - -No, you will need to synchronize access to the viper yourself (for example by using the `sync` package). Concurrent reads and writes can cause a panic. - -## Troubleshooting - -See [TROUBLESHOOTING.md](TROUBLESHOOTING.md). - -## Development - -**For an optimal developer experience, it is recommended to install [Nix](https://nixos.org/download.html) and [direnv](https://direnv.net/docs/installation.html).** - -_Alternatively, install [Go](https://go.dev/dl/) on your computer then run `make deps` to install the rest of the dependencies._ - -Run the test suite: - -```shell -make test -``` - -Run linters: - -```shell -make lint # pass -j option to run them in parallel -``` - -Some linter violations can automatically be fixed: - -```shell -make fmt -``` - -## License - -The project is licensed under the [MIT License](LICENSE). diff --git a/vendor/github.com/spf13/viper/TROUBLESHOOTING.md b/vendor/github.com/spf13/viper/TROUBLESHOOTING.md deleted file mode 100644 index c4e36c686..000000000 --- a/vendor/github.com/spf13/viper/TROUBLESHOOTING.md +++ /dev/null @@ -1,32 +0,0 @@ -# Troubleshooting - -## Unmarshaling doesn't work - -The most common reason for this issue is improper use of struct tags (eg. `yaml` or `json`). Viper uses [github.com/mitchellh/mapstructure](https://github.com/mitchellh/mapstructure) under the hood for unmarshaling values which uses `mapstructure` tags by default. Please refer to the library's documentation for using other struct tags. 
- -## Cannot find package - -Viper installation seems to fail a lot lately with the following (or a similar) error: - -``` -cannot find package "github.com/hashicorp/hcl/tree/hcl1" in any of: -/usr/local/Cellar/go/1.15.7_1/libexec/src/github.com/hashicorp/hcl/tree/hcl1 (from $GOROOT) -/Users/user/go/src/github.com/hashicorp/hcl/tree/hcl1 (from $GOPATH) -``` - -As the error message suggests, Go tries to look up dependencies in `GOPATH` mode (as it's commonly called) from the `GOPATH`. -Viper opted to use [Go Modules](https://github.com/golang/go/wiki/Modules) to manage its dependencies. While in many cases the two methods are interchangeable, once a dependency releases new (major) versions, `GOPATH` mode is no longer able to decide which version to use, so it'll either use one that's already present or pick a version (usually the `master` branch). - -The solution is easy: switch to using Go Modules. -Please refer to the [wiki](https://github.com/golang/go/wiki/Modules) on how to do that. - -**tl;dr* `export GO111MODULE=on` - -## Unquoted 'y' and 'n' characters get replaced with _true_ and _false_ when reading a YAML file - -This is a YAML 1.1 feature according to [go-yaml/yaml#740](https://github.com/go-yaml/yaml/issues/740). - -Potential solutions are: - -1. Quoting values resolved as boolean -1. Upgrading to YAML v3 (for the time being this is possible by passing the `viper_yaml3` tag to your build) diff --git a/vendor/github.com/spf13/viper/file.go b/vendor/github.com/spf13/viper/file.go deleted file mode 100644 index a54fe5a7a..000000000 --- a/vendor/github.com/spf13/viper/file.go +++ /dev/null @@ -1,56 +0,0 @@ -//go:build !finder - -package viper - -import ( - "fmt" - "os" - "path/filepath" - - "github.com/spf13/afero" -) - -// Search all configPaths for any config file. -// Returns the first path that exists (and is a config file). -func (v *Viper) findConfigFile() (string, error) { - v.logger.Info("searching for config in paths", "paths", v.configPaths) - - for _, cp := range v.configPaths { - file := v.searchInPath(cp) - if file != "" { - return file, nil - } - } - return "", ConfigFileNotFoundError{v.configName, fmt.Sprintf("%s", v.configPaths)} -} - -func (v *Viper) searchInPath(in string) (filename string) { - v.logger.Debug("searching for config in path", "path", in) - for _, ext := range SupportedExts { - v.logger.Debug("checking if file exists", "file", filepath.Join(in, v.configName+"."+ext)) - if b, _ := exists(v.fs, filepath.Join(in, v.configName+"."+ext)); b { - v.logger.Debug("found file", "file", filepath.Join(in, v.configName+"."+ext)) - return filepath.Join(in, v.configName+"."+ext) - } - } - - if v.configType != "" { - if b, _ := exists(v.fs, filepath.Join(in, v.configName)); b { - return filepath.Join(in, v.configName) - } - } - - return "" -} - -// exists checks if file exists. -func exists(fs afero.Fs, path string) (bool, error) { - stat, err := fs.Stat(path) - if err == nil { - return !stat.IsDir(), nil - } - if os.IsNotExist(err) { - return false, nil - } - return false, err -} diff --git a/vendor/github.com/spf13/viper/file_finder.go b/vendor/github.com/spf13/viper/file_finder.go deleted file mode 100644 index d96a1bd22..000000000 --- a/vendor/github.com/spf13/viper/file_finder.go +++ /dev/null @@ -1,38 +0,0 @@ -//go:build finder - -package viper - -import ( - "fmt" - - "github.com/sagikazarmark/locafero" -) - -// Search all configPaths for any config file. -// Returns the first path that exists (and is a config file). 
-func (v *Viper) findConfigFile() (string, error) { - var names []string - - if v.configType != "" { - names = locafero.NameWithOptionalExtensions(v.configName, SupportedExts...) - } else { - names = locafero.NameWithExtensions(v.configName, SupportedExts...) - } - - finder := locafero.Finder{ - Paths: v.configPaths, - Names: names, - Type: locafero.FileTypeFile, - } - - results, err := finder.Find(v.fs) - if err != nil { - return "", err - } - - if len(results) == 0 { - return "", ConfigFileNotFoundError{v.configName, fmt.Sprintf("%s", v.configPaths)} - } - - return results[0], nil -} diff --git a/vendor/github.com/spf13/viper/flags.go b/vendor/github.com/spf13/viper/flags.go deleted file mode 100644 index de033ed58..000000000 --- a/vendor/github.com/spf13/viper/flags.go +++ /dev/null @@ -1,57 +0,0 @@ -package viper - -import "github.com/spf13/pflag" - -// FlagValueSet is an interface that users can implement -// to bind a set of flags to viper. -type FlagValueSet interface { - VisitAll(fn func(FlagValue)) -} - -// FlagValue is an interface that users can implement -// to bind different flags to viper. -type FlagValue interface { - HasChanged() bool - Name() string - ValueString() string - ValueType() string -} - -// pflagValueSet is a wrapper around *pflag.ValueSet -// that implements FlagValueSet. -type pflagValueSet struct { - flags *pflag.FlagSet -} - -// VisitAll iterates over all *pflag.Flag inside the *pflag.FlagSet. -func (p pflagValueSet) VisitAll(fn func(flag FlagValue)) { - p.flags.VisitAll(func(flag *pflag.Flag) { - fn(pflagValue{flag}) - }) -} - -// pflagValue is a wrapper around *pflag.flag -// that implements FlagValue. -type pflagValue struct { - flag *pflag.Flag -} - -// HasChanged returns whether the flag has changes or not. -func (p pflagValue) HasChanged() bool { - return p.flag.Changed -} - -// Name returns the name of the flag. -func (p pflagValue) Name() string { - return p.flag.Name -} - -// ValueString returns the value of the flag as a string. -func (p pflagValue) ValueString() string { - return p.flag.Value.String() -} - -// ValueType returns the type of the flag as a string. 
-func (p pflagValue) ValueType() string { - return p.flag.Value.Type() -} diff --git a/vendor/github.com/spf13/viper/flake.lock b/vendor/github.com/spf13/viper/flake.lock deleted file mode 100644 index 78da51090..000000000 --- a/vendor/github.com/spf13/viper/flake.lock +++ /dev/null @@ -1,255 +0,0 @@ -{ - "nodes": { - "devenv": { - "inputs": { - "flake-compat": "flake-compat", - "nix": "nix", - "nixpkgs": "nixpkgs", - "pre-commit-hooks": "pre-commit-hooks" - }, - "locked": { - "lastModified": 1687972261, - "narHash": "sha256-+mxvZfwMVoaZYETmuQWqTi/7T9UKoAE+WpdSQkOVJ2g=", - "owner": "cachix", - "repo": "devenv", - "rev": "e85df562088573305e55906eaa964341f8cb0d9f", - "type": "github" - }, - "original": { - "owner": "cachix", - "repo": "devenv", - "type": "github" - } - }, - "flake-compat": { - "flake": false, - "locked": { - "lastModified": 1673956053, - "narHash": "sha256-4gtG9iQuiKITOjNQQeQIpoIB6b16fm+504Ch3sNKLd8=", - "owner": "edolstra", - "repo": "flake-compat", - "rev": "35bb57c0c8d8b62bbfd284272c928ceb64ddbde9", - "type": "github" - }, - "original": { - "owner": "edolstra", - "repo": "flake-compat", - "type": "github" - } - }, - "flake-parts": { - "inputs": { - "nixpkgs-lib": "nixpkgs-lib" - }, - "locked": { - "lastModified": 1687762428, - "narHash": "sha256-DIf7mi45PKo+s8dOYF+UlXHzE0Wl/+k3tXUyAoAnoGE=", - "owner": "hercules-ci", - "repo": "flake-parts", - "rev": "37dd7bb15791c86d55c5121740a1887ab55ee836", - "type": "github" - }, - "original": { - "owner": "hercules-ci", - "repo": "flake-parts", - "type": "github" - } - }, - "flake-utils": { - "locked": { - "lastModified": 1667395993, - "narHash": "sha256-nuEHfE/LcWyuSWnS8t12N1wc105Qtau+/OdUAjtQ0rA=", - "owner": "numtide", - "repo": "flake-utils", - "rev": "5aed5285a952e0b949eb3ba02c12fa4fcfef535f", - "type": "github" - }, - "original": { - "owner": "numtide", - "repo": "flake-utils", - "type": "github" - } - }, - "gitignore": { - "inputs": { - "nixpkgs": [ - "devenv", - "pre-commit-hooks", - "nixpkgs" - ] - }, - "locked": { - "lastModified": 1660459072, - "narHash": "sha256-8DFJjXG8zqoONA1vXtgeKXy68KdJL5UaXR8NtVMUbx8=", - "owner": "hercules-ci", - "repo": "gitignore.nix", - "rev": "a20de23b925fd8264fd7fad6454652e142fd7f73", - "type": "github" - }, - "original": { - "owner": "hercules-ci", - "repo": "gitignore.nix", - "type": "github" - } - }, - "lowdown-src": { - "flake": false, - "locked": { - "lastModified": 1633514407, - "narHash": "sha256-Dw32tiMjdK9t3ETl5fzGrutQTzh2rufgZV4A/BbxuD4=", - "owner": "kristapsdz", - "repo": "lowdown", - "rev": "d2c2b44ff6c27b936ec27358a2653caaef8f73b8", - "type": "github" - }, - "original": { - "owner": "kristapsdz", - "repo": "lowdown", - "type": "github" - } - }, - "nix": { - "inputs": { - "lowdown-src": "lowdown-src", - "nixpkgs": [ - "devenv", - "nixpkgs" - ], - "nixpkgs-regression": "nixpkgs-regression" - }, - "locked": { - "lastModified": 1676545802, - "narHash": "sha256-EK4rZ+Hd5hsvXnzSzk2ikhStJnD63odF7SzsQ8CuSPU=", - "owner": "domenkozar", - "repo": "nix", - "rev": "7c91803598ffbcfe4a55c44ac6d49b2cf07a527f", - "type": "github" - }, - "original": { - "owner": "domenkozar", - "ref": "relaxed-flakes", - "repo": "nix", - "type": "github" - } - }, - "nixpkgs": { - "locked": { - "lastModified": 1678875422, - "narHash": "sha256-T3o6NcQPwXjxJMn2shz86Chch4ljXgZn746c2caGxd8=", - "owner": "NixOS", - "repo": "nixpkgs", - "rev": "126f49a01de5b7e35a43fd43f891ecf6d3a51459", - "type": "github" - }, - "original": { - "owner": "NixOS", - "ref": "nixpkgs-unstable", - "repo": "nixpkgs", - "type": "github" - } - 
}, - "nixpkgs-lib": { - "locked": { - "dir": "lib", - "lastModified": 1685564631, - "narHash": "sha256-8ywr3AkblY4++3lIVxmrWZFzac7+f32ZEhH/A8pNscI=", - "owner": "NixOS", - "repo": "nixpkgs", - "rev": "4f53efe34b3a8877ac923b9350c874e3dcd5dc0a", - "type": "github" - }, - "original": { - "dir": "lib", - "owner": "NixOS", - "ref": "nixos-unstable", - "repo": "nixpkgs", - "type": "github" - } - }, - "nixpkgs-regression": { - "locked": { - "lastModified": 1643052045, - "narHash": "sha256-uGJ0VXIhWKGXxkeNnq4TvV3CIOkUJ3PAoLZ3HMzNVMw=", - "owner": "NixOS", - "repo": "nixpkgs", - "rev": "215d4d0fd80ca5163643b03a33fde804a29cc1e2", - "type": "github" - }, - "original": { - "owner": "NixOS", - "repo": "nixpkgs", - "rev": "215d4d0fd80ca5163643b03a33fde804a29cc1e2", - "type": "github" - } - }, - "nixpkgs-stable": { - "locked": { - "lastModified": 1678872516, - "narHash": "sha256-/E1YwtMtFAu2KUQKV/1+KFuReYPANM2Rzehk84VxVoc=", - "owner": "NixOS", - "repo": "nixpkgs", - "rev": "9b8e5abb18324c7fe9f07cb100c3cd4a29cda8b8", - "type": "github" - }, - "original": { - "owner": "NixOS", - "ref": "nixos-22.11", - "repo": "nixpkgs", - "type": "github" - } - }, - "nixpkgs_2": { - "locked": { - "lastModified": 1687886075, - "narHash": "sha256-PeayJDDDy+uw1Ats4moZnRdL1OFuZm1Tj+KiHlD67+o=", - "owner": "NixOS", - "repo": "nixpkgs", - "rev": "a565059a348422af5af9026b5174dc5c0dcefdae", - "type": "github" - }, - "original": { - "owner": "NixOS", - "ref": "nixpkgs-unstable", - "repo": "nixpkgs", - "type": "github" - } - }, - "pre-commit-hooks": { - "inputs": { - "flake-compat": [ - "devenv", - "flake-compat" - ], - "flake-utils": "flake-utils", - "gitignore": "gitignore", - "nixpkgs": [ - "devenv", - "nixpkgs" - ], - "nixpkgs-stable": "nixpkgs-stable" - }, - "locked": { - "lastModified": 1686050334, - "narHash": "sha256-R0mczWjDzBpIvM3XXhO908X5e2CQqjyh/gFbwZk/7/Q=", - "owner": "cachix", - "repo": "pre-commit-hooks.nix", - "rev": "6881eb2ae5d8a3516e34714e7a90d9d95914c4dc", - "type": "github" - }, - "original": { - "owner": "cachix", - "repo": "pre-commit-hooks.nix", - "type": "github" - } - }, - "root": { - "inputs": { - "devenv": "devenv", - "flake-parts": "flake-parts", - "nixpkgs": "nixpkgs_2" - } - } - }, - "root": "root", - "version": 7 -} diff --git a/vendor/github.com/spf13/viper/flake.nix b/vendor/github.com/spf13/viper/flake.nix deleted file mode 100644 index 9b26c3fcf..000000000 --- a/vendor/github.com/spf13/viper/flake.nix +++ /dev/null @@ -1,56 +0,0 @@ -{ - description = "Viper"; - - inputs = { - nixpkgs.url = "github:NixOS/nixpkgs/nixpkgs-unstable"; - flake-parts.url = "github:hercules-ci/flake-parts"; - devenv.url = "github:cachix/devenv"; - }; - - outputs = inputs@{ flake-parts, ... }: - flake-parts.lib.mkFlake { inherit inputs; } { - imports = [ - inputs.devenv.flakeModule - ]; - - systems = [ "x86_64-linux" "x86_64-darwin" "aarch64-darwin" ]; - - perSystem = { config, self', inputs', pkgs, system, ... 
}: rec { - devenv.shells = { - default = { - languages = { - go.enable = true; - }; - - pre-commit.hooks = { - nixpkgs-fmt.enable = true; - yamllint.enable = true; - }; - - packages = with pkgs; [ - gnumake - - golangci-lint - yamllint - ]; - - scripts = { - versions.exec = '' - go version - golangci-lint version - ''; - }; - - enterShell = '' - versions - ''; - - # https://github.com/cachix/devenv/issues/528#issuecomment-1556108767 - containers = pkgs.lib.mkForce { }; - }; - - ci = devenv.shells.default; - }; - }; - }; -} diff --git a/vendor/github.com/spf13/viper/internal/encoding/decoder.go b/vendor/github.com/spf13/viper/internal/encoding/decoder.go deleted file mode 100644 index 8a7b1dbc9..000000000 --- a/vendor/github.com/spf13/viper/internal/encoding/decoder.go +++ /dev/null @@ -1,61 +0,0 @@ -package encoding - -import ( - "sync" -) - -// Decoder decodes the contents of b into v. -// It's primarily used for decoding contents of a file into a map[string]any. -type Decoder interface { - Decode(b []byte, v map[string]any) error -} - -const ( - // ErrDecoderNotFound is returned when there is no decoder registered for a format. - ErrDecoderNotFound = encodingError("decoder not found for this format") - - // ErrDecoderFormatAlreadyRegistered is returned when an decoder is already registered for a format. - ErrDecoderFormatAlreadyRegistered = encodingError("decoder already registered for this format") -) - -// DecoderRegistry can choose an appropriate Decoder based on the provided format. -type DecoderRegistry struct { - decoders map[string]Decoder - - mu sync.RWMutex -} - -// NewDecoderRegistry returns a new, initialized DecoderRegistry. -func NewDecoderRegistry() *DecoderRegistry { - return &DecoderRegistry{ - decoders: make(map[string]Decoder), - } -} - -// RegisterDecoder registers a Decoder for a format. -// Registering a Decoder for an already existing format is not supported. -func (e *DecoderRegistry) RegisterDecoder(format string, enc Decoder) error { - e.mu.Lock() - defer e.mu.Unlock() - - if _, ok := e.decoders[format]; ok { - return ErrDecoderFormatAlreadyRegistered - } - - e.decoders[format] = enc - - return nil -} - -// Decode calls the underlying Decoder based on the format. -func (e *DecoderRegistry) Decode(format string, b []byte, v map[string]any) error { - e.mu.RLock() - decoder, ok := e.decoders[format] - e.mu.RUnlock() - - if !ok { - return ErrDecoderNotFound - } - - return decoder.Decode(b, v) -} diff --git a/vendor/github.com/spf13/viper/internal/encoding/dotenv/codec.go b/vendor/github.com/spf13/viper/internal/encoding/dotenv/codec.go deleted file mode 100644 index 3ebc76f02..000000000 --- a/vendor/github.com/spf13/viper/internal/encoding/dotenv/codec.go +++ /dev/null @@ -1,61 +0,0 @@ -package dotenv - -import ( - "bytes" - "fmt" - "sort" - "strings" - - "github.com/subosito/gotenv" -) - -const keyDelimiter = "_" - -// Codec implements the encoding.Encoder and encoding.Decoder interfaces for encoding data containing environment variables -// (commonly called as dotenv format). 
-type Codec struct{} - -func (Codec) Encode(v map[string]any) ([]byte, error) { - flattened := map[string]any{} - - flattened = flattenAndMergeMap(flattened, v, "", keyDelimiter) - - keys := make([]string, 0, len(flattened)) - - for key := range flattened { - keys = append(keys, key) - } - - sort.Strings(keys) - - var buf bytes.Buffer - - for _, key := range keys { - _, err := buf.WriteString(fmt.Sprintf("%v=%v\n", strings.ToUpper(key), flattened[key])) - if err != nil { - return nil, err - } - } - - return buf.Bytes(), nil -} - -func (Codec) Decode(b []byte, v map[string]any) error { - var buf bytes.Buffer - - _, err := buf.Write(b) - if err != nil { - return err - } - - env, err := gotenv.StrictParse(&buf) - if err != nil { - return err - } - - for key, value := range env { - v[key] = value - } - - return nil -} diff --git a/vendor/github.com/spf13/viper/internal/encoding/dotenv/map_utils.go b/vendor/github.com/spf13/viper/internal/encoding/dotenv/map_utils.go deleted file mode 100644 index 8bfe0a9de..000000000 --- a/vendor/github.com/spf13/viper/internal/encoding/dotenv/map_utils.go +++ /dev/null @@ -1,41 +0,0 @@ -package dotenv - -import ( - "strings" - - "github.com/spf13/cast" -) - -// flattenAndMergeMap recursively flattens the given map into a new map -// Code is based on the function with the same name in the main package. -// TODO: move it to a common place. -func flattenAndMergeMap(shadow, m map[string]any, prefix, delimiter string) map[string]any { - if shadow != nil && prefix != "" && shadow[prefix] != nil { - // prefix is shadowed => nothing more to flatten - return shadow - } - if shadow == nil { - shadow = make(map[string]any) - } - - var m2 map[string]any - if prefix != "" { - prefix += delimiter - } - for k, val := range m { - fullKey := prefix + k - switch val := val.(type) { - case map[string]any: - m2 = val - case map[any]any: - m2 = cast.ToStringMap(val) - default: - // immediate value - shadow[strings.ToLower(fullKey)] = val - continue - } - // recursively merge to shadow map - shadow = flattenAndMergeMap(shadow, m2, fullKey, delimiter) - } - return shadow -} diff --git a/vendor/github.com/spf13/viper/internal/encoding/encoder.go b/vendor/github.com/spf13/viper/internal/encoding/encoder.go deleted file mode 100644 index 659585962..000000000 --- a/vendor/github.com/spf13/viper/internal/encoding/encoder.go +++ /dev/null @@ -1,60 +0,0 @@ -package encoding - -import ( - "sync" -) - -// Encoder encodes the contents of v into a byte representation. -// It's primarily used for encoding a map[string]any into a file format. -type Encoder interface { - Encode(v map[string]any) ([]byte, error) -} - -const ( - // ErrEncoderNotFound is returned when there is no encoder registered for a format. - ErrEncoderNotFound = encodingError("encoder not found for this format") - - // ErrEncoderFormatAlreadyRegistered is returned when an encoder is already registered for a format. - ErrEncoderFormatAlreadyRegistered = encodingError("encoder already registered for this format") -) - -// EncoderRegistry can choose an appropriate Encoder based on the provided format. -type EncoderRegistry struct { - encoders map[string]Encoder - - mu sync.RWMutex -} - -// NewEncoderRegistry returns a new, initialized EncoderRegistry. -func NewEncoderRegistry() *EncoderRegistry { - return &EncoderRegistry{ - encoders: make(map[string]Encoder), - } -} - -// RegisterEncoder registers an Encoder for a format. -// Registering a Encoder for an already existing format is not supported. 
-func (e *EncoderRegistry) RegisterEncoder(format string, enc Encoder) error { - e.mu.Lock() - defer e.mu.Unlock() - - if _, ok := e.encoders[format]; ok { - return ErrEncoderFormatAlreadyRegistered - } - - e.encoders[format] = enc - - return nil -} - -func (e *EncoderRegistry) Encode(format string, v map[string]any) ([]byte, error) { - e.mu.RLock() - encoder, ok := e.encoders[format] - e.mu.RUnlock() - - if !ok { - return nil, ErrEncoderNotFound - } - - return encoder.Encode(v) -} diff --git a/vendor/github.com/spf13/viper/internal/encoding/error.go b/vendor/github.com/spf13/viper/internal/encoding/error.go deleted file mode 100644 index e4cde02d7..000000000 --- a/vendor/github.com/spf13/viper/internal/encoding/error.go +++ /dev/null @@ -1,7 +0,0 @@ -package encoding - -type encodingError string - -func (e encodingError) Error() string { - return string(e) -} diff --git a/vendor/github.com/spf13/viper/internal/encoding/hcl/codec.go b/vendor/github.com/spf13/viper/internal/encoding/hcl/codec.go deleted file mode 100644 index d7fa8a1b7..000000000 --- a/vendor/github.com/spf13/viper/internal/encoding/hcl/codec.go +++ /dev/null @@ -1,40 +0,0 @@ -package hcl - -import ( - "bytes" - "encoding/json" - - "github.com/hashicorp/hcl" - "github.com/hashicorp/hcl/hcl/printer" -) - -// Codec implements the encoding.Encoder and encoding.Decoder interfaces for HCL encoding. -// TODO: add printer config to the codec? -type Codec struct{} - -func (Codec) Encode(v map[string]any) ([]byte, error) { - b, err := json.Marshal(v) - if err != nil { - return nil, err - } - - // TODO: use printer.Format? Is the trailing newline an issue? - - ast, err := hcl.Parse(string(b)) - if err != nil { - return nil, err - } - - var buf bytes.Buffer - - err = printer.Fprint(&buf, ast.Node) - if err != nil { - return nil, err - } - - return buf.Bytes(), nil -} - -func (Codec) Decode(b []byte, v map[string]any) error { - return hcl.Unmarshal(b, &v) -} diff --git a/vendor/github.com/spf13/viper/internal/encoding/ini/codec.go b/vendor/github.com/spf13/viper/internal/encoding/ini/codec.go deleted file mode 100644 index d91cf59d2..000000000 --- a/vendor/github.com/spf13/viper/internal/encoding/ini/codec.go +++ /dev/null @@ -1,99 +0,0 @@ -package ini - -import ( - "bytes" - "sort" - "strings" - - "github.com/spf13/cast" - "gopkg.in/ini.v1" -) - -// LoadOptions contains all customized options used for load data source(s). -// This type is added here for convenience: this way consumers can import a single package called "ini". -type LoadOptions = ini.LoadOptions - -// Codec implements the encoding.Encoder and encoding.Decoder interfaces for INI encoding. -type Codec struct { - KeyDelimiter string - LoadOptions LoadOptions -} - -func (c Codec) Encode(v map[string]any) ([]byte, error) { - cfg := ini.Empty() - ini.PrettyFormat = false - - flattened := map[string]any{} - - flattened = flattenAndMergeMap(flattened, v, "", c.keyDelimiter()) - - keys := make([]string, 0, len(flattened)) - - for key := range flattened { - keys = append(keys, key) - } - - sort.Strings(keys) - - for _, key := range keys { - sectionName, keyName := "", key - - lastSep := strings.LastIndex(key, ".") - if lastSep != -1 { - sectionName = key[:(lastSep)] - keyName = key[(lastSep + 1):] - } - - // TODO: is this a good idea? 
- if sectionName == "default" { - sectionName = "" - } - - cfg.Section(sectionName).Key(keyName).SetValue(cast.ToString(flattened[key])) - } - - var buf bytes.Buffer - - _, err := cfg.WriteTo(&buf) - if err != nil { - return nil, err - } - - return buf.Bytes(), nil -} - -func (c Codec) Decode(b []byte, v map[string]any) error { - cfg := ini.Empty(c.LoadOptions) - - err := cfg.Append(b) - if err != nil { - return err - } - - sections := cfg.Sections() - - for i := 0; i < len(sections); i++ { - section := sections[i] - keys := section.Keys() - - for j := 0; j < len(keys); j++ { - key := keys[j] - value := cfg.Section(section.Name()).Key(key.Name()).String() - - deepestMap := deepSearch(v, strings.Split(section.Name(), c.keyDelimiter())) - - // set innermost value - deepestMap[key.Name()] = value - } - } - - return nil -} - -func (c Codec) keyDelimiter() string { - if c.KeyDelimiter == "" { - return "." - } - - return c.KeyDelimiter -} diff --git a/vendor/github.com/spf13/viper/internal/encoding/ini/map_utils.go b/vendor/github.com/spf13/viper/internal/encoding/ini/map_utils.go deleted file mode 100644 index 490ab594e..000000000 --- a/vendor/github.com/spf13/viper/internal/encoding/ini/map_utils.go +++ /dev/null @@ -1,74 +0,0 @@ -package ini - -import ( - "strings" - - "github.com/spf13/cast" -) - -// THIS CODE IS COPIED HERE: IT SHOULD NOT BE MODIFIED -// AT SOME POINT IT WILL BE MOVED TO A COMMON PLACE -// deepSearch scans deep maps, following the key indexes listed in the -// sequence "path". -// The last value is expected to be another map, and is returned. -// -// In case intermediate keys do not exist, or map to a non-map value, -// a new map is created and inserted, and the search continues from there: -// the initial map "m" may be modified! -func deepSearch(m map[string]any, path []string) map[string]any { - for _, k := range path { - m2, ok := m[k] - if !ok { - // intermediate key does not exist - // => create it and continue from there - m3 := make(map[string]any) - m[k] = m3 - m = m3 - continue - } - m3, ok := m2.(map[string]any) - if !ok { - // intermediate key is a value - // => replace with a new map - m3 = make(map[string]any) - m[k] = m3 - } - // continue search from here - m = m3 - } - return m -} - -// flattenAndMergeMap recursively flattens the given map into a new map -// Code is based on the function with the same name in the main package. -// TODO: move it to a common place. 
-func flattenAndMergeMap(shadow, m map[string]any, prefix, delimiter string) map[string]any { - if shadow != nil && prefix != "" && shadow[prefix] != nil { - // prefix is shadowed => nothing more to flatten - return shadow - } - if shadow == nil { - shadow = make(map[string]any) - } - - var m2 map[string]any - if prefix != "" { - prefix += delimiter - } - for k, val := range m { - fullKey := prefix + k - switch val := val.(type) { - case map[string]any: - m2 = val - case map[any]any: - m2 = cast.ToStringMap(val) - default: - // immediate value - shadow[strings.ToLower(fullKey)] = val - continue - } - // recursively merge to shadow map - shadow = flattenAndMergeMap(shadow, m2, fullKey, delimiter) - } - return shadow -} diff --git a/vendor/github.com/spf13/viper/internal/encoding/javaproperties/codec.go b/vendor/github.com/spf13/viper/internal/encoding/javaproperties/codec.go deleted file mode 100644 index e92e5172c..000000000 --- a/vendor/github.com/spf13/viper/internal/encoding/javaproperties/codec.go +++ /dev/null @@ -1,86 +0,0 @@ -package javaproperties - -import ( - "bytes" - "sort" - "strings" - - "github.com/magiconair/properties" - "github.com/spf13/cast" -) - -// Codec implements the encoding.Encoder and encoding.Decoder interfaces for Java properties encoding. -type Codec struct { - KeyDelimiter string - - // Store read properties on the object so that we can write back in order with comments. - // This will only be used if the configuration read is a properties file. - // TODO: drop this feature in v2 - // TODO: make use of the global properties object optional - Properties *properties.Properties -} - -func (c *Codec) Encode(v map[string]any) ([]byte, error) { - if c.Properties == nil { - c.Properties = properties.NewProperties() - } - - flattened := map[string]any{} - - flattened = flattenAndMergeMap(flattened, v, "", c.keyDelimiter()) - - keys := make([]string, 0, len(flattened)) - - for key := range flattened { - keys = append(keys, key) - } - - sort.Strings(keys) - - for _, key := range keys { - _, _, err := c.Properties.Set(key, cast.ToString(flattened[key])) - if err != nil { - return nil, err - } - } - - var buf bytes.Buffer - - _, err := c.Properties.WriteComment(&buf, "#", properties.UTF8) - if err != nil { - return nil, err - } - - return buf.Bytes(), nil -} - -func (c *Codec) Decode(b []byte, v map[string]any) error { - var err error - c.Properties, err = properties.Load(b, properties.UTF8) - if err != nil { - return err - } - - for _, key := range c.Properties.Keys() { - // ignore existence check: we know it's there - value, _ := c.Properties.Get(key) - - // recursively build nested maps - path := strings.Split(key, c.keyDelimiter()) - lastKey := strings.ToLower(path[len(path)-1]) - deepestMap := deepSearch(v, path[0:len(path)-1]) - - // set innermost value - deepestMap[lastKey] = value - } - - return nil -} - -func (c Codec) keyDelimiter() string { - if c.KeyDelimiter == "" { - return "." 
- } - - return c.KeyDelimiter -} diff --git a/vendor/github.com/spf13/viper/internal/encoding/javaproperties/map_utils.go b/vendor/github.com/spf13/viper/internal/encoding/javaproperties/map_utils.go deleted file mode 100644 index 6e1aff223..000000000 --- a/vendor/github.com/spf13/viper/internal/encoding/javaproperties/map_utils.go +++ /dev/null @@ -1,74 +0,0 @@ -package javaproperties - -import ( - "strings" - - "github.com/spf13/cast" -) - -// THIS CODE IS COPIED HERE: IT SHOULD NOT BE MODIFIED -// AT SOME POINT IT WILL BE MOVED TO A COMMON PLACE -// deepSearch scans deep maps, following the key indexes listed in the -// sequence "path". -// The last value is expected to be another map, and is returned. -// -// In case intermediate keys do not exist, or map to a non-map value, -// a new map is created and inserted, and the search continues from there: -// the initial map "m" may be modified! -func deepSearch(m map[string]any, path []string) map[string]any { - for _, k := range path { - m2, ok := m[k] - if !ok { - // intermediate key does not exist - // => create it and continue from there - m3 := make(map[string]any) - m[k] = m3 - m = m3 - continue - } - m3, ok := m2.(map[string]any) - if !ok { - // intermediate key is a value - // => replace with a new map - m3 = make(map[string]any) - m[k] = m3 - } - // continue search from here - m = m3 - } - return m -} - -// flattenAndMergeMap recursively flattens the given map into a new map -// Code is based on the function with the same name in the main package. -// TODO: move it to a common place. -func flattenAndMergeMap(shadow, m map[string]any, prefix, delimiter string) map[string]any { - if shadow != nil && prefix != "" && shadow[prefix] != nil { - // prefix is shadowed => nothing more to flatten - return shadow - } - if shadow == nil { - shadow = make(map[string]any) - } - - var m2 map[string]any - if prefix != "" { - prefix += delimiter - } - for k, val := range m { - fullKey := prefix + k - switch val := val.(type) { - case map[string]any: - m2 = val - case map[any]any: - m2 = cast.ToStringMap(val) - default: - // immediate value - shadow[strings.ToLower(fullKey)] = val - continue - } - // recursively merge to shadow map - shadow = flattenAndMergeMap(shadow, m2, fullKey, delimiter) - } - return shadow -} diff --git a/vendor/github.com/spf13/viper/internal/encoding/json/codec.go b/vendor/github.com/spf13/viper/internal/encoding/json/codec.go deleted file mode 100644 index da7546b5a..000000000 --- a/vendor/github.com/spf13/viper/internal/encoding/json/codec.go +++ /dev/null @@ -1,17 +0,0 @@ -package json - -import ( - "encoding/json" -) - -// Codec implements the encoding.Encoder and encoding.Decoder interfaces for JSON encoding. -type Codec struct{} - -func (Codec) Encode(v map[string]any) ([]byte, error) { - // TODO: expose prefix and indent in the Codec as setting? - return json.MarshalIndent(v, "", " ") -} - -func (Codec) Decode(b []byte, v map[string]any) error { - return json.Unmarshal(b, &v) -} diff --git a/vendor/github.com/spf13/viper/internal/encoding/toml/codec.go b/vendor/github.com/spf13/viper/internal/encoding/toml/codec.go deleted file mode 100644 index c70aa8d28..000000000 --- a/vendor/github.com/spf13/viper/internal/encoding/toml/codec.go +++ /dev/null @@ -1,16 +0,0 @@ -package toml - -import ( - "github.com/pelletier/go-toml/v2" -) - -// Codec implements the encoding.Encoder and encoding.Decoder interfaces for TOML encoding. 
-type Codec struct{} - -func (Codec) Encode(v map[string]any) ([]byte, error) { - return toml.Marshal(v) -} - -func (Codec) Decode(b []byte, v map[string]any) error { - return toml.Unmarshal(b, &v) -} diff --git a/vendor/github.com/spf13/viper/internal/encoding/yaml/codec.go b/vendor/github.com/spf13/viper/internal/encoding/yaml/codec.go deleted file mode 100644 index 036879249..000000000 --- a/vendor/github.com/spf13/viper/internal/encoding/yaml/codec.go +++ /dev/null @@ -1,14 +0,0 @@ -package yaml - -import "gopkg.in/yaml.v3" - -// Codec implements the encoding.Encoder and encoding.Decoder interfaces for YAML encoding. -type Codec struct{} - -func (Codec) Encode(v map[string]any) ([]byte, error) { - return yaml.Marshal(v) -} - -func (Codec) Decode(b []byte, v map[string]any) error { - return yaml.Unmarshal(b, &v) -} diff --git a/vendor/github.com/spf13/viper/internal/features/bind_struct.go b/vendor/github.com/spf13/viper/internal/features/bind_struct.go deleted file mode 100644 index 89302c216..000000000 --- a/vendor/github.com/spf13/viper/internal/features/bind_struct.go +++ /dev/null @@ -1,5 +0,0 @@ -//go:build viper_bind_struct - -package features - -const BindStruct = true diff --git a/vendor/github.com/spf13/viper/internal/features/bind_struct_default.go b/vendor/github.com/spf13/viper/internal/features/bind_struct_default.go deleted file mode 100644 index edfaf73b6..000000000 --- a/vendor/github.com/spf13/viper/internal/features/bind_struct_default.go +++ /dev/null @@ -1,5 +0,0 @@ -//go:build !viper_bind_struct - -package features - -const BindStruct = false diff --git a/vendor/github.com/spf13/viper/logger.go b/vendor/github.com/spf13/viper/logger.go deleted file mode 100644 index 8938053b3..000000000 --- a/vendor/github.com/spf13/viper/logger.go +++ /dev/null @@ -1,68 +0,0 @@ -package viper - -import ( - "context" - - slog "github.com/sagikazarmark/slog-shim" -) - -// Logger is a unified interface for various logging use cases and practices, including: -// - leveled logging -// - structured logging -// -// Deprecated: use `log/slog` instead. -type Logger interface { - // Trace logs a Trace event. - // - // Even more fine-grained information than Debug events. - // Loggers not supporting this level should fall back to Debug. - Trace(msg string, keyvals ...any) - - // Debug logs a Debug event. - // - // A verbose series of information events. - // They are useful when debugging the system. - Debug(msg string, keyvals ...any) - - // Info logs an Info event. - // - // General information about what's happening inside the system. - Info(msg string, keyvals ...any) - - // Warn logs a Warn(ing) event. - // - // Non-critical events that should be looked at. - Warn(msg string, keyvals ...any) - - // Error logs an Error event. - // - // Critical events that require immediate attention. - // Loggers commonly provide Fatal and Panic levels above Error level, - // but exiting and panicking is out of scope for a logging library. - Error(msg string, keyvals ...any) -} - -// WithLogger sets a custom logger. 
-func WithLogger(l *slog.Logger) Option { - return optionFunc(func(v *Viper) { - v.logger = l - }) -} - -type discardHandler struct{} - -func (n *discardHandler) Enabled(_ context.Context, _ slog.Level) bool { - return false -} - -func (n *discardHandler) Handle(_ context.Context, _ slog.Record) error { - return nil -} - -func (n *discardHandler) WithAttrs(_ []slog.Attr) slog.Handler { - return n -} - -func (n *discardHandler) WithGroup(_ string) slog.Handler { - return n -} diff --git a/vendor/github.com/spf13/viper/util.go b/vendor/github.com/spf13/viper/util.go deleted file mode 100644 index 117c6ac31..000000000 --- a/vendor/github.com/spf13/viper/util.go +++ /dev/null @@ -1,223 +0,0 @@ -// Copyright © 2014 Steve Francia . -// -// Use of this source code is governed by an MIT-style -// license that can be found in the LICENSE file. - -// Viper is a application configuration system. -// It believes that applications can be configured a variety of ways -// via flags, ENVIRONMENT variables, configuration files retrieved -// from the file system, or a remote key/value store. - -package viper - -import ( - "fmt" - "os" - "path/filepath" - "runtime" - "strings" - "unicode" - - slog "github.com/sagikazarmark/slog-shim" - "github.com/spf13/cast" -) - -// ConfigParseError denotes failing to parse configuration file. -type ConfigParseError struct { - err error -} - -// Error returns the formatted configuration error. -func (pe ConfigParseError) Error() string { - return fmt.Sprintf("While parsing config: %s", pe.err.Error()) -} - -// Unwrap returns the wrapped error. -func (pe ConfigParseError) Unwrap() error { - return pe.err -} - -// toCaseInsensitiveValue checks if the value is a map; -// if so, create a copy and lower-case the keys recursively. -func toCaseInsensitiveValue(value any) any { - switch v := value.(type) { - case map[any]any: - value = copyAndInsensitiviseMap(cast.ToStringMap(v)) - case map[string]any: - value = copyAndInsensitiviseMap(v) - } - - return value -} - -// copyAndInsensitiviseMap behaves like insensitiviseMap, but creates a copy of -// any map it makes case insensitive. 
-func copyAndInsensitiviseMap(m map[string]any) map[string]any { - nm := make(map[string]any) - - for key, val := range m { - lkey := strings.ToLower(key) - switch v := val.(type) { - case map[any]any: - nm[lkey] = copyAndInsensitiviseMap(cast.ToStringMap(v)) - case map[string]any: - nm[lkey] = copyAndInsensitiviseMap(v) - default: - nm[lkey] = v - } - } - - return nm -} - -func insensitiviseVal(val any) any { - switch v := val.(type) { - case map[any]any: - // nested map: cast and recursively insensitivise - val = cast.ToStringMap(val) - insensitiviseMap(val.(map[string]any)) - case map[string]any: - // nested map: recursively insensitivise - insensitiviseMap(v) - case []any: - // nested array: recursively insensitivise - insensitiveArray(v) - } - return val -} - -func insensitiviseMap(m map[string]any) { - for key, val := range m { - val = insensitiviseVal(val) - lower := strings.ToLower(key) - if key != lower { - // remove old key (not lower-cased) - delete(m, key) - } - // update map - m[lower] = val - } -} - -func insensitiveArray(a []any) { - for i, val := range a { - a[i] = insensitiviseVal(val) - } -} - -func absPathify(logger *slog.Logger, inPath string) string { - logger.Info("trying to resolve absolute path", "path", inPath) - - if inPath == "$HOME" || strings.HasPrefix(inPath, "$HOME"+string(os.PathSeparator)) { - inPath = userHomeDir() + inPath[5:] - } - - inPath = os.ExpandEnv(inPath) - - if filepath.IsAbs(inPath) { - return filepath.Clean(inPath) - } - - p, err := filepath.Abs(inPath) - if err == nil { - return filepath.Clean(p) - } - - logger.Error(fmt.Errorf("could not discover absolute path: %w", err).Error()) - - return "" -} - -func stringInSlice(a string, list []string) bool { - for _, b := range list { - if b == a { - return true - } - } - return false -} - -func userHomeDir() string { - if runtime.GOOS == "windows" { - home := os.Getenv("HOMEDRIVE") + os.Getenv("HOMEPATH") - if home == "" { - home = os.Getenv("USERPROFILE") - } - return home - } - return os.Getenv("HOME") -} - -func safeMul(a, b uint) uint { - c := a * b - if a > 1 && b > 1 && c/b != a { - return 0 - } - return c -} - -// parseSizeInBytes converts strings like 1GB or 12 mb into an unsigned integer number of bytes. -func parseSizeInBytes(sizeStr string) uint { - sizeStr = strings.TrimSpace(sizeStr) - lastChar := len(sizeStr) - 1 - multiplier := uint(1) - - if lastChar > 0 { - if sizeStr[lastChar] == 'b' || sizeStr[lastChar] == 'B' { - if lastChar > 1 { - switch unicode.ToLower(rune(sizeStr[lastChar-1])) { - case 'k': - multiplier = 1 << 10 - sizeStr = strings.TrimSpace(sizeStr[:lastChar-1]) - case 'm': - multiplier = 1 << 20 - sizeStr = strings.TrimSpace(sizeStr[:lastChar-1]) - case 'g': - multiplier = 1 << 30 - sizeStr = strings.TrimSpace(sizeStr[:lastChar-1]) - default: - multiplier = 1 - sizeStr = strings.TrimSpace(sizeStr[:lastChar]) - } - } - } - } - - size := cast.ToInt(sizeStr) - if size < 0 { - size = 0 - } - - return safeMul(uint(size), multiplier) -} - -// deepSearch scans deep maps, following the key indexes listed in the -// sequence "path". -// The last value is expected to be another map, and is returned. -// -// In case intermediate keys do not exist, or map to a non-map value, -// a new map is created and inserted, and the search continues from there: -// the initial map "m" may be modified! 
-func deepSearch(m map[string]any, path []string) map[string]any { - for _, k := range path { - m2, ok := m[k] - if !ok { - // intermediate key does not exist - // => create it and continue from there - m3 := make(map[string]any) - m[k] = m3 - m = m3 - continue - } - m3, ok := m2.(map[string]any) - if !ok { - // intermediate key is a value - // => replace with a new map - m3 = make(map[string]any) - m[k] = m3 - } - // continue search from here - m = m3 - } - return m -} diff --git a/vendor/github.com/spf13/viper/viper.go b/vendor/github.com/spf13/viper/viper.go deleted file mode 100644 index 20eb4da17..000000000 --- a/vendor/github.com/spf13/viper/viper.go +++ /dev/null @@ -1,2258 +0,0 @@ -// Copyright © 2014 Steve Francia . -// -// Use of this source code is governed by an MIT-style -// license that can be found in the LICENSE file. - -// Viper is an application configuration system. -// It believes that applications can be configured a variety of ways -// via flags, ENVIRONMENT variables, configuration files retrieved -// from the file system, or a remote key/value store. - -// Each item takes precedence over the item below it: - -// overrides -// flag -// env -// config -// key/value store -// default - -package viper - -import ( - "bytes" - "encoding/csv" - "errors" - "fmt" - "io" - "os" - "path/filepath" - "reflect" - "strconv" - "strings" - "sync" - "time" - - "github.com/fsnotify/fsnotify" - "github.com/mitchellh/mapstructure" - slog "github.com/sagikazarmark/slog-shim" - "github.com/spf13/afero" - "github.com/spf13/cast" - "github.com/spf13/pflag" - - "github.com/spf13/viper/internal/encoding" - "github.com/spf13/viper/internal/encoding/dotenv" - "github.com/spf13/viper/internal/encoding/hcl" - "github.com/spf13/viper/internal/encoding/ini" - "github.com/spf13/viper/internal/encoding/javaproperties" - "github.com/spf13/viper/internal/encoding/json" - "github.com/spf13/viper/internal/encoding/toml" - "github.com/spf13/viper/internal/encoding/yaml" - "github.com/spf13/viper/internal/features" -) - -// ConfigMarshalError happens when failing to marshal the configuration. -type ConfigMarshalError struct { - err error -} - -// Error returns the formatted configuration error. -func (e ConfigMarshalError) Error() string { - return fmt.Sprintf("While marshaling config: %s", e.err.Error()) -} - -var v *Viper - -type RemoteResponse struct { - Value []byte - Error error -} - -func init() { - v = New() -} - -type remoteConfigFactory interface { - Get(rp RemoteProvider) (io.Reader, error) - Watch(rp RemoteProvider) (io.Reader, error) - WatchChannel(rp RemoteProvider) (<-chan *RemoteResponse, chan bool) -} - -// RemoteConfig is optional, see the remote package. -var RemoteConfig remoteConfigFactory - -// UnsupportedConfigError denotes encountering an unsupported -// configuration filetype. -type UnsupportedConfigError string - -// Error returns the formatted configuration error. -func (str UnsupportedConfigError) Error() string { - return fmt.Sprintf("Unsupported Config Type %q", string(str)) -} - -// UnsupportedRemoteProviderError denotes encountering an unsupported remote -// provider. Currently only etcd and Consul are supported. -type UnsupportedRemoteProviderError string - -// Error returns the formatted remote provider error. -func (str UnsupportedRemoteProviderError) Error() string { - return fmt.Sprintf("Unsupported Remote Provider Type %q", string(str)) -} - -// RemoteConfigError denotes encountering an error while trying to -// pull the configuration from the remote provider. 
-type RemoteConfigError string - -// Error returns the formatted remote provider error. -func (rce RemoteConfigError) Error() string { - return fmt.Sprintf("Remote Configurations Error: %s", string(rce)) -} - -// ConfigFileNotFoundError denotes failing to find configuration file. -type ConfigFileNotFoundError struct { - name, locations string -} - -// Error returns the formatted configuration error. -func (fnfe ConfigFileNotFoundError) Error() string { - return fmt.Sprintf("Config File %q Not Found in %q", fnfe.name, fnfe.locations) -} - -// ConfigFileAlreadyExistsError denotes failure to write new configuration file. -type ConfigFileAlreadyExistsError string - -// Error returns the formatted error when configuration already exists. -func (faee ConfigFileAlreadyExistsError) Error() string { - return fmt.Sprintf("Config File %q Already Exists", string(faee)) -} - -// A DecoderConfigOption can be passed to viper.Unmarshal to configure -// mapstructure.DecoderConfig options. -type DecoderConfigOption func(*mapstructure.DecoderConfig) - -// DecodeHook returns a DecoderConfigOption which overrides the default -// DecoderConfig.DecodeHook value, the default is: -// -// mapstructure.ComposeDecodeHookFunc( -// mapstructure.StringToTimeDurationHookFunc(), -// mapstructure.StringToSliceHookFunc(","), -// ) -func DecodeHook(hook mapstructure.DecodeHookFunc) DecoderConfigOption { - return func(c *mapstructure.DecoderConfig) { - c.DecodeHook = hook - } -} - -// Viper is a prioritized configuration registry. It -// maintains a set of configuration sources, fetches -// values to populate those, and provides them according -// to the source's priority. -// The priority of the sources is the following: -// 1. overrides -// 2. flags -// 3. env. variables -// 4. config file -// 5. key/value store -// 6. defaults -// -// For example, if values from the following sources were loaded: -// -// Defaults : { -// "secret": "", -// "user": "default", -// "endpoint": "https://localhost" -// } -// Config : { -// "user": "root" -// "secret": "defaultsecret" -// } -// Env : { -// "secret": "somesecretkey" -// } -// -// The resulting config will have the following values: -// -// { -// "secret": "somesecretkey", -// "user": "root", -// "endpoint": "https://localhost" -// } -// -// Note: Vipers are not safe for concurrent Get() and Set() operations. -type Viper struct { - // Delimiter that separates a list of keys - // used to access a nested value in one go - keyDelim string - - // A set of paths to look for the config file in - configPaths []string - - // The filesystem to read config from. - fs afero.Fs - - // A set of remote providers to search for the configuration - remoteProviders []*defaultRemoteProvider - - // Name of file to look for inside the path - configName string - configFile string - configType string - configPermissions os.FileMode - envPrefix string - - // Specific commands for ini parsing - iniLoadOptions ini.LoadOptions - - automaticEnvApplied bool - envKeyReplacer StringReplacer - allowEmptyEnv bool - - parents []string - config map[string]any - override map[string]any - defaults map[string]any - kvstore map[string]any - pflags map[string]FlagValue - env map[string][]string - aliases map[string]string - typeByDefValue bool - - onConfigChange func(fsnotify.Event) - - logger *slog.Logger - - // TODO: should probably be protected with a mutex - encoderRegistry *encoding.EncoderRegistry - decoderRegistry *encoding.DecoderRegistry -} - -// New returns an initialized Viper instance. 
-func New() *Viper { - v := new(Viper) - v.keyDelim = "." - v.configName = "config" - v.configPermissions = os.FileMode(0o644) - v.fs = afero.NewOsFs() - v.config = make(map[string]any) - v.parents = []string{} - v.override = make(map[string]any) - v.defaults = make(map[string]any) - v.kvstore = make(map[string]any) - v.pflags = make(map[string]FlagValue) - v.env = make(map[string][]string) - v.aliases = make(map[string]string) - v.typeByDefValue = false - v.logger = slog.New(&discardHandler{}) - - v.resetEncoding() - - return v -} - -// Option configures Viper using the functional options paradigm popularized by Rob Pike and Dave Cheney. -// If you're unfamiliar with this style, -// see https://commandcenter.blogspot.com/2014/01/self-referential-functions-and-design.html and -// https://dave.cheney.net/2014/10/17/functional-options-for-friendly-apis. -type Option interface { - apply(v *Viper) -} - -type optionFunc func(v *Viper) - -func (fn optionFunc) apply(v *Viper) { - fn(v) -} - -// KeyDelimiter sets the delimiter used for determining key parts. -// By default it's value is ".". -func KeyDelimiter(d string) Option { - return optionFunc(func(v *Viper) { - v.keyDelim = d - }) -} - -// StringReplacer applies a set of replacements to a string. -type StringReplacer interface { - // Replace returns a copy of s with all replacements performed. - Replace(s string) string -} - -// EnvKeyReplacer sets a replacer used for mapping environment variables to internal keys. -func EnvKeyReplacer(r StringReplacer) Option { - return optionFunc(func(v *Viper) { - v.envKeyReplacer = r - }) -} - -// NewWithOptions creates a new Viper instance. -func NewWithOptions(opts ...Option) *Viper { - v := New() - - for _, opt := range opts { - opt.apply(v) - } - - v.resetEncoding() - - return v -} - -// Reset is intended for testing, will reset all to default settings. -// In the public interface for the viper package so applications -// can use it in their testing as well. -func Reset() { - v = New() - SupportedExts = []string{"json", "toml", "yaml", "yml", "properties", "props", "prop", "hcl", "tfvars", "dotenv", "env", "ini"} - SupportedRemoteProviders = []string{"etcd", "etcd3", "consul", "firestore", "nats"} -} - -// TODO: make this lazy initialization instead. 
-func (v *Viper) resetEncoding() { - encoderRegistry := encoding.NewEncoderRegistry() - decoderRegistry := encoding.NewDecoderRegistry() - - { - codec := yaml.Codec{} - - encoderRegistry.RegisterEncoder("yaml", codec) - decoderRegistry.RegisterDecoder("yaml", codec) - - encoderRegistry.RegisterEncoder("yml", codec) - decoderRegistry.RegisterDecoder("yml", codec) - } - - { - codec := json.Codec{} - - encoderRegistry.RegisterEncoder("json", codec) - decoderRegistry.RegisterDecoder("json", codec) - } - - { - codec := toml.Codec{} - - encoderRegistry.RegisterEncoder("toml", codec) - decoderRegistry.RegisterDecoder("toml", codec) - } - - { - codec := hcl.Codec{} - - encoderRegistry.RegisterEncoder("hcl", codec) - decoderRegistry.RegisterDecoder("hcl", codec) - - encoderRegistry.RegisterEncoder("tfvars", codec) - decoderRegistry.RegisterDecoder("tfvars", codec) - } - - { - codec := ini.Codec{ - KeyDelimiter: v.keyDelim, - LoadOptions: v.iniLoadOptions, - } - - encoderRegistry.RegisterEncoder("ini", codec) - decoderRegistry.RegisterDecoder("ini", codec) - } - - { - codec := &javaproperties.Codec{ - KeyDelimiter: v.keyDelim, - } - - encoderRegistry.RegisterEncoder("properties", codec) - decoderRegistry.RegisterDecoder("properties", codec) - - encoderRegistry.RegisterEncoder("props", codec) - decoderRegistry.RegisterDecoder("props", codec) - - encoderRegistry.RegisterEncoder("prop", codec) - decoderRegistry.RegisterDecoder("prop", codec) - } - - { - codec := &dotenv.Codec{} - - encoderRegistry.RegisterEncoder("dotenv", codec) - decoderRegistry.RegisterDecoder("dotenv", codec) - - encoderRegistry.RegisterEncoder("env", codec) - decoderRegistry.RegisterDecoder("env", codec) - } - - v.encoderRegistry = encoderRegistry - v.decoderRegistry = decoderRegistry -} - -type defaultRemoteProvider struct { - provider string - endpoint string - path string - secretKeyring string -} - -func (rp defaultRemoteProvider) Provider() string { - return rp.provider -} - -func (rp defaultRemoteProvider) Endpoint() string { - return rp.endpoint -} - -func (rp defaultRemoteProvider) Path() string { - return rp.path -} - -func (rp defaultRemoteProvider) SecretKeyring() string { - return rp.secretKeyring -} - -// RemoteProvider stores the configuration necessary -// to connect to a remote key/value store. -// Optional secretKeyring to unencrypt encrypted values -// can be provided. -type RemoteProvider interface { - Provider() string - Endpoint() string - Path() string - SecretKeyring() string -} - -// SupportedExts are universally supported extensions. -var SupportedExts = []string{"json", "toml", "yaml", "yml", "properties", "props", "prop", "hcl", "tfvars", "dotenv", "env", "ini"} - -// SupportedRemoteProviders are universally supported remote providers. -var SupportedRemoteProviders = []string{"etcd", "etcd3", "consul", "firestore", "nats"} - -// OnConfigChange sets the event handler that is called when a config file changes. -func OnConfigChange(run func(in fsnotify.Event)) { v.OnConfigChange(run) } - -// OnConfigChange sets the event handler that is called when a config file changes. -func (v *Viper) OnConfigChange(run func(in fsnotify.Event)) { - v.onConfigChange = run -} - -// WatchConfig starts watching a config file for changes. -func WatchConfig() { v.WatchConfig() } - -// WatchConfig starts watching a config file for changes. 
-func (v *Viper) WatchConfig() { - initWG := sync.WaitGroup{} - initWG.Add(1) - go func() { - watcher, err := fsnotify.NewWatcher() - if err != nil { - v.logger.Error(fmt.Sprintf("failed to create watcher: %s", err)) - os.Exit(1) - } - defer watcher.Close() - // we have to watch the entire directory to pick up renames/atomic saves in a cross-platform way - filename, err := v.getConfigFile() - if err != nil { - v.logger.Error(fmt.Sprintf("get config file: %s", err)) - initWG.Done() - return - } - - configFile := filepath.Clean(filename) - configDir, _ := filepath.Split(configFile) - realConfigFile, _ := filepath.EvalSymlinks(filename) - - eventsWG := sync.WaitGroup{} - eventsWG.Add(1) - go func() { - for { - select { - case event, ok := <-watcher.Events: - if !ok { // 'Events' channel is closed - eventsWG.Done() - return - } - currentConfigFile, _ := filepath.EvalSymlinks(filename) - // we only care about the config file with the following cases: - // 1 - if the config file was modified or created - // 2 - if the real path to the config file changed (eg: k8s ConfigMap replacement) - if (filepath.Clean(event.Name) == configFile && - (event.Has(fsnotify.Write) || event.Has(fsnotify.Create))) || - (currentConfigFile != "" && currentConfigFile != realConfigFile) { - realConfigFile = currentConfigFile - err := v.ReadInConfig() - if err != nil { - v.logger.Error(fmt.Sprintf("read config file: %s", err)) - } - if v.onConfigChange != nil { - v.onConfigChange(event) - } - } else if filepath.Clean(event.Name) == configFile && event.Has(fsnotify.Remove) { - eventsWG.Done() - return - } - - case err, ok := <-watcher.Errors: - if ok { // 'Errors' channel is not closed - v.logger.Error(fmt.Sprintf("watcher error: %s", err)) - } - eventsWG.Done() - return - } - } - }() - watcher.Add(configDir) - initWG.Done() // done initializing the watch in this go routine, so the parent routine can move on... - eventsWG.Wait() // now, wait for event loop to end in this go-routine... - }() - initWG.Wait() // make sure that the go routine above fully ended before returning -} - -// SetConfigFile explicitly defines the path, name and extension of the config file. -// Viper will use this and not check any of the config paths. -func SetConfigFile(in string) { v.SetConfigFile(in) } - -func (v *Viper) SetConfigFile(in string) { - if in != "" { - v.configFile = in - } -} - -// SetEnvPrefix defines a prefix that ENVIRONMENT variables will use. -// E.g. if your prefix is "spf", the env registry will look for env -// variables that start with "SPF_". -func SetEnvPrefix(in string) { v.SetEnvPrefix(in) } - -func (v *Viper) SetEnvPrefix(in string) { - if in != "" { - v.envPrefix = in - } -} - -func GetEnvPrefix() string { return v.GetEnvPrefix() } - -func (v *Viper) GetEnvPrefix() string { - return v.envPrefix -} - -func (v *Viper) mergeWithEnvPrefix(in string) string { - if v.envPrefix != "" { - return strings.ToUpper(v.envPrefix + "_" + in) - } - - return strings.ToUpper(in) -} - -// AllowEmptyEnv tells Viper to consider set, -// but empty environment variables as valid values instead of falling back. -// For backward compatibility reasons this is false by default. -func AllowEmptyEnv(allowEmptyEnv bool) { v.AllowEmptyEnv(allowEmptyEnv) } - -func (v *Viper) AllowEmptyEnv(allowEmptyEnv bool) { - v.allowEmptyEnv = allowEmptyEnv -} - -// TODO: should getEnv logic be moved into find(). 
Can generalize the use of -// rewriting keys many things, Ex: Get('someKey') -> some_key -// (camel case to snake case for JSON keys perhaps) - -// getEnv is a wrapper around os.Getenv which replaces characters in the original -// key. This allows env vars which have different keys than the config object -// keys. -func (v *Viper) getEnv(key string) (string, bool) { - if v.envKeyReplacer != nil { - key = v.envKeyReplacer.Replace(key) - } - - val, ok := os.LookupEnv(key) - - return val, ok && (v.allowEmptyEnv || val != "") -} - -// ConfigFileUsed returns the file used to populate the config registry. -func ConfigFileUsed() string { return v.ConfigFileUsed() } -func (v *Viper) ConfigFileUsed() string { return v.configFile } - -// AddConfigPath adds a path for Viper to search for the config file in. -// Can be called multiple times to define multiple search paths. -func AddConfigPath(in string) { v.AddConfigPath(in) } - -func (v *Viper) AddConfigPath(in string) { - if in != "" { - absin := absPathify(v.logger, in) - - v.logger.Info("adding path to search paths", "path", absin) - if !stringInSlice(absin, v.configPaths) { - v.configPaths = append(v.configPaths, absin) - } - } -} - -// AddRemoteProvider adds a remote configuration source. -// Remote Providers are searched in the order they are added. -// provider is a string value: "etcd", "etcd3", "consul", "firestore" or "nats" are currently supported. -// endpoint is the url. etcd requires http://ip:port, consul requires ip:port, nats requires nats://ip:port -// path is the path in the k/v store to retrieve configuration -// To retrieve a config file called myapp.json from /configs/myapp.json -// you should set path to /configs and set config name (SetConfigName()) to -// "myapp". -func AddRemoteProvider(provider, endpoint, path string) error { - return v.AddRemoteProvider(provider, endpoint, path) -} - -func (v *Viper) AddRemoteProvider(provider, endpoint, path string) error { - if !stringInSlice(provider, SupportedRemoteProviders) { - return UnsupportedRemoteProviderError(provider) - } - if provider != "" && endpoint != "" { - v.logger.Info("adding remote provider", "provider", provider, "endpoint", endpoint) - - rp := &defaultRemoteProvider{ - endpoint: endpoint, - provider: provider, - path: path, - } - if !v.providerPathExists(rp) { - v.remoteProviders = append(v.remoteProviders, rp) - } - } - return nil -} - -// AddSecureRemoteProvider adds a remote configuration source. -// Secure Remote Providers are searched in the order they are added. -// provider is a string value: "etcd", "etcd3", "consul", "firestore" or "nats" are currently supported. -// endpoint is the url. etcd requires http://ip:port consul requires ip:port -// secretkeyring is the filepath to your openpgp secret keyring. e.g. /etc/secrets/myring.gpg -// path is the path in the k/v store to retrieve configuration -// To retrieve a config file called myapp.json from /configs/myapp.json -// you should set path to /configs and set config name (SetConfigName()) to -// "myapp". -// Secure Remote Providers are implemented with github.com/bketelsen/crypt. 
-func AddSecureRemoteProvider(provider, endpoint, path, secretkeyring string) error { - return v.AddSecureRemoteProvider(provider, endpoint, path, secretkeyring) -} - -func (v *Viper) AddSecureRemoteProvider(provider, endpoint, path, secretkeyring string) error { - if !stringInSlice(provider, SupportedRemoteProviders) { - return UnsupportedRemoteProviderError(provider) - } - if provider != "" && endpoint != "" { - v.logger.Info("adding remote provider", "provider", provider, "endpoint", endpoint) - - rp := &defaultRemoteProvider{ - endpoint: endpoint, - provider: provider, - path: path, - secretKeyring: secretkeyring, - } - if !v.providerPathExists(rp) { - v.remoteProviders = append(v.remoteProviders, rp) - } - } - return nil -} - -func (v *Viper) providerPathExists(p *defaultRemoteProvider) bool { - for _, y := range v.remoteProviders { - if reflect.DeepEqual(y, p) { - return true - } - } - return false -} - -// searchMap recursively searches for a value for path in source map. -// Returns nil if not found. -// Note: This assumes that the path entries and map keys are lower cased. -func (v *Viper) searchMap(source map[string]any, path []string) any { - if len(path) == 0 { - return source - } - - next, ok := source[path[0]] - if ok { - // Fast path - if len(path) == 1 { - return next - } - - // Nested case - switch next := next.(type) { - case map[any]any: - return v.searchMap(cast.ToStringMap(next), path[1:]) - case map[string]any: - // Type assertion is safe here since it is only reached - // if the type of `next` is the same as the type being asserted - return v.searchMap(next, path[1:]) - default: - // got a value but nested key expected, return "nil" for not found - return nil - } - } - return nil -} - -// searchIndexableWithPathPrefixes recursively searches for a value for path in source map/slice. -// -// While searchMap() considers each path element as a single map key or slice index, this -// function searches for, and prioritizes, merged path elements. -// e.g., if in the source, "foo" is defined with a sub-key "bar", and "foo.bar" -// is also defined, this latter value is returned for path ["foo", "bar"]. -// -// This should be useful only at config level (other maps may not contain dots -// in their keys). -// -// Note: This assumes that the path entries and map keys are lower cased. -func (v *Viper) searchIndexableWithPathPrefixes(source any, path []string) any { - if len(path) == 0 { - return source - } - - // search for path prefixes, starting from the longest one - for i := len(path); i > 0; i-- { - prefixKey := strings.ToLower(strings.Join(path[0:i], v.keyDelim)) - - var val any - switch sourceIndexable := source.(type) { - case []any: - val = v.searchSliceWithPathPrefixes(sourceIndexable, prefixKey, i, path) - case map[string]any: - val = v.searchMapWithPathPrefixes(sourceIndexable, prefixKey, i, path) - } - if val != nil { - return val - } - } - - // not found - return nil -} - -// searchSliceWithPathPrefixes searches for a value for path in sourceSlice -// -// This function is part of the searchIndexableWithPathPrefixes recurring search and -// should not be called directly from functions other than searchIndexableWithPathPrefixes. 
-func (v *Viper) searchSliceWithPathPrefixes( - sourceSlice []any, - prefixKey string, - pathIndex int, - path []string, -) any { - // if the prefixKey is not a number or it is out of bounds of the slice - index, err := strconv.Atoi(prefixKey) - if err != nil || len(sourceSlice) <= index { - return nil - } - - next := sourceSlice[index] - - // Fast path - if pathIndex == len(path) { - return next - } - - switch n := next.(type) { - case map[any]any: - return v.searchIndexableWithPathPrefixes(cast.ToStringMap(n), path[pathIndex:]) - case map[string]any, []any: - return v.searchIndexableWithPathPrefixes(n, path[pathIndex:]) - default: - // got a value but nested key expected, do nothing and look for next prefix - } - - // not found - return nil -} - -// searchMapWithPathPrefixes searches for a value for path in sourceMap -// -// This function is part of the searchIndexableWithPathPrefixes recurring search and -// should not be called directly from functions other than searchIndexableWithPathPrefixes. -func (v *Viper) searchMapWithPathPrefixes( - sourceMap map[string]any, - prefixKey string, - pathIndex int, - path []string, -) any { - next, ok := sourceMap[prefixKey] - if !ok { - return nil - } - - // Fast path - if pathIndex == len(path) { - return next - } - - // Nested case - switch n := next.(type) { - case map[any]any: - return v.searchIndexableWithPathPrefixes(cast.ToStringMap(n), path[pathIndex:]) - case map[string]any, []any: - return v.searchIndexableWithPathPrefixes(n, path[pathIndex:]) - default: - // got a value but nested key expected, do nothing and look for next prefix - } - - // not found - return nil -} - -// isPathShadowedInDeepMap makes sure the given path is not shadowed somewhere -// on its path in the map. -// e.g., if "foo.bar" has a value in the given map, it “shadows” -// -// "foo.bar.baz" in a lower-priority map -func (v *Viper) isPathShadowedInDeepMap(path []string, m map[string]any) string { - var parentVal any - for i := 1; i < len(path); i++ { - parentVal = v.searchMap(m, path[0:i]) - if parentVal == nil { - // not found, no need to add more path elements - return "" - } - switch parentVal.(type) { - case map[any]any: - continue - case map[string]any: - continue - default: - // parentVal is a regular value which shadows "path" - return strings.Join(path[0:i], v.keyDelim) - } - } - return "" -} - -// isPathShadowedInFlatMap makes sure the given path is not shadowed somewhere -// in a sub-path of the map. -// e.g., if "foo.bar" has a value in the given map, it “shadows” -// -// "foo.bar.baz" in a lower-priority map -func (v *Viper) isPathShadowedInFlatMap(path []string, mi any) string { - // unify input map - var m map[string]interface{} - switch miv := mi.(type) { - case map[string]string: - m = castMapStringToMapInterface(miv) - case map[string]FlagValue: - m = castMapFlagToMapInterface(miv) - default: - return "" - } - - // scan paths - var parentKey string - for i := 1; i < len(path); i++ { - parentKey = strings.Join(path[0:i], v.keyDelim) - if _, ok := m[parentKey]; ok { - return parentKey - } - } - return "" -} - -// isPathShadowedInAutoEnv makes sure the given path is not shadowed somewhere -// in the environment, when automatic env is on. 
-// e.g., if "foo.bar" has a value in the environment, it “shadows” -// -// "foo.bar.baz" in a lower-priority map -func (v *Viper) isPathShadowedInAutoEnv(path []string) string { - var parentKey string - for i := 1; i < len(path); i++ { - parentKey = strings.Join(path[0:i], v.keyDelim) - if _, ok := v.getEnv(v.mergeWithEnvPrefix(parentKey)); ok { - return parentKey - } - } - return "" -} - -// SetTypeByDefaultValue enables or disables the inference of a key value's -// type when the Get function is used based upon a key's default value as -// opposed to the value returned based on the normal fetch logic. -// -// For example, if a key has a default value of []string{} and the same key -// is set via an environment variable to "a b c", a call to the Get function -// would return a string slice for the key if the key's type is inferred by -// the default value and the Get function would return: -// -// []string {"a", "b", "c"} -// -// Otherwise the Get function would return: -// -// "a b c" -func SetTypeByDefaultValue(enable bool) { v.SetTypeByDefaultValue(enable) } - -func (v *Viper) SetTypeByDefaultValue(enable bool) { - v.typeByDefValue = enable -} - -// GetViper gets the global Viper instance. -func GetViper() *Viper { - return v -} - -// Get can retrieve any value given the key to use. -// Get is case-insensitive for a key. -// Get has the behavior of returning the value associated with the first -// place from where it is set. Viper will check in the following order: -// override, flag, env, config file, key/value store, default -// -// Get returns an interface. For a specific value use one of the Get____ methods. -func Get(key string) any { return v.Get(key) } - -func (v *Viper) Get(key string) any { - lcaseKey := strings.ToLower(key) - val := v.find(lcaseKey, true) - if val == nil { - return nil - } - - if v.typeByDefValue { - // TODO(bep) this branch isn't covered by a single test. - valType := val - path := strings.Split(lcaseKey, v.keyDelim) - defVal := v.searchMap(v.defaults, path) - if defVal != nil { - valType = defVal - } - - switch valType.(type) { - case bool: - return cast.ToBool(val) - case string: - return cast.ToString(val) - case int32, int16, int8, int: - return cast.ToInt(val) - case uint: - return cast.ToUint(val) - case uint32: - return cast.ToUint32(val) - case uint64: - return cast.ToUint64(val) - case int64: - return cast.ToInt64(val) - case float64, float32: - return cast.ToFloat64(val) - case time.Time: - return cast.ToTime(val) - case time.Duration: - return cast.ToDuration(val) - case []string: - return cast.ToStringSlice(val) - case []int: - return cast.ToIntSlice(val) - case []time.Duration: - return cast.ToDurationSlice(val) - } - } - - return val -} - -// Sub returns new Viper instance representing a sub tree of this instance. -// Sub is case-insensitive for a key. -func Sub(key string) *Viper { return v.Sub(key) } - -func (v *Viper) Sub(key string) *Viper { - subv := New() - data := v.Get(key) - if data == nil { - return nil - } - - if reflect.TypeOf(data).Kind() == reflect.Map { - subv.parents = append([]string(nil), v.parents...) - subv.parents = append(subv.parents, strings.ToLower(key)) - subv.automaticEnvApplied = v.automaticEnvApplied - subv.envPrefix = v.envPrefix - subv.envKeyReplacer = v.envKeyReplacer - subv.config = cast.ToStringMap(data) - return subv - } - return nil -} - -// GetString returns the value associated with the key as a string. 
-func GetString(key string) string { return v.GetString(key) } - -func (v *Viper) GetString(key string) string { - return cast.ToString(v.Get(key)) -} - -// GetBool returns the value associated with the key as a boolean. -func GetBool(key string) bool { return v.GetBool(key) } - -func (v *Viper) GetBool(key string) bool { - return cast.ToBool(v.Get(key)) -} - -// GetInt returns the value associated with the key as an integer. -func GetInt(key string) int { return v.GetInt(key) } - -func (v *Viper) GetInt(key string) int { - return cast.ToInt(v.Get(key)) -} - -// GetInt32 returns the value associated with the key as an integer. -func GetInt32(key string) int32 { return v.GetInt32(key) } - -func (v *Viper) GetInt32(key string) int32 { - return cast.ToInt32(v.Get(key)) -} - -// GetInt64 returns the value associated with the key as an integer. -func GetInt64(key string) int64 { return v.GetInt64(key) } - -func (v *Viper) GetInt64(key string) int64 { - return cast.ToInt64(v.Get(key)) -} - -// GetUint returns the value associated with the key as an unsigned integer. -func GetUint(key string) uint { return v.GetUint(key) } - -func (v *Viper) GetUint(key string) uint { - return cast.ToUint(v.Get(key)) -} - -// GetUint16 returns the value associated with the key as an unsigned integer. -func GetUint16(key string) uint16 { return v.GetUint16(key) } - -func (v *Viper) GetUint16(key string) uint16 { - return cast.ToUint16(v.Get(key)) -} - -// GetUint32 returns the value associated with the key as an unsigned integer. -func GetUint32(key string) uint32 { return v.GetUint32(key) } - -func (v *Viper) GetUint32(key string) uint32 { - return cast.ToUint32(v.Get(key)) -} - -// GetUint64 returns the value associated with the key as an unsigned integer. -func GetUint64(key string) uint64 { return v.GetUint64(key) } - -func (v *Viper) GetUint64(key string) uint64 { - return cast.ToUint64(v.Get(key)) -} - -// GetFloat64 returns the value associated with the key as a float64. -func GetFloat64(key string) float64 { return v.GetFloat64(key) } - -func (v *Viper) GetFloat64(key string) float64 { - return cast.ToFloat64(v.Get(key)) -} - -// GetTime returns the value associated with the key as time. -func GetTime(key string) time.Time { return v.GetTime(key) } - -func (v *Viper) GetTime(key string) time.Time { - return cast.ToTime(v.Get(key)) -} - -// GetDuration returns the value associated with the key as a duration. -func GetDuration(key string) time.Duration { return v.GetDuration(key) } - -func (v *Viper) GetDuration(key string) time.Duration { - return cast.ToDuration(v.Get(key)) -} - -// GetIntSlice returns the value associated with the key as a slice of int values. -func GetIntSlice(key string) []int { return v.GetIntSlice(key) } - -func (v *Viper) GetIntSlice(key string) []int { - return cast.ToIntSlice(v.Get(key)) -} - -// GetStringSlice returns the value associated with the key as a slice of strings. -func GetStringSlice(key string) []string { return v.GetStringSlice(key) } - -func (v *Viper) GetStringSlice(key string) []string { - return cast.ToStringSlice(v.Get(key)) -} - -// GetStringMap returns the value associated with the key as a map of interfaces. -func GetStringMap(key string) map[string]any { return v.GetStringMap(key) } - -func (v *Viper) GetStringMap(key string) map[string]any { - return cast.ToStringMap(v.Get(key)) -} - -// GetStringMapString returns the value associated with the key as a map of strings. 
-func GetStringMapString(key string) map[string]string { return v.GetStringMapString(key) } - -func (v *Viper) GetStringMapString(key string) map[string]string { - return cast.ToStringMapString(v.Get(key)) -} - -// GetStringMapStringSlice returns the value associated with the key as a map to a slice of strings. -func GetStringMapStringSlice(key string) map[string][]string { return v.GetStringMapStringSlice(key) } - -func (v *Viper) GetStringMapStringSlice(key string) map[string][]string { - return cast.ToStringMapStringSlice(v.Get(key)) -} - -// GetSizeInBytes returns the size of the value associated with the given key -// in bytes. -func GetSizeInBytes(key string) uint { return v.GetSizeInBytes(key) } - -func (v *Viper) GetSizeInBytes(key string) uint { - sizeStr := cast.ToString(v.Get(key)) - return parseSizeInBytes(sizeStr) -} - -// UnmarshalKey takes a single key and unmarshals it into a Struct. -func UnmarshalKey(key string, rawVal any, opts ...DecoderConfigOption) error { - return v.UnmarshalKey(key, rawVal, opts...) -} - -func (v *Viper) UnmarshalKey(key string, rawVal any, opts ...DecoderConfigOption) error { - return decode(v.Get(key), defaultDecoderConfig(rawVal, opts...)) -} - -// Unmarshal unmarshals the config into a Struct. Make sure that the tags -// on the fields of the structure are properly set. -func Unmarshal(rawVal any, opts ...DecoderConfigOption) error { - return v.Unmarshal(rawVal, opts...) -} - -func (v *Viper) Unmarshal(rawVal any, opts ...DecoderConfigOption) error { - keys := v.AllKeys() - - if features.BindStruct { - // TODO: make this optional? - structKeys, err := v.decodeStructKeys(rawVal, opts...) - if err != nil { - return err - } - - keys = append(keys, structKeys...) - } - - // TODO: struct keys should be enough? - return decode(v.getSettings(keys), defaultDecoderConfig(rawVal, opts...)) -} - -func (v *Viper) decodeStructKeys(input any, opts ...DecoderConfigOption) ([]string, error) { - var structKeyMap map[string]any - - err := decode(input, defaultDecoderConfig(&structKeyMap, opts...)) - if err != nil { - return nil, err - } - - flattenedStructKeyMap := v.flattenAndMergeMap(map[string]bool{}, structKeyMap, "") - - r := make([]string, 0, len(flattenedStructKeyMap)) - for v := range flattenedStructKeyMap { - r = append(r, v) - } - - return r, nil -} - -// defaultDecoderConfig returns default mapstructure.DecoderConfig with support -// of time.Duration values & string slices. -func defaultDecoderConfig(output any, opts ...DecoderConfigOption) *mapstructure.DecoderConfig { - c := &mapstructure.DecoderConfig{ - Metadata: nil, - Result: output, - WeaklyTypedInput: true, - DecodeHook: mapstructure.ComposeDecodeHookFunc( - mapstructure.StringToTimeDurationHookFunc(), - mapstructure.StringToSliceHookFunc(","), - ), - } - for _, opt := range opts { - opt(c) - } - return c -} - -// decode is a wrapper around mapstructure.Decode that mimics the WeakDecode functionality. -func decode(input any, config *mapstructure.DecoderConfig) error { - decoder, err := mapstructure.NewDecoder(config) - if err != nil { - return err - } - return decoder.Decode(input) -} - -// UnmarshalExact unmarshals the config into a Struct, erroring if a field is nonexistent -// in the destination struct. -func UnmarshalExact(rawVal any, opts ...DecoderConfigOption) error { - return v.UnmarshalExact(rawVal, opts...) -} - -func (v *Viper) UnmarshalExact(rawVal any, opts ...DecoderConfigOption) error { - config := defaultDecoderConfig(rawVal, opts...) 
- config.ErrorUnused = true - - keys := v.AllKeys() - - if features.BindStruct { - // TODO: make this optional? - structKeys, err := v.decodeStructKeys(rawVal, opts...) - if err != nil { - return err - } - - keys = append(keys, structKeys...) - } - - // TODO: struct keys should be enough? - return decode(v.getSettings(keys), config) -} - -// BindPFlags binds a full flag set to the configuration, using each flag's long -// name as the config key. -func BindPFlags(flags *pflag.FlagSet) error { return v.BindPFlags(flags) } - -func (v *Viper) BindPFlags(flags *pflag.FlagSet) error { - return v.BindFlagValues(pflagValueSet{flags}) -} - -// BindPFlag binds a specific key to a pflag (as used by cobra). -// Example (where serverCmd is a Cobra instance): -// -// serverCmd.Flags().Int("port", 1138, "Port to run Application server on") -// Viper.BindPFlag("port", serverCmd.Flags().Lookup("port")) -func BindPFlag(key string, flag *pflag.Flag) error { return v.BindPFlag(key, flag) } - -func (v *Viper) BindPFlag(key string, flag *pflag.Flag) error { - if flag == nil { - return fmt.Errorf("flag for %q is nil", key) - } - return v.BindFlagValue(key, pflagValue{flag}) -} - -// BindFlagValues binds a full FlagValue set to the configuration, using each flag's long -// name as the config key. -func BindFlagValues(flags FlagValueSet) error { return v.BindFlagValues(flags) } - -func (v *Viper) BindFlagValues(flags FlagValueSet) (err error) { - flags.VisitAll(func(flag FlagValue) { - if err = v.BindFlagValue(flag.Name(), flag); err != nil { - return - } - }) - return nil -} - -// BindFlagValue binds a specific key to a FlagValue. -func BindFlagValue(key string, flag FlagValue) error { return v.BindFlagValue(key, flag) } - -func (v *Viper) BindFlagValue(key string, flag FlagValue) error { - if flag == nil { - return fmt.Errorf("flag for %q is nil", key) - } - v.pflags[strings.ToLower(key)] = flag - return nil -} - -// BindEnv binds a Viper key to a ENV variable. -// ENV variables are case sensitive. -// If only a key is provided, it will use the env key matching the key, uppercased. -// If more arguments are provided, they will represent the env variable names that -// should bind to this key and will be taken in the specified order. -// EnvPrefix will be used when set when env name is not provided. -func BindEnv(input ...string) error { return v.BindEnv(input...) } - -func (v *Viper) BindEnv(input ...string) error { - if len(input) == 0 { - return fmt.Errorf("missing key to bind to") - } - - key := strings.ToLower(input[0]) - - if len(input) == 1 { - v.env[key] = append(v.env[key], v.mergeWithEnvPrefix(key)) - } else { - v.env[key] = append(v.env[key], input[1:]...) - } - - return nil -} - -// MustBindEnv wraps BindEnv in a panic. -// If there is an error binding an environment variable, MustBindEnv will -// panic. -func MustBindEnv(input ...string) { v.MustBindEnv(input...) } - -func (v *Viper) MustBindEnv(input ...string) { - if err := v.BindEnv(input...); err != nil { - panic(fmt.Sprintf("error while binding environment variable: %v", err)) - } -} - -// Given a key, find the value. -// -// Viper will check to see if an alias exists first. -// Viper will then check in the following order: -// flag, env, config file, key/value store. -// Lastly, if no value was found and flagDefault is true, and if the key -// corresponds to a flag, the flag's default value is returned. -// -// Note: this assumes a lower-cased key given. 
-func (v *Viper) find(lcaseKey string, flagDefault bool) any { - var ( - val any - exists bool - path = strings.Split(lcaseKey, v.keyDelim) - nested = len(path) > 1 - ) - - // compute the path through the nested maps to the nested value - if nested && v.isPathShadowedInDeepMap(path, castMapStringToMapInterface(v.aliases)) != "" { - return nil - } - - // if the requested key is an alias, then return the proper key - lcaseKey = v.realKey(lcaseKey) - path = strings.Split(lcaseKey, v.keyDelim) - nested = len(path) > 1 - - // Set() override first - val = v.searchMap(v.override, path) - if val != nil { - return val - } - if nested && v.isPathShadowedInDeepMap(path, v.override) != "" { - return nil - } - - // PFlag override next - flag, exists := v.pflags[lcaseKey] - if exists && flag.HasChanged() { - switch flag.ValueType() { - case "int", "int8", "int16", "int32", "int64": - return cast.ToInt(flag.ValueString()) - case "bool": - return cast.ToBool(flag.ValueString()) - case "stringSlice", "stringArray": - s := strings.TrimPrefix(flag.ValueString(), "[") - s = strings.TrimSuffix(s, "]") - res, _ := readAsCSV(s) - return res - case "intSlice": - s := strings.TrimPrefix(flag.ValueString(), "[") - s = strings.TrimSuffix(s, "]") - res, _ := readAsCSV(s) - return cast.ToIntSlice(res) - case "durationSlice": - s := strings.TrimPrefix(flag.ValueString(), "[") - s = strings.TrimSuffix(s, "]") - slice := strings.Split(s, ",") - return cast.ToDurationSlice(slice) - case "stringToString": - return stringToStringConv(flag.ValueString()) - case "stringToInt": - return stringToIntConv(flag.ValueString()) - default: - return flag.ValueString() - } - } - if nested && v.isPathShadowedInFlatMap(path, v.pflags) != "" { - return nil - } - - // Env override next - if v.automaticEnvApplied { - envKey := strings.Join(append(v.parents, lcaseKey), ".") - // even if it hasn't been registered, if automaticEnv is used, - // check any Get request - if val, ok := v.getEnv(v.mergeWithEnvPrefix(envKey)); ok { - return val - } - if nested && v.isPathShadowedInAutoEnv(path) != "" { - return nil - } - } - envkeys, exists := v.env[lcaseKey] - if exists { - for _, envkey := range envkeys { - if val, ok := v.getEnv(envkey); ok { - return val - } - } - } - if nested && v.isPathShadowedInFlatMap(path, v.env) != "" { - return nil - } - - // Config file next - val = v.searchIndexableWithPathPrefixes(v.config, path) - if val != nil { - return val - } - if nested && v.isPathShadowedInDeepMap(path, v.config) != "" { - return nil - } - - // K/V store next - val = v.searchMap(v.kvstore, path) - if val != nil { - return val - } - if nested && v.isPathShadowedInDeepMap(path, v.kvstore) != "" { - return nil - } - - // Default next - val = v.searchMap(v.defaults, path) - if val != nil { - return val - } - if nested && v.isPathShadowedInDeepMap(path, v.defaults) != "" { - return nil - } - - if flagDefault { - // last chance: if no value is found and a flag does exist for the key, - // get the flag's default value even if the flag's value has not been set. 
- if flag, exists := v.pflags[lcaseKey]; exists { - switch flag.ValueType() { - case "int", "int8", "int16", "int32", "int64": - return cast.ToInt(flag.ValueString()) - case "bool": - return cast.ToBool(flag.ValueString()) - case "stringSlice", "stringArray": - s := strings.TrimPrefix(flag.ValueString(), "[") - s = strings.TrimSuffix(s, "]") - res, _ := readAsCSV(s) - return res - case "intSlice": - s := strings.TrimPrefix(flag.ValueString(), "[") - s = strings.TrimSuffix(s, "]") - res, _ := readAsCSV(s) - return cast.ToIntSlice(res) - case "stringToString": - return stringToStringConv(flag.ValueString()) - case "stringToInt": - return stringToIntConv(flag.ValueString()) - case "durationSlice": - s := strings.TrimPrefix(flag.ValueString(), "[") - s = strings.TrimSuffix(s, "]") - slice := strings.Split(s, ",") - return cast.ToDurationSlice(slice) - default: - return flag.ValueString() - } - } - // last item, no need to check shadowing - } - - return nil -} - -func readAsCSV(val string) ([]string, error) { - if val == "" { - return []string{}, nil - } - stringReader := strings.NewReader(val) - csvReader := csv.NewReader(stringReader) - return csvReader.Read() -} - -// mostly copied from pflag's implementation of this operation here https://github.com/spf13/pflag/blob/master/string_to_string.go#L79 -// alterations are: errors are swallowed, map[string]any is returned in order to enable cast.ToStringMap. -func stringToStringConv(val string) any { - val = strings.Trim(val, "[]") - // An empty string would cause an empty map - if val == "" { - return map[string]any{} - } - r := csv.NewReader(strings.NewReader(val)) - ss, err := r.Read() - if err != nil { - return nil - } - out := make(map[string]any, len(ss)) - for _, pair := range ss { - k, vv, found := strings.Cut(pair, "=") - if !found { - return nil - } - out[k] = vv - } - return out -} - -// mostly copied from pflag's implementation of this operation here https://github.com/spf13/pflag/blob/d5e0c0615acee7028e1e2740a11102313be88de1/string_to_int.go#L68 -// alterations are: errors are swallowed, map[string]any is returned in order to enable cast.ToStringMap. -func stringToIntConv(val string) any { - val = strings.Trim(val, "[]") - // An empty string would cause an empty map - if val == "" { - return map[string]any{} - } - ss := strings.Split(val, ",") - out := make(map[string]any, len(ss)) - for _, pair := range ss { - k, vv, found := strings.Cut(pair, "=") - if !found { - return nil - } - var err error - out[k], err = strconv.Atoi(vv) - if err != nil { - return nil - } - } - return out -} - -// IsSet checks to see if the key has been set in any of the data locations. -// IsSet is case-insensitive for a key. -func IsSet(key string) bool { return v.IsSet(key) } - -func (v *Viper) IsSet(key string) bool { - lcaseKey := strings.ToLower(key) - val := v.find(lcaseKey, false) - return val != nil -} - -// AutomaticEnv makes Viper check if environment variables match any of the existing keys -// (config, default or flags). If matching env vars are found, they are loaded into Viper. -func AutomaticEnv() { v.AutomaticEnv() } - -func (v *Viper) AutomaticEnv() { - v.automaticEnvApplied = true -} - -// SetEnvKeyReplacer sets the strings.Replacer on the viper object -// Useful for mapping an environmental variable to a key that does -// not match it. 
-func SetEnvKeyReplacer(r *strings.Replacer) { v.SetEnvKeyReplacer(r) } - -func (v *Viper) SetEnvKeyReplacer(r *strings.Replacer) { - v.envKeyReplacer = r -} - -// RegisterAlias creates an alias that provides another accessor for the same key. -// This enables one to change a name without breaking the application. -func RegisterAlias(alias, key string) { v.RegisterAlias(alias, key) } - -func (v *Viper) RegisterAlias(alias, key string) { - v.registerAlias(alias, strings.ToLower(key)) -} - -func (v *Viper) registerAlias(alias, key string) { - alias = strings.ToLower(alias) - if alias != key && alias != v.realKey(key) { - _, exists := v.aliases[alias] - - if !exists { - // if we alias something that exists in one of the maps to another - // name, we'll never be able to get that value using the original - // name, so move the config value to the new realkey. - if val, ok := v.config[alias]; ok { - delete(v.config, alias) - v.config[key] = val - } - if val, ok := v.kvstore[alias]; ok { - delete(v.kvstore, alias) - v.kvstore[key] = val - } - if val, ok := v.defaults[alias]; ok { - delete(v.defaults, alias) - v.defaults[key] = val - } - if val, ok := v.override[alias]; ok { - delete(v.override, alias) - v.override[key] = val - } - v.aliases[alias] = key - } - } else { - v.logger.Warn("creating circular reference alias", "alias", alias, "key", key, "real_key", v.realKey(key)) - } -} - -func (v *Viper) realKey(key string) string { - newkey, exists := v.aliases[key] - if exists { - v.logger.Debug("key is an alias", "alias", key, "to", newkey) - - return v.realKey(newkey) - } - return key -} - -// InConfig checks to see if the given key (or an alias) is in the config file. -func InConfig(key string) bool { return v.InConfig(key) } - -func (v *Viper) InConfig(key string) bool { - lcaseKey := strings.ToLower(key) - - // if the requested key is an alias, then return the proper key - lcaseKey = v.realKey(lcaseKey) - path := strings.Split(lcaseKey, v.keyDelim) - - return v.searchIndexableWithPathPrefixes(v.config, path) != nil -} - -// SetDefault sets the default value for this key. -// SetDefault is case-insensitive for a key. -// Default only used when no value is provided by the user via flag, config or ENV. -func SetDefault(key string, value any) { v.SetDefault(key, value) } - -func (v *Viper) SetDefault(key string, value any) { - // If alias passed in, then set the proper default - key = v.realKey(strings.ToLower(key)) - value = toCaseInsensitiveValue(value) - - path := strings.Split(key, v.keyDelim) - lastKey := strings.ToLower(path[len(path)-1]) - deepestMap := deepSearch(v.defaults, path[0:len(path)-1]) - - // set innermost value - deepestMap[lastKey] = value -} - -// Set sets the value for the key in the override register. -// Set is case-insensitive for a key. -// Will be used instead of values obtained via -// flags, config file, ENV, default, or key/value store. -func Set(key string, value any) { v.Set(key, value) } - -func (v *Viper) Set(key string, value any) { - // If alias passed in, then set the proper override - key = v.realKey(strings.ToLower(key)) - value = toCaseInsensitiveValue(value) - - path := strings.Split(key, v.keyDelim) - lastKey := strings.ToLower(path[len(path)-1]) - deepestMap := deepSearch(v.override, path[0:len(path)-1]) - - // set innermost value - deepestMap[lastKey] = value -} - -// ReadInConfig will discover and load the configuration file from disk -// and key/value stores, searching in one of the defined paths. 
-func ReadInConfig() error { return v.ReadInConfig() } - -func (v *Viper) ReadInConfig() error { - v.logger.Info("attempting to read in config file") - filename, err := v.getConfigFile() - if err != nil { - return err - } - - if !stringInSlice(v.getConfigType(), SupportedExts) { - return UnsupportedConfigError(v.getConfigType()) - } - - v.logger.Debug("reading file", "file", filename) - file, err := afero.ReadFile(v.fs, filename) - if err != nil { - return err - } - - config := make(map[string]any) - - err = v.unmarshalReader(bytes.NewReader(file), config) - if err != nil { - return err - } - - v.config = config - return nil -} - -// MergeInConfig merges a new configuration with an existing config. -func MergeInConfig() error { return v.MergeInConfig() } - -func (v *Viper) MergeInConfig() error { - v.logger.Info("attempting to merge in config file") - filename, err := v.getConfigFile() - if err != nil { - return err - } - - if !stringInSlice(v.getConfigType(), SupportedExts) { - return UnsupportedConfigError(v.getConfigType()) - } - - file, err := afero.ReadFile(v.fs, filename) - if err != nil { - return err - } - - return v.MergeConfig(bytes.NewReader(file)) -} - -// ReadConfig will read a configuration file, setting existing keys to nil if the -// key does not exist in the file. -func ReadConfig(in io.Reader) error { return v.ReadConfig(in) } - -func (v *Viper) ReadConfig(in io.Reader) error { - v.config = make(map[string]any) - return v.unmarshalReader(in, v.config) -} - -// MergeConfig merges a new configuration with an existing config. -func MergeConfig(in io.Reader) error { return v.MergeConfig(in) } - -func (v *Viper) MergeConfig(in io.Reader) error { - cfg := make(map[string]any) - if err := v.unmarshalReader(in, cfg); err != nil { - return err - } - return v.MergeConfigMap(cfg) -} - -// MergeConfigMap merges the configuration from the map given with an existing config. -// Note that the map given may be modified. -func MergeConfigMap(cfg map[string]any) error { return v.MergeConfigMap(cfg) } - -func (v *Viper) MergeConfigMap(cfg map[string]any) error { - if v.config == nil { - v.config = make(map[string]any) - } - insensitiviseMap(cfg) - mergeMaps(cfg, v.config, nil) - return nil -} - -// WriteConfig writes the current configuration to a file. -func WriteConfig() error { return v.WriteConfig() } - -func (v *Viper) WriteConfig() error { - filename, err := v.getConfigFile() - if err != nil { - return err - } - return v.writeConfig(filename, true) -} - -// SafeWriteConfig writes current configuration to file only if the file does not exist. -func SafeWriteConfig() error { return v.SafeWriteConfig() } - -func (v *Viper) SafeWriteConfig() error { - if len(v.configPaths) < 1 { - return errors.New("missing configuration for 'configPath'") - } - return v.SafeWriteConfigAs(filepath.Join(v.configPaths[0], v.configName+"."+v.configType)) -} - -// WriteConfigAs writes current configuration to a given filename. -func WriteConfigAs(filename string) error { return v.WriteConfigAs(filename) } - -func (v *Viper) WriteConfigAs(filename string) error { - return v.writeConfig(filename, true) -} - -// SafeWriteConfigAs writes current configuration to a given filename if it does not exist. 
-func SafeWriteConfigAs(filename string) error { return v.SafeWriteConfigAs(filename) } - -func (v *Viper) SafeWriteConfigAs(filename string) error { - alreadyExists, err := afero.Exists(v.fs, filename) - if alreadyExists && err == nil { - return ConfigFileAlreadyExistsError(filename) - } - return v.writeConfig(filename, false) -} - -func (v *Viper) writeConfig(filename string, force bool) error { - v.logger.Info("attempting to write configuration to file") - - var configType string - - ext := filepath.Ext(filename) - if ext != "" && ext != filepath.Base(filename) { - configType = ext[1:] - } else { - configType = v.configType - } - if configType == "" { - return fmt.Errorf("config type could not be determined for %s", filename) - } - - if !stringInSlice(configType, SupportedExts) { - return UnsupportedConfigError(configType) - } - if v.config == nil { - v.config = make(map[string]any) - } - flags := os.O_CREATE | os.O_TRUNC | os.O_WRONLY - if !force { - flags |= os.O_EXCL - } - f, err := v.fs.OpenFile(filename, flags, v.configPermissions) - if err != nil { - return err - } - defer f.Close() - - if err := v.marshalWriter(f, configType); err != nil { - return err - } - - return f.Sync() -} - -// Unmarshal a Reader into a map. -// Should probably be an unexported function. -func unmarshalReader(in io.Reader, c map[string]any) error { - return v.unmarshalReader(in, c) -} - -func (v *Viper) unmarshalReader(in io.Reader, c map[string]any) error { - buf := new(bytes.Buffer) - buf.ReadFrom(in) - - switch format := strings.ToLower(v.getConfigType()); format { - case "yaml", "yml", "json", "toml", "hcl", "tfvars", "ini", "properties", "props", "prop", "dotenv", "env": - err := v.decoderRegistry.Decode(format, buf.Bytes(), c) - if err != nil { - return ConfigParseError{err} - } - } - - insensitiviseMap(c) - return nil -} - -// Marshal a map into Writer. -func (v *Viper) marshalWriter(f afero.File, configType string) error { - c := v.AllSettings() - switch configType { - case "yaml", "yml", "json", "toml", "hcl", "tfvars", "ini", "prop", "props", "properties", "dotenv", "env": - b, err := v.encoderRegistry.Encode(configType, c) - if err != nil { - return ConfigMarshalError{err} - } - - _, err = f.WriteString(string(b)) - if err != nil { - return ConfigMarshalError{err} - } - } - return nil -} - -func keyExists(k string, m map[string]any) string { - lk := strings.ToLower(k) - for mk := range m { - lmk := strings.ToLower(mk) - if lmk == lk { - return mk - } - } - return "" -} - -func castToMapStringInterface( - src map[any]any, -) map[string]any { - tgt := map[string]any{} - for k, v := range src { - tgt[fmt.Sprintf("%v", k)] = v - } - return tgt -} - -func castMapStringSliceToMapInterface(src map[string][]string) map[string]any { - tgt := map[string]any{} - for k, v := range src { - tgt[k] = v - } - return tgt -} - -func castMapStringToMapInterface(src map[string]string) map[string]any { - tgt := map[string]any{} - for k, v := range src { - tgt[k] = v - } - return tgt -} - -func castMapFlagToMapInterface(src map[string]FlagValue) map[string]any { - tgt := map[string]any{} - for k, v := range src { - tgt[k] = v - } - return tgt -} - -// mergeMaps merges two maps. The `itgt` parameter is for handling go-yaml's -// insistence on parsing nested structures as `map[any]any` -// instead of using a `string` as the key for nest structures beyond one level -// deep. Both map types are supported as there is a go-yaml fork that uses -// `map[string]any` instead. 
-func mergeMaps(src, tgt map[string]any, itgt map[any]any) { - for sk, sv := range src { - tk := keyExists(sk, tgt) - if tk == "" { - v.logger.Debug("", "tk", "\"\"", fmt.Sprintf("tgt[%s]", sk), sv) - tgt[sk] = sv - if itgt != nil { - itgt[sk] = sv - } - continue - } - - tv, ok := tgt[tk] - if !ok { - v.logger.Debug("", fmt.Sprintf("ok[%s]", tk), false, fmt.Sprintf("tgt[%s]", sk), sv) - tgt[sk] = sv - if itgt != nil { - itgt[sk] = sv - } - continue - } - - svType := reflect.TypeOf(sv) - tvType := reflect.TypeOf(tv) - - v.logger.Debug( - "processing", - "key", sk, - "st", svType, - "tt", tvType, - "sv", sv, - "tv", tv, - ) - - switch ttv := tv.(type) { - case map[any]any: - v.logger.Debug("merging maps (must convert)") - tsv, ok := sv.(map[any]any) - if !ok { - v.logger.Error( - "Could not cast sv to map[any]any", - "key", sk, - "st", svType, - "tt", tvType, - "sv", sv, - "tv", tv, - ) - continue - } - - ssv := castToMapStringInterface(tsv) - stv := castToMapStringInterface(ttv) - mergeMaps(ssv, stv, ttv) - case map[string]any: - v.logger.Debug("merging maps") - tsv, ok := sv.(map[string]any) - if !ok { - v.logger.Error( - "Could not cast sv to map[string]any", - "key", sk, - "st", svType, - "tt", tvType, - "sv", sv, - "tv", tv, - ) - continue - } - mergeMaps(tsv, ttv, nil) - default: - v.logger.Debug("setting value") - tgt[tk] = sv - if itgt != nil { - itgt[tk] = sv - } - } - } -} - -// ReadRemoteConfig attempts to get configuration from a remote source -// and read it in the remote configuration registry. -func ReadRemoteConfig() error { return v.ReadRemoteConfig() } - -func (v *Viper) ReadRemoteConfig() error { - return v.getKeyValueConfig() -} - -func WatchRemoteConfig() error { return v.WatchRemoteConfig() } -func (v *Viper) WatchRemoteConfig() error { - return v.watchKeyValueConfig() -} - -func (v *Viper) WatchRemoteConfigOnChannel() error { - return v.watchKeyValueConfigOnChannel() -} - -// Retrieve the first found remote configuration. -func (v *Viper) getKeyValueConfig() error { - if RemoteConfig == nil { - return RemoteConfigError("Enable the remote features by doing a blank import of the viper/remote package: '_ github.com/spf13/viper/remote'") - } - - if len(v.remoteProviders) == 0 { - return RemoteConfigError("No Remote Providers") - } - - for _, rp := range v.remoteProviders { - val, err := v.getRemoteConfig(rp) - if err != nil { - v.logger.Error(fmt.Errorf("get remote config: %w", err).Error()) - - continue - } - - v.kvstore = val - - return nil - } - return RemoteConfigError("No Files Found") -} - -func (v *Viper) getRemoteConfig(provider RemoteProvider) (map[string]any, error) { - reader, err := RemoteConfig.Get(provider) - if err != nil { - return nil, err - } - err = v.unmarshalReader(reader, v.kvstore) - return v.kvstore, err -} - -// Retrieve the first found remote configuration. -func (v *Viper) watchKeyValueConfigOnChannel() error { - if len(v.remoteProviders) == 0 { - return RemoteConfigError("No Remote Providers") - } - - for _, rp := range v.remoteProviders { - respc, _ := RemoteConfig.WatchChannel(rp) - // Todo: Add quit channel - go func(rc <-chan *RemoteResponse) { - for { - b := <-rc - reader := bytes.NewReader(b.Value) - v.unmarshalReader(reader, v.kvstore) - } - }(respc) - return nil - } - return RemoteConfigError("No Files Found") -} - -// Retrieve the first found remote configuration. 
-func (v *Viper) watchKeyValueConfig() error { - if len(v.remoteProviders) == 0 { - return RemoteConfigError("No Remote Providers") - } - - for _, rp := range v.remoteProviders { - val, err := v.watchRemoteConfig(rp) - if err != nil { - v.logger.Error(fmt.Errorf("watch remote config: %w", err).Error()) - - continue - } - v.kvstore = val - return nil - } - return RemoteConfigError("No Files Found") -} - -func (v *Viper) watchRemoteConfig(provider RemoteProvider) (map[string]any, error) { - reader, err := RemoteConfig.Watch(provider) - if err != nil { - return nil, err - } - err = v.unmarshalReader(reader, v.kvstore) - return v.kvstore, err -} - -// AllKeys returns all keys holding a value, regardless of where they are set. -// Nested keys are returned with a v.keyDelim separator. -func AllKeys() []string { return v.AllKeys() } - -func (v *Viper) AllKeys() []string { - m := map[string]bool{} - // add all paths, by order of descending priority to ensure correct shadowing - m = v.flattenAndMergeMap(m, castMapStringToMapInterface(v.aliases), "") - m = v.flattenAndMergeMap(m, v.override, "") - m = v.mergeFlatMap(m, castMapFlagToMapInterface(v.pflags)) - m = v.mergeFlatMap(m, castMapStringSliceToMapInterface(v.env)) - m = v.flattenAndMergeMap(m, v.config, "") - m = v.flattenAndMergeMap(m, v.kvstore, "") - m = v.flattenAndMergeMap(m, v.defaults, "") - - // convert set of paths to list - a := make([]string, 0, len(m)) - for x := range m { - a = append(a, x) - } - return a -} - -// flattenAndMergeMap recursively flattens the given map into a map[string]bool -// of key paths (used as a set, easier to manipulate than a []string): -// - each path is merged into a single key string, delimited with v.keyDelim -// - if a path is shadowed by an earlier value in the initial shadow map, -// it is skipped. -// -// The resulting set of paths is merged to the given shadow set at the same time. -func (v *Viper) flattenAndMergeMap(shadow map[string]bool, m map[string]any, prefix string) map[string]bool { - if shadow != nil && prefix != "" && shadow[prefix] { - // prefix is shadowed => nothing more to flatten - return shadow - } - if shadow == nil { - shadow = make(map[string]bool) - } - - var m2 map[string]any - if prefix != "" { - prefix += v.keyDelim - } - for k, val := range m { - fullKey := prefix + k - switch val := val.(type) { - case map[string]any: - m2 = val - case map[any]any: - m2 = cast.ToStringMap(val) - default: - // immediate value - shadow[strings.ToLower(fullKey)] = true - continue - } - // recursively merge to shadow map - shadow = v.flattenAndMergeMap(shadow, m2, fullKey) - } - return shadow -} - -// mergeFlatMap merges the given maps, excluding values of the second map -// shadowed by values from the first map. -func (v *Viper) mergeFlatMap(shadow map[string]bool, m map[string]any) map[string]bool { - // scan keys -outer: - for k := range m { - path := strings.Split(k, v.keyDelim) - // scan intermediate paths - var parentKey string - for i := 1; i < len(path); i++ { - parentKey = strings.Join(path[0:i], v.keyDelim) - if shadow[parentKey] { - // path is shadowed, continue - continue outer - } - } - // add key - shadow[strings.ToLower(k)] = true - } - return shadow -} - -// AllSettings merges all settings and returns them as a map[string]any. 
-func AllSettings() map[string]any { return v.AllSettings() } - -func (v *Viper) AllSettings() map[string]any { - return v.getSettings(v.AllKeys()) -} - -func (v *Viper) getSettings(keys []string) map[string]any { - m := map[string]any{} - // start from the list of keys, and construct the map one value at a time - for _, k := range keys { - value := v.Get(k) - if value == nil { - // should not happen, since AllKeys() returns only keys holding a value, - // check just in case anything changes - continue - } - path := strings.Split(k, v.keyDelim) - lastKey := strings.ToLower(path[len(path)-1]) - deepestMap := deepSearch(m, path[0:len(path)-1]) - // set innermost value - deepestMap[lastKey] = value - } - return m -} - -// SetFs sets the filesystem to use to read configuration. -func SetFs(fs afero.Fs) { v.SetFs(fs) } - -func (v *Viper) SetFs(fs afero.Fs) { - v.fs = fs -} - -// SetConfigName sets name for the config file. -// Does not include extension. -func SetConfigName(in string) { v.SetConfigName(in) } - -func (v *Viper) SetConfigName(in string) { - if in != "" { - v.configName = in - v.configFile = "" - } -} - -// SetConfigType sets the type of the configuration returned by the -// remote source, e.g. "json". -func SetConfigType(in string) { v.SetConfigType(in) } - -func (v *Viper) SetConfigType(in string) { - if in != "" { - v.configType = in - } -} - -// SetConfigPermissions sets the permissions for the config file. -func SetConfigPermissions(perm os.FileMode) { v.SetConfigPermissions(perm) } - -func (v *Viper) SetConfigPermissions(perm os.FileMode) { - v.configPermissions = perm.Perm() -} - -// IniLoadOptions sets the load options for ini parsing. -func IniLoadOptions(in ini.LoadOptions) Option { - return optionFunc(func(v *Viper) { - v.iniLoadOptions = in - }) -} - -func (v *Viper) getConfigType() string { - if v.configType != "" { - return v.configType - } - - cf, err := v.getConfigFile() - if err != nil { - return "" - } - - ext := filepath.Ext(cf) - - if len(ext) > 1 { - return ext[1:] - } - - return "" -} - -func (v *Viper) getConfigFile() (string, error) { - if v.configFile == "" { - cf, err := v.findConfigFile() - if err != nil { - return "", err - } - v.configFile = cf - } - return v.configFile, nil -} - -// Debug prints all configuration registries for debugging -// purposes. 
-func Debug() { v.Debug() } -func DebugTo(w io.Writer) { v.DebugTo(w) } - -func (v *Viper) Debug() { v.DebugTo(os.Stdout) } - -func (v *Viper) DebugTo(w io.Writer) { - fmt.Fprintf(w, "Aliases:\n%#v\n", v.aliases) - fmt.Fprintf(w, "Override:\n%#v\n", v.override) - fmt.Fprintf(w, "PFlags:\n%#v\n", v.pflags) - fmt.Fprintf(w, "Env:\n%#v\n", v.env) - fmt.Fprintf(w, "Key/Value Store:\n%#v\n", v.kvstore) - fmt.Fprintf(w, "Config:\n%#v\n", v.config) - fmt.Fprintf(w, "Defaults:\n%#v\n", v.defaults) -} diff --git a/vendor/github.com/ssgreg/nlreturn/v2/LICENSE b/vendor/github.com/ssgreg/nlreturn/v2/LICENSE deleted file mode 100644 index 0a5b4d106..000000000 --- a/vendor/github.com/ssgreg/nlreturn/v2/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -MIT License - -Copyright (c) 2018 Grigory Zubankov - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. diff --git a/vendor/github.com/ssgreg/nlreturn/v2/pkg/nlreturn/nlreturn.go b/vendor/github.com/ssgreg/nlreturn/v2/pkg/nlreturn/nlreturn.go deleted file mode 100644 index 42b15e9b3..000000000 --- a/vendor/github.com/ssgreg/nlreturn/v2/pkg/nlreturn/nlreturn.go +++ /dev/null @@ -1,95 +0,0 @@ -package nlreturn - -import ( - "flag" - "fmt" - "go/ast" - "go/token" - - "golang.org/x/tools/go/analysis" -) - -const ( - linterName = "nlreturn" - linterDoc = `Linter requires a new line before return and branch statements except when the return is alone inside a statement group (such as an if statement) to increase code clarity.` -) - -var blockSize int - -// NewAnalyzer returns a new nlreturn analyzer. 
-func NewAnalyzer() *analysis.Analyzer { - a := &analysis.Analyzer{ - Name: linterName, - Doc: linterDoc, - Run: run, - } - - a.Flags.Init("nlreturn", flag.ExitOnError) - a.Flags.IntVar(&blockSize, "block-size", 1, "set block size that is still ok") - - return a -} - -func run(pass *analysis.Pass) (interface{}, error) { - for _, f := range pass.Files { - ast.Inspect(f, func(node ast.Node) bool { - switch c := node.(type) { - case *ast.CaseClause: - inspectBlock(pass, c.Body) - case *ast.CommClause: - inspectBlock(pass, c.Body) - case *ast.BlockStmt: - inspectBlock(pass, c.List) - } - - return true - }) - } - - return nil, nil -} - -func inspectBlock(pass *analysis.Pass, block []ast.Stmt) { - for i, stmt := range block { - switch stmt.(type) { - case *ast.BranchStmt, *ast.ReturnStmt: - - if i == 0 || line(pass, stmt.Pos())-line(pass, block[0].Pos()) < blockSize { - return - } - - if line(pass, stmt.Pos())-line(pass, block[i-1].End()) <= 1 { - pass.Report(analysis.Diagnostic{ - Pos: stmt.Pos(), - Message: fmt.Sprintf("%s with no blank line before", name(stmt)), - SuggestedFixes: []analysis.SuggestedFix{ - { - TextEdits: []analysis.TextEdit{ - { - Pos: stmt.Pos(), - NewText: []byte("\n"), - End: stmt.Pos(), - }, - }, - }, - }, - }) - } - } - } -} - -func name(stmt ast.Stmt) string { - switch c := stmt.(type) { - case *ast.BranchStmt: - return c.Tok.String() - case *ast.ReturnStmt: - return "return" - default: - return "unknown" - } -} - -func line(pass *analysis.Pass, pos token.Pos) int { - return pass.Fset.Position(pos).Line -} diff --git a/vendor/github.com/stbenjam/no-sprintf-host-port/LICENSE b/vendor/github.com/stbenjam/no-sprintf-host-port/LICENSE deleted file mode 100644 index 586dfd8cc..000000000 --- a/vendor/github.com/stbenjam/no-sprintf-host-port/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -MIT License - -Copyright (c) 2022 Stephen Benjamin - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. 
\ No newline at end of file diff --git a/vendor/github.com/stbenjam/no-sprintf-host-port/pkg/analyzer/analyzer.go b/vendor/github.com/stbenjam/no-sprintf-host-port/pkg/analyzer/analyzer.go deleted file mode 100644 index 374bb0d24..000000000 --- a/vendor/github.com/stbenjam/no-sprintf-host-port/pkg/analyzer/analyzer.go +++ /dev/null @@ -1,96 +0,0 @@ -package analyzer - -import ( - "fmt" - "go/ast" - "go/token" - "regexp" - - "golang.org/x/tools/go/analysis/passes/inspect" - "golang.org/x/tools/go/ast/inspector" - - "golang.org/x/tools/go/analysis" -) - -var Analyzer = &analysis.Analyzer{ - Name: "nosprintfhostport", - Doc: "Checks for misuse of Sprintf to construct a host with port in a URL.", - Run: run, - Requires: []*analysis.Analyzer{inspect.Analyzer}, -} - -func run(pass *analysis.Pass) (interface{}, error) { - inspector := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) - nodeFilter := []ast.Node{ - (*ast.CallExpr)(nil), - } - - inspector.Preorder(nodeFilter, func(node ast.Node) { - callExpr := node.(*ast.CallExpr) - if p, f, ok := getCallExprFunction(callExpr); ok && p == "fmt" && f == "Sprintf" { - if err := checkForHostPortConstruction(callExpr); err != nil { - pass.Reportf(node.Pos(), err.Error()) - } - } - }) - - return nil, nil -} - -// getCallExprFunction returns the package and function name from a callExpr, if any. -func getCallExprFunction(callExpr *ast.CallExpr) (pkg string, fn string, result bool) { - selector, ok := callExpr.Fun.(*ast.SelectorExpr) - if !ok { - return "", "", false - } - gopkg, ok := selector.X.(*ast.Ident) - if !ok { - return "", "", false - } - return gopkg.Name, selector.Sel.Name, true -} - -// getStringLiteral returns the value at a position if it's a string literal. -func getStringLiteral(args []ast.Expr, pos int) (string, bool) { - if len(args) < pos + 1 { - return "", false - } - - // Let's see if our format string is a string literal. - fsRaw, ok := args[pos].(*ast.BasicLit) - if !ok { - return "", false - } - if fsRaw.Kind == token.STRING && len(fsRaw.Value) >= 2 { - return fsRaw.Value[1 : len(fsRaw.Value)-1], true - } else { - return "", false - } -} - -// checkForHostPortConstruction checks to see if a sprintf call looks like a URI with a port, -// essentially scheme://%s:, or scheme://user:pass@%s:. -// -// Matching requirements: -// - Scheme as per RFC3986 is ALPHA *( ALPHA / DIGIT / "+" / "-" / "." 
) -// - A format string substitution in the host portion, preceded by an optional username/password@ -// - A colon indicating a port will be specified -func checkForHostPortConstruction(sprintf *ast.CallExpr) error { - fs, ok := getStringLiteral(sprintf.Args, 0) - if !ok { - return nil - } - - regexes := []*regexp.Regexp{ - regexp.MustCompile(`^[a-zA-Z][a-zA-Z0-9+-.]*://%s:[^@]*$`), // URL without basic auth user - regexp.MustCompile(`^[a-zA-Z][a-zA-Z0-9+-.]*://[^/]*@%s:.*$`), // URL with basic auth - } - - for _, re := range regexes { - if re.MatchString(fs) { - return fmt.Errorf("host:port in url should be constructed with net.JoinHostPort and not directly with fmt.Sprintf") - } - } - - return nil -} \ No newline at end of file diff --git a/vendor/github.com/stretchr/objx/.codeclimate.yml b/vendor/github.com/stretchr/objx/.codeclimate.yml deleted file mode 100644 index 559fa399c..000000000 --- a/vendor/github.com/stretchr/objx/.codeclimate.yml +++ /dev/null @@ -1,21 +0,0 @@ -engines: - gofmt: - enabled: true - golint: - enabled: true - govet: - enabled: true - -exclude_patterns: -- ".github/" -- "vendor/" -- "codegen/" -- "*.yml" -- ".*.yml" -- "*.md" -- "Gopkg.*" -- "doc.go" -- "type_specific_codegen_test.go" -- "type_specific_codegen.go" -- ".gitignore" -- "LICENSE" diff --git a/vendor/github.com/stretchr/objx/.gitignore b/vendor/github.com/stretchr/objx/.gitignore deleted file mode 100644 index ea58090bd..000000000 --- a/vendor/github.com/stretchr/objx/.gitignore +++ /dev/null @@ -1,11 +0,0 @@ -# Binaries for programs and plugins -*.exe -*.dll -*.so -*.dylib - -# Test binary, build with `go test -c` -*.test - -# Output of the go coverage tool, specifically when used with LiteIDE -*.out diff --git a/vendor/github.com/stretchr/objx/LICENSE b/vendor/github.com/stretchr/objx/LICENSE deleted file mode 100644 index 44d4d9d5a..000000000 --- a/vendor/github.com/stretchr/objx/LICENSE +++ /dev/null @@ -1,22 +0,0 @@ -The MIT License - -Copyright (c) 2014 Stretchr, Inc. -Copyright (c) 2017-2018 objx contributors - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. 
diff --git a/vendor/github.com/stretchr/objx/README.md b/vendor/github.com/stretchr/objx/README.md deleted file mode 100644 index 78dc1f8b0..000000000 --- a/vendor/github.com/stretchr/objx/README.md +++ /dev/null @@ -1,80 +0,0 @@ -# Objx -[![Build Status](https://travis-ci.org/stretchr/objx.svg?branch=master)](https://travis-ci.org/stretchr/objx) -[![Go Report Card](https://goreportcard.com/badge/github.com/stretchr/objx)](https://goreportcard.com/report/github.com/stretchr/objx) -[![Maintainability](https://api.codeclimate.com/v1/badges/1d64bc6c8474c2074f2b/maintainability)](https://codeclimate.com/github/stretchr/objx/maintainability) -[![Test Coverage](https://api.codeclimate.com/v1/badges/1d64bc6c8474c2074f2b/test_coverage)](https://codeclimate.com/github/stretchr/objx/test_coverage) -[![Sourcegraph](https://sourcegraph.com/github.com/stretchr/objx/-/badge.svg)](https://sourcegraph.com/github.com/stretchr/objx) -[![GoDoc](https://pkg.go.dev/badge/github.com/stretchr/objx?utm_source=godoc)](https://pkg.go.dev/github.com/stretchr/objx) - -Objx - Go package for dealing with maps, slices, JSON and other data. - -Get started: - -- Install Objx with [one line of code](#installation), or [update it with another](#staying-up-to-date) -- Check out the API Documentation http://pkg.go.dev/github.com/stretchr/objx - -## Overview -Objx provides the `objx.Map` type, which is a `map[string]interface{}` that exposes a powerful `Get` method (among others) that allows you to easily and quickly get access to data within the map, without having to worry too much about type assertions, missing data, default values etc. - -### Pattern -Objx uses a predictable pattern to make access data from within `map[string]interface{}` easy. Call one of the `objx.` functions to create your `objx.Map` to get going: - - m, err := objx.FromJSON(json) - -NOTE: Any methods or functions with the `Must` prefix will panic if something goes wrong, the rest will be optimistic and try to figure things out without panicking. - -Use `Get` to access the value you're interested in. You can use dot and array -notation too: - - m.Get("places[0].latlng") - -Once you have sought the `Value` you're interested in, you can use the `Is*` methods to determine its type. - - if m.Get("code").IsStr() { // Your code... } - -Or you can just assume the type, and use one of the strong type methods to extract the real value: - - m.Get("code").Int() - -If there's no value there (or if it's the wrong type) then a default value will be returned, or you can be explicit about the default value. - - Get("code").Int(-1) - -If you're dealing with a slice of data as a value, Objx provides many useful methods for iterating, manipulating and selecting that data. You can find out more by exploring the index below. - -### Reading data -A simple example of how to use Objx: - - // Use MustFromJSON to make an objx.Map from some JSON - m := objx.MustFromJSON(`{"name": "Mat", "age": 30}`) - - // Get the details - name := m.Get("name").Str() - age := m.Get("age").Int() - - // Get their nickname (or use their name if they don't have one) - nickname := m.Get("nickname").Str(name) - -### Ranging -Since `objx.Map` is a `map[string]interface{}` you can treat it as such. For example, to `range` the data, do what you would expect: - - m := objx.MustFromJSON(json) - for key, value := range m { - // Your code... 
- } - -## Installation -To install Objx, use go get: - - go get github.com/stretchr/objx - -### Staying up to date -To update Objx to the latest version, run: - - go get -u github.com/stretchr/objx - -### Supported go versions -We currently support the three recent major Go versions. - -## Contributing -Please feel free to submit issues, fork the repository and send pull requests! diff --git a/vendor/github.com/stretchr/objx/Taskfile.yml b/vendor/github.com/stretchr/objx/Taskfile.yml deleted file mode 100644 index 8a79e8d67..000000000 --- a/vendor/github.com/stretchr/objx/Taskfile.yml +++ /dev/null @@ -1,27 +0,0 @@ -version: '3' - -tasks: - default: - deps: [test] - - lint: - desc: Checks code style - cmds: - - gofmt -d -s *.go - - go vet ./... - silent: true - - lint-fix: - desc: Fixes code style - cmds: - - gofmt -w -s *.go - - test: - desc: Runs go tests - cmds: - - go test -race ./... - - test-coverage: - desc: Runs go tests and calculates test coverage - cmds: - - go test -race -coverprofile=c.out ./... diff --git a/vendor/github.com/stretchr/objx/accessors.go b/vendor/github.com/stretchr/objx/accessors.go deleted file mode 100644 index 72f1d1c1c..000000000 --- a/vendor/github.com/stretchr/objx/accessors.go +++ /dev/null @@ -1,197 +0,0 @@ -package objx - -import ( - "reflect" - "regexp" - "strconv" - "strings" -) - -const ( - // PathSeparator is the character used to separate the elements - // of the keypath. - // - // For example, `location.address.city` - PathSeparator string = "." - - // arrayAccessRegexString is the regex used to extract the array number - // from the access path - arrayAccessRegexString = `^(.+)\[([0-9]+)\]$` - - // mapAccessRegexString is the regex used to extract the map key - // from the access path - mapAccessRegexString = `^([^\[]*)\[([^\]]+)\](.*)$` -) - -// arrayAccessRegex is the compiled arrayAccessRegexString -var arrayAccessRegex = regexp.MustCompile(arrayAccessRegexString) - -// mapAccessRegex is the compiled mapAccessRegexString -var mapAccessRegex = regexp.MustCompile(mapAccessRegexString) - -// Get gets the value using the specified selector and -// returns it inside a new Obj object. -// -// If it cannot find the value, Get will return a nil -// value inside an instance of Obj. -// -// Get can only operate directly on map[string]interface{} and []interface. -// -// # Example -// -// To access the title of the third chapter of the second book, do: -// -// o.Get("books[1].chapters[2].title") -func (m Map) Get(selector string) *Value { - rawObj := access(m, selector, nil, false) - return &Value{data: rawObj} -} - -// Set sets the value using the specified selector and -// returns the object on which Set was called. -// -// Set can only operate directly on map[string]interface{} and []interface -// -// # Example -// -// To set the title of the third chapter of the second book, do: -// -// o.Set("books[1].chapters[2].title","Time to Go") -func (m Map) Set(selector string, value interface{}) Map { - access(m, selector, value, true) - return m -} - -// getIndex returns the index, which is hold in s by two branches. -// It also returns s without the index part, e.g. name[1] will return (1, name). 
-// If no index is found, -1 is returned -func getIndex(s string) (int, string) { - arrayMatches := arrayAccessRegex.FindStringSubmatch(s) - if len(arrayMatches) > 0 { - // Get the key into the map - selector := arrayMatches[1] - // Get the index into the array at the key - // We know this can't fail because arrayMatches[2] is an int for sure - index, _ := strconv.Atoi(arrayMatches[2]) - return index, selector - } - return -1, s -} - -// getKey returns the key which is held in s by two brackets. -// It also returns the next selector. -func getKey(s string) (string, string) { - selSegs := strings.SplitN(s, PathSeparator, 2) - thisSel := selSegs[0] - nextSel := "" - - if len(selSegs) > 1 { - nextSel = selSegs[1] - } - - mapMatches := mapAccessRegex.FindStringSubmatch(s) - if len(mapMatches) > 0 { - if _, err := strconv.Atoi(mapMatches[2]); err != nil { - thisSel = mapMatches[1] - nextSel = "[" + mapMatches[2] + "]" + mapMatches[3] - - if thisSel == "" { - thisSel = mapMatches[2] - nextSel = mapMatches[3] - } - - if nextSel == "" { - selSegs = []string{"", ""} - } else if nextSel[0] == '.' { - nextSel = nextSel[1:] - } - } - } - - return thisSel, nextSel -} - -// access accesses the object using the selector and performs the -// appropriate action. -func access(current interface{}, selector string, value interface{}, isSet bool) interface{} { - thisSel, nextSel := getKey(selector) - - indexes := []int{} - for strings.Contains(thisSel, "[") { - prevSel := thisSel - index := -1 - index, thisSel = getIndex(thisSel) - indexes = append(indexes, index) - if prevSel == thisSel { - break - } - } - - if curMap, ok := current.(Map); ok { - current = map[string]interface{}(curMap) - } - // get the object in question - switch current.(type) { - case map[string]interface{}: - curMSI := current.(map[string]interface{}) - if nextSel == "" && isSet { - curMSI[thisSel] = value - return nil - } - - _, ok := curMSI[thisSel].(map[string]interface{}) - if !ok { - _, ok = curMSI[thisSel].(Map) - } - - if (curMSI[thisSel] == nil || !ok) && len(indexes) == 0 && isSet { - curMSI[thisSel] = map[string]interface{}{} - } - - current = curMSI[thisSel] - default: - current = nil - } - - // do we need to access the item of an array? - if len(indexes) > 0 { - num := len(indexes) - for num > 0 { - num-- - index := indexes[num] - indexes = indexes[:num] - if array, ok := interSlice(current); ok { - if index < len(array) { - current = array[index] - } else { - current = nil - break - } - } - } - } - - if nextSel != "" { - current = access(current, nextSel, value, isSet) - } - return current -} - -func interSlice(slice interface{}) ([]interface{}, bool) { - if array, ok := slice.([]interface{}); ok { - return array, ok - } - - s := reflect.ValueOf(slice) - if s.Kind() != reflect.Slice { - return nil, false - } - - ret := make([]interface{}, s.Len()) - - for i := 0; i < s.Len(); i++ { - ret[i] = s.Index(i).Interface() - } - - return ret, true -} diff --git a/vendor/github.com/stretchr/objx/conversions.go b/vendor/github.com/stretchr/objx/conversions.go deleted file mode 100644 index 01c63d7d3..000000000 --- a/vendor/github.com/stretchr/objx/conversions.go +++ /dev/null @@ -1,280 +0,0 @@ -package objx - -import ( - "bytes" - "encoding/base64" - "encoding/json" - "errors" - "fmt" - "net/url" - "strconv" -) - -// SignatureSeparator is the character that is used to -// separate the Base64 string from the security signature. 
-const SignatureSeparator = "_" - -// URLValuesSliceKeySuffix is the character that is used to -// specify a suffix for slices parsed by URLValues. -// If the suffix is set to "[i]", then the index of the slice -// is used in place of i -// Ex: Suffix "[]" would have the form a[]=b&a[]=c -// OR Suffix "[i]" would have the form a[0]=b&a[1]=c -// OR Suffix "" would have the form a=b&a=c -var urlValuesSliceKeySuffix = "[]" - -const ( - URLValuesSliceKeySuffixEmpty = "" - URLValuesSliceKeySuffixArray = "[]" - URLValuesSliceKeySuffixIndex = "[i]" -) - -// SetURLValuesSliceKeySuffix sets the character that is used to -// specify a suffix for slices parsed by URLValues. -// If the suffix is set to "[i]", then the index of the slice -// is used in place of i -// Ex: Suffix "[]" would have the form a[]=b&a[]=c -// OR Suffix "[i]" would have the form a[0]=b&a[1]=c -// OR Suffix "" would have the form a=b&a=c -func SetURLValuesSliceKeySuffix(s string) error { - if s == URLValuesSliceKeySuffixEmpty || s == URLValuesSliceKeySuffixArray || s == URLValuesSliceKeySuffixIndex { - urlValuesSliceKeySuffix = s - return nil - } - - return errors.New("objx: Invalid URLValuesSliceKeySuffix provided.") -} - -// JSON converts the contained object to a JSON string -// representation -func (m Map) JSON() (string, error) { - for k, v := range m { - m[k] = cleanUp(v) - } - - result, err := json.Marshal(m) - if err != nil { - err = errors.New("objx: JSON encode failed with: " + err.Error()) - } - return string(result), err -} - -func cleanUpInterfaceArray(in []interface{}) []interface{} { - result := make([]interface{}, len(in)) - for i, v := range in { - result[i] = cleanUp(v) - } - return result -} - -func cleanUpInterfaceMap(in map[interface{}]interface{}) Map { - result := Map{} - for k, v := range in { - result[fmt.Sprintf("%v", k)] = cleanUp(v) - } - return result -} - -func cleanUpStringMap(in map[string]interface{}) Map { - result := Map{} - for k, v := range in { - result[k] = cleanUp(v) - } - return result -} - -func cleanUpMSIArray(in []map[string]interface{}) []Map { - result := make([]Map, len(in)) - for i, v := range in { - result[i] = cleanUpStringMap(v) - } - return result -} - -func cleanUpMapArray(in []Map) []Map { - result := make([]Map, len(in)) - for i, v := range in { - result[i] = cleanUpStringMap(v) - } - return result -} - -func cleanUp(v interface{}) interface{} { - switch v := v.(type) { - case []interface{}: - return cleanUpInterfaceArray(v) - case []map[string]interface{}: - return cleanUpMSIArray(v) - case map[interface{}]interface{}: - return cleanUpInterfaceMap(v) - case Map: - return cleanUpStringMap(v) - case []Map: - return cleanUpMapArray(v) - default: - return v - } -} - -// MustJSON converts the contained object to a JSON string -// representation and panics if there is an error -func (m Map) MustJSON() string { - result, err := m.JSON() - if err != nil { - panic(err.Error()) - } - return result -} - -// Base64 converts the contained object to a Base64 string -// representation of the JSON string representation -func (m Map) Base64() (string, error) { - var buf bytes.Buffer - - jsonData, err := m.JSON() - if err != nil { - return "", err - } - - encoder := base64.NewEncoder(base64.StdEncoding, &buf) - _, _ = encoder.Write([]byte(jsonData)) - _ = encoder.Close() - - return buf.String(), nil -} - -// MustBase64 converts the contained object to a Base64 string -// representation of the JSON string representation and panics -// if there is an error -func (m Map) MustBase64() string { - 
result, err := m.Base64() - if err != nil { - panic(err.Error()) - } - return result -} - -// SignedBase64 converts the contained object to a Base64 string -// representation of the JSON string representation and signs it -// using the provided key. -func (m Map) SignedBase64(key string) (string, error) { - base64, err := m.Base64() - if err != nil { - return "", err - } - - sig := HashWithKey(base64, key) - return base64 + SignatureSeparator + sig, nil -} - -// MustSignedBase64 converts the contained object to a Base64 string -// representation of the JSON string representation and signs it -// using the provided key and panics if there is an error -func (m Map) MustSignedBase64(key string) string { - result, err := m.SignedBase64(key) - if err != nil { - panic(err.Error()) - } - return result -} - -/* - URL Query - ------------------------------------------------ -*/ - -// URLValues creates a url.Values object from an Obj. This -// function requires that the wrapped object be a map[string]interface{} -func (m Map) URLValues() url.Values { - vals := make(url.Values) - - m.parseURLValues(m, vals, "") - - return vals -} - -func (m Map) parseURLValues(queryMap Map, vals url.Values, key string) { - useSliceIndex := false - if urlValuesSliceKeySuffix == "[i]" { - useSliceIndex = true - } - - for k, v := range queryMap { - val := &Value{data: v} - switch { - case val.IsObjxMap(): - if key == "" { - m.parseURLValues(val.ObjxMap(), vals, k) - } else { - m.parseURLValues(val.ObjxMap(), vals, key+"["+k+"]") - } - case val.IsObjxMapSlice(): - sliceKey := k - if key != "" { - sliceKey = key + "[" + k + "]" - } - - if useSliceIndex { - for i, sv := range val.MustObjxMapSlice() { - sk := sliceKey + "[" + strconv.FormatInt(int64(i), 10) + "]" - m.parseURLValues(sv, vals, sk) - } - } else { - sliceKey = sliceKey + urlValuesSliceKeySuffix - for _, sv := range val.MustObjxMapSlice() { - m.parseURLValues(sv, vals, sliceKey) - } - } - case val.IsMSISlice(): - sliceKey := k - if key != "" { - sliceKey = key + "[" + k + "]" - } - - if useSliceIndex { - for i, sv := range val.MustMSISlice() { - sk := sliceKey + "[" + strconv.FormatInt(int64(i), 10) + "]" - m.parseURLValues(New(sv), vals, sk) - } - } else { - sliceKey = sliceKey + urlValuesSliceKeySuffix - for _, sv := range val.MustMSISlice() { - m.parseURLValues(New(sv), vals, sliceKey) - } - } - case val.IsStrSlice(), val.IsBoolSlice(), - val.IsFloat32Slice(), val.IsFloat64Slice(), - val.IsIntSlice(), val.IsInt8Slice(), val.IsInt16Slice(), val.IsInt32Slice(), val.IsInt64Slice(), - val.IsUintSlice(), val.IsUint8Slice(), val.IsUint16Slice(), val.IsUint32Slice(), val.IsUint64Slice(): - - sliceKey := k - if key != "" { - sliceKey = key + "[" + k + "]" - } - - if useSliceIndex { - for i, sv := range val.StringSlice() { - sk := sliceKey + "[" + strconv.FormatInt(int64(i), 10) + "]" - vals.Set(sk, sv) - } - } else { - sliceKey = sliceKey + urlValuesSliceKeySuffix - vals[sliceKey] = val.StringSlice() - } - - default: - if key == "" { - vals.Set(k, val.String()) - } else { - vals.Set(key+"["+k+"]", val.String()) - } - } - } -} - -// URLQuery gets an encoded URL query representing the given -// Obj. 
This function requires that the wrapped object be a -// map[string]interface{} -func (m Map) URLQuery() (string, error) { - return m.URLValues().Encode(), nil -} diff --git a/vendor/github.com/stretchr/objx/doc.go b/vendor/github.com/stretchr/objx/doc.go deleted file mode 100644 index b170af74b..000000000 --- a/vendor/github.com/stretchr/objx/doc.go +++ /dev/null @@ -1,66 +0,0 @@ -/* -Package objx provides utilities for dealing with maps, slices, JSON and other data. - -# Overview - -Objx provides the `objx.Map` type, which is a `map[string]interface{}` that exposes -a powerful `Get` method (among others) that allows you to easily and quickly get -access to data within the map, without having to worry too much about type assertions, -missing data, default values etc. - -# Pattern - -Objx uses a predictable pattern to make access data from within `map[string]interface{}` easy. -Call one of the `objx.` functions to create your `objx.Map` to get going: - - m, err := objx.FromJSON(json) - -NOTE: Any methods or functions with the `Must` prefix will panic if something goes wrong, -the rest will be optimistic and try to figure things out without panicking. - -Use `Get` to access the value you're interested in. You can use dot and array -notation too: - - m.Get("places[0].latlng") - -Once you have sought the `Value` you're interested in, you can use the `Is*` methods to determine its type. - - if m.Get("code").IsStr() { // Your code... } - -Or you can just assume the type, and use one of the strong type methods to extract the real value: - - m.Get("code").Int() - -If there's no value there (or if it's the wrong type) then a default value will be returned, -or you can be explicit about the default value. - - Get("code").Int(-1) - -If you're dealing with a slice of data as a value, Objx provides many useful methods for iterating, -manipulating and selecting that data. You can find out more by exploring the index below. - -# Reading data - -A simple example of how to use Objx: - - // Use MustFromJSON to make an objx.Map from some JSON - m := objx.MustFromJSON(`{"name": "Mat", "age": 30}`) - - // Get the details - name := m.Get("name").Str() - age := m.Get("age").Int() - - // Get their nickname (or use their name if they don't have one) - nickname := m.Get("nickname").Str(name) - -# Ranging - -Since `objx.Map` is a `map[string]interface{}` you can treat it as such. -For example, to `range` the data, do what you would expect: - - m := objx.MustFromJSON(json) - for key, value := range m { - // Your code... - } -*/ -package objx diff --git a/vendor/github.com/stretchr/objx/map.go b/vendor/github.com/stretchr/objx/map.go deleted file mode 100644 index ab9f9ae67..000000000 --- a/vendor/github.com/stretchr/objx/map.go +++ /dev/null @@ -1,214 +0,0 @@ -package objx - -import ( - "encoding/base64" - "encoding/json" - "errors" - "io/ioutil" - "net/url" - "strings" -) - -// MSIConvertable is an interface that defines methods for converting your -// custom types to a map[string]interface{} representation. -type MSIConvertable interface { - // MSI gets a map[string]interface{} (msi) representing the - // object. - MSI() map[string]interface{} -} - -// Map provides extended functionality for working with -// untyped data, in particular map[string]interface (msi). -type Map map[string]interface{} - -// Value returns the internal value instance -func (m Map) Value() *Value { - return &Value{data: m} -} - -// Nil represents a nil Map. 
-var Nil = New(nil) - -// New creates a new Map containing the map[string]interface{} in the data argument. -// If the data argument is not a map[string]interface, New attempts to call the -// MSI() method on the MSIConvertable interface to create one. -func New(data interface{}) Map { - if _, ok := data.(map[string]interface{}); !ok { - if converter, ok := data.(MSIConvertable); ok { - data = converter.MSI() - } else { - return nil - } - } - return Map(data.(map[string]interface{})) -} - -// MSI creates a map[string]interface{} and puts it inside a new Map. -// -// The arguments follow a key, value pattern. -// -// Returns nil if any key argument is non-string or if there are an odd number of arguments. -// -// # Example -// -// To easily create Maps: -// -// m := objx.MSI("name", "Mat", "age", 29, "subobj", objx.MSI("active", true)) -// -// // creates an Map equivalent to -// m := objx.Map{"name": "Mat", "age": 29, "subobj": objx.Map{"active": true}} -func MSI(keyAndValuePairs ...interface{}) Map { - newMap := Map{} - keyAndValuePairsLen := len(keyAndValuePairs) - if keyAndValuePairsLen%2 != 0 { - return nil - } - for i := 0; i < keyAndValuePairsLen; i = i + 2 { - key := keyAndValuePairs[i] - value := keyAndValuePairs[i+1] - - // make sure the key is a string - keyString, keyStringOK := key.(string) - if !keyStringOK { - return nil - } - newMap[keyString] = value - } - return newMap -} - -// ****** Conversion Constructors - -// MustFromJSON creates a new Map containing the data specified in the -// jsonString. -// -// Panics if the JSON is invalid. -func MustFromJSON(jsonString string) Map { - o, err := FromJSON(jsonString) - if err != nil { - panic("objx: MustFromJSON failed with error: " + err.Error()) - } - return o -} - -// MustFromJSONSlice creates a new slice of Map containing the data specified in the -// jsonString. Works with jsons with a top level array -// -// Panics if the JSON is invalid. -func MustFromJSONSlice(jsonString string) []Map { - slice, err := FromJSONSlice(jsonString) - if err != nil { - panic("objx: MustFromJSONSlice failed with error: " + err.Error()) - } - return slice -} - -// FromJSON creates a new Map containing the data specified in the -// jsonString. -// -// Returns an error if the JSON is invalid. -func FromJSON(jsonString string) (Map, error) { - var m Map - err := json.Unmarshal([]byte(jsonString), &m) - if err != nil { - return Nil, err - } - return m, nil -} - -// FromJSONSlice creates a new slice of Map containing the data specified in the -// jsonString. Works with jsons with a top level array -// -// Returns an error if the JSON is invalid. -func FromJSONSlice(jsonString string) ([]Map, error) { - var slice []Map - err := json.Unmarshal([]byte(jsonString), &slice) - if err != nil { - return nil, err - } - return slice, nil -} - -// FromBase64 creates a new Obj containing the data specified -// in the Base64 string. -// -// The string is an encoded JSON string returned by Base64 -func FromBase64(base64String string) (Map, error) { - decoder := base64.NewDecoder(base64.StdEncoding, strings.NewReader(base64String)) - decoded, err := ioutil.ReadAll(decoder) - if err != nil { - return nil, err - } - return FromJSON(string(decoded)) -} - -// MustFromBase64 creates a new Obj containing the data specified -// in the Base64 string and panics if there is an error. 
-// -// The string is an encoded JSON string returned by Base64 -func MustFromBase64(base64String string) Map { - result, err := FromBase64(base64String) - if err != nil { - panic("objx: MustFromBase64 failed with error: " + err.Error()) - } - return result -} - -// FromSignedBase64 creates a new Obj containing the data specified -// in the Base64 string. -// -// The string is an encoded JSON string returned by SignedBase64 -func FromSignedBase64(base64String, key string) (Map, error) { - parts := strings.Split(base64String, SignatureSeparator) - if len(parts) != 2 { - return nil, errors.New("objx: Signed base64 string is malformed") - } - - sig := HashWithKey(parts[0], key) - if parts[1] != sig { - return nil, errors.New("objx: Signature for base64 data does not match") - } - return FromBase64(parts[0]) -} - -// MustFromSignedBase64 creates a new Obj containing the data specified -// in the Base64 string and panics if there is an error. -// -// The string is an encoded JSON string returned by Base64 -func MustFromSignedBase64(base64String, key string) Map { - result, err := FromSignedBase64(base64String, key) - if err != nil { - panic("objx: MustFromSignedBase64 failed with error: " + err.Error()) - } - return result -} - -// FromURLQuery generates a new Obj by parsing the specified -// query. -// -// For queries with multiple values, the first value is selected. -func FromURLQuery(query string) (Map, error) { - vals, err := url.ParseQuery(query) - if err != nil { - return nil, err - } - m := Map{} - for k, vals := range vals { - m[k] = vals[0] - } - return m, nil -} - -// MustFromURLQuery generates a new Obj by parsing the specified -// query. -// -// For queries with multiple values, the first value is selected. -// -// Panics if it encounters an error -func MustFromURLQuery(query string) Map { - o, err := FromURLQuery(query) - if err != nil { - panic("objx: MustFromURLQuery failed with error: " + err.Error()) - } - return o -} diff --git a/vendor/github.com/stretchr/objx/mutations.go b/vendor/github.com/stretchr/objx/mutations.go deleted file mode 100644 index c3400a3f7..000000000 --- a/vendor/github.com/stretchr/objx/mutations.go +++ /dev/null @@ -1,77 +0,0 @@ -package objx - -// Exclude returns a new Map with the keys in the specified []string -// excluded. -func (m Map) Exclude(exclude []string) Map { - excluded := make(Map) - for k, v := range m { - if !contains(exclude, k) { - excluded[k] = v - } - } - return excluded -} - -// Copy creates a shallow copy of the Obj. -func (m Map) Copy() Map { - copied := Map{} - for k, v := range m { - copied[k] = v - } - return copied -} - -// Merge blends the specified map with a copy of this map and returns the result. -// -// Keys that appear in both will be selected from the specified map. -// This method requires that the wrapped object be a map[string]interface{} -func (m Map) Merge(merge Map) Map { - return m.Copy().MergeHere(merge) -} - -// MergeHere blends the specified map with this map and returns the current map. -// -// Keys that appear in both will be selected from the specified map. The original map -// will be modified. This method requires that -// the wrapped object be a map[string]interface{} -func (m Map) MergeHere(merge Map) Map { - for k, v := range merge { - m[k] = v - } - return m -} - -// Transform builds a new Obj giving the transformer a chance -// to change the keys and values as it goes. 
This method requires that -// the wrapped object be a map[string]interface{} -func (m Map) Transform(transformer func(key string, value interface{}) (string, interface{})) Map { - newMap := Map{} - for k, v := range m { - modifiedKey, modifiedVal := transformer(k, v) - newMap[modifiedKey] = modifiedVal - } - return newMap -} - -// TransformKeys builds a new map using the specified key mapping. -// -// Unspecified keys will be unaltered. -// This method requires that the wrapped object be a map[string]interface{} -func (m Map) TransformKeys(mapping map[string]string) Map { - return m.Transform(func(key string, value interface{}) (string, interface{}) { - if newKey, ok := mapping[key]; ok { - return newKey, value - } - return key, value - }) -} - -// Checks if a string slice contains a string -func contains(s []string, e string) bool { - for _, a := range s { - if a == e { - return true - } - } - return false -} diff --git a/vendor/github.com/stretchr/objx/security.go b/vendor/github.com/stretchr/objx/security.go deleted file mode 100644 index 692be8e2a..000000000 --- a/vendor/github.com/stretchr/objx/security.go +++ /dev/null @@ -1,12 +0,0 @@ -package objx - -import ( - "crypto/sha1" - "encoding/hex" -) - -// HashWithKey hashes the specified string using the security key -func HashWithKey(data, key string) string { - d := sha1.Sum([]byte(data + ":" + key)) - return hex.EncodeToString(d[:]) -} diff --git a/vendor/github.com/stretchr/objx/tests.go b/vendor/github.com/stretchr/objx/tests.go deleted file mode 100644 index d9e0b479a..000000000 --- a/vendor/github.com/stretchr/objx/tests.go +++ /dev/null @@ -1,17 +0,0 @@ -package objx - -// Has gets whether there is something at the specified selector -// or not. -// -// If m is nil, Has will always return false. -func (m Map) Has(selector string) bool { - if m == nil { - return false - } - return !m.Get(selector).IsNil() -} - -// IsNil gets whether the data is nil or not. -func (v *Value) IsNil() bool { - return v == nil || v.data == nil -} diff --git a/vendor/github.com/stretchr/objx/type_specific.go b/vendor/github.com/stretchr/objx/type_specific.go deleted file mode 100644 index 80f88d9fa..000000000 --- a/vendor/github.com/stretchr/objx/type_specific.go +++ /dev/null @@ -1,346 +0,0 @@ -package objx - -/* - MSI (map[string]interface{} and []map[string]interface{}) -*/ - -// MSI gets the value as a map[string]interface{}, returns the optionalDefault -// value or a system default object if the value is the wrong type. -func (v *Value) MSI(optionalDefault ...map[string]interface{}) map[string]interface{} { - if s, ok := v.data.(map[string]interface{}); ok { - return s - } - if s, ok := v.data.(Map); ok { - return map[string]interface{}(s) - } - if len(optionalDefault) == 1 { - return optionalDefault[0] - } - return nil -} - -// MustMSI gets the value as a map[string]interface{}. -// -// Panics if the object is not a map[string]interface{}. -func (v *Value) MustMSI() map[string]interface{} { - if s, ok := v.data.(Map); ok { - return map[string]interface{}(s) - } - return v.data.(map[string]interface{}) -} - -// MSISlice gets the value as a []map[string]interface{}, returns the optionalDefault -// value or nil if the value is not a []map[string]interface{}. 
-func (v *Value) MSISlice(optionalDefault ...[]map[string]interface{}) []map[string]interface{} { - if s, ok := v.data.([]map[string]interface{}); ok { - return s - } - - s := v.ObjxMapSlice() - if s == nil { - if len(optionalDefault) == 1 { - return optionalDefault[0] - } - return nil - } - - result := make([]map[string]interface{}, len(s)) - for i := range s { - result[i] = s[i].Value().MSI() - } - return result -} - -// MustMSISlice gets the value as a []map[string]interface{}. -// -// Panics if the object is not a []map[string]interface{}. -func (v *Value) MustMSISlice() []map[string]interface{} { - if s := v.MSISlice(); s != nil { - return s - } - - return v.data.([]map[string]interface{}) -} - -// IsMSI gets whether the object contained is a map[string]interface{} or not. -func (v *Value) IsMSI() bool { - _, ok := v.data.(map[string]interface{}) - if !ok { - _, ok = v.data.(Map) - } - return ok -} - -// IsMSISlice gets whether the object contained is a []map[string]interface{} or not. -func (v *Value) IsMSISlice() bool { - _, ok := v.data.([]map[string]interface{}) - if !ok { - _, ok = v.data.([]Map) - if !ok { - s, ok := v.data.([]interface{}) - if ok { - for i := range s { - switch s[i].(type) { - case Map: - case map[string]interface{}: - default: - return false - } - } - return true - } - } - } - return ok -} - -// EachMSI calls the specified callback for each object -// in the []map[string]interface{}. -// -// Panics if the object is the wrong type. -func (v *Value) EachMSI(callback func(int, map[string]interface{}) bool) *Value { - for index, val := range v.MustMSISlice() { - carryon := callback(index, val) - if !carryon { - break - } - } - return v -} - -// WhereMSI uses the specified decider function to select items -// from the []map[string]interface{}. The object contained in the result will contain -// only the selected items. -func (v *Value) WhereMSI(decider func(int, map[string]interface{}) bool) *Value { - var selected []map[string]interface{} - v.EachMSI(func(index int, val map[string]interface{}) bool { - shouldSelect := decider(index, val) - if !shouldSelect { - selected = append(selected, val) - } - return true - }) - return &Value{data: selected} -} - -// GroupMSI uses the specified grouper function to group the items -// keyed by the return of the grouper. The object contained in the -// result will contain a map[string][]map[string]interface{}. -func (v *Value) GroupMSI(grouper func(int, map[string]interface{}) string) *Value { - groups := make(map[string][]map[string]interface{}) - v.EachMSI(func(index int, val map[string]interface{}) bool { - group := grouper(index, val) - if _, ok := groups[group]; !ok { - groups[group] = make([]map[string]interface{}, 0) - } - groups[group] = append(groups[group], val) - return true - }) - return &Value{data: groups} -} - -// ReplaceMSI uses the specified function to replace each map[string]interface{}s -// by iterating each item. The data in the returned result will be a -// []map[string]interface{} containing the replaced items. -func (v *Value) ReplaceMSI(replacer func(int, map[string]interface{}) map[string]interface{}) *Value { - arr := v.MustMSISlice() - replaced := make([]map[string]interface{}, len(arr)) - v.EachMSI(func(index int, val map[string]interface{}) bool { - replaced[index] = replacer(index, val) - return true - }) - return &Value{data: replaced} -} - -// CollectMSI uses the specified collector function to collect a value -// for each of the map[string]interface{}s in the slice. 
The data returned will be a -// []interface{}. -func (v *Value) CollectMSI(collector func(int, map[string]interface{}) interface{}) *Value { - arr := v.MustMSISlice() - collected := make([]interface{}, len(arr)) - v.EachMSI(func(index int, val map[string]interface{}) bool { - collected[index] = collector(index, val) - return true - }) - return &Value{data: collected} -} - -/* - ObjxMap ((Map) and [](Map)) -*/ - -// ObjxMap gets the value as a (Map), returns the optionalDefault -// value or a system default object if the value is the wrong type. -func (v *Value) ObjxMap(optionalDefault ...(Map)) Map { - if s, ok := v.data.((Map)); ok { - return s - } - if s, ok := v.data.(map[string]interface{}); ok { - return s - } - if len(optionalDefault) == 1 { - return optionalDefault[0] - } - return New(nil) -} - -// MustObjxMap gets the value as a (Map). -// -// Panics if the object is not a (Map). -func (v *Value) MustObjxMap() Map { - if s, ok := v.data.(map[string]interface{}); ok { - return s - } - return v.data.((Map)) -} - -// ObjxMapSlice gets the value as a [](Map), returns the optionalDefault -// value or nil if the value is not a [](Map). -func (v *Value) ObjxMapSlice(optionalDefault ...[](Map)) [](Map) { - if s, ok := v.data.([]Map); ok { - return s - } - - if s, ok := v.data.([]map[string]interface{}); ok { - result := make([]Map, len(s)) - for i := range s { - result[i] = s[i] - } - return result - } - - s, ok := v.data.([]interface{}) - if !ok { - if len(optionalDefault) == 1 { - return optionalDefault[0] - } - return nil - } - - result := make([]Map, len(s)) - for i := range s { - switch s[i].(type) { - case Map: - result[i] = s[i].(Map) - case map[string]interface{}: - result[i] = New(s[i]) - default: - return nil - } - } - return result -} - -// MustObjxMapSlice gets the value as a [](Map). -// -// Panics if the object is not a [](Map). -func (v *Value) MustObjxMapSlice() [](Map) { - if s := v.ObjxMapSlice(); s != nil { - return s - } - return v.data.([](Map)) -} - -// IsObjxMap gets whether the object contained is a (Map) or not. -func (v *Value) IsObjxMap() bool { - _, ok := v.data.((Map)) - if !ok { - _, ok = v.data.(map[string]interface{}) - } - return ok -} - -// IsObjxMapSlice gets whether the object contained is a [](Map) or not. -func (v *Value) IsObjxMapSlice() bool { - _, ok := v.data.([](Map)) - if !ok { - _, ok = v.data.([]map[string]interface{}) - if !ok { - s, ok := v.data.([]interface{}) - if ok { - for i := range s { - switch s[i].(type) { - case Map: - case map[string]interface{}: - default: - return false - } - } - return true - } - } - } - - return ok -} - -// EachObjxMap calls the specified callback for each object -// in the [](Map). -// -// Panics if the object is the wrong type. -func (v *Value) EachObjxMap(callback func(int, Map) bool) *Value { - for index, val := range v.MustObjxMapSlice() { - carryon := callback(index, val) - if !carryon { - break - } - } - return v -} - -// WhereObjxMap uses the specified decider function to select items -// from the [](Map). The object contained in the result will contain -// only the selected items. -func (v *Value) WhereObjxMap(decider func(int, Map) bool) *Value { - var selected [](Map) - v.EachObjxMap(func(index int, val Map) bool { - shouldSelect := decider(index, val) - if !shouldSelect { - selected = append(selected, val) - } - return true - }) - return &Value{data: selected} -} - -// GroupObjxMap uses the specified grouper function to group the items -// keyed by the return of the grouper. 
The object contained in the -// result will contain a map[string][](Map). -func (v *Value) GroupObjxMap(grouper func(int, Map) string) *Value { - groups := make(map[string][](Map)) - v.EachObjxMap(func(index int, val Map) bool { - group := grouper(index, val) - if _, ok := groups[group]; !ok { - groups[group] = make([](Map), 0) - } - groups[group] = append(groups[group], val) - return true - }) - return &Value{data: groups} -} - -// ReplaceObjxMap uses the specified function to replace each (Map)s -// by iterating each item. The data in the returned result will be a -// [](Map) containing the replaced items. -func (v *Value) ReplaceObjxMap(replacer func(int, Map) Map) *Value { - arr := v.MustObjxMapSlice() - replaced := make([](Map), len(arr)) - v.EachObjxMap(func(index int, val Map) bool { - replaced[index] = replacer(index, val) - return true - }) - return &Value{data: replaced} -} - -// CollectObjxMap uses the specified collector function to collect a value -// for each of the (Map)s in the slice. The data returned will be a -// []interface{}. -func (v *Value) CollectObjxMap(collector func(int, Map) interface{}) *Value { - arr := v.MustObjxMapSlice() - collected := make([]interface{}, len(arr)) - v.EachObjxMap(func(index int, val Map) bool { - collected[index] = collector(index, val) - return true - }) - return &Value{data: collected} -} diff --git a/vendor/github.com/stretchr/objx/type_specific_codegen.go b/vendor/github.com/stretchr/objx/type_specific_codegen.go deleted file mode 100644 index 45850456e..000000000 --- a/vendor/github.com/stretchr/objx/type_specific_codegen.go +++ /dev/null @@ -1,2261 +0,0 @@ -package objx - -/* - Inter (interface{} and []interface{}) -*/ - -// Inter gets the value as a interface{}, returns the optionalDefault -// value or a system default object if the value is the wrong type. -func (v *Value) Inter(optionalDefault ...interface{}) interface{} { - if s, ok := v.data.(interface{}); ok { - return s - } - if len(optionalDefault) == 1 { - return optionalDefault[0] - } - return nil -} - -// MustInter gets the value as a interface{}. -// -// Panics if the object is not a interface{}. -func (v *Value) MustInter() interface{} { - return v.data.(interface{}) -} - -// InterSlice gets the value as a []interface{}, returns the optionalDefault -// value or nil if the value is not a []interface{}. -func (v *Value) InterSlice(optionalDefault ...[]interface{}) []interface{} { - if s, ok := v.data.([]interface{}); ok { - return s - } - if len(optionalDefault) == 1 { - return optionalDefault[0] - } - return nil -} - -// MustInterSlice gets the value as a []interface{}. -// -// Panics if the object is not a []interface{}. -func (v *Value) MustInterSlice() []interface{} { - return v.data.([]interface{}) -} - -// IsInter gets whether the object contained is a interface{} or not. -func (v *Value) IsInter() bool { - _, ok := v.data.(interface{}) - return ok -} - -// IsInterSlice gets whether the object contained is a []interface{} or not. -func (v *Value) IsInterSlice() bool { - _, ok := v.data.([]interface{}) - return ok -} - -// EachInter calls the specified callback for each object -// in the []interface{}. -// -// Panics if the object is the wrong type. -func (v *Value) EachInter(callback func(int, interface{}) bool) *Value { - for index, val := range v.MustInterSlice() { - carryon := callback(index, val) - if !carryon { - break - } - } - return v -} - -// WhereInter uses the specified decider function to select items -// from the []interface{}. 
The object contained in the result will contain -// only the selected items. -func (v *Value) WhereInter(decider func(int, interface{}) bool) *Value { - var selected []interface{} - v.EachInter(func(index int, val interface{}) bool { - shouldSelect := decider(index, val) - if !shouldSelect { - selected = append(selected, val) - } - return true - }) - return &Value{data: selected} -} - -// GroupInter uses the specified grouper function to group the items -// keyed by the return of the grouper. The object contained in the -// result will contain a map[string][]interface{}. -func (v *Value) GroupInter(grouper func(int, interface{}) string) *Value { - groups := make(map[string][]interface{}) - v.EachInter(func(index int, val interface{}) bool { - group := grouper(index, val) - if _, ok := groups[group]; !ok { - groups[group] = make([]interface{}, 0) - } - groups[group] = append(groups[group], val) - return true - }) - return &Value{data: groups} -} - -// ReplaceInter uses the specified function to replace each interface{}s -// by iterating each item. The data in the returned result will be a -// []interface{} containing the replaced items. -func (v *Value) ReplaceInter(replacer func(int, interface{}) interface{}) *Value { - arr := v.MustInterSlice() - replaced := make([]interface{}, len(arr)) - v.EachInter(func(index int, val interface{}) bool { - replaced[index] = replacer(index, val) - return true - }) - return &Value{data: replaced} -} - -// CollectInter uses the specified collector function to collect a value -// for each of the interface{}s in the slice. The data returned will be a -// []interface{}. -func (v *Value) CollectInter(collector func(int, interface{}) interface{}) *Value { - arr := v.MustInterSlice() - collected := make([]interface{}, len(arr)) - v.EachInter(func(index int, val interface{}) bool { - collected[index] = collector(index, val) - return true - }) - return &Value{data: collected} -} - -/* - Bool (bool and []bool) -*/ - -// Bool gets the value as a bool, returns the optionalDefault -// value or a system default object if the value is the wrong type. -func (v *Value) Bool(optionalDefault ...bool) bool { - if s, ok := v.data.(bool); ok { - return s - } - if len(optionalDefault) == 1 { - return optionalDefault[0] - } - return false -} - -// MustBool gets the value as a bool. -// -// Panics if the object is not a bool. -func (v *Value) MustBool() bool { - return v.data.(bool) -} - -// BoolSlice gets the value as a []bool, returns the optionalDefault -// value or nil if the value is not a []bool. -func (v *Value) BoolSlice(optionalDefault ...[]bool) []bool { - if s, ok := v.data.([]bool); ok { - return s - } - if len(optionalDefault) == 1 { - return optionalDefault[0] - } - return nil -} - -// MustBoolSlice gets the value as a []bool. -// -// Panics if the object is not a []bool. -func (v *Value) MustBoolSlice() []bool { - return v.data.([]bool) -} - -// IsBool gets whether the object contained is a bool or not. -func (v *Value) IsBool() bool { - _, ok := v.data.(bool) - return ok -} - -// IsBoolSlice gets whether the object contained is a []bool or not. -func (v *Value) IsBoolSlice() bool { - _, ok := v.data.([]bool) - return ok -} - -// EachBool calls the specified callback for each object -// in the []bool. -// -// Panics if the object is the wrong type. 
-func (v *Value) EachBool(callback func(int, bool) bool) *Value { - for index, val := range v.MustBoolSlice() { - carryon := callback(index, val) - if !carryon { - break - } - } - return v -} - -// WhereBool uses the specified decider function to select items -// from the []bool. The object contained in the result will contain -// only the selected items. -func (v *Value) WhereBool(decider func(int, bool) bool) *Value { - var selected []bool - v.EachBool(func(index int, val bool) bool { - shouldSelect := decider(index, val) - if !shouldSelect { - selected = append(selected, val) - } - return true - }) - return &Value{data: selected} -} - -// GroupBool uses the specified grouper function to group the items -// keyed by the return of the grouper. The object contained in the -// result will contain a map[string][]bool. -func (v *Value) GroupBool(grouper func(int, bool) string) *Value { - groups := make(map[string][]bool) - v.EachBool(func(index int, val bool) bool { - group := grouper(index, val) - if _, ok := groups[group]; !ok { - groups[group] = make([]bool, 0) - } - groups[group] = append(groups[group], val) - return true - }) - return &Value{data: groups} -} - -// ReplaceBool uses the specified function to replace each bools -// by iterating each item. The data in the returned result will be a -// []bool containing the replaced items. -func (v *Value) ReplaceBool(replacer func(int, bool) bool) *Value { - arr := v.MustBoolSlice() - replaced := make([]bool, len(arr)) - v.EachBool(func(index int, val bool) bool { - replaced[index] = replacer(index, val) - return true - }) - return &Value{data: replaced} -} - -// CollectBool uses the specified collector function to collect a value -// for each of the bools in the slice. The data returned will be a -// []interface{}. -func (v *Value) CollectBool(collector func(int, bool) interface{}) *Value { - arr := v.MustBoolSlice() - collected := make([]interface{}, len(arr)) - v.EachBool(func(index int, val bool) bool { - collected[index] = collector(index, val) - return true - }) - return &Value{data: collected} -} - -/* - Str (string and []string) -*/ - -// Str gets the value as a string, returns the optionalDefault -// value or a system default object if the value is the wrong type. -func (v *Value) Str(optionalDefault ...string) string { - if s, ok := v.data.(string); ok { - return s - } - if len(optionalDefault) == 1 { - return optionalDefault[0] - } - return "" -} - -// MustStr gets the value as a string. -// -// Panics if the object is not a string. -func (v *Value) MustStr() string { - return v.data.(string) -} - -// StrSlice gets the value as a []string, returns the optionalDefault -// value or nil if the value is not a []string. -func (v *Value) StrSlice(optionalDefault ...[]string) []string { - if s, ok := v.data.([]string); ok { - return s - } - if len(optionalDefault) == 1 { - return optionalDefault[0] - } - return nil -} - -// MustStrSlice gets the value as a []string. -// -// Panics if the object is not a []string. -func (v *Value) MustStrSlice() []string { - return v.data.([]string) -} - -// IsStr gets whether the object contained is a string or not. -func (v *Value) IsStr() bool { - _, ok := v.data.(string) - return ok -} - -// IsStrSlice gets whether the object contained is a []string or not. -func (v *Value) IsStrSlice() bool { - _, ok := v.data.([]string) - return ok -} - -// EachStr calls the specified callback for each object -// in the []string. -// -// Panics if the object is the wrong type. 
-func (v *Value) EachStr(callback func(int, string) bool) *Value { - for index, val := range v.MustStrSlice() { - carryon := callback(index, val) - if !carryon { - break - } - } - return v -} - -// WhereStr uses the specified decider function to select items -// from the []string. The object contained in the result will contain -// only the selected items. -func (v *Value) WhereStr(decider func(int, string) bool) *Value { - var selected []string - v.EachStr(func(index int, val string) bool { - shouldSelect := decider(index, val) - if !shouldSelect { - selected = append(selected, val) - } - return true - }) - return &Value{data: selected} -} - -// GroupStr uses the specified grouper function to group the items -// keyed by the return of the grouper. The object contained in the -// result will contain a map[string][]string. -func (v *Value) GroupStr(grouper func(int, string) string) *Value { - groups := make(map[string][]string) - v.EachStr(func(index int, val string) bool { - group := grouper(index, val) - if _, ok := groups[group]; !ok { - groups[group] = make([]string, 0) - } - groups[group] = append(groups[group], val) - return true - }) - return &Value{data: groups} -} - -// ReplaceStr uses the specified function to replace each strings -// by iterating each item. The data in the returned result will be a -// []string containing the replaced items. -func (v *Value) ReplaceStr(replacer func(int, string) string) *Value { - arr := v.MustStrSlice() - replaced := make([]string, len(arr)) - v.EachStr(func(index int, val string) bool { - replaced[index] = replacer(index, val) - return true - }) - return &Value{data: replaced} -} - -// CollectStr uses the specified collector function to collect a value -// for each of the strings in the slice. The data returned will be a -// []interface{}. -func (v *Value) CollectStr(collector func(int, string) interface{}) *Value { - arr := v.MustStrSlice() - collected := make([]interface{}, len(arr)) - v.EachStr(func(index int, val string) bool { - collected[index] = collector(index, val) - return true - }) - return &Value{data: collected} -} - -/* - Int (int and []int) -*/ - -// Int gets the value as a int, returns the optionalDefault -// value or a system default object if the value is the wrong type. -func (v *Value) Int(optionalDefault ...int) int { - if s, ok := v.data.(int); ok { - return s - } - if s, ok := v.data.(float64); ok { - if float64(int(s)) == s { - return int(s) - } - } - if len(optionalDefault) == 1 { - return optionalDefault[0] - } - return 0 -} - -// MustInt gets the value as a int. -// -// Panics if the object is not a int. -func (v *Value) MustInt() int { - if s, ok := v.data.(float64); ok { - if float64(int(s)) == s { - return int(s) - } - } - return v.data.(int) -} - -// IntSlice gets the value as a []int, returns the optionalDefault -// value or nil if the value is not a []int. -func (v *Value) IntSlice(optionalDefault ...[]int) []int { - if s, ok := v.data.([]int); ok { - return s - } - if len(optionalDefault) == 1 { - return optionalDefault[0] - } - return nil -} - -// MustIntSlice gets the value as a []int. -// -// Panics if the object is not a []int. -func (v *Value) MustIntSlice() []int { - return v.data.([]int) -} - -// IsInt gets whether the object contained is a int or not. -func (v *Value) IsInt() bool { - _, ok := v.data.(int) - return ok -} - -// IsIntSlice gets whether the object contained is a []int or not. 
-func (v *Value) IsIntSlice() bool { - _, ok := v.data.([]int) - return ok -} - -// EachInt calls the specified callback for each object -// in the []int. -// -// Panics if the object is the wrong type. -func (v *Value) EachInt(callback func(int, int) bool) *Value { - for index, val := range v.MustIntSlice() { - carryon := callback(index, val) - if !carryon { - break - } - } - return v -} - -// WhereInt uses the specified decider function to select items -// from the []int. The object contained in the result will contain -// only the selected items. -func (v *Value) WhereInt(decider func(int, int) bool) *Value { - var selected []int - v.EachInt(func(index int, val int) bool { - shouldSelect := decider(index, val) - if !shouldSelect { - selected = append(selected, val) - } - return true - }) - return &Value{data: selected} -} - -// GroupInt uses the specified grouper function to group the items -// keyed by the return of the grouper. The object contained in the -// result will contain a map[string][]int. -func (v *Value) GroupInt(grouper func(int, int) string) *Value { - groups := make(map[string][]int) - v.EachInt(func(index int, val int) bool { - group := grouper(index, val) - if _, ok := groups[group]; !ok { - groups[group] = make([]int, 0) - } - groups[group] = append(groups[group], val) - return true - }) - return &Value{data: groups} -} - -// ReplaceInt uses the specified function to replace each ints -// by iterating each item. The data in the returned result will be a -// []int containing the replaced items. -func (v *Value) ReplaceInt(replacer func(int, int) int) *Value { - arr := v.MustIntSlice() - replaced := make([]int, len(arr)) - v.EachInt(func(index int, val int) bool { - replaced[index] = replacer(index, val) - return true - }) - return &Value{data: replaced} -} - -// CollectInt uses the specified collector function to collect a value -// for each of the ints in the slice. The data returned will be a -// []interface{}. -func (v *Value) CollectInt(collector func(int, int) interface{}) *Value { - arr := v.MustIntSlice() - collected := make([]interface{}, len(arr)) - v.EachInt(func(index int, val int) bool { - collected[index] = collector(index, val) - return true - }) - return &Value{data: collected} -} - -/* - Int8 (int8 and []int8) -*/ - -// Int8 gets the value as a int8, returns the optionalDefault -// value or a system default object if the value is the wrong type. -func (v *Value) Int8(optionalDefault ...int8) int8 { - if s, ok := v.data.(int8); ok { - return s - } - if len(optionalDefault) == 1 { - return optionalDefault[0] - } - return 0 -} - -// MustInt8 gets the value as a int8. -// -// Panics if the object is not a int8. -func (v *Value) MustInt8() int8 { - return v.data.(int8) -} - -// Int8Slice gets the value as a []int8, returns the optionalDefault -// value or nil if the value is not a []int8. -func (v *Value) Int8Slice(optionalDefault ...[]int8) []int8 { - if s, ok := v.data.([]int8); ok { - return s - } - if len(optionalDefault) == 1 { - return optionalDefault[0] - } - return nil -} - -// MustInt8Slice gets the value as a []int8. -// -// Panics if the object is not a []int8. -func (v *Value) MustInt8Slice() []int8 { - return v.data.([]int8) -} - -// IsInt8 gets whether the object contained is a int8 or not. -func (v *Value) IsInt8() bool { - _, ok := v.data.(int8) - return ok -} - -// IsInt8Slice gets whether the object contained is a []int8 or not. 
-func (v *Value) IsInt8Slice() bool { - _, ok := v.data.([]int8) - return ok -} - -// EachInt8 calls the specified callback for each object -// in the []int8. -// -// Panics if the object is the wrong type. -func (v *Value) EachInt8(callback func(int, int8) bool) *Value { - for index, val := range v.MustInt8Slice() { - carryon := callback(index, val) - if !carryon { - break - } - } - return v -} - -// WhereInt8 uses the specified decider function to select items -// from the []int8. The object contained in the result will contain -// only the selected items. -func (v *Value) WhereInt8(decider func(int, int8) bool) *Value { - var selected []int8 - v.EachInt8(func(index int, val int8) bool { - shouldSelect := decider(index, val) - if !shouldSelect { - selected = append(selected, val) - } - return true - }) - return &Value{data: selected} -} - -// GroupInt8 uses the specified grouper function to group the items -// keyed by the return of the grouper. The object contained in the -// result will contain a map[string][]int8. -func (v *Value) GroupInt8(grouper func(int, int8) string) *Value { - groups := make(map[string][]int8) - v.EachInt8(func(index int, val int8) bool { - group := grouper(index, val) - if _, ok := groups[group]; !ok { - groups[group] = make([]int8, 0) - } - groups[group] = append(groups[group], val) - return true - }) - return &Value{data: groups} -} - -// ReplaceInt8 uses the specified function to replace each int8s -// by iterating each item. The data in the returned result will be a -// []int8 containing the replaced items. -func (v *Value) ReplaceInt8(replacer func(int, int8) int8) *Value { - arr := v.MustInt8Slice() - replaced := make([]int8, len(arr)) - v.EachInt8(func(index int, val int8) bool { - replaced[index] = replacer(index, val) - return true - }) - return &Value{data: replaced} -} - -// CollectInt8 uses the specified collector function to collect a value -// for each of the int8s in the slice. The data returned will be a -// []interface{}. -func (v *Value) CollectInt8(collector func(int, int8) interface{}) *Value { - arr := v.MustInt8Slice() - collected := make([]interface{}, len(arr)) - v.EachInt8(func(index int, val int8) bool { - collected[index] = collector(index, val) - return true - }) - return &Value{data: collected} -} - -/* - Int16 (int16 and []int16) -*/ - -// Int16 gets the value as a int16, returns the optionalDefault -// value or a system default object if the value is the wrong type. -func (v *Value) Int16(optionalDefault ...int16) int16 { - if s, ok := v.data.(int16); ok { - return s - } - if len(optionalDefault) == 1 { - return optionalDefault[0] - } - return 0 -} - -// MustInt16 gets the value as a int16. -// -// Panics if the object is not a int16. -func (v *Value) MustInt16() int16 { - return v.data.(int16) -} - -// Int16Slice gets the value as a []int16, returns the optionalDefault -// value or nil if the value is not a []int16. -func (v *Value) Int16Slice(optionalDefault ...[]int16) []int16 { - if s, ok := v.data.([]int16); ok { - return s - } - if len(optionalDefault) == 1 { - return optionalDefault[0] - } - return nil -} - -// MustInt16Slice gets the value as a []int16. -// -// Panics if the object is not a []int16. -func (v *Value) MustInt16Slice() []int16 { - return v.data.([]int16) -} - -// IsInt16 gets whether the object contained is a int16 or not. -func (v *Value) IsInt16() bool { - _, ok := v.data.(int16) - return ok -} - -// IsInt16Slice gets whether the object contained is a []int16 or not. 
-func (v *Value) IsInt16Slice() bool { - _, ok := v.data.([]int16) - return ok -} - -// EachInt16 calls the specified callback for each object -// in the []int16. -// -// Panics if the object is the wrong type. -func (v *Value) EachInt16(callback func(int, int16) bool) *Value { - for index, val := range v.MustInt16Slice() { - carryon := callback(index, val) - if !carryon { - break - } - } - return v -} - -// WhereInt16 uses the specified decider function to select items -// from the []int16. The object contained in the result will contain -// only the selected items. -func (v *Value) WhereInt16(decider func(int, int16) bool) *Value { - var selected []int16 - v.EachInt16(func(index int, val int16) bool { - shouldSelect := decider(index, val) - if !shouldSelect { - selected = append(selected, val) - } - return true - }) - return &Value{data: selected} -} - -// GroupInt16 uses the specified grouper function to group the items -// keyed by the return of the grouper. The object contained in the -// result will contain a map[string][]int16. -func (v *Value) GroupInt16(grouper func(int, int16) string) *Value { - groups := make(map[string][]int16) - v.EachInt16(func(index int, val int16) bool { - group := grouper(index, val) - if _, ok := groups[group]; !ok { - groups[group] = make([]int16, 0) - } - groups[group] = append(groups[group], val) - return true - }) - return &Value{data: groups} -} - -// ReplaceInt16 uses the specified function to replace each int16s -// by iterating each item. The data in the returned result will be a -// []int16 containing the replaced items. -func (v *Value) ReplaceInt16(replacer func(int, int16) int16) *Value { - arr := v.MustInt16Slice() - replaced := make([]int16, len(arr)) - v.EachInt16(func(index int, val int16) bool { - replaced[index] = replacer(index, val) - return true - }) - return &Value{data: replaced} -} - -// CollectInt16 uses the specified collector function to collect a value -// for each of the int16s in the slice. The data returned will be a -// []interface{}. -func (v *Value) CollectInt16(collector func(int, int16) interface{}) *Value { - arr := v.MustInt16Slice() - collected := make([]interface{}, len(arr)) - v.EachInt16(func(index int, val int16) bool { - collected[index] = collector(index, val) - return true - }) - return &Value{data: collected} -} - -/* - Int32 (int32 and []int32) -*/ - -// Int32 gets the value as a int32, returns the optionalDefault -// value or a system default object if the value is the wrong type. -func (v *Value) Int32(optionalDefault ...int32) int32 { - if s, ok := v.data.(int32); ok { - return s - } - if len(optionalDefault) == 1 { - return optionalDefault[0] - } - return 0 -} - -// MustInt32 gets the value as a int32. -// -// Panics if the object is not a int32. -func (v *Value) MustInt32() int32 { - return v.data.(int32) -} - -// Int32Slice gets the value as a []int32, returns the optionalDefault -// value or nil if the value is not a []int32. -func (v *Value) Int32Slice(optionalDefault ...[]int32) []int32 { - if s, ok := v.data.([]int32); ok { - return s - } - if len(optionalDefault) == 1 { - return optionalDefault[0] - } - return nil -} - -// MustInt32Slice gets the value as a []int32. -// -// Panics if the object is not a []int32. -func (v *Value) MustInt32Slice() []int32 { - return v.data.([]int32) -} - -// IsInt32 gets whether the object contained is a int32 or not. -func (v *Value) IsInt32() bool { - _, ok := v.data.(int32) - return ok -} - -// IsInt32Slice gets whether the object contained is a []int32 or not. 
-func (v *Value) IsInt32Slice() bool { - _, ok := v.data.([]int32) - return ok -} - -// EachInt32 calls the specified callback for each object -// in the []int32. -// -// Panics if the object is the wrong type. -func (v *Value) EachInt32(callback func(int, int32) bool) *Value { - for index, val := range v.MustInt32Slice() { - carryon := callback(index, val) - if !carryon { - break - } - } - return v -} - -// WhereInt32 uses the specified decider function to select items -// from the []int32. The object contained in the result will contain -// only the selected items. -func (v *Value) WhereInt32(decider func(int, int32) bool) *Value { - var selected []int32 - v.EachInt32(func(index int, val int32) bool { - shouldSelect := decider(index, val) - if !shouldSelect { - selected = append(selected, val) - } - return true - }) - return &Value{data: selected} -} - -// GroupInt32 uses the specified grouper function to group the items -// keyed by the return of the grouper. The object contained in the -// result will contain a map[string][]int32. -func (v *Value) GroupInt32(grouper func(int, int32) string) *Value { - groups := make(map[string][]int32) - v.EachInt32(func(index int, val int32) bool { - group := grouper(index, val) - if _, ok := groups[group]; !ok { - groups[group] = make([]int32, 0) - } - groups[group] = append(groups[group], val) - return true - }) - return &Value{data: groups} -} - -// ReplaceInt32 uses the specified function to replace each int32s -// by iterating each item. The data in the returned result will be a -// []int32 containing the replaced items. -func (v *Value) ReplaceInt32(replacer func(int, int32) int32) *Value { - arr := v.MustInt32Slice() - replaced := make([]int32, len(arr)) - v.EachInt32(func(index int, val int32) bool { - replaced[index] = replacer(index, val) - return true - }) - return &Value{data: replaced} -} - -// CollectInt32 uses the specified collector function to collect a value -// for each of the int32s in the slice. The data returned will be a -// []interface{}. -func (v *Value) CollectInt32(collector func(int, int32) interface{}) *Value { - arr := v.MustInt32Slice() - collected := make([]interface{}, len(arr)) - v.EachInt32(func(index int, val int32) bool { - collected[index] = collector(index, val) - return true - }) - return &Value{data: collected} -} - -/* - Int64 (int64 and []int64) -*/ - -// Int64 gets the value as a int64, returns the optionalDefault -// value or a system default object if the value is the wrong type. -func (v *Value) Int64(optionalDefault ...int64) int64 { - if s, ok := v.data.(int64); ok { - return s - } - if len(optionalDefault) == 1 { - return optionalDefault[0] - } - return 0 -} - -// MustInt64 gets the value as a int64. -// -// Panics if the object is not a int64. -func (v *Value) MustInt64() int64 { - return v.data.(int64) -} - -// Int64Slice gets the value as a []int64, returns the optionalDefault -// value or nil if the value is not a []int64. -func (v *Value) Int64Slice(optionalDefault ...[]int64) []int64 { - if s, ok := v.data.([]int64); ok { - return s - } - if len(optionalDefault) == 1 { - return optionalDefault[0] - } - return nil -} - -// MustInt64Slice gets the value as a []int64. -// -// Panics if the object is not a []int64. -func (v *Value) MustInt64Slice() []int64 { - return v.data.([]int64) -} - -// IsInt64 gets whether the object contained is a int64 or not. -func (v *Value) IsInt64() bool { - _, ok := v.data.(int64) - return ok -} - -// IsInt64Slice gets whether the object contained is a []int64 or not. 
-func (v *Value) IsInt64Slice() bool { - _, ok := v.data.([]int64) - return ok -} - -// EachInt64 calls the specified callback for each object -// in the []int64. -// -// Panics if the object is the wrong type. -func (v *Value) EachInt64(callback func(int, int64) bool) *Value { - for index, val := range v.MustInt64Slice() { - carryon := callback(index, val) - if !carryon { - break - } - } - return v -} - -// WhereInt64 uses the specified decider function to select items -// from the []int64. The object contained in the result will contain -// only the selected items. -func (v *Value) WhereInt64(decider func(int, int64) bool) *Value { - var selected []int64 - v.EachInt64(func(index int, val int64) bool { - shouldSelect := decider(index, val) - if !shouldSelect { - selected = append(selected, val) - } - return true - }) - return &Value{data: selected} -} - -// GroupInt64 uses the specified grouper function to group the items -// keyed by the return of the grouper. The object contained in the -// result will contain a map[string][]int64. -func (v *Value) GroupInt64(grouper func(int, int64) string) *Value { - groups := make(map[string][]int64) - v.EachInt64(func(index int, val int64) bool { - group := grouper(index, val) - if _, ok := groups[group]; !ok { - groups[group] = make([]int64, 0) - } - groups[group] = append(groups[group], val) - return true - }) - return &Value{data: groups} -} - -// ReplaceInt64 uses the specified function to replace each int64s -// by iterating each item. The data in the returned result will be a -// []int64 containing the replaced items. -func (v *Value) ReplaceInt64(replacer func(int, int64) int64) *Value { - arr := v.MustInt64Slice() - replaced := make([]int64, len(arr)) - v.EachInt64(func(index int, val int64) bool { - replaced[index] = replacer(index, val) - return true - }) - return &Value{data: replaced} -} - -// CollectInt64 uses the specified collector function to collect a value -// for each of the int64s in the slice. The data returned will be a -// []interface{}. -func (v *Value) CollectInt64(collector func(int, int64) interface{}) *Value { - arr := v.MustInt64Slice() - collected := make([]interface{}, len(arr)) - v.EachInt64(func(index int, val int64) bool { - collected[index] = collector(index, val) - return true - }) - return &Value{data: collected} -} - -/* - Uint (uint and []uint) -*/ - -// Uint gets the value as a uint, returns the optionalDefault -// value or a system default object if the value is the wrong type. -func (v *Value) Uint(optionalDefault ...uint) uint { - if s, ok := v.data.(uint); ok { - return s - } - if len(optionalDefault) == 1 { - return optionalDefault[0] - } - return 0 -} - -// MustUint gets the value as a uint. -// -// Panics if the object is not a uint. -func (v *Value) MustUint() uint { - return v.data.(uint) -} - -// UintSlice gets the value as a []uint, returns the optionalDefault -// value or nil if the value is not a []uint. -func (v *Value) UintSlice(optionalDefault ...[]uint) []uint { - if s, ok := v.data.([]uint); ok { - return s - } - if len(optionalDefault) == 1 { - return optionalDefault[0] - } - return nil -} - -// MustUintSlice gets the value as a []uint. -// -// Panics if the object is not a []uint. -func (v *Value) MustUintSlice() []uint { - return v.data.([]uint) -} - -// IsUint gets whether the object contained is a uint or not. -func (v *Value) IsUint() bool { - _, ok := v.data.(uint) - return ok -} - -// IsUintSlice gets whether the object contained is a []uint or not. 
-func (v *Value) IsUintSlice() bool { - _, ok := v.data.([]uint) - return ok -} - -// EachUint calls the specified callback for each object -// in the []uint. -// -// Panics if the object is the wrong type. -func (v *Value) EachUint(callback func(int, uint) bool) *Value { - for index, val := range v.MustUintSlice() { - carryon := callback(index, val) - if !carryon { - break - } - } - return v -} - -// WhereUint uses the specified decider function to select items -// from the []uint. The object contained in the result will contain -// only the selected items. -func (v *Value) WhereUint(decider func(int, uint) bool) *Value { - var selected []uint - v.EachUint(func(index int, val uint) bool { - shouldSelect := decider(index, val) - if !shouldSelect { - selected = append(selected, val) - } - return true - }) - return &Value{data: selected} -} - -// GroupUint uses the specified grouper function to group the items -// keyed by the return of the grouper. The object contained in the -// result will contain a map[string][]uint. -func (v *Value) GroupUint(grouper func(int, uint) string) *Value { - groups := make(map[string][]uint) - v.EachUint(func(index int, val uint) bool { - group := grouper(index, val) - if _, ok := groups[group]; !ok { - groups[group] = make([]uint, 0) - } - groups[group] = append(groups[group], val) - return true - }) - return &Value{data: groups} -} - -// ReplaceUint uses the specified function to replace each uints -// by iterating each item. The data in the returned result will be a -// []uint containing the replaced items. -func (v *Value) ReplaceUint(replacer func(int, uint) uint) *Value { - arr := v.MustUintSlice() - replaced := make([]uint, len(arr)) - v.EachUint(func(index int, val uint) bool { - replaced[index] = replacer(index, val) - return true - }) - return &Value{data: replaced} -} - -// CollectUint uses the specified collector function to collect a value -// for each of the uints in the slice. The data returned will be a -// []interface{}. -func (v *Value) CollectUint(collector func(int, uint) interface{}) *Value { - arr := v.MustUintSlice() - collected := make([]interface{}, len(arr)) - v.EachUint(func(index int, val uint) bool { - collected[index] = collector(index, val) - return true - }) - return &Value{data: collected} -} - -/* - Uint8 (uint8 and []uint8) -*/ - -// Uint8 gets the value as a uint8, returns the optionalDefault -// value or a system default object if the value is the wrong type. -func (v *Value) Uint8(optionalDefault ...uint8) uint8 { - if s, ok := v.data.(uint8); ok { - return s - } - if len(optionalDefault) == 1 { - return optionalDefault[0] - } - return 0 -} - -// MustUint8 gets the value as a uint8. -// -// Panics if the object is not a uint8. -func (v *Value) MustUint8() uint8 { - return v.data.(uint8) -} - -// Uint8Slice gets the value as a []uint8, returns the optionalDefault -// value or nil if the value is not a []uint8. -func (v *Value) Uint8Slice(optionalDefault ...[]uint8) []uint8 { - if s, ok := v.data.([]uint8); ok { - return s - } - if len(optionalDefault) == 1 { - return optionalDefault[0] - } - return nil -} - -// MustUint8Slice gets the value as a []uint8. -// -// Panics if the object is not a []uint8. -func (v *Value) MustUint8Slice() []uint8 { - return v.data.([]uint8) -} - -// IsUint8 gets whether the object contained is a uint8 or not. -func (v *Value) IsUint8() bool { - _, ok := v.data.(uint8) - return ok -} - -// IsUint8Slice gets whether the object contained is a []uint8 or not. 
-func (v *Value) IsUint8Slice() bool { - _, ok := v.data.([]uint8) - return ok -} - -// EachUint8 calls the specified callback for each object -// in the []uint8. -// -// Panics if the object is the wrong type. -func (v *Value) EachUint8(callback func(int, uint8) bool) *Value { - for index, val := range v.MustUint8Slice() { - carryon := callback(index, val) - if !carryon { - break - } - } - return v -} - -// WhereUint8 uses the specified decider function to select items -// from the []uint8. The object contained in the result will contain -// only the selected items. -func (v *Value) WhereUint8(decider func(int, uint8) bool) *Value { - var selected []uint8 - v.EachUint8(func(index int, val uint8) bool { - shouldSelect := decider(index, val) - if !shouldSelect { - selected = append(selected, val) - } - return true - }) - return &Value{data: selected} -} - -// GroupUint8 uses the specified grouper function to group the items -// keyed by the return of the grouper. The object contained in the -// result will contain a map[string][]uint8. -func (v *Value) GroupUint8(grouper func(int, uint8) string) *Value { - groups := make(map[string][]uint8) - v.EachUint8(func(index int, val uint8) bool { - group := grouper(index, val) - if _, ok := groups[group]; !ok { - groups[group] = make([]uint8, 0) - } - groups[group] = append(groups[group], val) - return true - }) - return &Value{data: groups} -} - -// ReplaceUint8 uses the specified function to replace each uint8s -// by iterating each item. The data in the returned result will be a -// []uint8 containing the replaced items. -func (v *Value) ReplaceUint8(replacer func(int, uint8) uint8) *Value { - arr := v.MustUint8Slice() - replaced := make([]uint8, len(arr)) - v.EachUint8(func(index int, val uint8) bool { - replaced[index] = replacer(index, val) - return true - }) - return &Value{data: replaced} -} - -// CollectUint8 uses the specified collector function to collect a value -// for each of the uint8s in the slice. The data returned will be a -// []interface{}. -func (v *Value) CollectUint8(collector func(int, uint8) interface{}) *Value { - arr := v.MustUint8Slice() - collected := make([]interface{}, len(arr)) - v.EachUint8(func(index int, val uint8) bool { - collected[index] = collector(index, val) - return true - }) - return &Value{data: collected} -} - -/* - Uint16 (uint16 and []uint16) -*/ - -// Uint16 gets the value as a uint16, returns the optionalDefault -// value or a system default object if the value is the wrong type. -func (v *Value) Uint16(optionalDefault ...uint16) uint16 { - if s, ok := v.data.(uint16); ok { - return s - } - if len(optionalDefault) == 1 { - return optionalDefault[0] - } - return 0 -} - -// MustUint16 gets the value as a uint16. -// -// Panics if the object is not a uint16. -func (v *Value) MustUint16() uint16 { - return v.data.(uint16) -} - -// Uint16Slice gets the value as a []uint16, returns the optionalDefault -// value or nil if the value is not a []uint16. -func (v *Value) Uint16Slice(optionalDefault ...[]uint16) []uint16 { - if s, ok := v.data.([]uint16); ok { - return s - } - if len(optionalDefault) == 1 { - return optionalDefault[0] - } - return nil -} - -// MustUint16Slice gets the value as a []uint16. -// -// Panics if the object is not a []uint16. -func (v *Value) MustUint16Slice() []uint16 { - return v.data.([]uint16) -} - -// IsUint16 gets whether the object contained is a uint16 or not. 
-func (v *Value) IsUint16() bool { - _, ok := v.data.(uint16) - return ok -} - -// IsUint16Slice gets whether the object contained is a []uint16 or not. -func (v *Value) IsUint16Slice() bool { - _, ok := v.data.([]uint16) - return ok -} - -// EachUint16 calls the specified callback for each object -// in the []uint16. -// -// Panics if the object is the wrong type. -func (v *Value) EachUint16(callback func(int, uint16) bool) *Value { - for index, val := range v.MustUint16Slice() { - carryon := callback(index, val) - if !carryon { - break - } - } - return v -} - -// WhereUint16 uses the specified decider function to select items -// from the []uint16. The object contained in the result will contain -// only the selected items. -func (v *Value) WhereUint16(decider func(int, uint16) bool) *Value { - var selected []uint16 - v.EachUint16(func(index int, val uint16) bool { - shouldSelect := decider(index, val) - if !shouldSelect { - selected = append(selected, val) - } - return true - }) - return &Value{data: selected} -} - -// GroupUint16 uses the specified grouper function to group the items -// keyed by the return of the grouper. The object contained in the -// result will contain a map[string][]uint16. -func (v *Value) GroupUint16(grouper func(int, uint16) string) *Value { - groups := make(map[string][]uint16) - v.EachUint16(func(index int, val uint16) bool { - group := grouper(index, val) - if _, ok := groups[group]; !ok { - groups[group] = make([]uint16, 0) - } - groups[group] = append(groups[group], val) - return true - }) - return &Value{data: groups} -} - -// ReplaceUint16 uses the specified function to replace each uint16s -// by iterating each item. The data in the returned result will be a -// []uint16 containing the replaced items. -func (v *Value) ReplaceUint16(replacer func(int, uint16) uint16) *Value { - arr := v.MustUint16Slice() - replaced := make([]uint16, len(arr)) - v.EachUint16(func(index int, val uint16) bool { - replaced[index] = replacer(index, val) - return true - }) - return &Value{data: replaced} -} - -// CollectUint16 uses the specified collector function to collect a value -// for each of the uint16s in the slice. The data returned will be a -// []interface{}. -func (v *Value) CollectUint16(collector func(int, uint16) interface{}) *Value { - arr := v.MustUint16Slice() - collected := make([]interface{}, len(arr)) - v.EachUint16(func(index int, val uint16) bool { - collected[index] = collector(index, val) - return true - }) - return &Value{data: collected} -} - -/* - Uint32 (uint32 and []uint32) -*/ - -// Uint32 gets the value as a uint32, returns the optionalDefault -// value or a system default object if the value is the wrong type. -func (v *Value) Uint32(optionalDefault ...uint32) uint32 { - if s, ok := v.data.(uint32); ok { - return s - } - if len(optionalDefault) == 1 { - return optionalDefault[0] - } - return 0 -} - -// MustUint32 gets the value as a uint32. -// -// Panics if the object is not a uint32. -func (v *Value) MustUint32() uint32 { - return v.data.(uint32) -} - -// Uint32Slice gets the value as a []uint32, returns the optionalDefault -// value or nil if the value is not a []uint32. -func (v *Value) Uint32Slice(optionalDefault ...[]uint32) []uint32 { - if s, ok := v.data.([]uint32); ok { - return s - } - if len(optionalDefault) == 1 { - return optionalDefault[0] - } - return nil -} - -// MustUint32Slice gets the value as a []uint32. -// -// Panics if the object is not a []uint32. 
-func (v *Value) MustUint32Slice() []uint32 { - return v.data.([]uint32) -} - -// IsUint32 gets whether the object contained is a uint32 or not. -func (v *Value) IsUint32() bool { - _, ok := v.data.(uint32) - return ok -} - -// IsUint32Slice gets whether the object contained is a []uint32 or not. -func (v *Value) IsUint32Slice() bool { - _, ok := v.data.([]uint32) - return ok -} - -// EachUint32 calls the specified callback for each object -// in the []uint32. -// -// Panics if the object is the wrong type. -func (v *Value) EachUint32(callback func(int, uint32) bool) *Value { - for index, val := range v.MustUint32Slice() { - carryon := callback(index, val) - if !carryon { - break - } - } - return v -} - -// WhereUint32 uses the specified decider function to select items -// from the []uint32. The object contained in the result will contain -// only the selected items. -func (v *Value) WhereUint32(decider func(int, uint32) bool) *Value { - var selected []uint32 - v.EachUint32(func(index int, val uint32) bool { - shouldSelect := decider(index, val) - if !shouldSelect { - selected = append(selected, val) - } - return true - }) - return &Value{data: selected} -} - -// GroupUint32 uses the specified grouper function to group the items -// keyed by the return of the grouper. The object contained in the -// result will contain a map[string][]uint32. -func (v *Value) GroupUint32(grouper func(int, uint32) string) *Value { - groups := make(map[string][]uint32) - v.EachUint32(func(index int, val uint32) bool { - group := grouper(index, val) - if _, ok := groups[group]; !ok { - groups[group] = make([]uint32, 0) - } - groups[group] = append(groups[group], val) - return true - }) - return &Value{data: groups} -} - -// ReplaceUint32 uses the specified function to replace each uint32s -// by iterating each item. The data in the returned result will be a -// []uint32 containing the replaced items. -func (v *Value) ReplaceUint32(replacer func(int, uint32) uint32) *Value { - arr := v.MustUint32Slice() - replaced := make([]uint32, len(arr)) - v.EachUint32(func(index int, val uint32) bool { - replaced[index] = replacer(index, val) - return true - }) - return &Value{data: replaced} -} - -// CollectUint32 uses the specified collector function to collect a value -// for each of the uint32s in the slice. The data returned will be a -// []interface{}. -func (v *Value) CollectUint32(collector func(int, uint32) interface{}) *Value { - arr := v.MustUint32Slice() - collected := make([]interface{}, len(arr)) - v.EachUint32(func(index int, val uint32) bool { - collected[index] = collector(index, val) - return true - }) - return &Value{data: collected} -} - -/* - Uint64 (uint64 and []uint64) -*/ - -// Uint64 gets the value as a uint64, returns the optionalDefault -// value or a system default object if the value is the wrong type. -func (v *Value) Uint64(optionalDefault ...uint64) uint64 { - if s, ok := v.data.(uint64); ok { - return s - } - if len(optionalDefault) == 1 { - return optionalDefault[0] - } - return 0 -} - -// MustUint64 gets the value as a uint64. -// -// Panics if the object is not a uint64. -func (v *Value) MustUint64() uint64 { - return v.data.(uint64) -} - -// Uint64Slice gets the value as a []uint64, returns the optionalDefault -// value or nil if the value is not a []uint64. 
-func (v *Value) Uint64Slice(optionalDefault ...[]uint64) []uint64 { - if s, ok := v.data.([]uint64); ok { - return s - } - if len(optionalDefault) == 1 { - return optionalDefault[0] - } - return nil -} - -// MustUint64Slice gets the value as a []uint64. -// -// Panics if the object is not a []uint64. -func (v *Value) MustUint64Slice() []uint64 { - return v.data.([]uint64) -} - -// IsUint64 gets whether the object contained is a uint64 or not. -func (v *Value) IsUint64() bool { - _, ok := v.data.(uint64) - return ok -} - -// IsUint64Slice gets whether the object contained is a []uint64 or not. -func (v *Value) IsUint64Slice() bool { - _, ok := v.data.([]uint64) - return ok -} - -// EachUint64 calls the specified callback for each object -// in the []uint64. -// -// Panics if the object is the wrong type. -func (v *Value) EachUint64(callback func(int, uint64) bool) *Value { - for index, val := range v.MustUint64Slice() { - carryon := callback(index, val) - if !carryon { - break - } - } - return v -} - -// WhereUint64 uses the specified decider function to select items -// from the []uint64. The object contained in the result will contain -// only the selected items. -func (v *Value) WhereUint64(decider func(int, uint64) bool) *Value { - var selected []uint64 - v.EachUint64(func(index int, val uint64) bool { - shouldSelect := decider(index, val) - if !shouldSelect { - selected = append(selected, val) - } - return true - }) - return &Value{data: selected} -} - -// GroupUint64 uses the specified grouper function to group the items -// keyed by the return of the grouper. The object contained in the -// result will contain a map[string][]uint64. -func (v *Value) GroupUint64(grouper func(int, uint64) string) *Value { - groups := make(map[string][]uint64) - v.EachUint64(func(index int, val uint64) bool { - group := grouper(index, val) - if _, ok := groups[group]; !ok { - groups[group] = make([]uint64, 0) - } - groups[group] = append(groups[group], val) - return true - }) - return &Value{data: groups} -} - -// ReplaceUint64 uses the specified function to replace each uint64s -// by iterating each item. The data in the returned result will be a -// []uint64 containing the replaced items. -func (v *Value) ReplaceUint64(replacer func(int, uint64) uint64) *Value { - arr := v.MustUint64Slice() - replaced := make([]uint64, len(arr)) - v.EachUint64(func(index int, val uint64) bool { - replaced[index] = replacer(index, val) - return true - }) - return &Value{data: replaced} -} - -// CollectUint64 uses the specified collector function to collect a value -// for each of the uint64s in the slice. The data returned will be a -// []interface{}. -func (v *Value) CollectUint64(collector func(int, uint64) interface{}) *Value { - arr := v.MustUint64Slice() - collected := make([]interface{}, len(arr)) - v.EachUint64(func(index int, val uint64) bool { - collected[index] = collector(index, val) - return true - }) - return &Value{data: collected} -} - -/* - Uintptr (uintptr and []uintptr) -*/ - -// Uintptr gets the value as a uintptr, returns the optionalDefault -// value or a system default object if the value is the wrong type. -func (v *Value) Uintptr(optionalDefault ...uintptr) uintptr { - if s, ok := v.data.(uintptr); ok { - return s - } - if len(optionalDefault) == 1 { - return optionalDefault[0] - } - return 0 -} - -// MustUintptr gets the value as a uintptr. -// -// Panics if the object is not a uintptr. 
-func (v *Value) MustUintptr() uintptr { - return v.data.(uintptr) -} - -// UintptrSlice gets the value as a []uintptr, returns the optionalDefault -// value or nil if the value is not a []uintptr. -func (v *Value) UintptrSlice(optionalDefault ...[]uintptr) []uintptr { - if s, ok := v.data.([]uintptr); ok { - return s - } - if len(optionalDefault) == 1 { - return optionalDefault[0] - } - return nil -} - -// MustUintptrSlice gets the value as a []uintptr. -// -// Panics if the object is not a []uintptr. -func (v *Value) MustUintptrSlice() []uintptr { - return v.data.([]uintptr) -} - -// IsUintptr gets whether the object contained is a uintptr or not. -func (v *Value) IsUintptr() bool { - _, ok := v.data.(uintptr) - return ok -} - -// IsUintptrSlice gets whether the object contained is a []uintptr or not. -func (v *Value) IsUintptrSlice() bool { - _, ok := v.data.([]uintptr) - return ok -} - -// EachUintptr calls the specified callback for each object -// in the []uintptr. -// -// Panics if the object is the wrong type. -func (v *Value) EachUintptr(callback func(int, uintptr) bool) *Value { - for index, val := range v.MustUintptrSlice() { - carryon := callback(index, val) - if !carryon { - break - } - } - return v -} - -// WhereUintptr uses the specified decider function to select items -// from the []uintptr. The object contained in the result will contain -// only the selected items. -func (v *Value) WhereUintptr(decider func(int, uintptr) bool) *Value { - var selected []uintptr - v.EachUintptr(func(index int, val uintptr) bool { - shouldSelect := decider(index, val) - if !shouldSelect { - selected = append(selected, val) - } - return true - }) - return &Value{data: selected} -} - -// GroupUintptr uses the specified grouper function to group the items -// keyed by the return of the grouper. The object contained in the -// result will contain a map[string][]uintptr. -func (v *Value) GroupUintptr(grouper func(int, uintptr) string) *Value { - groups := make(map[string][]uintptr) - v.EachUintptr(func(index int, val uintptr) bool { - group := grouper(index, val) - if _, ok := groups[group]; !ok { - groups[group] = make([]uintptr, 0) - } - groups[group] = append(groups[group], val) - return true - }) - return &Value{data: groups} -} - -// ReplaceUintptr uses the specified function to replace each uintptrs -// by iterating each item. The data in the returned result will be a -// []uintptr containing the replaced items. -func (v *Value) ReplaceUintptr(replacer func(int, uintptr) uintptr) *Value { - arr := v.MustUintptrSlice() - replaced := make([]uintptr, len(arr)) - v.EachUintptr(func(index int, val uintptr) bool { - replaced[index] = replacer(index, val) - return true - }) - return &Value{data: replaced} -} - -// CollectUintptr uses the specified collector function to collect a value -// for each of the uintptrs in the slice. The data returned will be a -// []interface{}. -func (v *Value) CollectUintptr(collector func(int, uintptr) interface{}) *Value { - arr := v.MustUintptrSlice() - collected := make([]interface{}, len(arr)) - v.EachUintptr(func(index int, val uintptr) bool { - collected[index] = collector(index, val) - return true - }) - return &Value{data: collected} -} - -/* - Float32 (float32 and []float32) -*/ - -// Float32 gets the value as a float32, returns the optionalDefault -// value or a system default object if the value is the wrong type. 
-func (v *Value) Float32(optionalDefault ...float32) float32 { - if s, ok := v.data.(float32); ok { - return s - } - if len(optionalDefault) == 1 { - return optionalDefault[0] - } - return 0 -} - -// MustFloat32 gets the value as a float32. -// -// Panics if the object is not a float32. -func (v *Value) MustFloat32() float32 { - return v.data.(float32) -} - -// Float32Slice gets the value as a []float32, returns the optionalDefault -// value or nil if the value is not a []float32. -func (v *Value) Float32Slice(optionalDefault ...[]float32) []float32 { - if s, ok := v.data.([]float32); ok { - return s - } - if len(optionalDefault) == 1 { - return optionalDefault[0] - } - return nil -} - -// MustFloat32Slice gets the value as a []float32. -// -// Panics if the object is not a []float32. -func (v *Value) MustFloat32Slice() []float32 { - return v.data.([]float32) -} - -// IsFloat32 gets whether the object contained is a float32 or not. -func (v *Value) IsFloat32() bool { - _, ok := v.data.(float32) - return ok -} - -// IsFloat32Slice gets whether the object contained is a []float32 or not. -func (v *Value) IsFloat32Slice() bool { - _, ok := v.data.([]float32) - return ok -} - -// EachFloat32 calls the specified callback for each object -// in the []float32. -// -// Panics if the object is the wrong type. -func (v *Value) EachFloat32(callback func(int, float32) bool) *Value { - for index, val := range v.MustFloat32Slice() { - carryon := callback(index, val) - if !carryon { - break - } - } - return v -} - -// WhereFloat32 uses the specified decider function to select items -// from the []float32. The object contained in the result will contain -// only the selected items. -func (v *Value) WhereFloat32(decider func(int, float32) bool) *Value { - var selected []float32 - v.EachFloat32(func(index int, val float32) bool { - shouldSelect := decider(index, val) - if !shouldSelect { - selected = append(selected, val) - } - return true - }) - return &Value{data: selected} -} - -// GroupFloat32 uses the specified grouper function to group the items -// keyed by the return of the grouper. The object contained in the -// result will contain a map[string][]float32. -func (v *Value) GroupFloat32(grouper func(int, float32) string) *Value { - groups := make(map[string][]float32) - v.EachFloat32(func(index int, val float32) bool { - group := grouper(index, val) - if _, ok := groups[group]; !ok { - groups[group] = make([]float32, 0) - } - groups[group] = append(groups[group], val) - return true - }) - return &Value{data: groups} -} - -// ReplaceFloat32 uses the specified function to replace each float32s -// by iterating each item. The data in the returned result will be a -// []float32 containing the replaced items. -func (v *Value) ReplaceFloat32(replacer func(int, float32) float32) *Value { - arr := v.MustFloat32Slice() - replaced := make([]float32, len(arr)) - v.EachFloat32(func(index int, val float32) bool { - replaced[index] = replacer(index, val) - return true - }) - return &Value{data: replaced} -} - -// CollectFloat32 uses the specified collector function to collect a value -// for each of the float32s in the slice. The data returned will be a -// []interface{}. 
-func (v *Value) CollectFloat32(collector func(int, float32) interface{}) *Value { - arr := v.MustFloat32Slice() - collected := make([]interface{}, len(arr)) - v.EachFloat32(func(index int, val float32) bool { - collected[index] = collector(index, val) - return true - }) - return &Value{data: collected} -} - -/* - Float64 (float64 and []float64) -*/ - -// Float64 gets the value as a float64, returns the optionalDefault -// value or a system default object if the value is the wrong type. -func (v *Value) Float64(optionalDefault ...float64) float64 { - if s, ok := v.data.(float64); ok { - return s - } - if len(optionalDefault) == 1 { - return optionalDefault[0] - } - return 0 -} - -// MustFloat64 gets the value as a float64. -// -// Panics if the object is not a float64. -func (v *Value) MustFloat64() float64 { - return v.data.(float64) -} - -// Float64Slice gets the value as a []float64, returns the optionalDefault -// value or nil if the value is not a []float64. -func (v *Value) Float64Slice(optionalDefault ...[]float64) []float64 { - if s, ok := v.data.([]float64); ok { - return s - } - if len(optionalDefault) == 1 { - return optionalDefault[0] - } - return nil -} - -// MustFloat64Slice gets the value as a []float64. -// -// Panics if the object is not a []float64. -func (v *Value) MustFloat64Slice() []float64 { - return v.data.([]float64) -} - -// IsFloat64 gets whether the object contained is a float64 or not. -func (v *Value) IsFloat64() bool { - _, ok := v.data.(float64) - return ok -} - -// IsFloat64Slice gets whether the object contained is a []float64 or not. -func (v *Value) IsFloat64Slice() bool { - _, ok := v.data.([]float64) - return ok -} - -// EachFloat64 calls the specified callback for each object -// in the []float64. -// -// Panics if the object is the wrong type. -func (v *Value) EachFloat64(callback func(int, float64) bool) *Value { - for index, val := range v.MustFloat64Slice() { - carryon := callback(index, val) - if !carryon { - break - } - } - return v -} - -// WhereFloat64 uses the specified decider function to select items -// from the []float64. The object contained in the result will contain -// only the selected items. -func (v *Value) WhereFloat64(decider func(int, float64) bool) *Value { - var selected []float64 - v.EachFloat64(func(index int, val float64) bool { - shouldSelect := decider(index, val) - if !shouldSelect { - selected = append(selected, val) - } - return true - }) - return &Value{data: selected} -} - -// GroupFloat64 uses the specified grouper function to group the items -// keyed by the return of the grouper. The object contained in the -// result will contain a map[string][]float64. -func (v *Value) GroupFloat64(grouper func(int, float64) string) *Value { - groups := make(map[string][]float64) - v.EachFloat64(func(index int, val float64) bool { - group := grouper(index, val) - if _, ok := groups[group]; !ok { - groups[group] = make([]float64, 0) - } - groups[group] = append(groups[group], val) - return true - }) - return &Value{data: groups} -} - -// ReplaceFloat64 uses the specified function to replace each float64s -// by iterating each item. The data in the returned result will be a -// []float64 containing the replaced items. 
-func (v *Value) ReplaceFloat64(replacer func(int, float64) float64) *Value { - arr := v.MustFloat64Slice() - replaced := make([]float64, len(arr)) - v.EachFloat64(func(index int, val float64) bool { - replaced[index] = replacer(index, val) - return true - }) - return &Value{data: replaced} -} - -// CollectFloat64 uses the specified collector function to collect a value -// for each of the float64s in the slice. The data returned will be a -// []interface{}. -func (v *Value) CollectFloat64(collector func(int, float64) interface{}) *Value { - arr := v.MustFloat64Slice() - collected := make([]interface{}, len(arr)) - v.EachFloat64(func(index int, val float64) bool { - collected[index] = collector(index, val) - return true - }) - return &Value{data: collected} -} - -/* - Complex64 (complex64 and []complex64) -*/ - -// Complex64 gets the value as a complex64, returns the optionalDefault -// value or a system default object if the value is the wrong type. -func (v *Value) Complex64(optionalDefault ...complex64) complex64 { - if s, ok := v.data.(complex64); ok { - return s - } - if len(optionalDefault) == 1 { - return optionalDefault[0] - } - return 0 -} - -// MustComplex64 gets the value as a complex64. -// -// Panics if the object is not a complex64. -func (v *Value) MustComplex64() complex64 { - return v.data.(complex64) -} - -// Complex64Slice gets the value as a []complex64, returns the optionalDefault -// value or nil if the value is not a []complex64. -func (v *Value) Complex64Slice(optionalDefault ...[]complex64) []complex64 { - if s, ok := v.data.([]complex64); ok { - return s - } - if len(optionalDefault) == 1 { - return optionalDefault[0] - } - return nil -} - -// MustComplex64Slice gets the value as a []complex64. -// -// Panics if the object is not a []complex64. -func (v *Value) MustComplex64Slice() []complex64 { - return v.data.([]complex64) -} - -// IsComplex64 gets whether the object contained is a complex64 or not. -func (v *Value) IsComplex64() bool { - _, ok := v.data.(complex64) - return ok -} - -// IsComplex64Slice gets whether the object contained is a []complex64 or not. -func (v *Value) IsComplex64Slice() bool { - _, ok := v.data.([]complex64) - return ok -} - -// EachComplex64 calls the specified callback for each object -// in the []complex64. -// -// Panics if the object is the wrong type. -func (v *Value) EachComplex64(callback func(int, complex64) bool) *Value { - for index, val := range v.MustComplex64Slice() { - carryon := callback(index, val) - if !carryon { - break - } - } - return v -} - -// WhereComplex64 uses the specified decider function to select items -// from the []complex64. The object contained in the result will contain -// only the selected items. -func (v *Value) WhereComplex64(decider func(int, complex64) bool) *Value { - var selected []complex64 - v.EachComplex64(func(index int, val complex64) bool { - shouldSelect := decider(index, val) - if !shouldSelect { - selected = append(selected, val) - } - return true - }) - return &Value{data: selected} -} - -// GroupComplex64 uses the specified grouper function to group the items -// keyed by the return of the grouper. The object contained in the -// result will contain a map[string][]complex64. 
-func (v *Value) GroupComplex64(grouper func(int, complex64) string) *Value { - groups := make(map[string][]complex64) - v.EachComplex64(func(index int, val complex64) bool { - group := grouper(index, val) - if _, ok := groups[group]; !ok { - groups[group] = make([]complex64, 0) - } - groups[group] = append(groups[group], val) - return true - }) - return &Value{data: groups} -} - -// ReplaceComplex64 uses the specified function to replace each complex64s -// by iterating each item. The data in the returned result will be a -// []complex64 containing the replaced items. -func (v *Value) ReplaceComplex64(replacer func(int, complex64) complex64) *Value { - arr := v.MustComplex64Slice() - replaced := make([]complex64, len(arr)) - v.EachComplex64(func(index int, val complex64) bool { - replaced[index] = replacer(index, val) - return true - }) - return &Value{data: replaced} -} - -// CollectComplex64 uses the specified collector function to collect a value -// for each of the complex64s in the slice. The data returned will be a -// []interface{}. -func (v *Value) CollectComplex64(collector func(int, complex64) interface{}) *Value { - arr := v.MustComplex64Slice() - collected := make([]interface{}, len(arr)) - v.EachComplex64(func(index int, val complex64) bool { - collected[index] = collector(index, val) - return true - }) - return &Value{data: collected} -} - -/* - Complex128 (complex128 and []complex128) -*/ - -// Complex128 gets the value as a complex128, returns the optionalDefault -// value or a system default object if the value is the wrong type. -func (v *Value) Complex128(optionalDefault ...complex128) complex128 { - if s, ok := v.data.(complex128); ok { - return s - } - if len(optionalDefault) == 1 { - return optionalDefault[0] - } - return 0 -} - -// MustComplex128 gets the value as a complex128. -// -// Panics if the object is not a complex128. -func (v *Value) MustComplex128() complex128 { - return v.data.(complex128) -} - -// Complex128Slice gets the value as a []complex128, returns the optionalDefault -// value or nil if the value is not a []complex128. -func (v *Value) Complex128Slice(optionalDefault ...[]complex128) []complex128 { - if s, ok := v.data.([]complex128); ok { - return s - } - if len(optionalDefault) == 1 { - return optionalDefault[0] - } - return nil -} - -// MustComplex128Slice gets the value as a []complex128. -// -// Panics if the object is not a []complex128. -func (v *Value) MustComplex128Slice() []complex128 { - return v.data.([]complex128) -} - -// IsComplex128 gets whether the object contained is a complex128 or not. -func (v *Value) IsComplex128() bool { - _, ok := v.data.(complex128) - return ok -} - -// IsComplex128Slice gets whether the object contained is a []complex128 or not. -func (v *Value) IsComplex128Slice() bool { - _, ok := v.data.([]complex128) - return ok -} - -// EachComplex128 calls the specified callback for each object -// in the []complex128. -// -// Panics if the object is the wrong type. -func (v *Value) EachComplex128(callback func(int, complex128) bool) *Value { - for index, val := range v.MustComplex128Slice() { - carryon := callback(index, val) - if !carryon { - break - } - } - return v -} - -// WhereComplex128 uses the specified decider function to select items -// from the []complex128. The object contained in the result will contain -// only the selected items. 
-func (v *Value) WhereComplex128(decider func(int, complex128) bool) *Value { - var selected []complex128 - v.EachComplex128(func(index int, val complex128) bool { - shouldSelect := decider(index, val) - if !shouldSelect { - selected = append(selected, val) - } - return true - }) - return &Value{data: selected} -} - -// GroupComplex128 uses the specified grouper function to group the items -// keyed by the return of the grouper. The object contained in the -// result will contain a map[string][]complex128. -func (v *Value) GroupComplex128(grouper func(int, complex128) string) *Value { - groups := make(map[string][]complex128) - v.EachComplex128(func(index int, val complex128) bool { - group := grouper(index, val) - if _, ok := groups[group]; !ok { - groups[group] = make([]complex128, 0) - } - groups[group] = append(groups[group], val) - return true - }) - return &Value{data: groups} -} - -// ReplaceComplex128 uses the specified function to replace each complex128s -// by iterating each item. The data in the returned result will be a -// []complex128 containing the replaced items. -func (v *Value) ReplaceComplex128(replacer func(int, complex128) complex128) *Value { - arr := v.MustComplex128Slice() - replaced := make([]complex128, len(arr)) - v.EachComplex128(func(index int, val complex128) bool { - replaced[index] = replacer(index, val) - return true - }) - return &Value{data: replaced} -} - -// CollectComplex128 uses the specified collector function to collect a value -// for each of the complex128s in the slice. The data returned will be a -// []interface{}. -func (v *Value) CollectComplex128(collector func(int, complex128) interface{}) *Value { - arr := v.MustComplex128Slice() - collected := make([]interface{}, len(arr)) - v.EachComplex128(func(index int, val complex128) bool { - collected[index] = collector(index, val) - return true - }) - return &Value{data: collected} -} diff --git a/vendor/github.com/stretchr/objx/value.go b/vendor/github.com/stretchr/objx/value.go deleted file mode 100644 index 4e5f9b77e..000000000 --- a/vendor/github.com/stretchr/objx/value.go +++ /dev/null @@ -1,159 +0,0 @@ -package objx - -import ( - "fmt" - "strconv" -) - -// Value provides methods for extracting interface{} data in various -// types. 
-type Value struct { - // data contains the raw data being managed by this Value - data interface{} -} - -// Data returns the raw data contained by this Value -func (v *Value) Data() interface{} { - return v.data -} - -// String returns the value always as a string -func (v *Value) String() string { - switch { - case v.IsNil(): - return "" - case v.IsStr(): - return v.Str() - case v.IsBool(): - return strconv.FormatBool(v.Bool()) - case v.IsFloat32(): - return strconv.FormatFloat(float64(v.Float32()), 'f', -1, 32) - case v.IsFloat64(): - return strconv.FormatFloat(v.Float64(), 'f', -1, 64) - case v.IsInt(): - return strconv.FormatInt(int64(v.Int()), 10) - case v.IsInt8(): - return strconv.FormatInt(int64(v.Int8()), 10) - case v.IsInt16(): - return strconv.FormatInt(int64(v.Int16()), 10) - case v.IsInt32(): - return strconv.FormatInt(int64(v.Int32()), 10) - case v.IsInt64(): - return strconv.FormatInt(v.Int64(), 10) - case v.IsUint(): - return strconv.FormatUint(uint64(v.Uint()), 10) - case v.IsUint8(): - return strconv.FormatUint(uint64(v.Uint8()), 10) - case v.IsUint16(): - return strconv.FormatUint(uint64(v.Uint16()), 10) - case v.IsUint32(): - return strconv.FormatUint(uint64(v.Uint32()), 10) - case v.IsUint64(): - return strconv.FormatUint(v.Uint64(), 10) - } - return fmt.Sprintf("%#v", v.Data()) -} - -// StringSlice returns the value always as a []string -func (v *Value) StringSlice(optionalDefault ...[]string) []string { - switch { - case v.IsStrSlice(): - return v.MustStrSlice() - case v.IsBoolSlice(): - slice := v.MustBoolSlice() - vals := make([]string, len(slice)) - for i, iv := range slice { - vals[i] = strconv.FormatBool(iv) - } - return vals - case v.IsFloat32Slice(): - slice := v.MustFloat32Slice() - vals := make([]string, len(slice)) - for i, iv := range slice { - vals[i] = strconv.FormatFloat(float64(iv), 'f', -1, 32) - } - return vals - case v.IsFloat64Slice(): - slice := v.MustFloat64Slice() - vals := make([]string, len(slice)) - for i, iv := range slice { - vals[i] = strconv.FormatFloat(iv, 'f', -1, 64) - } - return vals - case v.IsIntSlice(): - slice := v.MustIntSlice() - vals := make([]string, len(slice)) - for i, iv := range slice { - vals[i] = strconv.FormatInt(int64(iv), 10) - } - return vals - case v.IsInt8Slice(): - slice := v.MustInt8Slice() - vals := make([]string, len(slice)) - for i, iv := range slice { - vals[i] = strconv.FormatInt(int64(iv), 10) - } - return vals - case v.IsInt16Slice(): - slice := v.MustInt16Slice() - vals := make([]string, len(slice)) - for i, iv := range slice { - vals[i] = strconv.FormatInt(int64(iv), 10) - } - return vals - case v.IsInt32Slice(): - slice := v.MustInt32Slice() - vals := make([]string, len(slice)) - for i, iv := range slice { - vals[i] = strconv.FormatInt(int64(iv), 10) - } - return vals - case v.IsInt64Slice(): - slice := v.MustInt64Slice() - vals := make([]string, len(slice)) - for i, iv := range slice { - vals[i] = strconv.FormatInt(iv, 10) - } - return vals - case v.IsUintSlice(): - slice := v.MustUintSlice() - vals := make([]string, len(slice)) - for i, iv := range slice { - vals[i] = strconv.FormatUint(uint64(iv), 10) - } - return vals - case v.IsUint8Slice(): - slice := v.MustUint8Slice() - vals := make([]string, len(slice)) - for i, iv := range slice { - vals[i] = strconv.FormatUint(uint64(iv), 10) - } - return vals - case v.IsUint16Slice(): - slice := v.MustUint16Slice() - vals := make([]string, len(slice)) - for i, iv := range slice { - vals[i] = strconv.FormatUint(uint64(iv), 10) - } - return vals - case 
v.IsUint32Slice(): - slice := v.MustUint32Slice() - vals := make([]string, len(slice)) - for i, iv := range slice { - vals[i] = strconv.FormatUint(uint64(iv), 10) - } - return vals - case v.IsUint64Slice(): - slice := v.MustUint64Slice() - vals := make([]string, len(slice)) - for i, iv := range slice { - vals[i] = strconv.FormatUint(iv, 10) - } - return vals - } - if len(optionalDefault) == 1 { - return optionalDefault[0] - } - - return []string{} -} diff --git a/vendor/github.com/stretchr/testify/LICENSE b/vendor/github.com/stretchr/testify/LICENSE deleted file mode 100644 index 4b0421cf9..000000000 --- a/vendor/github.com/stretchr/testify/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -MIT License - -Copyright (c) 2012-2020 Mat Ryer, Tyler Bunnell and contributors. - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. 
diff --git a/vendor/github.com/stretchr/testify/assert/assertion_compare.go b/vendor/github.com/stretchr/testify/assert/assertion_compare.go deleted file mode 100644 index 4d4b4aad6..000000000 --- a/vendor/github.com/stretchr/testify/assert/assertion_compare.go +++ /dev/null @@ -1,480 +0,0 @@ -package assert - -import ( - "bytes" - "fmt" - "reflect" - "time" -) - -type CompareType int - -const ( - compareLess CompareType = iota - 1 - compareEqual - compareGreater -) - -var ( - intType = reflect.TypeOf(int(1)) - int8Type = reflect.TypeOf(int8(1)) - int16Type = reflect.TypeOf(int16(1)) - int32Type = reflect.TypeOf(int32(1)) - int64Type = reflect.TypeOf(int64(1)) - - uintType = reflect.TypeOf(uint(1)) - uint8Type = reflect.TypeOf(uint8(1)) - uint16Type = reflect.TypeOf(uint16(1)) - uint32Type = reflect.TypeOf(uint32(1)) - uint64Type = reflect.TypeOf(uint64(1)) - - uintptrType = reflect.TypeOf(uintptr(1)) - - float32Type = reflect.TypeOf(float32(1)) - float64Type = reflect.TypeOf(float64(1)) - - stringType = reflect.TypeOf("") - - timeType = reflect.TypeOf(time.Time{}) - bytesType = reflect.TypeOf([]byte{}) -) - -func compare(obj1, obj2 interface{}, kind reflect.Kind) (CompareType, bool) { - obj1Value := reflect.ValueOf(obj1) - obj2Value := reflect.ValueOf(obj2) - - // throughout this switch we try and avoid calling .Convert() if possible, - // as this has a pretty big performance impact - switch kind { - case reflect.Int: - { - intobj1, ok := obj1.(int) - if !ok { - intobj1 = obj1Value.Convert(intType).Interface().(int) - } - intobj2, ok := obj2.(int) - if !ok { - intobj2 = obj2Value.Convert(intType).Interface().(int) - } - if intobj1 > intobj2 { - return compareGreater, true - } - if intobj1 == intobj2 { - return compareEqual, true - } - if intobj1 < intobj2 { - return compareLess, true - } - } - case reflect.Int8: - { - int8obj1, ok := obj1.(int8) - if !ok { - int8obj1 = obj1Value.Convert(int8Type).Interface().(int8) - } - int8obj2, ok := obj2.(int8) - if !ok { - int8obj2 = obj2Value.Convert(int8Type).Interface().(int8) - } - if int8obj1 > int8obj2 { - return compareGreater, true - } - if int8obj1 == int8obj2 { - return compareEqual, true - } - if int8obj1 < int8obj2 { - return compareLess, true - } - } - case reflect.Int16: - { - int16obj1, ok := obj1.(int16) - if !ok { - int16obj1 = obj1Value.Convert(int16Type).Interface().(int16) - } - int16obj2, ok := obj2.(int16) - if !ok { - int16obj2 = obj2Value.Convert(int16Type).Interface().(int16) - } - if int16obj1 > int16obj2 { - return compareGreater, true - } - if int16obj1 == int16obj2 { - return compareEqual, true - } - if int16obj1 < int16obj2 { - return compareLess, true - } - } - case reflect.Int32: - { - int32obj1, ok := obj1.(int32) - if !ok { - int32obj1 = obj1Value.Convert(int32Type).Interface().(int32) - } - int32obj2, ok := obj2.(int32) - if !ok { - int32obj2 = obj2Value.Convert(int32Type).Interface().(int32) - } - if int32obj1 > int32obj2 { - return compareGreater, true - } - if int32obj1 == int32obj2 { - return compareEqual, true - } - if int32obj1 < int32obj2 { - return compareLess, true - } - } - case reflect.Int64: - { - int64obj1, ok := obj1.(int64) - if !ok { - int64obj1 = obj1Value.Convert(int64Type).Interface().(int64) - } - int64obj2, ok := obj2.(int64) - if !ok { - int64obj2 = obj2Value.Convert(int64Type).Interface().(int64) - } - if int64obj1 > int64obj2 { - return compareGreater, true - } - if int64obj1 == int64obj2 { - return compareEqual, true - } - if int64obj1 < int64obj2 { - return compareLess, true - } - } - case 
reflect.Uint: - { - uintobj1, ok := obj1.(uint) - if !ok { - uintobj1 = obj1Value.Convert(uintType).Interface().(uint) - } - uintobj2, ok := obj2.(uint) - if !ok { - uintobj2 = obj2Value.Convert(uintType).Interface().(uint) - } - if uintobj1 > uintobj2 { - return compareGreater, true - } - if uintobj1 == uintobj2 { - return compareEqual, true - } - if uintobj1 < uintobj2 { - return compareLess, true - } - } - case reflect.Uint8: - { - uint8obj1, ok := obj1.(uint8) - if !ok { - uint8obj1 = obj1Value.Convert(uint8Type).Interface().(uint8) - } - uint8obj2, ok := obj2.(uint8) - if !ok { - uint8obj2 = obj2Value.Convert(uint8Type).Interface().(uint8) - } - if uint8obj1 > uint8obj2 { - return compareGreater, true - } - if uint8obj1 == uint8obj2 { - return compareEqual, true - } - if uint8obj1 < uint8obj2 { - return compareLess, true - } - } - case reflect.Uint16: - { - uint16obj1, ok := obj1.(uint16) - if !ok { - uint16obj1 = obj1Value.Convert(uint16Type).Interface().(uint16) - } - uint16obj2, ok := obj2.(uint16) - if !ok { - uint16obj2 = obj2Value.Convert(uint16Type).Interface().(uint16) - } - if uint16obj1 > uint16obj2 { - return compareGreater, true - } - if uint16obj1 == uint16obj2 { - return compareEqual, true - } - if uint16obj1 < uint16obj2 { - return compareLess, true - } - } - case reflect.Uint32: - { - uint32obj1, ok := obj1.(uint32) - if !ok { - uint32obj1 = obj1Value.Convert(uint32Type).Interface().(uint32) - } - uint32obj2, ok := obj2.(uint32) - if !ok { - uint32obj2 = obj2Value.Convert(uint32Type).Interface().(uint32) - } - if uint32obj1 > uint32obj2 { - return compareGreater, true - } - if uint32obj1 == uint32obj2 { - return compareEqual, true - } - if uint32obj1 < uint32obj2 { - return compareLess, true - } - } - case reflect.Uint64: - { - uint64obj1, ok := obj1.(uint64) - if !ok { - uint64obj1 = obj1Value.Convert(uint64Type).Interface().(uint64) - } - uint64obj2, ok := obj2.(uint64) - if !ok { - uint64obj2 = obj2Value.Convert(uint64Type).Interface().(uint64) - } - if uint64obj1 > uint64obj2 { - return compareGreater, true - } - if uint64obj1 == uint64obj2 { - return compareEqual, true - } - if uint64obj1 < uint64obj2 { - return compareLess, true - } - } - case reflect.Float32: - { - float32obj1, ok := obj1.(float32) - if !ok { - float32obj1 = obj1Value.Convert(float32Type).Interface().(float32) - } - float32obj2, ok := obj2.(float32) - if !ok { - float32obj2 = obj2Value.Convert(float32Type).Interface().(float32) - } - if float32obj1 > float32obj2 { - return compareGreater, true - } - if float32obj1 == float32obj2 { - return compareEqual, true - } - if float32obj1 < float32obj2 { - return compareLess, true - } - } - case reflect.Float64: - { - float64obj1, ok := obj1.(float64) - if !ok { - float64obj1 = obj1Value.Convert(float64Type).Interface().(float64) - } - float64obj2, ok := obj2.(float64) - if !ok { - float64obj2 = obj2Value.Convert(float64Type).Interface().(float64) - } - if float64obj1 > float64obj2 { - return compareGreater, true - } - if float64obj1 == float64obj2 { - return compareEqual, true - } - if float64obj1 < float64obj2 { - return compareLess, true - } - } - case reflect.String: - { - stringobj1, ok := obj1.(string) - if !ok { - stringobj1 = obj1Value.Convert(stringType).Interface().(string) - } - stringobj2, ok := obj2.(string) - if !ok { - stringobj2 = obj2Value.Convert(stringType).Interface().(string) - } - if stringobj1 > stringobj2 { - return compareGreater, true - } - if stringobj1 == stringobj2 { - return compareEqual, true - } - if stringobj1 < stringobj2 
{ - return compareLess, true - } - } - // Check for known struct types we can check for compare results. - case reflect.Struct: - { - // All structs enter here. We're not interested in most types. - if !obj1Value.CanConvert(timeType) { - break - } - - // time.Time can be compared! - timeObj1, ok := obj1.(time.Time) - if !ok { - timeObj1 = obj1Value.Convert(timeType).Interface().(time.Time) - } - - timeObj2, ok := obj2.(time.Time) - if !ok { - timeObj2 = obj2Value.Convert(timeType).Interface().(time.Time) - } - - return compare(timeObj1.UnixNano(), timeObj2.UnixNano(), reflect.Int64) - } - case reflect.Slice: - { - // We only care about the []byte type. - if !obj1Value.CanConvert(bytesType) { - break - } - - // []byte can be compared! - bytesObj1, ok := obj1.([]byte) - if !ok { - bytesObj1 = obj1Value.Convert(bytesType).Interface().([]byte) - - } - bytesObj2, ok := obj2.([]byte) - if !ok { - bytesObj2 = obj2Value.Convert(bytesType).Interface().([]byte) - } - - return CompareType(bytes.Compare(bytesObj1, bytesObj2)), true - } - case reflect.Uintptr: - { - uintptrObj1, ok := obj1.(uintptr) - if !ok { - uintptrObj1 = obj1Value.Convert(uintptrType).Interface().(uintptr) - } - uintptrObj2, ok := obj2.(uintptr) - if !ok { - uintptrObj2 = obj2Value.Convert(uintptrType).Interface().(uintptr) - } - if uintptrObj1 > uintptrObj2 { - return compareGreater, true - } - if uintptrObj1 == uintptrObj2 { - return compareEqual, true - } - if uintptrObj1 < uintptrObj2 { - return compareLess, true - } - } - } - - return compareEqual, false -} - -// Greater asserts that the first element is greater than the second -// -// assert.Greater(t, 2, 1) -// assert.Greater(t, float64(2), float64(1)) -// assert.Greater(t, "b", "a") -func Greater(t TestingT, e1 interface{}, e2 interface{}, msgAndArgs ...interface{}) bool { - if h, ok := t.(tHelper); ok { - h.Helper() - } - return compareTwoValues(t, e1, e2, []CompareType{compareGreater}, "\"%v\" is not greater than \"%v\"", msgAndArgs...) -} - -// GreaterOrEqual asserts that the first element is greater than or equal to the second -// -// assert.GreaterOrEqual(t, 2, 1) -// assert.GreaterOrEqual(t, 2, 2) -// assert.GreaterOrEqual(t, "b", "a") -// assert.GreaterOrEqual(t, "b", "b") -func GreaterOrEqual(t TestingT, e1 interface{}, e2 interface{}, msgAndArgs ...interface{}) bool { - if h, ok := t.(tHelper); ok { - h.Helper() - } - return compareTwoValues(t, e1, e2, []CompareType{compareGreater, compareEqual}, "\"%v\" is not greater than or equal to \"%v\"", msgAndArgs...) -} - -// Less asserts that the first element is less than the second -// -// assert.Less(t, 1, 2) -// assert.Less(t, float64(1), float64(2)) -// assert.Less(t, "a", "b") -func Less(t TestingT, e1 interface{}, e2 interface{}, msgAndArgs ...interface{}) bool { - if h, ok := t.(tHelper); ok { - h.Helper() - } - return compareTwoValues(t, e1, e2, []CompareType{compareLess}, "\"%v\" is not less than \"%v\"", msgAndArgs...) -} - -// LessOrEqual asserts that the first element is less than or equal to the second -// -// assert.LessOrEqual(t, 1, 2) -// assert.LessOrEqual(t, 2, 2) -// assert.LessOrEqual(t, "a", "b") -// assert.LessOrEqual(t, "b", "b") -func LessOrEqual(t TestingT, e1 interface{}, e2 interface{}, msgAndArgs ...interface{}) bool { - if h, ok := t.(tHelper); ok { - h.Helper() - } - return compareTwoValues(t, e1, e2, []CompareType{compareLess, compareEqual}, "\"%v\" is not less than or equal to \"%v\"", msgAndArgs...) 
-} - -// Positive asserts that the specified element is positive -// -// assert.Positive(t, 1) -// assert.Positive(t, 1.23) -func Positive(t TestingT, e interface{}, msgAndArgs ...interface{}) bool { - if h, ok := t.(tHelper); ok { - h.Helper() - } - zero := reflect.Zero(reflect.TypeOf(e)) - return compareTwoValues(t, e, zero.Interface(), []CompareType{compareGreater}, "\"%v\" is not positive", msgAndArgs...) -} - -// Negative asserts that the specified element is negative -// -// assert.Negative(t, -1) -// assert.Negative(t, -1.23) -func Negative(t TestingT, e interface{}, msgAndArgs ...interface{}) bool { - if h, ok := t.(tHelper); ok { - h.Helper() - } - zero := reflect.Zero(reflect.TypeOf(e)) - return compareTwoValues(t, e, zero.Interface(), []CompareType{compareLess}, "\"%v\" is not negative", msgAndArgs...) -} - -func compareTwoValues(t TestingT, e1 interface{}, e2 interface{}, allowedComparesResults []CompareType, failMessage string, msgAndArgs ...interface{}) bool { - if h, ok := t.(tHelper); ok { - h.Helper() - } - - e1Kind := reflect.ValueOf(e1).Kind() - e2Kind := reflect.ValueOf(e2).Kind() - if e1Kind != e2Kind { - return Fail(t, "Elements should be the same type", msgAndArgs...) - } - - compareResult, isComparable := compare(e1, e2, e1Kind) - if !isComparable { - return Fail(t, fmt.Sprintf("Can not compare type \"%s\"", reflect.TypeOf(e1)), msgAndArgs...) - } - - if !containsValue(allowedComparesResults, compareResult) { - return Fail(t, fmt.Sprintf(failMessage, e1, e2), msgAndArgs...) - } - - return true -} - -func containsValue(values []CompareType, value CompareType) bool { - for _, v := range values { - if v == value { - return true - } - } - - return false -} diff --git a/vendor/github.com/stretchr/testify/assert/assertion_format.go b/vendor/github.com/stretchr/testify/assert/assertion_format.go deleted file mode 100644 index 3ddab109a..000000000 --- a/vendor/github.com/stretchr/testify/assert/assertion_format.go +++ /dev/null @@ -1,815 +0,0 @@ -// Code generated with github.com/stretchr/testify/_codegen; DO NOT EDIT. - -package assert - -import ( - http "net/http" - url "net/url" - time "time" -) - -// Conditionf uses a Comparison to assert a complex condition. -func Conditionf(t TestingT, comp Comparison, msg string, args ...interface{}) bool { - if h, ok := t.(tHelper); ok { - h.Helper() - } - return Condition(t, comp, append([]interface{}{msg}, args...)...) -} - -// Containsf asserts that the specified string, list(array, slice...) or map contains the -// specified substring or element. -// -// assert.Containsf(t, "Hello World", "World", "error message %s", "formatted") -// assert.Containsf(t, ["Hello", "World"], "World", "error message %s", "formatted") -// assert.Containsf(t, {"Hello": "World"}, "Hello", "error message %s", "formatted") -func Containsf(t TestingT, s interface{}, contains interface{}, msg string, args ...interface{}) bool { - if h, ok := t.(tHelper); ok { - h.Helper() - } - return Contains(t, s, contains, append([]interface{}{msg}, args...)...) -} - -// DirExistsf checks whether a directory exists in the given path. It also fails -// if the path is a file rather a directory or there is an error checking whether it exists. -func DirExistsf(t TestingT, path string, msg string, args ...interface{}) bool { - if h, ok := t.(tHelper); ok { - h.Helper() - } - return DirExists(t, path, append([]interface{}{msg}, args...)...) -} - -// ElementsMatchf asserts that the specified listA(array, slice...) is equal to specified -// listB(array, slice...) 
ignoring the order of the elements. If there are duplicate elements, -// the number of appearances of each of them in both lists should match. -// -// assert.ElementsMatchf(t, [1, 3, 2, 3], [1, 3, 3, 2], "error message %s", "formatted") -func ElementsMatchf(t TestingT, listA interface{}, listB interface{}, msg string, args ...interface{}) bool { - if h, ok := t.(tHelper); ok { - h.Helper() - } - return ElementsMatch(t, listA, listB, append([]interface{}{msg}, args...)...) -} - -// Emptyf asserts that the specified object is empty. I.e. nil, "", false, 0 or either -// a slice or a channel with len == 0. -// -// assert.Emptyf(t, obj, "error message %s", "formatted") -func Emptyf(t TestingT, object interface{}, msg string, args ...interface{}) bool { - if h, ok := t.(tHelper); ok { - h.Helper() - } - return Empty(t, object, append([]interface{}{msg}, args...)...) -} - -// Equalf asserts that two objects are equal. -// -// assert.Equalf(t, 123, 123, "error message %s", "formatted") -// -// Pointer variable equality is determined based on the equality of the -// referenced values (as opposed to the memory addresses). Function equality -// cannot be determined and will always fail. -func Equalf(t TestingT, expected interface{}, actual interface{}, msg string, args ...interface{}) bool { - if h, ok := t.(tHelper); ok { - h.Helper() - } - return Equal(t, expected, actual, append([]interface{}{msg}, args...)...) -} - -// EqualErrorf asserts that a function returned an error (i.e. not `nil`) -// and that it is equal to the provided error. -// -// actualObj, err := SomeFunction() -// assert.EqualErrorf(t, err, expectedErrorString, "error message %s", "formatted") -func EqualErrorf(t TestingT, theError error, errString string, msg string, args ...interface{}) bool { - if h, ok := t.(tHelper); ok { - h.Helper() - } - return EqualError(t, theError, errString, append([]interface{}{msg}, args...)...) -} - -// EqualExportedValuesf asserts that the types of two objects are equal and their public -// fields are also equal. This is useful for comparing structs that have private fields -// that could potentially differ. -// -// type S struct { -// Exported int -// notExported int -// } -// assert.EqualExportedValuesf(t, S{1, 2}, S{1, 3}, "error message %s", "formatted") => true -// assert.EqualExportedValuesf(t, S{1, 2}, S{2, 3}, "error message %s", "formatted") => false -func EqualExportedValuesf(t TestingT, expected interface{}, actual interface{}, msg string, args ...interface{}) bool { - if h, ok := t.(tHelper); ok { - h.Helper() - } - return EqualExportedValues(t, expected, actual, append([]interface{}{msg}, args...)...) -} - -// EqualValuesf asserts that two objects are equal or convertible to the same types -// and equal. -// -// assert.EqualValuesf(t, uint32(123), int32(123), "error message %s", "formatted") -func EqualValuesf(t TestingT, expected interface{}, actual interface{}, msg string, args ...interface{}) bool { - if h, ok := t.(tHelper); ok { - h.Helper() - } - return EqualValues(t, expected, actual, append([]interface{}{msg}, args...)...) -} - -// Errorf asserts that a function returned an error (i.e. not `nil`). -// -// actualObj, err := SomeFunction() -// if assert.Errorf(t, err, "error message %s", "formatted") { -// assert.Equal(t, expectedErrorf, err) -// } -func Errorf(t TestingT, err error, msg string, args ...interface{}) bool { - if h, ok := t.(tHelper); ok { - h.Helper() - } - return Error(t, err, append([]interface{}{msg}, args...)...) 
-} - -// ErrorAsf asserts that at least one of the errors in err's chain matches target, and if so, sets target to that error value. -// This is a wrapper for errors.As. -func ErrorAsf(t TestingT, err error, target interface{}, msg string, args ...interface{}) bool { - if h, ok := t.(tHelper); ok { - h.Helper() - } - return ErrorAs(t, err, target, append([]interface{}{msg}, args...)...) -} - -// ErrorContainsf asserts that a function returned an error (i.e. not `nil`) -// and that the error contains the specified substring. -// -// actualObj, err := SomeFunction() -// assert.ErrorContainsf(t, err, expectedErrorSubString, "error message %s", "formatted") -func ErrorContainsf(t TestingT, theError error, contains string, msg string, args ...interface{}) bool { - if h, ok := t.(tHelper); ok { - h.Helper() - } - return ErrorContains(t, theError, contains, append([]interface{}{msg}, args...)...) -} - -// ErrorIsf asserts that at least one of the errors in err's chain matches target. -// This is a wrapper for errors.Is. -func ErrorIsf(t TestingT, err error, target error, msg string, args ...interface{}) bool { - if h, ok := t.(tHelper); ok { - h.Helper() - } - return ErrorIs(t, err, target, append([]interface{}{msg}, args...)...) -} - -// Eventuallyf asserts that given condition will be met in waitFor time, -// periodically checking target function each tick. -// -// assert.Eventuallyf(t, func() bool { return true; }, time.Second, 10*time.Millisecond, "error message %s", "formatted") -func Eventuallyf(t TestingT, condition func() bool, waitFor time.Duration, tick time.Duration, msg string, args ...interface{}) bool { - if h, ok := t.(tHelper); ok { - h.Helper() - } - return Eventually(t, condition, waitFor, tick, append([]interface{}{msg}, args...)...) -} - -// EventuallyWithTf asserts that given condition will be met in waitFor time, -// periodically checking target function each tick. In contrast to Eventually, -// it supplies a CollectT to the condition function, so that the condition -// function can use the CollectT to call other assertions. -// The condition is considered "met" if no errors are raised in a tick. -// The supplied CollectT collects all errors from one tick (if there are any). -// If the condition is not met before waitFor, the collected errors of -// the last tick are copied to t. -// -// externalValue := false -// go func() { -// time.Sleep(8*time.Second) -// externalValue = true -// }() -// assert.EventuallyWithTf(t, func(c *assert.CollectT, "error message %s", "formatted") { -// // add assertions as needed; any assertion failure will fail the current tick -// assert.True(c, externalValue, "expected 'externalValue' to be true") -// }, 1*time.Second, 10*time.Second, "external state has not changed to 'true'; still false") -func EventuallyWithTf(t TestingT, condition func(collect *CollectT), waitFor time.Duration, tick time.Duration, msg string, args ...interface{}) bool { - if h, ok := t.(tHelper); ok { - h.Helper() - } - return EventuallyWithT(t, condition, waitFor, tick, append([]interface{}{msg}, args...)...) -} - -// Exactlyf asserts that two objects are equal in value and type. -// -// assert.Exactlyf(t, int32(123), int64(123), "error message %s", "formatted") -func Exactlyf(t TestingT, expected interface{}, actual interface{}, msg string, args ...interface{}) bool { - if h, ok := t.(tHelper); ok { - h.Helper() - } - return Exactly(t, expected, actual, append([]interface{}{msg}, args...)...) 
-} - -// Failf reports a failure through -func Failf(t TestingT, failureMessage string, msg string, args ...interface{}) bool { - if h, ok := t.(tHelper); ok { - h.Helper() - } - return Fail(t, failureMessage, append([]interface{}{msg}, args...)...) -} - -// FailNowf fails test -func FailNowf(t TestingT, failureMessage string, msg string, args ...interface{}) bool { - if h, ok := t.(tHelper); ok { - h.Helper() - } - return FailNow(t, failureMessage, append([]interface{}{msg}, args...)...) -} - -// Falsef asserts that the specified value is false. -// -// assert.Falsef(t, myBool, "error message %s", "formatted") -func Falsef(t TestingT, value bool, msg string, args ...interface{}) bool { - if h, ok := t.(tHelper); ok { - h.Helper() - } - return False(t, value, append([]interface{}{msg}, args...)...) -} - -// FileExistsf checks whether a file exists in the given path. It also fails if -// the path points to a directory or there is an error when trying to check the file. -func FileExistsf(t TestingT, path string, msg string, args ...interface{}) bool { - if h, ok := t.(tHelper); ok { - h.Helper() - } - return FileExists(t, path, append([]interface{}{msg}, args...)...) -} - -// Greaterf asserts that the first element is greater than the second -// -// assert.Greaterf(t, 2, 1, "error message %s", "formatted") -// assert.Greaterf(t, float64(2), float64(1), "error message %s", "formatted") -// assert.Greaterf(t, "b", "a", "error message %s", "formatted") -func Greaterf(t TestingT, e1 interface{}, e2 interface{}, msg string, args ...interface{}) bool { - if h, ok := t.(tHelper); ok { - h.Helper() - } - return Greater(t, e1, e2, append([]interface{}{msg}, args...)...) -} - -// GreaterOrEqualf asserts that the first element is greater than or equal to the second -// -// assert.GreaterOrEqualf(t, 2, 1, "error message %s", "formatted") -// assert.GreaterOrEqualf(t, 2, 2, "error message %s", "formatted") -// assert.GreaterOrEqualf(t, "b", "a", "error message %s", "formatted") -// assert.GreaterOrEqualf(t, "b", "b", "error message %s", "formatted") -func GreaterOrEqualf(t TestingT, e1 interface{}, e2 interface{}, msg string, args ...interface{}) bool { - if h, ok := t.(tHelper); ok { - h.Helper() - } - return GreaterOrEqual(t, e1, e2, append([]interface{}{msg}, args...)...) -} - -// HTTPBodyContainsf asserts that a specified handler returns a -// body that contains a string. -// -// assert.HTTPBodyContainsf(t, myHandler, "GET", "www.google.com", nil, "I'm Feeling Lucky", "error message %s", "formatted") -// -// Returns whether the assertion was successful (true) or not (false). -func HTTPBodyContainsf(t TestingT, handler http.HandlerFunc, method string, url string, values url.Values, str interface{}, msg string, args ...interface{}) bool { - if h, ok := t.(tHelper); ok { - h.Helper() - } - return HTTPBodyContains(t, handler, method, url, values, str, append([]interface{}{msg}, args...)...) -} - -// HTTPBodyNotContainsf asserts that a specified handler returns a -// body that does not contain a string. -// -// assert.HTTPBodyNotContainsf(t, myHandler, "GET", "www.google.com", nil, "I'm Feeling Lucky", "error message %s", "formatted") -// -// Returns whether the assertion was successful (true) or not (false). 
-func HTTPBodyNotContainsf(t TestingT, handler http.HandlerFunc, method string, url string, values url.Values, str interface{}, msg string, args ...interface{}) bool { - if h, ok := t.(tHelper); ok { - h.Helper() - } - return HTTPBodyNotContains(t, handler, method, url, values, str, append([]interface{}{msg}, args...)...) -} - -// HTTPErrorf asserts that a specified handler returns an error status code. -// -// assert.HTTPErrorf(t, myHandler, "POST", "/a/b/c", url.Values{"a": []string{"b", "c"}} -// -// Returns whether the assertion was successful (true) or not (false). -func HTTPErrorf(t TestingT, handler http.HandlerFunc, method string, url string, values url.Values, msg string, args ...interface{}) bool { - if h, ok := t.(tHelper); ok { - h.Helper() - } - return HTTPError(t, handler, method, url, values, append([]interface{}{msg}, args...)...) -} - -// HTTPRedirectf asserts that a specified handler returns a redirect status code. -// -// assert.HTTPRedirectf(t, myHandler, "GET", "/a/b/c", url.Values{"a": []string{"b", "c"}} -// -// Returns whether the assertion was successful (true) or not (false). -func HTTPRedirectf(t TestingT, handler http.HandlerFunc, method string, url string, values url.Values, msg string, args ...interface{}) bool { - if h, ok := t.(tHelper); ok { - h.Helper() - } - return HTTPRedirect(t, handler, method, url, values, append([]interface{}{msg}, args...)...) -} - -// HTTPStatusCodef asserts that a specified handler returns a specified status code. -// -// assert.HTTPStatusCodef(t, myHandler, "GET", "/notImplemented", nil, 501, "error message %s", "formatted") -// -// Returns whether the assertion was successful (true) or not (false). -func HTTPStatusCodef(t TestingT, handler http.HandlerFunc, method string, url string, values url.Values, statuscode int, msg string, args ...interface{}) bool { - if h, ok := t.(tHelper); ok { - h.Helper() - } - return HTTPStatusCode(t, handler, method, url, values, statuscode, append([]interface{}{msg}, args...)...) -} - -// HTTPSuccessf asserts that a specified handler returns a success status code. -// -// assert.HTTPSuccessf(t, myHandler, "POST", "http://www.google.com", nil, "error message %s", "formatted") -// -// Returns whether the assertion was successful (true) or not (false). -func HTTPSuccessf(t TestingT, handler http.HandlerFunc, method string, url string, values url.Values, msg string, args ...interface{}) bool { - if h, ok := t.(tHelper); ok { - h.Helper() - } - return HTTPSuccess(t, handler, method, url, values, append([]interface{}{msg}, args...)...) -} - -// Implementsf asserts that an object is implemented by the specified interface. -// -// assert.Implementsf(t, (*MyInterface)(nil), new(MyObject), "error message %s", "formatted") -func Implementsf(t TestingT, interfaceObject interface{}, object interface{}, msg string, args ...interface{}) bool { - if h, ok := t.(tHelper); ok { - h.Helper() - } - return Implements(t, interfaceObject, object, append([]interface{}{msg}, args...)...) -} - -// InDeltaf asserts that the two numerals are within delta of each other. -// -// assert.InDeltaf(t, math.Pi, 22/7.0, 0.01, "error message %s", "formatted") -func InDeltaf(t TestingT, expected interface{}, actual interface{}, delta float64, msg string, args ...interface{}) bool { - if h, ok := t.(tHelper); ok { - h.Helper() - } - return InDelta(t, expected, actual, delta, append([]interface{}{msg}, args...)...) -} - -// InDeltaMapValuesf is the same as InDelta, but it compares all values between two maps. 
Both maps must have exactly the same keys. -func InDeltaMapValuesf(t TestingT, expected interface{}, actual interface{}, delta float64, msg string, args ...interface{}) bool { - if h, ok := t.(tHelper); ok { - h.Helper() - } - return InDeltaMapValues(t, expected, actual, delta, append([]interface{}{msg}, args...)...) -} - -// InDeltaSlicef is the same as InDelta, except it compares two slices. -func InDeltaSlicef(t TestingT, expected interface{}, actual interface{}, delta float64, msg string, args ...interface{}) bool { - if h, ok := t.(tHelper); ok { - h.Helper() - } - return InDeltaSlice(t, expected, actual, delta, append([]interface{}{msg}, args...)...) -} - -// InEpsilonf asserts that expected and actual have a relative error less than epsilon -func InEpsilonf(t TestingT, expected interface{}, actual interface{}, epsilon float64, msg string, args ...interface{}) bool { - if h, ok := t.(tHelper); ok { - h.Helper() - } - return InEpsilon(t, expected, actual, epsilon, append([]interface{}{msg}, args...)...) -} - -// InEpsilonSlicef is the same as InEpsilon, except it compares each value from two slices. -func InEpsilonSlicef(t TestingT, expected interface{}, actual interface{}, epsilon float64, msg string, args ...interface{}) bool { - if h, ok := t.(tHelper); ok { - h.Helper() - } - return InEpsilonSlice(t, expected, actual, epsilon, append([]interface{}{msg}, args...)...) -} - -// IsDecreasingf asserts that the collection is decreasing -// -// assert.IsDecreasingf(t, []int{2, 1, 0}, "error message %s", "formatted") -// assert.IsDecreasingf(t, []float{2, 1}, "error message %s", "formatted") -// assert.IsDecreasingf(t, []string{"b", "a"}, "error message %s", "formatted") -func IsDecreasingf(t TestingT, object interface{}, msg string, args ...interface{}) bool { - if h, ok := t.(tHelper); ok { - h.Helper() - } - return IsDecreasing(t, object, append([]interface{}{msg}, args...)...) -} - -// IsIncreasingf asserts that the collection is increasing -// -// assert.IsIncreasingf(t, []int{1, 2, 3}, "error message %s", "formatted") -// assert.IsIncreasingf(t, []float{1, 2}, "error message %s", "formatted") -// assert.IsIncreasingf(t, []string{"a", "b"}, "error message %s", "formatted") -func IsIncreasingf(t TestingT, object interface{}, msg string, args ...interface{}) bool { - if h, ok := t.(tHelper); ok { - h.Helper() - } - return IsIncreasing(t, object, append([]interface{}{msg}, args...)...) -} - -// IsNonDecreasingf asserts that the collection is not decreasing -// -// assert.IsNonDecreasingf(t, []int{1, 1, 2}, "error message %s", "formatted") -// assert.IsNonDecreasingf(t, []float{1, 2}, "error message %s", "formatted") -// assert.IsNonDecreasingf(t, []string{"a", "b"}, "error message %s", "formatted") -func IsNonDecreasingf(t TestingT, object interface{}, msg string, args ...interface{}) bool { - if h, ok := t.(tHelper); ok { - h.Helper() - } - return IsNonDecreasing(t, object, append([]interface{}{msg}, args...)...) -} - -// IsNonIncreasingf asserts that the collection is not increasing -// -// assert.IsNonIncreasingf(t, []int{2, 1, 1}, "error message %s", "formatted") -// assert.IsNonIncreasingf(t, []float{2, 1}, "error message %s", "formatted") -// assert.IsNonIncreasingf(t, []string{"b", "a"}, "error message %s", "formatted") -func IsNonIncreasingf(t TestingT, object interface{}, msg string, args ...interface{}) bool { - if h, ok := t.(tHelper); ok { - h.Helper() - } - return IsNonIncreasing(t, object, append([]interface{}{msg}, args...)...) 
-} - -// IsTypef asserts that the specified objects are of the same type. -func IsTypef(t TestingT, expectedType interface{}, object interface{}, msg string, args ...interface{}) bool { - if h, ok := t.(tHelper); ok { - h.Helper() - } - return IsType(t, expectedType, object, append([]interface{}{msg}, args...)...) -} - -// JSONEqf asserts that two JSON strings are equivalent. -// -// assert.JSONEqf(t, `{"hello": "world", "foo": "bar"}`, `{"foo": "bar", "hello": "world"}`, "error message %s", "formatted") -func JSONEqf(t TestingT, expected string, actual string, msg string, args ...interface{}) bool { - if h, ok := t.(tHelper); ok { - h.Helper() - } - return JSONEq(t, expected, actual, append([]interface{}{msg}, args...)...) -} - -// Lenf asserts that the specified object has specific length. -// Lenf also fails if the object has a type that len() not accept. -// -// assert.Lenf(t, mySlice, 3, "error message %s", "formatted") -func Lenf(t TestingT, object interface{}, length int, msg string, args ...interface{}) bool { - if h, ok := t.(tHelper); ok { - h.Helper() - } - return Len(t, object, length, append([]interface{}{msg}, args...)...) -} - -// Lessf asserts that the first element is less than the second -// -// assert.Lessf(t, 1, 2, "error message %s", "formatted") -// assert.Lessf(t, float64(1), float64(2), "error message %s", "formatted") -// assert.Lessf(t, "a", "b", "error message %s", "formatted") -func Lessf(t TestingT, e1 interface{}, e2 interface{}, msg string, args ...interface{}) bool { - if h, ok := t.(tHelper); ok { - h.Helper() - } - return Less(t, e1, e2, append([]interface{}{msg}, args...)...) -} - -// LessOrEqualf asserts that the first element is less than or equal to the second -// -// assert.LessOrEqualf(t, 1, 2, "error message %s", "formatted") -// assert.LessOrEqualf(t, 2, 2, "error message %s", "formatted") -// assert.LessOrEqualf(t, "a", "b", "error message %s", "formatted") -// assert.LessOrEqualf(t, "b", "b", "error message %s", "formatted") -func LessOrEqualf(t TestingT, e1 interface{}, e2 interface{}, msg string, args ...interface{}) bool { - if h, ok := t.(tHelper); ok { - h.Helper() - } - return LessOrEqual(t, e1, e2, append([]interface{}{msg}, args...)...) -} - -// Negativef asserts that the specified element is negative -// -// assert.Negativef(t, -1, "error message %s", "formatted") -// assert.Negativef(t, -1.23, "error message %s", "formatted") -func Negativef(t TestingT, e interface{}, msg string, args ...interface{}) bool { - if h, ok := t.(tHelper); ok { - h.Helper() - } - return Negative(t, e, append([]interface{}{msg}, args...)...) -} - -// Neverf asserts that the given condition doesn't satisfy in waitFor time, -// periodically checking the target function each tick. -// -// assert.Neverf(t, func() bool { return false; }, time.Second, 10*time.Millisecond, "error message %s", "formatted") -func Neverf(t TestingT, condition func() bool, waitFor time.Duration, tick time.Duration, msg string, args ...interface{}) bool { - if h, ok := t.(tHelper); ok { - h.Helper() - } - return Never(t, condition, waitFor, tick, append([]interface{}{msg}, args...)...) -} - -// Nilf asserts that the specified object is nil. -// -// assert.Nilf(t, err, "error message %s", "formatted") -func Nilf(t TestingT, object interface{}, msg string, args ...interface{}) bool { - if h, ok := t.(tHelper); ok { - h.Helper() - } - return Nil(t, object, append([]interface{}{msg}, args...)...) -} - -// NoDirExistsf checks whether a directory does not exist in the given path. 
-// It fails if the path points to an existing _directory_ only. -func NoDirExistsf(t TestingT, path string, msg string, args ...interface{}) bool { - if h, ok := t.(tHelper); ok { - h.Helper() - } - return NoDirExists(t, path, append([]interface{}{msg}, args...)...) -} - -// NoErrorf asserts that a function returned no error (i.e. `nil`). -// -// actualObj, err := SomeFunction() -// if assert.NoErrorf(t, err, "error message %s", "formatted") { -// assert.Equal(t, expectedObj, actualObj) -// } -func NoErrorf(t TestingT, err error, msg string, args ...interface{}) bool { - if h, ok := t.(tHelper); ok { - h.Helper() - } - return NoError(t, err, append([]interface{}{msg}, args...)...) -} - -// NoFileExistsf checks whether a file does not exist in a given path. It fails -// if the path points to an existing _file_ only. -func NoFileExistsf(t TestingT, path string, msg string, args ...interface{}) bool { - if h, ok := t.(tHelper); ok { - h.Helper() - } - return NoFileExists(t, path, append([]interface{}{msg}, args...)...) -} - -// NotContainsf asserts that the specified string, list(array, slice...) or map does NOT contain the -// specified substring or element. -// -// assert.NotContainsf(t, "Hello World", "Earth", "error message %s", "formatted") -// assert.NotContainsf(t, ["Hello", "World"], "Earth", "error message %s", "formatted") -// assert.NotContainsf(t, {"Hello": "World"}, "Earth", "error message %s", "formatted") -func NotContainsf(t TestingT, s interface{}, contains interface{}, msg string, args ...interface{}) bool { - if h, ok := t.(tHelper); ok { - h.Helper() - } - return NotContains(t, s, contains, append([]interface{}{msg}, args...)...) -} - -// NotEmptyf asserts that the specified object is NOT empty. I.e. not nil, "", false, 0 or either -// a slice or a channel with len == 0. -// -// if assert.NotEmptyf(t, obj, "error message %s", "formatted") { -// assert.Equal(t, "two", obj[1]) -// } -func NotEmptyf(t TestingT, object interface{}, msg string, args ...interface{}) bool { - if h, ok := t.(tHelper); ok { - h.Helper() - } - return NotEmpty(t, object, append([]interface{}{msg}, args...)...) -} - -// NotEqualf asserts that the specified values are NOT equal. -// -// assert.NotEqualf(t, obj1, obj2, "error message %s", "formatted") -// -// Pointer variable equality is determined based on the equality of the -// referenced values (as opposed to the memory addresses). -func NotEqualf(t TestingT, expected interface{}, actual interface{}, msg string, args ...interface{}) bool { - if h, ok := t.(tHelper); ok { - h.Helper() - } - return NotEqual(t, expected, actual, append([]interface{}{msg}, args...)...) -} - -// NotEqualValuesf asserts that two objects are not equal even when converted to the same type -// -// assert.NotEqualValuesf(t, obj1, obj2, "error message %s", "formatted") -func NotEqualValuesf(t TestingT, expected interface{}, actual interface{}, msg string, args ...interface{}) bool { - if h, ok := t.(tHelper); ok { - h.Helper() - } - return NotEqualValues(t, expected, actual, append([]interface{}{msg}, args...)...) -} - -// NotErrorIsf asserts that at none of the errors in err's chain matches target. -// This is a wrapper for errors.Is. -func NotErrorIsf(t TestingT, err error, target error, msg string, args ...interface{}) bool { - if h, ok := t.(tHelper); ok { - h.Helper() - } - return NotErrorIs(t, err, target, append([]interface{}{msg}, args...)...) -} - -// NotImplementsf asserts that an object does not implement the specified interface. 
-// -// assert.NotImplementsf(t, (*MyInterface)(nil), new(MyObject), "error message %s", "formatted") -func NotImplementsf(t TestingT, interfaceObject interface{}, object interface{}, msg string, args ...interface{}) bool { - if h, ok := t.(tHelper); ok { - h.Helper() - } - return NotImplements(t, interfaceObject, object, append([]interface{}{msg}, args...)...) -} - -// NotNilf asserts that the specified object is not nil. -// -// assert.NotNilf(t, err, "error message %s", "formatted") -func NotNilf(t TestingT, object interface{}, msg string, args ...interface{}) bool { - if h, ok := t.(tHelper); ok { - h.Helper() - } - return NotNil(t, object, append([]interface{}{msg}, args...)...) -} - -// NotPanicsf asserts that the code inside the specified PanicTestFunc does NOT panic. -// -// assert.NotPanicsf(t, func(){ RemainCalm() }, "error message %s", "formatted") -func NotPanicsf(t TestingT, f PanicTestFunc, msg string, args ...interface{}) bool { - if h, ok := t.(tHelper); ok { - h.Helper() - } - return NotPanics(t, f, append([]interface{}{msg}, args...)...) -} - -// NotRegexpf asserts that a specified regexp does not match a string. -// -// assert.NotRegexpf(t, regexp.MustCompile("starts"), "it's starting", "error message %s", "formatted") -// assert.NotRegexpf(t, "^start", "it's not starting", "error message %s", "formatted") -func NotRegexpf(t TestingT, rx interface{}, str interface{}, msg string, args ...interface{}) bool { - if h, ok := t.(tHelper); ok { - h.Helper() - } - return NotRegexp(t, rx, str, append([]interface{}{msg}, args...)...) -} - -// NotSamef asserts that two pointers do not reference the same object. -// -// assert.NotSamef(t, ptr1, ptr2, "error message %s", "formatted") -// -// Both arguments must be pointer variables. Pointer variable sameness is -// determined based on the equality of both type and value. -func NotSamef(t TestingT, expected interface{}, actual interface{}, msg string, args ...interface{}) bool { - if h, ok := t.(tHelper); ok { - h.Helper() - } - return NotSame(t, expected, actual, append([]interface{}{msg}, args...)...) -} - -// NotSubsetf asserts that the specified list(array, slice...) or map does NOT -// contain all elements given in the specified subset list(array, slice...) or -// map. -// -// assert.NotSubsetf(t, [1, 3, 4], [1, 2], "error message %s", "formatted") -// assert.NotSubsetf(t, {"x": 1, "y": 2}, {"z": 3}, "error message %s", "formatted") -func NotSubsetf(t TestingT, list interface{}, subset interface{}, msg string, args ...interface{}) bool { - if h, ok := t.(tHelper); ok { - h.Helper() - } - return NotSubset(t, list, subset, append([]interface{}{msg}, args...)...) -} - -// NotZerof asserts that i is not the zero value for its type. -func NotZerof(t TestingT, i interface{}, msg string, args ...interface{}) bool { - if h, ok := t.(tHelper); ok { - h.Helper() - } - return NotZero(t, i, append([]interface{}{msg}, args...)...) -} - -// Panicsf asserts that the code inside the specified PanicTestFunc panics. -// -// assert.Panicsf(t, func(){ GoCrazy() }, "error message %s", "formatted") -func Panicsf(t TestingT, f PanicTestFunc, msg string, args ...interface{}) bool { - if h, ok := t.(tHelper); ok { - h.Helper() - } - return Panics(t, f, append([]interface{}{msg}, args...)...) -} - -// PanicsWithErrorf asserts that the code inside the specified PanicTestFunc -// panics, and that the recovered panic value is an error that satisfies the -// EqualError comparison. 
-// -// assert.PanicsWithErrorf(t, "crazy error", func(){ GoCrazy() }, "error message %s", "formatted") -func PanicsWithErrorf(t TestingT, errString string, f PanicTestFunc, msg string, args ...interface{}) bool { - if h, ok := t.(tHelper); ok { - h.Helper() - } - return PanicsWithError(t, errString, f, append([]interface{}{msg}, args...)...) -} - -// PanicsWithValuef asserts that the code inside the specified PanicTestFunc panics, and that -// the recovered panic value equals the expected panic value. -// -// assert.PanicsWithValuef(t, "crazy error", func(){ GoCrazy() }, "error message %s", "formatted") -func PanicsWithValuef(t TestingT, expected interface{}, f PanicTestFunc, msg string, args ...interface{}) bool { - if h, ok := t.(tHelper); ok { - h.Helper() - } - return PanicsWithValue(t, expected, f, append([]interface{}{msg}, args...)...) -} - -// Positivef asserts that the specified element is positive -// -// assert.Positivef(t, 1, "error message %s", "formatted") -// assert.Positivef(t, 1.23, "error message %s", "formatted") -func Positivef(t TestingT, e interface{}, msg string, args ...interface{}) bool { - if h, ok := t.(tHelper); ok { - h.Helper() - } - return Positive(t, e, append([]interface{}{msg}, args...)...) -} - -// Regexpf asserts that a specified regexp matches a string. -// -// assert.Regexpf(t, regexp.MustCompile("start"), "it's starting", "error message %s", "formatted") -// assert.Regexpf(t, "start...$", "it's not starting", "error message %s", "formatted") -func Regexpf(t TestingT, rx interface{}, str interface{}, msg string, args ...interface{}) bool { - if h, ok := t.(tHelper); ok { - h.Helper() - } - return Regexp(t, rx, str, append([]interface{}{msg}, args...)...) -} - -// Samef asserts that two pointers reference the same object. -// -// assert.Samef(t, ptr1, ptr2, "error message %s", "formatted") -// -// Both arguments must be pointer variables. Pointer variable sameness is -// determined based on the equality of both type and value. -func Samef(t TestingT, expected interface{}, actual interface{}, msg string, args ...interface{}) bool { - if h, ok := t.(tHelper); ok { - h.Helper() - } - return Same(t, expected, actual, append([]interface{}{msg}, args...)...) -} - -// Subsetf asserts that the specified list(array, slice...) or map contains all -// elements given in the specified subset list(array, slice...) or map. -// -// assert.Subsetf(t, [1, 2, 3], [1, 2], "error message %s", "formatted") -// assert.Subsetf(t, {"x": 1, "y": 2}, {"x": 1}, "error message %s", "formatted") -func Subsetf(t TestingT, list interface{}, subset interface{}, msg string, args ...interface{}) bool { - if h, ok := t.(tHelper); ok { - h.Helper() - } - return Subset(t, list, subset, append([]interface{}{msg}, args...)...) -} - -// Truef asserts that the specified value is true. -// -// assert.Truef(t, myBool, "error message %s", "formatted") -func Truef(t TestingT, value bool, msg string, args ...interface{}) bool { - if h, ok := t.(tHelper); ok { - h.Helper() - } - return True(t, value, append([]interface{}{msg}, args...)...) -} - -// WithinDurationf asserts that the two times are within duration delta of each other. 
-// -// assert.WithinDurationf(t, time.Now(), time.Now(), 10*time.Second, "error message %s", "formatted") -func WithinDurationf(t TestingT, expected time.Time, actual time.Time, delta time.Duration, msg string, args ...interface{}) bool { - if h, ok := t.(tHelper); ok { - h.Helper() - } - return WithinDuration(t, expected, actual, delta, append([]interface{}{msg}, args...)...) -} - -// WithinRangef asserts that a time is within a time range (inclusive). -// -// assert.WithinRangef(t, time.Now(), time.Now().Add(-time.Second), time.Now().Add(time.Second), "error message %s", "formatted") -func WithinRangef(t TestingT, actual time.Time, start time.Time, end time.Time, msg string, args ...interface{}) bool { - if h, ok := t.(tHelper); ok { - h.Helper() - } - return WithinRange(t, actual, start, end, append([]interface{}{msg}, args...)...) -} - -// YAMLEqf asserts that two YAML strings are equivalent. -func YAMLEqf(t TestingT, expected string, actual string, msg string, args ...interface{}) bool { - if h, ok := t.(tHelper); ok { - h.Helper() - } - return YAMLEq(t, expected, actual, append([]interface{}{msg}, args...)...) -} - -// Zerof asserts that i is the zero value for its type. -func Zerof(t TestingT, i interface{}, msg string, args ...interface{}) bool { - if h, ok := t.(tHelper); ok { - h.Helper() - } - return Zero(t, i, append([]interface{}{msg}, args...)...) -} diff --git a/vendor/github.com/stretchr/testify/assert/assertion_format.go.tmpl b/vendor/github.com/stretchr/testify/assert/assertion_format.go.tmpl deleted file mode 100644 index d2bb0b817..000000000 --- a/vendor/github.com/stretchr/testify/assert/assertion_format.go.tmpl +++ /dev/null @@ -1,5 +0,0 @@ -{{.CommentFormat}} -func {{.DocInfo.Name}}f(t TestingT, {{.ParamsFormat}}) bool { - if h, ok := t.(tHelper); ok { h.Helper() } - return {{.DocInfo.Name}}(t, {{.ForwardedParamsFormat}}) -} diff --git a/vendor/github.com/stretchr/testify/assert/assertion_forward.go b/vendor/github.com/stretchr/testify/assert/assertion_forward.go deleted file mode 100644 index a84e09bd4..000000000 --- a/vendor/github.com/stretchr/testify/assert/assertion_forward.go +++ /dev/null @@ -1,1621 +0,0 @@ -// Code generated with github.com/stretchr/testify/_codegen; DO NOT EDIT. - -package assert - -import ( - http "net/http" - url "net/url" - time "time" -) - -// Condition uses a Comparison to assert a complex condition. -func (a *Assertions) Condition(comp Comparison, msgAndArgs ...interface{}) bool { - if h, ok := a.t.(tHelper); ok { - h.Helper() - } - return Condition(a.t, comp, msgAndArgs...) -} - -// Conditionf uses a Comparison to assert a complex condition. -func (a *Assertions) Conditionf(comp Comparison, msg string, args ...interface{}) bool { - if h, ok := a.t.(tHelper); ok { - h.Helper() - } - return Conditionf(a.t, comp, msg, args...) -} - -// Contains asserts that the specified string, list(array, slice...) or map contains the -// specified substring or element. -// -// a.Contains("Hello World", "World") -// a.Contains(["Hello", "World"], "World") -// a.Contains({"Hello": "World"}, "Hello") -func (a *Assertions) Contains(s interface{}, contains interface{}, msgAndArgs ...interface{}) bool { - if h, ok := a.t.(tHelper); ok { - h.Helper() - } - return Contains(a.t, s, contains, msgAndArgs...) -} - -// Containsf asserts that the specified string, list(array, slice...) or map contains the -// specified substring or element. 
-// -// a.Containsf("Hello World", "World", "error message %s", "formatted") -// a.Containsf(["Hello", "World"], "World", "error message %s", "formatted") -// a.Containsf({"Hello": "World"}, "Hello", "error message %s", "formatted") -func (a *Assertions) Containsf(s interface{}, contains interface{}, msg string, args ...interface{}) bool { - if h, ok := a.t.(tHelper); ok { - h.Helper() - } - return Containsf(a.t, s, contains, msg, args...) -} - -// DirExists checks whether a directory exists in the given path. It also fails -// if the path is a file rather a directory or there is an error checking whether it exists. -func (a *Assertions) DirExists(path string, msgAndArgs ...interface{}) bool { - if h, ok := a.t.(tHelper); ok { - h.Helper() - } - return DirExists(a.t, path, msgAndArgs...) -} - -// DirExistsf checks whether a directory exists in the given path. It also fails -// if the path is a file rather a directory or there is an error checking whether it exists. -func (a *Assertions) DirExistsf(path string, msg string, args ...interface{}) bool { - if h, ok := a.t.(tHelper); ok { - h.Helper() - } - return DirExistsf(a.t, path, msg, args...) -} - -// ElementsMatch asserts that the specified listA(array, slice...) is equal to specified -// listB(array, slice...) ignoring the order of the elements. If there are duplicate elements, -// the number of appearances of each of them in both lists should match. -// -// a.ElementsMatch([1, 3, 2, 3], [1, 3, 3, 2]) -func (a *Assertions) ElementsMatch(listA interface{}, listB interface{}, msgAndArgs ...interface{}) bool { - if h, ok := a.t.(tHelper); ok { - h.Helper() - } - return ElementsMatch(a.t, listA, listB, msgAndArgs...) -} - -// ElementsMatchf asserts that the specified listA(array, slice...) is equal to specified -// listB(array, slice...) ignoring the order of the elements. If there are duplicate elements, -// the number of appearances of each of them in both lists should match. -// -// a.ElementsMatchf([1, 3, 2, 3], [1, 3, 3, 2], "error message %s", "formatted") -func (a *Assertions) ElementsMatchf(listA interface{}, listB interface{}, msg string, args ...interface{}) bool { - if h, ok := a.t.(tHelper); ok { - h.Helper() - } - return ElementsMatchf(a.t, listA, listB, msg, args...) -} - -// Empty asserts that the specified object is empty. I.e. nil, "", false, 0 or either -// a slice or a channel with len == 0. -// -// a.Empty(obj) -func (a *Assertions) Empty(object interface{}, msgAndArgs ...interface{}) bool { - if h, ok := a.t.(tHelper); ok { - h.Helper() - } - return Empty(a.t, object, msgAndArgs...) -} - -// Emptyf asserts that the specified object is empty. I.e. nil, "", false, 0 or either -// a slice or a channel with len == 0. -// -// a.Emptyf(obj, "error message %s", "formatted") -func (a *Assertions) Emptyf(object interface{}, msg string, args ...interface{}) bool { - if h, ok := a.t.(tHelper); ok { - h.Helper() - } - return Emptyf(a.t, object, msg, args...) -} - -// Equal asserts that two objects are equal. -// -// a.Equal(123, 123) -// -// Pointer variable equality is determined based on the equality of the -// referenced values (as opposed to the memory addresses). Function equality -// cannot be determined and will always fail. -func (a *Assertions) Equal(expected interface{}, actual interface{}, msgAndArgs ...interface{}) bool { - if h, ok := a.t.(tHelper); ok { - h.Helper() - } - return Equal(a.t, expected, actual, msgAndArgs...) -} - -// EqualError asserts that a function returned an error (i.e. 
not `nil`) -// and that it is equal to the provided error. -// -// actualObj, err := SomeFunction() -// a.EqualError(err, expectedErrorString) -func (a *Assertions) EqualError(theError error, errString string, msgAndArgs ...interface{}) bool { - if h, ok := a.t.(tHelper); ok { - h.Helper() - } - return EqualError(a.t, theError, errString, msgAndArgs...) -} - -// EqualErrorf asserts that a function returned an error (i.e. not `nil`) -// and that it is equal to the provided error. -// -// actualObj, err := SomeFunction() -// a.EqualErrorf(err, expectedErrorString, "error message %s", "formatted") -func (a *Assertions) EqualErrorf(theError error, errString string, msg string, args ...interface{}) bool { - if h, ok := a.t.(tHelper); ok { - h.Helper() - } - return EqualErrorf(a.t, theError, errString, msg, args...) -} - -// EqualExportedValues asserts that the types of two objects are equal and their public -// fields are also equal. This is useful for comparing structs that have private fields -// that could potentially differ. -// -// type S struct { -// Exported int -// notExported int -// } -// a.EqualExportedValues(S{1, 2}, S{1, 3}) => true -// a.EqualExportedValues(S{1, 2}, S{2, 3}) => false -func (a *Assertions) EqualExportedValues(expected interface{}, actual interface{}, msgAndArgs ...interface{}) bool { - if h, ok := a.t.(tHelper); ok { - h.Helper() - } - return EqualExportedValues(a.t, expected, actual, msgAndArgs...) -} - -// EqualExportedValuesf asserts that the types of two objects are equal and their public -// fields are also equal. This is useful for comparing structs that have private fields -// that could potentially differ. -// -// type S struct { -// Exported int -// notExported int -// } -// a.EqualExportedValuesf(S{1, 2}, S{1, 3}, "error message %s", "formatted") => true -// a.EqualExportedValuesf(S{1, 2}, S{2, 3}, "error message %s", "formatted") => false -func (a *Assertions) EqualExportedValuesf(expected interface{}, actual interface{}, msg string, args ...interface{}) bool { - if h, ok := a.t.(tHelper); ok { - h.Helper() - } - return EqualExportedValuesf(a.t, expected, actual, msg, args...) -} - -// EqualValues asserts that two objects are equal or convertible to the same types -// and equal. -// -// a.EqualValues(uint32(123), int32(123)) -func (a *Assertions) EqualValues(expected interface{}, actual interface{}, msgAndArgs ...interface{}) bool { - if h, ok := a.t.(tHelper); ok { - h.Helper() - } - return EqualValues(a.t, expected, actual, msgAndArgs...) -} - -// EqualValuesf asserts that two objects are equal or convertible to the same types -// and equal. -// -// a.EqualValuesf(uint32(123), int32(123), "error message %s", "formatted") -func (a *Assertions) EqualValuesf(expected interface{}, actual interface{}, msg string, args ...interface{}) bool { - if h, ok := a.t.(tHelper); ok { - h.Helper() - } - return EqualValuesf(a.t, expected, actual, msg, args...) -} - -// Equalf asserts that two objects are equal. -// -// a.Equalf(123, 123, "error message %s", "formatted") -// -// Pointer variable equality is determined based on the equality of the -// referenced values (as opposed to the memory addresses). Function equality -// cannot be determined and will always fail. -func (a *Assertions) Equalf(expected interface{}, actual interface{}, msg string, args ...interface{}) bool { - if h, ok := a.t.(tHelper); ok { - h.Helper() - } - return Equalf(a.t, expected, actual, msg, args...) -} - -// Error asserts that a function returned an error (i.e. not `nil`). 
-// -// actualObj, err := SomeFunction() -// if a.Error(err) { -// assert.Equal(t, expectedError, err) -// } -func (a *Assertions) Error(err error, msgAndArgs ...interface{}) bool { - if h, ok := a.t.(tHelper); ok { - h.Helper() - } - return Error(a.t, err, msgAndArgs...) -} - -// ErrorAs asserts that at least one of the errors in err's chain matches target, and if so, sets target to that error value. -// This is a wrapper for errors.As. -func (a *Assertions) ErrorAs(err error, target interface{}, msgAndArgs ...interface{}) bool { - if h, ok := a.t.(tHelper); ok { - h.Helper() - } - return ErrorAs(a.t, err, target, msgAndArgs...) -} - -// ErrorAsf asserts that at least one of the errors in err's chain matches target, and if so, sets target to that error value. -// This is a wrapper for errors.As. -func (a *Assertions) ErrorAsf(err error, target interface{}, msg string, args ...interface{}) bool { - if h, ok := a.t.(tHelper); ok { - h.Helper() - } - return ErrorAsf(a.t, err, target, msg, args...) -} - -// ErrorContains asserts that a function returned an error (i.e. not `nil`) -// and that the error contains the specified substring. -// -// actualObj, err := SomeFunction() -// a.ErrorContains(err, expectedErrorSubString) -func (a *Assertions) ErrorContains(theError error, contains string, msgAndArgs ...interface{}) bool { - if h, ok := a.t.(tHelper); ok { - h.Helper() - } - return ErrorContains(a.t, theError, contains, msgAndArgs...) -} - -// ErrorContainsf asserts that a function returned an error (i.e. not `nil`) -// and that the error contains the specified substring. -// -// actualObj, err := SomeFunction() -// a.ErrorContainsf(err, expectedErrorSubString, "error message %s", "formatted") -func (a *Assertions) ErrorContainsf(theError error, contains string, msg string, args ...interface{}) bool { - if h, ok := a.t.(tHelper); ok { - h.Helper() - } - return ErrorContainsf(a.t, theError, contains, msg, args...) -} - -// ErrorIs asserts that at least one of the errors in err's chain matches target. -// This is a wrapper for errors.Is. -func (a *Assertions) ErrorIs(err error, target error, msgAndArgs ...interface{}) bool { - if h, ok := a.t.(tHelper); ok { - h.Helper() - } - return ErrorIs(a.t, err, target, msgAndArgs...) -} - -// ErrorIsf asserts that at least one of the errors in err's chain matches target. -// This is a wrapper for errors.Is. -func (a *Assertions) ErrorIsf(err error, target error, msg string, args ...interface{}) bool { - if h, ok := a.t.(tHelper); ok { - h.Helper() - } - return ErrorIsf(a.t, err, target, msg, args...) -} - -// Errorf asserts that a function returned an error (i.e. not `nil`). -// -// actualObj, err := SomeFunction() -// if a.Errorf(err, "error message %s", "formatted") { -// assert.Equal(t, expectedErrorf, err) -// } -func (a *Assertions) Errorf(err error, msg string, args ...interface{}) bool { - if h, ok := a.t.(tHelper); ok { - h.Helper() - } - return Errorf(a.t, err, msg, args...) -} - -// Eventually asserts that given condition will be met in waitFor time, -// periodically checking target function each tick. -// -// a.Eventually(func() bool { return true; }, time.Second, 10*time.Millisecond) -func (a *Assertions) Eventually(condition func() bool, waitFor time.Duration, tick time.Duration, msgAndArgs ...interface{}) bool { - if h, ok := a.t.(tHelper); ok { - h.Helper() - } - return Eventually(a.t, condition, waitFor, tick, msgAndArgs...) 
-} - -// EventuallyWithT asserts that given condition will be met in waitFor time, -// periodically checking target function each tick. In contrast to Eventually, -// it supplies a CollectT to the condition function, so that the condition -// function can use the CollectT to call other assertions. -// The condition is considered "met" if no errors are raised in a tick. -// The supplied CollectT collects all errors from one tick (if there are any). -// If the condition is not met before waitFor, the collected errors of -// the last tick are copied to t. -// -// externalValue := false -// go func() { -// time.Sleep(8*time.Second) -// externalValue = true -// }() -// a.EventuallyWithT(func(c *assert.CollectT) { -// // add assertions as needed; any assertion failure will fail the current tick -// assert.True(c, externalValue, "expected 'externalValue' to be true") -// }, 1*time.Second, 10*time.Second, "external state has not changed to 'true'; still false") -func (a *Assertions) EventuallyWithT(condition func(collect *CollectT), waitFor time.Duration, tick time.Duration, msgAndArgs ...interface{}) bool { - if h, ok := a.t.(tHelper); ok { - h.Helper() - } - return EventuallyWithT(a.t, condition, waitFor, tick, msgAndArgs...) -} - -// EventuallyWithTf asserts that given condition will be met in waitFor time, -// periodically checking target function each tick. In contrast to Eventually, -// it supplies a CollectT to the condition function, so that the condition -// function can use the CollectT to call other assertions. -// The condition is considered "met" if no errors are raised in a tick. -// The supplied CollectT collects all errors from one tick (if there are any). -// If the condition is not met before waitFor, the collected errors of -// the last tick are copied to t. -// -// externalValue := false -// go func() { -// time.Sleep(8*time.Second) -// externalValue = true -// }() -// a.EventuallyWithTf(func(c *assert.CollectT, "error message %s", "formatted") { -// // add assertions as needed; any assertion failure will fail the current tick -// assert.True(c, externalValue, "expected 'externalValue' to be true") -// }, 1*time.Second, 10*time.Second, "external state has not changed to 'true'; still false") -func (a *Assertions) EventuallyWithTf(condition func(collect *CollectT), waitFor time.Duration, tick time.Duration, msg string, args ...interface{}) bool { - if h, ok := a.t.(tHelper); ok { - h.Helper() - } - return EventuallyWithTf(a.t, condition, waitFor, tick, msg, args...) -} - -// Eventuallyf asserts that given condition will be met in waitFor time, -// periodically checking target function each tick. -// -// a.Eventuallyf(func() bool { return true; }, time.Second, 10*time.Millisecond, "error message %s", "formatted") -func (a *Assertions) Eventuallyf(condition func() bool, waitFor time.Duration, tick time.Duration, msg string, args ...interface{}) bool { - if h, ok := a.t.(tHelper); ok { - h.Helper() - } - return Eventuallyf(a.t, condition, waitFor, tick, msg, args...) -} - -// Exactly asserts that two objects are equal in value and type. -// -// a.Exactly(int32(123), int64(123)) -func (a *Assertions) Exactly(expected interface{}, actual interface{}, msgAndArgs ...interface{}) bool { - if h, ok := a.t.(tHelper); ok { - h.Helper() - } - return Exactly(a.t, expected, actual, msgAndArgs...) -} - -// Exactlyf asserts that two objects are equal in value and type. 
-// -// a.Exactlyf(int32(123), int64(123), "error message %s", "formatted") -func (a *Assertions) Exactlyf(expected interface{}, actual interface{}, msg string, args ...interface{}) bool { - if h, ok := a.t.(tHelper); ok { - h.Helper() - } - return Exactlyf(a.t, expected, actual, msg, args...) -} - -// Fail reports a failure through -func (a *Assertions) Fail(failureMessage string, msgAndArgs ...interface{}) bool { - if h, ok := a.t.(tHelper); ok { - h.Helper() - } - return Fail(a.t, failureMessage, msgAndArgs...) -} - -// FailNow fails test -func (a *Assertions) FailNow(failureMessage string, msgAndArgs ...interface{}) bool { - if h, ok := a.t.(tHelper); ok { - h.Helper() - } - return FailNow(a.t, failureMessage, msgAndArgs...) -} - -// FailNowf fails test -func (a *Assertions) FailNowf(failureMessage string, msg string, args ...interface{}) bool { - if h, ok := a.t.(tHelper); ok { - h.Helper() - } - return FailNowf(a.t, failureMessage, msg, args...) -} - -// Failf reports a failure through -func (a *Assertions) Failf(failureMessage string, msg string, args ...interface{}) bool { - if h, ok := a.t.(tHelper); ok { - h.Helper() - } - return Failf(a.t, failureMessage, msg, args...) -} - -// False asserts that the specified value is false. -// -// a.False(myBool) -func (a *Assertions) False(value bool, msgAndArgs ...interface{}) bool { - if h, ok := a.t.(tHelper); ok { - h.Helper() - } - return False(a.t, value, msgAndArgs...) -} - -// Falsef asserts that the specified value is false. -// -// a.Falsef(myBool, "error message %s", "formatted") -func (a *Assertions) Falsef(value bool, msg string, args ...interface{}) bool { - if h, ok := a.t.(tHelper); ok { - h.Helper() - } - return Falsef(a.t, value, msg, args...) -} - -// FileExists checks whether a file exists in the given path. It also fails if -// the path points to a directory or there is an error when trying to check the file. -func (a *Assertions) FileExists(path string, msgAndArgs ...interface{}) bool { - if h, ok := a.t.(tHelper); ok { - h.Helper() - } - return FileExists(a.t, path, msgAndArgs...) -} - -// FileExistsf checks whether a file exists in the given path. It also fails if -// the path points to a directory or there is an error when trying to check the file. -func (a *Assertions) FileExistsf(path string, msg string, args ...interface{}) bool { - if h, ok := a.t.(tHelper); ok { - h.Helper() - } - return FileExistsf(a.t, path, msg, args...) -} - -// Greater asserts that the first element is greater than the second -// -// a.Greater(2, 1) -// a.Greater(float64(2), float64(1)) -// a.Greater("b", "a") -func (a *Assertions) Greater(e1 interface{}, e2 interface{}, msgAndArgs ...interface{}) bool { - if h, ok := a.t.(tHelper); ok { - h.Helper() - } - return Greater(a.t, e1, e2, msgAndArgs...) -} - -// GreaterOrEqual asserts that the first element is greater than or equal to the second -// -// a.GreaterOrEqual(2, 1) -// a.GreaterOrEqual(2, 2) -// a.GreaterOrEqual("b", "a") -// a.GreaterOrEqual("b", "b") -func (a *Assertions) GreaterOrEqual(e1 interface{}, e2 interface{}, msgAndArgs ...interface{}) bool { - if h, ok := a.t.(tHelper); ok { - h.Helper() - } - return GreaterOrEqual(a.t, e1, e2, msgAndArgs...) 
-} - -// GreaterOrEqualf asserts that the first element is greater than or equal to the second -// -// a.GreaterOrEqualf(2, 1, "error message %s", "formatted") -// a.GreaterOrEqualf(2, 2, "error message %s", "formatted") -// a.GreaterOrEqualf("b", "a", "error message %s", "formatted") -// a.GreaterOrEqualf("b", "b", "error message %s", "formatted") -func (a *Assertions) GreaterOrEqualf(e1 interface{}, e2 interface{}, msg string, args ...interface{}) bool { - if h, ok := a.t.(tHelper); ok { - h.Helper() - } - return GreaterOrEqualf(a.t, e1, e2, msg, args...) -} - -// Greaterf asserts that the first element is greater than the second -// -// a.Greaterf(2, 1, "error message %s", "formatted") -// a.Greaterf(float64(2), float64(1), "error message %s", "formatted") -// a.Greaterf("b", "a", "error message %s", "formatted") -func (a *Assertions) Greaterf(e1 interface{}, e2 interface{}, msg string, args ...interface{}) bool { - if h, ok := a.t.(tHelper); ok { - h.Helper() - } - return Greaterf(a.t, e1, e2, msg, args...) -} - -// HTTPBodyContains asserts that a specified handler returns a -// body that contains a string. -// -// a.HTTPBodyContains(myHandler, "GET", "www.google.com", nil, "I'm Feeling Lucky") -// -// Returns whether the assertion was successful (true) or not (false). -func (a *Assertions) HTTPBodyContains(handler http.HandlerFunc, method string, url string, values url.Values, str interface{}, msgAndArgs ...interface{}) bool { - if h, ok := a.t.(tHelper); ok { - h.Helper() - } - return HTTPBodyContains(a.t, handler, method, url, values, str, msgAndArgs...) -} - -// HTTPBodyContainsf asserts that a specified handler returns a -// body that contains a string. -// -// a.HTTPBodyContainsf(myHandler, "GET", "www.google.com", nil, "I'm Feeling Lucky", "error message %s", "formatted") -// -// Returns whether the assertion was successful (true) or not (false). -func (a *Assertions) HTTPBodyContainsf(handler http.HandlerFunc, method string, url string, values url.Values, str interface{}, msg string, args ...interface{}) bool { - if h, ok := a.t.(tHelper); ok { - h.Helper() - } - return HTTPBodyContainsf(a.t, handler, method, url, values, str, msg, args...) -} - -// HTTPBodyNotContains asserts that a specified handler returns a -// body that does not contain a string. -// -// a.HTTPBodyNotContains(myHandler, "GET", "www.google.com", nil, "I'm Feeling Lucky") -// -// Returns whether the assertion was successful (true) or not (false). -func (a *Assertions) HTTPBodyNotContains(handler http.HandlerFunc, method string, url string, values url.Values, str interface{}, msgAndArgs ...interface{}) bool { - if h, ok := a.t.(tHelper); ok { - h.Helper() - } - return HTTPBodyNotContains(a.t, handler, method, url, values, str, msgAndArgs...) -} - -// HTTPBodyNotContainsf asserts that a specified handler returns a -// body that does not contain a string. -// -// a.HTTPBodyNotContainsf(myHandler, "GET", "www.google.com", nil, "I'm Feeling Lucky", "error message %s", "formatted") -// -// Returns whether the assertion was successful (true) or not (false). -func (a *Assertions) HTTPBodyNotContainsf(handler http.HandlerFunc, method string, url string, values url.Values, str interface{}, msg string, args ...interface{}) bool { - if h, ok := a.t.(tHelper); ok { - h.Helper() - } - return HTTPBodyNotContainsf(a.t, handler, method, url, values, str, msg, args...) -} - -// HTTPError asserts that a specified handler returns an error status code. 
-// -// a.HTTPError(myHandler, "POST", "/a/b/c", url.Values{"a": []string{"b", "c"}} -// -// Returns whether the assertion was successful (true) or not (false). -func (a *Assertions) HTTPError(handler http.HandlerFunc, method string, url string, values url.Values, msgAndArgs ...interface{}) bool { - if h, ok := a.t.(tHelper); ok { - h.Helper() - } - return HTTPError(a.t, handler, method, url, values, msgAndArgs...) -} - -// HTTPErrorf asserts that a specified handler returns an error status code. -// -// a.HTTPErrorf(myHandler, "POST", "/a/b/c", url.Values{"a": []string{"b", "c"}} -// -// Returns whether the assertion was successful (true) or not (false). -func (a *Assertions) HTTPErrorf(handler http.HandlerFunc, method string, url string, values url.Values, msg string, args ...interface{}) bool { - if h, ok := a.t.(tHelper); ok { - h.Helper() - } - return HTTPErrorf(a.t, handler, method, url, values, msg, args...) -} - -// HTTPRedirect asserts that a specified handler returns a redirect status code. -// -// a.HTTPRedirect(myHandler, "GET", "/a/b/c", url.Values{"a": []string{"b", "c"}} -// -// Returns whether the assertion was successful (true) or not (false). -func (a *Assertions) HTTPRedirect(handler http.HandlerFunc, method string, url string, values url.Values, msgAndArgs ...interface{}) bool { - if h, ok := a.t.(tHelper); ok { - h.Helper() - } - return HTTPRedirect(a.t, handler, method, url, values, msgAndArgs...) -} - -// HTTPRedirectf asserts that a specified handler returns a redirect status code. -// -// a.HTTPRedirectf(myHandler, "GET", "/a/b/c", url.Values{"a": []string{"b", "c"}} -// -// Returns whether the assertion was successful (true) or not (false). -func (a *Assertions) HTTPRedirectf(handler http.HandlerFunc, method string, url string, values url.Values, msg string, args ...interface{}) bool { - if h, ok := a.t.(tHelper); ok { - h.Helper() - } - return HTTPRedirectf(a.t, handler, method, url, values, msg, args...) -} - -// HTTPStatusCode asserts that a specified handler returns a specified status code. -// -// a.HTTPStatusCode(myHandler, "GET", "/notImplemented", nil, 501) -// -// Returns whether the assertion was successful (true) or not (false). -func (a *Assertions) HTTPStatusCode(handler http.HandlerFunc, method string, url string, values url.Values, statuscode int, msgAndArgs ...interface{}) bool { - if h, ok := a.t.(tHelper); ok { - h.Helper() - } - return HTTPStatusCode(a.t, handler, method, url, values, statuscode, msgAndArgs...) -} - -// HTTPStatusCodef asserts that a specified handler returns a specified status code. -// -// a.HTTPStatusCodef(myHandler, "GET", "/notImplemented", nil, 501, "error message %s", "formatted") -// -// Returns whether the assertion was successful (true) or not (false). -func (a *Assertions) HTTPStatusCodef(handler http.HandlerFunc, method string, url string, values url.Values, statuscode int, msg string, args ...interface{}) bool { - if h, ok := a.t.(tHelper); ok { - h.Helper() - } - return HTTPStatusCodef(a.t, handler, method, url, values, statuscode, msg, args...) -} - -// HTTPSuccess asserts that a specified handler returns a success status code. -// -// a.HTTPSuccess(myHandler, "POST", "http://www.google.com", nil) -// -// Returns whether the assertion was successful (true) or not (false). 
-func (a *Assertions) HTTPSuccess(handler http.HandlerFunc, method string, url string, values url.Values, msgAndArgs ...interface{}) bool { - if h, ok := a.t.(tHelper); ok { - h.Helper() - } - return HTTPSuccess(a.t, handler, method, url, values, msgAndArgs...) -} - -// HTTPSuccessf asserts that a specified handler returns a success status code. -// -// a.HTTPSuccessf(myHandler, "POST", "http://www.google.com", nil, "error message %s", "formatted") -// -// Returns whether the assertion was successful (true) or not (false). -func (a *Assertions) HTTPSuccessf(handler http.HandlerFunc, method string, url string, values url.Values, msg string, args ...interface{}) bool { - if h, ok := a.t.(tHelper); ok { - h.Helper() - } - return HTTPSuccessf(a.t, handler, method, url, values, msg, args...) -} - -// Implements asserts that an object is implemented by the specified interface. -// -// a.Implements((*MyInterface)(nil), new(MyObject)) -func (a *Assertions) Implements(interfaceObject interface{}, object interface{}, msgAndArgs ...interface{}) bool { - if h, ok := a.t.(tHelper); ok { - h.Helper() - } - return Implements(a.t, interfaceObject, object, msgAndArgs...) -} - -// Implementsf asserts that an object is implemented by the specified interface. -// -// a.Implementsf((*MyInterface)(nil), new(MyObject), "error message %s", "formatted") -func (a *Assertions) Implementsf(interfaceObject interface{}, object interface{}, msg string, args ...interface{}) bool { - if h, ok := a.t.(tHelper); ok { - h.Helper() - } - return Implementsf(a.t, interfaceObject, object, msg, args...) -} - -// InDelta asserts that the two numerals are within delta of each other. -// -// a.InDelta(math.Pi, 22/7.0, 0.01) -func (a *Assertions) InDelta(expected interface{}, actual interface{}, delta float64, msgAndArgs ...interface{}) bool { - if h, ok := a.t.(tHelper); ok { - h.Helper() - } - return InDelta(a.t, expected, actual, delta, msgAndArgs...) -} - -// InDeltaMapValues is the same as InDelta, but it compares all values between two maps. Both maps must have exactly the same keys. -func (a *Assertions) InDeltaMapValues(expected interface{}, actual interface{}, delta float64, msgAndArgs ...interface{}) bool { - if h, ok := a.t.(tHelper); ok { - h.Helper() - } - return InDeltaMapValues(a.t, expected, actual, delta, msgAndArgs...) -} - -// InDeltaMapValuesf is the same as InDelta, but it compares all values between two maps. Both maps must have exactly the same keys. -func (a *Assertions) InDeltaMapValuesf(expected interface{}, actual interface{}, delta float64, msg string, args ...interface{}) bool { - if h, ok := a.t.(tHelper); ok { - h.Helper() - } - return InDeltaMapValuesf(a.t, expected, actual, delta, msg, args...) -} - -// InDeltaSlice is the same as InDelta, except it compares two slices. -func (a *Assertions) InDeltaSlice(expected interface{}, actual interface{}, delta float64, msgAndArgs ...interface{}) bool { - if h, ok := a.t.(tHelper); ok { - h.Helper() - } - return InDeltaSlice(a.t, expected, actual, delta, msgAndArgs...) -} - -// InDeltaSlicef is the same as InDelta, except it compares two slices. -func (a *Assertions) InDeltaSlicef(expected interface{}, actual interface{}, delta float64, msg string, args ...interface{}) bool { - if h, ok := a.t.(tHelper); ok { - h.Helper() - } - return InDeltaSlicef(a.t, expected, actual, delta, msg, args...) -} - -// InDeltaf asserts that the two numerals are within delta of each other. 
-// -// a.InDeltaf(math.Pi, 22/7.0, 0.01, "error message %s", "formatted") -func (a *Assertions) InDeltaf(expected interface{}, actual interface{}, delta float64, msg string, args ...interface{}) bool { - if h, ok := a.t.(tHelper); ok { - h.Helper() - } - return InDeltaf(a.t, expected, actual, delta, msg, args...) -} - -// InEpsilon asserts that expected and actual have a relative error less than epsilon -func (a *Assertions) InEpsilon(expected interface{}, actual interface{}, epsilon float64, msgAndArgs ...interface{}) bool { - if h, ok := a.t.(tHelper); ok { - h.Helper() - } - return InEpsilon(a.t, expected, actual, epsilon, msgAndArgs...) -} - -// InEpsilonSlice is the same as InEpsilon, except it compares each value from two slices. -func (a *Assertions) InEpsilonSlice(expected interface{}, actual interface{}, epsilon float64, msgAndArgs ...interface{}) bool { - if h, ok := a.t.(tHelper); ok { - h.Helper() - } - return InEpsilonSlice(a.t, expected, actual, epsilon, msgAndArgs...) -} - -// InEpsilonSlicef is the same as InEpsilon, except it compares each value from two slices. -func (a *Assertions) InEpsilonSlicef(expected interface{}, actual interface{}, epsilon float64, msg string, args ...interface{}) bool { - if h, ok := a.t.(tHelper); ok { - h.Helper() - } - return InEpsilonSlicef(a.t, expected, actual, epsilon, msg, args...) -} - -// InEpsilonf asserts that expected and actual have a relative error less than epsilon -func (a *Assertions) InEpsilonf(expected interface{}, actual interface{}, epsilon float64, msg string, args ...interface{}) bool { - if h, ok := a.t.(tHelper); ok { - h.Helper() - } - return InEpsilonf(a.t, expected, actual, epsilon, msg, args...) -} - -// IsDecreasing asserts that the collection is decreasing -// -// a.IsDecreasing([]int{2, 1, 0}) -// a.IsDecreasing([]float{2, 1}) -// a.IsDecreasing([]string{"b", "a"}) -func (a *Assertions) IsDecreasing(object interface{}, msgAndArgs ...interface{}) bool { - if h, ok := a.t.(tHelper); ok { - h.Helper() - } - return IsDecreasing(a.t, object, msgAndArgs...) -} - -// IsDecreasingf asserts that the collection is decreasing -// -// a.IsDecreasingf([]int{2, 1, 0}, "error message %s", "formatted") -// a.IsDecreasingf([]float{2, 1}, "error message %s", "formatted") -// a.IsDecreasingf([]string{"b", "a"}, "error message %s", "formatted") -func (a *Assertions) IsDecreasingf(object interface{}, msg string, args ...interface{}) bool { - if h, ok := a.t.(tHelper); ok { - h.Helper() - } - return IsDecreasingf(a.t, object, msg, args...) -} - -// IsIncreasing asserts that the collection is increasing -// -// a.IsIncreasing([]int{1, 2, 3}) -// a.IsIncreasing([]float{1, 2}) -// a.IsIncreasing([]string{"a", "b"}) -func (a *Assertions) IsIncreasing(object interface{}, msgAndArgs ...interface{}) bool { - if h, ok := a.t.(tHelper); ok { - h.Helper() - } - return IsIncreasing(a.t, object, msgAndArgs...) -} - -// IsIncreasingf asserts that the collection is increasing -// -// a.IsIncreasingf([]int{1, 2, 3}, "error message %s", "formatted") -// a.IsIncreasingf([]float{1, 2}, "error message %s", "formatted") -// a.IsIncreasingf([]string{"a", "b"}, "error message %s", "formatted") -func (a *Assertions) IsIncreasingf(object interface{}, msg string, args ...interface{}) bool { - if h, ok := a.t.(tHelper); ok { - h.Helper() - } - return IsIncreasingf(a.t, object, msg, args...) 
-} - -// IsNonDecreasing asserts that the collection is not decreasing -// -// a.IsNonDecreasing([]int{1, 1, 2}) -// a.IsNonDecreasing([]float{1, 2}) -// a.IsNonDecreasing([]string{"a", "b"}) -func (a *Assertions) IsNonDecreasing(object interface{}, msgAndArgs ...interface{}) bool { - if h, ok := a.t.(tHelper); ok { - h.Helper() - } - return IsNonDecreasing(a.t, object, msgAndArgs...) -} - -// IsNonDecreasingf asserts that the collection is not decreasing -// -// a.IsNonDecreasingf([]int{1, 1, 2}, "error message %s", "formatted") -// a.IsNonDecreasingf([]float{1, 2}, "error message %s", "formatted") -// a.IsNonDecreasingf([]string{"a", "b"}, "error message %s", "formatted") -func (a *Assertions) IsNonDecreasingf(object interface{}, msg string, args ...interface{}) bool { - if h, ok := a.t.(tHelper); ok { - h.Helper() - } - return IsNonDecreasingf(a.t, object, msg, args...) -} - -// IsNonIncreasing asserts that the collection is not increasing -// -// a.IsNonIncreasing([]int{2, 1, 1}) -// a.IsNonIncreasing([]float{2, 1}) -// a.IsNonIncreasing([]string{"b", "a"}) -func (a *Assertions) IsNonIncreasing(object interface{}, msgAndArgs ...interface{}) bool { - if h, ok := a.t.(tHelper); ok { - h.Helper() - } - return IsNonIncreasing(a.t, object, msgAndArgs...) -} - -// IsNonIncreasingf asserts that the collection is not increasing -// -// a.IsNonIncreasingf([]int{2, 1, 1}, "error message %s", "formatted") -// a.IsNonIncreasingf([]float{2, 1}, "error message %s", "formatted") -// a.IsNonIncreasingf([]string{"b", "a"}, "error message %s", "formatted") -func (a *Assertions) IsNonIncreasingf(object interface{}, msg string, args ...interface{}) bool { - if h, ok := a.t.(tHelper); ok { - h.Helper() - } - return IsNonIncreasingf(a.t, object, msg, args...) -} - -// IsType asserts that the specified objects are of the same type. -func (a *Assertions) IsType(expectedType interface{}, object interface{}, msgAndArgs ...interface{}) bool { - if h, ok := a.t.(tHelper); ok { - h.Helper() - } - return IsType(a.t, expectedType, object, msgAndArgs...) -} - -// IsTypef asserts that the specified objects are of the same type. -func (a *Assertions) IsTypef(expectedType interface{}, object interface{}, msg string, args ...interface{}) bool { - if h, ok := a.t.(tHelper); ok { - h.Helper() - } - return IsTypef(a.t, expectedType, object, msg, args...) -} - -// JSONEq asserts that two JSON strings are equivalent. -// -// a.JSONEq(`{"hello": "world", "foo": "bar"}`, `{"foo": "bar", "hello": "world"}`) -func (a *Assertions) JSONEq(expected string, actual string, msgAndArgs ...interface{}) bool { - if h, ok := a.t.(tHelper); ok { - h.Helper() - } - return JSONEq(a.t, expected, actual, msgAndArgs...) -} - -// JSONEqf asserts that two JSON strings are equivalent. -// -// a.JSONEqf(`{"hello": "world", "foo": "bar"}`, `{"foo": "bar", "hello": "world"}`, "error message %s", "formatted") -func (a *Assertions) JSONEqf(expected string, actual string, msg string, args ...interface{}) bool { - if h, ok := a.t.(tHelper); ok { - h.Helper() - } - return JSONEqf(a.t, expected, actual, msg, args...) -} - -// Len asserts that the specified object has specific length. -// Len also fails if the object has a type that len() not accept. -// -// a.Len(mySlice, 3) -func (a *Assertions) Len(object interface{}, length int, msgAndArgs ...interface{}) bool { - if h, ok := a.t.(tHelper); ok { - h.Helper() - } - return Len(a.t, object, length, msgAndArgs...) -} - -// Lenf asserts that the specified object has specific length. 
-// Lenf also fails if the object has a type that len() not accept. -// -// a.Lenf(mySlice, 3, "error message %s", "formatted") -func (a *Assertions) Lenf(object interface{}, length int, msg string, args ...interface{}) bool { - if h, ok := a.t.(tHelper); ok { - h.Helper() - } - return Lenf(a.t, object, length, msg, args...) -} - -// Less asserts that the first element is less than the second -// -// a.Less(1, 2) -// a.Less(float64(1), float64(2)) -// a.Less("a", "b") -func (a *Assertions) Less(e1 interface{}, e2 interface{}, msgAndArgs ...interface{}) bool { - if h, ok := a.t.(tHelper); ok { - h.Helper() - } - return Less(a.t, e1, e2, msgAndArgs...) -} - -// LessOrEqual asserts that the first element is less than or equal to the second -// -// a.LessOrEqual(1, 2) -// a.LessOrEqual(2, 2) -// a.LessOrEqual("a", "b") -// a.LessOrEqual("b", "b") -func (a *Assertions) LessOrEqual(e1 interface{}, e2 interface{}, msgAndArgs ...interface{}) bool { - if h, ok := a.t.(tHelper); ok { - h.Helper() - } - return LessOrEqual(a.t, e1, e2, msgAndArgs...) -} - -// LessOrEqualf asserts that the first element is less than or equal to the second -// -// a.LessOrEqualf(1, 2, "error message %s", "formatted") -// a.LessOrEqualf(2, 2, "error message %s", "formatted") -// a.LessOrEqualf("a", "b", "error message %s", "formatted") -// a.LessOrEqualf("b", "b", "error message %s", "formatted") -func (a *Assertions) LessOrEqualf(e1 interface{}, e2 interface{}, msg string, args ...interface{}) bool { - if h, ok := a.t.(tHelper); ok { - h.Helper() - } - return LessOrEqualf(a.t, e1, e2, msg, args...) -} - -// Lessf asserts that the first element is less than the second -// -// a.Lessf(1, 2, "error message %s", "formatted") -// a.Lessf(float64(1), float64(2), "error message %s", "formatted") -// a.Lessf("a", "b", "error message %s", "formatted") -func (a *Assertions) Lessf(e1 interface{}, e2 interface{}, msg string, args ...interface{}) bool { - if h, ok := a.t.(tHelper); ok { - h.Helper() - } - return Lessf(a.t, e1, e2, msg, args...) -} - -// Negative asserts that the specified element is negative -// -// a.Negative(-1) -// a.Negative(-1.23) -func (a *Assertions) Negative(e interface{}, msgAndArgs ...interface{}) bool { - if h, ok := a.t.(tHelper); ok { - h.Helper() - } - return Negative(a.t, e, msgAndArgs...) -} - -// Negativef asserts that the specified element is negative -// -// a.Negativef(-1, "error message %s", "formatted") -// a.Negativef(-1.23, "error message %s", "formatted") -func (a *Assertions) Negativef(e interface{}, msg string, args ...interface{}) bool { - if h, ok := a.t.(tHelper); ok { - h.Helper() - } - return Negativef(a.t, e, msg, args...) -} - -// Never asserts that the given condition doesn't satisfy in waitFor time, -// periodically checking the target function each tick. -// -// a.Never(func() bool { return false; }, time.Second, 10*time.Millisecond) -func (a *Assertions) Never(condition func() bool, waitFor time.Duration, tick time.Duration, msgAndArgs ...interface{}) bool { - if h, ok := a.t.(tHelper); ok { - h.Helper() - } - return Never(a.t, condition, waitFor, tick, msgAndArgs...) -} - -// Neverf asserts that the given condition doesn't satisfy in waitFor time, -// periodically checking the target function each tick. 
-// -// a.Neverf(func() bool { return false; }, time.Second, 10*time.Millisecond, "error message %s", "formatted") -func (a *Assertions) Neverf(condition func() bool, waitFor time.Duration, tick time.Duration, msg string, args ...interface{}) bool { - if h, ok := a.t.(tHelper); ok { - h.Helper() - } - return Neverf(a.t, condition, waitFor, tick, msg, args...) -} - -// Nil asserts that the specified object is nil. -// -// a.Nil(err) -func (a *Assertions) Nil(object interface{}, msgAndArgs ...interface{}) bool { - if h, ok := a.t.(tHelper); ok { - h.Helper() - } - return Nil(a.t, object, msgAndArgs...) -} - -// Nilf asserts that the specified object is nil. -// -// a.Nilf(err, "error message %s", "formatted") -func (a *Assertions) Nilf(object interface{}, msg string, args ...interface{}) bool { - if h, ok := a.t.(tHelper); ok { - h.Helper() - } - return Nilf(a.t, object, msg, args...) -} - -// NoDirExists checks whether a directory does not exist in the given path. -// It fails if the path points to an existing _directory_ only. -func (a *Assertions) NoDirExists(path string, msgAndArgs ...interface{}) bool { - if h, ok := a.t.(tHelper); ok { - h.Helper() - } - return NoDirExists(a.t, path, msgAndArgs...) -} - -// NoDirExistsf checks whether a directory does not exist in the given path. -// It fails if the path points to an existing _directory_ only. -func (a *Assertions) NoDirExistsf(path string, msg string, args ...interface{}) bool { - if h, ok := a.t.(tHelper); ok { - h.Helper() - } - return NoDirExistsf(a.t, path, msg, args...) -} - -// NoError asserts that a function returned no error (i.e. `nil`). -// -// actualObj, err := SomeFunction() -// if a.NoError(err) { -// assert.Equal(t, expectedObj, actualObj) -// } -func (a *Assertions) NoError(err error, msgAndArgs ...interface{}) bool { - if h, ok := a.t.(tHelper); ok { - h.Helper() - } - return NoError(a.t, err, msgAndArgs...) -} - -// NoErrorf asserts that a function returned no error (i.e. `nil`). -// -// actualObj, err := SomeFunction() -// if a.NoErrorf(err, "error message %s", "formatted") { -// assert.Equal(t, expectedObj, actualObj) -// } -func (a *Assertions) NoErrorf(err error, msg string, args ...interface{}) bool { - if h, ok := a.t.(tHelper); ok { - h.Helper() - } - return NoErrorf(a.t, err, msg, args...) -} - -// NoFileExists checks whether a file does not exist in a given path. It fails -// if the path points to an existing _file_ only. -func (a *Assertions) NoFileExists(path string, msgAndArgs ...interface{}) bool { - if h, ok := a.t.(tHelper); ok { - h.Helper() - } - return NoFileExists(a.t, path, msgAndArgs...) -} - -// NoFileExistsf checks whether a file does not exist in a given path. It fails -// if the path points to an existing _file_ only. -func (a *Assertions) NoFileExistsf(path string, msg string, args ...interface{}) bool { - if h, ok := a.t.(tHelper); ok { - h.Helper() - } - return NoFileExistsf(a.t, path, msg, args...) -} - -// NotContains asserts that the specified string, list(array, slice...) or map does NOT contain the -// specified substring or element. -// -// a.NotContains("Hello World", "Earth") -// a.NotContains(["Hello", "World"], "Earth") -// a.NotContains({"Hello": "World"}, "Earth") -func (a *Assertions) NotContains(s interface{}, contains interface{}, msgAndArgs ...interface{}) bool { - if h, ok := a.t.(tHelper); ok { - h.Helper() - } - return NotContains(a.t, s, contains, msgAndArgs...) -} - -// NotContainsf asserts that the specified string, list(array, slice...) 
or map does NOT contain the -// specified substring or element. -// -// a.NotContainsf("Hello World", "Earth", "error message %s", "formatted") -// a.NotContainsf(["Hello", "World"], "Earth", "error message %s", "formatted") -// a.NotContainsf({"Hello": "World"}, "Earth", "error message %s", "formatted") -func (a *Assertions) NotContainsf(s interface{}, contains interface{}, msg string, args ...interface{}) bool { - if h, ok := a.t.(tHelper); ok { - h.Helper() - } - return NotContainsf(a.t, s, contains, msg, args...) -} - -// NotEmpty asserts that the specified object is NOT empty. I.e. not nil, "", false, 0 or either -// a slice or a channel with len == 0. -// -// if a.NotEmpty(obj) { -// assert.Equal(t, "two", obj[1]) -// } -func (a *Assertions) NotEmpty(object interface{}, msgAndArgs ...interface{}) bool { - if h, ok := a.t.(tHelper); ok { - h.Helper() - } - return NotEmpty(a.t, object, msgAndArgs...) -} - -// NotEmptyf asserts that the specified object is NOT empty. I.e. not nil, "", false, 0 or either -// a slice or a channel with len == 0. -// -// if a.NotEmptyf(obj, "error message %s", "formatted") { -// assert.Equal(t, "two", obj[1]) -// } -func (a *Assertions) NotEmptyf(object interface{}, msg string, args ...interface{}) bool { - if h, ok := a.t.(tHelper); ok { - h.Helper() - } - return NotEmptyf(a.t, object, msg, args...) -} - -// NotEqual asserts that the specified values are NOT equal. -// -// a.NotEqual(obj1, obj2) -// -// Pointer variable equality is determined based on the equality of the -// referenced values (as opposed to the memory addresses). -func (a *Assertions) NotEqual(expected interface{}, actual interface{}, msgAndArgs ...interface{}) bool { - if h, ok := a.t.(tHelper); ok { - h.Helper() - } - return NotEqual(a.t, expected, actual, msgAndArgs...) -} - -// NotEqualValues asserts that two objects are not equal even when converted to the same type -// -// a.NotEqualValues(obj1, obj2) -func (a *Assertions) NotEqualValues(expected interface{}, actual interface{}, msgAndArgs ...interface{}) bool { - if h, ok := a.t.(tHelper); ok { - h.Helper() - } - return NotEqualValues(a.t, expected, actual, msgAndArgs...) -} - -// NotEqualValuesf asserts that two objects are not equal even when converted to the same type -// -// a.NotEqualValuesf(obj1, obj2, "error message %s", "formatted") -func (a *Assertions) NotEqualValuesf(expected interface{}, actual interface{}, msg string, args ...interface{}) bool { - if h, ok := a.t.(tHelper); ok { - h.Helper() - } - return NotEqualValuesf(a.t, expected, actual, msg, args...) -} - -// NotEqualf asserts that the specified values are NOT equal. -// -// a.NotEqualf(obj1, obj2, "error message %s", "formatted") -// -// Pointer variable equality is determined based on the equality of the -// referenced values (as opposed to the memory addresses). -func (a *Assertions) NotEqualf(expected interface{}, actual interface{}, msg string, args ...interface{}) bool { - if h, ok := a.t.(tHelper); ok { - h.Helper() - } - return NotEqualf(a.t, expected, actual, msg, args...) -} - -// NotErrorIs asserts that at none of the errors in err's chain matches target. -// This is a wrapper for errors.Is. -func (a *Assertions) NotErrorIs(err error, target error, msgAndArgs ...interface{}) bool { - if h, ok := a.t.(tHelper); ok { - h.Helper() - } - return NotErrorIs(a.t, err, target, msgAndArgs...) -} - -// NotErrorIsf asserts that at none of the errors in err's chain matches target. -// This is a wrapper for errors.Is. 
-func (a *Assertions) NotErrorIsf(err error, target error, msg string, args ...interface{}) bool { - if h, ok := a.t.(tHelper); ok { - h.Helper() - } - return NotErrorIsf(a.t, err, target, msg, args...) -} - -// NotImplements asserts that an object does not implement the specified interface. -// -// a.NotImplements((*MyInterface)(nil), new(MyObject)) -func (a *Assertions) NotImplements(interfaceObject interface{}, object interface{}, msgAndArgs ...interface{}) bool { - if h, ok := a.t.(tHelper); ok { - h.Helper() - } - return NotImplements(a.t, interfaceObject, object, msgAndArgs...) -} - -// NotImplementsf asserts that an object does not implement the specified interface. -// -// a.NotImplementsf((*MyInterface)(nil), new(MyObject), "error message %s", "formatted") -func (a *Assertions) NotImplementsf(interfaceObject interface{}, object interface{}, msg string, args ...interface{}) bool { - if h, ok := a.t.(tHelper); ok { - h.Helper() - } - return NotImplementsf(a.t, interfaceObject, object, msg, args...) -} - -// NotNil asserts that the specified object is not nil. -// -// a.NotNil(err) -func (a *Assertions) NotNil(object interface{}, msgAndArgs ...interface{}) bool { - if h, ok := a.t.(tHelper); ok { - h.Helper() - } - return NotNil(a.t, object, msgAndArgs...) -} - -// NotNilf asserts that the specified object is not nil. -// -// a.NotNilf(err, "error message %s", "formatted") -func (a *Assertions) NotNilf(object interface{}, msg string, args ...interface{}) bool { - if h, ok := a.t.(tHelper); ok { - h.Helper() - } - return NotNilf(a.t, object, msg, args...) -} - -// NotPanics asserts that the code inside the specified PanicTestFunc does NOT panic. -// -// a.NotPanics(func(){ RemainCalm() }) -func (a *Assertions) NotPanics(f PanicTestFunc, msgAndArgs ...interface{}) bool { - if h, ok := a.t.(tHelper); ok { - h.Helper() - } - return NotPanics(a.t, f, msgAndArgs...) -} - -// NotPanicsf asserts that the code inside the specified PanicTestFunc does NOT panic. -// -// a.NotPanicsf(func(){ RemainCalm() }, "error message %s", "formatted") -func (a *Assertions) NotPanicsf(f PanicTestFunc, msg string, args ...interface{}) bool { - if h, ok := a.t.(tHelper); ok { - h.Helper() - } - return NotPanicsf(a.t, f, msg, args...) -} - -// NotRegexp asserts that a specified regexp does not match a string. -// -// a.NotRegexp(regexp.MustCompile("starts"), "it's starting") -// a.NotRegexp("^start", "it's not starting") -func (a *Assertions) NotRegexp(rx interface{}, str interface{}, msgAndArgs ...interface{}) bool { - if h, ok := a.t.(tHelper); ok { - h.Helper() - } - return NotRegexp(a.t, rx, str, msgAndArgs...) -} - -// NotRegexpf asserts that a specified regexp does not match a string. -// -// a.NotRegexpf(regexp.MustCompile("starts"), "it's starting", "error message %s", "formatted") -// a.NotRegexpf("^start", "it's not starting", "error message %s", "formatted") -func (a *Assertions) NotRegexpf(rx interface{}, str interface{}, msg string, args ...interface{}) bool { - if h, ok := a.t.(tHelper); ok { - h.Helper() - } - return NotRegexpf(a.t, rx, str, msg, args...) -} - -// NotSame asserts that two pointers do not reference the same object. -// -// a.NotSame(ptr1, ptr2) -// -// Both arguments must be pointer variables. Pointer variable sameness is -// determined based on the equality of both type and value. 
-func (a *Assertions) NotSame(expected interface{}, actual interface{}, msgAndArgs ...interface{}) bool { - if h, ok := a.t.(tHelper); ok { - h.Helper() - } - return NotSame(a.t, expected, actual, msgAndArgs...) -} - -// NotSamef asserts that two pointers do not reference the same object. -// -// a.NotSamef(ptr1, ptr2, "error message %s", "formatted") -// -// Both arguments must be pointer variables. Pointer variable sameness is -// determined based on the equality of both type and value. -func (a *Assertions) NotSamef(expected interface{}, actual interface{}, msg string, args ...interface{}) bool { - if h, ok := a.t.(tHelper); ok { - h.Helper() - } - return NotSamef(a.t, expected, actual, msg, args...) -} - -// NotSubset asserts that the specified list(array, slice...) or map does NOT -// contain all elements given in the specified subset list(array, slice...) or -// map. -// -// a.NotSubset([1, 3, 4], [1, 2]) -// a.NotSubset({"x": 1, "y": 2}, {"z": 3}) -func (a *Assertions) NotSubset(list interface{}, subset interface{}, msgAndArgs ...interface{}) bool { - if h, ok := a.t.(tHelper); ok { - h.Helper() - } - return NotSubset(a.t, list, subset, msgAndArgs...) -} - -// NotSubsetf asserts that the specified list(array, slice...) or map does NOT -// contain all elements given in the specified subset list(array, slice...) or -// map. -// -// a.NotSubsetf([1, 3, 4], [1, 2], "error message %s", "formatted") -// a.NotSubsetf({"x": 1, "y": 2}, {"z": 3}, "error message %s", "formatted") -func (a *Assertions) NotSubsetf(list interface{}, subset interface{}, msg string, args ...interface{}) bool { - if h, ok := a.t.(tHelper); ok { - h.Helper() - } - return NotSubsetf(a.t, list, subset, msg, args...) -} - -// NotZero asserts that i is not the zero value for its type. -func (a *Assertions) NotZero(i interface{}, msgAndArgs ...interface{}) bool { - if h, ok := a.t.(tHelper); ok { - h.Helper() - } - return NotZero(a.t, i, msgAndArgs...) -} - -// NotZerof asserts that i is not the zero value for its type. -func (a *Assertions) NotZerof(i interface{}, msg string, args ...interface{}) bool { - if h, ok := a.t.(tHelper); ok { - h.Helper() - } - return NotZerof(a.t, i, msg, args...) -} - -// Panics asserts that the code inside the specified PanicTestFunc panics. -// -// a.Panics(func(){ GoCrazy() }) -func (a *Assertions) Panics(f PanicTestFunc, msgAndArgs ...interface{}) bool { - if h, ok := a.t.(tHelper); ok { - h.Helper() - } - return Panics(a.t, f, msgAndArgs...) -} - -// PanicsWithError asserts that the code inside the specified PanicTestFunc -// panics, and that the recovered panic value is an error that satisfies the -// EqualError comparison. -// -// a.PanicsWithError("crazy error", func(){ GoCrazy() }) -func (a *Assertions) PanicsWithError(errString string, f PanicTestFunc, msgAndArgs ...interface{}) bool { - if h, ok := a.t.(tHelper); ok { - h.Helper() - } - return PanicsWithError(a.t, errString, f, msgAndArgs...) -} - -// PanicsWithErrorf asserts that the code inside the specified PanicTestFunc -// panics, and that the recovered panic value is an error that satisfies the -// EqualError comparison. -// -// a.PanicsWithErrorf("crazy error", func(){ GoCrazy() }, "error message %s", "formatted") -func (a *Assertions) PanicsWithErrorf(errString string, f PanicTestFunc, msg string, args ...interface{}) bool { - if h, ok := a.t.(tHelper); ok { - h.Helper() - } - return PanicsWithErrorf(a.t, errString, f, msg, args...) 
-} - -// PanicsWithValue asserts that the code inside the specified PanicTestFunc panics, and that -// the recovered panic value equals the expected panic value. -// -// a.PanicsWithValue("crazy error", func(){ GoCrazy() }) -func (a *Assertions) PanicsWithValue(expected interface{}, f PanicTestFunc, msgAndArgs ...interface{}) bool { - if h, ok := a.t.(tHelper); ok { - h.Helper() - } - return PanicsWithValue(a.t, expected, f, msgAndArgs...) -} - -// PanicsWithValuef asserts that the code inside the specified PanicTestFunc panics, and that -// the recovered panic value equals the expected panic value. -// -// a.PanicsWithValuef("crazy error", func(){ GoCrazy() }, "error message %s", "formatted") -func (a *Assertions) PanicsWithValuef(expected interface{}, f PanicTestFunc, msg string, args ...interface{}) bool { - if h, ok := a.t.(tHelper); ok { - h.Helper() - } - return PanicsWithValuef(a.t, expected, f, msg, args...) -} - -// Panicsf asserts that the code inside the specified PanicTestFunc panics. -// -// a.Panicsf(func(){ GoCrazy() }, "error message %s", "formatted") -func (a *Assertions) Panicsf(f PanicTestFunc, msg string, args ...interface{}) bool { - if h, ok := a.t.(tHelper); ok { - h.Helper() - } - return Panicsf(a.t, f, msg, args...) -} - -// Positive asserts that the specified element is positive -// -// a.Positive(1) -// a.Positive(1.23) -func (a *Assertions) Positive(e interface{}, msgAndArgs ...interface{}) bool { - if h, ok := a.t.(tHelper); ok { - h.Helper() - } - return Positive(a.t, e, msgAndArgs...) -} - -// Positivef asserts that the specified element is positive -// -// a.Positivef(1, "error message %s", "formatted") -// a.Positivef(1.23, "error message %s", "formatted") -func (a *Assertions) Positivef(e interface{}, msg string, args ...interface{}) bool { - if h, ok := a.t.(tHelper); ok { - h.Helper() - } - return Positivef(a.t, e, msg, args...) -} - -// Regexp asserts that a specified regexp matches a string. -// -// a.Regexp(regexp.MustCompile("start"), "it's starting") -// a.Regexp("start...$", "it's not starting") -func (a *Assertions) Regexp(rx interface{}, str interface{}, msgAndArgs ...interface{}) bool { - if h, ok := a.t.(tHelper); ok { - h.Helper() - } - return Regexp(a.t, rx, str, msgAndArgs...) -} - -// Regexpf asserts that a specified regexp matches a string. -// -// a.Regexpf(regexp.MustCompile("start"), "it's starting", "error message %s", "formatted") -// a.Regexpf("start...$", "it's not starting", "error message %s", "formatted") -func (a *Assertions) Regexpf(rx interface{}, str interface{}, msg string, args ...interface{}) bool { - if h, ok := a.t.(tHelper); ok { - h.Helper() - } - return Regexpf(a.t, rx, str, msg, args...) -} - -// Same asserts that two pointers reference the same object. -// -// a.Same(ptr1, ptr2) -// -// Both arguments must be pointer variables. Pointer variable sameness is -// determined based on the equality of both type and value. -func (a *Assertions) Same(expected interface{}, actual interface{}, msgAndArgs ...interface{}) bool { - if h, ok := a.t.(tHelper); ok { - h.Helper() - } - return Same(a.t, expected, actual, msgAndArgs...) -} - -// Samef asserts that two pointers reference the same object. -// -// a.Samef(ptr1, ptr2, "error message %s", "formatted") -// -// Both arguments must be pointer variables. Pointer variable sameness is -// determined based on the equality of both type and value. 
-func (a *Assertions) Samef(expected interface{}, actual interface{}, msg string, args ...interface{}) bool { - if h, ok := a.t.(tHelper); ok { - h.Helper() - } - return Samef(a.t, expected, actual, msg, args...) -} - -// Subset asserts that the specified list(array, slice...) or map contains all -// elements given in the specified subset list(array, slice...) or map. -// -// a.Subset([1, 2, 3], [1, 2]) -// a.Subset({"x": 1, "y": 2}, {"x": 1}) -func (a *Assertions) Subset(list interface{}, subset interface{}, msgAndArgs ...interface{}) bool { - if h, ok := a.t.(tHelper); ok { - h.Helper() - } - return Subset(a.t, list, subset, msgAndArgs...) -} - -// Subsetf asserts that the specified list(array, slice...) or map contains all -// elements given in the specified subset list(array, slice...) or map. -// -// a.Subsetf([1, 2, 3], [1, 2], "error message %s", "formatted") -// a.Subsetf({"x": 1, "y": 2}, {"x": 1}, "error message %s", "formatted") -func (a *Assertions) Subsetf(list interface{}, subset interface{}, msg string, args ...interface{}) bool { - if h, ok := a.t.(tHelper); ok { - h.Helper() - } - return Subsetf(a.t, list, subset, msg, args...) -} - -// True asserts that the specified value is true. -// -// a.True(myBool) -func (a *Assertions) True(value bool, msgAndArgs ...interface{}) bool { - if h, ok := a.t.(tHelper); ok { - h.Helper() - } - return True(a.t, value, msgAndArgs...) -} - -// Truef asserts that the specified value is true. -// -// a.Truef(myBool, "error message %s", "formatted") -func (a *Assertions) Truef(value bool, msg string, args ...interface{}) bool { - if h, ok := a.t.(tHelper); ok { - h.Helper() - } - return Truef(a.t, value, msg, args...) -} - -// WithinDuration asserts that the two times are within duration delta of each other. -// -// a.WithinDuration(time.Now(), time.Now(), 10*time.Second) -func (a *Assertions) WithinDuration(expected time.Time, actual time.Time, delta time.Duration, msgAndArgs ...interface{}) bool { - if h, ok := a.t.(tHelper); ok { - h.Helper() - } - return WithinDuration(a.t, expected, actual, delta, msgAndArgs...) -} - -// WithinDurationf asserts that the two times are within duration delta of each other. -// -// a.WithinDurationf(time.Now(), time.Now(), 10*time.Second, "error message %s", "formatted") -func (a *Assertions) WithinDurationf(expected time.Time, actual time.Time, delta time.Duration, msg string, args ...interface{}) bool { - if h, ok := a.t.(tHelper); ok { - h.Helper() - } - return WithinDurationf(a.t, expected, actual, delta, msg, args...) -} - -// WithinRange asserts that a time is within a time range (inclusive). -// -// a.WithinRange(time.Now(), time.Now().Add(-time.Second), time.Now().Add(time.Second)) -func (a *Assertions) WithinRange(actual time.Time, start time.Time, end time.Time, msgAndArgs ...interface{}) bool { - if h, ok := a.t.(tHelper); ok { - h.Helper() - } - return WithinRange(a.t, actual, start, end, msgAndArgs...) -} - -// WithinRangef asserts that a time is within a time range (inclusive). -// -// a.WithinRangef(time.Now(), time.Now().Add(-time.Second), time.Now().Add(time.Second), "error message %s", "formatted") -func (a *Assertions) WithinRangef(actual time.Time, start time.Time, end time.Time, msg string, args ...interface{}) bool { - if h, ok := a.t.(tHelper); ok { - h.Helper() - } - return WithinRangef(a.t, actual, start, end, msg, args...) -} - -// YAMLEq asserts that two YAML strings are equivalent. 
-func (a *Assertions) YAMLEq(expected string, actual string, msgAndArgs ...interface{}) bool { - if h, ok := a.t.(tHelper); ok { - h.Helper() - } - return YAMLEq(a.t, expected, actual, msgAndArgs...) -} - -// YAMLEqf asserts that two YAML strings are equivalent. -func (a *Assertions) YAMLEqf(expected string, actual string, msg string, args ...interface{}) bool { - if h, ok := a.t.(tHelper); ok { - h.Helper() - } - return YAMLEqf(a.t, expected, actual, msg, args...) -} - -// Zero asserts that i is the zero value for its type. -func (a *Assertions) Zero(i interface{}, msgAndArgs ...interface{}) bool { - if h, ok := a.t.(tHelper); ok { - h.Helper() - } - return Zero(a.t, i, msgAndArgs...) -} - -// Zerof asserts that i is the zero value for its type. -func (a *Assertions) Zerof(i interface{}, msg string, args ...interface{}) bool { - if h, ok := a.t.(tHelper); ok { - h.Helper() - } - return Zerof(a.t, i, msg, args...) -} diff --git a/vendor/github.com/stretchr/testify/assert/assertion_forward.go.tmpl b/vendor/github.com/stretchr/testify/assert/assertion_forward.go.tmpl deleted file mode 100644 index 188bb9e17..000000000 --- a/vendor/github.com/stretchr/testify/assert/assertion_forward.go.tmpl +++ /dev/null @@ -1,5 +0,0 @@ -{{.CommentWithoutT "a"}} -func (a *Assertions) {{.DocInfo.Name}}({{.Params}}) bool { - if h, ok := a.t.(tHelper); ok { h.Helper() } - return {{.DocInfo.Name}}(a.t, {{.ForwardedParams}}) -} diff --git a/vendor/github.com/stretchr/testify/assert/assertion_order.go b/vendor/github.com/stretchr/testify/assert/assertion_order.go deleted file mode 100644 index 00df62a05..000000000 --- a/vendor/github.com/stretchr/testify/assert/assertion_order.go +++ /dev/null @@ -1,81 +0,0 @@ -package assert - -import ( - "fmt" - "reflect" -) - -// isOrdered checks that collection contains orderable elements. -func isOrdered(t TestingT, object interface{}, allowedComparesResults []CompareType, failMessage string, msgAndArgs ...interface{}) bool { - objKind := reflect.TypeOf(object).Kind() - if objKind != reflect.Slice && objKind != reflect.Array { - return false - } - - objValue := reflect.ValueOf(object) - objLen := objValue.Len() - - if objLen <= 1 { - return true - } - - value := objValue.Index(0) - valueInterface := value.Interface() - firstValueKind := value.Kind() - - for i := 1; i < objLen; i++ { - prevValue := value - prevValueInterface := valueInterface - - value = objValue.Index(i) - valueInterface = value.Interface() - - compareResult, isComparable := compare(prevValueInterface, valueInterface, firstValueKind) - - if !isComparable { - return Fail(t, fmt.Sprintf("Can not compare type \"%s\" and \"%s\"", reflect.TypeOf(value), reflect.TypeOf(prevValue)), msgAndArgs...) - } - - if !containsValue(allowedComparesResults, compareResult) { - return Fail(t, fmt.Sprintf(failMessage, prevValue, value), msgAndArgs...) - } - } - - return true -} - -// IsIncreasing asserts that the collection is increasing -// -// assert.IsIncreasing(t, []int{1, 2, 3}) -// assert.IsIncreasing(t, []float{1, 2}) -// assert.IsIncreasing(t, []string{"a", "b"}) -func IsIncreasing(t TestingT, object interface{}, msgAndArgs ...interface{}) bool { - return isOrdered(t, object, []CompareType{compareLess}, "\"%v\" is not less than \"%v\"", msgAndArgs...) 
-} - -// IsNonIncreasing asserts that the collection is not increasing -// -// assert.IsNonIncreasing(t, []int{2, 1, 1}) -// assert.IsNonIncreasing(t, []float{2, 1}) -// assert.IsNonIncreasing(t, []string{"b", "a"}) -func IsNonIncreasing(t TestingT, object interface{}, msgAndArgs ...interface{}) bool { - return isOrdered(t, object, []CompareType{compareEqual, compareGreater}, "\"%v\" is not greater than or equal to \"%v\"", msgAndArgs...) -} - -// IsDecreasing asserts that the collection is decreasing -// -// assert.IsDecreasing(t, []int{2, 1, 0}) -// assert.IsDecreasing(t, []float{2, 1}) -// assert.IsDecreasing(t, []string{"b", "a"}) -func IsDecreasing(t TestingT, object interface{}, msgAndArgs ...interface{}) bool { - return isOrdered(t, object, []CompareType{compareGreater}, "\"%v\" is not greater than \"%v\"", msgAndArgs...) -} - -// IsNonDecreasing asserts that the collection is not decreasing -// -// assert.IsNonDecreasing(t, []int{1, 1, 2}) -// assert.IsNonDecreasing(t, []float{1, 2}) -// assert.IsNonDecreasing(t, []string{"a", "b"}) -func IsNonDecreasing(t TestingT, object interface{}, msgAndArgs ...interface{}) bool { - return isOrdered(t, object, []CompareType{compareLess, compareEqual}, "\"%v\" is not less than or equal to \"%v\"", msgAndArgs...) -} diff --git a/vendor/github.com/stretchr/testify/assert/assertions.go b/vendor/github.com/stretchr/testify/assert/assertions.go deleted file mode 100644 index 0b7570f21..000000000 --- a/vendor/github.com/stretchr/testify/assert/assertions.go +++ /dev/null @@ -1,2105 +0,0 @@ -package assert - -import ( - "bufio" - "bytes" - "encoding/json" - "errors" - "fmt" - "math" - "os" - "reflect" - "regexp" - "runtime" - "runtime/debug" - "strings" - "time" - "unicode" - "unicode/utf8" - - "github.com/davecgh/go-spew/spew" - "github.com/pmezard/go-difflib/difflib" - "gopkg.in/yaml.v3" -) - -//go:generate sh -c "cd ../_codegen && go build && cd - && ../_codegen/_codegen -output-package=assert -template=assertion_format.go.tmpl" - -// TestingT is an interface wrapper around *testing.T -type TestingT interface { - Errorf(format string, args ...interface{}) -} - -// ComparisonAssertionFunc is a common function prototype when comparing two values. Can be useful -// for table driven tests. -type ComparisonAssertionFunc func(TestingT, interface{}, interface{}, ...interface{}) bool - -// ValueAssertionFunc is a common function prototype when validating a single value. Can be useful -// for table driven tests. -type ValueAssertionFunc func(TestingT, interface{}, ...interface{}) bool - -// BoolAssertionFunc is a common function prototype when validating a bool value. Can be useful -// for table driven tests. -type BoolAssertionFunc func(TestingT, bool, ...interface{}) bool - -// ErrorAssertionFunc is a common function prototype when validating an error value. Can be useful -// for table driven tests. -type ErrorAssertionFunc func(TestingT, error, ...interface{}) bool - -// Comparison is a custom function that returns true on success and false on failure -type Comparison func() (success bool) - -/* - Helper functions -*/ - -// ObjectsAreEqual determines if two objects are considered equal. -// -// This function does no assertion of any kind. 
-func ObjectsAreEqual(expected, actual interface{}) bool { - if expected == nil || actual == nil { - return expected == actual - } - - exp, ok := expected.([]byte) - if !ok { - return reflect.DeepEqual(expected, actual) - } - - act, ok := actual.([]byte) - if !ok { - return false - } - if exp == nil || act == nil { - return exp == nil && act == nil - } - return bytes.Equal(exp, act) -} - -// copyExportedFields iterates downward through nested data structures and creates a copy -// that only contains the exported struct fields. -func copyExportedFields(expected interface{}) interface{} { - if isNil(expected) { - return expected - } - - expectedType := reflect.TypeOf(expected) - expectedKind := expectedType.Kind() - expectedValue := reflect.ValueOf(expected) - - switch expectedKind { - case reflect.Struct: - result := reflect.New(expectedType).Elem() - for i := 0; i < expectedType.NumField(); i++ { - field := expectedType.Field(i) - isExported := field.IsExported() - if isExported { - fieldValue := expectedValue.Field(i) - if isNil(fieldValue) || isNil(fieldValue.Interface()) { - continue - } - newValue := copyExportedFields(fieldValue.Interface()) - result.Field(i).Set(reflect.ValueOf(newValue)) - } - } - return result.Interface() - - case reflect.Ptr: - result := reflect.New(expectedType.Elem()) - unexportedRemoved := copyExportedFields(expectedValue.Elem().Interface()) - result.Elem().Set(reflect.ValueOf(unexportedRemoved)) - return result.Interface() - - case reflect.Array, reflect.Slice: - var result reflect.Value - if expectedKind == reflect.Array { - result = reflect.New(reflect.ArrayOf(expectedValue.Len(), expectedType.Elem())).Elem() - } else { - result = reflect.MakeSlice(expectedType, expectedValue.Len(), expectedValue.Len()) - } - for i := 0; i < expectedValue.Len(); i++ { - index := expectedValue.Index(i) - if isNil(index) { - continue - } - unexportedRemoved := copyExportedFields(index.Interface()) - result.Index(i).Set(reflect.ValueOf(unexportedRemoved)) - } - return result.Interface() - - case reflect.Map: - result := reflect.MakeMap(expectedType) - for _, k := range expectedValue.MapKeys() { - index := expectedValue.MapIndex(k) - unexportedRemoved := copyExportedFields(index.Interface()) - result.SetMapIndex(k, reflect.ValueOf(unexportedRemoved)) - } - return result.Interface() - - default: - return expected - } -} - -// ObjectsExportedFieldsAreEqual determines if the exported (public) fields of two objects are -// considered equal. This comparison of only exported fields is applied recursively to nested data -// structures. -// -// This function does no assertion of any kind. -// -// Deprecated: Use [EqualExportedValues] instead. -func ObjectsExportedFieldsAreEqual(expected, actual interface{}) bool { - expectedCleaned := copyExportedFields(expected) - actualCleaned := copyExportedFields(actual) - return ObjectsAreEqualValues(expectedCleaned, actualCleaned) -} - -// ObjectsAreEqualValues gets whether two objects are equal, or if their -// values are equal. 
-func ObjectsAreEqualValues(expected, actual interface{}) bool { - if ObjectsAreEqual(expected, actual) { - return true - } - - expectedValue := reflect.ValueOf(expected) - actualValue := reflect.ValueOf(actual) - if !expectedValue.IsValid() || !actualValue.IsValid() { - return false - } - - expectedType := expectedValue.Type() - actualType := actualValue.Type() - if !expectedType.ConvertibleTo(actualType) { - return false - } - - if !isNumericType(expectedType) || !isNumericType(actualType) { - // Attempt comparison after type conversion - return reflect.DeepEqual( - expectedValue.Convert(actualType).Interface(), actual, - ) - } - - // If BOTH values are numeric, there are chances of false positives due - // to overflow or underflow. So, we need to make sure to always convert - // the smaller type to a larger type before comparing. - if expectedType.Size() >= actualType.Size() { - return actualValue.Convert(expectedType).Interface() == expected - } - - return expectedValue.Convert(actualType).Interface() == actual -} - -// isNumericType returns true if the type is one of: -// int, int8, int16, int32, int64, uint, uint8, uint16, uint32, uint64, -// float32, float64, complex64, complex128 -func isNumericType(t reflect.Type) bool { - return t.Kind() >= reflect.Int && t.Kind() <= reflect.Complex128 -} - -/* CallerInfo is necessary because the assert functions use the testing object -internally, causing it to print the file:line of the assert method, rather than where -the problem actually occurred in calling code.*/ - -// CallerInfo returns an array of strings containing the file and line number -// of each stack frame leading from the current test to the assert call that -// failed. -func CallerInfo() []string { - - var pc uintptr - var ok bool - var file string - var line int - var name string - - callers := []string{} - for i := 0; ; i++ { - pc, file, line, ok = runtime.Caller(i) - if !ok { - // The breaks below failed to terminate the loop, and we ran off the - // end of the call stack. - break - } - - // This is a huge edge case, but it will panic if this is the case, see #180 - if file == "" { - break - } - - f := runtime.FuncForPC(pc) - if f == nil { - break - } - name = f.Name() - - // testing.tRunner is the standard library function that calls - // tests. Subtests are called directly by tRunner, without going through - // the Test/Benchmark/Example function that contains the t.Run calls, so - // with subtests we should break when we hit tRunner, without adding it - // to the list of callers. - if name == "testing.tRunner" { - break - } - - parts := strings.Split(file, "/") - if len(parts) > 1 { - filename := parts[len(parts)-1] - dir := parts[len(parts)-2] - if (dir != "assert" && dir != "mock" && dir != "require") || filename == "mock_test.go" { - callers = append(callers, fmt.Sprintf("%s:%d", file, line)) - } - } - - // Drop the package - segments := strings.Split(name, ".") - name = segments[len(segments)-1] - if isTest(name, "Test") || - isTest(name, "Benchmark") || - isTest(name, "Example") { - break - } - } - - return callers -} - -// Stolen from the `go test` tool. -// isTest tells whether name looks like a test (or benchmark, according to prefix). -// It is a Test (say) if there is a character after Test that is not a lower-case letter. -// We don't want TesticularCancer. 
-func isTest(name, prefix string) bool { - if !strings.HasPrefix(name, prefix) { - return false - } - if len(name) == len(prefix) { // "Test" is ok - return true - } - r, _ := utf8.DecodeRuneInString(name[len(prefix):]) - return !unicode.IsLower(r) -} - -func messageFromMsgAndArgs(msgAndArgs ...interface{}) string { - if len(msgAndArgs) == 0 || msgAndArgs == nil { - return "" - } - if len(msgAndArgs) == 1 { - msg := msgAndArgs[0] - if msgAsStr, ok := msg.(string); ok { - return msgAsStr - } - return fmt.Sprintf("%+v", msg) - } - if len(msgAndArgs) > 1 { - return fmt.Sprintf(msgAndArgs[0].(string), msgAndArgs[1:]...) - } - return "" -} - -// Aligns the provided message so that all lines after the first line start at the same location as the first line. -// Assumes that the first line starts at the correct location (after carriage return, tab, label, spacer and tab). -// The longestLabelLen parameter specifies the length of the longest label in the output (required because this is the -// basis on which the alignment occurs). -func indentMessageLines(message string, longestLabelLen int) string { - outBuf := new(bytes.Buffer) - - for i, scanner := 0, bufio.NewScanner(strings.NewReader(message)); scanner.Scan(); i++ { - // no need to align first line because it starts at the correct location (after the label) - if i != 0 { - // append alignLen+1 spaces to align with "{{longestLabel}}:" before adding tab - outBuf.WriteString("\n\t" + strings.Repeat(" ", longestLabelLen+1) + "\t") - } - outBuf.WriteString(scanner.Text()) - } - - return outBuf.String() -} - -type failNower interface { - FailNow() -} - -// FailNow fails test -func FailNow(t TestingT, failureMessage string, msgAndArgs ...interface{}) bool { - if h, ok := t.(tHelper); ok { - h.Helper() - } - Fail(t, failureMessage, msgAndArgs...) - - // We cannot extend TestingT with FailNow() and - // maintain backwards compatibility, so we fallback - // to panicking when FailNow is not available in - // TestingT. - // See issue #263 - - if t, ok := t.(failNower); ok { - t.FailNow() - } else { - panic("test failed and t is missing `FailNow()`") - } - return false -} - -// Fail reports a failure through -func Fail(t TestingT, failureMessage string, msgAndArgs ...interface{}) bool { - if h, ok := t.(tHelper); ok { - h.Helper() - } - content := []labeledContent{ - {"Error Trace", strings.Join(CallerInfo(), "\n\t\t\t")}, - {"Error", failureMessage}, - } - - // Add test name if the Go version supports it - if n, ok := t.(interface { - Name() string - }); ok { - content = append(content, labeledContent{"Test", n.Name()}) - } - - message := messageFromMsgAndArgs(msgAndArgs...) - if len(message) > 0 { - content = append(content, labeledContent{"Messages", message}) - } - - t.Errorf("\n%s", ""+labeledOutput(content...)) - - return false -} - -type labeledContent struct { - label string - content string -} - -// labeledOutput returns a string consisting of the provided labeledContent. Each labeled output is appended in the following manner: -// -// \t{{label}}:{{align_spaces}}\t{{content}}\n -// -// The initial carriage return is required to undo/erase any padding added by testing.T.Errorf. The "\t{{label}}:" is for the label. -// If a label is shorter than the longest label provided, padding spaces are added to make all the labels match in length. Once this -// alignment is achieved, "\t{{content}}\n" is added for the output. 
-// -// If the content of the labeledOutput contains line breaks, the subsequent lines are aligned so that they start at the same location as the first line. -func labeledOutput(content ...labeledContent) string { - longestLabel := 0 - for _, v := range content { - if len(v.label) > longestLabel { - longestLabel = len(v.label) - } - } - var output string - for _, v := range content { - output += "\t" + v.label + ":" + strings.Repeat(" ", longestLabel-len(v.label)) + "\t" + indentMessageLines(v.content, longestLabel) + "\n" - } - return output -} - -// Implements asserts that an object is implemented by the specified interface. -// -// assert.Implements(t, (*MyInterface)(nil), new(MyObject)) -func Implements(t TestingT, interfaceObject interface{}, object interface{}, msgAndArgs ...interface{}) bool { - if h, ok := t.(tHelper); ok { - h.Helper() - } - interfaceType := reflect.TypeOf(interfaceObject).Elem() - - if object == nil { - return Fail(t, fmt.Sprintf("Cannot check if nil implements %v", interfaceType), msgAndArgs...) - } - if !reflect.TypeOf(object).Implements(interfaceType) { - return Fail(t, fmt.Sprintf("%T must implement %v", object, interfaceType), msgAndArgs...) - } - - return true -} - -// NotImplements asserts that an object does not implement the specified interface. -// -// assert.NotImplements(t, (*MyInterface)(nil), new(MyObject)) -func NotImplements(t TestingT, interfaceObject interface{}, object interface{}, msgAndArgs ...interface{}) bool { - if h, ok := t.(tHelper); ok { - h.Helper() - } - interfaceType := reflect.TypeOf(interfaceObject).Elem() - - if object == nil { - return Fail(t, fmt.Sprintf("Cannot check if nil does not implement %v", interfaceType), msgAndArgs...) - } - if reflect.TypeOf(object).Implements(interfaceType) { - return Fail(t, fmt.Sprintf("%T implements %v", object, interfaceType), msgAndArgs...) - } - - return true -} - -// IsType asserts that the specified objects are of the same type. -func IsType(t TestingT, expectedType interface{}, object interface{}, msgAndArgs ...interface{}) bool { - if h, ok := t.(tHelper); ok { - h.Helper() - } - - if !ObjectsAreEqual(reflect.TypeOf(object), reflect.TypeOf(expectedType)) { - return Fail(t, fmt.Sprintf("Object expected to be of type %v, but was %v", reflect.TypeOf(expectedType), reflect.TypeOf(object)), msgAndArgs...) - } - - return true -} - -// Equal asserts that two objects are equal. -// -// assert.Equal(t, 123, 123) -// -// Pointer variable equality is determined based on the equality of the -// referenced values (as opposed to the memory addresses). Function equality -// cannot be determined and will always fail. -func Equal(t TestingT, expected, actual interface{}, msgAndArgs ...interface{}) bool { - if h, ok := t.(tHelper); ok { - h.Helper() - } - if err := validateEqualArgs(expected, actual); err != nil { - return Fail(t, fmt.Sprintf("Invalid operation: %#v == %#v (%s)", - expected, actual, err), msgAndArgs...) - } - - if !ObjectsAreEqual(expected, actual) { - diff := diff(expected, actual) - expected, actual = formatUnequalValues(expected, actual) - return Fail(t, fmt.Sprintf("Not equal: \n"+ - "expected: %s\n"+ - "actual : %s%s", expected, actual, diff), msgAndArgs...) - } - - return true - -} - -// validateEqualArgs checks whether provided arguments can be safely used in the -// Equal/NotEqual functions. 
-func validateEqualArgs(expected, actual interface{}) error { - if expected == nil && actual == nil { - return nil - } - - if isFunction(expected) || isFunction(actual) { - return errors.New("cannot take func type as argument") - } - return nil -} - -// Same asserts that two pointers reference the same object. -// -// assert.Same(t, ptr1, ptr2) -// -// Both arguments must be pointer variables. Pointer variable sameness is -// determined based on the equality of both type and value. -func Same(t TestingT, expected, actual interface{}, msgAndArgs ...interface{}) bool { - if h, ok := t.(tHelper); ok { - h.Helper() - } - - if !samePointers(expected, actual) { - return Fail(t, fmt.Sprintf("Not same: \n"+ - "expected: %p %#v\n"+ - "actual : %p %#v", expected, expected, actual, actual), msgAndArgs...) - } - - return true -} - -// NotSame asserts that two pointers do not reference the same object. -// -// assert.NotSame(t, ptr1, ptr2) -// -// Both arguments must be pointer variables. Pointer variable sameness is -// determined based on the equality of both type and value. -func NotSame(t TestingT, expected, actual interface{}, msgAndArgs ...interface{}) bool { - if h, ok := t.(tHelper); ok { - h.Helper() - } - - if samePointers(expected, actual) { - return Fail(t, fmt.Sprintf( - "Expected and actual point to the same object: %p %#v", - expected, expected), msgAndArgs...) - } - return true -} - -// samePointers compares two generic interface objects and returns whether -// they point to the same object -func samePointers(first, second interface{}) bool { - firstPtr, secondPtr := reflect.ValueOf(first), reflect.ValueOf(second) - if firstPtr.Kind() != reflect.Ptr || secondPtr.Kind() != reflect.Ptr { - return false - } - - firstType, secondType := reflect.TypeOf(first), reflect.TypeOf(second) - if firstType != secondType { - return false - } - - // compare pointer addresses - return first == second -} - -// formatUnequalValues takes two values of arbitrary types and returns string -// representations appropriate to be presented to the user. -// -// If the values are not of like type, the returned strings will be prefixed -// with the type name, and the value will be enclosed in parentheses similar -// to a type conversion in the Go grammar. -func formatUnequalValues(expected, actual interface{}) (e string, a string) { - if reflect.TypeOf(expected) != reflect.TypeOf(actual) { - return fmt.Sprintf("%T(%s)", expected, truncatingFormat(expected)), - fmt.Sprintf("%T(%s)", actual, truncatingFormat(actual)) - } - switch expected.(type) { - case time.Duration: - return fmt.Sprintf("%v", expected), fmt.Sprintf("%v", actual) - } - return truncatingFormat(expected), truncatingFormat(actual) -} - -// truncatingFormat formats the data and truncates it if it's too long. -// -// This helps keep formatted error messages lines from exceeding the -// bufio.MaxScanTokenSize max line length that the go testing framework imposes. -func truncatingFormat(data interface{}) string { - value := fmt.Sprintf("%#v", data) - max := bufio.MaxScanTokenSize - 100 // Give us some space the type info too if needed. - if len(value) > max { - value = value[0:max] + "<... truncated>" - } - return value -} - -// EqualValues asserts that two objects are equal or convertible to the same types -// and equal. 
-// -// assert.EqualValues(t, uint32(123), int32(123)) -func EqualValues(t TestingT, expected, actual interface{}, msgAndArgs ...interface{}) bool { - if h, ok := t.(tHelper); ok { - h.Helper() - } - - if !ObjectsAreEqualValues(expected, actual) { - diff := diff(expected, actual) - expected, actual = formatUnequalValues(expected, actual) - return Fail(t, fmt.Sprintf("Not equal: \n"+ - "expected: %s\n"+ - "actual : %s%s", expected, actual, diff), msgAndArgs...) - } - - return true - -} - -// EqualExportedValues asserts that the types of two objects are equal and their public -// fields are also equal. This is useful for comparing structs that have private fields -// that could potentially differ. -// -// type S struct { -// Exported int -// notExported int -// } -// assert.EqualExportedValues(t, S{1, 2}, S{1, 3}) => true -// assert.EqualExportedValues(t, S{1, 2}, S{2, 3}) => false -func EqualExportedValues(t TestingT, expected, actual interface{}, msgAndArgs ...interface{}) bool { - if h, ok := t.(tHelper); ok { - h.Helper() - } - - aType := reflect.TypeOf(expected) - bType := reflect.TypeOf(actual) - - if aType != bType { - return Fail(t, fmt.Sprintf("Types expected to match exactly\n\t%v != %v", aType, bType), msgAndArgs...) - } - - if aType.Kind() == reflect.Ptr { - aType = aType.Elem() - } - if bType.Kind() == reflect.Ptr { - bType = bType.Elem() - } - - if aType.Kind() != reflect.Struct { - return Fail(t, fmt.Sprintf("Types expected to both be struct or pointer to struct \n\t%v != %v", aType.Kind(), reflect.Struct), msgAndArgs...) - } - - if bType.Kind() != reflect.Struct { - return Fail(t, fmt.Sprintf("Types expected to both be struct or pointer to struct \n\t%v != %v", bType.Kind(), reflect.Struct), msgAndArgs...) - } - - expected = copyExportedFields(expected) - actual = copyExportedFields(actual) - - if !ObjectsAreEqualValues(expected, actual) { - diff := diff(expected, actual) - expected, actual = formatUnequalValues(expected, actual) - return Fail(t, fmt.Sprintf("Not equal (comparing only exported fields): \n"+ - "expected: %s\n"+ - "actual : %s%s", expected, actual, diff), msgAndArgs...) - } - - return true -} - -// Exactly asserts that two objects are equal in value and type. -// -// assert.Exactly(t, int32(123), int64(123)) -func Exactly(t TestingT, expected, actual interface{}, msgAndArgs ...interface{}) bool { - if h, ok := t.(tHelper); ok { - h.Helper() - } - - aType := reflect.TypeOf(expected) - bType := reflect.TypeOf(actual) - - if aType != bType { - return Fail(t, fmt.Sprintf("Types expected to match exactly\n\t%v != %v", aType, bType), msgAndArgs...) - } - - return Equal(t, expected, actual, msgAndArgs...) - -} - -// NotNil asserts that the specified object is not nil. -// -// assert.NotNil(t, err) -func NotNil(t TestingT, object interface{}, msgAndArgs ...interface{}) bool { - if !isNil(object) { - return true - } - if h, ok := t.(tHelper); ok { - h.Helper() - } - return Fail(t, "Expected value not to be nil.", msgAndArgs...) -} - -// isNil checks if a specified object is nil or not, without Failing. -func isNil(object interface{}) bool { - if object == nil { - return true - } - - value := reflect.ValueOf(object) - switch value.Kind() { - case - reflect.Chan, reflect.Func, - reflect.Interface, reflect.Map, - reflect.Ptr, reflect.Slice, reflect.UnsafePointer: - - return value.IsNil() - } - - return false -} - -// Nil asserts that the specified object is nil. 
-// -// assert.Nil(t, err) -func Nil(t TestingT, object interface{}, msgAndArgs ...interface{}) bool { - if isNil(object) { - return true - } - if h, ok := t.(tHelper); ok { - h.Helper() - } - return Fail(t, fmt.Sprintf("Expected nil, but got: %#v", object), msgAndArgs...) -} - -// isEmpty gets whether the specified object is considered empty or not. -func isEmpty(object interface{}) bool { - - // get nil case out of the way - if object == nil { - return true - } - - objValue := reflect.ValueOf(object) - - switch objValue.Kind() { - // collection types are empty when they have no element - case reflect.Chan, reflect.Map, reflect.Slice: - return objValue.Len() == 0 - // pointers are empty if nil or if the value they point to is empty - case reflect.Ptr: - if objValue.IsNil() { - return true - } - deref := objValue.Elem().Interface() - return isEmpty(deref) - // for all other types, compare against the zero value - // array types are empty when they match their zero-initialized state - default: - zero := reflect.Zero(objValue.Type()) - return reflect.DeepEqual(object, zero.Interface()) - } -} - -// Empty asserts that the specified object is empty. I.e. nil, "", false, 0 or either -// a slice or a channel with len == 0. -// -// assert.Empty(t, obj) -func Empty(t TestingT, object interface{}, msgAndArgs ...interface{}) bool { - pass := isEmpty(object) - if !pass { - if h, ok := t.(tHelper); ok { - h.Helper() - } - Fail(t, fmt.Sprintf("Should be empty, but was %v", object), msgAndArgs...) - } - - return pass - -} - -// NotEmpty asserts that the specified object is NOT empty. I.e. not nil, "", false, 0 or either -// a slice or a channel with len == 0. -// -// if assert.NotEmpty(t, obj) { -// assert.Equal(t, "two", obj[1]) -// } -func NotEmpty(t TestingT, object interface{}, msgAndArgs ...interface{}) bool { - pass := !isEmpty(object) - if !pass { - if h, ok := t.(tHelper); ok { - h.Helper() - } - Fail(t, fmt.Sprintf("Should NOT be empty, but was %v", object), msgAndArgs...) - } - - return pass - -} - -// getLen tries to get the length of an object. -// It returns (0, false) if impossible. -func getLen(x interface{}) (length int, ok bool) { - v := reflect.ValueOf(x) - defer func() { - ok = recover() == nil - }() - return v.Len(), true -} - -// Len asserts that the specified object has specific length. -// Len also fails if the object has a type that len() not accept. -// -// assert.Len(t, mySlice, 3) -func Len(t TestingT, object interface{}, length int, msgAndArgs ...interface{}) bool { - if h, ok := t.(tHelper); ok { - h.Helper() - } - l, ok := getLen(object) - if !ok { - return Fail(t, fmt.Sprintf("\"%v\" could not be applied builtin len()", object), msgAndArgs...) - } - - if l != length { - return Fail(t, fmt.Sprintf("\"%v\" should have %d item(s), but has %d", object, length, l), msgAndArgs...) - } - return true -} - -// True asserts that the specified value is true. -// -// assert.True(t, myBool) -func True(t TestingT, value bool, msgAndArgs ...interface{}) bool { - if !value { - if h, ok := t.(tHelper); ok { - h.Helper() - } - return Fail(t, "Should be true", msgAndArgs...) - } - - return true - -} - -// False asserts that the specified value is false. -// -// assert.False(t, myBool) -func False(t TestingT, value bool, msgAndArgs ...interface{}) bool { - if value { - if h, ok := t.(tHelper); ok { - h.Helper() - } - return Fail(t, "Should be false", msgAndArgs...) - } - - return true - -} - -// NotEqual asserts that the specified values are NOT equal. 
-// -// assert.NotEqual(t, obj1, obj2) -// -// Pointer variable equality is determined based on the equality of the -// referenced values (as opposed to the memory addresses). -func NotEqual(t TestingT, expected, actual interface{}, msgAndArgs ...interface{}) bool { - if h, ok := t.(tHelper); ok { - h.Helper() - } - if err := validateEqualArgs(expected, actual); err != nil { - return Fail(t, fmt.Sprintf("Invalid operation: %#v != %#v (%s)", - expected, actual, err), msgAndArgs...) - } - - if ObjectsAreEqual(expected, actual) { - return Fail(t, fmt.Sprintf("Should not be: %#v\n", actual), msgAndArgs...) - } - - return true - -} - -// NotEqualValues asserts that two objects are not equal even when converted to the same type -// -// assert.NotEqualValues(t, obj1, obj2) -func NotEqualValues(t TestingT, expected, actual interface{}, msgAndArgs ...interface{}) bool { - if h, ok := t.(tHelper); ok { - h.Helper() - } - - if ObjectsAreEqualValues(expected, actual) { - return Fail(t, fmt.Sprintf("Should not be: %#v\n", actual), msgAndArgs...) - } - - return true -} - -// containsElement try loop over the list check if the list includes the element. -// return (false, false) if impossible. -// return (true, false) if element was not found. -// return (true, true) if element was found. -func containsElement(list interface{}, element interface{}) (ok, found bool) { - - listValue := reflect.ValueOf(list) - listType := reflect.TypeOf(list) - if listType == nil { - return false, false - } - listKind := listType.Kind() - defer func() { - if e := recover(); e != nil { - ok = false - found = false - } - }() - - if listKind == reflect.String { - elementValue := reflect.ValueOf(element) - return true, strings.Contains(listValue.String(), elementValue.String()) - } - - if listKind == reflect.Map { - mapKeys := listValue.MapKeys() - for i := 0; i < len(mapKeys); i++ { - if ObjectsAreEqual(mapKeys[i].Interface(), element) { - return true, true - } - } - return true, false - } - - for i := 0; i < listValue.Len(); i++ { - if ObjectsAreEqual(listValue.Index(i).Interface(), element) { - return true, true - } - } - return true, false - -} - -// Contains asserts that the specified string, list(array, slice...) or map contains the -// specified substring or element. -// -// assert.Contains(t, "Hello World", "World") -// assert.Contains(t, ["Hello", "World"], "World") -// assert.Contains(t, {"Hello": "World"}, "Hello") -func Contains(t TestingT, s, contains interface{}, msgAndArgs ...interface{}) bool { - if h, ok := t.(tHelper); ok { - h.Helper() - } - - ok, found := containsElement(s, contains) - if !ok { - return Fail(t, fmt.Sprintf("%#v could not be applied builtin len()", s), msgAndArgs...) - } - if !found { - return Fail(t, fmt.Sprintf("%#v does not contain %#v", s, contains), msgAndArgs...) - } - - return true - -} - -// NotContains asserts that the specified string, list(array, slice...) or map does NOT contain the -// specified substring or element. -// -// assert.NotContains(t, "Hello World", "Earth") -// assert.NotContains(t, ["Hello", "World"], "Earth") -// assert.NotContains(t, {"Hello": "World"}, "Earth") -func NotContains(t TestingT, s, contains interface{}, msgAndArgs ...interface{}) bool { - if h, ok := t.(tHelper); ok { - h.Helper() - } - - ok, found := containsElement(s, contains) - if !ok { - return Fail(t, fmt.Sprintf("%#v could not be applied builtin len()", s), msgAndArgs...) - } - if found { - return Fail(t, fmt.Sprintf("%#v should not contain %#v", s, contains), msgAndArgs...) 
- } - - return true - -} - -// Subset asserts that the specified list(array, slice...) or map contains all -// elements given in the specified subset list(array, slice...) or map. -// -// assert.Subset(t, [1, 2, 3], [1, 2]) -// assert.Subset(t, {"x": 1, "y": 2}, {"x": 1}) -func Subset(t TestingT, list, subset interface{}, msgAndArgs ...interface{}) (ok bool) { - if h, ok := t.(tHelper); ok { - h.Helper() - } - if subset == nil { - return true // we consider nil to be equal to the nil set - } - - listKind := reflect.TypeOf(list).Kind() - if listKind != reflect.Array && listKind != reflect.Slice && listKind != reflect.Map { - return Fail(t, fmt.Sprintf("%q has an unsupported type %s", list, listKind), msgAndArgs...) - } - - subsetKind := reflect.TypeOf(subset).Kind() - if subsetKind != reflect.Array && subsetKind != reflect.Slice && listKind != reflect.Map { - return Fail(t, fmt.Sprintf("%q has an unsupported type %s", subset, subsetKind), msgAndArgs...) - } - - if subsetKind == reflect.Map && listKind == reflect.Map { - subsetMap := reflect.ValueOf(subset) - actualMap := reflect.ValueOf(list) - - for _, k := range subsetMap.MapKeys() { - ev := subsetMap.MapIndex(k) - av := actualMap.MapIndex(k) - - if !av.IsValid() { - return Fail(t, fmt.Sprintf("%#v does not contain %#v", list, subset), msgAndArgs...) - } - if !ObjectsAreEqual(ev.Interface(), av.Interface()) { - return Fail(t, fmt.Sprintf("%#v does not contain %#v", list, subset), msgAndArgs...) - } - } - - return true - } - - subsetList := reflect.ValueOf(subset) - for i := 0; i < subsetList.Len(); i++ { - element := subsetList.Index(i).Interface() - ok, found := containsElement(list, element) - if !ok { - return Fail(t, fmt.Sprintf("%#v could not be applied builtin len()", list), msgAndArgs...) - } - if !found { - return Fail(t, fmt.Sprintf("%#v does not contain %#v", list, element), msgAndArgs...) - } - } - - return true -} - -// NotSubset asserts that the specified list(array, slice...) or map does NOT -// contain all elements given in the specified subset list(array, slice...) or -// map. -// -// assert.NotSubset(t, [1, 3, 4], [1, 2]) -// assert.NotSubset(t, {"x": 1, "y": 2}, {"z": 3}) -func NotSubset(t TestingT, list, subset interface{}, msgAndArgs ...interface{}) (ok bool) { - if h, ok := t.(tHelper); ok { - h.Helper() - } - if subset == nil { - return Fail(t, "nil is the empty set which is a subset of every set", msgAndArgs...) - } - - listKind := reflect.TypeOf(list).Kind() - if listKind != reflect.Array && listKind != reflect.Slice && listKind != reflect.Map { - return Fail(t, fmt.Sprintf("%q has an unsupported type %s", list, listKind), msgAndArgs...) - } - - subsetKind := reflect.TypeOf(subset).Kind() - if subsetKind != reflect.Array && subsetKind != reflect.Slice && listKind != reflect.Map { - return Fail(t, fmt.Sprintf("%q has an unsupported type %s", subset, subsetKind), msgAndArgs...) - } - - if subsetKind == reflect.Map && listKind == reflect.Map { - subsetMap := reflect.ValueOf(subset) - actualMap := reflect.ValueOf(list) - - for _, k := range subsetMap.MapKeys() { - ev := subsetMap.MapIndex(k) - av := actualMap.MapIndex(k) - - if !av.IsValid() { - return true - } - if !ObjectsAreEqual(ev.Interface(), av.Interface()) { - return true - } - } - - return Fail(t, fmt.Sprintf("%q is a subset of %q", subset, list), msgAndArgs...) 
- } - - subsetList := reflect.ValueOf(subset) - for i := 0; i < subsetList.Len(); i++ { - element := subsetList.Index(i).Interface() - ok, found := containsElement(list, element) - if !ok { - return Fail(t, fmt.Sprintf("\"%s\" could not be applied builtin len()", list), msgAndArgs...) - } - if !found { - return true - } - } - - return Fail(t, fmt.Sprintf("%q is a subset of %q", subset, list), msgAndArgs...) -} - -// ElementsMatch asserts that the specified listA(array, slice...) is equal to specified -// listB(array, slice...) ignoring the order of the elements. If there are duplicate elements, -// the number of appearances of each of them in both lists should match. -// -// assert.ElementsMatch(t, [1, 3, 2, 3], [1, 3, 3, 2]) -func ElementsMatch(t TestingT, listA, listB interface{}, msgAndArgs ...interface{}) (ok bool) { - if h, ok := t.(tHelper); ok { - h.Helper() - } - if isEmpty(listA) && isEmpty(listB) { - return true - } - - if !isList(t, listA, msgAndArgs...) || !isList(t, listB, msgAndArgs...) { - return false - } - - extraA, extraB := diffLists(listA, listB) - - if len(extraA) == 0 && len(extraB) == 0 { - return true - } - - return Fail(t, formatListDiff(listA, listB, extraA, extraB), msgAndArgs...) -} - -// isList checks that the provided value is array or slice. -func isList(t TestingT, list interface{}, msgAndArgs ...interface{}) (ok bool) { - kind := reflect.TypeOf(list).Kind() - if kind != reflect.Array && kind != reflect.Slice { - return Fail(t, fmt.Sprintf("%q has an unsupported type %s, expecting array or slice", list, kind), - msgAndArgs...) - } - return true -} - -// diffLists diffs two arrays/slices and returns slices of elements that are only in A and only in B. -// If some element is present multiple times, each instance is counted separately (e.g. if something is 2x in A and -// 5x in B, it will be 0x in extraA and 3x in extraB). The order of items in both lists is ignored. -func diffLists(listA, listB interface{}) (extraA, extraB []interface{}) { - aValue := reflect.ValueOf(listA) - bValue := reflect.ValueOf(listB) - - aLen := aValue.Len() - bLen := bValue.Len() - - // Mark indexes in bValue that we already used - visited := make([]bool, bLen) - for i := 0; i < aLen; i++ { - element := aValue.Index(i).Interface() - found := false - for j := 0; j < bLen; j++ { - if visited[j] { - continue - } - if ObjectsAreEqual(bValue.Index(j).Interface(), element) { - visited[j] = true - found = true - break - } - } - if !found { - extraA = append(extraA, element) - } - } - - for j := 0; j < bLen; j++ { - if visited[j] { - continue - } - extraB = append(extraB, bValue.Index(j).Interface()) - } - - return -} - -func formatListDiff(listA, listB interface{}, extraA, extraB []interface{}) string { - var msg bytes.Buffer - - msg.WriteString("elements differ") - if len(extraA) > 0 { - msg.WriteString("\n\nextra elements in list A:\n") - msg.WriteString(spewConfig.Sdump(extraA)) - } - if len(extraB) > 0 { - msg.WriteString("\n\nextra elements in list B:\n") - msg.WriteString(spewConfig.Sdump(extraB)) - } - msg.WriteString("\n\nlistA:\n") - msg.WriteString(spewConfig.Sdump(listA)) - msg.WriteString("\n\nlistB:\n") - msg.WriteString(spewConfig.Sdump(listB)) - - return msg.String() -} - -// Condition uses a Comparison to assert a complex condition. -func Condition(t TestingT, comp Comparison, msgAndArgs ...interface{}) bool { - if h, ok := t.(tHelper); ok { - h.Helper() - } - result := comp() - if !result { - Fail(t, "Condition failed!", msgAndArgs...) 
- } - return result -} - -// PanicTestFunc defines a func that should be passed to the assert.Panics and assert.NotPanics -// methods, and represents a simple func that takes no arguments, and returns nothing. -type PanicTestFunc func() - -// didPanic returns true if the function passed to it panics. Otherwise, it returns false. -func didPanic(f PanicTestFunc) (didPanic bool, message interface{}, stack string) { - didPanic = true - - defer func() { - message = recover() - if didPanic { - stack = string(debug.Stack()) - } - }() - - // call the target function - f() - didPanic = false - - return -} - -// Panics asserts that the code inside the specified PanicTestFunc panics. -// -// assert.Panics(t, func(){ GoCrazy() }) -func Panics(t TestingT, f PanicTestFunc, msgAndArgs ...interface{}) bool { - if h, ok := t.(tHelper); ok { - h.Helper() - } - - if funcDidPanic, panicValue, _ := didPanic(f); !funcDidPanic { - return Fail(t, fmt.Sprintf("func %#v should panic\n\tPanic value:\t%#v", f, panicValue), msgAndArgs...) - } - - return true -} - -// PanicsWithValue asserts that the code inside the specified PanicTestFunc panics, and that -// the recovered panic value equals the expected panic value. -// -// assert.PanicsWithValue(t, "crazy error", func(){ GoCrazy() }) -func PanicsWithValue(t TestingT, expected interface{}, f PanicTestFunc, msgAndArgs ...interface{}) bool { - if h, ok := t.(tHelper); ok { - h.Helper() - } - - funcDidPanic, panicValue, panickedStack := didPanic(f) - if !funcDidPanic { - return Fail(t, fmt.Sprintf("func %#v should panic\n\tPanic value:\t%#v", f, panicValue), msgAndArgs...) - } - if panicValue != expected { - return Fail(t, fmt.Sprintf("func %#v should panic with value:\t%#v\n\tPanic value:\t%#v\n\tPanic stack:\t%s", f, expected, panicValue, panickedStack), msgAndArgs...) - } - - return true -} - -// PanicsWithError asserts that the code inside the specified PanicTestFunc -// panics, and that the recovered panic value is an error that satisfies the -// EqualError comparison. -// -// assert.PanicsWithError(t, "crazy error", func(){ GoCrazy() }) -func PanicsWithError(t TestingT, errString string, f PanicTestFunc, msgAndArgs ...interface{}) bool { - if h, ok := t.(tHelper); ok { - h.Helper() - } - - funcDidPanic, panicValue, panickedStack := didPanic(f) - if !funcDidPanic { - return Fail(t, fmt.Sprintf("func %#v should panic\n\tPanic value:\t%#v", f, panicValue), msgAndArgs...) - } - panicErr, ok := panicValue.(error) - if !ok || panicErr.Error() != errString { - return Fail(t, fmt.Sprintf("func %#v should panic with error message:\t%#v\n\tPanic value:\t%#v\n\tPanic stack:\t%s", f, errString, panicValue, panickedStack), msgAndArgs...) - } - - return true -} - -// NotPanics asserts that the code inside the specified PanicTestFunc does NOT panic. -// -// assert.NotPanics(t, func(){ RemainCalm() }) -func NotPanics(t TestingT, f PanicTestFunc, msgAndArgs ...interface{}) bool { - if h, ok := t.(tHelper); ok { - h.Helper() - } - - if funcDidPanic, panicValue, panickedStack := didPanic(f); funcDidPanic { - return Fail(t, fmt.Sprintf("func %#v should not panic\n\tPanic value:\t%v\n\tPanic stack:\t%s", f, panicValue, panickedStack), msgAndArgs...) - } - - return true -} - -// WithinDuration asserts that the two times are within duration delta of each other. 
-// -// assert.WithinDuration(t, time.Now(), time.Now(), 10*time.Second) -func WithinDuration(t TestingT, expected, actual time.Time, delta time.Duration, msgAndArgs ...interface{}) bool { - if h, ok := t.(tHelper); ok { - h.Helper() - } - - dt := expected.Sub(actual) - if dt < -delta || dt > delta { - return Fail(t, fmt.Sprintf("Max difference between %v and %v allowed is %v, but difference was %v", expected, actual, delta, dt), msgAndArgs...) - } - - return true -} - -// WithinRange asserts that a time is within a time range (inclusive). -// -// assert.WithinRange(t, time.Now(), time.Now().Add(-time.Second), time.Now().Add(time.Second)) -func WithinRange(t TestingT, actual, start, end time.Time, msgAndArgs ...interface{}) bool { - if h, ok := t.(tHelper); ok { - h.Helper() - } - - if end.Before(start) { - return Fail(t, "Start should be before end", msgAndArgs...) - } - - if actual.Before(start) { - return Fail(t, fmt.Sprintf("Time %v expected to be in time range %v to %v, but is before the range", actual, start, end), msgAndArgs...) - } else if actual.After(end) { - return Fail(t, fmt.Sprintf("Time %v expected to be in time range %v to %v, but is after the range", actual, start, end), msgAndArgs...) - } - - return true -} - -func toFloat(x interface{}) (float64, bool) { - var xf float64 - xok := true - - switch xn := x.(type) { - case uint: - xf = float64(xn) - case uint8: - xf = float64(xn) - case uint16: - xf = float64(xn) - case uint32: - xf = float64(xn) - case uint64: - xf = float64(xn) - case int: - xf = float64(xn) - case int8: - xf = float64(xn) - case int16: - xf = float64(xn) - case int32: - xf = float64(xn) - case int64: - xf = float64(xn) - case float32: - xf = float64(xn) - case float64: - xf = xn - case time.Duration: - xf = float64(xn) - default: - xok = false - } - - return xf, xok -} - -// InDelta asserts that the two numerals are within delta of each other. -// -// assert.InDelta(t, math.Pi, 22/7.0, 0.01) -func InDelta(t TestingT, expected, actual interface{}, delta float64, msgAndArgs ...interface{}) bool { - if h, ok := t.(tHelper); ok { - h.Helper() - } - - af, aok := toFloat(expected) - bf, bok := toFloat(actual) - - if !aok || !bok { - return Fail(t, "Parameters must be numerical", msgAndArgs...) - } - - if math.IsNaN(af) && math.IsNaN(bf) { - return true - } - - if math.IsNaN(af) { - return Fail(t, "Expected must not be NaN", msgAndArgs...) - } - - if math.IsNaN(bf) { - return Fail(t, fmt.Sprintf("Expected %v with delta %v, but was NaN", expected, delta), msgAndArgs...) - } - - dt := af - bf - if dt < -delta || dt > delta { - return Fail(t, fmt.Sprintf("Max difference between %v and %v allowed is %v, but difference was %v", expected, actual, delta, dt), msgAndArgs...) - } - - return true -} - -// InDeltaSlice is the same as InDelta, except it compares two slices. -func InDeltaSlice(t TestingT, expected, actual interface{}, delta float64, msgAndArgs ...interface{}) bool { - if h, ok := t.(tHelper); ok { - h.Helper() - } - if expected == nil || actual == nil || - reflect.TypeOf(actual).Kind() != reflect.Slice || - reflect.TypeOf(expected).Kind() != reflect.Slice { - return Fail(t, "Parameters must be slice", msgAndArgs...) - } - - actualSlice := reflect.ValueOf(actual) - expectedSlice := reflect.ValueOf(expected) - - for i := 0; i < actualSlice.Len(); i++ { - result := InDelta(t, actualSlice.Index(i).Interface(), expectedSlice.Index(i).Interface(), delta, msgAndArgs...) 
- if !result { - return result - } - } - - return true -} - -// InDeltaMapValues is the same as InDelta, but it compares all values between two maps. Both maps must have exactly the same keys. -func InDeltaMapValues(t TestingT, expected, actual interface{}, delta float64, msgAndArgs ...interface{}) bool { - if h, ok := t.(tHelper); ok { - h.Helper() - } - if expected == nil || actual == nil || - reflect.TypeOf(actual).Kind() != reflect.Map || - reflect.TypeOf(expected).Kind() != reflect.Map { - return Fail(t, "Arguments must be maps", msgAndArgs...) - } - - expectedMap := reflect.ValueOf(expected) - actualMap := reflect.ValueOf(actual) - - if expectedMap.Len() != actualMap.Len() { - return Fail(t, "Arguments must have the same number of keys", msgAndArgs...) - } - - for _, k := range expectedMap.MapKeys() { - ev := expectedMap.MapIndex(k) - av := actualMap.MapIndex(k) - - if !ev.IsValid() { - return Fail(t, fmt.Sprintf("missing key %q in expected map", k), msgAndArgs...) - } - - if !av.IsValid() { - return Fail(t, fmt.Sprintf("missing key %q in actual map", k), msgAndArgs...) - } - - if !InDelta( - t, - ev.Interface(), - av.Interface(), - delta, - msgAndArgs..., - ) { - return false - } - } - - return true -} - -func calcRelativeError(expected, actual interface{}) (float64, error) { - af, aok := toFloat(expected) - bf, bok := toFloat(actual) - if !aok || !bok { - return 0, fmt.Errorf("Parameters must be numerical") - } - if math.IsNaN(af) && math.IsNaN(bf) { - return 0, nil - } - if math.IsNaN(af) { - return 0, errors.New("expected value must not be NaN") - } - if af == 0 { - return 0, fmt.Errorf("expected value must have a value other than zero to calculate the relative error") - } - if math.IsNaN(bf) { - return 0, errors.New("actual value must not be NaN") - } - - return math.Abs(af-bf) / math.Abs(af), nil -} - -// InEpsilon asserts that expected and actual have a relative error less than epsilon -func InEpsilon(t TestingT, expected, actual interface{}, epsilon float64, msgAndArgs ...interface{}) bool { - if h, ok := t.(tHelper); ok { - h.Helper() - } - if math.IsNaN(epsilon) { - return Fail(t, "epsilon must not be NaN", msgAndArgs...) - } - actualEpsilon, err := calcRelativeError(expected, actual) - if err != nil { - return Fail(t, err.Error(), msgAndArgs...) - } - if actualEpsilon > epsilon { - return Fail(t, fmt.Sprintf("Relative error is too high: %#v (expected)\n"+ - " < %#v (actual)", epsilon, actualEpsilon), msgAndArgs...) - } - - return true -} - -// InEpsilonSlice is the same as InEpsilon, except it compares each value from two slices. -func InEpsilonSlice(t TestingT, expected, actual interface{}, epsilon float64, msgAndArgs ...interface{}) bool { - if h, ok := t.(tHelper); ok { - h.Helper() - } - - if expected == nil || actual == nil { - return Fail(t, "Parameters must be slice", msgAndArgs...) - } - - expectedSlice := reflect.ValueOf(expected) - actualSlice := reflect.ValueOf(actual) - - if expectedSlice.Type().Kind() != reflect.Slice { - return Fail(t, "Expected value must be slice", msgAndArgs...) - } - - expectedLen := expectedSlice.Len() - if !IsType(t, expected, actual) || !Len(t, actual, expectedLen) { - return false - } - - for i := 0; i < expectedLen; i++ { - if !InEpsilon(t, expectedSlice.Index(i).Interface(), actualSlice.Index(i).Interface(), epsilon, "at index %d", i) { - return false - } - } - - return true -} - -/* - Errors -*/ - -// NoError asserts that a function returned no error (i.e. `nil`). 
-// -// actualObj, err := SomeFunction() -// if assert.NoError(t, err) { -// assert.Equal(t, expectedObj, actualObj) -// } -func NoError(t TestingT, err error, msgAndArgs ...interface{}) bool { - if err != nil { - if h, ok := t.(tHelper); ok { - h.Helper() - } - return Fail(t, fmt.Sprintf("Received unexpected error:\n%+v", err), msgAndArgs...) - } - - return true -} - -// Error asserts that a function returned an error (i.e. not `nil`). -// -// actualObj, err := SomeFunction() -// if assert.Error(t, err) { -// assert.Equal(t, expectedError, err) -// } -func Error(t TestingT, err error, msgAndArgs ...interface{}) bool { - if err == nil { - if h, ok := t.(tHelper); ok { - h.Helper() - } - return Fail(t, "An error is expected but got nil.", msgAndArgs...) - } - - return true -} - -// EqualError asserts that a function returned an error (i.e. not `nil`) -// and that it is equal to the provided error. -// -// actualObj, err := SomeFunction() -// assert.EqualError(t, err, expectedErrorString) -func EqualError(t TestingT, theError error, errString string, msgAndArgs ...interface{}) bool { - if h, ok := t.(tHelper); ok { - h.Helper() - } - if !Error(t, theError, msgAndArgs...) { - return false - } - expected := errString - actual := theError.Error() - // don't need to use deep equals here, we know they are both strings - if expected != actual { - return Fail(t, fmt.Sprintf("Error message not equal:\n"+ - "expected: %q\n"+ - "actual : %q", expected, actual), msgAndArgs...) - } - return true -} - -// ErrorContains asserts that a function returned an error (i.e. not `nil`) -// and that the error contains the specified substring. -// -// actualObj, err := SomeFunction() -// assert.ErrorContains(t, err, expectedErrorSubString) -func ErrorContains(t TestingT, theError error, contains string, msgAndArgs ...interface{}) bool { - if h, ok := t.(tHelper); ok { - h.Helper() - } - if !Error(t, theError, msgAndArgs...) { - return false - } - - actual := theError.Error() - if !strings.Contains(actual, contains) { - return Fail(t, fmt.Sprintf("Error %#v does not contain %#v", actual, contains), msgAndArgs...) - } - - return true -} - -// matchRegexp return true if a specified regexp matches a string. -func matchRegexp(rx interface{}, str interface{}) bool { - - var r *regexp.Regexp - if rr, ok := rx.(*regexp.Regexp); ok { - r = rr - } else { - r = regexp.MustCompile(fmt.Sprint(rx)) - } - - return (r.FindStringIndex(fmt.Sprint(str)) != nil) - -} - -// Regexp asserts that a specified regexp matches a string. -// -// assert.Regexp(t, regexp.MustCompile("start"), "it's starting") -// assert.Regexp(t, "start...$", "it's not starting") -func Regexp(t TestingT, rx interface{}, str interface{}, msgAndArgs ...interface{}) bool { - if h, ok := t.(tHelper); ok { - h.Helper() - } - - match := matchRegexp(rx, str) - - if !match { - Fail(t, fmt.Sprintf("Expect \"%v\" to match \"%v\"", str, rx), msgAndArgs...) - } - - return match -} - -// NotRegexp asserts that a specified regexp does not match a string. -// -// assert.NotRegexp(t, regexp.MustCompile("starts"), "it's starting") -// assert.NotRegexp(t, "^start", "it's not starting") -func NotRegexp(t TestingT, rx interface{}, str interface{}, msgAndArgs ...interface{}) bool { - if h, ok := t.(tHelper); ok { - h.Helper() - } - match := matchRegexp(rx, str) - - if match { - Fail(t, fmt.Sprintf("Expect \"%v\" to NOT match \"%v\"", str, rx), msgAndArgs...) - } - - return !match - -} - -// Zero asserts that i is the zero value for its type. 
-func Zero(t TestingT, i interface{}, msgAndArgs ...interface{}) bool { - if h, ok := t.(tHelper); ok { - h.Helper() - } - if i != nil && !reflect.DeepEqual(i, reflect.Zero(reflect.TypeOf(i)).Interface()) { - return Fail(t, fmt.Sprintf("Should be zero, but was %v", i), msgAndArgs...) - } - return true -} - -// NotZero asserts that i is not the zero value for its type. -func NotZero(t TestingT, i interface{}, msgAndArgs ...interface{}) bool { - if h, ok := t.(tHelper); ok { - h.Helper() - } - if i == nil || reflect.DeepEqual(i, reflect.Zero(reflect.TypeOf(i)).Interface()) { - return Fail(t, fmt.Sprintf("Should not be zero, but was %v", i), msgAndArgs...) - } - return true -} - -// FileExists checks whether a file exists in the given path. It also fails if -// the path points to a directory or there is an error when trying to check the file. -func FileExists(t TestingT, path string, msgAndArgs ...interface{}) bool { - if h, ok := t.(tHelper); ok { - h.Helper() - } - info, err := os.Lstat(path) - if err != nil { - if os.IsNotExist(err) { - return Fail(t, fmt.Sprintf("unable to find file %q", path), msgAndArgs...) - } - return Fail(t, fmt.Sprintf("error when running os.Lstat(%q): %s", path, err), msgAndArgs...) - } - if info.IsDir() { - return Fail(t, fmt.Sprintf("%q is a directory", path), msgAndArgs...) - } - return true -} - -// NoFileExists checks whether a file does not exist in a given path. It fails -// if the path points to an existing _file_ only. -func NoFileExists(t TestingT, path string, msgAndArgs ...interface{}) bool { - if h, ok := t.(tHelper); ok { - h.Helper() - } - info, err := os.Lstat(path) - if err != nil { - return true - } - if info.IsDir() { - return true - } - return Fail(t, fmt.Sprintf("file %q exists", path), msgAndArgs...) -} - -// DirExists checks whether a directory exists in the given path. It also fails -// if the path is a file rather a directory or there is an error checking whether it exists. -func DirExists(t TestingT, path string, msgAndArgs ...interface{}) bool { - if h, ok := t.(tHelper); ok { - h.Helper() - } - info, err := os.Lstat(path) - if err != nil { - if os.IsNotExist(err) { - return Fail(t, fmt.Sprintf("unable to find file %q", path), msgAndArgs...) - } - return Fail(t, fmt.Sprintf("error when running os.Lstat(%q): %s", path, err), msgAndArgs...) - } - if !info.IsDir() { - return Fail(t, fmt.Sprintf("%q is a file", path), msgAndArgs...) - } - return true -} - -// NoDirExists checks whether a directory does not exist in the given path. -// It fails if the path points to an existing _directory_ only. -func NoDirExists(t TestingT, path string, msgAndArgs ...interface{}) bool { - if h, ok := t.(tHelper); ok { - h.Helper() - } - info, err := os.Lstat(path) - if err != nil { - if os.IsNotExist(err) { - return true - } - return true - } - if !info.IsDir() { - return true - } - return Fail(t, fmt.Sprintf("directory %q exists", path), msgAndArgs...) -} - -// JSONEq asserts that two JSON strings are equivalent. -// -// assert.JSONEq(t, `{"hello": "world", "foo": "bar"}`, `{"foo": "bar", "hello": "world"}`) -func JSONEq(t TestingT, expected string, actual string, msgAndArgs ...interface{}) bool { - if h, ok := t.(tHelper); ok { - h.Helper() - } - var expectedJSONAsInterface, actualJSONAsInterface interface{} - - if err := json.Unmarshal([]byte(expected), &expectedJSONAsInterface); err != nil { - return Fail(t, fmt.Sprintf("Expected value ('%s') is not valid json.\nJSON parsing error: '%s'", expected, err.Error()), msgAndArgs...) 
- } - - if err := json.Unmarshal([]byte(actual), &actualJSONAsInterface); err != nil { - return Fail(t, fmt.Sprintf("Input ('%s') needs to be valid json.\nJSON parsing error: '%s'", actual, err.Error()), msgAndArgs...) - } - - return Equal(t, expectedJSONAsInterface, actualJSONAsInterface, msgAndArgs...) -} - -// YAMLEq asserts that two YAML strings are equivalent. -func YAMLEq(t TestingT, expected string, actual string, msgAndArgs ...interface{}) bool { - if h, ok := t.(tHelper); ok { - h.Helper() - } - var expectedYAMLAsInterface, actualYAMLAsInterface interface{} - - if err := yaml.Unmarshal([]byte(expected), &expectedYAMLAsInterface); err != nil { - return Fail(t, fmt.Sprintf("Expected value ('%s') is not valid yaml.\nYAML parsing error: '%s'", expected, err.Error()), msgAndArgs...) - } - - if err := yaml.Unmarshal([]byte(actual), &actualYAMLAsInterface); err != nil { - return Fail(t, fmt.Sprintf("Input ('%s') needs to be valid yaml.\nYAML error: '%s'", actual, err.Error()), msgAndArgs...) - } - - return Equal(t, expectedYAMLAsInterface, actualYAMLAsInterface, msgAndArgs...) -} - -func typeAndKind(v interface{}) (reflect.Type, reflect.Kind) { - t := reflect.TypeOf(v) - k := t.Kind() - - if k == reflect.Ptr { - t = t.Elem() - k = t.Kind() - } - return t, k -} - -// diff returns a diff of both values as long as both are of the same type and -// are a struct, map, slice, array or string. Otherwise it returns an empty string. -func diff(expected interface{}, actual interface{}) string { - if expected == nil || actual == nil { - return "" - } - - et, ek := typeAndKind(expected) - at, _ := typeAndKind(actual) - - if et != at { - return "" - } - - if ek != reflect.Struct && ek != reflect.Map && ek != reflect.Slice && ek != reflect.Array && ek != reflect.String { - return "" - } - - var e, a string - - switch et { - case reflect.TypeOf(""): - e = reflect.ValueOf(expected).String() - a = reflect.ValueOf(actual).String() - case reflect.TypeOf(time.Time{}): - e = spewConfigStringerEnabled.Sdump(expected) - a = spewConfigStringerEnabled.Sdump(actual) - default: - e = spewConfig.Sdump(expected) - a = spewConfig.Sdump(actual) - } - - diff, _ := difflib.GetUnifiedDiffString(difflib.UnifiedDiff{ - A: difflib.SplitLines(e), - B: difflib.SplitLines(a), - FromFile: "Expected", - FromDate: "", - ToFile: "Actual", - ToDate: "", - Context: 1, - }) - - return "\n\nDiff:\n" + diff -} - -func isFunction(arg interface{}) bool { - if arg == nil { - return false - } - return reflect.TypeOf(arg).Kind() == reflect.Func -} - -var spewConfig = spew.ConfigState{ - Indent: " ", - DisablePointerAddresses: true, - DisableCapacities: true, - SortKeys: true, - DisableMethods: true, - MaxDepth: 10, -} - -var spewConfigStringerEnabled = spew.ConfigState{ - Indent: " ", - DisablePointerAddresses: true, - DisableCapacities: true, - SortKeys: true, - MaxDepth: 10, -} - -type tHelper interface { - Helper() -} - -// Eventually asserts that given condition will be met in waitFor time, -// periodically checking target function each tick. 
-// -// assert.Eventually(t, func() bool { return true; }, time.Second, 10*time.Millisecond) -func Eventually(t TestingT, condition func() bool, waitFor time.Duration, tick time.Duration, msgAndArgs ...interface{}) bool { - if h, ok := t.(tHelper); ok { - h.Helper() - } - - ch := make(chan bool, 1) - - timer := time.NewTimer(waitFor) - defer timer.Stop() - - ticker := time.NewTicker(tick) - defer ticker.Stop() - - for tick := ticker.C; ; { - select { - case <-timer.C: - return Fail(t, "Condition never satisfied", msgAndArgs...) - case <-tick: - tick = nil - go func() { ch <- condition() }() - case v := <-ch: - if v { - return true - } - tick = ticker.C - } - } -} - -// CollectT implements the TestingT interface and collects all errors. -type CollectT struct { - errors []error -} - -// Errorf collects the error. -func (c *CollectT) Errorf(format string, args ...interface{}) { - c.errors = append(c.errors, fmt.Errorf(format, args...)) -} - -// FailNow panics. -func (*CollectT) FailNow() { - panic("Assertion failed") -} - -// Deprecated: That was a method for internal usage that should not have been published. Now just panics. -func (*CollectT) Reset() { - panic("Reset() is deprecated") -} - -// Deprecated: That was a method for internal usage that should not have been published. Now just panics. -func (*CollectT) Copy(TestingT) { - panic("Copy() is deprecated") -} - -// EventuallyWithT asserts that given condition will be met in waitFor time, -// periodically checking target function each tick. In contrast to Eventually, -// it supplies a CollectT to the condition function, so that the condition -// function can use the CollectT to call other assertions. -// The condition is considered "met" if no errors are raised in a tick. -// The supplied CollectT collects all errors from one tick (if there are any). -// If the condition is not met before waitFor, the collected errors of -// the last tick are copied to t. -// -// externalValue := false -// go func() { -// time.Sleep(8*time.Second) -// externalValue = true -// }() -// assert.EventuallyWithT(t, func(c *assert.CollectT) { -// // add assertions as needed; any assertion failure will fail the current tick -// assert.True(c, externalValue, "expected 'externalValue' to be true") -// }, 1*time.Second, 10*time.Second, "external state has not changed to 'true'; still false") -func EventuallyWithT(t TestingT, condition func(collect *CollectT), waitFor time.Duration, tick time.Duration, msgAndArgs ...interface{}) bool { - if h, ok := t.(tHelper); ok { - h.Helper() - } - - var lastFinishedTickErrs []error - ch := make(chan []error, 1) - - timer := time.NewTimer(waitFor) - defer timer.Stop() - - ticker := time.NewTicker(tick) - defer ticker.Stop() - - for tick := ticker.C; ; { - select { - case <-timer.C: - for _, err := range lastFinishedTickErrs { - t.Errorf("%v", err) - } - return Fail(t, "Condition never satisfied", msgAndArgs...) - case <-tick: - tick = nil - go func() { - collect := new(CollectT) - defer func() { - ch <- collect.errors - }() - condition(collect) - }() - case errs := <-ch: - if len(errs) == 0 { - return true - } - // Keep the errors from the last ended condition, so that they can be copied to t if timeout is reached. - lastFinishedTickErrs = errs - tick = ticker.C - } - } -} - -// Never asserts that the given condition doesn't satisfy in waitFor time, -// periodically checking the target function each tick. 
-// -// assert.Never(t, func() bool { return false; }, time.Second, 10*time.Millisecond) -func Never(t TestingT, condition func() bool, waitFor time.Duration, tick time.Duration, msgAndArgs ...interface{}) bool { - if h, ok := t.(tHelper); ok { - h.Helper() - } - - ch := make(chan bool, 1) - - timer := time.NewTimer(waitFor) - defer timer.Stop() - - ticker := time.NewTicker(tick) - defer ticker.Stop() - - for tick := ticker.C; ; { - select { - case <-timer.C: - return true - case <-tick: - tick = nil - go func() { ch <- condition() }() - case v := <-ch: - if v { - return Fail(t, "Condition satisfied", msgAndArgs...) - } - tick = ticker.C - } - } -} - -// ErrorIs asserts that at least one of the errors in err's chain matches target. -// This is a wrapper for errors.Is. -func ErrorIs(t TestingT, err, target error, msgAndArgs ...interface{}) bool { - if h, ok := t.(tHelper); ok { - h.Helper() - } - if errors.Is(err, target) { - return true - } - - var expectedText string - if target != nil { - expectedText = target.Error() - } - - chain := buildErrorChainString(err) - - return Fail(t, fmt.Sprintf("Target error should be in err chain:\n"+ - "expected: %q\n"+ - "in chain: %s", expectedText, chain, - ), msgAndArgs...) -} - -// NotErrorIs asserts that at none of the errors in err's chain matches target. -// This is a wrapper for errors.Is. -func NotErrorIs(t TestingT, err, target error, msgAndArgs ...interface{}) bool { - if h, ok := t.(tHelper); ok { - h.Helper() - } - if !errors.Is(err, target) { - return true - } - - var expectedText string - if target != nil { - expectedText = target.Error() - } - - chain := buildErrorChainString(err) - - return Fail(t, fmt.Sprintf("Target error should not be in err chain:\n"+ - "found: %q\n"+ - "in chain: %s", expectedText, chain, - ), msgAndArgs...) -} - -// ErrorAs asserts that at least one of the errors in err's chain matches target, and if so, sets target to that error value. -// This is a wrapper for errors.As. -func ErrorAs(t TestingT, err error, target interface{}, msgAndArgs ...interface{}) bool { - if h, ok := t.(tHelper); ok { - h.Helper() - } - if errors.As(err, target) { - return true - } - - chain := buildErrorChainString(err) - - return Fail(t, fmt.Sprintf("Should be in error chain:\n"+ - "expected: %q\n"+ - "in chain: %s", target, chain, - ), msgAndArgs...) -} - -func buildErrorChainString(err error) string { - if err == nil { - return "" - } - - e := errors.Unwrap(err) - chain := fmt.Sprintf("%q", err.Error()) - for e != nil { - chain += fmt.Sprintf("\n\t%q", e.Error()) - e = errors.Unwrap(e) - } - return chain -} diff --git a/vendor/github.com/stretchr/testify/assert/doc.go b/vendor/github.com/stretchr/testify/assert/doc.go deleted file mode 100644 index 4953981d3..000000000 --- a/vendor/github.com/stretchr/testify/assert/doc.go +++ /dev/null @@ -1,46 +0,0 @@ -// Package assert provides a set of comprehensive testing tools for use with the normal Go testing system. 
-// -// # Example Usage -// -// The following is a complete example using assert in a standard test function: -// -// import ( -// "testing" -// "github.com/stretchr/testify/assert" -// ) -// -// func TestSomething(t *testing.T) { -// -// var a string = "Hello" -// var b string = "Hello" -// -// assert.Equal(t, a, b, "The two words should be the same.") -// -// } -// -// if you assert many times, use the format below: -// -// import ( -// "testing" -// "github.com/stretchr/testify/assert" -// ) -// -// func TestSomething(t *testing.T) { -// assert := assert.New(t) -// -// var a string = "Hello" -// var b string = "Hello" -// -// assert.Equal(a, b, "The two words should be the same.") -// } -// -// # Assertions -// -// Assertions allow you to easily write test code, and are global funcs in the `assert` package. -// All assertion functions take, as the first argument, the `*testing.T` object provided by the -// testing framework. This allows the assertion funcs to write the failings and other details to -// the correct place. -// -// Every assertion function also takes an optional string message as the final argument, -// allowing custom error messages to be appended to the message the assertion method outputs. -package assert diff --git a/vendor/github.com/stretchr/testify/assert/errors.go b/vendor/github.com/stretchr/testify/assert/errors.go deleted file mode 100644 index ac9dc9d1d..000000000 --- a/vendor/github.com/stretchr/testify/assert/errors.go +++ /dev/null @@ -1,10 +0,0 @@ -package assert - -import ( - "errors" -) - -// AnError is an error instance useful for testing. If the code does not care -// about error specifics, and only needs to return the error for example, this -// error should be used to make the test code more readable. -var AnError = errors.New("assert.AnError general error for testing") diff --git a/vendor/github.com/stretchr/testify/assert/forward_assertions.go b/vendor/github.com/stretchr/testify/assert/forward_assertions.go deleted file mode 100644 index df189d234..000000000 --- a/vendor/github.com/stretchr/testify/assert/forward_assertions.go +++ /dev/null @@ -1,16 +0,0 @@ -package assert - -// Assertions provides assertion methods around the -// TestingT interface. -type Assertions struct { - t TestingT -} - -// New makes a new Assertions object for the specified TestingT. -func New(t TestingT) *Assertions { - return &Assertions{ - t: t, - } -} - -//go:generate sh -c "cd ../_codegen && go build && cd - && ../_codegen/_codegen -output-package=assert -template=assertion_forward.go.tmpl -include-format-funcs" diff --git a/vendor/github.com/stretchr/testify/assert/http_assertions.go b/vendor/github.com/stretchr/testify/assert/http_assertions.go deleted file mode 100644 index 861ed4b7c..000000000 --- a/vendor/github.com/stretchr/testify/assert/http_assertions.go +++ /dev/null @@ -1,165 +0,0 @@ -package assert - -import ( - "fmt" - "net/http" - "net/http/httptest" - "net/url" - "strings" -) - -// httpCode is a helper that returns HTTP code of the response. It returns -1 and -// an error if building a new request fails. -func httpCode(handler http.HandlerFunc, method, url string, values url.Values) (int, error) { - w := httptest.NewRecorder() - req, err := http.NewRequest(method, url, http.NoBody) - if err != nil { - return -1, err - } - req.URL.RawQuery = values.Encode() - handler(w, req) - return w.Code, nil -} - -// HTTPSuccess asserts that a specified handler returns a success status code. 
-// -// assert.HTTPSuccess(t, myHandler, "POST", "http://www.google.com", nil) -// -// Returns whether the assertion was successful (true) or not (false). -func HTTPSuccess(t TestingT, handler http.HandlerFunc, method, url string, values url.Values, msgAndArgs ...interface{}) bool { - if h, ok := t.(tHelper); ok { - h.Helper() - } - code, err := httpCode(handler, method, url, values) - if err != nil { - Fail(t, fmt.Sprintf("Failed to build test request, got error: %s", err), msgAndArgs...) - } - - isSuccessCode := code >= http.StatusOK && code <= http.StatusPartialContent - if !isSuccessCode { - Fail(t, fmt.Sprintf("Expected HTTP success status code for %q but received %d", url+"?"+values.Encode(), code), msgAndArgs...) - } - - return isSuccessCode -} - -// HTTPRedirect asserts that a specified handler returns a redirect status code. -// -// assert.HTTPRedirect(t, myHandler, "GET", "/a/b/c", url.Values{"a": []string{"b", "c"}} -// -// Returns whether the assertion was successful (true) or not (false). -func HTTPRedirect(t TestingT, handler http.HandlerFunc, method, url string, values url.Values, msgAndArgs ...interface{}) bool { - if h, ok := t.(tHelper); ok { - h.Helper() - } - code, err := httpCode(handler, method, url, values) - if err != nil { - Fail(t, fmt.Sprintf("Failed to build test request, got error: %s", err), msgAndArgs...) - } - - isRedirectCode := code >= http.StatusMultipleChoices && code <= http.StatusTemporaryRedirect - if !isRedirectCode { - Fail(t, fmt.Sprintf("Expected HTTP redirect status code for %q but received %d", url+"?"+values.Encode(), code), msgAndArgs...) - } - - return isRedirectCode -} - -// HTTPError asserts that a specified handler returns an error status code. -// -// assert.HTTPError(t, myHandler, "POST", "/a/b/c", url.Values{"a": []string{"b", "c"}} -// -// Returns whether the assertion was successful (true) or not (false). -func HTTPError(t TestingT, handler http.HandlerFunc, method, url string, values url.Values, msgAndArgs ...interface{}) bool { - if h, ok := t.(tHelper); ok { - h.Helper() - } - code, err := httpCode(handler, method, url, values) - if err != nil { - Fail(t, fmt.Sprintf("Failed to build test request, got error: %s", err), msgAndArgs...) - } - - isErrorCode := code >= http.StatusBadRequest - if !isErrorCode { - Fail(t, fmt.Sprintf("Expected HTTP error status code for %q but received %d", url+"?"+values.Encode(), code), msgAndArgs...) - } - - return isErrorCode -} - -// HTTPStatusCode asserts that a specified handler returns a specified status code. -// -// assert.HTTPStatusCode(t, myHandler, "GET", "/notImplemented", nil, 501) -// -// Returns whether the assertion was successful (true) or not (false). -func HTTPStatusCode(t TestingT, handler http.HandlerFunc, method, url string, values url.Values, statuscode int, msgAndArgs ...interface{}) bool { - if h, ok := t.(tHelper); ok { - h.Helper() - } - code, err := httpCode(handler, method, url, values) - if err != nil { - Fail(t, fmt.Sprintf("Failed to build test request, got error: %s", err), msgAndArgs...) - } - - successful := code == statuscode - if !successful { - Fail(t, fmt.Sprintf("Expected HTTP status code %d for %q but received %d", statuscode, url+"?"+values.Encode(), code), msgAndArgs...) - } - - return successful -} - -// HTTPBody is a helper that returns HTTP body of the response. It returns -// empty string if building a new request fails. 
-func HTTPBody(handler http.HandlerFunc, method, url string, values url.Values) string { - w := httptest.NewRecorder() - if len(values) > 0 { - url += "?" + values.Encode() - } - req, err := http.NewRequest(method, url, http.NoBody) - if err != nil { - return "" - } - handler(w, req) - return w.Body.String() -} - -// HTTPBodyContains asserts that a specified handler returns a -// body that contains a string. -// -// assert.HTTPBodyContains(t, myHandler, "GET", "www.google.com", nil, "I'm Feeling Lucky") -// -// Returns whether the assertion was successful (true) or not (false). -func HTTPBodyContains(t TestingT, handler http.HandlerFunc, method, url string, values url.Values, str interface{}, msgAndArgs ...interface{}) bool { - if h, ok := t.(tHelper); ok { - h.Helper() - } - body := HTTPBody(handler, method, url, values) - - contains := strings.Contains(body, fmt.Sprint(str)) - if !contains { - Fail(t, fmt.Sprintf("Expected response body for \"%s\" to contain \"%s\" but found \"%s\"", url+"?"+values.Encode(), str, body), msgAndArgs...) - } - - return contains -} - -// HTTPBodyNotContains asserts that a specified handler returns a -// body that does not contain a string. -// -// assert.HTTPBodyNotContains(t, myHandler, "GET", "www.google.com", nil, "I'm Feeling Lucky") -// -// Returns whether the assertion was successful (true) or not (false). -func HTTPBodyNotContains(t TestingT, handler http.HandlerFunc, method, url string, values url.Values, str interface{}, msgAndArgs ...interface{}) bool { - if h, ok := t.(tHelper); ok { - h.Helper() - } - body := HTTPBody(handler, method, url, values) - - contains := strings.Contains(body, fmt.Sprint(str)) - if contains { - Fail(t, fmt.Sprintf("Expected response body for \"%s\" to NOT contain \"%s\" but found \"%s\"", url+"?"+values.Encode(), str, body), msgAndArgs...) - } - - return !contains -} diff --git a/vendor/github.com/stretchr/testify/mock/doc.go b/vendor/github.com/stretchr/testify/mock/doc.go deleted file mode 100644 index d6b3c844c..000000000 --- a/vendor/github.com/stretchr/testify/mock/doc.go +++ /dev/null @@ -1,44 +0,0 @@ -// Package mock provides a system by which it is possible to mock your objects -// and verify calls are happening as expected. -// -// # Example Usage -// -// The mock package provides an object, Mock, that tracks activity on another object. It is usually -// embedded into a test object as shown below: -// -// type MyTestObject struct { -// // add a Mock object instance -// mock.Mock -// -// // other fields go here as normal -// } -// -// When implementing the methods of an interface, you wire your functions up -// to call the Mock.Called(args...) method, and return the appropriate values. -// -// For example, to mock a method that saves the name and age of a person and returns -// the year of their birth or an error, you might write this: -// -// func (o *MyTestObject) SavePersonDetails(firstname, lastname string, age int) (int, error) { -// args := o.Called(firstname, lastname, age) -// return args.Int(0), args.Error(1) -// } -// -// The Int, Error and Bool methods are examples of strongly typed getters that take the argument -// index position. 
Given this argument list: -// -// (12, true, "Something") -// -// You could read them out strongly typed like this: -// -// args.Int(0) -// args.Bool(1) -// args.String(2) -// -// For objects of your own type, use the generic Arguments.Get(index) method and make a type assertion: -// -// return args.Get(0).(*MyObject), args.Get(1).(*AnotherObjectOfMine) -// -// This may cause a panic if the object you are getting is nil (the type assertion will fail), in those -// cases you should check for nil first. -package mock diff --git a/vendor/github.com/stretchr/testify/mock/mock.go b/vendor/github.com/stretchr/testify/mock/mock.go deleted file mode 100644 index 213bde2ea..000000000 --- a/vendor/github.com/stretchr/testify/mock/mock.go +++ /dev/null @@ -1,1241 +0,0 @@ -package mock - -import ( - "errors" - "fmt" - "path" - "reflect" - "regexp" - "runtime" - "strings" - "sync" - "time" - - "github.com/davecgh/go-spew/spew" - "github.com/pmezard/go-difflib/difflib" - "github.com/stretchr/objx" - - "github.com/stretchr/testify/assert" -) - -// regex for GCCGO functions -var gccgoRE = regexp.MustCompile(`\.pN\d+_`) - -// TestingT is an interface wrapper around *testing.T -type TestingT interface { - Logf(format string, args ...interface{}) - Errorf(format string, args ...interface{}) - FailNow() -} - -/* - Call -*/ - -// Call represents a method call and is used for setting expectations, -// as well as recording activity. -type Call struct { - Parent *Mock - - // The name of the method that was or will be called. - Method string - - // Holds the arguments of the method. - Arguments Arguments - - // Holds the arguments that should be returned when - // this method is called. - ReturnArguments Arguments - - // Holds the caller info for the On() call - callerInfo []string - - // The number of times to return the return arguments when setting - // expectations. 0 means to always return the value. - Repeatability int - - // Amount of times this call has been called - totalCalls int - - // Call to this method can be optional - optional bool - - // Holds a channel that will be used to block the Return until it either - // receives a message or is closed. nil means it returns immediately. - WaitFor <-chan time.Time - - waitTime time.Duration - - // Holds a handler used to manipulate arguments content that are passed by - // reference. It's useful when mocking methods such as unmarshalers or - // decoders. - RunFn func(Arguments) - - // PanicMsg holds msg to be used to mock panic on the function call - // if the PanicMsg is set to a non nil string the function call will panic - // irrespective of other settings - PanicMsg *string - - // Calls which must be satisfied before this call can be - requires []*Call -} - -func newCall(parent *Mock, methodName string, callerInfo []string, methodArguments ...interface{}) *Call { - return &Call{ - Parent: parent, - Method: methodName, - Arguments: methodArguments, - ReturnArguments: make([]interface{}, 0), - callerInfo: callerInfo, - Repeatability: 0, - WaitFor: nil, - RunFn: nil, - PanicMsg: nil, - } -} - -func (c *Call) lock() { - c.Parent.mutex.Lock() -} - -func (c *Call) unlock() { - c.Parent.mutex.Unlock() -} - -// Return specifies the return arguments for the expectation. 
-// -// Mock.On("DoSomething").Return(errors.New("failed")) -func (c *Call) Return(returnArguments ...interface{}) *Call { - c.lock() - defer c.unlock() - - c.ReturnArguments = returnArguments - - return c -} - -// Panic specifies if the function call should fail and the panic message -// -// Mock.On("DoSomething").Panic("test panic") -func (c *Call) Panic(msg string) *Call { - c.lock() - defer c.unlock() - - c.PanicMsg = &msg - - return c -} - -// Once indicates that the mock should only return the value once. -// -// Mock.On("MyMethod", arg1, arg2).Return(returnArg1, returnArg2).Once() -func (c *Call) Once() *Call { - return c.Times(1) -} - -// Twice indicates that the mock should only return the value twice. -// -// Mock.On("MyMethod", arg1, arg2).Return(returnArg1, returnArg2).Twice() -func (c *Call) Twice() *Call { - return c.Times(2) -} - -// Times indicates that the mock should only return the indicated number -// of times. -// -// Mock.On("MyMethod", arg1, arg2).Return(returnArg1, returnArg2).Times(5) -func (c *Call) Times(i int) *Call { - c.lock() - defer c.unlock() - c.Repeatability = i - return c -} - -// WaitUntil sets the channel that will block the mock's return until its closed -// or a message is received. -// -// Mock.On("MyMethod", arg1, arg2).WaitUntil(time.After(time.Second)) -func (c *Call) WaitUntil(w <-chan time.Time) *Call { - c.lock() - defer c.unlock() - c.WaitFor = w - return c -} - -// After sets how long to block until the call returns -// -// Mock.On("MyMethod", arg1, arg2).After(time.Second) -func (c *Call) After(d time.Duration) *Call { - c.lock() - defer c.unlock() - c.waitTime = d - return c -} - -// Run sets a handler to be called before returning. It can be used when -// mocking a method (such as an unmarshaler) that takes a pointer to a struct and -// sets properties in such struct -// -// Mock.On("Unmarshal", AnythingOfType("*map[string]interface{}")).Return().Run(func(args Arguments) { -// arg := args.Get(0).(*map[string]interface{}) -// arg["foo"] = "bar" -// }) -func (c *Call) Run(fn func(args Arguments)) *Call { - c.lock() - defer c.unlock() - c.RunFn = fn - return c -} - -// Maybe allows the method call to be optional. Not calling an optional method -// will not cause an error while asserting expectations -func (c *Call) Maybe() *Call { - c.lock() - defer c.unlock() - c.optional = true - return c -} - -// On chains a new expectation description onto the mocked interface. This -// allows syntax like. -// -// Mock. -// On("MyMethod", 1).Return(nil). -// On("MyOtherMethod", 'a', 'b', 'c').Return(errors.New("Some Error")) -// -//go:noinline -func (c *Call) On(methodName string, arguments ...interface{}) *Call { - return c.Parent.On(methodName, arguments...) -} - -// Unset removes a mock handler from being called. -// -// test.On("func", mock.Anything).Unset() -func (c *Call) Unset() *Call { - var unlockOnce sync.Once - - for _, arg := range c.Arguments { - if v := reflect.ValueOf(arg); v.Kind() == reflect.Func { - panic(fmt.Sprintf("cannot use Func in expectations. 
Use mock.AnythingOfType(\"%T\")", arg)) - } - } - - c.lock() - defer unlockOnce.Do(c.unlock) - - foundMatchingCall := false - - // in-place filter slice for calls to be removed - iterate from 0'th to last skipping unnecessary ones - var index int // write index - for _, call := range c.Parent.ExpectedCalls { - if call.Method == c.Method { - _, diffCount := call.Arguments.Diff(c.Arguments) - if diffCount == 0 { - foundMatchingCall = true - // Remove from ExpectedCalls - just skip it - continue - } - } - c.Parent.ExpectedCalls[index] = call - index++ - } - // trim slice up to last copied index - c.Parent.ExpectedCalls = c.Parent.ExpectedCalls[:index] - - if !foundMatchingCall { - unlockOnce.Do(c.unlock) - c.Parent.fail("\n\nmock: Could not find expected call\n-----------------------------\n\n%s\n\n", - callString(c.Method, c.Arguments, true), - ) - } - - return c -} - -// NotBefore indicates that the mock should only be called after the referenced -// calls have been called as expected. The referenced calls may be from the -// same mock instance and/or other mock instances. -// -// Mock.On("Do").Return(nil).Notbefore( -// Mock.On("Init").Return(nil) -// ) -func (c *Call) NotBefore(calls ...*Call) *Call { - c.lock() - defer c.unlock() - - for _, call := range calls { - if call.Parent == nil { - panic("not before calls must be created with Mock.On()") - } - } - - c.requires = append(c.requires, calls...) - return c -} - -// Mock is the workhorse used to track activity on another object. -// For an example of its usage, refer to the "Example Usage" section at the top -// of this document. -type Mock struct { - // Represents the calls that are expected of - // an object. - ExpectedCalls []*Call - - // Holds the calls that were made to this mocked object. - Calls []Call - - // test is An optional variable that holds the test struct, to be used when an - // invalid mock call was made. - test TestingT - - // TestData holds any data that might be useful for testing. Testify ignores - // this data completely allowing you to do whatever you like with it. - testData objx.Map - - mutex sync.Mutex -} - -// String provides a %v format string for Mock. -// Note: this is used implicitly by Arguments.Diff if a Mock is passed. -// It exists because go's default %v formatting traverses the struct -// without acquiring the mutex, which is detected by go test -race. -func (m *Mock) String() string { - return fmt.Sprintf("%[1]T<%[1]p>", m) -} - -// TestData holds any data that might be useful for testing. Testify ignores -// this data completely allowing you to do whatever you like with it. -func (m *Mock) TestData() objx.Map { - if m.testData == nil { - m.testData = make(objx.Map) - } - - return m.testData -} - -/* - Setting expectations -*/ - -// Test sets the test struct variable of the mock object -func (m *Mock) Test(t TestingT) { - m.mutex.Lock() - defer m.mutex.Unlock() - m.test = t -} - -// fail fails the current test with the given formatted format and args. -// In case that a test was defined, it uses the test APIs for failing a test, -// otherwise it uses panic. -func (m *Mock) fail(format string, args ...interface{}) { - m.mutex.Lock() - defer m.mutex.Unlock() - - if m.test == nil { - panic(fmt.Sprintf(format, args...)) - } - m.test.Errorf(format, args...) - m.test.FailNow() -} - -// On starts a description of an expectation of the specified method -// being called. 
-// -// Mock.On("MyMethod", arg1, arg2) -func (m *Mock) On(methodName string, arguments ...interface{}) *Call { - for _, arg := range arguments { - if v := reflect.ValueOf(arg); v.Kind() == reflect.Func { - panic(fmt.Sprintf("cannot use Func in expectations. Use mock.AnythingOfType(\"%T\")", arg)) - } - } - - m.mutex.Lock() - defer m.mutex.Unlock() - c := newCall(m, methodName, assert.CallerInfo(), arguments...) - m.ExpectedCalls = append(m.ExpectedCalls, c) - return c -} - -// /* -// Recording and responding to activity -// */ - -func (m *Mock) findExpectedCall(method string, arguments ...interface{}) (int, *Call) { - var expectedCall *Call - - for i, call := range m.ExpectedCalls { - if call.Method == method { - _, diffCount := call.Arguments.Diff(arguments) - if diffCount == 0 { - expectedCall = call - if call.Repeatability > -1 { - return i, call - } - } - } - } - - return -1, expectedCall -} - -type matchCandidate struct { - call *Call - mismatch string - diffCount int -} - -func (c matchCandidate) isBetterMatchThan(other matchCandidate) bool { - if c.call == nil { - return false - } - if other.call == nil { - return true - } - - if c.diffCount > other.diffCount { - return false - } - if c.diffCount < other.diffCount { - return true - } - - if c.call.Repeatability > 0 && other.call.Repeatability <= 0 { - return true - } - return false -} - -func (m *Mock) findClosestCall(method string, arguments ...interface{}) (*Call, string) { - var bestMatch matchCandidate - - for _, call := range m.expectedCalls() { - if call.Method == method { - - errInfo, tempDiffCount := call.Arguments.Diff(arguments) - tempCandidate := matchCandidate{ - call: call, - mismatch: errInfo, - diffCount: tempDiffCount, - } - if tempCandidate.isBetterMatchThan(bestMatch) { - bestMatch = tempCandidate - } - } - } - - return bestMatch.call, bestMatch.mismatch -} - -func callString(method string, arguments Arguments, includeArgumentValues bool) string { - var argValsString string - if includeArgumentValues { - var argVals []string - for argIndex, arg := range arguments { - if _, ok := arg.(*FunctionalOptionsArgument); ok { - argVals = append(argVals, fmt.Sprintf("%d: %s", argIndex, arg)) - continue - } - argVals = append(argVals, fmt.Sprintf("%d: %#v", argIndex, arg)) - } - argValsString = fmt.Sprintf("\n\t\t%s", strings.Join(argVals, "\n\t\t")) - } - - return fmt.Sprintf("%s(%s)%s", method, arguments.String(), argValsString) -} - -// Called tells the mock object that a method has been called, and gets an array -// of arguments to return. Panics if the call is unexpected (i.e. not preceded by -// appropriate .On .Return() calls) -// If Call.WaitFor is set, blocks until the channel is closed or receives a message. -func (m *Mock) Called(arguments ...interface{}) Arguments { - // get the calling function's name - pc, _, _, ok := runtime.Caller(1) - if !ok { - panic("Couldn't get the caller information") - } - functionPath := runtime.FuncForPC(pc).Name() - // Next four lines are required to use GCCGO function naming conventions. - // For Ex: github_com_docker_libkv_store_mock.WatchTree.pN39_github_com_docker_libkv_store_mock.Mock - // uses interface information unlike golang github.com/docker/libkv/store/mock.(*Mock).WatchTree - // With GCCGO we need to remove interface information starting from pN
. - if gccgoRE.MatchString(functionPath) { - functionPath = gccgoRE.Split(functionPath, -1)[0] - } - parts := strings.Split(functionPath, ".") - functionName := parts[len(parts)-1] - return m.MethodCalled(functionName, arguments...) -} - -// MethodCalled tells the mock object that the given method has been called, and gets -// an array of arguments to return. Panics if the call is unexpected (i.e. not preceded -// by appropriate .On .Return() calls) -// If Call.WaitFor is set, blocks until the channel is closed or receives a message. -func (m *Mock) MethodCalled(methodName string, arguments ...interface{}) Arguments { - m.mutex.Lock() - // TODO: could combine expected and closes in single loop - found, call := m.findExpectedCall(methodName, arguments...) - - if found < 0 { - // expected call found, but it has already been called with repeatable times - if call != nil { - m.mutex.Unlock() - m.fail("\nassert: mock: The method has been called over %d times.\n\tEither do one more Mock.On(\"%s\").Return(...), or remove extra call.\n\tThis call was unexpected:\n\t\t%s\n\tat: %s", call.totalCalls, methodName, callString(methodName, arguments, true), assert.CallerInfo()) - } - // we have to fail here - because we don't know what to do - // as the return arguments. This is because: - // - // a) this is a totally unexpected call to this method, - // b) the arguments are not what was expected, or - // c) the developer has forgotten to add an accompanying On...Return pair. - closestCall, mismatch := m.findClosestCall(methodName, arguments...) - m.mutex.Unlock() - - if closestCall != nil { - m.fail("\n\nmock: Unexpected Method Call\n-----------------------------\n\n%s\n\nThe closest call I have is: \n\n%s\n\n%s\nDiff: %s", - callString(methodName, arguments, true), - callString(methodName, closestCall.Arguments, true), - diffArguments(closestCall.Arguments, arguments), - strings.TrimSpace(mismatch), - ) - } else { - m.fail("\nassert: mock: I don't know what to return because the method call was unexpected.\n\tEither do Mock.On(\"%s\").Return(...) 
first, or remove the %s() call.\n\tThis method was unexpected:\n\t\t%s\n\tat: %s", methodName, methodName, callString(methodName, arguments, true), assert.CallerInfo()) - } - } - - for _, requirement := range call.requires { - if satisfied, _ := requirement.Parent.checkExpectation(requirement); !satisfied { - m.mutex.Unlock() - m.fail("mock: Unexpected Method Call\n-----------------------------\n\n%s\n\nMust not be called before%s:\n\n%s", - callString(call.Method, call.Arguments, true), - func() (s string) { - if requirement.totalCalls > 0 { - s = " another call of" - } - if call.Parent != requirement.Parent { - s += " method from another mock instance" - } - return - }(), - callString(requirement.Method, requirement.Arguments, true), - ) - } - } - - if call.Repeatability == 1 { - call.Repeatability = -1 - } else if call.Repeatability > 1 { - call.Repeatability-- - } - call.totalCalls++ - - // add the call - m.Calls = append(m.Calls, *newCall(m, methodName, assert.CallerInfo(), arguments...)) - m.mutex.Unlock() - - // block if specified - if call.WaitFor != nil { - <-call.WaitFor - } else { - time.Sleep(call.waitTime) - } - - m.mutex.Lock() - panicMsg := call.PanicMsg - m.mutex.Unlock() - if panicMsg != nil { - panic(*panicMsg) - } - - m.mutex.Lock() - runFn := call.RunFn - m.mutex.Unlock() - - if runFn != nil { - runFn(arguments) - } - - m.mutex.Lock() - returnArgs := call.ReturnArguments - m.mutex.Unlock() - - return returnArgs -} - -/* - Assertions -*/ - -type assertExpectationiser interface { - AssertExpectations(TestingT) bool -} - -// AssertExpectationsForObjects asserts that everything specified with On and Return -// of the specified objects was in fact called as expected. -// -// Calls may have occurred in any order. -func AssertExpectationsForObjects(t TestingT, testObjects ...interface{}) bool { - if h, ok := t.(tHelper); ok { - h.Helper() - } - for _, obj := range testObjects { - if m, ok := obj.(*Mock); ok { - t.Logf("Deprecated mock.AssertExpectationsForObjects(myMock.Mock) use mock.AssertExpectationsForObjects(myMock)") - obj = m - } - m := obj.(assertExpectationiser) - if !m.AssertExpectations(t) { - t.Logf("Expectations didn't match for Mock: %+v", reflect.TypeOf(m)) - return false - } - } - return true -} - -// AssertExpectations asserts that everything specified with On and Return was -// in fact called as expected. Calls may have occurred in any order. 
-func (m *Mock) AssertExpectations(t TestingT) bool { - if s, ok := t.(interface{ Skipped() bool }); ok && s.Skipped() { - return true - } - if h, ok := t.(tHelper); ok { - h.Helper() - } - - m.mutex.Lock() - defer m.mutex.Unlock() - var failedExpectations int - - // iterate through each expectation - expectedCalls := m.expectedCalls() - for _, expectedCall := range expectedCalls { - satisfied, reason := m.checkExpectation(expectedCall) - if !satisfied { - failedExpectations++ - t.Logf(reason) - } - } - - if failedExpectations != 0 { - t.Errorf("FAIL: %d out of %d expectation(s) were met.\n\tThe code you are testing needs to make %d more call(s).\n\tat: %s", len(expectedCalls)-failedExpectations, len(expectedCalls), failedExpectations, assert.CallerInfo()) - } - - return failedExpectations == 0 -} - -func (m *Mock) checkExpectation(call *Call) (bool, string) { - if !call.optional && !m.methodWasCalled(call.Method, call.Arguments) && call.totalCalls == 0 { - return false, fmt.Sprintf("FAIL:\t%s(%s)\n\t\tat: %s", call.Method, call.Arguments.String(), call.callerInfo) - } - if call.Repeatability > 0 { - return false, fmt.Sprintf("FAIL:\t%s(%s)\n\t\tat: %s", call.Method, call.Arguments.String(), call.callerInfo) - } - return true, fmt.Sprintf("PASS:\t%s(%s)", call.Method, call.Arguments.String()) -} - -// AssertNumberOfCalls asserts that the method was called expectedCalls times. -func (m *Mock) AssertNumberOfCalls(t TestingT, methodName string, expectedCalls int) bool { - if h, ok := t.(tHelper); ok { - h.Helper() - } - m.mutex.Lock() - defer m.mutex.Unlock() - var actualCalls int - for _, call := range m.calls() { - if call.Method == methodName { - actualCalls++ - } - } - return assert.Equal(t, expectedCalls, actualCalls, fmt.Sprintf("Expected number of calls (%d) does not match the actual number of calls (%d).", expectedCalls, actualCalls)) -} - -// AssertCalled asserts that the method was called. -// It can produce a false result when an argument is a pointer type and the underlying value changed after calling the mocked method. -func (m *Mock) AssertCalled(t TestingT, methodName string, arguments ...interface{}) bool { - if h, ok := t.(tHelper); ok { - h.Helper() - } - m.mutex.Lock() - defer m.mutex.Unlock() - if !m.methodWasCalled(methodName, arguments) { - var calledWithArgs []string - for _, call := range m.calls() { - calledWithArgs = append(calledWithArgs, fmt.Sprintf("%v", call.Arguments)) - } - if len(calledWithArgs) == 0 { - return assert.Fail(t, "Should have called with given arguments", - fmt.Sprintf("Expected %q to have been called with:\n%v\nbut no actual calls happened", methodName, arguments)) - } - return assert.Fail(t, "Should have called with given arguments", - fmt.Sprintf("Expected %q to have been called with:\n%v\nbut actual calls were:\n %v", methodName, arguments, strings.Join(calledWithArgs, "\n"))) - } - return true -} - -// AssertNotCalled asserts that the method was not called. -// It can produce a false result when an argument is a pointer type and the underlying value changed after calling the mocked method. 
-func (m *Mock) AssertNotCalled(t TestingT, methodName string, arguments ...interface{}) bool { - if h, ok := t.(tHelper); ok { - h.Helper() - } - m.mutex.Lock() - defer m.mutex.Unlock() - if m.methodWasCalled(methodName, arguments) { - return assert.Fail(t, "Should not have called with given arguments", - fmt.Sprintf("Expected %q to not have been called with:\n%v\nbut actually it was.", methodName, arguments)) - } - return true -} - -// IsMethodCallable checking that the method can be called -// If the method was called more than `Repeatability` return false -func (m *Mock) IsMethodCallable(t TestingT, methodName string, arguments ...interface{}) bool { - if h, ok := t.(tHelper); ok { - h.Helper() - } - m.mutex.Lock() - defer m.mutex.Unlock() - - for _, v := range m.ExpectedCalls { - if v.Method != methodName { - continue - } - if len(arguments) != len(v.Arguments) { - continue - } - if v.Repeatability < v.totalCalls { - continue - } - if isArgsEqual(v.Arguments, arguments) { - return true - } - } - return false -} - -// isArgsEqual compares arguments -func isArgsEqual(expected Arguments, args []interface{}) bool { - if len(expected) != len(args) { - return false - } - for i, v := range args { - if !reflect.DeepEqual(expected[i], v) { - return false - } - } - return true -} - -func (m *Mock) methodWasCalled(methodName string, expected []interface{}) bool { - for _, call := range m.calls() { - if call.Method == methodName { - - _, differences := Arguments(expected).Diff(call.Arguments) - - if differences == 0 { - // found the expected call - return true - } - - } - } - // we didn't find the expected call - return false -} - -func (m *Mock) expectedCalls() []*Call { - return append([]*Call{}, m.ExpectedCalls...) -} - -func (m *Mock) calls() []Call { - return append([]Call{}, m.Calls...) -} - -/* - Arguments -*/ - -// Arguments holds an array of method arguments or return values. -type Arguments []interface{} - -const ( - // Anything is used in Diff and Assert when the argument being tested - // shouldn't be taken into consideration. - Anything = "mock.Anything" -) - -// AnythingOfTypeArgument contains the type of an argument -// for use when type checking. Used in Diff and Assert. -// -// Deprecated: this is an implementation detail that must not be used. Use [AnythingOfType] instead. -type AnythingOfTypeArgument = anythingOfTypeArgument - -// anythingOfTypeArgument is a string that contains the type of an argument -// for use when type checking. Used in Diff and Assert. -type anythingOfTypeArgument string - -// AnythingOfType returns a special value containing the -// name of the type to check for. The type name will be matched against the type name returned by [reflect.Type.String]. -// -// Used in Diff and Assert. -// -// For example: -// -// Assert(t, AnythingOfType("string"), AnythingOfType("int")) -func AnythingOfType(t string) AnythingOfTypeArgument { - return anythingOfTypeArgument(t) -} - -// IsTypeArgument is a struct that contains the type of an argument -// for use when type checking. This is an alternative to AnythingOfType. -// Used in Diff and Assert. -type IsTypeArgument struct { - t reflect.Type -} - -// IsType returns an IsTypeArgument object containing the type to check for. -// You can provide a zero-value of the type to check. This is an -// alternative to AnythingOfType. Used in Diff and Assert. 
-// -// For example: -// Assert(t, IsType(""), IsType(0)) -func IsType(t interface{}) *IsTypeArgument { - return &IsTypeArgument{t: reflect.TypeOf(t)} -} - -// FunctionalOptionsArgument is a struct that contains the type and value of an functional option argument -// for use when type checking. -type FunctionalOptionsArgument struct { - value interface{} -} - -// String returns the string representation of FunctionalOptionsArgument -func (f *FunctionalOptionsArgument) String() string { - var name string - tValue := reflect.ValueOf(f.value) - if tValue.Len() > 0 { - name = "[]" + reflect.TypeOf(tValue.Index(0).Interface()).String() - } - - return strings.Replace(fmt.Sprintf("%#v", f.value), "[]interface {}", name, 1) -} - -// FunctionalOptions returns an FunctionalOptionsArgument object containing the functional option type -// and the values to check of -// -// For example: -// Assert(t, FunctionalOptions("[]foo.FunctionalOption", foo.Opt1(), foo.Opt2())) -func FunctionalOptions(value ...interface{}) *FunctionalOptionsArgument { - return &FunctionalOptionsArgument{ - value: value, - } -} - -// argumentMatcher performs custom argument matching, returning whether or -// not the argument is matched by the expectation fixture function. -type argumentMatcher struct { - // fn is a function which accepts one argument, and returns a bool. - fn reflect.Value -} - -func (f argumentMatcher) Matches(argument interface{}) bool { - expectType := f.fn.Type().In(0) - expectTypeNilSupported := false - switch expectType.Kind() { - case reflect.Interface, reflect.Chan, reflect.Func, reflect.Map, reflect.Slice, reflect.Ptr: - expectTypeNilSupported = true - } - - argType := reflect.TypeOf(argument) - var arg reflect.Value - if argType == nil { - arg = reflect.New(expectType).Elem() - } else { - arg = reflect.ValueOf(argument) - } - - if argType == nil && !expectTypeNilSupported { - panic(errors.New("attempting to call matcher with nil for non-nil expected type")) - } - if argType == nil || argType.AssignableTo(expectType) { - result := f.fn.Call([]reflect.Value{arg}) - return result[0].Bool() - } - return false -} - -func (f argumentMatcher) String() string { - return fmt.Sprintf("func(%s) bool", f.fn.Type().In(0).String()) -} - -// MatchedBy can be used to match a mock call based on only certain properties -// from a complex struct or some calculation. It takes a function that will be -// evaluated with the called argument and will return true when there's a match -// and false otherwise. -// -// Example: -// m.On("Do", MatchedBy(func(req *http.Request) bool { return req.Host == "example.com" })) -// -// |fn|, must be a function accepting a single argument (of the expected type) -// which returns a bool. If |fn| doesn't match the required signature, -// MatchedBy() panics. -func MatchedBy(fn interface{}) argumentMatcher { - fnType := reflect.TypeOf(fn) - - if fnType.Kind() != reflect.Func { - panic(fmt.Sprintf("assert: arguments: %s is not a func", fn)) - } - if fnType.NumIn() != 1 { - panic(fmt.Sprintf("assert: arguments: %s does not take exactly one argument", fn)) - } - if fnType.NumOut() != 1 || fnType.Out(0).Kind() != reflect.Bool { - panic(fmt.Sprintf("assert: arguments: %s does not return a bool", fn)) - } - - return argumentMatcher{fn: reflect.ValueOf(fn)} -} - -// Get Returns the argument at the specified index. 
-func (args Arguments) Get(index int) interface{} { - if index+1 > len(args) { - panic(fmt.Sprintf("assert: arguments: Cannot call Get(%d) because there are %d argument(s).", index, len(args))) - } - return args[index] -} - -// Is gets whether the objects match the arguments specified. -func (args Arguments) Is(objects ...interface{}) bool { - for i, obj := range args { - if obj != objects[i] { - return false - } - } - return true -} - -// Diff gets a string describing the differences between the arguments -// and the specified objects. -// -// Returns the diff string and number of differences found. -func (args Arguments) Diff(objects []interface{}) (string, int) { - // TODO: could return string as error and nil for No difference - - output := "\n" - var differences int - - maxArgCount := len(args) - if len(objects) > maxArgCount { - maxArgCount = len(objects) - } - - for i := 0; i < maxArgCount; i++ { - var actual, expected interface{} - var actualFmt, expectedFmt string - - if len(objects) <= i { - actual = "(Missing)" - actualFmt = "(Missing)" - } else { - actual = objects[i] - actualFmt = fmt.Sprintf("(%[1]T=%[1]v)", actual) - } - - if len(args) <= i { - expected = "(Missing)" - expectedFmt = "(Missing)" - } else { - expected = args[i] - expectedFmt = fmt.Sprintf("(%[1]T=%[1]v)", expected) - } - - if matcher, ok := expected.(argumentMatcher); ok { - var matches bool - func() { - defer func() { - if r := recover(); r != nil { - actualFmt = fmt.Sprintf("panic in argument matcher: %v", r) - } - }() - matches = matcher.Matches(actual) - }() - if matches { - output = fmt.Sprintf("%s\t%d: PASS: %s matched by %s\n", output, i, actualFmt, matcher) - } else { - differences++ - output = fmt.Sprintf("%s\t%d: FAIL: %s not matched by %s\n", output, i, actualFmt, matcher) - } - } else { - switch expected := expected.(type) { - case anythingOfTypeArgument: - // type checking - if reflect.TypeOf(actual).Name() != string(expected) && reflect.TypeOf(actual).String() != string(expected) { - // not match - differences++ - output = fmt.Sprintf("%s\t%d: FAIL: type %s != type %s - %s\n", output, i, expected, reflect.TypeOf(actual).Name(), actualFmt) - } - case *IsTypeArgument: - actualT := reflect.TypeOf(actual) - if actualT != expected.t { - differences++ - output = fmt.Sprintf("%s\t%d: FAIL: type %s != type %s - %s\n", output, i, expected.t.Name(), actualT.Name(), actualFmt) - } - case *FunctionalOptionsArgument: - t := expected.value - - var name string - tValue := reflect.ValueOf(t) - if tValue.Len() > 0 { - name = "[]" + reflect.TypeOf(tValue.Index(0).Interface()).String() - } - - tName := reflect.TypeOf(t).Name() - if name != reflect.TypeOf(actual).String() && tValue.Len() != 0 { - differences++ - output = fmt.Sprintf("%s\t%d: FAIL: type %s != type %s - %s\n", output, i, tName, reflect.TypeOf(actual).Name(), actualFmt) - } else { - if ef, af := assertOpts(t, actual); ef == "" && af == "" { - // match - output = fmt.Sprintf("%s\t%d: PASS: %s == %s\n", output, i, tName, tName) - } else { - // not match - differences++ - output = fmt.Sprintf("%s\t%d: FAIL: %s != %s\n", output, i, af, ef) - } - } - - default: - if assert.ObjectsAreEqual(expected, Anything) || assert.ObjectsAreEqual(actual, Anything) || assert.ObjectsAreEqual(actual, expected) { - // match - output = fmt.Sprintf("%s\t%d: PASS: %s == %s\n", output, i, actualFmt, expectedFmt) - } else { - // not match - differences++ - output = fmt.Sprintf("%s\t%d: FAIL: %s != %s\n", output, i, actualFmt, expectedFmt) - } - } - } - - } - - if differences == 
0 { - return "No differences.", differences - } - - return output, differences -} - -// Assert compares the arguments with the specified objects and fails if -// they do not exactly match. -func (args Arguments) Assert(t TestingT, objects ...interface{}) bool { - if h, ok := t.(tHelper); ok { - h.Helper() - } - - // get the differences - diff, diffCount := args.Diff(objects) - - if diffCount == 0 { - return true - } - - // there are differences... report them... - t.Logf(diff) - t.Errorf("%sArguments do not match.", assert.CallerInfo()) - - return false -} - -// String gets the argument at the specified index. Panics if there is no argument, or -// if the argument is of the wrong type. -// -// If no index is provided, String() returns a complete string representation -// of the arguments. -func (args Arguments) String(indexOrNil ...int) string { - if len(indexOrNil) == 0 { - // normal String() method - return a string representation of the args - var argsStr []string - for _, arg := range args { - argsStr = append(argsStr, fmt.Sprintf("%T", arg)) // handles nil nicely - } - return strings.Join(argsStr, ",") - } else if len(indexOrNil) == 1 { - // Index has been specified - get the argument at that index - index := indexOrNil[0] - var s string - var ok bool - if s, ok = args.Get(index).(string); !ok { - panic(fmt.Sprintf("assert: arguments: String(%d) failed because object wasn't correct type: %s", index, args.Get(index))) - } - return s - } - - panic(fmt.Sprintf("assert: arguments: Wrong number of arguments passed to String. Must be 0 or 1, not %d", len(indexOrNil))) -} - -// Int gets the argument at the specified index. Panics if there is no argument, or -// if the argument is of the wrong type. -func (args Arguments) Int(index int) int { - var s int - var ok bool - if s, ok = args.Get(index).(int); !ok { - panic(fmt.Sprintf("assert: arguments: Int(%d) failed because object wasn't correct type: %v", index, args.Get(index))) - } - return s -} - -// Error gets the argument at the specified index. Panics if there is no argument, or -// if the argument is of the wrong type. -func (args Arguments) Error(index int) error { - obj := args.Get(index) - var s error - var ok bool - if obj == nil { - return nil - } - if s, ok = obj.(error); !ok { - panic(fmt.Sprintf("assert: arguments: Error(%d) failed because object wasn't correct type: %v", index, args.Get(index))) - } - return s -} - -// Bool gets the argument at the specified index. Panics if there is no argument, or -// if the argument is of the wrong type. -func (args Arguments) Bool(index int) bool { - var s bool - var ok bool - if s, ok = args.Get(index).(bool); !ok { - panic(fmt.Sprintf("assert: arguments: Bool(%d) failed because object wasn't correct type: %v", index, args.Get(index))) - } - return s -} - -func typeAndKind(v interface{}) (reflect.Type, reflect.Kind) { - t := reflect.TypeOf(v) - k := t.Kind() - - if k == reflect.Ptr { - t = t.Elem() - k = t.Kind() - } - return t, k -} - -func diffArguments(expected Arguments, actual Arguments) string { - if len(expected) != len(actual) { - return fmt.Sprintf("Provided %v arguments, mocked for %v arguments", len(expected), len(actual)) - } - - for x := range expected { - if diffString := diff(expected[x], actual[x]); diffString != "" { - return fmt.Sprintf("Difference found in argument %v:\n\n%s", x, diffString) - } - } - - return "" -} - -// diff returns a diff of both values as long as both are of the same type and -// are a struct, map, slice or array. Otherwise it returns an empty string. 
-func diff(expected interface{}, actual interface{}) string { - if expected == nil || actual == nil { - return "" - } - - et, ek := typeAndKind(expected) - at, _ := typeAndKind(actual) - - if et != at { - return "" - } - - if ek != reflect.Struct && ek != reflect.Map && ek != reflect.Slice && ek != reflect.Array { - return "" - } - - e := spewConfig.Sdump(expected) - a := spewConfig.Sdump(actual) - - diff, _ := difflib.GetUnifiedDiffString(difflib.UnifiedDiff{ - A: difflib.SplitLines(e), - B: difflib.SplitLines(a), - FromFile: "Expected", - FromDate: "", - ToFile: "Actual", - ToDate: "", - Context: 1, - }) - - return diff -} - -var spewConfig = spew.ConfigState{ - Indent: " ", - DisablePointerAddresses: true, - DisableCapacities: true, - SortKeys: true, -} - -type tHelper interface { - Helper() -} - -func assertOpts(expected, actual interface{}) (expectedFmt, actualFmt string) { - expectedOpts := reflect.ValueOf(expected) - actualOpts := reflect.ValueOf(actual) - var expectedNames []string - for i := 0; i < expectedOpts.Len(); i++ { - expectedNames = append(expectedNames, funcName(expectedOpts.Index(i).Interface())) - } - var actualNames []string - for i := 0; i < actualOpts.Len(); i++ { - actualNames = append(actualNames, funcName(actualOpts.Index(i).Interface())) - } - if !assert.ObjectsAreEqual(expectedNames, actualNames) { - expectedFmt = fmt.Sprintf("%v", expectedNames) - actualFmt = fmt.Sprintf("%v", actualNames) - return - } - - for i := 0; i < expectedOpts.Len(); i++ { - expectedOpt := expectedOpts.Index(i).Interface() - actualOpt := actualOpts.Index(i).Interface() - - expectedFunc := expectedNames[i] - actualFunc := actualNames[i] - if expectedFunc != actualFunc { - expectedFmt = expectedFunc - actualFmt = actualFunc - return - } - - ot := reflect.TypeOf(expectedOpt) - var expectedValues []reflect.Value - var actualValues []reflect.Value - if ot.NumIn() == 0 { - return - } - - for i := 0; i < ot.NumIn(); i++ { - vt := ot.In(i).Elem() - expectedValues = append(expectedValues, reflect.New(vt)) - actualValues = append(actualValues, reflect.New(vt)) - } - - reflect.ValueOf(expectedOpt).Call(expectedValues) - reflect.ValueOf(actualOpt).Call(actualValues) - - for i := 0; i < ot.NumIn(); i++ { - if !assert.ObjectsAreEqual(expectedValues[i].Interface(), actualValues[i].Interface()) { - expectedFmt = fmt.Sprintf("%s %+v", expectedNames[i], expectedValues[i].Interface()) - actualFmt = fmt.Sprintf("%s %+v", expectedNames[i], actualValues[i].Interface()) - return - } - } - } - - return "", "" -} - -func funcName(opt interface{}) string { - n := runtime.FuncForPC(reflect.ValueOf(opt).Pointer()).Name() - return strings.TrimSuffix(path.Base(n), path.Ext(n)) -} diff --git a/vendor/github.com/subosito/gotenv/.env b/vendor/github.com/subosito/gotenv/.env deleted file mode 100644 index 6405eca71..000000000 --- a/vendor/github.com/subosito/gotenv/.env +++ /dev/null @@ -1 +0,0 @@ -HELLO=world diff --git a/vendor/github.com/subosito/gotenv/.env.invalid b/vendor/github.com/subosito/gotenv/.env.invalid deleted file mode 100644 index 016d5e0ce..000000000 --- a/vendor/github.com/subosito/gotenv/.env.invalid +++ /dev/null @@ -1 +0,0 @@ -lol$wut diff --git a/vendor/github.com/subosito/gotenv/.gitignore b/vendor/github.com/subosito/gotenv/.gitignore deleted file mode 100644 index 7db37c1db..000000000 --- a/vendor/github.com/subosito/gotenv/.gitignore +++ /dev/null @@ -1,4 +0,0 @@ -*.test -*.out -annotate.json -profile.cov diff --git a/vendor/github.com/subosito/gotenv/.golangci.yaml 
b/vendor/github.com/subosito/gotenv/.golangci.yaml deleted file mode 100644 index 8c82a762e..000000000 --- a/vendor/github.com/subosito/gotenv/.golangci.yaml +++ /dev/null @@ -1,7 +0,0 @@ -# Options for analysis running. -run: - timeout: 1m - -linters-settings: - gofmt: - simplify: true diff --git a/vendor/github.com/subosito/gotenv/CHANGELOG.md b/vendor/github.com/subosito/gotenv/CHANGELOG.md deleted file mode 100644 index c4fe7d326..000000000 --- a/vendor/github.com/subosito/gotenv/CHANGELOG.md +++ /dev/null @@ -1,105 +0,0 @@ -# Changelog - -## [1.5.0] - 2023-08-15 - -### Fixed - -- Use io.Reader instead of custom Reader - -## [1.5.0] - 2023-08-15 - -### Added - -- Support for reading UTF16 files - -### Fixed - -- Scanner error handling -- Reader error handling - -## [1.4.2] - 2023-01-11 - -### Fixed - -- Env var initialization - -### Changed - -- More consitent line splitting - -## [1.4.1] - 2022-08-23 - -### Fixed - -- Missing file close - -### Changed - -- Updated dependencies - -## [1.4.0] - 2022-06-02 - -### Added - -- Add `Marshal` and `Unmarshal` helpers - -### Changed - -- The CI will now run a linter and the tests on PRs. - -## [1.3.0] - 2022-05-23 - -### Added - -- Support = within double-quoted strings -- Add support for multiline values - -### Changed - -- `OverLoad` prefer environment variables over local variables - -## [1.2.0] - 2019-08-03 - -### Added - -- Add `Must` helper to raise an error as panic. It can be used with `Load` and `OverLoad`. -- Add more tests to be 100% coverage. -- Add CHANGELOG -- Add more OS for the test: OSX and Windows - -### Changed - -- Reduce complexity and improve source code for having `A+` score in [goreportcard](https://goreportcard.com/report/github.com/subosito/gotenv). -- Updated README with mentions to all available functions - -### Removed - -- Remove `ErrFormat` -- Remove `MustLoad` and `MustOverload`, replaced with `Must` helper. - -## [1.1.1] - 2018-06-05 - -### Changed - -- Replace `os.Getenv` with `os.LookupEnv` to ensure that the environment variable is not set, by [radding](https://github.com/radding) - -## [1.1.0] - 2017-03-20 - -### Added - -- Supports carriage return in env -- Handle files with UTF-8 BOM - -### Changed - -- Whitespace handling - -### Fixed - -- Incorrect variable expansion -- Handling escaped '$' characters - -## [1.0.0] - 2014-10-05 - -First stable release. - diff --git a/vendor/github.com/subosito/gotenv/LICENSE b/vendor/github.com/subosito/gotenv/LICENSE deleted file mode 100644 index f64ccaedc..000000000 --- a/vendor/github.com/subosito/gotenv/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -The MIT License (MIT) - -Copyright (c) 2013 Alif Rachmawadi - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. diff --git a/vendor/github.com/subosito/gotenv/README.md b/vendor/github.com/subosito/gotenv/README.md deleted file mode 100644 index fc9616e3b..000000000 --- a/vendor/github.com/subosito/gotenv/README.md +++ /dev/null @@ -1,129 +0,0 @@ -# gotenv - -[![Build Status](https://github.com/subosito/gotenv/workflows/Go%20workflow/badge.svg)](https://github.com/subosito/gotenv/actions) -[![Coverage Status](https://badgen.net/codecov/c/github/subosito/gotenv)](https://codecov.io/gh/subosito/gotenv) -[![Go Report Card](https://goreportcard.com/badge/github.com/subosito/gotenv)](https://goreportcard.com/report/github.com/subosito/gotenv) -[![GoDoc](https://godoc.org/github.com/subosito/gotenv?status.svg)](https://godoc.org/github.com/subosito/gotenv) - -Load environment variables from `.env` or `io.Reader` in Go. - -## Usage - -Put the gotenv package on your `import` statement: - -```go -import "github.com/subosito/gotenv" -``` - -To modify your app environment variables, `gotenv` expose 2 main functions: - -- `gotenv.Load` -- `gotenv.Apply` - -By default, `gotenv.Load` will look for a file called `.env` in the current working directory. - -Behind the scene, it will then load `.env` file and export the valid variables to the environment variables. Make sure you call the method as soon as possible to ensure it loads all variables, say, put it on `init()` function. - -Once loaded you can use `os.Getenv()` to get the value of the variable. - -Let's say you have `.env` file: - -```sh -APP_ID=1234567 -APP_SECRET=abcdef -``` - -Here's the example of your app: - -```go -package main - -import ( - "github.com/subosito/gotenv" - "log" - "os" -) - -func init() { - gotenv.Load() -} - -func main() { - log.Println(os.Getenv("APP_ID")) // "1234567" - log.Println(os.Getenv("APP_SECRET")) // "abcdef" -} -``` - -You can also load other than `.env` file if you wish. Just supply filenames when calling `Load()`. It will load them in order and the first value set for a variable will win.: - -```go -gotenv.Load(".env.production", "credentials") -``` - -While `gotenv.Load` loads entries from `.env` file, `gotenv.Apply` allows you to use any `io.Reader`: - -```go -gotenv.Apply(strings.NewReader("APP_ID=1234567")) - -log.Println(os.Getenv("APP_ID")) -// Output: "1234567" -``` - -Both `gotenv.Load` and `gotenv.Apply` **DO NOT** overrides existing environment variables. If you want to override existing ones, you can see section below. - -### Environment Overrides - -Besides above functions, `gotenv` also provides another functions that overrides existing: - -- `gotenv.OverLoad` -- `gotenv.OverApply` - -Here's the example of this overrides behavior: - -```go -os.Setenv("HELLO", "world") - -// NOTE: using Apply existing value will be reserved -gotenv.Apply(strings.NewReader("HELLO=universe")) -fmt.Println(os.Getenv("HELLO")) -// Output: "world" - -// NOTE: using OverApply existing value will be overridden -gotenv.OverApply(strings.NewReader("HELLO=universe")) -fmt.Println(os.Getenv("HELLO")) -// Output: "universe" -``` - -### Throw a Panic - -Both `gotenv.Load` and `gotenv.OverLoad` returns an error on something wrong occurred, like your env file is not exist, and so on. 
To make it easier to use, `gotenv` also provides `gotenv.Must` helper, to let it panic when an error returned. - -```go -err := gotenv.Load(".env-is-not-exist") -fmt.Println("error", err) -// error: open .env-is-not-exist: no such file or directory - -gotenv.Must(gotenv.Load, ".env-is-not-exist") -// it will throw a panic -// panic: open .env-is-not-exist: no such file or directory -``` - -### Another Scenario - -Just in case you want to parse environment variables from any `io.Reader`, gotenv keeps its `Parse` and `StrictParse` function as public API so you can use that. - -```go -// import "strings" - -pairs := gotenv.Parse(strings.NewReader("FOO=test\nBAR=$FOO")) -// gotenv.Env{"FOO": "test", "BAR": "test"} - -pairs, err := gotenv.StrictParse(strings.NewReader(`FOO="bar"`)) -// gotenv.Env{"FOO": "bar"} -``` - -`Parse` ignores invalid lines and returns `Env` of valid environment variables, while `StrictParse` returns an error for invalid lines. - -## Notes - -The gotenv package is a Go port of [`dotenv`](https://github.com/bkeepers/dotenv) project with some additions made for Go. For general features, it aims to be compatible as close as possible. diff --git a/vendor/github.com/subosito/gotenv/gotenv.go b/vendor/github.com/subosito/gotenv/gotenv.go deleted file mode 100644 index 1191d3587..000000000 --- a/vendor/github.com/subosito/gotenv/gotenv.go +++ /dev/null @@ -1,409 +0,0 @@ -// Package gotenv provides functionality to dynamically load the environment variables -package gotenv - -import ( - "bufio" - "bytes" - "fmt" - "io" - "os" - "path/filepath" - "regexp" - "sort" - "strconv" - "strings" - - "golang.org/x/text/encoding/unicode" - "golang.org/x/text/transform" -) - -const ( - // Pattern for detecting valid line format - linePattern = `\A\s*(?:export\s+)?([\w\.]+)(?:\s*=\s*|:\s+?)('(?:\'|[^'])*'|"(?:\"|[^"])*"|[^#\n]+)?\s*(?:\s*\#.*)?\z` - - // Pattern for detecting valid variable within a value - variablePattern = `(\\)?(\$)(\{?([A-Z0-9_]+)?\}?)` -) - -// Byte order mark character -var ( - bomUTF8 = []byte("\xEF\xBB\xBF") - bomUTF16LE = []byte("\xFF\xFE") - bomUTF16BE = []byte("\xFE\xFF") -) - -// Env holds key/value pair of valid environment variable -type Env map[string]string - -// Load is a function to load a file or multiple files and then export the valid variables into environment variables if they do not exist. -// When it's called with no argument, it will load `.env` file on the current path and set the environment variables. -// Otherwise, it will loop over the filenames parameter and set the proper environment variables. -func Load(filenames ...string) error { - return loadenv(false, filenames...) -} - -// OverLoad is a function to load a file or multiple files and then export and override the valid variables into environment variables. -func OverLoad(filenames ...string) error { - return loadenv(true, filenames...) -} - -// Must is wrapper function that will panic when supplied function returns an error. -func Must(fn func(filenames ...string) error, filenames ...string) { - if err := fn(filenames...); err != nil { - panic(err.Error()) - } -} - -// Apply is a function to load an io Reader then export the valid variables into environment variables if they do not exist. -func Apply(r io.Reader) error { - return parset(r, false) -} - -// OverApply is a function to load an io Reader then export and override the valid variables into environment variables. 
-func OverApply(r io.Reader) error { - return parset(r, true) -} - -func loadenv(override bool, filenames ...string) error { - if len(filenames) == 0 { - filenames = []string{".env"} - } - - for _, filename := range filenames { - f, err := os.Open(filename) - if err != nil { - return err - } - - err = parset(f, override) - f.Close() - if err != nil { - return err - } - } - - return nil -} - -// parse and set :) -func parset(r io.Reader, override bool) error { - env, err := strictParse(r, override) - if err != nil { - return err - } - - for key, val := range env { - setenv(key, val, override) - } - - return nil -} - -func setenv(key, val string, override bool) { - if override { - os.Setenv(key, val) - } else { - if _, present := os.LookupEnv(key); !present { - os.Setenv(key, val) - } - } -} - -// Parse is a function to parse line by line any io.Reader supplied and returns the valid Env key/value pair of valid variables. -// It expands the value of a variable from the environment variable but does not set the value to the environment itself. -// This function is skipping any invalid lines and only processing the valid one. -func Parse(r io.Reader) Env { - env, _ := strictParse(r, false) - return env -} - -// StrictParse is a function to parse line by line any io.Reader supplied and returns the valid Env key/value pair of valid variables. -// It expands the value of a variable from the environment variable but does not set the value to the environment itself. -// This function is returning an error if there are any invalid lines. -func StrictParse(r io.Reader) (Env, error) { - return strictParse(r, false) -} - -// Read is a function to parse a file line by line and returns the valid Env key/value pair of valid variables. -// It expands the value of a variable from the environment variable but does not set the value to the environment itself. -// This function is skipping any invalid lines and only processing the valid one. -func Read(filename string) (Env, error) { - f, err := os.Open(filename) - if err != nil { - return nil, err - } - defer f.Close() - return strictParse(f, false) -} - -// Unmarshal reads a string line by line and returns the valid Env key/value pair of valid variables. -// It expands the value of a variable from the environment variable but does not set the value to the environment itself. -// This function is returning an error if there are any invalid lines. -func Unmarshal(str string) (Env, error) { - return strictParse(strings.NewReader(str), false) -} - -// Marshal outputs the given environment as a env file. -// Variables will be sorted by name. -func Marshal(env Env) (string, error) { - lines := make([]string, 0, len(env)) - for k, v := range env { - if d, err := strconv.Atoi(v); err == nil { - lines = append(lines, fmt.Sprintf(`%s=%d`, k, d)) - } else { - lines = append(lines, fmt.Sprintf(`%s=%q`, k, v)) - } - } - sort.Strings(lines) - return strings.Join(lines, "\n"), nil -} - -// Write serializes the given environment and writes it to a file -func Write(env Env, filename string) error { - content, err := Marshal(env) - if err != nil { - return err - } - // ensure the path exists - if err := os.MkdirAll(filepath.Dir(filename), 0o775); err != nil { - return err - } - // create or truncate the file - file, err := os.Create(filename) - if err != nil { - return err - } - defer file.Close() - _, err = file.WriteString(content + "\n") - if err != nil { - return err - } - - return file.Sync() -} - -// splitLines is a valid SplitFunc for a bufio.Scanner. 
It will split lines on CR ('\r'), LF ('\n') or CRLF (any of the three sequences). -// If a CR is immediately followed by a LF, it is treated as a CRLF (one single line break). -func splitLines(data []byte, atEOF bool) (advance int, token []byte, err error) { - if atEOF && len(data) == 0 { - return 0, nil, bufio.ErrFinalToken - } - - idx := bytes.IndexAny(data, "\r\n") - switch { - case atEOF && idx < 0: - return len(data), data, bufio.ErrFinalToken - - case idx < 0: - return 0, nil, nil - } - - // consume CR or LF - eol := idx + 1 - // detect CRLF - if len(data) > eol && data[eol-1] == '\r' && data[eol] == '\n' { - eol++ - } - - return eol, data[:idx], nil -} - -func strictParse(r io.Reader, override bool) (Env, error) { - env := make(Env) - - buf := new(bytes.Buffer) - tee := io.TeeReader(r, buf) - - // There can be a maximum of 3 BOM bytes. - bomByteBuffer := make([]byte, 3) - _, err := tee.Read(bomByteBuffer) - if err != nil && err != io.EOF { - return env, err - } - - z := io.MultiReader(buf, r) - - // We chooes a different scanner depending on file encoding. - var scanner *bufio.Scanner - - if bytes.HasPrefix(bomByteBuffer, bomUTF8) { - scanner = bufio.NewScanner(transform.NewReader(z, unicode.UTF8BOM.NewDecoder())) - } else if bytes.HasPrefix(bomByteBuffer, bomUTF16LE) { - scanner = bufio.NewScanner(transform.NewReader(z, unicode.UTF16(unicode.LittleEndian, unicode.ExpectBOM).NewDecoder())) - } else if bytes.HasPrefix(bomByteBuffer, bomUTF16BE) { - scanner = bufio.NewScanner(transform.NewReader(z, unicode.UTF16(unicode.BigEndian, unicode.ExpectBOM).NewDecoder())) - } else { - scanner = bufio.NewScanner(z) - } - - scanner.Split(splitLines) - - for scanner.Scan() { - if err := scanner.Err(); err != nil { - return env, err - } - - line := strings.TrimSpace(scanner.Text()) - if line == "" || line[0] == '#' { - continue - } - - quote := "" - // look for the delimiter character - idx := strings.Index(line, "=") - if idx == -1 { - idx = strings.Index(line, ":") - } - // look for a quote character - if idx > 0 && idx < len(line)-1 { - val := strings.TrimSpace(line[idx+1:]) - if val[0] == '"' || val[0] == '\'' { - quote = val[:1] - // look for the closing quote character within the same line - idx = strings.LastIndex(strings.TrimSpace(val[1:]), quote) - if idx >= 0 && val[idx] != '\\' { - quote = "" - } - } - } - // look for the closing quote character - for quote != "" && scanner.Scan() { - l := scanner.Text() - line += "\n" + l - idx := strings.LastIndex(l, quote) - if idx > 0 && l[idx-1] == '\\' { - // foud a matching quote character but it's escaped - continue - } - if idx >= 0 { - // foud a matching quote - quote = "" - } - } - - if quote != "" { - return env, fmt.Errorf("missing quotes") - } - - err := parseLine(line, env, override) - if err != nil { - return env, err - } - } - - return env, scanner.Err() -} - -var ( - lineRgx = regexp.MustCompile(linePattern) - unescapeRgx = regexp.MustCompile(`\\([^$])`) - varRgx = regexp.MustCompile(variablePattern) -) - -func parseLine(s string, env Env, override bool) error { - rm := lineRgx.FindStringSubmatch(s) - - if len(rm) == 0 { - return checkFormat(s, env) - } - - key := strings.TrimSpace(rm[1]) - val := strings.TrimSpace(rm[2]) - - var hsq, hdq bool - - // check if the value is quoted - if l := len(val); l >= 2 { - l -= 1 - // has double quotes - hdq = val[0] == '"' && val[l] == '"' - // has single quotes - hsq = val[0] == '\'' && val[l] == '\'' - - // remove quotes '' or "" - if hsq || hdq { - val = val[1:l] - } - } - - if hdq { - val = 
strings.ReplaceAll(val, `\n`, "\n") - val = strings.ReplaceAll(val, `\r`, "\r") - - // Unescape all characters except $ so variables can be escaped properly - val = unescapeRgx.ReplaceAllString(val, "$1") - } - - if !hsq { - fv := func(s string) string { - return varReplacement(s, hsq, env, override) - } - val = varRgx.ReplaceAllStringFunc(val, fv) - } - - env[key] = val - return nil -} - -func parseExport(st string, env Env) error { - if strings.HasPrefix(st, "export") { - vs := strings.SplitN(st, " ", 2) - - if len(vs) > 1 { - if _, ok := env[vs[1]]; !ok { - return fmt.Errorf("line `%s` has an unset variable", st) - } - } - } - - return nil -} - -var varNameRgx = regexp.MustCompile(`(\$)(\{?([A-Z0-9_]+)\}?)`) - -func varReplacement(s string, hsq bool, env Env, override bool) string { - if s == "" { - return s - } - - if s[0] == '\\' { - // the dollar sign is escaped - return s[1:] - } - - if hsq { - return s - } - - mn := varNameRgx.FindStringSubmatch(s) - - if len(mn) == 0 { - return s - } - - v := mn[3] - - if replace, ok := os.LookupEnv(v); ok && !override { - return replace - } - - if replace, ok := env[v]; ok { - return replace - } - - return os.Getenv(v) -} - -func checkFormat(s string, env Env) error { - st := strings.TrimSpace(s) - - if st == "" || st[0] == '#' { - return nil - } - - if err := parseExport(st, env); err != nil { - return err - } - - return fmt.Errorf("line `%s` doesn't match format", s) -} diff --git a/vendor/github.com/t-yuki/gocover-cobertura/.travis.yml b/vendor/github.com/t-yuki/gocover-cobertura/.travis.yml deleted file mode 100644 index f17bb6169..000000000 --- a/vendor/github.com/t-yuki/gocover-cobertura/.travis.yml +++ /dev/null @@ -1,12 +0,0 @@ -language: go -go: - - 1.6 - - 1.7 - - 1.8 - - tip - -sudo: false -before_install: - - go get github.com/mattn/goveralls -script: - - $GOPATH/bin/goveralls -service=travis-ci diff --git a/vendor/github.com/t-yuki/gocover-cobertura/LICENSE b/vendor/github.com/t-yuki/gocover-cobertura/LICENSE deleted file mode 100644 index 7ec1b3d85..000000000 --- a/vendor/github.com/t-yuki/gocover-cobertura/LICENSE +++ /dev/null @@ -1,19 +0,0 @@ -Copyright (c) 2013 Yukinari Toyota - -Permission is hereby granted, free of charge, to any person obtaining a copy of -this software and associated documentation files (the "Software"), to deal in -the Software without restriction, including without limitation the rights to -use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies -of the Software, and to permit persons to whom the Software is furnished to do -so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. 
diff --git a/vendor/github.com/t-yuki/gocover-cobertura/README.md b/vendor/github.com/t-yuki/gocover-cobertura/README.md deleted file mode 100644 index 60ab1dbe7..000000000 --- a/vendor/github.com/t-yuki/gocover-cobertura/README.md +++ /dev/null @@ -1,35 +0,0 @@ -[![Build Status](https://travis-ci.org/t-yuki/gocover-cobertura.svg?branch=master)](https://travis-ci.org/t-yuki/gocover-cobertura) -[![Coverage Status](https://coveralls.io/repos/github/t-yuki/gocover-cobertura/badge.svg?branch=master)](https://coveralls.io/github/t-yuki/gocover-cobertura?branch=master) - -go tool cover XML (Cobertura) export -==================================== - -This is a simple helper tool for generating XML output in [Cobertura](http://cobertura.sourceforge.net/) format -for CIs like [Jenkins](https://wiki.jenkins-ci.org/display/JENKINS/Cobertura+Plugin) and others -from [go tool cover](https://code.google.com/p/go.tools/) output. - -Installation ------------- - -Just type the following to install the program and its dependencies: - - $ go get code.google.com/p/go.tools/cmd/cover - $ go get github.com/t-yuki/gocover-cobertura - -Usage ------ - -`gocover-cobertura` reads from the standard input: - - $ go test -coverprofile=coverage.txt -covermode count github.com/gorilla/mux - $ gocover-cobertura < coverage.txt > coverage.xml - -Authors -------- - -* [Yukinari Toyota (t-yuki)](https://github.com/t-yuki) - -Thanks ------- - -This tool is originated from [gocov-xml](https://github.com/AlekSi/gocov-xml) by [Alexey Palazhchenko (AlekSi)](https://github.com/AlekSi) diff --git a/vendor/github.com/t-yuki/gocover-cobertura/cobertura.go b/vendor/github.com/t-yuki/gocover-cobertura/cobertura.go deleted file mode 100644 index 8556dc563..000000000 --- a/vendor/github.com/t-yuki/gocover-cobertura/cobertura.go +++ /dev/null @@ -1,178 +0,0 @@ -package main - -import ( - "encoding/xml" -) - -type Coverage struct { - XMLName xml.Name `xml:"coverage"` - LineRate float32 `xml:"line-rate,attr"` - BranchRate float32 `xml:"branch-rate,attr"` - Version string `xml:"version,attr"` - Timestamp int64 `xml:"timestamp,attr"` - LinesCovered int64 `xml:"lines-covered,attr"` - LinesValid int64 `xml:"lines-valid,attr"` - BranchesCovered int64 `xml:"branches-covered,attr"` - BranchesValid int64 `xml:"branches-valid,attr"` - Complexity float32 `xml:"complexity,attr"` - Sources []*Source `xml:"sources>source"` - Packages []*Package `xml:"packages>package"` -} - -type Source struct { - Path string `xml:",chardata"` -} - -type Package struct { - Name string `xml:"name,attr"` - LineRate float32 `xml:"line-rate,attr"` - BranchRate float32 `xml:"branch-rate,attr"` - Complexity float32 `xml:"complexity,attr"` - Classes []*Class `xml:"classes>class"` -} - -type Class struct { - Name string `xml:"name,attr"` - Filename string `xml:"filename,attr"` - LineRate float32 `xml:"line-rate,attr"` - BranchRate float32 `xml:"branch-rate,attr"` - Complexity float32 `xml:"complexity,attr"` - Methods []*Method `xml:"methods>method"` - Lines Lines `xml:"lines>line"` -} - -type Method struct { - Name string `xml:"name,attr"` - Signature string `xml:"signature,attr"` - LineRate float32 `xml:"line-rate,attr"` - BranchRate float32 `xml:"branch-rate,attr"` - Complexity float32 `xml:"complexity,attr"` - Lines Lines `xml:"lines>line"` -} - -type Line struct { - Number int `xml:"number,attr"` - Hits int64 `xml:"hits,attr"` -} - -// Lines is a slice of Line pointers, with some convenience methods -type Lines []*Line - -// HitRate returns a float32 from 0.0 to 1.0 
representing what fraction of lines -// have hits -func (lines Lines) HitRate() (hitRate float32) { - return float32(lines.NumLinesWithHits()) / float32(len(lines)) -} - -// NumLines returns the number of lines -func (lines Lines) NumLines() int64 { - return int64(len(lines)) -} - -// NumLinesWithHits returns the number of lines with a hit count > 0 -func (lines Lines) NumLinesWithHits() (numLinesWithHits int64) { - for _, line := range lines { - if line.Hits > 0 { - numLinesWithHits++ - } - } - return numLinesWithHits -} - -// AddOrUpdateLine adds a line if it is a different line than the last line recorded. -// If it's the same line as the last line recorded then we update the hits down -// if the new hits is less; otherwise just leave it as-is -func (lines *Lines) AddOrUpdateLine(lineNumber int, hits int64) { - if len(*lines) > 0 { - lastLine := (*lines)[len(*lines)-1] - if lineNumber == lastLine.Number { - if hits < lastLine.Hits { - lastLine.Hits = hits - } - return - } - } - *lines = append(*lines, &Line{Number: lineNumber, Hits: hits}) -} - -// HitRate returns a float32 from 0.0 to 1.0 representing what fraction of lines -// have hits -func (method Method) HitRate() float32 { - return method.Lines.HitRate() -} - -// NumLines returns the number of lines -func (method Method) NumLines() int64 { - return method.Lines.NumLines() -} - -// NumLinesWithHits returns the number of lines with a hit count > 0 -func (method Method) NumLinesWithHits() int64 { - return method.Lines.NumLinesWithHits() -} - -// HitRate returns a float32 from 0.0 to 1.0 representing what fraction of lines -// have hits -func (class Class) HitRate() float32 { - return float32(class.NumLinesWithHits()) / float32(class.NumLines()) -} - -// NumLines returns the number of lines -func (class Class) NumLines() (numLines int64) { - for _, method := range class.Methods { - numLines += method.NumLines() - } - return numLines -} - -// NumLinesWithHits returns the number of lines with a hit count > 0 -func (class Class) NumLinesWithHits() (numLinesWithHits int64) { - for _, method := range class.Methods { - numLinesWithHits += method.NumLinesWithHits() - } - return numLinesWithHits -} - -// HitRate returns a float32 from 0.0 to 1.0 representing what fraction of lines -// have hits -func (pkg Package) HitRate() float32 { - return float32(pkg.NumLinesWithHits()) / float32(pkg.NumLines()) -} - -// NumLines returns the number of lines -func (pkg Package) NumLines() (numLines int64) { - for _, class := range pkg.Classes { - numLines += class.NumLines() - } - return numLines -} - -// NumLinesWithHits returns the number of lines with a hit count > 0 -func (pkg Package) NumLinesWithHits() (numLinesWithHits int64) { - for _, class := range pkg.Classes { - numLinesWithHits += class.NumLinesWithHits() - } - return numLinesWithHits -} - -// HitRate returns a float32 from 0.0 to 1.0 representing what fraction of lines -// have hits -func (cov Coverage) HitRate() float32 { - return float32(cov.NumLinesWithHits()) / float32(cov.NumLines()) -} - -// NumLines returns the number of lines -func (cov Coverage) NumLines() (numLines int64) { - for _, pkg := range cov.Packages { - numLines += pkg.NumLines() - } - return numLines -} - -// NumLinesWithHits returns the number of lines with a hit count > 0 -func (cov Coverage) NumLinesWithHits() (numLinesWithHits int64) { - for _, pkg := range cov.Packages { - numLinesWithHits += pkg.NumLinesWithHits() - } - return numLinesWithHits -} diff --git 
a/vendor/github.com/t-yuki/gocover-cobertura/gocover-cobertura.go b/vendor/github.com/t-yuki/gocover-cobertura/gocover-cobertura.go deleted file mode 100644 index e64b5de02..000000000 --- a/vendor/github.com/t-yuki/gocover-cobertura/gocover-cobertura.go +++ /dev/null @@ -1,176 +0,0 @@ -package main - -import ( - "encoding/xml" - "fmt" - "go/ast" - "go/build" - "go/parser" - "go/token" - "io" - "io/ioutil" - "os" - "path/filepath" - "strings" - "time" -) - -const coberturaDTDDecl = "\n" - -func main() { - convert(os.Stdin, os.Stdout) -} - -func convert(in io.Reader, out io.Writer) { - profiles, err := ParseProfiles(in) - if err != nil { - panic("Can't parse profiles") - } - - srcDirs := build.Default.SrcDirs() - sources := make([]*Source, len(srcDirs)) - for i, dir := range srcDirs { - sources[i] = &Source{dir} - } - - coverage := Coverage{Sources: sources, Packages: nil, Timestamp: time.Now().UnixNano() / int64(time.Millisecond)} - coverage.parseProfiles(profiles) - - fmt.Fprintf(out, xml.Header) - fmt.Fprintf(out, coberturaDTDDecl) - - encoder := xml.NewEncoder(out) - encoder.Indent("", "\t") - err = encoder.Encode(coverage) - if err != nil { - panic(err) - } - - fmt.Fprintln(out) -} - -func (cov *Coverage) parseProfiles(profiles []*Profile) error { - cov.Packages = []*Package{} - for _, profile := range profiles { - cov.parseProfile(profile) - } - cov.LinesValid = cov.NumLines() - cov.LinesCovered = cov.NumLinesWithHits() - cov.LineRate = cov.HitRate() - return nil -} - -func (cov *Coverage) parseProfile(profile *Profile) error { - fileName := profile.FileName - absFilePath, err := findFile(fileName) - if err != nil { - return err - } - fset := token.NewFileSet() - parsed, err := parser.ParseFile(fset, absFilePath, nil, 0) - if err != nil { - return err - } - data, err := ioutil.ReadFile(absFilePath) - if err != nil { - return err - } - - pkgPath, _ := filepath.Split(fileName) - pkgPath = strings.TrimRight(pkgPath, string(os.PathSeparator)) - - var pkg *Package - for _, p := range cov.Packages { - if p.Name == pkgPath { - pkg = p - } - } - if pkg == nil { - pkg = &Package{Name: pkgPath, Classes: []*Class{}} - cov.Packages = append(cov.Packages, pkg) - } - visitor := &fileVisitor{ - fset: fset, - fileName: fileName, - fileData: data, - classes: make(map[string]*Class), - pkg: pkg, - profile: profile, - } - ast.Walk(visitor, parsed) - pkg.LineRate = pkg.HitRate() - return nil -} - -type fileVisitor struct { - fset *token.FileSet - fileName string - fileData []byte - pkg *Package - classes map[string]*Class - profile *Profile -} - -func (v *fileVisitor) Visit(node ast.Node) ast.Visitor { - switch n := node.(type) { - case *ast.FuncDecl: - class := v.class(n) - method := v.method(n) - method.LineRate = method.Lines.HitRate() - class.Methods = append(class.Methods, method) - for _, line := range method.Lines { - class.Lines = append(class.Lines, line) - } - class.LineRate = class.Lines.HitRate() - } - return v -} - -func (v *fileVisitor) method(n *ast.FuncDecl) *Method { - method := &Method{Name: n.Name.Name} - method.Lines = []*Line{} - - start := v.fset.Position(n.Pos()) - end := v.fset.Position(n.End()) - startLine := start.Line - startCol := start.Column - endLine := end.Line - endCol := end.Column - // The blocks are sorted, so we can stop counting as soon as we reach the end of the relevant block. - for _, b := range v.profile.Blocks { - if b.StartLine > endLine || (b.StartLine == endLine && b.StartCol >= endCol) { - // Past the end of the function. 
- break - } - if b.EndLine < startLine || (b.EndLine == startLine && b.EndCol <= startCol) { - // Before the beginning of the function - continue - } - for i := b.StartLine; i <= b.EndLine; i++ { - method.Lines.AddOrUpdateLine(i, int64(b.Count)) - } - } - return method -} - -func (v *fileVisitor) class(n *ast.FuncDecl) *Class { - className := v.recvName(n) - var class *Class = v.classes[className] - if class == nil { - class = &Class{Name: className, Filename: v.fileName, Methods: []*Method{}, Lines: []*Line{}} - v.classes[className] = class - v.pkg.Classes = append(v.pkg.Classes, class) - } - return class -} - -func (v *fileVisitor) recvName(n *ast.FuncDecl) string { - if n.Recv == nil { - return "-" - } - recv := n.Recv.List[0].Type - start := v.fset.Position(recv.Pos()) - end := v.fset.Position(recv.End()) - name := string(v.fileData[start.Offset:end.Offset]) - return strings.TrimSpace(strings.TrimLeft(name, "*")) -} diff --git a/vendor/github.com/t-yuki/gocover-cobertura/profile.go b/vendor/github.com/t-yuki/gocover-cobertura/profile.go deleted file mode 100644 index 99cbac234..000000000 --- a/vendor/github.com/t-yuki/gocover-cobertura/profile.go +++ /dev/null @@ -1,202 +0,0 @@ -// Imported from https://code.google.com/p/go/source/browse/cmd/cover/profile.go?repo=tools&r=c10a9dd5e0b0a859a8385b6f004584cb083a3934 - -// Copyright 2013 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package main - -import ( - "bufio" - "fmt" - "go/build" - "io" - "math" - "os" - "path/filepath" - "regexp" - "sort" - "strconv" - "strings" -) - -// Profile represents the profiling data for a specific file. -type Profile struct { - FileName string - Mode string - Blocks []ProfileBlock -} - -// ProfileBlock represents a single block of profiling data. -type ProfileBlock struct { - StartLine, StartCol int - EndLine, EndCol int - NumStmt, Count int -} - -type byFileName []*Profile - -func (p byFileName) Len() int { return len(p) } -func (p byFileName) Less(i, j int) bool { return p[i].FileName < p[j].FileName } -func (p byFileName) Swap(i, j int) { p[i], p[j] = p[j], p[i] } - -// ParseProfiles parses profile data from the given Reader and returns a -// Profile for each file. -func ParseProfiles(in io.Reader) ([]*Profile, error) { - files := make(map[string]*Profile) - // First line is "mode: foo", where foo is "set", "count", or "atomic". - // Rest of file is in the format - // encoding/base64/base64.go:34.44,37.40 3 1 - // where the fields are: name.go:line.column,line.column numberOfStatements count - s := bufio.NewScanner(in) - mode := "" - for s.Scan() { - line := s.Text() - if mode == "" { - const p = "mode: " - if !strings.HasPrefix(line, p) || line == p { - return nil, fmt.Errorf("bad mode line: %v", line) - } - mode = line[len(p):] - continue - } - m := lineRe.FindStringSubmatch(line) - if m == nil { - return nil, fmt.Errorf("line %q doesn't match expected format: %v", m, lineRe) - } - fn := m[1] - p := files[fn] - if p == nil { - p = &Profile{ - FileName: fn, - Mode: mode, - } - files[fn] = p - } - p.Blocks = append(p.Blocks, ProfileBlock{ - StartLine: toInt(m[2]), - StartCol: toInt(m[3]), - EndLine: toInt(m[4]), - EndCol: toInt(m[5]), - NumStmt: toInt(m[6]), - Count: toInt(m[7]), - }) - } - if err := s.Err(); err != nil { - return nil, err - } - for _, p := range files { - sort.Sort(blocksByStart(p.Blocks)) - } - // Generate a sorted slice. 
- profiles := make([]*Profile, 0, len(files)) - for _, profile := range files { - profiles = append(profiles, profile) - } - sort.Sort(byFileName(profiles)) - return profiles, nil -} - -type blocksByStart []ProfileBlock - -func (b blocksByStart) Len() int { return len(b) } -func (b blocksByStart) Swap(i, j int) { b[i], b[j] = b[j], b[i] } -func (b blocksByStart) Less(i, j int) bool { - bi, bj := b[i], b[j] - return bi.StartLine < bj.StartLine || bi.StartLine == bj.StartLine && bi.StartCol < bj.StartCol -} - -var lineRe = regexp.MustCompile(`^(.+):([0-9]+).([0-9]+),([0-9]+).([0-9]+) ([0-9]+) ([0-9]+)$`) - -func toInt(s string) int { - i, err := strconv.Atoi(s) - if err != nil { - panic(err) - } - return i -} - -// Boundary represents the position in a source file of the beginning or end of a -// block as reported by the coverage profile. In HTML mode, it will correspond to -// the opening or closing of a tag and will be used to colorize the source -type Boundary struct { - Offset int // Location as a byte offset in the source file. - Start bool // Is this the start of a block? - Count int // Event count from the cover profile. - Norm float64 // Count normalized to [0..1]. -} - -// Boundaries returns a Profile as a set of Boundary objects within the provided src. -func (p *Profile) Boundaries(src []byte) (boundaries []Boundary) { - // Find maximum count. - max := 0 - for _, b := range p.Blocks { - if b.Count > max { - max = b.Count - } - } - // Divisor for normalization. - divisor := math.Log(float64(max)) - - // boundary returns a Boundary, populating the Norm field with a normalized Count. - boundary := func(offset int, start bool, count int) Boundary { - b := Boundary{Offset: offset, Start: start, Count: count} - if !start || count == 0 { - return b - } - if max <= 1 { - b.Norm = 0.8 // Profile is in"set" mode; we want a heat map. Use cov8 in the CSS. - } else if count > 0 { - b.Norm = math.Log(float64(count)) / divisor - } - return b - } - - line, col := 1, 2 // TODO: Why is this 2? - for si, bi := 0, 0; si < len(src) && bi < len(p.Blocks); { - b := p.Blocks[bi] - if b.StartLine == line && b.StartCol == col { - boundaries = append(boundaries, boundary(si, true, b.Count)) - } - if b.EndLine == line && b.EndCol == col { - boundaries = append(boundaries, boundary(si, false, 0)) - bi++ - continue // Don't advance through src; maybe the next block starts here. - } - if src[si] == '\n' { - line++ - col = 0 - } - col++ - si++ - } - sort.Sort(boundariesByPos(boundaries)) - return -} - -type boundariesByPos []Boundary - -func (b boundariesByPos) Len() int { return len(b) } -func (b boundariesByPos) Swap(i, j int) { b[i], b[j] = b[j], b[i] } -func (b boundariesByPos) Less(i, j int) bool { - if b[i].Offset == b[j].Offset { - return !b[i].Start && b[j].Start - } - return b[i].Offset < b[j].Offset -} - -// findFile finds the location of the named file in GOROOT, GOPATH etc. 
-func findFile(file string) (string, error) { - if strings.HasPrefix(file, "_") { - file = file[1:] - } - if _, err := os.Stat(file); err == nil { - return file, nil - } - dir, file := filepath.Split(file) - pkg, err := build.Import(dir, ".", build.FindOnly) - if err != nil { - return "", fmt.Errorf("can't find %q: %v", file, err) - } - return filepath.Join(pkg.Dir, file), nil -} diff --git a/vendor/github.com/tdakkota/asciicheck/.gitignore b/vendor/github.com/tdakkota/asciicheck/.gitignore deleted file mode 100644 index dfa562d3e..000000000 --- a/vendor/github.com/tdakkota/asciicheck/.gitignore +++ /dev/null @@ -1,32 +0,0 @@ -# IntelliJ project files -.idea -*.iml -out -gen - -# Go template -# Binaries for programs and plugins -*.exe -*.exe~ -*.dll -*.so -*.dylib - -# Test binary, built with `go test -c` -*.test - -# Output of the go coverage tool, specifically when used with LiteIDE -*.out - -# Dependency directories (remove the comment below to include it) -# vendor/ -.idea/$CACHE_FILE$ -.idea/$PRODUCT_WORKSPACE_FILE$ -.idea/.gitignore -.idea/codeStyles -.idea/deployment.xml -.idea/inspectionProfiles/ -.idea/kotlinc.xml -.idea/misc.xml -.idea/modules.xml -asciicheck.iml diff --git a/vendor/github.com/tdakkota/asciicheck/LICENSE b/vendor/github.com/tdakkota/asciicheck/LICENSE deleted file mode 100644 index 48a60cf1c..000000000 --- a/vendor/github.com/tdakkota/asciicheck/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -MIT License - -Copyright (c) 2020 tdakkota - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. diff --git a/vendor/github.com/tdakkota/asciicheck/README.md b/vendor/github.com/tdakkota/asciicheck/README.md deleted file mode 100644 index a7ff5884f..000000000 --- a/vendor/github.com/tdakkota/asciicheck/README.md +++ /dev/null @@ -1,72 +0,0 @@ -# asciicheck [![Go Report Card](https://goreportcard.com/badge/github.com/tdakkota/asciicheck)](https://goreportcard.com/report/github.com/tdakkota/asciicheck) [![codecov](https://codecov.io/gh/tdakkota/asciicheck/branch/master/graph/badge.svg)](https://codecov.io/gh/tdakkota/asciicheck) ![Go](https://github.com/tdakkota/asciicheck/workflows/Go/badge.svg) -Simple linter to check that your code does not contain non-ASCII identifiers - -# Install - -``` -go get -u github.com/tdakkota/asciicheck/cmd/asciicheck -``` - -# Reason to use -So, do you see this code? Looks correct, isn't it? 
- -```go -package main - -import "fmt" - -type TеstStruct struct{} - -func main() { - s := TestStruct{} - fmt.Println(s) -} -``` -But if you try to run it, you will get an error: -``` -./prog.go:8:7: undefined: TestStruct -``` -What? `TestStruct` is defined above, but compiler thinks diffrent. Why? - -**Answer**: -Because `TestStruct` is not `TеstStruct`. -``` -type TеstStruct struct{} - ^ this 'e' (U+0435) is not 'e' (U+0065) -``` - -# Usage -asciicheck uses [`singlechecker`](https://pkg.go.dev/golang.org/x/tools/go/analysis/singlechecker) package to run: - -``` -asciicheck: checks that all code identifiers does not have non-ASCII symbols in the name - -Usage: asciicheck [-flag] [package] - - -Flags: - -V print version and exit - -all - no effect (deprecated) - -c int - display offending line with this many lines of context (default -1) - -cpuprofile string - write CPU profile to this file - -debug string - debug flags, any subset of "fpstv" - -fix - apply all suggested fixes - -flags - print analyzer flags in JSON - -json - emit JSON output - -memprofile string - write memory profile to this file - -source - no effect (deprecated) - -tags string - no effect (deprecated) - -trace string - write trace log to this file - -v no effect (deprecated) -``` diff --git a/vendor/github.com/tdakkota/asciicheck/ascii.go b/vendor/github.com/tdakkota/asciicheck/ascii.go deleted file mode 100644 index 43fe25b59..000000000 --- a/vendor/github.com/tdakkota/asciicheck/ascii.go +++ /dev/null @@ -1,21 +0,0 @@ -package asciicheck - -import ( - "unicode" - "unicode/utf8" -) - -func isASCII(s string) (rune, bool) { - if len(s) == 1 { - r, size := utf8.DecodeRuneInString(s) - return r, size < 2 - } - - for _, r := range s { - if r > unicode.MaxASCII { - return r, false - } - } - - return 0, true -} diff --git a/vendor/github.com/tdakkota/asciicheck/asciicheck.go b/vendor/github.com/tdakkota/asciicheck/asciicheck.go deleted file mode 100644 index 690728022..000000000 --- a/vendor/github.com/tdakkota/asciicheck/asciicheck.go +++ /dev/null @@ -1,49 +0,0 @@ -package asciicheck - -import ( - "fmt" - "go/ast" - "golang.org/x/tools/go/analysis" -) - -func NewAnalyzer() *analysis.Analyzer { - return &analysis.Analyzer{ - Name: "asciicheck", - Doc: "checks that all code identifiers does not have non-ASCII symbols in the name", - Run: run, - } -} - -func run(pass *analysis.Pass) (interface{}, error) { - for _, file := range pass.Files { - alreadyViewed := map[*ast.Object]struct{}{} - ast.Inspect( - file, func(node ast.Node) bool { - cb(pass, node, alreadyViewed) - return true - }, - ) - } - - return nil, nil -} - -func cb(pass *analysis.Pass, n ast.Node, m map[*ast.Object]struct{}) { - if v, ok := n.(*ast.Ident); ok { - if _, ok := m[v.Obj]; ok { - return - } else { - m[v.Obj] = struct{}{} - } - - ch, ascii := isASCII(v.Name) - if !ascii { - pass.Report( - analysis.Diagnostic{ - Pos: v.Pos(), - Message: fmt.Sprintf("identifier \"%s\" contain non-ASCII character: %#U", v.Name, ch), - }, - ) - } - } -} diff --git a/vendor/github.com/tetafro/godot/.gitignore b/vendor/github.com/tetafro/godot/.gitignore deleted file mode 100644 index 0b17eac4c..000000000 --- a/vendor/github.com/tetafro/godot/.gitignore +++ /dev/null @@ -1,5 +0,0 @@ -/dist/ -/tmp/ -/vendor/ -/godot -/profile.out diff --git a/vendor/github.com/tetafro/godot/.godot.yaml b/vendor/github.com/tetafro/godot/.godot.yaml deleted file mode 100644 index af36858f9..000000000 --- a/vendor/github.com/tetafro/godot/.godot.yaml +++ /dev/null @@ -1,16 +0,0 @@ -# Which 
comments to check: -# declarations - for top level declaration comments (default); -# toplevel - for top level comments; -# all - for all comments. -scope: declarations - -# List of regexps for excluding particular comment lines from check. -# Example: exclude comments which contain numbers. -exclude: - # - '[0-9]+' - -# Check periods at the end of sentences. -period: true - -# Check that first letter of each sentence is capital. -capital: false diff --git a/vendor/github.com/tetafro/godot/.golangci.yml b/vendor/github.com/tetafro/godot/.golangci.yml deleted file mode 100644 index ea380eb83..000000000 --- a/vendor/github.com/tetafro/godot/.golangci.yml +++ /dev/null @@ -1,80 +0,0 @@ -run: - concurrency: 2 - deadline: 5m - -skip-dirs: - - path: ./testdata/ - -linters: - disable-all: true - enable: - - asciicheck - - bodyclose - - cyclop - - dogsled - - durationcheck - - errcheck - - errname - - errorlint - - exhaustive - - exportloopref - - exportloopref - - gochecknoinits - - gocognit - - goconst - - gocritic - - gocyclo - - godot - - goerr113 - - gofmt - - gofumpt - - goimports - - goprintffuncname - - gosec - - gosimple - - govet - - importas - - ineffassign - - lll - - misspell - - nakedret - - nestif - - noctx - - nolintlint - - prealloc - - revive - - rowserrcheck - - sqlclosecheck - - sqlclosecheck - - staticcheck - - stylecheck - - typecheck - - unconvert - - unparam - - unused - - wastedassign - - whitespace - - wrapcheck - -linters-settings: - godot: - scope: toplevel - -issues: - exclude-use-default: false - exclude: - - "do not define dynamic errors, use wrapped static errors instead" - exclude-rules: - - path: _test\.go - linters: - - dupl - - errcheck - - funlen - - gocognit - - cyclop - - gosec - - noctx - - path: main\.go - linters: - - cyclop - - gomnd diff --git a/vendor/github.com/tetafro/godot/.goreleaser.yml b/vendor/github.com/tetafro/godot/.goreleaser.yml deleted file mode 100644 index c0fc2b6b1..000000000 --- a/vendor/github.com/tetafro/godot/.goreleaser.yml +++ /dev/null @@ -1,11 +0,0 @@ -builds: - - dir: ./cmd/godot -checksum: - name_template: checksums.txt -snapshot: - name_template: "{{ .Tag }}" -changelog: - sort: asc - filters: - exclude: - - '^Merge pull request' diff --git a/vendor/github.com/tetafro/godot/LICENSE b/vendor/github.com/tetafro/godot/LICENSE deleted file mode 100644 index 120c6d502..000000000 --- a/vendor/github.com/tetafro/godot/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -MIT License - -Copyright (c) 2020 Denis Krivak - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. 
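Looking back at the asciicheck sources deleted above: the removed README states the analyzer is driven through x/tools' singlechecker. The cmd/asciicheck main itself is not shown in this diff, so the following is only a sketch of that entry point, wiring the `NewAnalyzer` from the asciicheck.go removed above into the standard single-analyzer driver.

```go
// Hypothetical reconstruction of the cmd/asciicheck entry point the
// deleted README points at; NewAnalyzer is defined in the asciicheck.go
// removed above.
package main

import (
	"github.com/tdakkota/asciicheck"
	"golang.org/x/tools/go/analysis/singlechecker"
)

func main() {
	// singlechecker handles flag parsing, package loading, and
	// diagnostic reporting for a single analysis.Analyzer.
	singlechecker.Main(asciicheck.NewAnalyzer())
}
```

Run against the README's `TеstStruct` example, this reports the Cyrillic 'е' (U+0435) hidden in the identifier.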
diff --git a/vendor/github.com/tetafro/godot/Makefile b/vendor/github.com/tetafro/godot/Makefile deleted file mode 100644 index f167051e8..000000000 --- a/vendor/github.com/tetafro/godot/Makefile +++ /dev/null @@ -1,25 +0,0 @@ -.PHONY: dep -dep: - go mod tidy && go mod verify - -.PHONY: test -test: - go test ./... - -.PHONY: cover -cover: - go test -coverprofile cover.out ./... - go tool cover -html=cover.out - rm -f cover.out - -.PHONY: lint -lint: - golangci-lint run - -.PHONY: build -build: - go build -o godot ./cmd/godot - -.PHONY: release -release: - goreleaser release --rm-dist diff --git a/vendor/github.com/tetafro/godot/README.md b/vendor/github.com/tetafro/godot/README.md deleted file mode 100644 index 6b2e530b9..000000000 --- a/vendor/github.com/tetafro/godot/README.md +++ /dev/null @@ -1,84 +0,0 @@ -# godot - -[![License](http://img.shields.io/badge/license-MIT-green.svg?style=flat)](https://raw.githubusercontent.com/tetafro/godot/master/LICENSE) -[![Github CI](https://img.shields.io/github/actions/workflow/status/tetafro/godot/push.yml)](https://github.com/tetafro/godot/actions) -[![Go Report](https://goreportcard.com/badge/github.com/tetafro/godot)](https://goreportcard.com/report/github.com/tetafro/godot) -[![Codecov](https://codecov.io/gh/tetafro/godot/branch/master/graph/badge.svg)](https://codecov.io/gh/tetafro/godot) - -Linter that checks if all top-level comments contain a period at the -end of the last sentence if needed. - -[CodeReviewComments](https://github.com/golang/go/wiki/CodeReviewComments#comment-sentences) quote: - -> Comments should begin with the name of the thing being described -> and end in a period - -## Install - -*NOTE: Godot is available as a part of [GolangCI Lint](https://github.com/golangci/golangci-lint) -(disabled by default).* - -Build from source - -```sh -go install github.com/tetafro/godot/cmd/godot@latest -``` - -or download binary from [releases page](https://github.com/tetafro/godot/releases). - -## Config - -You can specify options using config file. Use default name `.godot.yaml`, or -set it using `-c filename.yaml` argument. If no config provided the following -defaults are used: - -```yaml -# Which comments to check: -# declarations - for top level declaration comments (default); -# toplevel - for top level comments; -# all - for all comments. -scope: declarations - -# List of regexps for excluding particular comment lines from check. -exclude: - -# Check periods at the end of sentences. -period: true - -# Check that first letter of each sentence is capital. -capital: false -``` - -## Run - -```sh -godot ./myproject -``` - -Autofix flags are also available - -```sh -godot -f ./myproject # fix issues and print the result -godot -w ./myproject # fix issues and replace the original file -``` - -See all flags with `godot -h`. - -## Example - -Code - -```go -package math - -// Sum sums two integers -func Sum(a, b int) int { - return a + b // result -} -``` - -Output - -```sh -Comment should end in a period: math/math.go:3:1 -``` diff --git a/vendor/github.com/tetafro/godot/checks.go b/vendor/github.com/tetafro/godot/checks.go deleted file mode 100644 index 0e53c220a..000000000 --- a/vendor/github.com/tetafro/godot/checks.go +++ /dev/null @@ -1,296 +0,0 @@ -package godot - -import ( - "go/token" - "regexp" - "strings" - "unicode" -) - -// Error messages. -const ( - noPeriodMessage = "Comment should end in a period" - noCapitalMessage = "Sentence should start with a capital letter" -) - -var ( - // List of valid sentence ending. 
- // A sentence can be inside parenthesis, and therefore ends with parenthesis. - lastChars = []string{".", "?", "!", ".)", "?)", "!)", "。", "?", "!", "。)", "?)", "!)", specialReplacer} - - // Abbreviations to exclude from capital letters check. - abbreviations = []string{"i.e.", "i. e.", "e.g.", "e. g.", "etc."} - - // Special tags in comments like "//nolint:", or "//+k8s:". - tags = regexp.MustCompile(`^\+?[a-z0-9]+:`) - - // Special hashtags in comments like "// #nosec". - hashtags = regexp.MustCompile(`^#[a-z]+($|\s)`) - - // URL at the end of the line. - endURL = regexp.MustCompile(`[a-z]+://[^\s]+$`) -) - -// position is a position inside a comment (might be multiline comment). -type position struct { - line int // starts at 1 - column int // starts at 1, byte count -} - -// checkComments checks every comment accordings to the rules from -// `settings` argument. -func checkComments(comments []comment, settings Settings) []Issue { - var issues []Issue - for _, c := range comments { - if settings.Period { - if iss := checkPeriod(c); iss != nil { - issues = append(issues, *iss) - } - } - if settings.Capital { - if iss := checkCapital(c); len(iss) > 0 { - issues = append(issues, iss...) - } - } - } - return issues -} - -// checkPeriod checks that the last sentense of the comment ends -// in a period. -func checkPeriod(c comment) *Issue { - // Check last non-empty line - var found bool - var line string - var pos position - lines := strings.Split(c.text, "\n") - for i := len(lines) - 1; i >= 0; i-- { - line = strings.TrimRightFunc(lines[i], unicode.IsSpace) - if line == "" { - continue - } - found = true - pos.line = i + 1 - break - } - // All lines are empty - if !found { - return nil - } - // Correct line - if hasSuffix(line, lastChars) { - return nil - } - - pos.column = len(line) + 1 - - // Shift position to its real value. `c.text` doesn't contain comment's - // special symbols: /* or //, and line indentations inside. It also - // contains */ in the end in case of block comment. - pos.column += strings.Index( - c.lines[pos.line-1], - strings.Split(c.text, "\n")[pos.line-1], - ) - - iss := Issue{ - Pos: token.Position{ - Filename: c.start.Filename, - Offset: c.start.Offset, - Line: pos.line + c.start.Line - 1, - Column: pos.column, - }, - Message: noPeriodMessage, - } - - // Make a replacement. Use `pos.line` to get an original line from - // attached lines. Use `iss.Pos.Column` because it's a position in - // the original line. - original := c.lines[pos.line-1] - if len(original) < iss.Pos.Column-1 { - // This should never happen. Avoid panics, skip this check. - return nil - } - iss.Replacement = original[:iss.Pos.Column-1] + "." + - original[iss.Pos.Column-1:] - - // Save replacement to raw lines to be able to combine it with - // further replacements - c.lines[pos.line-1] = iss.Replacement - - return &iss -} - -// checkCapital checks that each sentense of the comment starts with -// a capital letter. -// -//nolint:cyclop,funlen -func checkCapital(c comment) []Issue { - // Remove common abbreviations from the comment - for _, abbr := range abbreviations { - repl := strings.ReplaceAll(abbr, ".", "_") - c.text = strings.ReplaceAll(c.text, abbr, repl) - } - - // List of states during the scan: `empty` - nothing special, - // `endChar` - found one of sentence ending chars (.!?), - // `endOfSentence` - found `endChar`, and then space or newline. 
- const empty, endChar, endOfSentence = 1, 2, 3 - - var pp []position - pos := position{line: 1} - state := endOfSentence - if c.decl { - // Skip first - state = empty - } - for _, r := range c.text { - s := string(r) - - pos.column++ - if s == "\n" { - pos.line++ - pos.column = 0 - if state == endChar { - state = endOfSentence - } - continue - } - if s == "." || s == "!" || s == "?" { - state = endChar - continue - } - if s == ")" && state == endChar { - continue - } - if s == " " { - if state == endChar { - state = endOfSentence - } - continue - } - if state == endOfSentence && unicode.IsLower(r) { - pp = append(pp, position{ - line: pos.line, - column: runeToByteColumn(c.text, pos.column), - }) - } - state = empty - } - if len(pp) == 0 { - return nil - } - - issues := make([]Issue, len(pp)) - for i, pos := range pp { - // Shift position by the length of comment's special symbols: /* or // - isBlock := strings.HasPrefix(c.lines[0], "/*") - if (isBlock && pos.line == 1) || !isBlock { - pos.column += 2 - } - - iss := Issue{ - Pos: token.Position{ - Filename: c.start.Filename, - Offset: c.start.Offset, - Line: pos.line + c.start.Line - 1, - Column: pos.column + c.start.Column - 1, - }, - Message: noCapitalMessage, - } - - // Make a replacement. Use `pos.original` to get an original original from - // attached lines. Use `iss.Pos.Column` because it's a position in - // the original original. - original := c.lines[pos.line-1] - col := byteToRuneColumn(original, iss.Pos.Column) - 1 - rep := string(unicode.ToTitle([]rune(original)[col])) // capital letter - if len(original) < iss.Pos.Column-1+len(rep) { - // This should never happen. Avoid panics, skip this check. - continue - } - iss.Replacement = original[:iss.Pos.Column-1] + rep + - original[iss.Pos.Column-1+len(rep):] - - // Save replacement to raw lines to be able to combine it with - // further replacements - c.lines[pos.line-1] = iss.Replacement - - issues[i] = iss - } - - return issues -} - -// isSpecialBlock checks that given block of comment lines is special and -// shouldn't be checked as a regular sentence. -func isSpecialBlock(comment string) bool { - // Skip cgo code blocks - // TODO: Find a better way to detect cgo code - if strings.HasPrefix(comment, "/*") && (strings.Contains(comment, "#include") || - strings.Contains(comment, "#define")) { - return true - } - if strings.HasPrefix(comment, "// Output: ") { - return true - } - return false -} - -// isSpecialLine checks that given comment line is special and -// shouldn't be checked as a regular sentence. 
-func isSpecialLine(comment string) bool { - // Skip cgo export tags: https://golang.org/cmd/cgo/#hdr-C_references_to_Go - if strings.HasPrefix(comment, "//export ") { - return true - } - - comment = strings.TrimPrefix(comment, "//") - comment = strings.TrimPrefix(comment, "/*") - - // Don't check comments starting with space indentation - they may - // contain code examples, which shouldn't end with period - if strings.HasPrefix(comment, " ") || - strings.HasPrefix(comment, " \t") || - strings.HasPrefix(comment, "\t") { - return true - } - - // Skip tags and URLs - comment = strings.TrimSpace(comment) - if tags.MatchString(comment) || - hashtags.MatchString(comment) || - endURL.MatchString(comment) || - strings.HasPrefix(comment, "+build") { - return true - } - - return false -} - -func hasSuffix(s string, suffixes []string) bool { - for _, suffix := range suffixes { - if strings.HasSuffix(s, suffix) { - return true - } - } - return false -} - -// The following two functions convert byte and rune indexes. -// -// Example: -// text: a b c Ш e f -// runes: 1 2 3 4 5 6 -// bytes: 0 1 2 3 5 6 -// The reason of the difference is that the size of "Ш" is 2 bytes. -// NOTE: Works only for 1-based indexes (line columns). - -// byteToRuneColumn converts byte index inside the string to rune index. -func byteToRuneColumn(s string, i int) int { - return len([]rune(s[:i-1])) + 1 -} - -// runeToByteColumn converts rune index inside the string to byte index. -func runeToByteColumn(s string, i int) int { - return len(string([]rune(s)[:i-1])) + 1 -} diff --git a/vendor/github.com/tetafro/godot/getters.go b/vendor/github.com/tetafro/godot/getters.go deleted file mode 100644 index 7d3d22fb1..000000000 --- a/vendor/github.com/tetafro/godot/getters.go +++ /dev/null @@ -1,289 +0,0 @@ -package godot - -import ( - "errors" - "fmt" - "go/ast" - "go/token" - "os" - "regexp" - "strings" -) - -var ( - errEmptyInput = errors.New("empty input") - errUnsuitableInput = errors.New("unsuitable input") -) - -// specialReplacer is a replacer for some types of special lines in comments, -// which shouldn't be checked. For example, if comment ends with a block of -// code it should not necessarily have a period at the end. -const specialReplacer = "" - -type parsedFile struct { - fset *token.FileSet - file *ast.File - lines []string -} - -func newParsedFile(file *ast.File, fset *token.FileSet) (*parsedFile, error) { - if file == nil || fset == nil || len(file.Comments) == 0 { - return nil, errEmptyInput - } - - pf := parsedFile{ - fset: fset, - file: file, - } - - var err error - - // Read original file. This is necessary for making a replacements for - // inline comments. I couldn't find a better way to get original line - // with code and comment without reading the file. Function `Format` - // from "go/format" won't help here if the original file is not gofmt-ed. - pf.lines, err = readFile(file, fset) - if err != nil { - return nil, fmt.Errorf("read file: %w", err) - } - - // Dirty hack. For some cases Go generates temporary files during - // compilation process if there is a cgo block in the source file. Some of - // these temporary files are just copies of original source files but with - // new generated comments at the top. Because of them the content differs - // from AST. For some reason it differs only in golangci-lint. I failed to - // find out the exact description of the process, so let's just skip files - // generated by cgo. 
- if isCgoGenerated(pf.lines) { - return nil, errUnsuitableInput - } - - // Check consistency to avoid checking slice indexes in each function. - // Note that `PositionFor` is used with `adjusted=false` to skip `//line` - // directives that can set references to other files (e.g. templates) - // instead of the real ones, and break consistency here. - // Issue: https://github.com/tetafro/godot/issues/32 - lastComment := pf.file.Comments[len(pf.file.Comments)-1] - if p := pf.fset.PositionFor(lastComment.End(), false); len(pf.lines) < p.Line { - return nil, fmt.Errorf("inconsistency between file and AST: %s", p.Filename) - } - - return &pf, nil -} - -// getComments extracts comments from a file. -func (pf *parsedFile) getComments(scope Scope, exclude []*regexp.Regexp) []comment { - var comments []comment - decl := pf.getDeclarationComments(exclude) - switch scope { - case AllScope: - // All comments - comments = pf.getAllComments(exclude) - case TopLevelScope: - // All top level comments and comments from the inside - // of top level blocks - comments = append( - pf.getBlockComments(exclude), - pf.getTopLevelComments(exclude)..., - ) - case DeclScope: - // Top level declaration comments and comments from the inside - // of top level blocks - comments = append(pf.getBlockComments(exclude), decl...) - } - - // Set `decl` flag - setDecl(comments, decl) - - return comments -} - -// getBlockComments gets comments from the inside of top level blocks: -// var (...), const (...). -func (pf *parsedFile) getBlockComments(exclude []*regexp.Regexp) []comment { - var comments []comment - for _, decl := range pf.file.Decls { - d, ok := decl.(*ast.GenDecl) - if !ok { - continue - } - // No parenthesis == no block - if d.Lparen == 0 { - continue - } - for _, c := range pf.file.Comments { - if c == nil || len(c.List) == 0 { - continue - } - // Skip comments outside this block - if d.Lparen > c.Pos() || c.Pos() > d.Rparen { - continue - } - // Skip comments that are not top-level for this block - // (the block itself is top level, so comments inside this block - // would be on column 2) - //nolint:gomnd - if pf.fset.Position(c.Pos()).Column != 2 { - continue - } - firstLine := pf.fset.Position(c.Pos()).Line - lastLine := pf.fset.Position(c.End()).Line - comments = append(comments, comment{ - lines: pf.lines[firstLine-1 : lastLine], - text: getText(c, exclude), - start: pf.fset.Position(c.List[0].Slash), - }) - } - } - return comments -} - -// getTopLevelComments gets all top level comments. -func (pf *parsedFile) getTopLevelComments(exclude []*regexp.Regexp) []comment { - var comments []comment //nolint:prealloc - for _, c := range pf.file.Comments { - if c == nil || len(c.List) == 0 { - continue - } - if pf.fset.Position(c.Pos()).Column != 1 { - continue - } - firstLine := pf.fset.Position(c.Pos()).Line - lastLine := pf.fset.Position(c.End()).Line - comments = append(comments, comment{ - lines: pf.lines[firstLine-1 : lastLine], - text: getText(c, exclude), - start: pf.fset.Position(c.List[0].Slash), - }) - } - return comments -} - -// getDeclarationComments gets top level declaration comments. 
-func (pf *parsedFile) getDeclarationComments(exclude []*regexp.Regexp) []comment { - var comments []comment //nolint:prealloc - for _, decl := range pf.file.Decls { - var cg *ast.CommentGroup - switch d := decl.(type) { - case *ast.GenDecl: - cg = d.Doc - case *ast.FuncDecl: - cg = d.Doc - } - - if cg == nil || len(cg.List) == 0 { - continue - } - - firstLine := pf.fset.Position(cg.Pos()).Line - lastLine := pf.fset.Position(cg.End()).Line - comments = append(comments, comment{ - lines: pf.lines[firstLine-1 : lastLine], - text: getText(cg, exclude), - start: pf.fset.Position(cg.List[0].Slash), - }) - } - return comments -} - -// getAllComments gets every single comment from the file. -func (pf *parsedFile) getAllComments(exclude []*regexp.Regexp) []comment { - var comments []comment //nolint:prealloc - for _, c := range pf.file.Comments { - if c == nil || len(c.List) == 0 { - continue - } - firstLine := pf.fset.Position(c.Pos()).Line - lastLine := pf.fset.Position(c.End()).Line - comments = append(comments, comment{ - lines: pf.lines[firstLine-1 : lastLine], - start: pf.fset.Position(c.List[0].Slash), - text: getText(c, exclude), - }) - } - return comments -} - -// getText extracts text from comment. If the comment is a special block -// (e.g., CGO code), a block of empty lines is returned. If comment contains -// special lines (e.g., tags or indented code examples), they are replaced -// with `specialReplacer` to skip checks for them. -// The result can be multiline. -// -//nolint:cyclop -func getText(comment *ast.CommentGroup, exclude []*regexp.Regexp) (s string) { - if len(comment.List) == 1 && - strings.HasPrefix(comment.List[0].Text, "/*") && - isSpecialBlock(comment.List[0].Text) { - return "" - } - - for _, c := range comment.List { - text := c.Text - isBlock := false - if strings.HasPrefix(c.Text, "/*") { - isBlock = true - text = strings.TrimPrefix(text, "/*") - text = strings.TrimSuffix(text, "*/") - } - for _, line := range strings.Split(text, "\n") { - if isSpecialLine(line) { - s += specialReplacer + "\n" - continue - } - if !isBlock { - line = strings.TrimPrefix(line, "//") - } - if matchAny(line, exclude) { - s += specialReplacer + "\n" - continue - } - s += line + "\n" - } - } - if len(s) == 0 { - return "" - } - return s[:len(s)-1] // trim last "\n" -} - -// readFile reads file and returns its lines as strings. -func readFile(file *ast.File, fset *token.FileSet) ([]string, error) { - fname := fset.File(file.Package) - f, err := os.ReadFile(fname.Name()) - if err != nil { - return nil, err //nolint:wrapcheck - } - return strings.Split(string(f), "\n"), nil -} - -// setDecl sets `decl` flag to comments which are declaration comments. -func setDecl(comments, decl []comment) { - for _, d := range decl { - for i, c := range comments { - if d.start == c.start { - comments[i].decl = true - break - } - } - } -} - -// matchAny checks if string matches any of given regexps. 
-func matchAny(s string, rr []*regexp.Regexp) bool { - for _, re := range rr { - if re.MatchString(s) { - return true - } - } - return false -} - -func isCgoGenerated(lines []string) bool { - for i := range lines { - if strings.Contains(lines[i], "Code generated by cmd/cgo") { - return true - } - } - return false -} diff --git a/vendor/github.com/tetafro/godot/godot.go b/vendor/github.com/tetafro/godot/godot.go deleted file mode 100644 index e825e9a6d..000000000 --- a/vendor/github.com/tetafro/godot/godot.go +++ /dev/null @@ -1,129 +0,0 @@ -// Package godot checks if comments contain a period at the end of the last -// sentence if needed. -package godot - -import ( - "errors" - "fmt" - "go/ast" - "go/token" - "os" - "regexp" - "sort" - "strings" -) - -// NOTE: Line and column indexes are 1-based. - -// NOTE: Errors `invalid line number inside comment...` should never happen. -// Their goal is to prevent panic, if there's a bug with array indexes. - -// Issue contains a description of linting error and a recommended replacement. -type Issue struct { - Pos token.Position - Message string - Replacement string -} - -// comment is an internal representation of AST comment entity with additional -// data attached. The latter is used for creating a full replacement for -// the line with issues. -type comment struct { - lines []string // unmodified lines from file - text string // concatenated `lines` with special parts excluded - start token.Position // position of the first symbol in comment - decl bool // whether comment is a declaration comment -} - -// Run runs this linter on the provided code. -func Run(file *ast.File, fset *token.FileSet, settings Settings) ([]Issue, error) { - pf, err := newParsedFile(file, fset) - if errors.Is(err, errEmptyInput) || errors.Is(err, errUnsuitableInput) { - return nil, nil - } - if err != nil { - return nil, fmt.Errorf("parse input file: %w", err) - } - - exclude := make([]*regexp.Regexp, len(settings.Exclude)) - for i := 0; i < len(settings.Exclude); i++ { - exclude[i], err = regexp.Compile(settings.Exclude[i]) - if err != nil { - return nil, fmt.Errorf("invalid regexp: %w", err) - } - } - - comments := pf.getComments(settings.Scope, exclude) - issues := checkComments(comments, settings) - sortIssues(issues) - - return issues, nil -} - -// Fix fixes all issues and returns new version of file content. -func Fix(path string, file *ast.File, fset *token.FileSet, settings Settings) ([]byte, error) { - // Read file - content, err := os.ReadFile(path) //nolint:gosec - if err != nil { - return nil, fmt.Errorf("read file: %w", err) - } - if len(content) == 0 { - return nil, nil - } - - issues, err := Run(file, fset, settings) - if err != nil { - return nil, fmt.Errorf("run linter: %w", err) - } - - // slice -> map - m := map[int]Issue{} - for _, iss := range issues { - m[iss.Pos.Line] = iss - } - - // Replace lines from issues - fixed := make([]byte, 0, len(content)) - for i, line := range strings.Split(string(content), "\n") { - newline := line - if iss, ok := m[i+1]; ok { - newline = iss.Replacement - } - fixed = append(fixed, []byte(newline+"\n")...) - } - fixed = fixed[:len(fixed)-1] // trim last "\n" - - return fixed, nil -} - -// Replace rewrites original file with its fixed version. 
-func Replace(path string, file *ast.File, fset *token.FileSet, settings Settings) error { - info, err := os.Stat(path) - if err != nil { - return fmt.Errorf("check file: %w", err) - } - mode := info.Mode() - - fixed, err := Fix(path, file, fset, settings) - if err != nil { - return fmt.Errorf("fix issues: %w", err) - } - - if err := os.WriteFile(path, fixed, mode); err != nil { - return fmt.Errorf("write file: %w", err) - } - return nil -} - -// sortIssues sorts by filename, line and column. -func sortIssues(iss []Issue) { - sort.Slice(iss, func(i, j int) bool { - if iss[i].Pos.Filename != iss[j].Pos.Filename { - return iss[i].Pos.Filename < iss[j].Pos.Filename - } - if iss[i].Pos.Line != iss[j].Pos.Line { - return iss[i].Pos.Line < iss[j].Pos.Line - } - return iss[i].Pos.Column < iss[j].Pos.Column - }) -} diff --git a/vendor/github.com/tetafro/godot/settings.go b/vendor/github.com/tetafro/godot/settings.go deleted file mode 100644 index b71bf5d58..000000000 --- a/vendor/github.com/tetafro/godot/settings.go +++ /dev/null @@ -1,29 +0,0 @@ -package godot - -// Settings contains linter settings. -type Settings struct { - // Which comments to check (top level declarations, top level, all). - Scope Scope - - // Regexp for excluding particular comment lines from check. - Exclude []string - - // Check periods at the end of sentences. - Period bool - - // Check that first letter of each sentence is capital. - Capital bool -} - -// Scope sets which comments should be checked. -type Scope string - -// List of available check scopes. -const ( - // DeclScope is for top level declaration comments. - DeclScope Scope = "declarations" - // TopLevelScope is for all top level comments. - TopLevelScope Scope = "toplevel" - // AllScope is for all comments. - AllScope Scope = "all" -) diff --git a/vendor/github.com/timakin/bodyclose/LICENSE b/vendor/github.com/timakin/bodyclose/LICENSE deleted file mode 100644 index 6957f1889..000000000 --- a/vendor/github.com/timakin/bodyclose/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -MIT License - -Copyright (c) 2019 Seiji Takahashi - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. 
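The godot sources deleted above (godot.go, settings.go) expose a small programmatic API: `Run` takes a parsed file plus `Settings` and returns a slice of `Issue`. A minimal sketch of calling it directly, outside golangci-lint, is below; the file path is a placeholder, and the file must exist on disk because godot re-reads the original source to build replacements.

```go
// Minimal sketch of driving the removed godot API directly, using the
// Run signature and Settings struct from the files deleted above.
// "example.go" is a placeholder path.
package main

import (
	"fmt"
	"go/parser"
	"go/token"
	"log"

	"github.com/tetafro/godot"
)

func main() {
	fset := token.NewFileSet()
	// Parse with comments attached; godot inspects file.Comments.
	file, err := parser.ParseFile(fset, "example.go", nil, parser.ParseComments)
	if err != nil {
		log.Fatal(err)
	}

	issues, err := godot.Run(file, fset, godot.Settings{
		Scope:   godot.DeclScope, // top level declaration comments only
		Period:  true,            // require a trailing period
		Capital: false,
	})
	if err != nil {
		log.Fatal(err)
	}
	for _, iss := range issues {
		fmt.Printf("%s: %s\n", iss.Pos, iss.Message)
	}
}
```

`Fix` and `Replace` in the deleted godot.go build on the same `Run` result to produce, or write back, the corrected file contents.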
diff --git a/vendor/github.com/timakin/bodyclose/passes/bodyclose/bodyclose.go b/vendor/github.com/timakin/bodyclose/passes/bodyclose/bodyclose.go deleted file mode 100644 index 27042a153..000000000 --- a/vendor/github.com/timakin/bodyclose/passes/bodyclose/bodyclose.go +++ /dev/null @@ -1,388 +0,0 @@ -package bodyclose - -import ( - "fmt" - "go/ast" - "go/types" - "strconv" - "strings" - - "github.com/gostaticanalysis/analysisutil" - "golang.org/x/tools/go/analysis" - "golang.org/x/tools/go/analysis/passes/buildssa" - "golang.org/x/tools/go/ssa" -) - -var Analyzer = &analysis.Analyzer{ - Name: "bodyclose", - Doc: Doc, - Run: new(runner).run, - Requires: []*analysis.Analyzer{ - buildssa.Analyzer, - }, -} - -const ( - Doc = "checks whether HTTP response body is closed successfully" - - nethttpPath = "net/http" - closeMethod = "Close" -) - -type runner struct { - pass *analysis.Pass - resObj types.Object - resTyp *types.Pointer - bodyObj types.Object - closeMthd *types.Func - skipFile map[*ast.File]bool -} - -// run executes an analysis for the pass. The receiver is passed -// by value because this func is called in parallel for different passes. -func (r runner) run(pass *analysis.Pass) (interface{}, error) { - r.pass = pass - funcs := pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA).SrcFuncs - - r.resObj = analysisutil.LookupFromImports(pass.Pkg.Imports(), nethttpPath, "Response") - if r.resObj == nil { - // skip checking - return nil, nil - } - - resNamed, ok := r.resObj.Type().(*types.Named) - if !ok { - return nil, fmt.Errorf("cannot find http.Response") - } - r.resTyp = types.NewPointer(resNamed) - - resStruct, ok := r.resObj.Type().Underlying().(*types.Struct) - if !ok { - return nil, fmt.Errorf("cannot find http.Response") - } - for i := 0; i < resStruct.NumFields(); i++ { - field := resStruct.Field(i) - if field.Id() == "Body" { - r.bodyObj = field - - break - } - } - if r.bodyObj == nil { - return nil, fmt.Errorf("cannot find the object http.Response.Body") - } - bodyNamed := r.bodyObj.Type().(*types.Named) - bodyItrf := bodyNamed.Underlying().(*types.Interface) - for i := 0; i < bodyItrf.NumMethods(); i++ { - bmthd := bodyItrf.Method(i) - if bmthd.Id() == closeMethod { - r.closeMthd = bmthd - - break - } - } - - r.skipFile = map[*ast.File]bool{} -FuncLoop: - for _, f := range funcs { - // skip if the function is just referenced - for i := 0; i < f.Signature.Results().Len(); i++ { - if f.Signature.Results().At(i).Type().String() == r.resTyp.String() { - continue FuncLoop - } - } - - for _, b := range f.Blocks { - for i := range b.Instrs { - pos := b.Instrs[i].Pos() - if r.isopen(b, i) { - pass.Reportf(pos, "response body must be closed") - } - } - } - } - - return nil, nil -} - -func (r *runner) isopen(b *ssa.BasicBlock, i int) bool { - call, ok := r.getReqCall(b.Instrs[i]) - if !ok { - return false - } - - if len(*call.Referrers()) == 0 { - return true - } - cRefs := *call.Referrers() - for _, cRef := range cRefs { - val, ok := r.getResVal(cRef) - if !ok { - continue - } - - if len(*val.Referrers()) == 0 { - return true - } - resRefs := *val.Referrers() - for _, resRef := range resRefs { - switch resRef := resRef.(type) { - case *ssa.Store: // Call in Closure function / Response is global variable - if _, ok := resRef.Addr.(*ssa.Global); ok { - // Referrers for globals are always nil, so skip. 
- return false - } - - if len(*resRef.Addr.Referrers()) == 0 { - return true - } - - for _, aref := range *resRef.Addr.Referrers() { - if c, ok := aref.(*ssa.MakeClosure); ok { - f := c.Fn.(*ssa.Function) - if r.noImportedNetHTTP(f) { - // skip this - return false - } - called := r.isClosureCalled(c) - - return r.calledInFunc(f, called) - } - - } - case *ssa.Call, *ssa.Defer: // Indirect function call - // Hacky way to extract CommonCall - var call ssa.CallCommon - switch rr := resRef.(type) { - case *ssa.Call: - call = rr.Call - case *ssa.Defer: - call = rr.Call - } - - if f, ok := call.Value.(*ssa.Function); ok { - for _, b := range f.Blocks { - for i, bi := range b.Instrs { - if r.isCloseCall(bi) { - return false - } - - if r.isopen(b, i) { - return true - } - } - } - } - case *ssa.FieldAddr: // Normal reference to response entity - if resRef.Referrers() == nil { - return true - } - - bRefs := *resRef.Referrers() - - for _, bRef := range bRefs { - bOp, ok := r.getBodyOp(bRef) - if !ok { - continue - } - if len(*bOp.Referrers()) == 0 { - return true - } - ccalls := *bOp.Referrers() - for _, ccall := range ccalls { - if r.isCloseCall(ccall) { - return false - } - } - } - } - } - } - - return true -} - -func (r *runner) getReqCall(instr ssa.Instruction) (*ssa.Call, bool) { - call, ok := instr.(*ssa.Call) - if !ok { - return nil, false - } - if !strings.Contains(call.Type().String(), r.resTyp.String()) { - return nil, false - } - return call, true -} - -func (r *runner) getResVal(instr ssa.Instruction) (ssa.Value, bool) { - switch instr := instr.(type) { - case *ssa.FieldAddr: - if instr.X.Type().String() == r.resTyp.String() { - return instr.X.(ssa.Value), true - } - case ssa.Value: - if instr.Type().String() == r.resTyp.String() { - return instr, true - } - case *ssa.Store: - if instr.Val.Type().String() == r.resTyp.String() { - return instr.Val, true - } - } - return nil, false -} - -func (r *runner) getBodyOp(instr ssa.Instruction) (*ssa.UnOp, bool) { - op, ok := instr.(*ssa.UnOp) - if !ok { - return nil, false - } - if op.Type() != r.bodyObj.Type() { - return nil, false - } - return op, true -} - -func (r *runner) isCloseCall(ccall ssa.Instruction) bool { - switch ccall := ccall.(type) { - case *ssa.Defer: - if ccall.Call.Method != nil && ccall.Call.Method.Name() == r.closeMthd.Name() { - return true - } - case *ssa.Call: - if ccall.Call.Method != nil && ccall.Call.Method.Name() == r.closeMthd.Name() { - return true - } - case *ssa.ChangeInterface: - if ccall.Type().String() == "io.Closer" { - closeMtd := ccall.Type().Underlying().(*types.Interface).Method(0) - crs := *ccall.Referrers() - for _, cs := range crs { - if cs, ok := cs.(*ssa.Defer); ok { - if val, ok := cs.Common().Value.(*ssa.Function); ok { - for _, b := range val.Blocks { - for _, instr := range b.Instrs { - if c, ok := instr.(*ssa.Call); ok { - if c.Call.Method == closeMtd { - return true - } - } - } - } - } - } - - if returnOp, ok := cs.(*ssa.Return); ok { - for _, resultValue := range returnOp.Results { - if resultValue.Type().String() == "io.Closer" { - return true - } - } - } - } - } - case *ssa.Return: - for _, resultValue := range ccall.Results { - if resultValue.Type().String() == "io.ReadCloser" { - return true - } - } - } - return false -} - -func (r *runner) isClosureCalled(c *ssa.MakeClosure) bool { - refs := *c.Referrers() - if len(refs) == 0 { - return false - } - for _, ref := range refs { - switch ref.(type) { - case *ssa.Call, *ssa.Defer: - return true - } - } - return false -} - -func (r *runner) 
noImportedNetHTTP(f *ssa.Function) (ret bool) { - obj := f.Object() - if obj == nil { - return false - } - - file := analysisutil.File(r.pass, obj.Pos()) - if file == nil { - return false - } - - if skip, has := r.skipFile[file]; has { - return skip - } - defer func() { - r.skipFile[file] = ret - }() - - for _, impt := range file.Imports { - path, err := strconv.Unquote(impt.Path.Value) - if err != nil { - continue - } - path = analysisutil.RemoveVendor(path) - if path == nethttpPath { - return false - } - } - - return true -} - -func (r *runner) calledInFunc(f *ssa.Function, called bool) bool { - for _, b := range f.Blocks { - for i, instr := range b.Instrs { - switch instr := instr.(type) { - case *ssa.UnOp: - refs := *instr.Referrers() - if len(refs) == 0 { - return true - } - for _, r := range refs { - if v, ok := r.(ssa.Value); ok { - if ptr, ok := v.Type().(*types.Pointer); !ok || !isNamedType(ptr.Elem(), "io", "ReadCloser") { - continue - } - vrefs := *v.Referrers() - for _, vref := range vrefs { - if vref, ok := vref.(*ssa.UnOp); ok { - vrefs := *vref.Referrers() - if len(vrefs) == 0 { - return true - } - for _, vref := range vrefs { - if c, ok := vref.(*ssa.Call); ok { - if c.Call.Method != nil && c.Call.Method.Name() == closeMethod { - return !called - } - } - } - } - } - } - - } - default: - return r.isopen(b, i) || !called - } - } - } - return false -} - -// isNamedType reports whether t is the named type path.name. -func isNamedType(t types.Type, path, name string) bool { - n, ok := t.(*types.Named) - if !ok { - return false - } - obj := n.Obj() - return obj.Name() == name && obj.Pkg() != nil && obj.Pkg().Path() == path -} diff --git a/vendor/github.com/timonwong/loggercheck/.codecov.yml b/vendor/github.com/timonwong/loggercheck/.codecov.yml deleted file mode 100644 index ef90457ca..000000000 --- a/vendor/github.com/timonwong/loggercheck/.codecov.yml +++ /dev/null @@ -1,16 +0,0 @@ -coverage: - range: 70..90 # green if 90+, red if 70- - status: - patch: - # coverage status for pull request diff - default: - threshold: 1% # allow a little drop - project: - # coverage status for whole project - default: - target: auto # use coverage of base commit as target - threshold: 1% # allow a little drop - -ignore: - - "plugin/**" - - "cmd/**" diff --git a/vendor/github.com/timonwong/loggercheck/.gitignore b/vendor/github.com/timonwong/loggercheck/.gitignore deleted file mode 100644 index 33df0df91..000000000 --- a/vendor/github.com/timonwong/loggercheck/.gitignore +++ /dev/null @@ -1,9 +0,0 @@ -.vscode/ -.idea/ - -bin/ -vendor/ - -dist/ - -cover.out diff --git a/vendor/github.com/timonwong/loggercheck/.golangci.yml b/vendor/github.com/timonwong/loggercheck/.golangci.yml deleted file mode 100644 index 287327893..000000000 --- a/vendor/github.com/timonwong/loggercheck/.golangci.yml +++ /dev/null @@ -1,94 +0,0 @@ -linters-settings: - dupl: - threshold: 100 - funlen: - lines: 100 - statements: 50 - goconst: - min-len: 2 - min-occurrences: 3 - gocritic: - enabled-tags: - - diagnostic - - experimental - - opinionated - - performance - - style - disabled-checks: - - whyNoLint - gocyclo: - min-complexity: 15 - goimports: - local-prefixes: github.com/timonwong/loggercheck - gomnd: - # don't include the "operation" and "assign" - checks: - - argument - - case - - condition - - return - ignored-numbers: - - '0' - - '1' - - '2' - - '3' - ignored-functions: - - strings.SplitN - - strconv.ParseInt - govet: - check-shadowing: true - lll: - line-length: 140 - misspell: - locale: US - nolintlint: - 
allow-unused: false # report any unused nolint directives - require-explanation: false # don't require an explanation for nolint directives - require-specific: false # don't require nolint directives to be specific about which linter is being skipped -linters: - disable-all: true - enable: - - bodyclose - - dogsled - - dupl - - errcheck - - exportloopref - - funlen - - gochecknoinits - - goconst - - gocritic - - gocyclo - - gofumpt - - goimports - - gomnd - - goprintffuncname - - gosec - - gosimple - - govet - - ineffassign - - lll - - misspell - - nakedret - - noctx - - nolintlint - - revive - - staticcheck - - stylecheck - - typecheck - - unconvert - - unparam - - unused - - whitespace - -issues: - # Excluding configuration per-path, per-linter, per-text and per-source - exclude-rules: - - path: _test\.go - linters: - - gomnd - -run: - timeout: 5m - go: '1.17' - skip-dirs: - - testdata \ No newline at end of file diff --git a/vendor/github.com/timonwong/loggercheck/.goreleaser.yml b/vendor/github.com/timonwong/loggercheck/.goreleaser.yml deleted file mode 100644 index 55ffe7ea0..000000000 --- a/vendor/github.com/timonwong/loggercheck/.goreleaser.yml +++ /dev/null @@ -1,59 +0,0 @@ ---- -project_name: loggercheck - -release: - github: - owner: timonwong - name: loggercheck - -builds: - - binary: loggercheck - goos: - - darwin - - windows - - linux - goarch: - - '386' - - amd64 - - arm - - arm64 - goarm: - - '7' - env: - - CGO_ENABLED=0 - ignore: - - goos: darwin - goarch: '386' - main: ./cmd/loggercheck/ - flags: - - -trimpath - ldflags: -s -w - -archives: - - name_template: '{{ .ProjectName }}_{{ .Os }}_{{ .Arch }}{{ if .Arm }}v{{ .Arm }}{{ end }}' - format_overrides: - - goos: windows - format: zip - files: - - LICENSE - - README.md - -snapshot: - name_template: '{{ incpatch .Version }}-next' - -checksum: - name_template: 'checksums.txt' - -changelog: - sort: asc - filters: - exclude: - - '(?i)^docs?:' - - '(?i)^docs\([^:]+\):' - - '(?i)^docs\[[^:]+\]:' - - '^tests?:' - - '(?i)^dev:' - - '^build\(deps\): bump .* in /docs \(#\d+\)' - - '^build\(deps\): bump .* in /\.github/peril \(#\d+\)' - - Merge pull request - - Merge branch diff --git a/vendor/github.com/timonwong/loggercheck/LICENSE b/vendor/github.com/timonwong/loggercheck/LICENSE deleted file mode 100644 index fb6531090..000000000 --- a/vendor/github.com/timonwong/loggercheck/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -MIT License - -Copyright (c) 2022 Timon Wong - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. 
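For context on the bodyclose analyzer removed above ("checks whether HTTP response body is closed successfully"), here is a small illustrative snippet, not taken from the removed sources, showing the pattern it reports and the form it accepts.

```go
// Illustration of what the deleted bodyclose analyzer flags: an
// *http.Response whose Body is never closed, versus the accepted
// defer-Close form.
package example

import (
	"io"
	"net/http"
)

func leaky(url string) error {
	resp, err := http.Get(url)
	if err != nil {
		return err
	}
	// Reported as "response body must be closed": Body is never closed.
	_, err = io.ReadAll(resp.Body)
	return err
}

func closed(url string) error {
	resp, err := http.Get(url)
	if err != nil {
		return err
	}
	defer resp.Body.Close() // satisfies the checker
	_, err = io.ReadAll(resp.Body)
	return err
}
```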
diff --git a/vendor/github.com/timonwong/loggercheck/Makefile b/vendor/github.com/timonwong/loggercheck/Makefile deleted file mode 100644 index 37bf87202..000000000 --- a/vendor/github.com/timonwong/loggercheck/Makefile +++ /dev/null @@ -1,22 +0,0 @@ -.PHONY: lint -lint: - golangci-lint run ./... - -.PHONY: test-deps -test-deps: - cd testdata/src/a && go mod vendor - -.PHONY: test -test: test-deps - go test -v -covermode=atomic -coverprofile=cover.out -coverpkg ./... ./... - -.PHONY: build -build: - go build -o bin/loggercheck ./cmd/loggercheck - -.PHONY: build-plugin -build-plugin: - CGO_ENABLED=1 go build -o bin/loggercheck.so -buildmode=plugin ./plugin - -.PHONY: build-all -build-all: build build-plugin diff --git a/vendor/github.com/timonwong/loggercheck/README.md b/vendor/github.com/timonwong/loggercheck/README.md deleted file mode 100644 index 14aeca371..000000000 --- a/vendor/github.com/timonwong/loggercheck/README.md +++ /dev/null @@ -1,103 +0,0 @@ -# loggercheck - -## Description - -A linter checks the odd number of key and value pairs for common logger libraries: -- [kitlog](https://github.com/go-kit/log) -- [klog](https://github.com/kubernetes/klog) -- [logr](https://github.com/go-logr/logr) -- [zap](https://github.com/uber-go/zap) - -## Badges - -![Build Status](https://github.com/timonwong/loggercheck/workflows/CI/badge.svg) -[![Coverage](https://img.shields.io/codecov/c/github/timonwong/loggercheck?token=Nutf41gwoG)](https://app.codecov.io/gh/timonwong/loggercheck) -[![License](https://img.shields.io/github/license/timonwong/loggercheck.svg)](/LICENSE) -[![Release](https://img.shields.io/github/release/timonwong/loggercheck.svg)](https://github.com/timonwong/loggercheck/releases/latest) - -## Install - -```shel -go install github.com/timonwong/loggercheck/cmd/loggercheck -``` - -## Usage - -``` -loggercheck: Checks key value pairs for common logger libraries (kitlog,logr,klog,zap). 
- -Usage: loggercheck [-flag] [package] - - -Flags: - -V print version and exit - -all - no effect (deprecated) - -c int - display offending line with this many lines of context (default -1) - -cpuprofile string - write CPU profile to this file - -debug string - debug flags, any subset of "fpstv" - -disable value - comma-separated list of disabled logger checker (kitlog,klog,logr,zap) (default kitlog) - -fix - apply all suggested fixes - -flags - print analyzer flags in JSON - -json - emit JSON output - -memprofile string - write memory profile to this file - -noprintflike - require printf-like format specifier not present in args - -requirestringkey - require all logging keys to be inlined constant strings - -rulefile string - path to a file contains a list of rules - -source - no effect (deprecated) - -tags string - no effect (deprecated) - -test - indicates whether test files should be analyzed, too (default true) - -trace string - write trace log to this file - -v no effect (deprecated) -``` - -## Example - -```go -package a - -import ( - "context" - "fmt" - - "github.com/go-logr/logr" -) - -func Example() { - log := logr.Discard() - log = log.WithValues("key") - log.Info("message", "key1", "value1", "key2", "value2", "key3") - log.Error(fmt.Errorf("error"), "message", "key1", "value1", "key2") - log.Error(fmt.Errorf("error"), "message", "key1", "value1", "key2", "value2") - - var log2 logr.Logger - log2 = log - log2.Info("message", "key1") - - log3 := logr.FromContextOrDiscard(context.TODO()) - log3.Error(fmt.Errorf("error"), "message", "key1") -} -``` - -``` -a.go:12:23: odd number of arguments passed as key-value pairs for logging -a.go:13:22: odd number of arguments passed as key-value pairs for logging -a.go:14:44: odd number of arguments passed as key-value pairs for logging -a.go:19:23: odd number of arguments passed as key-value pairs for logging -a.go:22:45: odd number of arguments passed as key-value pairs for logging -``` \ No newline at end of file diff --git a/vendor/github.com/timonwong/loggercheck/internal/bytebufferpool/pool.go b/vendor/github.com/timonwong/loggercheck/internal/bytebufferpool/pool.go deleted file mode 100644 index 9d88d21c4..000000000 --- a/vendor/github.com/timonwong/loggercheck/internal/bytebufferpool/pool.go +++ /dev/null @@ -1,22 +0,0 @@ -package bytebufferpool - -import ( - "bytes" - "sync" -) - -var pool = sync.Pool{ - New: func() interface{} { - return new(bytes.Buffer) - }, -} - -func Get() *bytes.Buffer { - buf := pool.Get().(*bytes.Buffer) - buf.Reset() - return buf -} - -func Put(buf *bytes.Buffer) { - pool.Put(buf) -} diff --git a/vendor/github.com/timonwong/loggercheck/internal/checkers/checker.go b/vendor/github.com/timonwong/loggercheck/internal/checkers/checker.go deleted file mode 100644 index 5615636ef..000000000 --- a/vendor/github.com/timonwong/loggercheck/internal/checkers/checker.go +++ /dev/null @@ -1,59 +0,0 @@ -package checkers - -import ( - "go/ast" - "go/types" - - "golang.org/x/tools/go/analysis" -) - -type Config struct { - RequireStringKey bool - NoPrintfLike bool -} - -type CallContext struct { - Expr *ast.CallExpr - Func *types.Func - Signature *types.Signature -} - -type Checker interface { - FilterKeyAndValues(pass *analysis.Pass, keyAndValues []ast.Expr) []ast.Expr - CheckLoggingKey(pass *analysis.Pass, keyAndValues []ast.Expr) - CheckPrintfLikeSpecifier(pass *analysis.Pass, args []ast.Expr) -} - -func ExecuteChecker(c Checker, pass *analysis.Pass, call CallContext, cfg Config) { - params := call.Signature.Params() - 
nparams := params.Len() // variadic => nonzero - startIndex := nparams - 1 - - lastArg := params.At(nparams - 1) - iface, ok := lastArg.Type().(*types.Slice).Elem().(*types.Interface) - if !ok || !iface.Empty() { - return // final (args) param is not ...interface{} - } - - keyValuesArgs := c.FilterKeyAndValues(pass, call.Expr.Args[startIndex:]) - - if len(keyValuesArgs)%2 != 0 { - firstArg := keyValuesArgs[0] - lastArg := keyValuesArgs[len(keyValuesArgs)-1] - pass.Report(analysis.Diagnostic{ - Pos: firstArg.Pos(), - End: lastArg.End(), - Category: DiagnosticCategory, - Message: "odd number of arguments passed as key-value pairs for logging", - }) - } - - if cfg.RequireStringKey { - c.CheckLoggingKey(pass, keyValuesArgs) - } - - if cfg.NoPrintfLike { - // Check all args - c.CheckPrintfLikeSpecifier(pass, call.Expr.Args) - } -} diff --git a/vendor/github.com/timonwong/loggercheck/internal/checkers/common.go b/vendor/github.com/timonwong/loggercheck/internal/checkers/common.go deleted file mode 100644 index 42cbd0193..000000000 --- a/vendor/github.com/timonwong/loggercheck/internal/checkers/common.go +++ /dev/null @@ -1,49 +0,0 @@ -package checkers - -import ( - "go/ast" - "go/constant" - "go/printer" - "go/token" - "go/types" - "unicode/utf8" - - "golang.org/x/tools/go/analysis" - - "github.com/timonwong/loggercheck/internal/bytebufferpool" -) - -const ( - DiagnosticCategory = "logging" -) - -// extractValueFromStringArg returns true if the argument is a string type (literal or constant). -func extractValueFromStringArg(pass *analysis.Pass, arg ast.Expr) (value string, ok bool) { - if typeAndValue, ok := pass.TypesInfo.Types[arg]; ok { - if typ, ok := typeAndValue.Type.(*types.Basic); ok && typ.Kind() == types.String && typeAndValue.Value != nil { - return constant.StringVal(typeAndValue.Value), true - } - } - - return "", false -} - -func renderNodeEllipsis(fset *token.FileSet, v interface{}) string { - const maxLen = 20 - - buf := bytebufferpool.Get() - defer bytebufferpool.Put(buf) - - _ = printer.Fprint(buf, fset, v) - s := buf.String() - if utf8.RuneCountInString(s) > maxLen { - // Copied from go/constant/value.go - i := 0 - for n := 0; n < maxLen-3; n++ { - _, size := utf8.DecodeRuneInString(s[i:]) - i += size - } - s = s[:i] + "..." 
- } - return s -} diff --git a/vendor/github.com/timonwong/loggercheck/internal/checkers/general.go b/vendor/github.com/timonwong/loggercheck/internal/checkers/general.go deleted file mode 100644 index 6512cce30..000000000 --- a/vendor/github.com/timonwong/loggercheck/internal/checkers/general.go +++ /dev/null @@ -1,68 +0,0 @@ -package checkers - -import ( - "fmt" - "go/ast" - - "golang.org/x/tools/go/analysis" - - "github.com/timonwong/loggercheck/internal/checkers/printf" - "github.com/timonwong/loggercheck/internal/stringutil" -) - -type General struct{} - -func (g General) FilterKeyAndValues(_ *analysis.Pass, keyAndValues []ast.Expr) []ast.Expr { - return keyAndValues -} - -func (g General) CheckLoggingKey(pass *analysis.Pass, keyAndValues []ast.Expr) { - for i := 0; i < len(keyAndValues); i += 2 { - arg := keyAndValues[i] - if value, ok := extractValueFromStringArg(pass, arg); ok { - if stringutil.IsASCII(value) { - continue - } - - pass.Report(analysis.Diagnostic{ - Pos: arg.Pos(), - End: arg.End(), - Category: DiagnosticCategory, - Message: fmt.Sprintf( - "logging keys are expected to be alphanumeric strings, please remove any non-latin characters from %q", - value), - }) - } else { - pass.Report(analysis.Diagnostic{ - Pos: arg.Pos(), - End: arg.End(), - Category: DiagnosticCategory, - Message: fmt.Sprintf( - "logging keys are expected to be inlined constant strings, please replace %q provided with string", - renderNodeEllipsis(pass.Fset, arg)), - }) - } - } -} - -func (g General) CheckPrintfLikeSpecifier(pass *analysis.Pass, args []ast.Expr) { - for _, arg := range args { - format, ok := extractValueFromStringArg(pass, arg) - if !ok { - continue - } - - if specifier, ok := printf.IsPrintfLike(format); ok { - pass.Report(analysis.Diagnostic{ - Pos: arg.Pos(), - End: arg.End(), - Category: DiagnosticCategory, - Message: fmt.Sprintf("logging message should not use format specifier %q", specifier), - }) - - return // One error diagnostic is enough - } - } -} - -var _ Checker = (*General)(nil) diff --git a/vendor/github.com/timonwong/loggercheck/internal/checkers/printf/printf.go b/vendor/github.com/timonwong/loggercheck/internal/checkers/printf/printf.go deleted file mode 100644 index b38f46f20..000000000 --- a/vendor/github.com/timonwong/loggercheck/internal/checkers/printf/printf.go +++ /dev/null @@ -1,252 +0,0 @@ -package printf - -import ( - "strconv" - "strings" - "unicode/utf8" -) - -// Copied from golang.org/x/tools -// Copyright 2010 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -type printVerb struct { - verb rune // User may provide verb through Formatter; could be a rune. - flags string // known flags are all ASCII -} - -// Common flag sets for printf verbs. -const ( - noFlag = "" - numFlag = " -+.0" - sharpNumFlag = " -+.0#" - allFlags = " -+.0#" -) - -// printVerbs identifies which flags are known to printf for each verb. -var printVerbs = []printVerb{ - // '-' is a width modifier, always valid. - // '.' is a precision for float, max width for strings. - // '+' is required sign for numbers, Go format for %v. - // '#' is alternate format for several verbs. 
- // ' ' is spacer for numbers - {'%', noFlag}, - {'b', sharpNumFlag}, - {'c', "-"}, - {'d', numFlag}, - {'e', sharpNumFlag}, - {'E', sharpNumFlag}, - {'f', sharpNumFlag}, - {'F', sharpNumFlag}, - {'g', sharpNumFlag}, - {'G', sharpNumFlag}, - {'o', sharpNumFlag}, - {'O', sharpNumFlag}, - {'p', "-#"}, - {'q', " -+.0#"}, - {'s', " -+.0"}, - {'t', "-"}, - {'T', "-"}, - {'U', "-#"}, - {'v', allFlags}, - {'w', allFlags}, - {'x', sharpNumFlag}, - {'X', sharpNumFlag}, -} - -// formatState holds the parsed representation of a printf directive such as "%3.*[4]d". -// It is constructed by parsePrintfVerb. -type formatState struct { - verb rune // the format verb: 'd' for "%d" - format string // the full format directive from % through verb, "%.3d". - flags []byte // the list of # + etc. - // Used only during parse. - hasIndex bool // Whether the argument is indexed. - indexPending bool // Whether we have an indexed argument that has not resolved. - nbytes int // number of bytes of the format string consumed. -} - -// parseFlags accepts any printf flags. -func (s *formatState) parseFlags() { - for s.nbytes < len(s.format) { - switch c := s.format[s.nbytes]; c { - case '#', '0', '+', '-', ' ': - s.flags = append(s.flags, c) - s.nbytes++ - default: - return - } - } -} - -// scanNum advances through a decimal number if present. -func (s *formatState) scanNum() { - for ; s.nbytes < len(s.format); s.nbytes++ { - c := s.format[s.nbytes] - if c < '0' || '9' < c { - return - } - } -} - -func stringIndexAt(s, substr string, start int) int { - idx := strings.Index(s[start:], substr) - if idx < 0 { - return idx - } - return idx + start -} - -// parseIndex scans an index expression. It returns false if there is a syntax error. -func (s *formatState) parseIndex() bool { - if s.nbytes == len(s.format) || s.format[s.nbytes] != '[' { - return true - } - // Argument index present. - s.nbytes++ // skip '[' - start := s.nbytes - s.scanNum() - ok := true - if s.nbytes == len(s.format) || s.nbytes == start || s.format[s.nbytes] != ']' { - ok = false - s.nbytes = stringIndexAt(s.format, "]", start) - if s.nbytes < 0 { - return false - } - } - arg32, err := strconv.ParseInt(s.format[start:s.nbytes], 10, 32) - if err != nil || !ok || arg32 <= 0 { - return false - } - s.nbytes++ // skip ']' - s.hasIndex = true - s.indexPending = true - return true -} - -// parseNum scans a width or precision (or *). -func (s *formatState) parseNum() { - if s.nbytes < len(s.format) && s.format[s.nbytes] == '*' { - if s.indexPending { // Absorb it. - s.indexPending = false - } - s.nbytes++ - } else { - s.scanNum() - } -} - -// parsePrecision scans for a precision. It returns false if there's a bad index expression. -func (s *formatState) parsePrecision() bool { - // If there's a period, there may be a precision. - if s.nbytes < len(s.format) && s.format[s.nbytes] == '.' { - s.flags = append(s.flags, '.') // Treat precision as a flag. - s.nbytes++ - if !s.parseIndex() { - return false - } - s.parseNum() - } - return true -} - -// parsePrintfVerb looks the formatting directive that begins the format string -// and returns a formatState that encodes what the directive wants, without looking -// at the actual arguments present in the call. The result is nil if there is an error. -func parsePrintfVerb(format string) *formatState { - state := &formatState{ - format: format, - flags: make([]byte, 0, 5), //nolint:gomnd - nbytes: 1, // There's guaranteed to be a percent sign. - } - - // There may be flags. 
- state.parseFlags() - // There may be an index. - if !state.parseIndex() { - return nil - } - // There may be a width. - state.parseNum() - // There may be a precision. - if !state.parsePrecision() { - return nil - } - // Now a verb, possibly prefixed by an index (which we may already have). - if !state.indexPending && !state.parseIndex() { - return nil - } - if state.nbytes == len(state.format) { - // missing verb at end of string - return nil - } - verb, w := utf8.DecodeRuneInString(state.format[state.nbytes:]) - state.verb = verb - state.nbytes += w - state.format = state.format[:state.nbytes] - return state -} - -func containsAll(s string, pattern []byte) bool { - for _, c := range pattern { - if !strings.ContainsRune(s, rune(c)) { - return false - } - } - return true -} - -func isPrintfArg(state *formatState) bool { - var v printVerb - found := false - // Linear scan is fast enough for a small list. - for _, v = range printVerbs { - if v.verb == state.verb { - found = true - break - } - } - - if !found { - // unknown verb, just skip - return false - } - - if !containsAll(v.flags, state.flags) { - // unrecognized format flag, just skip - return false - } - - return true -} - -func IsPrintfLike(format string) (firstSpecifier string, ok bool) { - if !strings.Contains(format, "%") { - return "", false - } - - for i, w := 0, 0; i < len(format); i += w { - w = 1 - if format[i] != '%' { - continue - } - - state := parsePrintfVerb(format[i:]) - if state == nil { - return "", false - } - - w = len(state.format) - if !isPrintfArg(state) { - return "", false - } - - if !ok { - firstSpecifier = state.format - ok = true - } - } - - return -} diff --git a/vendor/github.com/timonwong/loggercheck/internal/checkers/zap.go b/vendor/github.com/timonwong/loggercheck/internal/checkers/zap.go deleted file mode 100644 index 2356f8348..000000000 --- a/vendor/github.com/timonwong/loggercheck/internal/checkers/zap.go +++ /dev/null @@ -1,42 +0,0 @@ -package checkers - -import ( - "go/ast" - "go/types" - - "golang.org/x/tools/go/analysis" -) - -type Zap struct { - General -} - -func (z Zap) FilterKeyAndValues(pass *analysis.Pass, keyAndValues []ast.Expr) []ast.Expr { - // Check the argument count - filtered := make([]ast.Expr, 0, len(keyAndValues)) - for _, arg := range keyAndValues { - // Skip any zapcore.Field we found - switch arg := arg.(type) { - case *ast.CallExpr, *ast.Ident: - typ := pass.TypesInfo.TypeOf(arg) - switch typ := typ.(type) { - case *types.Named: - obj := typ.Obj() - // This is a strongly-typed field. Consume it and move on. 
- // Actually it's go.uber.org/zap/zapcore.Field, however for simplicity - // we don't check the import path - if obj != nil && obj.Name() == "Field" { - continue - } - default: - // pass - } - } - - filtered = append(filtered, arg) - } - - return filtered -} - -var _ Checker = (*Zap)(nil) diff --git a/vendor/github.com/timonwong/loggercheck/internal/rules/rules.go b/vendor/github.com/timonwong/loggercheck/internal/rules/rules.go deleted file mode 100644 index 27d6ebb27..000000000 --- a/vendor/github.com/timonwong/loggercheck/internal/rules/rules.go +++ /dev/null @@ -1,201 +0,0 @@ -package rules - -import ( - "bufio" - "errors" - "fmt" - "go/types" - "io" - "strings" - - "github.com/timonwong/loggercheck/internal/bytebufferpool" -) - -var ErrInvalidRule = errors.New("invalid rule format") - -const CustomRulesetName = "custom" - -type Ruleset struct { - Name string - PackageImport string - Rules []FuncRule - - ruleIndicesByFuncName map[string][]int -} - -func (rs *Ruleset) Match(fn *types.Func) bool { - // PackageImport is already checked (by indices), skip checking it here - sig := fn.Type().(*types.Signature) // it's safe since we already checked - - // Fail fast if the function name is not in the rule list. - indices, ok := rs.ruleIndicesByFuncName[fn.Name()] - if !ok { - return false - } - - for _, idx := range indices { - rule := &rs.Rules[idx] - if matchRule(rule, sig) { - return true - } - } - - return false -} - -func receiverTypeOf(recvType types.Type) string { - buf := bytebufferpool.Get() - defer bytebufferpool.Put(buf) - - var recvNamed *types.Named - switch recvType := recvType.(type) { - case *types.Pointer: - buf.WriteByte('*') - if elem, ok := recvType.Elem().(*types.Named); ok { - recvNamed = elem - } - case *types.Named: - recvNamed = recvType - } - - if recvNamed == nil { - // not supported type - return "" - } - - buf.WriteString(recvNamed.Obj().Name()) - typeParams := recvNamed.TypeParams() - if typeParamsLen := typeParams.Len(); typeParamsLen > 0 { - buf.WriteByte('[') - for i := 0; i < typeParamsLen; i++ { - if i > 0 { - // comma as separator - buf.WriteByte(',') - } - p := typeParams.At(i) - buf.WriteString(p.Obj().Name()) - } - buf.WriteByte(']') - } - - return buf.String() -} - -func matchRule(p *FuncRule, sig *types.Signature) bool { - // we do not check package import here since it's already checked in Match() - recv := sig.Recv() - isReceiver := recv != nil - if isReceiver != p.IsReceiver { - return false - } - - if isReceiver { - recvType := recv.Type() - receiverType := receiverTypeOf(recvType) - if receiverType != p.ReceiverType { - return false - } - } - - return true -} - -type FuncRule struct { // package import should be accessed from Rulset - ReceiverType string - FuncName string - IsReceiver bool -} - -func ParseFuncRule(rule string) (packageImport string, pat FuncRule, err error) { - lastDot := strings.LastIndexFunc(rule, func(r rune) bool { - return r == '.' 
|| r == '/' - }) - if lastDot == -1 || rule[lastDot] == '/' { - return "", pat, ErrInvalidRule - } - - importOrReceiver := rule[:lastDot] - pat.FuncName = rule[lastDot+1:] - - if strings.HasPrefix(rule, "(") { // package - if !strings.HasSuffix(importOrReceiver, ")") { - return "", FuncRule{}, ErrInvalidRule - } - - var isPointerReceiver bool - pat.IsReceiver = true - receiver := importOrReceiver[1 : len(importOrReceiver)-1] - if strings.HasPrefix(receiver, "*") { - isPointerReceiver = true - receiver = receiver[1:] - } - - typeDotIdx := strings.LastIndexFunc(receiver, func(r rune) bool { - return r == '.' || r == '/' - }) - if typeDotIdx == -1 || receiver[typeDotIdx] == '/' { - return "", FuncRule{}, ErrInvalidRule - } - receiverType := receiver[typeDotIdx+1:] - if isPointerReceiver { - receiverType = "*" + receiverType - } - pat.ReceiverType = receiverType - packageImport = receiver[:typeDotIdx] - } else { - packageImport = importOrReceiver - } - - return packageImport, pat, nil -} - -func ParseRules(lines []string) (result []Ruleset, err error) { - rulesByImport := make(map[string][]FuncRule) - for i, line := range lines { - if line == "" { - continue - } - - if strings.HasPrefix(line, "#") { // comments - continue - } - - packageImport, pat, err := ParseFuncRule(line) - if err != nil { - return nil, fmt.Errorf("error parse rule at line %d: %w", i+1, err) - } - rulesByImport[packageImport] = append(rulesByImport[packageImport], pat) - } - - for packageImport, rules := range rulesByImport { - ruleIndicesByFuncName := make(map[string][]int, len(rules)) - for idx, rule := range rules { - fnName := rule.FuncName - ruleIndicesByFuncName[fnName] = append(ruleIndicesByFuncName[fnName], idx) - } - - result = append(result, Ruleset{ - Name: CustomRulesetName, // NOTE(timonwong) Always "custom" for custom rule - PackageImport: packageImport, - Rules: rules, - ruleIndicesByFuncName: ruleIndicesByFuncName, - }) - } - return result, nil -} - -func ParseRuleFile(r io.Reader) (result []Ruleset, err error) { - // Rule files are relatively small, so read it into string slice first. - var lines []string - - scanner := bufio.NewScanner(r) - for scanner.Scan() { - line := strings.TrimSpace(scanner.Text()) - lines = append(lines, line) - } - if err := scanner.Err(); err != nil { - return nil, err - } - - return ParseRules(lines) -} diff --git a/vendor/github.com/timonwong/loggercheck/internal/sets/string.go b/vendor/github.com/timonwong/loggercheck/internal/sets/string.go deleted file mode 100644 index daf8d57fc..000000000 --- a/vendor/github.com/timonwong/loggercheck/internal/sets/string.go +++ /dev/null @@ -1,59 +0,0 @@ -package sets - -import ( - "sort" - "strings" -) - -type Empty struct{} - -type StringSet map[string]Empty - -func NewString(items ...string) StringSet { - s := make(StringSet) - s.Insert(items...) - return s -} - -func (s StringSet) Insert(items ...string) { - for _, item := range items { - s[item] = Empty{} - } -} - -func (s StringSet) Has(item string) bool { - _, contained := s[item] - return contained -} - -func (s StringSet) List() []string { - if len(s) == 0 { - return nil - } - - res := make([]string, 0, len(s)) - for key := range s { - res = append(res, key) - } - sort.Strings(res) - return res -} - -// Set implements flag.Value interface. -func (s *StringSet) Set(v string) error { - v = strings.TrimSpace(v) - if v == "" { - *s = nil - return nil - } - - parts := strings.Split(v, ",") - set := NewString(parts...) 
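`ParseFuncRule` above accepts two shapes of rule string: `import/path.Func` for package-level functions, and `(import/path.Type).Method` (optionally with a leading `*`) for methods, the same shapes used by the built-in rule sets later in this diff. A minimal sketch of feeding custom rules to the analyzer through the `WithRules` option defined further down; the `example.com/mylog` package and its functions are hypothetical:

```go
package main

import (
	"github.com/timonwong/loggercheck"
	"golang.org/x/tools/go/analysis/singlechecker"
)

func main() {
	// Hypothetical logger API: a package-level Infow plus a method on *Logger.
	rules := []string{
		"example.com/mylog.Infow",
		"(*example.com/mylog.Logger).Infow",
	}
	// Build a vet-style binary that also checks the custom rules.
	singlechecker.Main(loggercheck.NewAnalyzer(loggercheck.WithRules(rules)))
}
```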
- *s = set - return nil -} - -// String implements flag.Value interface -func (s StringSet) String() string { - return strings.Join(s.List(), ",") -} diff --git a/vendor/github.com/timonwong/loggercheck/internal/stringutil/is.go b/vendor/github.com/timonwong/loggercheck/internal/stringutil/is.go deleted file mode 100644 index a36b742fc..000000000 --- a/vendor/github.com/timonwong/loggercheck/internal/stringutil/is.go +++ /dev/null @@ -1,15 +0,0 @@ -package stringutil - -import "unicode/utf8" - -// IsASCII returns true if string are ASCII. -func IsASCII(s string) bool { - for _, r := range s { - if r >= utf8.RuneSelf { - // Not ASCII. - return false - } - } - - return true -} diff --git a/vendor/github.com/timonwong/loggercheck/loggercheck.go b/vendor/github.com/timonwong/loggercheck/loggercheck.go deleted file mode 100644 index 8bd10aee8..000000000 --- a/vendor/github.com/timonwong/loggercheck/loggercheck.go +++ /dev/null @@ -1,196 +0,0 @@ -package loggercheck - -import ( - "flag" - "fmt" - "go/ast" - "go/types" - "os" - "strings" - - "golang.org/x/tools/go/analysis" - "golang.org/x/tools/go/analysis/passes/inspect" - "golang.org/x/tools/go/ast/inspector" - "golang.org/x/tools/go/types/typeutil" - - "github.com/timonwong/loggercheck/internal/checkers" - "github.com/timonwong/loggercheck/internal/rules" - "github.com/timonwong/loggercheck/internal/sets" -) - -const Doc = `Checks key value pairs for common logger libraries (kitlog,klog,logr,zap).` - -func NewAnalyzer(opts ...Option) *analysis.Analyzer { - l := newLoggerCheck(opts...) - a := &analysis.Analyzer{ - Name: "loggercheck", - Doc: Doc, - Flags: *l.fs, - Run: l.run, - Requires: []*analysis.Analyzer{inspect.Analyzer}, - } - return a -} - -type loggercheck struct { - fs *flag.FlagSet - - disable sets.StringSet // flag -disable - ruleFile string // flag -rulefile - requireStringKey bool // flag -requirestringkey - noPrintfLike bool // flag -noprintflike - - rules []string // used for external integration, for example golangci-lint - rulesetList []rules.Ruleset // populate at runtime - rulesetIndicesByImport map[string][]int // ruleset index, populate at runtime -} - -func newLoggerCheck(opts ...Option) *loggercheck { - fs := flag.NewFlagSet("loggercheck", flag.ExitOnError) - l := &loggercheck{ - fs: fs, - disable: sets.NewString("kitlog"), - rulesetList: append([]rules.Ruleset{}, staticRuleList...), // ensure we make a clone of static rules first - } - - fs.StringVar(&l.ruleFile, "rulefile", "", "path to a file contains a list of rules") - fs.Var(&l.disable, "disable", "comma-separated list of disabled logger checker (kitlog,klog,logr,zap)") - fs.BoolVar(&l.requireStringKey, "requirestringkey", false, "require all logging keys to be inlined constant strings") - fs.BoolVar(&l.noPrintfLike, "noprintflike", false, "require printf-like format specifier not present in args") - - for _, opt := range opts { - opt(l) - } - - return l -} - -func (l *loggercheck) isCheckerDisabled(name string) bool { - return l.disable.Has(name) -} - -// vendorLessPath returns the devendorized version of the import path ipath. -// For example: "a/vendor/github.com/go-logr/logr" will become "github.com/go-logr/logr". 
-func vendorLessPath(ipath string) string { - if i := strings.LastIndex(ipath, "/vendor/"); i >= 0 { - return ipath[i+len("/vendor/"):] - } - return ipath -} - -func (l *loggercheck) getCheckerForFunc(fn *types.Func) checkers.Checker { - pkg := fn.Pkg() - if pkg == nil { - return nil - } - - pkgPath := vendorLessPath(pkg.Path()) - indices := l.rulesetIndicesByImport[pkgPath] - - for _, idx := range indices { - rs := &l.rulesetList[idx] - if l.isCheckerDisabled(rs.Name) { - // Skip ignored logger checker. - continue - } - - if !rs.Match(fn) { - continue - } - - checker := checkerByRulesetName[rs.Name] - if checker == nil { - return checkers.General{} - } - return checker - } - - return nil -} - -func (l *loggercheck) checkLoggerArguments(pass *analysis.Pass, call *ast.CallExpr) { - fn, _ := typeutil.Callee(pass.TypesInfo, call).(*types.Func) - if fn == nil { - return // function pointer is not supported - } - - sig, ok := fn.Type().(*types.Signature) - if !ok || !sig.Variadic() { - return // not variadic - } - - // ellipsis args is hard, just skip - if call.Ellipsis.IsValid() { - return - } - - checker := l.getCheckerForFunc(fn) - if checker == nil { - return - } - - checkers.ExecuteChecker(checker, pass, checkers.CallContext{ - Expr: call, - Func: fn, - Signature: sig, - }, checkers.Config{ - RequireStringKey: l.requireStringKey, - NoPrintfLike: l.noPrintfLike, - }) -} - -func (l *loggercheck) processConfig() error { - if l.ruleFile != "" { // flags takes precedence over configs - f, err := os.Open(l.ruleFile) - if err != nil { - return fmt.Errorf("failed to open rule file: %w", err) - } - defer f.Close() - - custom, err := rules.ParseRuleFile(f) - if err != nil { - return fmt.Errorf("failed to parse rule file: %w", err) - } - l.rulesetList = append(l.rulesetList, custom...) - } else if len(l.rules) > 0 { - custom, err := rules.ParseRules(l.rules) - if err != nil { - return fmt.Errorf("failed to parse rules: %w", err) - } - l.rulesetList = append(l.rulesetList, custom...) - } - - // Build index - indices := make(map[string][]int) - for i, rs := range l.rulesetList { - indices[rs.PackageImport] = append(indices[rs.PackageImport], i) - } - l.rulesetIndicesByImport = indices - - return nil -} - -func (l *loggercheck) run(pass *analysis.Pass) (interface{}, error) { - err := l.processConfig() - if err != nil { - return nil, err - } - - insp := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) - nodeFilter := []ast.Node{ - (*ast.CallExpr)(nil), - } - insp.Preorder(nodeFilter, func(node ast.Node) { - call := node.(*ast.CallExpr) - - typ := pass.TypesInfo.Types[call.Fun].Type - if typ == nil { - // Skip checking functions with unknown type. - return - } - - l.checkLoggerArguments(pass, call) - }) - - return nil, nil -} diff --git a/vendor/github.com/timonwong/loggercheck/options.go b/vendor/github.com/timonwong/loggercheck/options.go deleted file mode 100644 index 6b5f00af1..000000000 --- a/vendor/github.com/timonwong/loggercheck/options.go +++ /dev/null @@ -1,31 +0,0 @@ -package loggercheck - -import ( - "github.com/timonwong/loggercheck/internal/sets" -) - -type Option func(*loggercheck) - -func WithDisable(disable []string) Option { - return func(l *loggercheck) { - l.disable = sets.NewString(disable...) 
- } -} - -func WithRules(customRules []string) Option { - return func(l *loggercheck) { - l.rules = customRules - } -} - -func WithRequireStringKey(requireStringKey bool) Option { - return func(l *loggercheck) { - l.requireStringKey = requireStringKey - } -} - -func WithNoPrintfLike(noPrintfLike bool) Option { - return func(l *loggercheck) { - l.noPrintfLike = noPrintfLike - } -} diff --git a/vendor/github.com/timonwong/loggercheck/staticrules.go b/vendor/github.com/timonwong/loggercheck/staticrules.go deleted file mode 100644 index f955b3434..000000000 --- a/vendor/github.com/timonwong/loggercheck/staticrules.go +++ /dev/null @@ -1,68 +0,0 @@ -package loggercheck - -import ( - "errors" - "fmt" - - "github.com/timonwong/loggercheck/internal/checkers" - "github.com/timonwong/loggercheck/internal/rules" -) - -var ( - staticRuleList = []rules.Ruleset{ - mustNewStaticRuleSet("logr", []string{ - "(github.com/go-logr/logr.Logger).Error", - "(github.com/go-logr/logr.Logger).Info", - "(github.com/go-logr/logr.Logger).WithValues", - }), - mustNewStaticRuleSet("klog", []string{ - "k8s.io/klog/v2.InfoS", - "k8s.io/klog/v2.InfoSDepth", - "k8s.io/klog/v2.ErrorS", - "(k8s.io/klog/v2.Verbose).InfoS", - "(k8s.io/klog/v2.Verbose).InfoSDepth", - "(k8s.io/klog/v2.Verbose).ErrorS", - }), - mustNewStaticRuleSet("zap", []string{ - "(*go.uber.org/zap.SugaredLogger).With", - "(*go.uber.org/zap.SugaredLogger).Debugw", - "(*go.uber.org/zap.SugaredLogger).Infow", - "(*go.uber.org/zap.SugaredLogger).Warnw", - "(*go.uber.org/zap.SugaredLogger).Errorw", - "(*go.uber.org/zap.SugaredLogger).DPanicw", - "(*go.uber.org/zap.SugaredLogger).Panicw", - "(*go.uber.org/zap.SugaredLogger).Fatalw", - }), - mustNewStaticRuleSet("kitlog", []string{ - "github.com/go-kit/log.With", - "github.com/go-kit/log.WithPrefix", - "github.com/go-kit/log.WithSuffix", - "(github.com/go-kit/log.Logger).Log", - }), - } - checkerByRulesetName = map[string]checkers.Checker{ - // by default, checkers.General will be used. - "zap": checkers.Zap{}, - } -) - -// mustNewStaticRuleSet only called at init, catch errors during development. -// In production it will not panic. -func mustNewStaticRuleSet(name string, lines []string) rules.Ruleset { - if len(lines) == 0 { - panic(errors.New("no rules provided")) - } - - rulesetList, err := rules.ParseRules(lines) - if err != nil { - panic(err) - } - - if len(rulesetList) != 1 { - panic(fmt.Errorf("expected 1 ruleset, got %d", len(rulesetList))) - } - - ruleset := rulesetList[0] - ruleset.Name = name - return ruleset -} diff --git a/vendor/github.com/tomarrell/wrapcheck/v2/LICENSE b/vendor/github.com/tomarrell/wrapcheck/v2/LICENSE deleted file mode 100644 index b5d9d30d3..000000000 --- a/vendor/github.com/tomarrell/wrapcheck/v2/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -MIT License - -Copyright (c) 2020 Tom Arrell - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. 
- -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. diff --git a/vendor/github.com/tomarrell/wrapcheck/v2/wrapcheck/wrapcheck.go b/vendor/github.com/tomarrell/wrapcheck/v2/wrapcheck/wrapcheck.go deleted file mode 100644 index 79e7bba86..000000000 --- a/vendor/github.com/tomarrell/wrapcheck/v2/wrapcheck/wrapcheck.go +++ /dev/null @@ -1,463 +0,0 @@ -package wrapcheck - -import ( - "fmt" - "go/ast" - "go/token" - "go/types" - "regexp" - "strings" - - "github.com/gobwas/glob" - "golang.org/x/tools/go/analysis" -) - -var DefaultIgnoreSigs = []string{ - ".Errorf(", - "errors.New(", - "errors.Unwrap(", - "errors.Join(", - ".Wrap(", - ".Wrapf(", - ".WithMessage(", - ".WithMessagef(", - ".WithStack(", -} - -// WrapcheckConfig is the set of configuration values which configure the -// behaviour of the linter. -type WrapcheckConfig struct { - // IgnoreSigs defines a list of substrings which if contained within the - // signature of the function call returning the error, will be ignored. This - // allows you to specify functions that wrapcheck will not report as - // unwrapped. - // - // For example, an ignoreSig of `[]string{"errors.New("}` will ignore errors - // returned from the stdlib package error's function: - // - // `func errors.New(message string) error` - // - // Due to the signature containing the substring `errors.New(`. - // - // Note: Setting this value will intentionally override the default ignored - // sigs. To achieve the same behaviour as default, you should add the default - // list to your config. - IgnoreSigs []string `mapstructure:"ignoreSigs" yaml:"ignoreSigs"` - - // IgnoreSigRegexps defines a list of regular expressions which if matched - // to the signature of the function call returning the error, will be ignored. This - // allows you to specify functions that wrapcheck will not report as - // unwrapped. - // - // For example, an ignoreSigRegexp of `[]string{"\.New.*Err\("}`` will ignore errors - // returned from any signature whose method name starts with "New" and ends with "Err" - // due to the signature matching the regular expression `\.New.*Err\(`. - // - // Note that this is similar to the ignoreSigs configuration, but provides - // slightly more flexibility in defining rules by which signatures will be - // ignored. - IgnoreSigRegexps []string `mapstructure:"ignoreSigRegexps" yaml:"ignoreSigRegexps"` - - // IgnorePackageGlobs defines a list of globs which, if matching the package - // of the function returning the error, will ignore the error when doing - // wrapcheck analysis. - // - // This is useful for broadly ignoring packages and subpackages from wrapcheck - // analysis. 
For example, to ignore all errors from all packages and - // subpackages of "encoding" you may include the configuration: - // - // -- .wrapcheck.yaml - // ignorePackageGlobs: - // - encoding/* - IgnorePackageGlobs []string `mapstructure:"ignorePackageGlobs" yaml:"ignorePackageGlobs"` - - // IgnoreInterfaceRegexps defines a list of regular expressions which, if matched - // to a underlying interface name, will ignore unwrapped errors returned from a - // function whose call is defined on the given interface. - // - // For example, an ignoreInterfaceRegexps of `[]string{"Transac(tor|tion)"}` will ignore errors - // returned from any function whose call is defined on a interface named 'Transactor' - // or 'Transaction' due to the name matching the regular expression `Transac(tor|tion)`. - IgnoreInterfaceRegexps []string `mapstructure:"ignoreInterfaceRegexps" yaml:"ignoreInterfaceRegexps"` -} - -func NewDefaultConfig() WrapcheckConfig { - return WrapcheckConfig{ - IgnoreSigs: DefaultIgnoreSigs, - IgnoreSigRegexps: []string{}, - IgnorePackageGlobs: []string{}, - IgnoreInterfaceRegexps: []string{}, - } -} - -func NewAnalyzer(cfg WrapcheckConfig) *analysis.Analyzer { - return &analysis.Analyzer{ - Name: "wrapcheck", - Doc: "Checks that errors returned from external packages are wrapped", - Run: run(cfg), - } -} - -func run(cfg WrapcheckConfig) func(*analysis.Pass) (interface{}, error) { - // Precompile the regexps, report the error - var ( - ignoreSigRegexp []*regexp.Regexp - ignoreInterfaceRegexps []*regexp.Regexp - ignorePackageGlobs []glob.Glob - err error - ) - - ignoreSigRegexp, err = compileRegexps(cfg.IgnoreSigRegexps) - if err == nil { - ignoreInterfaceRegexps, err = compileRegexps(cfg.IgnoreInterfaceRegexps) - } - if err == nil { - ignorePackageGlobs, err = compileGlobs(cfg.IgnorePackageGlobs) - } - - return func(pass *analysis.Pass) (interface{}, error) { - if err != nil { - return nil, err - } - - for _, file := range pass.Files { - // Keep track of parents so that can can traverse upwards to check for - // FuncDecls and FuncLits. - var parents []ast.Node - - ast.Inspect(file, func(n ast.Node) bool { - if n == nil { - // Pop, since we're done with this node and its children. - parents = parents[:len(parents)-1] - } else { - // Push this node on the stack, since its children will be visited - // next. - parents = append(parents, n) - } - - ret, ok := n.(*ast.ReturnStmt) - if !ok { - return true - } - - if len(ret.Results) < 1 { - return true - } - - // Iterate over the values to be returned looking for errors - for _, expr := range ret.Results { - // Check if the return expression is a function call, if it is, we need - // to handle it by checking the return params of the function. - retFn, ok := expr.(*ast.CallExpr) - if ok { - // If you go up, and the parent is a FuncLit, then don't report an - // error as you are in an anonymous function. If you are inside a - // FuncDecl, then continue as normal. - for i := len(parents) - 1; i > 0; i-- { - if _, ok := parents[i].(*ast.FuncLit); ok { - return true - } else if _, ok := parents[i].(*ast.FuncDecl); ok { - break - } - } - - // If the return type of the function is a single error. This will not - // match an error within multiple return values, for that, the below - // tuple check is required. 
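Pulling the configuration fields documented above together, here is a minimal sketch of driving wrapcheck programmatically with a non-default config. `NewDefaultConfig` and `NewAnalyzer` are defined in this file; the specific ignore values are illustrative only:

```go
package main

import (
	"github.com/tomarrell/wrapcheck/v2/wrapcheck"
	"golang.org/x/tools/go/analysis/singlechecker"
)

func main() {
	cfg := wrapcheck.NewDefaultConfig()
	// Keep the default ignoreSigs and ignore one extra signature substring.
	cfg.IgnoreSigs = append(cfg.IgnoreSigs, ".QueryRow(")
	// Don't require wrapping for errors returned from encoding/* packages.
	cfg.IgnorePackageGlobs = []string{"encoding/*"}
	// Skip calls defined on interfaces whose name matches this pattern.
	cfg.IgnoreInterfaceRegexps = []string{"Transac(tor|tion)"}

	singlechecker.Main(wrapcheck.NewAnalyzer(cfg))
}
```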
- - if isError(pass.TypesInfo.TypeOf(expr)) { - reportUnwrapped(pass, retFn, retFn.Pos(), cfg, ignoreSigRegexp, ignoreInterfaceRegexps, ignorePackageGlobs) - return true - } - - // Check if one of the return values from the function is an error - tup, ok := pass.TypesInfo.TypeOf(expr).(*types.Tuple) - if !ok { - return true - } - - // Iterate over the return values of the function looking for error - // types - for i := 0; i < tup.Len(); i++ { - v := tup.At(i) - if v == nil { - return true - } - if isError(v.Type()) { - reportUnwrapped(pass, retFn, expr.Pos(), cfg, ignoreSigRegexp, ignoreInterfaceRegexps, ignorePackageGlobs) - return true - } - } - } - - if !isError(pass.TypesInfo.TypeOf(expr)) { - continue - } - - ident, ok := expr.(*ast.Ident) - if !ok { - return true - } - - var call *ast.CallExpr - - // Attempt to find the most recent short assign - if shortAss := prevErrAssign(pass, file, ident); shortAss != nil { - call, ok = shortAss.Rhs[0].(*ast.CallExpr) - if !ok { - return true - } - } else if isUnresolved(file, ident) { - // TODO Check if the identifier is unresolved, and try to resolve it in - // another file. - return true - } else { - // Check for ValueSpec nodes in order to locate a possible var - // declaration. - if ident.Obj == nil { - return true - } - - vSpec, ok := ident.Obj.Decl.(*ast.ValueSpec) - if !ok { - // We couldn't find a short or var assign for this error return. - // This is an error. Where did this identifier come from? Possibly a - // function param. - // - // TODO decide how to handle this case, whether to follow function - // param back, or assert wrapping at call site. - - return true - } - - if len(vSpec.Values) < 1 { - return true - } - - call, ok = vSpec.Values[0].(*ast.CallExpr) - if !ok { - return true - } - } - - // Make sure there is a call identified as producing the error being - // returned, otherwise just bail - if call == nil { - return true - } - - reportUnwrapped(pass, call, ident.NamePos, cfg, ignoreSigRegexp, ignoreInterfaceRegexps, ignorePackageGlobs) - } - - return true - }) - } - - return nil, nil - } -} - -// Report unwrapped takes a call expression and an identifier and reports -// if the call is unwrapped. 
-func reportUnwrapped( - pass *analysis.Pass, - call *ast.CallExpr, - tokenPos token.Pos, - cfg WrapcheckConfig, - regexpsSig []*regexp.Regexp, - regexpsInter []*regexp.Regexp, - pkgGlobs []glob.Glob, -) { - - sel, ok := call.Fun.(*ast.SelectorExpr) - if !ok { - return - } - - // Check for ignored signatures - fnSig := pass.TypesInfo.ObjectOf(sel.Sel).String() - if contains(cfg.IgnoreSigs, fnSig) { - return - } else if containsMatch(regexpsSig, fnSig) { - return - } - - // Check if the underlying type of the "x" in x.y.z is an interface, as - // errors returned from interface types should be wrapped, unless ignored - // as per `ignoreInterfaceRegexps` - if isInterface(pass, sel) { - pkgPath := pass.TypesInfo.ObjectOf(sel.Sel).Pkg().Path() - name := types.TypeString(pass.TypesInfo.TypeOf(sel.X), func(p *types.Package) string { return p.Name() }) - if !containsMatch(regexpsInter, name) && !containsMatchGlob(pkgGlobs, pkgPath) { - pass.Reportf(tokenPos, "error returned from interface method should be wrapped: sig: %s", fnSig) - return - } - } - - // Check whether the function being called comes from another package, - // as functions called across package boundaries which returns errors - // should be wrapped - if isFromOtherPkg(pass, sel, pkgGlobs) { - pass.Reportf(tokenPos, "error returned from external package is unwrapped: sig: %s", fnSig) - return - } -} - -// isInterface returns whether the function call is one defined on an interface. -func isInterface(pass *analysis.Pass, sel *ast.SelectorExpr) bool { - _, ok := pass.TypesInfo.TypeOf(sel.X).Underlying().(*types.Interface) - - return ok -} - -// isFromotherPkg returns whether the function is defined in the package -// currently under analysis or is considered external. It will ignore packages -// defined in config.IgnorePackageGlobs. -func isFromOtherPkg(pass *analysis.Pass, sel *ast.SelectorExpr, pkgGlobs []glob.Glob) bool { - // The package of the function that we are calling which returns the error - fn := pass.TypesInfo.ObjectOf(sel.Sel) - if containsMatchGlob(pkgGlobs, fn.Pkg().Path()) { - return false - } - - // If it's not a package name, then we should check the selector to make sure - // that it's an identifier from the same package - if pass.Pkg.Path() == fn.Pkg().Path() { - return false - } - - return true -} - -// prevErrAssign traverses the AST of a file looking for the most recent -// assignment to an error declaration which is specified by the returnIdent -// identifier. -// -// This only returns short form assignments and reassignments, e.g. `:=` and -// `=`. This does not include `var` statements. This function will return nil if -// the only declaration is a `var` (aka ValueSpec) declaration. -func prevErrAssign(pass *analysis.Pass, file *ast.File, returnIdent *ast.Ident) *ast.AssignStmt { - // A slice containing all the assignments which contain an identifier - // referring to the source declaration of the error. This is to catch - // cases where err is defined once, and then reassigned multiple times - // within the same block. In these cases, we should check the method of - // the most recent call. 
- var assigns []*ast.AssignStmt - - // Find all assignments which have the same declaration - ast.Inspect(file, func(n ast.Node) bool { - if ass, ok := n.(*ast.AssignStmt); ok { - for _, expr := range ass.Lhs { - if !isError(pass.TypesInfo.TypeOf(expr)) { - continue - } - - if assIdent, ok := expr.(*ast.Ident); ok { - if assIdent.Obj == nil || returnIdent.Obj == nil { - // If we can't find the Obj for one of the identifiers, just skip - // it. - return true - } else if assIdent.Obj.Decl == returnIdent.Obj.Decl { - assigns = append(assigns, ass) - } - } - } - } - - return true - }) - - // Iterate through the assignments, comparing the token positions to - // find the assignment that directly precedes the return position - var mostRecentAssign *ast.AssignStmt - - for _, ass := range assigns { - if ass.Pos() > returnIdent.Pos() { - break - } - - mostRecentAssign = ass - } - - return mostRecentAssign -} - -func contains(slice []string, el string) bool { - for _, s := range slice { - if strings.Contains(el, s) { - return true - } - } - - return false -} - -func containsMatch(regexps []*regexp.Regexp, el string) bool { - for _, re := range regexps { - if re.MatchString(el) { - return true - } - } - - return false -} - -func containsMatchGlob(globs []glob.Glob, el string) bool { - for _, g := range globs { - if g.Match(el) { - return true - } - } - - return false -} - -// isError returns whether or not the provided type interface is an error -func isError(typ types.Type) bool { - if typ == nil { - return false - } - - return typ.String() == "error" -} - -func isUnresolved(file *ast.File, ident *ast.Ident) bool { - for _, unresolvedIdent := range file.Unresolved { - if unresolvedIdent.Pos() == ident.Pos() { - return true - } - } - - return false -} - -// compileRegexps compiles a set of regular expressions returning them for use, -// or the first encountered error due to an invalid expression. -func compileRegexps(regexps []string) ([]*regexp.Regexp, error) { - compiledRegexps := make([]*regexp.Regexp, len(regexps)) - for idx, reg := range regexps { - re, err := regexp.Compile(reg) - if err != nil { - return nil, fmt.Errorf("unable to compile regexp %s: %v\n", reg, err) - } - - compiledRegexps[idx] = re - } - - return compiledRegexps, nil -} - -// compileGlobs compiles a set of globs, returning them for use, -// or the first encountered error due to an invalid expression. 
-func compileGlobs(globs []string) ([]glob.Glob, error) { - compiledGlobs := make([]glob.Glob, len(globs)) - for idx, globString := range globs { - glob, err := glob.Compile(globString) - if err != nil { - return nil, fmt.Errorf("unable to compile globs %s: %v\n", glob, err) - } - - compiledGlobs[idx] = glob - } - return compiledGlobs, nil -} diff --git a/vendor/github.com/tommy-muehle/go-mnd/v2/.editorconfig b/vendor/github.com/tommy-muehle/go-mnd/v2/.editorconfig deleted file mode 100644 index fe2c20fb0..000000000 --- a/vendor/github.com/tommy-muehle/go-mnd/v2/.editorconfig +++ /dev/null @@ -1,21 +0,0 @@ -root = true - -[*] -charset = utf-8 -indent_size = 4 -indent_style = space -end_of_line = lf -insert_final_newline = true -trim_trailing_whitespace = true - -[*.md] -trim_trailing_whitespace = false - -[*.json] -indent_size = 2 - -[*.{yaml,yml}] -indent_size = 2 - -[Makefile] -indent_style = tab diff --git a/vendor/github.com/tommy-muehle/go-mnd/v2/.gitattributes b/vendor/github.com/tommy-muehle/go-mnd/v2/.gitattributes deleted file mode 100644 index 005358190..000000000 --- a/vendor/github.com/tommy-muehle/go-mnd/v2/.gitattributes +++ /dev/null @@ -1,9 +0,0 @@ -/.gitattributes export-ignore -/.gitignore export-ignore -/.editorconfig export-ignore -/.goreleaser.yml export-ignore -/.github/ export-ignore -/examples/ export-ignore -/testdata/ export-ignore -/tools/ export-ignore -/Makefile export-ignore diff --git a/vendor/github.com/tommy-muehle/go-mnd/v2/.gitignore b/vendor/github.com/tommy-muehle/go-mnd/v2/.gitignore deleted file mode 100644 index abc11b330..000000000 --- a/vendor/github.com/tommy-muehle/go-mnd/v2/.gitignore +++ /dev/null @@ -1,3 +0,0 @@ -build/ -dist/ -coverage.txt diff --git a/vendor/github.com/tommy-muehle/go-mnd/v2/.goreleaser.yml b/vendor/github.com/tommy-muehle/go-mnd/v2/.goreleaser.yml deleted file mode 100644 index 7516de0b9..000000000 --- a/vendor/github.com/tommy-muehle/go-mnd/v2/.goreleaser.yml +++ /dev/null @@ -1,54 +0,0 @@ -env: - - GO_VERSION=1.16 - -before: - hooks: - - go mod download - -builds: - - main: ./cmd/mnd/main.go - binary: mnd - goos: - - windows - - darwin - - linux - goarch: - - amd64 - ldflags: -s -w -X main.version={{.Version}} -X main.commit={{.Commit}} -X main.buildTime={{.Date}}`. - -archives: - - format: tar.gz - format_overrides: - - goos: windows - format: zip - -brews: - - name: mnd - tap: - owner: tommy-muehle - name: homebrew-tap - folder: Formula - homepage: https://github.com/tommy-muehle/go-mnd - description: Magic number detector for Go - test: | - system "#{bin}/mnd --version" - install: | - bin.install "mnd" - -dockers: - - - goos: linux - goarch: amd64 - image_templates: - - "tommymuehle/go-mnd:latest" - - "tommymuehle/go-mnd:{{ .Tag }}" - build_flag_templates: - - "--build-arg=GO_VERSION={{.Env.GO_VERSION}}" - extra_files: - - checks - - cmd - - config - - analyzer.go - - entrypoint.sh - - go.mod - - go.sum diff --git a/vendor/github.com/tommy-muehle/go-mnd/v2/Dockerfile b/vendor/github.com/tommy-muehle/go-mnd/v2/Dockerfile deleted file mode 100644 index 25c8d5475..000000000 --- a/vendor/github.com/tommy-muehle/go-mnd/v2/Dockerfile +++ /dev/null @@ -1,17 +0,0 @@ -ARG GO_VERSION=1.16 - -FROM golang:${GO_VERSION}-alpine AS builder -RUN apk add --update --no-cache make git curl gcc libc-dev -RUN mkdir -p /build -WORKDIR /build -COPY . 
/build/ -RUN go mod download -RUN go build -o go-mnd cmd/mnd/main.go - -FROM golang:${GO_VERSION}-alpine -RUN apk add --update --no-cache bash git gcc libc-dev -COPY --from=builder /build/go-mnd /bin/go-mnd -COPY entrypoint.sh /bin/entrypoint.sh -VOLUME /app -WORKDIR /app -ENTRYPOINT ["/bin/entrypoint.sh"] diff --git a/vendor/github.com/tommy-muehle/go-mnd/v2/LICENSE b/vendor/github.com/tommy-muehle/go-mnd/v2/LICENSE deleted file mode 100644 index 8825fad20..000000000 --- a/vendor/github.com/tommy-muehle/go-mnd/v2/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -The MIT License (MIT) - -Copyright (c) 2019 Tommy Muehle - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. diff --git a/vendor/github.com/tommy-muehle/go-mnd/v2/Makefile b/vendor/github.com/tommy-muehle/go-mnd/v2/Makefile deleted file mode 100644 index a21c1dcac..000000000 --- a/vendor/github.com/tommy-muehle/go-mnd/v2/Makefile +++ /dev/null @@ -1,32 +0,0 @@ -GIT_TAG?= $(shell git describe --abbrev=0) - -GO_VERSION = 1.16 -BUILDFLAGS := '-w -s' - -IMAGE_REPO = "tommymuehle" -BIN = "go-mnd" - -clean: - rm -rf build dist coverage.txt - -test: - go test -race ./... - -test-coverage: - go test -race -coverprofile=coverage.txt -covermode=atomic -coverpkg=./checks,./config - -build: - go build -o build/$(BIN) cmd/mnd/main.go - -image: - @echo "Building the Docker image..." - docker build --rm -t $(IMAGE_REPO)/$(BIN):$(GIT_TAG) --build-arg GO_VERSION=$(GO_VERSION) . - docker tag $(IMAGE_REPO)/$(BIN):$(GIT_TAG) $(IMAGE_REPO)/$(BIN):$(GIT_TAG) - docker tag $(IMAGE_REPO)/$(BIN):$(GIT_TAG) $(IMAGE_REPO)/$(BIN):latest - -image-push: image - @echo "Pushing the Docker image..." - docker push $(IMAGE_REPO)/$(BIN):$(GIT_TAG) - docker push $(IMAGE_REPO)/$(BIN):latest - -.PHONY: clean test test-coverage build image image-push diff --git a/vendor/github.com/tommy-muehle/go-mnd/v2/README.md b/vendor/github.com/tommy-muehle/go-mnd/v2/README.md deleted file mode 100644 index bca0815db..000000000 --- a/vendor/github.com/tommy-muehle/go-mnd/v2/README.md +++ /dev/null @@ -1,230 +0,0 @@ -# go-mnd - Magic number detector for Golang - - - -A vet analyzer to detect magic numbers. - -> **What is a magic number?** -> A magic number is a numeric literal that is not defined as a constant, but which may change, and therefore can be hard to update. It's considered a bad programming practice to use numbers directly in any source code without an explanation. It makes programs harder to read, understand, and maintain. 
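A minimal Go sketch of the distinction the definition above draws, placing the kind of bare literal the detector reports next to the named-constant form it accepts (the values are illustrative; 0 and 1 are excluded by default, as the Excludes section below notes):

```go
package main

import (
	"fmt"
	"time"
)

// Magic number: 86400 appears as a bare literal in a return statement,
// one of the positions the detector checks.
func ttlMagic() time.Duration {
	return 86400 * time.Second
}

// The same value as a named constant, which is not reported.
const secondsPerDay = 86400

func ttl() time.Duration {
	return secondsPerDay * time.Second
}

func main() {
	fmt.Println(ttlMagic(), ttl())
}
```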
- -## Project status - -![CI](https://github.com/tommy-muehle/go-mnd/workflows/CI/badge.svg) -[![Go Report Card](https://goreportcard.com/badge/github.com/tommy-muehle/go-mnd)](https://goreportcard.com/report/github.com/tommy-muehle/go-mnd) -[![codecov](https://codecov.io/gh/tommy-muehle/go-mnd/branch/master/graph/badge.svg)](https://codecov.io/gh/tommy-muehle/go-mnd) - -## Install - -### Local - -This analyzer requires Golang in version >= 1.12 because it's depends on the **go/analysis** API. - -``` -go get -u github.com/tommy-muehle/go-mnd/v2/cmd/mnd -``` - -### Github action - -You can run go-mnd as a GitHub action as follows: - -``` -name: Example workflow -on: - push: - branches: - - master - pull_request: - branches: - - master -jobs: - tests: - runs-on: ubuntu-latest - env: - GO111MODULE: on - steps: - - name: Checkout Source - uses: actions/checkout@v2 - - name: Run go-mnd - uses: tommy-muehle/go-mnd@master - with: - args: ./... -``` - -### GitLab CI - -You can run go-mnd inside a GitLab CI pipeline as follows: - -``` -stages: - - lint - -go:lint:mnd: - stage: lint - needs: [] - image: golang:latest - before_script: - - go get -u github.com/tommy-muehle/go-mnd/v2/cmd/mnd - - go mod tidy - - go mod vendor - script: - - go vet -vettool $(which mnd) ./... -``` - -### Homebrew - -To install with [Homebrew](https://brew.sh/), run: - -``` -brew tap tommy-muehle/tap && brew install tommy-muehle/tap/mnd -``` - -### Docker - -To get the latest available Docker image: - -``` -docker pull tommymuehle/go-mnd -``` - -### Windows - -On Windows download the [latest release](https://github.com/tommy-muehle/go-mnd/releases). - -## Usage - -[![asciicast](https://asciinema.org/a/231021.svg)](https://asciinema.org/a/231021) - -``` -go vet -vettool $(which mnd) ./... -``` - -or directly - -``` -mnd ./... -``` - -or via Docker - -``` -docker run --rm -v "$PWD":/app -w /app tommymuehle/go-mnd:latest ./... -``` - -## Options - -The ```-checks``` option let's you define a comma separated list of checks. - -The ```-ignored-numbers``` option let's you define a comma separated list of numbers to ignore. -For example: `-ignored-numbers=1000,10_000,3.14159264` - -The ```-ignored-functions``` option let's you define a comma separated list of function name regexp patterns to exclude. -For example: `-ignored-functions=math.*,http.StatusText,make` - -The ```-ignored-files``` option let's you define a comma separated list of filename regexp patterns to exclude. -For example: `-ignored-files=magic_.*.go,.*_numbers.go` - -## Checks - -By default this detector analyses arguments, assigns, cases, conditions, operations and return statements. - -* argument - -``` -t := http.StatusText(200) -``` - -* assign - -``` -c := &http.Client{ - Timeout: 5 * time.Second, -} -``` - -* case - -``` -switch x { - case 3: -} -``` - -* condition - -``` -if x > 7 { -} -``` - -* operation - -``` -var x, y int -y = 10 * x -``` - -* return - -``` -return 3 -``` - -## Excludes - -By default the numbers 0 and 1 as well as test files are excluded! - -### Further known excludes - -The function "Date" in the "Time" package. - -``` -t := time.Date(2017, time.September, 26, 12, 13, 14, 0, time.UTC) -``` - -Additional custom excludes can be defined via option flag. 
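The options above can also be set without the command line. A minimal sketch of a small vet-style driver that narrows the checks and skips a few conventional numbers programmatically, assuming the import path used elsewhere in this diff; the flag names are the ones registered in analyzer.go further down:

```go
package main

import (
	mnd "github.com/tommy-muehle/go-mnd/v2"
	"golang.org/x/tools/go/analysis/singlechecker"
)

func main() {
	// Only run the argument and return checks, and ignore a few numbers
	// that are conventional in this (hypothetical) codebase.
	_ = mnd.Analyzer.Flags.Set("checks", "argument,return")
	_ = mnd.Analyzer.Flags.Set("ignored-numbers", "24,60,3600")

	singlechecker.Main(mnd.Analyzer)
}
```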
- -## Development - -### Build - -You can build the binary with: - -``` -make -``` - -### Tests - -You can run all unit tests using: - -``` -make test -``` - -And with coverage report: - -``` -make test-coverage -``` - -### Docker image - -You can also build locally the docker image by using the command: - -``` -make image -``` - -## Stickers - -

- Stickers image - Sticker image -

- -Just drop me a message via Twitter DM or email if you want some go-mnd stickers -for you or your Gopher usergroup. - -## License - -The MIT License (MIT). Please see [LICENSE](LICENSE) for more information. diff --git a/vendor/github.com/tommy-muehle/go-mnd/v2/action.yml b/vendor/github.com/tommy-muehle/go-mnd/v2/action.yml deleted file mode 100644 index 3a1f8eb11..000000000 --- a/vendor/github.com/tommy-muehle/go-mnd/v2/action.yml +++ /dev/null @@ -1,19 +0,0 @@ -name: 'go-mnd' -description: 'Runs the Golang magic number detector' -author: '@tommy-muehle' - -inputs: - args: - description: 'Arguments for go-mnd' - required: true - default: '-h' - -runs: - using: 'docker' - image: 'Dockerfile' - args: - - ${{ inputs.args }} - -branding: - icon: 'check-circle' - color: 'blue' diff --git a/vendor/github.com/tommy-muehle/go-mnd/v2/analyzer.go b/vendor/github.com/tommy-muehle/go-mnd/v2/analyzer.go deleted file mode 100644 index bf658f42d..000000000 --- a/vendor/github.com/tommy-muehle/go-mnd/v2/analyzer.go +++ /dev/null @@ -1,118 +0,0 @@ -package magic_numbers - -import ( - "flag" - "go/ast" - "strings" - - "github.com/tommy-muehle/go-mnd/v2/checks" - "github.com/tommy-muehle/go-mnd/v2/config" - - "golang.org/x/tools/go/analysis" - "golang.org/x/tools/go/analysis/passes/inspect" - "golang.org/x/tools/go/ast/inspector" -) - -const Doc = `magic number detector` - -var Analyzer = &analysis.Analyzer{ - Name: "mnd", - Doc: Doc, - Run: run, - Flags: options(), - Requires: []*analysis.Analyzer{inspect.Analyzer}, - RunDespiteErrors: true, -} - -type Checker interface { - NodeFilter() []ast.Node - Check(n ast.Node) -} - -func options() flag.FlagSet { - options := flag.NewFlagSet("", flag.ExitOnError) - options.String("excludes", "", "deprecated: use ignored-files instead") - options.String("ignored-files", "", "comma separated list of file patterns to exclude from analysis") - options.String("ignored-functions", "", "comma separated list of function patterns to exclude from analysis") - options.String("ignored-numbers", "", "comma separated list of numbers to exclude from analysis") - options.String( - "checks", - checks.ArgumentCheck+","+ - checks.CaseCheck+","+ - checks.ConditionCheck+","+ - checks.OperationCheck+","+ - checks.ReturnCheck+","+ - checks.AssignCheck, - "comma separated list of checks", - ) - - return *options -} - -func run(pass *analysis.Pass) (interface{}, error) { - var ignoredFiles string - - ignoredFiles = strings.Join( - []string{ - pass.Analyzer.Flags.Lookup("excludes").Value.String(), // is deprecated - pass.Analyzer.Flags.Lookup("ignored-files").Value.String(), - }, - ",", - ) - - if ignoredFiles == "," { - ignoredFiles = "" - } - - conf := config.WithOptions( - config.WithCustomChecks(pass.Analyzer.Flags.Lookup("checks").Value.String()), - config.WithIgnoredFiles(ignoredFiles), - config.WithIgnoredFunctions(pass.Analyzer.Flags.Lookup("ignored-functions").Value.String()), - config.WithIgnoredNumbers(pass.Analyzer.Flags.Lookup("ignored-numbers").Value.String()), - ) - - var checker []Checker - if conf.IsCheckEnabled(checks.ArgumentCheck) { - checker = append(checker, checks.NewArgumentAnalyzer(pass, conf)) - } - - if conf.IsCheckEnabled(checks.CaseCheck) { - checker = append(checker, checks.NewCaseAnalyzer(pass, conf)) - } - - if conf.IsCheckEnabled(checks.ConditionCheck) { - checker = append(checker, checks.NewConditionAnalyzer(pass, conf)) - } - - if conf.IsCheckEnabled(checks.OperationCheck) { - checker = append(checker, checks.NewOperationAnalyzer(pass, conf)) - } - - if 
conf.IsCheckEnabled(checks.ReturnCheck) { - checker = append(checker, checks.NewReturnAnalyzer(pass, conf)) - } - - if conf.IsCheckEnabled(checks.AssignCheck) { - checker = append(checker, checks.NewAssignAnalyzer(pass, conf)) - } - - i := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) - - for _, c := range checker { - c := c - i.Preorder(c.NodeFilter(), func(node ast.Node) { - for _, exclude := range conf.IgnoredFiles { - if exclude.String() == "" { - continue - } - if exclude.MatchString(pass.Fset.Position(node.Pos()).Filename) { - return - } - } - - c.Check(node) - }) - } - - return nil, nil -} diff --git a/vendor/github.com/tommy-muehle/go-mnd/v2/checks/argument.go b/vendor/github.com/tommy-muehle/go-mnd/v2/checks/argument.go deleted file mode 100644 index 5d880f0f9..000000000 --- a/vendor/github.com/tommy-muehle/go-mnd/v2/checks/argument.go +++ /dev/null @@ -1,125 +0,0 @@ -package checks - -import ( - "go/ast" - "go/token" - "strconv" - "sync" - - "golang.org/x/tools/go/analysis" - - "github.com/tommy-muehle/go-mnd/v2/config" -) - -const ArgumentCheck = "argument" - -// constantDefinitions is used to save lines (by number) which contain a constant definition. -var constantDefinitions = map[string]bool{} -var mu sync.RWMutex - -type ArgumentAnalyzer struct { - config *config.Config - pass *analysis.Pass -} - -func NewArgumentAnalyzer(pass *analysis.Pass, config *config.Config) *ArgumentAnalyzer { - return &ArgumentAnalyzer{ - pass: pass, - config: config, - } -} - -func (a *ArgumentAnalyzer) NodeFilter() []ast.Node { - return []ast.Node{ - (*ast.GenDecl)(nil), - (*ast.CallExpr)(nil), - } -} - -func (a *ArgumentAnalyzer) Check(n ast.Node) { - switch expr := n.(type) { - case *ast.CallExpr: - a.checkCallExpr(expr) - case *ast.GenDecl: - if expr.Tok != token.CONST { - return - } - - for _, x := range expr.Specs { - pos := a.pass.Fset.Position(x.Pos()) - - mu.Lock() - constantDefinitions[pos.Filename+":"+strconv.Itoa(pos.Line)] = true - mu.Unlock() - } - } -} - -func (a *ArgumentAnalyzer) checkCallExpr(expr *ast.CallExpr) { - pos := a.pass.Fset.Position(expr.Pos()) - - mu.RLock() - ok := constantDefinitions[pos.Filename+":"+strconv.Itoa(pos.Line)] - mu.RUnlock() - - if ok { - return - } - - switch f := expr.Fun.(type) { - case *ast.SelectorExpr: - switch prefix := f.X.(type) { - case *ast.Ident: - if a.config.IsIgnoredFunction(prefix.Name + "." 
+ f.Sel.Name) { - return - } - } - case *ast.Ident: - if a.config.IsIgnoredFunction(f.Name) { - return - } - } - - for i, arg := range expr.Args { - switch x := arg.(type) { - case *ast.BasicLit: - if !a.isMagicNumber(x) { - continue - } - // If it's a magic number and has no previous element, report it - if i == 0 { - a.pass.Reportf(x.Pos(), reportMsg, x.Value, ArgumentCheck) - } else { - // Otherwise check all args - switch expr.Args[i].(type) { - case *ast.BasicLit: - if a.isMagicNumber(x) { - a.pass.Reportf(x.Pos(), reportMsg, x.Value, ArgumentCheck) - } - } - } - case *ast.BinaryExpr: - a.checkBinaryExpr(x) - } - } -} - -func (a *ArgumentAnalyzer) checkBinaryExpr(expr *ast.BinaryExpr) { - switch x := expr.X.(type) { - case *ast.BasicLit: - if a.isMagicNumber(x) { - a.pass.Reportf(x.Pos(), reportMsg, x.Value, ArgumentCheck) - } - } - - switch y := expr.Y.(type) { - case *ast.BasicLit: - if a.isMagicNumber(y) { - a.pass.Reportf(y.Pos(), reportMsg, y.Value, ArgumentCheck) - } - } -} - -func (a *ArgumentAnalyzer) isMagicNumber(l *ast.BasicLit) bool { - return (l.Kind == token.FLOAT || l.Kind == token.INT) && !a.config.IsIgnoredNumber(l.Value) -} diff --git a/vendor/github.com/tommy-muehle/go-mnd/v2/checks/assign.go b/vendor/github.com/tommy-muehle/go-mnd/v2/checks/assign.go deleted file mode 100644 index f930d0880..000000000 --- a/vendor/github.com/tommy-muehle/go-mnd/v2/checks/assign.go +++ /dev/null @@ -1,86 +0,0 @@ -package checks - -import ( - "go/ast" - "go/token" - - "golang.org/x/tools/go/analysis" - - config "github.com/tommy-muehle/go-mnd/v2/config" -) - -const AssignCheck = "assign" - -type AssignAnalyzer struct { - pass *analysis.Pass - config *config.Config -} - -func NewAssignAnalyzer(pass *analysis.Pass, config *config.Config) *AssignAnalyzer { - return &AssignAnalyzer{ - pass: pass, - config: config, - } -} - -func (a *AssignAnalyzer) NodeFilter() []ast.Node { - return []ast.Node{ - (*ast.KeyValueExpr)(nil), - (*ast.AssignStmt)(nil), - } -} - -func (a *AssignAnalyzer) Check(n ast.Node) { - switch expr := n.(type) { - case *ast.KeyValueExpr: - switch x := expr.Value.(type) { - case *ast.BasicLit: - if a.isMagicNumber(x) { - a.pass.Reportf(x.Pos(), reportMsg, x.Value, AssignCheck) - } - case *ast.BinaryExpr: - a.checkBinaryExpr(x) - } - case *ast.AssignStmt: - for _, e := range expr.Rhs { - switch y := e.(type) { - case *ast.UnaryExpr: - a.checkUnaryExpr(y) - case *ast.BinaryExpr: - switch x := y.Y.(type) { - case *ast.UnaryExpr: - a.checkUnaryExpr(x) - } - } - } - } -} - -func (a *AssignAnalyzer) checkUnaryExpr(expr *ast.UnaryExpr) { - switch x := expr.X.(type) { - case *ast.BasicLit: - if a.isMagicNumber(x) { - a.pass.Reportf(x.Pos(), reportMsg, x.Value, AssignCheck) - } - } -} - -func (a *AssignAnalyzer) checkBinaryExpr(expr *ast.BinaryExpr) { - switch x := expr.X.(type) { - case *ast.BasicLit: - if a.isMagicNumber(x) { - a.pass.Reportf(x.Pos(), reportMsg, x.Value, AssignCheck) - } - } - - switch y := expr.Y.(type) { - case *ast.BasicLit: - if a.isMagicNumber(y) { - a.pass.Reportf(y.Pos(), reportMsg, y.Value, AssignCheck) - } - } -} - -func (a *AssignAnalyzer) isMagicNumber(l *ast.BasicLit) bool { - return (l.Kind == token.FLOAT || l.Kind == token.INT) && !a.config.IsIgnoredNumber(l.Value) -} diff --git a/vendor/github.com/tommy-muehle/go-mnd/v2/checks/case.go b/vendor/github.com/tommy-muehle/go-mnd/v2/checks/case.go deleted file mode 100644 index 228cab4b8..000000000 --- a/vendor/github.com/tommy-muehle/go-mnd/v2/checks/case.go +++ /dev/null @@ -1,68 +0,0 @@ -package 
checks - -import ( - "go/ast" - "go/token" - - "golang.org/x/tools/go/analysis" - - config "github.com/tommy-muehle/go-mnd/v2/config" -) - -const CaseCheck = "case" - -type CaseAnalyzer struct { - pass *analysis.Pass - config *config.Config -} - -func NewCaseAnalyzer(pass *analysis.Pass, config *config.Config) *CaseAnalyzer { - return &CaseAnalyzer{ - pass: pass, - config: config, - } -} - -func (a *CaseAnalyzer) NodeFilter() []ast.Node { - return []ast.Node{ - (*ast.CaseClause)(nil), - } -} - -func (a *CaseAnalyzer) Check(n ast.Node) { - caseClause, ok := n.(*ast.CaseClause) - if !ok { - return - } - - for _, c := range caseClause.List { - switch x := c.(type) { - case *ast.BasicLit: - if a.isMagicNumber(x) { - a.pass.Reportf(x.Pos(), reportMsg, x.Value, CaseCheck) - } - case *ast.BinaryExpr: - a.checkBinaryExpr(x) - } - } -} - -func (a *CaseAnalyzer) checkBinaryExpr(expr *ast.BinaryExpr) { - switch x := expr.X.(type) { - case *ast.BasicLit: - if a.isMagicNumber(x) { - a.pass.Reportf(x.Pos(), reportMsg, x.Value, CaseCheck) - } - } - - switch y := expr.Y.(type) { - case *ast.BasicLit: - if a.isMagicNumber(y) { - a.pass.Reportf(y.Pos(), reportMsg, y.Value, CaseCheck) - } - } -} - -func (a *CaseAnalyzer) isMagicNumber(l *ast.BasicLit) bool { - return (l.Kind == token.FLOAT || l.Kind == token.INT) && !a.config.IsIgnoredNumber(l.Value) -} diff --git a/vendor/github.com/tommy-muehle/go-mnd/v2/checks/checks.go b/vendor/github.com/tommy-muehle/go-mnd/v2/checks/checks.go deleted file mode 100644 index deff0c7bf..000000000 --- a/vendor/github.com/tommy-muehle/go-mnd/v2/checks/checks.go +++ /dev/null @@ -1,3 +0,0 @@ -package checks - -const reportMsg = "Magic number: %v, in <%s> detected" diff --git a/vendor/github.com/tommy-muehle/go-mnd/v2/checks/condition.go b/vendor/github.com/tommy-muehle/go-mnd/v2/checks/condition.go deleted file mode 100644 index 20f892ede..000000000 --- a/vendor/github.com/tommy-muehle/go-mnd/v2/checks/condition.go +++ /dev/null @@ -1,55 +0,0 @@ -package checks - -import ( - "go/ast" - "go/token" - - "golang.org/x/tools/go/analysis" - - config "github.com/tommy-muehle/go-mnd/v2/config" -) - -const ConditionCheck = "condition" - -type ConditionAnalyzer struct { - pass *analysis.Pass - config *config.Config -} - -func NewConditionAnalyzer(pass *analysis.Pass, config *config.Config) *ConditionAnalyzer { - return &ConditionAnalyzer{ - pass: pass, - config: config, - } -} - -func (a *ConditionAnalyzer) NodeFilter() []ast.Node { - return []ast.Node{ - (*ast.IfStmt)(nil), - } -} - -func (a *ConditionAnalyzer) Check(n ast.Node) { - expr, ok := n.(*ast.IfStmt).Cond.(*ast.BinaryExpr) - if !ok { - return - } - - switch x := expr.X.(type) { - case *ast.BasicLit: - if a.isMagicNumber(x) { - a.pass.Reportf(x.Pos(), reportMsg, x.Value, ConditionCheck) - } - } - - switch y := expr.Y.(type) { - case *ast.BasicLit: - if a.isMagicNumber(y) { - a.pass.Reportf(y.Pos(), reportMsg, y.Value, ConditionCheck) - } - } -} - -func (a *ConditionAnalyzer) isMagicNumber(l *ast.BasicLit) bool { - return (l.Kind == token.FLOAT || l.Kind == token.INT) && !a.config.IsIgnoredNumber(l.Value) -} diff --git a/vendor/github.com/tommy-muehle/go-mnd/v2/checks/operation.go b/vendor/github.com/tommy-muehle/go-mnd/v2/checks/operation.go deleted file mode 100644 index ddf3a0363..000000000 --- a/vendor/github.com/tommy-muehle/go-mnd/v2/checks/operation.go +++ /dev/null @@ -1,77 +0,0 @@ -package checks - -import ( - "go/ast" - "go/token" - - "golang.org/x/tools/go/analysis" - - config 
"github.com/tommy-muehle/go-mnd/v2/config" -) - -const OperationCheck = "operation" - -type OperationAnalyzer struct { - pass *analysis.Pass - config *config.Config -} - -func NewOperationAnalyzer(pass *analysis.Pass, config *config.Config) *OperationAnalyzer { - return &OperationAnalyzer{ - pass: pass, - config: config, - } -} - -func (a *OperationAnalyzer) NodeFilter() []ast.Node { - return []ast.Node{ - (*ast.AssignStmt)(nil), - (*ast.ParenExpr)(nil), - } -} - -func (a *OperationAnalyzer) Check(n ast.Node) { - switch expr := n.(type) { - case *ast.ParenExpr: - switch x := expr.X.(type) { - case *ast.BinaryExpr: - a.checkBinaryExpr(x) - } - case *ast.AssignStmt: - for _, y := range expr.Rhs { - switch x := y.(type) { - case *ast.BinaryExpr: - switch xExpr := x.X.(type) { - case *ast.BinaryExpr: - a.checkBinaryExpr(xExpr) - } - switch yExpr := x.Y.(type) { - case *ast.BinaryExpr: - a.checkBinaryExpr(yExpr) - } - - a.checkBinaryExpr(x) - } - } - } -} - -func (a *OperationAnalyzer) checkBinaryExpr(expr *ast.BinaryExpr) { - switch x := expr.X.(type) { - case *ast.BasicLit: - if a.isMagicNumber(x) { - a.pass.Reportf(x.Pos(), reportMsg, x.Value, OperationCheck) - } - } - - switch y := expr.Y.(type) { - case *ast.BasicLit: - if a.isMagicNumber(y) { - a.pass.Reportf(y.Pos(), reportMsg, y.Value, OperationCheck) - } - } -} - -func (a *OperationAnalyzer) isMagicNumber(l *ast.BasicLit) bool { - return (l.Kind == token.FLOAT || l.Kind == token.INT) && !a.config.IsIgnoredNumber(l.Value) -} diff --git a/vendor/github.com/tommy-muehle/go-mnd/v2/checks/return.go b/vendor/github.com/tommy-muehle/go-mnd/v2/checks/return.go deleted file mode 100644 index bc53940c7..000000000 --- a/vendor/github.com/tommy-muehle/go-mnd/v2/checks/return.go +++ /dev/null @@ -1,68 +0,0 @@ -package checks - -import ( - "go/ast" - "go/token" - - "golang.org/x/tools/go/analysis" - - config "github.com/tommy-muehle/go-mnd/v2/config" -) - -const ReturnCheck = "return" - -type ReturnAnalyzer struct { - pass *analysis.Pass - config *config.Config -} - -func NewReturnAnalyzer(pass *analysis.Pass, config *config.Config) *ReturnAnalyzer { - return &ReturnAnalyzer{ - pass: pass, - config: config, - } -} - -func (a *ReturnAnalyzer) NodeFilter() []ast.Node { - return []ast.Node{ - (*ast.ReturnStmt)(nil), - } -} - -func (a *ReturnAnalyzer) Check(n ast.Node) { - stmt, ok := n.(*ast.ReturnStmt) - if !ok { - return - } - - for _, expr := range stmt.Results { - switch x := expr.(type) { - case *ast.BasicLit: - if a.isMagicNumber(x) { - a.pass.Reportf(x.Pos(), reportMsg, x.Value, ReturnCheck) - } - case *ast.BinaryExpr: - a.checkBinaryExpr(x) - } - } -} - -func (a *ReturnAnalyzer) checkBinaryExpr(expr *ast.BinaryExpr) { - switch x := expr.X.(type) { - case *ast.BasicLit: - if a.isMagicNumber(x) { - a.pass.Reportf(x.Pos(), reportMsg, x.Value, ReturnCheck) - } - } - - switch y := expr.Y.(type) { - case *ast.BasicLit: - if a.isMagicNumber(y) { - a.pass.Reportf(y.Pos(), reportMsg, y.Value, ReturnCheck) - } - } -} - -func (a *ReturnAnalyzer) isMagicNumber(l *ast.BasicLit) bool { - return (l.Kind == token.FLOAT || l.Kind == token.INT) && !a.config.IsIgnoredNumber(l.Value) -} diff --git a/vendor/github.com/tommy-muehle/go-mnd/v2/config/config.go b/vendor/github.com/tommy-muehle/go-mnd/v2/config/config.go deleted file mode 100644 index b9fc91e5e..000000000 --- a/vendor/github.com/tommy-muehle/go-mnd/v2/config/config.go +++ /dev/null @@ -1,124 +0,0 @@ -package config - -import ( - "regexp" - "strings" -) - -type Config struct { - Checks map[string]bool - 
IgnoredNumbers map[string]struct{} - IgnoredFunctions []*regexp.Regexp - IgnoredFiles []*regexp.Regexp -} - -type Option func(config *Config) - -func DefaultConfig() *Config { - return &Config{ - Checks: map[string]bool{}, - IgnoredNumbers: map[string]struct{}{ - "0": {}, - "0.0": {}, - "1": {}, - "1.0": {}, - }, - IgnoredFiles: []*regexp.Regexp{ - regexp.MustCompile(`_test.go`), - }, - IgnoredFunctions: []*regexp.Regexp{ - regexp.MustCompile(`time.Date`), - regexp.MustCompile(`strconv.FormatInt`), - regexp.MustCompile(`strconv.FormatUint`), - regexp.MustCompile(`strconv.FormatFloat`), - regexp.MustCompile(`strconv.ParseInt`), - regexp.MustCompile(`strconv.ParseUint`), - regexp.MustCompile(`strconv.ParseFloat`), - }, - } -} - -func WithOptions(options ...Option) *Config { - c := DefaultConfig() - - for _, option := range options { - option(c) - } - - return c -} - -func WithIgnoredFunctions(excludes string) Option { - return func(config *Config) { - for _, exclude := range strings.Split(excludes, ",") { - if exclude == "" { - continue - } - config.IgnoredFunctions = append(config.IgnoredFunctions, regexp.MustCompile(exclude)) - } - } -} - -func WithIgnoredFiles(excludes string) Option { - return func(config *Config) { - for _, exclude := range strings.Split(excludes, ",") { - if exclude == "" { - continue - } - config.IgnoredFiles = append(config.IgnoredFiles, regexp.MustCompile(exclude)) - } - } -} - -func WithIgnoredNumbers(numbers string) Option { - return func(config *Config) { - for _, number := range strings.Split(numbers, ",") { - if number == "" { - continue - } - config.IgnoredNumbers[config.removeDigitSeparator(number)] = struct{}{} - } - } -} - -func WithCustomChecks(checks string) Option { - return func(config *Config) { - if checks == "" { - return - } - - for name, _ := range config.Checks { - config.Checks[name] = false - } - - for _, name := range strings.Split(checks, ",") { - if name == "" { - continue - } - config.Checks[name] = true - } - } -} - -func (c *Config) IsCheckEnabled(name string) bool { - return c.Checks[name] -} - -func (c *Config) IsIgnoredNumber(number string) bool { - _, ok := c.IgnoredNumbers[c.removeDigitSeparator(number)] - return ok -} - -func (c *Config) IsIgnoredFunction(f string) bool { - for _, pattern := range c.IgnoredFunctions { - if pattern.MatchString(f) { - return true - } - } - - return false -} - -func (c *Config) removeDigitSeparator(number string) string { - return strings.Replace(number, "_", "", -1) -} diff --git a/vendor/github.com/tommy-muehle/go-mnd/v2/entrypoint.sh b/vendor/github.com/tommy-muehle/go-mnd/v2/entrypoint.sh deleted file mode 100644 index cabc2f63d..000000000 --- a/vendor/github.com/tommy-muehle/go-mnd/v2/entrypoint.sh +++ /dev/null @@ -1,7 +0,0 @@ -#!/usr/bin/env bash - -# Expand the arguments into an array of strings. This is required because the GitHub action -# provides all arguments concatenated as a single string. -ARGS=("$@") - -/bin/go-mnd "${ARGS[*]}" diff --git a/vendor/github.com/ultraware/funlen/LICENSE b/vendor/github.com/ultraware/funlen/LICENSE deleted file mode 100644 index dca75556d..000000000 --- a/vendor/github.com/ultraware/funlen/LICENSE +++ /dev/null @@ -1,7 +0,0 @@ -Copyright 2018 Ultraware Consultancy and Development B.V. 
- -Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/vendor/github.com/ultraware/funlen/README.md b/vendor/github.com/ultraware/funlen/README.md deleted file mode 100644 index af2187694..000000000 --- a/vendor/github.com/ultraware/funlen/README.md +++ /dev/null @@ -1,47 +0,0 @@ -# Funlen linter - -Funlen is a linter that checks for long functions. It can check both the number of lines and the number of statements. - -The default limits are 60 lines and 40 statements. You can configure these. - -## Description - -The intent for the funlen linter is to fit a function within one screen. If you need to scroll through a long function, tracing variables back to their definition or even just finding matching brackets can become difficult. - -Besides checking lines, there's also a separate check for the number of statements, which gives a clearer idea of how much is actually being done in a function. - -The default values are used internally, but might need to be adjusted for your specific environment. - -## Installation - -Funlen is included in [golangci-lint](https://github.com/golangci/golangci-lint/). Install it and enable funlen. - -## Exclude for tests - -golangci-lint offers a way to exclude linters in certain cases. More info can be found here: https://golangci-lint.run/usage/configuration/#issues-configuration. 
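Before excluding funlen outright, the limits mentioned above (60 lines, 40 statements) can also simply be raised. A minimal sketch, assuming golangci-lint's `funlen` settings block with `lines` and `statements` keys (verify the key names against the golangci-lint version in use):

```yaml
# Hypothetical .golangci.yml fragment: raise funlen's thresholds
# instead of disabling the linter. The key names are assumptions
# based on golangci-lint's funlen settings.
linters-settings:
  funlen:
    lines: 80       # report functions longer than 80 lines
    statements: 50  # report functions with more than 50 statements
```

If raising the limits is not enough, the exclusion rules below disable funlen selectively.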
- -## Disable funlen for \_test.go files - -You can utilize the issues configuration in `.golangci.yml` to exclude the funlen linter for all test files: - -```yaml -issues: - exclude-rules: - # disable funlen for all _test.go files - - path: _test.go - linters: - - funlen -``` - -## Disable funlen only for Test funcs - -If you want to keep funlen enabled for example in helper functions in test files but disable it specifically for Test funcs, you can use the following configuration: - -```yaml -issues: - exclude-rules: - # disable funlen for test funcs - - source: "^func Test" - linters: - - funlen -``` diff --git a/vendor/github.com/ultraware/funlen/main.go b/vendor/github.com/ultraware/funlen/main.go deleted file mode 100644 index b68ddb926..000000000 --- a/vendor/github.com/ultraware/funlen/main.go +++ /dev/null @@ -1,124 +0,0 @@ -package funlen - -import ( - "fmt" - "go/ast" - "go/token" - "reflect" -) - -const ( - defaultLineLimit = 60 - defaultStmtLimit = 40 -) - -// Run runs this linter on the provided code -func Run(file *ast.File, fset *token.FileSet, lineLimit int, stmtLimit int, ignoreComments bool) []Message { - if lineLimit == 0 { - lineLimit = defaultLineLimit - } - if stmtLimit == 0 { - stmtLimit = defaultStmtLimit - } - - cmap := ast.NewCommentMap(fset, file, file.Comments) - - var msgs []Message - for _, f := range file.Decls { - decl, ok := f.(*ast.FuncDecl) - if !ok || decl.Body == nil { // decl.Body can be nil for e.g. cgo - continue - } - - if stmtLimit > 0 { - if stmts := parseStmts(decl.Body.List); stmts > stmtLimit { - msgs = append(msgs, makeStmtMessage(fset, decl.Name, stmts, stmtLimit)) - continue - } - } - - if lineLimit > 0 { - if lines := getLines(fset, decl, cmap.Filter(decl), ignoreComments); lines > lineLimit { - msgs = append(msgs, makeLineMessage(fset, decl.Name, lines, lineLimit)) - } - } - } - - return msgs -} - -// Message contains a message -type Message struct { - Pos token.Position - Message string -} - -func makeLineMessage(fset *token.FileSet, funcInfo *ast.Ident, lines, lineLimit int) Message { - return Message{ - fset.Position(funcInfo.Pos()), - fmt.Sprintf("Function '%s' is too long (%d > %d)\n", funcInfo.Name, lines, lineLimit), - } -} - -func makeStmtMessage(fset *token.FileSet, funcInfo *ast.Ident, stmts, stmtLimit int) Message { - return Message{ - fset.Position(funcInfo.Pos()), - fmt.Sprintf("Function '%s' has too many statements (%d > %d)\n", funcInfo.Name, stmts, stmtLimit), - } -} - -func getLines(fset *token.FileSet, f *ast.FuncDecl, cmap ast.CommentMap, ignoreComments bool) int { // nolint: interfacer - var lineCount int - var commentCount int - - lineCount = fset.Position(f.End()).Line - fset.Position(f.Pos()).Line - 1 - - if !ignoreComments { - return lineCount - } - - for _, c := range cmap.Comments() { - // If the CommenGroup's lines are inside the function - // count how many comments are in the CommentGroup - if (fset.Position(c.Pos()).Line > fset.Position(f.Pos()).Line) && - (fset.Position(c.End()).Line < fset.Position(f.End()).Line) { - commentCount += len(c.List) - } - } - - return lineCount - commentCount -} - -func parseStmts(s []ast.Stmt) (total int) { - for _, v := range s { - total++ - switch stmt := v.(type) { - case *ast.BlockStmt: - total += parseStmts(stmt.List) - 1 - case *ast.ForStmt, *ast.RangeStmt, *ast.IfStmt, - *ast.SwitchStmt, *ast.TypeSwitchStmt, *ast.SelectStmt: - total += parseBodyListStmts(stmt) - case *ast.CaseClause: - total += parseStmts(stmt.Body) - case *ast.AssignStmt: - total += 
checkInlineFunc(stmt.Rhs[0]) - case *ast.GoStmt: - total += checkInlineFunc(stmt.Call.Fun) - case *ast.DeferStmt: - total += checkInlineFunc(stmt.Call.Fun) - } - } - return -} - -func checkInlineFunc(stmt ast.Expr) int { - if block, ok := stmt.(*ast.FuncLit); ok { - return parseStmts(block.Body.List) - } - return 0 -} - -func parseBodyListStmts(t interface{}) int { - i := reflect.ValueOf(t).Elem().FieldByName(`Body`).Elem().FieldByName(`List`).Interface() - return parseStmts(i.([]ast.Stmt)) -} diff --git a/vendor/github.com/ultraware/whitespace/LICENSE b/vendor/github.com/ultraware/whitespace/LICENSE deleted file mode 100644 index dca75556d..000000000 --- a/vendor/github.com/ultraware/whitespace/LICENSE +++ /dev/null @@ -1,7 +0,0 @@ -Copyright 2018 Ultraware Consultancy and Development B.V. - -Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/vendor/github.com/ultraware/whitespace/README.md b/vendor/github.com/ultraware/whitespace/README.md deleted file mode 100644 index f99ecce36..000000000 --- a/vendor/github.com/ultraware/whitespace/README.md +++ /dev/null @@ -1,9 +0,0 @@ -# Whitespace linter - -Whitespace is a linter that checks for unnecessary newlines at the start and end of functions, if, for, etc. - -## Installation guide - -To install as a standalone linter, run `go install github.com/ultraware/whitespace`. - -Whitespace is also included in [golangci-lint](https://github.com/golangci/golangci-lint/). Install it and enable whitespace. diff --git a/vendor/github.com/ultraware/whitespace/whitespace.go b/vendor/github.com/ultraware/whitespace/whitespace.go deleted file mode 100644 index 350e9b7e4..000000000 --- a/vendor/github.com/ultraware/whitespace/whitespace.go +++ /dev/null @@ -1,307 +0,0 @@ -package whitespace - -import ( - "flag" - "go/ast" - "go/token" - "strings" - - "golang.org/x/tools/go/analysis" -) - -// MessageType describes what should happen to fix the warning. -type MessageType uint8 - -// List of MessageTypes. -const ( - MessageTypeRemove MessageType = iota + 1 - MessageTypeAdd -) - -// RunningMode describes the mode the linter is run in. This can be either -// native or golangci-lint. -type RunningMode uint8 - -const ( - RunningModeNative RunningMode = iota - RunningModeGolangCI -) - -// Message contains a message and diagnostic information. -type Message struct { - // Diagnostic is what position the diagnostic should be put at. This isn't - // always the same as the fix start, f.ex. 
when we fix trailing newlines we - // put the diagnostic at the right bracket but we fix between the end of the - // last statement and the bracket. - Diagnostic token.Pos - - // FixStart is the span start of the fix. - FixStart token.Pos - - // FixEnd is the span end of the fix. - FixEnd token.Pos - - // LineNumbers represent the actual line numbers in the file. This is set - // when finding the diagnostic to make it easier to suggest fixes in - // golangci-lint. - LineNumbers []int - - // MessageType represents the type of message it is. - MessageType MessageType - - // Message is the diagnostic to show. - Message string -} - -// Settings contains settings for edge-cases. -type Settings struct { - Mode RunningMode - MultiIf bool - MultiFunc bool -} - -// NewAnalyzer creates a new whitespace analyzer. -func NewAnalyzer(settings *Settings) *analysis.Analyzer { - if settings == nil { - settings = &Settings{} - } - - return &analysis.Analyzer{ - Name: "whitespace", - Doc: "Whitespace is a linter that checks for unnecessary newlines at the start and end of functions, if, for, etc.", - Flags: flags(settings), - Run: func(p *analysis.Pass) (any, error) { - Run(p, settings) - return nil, nil - }, - RunDespiteErrors: true, - } -} - -func flags(settings *Settings) flag.FlagSet { - flags := flag.NewFlagSet("", flag.ExitOnError) - flags.BoolVar(&settings.MultiIf, "multi-if", settings.MultiIf, "Check that multi line if-statements have a leading newline") - flags.BoolVar(&settings.MultiFunc, "multi-func", settings.MultiFunc, "Check that multi line functions have a leading newline") - - return *flags -} - -func Run(pass *analysis.Pass, settings *Settings) []Message { - messages := []Message{} - - for _, file := range pass.Files { - filename := pass.Fset.Position(file.Pos()).Filename - if !strings.HasSuffix(filename, ".go") { - continue - } - - fileMessages := runFile(file, pass.Fset, *settings) - - if settings.Mode == RunningModeGolangCI { - messages = append(messages, fileMessages...) - continue - } - - for _, message := range fileMessages { - pass.Report(analysis.Diagnostic{ - Pos: message.Diagnostic, - Category: "whitespace", - Message: message.Message, - SuggestedFixes: []analysis.SuggestedFix{ - { - TextEdits: []analysis.TextEdit{ - { - Pos: message.FixStart, - End: message.FixEnd, - NewText: []byte("\n"), - }, - }, - }, - }, - }) - } - } - - return messages -} - -func runFile(file *ast.File, fset *token.FileSet, settings Settings) []Message { - var messages []Message - - for _, f := range file.Decls { - decl, ok := f.(*ast.FuncDecl) - if !ok || decl.Body == nil { // decl.Body can be nil for e.g. cgo - continue - } - - vis := visitor{file.Comments, fset, nil, make(map[*ast.BlockStmt]bool), settings} - ast.Walk(&vis, decl) - - messages = append(messages, vis.messages...) 
- } - - return messages -} - -type visitor struct { - comments []*ast.CommentGroup - fset *token.FileSet - messages []Message - wantNewline map[*ast.BlockStmt]bool - settings Settings -} - -func (v *visitor) Visit(node ast.Node) ast.Visitor { - if node == nil { - return v - } - - if stmt, ok := node.(*ast.IfStmt); ok && v.settings.MultiIf { - checkMultiLine(v, stmt.Body, stmt.Cond) - } - - if stmt, ok := node.(*ast.FuncLit); ok && v.settings.MultiFunc { - checkMultiLine(v, stmt.Body, stmt.Type) - } - - if stmt, ok := node.(*ast.FuncDecl); ok && v.settings.MultiFunc { - checkMultiLine(v, stmt.Body, stmt.Type) - } - - if stmt, ok := node.(*ast.BlockStmt); ok { - wantNewline := v.wantNewline[stmt] - - comments := v.comments - if wantNewline { - comments = nil // Comments also count as a newline if we want a newline - } - - opening, first, last := firstAndLast(comments, v.fset, stmt) - startMsg := checkStart(v.fset, opening, first) - - if wantNewline && startMsg == nil && len(stmt.List) >= 1 { - v.messages = append(v.messages, Message{ - Diagnostic: opening, - FixStart: stmt.List[0].Pos(), - FixEnd: stmt.List[0].Pos(), - LineNumbers: []int{v.fset.PositionFor(stmt.List[0].Pos(), false).Line}, - MessageType: MessageTypeAdd, - Message: "multi-line statement should be followed by a newline", - }) - } else if !wantNewline && startMsg != nil { - v.messages = append(v.messages, *startMsg) - } - - if msg := checkEnd(v.fset, stmt.Rbrace, last); msg != nil { - v.messages = append(v.messages, *msg) - } - } - - return v -} - -func checkMultiLine(v *visitor, body *ast.BlockStmt, stmtStart ast.Node) { - start, end := posLine(v.fset, stmtStart.Pos()), posLine(v.fset, stmtStart.End()) - - if end > start { // Check only multi line conditions - v.wantNewline[body] = true - } -} - -func posLine(fset *token.FileSet, pos token.Pos) int { - return fset.PositionFor(pos, false).Line -} - -func firstAndLast(comments []*ast.CommentGroup, fset *token.FileSet, stmt *ast.BlockStmt) (token.Pos, ast.Node, ast.Node) { - openingPos := stmt.Lbrace + 1 - - if len(stmt.List) == 0 { - return openingPos, nil, nil - } - - first, last := ast.Node(stmt.List[0]), ast.Node(stmt.List[len(stmt.List)-1]) - - for _, c := range comments { - // If the comment is on the same line as the opening pos (initially the - // left bracket) but it starts after the pos the comment must be after - // the bracket and where that comment ends should be considered where - // the fix should start. - if posLine(fset, c.Pos()) == posLine(fset, openingPos) && c.Pos() > openingPos { - if posLine(fset, c.End()) != posLine(fset, openingPos) { - // This is a multiline comment that spans from the `LBrace` line - // to a line further down. This should always be seen as ok! 
- first = c - } else { - openingPos = c.End() - } - } - - if posLine(fset, c.Pos()) == posLine(fset, stmt.Pos()) || posLine(fset, c.End()) == posLine(fset, stmt.End()) { - continue - } - - if c.Pos() < stmt.Pos() || c.End() > stmt.End() { - continue - } - - if c.Pos() < first.Pos() { - first = c - } - - if c.End() > last.End() { - last = c - } - } - - return openingPos, first, last -} - -func checkStart(fset *token.FileSet, start token.Pos, first ast.Node) *Message { - if first == nil { - return nil - } - - if posLine(fset, start)+1 < posLine(fset, first.Pos()) { - return &Message{ - Diagnostic: start, - FixStart: start, - FixEnd: first.Pos(), - LineNumbers: linesBetween(fset, start, first.Pos()), - MessageType: MessageTypeRemove, - Message: "unnecessary leading newline", - } - } - - return nil -} - -func checkEnd(fset *token.FileSet, end token.Pos, last ast.Node) *Message { - if last == nil { - return nil - } - - if posLine(fset, end)-1 > posLine(fset, last.End()) { - return &Message{ - Diagnostic: end, - FixStart: last.End(), - FixEnd: end, - LineNumbers: linesBetween(fset, last.End(), end), - MessageType: MessageTypeRemove, - Message: "unnecessary trailing newline", - } - } - - return nil -} - -func linesBetween(fset *token.FileSet, a, b token.Pos) []int { - lines := []int{} - aPosition := fset.PositionFor(a, false) - bPosition := fset.PositionFor(b, false) - - for i := aPosition.Line + 1; i < bPosition.Line; i++ { - lines = append(lines, i) - } - - return lines -} diff --git a/vendor/github.com/uudashr/gocognit/LICENSE b/vendor/github.com/uudashr/gocognit/LICENSE deleted file mode 100644 index 75d4b9c98..000000000 --- a/vendor/github.com/uudashr/gocognit/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -MIT License - -Copyright (c) 2019 Nuruddin Ashr - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. diff --git a/vendor/github.com/uudashr/gocognit/README.md b/vendor/github.com/uudashr/gocognit/README.md deleted file mode 100644 index 57f31cf74..000000000 --- a/vendor/github.com/uudashr/gocognit/README.md +++ /dev/null @@ -1,226 +0,0 @@ -[![GoDoc](https://godoc.org/github.com/uudashr/gocognit?status.svg)](https://godoc.org/github.com/uudashr/gocognit) -# Gocognit -Gocognit calculates cognitive complexities of functions (and methods) in Go source code. A measurement of how hard does the code is intuitively to understand. 
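Gocognit is also bundled as a golangci-lint linter, so a complexity threshold can be enforced from there. A hedged sketch (the `min-complexity` key is an assumption based on golangci-lint's gocognit settings); the sections below explain how the score itself is computed:

```yaml
# Hypothetical .golangci.yml fragment: enable gocognit and report
# only functions whose cognitive complexity exceeds 15. The
# `min-complexity` key is assumed from golangci-lint's settings.
linters:
  enable:
    - gocognit

linters-settings:
  gocognit:
    min-complexity: 15
```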
- -## Understanding the complexity - -Given code using `if` statement, -```go -func GetWords(number int) string { - if number == 1 { // +1 - return "one" - } else if number == 2 { // +1 - return "a couple" - } else if number == 3 { // +1 - return "a few" - } else { // +1 - return "lots" - } -} // Cognitive complexity = 4 -``` - -Above code can be refactored using `switch` statement, -```go -func GetWords(number int) string { - switch number { // +1 - case 1: - return "one" - case 2: - return "a couple" - case 3: - return "a few" - default: - return "lots" - } -} // Cognitive complexity = 1 -``` - -As you see above codes are the same, but the second code are easier to understand, that is why the cognitive complexity score are lower compare to the first one. - -## Comparison with cyclomatic complexity - -### Example 1 -#### Cyclomatic complexity -```go -func GetWords(number int) string { // +1 - switch number { - case 1: // +1 - return "one" - case 2: // +1 - return "a couple" - case 3: // +1 - return "a few" - default: - return "lots" - } -} // Cyclomatic complexity = 4 -``` - -#### Cognitive complexity -```go -func GetWords(number int) string { - switch number { // +1 - case 1: - return "one" - case 2: - return "a couple" - case 3: - return "a few" - default: - return "lots" - } -} // Cognitive complexity = 1 -``` - -Cognitive complexity give lower score compare to cyclomatic complexity. - -### Example 2 -#### Cyclomatic complexity -```go -func SumOfPrimes(max int) int { // +1 - var total int - -OUT: - for i := 1; i < max; i++ { // +1 - for j := 2; j < i; j++ { // +1 - if i%j == 0 { // +1 - continue OUT - } - } - total += i - } - - return total -} // Cyclomatic complexity = 4 -``` - -#### Cognitive complexity -```go -func SumOfPrimes(max int) int { - var total int - -OUT: - for i := 1; i < max; i++ { // +1 - for j := 2; j < i; j++ { // +2 (nesting = 1) - if i%j == 0 { // +3 (nesting = 2) - continue OUT // +1 - } - } - total += i - } - - return total -} // Cognitive complexity = 7 -``` - -Cognitive complexity give higher score compare to cyclomatic complexity. - -## Rules - -The cognitive complexity of a function is calculated according to the -following rules: -> Note: these rules are specific for Go, please see the [original whitepaper](https://www.sonarsource.com/docs/CognitiveComplexity.pdf) for more complete reference. - -### Increments -There is an increment for each of the following: -1. `if`, `else if`, `else` -2. `switch`, `select` -3. `for` -4. `goto` LABEL, `break` LABEL, `continue` LABEL -5. sequence of binary logical operators -6. each method in a recursion cycle - -### Nesting level -The following structures increment the nesting level: -1. `if`, `else if`, `else` -2. `switch`, `select` -3. `for` -4. function literal or lambda - -### Nesting increments -The following structures receive a nesting increment commensurate with their nested depth inside nesting structures: -1. `if` -2. `switch`, `select` -3. `for` - -## Installation - -``` -$ go install github.com/uudashr/gocognit/cmd/gocognit@latest -``` - -or - -``` -$ go get github.com/uudashr/gocognit/cmd/gocognit -``` - -## Usage - -``` -$ gocognit -Calculate cognitive complexities of Go functions. - -Usage: - - gocognit [ ...] ... 
- -Flags: - - -over N show functions with complexity > N only - and return exit code 1 if the output is non-empty - -top N show the top N most complex functions only - -avg show the average complexity over all functions, - not depending on whether -over or -top are set - -json encode the output as JSON - -f format string the format to use - (default "{{.PkgName}}.{{.FuncName}}:{{.Complexity}}:{{.Pos}}") - -The (default) output fields for each line are: - - - -The (default) output fields for each line are: - - {{.Complexity}} {{.PkgName}} {{.FuncName}} {{.Pos}} - -or equal to - -The struct being passed to the template is: - - type Stat struct { - PkgName string - FuncName string - Complexity int - Pos token.Position - } -``` - -Examples: - -``` -$ gocognit . -$ gocognit main.go -$ gocognit -top 10 src/ -$ gocognit -over 25 docker -$ gocognit -avg . -$ gocognit -ignore "_test|testdata" . -``` - -The output fields for each line are: -``` - -``` - -## Ignore individual functions -Ignore individual functions by specifying `gocognit:ignore` directive. -```go -//gocognit:ignore -func IgnoreMe() { - // ... -} -``` - -## Related project -- [Gocyclo](https://github.com/fzipp/gocyclo) where the code are based on. -- [Cognitive Complexity: A new way of measuring understandability](https://www.sonarsource.com/docs/CognitiveComplexity.pdf) white paper by G. Ann Campbell. \ No newline at end of file diff --git a/vendor/github.com/uudashr/gocognit/doc.go b/vendor/github.com/uudashr/gocognit/doc.go deleted file mode 100644 index ae3d0a226..000000000 --- a/vendor/github.com/uudashr/gocognit/doc.go +++ /dev/null @@ -1,2 +0,0 @@ -// Package gocognit defines Analyzer other utilities to checks and calculate the complexity of function based on "cognitive complexity" methods. -package gocognit diff --git a/vendor/github.com/uudashr/gocognit/gocognit.go b/vendor/github.com/uudashr/gocognit/gocognit.go deleted file mode 100644 index 2bba2eb4f..000000000 --- a/vendor/github.com/uudashr/gocognit/gocognit.go +++ /dev/null @@ -1,399 +0,0 @@ -package gocognit - -import ( - "fmt" - "go/ast" - "go/token" - - "golang.org/x/tools/go/analysis" - "golang.org/x/tools/go/analysis/passes/inspect" - "golang.org/x/tools/go/ast/inspector" -) - -// Stat is statistic of the complexity. -type Stat struct { - PkgName string - FuncName string - Complexity int - Pos token.Position -} - -func (s Stat) String() string { - return fmt.Sprintf("%d %s %s %s", s.Complexity, s.PkgName, s.FuncName, s.Pos) -} - -// ComplexityStats builds the complexity statistics. -func ComplexityStats(f *ast.File, fset *token.FileSet, stats []Stat) []Stat { - for _, decl := range f.Decls { - if fn, ok := decl.(*ast.FuncDecl); ok { - d := parseDirective(fn.Doc) - if d.Ignore { - continue - } - - stats = append(stats, Stat{ - PkgName: f.Name.Name, - FuncName: funcName(fn), - Complexity: Complexity(fn), - Pos: fset.Position(fn.Pos()), - }) - } - } - return stats -} - -type directive struct { - Ignore bool -} - -func parseDirective(doc *ast.CommentGroup) directive { - if doc == nil { - return directive{} - } - - for _, c := range doc.List { - if c.Text == "//gocognit:ignore" { - return directive{Ignore: true} - } - } - - return directive{} -} - -// funcName returns the name representation of a function or method: -// "(Type).Name" for methods or simply "Name" for functions. 
-func funcName(fn *ast.FuncDecl) string { - if fn.Recv != nil { - if fn.Recv.NumFields() > 0 { - typ := fn.Recv.List[0].Type - return fmt.Sprintf("(%s).%s", recvString(typ), fn.Name) - } - } - return fn.Name.Name -} - -// Complexity calculates the cognitive complexity of a function. -func Complexity(fn *ast.FuncDecl) int { - v := complexityVisitor{ - name: fn.Name, - } - - ast.Walk(&v, fn) - return v.complexity -} - -type complexityVisitor struct { - name *ast.Ident - complexity int - nesting int - elseNodes map[ast.Node]bool - calculatedExprs map[ast.Expr]bool -} - -func (v *complexityVisitor) incNesting() { - v.nesting++ -} - -func (v *complexityVisitor) decNesting() { - v.nesting-- -} - -func (v *complexityVisitor) incComplexity() { - v.complexity++ -} - -func (v *complexityVisitor) nestIncComplexity() { - v.complexity += (v.nesting + 1) -} - -func (v *complexityVisitor) markAsElseNode(n ast.Node) { - if v.elseNodes == nil { - v.elseNodes = make(map[ast.Node]bool) - } - - v.elseNodes[n] = true -} - -func (v *complexityVisitor) markedAsElseNode(n ast.Node) bool { - if v.elseNodes == nil { - return false - } - - return v.elseNodes[n] -} - -func (v *complexityVisitor) markCalculated(e ast.Expr) { - if v.calculatedExprs == nil { - v.calculatedExprs = make(map[ast.Expr]bool) - } - - v.calculatedExprs[e] = true -} - -func (v *complexityVisitor) isCalculated(e ast.Expr) bool { - if v.calculatedExprs == nil { - return false - } - - return v.calculatedExprs[e] -} - -// Visit implements the ast.Visitor interface. -func (v *complexityVisitor) Visit(n ast.Node) ast.Visitor { - switch n := n.(type) { - case *ast.IfStmt: - return v.visitIfStmt(n) - case *ast.SwitchStmt: - return v.visitSwitchStmt(n) - case *ast.TypeSwitchStmt: - return v.visitTypeSwitchStmt(n) - case *ast.SelectStmt: - return v.visitSelectStmt(n) - case *ast.ForStmt: - return v.visitForStmt(n) - case *ast.RangeStmt: - return v.visitRangeStmt(n) - case *ast.FuncLit: - return v.visitFuncLit(n) - case *ast.BranchStmt: - return v.visitBranchStmt(n) - case *ast.BinaryExpr: - return v.visitBinaryExpr(n) - case *ast.CallExpr: - return v.visitCallExpr(n) - } - return v -} - -func (v *complexityVisitor) visitIfStmt(n *ast.IfStmt) ast.Visitor { - v.incIfComplexity(n) - - if n := n.Init; n != nil { - ast.Walk(v, n) - } - - ast.Walk(v, n.Cond) - - pure := !v.markedAsElseNode(n) // pure `if` statement, not an `else if` - if pure { - v.incNesting() - ast.Walk(v, n.Body) - v.decNesting() - } else { - ast.Walk(v, n.Body) - } - - if _, ok := n.Else.(*ast.BlockStmt); ok { - v.incComplexity() - - ast.Walk(v, n.Else) - } else if _, ok := n.Else.(*ast.IfStmt); ok { - v.markAsElseNode(n.Else) - ast.Walk(v, n.Else) - } - - return nil -} - -func (v *complexityVisitor) visitSwitchStmt(n *ast.SwitchStmt) ast.Visitor { - v.nestIncComplexity() - - if n := n.Init; n != nil { - ast.Walk(v, n) - } - - if n := n.Tag; n != nil { - ast.Walk(v, n) - } - - v.incNesting() - ast.Walk(v, n.Body) - v.decNesting() - return nil -} - -func (v *complexityVisitor) visitTypeSwitchStmt(n *ast.TypeSwitchStmt) ast.Visitor { - v.nestIncComplexity() - - if n := n.Init; n != nil { - ast.Walk(v, n) - } - - if n := n.Assign; n != nil { - ast.Walk(v, n) - } - - v.incNesting() - ast.Walk(v, n.Body) - v.decNesting() - return nil -} - -func (v *complexityVisitor) visitSelectStmt(n *ast.SelectStmt) ast.Visitor { - v.nestIncComplexity() - - v.incNesting() - ast.Walk(v, n.Body) - v.decNesting() - return nil -} - -func (v *complexityVisitor) visitForStmt(n *ast.ForStmt) ast.Visitor { - 
v.nestIncComplexity() - - if n := n.Init; n != nil { - ast.Walk(v, n) - } - - if n := n.Cond; n != nil { - ast.Walk(v, n) - } - - if n := n.Post; n != nil { - ast.Walk(v, n) - } - - v.incNesting() - ast.Walk(v, n.Body) - v.decNesting() - return nil -} - -func (v *complexityVisitor) visitRangeStmt(n *ast.RangeStmt) ast.Visitor { - v.nestIncComplexity() - - if n := n.Key; n != nil { - ast.Walk(v, n) - } - - if n := n.Value; n != nil { - ast.Walk(v, n) - } - - ast.Walk(v, n.X) - - v.incNesting() - ast.Walk(v, n.Body) - v.decNesting() - return nil -} - -func (v *complexityVisitor) visitFuncLit(n *ast.FuncLit) ast.Visitor { - ast.Walk(v, n.Type) - - v.incNesting() - ast.Walk(v, n.Body) - v.decNesting() - return nil -} - -func (v *complexityVisitor) visitBranchStmt(n *ast.BranchStmt) ast.Visitor { - if n.Label != nil { - v.incComplexity() - } - return v -} - -func (v *complexityVisitor) visitBinaryExpr(n *ast.BinaryExpr) ast.Visitor { - if isBinaryLogicalOp(n.Op) && !v.isCalculated(n) { - ops := v.collectBinaryOps(n) - - var lastOp token.Token - for _, op := range ops { - if lastOp != op { - v.incComplexity() - lastOp = op - } - } - } - return v -} - -func (v *complexityVisitor) visitCallExpr(n *ast.CallExpr) ast.Visitor { - if callIdent, ok := n.Fun.(*ast.Ident); ok { - obj, name := callIdent.Obj, callIdent.Name - if obj == v.name.Obj && name == v.name.Name { - // called by same function directly (direct recursion) - v.incComplexity() - } - } - return v -} - -func (v *complexityVisitor) collectBinaryOps(exp ast.Expr) []token.Token { - v.markCalculated(exp) - if exp, ok := exp.(*ast.BinaryExpr); ok { - return mergeBinaryOps(v.collectBinaryOps(exp.X), exp.Op, v.collectBinaryOps(exp.Y)) - } - return nil -} - -func (v *complexityVisitor) incIfComplexity(n *ast.IfStmt) { - if v.markedAsElseNode(n) { - v.incComplexity() - } else { - v.nestIncComplexity() - } -} - -func mergeBinaryOps(x []token.Token, op token.Token, y []token.Token) []token.Token { - var out []token.Token - out = append(out, x...) - if isBinaryLogicalOp(op) { - out = append(out, op) - } - out = append(out, y...) - return out -} - -func isBinaryLogicalOp(op token.Token) bool { - return op == token.LAND || op == token.LOR -} - -const Doc = `Find complex function using cognitive complexity calculation. - -The gocognit analysis reports functions or methods which the complexity is over -than the specified limit.` - -// Analyzer reports a diagnostic for every function or method which is -// too complex specified by its -over flag. 
-var Analyzer = &analysis.Analyzer{ - Name: "gocognit", - Doc: Doc, - Requires: []*analysis.Analyzer{inspect.Analyzer}, - Run: run, -} - -var ( - over int // -over flag -) - -func init() { - Analyzer.Flags.IntVar(&over, "over", over, "show functions with complexity > N only") -} - -func run(pass *analysis.Pass) (interface{}, error) { - inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) - - nodeFilter := []ast.Node{ - (*ast.FuncDecl)(nil), - } - inspect.Preorder(nodeFilter, func(n ast.Node) { - funcDecl := n.(*ast.FuncDecl) - - d := parseDirective(funcDecl.Doc) - if d.Ignore { - return - } - - fnName := funcName(funcDecl) - - fnComplexity := Complexity(funcDecl) - - if fnComplexity > over { - pass.Reportf(funcDecl.Pos(), "cognitive complexity %d of func %s is high (> %d)", fnComplexity, fnName, over) - } - }) - - return nil, nil -} diff --git a/vendor/github.com/uudashr/gocognit/recv.go b/vendor/github.com/uudashr/gocognit/recv.go deleted file mode 100644 index 2f20d843a..000000000 --- a/vendor/github.com/uudashr/gocognit/recv.go +++ /dev/null @@ -1,24 +0,0 @@ -//go:build go1.18 -// +build go1.18 - -package gocognit - -import ( - "go/ast" -) - -// recvString returns a string representation of recv of the -// form "T", "*T", or "BADRECV" (if not a proper receiver type). -func recvString(recv ast.Expr) string { - switch t := recv.(type) { - case *ast.Ident: - return t.Name - case *ast.StarExpr: - return "*" + recvString(t.X) - case *ast.IndexExpr: - return recvString(t.X) - case *ast.IndexListExpr: - return recvString(t.X) - } - return "BADRECV" -} diff --git a/vendor/github.com/uudashr/gocognit/recv_pre118.go b/vendor/github.com/uudashr/gocognit/recv_pre118.go deleted file mode 100644 index 9e0ebfd82..000000000 --- a/vendor/github.com/uudashr/gocognit/recv_pre118.go +++ /dev/null @@ -1,20 +0,0 @@ -//go:build !go1.18 -// +build !go1.18 - -package gocognit - -import ( - "go/ast" -) - -// recvString returns a string representation of recv of the -// form "T", "*T", or "BADRECV" (if not a proper receiver type). 
-func recvString(recv ast.Expr) string { - switch t := recv.(type) { - case *ast.Ident: - return t.Name - case *ast.StarExpr: - return "*" + recvString(t.X) - } - return "BADRECV" -} diff --git a/vendor/github.com/xen0n/gosmopolitan/.editorconfig b/vendor/github.com/xen0n/gosmopolitan/.editorconfig deleted file mode 100644 index 0c0f7e7e2..000000000 --- a/vendor/github.com/xen0n/gosmopolitan/.editorconfig +++ /dev/null @@ -1,23 +0,0 @@ -# EditorConfig is awesome: http://EditorConfig.org - -root = true - -[*] -indent_style = space -trim_trailing_whitespace = true -end_of_line = lf -insert_final_newline = true -charset = utf-8 - -[{*.sh,*.md}] -indent_size = 4 - -[{*.yaml,*.yml}] -indent_size = 2 - -# hard tabs for Go and Makefile per best practice of file format -[*.go] -indent_style = tab - -[Makefile] -indent_style = tab diff --git a/vendor/github.com/xen0n/gosmopolitan/.gitignore b/vendor/github.com/xen0n/gosmopolitan/.gitignore deleted file mode 100644 index 1a1abaa20..000000000 --- a/vendor/github.com/xen0n/gosmopolitan/.gitignore +++ /dev/null @@ -1,188 +0,0 @@ -# ignore the local build artifact -/gosmopolitan - -# and test artifacts -/coverage.txt - -# the following are auto-generated - -# Created by https://www.toptal.com/developers/gitignore/api/go,visualstudiocode,vim,goland -# Edit at https://www.toptal.com/developers/gitignore?templates=go,visualstudiocode,vim,goland - -### Go ### -# If you prefer the allow list template instead of the deny list, see community template: -# https://github.com/github/gitignore/blob/main/community/Golang/Go.AllowList.gitignore -# -# Binaries for programs and plugins -*.exe -*.exe~ -*.dll -*.so -*.dylib - -# Test binary, built with `go test -c` -*.test - -# Output of the go coverage tool, specifically when used with LiteIDE -*.out - -# Dependency directories (remove the comment below to include it) -# vendor/ - -# Go workspace file -go.work - -### GoLand ### -# Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio, WebStorm and Rider -# Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839 - -# User-specific stuff -.idea/**/workspace.xml -.idea/**/tasks.xml -.idea/**/usage.statistics.xml -.idea/**/dictionaries -.idea/**/shelf - -# AWS User-specific -.idea/**/aws.xml - -# Generated files -.idea/**/contentModel.xml - -# Sensitive or high-churn files -.idea/**/dataSources/ -.idea/**/dataSources.ids -.idea/**/dataSources.local.xml -.idea/**/sqlDataSources.xml -.idea/**/dynamic.xml -.idea/**/uiDesigner.xml -.idea/**/dbnavigator.xml - -# Gradle -.idea/**/gradle.xml -.idea/**/libraries - -# Gradle and Maven with auto-import -# When using Gradle or Maven with auto-import, you should exclude module files, -# since they will be recreated, and may cause churn. Uncomment if using -# auto-import. 
-# .idea/artifacts -# .idea/compiler.xml -# .idea/jarRepositories.xml -# .idea/modules.xml -# .idea/*.iml -# .idea/modules -# *.iml -# *.ipr - -# CMake -cmake-build-*/ - -# Mongo Explorer plugin -.idea/**/mongoSettings.xml - -# File-based project format -*.iws - -# IntelliJ -out/ - -# mpeltonen/sbt-idea plugin -.idea_modules/ - -# JIRA plugin -atlassian-ide-plugin.xml - -# Cursive Clojure plugin -.idea/replstate.xml - -# SonarLint plugin -.idea/sonarlint/ - -# Crashlytics plugin (for Android Studio and IntelliJ) -com_crashlytics_export_strings.xml -crashlytics.properties -crashlytics-build.properties -fabric.properties - -# Editor-based Rest Client -.idea/httpRequests - -# Android studio 3.1+ serialized cache file -.idea/caches/build_file_checksums.ser - -### GoLand Patch ### -# Comment Reason: https://github.com/joeblau/gitignore.io/issues/186#issuecomment-215987721 - -# *.iml -# modules.xml -# .idea/misc.xml -# *.ipr - -# Sonarlint plugin -# https://plugins.jetbrains.com/plugin/7973-sonarlint -.idea/**/sonarlint/ - -# SonarQube Plugin -# https://plugins.jetbrains.com/plugin/7238-sonarqube-community-plugin -.idea/**/sonarIssues.xml - -# Markdown Navigator plugin -# https://plugins.jetbrains.com/plugin/7896-markdown-navigator-enhanced -.idea/**/markdown-navigator.xml -.idea/**/markdown-navigator-enh.xml -.idea/**/markdown-navigator/ - -# Cache file creation bug -# See https://youtrack.jetbrains.com/issue/JBR-2257 -.idea/$CACHE_FILE$ - -# CodeStream plugin -# https://plugins.jetbrains.com/plugin/12206-codestream -.idea/codestream.xml - -# Azure Toolkit for IntelliJ plugin -# https://plugins.jetbrains.com/plugin/8053-azure-toolkit-for-intellij -.idea/**/azureSettings.xml - -### Vim ### -# Swap -[._]*.s[a-v][a-z] -!*.svg # comment out if you don't need vector files -[._]*.sw[a-p] -[._]s[a-rt-v][a-z] -[._]ss[a-gi-z] -[._]sw[a-p] - -# Session -Session.vim -Sessionx.vim - -# Temporary -.netrwhist -*~ -# Auto-generated tag files -tags -# Persistent undo -[._]*.un~ - -### VisualStudioCode ### -.vscode/* -!.vscode/settings.json -!.vscode/tasks.json -!.vscode/launch.json -!.vscode/extensions.json -!.vscode/*.code-snippets - -# Local History for Visual Studio Code -.history/ - -# Built Visual Studio Code Extensions -*.vsix - -### VisualStudioCode Patch ### -# Ignore all local history of files -.history -.ionide - -# End of https://www.toptal.com/developers/gitignore/api/go,visualstudiocode,vim,goland diff --git a/vendor/github.com/xen0n/gosmopolitan/.golangci.yml b/vendor/github.com/xen0n/gosmopolitan/.golangci.yml deleted file mode 100644 index 0bce12501..000000000 --- a/vendor/github.com/xen0n/gosmopolitan/.golangci.yml +++ /dev/null @@ -1,31 +0,0 @@ -run: - go: '1.19' - modules-download-mode: readonly - -linters: - enable: - - goheader - - goimports - - gosec - - gosimple - - lll - - nakedret - - revive - - stylecheck - - unused - -linters-settings: - goheader: - template: |- - SPDX-License-Identifier: GPL-3.0-or-later - goimports: - local-prefixes: github.com/xen0n/gosmopolitan - gosimple: - go: '1.19' - lll: - line-length: 120 - tab-width: 4 - nakedret: - max-func-lines: 1 - stylecheck: - go: '1.19' diff --git a/vendor/github.com/xen0n/gosmopolitan/LICENSE b/vendor/github.com/xen0n/gosmopolitan/LICENSE deleted file mode 100644 index 94a9ed024..000000000 --- a/vendor/github.com/xen0n/gosmopolitan/LICENSE +++ /dev/null @@ -1,674 +0,0 @@ - GNU GENERAL PUBLIC LICENSE - Version 3, 29 June 2007 - - Copyright (C) 2007 Free Software Foundation, Inc. 
- Everyone is permitted to copy and distribute verbatim copies - of this license document, but changing it is not allowed. - - Preamble - - The GNU General Public License is a free, copyleft license for -software and other kinds of works. - - The licenses for most software and other practical works are designed -to take away your freedom to share and change the works. By contrast, -the GNU General Public License is intended to guarantee your freedom to -share and change all versions of a program--to make sure it remains free -software for all its users. We, the Free Software Foundation, use the -GNU General Public License for most of our software; it applies also to -any other work released this way by its authors. You can apply it to -your programs, too. - - When we speak of free software, we are referring to freedom, not -price. Our General Public Licenses are designed to make sure that you -have the freedom to distribute copies of free software (and charge for -them if you wish), that you receive source code or can get it if you -want it, that you can change the software or use pieces of it in new -free programs, and that you know you can do these things. - - To protect your rights, we need to prevent others from denying you -these rights or asking you to surrender the rights. Therefore, you have -certain responsibilities if you distribute copies of the software, or if -you modify it: responsibilities to respect the freedom of others. - - For example, if you distribute copies of such a program, whether -gratis or for a fee, you must pass on to the recipients the same -freedoms that you received. You must make sure that they, too, receive -or can get the source code. And you must show them these terms so they -know their rights. - - Developers that use the GNU GPL protect your rights with two steps: -(1) assert copyright on the software, and (2) offer you this License -giving you legal permission to copy, distribute and/or modify it. - - For the developers' and authors' protection, the GPL clearly explains -that there is no warranty for this free software. For both users' and -authors' sake, the GPL requires that modified versions be marked as -changed, so that their problems will not be attributed erroneously to -authors of previous versions. - - Some devices are designed to deny users access to install or run -modified versions of the software inside them, although the manufacturer -can do so. This is fundamentally incompatible with the aim of -protecting users' freedom to change the software. The systematic -pattern of such abuse occurs in the area of products for individuals to -use, which is precisely where it is most unacceptable. Therefore, we -have designed this version of the GPL to prohibit the practice for those -products. If such problems arise substantially in other domains, we -stand ready to extend this provision to those domains in future versions -of the GPL, as needed to protect the freedom of users. - - Finally, every program is threatened constantly by software patents. -States should not allow patents to restrict development and use of -software on general-purpose computers, but in those that do, we wish to -avoid the special danger that patents applied to a free program could -make it effectively proprietary. To prevent this, the GPL assures that -patents cannot be used to render the program non-free. - - The precise terms and conditions for copying, distribution and -modification follow. - - TERMS AND CONDITIONS - - 0. Definitions. 
- - "This License" refers to version 3 of the GNU General Public License. - - "Copyright" also means copyright-like laws that apply to other kinds of -works, such as semiconductor masks. - - "The Program" refers to any copyrightable work licensed under this -License. Each licensee is addressed as "you". "Licensees" and -"recipients" may be individuals or organizations. - - To "modify" a work means to copy from or adapt all or part of the work -in a fashion requiring copyright permission, other than the making of an -exact copy. The resulting work is called a "modified version" of the -earlier work or a work "based on" the earlier work. - - A "covered work" means either the unmodified Program or a work based -on the Program. - - To "propagate" a work means to do anything with it that, without -permission, would make you directly or secondarily liable for -infringement under applicable copyright law, except executing it on a -computer or modifying a private copy. Propagation includes copying, -distribution (with or without modification), making available to the -public, and in some countries other activities as well. - - To "convey" a work means any kind of propagation that enables other -parties to make or receive copies. Mere interaction with a user through -a computer network, with no transfer of a copy, is not conveying. - - An interactive user interface displays "Appropriate Legal Notices" -to the extent that it includes a convenient and prominently visible -feature that (1) displays an appropriate copyright notice, and (2) -tells the user that there is no warranty for the work (except to the -extent that warranties are provided), that licensees may convey the -work under this License, and how to view a copy of this License. If -the interface presents a list of user commands or options, such as a -menu, a prominent item in the list meets this criterion. - - 1. Source Code. - - The "source code" for a work means the preferred form of the work -for making modifications to it. "Object code" means any non-source -form of a work. - - A "Standard Interface" means an interface that either is an official -standard defined by a recognized standards body, or, in the case of -interfaces specified for a particular programming language, one that -is widely used among developers working in that language. - - The "System Libraries" of an executable work include anything, other -than the work as a whole, that (a) is included in the normal form of -packaging a Major Component, but which is not part of that Major -Component, and (b) serves only to enable use of the work with that -Major Component, or to implement a Standard Interface for which an -implementation is available to the public in source code form. A -"Major Component", in this context, means a major essential component -(kernel, window system, and so on) of the specific operating system -(if any) on which the executable work runs, or a compiler used to -produce the work, or an object code interpreter used to run it. - - The "Corresponding Source" for a work in object code form means all -the source code needed to generate, install, and (for an executable -work) run the object code and to modify the work, including scripts to -control those activities. However, it does not include the work's -System Libraries, or general-purpose tools or generally available free -programs which are used unmodified in performing those activities but -which are not part of the work. 
For example, Corresponding Source -includes interface definition files associated with source files for -the work, and the source code for shared libraries and dynamically -linked subprograms that the work is specifically designed to require, -such as by intimate data communication or control flow between those -subprograms and other parts of the work. - - The Corresponding Source need not include anything that users -can regenerate automatically from other parts of the Corresponding -Source. - - The Corresponding Source for a work in source code form is that -same work. - - 2. Basic Permissions. - - All rights granted under this License are granted for the term of -copyright on the Program, and are irrevocable provided the stated -conditions are met. This License explicitly affirms your unlimited -permission to run the unmodified Program. The output from running a -covered work is covered by this License only if the output, given its -content, constitutes a covered work. This License acknowledges your -rights of fair use or other equivalent, as provided by copyright law. - - You may make, run and propagate covered works that you do not -convey, without conditions so long as your license otherwise remains -in force. You may convey covered works to others for the sole purpose -of having them make modifications exclusively for you, or provide you -with facilities for running those works, provided that you comply with -the terms of this License in conveying all material for which you do -not control copyright. Those thus making or running the covered works -for you must do so exclusively on your behalf, under your direction -and control, on terms that prohibit them from making any copies of -your copyrighted material outside their relationship with you. - - Conveying under any other circumstances is permitted solely under -the conditions stated below. Sublicensing is not allowed; section 10 -makes it unnecessary. - - 3. Protecting Users' Legal Rights From Anti-Circumvention Law. - - No covered work shall be deemed part of an effective technological -measure under any applicable law fulfilling obligations under article -11 of the WIPO copyright treaty adopted on 20 December 1996, or -similar laws prohibiting or restricting circumvention of such -measures. - - When you convey a covered work, you waive any legal power to forbid -circumvention of technological measures to the extent such circumvention -is effected by exercising rights under this License with respect to -the covered work, and you disclaim any intention to limit operation or -modification of the work as a means of enforcing, against the work's -users, your or third parties' legal rights to forbid circumvention of -technological measures. - - 4. Conveying Verbatim Copies. - - You may convey verbatim copies of the Program's source code as you -receive it, in any medium, provided that you conspicuously and -appropriately publish on each copy an appropriate copyright notice; -keep intact all notices stating that this License and any -non-permissive terms added in accord with section 7 apply to the code; -keep intact all notices of the absence of any warranty; and give all -recipients a copy of this License along with the Program. - - You may charge any price or no price for each copy that you convey, -and you may offer support or warranty protection for a fee. - - 5. Conveying Modified Source Versions. 
- - You may convey a work based on the Program, or the modifications to -produce it from the Program, in the form of source code under the -terms of section 4, provided that you also meet all of these conditions: - - a) The work must carry prominent notices stating that you modified - it, and giving a relevant date. - - b) The work must carry prominent notices stating that it is - released under this License and any conditions added under section - 7. This requirement modifies the requirement in section 4 to - "keep intact all notices". - - c) You must license the entire work, as a whole, under this - License to anyone who comes into possession of a copy. This - License will therefore apply, along with any applicable section 7 - additional terms, to the whole of the work, and all its parts, - regardless of how they are packaged. This License gives no - permission to license the work in any other way, but it does not - invalidate such permission if you have separately received it. - - d) If the work has interactive user interfaces, each must display - Appropriate Legal Notices; however, if the Program has interactive - interfaces that do not display Appropriate Legal Notices, your - work need not make them do so. - - A compilation of a covered work with other separate and independent -works, which are not by their nature extensions of the covered work, -and which are not combined with it such as to form a larger program, -in or on a volume of a storage or distribution medium, is called an -"aggregate" if the compilation and its resulting copyright are not -used to limit the access or legal rights of the compilation's users -beyond what the individual works permit. Inclusion of a covered work -in an aggregate does not cause this License to apply to the other -parts of the aggregate. - - 6. Conveying Non-Source Forms. - - You may convey a covered work in object code form under the terms -of sections 4 and 5, provided that you also convey the -machine-readable Corresponding Source under the terms of this License, -in one of these ways: - - a) Convey the object code in, or embodied in, a physical product - (including a physical distribution medium), accompanied by the - Corresponding Source fixed on a durable physical medium - customarily used for software interchange. - - b) Convey the object code in, or embodied in, a physical product - (including a physical distribution medium), accompanied by a - written offer, valid for at least three years and valid for as - long as you offer spare parts or customer support for that product - model, to give anyone who possesses the object code either (1) a - copy of the Corresponding Source for all the software in the - product that is covered by this License, on a durable physical - medium customarily used for software interchange, for a price no - more than your reasonable cost of physically performing this - conveying of source, or (2) access to copy the - Corresponding Source from a network server at no charge. - - c) Convey individual copies of the object code with a copy of the - written offer to provide the Corresponding Source. This - alternative is allowed only occasionally and noncommercially, and - only if you received the object code with such an offer, in accord - with subsection 6b. - - d) Convey the object code by offering access from a designated - place (gratis or for a charge), and offer equivalent access to the - Corresponding Source in the same way through the same place at no - further charge. 
You need not require recipients to copy the - Corresponding Source along with the object code. If the place to - copy the object code is a network server, the Corresponding Source - may be on a different server (operated by you or a third party) - that supports equivalent copying facilities, provided you maintain - clear directions next to the object code saying where to find the - Corresponding Source. Regardless of what server hosts the - Corresponding Source, you remain obligated to ensure that it is - available for as long as needed to satisfy these requirements. - - e) Convey the object code using peer-to-peer transmission, provided - you inform other peers where the object code and Corresponding - Source of the work are being offered to the general public at no - charge under subsection 6d. - - A separable portion of the object code, whose source code is excluded -from the Corresponding Source as a System Library, need not be -included in conveying the object code work. - - A "User Product" is either (1) a "consumer product", which means any -tangible personal property which is normally used for personal, family, -or household purposes, or (2) anything designed or sold for incorporation -into a dwelling. In determining whether a product is a consumer product, -doubtful cases shall be resolved in favor of coverage. For a particular -product received by a particular user, "normally used" refers to a -typical or common use of that class of product, regardless of the status -of the particular user or of the way in which the particular user -actually uses, or expects or is expected to use, the product. A product -is a consumer product regardless of whether the product has substantial -commercial, industrial or non-consumer uses, unless such uses represent -the only significant mode of use of the product. - - "Installation Information" for a User Product means any methods, -procedures, authorization keys, or other information required to install -and execute modified versions of a covered work in that User Product from -a modified version of its Corresponding Source. The information must -suffice to ensure that the continued functioning of the modified object -code is in no case prevented or interfered with solely because -modification has been made. - - If you convey an object code work under this section in, or with, or -specifically for use in, a User Product, and the conveying occurs as -part of a transaction in which the right of possession and use of the -User Product is transferred to the recipient in perpetuity or for a -fixed term (regardless of how the transaction is characterized), the -Corresponding Source conveyed under this section must be accompanied -by the Installation Information. But this requirement does not apply -if neither you nor any third party retains the ability to install -modified object code on the User Product (for example, the work has -been installed in ROM). - - The requirement to provide Installation Information does not include a -requirement to continue to provide support service, warranty, or updates -for a work that has been modified or installed by the recipient, or for -the User Product in which it has been modified or installed. Access to a -network may be denied when the modification itself materially and -adversely affects the operation of the network or violates the rules and -protocols for communication across the network. 
- - Corresponding Source conveyed, and Installation Information provided, -in accord with this section must be in a format that is publicly -documented (and with an implementation available to the public in -source code form), and must require no special password or key for -unpacking, reading or copying. - - 7. Additional Terms. - - "Additional permissions" are terms that supplement the terms of this -License by making exceptions from one or more of its conditions. -Additional permissions that are applicable to the entire Program shall -be treated as though they were included in this License, to the extent -that they are valid under applicable law. If additional permissions -apply only to part of the Program, that part may be used separately -under those permissions, but the entire Program remains governed by -this License without regard to the additional permissions. - - When you convey a copy of a covered work, you may at your option -remove any additional permissions from that copy, or from any part of -it. (Additional permissions may be written to require their own -removal in certain cases when you modify the work.) You may place -additional permissions on material, added by you to a covered work, -for which you have or can give appropriate copyright permission. - - Notwithstanding any other provision of this License, for material you -add to a covered work, you may (if authorized by the copyright holders of -that material) supplement the terms of this License with terms: - - a) Disclaiming warranty or limiting liability differently from the - terms of sections 15 and 16 of this License; or - - b) Requiring preservation of specified reasonable legal notices or - author attributions in that material or in the Appropriate Legal - Notices displayed by works containing it; or - - c) Prohibiting misrepresentation of the origin of that material, or - requiring that modified versions of such material be marked in - reasonable ways as different from the original version; or - - d) Limiting the use for publicity purposes of names of licensors or - authors of the material; or - - e) Declining to grant rights under trademark law for use of some - trade names, trademarks, or service marks; or - - f) Requiring indemnification of licensors and authors of that - material by anyone who conveys the material (or modified versions of - it) with contractual assumptions of liability to the recipient, for - any liability that these contractual assumptions directly impose on - those licensors and authors. - - All other non-permissive additional terms are considered "further -restrictions" within the meaning of section 10. If the Program as you -received it, or any part of it, contains a notice stating that it is -governed by this License along with a term that is a further -restriction, you may remove that term. If a license document contains -a further restriction but permits relicensing or conveying under this -License, you may add to a covered work material governed by the terms -of that license document, provided that the further restriction does -not survive such relicensing or conveying. - - If you add terms to a covered work in accord with this section, you -must place, in the relevant source files, a statement of the -additional terms that apply to those files, or a notice indicating -where to find the applicable terms. - - Additional terms, permissive or non-permissive, may be stated in the -form of a separately written license, or stated as exceptions; -the above requirements apply either way. 
- - 8. Termination. - - You may not propagate or modify a covered work except as expressly -provided under this License. Any attempt otherwise to propagate or -modify it is void, and will automatically terminate your rights under -this License (including any patent licenses granted under the third -paragraph of section 11). - - However, if you cease all violation of this License, then your -license from a particular copyright holder is reinstated (a) -provisionally, unless and until the copyright holder explicitly and -finally terminates your license, and (b) permanently, if the copyright -holder fails to notify you of the violation by some reasonable means -prior to 60 days after the cessation. - - Moreover, your license from a particular copyright holder is -reinstated permanently if the copyright holder notifies you of the -violation by some reasonable means, this is the first time you have -received notice of violation of this License (for any work) from that -copyright holder, and you cure the violation prior to 30 days after -your receipt of the notice. - - Termination of your rights under this section does not terminate the -licenses of parties who have received copies or rights from you under -this License. If your rights have been terminated and not permanently -reinstated, you do not qualify to receive new licenses for the same -material under section 10. - - 9. Acceptance Not Required for Having Copies. - - You are not required to accept this License in order to receive or -run a copy of the Program. Ancillary propagation of a covered work -occurring solely as a consequence of using peer-to-peer transmission -to receive a copy likewise does not require acceptance. However, -nothing other than this License grants you permission to propagate or -modify any covered work. These actions infringe copyright if you do -not accept this License. Therefore, by modifying or propagating a -covered work, you indicate your acceptance of this License to do so. - - 10. Automatic Licensing of Downstream Recipients. - - Each time you convey a covered work, the recipient automatically -receives a license from the original licensors, to run, modify and -propagate that work, subject to this License. You are not responsible -for enforcing compliance by third parties with this License. - - An "entity transaction" is a transaction transferring control of an -organization, or substantially all assets of one, or subdividing an -organization, or merging organizations. If propagation of a covered -work results from an entity transaction, each party to that -transaction who receives a copy of the work also receives whatever -licenses to the work the party's predecessor in interest had or could -give under the previous paragraph, plus a right to possession of the -Corresponding Source of the work from the predecessor in interest, if -the predecessor has it or can get it with reasonable efforts. - - You may not impose any further restrictions on the exercise of the -rights granted or affirmed under this License. For example, you may -not impose a license fee, royalty, or other charge for exercise of -rights granted under this License, and you may not initiate litigation -(including a cross-claim or counterclaim in a lawsuit) alleging that -any patent claim is infringed by making, using, selling, offering for -sale, or importing the Program or any portion of it. - - 11. Patents. 
- - A "contributor" is a copyright holder who authorizes use under this -License of the Program or a work on which the Program is based. The -work thus licensed is called the contributor's "contributor version". - - A contributor's "essential patent claims" are all patent claims -owned or controlled by the contributor, whether already acquired or -hereafter acquired, that would be infringed by some manner, permitted -by this License, of making, using, or selling its contributor version, -but do not include claims that would be infringed only as a -consequence of further modification of the contributor version. For -purposes of this definition, "control" includes the right to grant -patent sublicenses in a manner consistent with the requirements of -this License. - - Each contributor grants you a non-exclusive, worldwide, royalty-free -patent license under the contributor's essential patent claims, to -make, use, sell, offer for sale, import and otherwise run, modify and -propagate the contents of its contributor version. - - In the following three paragraphs, a "patent license" is any express -agreement or commitment, however denominated, not to enforce a patent -(such as an express permission to practice a patent or covenant not to -sue for patent infringement). To "grant" such a patent license to a -party means to make such an agreement or commitment not to enforce a -patent against the party. - - If you convey a covered work, knowingly relying on a patent license, -and the Corresponding Source of the work is not available for anyone -to copy, free of charge and under the terms of this License, through a -publicly available network server or other readily accessible means, -then you must either (1) cause the Corresponding Source to be so -available, or (2) arrange to deprive yourself of the benefit of the -patent license for this particular work, or (3) arrange, in a manner -consistent with the requirements of this License, to extend the patent -license to downstream recipients. "Knowingly relying" means you have -actual knowledge that, but for the patent license, your conveying the -covered work in a country, or your recipient's use of the covered work -in a country, would infringe one or more identifiable patents in that -country that you have reason to believe are valid. - - If, pursuant to or in connection with a single transaction or -arrangement, you convey, or propagate by procuring conveyance of, a -covered work, and grant a patent license to some of the parties -receiving the covered work authorizing them to use, propagate, modify -or convey a specific copy of the covered work, then the patent license -you grant is automatically extended to all recipients of the covered -work and works based on it. - - A patent license is "discriminatory" if it does not include within -the scope of its coverage, prohibits the exercise of, or is -conditioned on the non-exercise of one or more of the rights that are -specifically granted under this License. 
You may not convey a covered -work if you are a party to an arrangement with a third party that is -in the business of distributing software, under which you make payment -to the third party based on the extent of your activity of conveying -the work, and under which the third party grants, to any of the -parties who would receive the covered work from you, a discriminatory -patent license (a) in connection with copies of the covered work -conveyed by you (or copies made from those copies), or (b) primarily -for and in connection with specific products or compilations that -contain the covered work, unless you entered into that arrangement, -or that patent license was granted, prior to 28 March 2007. - - Nothing in this License shall be construed as excluding or limiting -any implied license or other defenses to infringement that may -otherwise be available to you under applicable patent law. - - 12. No Surrender of Others' Freedom. - - If conditions are imposed on you (whether by court order, agreement or -otherwise) that contradict the conditions of this License, they do not -excuse you from the conditions of this License. If you cannot convey a -covered work so as to satisfy simultaneously your obligations under this -License and any other pertinent obligations, then as a consequence you may -not convey it at all. For example, if you agree to terms that obligate you -to collect a royalty for further conveying from those to whom you convey -the Program, the only way you could satisfy both those terms and this -License would be to refrain entirely from conveying the Program. - - 13. Use with the GNU Affero General Public License. - - Notwithstanding any other provision of this License, you have -permission to link or combine any covered work with a work licensed -under version 3 of the GNU Affero General Public License into a single -combined work, and to convey the resulting work. The terms of this -License will continue to apply to the part which is the covered work, -but the special requirements of the GNU Affero General Public License, -section 13, concerning interaction through a network will apply to the -combination as such. - - 14. Revised Versions of this License. - - The Free Software Foundation may publish revised and/or new versions of -the GNU General Public License from time to time. Such new versions will -be similar in spirit to the present version, but may differ in detail to -address new problems or concerns. - - Each version is given a distinguishing version number. If the -Program specifies that a certain numbered version of the GNU General -Public License "or any later version" applies to it, you have the -option of following the terms and conditions either of that numbered -version or of any later version published by the Free Software -Foundation. If the Program does not specify a version number of the -GNU General Public License, you may choose any version ever published -by the Free Software Foundation. - - If the Program specifies that a proxy can decide which future -versions of the GNU General Public License can be used, that proxy's -public statement of acceptance of a version permanently authorizes you -to choose that version for the Program. - - Later license versions may give you additional or different -permissions. However, no additional obligations are imposed on any -author or copyright holder as a result of your choosing to follow a -later version. - - 15. Disclaimer of Warranty. 
- - THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY -APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT -HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY -OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, -THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR -PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM -IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF -ALL NECESSARY SERVICING, REPAIR OR CORRECTION. - - 16. Limitation of Liability. - - IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING -WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS -THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY -GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE -USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF -DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD -PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), -EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF -SUCH DAMAGES. - - 17. Interpretation of Sections 15 and 16. - - If the disclaimer of warranty and limitation of liability provided -above cannot be given local legal effect according to their terms, -reviewing courts shall apply local law that most closely approximates -an absolute waiver of all civil liability in connection with the -Program, unless a warranty or assumption of liability accompanies a -copy of the Program in return for a fee. - - END OF TERMS AND CONDITIONS - - How to Apply These Terms to Your New Programs - - If you develop a new program, and you want it to be of the greatest -possible use to the public, the best way to achieve this is to make it -free software which everyone can redistribute and change under these terms. - - To do so, attach the following notices to the program. It is safest -to attach them to the start of each source file to most effectively -state the exclusion of warranty; and each file should have at least -the "copyright" line and a pointer to where the full notice is found. - - - Copyright (C) - - This program is free software: you can redistribute it and/or modify - it under the terms of the GNU General Public License as published by - the Free Software Foundation, either version 3 of the License, or - (at your option) any later version. - - This program is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - GNU General Public License for more details. - - You should have received a copy of the GNU General Public License - along with this program. If not, see . - -Also add information on how to contact you by electronic and paper mail. - - If the program does terminal interaction, make it output a short -notice like this when it starts in an interactive mode: - - Copyright (C) - This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'. - This is free software, and you are welcome to redistribute it - under certain conditions; type `show c' for details. - -The hypothetical commands `show w' and `show c' should show the appropriate -parts of the General Public License. Of course, your program's commands -might be different; for a GUI interface, you would use an "about box". 
- - You should also get your employer (if you work as a programmer) or school, -if any, to sign a "copyright disclaimer" for the program, if necessary. -For more information on this, and how to apply and follow the GNU GPL, see -. - - The GNU General Public License does not permit incorporating your program -into proprietary programs. If your program is a subroutine library, you -may consider it more useful to permit linking proprietary applications with -the library. If this is what you want to do, use the GNU Lesser General -Public License instead of this License. But first, please read -. diff --git a/vendor/github.com/xen0n/gosmopolitan/README.md b/vendor/github.com/xen0n/gosmopolitan/README.md deleted file mode 100644 index 86a5e64e0..000000000 --- a/vendor/github.com/xen0n/gosmopolitan/README.md +++ /dev/null @@ -1,84 +0,0 @@ -# gosmopolitan - -![GitHub Workflow Status (main branch)](https://img.shields.io/github/actions/workflow/status/xen0n/gosmopolitan/go.yml?branch=main) -![Codecov](https://img.shields.io/codecov/c/gh/xen0n/gosmopolitan) -![GitHub license info](https://img.shields.io/github/license/xen0n/gosmopolitan) -![GitHub go.mod Go version](https://img.shields.io/github/go-mod/go-version/xen0n/gosmopolitan) -[![Go Report Card](https://goreportcard.com/badge/github.com/xen0n/gosmopolitan)](https://goreportcard.com/report/github.com/xen0n/gosmopolitan) -[![Go Reference](https://pkg.go.dev/badge/github.com/xen0n/gosmopolitan.svg)](https://pkg.go.dev/github.com/xen0n/gosmopolitan) - -[简体中文](./README.zh-Hans.md) - -`gosmopolitan` checks your Go codebase for code smells that may prove to be -hindrance to internationalization ("i18n") and/or localization ("l10n"). - -The name is a wordplay on "cosmopolitan". - -## Checks - -Currently `gosmopolitan` checks for the following anti-patterns: - -* Occurrences of string literals containing characters from certain writing - systems. - - Existence of such strings often means the relevant logic is hard to - internationalize, or at least, require special care when doing i18n/l10n. - -* Usages of `time.Local`. - - An internationalized app or library should almost never process time and - date values in the timezone in which it is running; instead one should use - the respective user preference, or the timezone as dictated by the domain - logic. - -Note that local times are produced in a lot more ways than via direct casts to -`time.Local` alone, such as: - -* `time.LoadLocation("Local")` -* received from a `time.Ticker` -* functions explicitly documented to return local times - * `time.Now()` - * `time.Unix()` - * `time.UnixMilli()` - * `time.UnixMicro()` - -Proper identification of these use cases require a fairly complete dataflow -analysis pass, which is not implemented currently. In addition, right now you -have to pay close attention to externally-provided time values (such as from -your framework like Gin or gRPC) as they are not properly tracked either. - -## Caveats - -Note that the checks implemented here are only suitable for codebases with the -following characteristics, and may not suit your particular project's needs: - -* Originally developed for an audience using non-Latin writing system(s), -* Returns bare strings intended for humans containing such non-Latin characters, and -* May occasionally (or frequently) refer to the system timezone, but is - architecturally forbidden/discouraged to just treat the system timezone as - the reference timezone. 
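
For a concrete sense of what these checks flag, here is a minimal, hypothetical Go sketch (not taken from the removed vendored sources; the package and function names are invented purely for illustration). Under the linter's default settings it would report both the Han-script string literal and the `time.Local` reference:

```go
package greeter

import (
	"fmt"
	"time"
)

// Greet returns a human-readable message. The Han-script literal below is
// exactly the kind of string the script check reports (the default watch
// list is "Han").
func Greet(name string) string {
	return fmt.Sprintf("你好, %s", name)
}

// Midnight truncates the current time to the start of the day. The explicit
// time.Local reference is the other reported pattern (unless the
// "allowtimelocal" flag is set).
func Midnight() time.Time {
	now := time.Now()
	return time.Date(now.Year(), now.Month(), now.Day(), 0, 0, 0, 0, time.Local)
}
```

Either finding can be suppressed through the analyzer flags shown in the removed `lib.go` below, e.g. `escapehatches` for marked strings or `allowtimelocal` for timezone usage.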
- -For example, the lints may prove valuable if you're revamping a web service -originally targetting the Chinese market (hence producing strings with Chinese -characters all over the place) to be more i18n-aware. Conversely, if you want -to identify some of the i18n-naïve places in an English-only app, the linter -will output nothing. - -## golangci-lint integration - -`gosmopolitan` support [has been merged][gcl-pr] into [`golangci-lint`][gcl-home], -and will be usable out-of-the-box in golangci-lint v1.53.0 or later. - -Due to the opinionated coding style this linter advocates and checks for, if -you have `enable-all: true` in your `golangci.yml` and your project deals a -lot with Chinese text and/or `time.Local`, then you'll get flooded with lints -when you upgrade to golangci-lint v1.53.0. Just disable this linter (and -better yet, move away from `enable-all: true`) if the style does not suit your -specific use case. - -[gcl-pr]: https://github.com/golangci/golangci-lint/pull/3458 -[gcl-home]: https://golangci-lint.run - -## License - -`gosmopolitan` is licensed under the GPL license, version 3 or later. diff --git a/vendor/github.com/xen0n/gosmopolitan/README.zh-Hans.md b/vendor/github.com/xen0n/gosmopolitan/README.zh-Hans.md deleted file mode 100644 index 682d10880..000000000 --- a/vendor/github.com/xen0n/gosmopolitan/README.zh-Hans.md +++ /dev/null @@ -1,69 +0,0 @@ -# gosmopolitan - -![GitHub Workflow Status (main branch)](https://img.shields.io/github/actions/workflow/status/xen0n/gosmopolitan/go.yml?branch=main) -![Codecov](https://img.shields.io/codecov/c/gh/xen0n/gosmopolitan) -![GitHub license info](https://img.shields.io/github/license/xen0n/gosmopolitan) -![GitHub go.mod Go version](https://img.shields.io/github/go-mod/go-version/xen0n/gosmopolitan) -[![Go Report Card](https://goreportcard.com/badge/github.com/xen0n/gosmopolitan)](https://goreportcard.com/report/github.com/xen0n/gosmopolitan) -[![Go Reference](https://pkg.go.dev/badge/github.com/xen0n/gosmopolitan.svg)](https://pkg.go.dev/github.com/xen0n/gosmopolitan) - -[English](./README.md) - -用 `gosmopolitan` 检查你的 Go 代码库里有没有国际化(“i18n“)或者本地化(”l10n“)的阻碍。 - -项目名字来自“cosmopolitan”的文字游戏。 - -## 检查 - -`gosmopolitan` 目前会检查以下的反模式(anti-patterns): - -* 含有来自特定书写系统字符的字符串字面量(string literals)。 - - 项目中存在这种字符串,通常意味着相关的逻辑不便于国际化,或者至少在国际化/本地化适配过程中会涉及特殊对待。 - -* `time.Local` 的使用。 - - 支持国际化的应用或程序库,几乎永远不应以程序当前运行环境的时区来处理时间、日期数据。 - 相反,在这种场景下,开发者应该使用相应的用户偏好,或者按照领域逻辑确定应该使用的时区。 - -注意:除了直接向 `time.Local` 转换之外,还有很多其他写法会产生本地时区的时刻,例如: - -* `time.LoadLocation("Local")` -* 从 `time.Ticker` 收到的值 -* 文档中明确了会返回本地时刻的函数 - * `time.Now()` - * `time.Unix()` - * `time.UnixMilli()` - * `time.UnixMicro()` - -为了正确识别这些使用场景,需要有一个相当完善的数据流分析 pass,目前还没实现。 -此外,当前您还需要自行密切注意从外部传入的时刻值(例如从您使用的 Gin 或 gRPC -之类框架传来的那些),因为这些值当前也没有被正确跟踪。 - -## 注意事项 - -请注意,本库中实现的检查仅适用于具有以下性质的代码库,因此可能不适用于您的具体场景: - -* 项目原先是为使用非拉丁字母书写系统的受众群体开发的, -* 项目会返回包含这些非拉丁字母字符的裸的字符串(即,未经处理或变换的), -* 项目可能偶尔(或者经常)引用程序当前运行环境的系统时区,但项目架构上禁止或不建议把系统时区直接作为业务参考时区使用。 - -举个例子:如果您在翻新一个本来面向中国用户群体(因此到处都在产生含有汉字的字符串)的 -web 服务,以使其更加国际化,这里的 lints 可能会很有价值。 -反之,如果您想在一个仅支持英语的应用里,寻找其中不利于国际化的那部分写法,本 -linter 则什么都不会输出。 - -## 与 golangci-lint 集成 - -`gosmopolitan` 支持[已经被合并][gcl-pr]入 [`golangci-lint`][gcl-home] 上游,在 golangci-lint v1.53.0 及以后的版本可以开箱即用。 - -[gcl-pr]: https://github.com/golangci/golangci-lint/pull/3458 -[gcl-home]: https://golangci-lint.run - -由于本 linter 倡导和检查的代码风格带有鲜明立场,如果您在 `golangci.yml` 开了 -`enable-all: true` 并且您的项目处理很多中文文本或者 `time.Local`,那么您一旦升级到 -golangci-lint v1.53.0 就将被 lints 淹没。如果这种代码风格不适合您的具体使用场景,直接禁用本 linter(或者更彻底一些,不要 
`enable-all: true` 了)就好。 - -## 许可证 - -`gosmopolitan` 以 GPL v3 或更新的版本许可使用。 diff --git a/vendor/github.com/xen0n/gosmopolitan/lib.go b/vendor/github.com/xen0n/gosmopolitan/lib.go deleted file mode 100644 index 67b1151c7..000000000 --- a/vendor/github.com/xen0n/gosmopolitan/lib.go +++ /dev/null @@ -1,385 +0,0 @@ -// SPDX-License-Identifier: GPL-3.0-or-later - -package gosmopolitan - -import ( - "fmt" - "go/ast" - "go/token" - "go/types" - "regexp" - "strings" - "unicode" - - "golang.org/x/text/runes" - "golang.org/x/tools/go/analysis" - "golang.org/x/tools/go/analysis/passes/inspect" - "golang.org/x/tools/go/ast/inspector" -) - -const analyzerName = "gosmopolitan" -const analyzerDoc = "Report certain i18n/l10n anti-patterns in your Go codebase" - -type AnalyzerConfig struct { - // LookAtTests is flag controlling whether the lints are going to look at - // test files, despite other config knobs of the Go analysis tooling - // framework telling us otherwise. - // - // By default gosmopolitan does not look at test files, because i18n-aware - // apps most probably have many unmarked strings in test cases, and names - // and descriptions *of* test cases are probably in the program's original - // natural language too. - LookAtTests bool - // EscapeHatches is optionally a list of fully qualified names, in the - // `(full/pkg/path).name` form, to act as "i18n escape hatches". Inside - // call-like expressions to those names, the string literal script check - // is ignored. - // - // With this functionality in place, you can use type aliases like - // `type R = string` as markers, or have explicitly i18n-aware functions - // exempt from the checks. - EscapeHatches []string - // WatchForScripts is optionally a list of Unicode script names to watch - // for any usage in string literals. The range of supported scripts is - // determined by the [unicode.Scripts] map and values are case-sensitive. - WatchForScripts []string - // AllowTimeLocal is flag controlling whether usages of [time.Local] are - // allowed (i.e. not reported). 
- AllowTimeLocal bool -} - -func NewAnalyzer() *analysis.Analyzer { - var lookAtTests bool - var escapeHatchesStr string - var watchForScriptsStr string - var allowTimeLocal bool - - a := &analysis.Analyzer{ - Name: analyzerName, - Doc: analyzerDoc, - Requires: []*analysis.Analyzer{ - inspect.Analyzer, - }, - Run: func(p *analysis.Pass) (any, error) { - cfg := AnalyzerConfig{ - LookAtTests: lookAtTests, - EscapeHatches: strings.Split(escapeHatchesStr, ","), - WatchForScripts: strings.Split(watchForScriptsStr, ","), - AllowTimeLocal: allowTimeLocal, - } - pctx := processCtx{cfg: &cfg, p: p} - return pctx.run() - }, - RunDespiteErrors: false, - } - - a.Flags.BoolVar(&lookAtTests, - "lookattests", - false, - "also check the test files", - ) - a.Flags.StringVar( - &escapeHatchesStr, - "escapehatches", - "", - "comma-separated list of fully qualified names to act as i18n escape hatches", - ) - a.Flags.StringVar( - &watchForScriptsStr, - "watchforscripts", - "Han", - "comma-separated list of Unicode scripts to watch out for occurrence in string literals", - ) - a.Flags.BoolVar(&allowTimeLocal, - "allowtimelocal", - false, - "allow time.Local usages", - ) - - return a -} - -func NewAnalyzerWithConfig(cfg *AnalyzerConfig) *analysis.Analyzer { - return &analysis.Analyzer{ - Name: analyzerName, - Doc: analyzerDoc, - Requires: []*analysis.Analyzer{ - inspect.Analyzer, - }, - Run: func(p *analysis.Pass) (any, error) { - pctx := processCtx{cfg: cfg, p: p} - return pctx.run() - }, - RunDespiteErrors: false, - } -} - -var DefaultAnalyzer = NewAnalyzer() - -func validateUnicodeScriptName(name string) error { - if _, ok := unicode.Scripts[name]; !ok { - return fmt.Errorf("invalid Unicode script name: %s", name) - } - return nil -} - -// example input: ["Han", "Arabic"] -// example output: `\p{Han}|\p{Arabic}` -// assumes len(scriptNames) > 0 -func makeUnicodeScriptMatcherRegexpString(scriptNames []string) string { - var sb strings.Builder - for i, s := range scriptNames { - if i > 0 { - sb.WriteRune('|') - } - sb.WriteString(`\p{`) - sb.WriteString(s) - sb.WriteRune('}') - } - return sb.String() -} - -func makeUnicodeScriptMatcherRegexp(scriptNames []string) (*regexp.Regexp, error) { - return regexp.Compile(makeUnicodeScriptMatcherRegexpString(scriptNames)) -} - -type processCtx struct { - cfg *AnalyzerConfig - p *analysis.Pass -} - -func mapSlice[T any, U any](x []T, fn func(T) U) []U { - if x == nil { - return nil - } - y := make([]U, len(x)) - for i, v := range x { - y[i] = fn(v) - } - return y -} - -func sliceToSet[T comparable](x []T) map[T]struct{} { - // lo.SliceToMap(x, func(k T) (T, struct{}) { return k, struct{}{} }) - y := make(map[T]struct{}, len(x)) - for _, k := range x { - y[k] = struct{}{} - } - return y -} - -func getFullyQualifiedName(x types.Object) string { - pkg := x.Pkg() - if pkg == nil { - return x.Name() - } - return fmt.Sprintf("%s.%s", pkg.Path(), x.Name()) -} - -// if input is in the "(%s).%s" form, remove the parens, else return the -// unchanged input -// -// this is for maintaining compatibility with the previous FQN notation that -// was born out of my confusion (the previous notation, while commonly seen, -// seems to be only for methods or pointer receiver types; the parens-less -// form is in fact unambiguous, because Go identifiers can't contain periods.) 
-func unquoteInputFQN(x string) string { - if len(x) == 0 || x[0] != '(' { - return x - } - - before, after, found := strings.Cut(x[1:], ")") - if !found { - // malformed input: string in "(xxxxx" form with unclosed parens! - // in this case, only removing the opening parens might be better than - // doing nothing after all - return x[1:] - } - - // at this point, - // input: "(foo).bar" - // before: "foo" - // after: ".bar" - return before + after -} - -func (c *processCtx) run() (any, error) { - escapeHatchesSet := sliceToSet(mapSlice(c.cfg.EscapeHatches, unquoteInputFQN)) - - if len(c.cfg.WatchForScripts) == 0 { - c.cfg.WatchForScripts = []string{"Han"} - } - - for _, s := range c.cfg.WatchForScripts { - if err := validateUnicodeScriptName(s); err != nil { - return nil, err - } - } - - charRE, err := makeUnicodeScriptMatcherRegexp(c.cfg.WatchForScripts) - if err != nil { - return nil, err - } - - usq := newUnicodeScriptQuerier(c.cfg.WatchForScripts) - - insp := c.p.ResultOf[inspect.Analyzer].(*inspector.Inspector) - - // support ignoring the test files, because test files could be full of - // i18n and l10n fixtures, and we want to focus on the actual run-time - // logic - // - // TODO: is there a way to both ignore test files earlier, and make use of - // inspect.Analyzer's cached results? currently Inspector doesn't provide - // a way to selectively travese some files' AST but not others. - isBelongingToTestFiles := func(n ast.Node) bool { - return strings.HasSuffix(c.p.Fset.File(n.Pos()).Name(), "_test.go") - } - - shouldSkipTheContainingFile := func(n ast.Node) bool { - if c.cfg.LookAtTests { - return false - } - return isBelongingToTestFiles(n) - } - - insp.Nodes(nil, func(n ast.Node, push bool) bool { - // we only need to look at each node once - if !push { - return false - } - - if shouldSkipTheContainingFile(n) { - return false - } - - // skip blocks that can contain string literals but are not otherwise - // interesting for us - switch n.(type) { - case *ast.ImportSpec, *ast.TypeSpec: - // import blocks, type declarations - return false - } - - // and don't look inside escape hatches - referentFQN := c.getFullyQualifiedNameOfReferent(n) - if referentFQN != "" { - _, isEscapeHatch := escapeHatchesSet[referentFQN] - // if isEscapeHatch: don't recurse (false) - return !isEscapeHatch - } - - // check only string literals - lit, ok := n.(*ast.BasicLit) - if !ok { - return true - } - if lit.Kind != token.STRING { - return true - } - - // report string literals containing characters of given script (in - // the sense of "writing system") - if charRE.MatchString(lit.Value) { - match := charRE.FindIndex([]byte(lit.Value)) - matchCh := []byte(lit.Value)[match[0]:match[1]] - scriptName := usq.queryScriptForRuneBytes(matchCh) - - c.p.Report(analysis.Diagnostic{ - Pos: lit.Pos() + token.Pos(match[0]), - End: lit.Pos() + token.Pos(match[1]), - Message: fmt.Sprintf("string literal contains rune in %s script", scriptName), - }) - } - - return true - }) - - if !c.cfg.AllowTimeLocal { - // check time.Local usages - insp.Nodes([]ast.Node{(*ast.Ident)(nil)}, func(n ast.Node, push bool) bool { - // we only need to look at each node once - if !push { - return false - } - - if shouldSkipTheContainingFile(n) { - return false - } - - ident := n.(*ast.Ident) - - d := c.p.TypesInfo.ObjectOf(ident) - if d == nil || d.Pkg() == nil { - return true - } - - if d.Pkg().Path() == "time" && d.Name() == "Local" { - c.p.Report(analysis.Diagnostic{ - Pos: n.Pos(), - End: n.End(), - Message: "usage of time.Local", - 
}) - } - - return true - }) - } - - return nil, nil -} - -func (c *processCtx) getFullyQualifiedNameOfReferent(n ast.Node) string { - var ident *ast.Ident - switch e := n.(type) { - case *ast.CallExpr: - ident = getIdentOfTypeOfExpr(e.Fun) - - case *ast.CompositeLit: - ident = getIdentOfTypeOfExpr(e.Type) - - default: - return "" - } - - referent := c.p.TypesInfo.Uses[ident] - if referent == nil { - return "" - } - - return getFullyQualifiedName(referent) -} - -func getIdentOfTypeOfExpr(e ast.Expr) *ast.Ident { - switch x := e.(type) { - case *ast.Ident: - return x - case *ast.SelectorExpr: - return x.Sel - } - return nil -} - -type unicodeScriptQuerier struct { - sets map[string]runes.Set -} - -func newUnicodeScriptQuerier(scriptNames []string) *unicodeScriptQuerier { - sets := make(map[string]runes.Set, len(scriptNames)) - for _, s := range scriptNames { - sets[s] = runes.In(unicode.Scripts[s]) - } - return &unicodeScriptQuerier{ - sets: sets, - } -} - -func (x *unicodeScriptQuerier) queryScriptForRuneBytes(b []byte) string { - r := []rune(string(b))[0] - for s, set := range x.sets { - if set.Contains(r) { - return s - } - } - return "" -} diff --git a/vendor/github.com/yagipy/maintidx/.gitignore b/vendor/github.com/yagipy/maintidx/.gitignore deleted file mode 100644 index a676215fa..000000000 --- a/vendor/github.com/yagipy/maintidx/.gitignore +++ /dev/null @@ -1,2 +0,0 @@ -.idea -bin diff --git a/vendor/github.com/yagipy/maintidx/LICENSE b/vendor/github.com/yagipy/maintidx/LICENSE deleted file mode 100644 index b94c2ede8..000000000 --- a/vendor/github.com/yagipy/maintidx/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -MIT License - -Copyright (c) 2021 Hiroyuki Yagihashi - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. diff --git a/vendor/github.com/yagipy/maintidx/Makefile b/vendor/github.com/yagipy/maintidx/Makefile deleted file mode 100644 index 14b8fc979..000000000 --- a/vendor/github.com/yagipy/maintidx/Makefile +++ /dev/null @@ -1,2 +0,0 @@ -build: - go build -o bin/maintidx ./cmd/maintidx diff --git a/vendor/github.com/yagipy/maintidx/README.md b/vendor/github.com/yagipy/maintidx/README.md deleted file mode 100644 index 8d5e26df0..000000000 --- a/vendor/github.com/yagipy/maintidx/README.md +++ /dev/null @@ -1,45 +0,0 @@ -# maintidx -`maintidx` measures the maintainability index of each function. 
-https://docs.microsoft.com/en-us/visualstudio/code-quality/code-metrics-maintainability-index-range-and-meaning - -## Installation -### Go version < 1.16 -```shell -go get -u github.com/yagipy/maintidx/cmd/maintidx -``` - -### Go version 1.16+ -```shell -go install github.com/yagipy/maintidx/cmd/maintidx -``` - -## Usage -### standalone -```shell -maintidx ./... -``` - -### with go run -No installation required -```shell -go run github.com/yagipy/maintidx/cmd/maintidx ./... -``` - -### with go vet -```shell -go vet -vettool=`which maintidx` ./... -``` - -## Flag -```shell -Flags: - -under int - show functions with maintainability index < N only. (default 20) -``` - -## TODO -- [ ] Setup execute env on container -- [ ] Impl cyc.Cyc.Calc() -- [ ] Move maintidx.Visitor.PrintHalstVol to halstval package -- [ ] Consider the necessity of halstvol.incrIfAllTrue -- [ ] Test under pkg file diff --git a/vendor/github.com/yagipy/maintidx/maintidx.go b/vendor/github.com/yagipy/maintidx/maintidx.go deleted file mode 100644 index 31ad9ca0c..000000000 --- a/vendor/github.com/yagipy/maintidx/maintidx.go +++ /dev/null @@ -1,63 +0,0 @@ -package maintidx - -import ( - "go/ast" - "go/token" - "golang.org/x/tools/go/analysis" - "golang.org/x/tools/go/analysis/passes/inspect" - "golang.org/x/tools/go/ast/inspector" -) - -const doc = "maintidx measures the maintainability index of each function." - -var Analyzer = &analysis.Analyzer{ - Name: "maintidx", - Doc: doc, - Run: run, - Requires: []*analysis.Analyzer{ - inspect.Analyzer, - }, -} - -var under int - -func init() { - Analyzer.Flags.IntVar(&under, "under", 20, "show functions with maintainability index < N only.") -} - -func run(pass *analysis.Pass) (interface{}, error) { - i := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) - - nodeFilter := []ast.Node{ - (*ast.FuncDecl)(nil), - } - - i.Preorder(nodeFilter, func(n ast.Node) { - switch n := n.(type) { - case *ast.FuncDecl: - v := analyze(n) - - v.Coef.Cyc.Calc() - v.Coef.HalstVol.Calc() - v.calc(loc(pass.Fset, n)) - if v.MaintIdx < under { - pass.Reportf(n.Pos(), "Function name: %v, Cyclomatic Complexity: %v, Halstead Volume: %0.2f, Maintainability Index: %v", n.Name, v.Coef.Cyc.Val, v.Coef.HalstVol.Val, v.MaintIdx) - } - } - }) - - return nil, nil -} - -func analyze(n ast.Node) Visitor { - v := NewVisitor() - ast.Walk(v, n) - return *v -} - -func loc(fs *token.FileSet, n *ast.FuncDecl) int { - f := fs.File(n.Pos()) - startLine := f.Line(n.Pos()) - endLine := f.Line(n.End()) - return endLine - startLine + 1 -} diff --git a/vendor/github.com/yagipy/maintidx/pkg/cyc/cyc.go b/vendor/github.com/yagipy/maintidx/pkg/cyc/cyc.go deleted file mode 100644 index 9ea009106..000000000 --- a/vendor/github.com/yagipy/maintidx/pkg/cyc/cyc.go +++ /dev/null @@ -1,36 +0,0 @@ -package cyc - -import ( - "go/ast" - "go/token" -) - -type Cyc struct { - Val int - Coef Coef -} - -type Coef struct{} - -func (c *Cyc) Analyze(n ast.Node) { - switch n := n.(type) { - case *ast.IfStmt, *ast.ForStmt, *ast.RangeStmt: - c.Val++ - case *ast.CaseClause: - if n.List != nil { - c.Val++ - } - case *ast.CommClause: - if n.Comm != nil { - c.Val++ - } - case *ast.BinaryExpr: - if n.Op == token.LAND || n.Op == token.LOR { - c.Val++ - } - } -} - -// TODO: Implement -func (c *Cyc) Calc() { -} diff --git a/vendor/github.com/yagipy/maintidx/pkg/halstvol/halstvol.go b/vendor/github.com/yagipy/maintidx/pkg/halstvol/halstvol.go deleted file mode 100644 index f0212759b..000000000 --- a/vendor/github.com/yagipy/maintidx/pkg/halstvol/halstvol.go +++ 
/dev/null @@ -1,71 +0,0 @@ -package halstvol - -import ( - "go/ast" - "math" -) - -type HalstVol struct { - Val float64 - Coef Coef -} - -type Coef struct { - Opt map[string]int - Opd map[string]int -} - -func (v *HalstVol) Analyze(n ast.Node) { - switch n := n.(type) { - case *ast.FuncDecl, *ast.GenDecl: - v.handleDecl(n) - case *ast.ParenExpr, *ast.IndexExpr, *ast.SliceExpr, *ast.TypeAssertExpr, *ast.CallExpr, *ast.StarExpr, *ast.UnaryExpr, *ast.BinaryExpr, *ast.KeyValueExpr: - v.handleExpr(n) - case *ast.BasicLit, *ast.CompositeLit: - v.handleLit(n) - case *ast.Ident: - v.handleIdent(n) - case *ast.Ellipsis: - incrIfAllTrue(v.Coef.Opt, "...", []bool{n.Ellipsis.IsValid()}) - case *ast.FuncType: - incrIfAllTrue(v.Coef.Opt, "func", []bool{n.Func.IsValid()}) - v.Coef.Opt["()"]++ - case *ast.ChanType: - incrIfAllTrue(v.Coef.Opt, "chan", []bool{n.Begin.IsValid()}) - incrIfAllTrue(v.Coef.Opt, "<-", []bool{n.Arrow.IsValid()}) - case *ast.SendStmt, *ast.IncDecStmt, *ast.AssignStmt, *ast.GoStmt, *ast.DeferStmt, *ast.ReturnStmt, *ast.BranchStmt, *ast.BlockStmt, *ast.IfStmt, *ast.SwitchStmt, *ast.SelectStmt, *ast.ForStmt, *ast.RangeStmt: - v.handleStmt(n) - case *ast.CaseClause: - v.handleCaseClause(n) - } -} - -func (v *HalstVol) Calc() { - distOpt := len(v.Coef.Opt) - distOpd := len(v.Coef.Opd) - - var sumOpt, sumOpd int - - for _, val := range v.Coef.Opt { - sumOpt += val - } - - for _, val := range v.Coef.Opd { - sumOpd += val - } - - vocab := distOpt + distOpd - length := sumOpt + sumOpd - - v.Val = float64(length) * math.Log2(float64(vocab)) -} - -// TODO: Consider the necessity -func incrIfAllTrue(coef map[string]int, sym string, cond []bool) { - for _, ok := range cond { - if !ok { - return - } - } - coef[sym]++ -} diff --git a/vendor/github.com/yagipy/maintidx/pkg/halstvol/handle.go b/vendor/github.com/yagipy/maintidx/pkg/halstvol/handle.go deleted file mode 100644 index 9f5e33500..000000000 --- a/vendor/github.com/yagipy/maintidx/pkg/halstvol/handle.go +++ /dev/null @@ -1,151 +0,0 @@ -package halstvol - -import "go/ast" - -func (v *HalstVol) handleDecl(decl ast.Node) { - switch n := decl.(type) { - case *ast.FuncDecl: - if n.Recv == nil { - // In the case of receiver functions, the function name is incremented in *ast.Ident - v.Coef.Opt[n.Name.Name]++ - } else { - v.Coef.Opt["()"]++ - } - case *ast.GenDecl: - if n.Lparen.IsValid() && n.Rparen.IsValid() { - v.Coef.Opt["()"]++ - } - - if n.Tok.IsOperator() { - v.Coef.Opt[n.Tok.String()]++ - } else { - v.Coef.Opd[n.Tok.String()]++ - } - } -} - -func (v *HalstVol) handleIdent(ident *ast.Ident) { - if ident.Obj == nil { - v.Coef.Opt[ident.Name]++ - } else { - if ident.Obj.Kind.String() != "func" { - v.Coef.Opd[ident.Name]++ - } - } -} - -func (v *HalstVol) handleLit(lit ast.Node) { - switch n := lit.(type) { - case *ast.BasicLit: - if n.Kind.IsLiteral() { - v.Coef.Opd[n.Value]++ - } else { - v.Coef.Opt[n.Value]++ - } - case *ast.CompositeLit: - incrIfAllTrue(v.Coef.Opt, "{}", []bool{n.Lbrace.IsValid(), n.Rbrace.IsValid()}) - } -} - -func (v *HalstVol) handleExpr(expr ast.Node) { - switch n := expr.(type) { - case *ast.ParenExpr: - incrIfAllTrue(v.Coef.Opt, "()", []bool{n.Lparen.IsValid(), n.Rparen.IsValid()}) - case *ast.IndexExpr: - incrIfAllTrue(v.Coef.Opt, "{}", []bool{n.Lbrack.IsValid(), n.Rbrack.IsValid()}) - case *ast.SliceExpr: - incrIfAllTrue(v.Coef.Opt, "[]", []bool{n.Lbrack.IsValid(), n.Rbrack.IsValid()}) - case *ast.TypeAssertExpr: - incrIfAllTrue(v.Coef.Opt, "()", []bool{n.Lparen.IsValid(), n.Rparen.IsValid()}) - case 
*ast.CallExpr: - incrIfAllTrue(v.Coef.Opt, "()", []bool{n.Lparen.IsValid(), n.Rparen.IsValid()}) - incrIfAllTrue(v.Coef.Opt, "...", []bool{n.Ellipsis != 0}) - case *ast.StarExpr: - incrIfAllTrue(v.Coef.Opt, "*", []bool{n.Star.IsValid()}) - case *ast.UnaryExpr: - if n.Op.IsOperator() { - v.Coef.Opt[n.Op.String()]++ - } else { - v.Coef.Opd[n.Op.String()]++ - } - case *ast.BinaryExpr: - v.Coef.Opt[n.Op.String()]++ - case *ast.KeyValueExpr: - incrIfAllTrue(v.Coef.Opt, ":", []bool{n.Colon.IsValid()}) - } -} - -func (v *HalstVol) handleStmt(stmt ast.Node) { - switch n := stmt.(type) { - case *ast.SendStmt: - incrIfAllTrue(v.Coef.Opt, "<-", []bool{n.Arrow.IsValid()}) - case *ast.IncDecStmt: - incrIfAllTrue(v.Coef.Opt, n.Tok.String(), []bool{n.Tok.IsOperator()}) - case *ast.AssignStmt: - if n.Tok.IsOperator() { - v.Coef.Opt[n.Tok.String()]++ - } - case *ast.GoStmt: - if n.Go.IsValid() { - v.Coef.Opt["go"]++ - } - case *ast.DeferStmt: - if n.Defer.IsValid() { - v.Coef.Opt["defer"]++ - } - case *ast.ReturnStmt: - if n.Return.IsValid() { - v.Coef.Opt["return"]++ - } - case *ast.BranchStmt: - if n.Tok.IsOperator() { - v.Coef.Opt[n.Tok.String()]++ - } else { - v.Coef.Opd[n.Tok.String()]++ - } - case *ast.BlockStmt: - if n.Lbrace.IsValid() && n.Rbrace.IsValid() { - v.Coef.Opt["{}"]++ - } - case *ast.IfStmt: - if n.If.IsValid() { - v.Coef.Opt["if"]++ - } - if n.Else != nil { - v.Coef.Opt["else"]++ - } - case *ast.SwitchStmt: - if n.Switch.IsValid() { - v.Coef.Opt["switch"]++ - } - case *ast.SelectStmt: - if n.Select.IsValid() { - v.Coef.Opt["select"]++ - } - case *ast.ForStmt: - if n.For.IsValid() { - v.Coef.Opt["for"]++ - } - case *ast.RangeStmt: - if n.For.IsValid() { - v.Coef.Opt["for"]++ - } - if n.Key != nil { - if n.Tok.IsOperator() { - v.Coef.Opt[n.Tok.String()]++ - } else { - v.Coef.Opd[n.Tok.String()]++ - } - } - v.Coef.Opt["range"]++ - } -} - -func (v *HalstVol) handleCaseClause(cc *ast.CaseClause) { - if cc.List == nil { - v.Coef.Opt["default"]++ - } - if cc.Colon.IsValid() { - v.Coef.Opt[":"]++ - } -} diff --git a/vendor/github.com/yagipy/maintidx/visitor.go b/vendor/github.com/yagipy/maintidx/visitor.go deleted file mode 100644 index e6f74c50d..000000000 --- a/vendor/github.com/yagipy/maintidx/visitor.go +++ /dev/null @@ -1,77 +0,0 @@ -package maintidx - -import ( - "github.com/yagipy/maintidx/pkg/cyc" - "github.com/yagipy/maintidx/pkg/halstvol" - "go/ast" - "math" - "sort" -) - -type Visitor struct { - MaintIdx int - Coef Coef -} - -var _ ast.Visitor = &Visitor{} - -type Coef struct { - Cyc cyc.Cyc - HalstVol halstvol.HalstVol -} - -func NewVisitor() *Visitor { - return &Visitor{ - MaintIdx: 0, - Coef: Coef{ - Cyc: cyc.Cyc{ - Val: 1, - Coef: cyc.Coef{}, - }, - HalstVol: halstvol.HalstVol{ - Val: 0.0, - Coef: halstvol.Coef{ - Opt: map[string]int{}, - Opd: map[string]int{}, - }, - }, - }, - } -} - -func (v *Visitor) Visit(n ast.Node) ast.Visitor { - v.Coef.Cyc.Analyze(n) - v.Coef.HalstVol.Analyze(n) - return v -} - -// Calc https://docs.microsoft.com/ja-jp/archive/blogs/codeanalysis/maintainability-index-range-and-meaning -func (v *Visitor) calc(loc int) { - origVal := 171.0 - 5.2*math.Log(v.Coef.HalstVol.Val) - 0.23*float64(v.Coef.Cyc.Val) - 16.2*math.Log(float64(loc)) - normVal := int(math.Max(0.0, origVal*100.0/171.0)) - v.MaintIdx = normVal -} - -// TODO: Move halstvol package -func (v *Visitor) printHalstVol() { - sortedOpt := make([]string, len(v.Coef.HalstVol.Coef.Opt)) - sortedOpd := make([]string, len(v.Coef.HalstVol.Coef.Opd)) - optIndex := 0 - opdIndex := 0 - for key := range 
v.Coef.HalstVol.Coef.Opt { - sortedOpt[optIndex] = key - optIndex++ - } - for key := range v.Coef.HalstVol.Coef.Opd { - sortedOpd[opdIndex] = key - opdIndex++ - } - sort.Strings(sortedOpt) - sort.Strings(sortedOpd) - for _, val := range sortedOpt { - println("operators", val, v.Coef.HalstVol.Coef.Opt[val]) - } - for _, val := range sortedOpd { - println("operands", val, v.Coef.HalstVol.Coef.Opd[val]) - } -} diff --git a/vendor/github.com/yeya24/promlinter/.gitignore b/vendor/github.com/yeya24/promlinter/.gitignore deleted file mode 100644 index bffb9a029..000000000 --- a/vendor/github.com/yeya24/promlinter/.gitignore +++ /dev/null @@ -1,20 +0,0 @@ -# Binaries for programs and plugins -*.exe -*.exe~ -*.dll -*.so -*.dylib - -# Test binary, built with `go test -c` -*.test - -# binary -bin - -# Output of the go coverage tool, specifically when used with LiteIDE -*.out - -# Dependency directories (remove the comment below to include it) -# vendor/ - -.idea diff --git a/vendor/github.com/yeya24/promlinter/LICENSE b/vendor/github.com/yeya24/promlinter/LICENSE deleted file mode 100644 index 261eeb9e9..000000000 --- a/vendor/github.com/yeya24/promlinter/LICENSE +++ /dev/null @@ -1,201 +0,0 @@ - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. 
- - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. 
You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. 
In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright [yyyy] [name of copyright owner] - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. diff --git a/vendor/github.com/yeya24/promlinter/Makefile b/vendor/github.com/yeya24/promlinter/Makefile deleted file mode 100644 index d50db1ee7..000000000 --- a/vendor/github.com/yeya24/promlinter/Makefile +++ /dev/null @@ -1,39 +0,0 @@ -GOOS := $(if $(GOOS),$(GOOS),linux) -GOARCH := $(if $(GOARCH),$(GOARCH),amd64) -GO=CGO_ENABLED=0 GOOS=$(GOOS) GOARCH=$(GOARCH) GO111MODULE=on go -GOVERSION = $(shell $(GO) version | cut -c 14- | cut -d' ' -f1) - -GIT_COMMIT = $(shell git rev-parse --short HEAD) - -BUILDFLAGS ?= - -ifeq ($(shell expr ${GOVERSION} \>= 1.14), 1) - BUILDFLAGS += -mod=mod -endif - -PACKAGE_LIST := go list ./... -PACKAGES := $$($(PACKAGE_LIST)) -FILES_TO_FMT := $(shell find . -path -prune -o -name '*.go' -print) - -all: format build test - -format: vet fmt - -fmt: - @echo "gofmt" - @gofmt -w ${FILES_TO_FMT} - @git diff --exit-code . - -build: mod - $(GO) build ${BUILDFLAGS} -o ./bin/promlinter cmd/promlinter/main.go - -vet: - $(GO) vet ${BUILDFLAGS} ./... 
- -mod: - @echo "go mod tidy" - $(GO) mod tidy - @git diff --exit-code -- go.mod - -test: - $(GO) test ${BUILDFLAGS} ./... -cover $(PACKAGES) diff --git a/vendor/github.com/yeya24/promlinter/README.md b/vendor/github.com/yeya24/promlinter/README.md deleted file mode 100644 index 4fbffaa6b..000000000 --- a/vendor/github.com/yeya24/promlinter/README.md +++ /dev/null @@ -1,80 +0,0 @@ -# promlinter - -A linter for checking Prometheus metrics name via promlint. - -![usage](assets/promlinter.gif) - -## Installation - -### Go Get - -go get github.com/yeya24/promlinter/cmd/promlinter - -### Download from release - -Please go to https://github.com/yeya24/promlinter/releases. - -### Build from source - -#### Requirements - -- Go >= 1.13 -- make - -``` bash -git clone https://github.com/yeya24/promlinter.git -make build -``` - -Then you can find the `promlinter` binary file in the `./bin` directory. - -## Usage - -``` bash -usage: promlinter [] [ ...] - -Prometheus metrics linter for Go code. - -This tool can cover most of the patterns of metrics naming issues, but it cannot detect metric values that can only be determined in the runtime. - -By default it doesn't output parsing failures, if you want to see them, you can add --strict flag to enable it. - -It is also supported to disable the lint functions using repeated flag --disable. Current supported functions are: - - [Help]: Help detects issues related to the help text for a metric. - - [MetricUnits]: MetricUnits detects issues with metric unit names. - - [Counter]: Counter detects issues specific to counters, as well as patterns that should only be used with counters. - - [HistogramSummaryReserved]: HistogramSummaryReserved detects when other types of metrics use names or labels reserved for use by histograms and/or summaries. - - [MetricTypeInName]: MetricTypeInName detects when metric types are included in the metric name. - - [ReservedChars]: ReservedChars detects colons in metric names. - - [CamelCase]: CamelCase detects metric names and label names written in camelCase. - - [UnitAbbreviations]: UnitAbbreviations detects abbreviated units in the metric name. - -Flags: - -h, --help Show context-sensitive help (also try --help-long and --help-man). - --version Show application version. - -Commands: - help [...] - Show help. - - list [] [...] - List metrics name. - - lint [] [...] - Lint metrics via promlint. 
- -``` - -## Run tests - -``` bash -make test -``` diff --git a/vendor/github.com/yeya24/promlinter/promlinter.go b/vendor/github.com/yeya24/promlinter/promlinter.go deleted file mode 100644 index 2ed4d60a8..000000000 --- a/vendor/github.com/yeya24/promlinter/promlinter.go +++ /dev/null @@ -1,668 +0,0 @@ -package promlinter - -import ( - "fmt" - "go/ast" - "go/token" - "sort" - "strconv" - "strings" - - "github.com/prometheus/client_golang/prometheus" - "github.com/prometheus/client_golang/prometheus/testutil/promlint" - dto "github.com/prometheus/client_model/go" -) - -var ( - metricsType map[string]dto.MetricType - constMetricArgNum map[string]int - validOptsFields map[string]bool - lintFuncText map[string][]string - LintFuncNames []string -) - -func init() { - metricsType = map[string]dto.MetricType{ - "Counter": dto.MetricType_COUNTER, - "NewCounter": dto.MetricType_COUNTER, - "NewCounterVec": dto.MetricType_COUNTER, - "Gauge": dto.MetricType_GAUGE, - "NewGauge": dto.MetricType_GAUGE, - "NewGaugeVec": dto.MetricType_GAUGE, - "NewHistogram": dto.MetricType_HISTOGRAM, - "NewHistogramVec": dto.MetricType_HISTOGRAM, - "NewSummary": dto.MetricType_SUMMARY, - "NewSummaryVec": dto.MetricType_SUMMARY, - } - - constMetricArgNum = map[string]int{ - "MustNewConstMetric": 3, - "MustNewHistogram": 4, - "MustNewSummary": 4, - "NewLazyConstMetric": 3, - } - - // Doesn't contain ConstLabels since we don't need this field here. - validOptsFields = map[string]bool{ - "Name": true, - "Namespace": true, - "Subsystem": true, - "Help": true, - } - - lintFuncText = map[string][]string{ - "Help": {"no help text"}, - "MetricUnits": {"use base unit"}, - "Counter": {"counter metrics should"}, - "HistogramSummaryReserved": {"non-histogram", "non-summary"}, - "MetricTypeInName": {"metric name should not include type"}, - "ReservedChars": {"metric names should not contain ':'"}, - "CamelCase": {"'snake_case' not 'camelCase'"}, - "lintUnitAbbreviations": {"metric names should not contain abbreviated units"}, - } - - LintFuncNames = []string{"Help", "MetricUnits", "Counter", "HistogramSummaryReserved", - "MetricTypeInName", "ReservedChars", "CamelCase", "lintUnitAbbreviations"} -} - -type Setting struct { - Strict bool - DisabledLintFuncs []string -} - -// Issue contains metric name, error text and metric position. 
-type Issue struct { - Text string - Metric string - Pos token.Position -} - -type MetricFamilyWithPos struct { - MetricFamily *dto.MetricFamily - Pos token.Position -} - -type visitor struct { - fs *token.FileSet - metrics []MetricFamilyWithPos - issues []Issue - strict bool -} - -type opt struct { - namespace string - subsystem string - name string -} - -func RunList(fs *token.FileSet, files []*ast.File, strict bool) []MetricFamilyWithPos { - v := &visitor{ - fs: fs, - metrics: make([]MetricFamilyWithPos, 0), - issues: make([]Issue, 0), - strict: strict, - } - - for _, file := range files { - ast.Walk(v, file) - } - - sort.Slice(v.metrics, func(i, j int) bool { - return v.metrics[i].Pos.String() < v.metrics[j].Pos.String() - }) - return v.metrics -} - -func RunLint(fs *token.FileSet, files []*ast.File, s Setting) []Issue { - v := &visitor{ - fs: fs, - metrics: make([]MetricFamilyWithPos, 0), - issues: make([]Issue, 0), - strict: s.Strict, - } - - for _, file := range files { - ast.Walk(v, file) - } - - // lint metrics - for _, mfp := range v.metrics { - problems, err := promlint.NewWithMetricFamilies([]*dto.MetricFamily{mfp.MetricFamily}).Lint() - if err != nil { - panic(err) - } - - for _, p := range problems { - for _, disabledFunc := range s.DisabledLintFuncs { - for _, pattern := range lintFuncText[disabledFunc] { - if strings.Contains(p.Text, pattern) { - goto END - } - } - } - - v.issues = append(v.issues, Issue{ - Pos: mfp.Pos, - Metric: p.Metric, - Text: p.Text, - }) - - END: - } - } - - sort.Slice(v.issues, func(i, j int) bool { - return v.issues[i].Pos.String() < v.issues[j].Pos.String() - }) - return v.issues -} - -func (v *visitor) Visit(n ast.Node) ast.Visitor { - if n == nil { - return v - } - - switch t := n.(type) { - case *ast.CallExpr: - return v.parseCallerExpr(t) - - case *ast.SendStmt: - return v.parseSendMetricChanExpr(t) - } - - return v -} - -func (v *visitor) parseCallerExpr(call *ast.CallExpr) ast.Visitor { - var ( - metricType dto.MetricType - methodName string - ok bool - ) - - switch stmt := call.Fun.(type) { - - /* - That's the case of setting alias . to client_golang/prometheus or promauto package. - - import . "github.com/prometheus/client_golang/prometheus" - metric := NewCounter(CounterOpts{}) - */ - case *ast.Ident: - if stmt.Name == "NewCounterFunc" { - return v.parseOpts(call.Args[0], dto.MetricType_COUNTER) - } - - if stmt.Name == "NewGaugeFunc" { - return v.parseOpts(call.Args[0], dto.MetricType_GAUGE) - } - - if metricType, ok = metricsType[stmt.Name]; !ok { - return v - } - methodName = stmt.Name - - /* - This case covers the most of cases to initialize metrics. - - prometheus.NewCounter(CounterOpts{}) - - promauto.With(nil).NewCounter(CounterOpts{}) - - factory := promauto.With(nil) - factory.NewCounter(CounterOpts{}) - - prometheus.NewCounterFunc() - */ - case *ast.SelectorExpr: - if stmt.Sel.Name == "NewCounterFunc" { - return v.parseOpts(call.Args[0], dto.MetricType_COUNTER) - } - - if stmt.Sel.Name == "NewGaugeFunc" { - return v.parseOpts(call.Args[0], dto.MetricType_GAUGE) - } - - if stmt.Sel.Name == "NewFamilyGenerator" && len(call.Args) == 5 { - return v.parseKSMMetrics(call.Args[0], call.Args[1], call.Args[2]) - } - - if metricType, ok = metricsType[stmt.Sel.Name]; !ok { - return v - } - methodName = stmt.Sel.Name - - default: - return v - } - - argNum := 1 - if strings.HasSuffix(methodName, "Vec") { - argNum = 2 - } - // The methods used to initialize metrics should have at least one arg. 
- if len(call.Args) < argNum && v.strict { - v.issues = append(v.issues, Issue{ - Pos: v.fs.Position(call.Pos()), - Metric: "", - Text: fmt.Sprintf("%s should have at least %d arguments", methodName, argNum), - }) - return v - } - - if len(call.Args) == 0 { - return v - } - - return v.parseOpts(call.Args[0], metricType) -} - -func (v *visitor) parseOpts(optArg ast.Node, metricType dto.MetricType) ast.Visitor { - // position for the first arg of the CallExpr - optsPosition := v.fs.Position(optArg.Pos()) - opts, help := v.parseOptsExpr(optArg) - if opts == nil { - return v - } - currentMetric := dto.MetricFamily{ - Type: &metricType, - Help: help, - } - - metricName := prometheus.BuildFQName(opts.namespace, opts.subsystem, opts.name) - // We skip the invalid metric if the name is an empty string. - // This kind of metric declaration might be used as a stud metric - // https://github.com/thanos-io/thanos/blob/main/cmd/thanos/tools_bucket.go#L538. - if metricName == "" { - return v - } - currentMetric.Name = &metricName - - v.metrics = append(v.metrics, MetricFamilyWithPos{MetricFamily: ¤tMetric, Pos: optsPosition}) - return v -} - -// Parser for kube-state-metrics generators. -func (v *visitor) parseKSMMetrics(nameArg ast.Node, helpArg ast.Node, metricTypeArg ast.Node) ast.Visitor { - optsPosition := v.fs.Position(nameArg.Pos()) - currentMetric := dto.MetricFamily{} - name, ok := v.parseValue("name", nameArg) - if !ok { - return v - } - currentMetric.Name = &name - - help, ok := v.parseValue("help", helpArg) - if !ok { - return v - } - currentMetric.Help = &help - - switch stmt := metricTypeArg.(type) { - case *ast.SelectorExpr: - if metricType, ok := metricsType[stmt.Sel.Name]; !ok { - return v - } else { - currentMetric.Type = &metricType - } - } - - v.metrics = append(v.metrics, MetricFamilyWithPos{MetricFamily: ¤tMetric, Pos: optsPosition}) - return v -} - -func (v *visitor) parseSendMetricChanExpr(chExpr *ast.SendStmt) ast.Visitor { - var ( - ok bool - requiredArgNum int - methodName string - metricType dto.MetricType - ) - - call, ok := chExpr.Value.(*ast.CallExpr) - if !ok { - return v - } - - switch stmt := call.Fun.(type) { - case *ast.Ident: - if requiredArgNum, ok = constMetricArgNum[stmt.Name]; !ok { - return v - } - methodName = stmt.Name - - case *ast.SelectorExpr: - if requiredArgNum, ok = constMetricArgNum[stmt.Sel.Name]; !ok { - return v - } - methodName = stmt.Sel.Name - } - - if len(call.Args) < requiredArgNum && v.strict { - v.issues = append(v.issues, Issue{ - Metric: "", - Pos: v.fs.Position(call.Pos()), - Text: fmt.Sprintf("%s should have at least %d arguments", methodName, requiredArgNum), - }) - return v - } - - name, help := v.parseConstMetricOptsExpr(call.Args[0]) - if name == nil { - return v - } - - metric := &dto.MetricFamily{ - Name: name, - Help: help, - } - switch methodName { - case "MustNewConstMetric", "NewLazyConstMetric": - switch t := call.Args[1].(type) { - case *ast.Ident: - metric.Type = getConstMetricType(t.Name) - case *ast.SelectorExpr: - metric.Type = getConstMetricType(t.Sel.Name) - } - - case "MustNewHistogram": - metricType = dto.MetricType_HISTOGRAM - metric.Type = &metricType - case "MustNewSummary": - metricType = dto.MetricType_SUMMARY - metric.Type = &metricType - } - - v.metrics = append(v.metrics, MetricFamilyWithPos{MetricFamily: metric, Pos: v.fs.Position(call.Pos())}) - return v -} - -func (v *visitor) parseOptsExpr(n ast.Node) (*opt, *string) { - switch stmt := n.(type) { - case *ast.CompositeLit: - return v.parseCompositeOpts(stmt) 
- - case *ast.Ident: - if stmt.Obj != nil { - if decl, ok := stmt.Obj.Decl.(*ast.AssignStmt); ok && len(decl.Rhs) > 0 { - if t, ok := decl.Rhs[0].(*ast.CompositeLit); ok { - return v.parseCompositeOpts(t) - } - } - } - - case *ast.UnaryExpr: - return v.parseOptsExpr(stmt.X) - } - - return nil, nil -} - -func (v *visitor) parseCompositeOpts(stmt *ast.CompositeLit) (*opt, *string) { - metricOption := &opt{} - var help *string - for _, elt := range stmt.Elts { - kvExpr, ok := elt.(*ast.KeyValueExpr) - if !ok { - continue - } - object, ok := kvExpr.Key.(*ast.Ident) - if !ok { - continue - } - - if _, ok := validOptsFields[object.Name]; !ok { - continue - } - - // If failed to parse field value, stop parsing. - stringLiteral, ok := v.parseValue(object.Name, kvExpr.Value) - if !ok { - return nil, nil - } - - switch object.Name { - case "Namespace": - metricOption.namespace = stringLiteral - case "Subsystem": - metricOption.subsystem = stringLiteral - case "Name": - metricOption.name = stringLiteral - case "Help": - help = &stringLiteral - } - } - - return metricOption, help -} - -func (v *visitor) parseValue(object string, n ast.Node) (string, bool) { - switch t := n.(type) { - - // make sure it is string literal value - case *ast.BasicLit: - if t.Kind == token.STRING { - return mustUnquote(t.Value), true - } - - return "", false - - case *ast.Ident: - if t.Obj == nil { - return "", false - } - - if vs, ok := t.Obj.Decl.(*ast.ValueSpec); ok { - return v.parseValue(object, vs) - } - - case *ast.ValueSpec: - if len(t.Values) == 0 { - return "", false - } - return v.parseValue(object, t.Values[0]) - - // For binary expr, we only support adding two strings like `foo` + `bar`. - case *ast.BinaryExpr: - if t.Op == token.ADD { - x, ok := v.parseValue(object, t.X) - if !ok { - return "", false - } - - y, ok := v.parseValue(object, t.Y) - if !ok { - return "", false - } - - return x + y, true - } - - // We can only cover some basic cases here - case *ast.CallExpr: - return v.parseValueCallExpr(object, t) - - default: - if v.strict { - v.issues = append(v.issues, Issue{ - Pos: v.fs.Position(n.Pos()), - Metric: "", - Text: fmt.Sprintf("parsing %s with type %T is not supported", object, t), - }) - } - } - - return "", false -} - -func (v *visitor) parseValueCallExpr(object string, call *ast.CallExpr) (string, bool) { - var ( - methodName string - namespace string - subsystem string - name string - ok bool - ) - switch expr := call.Fun.(type) { - case *ast.SelectorExpr: - methodName = expr.Sel.Name - case *ast.Ident: - methodName = expr.Name - default: - return "", false - } - - if methodName == "BuildFQName" && len(call.Args) == 3 { - namespace, ok = v.parseValue("namespace", call.Args[0]) - if !ok { - return "", false - } - subsystem, ok = v.parseValue("subsystem", call.Args[1]) - if !ok { - return "", false - } - name, ok = v.parseValue("name", call.Args[2]) - if !ok { - return "", false - } - return prometheus.BuildFQName(namespace, subsystem, name), true - } - - if v.strict { - v.issues = append(v.issues, Issue{ - Metric: "", - Pos: v.fs.Position(call.Pos()), - Text: fmt.Sprintf("parsing %s with function %s is not supported", object, methodName), - }) - } - - return "", false -} - -func (v *visitor) parseConstMetricOptsExpr(n ast.Node) (*string, *string) { - switch stmt := n.(type) { - case *ast.CallExpr: - return v.parseNewDescCallExpr(stmt) - - case *ast.Ident: - if stmt.Obj != nil { - switch t := stmt.Obj.Decl.(type) { - case *ast.AssignStmt: - if len(t.Rhs) > 0 { - if call, ok := 
t.Rhs[0].(*ast.CallExpr); ok { - return v.parseNewDescCallExpr(call) - } - } - case *ast.ValueSpec: - if len(t.Values) > 0 { - if call, ok := t.Values[0].(*ast.CallExpr); ok { - return v.parseNewDescCallExpr(call) - } - } - } - - if v.strict { - v.issues = append(v.issues, Issue{ - Pos: v.fs.Position(stmt.Pos()), - Metric: "", - Text: fmt.Sprintf("parsing desc of type %T is not supported", stmt.Obj.Decl), - }) - } - } - - default: - if v.strict { - v.issues = append(v.issues, Issue{ - Pos: v.fs.Position(stmt.Pos()), - Metric: "", - Text: fmt.Sprintf("parsing desc of type %T is not supported", stmt), - }) - } - } - - return nil, nil -} - -func (v *visitor) parseNewDescCallExpr(call *ast.CallExpr) (*string, *string) { - var ( - help string - name string - ok bool - ) - - switch expr := call.Fun.(type) { - case *ast.Ident: - if expr.Name != "NewDesc" { - if v.strict { - v.issues = append(v.issues, Issue{ - Pos: v.fs.Position(expr.Pos()), - Metric: "", - Text: fmt.Sprintf("parsing desc with function %s is not supported", expr.Name), - }) - } - return nil, nil - } - case *ast.SelectorExpr: - if expr.Sel.Name != "NewDesc" { - if v.strict { - v.issues = append(v.issues, Issue{ - Pos: v.fs.Position(expr.Sel.Pos()), - Metric: "", - Text: fmt.Sprintf("parsing desc with function %s is not supported", expr.Sel.Name), - }) - } - return nil, nil - } - default: - if v.strict { - v.issues = append(v.issues, Issue{ - Pos: v.fs.Position(expr.Pos()), - Metric: "", - Text: fmt.Sprintf("parsing desc of %T is not supported", expr), - }) - } - return nil, nil - } - - // k8s.io/component-base/metrics.NewDesc has 6 args - // while prometheus.NewDesc has 4 args - if len(call.Args) < 4 && v.strict { - v.issues = append(v.issues, Issue{ - Metric: "", - Pos: v.fs.Position(call.Pos()), - Text: "NewDesc should have at least 4 args", - }) - return nil, nil - } - - name, ok = v.parseValue("fqName", call.Args[0]) - if !ok { - return nil, nil - } - help, ok = v.parseValue("help", call.Args[1]) - if !ok { - return nil, nil - } - - return &name, &help -} - -func mustUnquote(str string) string { - stringLiteral, err := strconv.Unquote(str) - if err != nil { - panic(err) - } - - return stringLiteral -} - -func getConstMetricType(name string) *dto.MetricType { - metricType := dto.MetricType_UNTYPED - if name == "CounterValue" { - metricType = dto.MetricType_COUNTER - } else if name == "GaugeValue" { - metricType = dto.MetricType_GAUGE - } - - return &metricType -} diff --git a/vendor/github.com/ykadowak/zerologlint/.goreleaser.yaml b/vendor/github.com/ykadowak/zerologlint/.goreleaser.yaml deleted file mode 100644 index f3af3f212..000000000 --- a/vendor/github.com/ykadowak/zerologlint/.goreleaser.yaml +++ /dev/null @@ -1,24 +0,0 @@ -before: - hooks: - - go mod tidy -builds: - - id: zerologlint - main: ./cmd/zerologlint - binary: zerologlint - env: - - CGO_ENABLED=0 - goos: - - linux - - windows - - darwin -checksum: - name_template: 'checksums.txt' -snapshot: - name_template: "{{ incpatch .Version }}-next" -changelog: - sort: asc - filters: - exclude: - - '^docs:' - - '^test:' - - '^ci:' diff --git a/vendor/github.com/ykadowak/zerologlint/LICENSE b/vendor/github.com/ykadowak/zerologlint/LICENSE deleted file mode 100644 index 92a1e3b31..000000000 --- a/vendor/github.com/ykadowak/zerologlint/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -MIT License - -Copyright (c) 2023 Yusuke Kadowaki - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to 
deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. diff --git a/vendor/github.com/ykadowak/zerologlint/README.md b/vendor/github.com/ykadowak/zerologlint/README.md deleted file mode 100644 index b0a5fc0f9..000000000 --- a/vendor/github.com/ykadowak/zerologlint/README.md +++ /dev/null @@ -1,63 +0,0 @@ -# zerologlint -![build](https://github.com/ykadowak/zerologlint/actions/workflows/testing.yaml/badge.svg) - -`zerologlint` is a linter for [zerolog](https://github.com/rs/zerolog) that can be run with `go vet` or through [golangci-lint](https://golangci-lint.run/) since `v1.53.0`. -It detects the wrong usage of `zerolog` that a user forgets to dispatch `zerolog.Event` with `Send` or `Msg` like functions, in which case nothing will be logged. For more detailed explanations of the cases it detects, see [Examples](#Example). - -## Install - -```bash -go install github.com/ykadowak/zerologlint/cmd/zerologlint@latest -``` - -## Usage -```bash -go vet -vettool=`which zerologlint` ./... -``` - -or you can also use it with [golangci-lint](https://golangci-lint.run/) since `v1.53.0`. - -## Examples -```go -package main - -import ( - "github.com/rs/zerolog" - "github.com/rs/zerolog/log" -) - -func main() { - // 1. Basic case - log.Info() // "must be dispatched by Msg or Send method" - - // 2. Nested case - log.Info(). // "must be dispatched by Msg or Send method" - Str("foo", "bar"). - Dict("dict", zerolog.Dict(). - Str("bar", "baz"). - Int("n", 1), - ) - - // 3. Reassignment case - logger := log.Info() // "must be dispatched by Msg or Send method" - if err != nil { - logger = log.Error() // "must be dispatched by Msg or Send method" - } - logger.Str("foo", "bar") - - // 4. Deferred case - defer log.Info() // "must be dispatched by Msg or Send method" - - // 5. zerolog.Logger case - logger2 := zerolog.New(os.Stdout) - logger2.Info().Send() - - // 6. 
Dispatch in other function case - event := log.Info() - dispatcher(event) -} - -func dispatcher(e *zerolog.Event) { - e.Send() -} -``` diff --git a/vendor/github.com/ykadowak/zerologlint/zerologlint.go b/vendor/github.com/ykadowak/zerologlint/zerologlint.go deleted file mode 100644 index 8c8fb74fc..000000000 --- a/vendor/github.com/ykadowak/zerologlint/zerologlint.go +++ /dev/null @@ -1,261 +0,0 @@ -package zerologlint - -import ( - "go/token" - "go/types" - "strings" - - "golang.org/x/tools/go/analysis" - "golang.org/x/tools/go/analysis/passes/buildssa" - "golang.org/x/tools/go/ssa" - - "github.com/gostaticanalysis/comment/passes/commentmap" -) - -var Analyzer = &analysis.Analyzer{ - Name: "zerologlint", - Doc: "Detects the wrong usage of `zerolog` that a user forgets to dispatch with `Send` or `Msg`", - Run: run, - Requires: []*analysis.Analyzer{ - buildssa.Analyzer, - commentmap.Analyzer, - }, -} - -type posser interface { - Pos() token.Pos -} - -// callDefer is an interface just to hold both ssa.Call and ssa.Defer in our set -type callDefer interface { - Common() *ssa.CallCommon - Pos() token.Pos -} - -type linter struct { - // eventSet holds all the ssa block that is a zerolog.Event type instance - // that should be dispatched. - // Everytime the zerolog.Event is dispatched with Msg() or Send(), - // deletes that block from this set. - // At the end, check if the set is empty, or report the not dispatched block. - eventSet map[posser]struct{} - // deleteLater holds the ssa block that should be deleted from eventSet after - // all the inspection is done. - // this is required because `else` ssa block comes after the dispatch of `if`` block. - // e.g., if err != nil { log.Error() } else { log.Info() } log.Send() - // deleteLater takes care of the log.Info() block. - deleteLater map[posser]struct{} - recLimit uint -} - -func run(pass *analysis.Pass) (interface{}, error) { - srcFuncs := pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA).SrcFuncs - - l := &linter{ - eventSet: make(map[posser]struct{}), - deleteLater: make(map[posser]struct{}), - recLimit: 100, - } - - for _, sf := range srcFuncs { - for _, b := range sf.Blocks { - for _, instr := range b.Instrs { - if c, ok := instr.(*ssa.Call); ok { - l.inspect(c) - } else if c, ok := instr.(*ssa.Defer); ok { - l.inspect(c) - } - } - } - } - - // apply deleteLater to envetSet for else branches of if-else cases - - for k := range l.deleteLater { - delete(l.eventSet, k) - } - - // At the end, if the set is clear -> ok. - // Otherwise, there must be a left zerolog.Event var that weren't dispatched. So report it. - for k := range l.eventSet { - pass.Reportf(k.Pos(), "must be dispatched by Msg or Send method") - } - - return nil, nil -} - -func (l *linter) inspect(cd callDefer) { - c := cd.Common() - - // check if it's in github.com/rs/zerolog/log since there's some - // functions in github.com/rs/zerolog that returns zerolog.Event - // which should not be included. However, zerolog.Logger receiver is an exception. - if isInLogPkg(*c) || isLoggerRecv(*c) { - if isZerologEvent(c.Value) { - // this ssa block should be dispatched afterwards at some point - l.eventSet[cd] = struct{}{} - return - } - } - - // if the call does not return zerolog.Event, - // check if the base is zerolog.Event. - // if so, check if the StaticCallee is Send() or Msg(). - // if so, remove the arg[0] from the set. 
- f := c.StaticCallee() - if f == nil { - return - } - if !isDispatchMethod(f) { - shouldReturn := true - for _, p := range f.Params { - if isZerologEvent(p) { - // check if this zerolog.Event as a parameter is dispatched in the function - // TODO: technically, it can be dispatched in another function that is called in this function, and - // this algorithm cannot track that. But I'm tired of thinking about that for now. - for _, b := range f.Blocks { - for _, instr := range b.Instrs { - switch v := instr.(type) { - case *ssa.Call: - if inspectDispatchInFunction(v.Common()) { - shouldReturn = false - break - } - case *ssa.Defer: - if inspectDispatchInFunction(v.Common()) { - shouldReturn = false - break - } - } - } - } - } - } - if shouldReturn { - return - } - } - for _, arg := range c.Args { - if isZerologEvent(arg) { - // if there's branch, track both ways - // this is for the case like: - // logger := log.Info() - // if err != nil { - // logger = log.Error() - // } - // logger.Send() - // - // Similar case like below goes to the same root but that doesn't - // have any side effect. - // logger := log.Info() - // if err != nil { - // logger = logger.Str("a", "b") - // } - // logger.Send() - if phi, ok := arg.(*ssa.Phi); ok { - for _, edge := range phi.Edges { - l.dfsEdge(edge, make(map[ssa.Value]struct{}), 0) - } - } else { - val := getRootSsaValue(arg) - delete(l.eventSet, val) - } - } - } -} - -func (l *linter) dfsEdge(v ssa.Value, visit map[ssa.Value]struct{}, cnt uint) { - // only for safety - if cnt > l.recLimit { - return - } - cnt++ - - if _, ok := visit[v]; ok { - return - } - visit[v] = struct{}{} - - val := getRootSsaValue(v) - phi, ok := val.(*ssa.Phi) - if !ok { - l.deleteLater[val] = struct{}{} - return - } - for _, edge := range phi.Edges { - l.dfsEdge(edge, visit, cnt) - } -} - -func inspectDispatchInFunction(cc *ssa.CallCommon) bool { - if isDispatchMethod(cc.StaticCallee()) { - for _, arg := range cc.Args { - if isZerologEvent(arg) { - return true - } - } - } - return false -} - -func isInLogPkg(c ssa.CallCommon) bool { - switch v := c.Value.(type) { - case ssa.Member: - p := v.Package() - if p == nil { - return false - } - return strings.HasSuffix(p.Pkg.Path(), "github.com/rs/zerolog/log") - } - return false -} - -func isLoggerRecv(c ssa.CallCommon) bool { - switch f := c.Value.(type) { - case *ssa.Function: - if recv := f.Signature.Recv(); recv != nil { - return strings.HasSuffix(types.TypeString(recv.Type(), nil), "zerolog.Logger") - } - } - return false -} - -func isZerologEvent(v ssa.Value) bool { - ts := v.Type().String() - return strings.HasSuffix(ts, "github.com/rs/zerolog.Event") -} - -func isDispatchMethod(f *ssa.Function) bool { - if f == nil { - return false - } - m := f.Name() - if m == "Send" || m == "Msg" || m == "Msgf" || m == "MsgFunc" { - return true - } - return false -} - -func getRootSsaValue(v ssa.Value) ssa.Value { - if c, ok := v.(*ssa.Call); ok { - v := c.Value() - - // When there is no receiver, that's the block of zerolog.Event - // eg. Error() method in log.Error().Str("foo", "bar").Send() - if len(v.Call.Args) == 0 { - return v - } - - // Even when there is a receiver, if it's a zerolog.Logger instance, return this block - // eg. Info() method in zerolog.New(os.Stdout).Info() - root := v.Call.Args[0] - if !isZerologEvent(root) { - return v - } - - // Ok to just return the receiver because all the method in this - // chain is zerolog.Event at this point. 
- return getRootSsaValue(root) - } - return v -} diff --git a/vendor/gitlab.com/bosi/decorder/.gitignore b/vendor/gitlab.com/bosi/decorder/.gitignore deleted file mode 100644 index 48baa654b..000000000 --- a/vendor/gitlab.com/bosi/decorder/.gitignore +++ /dev/null @@ -1,7 +0,0 @@ -/.idea -/.env -/.env.example -/decorder -/LICENSES-3RD-PARTY -/ytt -/yq \ No newline at end of file diff --git a/vendor/gitlab.com/bosi/decorder/.gitlab-ci.params.yml b/vendor/gitlab.com/bosi/decorder/.gitlab-ci.params.yml deleted file mode 100644 index fe6b85288..000000000 --- a/vendor/gitlab.com/bosi/decorder/.gitlab-ci.params.yml +++ /dev/null @@ -1,15 +0,0 @@ -#@data/values ---- - -app: - name: decorder - -code_quality: - enable_tests: true - enable_static_code_analyses: true - enable_license_check: true - -deployment: - enable_rc_handling: false - use_gitlab_container_registry: false - enable_image_build_and_deploy: false diff --git a/vendor/gitlab.com/bosi/decorder/.gitlab-ci.yml b/vendor/gitlab.com/bosi/decorder/.gitlab-ci.yml deleted file mode 100644 index 73e1273b3..000000000 --- a/vendor/gitlab.com/bosi/decorder/.gitlab-ci.yml +++ /dev/null @@ -1,63 +0,0 @@ -############################### -# This file is auto-generated # -############################### - -variables: - APP_NAME: decorder - -stages: - - test - - build - - release - -test: - stage: test - image: golang:1.21.0@sha256:b490ae1f0ece153648dd3c5d25be59a63f966b5f9e1311245c947de4506981aa - before_script: - - set -eu - - if [[ -f .env.pipeline ]];then cp .env.pipeline .env;fi - - mkdir -p ~/.ssh - - touch ~/.ssh/known_hosts - - ssh-keyscan gitlab.com > ~/.ssh/known_hosts - retry: 2 - script: - - '### run tests ###' - - make test - - make test-cover - -lint:source-code: - stage: test - image: golangci/golangci-lint:v1.54.2-alpine@sha256:e950721f6ae622dcc041f57cc0b61c3a78d4bbfc588facfc8b0166901a9f4848 - script: - - apk add make bash - - make settings - - '### run linter ###' - - golangci-lint run ./... - -license-check: - stage: test - image: golang:1.21.0@sha256:b490ae1f0ece153648dd3c5d25be59a63f966b5f9e1311245c947de4506981aa - before_script: - - set -eu - - if [[ -f .env.pipeline ]];then cp .env.pipeline .env;fi - - mkdir -p ~/.ssh - - touch ~/.ssh/known_hosts - - ssh-keyscan gitlab.com > ~/.ssh/known_hosts - script: - - '### run license-check ###' - - make check-licenses - artifacts: - paths: - - LICENSES-3RD-PARTY - expire_in: 7 days - -pages: - stage: release - image: golang:1.21.0@sha256:b490ae1f0ece153648dd3c5d25be59a63f966b5f9e1311245c947de4506981aa - only: - - tags - script: - - make gitlab-pages - artifacts: - paths: - - public/ diff --git a/vendor/gitlab.com/bosi/decorder/LICENSE.md b/vendor/gitlab.com/bosi/decorder/LICENSE.md deleted file mode 100644 index d46c30e18..000000000 --- a/vendor/gitlab.com/bosi/decorder/LICENSE.md +++ /dev/null @@ -1,16 +0,0 @@ -MIT License - -Copyright (c) 2021 Florian Bosdorff - -Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated -documentation files (the "Software"), to deal in the Software without restriction, including without limitation the -rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit -persons to whom the Software is furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all copies or substantial portions of the -Software. 
- -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR -COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR -OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. \ No newline at end of file diff --git a/vendor/gitlab.com/bosi/decorder/Makefile b/vendor/gitlab.com/bosi/decorder/Makefile deleted file mode 100644 index 8d4c05690..000000000 --- a/vendor/gitlab.com/bosi/decorder/Makefile +++ /dev/null @@ -1,7 +0,0 @@ -include project-templates/base.mk - -project-templates/base.mk: - @cp -ar ~/.dotfiles/projects/golang ./project-templates - -.env: - touch .env \ No newline at end of file diff --git a/vendor/gitlab.com/bosi/decorder/README.md b/vendor/gitlab.com/bosi/decorder/README.md deleted file mode 100644 index 5947e5ca2..000000000 --- a/vendor/gitlab.com/bosi/decorder/README.md +++ /dev/null @@ -1,50 +0,0 @@ -# Decorder - -A declaration order linter for Go. In case of this tool declarations are `type`, `const`, `var` and `func`. - -## Rules - -This linter applies multiple rules where each can be disabled via cli parameter. - -| rule | description | cli-options | -|--------------------|--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| declaration order | Enforces the order of global declarations (e.g. all global constants are always defined before variables).
You can also define a subset of declarations if you don't want to enforce the order of all of them. | * disable all checks: `-disable-dec-order-check`
* disable type checks: `-disable-type-dec-order-check`
* disable const checks: `-disable-const-dec-order-check`
* disable var checks: `-disable-var-dec-order-check`
* custom order: `-dec-order var,const,func,type` | -| declaration number | Enforces that the statements const, var and type are only used once per file. You have to use parenthesis
to declare e.g multiple global types inside a file. | disable check: `-disable-dec-num-check` | -| init func first | Enforces the init func to be the first function in file. | disable check: `-disable-init-func-first-check` | - -You may find the implementation of the rules inside `analyzer.go`. - -Underscore var declarations can be ignored via `-ignore-underscore-vars`. - -## Installation - -```shell -go install gitlab.com/bosi/decorder/cmd/decorder -``` - -You can use the linter via golangci-lint as well: https://golangci-lint.run/usage/linters/#decorder. - -## Usage - -```shell -# with default options -decorder ./... - -# custom declaration order -decorder -dec-order var,const,func,type ./... - -# disable declaration order check -decorder -disable-dec-order-check ./... - -# disable check for multiple declarations statements -decorder -disable-dec-num-check ./... - -# disable check for multiple declarations (var only) statements -decorder -disable-var-dec-num-check ./... - -# disable check that init func is always first function -decorder -disable-init-func-first-check ./... - -# ignore underscore variables for all checks -decorder -ignore-underscore-vars ./... -``` \ No newline at end of file diff --git a/vendor/gitlab.com/bosi/decorder/analyzer.go b/vendor/gitlab.com/bosi/decorder/analyzer.go deleted file mode 100644 index 08f82ccc1..000000000 --- a/vendor/gitlab.com/bosi/decorder/analyzer.go +++ /dev/null @@ -1,255 +0,0 @@ -package decorder - -import ( - "fmt" - "go/ast" - "go/token" - "strings" - "sync" - - "golang.org/x/tools/go/analysis" -) - -type ( - decNumChecker struct { - tokenMap map[string]token.Token - tokenCounts map[token.Token]int - decOrder []string - funcPoss []funcPos - } - - options struct { - decOrder string - ignoreUnderscoreVars bool - disableDecNumCheck bool - disableTypeDecNumCheck bool - disableConstDecNumCheck bool - disableVarDecNumCheck bool - disableDecOrderCheck bool - disableInitFuncFirstCheck bool - } - - funcPos struct { - start token.Pos - end token.Pos - } -) - -const ( - Name = "decorder" - - FlagDo = "dec-order" - FlagIuv = "ignore-underscore-vars" - FlagDdnc = "disable-dec-num-check" - FlagDtdnc = "disable-type-dec-num-check" - FlagDcdnc = "disable-const-dec-num-check" - FlagDvdnc = "disable-var-dec-num-check" - FlagDdoc = "disable-dec-order-check" - FlagDiffc = "disable-init-func-first-check" - - defaultDecOrder = "type,const,var,func" -) - -var ( - Analyzer = &analysis.Analyzer{ - Name: Name, - Doc: "check declaration order and count of types, constants, variables and functions", - Run: run, - } - - opts = options{} - - tokens = []token.Token{token.TYPE, token.CONST, token.VAR, token.FUNC} - - decNumConf = map[token.Token]bool{ - token.TYPE: false, - token.CONST: false, - token.VAR: false, - } - decLock sync.Mutex -) - -//nolint:lll -func init() { - Analyzer.Flags.StringVar(&opts.decOrder, FlagDo, defaultDecOrder, "define the required order of types, constants, variables and functions declarations inside a file") - Analyzer.Flags.BoolVar(&opts.ignoreUnderscoreVars, FlagIuv, false, "option to ignore underscore vars for dec order and dec num check") - Analyzer.Flags.BoolVar(&opts.disableDecNumCheck, FlagDdnc, false, "option to disable (all) checks for number of declarations inside file") - Analyzer.Flags.BoolVar(&opts.disableTypeDecNumCheck, FlagDtdnc, false, "option to disable check for number of type declarations inside file") - Analyzer.Flags.BoolVar(&opts.disableConstDecNumCheck, FlagDcdnc, false, "option to disable check for number of const 
declarations inside file") - Analyzer.Flags.BoolVar(&opts.disableVarDecNumCheck, FlagDvdnc, false, "option to disable check for number of var declarations inside file") - Analyzer.Flags.BoolVar(&opts.disableDecOrderCheck, FlagDdoc, false, "option to disable check for order of declarations inside file") - Analyzer.Flags.BoolVar(&opts.disableInitFuncFirstCheck, FlagDiffc, false, "option to disable check that init function is always first function in file") -} - -func initDec() { - decLock.Lock() - decNumConf[token.TYPE] = opts.disableTypeDecNumCheck - decNumConf[token.CONST] = opts.disableConstDecNumCheck - decNumConf[token.VAR] = opts.disableVarDecNumCheck - decLock.Unlock() -} - -func run(pass *analysis.Pass) (interface{}, error) { - initDec() - - for _, f := range pass.Files { - ast.Inspect(f, runDeclNumAndDecOrderCheck(pass)) - - if !opts.disableInitFuncFirstCheck { - ast.Inspect(f, runInitFuncFirstCheck(pass)) - } - } - - return nil, nil -} - -func runInitFuncFirstCheck(pass *analysis.Pass) func(ast.Node) bool { - nonInitFound := false - - return func(n ast.Node) bool { - dec, ok := n.(*ast.FuncDecl) - if !ok { - return true - } - - if dec.Name.Name == "init" && dec.Recv == nil { - if nonInitFound { - pass.Reportf(dec.Pos(), "init func must be the first function in file") - } - } else { - nonInitFound = true - } - - return true - } -} - -func runDeclNumAndDecOrderCheck(pass *analysis.Pass) func(ast.Node) bool { - dnc := newDecNumChecker() - - if opts.disableDecNumCheck && opts.disableDecOrderCheck { - return func(n ast.Node) bool { - return true - } - } - - return func(n ast.Node) bool { - fd, ok := n.(*ast.FuncDecl) - if ok { - return dnc.handleFuncDec(fd, pass) - } - - gd, ok := n.(*ast.GenDecl) - if !ok { - return true - } - - if dnc.isInsideFunction(gd) { - return true - } - - dnc.handleGenDecl(gd, pass) - - if !opts.disableDecOrderCheck { - dnc.handleDecOrderCheck(gd, pass) - } - - return true - } -} - -func newDecNumChecker() decNumChecker { - dnc := decNumChecker{ - tokenMap: map[string]token.Token{}, - tokenCounts: map[token.Token]int{}, - decOrder: []string{}, - funcPoss: []funcPos{}, - } - - for _, t := range tokens { - dnc.tokenCounts[t] = 0 - dnc.tokenMap[t.String()] = t - } - - for _, do := range strings.Split(opts.decOrder, ",") { - dnc.decOrder = append(dnc.decOrder, strings.TrimSpace(do)) - } - - return dnc -} - -func (dnc decNumChecker) isToLate(t token.Token) (string, bool) { - for i, do := range dnc.decOrder { - if do == t.String() { - for j := i + 1; j < len(dnc.decOrder); j++ { - if dnc.tokenCounts[dnc.tokenMap[dnc.decOrder[j]]] > 0 { - return dnc.decOrder[j], false - } - } - return "", true - } - } - - return "", true -} - -func (dnc *decNumChecker) handleGenDecl(gd *ast.GenDecl, pass *analysis.Pass) { - for _, t := range tokens { - if gd.Tok == t { - if opts.ignoreUnderscoreVars && declName(gd) == "_" { - continue - } - - dnc.tokenCounts[t]++ - - if !opts.disableDecNumCheck && !decNumConf[t] && dnc.tokenCounts[t] > 1 { - pass.Reportf(gd.Pos(), "multiple \"%s\" declarations are not allowed; use parentheses instead", t.String()) - } - } - } -} - -func declName(gd *ast.GenDecl) string { - for _, spec := range gd.Specs { - s, ok := spec.(*ast.ValueSpec) - if ok && len(s.Names) > 0 && s.Names[0] != nil { - return s.Names[0].Name - } - } - return "" -} - -func (dnc decNumChecker) handleDecOrderCheck(gd *ast.GenDecl, pass *analysis.Pass) { - l, c := dnc.isToLate(gd.Tok) - if !c { - pass.Reportf(gd.Pos(), fmtWrongOrderMsg(gd.Tok.String(), l)) - } -} - -func (dnc 
decNumChecker) isInsideFunction(dn *ast.GenDecl) bool { - for _, poss := range dnc.funcPoss { - if poss.start < dn.Pos() && poss.end > dn.Pos() { - return true - } - } - return false -} - -func (dnc *decNumChecker) handleFuncDec(fd *ast.FuncDecl, pass *analysis.Pass) bool { - dnc.funcPoss = append(dnc.funcPoss, funcPos{start: fd.Pos(), end: fd.End()}) - - dnc.tokenCounts[token.FUNC]++ - - if !opts.disableDecOrderCheck { - l, c := dnc.isToLate(token.FUNC) - if !c { - pass.Reportf(fd.Pos(), fmtWrongOrderMsg(token.FUNC.String(), l)) - } - } - - return true -} - -func fmtWrongOrderMsg(target string, notAfter string) string { - return fmt.Sprintf("%s must not be placed after %s (desired order: %s)", target, notAfter, opts.decOrder) -} diff --git a/vendor/gitlab.com/bosi/decorder/renovate.json b/vendor/gitlab.com/bosi/decorder/renovate.json deleted file mode 100644 index 60041578c..000000000 --- a/vendor/gitlab.com/bosi/decorder/renovate.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "$schema": "https://docs.renovatebot.com/renovate-schema.json", - "extends": [ - "gitlab>bosi/renovate-configs//configs/golang", - "gitlab>bosi/renovate-configs//configs/automerge" - ] -} diff --git a/vendor/go-simpler.org/musttag/.golangci.yml b/vendor/go-simpler.org/musttag/.golangci.yml deleted file mode 100644 index 641471cc4..000000000 --- a/vendor/go-simpler.org/musttag/.golangci.yml +++ /dev/null @@ -1,23 +0,0 @@ -linters: - disable-all: true - enable: - # enabled by default: - - errcheck - - gosimple - - govet - - ineffassign - - staticcheck - - typecheck - - unused - # disabled by default: - - gocritic - - gofumpt - -linters-settings: - gocritic: - enabled-tags: - - diagnostic - - style - - performance - - experimental - - opinionated diff --git a/vendor/go-simpler.org/musttag/.goreleaser.yml b/vendor/go-simpler.org/musttag/.goreleaser.yml deleted file mode 100644 index dd75ac945..000000000 --- a/vendor/go-simpler.org/musttag/.goreleaser.yml +++ /dev/null @@ -1,18 +0,0 @@ -builds: - - main: ./cmd/musttag - env: - - CGO_ENABLED=0 - flags: - - -trimpath - ldflags: - - -s -w -X main.version={{ .Version }} - targets: - - darwin_amd64 - - darwin_arm64 - - linux_amd64 - - windows_amd64 - -archives: - - format_overrides: - - goos: windows - format: zip diff --git a/vendor/go-simpler.org/musttag/LICENSE b/vendor/go-simpler.org/musttag/LICENSE deleted file mode 100644 index a612ad981..000000000 --- a/vendor/go-simpler.org/musttag/LICENSE +++ /dev/null @@ -1,373 +0,0 @@ -Mozilla Public License Version 2.0 -================================== - -1. Definitions --------------- - -1.1. "Contributor" - means each individual or legal entity that creates, contributes to - the creation of, or owns Covered Software. - -1.2. "Contributor Version" - means the combination of the Contributions of others (if any) used - by a Contributor and that particular Contributor's Contribution. - -1.3. "Contribution" - means Covered Software of a particular Contributor. - -1.4. "Covered Software" - means Source Code Form to which the initial Contributor has attached - the notice in Exhibit A, the Executable Form of such Source Code - Form, and Modifications of such Source Code Form, in each case - including portions thereof. - -1.5. 
"Incompatible With Secondary Licenses" - means - - (a) that the initial Contributor has attached the notice described - in Exhibit B to the Covered Software; or - - (b) that the Covered Software was made available under the terms of - version 1.1 or earlier of the License, but not also under the - terms of a Secondary License. - -1.6. "Executable Form" - means any form of the work other than Source Code Form. - -1.7. "Larger Work" - means a work that combines Covered Software with other material, in - a separate file or files, that is not Covered Software. - -1.8. "License" - means this document. - -1.9. "Licensable" - means having the right to grant, to the maximum extent possible, - whether at the time of the initial grant or subsequently, any and - all of the rights conveyed by this License. - -1.10. "Modifications" - means any of the following: - - (a) any file in Source Code Form that results from an addition to, - deletion from, or modification of the contents of Covered - Software; or - - (b) any new file in Source Code Form that contains any Covered - Software. - -1.11. "Patent Claims" of a Contributor - means any patent claim(s), including without limitation, method, - process, and apparatus claims, in any patent Licensable by such - Contributor that would be infringed, but for the grant of the - License, by the making, using, selling, offering for sale, having - made, import, or transfer of either its Contributions or its - Contributor Version. - -1.12. "Secondary License" - means either the GNU General Public License, Version 2.0, the GNU - Lesser General Public License, Version 2.1, the GNU Affero General - Public License, Version 3.0, or any later versions of those - licenses. - -1.13. "Source Code Form" - means the form of the work preferred for making modifications. - -1.14. "You" (or "Your") - means an individual or a legal entity exercising rights under this - License. For legal entities, "You" includes any entity that - controls, is controlled by, or is under common control with You. For - purposes of this definition, "control" means (a) the power, direct - or indirect, to cause the direction or management of such entity, - whether by contract or otherwise, or (b) ownership of more than - fifty percent (50%) of the outstanding shares or beneficial - ownership of such entity. - -2. License Grants and Conditions --------------------------------- - -2.1. Grants - -Each Contributor hereby grants You a world-wide, royalty-free, -non-exclusive license: - -(a) under intellectual property rights (other than patent or trademark) - Licensable by such Contributor to use, reproduce, make available, - modify, display, perform, distribute, and otherwise exploit its - Contributions, either on an unmodified basis, with Modifications, or - as part of a Larger Work; and - -(b) under Patent Claims of such Contributor to make, use, sell, offer - for sale, have made, import, and otherwise transfer either its - Contributions or its Contributor Version. - -2.2. Effective Date - -The licenses granted in Section 2.1 with respect to any Contribution -become effective for each Contribution on the date the Contributor first -distributes such Contribution. - -2.3. Limitations on Grant Scope - -The licenses granted in this Section 2 are the only rights granted under -this License. No additional rights or licenses will be implied from the -distribution or licensing of Covered Software under this License. 
-Notwithstanding Section 2.1(b) above, no patent license is granted by a -Contributor: - -(a) for any code that a Contributor has removed from Covered Software; - or - -(b) for infringements caused by: (i) Your and any other third party's - modifications of Covered Software, or (ii) the combination of its - Contributions with other software (except as part of its Contributor - Version); or - -(c) under Patent Claims infringed by Covered Software in the absence of - its Contributions. - -This License does not grant any rights in the trademarks, service marks, -or logos of any Contributor (except as may be necessary to comply with -the notice requirements in Section 3.4). - -2.4. Subsequent Licenses - -No Contributor makes additional grants as a result of Your choice to -distribute the Covered Software under a subsequent version of this -License (see Section 10.2) or under the terms of a Secondary License (if -permitted under the terms of Section 3.3). - -2.5. Representation - -Each Contributor represents that the Contributor believes its -Contributions are its original creation(s) or it has sufficient rights -to grant the rights to its Contributions conveyed by this License. - -2.6. Fair Use - -This License is not intended to limit any rights You have under -applicable copyright doctrines of fair use, fair dealing, or other -equivalents. - -2.7. Conditions - -Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted -in Section 2.1. - -3. Responsibilities -------------------- - -3.1. Distribution of Source Form - -All distribution of Covered Software in Source Code Form, including any -Modifications that You create or to which You contribute, must be under -the terms of this License. You must inform recipients that the Source -Code Form of the Covered Software is governed by the terms of this -License, and how they can obtain a copy of this License. You may not -attempt to alter or restrict the recipients' rights in the Source Code -Form. - -3.2. Distribution of Executable Form - -If You distribute Covered Software in Executable Form then: - -(a) such Covered Software must also be made available in Source Code - Form, as described in Section 3.1, and You must inform recipients of - the Executable Form how they can obtain a copy of such Source Code - Form by reasonable means in a timely manner, at a charge no more - than the cost of distribution to the recipient; and - -(b) You may distribute such Executable Form under the terms of this - License, or sublicense it under different terms, provided that the - license for the Executable Form does not attempt to limit or alter - the recipients' rights in the Source Code Form under this License. - -3.3. Distribution of a Larger Work - -You may create and distribute a Larger Work under terms of Your choice, -provided that You also comply with the requirements of this License for -the Covered Software. If the Larger Work is a combination of Covered -Software with a work governed by one or more Secondary Licenses, and the -Covered Software is not Incompatible With Secondary Licenses, this -License permits You to additionally distribute such Covered Software -under the terms of such Secondary License(s), so that the recipient of -the Larger Work may, at their option, further distribute the Covered -Software under the terms of either this License or such Secondary -License(s). - -3.4. 
Notices - -You may not remove or alter the substance of any license notices -(including copyright notices, patent notices, disclaimers of warranty, -or limitations of liability) contained within the Source Code Form of -the Covered Software, except that You may alter any license notices to -the extent required to remedy known factual inaccuracies. - -3.5. Application of Additional Terms - -You may choose to offer, and to charge a fee for, warranty, support, -indemnity or liability obligations to one or more recipients of Covered -Software. However, You may do so only on Your own behalf, and not on -behalf of any Contributor. You must make it absolutely clear that any -such warranty, support, indemnity, or liability obligation is offered by -You alone, and You hereby agree to indemnify every Contributor for any -liability incurred by such Contributor as a result of warranty, support, -indemnity or liability terms You offer. You may include additional -disclaimers of warranty and limitations of liability specific to any -jurisdiction. - -4. Inability to Comply Due to Statute or Regulation ---------------------------------------------------- - -If it is impossible for You to comply with any of the terms of this -License with respect to some or all of the Covered Software due to -statute, judicial order, or regulation then You must: (a) comply with -the terms of this License to the maximum extent possible; and (b) -describe the limitations and the code they affect. Such description must -be placed in a text file included with all distributions of the Covered -Software under this License. Except to the extent prohibited by statute -or regulation, such description must be sufficiently detailed for a -recipient of ordinary skill to be able to understand it. - -5. Termination --------------- - -5.1. The rights granted under this License will terminate automatically -if You fail to comply with any of its terms. However, if You become -compliant, then the rights granted under this License from a particular -Contributor are reinstated (a) provisionally, unless and until such -Contributor explicitly and finally terminates Your grants, and (b) on an -ongoing basis, if such Contributor fails to notify You of the -non-compliance by some reasonable means prior to 60 days after You have -come back into compliance. Moreover, Your grants from a particular -Contributor are reinstated on an ongoing basis if such Contributor -notifies You of the non-compliance by some reasonable means, this is the -first time You have received notice of non-compliance with this License -from such Contributor, and You become compliant prior to 30 days after -Your receipt of the notice. - -5.2. If You initiate litigation against any entity by asserting a patent -infringement claim (excluding declaratory judgment actions, -counter-claims, and cross-claims) alleging that a Contributor Version -directly or indirectly infringes any patent, then the rights granted to -You by any and all Contributors for the Covered Software under Section -2.1 of this License shall terminate. - -5.3. In the event of termination under Sections 5.1 or 5.2 above, all -end user license agreements (excluding distributors and resellers) which -have been validly granted by You or Your distributors under this License -prior to termination shall survive termination. - -************************************************************************ -* * -* 6. 
Disclaimer of Warranty * -* ------------------------- * -* * -* Covered Software is provided under this License on an "as is" * -* basis, without warranty of any kind, either expressed, implied, or * -* statutory, including, without limitation, warranties that the * -* Covered Software is free of defects, merchantable, fit for a * -* particular purpose or non-infringing. The entire risk as to the * -* quality and performance of the Covered Software is with You. * -* Should any Covered Software prove defective in any respect, You * -* (not any Contributor) assume the cost of any necessary servicing, * -* repair, or correction. This disclaimer of warranty constitutes an * -* essential part of this License. No use of any Covered Software is * -* authorized under this License except under this disclaimer. * -* * -************************************************************************ - -************************************************************************ -* * -* 7. Limitation of Liability * -* -------------------------- * -* * -* Under no circumstances and under no legal theory, whether tort * -* (including negligence), contract, or otherwise, shall any * -* Contributor, or anyone who distributes Covered Software as * -* permitted above, be liable to You for any direct, indirect, * -* special, incidental, or consequential damages of any character * -* including, without limitation, damages for lost profits, loss of * -* goodwill, work stoppage, computer failure or malfunction, or any * -* and all other commercial damages or losses, even if such party * -* shall have been informed of the possibility of such damages. This * -* limitation of liability shall not apply to liability for death or * -* personal injury resulting from such party's negligence to the * -* extent applicable law prohibits such limitation. Some * -* jurisdictions do not allow the exclusion or limitation of * -* incidental or consequential damages, so this exclusion and * -* limitation may not apply to You. * -* * -************************************************************************ - -8. Litigation -------------- - -Any litigation relating to this License may be brought only in the -courts of a jurisdiction where the defendant maintains its principal -place of business and such litigation shall be governed by laws of that -jurisdiction, without reference to its conflict-of-law provisions. -Nothing in this Section shall prevent a party's ability to bring -cross-claims or counter-claims. - -9. Miscellaneous ----------------- - -This License represents the complete agreement concerning the subject -matter hereof. If any provision of this License is held to be -unenforceable, such provision shall be reformed only to the extent -necessary to make it enforceable. Any law or regulation which provides -that the language of a contract shall be construed against the drafter -shall not be used to construe this License against a Contributor. - -10. Versions of the License ---------------------------- - -10.1. New Versions - -Mozilla Foundation is the license steward. Except as provided in Section -10.3, no one other than the license steward has the right to modify or -publish new versions of this License. Each version will be given a -distinguishing version number. - -10.2. Effect of New Versions - -You may distribute the Covered Software under the terms of the version -of the License under which You originally received the Covered Software, -or under the terms of any subsequent version published by the license -steward. 
- -10.3. Modified Versions - -If you create software not governed by this License, and you want to -create a new license for such software, you may create and use a -modified version of this License if you rename the license and remove -any references to the name of the license steward (except to note that -such modified license differs from this License). - -10.4. Distributing Source Code Form that is Incompatible With Secondary -Licenses - -If You choose to distribute Source Code Form that is Incompatible With -Secondary Licenses under the terms of this version of the License, the -notice described in Exhibit B of this License must be attached. - -Exhibit A - Source Code Form License Notice -------------------------------------------- - - This Source Code Form is subject to the terms of the Mozilla Public - License, v. 2.0. If a copy of the MPL was not distributed with this - file, You can obtain one at http://mozilla.org/MPL/2.0/. - -If it is not possible or desirable to put the notice in a particular -file, then You may include the notice in a location (such as a LICENSE -file in a relevant directory) where a recipient would be likely to look -for such a notice. - -You may add additional accurate notices of copyright ownership. - -Exhibit B - "Incompatible With Secondary Licenses" Notice ---------------------------------------------------------- - - This Source Code Form is "Incompatible With Secondary Licenses", as - defined by the Mozilla Public License, v. 2.0. diff --git a/vendor/go-simpler.org/musttag/README.md b/vendor/go-simpler.org/musttag/README.md deleted file mode 100644 index 3f3a25330..000000000 --- a/vendor/go-simpler.org/musttag/README.md +++ /dev/null @@ -1,102 +0,0 @@ -# musttag - -[![checks](https://github.com/go-simpler/musttag/actions/workflows/checks.yml/badge.svg)](https://github.com/go-simpler/musttag/actions/workflows/checks.yml) -[![pkg.go.dev](https://pkg.go.dev/badge/go-simpler.org/musttag.svg)](https://pkg.go.dev/go-simpler.org/musttag) -[![goreportcard](https://goreportcard.com/badge/go-simpler.org/musttag)](https://goreportcard.com/report/go-simpler.org/musttag) -[![codecov](https://codecov.io/gh/go-simpler/musttag/branch/main/graph/badge.svg)](https://codecov.io/gh/go-simpler/musttag) - -A Go linter that enforces field tags in (un)marshaled structs. - -## 📌 About - -`musttag` checks that exported fields of a struct passed to a `Marshal`-like function are annotated with the relevant tag: - -```go -// BAD: -var user struct { - Name string -} -data, err := json.Marshal(user) - -// GOOD: -var user struct { - Name string `json:"name"` -} -data, err := json.Marshal(user) -``` - -The rational from [Uber Style Guide][1]: - -> The serialized form of the structure is a contract between different systems. -> Changes to the structure of the serialized form, including field names, break this contract. -> Specifying field names inside tags makes the contract explicit, -> and it guards against accidentally breaking the contract by refactoring or renaming fields. - -## 🚀 Features - -The following packages are supported out of the box: - -* [encoding/json][2] -* [encoding/xml][3] -* [gopkg.in/yaml.v3][4] -* [github.com/BurntSushi/toml][5] -* [github.com/mitchellh/mapstructure][6] -* [github.com/jmoiron/sqlx][7] - -In addition, any [custom package](#custom-packages) can be added to the list. - -## 📦 Install - -`musttag` is integrated into [`golangci-lint`][8], and this is the recommended way to use it. 
- -To enable the linter, add the following lines to `.golangci.yml`: - -```yaml -linters: - enable: - - musttag -``` - -Alternatively, you can download a prebuilt binary from the [Releases][9] page to use `musttag` standalone. - -## 📋 Usage - -Run `golangci-lint` with `musttag` enabled. -See the list of [available options][10] to configure the linter. - -When using `musttag` standalone, pass the options as flags. - -### Custom packages - -To report a custom function, you need to add its description to `.golangci.yml`. -The following is an example of adding support for [`hclsimple.Decode`][11]: - -```yaml -linters-settings: - musttag: - functions: - # The full name of the function, including the package. - - name: github.com/hashicorp/hcl/v2/hclsimple.Decode - # The struct tag whose presence should be ensured. - tag: hcl - # The position of the argument to check. - arg-pos: 2 -``` - -The same can be done via the `-fn=` flag when using `musttag` standalone: - -```shell -musttag -fn="github.com/hashicorp/hcl/v2/hclsimple.DecodeFile:hcl:2" ./... -``` - -[1]: https://github.com/uber-go/guide/blob/master/style.md#use-field-tags-in-marshaled-structs -[2]: https://pkg.go.dev/encoding/json -[3]: https://pkg.go.dev/encoding/xml -[4]: https://pkg.go.dev/gopkg.in/yaml.v3 -[5]: https://pkg.go.dev/github.com/BurntSushi/toml -[6]: https://pkg.go.dev/github.com/mitchellh/mapstructure -[7]: https://pkg.go.dev/github.com/jmoiron/sqlx -[8]: https://golangci-lint.run -[9]: https://github.com/go-simpler/musttag/releases -[10]: https://golangci-lint.run/usage/linters/#musttag -[11]: https://pkg.go.dev/github.com/hashicorp/hcl/v2/hclsimple#Decode diff --git a/vendor/go-simpler.org/musttag/builtins.go b/vendor/go-simpler.org/musttag/builtins.go deleted file mode 100644 index 3305513f8..000000000 --- a/vendor/go-simpler.org/musttag/builtins.go +++ /dev/null @@ -1,133 +0,0 @@ -package musttag - -// builtins is a set of functions supported out of the box. 
-var builtins = []Func{ - // https://pkg.go.dev/encoding/json - { - Name: "encoding/json.Marshal", Tag: "json", ArgPos: 0, - ifaceWhitelist: []string{"encoding/json.Marshaler", "encoding.TextMarshaler"}, - }, - { - Name: "encoding/json.MarshalIndent", Tag: "json", ArgPos: 0, - ifaceWhitelist: []string{"encoding/json.Marshaler", "encoding.TextMarshaler"}, - }, - { - Name: "encoding/json.Unmarshal", Tag: "json", ArgPos: 1, - ifaceWhitelist: []string{"encoding/json.Unmarshaler", "encoding.TextUnmarshaler"}, - }, - { - Name: "(*encoding/json.Encoder).Encode", Tag: "json", ArgPos: 0, - ifaceWhitelist: []string{"encoding/json.Marshaler", "encoding.TextMarshaler"}, - }, - { - Name: "(*encoding/json.Decoder).Decode", Tag: "json", ArgPos: 0, - ifaceWhitelist: []string{"encoding/json.Unmarshaler", "encoding.TextUnmarshaler"}, - }, - - // https://pkg.go.dev/encoding/xml - { - Name: "encoding/xml.Marshal", Tag: "xml", ArgPos: 0, - ifaceWhitelist: []string{"encoding/xml.Marshaler", "encoding.TextMarshaler"}, - }, - { - Name: "encoding/xml.MarshalIndent", Tag: "xml", ArgPos: 0, - ifaceWhitelist: []string{"encoding/xml.Marshaler", "encoding.TextMarshaler"}, - }, - { - Name: "encoding/xml.Unmarshal", Tag: "xml", ArgPos: 1, - ifaceWhitelist: []string{"encoding/xml.Unmarshaler", "encoding.TextUnmarshaler"}, - }, - { - Name: "(*encoding/xml.Encoder).Encode", Tag: "xml", ArgPos: 0, - ifaceWhitelist: []string{"encoding/xml.Marshaler", "encoding.TextMarshaler"}, - }, - { - Name: "(*encoding/xml.Decoder).Decode", Tag: "xml", ArgPos: 0, - ifaceWhitelist: []string{"encoding/xml.Unmarshaler", "encoding.TextUnmarshaler"}, - }, - { - Name: "(*encoding/xml.Encoder).EncodeElement", Tag: "xml", ArgPos: 0, - ifaceWhitelist: []string{"encoding/xml.Marshaler", "encoding.TextMarshaler"}, - }, - { - Name: "(*encoding/xml.Decoder).DecodeElement", Tag: "xml", ArgPos: 0, - ifaceWhitelist: []string{"encoding/xml.Unmarshaler", "encoding.TextUnmarshaler"}, - }, - - // https://pkg.go.dev/gopkg.in/yaml.v3 - { - Name: "gopkg.in/yaml.v3.Marshal", Tag: "yaml", ArgPos: 0, - ifaceWhitelist: []string{"gopkg.in/yaml.v3.Marshaler"}, - }, - { - Name: "gopkg.in/yaml.v3.Unmarshal", Tag: "yaml", ArgPos: 1, - ifaceWhitelist: []string{"gopkg.in/yaml.v3.Unmarshaler"}, - }, - { - Name: "(*gopkg.in/yaml.v3.Encoder).Encode", Tag: "yaml", ArgPos: 0, - ifaceWhitelist: []string{"gopkg.in/yaml.v3.Marshaler"}, - }, - { - Name: "(*gopkg.in/yaml.v3.Decoder).Decode", Tag: "yaml", ArgPos: 0, - ifaceWhitelist: []string{"gopkg.in/yaml.v3.Unmarshaler"}, - }, - - // https://pkg.go.dev/github.com/BurntSushi/toml - { - Name: "github.com/BurntSushi/toml.Unmarshal", Tag: "toml", ArgPos: 1, - ifaceWhitelist: []string{"github.com/BurntSushi/toml.Unmarshaler", "encoding.TextUnmarshaler"}, - }, - { - Name: "github.com/BurntSushi/toml.Decode", Tag: "toml", ArgPos: 1, - ifaceWhitelist: []string{"github.com/BurntSushi/toml.Unmarshaler", "encoding.TextUnmarshaler"}, - }, - { - Name: "github.com/BurntSushi/toml.DecodeFS", Tag: "toml", ArgPos: 2, - ifaceWhitelist: []string{"github.com/BurntSushi/toml.Unmarshaler", "encoding.TextUnmarshaler"}, - }, - { - Name: "github.com/BurntSushi/toml.DecodeFile", Tag: "toml", ArgPos: 1, - ifaceWhitelist: []string{"github.com/BurntSushi/toml.Unmarshaler", "encoding.TextUnmarshaler"}, - }, - { - Name: "(*github.com/BurntSushi/toml.Encoder).Encode", Tag: "toml", ArgPos: 0, - ifaceWhitelist: []string{"encoding.TextMarshaler"}, - }, - { - Name: "(*github.com/BurntSushi/toml.Decoder).Decode", Tag: "toml", ArgPos: 0, - ifaceWhitelist: 
[]string{"github.com/BurntSushi/toml.Unmarshaler", "encoding.TextUnmarshaler"}, - }, - - // https://pkg.go.dev/github.com/mitchellh/mapstructure - {Name: "github.com/mitchellh/mapstructure.Decode", Tag: "mapstructure", ArgPos: 1}, - {Name: "github.com/mitchellh/mapstructure.DecodeMetadata", Tag: "mapstructure", ArgPos: 1}, - {Name: "github.com/mitchellh/mapstructure.WeakDecode", Tag: "mapstructure", ArgPos: 1}, - {Name: "github.com/mitchellh/mapstructure.WeakDecodeMetadata", Tag: "mapstructure", ArgPos: 1}, - - // https://pkg.go.dev/github.com/jmoiron/sqlx - {Name: "github.com/jmoiron/sqlx.Get", Tag: "db", ArgPos: 1}, - {Name: "github.com/jmoiron/sqlx.GetContext", Tag: "db", ArgPos: 2}, - {Name: "github.com/jmoiron/sqlx.Select", Tag: "db", ArgPos: 1}, - {Name: "github.com/jmoiron/sqlx.SelectContext", Tag: "db", ArgPos: 2}, - {Name: "github.com/jmoiron/sqlx.StructScan", Tag: "db", ArgPos: 1}, - {Name: "(*github.com/jmoiron/sqlx.Conn).GetContext", Tag: "db", ArgPos: 1}, - {Name: "(*github.com/jmoiron/sqlx.Conn).SelectContext", Tag: "db", ArgPos: 1}, - {Name: "(*github.com/jmoiron/sqlx.DB).Get", Tag: "db", ArgPos: 0}, - {Name: "(*github.com/jmoiron/sqlx.DB).GetContext", Tag: "db", ArgPos: 1}, - {Name: "(*github.com/jmoiron/sqlx.DB).Select", Tag: "db", ArgPos: 0}, - {Name: "(*github.com/jmoiron/sqlx.DB).SelectContext", Tag: "db", ArgPos: 1}, - {Name: "(*github.com/jmoiron/sqlx.NamedStmt).Get", Tag: "db", ArgPos: 0}, - {Name: "(*github.com/jmoiron/sqlx.NamedStmt).GetContext", Tag: "db", ArgPos: 1}, - {Name: "(*github.com/jmoiron/sqlx.NamedStmt).Select", Tag: "db", ArgPos: 0}, - {Name: "(*github.com/jmoiron/sqlx.NamedStmt).SelectContext", Tag: "db", ArgPos: 1}, - {Name: "(*github.com/jmoiron/sqlx.Row).StructScan", Tag: "db", ArgPos: 0}, - {Name: "(*github.com/jmoiron/sqlx.Rows).StructScan", Tag: "db", ArgPos: 0}, - {Name: "(*github.com/jmoiron/sqlx.Stmt).Get", Tag: "db", ArgPos: 0}, - {Name: "(*github.com/jmoiron/sqlx.Stmt).GetContext", Tag: "db", ArgPos: 1}, - {Name: "(*github.com/jmoiron/sqlx.Stmt).Select", Tag: "db", ArgPos: 0}, - {Name: "(*github.com/jmoiron/sqlx.Stmt).SelectContext", Tag: "db", ArgPos: 1}, - {Name: "(*github.com/jmoiron/sqlx.Tx).Get", Tag: "db", ArgPos: 0}, - {Name: "(*github.com/jmoiron/sqlx.Tx).GetContext", Tag: "db", ArgPos: 1}, - {Name: "(*github.com/jmoiron/sqlx.Tx).Select", Tag: "db", ArgPos: 0}, - {Name: "(*github.com/jmoiron/sqlx.Tx).SelectContext", Tag: "db", ArgPos: 1}, -} diff --git a/vendor/go-simpler.org/musttag/musttag.go b/vendor/go-simpler.org/musttag/musttag.go deleted file mode 100644 index d7911770f..000000000 --- a/vendor/go-simpler.org/musttag/musttag.go +++ /dev/null @@ -1,265 +0,0 @@ -// Package musttag implements the musttag analyzer. -package musttag - -import ( - "flag" - "fmt" - "go/ast" - "go/types" - "reflect" - "strconv" - "strings" - - "golang.org/x/tools/go/analysis" - "golang.org/x/tools/go/analysis/passes/inspect" - "golang.org/x/tools/go/ast/inspector" - "golang.org/x/tools/go/types/typeutil" -) - -// Func describes a function call to look for, e.g. [json.Marshal]. -type Func struct { - Name string // The full name of the function, including the package. - Tag string // The struct tag whose presence should be ensured. - ArgPos int // The position of the argument to check. - - // a list of interface names (including the package); - // if at least one is implemented by the argument, no check is performed. - ifaceWhitelist []string -} - -// New creates a new musttag analyzer. -// To report a custom function, provide its description as [Func]. 
-func New(funcs ...Func) *analysis.Analyzer { - var flagFuncs []Func - return &analysis.Analyzer{ - Name: "musttag", - Doc: "enforce field tags in (un)marshaled structs", - Flags: flags(&flagFuncs), - Requires: []*analysis.Analyzer{inspect.Analyzer}, - Run: func(pass *analysis.Pass) (any, error) { - l := len(builtins) + len(funcs) + len(flagFuncs) - allFuncs := make(map[string]Func, l) - - merge := func(slice []Func) { - for _, fn := range slice { - allFuncs[fn.Name] = fn - } - } - merge(builtins) - merge(funcs) - merge(flagFuncs) - - mainModule, err := getMainModule() - if err != nil { - return nil, err - } - - return run(pass, mainModule, allFuncs) - }, - } -} - -func flags(funcs *[]Func) flag.FlagSet { - fs := flag.NewFlagSet("musttag", flag.ContinueOnError) - fs.Func("fn", "report a custom function (name:tag:arg-pos)", func(s string) error { - parts := strings.Split(s, ":") - if len(parts) != 3 || parts[0] == "" || parts[1] == "" { - return strconv.ErrSyntax - } - pos, err := strconv.Atoi(parts[2]) - if err != nil { - return err - } - *funcs = append(*funcs, Func{ - Name: parts[0], - Tag: parts[1], - ArgPos: pos, - }) - return nil - }) - return *fs -} - -func run(pass *analysis.Pass, mainModule string, funcs map[string]Func) (_ any, err error) { - visit := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) - filter := []ast.Node{(*ast.CallExpr)(nil)} - - visit.Preorder(filter, func(node ast.Node) { - if err != nil { - return // there is already an error. - } - - call, ok := node.(*ast.CallExpr) - if !ok { - return // not a function call. - } - - callee := typeutil.StaticCallee(pass.TypesInfo, call) - if callee == nil { - return // not a static call. - } - - fn, ok := funcs[cutVendor(callee.FullName())] - if !ok { - return // unsupported function. - } - - if len(call.Args) <= fn.ArgPos { - err = fmt.Errorf("musttag: Func.ArgPos cannot be %d: %s accepts only %d argument(s)", fn.ArgPos, fn.Name, len(call.Args)) - return - } - - arg := call.Args[fn.ArgPos] - if ident, ok := arg.(*ast.Ident); ok && ident.Obj == nil { - return // e.g. json.Marshal(nil) - } - - typ := pass.TypesInfo.TypeOf(arg) - if typ == nil { - return // no type info found. - } - - // TODO: check nested structs too. - if implementsInterface(typ, fn.ifaceWhitelist, pass.Pkg.Imports()) { - return // the type implements a Marshaler interface; see issue #64. - } - - checker := checker{ - mainModule: mainModule, - seenTypes: make(map[string]struct{}), - } - - styp, ok := checker.parseStruct(typ) - if !ok { - return // not a struct. - } - - if valid := checker.checkStruct(styp, fn.Tag); valid { - return // nothing to report. - } - - pass.Reportf(arg.Pos(), "the given struct should be annotated with the `%s` tag", fn.Tag) - }) - - return nil, err -} - -type checker struct { - mainModule string - seenTypes map[string]struct{} -} - -func (c *checker) parseStruct(typ types.Type) (*types.Struct, bool) { - switch typ := typ.(type) { - case *types.Pointer: - return c.parseStruct(typ.Elem()) - - case *types.Array: - return c.parseStruct(typ.Elem()) - - case *types.Slice: - return c.parseStruct(typ.Elem()) - - case *types.Map: - return c.parseStruct(typ.Elem()) - - case *types.Named: // a struct of the named type. - pkg := typ.Obj().Pkg() - if pkg == nil { - return nil, false - } - if !strings.HasPrefix(pkg.Path(), c.mainModule) { - return nil, false - } - styp, ok := typ.Underlying().(*types.Struct) - if !ok { - return nil, false - } - return styp, true - - case *types.Struct: // an anonymous struct. 
- return typ, true - - default: - return nil, false - } -} - -func (c *checker) checkStruct(styp *types.Struct, tag string) (valid bool) { - c.seenTypes[styp.String()] = struct{}{} - - for i := 0; i < styp.NumFields(); i++ { - field := styp.Field(i) - if !field.Exported() { - continue - } - - if _, ok := reflect.StructTag(styp.Tag(i)).Lookup(tag); !ok { - // tag is not required for embedded types; see issue #12. - if !field.Embedded() { - return false - } - } - - nested, ok := c.parseStruct(field.Type()) - if !ok { - continue - } - if _, ok := c.seenTypes[nested.String()]; ok { - continue - } - if valid := c.checkStruct(nested, tag); !valid { - return false - } - } - - return true -} - -func implementsInterface(typ types.Type, ifaces []string, imports []*types.Package) bool { - findScope := func(pkgName string) (*types.Scope, bool) { - // fast path: check direct imports (e.g. looking for "encoding/json.Marshaler"). - for _, direct := range imports { - if pkgName == cutVendor(direct.Path()) { - return direct.Scope(), true - } - } - // slow path: check indirect imports (e.g. looking for "encoding.TextMarshaler"). - // TODO: only check indirect imports from the package (e.g. "encoding/json") of the analyzed function (e.g. "encoding/json.Marshal"). - for _, direct := range imports { - for _, indirect := range direct.Imports() { - if pkgName == cutVendor(indirect.Path()) { - return indirect.Scope(), true - } - } - } - return nil, false - } - - for _, ifacePath := range ifaces { - // "encoding/json.Marshaler" -> "encoding/json" + "Marshaler" - idx := strings.LastIndex(ifacePath, ".") - if idx == -1 { - continue - } - pkgName, ifaceName := ifacePath[:idx], ifacePath[idx+1:] - - scope, ok := findScope(pkgName) - if !ok { - continue - } - obj := scope.Lookup(ifaceName) - if obj == nil { - continue - } - iface, ok := obj.Type().Underlying().(*types.Interface) - if !ok { - continue - } - if types.Implements(typ, iface) { - return true - } - } - - return false -} diff --git a/vendor/go-simpler.org/musttag/utils.go b/vendor/go-simpler.org/musttag/utils.go deleted file mode 100644 index 1a13f96c2..000000000 --- a/vendor/go-simpler.org/musttag/utils.go +++ /dev/null @@ -1,49 +0,0 @@ -package musttag - -import ( - "encoding/json" - "fmt" - "os/exec" - "strings" -) - -func getMainModule() (string, error) { - args := [...]string{"go", "mod", "edit", "-json"} - - out, err := exec.Command(args[0], args[1:]...).Output() - if err != nil { - return "", fmt.Errorf("running %q: %w", strings.Join(args[:], " "), err) - } - - var info struct { - Module struct { - Path string `json:"Path"` - } `json:"Module"` - } - if err := json.Unmarshal(out, &info); err != nil { - return "", fmt.Errorf("decoding module info: %w\n%s", err, out) - } - - return info.Module.Path, nil -} - -// based on golang.org/x/tools/imports.VendorlessPath -func cutVendor(path string) string { - var prefix string - - switch { - case strings.HasPrefix(path, "(*"): - prefix, path = "(*", path[len("(*"):] - case strings.HasPrefix(path, "("): - prefix, path = "(", path[len("("):] - } - - if i := strings.LastIndex(path, "/vendor/"); i >= 0 { - return prefix + path[i+len("/vendor/"):] - } - if strings.HasPrefix(path, "vendor/") { - return prefix + path[len("vendor/"):] - } - - return prefix + path -} diff --git a/vendor/go-simpler.org/sloglint/.golangci.yml b/vendor/go-simpler.org/sloglint/.golangci.yml deleted file mode 100644 index ef926a056..000000000 --- a/vendor/go-simpler.org/sloglint/.golangci.yml +++ /dev/null @@ -1,22 +0,0 @@ -linters: - 
disable-all: true - enable: - # enabled by default: - - errcheck - - gosimple - - govet - - ineffassign - - staticcheck - - unused - # disabled by default: - - gocritic - - gofumpt - -linters-settings: - gocritic: - enabled-tags: - - diagnostic - - style - - performance - - experimental - - opinionated diff --git a/vendor/go-simpler.org/sloglint/.goreleaser.yml b/vendor/go-simpler.org/sloglint/.goreleaser.yml deleted file mode 100644 index d31ea11d3..000000000 --- a/vendor/go-simpler.org/sloglint/.goreleaser.yml +++ /dev/null @@ -1,18 +0,0 @@ -builds: - - main: ./cmd/sloglint - env: - - CGO_ENABLED=0 - flags: - - -trimpath - ldflags: - - -s -w -X main.version={{.Version}} - targets: - - darwin_amd64 - - darwin_arm64 - - linux_amd64 - - windows_amd64 - -archives: - - format_overrides: - - goos: windows - format: zip diff --git a/vendor/go-simpler.org/sloglint/LICENSE b/vendor/go-simpler.org/sloglint/LICENSE deleted file mode 100644 index a612ad981..000000000 --- a/vendor/go-simpler.org/sloglint/LICENSE +++ /dev/null @@ -1,373 +0,0 @@ -Mozilla Public License Version 2.0 -================================== - -1. Definitions --------------- - -1.1. "Contributor" - means each individual or legal entity that creates, contributes to - the creation of, or owns Covered Software. - -1.2. "Contributor Version" - means the combination of the Contributions of others (if any) used - by a Contributor and that particular Contributor's Contribution. - -1.3. "Contribution" - means Covered Software of a particular Contributor. - -1.4. "Covered Software" - means Source Code Form to which the initial Contributor has attached - the notice in Exhibit A, the Executable Form of such Source Code - Form, and Modifications of such Source Code Form, in each case - including portions thereof. - -1.5. "Incompatible With Secondary Licenses" - means - - (a) that the initial Contributor has attached the notice described - in Exhibit B to the Covered Software; or - - (b) that the Covered Software was made available under the terms of - version 1.1 or earlier of the License, but not also under the - terms of a Secondary License. - -1.6. "Executable Form" - means any form of the work other than Source Code Form. - -1.7. "Larger Work" - means a work that combines Covered Software with other material, in - a separate file or files, that is not Covered Software. - -1.8. "License" - means this document. - -1.9. "Licensable" - means having the right to grant, to the maximum extent possible, - whether at the time of the initial grant or subsequently, any and - all of the rights conveyed by this License. - -1.10. "Modifications" - means any of the following: - - (a) any file in Source Code Form that results from an addition to, - deletion from, or modification of the contents of Covered - Software; or - - (b) any new file in Source Code Form that contains any Covered - Software. - -1.11. "Patent Claims" of a Contributor - means any patent claim(s), including without limitation, method, - process, and apparatus claims, in any patent Licensable by such - Contributor that would be infringed, but for the grant of the - License, by the making, using, selling, offering for sale, having - made, import, or transfer of either its Contributions or its - Contributor Version. - -1.12. "Secondary License" - means either the GNU General Public License, Version 2.0, the GNU - Lesser General Public License, Version 2.1, the GNU Affero General - Public License, Version 3.0, or any later versions of those - licenses. - -1.13. 
"Source Code Form" - means the form of the work preferred for making modifications. - -1.14. "You" (or "Your") - means an individual or a legal entity exercising rights under this - License. For legal entities, "You" includes any entity that - controls, is controlled by, or is under common control with You. For - purposes of this definition, "control" means (a) the power, direct - or indirect, to cause the direction or management of such entity, - whether by contract or otherwise, or (b) ownership of more than - fifty percent (50%) of the outstanding shares or beneficial - ownership of such entity. - -2. License Grants and Conditions --------------------------------- - -2.1. Grants - -Each Contributor hereby grants You a world-wide, royalty-free, -non-exclusive license: - -(a) under intellectual property rights (other than patent or trademark) - Licensable by such Contributor to use, reproduce, make available, - modify, display, perform, distribute, and otherwise exploit its - Contributions, either on an unmodified basis, with Modifications, or - as part of a Larger Work; and - -(b) under Patent Claims of such Contributor to make, use, sell, offer - for sale, have made, import, and otherwise transfer either its - Contributions or its Contributor Version. - -2.2. Effective Date - -The licenses granted in Section 2.1 with respect to any Contribution -become effective for each Contribution on the date the Contributor first -distributes such Contribution. - -2.3. Limitations on Grant Scope - -The licenses granted in this Section 2 are the only rights granted under -this License. No additional rights or licenses will be implied from the -distribution or licensing of Covered Software under this License. -Notwithstanding Section 2.1(b) above, no patent license is granted by a -Contributor: - -(a) for any code that a Contributor has removed from Covered Software; - or - -(b) for infringements caused by: (i) Your and any other third party's - modifications of Covered Software, or (ii) the combination of its - Contributions with other software (except as part of its Contributor - Version); or - -(c) under Patent Claims infringed by Covered Software in the absence of - its Contributions. - -This License does not grant any rights in the trademarks, service marks, -or logos of any Contributor (except as may be necessary to comply with -the notice requirements in Section 3.4). - -2.4. Subsequent Licenses - -No Contributor makes additional grants as a result of Your choice to -distribute the Covered Software under a subsequent version of this -License (see Section 10.2) or under the terms of a Secondary License (if -permitted under the terms of Section 3.3). - -2.5. Representation - -Each Contributor represents that the Contributor believes its -Contributions are its original creation(s) or it has sufficient rights -to grant the rights to its Contributions conveyed by this License. - -2.6. Fair Use - -This License is not intended to limit any rights You have under -applicable copyright doctrines of fair use, fair dealing, or other -equivalents. - -2.7. Conditions - -Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted -in Section 2.1. - -3. Responsibilities -------------------- - -3.1. Distribution of Source Form - -All distribution of Covered Software in Source Code Form, including any -Modifications that You create or to which You contribute, must be under -the terms of this License. 
You must inform recipients that the Source -Code Form of the Covered Software is governed by the terms of this -License, and how they can obtain a copy of this License. You may not -attempt to alter or restrict the recipients' rights in the Source Code -Form. - -3.2. Distribution of Executable Form - -If You distribute Covered Software in Executable Form then: - -(a) such Covered Software must also be made available in Source Code - Form, as described in Section 3.1, and You must inform recipients of - the Executable Form how they can obtain a copy of such Source Code - Form by reasonable means in a timely manner, at a charge no more - than the cost of distribution to the recipient; and - -(b) You may distribute such Executable Form under the terms of this - License, or sublicense it under different terms, provided that the - license for the Executable Form does not attempt to limit or alter - the recipients' rights in the Source Code Form under this License. - -3.3. Distribution of a Larger Work - -You may create and distribute a Larger Work under terms of Your choice, -provided that You also comply with the requirements of this License for -the Covered Software. If the Larger Work is a combination of Covered -Software with a work governed by one or more Secondary Licenses, and the -Covered Software is not Incompatible With Secondary Licenses, this -License permits You to additionally distribute such Covered Software -under the terms of such Secondary License(s), so that the recipient of -the Larger Work may, at their option, further distribute the Covered -Software under the terms of either this License or such Secondary -License(s). - -3.4. Notices - -You may not remove or alter the substance of any license notices -(including copyright notices, patent notices, disclaimers of warranty, -or limitations of liability) contained within the Source Code Form of -the Covered Software, except that You may alter any license notices to -the extent required to remedy known factual inaccuracies. - -3.5. Application of Additional Terms - -You may choose to offer, and to charge a fee for, warranty, support, -indemnity or liability obligations to one or more recipients of Covered -Software. However, You may do so only on Your own behalf, and not on -behalf of any Contributor. You must make it absolutely clear that any -such warranty, support, indemnity, or liability obligation is offered by -You alone, and You hereby agree to indemnify every Contributor for any -liability incurred by such Contributor as a result of warranty, support, -indemnity or liability terms You offer. You may include additional -disclaimers of warranty and limitations of liability specific to any -jurisdiction. - -4. Inability to Comply Due to Statute or Regulation ---------------------------------------------------- - -If it is impossible for You to comply with any of the terms of this -License with respect to some or all of the Covered Software due to -statute, judicial order, or regulation then You must: (a) comply with -the terms of this License to the maximum extent possible; and (b) -describe the limitations and the code they affect. Such description must -be placed in a text file included with all distributions of the Covered -Software under this License. Except to the extent prohibited by statute -or regulation, such description must be sufficiently detailed for a -recipient of ordinary skill to be able to understand it. - -5. Termination --------------- - -5.1. 
The rights granted under this License will terminate automatically -if You fail to comply with any of its terms. However, if You become -compliant, then the rights granted under this License from a particular -Contributor are reinstated (a) provisionally, unless and until such -Contributor explicitly and finally terminates Your grants, and (b) on an -ongoing basis, if such Contributor fails to notify You of the -non-compliance by some reasonable means prior to 60 days after You have -come back into compliance. Moreover, Your grants from a particular -Contributor are reinstated on an ongoing basis if such Contributor -notifies You of the non-compliance by some reasonable means, this is the -first time You have received notice of non-compliance with this License -from such Contributor, and You become compliant prior to 30 days after -Your receipt of the notice. - -5.2. If You initiate litigation against any entity by asserting a patent -infringement claim (excluding declaratory judgment actions, -counter-claims, and cross-claims) alleging that a Contributor Version -directly or indirectly infringes any patent, then the rights granted to -You by any and all Contributors for the Covered Software under Section -2.1 of this License shall terminate. - -5.3. In the event of termination under Sections 5.1 or 5.2 above, all -end user license agreements (excluding distributors and resellers) which -have been validly granted by You or Your distributors under this License -prior to termination shall survive termination. - -************************************************************************ -* * -* 6. Disclaimer of Warranty * -* ------------------------- * -* * -* Covered Software is provided under this License on an "as is" * -* basis, without warranty of any kind, either expressed, implied, or * -* statutory, including, without limitation, warranties that the * -* Covered Software is free of defects, merchantable, fit for a * -* particular purpose or non-infringing. The entire risk as to the * -* quality and performance of the Covered Software is with You. * -* Should any Covered Software prove defective in any respect, You * -* (not any Contributor) assume the cost of any necessary servicing, * -* repair, or correction. This disclaimer of warranty constitutes an * -* essential part of this License. No use of any Covered Software is * -* authorized under this License except under this disclaimer. * -* * -************************************************************************ - -************************************************************************ -* * -* 7. Limitation of Liability * -* -------------------------- * -* * -* Under no circumstances and under no legal theory, whether tort * -* (including negligence), contract, or otherwise, shall any * -* Contributor, or anyone who distributes Covered Software as * -* permitted above, be liable to You for any direct, indirect, * -* special, incidental, or consequential damages of any character * -* including, without limitation, damages for lost profits, loss of * -* goodwill, work stoppage, computer failure or malfunction, or any * -* and all other commercial damages or losses, even if such party * -* shall have been informed of the possibility of such damages. This * -* limitation of liability shall not apply to liability for death or * -* personal injury resulting from such party's negligence to the * -* extent applicable law prohibits such limitation. 
Some * -* jurisdictions do not allow the exclusion or limitation of * -* incidental or consequential damages, so this exclusion and * -* limitation may not apply to You. * -* * -************************************************************************ - -8. Litigation -------------- - -Any litigation relating to this License may be brought only in the -courts of a jurisdiction where the defendant maintains its principal -place of business and such litigation shall be governed by laws of that -jurisdiction, without reference to its conflict-of-law provisions. -Nothing in this Section shall prevent a party's ability to bring -cross-claims or counter-claims. - -9. Miscellaneous ----------------- - -This License represents the complete agreement concerning the subject -matter hereof. If any provision of this License is held to be -unenforceable, such provision shall be reformed only to the extent -necessary to make it enforceable. Any law or regulation which provides -that the language of a contract shall be construed against the drafter -shall not be used to construe this License against a Contributor. - -10. Versions of the License ---------------------------- - -10.1. New Versions - -Mozilla Foundation is the license steward. Except as provided in Section -10.3, no one other than the license steward has the right to modify or -publish new versions of this License. Each version will be given a -distinguishing version number. - -10.2. Effect of New Versions - -You may distribute the Covered Software under the terms of the version -of the License under which You originally received the Covered Software, -or under the terms of any subsequent version published by the license -steward. - -10.3. Modified Versions - -If you create software not governed by this License, and you want to -create a new license for such software, you may create and use a -modified version of this License if you rename the license and remove -any references to the name of the license steward (except to note that -such modified license differs from this License). - -10.4. Distributing Source Code Form that is Incompatible With Secondary -Licenses - -If You choose to distribute Source Code Form that is Incompatible With -Secondary Licenses under the terms of this version of the License, the -notice described in Exhibit B of this License must be attached. - -Exhibit A - Source Code Form License Notice -------------------------------------------- - - This Source Code Form is subject to the terms of the Mozilla Public - License, v. 2.0. If a copy of the MPL was not distributed with this - file, You can obtain one at http://mozilla.org/MPL/2.0/. - -If it is not possible or desirable to put the notice in a particular -file, then You may include the notice in a location (such as a LICENSE -file in a relevant directory) where a recipient would be likely to look -for such a notice. - -You may add additional accurate notices of copyright ownership. - -Exhibit B - "Incompatible With Secondary Licenses" Notice ---------------------------------------------------------- - - This Source Code Form is "Incompatible With Secondary Licenses", as - defined by the Mozilla Public License, v. 2.0. 
diff --git a/vendor/go-simpler.org/sloglint/README.md b/vendor/go-simpler.org/sloglint/README.md deleted file mode 100644 index fc4d9debb..000000000 --- a/vendor/go-simpler.org/sloglint/README.md +++ /dev/null @@ -1,165 +0,0 @@ -# sloglint - -[![checks](https://github.com/go-simpler/sloglint/actions/workflows/checks.yml/badge.svg)](https://github.com/go-simpler/sloglint/actions/workflows/checks.yml) -[![pkg.go.dev](https://pkg.go.dev/badge/go-simpler.org/sloglint.svg)](https://pkg.go.dev/go-simpler.org/sloglint) -[![goreportcard](https://goreportcard.com/badge/go-simpler.org/sloglint)](https://goreportcard.com/report/go-simpler.org/sloglint) -[![codecov](https://codecov.io/gh/go-simpler/sloglint/branch/main/graph/badge.svg)](https://codecov.io/gh/go-simpler/sloglint) - -A Go linter that ensures consistent code style when using `log/slog`. - -## 📌 About - -The `log/slog` API allows two different types of arguments: key-value pairs and attributes. -While people may have different opinions about which one is better, most seem to agree on one thing: it should be consistent. -With `sloglint` you can enforce various rules for `log/slog` based on your preferred code style. - -## 🚀 Features - -* Enforce not mixing key-value pairs and attributes (default) -* Enforce using either key-value pairs only or attributes only (optional) -* Enforce using methods that accept a context (optional) -* Enforce using static log messages (optional) -* Enforce using constants instead of raw keys (optional) -* Enforce a single key naming convention (optional) -* Enforce putting arguments on separate lines (optional) - -## 📦 Install - -`sloglint` is integrated into [`golangci-lint`][1], and this is the recommended way to use it. - -To enable the linter, add the following lines to `.golangci.yml`: - -```yaml -linters: - enable: - - sloglint -``` - -Alternatively, you can download a prebuilt binary from the [Releases][2] page to use `sloglint` standalone. - -## 📋 Usage - -Run `golangci-lint` with `sloglint` enabled. -See the list of [available options][3] to configure the linter. - -When using `sloglint` standalone, pass the options as flags of the same name. - -### No mixed arguments - -The `no-mixed-args` option causes `sloglint` to report mixing key-values pairs and attributes within a single function call: - -```go -slog.Info("a user has logged in", "user_id", 42, slog.String("ip_address", "192.0.2.0")) // sloglint: key-value pairs and attributes should not be mixed -``` - -It is enabled by default. - -### Key-value pairs only - -The `kv-only` option causes `sloglint` to report any use of attributes: - -```go -slog.Info("a user has logged in", slog.Int("user_id", 42)) // sloglint: attributes should not be used -``` - -### Attributes only - -In contrast, the `attr-only` option causes `sloglint` to report any use of key-value pairs: - -```go -slog.Info("a user has logged in", "user_id", 42) // sloglint: key-value pairs should not be used -``` - -### Context only - -Some `slog.Handler` implementations make use of the given `context.Context` (e.g. to access context values). -For them to work properly, you need to pass a context to all logger calls. 
-The `context-only` option causes `sloglint` to report the use of methods without a context: - -```go -slog.Info("a user has logged in") // sloglint: methods without a context should not be used -``` - -This report can be fixed by using the equivalent method with the `Context` suffix: - -```go -slog.InfoContext(ctx, "a user has logged in") -``` - -### Static messages - -To get the most out of structured logging, you may want to require log messages to be static. -The `static-msg` option causes `sloglint` to report non-static messages: - -```go -slog.Info(fmt.Sprintf("a user with id %d has logged in", 42)) // sloglint: message should be a string literal or a constant -``` - -The report can be fixed by moving dynamic values to arguments: - -```go -slog.Info("a user has logged in", "user_id", 42) -``` - -### No raw keys - -To prevent typos, you may want to forbid the use of raw keys altogether. -The `no-raw-keys` option causes `sloglint` to report the use of strings as keys -(including `slog.Attr` calls, e.g. `slog.Int("user_id", 42)`): - -```go -slog.Info("a user has logged in", "user_id", 42) // sloglint: raw keys should not be used -``` - -This report can be fixed by using either constants... - -```go -const UserId = "user_id" - -slog.Info("a user has logged in", UserId, 42) -``` - -...or custom `slog.Attr` constructors: - -```go -func UserId(value int) slog.Attr { return slog.Int("user_id", value) } - -slog.Info("a user has logged in", UserId(42)) -``` - -> [!TIP] -> Such helpers can be automatically generated for you by the [`sloggen`][4] tool. Give it a try too! - -### Key naming convention - -To ensure consistency in logs, you may want to enforce a single key naming convention. -The `key-naming-case` option causes `sloglint` to report keys written in a case other than the given one: - -```go -slog.Info("a user has logged in", "user-id", 42) // sloglint: keys should be written in snake_case -``` - -Possible values are `snake`, `kebab`, `camel`, or `pascal`. - -### Arguments on separate lines - -To improve code readability, you may want to put arguments on separate lines, especially when using key-value pairs. -The `args-on-sep-lines` option causes `sloglint` to report 2+ arguments on the same line: - -```go -slog.Info("a user has logged in", "user_id", 42, "ip_address", "192.0.2.0") // sloglint: arguments should be put on separate lines -``` - -This report can be fixed by reformatting the code: - -```go -slog.Info("a user has logged in", - "user_id", 42, - "ip_address", "192.0.2.0", -) -``` - -[1]: https://golangci-lint.run -[2]: https://github.com/go-simpler/sloglint/releases -[3]: https://golangci-lint.run/usage/linters/#sloglint -[4]: https://github.com/go-simpler/sloggen diff --git a/vendor/go-simpler.org/sloglint/sloglint.go b/vendor/go-simpler.org/sloglint/sloglint.go deleted file mode 100644 index b7f72ce48..000000000 --- a/vendor/go-simpler.org/sloglint/sloglint.go +++ /dev/null @@ -1,347 +0,0 @@ -// Package sloglint implements the sloglint analyzer. -package sloglint - -import ( - "errors" - "flag" - "fmt" - "go/ast" - "go/token" - "go/types" - "strconv" - - "github.com/ettle/strcase" - "golang.org/x/tools/go/analysis" - "golang.org/x/tools/go/analysis/passes/inspect" - "golang.org/x/tools/go/ast/inspector" - "golang.org/x/tools/go/types/typeutil" -) - -// Options are options for the sloglint analyzer. -type Options struct { - NoMixedArgs bool // Enforce not mixing key-value pairs and attributes (default true). 
- KVOnly bool // Enforce using key-value pairs only (overrides NoMixedArgs, incompatible with AttrOnly). - AttrOnly bool // Enforce using attributes only (overrides NoMixedArgs, incompatible with KVOnly). - ContextOnly bool // Enforce using methods that accept a context. - StaticMsg bool // Enforce using static log messages. - NoRawKeys bool // Enforce using constants instead of raw keys. - KeyNamingCase string // Enforce a single key naming convention ("snake", "kebab", "camel", or "pascal"). - ArgsOnSepLines bool // Enforce putting arguments on separate lines. -} - -// New creates a new sloglint analyzer. -func New(opts *Options) *analysis.Analyzer { - if opts == nil { - opts = &Options{NoMixedArgs: true} - } - return &analysis.Analyzer{ - Name: "sloglint", - Doc: "ensure consistent code style when using log/slog", - Flags: flags(opts), - Requires: []*analysis.Analyzer{inspect.Analyzer}, - Run: func(pass *analysis.Pass) (any, error) { - if opts.KVOnly && opts.AttrOnly { - return nil, fmt.Errorf("sloglint: Options.KVOnly and Options.AttrOnly: %w", errIncompatible) - } - switch opts.KeyNamingCase { - case "", snakeCase, kebabCase, camelCase, pascalCase: - default: - return nil, fmt.Errorf("sloglint: Options.KeyNamingCase=%s: %w", opts.KeyNamingCase, errInvalidValue) - } - run(pass, opts) - return nil, nil - }, - } -} - -var ( - errIncompatible = errors.New("incompatible options") - errInvalidValue = errors.New("invalid value") -) - -func flags(opts *Options) flag.FlagSet { - fs := flag.NewFlagSet("sloglint", flag.ContinueOnError) - - boolVar := func(value *bool, name, usage string) { - fs.Func(name, usage, func(s string) error { - v, err := strconv.ParseBool(s) - *value = v - return err - }) - } - - boolVar(&opts.NoMixedArgs, "no-mixed-args", "enforce not mixing key-value pairs and attributes (default true)") - boolVar(&opts.KVOnly, "kv-only", "enforce using key-value pairs only (overrides -no-mixed-args, incompatible with -attr-only)") - boolVar(&opts.AttrOnly, "attr-only", "enforce using attributes only (overrides -no-mixed-args, incompatible with -kv-only)") - boolVar(&opts.ContextOnly, "context-only", "enforce using methods that accept a context") - boolVar(&opts.StaticMsg, "static-msg", "enforce using static log messages") - boolVar(&opts.NoRawKeys, "no-raw-keys", "enforce using constants instead of raw keys") - boolVar(&opts.ArgsOnSepLines, "args-on-sep-lines", "enforce putting arguments on separate lines") - - fs.Func("key-naming-case", "enforce a single key naming convention (snake|kebab|camel|pascal)", func(s string) error { - opts.KeyNamingCase = s - return nil - }) - - return *fs -} - -var slogFuncs = map[string]int{ // funcName:argsPos - "log/slog.Log": 3, - "log/slog.Debug": 1, - "log/slog.Info": 1, - "log/slog.Warn": 1, - "log/slog.Error": 1, - "log/slog.DebugContext": 2, - "log/slog.InfoContext": 2, - "log/slog.WarnContext": 2, - "log/slog.ErrorContext": 2, - "(*log/slog.Logger).Log": 3, - "(*log/slog.Logger).Debug": 1, - "(*log/slog.Logger).Info": 1, - "(*log/slog.Logger).Warn": 1, - "(*log/slog.Logger).Error": 1, - "(*log/slog.Logger).DebugContext": 2, - "(*log/slog.Logger).InfoContext": 2, - "(*log/slog.Logger).WarnContext": 2, - "(*log/slog.Logger).ErrorContext": 2, -} - -var attrFuncs = map[string]struct{}{ - "log/slog.String": {}, - "log/slog.Int64": {}, - "log/slog.Int": {}, - "log/slog.Uint64": {}, - "log/slog.Float64": {}, - "log/slog.Bool": {}, - "log/slog.Time": {}, - "log/slog.Duration": {}, - "log/slog.Group": {}, - "log/slog.Any": {}, -} - -const ( - snakeCase 
= "snake" - kebabCase = "kebab" - camelCase = "camel" - pascalCase = "pascal" -) - -func run(pass *analysis.Pass, opts *Options) { - visit := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) - filter := []ast.Node{(*ast.CallExpr)(nil)} - - visit.Preorder(filter, func(node ast.Node) { - call := node.(*ast.CallExpr) - - fn := typeutil.StaticCallee(pass.TypesInfo, call) - if fn == nil { - return - } - - argsPos, ok := slogFuncs[fn.FullName()] - if !ok { - return - } - - if opts.ContextOnly { - typ := pass.TypesInfo.TypeOf(call.Args[0]) - if typ != nil && typ.String() != "context.Context" { - pass.Reportf(call.Pos(), "methods without a context should not be used") - } - } - if opts.StaticMsg && !staticMsg(call.Args[argsPos-1]) { - pass.Reportf(call.Pos(), "message should be a string literal or a constant") - } - - // NOTE: we assume that the arguments have already been validated by govet. - args := call.Args[argsPos:] - if len(args) == 0 { - return - } - - var keys []ast.Expr - var attrs []ast.Expr - - for i := 0; i < len(args); i++ { - typ := pass.TypesInfo.TypeOf(args[i]) - if typ == nil { - continue - } - switch typ.String() { - case "string": - keys = append(keys, args[i]) - i++ // skip the value. - case "log/slog.Attr": - attrs = append(attrs, args[i]) - } - } - - switch { - case opts.KVOnly && len(attrs) > 0: - pass.Reportf(call.Pos(), "attributes should not be used") - case opts.AttrOnly && len(attrs) < len(args): - pass.Reportf(call.Pos(), "key-value pairs should not be used") - case opts.NoMixedArgs && 0 < len(attrs) && len(attrs) < len(args): - pass.Reportf(call.Pos(), "key-value pairs and attributes should not be mixed") - } - - if opts.NoRawKeys && rawKeysUsed(pass.TypesInfo, keys, attrs) { - pass.Reportf(call.Pos(), "raw keys should not be used") - } - if opts.ArgsOnSepLines && argsOnSameLine(pass.Fset, call, keys, attrs) { - pass.Reportf(call.Pos(), "arguments should be put on separate lines") - } - - switch { - case opts.KeyNamingCase == snakeCase && badKeyNames(pass.TypesInfo, strcase.ToSnake, keys, attrs): - pass.Reportf(call.Pos(), "keys should be written in snake_case") - case opts.KeyNamingCase == kebabCase && badKeyNames(pass.TypesInfo, strcase.ToKebab, keys, attrs): - pass.Reportf(call.Pos(), "keys should be written in kebab-case") - case opts.KeyNamingCase == camelCase && badKeyNames(pass.TypesInfo, strcase.ToCamel, keys, attrs): - pass.Reportf(call.Pos(), "keys should be written in camelCase") - case opts.KeyNamingCase == pascalCase && badKeyNames(pass.TypesInfo, strcase.ToPascal, keys, attrs): - pass.Reportf(call.Pos(), "keys should be written in PascalCase") - } - }) -} - -func staticMsg(expr ast.Expr) bool { - switch msg := expr.(type) { - case *ast.BasicLit: // e.g. slog.Info("msg") - return msg.Kind == token.STRING - case *ast.Ident: // e.g. const msg = "msg"; slog.Info(msg) - return msg.Obj != nil && msg.Obj.Kind == ast.Con - default: - return false - } -} - -func rawKeysUsed(info *types.Info, keys, attrs []ast.Expr) bool { - isConst := func(expr ast.Expr) bool { - ident, ok := expr.(*ast.Ident) - return ok && ident.Obj != nil && ident.Obj.Kind == ast.Con - } - - for _, key := range keys { - if !isConst(key) { - return true - } - } - - for _, attr := range attrs { - switch attr := attr.(type) { - case *ast.CallExpr: // e.g. 
slog.Int() - fn := typeutil.StaticCallee(info, attr) - if _, ok := attrFuncs[fn.FullName()]; ok && !isConst(attr.Args[0]) { - return true - } - - case *ast.CompositeLit: // slog.Attr{} - isRawKey := func(kv *ast.KeyValueExpr) bool { - return kv.Key.(*ast.Ident).Name == "Key" && !isConst(kv.Value) - } - - switch len(attr.Elts) { - case 1: // slog.Attr{Key: ...} | slog.Attr{Value: ...} - kv := attr.Elts[0].(*ast.KeyValueExpr) - if isRawKey(kv) { - return true - } - case 2: // slog.Attr{..., ...} | slog.Attr{Key: ..., Value: ...} - kv1, ok := attr.Elts[0].(*ast.KeyValueExpr) - if ok { - kv2 := attr.Elts[1].(*ast.KeyValueExpr) - if isRawKey(kv1) || isRawKey(kv2) { - return true - } - } else if !isConst(attr.Elts[0]) { - return true - } - } - } - } - - return false -} - -func badKeyNames(info *types.Info, caseFn func(string) string, keys, attrs []ast.Expr) bool { - for _, key := range keys { - if name, ok := getKeyName(key); ok && name != caseFn(name) { - return true - } - } - - for _, attr := range attrs { - var expr ast.Expr - switch attr := attr.(type) { - case *ast.CallExpr: // e.g. slog.Int() - fn := typeutil.StaticCallee(info, attr) - if _, ok := attrFuncs[fn.FullName()]; ok { - expr = attr.Args[0] - } - case *ast.CompositeLit: // slog.Attr{} - switch len(attr.Elts) { - case 1: // slog.Attr{Key: ...} | slog.Attr{Value: ...} - if kv := attr.Elts[0].(*ast.KeyValueExpr); kv.Key.(*ast.Ident).Name == "Key" { - expr = kv.Value - } - case 2: // slog.Attr{..., ...} | slog.Attr{Key: ..., Value: ...} - expr = attr.Elts[0] - if kv1, ok := attr.Elts[0].(*ast.KeyValueExpr); ok && kv1.Key.(*ast.Ident).Name == "Key" { - expr = kv1.Value - } - if kv2, ok := attr.Elts[1].(*ast.KeyValueExpr); ok && kv2.Key.(*ast.Ident).Name == "Key" { - expr = kv2.Value - } - } - } - if name, ok := getKeyName(expr); ok && name != caseFn(name) { - return true - } - } - - return false -} - -func getKeyName(expr ast.Expr) (string, bool) { - if expr == nil { - return "", false - } - if ident, ok := expr.(*ast.Ident); ok { - if ident.Obj == nil || ident.Obj.Decl == nil || ident.Obj.Kind != ast.Con { - return "", false - } - if spec, ok := ident.Obj.Decl.(*ast.ValueSpec); ok && len(spec.Values) > 0 { - // TODO: support len(spec.Values) > 1; e.g. "const foo, bar = 1, 2" - expr = spec.Values[0] - } - } - if lit, ok := expr.(*ast.BasicLit); ok && lit.Kind == token.STRING { - return lit.Value, true - } - return "", false -} - -func argsOnSameLine(fset *token.FileSet, call ast.Expr, keys, attrs []ast.Expr) bool { - if len(keys)+len(attrs) <= 1 { - return false // special case: slog.Info("msg", "key", "value") is ok. - } - - l := len(keys) + len(attrs) + 1 - args := make([]ast.Expr, 0, l) - args = append(args, call) - args = append(args, keys...) - args = append(args, attrs...) 
- - lines := make(map[int]struct{}, l) - for _, arg := range args { - line := fset.Position(arg.Pos()).Line - if _, ok := lines[line]; ok { - return true - } - lines[line] = struct{}{} - } - - return false -} diff --git a/vendor/go.uber.org/multierr/.codecov.yml b/vendor/go.uber.org/multierr/.codecov.yml deleted file mode 100644 index 6d4d1be7b..000000000 --- a/vendor/go.uber.org/multierr/.codecov.yml +++ /dev/null @@ -1,15 +0,0 @@ -coverage: - range: 80..100 - round: down - precision: 2 - - status: - project: # measuring the overall project coverage - default: # context, you can create multiple ones with custom titles - enabled: yes # must be yes|true to enable this status - target: 100 # specify the target coverage for each commit status - # option: "auto" (must increase from parent commit or pull request base) - # option: "X%" a static target percentage to hit - if_not_found: success # if parent is not found report status as success, error, or failure - if_ci_failed: error # if ci fails report status as success, error, or failure - diff --git a/vendor/go.uber.org/multierr/.gitignore b/vendor/go.uber.org/multierr/.gitignore deleted file mode 100644 index b9a05e3da..000000000 --- a/vendor/go.uber.org/multierr/.gitignore +++ /dev/null @@ -1,4 +0,0 @@ -/vendor -cover.html -cover.out -/bin diff --git a/vendor/go.uber.org/multierr/CHANGELOG.md b/vendor/go.uber.org/multierr/CHANGELOG.md deleted file mode 100644 index f8177b978..000000000 --- a/vendor/go.uber.org/multierr/CHANGELOG.md +++ /dev/null @@ -1,95 +0,0 @@ -Releases -======== - -v1.11.0 (2023-03-28) -==================== -- `Errors` now supports any error that implements multiple-error - interface. -- Add `Every` function to allow checking if all errors in the chain - satisfies `errors.Is` against the target error. - -v1.10.0 (2023-03-08) -==================== - -- Comply with Go 1.20's multiple-error interface. -- Drop Go 1.18 support. - Per the support policy, only Go 1.19 and 1.20 are supported now. -- Drop all non-test external dependencies. - -v1.9.0 (2022-12-12) -=================== - -- Add `AppendFunc` that allow passsing functions to similar to - `AppendInvoke`. - -- Bump up yaml.v3 dependency to 3.0.1. - -v1.8.0 (2022-02-28) -=================== - -- `Combine`: perform zero allocations when there are no errors. - - -v1.7.0 (2021-05-06) -=================== - -- Add `AppendInvoke` to append into errors from `defer` blocks. - - -v1.6.0 (2020-09-14) -=================== - -- Actually drop library dependency on development-time tooling. - - -v1.5.0 (2020-02-24) -=================== - -- Drop library dependency on development-time tooling. - - -v1.4.0 (2019-11-04) -=================== - -- Add `AppendInto` function to more ergonomically build errors inside a - loop. - - -v1.3.0 (2019-10-29) -=================== - -- Switch to Go modules. - - -v1.2.0 (2019-09-26) -=================== - -- Support extracting and matching against wrapped errors with `errors.As` - and `errors.Is`. - - -v1.1.0 (2017-06-30) -=================== - -- Added an `Errors(error) []error` function to extract the underlying list of - errors for a multierr error. - - -v1.0.0 (2017-05-31) -=================== - -No changes since v0.2.0. This release is committing to making no breaking -changes to the current API in the 1.X series. - - -v0.2.0 (2017-04-11) -=================== - -- Repeatedly appending to the same error is now faster due to fewer - allocations. 
- - -v0.1.0 (2017-31-03) -=================== - -- Initial release diff --git a/vendor/go.uber.org/multierr/LICENSE.txt b/vendor/go.uber.org/multierr/LICENSE.txt deleted file mode 100644 index 413e30f7c..000000000 --- a/vendor/go.uber.org/multierr/LICENSE.txt +++ /dev/null @@ -1,19 +0,0 @@ -Copyright (c) 2017-2021 Uber Technologies, Inc. - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. diff --git a/vendor/go.uber.org/multierr/Makefile b/vendor/go.uber.org/multierr/Makefile deleted file mode 100644 index dcb6fe723..000000000 --- a/vendor/go.uber.org/multierr/Makefile +++ /dev/null @@ -1,38 +0,0 @@ -# Directory to put `go install`ed binaries in. -export GOBIN ?= $(shell pwd)/bin - -GO_FILES := $(shell \ - find . '(' -path '*/.*' -o -path './vendor' ')' -prune \ - -o -name '*.go' -print | cut -b3-) - -.PHONY: build -build: - go build ./... - -.PHONY: test -test: - go test -race ./... - -.PHONY: gofmt -gofmt: - $(eval FMT_LOG := $(shell mktemp -t gofmt.XXXXX)) - @gofmt -e -s -l $(GO_FILES) > $(FMT_LOG) || true - @[ ! -s "$(FMT_LOG)" ] || (echo "gofmt failed:" | cat - $(FMT_LOG) && false) - -.PHONY: golint -golint: - @cd tools && go install golang.org/x/lint/golint - @$(GOBIN)/golint ./... - -.PHONY: staticcheck -staticcheck: - @cd tools && go install honnef.co/go/tools/cmd/staticcheck - @$(GOBIN)/staticcheck ./... - -.PHONY: lint -lint: gofmt golint staticcheck - -.PHONY: cover -cover: - go test -race -coverprofile=cover.out -coverpkg=./... -v ./... - go tool cover -html=cover.out -o cover.html diff --git a/vendor/go.uber.org/multierr/README.md b/vendor/go.uber.org/multierr/README.md deleted file mode 100644 index 5ab6ac40f..000000000 --- a/vendor/go.uber.org/multierr/README.md +++ /dev/null @@ -1,43 +0,0 @@ -# multierr [![GoDoc][doc-img]][doc] [![Build Status][ci-img]][ci] [![Coverage Status][cov-img]][cov] - -`multierr` allows combining one or more Go `error`s together. - -## Features - -- **Idiomatic**: - multierr follows best practices in Go, and keeps your code idiomatic. - - It keeps the underlying error type hidden, - allowing you to deal in `error` values exclusively. - - It provides APIs to safely append into an error from a `defer` statement. -- **Performant**: - multierr is optimized for performance: - - It avoids allocations where possible. - - It utilizes slice resizing semantics to optimize common cases - like appending into the same error object from a loop. 
-- **Interoperable**: - multierr interoperates with the Go standard library's error APIs seamlessly: - - The `errors.Is` and `errors.As` functions *just work*. -- **Lightweight**: - multierr comes with virtually no dependencies. - -## Installation - -```bash -go get -u go.uber.org/multierr@latest -``` - -## Status - -Stable: No breaking changes will be made before 2.0. - -------------------------------------------------------------------------------- - -Released under the [MIT License]. - -[MIT License]: LICENSE.txt -[doc-img]: https://pkg.go.dev/badge/go.uber.org/multierr -[doc]: https://pkg.go.dev/go.uber.org/multierr -[ci-img]: https://github.com/uber-go/multierr/actions/workflows/go.yml/badge.svg -[cov-img]: https://codecov.io/gh/uber-go/multierr/branch/master/graph/badge.svg -[ci]: https://github.com/uber-go/multierr/actions/workflows/go.yml -[cov]: https://codecov.io/gh/uber-go/multierr diff --git a/vendor/go.uber.org/multierr/error.go b/vendor/go.uber.org/multierr/error.go deleted file mode 100644 index 3a828b2df..000000000 --- a/vendor/go.uber.org/multierr/error.go +++ /dev/null @@ -1,646 +0,0 @@ -// Copyright (c) 2017-2023 Uber Technologies, Inc. -// -// Permission is hereby granted, free of charge, to any person obtaining a copy -// of this software and associated documentation files (the "Software"), to deal -// in the Software without restriction, including without limitation the rights -// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -// copies of the Software, and to permit persons to whom the Software is -// furnished to do so, subject to the following conditions: -// -// The above copyright notice and this permission notice shall be included in -// all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -// THE SOFTWARE. - -// Package multierr allows combining one or more errors together. -// -// # Overview -// -// Errors can be combined with the use of the Combine function. -// -// multierr.Combine( -// reader.Close(), -// writer.Close(), -// conn.Close(), -// ) -// -// If only two errors are being combined, the Append function may be used -// instead. -// -// err = multierr.Append(reader.Close(), writer.Close()) -// -// The underlying list of errors for a returned error object may be retrieved -// with the Errors function. -// -// errors := multierr.Errors(err) -// if len(errors) > 0 { -// fmt.Println("The following errors occurred:", errors) -// } -// -// # Appending from a loop -// -// You sometimes need to append into an error from a loop. -// -// var err error -// for _, item := range items { -// err = multierr.Append(err, process(item)) -// } -// -// Cases like this may require knowledge of whether an individual instance -// failed. This usually requires introduction of a new variable. -// -// var err error -// for _, item := range items { -// if perr := process(item); perr != nil { -// log.Warn("skipping item", item) -// err = multierr.Append(err, perr) -// } -// } -// -// multierr includes AppendInto to simplify cases like this. 
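As a rough, self-contained sketch of the `Combine`/`Errors` API summarized above (the `failing` closer type is made up for this example), typical usage might look like:

```go
package main

import (
	"fmt"
	"io"

	"go.uber.org/multierr"
)

// failing is a stand-in closer used only for this sketch.
type failing struct{ name string }

func (f failing) Close() error { return fmt.Errorf("close %s: %w", f.name, io.ErrClosedPipe) }

func main() {
	// Combine skips nil values, so independent cleanup results can be
	// passed in directly.
	err := multierr.Combine(
		failing{"reader"}.Close(),
		nil, // a Close that succeeded
		failing{"writer"}.Close(),
	)

	// Errors exposes the underlying list for individual handling.
	for _, e := range multierr.Errors(err) {
		fmt.Println(e)
	}

	// %+v renders the combined error as a readable multi-line message.
	fmt.Printf("%+v\n", err)
}
```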
-// -// var err error -// for _, item := range items { -// if multierr.AppendInto(&err, process(item)) { -// log.Warn("skipping item", item) -// } -// } -// -// This will append the error into the err variable, and return true if that -// individual error was non-nil. -// -// See [AppendInto] for more information. -// -// # Deferred Functions -// -// Go makes it possible to modify the return value of a function in a defer -// block if the function was using named returns. This makes it possible to -// record resource cleanup failures from deferred blocks. -// -// func sendRequest(req Request) (err error) { -// conn, err := openConnection() -// if err != nil { -// return err -// } -// defer func() { -// err = multierr.Append(err, conn.Close()) -// }() -// // ... -// } -// -// multierr provides the Invoker type and AppendInvoke function to make cases -// like the above simpler and obviate the need for a closure. The following is -// roughly equivalent to the example above. -// -// func sendRequest(req Request) (err error) { -// conn, err := openConnection() -// if err != nil { -// return err -// } -// defer multierr.AppendInvoke(&err, multierr.Close(conn)) -// // ... -// } -// -// See [AppendInvoke] and [Invoker] for more information. -// -// NOTE: If you're modifying an error from inside a defer, you MUST use a named -// return value for that function. -// -// # Advanced Usage -// -// Errors returned by Combine and Append MAY implement the following -// interface. -// -// type errorGroup interface { -// // Returns a slice containing the underlying list of errors. -// // -// // This slice MUST NOT be modified by the caller. -// Errors() []error -// } -// -// Note that if you need access to list of errors behind a multierr error, you -// should prefer using the Errors function. That said, if you need cheap -// read-only access to the underlying errors slice, you can attempt to cast -// the error to this interface. You MUST handle the failure case gracefully -// because errors returned by Combine and Append are not guaranteed to -// implement this interface. -// -// var errors []error -// group, ok := err.(errorGroup) -// if ok { -// errors = group.Errors() -// } else { -// errors = []error{err} -// } -package multierr // import "go.uber.org/multierr" - -import ( - "bytes" - "errors" - "fmt" - "io" - "strings" - "sync" - "sync/atomic" -) - -var ( - // Separator for single-line error messages. - _singlelineSeparator = []byte("; ") - - // Prefix for multi-line messages - _multilinePrefix = []byte("the following errors occurred:") - - // Prefix for the first and following lines of an item in a list of - // multi-line error messages. - // - // For example, if a single item is: - // - // foo - // bar - // - // It will become, - // - // - foo - // bar - _multilineSeparator = []byte("\n - ") - _multilineIndent = []byte(" ") -) - -// _bufferPool is a pool of bytes.Buffers. -var _bufferPool = sync.Pool{ - New: func() interface{} { - return &bytes.Buffer{} - }, -} - -type errorGroup interface { - Errors() []error -} - -// Errors returns a slice containing zero or more errors that the supplied -// error is composed of. If the error is nil, a nil slice is returned. -// -// err := multierr.Append(r.Close(), w.Close()) -// errors := multierr.Errors(err) -// -// If the error is not composed of other errors, the returned slice contains -// just the error that was passed in. -// -// Callers of this function are free to modify the returned slice. 
-func Errors(err error) []error { - return extractErrors(err) -} - -// multiError is an error that holds one or more errors. -// -// An instance of this is guaranteed to be non-empty and flattened. That is, -// none of the errors inside multiError are other multiErrors. -// -// multiError formats to a semi-colon delimited list of error messages with -// %v and with a more readable multi-line format with %+v. -type multiError struct { - copyNeeded atomic.Bool - errors []error -} - -// Errors returns the list of underlying errors. -// -// This slice MUST NOT be modified. -func (merr *multiError) Errors() []error { - if merr == nil { - return nil - } - return merr.errors -} - -func (merr *multiError) Error() string { - if merr == nil { - return "" - } - - buff := _bufferPool.Get().(*bytes.Buffer) - buff.Reset() - - merr.writeSingleline(buff) - - result := buff.String() - _bufferPool.Put(buff) - return result -} - -// Every compares every error in the given err against the given target error -// using [errors.Is], and returns true only if every comparison returned true. -func Every(err error, target error) bool { - for _, e := range extractErrors(err) { - if !errors.Is(e, target) { - return false - } - } - return true -} - -func (merr *multiError) Format(f fmt.State, c rune) { - if c == 'v' && f.Flag('+') { - merr.writeMultiline(f) - } else { - merr.writeSingleline(f) - } -} - -func (merr *multiError) writeSingleline(w io.Writer) { - first := true - for _, item := range merr.errors { - if first { - first = false - } else { - w.Write(_singlelineSeparator) - } - io.WriteString(w, item.Error()) - } -} - -func (merr *multiError) writeMultiline(w io.Writer) { - w.Write(_multilinePrefix) - for _, item := range merr.errors { - w.Write(_multilineSeparator) - writePrefixLine(w, _multilineIndent, fmt.Sprintf("%+v", item)) - } -} - -// Writes s to the writer with the given prefix added before each line after -// the first. -func writePrefixLine(w io.Writer, prefix []byte, s string) { - first := true - for len(s) > 0 { - if first { - first = false - } else { - w.Write(prefix) - } - - idx := strings.IndexByte(s, '\n') - if idx < 0 { - idx = len(s) - 1 - } - - io.WriteString(w, s[:idx+1]) - s = s[idx+1:] - } -} - -type inspectResult struct { - // Number of top-level non-nil errors - Count int - - // Total number of errors including multiErrors - Capacity int - - // Index of the first non-nil error in the list. Value is meaningless if - // Count is zero. - FirstErrorIdx int - - // Whether the list contains at least one multiError - ContainsMultiError bool -} - -// Inspects the given slice of errors so that we can efficiently allocate -// space for it. -func inspect(errors []error) (res inspectResult) { - first := true - for i, err := range errors { - if err == nil { - continue - } - - res.Count++ - if first { - first = false - res.FirstErrorIdx = i - } - - if merr, ok := err.(*multiError); ok { - res.Capacity += len(merr.errors) - res.ContainsMultiError = true - } else { - res.Capacity++ - } - } - return -} - -// fromSlice converts the given list of errors into a single error. -func fromSlice(errors []error) error { - // Don't pay to inspect small slices. - switch len(errors) { - case 0: - return nil - case 1: - return errors[0] - } - - res := inspect(errors) - switch res.Count { - case 0: - return nil - case 1: - // only one non-nil entry - return errors[res.FirstErrorIdx] - case len(errors): - if !res.ContainsMultiError { - // Error list is flat. 
Make a copy of it - // Otherwise "errors" escapes to the heap - // unconditionally for all other cases. - // This lets us optimize for the "no errors" case. - out := append(([]error)(nil), errors...) - return &multiError{errors: out} - } - } - - nonNilErrs := make([]error, 0, res.Capacity) - for _, err := range errors[res.FirstErrorIdx:] { - if err == nil { - continue - } - - if nested, ok := err.(*multiError); ok { - nonNilErrs = append(nonNilErrs, nested.errors...) - } else { - nonNilErrs = append(nonNilErrs, err) - } - } - - return &multiError{errors: nonNilErrs} -} - -// Combine combines the passed errors into a single error. -// -// If zero arguments were passed or if all items are nil, a nil error is -// returned. -// -// Combine(nil, nil) // == nil -// -// If only a single error was passed, it is returned as-is. -// -// Combine(err) // == err -// -// Combine skips over nil arguments so this function may be used to combine -// together errors from operations that fail independently of each other. -// -// multierr.Combine( -// reader.Close(), -// writer.Close(), -// pipe.Close(), -// ) -// -// If any of the passed errors is a multierr error, it will be flattened along -// with the other errors. -// -// multierr.Combine(multierr.Combine(err1, err2), err3) -// // is the same as -// multierr.Combine(err1, err2, err3) -// -// The returned error formats into a readable multi-line error message if -// formatted with %+v. -// -// fmt.Sprintf("%+v", multierr.Combine(err1, err2)) -func Combine(errors ...error) error { - return fromSlice(errors) -} - -// Append appends the given errors together. Either value may be nil. -// -// This function is a specialization of Combine for the common case where -// there are only two errors. -// -// err = multierr.Append(reader.Close(), writer.Close()) -// -// The following pattern may also be used to record failure of deferred -// operations without losing information about the original error. -// -// func doSomething(..) (err error) { -// f := acquireResource() -// defer func() { -// err = multierr.Append(err, f.Close()) -// }() -// -// Note that the variable MUST be a named return to append an error to it from -// the defer statement. See also [AppendInvoke]. -func Append(left error, right error) error { - switch { - case left == nil: - return right - case right == nil: - return left - } - - if _, ok := right.(*multiError); !ok { - if l, ok := left.(*multiError); ok && !l.copyNeeded.Swap(true) { - // Common case where the error on the left is constantly being - // appended to. - errs := append(l.errors, right) - return &multiError{errors: errs} - } else if !ok { - // Both errors are single errors. - return &multiError{errors: []error{left, right}} - } - } - - // Either right or both, left and right, are multiErrors. Rely on usual - // expensive logic. - errors := [2]error{left, right} - return fromSlice(errors[0:]) -} - -// AppendInto appends an error into the destination of an error pointer and -// returns whether the error being appended was non-nil. -// -// var err error -// multierr.AppendInto(&err, r.Close()) -// multierr.AppendInto(&err, w.Close()) -// -// The above is equivalent to, -// -// err := multierr.Append(r.Close(), w.Close()) -// -// As AppendInto reports whether the provided error was non-nil, it may be -// used to build a multierr error in a loop more ergonomically. 
For example: -// -// var err error -// for line := range lines { -// var item Item -// if multierr.AppendInto(&err, parse(line, &item)) { -// continue -// } -// items = append(items, item) -// } -// -// Compare this with a version that relies solely on Append: -// -// var err error -// for line := range lines { -// var item Item -// if parseErr := parse(line, &item); parseErr != nil { -// err = multierr.Append(err, parseErr) -// continue -// } -// items = append(items, item) -// } -func AppendInto(into *error, err error) (errored bool) { - if into == nil { - // We panic if 'into' is nil. This is not documented above - // because suggesting that the pointer must be non-nil may - // confuse users into thinking that the error that it points - // to must be non-nil. - panic("misuse of multierr.AppendInto: into pointer must not be nil") - } - - if err == nil { - return false - } - *into = Append(*into, err) - return true -} - -// Invoker is an operation that may fail with an error. Use it with -// AppendInvoke to append the result of calling the function into an error. -// This allows you to conveniently defer capture of failing operations. -// -// See also, [Close] and [Invoke]. -type Invoker interface { - Invoke() error -} - -// Invoke wraps a function which may fail with an error to match the Invoker -// interface. Use it to supply functions matching this signature to -// AppendInvoke. -// -// For example, -// -// func processReader(r io.Reader) (err error) { -// scanner := bufio.NewScanner(r) -// defer multierr.AppendInvoke(&err, multierr.Invoke(scanner.Err)) -// for scanner.Scan() { -// // ... -// } -// // ... -// } -// -// In this example, the following line will construct the Invoker right away, -// but defer the invocation of scanner.Err() until the function returns. -// -// defer multierr.AppendInvoke(&err, multierr.Invoke(scanner.Err)) -// -// Note that the error you're appending to from the defer statement MUST be a -// named return. -type Invoke func() error - -// Invoke calls the supplied function and returns its result. -func (i Invoke) Invoke() error { return i() } - -// Close builds an Invoker that closes the provided io.Closer. Use it with -// AppendInvoke to close io.Closers and append their results into an error. -// -// For example, -// -// func processFile(path string) (err error) { -// f, err := os.Open(path) -// if err != nil { -// return err -// } -// defer multierr.AppendInvoke(&err, multierr.Close(f)) -// return processReader(f) -// } -// -// In this example, multierr.Close will construct the Invoker right away, but -// defer the invocation of f.Close until the function returns. -// -// defer multierr.AppendInvoke(&err, multierr.Close(f)) -// -// Note that the error you're appending to from the defer statement MUST be a -// named return. -func Close(closer io.Closer) Invoker { - return Invoke(closer.Close) -} - -// AppendInvoke appends the result of calling the given Invoker into the -// provided error pointer. Use it with named returns to safely defer -// invocation of fallible operations until a function returns, and capture the -// resulting errors. -// -// func doSomething(...) (err error) { -// // ... -// f, err := openFile(..) -// if err != nil { -// return err -// } -// -// // multierr will call f.Close() when this function returns and -// // if the operation fails, its append its error into the -// // returned error. 
-// defer multierr.AppendInvoke(&err, multierr.Close(f)) -// -// scanner := bufio.NewScanner(f) -// // Similarly, this scheduled scanner.Err to be called and -// // inspected when the function returns and append its error -// // into the returned error. -// defer multierr.AppendInvoke(&err, multierr.Invoke(scanner.Err)) -// -// // ... -// } -// -// NOTE: If used with a defer, the error variable MUST be a named return. -// -// Without defer, AppendInvoke behaves exactly like AppendInto. -// -// err := // ... -// multierr.AppendInvoke(&err, mutltierr.Invoke(foo)) -// -// // ...is roughly equivalent to... -// -// err := // ... -// multierr.AppendInto(&err, foo()) -// -// The advantage of the indirection introduced by Invoker is to make it easy -// to defer the invocation of a function. Without this indirection, the -// invoked function will be evaluated at the time of the defer block rather -// than when the function returns. -// -// // BAD: This is likely not what the caller intended. This will evaluate -// // foo() right away and append its result into the error when the -// // function returns. -// defer multierr.AppendInto(&err, foo()) -// -// // GOOD: This will defer invocation of foo unutil the function returns. -// defer multierr.AppendInvoke(&err, multierr.Invoke(foo)) -// -// multierr provides a few Invoker implementations out of the box for -// convenience. See [Invoker] for more information. -func AppendInvoke(into *error, invoker Invoker) { - AppendInto(into, invoker.Invoke()) -} - -// AppendFunc is a shorthand for [AppendInvoke]. -// It allows using function or method value directly -// without having to wrap it into an [Invoker] interface. -// -// func doSomething(...) (err error) { -// w, err := startWorker(...) -// if err != nil { -// return err -// } -// -// // multierr will call w.Stop() when this function returns and -// // if the operation fails, it appends its error into the -// // returned error. -// defer multierr.AppendFunc(&err, w.Stop) -// } -func AppendFunc(into *error, fn func() error) { - AppendInvoke(into, Invoke(fn)) -} diff --git a/vendor/go.uber.org/multierr/error_post_go120.go b/vendor/go.uber.org/multierr/error_post_go120.go deleted file mode 100644 index a173f9c25..000000000 --- a/vendor/go.uber.org/multierr/error_post_go120.go +++ /dev/null @@ -1,48 +0,0 @@ -// Copyright (c) 2017-2023 Uber Technologies, Inc. -// -// Permission is hereby granted, free of charge, to any person obtaining a copy -// of this software and associated documentation files (the "Software"), to deal -// in the Software without restriction, including without limitation the rights -// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -// copies of the Software, and to permit persons to whom the Software is -// furnished to do so, subject to the following conditions: -// -// The above copyright notice and this permission notice shall be included in -// all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -// THE SOFTWARE. 
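Before the build-tagged files below, here is a brief illustrative sketch of the deferred `AppendInvoke`/`Close`/`Invoke` pattern documented in `error.go` above (the `countLines` helper and the `go.mod` path are invented for this example):

```go
package main

import (
	"bufio"
	"fmt"
	"os"

	"go.uber.org/multierr"
)

// countLines shows the deferred-append pattern: Close and scanner failures
// are appended into the named return value without hand-written closures.
func countLines(path string) (n int, err error) {
	f, err := os.Open(path)
	if err != nil {
		return 0, err
	}
	// Invoked when countLines returns; any Close error is appended into err.
	defer multierr.AppendInvoke(&err, multierr.Close(f))

	scanner := bufio.NewScanner(f)
	// Invoked when countLines returns; any scanner error is appended into err.
	defer multierr.AppendInvoke(&err, multierr.Invoke(scanner.Err))

	for scanner.Scan() {
		n++
	}
	return n, err
}

func main() {
	n, err := countLines("go.mod")
	fmt.Println(n, err)
}
```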
- -//go:build go1.20 -// +build go1.20 - -package multierr - -// Unwrap returns a list of errors wrapped by this multierr. -func (merr *multiError) Unwrap() []error { - return merr.Errors() -} - -type multipleErrors interface { - Unwrap() []error -} - -func extractErrors(err error) []error { - if err == nil { - return nil - } - - // check if the given err is an Unwrapable error that - // implements multipleErrors interface. - eg, ok := err.(multipleErrors) - if !ok { - return []error{err} - } - - return append(([]error)(nil), eg.Unwrap()...) -} diff --git a/vendor/go.uber.org/multierr/error_pre_go120.go b/vendor/go.uber.org/multierr/error_pre_go120.go deleted file mode 100644 index 93872a3fc..000000000 --- a/vendor/go.uber.org/multierr/error_pre_go120.go +++ /dev/null @@ -1,79 +0,0 @@ -// Copyright (c) 2017-2023 Uber Technologies, Inc. -// -// Permission is hereby granted, free of charge, to any person obtaining a copy -// of this software and associated documentation files (the "Software"), to deal -// in the Software without restriction, including without limitation the rights -// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -// copies of the Software, and to permit persons to whom the Software is -// furnished to do so, subject to the following conditions: -// -// The above copyright notice and this permission notice shall be included in -// all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -// THE SOFTWARE. - -//go:build !go1.20 -// +build !go1.20 - -package multierr - -import "errors" - -// Versions of Go before 1.20 did not support the Unwrap() []error method. -// This provides a similar behavior by implementing the Is(..) and As(..) -// methods. -// See the errors.Join proposal for details: -// https://github.com/golang/go/issues/53435 - -// As attempts to find the first error in the error list that matches the type -// of the value that target points to. -// -// This function allows errors.As to traverse the values stored on the -// multierr error. -func (merr *multiError) As(target interface{}) bool { - for _, err := range merr.Errors() { - if errors.As(err, target) { - return true - } - } - return false -} - -// Is attempts to match the provided error against errors in the error list. -// -// This function allows errors.Is to traverse the values stored on the -// multierr error. -func (merr *multiError) Is(target error) bool { - for _, err := range merr.Errors() { - if errors.Is(err, target) { - return true - } - } - return false -} - -func extractErrors(err error) []error { - if err == nil { - return nil - } - - // Note that we're casting to multiError, not errorGroup. Our contract is - // that returned errors MAY implement errorGroup. Errors, however, only - // has special behavior for multierr-specific error objects. - // - // This behavior can be expanded in the future but I think it's prudent to - // start with as little as possible in terms of contract and possibility - // of misuse. 
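A hedged sketch of the standard-library interop described above: on Go 1.20+ the traversal flows through `Unwrap() []error`, and on earlier versions through the `Is`/`As` methods (the missing file name used here is arbitrary):

```go
package main

import (
	"errors"
	"fmt"
	"io/fs"
	"os"

	"go.uber.org/multierr"
)

func main() {
	// Two independent failures combined into a single error value.
	_, statErr := os.Stat("does-not-exist")
	err := multierr.Append(statErr, os.ErrClosed)

	// errors.Is checks every error held by the combined value.
	fmt.Println(errors.Is(err, os.ErrNotExist)) // true
	fmt.Println(errors.Is(err, os.ErrClosed))   // true

	// errors.As finds the first matching concrete type in the list.
	var pathErr *fs.PathError
	if errors.As(err, &pathErr) {
		fmt.Println(pathErr.Path) // does-not-exist
	}
}
```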
- eg, ok := err.(*multiError) - if !ok { - return []error{err} - } - - return append(([]error)(nil), eg.Errors()...) -} diff --git a/vendor/go.uber.org/zap/.codecov.yml b/vendor/go.uber.org/zap/.codecov.yml deleted file mode 100644 index 8e5ca7d3e..000000000 --- a/vendor/go.uber.org/zap/.codecov.yml +++ /dev/null @@ -1,17 +0,0 @@ -coverage: - range: 80..100 - round: down - precision: 2 - - status: - project: # measuring the overall project coverage - default: # context, you can create multiple ones with custom titles - enabled: yes # must be yes|true to enable this status - target: 95% # specify the target coverage for each commit status - # option: "auto" (must increase from parent commit or pull request base) - # option: "X%" a static target percentage to hit - if_not_found: success # if parent is not found report status as success, error, or failure - if_ci_failed: error # if ci fails report status as success, error, or failure -ignore: - - internal/readme/readme.go - diff --git a/vendor/go.uber.org/zap/.gitignore b/vendor/go.uber.org/zap/.gitignore deleted file mode 100644 index da9d9d00b..000000000 --- a/vendor/go.uber.org/zap/.gitignore +++ /dev/null @@ -1,32 +0,0 @@ -# Compiled Object files, Static and Dynamic libs (Shared Objects) -*.o -*.a -*.so - -# Folders -_obj -_test -vendor - -# Architecture specific extensions/prefixes -*.[568vq] -[568vq].out - -*.cgo1.go -*.cgo2.c -_cgo_defun.c -_cgo_gotypes.go -_cgo_export.* - -_testmain.go - -*.exe -*.test -*.prof -*.pprof -*.out -*.log - -/bin -cover.out -cover.html diff --git a/vendor/go.uber.org/zap/.golangci.yml b/vendor/go.uber.org/zap/.golangci.yml deleted file mode 100644 index 2346df135..000000000 --- a/vendor/go.uber.org/zap/.golangci.yml +++ /dev/null @@ -1,77 +0,0 @@ -output: - # Make output more digestible with quickfix in vim/emacs/etc. - sort-results: true - print-issued-lines: false - -linters: - # We'll track the golangci-lint default linters manually - # instead of letting them change without our control. - disable-all: true - enable: - # golangci-lint defaults: - - errcheck - - gosimple - - govet - - ineffassign - - staticcheck - - unused - - # Our own extras: - - gofumpt - - nolintlint # lints nolint directives - - revive - -linters-settings: - govet: - # These govet checks are disabled by default, but they're useful. - enable: - - niliness - - reflectvaluecompare - - sortslice - - unusedwrite - - errcheck: - exclude-functions: - # These methods can not fail. - # They operate on an in-memory buffer. - - (*go.uber.org/zap/buffer.Buffer).Write - - (*go.uber.org/zap/buffer.Buffer).WriteByte - - (*go.uber.org/zap/buffer.Buffer).WriteString - - - (*go.uber.org/zap/zapio.Writer).Close - - (*go.uber.org/zap/zapio.Writer).Sync - - (*go.uber.org/zap/zapio.Writer).Write - # Write to zapio.Writer cannot fail, - # so io.WriteString on it cannot fail. - - io.WriteString(*go.uber.org/zap/zapio.Writer) - - # Writing a plain string to a fmt.State cannot fail. - - io.WriteString(fmt.State) - -issues: - # Print all issues reported by all linters. - max-issues-per-linter: 0 - max-same-issues: 0 - - # Don't ignore some of the issues that golangci-lint considers okay. - # This includes documenting all exported entities. - exclude-use-default: false - - exclude-rules: - # Don't warn on unused parameters. - # Parameter names are useful; replacing them with '_' is undesirable. 
- - linters: [revive] - text: 'unused-parameter: parameter \S+ seems to be unused, consider removing or renaming it as _' - - # staticcheck already has smarter checks for empty blocks. - # revive's empty-block linter has false positives. - # For example, as of writing this, the following is not allowed. - # for foo() { } - - linters: [revive] - text: 'empty-block: this block is empty, you can remove it' - - # Ignore logger.Sync() errcheck failures in example_test.go - # since those are intended to be uncomplicated examples. - - linters: [errcheck] - path: example_test.go - text: 'Error return value of `logger.Sync` is not checked' diff --git a/vendor/go.uber.org/zap/.readme.tmpl b/vendor/go.uber.org/zap/.readme.tmpl deleted file mode 100644 index 4fea3027a..000000000 --- a/vendor/go.uber.org/zap/.readme.tmpl +++ /dev/null @@ -1,117 +0,0 @@ -# :zap: zap [![GoDoc][doc-img]][doc] [![Build Status][ci-img]][ci] [![Coverage Status][cov-img]][cov] - -
- -Blazing fast, structured, leveled logging in Go. - -![Zap logo](assets/logo.png) - -[![GoDoc][doc-img]][doc] [![Build Status][ci-img]][ci] [![Coverage Status][cov-img]][cov] - -
- -## Installation - -`go get -u go.uber.org/zap` - -Note that zap only supports the two most recent minor versions of Go. - -## Quick Start - -In contexts where performance is nice, but not critical, use the -`SugaredLogger`. It's 4-10x faster than other structured logging -packages and includes both structured and `printf`-style APIs. - -```go -logger, _ := zap.NewProduction() -defer logger.Sync() // flushes buffer, if any -sugar := logger.Sugar() -sugar.Infow("failed to fetch URL", - // Structured context as loosely typed key-value pairs. - "url", url, - "attempt", 3, - "backoff", time.Second, -) -sugar.Infof("Failed to fetch URL: %s", url) -``` - -When performance and type safety are critical, use the `Logger`. It's even -faster than the `SugaredLogger` and allocates far less, but it only supports -structured logging. - -```go -logger, _ := zap.NewProduction() -defer logger.Sync() -logger.Info("failed to fetch URL", - // Structured context as strongly typed Field values. - zap.String("url", url), - zap.Int("attempt", 3), - zap.Duration("backoff", time.Second), -) -``` - -See the [documentation][doc] and [FAQ](FAQ.md) for more details. - -## Performance - -For applications that log in the hot path, reflection-based serialization and -string formatting are prohibitively expensive — they're CPU-intensive -and make many small allocations. Put differently, using `encoding/json` and -`fmt.Fprintf` to log tons of `interface{}`s makes your application slow. - -Zap takes a different approach. It includes a reflection-free, zero-allocation -JSON encoder, and the base `Logger` strives to avoid serialization overhead -and allocations wherever possible. By building the high-level `SugaredLogger` -on that foundation, zap lets users *choose* when they need to count every -allocation and when they'd prefer a more familiar, loosely typed API. - -As measured by its own [benchmarking suite][], not only is zap more performant -than comparable structured logging packages — it's also faster than the -standard library. Like all benchmarks, take these with a grain of salt.[1](#footnote-versions) - -Log a message and 10 fields: - -{{.BenchmarkAddingFields}} - -Log a message with a logger that already has 10 fields of context: - -{{.BenchmarkAccumulatedContext}} - -Log a static string, without any context or `printf`-style templating: - -{{.BenchmarkWithoutFields}} - -## Development Status: Stable - -All APIs are finalized, and no breaking changes will be made in the 1.x series -of releases. Users of semver-aware dependency management systems should pin -zap to `^1`. - -## Contributing - -We encourage and support an active, healthy community of contributors — -including you! Details are in the [contribution guide](CONTRIBUTING.md) and -the [code of conduct](CODE_OF_CONDUCT.md). The zap maintainers keep an eye on -issues and pull requests, but you can also report any negative conduct to -oss-conduct@uber.com. That email list is a private, safe space; even the zap -maintainers don't have access, so don't hesitate to hold us to a high -standard. - -
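As a minimal sketch of the reflection-free approach described in the Performance section above (the `user` type is invented for this example), a custom `zapcore.ObjectMarshaler` keeps even struct-valued fields on the allocation-free path:

```go
package main

import (
	"time"

	"go.uber.org/zap"
	"go.uber.org/zap/zapcore"
)

// user implements zapcore.ObjectMarshaler, so the strongly typed Logger can
// encode it without falling back to reflection.
type user struct {
	Name      string
	CreatedAt time.Time
}

func (u user) MarshalLogObject(enc zapcore.ObjectEncoder) error {
	enc.AddString("name", u.Name)
	enc.AddTime("created_at", u.CreatedAt)
	return nil
}

func main() {
	logger := zap.Must(zap.NewProduction())
	defer logger.Sync()

	logger.Info("user signed up",
		zap.Object("user", user{Name: "jane", CreatedAt: time.Now()}),
		zap.Int("attempt", 1),
	)
}
```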
- -Released under the [MIT License](LICENSE). - -1 In particular, keep in mind that we may be -benchmarking against slightly older versions of other packages. Versions are -pinned in the [benchmarks/go.mod][] file. [↩](#anchor-versions) - -[doc-img]: https://pkg.go.dev/badge/go.uber.org/zap -[doc]: https://pkg.go.dev/go.uber.org/zap -[ci-img]: https://github.com/uber-go/zap/actions/workflows/go.yml/badge.svg -[ci]: https://github.com/uber-go/zap/actions/workflows/go.yml -[cov-img]: https://codecov.io/gh/uber-go/zap/branch/master/graph/badge.svg -[cov]: https://codecov.io/gh/uber-go/zap -[benchmarking suite]: https://github.com/uber-go/zap/tree/master/benchmarks -[benchmarks/go.mod]: https://github.com/uber-go/zap/blob/master/benchmarks/go.mod - diff --git a/vendor/go.uber.org/zap/CHANGELOG.md b/vendor/go.uber.org/zap/CHANGELOG.md deleted file mode 100644 index 6d6cd5f4d..000000000 --- a/vendor/go.uber.org/zap/CHANGELOG.md +++ /dev/null @@ -1,687 +0,0 @@ -# Changelog -All notable changes to this project will be documented in this file. - -This project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). - -## 1.27.0 (20 Feb 2024) -Enhancements: -* [#1378][]: Add `WithLazy` method for `SugaredLogger`. -* [#1399][]: zaptest: Add `NewTestingWriter` for customizing TestingWriter with more flexibility than `NewLogger`. -* [#1406][]: Add `Log`, `Logw`, `Logln` methods for `SugaredLogger`. -* [#1416][]: Add `WithPanicHook` option for testing panic logs. - -Thanks to @defval, @dimmo, @arxeiss, and @MKrupauskas for their contributions to this release. - -[#1378]: https://github.com/uber-go/zap/pull/1378 -[#1399]: https://github.com/uber-go/zap/pull/1399 -[#1406]: https://github.com/uber-go/zap/pull/1406 -[#1416]: https://github.com/uber-go/zap/pull/1416 - -## 1.26.0 (14 Sep 2023) -Enhancements: -* [#1297][]: Add Dict as a Field. -* [#1319][]: Add `WithLazy` method to `Logger` which lazily evaluates the structured -context. -* [#1350][]: String encoding is much (~50%) faster now. - -Thanks to @hhk7734, @jquirke, and @cdvr1993 for their contributions to this release. - -[#1297]: https://github.com/uber-go/zap/pull/1297 -[#1319]: https://github.com/uber-go/zap/pull/1319 -[#1350]: https://github.com/uber-go/zap/pull/1350 - -## 1.25.0 (1 Aug 2023) - -This release contains several improvements including performance, API additions, -and two new experimental packages whose APIs are unstable and may change in the -future. - -Enhancements: -* [#1246][]: Add `zap/exp/zapslog` package for integration with slog. -* [#1273][]: Add `Name` to `Logger` which returns the Logger's name if one is set. -* [#1281][]: Add `zap/exp/expfield` package which contains helper methods -`Str` and `Strs` for constructing String-like zap.Fields. -* [#1310][]: Reduce stack size on `Any`. - -Thanks to @knight42, @dzakaammar, @bcspragu, and @rexywork for their contributions -to this release. - -[#1246]: https://github.com/uber-go/zap/pull/1246 -[#1273]: https://github.com/uber-go/zap/pull/1273 -[#1281]: https://github.com/uber-go/zap/pull/1281 -[#1310]: https://github.com/uber-go/zap/pull/1310 - -## 1.24.0 (30 Nov 2022) - -Enhancements: -* [#1148][]: Add `Level` to both `Logger` and `SugaredLogger` that reports the - current minimum enabled log level. -* [#1185][]: `SugaredLogger` turns errors to zap.Error automatically. - -Thanks to @Abirdcfly, @craigpastro, @nnnkkk7, and @sashamelentyev for their -contributions to this release. 
- -[#1148]: https://github.coml/uber-go/zap/pull/1148 -[#1185]: https://github.coml/uber-go/zap/pull/1185 - -## 1.23.0 (24 Aug 2022) - -Enhancements: -* [#1147][]: Add a `zapcore.LevelOf` function to determine the level of a - `LevelEnabler` or `Core`. -* [#1155][]: Add `zap.Stringers` field constructor to log arrays of objects - that implement `String() string`. - -[#1147]: https://github.com/uber-go/zap/pull/1147 -[#1155]: https://github.com/uber-go/zap/pull/1155 - -## 1.22.0 (8 Aug 2022) - -Enhancements: -* [#1071][]: Add `zap.Objects` and `zap.ObjectValues` field constructors to log - arrays of objects. With these two constructors, you don't need to implement - `zapcore.ArrayMarshaler` for use with `zap.Array` if those objects implement - `zapcore.ObjectMarshaler`. -* [#1079][]: Add `SugaredLogger.WithOptions` to build a copy of an existing - `SugaredLogger` with the provided options applied. -* [#1080][]: Add `*ln` variants to `SugaredLogger` for each log level. - These functions provide a string joining behavior similar to `fmt.Println`. -* [#1088][]: Add `zap.WithFatalHook` option to control the behavior of the - logger for `Fatal`-level log entries. This defaults to exiting the program. -* [#1108][]: Add a `zap.Must` function that you can use with `NewProduction` or - `NewDevelopment` to panic if the system was unable to build the logger. -* [#1118][]: Add a `Logger.Log` method that allows specifying the log level for - a statement dynamically. - -Thanks to @cardil, @craigpastro, @sashamelentyev, @shota3506, and @zhupeijun -for their contributions to this release. - -[#1071]: https://github.com/uber-go/zap/pull/1071 -[#1079]: https://github.com/uber-go/zap/pull/1079 -[#1080]: https://github.com/uber-go/zap/pull/1080 -[#1088]: https://github.com/uber-go/zap/pull/1088 -[#1108]: https://github.com/uber-go/zap/pull/1108 -[#1118]: https://github.com/uber-go/zap/pull/1118 - -## 1.21.0 (7 Feb 2022) - -Enhancements: -* [#1047][]: Add `zapcore.ParseLevel` to parse a `Level` from a string. -* [#1048][]: Add `zap.ParseAtomicLevel` to parse an `AtomicLevel` from a - string. - -Bugfixes: -* [#1058][]: Fix panic in JSON encoder when `EncodeLevel` is unset. - -Other changes: -* [#1052][]: Improve encoding performance when the `AddCaller` and - `AddStacktrace` options are used together. - -[#1047]: https://github.com/uber-go/zap/pull/1047 -[#1048]: https://github.com/uber-go/zap/pull/1048 -[#1052]: https://github.com/uber-go/zap/pull/1052 -[#1058]: https://github.com/uber-go/zap/pull/1058 - -Thanks to @aerosol and @Techassi for their contributions to this release. - -## 1.20.0 (4 Jan 2022) - -Enhancements: -* [#989][]: Add `EncoderConfig.SkipLineEnding` flag to disable adding newline - characters between log statements. -* [#1039][]: Add `EncoderConfig.NewReflectedEncoder` field to customize JSON - encoding of reflected log fields. - -Bugfixes: -* [#1011][]: Fix inaccurate precision when encoding complex64 as JSON. -* [#554][], [#1017][]: Close JSON namespaces opened in `MarshalLogObject` - methods when the methods return. -* [#1033][]: Avoid panicking in Sampler core if `thereafter` is zero. - -Other changes: -* [#1028][]: Drop support for Go < 1.15. 
- -[#554]: https://github.com/uber-go/zap/pull/554 -[#989]: https://github.com/uber-go/zap/pull/989 -[#1011]: https://github.com/uber-go/zap/pull/1011 -[#1017]: https://github.com/uber-go/zap/pull/1017 -[#1028]: https://github.com/uber-go/zap/pull/1028 -[#1033]: https://github.com/uber-go/zap/pull/1033 -[#1039]: https://github.com/uber-go/zap/pull/1039 - -Thanks to @psrajat, @lruggieri, @sammyrnycreal for their contributions to this release. - -## 1.19.1 (8 Sep 2021) - -Bugfixes: -* [#1001][]: JSON: Fix complex number encoding with negative imaginary part. Thanks to @hemantjadon. -* [#1003][]: JSON: Fix inaccurate precision when encoding float32. - -[#1001]: https://github.com/uber-go/zap/pull/1001 -[#1003]: https://github.com/uber-go/zap/pull/1003 - -## 1.19.0 (9 Aug 2021) - -Enhancements: -* [#975][]: Avoid panicking in Sampler core if the level is out of bounds. -* [#984][]: Reduce the size of BufferedWriteSyncer by aligning the fields - better. - -[#975]: https://github.com/uber-go/zap/pull/975 -[#984]: https://github.com/uber-go/zap/pull/984 - -Thanks to @lancoLiu and @thockin for their contributions to this release. - -## 1.18.1 (28 Jun 2021) - -Bugfixes: -* [#974][]: Fix nil dereference in logger constructed by `zap.NewNop`. - -[#974]: https://github.com/uber-go/zap/pull/974 - -## 1.18.0 (28 Jun 2021) - -Enhancements: -* [#961][]: Add `zapcore.BufferedWriteSyncer`, a new `WriteSyncer` that buffers - messages in-memory and flushes them periodically. -* [#971][]: Add `zapio.Writer` to use a Zap logger as an `io.Writer`. -* [#897][]: Add `zap.WithClock` option to control the source of time via the - new `zapcore.Clock` interface. -* [#949][]: Avoid panicking in `zap.SugaredLogger` when arguments of `*w` - methods don't match expectations. -* [#943][]: Add support for filtering by level or arbitrary matcher function to - `zaptest/observer`. -* [#691][]: Comply with `io.StringWriter` and `io.ByteWriter` in Zap's - `buffer.Buffer`. - -Thanks to @atrn0, @ernado, @heyanfu, @hnlq715, @zchee -for their contributions to this release. - -[#691]: https://github.com/uber-go/zap/pull/691 -[#897]: https://github.com/uber-go/zap/pull/897 -[#943]: https://github.com/uber-go/zap/pull/943 -[#949]: https://github.com/uber-go/zap/pull/949 -[#961]: https://github.com/uber-go/zap/pull/961 -[#971]: https://github.com/uber-go/zap/pull/971 - -## 1.17.0 (25 May 2021) - -Bugfixes: -* [#867][]: Encode `` for nil `error` instead of a panic. -* [#931][], [#936][]: Update minimum version constraints to address - vulnerabilities in dependencies. - -Enhancements: -* [#865][]: Improve alignment of fields of the Logger struct, reducing its - size from 96 to 80 bytes. -* [#881][]: Support `grpclog.LoggerV2` in zapgrpc. -* [#903][]: Support URL-encoded POST requests to the AtomicLevel HTTP handler - with the `application/x-www-form-urlencoded` content type. -* [#912][]: Support multi-field encoding with `zap.Inline`. -* [#913][]: Speed up SugaredLogger for calls with a single string. -* [#928][]: Add support for filtering by field name to `zaptest/observer`. - -Thanks to @ash2k, @FMLS, @jimmystewpot, @Oncilla, @tsoslow, @tylitianrui, @withshubh, and @wziww for their contributions to this release. 
- -[#865]: https://github.com/uber-go/zap/pull/865 -[#867]: https://github.com/uber-go/zap/pull/867 -[#881]: https://github.com/uber-go/zap/pull/881 -[#903]: https://github.com/uber-go/zap/pull/903 -[#912]: https://github.com/uber-go/zap/pull/912 -[#913]: https://github.com/uber-go/zap/pull/913 -[#928]: https://github.com/uber-go/zap/pull/928 -[#931]: https://github.com/uber-go/zap/pull/931 -[#936]: https://github.com/uber-go/zap/pull/936 - -## 1.16.0 (1 Sep 2020) - -Bugfixes: -* [#828][]: Fix missing newline in IncreaseLevel error messages. -* [#835][]: Fix panic in JSON encoder when encoding times or durations - without specifying a time or duration encoder. -* [#843][]: Honor CallerSkip when taking stack traces. -* [#862][]: Fix the default file permissions to use `0666` and rely on the umask instead. -* [#854][]: Encode `` for nil `Stringer` instead of a panic error log. - -Enhancements: -* [#629][]: Added `zapcore.TimeEncoderOfLayout` to easily create time encoders - for custom layouts. -* [#697][]: Added support for a configurable delimiter in the console encoder. -* [#852][]: Optimize console encoder by pooling the underlying JSON encoder. -* [#844][]: Add ability to include the calling function as part of logs. -* [#843][]: Add `StackSkip` for including truncated stacks as a field. -* [#861][]: Add options to customize Fatal behaviour for better testability. - -Thanks to @SteelPhase, @tmshn, @lixingwang, @wyxloading, @moul, @segevfiner, @andy-retailnext and @jcorbin for their contributions to this release. - -[#629]: https://github.com/uber-go/zap/pull/629 -[#697]: https://github.com/uber-go/zap/pull/697 -[#828]: https://github.com/uber-go/zap/pull/828 -[#835]: https://github.com/uber-go/zap/pull/835 -[#843]: https://github.com/uber-go/zap/pull/843 -[#844]: https://github.com/uber-go/zap/pull/844 -[#852]: https://github.com/uber-go/zap/pull/852 -[#854]: https://github.com/uber-go/zap/pull/854 -[#861]: https://github.com/uber-go/zap/pull/861 -[#862]: https://github.com/uber-go/zap/pull/862 - -## 1.15.0 (23 Apr 2020) - -Bugfixes: -* [#804][]: Fix handling of `Time` values out of `UnixNano` range. -* [#812][]: Fix `IncreaseLevel` being reset after a call to `With`. - -Enhancements: -* [#806][]: Add `WithCaller` option to supersede the `AddCaller` option. This - allows disabling annotation of log entries with caller information if - previously enabled with `AddCaller`. -* [#813][]: Deprecate `NewSampler` constructor in favor of - `NewSamplerWithOptions` which supports a `SamplerHook` option. This option - adds support for monitoring sampling decisions through a hook. - -Thanks to @danielbprice for their contributions to this release. - -[#804]: https://github.com/uber-go/zap/pull/804 -[#812]: https://github.com/uber-go/zap/pull/812 -[#806]: https://github.com/uber-go/zap/pull/806 -[#813]: https://github.com/uber-go/zap/pull/813 - -## 1.14.1 (14 Mar 2020) - -Bugfixes: -* [#791][]: Fix panic on attempting to build a logger with an invalid Config. -* [#795][]: Vendoring Zap with `go mod vendor` no longer includes Zap's - development-time dependencies. -* [#799][]: Fix issue introduced in 1.14.0 that caused invalid JSON output to - be generated for arrays of `time.Time` objects when using string-based time - formats. - -Thanks to @YashishDua for their contributions to this release. 
- -[#791]: https://github.com/uber-go/zap/pull/791 -[#795]: https://github.com/uber-go/zap/pull/795 -[#799]: https://github.com/uber-go/zap/pull/799 - -## 1.14.0 (20 Feb 2020) - -Enhancements: -* [#771][]: Optimize calls for disabled log levels. -* [#773][]: Add millisecond duration encoder. -* [#775][]: Add option to increase the level of a logger. -* [#786][]: Optimize time formatters using `Time.AppendFormat` where possible. - -Thanks to @caibirdme for their contributions to this release. - -[#771]: https://github.com/uber-go/zap/pull/771 -[#773]: https://github.com/uber-go/zap/pull/773 -[#775]: https://github.com/uber-go/zap/pull/775 -[#786]: https://github.com/uber-go/zap/pull/786 - -## 1.13.0 (13 Nov 2019) - -Enhancements: -* [#758][]: Add `Intp`, `Stringp`, and other similar `*p` field constructors - to log pointers to primitives with support for `nil` values. - -Thanks to @jbizzle for their contributions to this release. - -[#758]: https://github.com/uber-go/zap/pull/758 - -## 1.12.0 (29 Oct 2019) - -Enhancements: -* [#751][]: Migrate to Go modules. - -[#751]: https://github.com/uber-go/zap/pull/751 - -## 1.11.0 (21 Oct 2019) - -Enhancements: -* [#725][]: Add `zapcore.OmitKey` to omit keys in an `EncoderConfig`. -* [#736][]: Add `RFC3339` and `RFC3339Nano` time encoders. - -Thanks to @juicemia, @uhthomas for their contributions to this release. - -[#725]: https://github.com/uber-go/zap/pull/725 -[#736]: https://github.com/uber-go/zap/pull/736 - -## 1.10.0 (29 Apr 2019) - -Bugfixes: -* [#657][]: Fix `MapObjectEncoder.AppendByteString` not adding value as a - string. -* [#706][]: Fix incorrect call depth to determine caller in Go 1.12. - -Enhancements: -* [#610][]: Add `zaptest.WrapOptions` to wrap `zap.Option` for creating test - loggers. -* [#675][]: Don't panic when encoding a String field. -* [#704][]: Disable HTML escaping for JSON objects encoded using the - reflect-based encoder. - -Thanks to @iaroslav-ciupin, @lelenanam, @joa, @NWilson for their contributions -to this release. - -[#657]: https://github.com/uber-go/zap/pull/657 -[#706]: https://github.com/uber-go/zap/pull/706 -[#610]: https://github.com/uber-go/zap/pull/610 -[#675]: https://github.com/uber-go/zap/pull/675 -[#704]: https://github.com/uber-go/zap/pull/704 - -## 1.9.1 (06 Aug 2018) - -Bugfixes: - -* [#614][]: MapObjectEncoder should not ignore empty slices. - -[#614]: https://github.com/uber-go/zap/pull/614 - -## 1.9.0 (19 Jul 2018) - -Enhancements: -* [#602][]: Reduce number of allocations when logging with reflection. -* [#572][], [#606][]: Expose a registry for third-party logging sinks. - -Thanks to @nfarah86, @AlekSi, @JeanMertz, @philippgille, @etsangsplk, and -@dimroc for their contributions to this release. - -[#602]: https://github.com/uber-go/zap/pull/602 -[#572]: https://github.com/uber-go/zap/pull/572 -[#606]: https://github.com/uber-go/zap/pull/606 - -## 1.8.0 (13 Apr 2018) - -Enhancements: -* [#508][]: Make log level configurable when redirecting the standard - library's logger. -* [#518][]: Add a logger that writes to a `*testing.TB`. -* [#577][]: Add a top-level alias for `zapcore.Field` to clean up GoDoc. - -Bugfixes: -* [#574][]: Add a missing import comment to `go.uber.org/zap/buffer`. - -Thanks to @DiSiqueira and @djui for their contributions to this release. 
- -[#508]: https://github.com/uber-go/zap/pull/508 -[#518]: https://github.com/uber-go/zap/pull/518 -[#577]: https://github.com/uber-go/zap/pull/577 -[#574]: https://github.com/uber-go/zap/pull/574 - -## 1.7.1 (25 Sep 2017) - -Bugfixes: -* [#504][]: Store strings when using AddByteString with the map encoder. - -[#504]: https://github.com/uber-go/zap/pull/504 - -## 1.7.0 (21 Sep 2017) - -Enhancements: - -* [#487][]: Add `NewStdLogAt`, which extends `NewStdLog` by allowing the user - to specify the level of the logged messages. - -[#487]: https://github.com/uber-go/zap/pull/487 - -## 1.6.0 (30 Aug 2017) - -Enhancements: - -* [#491][]: Omit zap stack frames from stacktraces. -* [#490][]: Add a `ContextMap` method to observer logs for simpler - field validation in tests. - -[#490]: https://github.com/uber-go/zap/pull/490 -[#491]: https://github.com/uber-go/zap/pull/491 - -## 1.5.0 (22 Jul 2017) - -Enhancements: - -* [#460][] and [#470][]: Support errors produced by `go.uber.org/multierr`. -* [#465][]: Support user-supplied encoders for logger names. - -Bugfixes: - -* [#477][]: Fix a bug that incorrectly truncated deep stacktraces. - -Thanks to @richard-tunein and @pavius for their contributions to this release. - -[#477]: https://github.com/uber-go/zap/pull/477 -[#465]: https://github.com/uber-go/zap/pull/465 -[#460]: https://github.com/uber-go/zap/pull/460 -[#470]: https://github.com/uber-go/zap/pull/470 - -## 1.4.1 (08 Jun 2017) - -This release fixes two bugs. - -Bugfixes: - -* [#435][]: Support a variety of case conventions when unmarshaling levels. -* [#444][]: Fix a panic in the observer. - -[#435]: https://github.com/uber-go/zap/pull/435 -[#444]: https://github.com/uber-go/zap/pull/444 - -## 1.4.0 (12 May 2017) - -This release adds a few small features and is fully backward-compatible. - -Enhancements: - -* [#424][]: Add a `LineEnding` field to `EncoderConfig`, allowing users to - override the Unix-style default. -* [#425][]: Preserve time zones when logging times. -* [#431][]: Make `zap.AtomicLevel` implement `fmt.Stringer`, which makes a - variety of operations a bit simpler. - -[#424]: https://github.com/uber-go/zap/pull/424 -[#425]: https://github.com/uber-go/zap/pull/425 -[#431]: https://github.com/uber-go/zap/pull/431 - -## 1.3.0 (25 Apr 2017) - -This release adds an enhancement to zap's testing helpers as well as the -ability to marshal an AtomicLevel. It is fully backward-compatible. - -Enhancements: - -* [#415][]: Add a substring-filtering helper to zap's observer. This is - particularly useful when testing the `SugaredLogger`. -* [#416][]: Make `AtomicLevel` implement `encoding.TextMarshaler`. - -[#415]: https://github.com/uber-go/zap/pull/415 -[#416]: https://github.com/uber-go/zap/pull/416 - -## 1.2.0 (13 Apr 2017) - -This release adds a gRPC compatibility wrapper. It is fully backward-compatible. - -Enhancements: - -* [#402][]: Add a `zapgrpc` package that wraps zap's Logger and implements - `grpclog.Logger`. - -[#402]: https://github.com/uber-go/zap/pull/402 - -## 1.1.0 (31 Mar 2017) - -This release fixes two bugs and adds some enhancements to zap's testing helpers. -It is fully backward-compatible. - -Bugfixes: - -* [#385][]: Fix caller path trimming on Windows. -* [#396][]: Fix a panic when attempting to use non-existent directories with - zap's configuration struct. - -Enhancements: - -* [#386][]: Add filtering helpers to zaptest's observing logger. - -Thanks to @moitias for contributing to this release. 
- -[#385]: https://github.com/uber-go/zap/pull/385 -[#396]: https://github.com/uber-go/zap/pull/396 -[#386]: https://github.com/uber-go/zap/pull/386 - -## 1.0.0 (14 Mar 2017) - -This is zap's first stable release. All exported APIs are now final, and no -further breaking changes will be made in the 1.x release series. Anyone using a -semver-aware dependency manager should now pin to `^1`. - -Breaking changes: - -* [#366][]: Add byte-oriented APIs to encoders to log UTF-8 encoded text without - casting from `[]byte` to `string`. -* [#364][]: To support buffering outputs, add `Sync` methods to `zapcore.Core`, - `zap.Logger`, and `zap.SugaredLogger`. -* [#371][]: Rename the `testutils` package to `zaptest`, which is less likely to - clash with other testing helpers. - -Bugfixes: - -* [#362][]: Make the ISO8601 time formatters fixed-width, which is friendlier - for tab-separated console output. -* [#369][]: Remove the automatic locks in `zapcore.NewCore`, which allows zap to - work with concurrency-safe `WriteSyncer` implementations. -* [#347][]: Stop reporting errors when trying to `fsync` standard out on Linux - systems. -* [#373][]: Report the correct caller from zap's standard library - interoperability wrappers. - -Enhancements: - -* [#348][]: Add a registry allowing third-party encodings to work with zap's - built-in `Config`. -* [#327][]: Make the representation of logger callers configurable (like times, - levels, and durations). -* [#376][]: Allow third-party encoders to use their own buffer pools, which - removes the last performance advantage that zap's encoders have over plugins. -* [#346][]: Add `CombineWriteSyncers`, a convenience function to tee multiple - `WriteSyncer`s and lock the result. -* [#365][]: Make zap's stacktraces compatible with mid-stack inlining (coming in - Go 1.9). -* [#372][]: Export zap's observing logger as `zaptest/observer`. This makes it - easier for particularly punctilious users to unit test their application's - logging. - -Thanks to @suyash, @htrendev, @flisky, @Ulexus, and @skipor for their -contributions to this release. - -[#366]: https://github.com/uber-go/zap/pull/366 -[#364]: https://github.com/uber-go/zap/pull/364 -[#371]: https://github.com/uber-go/zap/pull/371 -[#362]: https://github.com/uber-go/zap/pull/362 -[#369]: https://github.com/uber-go/zap/pull/369 -[#347]: https://github.com/uber-go/zap/pull/347 -[#373]: https://github.com/uber-go/zap/pull/373 -[#348]: https://github.com/uber-go/zap/pull/348 -[#327]: https://github.com/uber-go/zap/pull/327 -[#376]: https://github.com/uber-go/zap/pull/376 -[#346]: https://github.com/uber-go/zap/pull/346 -[#365]: https://github.com/uber-go/zap/pull/365 -[#372]: https://github.com/uber-go/zap/pull/372 - -## 1.0.0-rc.3 (7 Mar 2017) - -This is the third release candidate for zap's stable release. There are no -breaking changes. - -Bugfixes: - -* [#339][]: Byte slices passed to `zap.Any` are now correctly treated as binary blobs - rather than `[]uint8`. - -Enhancements: - -* [#307][]: Users can opt into colored output for log levels. -* [#353][]: In addition to hijacking the output of the standard library's - package-global logging functions, users can now construct a zap-backed - `log.Logger` instance. -* [#311][]: Frames from common runtime functions and some of zap's internal - machinery are now omitted from stacktraces. - -Thanks to @ansel1 and @suyash for their contributions to this release. 
- -[#339]: https://github.com/uber-go/zap/pull/339 -[#307]: https://github.com/uber-go/zap/pull/307 -[#353]: https://github.com/uber-go/zap/pull/353 -[#311]: https://github.com/uber-go/zap/pull/311 - -## 1.0.0-rc.2 (21 Feb 2017) - -This is the second release candidate for zap's stable release. It includes two -breaking changes. - -Breaking changes: - -* [#316][]: Zap's global loggers are now fully concurrency-safe - (previously, users had to ensure that `ReplaceGlobals` was called before the - loggers were in use). However, they must now be accessed via the `L()` and - `S()` functions. Users can update their projects with - - ``` - gofmt -r "zap.L -> zap.L()" -w . - gofmt -r "zap.S -> zap.S()" -w . - ``` -* [#309][] and [#317][]: RC1 was mistakenly shipped with invalid - JSON and YAML struct tags on all config structs. This release fixes the tags - and adds static analysis to prevent similar bugs in the future. - -Bugfixes: - -* [#321][]: Redirecting the standard library's `log` output now - correctly reports the logger's caller. - -Enhancements: - -* [#325][] and [#333][]: Zap now transparently supports non-standard, rich - errors like those produced by `github.com/pkg/errors`. -* [#326][]: Though `New(nil)` continues to return a no-op logger, `NewNop()` is - now preferred. Users can update their projects with `gofmt -r 'zap.New(nil) -> - zap.NewNop()' -w .`. -* [#300][]: Incorrectly importing zap as `github.com/uber-go/zap` now returns a - more informative error. - -Thanks to @skipor and @chapsuk for their contributions to this release. - -[#316]: https://github.com/uber-go/zap/pull/316 -[#309]: https://github.com/uber-go/zap/pull/309 -[#317]: https://github.com/uber-go/zap/pull/317 -[#321]: https://github.com/uber-go/zap/pull/321 -[#325]: https://github.com/uber-go/zap/pull/325 -[#333]: https://github.com/uber-go/zap/pull/333 -[#326]: https://github.com/uber-go/zap/pull/326 -[#300]: https://github.com/uber-go/zap/pull/300 - -## 1.0.0-rc.1 (14 Feb 2017) - -This is the first release candidate for zap's stable release. There are multiple -breaking changes and improvements from the pre-release version. Most notably: - -* **Zap's import path is now "go.uber.org/zap"** — all users will - need to update their code. -* User-facing types and functions remain in the `zap` package. Code relevant - largely to extension authors is now in the `zapcore` package. -* The `zapcore.Core` type makes it easy for third-party packages to use zap's - internals but provide a different user-facing API. -* `Logger` is now a concrete type instead of an interface. -* A less verbose (though slower) logging API is included by default. -* Package-global loggers `L` and `S` are included. -* A human-friendly console encoder is included. -* A declarative config struct allows common logger configurations to be managed - as configuration instead of code. -* Sampling is more accurate, and doesn't depend on the standard library's shared - timer heap. - -## 0.1.0-beta.1 (6 Feb 2017) - -This is a minor version, tagged to allow users to pin to the pre-1.0 APIs and -upgrade at their leisure. Since this is the first tagged release, there are no -backward compatibility concerns and all functionality is new. - -Early zap adopters should pin to the 0.1.x minor version until they're ready to -upgrade to the upcoming stable release. 
diff --git a/vendor/go.uber.org/zap/CODE_OF_CONDUCT.md b/vendor/go.uber.org/zap/CODE_OF_CONDUCT.md deleted file mode 100644 index e327d9aa5..000000000 --- a/vendor/go.uber.org/zap/CODE_OF_CONDUCT.md +++ /dev/null @@ -1,75 +0,0 @@ -# Contributor Covenant Code of Conduct - -## Our Pledge - -In the interest of fostering an open and welcoming environment, we as -contributors and maintainers pledge to making participation in our project and -our community a harassment-free experience for everyone, regardless of age, -body size, disability, ethnicity, gender identity and expression, level of -experience, nationality, personal appearance, race, religion, or sexual -identity and orientation. - -## Our Standards - -Examples of behavior that contributes to creating a positive environment -include: - -* Using welcoming and inclusive language -* Being respectful of differing viewpoints and experiences -* Gracefully accepting constructive criticism -* Focusing on what is best for the community -* Showing empathy towards other community members - -Examples of unacceptable behavior by participants include: - -* The use of sexualized language or imagery and unwelcome sexual attention or - advances -* Trolling, insulting/derogatory comments, and personal or political attacks -* Public or private harassment -* Publishing others' private information, such as a physical or electronic - address, without explicit permission -* Other conduct which could reasonably be considered inappropriate in a - professional setting - -## Our Responsibilities - -Project maintainers are responsible for clarifying the standards of acceptable -behavior and are expected to take appropriate and fair corrective action in -response to any instances of unacceptable behavior. - -Project maintainers have the right and responsibility to remove, edit, or -reject comments, commits, code, wiki edits, issues, and other contributions -that are not aligned to this Code of Conduct, or to ban temporarily or -permanently any contributor for other behaviors that they deem inappropriate, -threatening, offensive, or harmful. - -## Scope - -This Code of Conduct applies both within project spaces and in public spaces -when an individual is representing the project or its community. Examples of -representing a project or community include using an official project e-mail -address, posting via an official social media account, or acting as an -appointed representative at an online or offline event. Representation of a -project may be further defined and clarified by project maintainers. - -## Enforcement - -Instances of abusive, harassing, or otherwise unacceptable behavior may be -reported by contacting the project team at oss-conduct@uber.com. The project -team will review and investigate all complaints, and will respond in a way -that it deems appropriate to the circumstances. The project team is obligated -to maintain confidentiality with regard to the reporter of an incident. -Further details of specific enforcement policies may be posted separately. - -Project maintainers who do not follow or enforce the Code of Conduct in good -faith may face temporary or permanent repercussions as determined by other -members of the project's leadership. - -## Attribution - -This Code of Conduct is adapted from the [Contributor Covenant][homepage], -version 1.4, available at -[http://contributor-covenant.org/version/1/4][version]. 
- -[homepage]: http://contributor-covenant.org -[version]: http://contributor-covenant.org/version/1/4/ diff --git a/vendor/go.uber.org/zap/CONTRIBUTING.md b/vendor/go.uber.org/zap/CONTRIBUTING.md deleted file mode 100644 index ea02f3cae..000000000 --- a/vendor/go.uber.org/zap/CONTRIBUTING.md +++ /dev/null @@ -1,70 +0,0 @@ -# Contributing - -We'd love your help making zap the very best structured logging library in Go! - -If you'd like to add new exported APIs, please [open an issue][open-issue] -describing your proposal — discussing API changes ahead of time makes -pull request review much smoother. In your issue, pull request, and any other -communications, please remember to treat your fellow contributors with -respect! We take our [code of conduct](CODE_OF_CONDUCT.md) seriously. - -Note that you'll need to sign [Uber's Contributor License Agreement][cla] -before we can accept any of your contributions. If necessary, a bot will remind -you to accept the CLA when you open your pull request. - -## Setup - -[Fork][fork], then clone the repository: - -```bash -mkdir -p $GOPATH/src/go.uber.org -cd $GOPATH/src/go.uber.org -git clone git@github.com:your_github_username/zap.git -cd zap -git remote add upstream https://github.com/uber-go/zap.git -git fetch upstream -``` - -Make sure that the tests and the linters pass: - -```bash -make test -make lint -``` - -## Making Changes - -Start by creating a new branch for your changes: - -```bash -cd $GOPATH/src/go.uber.org/zap -git checkout master -git fetch upstream -git rebase upstream/master -git checkout -b cool_new_feature -``` - -Make your changes, then ensure that `make lint` and `make test` still pass. If -you're satisfied with your changes, push them to your fork. - -```bash -git push origin cool_new_feature -``` - -Then use the GitHub UI to open a pull request. - -At this point, you're waiting on us to review your changes. We _try_ to respond -to issues and pull requests within a few business days, and we may suggest some -improvements or alternatives. Once your changes are approved, one of the -project maintainers will merge them. - -We're much more likely to approve your changes if you: - -- Add tests for new functionality. -- Write a [good commit message][commit-message]. -- Maintain backward compatibility. - -[fork]: https://github.com/uber-go/zap/fork -[open-issue]: https://github.com/uber-go/zap/issues/new -[cla]: https://cla-assistant.io/uber-go/zap -[commit-message]: http://tbaggery.com/2008/04/19/a-note-about-git-commit-messages.html diff --git a/vendor/go.uber.org/zap/FAQ.md b/vendor/go.uber.org/zap/FAQ.md deleted file mode 100644 index b183b20bc..000000000 --- a/vendor/go.uber.org/zap/FAQ.md +++ /dev/null @@ -1,164 +0,0 @@ -# Frequently Asked Questions - -## Design - -### Why spend so much effort on logger performance? - -Of course, most applications won't notice the impact of a slow logger: they -already take tens or hundreds of milliseconds for each operation, so an extra -millisecond doesn't matter. - -On the other hand, why *not* make structured logging fast? The `SugaredLogger` -isn't any harder to use than other logging packages, and the `Logger` makes -structured logging possible in performance-sensitive contexts. Across a fleet -of Go microservices, making each application even slightly more efficient adds -up quickly. - -### Why aren't `Logger` and `SugaredLogger` interfaces? - -Unlike the familiar `io.Writer` and `http.Handler`, `Logger` and -`SugaredLogger` interfaces would include *many* methods. 
As [Rob Pike points -out][go-proverbs], "The bigger the interface, the weaker the abstraction." -Interfaces are also rigid — *any* change requires releasing a new major -version, since it breaks all third-party implementations. - -Making the `Logger` and `SugaredLogger` concrete types doesn't sacrifice much -abstraction, and it lets us add methods without introducing breaking changes. -Your applications should define and depend upon an interface that includes -just the methods you use. - -### Why are some of my logs missing? - -Logs are dropped intentionally by zap when sampling is enabled. The production -configuration (as returned by `NewProductionConfig()`) enables sampling, which will -cause repeated logs within a second to be sampled. See more details on why sampling -is enabled in [Why sample application logs](https://github.com/uber-go/zap/blob/master/FAQ.md#why-sample-application-logs). - -### Why sample application logs? - -Applications often experience runs of errors, either because of a bug or -because of a misbehaving user. Logging errors is usually a good idea, but it -can easily make this bad situation worse: not only is your application coping -with a flood of errors, it's also spending extra CPU cycles and I/O logging -those errors. Since writes are typically serialized, logging limits throughput -when you need it most. - -Sampling fixes this problem by dropping repetitive log entries. Under normal -conditions, your application writes out every entry. When similar entries are -logged hundreds or thousands of times each second, though, zap begins dropping -duplicates to preserve throughput. - -### Why do the structured logging APIs take a message in addition to fields? - -Subjectively, we find it helpful to accompany structured context with a brief -description. This isn't critical during development, but it makes debugging -and operating unfamiliar systems much easier. - -More concretely, zap's sampling algorithm uses the message to identify -duplicate entries. In our experience, this is a practical middle ground -between random sampling (which often drops the exact entry that you need while -debugging) and hashing the complete entry (which is prohibitively expensive). - -### Why include package-global loggers? - -Since so many other logging packages include a global logger, many -applications aren't designed to accept loggers as explicit parameters. -Changing function signatures is often a breaking change, so zap includes -global loggers to simplify migration. - -Avoid them where possible. - -### Why include dedicated Panic and Fatal log levels? - -In general, application code should handle errors gracefully instead of using -`panic` or `os.Exit`. However, every rule has exceptions, and it's common to -crash when an error is truly unrecoverable. To avoid losing any information -— especially the reason for the crash — the logger must flush any -buffered entries before the process exits. - -Zap makes this easy by offering `Panic` and `Fatal` logging methods that -automatically flush before exiting. Of course, this doesn't guarantee that -logs will never be lost, but it eliminates a common error. - -See the discussion in uber-go/zap#207 for more details. - -### What's `DPanic`? - -`DPanic` stands for "panic in development." In development, it logs at -`PanicLevel`; otherwise, it logs at `ErrorLevel`. `DPanic` makes it easier to -catch errors that are theoretically possible, but shouldn't actually happen, -*without* crashing in production.
- -If you've ever written code like this, you need `DPanic`: - -```go -if err != nil { - panic(fmt.Sprintf("shouldn't ever get here: %v", err)) -} -``` - -## Installation - -### What does the error `expects import "go.uber.org/zap"` mean? - -Either zap was installed incorrectly or you're referencing the wrong package -name in your code. - -Zap's source code happens to be hosted on GitHub, but the [import -path][import-path] is `go.uber.org/zap`. This gives us, the project -maintainers, the freedom to move the source code if necessary. However, it -means that you need to take a little care when installing and using the -package. - -If you follow two simple rules, everything should work: install zap with `go -get -u go.uber.org/zap`, and always import it in your code with `import -"go.uber.org/zap"`. Your code shouldn't contain *any* references to -`github.com/uber-go/zap`. - -## Usage - -### Does zap support log rotation? - -Zap doesn't natively support rotating log files, since we prefer to leave this -to an external program like `logrotate`. - -However, it's easy to integrate a log rotation package like -[`gopkg.in/natefinch/lumberjack.v2`][lumberjack] as a `zapcore.WriteSyncer`. - -```go -// lumberjack.Logger is already safe for concurrent use, so we don't need to -// lock it. -w := zapcore.AddSync(&lumberjack.Logger{ - Filename: "/var/log/myapp/foo.log", - MaxSize: 500, // megabytes - MaxBackups: 3, - MaxAge: 28, // days -}) -core := zapcore.NewCore( - zapcore.NewJSONEncoder(zap.NewProductionEncoderConfig()), - w, - zap.InfoLevel, -) -logger := zap.New(core) -``` - -## Extensions - -We'd love to support every logging need within zap itself, but we're only -familiar with a handful of log ingestion systems, flag-parsing packages, and -the like. Rather than merging code that we can't effectively debug and -support, we'd rather grow an ecosystem of zap extensions. - -We're aware of the following extensions, but haven't used them ourselves: - -| Package | Integration | -| --- | --- | -| `github.com/tchap/zapext` | Sentry, syslog | -| `github.com/fgrosse/zaptest` | Ginkgo | -| `github.com/blendle/zapdriver` | Stackdriver | -| `github.com/moul/zapgorm` | Gorm | -| `github.com/moul/zapfilter` | Advanced filtering rules | - -[go-proverbs]: https://go-proverbs.github.io/ -[import-path]: https://golang.org/cmd/go/#hdr-Remote_import_paths -[lumberjack]: https://godoc.org/gopkg.in/natefinch/lumberjack.v2 diff --git a/vendor/go.uber.org/zap/LICENSE b/vendor/go.uber.org/zap/LICENSE deleted file mode 100644 index 6652bed45..000000000 --- a/vendor/go.uber.org/zap/LICENSE +++ /dev/null @@ -1,19 +0,0 @@ -Copyright (c) 2016-2017 Uber Technologies, Inc. - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. diff --git a/vendor/go.uber.org/zap/Makefile b/vendor/go.uber.org/zap/Makefile deleted file mode 100644 index eb1cee53b..000000000 --- a/vendor/go.uber.org/zap/Makefile +++ /dev/null @@ -1,76 +0,0 @@ -# Directory containing the Makefile. -PROJECT_ROOT = $(dir $(abspath $(lastword $(MAKEFILE_LIST)))) - -export GOBIN ?= $(PROJECT_ROOT)/bin -export PATH := $(GOBIN):$(PATH) - -GOVULNCHECK = $(GOBIN)/govulncheck -BENCH_FLAGS ?= -cpuprofile=cpu.pprof -memprofile=mem.pprof -benchmem - -# Directories containing independent Go modules. -MODULE_DIRS = . ./exp ./benchmarks ./zapgrpc/internal/test - -# Directories that we want to track coverage for. -COVER_DIRS = . ./exp - -.PHONY: all -all: lint test - -.PHONY: lint -lint: golangci-lint tidy-lint license-lint - -.PHONY: golangci-lint -golangci-lint: - @$(foreach mod,$(MODULE_DIRS), \ - (cd $(mod) && \ - echo "[lint] golangci-lint: $(mod)" && \ - golangci-lint run --path-prefix $(mod)) &&) true - -.PHONY: tidy -tidy: - @$(foreach dir,$(MODULE_DIRS), \ - (cd $(dir) && go mod tidy) &&) true - -.PHONY: tidy-lint -tidy-lint: - @$(foreach mod,$(MODULE_DIRS), \ - (cd $(mod) && \ - echo "[lint] tidy: $(mod)" && \ - go mod tidy && \ - git diff --exit-code -- go.mod go.sum) &&) true - - -.PHONY: license-lint -license-lint: - ./checklicense.sh - -$(GOVULNCHECK): - cd tools && go install golang.org/x/vuln/cmd/govulncheck - -.PHONY: test -test: - @$(foreach dir,$(MODULE_DIRS),(cd $(dir) && go test -race ./...) &&) true - -.PHONY: cover -cover: - @$(foreach dir,$(COVER_DIRS), ( \ - cd $(dir) && \ - go test -race -coverprofile=cover.out -coverpkg=./... ./... \ - && go tool cover -html=cover.out -o cover.html) &&) true - -.PHONY: bench -BENCH ?= . -bench: - @$(foreach dir,$(MODULE_DIRS), ( \ - cd $(dir) && \ - go list ./... | xargs -n1 go test -bench=$(BENCH) -run="^$$" $(BENCH_FLAGS) \ - ) &&) true - -.PHONY: updatereadme -updatereadme: - rm -f README.md - cat .readme.tmpl | go run internal/readme/readme.go > README.md - -.PHONY: vulncheck -vulncheck: $(GOVULNCHECK) - $(GOVULNCHECK) ./... diff --git a/vendor/go.uber.org/zap/README.md b/vendor/go.uber.org/zap/README.md deleted file mode 100644 index a17035cb6..000000000 --- a/vendor/go.uber.org/zap/README.md +++ /dev/null @@ -1,149 +0,0 @@ -# :zap: zap - - -
- -Blazing fast, structured, leveled logging in Go. - -![Zap logo](assets/logo.png) - -[![GoDoc][doc-img]][doc] [![Build Status][ci-img]][ci] [![Coverage Status][cov-img]][cov] - -
- -## Installation - -`go get -u go.uber.org/zap` - -Note that zap only supports the two most recent minor versions of Go. - -## Quick Start - -In contexts where performance is nice, but not critical, use the -`SugaredLogger`. It's 4-10x faster than other structured logging -packages and includes both structured and `printf`-style APIs. - -```go -logger, _ := zap.NewProduction() -defer logger.Sync() // flushes buffer, if any -sugar := logger.Sugar() -sugar.Infow("failed to fetch URL", - // Structured context as loosely typed key-value pairs. - "url", url, - "attempt", 3, - "backoff", time.Second, -) -sugar.Infof("Failed to fetch URL: %s", url) -``` - -When performance and type safety are critical, use the `Logger`. It's even -faster than the `SugaredLogger` and allocates far less, but it only supports -structured logging. - -```go -logger, _ := zap.NewProduction() -defer logger.Sync() -logger.Info("failed to fetch URL", - // Structured context as strongly typed Field values. - zap.String("url", url), - zap.Int("attempt", 3), - zap.Duration("backoff", time.Second), -) -``` - -See the [documentation][doc] and [FAQ](FAQ.md) for more details. - -## Performance - -For applications that log in the hot path, reflection-based serialization and -string formatting are prohibitively expensive — they're CPU-intensive -and make many small allocations. Put differently, using `encoding/json` and -`fmt.Fprintf` to log tons of `interface{}`s makes your application slow. - -Zap takes a different approach. It includes a reflection-free, zero-allocation -JSON encoder, and the base `Logger` strives to avoid serialization overhead -and allocations wherever possible. By building the high-level `SugaredLogger` -on that foundation, zap lets users *choose* when they need to count every -allocation and when they'd prefer a more familiar, loosely typed API. - -As measured by its own [benchmarking suite][], not only is zap more performant -than comparable structured logging packages — it's also faster than the -standard library. 
Like all benchmarks, take these with a grain of salt.[1](#footnote-versions) - -Log a message and 10 fields: - -| Package | Time | Time % to zap | Objects Allocated | -| :------ | :--: | :-----------: | :---------------: | -| :zap: zap | 656 ns/op | +0% | 5 allocs/op -| :zap: zap (sugared) | 935 ns/op | +43% | 10 allocs/op -| zerolog | 380 ns/op | -42% | 1 allocs/op -| go-kit | 2249 ns/op | +243% | 57 allocs/op -| slog (LogAttrs) | 2479 ns/op | +278% | 40 allocs/op -| slog | 2481 ns/op | +278% | 42 allocs/op -| apex/log | 9591 ns/op | +1362% | 63 allocs/op -| log15 | 11393 ns/op | +1637% | 75 allocs/op -| logrus | 11654 ns/op | +1677% | 79 allocs/op - -Log a message with a logger that already has 10 fields of context: - -| Package | Time | Time % to zap | Objects Allocated | -| :------ | :--: | :-----------: | :---------------: | -| :zap: zap | 67 ns/op | +0% | 0 allocs/op -| :zap: zap (sugared) | 84 ns/op | +25% | 1 allocs/op -| zerolog | 35 ns/op | -48% | 0 allocs/op -| slog | 193 ns/op | +188% | 0 allocs/op -| slog (LogAttrs) | 200 ns/op | +199% | 0 allocs/op -| go-kit | 2460 ns/op | +3572% | 56 allocs/op -| log15 | 9038 ns/op | +13390% | 70 allocs/op -| apex/log | 9068 ns/op | +13434% | 53 allocs/op -| logrus | 10521 ns/op | +15603% | 68 allocs/op - -Log a static string, without any context or `printf`-style templating: - -| Package | Time | Time % to zap | Objects Allocated | -| :------ | :--: | :-----------: | :---------------: | -| :zap: zap | 63 ns/op | +0% | 0 allocs/op -| :zap: zap (sugared) | 81 ns/op | +29% | 1 allocs/op -| zerolog | 32 ns/op | -49% | 0 allocs/op -| standard library | 124 ns/op | +97% | 1 allocs/op -| slog | 196 ns/op | +211% | 0 allocs/op -| slog (LogAttrs) | 200 ns/op | +217% | 0 allocs/op -| go-kit | 213 ns/op | +238% | 9 allocs/op -| apex/log | 771 ns/op | +1124% | 5 allocs/op -| logrus | 1439 ns/op | +2184% | 23 allocs/op -| log15 | 2069 ns/op | +3184% | 20 allocs/op - -## Development Status: Stable - -All APIs are finalized, and no breaking changes will be made in the 1.x series -of releases. Users of semver-aware dependency management systems should pin -zap to `^1`. - -## Contributing - -We encourage and support an active, healthy community of contributors — -including you! Details are in the [contribution guide](CONTRIBUTING.md) and -the [code of conduct](CODE_OF_CONDUCT.md). The zap maintainers keep an eye on -issues and pull requests, but you can also report any negative conduct to -oss-conduct@uber.com. That email list is a private, safe space; even the zap -maintainers don't have access, so don't hesitate to hold us to a high -standard. - -
- -Released under the [MIT License](LICENSE). - -1 In particular, keep in mind that we may be -benchmarking against slightly older versions of other packages. Versions are -pinned in the [benchmarks/go.mod][] file. [↩](#anchor-versions) - -[doc-img]: https://pkg.go.dev/badge/go.uber.org/zap -[doc]: https://pkg.go.dev/go.uber.org/zap -[ci-img]: https://github.com/uber-go/zap/actions/workflows/go.yml/badge.svg -[ci]: https://github.com/uber-go/zap/actions/workflows/go.yml -[cov-img]: https://codecov.io/gh/uber-go/zap/branch/master/graph/badge.svg -[cov]: https://codecov.io/gh/uber-go/zap -[benchmarking suite]: https://github.com/uber-go/zap/tree/master/benchmarks -[benchmarks/go.mod]: https://github.com/uber-go/zap/blob/master/benchmarks/go.mod - diff --git a/vendor/go.uber.org/zap/array.go b/vendor/go.uber.org/zap/array.go deleted file mode 100644 index abfccb566..000000000 --- a/vendor/go.uber.org/zap/array.go +++ /dev/null @@ -1,447 +0,0 @@ -// Copyright (c) 2016 Uber Technologies, Inc. -// -// Permission is hereby granted, free of charge, to any person obtaining a copy -// of this software and associated documentation files (the "Software"), to deal -// in the Software without restriction, including without limitation the rights -// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -// copies of the Software, and to permit persons to whom the Software is -// furnished to do so, subject to the following conditions: -// -// The above copyright notice and this permission notice shall be included in -// all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -// THE SOFTWARE. - -package zap - -import ( - "fmt" - "time" - - "go.uber.org/zap/zapcore" -) - -// Array constructs a field with the given key and ArrayMarshaler. It provides -// a flexible, but still type-safe and efficient, way to add array-like types -// to the logging context. The struct's MarshalLogArray method is called lazily. -func Array(key string, val zapcore.ArrayMarshaler) Field { - return Field{Key: key, Type: zapcore.ArrayMarshalerType, Interface: val} -} - -// Bools constructs a field that carries a slice of bools. -func Bools(key string, bs []bool) Field { - return Array(key, bools(bs)) -} - -// ByteStrings constructs a field that carries a slice of []byte, each of which -// must be UTF-8 encoded text. -func ByteStrings(key string, bss [][]byte) Field { - return Array(key, byteStringsArray(bss)) -} - -// Complex128s constructs a field that carries a slice of complex numbers. -func Complex128s(key string, nums []complex128) Field { - return Array(key, complex128s(nums)) -} - -// Complex64s constructs a field that carries a slice of complex numbers. -func Complex64s(key string, nums []complex64) Field { - return Array(key, complex64s(nums)) -} - -// Durations constructs a field that carries a slice of time.Durations. -func Durations(key string, ds []time.Duration) Field { - return Array(key, durations(ds)) -} - -// Float64s constructs a field that carries a slice of floats. 
-func Float64s(key string, nums []float64) Field { - return Array(key, float64s(nums)) -} - -// Float32s constructs a field that carries a slice of floats. -func Float32s(key string, nums []float32) Field { - return Array(key, float32s(nums)) -} - -// Ints constructs a field that carries a slice of integers. -func Ints(key string, nums []int) Field { - return Array(key, ints(nums)) -} - -// Int64s constructs a field that carries a slice of integers. -func Int64s(key string, nums []int64) Field { - return Array(key, int64s(nums)) -} - -// Int32s constructs a field that carries a slice of integers. -func Int32s(key string, nums []int32) Field { - return Array(key, int32s(nums)) -} - -// Int16s constructs a field that carries a slice of integers. -func Int16s(key string, nums []int16) Field { - return Array(key, int16s(nums)) -} - -// Int8s constructs a field that carries a slice of integers. -func Int8s(key string, nums []int8) Field { - return Array(key, int8s(nums)) -} - -// Objects constructs a field with the given key, holding a list of the -// provided objects that can be marshaled by Zap. -// -// Note that these objects must implement zapcore.ObjectMarshaler directly. -// That is, if you're trying to marshal a []Request, the MarshalLogObject -// method must be declared on the Request type, not its pointer (*Request). -// If it's on the pointer, use ObjectValues. -// -// Given an object that implements MarshalLogObject on the value receiver, you -// can log a slice of those objects with Objects like so: -// -// type Author struct{ ... } -// func (a Author) MarshalLogObject(enc zapcore.ObjectEncoder) error -// -// var authors []Author = ... -// logger.Info("loading article", zap.Objects("authors", authors)) -// -// Similarly, given a type that implements MarshalLogObject on its pointer -// receiver, you can log a slice of pointers to that object with Objects like -// so: -// -// type Request struct{ ... } -// func (r *Request) MarshalLogObject(enc zapcore.ObjectEncoder) error -// -// var requests []*Request = ... -// logger.Info("sending requests", zap.Objects("requests", requests)) -// -// If instead, you have a slice of values of such an object, use the -// ObjectValues constructor. -// -// var requests []Request = ... -// logger.Info("sending requests", zap.ObjectValues("requests", requests)) -func Objects[T zapcore.ObjectMarshaler](key string, values []T) Field { - return Array(key, objects[T](values)) -} - -type objects[T zapcore.ObjectMarshaler] []T - -func (os objects[T]) MarshalLogArray(arr zapcore.ArrayEncoder) error { - for _, o := range os { - if err := arr.AppendObject(o); err != nil { - return err - } - } - return nil -} - -// ObjectMarshalerPtr is a constraint that specifies that the given type -// implements zapcore.ObjectMarshaler on a pointer receiver. -type ObjectMarshalerPtr[T any] interface { - *T - zapcore.ObjectMarshaler -} - -// ObjectValues constructs a field with the given key, holding a list of the -// provided objects, where pointers to these objects can be marshaled by Zap. -// -// Note that pointers to these objects must implement zapcore.ObjectMarshaler. -// That is, if you're trying to marshal a []Request, the MarshalLogObject -// method must be declared on the *Request type, not the value (Request). -// If it's on the value, use Objects. -// -// Given an object that implements MarshalLogObject on the pointer receiver, -// you can log a slice of those objects with ObjectValues like so: -// -// type Request struct{ ... 
} -// func (r *Request) MarshalLogObject(enc zapcore.ObjectEncoder) error -// -// var requests []Request = ... -// logger.Info("sending requests", zap.ObjectValues("requests", requests)) -// -// If instead, you have a slice of pointers of such an object, use the Objects -// field constructor. -// -// var requests []*Request = ... -// logger.Info("sending requests", zap.Objects("requests", requests)) -func ObjectValues[T any, P ObjectMarshalerPtr[T]](key string, values []T) Field { - return Array(key, objectValues[T, P](values)) -} - -type objectValues[T any, P ObjectMarshalerPtr[T]] []T - -func (os objectValues[T, P]) MarshalLogArray(arr zapcore.ArrayEncoder) error { - for i := range os { - // It is necessary for us to explicitly reference the "P" type. - // We cannot simply pass "&os[i]" to AppendObject because its type - // is "*T", which the type system does not consider as - // implementing ObjectMarshaler. - // Only the type "P" satisfies ObjectMarshaler, which we have - // to convert "*T" to explicitly. - var p P = &os[i] - if err := arr.AppendObject(p); err != nil { - return err - } - } - return nil -} - -// Strings constructs a field that carries a slice of strings. -func Strings(key string, ss []string) Field { - return Array(key, stringArray(ss)) -} - -// Stringers constructs a field with the given key, holding a list of the -// output provided by the value's String method -// -// Given an object that implements String on the value receiver, you -// can log a slice of those objects with Objects like so: -// -// type Request struct{ ... } -// func (a Request) String() string -// -// var requests []Request = ... -// logger.Info("sending requests", zap.Stringers("requests", requests)) -// -// Note that these objects must implement fmt.Stringer directly. -// That is, if you're trying to marshal a []Request, the String method -// must be declared on the Request type, not its pointer (*Request). -func Stringers[T fmt.Stringer](key string, values []T) Field { - return Array(key, stringers[T](values)) -} - -type stringers[T fmt.Stringer] []T - -func (os stringers[T]) MarshalLogArray(arr zapcore.ArrayEncoder) error { - for _, o := range os { - arr.AppendString(o.String()) - } - return nil -} - -// Times constructs a field that carries a slice of time.Times. -func Times(key string, ts []time.Time) Field { - return Array(key, times(ts)) -} - -// Uints constructs a field that carries a slice of unsigned integers. -func Uints(key string, nums []uint) Field { - return Array(key, uints(nums)) -} - -// Uint64s constructs a field that carries a slice of unsigned integers. -func Uint64s(key string, nums []uint64) Field { - return Array(key, uint64s(nums)) -} - -// Uint32s constructs a field that carries a slice of unsigned integers. -func Uint32s(key string, nums []uint32) Field { - return Array(key, uint32s(nums)) -} - -// Uint16s constructs a field that carries a slice of unsigned integers. -func Uint16s(key string, nums []uint16) Field { - return Array(key, uint16s(nums)) -} - -// Uint8s constructs a field that carries a slice of unsigned integers. -func Uint8s(key string, nums []uint8) Field { - return Array(key, uint8s(nums)) -} - -// Uintptrs constructs a field that carries a slice of pointer addresses. -func Uintptrs(key string, us []uintptr) Field { - return Array(key, uintptrs(us)) -} - -// Errors constructs a field that carries a slice of errors. 
-func Errors(key string, errs []error) Field { - return Array(key, errArray(errs)) -} - -type bools []bool - -func (bs bools) MarshalLogArray(arr zapcore.ArrayEncoder) error { - for i := range bs { - arr.AppendBool(bs[i]) - } - return nil -} - -type byteStringsArray [][]byte - -func (bss byteStringsArray) MarshalLogArray(arr zapcore.ArrayEncoder) error { - for i := range bss { - arr.AppendByteString(bss[i]) - } - return nil -} - -type complex128s []complex128 - -func (nums complex128s) MarshalLogArray(arr zapcore.ArrayEncoder) error { - for i := range nums { - arr.AppendComplex128(nums[i]) - } - return nil -} - -type complex64s []complex64 - -func (nums complex64s) MarshalLogArray(arr zapcore.ArrayEncoder) error { - for i := range nums { - arr.AppendComplex64(nums[i]) - } - return nil -} - -type durations []time.Duration - -func (ds durations) MarshalLogArray(arr zapcore.ArrayEncoder) error { - for i := range ds { - arr.AppendDuration(ds[i]) - } - return nil -} - -type float64s []float64 - -func (nums float64s) MarshalLogArray(arr zapcore.ArrayEncoder) error { - for i := range nums { - arr.AppendFloat64(nums[i]) - } - return nil -} - -type float32s []float32 - -func (nums float32s) MarshalLogArray(arr zapcore.ArrayEncoder) error { - for i := range nums { - arr.AppendFloat32(nums[i]) - } - return nil -} - -type ints []int - -func (nums ints) MarshalLogArray(arr zapcore.ArrayEncoder) error { - for i := range nums { - arr.AppendInt(nums[i]) - } - return nil -} - -type int64s []int64 - -func (nums int64s) MarshalLogArray(arr zapcore.ArrayEncoder) error { - for i := range nums { - arr.AppendInt64(nums[i]) - } - return nil -} - -type int32s []int32 - -func (nums int32s) MarshalLogArray(arr zapcore.ArrayEncoder) error { - for i := range nums { - arr.AppendInt32(nums[i]) - } - return nil -} - -type int16s []int16 - -func (nums int16s) MarshalLogArray(arr zapcore.ArrayEncoder) error { - for i := range nums { - arr.AppendInt16(nums[i]) - } - return nil -} - -type int8s []int8 - -func (nums int8s) MarshalLogArray(arr zapcore.ArrayEncoder) error { - for i := range nums { - arr.AppendInt8(nums[i]) - } - return nil -} - -type stringArray []string - -func (ss stringArray) MarshalLogArray(arr zapcore.ArrayEncoder) error { - for i := range ss { - arr.AppendString(ss[i]) - } - return nil -} - -type times []time.Time - -func (ts times) MarshalLogArray(arr zapcore.ArrayEncoder) error { - for i := range ts { - arr.AppendTime(ts[i]) - } - return nil -} - -type uints []uint - -func (nums uints) MarshalLogArray(arr zapcore.ArrayEncoder) error { - for i := range nums { - arr.AppendUint(nums[i]) - } - return nil -} - -type uint64s []uint64 - -func (nums uint64s) MarshalLogArray(arr zapcore.ArrayEncoder) error { - for i := range nums { - arr.AppendUint64(nums[i]) - } - return nil -} - -type uint32s []uint32 - -func (nums uint32s) MarshalLogArray(arr zapcore.ArrayEncoder) error { - for i := range nums { - arr.AppendUint32(nums[i]) - } - return nil -} - -type uint16s []uint16 - -func (nums uint16s) MarshalLogArray(arr zapcore.ArrayEncoder) error { - for i := range nums { - arr.AppendUint16(nums[i]) - } - return nil -} - -type uint8s []uint8 - -func (nums uint8s) MarshalLogArray(arr zapcore.ArrayEncoder) error { - for i := range nums { - arr.AppendUint8(nums[i]) - } - return nil -} - -type uintptrs []uintptr - -func (nums uintptrs) MarshalLogArray(arr zapcore.ArrayEncoder) error { - for i := range nums { - arr.AppendUintptr(nums[i]) - } - return nil -} diff --git a/vendor/go.uber.org/zap/buffer/buffer.go 
b/vendor/go.uber.org/zap/buffer/buffer.go deleted file mode 100644 index 0b8540c21..000000000 --- a/vendor/go.uber.org/zap/buffer/buffer.go +++ /dev/null @@ -1,146 +0,0 @@ -// Copyright (c) 2016 Uber Technologies, Inc. -// -// Permission is hereby granted, free of charge, to any person obtaining a copy -// of this software and associated documentation files (the "Software"), to deal -// in the Software without restriction, including without limitation the rights -// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -// copies of the Software, and to permit persons to whom the Software is -// furnished to do so, subject to the following conditions: -// -// The above copyright notice and this permission notice shall be included in -// all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -// THE SOFTWARE. - -// Package buffer provides a thin wrapper around a byte slice. Unlike the -// standard library's bytes.Buffer, it supports a portion of the strconv -// package's zero-allocation formatters. -package buffer // import "go.uber.org/zap/buffer" - -import ( - "strconv" - "time" -) - -const _size = 1024 // by default, create 1 KiB buffers - -// Buffer is a thin wrapper around a byte slice. It's intended to be pooled, so -// the only way to construct one is via a Pool. -type Buffer struct { - bs []byte - pool Pool -} - -// AppendByte writes a single byte to the Buffer. -func (b *Buffer) AppendByte(v byte) { - b.bs = append(b.bs, v) -} - -// AppendBytes writes the given slice of bytes to the Buffer. -func (b *Buffer) AppendBytes(v []byte) { - b.bs = append(b.bs, v...) -} - -// AppendString writes a string to the Buffer. -func (b *Buffer) AppendString(s string) { - b.bs = append(b.bs, s...) -} - -// AppendInt appends an integer to the underlying buffer (assuming base 10). -func (b *Buffer) AppendInt(i int64) { - b.bs = strconv.AppendInt(b.bs, i, 10) -} - -// AppendTime appends the time formatted using the specified layout. -func (b *Buffer) AppendTime(t time.Time, layout string) { - b.bs = t.AppendFormat(b.bs, layout) -} - -// AppendUint appends an unsigned integer to the underlying buffer (assuming -// base 10). -func (b *Buffer) AppendUint(i uint64) { - b.bs = strconv.AppendUint(b.bs, i, 10) -} - -// AppendBool appends a bool to the underlying buffer. -func (b *Buffer) AppendBool(v bool) { - b.bs = strconv.AppendBool(b.bs, v) -} - -// AppendFloat appends a float to the underlying buffer. It doesn't quote NaN -// or +/- Inf. -func (b *Buffer) AppendFloat(f float64, bitSize int) { - b.bs = strconv.AppendFloat(b.bs, f, 'f', -1, bitSize) -} - -// Len returns the length of the underlying byte slice. -func (b *Buffer) Len() int { - return len(b.bs) -} - -// Cap returns the capacity of the underlying byte slice. -func (b *Buffer) Cap() int { - return cap(b.bs) -} - -// Bytes returns a mutable reference to the underlying byte slice. -func (b *Buffer) Bytes() []byte { - return b.bs -} - -// String returns a string copy of the underlying byte slice. 
-func (b *Buffer) String() string { - return string(b.bs) -} - -// Reset resets the underlying byte slice. Subsequent writes re-use the slice's -// backing array. -func (b *Buffer) Reset() { - b.bs = b.bs[:0] -} - -// Write implements io.Writer. -func (b *Buffer) Write(bs []byte) (int, error) { - b.bs = append(b.bs, bs...) - return len(bs), nil -} - -// WriteByte writes a single byte to the Buffer. -// -// Error returned is always nil, function signature is compatible -// with bytes.Buffer and bufio.Writer -func (b *Buffer) WriteByte(v byte) error { - b.AppendByte(v) - return nil -} - -// WriteString writes a string to the Buffer. -// -// Error returned is always nil, function signature is compatible -// with bytes.Buffer and bufio.Writer -func (b *Buffer) WriteString(s string) (int, error) { - b.AppendString(s) - return len(s), nil -} - -// TrimNewline trims any final "\n" byte from the end of the buffer. -func (b *Buffer) TrimNewline() { - if i := len(b.bs) - 1; i >= 0 { - if b.bs[i] == '\n' { - b.bs = b.bs[:i] - } - } -} - -// Free returns the Buffer to its Pool. -// -// Callers must not retain references to the Buffer after calling Free. -func (b *Buffer) Free() { - b.pool.put(b) -} diff --git a/vendor/go.uber.org/zap/buffer/pool.go b/vendor/go.uber.org/zap/buffer/pool.go deleted file mode 100644 index 846323360..000000000 --- a/vendor/go.uber.org/zap/buffer/pool.go +++ /dev/null @@ -1,53 +0,0 @@ -// Copyright (c) 2016 Uber Technologies, Inc. -// -// Permission is hereby granted, free of charge, to any person obtaining a copy -// of this software and associated documentation files (the "Software"), to deal -// in the Software without restriction, including without limitation the rights -// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -// copies of the Software, and to permit persons to whom the Software is -// furnished to do so, subject to the following conditions: -// -// The above copyright notice and this permission notice shall be included in -// all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -// THE SOFTWARE. - -package buffer - -import ( - "go.uber.org/zap/internal/pool" -) - -// A Pool is a type-safe wrapper around a sync.Pool. -type Pool struct { - p *pool.Pool[*Buffer] -} - -// NewPool constructs a new Pool. -func NewPool() Pool { - return Pool{ - p: pool.New(func() *Buffer { - return &Buffer{ - bs: make([]byte, 0, _size), - } - }), - } -} - -// Get retrieves a Buffer from the pool, creating one if necessary. 
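The Buffer/Pool pair being removed here is a small pooled-buffer API; a minimal sketch of the intended call pattern, assuming the upstream go.uber.org/zap/buffer package rather than this vendored copy:

    package main

    import (
        "fmt"

        "go.uber.org/zap/buffer"
    )

    func main() {
        pool := buffer.NewPool()

        buf := pool.Get() // returns a reset *Buffer, creating one if the pool is empty
        defer buf.Free()  // hand it back; callers must not retain it afterwards

        buf.AppendString("attempt=")
        buf.AppendInt(3)
        buf.AppendString(" ok=")
        buf.AppendBool(true)

        fmt.Println(buf.String()) // attempt=3 ok=true
    }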
-func (p Pool) Get() *Buffer { - buf := p.p.Get() - buf.Reset() - buf.pool = p - return buf -} - -func (p Pool) put(buf *Buffer) { - p.p.Put(buf) -} diff --git a/vendor/go.uber.org/zap/checklicense.sh b/vendor/go.uber.org/zap/checklicense.sh deleted file mode 100644 index 345ac8b89..000000000 --- a/vendor/go.uber.org/zap/checklicense.sh +++ /dev/null @@ -1,17 +0,0 @@ -#!/bin/bash -e - -ERROR_COUNT=0 -while read -r file -do - case "$(head -1 "${file}")" in - *"Copyright (c) "*" Uber Technologies, Inc.") - # everything's cool - ;; - *) - echo "$file is missing license header." - (( ERROR_COUNT++ )) - ;; - esac -done < <(git ls-files "*\.go") - -exit $ERROR_COUNT diff --git a/vendor/go.uber.org/zap/config.go b/vendor/go.uber.org/zap/config.go deleted file mode 100644 index e76e4e64f..000000000 --- a/vendor/go.uber.org/zap/config.go +++ /dev/null @@ -1,330 +0,0 @@ -// Copyright (c) 2016 Uber Technologies, Inc. -// -// Permission is hereby granted, free of charge, to any person obtaining a copy -// of this software and associated documentation files (the "Software"), to deal -// in the Software without restriction, including without limitation the rights -// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -// copies of the Software, and to permit persons to whom the Software is -// furnished to do so, subject to the following conditions: -// -// The above copyright notice and this permission notice shall be included in -// all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -// THE SOFTWARE. - -package zap - -import ( - "errors" - "sort" - "time" - - "go.uber.org/zap/zapcore" -) - -// SamplingConfig sets a sampling strategy for the logger. Sampling caps the -// global CPU and I/O load that logging puts on your process while attempting -// to preserve a representative subset of your logs. -// -// If specified, the Sampler will invoke the Hook after each decision. -// -// Values configured here are per-second. See zapcore.NewSamplerWithOptions for -// details. -type SamplingConfig struct { - Initial int `json:"initial" yaml:"initial"` - Thereafter int `json:"thereafter" yaml:"thereafter"` - Hook func(zapcore.Entry, zapcore.SamplingDecision) `json:"-" yaml:"-"` -} - -// Config offers a declarative way to construct a logger. It doesn't do -// anything that can't be done with New, Options, and the various -// zapcore.WriteSyncer and zapcore.Core wrappers, but it's a simpler way to -// toggle common options. -// -// Note that Config intentionally supports only the most common options. More -// unusual logging setups (logging to network connections or message queues, -// splitting output between multiple files, etc.) are possible, but require -// direct use of the zapcore package. For sample code, see the package-level -// BasicConfiguration and AdvancedConfiguration examples. -// -// For an example showing runtime log level changes, see the documentation for -// AtomicLevel. -type Config struct { - // Level is the minimum enabled logging level. 
Note that this is a dynamic - // level, so calling Config.Level.SetLevel will atomically change the log - // level of all loggers descended from this config. - Level AtomicLevel `json:"level" yaml:"level"` - // Development puts the logger in development mode, which changes the - // behavior of DPanicLevel and takes stacktraces more liberally. - Development bool `json:"development" yaml:"development"` - // DisableCaller stops annotating logs with the calling function's file - // name and line number. By default, all logs are annotated. - DisableCaller bool `json:"disableCaller" yaml:"disableCaller"` - // DisableStacktrace completely disables automatic stacktrace capturing. By - // default, stacktraces are captured for WarnLevel and above logs in - // development and ErrorLevel and above in production. - DisableStacktrace bool `json:"disableStacktrace" yaml:"disableStacktrace"` - // Sampling sets a sampling policy. A nil SamplingConfig disables sampling. - Sampling *SamplingConfig `json:"sampling" yaml:"sampling"` - // Encoding sets the logger's encoding. Valid values are "json" and - // "console", as well as any third-party encodings registered via - // RegisterEncoder. - Encoding string `json:"encoding" yaml:"encoding"` - // EncoderConfig sets options for the chosen encoder. See - // zapcore.EncoderConfig for details. - EncoderConfig zapcore.EncoderConfig `json:"encoderConfig" yaml:"encoderConfig"` - // OutputPaths is a list of URLs or file paths to write logging output to. - // See Open for details. - OutputPaths []string `json:"outputPaths" yaml:"outputPaths"` - // ErrorOutputPaths is a list of URLs to write internal logger errors to. - // The default is standard error. - // - // Note that this setting only affects internal errors; for sample code that - // sends error-level logs to a different location from info- and debug-level - // logs, see the package-level AdvancedConfiguration example. - ErrorOutputPaths []string `json:"errorOutputPaths" yaml:"errorOutputPaths"` - // InitialFields is a collection of fields to add to the root logger. - InitialFields map[string]interface{} `json:"initialFields" yaml:"initialFields"` -} - -// NewProductionEncoderConfig returns an opinionated EncoderConfig for -// production environments. -// -// Messages encoded with this configuration will be JSON-formatted -// and will have the following keys by default: -// -// - "level": The logging level (e.g. "info", "error"). -// - "ts": The current time in number of seconds since the Unix epoch. -// - "msg": The message passed to the log statement. -// - "caller": If available, a short path to the file and line number -// where the log statement was issued. -// The logger configuration determines whether this field is captured. -// - "stacktrace": If available, a stack trace from the line -// where the log statement was issued. -// The logger configuration determines whether this field is captured. -// -// By default, the following formats are used for different types: -// -// - Time is formatted as floating-point number of seconds since the Unix -// epoch. -// - Duration is formatted as floating-point number of seconds. -// -// You may change these by setting the appropriate fields in the returned -// object. 
-// For example, use the following to change the time encoding format: -// -// cfg := zap.NewProductionEncoderConfig() -// cfg.EncodeTime = zapcore.ISO8601TimeEncoder -func NewProductionEncoderConfig() zapcore.EncoderConfig { - return zapcore.EncoderConfig{ - TimeKey: "ts", - LevelKey: "level", - NameKey: "logger", - CallerKey: "caller", - FunctionKey: zapcore.OmitKey, - MessageKey: "msg", - StacktraceKey: "stacktrace", - LineEnding: zapcore.DefaultLineEnding, - EncodeLevel: zapcore.LowercaseLevelEncoder, - EncodeTime: zapcore.EpochTimeEncoder, - EncodeDuration: zapcore.SecondsDurationEncoder, - EncodeCaller: zapcore.ShortCallerEncoder, - } -} - -// NewProductionConfig builds a reasonable default production logging -// configuration. -// Logging is enabled at InfoLevel and above, and uses a JSON encoder. -// Logs are written to standard error. -// Stacktraces are included on logs of ErrorLevel and above. -// DPanicLevel logs will not panic, but will write a stacktrace. -// -// Sampling is enabled at 100:100 by default, -// meaning that after the first 100 log entries -// with the same level and message in the same second, -// it will log every 100th entry -// with the same level and message in the same second. -// You may disable this behavior by setting Sampling to nil. -// -// See [NewProductionEncoderConfig] for information -// on the default encoder configuration. -func NewProductionConfig() Config { - return Config{ - Level: NewAtomicLevelAt(InfoLevel), - Development: false, - Sampling: &SamplingConfig{ - Initial: 100, - Thereafter: 100, - }, - Encoding: "json", - EncoderConfig: NewProductionEncoderConfig(), - OutputPaths: []string{"stderr"}, - ErrorOutputPaths: []string{"stderr"}, - } -} - -// NewDevelopmentEncoderConfig returns an opinionated EncoderConfig for -// development environments. -// -// Messages encoded with this configuration will use Zap's console encoder -// intended to print human-readable output. -// It will print log messages with the following information: -// -// - The log level (e.g. "INFO", "ERROR"). -// - The time in ISO8601 format (e.g. "2017-01-01T12:00:00Z"). -// - The message passed to the log statement. -// - If available, a short path to the file and line number -// where the log statement was issued. -// The logger configuration determines whether this field is captured. -// - If available, a stacktrace from the line -// where the log statement was issued. -// The logger configuration determines whether this field is captured. -// -// By default, the following formats are used for different types: -// -// - Time is formatted in ISO8601 format (e.g. "2017-01-01T12:00:00Z"). -// - Duration is formatted as a string (e.g. "1.234s"). -// -// You may change these by setting the appropriate fields in the returned -// object. -// For example, use the following to change the time encoding format: -// -// cfg := zap.NewDevelopmentEncoderConfig() -// cfg.EncodeTime = zapcore.ISO8601TimeEncoder -func NewDevelopmentEncoderConfig() zapcore.EncoderConfig { - return zapcore.EncoderConfig{ - // Keys can be anything except the empty string. 
- TimeKey: "T", - LevelKey: "L", - NameKey: "N", - CallerKey: "C", - FunctionKey: zapcore.OmitKey, - MessageKey: "M", - StacktraceKey: "S", - LineEnding: zapcore.DefaultLineEnding, - EncodeLevel: zapcore.CapitalLevelEncoder, - EncodeTime: zapcore.ISO8601TimeEncoder, - EncodeDuration: zapcore.StringDurationEncoder, - EncodeCaller: zapcore.ShortCallerEncoder, - } -} - -// NewDevelopmentConfig builds a reasonable default development logging -// configuration. -// Logging is enabled at DebugLevel and above, and uses a console encoder. -// Logs are written to standard error. -// Stacktraces are included on logs of WarnLevel and above. -// DPanicLevel logs will panic. -// -// See [NewDevelopmentEncoderConfig] for information -// on the default encoder configuration. -func NewDevelopmentConfig() Config { - return Config{ - Level: NewAtomicLevelAt(DebugLevel), - Development: true, - Encoding: "console", - EncoderConfig: NewDevelopmentEncoderConfig(), - OutputPaths: []string{"stderr"}, - ErrorOutputPaths: []string{"stderr"}, - } -} - -// Build constructs a logger from the Config and Options. -func (cfg Config) Build(opts ...Option) (*Logger, error) { - enc, err := cfg.buildEncoder() - if err != nil { - return nil, err - } - - sink, errSink, err := cfg.openSinks() - if err != nil { - return nil, err - } - - if cfg.Level == (AtomicLevel{}) { - return nil, errors.New("missing Level") - } - - log := New( - zapcore.NewCore(enc, sink, cfg.Level), - cfg.buildOptions(errSink)..., - ) - if len(opts) > 0 { - log = log.WithOptions(opts...) - } - return log, nil -} - -func (cfg Config) buildOptions(errSink zapcore.WriteSyncer) []Option { - opts := []Option{ErrorOutput(errSink)} - - if cfg.Development { - opts = append(opts, Development()) - } - - if !cfg.DisableCaller { - opts = append(opts, AddCaller()) - } - - stackLevel := ErrorLevel - if cfg.Development { - stackLevel = WarnLevel - } - if !cfg.DisableStacktrace { - opts = append(opts, AddStacktrace(stackLevel)) - } - - if scfg := cfg.Sampling; scfg != nil { - opts = append(opts, WrapCore(func(core zapcore.Core) zapcore.Core { - var samplerOpts []zapcore.SamplerOption - if scfg.Hook != nil { - samplerOpts = append(samplerOpts, zapcore.SamplerHook(scfg.Hook)) - } - return zapcore.NewSamplerWithOptions( - core, - time.Second, - cfg.Sampling.Initial, - cfg.Sampling.Thereafter, - samplerOpts..., - ) - })) - } - - if len(cfg.InitialFields) > 0 { - fs := make([]Field, 0, len(cfg.InitialFields)) - keys := make([]string, 0, len(cfg.InitialFields)) - for k := range cfg.InitialFields { - keys = append(keys, k) - } - sort.Strings(keys) - for _, k := range keys { - fs = append(fs, Any(k, cfg.InitialFields[k])) - } - opts = append(opts, Fields(fs...)) - } - - return opts -} - -func (cfg Config) openSinks() (zapcore.WriteSyncer, zapcore.WriteSyncer, error) { - sink, closeOut, err := Open(cfg.OutputPaths...) - if err != nil { - return nil, nil, err - } - errSink, _, err := Open(cfg.ErrorOutputPaths...) - if err != nil { - closeOut() - return nil, nil, err - } - return sink, errSink, nil -} - -func (cfg Config) buildEncoder() (zapcore.Encoder, error) { - return newEncoder(cfg.Encoding, cfg.EncoderConfig) -} diff --git a/vendor/go.uber.org/zap/doc.go b/vendor/go.uber.org/zap/doc.go deleted file mode 100644 index 3c50d7b4d..000000000 --- a/vendor/go.uber.org/zap/doc.go +++ /dev/null @@ -1,117 +0,0 @@ -// Copyright (c) 2016 Uber Technologies, Inc. 
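config.go, whose removal ends just above, documents Config.Build as the declarative way to construct a logger; a short, hedged sketch of the usual consumer pattern against the upstream zap package:

    package main

    import "go.uber.org/zap"

    func main() {
        cfg := zap.NewProductionConfig()
        cfg.Sampling = nil // a nil SamplingConfig disables sampling entirely
        cfg.InitialFields = map[string]interface{}{
            "service": "example", // hypothetical field, added to the root logger
        }

        logger, err := cfg.Build()
        if err != nil {
            panic(err)
        }
        defer logger.Sync()

        logger.Info("starting up")

        // Level is dynamic: this atomically changes every logger descended from cfg.
        cfg.Level.SetLevel(zap.DebugLevel)
        logger.Debug("now visible")
    }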
-// -// Permission is hereby granted, free of charge, to any person obtaining a copy -// of this software and associated documentation files (the "Software"), to deal -// in the Software without restriction, including without limitation the rights -// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -// copies of the Software, and to permit persons to whom the Software is -// furnished to do so, subject to the following conditions: -// -// The above copyright notice and this permission notice shall be included in -// all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -// THE SOFTWARE. - -// Package zap provides fast, structured, leveled logging. -// -// For applications that log in the hot path, reflection-based serialization -// and string formatting are prohibitively expensive - they're CPU-intensive -// and make many small allocations. Put differently, using json.Marshal and -// fmt.Fprintf to log tons of interface{} makes your application slow. -// -// Zap takes a different approach. It includes a reflection-free, -// zero-allocation JSON encoder, and the base Logger strives to avoid -// serialization overhead and allocations wherever possible. By building the -// high-level SugaredLogger on that foundation, zap lets users choose when -// they need to count every allocation and when they'd prefer a more familiar, -// loosely typed API. -// -// # Choosing a Logger -// -// In contexts where performance is nice, but not critical, use the -// SugaredLogger. It's 4-10x faster than other structured logging packages and -// supports both structured and printf-style logging. Like log15 and go-kit, -// the SugaredLogger's structured logging APIs are loosely typed and accept a -// variadic number of key-value pairs. (For more advanced use cases, they also -// accept strongly typed fields - see the SugaredLogger.With documentation for -// details.) -// -// sugar := zap.NewExample().Sugar() -// defer sugar.Sync() -// sugar.Infow("failed to fetch URL", -// "url", "http://example.com", -// "attempt", 3, -// "backoff", time.Second, -// ) -// sugar.Infof("failed to fetch URL: %s", "http://example.com") -// -// By default, loggers are unbuffered. However, since zap's low-level APIs -// allow buffering, calling Sync before letting your process exit is a good -// habit. -// -// In the rare contexts where every microsecond and every allocation matter, -// use the Logger. It's even faster than the SugaredLogger and allocates far -// less, but it only supports strongly-typed, structured logging. -// -// logger := zap.NewExample() -// defer logger.Sync() -// logger.Info("failed to fetch URL", -// zap.String("url", "http://example.com"), -// zap.Int("attempt", 3), -// zap.Duration("backoff", time.Second), -// ) -// -// Choosing between the Logger and SugaredLogger doesn't need to be an -// application-wide decision: converting between the two is simple and -// inexpensive. 
-// -// logger := zap.NewExample() -// defer logger.Sync() -// sugar := logger.Sugar() -// plain := sugar.Desugar() -// -// # Configuring Zap -// -// The simplest way to build a Logger is to use zap's opinionated presets: -// NewExample, NewProduction, and NewDevelopment. These presets build a logger -// with a single function call: -// -// logger, err := zap.NewProduction() -// if err != nil { -// log.Fatalf("can't initialize zap logger: %v", err) -// } -// defer logger.Sync() -// -// Presets are fine for small projects, but larger projects and organizations -// naturally require a bit more customization. For most users, zap's Config -// struct strikes the right balance between flexibility and convenience. See -// the package-level BasicConfiguration example for sample code. -// -// More unusual configurations (splitting output between files, sending logs -// to a message queue, etc.) are possible, but require direct use of -// go.uber.org/zap/zapcore. See the package-level AdvancedConfiguration -// example for sample code. -// -// # Extending Zap -// -// The zap package itself is a relatively thin wrapper around the interfaces -// in go.uber.org/zap/zapcore. Extending zap to support a new encoding (e.g., -// BSON), a new log sink (e.g., Kafka), or something more exotic (perhaps an -// exception aggregation service, like Sentry or Rollbar) typically requires -// implementing the zapcore.Encoder, zapcore.WriteSyncer, or zapcore.Core -// interfaces. See the zapcore documentation for details. -// -// Similarly, package authors can use the high-performance Encoder and Core -// implementations in the zapcore package to build their own loggers. -// -// # Frequently Asked Questions -// -// An FAQ covering everything from installation errors to design decisions is -// available at https://github.com/uber-go/zap/blob/master/FAQ.md. -package zap // import "go.uber.org/zap" diff --git a/vendor/go.uber.org/zap/encoder.go b/vendor/go.uber.org/zap/encoder.go deleted file mode 100644 index caa04ceef..000000000 --- a/vendor/go.uber.org/zap/encoder.go +++ /dev/null @@ -1,79 +0,0 @@ -// Copyright (c) 2016 Uber Technologies, Inc. -// -// Permission is hereby granted, free of charge, to any person obtaining a copy -// of this software and associated documentation files (the "Software"), to deal -// in the Software without restriction, including without limitation the rights -// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -// copies of the Software, and to permit persons to whom the Software is -// furnished to do so, subject to the following conditions: -// -// The above copyright notice and this permission notice shall be included in -// all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -// THE SOFTWARE. 
- -package zap - -import ( - "errors" - "fmt" - "sync" - - "go.uber.org/zap/zapcore" -) - -var ( - errNoEncoderNameSpecified = errors.New("no encoder name specified") - - _encoderNameToConstructor = map[string]func(zapcore.EncoderConfig) (zapcore.Encoder, error){ - "console": func(encoderConfig zapcore.EncoderConfig) (zapcore.Encoder, error) { - return zapcore.NewConsoleEncoder(encoderConfig), nil - }, - "json": func(encoderConfig zapcore.EncoderConfig) (zapcore.Encoder, error) { - return zapcore.NewJSONEncoder(encoderConfig), nil - }, - } - _encoderMutex sync.RWMutex -) - -// RegisterEncoder registers an encoder constructor, which the Config struct -// can then reference. By default, the "json" and "console" encoders are -// registered. -// -// Attempting to register an encoder whose name is already taken returns an -// error. -func RegisterEncoder(name string, constructor func(zapcore.EncoderConfig) (zapcore.Encoder, error)) error { - _encoderMutex.Lock() - defer _encoderMutex.Unlock() - if name == "" { - return errNoEncoderNameSpecified - } - if _, ok := _encoderNameToConstructor[name]; ok { - return fmt.Errorf("encoder already registered for name %q", name) - } - _encoderNameToConstructor[name] = constructor - return nil -} - -func newEncoder(name string, encoderConfig zapcore.EncoderConfig) (zapcore.Encoder, error) { - if encoderConfig.TimeKey != "" && encoderConfig.EncodeTime == nil { - return nil, errors.New("missing EncodeTime in EncoderConfig") - } - - _encoderMutex.RLock() - defer _encoderMutex.RUnlock() - if name == "" { - return nil, errNoEncoderNameSpecified - } - constructor, ok := _encoderNameToConstructor[name] - if !ok { - return nil, fmt.Errorf("no encoder registered for name %q", name) - } - return constructor(encoderConfig) -} diff --git a/vendor/go.uber.org/zap/error.go b/vendor/go.uber.org/zap/error.go deleted file mode 100644 index 45f7b838d..000000000 --- a/vendor/go.uber.org/zap/error.go +++ /dev/null @@ -1,82 +0,0 @@ -// Copyright (c) 2017 Uber Technologies, Inc. -// -// Permission is hereby granted, free of charge, to any person obtaining a copy -// of this software and associated documentation files (the "Software"), to deal -// in the Software without restriction, including without limitation the rights -// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -// copies of the Software, and to permit persons to whom the Software is -// furnished to do so, subject to the following conditions: -// -// The above copyright notice and this permission notice shall be included in -// all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -// THE SOFTWARE. - -package zap - -import ( - "go.uber.org/zap/internal/pool" - "go.uber.org/zap/zapcore" -) - -var _errArrayElemPool = pool.New(func() *errArrayElem { - return &errArrayElem{} -}) - -// Error is shorthand for the common idiom NamedError("error", err). -func Error(err error) Field { - return NamedError("error", err) -} - -// NamedError constructs a field that lazily stores err.Error() under the -// provided key. 
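encoder.go (removed just above) only pre-registers the "json" and "console" encodings; a sketch of RegisterEncoder with a made-up "console-caps" name, assuming the upstream zap and zapcore packages:

    package main

    import (
        "go.uber.org/zap"
        "go.uber.org/zap/zapcore"
    )

    func main() {
        // Register a hypothetical "console-caps" encoding that reuses the console
        // encoder but forces capitalized level names. Registration fails if the
        // name is empty or already taken.
        err := zap.RegisterEncoder("console-caps", func(c zapcore.EncoderConfig) (zapcore.Encoder, error) {
            c.EncodeLevel = zapcore.CapitalLevelEncoder
            return zapcore.NewConsoleEncoder(c), nil
        })
        if err != nil {
            panic(err)
        }

        cfg := zap.NewDevelopmentConfig()
        cfg.Encoding = "console-caps" // Config can now reference the registered name
        logger, _ := cfg.Build()
        defer logger.Sync()
        logger.Warn("custom encoding in use")
    }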
Errors which also implement fmt.Formatter (like those produced -// by github.com/pkg/errors) will also have their verbose representation stored -// under key+"Verbose". If passed a nil error, the field is a no-op. -// -// For the common case in which the key is simply "error", the Error function -// is shorter and less repetitive. -func NamedError(key string, err error) Field { - if err == nil { - return Skip() - } - return Field{Key: key, Type: zapcore.ErrorType, Interface: err} -} - -type errArray []error - -func (errs errArray) MarshalLogArray(arr zapcore.ArrayEncoder) error { - for i := range errs { - if errs[i] == nil { - continue - } - // To represent each error as an object with an "error" attribute and - // potentially an "errorVerbose" attribute, we need to wrap it in a - // type that implements LogObjectMarshaler. To prevent this from - // allocating, pool the wrapper type. - elem := _errArrayElemPool.Get() - elem.error = errs[i] - err := arr.AppendObject(elem) - elem.error = nil - _errArrayElemPool.Put(elem) - if err != nil { - return err - } - } - return nil -} - -type errArrayElem struct { - error -} - -func (e *errArrayElem) MarshalLogObject(enc zapcore.ObjectEncoder) error { - // Re-use the error field's logic, which supports non-standard error types. - Error(e.error).AddTo(enc) - return nil -} diff --git a/vendor/go.uber.org/zap/field.go b/vendor/go.uber.org/zap/field.go deleted file mode 100644 index 6743930b8..000000000 --- a/vendor/go.uber.org/zap/field.go +++ /dev/null @@ -1,615 +0,0 @@ -// Copyright (c) 2016 Uber Technologies, Inc. -// -// Permission is hereby granted, free of charge, to any person obtaining a copy -// of this software and associated documentation files (the "Software"), to deal -// in the Software without restriction, including without limitation the rights -// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -// copies of the Software, and to permit persons to whom the Software is -// furnished to do so, subject to the following conditions: -// -// The above copyright notice and this permission notice shall be included in -// all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -// THE SOFTWARE. - -package zap - -import ( - "fmt" - "math" - "time" - - "go.uber.org/zap/internal/stacktrace" - "go.uber.org/zap/zapcore" -) - -// Field is an alias for Field. Aliasing this type dramatically -// improves the navigability of this package's API documentation. -type Field = zapcore.Field - -var ( - _minTimeInt64 = time.Unix(0, math.MinInt64) - _maxTimeInt64 = time.Unix(0, math.MaxInt64) -) - -// Skip constructs a no-op field, which is often useful when handling invalid -// inputs in other Field constructors. -func Skip() Field { - return Field{Type: zapcore.SkipType} -} - -// nilField returns a field which will marshal explicitly as nil. See motivation -// in https://github.com/uber-go/zap/issues/753 . If we ever make breaking -// changes and add zapcore.NilType and zapcore.ObjectEncoder.AddNil, the -// implementation here should be changed to reflect that. 
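error.go's array wrapper exists so that zap.Errors can emit one object per error without allocating; from the caller's side the deleted API reduces to Error, NamedError, and Errors, roughly:

    package main

    import (
        "errors"

        "go.uber.org/zap"
    )

    func main() {
        logger, _ := zap.NewProduction()
        defer logger.Sync()

        // Illustrative errors; zap.Errors renders each as an {"error": "..."} object
        // inside an array, reusing the pooled wrapper shown above.
        errs := []error{
            errors.New("dial tcp: connection refused"),
            errors.New("context deadline exceeded"),
        }

        logger.Error("all replicas failed",
            zap.NamedError("lastError", errs[len(errs)-1]), // stored under "lastError"
            zap.Errors("attempts", errs),
        )
    }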
-func nilField(key string) Field { return Reflect(key, nil) } - -// Binary constructs a field that carries an opaque binary blob. -// -// Binary data is serialized in an encoding-appropriate format. For example, -// zap's JSON encoder base64-encodes binary blobs. To log UTF-8 encoded text, -// use ByteString. -func Binary(key string, val []byte) Field { - return Field{Key: key, Type: zapcore.BinaryType, Interface: val} -} - -// Bool constructs a field that carries a bool. -func Bool(key string, val bool) Field { - var ival int64 - if val { - ival = 1 - } - return Field{Key: key, Type: zapcore.BoolType, Integer: ival} -} - -// Boolp constructs a field that carries a *bool. The returned Field will safely -// and explicitly represent `nil` when appropriate. -func Boolp(key string, val *bool) Field { - if val == nil { - return nilField(key) - } - return Bool(key, *val) -} - -// ByteString constructs a field that carries UTF-8 encoded text as a []byte. -// To log opaque binary blobs (which aren't necessarily valid UTF-8), use -// Binary. -func ByteString(key string, val []byte) Field { - return Field{Key: key, Type: zapcore.ByteStringType, Interface: val} -} - -// Complex128 constructs a field that carries a complex number. Unlike most -// numeric fields, this costs an allocation (to convert the complex128 to -// interface{}). -func Complex128(key string, val complex128) Field { - return Field{Key: key, Type: zapcore.Complex128Type, Interface: val} -} - -// Complex128p constructs a field that carries a *complex128. The returned Field will safely -// and explicitly represent `nil` when appropriate. -func Complex128p(key string, val *complex128) Field { - if val == nil { - return nilField(key) - } - return Complex128(key, *val) -} - -// Complex64 constructs a field that carries a complex number. Unlike most -// numeric fields, this costs an allocation (to convert the complex64 to -// interface{}). -func Complex64(key string, val complex64) Field { - return Field{Key: key, Type: zapcore.Complex64Type, Interface: val} -} - -// Complex64p constructs a field that carries a *complex64. The returned Field will safely -// and explicitly represent `nil` when appropriate. -func Complex64p(key string, val *complex64) Field { - if val == nil { - return nilField(key) - } - return Complex64(key, *val) -} - -// Float64 constructs a field that carries a float64. The way the -// floating-point value is represented is encoder-dependent, so marshaling is -// necessarily lazy. -func Float64(key string, val float64) Field { - return Field{Key: key, Type: zapcore.Float64Type, Integer: int64(math.Float64bits(val))} -} - -// Float64p constructs a field that carries a *float64. The returned Field will safely -// and explicitly represent `nil` when appropriate. -func Float64p(key string, val *float64) Field { - if val == nil { - return nilField(key) - } - return Float64(key, *val) -} - -// Float32 constructs a field that carries a float32. The way the -// floating-point value is represented is encoder-dependent, so marshaling is -// necessarily lazy. -func Float32(key string, val float32) Field { - return Field{Key: key, Type: zapcore.Float32Type, Integer: int64(math.Float32bits(val))} -} - -// Float32p constructs a field that carries a *float32. The returned Field will safely -// and explicitly represent `nil` when appropriate. -func Float32p(key string, val *float32) Field { - if val == nil { - return nilField(key) - } - return Float32(key, *val) -} - -// Int constructs a field with the given key and value. 
-func Int(key string, val int) Field { - return Int64(key, int64(val)) -} - -// Intp constructs a field that carries a *int. The returned Field will safely -// and explicitly represent `nil` when appropriate. -func Intp(key string, val *int) Field { - if val == nil { - return nilField(key) - } - return Int(key, *val) -} - -// Int64 constructs a field with the given key and value. -func Int64(key string, val int64) Field { - return Field{Key: key, Type: zapcore.Int64Type, Integer: val} -} - -// Int64p constructs a field that carries a *int64. The returned Field will safely -// and explicitly represent `nil` when appropriate. -func Int64p(key string, val *int64) Field { - if val == nil { - return nilField(key) - } - return Int64(key, *val) -} - -// Int32 constructs a field with the given key and value. -func Int32(key string, val int32) Field { - return Field{Key: key, Type: zapcore.Int32Type, Integer: int64(val)} -} - -// Int32p constructs a field that carries a *int32. The returned Field will safely -// and explicitly represent `nil` when appropriate. -func Int32p(key string, val *int32) Field { - if val == nil { - return nilField(key) - } - return Int32(key, *val) -} - -// Int16 constructs a field with the given key and value. -func Int16(key string, val int16) Field { - return Field{Key: key, Type: zapcore.Int16Type, Integer: int64(val)} -} - -// Int16p constructs a field that carries a *int16. The returned Field will safely -// and explicitly represent `nil` when appropriate. -func Int16p(key string, val *int16) Field { - if val == nil { - return nilField(key) - } - return Int16(key, *val) -} - -// Int8 constructs a field with the given key and value. -func Int8(key string, val int8) Field { - return Field{Key: key, Type: zapcore.Int8Type, Integer: int64(val)} -} - -// Int8p constructs a field that carries a *int8. The returned Field will safely -// and explicitly represent `nil` when appropriate. -func Int8p(key string, val *int8) Field { - if val == nil { - return nilField(key) - } - return Int8(key, *val) -} - -// String constructs a field with the given key and value. -func String(key string, val string) Field { - return Field{Key: key, Type: zapcore.StringType, String: val} -} - -// Stringp constructs a field that carries a *string. The returned Field will safely -// and explicitly represent `nil` when appropriate. -func Stringp(key string, val *string) Field { - if val == nil { - return nilField(key) - } - return String(key, *val) -} - -// Uint constructs a field with the given key and value. -func Uint(key string, val uint) Field { - return Uint64(key, uint64(val)) -} - -// Uintp constructs a field that carries a *uint. The returned Field will safely -// and explicitly represent `nil` when appropriate. -func Uintp(key string, val *uint) Field { - if val == nil { - return nilField(key) - } - return Uint(key, *val) -} - -// Uint64 constructs a field with the given key and value. -func Uint64(key string, val uint64) Field { - return Field{Key: key, Type: zapcore.Uint64Type, Integer: int64(val)} -} - -// Uint64p constructs a field that carries a *uint64. The returned Field will safely -// and explicitly represent `nil` when appropriate. -func Uint64p(key string, val *uint64) Field { - if val == nil { - return nilField(key) - } - return Uint64(key, *val) -} - -// Uint32 constructs a field with the given key and value. 
-func Uint32(key string, val uint32) Field { - return Field{Key: key, Type: zapcore.Uint32Type, Integer: int64(val)} -} - -// Uint32p constructs a field that carries a *uint32. The returned Field will safely -// and explicitly represent `nil` when appropriate. -func Uint32p(key string, val *uint32) Field { - if val == nil { - return nilField(key) - } - return Uint32(key, *val) -} - -// Uint16 constructs a field with the given key and value. -func Uint16(key string, val uint16) Field { - return Field{Key: key, Type: zapcore.Uint16Type, Integer: int64(val)} -} - -// Uint16p constructs a field that carries a *uint16. The returned Field will safely -// and explicitly represent `nil` when appropriate. -func Uint16p(key string, val *uint16) Field { - if val == nil { - return nilField(key) - } - return Uint16(key, *val) -} - -// Uint8 constructs a field with the given key and value. -func Uint8(key string, val uint8) Field { - return Field{Key: key, Type: zapcore.Uint8Type, Integer: int64(val)} -} - -// Uint8p constructs a field that carries a *uint8. The returned Field will safely -// and explicitly represent `nil` when appropriate. -func Uint8p(key string, val *uint8) Field { - if val == nil { - return nilField(key) - } - return Uint8(key, *val) -} - -// Uintptr constructs a field with the given key and value. -func Uintptr(key string, val uintptr) Field { - return Field{Key: key, Type: zapcore.UintptrType, Integer: int64(val)} -} - -// Uintptrp constructs a field that carries a *uintptr. The returned Field will safely -// and explicitly represent `nil` when appropriate. -func Uintptrp(key string, val *uintptr) Field { - if val == nil { - return nilField(key) - } - return Uintptr(key, *val) -} - -// Reflect constructs a field with the given key and an arbitrary object. It uses -// an encoding-appropriate, reflection-based function to lazily serialize nearly -// any object into the logging context, but it's relatively slow and -// allocation-heavy. Outside tests, Any is always a better choice. -// -// If encoding fails (e.g., trying to serialize a map[int]string to JSON), Reflect -// includes the error message in the final log output. -func Reflect(key string, val interface{}) Field { - return Field{Key: key, Type: zapcore.ReflectType, Interface: val} -} - -// Namespace creates a named, isolated scope within the logger's context. All -// subsequent fields will be added to the new namespace. -// -// This helps prevent key collisions when injecting loggers into sub-components -// or third-party libraries. -func Namespace(key string) Field { - return Field{Key: key, Type: zapcore.NamespaceType} -} - -// Stringer constructs a field with the given key and the output of the value's -// String method. The Stringer's String method is called lazily. -func Stringer(key string, val fmt.Stringer) Field { - return Field{Key: key, Type: zapcore.StringerType, Interface: val} -} - -// Time constructs a Field with the given key and value. The encoder -// controls how the time is serialized. -func Time(key string, val time.Time) Field { - if val.Before(_minTimeInt64) || val.After(_maxTimeInt64) { - return Field{Key: key, Type: zapcore.TimeFullType, Interface: val} - } - return Field{Key: key, Type: zapcore.TimeType, Integer: val.UnixNano(), Interface: val.Location()} -} - -// Timep constructs a field that carries a *time.Time. The returned Field will safely -// and explicitly represent `nil` when appropriate. 
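All of the pointer constructors above share one contract: a nil pointer is encoded as an explicit null rather than dropped. A small sketch, assuming the upstream zap package; the output line is approximate:

    package main

    import (
        "time"

        "go.uber.org/zap"
    )

    func main() {
        logger := zap.NewExample()
        defer logger.Sync()

        var attempts *int       // nil pointer: encoded as an explicit null
        retries := 3
        var deadline *time.Time // also nil

        logger.Info("job scheduled",
            zap.Intp("attempts", attempts),
            zap.Intp("retries", &retries),
            zap.Timep("deadline", deadline),
        )
        // {"level":"info","msg":"job scheduled","attempts":null,"retries":3,"deadline":null}
    }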
-func Timep(key string, val *time.Time) Field { - if val == nil { - return nilField(key) - } - return Time(key, *val) -} - -// Stack constructs a field that stores a stacktrace of the current goroutine -// under provided key. Keep in mind that taking a stacktrace is eager and -// expensive (relatively speaking); this function both makes an allocation and -// takes about two microseconds. -func Stack(key string) Field { - return StackSkip(key, 1) // skip Stack -} - -// StackSkip constructs a field similarly to Stack, but also skips the given -// number of frames from the top of the stacktrace. -func StackSkip(key string, skip int) Field { - // Returning the stacktrace as a string costs an allocation, but saves us - // from expanding the zapcore.Field union struct to include a byte slice. Since - // taking a stacktrace is already so expensive (~10us), the extra allocation - // is okay. - return String(key, stacktrace.Take(skip+1)) // skip StackSkip -} - -// Duration constructs a field with the given key and value. The encoder -// controls how the duration is serialized. -func Duration(key string, val time.Duration) Field { - return Field{Key: key, Type: zapcore.DurationType, Integer: int64(val)} -} - -// Durationp constructs a field that carries a *time.Duration. The returned Field will safely -// and explicitly represent `nil` when appropriate. -func Durationp(key string, val *time.Duration) Field { - if val == nil { - return nilField(key) - } - return Duration(key, *val) -} - -// Object constructs a field with the given key and ObjectMarshaler. It -// provides a flexible, but still type-safe and efficient, way to add map- or -// struct-like user-defined types to the logging context. The struct's -// MarshalLogObject method is called lazily. -func Object(key string, val zapcore.ObjectMarshaler) Field { - return Field{Key: key, Type: zapcore.ObjectMarshalerType, Interface: val} -} - -// Inline constructs a Field that is similar to Object, but it -// will add the elements of the provided ObjectMarshaler to the -// current namespace. -func Inline(val zapcore.ObjectMarshaler) Field { - return zapcore.Field{ - Type: zapcore.InlineMarshalerType, - Interface: val, - } -} - -// Dict constructs a field containing the provided key-value pairs. -// It acts similar to [Object], but with the fields specified as arguments. -func Dict(key string, val ...Field) Field { - return dictField(key, val) -} - -// We need a function with the signature (string, T) for zap.Any. -func dictField(key string, val []Field) Field { - return Object(key, dictObject(val)) -} - -type dictObject []Field - -func (d dictObject) MarshalLogObject(enc zapcore.ObjectEncoder) error { - for _, f := range d { - f.AddTo(enc) - } - return nil -} - -// We discovered an issue where zap.Any can cause a performance degradation -// when used in new goroutines. -// -// This happens because the compiler assigns 4.8kb (one zap.Field per arm of -// switch statement) of stack space for zap.Any when it takes the form: -// -// switch v := v.(type) { -// case string: -// return String(key, v) -// case int: -// return Int(key, v) -// // ... -// default: -// return Reflect(key, v) -// } -// -// To avoid this, we use the type switch to assign a value to a single local variable -// and then call a function on it. -// The local variable is just a function reference so it doesn't allocate -// when converted to an interface{}. -// -// A fair bit of experimentation went into this. 
-// See also: -// -// - https://github.com/uber-go/zap/pull/1301 -// - https://github.com/uber-go/zap/pull/1303 -// - https://github.com/uber-go/zap/pull/1304 -// - https://github.com/uber-go/zap/pull/1305 -// - https://github.com/uber-go/zap/pull/1308 -// -// See https://github.com/golang/go/issues/62077 for upstream issue. -type anyFieldC[T any] func(string, T) Field - -func (f anyFieldC[T]) Any(key string, val any) Field { - v, _ := val.(T) - // val is guaranteed to be a T, except when it's nil. - return f(key, v) -} - -// Any takes a key and an arbitrary value and chooses the best way to represent -// them as a field, falling back to a reflection-based approach only if -// necessary. -// -// Since byte/uint8 and rune/int32 are aliases, Any can't differentiate between -// them. To minimize surprises, []byte values are treated as binary blobs, byte -// values are treated as uint8, and runes are always treated as integers. -func Any(key string, value interface{}) Field { - var c interface{ Any(string, any) Field } - - switch value.(type) { - case zapcore.ObjectMarshaler: - c = anyFieldC[zapcore.ObjectMarshaler](Object) - case zapcore.ArrayMarshaler: - c = anyFieldC[zapcore.ArrayMarshaler](Array) - case []Field: - c = anyFieldC[[]Field](dictField) - case bool: - c = anyFieldC[bool](Bool) - case *bool: - c = anyFieldC[*bool](Boolp) - case []bool: - c = anyFieldC[[]bool](Bools) - case complex128: - c = anyFieldC[complex128](Complex128) - case *complex128: - c = anyFieldC[*complex128](Complex128p) - case []complex128: - c = anyFieldC[[]complex128](Complex128s) - case complex64: - c = anyFieldC[complex64](Complex64) - case *complex64: - c = anyFieldC[*complex64](Complex64p) - case []complex64: - c = anyFieldC[[]complex64](Complex64s) - case float64: - c = anyFieldC[float64](Float64) - case *float64: - c = anyFieldC[*float64](Float64p) - case []float64: - c = anyFieldC[[]float64](Float64s) - case float32: - c = anyFieldC[float32](Float32) - case *float32: - c = anyFieldC[*float32](Float32p) - case []float32: - c = anyFieldC[[]float32](Float32s) - case int: - c = anyFieldC[int](Int) - case *int: - c = anyFieldC[*int](Intp) - case []int: - c = anyFieldC[[]int](Ints) - case int64: - c = anyFieldC[int64](Int64) - case *int64: - c = anyFieldC[*int64](Int64p) - case []int64: - c = anyFieldC[[]int64](Int64s) - case int32: - c = anyFieldC[int32](Int32) - case *int32: - c = anyFieldC[*int32](Int32p) - case []int32: - c = anyFieldC[[]int32](Int32s) - case int16: - c = anyFieldC[int16](Int16) - case *int16: - c = anyFieldC[*int16](Int16p) - case []int16: - c = anyFieldC[[]int16](Int16s) - case int8: - c = anyFieldC[int8](Int8) - case *int8: - c = anyFieldC[*int8](Int8p) - case []int8: - c = anyFieldC[[]int8](Int8s) - case string: - c = anyFieldC[string](String) - case *string: - c = anyFieldC[*string](Stringp) - case []string: - c = anyFieldC[[]string](Strings) - case uint: - c = anyFieldC[uint](Uint) - case *uint: - c = anyFieldC[*uint](Uintp) - case []uint: - c = anyFieldC[[]uint](Uints) - case uint64: - c = anyFieldC[uint64](Uint64) - case *uint64: - c = anyFieldC[*uint64](Uint64p) - case []uint64: - c = anyFieldC[[]uint64](Uint64s) - case uint32: - c = anyFieldC[uint32](Uint32) - case *uint32: - c = anyFieldC[*uint32](Uint32p) - case []uint32: - c = anyFieldC[[]uint32](Uint32s) - case uint16: - c = anyFieldC[uint16](Uint16) - case *uint16: - c = anyFieldC[*uint16](Uint16p) - case []uint16: - c = anyFieldC[[]uint16](Uint16s) - case uint8: - c = anyFieldC[uint8](Uint8) - case *uint8: - c = 
anyFieldC[*uint8](Uint8p) - case []byte: - c = anyFieldC[[]byte](Binary) - case uintptr: - c = anyFieldC[uintptr](Uintptr) - case *uintptr: - c = anyFieldC[*uintptr](Uintptrp) - case []uintptr: - c = anyFieldC[[]uintptr](Uintptrs) - case time.Time: - c = anyFieldC[time.Time](Time) - case *time.Time: - c = anyFieldC[*time.Time](Timep) - case []time.Time: - c = anyFieldC[[]time.Time](Times) - case time.Duration: - c = anyFieldC[time.Duration](Duration) - case *time.Duration: - c = anyFieldC[*time.Duration](Durationp) - case []time.Duration: - c = anyFieldC[[]time.Duration](Durations) - case error: - c = anyFieldC[error](NamedError) - case []error: - c = anyFieldC[[]error](Errors) - case fmt.Stringer: - c = anyFieldC[fmt.Stringer](Stringer) - default: - c = anyFieldC[any](Reflect) - } - - return c.Any(key, value) -} diff --git a/vendor/go.uber.org/zap/flag.go b/vendor/go.uber.org/zap/flag.go deleted file mode 100644 index 131287507..000000000 --- a/vendor/go.uber.org/zap/flag.go +++ /dev/null @@ -1,39 +0,0 @@ -// Copyright (c) 2016 Uber Technologies, Inc. -// -// Permission is hereby granted, free of charge, to any person obtaining a copy -// of this software and associated documentation files (the "Software"), to deal -// in the Software without restriction, including without limitation the rights -// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -// copies of the Software, and to permit persons to whom the Software is -// furnished to do so, subject to the following conditions: -// -// The above copyright notice and this permission notice shall be included in -// all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -// THE SOFTWARE. - -package zap - -import ( - "flag" - - "go.uber.org/zap/zapcore" -) - -// LevelFlag uses the standard library's flag.Var to declare a global flag -// with the specified name, default, and usage guidance. The returned value is -// a pointer to the value of the flag. -// -// If you don't want to use the flag package's global state, you can use any -// non-nil *Level as a flag.Value with your own *flag.FlagSet. 
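LevelFlag, whose documentation ends just above, is the only export in flag.go; a sketch of wiring it into a production config, assuming the upstream zap and zapcore packages:

    package main

    import (
        "flag"

        "go.uber.org/zap"
        "go.uber.org/zap/zapcore"
    )

    func main() {
        // Registers a -log-level flag on the package-global FlagSet; with a private
        // *flag.FlagSet, fs.Var(&lvl, ...) on any non-nil *zapcore.Level works too.
        level := zap.LevelFlag("log-level", zapcore.InfoLevel, "minimum enabled log level")
        flag.Parse()

        cfg := zap.NewProductionConfig()
        cfg.Level = zap.NewAtomicLevelAt(*level)
        logger, _ := cfg.Build()
        defer logger.Sync()
        logger.Info("logger configured from flag")
    }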
-func LevelFlag(name string, defaultLevel zapcore.Level, usage string) *zapcore.Level { - lvl := defaultLevel - flag.Var(&lvl, name, usage) - return &lvl -} diff --git a/vendor/go.uber.org/zap/glide.yaml b/vendor/go.uber.org/zap/glide.yaml deleted file mode 100644 index 8e1d05e9a..000000000 --- a/vendor/go.uber.org/zap/glide.yaml +++ /dev/null @@ -1,34 +0,0 @@ -package: go.uber.org/zap -license: MIT -import: -- package: go.uber.org/atomic - version: ^1 -- package: go.uber.org/multierr - version: ^1 -testImport: -- package: github.com/satori/go.uuid -- package: github.com/sirupsen/logrus -- package: github.com/apex/log - subpackages: - - handlers/json -- package: github.com/go-kit/kit - subpackages: - - log -- package: github.com/stretchr/testify - subpackages: - - assert - - require -- package: gopkg.in/inconshreveable/log15.v2 -- package: github.com/mattn/goveralls -- package: github.com/pborman/uuid -- package: github.com/pkg/errors -- package: github.com/rs/zerolog -- package: golang.org/x/tools - subpackages: - - cover -- package: golang.org/x/lint - subpackages: - - golint -- package: github.com/axw/gocov - subpackages: - - gocov diff --git a/vendor/go.uber.org/zap/global.go b/vendor/go.uber.org/zap/global.go deleted file mode 100644 index 3cb46c9e0..000000000 --- a/vendor/go.uber.org/zap/global.go +++ /dev/null @@ -1,169 +0,0 @@ -// Copyright (c) 2016 Uber Technologies, Inc. -// -// Permission is hereby granted, free of charge, to any person obtaining a copy -// of this software and associated documentation files (the "Software"), to deal -// in the Software without restriction, including without limitation the rights -// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -// copies of the Software, and to permit persons to whom the Software is -// furnished to do so, subject to the following conditions: -// -// The above copyright notice and this permission notice shall be included in -// all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -// THE SOFTWARE. - -package zap - -import ( - "bytes" - "fmt" - "log" - "os" - "sync" - - "go.uber.org/zap/zapcore" -) - -const ( - _stdLogDefaultDepth = 1 - _loggerWriterDepth = 2 - _programmerErrorTemplate = "You've found a bug in zap! Please file a bug at " + - "https://github.com/uber-go/zap/issues/new and reference this error: %v" -) - -var ( - _globalMu sync.RWMutex - _globalL = NewNop() - _globalS = _globalL.Sugar() -) - -// L returns the global Logger, which can be reconfigured with ReplaceGlobals. -// It's safe for concurrent use. -func L() *Logger { - _globalMu.RLock() - l := _globalL - _globalMu.RUnlock() - return l -} - -// S returns the global SugaredLogger, which can be reconfigured with -// ReplaceGlobals. It's safe for concurrent use. -func S() *SugaredLogger { - _globalMu.RLock() - s := _globalS - _globalMu.RUnlock() - return s -} - -// ReplaceGlobals replaces the global Logger and SugaredLogger, and returns a -// function to restore the original values. It's safe for concurrent use. 
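global.go maintains the process-wide loggers returned by L() and S(); the usual bootstrap or test pattern around ReplaceGlobals looks roughly like this, assuming the upstream zap package:

    package main

    import "go.uber.org/zap"

    func main() {
        logger, _ := zap.NewDevelopment()
        undo := zap.ReplaceGlobals(logger) // the default global is a no-op logger
        defer undo()                       // restore the previous globals when done

        zap.L().Info("structured, via the global Logger")
        zap.S().Infow("sugared, via the global SugaredLogger", "attempt", 3)
    }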
-func ReplaceGlobals(logger *Logger) func() { - _globalMu.Lock() - prev := _globalL - _globalL = logger - _globalS = logger.Sugar() - _globalMu.Unlock() - return func() { ReplaceGlobals(prev) } -} - -// NewStdLog returns a *log.Logger which writes to the supplied zap Logger at -// InfoLevel. To redirect the standard library's package-global logging -// functions, use RedirectStdLog instead. -func NewStdLog(l *Logger) *log.Logger { - logger := l.WithOptions(AddCallerSkip(_stdLogDefaultDepth + _loggerWriterDepth)) - f := logger.Info - return log.New(&loggerWriter{f}, "" /* prefix */, 0 /* flags */) -} - -// NewStdLogAt returns *log.Logger which writes to supplied zap logger at -// required level. -func NewStdLogAt(l *Logger, level zapcore.Level) (*log.Logger, error) { - logger := l.WithOptions(AddCallerSkip(_stdLogDefaultDepth + _loggerWriterDepth)) - logFunc, err := levelToFunc(logger, level) - if err != nil { - return nil, err - } - return log.New(&loggerWriter{logFunc}, "" /* prefix */, 0 /* flags */), nil -} - -// RedirectStdLog redirects output from the standard library's package-global -// logger to the supplied logger at InfoLevel. Since zap already handles caller -// annotations, timestamps, etc., it automatically disables the standard -// library's annotations and prefixing. -// -// It returns a function to restore the original prefix and flags and reset the -// standard library's output to os.Stderr. -func RedirectStdLog(l *Logger) func() { - f, err := redirectStdLogAt(l, InfoLevel) - if err != nil { - // Can't get here, since passing InfoLevel to redirectStdLogAt always - // works. - panic(fmt.Sprintf(_programmerErrorTemplate, err)) - } - return f -} - -// RedirectStdLogAt redirects output from the standard library's package-global -// logger to the supplied logger at the specified level. Since zap already -// handles caller annotations, timestamps, etc., it automatically disables the -// standard library's annotations and prefixing. -// -// It returns a function to restore the original prefix and flags and reset the -// standard library's output to os.Stderr. 
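The standard-library bridges documented above come in two flavors: NewStdLog hands back a *log.Logger, while RedirectStdLog rewires log's package-global output. A compact sketch of both, assuming upstream zap:

    package main

    import (
        "log"

        "go.uber.org/zap"
    )

    func main() {
        logger, _ := zap.NewProduction()
        defer logger.Sync()

        // Route the standard library's package-global logger through zap at InfoLevel.
        undo := zap.RedirectStdLog(logger)
        defer undo() // restore log's original prefix, flags, and os.Stderr output

        log.Print("emitted by the stdlib, captured by zap")

        // Or hand a *log.Logger to code that insists on one.
        std := zap.NewStdLog(logger)
        std.Print("also captured by zap at InfoLevel")
    }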
-func RedirectStdLogAt(l *Logger, level zapcore.Level) (func(), error) { - return redirectStdLogAt(l, level) -} - -func redirectStdLogAt(l *Logger, level zapcore.Level) (func(), error) { - flags := log.Flags() - prefix := log.Prefix() - log.SetFlags(0) - log.SetPrefix("") - logger := l.WithOptions(AddCallerSkip(_stdLogDefaultDepth + _loggerWriterDepth)) - logFunc, err := levelToFunc(logger, level) - if err != nil { - return nil, err - } - log.SetOutput(&loggerWriter{logFunc}) - return func() { - log.SetFlags(flags) - log.SetPrefix(prefix) - log.SetOutput(os.Stderr) - }, nil -} - -func levelToFunc(logger *Logger, lvl zapcore.Level) (func(string, ...Field), error) { - switch lvl { - case DebugLevel: - return logger.Debug, nil - case InfoLevel: - return logger.Info, nil - case WarnLevel: - return logger.Warn, nil - case ErrorLevel: - return logger.Error, nil - case DPanicLevel: - return logger.DPanic, nil - case PanicLevel: - return logger.Panic, nil - case FatalLevel: - return logger.Fatal, nil - } - return nil, fmt.Errorf("unrecognized level: %q", lvl) -} - -type loggerWriter struct { - logFunc func(msg string, fields ...Field) -} - -func (l *loggerWriter) Write(p []byte) (int, error) { - p = bytes.TrimSpace(p) - l.logFunc(string(p)) - return len(p), nil -} diff --git a/vendor/go.uber.org/zap/http_handler.go b/vendor/go.uber.org/zap/http_handler.go deleted file mode 100644 index 2be8f6515..000000000 --- a/vendor/go.uber.org/zap/http_handler.go +++ /dev/null @@ -1,140 +0,0 @@ -// Copyright (c) 2016 Uber Technologies, Inc. -// -// Permission is hereby granted, free of charge, to any person obtaining a copy -// of this software and associated documentation files (the "Software"), to deal -// in the Software without restriction, including without limitation the rights -// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -// copies of the Software, and to permit persons to whom the Software is -// furnished to do so, subject to the following conditions: -// -// The above copyright notice and this permission notice shall be included in -// all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -// THE SOFTWARE. - -package zap - -import ( - "encoding/json" - "errors" - "fmt" - "io" - "net/http" - - "go.uber.org/zap/zapcore" -) - -// ServeHTTP is a simple JSON endpoint that can report on or change the current -// logging level. -// -// # GET -// -// The GET request returns a JSON description of the current logging level like: -// -// {"level":"info"} -// -// # PUT -// -// The PUT request changes the logging level. It is perfectly safe to change the -// logging level while a program is running. Two content types are supported: -// -// Content-Type: application/x-www-form-urlencoded -// -// With this content type, the level can be provided through the request body or -// a query parameter. The log level is URL encoded like: -// -// level=debug -// -// The request body takes precedence over the query parameter, if both are -// specified. -// -// This content type is the default for a curl PUT request. 
Following are two -// example curl requests that both set the logging level to debug. -// -// curl -X PUT localhost:8080/log/level?level=debug -// curl -X PUT localhost:8080/log/level -d level=debug -// -// For any other content type, the payload is expected to be JSON encoded and -// look like: -// -// {"level":"info"} -// -// An example curl request could look like this: -// -// curl -X PUT localhost:8080/log/level -H "Content-Type: application/json" -d '{"level":"debug"}' -func (lvl AtomicLevel) ServeHTTP(w http.ResponseWriter, r *http.Request) { - if err := lvl.serveHTTP(w, r); err != nil { - w.WriteHeader(http.StatusInternalServerError) - fmt.Fprintf(w, "internal error: %v", err) - } -} - -func (lvl AtomicLevel) serveHTTP(w http.ResponseWriter, r *http.Request) error { - type errorResponse struct { - Error string `json:"error"` - } - type payload struct { - Level zapcore.Level `json:"level"` - } - - enc := json.NewEncoder(w) - - switch r.Method { - case http.MethodGet: - return enc.Encode(payload{Level: lvl.Level()}) - - case http.MethodPut: - requestedLvl, err := decodePutRequest(r.Header.Get("Content-Type"), r) - if err != nil { - w.WriteHeader(http.StatusBadRequest) - return enc.Encode(errorResponse{Error: err.Error()}) - } - lvl.SetLevel(requestedLvl) - return enc.Encode(payload{Level: lvl.Level()}) - - default: - w.WriteHeader(http.StatusMethodNotAllowed) - return enc.Encode(errorResponse{ - Error: "Only GET and PUT are supported.", - }) - } -} - -// Decodes incoming PUT requests and returns the requested logging level. -func decodePutRequest(contentType string, r *http.Request) (zapcore.Level, error) { - if contentType == "application/x-www-form-urlencoded" { - return decodePutURL(r) - } - return decodePutJSON(r.Body) -} - -func decodePutURL(r *http.Request) (zapcore.Level, error) { - lvl := r.FormValue("level") - if lvl == "" { - return 0, errors.New("must specify logging level") - } - var l zapcore.Level - if err := l.UnmarshalText([]byte(lvl)); err != nil { - return 0, err - } - return l, nil -} - -func decodePutJSON(body io.Reader) (zapcore.Level, error) { - var pld struct { - Level *zapcore.Level `json:"level"` - } - if err := json.NewDecoder(body).Decode(&pld); err != nil { - return 0, fmt.Errorf("malformed request body: %v", err) - } - if pld.Level == nil { - return 0, errors.New("must specify logging level") - } - return *pld.Level, nil -} diff --git a/vendor/go.uber.org/zap/internal/bufferpool/bufferpool.go b/vendor/go.uber.org/zap/internal/bufferpool/bufferpool.go deleted file mode 100644 index dad583aaa..000000000 --- a/vendor/go.uber.org/zap/internal/bufferpool/bufferpool.go +++ /dev/null @@ -1,31 +0,0 @@ -// Copyright (c) 2016 Uber Technologies, Inc. -// -// Permission is hereby granted, free of charge, to any person obtaining a copy -// of this software and associated documentation files (the "Software"), to deal -// in the Software without restriction, including without limitation the rights -// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -// copies of the Software, and to permit persons to whom the Software is -// furnished to do so, subject to the following conditions: -// -// The above copyright notice and this permission notice shall be included in -// all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
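The http_handler.go endpoint removed above is normally mounted on an AtomicLevel shared with the logger's core, so a single value both gates logging and serves the GET/PUT requests described in its doc comment. A minimal sketch; the :8080 address and /log/level route are assumptions for illustration.

package main

import (
	"net/http"
	"os"

	"go.uber.org/zap"
	"go.uber.org/zap/zapcore"
)

func main() {
	// AtomicLevel is both a zapcore.LevelEnabler and an http.Handler.
	lvl := zap.NewAtomicLevel()
	logger := zap.New(zapcore.NewCore(
		zapcore.NewJSONEncoder(zap.NewProductionEncoderConfig()),
		zapcore.Lock(os.Stdout),
		lvl,
	))
	defer logger.Sync()

	http.Handle("/log/level", lvl)
	logger.Info("listening", zap.String("addr", ":8080"))
	// Raise verbosity at runtime with:
	//   curl -X PUT localhost:8080/log/level -d level=debug
	logger.Fatal("server exited", zap.Error(http.ListenAndServe(":8080", nil)))
}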
IN NO EVENT SHALL THE -// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -// THE SOFTWARE. - -// Package bufferpool houses zap's shared internal buffer pool. Third-party -// packages can recreate the same functionality with buffers.NewPool. -package bufferpool - -import "go.uber.org/zap/buffer" - -var ( - _pool = buffer.NewPool() - // Get retrieves a buffer from the pool, creating one if necessary. - Get = _pool.Get -) diff --git a/vendor/go.uber.org/zap/internal/color/color.go b/vendor/go.uber.org/zap/internal/color/color.go deleted file mode 100644 index c4d5d02ab..000000000 --- a/vendor/go.uber.org/zap/internal/color/color.go +++ /dev/null @@ -1,44 +0,0 @@ -// Copyright (c) 2016 Uber Technologies, Inc. -// -// Permission is hereby granted, free of charge, to any person obtaining a copy -// of this software and associated documentation files (the "Software"), to deal -// in the Software without restriction, including without limitation the rights -// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -// copies of the Software, and to permit persons to whom the Software is -// furnished to do so, subject to the following conditions: -// -// The above copyright notice and this permission notice shall be included in -// all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -// THE SOFTWARE. - -// Package color adds coloring functionality for TTY output. -package color - -import "fmt" - -// Foreground colors. -const ( - Black Color = iota + 30 - Red - Green - Yellow - Blue - Magenta - Cyan - White -) - -// Color represents a text color. -type Color uint8 - -// Add adds the coloring to the given string. -func (c Color) Add(s string) string { - return fmt.Sprintf("\x1b[%dm%s\x1b[0m", uint8(c), s) -} diff --git a/vendor/go.uber.org/zap/internal/exit/exit.go b/vendor/go.uber.org/zap/internal/exit/exit.go deleted file mode 100644 index f673f9947..000000000 --- a/vendor/go.uber.org/zap/internal/exit/exit.go +++ /dev/null @@ -1,66 +0,0 @@ -// Copyright (c) 2016 Uber Technologies, Inc. -// -// Permission is hereby granted, free of charge, to any person obtaining a copy -// of this software and associated documentation files (the "Software"), to deal -// in the Software without restriction, including without limitation the rights -// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -// copies of the Software, and to permit persons to whom the Software is -// furnished to do so, subject to the following conditions: -// -// The above copyright notice and this permission notice shall be included in -// all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE -// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -// THE SOFTWARE. - -// Package exit provides stubs so that unit tests can exercise code that calls -// os.Exit(1). -package exit - -import "os" - -var _exit = os.Exit - -// With terminates the process by calling os.Exit(code). If the package is -// stubbed, it instead records a call in the testing spy. -func With(code int) { - _exit(code) -} - -// A StubbedExit is a testing fake for os.Exit. -type StubbedExit struct { - Exited bool - Code int - prev func(code int) -} - -// Stub substitutes a fake for the call to os.Exit(1). -func Stub() *StubbedExit { - s := &StubbedExit{prev: _exit} - _exit = s.exit - return s -} - -// WithStub runs the supplied function with Exit stubbed. It returns the stub -// used, so that users can test whether the process would have crashed. -func WithStub(f func()) *StubbedExit { - s := Stub() - defer s.Unstub() - f() - return s -} - -// Unstub restores the previous exit function. -func (se *StubbedExit) Unstub() { - _exit = se.prev -} - -func (se *StubbedExit) exit(code int) { - se.Exited = true - se.Code = code -} diff --git a/vendor/go.uber.org/zap/internal/level_enabler.go b/vendor/go.uber.org/zap/internal/level_enabler.go deleted file mode 100644 index 40bfed81e..000000000 --- a/vendor/go.uber.org/zap/internal/level_enabler.go +++ /dev/null @@ -1,37 +0,0 @@ -// Copyright (c) 2022 Uber Technologies, Inc. -// -// Permission is hereby granted, free of charge, to any person obtaining a copy -// of this software and associated documentation files (the "Software"), to deal -// in the Software without restriction, including without limitation the rights -// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -// copies of the Software, and to permit persons to whom the Software is -// furnished to do so, subject to the following conditions: -// -// The above copyright notice and this permission notice shall be included in -// all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -// THE SOFTWARE. - -// Package internal and its subpackages hold types and functionality -// that are not part of Zap's public API. -package internal - -import "go.uber.org/zap/zapcore" - -// LeveledEnabler is an interface satisfied by LevelEnablers that are able to -// report their own level. -// -// This interface is defined to use more conveniently in tests and non-zapcore -// packages. -// This cannot be imported from zapcore because of the cyclic dependency. -type LeveledEnabler interface { - zapcore.LevelEnabler - - Level() zapcore.Level -} diff --git a/vendor/go.uber.org/zap/internal/pool/pool.go b/vendor/go.uber.org/zap/internal/pool/pool.go deleted file mode 100644 index 60e9d2c43..000000000 --- a/vendor/go.uber.org/zap/internal/pool/pool.go +++ /dev/null @@ -1,58 +0,0 @@ -// Copyright (c) 2023 Uber Technologies, Inc. 
-// -// Permission is hereby granted, free of charge, to any person obtaining a copy -// of this software and associated documentation files (the "Software"), to deal -// in the Software without restriction, including without limitation the rights -// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -// copies of the Software, and to permit persons to whom the Software is -// furnished to do so, subject to the following conditions: -// -// The above copyright notice and this permission notice shall be included in -// all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -// THE SOFTWARE. - -// Package pool provides internal pool utilities. -package pool - -import ( - "sync" -) - -// A Pool is a generic wrapper around [sync.Pool] to provide strongly-typed -// object pooling. -// -// Note that SA6002 (ref: https://staticcheck.io/docs/checks/#SA6002) will -// not be detected, so all internal pool use must take care to only store -// pointer types. -type Pool[T any] struct { - pool sync.Pool -} - -// New returns a new [Pool] for T, and will use fn to construct new Ts when -// the pool is empty. -func New[T any](fn func() T) *Pool[T] { - return &Pool[T]{ - pool: sync.Pool{ - New: func() any { - return fn() - }, - }, - } -} - -// Get gets a T from the pool, or creates a new one if the pool is empty. -func (p *Pool[T]) Get() T { - return p.pool.Get().(T) -} - -// Put returns x into the pool. -func (p *Pool[T]) Put(x T) { - p.pool.Put(x) -} diff --git a/vendor/go.uber.org/zap/internal/stacktrace/stack.go b/vendor/go.uber.org/zap/internal/stacktrace/stack.go deleted file mode 100644 index 82af7551f..000000000 --- a/vendor/go.uber.org/zap/internal/stacktrace/stack.go +++ /dev/null @@ -1,181 +0,0 @@ -// Copyright (c) 2023 Uber Technologies, Inc. -// -// Permission is hereby granted, free of charge, to any person obtaining a copy -// of this software and associated documentation files (the "Software"), to deal -// in the Software without restriction, including without limitation the rights -// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -// copies of the Software, and to permit persons to whom the Software is -// furnished to do so, subject to the following conditions: -// -// The above copyright notice and this permission notice shall be included in -// all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -// THE SOFTWARE. - -// Package stacktrace provides support for gathering stack traces -// efficiently. 
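The internal/pool package deleted above is not importable from outside zap, but the pattern is small enough to reproduce wherever a typed sync.Pool is useful. A sketch under that assumption; only pointer-like types should be stored, per the SA6002 note in the original.

package main

import (
	"bytes"
	"fmt"
	"sync"
)

// Pool mirrors zap's internal helper: a strongly-typed wrapper around sync.Pool.
type Pool[T any] struct{ pool sync.Pool }

// New builds a Pool that uses fn to construct new values when empty.
func New[T any](fn func() T) *Pool[T] {
	return &Pool[T]{pool: sync.Pool{New: func() any { return fn() }}}
}

func (p *Pool[T]) Get() T  { return p.pool.Get().(T) }
func (p *Pool[T]) Put(x T) { p.pool.Put(x) }

func main() {
	bufs := New(func() *bytes.Buffer { return new(bytes.Buffer) })
	b := bufs.Get()
	b.Reset() // a pooled buffer may hold data from a previous user
	b.WriteString("pooled")
	fmt.Println(b.String())
	bufs.Put(b)
}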
-package stacktrace - -import ( - "runtime" - - "go.uber.org/zap/buffer" - "go.uber.org/zap/internal/bufferpool" - "go.uber.org/zap/internal/pool" -) - -var _stackPool = pool.New(func() *Stack { - return &Stack{ - storage: make([]uintptr, 64), - } -}) - -// Stack is a captured stack trace. -type Stack struct { - pcs []uintptr // program counters; always a subslice of storage - frames *runtime.Frames - - // The size of pcs varies depending on requirements: - // it will be one if the only the first frame was requested, - // and otherwise it will reflect the depth of the call stack. - // - // storage decouples the slice we need (pcs) from the slice we pool. - // We will always allocate a reasonably large storage, but we'll use - // only as much of it as we need. - storage []uintptr -} - -// Depth specifies how deep of a stack trace should be captured. -type Depth int - -const ( - // First captures only the first frame. - First Depth = iota - - // Full captures the entire call stack, allocating more - // storage for it if needed. - Full -) - -// Capture captures a stack trace of the specified depth, skipping -// the provided number of frames. skip=0 identifies the caller of -// Capture. -// -// The caller must call Free on the returned stacktrace after using it. -func Capture(skip int, depth Depth) *Stack { - stack := _stackPool.Get() - - switch depth { - case First: - stack.pcs = stack.storage[:1] - case Full: - stack.pcs = stack.storage - } - - // Unlike other "skip"-based APIs, skip=0 identifies runtime.Callers - // itself. +2 to skip captureStacktrace and runtime.Callers. - numFrames := runtime.Callers( - skip+2, - stack.pcs, - ) - - // runtime.Callers truncates the recorded stacktrace if there is no - // room in the provided slice. For the full stack trace, keep expanding - // storage until there are fewer frames than there is room. - if depth == Full { - pcs := stack.pcs - for numFrames == len(pcs) { - pcs = make([]uintptr, len(pcs)*2) - numFrames = runtime.Callers(skip+2, pcs) - } - - // Discard old storage instead of returning it to the pool. - // This will adjust the pool size over time if stack traces are - // consistently very deep. - stack.storage = pcs - stack.pcs = pcs[:numFrames] - } else { - stack.pcs = stack.pcs[:numFrames] - } - - stack.frames = runtime.CallersFrames(stack.pcs) - return stack -} - -// Free releases resources associated with this stacktrace -// and returns it back to the pool. -func (st *Stack) Free() { - st.frames = nil - st.pcs = nil - _stackPool.Put(st) -} - -// Count reports the total number of frames in this stacktrace. -// Count DOES NOT change as Next is called. -func (st *Stack) Count() int { - return len(st.pcs) -} - -// Next returns the next frame in the stack trace, -// and a boolean indicating whether there are more after it. -func (st *Stack) Next() (_ runtime.Frame, more bool) { - return st.frames.Next() -} - -// Take returns a string representation of the current stacktrace. -// -// skip is the number of frames to skip before recording the stack trace. -// skip=0 identifies the caller of Take. -func Take(skip int) string { - stack := Capture(skip+1, Full) - defer stack.Free() - - buffer := bufferpool.Get() - defer buffer.Free() - - stackfmt := NewFormatter(buffer) - stackfmt.FormatStack(stack) - return buffer.String() -} - -// Formatter formats a stack trace into a readable string representation. 
-type Formatter struct { - b *buffer.Buffer - nonEmpty bool // whehther we've written at least one frame already -} - -// NewFormatter builds a new Formatter. -func NewFormatter(b *buffer.Buffer) Formatter { - return Formatter{b: b} -} - -// FormatStack formats all remaining frames in the provided stacktrace -- minus -// the final runtime.main/runtime.goexit frame. -func (sf *Formatter) FormatStack(stack *Stack) { - // Note: On the last iteration, frames.Next() returns false, with a valid - // frame, but we ignore this frame. The last frame is a runtime frame which - // adds noise, since it's only either runtime.main or runtime.goexit. - for frame, more := stack.Next(); more; frame, more = stack.Next() { - sf.FormatFrame(frame) - } -} - -// FormatFrame formats the given frame. -func (sf *Formatter) FormatFrame(frame runtime.Frame) { - if sf.nonEmpty { - sf.b.AppendByte('\n') - } - sf.nonEmpty = true - sf.b.AppendString(frame.Function) - sf.b.AppendByte('\n') - sf.b.AppendByte('\t') - sf.b.AppendString(frame.File) - sf.b.AppendByte(':') - sf.b.AppendInt(int64(frame.Line)) -} diff --git a/vendor/go.uber.org/zap/level.go b/vendor/go.uber.org/zap/level.go deleted file mode 100644 index 155b208bd..000000000 --- a/vendor/go.uber.org/zap/level.go +++ /dev/null @@ -1,153 +0,0 @@ -// Copyright (c) 2016 Uber Technologies, Inc. -// -// Permission is hereby granted, free of charge, to any person obtaining a copy -// of this software and associated documentation files (the "Software"), to deal -// in the Software without restriction, including without limitation the rights -// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -// copies of the Software, and to permit persons to whom the Software is -// furnished to do so, subject to the following conditions: -// -// The above copyright notice and this permission notice shall be included in -// all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -// THE SOFTWARE. - -package zap - -import ( - "sync/atomic" - - "go.uber.org/zap/internal" - "go.uber.org/zap/zapcore" -) - -const ( - // DebugLevel logs are typically voluminous, and are usually disabled in - // production. - DebugLevel = zapcore.DebugLevel - // InfoLevel is the default logging priority. - InfoLevel = zapcore.InfoLevel - // WarnLevel logs are more important than Info, but don't need individual - // human review. - WarnLevel = zapcore.WarnLevel - // ErrorLevel logs are high-priority. If an application is running smoothly, - // it shouldn't generate any error-level logs. - ErrorLevel = zapcore.ErrorLevel - // DPanicLevel logs are particularly important errors. In development the - // logger panics after writing the message. - DPanicLevel = zapcore.DPanicLevel - // PanicLevel logs a message, then panics. - PanicLevel = zapcore.PanicLevel - // FatalLevel logs a message, then calls os.Exit(1). - FatalLevel = zapcore.FatalLevel -) - -// LevelEnablerFunc is a convenient way to implement zapcore.LevelEnabler with -// an anonymous function. 
-// -// It's particularly useful when splitting log output between different -// outputs (e.g., standard error and standard out). For sample code, see the -// package-level AdvancedConfiguration example. -type LevelEnablerFunc func(zapcore.Level) bool - -// Enabled calls the wrapped function. -func (f LevelEnablerFunc) Enabled(lvl zapcore.Level) bool { return f(lvl) } - -// An AtomicLevel is an atomically changeable, dynamic logging level. It lets -// you safely change the log level of a tree of loggers (the root logger and -// any children created by adding context) at runtime. -// -// The AtomicLevel itself is an http.Handler that serves a JSON endpoint to -// alter its level. -// -// AtomicLevels must be created with the NewAtomicLevel constructor to allocate -// their internal atomic pointer. -type AtomicLevel struct { - l *atomic.Int32 -} - -var _ internal.LeveledEnabler = AtomicLevel{} - -// NewAtomicLevel creates an AtomicLevel with InfoLevel and above logging -// enabled. -func NewAtomicLevel() AtomicLevel { - lvl := AtomicLevel{l: new(atomic.Int32)} - lvl.l.Store(int32(InfoLevel)) - return lvl -} - -// NewAtomicLevelAt is a convenience function that creates an AtomicLevel -// and then calls SetLevel with the given level. -func NewAtomicLevelAt(l zapcore.Level) AtomicLevel { - a := NewAtomicLevel() - a.SetLevel(l) - return a -} - -// ParseAtomicLevel parses an AtomicLevel based on a lowercase or all-caps ASCII -// representation of the log level. If the provided ASCII representation is -// invalid an error is returned. -// -// This is particularly useful when dealing with text input to configure log -// levels. -func ParseAtomicLevel(text string) (AtomicLevel, error) { - a := NewAtomicLevel() - l, err := zapcore.ParseLevel(text) - if err != nil { - return a, err - } - - a.SetLevel(l) - return a, nil -} - -// Enabled implements the zapcore.LevelEnabler interface, which allows the -// AtomicLevel to be used in place of traditional static levels. -func (lvl AtomicLevel) Enabled(l zapcore.Level) bool { - return lvl.Level().Enabled(l) -} - -// Level returns the minimum enabled log level. -func (lvl AtomicLevel) Level() zapcore.Level { - return zapcore.Level(int8(lvl.l.Load())) -} - -// SetLevel alters the logging level. -func (lvl AtomicLevel) SetLevel(l zapcore.Level) { - lvl.l.Store(int32(l)) -} - -// String returns the string representation of the underlying Level. -func (lvl AtomicLevel) String() string { - return lvl.Level().String() -} - -// UnmarshalText unmarshals the text to an AtomicLevel. It uses the same text -// representations as the static zapcore.Levels ("debug", "info", "warn", -// "error", "dpanic", "panic", and "fatal"). -func (lvl *AtomicLevel) UnmarshalText(text []byte) error { - if lvl.l == nil { - lvl.l = &atomic.Int32{} - } - - var l zapcore.Level - if err := l.UnmarshalText(text); err != nil { - return err - } - - lvl.SetLevel(l) - return nil -} - -// MarshalText marshals the AtomicLevel to a byte slice. It uses the same -// text representation as the static zapcore.Levels ("debug", "info", "warn", -// "error", "dpanic", "panic", and "fatal"). -func (lvl AtomicLevel) MarshalText() (text []byte, err error) { - return lvl.Level().MarshalText() -} diff --git a/vendor/go.uber.org/zap/logger.go b/vendor/go.uber.org/zap/logger.go deleted file mode 100644 index c4d300323..000000000 --- a/vendor/go.uber.org/zap/logger.go +++ /dev/null @@ -1,435 +0,0 @@ -// Copyright (c) 2016 Uber Technologies, Inc. 
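The LevelEnablerFunc and AtomicLevel types removed above are what the package-level AdvancedConfiguration example builds on: separate cores, each gated by its own enabler, combined with zapcore.NewTee. A condensed sketch of that pattern; stdout/stderr as destinations are the usual choice, not a requirement.

package main

import (
	"os"

	"go.uber.org/zap"
	"go.uber.org/zap/zapcore"
)

func main() {
	// Warnings and above go to stderr, everything else to stdout.
	highPriority := zap.LevelEnablerFunc(func(l zapcore.Level) bool {
		return l >= zapcore.WarnLevel
	})
	lowPriority := zap.LevelEnablerFunc(func(l zapcore.Level) bool {
		return l < zapcore.WarnLevel
	})

	enc := zapcore.NewJSONEncoder(zap.NewProductionEncoderConfig())
	core := zapcore.NewTee(
		zapcore.NewCore(enc, zapcore.Lock(os.Stderr), highPriority),
		zapcore.NewCore(enc, zapcore.Lock(os.Stdout), lowPriority),
	)

	logger := zap.New(core)
	defer logger.Sync()
	logger.Info("routed to stdout")
	logger.Warn("routed to stderr")
}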
-// -// Permission is hereby granted, free of charge, to any person obtaining a copy -// of this software and associated documentation files (the "Software"), to deal -// in the Software without restriction, including without limitation the rights -// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -// copies of the Software, and to permit persons to whom the Software is -// furnished to do so, subject to the following conditions: -// -// The above copyright notice and this permission notice shall be included in -// all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -// THE SOFTWARE. - -package zap - -import ( - "fmt" - "io" - "os" - "strings" - - "go.uber.org/zap/internal/bufferpool" - "go.uber.org/zap/internal/stacktrace" - "go.uber.org/zap/zapcore" -) - -// A Logger provides fast, leveled, structured logging. All methods are safe -// for concurrent use. -// -// The Logger is designed for contexts in which every microsecond and every -// allocation matters, so its API intentionally favors performance and type -// safety over brevity. For most applications, the SugaredLogger strikes a -// better balance between performance and ergonomics. -type Logger struct { - core zapcore.Core - - development bool - addCaller bool - onPanic zapcore.CheckWriteHook // default is WriteThenPanic - onFatal zapcore.CheckWriteHook // default is WriteThenFatal - - name string - errorOutput zapcore.WriteSyncer - - addStack zapcore.LevelEnabler - - callerSkip int - - clock zapcore.Clock -} - -// New constructs a new Logger from the provided zapcore.Core and Options. If -// the passed zapcore.Core is nil, it falls back to using a no-op -// implementation. -// -// This is the most flexible way to construct a Logger, but also the most -// verbose. For typical use cases, the highly-opinionated presets -// (NewProduction, NewDevelopment, and NewExample) or the Config struct are -// more convenient. -// -// For sample code, see the package-level AdvancedConfiguration example. -func New(core zapcore.Core, options ...Option) *Logger { - if core == nil { - return NewNop() - } - log := &Logger{ - core: core, - errorOutput: zapcore.Lock(os.Stderr), - addStack: zapcore.FatalLevel + 1, - clock: zapcore.DefaultClock, - } - return log.WithOptions(options...) -} - -// NewNop returns a no-op Logger. It never writes out logs or internal errors, -// and it never runs user-defined hooks. -// -// Using WithOptions to replace the Core or error output of a no-op Logger can -// re-enable logging. -func NewNop() *Logger { - return &Logger{ - core: zapcore.NewNopCore(), - errorOutput: zapcore.AddSync(io.Discard), - addStack: zapcore.FatalLevel + 1, - clock: zapcore.DefaultClock, - } -} - -// NewProduction builds a sensible production Logger that writes InfoLevel and -// above logs to standard error as JSON. -// -// It's a shortcut for NewProductionConfig().Build(...Option). -func NewProduction(options ...Option) (*Logger, error) { - return NewProductionConfig().Build(options...) 
-} - -// NewDevelopment builds a development Logger that writes DebugLevel and above -// logs to standard error in a human-friendly format. -// -// It's a shortcut for NewDevelopmentConfig().Build(...Option). -func NewDevelopment(options ...Option) (*Logger, error) { - return NewDevelopmentConfig().Build(options...) -} - -// Must is a helper that wraps a call to a function returning (*Logger, error) -// and panics if the error is non-nil. It is intended for use in variable -// initialization such as: -// -// var logger = zap.Must(zap.NewProduction()) -func Must(logger *Logger, err error) *Logger { - if err != nil { - panic(err) - } - - return logger -} - -// NewExample builds a Logger that's designed for use in zap's testable -// examples. It writes DebugLevel and above logs to standard out as JSON, but -// omits the timestamp and calling function to keep example output -// short and deterministic. -func NewExample(options ...Option) *Logger { - encoderCfg := zapcore.EncoderConfig{ - MessageKey: "msg", - LevelKey: "level", - NameKey: "logger", - EncodeLevel: zapcore.LowercaseLevelEncoder, - EncodeTime: zapcore.ISO8601TimeEncoder, - EncodeDuration: zapcore.StringDurationEncoder, - } - core := zapcore.NewCore(zapcore.NewJSONEncoder(encoderCfg), os.Stdout, DebugLevel) - return New(core).WithOptions(options...) -} - -// Sugar wraps the Logger to provide a more ergonomic, but slightly slower, -// API. Sugaring a Logger is quite inexpensive, so it's reasonable for a -// single application to use both Loggers and SugaredLoggers, converting -// between them on the boundaries of performance-sensitive code. -func (log *Logger) Sugar() *SugaredLogger { - core := log.clone() - core.callerSkip += 2 - return &SugaredLogger{core} -} - -// Named adds a new path segment to the logger's name. Segments are joined by -// periods. By default, Loggers are unnamed. -func (log *Logger) Named(s string) *Logger { - if s == "" { - return log - } - l := log.clone() - if log.name == "" { - l.name = s - } else { - l.name = strings.Join([]string{l.name, s}, ".") - } - return l -} - -// WithOptions clones the current Logger, applies the supplied Options, and -// returns the resulting Logger. It's safe to use concurrently. -func (log *Logger) WithOptions(opts ...Option) *Logger { - c := log.clone() - for _, opt := range opts { - opt.apply(c) - } - return c -} - -// With creates a child logger and adds structured context to it. Fields added -// to the child don't affect the parent, and vice versa. Any fields that -// require evaluation (such as Objects) are evaluated upon invocation of With. -func (log *Logger) With(fields ...Field) *Logger { - if len(fields) == 0 { - return log - } - l := log.clone() - l.core = l.core.With(fields) - return l -} - -// WithLazy creates a child logger and adds structured context to it lazily. -// -// The fields are evaluated only if the logger is further chained with [With] -// or is written to with any of the log level methods. -// Until that occurs, the logger may retain references to objects inside the fields, -// and logging will reflect the state of an object at the time of logging, -// not the time of WithLazy(). -// -// WithLazy provides a worthwhile performance optimization for contextual loggers -// when the likelihood of using the child logger is low, -// such as error paths and rarely taken branches. -// -// Similar to [With], fields added to the child don't affect the parent, and vice versa. 
-func (log *Logger) WithLazy(fields ...Field) *Logger { - if len(fields) == 0 { - return log - } - return log.WithOptions(WrapCore(func(core zapcore.Core) zapcore.Core { - return zapcore.NewLazyWith(core, fields) - })) -} - -// Level reports the minimum enabled level for this logger. -// -// For NopLoggers, this is [zapcore.InvalidLevel]. -func (log *Logger) Level() zapcore.Level { - return zapcore.LevelOf(log.core) -} - -// Check returns a CheckedEntry if logging a message at the specified level -// is enabled. It's a completely optional optimization; in high-performance -// applications, Check can help avoid allocating a slice to hold fields. -func (log *Logger) Check(lvl zapcore.Level, msg string) *zapcore.CheckedEntry { - return log.check(lvl, msg) -} - -// Log logs a message at the specified level. The message includes any fields -// passed at the log site, as well as any fields accumulated on the logger. -// Any Fields that require evaluation (such as Objects) are evaluated upon -// invocation of Log. -func (log *Logger) Log(lvl zapcore.Level, msg string, fields ...Field) { - if ce := log.check(lvl, msg); ce != nil { - ce.Write(fields...) - } -} - -// Debug logs a message at DebugLevel. The message includes any fields passed -// at the log site, as well as any fields accumulated on the logger. -func (log *Logger) Debug(msg string, fields ...Field) { - if ce := log.check(DebugLevel, msg); ce != nil { - ce.Write(fields...) - } -} - -// Info logs a message at InfoLevel. The message includes any fields passed -// at the log site, as well as any fields accumulated on the logger. -func (log *Logger) Info(msg string, fields ...Field) { - if ce := log.check(InfoLevel, msg); ce != nil { - ce.Write(fields...) - } -} - -// Warn logs a message at WarnLevel. The message includes any fields passed -// at the log site, as well as any fields accumulated on the logger. -func (log *Logger) Warn(msg string, fields ...Field) { - if ce := log.check(WarnLevel, msg); ce != nil { - ce.Write(fields...) - } -} - -// Error logs a message at ErrorLevel. The message includes any fields passed -// at the log site, as well as any fields accumulated on the logger. -func (log *Logger) Error(msg string, fields ...Field) { - if ce := log.check(ErrorLevel, msg); ce != nil { - ce.Write(fields...) - } -} - -// DPanic logs a message at DPanicLevel. The message includes any fields -// passed at the log site, as well as any fields accumulated on the logger. -// -// If the logger is in development mode, it then panics (DPanic means -// "development panic"). This is useful for catching errors that are -// recoverable, but shouldn't ever happen. -func (log *Logger) DPanic(msg string, fields ...Field) { - if ce := log.check(DPanicLevel, msg); ce != nil { - ce.Write(fields...) - } -} - -// Panic logs a message at PanicLevel. The message includes any fields passed -// at the log site, as well as any fields accumulated on the logger. -// -// The logger then panics, even if logging at PanicLevel is disabled. -func (log *Logger) Panic(msg string, fields ...Field) { - if ce := log.check(PanicLevel, msg); ce != nil { - ce.Write(fields...) - } -} - -// Fatal logs a message at FatalLevel. The message includes any fields passed -// at the log site, as well as any fields accumulated on the logger. -// -// The logger then calls os.Exit(1), even if logging at FatalLevel is -// disabled. -func (log *Logger) Fatal(msg string, fields ...Field) { - if ce := log.check(FatalLevel, msg); ce != nil { - ce.Write(fields...) 
- } -} - -// Sync calls the underlying Core's Sync method, flushing any buffered log -// entries. Applications should take care to call Sync before exiting. -func (log *Logger) Sync() error { - return log.core.Sync() -} - -// Core returns the Logger's underlying zapcore.Core. -func (log *Logger) Core() zapcore.Core { - return log.core -} - -// Name returns the Logger's underlying name, -// or an empty string if the logger is unnamed. -func (log *Logger) Name() string { - return log.name -} - -func (log *Logger) clone() *Logger { - clone := *log - return &clone -} - -func (log *Logger) check(lvl zapcore.Level, msg string) *zapcore.CheckedEntry { - // Logger.check must always be called directly by a method in the - // Logger interface (e.g., Check, Info, Fatal). - // This skips Logger.check and the Info/Fatal/Check/etc. method that - // called it. - const callerSkipOffset = 2 - - // Check the level first to reduce the cost of disabled log calls. - // Since Panic and higher may exit, we skip the optimization for those levels. - if lvl < zapcore.DPanicLevel && !log.core.Enabled(lvl) { - return nil - } - - // Create basic checked entry thru the core; this will be non-nil if the - // log message will actually be written somewhere. - ent := zapcore.Entry{ - LoggerName: log.name, - Time: log.clock.Now(), - Level: lvl, - Message: msg, - } - ce := log.core.Check(ent, nil) - willWrite := ce != nil - - // Set up any required terminal behavior. - switch ent.Level { - case zapcore.PanicLevel: - ce = ce.After(ent, terminalHookOverride(zapcore.WriteThenPanic, log.onPanic)) - case zapcore.FatalLevel: - ce = ce.After(ent, terminalHookOverride(zapcore.WriteThenFatal, log.onFatal)) - case zapcore.DPanicLevel: - if log.development { - ce = ce.After(ent, terminalHookOverride(zapcore.WriteThenPanic, log.onPanic)) - } - } - - // Only do further annotation if we're going to write this message; checked - // entries that exist only for terminal behavior don't benefit from - // annotation. - if !willWrite { - return ce - } - - // Thread the error output through to the CheckedEntry. - ce.ErrorOutput = log.errorOutput - - addStack := log.addStack.Enabled(ce.Level) - if !log.addCaller && !addStack { - return ce - } - - // Adding the caller or stack trace requires capturing the callers of - // this function. We'll share information between these two. - stackDepth := stacktrace.First - if addStack { - stackDepth = stacktrace.Full - } - stack := stacktrace.Capture(log.callerSkip+callerSkipOffset, stackDepth) - defer stack.Free() - - if stack.Count() == 0 { - if log.addCaller { - fmt.Fprintf(log.errorOutput, "%v Logger.check error: failed to get caller\n", ent.Time.UTC()) - _ = log.errorOutput.Sync() - } - return ce - } - - frame, more := stack.Next() - - if log.addCaller { - ce.Caller = zapcore.EntryCaller{ - Defined: frame.PC != 0, - PC: frame.PC, - File: frame.File, - Line: frame.Line, - Function: frame.Function, - } - } - - if addStack { - buffer := bufferpool.Get() - defer buffer.Free() - - stackfmt := stacktrace.NewFormatter(buffer) - - // We've already extracted the first frame, so format that - // separately and defer to stackfmt for the rest. - stackfmt.FormatFrame(frame) - if more { - stackfmt.FormatStack(stack) - } - ce.Stack = buffer.String() - } - - return ce -} - -func terminalHookOverride(defaultHook, override zapcore.CheckWriteHook) zapcore.CheckWriteHook { - // A nil or WriteThenNoop hook will lead to continued execution after - // a Panic or Fatal log entry, which is unexpected. 
For example, - // - // f, err := os.Open(..) - // if err != nil { - // log.Fatal("cannot open", zap.Error(err)) - // } - // fmt.Println(f.Name()) - // - // The f.Name() will panic if we continue execution after the log.Fatal. - if override == nil || override == zapcore.WriteThenNoop { - return defaultHook - } - return override -} diff --git a/vendor/go.uber.org/zap/options.go b/vendor/go.uber.org/zap/options.go deleted file mode 100644 index 43d357ac9..000000000 --- a/vendor/go.uber.org/zap/options.go +++ /dev/null @@ -1,182 +0,0 @@ -// Copyright (c) 2016 Uber Technologies, Inc. -// -// Permission is hereby granted, free of charge, to any person obtaining a copy -// of this software and associated documentation files (the "Software"), to deal -// in the Software without restriction, including without limitation the rights -// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -// copies of the Software, and to permit persons to whom the Software is -// furnished to do so, subject to the following conditions: -// -// The above copyright notice and this permission notice shall be included in -// all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -// THE SOFTWARE. - -package zap - -import ( - "fmt" - - "go.uber.org/zap/zapcore" -) - -// An Option configures a Logger. -type Option interface { - apply(*Logger) -} - -// optionFunc wraps a func so it satisfies the Option interface. -type optionFunc func(*Logger) - -func (f optionFunc) apply(log *Logger) { - f(log) -} - -// WrapCore wraps or replaces the Logger's underlying zapcore.Core. -func WrapCore(f func(zapcore.Core) zapcore.Core) Option { - return optionFunc(func(log *Logger) { - log.core = f(log.core) - }) -} - -// Hooks registers functions which will be called each time the Logger writes -// out an Entry. Repeated use of Hooks is additive. -// -// Hooks are useful for simple side effects, like capturing metrics for the -// number of emitted logs. More complex side effects, including anything that -// requires access to the Entry's structured fields, should be implemented as -// a zapcore.Core instead. See zapcore.RegisterHooks for details. -func Hooks(hooks ...func(zapcore.Entry) error) Option { - return optionFunc(func(log *Logger) { - log.core = zapcore.RegisterHooks(log.core, hooks...) - }) -} - -// Fields adds fields to the Logger. -func Fields(fs ...Field) Option { - return optionFunc(func(log *Logger) { - log.core = log.core.With(fs) - }) -} - -// ErrorOutput sets the destination for errors generated by the Logger. Note -// that this option only affects internal errors; for sample code that sends -// error-level logs to a different location from info- and debug-level logs, -// see the package-level AdvancedConfiguration example. -// -// The supplied WriteSyncer must be safe for concurrent use. The Open and -// zapcore.Lock functions are the simplest ways to protect files with a mutex. 
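For context on the Logger API being dropped from vendor/ above, typical construction and the Check fast path look like the sketch below; the request_id field and messages are invented for illustration.

package main

import "go.uber.org/zap"

func main() {
	// Must panics if construction fails, which keeps initialization terse.
	logger := zap.Must(zap.NewProduction())
	defer logger.Sync()

	// With returns a child logger; fields accumulate on the child and the
	// parent is unchanged.
	reqLogger := logger.With(zap.String("request_id", "abc123"))
	reqLogger.Info("handling request", zap.Int("attempt", 1))

	// Check avoids building fields when the level is disabled.
	if ce := logger.Check(zap.DebugLevel, "expensive debug detail"); ce != nil {
		ce.Write(zap.String("payload", "only assembled if debug is enabled"))
	}
}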
-func ErrorOutput(w zapcore.WriteSyncer) Option { - return optionFunc(func(log *Logger) { - log.errorOutput = w - }) -} - -// Development puts the logger in development mode, which makes DPanic-level -// logs panic instead of simply logging an error. -func Development() Option { - return optionFunc(func(log *Logger) { - log.development = true - }) -} - -// AddCaller configures the Logger to annotate each message with the filename, -// line number, and function name of zap's caller. See also WithCaller. -func AddCaller() Option { - return WithCaller(true) -} - -// WithCaller configures the Logger to annotate each message with the filename, -// line number, and function name of zap's caller, or not, depending on the -// value of enabled. This is a generalized form of AddCaller. -func WithCaller(enabled bool) Option { - return optionFunc(func(log *Logger) { - log.addCaller = enabled - }) -} - -// AddCallerSkip increases the number of callers skipped by caller annotation -// (as enabled by the AddCaller option). When building wrappers around the -// Logger and SugaredLogger, supplying this Option prevents zap from always -// reporting the wrapper code as the caller. -func AddCallerSkip(skip int) Option { - return optionFunc(func(log *Logger) { - log.callerSkip += skip - }) -} - -// AddStacktrace configures the Logger to record a stack trace for all messages at -// or above a given level. -func AddStacktrace(lvl zapcore.LevelEnabler) Option { - return optionFunc(func(log *Logger) { - log.addStack = lvl - }) -} - -// IncreaseLevel increase the level of the logger. It has no effect if -// the passed in level tries to decrease the level of the logger. -func IncreaseLevel(lvl zapcore.LevelEnabler) Option { - return optionFunc(func(log *Logger) { - core, err := zapcore.NewIncreaseLevelCore(log.core, lvl) - if err != nil { - fmt.Fprintf(log.errorOutput, "failed to IncreaseLevel: %v\n", err) - } else { - log.core = core - } - }) -} - -// WithPanicHook sets a CheckWriteHook to run on Panic/DPanic logs. -// Zap will call this hook after writing a log statement with a Panic/DPanic level. -// -// For example, the following builds a logger that will exit the current -// goroutine after writing a Panic/DPanic log message, but it will not start a panic. -// -// zap.New(core, zap.WithPanicHook(zapcore.WriteThenGoexit)) -// -// This is useful for testing Panic/DPanic log output. -func WithPanicHook(hook zapcore.CheckWriteHook) Option { - return optionFunc(func(log *Logger) { - log.onPanic = hook - }) -} - -// OnFatal sets the action to take on fatal logs. -// -// Deprecated: Use [WithFatalHook] instead. -func OnFatal(action zapcore.CheckWriteAction) Option { - return WithFatalHook(action) -} - -// WithFatalHook sets a CheckWriteHook to run on fatal logs. -// Zap will call this hook after writing a log statement with a Fatal level. -// -// For example, the following builds a logger that will exit the current -// goroutine after writing a fatal log message, but it will not exit the -// program. -// -// zap.New(core, zap.WithFatalHook(zapcore.WriteThenGoexit)) -// -// It is important that the provided CheckWriteHook stops the control flow at -// the current statement to meet expectations of callers of the logger. -// We recommend calling os.Exit or runtime.Goexit inside custom hooks at -// minimum. 
-func WithFatalHook(hook zapcore.CheckWriteHook) Option { - return optionFunc(func(log *Logger) { - log.onFatal = hook - }) -} - -// WithClock specifies the clock used by the logger to determine the current -// time for logged entries. Defaults to the system clock with time.Now. -func WithClock(clock zapcore.Clock) Option { - return optionFunc(func(log *Logger) { - log.clock = clock - }) -} diff --git a/vendor/go.uber.org/zap/sink.go b/vendor/go.uber.org/zap/sink.go deleted file mode 100644 index 499772a00..000000000 --- a/vendor/go.uber.org/zap/sink.go +++ /dev/null @@ -1,180 +0,0 @@ -// Copyright (c) 2016-2022 Uber Technologies, Inc. -// -// Permission is hereby granted, free of charge, to any person obtaining a copy -// of this software and associated documentation files (the "Software"), to deal -// in the Software without restriction, including without limitation the rights -// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -// copies of the Software, and to permit persons to whom the Software is -// furnished to do so, subject to the following conditions: -// -// The above copyright notice and this permission notice shall be included in -// all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -// THE SOFTWARE. - -package zap - -import ( - "errors" - "fmt" - "io" - "net/url" - "os" - "path/filepath" - "strings" - "sync" - - "go.uber.org/zap/zapcore" -) - -const schemeFile = "file" - -var _sinkRegistry = newSinkRegistry() - -// Sink defines the interface to write to and close logger destinations. -type Sink interface { - zapcore.WriteSyncer - io.Closer -} - -type errSinkNotFound struct { - scheme string -} - -func (e *errSinkNotFound) Error() string { - return fmt.Sprintf("no sink found for scheme %q", e.scheme) -} - -type nopCloserSink struct{ zapcore.WriteSyncer } - -func (nopCloserSink) Close() error { return nil } - -type sinkRegistry struct { - mu sync.Mutex - factories map[string]func(*url.URL) (Sink, error) // keyed by scheme - openFile func(string, int, os.FileMode) (*os.File, error) // type matches os.OpenFile -} - -func newSinkRegistry() *sinkRegistry { - sr := &sinkRegistry{ - factories: make(map[string]func(*url.URL) (Sink, error)), - openFile: os.OpenFile, - } - // Infallible operation: the registry is empty, so we can't have a conflict. - _ = sr.RegisterSink(schemeFile, sr.newFileSinkFromURL) - return sr -} - -// RegisterScheme registers the given factory for the specific scheme. 
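The options.go constructors above compose either at construction time or later via WithOptions. A short sketch of the common ones; the WriteThenGoexit choice for tests follows the WithFatalHook comment above, and the remaining values are illustrative.

package main

import (
	"go.uber.org/zap"
	"go.uber.org/zap/zapcore"
)

func main() {
	logger := zap.Must(zap.NewDevelopment(
		zap.AddCaller(),                       // annotate entries with file:line
		zap.AddStacktrace(zapcore.ErrorLevel), // stack traces for Error and above
	))
	defer logger.Sync()
	logger.Error("something went wrong") // carries caller and stack trace

	// In tests, WithFatalHook(zapcore.WriteThenGoexit) ends only the current
	// goroutine on Fatal instead of exiting the process.
	testLogger := logger.WithOptions(zap.WithFatalHook(zapcore.WriteThenGoexit))
	_ = testLogger
}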
-func (sr *sinkRegistry) RegisterSink(scheme string, factory func(*url.URL) (Sink, error)) error { - sr.mu.Lock() - defer sr.mu.Unlock() - - if scheme == "" { - return errors.New("can't register a sink factory for empty string") - } - normalized, err := normalizeScheme(scheme) - if err != nil { - return fmt.Errorf("%q is not a valid scheme: %v", scheme, err) - } - if _, ok := sr.factories[normalized]; ok { - return fmt.Errorf("sink factory already registered for scheme %q", normalized) - } - sr.factories[normalized] = factory - return nil -} - -func (sr *sinkRegistry) newSink(rawURL string) (Sink, error) { - // URL parsing doesn't work well for Windows paths such as `c:\log.txt`, as scheme is set to - // the drive, and path is unset unless `c:/log.txt` is used. - // To avoid Windows-specific URL handling, we instead check IsAbs to open as a file. - // filepath.IsAbs is OS-specific, so IsAbs('c:/log.txt') is false outside of Windows. - if filepath.IsAbs(rawURL) { - return sr.newFileSinkFromPath(rawURL) - } - - u, err := url.Parse(rawURL) - if err != nil { - return nil, fmt.Errorf("can't parse %q as a URL: %v", rawURL, err) - } - if u.Scheme == "" { - u.Scheme = schemeFile - } - - sr.mu.Lock() - factory, ok := sr.factories[u.Scheme] - sr.mu.Unlock() - if !ok { - return nil, &errSinkNotFound{u.Scheme} - } - return factory(u) -} - -// RegisterSink registers a user-supplied factory for all sinks with a -// particular scheme. -// -// All schemes must be ASCII, valid under section 0.1 of RFC 3986 -// (https://tools.ietf.org/html/rfc3983#section-3.1), and must not already -// have a factory registered. Zap automatically registers a factory for the -// "file" scheme. -func RegisterSink(scheme string, factory func(*url.URL) (Sink, error)) error { - return _sinkRegistry.RegisterSink(scheme, factory) -} - -func (sr *sinkRegistry) newFileSinkFromURL(u *url.URL) (Sink, error) { - if u.User != nil { - return nil, fmt.Errorf("user and password not allowed with file URLs: got %v", u) - } - if u.Fragment != "" { - return nil, fmt.Errorf("fragments not allowed with file URLs: got %v", u) - } - if u.RawQuery != "" { - return nil, fmt.Errorf("query parameters not allowed with file URLs: got %v", u) - } - // Error messages are better if we check hostname and port separately. - if u.Port() != "" { - return nil, fmt.Errorf("ports not allowed with file URLs: got %v", u) - } - if hn := u.Hostname(); hn != "" && hn != "localhost" { - return nil, fmt.Errorf("file URLs must leave host empty or use localhost: got %v", u) - } - - return sr.newFileSinkFromPath(u.Path) -} - -func (sr *sinkRegistry) newFileSinkFromPath(path string) (Sink, error) { - switch path { - case "stdout": - return nopCloserSink{os.Stdout}, nil - case "stderr": - return nopCloserSink{os.Stderr}, nil - } - return sr.openFile(path, os.O_WRONLY|os.O_APPEND|os.O_CREATE, 0o666) -} - -func normalizeScheme(s string) (string, error) { - // https://tools.ietf.org/html/rfc3986#section-3.1 - s = strings.ToLower(s) - if first := s[0]; 'a' > first || 'z' < first { - return "", errors.New("must start with a letter") - } - for i := 1; i < len(s); i++ { // iterate over bytes, not runes - c := s[i] - switch { - case 'a' <= c && c <= 'z': - continue - case '0' <= c && c <= '9': - continue - case c == '.' 
|| c == '+' || c == '-': - continue - } - return "", fmt.Errorf("may not contain %q", c) - } - return s, nil -} diff --git a/vendor/go.uber.org/zap/sugar.go b/vendor/go.uber.org/zap/sugar.go deleted file mode 100644 index 8904cd087..000000000 --- a/vendor/go.uber.org/zap/sugar.go +++ /dev/null @@ -1,476 +0,0 @@ -// Copyright (c) 2016 Uber Technologies, Inc. -// -// Permission is hereby granted, free of charge, to any person obtaining a copy -// of this software and associated documentation files (the "Software"), to deal -// in the Software without restriction, including without limitation the rights -// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -// copies of the Software, and to permit persons to whom the Software is -// furnished to do so, subject to the following conditions: -// -// The above copyright notice and this permission notice shall be included in -// all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -// THE SOFTWARE. - -package zap - -import ( - "fmt" - - "go.uber.org/zap/zapcore" - - "go.uber.org/multierr" -) - -const ( - _oddNumberErrMsg = "Ignored key without a value." - _nonStringKeyErrMsg = "Ignored key-value pairs with non-string keys." - _multipleErrMsg = "Multiple errors without a key." -) - -// A SugaredLogger wraps the base Logger functionality in a slower, but less -// verbose, API. Any Logger can be converted to a SugaredLogger with its Sugar -// method. -// -// Unlike the Logger, the SugaredLogger doesn't insist on structured logging. -// For each log level, it exposes four methods: -// -// - methods named after the log level for log.Print-style logging -// - methods ending in "w" for loosely-typed structured logging -// - methods ending in "f" for log.Printf-style logging -// - methods ending in "ln" for log.Println-style logging -// -// For example, the methods for InfoLevel are: -// -// Info(...any) Print-style logging -// Infow(...any) Structured logging (read as "info with") -// Infof(string, ...any) Printf-style logging -// Infoln(...any) Println-style logging -type SugaredLogger struct { - base *Logger -} - -// Desugar unwraps a SugaredLogger, exposing the original Logger. Desugaring -// is quite inexpensive, so it's reasonable for a single application to use -// both Loggers and SugaredLoggers, converting between them on the boundaries -// of performance-sensitive code. -func (s *SugaredLogger) Desugar() *Logger { - base := s.base.clone() - base.callerSkip -= 2 - return base -} - -// Named adds a sub-scope to the logger's name. See Logger.Named for details. -func (s *SugaredLogger) Named(name string) *SugaredLogger { - return &SugaredLogger{base: s.base.Named(name)} -} - -// WithOptions clones the current SugaredLogger, applies the supplied Options, -// and returns the result. It's safe to use concurrently. -func (s *SugaredLogger) WithOptions(opts ...Option) *SugaredLogger { - base := s.base.clone() - for _, opt := range opts { - opt.apply(base) - } - return &SugaredLogger{base: base} -} - -// With adds a variadic number of fields to the logging context. 
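RegisterSink, shown above, is the hook for custom output destinations referenced from Config.OutputPaths. The memorySink type and the memory:// scheme below are invented for this sketch; they are not part of the vendored code.

package main

import (
	"bytes"
	"fmt"
	"net/url"

	"go.uber.org/zap"
)

// memorySink buffers log output in memory; Close and Sync are no-ops.
type memorySink struct{ bytes.Buffer }

func (*memorySink) Close() error { return nil }
func (*memorySink) Sync() error  { return nil }

func main() {
	sink := &memorySink{}
	if err := zap.RegisterSink("memory", func(*url.URL) (zap.Sink, error) {
		return sink, nil
	}); err != nil {
		panic(err)
	}

	cfg := zap.NewProductionConfig()
	cfg.OutputPaths = []string{"memory://"}
	logger := zap.Must(cfg.Build())
	logger.Info("captured in memory")
	_ = logger.Sync()

	fmt.Print(sink.String())
}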
It accepts a -// mix of strongly-typed Field objects and loosely-typed key-value pairs. When -// processing pairs, the first element of the pair is used as the field key -// and the second as the field value. -// -// For example, -// -// sugaredLogger.With( -// "hello", "world", -// "failure", errors.New("oh no"), -// Stack(), -// "count", 42, -// "user", User{Name: "alice"}, -// ) -// -// is the equivalent of -// -// unsugared.With( -// String("hello", "world"), -// String("failure", "oh no"), -// Stack(), -// Int("count", 42), -// Object("user", User{Name: "alice"}), -// ) -// -// Note that the keys in key-value pairs should be strings. In development, -// passing a non-string key panics. In production, the logger is more -// forgiving: a separate error is logged, but the key-value pair is skipped -// and execution continues. Passing an orphaned key triggers similar behavior: -// panics in development and errors in production. -func (s *SugaredLogger) With(args ...interface{}) *SugaredLogger { - return &SugaredLogger{base: s.base.With(s.sweetenFields(args)...)} -} - -// WithLazy adds a variadic number of fields to the logging context lazily. -// The fields are evaluated only if the logger is further chained with [With] -// or is written to with any of the log level methods. -// Until that occurs, the logger may retain references to objects inside the fields, -// and logging will reflect the state of an object at the time of logging, -// not the time of WithLazy(). -// -// Similar to [With], fields added to the child don't affect the parent, -// and vice versa. Also, the keys in key-value pairs should be strings. In development, -// passing a non-string key panics, while in production it logs an error and skips the pair. -// Passing an orphaned key has the same behavior. -func (s *SugaredLogger) WithLazy(args ...interface{}) *SugaredLogger { - return &SugaredLogger{base: s.base.WithLazy(s.sweetenFields(args)...)} -} - -// Level reports the minimum enabled level for this logger. -// -// For NopLoggers, this is [zapcore.InvalidLevel]. -func (s *SugaredLogger) Level() zapcore.Level { - return zapcore.LevelOf(s.base.core) -} - -// Log logs the provided arguments at provided level. -// Spaces are added between arguments when neither is a string. -func (s *SugaredLogger) Log(lvl zapcore.Level, args ...interface{}) { - s.log(lvl, "", args, nil) -} - -// Debug logs the provided arguments at [DebugLevel]. -// Spaces are added between arguments when neither is a string. -func (s *SugaredLogger) Debug(args ...interface{}) { - s.log(DebugLevel, "", args, nil) -} - -// Info logs the provided arguments at [InfoLevel]. -// Spaces are added between arguments when neither is a string. -func (s *SugaredLogger) Info(args ...interface{}) { - s.log(InfoLevel, "", args, nil) -} - -// Warn logs the provided arguments at [WarnLevel]. -// Spaces are added between arguments when neither is a string. -func (s *SugaredLogger) Warn(args ...interface{}) { - s.log(WarnLevel, "", args, nil) -} - -// Error logs the provided arguments at [ErrorLevel]. -// Spaces are added between arguments when neither is a string. -func (s *SugaredLogger) Error(args ...interface{}) { - s.log(ErrorLevel, "", args, nil) -} - -// DPanic logs the provided arguments at [DPanicLevel]. -// In development, the logger then panics. (See [DPanicLevel] for details.) -// Spaces are added between arguments when neither is a string. 
-func (s *SugaredLogger) DPanic(args ...interface{}) { - s.log(DPanicLevel, "", args, nil) -} - -// Panic constructs a message with the provided arguments and panics. -// Spaces are added between arguments when neither is a string. -func (s *SugaredLogger) Panic(args ...interface{}) { - s.log(PanicLevel, "", args, nil) -} - -// Fatal constructs a message with the provided arguments and calls os.Exit. -// Spaces are added between arguments when neither is a string. -func (s *SugaredLogger) Fatal(args ...interface{}) { - s.log(FatalLevel, "", args, nil) -} - -// Logf formats the message according to the format specifier -// and logs it at provided level. -func (s *SugaredLogger) Logf(lvl zapcore.Level, template string, args ...interface{}) { - s.log(lvl, template, args, nil) -} - -// Debugf formats the message according to the format specifier -// and logs it at [DebugLevel]. -func (s *SugaredLogger) Debugf(template string, args ...interface{}) { - s.log(DebugLevel, template, args, nil) -} - -// Infof formats the message according to the format specifier -// and logs it at [InfoLevel]. -func (s *SugaredLogger) Infof(template string, args ...interface{}) { - s.log(InfoLevel, template, args, nil) -} - -// Warnf formats the message according to the format specifier -// and logs it at [WarnLevel]. -func (s *SugaredLogger) Warnf(template string, args ...interface{}) { - s.log(WarnLevel, template, args, nil) -} - -// Errorf formats the message according to the format specifier -// and logs it at [ErrorLevel]. -func (s *SugaredLogger) Errorf(template string, args ...interface{}) { - s.log(ErrorLevel, template, args, nil) -} - -// DPanicf formats the message according to the format specifier -// and logs it at [DPanicLevel]. -// In development, the logger then panics. (See [DPanicLevel] for details.) -func (s *SugaredLogger) DPanicf(template string, args ...interface{}) { - s.log(DPanicLevel, template, args, nil) -} - -// Panicf formats the message according to the format specifier -// and panics. -func (s *SugaredLogger) Panicf(template string, args ...interface{}) { - s.log(PanicLevel, template, args, nil) -} - -// Fatalf formats the message according to the format specifier -// and calls os.Exit. -func (s *SugaredLogger) Fatalf(template string, args ...interface{}) { - s.log(FatalLevel, template, args, nil) -} - -// Logw logs a message with some additional context. The variadic key-value -// pairs are treated as they are in With. -func (s *SugaredLogger) Logw(lvl zapcore.Level, msg string, keysAndValues ...interface{}) { - s.log(lvl, msg, nil, keysAndValues) -} - -// Debugw logs a message with some additional context. The variadic key-value -// pairs are treated as they are in With. -// -// When debug-level logging is disabled, this is much faster than -// -// s.With(keysAndValues).Debug(msg) -func (s *SugaredLogger) Debugw(msg string, keysAndValues ...interface{}) { - s.log(DebugLevel, msg, nil, keysAndValues) -} - -// Infow logs a message with some additional context. The variadic key-value -// pairs are treated as they are in With. -func (s *SugaredLogger) Infow(msg string, keysAndValues ...interface{}) { - s.log(InfoLevel, msg, nil, keysAndValues) -} - -// Warnw logs a message with some additional context. The variadic key-value -// pairs are treated as they are in With. -func (s *SugaredLogger) Warnw(msg string, keysAndValues ...interface{}) { - s.log(WarnLevel, msg, nil, keysAndValues) -} - -// Errorw logs a message with some additional context. 
The variadic key-value -// pairs are treated as they are in With. -func (s *SugaredLogger) Errorw(msg string, keysAndValues ...interface{}) { - s.log(ErrorLevel, msg, nil, keysAndValues) -} - -// DPanicw logs a message with some additional context. In development, the -// logger then panics. (See DPanicLevel for details.) The variadic key-value -// pairs are treated as they are in With. -func (s *SugaredLogger) DPanicw(msg string, keysAndValues ...interface{}) { - s.log(DPanicLevel, msg, nil, keysAndValues) -} - -// Panicw logs a message with some additional context, then panics. The -// variadic key-value pairs are treated as they are in With. -func (s *SugaredLogger) Panicw(msg string, keysAndValues ...interface{}) { - s.log(PanicLevel, msg, nil, keysAndValues) -} - -// Fatalw logs a message with some additional context, then calls os.Exit. The -// variadic key-value pairs are treated as they are in With. -func (s *SugaredLogger) Fatalw(msg string, keysAndValues ...interface{}) { - s.log(FatalLevel, msg, nil, keysAndValues) -} - -// Logln logs a message at provided level. -// Spaces are always added between arguments. -func (s *SugaredLogger) Logln(lvl zapcore.Level, args ...interface{}) { - s.logln(lvl, args, nil) -} - -// Debugln logs a message at [DebugLevel]. -// Spaces are always added between arguments. -func (s *SugaredLogger) Debugln(args ...interface{}) { - s.logln(DebugLevel, args, nil) -} - -// Infoln logs a message at [InfoLevel]. -// Spaces are always added between arguments. -func (s *SugaredLogger) Infoln(args ...interface{}) { - s.logln(InfoLevel, args, nil) -} - -// Warnln logs a message at [WarnLevel]. -// Spaces are always added between arguments. -func (s *SugaredLogger) Warnln(args ...interface{}) { - s.logln(WarnLevel, args, nil) -} - -// Errorln logs a message at [ErrorLevel]. -// Spaces are always added between arguments. -func (s *SugaredLogger) Errorln(args ...interface{}) { - s.logln(ErrorLevel, args, nil) -} - -// DPanicln logs a message at [DPanicLevel]. -// In development, the logger then panics. (See [DPanicLevel] for details.) -// Spaces are always added between arguments. -func (s *SugaredLogger) DPanicln(args ...interface{}) { - s.logln(DPanicLevel, args, nil) -} - -// Panicln logs a message at [PanicLevel] and panics. -// Spaces are always added between arguments. -func (s *SugaredLogger) Panicln(args ...interface{}) { - s.logln(PanicLevel, args, nil) -} - -// Fatalln logs a message at [FatalLevel] and calls os.Exit. -// Spaces are always added between arguments. -func (s *SugaredLogger) Fatalln(args ...interface{}) { - s.logln(FatalLevel, args, nil) -} - -// Sync flushes any buffered log entries. -func (s *SugaredLogger) Sync() error { - return s.base.Sync() -} - -// log message with Sprint, Sprintf, or neither. -func (s *SugaredLogger) log(lvl zapcore.Level, template string, fmtArgs []interface{}, context []interface{}) { - // If logging at this level is completely disabled, skip the overhead of - // string formatting. - if lvl < DPanicLevel && !s.base.Core().Enabled(lvl) { - return - } - - msg := getMessage(template, fmtArgs) - if ce := s.base.Check(lvl, msg); ce != nil { - ce.Write(s.sweetenFields(context)...) - } -} - -// logln message with Sprintln -func (s *SugaredLogger) logln(lvl zapcore.Level, fmtArgs []interface{}, context []interface{}) { - if lvl < DPanicLevel && !s.base.Core().Enabled(lvl) { - return - } - - msg := getMessageln(fmtArgs) - if ce := s.base.Check(lvl, msg); ce != nil { - ce.Write(s.sweetenFields(context)...) 
- } -} - -// getMessage format with Sprint, Sprintf, or neither. -func getMessage(template string, fmtArgs []interface{}) string { - if len(fmtArgs) == 0 { - return template - } - - if template != "" { - return fmt.Sprintf(template, fmtArgs...) - } - - if len(fmtArgs) == 1 { - if str, ok := fmtArgs[0].(string); ok { - return str - } - } - return fmt.Sprint(fmtArgs...) -} - -// getMessageln format with Sprintln. -func getMessageln(fmtArgs []interface{}) string { - msg := fmt.Sprintln(fmtArgs...) - return msg[:len(msg)-1] -} - -func (s *SugaredLogger) sweetenFields(args []interface{}) []Field { - if len(args) == 0 { - return nil - } - - var ( - // Allocate enough space for the worst case; if users pass only structured - // fields, we shouldn't penalize them with extra allocations. - fields = make([]Field, 0, len(args)) - invalid invalidPairs - seenError bool - ) - - for i := 0; i < len(args); { - // This is a strongly-typed field. Consume it and move on. - if f, ok := args[i].(Field); ok { - fields = append(fields, f) - i++ - continue - } - - // If it is an error, consume it and move on. - if err, ok := args[i].(error); ok { - if !seenError { - seenError = true - fields = append(fields, Error(err)) - } else { - s.base.Error(_multipleErrMsg, Error(err)) - } - i++ - continue - } - - // Make sure this element isn't a dangling key. - if i == len(args)-1 { - s.base.Error(_oddNumberErrMsg, Any("ignored", args[i])) - break - } - - // Consume this value and the next, treating them as a key-value pair. If the - // key isn't a string, add this pair to the slice of invalid pairs. - key, val := args[i], args[i+1] - if keyStr, ok := key.(string); !ok { - // Subsequent errors are likely, so allocate once up front. - if cap(invalid) == 0 { - invalid = make(invalidPairs, 0, len(args)/2) - } - invalid = append(invalid, invalidPair{i, key, val}) - } else { - fields = append(fields, Any(keyStr, val)) - } - i += 2 - } - - // If we encountered any invalid key-value pairs, log an error. - if len(invalid) > 0 { - s.base.Error(_nonStringKeyErrMsg, Array("invalid", invalid)) - } - return fields -} - -type invalidPair struct { - position int - key, value interface{} -} - -func (p invalidPair) MarshalLogObject(enc zapcore.ObjectEncoder) error { - enc.AddInt64("position", int64(p.position)) - Any("key", p.key).AddTo(enc) - Any("value", p.value).AddTo(enc) - return nil -} - -type invalidPairs []invalidPair - -func (ps invalidPairs) MarshalLogArray(enc zapcore.ArrayEncoder) error { - var err error - for i := range ps { - err = multierr.Append(err, enc.AppendObject(ps[i])) - } - return err -} diff --git a/vendor/go.uber.org/zap/time.go b/vendor/go.uber.org/zap/time.go deleted file mode 100644 index c5a1f1622..000000000 --- a/vendor/go.uber.org/zap/time.go +++ /dev/null @@ -1,27 +0,0 @@ -// Copyright (c) 2016 Uber Technologies, Inc. -// -// Permission is hereby granted, free of charge, to any person obtaining a copy -// of this software and associated documentation files (the "Software"), to deal -// in the Software without restriction, including without limitation the rights -// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -// copies of the Software, and to permit persons to whom the Software is -// furnished to do so, subject to the following conditions: -// -// The above copyright notice and this permission notice shall be included in -// all copies or substantial portions of the Software. 
-// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -// THE SOFTWARE. - -package zap - -import "time" - -func timeToMillis(t time.Time) int64 { - return t.UnixNano() / int64(time.Millisecond) -} diff --git a/vendor/go.uber.org/zap/writer.go b/vendor/go.uber.org/zap/writer.go deleted file mode 100644 index 06768c679..000000000 --- a/vendor/go.uber.org/zap/writer.go +++ /dev/null @@ -1,98 +0,0 @@ -// Copyright (c) 2016-2022 Uber Technologies, Inc. -// -// Permission is hereby granted, free of charge, to any person obtaining a copy -// of this software and associated documentation files (the "Software"), to deal -// in the Software without restriction, including without limitation the rights -// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -// copies of the Software, and to permit persons to whom the Software is -// furnished to do so, subject to the following conditions: -// -// The above copyright notice and this permission notice shall be included in -// all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -// THE SOFTWARE. - -package zap - -import ( - "fmt" - "io" - - "go.uber.org/zap/zapcore" - - "go.uber.org/multierr" -) - -// Open is a high-level wrapper that takes a variadic number of URLs, opens or -// creates each of the specified resources, and combines them into a locked -// WriteSyncer. It also returns any error encountered and a function to close -// any opened files. -// -// Passing no URLs returns a no-op WriteSyncer. Zap handles URLs without a -// scheme and URLs with the "file" scheme. Third-party code may register -// factories for other schemes using RegisterSink. -// -// URLs with the "file" scheme must use absolute paths on the local -// filesystem. No user, password, port, fragments, or query parameters are -// allowed, and the hostname must be empty or "localhost". -// -// Since it's common to write logs to the local filesystem, URLs without a -// scheme (e.g., "/var/log/foo.log") are treated as local file paths. Without -// a scheme, the special paths "stdout" and "stderr" are interpreted as -// os.Stdout and os.Stderr. When specified without a scheme, relative file -// paths also work. -func Open(paths ...string) (zapcore.WriteSyncer, func(), error) { - writers, closeAll, err := open(paths) - if err != nil { - return nil, nil, err - } - - writer := CombineWriteSyncers(writers...) 
- return writer, closeAll, nil -} - -func open(paths []string) ([]zapcore.WriteSyncer, func(), error) { - writers := make([]zapcore.WriteSyncer, 0, len(paths)) - closers := make([]io.Closer, 0, len(paths)) - closeAll := func() { - for _, c := range closers { - _ = c.Close() - } - } - - var openErr error - for _, path := range paths { - sink, err := _sinkRegistry.newSink(path) - if err != nil { - openErr = multierr.Append(openErr, fmt.Errorf("open sink %q: %w", path, err)) - continue - } - writers = append(writers, sink) - closers = append(closers, sink) - } - if openErr != nil { - closeAll() - return nil, nil, openErr - } - - return writers, closeAll, nil -} - -// CombineWriteSyncers is a utility that combines multiple WriteSyncers into a -// single, locked WriteSyncer. If no inputs are supplied, it returns a no-op -// WriteSyncer. -// -// It's provided purely as a convenience; the result is no different from -// using zapcore.NewMultiWriteSyncer and zapcore.Lock individually. -func CombineWriteSyncers(writers ...zapcore.WriteSyncer) zapcore.WriteSyncer { - if len(writers) == 0 { - return zapcore.AddSync(io.Discard) - } - return zapcore.Lock(zapcore.NewMultiWriteSyncer(writers...)) -} diff --git a/vendor/go.uber.org/zap/zapcore/buffered_write_syncer.go b/vendor/go.uber.org/zap/zapcore/buffered_write_syncer.go deleted file mode 100644 index a40e93b3e..000000000 --- a/vendor/go.uber.org/zap/zapcore/buffered_write_syncer.go +++ /dev/null @@ -1,219 +0,0 @@ -// Copyright (c) 2021 Uber Technologies, Inc. -// -// Permission is hereby granted, free of charge, to any person obtaining a copy -// of this software and associated documentation files (the "Software"), to deal -// in the Software without restriction, including without limitation the rights -// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -// copies of the Software, and to permit persons to whom the Software is -// furnished to do so, subject to the following conditions: -// -// The above copyright notice and this permission notice shall be included in -// all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -// THE SOFTWARE. - -package zapcore - -import ( - "bufio" - "sync" - "time" - - "go.uber.org/multierr" -) - -const ( - // _defaultBufferSize specifies the default size used by Buffer. - _defaultBufferSize = 256 * 1024 // 256 kB - - // _defaultFlushInterval specifies the default flush interval for - // Buffer. - _defaultFlushInterval = 30 * time.Second -) - -// A BufferedWriteSyncer is a WriteSyncer that buffers writes in-memory before -// flushing them to a wrapped WriteSyncer after reaching some limit, or at some -// fixed interval--whichever comes first. -// -// BufferedWriteSyncer is safe for concurrent use. You don't need to use -// zapcore.Lock for WriteSyncers with BufferedWriteSyncer. -// -// To set up a BufferedWriteSyncer, construct a WriteSyncer for your log -// destination (*os.File is a valid WriteSyncer), wrap it with -// BufferedWriteSyncer, and defer a Stop() call for when you no longer need the -// object. 
-// -// func main() { -// ws := ... // your log destination -// bws := &zapcore.BufferedWriteSyncer{WS: ws} -// defer bws.Stop() -// -// // ... -// core := zapcore.NewCore(enc, bws, lvl) -// logger := zap.New(core) -// -// // ... -// } -// -// By default, a BufferedWriteSyncer will buffer up to 256 kilobytes of logs, -// waiting at most 30 seconds between flushes. -// You can customize these parameters by setting the Size or FlushInterval -// fields. -// For example, the following buffers up to 512 kB of logs before flushing them -// to Stderr, with a maximum of one minute between each flush. -// -// ws := &BufferedWriteSyncer{ -// WS: os.Stderr, -// Size: 512 * 1024, // 512 kB -// FlushInterval: time.Minute, -// } -// defer ws.Stop() -type BufferedWriteSyncer struct { - // WS is the WriteSyncer around which BufferedWriteSyncer will buffer - // writes. - // - // This field is required. - WS WriteSyncer - - // Size specifies the maximum amount of data the writer will buffered - // before flushing. - // - // Defaults to 256 kB if unspecified. - Size int - - // FlushInterval specifies how often the writer should flush data if - // there have been no writes. - // - // Defaults to 30 seconds if unspecified. - FlushInterval time.Duration - - // Clock, if specified, provides control of the source of time for the - // writer. - // - // Defaults to the system clock. - Clock Clock - - // unexported fields for state - mu sync.Mutex - initialized bool // whether initialize() has run - stopped bool // whether Stop() has run - writer *bufio.Writer - ticker *time.Ticker - stop chan struct{} // closed when flushLoop should stop - done chan struct{} // closed when flushLoop has stopped -} - -func (s *BufferedWriteSyncer) initialize() { - size := s.Size - if size == 0 { - size = _defaultBufferSize - } - - flushInterval := s.FlushInterval - if flushInterval == 0 { - flushInterval = _defaultFlushInterval - } - - if s.Clock == nil { - s.Clock = DefaultClock - } - - s.ticker = s.Clock.NewTicker(flushInterval) - s.writer = bufio.NewWriterSize(s.WS, size) - s.stop = make(chan struct{}) - s.done = make(chan struct{}) - s.initialized = true - go s.flushLoop() -} - -// Write writes log data into buffer syncer directly, multiple Write calls will be batched, -// and log data will be flushed to disk when the buffer is full or periodically. -func (s *BufferedWriteSyncer) Write(bs []byte) (int, error) { - s.mu.Lock() - defer s.mu.Unlock() - - if !s.initialized { - s.initialize() - } - - // To avoid partial writes from being flushed, we manually flush the existing buffer if: - // * The current write doesn't fit into the buffer fully, and - // * The buffer is not empty (since bufio will not split large writes when the buffer is empty) - if len(bs) > s.writer.Available() && s.writer.Buffered() > 0 { - if err := s.writer.Flush(); err != nil { - return 0, err - } - } - - return s.writer.Write(bs) -} - -// Sync flushes buffered log data into disk directly. -func (s *BufferedWriteSyncer) Sync() error { - s.mu.Lock() - defer s.mu.Unlock() - - var err error - if s.initialized { - err = s.writer.Flush() - } - - return multierr.Append(err, s.WS.Sync()) -} - -// flushLoop flushes the buffer at the configured interval until Stop is -// called. 
-func (s *BufferedWriteSyncer) flushLoop() { - defer close(s.done) - - for { - select { - case <-s.ticker.C: - // we just simply ignore error here - // because the underlying bufio writer stores any errors - // and we return any error from Sync() as part of the close - _ = s.Sync() - case <-s.stop: - return - } - } -} - -// Stop closes the buffer, cleans up background goroutines, and flushes -// remaining unwritten data. -func (s *BufferedWriteSyncer) Stop() (err error) { - var stopped bool - - // Critical section. - func() { - s.mu.Lock() - defer s.mu.Unlock() - - if !s.initialized { - return - } - - stopped = s.stopped - if stopped { - return - } - s.stopped = true - - s.ticker.Stop() - close(s.stop) // tell flushLoop to stop - <-s.done // and wait until it has - }() - - // Don't call Sync on consecutive Stops. - if !stopped { - err = s.Sync() - } - - return err -} diff --git a/vendor/go.uber.org/zap/zapcore/clock.go b/vendor/go.uber.org/zap/zapcore/clock.go deleted file mode 100644 index 422fd82a6..000000000 --- a/vendor/go.uber.org/zap/zapcore/clock.go +++ /dev/null @@ -1,48 +0,0 @@ -// Copyright (c) 2021 Uber Technologies, Inc. -// -// Permission is hereby granted, free of charge, to any person obtaining a copy -// of this software and associated documentation files (the "Software"), to deal -// in the Software without restriction, including without limitation the rights -// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -// copies of the Software, and to permit persons to whom the Software is -// furnished to do so, subject to the following conditions: -// -// The above copyright notice and this permission notice shall be included in -// all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -// THE SOFTWARE. - -package zapcore - -import "time" - -// DefaultClock is the default clock used by Zap in operations that require -// time. This clock uses the system clock for all operations. -var DefaultClock = systemClock{} - -// Clock is a source of time for logged entries. -type Clock interface { - // Now returns the current local time. - Now() time.Time - - // NewTicker returns *time.Ticker that holds a channel - // that delivers "ticks" of a clock. - NewTicker(time.Duration) *time.Ticker -} - -// systemClock implements default Clock that uses system time. -type systemClock struct{} - -func (systemClock) Now() time.Time { - return time.Now() -} - -func (systemClock) NewTicker(duration time.Duration) *time.Ticker { - return time.NewTicker(duration) -} diff --git a/vendor/go.uber.org/zap/zapcore/console_encoder.go b/vendor/go.uber.org/zap/zapcore/console_encoder.go deleted file mode 100644 index cc2b4e07b..000000000 --- a/vendor/go.uber.org/zap/zapcore/console_encoder.go +++ /dev/null @@ -1,157 +0,0 @@ -// Copyright (c) 2016 Uber Technologies, Inc. 
-// -// Permission is hereby granted, free of charge, to any person obtaining a copy -// of this software and associated documentation files (the "Software"), to deal -// in the Software without restriction, including without limitation the rights -// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -// copies of the Software, and to permit persons to whom the Software is -// furnished to do so, subject to the following conditions: -// -// The above copyright notice and this permission notice shall be included in -// all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -// THE SOFTWARE. - -package zapcore - -import ( - "fmt" - - "go.uber.org/zap/buffer" - "go.uber.org/zap/internal/bufferpool" - "go.uber.org/zap/internal/pool" -) - -var _sliceEncoderPool = pool.New(func() *sliceArrayEncoder { - return &sliceArrayEncoder{ - elems: make([]interface{}, 0, 2), - } -}) - -func getSliceEncoder() *sliceArrayEncoder { - return _sliceEncoderPool.Get() -} - -func putSliceEncoder(e *sliceArrayEncoder) { - e.elems = e.elems[:0] - _sliceEncoderPool.Put(e) -} - -type consoleEncoder struct { - *jsonEncoder -} - -// NewConsoleEncoder creates an encoder whose output is designed for human - -// rather than machine - consumption. It serializes the core log entry data -// (message, level, timestamp, etc.) in a plain-text format and leaves the -// structured context as JSON. -// -// Note that although the console encoder doesn't use the keys specified in the -// encoder configuration, it will omit any element whose key is set to the empty -// string. -func NewConsoleEncoder(cfg EncoderConfig) Encoder { - if cfg.ConsoleSeparator == "" { - // Use a default delimiter of '\t' for backwards compatibility - cfg.ConsoleSeparator = "\t" - } - return consoleEncoder{newJSONEncoder(cfg, true)} -} - -func (c consoleEncoder) Clone() Encoder { - return consoleEncoder{c.jsonEncoder.Clone().(*jsonEncoder)} -} - -func (c consoleEncoder) EncodeEntry(ent Entry, fields []Field) (*buffer.Buffer, error) { - line := bufferpool.Get() - - // We don't want the entry's metadata to be quoted and escaped (if it's - // encoded as strings), which means that we can't use the JSON encoder. The - // simplest option is to use the memory encoder and fmt.Fprint. - // - // If this ever becomes a performance bottleneck, we can implement - // ArrayEncoder for our plain-text format. - arr := getSliceEncoder() - if c.TimeKey != "" && c.EncodeTime != nil && !ent.Time.IsZero() { - c.EncodeTime(ent.Time, arr) - } - if c.LevelKey != "" && c.EncodeLevel != nil { - c.EncodeLevel(ent.Level, arr) - } - if ent.LoggerName != "" && c.NameKey != "" { - nameEncoder := c.EncodeName - - if nameEncoder == nil { - // Fall back to FullNameEncoder for backward compatibility. 
- nameEncoder = FullNameEncoder - } - - nameEncoder(ent.LoggerName, arr) - } - if ent.Caller.Defined { - if c.CallerKey != "" && c.EncodeCaller != nil { - c.EncodeCaller(ent.Caller, arr) - } - if c.FunctionKey != "" { - arr.AppendString(ent.Caller.Function) - } - } - for i := range arr.elems { - if i > 0 { - line.AppendString(c.ConsoleSeparator) - } - fmt.Fprint(line, arr.elems[i]) - } - putSliceEncoder(arr) - - // Add the message itself. - if c.MessageKey != "" { - c.addSeparatorIfNecessary(line) - line.AppendString(ent.Message) - } - - // Add any structured context. - c.writeContext(line, fields) - - // If there's no stacktrace key, honor that; this allows users to force - // single-line output. - if ent.Stack != "" && c.StacktraceKey != "" { - line.AppendByte('\n') - line.AppendString(ent.Stack) - } - - line.AppendString(c.LineEnding) - return line, nil -} - -func (c consoleEncoder) writeContext(line *buffer.Buffer, extra []Field) { - context := c.jsonEncoder.Clone().(*jsonEncoder) - defer func() { - // putJSONEncoder assumes the buffer is still used, but we write out the buffer so - // we can free it. - context.buf.Free() - putJSONEncoder(context) - }() - - addFields(context, extra) - context.closeOpenNamespaces() - if context.buf.Len() == 0 { - return - } - - c.addSeparatorIfNecessary(line) - line.AppendByte('{') - line.Write(context.buf.Bytes()) - line.AppendByte('}') -} - -func (c consoleEncoder) addSeparatorIfNecessary(line *buffer.Buffer) { - if line.Len() > 0 { - line.AppendString(c.ConsoleSeparator) - } -} diff --git a/vendor/go.uber.org/zap/zapcore/core.go b/vendor/go.uber.org/zap/zapcore/core.go deleted file mode 100644 index 776e93f6f..000000000 --- a/vendor/go.uber.org/zap/zapcore/core.go +++ /dev/null @@ -1,122 +0,0 @@ -// Copyright (c) 2016 Uber Technologies, Inc. -// -// Permission is hereby granted, free of charge, to any person obtaining a copy -// of this software and associated documentation files (the "Software"), to deal -// in the Software without restriction, including without limitation the rights -// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -// copies of the Software, and to permit persons to whom the Software is -// furnished to do so, subject to the following conditions: -// -// The above copyright notice and this permission notice shall be included in -// all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -// THE SOFTWARE. - -package zapcore - -// Core is a minimal, fast logger interface. It's designed for library authors -// to wrap in a more user-friendly API. -type Core interface { - LevelEnabler - - // With adds structured context to the Core. - With([]Field) Core - // Check determines whether the supplied Entry should be logged (using the - // embedded LevelEnabler and possibly some extra logic). If the entry - // should be logged, the Core adds itself to the CheckedEntry and returns - // the result. - // - // Callers must use Check before calling Write. 
- Check(Entry, *CheckedEntry) *CheckedEntry - // Write serializes the Entry and any Fields supplied at the log site and - // writes them to their destination. - // - // If called, Write should always log the Entry and Fields; it should not - // replicate the logic of Check. - Write(Entry, []Field) error - // Sync flushes buffered logs (if any). - Sync() error -} - -type nopCore struct{} - -// NewNopCore returns a no-op Core. -func NewNopCore() Core { return nopCore{} } -func (nopCore) Enabled(Level) bool { return false } -func (n nopCore) With([]Field) Core { return n } -func (nopCore) Check(_ Entry, ce *CheckedEntry) *CheckedEntry { return ce } -func (nopCore) Write(Entry, []Field) error { return nil } -func (nopCore) Sync() error { return nil } - -// NewCore creates a Core that writes logs to a WriteSyncer. -func NewCore(enc Encoder, ws WriteSyncer, enab LevelEnabler) Core { - return &ioCore{ - LevelEnabler: enab, - enc: enc, - out: ws, - } -} - -type ioCore struct { - LevelEnabler - enc Encoder - out WriteSyncer -} - -var ( - _ Core = (*ioCore)(nil) - _ leveledEnabler = (*ioCore)(nil) -) - -func (c *ioCore) Level() Level { - return LevelOf(c.LevelEnabler) -} - -func (c *ioCore) With(fields []Field) Core { - clone := c.clone() - addFields(clone.enc, fields) - return clone -} - -func (c *ioCore) Check(ent Entry, ce *CheckedEntry) *CheckedEntry { - if c.Enabled(ent.Level) { - return ce.AddCore(ent, c) - } - return ce -} - -func (c *ioCore) Write(ent Entry, fields []Field) error { - buf, err := c.enc.EncodeEntry(ent, fields) - if err != nil { - return err - } - _, err = c.out.Write(buf.Bytes()) - buf.Free() - if err != nil { - return err - } - if ent.Level > ErrorLevel { - // Since we may be crashing the program, sync the output. - // Ignore Sync errors, pending a clean solution to issue #370. - _ = c.Sync() - } - return nil -} - -func (c *ioCore) Sync() error { - return c.out.Sync() -} - -func (c *ioCore) clone() *ioCore { - return &ioCore{ - LevelEnabler: c.LevelEnabler, - enc: c.enc.Clone(), - out: c.out, - } -} diff --git a/vendor/go.uber.org/zap/zapcore/doc.go b/vendor/go.uber.org/zap/zapcore/doc.go deleted file mode 100644 index 31000e91f..000000000 --- a/vendor/go.uber.org/zap/zapcore/doc.go +++ /dev/null @@ -1,24 +0,0 @@ -// Copyright (c) 2016 Uber Technologies, Inc. -// -// Permission is hereby granted, free of charge, to any person obtaining a copy -// of this software and associated documentation files (the "Software"), to deal -// in the Software without restriction, including without limitation the rights -// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -// copies of the Software, and to permit persons to whom the Software is -// furnished to do so, subject to the following conditions: -// -// The above copyright notice and this permission notice shall be included in -// all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -// THE SOFTWARE. - -// Package zapcore defines and implements the low-level interfaces upon which -// zap is built. 
By providing alternate implementations of these interfaces, -// external packages can extend zap's capabilities. -package zapcore // import "go.uber.org/zap/zapcore" diff --git a/vendor/go.uber.org/zap/zapcore/encoder.go b/vendor/go.uber.org/zap/zapcore/encoder.go deleted file mode 100644 index 044625415..000000000 --- a/vendor/go.uber.org/zap/zapcore/encoder.go +++ /dev/null @@ -1,466 +0,0 @@ -// Copyright (c) 2016 Uber Technologies, Inc. -// -// Permission is hereby granted, free of charge, to any person obtaining a copy -// of this software and associated documentation files (the "Software"), to deal -// in the Software without restriction, including without limitation the rights -// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -// copies of the Software, and to permit persons to whom the Software is -// furnished to do so, subject to the following conditions: -// -// The above copyright notice and this permission notice shall be included in -// all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -// THE SOFTWARE. - -package zapcore - -import ( - "encoding/json" - "io" - "time" - - "go.uber.org/zap/buffer" -) - -// DefaultLineEnding defines the default line ending when writing logs. -// Alternate line endings specified in EncoderConfig can override this -// behavior. -const DefaultLineEnding = "\n" - -// OmitKey defines the key to use when callers want to remove a key from log output. -const OmitKey = "" - -// A LevelEncoder serializes a Level to a primitive type. -// -// This function must make exactly one call -// to a PrimitiveArrayEncoder's Append* method. -type LevelEncoder func(Level, PrimitiveArrayEncoder) - -// LowercaseLevelEncoder serializes a Level to a lowercase string. For example, -// InfoLevel is serialized to "info". -func LowercaseLevelEncoder(l Level, enc PrimitiveArrayEncoder) { - enc.AppendString(l.String()) -} - -// LowercaseColorLevelEncoder serializes a Level to a lowercase string and adds coloring. -// For example, InfoLevel is serialized to "info" and colored blue. -func LowercaseColorLevelEncoder(l Level, enc PrimitiveArrayEncoder) { - s, ok := _levelToLowercaseColorString[l] - if !ok { - s = _unknownLevelColor.Add(l.String()) - } - enc.AppendString(s) -} - -// CapitalLevelEncoder serializes a Level to an all-caps string. For example, -// InfoLevel is serialized to "INFO". -func CapitalLevelEncoder(l Level, enc PrimitiveArrayEncoder) { - enc.AppendString(l.CapitalString()) -} - -// CapitalColorLevelEncoder serializes a Level to an all-caps string and adds color. -// For example, InfoLevel is serialized to "INFO" and colored blue. -func CapitalColorLevelEncoder(l Level, enc PrimitiveArrayEncoder) { - s, ok := _levelToCapitalColorString[l] - if !ok { - s = _unknownLevelColor.Add(l.CapitalString()) - } - enc.AppendString(s) -} - -// UnmarshalText unmarshals text to a LevelEncoder. 
"capital" is unmarshaled to -// CapitalLevelEncoder, "coloredCapital" is unmarshaled to CapitalColorLevelEncoder, -// "colored" is unmarshaled to LowercaseColorLevelEncoder, and anything else -// is unmarshaled to LowercaseLevelEncoder. -func (e *LevelEncoder) UnmarshalText(text []byte) error { - switch string(text) { - case "capital": - *e = CapitalLevelEncoder - case "capitalColor": - *e = CapitalColorLevelEncoder - case "color": - *e = LowercaseColorLevelEncoder - default: - *e = LowercaseLevelEncoder - } - return nil -} - -// A TimeEncoder serializes a time.Time to a primitive type. -// -// This function must make exactly one call -// to a PrimitiveArrayEncoder's Append* method. -type TimeEncoder func(time.Time, PrimitiveArrayEncoder) - -// EpochTimeEncoder serializes a time.Time to a floating-point number of seconds -// since the Unix epoch. -func EpochTimeEncoder(t time.Time, enc PrimitiveArrayEncoder) { - nanos := t.UnixNano() - sec := float64(nanos) / float64(time.Second) - enc.AppendFloat64(sec) -} - -// EpochMillisTimeEncoder serializes a time.Time to a floating-point number of -// milliseconds since the Unix epoch. -func EpochMillisTimeEncoder(t time.Time, enc PrimitiveArrayEncoder) { - nanos := t.UnixNano() - millis := float64(nanos) / float64(time.Millisecond) - enc.AppendFloat64(millis) -} - -// EpochNanosTimeEncoder serializes a time.Time to an integer number of -// nanoseconds since the Unix epoch. -func EpochNanosTimeEncoder(t time.Time, enc PrimitiveArrayEncoder) { - enc.AppendInt64(t.UnixNano()) -} - -func encodeTimeLayout(t time.Time, layout string, enc PrimitiveArrayEncoder) { - type appendTimeEncoder interface { - AppendTimeLayout(time.Time, string) - } - - if enc, ok := enc.(appendTimeEncoder); ok { - enc.AppendTimeLayout(t, layout) - return - } - - enc.AppendString(t.Format(layout)) -} - -// ISO8601TimeEncoder serializes a time.Time to an ISO8601-formatted string -// with millisecond precision. -// -// If enc supports AppendTimeLayout(t time.Time,layout string), it's used -// instead of appending a pre-formatted string value. -func ISO8601TimeEncoder(t time.Time, enc PrimitiveArrayEncoder) { - encodeTimeLayout(t, "2006-01-02T15:04:05.000Z0700", enc) -} - -// RFC3339TimeEncoder serializes a time.Time to an RFC3339-formatted string. -// -// If enc supports AppendTimeLayout(t time.Time,layout string), it's used -// instead of appending a pre-formatted string value. -func RFC3339TimeEncoder(t time.Time, enc PrimitiveArrayEncoder) { - encodeTimeLayout(t, time.RFC3339, enc) -} - -// RFC3339NanoTimeEncoder serializes a time.Time to an RFC3339-formatted string -// with nanosecond precision. -// -// If enc supports AppendTimeLayout(t time.Time,layout string), it's used -// instead of appending a pre-formatted string value. -func RFC3339NanoTimeEncoder(t time.Time, enc PrimitiveArrayEncoder) { - encodeTimeLayout(t, time.RFC3339Nano, enc) -} - -// TimeEncoderOfLayout returns TimeEncoder which serializes a time.Time using -// given layout. -func TimeEncoderOfLayout(layout string) TimeEncoder { - return func(t time.Time, enc PrimitiveArrayEncoder) { - encodeTimeLayout(t, layout, enc) - } -} - -// UnmarshalText unmarshals text to a TimeEncoder. -// "rfc3339nano" and "RFC3339Nano" are unmarshaled to RFC3339NanoTimeEncoder. -// "rfc3339" and "RFC3339" are unmarshaled to RFC3339TimeEncoder. -// "iso8601" and "ISO8601" are unmarshaled to ISO8601TimeEncoder. -// "millis" is unmarshaled to EpochMillisTimeEncoder. -// "nanos" is unmarshaled to EpochNanosEncoder. 
-// Anything else is unmarshaled to EpochTimeEncoder. -func (e *TimeEncoder) UnmarshalText(text []byte) error { - switch string(text) { - case "rfc3339nano", "RFC3339Nano": - *e = RFC3339NanoTimeEncoder - case "rfc3339", "RFC3339": - *e = RFC3339TimeEncoder - case "iso8601", "ISO8601": - *e = ISO8601TimeEncoder - case "millis": - *e = EpochMillisTimeEncoder - case "nanos": - *e = EpochNanosTimeEncoder - default: - *e = EpochTimeEncoder - } - return nil -} - -// UnmarshalYAML unmarshals YAML to a TimeEncoder. -// If value is an object with a "layout" field, it will be unmarshaled to TimeEncoder with given layout. -// -// timeEncoder: -// layout: 06/01/02 03:04pm -// -// If value is string, it uses UnmarshalText. -// -// timeEncoder: iso8601 -func (e *TimeEncoder) UnmarshalYAML(unmarshal func(interface{}) error) error { - var o struct { - Layout string `json:"layout" yaml:"layout"` - } - if err := unmarshal(&o); err == nil { - *e = TimeEncoderOfLayout(o.Layout) - return nil - } - - var s string - if err := unmarshal(&s); err != nil { - return err - } - return e.UnmarshalText([]byte(s)) -} - -// UnmarshalJSON unmarshals JSON to a TimeEncoder as same way UnmarshalYAML does. -func (e *TimeEncoder) UnmarshalJSON(data []byte) error { - return e.UnmarshalYAML(func(v interface{}) error { - return json.Unmarshal(data, v) - }) -} - -// A DurationEncoder serializes a time.Duration to a primitive type. -// -// This function must make exactly one call -// to a PrimitiveArrayEncoder's Append* method. -type DurationEncoder func(time.Duration, PrimitiveArrayEncoder) - -// SecondsDurationEncoder serializes a time.Duration to a floating-point number of seconds elapsed. -func SecondsDurationEncoder(d time.Duration, enc PrimitiveArrayEncoder) { - enc.AppendFloat64(float64(d) / float64(time.Second)) -} - -// NanosDurationEncoder serializes a time.Duration to an integer number of -// nanoseconds elapsed. -func NanosDurationEncoder(d time.Duration, enc PrimitiveArrayEncoder) { - enc.AppendInt64(int64(d)) -} - -// MillisDurationEncoder serializes a time.Duration to an integer number of -// milliseconds elapsed. -func MillisDurationEncoder(d time.Duration, enc PrimitiveArrayEncoder) { - enc.AppendInt64(d.Nanoseconds() / 1e6) -} - -// StringDurationEncoder serializes a time.Duration using its built-in String -// method. -func StringDurationEncoder(d time.Duration, enc PrimitiveArrayEncoder) { - enc.AppendString(d.String()) -} - -// UnmarshalText unmarshals text to a DurationEncoder. "string" is unmarshaled -// to StringDurationEncoder, and anything else is unmarshaled to -// NanosDurationEncoder. -func (e *DurationEncoder) UnmarshalText(text []byte) error { - switch string(text) { - case "string": - *e = StringDurationEncoder - case "nanos": - *e = NanosDurationEncoder - case "ms": - *e = MillisDurationEncoder - default: - *e = SecondsDurationEncoder - } - return nil -} - -// A CallerEncoder serializes an EntryCaller to a primitive type. -// -// This function must make exactly one call -// to a PrimitiveArrayEncoder's Append* method. -type CallerEncoder func(EntryCaller, PrimitiveArrayEncoder) - -// FullCallerEncoder serializes a caller in /full/path/to/package/file:line -// format. -func FullCallerEncoder(caller EntryCaller, enc PrimitiveArrayEncoder) { - // TODO: consider using a byte-oriented API to save an allocation. - enc.AppendString(caller.String()) -} - -// ShortCallerEncoder serializes a caller in package/file:line format, trimming -// all but the final directory from the full path. 
-func ShortCallerEncoder(caller EntryCaller, enc PrimitiveArrayEncoder) { - // TODO: consider using a byte-oriented API to save an allocation. - enc.AppendString(caller.TrimmedPath()) -} - -// UnmarshalText unmarshals text to a CallerEncoder. "full" is unmarshaled to -// FullCallerEncoder and anything else is unmarshaled to ShortCallerEncoder. -func (e *CallerEncoder) UnmarshalText(text []byte) error { - switch string(text) { - case "full": - *e = FullCallerEncoder - default: - *e = ShortCallerEncoder - } - return nil -} - -// A NameEncoder serializes a period-separated logger name to a primitive -// type. -// -// This function must make exactly one call -// to a PrimitiveArrayEncoder's Append* method. -type NameEncoder func(string, PrimitiveArrayEncoder) - -// FullNameEncoder serializes the logger name as-is. -func FullNameEncoder(loggerName string, enc PrimitiveArrayEncoder) { - enc.AppendString(loggerName) -} - -// UnmarshalText unmarshals text to a NameEncoder. Currently, everything is -// unmarshaled to FullNameEncoder. -func (e *NameEncoder) UnmarshalText(text []byte) error { - switch string(text) { - case "full": - *e = FullNameEncoder - default: - *e = FullNameEncoder - } - return nil -} - -// An EncoderConfig allows users to configure the concrete encoders supplied by -// zapcore. -type EncoderConfig struct { - // Set the keys used for each log entry. If any key is empty, that portion - // of the entry is omitted. - MessageKey string `json:"messageKey" yaml:"messageKey"` - LevelKey string `json:"levelKey" yaml:"levelKey"` - TimeKey string `json:"timeKey" yaml:"timeKey"` - NameKey string `json:"nameKey" yaml:"nameKey"` - CallerKey string `json:"callerKey" yaml:"callerKey"` - FunctionKey string `json:"functionKey" yaml:"functionKey"` - StacktraceKey string `json:"stacktraceKey" yaml:"stacktraceKey"` - SkipLineEnding bool `json:"skipLineEnding" yaml:"skipLineEnding"` - LineEnding string `json:"lineEnding" yaml:"lineEnding"` - // Configure the primitive representations of common complex types. For - // example, some users may want all time.Times serialized as floating-point - // seconds since epoch, while others may prefer ISO8601 strings. - EncodeLevel LevelEncoder `json:"levelEncoder" yaml:"levelEncoder"` - EncodeTime TimeEncoder `json:"timeEncoder" yaml:"timeEncoder"` - EncodeDuration DurationEncoder `json:"durationEncoder" yaml:"durationEncoder"` - EncodeCaller CallerEncoder `json:"callerEncoder" yaml:"callerEncoder"` - // Unlike the other primitive type encoders, EncodeName is optional. The - // zero value falls back to FullNameEncoder. - EncodeName NameEncoder `json:"nameEncoder" yaml:"nameEncoder"` - // Configure the encoder for interface{} type objects. - // If not provided, objects are encoded using json.Encoder - NewReflectedEncoder func(io.Writer) ReflectedEncoder `json:"-" yaml:"-"` - // Configures the field separator used by the console encoder. Defaults - // to tab. - ConsoleSeparator string `json:"consoleSeparator" yaml:"consoleSeparator"` -} - -// ObjectEncoder is a strongly-typed, encoding-agnostic interface for adding a -// map- or struct-like object to the logging context. Like maps, ObjectEncoders -// aren't safe for concurrent use (though typical use shouldn't require locks). -type ObjectEncoder interface { - // Logging-specific marshalers. - AddArray(key string, marshaler ArrayMarshaler) error - AddObject(key string, marshaler ObjectMarshaler) error - - // Built-in types. 
- AddBinary(key string, value []byte) // for arbitrary bytes - AddByteString(key string, value []byte) // for UTF-8 encoded bytes - AddBool(key string, value bool) - AddComplex128(key string, value complex128) - AddComplex64(key string, value complex64) - AddDuration(key string, value time.Duration) - AddFloat64(key string, value float64) - AddFloat32(key string, value float32) - AddInt(key string, value int) - AddInt64(key string, value int64) - AddInt32(key string, value int32) - AddInt16(key string, value int16) - AddInt8(key string, value int8) - AddString(key, value string) - AddTime(key string, value time.Time) - AddUint(key string, value uint) - AddUint64(key string, value uint64) - AddUint32(key string, value uint32) - AddUint16(key string, value uint16) - AddUint8(key string, value uint8) - AddUintptr(key string, value uintptr) - - // AddReflected uses reflection to serialize arbitrary objects, so it can be - // slow and allocation-heavy. - AddReflected(key string, value interface{}) error - // OpenNamespace opens an isolated namespace where all subsequent fields will - // be added. Applications can use namespaces to prevent key collisions when - // injecting loggers into sub-components or third-party libraries. - OpenNamespace(key string) -} - -// ArrayEncoder is a strongly-typed, encoding-agnostic interface for adding -// array-like objects to the logging context. Of note, it supports mixed-type -// arrays even though they aren't typical in Go. Like slices, ArrayEncoders -// aren't safe for concurrent use (though typical use shouldn't require locks). -type ArrayEncoder interface { - // Built-in types. - PrimitiveArrayEncoder - - // Time-related types. - AppendDuration(time.Duration) - AppendTime(time.Time) - - // Logging-specific marshalers. - AppendArray(ArrayMarshaler) error - AppendObject(ObjectMarshaler) error - - // AppendReflected uses reflection to serialize arbitrary objects, so it's - // slow and allocation-heavy. - AppendReflected(value interface{}) error -} - -// PrimitiveArrayEncoder is the subset of the ArrayEncoder interface that deals -// only in Go's built-in types. It's included only so that Duration- and -// TimeEncoders cannot trigger infinite recursion. -type PrimitiveArrayEncoder interface { - // Built-in types. - AppendBool(bool) - AppendByteString([]byte) // for UTF-8 encoded bytes - AppendComplex128(complex128) - AppendComplex64(complex64) - AppendFloat64(float64) - AppendFloat32(float32) - AppendInt(int) - AppendInt64(int64) - AppendInt32(int32) - AppendInt16(int16) - AppendInt8(int8) - AppendString(string) - AppendUint(uint) - AppendUint64(uint64) - AppendUint32(uint32) - AppendUint16(uint16) - AppendUint8(uint8) - AppendUintptr(uintptr) -} - -// Encoder is a format-agnostic interface for all log entry marshalers. Since -// log encoders don't need to support the same wide range of use cases as -// general-purpose marshalers, it's possible to make them faster and -// lower-allocation. -// -// Implementations of the ObjectEncoder interface's methods can, of course, -// freely modify the receiver. However, the Clone and EncodeEntry methods will -// be called concurrently and shouldn't modify the receiver. -type Encoder interface { - ObjectEncoder - - // Clone copies the encoder, ensuring that adding fields to the copy doesn't - // affect the original. - Clone() Encoder - - // EncodeEntry encodes an entry and fields, along with any accumulated - // context, into a byte buffer and returns it. 
Any fields that are empty, - // including fields on the `Entry` type, should be omitted. - EncodeEntry(Entry, []Field) (*buffer.Buffer, error) -} diff --git a/vendor/go.uber.org/zap/zapcore/entry.go b/vendor/go.uber.org/zap/zapcore/entry.go deleted file mode 100644 index 459a5d7ce..000000000 --- a/vendor/go.uber.org/zap/zapcore/entry.go +++ /dev/null @@ -1,298 +0,0 @@ -// Copyright (c) 2016 Uber Technologies, Inc. -// -// Permission is hereby granted, free of charge, to any person obtaining a copy -// of this software and associated documentation files (the "Software"), to deal -// in the Software without restriction, including without limitation the rights -// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -// copies of the Software, and to permit persons to whom the Software is -// furnished to do so, subject to the following conditions: -// -// The above copyright notice and this permission notice shall be included in -// all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -// THE SOFTWARE. - -package zapcore - -import ( - "fmt" - "runtime" - "strings" - "time" - - "go.uber.org/multierr" - "go.uber.org/zap/internal/bufferpool" - "go.uber.org/zap/internal/exit" - "go.uber.org/zap/internal/pool" -) - -var _cePool = pool.New(func() *CheckedEntry { - // Pre-allocate some space for cores. - return &CheckedEntry{ - cores: make([]Core, 4), - } -}) - -func getCheckedEntry() *CheckedEntry { - ce := _cePool.Get() - ce.reset() - return ce -} - -func putCheckedEntry(ce *CheckedEntry) { - if ce == nil { - return - } - _cePool.Put(ce) -} - -// NewEntryCaller makes an EntryCaller from the return signature of -// runtime.Caller. -func NewEntryCaller(pc uintptr, file string, line int, ok bool) EntryCaller { - if !ok { - return EntryCaller{} - } - return EntryCaller{ - PC: pc, - File: file, - Line: line, - Defined: true, - } -} - -// EntryCaller represents the caller of a logging function. -type EntryCaller struct { - Defined bool - PC uintptr - File string - Line int - Function string -} - -// String returns the full path and line number of the caller. -func (ec EntryCaller) String() string { - return ec.FullPath() -} - -// FullPath returns a /full/path/to/package/file:line description of the -// caller. -func (ec EntryCaller) FullPath() string { - if !ec.Defined { - return "undefined" - } - buf := bufferpool.Get() - buf.AppendString(ec.File) - buf.AppendByte(':') - buf.AppendInt(int64(ec.Line)) - caller := buf.String() - buf.Free() - return caller -} - -// TrimmedPath returns a package/file:line description of the caller, -// preserving only the leaf directory name and file name. -func (ec EntryCaller) TrimmedPath() string { - if !ec.Defined { - return "undefined" - } - // nb. To make sure we trim the path correctly on Windows too, we - // counter-intuitively need to use '/' and *not* os.PathSeparator here, - // because the path given originates from Go stdlib, specifically - // runtime.Caller() which (as of Mar/17) returns forward slashes even on - // Windows. 
- // - // See https://github.com/golang/go/issues/3335 - // and https://github.com/golang/go/issues/18151 - // - // for discussion on the issue on Go side. - // - // Find the last separator. - // - idx := strings.LastIndexByte(ec.File, '/') - if idx == -1 { - return ec.FullPath() - } - // Find the penultimate separator. - idx = strings.LastIndexByte(ec.File[:idx], '/') - if idx == -1 { - return ec.FullPath() - } - buf := bufferpool.Get() - // Keep everything after the penultimate separator. - buf.AppendString(ec.File[idx+1:]) - buf.AppendByte(':') - buf.AppendInt(int64(ec.Line)) - caller := buf.String() - buf.Free() - return caller -} - -// An Entry represents a complete log message. The entry's structured context -// is already serialized, but the log level, time, message, and call site -// information are available for inspection and modification. Any fields left -// empty will be omitted when encoding. -// -// Entries are pooled, so any functions that accept them MUST be careful not to -// retain references to them. -type Entry struct { - Level Level - Time time.Time - LoggerName string - Message string - Caller EntryCaller - Stack string -} - -// CheckWriteHook is a custom action that may be executed after an entry is -// written. -// -// Register one on a CheckedEntry with the After method. -// -// if ce := logger.Check(...); ce != nil { -// ce = ce.After(hook) -// ce.Write(...) -// } -// -// You can configure the hook for Fatal log statements at the logger level with -// the zap.WithFatalHook option. -type CheckWriteHook interface { - // OnWrite is invoked with the CheckedEntry that was written and a list - // of fields added with that entry. - // - // The list of fields DOES NOT include fields that were already added - // to the logger with the With method. - OnWrite(*CheckedEntry, []Field) -} - -// CheckWriteAction indicates what action to take after a log entry is -// processed. Actions are ordered in increasing severity. -type CheckWriteAction uint8 - -const ( - // WriteThenNoop indicates that nothing special needs to be done. It's the - // default behavior. - WriteThenNoop CheckWriteAction = iota - // WriteThenGoexit runs runtime.Goexit after Write. - WriteThenGoexit - // WriteThenPanic causes a panic after Write. - WriteThenPanic - // WriteThenFatal causes an os.Exit(1) after Write. - WriteThenFatal -) - -// OnWrite implements the OnWrite method to keep CheckWriteAction compatible -// with the new CheckWriteHook interface which deprecates CheckWriteAction. -func (a CheckWriteAction) OnWrite(ce *CheckedEntry, _ []Field) { - switch a { - case WriteThenGoexit: - runtime.Goexit() - case WriteThenPanic: - panic(ce.Message) - case WriteThenFatal: - exit.With(1) - } -} - -var _ CheckWriteHook = CheckWriteAction(0) - -// CheckedEntry is an Entry together with a collection of Cores that have -// already agreed to log it. -// -// CheckedEntry references should be created by calling AddCore or After on a -// nil *CheckedEntry. References are returned to a pool after Write, and MUST -// NOT be retained after calling their Write method. 
-type CheckedEntry struct { - Entry - ErrorOutput WriteSyncer - dirty bool // best-effort detection of pool misuse - after CheckWriteHook - cores []Core -} - -func (ce *CheckedEntry) reset() { - ce.Entry = Entry{} - ce.ErrorOutput = nil - ce.dirty = false - ce.after = nil - for i := range ce.cores { - // don't keep references to cores - ce.cores[i] = nil - } - ce.cores = ce.cores[:0] -} - -// Write writes the entry to the stored Cores, returns any errors, and returns -// the CheckedEntry reference to a pool for immediate re-use. Finally, it -// executes any required CheckWriteAction. -func (ce *CheckedEntry) Write(fields ...Field) { - if ce == nil { - return - } - - if ce.dirty { - if ce.ErrorOutput != nil { - // Make a best effort to detect unsafe re-use of this CheckedEntry. - // If the entry is dirty, log an internal error; because the - // CheckedEntry is being used after it was returned to the pool, - // the message may be an amalgamation from multiple call sites. - fmt.Fprintf(ce.ErrorOutput, "%v Unsafe CheckedEntry re-use near Entry %+v.\n", ce.Time, ce.Entry) - _ = ce.ErrorOutput.Sync() // ignore error - } - return - } - ce.dirty = true - - var err error - for i := range ce.cores { - err = multierr.Append(err, ce.cores[i].Write(ce.Entry, fields)) - } - if err != nil && ce.ErrorOutput != nil { - fmt.Fprintf(ce.ErrorOutput, "%v write error: %v\n", ce.Time, err) - _ = ce.ErrorOutput.Sync() // ignore error - } - - hook := ce.after - if hook != nil { - hook.OnWrite(ce, fields) - } - putCheckedEntry(ce) -} - -// AddCore adds a Core that has agreed to log this CheckedEntry. It's intended to be -// used by Core.Check implementations, and is safe to call on nil CheckedEntry -// references. -func (ce *CheckedEntry) AddCore(ent Entry, core Core) *CheckedEntry { - if ce == nil { - ce = getCheckedEntry() - ce.Entry = ent - } - ce.cores = append(ce.cores, core) - return ce -} - -// Should sets this CheckedEntry's CheckWriteAction, which controls whether a -// Core will panic or fatal after writing this log entry. Like AddCore, it's -// safe to call on nil CheckedEntry references. -// -// Deprecated: Use [CheckedEntry.After] instead. -func (ce *CheckedEntry) Should(ent Entry, should CheckWriteAction) *CheckedEntry { - return ce.After(ent, should) -} - -// After sets this CheckEntry's CheckWriteHook, which will be called after this -// log entry has been written. It's safe to call this on nil CheckedEntry -// references. -func (ce *CheckedEntry) After(ent Entry, hook CheckWriteHook) *CheckedEntry { - if ce == nil { - ce = getCheckedEntry() - ce.Entry = ent - } - ce.after = hook - return ce -} diff --git a/vendor/go.uber.org/zap/zapcore/error.go b/vendor/go.uber.org/zap/zapcore/error.go deleted file mode 100644 index c40df1326..000000000 --- a/vendor/go.uber.org/zap/zapcore/error.go +++ /dev/null @@ -1,136 +0,0 @@ -// Copyright (c) 2017 Uber Technologies, Inc. -// -// Permission is hereby granted, free of charge, to any person obtaining a copy -// of this software and associated documentation files (the "Software"), to deal -// in the Software without restriction, including without limitation the rights -// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -// copies of the Software, and to permit persons to whom the Software is -// furnished to do so, subject to the following conditions: -// -// The above copyright notice and this permission notice shall be included in -// all copies or substantial portions of the Software. 
-// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -// THE SOFTWARE. - -package zapcore - -import ( - "fmt" - "reflect" - - "go.uber.org/zap/internal/pool" -) - -// Encodes the given error into fields of an object. A field with the given -// name is added for the error message. -// -// If the error implements fmt.Formatter, a field with the name ${key}Verbose -// is also added with the full verbose error message. -// -// Finally, if the error implements errorGroup (from go.uber.org/multierr) or -// causer (from github.com/pkg/errors), a ${key}Causes field is added with an -// array of objects containing the errors this error was comprised of. -// -// { -// "error": err.Error(), -// "errorVerbose": fmt.Sprintf("%+v", err), -// "errorCauses": [ -// ... -// ], -// } -func encodeError(key string, err error, enc ObjectEncoder) (retErr error) { - // Try to capture panics (from nil references or otherwise) when calling - // the Error() method - defer func() { - if rerr := recover(); rerr != nil { - // If it's a nil pointer, just say "". The likeliest causes are a - // error that fails to guard against nil or a nil pointer for a - // value receiver, and in either case, "" is a nice result. - if v := reflect.ValueOf(err); v.Kind() == reflect.Ptr && v.IsNil() { - enc.AddString(key, "") - return - } - - retErr = fmt.Errorf("PANIC=%v", rerr) - } - }() - - basic := err.Error() - enc.AddString(key, basic) - - switch e := err.(type) { - case errorGroup: - return enc.AddArray(key+"Causes", errArray(e.Errors())) - case fmt.Formatter: - verbose := fmt.Sprintf("%+v", e) - if verbose != basic { - // This is a rich error type, like those produced by - // github.com/pkg/errors. - enc.AddString(key+"Verbose", verbose) - } - } - return nil -} - -type errorGroup interface { - // Provides read-only access to the underlying list of errors, preferably - // without causing any allocs. - Errors() []error -} - -// Note that errArray and errArrayElem are very similar to the version -// implemented in the top-level error.go file. We can't re-use this because -// that would require exporting errArray as part of the zapcore API. - -// Encodes a list of errors using the standard error encoding logic. -type errArray []error - -func (errs errArray) MarshalLogArray(arr ArrayEncoder) error { - for i := range errs { - if errs[i] == nil { - continue - } - - el := newErrArrayElem(errs[i]) - err := arr.AppendObject(el) - el.Free() - if err != nil { - return err - } - } - return nil -} - -var _errArrayElemPool = pool.New(func() *errArrayElem { - return &errArrayElem{} -}) - -// Encodes any error into a {"error": ...} re-using the same errors logic. -// -// May be passed in place of an array to build a single-element array. 
-type errArrayElem struct{ err error } - -func newErrArrayElem(err error) *errArrayElem { - e := _errArrayElemPool.Get() - e.err = err - return e -} - -func (e *errArrayElem) MarshalLogArray(arr ArrayEncoder) error { - return arr.AppendObject(e) -} - -func (e *errArrayElem) MarshalLogObject(enc ObjectEncoder) error { - return encodeError("error", e.err, enc) -} - -func (e *errArrayElem) Free() { - e.err = nil - _errArrayElemPool.Put(e) -} diff --git a/vendor/go.uber.org/zap/zapcore/field.go b/vendor/go.uber.org/zap/zapcore/field.go deleted file mode 100644 index 308c9781e..000000000 --- a/vendor/go.uber.org/zap/zapcore/field.go +++ /dev/null @@ -1,233 +0,0 @@ -// Copyright (c) 2016 Uber Technologies, Inc. -// -// Permission is hereby granted, free of charge, to any person obtaining a copy -// of this software and associated documentation files (the "Software"), to deal -// in the Software without restriction, including without limitation the rights -// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -// copies of the Software, and to permit persons to whom the Software is -// furnished to do so, subject to the following conditions: -// -// The above copyright notice and this permission notice shall be included in -// all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -// THE SOFTWARE. - -package zapcore - -import ( - "bytes" - "fmt" - "math" - "reflect" - "time" -) - -// A FieldType indicates which member of the Field union struct should be used -// and how it should be serialized. -type FieldType uint8 - -const ( - // UnknownType is the default field type. Attempting to add it to an encoder will panic. - UnknownType FieldType = iota - // ArrayMarshalerType indicates that the field carries an ArrayMarshaler. - ArrayMarshalerType - // ObjectMarshalerType indicates that the field carries an ObjectMarshaler. - ObjectMarshalerType - // BinaryType indicates that the field carries an opaque binary blob. - BinaryType - // BoolType indicates that the field carries a bool. - BoolType - // ByteStringType indicates that the field carries UTF-8 encoded bytes. - ByteStringType - // Complex128Type indicates that the field carries a complex128. - Complex128Type - // Complex64Type indicates that the field carries a complex64. - Complex64Type - // DurationType indicates that the field carries a time.Duration. - DurationType - // Float64Type indicates that the field carries a float64. - Float64Type - // Float32Type indicates that the field carries a float32. - Float32Type - // Int64Type indicates that the field carries an int64. - Int64Type - // Int32Type indicates that the field carries an int32. - Int32Type - // Int16Type indicates that the field carries an int16. - Int16Type - // Int8Type indicates that the field carries an int8. - Int8Type - // StringType indicates that the field carries a string. - StringType - // TimeType indicates that the field carries a time.Time that is - // representable by a UnixNano() stored as an int64. 
- TimeType - // TimeFullType indicates that the field carries a time.Time stored as-is. - TimeFullType - // Uint64Type indicates that the field carries a uint64. - Uint64Type - // Uint32Type indicates that the field carries a uint32. - Uint32Type - // Uint16Type indicates that the field carries a uint16. - Uint16Type - // Uint8Type indicates that the field carries a uint8. - Uint8Type - // UintptrType indicates that the field carries a uintptr. - UintptrType - // ReflectType indicates that the field carries an interface{}, which should - // be serialized using reflection. - ReflectType - // NamespaceType signals the beginning of an isolated namespace. All - // subsequent fields should be added to the new namespace. - NamespaceType - // StringerType indicates that the field carries a fmt.Stringer. - StringerType - // ErrorType indicates that the field carries an error. - ErrorType - // SkipType indicates that the field is a no-op. - SkipType - - // InlineMarshalerType indicates that the field carries an ObjectMarshaler - // that should be inlined. - InlineMarshalerType -) - -// A Field is a marshaling operation used to add a key-value pair to a logger's -// context. Most fields are lazily marshaled, so it's inexpensive to add fields -// to disabled debug-level log statements. -type Field struct { - Key string - Type FieldType - Integer int64 - String string - Interface interface{} -} - -// AddTo exports a field through the ObjectEncoder interface. It's primarily -// useful to library authors, and shouldn't be necessary in most applications. -func (f Field) AddTo(enc ObjectEncoder) { - var err error - - switch f.Type { - case ArrayMarshalerType: - err = enc.AddArray(f.Key, f.Interface.(ArrayMarshaler)) - case ObjectMarshalerType: - err = enc.AddObject(f.Key, f.Interface.(ObjectMarshaler)) - case InlineMarshalerType: - err = f.Interface.(ObjectMarshaler).MarshalLogObject(enc) - case BinaryType: - enc.AddBinary(f.Key, f.Interface.([]byte)) - case BoolType: - enc.AddBool(f.Key, f.Integer == 1) - case ByteStringType: - enc.AddByteString(f.Key, f.Interface.([]byte)) - case Complex128Type: - enc.AddComplex128(f.Key, f.Interface.(complex128)) - case Complex64Type: - enc.AddComplex64(f.Key, f.Interface.(complex64)) - case DurationType: - enc.AddDuration(f.Key, time.Duration(f.Integer)) - case Float64Type: - enc.AddFloat64(f.Key, math.Float64frombits(uint64(f.Integer))) - case Float32Type: - enc.AddFloat32(f.Key, math.Float32frombits(uint32(f.Integer))) - case Int64Type: - enc.AddInt64(f.Key, f.Integer) - case Int32Type: - enc.AddInt32(f.Key, int32(f.Integer)) - case Int16Type: - enc.AddInt16(f.Key, int16(f.Integer)) - case Int8Type: - enc.AddInt8(f.Key, int8(f.Integer)) - case StringType: - enc.AddString(f.Key, f.String) - case TimeType: - if f.Interface != nil { - enc.AddTime(f.Key, time.Unix(0, f.Integer).In(f.Interface.(*time.Location))) - } else { - // Fall back to UTC if location is nil. 
- enc.AddTime(f.Key, time.Unix(0, f.Integer)) - } - case TimeFullType: - enc.AddTime(f.Key, f.Interface.(time.Time)) - case Uint64Type: - enc.AddUint64(f.Key, uint64(f.Integer)) - case Uint32Type: - enc.AddUint32(f.Key, uint32(f.Integer)) - case Uint16Type: - enc.AddUint16(f.Key, uint16(f.Integer)) - case Uint8Type: - enc.AddUint8(f.Key, uint8(f.Integer)) - case UintptrType: - enc.AddUintptr(f.Key, uintptr(f.Integer)) - case ReflectType: - err = enc.AddReflected(f.Key, f.Interface) - case NamespaceType: - enc.OpenNamespace(f.Key) - case StringerType: - err = encodeStringer(f.Key, f.Interface, enc) - case ErrorType: - err = encodeError(f.Key, f.Interface.(error), enc) - case SkipType: - break - default: - panic(fmt.Sprintf("unknown field type: %v", f)) - } - - if err != nil { - enc.AddString(fmt.Sprintf("%sError", f.Key), err.Error()) - } -} - -// Equals returns whether two fields are equal. For non-primitive types such as -// errors, marshalers, or reflect types, it uses reflect.DeepEqual. -func (f Field) Equals(other Field) bool { - if f.Type != other.Type { - return false - } - if f.Key != other.Key { - return false - } - - switch f.Type { - case BinaryType, ByteStringType: - return bytes.Equal(f.Interface.([]byte), other.Interface.([]byte)) - case ArrayMarshalerType, ObjectMarshalerType, ErrorType, ReflectType: - return reflect.DeepEqual(f.Interface, other.Interface) - default: - return f == other - } -} - -func addFields(enc ObjectEncoder, fields []Field) { - for i := range fields { - fields[i].AddTo(enc) - } -} - -func encodeStringer(key string, stringer interface{}, enc ObjectEncoder) (retErr error) { - // Try to capture panics (from nil references or otherwise) when calling - // the String() method, similar to https://golang.org/src/fmt/print.go#L540 - defer func() { - if err := recover(); err != nil { - // If it's a nil pointer, just say "". The likeliest causes are a - // Stringer that fails to guard against nil or a nil pointer for a - // value receiver, and in either case, "" is a nice result. - if v := reflect.ValueOf(stringer); v.Kind() == reflect.Ptr && v.IsNil() { - enc.AddString(key, "") - return - } - - retErr = fmt.Errorf("PANIC=%v", err) - } - }() - - enc.AddString(key, stringer.(fmt.Stringer).String()) - return nil -} diff --git a/vendor/go.uber.org/zap/zapcore/hook.go b/vendor/go.uber.org/zap/zapcore/hook.go deleted file mode 100644 index 198def991..000000000 --- a/vendor/go.uber.org/zap/zapcore/hook.go +++ /dev/null @@ -1,77 +0,0 @@ -// Copyright (c) 2016 Uber Technologies, Inc. -// -// Permission is hereby granted, free of charge, to any person obtaining a copy -// of this software and associated documentation files (the "Software"), to deal -// in the Software without restriction, including without limitation the rights -// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -// copies of the Software, and to permit persons to whom the Software is -// furnished to do so, subject to the following conditions: -// -// The above copyright notice and this permission notice shall be included in -// all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE -// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -// THE SOFTWARE. - -package zapcore - -import "go.uber.org/multierr" - -type hooked struct { - Core - funcs []func(Entry) error -} - -var ( - _ Core = (*hooked)(nil) - _ leveledEnabler = (*hooked)(nil) -) - -// RegisterHooks wraps a Core and runs a collection of user-defined callback -// hooks each time a message is logged. Execution of the callbacks is blocking. -// -// This offers users an easy way to register simple callbacks (e.g., metrics -// collection) without implementing the full Core interface. -func RegisterHooks(core Core, hooks ...func(Entry) error) Core { - funcs := append([]func(Entry) error{}, hooks...) - return &hooked{ - Core: core, - funcs: funcs, - } -} - -func (h *hooked) Level() Level { - return LevelOf(h.Core) -} - -func (h *hooked) Check(ent Entry, ce *CheckedEntry) *CheckedEntry { - // Let the wrapped Core decide whether to log this message or not. This - // also gives the downstream a chance to register itself directly with the - // CheckedEntry. - if downstream := h.Core.Check(ent, ce); downstream != nil { - return downstream.AddCore(ent, h) - } - return ce -} - -func (h *hooked) With(fields []Field) Core { - return &hooked{ - Core: h.Core.With(fields), - funcs: h.funcs, - } -} - -func (h *hooked) Write(ent Entry, _ []Field) error { - // Since our downstream had a chance to register itself directly with the - // CheckedMessage, we don't need to call it here. - var err error - for i := range h.funcs { - err = multierr.Append(err, h.funcs[i](ent)) - } - return err -} diff --git a/vendor/go.uber.org/zap/zapcore/increase_level.go b/vendor/go.uber.org/zap/zapcore/increase_level.go deleted file mode 100644 index 7a11237ae..000000000 --- a/vendor/go.uber.org/zap/zapcore/increase_level.go +++ /dev/null @@ -1,75 +0,0 @@ -// Copyright (c) 2020 Uber Technologies, Inc. -// -// Permission is hereby granted, free of charge, to any person obtaining a copy -// of this software and associated documentation files (the "Software"), to deal -// in the Software without restriction, including without limitation the rights -// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -// copies of the Software, and to permit persons to whom the Software is -// furnished to do so, subject to the following conditions: -// -// The above copyright notice and this permission notice shall be included in -// all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -// THE SOFTWARE. - -package zapcore - -import "fmt" - -type levelFilterCore struct { - core Core - level LevelEnabler -} - -var ( - _ Core = (*levelFilterCore)(nil) - _ leveledEnabler = (*levelFilterCore)(nil) -) - -// NewIncreaseLevelCore creates a core that can be used to increase the level of -// an existing Core. 
It cannot be used to decrease the logging level, as it acts -// as a filter before calling the underlying core. If level decreases the log level, -// an error is returned. -func NewIncreaseLevelCore(core Core, level LevelEnabler) (Core, error) { - for l := _maxLevel; l >= _minLevel; l-- { - if !core.Enabled(l) && level.Enabled(l) { - return nil, fmt.Errorf("invalid increase level, as level %q is allowed by increased level, but not by existing core", l) - } - } - - return &levelFilterCore{core, level}, nil -} - -func (c *levelFilterCore) Enabled(lvl Level) bool { - return c.level.Enabled(lvl) -} - -func (c *levelFilterCore) Level() Level { - return LevelOf(c.level) -} - -func (c *levelFilterCore) With(fields []Field) Core { - return &levelFilterCore{c.core.With(fields), c.level} -} - -func (c *levelFilterCore) Check(ent Entry, ce *CheckedEntry) *CheckedEntry { - if !c.Enabled(ent.Level) { - return ce - } - - return c.core.Check(ent, ce) -} - -func (c *levelFilterCore) Write(ent Entry, fields []Field) error { - return c.core.Write(ent, fields) -} - -func (c *levelFilterCore) Sync() error { - return c.core.Sync() -} diff --git a/vendor/go.uber.org/zap/zapcore/json_encoder.go b/vendor/go.uber.org/zap/zapcore/json_encoder.go deleted file mode 100644 index 9685169b2..000000000 --- a/vendor/go.uber.org/zap/zapcore/json_encoder.go +++ /dev/null @@ -1,583 +0,0 @@ -// Copyright (c) 2016 Uber Technologies, Inc. -// -// Permission is hereby granted, free of charge, to any person obtaining a copy -// of this software and associated documentation files (the "Software"), to deal -// in the Software without restriction, including without limitation the rights -// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -// copies of the Software, and to permit persons to whom the Software is -// furnished to do so, subject to the following conditions: -// -// The above copyright notice and this permission notice shall be included in -// all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -// THE SOFTWARE. - -package zapcore - -import ( - "encoding/base64" - "math" - "time" - "unicode/utf8" - - "go.uber.org/zap/buffer" - "go.uber.org/zap/internal/bufferpool" - "go.uber.org/zap/internal/pool" -) - -// For JSON-escaping; see jsonEncoder.safeAddString below. -const _hex = "0123456789abcdef" - -var _jsonPool = pool.New(func() *jsonEncoder { - return &jsonEncoder{} -}) - -func putJSONEncoder(enc *jsonEncoder) { - if enc.reflectBuf != nil { - enc.reflectBuf.Free() - } - enc.EncoderConfig = nil - enc.buf = nil - enc.spaced = false - enc.openNamespaces = 0 - enc.reflectBuf = nil - enc.reflectEnc = nil - _jsonPool.Put(enc) -} - -type jsonEncoder struct { - *EncoderConfig - buf *buffer.Buffer - spaced bool // include spaces after colons and commas - openNamespaces int - - // for encoding generic values by reflection - reflectBuf *buffer.Buffer - reflectEnc ReflectedEncoder -} - -// NewJSONEncoder creates a fast, low-allocation JSON encoder. The encoder -// appropriately escapes all field keys and values. 
-// -// Note that the encoder doesn't deduplicate keys, so it's possible to produce -// a message like -// -// {"foo":"bar","foo":"baz"} -// -// This is permitted by the JSON specification, but not encouraged. Many -// libraries will ignore duplicate key-value pairs (typically keeping the last -// pair) when unmarshaling, but users should attempt to avoid adding duplicate -// keys. -func NewJSONEncoder(cfg EncoderConfig) Encoder { - return newJSONEncoder(cfg, false) -} - -func newJSONEncoder(cfg EncoderConfig, spaced bool) *jsonEncoder { - if cfg.SkipLineEnding { - cfg.LineEnding = "" - } else if cfg.LineEnding == "" { - cfg.LineEnding = DefaultLineEnding - } - - // If no EncoderConfig.NewReflectedEncoder is provided by the user, then use default - if cfg.NewReflectedEncoder == nil { - cfg.NewReflectedEncoder = defaultReflectedEncoder - } - - return &jsonEncoder{ - EncoderConfig: &cfg, - buf: bufferpool.Get(), - spaced: spaced, - } -} - -func (enc *jsonEncoder) AddArray(key string, arr ArrayMarshaler) error { - enc.addKey(key) - return enc.AppendArray(arr) -} - -func (enc *jsonEncoder) AddObject(key string, obj ObjectMarshaler) error { - enc.addKey(key) - return enc.AppendObject(obj) -} - -func (enc *jsonEncoder) AddBinary(key string, val []byte) { - enc.AddString(key, base64.StdEncoding.EncodeToString(val)) -} - -func (enc *jsonEncoder) AddByteString(key string, val []byte) { - enc.addKey(key) - enc.AppendByteString(val) -} - -func (enc *jsonEncoder) AddBool(key string, val bool) { - enc.addKey(key) - enc.AppendBool(val) -} - -func (enc *jsonEncoder) AddComplex128(key string, val complex128) { - enc.addKey(key) - enc.AppendComplex128(val) -} - -func (enc *jsonEncoder) AddComplex64(key string, val complex64) { - enc.addKey(key) - enc.AppendComplex64(val) -} - -func (enc *jsonEncoder) AddDuration(key string, val time.Duration) { - enc.addKey(key) - enc.AppendDuration(val) -} - -func (enc *jsonEncoder) AddFloat64(key string, val float64) { - enc.addKey(key) - enc.AppendFloat64(val) -} - -func (enc *jsonEncoder) AddFloat32(key string, val float32) { - enc.addKey(key) - enc.AppendFloat32(val) -} - -func (enc *jsonEncoder) AddInt64(key string, val int64) { - enc.addKey(key) - enc.AppendInt64(val) -} - -func (enc *jsonEncoder) resetReflectBuf() { - if enc.reflectBuf == nil { - enc.reflectBuf = bufferpool.Get() - enc.reflectEnc = enc.NewReflectedEncoder(enc.reflectBuf) - } else { - enc.reflectBuf.Reset() - } -} - -var nullLiteralBytes = []byte("null") - -// Only invoke the standard JSON encoder if there is actually something to -// encode; otherwise write JSON null literal directly. 
-func (enc *jsonEncoder) encodeReflected(obj interface{}) ([]byte, error) { - if obj == nil { - return nullLiteralBytes, nil - } - enc.resetReflectBuf() - if err := enc.reflectEnc.Encode(obj); err != nil { - return nil, err - } - enc.reflectBuf.TrimNewline() - return enc.reflectBuf.Bytes(), nil -} - -func (enc *jsonEncoder) AddReflected(key string, obj interface{}) error { - valueBytes, err := enc.encodeReflected(obj) - if err != nil { - return err - } - enc.addKey(key) - _, err = enc.buf.Write(valueBytes) - return err -} - -func (enc *jsonEncoder) OpenNamespace(key string) { - enc.addKey(key) - enc.buf.AppendByte('{') - enc.openNamespaces++ -} - -func (enc *jsonEncoder) AddString(key, val string) { - enc.addKey(key) - enc.AppendString(val) -} - -func (enc *jsonEncoder) AddTime(key string, val time.Time) { - enc.addKey(key) - enc.AppendTime(val) -} - -func (enc *jsonEncoder) AddUint64(key string, val uint64) { - enc.addKey(key) - enc.AppendUint64(val) -} - -func (enc *jsonEncoder) AppendArray(arr ArrayMarshaler) error { - enc.addElementSeparator() - enc.buf.AppendByte('[') - err := arr.MarshalLogArray(enc) - enc.buf.AppendByte(']') - return err -} - -func (enc *jsonEncoder) AppendObject(obj ObjectMarshaler) error { - // Close ONLY new openNamespaces that are created during - // AppendObject(). - old := enc.openNamespaces - enc.openNamespaces = 0 - enc.addElementSeparator() - enc.buf.AppendByte('{') - err := obj.MarshalLogObject(enc) - enc.buf.AppendByte('}') - enc.closeOpenNamespaces() - enc.openNamespaces = old - return err -} - -func (enc *jsonEncoder) AppendBool(val bool) { - enc.addElementSeparator() - enc.buf.AppendBool(val) -} - -func (enc *jsonEncoder) AppendByteString(val []byte) { - enc.addElementSeparator() - enc.buf.AppendByte('"') - enc.safeAddByteString(val) - enc.buf.AppendByte('"') -} - -// appendComplex appends the encoded form of the provided complex128 value. -// precision specifies the encoding precision for the real and imaginary -// components of the complex number. -func (enc *jsonEncoder) appendComplex(val complex128, precision int) { - enc.addElementSeparator() - // Cast to a platform-independent, fixed-size type. - r, i := float64(real(val)), float64(imag(val)) - enc.buf.AppendByte('"') - // Because we're always in a quoted string, we can use strconv without - // special-casing NaN and +/-Inf. - enc.buf.AppendFloat(r, precision) - // If imaginary part is less than 0, minus (-) sign is added by default - // by AppendFloat. - if i >= 0 { - enc.buf.AppendByte('+') - } - enc.buf.AppendFloat(i, precision) - enc.buf.AppendByte('i') - enc.buf.AppendByte('"') -} - -func (enc *jsonEncoder) AppendDuration(val time.Duration) { - cur := enc.buf.Len() - if e := enc.EncodeDuration; e != nil { - e(val, enc) - } - if cur == enc.buf.Len() { - // User-supplied EncodeDuration is a no-op. Fall back to nanoseconds to keep - // JSON valid. 
- enc.AppendInt64(int64(val)) - } -} - -func (enc *jsonEncoder) AppendInt64(val int64) { - enc.addElementSeparator() - enc.buf.AppendInt(val) -} - -func (enc *jsonEncoder) AppendReflected(val interface{}) error { - valueBytes, err := enc.encodeReflected(val) - if err != nil { - return err - } - enc.addElementSeparator() - _, err = enc.buf.Write(valueBytes) - return err -} - -func (enc *jsonEncoder) AppendString(val string) { - enc.addElementSeparator() - enc.buf.AppendByte('"') - enc.safeAddString(val) - enc.buf.AppendByte('"') -} - -func (enc *jsonEncoder) AppendTimeLayout(time time.Time, layout string) { - enc.addElementSeparator() - enc.buf.AppendByte('"') - enc.buf.AppendTime(time, layout) - enc.buf.AppendByte('"') -} - -func (enc *jsonEncoder) AppendTime(val time.Time) { - cur := enc.buf.Len() - if e := enc.EncodeTime; e != nil { - e(val, enc) - } - if cur == enc.buf.Len() { - // User-supplied EncodeTime is a no-op. Fall back to nanos since epoch to keep - // output JSON valid. - enc.AppendInt64(val.UnixNano()) - } -} - -func (enc *jsonEncoder) AppendUint64(val uint64) { - enc.addElementSeparator() - enc.buf.AppendUint(val) -} - -func (enc *jsonEncoder) AddInt(k string, v int) { enc.AddInt64(k, int64(v)) } -func (enc *jsonEncoder) AddInt32(k string, v int32) { enc.AddInt64(k, int64(v)) } -func (enc *jsonEncoder) AddInt16(k string, v int16) { enc.AddInt64(k, int64(v)) } -func (enc *jsonEncoder) AddInt8(k string, v int8) { enc.AddInt64(k, int64(v)) } -func (enc *jsonEncoder) AddUint(k string, v uint) { enc.AddUint64(k, uint64(v)) } -func (enc *jsonEncoder) AddUint32(k string, v uint32) { enc.AddUint64(k, uint64(v)) } -func (enc *jsonEncoder) AddUint16(k string, v uint16) { enc.AddUint64(k, uint64(v)) } -func (enc *jsonEncoder) AddUint8(k string, v uint8) { enc.AddUint64(k, uint64(v)) } -func (enc *jsonEncoder) AddUintptr(k string, v uintptr) { enc.AddUint64(k, uint64(v)) } -func (enc *jsonEncoder) AppendComplex64(v complex64) { enc.appendComplex(complex128(v), 32) } -func (enc *jsonEncoder) AppendComplex128(v complex128) { enc.appendComplex(complex128(v), 64) } -func (enc *jsonEncoder) AppendFloat64(v float64) { enc.appendFloat(v, 64) } -func (enc *jsonEncoder) AppendFloat32(v float32) { enc.appendFloat(float64(v), 32) } -func (enc *jsonEncoder) AppendInt(v int) { enc.AppendInt64(int64(v)) } -func (enc *jsonEncoder) AppendInt32(v int32) { enc.AppendInt64(int64(v)) } -func (enc *jsonEncoder) AppendInt16(v int16) { enc.AppendInt64(int64(v)) } -func (enc *jsonEncoder) AppendInt8(v int8) { enc.AppendInt64(int64(v)) } -func (enc *jsonEncoder) AppendUint(v uint) { enc.AppendUint64(uint64(v)) } -func (enc *jsonEncoder) AppendUint32(v uint32) { enc.AppendUint64(uint64(v)) } -func (enc *jsonEncoder) AppendUint16(v uint16) { enc.AppendUint64(uint64(v)) } -func (enc *jsonEncoder) AppendUint8(v uint8) { enc.AppendUint64(uint64(v)) } -func (enc *jsonEncoder) AppendUintptr(v uintptr) { enc.AppendUint64(uint64(v)) } - -func (enc *jsonEncoder) Clone() Encoder { - clone := enc.clone() - clone.buf.Write(enc.buf.Bytes()) - return clone -} - -func (enc *jsonEncoder) clone() *jsonEncoder { - clone := _jsonPool.Get() - clone.EncoderConfig = enc.EncoderConfig - clone.spaced = enc.spaced - clone.openNamespaces = enc.openNamespaces - clone.buf = bufferpool.Get() - return clone -} - -func (enc *jsonEncoder) EncodeEntry(ent Entry, fields []Field) (*buffer.Buffer, error) { - final := enc.clone() - final.buf.AppendByte('{') - - if final.LevelKey != "" && final.EncodeLevel != nil { - final.addKey(final.LevelKey) - 
cur := final.buf.Len() - final.EncodeLevel(ent.Level, final) - if cur == final.buf.Len() { - // User-supplied EncodeLevel was a no-op. Fall back to strings to keep - // output JSON valid. - final.AppendString(ent.Level.String()) - } - } - if final.TimeKey != "" && !ent.Time.IsZero() { - final.AddTime(final.TimeKey, ent.Time) - } - if ent.LoggerName != "" && final.NameKey != "" { - final.addKey(final.NameKey) - cur := final.buf.Len() - nameEncoder := final.EncodeName - - // if no name encoder provided, fall back to FullNameEncoder for backwards - // compatibility - if nameEncoder == nil { - nameEncoder = FullNameEncoder - } - - nameEncoder(ent.LoggerName, final) - if cur == final.buf.Len() { - // User-supplied EncodeName was a no-op. Fall back to strings to - // keep output JSON valid. - final.AppendString(ent.LoggerName) - } - } - if ent.Caller.Defined { - if final.CallerKey != "" { - final.addKey(final.CallerKey) - cur := final.buf.Len() - final.EncodeCaller(ent.Caller, final) - if cur == final.buf.Len() { - // User-supplied EncodeCaller was a no-op. Fall back to strings to - // keep output JSON valid. - final.AppendString(ent.Caller.String()) - } - } - if final.FunctionKey != "" { - final.addKey(final.FunctionKey) - final.AppendString(ent.Caller.Function) - } - } - if final.MessageKey != "" { - final.addKey(enc.MessageKey) - final.AppendString(ent.Message) - } - if enc.buf.Len() > 0 { - final.addElementSeparator() - final.buf.Write(enc.buf.Bytes()) - } - addFields(final, fields) - final.closeOpenNamespaces() - if ent.Stack != "" && final.StacktraceKey != "" { - final.AddString(final.StacktraceKey, ent.Stack) - } - final.buf.AppendByte('}') - final.buf.AppendString(final.LineEnding) - - ret := final.buf - putJSONEncoder(final) - return ret, nil -} - -func (enc *jsonEncoder) truncate() { - enc.buf.Reset() -} - -func (enc *jsonEncoder) closeOpenNamespaces() { - for i := 0; i < enc.openNamespaces; i++ { - enc.buf.AppendByte('}') - } - enc.openNamespaces = 0 -} - -func (enc *jsonEncoder) addKey(key string) { - enc.addElementSeparator() - enc.buf.AppendByte('"') - enc.safeAddString(key) - enc.buf.AppendByte('"') - enc.buf.AppendByte(':') - if enc.spaced { - enc.buf.AppendByte(' ') - } -} - -func (enc *jsonEncoder) addElementSeparator() { - last := enc.buf.Len() - 1 - if last < 0 { - return - } - switch enc.buf.Bytes()[last] { - case '{', '[', ':', ',', ' ': - return - default: - enc.buf.AppendByte(',') - if enc.spaced { - enc.buf.AppendByte(' ') - } - } -} - -func (enc *jsonEncoder) appendFloat(val float64, bitSize int) { - enc.addElementSeparator() - switch { - case math.IsNaN(val): - enc.buf.AppendString(`"NaN"`) - case math.IsInf(val, 1): - enc.buf.AppendString(`"+Inf"`) - case math.IsInf(val, -1): - enc.buf.AppendString(`"-Inf"`) - default: - enc.buf.AppendFloat(val, bitSize) - } -} - -// safeAddString JSON-escapes a string and appends it to the internal buffer. -// Unlike the standard library's encoder, it doesn't attempt to protect the -// user from browser vulnerabilities or JSONP-related problems. -func (enc *jsonEncoder) safeAddString(s string) { - safeAppendStringLike( - (*buffer.Buffer).AppendString, - utf8.DecodeRuneInString, - enc.buf, - s, - ) -} - -// safeAddByteString is no-alloc equivalent of safeAddString(string(s)) for s []byte. -func (enc *jsonEncoder) safeAddByteString(s []byte) { - safeAppendStringLike( - (*buffer.Buffer).AppendBytes, - utf8.DecodeRune, - enc.buf, - s, - ) -} - -// safeAppendStringLike is a generic implementation of safeAddString and safeAddByteString. 
-// It appends a string or byte slice to the buffer, escaping all special characters. -func safeAppendStringLike[S []byte | string]( - // appendTo appends this string-like object to the buffer. - appendTo func(*buffer.Buffer, S), - // decodeRune decodes the next rune from the string-like object - // and returns its value and width in bytes. - decodeRune func(S) (rune, int), - buf *buffer.Buffer, - s S, -) { - // The encoding logic below works by skipping over characters - // that can be safely copied as-is, - // until a character is found that needs special handling. - // At that point, we copy everything we've seen so far, - // and then handle that special character. - // - // last is the index of the last byte that was copied to the buffer. - last := 0 - for i := 0; i < len(s); { - if s[i] >= utf8.RuneSelf { - // Character >= RuneSelf may be part of a multi-byte rune. - // They need to be decoded before we can decide how to handle them. - r, size := decodeRune(s[i:]) - if r != utf8.RuneError || size != 1 { - // No special handling required. - // Skip over this rune and continue. - i += size - continue - } - - // Invalid UTF-8 sequence. - // Replace it with the Unicode replacement character. - appendTo(buf, s[last:i]) - buf.AppendString(`\ufffd`) - - i++ - last = i - } else { - // Character < RuneSelf is a single-byte UTF-8 rune. - if s[i] >= 0x20 && s[i] != '\\' && s[i] != '"' { - // No escaping necessary. - // Skip over this character and continue. - i++ - continue - } - - // This character needs to be escaped. - appendTo(buf, s[last:i]) - switch s[i] { - case '\\', '"': - buf.AppendByte('\\') - buf.AppendByte(s[i]) - case '\n': - buf.AppendByte('\\') - buf.AppendByte('n') - case '\r': - buf.AppendByte('\\') - buf.AppendByte('r') - case '\t': - buf.AppendByte('\\') - buf.AppendByte('t') - default: - // Encode bytes < 0x20, except for the escape sequences above. - buf.AppendString(`\u00`) - buf.AppendByte(_hex[s[i]>>4]) - buf.AppendByte(_hex[s[i]&0xF]) - } - - i++ - last = i - } - } - - // add remaining - appendTo(buf, s[last:]) -} diff --git a/vendor/go.uber.org/zap/zapcore/lazy_with.go b/vendor/go.uber.org/zap/zapcore/lazy_with.go deleted file mode 100644 index 05288d6a8..000000000 --- a/vendor/go.uber.org/zap/zapcore/lazy_with.go +++ /dev/null @@ -1,54 +0,0 @@ -// Copyright (c) 2023 Uber Technologies, Inc. -// -// Permission is hereby granted, free of charge, to any person obtaining a copy -// of this software and associated documentation files (the "Software"), to deal -// in the Software without restriction, including without limitation the rights -// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -// copies of the Software, and to permit persons to whom the Software is -// furnished to do so, subject to the following conditions: -// -// The above copyright notice and this permission notice shall be included in -// all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -// THE SOFTWARE. 
- -package zapcore - -import "sync" - -type lazyWithCore struct { - Core - sync.Once - fields []Field -} - -// NewLazyWith wraps a Core with a "lazy" Core that will only encode fields if -// the logger is written to (or is further chained in a lon-lazy manner). -func NewLazyWith(core Core, fields []Field) Core { - return &lazyWithCore{ - Core: core, - fields: fields, - } -} - -func (d *lazyWithCore) initOnce() { - d.Once.Do(func() { - d.Core = d.Core.With(d.fields) - }) -} - -func (d *lazyWithCore) With(fields []Field) Core { - d.initOnce() - return d.Core.With(fields) -} - -func (d *lazyWithCore) Check(e Entry, ce *CheckedEntry) *CheckedEntry { - d.initOnce() - return d.Core.Check(e, ce) -} diff --git a/vendor/go.uber.org/zap/zapcore/level.go b/vendor/go.uber.org/zap/zapcore/level.go deleted file mode 100644 index e01a24131..000000000 --- a/vendor/go.uber.org/zap/zapcore/level.go +++ /dev/null @@ -1,229 +0,0 @@ -// Copyright (c) 2016 Uber Technologies, Inc. -// -// Permission is hereby granted, free of charge, to any person obtaining a copy -// of this software and associated documentation files (the "Software"), to deal -// in the Software without restriction, including without limitation the rights -// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -// copies of the Software, and to permit persons to whom the Software is -// furnished to do so, subject to the following conditions: -// -// The above copyright notice and this permission notice shall be included in -// all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -// THE SOFTWARE. - -package zapcore - -import ( - "bytes" - "errors" - "fmt" -) - -var errUnmarshalNilLevel = errors.New("can't unmarshal a nil *Level") - -// A Level is a logging priority. Higher levels are more important. -type Level int8 - -const ( - // DebugLevel logs are typically voluminous, and are usually disabled in - // production. - DebugLevel Level = iota - 1 - // InfoLevel is the default logging priority. - InfoLevel - // WarnLevel logs are more important than Info, but don't need individual - // human review. - WarnLevel - // ErrorLevel logs are high-priority. If an application is running smoothly, - // it shouldn't generate any error-level logs. - ErrorLevel - // DPanicLevel logs are particularly important errors. In development the - // logger panics after writing the message. - DPanicLevel - // PanicLevel logs a message, then panics. - PanicLevel - // FatalLevel logs a message, then calls os.Exit(1). - FatalLevel - - _minLevel = DebugLevel - _maxLevel = FatalLevel - - // InvalidLevel is an invalid value for Level. - // - // Core implementations may panic if they see messages of this level. - InvalidLevel = _maxLevel + 1 -) - -// ParseLevel parses a level based on the lower-case or all-caps ASCII -// representation of the log level. If the provided ASCII representation is -// invalid an error is returned. -// -// This is particularly useful when dealing with text input to configure log -// levels. 
-func ParseLevel(text string) (Level, error) { - var level Level - err := level.UnmarshalText([]byte(text)) - return level, err -} - -type leveledEnabler interface { - LevelEnabler - - Level() Level -} - -// LevelOf reports the minimum enabled log level for the given LevelEnabler -// from Zap's supported log levels, or [InvalidLevel] if none of them are -// enabled. -// -// A LevelEnabler may implement a 'Level() Level' method to override the -// behavior of this function. -// -// func (c *core) Level() Level { -// return c.currentLevel -// } -// -// It is recommended that [Core] implementations that wrap other cores use -// LevelOf to retrieve the level of the wrapped core. For example, -// -// func (c *coreWrapper) Level() Level { -// return zapcore.LevelOf(c.wrappedCore) -// } -func LevelOf(enab LevelEnabler) Level { - if lvler, ok := enab.(leveledEnabler); ok { - return lvler.Level() - } - - for lvl := _minLevel; lvl <= _maxLevel; lvl++ { - if enab.Enabled(lvl) { - return lvl - } - } - - return InvalidLevel -} - -// String returns a lower-case ASCII representation of the log level. -func (l Level) String() string { - switch l { - case DebugLevel: - return "debug" - case InfoLevel: - return "info" - case WarnLevel: - return "warn" - case ErrorLevel: - return "error" - case DPanicLevel: - return "dpanic" - case PanicLevel: - return "panic" - case FatalLevel: - return "fatal" - default: - return fmt.Sprintf("Level(%d)", l) - } -} - -// CapitalString returns an all-caps ASCII representation of the log level. -func (l Level) CapitalString() string { - // Printing levels in all-caps is common enough that we should export this - // functionality. - switch l { - case DebugLevel: - return "DEBUG" - case InfoLevel: - return "INFO" - case WarnLevel: - return "WARN" - case ErrorLevel: - return "ERROR" - case DPanicLevel: - return "DPANIC" - case PanicLevel: - return "PANIC" - case FatalLevel: - return "FATAL" - default: - return fmt.Sprintf("LEVEL(%d)", l) - } -} - -// MarshalText marshals the Level to text. Note that the text representation -// drops the -Level suffix (see example). -func (l Level) MarshalText() ([]byte, error) { - return []byte(l.String()), nil -} - -// UnmarshalText unmarshals text to a level. Like MarshalText, UnmarshalText -// expects the text representation of a Level to drop the -Level suffix (see -// example). -// -// In particular, this makes it easy to configure logging levels using YAML, -// TOML, or JSON files. -func (l *Level) UnmarshalText(text []byte) error { - if l == nil { - return errUnmarshalNilLevel - } - if !l.unmarshalText(text) && !l.unmarshalText(bytes.ToLower(text)) { - return fmt.Errorf("unrecognized level: %q", text) - } - return nil -} - -func (l *Level) unmarshalText(text []byte) bool { - switch string(text) { - case "debug", "DEBUG": - *l = DebugLevel - case "info", "INFO", "": // make the zero value useful - *l = InfoLevel - case "warn", "WARN": - *l = WarnLevel - case "error", "ERROR": - *l = ErrorLevel - case "dpanic", "DPANIC": - *l = DPanicLevel - case "panic", "PANIC": - *l = PanicLevel - case "fatal", "FATAL": - *l = FatalLevel - default: - return false - } - return true -} - -// Set sets the level for the flag.Value interface. -func (l *Level) Set(s string) error { - return l.UnmarshalText([]byte(s)) -} - -// Get gets the level for the flag.Getter interface. -func (l *Level) Get() interface{} { - return *l -} - -// Enabled returns true if the given level is at or above this level. 
-func (l Level) Enabled(lvl Level) bool { - return lvl >= l -} - -// LevelEnabler decides whether a given logging level is enabled when logging a -// message. -// -// Enablers are intended to be used to implement deterministic filters; -// concerns like sampling are better implemented as a Core. -// -// Each concrete Level value implements a static LevelEnabler which returns -// true for itself and all higher logging levels. For example WarnLevel.Enabled() -// will return true for WarnLevel, ErrorLevel, DPanicLevel, PanicLevel, and -// FatalLevel, but return false for InfoLevel and DebugLevel. -type LevelEnabler interface { - Enabled(Level) bool -} diff --git a/vendor/go.uber.org/zap/zapcore/level_strings.go b/vendor/go.uber.org/zap/zapcore/level_strings.go deleted file mode 100644 index 7af8dadcb..000000000 --- a/vendor/go.uber.org/zap/zapcore/level_strings.go +++ /dev/null @@ -1,46 +0,0 @@ -// Copyright (c) 2016 Uber Technologies, Inc. -// -// Permission is hereby granted, free of charge, to any person obtaining a copy -// of this software and associated documentation files (the "Software"), to deal -// in the Software without restriction, including without limitation the rights -// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -// copies of the Software, and to permit persons to whom the Software is -// furnished to do so, subject to the following conditions: -// -// The above copyright notice and this permission notice shall be included in -// all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -// THE SOFTWARE. - -package zapcore - -import "go.uber.org/zap/internal/color" - -var ( - _levelToColor = map[Level]color.Color{ - DebugLevel: color.Magenta, - InfoLevel: color.Blue, - WarnLevel: color.Yellow, - ErrorLevel: color.Red, - DPanicLevel: color.Red, - PanicLevel: color.Red, - FatalLevel: color.Red, - } - _unknownLevelColor = color.Red - - _levelToLowercaseColorString = make(map[Level]string, len(_levelToColor)) - _levelToCapitalColorString = make(map[Level]string, len(_levelToColor)) -) - -func init() { - for level, color := range _levelToColor { - _levelToLowercaseColorString[level] = color.Add(level.String()) - _levelToCapitalColorString[level] = color.Add(level.CapitalString()) - } -} diff --git a/vendor/go.uber.org/zap/zapcore/marshaler.go b/vendor/go.uber.org/zap/zapcore/marshaler.go deleted file mode 100644 index c3c55ba0d..000000000 --- a/vendor/go.uber.org/zap/zapcore/marshaler.go +++ /dev/null @@ -1,61 +0,0 @@ -// Copyright (c) 2016 Uber Technologies, Inc. 
-// -// Permission is hereby granted, free of charge, to any person obtaining a copy -// of this software and associated documentation files (the "Software"), to deal -// in the Software without restriction, including without limitation the rights -// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -// copies of the Software, and to permit persons to whom the Software is -// furnished to do so, subject to the following conditions: -// -// The above copyright notice and this permission notice shall be included in -// all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -// THE SOFTWARE. - -package zapcore - -// ObjectMarshaler allows user-defined types to efficiently add themselves to the -// logging context, and to selectively omit information which shouldn't be -// included in logs (e.g., passwords). -// -// Note: ObjectMarshaler is only used when zap.Object is used or when -// passed directly to zap.Any. It is not used when reflection-based -// encoding is used. -type ObjectMarshaler interface { - MarshalLogObject(ObjectEncoder) error -} - -// ObjectMarshalerFunc is a type adapter that turns a function into an -// ObjectMarshaler. -type ObjectMarshalerFunc func(ObjectEncoder) error - -// MarshalLogObject calls the underlying function. -func (f ObjectMarshalerFunc) MarshalLogObject(enc ObjectEncoder) error { - return f(enc) -} - -// ArrayMarshaler allows user-defined types to efficiently add themselves to the -// logging context, and to selectively omit information which shouldn't be -// included in logs (e.g., passwords). -// -// Note: ArrayMarshaler is only used when zap.Array is used or when -// passed directly to zap.Any. It is not used when reflection-based -// encoding is used. -type ArrayMarshaler interface { - MarshalLogArray(ArrayEncoder) error -} - -// ArrayMarshalerFunc is a type adapter that turns a function into an -// ArrayMarshaler. -type ArrayMarshalerFunc func(ArrayEncoder) error - -// MarshalLogArray calls the underlying function. -func (f ArrayMarshalerFunc) MarshalLogArray(enc ArrayEncoder) error { - return f(enc) -} diff --git a/vendor/go.uber.org/zap/zapcore/memory_encoder.go b/vendor/go.uber.org/zap/zapcore/memory_encoder.go deleted file mode 100644 index dfead0829..000000000 --- a/vendor/go.uber.org/zap/zapcore/memory_encoder.go +++ /dev/null @@ -1,179 +0,0 @@ -// Copyright (c) 2016 Uber Technologies, Inc. -// -// Permission is hereby granted, free of charge, to any person obtaining a copy -// of this software and associated documentation files (the "Software"), to deal -// in the Software without restriction, including without limitation the rights -// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -// copies of the Software, and to permit persons to whom the Software is -// furnished to do so, subject to the following conditions: -// -// The above copyright notice and this permission notice shall be included in -// all copies or substantial portions of the Software. 
-// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -// THE SOFTWARE. - -package zapcore - -import "time" - -// MapObjectEncoder is an ObjectEncoder backed by a simple -// map[string]interface{}. It's not fast enough for production use, but it's -// helpful in tests. -type MapObjectEncoder struct { - // Fields contains the entire encoded log context. - Fields map[string]interface{} - // cur is a pointer to the namespace we're currently writing to. - cur map[string]interface{} -} - -// NewMapObjectEncoder creates a new map-backed ObjectEncoder. -func NewMapObjectEncoder() *MapObjectEncoder { - m := make(map[string]interface{}) - return &MapObjectEncoder{ - Fields: m, - cur: m, - } -} - -// AddArray implements ObjectEncoder. -func (m *MapObjectEncoder) AddArray(key string, v ArrayMarshaler) error { - arr := &sliceArrayEncoder{elems: make([]interface{}, 0)} - err := v.MarshalLogArray(arr) - m.cur[key] = arr.elems - return err -} - -// AddObject implements ObjectEncoder. -func (m *MapObjectEncoder) AddObject(k string, v ObjectMarshaler) error { - newMap := NewMapObjectEncoder() - m.cur[k] = newMap.Fields - return v.MarshalLogObject(newMap) -} - -// AddBinary implements ObjectEncoder. -func (m *MapObjectEncoder) AddBinary(k string, v []byte) { m.cur[k] = v } - -// AddByteString implements ObjectEncoder. -func (m *MapObjectEncoder) AddByteString(k string, v []byte) { m.cur[k] = string(v) } - -// AddBool implements ObjectEncoder. -func (m *MapObjectEncoder) AddBool(k string, v bool) { m.cur[k] = v } - -// AddDuration implements ObjectEncoder. -func (m MapObjectEncoder) AddDuration(k string, v time.Duration) { m.cur[k] = v } - -// AddComplex128 implements ObjectEncoder. -func (m *MapObjectEncoder) AddComplex128(k string, v complex128) { m.cur[k] = v } - -// AddComplex64 implements ObjectEncoder. -func (m *MapObjectEncoder) AddComplex64(k string, v complex64) { m.cur[k] = v } - -// AddFloat64 implements ObjectEncoder. -func (m *MapObjectEncoder) AddFloat64(k string, v float64) { m.cur[k] = v } - -// AddFloat32 implements ObjectEncoder. -func (m *MapObjectEncoder) AddFloat32(k string, v float32) { m.cur[k] = v } - -// AddInt implements ObjectEncoder. -func (m *MapObjectEncoder) AddInt(k string, v int) { m.cur[k] = v } - -// AddInt64 implements ObjectEncoder. -func (m *MapObjectEncoder) AddInt64(k string, v int64) { m.cur[k] = v } - -// AddInt32 implements ObjectEncoder. -func (m *MapObjectEncoder) AddInt32(k string, v int32) { m.cur[k] = v } - -// AddInt16 implements ObjectEncoder. -func (m *MapObjectEncoder) AddInt16(k string, v int16) { m.cur[k] = v } - -// AddInt8 implements ObjectEncoder. -func (m *MapObjectEncoder) AddInt8(k string, v int8) { m.cur[k] = v } - -// AddString implements ObjectEncoder. -func (m *MapObjectEncoder) AddString(k string, v string) { m.cur[k] = v } - -// AddTime implements ObjectEncoder. -func (m MapObjectEncoder) AddTime(k string, v time.Time) { m.cur[k] = v } - -// AddUint implements ObjectEncoder. -func (m *MapObjectEncoder) AddUint(k string, v uint) { m.cur[k] = v } - -// AddUint64 implements ObjectEncoder. 
-func (m *MapObjectEncoder) AddUint64(k string, v uint64) { m.cur[k] = v } - -// AddUint32 implements ObjectEncoder. -func (m *MapObjectEncoder) AddUint32(k string, v uint32) { m.cur[k] = v } - -// AddUint16 implements ObjectEncoder. -func (m *MapObjectEncoder) AddUint16(k string, v uint16) { m.cur[k] = v } - -// AddUint8 implements ObjectEncoder. -func (m *MapObjectEncoder) AddUint8(k string, v uint8) { m.cur[k] = v } - -// AddUintptr implements ObjectEncoder. -func (m *MapObjectEncoder) AddUintptr(k string, v uintptr) { m.cur[k] = v } - -// AddReflected implements ObjectEncoder. -func (m *MapObjectEncoder) AddReflected(k string, v interface{}) error { - m.cur[k] = v - return nil -} - -// OpenNamespace implements ObjectEncoder. -func (m *MapObjectEncoder) OpenNamespace(k string) { - ns := make(map[string]interface{}) - m.cur[k] = ns - m.cur = ns -} - -// sliceArrayEncoder is an ArrayEncoder backed by a simple []interface{}. Like -// the MapObjectEncoder, it's not designed for production use. -type sliceArrayEncoder struct { - elems []interface{} -} - -func (s *sliceArrayEncoder) AppendArray(v ArrayMarshaler) error { - enc := &sliceArrayEncoder{} - err := v.MarshalLogArray(enc) - s.elems = append(s.elems, enc.elems) - return err -} - -func (s *sliceArrayEncoder) AppendObject(v ObjectMarshaler) error { - m := NewMapObjectEncoder() - err := v.MarshalLogObject(m) - s.elems = append(s.elems, m.Fields) - return err -} - -func (s *sliceArrayEncoder) AppendReflected(v interface{}) error { - s.elems = append(s.elems, v) - return nil -} - -func (s *sliceArrayEncoder) AppendBool(v bool) { s.elems = append(s.elems, v) } -func (s *sliceArrayEncoder) AppendByteString(v []byte) { s.elems = append(s.elems, string(v)) } -func (s *sliceArrayEncoder) AppendComplex128(v complex128) { s.elems = append(s.elems, v) } -func (s *sliceArrayEncoder) AppendComplex64(v complex64) { s.elems = append(s.elems, v) } -func (s *sliceArrayEncoder) AppendDuration(v time.Duration) { s.elems = append(s.elems, v) } -func (s *sliceArrayEncoder) AppendFloat64(v float64) { s.elems = append(s.elems, v) } -func (s *sliceArrayEncoder) AppendFloat32(v float32) { s.elems = append(s.elems, v) } -func (s *sliceArrayEncoder) AppendInt(v int) { s.elems = append(s.elems, v) } -func (s *sliceArrayEncoder) AppendInt64(v int64) { s.elems = append(s.elems, v) } -func (s *sliceArrayEncoder) AppendInt32(v int32) { s.elems = append(s.elems, v) } -func (s *sliceArrayEncoder) AppendInt16(v int16) { s.elems = append(s.elems, v) } -func (s *sliceArrayEncoder) AppendInt8(v int8) { s.elems = append(s.elems, v) } -func (s *sliceArrayEncoder) AppendString(v string) { s.elems = append(s.elems, v) } -func (s *sliceArrayEncoder) AppendTime(v time.Time) { s.elems = append(s.elems, v) } -func (s *sliceArrayEncoder) AppendUint(v uint) { s.elems = append(s.elems, v) } -func (s *sliceArrayEncoder) AppendUint64(v uint64) { s.elems = append(s.elems, v) } -func (s *sliceArrayEncoder) AppendUint32(v uint32) { s.elems = append(s.elems, v) } -func (s *sliceArrayEncoder) AppendUint16(v uint16) { s.elems = append(s.elems, v) } -func (s *sliceArrayEncoder) AppendUint8(v uint8) { s.elems = append(s.elems, v) } -func (s *sliceArrayEncoder) AppendUintptr(v uintptr) { s.elems = append(s.elems, v) } diff --git a/vendor/go.uber.org/zap/zapcore/reflected_encoder.go b/vendor/go.uber.org/zap/zapcore/reflected_encoder.go deleted file mode 100644 index 8746360ec..000000000 --- a/vendor/go.uber.org/zap/zapcore/reflected_encoder.go +++ /dev/null @@ -1,41 +0,0 @@ -// Copyright (c) 2016 
Uber Technologies, Inc. -// -// Permission is hereby granted, free of charge, to any person obtaining a copy -// of this software and associated documentation files (the "Software"), to deal -// in the Software without restriction, including without limitation the rights -// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -// copies of the Software, and to permit persons to whom the Software is -// furnished to do so, subject to the following conditions: -// -// The above copyright notice and this permission notice shall be included in -// all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -// THE SOFTWARE. - -package zapcore - -import ( - "encoding/json" - "io" -) - -// ReflectedEncoder serializes log fields that can't be serialized with Zap's -// JSON encoder. These have the ReflectType field type. -// Use EncoderConfig.NewReflectedEncoder to set this. -type ReflectedEncoder interface { - // Encode encodes and writes to the underlying data stream. - Encode(interface{}) error -} - -func defaultReflectedEncoder(w io.Writer) ReflectedEncoder { - enc := json.NewEncoder(w) - // For consistency with our custom JSON encoder. - enc.SetEscapeHTML(false) - return enc -} diff --git a/vendor/go.uber.org/zap/zapcore/sampler.go b/vendor/go.uber.org/zap/zapcore/sampler.go deleted file mode 100644 index b7c093a4f..000000000 --- a/vendor/go.uber.org/zap/zapcore/sampler.go +++ /dev/null @@ -1,229 +0,0 @@ -// Copyright (c) 2016-2022 Uber Technologies, Inc. -// -// Permission is hereby granted, free of charge, to any person obtaining a copy -// of this software and associated documentation files (the "Software"), to deal -// in the Software without restriction, including without limitation the rights -// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -// copies of the Software, and to permit persons to whom the Software is -// furnished to do so, subject to the following conditions: -// -// The above copyright notice and this permission notice shall be included in -// all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -// THE SOFTWARE. 
- -package zapcore - -import ( - "sync/atomic" - "time" -) - -const ( - _numLevels = _maxLevel - _minLevel + 1 - _countersPerLevel = 4096 -) - -type counter struct { - resetAt atomic.Int64 - counter atomic.Uint64 -} - -type counters [_numLevels][_countersPerLevel]counter - -func newCounters() *counters { - return &counters{} -} - -func (cs *counters) get(lvl Level, key string) *counter { - i := lvl - _minLevel - j := fnv32a(key) % _countersPerLevel - return &cs[i][j] -} - -// fnv32a, adapted from "hash/fnv", but without a []byte(string) alloc -func fnv32a(s string) uint32 { - const ( - offset32 = 2166136261 - prime32 = 16777619 - ) - hash := uint32(offset32) - for i := 0; i < len(s); i++ { - hash ^= uint32(s[i]) - hash *= prime32 - } - return hash -} - -func (c *counter) IncCheckReset(t time.Time, tick time.Duration) uint64 { - tn := t.UnixNano() - resetAfter := c.resetAt.Load() - if resetAfter > tn { - return c.counter.Add(1) - } - - c.counter.Store(1) - - newResetAfter := tn + tick.Nanoseconds() - if !c.resetAt.CompareAndSwap(resetAfter, newResetAfter) { - // We raced with another goroutine trying to reset, and it also reset - // the counter to 1, so we need to reincrement the counter. - return c.counter.Add(1) - } - - return 1 -} - -// SamplingDecision is a decision represented as a bit field made by sampler. -// More decisions may be added in the future. -type SamplingDecision uint32 - -const ( - // LogDropped indicates that the Sampler dropped a log entry. - LogDropped SamplingDecision = 1 << iota - // LogSampled indicates that the Sampler sampled a log entry. - LogSampled -) - -// optionFunc wraps a func so it satisfies the SamplerOption interface. -type optionFunc func(*sampler) - -func (f optionFunc) apply(s *sampler) { - f(s) -} - -// SamplerOption configures a Sampler. -type SamplerOption interface { - apply(*sampler) -} - -// nopSamplingHook is the default hook used by sampler. -func nopSamplingHook(Entry, SamplingDecision) {} - -// SamplerHook registers a function which will be called when Sampler makes a -// decision. -// -// This hook may be used to get visibility into the performance of the sampler. -// For example, use it to track metrics of dropped versus sampled logs. -// -// var dropped atomic.Int64 -// zapcore.SamplerHook(func(ent zapcore.Entry, dec zapcore.SamplingDecision) { -// if dec&zapcore.LogDropped > 0 { -// dropped.Inc() -// } -// }) -func SamplerHook(hook func(entry Entry, dec SamplingDecision)) SamplerOption { - return optionFunc(func(s *sampler) { - s.hook = hook - }) -} - -// NewSamplerWithOptions creates a Core that samples incoming entries, which -// caps the CPU and I/O load of logging while attempting to preserve a -// representative subset of your logs. -// -// Zap samples by logging the first N entries with a given level and message -// each tick. If more Entries with the same level and message are seen during -// the same interval, every Mth message is logged and the rest are dropped. -// -// For example, -// -// core = NewSamplerWithOptions(core, time.Second, 10, 5) -// -// This will log the first 10 log entries with the same level and message -// in a one second interval as-is. Following that, it will allow through -// every 5th log entry with the same level and message in that interval. -// -// If thereafter is zero, the Core will drop all log entries after the first N -// in that interval. -// -// Sampler can be configured to report sampling decisions with the SamplerHook -// option. 
-// -// Keep in mind that Zap's sampling implementation is optimized for speed over -// absolute precision; under load, each tick may be slightly over- or -// under-sampled. -func NewSamplerWithOptions(core Core, tick time.Duration, first, thereafter int, opts ...SamplerOption) Core { - s := &sampler{ - Core: core, - tick: tick, - counts: newCounters(), - first: uint64(first), - thereafter: uint64(thereafter), - hook: nopSamplingHook, - } - for _, opt := range opts { - opt.apply(s) - } - - return s -} - -type sampler struct { - Core - - counts *counters - tick time.Duration - first, thereafter uint64 - hook func(Entry, SamplingDecision) -} - -var ( - _ Core = (*sampler)(nil) - _ leveledEnabler = (*sampler)(nil) -) - -// NewSampler creates a Core that samples incoming entries, which -// caps the CPU and I/O load of logging while attempting to preserve a -// representative subset of your logs. -// -// Zap samples by logging the first N entries with a given level and message -// each tick. If more Entries with the same level and message are seen during -// the same interval, every Mth message is logged and the rest are dropped. -// -// Keep in mind that zap's sampling implementation is optimized for speed over -// absolute precision; under load, each tick may be slightly over- or -// under-sampled. -// -// Deprecated: use NewSamplerWithOptions. -func NewSampler(core Core, tick time.Duration, first, thereafter int) Core { - return NewSamplerWithOptions(core, tick, first, thereafter) -} - -func (s *sampler) Level() Level { - return LevelOf(s.Core) -} - -func (s *sampler) With(fields []Field) Core { - return &sampler{ - Core: s.Core.With(fields), - tick: s.tick, - counts: s.counts, - first: s.first, - thereafter: s.thereafter, - hook: s.hook, - } -} - -func (s *sampler) Check(ent Entry, ce *CheckedEntry) *CheckedEntry { - if !s.Enabled(ent.Level) { - return ce - } - - if ent.Level >= _minLevel && ent.Level <= _maxLevel { - counter := s.counts.get(ent.Level, ent.Message) - n := counter.IncCheckReset(ent.Time, s.tick) - if n > s.first && (s.thereafter == 0 || (n-s.first)%s.thereafter != 0) { - s.hook(ent, LogDropped) - return ce - } - s.hook(ent, LogSampled) - } - return s.Core.Check(ent, ce) -} diff --git a/vendor/go.uber.org/zap/zapcore/tee.go b/vendor/go.uber.org/zap/zapcore/tee.go deleted file mode 100644 index 9bb32f055..000000000 --- a/vendor/go.uber.org/zap/zapcore/tee.go +++ /dev/null @@ -1,96 +0,0 @@ -// Copyright (c) 2016-2022 Uber Technologies, Inc. -// -// Permission is hereby granted, free of charge, to any person obtaining a copy -// of this software and associated documentation files (the "Software"), to deal -// in the Software without restriction, including without limitation the rights -// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -// copies of the Software, and to permit persons to whom the Software is -// furnished to do so, subject to the following conditions: -// -// The above copyright notice and this permission notice shall be included in -// all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE -// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -// THE SOFTWARE. - -package zapcore - -import "go.uber.org/multierr" - -type multiCore []Core - -var ( - _ leveledEnabler = multiCore(nil) - _ Core = multiCore(nil) -) - -// NewTee creates a Core that duplicates log entries into two or more -// underlying Cores. -// -// Calling it with a single Core returns the input unchanged, and calling -// it with no input returns a no-op Core. -func NewTee(cores ...Core) Core { - switch len(cores) { - case 0: - return NewNopCore() - case 1: - return cores[0] - default: - return multiCore(cores) - } -} - -func (mc multiCore) With(fields []Field) Core { - clone := make(multiCore, len(mc)) - for i := range mc { - clone[i] = mc[i].With(fields) - } - return clone -} - -func (mc multiCore) Level() Level { - minLvl := _maxLevel // mc is never empty - for i := range mc { - if lvl := LevelOf(mc[i]); lvl < minLvl { - minLvl = lvl - } - } - return minLvl -} - -func (mc multiCore) Enabled(lvl Level) bool { - for i := range mc { - if mc[i].Enabled(lvl) { - return true - } - } - return false -} - -func (mc multiCore) Check(ent Entry, ce *CheckedEntry) *CheckedEntry { - for i := range mc { - ce = mc[i].Check(ent, ce) - } - return ce -} - -func (mc multiCore) Write(ent Entry, fields []Field) error { - var err error - for i := range mc { - err = multierr.Append(err, mc[i].Write(ent, fields)) - } - return err -} - -func (mc multiCore) Sync() error { - var err error - for i := range mc { - err = multierr.Append(err, mc[i].Sync()) - } - return err -} diff --git a/vendor/go.uber.org/zap/zapcore/write_syncer.go b/vendor/go.uber.org/zap/zapcore/write_syncer.go deleted file mode 100644 index d4a1af3d0..000000000 --- a/vendor/go.uber.org/zap/zapcore/write_syncer.go +++ /dev/null @@ -1,122 +0,0 @@ -// Copyright (c) 2016 Uber Technologies, Inc. -// -// Permission is hereby granted, free of charge, to any person obtaining a copy -// of this software and associated documentation files (the "Software"), to deal -// in the Software without restriction, including without limitation the rights -// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -// copies of the Software, and to permit persons to whom the Software is -// furnished to do so, subject to the following conditions: -// -// The above copyright notice and this permission notice shall be included in -// all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -// THE SOFTWARE. - -package zapcore - -import ( - "io" - "sync" - - "go.uber.org/multierr" -) - -// A WriteSyncer is an io.Writer that can also flush any buffered data. Note -// that *os.File (and thus, os.Stderr and os.Stdout) implement WriteSyncer. -type WriteSyncer interface { - io.Writer - Sync() error -} - -// AddSync converts an io.Writer to a WriteSyncer. 
It attempts to be -// intelligent: if the concrete type of the io.Writer implements WriteSyncer, -// we'll use the existing Sync method. If it doesn't, we'll add a no-op Sync. -func AddSync(w io.Writer) WriteSyncer { - switch w := w.(type) { - case WriteSyncer: - return w - default: - return writerWrapper{w} - } -} - -type lockedWriteSyncer struct { - sync.Mutex - ws WriteSyncer -} - -// Lock wraps a WriteSyncer in a mutex to make it safe for concurrent use. In -// particular, *os.Files must be locked before use. -func Lock(ws WriteSyncer) WriteSyncer { - if _, ok := ws.(*lockedWriteSyncer); ok { - // no need to layer on another lock - return ws - } - return &lockedWriteSyncer{ws: ws} -} - -func (s *lockedWriteSyncer) Write(bs []byte) (int, error) { - s.Lock() - n, err := s.ws.Write(bs) - s.Unlock() - return n, err -} - -func (s *lockedWriteSyncer) Sync() error { - s.Lock() - err := s.ws.Sync() - s.Unlock() - return err -} - -type writerWrapper struct { - io.Writer -} - -func (w writerWrapper) Sync() error { - return nil -} - -type multiWriteSyncer []WriteSyncer - -// NewMultiWriteSyncer creates a WriteSyncer that duplicates its writes -// and sync calls, much like io.MultiWriter. -func NewMultiWriteSyncer(ws ...WriteSyncer) WriteSyncer { - if len(ws) == 1 { - return ws[0] - } - return multiWriteSyncer(ws) -} - -// See https://golang.org/src/io/multi.go -// When not all underlying syncers write the same number of bytes, -// the smallest number is returned even though Write() is called on -// all of them. -func (ws multiWriteSyncer) Write(p []byte) (int, error) { - var writeErr error - nWritten := 0 - for _, w := range ws { - n, err := w.Write(p) - writeErr = multierr.Append(writeErr, err) - if nWritten == 0 && n != 0 { - nWritten = n - } else if n < nWritten { - nWritten = n - } - } - return nWritten, writeErr -} - -func (ws multiWriteSyncer) Sync() error { - var err error - for _, w := range ws { - err = multierr.Append(err, w.Sync()) - } - return err -} diff --git a/vendor/golang.org/x/exp/LICENSE b/vendor/golang.org/x/exp/LICENSE deleted file mode 100644 index 6a66aea5e..000000000 --- a/vendor/golang.org/x/exp/LICENSE +++ /dev/null @@ -1,27 +0,0 @@ -Copyright (c) 2009 The Go Authors. All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are -met: - - * Redistributions of source code must retain the above copyright -notice, this list of conditions and the following disclaimer. - * Redistributions in binary form must reproduce the above -copyright notice, this list of conditions and the following disclaimer -in the documentation and/or other materials provided with the -distribution. - * Neither the name of Google Inc. nor the names of its -contributors may be used to endorse or promote products derived from -this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT -OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/vendor/golang.org/x/exp/PATENTS b/vendor/golang.org/x/exp/PATENTS deleted file mode 100644 index 733099041..000000000 --- a/vendor/golang.org/x/exp/PATENTS +++ /dev/null @@ -1,22 +0,0 @@ -Additional IP Rights Grant (Patents) - -"This implementation" means the copyrightable works distributed by -Google as part of the Go project. - -Google hereby grants to You a perpetual, worldwide, non-exclusive, -no-charge, royalty-free, irrevocable (except as stated in this section) -patent license to make, have made, use, offer to sell, sell, import, -transfer and otherwise run, modify and propagate the contents of this -implementation of Go, where such license applies only to those patent -claims, both currently owned or controlled by Google and acquired in -the future, licensable by Google that are necessarily infringed by this -implementation of Go. This grant does not include claims that would be -infringed only as a consequence of further modification of this -implementation. If you or your agent or exclusive licensee institute or -order or agree to the institution of patent litigation against any -entity (including a cross-claim or counterclaim in a lawsuit) alleging -that this implementation of Go or any code incorporated within this -implementation of Go constitutes direct or contributory patent -infringement, or inducement of patent infringement, then any patent -rights granted to you under this License for this implementation of Go -shall terminate as of the date such litigation is filed. diff --git a/vendor/golang.org/x/exp/constraints/constraints.go b/vendor/golang.org/x/exp/constraints/constraints.go deleted file mode 100644 index 2c033dff4..000000000 --- a/vendor/golang.org/x/exp/constraints/constraints.go +++ /dev/null @@ -1,50 +0,0 @@ -// Copyright 2021 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Package constraints defines a set of useful constraints to be used -// with type parameters. -package constraints - -// Signed is a constraint that permits any signed integer type. -// If future releases of Go add new predeclared signed integer types, -// this constraint will be modified to include them. -type Signed interface { - ~int | ~int8 | ~int16 | ~int32 | ~int64 -} - -// Unsigned is a constraint that permits any unsigned integer type. -// If future releases of Go add new predeclared unsigned integer types, -// this constraint will be modified to include them. -type Unsigned interface { - ~uint | ~uint8 | ~uint16 | ~uint32 | ~uint64 | ~uintptr -} - -// Integer is a constraint that permits any integer type. -// If future releases of Go add new predeclared integer types, -// this constraint will be modified to include them. -type Integer interface { - Signed | Unsigned -} - -// Float is a constraint that permits any floating-point type. -// If future releases of Go add new predeclared floating-point types, -// this constraint will be modified to include them. 
-type Float interface { - ~float32 | ~float64 -} - -// Complex is a constraint that permits any complex numeric type. -// If future releases of Go add new predeclared complex numeric types, -// this constraint will be modified to include them. -type Complex interface { - ~complex64 | ~complex128 -} - -// Ordered is a constraint that permits any ordered type: any type -// that supports the operators < <= >= >. -// If future releases of Go add new ordered types, -// this constraint will be modified to include them. -type Ordered interface { - Integer | Float | ~string -} diff --git a/vendor/golang.org/x/exp/maps/maps.go b/vendor/golang.org/x/exp/maps/maps.go deleted file mode 100644 index ecc0dabb7..000000000 --- a/vendor/golang.org/x/exp/maps/maps.go +++ /dev/null @@ -1,94 +0,0 @@ -// Copyright 2021 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Package maps defines various functions useful with maps of any type. -package maps - -// Keys returns the keys of the map m. -// The keys will be in an indeterminate order. -func Keys[M ~map[K]V, K comparable, V any](m M) []K { - r := make([]K, 0, len(m)) - for k := range m { - r = append(r, k) - } - return r -} - -// Values returns the values of the map m. -// The values will be in an indeterminate order. -func Values[M ~map[K]V, K comparable, V any](m M) []V { - r := make([]V, 0, len(m)) - for _, v := range m { - r = append(r, v) - } - return r -} - -// Equal reports whether two maps contain the same key/value pairs. -// Values are compared using ==. -func Equal[M1, M2 ~map[K]V, K, V comparable](m1 M1, m2 M2) bool { - if len(m1) != len(m2) { - return false - } - for k, v1 := range m1 { - if v2, ok := m2[k]; !ok || v1 != v2 { - return false - } - } - return true -} - -// EqualFunc is like Equal, but compares values using eq. -// Keys are still compared with ==. -func EqualFunc[M1 ~map[K]V1, M2 ~map[K]V2, K comparable, V1, V2 any](m1 M1, m2 M2, eq func(V1, V2) bool) bool { - if len(m1) != len(m2) { - return false - } - for k, v1 := range m1 { - if v2, ok := m2[k]; !ok || !eq(v1, v2) { - return false - } - } - return true -} - -// Clear removes all entries from m, leaving it empty. -func Clear[M ~map[K]V, K comparable, V any](m M) { - for k := range m { - delete(m, k) - } -} - -// Clone returns a copy of m. This is a shallow clone: -// the new keys and values are set using ordinary assignment. -func Clone[M ~map[K]V, K comparable, V any](m M) M { - // Preserve nil in case it matters. - if m == nil { - return nil - } - r := make(M, len(m)) - for k, v := range m { - r[k] = v - } - return r -} - -// Copy copies all key/value pairs in src adding them to dst. -// When a key in src is already present in dst, -// the value in dst will be overwritten by the value associated -// with the key in src. -func Copy[M1 ~map[K]V, M2 ~map[K]V, K comparable, V any](dst M1, src M2) { - for k, v := range src { - dst[k] = v - } -} - -// DeleteFunc deletes any key/value pairs from m for which del returns true. -func DeleteFunc[M ~map[K]V, K comparable, V any](m M, del func(K, V) bool) { - for k, v := range m { - if del(k, v) { - delete(m, k) - } - } -} diff --git a/vendor/golang.org/x/exp/slices/cmp.go b/vendor/golang.org/x/exp/slices/cmp.go deleted file mode 100644 index fbf1934a0..000000000 --- a/vendor/golang.org/x/exp/slices/cmp.go +++ /dev/null @@ -1,44 +0,0 @@ -// Copyright 2023 The Go Authors. All rights reserved. 
-// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package slices - -import "golang.org/x/exp/constraints" - -// min is a version of the predeclared function from the Go 1.21 release. -func min[T constraints.Ordered](a, b T) T { - if a < b || isNaN(a) { - return a - } - return b -} - -// max is a version of the predeclared function from the Go 1.21 release. -func max[T constraints.Ordered](a, b T) T { - if a > b || isNaN(a) { - return a - } - return b -} - -// cmpLess is a copy of cmp.Less from the Go 1.21 release. -func cmpLess[T constraints.Ordered](x, y T) bool { - return (isNaN(x) && !isNaN(y)) || x < y -} - -// cmpCompare is a copy of cmp.Compare from the Go 1.21 release. -func cmpCompare[T constraints.Ordered](x, y T) int { - xNaN := isNaN(x) - yNaN := isNaN(y) - if xNaN && yNaN { - return 0 - } - if xNaN || x < y { - return -1 - } - if yNaN || x > y { - return +1 - } - return 0 -} diff --git a/vendor/golang.org/x/exp/slices/slices.go b/vendor/golang.org/x/exp/slices/slices.go deleted file mode 100644 index 46ceac343..000000000 --- a/vendor/golang.org/x/exp/slices/slices.go +++ /dev/null @@ -1,515 +0,0 @@ -// Copyright 2021 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Package slices defines various functions useful with slices of any type. -package slices - -import ( - "unsafe" - - "golang.org/x/exp/constraints" -) - -// Equal reports whether two slices are equal: the same length and all -// elements equal. If the lengths are different, Equal returns false. -// Otherwise, the elements are compared in increasing index order, and the -// comparison stops at the first unequal pair. -// Floating point NaNs are not considered equal. -func Equal[S ~[]E, E comparable](s1, s2 S) bool { - if len(s1) != len(s2) { - return false - } - for i := range s1 { - if s1[i] != s2[i] { - return false - } - } - return true -} - -// EqualFunc reports whether two slices are equal using an equality -// function on each pair of elements. If the lengths are different, -// EqualFunc returns false. Otherwise, the elements are compared in -// increasing index order, and the comparison stops at the first index -// for which eq returns false. -func EqualFunc[S1 ~[]E1, S2 ~[]E2, E1, E2 any](s1 S1, s2 S2, eq func(E1, E2) bool) bool { - if len(s1) != len(s2) { - return false - } - for i, v1 := range s1 { - v2 := s2[i] - if !eq(v1, v2) { - return false - } - } - return true -} - -// Compare compares the elements of s1 and s2, using [cmp.Compare] on each pair -// of elements. The elements are compared sequentially, starting at index 0, -// until one element is not equal to the other. -// The result of comparing the first non-matching elements is returned. -// If both slices are equal until one of them ends, the shorter slice is -// considered less than the longer one. -// The result is 0 if s1 == s2, -1 if s1 < s2, and +1 if s1 > s2. -func Compare[S ~[]E, E constraints.Ordered](s1, s2 S) int { - for i, v1 := range s1 { - if i >= len(s2) { - return +1 - } - v2 := s2[i] - if c := cmpCompare(v1, v2); c != 0 { - return c - } - } - if len(s1) < len(s2) { - return -1 - } - return 0 -} - -// CompareFunc is like [Compare] but uses a custom comparison function on each -// pair of elements. -// The result is the first non-zero result of cmp; if cmp always -// returns 0 the result is 0 if len(s1) == len(s2), -1 if len(s1) < len(s2), -// and +1 if len(s1) > len(s2). 
-func CompareFunc[S1 ~[]E1, S2 ~[]E2, E1, E2 any](s1 S1, s2 S2, cmp func(E1, E2) int) int { - for i, v1 := range s1 { - if i >= len(s2) { - return +1 - } - v2 := s2[i] - if c := cmp(v1, v2); c != 0 { - return c - } - } - if len(s1) < len(s2) { - return -1 - } - return 0 -} - -// Index returns the index of the first occurrence of v in s, -// or -1 if not present. -func Index[S ~[]E, E comparable](s S, v E) int { - for i := range s { - if v == s[i] { - return i - } - } - return -1 -} - -// IndexFunc returns the first index i satisfying f(s[i]), -// or -1 if none do. -func IndexFunc[S ~[]E, E any](s S, f func(E) bool) int { - for i := range s { - if f(s[i]) { - return i - } - } - return -1 -} - -// Contains reports whether v is present in s. -func Contains[S ~[]E, E comparable](s S, v E) bool { - return Index(s, v) >= 0 -} - -// ContainsFunc reports whether at least one -// element e of s satisfies f(e). -func ContainsFunc[S ~[]E, E any](s S, f func(E) bool) bool { - return IndexFunc(s, f) >= 0 -} - -// Insert inserts the values v... into s at index i, -// returning the modified slice. -// The elements at s[i:] are shifted up to make room. -// In the returned slice r, r[i] == v[0], -// and r[i+len(v)] == value originally at r[i]. -// Insert panics if i is out of range. -// This function is O(len(s) + len(v)). -func Insert[S ~[]E, E any](s S, i int, v ...E) S { - m := len(v) - if m == 0 { - return s - } - n := len(s) - if i == n { - return append(s, v...) - } - if n+m > cap(s) { - // Use append rather than make so that we bump the size of - // the slice up to the next storage class. - // This is what Grow does but we don't call Grow because - // that might copy the values twice. - s2 := append(s[:i], make(S, n+m-i)...) - copy(s2[i:], v) - copy(s2[i+m:], s[i:]) - return s2 - } - s = s[:n+m] - - // before: - // s: aaaaaaaabbbbccccccccdddd - // ^ ^ ^ ^ - // i i+m n n+m - // after: - // s: aaaaaaaavvvvbbbbcccccccc - // ^ ^ ^ ^ - // i i+m n n+m - // - // a are the values that don't move in s. - // v are the values copied in from v. - // b and c are the values from s that are shifted up in index. - // d are the values that get overwritten, never to be seen again. - - if !overlaps(v, s[i+m:]) { - // Easy case - v does not overlap either the c or d regions. - // (It might be in some of a or b, or elsewhere entirely.) - // The data we copy up doesn't write to v at all, so just do it. - - copy(s[i+m:], s[i:]) - - // Now we have - // s: aaaaaaaabbbbbbbbcccccccc - // ^ ^ ^ ^ - // i i+m n n+m - // Note the b values are duplicated. - - copy(s[i:], v) - - // Now we have - // s: aaaaaaaavvvvbbbbcccccccc - // ^ ^ ^ ^ - // i i+m n n+m - // That's the result we want. - return s - } - - // The hard case - v overlaps c or d. We can't just shift up - // the data because we'd move or clobber the values we're trying - // to insert. - // So instead, write v on top of d, then rotate. - copy(s[n:], v) - - // Now we have - // s: aaaaaaaabbbbccccccccvvvv - // ^ ^ ^ ^ - // i i+m n n+m - - rotateRight(s[i:], m) - - // Now we have - // s: aaaaaaaavvvvbbbbcccccccc - // ^ ^ ^ ^ - // i i+m n n+m - // That's the result we want. - return s -} - -// clearSlice sets all elements up to the length of s to the zero value of E. -// We may use the builtin clear func instead, and remove clearSlice, when upgrading -// to Go 1.21+. -func clearSlice[S ~[]E, E any](s S) { - var zero E - for i := range s { - s[i] = zero - } -} - -// Delete removes the elements s[i:j] from s, returning the modified slice. 
-// Delete panics if j > len(s) or s[i:j] is not a valid slice of s. -// Delete is O(len(s)-i), so if many items must be deleted, it is better to -// make a single call deleting them all together than to delete one at a time. -// Delete zeroes the elements s[len(s)-(j-i):len(s)]. -func Delete[S ~[]E, E any](s S, i, j int) S { - _ = s[i:j:len(s)] // bounds check - - if i == j { - return s - } - - oldlen := len(s) - s = append(s[:i], s[j:]...) - clearSlice(s[len(s):oldlen]) // zero/nil out the obsolete elements, for GC - return s -} - -// DeleteFunc removes any elements from s for which del returns true, -// returning the modified slice. -// DeleteFunc zeroes the elements between the new length and the original length. -func DeleteFunc[S ~[]E, E any](s S, del func(E) bool) S { - i := IndexFunc(s, del) - if i == -1 { - return s - } - // Don't start copying elements until we find one to delete. - for j := i + 1; j < len(s); j++ { - if v := s[j]; !del(v) { - s[i] = v - i++ - } - } - clearSlice(s[i:]) // zero/nil out the obsolete elements, for GC - return s[:i] -} - -// Replace replaces the elements s[i:j] by the given v, and returns the -// modified slice. Replace panics if s[i:j] is not a valid slice of s. -// When len(v) < (j-i), Replace zeroes the elements between the new length and the original length. -func Replace[S ~[]E, E any](s S, i, j int, v ...E) S { - _ = s[i:j] // verify that i:j is a valid subslice - - if i == j { - return Insert(s, i, v...) - } - if j == len(s) { - return append(s[:i], v...) - } - - tot := len(s[:i]) + len(v) + len(s[j:]) - if tot > cap(s) { - // Too big to fit, allocate and copy over. - s2 := append(s[:i], make(S, tot-i)...) // See Insert - copy(s2[i:], v) - copy(s2[i+len(v):], s[j:]) - return s2 - } - - r := s[:tot] - - if i+len(v) <= j { - // Easy, as v fits in the deleted portion. - copy(r[i:], v) - if i+len(v) != j { - copy(r[i+len(v):], s[j:]) - } - clearSlice(s[tot:]) // zero/nil out the obsolete elements, for GC - return r - } - - // We are expanding (v is bigger than j-i). - // The situation is something like this: - // (example has i=4,j=8,len(s)=16,len(v)=6) - // s: aaaaxxxxbbbbbbbbyy - // ^ ^ ^ ^ - // i j len(s) tot - // a: prefix of s - // x: deleted range - // b: more of s - // y: area to expand into - - if !overlaps(r[i+len(v):], v) { - // Easy, as v is not clobbered by the first copy. - copy(r[i+len(v):], s[j:]) - copy(r[i:], v) - return r - } - - // This is a situation where we don't have a single place to which - // we can copy v. Parts of it need to go to two different places. - // We want to copy the prefix of v into y and the suffix into x, then - // rotate |y| spots to the right. - // - // v[2:] v[:2] - // | | - // s: aaaavvvvbbbbbbbbvv - // ^ ^ ^ ^ - // i j len(s) tot - // - // If either of those two destinations don't alias v, then we're good. - y := len(v) - (j - i) // length of y portion - - if !overlaps(r[i:j], v) { - copy(r[i:j], v[y:]) - copy(r[len(s):], v[:y]) - rotateRight(r[i:], y) - return r - } - if !overlaps(r[len(s):], v) { - copy(r[len(s):], v[:y]) - copy(r[i:j], v[y:]) - rotateRight(r[i:], y) - return r - } - - // Now we know that v overlaps both x and y. - // That means that the entirety of b is *inside* v. - // So we don't need to preserve b at all; instead we - // can copy v first, then copy the b part of v out of - // v to the right destination. - k := startIdx(v, s[j:]) - copy(r[i:], v) - copy(r[i+len(v):], r[i+k:]) - return r -} - -// Clone returns a copy of the slice. 
-// The elements are copied using assignment, so this is a shallow clone. -func Clone[S ~[]E, E any](s S) S { - // Preserve nil in case it matters. - if s == nil { - return nil - } - return append(S([]E{}), s...) -} - -// Compact replaces consecutive runs of equal elements with a single copy. -// This is like the uniq command found on Unix. -// Compact modifies the contents of the slice s and returns the modified slice, -// which may have a smaller length. -// Compact zeroes the elements between the new length and the original length. -func Compact[S ~[]E, E comparable](s S) S { - if len(s) < 2 { - return s - } - i := 1 - for k := 1; k < len(s); k++ { - if s[k] != s[k-1] { - if i != k { - s[i] = s[k] - } - i++ - } - } - clearSlice(s[i:]) // zero/nil out the obsolete elements, for GC - return s[:i] -} - -// CompactFunc is like [Compact] but uses an equality function to compare elements. -// For runs of elements that compare equal, CompactFunc keeps the first one. -// CompactFunc zeroes the elements between the new length and the original length. -func CompactFunc[S ~[]E, E any](s S, eq func(E, E) bool) S { - if len(s) < 2 { - return s - } - i := 1 - for k := 1; k < len(s); k++ { - if !eq(s[k], s[k-1]) { - if i != k { - s[i] = s[k] - } - i++ - } - } - clearSlice(s[i:]) // zero/nil out the obsolete elements, for GC - return s[:i] -} - -// Grow increases the slice's capacity, if necessary, to guarantee space for -// another n elements. After Grow(n), at least n elements can be appended -// to the slice without another allocation. If n is negative or too large to -// allocate the memory, Grow panics. -func Grow[S ~[]E, E any](s S, n int) S { - if n < 0 { - panic("cannot be negative") - } - if n -= cap(s) - len(s); n > 0 { - // TODO(https://go.dev/issue/53888): Make using []E instead of S - // to workaround a compiler bug where the runtime.growslice optimization - // does not take effect. Revert when the compiler is fixed. - s = append([]E(s)[:cap(s)], make([]E, n)...)[:len(s)] - } - return s -} - -// Clip removes unused capacity from the slice, returning s[:len(s):len(s)]. -func Clip[S ~[]E, E any](s S) S { - return s[:len(s):len(s)] -} - -// Rotation algorithm explanation: -// -// rotate left by 2 -// start with -// 0123456789 -// split up like this -// 01 234567 89 -// swap first 2 and last 2 -// 89 234567 01 -// join first parts -// 89234567 01 -// recursively rotate first left part by 2 -// 23456789 01 -// join at the end -// 2345678901 -// -// rotate left by 8 -// start with -// 0123456789 -// split up like this -// 01 234567 89 -// swap first 2 and last 2 -// 89 234567 01 -// join last parts -// 89 23456701 -// recursively rotate second part left by 6 -// 89 01234567 -// join at the end -// 8901234567 - -// TODO: There are other rotate algorithms. -// This algorithm has the desirable property that it moves each element exactly twice. -// The triple-reverse algorithm is simpler and more cache friendly, but takes more writes. -// The follow-cycles algorithm can be 1-write but it is not very cache friendly. - -// rotateLeft rotates b left by n spaces. -// s_final[i] = s_orig[i+r], wrapping around. -func rotateLeft[E any](s []E, r int) { - for r != 0 && r != len(s) { - if r*2 <= len(s) { - swap(s[:r], s[len(s)-r:]) - s = s[:len(s)-r] - } else { - swap(s[:len(s)-r], s[r:]) - s, r = s[len(s)-r:], r*2-len(s) - } - } -} -func rotateRight[E any](s []E, r int) { - rotateLeft(s, len(s)-r) -} - -// swap swaps the contents of x and y. x and y must be equal length and disjoint. 
-func swap[E any](x, y []E) { - for i := 0; i < len(x); i++ { - x[i], y[i] = y[i], x[i] - } -} - -// overlaps reports whether the memory ranges a[0:len(a)] and b[0:len(b)] overlap. -func overlaps[E any](a, b []E) bool { - if len(a) == 0 || len(b) == 0 { - return false - } - elemSize := unsafe.Sizeof(a[0]) - if elemSize == 0 { - return false - } - // TODO: use a runtime/unsafe facility once one becomes available. See issue 12445. - // Also see crypto/internal/alias/alias.go:AnyOverlap - return uintptr(unsafe.Pointer(&a[0])) <= uintptr(unsafe.Pointer(&b[len(b)-1]))+(elemSize-1) && - uintptr(unsafe.Pointer(&b[0])) <= uintptr(unsafe.Pointer(&a[len(a)-1]))+(elemSize-1) -} - -// startIdx returns the index in haystack where the needle starts. -// prerequisite: the needle must be aliased entirely inside the haystack. -func startIdx[E any](haystack, needle []E) int { - p := &needle[0] - for i := range haystack { - if p == &haystack[i] { - return i - } - } - // TODO: what if the overlap is by a non-integral number of Es? - panic("needle not found") -} - -// Reverse reverses the elements of the slice in place. -func Reverse[S ~[]E, E any](s S) { - for i, j := 0, len(s)-1; i < j; i, j = i+1, j-1 { - s[i], s[j] = s[j], s[i] - } -} diff --git a/vendor/golang.org/x/exp/slices/sort.go b/vendor/golang.org/x/exp/slices/sort.go deleted file mode 100644 index b67897f76..000000000 --- a/vendor/golang.org/x/exp/slices/sort.go +++ /dev/null @@ -1,195 +0,0 @@ -// Copyright 2022 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -//go:generate go run $GOROOT/src/sort/gen_sort_variants.go -exp - -package slices - -import ( - "math/bits" - - "golang.org/x/exp/constraints" -) - -// Sort sorts a slice of any ordered type in ascending order. -// When sorting floating-point numbers, NaNs are ordered before other values. -func Sort[S ~[]E, E constraints.Ordered](x S) { - n := len(x) - pdqsortOrdered(x, 0, n, bits.Len(uint(n))) -} - -// SortFunc sorts the slice x in ascending order as determined by the cmp -// function. This sort is not guaranteed to be stable. -// cmp(a, b) should return a negative number when a < b, a positive number when -// a > b and zero when a == b. -// -// SortFunc requires that cmp is a strict weak ordering. -// See https://en.wikipedia.org/wiki/Weak_ordering#Strict_weak_orderings. -func SortFunc[S ~[]E, E any](x S, cmp func(a, b E) int) { - n := len(x) - pdqsortCmpFunc(x, 0, n, bits.Len(uint(n)), cmp) -} - -// SortStableFunc sorts the slice x while keeping the original order of equal -// elements, using cmp to compare elements in the same way as [SortFunc]. -func SortStableFunc[S ~[]E, E any](x S, cmp func(a, b E) int) { - stableCmpFunc(x, len(x), cmp) -} - -// IsSorted reports whether x is sorted in ascending order. -func IsSorted[S ~[]E, E constraints.Ordered](x S) bool { - for i := len(x) - 1; i > 0; i-- { - if cmpLess(x[i], x[i-1]) { - return false - } - } - return true -} - -// IsSortedFunc reports whether x is sorted in ascending order, with cmp as the -// comparison function as defined by [SortFunc]. -func IsSortedFunc[S ~[]E, E any](x S, cmp func(a, b E) int) bool { - for i := len(x) - 1; i > 0; i-- { - if cmp(x[i], x[i-1]) < 0 { - return false - } - } - return true -} - -// Min returns the minimal value in x. It panics if x is empty. -// For floating-point numbers, Min propagates NaNs (any NaN value in x -// forces the output to be NaN). 
-func Min[S ~[]E, E constraints.Ordered](x S) E { - if len(x) < 1 { - panic("slices.Min: empty list") - } - m := x[0] - for i := 1; i < len(x); i++ { - m = min(m, x[i]) - } - return m -} - -// MinFunc returns the minimal value in x, using cmp to compare elements. -// It panics if x is empty. If there is more than one minimal element -// according to the cmp function, MinFunc returns the first one. -func MinFunc[S ~[]E, E any](x S, cmp func(a, b E) int) E { - if len(x) < 1 { - panic("slices.MinFunc: empty list") - } - m := x[0] - for i := 1; i < len(x); i++ { - if cmp(x[i], m) < 0 { - m = x[i] - } - } - return m -} - -// Max returns the maximal value in x. It panics if x is empty. -// For floating-point E, Max propagates NaNs (any NaN value in x -// forces the output to be NaN). -func Max[S ~[]E, E constraints.Ordered](x S) E { - if len(x) < 1 { - panic("slices.Max: empty list") - } - m := x[0] - for i := 1; i < len(x); i++ { - m = max(m, x[i]) - } - return m -} - -// MaxFunc returns the maximal value in x, using cmp to compare elements. -// It panics if x is empty. If there is more than one maximal element -// according to the cmp function, MaxFunc returns the first one. -func MaxFunc[S ~[]E, E any](x S, cmp func(a, b E) int) E { - if len(x) < 1 { - panic("slices.MaxFunc: empty list") - } - m := x[0] - for i := 1; i < len(x); i++ { - if cmp(x[i], m) > 0 { - m = x[i] - } - } - return m -} - -// BinarySearch searches for target in a sorted slice and returns the position -// where target is found, or the position where target would appear in the -// sort order; it also returns a bool saying whether the target is really found -// in the slice. The slice must be sorted in increasing order. -func BinarySearch[S ~[]E, E constraints.Ordered](x S, target E) (int, bool) { - // Inlining is faster than calling BinarySearchFunc with a lambda. - n := len(x) - // Define x[-1] < target and x[n] >= target. - // Invariant: x[i-1] < target, x[j] >= target. - i, j := 0, n - for i < j { - h := int(uint(i+j) >> 1) // avoid overflow when computing h - // i ≤ h < j - if cmpLess(x[h], target) { - i = h + 1 // preserves x[i-1] < target - } else { - j = h // preserves x[j] >= target - } - } - // i == j, x[i-1] < target, and x[j] (= x[i]) >= target => answer is i. - return i, i < n && (x[i] == target || (isNaN(x[i]) && isNaN(target))) -} - -// BinarySearchFunc works like [BinarySearch], but uses a custom comparison -// function. The slice must be sorted in increasing order, where "increasing" -// is defined by cmp. cmp should return 0 if the slice element matches -// the target, a negative number if the slice element precedes the target, -// or a positive number if the slice element follows the target. -// cmp must implement the same ordering as the slice, such that if -// cmp(a, t) < 0 and cmp(b, t) >= 0, then a must precede b in the slice. -func BinarySearchFunc[S ~[]E, E, T any](x S, target T, cmp func(E, T) int) (int, bool) { - n := len(x) - // Define cmp(x[-1], target) < 0 and cmp(x[n], target) >= 0 . - // Invariant: cmp(x[i - 1], target) < 0, cmp(x[j], target) >= 0. - i, j := 0, n - for i < j { - h := int(uint(i+j) >> 1) // avoid overflow when computing h - // i ≤ h < j - if cmp(x[h], target) < 0 { - i = h + 1 // preserves cmp(x[i - 1], target) < 0 - } else { - j = h // preserves cmp(x[j], target) >= 0 - } - } - // i == j, cmp(x[i-1], target) < 0, and cmp(x[j], target) (= cmp(x[i], target)) >= 0 => answer is i. 
- return i, i < n && cmp(x[i], target) == 0 -} - -type sortedHint int // hint for pdqsort when choosing the pivot - -const ( - unknownHint sortedHint = iota - increasingHint - decreasingHint -) - -// xorshift paper: https://www.jstatsoft.org/article/view/v008i14/xorshift.pdf -type xorshift uint64 - -func (r *xorshift) Next() uint64 { - *r ^= *r << 13 - *r ^= *r >> 17 - *r ^= *r << 5 - return uint64(*r) -} - -func nextPowerOfTwo(length int) uint { - return 1 << bits.Len(uint(length)) -} - -// isNaN reports whether x is a NaN without requiring the math package. -// This will always return false if T is not floating-point. -func isNaN[T constraints.Ordered](x T) bool { - return x != x -} diff --git a/vendor/golang.org/x/exp/slices/zsortanyfunc.go b/vendor/golang.org/x/exp/slices/zsortanyfunc.go deleted file mode 100644 index 06f2c7a24..000000000 --- a/vendor/golang.org/x/exp/slices/zsortanyfunc.go +++ /dev/null @@ -1,479 +0,0 @@ -// Code generated by gen_sort_variants.go; DO NOT EDIT. - -// Copyright 2022 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package slices - -// insertionSortCmpFunc sorts data[a:b] using insertion sort. -func insertionSortCmpFunc[E any](data []E, a, b int, cmp func(a, b E) int) { - for i := a + 1; i < b; i++ { - for j := i; j > a && (cmp(data[j], data[j-1]) < 0); j-- { - data[j], data[j-1] = data[j-1], data[j] - } - } -} - -// siftDownCmpFunc implements the heap property on data[lo:hi]. -// first is an offset into the array where the root of the heap lies. -func siftDownCmpFunc[E any](data []E, lo, hi, first int, cmp func(a, b E) int) { - root := lo - for { - child := 2*root + 1 - if child >= hi { - break - } - if child+1 < hi && (cmp(data[first+child], data[first+child+1]) < 0) { - child++ - } - if !(cmp(data[first+root], data[first+child]) < 0) { - return - } - data[first+root], data[first+child] = data[first+child], data[first+root] - root = child - } -} - -func heapSortCmpFunc[E any](data []E, a, b int, cmp func(a, b E) int) { - first := a - lo := 0 - hi := b - a - - // Build heap with greatest element at top. - for i := (hi - 1) / 2; i >= 0; i-- { - siftDownCmpFunc(data, i, hi, first, cmp) - } - - // Pop elements, largest first, into end of data. - for i := hi - 1; i >= 0; i-- { - data[first], data[first+i] = data[first+i], data[first] - siftDownCmpFunc(data, lo, i, first, cmp) - } -} - -// pdqsortCmpFunc sorts data[a:b]. -// The algorithm based on pattern-defeating quicksort(pdqsort), but without the optimizations from BlockQuicksort. -// pdqsort paper: https://arxiv.org/pdf/2106.05123.pdf -// C++ implementation: https://github.com/orlp/pdqsort -// Rust implementation: https://docs.rs/pdqsort/latest/pdqsort/ -// limit is the number of allowed bad (very unbalanced) pivots before falling back to heapsort. -func pdqsortCmpFunc[E any](data []E, a, b, limit int, cmp func(a, b E) int) { - const maxInsertion = 12 - - var ( - wasBalanced = true // whether the last partitioning was reasonably balanced - wasPartitioned = true // whether the slice was already partitioned - ) - - for { - length := b - a - - if length <= maxInsertion { - insertionSortCmpFunc(data, a, b, cmp) - return - } - - // Fall back to heapsort if too many bad choices were made. - if limit == 0 { - heapSortCmpFunc(data, a, b, cmp) - return - } - - // If the last partitioning was imbalanced, we need to breaking patterns. 
- if !wasBalanced { - breakPatternsCmpFunc(data, a, b, cmp) - limit-- - } - - pivot, hint := choosePivotCmpFunc(data, a, b, cmp) - if hint == decreasingHint { - reverseRangeCmpFunc(data, a, b, cmp) - // The chosen pivot was pivot-a elements after the start of the array. - // After reversing it is pivot-a elements before the end of the array. - // The idea came from Rust's implementation. - pivot = (b - 1) - (pivot - a) - hint = increasingHint - } - - // The slice is likely already sorted. - if wasBalanced && wasPartitioned && hint == increasingHint { - if partialInsertionSortCmpFunc(data, a, b, cmp) { - return - } - } - - // Probably the slice contains many duplicate elements, partition the slice into - // elements equal to and elements greater than the pivot. - if a > 0 && !(cmp(data[a-1], data[pivot]) < 0) { - mid := partitionEqualCmpFunc(data, a, b, pivot, cmp) - a = mid - continue - } - - mid, alreadyPartitioned := partitionCmpFunc(data, a, b, pivot, cmp) - wasPartitioned = alreadyPartitioned - - leftLen, rightLen := mid-a, b-mid - balanceThreshold := length / 8 - if leftLen < rightLen { - wasBalanced = leftLen >= balanceThreshold - pdqsortCmpFunc(data, a, mid, limit, cmp) - a = mid + 1 - } else { - wasBalanced = rightLen >= balanceThreshold - pdqsortCmpFunc(data, mid+1, b, limit, cmp) - b = mid - } - } -} - -// partitionCmpFunc does one quicksort partition. -// Let p = data[pivot] -// Moves elements in data[a:b] around, so that data[i]
<p and data[j]>
=p for inewpivot. -// On return, data[newpivot] = p -func partitionCmpFunc[E any](data []E, a, b, pivot int, cmp func(a, b E) int) (newpivot int, alreadyPartitioned bool) { - data[a], data[pivot] = data[pivot], data[a] - i, j := a+1, b-1 // i and j are inclusive of the elements remaining to be partitioned - - for i <= j && (cmp(data[i], data[a]) < 0) { - i++ - } - for i <= j && !(cmp(data[j], data[a]) < 0) { - j-- - } - if i > j { - data[j], data[a] = data[a], data[j] - return j, true - } - data[i], data[j] = data[j], data[i] - i++ - j-- - - for { - for i <= j && (cmp(data[i], data[a]) < 0) { - i++ - } - for i <= j && !(cmp(data[j], data[a]) < 0) { - j-- - } - if i > j { - break - } - data[i], data[j] = data[j], data[i] - i++ - j-- - } - data[j], data[a] = data[a], data[j] - return j, false -} - -// partitionEqualCmpFunc partitions data[a:b] into elements equal to data[pivot] followed by elements greater than data[pivot]. -// It assumed that data[a:b] does not contain elements smaller than the data[pivot]. -func partitionEqualCmpFunc[E any](data []E, a, b, pivot int, cmp func(a, b E) int) (newpivot int) { - data[a], data[pivot] = data[pivot], data[a] - i, j := a+1, b-1 // i and j are inclusive of the elements remaining to be partitioned - - for { - for i <= j && !(cmp(data[a], data[i]) < 0) { - i++ - } - for i <= j && (cmp(data[a], data[j]) < 0) { - j-- - } - if i > j { - break - } - data[i], data[j] = data[j], data[i] - i++ - j-- - } - return i -} - -// partialInsertionSortCmpFunc partially sorts a slice, returns true if the slice is sorted at the end. -func partialInsertionSortCmpFunc[E any](data []E, a, b int, cmp func(a, b E) int) bool { - const ( - maxSteps = 5 // maximum number of adjacent out-of-order pairs that will get shifted - shortestShifting = 50 // don't shift any elements on short arrays - ) - i := a + 1 - for j := 0; j < maxSteps; j++ { - for i < b && !(cmp(data[i], data[i-1]) < 0) { - i++ - } - - if i == b { - return true - } - - if b-a < shortestShifting { - return false - } - - data[i], data[i-1] = data[i-1], data[i] - - // Shift the smaller one to the left. - if i-a >= 2 { - for j := i - 1; j >= 1; j-- { - if !(cmp(data[j], data[j-1]) < 0) { - break - } - data[j], data[j-1] = data[j-1], data[j] - } - } - // Shift the greater one to the right. - if b-i >= 2 { - for j := i + 1; j < b; j++ { - if !(cmp(data[j], data[j-1]) < 0) { - break - } - data[j], data[j-1] = data[j-1], data[j] - } - } - } - return false -} - -// breakPatternsCmpFunc scatters some elements around in an attempt to break some patterns -// that might cause imbalanced partitions in quicksort. -func breakPatternsCmpFunc[E any](data []E, a, b int, cmp func(a, b E) int) { - length := b - a - if length >= 8 { - random := xorshift(length) - modulus := nextPowerOfTwo(length) - - for idx := a + (length/4)*2 - 1; idx <= a+(length/4)*2+1; idx++ { - other := int(uint(random.Next()) & (modulus - 1)) - if other >= length { - other -= length - } - data[idx], data[a+other] = data[a+other], data[idx] - } - } -} - -// choosePivotCmpFunc chooses a pivot in data[a:b]. -// -// [0,8): chooses a static pivot. -// [8,shortestNinther): uses the simple median-of-three method. -// [shortestNinther,∞): uses the Tukey ninther method. 
-func choosePivotCmpFunc[E any](data []E, a, b int, cmp func(a, b E) int) (pivot int, hint sortedHint) { - const ( - shortestNinther = 50 - maxSwaps = 4 * 3 - ) - - l := b - a - - var ( - swaps int - i = a + l/4*1 - j = a + l/4*2 - k = a + l/4*3 - ) - - if l >= 8 { - if l >= shortestNinther { - // Tukey ninther method, the idea came from Rust's implementation. - i = medianAdjacentCmpFunc(data, i, &swaps, cmp) - j = medianAdjacentCmpFunc(data, j, &swaps, cmp) - k = medianAdjacentCmpFunc(data, k, &swaps, cmp) - } - // Find the median among i, j, k and stores it into j. - j = medianCmpFunc(data, i, j, k, &swaps, cmp) - } - - switch swaps { - case 0: - return j, increasingHint - case maxSwaps: - return j, decreasingHint - default: - return j, unknownHint - } -} - -// order2CmpFunc returns x,y where data[x] <= data[y], where x,y=a,b or x,y=b,a. -func order2CmpFunc[E any](data []E, a, b int, swaps *int, cmp func(a, b E) int) (int, int) { - if cmp(data[b], data[a]) < 0 { - *swaps++ - return b, a - } - return a, b -} - -// medianCmpFunc returns x where data[x] is the median of data[a],data[b],data[c], where x is a, b, or c. -func medianCmpFunc[E any](data []E, a, b, c int, swaps *int, cmp func(a, b E) int) int { - a, b = order2CmpFunc(data, a, b, swaps, cmp) - b, c = order2CmpFunc(data, b, c, swaps, cmp) - a, b = order2CmpFunc(data, a, b, swaps, cmp) - return b -} - -// medianAdjacentCmpFunc finds the median of data[a - 1], data[a], data[a + 1] and stores the index into a. -func medianAdjacentCmpFunc[E any](data []E, a int, swaps *int, cmp func(a, b E) int) int { - return medianCmpFunc(data, a-1, a, a+1, swaps, cmp) -} - -func reverseRangeCmpFunc[E any](data []E, a, b int, cmp func(a, b E) int) { - i := a - j := b - 1 - for i < j { - data[i], data[j] = data[j], data[i] - i++ - j-- - } -} - -func swapRangeCmpFunc[E any](data []E, a, b, n int, cmp func(a, b E) int) { - for i := 0; i < n; i++ { - data[a+i], data[b+i] = data[b+i], data[a+i] - } -} - -func stableCmpFunc[E any](data []E, n int, cmp func(a, b E) int) { - blockSize := 20 // must be > 0 - a, b := 0, blockSize - for b <= n { - insertionSortCmpFunc(data, a, b, cmp) - a = b - b += blockSize - } - insertionSortCmpFunc(data, a, n, cmp) - - for blockSize < n { - a, b = 0, 2*blockSize - for b <= n { - symMergeCmpFunc(data, a, a+blockSize, b, cmp) - a = b - b += 2 * blockSize - } - if m := a + blockSize; m < n { - symMergeCmpFunc(data, a, m, n, cmp) - } - blockSize *= 2 - } -} - -// symMergeCmpFunc merges the two sorted subsequences data[a:m] and data[m:b] using -// the SymMerge algorithm from Pok-Son Kim and Arne Kutzner, "Stable Minimum -// Storage Merging by Symmetric Comparisons", in Susanne Albers and Tomasz -// Radzik, editors, Algorithms - ESA 2004, volume 3221 of Lecture Notes in -// Computer Science, pages 714-723. Springer, 2004. -// -// Let M = m-a and N = b-n. Wolog M < N. -// The recursion depth is bound by ceil(log(N+M)). -// The algorithm needs O(M*log(N/M + 1)) calls to data.Less. -// The algorithm needs O((M+N)*log(M)) calls to data.Swap. -// -// The paper gives O((M+N)*log(M)) as the number of assignments assuming a -// rotation algorithm which uses O(M+N+gcd(M+N)) assignments. The argumentation -// in the paper carries through for Swap operations, especially as the block -// swapping rotate uses only O(M+N) Swaps. -// -// symMerge assumes non-degenerate arguments: a < m && m < b. -// Having the caller check this condition eliminates many leaf recursion calls, -// which improves performance. 
-func symMergeCmpFunc[E any](data []E, a, m, b int, cmp func(a, b E) int) { - // Avoid unnecessary recursions of symMerge - // by direct insertion of data[a] into data[m:b] - // if data[a:m] only contains one element. - if m-a == 1 { - // Use binary search to find the lowest index i - // such that data[i] >= data[a] for m <= i < b. - // Exit the search loop with i == b in case no such index exists. - i := m - j := b - for i < j { - h := int(uint(i+j) >> 1) - if cmp(data[h], data[a]) < 0 { - i = h + 1 - } else { - j = h - } - } - // Swap values until data[a] reaches the position before i. - for k := a; k < i-1; k++ { - data[k], data[k+1] = data[k+1], data[k] - } - return - } - - // Avoid unnecessary recursions of symMerge - // by direct insertion of data[m] into data[a:m] - // if data[m:b] only contains one element. - if b-m == 1 { - // Use binary search to find the lowest index i - // such that data[i] > data[m] for a <= i < m. - // Exit the search loop with i == m in case no such index exists. - i := a - j := m - for i < j { - h := int(uint(i+j) >> 1) - if !(cmp(data[m], data[h]) < 0) { - i = h + 1 - } else { - j = h - } - } - // Swap values until data[m] reaches the position i. - for k := m; k > i; k-- { - data[k], data[k-1] = data[k-1], data[k] - } - return - } - - mid := int(uint(a+b) >> 1) - n := mid + m - var start, r int - if m > mid { - start = n - b - r = mid - } else { - start = a - r = m - } - p := n - 1 - - for start < r { - c := int(uint(start+r) >> 1) - if !(cmp(data[p-c], data[c]) < 0) { - start = c + 1 - } else { - r = c - } - } - - end := n - start - if start < m && m < end { - rotateCmpFunc(data, start, m, end, cmp) - } - if a < start && start < mid { - symMergeCmpFunc(data, a, start, mid, cmp) - } - if mid < end && end < b { - symMergeCmpFunc(data, mid, end, b, cmp) - } -} - -// rotateCmpFunc rotates two consecutive blocks u = data[a:m] and v = data[m:b] in data: -// Data of the form 'x u v y' is changed to 'x v u y'. -// rotate performs at most b-a many calls to data.Swap, -// and it assumes non-degenerate arguments: a < m && m < b. -func rotateCmpFunc[E any](data []E, a, m, b int, cmp func(a, b E) int) { - i := m - a - j := b - m - - for i != j { - if i > j { - swapRangeCmpFunc(data, m-i, m, j, cmp) - i -= j - } else { - swapRangeCmpFunc(data, m-i, m+j-i, i, cmp) - j -= i - } - } - // i == j - swapRangeCmpFunc(data, m-i, m, i, cmp) -} diff --git a/vendor/golang.org/x/exp/slices/zsortordered.go b/vendor/golang.org/x/exp/slices/zsortordered.go deleted file mode 100644 index 99b47c398..000000000 --- a/vendor/golang.org/x/exp/slices/zsortordered.go +++ /dev/null @@ -1,481 +0,0 @@ -// Code generated by gen_sort_variants.go; DO NOT EDIT. - -// Copyright 2022 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package slices - -import "golang.org/x/exp/constraints" - -// insertionSortOrdered sorts data[a:b] using insertion sort. -func insertionSortOrdered[E constraints.Ordered](data []E, a, b int) { - for i := a + 1; i < b; i++ { - for j := i; j > a && cmpLess(data[j], data[j-1]); j-- { - data[j], data[j-1] = data[j-1], data[j] - } - } -} - -// siftDownOrdered implements the heap property on data[lo:hi]. -// first is an offset into the array where the root of the heap lies. 
-func siftDownOrdered[E constraints.Ordered](data []E, lo, hi, first int) { - root := lo - for { - child := 2*root + 1 - if child >= hi { - break - } - if child+1 < hi && cmpLess(data[first+child], data[first+child+1]) { - child++ - } - if !cmpLess(data[first+root], data[first+child]) { - return - } - data[first+root], data[first+child] = data[first+child], data[first+root] - root = child - } -} - -func heapSortOrdered[E constraints.Ordered](data []E, a, b int) { - first := a - lo := 0 - hi := b - a - - // Build heap with greatest element at top. - for i := (hi - 1) / 2; i >= 0; i-- { - siftDownOrdered(data, i, hi, first) - } - - // Pop elements, largest first, into end of data. - for i := hi - 1; i >= 0; i-- { - data[first], data[first+i] = data[first+i], data[first] - siftDownOrdered(data, lo, i, first) - } -} - -// pdqsortOrdered sorts data[a:b]. -// The algorithm based on pattern-defeating quicksort(pdqsort), but without the optimizations from BlockQuicksort. -// pdqsort paper: https://arxiv.org/pdf/2106.05123.pdf -// C++ implementation: https://github.com/orlp/pdqsort -// Rust implementation: https://docs.rs/pdqsort/latest/pdqsort/ -// limit is the number of allowed bad (very unbalanced) pivots before falling back to heapsort. -func pdqsortOrdered[E constraints.Ordered](data []E, a, b, limit int) { - const maxInsertion = 12 - - var ( - wasBalanced = true // whether the last partitioning was reasonably balanced - wasPartitioned = true // whether the slice was already partitioned - ) - - for { - length := b - a - - if length <= maxInsertion { - insertionSortOrdered(data, a, b) - return - } - - // Fall back to heapsort if too many bad choices were made. - if limit == 0 { - heapSortOrdered(data, a, b) - return - } - - // If the last partitioning was imbalanced, we need to breaking patterns. - if !wasBalanced { - breakPatternsOrdered(data, a, b) - limit-- - } - - pivot, hint := choosePivotOrdered(data, a, b) - if hint == decreasingHint { - reverseRangeOrdered(data, a, b) - // The chosen pivot was pivot-a elements after the start of the array. - // After reversing it is pivot-a elements before the end of the array. - // The idea came from Rust's implementation. - pivot = (b - 1) - (pivot - a) - hint = increasingHint - } - - // The slice is likely already sorted. - if wasBalanced && wasPartitioned && hint == increasingHint { - if partialInsertionSortOrdered(data, a, b) { - return - } - } - - // Probably the slice contains many duplicate elements, partition the slice into - // elements equal to and elements greater than the pivot. - if a > 0 && !cmpLess(data[a-1], data[pivot]) { - mid := partitionEqualOrdered(data, a, b, pivot) - a = mid - continue - } - - mid, alreadyPartitioned := partitionOrdered(data, a, b, pivot) - wasPartitioned = alreadyPartitioned - - leftLen, rightLen := mid-a, b-mid - balanceThreshold := length / 8 - if leftLen < rightLen { - wasBalanced = leftLen >= balanceThreshold - pdqsortOrdered(data, a, mid, limit) - a = mid + 1 - } else { - wasBalanced = rightLen >= balanceThreshold - pdqsortOrdered(data, mid+1, b, limit) - b = mid - } - } -} - -// partitionOrdered does one quicksort partition. -// Let p = data[pivot] -// Moves elements in data[a:b] around, so that data[i]
<p and data[j]>
=p for inewpivot. -// On return, data[newpivot] = p -func partitionOrdered[E constraints.Ordered](data []E, a, b, pivot int) (newpivot int, alreadyPartitioned bool) { - data[a], data[pivot] = data[pivot], data[a] - i, j := a+1, b-1 // i and j are inclusive of the elements remaining to be partitioned - - for i <= j && cmpLess(data[i], data[a]) { - i++ - } - for i <= j && !cmpLess(data[j], data[a]) { - j-- - } - if i > j { - data[j], data[a] = data[a], data[j] - return j, true - } - data[i], data[j] = data[j], data[i] - i++ - j-- - - for { - for i <= j && cmpLess(data[i], data[a]) { - i++ - } - for i <= j && !cmpLess(data[j], data[a]) { - j-- - } - if i > j { - break - } - data[i], data[j] = data[j], data[i] - i++ - j-- - } - data[j], data[a] = data[a], data[j] - return j, false -} - -// partitionEqualOrdered partitions data[a:b] into elements equal to data[pivot] followed by elements greater than data[pivot]. -// It assumed that data[a:b] does not contain elements smaller than the data[pivot]. -func partitionEqualOrdered[E constraints.Ordered](data []E, a, b, pivot int) (newpivot int) { - data[a], data[pivot] = data[pivot], data[a] - i, j := a+1, b-1 // i and j are inclusive of the elements remaining to be partitioned - - for { - for i <= j && !cmpLess(data[a], data[i]) { - i++ - } - for i <= j && cmpLess(data[a], data[j]) { - j-- - } - if i > j { - break - } - data[i], data[j] = data[j], data[i] - i++ - j-- - } - return i -} - -// partialInsertionSortOrdered partially sorts a slice, returns true if the slice is sorted at the end. -func partialInsertionSortOrdered[E constraints.Ordered](data []E, a, b int) bool { - const ( - maxSteps = 5 // maximum number of adjacent out-of-order pairs that will get shifted - shortestShifting = 50 // don't shift any elements on short arrays - ) - i := a + 1 - for j := 0; j < maxSteps; j++ { - for i < b && !cmpLess(data[i], data[i-1]) { - i++ - } - - if i == b { - return true - } - - if b-a < shortestShifting { - return false - } - - data[i], data[i-1] = data[i-1], data[i] - - // Shift the smaller one to the left. - if i-a >= 2 { - for j := i - 1; j >= 1; j-- { - if !cmpLess(data[j], data[j-1]) { - break - } - data[j], data[j-1] = data[j-1], data[j] - } - } - // Shift the greater one to the right. - if b-i >= 2 { - for j := i + 1; j < b; j++ { - if !cmpLess(data[j], data[j-1]) { - break - } - data[j], data[j-1] = data[j-1], data[j] - } - } - } - return false -} - -// breakPatternsOrdered scatters some elements around in an attempt to break some patterns -// that might cause imbalanced partitions in quicksort. -func breakPatternsOrdered[E constraints.Ordered](data []E, a, b int) { - length := b - a - if length >= 8 { - random := xorshift(length) - modulus := nextPowerOfTwo(length) - - for idx := a + (length/4)*2 - 1; idx <= a+(length/4)*2+1; idx++ { - other := int(uint(random.Next()) & (modulus - 1)) - if other >= length { - other -= length - } - data[idx], data[a+other] = data[a+other], data[idx] - } - } -} - -// choosePivotOrdered chooses a pivot in data[a:b]. -// -// [0,8): chooses a static pivot. -// [8,shortestNinther): uses the simple median-of-three method. -// [shortestNinther,∞): uses the Tukey ninther method. 
-func choosePivotOrdered[E constraints.Ordered](data []E, a, b int) (pivot int, hint sortedHint) { - const ( - shortestNinther = 50 - maxSwaps = 4 * 3 - ) - - l := b - a - - var ( - swaps int - i = a + l/4*1 - j = a + l/4*2 - k = a + l/4*3 - ) - - if l >= 8 { - if l >= shortestNinther { - // Tukey ninther method, the idea came from Rust's implementation. - i = medianAdjacentOrdered(data, i, &swaps) - j = medianAdjacentOrdered(data, j, &swaps) - k = medianAdjacentOrdered(data, k, &swaps) - } - // Find the median among i, j, k and stores it into j. - j = medianOrdered(data, i, j, k, &swaps) - } - - switch swaps { - case 0: - return j, increasingHint - case maxSwaps: - return j, decreasingHint - default: - return j, unknownHint - } -} - -// order2Ordered returns x,y where data[x] <= data[y], where x,y=a,b or x,y=b,a. -func order2Ordered[E constraints.Ordered](data []E, a, b int, swaps *int) (int, int) { - if cmpLess(data[b], data[a]) { - *swaps++ - return b, a - } - return a, b -} - -// medianOrdered returns x where data[x] is the median of data[a],data[b],data[c], where x is a, b, or c. -func medianOrdered[E constraints.Ordered](data []E, a, b, c int, swaps *int) int { - a, b = order2Ordered(data, a, b, swaps) - b, c = order2Ordered(data, b, c, swaps) - a, b = order2Ordered(data, a, b, swaps) - return b -} - -// medianAdjacentOrdered finds the median of data[a - 1], data[a], data[a + 1] and stores the index into a. -func medianAdjacentOrdered[E constraints.Ordered](data []E, a int, swaps *int) int { - return medianOrdered(data, a-1, a, a+1, swaps) -} - -func reverseRangeOrdered[E constraints.Ordered](data []E, a, b int) { - i := a - j := b - 1 - for i < j { - data[i], data[j] = data[j], data[i] - i++ - j-- - } -} - -func swapRangeOrdered[E constraints.Ordered](data []E, a, b, n int) { - for i := 0; i < n; i++ { - data[a+i], data[b+i] = data[b+i], data[a+i] - } -} - -func stableOrdered[E constraints.Ordered](data []E, n int) { - blockSize := 20 // must be > 0 - a, b := 0, blockSize - for b <= n { - insertionSortOrdered(data, a, b) - a = b - b += blockSize - } - insertionSortOrdered(data, a, n) - - for blockSize < n { - a, b = 0, 2*blockSize - for b <= n { - symMergeOrdered(data, a, a+blockSize, b) - a = b - b += 2 * blockSize - } - if m := a + blockSize; m < n { - symMergeOrdered(data, a, m, n) - } - blockSize *= 2 - } -} - -// symMergeOrdered merges the two sorted subsequences data[a:m] and data[m:b] using -// the SymMerge algorithm from Pok-Son Kim and Arne Kutzner, "Stable Minimum -// Storage Merging by Symmetric Comparisons", in Susanne Albers and Tomasz -// Radzik, editors, Algorithms - ESA 2004, volume 3221 of Lecture Notes in -// Computer Science, pages 714-723. Springer, 2004. -// -// Let M = m-a and N = b-n. Wolog M < N. -// The recursion depth is bound by ceil(log(N+M)). -// The algorithm needs O(M*log(N/M + 1)) calls to data.Less. -// The algorithm needs O((M+N)*log(M)) calls to data.Swap. -// -// The paper gives O((M+N)*log(M)) as the number of assignments assuming a -// rotation algorithm which uses O(M+N+gcd(M+N)) assignments. The argumentation -// in the paper carries through for Swap operations, especially as the block -// swapping rotate uses only O(M+N) Swaps. -// -// symMerge assumes non-degenerate arguments: a < m && m < b. -// Having the caller check this condition eliminates many leaf recursion calls, -// which improves performance. 
-func symMergeOrdered[E constraints.Ordered](data []E, a, m, b int) { - // Avoid unnecessary recursions of symMerge - // by direct insertion of data[a] into data[m:b] - // if data[a:m] only contains one element. - if m-a == 1 { - // Use binary search to find the lowest index i - // such that data[i] >= data[a] for m <= i < b. - // Exit the search loop with i == b in case no such index exists. - i := m - j := b - for i < j { - h := int(uint(i+j) >> 1) - if cmpLess(data[h], data[a]) { - i = h + 1 - } else { - j = h - } - } - // Swap values until data[a] reaches the position before i. - for k := a; k < i-1; k++ { - data[k], data[k+1] = data[k+1], data[k] - } - return - } - - // Avoid unnecessary recursions of symMerge - // by direct insertion of data[m] into data[a:m] - // if data[m:b] only contains one element. - if b-m == 1 { - // Use binary search to find the lowest index i - // such that data[i] > data[m] for a <= i < m. - // Exit the search loop with i == m in case no such index exists. - i := a - j := m - for i < j { - h := int(uint(i+j) >> 1) - if !cmpLess(data[m], data[h]) { - i = h + 1 - } else { - j = h - } - } - // Swap values until data[m] reaches the position i. - for k := m; k > i; k-- { - data[k], data[k-1] = data[k-1], data[k] - } - return - } - - mid := int(uint(a+b) >> 1) - n := mid + m - var start, r int - if m > mid { - start = n - b - r = mid - } else { - start = a - r = m - } - p := n - 1 - - for start < r { - c := int(uint(start+r) >> 1) - if !cmpLess(data[p-c], data[c]) { - start = c + 1 - } else { - r = c - } - } - - end := n - start - if start < m && m < end { - rotateOrdered(data, start, m, end) - } - if a < start && start < mid { - symMergeOrdered(data, a, start, mid) - } - if mid < end && end < b { - symMergeOrdered(data, mid, end, b) - } -} - -// rotateOrdered rotates two consecutive blocks u = data[a:m] and v = data[m:b] in data: -// Data of the form 'x u v y' is changed to 'x v u y'. -// rotate performs at most b-a many calls to data.Swap, -// and it assumes non-degenerate arguments: a < m && m < b. -func rotateOrdered[E constraints.Ordered](data []E, a, m, b int) { - i := m - a - j := b - m - - for i != j { - if i > j { - swapRangeOrdered(data, m-i, m, j) - i -= j - } else { - swapRangeOrdered(data, m-i, m+j-i, i) - j -= i - } - } - // i == j - swapRangeOrdered(data, m-i, m, i) -} diff --git a/vendor/golang.org/x/exp/slog/attr.go b/vendor/golang.org/x/exp/slog/attr.go deleted file mode 100644 index a180d0e1d..000000000 --- a/vendor/golang.org/x/exp/slog/attr.go +++ /dev/null @@ -1,102 +0,0 @@ -// Copyright 2022 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package slog - -import ( - "fmt" - "time" -) - -// An Attr is a key-value pair. -type Attr struct { - Key string - Value Value -} - -// String returns an Attr for a string value. -func String(key, value string) Attr { - return Attr{key, StringValue(value)} -} - -// Int64 returns an Attr for an int64. -func Int64(key string, value int64) Attr { - return Attr{key, Int64Value(value)} -} - -// Int converts an int to an int64 and returns -// an Attr with that value. -func Int(key string, value int) Attr { - return Int64(key, int64(value)) -} - -// Uint64 returns an Attr for a uint64. -func Uint64(key string, v uint64) Attr { - return Attr{key, Uint64Value(v)} -} - -// Float64 returns an Attr for a floating-point number. 
-func Float64(key string, v float64) Attr { - return Attr{key, Float64Value(v)} -} - -// Bool returns an Attr for a bool. -func Bool(key string, v bool) Attr { - return Attr{key, BoolValue(v)} -} - -// Time returns an Attr for a time.Time. -// It discards the monotonic portion. -func Time(key string, v time.Time) Attr { - return Attr{key, TimeValue(v)} -} - -// Duration returns an Attr for a time.Duration. -func Duration(key string, v time.Duration) Attr { - return Attr{key, DurationValue(v)} -} - -// Group returns an Attr for a Group Value. -// The first argument is the key; the remaining arguments -// are converted to Attrs as in [Logger.Log]. -// -// Use Group to collect several key-value pairs under a single -// key on a log line, or as the result of LogValue -// in order to log a single value as multiple Attrs. -func Group(key string, args ...any) Attr { - return Attr{key, GroupValue(argsToAttrSlice(args)...)} -} - -func argsToAttrSlice(args []any) []Attr { - var ( - attr Attr - attrs []Attr - ) - for len(args) > 0 { - attr, args = argsToAttr(args) - attrs = append(attrs, attr) - } - return attrs -} - -// Any returns an Attr for the supplied value. -// See [Value.AnyValue] for how values are treated. -func Any(key string, value any) Attr { - return Attr{key, AnyValue(value)} -} - -// Equal reports whether a and b have equal keys and values. -func (a Attr) Equal(b Attr) bool { - return a.Key == b.Key && a.Value.Equal(b.Value) -} - -func (a Attr) String() string { - return fmt.Sprintf("%s=%s", a.Key, a.Value) -} - -// isEmpty reports whether a has an empty key and a nil value. -// That can be written as Attr{} or Any("", nil). -func (a Attr) isEmpty() bool { - return a.Key == "" && a.Value.num == 0 && a.Value.any == nil -} diff --git a/vendor/golang.org/x/exp/slog/doc.go b/vendor/golang.org/x/exp/slog/doc.go deleted file mode 100644 index 4beaf8674..000000000 --- a/vendor/golang.org/x/exp/slog/doc.go +++ /dev/null @@ -1,316 +0,0 @@ -// Copyright 2022 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -/* -Package slog provides structured logging, -in which log records include a message, -a severity level, and various other attributes -expressed as key-value pairs. - -It defines a type, [Logger], -which provides several methods (such as [Logger.Info] and [Logger.Error]) -for reporting events of interest. - -Each Logger is associated with a [Handler]. -A Logger output method creates a [Record] from the method arguments -and passes it to the Handler, which decides how to handle it. -There is a default Logger accessible through top-level functions -(such as [Info] and [Error]) that call the corresponding Logger methods. - -A log record consists of a time, a level, a message, and a set of key-value -pairs, where the keys are strings and the values may be of any type. -As an example, - - slog.Info("hello", "count", 3) - -creates a record containing the time of the call, -a level of Info, the message "hello", and a single -pair with key "count" and value 3. - -The [Info] top-level function calls the [Logger.Info] method on the default Logger. -In addition to [Logger.Info], there are methods for Debug, Warn and Error levels. -Besides these convenience methods for common levels, -there is also a [Logger.Log] method which takes the level as an argument. -Each of these methods has a corresponding top-level function that uses the -default logger. 
- -The default handler formats the log record's message, time, level, and attributes -as a string and passes it to the [log] package. - - 2022/11/08 15:28:26 INFO hello count=3 - -For more control over the output format, create a logger with a different handler. -This statement uses [New] to create a new logger with a TextHandler -that writes structured records in text form to standard error: - - logger := slog.New(slog.NewTextHandler(os.Stderr, nil)) - -[TextHandler] output is a sequence of key=value pairs, easily and unambiguously -parsed by machine. This statement: - - logger.Info("hello", "count", 3) - -produces this output: - - time=2022-11-08T15:28:26.000-05:00 level=INFO msg=hello count=3 - -The package also provides [JSONHandler], whose output is line-delimited JSON: - - logger := slog.New(slog.NewJSONHandler(os.Stdout, nil)) - logger.Info("hello", "count", 3) - -produces this output: - - {"time":"2022-11-08T15:28:26.000000000-05:00","level":"INFO","msg":"hello","count":3} - -Both [TextHandler] and [JSONHandler] can be configured with [HandlerOptions]. -There are options for setting the minimum level (see Levels, below), -displaying the source file and line of the log call, and -modifying attributes before they are logged. - -Setting a logger as the default with - - slog.SetDefault(logger) - -will cause the top-level functions like [Info] to use it. -[SetDefault] also updates the default logger used by the [log] package, -so that existing applications that use [log.Printf] and related functions -will send log records to the logger's handler without needing to be rewritten. - -Some attributes are common to many log calls. -For example, you may wish to include the URL or trace identifier of a server request -with all log events arising from the request. -Rather than repeat the attribute with every log call, you can use [Logger.With] -to construct a new Logger containing the attributes: - - logger2 := logger.With("url", r.URL) - -The arguments to With are the same key-value pairs used in [Logger.Info]. -The result is a new Logger with the same handler as the original, but additional -attributes that will appear in the output of every call. - -# Levels - -A [Level] is an integer representing the importance or severity of a log event. -The higher the level, the more severe the event. -This package defines constants for the most common levels, -but any int can be used as a level. - -In an application, you may wish to log messages only at a certain level or greater. -One common configuration is to log messages at Info or higher levels, -suppressing debug logging until it is needed. -The built-in handlers can be configured with the minimum level to output by -setting [HandlerOptions.Level]. -The program's `main` function typically does this. -The default value is LevelInfo. - -Setting the [HandlerOptions.Level] field to a [Level] value -fixes the handler's minimum level throughout its lifetime. -Setting it to a [LevelVar] allows the level to be varied dynamically. -A LevelVar holds a Level and is safe to read or write from multiple -goroutines. 
-To vary the level dynamically for an entire program, first initialize -a global LevelVar: - - var programLevel = new(slog.LevelVar) // Info by default - -Then use the LevelVar to construct a handler, and make it the default: - - h := slog.NewJSONHandler(os.Stderr, &slog.HandlerOptions{Level: programLevel}) - slog.SetDefault(slog.New(h)) - -Now the program can change its logging level with a single statement: - - programLevel.Set(slog.LevelDebug) - -# Groups - -Attributes can be collected into groups. -A group has a name that is used to qualify the names of its attributes. -How this qualification is displayed depends on the handler. -[TextHandler] separates the group and attribute names with a dot. -[JSONHandler] treats each group as a separate JSON object, with the group name as the key. - -Use [Group] to create a Group attribute from a name and a list of key-value pairs: - - slog.Group("request", - "method", r.Method, - "url", r.URL) - -TextHandler would display this group as - - request.method=GET request.url=http://example.com - -JSONHandler would display it as - - "request":{"method":"GET","url":"http://example.com"} - -Use [Logger.WithGroup] to qualify all of a Logger's output -with a group name. Calling WithGroup on a Logger results in a -new Logger with the same Handler as the original, but with all -its attributes qualified by the group name. - -This can help prevent duplicate attribute keys in large systems, -where subsystems might use the same keys. -Pass each subsystem a different Logger with its own group name so that -potential duplicates are qualified: - - logger := slog.Default().With("id", systemID) - parserLogger := logger.WithGroup("parser") - parseInput(input, parserLogger) - -When parseInput logs with parserLogger, its keys will be qualified with "parser", -so even if it uses the common key "id", the log line will have distinct keys. - -# Contexts - -Some handlers may wish to include information from the [context.Context] that is -available at the call site. One example of such information -is the identifier for the current span when tracing is enabled. - -The [Logger.Log] and [Logger.LogAttrs] methods take a context as a first -argument, as do their corresponding top-level functions. - -Although the convenience methods on Logger (Info and so on) and the -corresponding top-level functions do not take a context, the alternatives ending -in "Context" do. For example, - - slog.InfoContext(ctx, "message") - -It is recommended to pass a context to an output method if one is available. - -# Attrs and Values - -An [Attr] is a key-value pair. The Logger output methods accept Attrs as well as -alternating keys and values. The statement - - slog.Info("hello", slog.Int("count", 3)) - -behaves the same as - - slog.Info("hello", "count", 3) - -There are convenience constructors for [Attr] such as [Int], [String], and [Bool] -for common types, as well as the function [Any] for constructing Attrs of any -type. - -The value part of an Attr is a type called [Value]. -Like an [any], a Value can hold any Go value, -but it can represent typical values, including all numbers and strings, -without an allocation. - -For the most efficient log output, use [Logger.LogAttrs]. -It is similar to [Logger.Log] but accepts only Attrs, not alternating -keys and values; this allows it, too, to avoid allocation. 
- -The call - - logger.LogAttrs(nil, slog.LevelInfo, "hello", slog.Int("count", 3)) - -is the most efficient way to achieve the same output as - - slog.Info("hello", "count", 3) - -# Customizing a type's logging behavior - -If a type implements the [LogValuer] interface, the [Value] returned from its LogValue -method is used for logging. You can use this to control how values of the type -appear in logs. For example, you can redact secret information like passwords, -or gather a struct's fields in a Group. See the examples under [LogValuer] for -details. - -A LogValue method may return a Value that itself implements [LogValuer]. The [Value.Resolve] -method handles these cases carefully, avoiding infinite loops and unbounded recursion. -Handler authors and others may wish to use Value.Resolve instead of calling LogValue directly. - -# Wrapping output methods - -The logger functions use reflection over the call stack to find the file name -and line number of the logging call within the application. This can produce -incorrect source information for functions that wrap slog. For instance, if you -define this function in file mylog.go: - - func Infof(format string, args ...any) { - slog.Default().Info(fmt.Sprintf(format, args...)) - } - -and you call it like this in main.go: - - Infof(slog.Default(), "hello, %s", "world") - -then slog will report the source file as mylog.go, not main.go. - -A correct implementation of Infof will obtain the source location -(pc) and pass it to NewRecord. -The Infof function in the package-level example called "wrapping" -demonstrates how to do this. - -# Working with Records - -Sometimes a Handler will need to modify a Record -before passing it on to another Handler or backend. -A Record contains a mixture of simple public fields (e.g. Time, Level, Message) -and hidden fields that refer to state (such as attributes) indirectly. This -means that modifying a simple copy of a Record (e.g. by calling -[Record.Add] or [Record.AddAttrs] to add attributes) -may have unexpected effects on the original. -Before modifying a Record, use [Clone] to -create a copy that shares no state with the original, -or create a new Record with [NewRecord] -and build up its Attrs by traversing the old ones with [Record.Attrs]. - -# Performance considerations - -If profiling your application demonstrates that logging is taking significant time, -the following suggestions may help. - -If many log lines have a common attribute, use [Logger.With] to create a Logger with -that attribute. The built-in handlers will format that attribute only once, at the -call to [Logger.With]. The [Handler] interface is designed to allow that optimization, -and a well-written Handler should take advantage of it. - -The arguments to a log call are always evaluated, even if the log event is discarded. -If possible, defer computation so that it happens only if the value is actually logged. -For example, consider the call - - slog.Info("starting request", "url", r.URL.String()) // may compute String unnecessarily - -The URL.String method will be called even if the logger discards Info-level events. -Instead, pass the URL directly: - - slog.Info("starting request", "url", &r.URL) // calls URL.String only if needed - -The built-in [TextHandler] will call its String method, but only -if the log event is enabled. -Avoiding the call to String also preserves the structure of the underlying value. -For example [JSONHandler] emits the components of the parsed URL as a JSON object. 
-If you want to avoid eagerly paying the cost of the String call -without causing the handler to potentially inspect the structure of the value, -wrap the value in a fmt.Stringer implementation that hides its Marshal methods. - -You can also use the [LogValuer] interface to avoid unnecessary work in disabled log -calls. Say you need to log some expensive value: - - slog.Debug("frobbing", "value", computeExpensiveValue(arg)) - -Even if this line is disabled, computeExpensiveValue will be called. -To avoid that, define a type implementing LogValuer: - - type expensive struct { arg int } - - func (e expensive) LogValue() slog.Value { - return slog.AnyValue(computeExpensiveValue(e.arg)) - } - -Then use a value of that type in log calls: - - slog.Debug("frobbing", "value", expensive{arg}) - -Now computeExpensiveValue will only be called when the line is enabled. - -The built-in handlers acquire a lock before calling [io.Writer.Write] -to ensure that each record is written in one piece. User-defined -handlers are responsible for their own locking. -*/ -package slog diff --git a/vendor/golang.org/x/exp/slog/handler.go b/vendor/golang.org/x/exp/slog/handler.go deleted file mode 100644 index bd635cb81..000000000 --- a/vendor/golang.org/x/exp/slog/handler.go +++ /dev/null @@ -1,577 +0,0 @@ -// Copyright 2022 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package slog - -import ( - "context" - "fmt" - "io" - "reflect" - "strconv" - "sync" - "time" - - "golang.org/x/exp/slices" - "golang.org/x/exp/slog/internal/buffer" -) - -// A Handler handles log records produced by a Logger.. -// -// A typical handler may print log records to standard error, -// or write them to a file or database, or perhaps augment them -// with additional attributes and pass them on to another handler. -// -// Any of the Handler's methods may be called concurrently with itself -// or with other methods. It is the responsibility of the Handler to -// manage this concurrency. -// -// Users of the slog package should not invoke Handler methods directly. -// They should use the methods of [Logger] instead. -type Handler interface { - // Enabled reports whether the handler handles records at the given level. - // The handler ignores records whose level is lower. - // It is called early, before any arguments are processed, - // to save effort if the log event should be discarded. - // If called from a Logger method, the first argument is the context - // passed to that method, or context.Background() if nil was passed - // or the method does not take a context. - // The context is passed so Enabled can use its values - // to make a decision. - Enabled(context.Context, Level) bool - - // Handle handles the Record. - // It will only be called when Enabled returns true. - // The Context argument is as for Enabled. - // It is present solely to provide Handlers access to the context's values. - // Canceling the context should not affect record processing. - // (Among other things, log messages may be necessary to debug a - // cancellation-related problem.) - // - // Handle methods that produce output should observe the following rules: - // - If r.Time is the zero time, ignore the time. - // - If r.PC is zero, ignore it. - // - Attr's values should be resolved. - // - If an Attr's key and value are both the zero value, ignore the Attr. - // This can be tested with attr.Equal(Attr{}). 
- // - If a group's key is empty, inline the group's Attrs. - // - If a group has no Attrs (even if it has a non-empty key), - // ignore it. - Handle(context.Context, Record) error - - // WithAttrs returns a new Handler whose attributes consist of - // both the receiver's attributes and the arguments. - // The Handler owns the slice: it may retain, modify or discard it. - WithAttrs(attrs []Attr) Handler - - // WithGroup returns a new Handler with the given group appended to - // the receiver's existing groups. - // The keys of all subsequent attributes, whether added by With or in a - // Record, should be qualified by the sequence of group names. - // - // How this qualification happens is up to the Handler, so long as - // this Handler's attribute keys differ from those of another Handler - // with a different sequence of group names. - // - // A Handler should treat WithGroup as starting a Group of Attrs that ends - // at the end of the log event. That is, - // - // logger.WithGroup("s").LogAttrs(level, msg, slog.Int("a", 1), slog.Int("b", 2)) - // - // should behave like - // - // logger.LogAttrs(level, msg, slog.Group("s", slog.Int("a", 1), slog.Int("b", 2))) - // - // If the name is empty, WithGroup returns the receiver. - WithGroup(name string) Handler -} - -type defaultHandler struct { - ch *commonHandler - // log.Output, except for testing - output func(calldepth int, message string) error -} - -func newDefaultHandler(output func(int, string) error) *defaultHandler { - return &defaultHandler{ - ch: &commonHandler{json: false}, - output: output, - } -} - -func (*defaultHandler) Enabled(_ context.Context, l Level) bool { - return l >= LevelInfo -} - -// Collect the level, attributes and message in a string and -// write it with the default log.Logger. -// Let the log.Logger handle time and file/line. -func (h *defaultHandler) Handle(ctx context.Context, r Record) error { - buf := buffer.New() - buf.WriteString(r.Level.String()) - buf.WriteByte(' ') - buf.WriteString(r.Message) - state := h.ch.newHandleState(buf, true, " ", nil) - defer state.free() - state.appendNonBuiltIns(r) - - // skip [h.output, defaultHandler.Handle, handlerWriter.Write, log.Output] - return h.output(4, buf.String()) -} - -func (h *defaultHandler) WithAttrs(as []Attr) Handler { - return &defaultHandler{h.ch.withAttrs(as), h.output} -} - -func (h *defaultHandler) WithGroup(name string) Handler { - return &defaultHandler{h.ch.withGroup(name), h.output} -} - -// HandlerOptions are options for a TextHandler or JSONHandler. -// A zero HandlerOptions consists entirely of default values. -type HandlerOptions struct { - // AddSource causes the handler to compute the source code position - // of the log statement and add a SourceKey attribute to the output. - AddSource bool - - // Level reports the minimum record level that will be logged. - // The handler discards records with lower levels. - // If Level is nil, the handler assumes LevelInfo. - // The handler calls Level.Level for each record processed; - // to adjust the minimum level dynamically, use a LevelVar. - Level Leveler - - // ReplaceAttr is called to rewrite each non-group attribute before it is logged. - // The attribute's value has been resolved (see [Value.Resolve]). - // If ReplaceAttr returns an Attr with Key == "", the attribute is discarded. - // - // The built-in attributes with keys "time", "level", "source", and "msg" - // are passed to this function, except that time is omitted - // if zero, and source is omitted if AddSource is false. 
- // - // The first argument is a list of currently open groups that contain the - // Attr. It must not be retained or modified. ReplaceAttr is never called - // for Group attributes, only their contents. For example, the attribute - // list - // - // Int("a", 1), Group("g", Int("b", 2)), Int("c", 3) - // - // results in consecutive calls to ReplaceAttr with the following arguments: - // - // nil, Int("a", 1) - // []string{"g"}, Int("b", 2) - // nil, Int("c", 3) - // - // ReplaceAttr can be used to change the default keys of the built-in - // attributes, convert types (for example, to replace a `time.Time` with the - // integer seconds since the Unix epoch), sanitize personal information, or - // remove attributes from the output. - ReplaceAttr func(groups []string, a Attr) Attr -} - -// Keys for "built-in" attributes. -const ( - // TimeKey is the key used by the built-in handlers for the time - // when the log method is called. The associated Value is a [time.Time]. - TimeKey = "time" - // LevelKey is the key used by the built-in handlers for the level - // of the log call. The associated value is a [Level]. - LevelKey = "level" - // MessageKey is the key used by the built-in handlers for the - // message of the log call. The associated value is a string. - MessageKey = "msg" - // SourceKey is the key used by the built-in handlers for the source file - // and line of the log call. The associated value is a string. - SourceKey = "source" -) - -type commonHandler struct { - json bool // true => output JSON; false => output text - opts HandlerOptions - preformattedAttrs []byte - groupPrefix string // for text: prefix of groups opened in preformatting - groups []string // all groups started from WithGroup - nOpenGroups int // the number of groups opened in preformattedAttrs - mu sync.Mutex - w io.Writer -} - -func (h *commonHandler) clone() *commonHandler { - // We can't use assignment because we can't copy the mutex. - return &commonHandler{ - json: h.json, - opts: h.opts, - preformattedAttrs: slices.Clip(h.preformattedAttrs), - groupPrefix: h.groupPrefix, - groups: slices.Clip(h.groups), - nOpenGroups: h.nOpenGroups, - w: h.w, - } -} - -// enabled reports whether l is greater than or equal to the -// minimum level. -func (h *commonHandler) enabled(l Level) bool { - minLevel := LevelInfo - if h.opts.Level != nil { - minLevel = h.opts.Level.Level() - } - return l >= minLevel -} - -func (h *commonHandler) withAttrs(as []Attr) *commonHandler { - h2 := h.clone() - // Pre-format the attributes as an optimization. - prefix := buffer.New() - defer prefix.Free() - prefix.WriteString(h.groupPrefix) - state := h2.newHandleState((*buffer.Buffer)(&h2.preformattedAttrs), false, "", prefix) - defer state.free() - if len(h2.preformattedAttrs) > 0 { - state.sep = h.attrSep() - } - state.openGroups() - for _, a := range as { - state.appendAttr(a) - } - // Remember the new prefix for later keys. - h2.groupPrefix = state.prefix.String() - // Remember how many opened groups are in preformattedAttrs, - // so we don't open them again when we handle a Record. - h2.nOpenGroups = len(h2.groups) - return h2 -} - -func (h *commonHandler) withGroup(name string) *commonHandler { - if name == "" { - return h - } - h2 := h.clone() - h2.groups = append(h2.groups, name) - return h2 -} - -func (h *commonHandler) handle(r Record) error { - state := h.newHandleState(buffer.New(), true, "", nil) - defer state.free() - if h.json { - state.buf.WriteByte('{') - } - // Built-in attributes. They are not in a group. 
- stateGroups := state.groups - state.groups = nil // So ReplaceAttrs sees no groups instead of the pre groups. - rep := h.opts.ReplaceAttr - // time - if !r.Time.IsZero() { - key := TimeKey - val := r.Time.Round(0) // strip monotonic to match Attr behavior - if rep == nil { - state.appendKey(key) - state.appendTime(val) - } else { - state.appendAttr(Time(key, val)) - } - } - // level - key := LevelKey - val := r.Level - if rep == nil { - state.appendKey(key) - state.appendString(val.String()) - } else { - state.appendAttr(Any(key, val)) - } - // source - if h.opts.AddSource { - state.appendAttr(Any(SourceKey, r.source())) - } - key = MessageKey - msg := r.Message - if rep == nil { - state.appendKey(key) - state.appendString(msg) - } else { - state.appendAttr(String(key, msg)) - } - state.groups = stateGroups // Restore groups passed to ReplaceAttrs. - state.appendNonBuiltIns(r) - state.buf.WriteByte('\n') - - h.mu.Lock() - defer h.mu.Unlock() - _, err := h.w.Write(*state.buf) - return err -} - -func (s *handleState) appendNonBuiltIns(r Record) { - // preformatted Attrs - if len(s.h.preformattedAttrs) > 0 { - s.buf.WriteString(s.sep) - s.buf.Write(s.h.preformattedAttrs) - s.sep = s.h.attrSep() - } - // Attrs in Record -- unlike the built-in ones, they are in groups started - // from WithGroup. - s.prefix = buffer.New() - defer s.prefix.Free() - s.prefix.WriteString(s.h.groupPrefix) - s.openGroups() - r.Attrs(func(a Attr) bool { - s.appendAttr(a) - return true - }) - if s.h.json { - // Close all open groups. - for range s.h.groups { - s.buf.WriteByte('}') - } - // Close the top-level object. - s.buf.WriteByte('}') - } -} - -// attrSep returns the separator between attributes. -func (h *commonHandler) attrSep() string { - if h.json { - return "," - } - return " " -} - -// handleState holds state for a single call to commonHandler.handle. -// The initial value of sep determines whether to emit a separator -// before the next key, after which it stays true. -type handleState struct { - h *commonHandler - buf *buffer.Buffer - freeBuf bool // should buf be freed? - sep string // separator to write before next key - prefix *buffer.Buffer // for text: key prefix - groups *[]string // pool-allocated slice of active groups, for ReplaceAttr -} - -var groupPool = sync.Pool{New: func() any { - s := make([]string, 0, 10) - return &s -}} - -func (h *commonHandler) newHandleState(buf *buffer.Buffer, freeBuf bool, sep string, prefix *buffer.Buffer) handleState { - s := handleState{ - h: h, - buf: buf, - freeBuf: freeBuf, - sep: sep, - prefix: prefix, - } - if h.opts.ReplaceAttr != nil { - s.groups = groupPool.Get().(*[]string) - *s.groups = append(*s.groups, h.groups[:h.nOpenGroups]...) - } - return s -} - -func (s *handleState) free() { - if s.freeBuf { - s.buf.Free() - } - if gs := s.groups; gs != nil { - *gs = (*gs)[:0] - groupPool.Put(gs) - } -} - -func (s *handleState) openGroups() { - for _, n := range s.h.groups[s.h.nOpenGroups:] { - s.openGroup(n) - } -} - -// Separator for group names and keys. -const keyComponentSep = '.' - -// openGroup starts a new group of attributes -// with the given name. -func (s *handleState) openGroup(name string) { - if s.h.json { - s.appendKey(name) - s.buf.WriteByte('{') - s.sep = "" - } else { - s.prefix.WriteString(name) - s.prefix.WriteByte(keyComponentSep) - } - // Collect group names for ReplaceAttr. - if s.groups != nil { - *s.groups = append(*s.groups, name) - } -} - -// closeGroup ends the group with the given name. 
-func (s *handleState) closeGroup(name string) { - if s.h.json { - s.buf.WriteByte('}') - } else { - (*s.prefix) = (*s.prefix)[:len(*s.prefix)-len(name)-1 /* for keyComponentSep */] - } - s.sep = s.h.attrSep() - if s.groups != nil { - *s.groups = (*s.groups)[:len(*s.groups)-1] - } -} - -// appendAttr appends the Attr's key and value using app. -// It handles replacement and checking for an empty key. -// after replacement). -func (s *handleState) appendAttr(a Attr) { - if rep := s.h.opts.ReplaceAttr; rep != nil && a.Value.Kind() != KindGroup { - var gs []string - if s.groups != nil { - gs = *s.groups - } - // Resolve before calling ReplaceAttr, so the user doesn't have to. - a.Value = a.Value.Resolve() - a = rep(gs, a) - } - a.Value = a.Value.Resolve() - // Elide empty Attrs. - if a.isEmpty() { - return - } - // Special case: Source. - if v := a.Value; v.Kind() == KindAny { - if src, ok := v.Any().(*Source); ok { - if s.h.json { - a.Value = src.group() - } else { - a.Value = StringValue(fmt.Sprintf("%s:%d", src.File, src.Line)) - } - } - } - if a.Value.Kind() == KindGroup { - attrs := a.Value.Group() - // Output only non-empty groups. - if len(attrs) > 0 { - // Inline a group with an empty key. - if a.Key != "" { - s.openGroup(a.Key) - } - for _, aa := range attrs { - s.appendAttr(aa) - } - if a.Key != "" { - s.closeGroup(a.Key) - } - } - } else { - s.appendKey(a.Key) - s.appendValue(a.Value) - } -} - -func (s *handleState) appendError(err error) { - s.appendString(fmt.Sprintf("!ERROR:%v", err)) -} - -func (s *handleState) appendKey(key string) { - s.buf.WriteString(s.sep) - if s.prefix != nil { - // TODO: optimize by avoiding allocation. - s.appendString(string(*s.prefix) + key) - } else { - s.appendString(key) - } - if s.h.json { - s.buf.WriteByte(':') - } else { - s.buf.WriteByte('=') - } - s.sep = s.h.attrSep() -} - -func (s *handleState) appendString(str string) { - if s.h.json { - s.buf.WriteByte('"') - *s.buf = appendEscapedJSONString(*s.buf, str) - s.buf.WriteByte('"') - } else { - // text - if needsQuoting(str) { - *s.buf = strconv.AppendQuote(*s.buf, str) - } else { - s.buf.WriteString(str) - } - } -} - -func (s *handleState) appendValue(v Value) { - defer func() { - if r := recover(); r != nil { - // If it panics with a nil pointer, the most likely cases are - // an encoding.TextMarshaler or error fails to guard against nil, - // in which case "" seems to be the feasible choice. - // - // Adapted from the code in fmt/print.go. - if v := reflect.ValueOf(v.any); v.Kind() == reflect.Pointer && v.IsNil() { - s.appendString("") - return - } - - // Otherwise just print the original panic message. - s.appendString(fmt.Sprintf("!PANIC: %v", r)) - } - }() - - var err error - if s.h.json { - err = appendJSONValue(s, v) - } else { - err = appendTextValue(s, v) - } - if err != nil { - s.appendError(err) - } -} - -func (s *handleState) appendTime(t time.Time) { - if s.h.json { - appendJSONTime(s, t) - } else { - writeTimeRFC3339Millis(s.buf, t) - } -} - -// This takes half the time of Time.AppendFormat. 
-func writeTimeRFC3339Millis(buf *buffer.Buffer, t time.Time) { - year, month, day := t.Date() - buf.WritePosIntWidth(year, 4) - buf.WriteByte('-') - buf.WritePosIntWidth(int(month), 2) - buf.WriteByte('-') - buf.WritePosIntWidth(day, 2) - buf.WriteByte('T') - hour, min, sec := t.Clock() - buf.WritePosIntWidth(hour, 2) - buf.WriteByte(':') - buf.WritePosIntWidth(min, 2) - buf.WriteByte(':') - buf.WritePosIntWidth(sec, 2) - ns := t.Nanosecond() - buf.WriteByte('.') - buf.WritePosIntWidth(ns/1e6, 3) - _, offsetSeconds := t.Zone() - if offsetSeconds == 0 { - buf.WriteByte('Z') - } else { - offsetMinutes := offsetSeconds / 60 - if offsetMinutes < 0 { - buf.WriteByte('-') - offsetMinutes = -offsetMinutes - } else { - buf.WriteByte('+') - } - buf.WritePosIntWidth(offsetMinutes/60, 2) - buf.WriteByte(':') - buf.WritePosIntWidth(offsetMinutes%60, 2) - } -} diff --git a/vendor/golang.org/x/exp/slog/internal/buffer/buffer.go b/vendor/golang.org/x/exp/slog/internal/buffer/buffer.go deleted file mode 100644 index 7786c166e..000000000 --- a/vendor/golang.org/x/exp/slog/internal/buffer/buffer.go +++ /dev/null @@ -1,84 +0,0 @@ -// Copyright 2022 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Package buffer provides a pool-allocated byte buffer. -package buffer - -import ( - "sync" -) - -// Buffer adapted from go/src/fmt/print.go -type Buffer []byte - -// Having an initial size gives a dramatic speedup. -var bufPool = sync.Pool{ - New: func() any { - b := make([]byte, 0, 1024) - return (*Buffer)(&b) - }, -} - -func New() *Buffer { - return bufPool.Get().(*Buffer) -} - -func (b *Buffer) Free() { - // To reduce peak allocation, return only smaller buffers to the pool. - const maxBufferSize = 16 << 10 - if cap(*b) <= maxBufferSize { - *b = (*b)[:0] - bufPool.Put(b) - } -} - -func (b *Buffer) Reset() { - *b = (*b)[:0] -} - -func (b *Buffer) Write(p []byte) (int, error) { - *b = append(*b, p...) - return len(p), nil -} - -func (b *Buffer) WriteString(s string) { - *b = append(*b, s...) -} - -func (b *Buffer) WriteByte(c byte) { - *b = append(*b, c) -} - -func (b *Buffer) WritePosInt(i int) { - b.WritePosIntWidth(i, 0) -} - -// WritePosIntWidth writes non-negative integer i to the buffer, padded on the left -// by zeroes to the given width. Use a width of 0 to omit padding. -func (b *Buffer) WritePosIntWidth(i, width int) { - // Cheap integer to fixed-width decimal ASCII. - // Copied from log/log.go. - - if i < 0 { - panic("negative int") - } - - // Assemble decimal in reverse order. - var bb [20]byte - bp := len(bb) - 1 - for i >= 10 || width > 1 { - width-- - q := i / 10 - bb[bp] = byte('0' + i - q*10) - bp-- - i = q - } - // i < 10 - bb[bp] = byte('0' + i) - b.Write(bb[bp:]) -} - -func (b *Buffer) String() string { - return string(*b) -} diff --git a/vendor/golang.org/x/exp/slog/internal/ignorepc.go b/vendor/golang.org/x/exp/slog/internal/ignorepc.go deleted file mode 100644 index d1256426f..000000000 --- a/vendor/golang.org/x/exp/slog/internal/ignorepc.go +++ /dev/null @@ -1,9 +0,0 @@ -// Copyright 2023 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package internal - -// If IgnorePC is true, do not invoke runtime.Callers to get the pc. -// This is solely for benchmarking the slowdown from runtime.Callers. 
-var IgnorePC = false diff --git a/vendor/golang.org/x/exp/slog/json_handler.go b/vendor/golang.org/x/exp/slog/json_handler.go deleted file mode 100644 index 157ada869..000000000 --- a/vendor/golang.org/x/exp/slog/json_handler.go +++ /dev/null @@ -1,336 +0,0 @@ -// Copyright 2022 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package slog - -import ( - "bytes" - "context" - "encoding/json" - "errors" - "fmt" - "io" - "strconv" - "time" - "unicode/utf8" - - "golang.org/x/exp/slog/internal/buffer" -) - -// JSONHandler is a Handler that writes Records to an io.Writer as -// line-delimited JSON objects. -type JSONHandler struct { - *commonHandler -} - -// NewJSONHandler creates a JSONHandler that writes to w, -// using the given options. -// If opts is nil, the default options are used. -func NewJSONHandler(w io.Writer, opts *HandlerOptions) *JSONHandler { - if opts == nil { - opts = &HandlerOptions{} - } - return &JSONHandler{ - &commonHandler{ - json: true, - w: w, - opts: *opts, - }, - } -} - -// Enabled reports whether the handler handles records at the given level. -// The handler ignores records whose level is lower. -func (h *JSONHandler) Enabled(_ context.Context, level Level) bool { - return h.commonHandler.enabled(level) -} - -// WithAttrs returns a new JSONHandler whose attributes consists -// of h's attributes followed by attrs. -func (h *JSONHandler) WithAttrs(attrs []Attr) Handler { - return &JSONHandler{commonHandler: h.commonHandler.withAttrs(attrs)} -} - -func (h *JSONHandler) WithGroup(name string) Handler { - return &JSONHandler{commonHandler: h.commonHandler.withGroup(name)} -} - -// Handle formats its argument Record as a JSON object on a single line. -// -// If the Record's time is zero, the time is omitted. -// Otherwise, the key is "time" -// and the value is output as with json.Marshal. -// -// If the Record's level is zero, the level is omitted. -// Otherwise, the key is "level" -// and the value of [Level.String] is output. -// -// If the AddSource option is set and source information is available, -// the key is "source" -// and the value is output as "FILE:LINE". -// -// The message's key is "msg". -// -// To modify these or other attributes, or remove them from the output, use -// [HandlerOptions.ReplaceAttr]. -// -// Values are formatted as with an [encoding/json.Encoder] with SetEscapeHTML(false), -// with two exceptions. -// -// First, an Attr whose Value is of type error is formatted as a string, by -// calling its Error method. Only errors in Attrs receive this special treatment, -// not errors embedded in structs, slices, maps or other data structures that -// are processed by the encoding/json package. -// -// Second, an encoding failure does not cause Handle to return an error. -// Instead, the error message is formatted as a string. -// -// Each call to Handle results in a single serialized call to io.Writer.Write. -func (h *JSONHandler) Handle(_ context.Context, r Record) error { - return h.commonHandler.handle(r) -} - -// Adapted from time.Time.MarshalJSON to avoid allocation. -func appendJSONTime(s *handleState, t time.Time) { - if y := t.Year(); y < 0 || y >= 10000 { - // RFC 3339 is clear that years are 4 digits exactly. - // See golang.org/issue/4556#c15 for more discussion. 
- s.appendError(errors.New("time.Time year outside of range [0,9999]")) - } - s.buf.WriteByte('"') - *s.buf = t.AppendFormat(*s.buf, time.RFC3339Nano) - s.buf.WriteByte('"') -} - -func appendJSONValue(s *handleState, v Value) error { - switch v.Kind() { - case KindString: - s.appendString(v.str()) - case KindInt64: - *s.buf = strconv.AppendInt(*s.buf, v.Int64(), 10) - case KindUint64: - *s.buf = strconv.AppendUint(*s.buf, v.Uint64(), 10) - case KindFloat64: - // json.Marshal is funny about floats; it doesn't - // always match strconv.AppendFloat. So just call it. - // That's expensive, but floats are rare. - if err := appendJSONMarshal(s.buf, v.Float64()); err != nil { - return err - } - case KindBool: - *s.buf = strconv.AppendBool(*s.buf, v.Bool()) - case KindDuration: - // Do what json.Marshal does. - *s.buf = strconv.AppendInt(*s.buf, int64(v.Duration()), 10) - case KindTime: - s.appendTime(v.Time()) - case KindAny: - a := v.Any() - _, jm := a.(json.Marshaler) - if err, ok := a.(error); ok && !jm { - s.appendString(err.Error()) - } else { - return appendJSONMarshal(s.buf, a) - } - default: - panic(fmt.Sprintf("bad kind: %s", v.Kind())) - } - return nil -} - -func appendJSONMarshal(buf *buffer.Buffer, v any) error { - // Use a json.Encoder to avoid escaping HTML. - var bb bytes.Buffer - enc := json.NewEncoder(&bb) - enc.SetEscapeHTML(false) - if err := enc.Encode(v); err != nil { - return err - } - bs := bb.Bytes() - buf.Write(bs[:len(bs)-1]) // remove final newline - return nil -} - -// appendEscapedJSONString escapes s for JSON and appends it to buf. -// It does not surround the string in quotation marks. -// -// Modified from encoding/json/encode.go:encodeState.string, -// with escapeHTML set to false. -func appendEscapedJSONString(buf []byte, s string) []byte { - char := func(b byte) { buf = append(buf, b) } - str := func(s string) { buf = append(buf, s...) } - - start := 0 - for i := 0; i < len(s); { - if b := s[i]; b < utf8.RuneSelf { - if safeSet[b] { - i++ - continue - } - if start < i { - str(s[start:i]) - } - char('\\') - switch b { - case '\\', '"': - char(b) - case '\n': - char('n') - case '\r': - char('r') - case '\t': - char('t') - default: - // This encodes bytes < 0x20 except for \t, \n and \r. - str(`u00`) - char(hex[b>>4]) - char(hex[b&0xF]) - } - i++ - start = i - continue - } - c, size := utf8.DecodeRuneInString(s[i:]) - if c == utf8.RuneError && size == 1 { - if start < i { - str(s[start:i]) - } - str(`\ufffd`) - i += size - start = i - continue - } - // U+2028 is LINE SEPARATOR. - // U+2029 is PARAGRAPH SEPARATOR. - // They are both technically valid characters in JSON strings, - // but don't work in JSONP, which has to be evaluated as JavaScript, - // and can lead to security holes there. It is valid JSON to - // escape them, so we do so unconditionally. - // See http://timelessrepo.com/json-isnt-a-javascript-subset for discussion. - if c == '\u2028' || c == '\u2029' { - if start < i { - str(s[start:i]) - } - str(`\u202`) - char(hex[c&0xF]) - i += size - start = i - continue - } - i += size - } - if start < len(s) { - str(s[start:]) - } - return buf -} - -var hex = "0123456789abcdef" - -// Copied from encoding/json/tables.go. -// -// safeSet holds the value true if the ASCII character with the given array -// position can be represented inside a JSON string without any further -// escaping. -// -// All values are true except for the ASCII control characters (0-31), the -// double quote ("), and the backslash character ("\"). 
-var safeSet = [utf8.RuneSelf]bool{ - ' ': true, - '!': true, - '"': false, - '#': true, - '$': true, - '%': true, - '&': true, - '\'': true, - '(': true, - ')': true, - '*': true, - '+': true, - ',': true, - '-': true, - '.': true, - '/': true, - '0': true, - '1': true, - '2': true, - '3': true, - '4': true, - '5': true, - '6': true, - '7': true, - '8': true, - '9': true, - ':': true, - ';': true, - '<': true, - '=': true, - '>': true, - '?': true, - '@': true, - 'A': true, - 'B': true, - 'C': true, - 'D': true, - 'E': true, - 'F': true, - 'G': true, - 'H': true, - 'I': true, - 'J': true, - 'K': true, - 'L': true, - 'M': true, - 'N': true, - 'O': true, - 'P': true, - 'Q': true, - 'R': true, - 'S': true, - 'T': true, - 'U': true, - 'V': true, - 'W': true, - 'X': true, - 'Y': true, - 'Z': true, - '[': true, - '\\': false, - ']': true, - '^': true, - '_': true, - '`': true, - 'a': true, - 'b': true, - 'c': true, - 'd': true, - 'e': true, - 'f': true, - 'g': true, - 'h': true, - 'i': true, - 'j': true, - 'k': true, - 'l': true, - 'm': true, - 'n': true, - 'o': true, - 'p': true, - 'q': true, - 'r': true, - 's': true, - 't': true, - 'u': true, - 'v': true, - 'w': true, - 'x': true, - 'y': true, - 'z': true, - '{': true, - '|': true, - '}': true, - '~': true, - '\u007f': true, -} diff --git a/vendor/golang.org/x/exp/slog/level.go b/vendor/golang.org/x/exp/slog/level.go deleted file mode 100644 index b2365f0aa..000000000 --- a/vendor/golang.org/x/exp/slog/level.go +++ /dev/null @@ -1,201 +0,0 @@ -// Copyright 2022 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package slog - -import ( - "errors" - "fmt" - "strconv" - "strings" - "sync/atomic" -) - -// A Level is the importance or severity of a log event. -// The higher the level, the more important or severe the event. -type Level int - -// Level numbers are inherently arbitrary, -// but we picked them to satisfy three constraints. -// Any system can map them to another numbering scheme if it wishes. -// -// First, we wanted the default level to be Info, Since Levels are ints, Info is -// the default value for int, zero. -// - -// Second, we wanted to make it easy to use levels to specify logger verbosity. -// Since a larger level means a more severe event, a logger that accepts events -// with smaller (or more negative) level means a more verbose logger. Logger -// verbosity is thus the negation of event severity, and the default verbosity -// of 0 accepts all events at least as severe as INFO. -// -// Third, we wanted some room between levels to accommodate schemes with named -// levels between ours. For example, Google Cloud Logging defines a Notice level -// between Info and Warn. Since there are only a few of these intermediate -// levels, the gap between the numbers need not be large. Our gap of 4 matches -// OpenTelemetry's mapping. Subtracting 9 from an OpenTelemetry level in the -// DEBUG, INFO, WARN and ERROR ranges converts it to the corresponding slog -// Level range. OpenTelemetry also has the names TRACE and FATAL, which slog -// does not. But those OpenTelemetry levels can still be represented as slog -// Levels by using the appropriate integers. -// -// Names for common levels. -const ( - LevelDebug Level = -4 - LevelInfo Level = 0 - LevelWarn Level = 4 - LevelError Level = 8 -) - -// String returns a name for the level. -// If the level has a name, then that name -// in uppercase is returned. 
-// If the level is between named values, then -// an integer is appended to the uppercased name. -// Examples: -// -// LevelWarn.String() => "WARN" -// (LevelInfo+2).String() => "INFO+2" -func (l Level) String() string { - str := func(base string, val Level) string { - if val == 0 { - return base - } - return fmt.Sprintf("%s%+d", base, val) - } - - switch { - case l < LevelInfo: - return str("DEBUG", l-LevelDebug) - case l < LevelWarn: - return str("INFO", l-LevelInfo) - case l < LevelError: - return str("WARN", l-LevelWarn) - default: - return str("ERROR", l-LevelError) - } -} - -// MarshalJSON implements [encoding/json.Marshaler] -// by quoting the output of [Level.String]. -func (l Level) MarshalJSON() ([]byte, error) { - // AppendQuote is sufficient for JSON-encoding all Level strings. - // They don't contain any runes that would produce invalid JSON - // when escaped. - return strconv.AppendQuote(nil, l.String()), nil -} - -// UnmarshalJSON implements [encoding/json.Unmarshaler] -// It accepts any string produced by [Level.MarshalJSON], -// ignoring case. -// It also accepts numeric offsets that would result in a different string on -// output. For example, "Error-8" would marshal as "INFO". -func (l *Level) UnmarshalJSON(data []byte) error { - s, err := strconv.Unquote(string(data)) - if err != nil { - return err - } - return l.parse(s) -} - -// MarshalText implements [encoding.TextMarshaler] -// by calling [Level.String]. -func (l Level) MarshalText() ([]byte, error) { - return []byte(l.String()), nil -} - -// UnmarshalText implements [encoding.TextUnmarshaler]. -// It accepts any string produced by [Level.MarshalText], -// ignoring case. -// It also accepts numeric offsets that would result in a different string on -// output. For example, "Error-8" would marshal as "INFO". -func (l *Level) UnmarshalText(data []byte) error { - return l.parse(string(data)) -} - -func (l *Level) parse(s string) (err error) { - defer func() { - if err != nil { - err = fmt.Errorf("slog: level string %q: %w", s, err) - } - }() - - name := s - offset := 0 - if i := strings.IndexAny(s, "+-"); i >= 0 { - name = s[:i] - offset, err = strconv.Atoi(s[i:]) - if err != nil { - return err - } - } - switch strings.ToUpper(name) { - case "DEBUG": - *l = LevelDebug - case "INFO": - *l = LevelInfo - case "WARN": - *l = LevelWarn - case "ERROR": - *l = LevelError - default: - return errors.New("unknown name") - } - *l += Level(offset) - return nil -} - -// Level returns the receiver. -// It implements Leveler. -func (l Level) Level() Level { return l } - -// A LevelVar is a Level variable, to allow a Handler level to change -// dynamically. -// It implements Leveler as well as a Set method, -// and it is safe for use by multiple goroutines. -// The zero LevelVar corresponds to LevelInfo. -type LevelVar struct { - val atomic.Int64 -} - -// Level returns v's level. -func (v *LevelVar) Level() Level { - return Level(int(v.val.Load())) -} - -// Set sets v's level to l. -func (v *LevelVar) Set(l Level) { - v.val.Store(int64(l)) -} - -func (v *LevelVar) String() string { - return fmt.Sprintf("LevelVar(%s)", v.Level()) -} - -// MarshalText implements [encoding.TextMarshaler] -// by calling [Level.MarshalText]. -func (v *LevelVar) MarshalText() ([]byte, error) { - return v.Level().MarshalText() -} - -// UnmarshalText implements [encoding.TextUnmarshaler] -// by calling [Level.UnmarshalText]. 
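The level-naming and parsing rules documented above can be checked directly; a rough sketch, assuming the un-vendored golang.org/x/exp/slog module is imported as a regular dependency rather than from vendor/:

package main

import (
	"fmt"

	"golang.org/x/exp/slog"
)

func main() {
	fmt.Println(slog.LevelWarn.String())       // WARN
	fmt.Println((slog.LevelInfo + 2).String()) // INFO+2

	// Parsing is case-insensitive and accepts numeric offsets,
	// so "Error-8" resolves to the same value as LevelInfo.
	var l slog.Level
	if err := l.UnmarshalText([]byte("Error-8")); err != nil {
		panic(err)
	}
	fmt.Println(l.String()) // INFO
}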
-func (v *LevelVar) UnmarshalText(data []byte) error { - var l Level - if err := l.UnmarshalText(data); err != nil { - return err - } - v.Set(l) - return nil -} - -// A Leveler provides a Level value. -// -// As Level itself implements Leveler, clients typically supply -// a Level value wherever a Leveler is needed, such as in HandlerOptions. -// Clients who need to vary the level dynamically can provide a more complex -// Leveler implementation such as *LevelVar. -type Leveler interface { - Level() Level -} diff --git a/vendor/golang.org/x/exp/slog/logger.go b/vendor/golang.org/x/exp/slog/logger.go deleted file mode 100644 index e87ec9936..000000000 --- a/vendor/golang.org/x/exp/slog/logger.go +++ /dev/null @@ -1,343 +0,0 @@ -// Copyright 2022 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package slog - -import ( - "context" - "log" - "runtime" - "sync/atomic" - "time" - - "golang.org/x/exp/slog/internal" -) - -var defaultLogger atomic.Value - -func init() { - defaultLogger.Store(New(newDefaultHandler(log.Output))) -} - -// Default returns the default Logger. -func Default() *Logger { return defaultLogger.Load().(*Logger) } - -// SetDefault makes l the default Logger. -// After this call, output from the log package's default Logger -// (as with [log.Print], etc.) will be logged at LevelInfo using l's Handler. -func SetDefault(l *Logger) { - defaultLogger.Store(l) - // If the default's handler is a defaultHandler, then don't use a handleWriter, - // or we'll deadlock as they both try to acquire the log default mutex. - // The defaultHandler will use whatever the log default writer is currently - // set to, which is correct. - // This can occur with SetDefault(Default()). - // See TestSetDefault. - if _, ok := l.Handler().(*defaultHandler); !ok { - capturePC := log.Flags()&(log.Lshortfile|log.Llongfile) != 0 - log.SetOutput(&handlerWriter{l.Handler(), LevelInfo, capturePC}) - log.SetFlags(0) // we want just the log message, no time or location - } -} - -// handlerWriter is an io.Writer that calls a Handler. -// It is used to link the default log.Logger to the default slog.Logger. -type handlerWriter struct { - h Handler - level Level - capturePC bool -} - -func (w *handlerWriter) Write(buf []byte) (int, error) { - if !w.h.Enabled(context.Background(), w.level) { - return 0, nil - } - var pc uintptr - if !internal.IgnorePC && w.capturePC { - // skip [runtime.Callers, w.Write, Logger.Output, log.Print] - var pcs [1]uintptr - runtime.Callers(4, pcs[:]) - pc = pcs[0] - } - - // Remove final newline. - origLen := len(buf) // Report that the entire buf was written. - if len(buf) > 0 && buf[len(buf)-1] == '\n' { - buf = buf[:len(buf)-1] - } - r := NewRecord(time.Now(), w.level, string(buf), pc) - return origLen, w.h.Handle(context.Background(), r) -} - -// A Logger records structured information about each call to its -// Log, Debug, Info, Warn, and Error methods. -// For each call, it creates a Record and passes it to a Handler. -// -// To create a new Logger, call [New] or a Logger method -// that begins "With". -type Logger struct { - handler Handler // for structured logging -} - -func (l *Logger) clone() *Logger { - c := *l - return &c -} - -// Handler returns l's Handler. -func (l *Logger) Handler() Handler { return l.handler } - -// With returns a new Logger that includes the given arguments, converted to -// Attrs as in [Logger.Log]. 
-// The Attrs will be added to each output from the Logger. -// The new Logger shares the old Logger's context. -// The new Logger's handler is the result of calling WithAttrs on the receiver's -// handler. -func (l *Logger) With(args ...any) *Logger { - c := l.clone() - c.handler = l.handler.WithAttrs(argsToAttrSlice(args)) - return c -} - -// WithGroup returns a new Logger that starts a group. The keys of all -// attributes added to the Logger will be qualified by the given name. -// (How that qualification happens depends on the [Handler.WithGroup] -// method of the Logger's Handler.) -// The new Logger shares the old Logger's context. -// -// The new Logger's handler is the result of calling WithGroup on the receiver's -// handler. -func (l *Logger) WithGroup(name string) *Logger { - c := l.clone() - c.handler = l.handler.WithGroup(name) - return c - -} - -// New creates a new Logger with the given non-nil Handler and a nil context. -func New(h Handler) *Logger { - if h == nil { - panic("nil Handler") - } - return &Logger{handler: h} -} - -// With calls Logger.With on the default logger. -func With(args ...any) *Logger { - return Default().With(args...) -} - -// Enabled reports whether l emits log records at the given context and level. -func (l *Logger) Enabled(ctx context.Context, level Level) bool { - if ctx == nil { - ctx = context.Background() - } - return l.Handler().Enabled(ctx, level) -} - -// NewLogLogger returns a new log.Logger such that each call to its Output method -// dispatches a Record to the specified handler. The logger acts as a bridge from -// the older log API to newer structured logging handlers. -func NewLogLogger(h Handler, level Level) *log.Logger { - return log.New(&handlerWriter{h, level, true}, "", 0) -} - -// Log emits a log record with the current time and the given level and message. -// The Record's Attrs consist of the Logger's attributes followed by -// the Attrs specified by args. -// -// The attribute arguments are processed as follows: -// - If an argument is an Attr, it is used as is. -// - If an argument is a string and this is not the last argument, -// the following argument is treated as the value and the two are combined -// into an Attr. -// - Otherwise, the argument is treated as a value with key "!BADKEY". -func (l *Logger) Log(ctx context.Context, level Level, msg string, args ...any) { - l.log(ctx, level, msg, args...) -} - -// LogAttrs is a more efficient version of [Logger.Log] that accepts only Attrs. -func (l *Logger) LogAttrs(ctx context.Context, level Level, msg string, attrs ...Attr) { - l.logAttrs(ctx, level, msg, attrs...) -} - -// Debug logs at LevelDebug. -func (l *Logger) Debug(msg string, args ...any) { - l.log(nil, LevelDebug, msg, args...) -} - -// DebugContext logs at LevelDebug with the given context. -func (l *Logger) DebugContext(ctx context.Context, msg string, args ...any) { - l.log(ctx, LevelDebug, msg, args...) -} - -// DebugCtx logs at LevelDebug with the given context. -// Deprecated: Use Logger.DebugContext. -func (l *Logger) DebugCtx(ctx context.Context, msg string, args ...any) { - l.log(ctx, LevelDebug, msg, args...) -} - -// Info logs at LevelInfo. -func (l *Logger) Info(msg string, args ...any) { - l.log(nil, LevelInfo, msg, args...) -} - -// InfoContext logs at LevelInfo with the given context. -func (l *Logger) InfoContext(ctx context.Context, msg string, args ...any) { - l.log(ctx, LevelInfo, msg, args...) -} - -// InfoCtx logs at LevelInfo with the given context. -// Deprecated: Use Logger.InfoContext. 
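The argument-handling rules in the Logger.Log comment above translate directly into call sites; a minimal sketch (the handler choice and attribute names are illustrative assumptions):

package main

import (
	"os"

	"golang.org/x/exp/slog"
)

func main() {
	logger := slog.New(slog.NewTextHandler(os.Stderr, nil))

	// "user", "alice" is combined into one Attr; slog.Int is used as-is;
	// the trailing lone value is given the key "!BADKEY".
	logger.Info("login", "user", "alice", slog.Int("attempts", 3), 42)

	// With pre-binds Attrs that are then repeated on every record.
	reqLogger := logger.With("request_id", "r-123")
	reqLogger.Warn("slow response")
}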
-func (l *Logger) InfoCtx(ctx context.Context, msg string, args ...any) { - l.log(ctx, LevelInfo, msg, args...) -} - -// Warn logs at LevelWarn. -func (l *Logger) Warn(msg string, args ...any) { - l.log(nil, LevelWarn, msg, args...) -} - -// WarnContext logs at LevelWarn with the given context. -func (l *Logger) WarnContext(ctx context.Context, msg string, args ...any) { - l.log(ctx, LevelWarn, msg, args...) -} - -// WarnCtx logs at LevelWarn with the given context. -// Deprecated: Use Logger.WarnContext. -func (l *Logger) WarnCtx(ctx context.Context, msg string, args ...any) { - l.log(ctx, LevelWarn, msg, args...) -} - -// Error logs at LevelError. -func (l *Logger) Error(msg string, args ...any) { - l.log(nil, LevelError, msg, args...) -} - -// ErrorContext logs at LevelError with the given context. -func (l *Logger) ErrorContext(ctx context.Context, msg string, args ...any) { - l.log(ctx, LevelError, msg, args...) -} - -// ErrorCtx logs at LevelError with the given context. -// Deprecated: Use Logger.ErrorContext. -func (l *Logger) ErrorCtx(ctx context.Context, msg string, args ...any) { - l.log(ctx, LevelError, msg, args...) -} - -// log is the low-level logging method for methods that take ...any. -// It must always be called directly by an exported logging method -// or function, because it uses a fixed call depth to obtain the pc. -func (l *Logger) log(ctx context.Context, level Level, msg string, args ...any) { - if !l.Enabled(ctx, level) { - return - } - var pc uintptr - if !internal.IgnorePC { - var pcs [1]uintptr - // skip [runtime.Callers, this function, this function's caller] - runtime.Callers(3, pcs[:]) - pc = pcs[0] - } - r := NewRecord(time.Now(), level, msg, pc) - r.Add(args...) - if ctx == nil { - ctx = context.Background() - } - _ = l.Handler().Handle(ctx, r) -} - -// logAttrs is like [Logger.log], but for methods that take ...Attr. -func (l *Logger) logAttrs(ctx context.Context, level Level, msg string, attrs ...Attr) { - if !l.Enabled(ctx, level) { - return - } - var pc uintptr - if !internal.IgnorePC { - var pcs [1]uintptr - // skip [runtime.Callers, this function, this function's caller] - runtime.Callers(3, pcs[:]) - pc = pcs[0] - } - r := NewRecord(time.Now(), level, msg, pc) - r.AddAttrs(attrs...) - if ctx == nil { - ctx = context.Background() - } - _ = l.Handler().Handle(ctx, r) -} - -// Debug calls Logger.Debug on the default logger. -func Debug(msg string, args ...any) { - Default().log(nil, LevelDebug, msg, args...) -} - -// DebugContext calls Logger.DebugContext on the default logger. -func DebugContext(ctx context.Context, msg string, args ...any) { - Default().log(ctx, LevelDebug, msg, args...) -} - -// Info calls Logger.Info on the default logger. -func Info(msg string, args ...any) { - Default().log(nil, LevelInfo, msg, args...) -} - -// InfoContext calls Logger.InfoContext on the default logger. -func InfoContext(ctx context.Context, msg string, args ...any) { - Default().log(ctx, LevelInfo, msg, args...) -} - -// Warn calls Logger.Warn on the default logger. -func Warn(msg string, args ...any) { - Default().log(nil, LevelWarn, msg, args...) -} - -// WarnContext calls Logger.WarnContext on the default logger. -func WarnContext(ctx context.Context, msg string, args ...any) { - Default().log(ctx, LevelWarn, msg, args...) -} - -// Error calls Logger.Error on the default logger. -func Error(msg string, args ...any) { - Default().log(nil, LevelError, msg, args...) -} - -// ErrorContext calls Logger.ErrorContext on the default logger. 
-func ErrorContext(ctx context.Context, msg string, args ...any) { - Default().log(ctx, LevelError, msg, args...) -} - -// DebugCtx calls Logger.DebugContext on the default logger. -// Deprecated: call DebugContext. -func DebugCtx(ctx context.Context, msg string, args ...any) { - Default().log(ctx, LevelDebug, msg, args...) -} - -// InfoCtx calls Logger.InfoContext on the default logger. -// Deprecated: call InfoContext. -func InfoCtx(ctx context.Context, msg string, args ...any) { - Default().log(ctx, LevelInfo, msg, args...) -} - -// WarnCtx calls Logger.WarnContext on the default logger. -// Deprecated: call WarnContext. -func WarnCtx(ctx context.Context, msg string, args ...any) { - Default().log(ctx, LevelWarn, msg, args...) -} - -// ErrorCtx calls Logger.ErrorContext on the default logger. -// Deprecated: call ErrorContext. -func ErrorCtx(ctx context.Context, msg string, args ...any) { - Default().log(ctx, LevelError, msg, args...) -} - -// Log calls Logger.Log on the default logger. -func Log(ctx context.Context, level Level, msg string, args ...any) { - Default().log(ctx, level, msg, args...) -} - -// LogAttrs calls Logger.LogAttrs on the default logger. -func LogAttrs(ctx context.Context, level Level, msg string, attrs ...Attr) { - Default().logAttrs(ctx, level, msg, attrs...) -} diff --git a/vendor/golang.org/x/exp/slog/noplog.bench b/vendor/golang.org/x/exp/slog/noplog.bench deleted file mode 100644 index ed9296ff6..000000000 --- a/vendor/golang.org/x/exp/slog/noplog.bench +++ /dev/null @@ -1,36 +0,0 @@ -goos: linux -goarch: amd64 -pkg: golang.org/x/exp/slog -cpu: Intel(R) Xeon(R) CPU @ 2.20GHz -BenchmarkNopLog/attrs-8 1000000 1090 ns/op 0 B/op 0 allocs/op -BenchmarkNopLog/attrs-8 1000000 1097 ns/op 0 B/op 0 allocs/op -BenchmarkNopLog/attrs-8 1000000 1078 ns/op 0 B/op 0 allocs/op -BenchmarkNopLog/attrs-8 1000000 1095 ns/op 0 B/op 0 allocs/op -BenchmarkNopLog/attrs-8 1000000 1096 ns/op 0 B/op 0 allocs/op -BenchmarkNopLog/attrs-parallel-8 4007268 308.2 ns/op 0 B/op 0 allocs/op -BenchmarkNopLog/attrs-parallel-8 4016138 299.7 ns/op 0 B/op 0 allocs/op -BenchmarkNopLog/attrs-parallel-8 4020529 305.9 ns/op 0 B/op 0 allocs/op -BenchmarkNopLog/attrs-parallel-8 3977829 303.4 ns/op 0 B/op 0 allocs/op -BenchmarkNopLog/attrs-parallel-8 3225438 318.5 ns/op 0 B/op 0 allocs/op -BenchmarkNopLog/keys-values-8 1179256 994.2 ns/op 0 B/op 0 allocs/op -BenchmarkNopLog/keys-values-8 1000000 1002 ns/op 0 B/op 0 allocs/op -BenchmarkNopLog/keys-values-8 1216710 993.2 ns/op 0 B/op 0 allocs/op -BenchmarkNopLog/keys-values-8 1000000 1013 ns/op 0 B/op 0 allocs/op -BenchmarkNopLog/keys-values-8 1000000 1016 ns/op 0 B/op 0 allocs/op -BenchmarkNopLog/WithContext-8 989066 1163 ns/op 0 B/op 0 allocs/op -BenchmarkNopLog/WithContext-8 994116 1163 ns/op 0 B/op 0 allocs/op -BenchmarkNopLog/WithContext-8 1000000 1152 ns/op 0 B/op 0 allocs/op -BenchmarkNopLog/WithContext-8 991675 1165 ns/op 0 B/op 0 allocs/op -BenchmarkNopLog/WithContext-8 965268 1166 ns/op 0 B/op 0 allocs/op -BenchmarkNopLog/WithContext-parallel-8 3955503 303.3 ns/op 0 B/op 0 allocs/op -BenchmarkNopLog/WithContext-parallel-8 3861188 307.8 ns/op 0 B/op 0 allocs/op -BenchmarkNopLog/WithContext-parallel-8 3967752 303.9 ns/op 0 B/op 0 allocs/op -BenchmarkNopLog/WithContext-parallel-8 3955203 302.7 ns/op 0 B/op 0 allocs/op -BenchmarkNopLog/WithContext-parallel-8 3948278 301.1 ns/op 0 B/op 0 allocs/op -BenchmarkNopLog/Ctx-8 940622 1247 ns/op 0 B/op 0 allocs/op -BenchmarkNopLog/Ctx-8 936381 1257 ns/op 0 B/op 0 allocs/op -BenchmarkNopLog/Ctx-8 959730 1266 
ns/op 0 B/op 0 allocs/op -BenchmarkNopLog/Ctx-8 943473 1290 ns/op 0 B/op 0 allocs/op -BenchmarkNopLog/Ctx-8 919414 1259 ns/op 0 B/op 0 allocs/op -PASS -ok golang.org/x/exp/slog 40.566s diff --git a/vendor/golang.org/x/exp/slog/record.go b/vendor/golang.org/x/exp/slog/record.go deleted file mode 100644 index 38b3440f7..000000000 --- a/vendor/golang.org/x/exp/slog/record.go +++ /dev/null @@ -1,207 +0,0 @@ -// Copyright 2022 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package slog - -import ( - "runtime" - "time" - - "golang.org/x/exp/slices" -) - -const nAttrsInline = 5 - -// A Record holds information about a log event. -// Copies of a Record share state. -// Do not modify a Record after handing out a copy to it. -// Use [Record.Clone] to create a copy with no shared state. -type Record struct { - // The time at which the output method (Log, Info, etc.) was called. - Time time.Time - - // The log message. - Message string - - // The level of the event. - Level Level - - // The program counter at the time the record was constructed, as determined - // by runtime.Callers. If zero, no program counter is available. - // - // The only valid use for this value is as an argument to - // [runtime.CallersFrames]. In particular, it must not be passed to - // [runtime.FuncForPC]. - PC uintptr - - // Allocation optimization: an inline array sized to hold - // the majority of log calls (based on examination of open-source - // code). It holds the start of the list of Attrs. - front [nAttrsInline]Attr - - // The number of Attrs in front. - nFront int - - // The list of Attrs except for those in front. - // Invariants: - // - len(back) > 0 iff nFront == len(front) - // - Unused array elements are zero. Used to detect mistakes. - back []Attr -} - -// NewRecord creates a Record from the given arguments. -// Use [Record.AddAttrs] to add attributes to the Record. -// -// NewRecord is intended for logging APIs that want to support a [Handler] as -// a backend. -func NewRecord(t time.Time, level Level, msg string, pc uintptr) Record { - return Record{ - Time: t, - Message: msg, - Level: level, - PC: pc, - } -} - -// Clone returns a copy of the record with no shared state. -// The original record and the clone can both be modified -// without interfering with each other. -func (r Record) Clone() Record { - r.back = slices.Clip(r.back) // prevent append from mutating shared array - return r -} - -// NumAttrs returns the number of attributes in the Record. -func (r Record) NumAttrs() int { - return r.nFront + len(r.back) -} - -// Attrs calls f on each Attr in the Record. -// Iteration stops if f returns false. -func (r Record) Attrs(f func(Attr) bool) { - for i := 0; i < r.nFront; i++ { - if !f(r.front[i]) { - return - } - } - for _, a := range r.back { - if !f(a) { - return - } - } -} - -// AddAttrs appends the given Attrs to the Record's list of Attrs. -func (r *Record) AddAttrs(attrs ...Attr) { - n := copy(r.front[r.nFront:], attrs) - r.nFront += n - // Check if a copy was modified by slicing past the end - // and seeing if the Attr there is non-zero. - if cap(r.back) > len(r.back) { - end := r.back[:len(r.back)+1][len(r.back)] - if !end.isEmpty() { - panic("copies of a slog.Record were both modified") - } - } - r.back = append(r.back, attrs[n:]...) -} - -// Add converts the args to Attrs as described in [Logger.Log], -// then appends the Attrs to the Record's list of Attrs. 
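NewRecord and AddAttrs, described above, are the hooks a logging front end uses to feed a Handler; a rough sketch (the forward helper and its names are assumptions for illustration):

package main

import (
	"context"
	"fmt"
	"os"
	"time"

	"golang.org/x/exp/slog"
)

// forward hands a message and attributes to an arbitrary Handler,
// the backend pattern the NewRecord doc comment describes.
func forward(h slog.Handler, msg string, attrs ...slog.Attr) error {
	r := slog.NewRecord(time.Now(), slog.LevelInfo, msg, 0) // pc=0: no source location
	r.AddAttrs(attrs...)
	return h.Handle(context.Background(), r)
}

func main() {
	h := slog.NewTextHandler(os.Stdout, nil)
	if err := forward(h, "cache miss", slog.String("key", "user:42")); err != nil {
		fmt.Fprintln(os.Stderr, err)
	}
}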
-func (r *Record) Add(args ...any) { - var a Attr - for len(args) > 0 { - a, args = argsToAttr(args) - if r.nFront < len(r.front) { - r.front[r.nFront] = a - r.nFront++ - } else { - if r.back == nil { - r.back = make([]Attr, 0, countAttrs(args)) - } - r.back = append(r.back, a) - } - } - -} - -// countAttrs returns the number of Attrs that would be created from args. -func countAttrs(args []any) int { - n := 0 - for i := 0; i < len(args); i++ { - n++ - if _, ok := args[i].(string); ok { - i++ - } - } - return n -} - -const badKey = "!BADKEY" - -// argsToAttr turns a prefix of the nonempty args slice into an Attr -// and returns the unconsumed portion of the slice. -// If args[0] is an Attr, it returns it. -// If args[0] is a string, it treats the first two elements as -// a key-value pair. -// Otherwise, it treats args[0] as a value with a missing key. -func argsToAttr(args []any) (Attr, []any) { - switch x := args[0].(type) { - case string: - if len(args) == 1 { - return String(badKey, x), nil - } - return Any(x, args[1]), args[2:] - - case Attr: - return x, args[1:] - - default: - return Any(badKey, x), args[1:] - } -} - -// Source describes the location of a line of source code. -type Source struct { - // Function is the package path-qualified function name containing the - // source line. If non-empty, this string uniquely identifies a single - // function in the program. This may be the empty string if not known. - Function string `json:"function"` - // File and Line are the file name and line number (1-based) of the source - // line. These may be the empty string and zero, respectively, if not known. - File string `json:"file"` - Line int `json:"line"` -} - -// attrs returns the non-zero fields of s as a slice of attrs. -// It is similar to a LogValue method, but we don't want Source -// to implement LogValuer because it would be resolved before -// the ReplaceAttr function was called. -func (s *Source) group() Value { - var as []Attr - if s.Function != "" { - as = append(as, String("function", s.Function)) - } - if s.File != "" { - as = append(as, String("file", s.File)) - } - if s.Line != 0 { - as = append(as, Int("line", s.Line)) - } - return GroupValue(as...) -} - -// source returns a Source for the log event. -// If the Record was created without the necessary information, -// or if the location is unavailable, it returns a non-nil *Source -// with zero fields. -func (r Record) source() *Source { - fs := runtime.CallersFrames([]uintptr{r.PC}) - f, _ := fs.Next() - return &Source{ - Function: f.Function, - File: f.File, - Line: f.Line, - } -} diff --git a/vendor/golang.org/x/exp/slog/text_handler.go b/vendor/golang.org/x/exp/slog/text_handler.go deleted file mode 100644 index 75b66b716..000000000 --- a/vendor/golang.org/x/exp/slog/text_handler.go +++ /dev/null @@ -1,161 +0,0 @@ -// Copyright 2022 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package slog - -import ( - "context" - "encoding" - "fmt" - "io" - "reflect" - "strconv" - "unicode" - "unicode/utf8" -) - -// TextHandler is a Handler that writes Records to an io.Writer as a -// sequence of key=value pairs separated by spaces and followed by a newline. -type TextHandler struct { - *commonHandler -} - -// NewTextHandler creates a TextHandler that writes to w, -// using the given options. -// If opts is nil, the default options are used. 
-func NewTextHandler(w io.Writer, opts *HandlerOptions) *TextHandler { - if opts == nil { - opts = &HandlerOptions{} - } - return &TextHandler{ - &commonHandler{ - json: false, - w: w, - opts: *opts, - }, - } -} - -// Enabled reports whether the handler handles records at the given level. -// The handler ignores records whose level is lower. -func (h *TextHandler) Enabled(_ context.Context, level Level) bool { - return h.commonHandler.enabled(level) -} - -// WithAttrs returns a new TextHandler whose attributes consists -// of h's attributes followed by attrs. -func (h *TextHandler) WithAttrs(attrs []Attr) Handler { - return &TextHandler{commonHandler: h.commonHandler.withAttrs(attrs)} -} - -func (h *TextHandler) WithGroup(name string) Handler { - return &TextHandler{commonHandler: h.commonHandler.withGroup(name)} -} - -// Handle formats its argument Record as a single line of space-separated -// key=value items. -// -// If the Record's time is zero, the time is omitted. -// Otherwise, the key is "time" -// and the value is output in RFC3339 format with millisecond precision. -// -// If the Record's level is zero, the level is omitted. -// Otherwise, the key is "level" -// and the value of [Level.String] is output. -// -// If the AddSource option is set and source information is available, -// the key is "source" and the value is output as FILE:LINE. -// -// The message's key is "msg". -// -// To modify these or other attributes, or remove them from the output, use -// [HandlerOptions.ReplaceAttr]. -// -// If a value implements [encoding.TextMarshaler], the result of MarshalText is -// written. Otherwise, the result of fmt.Sprint is written. -// -// Keys and values are quoted with [strconv.Quote] if they contain Unicode space -// characters, non-printing characters, '"' or '='. -// -// Keys inside groups consist of components (keys or group names) separated by -// dots. No further escaping is performed. -// Thus there is no way to determine from the key "a.b.c" whether there -// are two groups "a" and "b" and a key "c", or a single group "a.b" and a key "c", -// or single group "a" and a key "b.c". -// If it is necessary to reconstruct the group structure of a key -// even in the presence of dots inside components, use -// [HandlerOptions.ReplaceAttr] to encode that information in the key. -// -// Each call to Handle results in a single serialized call to -// io.Writer.Write. -func (h *TextHandler) Handle(_ context.Context, r Record) error { - return h.commonHandler.handle(r) -} - -func appendTextValue(s *handleState, v Value) error { - switch v.Kind() { - case KindString: - s.appendString(v.str()) - case KindTime: - s.appendTime(v.time()) - case KindAny: - if tm, ok := v.any.(encoding.TextMarshaler); ok { - data, err := tm.MarshalText() - if err != nil { - return err - } - // TODO: avoid the conversion to string. - s.appendString(string(data)) - return nil - } - if bs, ok := byteSlice(v.any); ok { - // As of Go 1.19, this only allocates for strings longer than 32 bytes. - s.buf.WriteString(strconv.Quote(string(bs))) - return nil - } - s.appendString(fmt.Sprintf("%+v", v.Any())) - default: - *s.buf = v.append(*s.buf) - } - return nil -} - -// byteSlice returns its argument as a []byte if the argument's -// underlying type is []byte, along with a second return value of true. -// Otherwise it returns nil, false. 
-func byteSlice(a any) ([]byte, bool) { - if bs, ok := a.([]byte); ok { - return bs, true - } - // Like Printf's %s, we allow both the slice type and the byte element type to be named. - t := reflect.TypeOf(a) - if t != nil && t.Kind() == reflect.Slice && t.Elem().Kind() == reflect.Uint8 { - return reflect.ValueOf(a).Bytes(), true - } - return nil, false -} - -func needsQuoting(s string) bool { - if len(s) == 0 { - return true - } - for i := 0; i < len(s); { - b := s[i] - if b < utf8.RuneSelf { - // Quote anything except a backslash that would need quoting in a - // JSON string, as well as space and '=' - if b != '\\' && (b == ' ' || b == '=' || !safeSet[b]) { - return true - } - i++ - continue - } - r, size := utf8.DecodeRuneInString(s[i:]) - if r == utf8.RuneError || unicode.IsSpace(r) || !unicode.IsPrint(r) { - return true - } - i += size - } - return false -} diff --git a/vendor/golang.org/x/exp/slog/value.go b/vendor/golang.org/x/exp/slog/value.go deleted file mode 100644 index 3550c46fc..000000000 --- a/vendor/golang.org/x/exp/slog/value.go +++ /dev/null @@ -1,456 +0,0 @@ -// Copyright 2022 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package slog - -import ( - "fmt" - "math" - "runtime" - "strconv" - "strings" - "time" - "unsafe" - - "golang.org/x/exp/slices" -) - -// A Value can represent any Go value, but unlike type any, -// it can represent most small values without an allocation. -// The zero Value corresponds to nil. -type Value struct { - _ [0]func() // disallow == - // num holds the value for Kinds Int64, Uint64, Float64, Bool and Duration, - // the string length for KindString, and nanoseconds since the epoch for KindTime. - num uint64 - // If any is of type Kind, then the value is in num as described above. - // If any is of type *time.Location, then the Kind is Time and time.Time value - // can be constructed from the Unix nanos in num and the location (monotonic time - // is not preserved). - // If any is of type stringptr, then the Kind is String and the string value - // consists of the length in num and the pointer in any. - // Otherwise, the Kind is Any and any is the value. - // (This implies that Attrs cannot store values of type Kind, *time.Location - // or stringptr.) - any any -} - -// Kind is the kind of a Value. -type Kind int - -// The following list is sorted alphabetically, but it's also important that -// KindAny is 0 so that a zero Value represents nil. - -const ( - KindAny Kind = iota - KindBool - KindDuration - KindFloat64 - KindInt64 - KindString - KindTime - KindUint64 - KindGroup - KindLogValuer -) - -var kindStrings = []string{ - "Any", - "Bool", - "Duration", - "Float64", - "Int64", - "String", - "Time", - "Uint64", - "Group", - "LogValuer", -} - -func (k Kind) String() string { - if k >= 0 && int(k) < len(kindStrings) { - return kindStrings[k] - } - return "" -} - -// Unexported version of Kind, just so we can store Kinds in Values. -// (No user-provided value has this type.) -type kind Kind - -// Kind returns v's Kind. -func (v Value) Kind() Kind { - switch x := v.any.(type) { - case Kind: - return x - case stringptr: - return KindString - case timeLocation: - return KindTime - case groupptr: - return KindGroup - case LogValuer: - return KindLogValuer - case kind: // a kind is just a wrapper for a Kind - return KindAny - default: - return KindAny - } -} - -//////////////// Constructors - -// IntValue returns a Value for an int. 
-func IntValue(v int) Value { - return Int64Value(int64(v)) -} - -// Int64Value returns a Value for an int64. -func Int64Value(v int64) Value { - return Value{num: uint64(v), any: KindInt64} -} - -// Uint64Value returns a Value for a uint64. -func Uint64Value(v uint64) Value { - return Value{num: v, any: KindUint64} -} - -// Float64Value returns a Value for a floating-point number. -func Float64Value(v float64) Value { - return Value{num: math.Float64bits(v), any: KindFloat64} -} - -// BoolValue returns a Value for a bool. -func BoolValue(v bool) Value { - u := uint64(0) - if v { - u = 1 - } - return Value{num: u, any: KindBool} -} - -// Unexported version of *time.Location, just so we can store *time.Locations in -// Values. (No user-provided value has this type.) -type timeLocation *time.Location - -// TimeValue returns a Value for a time.Time. -// It discards the monotonic portion. -func TimeValue(v time.Time) Value { - if v.IsZero() { - // UnixNano on the zero time is undefined, so represent the zero time - // with a nil *time.Location instead. time.Time.Location method never - // returns nil, so a Value with any == timeLocation(nil) cannot be - // mistaken for any other Value, time.Time or otherwise. - return Value{any: timeLocation(nil)} - } - return Value{num: uint64(v.UnixNano()), any: timeLocation(v.Location())} -} - -// DurationValue returns a Value for a time.Duration. -func DurationValue(v time.Duration) Value { - return Value{num: uint64(v.Nanoseconds()), any: KindDuration} -} - -// AnyValue returns a Value for the supplied value. -// -// If the supplied value is of type Value, it is returned -// unmodified. -// -// Given a value of one of Go's predeclared string, bool, or -// (non-complex) numeric types, AnyValue returns a Value of kind -// String, Bool, Uint64, Int64, or Float64. The width of the -// original numeric type is not preserved. -// -// Given a time.Time or time.Duration value, AnyValue returns a Value of kind -// KindTime or KindDuration. The monotonic time is not preserved. -// -// For nil, or values of all other types, including named types whose -// underlying type is numeric, AnyValue returns a value of kind KindAny. -func AnyValue(v any) Value { - switch v := v.(type) { - case string: - return StringValue(v) - case int: - return Int64Value(int64(v)) - case uint: - return Uint64Value(uint64(v)) - case int64: - return Int64Value(v) - case uint64: - return Uint64Value(v) - case bool: - return BoolValue(v) - case time.Duration: - return DurationValue(v) - case time.Time: - return TimeValue(v) - case uint8: - return Uint64Value(uint64(v)) - case uint16: - return Uint64Value(uint64(v)) - case uint32: - return Uint64Value(uint64(v)) - case uintptr: - return Uint64Value(uint64(v)) - case int8: - return Int64Value(int64(v)) - case int16: - return Int64Value(int64(v)) - case int32: - return Int64Value(int64(v)) - case float64: - return Float64Value(v) - case float32: - return Float64Value(float64(v)) - case []Attr: - return GroupValue(v...) - case Kind: - return Value{any: kind(v)} - case Value: - return v - default: - return Value{any: v} - } -} - -//////////////// Accessors - -// Any returns v's value as an any. 
-func (v Value) Any() any { - switch v.Kind() { - case KindAny: - if k, ok := v.any.(kind); ok { - return Kind(k) - } - return v.any - case KindLogValuer: - return v.any - case KindGroup: - return v.group() - case KindInt64: - return int64(v.num) - case KindUint64: - return v.num - case KindFloat64: - return v.float() - case KindString: - return v.str() - case KindBool: - return v.bool() - case KindDuration: - return v.duration() - case KindTime: - return v.time() - default: - panic(fmt.Sprintf("bad kind: %s", v.Kind())) - } -} - -// Int64 returns v's value as an int64. It panics -// if v is not a signed integer. -func (v Value) Int64() int64 { - if g, w := v.Kind(), KindInt64; g != w { - panic(fmt.Sprintf("Value kind is %s, not %s", g, w)) - } - return int64(v.num) -} - -// Uint64 returns v's value as a uint64. It panics -// if v is not an unsigned integer. -func (v Value) Uint64() uint64 { - if g, w := v.Kind(), KindUint64; g != w { - panic(fmt.Sprintf("Value kind is %s, not %s", g, w)) - } - return v.num -} - -// Bool returns v's value as a bool. It panics -// if v is not a bool. -func (v Value) Bool() bool { - if g, w := v.Kind(), KindBool; g != w { - panic(fmt.Sprintf("Value kind is %s, not %s", g, w)) - } - return v.bool() -} - -func (v Value) bool() bool { - return v.num == 1 -} - -// Duration returns v's value as a time.Duration. It panics -// if v is not a time.Duration. -func (v Value) Duration() time.Duration { - if g, w := v.Kind(), KindDuration; g != w { - panic(fmt.Sprintf("Value kind is %s, not %s", g, w)) - } - - return v.duration() -} - -func (v Value) duration() time.Duration { - return time.Duration(int64(v.num)) -} - -// Float64 returns v's value as a float64. It panics -// if v is not a float64. -func (v Value) Float64() float64 { - if g, w := v.Kind(), KindFloat64; g != w { - panic(fmt.Sprintf("Value kind is %s, not %s", g, w)) - } - - return v.float() -} - -func (v Value) float() float64 { - return math.Float64frombits(v.num) -} - -// Time returns v's value as a time.Time. It panics -// if v is not a time.Time. -func (v Value) Time() time.Time { - if g, w := v.Kind(), KindTime; g != w { - panic(fmt.Sprintf("Value kind is %s, not %s", g, w)) - } - return v.time() -} - -func (v Value) time() time.Time { - loc := v.any.(timeLocation) - if loc == nil { - return time.Time{} - } - return time.Unix(0, int64(v.num)).In(loc) -} - -// LogValuer returns v's value as a LogValuer. It panics -// if v is not a LogValuer. -func (v Value) LogValuer() LogValuer { - return v.any.(LogValuer) -} - -// Group returns v's value as a []Attr. -// It panics if v's Kind is not KindGroup. -func (v Value) Group() []Attr { - if sp, ok := v.any.(groupptr); ok { - return unsafe.Slice((*Attr)(sp), v.num) - } - panic("Group: bad kind") -} - -func (v Value) group() []Attr { - return unsafe.Slice((*Attr)(v.any.(groupptr)), v.num) -} - -//////////////// Other - -// Equal reports whether v and w represent the same Go value. 
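The kind mapping spelled out in the AnyValue comment above can be observed directly; a small sketch (the expected output in the comments follows from the kindStrings table earlier in this file):

package main

import (
	"fmt"
	"time"

	"golang.org/x/exp/slog"
)

func main() {
	fmt.Println(slog.AnyValue("hi").Kind())                // String
	fmt.Println(slog.AnyValue(int16(7)).Kind())            // Int64 (width is not preserved)
	fmt.Println(slog.AnyValue(uint8(7)).Kind())            // Uint64
	fmt.Println(slog.AnyValue(2 * time.Second).Kind())     // Duration
	fmt.Println(slog.AnyValue(struct{ X int }{1}).Kind())  // Any

	// Typed accessors panic on a kind mismatch, so check Kind first
	// or fall back to Any for values of unknown kind.
	v := slog.AnyValue(int16(7))
	if v.Kind() == slog.KindInt64 {
		fmt.Println(v.Int64()) // 7
	}
}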
-func (v Value) Equal(w Value) bool { - k1 := v.Kind() - k2 := w.Kind() - if k1 != k2 { - return false - } - switch k1 { - case KindInt64, KindUint64, KindBool, KindDuration: - return v.num == w.num - case KindString: - return v.str() == w.str() - case KindFloat64: - return v.float() == w.float() - case KindTime: - return v.time().Equal(w.time()) - case KindAny, KindLogValuer: - return v.any == w.any // may panic if non-comparable - case KindGroup: - return slices.EqualFunc(v.group(), w.group(), Attr.Equal) - default: - panic(fmt.Sprintf("bad kind: %s", k1)) - } -} - -// append appends a text representation of v to dst. -// v is formatted as with fmt.Sprint. -func (v Value) append(dst []byte) []byte { - switch v.Kind() { - case KindString: - return append(dst, v.str()...) - case KindInt64: - return strconv.AppendInt(dst, int64(v.num), 10) - case KindUint64: - return strconv.AppendUint(dst, v.num, 10) - case KindFloat64: - return strconv.AppendFloat(dst, v.float(), 'g', -1, 64) - case KindBool: - return strconv.AppendBool(dst, v.bool()) - case KindDuration: - return append(dst, v.duration().String()...) - case KindTime: - return append(dst, v.time().String()...) - case KindGroup: - return fmt.Append(dst, v.group()) - case KindAny, KindLogValuer: - return fmt.Append(dst, v.any) - default: - panic(fmt.Sprintf("bad kind: %s", v.Kind())) - } -} - -// A LogValuer is any Go value that can convert itself into a Value for logging. -// -// This mechanism may be used to defer expensive operations until they are -// needed, or to expand a single value into a sequence of components. -type LogValuer interface { - LogValue() Value -} - -const maxLogValues = 100 - -// Resolve repeatedly calls LogValue on v while it implements LogValuer, -// and returns the result. -// If v resolves to a group, the group's attributes' values are not recursively -// resolved. -// If the number of LogValue calls exceeds a threshold, a Value containing an -// error is returned. -// Resolve's return value is guaranteed not to be of Kind KindLogValuer. -func (v Value) Resolve() (rv Value) { - orig := v - defer func() { - if r := recover(); r != nil { - rv = AnyValue(fmt.Errorf("LogValue panicked\n%s", stack(3, 5))) - } - }() - - for i := 0; i < maxLogValues; i++ { - if v.Kind() != KindLogValuer { - return v - } - v = v.LogValuer().LogValue() - } - err := fmt.Errorf("LogValue called too many times on Value of type %T", orig.Any()) - return AnyValue(err) -} - -func stack(skip, nFrames int) string { - pcs := make([]uintptr, nFrames+1) - n := runtime.Callers(skip+1, pcs) - if n == 0 { - return "(no stack)" - } - frames := runtime.CallersFrames(pcs[:n]) - var b strings.Builder - i := 0 - for { - frame, more := frames.Next() - fmt.Fprintf(&b, "called from %s (%s:%d)\n", frame.Function, frame.File, frame.Line) - if !more { - break - } - i++ - if i >= nFrames { - fmt.Fprintf(&b, "(rest of stack elided)\n") - break - } - } - return b.String() -} diff --git a/vendor/golang.org/x/exp/slog/value_119.go b/vendor/golang.org/x/exp/slog/value_119.go deleted file mode 100644 index 29b0d7329..000000000 --- a/vendor/golang.org/x/exp/slog/value_119.go +++ /dev/null @@ -1,53 +0,0 @@ -// Copyright 2022 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
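The LogValuer interface above defers conversion until a value is actually logged; a rough sketch of the common redaction use, assuming the handler resolves LogValuer attributes as the Resolve documentation describes (the token type is an illustrative assumption):

package main

import (
	"os"

	"golang.org/x/exp/slog"
)

// token hides its contents from logs by implementing LogValuer.
type token string

func (token) LogValue() slog.Value {
	return slog.StringValue("REDACTED")
}

func main() {
	logger := slog.New(slog.NewTextHandler(os.Stdout, nil))
	// The value is resolved at logging time, so the output shows REDACTED
	// rather than the underlying secret.
	logger.Info("auth", "token", token("secret-value"))
}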
- -//go:build go1.19 && !go1.20 - -package slog - -import ( - "reflect" - "unsafe" -) - -type ( - stringptr unsafe.Pointer // used in Value.any when the Value is a string - groupptr unsafe.Pointer // used in Value.any when the Value is a []Attr -) - -// StringValue returns a new Value for a string. -func StringValue(value string) Value { - hdr := (*reflect.StringHeader)(unsafe.Pointer(&value)) - return Value{num: uint64(hdr.Len), any: stringptr(hdr.Data)} -} - -func (v Value) str() string { - var s string - hdr := (*reflect.StringHeader)(unsafe.Pointer(&s)) - hdr.Data = uintptr(v.any.(stringptr)) - hdr.Len = int(v.num) - return s -} - -// String returns Value's value as a string, formatted like fmt.Sprint. Unlike -// the methods Int64, Float64, and so on, which panic if v is of the -// wrong kind, String never panics. -func (v Value) String() string { - if sp, ok := v.any.(stringptr); ok { - // Inlining this code makes a huge difference. - var s string - hdr := (*reflect.StringHeader)(unsafe.Pointer(&s)) - hdr.Data = uintptr(sp) - hdr.Len = int(v.num) - return s - } - return string(v.append(nil)) -} - -// GroupValue returns a new Value for a list of Attrs. -// The caller must not subsequently mutate the argument slice. -func GroupValue(as ...Attr) Value { - hdr := (*reflect.SliceHeader)(unsafe.Pointer(&as)) - return Value{num: uint64(hdr.Len), any: groupptr(hdr.Data)} -} diff --git a/vendor/golang.org/x/exp/slog/value_120.go b/vendor/golang.org/x/exp/slog/value_120.go deleted file mode 100644 index f7d4c0932..000000000 --- a/vendor/golang.org/x/exp/slog/value_120.go +++ /dev/null @@ -1,39 +0,0 @@ -// Copyright 2022 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -//go:build go1.20 - -package slog - -import "unsafe" - -type ( - stringptr *byte // used in Value.any when the Value is a string - groupptr *Attr // used in Value.any when the Value is a []Attr -) - -// StringValue returns a new Value for a string. -func StringValue(value string) Value { - return Value{num: uint64(len(value)), any: stringptr(unsafe.StringData(value))} -} - -// GroupValue returns a new Value for a list of Attrs. -// The caller must not subsequently mutate the argument slice. -func GroupValue(as ...Attr) Value { - return Value{num: uint64(len(as)), any: groupptr(unsafe.SliceData(as))} -} - -// String returns Value's value as a string, formatted like fmt.Sprint. Unlike -// the methods Int64, Float64, and so on, which panic if v is of the -// wrong kind, String never panics. -func (v Value) String() string { - if sp, ok := v.any.(stringptr); ok { - return unsafe.String(sp, v.num) - } - return string(v.append(nil)) -} - -func (v Value) str() string { - return unsafe.String(v.any.(stringptr), v.num) -} diff --git a/vendor/golang.org/x/exp/typeparams/LICENSE b/vendor/golang.org/x/exp/typeparams/LICENSE deleted file mode 100644 index 6a66aea5e..000000000 --- a/vendor/golang.org/x/exp/typeparams/LICENSE +++ /dev/null @@ -1,27 +0,0 @@ -Copyright (c) 2009 The Go Authors. All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are -met: - - * Redistributions of source code must retain the above copyright -notice, this list of conditions and the following disclaimer. 
- * Redistributions in binary form must reproduce the above -copyright notice, this list of conditions and the following disclaimer -in the documentation and/or other materials provided with the -distribution. - * Neither the name of Google Inc. nor the names of its -contributors may be used to endorse or promote products derived from -this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/vendor/golang.org/x/exp/typeparams/common.go b/vendor/golang.org/x/exp/typeparams/common.go deleted file mode 100644 index 7f867cf8f..000000000 --- a/vendor/golang.org/x/exp/typeparams/common.go +++ /dev/null @@ -1,182 +0,0 @@ -// Copyright 2021 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Package typeparams contains common utilities for writing tools that interact -// with generic Go code, as introduced with Go 1.18. -// -// Many of the types and functions in this package are proxies for the new APIs -// introduced in the standard library with Go 1.18. For example, the -// typeparams.Union type is an alias for go/types.Union, and the ForTypeSpec -// function returns the value of the go/ast.TypeSpec.TypeParams field. At Go -// versions older than 1.18 these helpers are implemented as stubs, allowing -// users of this package to write code that handles generic constructs inline, -// even if the Go version being used to compile does not support generics. -// -// Additionally, this package contains common utilities for working with the -// new generic constructs, to supplement the standard library APIs. Notably, -// the NormalTerms API computes a minimal representation of the structural -// restrictions on a type parameter. In the future, these supplemental APIs may -// be available in the standard library.. -package typeparams - -import ( - "go/ast" - "go/token" - "go/types" -) - -// Enabled reports whether type parameters are enabled in the current build -// environment. -func Enabled() bool { - return enabled -} - -// UnpackIndexExpr extracts data from AST nodes that represent index -// expressions. -// -// For an ast.IndexExpr, the resulting indices slice will contain exactly one -// index expression. For an ast.IndexListExpr (go1.18+), it may have a variable -// number of index expressions. -// -// For nodes that don't represent index expressions, the first return value of -// UnpackIndexExpr will be nil. 
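UnpackIndexExpr and its counterpart PackIndexExpr, documented here, can be exercised on a parsed expression; a rough sketch, assuming Go 1.18+ and a direct dependency on golang.org/x/exp/typeparams (which this change stops vendoring):

package main

import (
	"fmt"
	"go/parser"

	"golang.org/x/exp/typeparams"
)

func main() {
	// On Go 1.18+, "m[int, string]" parses to an index-list expression.
	expr, err := parser.ParseExpr("m[int, string]")
	if err != nil {
		panic(err)
	}

	x, lbrack, indices, rbrack := typeparams.UnpackIndexExpr(expr)
	fmt.Println(len(indices)) // 2

	// PackIndexExpr rebuilds an equivalent node from the unpacked pieces.
	rebuilt := typeparams.PackIndexExpr(x, lbrack, indices, rbrack)
	fmt.Printf("%T\n", rebuilt) // *ast.IndexListExpr
}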
-func UnpackIndexExpr(n ast.Node) (x ast.Expr, lbrack token.Pos, indices []ast.Expr, rbrack token.Pos) { - switch e := n.(type) { - case *ast.IndexExpr: - return e.X, e.Lbrack, []ast.Expr{e.Index}, e.Rbrack - case *IndexListExpr: - return e.X, e.Lbrack, e.Indices, e.Rbrack - } - return nil, token.NoPos, nil, token.NoPos -} - -// PackIndexExpr returns an *ast.IndexExpr or *ast.IndexListExpr, depending on -// the cardinality of indices. Calling PackIndexExpr with len(indices) == 0 -// will panic. -func PackIndexExpr(x ast.Expr, lbrack token.Pos, indices []ast.Expr, rbrack token.Pos) ast.Expr { - switch len(indices) { - case 0: - panic("empty indices") - case 1: - return &ast.IndexExpr{ - X: x, - Lbrack: lbrack, - Index: indices[0], - Rbrack: rbrack, - } - default: - return &IndexListExpr{ - X: x, - Lbrack: lbrack, - Indices: indices, - Rbrack: rbrack, - } - } -} - -// IsTypeParam reports whether t is a type parameter. -func IsTypeParam(t types.Type) bool { - _, ok := t.(*TypeParam) - return ok -} - -// OriginMethod returns the origin method associated with the method fn. For -// methods on a non-generic receiver base type, this is just fn. However, for -// methods with a generic receiver, OriginMethod returns the corresponding -// method in the method set of the origin type. -// -// As a special case, if fn is not a method (has no receiver), OriginMethod -// returns fn. -func OriginMethod(fn *types.Func) *types.Func { - recv := fn.Type().(*types.Signature).Recv() - if recv == nil { - return fn - } - base := recv.Type() - p, isPtr := base.(*types.Pointer) - if isPtr { - base = p.Elem() - } - named, isNamed := base.(*types.Named) - if !isNamed { - // Receiver is a *types.Interface. - return fn - } - if ForNamed(named).Len() == 0 { - // Receiver base has no type parameters, so we can avoid the lookup below. - return fn - } - orig := NamedTypeOrigin(named) - gfn, _, _ := types.LookupFieldOrMethod(orig, true, fn.Pkg(), fn.Name()) - return gfn.(*types.Func) -} - -// GenericAssignableTo is a generalization of types.AssignableTo that -// implements the following rule for uninstantiated generic types: -// -// If V and T are generic named types, then V is considered assignable to T if, -// for every possible instantation of V[A_1, ..., A_N], the instantiation -// T[A_1, ..., A_N] is valid and V[A_1, ..., A_N] implements T[A_1, ..., A_N]. -// -// If T has structural constraints, they must be satisfied by V. -// -// For example, consider the following type declarations: -// -// type Interface[T any] interface { -// Accept(T) -// } -// -// type Container[T any] struct { -// Element T -// } -// -// func (c Container[T]) Accept(t T) { c.Element = t } -// -// In this case, GenericAssignableTo reports that instantiations of Container -// are assignable to the corresponding instantiation of Interface. -func GenericAssignableTo(ctxt *Context, V, T types.Type) bool { - // If V and T are not both named, or do not have matching non-empty type - // parameter lists, fall back on types.AssignableTo. - - VN, Vnamed := V.(*types.Named) - TN, Tnamed := T.(*types.Named) - if !Vnamed || !Tnamed { - return types.AssignableTo(V, T) - } - - vtparams := ForNamed(VN) - ttparams := ForNamed(TN) - if vtparams.Len() == 0 || vtparams.Len() != ttparams.Len() || NamedTypeArgs(VN).Len() != 0 || NamedTypeArgs(TN).Len() != 0 { - return types.AssignableTo(V, T) - } - - // V and T have the same (non-zero) number of type params. Instantiate both - // with the type parameters of V. 
This must always succeed for V, and will - // succeed for T if and only if the type set of each type parameter of V is a - // subset of the type set of the corresponding type parameter of T, meaning - // that every instantiation of V corresponds to a valid instantiation of T. - - // Minor optimization: ensure we share a context across the two - // instantiations below. - if ctxt == nil { - ctxt = NewContext() - } - - var targs []types.Type - for i := 0; i < vtparams.Len(); i++ { - targs = append(targs, vtparams.At(i)) - } - - vinst, err := Instantiate(ctxt, V, targs, true) - if err != nil { - panic("type parameters should satisfy their own constraints") - } - - tinst, err := Instantiate(ctxt, T, targs, true) - if err != nil { - return false - } - - return types.AssignableTo(vinst, tinst) -} diff --git a/vendor/golang.org/x/exp/typeparams/normalize.go b/vendor/golang.org/x/exp/typeparams/normalize.go deleted file mode 100644 index 6cf71f045..000000000 --- a/vendor/golang.org/x/exp/typeparams/normalize.go +++ /dev/null @@ -1,200 +0,0 @@ -// Copyright 2021 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package typeparams - -import ( - "errors" - "fmt" - "go/types" - "os" - "strings" -) - -const debug = false - -// ErrEmptyTypeSet is returned if a type set computation results in a type set -// with no types. -var ErrEmptyTypeSet = errors.New("empty type set") - -// NormalTerms returns a slice of terms representing the normalized structural -// type restrictions of a type, if any. -// -// For all types whose underlying type is not *types.TypeParam, -// *types.Interface, or *types.Union, this is just a single term with Tilde() -// == false and Type() == typ. For types whose underlying type is -// *types.TypeParam, *types.Interface, and *types.Union, see below. -// -// Structural type restrictions of a type parameter are created via -// non-interface types embedded in its constraint interface (directly, or via a -// chain of interface embeddings). For example, in the declaration type T[P -// interface{~int; m()}] int is the structural restriction of the type -// parameter P is ~int. -// -// With interface embedding and unions, the specification of structural type -// restrictions may be arbitrarily complex. For example, consider the -// following: -// -// type A interface{ ~string|~[]byte } -// -// type B interface{ int|string } -// -// type C interface { ~string|~int } -// -// type T[P interface{ A|B; C }] int -// -// In this example, the structural type restriction of P is ~string|int: A|B -// expands to ~string|~[]byte|int|string, which reduces to ~string|~[]byte|int, -// which when intersected with C (~string|~int) yields ~string|int. -// -// NormalTerms computes these expansions and reductions, producing a -// "normalized" form of the embeddings. A structural restriction is normalized -// if it is a single union containing no interface terms, and is minimal in the -// sense that removing any term changes the set of types satisfying the -// constraint. It is left as a proof for the reader that, modulo sorting, there -// is exactly one such normalized form. -// -// Because the minimal representation always takes this form, NormalTerms -// returns a slice of tilde terms corresponding to the terms of the union in -// the normalized structural restriction. An error is returned if the type is -// invalid, exceeds complexity bounds, or has an empty type set. 
In the latter -// case, NormalTerms returns ErrEmptyTypeSet. -// -// NormalTerms makes no guarantees about the order of terms, except that it -// is deterministic. -func NormalTerms(typ types.Type) ([]*Term, error) { - if tparam, ok := typ.(*TypeParam); ok { - constraint := tparam.Constraint() - if constraint == nil { - return nil, fmt.Errorf("%s has nil constraint", tparam) - } - iface, _ := constraint.Underlying().(*types.Interface) - if iface == nil { - return nil, fmt.Errorf("constraint is %T, not *types.Interface", constraint.Underlying()) - } - typ = iface - } - tset, err := computeTermSetInternal(typ, make(map[types.Type]*termSet), 0) - if err != nil { - return nil, err - } - if tset.terms.isEmpty() { - return nil, ErrEmptyTypeSet - } - if tset.terms.isAll() { - return nil, nil - } - var terms []*Term - for _, term := range tset.terms { - terms = append(terms, NewTerm(term.tilde, term.typ)) - } - return terms, nil -} - -// A termSet holds the normalized set of terms for a given type. -// -// The name termSet is intentionally distinct from 'type set': a type set is -// all types that implement a type (and includes method restrictions), whereas -// a term set just represents the structural restrictions on a type. -type termSet struct { - complete bool - terms termlist -} - -func indentf(depth int, format string, args ...interface{}) { - fmt.Fprintf(os.Stderr, strings.Repeat(".", depth)+format+"\n", args...) -} - -func computeTermSetInternal(t types.Type, seen map[types.Type]*termSet, depth int) (res *termSet, err error) { - if t == nil { - panic("nil type") - } - - if debug { - indentf(depth, "%s", t.String()) - defer func() { - if err != nil { - indentf(depth, "=> %s", err) - } else { - indentf(depth, "=> %s", res.terms.String()) - } - }() - } - - const maxTermCount = 100 - if tset, ok := seen[t]; ok { - if !tset.complete { - return nil, fmt.Errorf("cycle detected in the declaration of %s", t) - } - return tset, nil - } - - // Mark the current type as seen to avoid infinite recursion. - tset := new(termSet) - defer func() { - tset.complete = true - }() - seen[t] = tset - - switch u := t.Underlying().(type) { - case *types.Interface: - // The term set of an interface is the intersection of the term sets of its - // embedded types. - tset.terms = allTermlist - for i := 0; i < u.NumEmbeddeds(); i++ { - embedded := u.EmbeddedType(i) - if _, ok := embedded.Underlying().(*TypeParam); ok { - return nil, fmt.Errorf("invalid embedded type %T", embedded) - } - tset2, err := computeTermSetInternal(embedded, seen, depth+1) - if err != nil { - return nil, err - } - tset.terms = tset.terms.intersect(tset2.terms) - } - case *Union: - // The term set of a union is the union of term sets of its terms. - tset.terms = nil - for i := 0; i < u.Len(); i++ { - t := u.Term(i) - var terms termlist - switch t.Type().Underlying().(type) { - case *types.Interface: - tset2, err := computeTermSetInternal(t.Type(), seen, depth+1) - if err != nil { - return nil, err - } - terms = tset2.terms - case *TypeParam, *Union: - // A stand-alone type parameter or union is not permitted as union - // term. 
- return nil, fmt.Errorf("invalid union term %T", t) - default: - if t.Type() == types.Typ[types.Invalid] { - continue - } - terms = termlist{{t.Tilde(), t.Type()}} - } - tset.terms = tset.terms.union(terms) - if len(tset.terms) > maxTermCount { - return nil, fmt.Errorf("exceeded max term count %d", maxTermCount) - } - } - case *TypeParam: - panic("unreachable") - default: - // For all other types, the term set is just a single non-tilde term - // holding the type itself. - if u != types.Typ[types.Invalid] { - tset.terms = termlist{{false, t}} - } - } - return tset, nil -} - -// under is a facade for the go/types internal function of the same name. It is -// used by typeterm.go. -func under(t types.Type) types.Type { - return t.Underlying() -} diff --git a/vendor/golang.org/x/exp/typeparams/termlist.go b/vendor/golang.org/x/exp/typeparams/termlist.go deleted file mode 100644 index 6f88bfa93..000000000 --- a/vendor/golang.org/x/exp/typeparams/termlist.go +++ /dev/null @@ -1,172 +0,0 @@ -// Copyright 2021 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Code generated by copytermlist.go DO NOT EDIT. - -package typeparams - -import ( - "bytes" - "go/types" -) - -// A termlist represents the type set represented by the union -// t1 ∪ y2 ∪ ... tn of the type sets of the terms t1 to tn. -// A termlist is in normal form if all terms are disjoint. -// termlist operations don't require the operands to be in -// normal form. -type termlist []*term - -// allTermlist represents the set of all types. -// It is in normal form. -var allTermlist = termlist{new(term)} - -// String prints the termlist exactly (without normalization). -func (xl termlist) String() string { - if len(xl) == 0 { - return "∅" - } - var buf bytes.Buffer - for i, x := range xl { - if i > 0 { - buf.WriteString(" ∪ ") - } - buf.WriteString(x.String()) - } - return buf.String() -} - -// isEmpty reports whether the termlist xl represents the empty set of types. -func (xl termlist) isEmpty() bool { - // If there's a non-nil term, the entire list is not empty. - // If the termlist is in normal form, this requires at most - // one iteration. - for _, x := range xl { - if x != nil { - return false - } - } - return true -} - -// isAll reports whether the termlist xl represents the set of all types. -func (xl termlist) isAll() bool { - // If there's a 𝓤 term, the entire list is 𝓤. - // If the termlist is in normal form, this requires at most - // one iteration. - for _, x := range xl { - if x != nil && x.typ == nil { - return true - } - } - return false -} - -// norm returns the normal form of xl. -func (xl termlist) norm() termlist { - // Quadratic algorithm, but good enough for now. - // TODO(gri) fix asymptotic performance - used := make([]bool, len(xl)) - var rl termlist - for i, xi := range xl { - if xi == nil || used[i] { - continue - } - for j := i + 1; j < len(xl); j++ { - xj := xl[j] - if xj == nil || used[j] { - continue - } - if u1, u2 := xi.union(xj); u2 == nil { - // If we encounter a 𝓤 term, the entire list is 𝓤. - // Exit early. - // (Note that this is not just an optimization; - // if we continue, we may end up with a 𝓤 term - // and other terms and the result would not be - // in normal form.) 
- if u1.typ == nil { - return allTermlist - } - xi = u1 - used[j] = true // xj is now unioned into xi - ignore it in future iterations - } - } - rl = append(rl, xi) - } - return rl -} - -// If the type set represented by xl is specified by a single (non-𝓤) term, -// singleType returns that type. Otherwise it returns nil. -func (xl termlist) singleType() types.Type { - if nl := xl.norm(); len(nl) == 1 { - return nl[0].typ // if nl.isAll() then typ is nil, which is ok - } - return nil -} - -// union returns the union xl ∪ yl. -func (xl termlist) union(yl termlist) termlist { - return append(xl, yl...).norm() -} - -// intersect returns the intersection xl ∩ yl. -func (xl termlist) intersect(yl termlist) termlist { - if xl.isEmpty() || yl.isEmpty() { - return nil - } - - // Quadratic algorithm, but good enough for now. - // TODO(gri) fix asymptotic performance - var rl termlist - for _, x := range xl { - for _, y := range yl { - if r := x.intersect(y); r != nil { - rl = append(rl, r) - } - } - } - return rl.norm() -} - -// equal reports whether xl and yl represent the same type set. -func (xl termlist) equal(yl termlist) bool { - // TODO(gri) this should be more efficient - return xl.subsetOf(yl) && yl.subsetOf(xl) -} - -// includes reports whether t ∈ xl. -func (xl termlist) includes(t types.Type) bool { - for _, x := range xl { - if x.includes(t) { - return true - } - } - return false -} - -// supersetOf reports whether y ⊆ xl. -func (xl termlist) supersetOf(y *term) bool { - for _, x := range xl { - if y.subsetOf(x) { - return true - } - } - return false -} - -// subsetOf reports whether xl ⊆ yl. -func (xl termlist) subsetOf(yl termlist) bool { - if yl.isEmpty() { - return xl.isEmpty() - } - - // each term x of xl must be a subset of yl - for _, x := range xl { - if !yl.supersetOf(x) { - return false // x is not a subset yl - } - } - return true -} diff --git a/vendor/golang.org/x/exp/typeparams/typeparams_go117.go b/vendor/golang.org/x/exp/typeparams/typeparams_go117.go deleted file mode 100644 index c1da79316..000000000 --- a/vendor/golang.org/x/exp/typeparams/typeparams_go117.go +++ /dev/null @@ -1,201 +0,0 @@ -// Copyright 2021 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -//go:build !go1.18 - -package typeparams - -import ( - "go/ast" - "go/token" - "go/types" -) - -const enabled = false - -func unsupported() { - panic("type parameters are unsupported at this go version") -} - -// IndexListExpr is a placeholder type, as type parameters are not supported at -// this Go version. Its methods panic on use. -type IndexListExpr struct { - ast.Expr - X ast.Expr // expression - Lbrack token.Pos // position of "[" - Indices []ast.Expr // index expressions - Rbrack token.Pos // position of "]" -} - -func (*IndexListExpr) Pos() token.Pos { unsupported(); return token.NoPos } -func (*IndexListExpr) End() token.Pos { unsupported(); return token.NoPos } - -// ForTypeSpec returns an empty field list, as type parameters on not supported -// at this Go version. -func ForTypeSpec(*ast.TypeSpec) *ast.FieldList { - return nil -} - -// ForFuncType returns an empty field list, as type parameters are not -// supported at this Go version. -func ForFuncType(*ast.FuncType) *ast.FieldList { - return nil -} - -// TypeParam is a placeholder type, as type parameters are not supported at -// this Go version. Its methods panic on use. 
-type TypeParam struct{ types.Type } - -func (*TypeParam) String() string { unsupported(); return "" } -func (*TypeParam) Underlying() types.Type { unsupported(); return nil } -func (*TypeParam) Index() int { unsupported(); return 0 } -func (*TypeParam) Constraint() types.Type { unsupported(); return nil } -func (*TypeParam) SetConstraint(types.Type) { unsupported() } -func (*TypeParam) Obj() *types.TypeName { unsupported(); return nil } - -// TypeParamList is a placeholder for an empty type parameter list. -type TypeParamList struct{} - -func (*TypeParamList) Len() int { return 0 } -func (*TypeParamList) At(int) *TypeParam { unsupported(); return nil } - -// TypeList is a placeholder for an empty type list. -type TypeList struct{} - -func (*TypeList) Len() int { return 0 } -func (*TypeList) At(int) types.Type { unsupported(); return nil } - -// NewTypeParam is unsupported at this Go version, and panics. -func NewTypeParam(name *types.TypeName, constraint types.Type) *TypeParam { - unsupported() - return nil -} - -// NewSignatureType calls types.NewSignature, panicking if recvTypeParams or -// typeParams is non-empty. -func NewSignatureType(recv *types.Var, recvTypeParams, typeParams []*TypeParam, params, results *types.Tuple, variadic bool) *types.Signature { - if len(recvTypeParams) != 0 || len(typeParams) != 0 { - unsupported() - } - return types.NewSignature(recv, params, results, variadic) -} - -// ForSignature returns an empty slice. -func ForSignature(*types.Signature) *TypeParamList { - return nil -} - -// RecvTypeParams returns a nil slice. -func RecvTypeParams(sig *types.Signature) *TypeParamList { - return nil -} - -// IsComparable returns false, as no interfaces are type-restricted at this Go -// version. -func IsComparable(*types.Interface) bool { - return false -} - -// IsMethodSet returns true, as no interfaces are type-restricted at this Go -// version. -func IsMethodSet(*types.Interface) bool { - return true -} - -// IsImplicit returns false, as no interfaces are implicit at this Go version. -func IsImplicit(*types.Interface) bool { - return false -} - -// MarkImplicit does nothing, because this Go version does not have implicit -// interfaces. -func MarkImplicit(*types.Interface) {} - -// ForNamed returns an empty type parameter list, as type parameters are not -// supported at this Go version. -func ForNamed(*types.Named) *TypeParamList { - return nil -} - -// SetForNamed panics if tparams is non-empty. -func SetForNamed(_ *types.Named, tparams []*TypeParam) { - if len(tparams) > 0 { - unsupported() - } -} - -// NamedTypeArgs returns nil. -func NamedTypeArgs(*types.Named) *TypeList { - return nil -} - -// NamedTypeOrigin is the identity method at this Go version. -func NamedTypeOrigin(named *types.Named) types.Type { - return named -} - -// Term holds information about a structural type restriction. -type Term struct { - tilde bool - typ types.Type -} - -func (m *Term) Tilde() bool { return m.tilde } -func (m *Term) Type() types.Type { return m.typ } -func (m *Term) String() string { - pre := "" - if m.tilde { - pre = "~" - } - return pre + m.typ.String() -} - -// NewTerm creates a new placeholder term type. -func NewTerm(tilde bool, typ types.Type) *Term { - return &Term{tilde, typ} -} - -// Union is a placeholder type, as type parameters are not supported at this Go -// version. Its methods panic on use. 
-type Union struct{ types.Type } - -func (*Union) String() string { unsupported(); return "" } -func (*Union) Underlying() types.Type { unsupported(); return nil } -func (*Union) Len() int { return 0 } -func (*Union) Term(i int) *Term { unsupported(); return nil } - -// NewUnion is unsupported at this Go version, and panics. -func NewUnion(terms []*Term) *Union { - unsupported() - return nil -} - -// InitInstances is a noop at this Go version. -func InitInstances(*types.Info) {} - -// Instance is a placeholder type, as type parameters are not supported at this -// Go version. -type Instance struct { - TypeArgs *TypeList - Type types.Type -} - -// GetInstances returns a nil map, as type parameters are not supported at this -// Go version. -func GetInstances(info *types.Info) map[*ast.Ident]Instance { return nil } - -// Context is a placeholder type, as type parameters are not supported at -// this Go version. -type Context struct{} - -// NewContext returns a placeholder Context instance. -func NewContext() *Context { - return &Context{} -} - -// Instantiate is unsupported on this Go version, and panics. -func Instantiate(ctxt *Context, typ types.Type, targs []types.Type, validate bool) (types.Type, error) { - unsupported() - return nil, nil -} diff --git a/vendor/golang.org/x/exp/typeparams/typeparams_go118.go b/vendor/golang.org/x/exp/typeparams/typeparams_go118.go deleted file mode 100644 index 0b35449d1..000000000 --- a/vendor/golang.org/x/exp/typeparams/typeparams_go118.go +++ /dev/null @@ -1,147 +0,0 @@ -// Copyright 2021 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -//go:build go1.18 - -package typeparams - -import ( - "go/ast" - "go/types" -) - -const enabled = true - -// IndexListExpr is an alias for ast.IndexListExpr. -type IndexListExpr = ast.IndexListExpr - -// ForTypeSpec returns n.TypeParams. -func ForTypeSpec(n *ast.TypeSpec) *ast.FieldList { - if n == nil { - return nil - } - return n.TypeParams -} - -// ForFuncType returns n.TypeParams. -func ForFuncType(n *ast.FuncType) *ast.FieldList { - if n == nil { - return nil - } - return n.TypeParams -} - -// TypeParam is an alias for types.TypeParam -type TypeParam = types.TypeParam - -// TypeParamList is an alias for types.TypeParamList -type TypeParamList = types.TypeParamList - -// TypeList is an alias for types.TypeList -type TypeList = types.TypeList - -// NewTypeParam calls types.NewTypeParam. -func NewTypeParam(name *types.TypeName, constraint types.Type) *TypeParam { - return types.NewTypeParam(name, constraint) -} - -// NewSignatureType calls types.NewSignatureType. -func NewSignatureType(recv *types.Var, recvTypeParams, typeParams []*TypeParam, params, results *types.Tuple, variadic bool) *types.Signature { - return types.NewSignatureType(recv, recvTypeParams, typeParams, params, results, variadic) -} - -// ForSignature returns sig.TypeParams() -func ForSignature(sig *types.Signature) *TypeParamList { - return sig.TypeParams() -} - -// RecvTypeParams returns sig.RecvTypeParams(). -func RecvTypeParams(sig *types.Signature) *TypeParamList { - return sig.RecvTypeParams() -} - -// IsComparable calls iface.IsComparable(). -func IsComparable(iface *types.Interface) bool { - return iface.IsComparable() -} - -// IsMethodSet calls iface.IsMethodSet(). -func IsMethodSet(iface *types.Interface) bool { - return iface.IsMethodSet() -} - -// IsImplicit calls iface.IsImplicit(). 
-func IsImplicit(iface *types.Interface) bool { - return iface.IsImplicit() -} - -// MarkImplicit calls iface.MarkImplicit(). -func MarkImplicit(iface *types.Interface) { - iface.MarkImplicit() -} - -// ForNamed extracts the (possibly empty) type parameter object list from -// named. -func ForNamed(named *types.Named) *TypeParamList { - return named.TypeParams() -} - -// SetForNamed sets the type params tparams on n. Each tparam must be of -// dynamic type *types.TypeParam. -func SetForNamed(n *types.Named, tparams []*TypeParam) { - n.SetTypeParams(tparams) -} - -// NamedTypeArgs returns named.TypeArgs(). -func NamedTypeArgs(named *types.Named) *TypeList { - return named.TypeArgs() -} - -// NamedTypeOrigin returns named.Orig(). -func NamedTypeOrigin(named *types.Named) types.Type { - return named.Origin() -} - -// Term is an alias for types.Term. -type Term = types.Term - -// NewTerm calls types.NewTerm. -func NewTerm(tilde bool, typ types.Type) *Term { - return types.NewTerm(tilde, typ) -} - -// Union is an alias for types.Union -type Union = types.Union - -// NewUnion calls types.NewUnion. -func NewUnion(terms []*Term) *Union { - return types.NewUnion(terms) -} - -// InitInstances initializes info to record information about type and function -// instances. -func InitInstances(info *types.Info) { - info.Instances = make(map[*ast.Ident]types.Instance) -} - -// Instance is an alias for types.Instance. -type Instance = types.Instance - -// GetInstances returns info.Instances. -func GetInstances(info *types.Info) map[*ast.Ident]Instance { - return info.Instances -} - -// Context is an alias for types.Context. -type Context = types.Context - -// NewContext calls types.NewContext. -func NewContext() *Context { - return types.NewContext() -} - -// Instantiate calls types.Instantiate. -func Instantiate(ctxt *Context, typ types.Type, targs []types.Type, validate bool) (types.Type, error) { - return types.Instantiate(ctxt, typ, targs, validate) -} diff --git a/vendor/golang.org/x/exp/typeparams/typeterm.go b/vendor/golang.org/x/exp/typeparams/typeterm.go deleted file mode 100644 index 7350bb702..000000000 --- a/vendor/golang.org/x/exp/typeparams/typeterm.go +++ /dev/null @@ -1,169 +0,0 @@ -// Copyright 2021 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Code generated by copytermlist.go DO NOT EDIT. - -package typeparams - -import "go/types" - -// A term describes elementary type sets: -// -// ∅: (*term)(nil) == ∅ // set of no types (empty set) -// 𝓤: &term{} == 𝓤 // set of all types (𝓤niverse) -// T: &term{false, T} == {T} // set of type T -// ~t: &term{true, t} == {t' | under(t') == t} // set of types with underlying type t -type term struct { - tilde bool // valid if typ != nil - typ types.Type -} - -func (x *term) String() string { - switch { - case x == nil: - return "∅" - case x.typ == nil: - return "𝓤" - case x.tilde: - return "~" + x.typ.String() - default: - return x.typ.String() - } -} - -// equal reports whether x and y represent the same type set. -func (x *term) equal(y *term) bool { - // easy cases - switch { - case x == nil || y == nil: - return x == y - case x.typ == nil || y.typ == nil: - return x.typ == y.typ - } - // ∅ ⊂ x, y ⊂ 𝓤 - - return x.tilde == y.tilde && types.Identical(x.typ, y.typ) -} - -// union returns the union x ∪ y: zero, one, or two non-nil terms. 
-func (x *term) union(y *term) (_, _ *term) { - // easy cases - switch { - case x == nil && y == nil: - return nil, nil // ∅ ∪ ∅ == ∅ - case x == nil: - return y, nil // ∅ ∪ y == y - case y == nil: - return x, nil // x ∪ ∅ == x - case x.typ == nil: - return x, nil // 𝓤 ∪ y == 𝓤 - case y.typ == nil: - return y, nil // x ∪ 𝓤 == 𝓤 - } - // ∅ ⊂ x, y ⊂ 𝓤 - - if x.disjoint(y) { - return x, y // x ∪ y == (x, y) if x ∩ y == ∅ - } - // x.typ == y.typ - - // ~t ∪ ~t == ~t - // ~t ∪ T == ~t - // T ∪ ~t == ~t - // T ∪ T == T - if x.tilde || !y.tilde { - return x, nil - } - return y, nil -} - -// intersect returns the intersection x ∩ y. -func (x *term) intersect(y *term) *term { - // easy cases - switch { - case x == nil || y == nil: - return nil // ∅ ∩ y == ∅ and ∩ ∅ == ∅ - case x.typ == nil: - return y // 𝓤 ∩ y == y - case y.typ == nil: - return x // x ∩ 𝓤 == x - } - // ∅ ⊂ x, y ⊂ 𝓤 - - if x.disjoint(y) { - return nil // x ∩ y == ∅ if x ∩ y == ∅ - } - // x.typ == y.typ - - // ~t ∩ ~t == ~t - // ~t ∩ T == T - // T ∩ ~t == T - // T ∩ T == T - if !x.tilde || y.tilde { - return x - } - return y -} - -// includes reports whether t ∈ x. -func (x *term) includes(t types.Type) bool { - // easy cases - switch { - case x == nil: - return false // t ∈ ∅ == false - case x.typ == nil: - return true // t ∈ 𝓤 == true - } - // ∅ ⊂ x ⊂ 𝓤 - - u := t - if x.tilde { - u = under(u) - } - return types.Identical(x.typ, u) -} - -// subsetOf reports whether x ⊆ y. -func (x *term) subsetOf(y *term) bool { - // easy cases - switch { - case x == nil: - return true // ∅ ⊆ y == true - case y == nil: - return false // x ⊆ ∅ == false since x != ∅ - case y.typ == nil: - return true // x ⊆ 𝓤 == true - case x.typ == nil: - return false // 𝓤 ⊆ y == false since y != 𝓤 - } - // ∅ ⊂ x, y ⊂ 𝓤 - - if x.disjoint(y) { - return false // x ⊆ y == false if x ∩ y == ∅ - } - // x.typ == y.typ - - // ~t ⊆ ~t == true - // ~t ⊆ T == false - // T ⊆ ~t == true - // T ⊆ T == true - return !x.tilde || y.tilde -} - -// disjoint reports whether x ∩ y == ∅. -// x.typ and y.typ must not be nil. -func (x *term) disjoint(y *term) bool { - if debug && (x.typ == nil || y.typ == nil) { - panic("invalid argument(s)") - } - ux := x.typ - if y.tilde { - ux = under(ux) - } - uy := y.typ - if x.tilde { - uy = under(uy) - } - return !types.Identical(ux, uy) -} diff --git a/vendor/golang.org/x/mod/modfile/print.go b/vendor/golang.org/x/mod/modfile/print.go deleted file mode 100644 index 2a0123d4b..000000000 --- a/vendor/golang.org/x/mod/modfile/print.go +++ /dev/null @@ -1,184 +0,0 @@ -// Copyright 2018 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Module file printer. - -package modfile - -import ( - "bytes" - "fmt" - "strings" -) - -// Format returns a go.mod file as a byte slice, formatted in standard style. -func Format(f *FileSyntax) []byte { - pr := &printer{} - pr.file(f) - - // remove trailing blank lines - b := pr.Bytes() - for len(b) > 0 && b[len(b)-1] == '\n' && (len(b) == 1 || b[len(b)-2] == '\n') { - b = b[:len(b)-1] - } - return b -} - -// A printer collects the state during printing of a file or expression. -type printer struct { - bytes.Buffer // output buffer - comment []Comment // pending end-of-line comments - margin int // left margin (indent), a number of tabs -} - -// printf prints to the buffer. -func (p *printer) printf(format string, args ...interface{}) { - fmt.Fprintf(p, format, args...) 
-} - -// indent returns the position on the current line, in bytes, 0-indexed. -func (p *printer) indent() int { - b := p.Bytes() - n := 0 - for n < len(b) && b[len(b)-1-n] != '\n' { - n++ - } - return n -} - -// newline ends the current line, flushing end-of-line comments. -func (p *printer) newline() { - if len(p.comment) > 0 { - p.printf(" ") - for i, com := range p.comment { - if i > 0 { - p.trim() - p.printf("\n") - for i := 0; i < p.margin; i++ { - p.printf("\t") - } - } - p.printf("%s", strings.TrimSpace(com.Token)) - } - p.comment = p.comment[:0] - } - - p.trim() - if b := p.Bytes(); len(b) == 0 || (len(b) >= 2 && b[len(b)-1] == '\n' && b[len(b)-2] == '\n') { - // skip the blank line at top of file or after a blank line - } else { - p.printf("\n") - } - for i := 0; i < p.margin; i++ { - p.printf("\t") - } -} - -// trim removes trailing spaces and tabs from the current line. -func (p *printer) trim() { - // Remove trailing spaces and tabs from line we're about to end. - b := p.Bytes() - n := len(b) - for n > 0 && (b[n-1] == '\t' || b[n-1] == ' ') { - n-- - } - p.Truncate(n) -} - -// file formats the given file into the print buffer. -func (p *printer) file(f *FileSyntax) { - for _, com := range f.Before { - p.printf("%s", strings.TrimSpace(com.Token)) - p.newline() - } - - for i, stmt := range f.Stmt { - switch x := stmt.(type) { - case *CommentBlock: - // comments already handled - p.expr(x) - - default: - p.expr(x) - p.newline() - } - - for _, com := range stmt.Comment().After { - p.printf("%s", strings.TrimSpace(com.Token)) - p.newline() - } - - if i+1 < len(f.Stmt) { - p.newline() - } - } -} - -func (p *printer) expr(x Expr) { - // Emit line-comments preceding this expression. - if before := x.Comment().Before; len(before) > 0 { - // Want to print a line comment. - // Line comments must be at the current margin. - p.trim() - if p.indent() > 0 { - // There's other text on the line. Start a new line. - p.printf("\n") - } - // Re-indent to margin. - for i := 0; i < p.margin; i++ { - p.printf("\t") - } - for _, com := range before { - p.printf("%s", strings.TrimSpace(com.Token)) - p.newline() - } - } - - switch x := x.(type) { - default: - panic(fmt.Errorf("printer: unexpected type %T", x)) - - case *CommentBlock: - // done - - case *LParen: - p.printf("(") - case *RParen: - p.printf(")") - - case *Line: - p.tokens(x.Token) - - case *LineBlock: - p.tokens(x.Token) - p.printf(" ") - p.expr(&x.LParen) - p.margin++ - for _, l := range x.Line { - p.newline() - p.expr(l) - } - p.margin-- - p.newline() - p.expr(&x.RParen) - } - - // Queue end-of-line comments for printing when we - // reach the end of the line. - p.comment = append(p.comment, x.Comment().Suffix...) -} - -func (p *printer) tokens(tokens []string) { - sep := "" - for _, t := range tokens { - if t == "," || t == ")" || t == "]" || t == "}" { - sep = "" - } - p.printf("%s%s", sep, t) - sep = " " - if t == "(" || t == "[" || t == "{" { - sep = "" - } - } -} diff --git a/vendor/golang.org/x/mod/modfile/read.go b/vendor/golang.org/x/mod/modfile/read.go deleted file mode 100644 index 5b5bb5e11..000000000 --- a/vendor/golang.org/x/mod/modfile/read.go +++ /dev/null @@ -1,958 +0,0 @@ -// Copyright 2018 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
- -package modfile - -import ( - "bytes" - "errors" - "fmt" - "os" - "strconv" - "strings" - "unicode" - "unicode/utf8" -) - -// A Position describes an arbitrary source position in a file, including the -// file, line, column, and byte offset. -type Position struct { - Line int // line in input (starting at 1) - LineRune int // rune in line (starting at 1) - Byte int // byte in input (starting at 0) -} - -// add returns the position at the end of s, assuming it starts at p. -func (p Position) add(s string) Position { - p.Byte += len(s) - if n := strings.Count(s, "\n"); n > 0 { - p.Line += n - s = s[strings.LastIndex(s, "\n")+1:] - p.LineRune = 1 - } - p.LineRune += utf8.RuneCountInString(s) - return p -} - -// An Expr represents an input element. -type Expr interface { - // Span returns the start and end position of the expression, - // excluding leading or trailing comments. - Span() (start, end Position) - - // Comment returns the comments attached to the expression. - // This method would normally be named 'Comments' but that - // would interfere with embedding a type of the same name. - Comment() *Comments -} - -// A Comment represents a single // comment. -type Comment struct { - Start Position - Token string // without trailing newline - Suffix bool // an end of line (not whole line) comment -} - -// Comments collects the comments associated with an expression. -type Comments struct { - Before []Comment // whole-line comments before this expression - Suffix []Comment // end-of-line comments after this expression - - // For top-level expressions only, After lists whole-line - // comments following the expression. - After []Comment -} - -// Comment returns the receiver. This isn't useful by itself, but -// a [Comments] struct is embedded into all the expression -// implementation types, and this gives each of those a Comment -// method to satisfy the Expr interface. -func (c *Comments) Comment() *Comments { - return c -} - -// A FileSyntax represents an entire go.mod file. -type FileSyntax struct { - Name string // file path - Comments - Stmt []Expr -} - -func (x *FileSyntax) Span() (start, end Position) { - if len(x.Stmt) == 0 { - return - } - start, _ = x.Stmt[0].Span() - _, end = x.Stmt[len(x.Stmt)-1].Span() - return start, end -} - -// addLine adds a line containing the given tokens to the file. -// -// If the first token of the hint matches the first token of the -// line, the new line is added at the end of the block containing hint, -// extracting hint into a new block if it is not yet in one. -// -// If the hint is non-nil buts its first token does not match, -// the new line is added after the block containing hint -// (or hint itself, if not in a block). -// -// If no hint is provided, addLine appends the line to the end of -// the last block with a matching first token, -// or to the end of the file if no such block exists. -func (x *FileSyntax) addLine(hint Expr, tokens ...string) *Line { - if hint == nil { - // If no hint given, add to the last statement of the given type. 
- Loop: - for i := len(x.Stmt) - 1; i >= 0; i-- { - stmt := x.Stmt[i] - switch stmt := stmt.(type) { - case *Line: - if stmt.Token != nil && stmt.Token[0] == tokens[0] { - hint = stmt - break Loop - } - case *LineBlock: - if stmt.Token[0] == tokens[0] { - hint = stmt - break Loop - } - } - } - } - - newLineAfter := func(i int) *Line { - new := &Line{Token: tokens} - if i == len(x.Stmt) { - x.Stmt = append(x.Stmt, new) - } else { - x.Stmt = append(x.Stmt, nil) - copy(x.Stmt[i+2:], x.Stmt[i+1:]) - x.Stmt[i+1] = new - } - return new - } - - if hint != nil { - for i, stmt := range x.Stmt { - switch stmt := stmt.(type) { - case *Line: - if stmt == hint { - if stmt.Token == nil || stmt.Token[0] != tokens[0] { - return newLineAfter(i) - } - - // Convert line to line block. - stmt.InBlock = true - block := &LineBlock{Token: stmt.Token[:1], Line: []*Line{stmt}} - stmt.Token = stmt.Token[1:] - x.Stmt[i] = block - new := &Line{Token: tokens[1:], InBlock: true} - block.Line = append(block.Line, new) - return new - } - - case *LineBlock: - if stmt == hint { - if stmt.Token[0] != tokens[0] { - return newLineAfter(i) - } - - new := &Line{Token: tokens[1:], InBlock: true} - stmt.Line = append(stmt.Line, new) - return new - } - - for j, line := range stmt.Line { - if line == hint { - if stmt.Token[0] != tokens[0] { - return newLineAfter(i) - } - - // Add new line after hint within the block. - stmt.Line = append(stmt.Line, nil) - copy(stmt.Line[j+2:], stmt.Line[j+1:]) - new := &Line{Token: tokens[1:], InBlock: true} - stmt.Line[j+1] = new - return new - } - } - } - } - } - - new := &Line{Token: tokens} - x.Stmt = append(x.Stmt, new) - return new -} - -func (x *FileSyntax) updateLine(line *Line, tokens ...string) { - if line.InBlock { - tokens = tokens[1:] - } - line.Token = tokens -} - -// markRemoved modifies line so that it (and its end-of-line comment, if any) -// will be dropped by (*FileSyntax).Cleanup. -func (line *Line) markRemoved() { - line.Token = nil - line.Comments.Suffix = nil -} - -// Cleanup cleans up the file syntax x after any edit operations. -// To avoid quadratic behavior, (*Line).markRemoved marks the line as dead -// by setting line.Token = nil but does not remove it from the slice -// in which it appears. After edits have all been indicated, -// calling Cleanup cleans out the dead lines. -func (x *FileSyntax) Cleanup() { - w := 0 - for _, stmt := range x.Stmt { - switch stmt := stmt.(type) { - case *Line: - if stmt.Token == nil { - continue - } - case *LineBlock: - ww := 0 - for _, line := range stmt.Line { - if line.Token != nil { - stmt.Line[ww] = line - ww++ - } - } - if ww == 0 { - continue - } - if ww == 1 { - // Collapse block into single line. - line := &Line{ - Comments: Comments{ - Before: commentsAdd(stmt.Before, stmt.Line[0].Before), - Suffix: commentsAdd(stmt.Line[0].Suffix, stmt.Suffix), - After: commentsAdd(stmt.Line[0].After, stmt.After), - }, - Token: stringsAdd(stmt.Token, stmt.Line[0].Token), - } - x.Stmt[w] = line - w++ - continue - } - stmt.Line = stmt.Line[:ww] - } - x.Stmt[w] = stmt - w++ - } - x.Stmt = x.Stmt[:w] -} - -func commentsAdd(x, y []Comment) []Comment { - return append(x[:len(x):len(x)], y...) -} - -func stringsAdd(x, y []string) []string { - return append(x[:len(x):len(x)], y...) -} - -// A CommentBlock represents a top-level block of comments separate -// from any rule. 
-type CommentBlock struct { - Comments - Start Position -} - -func (x *CommentBlock) Span() (start, end Position) { - return x.Start, x.Start -} - -// A Line is a single line of tokens. -type Line struct { - Comments - Start Position - Token []string - InBlock bool - End Position -} - -func (x *Line) Span() (start, end Position) { - return x.Start, x.End -} - -// A LineBlock is a factored block of lines, like -// -// require ( -// "x" -// "y" -// ) -type LineBlock struct { - Comments - Start Position - LParen LParen - Token []string - Line []*Line - RParen RParen -} - -func (x *LineBlock) Span() (start, end Position) { - return x.Start, x.RParen.Pos.add(")") -} - -// An LParen represents the beginning of a parenthesized line block. -// It is a place to store suffix comments. -type LParen struct { - Comments - Pos Position -} - -func (x *LParen) Span() (start, end Position) { - return x.Pos, x.Pos.add(")") -} - -// An RParen represents the end of a parenthesized line block. -// It is a place to store whole-line (before) comments. -type RParen struct { - Comments - Pos Position -} - -func (x *RParen) Span() (start, end Position) { - return x.Pos, x.Pos.add(")") -} - -// An input represents a single input file being parsed. -type input struct { - // Lexing state. - filename string // name of input file, for errors - complete []byte // entire input - remaining []byte // remaining input - tokenStart []byte // token being scanned to end of input - token token // next token to be returned by lex, peek - pos Position // current input position - comments []Comment // accumulated comments - - // Parser state. - file *FileSyntax // returned top-level syntax tree - parseErrors ErrorList // errors encountered during parsing - - // Comment assignment state. - pre []Expr // all expressions, in preorder traversal - post []Expr // all expressions, in postorder traversal -} - -func newInput(filename string, data []byte) *input { - return &input{ - filename: filename, - complete: data, - remaining: data, - pos: Position{Line: 1, LineRune: 1, Byte: 0}, - } -} - -// parse parses the input file. -func parse(file string, data []byte) (f *FileSyntax, err error) { - // The parser panics for both routine errors like syntax errors - // and for programmer bugs like array index errors. - // Turn both into error returns. Catching bug panics is - // especially important when processing many files. - in := newInput(file, data) - defer func() { - if e := recover(); e != nil && e != &in.parseErrors { - in.parseErrors = append(in.parseErrors, Error{ - Filename: in.filename, - Pos: in.pos, - Err: fmt.Errorf("internal error: %v", e), - }) - } - if err == nil && len(in.parseErrors) > 0 { - err = in.parseErrors - } - }() - - // Prime the lexer by reading in the first token. It will be available - // in the next peek() or lex() call. - in.readToken() - - // Invoke the parser. - in.parseFile() - if len(in.parseErrors) > 0 { - return nil, in.parseErrors - } - in.file.Name = in.filename - - // Assign comments to nearby syntax. - in.assignComments() - - return in.file, nil -} - -// Error is called to report an error. -// Error does not return: it panics. -func (in *input) Error(s string) { - in.parseErrors = append(in.parseErrors, Error{ - Filename: in.filename, - Pos: in.pos, - Err: errors.New(s), - }) - panic(&in.parseErrors) -} - -// eof reports whether the input has reached end of file. -func (in *input) eof() bool { - return len(in.remaining) == 0 -} - -// peekRune returns the next rune in the input without consuming it. 
-func (in *input) peekRune() int { - if len(in.remaining) == 0 { - return 0 - } - r, _ := utf8.DecodeRune(in.remaining) - return int(r) -} - -// peekPrefix reports whether the remaining input begins with the given prefix. -func (in *input) peekPrefix(prefix string) bool { - // This is like bytes.HasPrefix(in.remaining, []byte(prefix)) - // but without the allocation of the []byte copy of prefix. - for i := 0; i < len(prefix); i++ { - if i >= len(in.remaining) || in.remaining[i] != prefix[i] { - return false - } - } - return true -} - -// readRune consumes and returns the next rune in the input. -func (in *input) readRune() int { - if len(in.remaining) == 0 { - in.Error("internal lexer error: readRune at EOF") - } - r, size := utf8.DecodeRune(in.remaining) - in.remaining = in.remaining[size:] - if r == '\n' { - in.pos.Line++ - in.pos.LineRune = 1 - } else { - in.pos.LineRune++ - } - in.pos.Byte += size - return int(r) -} - -type token struct { - kind tokenKind - pos Position - endPos Position - text string -} - -type tokenKind int - -const ( - _EOF tokenKind = -(iota + 1) - _EOLCOMMENT - _IDENT - _STRING - _COMMENT - - // newlines and punctuation tokens are allowed as ASCII codes. -) - -func (k tokenKind) isComment() bool { - return k == _COMMENT || k == _EOLCOMMENT -} - -// isEOL returns whether a token terminates a line. -func (k tokenKind) isEOL() bool { - return k == _EOF || k == _EOLCOMMENT || k == '\n' -} - -// startToken marks the beginning of the next input token. -// It must be followed by a call to endToken, once the token's text has -// been consumed using readRune. -func (in *input) startToken() { - in.tokenStart = in.remaining - in.token.text = "" - in.token.pos = in.pos -} - -// endToken marks the end of an input token. -// It records the actual token string in tok.text. -// A single trailing newline (LF or CRLF) will be removed from comment tokens. -func (in *input) endToken(kind tokenKind) { - in.token.kind = kind - text := string(in.tokenStart[:len(in.tokenStart)-len(in.remaining)]) - if kind.isComment() { - if strings.HasSuffix(text, "\r\n") { - text = text[:len(text)-2] - } else { - text = strings.TrimSuffix(text, "\n") - } - } - in.token.text = text - in.token.endPos = in.pos -} - -// peek returns the kind of the next token returned by lex. -func (in *input) peek() tokenKind { - return in.token.kind -} - -// lex is called from the parser to obtain the next input token. -func (in *input) lex() token { - tok := in.token - in.readToken() - return tok -} - -// readToken lexes the next token from the text and stores it in in.token. -func (in *input) readToken() { - // Skip past spaces, stopping at non-space or EOF. - for !in.eof() { - c := in.peekRune() - if c == ' ' || c == '\t' || c == '\r' { - in.readRune() - continue - } - - // Comment runs to end of line. - if in.peekPrefix("//") { - in.startToken() - - // Is this comment the only thing on its line? - // Find the last \n before this // and see if it's all - // spaces from there to here. - i := bytes.LastIndex(in.complete[:in.pos.Byte], []byte("\n")) - suffix := len(bytes.TrimSpace(in.complete[i+1:in.pos.Byte])) > 0 - in.readRune() - in.readRune() - - // Consume comment. - for len(in.remaining) > 0 && in.readRune() != '\n' { - } - - // If we are at top level (not in a statement), hand the comment to - // the parser as a _COMMENT token. The grammar is written - // to handle top-level comments itself. - if !suffix { - in.endToken(_COMMENT) - return - } - - // Otherwise, save comment for later attachment to syntax tree. 
- in.endToken(_EOLCOMMENT) - in.comments = append(in.comments, Comment{in.token.pos, in.token.text, suffix}) - return - } - - if in.peekPrefix("/*") { - in.Error("mod files must use // comments (not /* */ comments)") - } - - // Found non-space non-comment. - break - } - - // Found the beginning of the next token. - in.startToken() - - // End of file. - if in.eof() { - in.endToken(_EOF) - return - } - - // Punctuation tokens. - switch c := in.peekRune(); c { - case '\n', '(', ')', '[', ']', '{', '}', ',': - in.readRune() - in.endToken(tokenKind(c)) - return - - case '"', '`': // quoted string - quote := c - in.readRune() - for { - if in.eof() { - in.pos = in.token.pos - in.Error("unexpected EOF in string") - } - if in.peekRune() == '\n' { - in.Error("unexpected newline in string") - } - c := in.readRune() - if c == quote { - break - } - if c == '\\' && quote != '`' { - if in.eof() { - in.pos = in.token.pos - in.Error("unexpected EOF in string") - } - in.readRune() - } - } - in.endToken(_STRING) - return - } - - // Checked all punctuation. Must be identifier token. - if c := in.peekRune(); !isIdent(c) { - in.Error(fmt.Sprintf("unexpected input character %#q", c)) - } - - // Scan over identifier. - for isIdent(in.peekRune()) { - if in.peekPrefix("//") { - break - } - if in.peekPrefix("/*") { - in.Error("mod files must use // comments (not /* */ comments)") - } - in.readRune() - } - in.endToken(_IDENT) -} - -// isIdent reports whether c is an identifier rune. -// We treat most printable runes as identifier runes, except for a handful of -// ASCII punctuation characters. -func isIdent(c int) bool { - switch r := rune(c); r { - case ' ', '(', ')', '[', ']', '{', '}', ',': - return false - default: - return !unicode.IsSpace(r) && unicode.IsPrint(r) - } -} - -// Comment assignment. -// We build two lists of all subexpressions, preorder and postorder. -// The preorder list is ordered by start location, with outer expressions first. -// The postorder list is ordered by end location, with outer expressions last. -// We use the preorder list to assign each whole-line comment to the syntax -// immediately following it, and we use the postorder list to assign each -// end-of-line comment to the syntax immediately preceding it. - -// order walks the expression adding it and its subexpressions to the -// preorder and postorder lists. -func (in *input) order(x Expr) { - if x != nil { - in.pre = append(in.pre, x) - } - switch x := x.(type) { - default: - panic(fmt.Errorf("order: unexpected type %T", x)) - case nil: - // nothing - case *LParen, *RParen: - // nothing - case *CommentBlock: - // nothing - case *Line: - // nothing - case *FileSyntax: - for _, stmt := range x.Stmt { - in.order(stmt) - } - case *LineBlock: - in.order(&x.LParen) - for _, l := range x.Line { - in.order(l) - } - in.order(&x.RParen) - } - if x != nil { - in.post = append(in.post, x) - } -} - -// assignComments attaches comments to nearby syntax. -func (in *input) assignComments() { - const debug = false - - // Generate preorder and postorder lists. - in.order(in.file) - - // Split into whole-line comments and suffix comments. - var line, suffix []Comment - for _, com := range in.comments { - if com.Suffix { - suffix = append(suffix, com) - } else { - line = append(line, com) - } - } - - if debug { - for _, c := range line { - fmt.Fprintf(os.Stderr, "LINE %q :%d:%d #%d\n", c.Token, c.Start.Line, c.Start.LineRune, c.Start.Byte) - } - } - - // Assign line comments to syntax immediately following. 
- for _, x := range in.pre { - start, _ := x.Span() - if debug { - fmt.Fprintf(os.Stderr, "pre %T :%d:%d #%d\n", x, start.Line, start.LineRune, start.Byte) - } - xcom := x.Comment() - for len(line) > 0 && start.Byte >= line[0].Start.Byte { - if debug { - fmt.Fprintf(os.Stderr, "ASSIGN LINE %q #%d\n", line[0].Token, line[0].Start.Byte) - } - xcom.Before = append(xcom.Before, line[0]) - line = line[1:] - } - } - - // Remaining line comments go at end of file. - in.file.After = append(in.file.After, line...) - - if debug { - for _, c := range suffix { - fmt.Fprintf(os.Stderr, "SUFFIX %q :%d:%d #%d\n", c.Token, c.Start.Line, c.Start.LineRune, c.Start.Byte) - } - } - - // Assign suffix comments to syntax immediately before. - for i := len(in.post) - 1; i >= 0; i-- { - x := in.post[i] - - start, end := x.Span() - if debug { - fmt.Fprintf(os.Stderr, "post %T :%d:%d #%d :%d:%d #%d\n", x, start.Line, start.LineRune, start.Byte, end.Line, end.LineRune, end.Byte) - } - - // Do not assign suffix comments to end of line block or whole file. - // Instead assign them to the last element inside. - switch x.(type) { - case *FileSyntax: - continue - } - - // Do not assign suffix comments to something that starts - // on an earlier line, so that in - // - // x ( y - // z ) // comment - // - // we assign the comment to z and not to x ( ... ). - if start.Line != end.Line { - continue - } - xcom := x.Comment() - for len(suffix) > 0 && end.Byte <= suffix[len(suffix)-1].Start.Byte { - if debug { - fmt.Fprintf(os.Stderr, "ASSIGN SUFFIX %q #%d\n", suffix[len(suffix)-1].Token, suffix[len(suffix)-1].Start.Byte) - } - xcom.Suffix = append(xcom.Suffix, suffix[len(suffix)-1]) - suffix = suffix[:len(suffix)-1] - } - } - - // We assigned suffix comments in reverse. - // If multiple suffix comments were appended to the same - // expression node, they are now in reverse. Fix that. - for _, x := range in.post { - reverseComments(x.Comment().Suffix) - } - - // Remaining suffix comments go at beginning of file. - in.file.Before = append(in.file.Before, suffix...) -} - -// reverseComments reverses the []Comment list. -func reverseComments(list []Comment) { - for i, j := 0, len(list)-1; i < j; i, j = i+1, j-1 { - list[i], list[j] = list[j], list[i] - } -} - -func (in *input) parseFile() { - in.file = new(FileSyntax) - var cb *CommentBlock - for { - switch in.peek() { - case '\n': - in.lex() - if cb != nil { - in.file.Stmt = append(in.file.Stmt, cb) - cb = nil - } - case _COMMENT: - tok := in.lex() - if cb == nil { - cb = &CommentBlock{Start: tok.pos} - } - com := cb.Comment() - com.Before = append(com.Before, Comment{Start: tok.pos, Token: tok.text}) - case _EOF: - if cb != nil { - in.file.Stmt = append(in.file.Stmt, cb) - } - return - default: - in.parseStmt() - if cb != nil { - in.file.Stmt[len(in.file.Stmt)-1].Comment().Before = cb.Before - cb = nil - } - } - } -} - -func (in *input) parseStmt() { - tok := in.lex() - start := tok.pos - end := tok.endPos - tokens := []string{tok.text} - for { - tok := in.lex() - switch { - case tok.kind.isEOL(): - in.file.Stmt = append(in.file.Stmt, &Line{ - Start: start, - Token: tokens, - End: end, - }) - return - - case tok.kind == '(': - if next := in.peek(); next.isEOL() { - // Start of block: no more tokens on this line. - in.file.Stmt = append(in.file.Stmt, in.parseLineBlock(start, tokens, tok)) - return - } else if next == ')' { - rparen := in.lex() - if in.peek().isEOL() { - // Empty block. 
- in.lex() - in.file.Stmt = append(in.file.Stmt, &LineBlock{ - Start: start, - Token: tokens, - LParen: LParen{Pos: tok.pos}, - RParen: RParen{Pos: rparen.pos}, - }) - return - } - // '( )' in the middle of the line, not a block. - tokens = append(tokens, tok.text, rparen.text) - } else { - // '(' in the middle of the line, not a block. - tokens = append(tokens, tok.text) - } - - default: - tokens = append(tokens, tok.text) - end = tok.endPos - } - } -} - -func (in *input) parseLineBlock(start Position, token []string, lparen token) *LineBlock { - x := &LineBlock{ - Start: start, - Token: token, - LParen: LParen{Pos: lparen.pos}, - } - var comments []Comment - for { - switch in.peek() { - case _EOLCOMMENT: - // Suffix comment, will be attached later by assignComments. - in.lex() - case '\n': - // Blank line. Add an empty comment to preserve it. - in.lex() - if len(comments) == 0 && len(x.Line) > 0 || len(comments) > 0 && comments[len(comments)-1].Token != "" { - comments = append(comments, Comment{}) - } - case _COMMENT: - tok := in.lex() - comments = append(comments, Comment{Start: tok.pos, Token: tok.text}) - case _EOF: - in.Error(fmt.Sprintf("syntax error (unterminated block started at %s:%d:%d)", in.filename, x.Start.Line, x.Start.LineRune)) - case ')': - rparen := in.lex() - x.RParen.Before = comments - x.RParen.Pos = rparen.pos - if !in.peek().isEOL() { - in.Error("syntax error (expected newline after closing paren)") - } - in.lex() - return x - default: - l := in.parseLine() - x.Line = append(x.Line, l) - l.Comment().Before = comments - comments = nil - } - } -} - -func (in *input) parseLine() *Line { - tok := in.lex() - if tok.kind.isEOL() { - in.Error("internal parse error: parseLine at end of line") - } - start := tok.pos - end := tok.endPos - tokens := []string{tok.text} - for { - tok := in.lex() - if tok.kind.isEOL() { - return &Line{ - Start: start, - Token: tokens, - End: end, - InBlock: true, - } - } - tokens = append(tokens, tok.text) - end = tok.endPos - } -} - -var ( - slashSlash = []byte("//") - moduleStr = []byte("module") -) - -// ModulePath returns the module path from the gomod file text. -// If it cannot find a module path, it returns an empty string. -// It is tolerant of unrelated problems in the go.mod file. -func ModulePath(mod []byte) string { - for len(mod) > 0 { - line := mod - mod = nil - if i := bytes.IndexByte(line, '\n'); i >= 0 { - line, mod = line[:i], line[i+1:] - } - if i := bytes.Index(line, slashSlash); i >= 0 { - line = line[:i] - } - line = bytes.TrimSpace(line) - if !bytes.HasPrefix(line, moduleStr) { - continue - } - line = line[len(moduleStr):] - n := len(line) - line = bytes.TrimSpace(line) - if len(line) == n || len(line) == 0 { - continue - } - - if line[0] == '"' || line[0] == '`' { - p, err := strconv.Unquote(string(line)) - if err != nil { - return "" // malformed quoted string or multiline module path - } - return p - } - - return string(line) - } - return "" // missing module path -} diff --git a/vendor/golang.org/x/mod/modfile/rule.go b/vendor/golang.org/x/mod/modfile/rule.go deleted file mode 100644 index 26acaa5f7..000000000 --- a/vendor/golang.org/x/mod/modfile/rule.go +++ /dev/null @@ -1,1664 +0,0 @@ -// Copyright 2018 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Package modfile implements a parser and formatter for go.mod files. -// -// The go.mod syntax is described in -// https://pkg.go.dev/cmd/go/#hdr-The_go_mod_file. 
-// -// The [Parse] and [ParseLax] functions both parse a go.mod file and return an -// abstract syntax tree. ParseLax ignores unknown statements and may be used to -// parse go.mod files that may have been developed with newer versions of Go. -// -// The [File] struct returned by Parse and ParseLax represent an abstract -// go.mod file. File has several methods like [File.AddNewRequire] and -// [File.DropReplace] that can be used to programmatically edit a file. -// -// The [Format] function formats a File back to a byte slice which can be -// written to a file. -package modfile - -import ( - "errors" - "fmt" - "path/filepath" - "sort" - "strconv" - "strings" - "unicode" - - "golang.org/x/mod/internal/lazyregexp" - "golang.org/x/mod/module" - "golang.org/x/mod/semver" -) - -// A File is the parsed, interpreted form of a go.mod file. -type File struct { - Module *Module - Go *Go - Toolchain *Toolchain - Require []*Require - Exclude []*Exclude - Replace []*Replace - Retract []*Retract - - Syntax *FileSyntax -} - -// A Module is the module statement. -type Module struct { - Mod module.Version - Deprecated string - Syntax *Line -} - -// A Go is the go statement. -type Go struct { - Version string // "1.23" - Syntax *Line -} - -// A Toolchain is the toolchain statement. -type Toolchain struct { - Name string // "go1.21rc1" - Syntax *Line -} - -// An Exclude is a single exclude statement. -type Exclude struct { - Mod module.Version - Syntax *Line -} - -// A Replace is a single replace statement. -type Replace struct { - Old module.Version - New module.Version - Syntax *Line -} - -// A Retract is a single retract statement. -type Retract struct { - VersionInterval - Rationale string - Syntax *Line -} - -// A VersionInterval represents a range of versions with upper and lower bounds. -// Intervals are closed: both bounds are included. When Low is equal to High, -// the interval may refer to a single version ('v1.2.3') or an interval -// ('[v1.2.3, v1.2.3]'); both have the same representation. -type VersionInterval struct { - Low, High string -} - -// A Require is a single require statement. -type Require struct { - Mod module.Version - Indirect bool // has "// indirect" comment - Syntax *Line -} - -func (r *Require) markRemoved() { - r.Syntax.markRemoved() - *r = Require{} -} - -func (r *Require) setVersion(v string) { - r.Mod.Version = v - - if line := r.Syntax; len(line.Token) > 0 { - if line.InBlock { - // If the line is preceded by an empty line, remove it; see - // https://golang.org/issue/33779. - if len(line.Comments.Before) == 1 && len(line.Comments.Before[0].Token) == 0 { - line.Comments.Before = line.Comments.Before[:0] - } - if len(line.Token) >= 2 { // example.com v1.2.3 - line.Token[1] = v - } - } else { - if len(line.Token) >= 3 { // require example.com v1.2.3 - line.Token[2] = v - } - } - } -} - -// setIndirect sets line to have (or not have) a "// indirect" comment. -func (r *Require) setIndirect(indirect bool) { - r.Indirect = indirect - line := r.Syntax - if isIndirect(line) == indirect { - return - } - if indirect { - // Adding comment. - if len(line.Suffix) == 0 { - // New comment. - line.Suffix = []Comment{{Token: "// indirect", Suffix: true}} - return - } - - com := &line.Suffix[0] - text := strings.TrimSpace(strings.TrimPrefix(com.Token, string(slashSlash))) - if text == "" { - // Empty comment. - com.Token = "// indirect" - return - } - - // Insert at beginning of existing comment. - com.Token = "// indirect; " + text - return - } - - // Removing comment. 
- f := strings.TrimSpace(strings.TrimPrefix(line.Suffix[0].Token, string(slashSlash))) - if f == "indirect" { - // Remove whole comment. - line.Suffix = nil - return - } - - // Remove comment prefix. - com := &line.Suffix[0] - i := strings.Index(com.Token, "indirect;") - com.Token = "//" + com.Token[i+len("indirect;"):] -} - -// isIndirect reports whether line has a "// indirect" comment, -// meaning it is in go.mod only for its effect on indirect dependencies, -// so that it can be dropped entirely once the effective version of the -// indirect dependency reaches the given minimum version. -func isIndirect(line *Line) bool { - if len(line.Suffix) == 0 { - return false - } - f := strings.Fields(strings.TrimPrefix(line.Suffix[0].Token, string(slashSlash))) - return (len(f) == 1 && f[0] == "indirect" || len(f) > 1 && f[0] == "indirect;") -} - -func (f *File) AddModuleStmt(path string) error { - if f.Syntax == nil { - f.Syntax = new(FileSyntax) - } - if f.Module == nil { - f.Module = &Module{ - Mod: module.Version{Path: path}, - Syntax: f.Syntax.addLine(nil, "module", AutoQuote(path)), - } - } else { - f.Module.Mod.Path = path - f.Syntax.updateLine(f.Module.Syntax, "module", AutoQuote(path)) - } - return nil -} - -func (f *File) AddComment(text string) { - if f.Syntax == nil { - f.Syntax = new(FileSyntax) - } - f.Syntax.Stmt = append(f.Syntax.Stmt, &CommentBlock{ - Comments: Comments{ - Before: []Comment{ - { - Token: text, - }, - }, - }, - }) -} - -type VersionFixer func(path, version string) (string, error) - -// errDontFix is returned by a VersionFixer to indicate the version should be -// left alone, even if it's not canonical. -var dontFixRetract VersionFixer = func(_, vers string) (string, error) { - return vers, nil -} - -// Parse parses and returns a go.mod file. -// -// file is the name of the file, used in positions and errors. -// -// data is the content of the file. -// -// fix is an optional function that canonicalizes module versions. -// If fix is nil, all module versions must be canonical ([module.CanonicalVersion] -// must return the same string). -func Parse(file string, data []byte, fix VersionFixer) (*File, error) { - return parseToFile(file, data, fix, true) -} - -// ParseLax is like Parse but ignores unknown statements. -// It is used when parsing go.mod files other than the main module, -// under the theory that most statement types we add in the future will -// only apply in the main module, like exclude and replace, -// and so we get better gradual deployments if old go commands -// simply ignore those statements when found in go.mod files -// in dependencies. -func ParseLax(file string, data []byte, fix VersionFixer) (*File, error) { - return parseToFile(file, data, fix, false) -} - -func parseToFile(file string, data []byte, fix VersionFixer, strict bool) (parsed *File, err error) { - fs, err := parse(file, data) - if err != nil { - return nil, err - } - f := &File{ - Syntax: fs, - } - var errs ErrorList - - // fix versions in retract directives after the file is parsed. - // We need the module path to fix versions, and it might be at the end. 
- defer func() { - oldLen := len(errs) - f.fixRetract(fix, &errs) - if len(errs) > oldLen { - parsed, err = nil, errs - } - }() - - for _, x := range fs.Stmt { - switch x := x.(type) { - case *Line: - f.add(&errs, nil, x, x.Token[0], x.Token[1:], fix, strict) - - case *LineBlock: - if len(x.Token) > 1 { - if strict { - errs = append(errs, Error{ - Filename: file, - Pos: x.Start, - Err: fmt.Errorf("unknown block type: %s", strings.Join(x.Token, " ")), - }) - } - continue - } - switch x.Token[0] { - default: - if strict { - errs = append(errs, Error{ - Filename: file, - Pos: x.Start, - Err: fmt.Errorf("unknown block type: %s", strings.Join(x.Token, " ")), - }) - } - continue - case "module", "require", "exclude", "replace", "retract": - for _, l := range x.Line { - f.add(&errs, x, l, x.Token[0], l.Token, fix, strict) - } - } - } - } - - if len(errs) > 0 { - return nil, errs - } - return f, nil -} - -var GoVersionRE = lazyregexp.New(`^([1-9][0-9]*)\.(0|[1-9][0-9]*)(\.(0|[1-9][0-9]*))?([a-z]+[0-9]+)?$`) -var laxGoVersionRE = lazyregexp.New(`^v?(([1-9][0-9]*)\.(0|[1-9][0-9]*))([^0-9].*)$`) - -// Toolchains must be named beginning with `go1`, -// like "go1.20.3" or "go1.20.3-gccgo". As a special case, "default" is also permitted. -// TODO(samthanawalla): Replace regex with https://pkg.go.dev/go/version#IsValid in 1.23+ -var ToolchainRE = lazyregexp.New(`^default$|^go1($|\.)`) - -func (f *File) add(errs *ErrorList, block *LineBlock, line *Line, verb string, args []string, fix VersionFixer, strict bool) { - // If strict is false, this module is a dependency. - // We ignore all unknown directives as well as main-module-only - // directives like replace and exclude. It will work better for - // forward compatibility if we can depend on modules that have unknown - // statements (presumed relevant only when acting as the main module) - // and simply ignore those statements. 
- if !strict { - switch verb { - case "go", "module", "retract", "require": - // want these even for dependency go.mods - default: - return - } - } - - wrapModPathError := func(modPath string, err error) { - *errs = append(*errs, Error{ - Filename: f.Syntax.Name, - Pos: line.Start, - ModPath: modPath, - Verb: verb, - Err: err, - }) - } - wrapError := func(err error) { - *errs = append(*errs, Error{ - Filename: f.Syntax.Name, - Pos: line.Start, - Err: err, - }) - } - errorf := func(format string, args ...interface{}) { - wrapError(fmt.Errorf(format, args...)) - } - - switch verb { - default: - errorf("unknown directive: %s", verb) - - case "go": - if f.Go != nil { - errorf("repeated go statement") - return - } - if len(args) != 1 { - errorf("go directive expects exactly one argument") - return - } else if !GoVersionRE.MatchString(args[0]) { - fixed := false - if !strict { - if m := laxGoVersionRE.FindStringSubmatch(args[0]); m != nil { - args[0] = m[1] - fixed = true - } - } - if !fixed { - errorf("invalid go version '%s': must match format 1.23.0", args[0]) - return - } - } - - f.Go = &Go{Syntax: line} - f.Go.Version = args[0] - - case "toolchain": - if f.Toolchain != nil { - errorf("repeated toolchain statement") - return - } - if len(args) != 1 { - errorf("toolchain directive expects exactly one argument") - return - } else if strict && !ToolchainRE.MatchString(args[0]) { - errorf("invalid toolchain version '%s': must match format go1.23.0 or default", args[0]) - return - } - f.Toolchain = &Toolchain{Syntax: line} - f.Toolchain.Name = args[0] - - case "module": - if f.Module != nil { - errorf("repeated module statement") - return - } - deprecated := parseDeprecation(block, line) - f.Module = &Module{ - Syntax: line, - Deprecated: deprecated, - } - if len(args) != 1 { - errorf("usage: module module/path") - return - } - s, err := parseString(&args[0]) - if err != nil { - errorf("invalid quoted string: %v", err) - return - } - f.Module.Mod = module.Version{Path: s} - - case "require", "exclude": - if len(args) != 2 { - errorf("usage: %s module/path v1.2.3", verb) - return - } - s, err := parseString(&args[0]) - if err != nil { - errorf("invalid quoted string: %v", err) - return - } - v, err := parseVersion(verb, s, &args[1], fix) - if err != nil { - wrapError(err) - return - } - pathMajor, err := modulePathMajor(s) - if err != nil { - wrapError(err) - return - } - if err := module.CheckPathMajor(v, pathMajor); err != nil { - wrapModPathError(s, err) - return - } - if verb == "require" { - f.Require = append(f.Require, &Require{ - Mod: module.Version{Path: s, Version: v}, - Syntax: line, - Indirect: isIndirect(line), - }) - } else { - f.Exclude = append(f.Exclude, &Exclude{ - Mod: module.Version{Path: s, Version: v}, - Syntax: line, - }) - } - - case "replace": - replace, wrappederr := parseReplace(f.Syntax.Name, line, verb, args, fix) - if wrappederr != nil { - *errs = append(*errs, *wrappederr) - return - } - f.Replace = append(f.Replace, replace) - - case "retract": - rationale := parseDirectiveComment(block, line) - vi, err := parseVersionInterval(verb, "", &args, dontFixRetract) - if err != nil { - if strict { - wrapError(err) - return - } else { - // Only report errors parsing intervals in the main module. We may - // support additional syntax in the future, such as open and half-open - // intervals. Those can't be supported now, because they break the - // go.mod parser, even in lax mode. 
- return - } - } - if len(args) > 0 && strict { - // In the future, there may be additional information after the version. - errorf("unexpected token after version: %q", args[0]) - return - } - retract := &Retract{ - VersionInterval: vi, - Rationale: rationale, - Syntax: line, - } - f.Retract = append(f.Retract, retract) - } -} - -func parseReplace(filename string, line *Line, verb string, args []string, fix VersionFixer) (*Replace, *Error) { - wrapModPathError := func(modPath string, err error) *Error { - return &Error{ - Filename: filename, - Pos: line.Start, - ModPath: modPath, - Verb: verb, - Err: err, - } - } - wrapError := func(err error) *Error { - return &Error{ - Filename: filename, - Pos: line.Start, - Err: err, - } - } - errorf := func(format string, args ...interface{}) *Error { - return wrapError(fmt.Errorf(format, args...)) - } - - arrow := 2 - if len(args) >= 2 && args[1] == "=>" { - arrow = 1 - } - if len(args) < arrow+2 || len(args) > arrow+3 || args[arrow] != "=>" { - return nil, errorf("usage: %s module/path [v1.2.3] => other/module v1.4\n\t or %s module/path [v1.2.3] => ../local/directory", verb, verb) - } - s, err := parseString(&args[0]) - if err != nil { - return nil, errorf("invalid quoted string: %v", err) - } - pathMajor, err := modulePathMajor(s) - if err != nil { - return nil, wrapModPathError(s, err) - - } - var v string - if arrow == 2 { - v, err = parseVersion(verb, s, &args[1], fix) - if err != nil { - return nil, wrapError(err) - } - if err := module.CheckPathMajor(v, pathMajor); err != nil { - return nil, wrapModPathError(s, err) - } - } - ns, err := parseString(&args[arrow+1]) - if err != nil { - return nil, errorf("invalid quoted string: %v", err) - } - nv := "" - if len(args) == arrow+2 { - if !IsDirectoryPath(ns) { - if strings.Contains(ns, "@") { - return nil, errorf("replacement module must match format 'path version', not 'path@version'") - } - return nil, errorf("replacement module without version must be directory path (rooted or starting with . or ..)") - } - if filepath.Separator == '/' && strings.Contains(ns, `\`) { - return nil, errorf("replacement directory appears to be Windows path (on a non-windows system)") - } - } - if len(args) == arrow+3 { - nv, err = parseVersion(verb, ns, &args[arrow+2], fix) - if err != nil { - return nil, wrapError(err) - } - if IsDirectoryPath(ns) { - return nil, errorf("replacement module directory path %q cannot have version", ns) - } - } - return &Replace{ - Old: module.Version{Path: s, Version: v}, - New: module.Version{Path: ns, Version: nv}, - Syntax: line, - }, nil -} - -// fixRetract applies fix to each retract directive in f, appending any errors -// to errs. -// -// Most versions are fixed as we parse the file, but for retract directives, -// the relevant module path is the one specified with the module directive, -// and that might appear at the end of the file (or not at all). 
-func (f *File) fixRetract(fix VersionFixer, errs *ErrorList) { - if fix == nil { - return - } - path := "" - if f.Module != nil { - path = f.Module.Mod.Path - } - var r *Retract - wrapError := func(err error) { - *errs = append(*errs, Error{ - Filename: f.Syntax.Name, - Pos: r.Syntax.Start, - Err: err, - }) - } - - for _, r = range f.Retract { - if path == "" { - wrapError(errors.New("no module directive found, so retract cannot be used")) - return // only print the first one of these - } - - args := r.Syntax.Token - if args[0] == "retract" { - args = args[1:] - } - vi, err := parseVersionInterval("retract", path, &args, fix) - if err != nil { - wrapError(err) - } - r.VersionInterval = vi - } -} - -func (f *WorkFile) add(errs *ErrorList, line *Line, verb string, args []string, fix VersionFixer) { - wrapError := func(err error) { - *errs = append(*errs, Error{ - Filename: f.Syntax.Name, - Pos: line.Start, - Err: err, - }) - } - errorf := func(format string, args ...interface{}) { - wrapError(fmt.Errorf(format, args...)) - } - - switch verb { - default: - errorf("unknown directive: %s", verb) - - case "go": - if f.Go != nil { - errorf("repeated go statement") - return - } - if len(args) != 1 { - errorf("go directive expects exactly one argument") - return - } else if !GoVersionRE.MatchString(args[0]) { - errorf("invalid go version '%s': must match format 1.23.0", args[0]) - return - } - - f.Go = &Go{Syntax: line} - f.Go.Version = args[0] - - case "toolchain": - if f.Toolchain != nil { - errorf("repeated toolchain statement") - return - } - if len(args) != 1 { - errorf("toolchain directive expects exactly one argument") - return - } else if !ToolchainRE.MatchString(args[0]) { - errorf("invalid toolchain version '%s': must match format go1.23.0 or default", args[0]) - return - } - - f.Toolchain = &Toolchain{Syntax: line} - f.Toolchain.Name = args[0] - - case "use": - if len(args) != 1 { - errorf("usage: %s local/dir", verb) - return - } - s, err := parseString(&args[0]) - if err != nil { - errorf("invalid quoted string: %v", err) - return - } - f.Use = append(f.Use, &Use{ - Path: s, - Syntax: line, - }) - - case "replace": - replace, wrappederr := parseReplace(f.Syntax.Name, line, verb, args, fix) - if wrappederr != nil { - *errs = append(*errs, *wrappederr) - return - } - f.Replace = append(f.Replace, replace) - } -} - -// IsDirectoryPath reports whether the given path should be interpreted as a directory path. -// Just like on the go command line, relative paths starting with a '.' or '..' path component -// and rooted paths are directory paths; the rest are module paths. -func IsDirectoryPath(ns string) bool { - // Because go.mod files can move from one system to another, - // we check all known path syntaxes, both Unix and Windows. - return ns == "." || strings.HasPrefix(ns, "./") || strings.HasPrefix(ns, `.\`) || - ns == ".." || strings.HasPrefix(ns, "../") || strings.HasPrefix(ns, `..\`) || - strings.HasPrefix(ns, "/") || strings.HasPrefix(ns, `\`) || - len(ns) >= 2 && ('A' <= ns[0] && ns[0] <= 'Z' || 'a' <= ns[0] && ns[0] <= 'z') && ns[1] == ':' -} - -// MustQuote reports whether s must be quoted in order to appear as -// a single token in a go.mod line. 
-func MustQuote(s string) bool { - for _, r := range s { - switch r { - case ' ', '"', '\'', '`': - return true - - case '(', ')', '[', ']', '{', '}', ',': - if len(s) > 1 { - return true - } - - default: - if !unicode.IsPrint(r) { - return true - } - } - } - return s == "" || strings.Contains(s, "//") || strings.Contains(s, "/*") -} - -// AutoQuote returns s or, if quoting is required for s to appear in a go.mod, -// the quotation of s. -func AutoQuote(s string) string { - if MustQuote(s) { - return strconv.Quote(s) - } - return s -} - -func parseVersionInterval(verb string, path string, args *[]string, fix VersionFixer) (VersionInterval, error) { - toks := *args - if len(toks) == 0 || toks[0] == "(" { - return VersionInterval{}, fmt.Errorf("expected '[' or version") - } - if toks[0] != "[" { - v, err := parseVersion(verb, path, &toks[0], fix) - if err != nil { - return VersionInterval{}, err - } - *args = toks[1:] - return VersionInterval{Low: v, High: v}, nil - } - toks = toks[1:] - - if len(toks) == 0 { - return VersionInterval{}, fmt.Errorf("expected version after '['") - } - low, err := parseVersion(verb, path, &toks[0], fix) - if err != nil { - return VersionInterval{}, err - } - toks = toks[1:] - - if len(toks) == 0 || toks[0] != "," { - return VersionInterval{}, fmt.Errorf("expected ',' after version") - } - toks = toks[1:] - - if len(toks) == 0 { - return VersionInterval{}, fmt.Errorf("expected version after ','") - } - high, err := parseVersion(verb, path, &toks[0], fix) - if err != nil { - return VersionInterval{}, err - } - toks = toks[1:] - - if len(toks) == 0 || toks[0] != "]" { - return VersionInterval{}, fmt.Errorf("expected ']' after version") - } - toks = toks[1:] - - *args = toks - return VersionInterval{Low: low, High: high}, nil -} - -func parseString(s *string) (string, error) { - t := *s - if strings.HasPrefix(t, `"`) { - var err error - if t, err = strconv.Unquote(t); err != nil { - return "", err - } - } else if strings.ContainsAny(t, "\"'`") { - // Other quotes are reserved both for possible future expansion - // and to avoid confusion. For example if someone types 'x' - // we want that to be a syntax error and not a literal x in literal quotation marks. - return "", fmt.Errorf("unquoted string cannot contain quote") - } - *s = AutoQuote(t) - return t, nil -} - -var deprecatedRE = lazyregexp.New(`(?s)(?:^|\n\n)Deprecated: *(.*?)(?:$|\n\n)`) - -// parseDeprecation extracts the text of comments on a "module" directive and -// extracts a deprecation message from that. -// -// A deprecation message is contained in a paragraph within a block of comments -// that starts with "Deprecated:" (case sensitive). The message runs until the -// end of the paragraph and does not include the "Deprecated:" prefix. If the -// comment block has multiple paragraphs that start with "Deprecated:", -// parseDeprecation returns the message from the first. -func parseDeprecation(block *LineBlock, line *Line) string { - text := parseDirectiveComment(block, line) - m := deprecatedRE.FindStringSubmatch(text) - if m == nil { - return "" - } - return m[1] -} - -// parseDirectiveComment extracts the text of comments on a directive. -// If the directive's line does not have comments and is part of a block that -// does have comments, the block's comments are used. 
-func parseDirectiveComment(block *LineBlock, line *Line) string { - comments := line.Comment() - if block != nil && len(comments.Before) == 0 && len(comments.Suffix) == 0 { - comments = block.Comment() - } - groups := [][]Comment{comments.Before, comments.Suffix} - var lines []string - for _, g := range groups { - for _, c := range g { - if !strings.HasPrefix(c.Token, "//") { - continue // blank line - } - lines = append(lines, strings.TrimSpace(strings.TrimPrefix(c.Token, "//"))) - } - } - return strings.Join(lines, "\n") -} - -type ErrorList []Error - -func (e ErrorList) Error() string { - errStrs := make([]string, len(e)) - for i, err := range e { - errStrs[i] = err.Error() - } - return strings.Join(errStrs, "\n") -} - -type Error struct { - Filename string - Pos Position - Verb string - ModPath string - Err error -} - -func (e *Error) Error() string { - var pos string - if e.Pos.LineRune > 1 { - // Don't print LineRune if it's 1 (beginning of line). - // It's always 1 except in scanner errors, which are rare. - pos = fmt.Sprintf("%s:%d:%d: ", e.Filename, e.Pos.Line, e.Pos.LineRune) - } else if e.Pos.Line > 0 { - pos = fmt.Sprintf("%s:%d: ", e.Filename, e.Pos.Line) - } else if e.Filename != "" { - pos = fmt.Sprintf("%s: ", e.Filename) - } - - var directive string - if e.ModPath != "" { - directive = fmt.Sprintf("%s %s: ", e.Verb, e.ModPath) - } else if e.Verb != "" { - directive = fmt.Sprintf("%s: ", e.Verb) - } - - return pos + directive + e.Err.Error() -} - -func (e *Error) Unwrap() error { return e.Err } - -func parseVersion(verb string, path string, s *string, fix VersionFixer) (string, error) { - t, err := parseString(s) - if err != nil { - return "", &Error{ - Verb: verb, - ModPath: path, - Err: &module.InvalidVersionError{ - Version: *s, - Err: err, - }, - } - } - if fix != nil { - fixed, err := fix(path, t) - if err != nil { - if err, ok := err.(*module.ModuleError); ok { - return "", &Error{ - Verb: verb, - ModPath: path, - Err: err.Err, - } - } - return "", err - } - t = fixed - } else { - cv := module.CanonicalVersion(t) - if cv == "" { - return "", &Error{ - Verb: verb, - ModPath: path, - Err: &module.InvalidVersionError{ - Version: t, - Err: errors.New("must be of the form v1.2.3"), - }, - } - } - t = cv - } - *s = t - return *s, nil -} - -func modulePathMajor(path string) (string, error) { - _, major, ok := module.SplitPathVersion(path) - if !ok { - return "", fmt.Errorf("invalid module path") - } - return major, nil -} - -func (f *File) Format() ([]byte, error) { - return Format(f.Syntax), nil -} - -// Cleanup cleans up the file f after any edit operations. -// To avoid quadratic behavior, modifications like [File.DropRequire] -// clear the entry but do not remove it from the slice. -// Cleanup cleans out all the cleared entries. 
-func (f *File) Cleanup() { - w := 0 - for _, r := range f.Require { - if r.Mod.Path != "" { - f.Require[w] = r - w++ - } - } - f.Require = f.Require[:w] - - w = 0 - for _, x := range f.Exclude { - if x.Mod.Path != "" { - f.Exclude[w] = x - w++ - } - } - f.Exclude = f.Exclude[:w] - - w = 0 - for _, r := range f.Replace { - if r.Old.Path != "" { - f.Replace[w] = r - w++ - } - } - f.Replace = f.Replace[:w] - - w = 0 - for _, r := range f.Retract { - if r.Low != "" || r.High != "" { - f.Retract[w] = r - w++ - } - } - f.Retract = f.Retract[:w] - - f.Syntax.Cleanup() -} - -func (f *File) AddGoStmt(version string) error { - if !GoVersionRE.MatchString(version) { - return fmt.Errorf("invalid language version %q", version) - } - if f.Go == nil { - var hint Expr - if f.Module != nil && f.Module.Syntax != nil { - hint = f.Module.Syntax - } - f.Go = &Go{ - Version: version, - Syntax: f.Syntax.addLine(hint, "go", version), - } - } else { - f.Go.Version = version - f.Syntax.updateLine(f.Go.Syntax, "go", version) - } - return nil -} - -// DropGoStmt deletes the go statement from the file. -func (f *File) DropGoStmt() { - if f.Go != nil { - f.Go.Syntax.markRemoved() - f.Go = nil - } -} - -// DropToolchainStmt deletes the toolchain statement from the file. -func (f *File) DropToolchainStmt() { - if f.Toolchain != nil { - f.Toolchain.Syntax.markRemoved() - f.Toolchain = nil - } -} - -func (f *File) AddToolchainStmt(name string) error { - if !ToolchainRE.MatchString(name) { - return fmt.Errorf("invalid toolchain name %q", name) - } - if f.Toolchain == nil { - var hint Expr - if f.Go != nil && f.Go.Syntax != nil { - hint = f.Go.Syntax - } else if f.Module != nil && f.Module.Syntax != nil { - hint = f.Module.Syntax - } - f.Toolchain = &Toolchain{ - Name: name, - Syntax: f.Syntax.addLine(hint, "toolchain", name), - } - } else { - f.Toolchain.Name = name - f.Syntax.updateLine(f.Toolchain.Syntax, "toolchain", name) - } - return nil -} - -// AddRequire sets the first require line for path to version vers, -// preserving any existing comments for that line and removing all -// other lines for path. -// -// If no line currently exists for path, AddRequire adds a new line -// at the end of the last require block. -func (f *File) AddRequire(path, vers string) error { - need := true - for _, r := range f.Require { - if r.Mod.Path == path { - if need { - r.Mod.Version = vers - f.Syntax.updateLine(r.Syntax, "require", AutoQuote(path), vers) - need = false - } else { - r.Syntax.markRemoved() - *r = Require{} - } - } - } - - if need { - f.AddNewRequire(path, vers, false) - } - return nil -} - -// AddNewRequire adds a new require line for path at version vers at the end of -// the last require block, regardless of any existing require lines for path. -func (f *File) AddNewRequire(path, vers string, indirect bool) { - line := f.Syntax.addLine(nil, "require", AutoQuote(path), vers) - r := &Require{ - Mod: module.Version{Path: path, Version: vers}, - Syntax: line, - } - r.setIndirect(indirect) - f.Require = append(f.Require, r) -} - -// SetRequire updates the requirements of f to contain exactly req, preserving -// the existing block structure and line comment contents (except for 'indirect' -// markings) for the first requirement on each named module path. -// -// The Syntax field is ignored for the requirements in req. -// -// Any requirements not already present in the file are added to the block -// containing the last require line. 
-// -// The requirements in req must specify at most one distinct version for each -// module path. -// -// If any existing requirements may be removed, the caller should call -// [File.Cleanup] after all edits are complete. -func (f *File) SetRequire(req []*Require) { - type elem struct { - version string - indirect bool - } - need := make(map[string]elem) - for _, r := range req { - if prev, dup := need[r.Mod.Path]; dup && prev.version != r.Mod.Version { - panic(fmt.Errorf("SetRequire called with conflicting versions for path %s (%s and %s)", r.Mod.Path, prev.version, r.Mod.Version)) - } - need[r.Mod.Path] = elem{r.Mod.Version, r.Indirect} - } - - // Update or delete the existing Require entries to preserve - // only the first for each module path in req. - for _, r := range f.Require { - e, ok := need[r.Mod.Path] - if ok { - r.setVersion(e.version) - r.setIndirect(e.indirect) - } else { - r.markRemoved() - } - delete(need, r.Mod.Path) - } - - // Add new entries in the last block of the file for any paths that weren't - // already present. - // - // This step is nondeterministic, but the final result will be deterministic - // because we will sort the block. - for path, e := range need { - f.AddNewRequire(path, e.version, e.indirect) - } - - f.SortBlocks() -} - -// SetRequireSeparateIndirect updates the requirements of f to contain the given -// requirements. Comment contents (except for 'indirect' markings) are retained -// from the first existing requirement for each module path. Like SetRequire, -// SetRequireSeparateIndirect adds requirements for new paths in req, -// updates the version and "// indirect" comment on existing requirements, -// and deletes requirements on paths not in req. Existing duplicate requirements -// are deleted. -// -// As its name suggests, SetRequireSeparateIndirect puts direct and indirect -// requirements into two separate blocks, one containing only direct -// requirements, and the other containing only indirect requirements. -// SetRequireSeparateIndirect may move requirements between these two blocks -// when their indirect markings change. However, SetRequireSeparateIndirect -// won't move requirements from other blocks, especially blocks with comments. -// -// If the file initially has one uncommented block of requirements, -// SetRequireSeparateIndirect will split it into a direct-only and indirect-only -// block. This aids in the transition to separate blocks. -func (f *File) SetRequireSeparateIndirect(req []*Require) { - // hasComments returns whether a line or block has comments - // other than "indirect". - hasComments := func(c Comments) bool { - return len(c.Before) > 0 || len(c.After) > 0 || len(c.Suffix) > 1 || - (len(c.Suffix) == 1 && - strings.TrimSpace(strings.TrimPrefix(c.Suffix[0].Token, string(slashSlash))) != "indirect") - } - - // moveReq adds r to block. If r was in another block, moveReq deletes - // it from that block and transfers its comments. - moveReq := func(r *Require, block *LineBlock) { - var line *Line - if r.Syntax == nil { - line = &Line{Token: []string{AutoQuote(r.Mod.Path), r.Mod.Version}} - r.Syntax = line - if r.Indirect { - r.setIndirect(true) - } - } else { - line = new(Line) - *line = *r.Syntax - if !line.InBlock && len(line.Token) > 0 && line.Token[0] == "require" { - line.Token = line.Token[1:] - } - r.Syntax.Token = nil // Cleanup will delete the old line. - r.Syntax = line - } - line.InBlock = true - block.Line = append(block.Line, line) - } - - // Examine existing require lines and blocks. 
- var ( - // We may insert new requirements into the last uncommented - // direct-only and indirect-only blocks. We may also move requirements - // to the opposite block if their indirect markings change. - lastDirectIndex = -1 - lastIndirectIndex = -1 - - // If there are no direct-only or indirect-only blocks, a new block may - // be inserted after the last require line or block. - lastRequireIndex = -1 - - // If there's only one require line or block, and it's uncommented, - // we'll move its requirements to the direct-only or indirect-only blocks. - requireLineOrBlockCount = 0 - - // Track the block each requirement belongs to (if any) so we can - // move them later. - lineToBlock = make(map[*Line]*LineBlock) - ) - for i, stmt := range f.Syntax.Stmt { - switch stmt := stmt.(type) { - case *Line: - if len(stmt.Token) == 0 || stmt.Token[0] != "require" { - continue - } - lastRequireIndex = i - requireLineOrBlockCount++ - if !hasComments(stmt.Comments) { - if isIndirect(stmt) { - lastIndirectIndex = i - } else { - lastDirectIndex = i - } - } - - case *LineBlock: - if len(stmt.Token) == 0 || stmt.Token[0] != "require" { - continue - } - lastRequireIndex = i - requireLineOrBlockCount++ - allDirect := len(stmt.Line) > 0 && !hasComments(stmt.Comments) - allIndirect := len(stmt.Line) > 0 && !hasComments(stmt.Comments) - for _, line := range stmt.Line { - lineToBlock[line] = stmt - if hasComments(line.Comments) { - allDirect = false - allIndirect = false - } else if isIndirect(line) { - allDirect = false - } else { - allIndirect = false - } - } - if allDirect { - lastDirectIndex = i - } - if allIndirect { - lastIndirectIndex = i - } - } - } - - oneFlatUncommentedBlock := requireLineOrBlockCount == 1 && - !hasComments(*f.Syntax.Stmt[lastRequireIndex].Comment()) - - // Create direct and indirect blocks if needed. Convert lines into blocks - // if needed. If we end up with an empty block or a one-line block, - // Cleanup will delete it or convert it to a line later. - insertBlock := func(i int) *LineBlock { - block := &LineBlock{Token: []string{"require"}} - f.Syntax.Stmt = append(f.Syntax.Stmt, nil) - copy(f.Syntax.Stmt[i+1:], f.Syntax.Stmt[i:]) - f.Syntax.Stmt[i] = block - return block - } - - ensureBlock := func(i int) *LineBlock { - switch stmt := f.Syntax.Stmt[i].(type) { - case *LineBlock: - return stmt - case *Line: - block := &LineBlock{ - Token: []string{"require"}, - Line: []*Line{stmt}, - } - stmt.Token = stmt.Token[1:] // remove "require" - stmt.InBlock = true - f.Syntax.Stmt[i] = block - return block - default: - panic(fmt.Sprintf("unexpected statement: %v", stmt)) - } - } - - var lastDirectBlock *LineBlock - if lastDirectIndex < 0 { - if lastIndirectIndex >= 0 { - lastDirectIndex = lastIndirectIndex - lastIndirectIndex++ - } else if lastRequireIndex >= 0 { - lastDirectIndex = lastRequireIndex + 1 - } else { - lastDirectIndex = len(f.Syntax.Stmt) - } - lastDirectBlock = insertBlock(lastDirectIndex) - } else { - lastDirectBlock = ensureBlock(lastDirectIndex) - } - - var lastIndirectBlock *LineBlock - if lastIndirectIndex < 0 { - lastIndirectIndex = lastDirectIndex + 1 - lastIndirectBlock = insertBlock(lastIndirectIndex) - } else { - lastIndirectBlock = ensureBlock(lastIndirectIndex) - } - - // Delete requirements we don't want anymore. - // Update versions and indirect comments on requirements we want to keep. 
- // If a requirement is in last{Direct,Indirect}Block with the wrong - // indirect marking after this, or if the requirement is in an single - // uncommented mixed block (oneFlatUncommentedBlock), move it to the - // correct block. - // - // Some blocks may be empty after this. Cleanup will remove them. - need := make(map[string]*Require) - for _, r := range req { - need[r.Mod.Path] = r - } - have := make(map[string]*Require) - for _, r := range f.Require { - path := r.Mod.Path - if need[path] == nil || have[path] != nil { - // Requirement not needed, or duplicate requirement. Delete. - r.markRemoved() - continue - } - have[r.Mod.Path] = r - r.setVersion(need[path].Mod.Version) - r.setIndirect(need[path].Indirect) - if need[path].Indirect && - (oneFlatUncommentedBlock || lineToBlock[r.Syntax] == lastDirectBlock) { - moveReq(r, lastIndirectBlock) - } else if !need[path].Indirect && - (oneFlatUncommentedBlock || lineToBlock[r.Syntax] == lastIndirectBlock) { - moveReq(r, lastDirectBlock) - } - } - - // Add new requirements. - for path, r := range need { - if have[path] == nil { - if r.Indirect { - moveReq(r, lastIndirectBlock) - } else { - moveReq(r, lastDirectBlock) - } - f.Require = append(f.Require, r) - } - } - - f.SortBlocks() -} - -func (f *File) DropRequire(path string) error { - for _, r := range f.Require { - if r.Mod.Path == path { - r.Syntax.markRemoved() - *r = Require{} - } - } - return nil -} - -// AddExclude adds a exclude statement to the mod file. Errors if the provided -// version is not a canonical version string -func (f *File) AddExclude(path, vers string) error { - if err := checkCanonicalVersion(path, vers); err != nil { - return err - } - - var hint *Line - for _, x := range f.Exclude { - if x.Mod.Path == path && x.Mod.Version == vers { - return nil - } - if x.Mod.Path == path { - hint = x.Syntax - } - } - - f.Exclude = append(f.Exclude, &Exclude{Mod: module.Version{Path: path, Version: vers}, Syntax: f.Syntax.addLine(hint, "exclude", AutoQuote(path), vers)}) - return nil -} - -func (f *File) DropExclude(path, vers string) error { - for _, x := range f.Exclude { - if x.Mod.Path == path && x.Mod.Version == vers { - x.Syntax.markRemoved() - *x = Exclude{} - } - } - return nil -} - -func (f *File) AddReplace(oldPath, oldVers, newPath, newVers string) error { - return addReplace(f.Syntax, &f.Replace, oldPath, oldVers, newPath, newVers) -} - -func addReplace(syntax *FileSyntax, replace *[]*Replace, oldPath, oldVers, newPath, newVers string) error { - need := true - old := module.Version{Path: oldPath, Version: oldVers} - new := module.Version{Path: newPath, Version: newVers} - tokens := []string{"replace", AutoQuote(oldPath)} - if oldVers != "" { - tokens = append(tokens, oldVers) - } - tokens = append(tokens, "=>", AutoQuote(newPath)) - if newVers != "" { - tokens = append(tokens, newVers) - } - - var hint *Line - for _, r := range *replace { - if r.Old.Path == oldPath && (oldVers == "" || r.Old.Version == oldVers) { - if need { - // Found replacement for old; update to use new. - r.New = new - syntax.updateLine(r.Syntax, tokens...) - need = false - continue - } - // Already added; delete other replacements for same. 
- r.Syntax.markRemoved() - *r = Replace{} - } - if r.Old.Path == oldPath { - hint = r.Syntax - } - } - if need { - *replace = append(*replace, &Replace{Old: old, New: new, Syntax: syntax.addLine(hint, tokens...)}) - } - return nil -} - -func (f *File) DropReplace(oldPath, oldVers string) error { - for _, r := range f.Replace { - if r.Old.Path == oldPath && r.Old.Version == oldVers { - r.Syntax.markRemoved() - *r = Replace{} - } - } - return nil -} - -// AddRetract adds a retract statement to the mod file. Errors if the provided -// version interval does not consist of canonical version strings -func (f *File) AddRetract(vi VersionInterval, rationale string) error { - var path string - if f.Module != nil { - path = f.Module.Mod.Path - } - if err := checkCanonicalVersion(path, vi.High); err != nil { - return err - } - if err := checkCanonicalVersion(path, vi.Low); err != nil { - return err - } - - r := &Retract{ - VersionInterval: vi, - } - if vi.Low == vi.High { - r.Syntax = f.Syntax.addLine(nil, "retract", AutoQuote(vi.Low)) - } else { - r.Syntax = f.Syntax.addLine(nil, "retract", "[", AutoQuote(vi.Low), ",", AutoQuote(vi.High), "]") - } - if rationale != "" { - for _, line := range strings.Split(rationale, "\n") { - com := Comment{Token: "// " + line} - r.Syntax.Comment().Before = append(r.Syntax.Comment().Before, com) - } - } - return nil -} - -func (f *File) DropRetract(vi VersionInterval) error { - for _, r := range f.Retract { - if r.VersionInterval == vi { - r.Syntax.markRemoved() - *r = Retract{} - } - } - return nil -} - -func (f *File) SortBlocks() { - f.removeDups() // otherwise sorting is unsafe - - // semanticSortForExcludeVersionV is the Go version (plus leading "v") at which - // lines in exclude blocks start to use semantic sort instead of lexicographic sort. - // See go.dev/issue/60028. - const semanticSortForExcludeVersionV = "v1.21" - useSemanticSortForExclude := f.Go != nil && semver.Compare("v"+f.Go.Version, semanticSortForExcludeVersionV) >= 0 - - for _, stmt := range f.Syntax.Stmt { - block, ok := stmt.(*LineBlock) - if !ok { - continue - } - less := lineLess - if block.Token[0] == "exclude" && useSemanticSortForExclude { - less = lineExcludeLess - } else if block.Token[0] == "retract" { - less = lineRetractLess - } - sort.SliceStable(block.Line, func(i, j int) bool { - return less(block.Line[i], block.Line[j]) - }) - } -} - -// removeDups removes duplicate exclude and replace directives. -// -// Earlier exclude directives take priority. -// -// Later replace directives take priority. -// -// require directives are not de-duplicated. That's left up to higher-level -// logic (MVS). -// -// retract directives are not de-duplicated since comments are -// meaningful, and versions may be retracted multiple times. -func (f *File) removeDups() { - removeDups(f.Syntax, &f.Exclude, &f.Replace) -} - -func removeDups(syntax *FileSyntax, exclude *[]*Exclude, replace *[]*Replace) { - kill := make(map[*Line]bool) - - // Remove duplicate excludes. - if exclude != nil { - haveExclude := make(map[module.Version]bool) - for _, x := range *exclude { - if haveExclude[x.Mod] { - kill[x.Syntax] = true - continue - } - haveExclude[x.Mod] = true - } - var excl []*Exclude - for _, x := range *exclude { - if !kill[x.Syntax] { - excl = append(excl, x) - } - } - *exclude = excl - } - - // Remove duplicate replacements. - // Later replacements take priority over earlier ones. 
- haveReplace := make(map[module.Version]bool) - for i := len(*replace) - 1; i >= 0; i-- { - x := (*replace)[i] - if haveReplace[x.Old] { - kill[x.Syntax] = true - continue - } - haveReplace[x.Old] = true - } - var repl []*Replace - for _, x := range *replace { - if !kill[x.Syntax] { - repl = append(repl, x) - } - } - *replace = repl - - // Duplicate require and retract directives are not removed. - - // Drop killed statements from the syntax tree. - var stmts []Expr - for _, stmt := range syntax.Stmt { - switch stmt := stmt.(type) { - case *Line: - if kill[stmt] { - continue - } - case *LineBlock: - var lines []*Line - for _, line := range stmt.Line { - if !kill[line] { - lines = append(lines, line) - } - } - stmt.Line = lines - if len(lines) == 0 { - continue - } - } - stmts = append(stmts, stmt) - } - syntax.Stmt = stmts -} - -// lineLess returns whether li should be sorted before lj. It sorts -// lexicographically without assigning any special meaning to tokens. -func lineLess(li, lj *Line) bool { - for k := 0; k < len(li.Token) && k < len(lj.Token); k++ { - if li.Token[k] != lj.Token[k] { - return li.Token[k] < lj.Token[k] - } - } - return len(li.Token) < len(lj.Token) -} - -// lineExcludeLess reports whether li should be sorted before lj for lines in -// an "exclude" block. -func lineExcludeLess(li, lj *Line) bool { - if len(li.Token) != 2 || len(lj.Token) != 2 { - // Not a known exclude specification. - // Fall back to sorting lexicographically. - return lineLess(li, lj) - } - // An exclude specification has two tokens: ModulePath and Version. - // Compare module path by string order and version by semver rules. - if pi, pj := li.Token[0], lj.Token[0]; pi != pj { - return pi < pj - } - return semver.Compare(li.Token[1], lj.Token[1]) < 0 -} - -// lineRetractLess returns whether li should be sorted before lj for lines in -// a "retract" block. It treats each line as a version interval. Single versions -// are compared as if they were intervals with the same low and high version. -// Intervals are sorted in descending order, first by low version, then by -// high version, using semver.Compare. -func lineRetractLess(li, lj *Line) bool { - interval := func(l *Line) VersionInterval { - if len(l.Token) == 1 { - return VersionInterval{Low: l.Token[0], High: l.Token[0]} - } else if len(l.Token) == 5 && l.Token[0] == "[" && l.Token[2] == "," && l.Token[4] == "]" { - return VersionInterval{Low: l.Token[1], High: l.Token[3]} - } else { - // Line in unknown format. Treat as an invalid version. - return VersionInterval{} - } - } - vii := interval(li) - vij := interval(lj) - if cmp := semver.Compare(vii.Low, vij.Low); cmp != 0 { - return cmp > 0 - } - return semver.Compare(vii.High, vij.High) > 0 -} - -// checkCanonicalVersion returns a non-nil error if vers is not a canonical -// version string or does not match the major version of path. -// -// If path is non-empty, the error text suggests a format with a major version -// corresponding to the path. 
-func checkCanonicalVersion(path, vers string) error { - _, pathMajor, pathMajorOk := module.SplitPathVersion(path) - - if vers == "" || vers != module.CanonicalVersion(vers) { - if pathMajor == "" { - return &module.InvalidVersionError{ - Version: vers, - Err: fmt.Errorf("must be of the form v1.2.3"), - } - } - return &module.InvalidVersionError{ - Version: vers, - Err: fmt.Errorf("must be of the form %s.2.3", module.PathMajorPrefix(pathMajor)), - } - } - - if pathMajorOk { - if err := module.CheckPathMajor(vers, pathMajor); err != nil { - if pathMajor == "" { - // In this context, the user probably wrote "v2.3.4" when they meant - // "v2.3.4+incompatible". Suggest that instead of "v0 or v1". - return &module.InvalidVersionError{ - Version: vers, - Err: fmt.Errorf("should be %s+incompatible (or module %s/%v)", vers, path, semver.Major(vers)), - } - } - return err - } - } - - return nil -} diff --git a/vendor/golang.org/x/mod/modfile/work.go b/vendor/golang.org/x/mod/modfile/work.go deleted file mode 100644 index d7b99376e..000000000 --- a/vendor/golang.org/x/mod/modfile/work.go +++ /dev/null @@ -1,285 +0,0 @@ -// Copyright 2021 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package modfile - -import ( - "fmt" - "sort" - "strings" -) - -// A WorkFile is the parsed, interpreted form of a go.work file. -type WorkFile struct { - Go *Go - Toolchain *Toolchain - Use []*Use - Replace []*Replace - - Syntax *FileSyntax -} - -// A Use is a single directory statement. -type Use struct { - Path string // Use path of module. - ModulePath string // Module path in the comment. - Syntax *Line -} - -// ParseWork parses and returns a go.work file. -// -// file is the name of the file, used in positions and errors. -// -// data is the content of the file. -// -// fix is an optional function that canonicalizes module versions. -// If fix is nil, all module versions must be canonical ([module.CanonicalVersion] -// must return the same string). -func ParseWork(file string, data []byte, fix VersionFixer) (*WorkFile, error) { - fs, err := parse(file, data) - if err != nil { - return nil, err - } - f := &WorkFile{ - Syntax: fs, - } - var errs ErrorList - - for _, x := range fs.Stmt { - switch x := x.(type) { - case *Line: - f.add(&errs, x, x.Token[0], x.Token[1:], fix) - - case *LineBlock: - if len(x.Token) > 1 { - errs = append(errs, Error{ - Filename: file, - Pos: x.Start, - Err: fmt.Errorf("unknown block type: %s", strings.Join(x.Token, " ")), - }) - continue - } - switch x.Token[0] { - default: - errs = append(errs, Error{ - Filename: file, - Pos: x.Start, - Err: fmt.Errorf("unknown block type: %s", strings.Join(x.Token, " ")), - }) - continue - case "use", "replace": - for _, l := range x.Line { - f.add(&errs, l, x.Token[0], l.Token, fix) - } - } - } - } - - if len(errs) > 0 { - return nil, errs - } - return f, nil -} - -// Cleanup cleans up the file f after any edit operations. -// To avoid quadratic behavior, modifications like [WorkFile.DropRequire] -// clear the entry but do not remove it from the slice. -// Cleanup cleans out all the cleared entries. 
-func (f *WorkFile) Cleanup() { - w := 0 - for _, r := range f.Use { - if r.Path != "" { - f.Use[w] = r - w++ - } - } - f.Use = f.Use[:w] - - w = 0 - for _, r := range f.Replace { - if r.Old.Path != "" { - f.Replace[w] = r - w++ - } - } - f.Replace = f.Replace[:w] - - f.Syntax.Cleanup() -} - -func (f *WorkFile) AddGoStmt(version string) error { - if !GoVersionRE.MatchString(version) { - return fmt.Errorf("invalid language version %q", version) - } - if f.Go == nil { - stmt := &Line{Token: []string{"go", version}} - f.Go = &Go{ - Version: version, - Syntax: stmt, - } - // Find the first non-comment-only block and add - // the go statement before it. That will keep file comments at the top. - i := 0 - for i = 0; i < len(f.Syntax.Stmt); i++ { - if _, ok := f.Syntax.Stmt[i].(*CommentBlock); !ok { - break - } - } - f.Syntax.Stmt = append(append(f.Syntax.Stmt[:i:i], stmt), f.Syntax.Stmt[i:]...) - } else { - f.Go.Version = version - f.Syntax.updateLine(f.Go.Syntax, "go", version) - } - return nil -} - -func (f *WorkFile) AddToolchainStmt(name string) error { - if !ToolchainRE.MatchString(name) { - return fmt.Errorf("invalid toolchain name %q", name) - } - if f.Toolchain == nil { - stmt := &Line{Token: []string{"toolchain", name}} - f.Toolchain = &Toolchain{ - Name: name, - Syntax: stmt, - } - // Find the go line and add the toolchain line after it. - // Or else find the first non-comment-only block and add - // the toolchain line before it. That will keep file comments at the top. - i := 0 - for i = 0; i < len(f.Syntax.Stmt); i++ { - if line, ok := f.Syntax.Stmt[i].(*Line); ok && len(line.Token) > 0 && line.Token[0] == "go" { - i++ - goto Found - } - } - for i = 0; i < len(f.Syntax.Stmt); i++ { - if _, ok := f.Syntax.Stmt[i].(*CommentBlock); !ok { - break - } - } - Found: - f.Syntax.Stmt = append(append(f.Syntax.Stmt[:i:i], stmt), f.Syntax.Stmt[i:]...) - } else { - f.Toolchain.Name = name - f.Syntax.updateLine(f.Toolchain.Syntax, "toolchain", name) - } - return nil -} - -// DropGoStmt deletes the go statement from the file. -func (f *WorkFile) DropGoStmt() { - if f.Go != nil { - f.Go.Syntax.markRemoved() - f.Go = nil - } -} - -// DropToolchainStmt deletes the toolchain statement from the file. -func (f *WorkFile) DropToolchainStmt() { - if f.Toolchain != nil { - f.Toolchain.Syntax.markRemoved() - f.Toolchain = nil - } -} - -func (f *WorkFile) AddUse(diskPath, modulePath string) error { - need := true - for _, d := range f.Use { - if d.Path == diskPath { - if need { - d.ModulePath = modulePath - f.Syntax.updateLine(d.Syntax, "use", AutoQuote(diskPath)) - need = false - } else { - d.Syntax.markRemoved() - *d = Use{} - } - } - } - - if need { - f.AddNewUse(diskPath, modulePath) - } - return nil -} - -func (f *WorkFile) AddNewUse(diskPath, modulePath string) { - line := f.Syntax.addLine(nil, "use", AutoQuote(diskPath)) - f.Use = append(f.Use, &Use{Path: diskPath, ModulePath: modulePath, Syntax: line}) -} - -func (f *WorkFile) SetUse(dirs []*Use) { - need := make(map[string]string) - for _, d := range dirs { - need[d.Path] = d.ModulePath - } - - for _, d := range f.Use { - if modulePath, ok := need[d.Path]; ok { - d.ModulePath = modulePath - } else { - d.Syntax.markRemoved() - *d = Use{} - } - } - - // TODO(#45713): Add module path to comment. 
- - for diskPath, modulePath := range need { - f.AddNewUse(diskPath, modulePath) - } - f.SortBlocks() -} - -func (f *WorkFile) DropUse(path string) error { - for _, d := range f.Use { - if d.Path == path { - d.Syntax.markRemoved() - *d = Use{} - } - } - return nil -} - -func (f *WorkFile) AddReplace(oldPath, oldVers, newPath, newVers string) error { - return addReplace(f.Syntax, &f.Replace, oldPath, oldVers, newPath, newVers) -} - -func (f *WorkFile) DropReplace(oldPath, oldVers string) error { - for _, r := range f.Replace { - if r.Old.Path == oldPath && r.Old.Version == oldVers { - r.Syntax.markRemoved() - *r = Replace{} - } - } - return nil -} - -func (f *WorkFile) SortBlocks() { - f.removeDups() // otherwise sorting is unsafe - - for _, stmt := range f.Syntax.Stmt { - block, ok := stmt.(*LineBlock) - if !ok { - continue - } - sort.SliceStable(block.Line, func(i, j int) bool { - return lineLess(block.Line[i], block.Line[j]) - }) - } -} - -// removeDups removes duplicate replace directives. -// -// Later replace directives take priority. -// -// require directives are not de-duplicated. That's left up to higher-level -// logic (MVS). -// -// retract directives are not de-duplicated since comments are -// meaningful, and versions may be retracted multiple times. -func (f *WorkFile) removeDups() { - removeDups(f.Syntax, nil, &f.Replace) -} diff --git a/vendor/golang.org/x/sync/errgroup/errgroup.go b/vendor/golang.org/x/sync/errgroup/errgroup.go deleted file mode 100644 index 948a3ee63..000000000 --- a/vendor/golang.org/x/sync/errgroup/errgroup.go +++ /dev/null @@ -1,135 +0,0 @@ -// Copyright 2016 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Package errgroup provides synchronization, error propagation, and Context -// cancelation for groups of goroutines working on subtasks of a common task. -// -// [errgroup.Group] is related to [sync.WaitGroup] but adds handling of tasks -// returning errors. -package errgroup - -import ( - "context" - "fmt" - "sync" -) - -type token struct{} - -// A Group is a collection of goroutines working on subtasks that are part of -// the same overall task. -// -// A zero Group is valid, has no limit on the number of active goroutines, -// and does not cancel on error. -type Group struct { - cancel func(error) - - wg sync.WaitGroup - - sem chan token - - errOnce sync.Once - err error -} - -func (g *Group) done() { - if g.sem != nil { - <-g.sem - } - g.wg.Done() -} - -// WithContext returns a new Group and an associated Context derived from ctx. -// -// The derived Context is canceled the first time a function passed to Go -// returns a non-nil error or the first time Wait returns, whichever occurs -// first. -func WithContext(ctx context.Context) (*Group, context.Context) { - ctx, cancel := withCancelCause(ctx) - return &Group{cancel: cancel}, ctx -} - -// Wait blocks until all function calls from the Go method have returned, then -// returns the first non-nil error (if any) from them. -func (g *Group) Wait() error { - g.wg.Wait() - if g.cancel != nil { - g.cancel(g.err) - } - return g.err -} - -// Go calls the given function in a new goroutine. -// It blocks until the new goroutine can be added without the number of -// active goroutines in the group exceeding the configured limit. -// -// The first call to return a non-nil error cancels the group's context, if the -// group was created by calling WithContext. The error will be returned by Wait. 
-func (g *Group) Go(f func() error) { - if g.sem != nil { - g.sem <- token{} - } - - g.wg.Add(1) - go func() { - defer g.done() - - if err := f(); err != nil { - g.errOnce.Do(func() { - g.err = err - if g.cancel != nil { - g.cancel(g.err) - } - }) - } - }() -} - -// TryGo calls the given function in a new goroutine only if the number of -// active goroutines in the group is currently below the configured limit. -// -// The return value reports whether the goroutine was started. -func (g *Group) TryGo(f func() error) bool { - if g.sem != nil { - select { - case g.sem <- token{}: - // Note: this allows barging iff channels in general allow barging. - default: - return false - } - } - - g.wg.Add(1) - go func() { - defer g.done() - - if err := f(); err != nil { - g.errOnce.Do(func() { - g.err = err - if g.cancel != nil { - g.cancel(g.err) - } - }) - } - }() - return true -} - -// SetLimit limits the number of active goroutines in this group to at most n. -// A negative value indicates no limit. -// -// Any subsequent call to the Go method will block until it can add an active -// goroutine without exceeding the configured limit. -// -// The limit must not be modified while any goroutines in the group are active. -func (g *Group) SetLimit(n int) { - if n < 0 { - g.sem = nil - return - } - if len(g.sem) != 0 { - panic(fmt.Errorf("errgroup: modify limit while %v goroutines in the group are still active", len(g.sem))) - } - g.sem = make(chan token, n) -} diff --git a/vendor/golang.org/x/sync/errgroup/go120.go b/vendor/golang.org/x/sync/errgroup/go120.go deleted file mode 100644 index f93c740b6..000000000 --- a/vendor/golang.org/x/sync/errgroup/go120.go +++ /dev/null @@ -1,13 +0,0 @@ -// Copyright 2023 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -//go:build go1.20 - -package errgroup - -import "context" - -func withCancelCause(parent context.Context) (context.Context, func(error)) { - return context.WithCancelCause(parent) -} diff --git a/vendor/golang.org/x/sync/errgroup/pre_go120.go b/vendor/golang.org/x/sync/errgroup/pre_go120.go deleted file mode 100644 index 88ce33434..000000000 --- a/vendor/golang.org/x/sync/errgroup/pre_go120.go +++ /dev/null @@ -1,14 +0,0 @@ -// Copyright 2023 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -//go:build !go1.20 - -package errgroup - -import "context" - -func withCancelCause(parent context.Context) (context.Context, func(error)) { - ctx, cancel := context.WithCancel(parent) - return ctx, func(error) { cancel() } -} diff --git a/vendor/golang.org/x/text/width/kind_string.go b/vendor/golang.org/x/text/width/kind_string.go deleted file mode 100644 index dd3febd43..000000000 --- a/vendor/golang.org/x/text/width/kind_string.go +++ /dev/null @@ -1,28 +0,0 @@ -// Code generated by "stringer -type=Kind"; DO NOT EDIT. - -package width - -import "strconv" - -func _() { - // An "invalid array index" compiler error signifies that the constant values have changed. - // Re-run the stringer command to generate them again. 
- var x [1]struct{} - _ = x[Neutral-0] - _ = x[EastAsianAmbiguous-1] - _ = x[EastAsianWide-2] - _ = x[EastAsianNarrow-3] - _ = x[EastAsianFullwidth-4] - _ = x[EastAsianHalfwidth-5] -} - -const _Kind_name = "NeutralEastAsianAmbiguousEastAsianWideEastAsianNarrowEastAsianFullwidthEastAsianHalfwidth" - -var _Kind_index = [...]uint8{0, 7, 25, 38, 53, 71, 89} - -func (i Kind) String() string { - if i < 0 || i >= Kind(len(_Kind_index)-1) { - return "Kind(" + strconv.FormatInt(int64(i), 10) + ")" - } - return _Kind_name[_Kind_index[i]:_Kind_index[i+1]] -} diff --git a/vendor/golang.org/x/text/width/tables10.0.0.go b/vendor/golang.org/x/text/width/tables10.0.0.go deleted file mode 100644 index 07c1cb17a..000000000 --- a/vendor/golang.org/x/text/width/tables10.0.0.go +++ /dev/null @@ -1,1328 +0,0 @@ -// Code generated by running "go generate" in golang.org/x/text. DO NOT EDIT. - -//go:build go1.10 && !go1.13 - -package width - -// UnicodeVersion is the Unicode version from which the tables in this package are derived. -const UnicodeVersion = "10.0.0" - -// lookup returns the trie value for the first UTF-8 encoding in s and -// the width in bytes of this encoding. The size will be 0 if s does not -// hold enough bytes to complete the encoding. len(s) must be greater than 0. -func (t *widthTrie) lookup(s []byte) (v uint16, sz int) { - c0 := s[0] - switch { - case c0 < 0x80: // is ASCII - return widthValues[c0], 1 - case c0 < 0xC2: - return 0, 1 // Illegal UTF-8: not a starter, not ASCII. - case c0 < 0xE0: // 2-byte UTF-8 - if len(s) < 2 { - return 0, 0 - } - i := widthIndex[c0] - c1 := s[1] - if c1 < 0x80 || 0xC0 <= c1 { - return 0, 1 // Illegal UTF-8: not a continuation byte. - } - return t.lookupValue(uint32(i), c1), 2 - case c0 < 0xF0: // 3-byte UTF-8 - if len(s) < 3 { - return 0, 0 - } - i := widthIndex[c0] - c1 := s[1] - if c1 < 0x80 || 0xC0 <= c1 { - return 0, 1 // Illegal UTF-8: not a continuation byte. - } - o := uint32(i)<<6 + uint32(c1) - i = widthIndex[o] - c2 := s[2] - if c2 < 0x80 || 0xC0 <= c2 { - return 0, 2 // Illegal UTF-8: not a continuation byte. - } - return t.lookupValue(uint32(i), c2), 3 - case c0 < 0xF8: // 4-byte UTF-8 - if len(s) < 4 { - return 0, 0 - } - i := widthIndex[c0] - c1 := s[1] - if c1 < 0x80 || 0xC0 <= c1 { - return 0, 1 // Illegal UTF-8: not a continuation byte. - } - o := uint32(i)<<6 + uint32(c1) - i = widthIndex[o] - c2 := s[2] - if c2 < 0x80 || 0xC0 <= c2 { - return 0, 2 // Illegal UTF-8: not a continuation byte. - } - o = uint32(i)<<6 + uint32(c2) - i = widthIndex[o] - c3 := s[3] - if c3 < 0x80 || 0xC0 <= c3 { - return 0, 3 // Illegal UTF-8: not a continuation byte. - } - return t.lookupValue(uint32(i), c3), 4 - } - // Illegal rune - return 0, 1 -} - -// lookupUnsafe returns the trie value for the first UTF-8 encoding in s. -// s must start with a full and valid UTF-8 encoded rune. -func (t *widthTrie) lookupUnsafe(s []byte) uint16 { - c0 := s[0] - if c0 < 0x80 { // is ASCII - return widthValues[c0] - } - i := widthIndex[c0] - if c0 < 0xE0 { // 2-byte UTF-8 - return t.lookupValue(uint32(i), s[1]) - } - i = widthIndex[uint32(i)<<6+uint32(s[1])] - if c0 < 0xF0 { // 3-byte UTF-8 - return t.lookupValue(uint32(i), s[2]) - } - i = widthIndex[uint32(i)<<6+uint32(s[2])] - if c0 < 0xF8 { // 4-byte UTF-8 - return t.lookupValue(uint32(i), s[3]) - } - return 0 -} - -// lookupString returns the trie value for the first UTF-8 encoding in s and -// the width in bytes of this encoding. The size will be 0 if s does not -// hold enough bytes to complete the encoding. 
len(s) must be greater than 0. -func (t *widthTrie) lookupString(s string) (v uint16, sz int) { - c0 := s[0] - switch { - case c0 < 0x80: // is ASCII - return widthValues[c0], 1 - case c0 < 0xC2: - return 0, 1 // Illegal UTF-8: not a starter, not ASCII. - case c0 < 0xE0: // 2-byte UTF-8 - if len(s) < 2 { - return 0, 0 - } - i := widthIndex[c0] - c1 := s[1] - if c1 < 0x80 || 0xC0 <= c1 { - return 0, 1 // Illegal UTF-8: not a continuation byte. - } - return t.lookupValue(uint32(i), c1), 2 - case c0 < 0xF0: // 3-byte UTF-8 - if len(s) < 3 { - return 0, 0 - } - i := widthIndex[c0] - c1 := s[1] - if c1 < 0x80 || 0xC0 <= c1 { - return 0, 1 // Illegal UTF-8: not a continuation byte. - } - o := uint32(i)<<6 + uint32(c1) - i = widthIndex[o] - c2 := s[2] - if c2 < 0x80 || 0xC0 <= c2 { - return 0, 2 // Illegal UTF-8: not a continuation byte. - } - return t.lookupValue(uint32(i), c2), 3 - case c0 < 0xF8: // 4-byte UTF-8 - if len(s) < 4 { - return 0, 0 - } - i := widthIndex[c0] - c1 := s[1] - if c1 < 0x80 || 0xC0 <= c1 { - return 0, 1 // Illegal UTF-8: not a continuation byte. - } - o := uint32(i)<<6 + uint32(c1) - i = widthIndex[o] - c2 := s[2] - if c2 < 0x80 || 0xC0 <= c2 { - return 0, 2 // Illegal UTF-8: not a continuation byte. - } - o = uint32(i)<<6 + uint32(c2) - i = widthIndex[o] - c3 := s[3] - if c3 < 0x80 || 0xC0 <= c3 { - return 0, 3 // Illegal UTF-8: not a continuation byte. - } - return t.lookupValue(uint32(i), c3), 4 - } - // Illegal rune - return 0, 1 -} - -// lookupStringUnsafe returns the trie value for the first UTF-8 encoding in s. -// s must start with a full and valid UTF-8 encoded rune. -func (t *widthTrie) lookupStringUnsafe(s string) uint16 { - c0 := s[0] - if c0 < 0x80 { // is ASCII - return widthValues[c0] - } - i := widthIndex[c0] - if c0 < 0xE0 { // 2-byte UTF-8 - return t.lookupValue(uint32(i), s[1]) - } - i = widthIndex[uint32(i)<<6+uint32(s[1])] - if c0 < 0xF0 { // 3-byte UTF-8 - return t.lookupValue(uint32(i), s[2]) - } - i = widthIndex[uint32(i)<<6+uint32(s[2])] - if c0 < 0xF8 { // 4-byte UTF-8 - return t.lookupValue(uint32(i), s[3]) - } - return 0 -} - -// widthTrie. Total size: 14336 bytes (14.00 KiB). Checksum: c59df54630d3dc4a. -type widthTrie struct{} - -func newWidthTrie(i int) *widthTrie { - return &widthTrie{} -} - -// lookupValue determines the type of block n and looks up the value for b. -func (t *widthTrie) lookupValue(n uint32, b byte) uint16 { - switch { - default: - return uint16(widthValues[n<<6+uint32(b)]) - } -} - -// widthValues: 101 blocks, 6464 entries, 12928 bytes -// The third block is the zero block. 
-var widthValues = [6464]uint16{ - // Block 0x0, offset 0x0 - 0x20: 0x6001, 0x21: 0x6002, 0x22: 0x6002, 0x23: 0x6002, - 0x24: 0x6002, 0x25: 0x6002, 0x26: 0x6002, 0x27: 0x6002, 0x28: 0x6002, 0x29: 0x6002, - 0x2a: 0x6002, 0x2b: 0x6002, 0x2c: 0x6002, 0x2d: 0x6002, 0x2e: 0x6002, 0x2f: 0x6002, - 0x30: 0x6002, 0x31: 0x6002, 0x32: 0x6002, 0x33: 0x6002, 0x34: 0x6002, 0x35: 0x6002, - 0x36: 0x6002, 0x37: 0x6002, 0x38: 0x6002, 0x39: 0x6002, 0x3a: 0x6002, 0x3b: 0x6002, - 0x3c: 0x6002, 0x3d: 0x6002, 0x3e: 0x6002, 0x3f: 0x6002, - // Block 0x1, offset 0x40 - 0x40: 0x6003, 0x41: 0x6003, 0x42: 0x6003, 0x43: 0x6003, 0x44: 0x6003, 0x45: 0x6003, - 0x46: 0x6003, 0x47: 0x6003, 0x48: 0x6003, 0x49: 0x6003, 0x4a: 0x6003, 0x4b: 0x6003, - 0x4c: 0x6003, 0x4d: 0x6003, 0x4e: 0x6003, 0x4f: 0x6003, 0x50: 0x6003, 0x51: 0x6003, - 0x52: 0x6003, 0x53: 0x6003, 0x54: 0x6003, 0x55: 0x6003, 0x56: 0x6003, 0x57: 0x6003, - 0x58: 0x6003, 0x59: 0x6003, 0x5a: 0x6003, 0x5b: 0x6003, 0x5c: 0x6003, 0x5d: 0x6003, - 0x5e: 0x6003, 0x5f: 0x6003, 0x60: 0x6004, 0x61: 0x6004, 0x62: 0x6004, 0x63: 0x6004, - 0x64: 0x6004, 0x65: 0x6004, 0x66: 0x6004, 0x67: 0x6004, 0x68: 0x6004, 0x69: 0x6004, - 0x6a: 0x6004, 0x6b: 0x6004, 0x6c: 0x6004, 0x6d: 0x6004, 0x6e: 0x6004, 0x6f: 0x6004, - 0x70: 0x6004, 0x71: 0x6004, 0x72: 0x6004, 0x73: 0x6004, 0x74: 0x6004, 0x75: 0x6004, - 0x76: 0x6004, 0x77: 0x6004, 0x78: 0x6004, 0x79: 0x6004, 0x7a: 0x6004, 0x7b: 0x6004, - 0x7c: 0x6004, 0x7d: 0x6004, 0x7e: 0x6004, - // Block 0x2, offset 0x80 - // Block 0x3, offset 0xc0 - 0xe1: 0x2000, 0xe2: 0x6005, 0xe3: 0x6005, - 0xe4: 0x2000, 0xe5: 0x6006, 0xe6: 0x6005, 0xe7: 0x2000, 0xe8: 0x2000, - 0xea: 0x2000, 0xec: 0x6007, 0xed: 0x2000, 0xee: 0x2000, 0xef: 0x6008, - 0xf0: 0x2000, 0xf1: 0x2000, 0xf2: 0x2000, 0xf3: 0x2000, 0xf4: 0x2000, - 0xf6: 0x2000, 0xf7: 0x2000, 0xf8: 0x2000, 0xf9: 0x2000, 0xfa: 0x2000, - 0xfc: 0x2000, 0xfd: 0x2000, 0xfe: 0x2000, 0xff: 0x2000, - // Block 0x4, offset 0x100 - 0x106: 0x2000, - 0x110: 0x2000, - 0x117: 0x2000, - 0x118: 0x2000, - 0x11e: 0x2000, 0x11f: 0x2000, 0x120: 0x2000, 0x121: 0x2000, - 0x126: 0x2000, 0x128: 0x2000, 0x129: 0x2000, - 0x12a: 0x2000, 0x12c: 0x2000, 0x12d: 0x2000, - 0x130: 0x2000, 0x132: 0x2000, 0x133: 0x2000, - 0x137: 0x2000, 0x138: 0x2000, 0x139: 0x2000, 0x13a: 0x2000, - 0x13c: 0x2000, 0x13e: 0x2000, - // Block 0x5, offset 0x140 - 0x141: 0x2000, - 0x151: 0x2000, - 0x153: 0x2000, - 0x15b: 0x2000, - 0x166: 0x2000, 0x167: 0x2000, - 0x16b: 0x2000, - 0x171: 0x2000, 0x172: 0x2000, 0x173: 0x2000, - 0x178: 0x2000, - 0x17f: 0x2000, - // Block 0x6, offset 0x180 - 0x180: 0x2000, 0x181: 0x2000, 0x182: 0x2000, 0x184: 0x2000, - 0x188: 0x2000, 0x189: 0x2000, 0x18a: 0x2000, 0x18b: 0x2000, - 0x18d: 0x2000, - 0x192: 0x2000, 0x193: 0x2000, - 0x1a6: 0x2000, 0x1a7: 0x2000, - 0x1ab: 0x2000, - // Block 0x7, offset 0x1c0 - 0x1ce: 0x2000, 0x1d0: 0x2000, - 0x1d2: 0x2000, 0x1d4: 0x2000, 0x1d6: 0x2000, - 0x1d8: 0x2000, 0x1da: 0x2000, 0x1dc: 0x2000, - // Block 0x8, offset 0x200 - 0x211: 0x2000, - 0x221: 0x2000, - // Block 0x9, offset 0x240 - 0x244: 0x2000, - 0x247: 0x2000, 0x249: 0x2000, 0x24a: 0x2000, 0x24b: 0x2000, - 0x24d: 0x2000, 0x250: 0x2000, - 0x258: 0x2000, 0x259: 0x2000, 0x25a: 0x2000, 0x25b: 0x2000, 0x25d: 0x2000, - 0x25f: 0x2000, - // Block 0xa, offset 0x280 - 0x280: 0x2000, 0x281: 0x2000, 0x282: 0x2000, 0x283: 0x2000, 0x284: 0x2000, 0x285: 0x2000, - 0x286: 0x2000, 0x287: 0x2000, 0x288: 0x2000, 0x289: 0x2000, 0x28a: 0x2000, 0x28b: 0x2000, - 0x28c: 0x2000, 0x28d: 0x2000, 0x28e: 0x2000, 0x28f: 0x2000, 0x290: 0x2000, 0x291: 0x2000, - 0x292: 0x2000, 0x293: 
0x2000, 0x294: 0x2000, 0x295: 0x2000, 0x296: 0x2000, 0x297: 0x2000, - 0x298: 0x2000, 0x299: 0x2000, 0x29a: 0x2000, 0x29b: 0x2000, 0x29c: 0x2000, 0x29d: 0x2000, - 0x29e: 0x2000, 0x29f: 0x2000, 0x2a0: 0x2000, 0x2a1: 0x2000, 0x2a2: 0x2000, 0x2a3: 0x2000, - 0x2a4: 0x2000, 0x2a5: 0x2000, 0x2a6: 0x2000, 0x2a7: 0x2000, 0x2a8: 0x2000, 0x2a9: 0x2000, - 0x2aa: 0x2000, 0x2ab: 0x2000, 0x2ac: 0x2000, 0x2ad: 0x2000, 0x2ae: 0x2000, 0x2af: 0x2000, - 0x2b0: 0x2000, 0x2b1: 0x2000, 0x2b2: 0x2000, 0x2b3: 0x2000, 0x2b4: 0x2000, 0x2b5: 0x2000, - 0x2b6: 0x2000, 0x2b7: 0x2000, 0x2b8: 0x2000, 0x2b9: 0x2000, 0x2ba: 0x2000, 0x2bb: 0x2000, - 0x2bc: 0x2000, 0x2bd: 0x2000, 0x2be: 0x2000, 0x2bf: 0x2000, - // Block 0xb, offset 0x2c0 - 0x2c0: 0x2000, 0x2c1: 0x2000, 0x2c2: 0x2000, 0x2c3: 0x2000, 0x2c4: 0x2000, 0x2c5: 0x2000, - 0x2c6: 0x2000, 0x2c7: 0x2000, 0x2c8: 0x2000, 0x2c9: 0x2000, 0x2ca: 0x2000, 0x2cb: 0x2000, - 0x2cc: 0x2000, 0x2cd: 0x2000, 0x2ce: 0x2000, 0x2cf: 0x2000, 0x2d0: 0x2000, 0x2d1: 0x2000, - 0x2d2: 0x2000, 0x2d3: 0x2000, 0x2d4: 0x2000, 0x2d5: 0x2000, 0x2d6: 0x2000, 0x2d7: 0x2000, - 0x2d8: 0x2000, 0x2d9: 0x2000, 0x2da: 0x2000, 0x2db: 0x2000, 0x2dc: 0x2000, 0x2dd: 0x2000, - 0x2de: 0x2000, 0x2df: 0x2000, 0x2e0: 0x2000, 0x2e1: 0x2000, 0x2e2: 0x2000, 0x2e3: 0x2000, - 0x2e4: 0x2000, 0x2e5: 0x2000, 0x2e6: 0x2000, 0x2e7: 0x2000, 0x2e8: 0x2000, 0x2e9: 0x2000, - 0x2ea: 0x2000, 0x2eb: 0x2000, 0x2ec: 0x2000, 0x2ed: 0x2000, 0x2ee: 0x2000, 0x2ef: 0x2000, - // Block 0xc, offset 0x300 - 0x311: 0x2000, - 0x312: 0x2000, 0x313: 0x2000, 0x314: 0x2000, 0x315: 0x2000, 0x316: 0x2000, 0x317: 0x2000, - 0x318: 0x2000, 0x319: 0x2000, 0x31a: 0x2000, 0x31b: 0x2000, 0x31c: 0x2000, 0x31d: 0x2000, - 0x31e: 0x2000, 0x31f: 0x2000, 0x320: 0x2000, 0x321: 0x2000, 0x323: 0x2000, - 0x324: 0x2000, 0x325: 0x2000, 0x326: 0x2000, 0x327: 0x2000, 0x328: 0x2000, 0x329: 0x2000, - 0x331: 0x2000, 0x332: 0x2000, 0x333: 0x2000, 0x334: 0x2000, 0x335: 0x2000, - 0x336: 0x2000, 0x337: 0x2000, 0x338: 0x2000, 0x339: 0x2000, 0x33a: 0x2000, 0x33b: 0x2000, - 0x33c: 0x2000, 0x33d: 0x2000, 0x33e: 0x2000, 0x33f: 0x2000, - // Block 0xd, offset 0x340 - 0x340: 0x2000, 0x341: 0x2000, 0x343: 0x2000, 0x344: 0x2000, 0x345: 0x2000, - 0x346: 0x2000, 0x347: 0x2000, 0x348: 0x2000, 0x349: 0x2000, - // Block 0xe, offset 0x380 - 0x381: 0x2000, - 0x390: 0x2000, 0x391: 0x2000, - 0x392: 0x2000, 0x393: 0x2000, 0x394: 0x2000, 0x395: 0x2000, 0x396: 0x2000, 0x397: 0x2000, - 0x398: 0x2000, 0x399: 0x2000, 0x39a: 0x2000, 0x39b: 0x2000, 0x39c: 0x2000, 0x39d: 0x2000, - 0x39e: 0x2000, 0x39f: 0x2000, 0x3a0: 0x2000, 0x3a1: 0x2000, 0x3a2: 0x2000, 0x3a3: 0x2000, - 0x3a4: 0x2000, 0x3a5: 0x2000, 0x3a6: 0x2000, 0x3a7: 0x2000, 0x3a8: 0x2000, 0x3a9: 0x2000, - 0x3aa: 0x2000, 0x3ab: 0x2000, 0x3ac: 0x2000, 0x3ad: 0x2000, 0x3ae: 0x2000, 0x3af: 0x2000, - 0x3b0: 0x2000, 0x3b1: 0x2000, 0x3b2: 0x2000, 0x3b3: 0x2000, 0x3b4: 0x2000, 0x3b5: 0x2000, - 0x3b6: 0x2000, 0x3b7: 0x2000, 0x3b8: 0x2000, 0x3b9: 0x2000, 0x3ba: 0x2000, 0x3bb: 0x2000, - 0x3bc: 0x2000, 0x3bd: 0x2000, 0x3be: 0x2000, 0x3bf: 0x2000, - // Block 0xf, offset 0x3c0 - 0x3c0: 0x2000, 0x3c1: 0x2000, 0x3c2: 0x2000, 0x3c3: 0x2000, 0x3c4: 0x2000, 0x3c5: 0x2000, - 0x3c6: 0x2000, 0x3c7: 0x2000, 0x3c8: 0x2000, 0x3c9: 0x2000, 0x3ca: 0x2000, 0x3cb: 0x2000, - 0x3cc: 0x2000, 0x3cd: 0x2000, 0x3ce: 0x2000, 0x3cf: 0x2000, 0x3d1: 0x2000, - // Block 0x10, offset 0x400 - 0x400: 0x4000, 0x401: 0x4000, 0x402: 0x4000, 0x403: 0x4000, 0x404: 0x4000, 0x405: 0x4000, - 0x406: 0x4000, 0x407: 0x4000, 0x408: 0x4000, 0x409: 0x4000, 0x40a: 0x4000, 0x40b: 0x4000, - 0x40c: 0x4000, 
0x40d: 0x4000, 0x40e: 0x4000, 0x40f: 0x4000, 0x410: 0x4000, 0x411: 0x4000, - 0x412: 0x4000, 0x413: 0x4000, 0x414: 0x4000, 0x415: 0x4000, 0x416: 0x4000, 0x417: 0x4000, - 0x418: 0x4000, 0x419: 0x4000, 0x41a: 0x4000, 0x41b: 0x4000, 0x41c: 0x4000, 0x41d: 0x4000, - 0x41e: 0x4000, 0x41f: 0x4000, 0x420: 0x4000, 0x421: 0x4000, 0x422: 0x4000, 0x423: 0x4000, - 0x424: 0x4000, 0x425: 0x4000, 0x426: 0x4000, 0x427: 0x4000, 0x428: 0x4000, 0x429: 0x4000, - 0x42a: 0x4000, 0x42b: 0x4000, 0x42c: 0x4000, 0x42d: 0x4000, 0x42e: 0x4000, 0x42f: 0x4000, - 0x430: 0x4000, 0x431: 0x4000, 0x432: 0x4000, 0x433: 0x4000, 0x434: 0x4000, 0x435: 0x4000, - 0x436: 0x4000, 0x437: 0x4000, 0x438: 0x4000, 0x439: 0x4000, 0x43a: 0x4000, 0x43b: 0x4000, - 0x43c: 0x4000, 0x43d: 0x4000, 0x43e: 0x4000, 0x43f: 0x4000, - // Block 0x11, offset 0x440 - 0x440: 0x4000, 0x441: 0x4000, 0x442: 0x4000, 0x443: 0x4000, 0x444: 0x4000, 0x445: 0x4000, - 0x446: 0x4000, 0x447: 0x4000, 0x448: 0x4000, 0x449: 0x4000, 0x44a: 0x4000, 0x44b: 0x4000, - 0x44c: 0x4000, 0x44d: 0x4000, 0x44e: 0x4000, 0x44f: 0x4000, 0x450: 0x4000, 0x451: 0x4000, - 0x452: 0x4000, 0x453: 0x4000, 0x454: 0x4000, 0x455: 0x4000, 0x456: 0x4000, 0x457: 0x4000, - 0x458: 0x4000, 0x459: 0x4000, 0x45a: 0x4000, 0x45b: 0x4000, 0x45c: 0x4000, 0x45d: 0x4000, - 0x45e: 0x4000, 0x45f: 0x4000, - // Block 0x12, offset 0x480 - 0x490: 0x2000, - 0x493: 0x2000, 0x494: 0x2000, 0x495: 0x2000, 0x496: 0x2000, - 0x498: 0x2000, 0x499: 0x2000, 0x49c: 0x2000, 0x49d: 0x2000, - 0x4a0: 0x2000, 0x4a1: 0x2000, 0x4a2: 0x2000, - 0x4a4: 0x2000, 0x4a5: 0x2000, 0x4a6: 0x2000, 0x4a7: 0x2000, - 0x4b0: 0x2000, 0x4b2: 0x2000, 0x4b3: 0x2000, 0x4b5: 0x2000, - 0x4bb: 0x2000, - 0x4be: 0x2000, - // Block 0x13, offset 0x4c0 - 0x4f4: 0x2000, - 0x4ff: 0x2000, - // Block 0x14, offset 0x500 - 0x501: 0x2000, 0x502: 0x2000, 0x503: 0x2000, 0x504: 0x2000, - 0x529: 0xa009, - 0x52c: 0x2000, - // Block 0x15, offset 0x540 - 0x543: 0x2000, 0x545: 0x2000, - 0x549: 0x2000, - 0x553: 0x2000, 0x556: 0x2000, - 0x561: 0x2000, 0x562: 0x2000, - 0x566: 0x2000, - 0x56b: 0x2000, - // Block 0x16, offset 0x580 - 0x593: 0x2000, 0x594: 0x2000, - 0x59b: 0x2000, 0x59c: 0x2000, 0x59d: 0x2000, - 0x59e: 0x2000, 0x5a0: 0x2000, 0x5a1: 0x2000, 0x5a2: 0x2000, 0x5a3: 0x2000, - 0x5a4: 0x2000, 0x5a5: 0x2000, 0x5a6: 0x2000, 0x5a7: 0x2000, 0x5a8: 0x2000, 0x5a9: 0x2000, - 0x5aa: 0x2000, 0x5ab: 0x2000, - 0x5b0: 0x2000, 0x5b1: 0x2000, 0x5b2: 0x2000, 0x5b3: 0x2000, 0x5b4: 0x2000, 0x5b5: 0x2000, - 0x5b6: 0x2000, 0x5b7: 0x2000, 0x5b8: 0x2000, 0x5b9: 0x2000, - // Block 0x17, offset 0x5c0 - 0x5c9: 0x2000, - 0x5d0: 0x200a, 0x5d1: 0x200b, - 0x5d2: 0x200a, 0x5d3: 0x200c, 0x5d4: 0x2000, 0x5d5: 0x2000, 0x5d6: 0x2000, 0x5d7: 0x2000, - 0x5d8: 0x2000, 0x5d9: 0x2000, - 0x5f8: 0x2000, 0x5f9: 0x2000, - // Block 0x18, offset 0x600 - 0x612: 0x2000, 0x614: 0x2000, - 0x627: 0x2000, - // Block 0x19, offset 0x640 - 0x640: 0x2000, 0x642: 0x2000, 0x643: 0x2000, - 0x647: 0x2000, 0x648: 0x2000, 0x64b: 0x2000, - 0x64f: 0x2000, 0x651: 0x2000, - 0x655: 0x2000, - 0x65a: 0x2000, 0x65d: 0x2000, - 0x65e: 0x2000, 0x65f: 0x2000, 0x660: 0x2000, 0x663: 0x2000, - 0x665: 0x2000, 0x667: 0x2000, 0x668: 0x2000, 0x669: 0x2000, - 0x66a: 0x2000, 0x66b: 0x2000, 0x66c: 0x2000, 0x66e: 0x2000, - 0x674: 0x2000, 0x675: 0x2000, - 0x676: 0x2000, 0x677: 0x2000, - 0x67c: 0x2000, 0x67d: 0x2000, - // Block 0x1a, offset 0x680 - 0x688: 0x2000, - 0x68c: 0x2000, - 0x692: 0x2000, - 0x6a0: 0x2000, 0x6a1: 0x2000, - 0x6a4: 0x2000, 0x6a5: 0x2000, 0x6a6: 0x2000, 0x6a7: 0x2000, - 0x6aa: 0x2000, 0x6ab: 0x2000, 0x6ae: 0x2000, 0x6af: 0x2000, - // 
Block 0x1b, offset 0x6c0 - 0x6c2: 0x2000, 0x6c3: 0x2000, - 0x6c6: 0x2000, 0x6c7: 0x2000, - 0x6d5: 0x2000, - 0x6d9: 0x2000, - 0x6e5: 0x2000, - 0x6ff: 0x2000, - // Block 0x1c, offset 0x700 - 0x712: 0x2000, - 0x71a: 0x4000, 0x71b: 0x4000, - 0x729: 0x4000, - 0x72a: 0x4000, - // Block 0x1d, offset 0x740 - 0x769: 0x4000, - 0x76a: 0x4000, 0x76b: 0x4000, 0x76c: 0x4000, - 0x770: 0x4000, 0x773: 0x4000, - // Block 0x1e, offset 0x780 - 0x7a0: 0x2000, 0x7a1: 0x2000, 0x7a2: 0x2000, 0x7a3: 0x2000, - 0x7a4: 0x2000, 0x7a5: 0x2000, 0x7a6: 0x2000, 0x7a7: 0x2000, 0x7a8: 0x2000, 0x7a9: 0x2000, - 0x7aa: 0x2000, 0x7ab: 0x2000, 0x7ac: 0x2000, 0x7ad: 0x2000, 0x7ae: 0x2000, 0x7af: 0x2000, - 0x7b0: 0x2000, 0x7b1: 0x2000, 0x7b2: 0x2000, 0x7b3: 0x2000, 0x7b4: 0x2000, 0x7b5: 0x2000, - 0x7b6: 0x2000, 0x7b7: 0x2000, 0x7b8: 0x2000, 0x7b9: 0x2000, 0x7ba: 0x2000, 0x7bb: 0x2000, - 0x7bc: 0x2000, 0x7bd: 0x2000, 0x7be: 0x2000, 0x7bf: 0x2000, - // Block 0x1f, offset 0x7c0 - 0x7c0: 0x2000, 0x7c1: 0x2000, 0x7c2: 0x2000, 0x7c3: 0x2000, 0x7c4: 0x2000, 0x7c5: 0x2000, - 0x7c6: 0x2000, 0x7c7: 0x2000, 0x7c8: 0x2000, 0x7c9: 0x2000, 0x7ca: 0x2000, 0x7cb: 0x2000, - 0x7cc: 0x2000, 0x7cd: 0x2000, 0x7ce: 0x2000, 0x7cf: 0x2000, 0x7d0: 0x2000, 0x7d1: 0x2000, - 0x7d2: 0x2000, 0x7d3: 0x2000, 0x7d4: 0x2000, 0x7d5: 0x2000, 0x7d6: 0x2000, 0x7d7: 0x2000, - 0x7d8: 0x2000, 0x7d9: 0x2000, 0x7da: 0x2000, 0x7db: 0x2000, 0x7dc: 0x2000, 0x7dd: 0x2000, - 0x7de: 0x2000, 0x7df: 0x2000, 0x7e0: 0x2000, 0x7e1: 0x2000, 0x7e2: 0x2000, 0x7e3: 0x2000, - 0x7e4: 0x2000, 0x7e5: 0x2000, 0x7e6: 0x2000, 0x7e7: 0x2000, 0x7e8: 0x2000, 0x7e9: 0x2000, - 0x7eb: 0x2000, 0x7ec: 0x2000, 0x7ed: 0x2000, 0x7ee: 0x2000, 0x7ef: 0x2000, - 0x7f0: 0x2000, 0x7f1: 0x2000, 0x7f2: 0x2000, 0x7f3: 0x2000, 0x7f4: 0x2000, 0x7f5: 0x2000, - 0x7f6: 0x2000, 0x7f7: 0x2000, 0x7f8: 0x2000, 0x7f9: 0x2000, 0x7fa: 0x2000, 0x7fb: 0x2000, - 0x7fc: 0x2000, 0x7fd: 0x2000, 0x7fe: 0x2000, 0x7ff: 0x2000, - // Block 0x20, offset 0x800 - 0x800: 0x2000, 0x801: 0x2000, 0x802: 0x200d, 0x803: 0x2000, 0x804: 0x2000, 0x805: 0x2000, - 0x806: 0x2000, 0x807: 0x2000, 0x808: 0x2000, 0x809: 0x2000, 0x80a: 0x2000, 0x80b: 0x2000, - 0x80c: 0x2000, 0x80d: 0x2000, 0x80e: 0x2000, 0x80f: 0x2000, 0x810: 0x2000, 0x811: 0x2000, - 0x812: 0x2000, 0x813: 0x2000, 0x814: 0x2000, 0x815: 0x2000, 0x816: 0x2000, 0x817: 0x2000, - 0x818: 0x2000, 0x819: 0x2000, 0x81a: 0x2000, 0x81b: 0x2000, 0x81c: 0x2000, 0x81d: 0x2000, - 0x81e: 0x2000, 0x81f: 0x2000, 0x820: 0x2000, 0x821: 0x2000, 0x822: 0x2000, 0x823: 0x2000, - 0x824: 0x2000, 0x825: 0x2000, 0x826: 0x2000, 0x827: 0x2000, 0x828: 0x2000, 0x829: 0x2000, - 0x82a: 0x2000, 0x82b: 0x2000, 0x82c: 0x2000, 0x82d: 0x2000, 0x82e: 0x2000, 0x82f: 0x2000, - 0x830: 0x2000, 0x831: 0x2000, 0x832: 0x2000, 0x833: 0x2000, 0x834: 0x2000, 0x835: 0x2000, - 0x836: 0x2000, 0x837: 0x2000, 0x838: 0x2000, 0x839: 0x2000, 0x83a: 0x2000, 0x83b: 0x2000, - 0x83c: 0x2000, 0x83d: 0x2000, 0x83e: 0x2000, 0x83f: 0x2000, - // Block 0x21, offset 0x840 - 0x840: 0x2000, 0x841: 0x2000, 0x842: 0x2000, 0x843: 0x2000, 0x844: 0x2000, 0x845: 0x2000, - 0x846: 0x2000, 0x847: 0x2000, 0x848: 0x2000, 0x849: 0x2000, 0x84a: 0x2000, 0x84b: 0x2000, - 0x850: 0x2000, 0x851: 0x2000, - 0x852: 0x2000, 0x853: 0x2000, 0x854: 0x2000, 0x855: 0x2000, 0x856: 0x2000, 0x857: 0x2000, - 0x858: 0x2000, 0x859: 0x2000, 0x85a: 0x2000, 0x85b: 0x2000, 0x85c: 0x2000, 0x85d: 0x2000, - 0x85e: 0x2000, 0x85f: 0x2000, 0x860: 0x2000, 0x861: 0x2000, 0x862: 0x2000, 0x863: 0x2000, - 0x864: 0x2000, 0x865: 0x2000, 0x866: 0x2000, 0x867: 0x2000, 0x868: 0x2000, 0x869: 0x2000, - 0x86a: 
0x2000, 0x86b: 0x2000, 0x86c: 0x2000, 0x86d: 0x2000, 0x86e: 0x2000, 0x86f: 0x2000, - 0x870: 0x2000, 0x871: 0x2000, 0x872: 0x2000, 0x873: 0x2000, - // Block 0x22, offset 0x880 - 0x880: 0x2000, 0x881: 0x2000, 0x882: 0x2000, 0x883: 0x2000, 0x884: 0x2000, 0x885: 0x2000, - 0x886: 0x2000, 0x887: 0x2000, 0x888: 0x2000, 0x889: 0x2000, 0x88a: 0x2000, 0x88b: 0x2000, - 0x88c: 0x2000, 0x88d: 0x2000, 0x88e: 0x2000, 0x88f: 0x2000, - 0x892: 0x2000, 0x893: 0x2000, 0x894: 0x2000, 0x895: 0x2000, - 0x8a0: 0x200e, 0x8a1: 0x2000, 0x8a3: 0x2000, - 0x8a4: 0x2000, 0x8a5: 0x2000, 0x8a6: 0x2000, 0x8a7: 0x2000, 0x8a8: 0x2000, 0x8a9: 0x2000, - 0x8b2: 0x2000, 0x8b3: 0x2000, - 0x8b6: 0x2000, 0x8b7: 0x2000, - 0x8bc: 0x2000, 0x8bd: 0x2000, - // Block 0x23, offset 0x8c0 - 0x8c0: 0x2000, 0x8c1: 0x2000, - 0x8c6: 0x2000, 0x8c7: 0x2000, 0x8c8: 0x2000, 0x8cb: 0x200f, - 0x8ce: 0x2000, 0x8cf: 0x2000, 0x8d0: 0x2000, 0x8d1: 0x2000, - 0x8e2: 0x2000, 0x8e3: 0x2000, - 0x8e4: 0x2000, 0x8e5: 0x2000, - 0x8ef: 0x2000, - 0x8fd: 0x4000, 0x8fe: 0x4000, - // Block 0x24, offset 0x900 - 0x905: 0x2000, - 0x906: 0x2000, 0x909: 0x2000, - 0x90e: 0x2000, 0x90f: 0x2000, - 0x914: 0x4000, 0x915: 0x4000, - 0x91c: 0x2000, - 0x91e: 0x2000, - // Block 0x25, offset 0x940 - 0x940: 0x2000, 0x942: 0x2000, - 0x948: 0x4000, 0x949: 0x4000, 0x94a: 0x4000, 0x94b: 0x4000, - 0x94c: 0x4000, 0x94d: 0x4000, 0x94e: 0x4000, 0x94f: 0x4000, 0x950: 0x4000, 0x951: 0x4000, - 0x952: 0x4000, 0x953: 0x4000, - 0x960: 0x2000, 0x961: 0x2000, 0x963: 0x2000, - 0x964: 0x2000, 0x965: 0x2000, 0x967: 0x2000, 0x968: 0x2000, 0x969: 0x2000, - 0x96a: 0x2000, 0x96c: 0x2000, 0x96d: 0x2000, 0x96f: 0x2000, - 0x97f: 0x4000, - // Block 0x26, offset 0x980 - 0x993: 0x4000, - 0x99e: 0x2000, 0x99f: 0x2000, 0x9a1: 0x4000, - 0x9aa: 0x4000, 0x9ab: 0x4000, - 0x9bd: 0x4000, 0x9be: 0x4000, 0x9bf: 0x2000, - // Block 0x27, offset 0x9c0 - 0x9c4: 0x4000, 0x9c5: 0x4000, - 0x9c6: 0x2000, 0x9c7: 0x2000, 0x9c8: 0x2000, 0x9c9: 0x2000, 0x9ca: 0x2000, 0x9cb: 0x2000, - 0x9cc: 0x2000, 0x9cd: 0x2000, 0x9ce: 0x4000, 0x9cf: 0x2000, 0x9d0: 0x2000, 0x9d1: 0x2000, - 0x9d2: 0x2000, 0x9d3: 0x2000, 0x9d4: 0x4000, 0x9d5: 0x2000, 0x9d6: 0x2000, 0x9d7: 0x2000, - 0x9d8: 0x2000, 0x9d9: 0x2000, 0x9da: 0x2000, 0x9db: 0x2000, 0x9dc: 0x2000, 0x9dd: 0x2000, - 0x9de: 0x2000, 0x9df: 0x2000, 0x9e0: 0x2000, 0x9e1: 0x2000, 0x9e3: 0x2000, - 0x9e8: 0x2000, 0x9e9: 0x2000, - 0x9ea: 0x4000, 0x9eb: 0x2000, 0x9ec: 0x2000, 0x9ed: 0x2000, 0x9ee: 0x2000, 0x9ef: 0x2000, - 0x9f0: 0x2000, 0x9f1: 0x2000, 0x9f2: 0x4000, 0x9f3: 0x4000, 0x9f4: 0x2000, 0x9f5: 0x4000, - 0x9f6: 0x2000, 0x9f7: 0x2000, 0x9f8: 0x2000, 0x9f9: 0x2000, 0x9fa: 0x4000, 0x9fb: 0x2000, - 0x9fc: 0x2000, 0x9fd: 0x4000, 0x9fe: 0x2000, 0x9ff: 0x2000, - // Block 0x28, offset 0xa00 - 0xa05: 0x4000, - 0xa0a: 0x4000, 0xa0b: 0x4000, - 0xa28: 0x4000, - 0xa3d: 0x2000, - // Block 0x29, offset 0xa40 - 0xa4c: 0x4000, 0xa4e: 0x4000, - 0xa53: 0x4000, 0xa54: 0x4000, 0xa55: 0x4000, 0xa57: 0x4000, - 0xa76: 0x2000, 0xa77: 0x2000, 0xa78: 0x2000, 0xa79: 0x2000, 0xa7a: 0x2000, 0xa7b: 0x2000, - 0xa7c: 0x2000, 0xa7d: 0x2000, 0xa7e: 0x2000, 0xa7f: 0x2000, - // Block 0x2a, offset 0xa80 - 0xa95: 0x4000, 0xa96: 0x4000, 0xa97: 0x4000, - 0xab0: 0x4000, - 0xabf: 0x4000, - // Block 0x2b, offset 0xac0 - 0xae6: 0x6000, 0xae7: 0x6000, 0xae8: 0x6000, 0xae9: 0x6000, - 0xaea: 0x6000, 0xaeb: 0x6000, 0xaec: 0x6000, 0xaed: 0x6000, - // Block 0x2c, offset 0xb00 - 0xb05: 0x6010, - 0xb06: 0x6011, - // Block 0x2d, offset 0xb40 - 0xb5b: 0x4000, 0xb5c: 0x4000, - // Block 0x2e, offset 0xb80 - 0xb90: 0x4000, - 0xb95: 0x4000, 0xb96: 
0x2000, 0xb97: 0x2000, - 0xb98: 0x2000, 0xb99: 0x2000, - // Block 0x2f, offset 0xbc0 - 0xbc0: 0x4000, 0xbc1: 0x4000, 0xbc2: 0x4000, 0xbc3: 0x4000, 0xbc4: 0x4000, 0xbc5: 0x4000, - 0xbc6: 0x4000, 0xbc7: 0x4000, 0xbc8: 0x4000, 0xbc9: 0x4000, 0xbca: 0x4000, 0xbcb: 0x4000, - 0xbcc: 0x4000, 0xbcd: 0x4000, 0xbce: 0x4000, 0xbcf: 0x4000, 0xbd0: 0x4000, 0xbd1: 0x4000, - 0xbd2: 0x4000, 0xbd3: 0x4000, 0xbd4: 0x4000, 0xbd5: 0x4000, 0xbd6: 0x4000, 0xbd7: 0x4000, - 0xbd8: 0x4000, 0xbd9: 0x4000, 0xbdb: 0x4000, 0xbdc: 0x4000, 0xbdd: 0x4000, - 0xbde: 0x4000, 0xbdf: 0x4000, 0xbe0: 0x4000, 0xbe1: 0x4000, 0xbe2: 0x4000, 0xbe3: 0x4000, - 0xbe4: 0x4000, 0xbe5: 0x4000, 0xbe6: 0x4000, 0xbe7: 0x4000, 0xbe8: 0x4000, 0xbe9: 0x4000, - 0xbea: 0x4000, 0xbeb: 0x4000, 0xbec: 0x4000, 0xbed: 0x4000, 0xbee: 0x4000, 0xbef: 0x4000, - 0xbf0: 0x4000, 0xbf1: 0x4000, 0xbf2: 0x4000, 0xbf3: 0x4000, 0xbf4: 0x4000, 0xbf5: 0x4000, - 0xbf6: 0x4000, 0xbf7: 0x4000, 0xbf8: 0x4000, 0xbf9: 0x4000, 0xbfa: 0x4000, 0xbfb: 0x4000, - 0xbfc: 0x4000, 0xbfd: 0x4000, 0xbfe: 0x4000, 0xbff: 0x4000, - // Block 0x30, offset 0xc00 - 0xc00: 0x4000, 0xc01: 0x4000, 0xc02: 0x4000, 0xc03: 0x4000, 0xc04: 0x4000, 0xc05: 0x4000, - 0xc06: 0x4000, 0xc07: 0x4000, 0xc08: 0x4000, 0xc09: 0x4000, 0xc0a: 0x4000, 0xc0b: 0x4000, - 0xc0c: 0x4000, 0xc0d: 0x4000, 0xc0e: 0x4000, 0xc0f: 0x4000, 0xc10: 0x4000, 0xc11: 0x4000, - 0xc12: 0x4000, 0xc13: 0x4000, 0xc14: 0x4000, 0xc15: 0x4000, 0xc16: 0x4000, 0xc17: 0x4000, - 0xc18: 0x4000, 0xc19: 0x4000, 0xc1a: 0x4000, 0xc1b: 0x4000, 0xc1c: 0x4000, 0xc1d: 0x4000, - 0xc1e: 0x4000, 0xc1f: 0x4000, 0xc20: 0x4000, 0xc21: 0x4000, 0xc22: 0x4000, 0xc23: 0x4000, - 0xc24: 0x4000, 0xc25: 0x4000, 0xc26: 0x4000, 0xc27: 0x4000, 0xc28: 0x4000, 0xc29: 0x4000, - 0xc2a: 0x4000, 0xc2b: 0x4000, 0xc2c: 0x4000, 0xc2d: 0x4000, 0xc2e: 0x4000, 0xc2f: 0x4000, - 0xc30: 0x4000, 0xc31: 0x4000, 0xc32: 0x4000, 0xc33: 0x4000, - // Block 0x31, offset 0xc40 - 0xc40: 0x4000, 0xc41: 0x4000, 0xc42: 0x4000, 0xc43: 0x4000, 0xc44: 0x4000, 0xc45: 0x4000, - 0xc46: 0x4000, 0xc47: 0x4000, 0xc48: 0x4000, 0xc49: 0x4000, 0xc4a: 0x4000, 0xc4b: 0x4000, - 0xc4c: 0x4000, 0xc4d: 0x4000, 0xc4e: 0x4000, 0xc4f: 0x4000, 0xc50: 0x4000, 0xc51: 0x4000, - 0xc52: 0x4000, 0xc53: 0x4000, 0xc54: 0x4000, 0xc55: 0x4000, - 0xc70: 0x4000, 0xc71: 0x4000, 0xc72: 0x4000, 0xc73: 0x4000, 0xc74: 0x4000, 0xc75: 0x4000, - 0xc76: 0x4000, 0xc77: 0x4000, 0xc78: 0x4000, 0xc79: 0x4000, 0xc7a: 0x4000, 0xc7b: 0x4000, - // Block 0x32, offset 0xc80 - 0xc80: 0x9012, 0xc81: 0x4013, 0xc82: 0x4014, 0xc83: 0x4000, 0xc84: 0x4000, 0xc85: 0x4000, - 0xc86: 0x4000, 0xc87: 0x4000, 0xc88: 0x4000, 0xc89: 0x4000, 0xc8a: 0x4000, 0xc8b: 0x4000, - 0xc8c: 0x4015, 0xc8d: 0x4015, 0xc8e: 0x4000, 0xc8f: 0x4000, 0xc90: 0x4000, 0xc91: 0x4000, - 0xc92: 0x4000, 0xc93: 0x4000, 0xc94: 0x4000, 0xc95: 0x4000, 0xc96: 0x4000, 0xc97: 0x4000, - 0xc98: 0x4000, 0xc99: 0x4000, 0xc9a: 0x4000, 0xc9b: 0x4000, 0xc9c: 0x4000, 0xc9d: 0x4000, - 0xc9e: 0x4000, 0xc9f: 0x4000, 0xca0: 0x4000, 0xca1: 0x4000, 0xca2: 0x4000, 0xca3: 0x4000, - 0xca4: 0x4000, 0xca5: 0x4000, 0xca6: 0x4000, 0xca7: 0x4000, 0xca8: 0x4000, 0xca9: 0x4000, - 0xcaa: 0x4000, 0xcab: 0x4000, 0xcac: 0x4000, 0xcad: 0x4000, 0xcae: 0x4000, 0xcaf: 0x4000, - 0xcb0: 0x4000, 0xcb1: 0x4000, 0xcb2: 0x4000, 0xcb3: 0x4000, 0xcb4: 0x4000, 0xcb5: 0x4000, - 0xcb6: 0x4000, 0xcb7: 0x4000, 0xcb8: 0x4000, 0xcb9: 0x4000, 0xcba: 0x4000, 0xcbb: 0x4000, - 0xcbc: 0x4000, 0xcbd: 0x4000, 0xcbe: 0x4000, - // Block 0x33, offset 0xcc0 - 0xcc1: 0x4000, 0xcc2: 0x4000, 0xcc3: 0x4000, 0xcc4: 0x4000, 0xcc5: 0x4000, - 0xcc6: 0x4000, 
0xcc7: 0x4000, 0xcc8: 0x4000, 0xcc9: 0x4000, 0xcca: 0x4000, 0xccb: 0x4000, - 0xccc: 0x4000, 0xccd: 0x4000, 0xcce: 0x4000, 0xccf: 0x4000, 0xcd0: 0x4000, 0xcd1: 0x4000, - 0xcd2: 0x4000, 0xcd3: 0x4000, 0xcd4: 0x4000, 0xcd5: 0x4000, 0xcd6: 0x4000, 0xcd7: 0x4000, - 0xcd8: 0x4000, 0xcd9: 0x4000, 0xcda: 0x4000, 0xcdb: 0x4000, 0xcdc: 0x4000, 0xcdd: 0x4000, - 0xcde: 0x4000, 0xcdf: 0x4000, 0xce0: 0x4000, 0xce1: 0x4000, 0xce2: 0x4000, 0xce3: 0x4000, - 0xce4: 0x4000, 0xce5: 0x4000, 0xce6: 0x4000, 0xce7: 0x4000, 0xce8: 0x4000, 0xce9: 0x4000, - 0xcea: 0x4000, 0xceb: 0x4000, 0xcec: 0x4000, 0xced: 0x4000, 0xcee: 0x4000, 0xcef: 0x4000, - 0xcf0: 0x4000, 0xcf1: 0x4000, 0xcf2: 0x4000, 0xcf3: 0x4000, 0xcf4: 0x4000, 0xcf5: 0x4000, - 0xcf6: 0x4000, 0xcf7: 0x4000, 0xcf8: 0x4000, 0xcf9: 0x4000, 0xcfa: 0x4000, 0xcfb: 0x4000, - 0xcfc: 0x4000, 0xcfd: 0x4000, 0xcfe: 0x4000, 0xcff: 0x4000, - // Block 0x34, offset 0xd00 - 0xd00: 0x4000, 0xd01: 0x4000, 0xd02: 0x4000, 0xd03: 0x4000, 0xd04: 0x4000, 0xd05: 0x4000, - 0xd06: 0x4000, 0xd07: 0x4000, 0xd08: 0x4000, 0xd09: 0x4000, 0xd0a: 0x4000, 0xd0b: 0x4000, - 0xd0c: 0x4000, 0xd0d: 0x4000, 0xd0e: 0x4000, 0xd0f: 0x4000, 0xd10: 0x4000, 0xd11: 0x4000, - 0xd12: 0x4000, 0xd13: 0x4000, 0xd14: 0x4000, 0xd15: 0x4000, 0xd16: 0x4000, - 0xd19: 0x4016, 0xd1a: 0x4017, 0xd1b: 0x4000, 0xd1c: 0x4000, 0xd1d: 0x4000, - 0xd1e: 0x4000, 0xd1f: 0x4000, 0xd20: 0x4000, 0xd21: 0x4018, 0xd22: 0x4019, 0xd23: 0x401a, - 0xd24: 0x401b, 0xd25: 0x401c, 0xd26: 0x401d, 0xd27: 0x401e, 0xd28: 0x401f, 0xd29: 0x4020, - 0xd2a: 0x4021, 0xd2b: 0x4022, 0xd2c: 0x4000, 0xd2d: 0x4010, 0xd2e: 0x4000, 0xd2f: 0x4023, - 0xd30: 0x4000, 0xd31: 0x4024, 0xd32: 0x4000, 0xd33: 0x4025, 0xd34: 0x4000, 0xd35: 0x4026, - 0xd36: 0x4000, 0xd37: 0x401a, 0xd38: 0x4000, 0xd39: 0x4027, 0xd3a: 0x4000, 0xd3b: 0x4028, - 0xd3c: 0x4000, 0xd3d: 0x4020, 0xd3e: 0x4000, 0xd3f: 0x4029, - // Block 0x35, offset 0xd40 - 0xd40: 0x4000, 0xd41: 0x402a, 0xd42: 0x4000, 0xd43: 0x402b, 0xd44: 0x402c, 0xd45: 0x4000, - 0xd46: 0x4017, 0xd47: 0x4000, 0xd48: 0x402d, 0xd49: 0x4000, 0xd4a: 0x402e, 0xd4b: 0x402f, - 0xd4c: 0x4030, 0xd4d: 0x4017, 0xd4e: 0x4016, 0xd4f: 0x4017, 0xd50: 0x4000, 0xd51: 0x4000, - 0xd52: 0x4031, 0xd53: 0x4000, 0xd54: 0x4000, 0xd55: 0x4031, 0xd56: 0x4000, 0xd57: 0x4000, - 0xd58: 0x4032, 0xd59: 0x4000, 0xd5a: 0x4000, 0xd5b: 0x4032, 0xd5c: 0x4000, 0xd5d: 0x4000, - 0xd5e: 0x4033, 0xd5f: 0x402e, 0xd60: 0x4034, 0xd61: 0x4035, 0xd62: 0x4034, 0xd63: 0x4036, - 0xd64: 0x4037, 0xd65: 0x4024, 0xd66: 0x4035, 0xd67: 0x4025, 0xd68: 0x4038, 0xd69: 0x4038, - 0xd6a: 0x4039, 0xd6b: 0x4039, 0xd6c: 0x403a, 0xd6d: 0x403a, 0xd6e: 0x4000, 0xd6f: 0x4035, - 0xd70: 0x4000, 0xd71: 0x4000, 0xd72: 0x403b, 0xd73: 0x403c, 0xd74: 0x4000, 0xd75: 0x4000, - 0xd76: 0x4000, 0xd77: 0x4000, 0xd78: 0x4000, 0xd79: 0x4000, 0xd7a: 0x4000, 0xd7b: 0x403d, - 0xd7c: 0x401c, 0xd7d: 0x4000, 0xd7e: 0x4000, 0xd7f: 0x4000, - // Block 0x36, offset 0xd80 - 0xd85: 0x4000, - 0xd86: 0x4000, 0xd87: 0x4000, 0xd88: 0x4000, 0xd89: 0x4000, 0xd8a: 0x4000, 0xd8b: 0x4000, - 0xd8c: 0x4000, 0xd8d: 0x4000, 0xd8e: 0x4000, 0xd8f: 0x4000, 0xd90: 0x4000, 0xd91: 0x4000, - 0xd92: 0x4000, 0xd93: 0x4000, 0xd94: 0x4000, 0xd95: 0x4000, 0xd96: 0x4000, 0xd97: 0x4000, - 0xd98: 0x4000, 0xd99: 0x4000, 0xd9a: 0x4000, 0xd9b: 0x4000, 0xd9c: 0x4000, 0xd9d: 0x4000, - 0xd9e: 0x4000, 0xd9f: 0x4000, 0xda0: 0x4000, 0xda1: 0x4000, 0xda2: 0x4000, 0xda3: 0x4000, - 0xda4: 0x4000, 0xda5: 0x4000, 0xda6: 0x4000, 0xda7: 0x4000, 0xda8: 0x4000, 0xda9: 0x4000, - 0xdaa: 0x4000, 0xdab: 0x4000, 0xdac: 0x4000, 0xdad: 0x4000, 0xdae: 0x4000, - 0xdb1: 
0x403e, 0xdb2: 0x403e, 0xdb3: 0x403e, 0xdb4: 0x403e, 0xdb5: 0x403e, - 0xdb6: 0x403e, 0xdb7: 0x403e, 0xdb8: 0x403e, 0xdb9: 0x403e, 0xdba: 0x403e, 0xdbb: 0x403e, - 0xdbc: 0x403e, 0xdbd: 0x403e, 0xdbe: 0x403e, 0xdbf: 0x403e, - // Block 0x37, offset 0xdc0 - 0xdc0: 0x4037, 0xdc1: 0x4037, 0xdc2: 0x4037, 0xdc3: 0x4037, 0xdc4: 0x4037, 0xdc5: 0x4037, - 0xdc6: 0x4037, 0xdc7: 0x4037, 0xdc8: 0x4037, 0xdc9: 0x4037, 0xdca: 0x4037, 0xdcb: 0x4037, - 0xdcc: 0x4037, 0xdcd: 0x4037, 0xdce: 0x4037, 0xdcf: 0x400e, 0xdd0: 0x403f, 0xdd1: 0x4040, - 0xdd2: 0x4041, 0xdd3: 0x4040, 0xdd4: 0x403f, 0xdd5: 0x4042, 0xdd6: 0x4043, 0xdd7: 0x4044, - 0xdd8: 0x4040, 0xdd9: 0x4041, 0xdda: 0x4040, 0xddb: 0x4045, 0xddc: 0x4009, 0xddd: 0x4045, - 0xdde: 0x4046, 0xddf: 0x4045, 0xde0: 0x4047, 0xde1: 0x400b, 0xde2: 0x400a, 0xde3: 0x400c, - 0xde4: 0x4048, 0xde5: 0x4000, 0xde6: 0x4000, 0xde7: 0x4000, 0xde8: 0x4000, 0xde9: 0x4000, - 0xdea: 0x4000, 0xdeb: 0x4000, 0xdec: 0x4000, 0xded: 0x4000, 0xdee: 0x4000, 0xdef: 0x4000, - 0xdf0: 0x4000, 0xdf1: 0x4000, 0xdf2: 0x4000, 0xdf3: 0x4000, 0xdf4: 0x4000, 0xdf5: 0x4000, - 0xdf6: 0x4000, 0xdf7: 0x4000, 0xdf8: 0x4000, 0xdf9: 0x4000, 0xdfa: 0x4000, 0xdfb: 0x4000, - 0xdfc: 0x4000, 0xdfd: 0x4000, 0xdfe: 0x4000, 0xdff: 0x4000, - // Block 0x38, offset 0xe00 - 0xe00: 0x4000, 0xe01: 0x4000, 0xe02: 0x4000, 0xe03: 0x4000, 0xe04: 0x4000, 0xe05: 0x4000, - 0xe06: 0x4000, 0xe07: 0x4000, 0xe08: 0x4000, 0xe09: 0x4000, 0xe0a: 0x4000, 0xe0b: 0x4000, - 0xe0c: 0x4000, 0xe0d: 0x4000, 0xe0e: 0x4000, 0xe10: 0x4000, 0xe11: 0x4000, - 0xe12: 0x4000, 0xe13: 0x4000, 0xe14: 0x4000, 0xe15: 0x4000, 0xe16: 0x4000, 0xe17: 0x4000, - 0xe18: 0x4000, 0xe19: 0x4000, 0xe1a: 0x4000, 0xe1b: 0x4000, 0xe1c: 0x4000, 0xe1d: 0x4000, - 0xe1e: 0x4000, 0xe1f: 0x4000, 0xe20: 0x4000, 0xe21: 0x4000, 0xe22: 0x4000, 0xe23: 0x4000, - 0xe24: 0x4000, 0xe25: 0x4000, 0xe26: 0x4000, 0xe27: 0x4000, 0xe28: 0x4000, 0xe29: 0x4000, - 0xe2a: 0x4000, 0xe2b: 0x4000, 0xe2c: 0x4000, 0xe2d: 0x4000, 0xe2e: 0x4000, 0xe2f: 0x4000, - 0xe30: 0x4000, 0xe31: 0x4000, 0xe32: 0x4000, 0xe33: 0x4000, 0xe34: 0x4000, 0xe35: 0x4000, - 0xe36: 0x4000, 0xe37: 0x4000, 0xe38: 0x4000, 0xe39: 0x4000, 0xe3a: 0x4000, - // Block 0x39, offset 0xe40 - 0xe40: 0x4000, 0xe41: 0x4000, 0xe42: 0x4000, 0xe43: 0x4000, 0xe44: 0x4000, 0xe45: 0x4000, - 0xe46: 0x4000, 0xe47: 0x4000, 0xe48: 0x4000, 0xe49: 0x4000, 0xe4a: 0x4000, 0xe4b: 0x4000, - 0xe4c: 0x4000, 0xe4d: 0x4000, 0xe4e: 0x4000, 0xe4f: 0x4000, 0xe50: 0x4000, 0xe51: 0x4000, - 0xe52: 0x4000, 0xe53: 0x4000, 0xe54: 0x4000, 0xe55: 0x4000, 0xe56: 0x4000, 0xe57: 0x4000, - 0xe58: 0x4000, 0xe59: 0x4000, 0xe5a: 0x4000, 0xe5b: 0x4000, 0xe5c: 0x4000, 0xe5d: 0x4000, - 0xe5e: 0x4000, 0xe5f: 0x4000, 0xe60: 0x4000, 0xe61: 0x4000, 0xe62: 0x4000, 0xe63: 0x4000, - 0xe70: 0x4000, 0xe71: 0x4000, 0xe72: 0x4000, 0xe73: 0x4000, 0xe74: 0x4000, 0xe75: 0x4000, - 0xe76: 0x4000, 0xe77: 0x4000, 0xe78: 0x4000, 0xe79: 0x4000, 0xe7a: 0x4000, 0xe7b: 0x4000, - 0xe7c: 0x4000, 0xe7d: 0x4000, 0xe7e: 0x4000, 0xe7f: 0x4000, - // Block 0x3a, offset 0xe80 - 0xe80: 0x4000, 0xe81: 0x4000, 0xe82: 0x4000, 0xe83: 0x4000, 0xe84: 0x4000, 0xe85: 0x4000, - 0xe86: 0x4000, 0xe87: 0x4000, 0xe88: 0x4000, 0xe89: 0x4000, 0xe8a: 0x4000, 0xe8b: 0x4000, - 0xe8c: 0x4000, 0xe8d: 0x4000, 0xe8e: 0x4000, 0xe8f: 0x4000, 0xe90: 0x4000, 0xe91: 0x4000, - 0xe92: 0x4000, 0xe93: 0x4000, 0xe94: 0x4000, 0xe95: 0x4000, 0xe96: 0x4000, 0xe97: 0x4000, - 0xe98: 0x4000, 0xe99: 0x4000, 0xe9a: 0x4000, 0xe9b: 0x4000, 0xe9c: 0x4000, 0xe9d: 0x4000, - 0xe9e: 0x4000, 0xea0: 0x4000, 0xea1: 0x4000, 0xea2: 0x4000, 0xea3: 0x4000, - 
0xea4: 0x4000, 0xea5: 0x4000, 0xea6: 0x4000, 0xea7: 0x4000, 0xea8: 0x4000, 0xea9: 0x4000, - 0xeaa: 0x4000, 0xeab: 0x4000, 0xeac: 0x4000, 0xead: 0x4000, 0xeae: 0x4000, 0xeaf: 0x4000, - 0xeb0: 0x4000, 0xeb1: 0x4000, 0xeb2: 0x4000, 0xeb3: 0x4000, 0xeb4: 0x4000, 0xeb5: 0x4000, - 0xeb6: 0x4000, 0xeb7: 0x4000, 0xeb8: 0x4000, 0xeb9: 0x4000, 0xeba: 0x4000, 0xebb: 0x4000, - 0xebc: 0x4000, 0xebd: 0x4000, 0xebe: 0x4000, 0xebf: 0x4000, - // Block 0x3b, offset 0xec0 - 0xec0: 0x4000, 0xec1: 0x4000, 0xec2: 0x4000, 0xec3: 0x4000, 0xec4: 0x4000, 0xec5: 0x4000, - 0xec6: 0x4000, 0xec7: 0x4000, 0xec8: 0x2000, 0xec9: 0x2000, 0xeca: 0x2000, 0xecb: 0x2000, - 0xecc: 0x2000, 0xecd: 0x2000, 0xece: 0x2000, 0xecf: 0x2000, 0xed0: 0x4000, 0xed1: 0x4000, - 0xed2: 0x4000, 0xed3: 0x4000, 0xed4: 0x4000, 0xed5: 0x4000, 0xed6: 0x4000, 0xed7: 0x4000, - 0xed8: 0x4000, 0xed9: 0x4000, 0xeda: 0x4000, 0xedb: 0x4000, 0xedc: 0x4000, 0xedd: 0x4000, - 0xede: 0x4000, 0xedf: 0x4000, 0xee0: 0x4000, 0xee1: 0x4000, 0xee2: 0x4000, 0xee3: 0x4000, - 0xee4: 0x4000, 0xee5: 0x4000, 0xee6: 0x4000, 0xee7: 0x4000, 0xee8: 0x4000, 0xee9: 0x4000, - 0xeea: 0x4000, 0xeeb: 0x4000, 0xeec: 0x4000, 0xeed: 0x4000, 0xeee: 0x4000, 0xeef: 0x4000, - 0xef0: 0x4000, 0xef1: 0x4000, 0xef2: 0x4000, 0xef3: 0x4000, 0xef4: 0x4000, 0xef5: 0x4000, - 0xef6: 0x4000, 0xef7: 0x4000, 0xef8: 0x4000, 0xef9: 0x4000, 0xefa: 0x4000, 0xefb: 0x4000, - 0xefc: 0x4000, 0xefd: 0x4000, 0xefe: 0x4000, 0xeff: 0x4000, - // Block 0x3c, offset 0xf00 - 0xf00: 0x4000, 0xf01: 0x4000, 0xf02: 0x4000, 0xf03: 0x4000, 0xf04: 0x4000, 0xf05: 0x4000, - 0xf06: 0x4000, 0xf07: 0x4000, 0xf08: 0x4000, 0xf09: 0x4000, 0xf0a: 0x4000, 0xf0b: 0x4000, - 0xf0c: 0x4000, 0xf0d: 0x4000, 0xf0e: 0x4000, 0xf0f: 0x4000, 0xf10: 0x4000, 0xf11: 0x4000, - 0xf12: 0x4000, 0xf13: 0x4000, 0xf14: 0x4000, 0xf15: 0x4000, 0xf16: 0x4000, 0xf17: 0x4000, - 0xf18: 0x4000, 0xf19: 0x4000, 0xf1a: 0x4000, 0xf1b: 0x4000, 0xf1c: 0x4000, 0xf1d: 0x4000, - 0xf1e: 0x4000, 0xf1f: 0x4000, 0xf20: 0x4000, 0xf21: 0x4000, 0xf22: 0x4000, 0xf23: 0x4000, - 0xf24: 0x4000, 0xf25: 0x4000, 0xf26: 0x4000, 0xf27: 0x4000, 0xf28: 0x4000, 0xf29: 0x4000, - 0xf2a: 0x4000, 0xf2b: 0x4000, 0xf2c: 0x4000, 0xf2d: 0x4000, 0xf2e: 0x4000, 0xf2f: 0x4000, - 0xf30: 0x4000, 0xf31: 0x4000, 0xf32: 0x4000, 0xf33: 0x4000, 0xf34: 0x4000, 0xf35: 0x4000, - 0xf36: 0x4000, 0xf37: 0x4000, 0xf38: 0x4000, 0xf39: 0x4000, 0xf3a: 0x4000, 0xf3b: 0x4000, - 0xf3c: 0x4000, 0xf3d: 0x4000, 0xf3e: 0x4000, - // Block 0x3d, offset 0xf40 - 0xf40: 0x4000, 0xf41: 0x4000, 0xf42: 0x4000, 0xf43: 0x4000, 0xf44: 0x4000, 0xf45: 0x4000, - 0xf46: 0x4000, 0xf47: 0x4000, 0xf48: 0x4000, 0xf49: 0x4000, 0xf4a: 0x4000, 0xf4b: 0x4000, - 0xf4c: 0x4000, 0xf50: 0x4000, 0xf51: 0x4000, - 0xf52: 0x4000, 0xf53: 0x4000, 0xf54: 0x4000, 0xf55: 0x4000, 0xf56: 0x4000, 0xf57: 0x4000, - 0xf58: 0x4000, 0xf59: 0x4000, 0xf5a: 0x4000, 0xf5b: 0x4000, 0xf5c: 0x4000, 0xf5d: 0x4000, - 0xf5e: 0x4000, 0xf5f: 0x4000, 0xf60: 0x4000, 0xf61: 0x4000, 0xf62: 0x4000, 0xf63: 0x4000, - 0xf64: 0x4000, 0xf65: 0x4000, 0xf66: 0x4000, 0xf67: 0x4000, 0xf68: 0x4000, 0xf69: 0x4000, - 0xf6a: 0x4000, 0xf6b: 0x4000, 0xf6c: 0x4000, 0xf6d: 0x4000, 0xf6e: 0x4000, 0xf6f: 0x4000, - 0xf70: 0x4000, 0xf71: 0x4000, 0xf72: 0x4000, 0xf73: 0x4000, 0xf74: 0x4000, 0xf75: 0x4000, - 0xf76: 0x4000, 0xf77: 0x4000, 0xf78: 0x4000, 0xf79: 0x4000, 0xf7a: 0x4000, 0xf7b: 0x4000, - 0xf7c: 0x4000, 0xf7d: 0x4000, 0xf7e: 0x4000, 0xf7f: 0x4000, - // Block 0x3e, offset 0xf80 - 0xf80: 0x4000, 0xf81: 0x4000, 0xf82: 0x4000, 0xf83: 0x4000, 0xf84: 0x4000, 0xf85: 0x4000, - 0xf86: 0x4000, - // Block 
0x3f, offset 0xfc0 - 0xfe0: 0x4000, 0xfe1: 0x4000, 0xfe2: 0x4000, 0xfe3: 0x4000, - 0xfe4: 0x4000, 0xfe5: 0x4000, 0xfe6: 0x4000, 0xfe7: 0x4000, 0xfe8: 0x4000, 0xfe9: 0x4000, - 0xfea: 0x4000, 0xfeb: 0x4000, 0xfec: 0x4000, 0xfed: 0x4000, 0xfee: 0x4000, 0xfef: 0x4000, - 0xff0: 0x4000, 0xff1: 0x4000, 0xff2: 0x4000, 0xff3: 0x4000, 0xff4: 0x4000, 0xff5: 0x4000, - 0xff6: 0x4000, 0xff7: 0x4000, 0xff8: 0x4000, 0xff9: 0x4000, 0xffa: 0x4000, 0xffb: 0x4000, - 0xffc: 0x4000, - // Block 0x40, offset 0x1000 - 0x1000: 0x4000, 0x1001: 0x4000, 0x1002: 0x4000, 0x1003: 0x4000, 0x1004: 0x4000, 0x1005: 0x4000, - 0x1006: 0x4000, 0x1007: 0x4000, 0x1008: 0x4000, 0x1009: 0x4000, 0x100a: 0x4000, 0x100b: 0x4000, - 0x100c: 0x4000, 0x100d: 0x4000, 0x100e: 0x4000, 0x100f: 0x4000, 0x1010: 0x4000, 0x1011: 0x4000, - 0x1012: 0x4000, 0x1013: 0x4000, 0x1014: 0x4000, 0x1015: 0x4000, 0x1016: 0x4000, 0x1017: 0x4000, - 0x1018: 0x4000, 0x1019: 0x4000, 0x101a: 0x4000, 0x101b: 0x4000, 0x101c: 0x4000, 0x101d: 0x4000, - 0x101e: 0x4000, 0x101f: 0x4000, 0x1020: 0x4000, 0x1021: 0x4000, 0x1022: 0x4000, 0x1023: 0x4000, - // Block 0x41, offset 0x1040 - 0x1040: 0x2000, 0x1041: 0x2000, 0x1042: 0x2000, 0x1043: 0x2000, 0x1044: 0x2000, 0x1045: 0x2000, - 0x1046: 0x2000, 0x1047: 0x2000, 0x1048: 0x2000, 0x1049: 0x2000, 0x104a: 0x2000, 0x104b: 0x2000, - 0x104c: 0x2000, 0x104d: 0x2000, 0x104e: 0x2000, 0x104f: 0x2000, 0x1050: 0x4000, 0x1051: 0x4000, - 0x1052: 0x4000, 0x1053: 0x4000, 0x1054: 0x4000, 0x1055: 0x4000, 0x1056: 0x4000, 0x1057: 0x4000, - 0x1058: 0x4000, 0x1059: 0x4000, - 0x1070: 0x4000, 0x1071: 0x4000, 0x1072: 0x4000, 0x1073: 0x4000, 0x1074: 0x4000, 0x1075: 0x4000, - 0x1076: 0x4000, 0x1077: 0x4000, 0x1078: 0x4000, 0x1079: 0x4000, 0x107a: 0x4000, 0x107b: 0x4000, - 0x107c: 0x4000, 0x107d: 0x4000, 0x107e: 0x4000, 0x107f: 0x4000, - // Block 0x42, offset 0x1080 - 0x1080: 0x4000, 0x1081: 0x4000, 0x1082: 0x4000, 0x1083: 0x4000, 0x1084: 0x4000, 0x1085: 0x4000, - 0x1086: 0x4000, 0x1087: 0x4000, 0x1088: 0x4000, 0x1089: 0x4000, 0x108a: 0x4000, 0x108b: 0x4000, - 0x108c: 0x4000, 0x108d: 0x4000, 0x108e: 0x4000, 0x108f: 0x4000, 0x1090: 0x4000, 0x1091: 0x4000, - 0x1092: 0x4000, 0x1094: 0x4000, 0x1095: 0x4000, 0x1096: 0x4000, 0x1097: 0x4000, - 0x1098: 0x4000, 0x1099: 0x4000, 0x109a: 0x4000, 0x109b: 0x4000, 0x109c: 0x4000, 0x109d: 0x4000, - 0x109e: 0x4000, 0x109f: 0x4000, 0x10a0: 0x4000, 0x10a1: 0x4000, 0x10a2: 0x4000, 0x10a3: 0x4000, - 0x10a4: 0x4000, 0x10a5: 0x4000, 0x10a6: 0x4000, 0x10a8: 0x4000, 0x10a9: 0x4000, - 0x10aa: 0x4000, 0x10ab: 0x4000, - // Block 0x43, offset 0x10c0 - 0x10c1: 0x9012, 0x10c2: 0x9012, 0x10c3: 0x9012, 0x10c4: 0x9012, 0x10c5: 0x9012, - 0x10c6: 0x9012, 0x10c7: 0x9012, 0x10c8: 0x9012, 0x10c9: 0x9012, 0x10ca: 0x9012, 0x10cb: 0x9012, - 0x10cc: 0x9012, 0x10cd: 0x9012, 0x10ce: 0x9012, 0x10cf: 0x9012, 0x10d0: 0x9012, 0x10d1: 0x9012, - 0x10d2: 0x9012, 0x10d3: 0x9012, 0x10d4: 0x9012, 0x10d5: 0x9012, 0x10d6: 0x9012, 0x10d7: 0x9012, - 0x10d8: 0x9012, 0x10d9: 0x9012, 0x10da: 0x9012, 0x10db: 0x9012, 0x10dc: 0x9012, 0x10dd: 0x9012, - 0x10de: 0x9012, 0x10df: 0x9012, 0x10e0: 0x9049, 0x10e1: 0x9049, 0x10e2: 0x9049, 0x10e3: 0x9049, - 0x10e4: 0x9049, 0x10e5: 0x9049, 0x10e6: 0x9049, 0x10e7: 0x9049, 0x10e8: 0x9049, 0x10e9: 0x9049, - 0x10ea: 0x9049, 0x10eb: 0x9049, 0x10ec: 0x9049, 0x10ed: 0x9049, 0x10ee: 0x9049, 0x10ef: 0x9049, - 0x10f0: 0x9049, 0x10f1: 0x9049, 0x10f2: 0x9049, 0x10f3: 0x9049, 0x10f4: 0x9049, 0x10f5: 0x9049, - 0x10f6: 0x9049, 0x10f7: 0x9049, 0x10f8: 0x9049, 0x10f9: 0x9049, 0x10fa: 0x9049, 0x10fb: 0x9049, - 0x10fc: 0x9049, 0x10fd: 0x9049, 
0x10fe: 0x9049, 0x10ff: 0x9049, - // Block 0x44, offset 0x1100 - 0x1100: 0x9049, 0x1101: 0x9049, 0x1102: 0x9049, 0x1103: 0x9049, 0x1104: 0x9049, 0x1105: 0x9049, - 0x1106: 0x9049, 0x1107: 0x9049, 0x1108: 0x9049, 0x1109: 0x9049, 0x110a: 0x9049, 0x110b: 0x9049, - 0x110c: 0x9049, 0x110d: 0x9049, 0x110e: 0x9049, 0x110f: 0x9049, 0x1110: 0x9049, 0x1111: 0x9049, - 0x1112: 0x9049, 0x1113: 0x9049, 0x1114: 0x9049, 0x1115: 0x9049, 0x1116: 0x9049, 0x1117: 0x9049, - 0x1118: 0x9049, 0x1119: 0x9049, 0x111a: 0x9049, 0x111b: 0x9049, 0x111c: 0x9049, 0x111d: 0x9049, - 0x111e: 0x9049, 0x111f: 0x904a, 0x1120: 0x904b, 0x1121: 0xb04c, 0x1122: 0xb04d, 0x1123: 0xb04d, - 0x1124: 0xb04e, 0x1125: 0xb04f, 0x1126: 0xb050, 0x1127: 0xb051, 0x1128: 0xb052, 0x1129: 0xb053, - 0x112a: 0xb054, 0x112b: 0xb055, 0x112c: 0xb056, 0x112d: 0xb057, 0x112e: 0xb058, 0x112f: 0xb059, - 0x1130: 0xb05a, 0x1131: 0xb05b, 0x1132: 0xb05c, 0x1133: 0xb05d, 0x1134: 0xb05e, 0x1135: 0xb05f, - 0x1136: 0xb060, 0x1137: 0xb061, 0x1138: 0xb062, 0x1139: 0xb063, 0x113a: 0xb064, 0x113b: 0xb065, - 0x113c: 0xb052, 0x113d: 0xb066, 0x113e: 0xb067, 0x113f: 0xb055, - // Block 0x45, offset 0x1140 - 0x1140: 0xb068, 0x1141: 0xb069, 0x1142: 0xb06a, 0x1143: 0xb06b, 0x1144: 0xb05a, 0x1145: 0xb056, - 0x1146: 0xb06c, 0x1147: 0xb06d, 0x1148: 0xb06b, 0x1149: 0xb06e, 0x114a: 0xb06b, 0x114b: 0xb06f, - 0x114c: 0xb06f, 0x114d: 0xb070, 0x114e: 0xb070, 0x114f: 0xb071, 0x1150: 0xb056, 0x1151: 0xb072, - 0x1152: 0xb073, 0x1153: 0xb072, 0x1154: 0xb074, 0x1155: 0xb073, 0x1156: 0xb075, 0x1157: 0xb075, - 0x1158: 0xb076, 0x1159: 0xb076, 0x115a: 0xb077, 0x115b: 0xb077, 0x115c: 0xb073, 0x115d: 0xb078, - 0x115e: 0xb079, 0x115f: 0xb067, 0x1160: 0xb07a, 0x1161: 0xb07b, 0x1162: 0xb07b, 0x1163: 0xb07b, - 0x1164: 0xb07b, 0x1165: 0xb07b, 0x1166: 0xb07b, 0x1167: 0xb07b, 0x1168: 0xb07b, 0x1169: 0xb07b, - 0x116a: 0xb07b, 0x116b: 0xb07b, 0x116c: 0xb07b, 0x116d: 0xb07b, 0x116e: 0xb07b, 0x116f: 0xb07b, - 0x1170: 0xb07c, 0x1171: 0xb07c, 0x1172: 0xb07c, 0x1173: 0xb07c, 0x1174: 0xb07c, 0x1175: 0xb07c, - 0x1176: 0xb07c, 0x1177: 0xb07c, 0x1178: 0xb07c, 0x1179: 0xb07c, 0x117a: 0xb07c, 0x117b: 0xb07c, - 0x117c: 0xb07c, 0x117d: 0xb07c, 0x117e: 0xb07c, - // Block 0x46, offset 0x1180 - 0x1182: 0xb07d, 0x1183: 0xb07e, 0x1184: 0xb07f, 0x1185: 0xb080, - 0x1186: 0xb07f, 0x1187: 0xb07e, 0x118a: 0xb081, 0x118b: 0xb082, - 0x118c: 0xb083, 0x118d: 0xb07f, 0x118e: 0xb080, 0x118f: 0xb07f, - 0x1192: 0xb084, 0x1193: 0xb085, 0x1194: 0xb084, 0x1195: 0xb086, 0x1196: 0xb084, 0x1197: 0xb087, - 0x119a: 0xb088, 0x119b: 0xb089, 0x119c: 0xb08a, - 0x11a0: 0x908b, 0x11a1: 0x908b, 0x11a2: 0x908c, 0x11a3: 0x908d, - 0x11a4: 0x908b, 0x11a5: 0x908e, 0x11a6: 0x908f, 0x11a8: 0xb090, 0x11a9: 0xb091, - 0x11aa: 0xb092, 0x11ab: 0xb091, 0x11ac: 0xb093, 0x11ad: 0xb094, 0x11ae: 0xb095, - 0x11bd: 0x2000, - // Block 0x47, offset 0x11c0 - 0x11e0: 0x4000, 0x11e1: 0x4000, - // Block 0x48, offset 0x1200 - 0x1200: 0x4000, 0x1201: 0x4000, 0x1202: 0x4000, 0x1203: 0x4000, 0x1204: 0x4000, 0x1205: 0x4000, - 0x1206: 0x4000, 0x1207: 0x4000, 0x1208: 0x4000, 0x1209: 0x4000, 0x120a: 0x4000, 0x120b: 0x4000, - 0x120c: 0x4000, 0x120d: 0x4000, 0x120e: 0x4000, 0x120f: 0x4000, 0x1210: 0x4000, 0x1211: 0x4000, - 0x1212: 0x4000, 0x1213: 0x4000, 0x1214: 0x4000, 0x1215: 0x4000, 0x1216: 0x4000, 0x1217: 0x4000, - 0x1218: 0x4000, 0x1219: 0x4000, 0x121a: 0x4000, 0x121b: 0x4000, 0x121c: 0x4000, 0x121d: 0x4000, - 0x121e: 0x4000, 0x121f: 0x4000, 0x1220: 0x4000, 0x1221: 0x4000, 0x1222: 0x4000, 0x1223: 0x4000, - 0x1224: 0x4000, 0x1225: 0x4000, 0x1226: 0x4000, 0x1227: 0x4000, 0x1228: 
0x4000, 0x1229: 0x4000, - 0x122a: 0x4000, 0x122b: 0x4000, 0x122c: 0x4000, - // Block 0x49, offset 0x1240 - 0x1240: 0x4000, 0x1241: 0x4000, 0x1242: 0x4000, 0x1243: 0x4000, 0x1244: 0x4000, 0x1245: 0x4000, - 0x1246: 0x4000, 0x1247: 0x4000, 0x1248: 0x4000, 0x1249: 0x4000, 0x124a: 0x4000, 0x124b: 0x4000, - 0x124c: 0x4000, 0x124d: 0x4000, 0x124e: 0x4000, 0x124f: 0x4000, 0x1250: 0x4000, 0x1251: 0x4000, - 0x1252: 0x4000, 0x1253: 0x4000, 0x1254: 0x4000, 0x1255: 0x4000, 0x1256: 0x4000, 0x1257: 0x4000, - 0x1258: 0x4000, 0x1259: 0x4000, 0x125a: 0x4000, 0x125b: 0x4000, 0x125c: 0x4000, 0x125d: 0x4000, - 0x125e: 0x4000, 0x125f: 0x4000, 0x1260: 0x4000, 0x1261: 0x4000, 0x1262: 0x4000, 0x1263: 0x4000, - 0x1264: 0x4000, 0x1265: 0x4000, 0x1266: 0x4000, 0x1267: 0x4000, 0x1268: 0x4000, 0x1269: 0x4000, - 0x126a: 0x4000, 0x126b: 0x4000, 0x126c: 0x4000, 0x126d: 0x4000, 0x126e: 0x4000, 0x126f: 0x4000, - 0x1270: 0x4000, 0x1271: 0x4000, 0x1272: 0x4000, - // Block 0x4a, offset 0x1280 - 0x1280: 0x4000, 0x1281: 0x4000, 0x1282: 0x4000, 0x1283: 0x4000, 0x1284: 0x4000, 0x1285: 0x4000, - 0x1286: 0x4000, 0x1287: 0x4000, 0x1288: 0x4000, 0x1289: 0x4000, 0x128a: 0x4000, 0x128b: 0x4000, - 0x128c: 0x4000, 0x128d: 0x4000, 0x128e: 0x4000, 0x128f: 0x4000, 0x1290: 0x4000, 0x1291: 0x4000, - 0x1292: 0x4000, 0x1293: 0x4000, 0x1294: 0x4000, 0x1295: 0x4000, 0x1296: 0x4000, 0x1297: 0x4000, - 0x1298: 0x4000, 0x1299: 0x4000, 0x129a: 0x4000, 0x129b: 0x4000, 0x129c: 0x4000, 0x129d: 0x4000, - 0x129e: 0x4000, - // Block 0x4b, offset 0x12c0 - 0x12f0: 0x4000, 0x12f1: 0x4000, 0x12f2: 0x4000, 0x12f3: 0x4000, 0x12f4: 0x4000, 0x12f5: 0x4000, - 0x12f6: 0x4000, 0x12f7: 0x4000, 0x12f8: 0x4000, 0x12f9: 0x4000, 0x12fa: 0x4000, 0x12fb: 0x4000, - 0x12fc: 0x4000, 0x12fd: 0x4000, 0x12fe: 0x4000, 0x12ff: 0x4000, - // Block 0x4c, offset 0x1300 - 0x1300: 0x4000, 0x1301: 0x4000, 0x1302: 0x4000, 0x1303: 0x4000, 0x1304: 0x4000, 0x1305: 0x4000, - 0x1306: 0x4000, 0x1307: 0x4000, 0x1308: 0x4000, 0x1309: 0x4000, 0x130a: 0x4000, 0x130b: 0x4000, - 0x130c: 0x4000, 0x130d: 0x4000, 0x130e: 0x4000, 0x130f: 0x4000, 0x1310: 0x4000, 0x1311: 0x4000, - 0x1312: 0x4000, 0x1313: 0x4000, 0x1314: 0x4000, 0x1315: 0x4000, 0x1316: 0x4000, 0x1317: 0x4000, - 0x1318: 0x4000, 0x1319: 0x4000, 0x131a: 0x4000, 0x131b: 0x4000, 0x131c: 0x4000, 0x131d: 0x4000, - 0x131e: 0x4000, 0x131f: 0x4000, 0x1320: 0x4000, 0x1321: 0x4000, 0x1322: 0x4000, 0x1323: 0x4000, - 0x1324: 0x4000, 0x1325: 0x4000, 0x1326: 0x4000, 0x1327: 0x4000, 0x1328: 0x4000, 0x1329: 0x4000, - 0x132a: 0x4000, 0x132b: 0x4000, 0x132c: 0x4000, 0x132d: 0x4000, 0x132e: 0x4000, 0x132f: 0x4000, - 0x1330: 0x4000, 0x1331: 0x4000, 0x1332: 0x4000, 0x1333: 0x4000, 0x1334: 0x4000, 0x1335: 0x4000, - 0x1336: 0x4000, 0x1337: 0x4000, 0x1338: 0x4000, 0x1339: 0x4000, 0x133a: 0x4000, 0x133b: 0x4000, - // Block 0x4d, offset 0x1340 - 0x1344: 0x4000, - // Block 0x4e, offset 0x1380 - 0x138f: 0x4000, - // Block 0x4f, offset 0x13c0 - 0x13c0: 0x2000, 0x13c1: 0x2000, 0x13c2: 0x2000, 0x13c3: 0x2000, 0x13c4: 0x2000, 0x13c5: 0x2000, - 0x13c6: 0x2000, 0x13c7: 0x2000, 0x13c8: 0x2000, 0x13c9: 0x2000, 0x13ca: 0x2000, - 0x13d0: 0x2000, 0x13d1: 0x2000, - 0x13d2: 0x2000, 0x13d3: 0x2000, 0x13d4: 0x2000, 0x13d5: 0x2000, 0x13d6: 0x2000, 0x13d7: 0x2000, - 0x13d8: 0x2000, 0x13d9: 0x2000, 0x13da: 0x2000, 0x13db: 0x2000, 0x13dc: 0x2000, 0x13dd: 0x2000, - 0x13de: 0x2000, 0x13df: 0x2000, 0x13e0: 0x2000, 0x13e1: 0x2000, 0x13e2: 0x2000, 0x13e3: 0x2000, - 0x13e4: 0x2000, 0x13e5: 0x2000, 0x13e6: 0x2000, 0x13e7: 0x2000, 0x13e8: 0x2000, 0x13e9: 0x2000, - 0x13ea: 0x2000, 0x13eb: 0x2000, 
0x13ec: 0x2000, 0x13ed: 0x2000, - 0x13f0: 0x2000, 0x13f1: 0x2000, 0x13f2: 0x2000, 0x13f3: 0x2000, 0x13f4: 0x2000, 0x13f5: 0x2000, - 0x13f6: 0x2000, 0x13f7: 0x2000, 0x13f8: 0x2000, 0x13f9: 0x2000, 0x13fa: 0x2000, 0x13fb: 0x2000, - 0x13fc: 0x2000, 0x13fd: 0x2000, 0x13fe: 0x2000, 0x13ff: 0x2000, - // Block 0x50, offset 0x1400 - 0x1400: 0x2000, 0x1401: 0x2000, 0x1402: 0x2000, 0x1403: 0x2000, 0x1404: 0x2000, 0x1405: 0x2000, - 0x1406: 0x2000, 0x1407: 0x2000, 0x1408: 0x2000, 0x1409: 0x2000, 0x140a: 0x2000, 0x140b: 0x2000, - 0x140c: 0x2000, 0x140d: 0x2000, 0x140e: 0x2000, 0x140f: 0x2000, 0x1410: 0x2000, 0x1411: 0x2000, - 0x1412: 0x2000, 0x1413: 0x2000, 0x1414: 0x2000, 0x1415: 0x2000, 0x1416: 0x2000, 0x1417: 0x2000, - 0x1418: 0x2000, 0x1419: 0x2000, 0x141a: 0x2000, 0x141b: 0x2000, 0x141c: 0x2000, 0x141d: 0x2000, - 0x141e: 0x2000, 0x141f: 0x2000, 0x1420: 0x2000, 0x1421: 0x2000, 0x1422: 0x2000, 0x1423: 0x2000, - 0x1424: 0x2000, 0x1425: 0x2000, 0x1426: 0x2000, 0x1427: 0x2000, 0x1428: 0x2000, 0x1429: 0x2000, - 0x1430: 0x2000, 0x1431: 0x2000, 0x1432: 0x2000, 0x1433: 0x2000, 0x1434: 0x2000, 0x1435: 0x2000, - 0x1436: 0x2000, 0x1437: 0x2000, 0x1438: 0x2000, 0x1439: 0x2000, 0x143a: 0x2000, 0x143b: 0x2000, - 0x143c: 0x2000, 0x143d: 0x2000, 0x143e: 0x2000, 0x143f: 0x2000, - // Block 0x51, offset 0x1440 - 0x1440: 0x2000, 0x1441: 0x2000, 0x1442: 0x2000, 0x1443: 0x2000, 0x1444: 0x2000, 0x1445: 0x2000, - 0x1446: 0x2000, 0x1447: 0x2000, 0x1448: 0x2000, 0x1449: 0x2000, 0x144a: 0x2000, 0x144b: 0x2000, - 0x144c: 0x2000, 0x144d: 0x2000, 0x144e: 0x4000, 0x144f: 0x2000, 0x1450: 0x2000, 0x1451: 0x4000, - 0x1452: 0x4000, 0x1453: 0x4000, 0x1454: 0x4000, 0x1455: 0x4000, 0x1456: 0x4000, 0x1457: 0x4000, - 0x1458: 0x4000, 0x1459: 0x4000, 0x145a: 0x4000, 0x145b: 0x2000, 0x145c: 0x2000, 0x145d: 0x2000, - 0x145e: 0x2000, 0x145f: 0x2000, 0x1460: 0x2000, 0x1461: 0x2000, 0x1462: 0x2000, 0x1463: 0x2000, - 0x1464: 0x2000, 0x1465: 0x2000, 0x1466: 0x2000, 0x1467: 0x2000, 0x1468: 0x2000, 0x1469: 0x2000, - 0x146a: 0x2000, 0x146b: 0x2000, 0x146c: 0x2000, - // Block 0x52, offset 0x1480 - 0x1480: 0x4000, 0x1481: 0x4000, 0x1482: 0x4000, - 0x1490: 0x4000, 0x1491: 0x4000, - 0x1492: 0x4000, 0x1493: 0x4000, 0x1494: 0x4000, 0x1495: 0x4000, 0x1496: 0x4000, 0x1497: 0x4000, - 0x1498: 0x4000, 0x1499: 0x4000, 0x149a: 0x4000, 0x149b: 0x4000, 0x149c: 0x4000, 0x149d: 0x4000, - 0x149e: 0x4000, 0x149f: 0x4000, 0x14a0: 0x4000, 0x14a1: 0x4000, 0x14a2: 0x4000, 0x14a3: 0x4000, - 0x14a4: 0x4000, 0x14a5: 0x4000, 0x14a6: 0x4000, 0x14a7: 0x4000, 0x14a8: 0x4000, 0x14a9: 0x4000, - 0x14aa: 0x4000, 0x14ab: 0x4000, 0x14ac: 0x4000, 0x14ad: 0x4000, 0x14ae: 0x4000, 0x14af: 0x4000, - 0x14b0: 0x4000, 0x14b1: 0x4000, 0x14b2: 0x4000, 0x14b3: 0x4000, 0x14b4: 0x4000, 0x14b5: 0x4000, - 0x14b6: 0x4000, 0x14b7: 0x4000, 0x14b8: 0x4000, 0x14b9: 0x4000, 0x14ba: 0x4000, 0x14bb: 0x4000, - // Block 0x53, offset 0x14c0 - 0x14c0: 0x4000, 0x14c1: 0x4000, 0x14c2: 0x4000, 0x14c3: 0x4000, 0x14c4: 0x4000, 0x14c5: 0x4000, - 0x14c6: 0x4000, 0x14c7: 0x4000, 0x14c8: 0x4000, - 0x14d0: 0x4000, 0x14d1: 0x4000, - 0x14e0: 0x4000, 0x14e1: 0x4000, 0x14e2: 0x4000, 0x14e3: 0x4000, - 0x14e4: 0x4000, 0x14e5: 0x4000, - // Block 0x54, offset 0x1500 - 0x1500: 0x4000, 0x1501: 0x4000, 0x1502: 0x4000, 0x1503: 0x4000, 0x1504: 0x4000, 0x1505: 0x4000, - 0x1506: 0x4000, 0x1507: 0x4000, 0x1508: 0x4000, 0x1509: 0x4000, 0x150a: 0x4000, 0x150b: 0x4000, - 0x150c: 0x4000, 0x150d: 0x4000, 0x150e: 0x4000, 0x150f: 0x4000, 0x1510: 0x4000, 0x1511: 0x4000, - 0x1512: 0x4000, 0x1513: 0x4000, 0x1514: 0x4000, 0x1515: 0x4000, 0x1516: 
0x4000, 0x1517: 0x4000, - 0x1518: 0x4000, 0x1519: 0x4000, 0x151a: 0x4000, 0x151b: 0x4000, 0x151c: 0x4000, 0x151d: 0x4000, - 0x151e: 0x4000, 0x151f: 0x4000, 0x1520: 0x4000, - 0x152d: 0x4000, 0x152e: 0x4000, 0x152f: 0x4000, - 0x1530: 0x4000, 0x1531: 0x4000, 0x1532: 0x4000, 0x1533: 0x4000, 0x1534: 0x4000, 0x1535: 0x4000, - 0x1537: 0x4000, 0x1538: 0x4000, 0x1539: 0x4000, 0x153a: 0x4000, 0x153b: 0x4000, - 0x153c: 0x4000, 0x153d: 0x4000, 0x153e: 0x4000, 0x153f: 0x4000, - // Block 0x55, offset 0x1540 - 0x1540: 0x4000, 0x1541: 0x4000, 0x1542: 0x4000, 0x1543: 0x4000, 0x1544: 0x4000, 0x1545: 0x4000, - 0x1546: 0x4000, 0x1547: 0x4000, 0x1548: 0x4000, 0x1549: 0x4000, 0x154a: 0x4000, 0x154b: 0x4000, - 0x154c: 0x4000, 0x154d: 0x4000, 0x154e: 0x4000, 0x154f: 0x4000, 0x1550: 0x4000, 0x1551: 0x4000, - 0x1552: 0x4000, 0x1553: 0x4000, 0x1554: 0x4000, 0x1555: 0x4000, 0x1556: 0x4000, 0x1557: 0x4000, - 0x1558: 0x4000, 0x1559: 0x4000, 0x155a: 0x4000, 0x155b: 0x4000, 0x155c: 0x4000, 0x155d: 0x4000, - 0x155e: 0x4000, 0x155f: 0x4000, 0x1560: 0x4000, 0x1561: 0x4000, 0x1562: 0x4000, 0x1563: 0x4000, - 0x1564: 0x4000, 0x1565: 0x4000, 0x1566: 0x4000, 0x1567: 0x4000, 0x1568: 0x4000, 0x1569: 0x4000, - 0x156a: 0x4000, 0x156b: 0x4000, 0x156c: 0x4000, 0x156d: 0x4000, 0x156e: 0x4000, 0x156f: 0x4000, - 0x1570: 0x4000, 0x1571: 0x4000, 0x1572: 0x4000, 0x1573: 0x4000, 0x1574: 0x4000, 0x1575: 0x4000, - 0x1576: 0x4000, 0x1577: 0x4000, 0x1578: 0x4000, 0x1579: 0x4000, 0x157a: 0x4000, 0x157b: 0x4000, - 0x157c: 0x4000, 0x157e: 0x4000, 0x157f: 0x4000, - // Block 0x56, offset 0x1580 - 0x1580: 0x4000, 0x1581: 0x4000, 0x1582: 0x4000, 0x1583: 0x4000, 0x1584: 0x4000, 0x1585: 0x4000, - 0x1586: 0x4000, 0x1587: 0x4000, 0x1588: 0x4000, 0x1589: 0x4000, 0x158a: 0x4000, 0x158b: 0x4000, - 0x158c: 0x4000, 0x158d: 0x4000, 0x158e: 0x4000, 0x158f: 0x4000, 0x1590: 0x4000, 0x1591: 0x4000, - 0x1592: 0x4000, 0x1593: 0x4000, - 0x15a0: 0x4000, 0x15a1: 0x4000, 0x15a2: 0x4000, 0x15a3: 0x4000, - 0x15a4: 0x4000, 0x15a5: 0x4000, 0x15a6: 0x4000, 0x15a7: 0x4000, 0x15a8: 0x4000, 0x15a9: 0x4000, - 0x15aa: 0x4000, 0x15ab: 0x4000, 0x15ac: 0x4000, 0x15ad: 0x4000, 0x15ae: 0x4000, 0x15af: 0x4000, - 0x15b0: 0x4000, 0x15b1: 0x4000, 0x15b2: 0x4000, 0x15b3: 0x4000, 0x15b4: 0x4000, 0x15b5: 0x4000, - 0x15b6: 0x4000, 0x15b7: 0x4000, 0x15b8: 0x4000, 0x15b9: 0x4000, 0x15ba: 0x4000, 0x15bb: 0x4000, - 0x15bc: 0x4000, 0x15bd: 0x4000, 0x15be: 0x4000, 0x15bf: 0x4000, - // Block 0x57, offset 0x15c0 - 0x15c0: 0x4000, 0x15c1: 0x4000, 0x15c2: 0x4000, 0x15c3: 0x4000, 0x15c4: 0x4000, 0x15c5: 0x4000, - 0x15c6: 0x4000, 0x15c7: 0x4000, 0x15c8: 0x4000, 0x15c9: 0x4000, 0x15ca: 0x4000, - 0x15cf: 0x4000, 0x15d0: 0x4000, 0x15d1: 0x4000, - 0x15d2: 0x4000, 0x15d3: 0x4000, - 0x15e0: 0x4000, 0x15e1: 0x4000, 0x15e2: 0x4000, 0x15e3: 0x4000, - 0x15e4: 0x4000, 0x15e5: 0x4000, 0x15e6: 0x4000, 0x15e7: 0x4000, 0x15e8: 0x4000, 0x15e9: 0x4000, - 0x15ea: 0x4000, 0x15eb: 0x4000, 0x15ec: 0x4000, 0x15ed: 0x4000, 0x15ee: 0x4000, 0x15ef: 0x4000, - 0x15f0: 0x4000, 0x15f4: 0x4000, - 0x15f8: 0x4000, 0x15f9: 0x4000, 0x15fa: 0x4000, 0x15fb: 0x4000, - 0x15fc: 0x4000, 0x15fd: 0x4000, 0x15fe: 0x4000, 0x15ff: 0x4000, - // Block 0x58, offset 0x1600 - 0x1600: 0x4000, 0x1602: 0x4000, 0x1603: 0x4000, 0x1604: 0x4000, 0x1605: 0x4000, - 0x1606: 0x4000, 0x1607: 0x4000, 0x1608: 0x4000, 0x1609: 0x4000, 0x160a: 0x4000, 0x160b: 0x4000, - 0x160c: 0x4000, 0x160d: 0x4000, 0x160e: 0x4000, 0x160f: 0x4000, 0x1610: 0x4000, 0x1611: 0x4000, - 0x1612: 0x4000, 0x1613: 0x4000, 0x1614: 0x4000, 0x1615: 0x4000, 0x1616: 0x4000, 0x1617: 0x4000, - 0x1618: 
0x4000, 0x1619: 0x4000, 0x161a: 0x4000, 0x161b: 0x4000, 0x161c: 0x4000, 0x161d: 0x4000, - 0x161e: 0x4000, 0x161f: 0x4000, 0x1620: 0x4000, 0x1621: 0x4000, 0x1622: 0x4000, 0x1623: 0x4000, - 0x1624: 0x4000, 0x1625: 0x4000, 0x1626: 0x4000, 0x1627: 0x4000, 0x1628: 0x4000, 0x1629: 0x4000, - 0x162a: 0x4000, 0x162b: 0x4000, 0x162c: 0x4000, 0x162d: 0x4000, 0x162e: 0x4000, 0x162f: 0x4000, - 0x1630: 0x4000, 0x1631: 0x4000, 0x1632: 0x4000, 0x1633: 0x4000, 0x1634: 0x4000, 0x1635: 0x4000, - 0x1636: 0x4000, 0x1637: 0x4000, 0x1638: 0x4000, 0x1639: 0x4000, 0x163a: 0x4000, 0x163b: 0x4000, - 0x163c: 0x4000, 0x163d: 0x4000, 0x163e: 0x4000, 0x163f: 0x4000, - // Block 0x59, offset 0x1640 - 0x1640: 0x4000, 0x1641: 0x4000, 0x1642: 0x4000, 0x1643: 0x4000, 0x1644: 0x4000, 0x1645: 0x4000, - 0x1646: 0x4000, 0x1647: 0x4000, 0x1648: 0x4000, 0x1649: 0x4000, 0x164a: 0x4000, 0x164b: 0x4000, - 0x164c: 0x4000, 0x164d: 0x4000, 0x164e: 0x4000, 0x164f: 0x4000, 0x1650: 0x4000, 0x1651: 0x4000, - 0x1652: 0x4000, 0x1653: 0x4000, 0x1654: 0x4000, 0x1655: 0x4000, 0x1656: 0x4000, 0x1657: 0x4000, - 0x1658: 0x4000, 0x1659: 0x4000, 0x165a: 0x4000, 0x165b: 0x4000, 0x165c: 0x4000, 0x165d: 0x4000, - 0x165e: 0x4000, 0x165f: 0x4000, 0x1660: 0x4000, 0x1661: 0x4000, 0x1662: 0x4000, 0x1663: 0x4000, - 0x1664: 0x4000, 0x1665: 0x4000, 0x1666: 0x4000, 0x1667: 0x4000, 0x1668: 0x4000, 0x1669: 0x4000, - 0x166a: 0x4000, 0x166b: 0x4000, 0x166c: 0x4000, 0x166d: 0x4000, 0x166e: 0x4000, 0x166f: 0x4000, - 0x1670: 0x4000, 0x1671: 0x4000, 0x1672: 0x4000, 0x1673: 0x4000, 0x1674: 0x4000, 0x1675: 0x4000, - 0x1676: 0x4000, 0x1677: 0x4000, 0x1678: 0x4000, 0x1679: 0x4000, 0x167a: 0x4000, 0x167b: 0x4000, - 0x167c: 0x4000, 0x167f: 0x4000, - // Block 0x5a, offset 0x1680 - 0x1680: 0x4000, 0x1681: 0x4000, 0x1682: 0x4000, 0x1683: 0x4000, 0x1684: 0x4000, 0x1685: 0x4000, - 0x1686: 0x4000, 0x1687: 0x4000, 0x1688: 0x4000, 0x1689: 0x4000, 0x168a: 0x4000, 0x168b: 0x4000, - 0x168c: 0x4000, 0x168d: 0x4000, 0x168e: 0x4000, 0x168f: 0x4000, 0x1690: 0x4000, 0x1691: 0x4000, - 0x1692: 0x4000, 0x1693: 0x4000, 0x1694: 0x4000, 0x1695: 0x4000, 0x1696: 0x4000, 0x1697: 0x4000, - 0x1698: 0x4000, 0x1699: 0x4000, 0x169a: 0x4000, 0x169b: 0x4000, 0x169c: 0x4000, 0x169d: 0x4000, - 0x169e: 0x4000, 0x169f: 0x4000, 0x16a0: 0x4000, 0x16a1: 0x4000, 0x16a2: 0x4000, 0x16a3: 0x4000, - 0x16a4: 0x4000, 0x16a5: 0x4000, 0x16a6: 0x4000, 0x16a7: 0x4000, 0x16a8: 0x4000, 0x16a9: 0x4000, - 0x16aa: 0x4000, 0x16ab: 0x4000, 0x16ac: 0x4000, 0x16ad: 0x4000, 0x16ae: 0x4000, 0x16af: 0x4000, - 0x16b0: 0x4000, 0x16b1: 0x4000, 0x16b2: 0x4000, 0x16b3: 0x4000, 0x16b4: 0x4000, 0x16b5: 0x4000, - 0x16b6: 0x4000, 0x16b7: 0x4000, 0x16b8: 0x4000, 0x16b9: 0x4000, 0x16ba: 0x4000, 0x16bb: 0x4000, - 0x16bc: 0x4000, 0x16bd: 0x4000, - // Block 0x5b, offset 0x16c0 - 0x16cb: 0x4000, - 0x16cc: 0x4000, 0x16cd: 0x4000, 0x16ce: 0x4000, 0x16d0: 0x4000, 0x16d1: 0x4000, - 0x16d2: 0x4000, 0x16d3: 0x4000, 0x16d4: 0x4000, 0x16d5: 0x4000, 0x16d6: 0x4000, 0x16d7: 0x4000, - 0x16d8: 0x4000, 0x16d9: 0x4000, 0x16da: 0x4000, 0x16db: 0x4000, 0x16dc: 0x4000, 0x16dd: 0x4000, - 0x16de: 0x4000, 0x16df: 0x4000, 0x16e0: 0x4000, 0x16e1: 0x4000, 0x16e2: 0x4000, 0x16e3: 0x4000, - 0x16e4: 0x4000, 0x16e5: 0x4000, 0x16e6: 0x4000, 0x16e7: 0x4000, - 0x16fa: 0x4000, - // Block 0x5c, offset 0x1700 - 0x1715: 0x4000, 0x1716: 0x4000, - 0x1724: 0x4000, - // Block 0x5d, offset 0x1740 - 0x177b: 0x4000, - 0x177c: 0x4000, 0x177d: 0x4000, 0x177e: 0x4000, 0x177f: 0x4000, - // Block 0x5e, offset 0x1780 - 0x1780: 0x4000, 0x1781: 0x4000, 0x1782: 0x4000, 0x1783: 0x4000, 0x1784: 0x4000, 
0x1785: 0x4000, - 0x1786: 0x4000, 0x1787: 0x4000, 0x1788: 0x4000, 0x1789: 0x4000, 0x178a: 0x4000, 0x178b: 0x4000, - 0x178c: 0x4000, 0x178d: 0x4000, 0x178e: 0x4000, 0x178f: 0x4000, - // Block 0x5f, offset 0x17c0 - 0x17c0: 0x4000, 0x17c1: 0x4000, 0x17c2: 0x4000, 0x17c3: 0x4000, 0x17c4: 0x4000, 0x17c5: 0x4000, - 0x17cc: 0x4000, 0x17d0: 0x4000, 0x17d1: 0x4000, - 0x17d2: 0x4000, - 0x17eb: 0x4000, 0x17ec: 0x4000, - 0x17f4: 0x4000, 0x17f5: 0x4000, - 0x17f6: 0x4000, 0x17f7: 0x4000, 0x17f8: 0x4000, - // Block 0x60, offset 0x1800 - 0x1810: 0x4000, 0x1811: 0x4000, - 0x1812: 0x4000, 0x1813: 0x4000, 0x1814: 0x4000, 0x1815: 0x4000, 0x1816: 0x4000, 0x1817: 0x4000, - 0x1818: 0x4000, 0x1819: 0x4000, 0x181a: 0x4000, 0x181b: 0x4000, 0x181c: 0x4000, 0x181d: 0x4000, - 0x181e: 0x4000, 0x181f: 0x4000, 0x1820: 0x4000, 0x1821: 0x4000, 0x1822: 0x4000, 0x1823: 0x4000, - 0x1824: 0x4000, 0x1825: 0x4000, 0x1826: 0x4000, 0x1827: 0x4000, 0x1828: 0x4000, 0x1829: 0x4000, - 0x182a: 0x4000, 0x182b: 0x4000, 0x182c: 0x4000, 0x182d: 0x4000, 0x182e: 0x4000, 0x182f: 0x4000, - 0x1830: 0x4000, 0x1831: 0x4000, 0x1832: 0x4000, 0x1833: 0x4000, 0x1834: 0x4000, 0x1835: 0x4000, - 0x1836: 0x4000, 0x1837: 0x4000, 0x1838: 0x4000, 0x1839: 0x4000, 0x183a: 0x4000, 0x183b: 0x4000, - 0x183c: 0x4000, 0x183d: 0x4000, 0x183e: 0x4000, - // Block 0x61, offset 0x1840 - 0x1840: 0x4000, 0x1841: 0x4000, 0x1842: 0x4000, 0x1843: 0x4000, 0x1844: 0x4000, 0x1845: 0x4000, - 0x1846: 0x4000, 0x1847: 0x4000, 0x1848: 0x4000, 0x1849: 0x4000, 0x184a: 0x4000, 0x184b: 0x4000, - 0x184c: 0x4000, 0x1850: 0x4000, 0x1851: 0x4000, - 0x1852: 0x4000, 0x1853: 0x4000, 0x1854: 0x4000, 0x1855: 0x4000, 0x1856: 0x4000, 0x1857: 0x4000, - 0x1858: 0x4000, 0x1859: 0x4000, 0x185a: 0x4000, 0x185b: 0x4000, 0x185c: 0x4000, 0x185d: 0x4000, - 0x185e: 0x4000, 0x185f: 0x4000, 0x1860: 0x4000, 0x1861: 0x4000, 0x1862: 0x4000, 0x1863: 0x4000, - 0x1864: 0x4000, 0x1865: 0x4000, 0x1866: 0x4000, 0x1867: 0x4000, 0x1868: 0x4000, 0x1869: 0x4000, - 0x186a: 0x4000, 0x186b: 0x4000, - // Block 0x62, offset 0x1880 - 0x1880: 0x4000, 0x1881: 0x4000, 0x1882: 0x4000, 0x1883: 0x4000, 0x1884: 0x4000, 0x1885: 0x4000, - 0x1886: 0x4000, 0x1887: 0x4000, 0x1888: 0x4000, 0x1889: 0x4000, 0x188a: 0x4000, 0x188b: 0x4000, - 0x188c: 0x4000, 0x188d: 0x4000, 0x188e: 0x4000, 0x188f: 0x4000, 0x1890: 0x4000, 0x1891: 0x4000, - 0x1892: 0x4000, 0x1893: 0x4000, 0x1894: 0x4000, 0x1895: 0x4000, 0x1896: 0x4000, 0x1897: 0x4000, - // Block 0x63, offset 0x18c0 - 0x18c0: 0x4000, - 0x18d0: 0x4000, 0x18d1: 0x4000, - 0x18d2: 0x4000, 0x18d3: 0x4000, 0x18d4: 0x4000, 0x18d5: 0x4000, 0x18d6: 0x4000, 0x18d7: 0x4000, - 0x18d8: 0x4000, 0x18d9: 0x4000, 0x18da: 0x4000, 0x18db: 0x4000, 0x18dc: 0x4000, 0x18dd: 0x4000, - 0x18de: 0x4000, 0x18df: 0x4000, 0x18e0: 0x4000, 0x18e1: 0x4000, 0x18e2: 0x4000, 0x18e3: 0x4000, - 0x18e4: 0x4000, 0x18e5: 0x4000, 0x18e6: 0x4000, - // Block 0x64, offset 0x1900 - 0x1900: 0x2000, 0x1901: 0x2000, 0x1902: 0x2000, 0x1903: 0x2000, 0x1904: 0x2000, 0x1905: 0x2000, - 0x1906: 0x2000, 0x1907: 0x2000, 0x1908: 0x2000, 0x1909: 0x2000, 0x190a: 0x2000, 0x190b: 0x2000, - 0x190c: 0x2000, 0x190d: 0x2000, 0x190e: 0x2000, 0x190f: 0x2000, 0x1910: 0x2000, 0x1911: 0x2000, - 0x1912: 0x2000, 0x1913: 0x2000, 0x1914: 0x2000, 0x1915: 0x2000, 0x1916: 0x2000, 0x1917: 0x2000, - 0x1918: 0x2000, 0x1919: 0x2000, 0x191a: 0x2000, 0x191b: 0x2000, 0x191c: 0x2000, 0x191d: 0x2000, - 0x191e: 0x2000, 0x191f: 0x2000, 0x1920: 0x2000, 0x1921: 0x2000, 0x1922: 0x2000, 0x1923: 0x2000, - 0x1924: 0x2000, 0x1925: 0x2000, 0x1926: 0x2000, 0x1927: 0x2000, 0x1928: 0x2000, 
0x1929: 0x2000, - 0x192a: 0x2000, 0x192b: 0x2000, 0x192c: 0x2000, 0x192d: 0x2000, 0x192e: 0x2000, 0x192f: 0x2000, - 0x1930: 0x2000, 0x1931: 0x2000, 0x1932: 0x2000, 0x1933: 0x2000, 0x1934: 0x2000, 0x1935: 0x2000, - 0x1936: 0x2000, 0x1937: 0x2000, 0x1938: 0x2000, 0x1939: 0x2000, 0x193a: 0x2000, 0x193b: 0x2000, - 0x193c: 0x2000, 0x193d: 0x2000, -} - -// widthIndex: 22 blocks, 1408 entries, 1408 bytes -// Block 0 is the zero block. -var widthIndex = [1408]uint8{ - // Block 0x0, offset 0x0 - // Block 0x1, offset 0x40 - // Block 0x2, offset 0x80 - // Block 0x3, offset 0xc0 - 0xc2: 0x01, 0xc3: 0x02, 0xc4: 0x03, 0xc5: 0x04, 0xc7: 0x05, - 0xc9: 0x06, 0xcb: 0x07, 0xcc: 0x08, 0xcd: 0x09, 0xce: 0x0a, 0xcf: 0x0b, - 0xd0: 0x0c, 0xd1: 0x0d, - 0xe1: 0x02, 0xe2: 0x03, 0xe3: 0x04, 0xe4: 0x05, 0xe5: 0x06, 0xe6: 0x06, 0xe7: 0x06, - 0xe8: 0x06, 0xe9: 0x06, 0xea: 0x07, 0xeb: 0x06, 0xec: 0x06, 0xed: 0x08, 0xee: 0x09, 0xef: 0x0a, - 0xf0: 0x0f, 0xf3: 0x12, 0xf4: 0x13, - // Block 0x4, offset 0x100 - 0x104: 0x0e, 0x105: 0x0f, - // Block 0x5, offset 0x140 - 0x140: 0x10, 0x141: 0x11, 0x142: 0x12, 0x144: 0x13, 0x145: 0x14, 0x146: 0x15, 0x147: 0x16, - 0x148: 0x17, 0x149: 0x18, 0x14a: 0x19, 0x14c: 0x1a, 0x14f: 0x1b, - 0x151: 0x1c, 0x152: 0x08, 0x153: 0x1d, 0x154: 0x1e, 0x155: 0x1f, 0x156: 0x20, 0x157: 0x21, - 0x158: 0x22, 0x159: 0x23, 0x15a: 0x24, 0x15b: 0x25, 0x15c: 0x26, 0x15d: 0x27, 0x15e: 0x28, 0x15f: 0x29, - 0x166: 0x2a, - 0x16c: 0x2b, 0x16d: 0x2c, - 0x17a: 0x2d, 0x17b: 0x2e, 0x17c: 0x0e, 0x17d: 0x0e, 0x17e: 0x0e, 0x17f: 0x2f, - // Block 0x6, offset 0x180 - 0x180: 0x30, 0x181: 0x31, 0x182: 0x32, 0x183: 0x33, 0x184: 0x34, 0x185: 0x35, 0x186: 0x36, 0x187: 0x37, - 0x188: 0x38, 0x189: 0x39, 0x18a: 0x0e, 0x18b: 0x3a, 0x18c: 0x0e, 0x18d: 0x0e, 0x18e: 0x0e, 0x18f: 0x0e, - 0x190: 0x0e, 0x191: 0x0e, 0x192: 0x0e, 0x193: 0x0e, 0x194: 0x0e, 0x195: 0x0e, 0x196: 0x0e, 0x197: 0x0e, - 0x198: 0x0e, 0x199: 0x0e, 0x19a: 0x0e, 0x19b: 0x0e, 0x19c: 0x0e, 0x19d: 0x0e, 0x19e: 0x0e, 0x19f: 0x0e, - 0x1a0: 0x0e, 0x1a1: 0x0e, 0x1a2: 0x0e, 0x1a3: 0x0e, 0x1a4: 0x0e, 0x1a5: 0x0e, 0x1a6: 0x0e, 0x1a7: 0x0e, - 0x1a8: 0x0e, 0x1a9: 0x0e, 0x1aa: 0x0e, 0x1ab: 0x0e, 0x1ac: 0x0e, 0x1ad: 0x0e, 0x1ae: 0x0e, 0x1af: 0x0e, - 0x1b0: 0x0e, 0x1b1: 0x0e, 0x1b2: 0x0e, 0x1b3: 0x0e, 0x1b4: 0x0e, 0x1b5: 0x0e, 0x1b6: 0x0e, 0x1b7: 0x0e, - 0x1b8: 0x0e, 0x1b9: 0x0e, 0x1ba: 0x0e, 0x1bb: 0x0e, 0x1bc: 0x0e, 0x1bd: 0x0e, 0x1be: 0x0e, 0x1bf: 0x0e, - // Block 0x7, offset 0x1c0 - 0x1c0: 0x0e, 0x1c1: 0x0e, 0x1c2: 0x0e, 0x1c3: 0x0e, 0x1c4: 0x0e, 0x1c5: 0x0e, 0x1c6: 0x0e, 0x1c7: 0x0e, - 0x1c8: 0x0e, 0x1c9: 0x0e, 0x1ca: 0x0e, 0x1cb: 0x0e, 0x1cc: 0x0e, 0x1cd: 0x0e, 0x1ce: 0x0e, 0x1cf: 0x0e, - 0x1d0: 0x0e, 0x1d1: 0x0e, 0x1d2: 0x0e, 0x1d3: 0x0e, 0x1d4: 0x0e, 0x1d5: 0x0e, 0x1d6: 0x0e, 0x1d7: 0x0e, - 0x1d8: 0x0e, 0x1d9: 0x0e, 0x1da: 0x0e, 0x1db: 0x0e, 0x1dc: 0x0e, 0x1dd: 0x0e, 0x1de: 0x0e, 0x1df: 0x0e, - 0x1e0: 0x0e, 0x1e1: 0x0e, 0x1e2: 0x0e, 0x1e3: 0x0e, 0x1e4: 0x0e, 0x1e5: 0x0e, 0x1e6: 0x0e, 0x1e7: 0x0e, - 0x1e8: 0x0e, 0x1e9: 0x0e, 0x1ea: 0x0e, 0x1eb: 0x0e, 0x1ec: 0x0e, 0x1ed: 0x0e, 0x1ee: 0x0e, 0x1ef: 0x0e, - 0x1f0: 0x0e, 0x1f1: 0x0e, 0x1f2: 0x0e, 0x1f3: 0x0e, 0x1f4: 0x0e, 0x1f5: 0x0e, 0x1f6: 0x0e, - 0x1f8: 0x0e, 0x1f9: 0x0e, 0x1fa: 0x0e, 0x1fb: 0x0e, 0x1fc: 0x0e, 0x1fd: 0x0e, 0x1fe: 0x0e, 0x1ff: 0x0e, - // Block 0x8, offset 0x200 - 0x200: 0x0e, 0x201: 0x0e, 0x202: 0x0e, 0x203: 0x0e, 0x204: 0x0e, 0x205: 0x0e, 0x206: 0x0e, 0x207: 0x0e, - 0x208: 0x0e, 0x209: 0x0e, 0x20a: 0x0e, 0x20b: 0x0e, 0x20c: 0x0e, 0x20d: 0x0e, 0x20e: 0x0e, 0x20f: 0x0e, - 0x210: 0x0e, 0x211: 0x0e, 0x212: 0x0e, 
0x213: 0x0e, 0x214: 0x0e, 0x215: 0x0e, 0x216: 0x0e, 0x217: 0x0e, - 0x218: 0x0e, 0x219: 0x0e, 0x21a: 0x0e, 0x21b: 0x0e, 0x21c: 0x0e, 0x21d: 0x0e, 0x21e: 0x0e, 0x21f: 0x0e, - 0x220: 0x0e, 0x221: 0x0e, 0x222: 0x0e, 0x223: 0x0e, 0x224: 0x0e, 0x225: 0x0e, 0x226: 0x0e, 0x227: 0x0e, - 0x228: 0x0e, 0x229: 0x0e, 0x22a: 0x0e, 0x22b: 0x0e, 0x22c: 0x0e, 0x22d: 0x0e, 0x22e: 0x0e, 0x22f: 0x0e, - 0x230: 0x0e, 0x231: 0x0e, 0x232: 0x0e, 0x233: 0x0e, 0x234: 0x0e, 0x235: 0x0e, 0x236: 0x0e, 0x237: 0x0e, - 0x238: 0x0e, 0x239: 0x0e, 0x23a: 0x0e, 0x23b: 0x0e, 0x23c: 0x0e, 0x23d: 0x0e, 0x23e: 0x0e, 0x23f: 0x0e, - // Block 0x9, offset 0x240 - 0x240: 0x0e, 0x241: 0x0e, 0x242: 0x0e, 0x243: 0x0e, 0x244: 0x0e, 0x245: 0x0e, 0x246: 0x0e, 0x247: 0x0e, - 0x248: 0x0e, 0x249: 0x0e, 0x24a: 0x0e, 0x24b: 0x0e, 0x24c: 0x0e, 0x24d: 0x0e, 0x24e: 0x0e, 0x24f: 0x0e, - 0x250: 0x0e, 0x251: 0x0e, 0x252: 0x3b, 0x253: 0x3c, - 0x265: 0x3d, - 0x270: 0x0e, 0x271: 0x0e, 0x272: 0x0e, 0x273: 0x0e, 0x274: 0x0e, 0x275: 0x0e, 0x276: 0x0e, 0x277: 0x0e, - 0x278: 0x0e, 0x279: 0x0e, 0x27a: 0x0e, 0x27b: 0x0e, 0x27c: 0x0e, 0x27d: 0x0e, 0x27e: 0x0e, 0x27f: 0x0e, - // Block 0xa, offset 0x280 - 0x280: 0x0e, 0x281: 0x0e, 0x282: 0x0e, 0x283: 0x0e, 0x284: 0x0e, 0x285: 0x0e, 0x286: 0x0e, 0x287: 0x0e, - 0x288: 0x0e, 0x289: 0x0e, 0x28a: 0x0e, 0x28b: 0x0e, 0x28c: 0x0e, 0x28d: 0x0e, 0x28e: 0x0e, 0x28f: 0x0e, - 0x290: 0x0e, 0x291: 0x0e, 0x292: 0x0e, 0x293: 0x0e, 0x294: 0x0e, 0x295: 0x0e, 0x296: 0x0e, 0x297: 0x0e, - 0x298: 0x0e, 0x299: 0x0e, 0x29a: 0x0e, 0x29b: 0x0e, 0x29c: 0x0e, 0x29d: 0x0e, 0x29e: 0x3e, - // Block 0xb, offset 0x2c0 - 0x2c0: 0x08, 0x2c1: 0x08, 0x2c2: 0x08, 0x2c3: 0x08, 0x2c4: 0x08, 0x2c5: 0x08, 0x2c6: 0x08, 0x2c7: 0x08, - 0x2c8: 0x08, 0x2c9: 0x08, 0x2ca: 0x08, 0x2cb: 0x08, 0x2cc: 0x08, 0x2cd: 0x08, 0x2ce: 0x08, 0x2cf: 0x08, - 0x2d0: 0x08, 0x2d1: 0x08, 0x2d2: 0x08, 0x2d3: 0x08, 0x2d4: 0x08, 0x2d5: 0x08, 0x2d6: 0x08, 0x2d7: 0x08, - 0x2d8: 0x08, 0x2d9: 0x08, 0x2da: 0x08, 0x2db: 0x08, 0x2dc: 0x08, 0x2dd: 0x08, 0x2de: 0x08, 0x2df: 0x08, - 0x2e0: 0x08, 0x2e1: 0x08, 0x2e2: 0x08, 0x2e3: 0x08, 0x2e4: 0x08, 0x2e5: 0x08, 0x2e6: 0x08, 0x2e7: 0x08, - 0x2e8: 0x08, 0x2e9: 0x08, 0x2ea: 0x08, 0x2eb: 0x08, 0x2ec: 0x08, 0x2ed: 0x08, 0x2ee: 0x08, 0x2ef: 0x08, - 0x2f0: 0x08, 0x2f1: 0x08, 0x2f2: 0x08, 0x2f3: 0x08, 0x2f4: 0x08, 0x2f5: 0x08, 0x2f6: 0x08, 0x2f7: 0x08, - 0x2f8: 0x08, 0x2f9: 0x08, 0x2fa: 0x08, 0x2fb: 0x08, 0x2fc: 0x08, 0x2fd: 0x08, 0x2fe: 0x08, 0x2ff: 0x08, - // Block 0xc, offset 0x300 - 0x300: 0x08, 0x301: 0x08, 0x302: 0x08, 0x303: 0x08, 0x304: 0x08, 0x305: 0x08, 0x306: 0x08, 0x307: 0x08, - 0x308: 0x08, 0x309: 0x08, 0x30a: 0x08, 0x30b: 0x08, 0x30c: 0x08, 0x30d: 0x08, 0x30e: 0x08, 0x30f: 0x08, - 0x310: 0x08, 0x311: 0x08, 0x312: 0x08, 0x313: 0x08, 0x314: 0x08, 0x315: 0x08, 0x316: 0x08, 0x317: 0x08, - 0x318: 0x08, 0x319: 0x08, 0x31a: 0x08, 0x31b: 0x08, 0x31c: 0x08, 0x31d: 0x08, 0x31e: 0x08, 0x31f: 0x08, - 0x320: 0x08, 0x321: 0x08, 0x322: 0x08, 0x323: 0x08, 0x324: 0x0e, 0x325: 0x0e, 0x326: 0x0e, 0x327: 0x0e, - 0x328: 0x0e, 0x329: 0x0e, 0x32a: 0x0e, 0x32b: 0x0e, - 0x338: 0x3f, 0x339: 0x40, 0x33c: 0x41, 0x33d: 0x42, 0x33e: 0x43, 0x33f: 0x44, - // Block 0xd, offset 0x340 - 0x37f: 0x45, - // Block 0xe, offset 0x380 - 0x380: 0x0e, 0x381: 0x0e, 0x382: 0x0e, 0x383: 0x0e, 0x384: 0x0e, 0x385: 0x0e, 0x386: 0x0e, 0x387: 0x0e, - 0x388: 0x0e, 0x389: 0x0e, 0x38a: 0x0e, 0x38b: 0x0e, 0x38c: 0x0e, 0x38d: 0x0e, 0x38e: 0x0e, 0x38f: 0x0e, - 0x390: 0x0e, 0x391: 0x0e, 0x392: 0x0e, 0x393: 0x0e, 0x394: 0x0e, 0x395: 0x0e, 0x396: 0x0e, 0x397: 0x0e, - 0x398: 0x0e, 0x399: 0x0e, 0x39a: 
0x0e, 0x39b: 0x0e, 0x39c: 0x0e, 0x39d: 0x0e, 0x39e: 0x0e, 0x39f: 0x46, - 0x3a0: 0x0e, 0x3a1: 0x0e, 0x3a2: 0x0e, 0x3a3: 0x0e, 0x3a4: 0x0e, 0x3a5: 0x0e, 0x3a6: 0x0e, 0x3a7: 0x0e, - 0x3a8: 0x0e, 0x3a9: 0x0e, 0x3aa: 0x0e, 0x3ab: 0x47, - // Block 0xf, offset 0x3c0 - 0x3c0: 0x0e, 0x3c1: 0x0e, 0x3c2: 0x0e, 0x3c3: 0x0e, 0x3c4: 0x48, 0x3c5: 0x49, 0x3c6: 0x0e, 0x3c7: 0x0e, - 0x3c8: 0x0e, 0x3c9: 0x0e, 0x3ca: 0x0e, 0x3cb: 0x4a, - // Block 0x10, offset 0x400 - 0x400: 0x4b, 0x403: 0x4c, 0x404: 0x4d, 0x405: 0x4e, 0x406: 0x4f, - 0x408: 0x50, 0x409: 0x51, 0x40c: 0x52, 0x40d: 0x53, 0x40e: 0x54, 0x40f: 0x55, - 0x410: 0x3a, 0x411: 0x56, 0x412: 0x0e, 0x413: 0x57, 0x414: 0x58, 0x415: 0x59, 0x416: 0x5a, 0x417: 0x5b, - 0x418: 0x0e, 0x419: 0x5c, 0x41a: 0x0e, 0x41b: 0x5d, - 0x424: 0x5e, 0x425: 0x5f, 0x426: 0x60, 0x427: 0x61, - // Block 0x11, offset 0x440 - 0x456: 0x0b, 0x457: 0x06, - 0x458: 0x0c, 0x45b: 0x0d, 0x45f: 0x0e, - 0x460: 0x06, 0x461: 0x06, 0x462: 0x06, 0x463: 0x06, 0x464: 0x06, 0x465: 0x06, 0x466: 0x06, 0x467: 0x06, - 0x468: 0x06, 0x469: 0x06, 0x46a: 0x06, 0x46b: 0x06, 0x46c: 0x06, 0x46d: 0x06, 0x46e: 0x06, 0x46f: 0x06, - 0x470: 0x06, 0x471: 0x06, 0x472: 0x06, 0x473: 0x06, 0x474: 0x06, 0x475: 0x06, 0x476: 0x06, 0x477: 0x06, - 0x478: 0x06, 0x479: 0x06, 0x47a: 0x06, 0x47b: 0x06, 0x47c: 0x06, 0x47d: 0x06, 0x47e: 0x06, 0x47f: 0x06, - // Block 0x12, offset 0x480 - 0x484: 0x08, 0x485: 0x08, 0x486: 0x08, 0x487: 0x09, - // Block 0x13, offset 0x4c0 - 0x4c0: 0x08, 0x4c1: 0x08, 0x4c2: 0x08, 0x4c3: 0x08, 0x4c4: 0x08, 0x4c5: 0x08, 0x4c6: 0x08, 0x4c7: 0x08, - 0x4c8: 0x08, 0x4c9: 0x08, 0x4ca: 0x08, 0x4cb: 0x08, 0x4cc: 0x08, 0x4cd: 0x08, 0x4ce: 0x08, 0x4cf: 0x08, - 0x4d0: 0x08, 0x4d1: 0x08, 0x4d2: 0x08, 0x4d3: 0x08, 0x4d4: 0x08, 0x4d5: 0x08, 0x4d6: 0x08, 0x4d7: 0x08, - 0x4d8: 0x08, 0x4d9: 0x08, 0x4da: 0x08, 0x4db: 0x08, 0x4dc: 0x08, 0x4dd: 0x08, 0x4de: 0x08, 0x4df: 0x08, - 0x4e0: 0x08, 0x4e1: 0x08, 0x4e2: 0x08, 0x4e3: 0x08, 0x4e4: 0x08, 0x4e5: 0x08, 0x4e6: 0x08, 0x4e7: 0x08, - 0x4e8: 0x08, 0x4e9: 0x08, 0x4ea: 0x08, 0x4eb: 0x08, 0x4ec: 0x08, 0x4ed: 0x08, 0x4ee: 0x08, 0x4ef: 0x08, - 0x4f0: 0x08, 0x4f1: 0x08, 0x4f2: 0x08, 0x4f3: 0x08, 0x4f4: 0x08, 0x4f5: 0x08, 0x4f6: 0x08, 0x4f7: 0x08, - 0x4f8: 0x08, 0x4f9: 0x08, 0x4fa: 0x08, 0x4fb: 0x08, 0x4fc: 0x08, 0x4fd: 0x08, 0x4fe: 0x08, 0x4ff: 0x62, - // Block 0x14, offset 0x500 - 0x520: 0x10, - 0x530: 0x09, 0x531: 0x09, 0x532: 0x09, 0x533: 0x09, 0x534: 0x09, 0x535: 0x09, 0x536: 0x09, 0x537: 0x09, - 0x538: 0x09, 0x539: 0x09, 0x53a: 0x09, 0x53b: 0x09, 0x53c: 0x09, 0x53d: 0x09, 0x53e: 0x09, 0x53f: 0x11, - // Block 0x15, offset 0x540 - 0x540: 0x09, 0x541: 0x09, 0x542: 0x09, 0x543: 0x09, 0x544: 0x09, 0x545: 0x09, 0x546: 0x09, 0x547: 0x09, - 0x548: 0x09, 0x549: 0x09, 0x54a: 0x09, 0x54b: 0x09, 0x54c: 0x09, 0x54d: 0x09, 0x54e: 0x09, 0x54f: 0x11, -} - -// inverseData contains 4-byte entries of the following format: -// -// <0 padding> -// -// The last byte of the UTF-8-encoded rune is xor-ed with the last byte of the -// UTF-8 encoding of the original rune. Mappings often have the following -// pattern: -// -// A -> A (U+FF21 -> U+0041) -// B -> B (U+FF22 -> U+0042) -// ... -// -// By xor-ing the last byte the same entry can be shared by many mappings. This -// reduces the total number of distinct entries by about two thirds. -// The resulting entry for the aforementioned mappings is -// -// { 0x01, 0xE0, 0x00, 0x00 } -// -// Using this entry to map U+FF21 (UTF-8 [EF BC A1]), we get -// -// E0 ^ A1 = 41. -// -// Similarly, for U+FF22 (UTF-8 [EF BC A2]), we get -// -// E0 ^ A2 = 42. 
-// -// Note that because of the xor-ing, the byte sequence stored in the entry is -// not valid UTF-8. -var inverseData = [150][4]byte{ - {0x00, 0x00, 0x00, 0x00}, - {0x03, 0xe3, 0x80, 0xa0}, - {0x03, 0xef, 0xbc, 0xa0}, - {0x03, 0xef, 0xbc, 0xe0}, - {0x03, 0xef, 0xbd, 0xe0}, - {0x03, 0xef, 0xbf, 0x02}, - {0x03, 0xef, 0xbf, 0x00}, - {0x03, 0xef, 0xbf, 0x0e}, - {0x03, 0xef, 0xbf, 0x0c}, - {0x03, 0xef, 0xbf, 0x0f}, - {0x03, 0xef, 0xbf, 0x39}, - {0x03, 0xef, 0xbf, 0x3b}, - {0x03, 0xef, 0xbf, 0x3f}, - {0x03, 0xef, 0xbf, 0x2a}, - {0x03, 0xef, 0xbf, 0x0d}, - {0x03, 0xef, 0xbf, 0x25}, - {0x03, 0xef, 0xbd, 0x1a}, - {0x03, 0xef, 0xbd, 0x26}, - {0x01, 0xa0, 0x00, 0x00}, - {0x03, 0xef, 0xbd, 0x25}, - {0x03, 0xef, 0xbd, 0x23}, - {0x03, 0xef, 0xbd, 0x2e}, - {0x03, 0xef, 0xbe, 0x07}, - {0x03, 0xef, 0xbe, 0x05}, - {0x03, 0xef, 0xbd, 0x06}, - {0x03, 0xef, 0xbd, 0x13}, - {0x03, 0xef, 0xbd, 0x0b}, - {0x03, 0xef, 0xbd, 0x16}, - {0x03, 0xef, 0xbd, 0x0c}, - {0x03, 0xef, 0xbd, 0x15}, - {0x03, 0xef, 0xbd, 0x0d}, - {0x03, 0xef, 0xbd, 0x1c}, - {0x03, 0xef, 0xbd, 0x02}, - {0x03, 0xef, 0xbd, 0x1f}, - {0x03, 0xef, 0xbd, 0x1d}, - {0x03, 0xef, 0xbd, 0x17}, - {0x03, 0xef, 0xbd, 0x08}, - {0x03, 0xef, 0xbd, 0x09}, - {0x03, 0xef, 0xbd, 0x0e}, - {0x03, 0xef, 0xbd, 0x04}, - {0x03, 0xef, 0xbd, 0x05}, - {0x03, 0xef, 0xbe, 0x3f}, - {0x03, 0xef, 0xbe, 0x00}, - {0x03, 0xef, 0xbd, 0x2c}, - {0x03, 0xef, 0xbe, 0x06}, - {0x03, 0xef, 0xbe, 0x0c}, - {0x03, 0xef, 0xbe, 0x0f}, - {0x03, 0xef, 0xbe, 0x0d}, - {0x03, 0xef, 0xbe, 0x0b}, - {0x03, 0xef, 0xbe, 0x19}, - {0x03, 0xef, 0xbe, 0x15}, - {0x03, 0xef, 0xbe, 0x11}, - {0x03, 0xef, 0xbe, 0x31}, - {0x03, 0xef, 0xbe, 0x33}, - {0x03, 0xef, 0xbd, 0x0f}, - {0x03, 0xef, 0xbe, 0x30}, - {0x03, 0xef, 0xbe, 0x3e}, - {0x03, 0xef, 0xbe, 0x32}, - {0x03, 0xef, 0xbe, 0x36}, - {0x03, 0xef, 0xbd, 0x14}, - {0x03, 0xef, 0xbe, 0x2e}, - {0x03, 0xef, 0xbd, 0x1e}, - {0x03, 0xef, 0xbe, 0x10}, - {0x03, 0xef, 0xbf, 0x13}, - {0x03, 0xef, 0xbf, 0x15}, - {0x03, 0xef, 0xbf, 0x17}, - {0x03, 0xef, 0xbf, 0x1f}, - {0x03, 0xef, 0xbf, 0x1d}, - {0x03, 0xef, 0xbf, 0x1b}, - {0x03, 0xef, 0xbf, 0x09}, - {0x03, 0xef, 0xbf, 0x0b}, - {0x03, 0xef, 0xbf, 0x37}, - {0x03, 0xef, 0xbe, 0x04}, - {0x01, 0xe0, 0x00, 0x00}, - {0x03, 0xe2, 0xa6, 0x1a}, - {0x03, 0xe2, 0xa6, 0x26}, - {0x03, 0xe3, 0x80, 0x23}, - {0x03, 0xe3, 0x80, 0x2e}, - {0x03, 0xe3, 0x80, 0x25}, - {0x03, 0xe3, 0x83, 0x1e}, - {0x03, 0xe3, 0x83, 0x14}, - {0x03, 0xe3, 0x82, 0x06}, - {0x03, 0xe3, 0x82, 0x0b}, - {0x03, 0xe3, 0x82, 0x0c}, - {0x03, 0xe3, 0x82, 0x0d}, - {0x03, 0xe3, 0x82, 0x02}, - {0x03, 0xe3, 0x83, 0x0f}, - {0x03, 0xe3, 0x83, 0x08}, - {0x03, 0xe3, 0x83, 0x09}, - {0x03, 0xe3, 0x83, 0x2c}, - {0x03, 0xe3, 0x83, 0x0c}, - {0x03, 0xe3, 0x82, 0x13}, - {0x03, 0xe3, 0x82, 0x16}, - {0x03, 0xe3, 0x82, 0x15}, - {0x03, 0xe3, 0x82, 0x1c}, - {0x03, 0xe3, 0x82, 0x1f}, - {0x03, 0xe3, 0x82, 0x1d}, - {0x03, 0xe3, 0x82, 0x1a}, - {0x03, 0xe3, 0x82, 0x17}, - {0x03, 0xe3, 0x82, 0x08}, - {0x03, 0xe3, 0x82, 0x09}, - {0x03, 0xe3, 0x82, 0x0e}, - {0x03, 0xe3, 0x82, 0x04}, - {0x03, 0xe3, 0x82, 0x05}, - {0x03, 0xe3, 0x82, 0x3f}, - {0x03, 0xe3, 0x83, 0x00}, - {0x03, 0xe3, 0x83, 0x06}, - {0x03, 0xe3, 0x83, 0x05}, - {0x03, 0xe3, 0x83, 0x0d}, - {0x03, 0xe3, 0x83, 0x0b}, - {0x03, 0xe3, 0x83, 0x07}, - {0x03, 0xe3, 0x83, 0x19}, - {0x03, 0xe3, 0x83, 0x15}, - {0x03, 0xe3, 0x83, 0x11}, - {0x03, 0xe3, 0x83, 0x31}, - {0x03, 0xe3, 0x83, 0x33}, - {0x03, 0xe3, 0x83, 0x30}, - {0x03, 0xe3, 0x83, 0x3e}, - {0x03, 0xe3, 0x83, 0x32}, - {0x03, 0xe3, 0x83, 0x36}, - {0x03, 0xe3, 0x83, 0x2e}, - {0x03, 0xe3, 0x82, 0x07}, - 
{0x03, 0xe3, 0x85, 0x04}, - {0x03, 0xe3, 0x84, 0x10}, - {0x03, 0xe3, 0x85, 0x30}, - {0x03, 0xe3, 0x85, 0x0d}, - {0x03, 0xe3, 0x85, 0x13}, - {0x03, 0xe3, 0x85, 0x15}, - {0x03, 0xe3, 0x85, 0x17}, - {0x03, 0xe3, 0x85, 0x1f}, - {0x03, 0xe3, 0x85, 0x1d}, - {0x03, 0xe3, 0x85, 0x1b}, - {0x03, 0xe3, 0x85, 0x09}, - {0x03, 0xe3, 0x85, 0x0f}, - {0x03, 0xe3, 0x85, 0x0b}, - {0x03, 0xe3, 0x85, 0x37}, - {0x03, 0xe3, 0x85, 0x3b}, - {0x03, 0xe3, 0x85, 0x39}, - {0x03, 0xe3, 0x85, 0x3f}, - {0x02, 0xc2, 0x02, 0x00}, - {0x02, 0xc2, 0x0e, 0x00}, - {0x02, 0xc2, 0x0c, 0x00}, - {0x02, 0xc2, 0x00, 0x00}, - {0x03, 0xe2, 0x82, 0x0f}, - {0x03, 0xe2, 0x94, 0x2a}, - {0x03, 0xe2, 0x86, 0x39}, - {0x03, 0xe2, 0x86, 0x3b}, - {0x03, 0xe2, 0x86, 0x3f}, - {0x03, 0xe2, 0x96, 0x0d}, - {0x03, 0xe2, 0x97, 0x25}, -} - -// Total table size 14936 bytes (14KiB) diff --git a/vendor/golang.org/x/text/width/tables11.0.0.go b/vendor/golang.org/x/text/width/tables11.0.0.go deleted file mode 100644 index 89288b3da..000000000 --- a/vendor/golang.org/x/text/width/tables11.0.0.go +++ /dev/null @@ -1,1340 +0,0 @@ -// Code generated by running "go generate" in golang.org/x/text. DO NOT EDIT. - -//go:build go1.13 && !go1.14 - -package width - -// UnicodeVersion is the Unicode version from which the tables in this package are derived. -const UnicodeVersion = "11.0.0" - -// lookup returns the trie value for the first UTF-8 encoding in s and -// the width in bytes of this encoding. The size will be 0 if s does not -// hold enough bytes to complete the encoding. len(s) must be greater than 0. -func (t *widthTrie) lookup(s []byte) (v uint16, sz int) { - c0 := s[0] - switch { - case c0 < 0x80: // is ASCII - return widthValues[c0], 1 - case c0 < 0xC2: - return 0, 1 // Illegal UTF-8: not a starter, not ASCII. - case c0 < 0xE0: // 2-byte UTF-8 - if len(s) < 2 { - return 0, 0 - } - i := widthIndex[c0] - c1 := s[1] - if c1 < 0x80 || 0xC0 <= c1 { - return 0, 1 // Illegal UTF-8: not a continuation byte. - } - return t.lookupValue(uint32(i), c1), 2 - case c0 < 0xF0: // 3-byte UTF-8 - if len(s) < 3 { - return 0, 0 - } - i := widthIndex[c0] - c1 := s[1] - if c1 < 0x80 || 0xC0 <= c1 { - return 0, 1 // Illegal UTF-8: not a continuation byte. - } - o := uint32(i)<<6 + uint32(c1) - i = widthIndex[o] - c2 := s[2] - if c2 < 0x80 || 0xC0 <= c2 { - return 0, 2 // Illegal UTF-8: not a continuation byte. - } - return t.lookupValue(uint32(i), c2), 3 - case c0 < 0xF8: // 4-byte UTF-8 - if len(s) < 4 { - return 0, 0 - } - i := widthIndex[c0] - c1 := s[1] - if c1 < 0x80 || 0xC0 <= c1 { - return 0, 1 // Illegal UTF-8: not a continuation byte. - } - o := uint32(i)<<6 + uint32(c1) - i = widthIndex[o] - c2 := s[2] - if c2 < 0x80 || 0xC0 <= c2 { - return 0, 2 // Illegal UTF-8: not a continuation byte. - } - o = uint32(i)<<6 + uint32(c2) - i = widthIndex[o] - c3 := s[3] - if c3 < 0x80 || 0xC0 <= c3 { - return 0, 3 // Illegal UTF-8: not a continuation byte. - } - return t.lookupValue(uint32(i), c3), 4 - } - // Illegal rune - return 0, 1 -} - -// lookupUnsafe returns the trie value for the first UTF-8 encoding in s. -// s must start with a full and valid UTF-8 encoded rune. 
-func (t *widthTrie) lookupUnsafe(s []byte) uint16 { - c0 := s[0] - if c0 < 0x80 { // is ASCII - return widthValues[c0] - } - i := widthIndex[c0] - if c0 < 0xE0 { // 2-byte UTF-8 - return t.lookupValue(uint32(i), s[1]) - } - i = widthIndex[uint32(i)<<6+uint32(s[1])] - if c0 < 0xF0 { // 3-byte UTF-8 - return t.lookupValue(uint32(i), s[2]) - } - i = widthIndex[uint32(i)<<6+uint32(s[2])] - if c0 < 0xF8 { // 4-byte UTF-8 - return t.lookupValue(uint32(i), s[3]) - } - return 0 -} - -// lookupString returns the trie value for the first UTF-8 encoding in s and -// the width in bytes of this encoding. The size will be 0 if s does not -// hold enough bytes to complete the encoding. len(s) must be greater than 0. -func (t *widthTrie) lookupString(s string) (v uint16, sz int) { - c0 := s[0] - switch { - case c0 < 0x80: // is ASCII - return widthValues[c0], 1 - case c0 < 0xC2: - return 0, 1 // Illegal UTF-8: not a starter, not ASCII. - case c0 < 0xE0: // 2-byte UTF-8 - if len(s) < 2 { - return 0, 0 - } - i := widthIndex[c0] - c1 := s[1] - if c1 < 0x80 || 0xC0 <= c1 { - return 0, 1 // Illegal UTF-8: not a continuation byte. - } - return t.lookupValue(uint32(i), c1), 2 - case c0 < 0xF0: // 3-byte UTF-8 - if len(s) < 3 { - return 0, 0 - } - i := widthIndex[c0] - c1 := s[1] - if c1 < 0x80 || 0xC0 <= c1 { - return 0, 1 // Illegal UTF-8: not a continuation byte. - } - o := uint32(i)<<6 + uint32(c1) - i = widthIndex[o] - c2 := s[2] - if c2 < 0x80 || 0xC0 <= c2 { - return 0, 2 // Illegal UTF-8: not a continuation byte. - } - return t.lookupValue(uint32(i), c2), 3 - case c0 < 0xF8: // 4-byte UTF-8 - if len(s) < 4 { - return 0, 0 - } - i := widthIndex[c0] - c1 := s[1] - if c1 < 0x80 || 0xC0 <= c1 { - return 0, 1 // Illegal UTF-8: not a continuation byte. - } - o := uint32(i)<<6 + uint32(c1) - i = widthIndex[o] - c2 := s[2] - if c2 < 0x80 || 0xC0 <= c2 { - return 0, 2 // Illegal UTF-8: not a continuation byte. - } - o = uint32(i)<<6 + uint32(c2) - i = widthIndex[o] - c3 := s[3] - if c3 < 0x80 || 0xC0 <= c3 { - return 0, 3 // Illegal UTF-8: not a continuation byte. - } - return t.lookupValue(uint32(i), c3), 4 - } - // Illegal rune - return 0, 1 -} - -// lookupStringUnsafe returns the trie value for the first UTF-8 encoding in s. -// s must start with a full and valid UTF-8 encoded rune. -func (t *widthTrie) lookupStringUnsafe(s string) uint16 { - c0 := s[0] - if c0 < 0x80 { // is ASCII - return widthValues[c0] - } - i := widthIndex[c0] - if c0 < 0xE0 { // 2-byte UTF-8 - return t.lookupValue(uint32(i), s[1]) - } - i = widthIndex[uint32(i)<<6+uint32(s[1])] - if c0 < 0xF0 { // 3-byte UTF-8 - return t.lookupValue(uint32(i), s[2]) - } - i = widthIndex[uint32(i)<<6+uint32(s[2])] - if c0 < 0xF8 { // 4-byte UTF-8 - return t.lookupValue(uint32(i), s[3]) - } - return 0 -} - -// widthTrie. Total size: 14336 bytes (14.00 KiB). Checksum: c0f7712776e71cd4. -type widthTrie struct{} - -func newWidthTrie(i int) *widthTrie { - return &widthTrie{} -} - -// lookupValue determines the type of block n and looks up the value for b. -func (t *widthTrie) lookupValue(n uint32, b byte) uint16 { - switch { - default: - return uint16(widthValues[n<<6+uint32(b)]) - } -} - -// widthValues: 101 blocks, 6464 entries, 12928 bytes -// The third block is the zero block. 
-var widthValues = [6464]uint16{ - // Block 0x0, offset 0x0 - 0x20: 0x6001, 0x21: 0x6002, 0x22: 0x6002, 0x23: 0x6002, - 0x24: 0x6002, 0x25: 0x6002, 0x26: 0x6002, 0x27: 0x6002, 0x28: 0x6002, 0x29: 0x6002, - 0x2a: 0x6002, 0x2b: 0x6002, 0x2c: 0x6002, 0x2d: 0x6002, 0x2e: 0x6002, 0x2f: 0x6002, - 0x30: 0x6002, 0x31: 0x6002, 0x32: 0x6002, 0x33: 0x6002, 0x34: 0x6002, 0x35: 0x6002, - 0x36: 0x6002, 0x37: 0x6002, 0x38: 0x6002, 0x39: 0x6002, 0x3a: 0x6002, 0x3b: 0x6002, - 0x3c: 0x6002, 0x3d: 0x6002, 0x3e: 0x6002, 0x3f: 0x6002, - // Block 0x1, offset 0x40 - 0x40: 0x6003, 0x41: 0x6003, 0x42: 0x6003, 0x43: 0x6003, 0x44: 0x6003, 0x45: 0x6003, - 0x46: 0x6003, 0x47: 0x6003, 0x48: 0x6003, 0x49: 0x6003, 0x4a: 0x6003, 0x4b: 0x6003, - 0x4c: 0x6003, 0x4d: 0x6003, 0x4e: 0x6003, 0x4f: 0x6003, 0x50: 0x6003, 0x51: 0x6003, - 0x52: 0x6003, 0x53: 0x6003, 0x54: 0x6003, 0x55: 0x6003, 0x56: 0x6003, 0x57: 0x6003, - 0x58: 0x6003, 0x59: 0x6003, 0x5a: 0x6003, 0x5b: 0x6003, 0x5c: 0x6003, 0x5d: 0x6003, - 0x5e: 0x6003, 0x5f: 0x6003, 0x60: 0x6004, 0x61: 0x6004, 0x62: 0x6004, 0x63: 0x6004, - 0x64: 0x6004, 0x65: 0x6004, 0x66: 0x6004, 0x67: 0x6004, 0x68: 0x6004, 0x69: 0x6004, - 0x6a: 0x6004, 0x6b: 0x6004, 0x6c: 0x6004, 0x6d: 0x6004, 0x6e: 0x6004, 0x6f: 0x6004, - 0x70: 0x6004, 0x71: 0x6004, 0x72: 0x6004, 0x73: 0x6004, 0x74: 0x6004, 0x75: 0x6004, - 0x76: 0x6004, 0x77: 0x6004, 0x78: 0x6004, 0x79: 0x6004, 0x7a: 0x6004, 0x7b: 0x6004, - 0x7c: 0x6004, 0x7d: 0x6004, 0x7e: 0x6004, - // Block 0x2, offset 0x80 - // Block 0x3, offset 0xc0 - 0xe1: 0x2000, 0xe2: 0x6005, 0xe3: 0x6005, - 0xe4: 0x2000, 0xe5: 0x6006, 0xe6: 0x6005, 0xe7: 0x2000, 0xe8: 0x2000, - 0xea: 0x2000, 0xec: 0x6007, 0xed: 0x2000, 0xee: 0x2000, 0xef: 0x6008, - 0xf0: 0x2000, 0xf1: 0x2000, 0xf2: 0x2000, 0xf3: 0x2000, 0xf4: 0x2000, - 0xf6: 0x2000, 0xf7: 0x2000, 0xf8: 0x2000, 0xf9: 0x2000, 0xfa: 0x2000, - 0xfc: 0x2000, 0xfd: 0x2000, 0xfe: 0x2000, 0xff: 0x2000, - // Block 0x4, offset 0x100 - 0x106: 0x2000, - 0x110: 0x2000, - 0x117: 0x2000, - 0x118: 0x2000, - 0x11e: 0x2000, 0x11f: 0x2000, 0x120: 0x2000, 0x121: 0x2000, - 0x126: 0x2000, 0x128: 0x2000, 0x129: 0x2000, - 0x12a: 0x2000, 0x12c: 0x2000, 0x12d: 0x2000, - 0x130: 0x2000, 0x132: 0x2000, 0x133: 0x2000, - 0x137: 0x2000, 0x138: 0x2000, 0x139: 0x2000, 0x13a: 0x2000, - 0x13c: 0x2000, 0x13e: 0x2000, - // Block 0x5, offset 0x140 - 0x141: 0x2000, - 0x151: 0x2000, - 0x153: 0x2000, - 0x15b: 0x2000, - 0x166: 0x2000, 0x167: 0x2000, - 0x16b: 0x2000, - 0x171: 0x2000, 0x172: 0x2000, 0x173: 0x2000, - 0x178: 0x2000, - 0x17f: 0x2000, - // Block 0x6, offset 0x180 - 0x180: 0x2000, 0x181: 0x2000, 0x182: 0x2000, 0x184: 0x2000, - 0x188: 0x2000, 0x189: 0x2000, 0x18a: 0x2000, 0x18b: 0x2000, - 0x18d: 0x2000, - 0x192: 0x2000, 0x193: 0x2000, - 0x1a6: 0x2000, 0x1a7: 0x2000, - 0x1ab: 0x2000, - // Block 0x7, offset 0x1c0 - 0x1ce: 0x2000, 0x1d0: 0x2000, - 0x1d2: 0x2000, 0x1d4: 0x2000, 0x1d6: 0x2000, - 0x1d8: 0x2000, 0x1da: 0x2000, 0x1dc: 0x2000, - // Block 0x8, offset 0x200 - 0x211: 0x2000, - 0x221: 0x2000, - // Block 0x9, offset 0x240 - 0x244: 0x2000, - 0x247: 0x2000, 0x249: 0x2000, 0x24a: 0x2000, 0x24b: 0x2000, - 0x24d: 0x2000, 0x250: 0x2000, - 0x258: 0x2000, 0x259: 0x2000, 0x25a: 0x2000, 0x25b: 0x2000, 0x25d: 0x2000, - 0x25f: 0x2000, - // Block 0xa, offset 0x280 - 0x280: 0x2000, 0x281: 0x2000, 0x282: 0x2000, 0x283: 0x2000, 0x284: 0x2000, 0x285: 0x2000, - 0x286: 0x2000, 0x287: 0x2000, 0x288: 0x2000, 0x289: 0x2000, 0x28a: 0x2000, 0x28b: 0x2000, - 0x28c: 0x2000, 0x28d: 0x2000, 0x28e: 0x2000, 0x28f: 0x2000, 0x290: 0x2000, 0x291: 0x2000, - 0x292: 0x2000, 0x293: 
0x2000, 0x294: 0x2000, 0x295: 0x2000, 0x296: 0x2000, 0x297: 0x2000, - 0x298: 0x2000, 0x299: 0x2000, 0x29a: 0x2000, 0x29b: 0x2000, 0x29c: 0x2000, 0x29d: 0x2000, - 0x29e: 0x2000, 0x29f: 0x2000, 0x2a0: 0x2000, 0x2a1: 0x2000, 0x2a2: 0x2000, 0x2a3: 0x2000, - 0x2a4: 0x2000, 0x2a5: 0x2000, 0x2a6: 0x2000, 0x2a7: 0x2000, 0x2a8: 0x2000, 0x2a9: 0x2000, - 0x2aa: 0x2000, 0x2ab: 0x2000, 0x2ac: 0x2000, 0x2ad: 0x2000, 0x2ae: 0x2000, 0x2af: 0x2000, - 0x2b0: 0x2000, 0x2b1: 0x2000, 0x2b2: 0x2000, 0x2b3: 0x2000, 0x2b4: 0x2000, 0x2b5: 0x2000, - 0x2b6: 0x2000, 0x2b7: 0x2000, 0x2b8: 0x2000, 0x2b9: 0x2000, 0x2ba: 0x2000, 0x2bb: 0x2000, - 0x2bc: 0x2000, 0x2bd: 0x2000, 0x2be: 0x2000, 0x2bf: 0x2000, - // Block 0xb, offset 0x2c0 - 0x2c0: 0x2000, 0x2c1: 0x2000, 0x2c2: 0x2000, 0x2c3: 0x2000, 0x2c4: 0x2000, 0x2c5: 0x2000, - 0x2c6: 0x2000, 0x2c7: 0x2000, 0x2c8: 0x2000, 0x2c9: 0x2000, 0x2ca: 0x2000, 0x2cb: 0x2000, - 0x2cc: 0x2000, 0x2cd: 0x2000, 0x2ce: 0x2000, 0x2cf: 0x2000, 0x2d0: 0x2000, 0x2d1: 0x2000, - 0x2d2: 0x2000, 0x2d3: 0x2000, 0x2d4: 0x2000, 0x2d5: 0x2000, 0x2d6: 0x2000, 0x2d7: 0x2000, - 0x2d8: 0x2000, 0x2d9: 0x2000, 0x2da: 0x2000, 0x2db: 0x2000, 0x2dc: 0x2000, 0x2dd: 0x2000, - 0x2de: 0x2000, 0x2df: 0x2000, 0x2e0: 0x2000, 0x2e1: 0x2000, 0x2e2: 0x2000, 0x2e3: 0x2000, - 0x2e4: 0x2000, 0x2e5: 0x2000, 0x2e6: 0x2000, 0x2e7: 0x2000, 0x2e8: 0x2000, 0x2e9: 0x2000, - 0x2ea: 0x2000, 0x2eb: 0x2000, 0x2ec: 0x2000, 0x2ed: 0x2000, 0x2ee: 0x2000, 0x2ef: 0x2000, - // Block 0xc, offset 0x300 - 0x311: 0x2000, - 0x312: 0x2000, 0x313: 0x2000, 0x314: 0x2000, 0x315: 0x2000, 0x316: 0x2000, 0x317: 0x2000, - 0x318: 0x2000, 0x319: 0x2000, 0x31a: 0x2000, 0x31b: 0x2000, 0x31c: 0x2000, 0x31d: 0x2000, - 0x31e: 0x2000, 0x31f: 0x2000, 0x320: 0x2000, 0x321: 0x2000, 0x323: 0x2000, - 0x324: 0x2000, 0x325: 0x2000, 0x326: 0x2000, 0x327: 0x2000, 0x328: 0x2000, 0x329: 0x2000, - 0x331: 0x2000, 0x332: 0x2000, 0x333: 0x2000, 0x334: 0x2000, 0x335: 0x2000, - 0x336: 0x2000, 0x337: 0x2000, 0x338: 0x2000, 0x339: 0x2000, 0x33a: 0x2000, 0x33b: 0x2000, - 0x33c: 0x2000, 0x33d: 0x2000, 0x33e: 0x2000, 0x33f: 0x2000, - // Block 0xd, offset 0x340 - 0x340: 0x2000, 0x341: 0x2000, 0x343: 0x2000, 0x344: 0x2000, 0x345: 0x2000, - 0x346: 0x2000, 0x347: 0x2000, 0x348: 0x2000, 0x349: 0x2000, - // Block 0xe, offset 0x380 - 0x381: 0x2000, - 0x390: 0x2000, 0x391: 0x2000, - 0x392: 0x2000, 0x393: 0x2000, 0x394: 0x2000, 0x395: 0x2000, 0x396: 0x2000, 0x397: 0x2000, - 0x398: 0x2000, 0x399: 0x2000, 0x39a: 0x2000, 0x39b: 0x2000, 0x39c: 0x2000, 0x39d: 0x2000, - 0x39e: 0x2000, 0x39f: 0x2000, 0x3a0: 0x2000, 0x3a1: 0x2000, 0x3a2: 0x2000, 0x3a3: 0x2000, - 0x3a4: 0x2000, 0x3a5: 0x2000, 0x3a6: 0x2000, 0x3a7: 0x2000, 0x3a8: 0x2000, 0x3a9: 0x2000, - 0x3aa: 0x2000, 0x3ab: 0x2000, 0x3ac: 0x2000, 0x3ad: 0x2000, 0x3ae: 0x2000, 0x3af: 0x2000, - 0x3b0: 0x2000, 0x3b1: 0x2000, 0x3b2: 0x2000, 0x3b3: 0x2000, 0x3b4: 0x2000, 0x3b5: 0x2000, - 0x3b6: 0x2000, 0x3b7: 0x2000, 0x3b8: 0x2000, 0x3b9: 0x2000, 0x3ba: 0x2000, 0x3bb: 0x2000, - 0x3bc: 0x2000, 0x3bd: 0x2000, 0x3be: 0x2000, 0x3bf: 0x2000, - // Block 0xf, offset 0x3c0 - 0x3c0: 0x2000, 0x3c1: 0x2000, 0x3c2: 0x2000, 0x3c3: 0x2000, 0x3c4: 0x2000, 0x3c5: 0x2000, - 0x3c6: 0x2000, 0x3c7: 0x2000, 0x3c8: 0x2000, 0x3c9: 0x2000, 0x3ca: 0x2000, 0x3cb: 0x2000, - 0x3cc: 0x2000, 0x3cd: 0x2000, 0x3ce: 0x2000, 0x3cf: 0x2000, 0x3d1: 0x2000, - // Block 0x10, offset 0x400 - 0x400: 0x4000, 0x401: 0x4000, 0x402: 0x4000, 0x403: 0x4000, 0x404: 0x4000, 0x405: 0x4000, - 0x406: 0x4000, 0x407: 0x4000, 0x408: 0x4000, 0x409: 0x4000, 0x40a: 0x4000, 0x40b: 0x4000, - 0x40c: 0x4000, 
0x40d: 0x4000, 0x40e: 0x4000, 0x40f: 0x4000, 0x410: 0x4000, 0x411: 0x4000, - 0x412: 0x4000, 0x413: 0x4000, 0x414: 0x4000, 0x415: 0x4000, 0x416: 0x4000, 0x417: 0x4000, - 0x418: 0x4000, 0x419: 0x4000, 0x41a: 0x4000, 0x41b: 0x4000, 0x41c: 0x4000, 0x41d: 0x4000, - 0x41e: 0x4000, 0x41f: 0x4000, 0x420: 0x4000, 0x421: 0x4000, 0x422: 0x4000, 0x423: 0x4000, - 0x424: 0x4000, 0x425: 0x4000, 0x426: 0x4000, 0x427: 0x4000, 0x428: 0x4000, 0x429: 0x4000, - 0x42a: 0x4000, 0x42b: 0x4000, 0x42c: 0x4000, 0x42d: 0x4000, 0x42e: 0x4000, 0x42f: 0x4000, - 0x430: 0x4000, 0x431: 0x4000, 0x432: 0x4000, 0x433: 0x4000, 0x434: 0x4000, 0x435: 0x4000, - 0x436: 0x4000, 0x437: 0x4000, 0x438: 0x4000, 0x439: 0x4000, 0x43a: 0x4000, 0x43b: 0x4000, - 0x43c: 0x4000, 0x43d: 0x4000, 0x43e: 0x4000, 0x43f: 0x4000, - // Block 0x11, offset 0x440 - 0x440: 0x4000, 0x441: 0x4000, 0x442: 0x4000, 0x443: 0x4000, 0x444: 0x4000, 0x445: 0x4000, - 0x446: 0x4000, 0x447: 0x4000, 0x448: 0x4000, 0x449: 0x4000, 0x44a: 0x4000, 0x44b: 0x4000, - 0x44c: 0x4000, 0x44d: 0x4000, 0x44e: 0x4000, 0x44f: 0x4000, 0x450: 0x4000, 0x451: 0x4000, - 0x452: 0x4000, 0x453: 0x4000, 0x454: 0x4000, 0x455: 0x4000, 0x456: 0x4000, 0x457: 0x4000, - 0x458: 0x4000, 0x459: 0x4000, 0x45a: 0x4000, 0x45b: 0x4000, 0x45c: 0x4000, 0x45d: 0x4000, - 0x45e: 0x4000, 0x45f: 0x4000, - // Block 0x12, offset 0x480 - 0x490: 0x2000, - 0x493: 0x2000, 0x494: 0x2000, 0x495: 0x2000, 0x496: 0x2000, - 0x498: 0x2000, 0x499: 0x2000, 0x49c: 0x2000, 0x49d: 0x2000, - 0x4a0: 0x2000, 0x4a1: 0x2000, 0x4a2: 0x2000, - 0x4a4: 0x2000, 0x4a5: 0x2000, 0x4a6: 0x2000, 0x4a7: 0x2000, - 0x4b0: 0x2000, 0x4b2: 0x2000, 0x4b3: 0x2000, 0x4b5: 0x2000, - 0x4bb: 0x2000, - 0x4be: 0x2000, - // Block 0x13, offset 0x4c0 - 0x4f4: 0x2000, - 0x4ff: 0x2000, - // Block 0x14, offset 0x500 - 0x501: 0x2000, 0x502: 0x2000, 0x503: 0x2000, 0x504: 0x2000, - 0x529: 0xa009, - 0x52c: 0x2000, - // Block 0x15, offset 0x540 - 0x543: 0x2000, 0x545: 0x2000, - 0x549: 0x2000, - 0x553: 0x2000, 0x556: 0x2000, - 0x561: 0x2000, 0x562: 0x2000, - 0x566: 0x2000, - 0x56b: 0x2000, - // Block 0x16, offset 0x580 - 0x593: 0x2000, 0x594: 0x2000, - 0x59b: 0x2000, 0x59c: 0x2000, 0x59d: 0x2000, - 0x59e: 0x2000, 0x5a0: 0x2000, 0x5a1: 0x2000, 0x5a2: 0x2000, 0x5a3: 0x2000, - 0x5a4: 0x2000, 0x5a5: 0x2000, 0x5a6: 0x2000, 0x5a7: 0x2000, 0x5a8: 0x2000, 0x5a9: 0x2000, - 0x5aa: 0x2000, 0x5ab: 0x2000, - 0x5b0: 0x2000, 0x5b1: 0x2000, 0x5b2: 0x2000, 0x5b3: 0x2000, 0x5b4: 0x2000, 0x5b5: 0x2000, - 0x5b6: 0x2000, 0x5b7: 0x2000, 0x5b8: 0x2000, 0x5b9: 0x2000, - // Block 0x17, offset 0x5c0 - 0x5c9: 0x2000, - 0x5d0: 0x200a, 0x5d1: 0x200b, - 0x5d2: 0x200a, 0x5d3: 0x200c, 0x5d4: 0x2000, 0x5d5: 0x2000, 0x5d6: 0x2000, 0x5d7: 0x2000, - 0x5d8: 0x2000, 0x5d9: 0x2000, - 0x5f8: 0x2000, 0x5f9: 0x2000, - // Block 0x18, offset 0x600 - 0x612: 0x2000, 0x614: 0x2000, - 0x627: 0x2000, - // Block 0x19, offset 0x640 - 0x640: 0x2000, 0x642: 0x2000, 0x643: 0x2000, - 0x647: 0x2000, 0x648: 0x2000, 0x64b: 0x2000, - 0x64f: 0x2000, 0x651: 0x2000, - 0x655: 0x2000, - 0x65a: 0x2000, 0x65d: 0x2000, - 0x65e: 0x2000, 0x65f: 0x2000, 0x660: 0x2000, 0x663: 0x2000, - 0x665: 0x2000, 0x667: 0x2000, 0x668: 0x2000, 0x669: 0x2000, - 0x66a: 0x2000, 0x66b: 0x2000, 0x66c: 0x2000, 0x66e: 0x2000, - 0x674: 0x2000, 0x675: 0x2000, - 0x676: 0x2000, 0x677: 0x2000, - 0x67c: 0x2000, 0x67d: 0x2000, - // Block 0x1a, offset 0x680 - 0x688: 0x2000, - 0x68c: 0x2000, - 0x692: 0x2000, - 0x6a0: 0x2000, 0x6a1: 0x2000, - 0x6a4: 0x2000, 0x6a5: 0x2000, 0x6a6: 0x2000, 0x6a7: 0x2000, - 0x6aa: 0x2000, 0x6ab: 0x2000, 0x6ae: 0x2000, 0x6af: 0x2000, - // 
Block 0x1b, offset 0x6c0 - 0x6c2: 0x2000, 0x6c3: 0x2000, - 0x6c6: 0x2000, 0x6c7: 0x2000, - 0x6d5: 0x2000, - 0x6d9: 0x2000, - 0x6e5: 0x2000, - 0x6ff: 0x2000, - // Block 0x1c, offset 0x700 - 0x712: 0x2000, - 0x71a: 0x4000, 0x71b: 0x4000, - 0x729: 0x4000, - 0x72a: 0x4000, - // Block 0x1d, offset 0x740 - 0x769: 0x4000, - 0x76a: 0x4000, 0x76b: 0x4000, 0x76c: 0x4000, - 0x770: 0x4000, 0x773: 0x4000, - // Block 0x1e, offset 0x780 - 0x7a0: 0x2000, 0x7a1: 0x2000, 0x7a2: 0x2000, 0x7a3: 0x2000, - 0x7a4: 0x2000, 0x7a5: 0x2000, 0x7a6: 0x2000, 0x7a7: 0x2000, 0x7a8: 0x2000, 0x7a9: 0x2000, - 0x7aa: 0x2000, 0x7ab: 0x2000, 0x7ac: 0x2000, 0x7ad: 0x2000, 0x7ae: 0x2000, 0x7af: 0x2000, - 0x7b0: 0x2000, 0x7b1: 0x2000, 0x7b2: 0x2000, 0x7b3: 0x2000, 0x7b4: 0x2000, 0x7b5: 0x2000, - 0x7b6: 0x2000, 0x7b7: 0x2000, 0x7b8: 0x2000, 0x7b9: 0x2000, 0x7ba: 0x2000, 0x7bb: 0x2000, - 0x7bc: 0x2000, 0x7bd: 0x2000, 0x7be: 0x2000, 0x7bf: 0x2000, - // Block 0x1f, offset 0x7c0 - 0x7c0: 0x2000, 0x7c1: 0x2000, 0x7c2: 0x2000, 0x7c3: 0x2000, 0x7c4: 0x2000, 0x7c5: 0x2000, - 0x7c6: 0x2000, 0x7c7: 0x2000, 0x7c8: 0x2000, 0x7c9: 0x2000, 0x7ca: 0x2000, 0x7cb: 0x2000, - 0x7cc: 0x2000, 0x7cd: 0x2000, 0x7ce: 0x2000, 0x7cf: 0x2000, 0x7d0: 0x2000, 0x7d1: 0x2000, - 0x7d2: 0x2000, 0x7d3: 0x2000, 0x7d4: 0x2000, 0x7d5: 0x2000, 0x7d6: 0x2000, 0x7d7: 0x2000, - 0x7d8: 0x2000, 0x7d9: 0x2000, 0x7da: 0x2000, 0x7db: 0x2000, 0x7dc: 0x2000, 0x7dd: 0x2000, - 0x7de: 0x2000, 0x7df: 0x2000, 0x7e0: 0x2000, 0x7e1: 0x2000, 0x7e2: 0x2000, 0x7e3: 0x2000, - 0x7e4: 0x2000, 0x7e5: 0x2000, 0x7e6: 0x2000, 0x7e7: 0x2000, 0x7e8: 0x2000, 0x7e9: 0x2000, - 0x7eb: 0x2000, 0x7ec: 0x2000, 0x7ed: 0x2000, 0x7ee: 0x2000, 0x7ef: 0x2000, - 0x7f0: 0x2000, 0x7f1: 0x2000, 0x7f2: 0x2000, 0x7f3: 0x2000, 0x7f4: 0x2000, 0x7f5: 0x2000, - 0x7f6: 0x2000, 0x7f7: 0x2000, 0x7f8: 0x2000, 0x7f9: 0x2000, 0x7fa: 0x2000, 0x7fb: 0x2000, - 0x7fc: 0x2000, 0x7fd: 0x2000, 0x7fe: 0x2000, 0x7ff: 0x2000, - // Block 0x20, offset 0x800 - 0x800: 0x2000, 0x801: 0x2000, 0x802: 0x200d, 0x803: 0x2000, 0x804: 0x2000, 0x805: 0x2000, - 0x806: 0x2000, 0x807: 0x2000, 0x808: 0x2000, 0x809: 0x2000, 0x80a: 0x2000, 0x80b: 0x2000, - 0x80c: 0x2000, 0x80d: 0x2000, 0x80e: 0x2000, 0x80f: 0x2000, 0x810: 0x2000, 0x811: 0x2000, - 0x812: 0x2000, 0x813: 0x2000, 0x814: 0x2000, 0x815: 0x2000, 0x816: 0x2000, 0x817: 0x2000, - 0x818: 0x2000, 0x819: 0x2000, 0x81a: 0x2000, 0x81b: 0x2000, 0x81c: 0x2000, 0x81d: 0x2000, - 0x81e: 0x2000, 0x81f: 0x2000, 0x820: 0x2000, 0x821: 0x2000, 0x822: 0x2000, 0x823: 0x2000, - 0x824: 0x2000, 0x825: 0x2000, 0x826: 0x2000, 0x827: 0x2000, 0x828: 0x2000, 0x829: 0x2000, - 0x82a: 0x2000, 0x82b: 0x2000, 0x82c: 0x2000, 0x82d: 0x2000, 0x82e: 0x2000, 0x82f: 0x2000, - 0x830: 0x2000, 0x831: 0x2000, 0x832: 0x2000, 0x833: 0x2000, 0x834: 0x2000, 0x835: 0x2000, - 0x836: 0x2000, 0x837: 0x2000, 0x838: 0x2000, 0x839: 0x2000, 0x83a: 0x2000, 0x83b: 0x2000, - 0x83c: 0x2000, 0x83d: 0x2000, 0x83e: 0x2000, 0x83f: 0x2000, - // Block 0x21, offset 0x840 - 0x840: 0x2000, 0x841: 0x2000, 0x842: 0x2000, 0x843: 0x2000, 0x844: 0x2000, 0x845: 0x2000, - 0x846: 0x2000, 0x847: 0x2000, 0x848: 0x2000, 0x849: 0x2000, 0x84a: 0x2000, 0x84b: 0x2000, - 0x850: 0x2000, 0x851: 0x2000, - 0x852: 0x2000, 0x853: 0x2000, 0x854: 0x2000, 0x855: 0x2000, 0x856: 0x2000, 0x857: 0x2000, - 0x858: 0x2000, 0x859: 0x2000, 0x85a: 0x2000, 0x85b: 0x2000, 0x85c: 0x2000, 0x85d: 0x2000, - 0x85e: 0x2000, 0x85f: 0x2000, 0x860: 0x2000, 0x861: 0x2000, 0x862: 0x2000, 0x863: 0x2000, - 0x864: 0x2000, 0x865: 0x2000, 0x866: 0x2000, 0x867: 0x2000, 0x868: 0x2000, 0x869: 0x2000, - 0x86a: 
0x2000, 0x86b: 0x2000, 0x86c: 0x2000, 0x86d: 0x2000, 0x86e: 0x2000, 0x86f: 0x2000, - 0x870: 0x2000, 0x871: 0x2000, 0x872: 0x2000, 0x873: 0x2000, - // Block 0x22, offset 0x880 - 0x880: 0x2000, 0x881: 0x2000, 0x882: 0x2000, 0x883: 0x2000, 0x884: 0x2000, 0x885: 0x2000, - 0x886: 0x2000, 0x887: 0x2000, 0x888: 0x2000, 0x889: 0x2000, 0x88a: 0x2000, 0x88b: 0x2000, - 0x88c: 0x2000, 0x88d: 0x2000, 0x88e: 0x2000, 0x88f: 0x2000, - 0x892: 0x2000, 0x893: 0x2000, 0x894: 0x2000, 0x895: 0x2000, - 0x8a0: 0x200e, 0x8a1: 0x2000, 0x8a3: 0x2000, - 0x8a4: 0x2000, 0x8a5: 0x2000, 0x8a6: 0x2000, 0x8a7: 0x2000, 0x8a8: 0x2000, 0x8a9: 0x2000, - 0x8b2: 0x2000, 0x8b3: 0x2000, - 0x8b6: 0x2000, 0x8b7: 0x2000, - 0x8bc: 0x2000, 0x8bd: 0x2000, - // Block 0x23, offset 0x8c0 - 0x8c0: 0x2000, 0x8c1: 0x2000, - 0x8c6: 0x2000, 0x8c7: 0x2000, 0x8c8: 0x2000, 0x8cb: 0x200f, - 0x8ce: 0x2000, 0x8cf: 0x2000, 0x8d0: 0x2000, 0x8d1: 0x2000, - 0x8e2: 0x2000, 0x8e3: 0x2000, - 0x8e4: 0x2000, 0x8e5: 0x2000, - 0x8ef: 0x2000, - 0x8fd: 0x4000, 0x8fe: 0x4000, - // Block 0x24, offset 0x900 - 0x905: 0x2000, - 0x906: 0x2000, 0x909: 0x2000, - 0x90e: 0x2000, 0x90f: 0x2000, - 0x914: 0x4000, 0x915: 0x4000, - 0x91c: 0x2000, - 0x91e: 0x2000, - // Block 0x25, offset 0x940 - 0x940: 0x2000, 0x942: 0x2000, - 0x948: 0x4000, 0x949: 0x4000, 0x94a: 0x4000, 0x94b: 0x4000, - 0x94c: 0x4000, 0x94d: 0x4000, 0x94e: 0x4000, 0x94f: 0x4000, 0x950: 0x4000, 0x951: 0x4000, - 0x952: 0x4000, 0x953: 0x4000, - 0x960: 0x2000, 0x961: 0x2000, 0x963: 0x2000, - 0x964: 0x2000, 0x965: 0x2000, 0x967: 0x2000, 0x968: 0x2000, 0x969: 0x2000, - 0x96a: 0x2000, 0x96c: 0x2000, 0x96d: 0x2000, 0x96f: 0x2000, - 0x97f: 0x4000, - // Block 0x26, offset 0x980 - 0x993: 0x4000, - 0x99e: 0x2000, 0x99f: 0x2000, 0x9a1: 0x4000, - 0x9aa: 0x4000, 0x9ab: 0x4000, - 0x9bd: 0x4000, 0x9be: 0x4000, 0x9bf: 0x2000, - // Block 0x27, offset 0x9c0 - 0x9c4: 0x4000, 0x9c5: 0x4000, - 0x9c6: 0x2000, 0x9c7: 0x2000, 0x9c8: 0x2000, 0x9c9: 0x2000, 0x9ca: 0x2000, 0x9cb: 0x2000, - 0x9cc: 0x2000, 0x9cd: 0x2000, 0x9ce: 0x4000, 0x9cf: 0x2000, 0x9d0: 0x2000, 0x9d1: 0x2000, - 0x9d2: 0x2000, 0x9d3: 0x2000, 0x9d4: 0x4000, 0x9d5: 0x2000, 0x9d6: 0x2000, 0x9d7: 0x2000, - 0x9d8: 0x2000, 0x9d9: 0x2000, 0x9da: 0x2000, 0x9db: 0x2000, 0x9dc: 0x2000, 0x9dd: 0x2000, - 0x9de: 0x2000, 0x9df: 0x2000, 0x9e0: 0x2000, 0x9e1: 0x2000, 0x9e3: 0x2000, - 0x9e8: 0x2000, 0x9e9: 0x2000, - 0x9ea: 0x4000, 0x9eb: 0x2000, 0x9ec: 0x2000, 0x9ed: 0x2000, 0x9ee: 0x2000, 0x9ef: 0x2000, - 0x9f0: 0x2000, 0x9f1: 0x2000, 0x9f2: 0x4000, 0x9f3: 0x4000, 0x9f4: 0x2000, 0x9f5: 0x4000, - 0x9f6: 0x2000, 0x9f7: 0x2000, 0x9f8: 0x2000, 0x9f9: 0x2000, 0x9fa: 0x4000, 0x9fb: 0x2000, - 0x9fc: 0x2000, 0x9fd: 0x4000, 0x9fe: 0x2000, 0x9ff: 0x2000, - // Block 0x28, offset 0xa00 - 0xa05: 0x4000, - 0xa0a: 0x4000, 0xa0b: 0x4000, - 0xa28: 0x4000, - 0xa3d: 0x2000, - // Block 0x29, offset 0xa40 - 0xa4c: 0x4000, 0xa4e: 0x4000, - 0xa53: 0x4000, 0xa54: 0x4000, 0xa55: 0x4000, 0xa57: 0x4000, - 0xa76: 0x2000, 0xa77: 0x2000, 0xa78: 0x2000, 0xa79: 0x2000, 0xa7a: 0x2000, 0xa7b: 0x2000, - 0xa7c: 0x2000, 0xa7d: 0x2000, 0xa7e: 0x2000, 0xa7f: 0x2000, - // Block 0x2a, offset 0xa80 - 0xa95: 0x4000, 0xa96: 0x4000, 0xa97: 0x4000, - 0xab0: 0x4000, - 0xabf: 0x4000, - // Block 0x2b, offset 0xac0 - 0xae6: 0x6000, 0xae7: 0x6000, 0xae8: 0x6000, 0xae9: 0x6000, - 0xaea: 0x6000, 0xaeb: 0x6000, 0xaec: 0x6000, 0xaed: 0x6000, - // Block 0x2c, offset 0xb00 - 0xb05: 0x6010, - 0xb06: 0x6011, - // Block 0x2d, offset 0xb40 - 0xb5b: 0x4000, 0xb5c: 0x4000, - // Block 0x2e, offset 0xb80 - 0xb90: 0x4000, - 0xb95: 0x4000, 0xb96: 
0x2000, 0xb97: 0x2000, - 0xb98: 0x2000, 0xb99: 0x2000, - // Block 0x2f, offset 0xbc0 - 0xbc0: 0x4000, 0xbc1: 0x4000, 0xbc2: 0x4000, 0xbc3: 0x4000, 0xbc4: 0x4000, 0xbc5: 0x4000, - 0xbc6: 0x4000, 0xbc7: 0x4000, 0xbc8: 0x4000, 0xbc9: 0x4000, 0xbca: 0x4000, 0xbcb: 0x4000, - 0xbcc: 0x4000, 0xbcd: 0x4000, 0xbce: 0x4000, 0xbcf: 0x4000, 0xbd0: 0x4000, 0xbd1: 0x4000, - 0xbd2: 0x4000, 0xbd3: 0x4000, 0xbd4: 0x4000, 0xbd5: 0x4000, 0xbd6: 0x4000, 0xbd7: 0x4000, - 0xbd8: 0x4000, 0xbd9: 0x4000, 0xbdb: 0x4000, 0xbdc: 0x4000, 0xbdd: 0x4000, - 0xbde: 0x4000, 0xbdf: 0x4000, 0xbe0: 0x4000, 0xbe1: 0x4000, 0xbe2: 0x4000, 0xbe3: 0x4000, - 0xbe4: 0x4000, 0xbe5: 0x4000, 0xbe6: 0x4000, 0xbe7: 0x4000, 0xbe8: 0x4000, 0xbe9: 0x4000, - 0xbea: 0x4000, 0xbeb: 0x4000, 0xbec: 0x4000, 0xbed: 0x4000, 0xbee: 0x4000, 0xbef: 0x4000, - 0xbf0: 0x4000, 0xbf1: 0x4000, 0xbf2: 0x4000, 0xbf3: 0x4000, 0xbf4: 0x4000, 0xbf5: 0x4000, - 0xbf6: 0x4000, 0xbf7: 0x4000, 0xbf8: 0x4000, 0xbf9: 0x4000, 0xbfa: 0x4000, 0xbfb: 0x4000, - 0xbfc: 0x4000, 0xbfd: 0x4000, 0xbfe: 0x4000, 0xbff: 0x4000, - // Block 0x30, offset 0xc00 - 0xc00: 0x4000, 0xc01: 0x4000, 0xc02: 0x4000, 0xc03: 0x4000, 0xc04: 0x4000, 0xc05: 0x4000, - 0xc06: 0x4000, 0xc07: 0x4000, 0xc08: 0x4000, 0xc09: 0x4000, 0xc0a: 0x4000, 0xc0b: 0x4000, - 0xc0c: 0x4000, 0xc0d: 0x4000, 0xc0e: 0x4000, 0xc0f: 0x4000, 0xc10: 0x4000, 0xc11: 0x4000, - 0xc12: 0x4000, 0xc13: 0x4000, 0xc14: 0x4000, 0xc15: 0x4000, 0xc16: 0x4000, 0xc17: 0x4000, - 0xc18: 0x4000, 0xc19: 0x4000, 0xc1a: 0x4000, 0xc1b: 0x4000, 0xc1c: 0x4000, 0xc1d: 0x4000, - 0xc1e: 0x4000, 0xc1f: 0x4000, 0xc20: 0x4000, 0xc21: 0x4000, 0xc22: 0x4000, 0xc23: 0x4000, - 0xc24: 0x4000, 0xc25: 0x4000, 0xc26: 0x4000, 0xc27: 0x4000, 0xc28: 0x4000, 0xc29: 0x4000, - 0xc2a: 0x4000, 0xc2b: 0x4000, 0xc2c: 0x4000, 0xc2d: 0x4000, 0xc2e: 0x4000, 0xc2f: 0x4000, - 0xc30: 0x4000, 0xc31: 0x4000, 0xc32: 0x4000, 0xc33: 0x4000, - // Block 0x31, offset 0xc40 - 0xc40: 0x4000, 0xc41: 0x4000, 0xc42: 0x4000, 0xc43: 0x4000, 0xc44: 0x4000, 0xc45: 0x4000, - 0xc46: 0x4000, 0xc47: 0x4000, 0xc48: 0x4000, 0xc49: 0x4000, 0xc4a: 0x4000, 0xc4b: 0x4000, - 0xc4c: 0x4000, 0xc4d: 0x4000, 0xc4e: 0x4000, 0xc4f: 0x4000, 0xc50: 0x4000, 0xc51: 0x4000, - 0xc52: 0x4000, 0xc53: 0x4000, 0xc54: 0x4000, 0xc55: 0x4000, - 0xc70: 0x4000, 0xc71: 0x4000, 0xc72: 0x4000, 0xc73: 0x4000, 0xc74: 0x4000, 0xc75: 0x4000, - 0xc76: 0x4000, 0xc77: 0x4000, 0xc78: 0x4000, 0xc79: 0x4000, 0xc7a: 0x4000, 0xc7b: 0x4000, - // Block 0x32, offset 0xc80 - 0xc80: 0x9012, 0xc81: 0x4013, 0xc82: 0x4014, 0xc83: 0x4000, 0xc84: 0x4000, 0xc85: 0x4000, - 0xc86: 0x4000, 0xc87: 0x4000, 0xc88: 0x4000, 0xc89: 0x4000, 0xc8a: 0x4000, 0xc8b: 0x4000, - 0xc8c: 0x4015, 0xc8d: 0x4015, 0xc8e: 0x4000, 0xc8f: 0x4000, 0xc90: 0x4000, 0xc91: 0x4000, - 0xc92: 0x4000, 0xc93: 0x4000, 0xc94: 0x4000, 0xc95: 0x4000, 0xc96: 0x4000, 0xc97: 0x4000, - 0xc98: 0x4000, 0xc99: 0x4000, 0xc9a: 0x4000, 0xc9b: 0x4000, 0xc9c: 0x4000, 0xc9d: 0x4000, - 0xc9e: 0x4000, 0xc9f: 0x4000, 0xca0: 0x4000, 0xca1: 0x4000, 0xca2: 0x4000, 0xca3: 0x4000, - 0xca4: 0x4000, 0xca5: 0x4000, 0xca6: 0x4000, 0xca7: 0x4000, 0xca8: 0x4000, 0xca9: 0x4000, - 0xcaa: 0x4000, 0xcab: 0x4000, 0xcac: 0x4000, 0xcad: 0x4000, 0xcae: 0x4000, 0xcaf: 0x4000, - 0xcb0: 0x4000, 0xcb1: 0x4000, 0xcb2: 0x4000, 0xcb3: 0x4000, 0xcb4: 0x4000, 0xcb5: 0x4000, - 0xcb6: 0x4000, 0xcb7: 0x4000, 0xcb8: 0x4000, 0xcb9: 0x4000, 0xcba: 0x4000, 0xcbb: 0x4000, - 0xcbc: 0x4000, 0xcbd: 0x4000, 0xcbe: 0x4000, - // Block 0x33, offset 0xcc0 - 0xcc1: 0x4000, 0xcc2: 0x4000, 0xcc3: 0x4000, 0xcc4: 0x4000, 0xcc5: 0x4000, - 0xcc6: 0x4000, 
0xcc7: 0x4000, 0xcc8: 0x4000, 0xcc9: 0x4000, 0xcca: 0x4000, 0xccb: 0x4000, - 0xccc: 0x4000, 0xccd: 0x4000, 0xcce: 0x4000, 0xccf: 0x4000, 0xcd0: 0x4000, 0xcd1: 0x4000, - 0xcd2: 0x4000, 0xcd3: 0x4000, 0xcd4: 0x4000, 0xcd5: 0x4000, 0xcd6: 0x4000, 0xcd7: 0x4000, - 0xcd8: 0x4000, 0xcd9: 0x4000, 0xcda: 0x4000, 0xcdb: 0x4000, 0xcdc: 0x4000, 0xcdd: 0x4000, - 0xcde: 0x4000, 0xcdf: 0x4000, 0xce0: 0x4000, 0xce1: 0x4000, 0xce2: 0x4000, 0xce3: 0x4000, - 0xce4: 0x4000, 0xce5: 0x4000, 0xce6: 0x4000, 0xce7: 0x4000, 0xce8: 0x4000, 0xce9: 0x4000, - 0xcea: 0x4000, 0xceb: 0x4000, 0xcec: 0x4000, 0xced: 0x4000, 0xcee: 0x4000, 0xcef: 0x4000, - 0xcf0: 0x4000, 0xcf1: 0x4000, 0xcf2: 0x4000, 0xcf3: 0x4000, 0xcf4: 0x4000, 0xcf5: 0x4000, - 0xcf6: 0x4000, 0xcf7: 0x4000, 0xcf8: 0x4000, 0xcf9: 0x4000, 0xcfa: 0x4000, 0xcfb: 0x4000, - 0xcfc: 0x4000, 0xcfd: 0x4000, 0xcfe: 0x4000, 0xcff: 0x4000, - // Block 0x34, offset 0xd00 - 0xd00: 0x4000, 0xd01: 0x4000, 0xd02: 0x4000, 0xd03: 0x4000, 0xd04: 0x4000, 0xd05: 0x4000, - 0xd06: 0x4000, 0xd07: 0x4000, 0xd08: 0x4000, 0xd09: 0x4000, 0xd0a: 0x4000, 0xd0b: 0x4000, - 0xd0c: 0x4000, 0xd0d: 0x4000, 0xd0e: 0x4000, 0xd0f: 0x4000, 0xd10: 0x4000, 0xd11: 0x4000, - 0xd12: 0x4000, 0xd13: 0x4000, 0xd14: 0x4000, 0xd15: 0x4000, 0xd16: 0x4000, - 0xd19: 0x4016, 0xd1a: 0x4017, 0xd1b: 0x4000, 0xd1c: 0x4000, 0xd1d: 0x4000, - 0xd1e: 0x4000, 0xd1f: 0x4000, 0xd20: 0x4000, 0xd21: 0x4018, 0xd22: 0x4019, 0xd23: 0x401a, - 0xd24: 0x401b, 0xd25: 0x401c, 0xd26: 0x401d, 0xd27: 0x401e, 0xd28: 0x401f, 0xd29: 0x4020, - 0xd2a: 0x4021, 0xd2b: 0x4022, 0xd2c: 0x4000, 0xd2d: 0x4010, 0xd2e: 0x4000, 0xd2f: 0x4023, - 0xd30: 0x4000, 0xd31: 0x4024, 0xd32: 0x4000, 0xd33: 0x4025, 0xd34: 0x4000, 0xd35: 0x4026, - 0xd36: 0x4000, 0xd37: 0x401a, 0xd38: 0x4000, 0xd39: 0x4027, 0xd3a: 0x4000, 0xd3b: 0x4028, - 0xd3c: 0x4000, 0xd3d: 0x4020, 0xd3e: 0x4000, 0xd3f: 0x4029, - // Block 0x35, offset 0xd40 - 0xd40: 0x4000, 0xd41: 0x402a, 0xd42: 0x4000, 0xd43: 0x402b, 0xd44: 0x402c, 0xd45: 0x4000, - 0xd46: 0x4017, 0xd47: 0x4000, 0xd48: 0x402d, 0xd49: 0x4000, 0xd4a: 0x402e, 0xd4b: 0x402f, - 0xd4c: 0x4030, 0xd4d: 0x4017, 0xd4e: 0x4016, 0xd4f: 0x4017, 0xd50: 0x4000, 0xd51: 0x4000, - 0xd52: 0x4031, 0xd53: 0x4000, 0xd54: 0x4000, 0xd55: 0x4031, 0xd56: 0x4000, 0xd57: 0x4000, - 0xd58: 0x4032, 0xd59: 0x4000, 0xd5a: 0x4000, 0xd5b: 0x4032, 0xd5c: 0x4000, 0xd5d: 0x4000, - 0xd5e: 0x4033, 0xd5f: 0x402e, 0xd60: 0x4034, 0xd61: 0x4035, 0xd62: 0x4034, 0xd63: 0x4036, - 0xd64: 0x4037, 0xd65: 0x4024, 0xd66: 0x4035, 0xd67: 0x4025, 0xd68: 0x4038, 0xd69: 0x4038, - 0xd6a: 0x4039, 0xd6b: 0x4039, 0xd6c: 0x403a, 0xd6d: 0x403a, 0xd6e: 0x4000, 0xd6f: 0x4035, - 0xd70: 0x4000, 0xd71: 0x4000, 0xd72: 0x403b, 0xd73: 0x403c, 0xd74: 0x4000, 0xd75: 0x4000, - 0xd76: 0x4000, 0xd77: 0x4000, 0xd78: 0x4000, 0xd79: 0x4000, 0xd7a: 0x4000, 0xd7b: 0x403d, - 0xd7c: 0x401c, 0xd7d: 0x4000, 0xd7e: 0x4000, 0xd7f: 0x4000, - // Block 0x36, offset 0xd80 - 0xd85: 0x4000, - 0xd86: 0x4000, 0xd87: 0x4000, 0xd88: 0x4000, 0xd89: 0x4000, 0xd8a: 0x4000, 0xd8b: 0x4000, - 0xd8c: 0x4000, 0xd8d: 0x4000, 0xd8e: 0x4000, 0xd8f: 0x4000, 0xd90: 0x4000, 0xd91: 0x4000, - 0xd92: 0x4000, 0xd93: 0x4000, 0xd94: 0x4000, 0xd95: 0x4000, 0xd96: 0x4000, 0xd97: 0x4000, - 0xd98: 0x4000, 0xd99: 0x4000, 0xd9a: 0x4000, 0xd9b: 0x4000, 0xd9c: 0x4000, 0xd9d: 0x4000, - 0xd9e: 0x4000, 0xd9f: 0x4000, 0xda0: 0x4000, 0xda1: 0x4000, 0xda2: 0x4000, 0xda3: 0x4000, - 0xda4: 0x4000, 0xda5: 0x4000, 0xda6: 0x4000, 0xda7: 0x4000, 0xda8: 0x4000, 0xda9: 0x4000, - 0xdaa: 0x4000, 0xdab: 0x4000, 0xdac: 0x4000, 0xdad: 0x4000, 0xdae: 0x4000, 0xdaf: 
0x4000, - 0xdb1: 0x403e, 0xdb2: 0x403e, 0xdb3: 0x403e, 0xdb4: 0x403e, 0xdb5: 0x403e, - 0xdb6: 0x403e, 0xdb7: 0x403e, 0xdb8: 0x403e, 0xdb9: 0x403e, 0xdba: 0x403e, 0xdbb: 0x403e, - 0xdbc: 0x403e, 0xdbd: 0x403e, 0xdbe: 0x403e, 0xdbf: 0x403e, - // Block 0x37, offset 0xdc0 - 0xdc0: 0x4037, 0xdc1: 0x4037, 0xdc2: 0x4037, 0xdc3: 0x4037, 0xdc4: 0x4037, 0xdc5: 0x4037, - 0xdc6: 0x4037, 0xdc7: 0x4037, 0xdc8: 0x4037, 0xdc9: 0x4037, 0xdca: 0x4037, 0xdcb: 0x4037, - 0xdcc: 0x4037, 0xdcd: 0x4037, 0xdce: 0x4037, 0xdcf: 0x400e, 0xdd0: 0x403f, 0xdd1: 0x4040, - 0xdd2: 0x4041, 0xdd3: 0x4040, 0xdd4: 0x403f, 0xdd5: 0x4042, 0xdd6: 0x4043, 0xdd7: 0x4044, - 0xdd8: 0x4040, 0xdd9: 0x4041, 0xdda: 0x4040, 0xddb: 0x4045, 0xddc: 0x4009, 0xddd: 0x4045, - 0xdde: 0x4046, 0xddf: 0x4045, 0xde0: 0x4047, 0xde1: 0x400b, 0xde2: 0x400a, 0xde3: 0x400c, - 0xde4: 0x4048, 0xde5: 0x4000, 0xde6: 0x4000, 0xde7: 0x4000, 0xde8: 0x4000, 0xde9: 0x4000, - 0xdea: 0x4000, 0xdeb: 0x4000, 0xdec: 0x4000, 0xded: 0x4000, 0xdee: 0x4000, 0xdef: 0x4000, - 0xdf0: 0x4000, 0xdf1: 0x4000, 0xdf2: 0x4000, 0xdf3: 0x4000, 0xdf4: 0x4000, 0xdf5: 0x4000, - 0xdf6: 0x4000, 0xdf7: 0x4000, 0xdf8: 0x4000, 0xdf9: 0x4000, 0xdfa: 0x4000, 0xdfb: 0x4000, - 0xdfc: 0x4000, 0xdfd: 0x4000, 0xdfe: 0x4000, 0xdff: 0x4000, - // Block 0x38, offset 0xe00 - 0xe00: 0x4000, 0xe01: 0x4000, 0xe02: 0x4000, 0xe03: 0x4000, 0xe04: 0x4000, 0xe05: 0x4000, - 0xe06: 0x4000, 0xe07: 0x4000, 0xe08: 0x4000, 0xe09: 0x4000, 0xe0a: 0x4000, 0xe0b: 0x4000, - 0xe0c: 0x4000, 0xe0d: 0x4000, 0xe0e: 0x4000, 0xe10: 0x4000, 0xe11: 0x4000, - 0xe12: 0x4000, 0xe13: 0x4000, 0xe14: 0x4000, 0xe15: 0x4000, 0xe16: 0x4000, 0xe17: 0x4000, - 0xe18: 0x4000, 0xe19: 0x4000, 0xe1a: 0x4000, 0xe1b: 0x4000, 0xe1c: 0x4000, 0xe1d: 0x4000, - 0xe1e: 0x4000, 0xe1f: 0x4000, 0xe20: 0x4000, 0xe21: 0x4000, 0xe22: 0x4000, 0xe23: 0x4000, - 0xe24: 0x4000, 0xe25: 0x4000, 0xe26: 0x4000, 0xe27: 0x4000, 0xe28: 0x4000, 0xe29: 0x4000, - 0xe2a: 0x4000, 0xe2b: 0x4000, 0xe2c: 0x4000, 0xe2d: 0x4000, 0xe2e: 0x4000, 0xe2f: 0x4000, - 0xe30: 0x4000, 0xe31: 0x4000, 0xe32: 0x4000, 0xe33: 0x4000, 0xe34: 0x4000, 0xe35: 0x4000, - 0xe36: 0x4000, 0xe37: 0x4000, 0xe38: 0x4000, 0xe39: 0x4000, 0xe3a: 0x4000, - // Block 0x39, offset 0xe40 - 0xe40: 0x4000, 0xe41: 0x4000, 0xe42: 0x4000, 0xe43: 0x4000, 0xe44: 0x4000, 0xe45: 0x4000, - 0xe46: 0x4000, 0xe47: 0x4000, 0xe48: 0x4000, 0xe49: 0x4000, 0xe4a: 0x4000, 0xe4b: 0x4000, - 0xe4c: 0x4000, 0xe4d: 0x4000, 0xe4e: 0x4000, 0xe4f: 0x4000, 0xe50: 0x4000, 0xe51: 0x4000, - 0xe52: 0x4000, 0xe53: 0x4000, 0xe54: 0x4000, 0xe55: 0x4000, 0xe56: 0x4000, 0xe57: 0x4000, - 0xe58: 0x4000, 0xe59: 0x4000, 0xe5a: 0x4000, 0xe5b: 0x4000, 0xe5c: 0x4000, 0xe5d: 0x4000, - 0xe5e: 0x4000, 0xe5f: 0x4000, 0xe60: 0x4000, 0xe61: 0x4000, 0xe62: 0x4000, 0xe63: 0x4000, - 0xe70: 0x4000, 0xe71: 0x4000, 0xe72: 0x4000, 0xe73: 0x4000, 0xe74: 0x4000, 0xe75: 0x4000, - 0xe76: 0x4000, 0xe77: 0x4000, 0xe78: 0x4000, 0xe79: 0x4000, 0xe7a: 0x4000, 0xe7b: 0x4000, - 0xe7c: 0x4000, 0xe7d: 0x4000, 0xe7e: 0x4000, 0xe7f: 0x4000, - // Block 0x3a, offset 0xe80 - 0xe80: 0x4000, 0xe81: 0x4000, 0xe82: 0x4000, 0xe83: 0x4000, 0xe84: 0x4000, 0xe85: 0x4000, - 0xe86: 0x4000, 0xe87: 0x4000, 0xe88: 0x4000, 0xe89: 0x4000, 0xe8a: 0x4000, 0xe8b: 0x4000, - 0xe8c: 0x4000, 0xe8d: 0x4000, 0xe8e: 0x4000, 0xe8f: 0x4000, 0xe90: 0x4000, 0xe91: 0x4000, - 0xe92: 0x4000, 0xe93: 0x4000, 0xe94: 0x4000, 0xe95: 0x4000, 0xe96: 0x4000, 0xe97: 0x4000, - 0xe98: 0x4000, 0xe99: 0x4000, 0xe9a: 0x4000, 0xe9b: 0x4000, 0xe9c: 0x4000, 0xe9d: 0x4000, - 0xe9e: 0x4000, 0xea0: 0x4000, 0xea1: 0x4000, 0xea2: 0x4000, 
0xea3: 0x4000, - 0xea4: 0x4000, 0xea5: 0x4000, 0xea6: 0x4000, 0xea7: 0x4000, 0xea8: 0x4000, 0xea9: 0x4000, - 0xeaa: 0x4000, 0xeab: 0x4000, 0xeac: 0x4000, 0xead: 0x4000, 0xeae: 0x4000, 0xeaf: 0x4000, - 0xeb0: 0x4000, 0xeb1: 0x4000, 0xeb2: 0x4000, 0xeb3: 0x4000, 0xeb4: 0x4000, 0xeb5: 0x4000, - 0xeb6: 0x4000, 0xeb7: 0x4000, 0xeb8: 0x4000, 0xeb9: 0x4000, 0xeba: 0x4000, 0xebb: 0x4000, - 0xebc: 0x4000, 0xebd: 0x4000, 0xebe: 0x4000, 0xebf: 0x4000, - // Block 0x3b, offset 0xec0 - 0xec0: 0x4000, 0xec1: 0x4000, 0xec2: 0x4000, 0xec3: 0x4000, 0xec4: 0x4000, 0xec5: 0x4000, - 0xec6: 0x4000, 0xec7: 0x4000, 0xec8: 0x2000, 0xec9: 0x2000, 0xeca: 0x2000, 0xecb: 0x2000, - 0xecc: 0x2000, 0xecd: 0x2000, 0xece: 0x2000, 0xecf: 0x2000, 0xed0: 0x4000, 0xed1: 0x4000, - 0xed2: 0x4000, 0xed3: 0x4000, 0xed4: 0x4000, 0xed5: 0x4000, 0xed6: 0x4000, 0xed7: 0x4000, - 0xed8: 0x4000, 0xed9: 0x4000, 0xeda: 0x4000, 0xedb: 0x4000, 0xedc: 0x4000, 0xedd: 0x4000, - 0xede: 0x4000, 0xedf: 0x4000, 0xee0: 0x4000, 0xee1: 0x4000, 0xee2: 0x4000, 0xee3: 0x4000, - 0xee4: 0x4000, 0xee5: 0x4000, 0xee6: 0x4000, 0xee7: 0x4000, 0xee8: 0x4000, 0xee9: 0x4000, - 0xeea: 0x4000, 0xeeb: 0x4000, 0xeec: 0x4000, 0xeed: 0x4000, 0xeee: 0x4000, 0xeef: 0x4000, - 0xef0: 0x4000, 0xef1: 0x4000, 0xef2: 0x4000, 0xef3: 0x4000, 0xef4: 0x4000, 0xef5: 0x4000, - 0xef6: 0x4000, 0xef7: 0x4000, 0xef8: 0x4000, 0xef9: 0x4000, 0xefa: 0x4000, 0xefb: 0x4000, - 0xefc: 0x4000, 0xefd: 0x4000, 0xefe: 0x4000, 0xeff: 0x4000, - // Block 0x3c, offset 0xf00 - 0xf00: 0x4000, 0xf01: 0x4000, 0xf02: 0x4000, 0xf03: 0x4000, 0xf04: 0x4000, 0xf05: 0x4000, - 0xf06: 0x4000, 0xf07: 0x4000, 0xf08: 0x4000, 0xf09: 0x4000, 0xf0a: 0x4000, 0xf0b: 0x4000, - 0xf0c: 0x4000, 0xf0d: 0x4000, 0xf0e: 0x4000, 0xf0f: 0x4000, 0xf10: 0x4000, 0xf11: 0x4000, - 0xf12: 0x4000, 0xf13: 0x4000, 0xf14: 0x4000, 0xf15: 0x4000, 0xf16: 0x4000, 0xf17: 0x4000, - 0xf18: 0x4000, 0xf19: 0x4000, 0xf1a: 0x4000, 0xf1b: 0x4000, 0xf1c: 0x4000, 0xf1d: 0x4000, - 0xf1e: 0x4000, 0xf1f: 0x4000, 0xf20: 0x4000, 0xf21: 0x4000, 0xf22: 0x4000, 0xf23: 0x4000, - 0xf24: 0x4000, 0xf25: 0x4000, 0xf26: 0x4000, 0xf27: 0x4000, 0xf28: 0x4000, 0xf29: 0x4000, - 0xf2a: 0x4000, 0xf2b: 0x4000, 0xf2c: 0x4000, 0xf2d: 0x4000, 0xf2e: 0x4000, 0xf2f: 0x4000, - 0xf30: 0x4000, 0xf31: 0x4000, 0xf32: 0x4000, 0xf33: 0x4000, 0xf34: 0x4000, 0xf35: 0x4000, - 0xf36: 0x4000, 0xf37: 0x4000, 0xf38: 0x4000, 0xf39: 0x4000, 0xf3a: 0x4000, 0xf3b: 0x4000, - 0xf3c: 0x4000, 0xf3d: 0x4000, 0xf3e: 0x4000, - // Block 0x3d, offset 0xf40 - 0xf40: 0x4000, 0xf41: 0x4000, 0xf42: 0x4000, 0xf43: 0x4000, 0xf44: 0x4000, 0xf45: 0x4000, - 0xf46: 0x4000, 0xf47: 0x4000, 0xf48: 0x4000, 0xf49: 0x4000, 0xf4a: 0x4000, 0xf4b: 0x4000, - 0xf4c: 0x4000, 0xf50: 0x4000, 0xf51: 0x4000, - 0xf52: 0x4000, 0xf53: 0x4000, 0xf54: 0x4000, 0xf55: 0x4000, 0xf56: 0x4000, 0xf57: 0x4000, - 0xf58: 0x4000, 0xf59: 0x4000, 0xf5a: 0x4000, 0xf5b: 0x4000, 0xf5c: 0x4000, 0xf5d: 0x4000, - 0xf5e: 0x4000, 0xf5f: 0x4000, 0xf60: 0x4000, 0xf61: 0x4000, 0xf62: 0x4000, 0xf63: 0x4000, - 0xf64: 0x4000, 0xf65: 0x4000, 0xf66: 0x4000, 0xf67: 0x4000, 0xf68: 0x4000, 0xf69: 0x4000, - 0xf6a: 0x4000, 0xf6b: 0x4000, 0xf6c: 0x4000, 0xf6d: 0x4000, 0xf6e: 0x4000, 0xf6f: 0x4000, - 0xf70: 0x4000, 0xf71: 0x4000, 0xf72: 0x4000, 0xf73: 0x4000, 0xf74: 0x4000, 0xf75: 0x4000, - 0xf76: 0x4000, 0xf77: 0x4000, 0xf78: 0x4000, 0xf79: 0x4000, 0xf7a: 0x4000, 0xf7b: 0x4000, - 0xf7c: 0x4000, 0xf7d: 0x4000, 0xf7e: 0x4000, 0xf7f: 0x4000, - // Block 0x3e, offset 0xf80 - 0xf80: 0x4000, 0xf81: 0x4000, 0xf82: 0x4000, 0xf83: 0x4000, 0xf84: 0x4000, 0xf85: 0x4000, - 0xf86: 
0x4000, - // Block 0x3f, offset 0xfc0 - 0xfe0: 0x4000, 0xfe1: 0x4000, 0xfe2: 0x4000, 0xfe3: 0x4000, - 0xfe4: 0x4000, 0xfe5: 0x4000, 0xfe6: 0x4000, 0xfe7: 0x4000, 0xfe8: 0x4000, 0xfe9: 0x4000, - 0xfea: 0x4000, 0xfeb: 0x4000, 0xfec: 0x4000, 0xfed: 0x4000, 0xfee: 0x4000, 0xfef: 0x4000, - 0xff0: 0x4000, 0xff1: 0x4000, 0xff2: 0x4000, 0xff3: 0x4000, 0xff4: 0x4000, 0xff5: 0x4000, - 0xff6: 0x4000, 0xff7: 0x4000, 0xff8: 0x4000, 0xff9: 0x4000, 0xffa: 0x4000, 0xffb: 0x4000, - 0xffc: 0x4000, - // Block 0x40, offset 0x1000 - 0x1000: 0x4000, 0x1001: 0x4000, 0x1002: 0x4000, 0x1003: 0x4000, 0x1004: 0x4000, 0x1005: 0x4000, - 0x1006: 0x4000, 0x1007: 0x4000, 0x1008: 0x4000, 0x1009: 0x4000, 0x100a: 0x4000, 0x100b: 0x4000, - 0x100c: 0x4000, 0x100d: 0x4000, 0x100e: 0x4000, 0x100f: 0x4000, 0x1010: 0x4000, 0x1011: 0x4000, - 0x1012: 0x4000, 0x1013: 0x4000, 0x1014: 0x4000, 0x1015: 0x4000, 0x1016: 0x4000, 0x1017: 0x4000, - 0x1018: 0x4000, 0x1019: 0x4000, 0x101a: 0x4000, 0x101b: 0x4000, 0x101c: 0x4000, 0x101d: 0x4000, - 0x101e: 0x4000, 0x101f: 0x4000, 0x1020: 0x4000, 0x1021: 0x4000, 0x1022: 0x4000, 0x1023: 0x4000, - // Block 0x41, offset 0x1040 - 0x1040: 0x2000, 0x1041: 0x2000, 0x1042: 0x2000, 0x1043: 0x2000, 0x1044: 0x2000, 0x1045: 0x2000, - 0x1046: 0x2000, 0x1047: 0x2000, 0x1048: 0x2000, 0x1049: 0x2000, 0x104a: 0x2000, 0x104b: 0x2000, - 0x104c: 0x2000, 0x104d: 0x2000, 0x104e: 0x2000, 0x104f: 0x2000, 0x1050: 0x4000, 0x1051: 0x4000, - 0x1052: 0x4000, 0x1053: 0x4000, 0x1054: 0x4000, 0x1055: 0x4000, 0x1056: 0x4000, 0x1057: 0x4000, - 0x1058: 0x4000, 0x1059: 0x4000, - 0x1070: 0x4000, 0x1071: 0x4000, 0x1072: 0x4000, 0x1073: 0x4000, 0x1074: 0x4000, 0x1075: 0x4000, - 0x1076: 0x4000, 0x1077: 0x4000, 0x1078: 0x4000, 0x1079: 0x4000, 0x107a: 0x4000, 0x107b: 0x4000, - 0x107c: 0x4000, 0x107d: 0x4000, 0x107e: 0x4000, 0x107f: 0x4000, - // Block 0x42, offset 0x1080 - 0x1080: 0x4000, 0x1081: 0x4000, 0x1082: 0x4000, 0x1083: 0x4000, 0x1084: 0x4000, 0x1085: 0x4000, - 0x1086: 0x4000, 0x1087: 0x4000, 0x1088: 0x4000, 0x1089: 0x4000, 0x108a: 0x4000, 0x108b: 0x4000, - 0x108c: 0x4000, 0x108d: 0x4000, 0x108e: 0x4000, 0x108f: 0x4000, 0x1090: 0x4000, 0x1091: 0x4000, - 0x1092: 0x4000, 0x1094: 0x4000, 0x1095: 0x4000, 0x1096: 0x4000, 0x1097: 0x4000, - 0x1098: 0x4000, 0x1099: 0x4000, 0x109a: 0x4000, 0x109b: 0x4000, 0x109c: 0x4000, 0x109d: 0x4000, - 0x109e: 0x4000, 0x109f: 0x4000, 0x10a0: 0x4000, 0x10a1: 0x4000, 0x10a2: 0x4000, 0x10a3: 0x4000, - 0x10a4: 0x4000, 0x10a5: 0x4000, 0x10a6: 0x4000, 0x10a8: 0x4000, 0x10a9: 0x4000, - 0x10aa: 0x4000, 0x10ab: 0x4000, - // Block 0x43, offset 0x10c0 - 0x10c1: 0x9012, 0x10c2: 0x9012, 0x10c3: 0x9012, 0x10c4: 0x9012, 0x10c5: 0x9012, - 0x10c6: 0x9012, 0x10c7: 0x9012, 0x10c8: 0x9012, 0x10c9: 0x9012, 0x10ca: 0x9012, 0x10cb: 0x9012, - 0x10cc: 0x9012, 0x10cd: 0x9012, 0x10ce: 0x9012, 0x10cf: 0x9012, 0x10d0: 0x9012, 0x10d1: 0x9012, - 0x10d2: 0x9012, 0x10d3: 0x9012, 0x10d4: 0x9012, 0x10d5: 0x9012, 0x10d6: 0x9012, 0x10d7: 0x9012, - 0x10d8: 0x9012, 0x10d9: 0x9012, 0x10da: 0x9012, 0x10db: 0x9012, 0x10dc: 0x9012, 0x10dd: 0x9012, - 0x10de: 0x9012, 0x10df: 0x9012, 0x10e0: 0x9049, 0x10e1: 0x9049, 0x10e2: 0x9049, 0x10e3: 0x9049, - 0x10e4: 0x9049, 0x10e5: 0x9049, 0x10e6: 0x9049, 0x10e7: 0x9049, 0x10e8: 0x9049, 0x10e9: 0x9049, - 0x10ea: 0x9049, 0x10eb: 0x9049, 0x10ec: 0x9049, 0x10ed: 0x9049, 0x10ee: 0x9049, 0x10ef: 0x9049, - 0x10f0: 0x9049, 0x10f1: 0x9049, 0x10f2: 0x9049, 0x10f3: 0x9049, 0x10f4: 0x9049, 0x10f5: 0x9049, - 0x10f6: 0x9049, 0x10f7: 0x9049, 0x10f8: 0x9049, 0x10f9: 0x9049, 0x10fa: 0x9049, 0x10fb: 0x9049, - 0x10fc: 0x9049, 
0x10fd: 0x9049, 0x10fe: 0x9049, 0x10ff: 0x9049, - // Block 0x44, offset 0x1100 - 0x1100: 0x9049, 0x1101: 0x9049, 0x1102: 0x9049, 0x1103: 0x9049, 0x1104: 0x9049, 0x1105: 0x9049, - 0x1106: 0x9049, 0x1107: 0x9049, 0x1108: 0x9049, 0x1109: 0x9049, 0x110a: 0x9049, 0x110b: 0x9049, - 0x110c: 0x9049, 0x110d: 0x9049, 0x110e: 0x9049, 0x110f: 0x9049, 0x1110: 0x9049, 0x1111: 0x9049, - 0x1112: 0x9049, 0x1113: 0x9049, 0x1114: 0x9049, 0x1115: 0x9049, 0x1116: 0x9049, 0x1117: 0x9049, - 0x1118: 0x9049, 0x1119: 0x9049, 0x111a: 0x9049, 0x111b: 0x9049, 0x111c: 0x9049, 0x111d: 0x9049, - 0x111e: 0x9049, 0x111f: 0x904a, 0x1120: 0x904b, 0x1121: 0xb04c, 0x1122: 0xb04d, 0x1123: 0xb04d, - 0x1124: 0xb04e, 0x1125: 0xb04f, 0x1126: 0xb050, 0x1127: 0xb051, 0x1128: 0xb052, 0x1129: 0xb053, - 0x112a: 0xb054, 0x112b: 0xb055, 0x112c: 0xb056, 0x112d: 0xb057, 0x112e: 0xb058, 0x112f: 0xb059, - 0x1130: 0xb05a, 0x1131: 0xb05b, 0x1132: 0xb05c, 0x1133: 0xb05d, 0x1134: 0xb05e, 0x1135: 0xb05f, - 0x1136: 0xb060, 0x1137: 0xb061, 0x1138: 0xb062, 0x1139: 0xb063, 0x113a: 0xb064, 0x113b: 0xb065, - 0x113c: 0xb052, 0x113d: 0xb066, 0x113e: 0xb067, 0x113f: 0xb055, - // Block 0x45, offset 0x1140 - 0x1140: 0xb068, 0x1141: 0xb069, 0x1142: 0xb06a, 0x1143: 0xb06b, 0x1144: 0xb05a, 0x1145: 0xb056, - 0x1146: 0xb06c, 0x1147: 0xb06d, 0x1148: 0xb06b, 0x1149: 0xb06e, 0x114a: 0xb06b, 0x114b: 0xb06f, - 0x114c: 0xb06f, 0x114d: 0xb070, 0x114e: 0xb070, 0x114f: 0xb071, 0x1150: 0xb056, 0x1151: 0xb072, - 0x1152: 0xb073, 0x1153: 0xb072, 0x1154: 0xb074, 0x1155: 0xb073, 0x1156: 0xb075, 0x1157: 0xb075, - 0x1158: 0xb076, 0x1159: 0xb076, 0x115a: 0xb077, 0x115b: 0xb077, 0x115c: 0xb073, 0x115d: 0xb078, - 0x115e: 0xb079, 0x115f: 0xb067, 0x1160: 0xb07a, 0x1161: 0xb07b, 0x1162: 0xb07b, 0x1163: 0xb07b, - 0x1164: 0xb07b, 0x1165: 0xb07b, 0x1166: 0xb07b, 0x1167: 0xb07b, 0x1168: 0xb07b, 0x1169: 0xb07b, - 0x116a: 0xb07b, 0x116b: 0xb07b, 0x116c: 0xb07b, 0x116d: 0xb07b, 0x116e: 0xb07b, 0x116f: 0xb07b, - 0x1170: 0xb07c, 0x1171: 0xb07c, 0x1172: 0xb07c, 0x1173: 0xb07c, 0x1174: 0xb07c, 0x1175: 0xb07c, - 0x1176: 0xb07c, 0x1177: 0xb07c, 0x1178: 0xb07c, 0x1179: 0xb07c, 0x117a: 0xb07c, 0x117b: 0xb07c, - 0x117c: 0xb07c, 0x117d: 0xb07c, 0x117e: 0xb07c, - // Block 0x46, offset 0x1180 - 0x1182: 0xb07d, 0x1183: 0xb07e, 0x1184: 0xb07f, 0x1185: 0xb080, - 0x1186: 0xb07f, 0x1187: 0xb07e, 0x118a: 0xb081, 0x118b: 0xb082, - 0x118c: 0xb083, 0x118d: 0xb07f, 0x118e: 0xb080, 0x118f: 0xb07f, - 0x1192: 0xb084, 0x1193: 0xb085, 0x1194: 0xb084, 0x1195: 0xb086, 0x1196: 0xb084, 0x1197: 0xb087, - 0x119a: 0xb088, 0x119b: 0xb089, 0x119c: 0xb08a, - 0x11a0: 0x908b, 0x11a1: 0x908b, 0x11a2: 0x908c, 0x11a3: 0x908d, - 0x11a4: 0x908b, 0x11a5: 0x908e, 0x11a6: 0x908f, 0x11a8: 0xb090, 0x11a9: 0xb091, - 0x11aa: 0xb092, 0x11ab: 0xb091, 0x11ac: 0xb093, 0x11ad: 0xb094, 0x11ae: 0xb095, - 0x11bd: 0x2000, - // Block 0x47, offset 0x11c0 - 0x11e0: 0x4000, 0x11e1: 0x4000, - // Block 0x48, offset 0x1200 - 0x1200: 0x4000, 0x1201: 0x4000, 0x1202: 0x4000, 0x1203: 0x4000, 0x1204: 0x4000, 0x1205: 0x4000, - 0x1206: 0x4000, 0x1207: 0x4000, 0x1208: 0x4000, 0x1209: 0x4000, 0x120a: 0x4000, 0x120b: 0x4000, - 0x120c: 0x4000, 0x120d: 0x4000, 0x120e: 0x4000, 0x120f: 0x4000, 0x1210: 0x4000, 0x1211: 0x4000, - 0x1212: 0x4000, 0x1213: 0x4000, 0x1214: 0x4000, 0x1215: 0x4000, 0x1216: 0x4000, 0x1217: 0x4000, - 0x1218: 0x4000, 0x1219: 0x4000, 0x121a: 0x4000, 0x121b: 0x4000, 0x121c: 0x4000, 0x121d: 0x4000, - 0x121e: 0x4000, 0x121f: 0x4000, 0x1220: 0x4000, 0x1221: 0x4000, 0x1222: 0x4000, 0x1223: 0x4000, - 0x1224: 0x4000, 0x1225: 0x4000, 0x1226: 0x4000, 0x1227: 
0x4000, 0x1228: 0x4000, 0x1229: 0x4000, - 0x122a: 0x4000, 0x122b: 0x4000, 0x122c: 0x4000, 0x122d: 0x4000, 0x122e: 0x4000, 0x122f: 0x4000, - 0x1230: 0x4000, 0x1231: 0x4000, - // Block 0x49, offset 0x1240 - 0x1240: 0x4000, 0x1241: 0x4000, 0x1242: 0x4000, 0x1243: 0x4000, 0x1244: 0x4000, 0x1245: 0x4000, - 0x1246: 0x4000, 0x1247: 0x4000, 0x1248: 0x4000, 0x1249: 0x4000, 0x124a: 0x4000, 0x124b: 0x4000, - 0x124c: 0x4000, 0x124d: 0x4000, 0x124e: 0x4000, 0x124f: 0x4000, 0x1250: 0x4000, 0x1251: 0x4000, - 0x1252: 0x4000, 0x1253: 0x4000, 0x1254: 0x4000, 0x1255: 0x4000, 0x1256: 0x4000, 0x1257: 0x4000, - 0x1258: 0x4000, 0x1259: 0x4000, 0x125a: 0x4000, 0x125b: 0x4000, 0x125c: 0x4000, 0x125d: 0x4000, - 0x125e: 0x4000, 0x125f: 0x4000, 0x1260: 0x4000, 0x1261: 0x4000, 0x1262: 0x4000, 0x1263: 0x4000, - 0x1264: 0x4000, 0x1265: 0x4000, 0x1266: 0x4000, 0x1267: 0x4000, 0x1268: 0x4000, 0x1269: 0x4000, - 0x126a: 0x4000, 0x126b: 0x4000, 0x126c: 0x4000, 0x126d: 0x4000, 0x126e: 0x4000, 0x126f: 0x4000, - 0x1270: 0x4000, 0x1271: 0x4000, 0x1272: 0x4000, - // Block 0x4a, offset 0x1280 - 0x1280: 0x4000, 0x1281: 0x4000, 0x1282: 0x4000, 0x1283: 0x4000, 0x1284: 0x4000, 0x1285: 0x4000, - 0x1286: 0x4000, 0x1287: 0x4000, 0x1288: 0x4000, 0x1289: 0x4000, 0x128a: 0x4000, 0x128b: 0x4000, - 0x128c: 0x4000, 0x128d: 0x4000, 0x128e: 0x4000, 0x128f: 0x4000, 0x1290: 0x4000, 0x1291: 0x4000, - 0x1292: 0x4000, 0x1293: 0x4000, 0x1294: 0x4000, 0x1295: 0x4000, 0x1296: 0x4000, 0x1297: 0x4000, - 0x1298: 0x4000, 0x1299: 0x4000, 0x129a: 0x4000, 0x129b: 0x4000, 0x129c: 0x4000, 0x129d: 0x4000, - 0x129e: 0x4000, - // Block 0x4b, offset 0x12c0 - 0x12f0: 0x4000, 0x12f1: 0x4000, 0x12f2: 0x4000, 0x12f3: 0x4000, 0x12f4: 0x4000, 0x12f5: 0x4000, - 0x12f6: 0x4000, 0x12f7: 0x4000, 0x12f8: 0x4000, 0x12f9: 0x4000, 0x12fa: 0x4000, 0x12fb: 0x4000, - 0x12fc: 0x4000, 0x12fd: 0x4000, 0x12fe: 0x4000, 0x12ff: 0x4000, - // Block 0x4c, offset 0x1300 - 0x1300: 0x4000, 0x1301: 0x4000, 0x1302: 0x4000, 0x1303: 0x4000, 0x1304: 0x4000, 0x1305: 0x4000, - 0x1306: 0x4000, 0x1307: 0x4000, 0x1308: 0x4000, 0x1309: 0x4000, 0x130a: 0x4000, 0x130b: 0x4000, - 0x130c: 0x4000, 0x130d: 0x4000, 0x130e: 0x4000, 0x130f: 0x4000, 0x1310: 0x4000, 0x1311: 0x4000, - 0x1312: 0x4000, 0x1313: 0x4000, 0x1314: 0x4000, 0x1315: 0x4000, 0x1316: 0x4000, 0x1317: 0x4000, - 0x1318: 0x4000, 0x1319: 0x4000, 0x131a: 0x4000, 0x131b: 0x4000, 0x131c: 0x4000, 0x131d: 0x4000, - 0x131e: 0x4000, 0x131f: 0x4000, 0x1320: 0x4000, 0x1321: 0x4000, 0x1322: 0x4000, 0x1323: 0x4000, - 0x1324: 0x4000, 0x1325: 0x4000, 0x1326: 0x4000, 0x1327: 0x4000, 0x1328: 0x4000, 0x1329: 0x4000, - 0x132a: 0x4000, 0x132b: 0x4000, 0x132c: 0x4000, 0x132d: 0x4000, 0x132e: 0x4000, 0x132f: 0x4000, - 0x1330: 0x4000, 0x1331: 0x4000, 0x1332: 0x4000, 0x1333: 0x4000, 0x1334: 0x4000, 0x1335: 0x4000, - 0x1336: 0x4000, 0x1337: 0x4000, 0x1338: 0x4000, 0x1339: 0x4000, 0x133a: 0x4000, 0x133b: 0x4000, - // Block 0x4d, offset 0x1340 - 0x1344: 0x4000, - // Block 0x4e, offset 0x1380 - 0x138f: 0x4000, - // Block 0x4f, offset 0x13c0 - 0x13c0: 0x2000, 0x13c1: 0x2000, 0x13c2: 0x2000, 0x13c3: 0x2000, 0x13c4: 0x2000, 0x13c5: 0x2000, - 0x13c6: 0x2000, 0x13c7: 0x2000, 0x13c8: 0x2000, 0x13c9: 0x2000, 0x13ca: 0x2000, - 0x13d0: 0x2000, 0x13d1: 0x2000, - 0x13d2: 0x2000, 0x13d3: 0x2000, 0x13d4: 0x2000, 0x13d5: 0x2000, 0x13d6: 0x2000, 0x13d7: 0x2000, - 0x13d8: 0x2000, 0x13d9: 0x2000, 0x13da: 0x2000, 0x13db: 0x2000, 0x13dc: 0x2000, 0x13dd: 0x2000, - 0x13de: 0x2000, 0x13df: 0x2000, 0x13e0: 0x2000, 0x13e1: 0x2000, 0x13e2: 0x2000, 0x13e3: 0x2000, - 0x13e4: 0x2000, 0x13e5: 0x2000, 
0x13e6: 0x2000, 0x13e7: 0x2000, 0x13e8: 0x2000, 0x13e9: 0x2000, - 0x13ea: 0x2000, 0x13eb: 0x2000, 0x13ec: 0x2000, 0x13ed: 0x2000, - 0x13f0: 0x2000, 0x13f1: 0x2000, 0x13f2: 0x2000, 0x13f3: 0x2000, 0x13f4: 0x2000, 0x13f5: 0x2000, - 0x13f6: 0x2000, 0x13f7: 0x2000, 0x13f8: 0x2000, 0x13f9: 0x2000, 0x13fa: 0x2000, 0x13fb: 0x2000, - 0x13fc: 0x2000, 0x13fd: 0x2000, 0x13fe: 0x2000, 0x13ff: 0x2000, - // Block 0x50, offset 0x1400 - 0x1400: 0x2000, 0x1401: 0x2000, 0x1402: 0x2000, 0x1403: 0x2000, 0x1404: 0x2000, 0x1405: 0x2000, - 0x1406: 0x2000, 0x1407: 0x2000, 0x1408: 0x2000, 0x1409: 0x2000, 0x140a: 0x2000, 0x140b: 0x2000, - 0x140c: 0x2000, 0x140d: 0x2000, 0x140e: 0x2000, 0x140f: 0x2000, 0x1410: 0x2000, 0x1411: 0x2000, - 0x1412: 0x2000, 0x1413: 0x2000, 0x1414: 0x2000, 0x1415: 0x2000, 0x1416: 0x2000, 0x1417: 0x2000, - 0x1418: 0x2000, 0x1419: 0x2000, 0x141a: 0x2000, 0x141b: 0x2000, 0x141c: 0x2000, 0x141d: 0x2000, - 0x141e: 0x2000, 0x141f: 0x2000, 0x1420: 0x2000, 0x1421: 0x2000, 0x1422: 0x2000, 0x1423: 0x2000, - 0x1424: 0x2000, 0x1425: 0x2000, 0x1426: 0x2000, 0x1427: 0x2000, 0x1428: 0x2000, 0x1429: 0x2000, - 0x1430: 0x2000, 0x1431: 0x2000, 0x1432: 0x2000, 0x1433: 0x2000, 0x1434: 0x2000, 0x1435: 0x2000, - 0x1436: 0x2000, 0x1437: 0x2000, 0x1438: 0x2000, 0x1439: 0x2000, 0x143a: 0x2000, 0x143b: 0x2000, - 0x143c: 0x2000, 0x143d: 0x2000, 0x143e: 0x2000, 0x143f: 0x2000, - // Block 0x51, offset 0x1440 - 0x1440: 0x2000, 0x1441: 0x2000, 0x1442: 0x2000, 0x1443: 0x2000, 0x1444: 0x2000, 0x1445: 0x2000, - 0x1446: 0x2000, 0x1447: 0x2000, 0x1448: 0x2000, 0x1449: 0x2000, 0x144a: 0x2000, 0x144b: 0x2000, - 0x144c: 0x2000, 0x144d: 0x2000, 0x144e: 0x4000, 0x144f: 0x2000, 0x1450: 0x2000, 0x1451: 0x4000, - 0x1452: 0x4000, 0x1453: 0x4000, 0x1454: 0x4000, 0x1455: 0x4000, 0x1456: 0x4000, 0x1457: 0x4000, - 0x1458: 0x4000, 0x1459: 0x4000, 0x145a: 0x4000, 0x145b: 0x2000, 0x145c: 0x2000, 0x145d: 0x2000, - 0x145e: 0x2000, 0x145f: 0x2000, 0x1460: 0x2000, 0x1461: 0x2000, 0x1462: 0x2000, 0x1463: 0x2000, - 0x1464: 0x2000, 0x1465: 0x2000, 0x1466: 0x2000, 0x1467: 0x2000, 0x1468: 0x2000, 0x1469: 0x2000, - 0x146a: 0x2000, 0x146b: 0x2000, 0x146c: 0x2000, - // Block 0x52, offset 0x1480 - 0x1480: 0x4000, 0x1481: 0x4000, 0x1482: 0x4000, - 0x1490: 0x4000, 0x1491: 0x4000, - 0x1492: 0x4000, 0x1493: 0x4000, 0x1494: 0x4000, 0x1495: 0x4000, 0x1496: 0x4000, 0x1497: 0x4000, - 0x1498: 0x4000, 0x1499: 0x4000, 0x149a: 0x4000, 0x149b: 0x4000, 0x149c: 0x4000, 0x149d: 0x4000, - 0x149e: 0x4000, 0x149f: 0x4000, 0x14a0: 0x4000, 0x14a1: 0x4000, 0x14a2: 0x4000, 0x14a3: 0x4000, - 0x14a4: 0x4000, 0x14a5: 0x4000, 0x14a6: 0x4000, 0x14a7: 0x4000, 0x14a8: 0x4000, 0x14a9: 0x4000, - 0x14aa: 0x4000, 0x14ab: 0x4000, 0x14ac: 0x4000, 0x14ad: 0x4000, 0x14ae: 0x4000, 0x14af: 0x4000, - 0x14b0: 0x4000, 0x14b1: 0x4000, 0x14b2: 0x4000, 0x14b3: 0x4000, 0x14b4: 0x4000, 0x14b5: 0x4000, - 0x14b6: 0x4000, 0x14b7: 0x4000, 0x14b8: 0x4000, 0x14b9: 0x4000, 0x14ba: 0x4000, 0x14bb: 0x4000, - // Block 0x53, offset 0x14c0 - 0x14c0: 0x4000, 0x14c1: 0x4000, 0x14c2: 0x4000, 0x14c3: 0x4000, 0x14c4: 0x4000, 0x14c5: 0x4000, - 0x14c6: 0x4000, 0x14c7: 0x4000, 0x14c8: 0x4000, - 0x14d0: 0x4000, 0x14d1: 0x4000, - 0x14e0: 0x4000, 0x14e1: 0x4000, 0x14e2: 0x4000, 0x14e3: 0x4000, - 0x14e4: 0x4000, 0x14e5: 0x4000, - // Block 0x54, offset 0x1500 - 0x1500: 0x4000, 0x1501: 0x4000, 0x1502: 0x4000, 0x1503: 0x4000, 0x1504: 0x4000, 0x1505: 0x4000, - 0x1506: 0x4000, 0x1507: 0x4000, 0x1508: 0x4000, 0x1509: 0x4000, 0x150a: 0x4000, 0x150b: 0x4000, - 0x150c: 0x4000, 0x150d: 0x4000, 0x150e: 0x4000, 0x150f: 0x4000, 0x1510: 
0x4000, 0x1511: 0x4000, - 0x1512: 0x4000, 0x1513: 0x4000, 0x1514: 0x4000, 0x1515: 0x4000, 0x1516: 0x4000, 0x1517: 0x4000, - 0x1518: 0x4000, 0x1519: 0x4000, 0x151a: 0x4000, 0x151b: 0x4000, 0x151c: 0x4000, 0x151d: 0x4000, - 0x151e: 0x4000, 0x151f: 0x4000, 0x1520: 0x4000, - 0x152d: 0x4000, 0x152e: 0x4000, 0x152f: 0x4000, - 0x1530: 0x4000, 0x1531: 0x4000, 0x1532: 0x4000, 0x1533: 0x4000, 0x1534: 0x4000, 0x1535: 0x4000, - 0x1537: 0x4000, 0x1538: 0x4000, 0x1539: 0x4000, 0x153a: 0x4000, 0x153b: 0x4000, - 0x153c: 0x4000, 0x153d: 0x4000, 0x153e: 0x4000, 0x153f: 0x4000, - // Block 0x55, offset 0x1540 - 0x1540: 0x4000, 0x1541: 0x4000, 0x1542: 0x4000, 0x1543: 0x4000, 0x1544: 0x4000, 0x1545: 0x4000, - 0x1546: 0x4000, 0x1547: 0x4000, 0x1548: 0x4000, 0x1549: 0x4000, 0x154a: 0x4000, 0x154b: 0x4000, - 0x154c: 0x4000, 0x154d: 0x4000, 0x154e: 0x4000, 0x154f: 0x4000, 0x1550: 0x4000, 0x1551: 0x4000, - 0x1552: 0x4000, 0x1553: 0x4000, 0x1554: 0x4000, 0x1555: 0x4000, 0x1556: 0x4000, 0x1557: 0x4000, - 0x1558: 0x4000, 0x1559: 0x4000, 0x155a: 0x4000, 0x155b: 0x4000, 0x155c: 0x4000, 0x155d: 0x4000, - 0x155e: 0x4000, 0x155f: 0x4000, 0x1560: 0x4000, 0x1561: 0x4000, 0x1562: 0x4000, 0x1563: 0x4000, - 0x1564: 0x4000, 0x1565: 0x4000, 0x1566: 0x4000, 0x1567: 0x4000, 0x1568: 0x4000, 0x1569: 0x4000, - 0x156a: 0x4000, 0x156b: 0x4000, 0x156c: 0x4000, 0x156d: 0x4000, 0x156e: 0x4000, 0x156f: 0x4000, - 0x1570: 0x4000, 0x1571: 0x4000, 0x1572: 0x4000, 0x1573: 0x4000, 0x1574: 0x4000, 0x1575: 0x4000, - 0x1576: 0x4000, 0x1577: 0x4000, 0x1578: 0x4000, 0x1579: 0x4000, 0x157a: 0x4000, 0x157b: 0x4000, - 0x157c: 0x4000, 0x157e: 0x4000, 0x157f: 0x4000, - // Block 0x56, offset 0x1580 - 0x1580: 0x4000, 0x1581: 0x4000, 0x1582: 0x4000, 0x1583: 0x4000, 0x1584: 0x4000, 0x1585: 0x4000, - 0x1586: 0x4000, 0x1587: 0x4000, 0x1588: 0x4000, 0x1589: 0x4000, 0x158a: 0x4000, 0x158b: 0x4000, - 0x158c: 0x4000, 0x158d: 0x4000, 0x158e: 0x4000, 0x158f: 0x4000, 0x1590: 0x4000, 0x1591: 0x4000, - 0x1592: 0x4000, 0x1593: 0x4000, - 0x15a0: 0x4000, 0x15a1: 0x4000, 0x15a2: 0x4000, 0x15a3: 0x4000, - 0x15a4: 0x4000, 0x15a5: 0x4000, 0x15a6: 0x4000, 0x15a7: 0x4000, 0x15a8: 0x4000, 0x15a9: 0x4000, - 0x15aa: 0x4000, 0x15ab: 0x4000, 0x15ac: 0x4000, 0x15ad: 0x4000, 0x15ae: 0x4000, 0x15af: 0x4000, - 0x15b0: 0x4000, 0x15b1: 0x4000, 0x15b2: 0x4000, 0x15b3: 0x4000, 0x15b4: 0x4000, 0x15b5: 0x4000, - 0x15b6: 0x4000, 0x15b7: 0x4000, 0x15b8: 0x4000, 0x15b9: 0x4000, 0x15ba: 0x4000, 0x15bb: 0x4000, - 0x15bc: 0x4000, 0x15bd: 0x4000, 0x15be: 0x4000, 0x15bf: 0x4000, - // Block 0x57, offset 0x15c0 - 0x15c0: 0x4000, 0x15c1: 0x4000, 0x15c2: 0x4000, 0x15c3: 0x4000, 0x15c4: 0x4000, 0x15c5: 0x4000, - 0x15c6: 0x4000, 0x15c7: 0x4000, 0x15c8: 0x4000, 0x15c9: 0x4000, 0x15ca: 0x4000, - 0x15cf: 0x4000, 0x15d0: 0x4000, 0x15d1: 0x4000, - 0x15d2: 0x4000, 0x15d3: 0x4000, - 0x15e0: 0x4000, 0x15e1: 0x4000, 0x15e2: 0x4000, 0x15e3: 0x4000, - 0x15e4: 0x4000, 0x15e5: 0x4000, 0x15e6: 0x4000, 0x15e7: 0x4000, 0x15e8: 0x4000, 0x15e9: 0x4000, - 0x15ea: 0x4000, 0x15eb: 0x4000, 0x15ec: 0x4000, 0x15ed: 0x4000, 0x15ee: 0x4000, 0x15ef: 0x4000, - 0x15f0: 0x4000, 0x15f4: 0x4000, - 0x15f8: 0x4000, 0x15f9: 0x4000, 0x15fa: 0x4000, 0x15fb: 0x4000, - 0x15fc: 0x4000, 0x15fd: 0x4000, 0x15fe: 0x4000, 0x15ff: 0x4000, - // Block 0x58, offset 0x1600 - 0x1600: 0x4000, 0x1602: 0x4000, 0x1603: 0x4000, 0x1604: 0x4000, 0x1605: 0x4000, - 0x1606: 0x4000, 0x1607: 0x4000, 0x1608: 0x4000, 0x1609: 0x4000, 0x160a: 0x4000, 0x160b: 0x4000, - 0x160c: 0x4000, 0x160d: 0x4000, 0x160e: 0x4000, 0x160f: 0x4000, 0x1610: 0x4000, 0x1611: 0x4000, - 0x1612: 
0x4000, 0x1613: 0x4000, 0x1614: 0x4000, 0x1615: 0x4000, 0x1616: 0x4000, 0x1617: 0x4000, - 0x1618: 0x4000, 0x1619: 0x4000, 0x161a: 0x4000, 0x161b: 0x4000, 0x161c: 0x4000, 0x161d: 0x4000, - 0x161e: 0x4000, 0x161f: 0x4000, 0x1620: 0x4000, 0x1621: 0x4000, 0x1622: 0x4000, 0x1623: 0x4000, - 0x1624: 0x4000, 0x1625: 0x4000, 0x1626: 0x4000, 0x1627: 0x4000, 0x1628: 0x4000, 0x1629: 0x4000, - 0x162a: 0x4000, 0x162b: 0x4000, 0x162c: 0x4000, 0x162d: 0x4000, 0x162e: 0x4000, 0x162f: 0x4000, - 0x1630: 0x4000, 0x1631: 0x4000, 0x1632: 0x4000, 0x1633: 0x4000, 0x1634: 0x4000, 0x1635: 0x4000, - 0x1636: 0x4000, 0x1637: 0x4000, 0x1638: 0x4000, 0x1639: 0x4000, 0x163a: 0x4000, 0x163b: 0x4000, - 0x163c: 0x4000, 0x163d: 0x4000, 0x163e: 0x4000, 0x163f: 0x4000, - // Block 0x59, offset 0x1640 - 0x1640: 0x4000, 0x1641: 0x4000, 0x1642: 0x4000, 0x1643: 0x4000, 0x1644: 0x4000, 0x1645: 0x4000, - 0x1646: 0x4000, 0x1647: 0x4000, 0x1648: 0x4000, 0x1649: 0x4000, 0x164a: 0x4000, 0x164b: 0x4000, - 0x164c: 0x4000, 0x164d: 0x4000, 0x164e: 0x4000, 0x164f: 0x4000, 0x1650: 0x4000, 0x1651: 0x4000, - 0x1652: 0x4000, 0x1653: 0x4000, 0x1654: 0x4000, 0x1655: 0x4000, 0x1656: 0x4000, 0x1657: 0x4000, - 0x1658: 0x4000, 0x1659: 0x4000, 0x165a: 0x4000, 0x165b: 0x4000, 0x165c: 0x4000, 0x165d: 0x4000, - 0x165e: 0x4000, 0x165f: 0x4000, 0x1660: 0x4000, 0x1661: 0x4000, 0x1662: 0x4000, 0x1663: 0x4000, - 0x1664: 0x4000, 0x1665: 0x4000, 0x1666: 0x4000, 0x1667: 0x4000, 0x1668: 0x4000, 0x1669: 0x4000, - 0x166a: 0x4000, 0x166b: 0x4000, 0x166c: 0x4000, 0x166d: 0x4000, 0x166e: 0x4000, 0x166f: 0x4000, - 0x1670: 0x4000, 0x1671: 0x4000, 0x1672: 0x4000, 0x1673: 0x4000, 0x1674: 0x4000, 0x1675: 0x4000, - 0x1676: 0x4000, 0x1677: 0x4000, 0x1678: 0x4000, 0x1679: 0x4000, 0x167a: 0x4000, 0x167b: 0x4000, - 0x167c: 0x4000, 0x167f: 0x4000, - // Block 0x5a, offset 0x1680 - 0x1680: 0x4000, 0x1681: 0x4000, 0x1682: 0x4000, 0x1683: 0x4000, 0x1684: 0x4000, 0x1685: 0x4000, - 0x1686: 0x4000, 0x1687: 0x4000, 0x1688: 0x4000, 0x1689: 0x4000, 0x168a: 0x4000, 0x168b: 0x4000, - 0x168c: 0x4000, 0x168d: 0x4000, 0x168e: 0x4000, 0x168f: 0x4000, 0x1690: 0x4000, 0x1691: 0x4000, - 0x1692: 0x4000, 0x1693: 0x4000, 0x1694: 0x4000, 0x1695: 0x4000, 0x1696: 0x4000, 0x1697: 0x4000, - 0x1698: 0x4000, 0x1699: 0x4000, 0x169a: 0x4000, 0x169b: 0x4000, 0x169c: 0x4000, 0x169d: 0x4000, - 0x169e: 0x4000, 0x169f: 0x4000, 0x16a0: 0x4000, 0x16a1: 0x4000, 0x16a2: 0x4000, 0x16a3: 0x4000, - 0x16a4: 0x4000, 0x16a5: 0x4000, 0x16a6: 0x4000, 0x16a7: 0x4000, 0x16a8: 0x4000, 0x16a9: 0x4000, - 0x16aa: 0x4000, 0x16ab: 0x4000, 0x16ac: 0x4000, 0x16ad: 0x4000, 0x16ae: 0x4000, 0x16af: 0x4000, - 0x16b0: 0x4000, 0x16b1: 0x4000, 0x16b2: 0x4000, 0x16b3: 0x4000, 0x16b4: 0x4000, 0x16b5: 0x4000, - 0x16b6: 0x4000, 0x16b7: 0x4000, 0x16b8: 0x4000, 0x16b9: 0x4000, 0x16ba: 0x4000, 0x16bb: 0x4000, - 0x16bc: 0x4000, 0x16bd: 0x4000, - // Block 0x5b, offset 0x16c0 - 0x16cb: 0x4000, - 0x16cc: 0x4000, 0x16cd: 0x4000, 0x16ce: 0x4000, 0x16d0: 0x4000, 0x16d1: 0x4000, - 0x16d2: 0x4000, 0x16d3: 0x4000, 0x16d4: 0x4000, 0x16d5: 0x4000, 0x16d6: 0x4000, 0x16d7: 0x4000, - 0x16d8: 0x4000, 0x16d9: 0x4000, 0x16da: 0x4000, 0x16db: 0x4000, 0x16dc: 0x4000, 0x16dd: 0x4000, - 0x16de: 0x4000, 0x16df: 0x4000, 0x16e0: 0x4000, 0x16e1: 0x4000, 0x16e2: 0x4000, 0x16e3: 0x4000, - 0x16e4: 0x4000, 0x16e5: 0x4000, 0x16e6: 0x4000, 0x16e7: 0x4000, - 0x16fa: 0x4000, - // Block 0x5c, offset 0x1700 - 0x1715: 0x4000, 0x1716: 0x4000, - 0x1724: 0x4000, - // Block 0x5d, offset 0x1740 - 0x177b: 0x4000, - 0x177c: 0x4000, 0x177d: 0x4000, 0x177e: 0x4000, 0x177f: 0x4000, - // Block 
0x5e, offset 0x1780 - 0x1780: 0x4000, 0x1781: 0x4000, 0x1782: 0x4000, 0x1783: 0x4000, 0x1784: 0x4000, 0x1785: 0x4000, - 0x1786: 0x4000, 0x1787: 0x4000, 0x1788: 0x4000, 0x1789: 0x4000, 0x178a: 0x4000, 0x178b: 0x4000, - 0x178c: 0x4000, 0x178d: 0x4000, 0x178e: 0x4000, 0x178f: 0x4000, - // Block 0x5f, offset 0x17c0 - 0x17c0: 0x4000, 0x17c1: 0x4000, 0x17c2: 0x4000, 0x17c3: 0x4000, 0x17c4: 0x4000, 0x17c5: 0x4000, - 0x17cc: 0x4000, 0x17d0: 0x4000, 0x17d1: 0x4000, - 0x17d2: 0x4000, - 0x17eb: 0x4000, 0x17ec: 0x4000, - 0x17f4: 0x4000, 0x17f5: 0x4000, - 0x17f6: 0x4000, 0x17f7: 0x4000, 0x17f8: 0x4000, 0x17f9: 0x4000, - // Block 0x60, offset 0x1800 - 0x1810: 0x4000, 0x1811: 0x4000, - 0x1812: 0x4000, 0x1813: 0x4000, 0x1814: 0x4000, 0x1815: 0x4000, 0x1816: 0x4000, 0x1817: 0x4000, - 0x1818: 0x4000, 0x1819: 0x4000, 0x181a: 0x4000, 0x181b: 0x4000, 0x181c: 0x4000, 0x181d: 0x4000, - 0x181e: 0x4000, 0x181f: 0x4000, 0x1820: 0x4000, 0x1821: 0x4000, 0x1822: 0x4000, 0x1823: 0x4000, - 0x1824: 0x4000, 0x1825: 0x4000, 0x1826: 0x4000, 0x1827: 0x4000, 0x1828: 0x4000, 0x1829: 0x4000, - 0x182a: 0x4000, 0x182b: 0x4000, 0x182c: 0x4000, 0x182d: 0x4000, 0x182e: 0x4000, 0x182f: 0x4000, - 0x1830: 0x4000, 0x1831: 0x4000, 0x1832: 0x4000, 0x1833: 0x4000, 0x1834: 0x4000, 0x1835: 0x4000, - 0x1836: 0x4000, 0x1837: 0x4000, 0x1838: 0x4000, 0x1839: 0x4000, 0x183a: 0x4000, 0x183b: 0x4000, - 0x183c: 0x4000, 0x183d: 0x4000, 0x183e: 0x4000, - // Block 0x61, offset 0x1840 - 0x1840: 0x4000, 0x1841: 0x4000, 0x1842: 0x4000, 0x1843: 0x4000, 0x1844: 0x4000, 0x1845: 0x4000, - 0x1846: 0x4000, 0x1847: 0x4000, 0x1848: 0x4000, 0x1849: 0x4000, 0x184a: 0x4000, 0x184b: 0x4000, - 0x184c: 0x4000, 0x184d: 0x4000, 0x184e: 0x4000, 0x184f: 0x4000, 0x1850: 0x4000, 0x1851: 0x4000, - 0x1852: 0x4000, 0x1853: 0x4000, 0x1854: 0x4000, 0x1855: 0x4000, 0x1856: 0x4000, 0x1857: 0x4000, - 0x1858: 0x4000, 0x1859: 0x4000, 0x185a: 0x4000, 0x185b: 0x4000, 0x185c: 0x4000, 0x185d: 0x4000, - 0x185e: 0x4000, 0x185f: 0x4000, 0x1860: 0x4000, 0x1861: 0x4000, 0x1862: 0x4000, 0x1863: 0x4000, - 0x1864: 0x4000, 0x1865: 0x4000, 0x1866: 0x4000, 0x1867: 0x4000, 0x1868: 0x4000, 0x1869: 0x4000, - 0x186a: 0x4000, 0x186b: 0x4000, 0x186c: 0x4000, 0x186d: 0x4000, 0x186e: 0x4000, 0x186f: 0x4000, - 0x1870: 0x4000, 0x1873: 0x4000, 0x1874: 0x4000, 0x1875: 0x4000, - 0x1876: 0x4000, 0x187a: 0x4000, - 0x187c: 0x4000, 0x187d: 0x4000, 0x187e: 0x4000, 0x187f: 0x4000, - // Block 0x62, offset 0x1880 - 0x1880: 0x4000, 0x1881: 0x4000, 0x1882: 0x4000, 0x1883: 0x4000, 0x1884: 0x4000, 0x1885: 0x4000, - 0x1886: 0x4000, 0x1887: 0x4000, 0x1888: 0x4000, 0x1889: 0x4000, 0x188a: 0x4000, 0x188b: 0x4000, - 0x188c: 0x4000, 0x188d: 0x4000, 0x188e: 0x4000, 0x188f: 0x4000, 0x1890: 0x4000, 0x1891: 0x4000, - 0x1892: 0x4000, 0x1893: 0x4000, 0x1894: 0x4000, 0x1895: 0x4000, 0x1896: 0x4000, 0x1897: 0x4000, - 0x1898: 0x4000, 0x1899: 0x4000, 0x189a: 0x4000, 0x189b: 0x4000, 0x189c: 0x4000, 0x189d: 0x4000, - 0x189e: 0x4000, 0x189f: 0x4000, 0x18a0: 0x4000, 0x18a1: 0x4000, 0x18a2: 0x4000, - 0x18b0: 0x4000, 0x18b1: 0x4000, 0x18b2: 0x4000, 0x18b3: 0x4000, 0x18b4: 0x4000, 0x18b5: 0x4000, - 0x18b6: 0x4000, 0x18b7: 0x4000, 0x18b8: 0x4000, 0x18b9: 0x4000, - // Block 0x63, offset 0x18c0 - 0x18c0: 0x4000, 0x18c1: 0x4000, 0x18c2: 0x4000, - 0x18d0: 0x4000, 0x18d1: 0x4000, - 0x18d2: 0x4000, 0x18d3: 0x4000, 0x18d4: 0x4000, 0x18d5: 0x4000, 0x18d6: 0x4000, 0x18d7: 0x4000, - 0x18d8: 0x4000, 0x18d9: 0x4000, 0x18da: 0x4000, 0x18db: 0x4000, 0x18dc: 0x4000, 0x18dd: 0x4000, - 0x18de: 0x4000, 0x18df: 0x4000, 0x18e0: 0x4000, 0x18e1: 0x4000, 0x18e2: 0x4000, 
0x18e3: 0x4000, - 0x18e4: 0x4000, 0x18e5: 0x4000, 0x18e6: 0x4000, 0x18e7: 0x4000, 0x18e8: 0x4000, 0x18e9: 0x4000, - 0x18ea: 0x4000, 0x18eb: 0x4000, 0x18ec: 0x4000, 0x18ed: 0x4000, 0x18ee: 0x4000, 0x18ef: 0x4000, - 0x18f0: 0x4000, 0x18f1: 0x4000, 0x18f2: 0x4000, 0x18f3: 0x4000, 0x18f4: 0x4000, 0x18f5: 0x4000, - 0x18f6: 0x4000, 0x18f7: 0x4000, 0x18f8: 0x4000, 0x18f9: 0x4000, 0x18fa: 0x4000, 0x18fb: 0x4000, - 0x18fc: 0x4000, 0x18fd: 0x4000, 0x18fe: 0x4000, 0x18ff: 0x4000, - // Block 0x64, offset 0x1900 - 0x1900: 0x2000, 0x1901: 0x2000, 0x1902: 0x2000, 0x1903: 0x2000, 0x1904: 0x2000, 0x1905: 0x2000, - 0x1906: 0x2000, 0x1907: 0x2000, 0x1908: 0x2000, 0x1909: 0x2000, 0x190a: 0x2000, 0x190b: 0x2000, - 0x190c: 0x2000, 0x190d: 0x2000, 0x190e: 0x2000, 0x190f: 0x2000, 0x1910: 0x2000, 0x1911: 0x2000, - 0x1912: 0x2000, 0x1913: 0x2000, 0x1914: 0x2000, 0x1915: 0x2000, 0x1916: 0x2000, 0x1917: 0x2000, - 0x1918: 0x2000, 0x1919: 0x2000, 0x191a: 0x2000, 0x191b: 0x2000, 0x191c: 0x2000, 0x191d: 0x2000, - 0x191e: 0x2000, 0x191f: 0x2000, 0x1920: 0x2000, 0x1921: 0x2000, 0x1922: 0x2000, 0x1923: 0x2000, - 0x1924: 0x2000, 0x1925: 0x2000, 0x1926: 0x2000, 0x1927: 0x2000, 0x1928: 0x2000, 0x1929: 0x2000, - 0x192a: 0x2000, 0x192b: 0x2000, 0x192c: 0x2000, 0x192d: 0x2000, 0x192e: 0x2000, 0x192f: 0x2000, - 0x1930: 0x2000, 0x1931: 0x2000, 0x1932: 0x2000, 0x1933: 0x2000, 0x1934: 0x2000, 0x1935: 0x2000, - 0x1936: 0x2000, 0x1937: 0x2000, 0x1938: 0x2000, 0x1939: 0x2000, 0x193a: 0x2000, 0x193b: 0x2000, - 0x193c: 0x2000, 0x193d: 0x2000, -} - -// widthIndex: 22 blocks, 1408 entries, 1408 bytes -// Block 0 is the zero block. -var widthIndex = [1408]uint8{ - // Block 0x0, offset 0x0 - // Block 0x1, offset 0x40 - // Block 0x2, offset 0x80 - // Block 0x3, offset 0xc0 - 0xc2: 0x01, 0xc3: 0x02, 0xc4: 0x03, 0xc5: 0x04, 0xc7: 0x05, - 0xc9: 0x06, 0xcb: 0x07, 0xcc: 0x08, 0xcd: 0x09, 0xce: 0x0a, 0xcf: 0x0b, - 0xd0: 0x0c, 0xd1: 0x0d, - 0xe1: 0x02, 0xe2: 0x03, 0xe3: 0x04, 0xe4: 0x05, 0xe5: 0x06, 0xe6: 0x06, 0xe7: 0x06, - 0xe8: 0x06, 0xe9: 0x06, 0xea: 0x07, 0xeb: 0x06, 0xec: 0x06, 0xed: 0x08, 0xee: 0x09, 0xef: 0x0a, - 0xf0: 0x0f, 0xf3: 0x12, 0xf4: 0x13, - // Block 0x4, offset 0x100 - 0x104: 0x0e, 0x105: 0x0f, - // Block 0x5, offset 0x140 - 0x140: 0x10, 0x141: 0x11, 0x142: 0x12, 0x144: 0x13, 0x145: 0x14, 0x146: 0x15, 0x147: 0x16, - 0x148: 0x17, 0x149: 0x18, 0x14a: 0x19, 0x14c: 0x1a, 0x14f: 0x1b, - 0x151: 0x1c, 0x152: 0x08, 0x153: 0x1d, 0x154: 0x1e, 0x155: 0x1f, 0x156: 0x20, 0x157: 0x21, - 0x158: 0x22, 0x159: 0x23, 0x15a: 0x24, 0x15b: 0x25, 0x15c: 0x26, 0x15d: 0x27, 0x15e: 0x28, 0x15f: 0x29, - 0x166: 0x2a, - 0x16c: 0x2b, 0x16d: 0x2c, - 0x17a: 0x2d, 0x17b: 0x2e, 0x17c: 0x0e, 0x17d: 0x0e, 0x17e: 0x0e, 0x17f: 0x2f, - // Block 0x6, offset 0x180 - 0x180: 0x30, 0x181: 0x31, 0x182: 0x32, 0x183: 0x33, 0x184: 0x34, 0x185: 0x35, 0x186: 0x36, 0x187: 0x37, - 0x188: 0x38, 0x189: 0x39, 0x18a: 0x0e, 0x18b: 0x3a, 0x18c: 0x0e, 0x18d: 0x0e, 0x18e: 0x0e, 0x18f: 0x0e, - 0x190: 0x0e, 0x191: 0x0e, 0x192: 0x0e, 0x193: 0x0e, 0x194: 0x0e, 0x195: 0x0e, 0x196: 0x0e, 0x197: 0x0e, - 0x198: 0x0e, 0x199: 0x0e, 0x19a: 0x0e, 0x19b: 0x0e, 0x19c: 0x0e, 0x19d: 0x0e, 0x19e: 0x0e, 0x19f: 0x0e, - 0x1a0: 0x0e, 0x1a1: 0x0e, 0x1a2: 0x0e, 0x1a3: 0x0e, 0x1a4: 0x0e, 0x1a5: 0x0e, 0x1a6: 0x0e, 0x1a7: 0x0e, - 0x1a8: 0x0e, 0x1a9: 0x0e, 0x1aa: 0x0e, 0x1ab: 0x0e, 0x1ac: 0x0e, 0x1ad: 0x0e, 0x1ae: 0x0e, 0x1af: 0x0e, - 0x1b0: 0x0e, 0x1b1: 0x0e, 0x1b2: 0x0e, 0x1b3: 0x0e, 0x1b4: 0x0e, 0x1b5: 0x0e, 0x1b6: 0x0e, 0x1b7: 0x0e, - 0x1b8: 0x0e, 0x1b9: 0x0e, 0x1ba: 0x0e, 0x1bb: 0x0e, 0x1bc: 0x0e, 0x1bd: 0x0e, 
0x1be: 0x0e, 0x1bf: 0x0e, - // Block 0x7, offset 0x1c0 - 0x1c0: 0x0e, 0x1c1: 0x0e, 0x1c2: 0x0e, 0x1c3: 0x0e, 0x1c4: 0x0e, 0x1c5: 0x0e, 0x1c6: 0x0e, 0x1c7: 0x0e, - 0x1c8: 0x0e, 0x1c9: 0x0e, 0x1ca: 0x0e, 0x1cb: 0x0e, 0x1cc: 0x0e, 0x1cd: 0x0e, 0x1ce: 0x0e, 0x1cf: 0x0e, - 0x1d0: 0x0e, 0x1d1: 0x0e, 0x1d2: 0x0e, 0x1d3: 0x0e, 0x1d4: 0x0e, 0x1d5: 0x0e, 0x1d6: 0x0e, 0x1d7: 0x0e, - 0x1d8: 0x0e, 0x1d9: 0x0e, 0x1da: 0x0e, 0x1db: 0x0e, 0x1dc: 0x0e, 0x1dd: 0x0e, 0x1de: 0x0e, 0x1df: 0x0e, - 0x1e0: 0x0e, 0x1e1: 0x0e, 0x1e2: 0x0e, 0x1e3: 0x0e, 0x1e4: 0x0e, 0x1e5: 0x0e, 0x1e6: 0x0e, 0x1e7: 0x0e, - 0x1e8: 0x0e, 0x1e9: 0x0e, 0x1ea: 0x0e, 0x1eb: 0x0e, 0x1ec: 0x0e, 0x1ed: 0x0e, 0x1ee: 0x0e, 0x1ef: 0x0e, - 0x1f0: 0x0e, 0x1f1: 0x0e, 0x1f2: 0x0e, 0x1f3: 0x0e, 0x1f4: 0x0e, 0x1f5: 0x0e, 0x1f6: 0x0e, - 0x1f8: 0x0e, 0x1f9: 0x0e, 0x1fa: 0x0e, 0x1fb: 0x0e, 0x1fc: 0x0e, 0x1fd: 0x0e, 0x1fe: 0x0e, 0x1ff: 0x0e, - // Block 0x8, offset 0x200 - 0x200: 0x0e, 0x201: 0x0e, 0x202: 0x0e, 0x203: 0x0e, 0x204: 0x0e, 0x205: 0x0e, 0x206: 0x0e, 0x207: 0x0e, - 0x208: 0x0e, 0x209: 0x0e, 0x20a: 0x0e, 0x20b: 0x0e, 0x20c: 0x0e, 0x20d: 0x0e, 0x20e: 0x0e, 0x20f: 0x0e, - 0x210: 0x0e, 0x211: 0x0e, 0x212: 0x0e, 0x213: 0x0e, 0x214: 0x0e, 0x215: 0x0e, 0x216: 0x0e, 0x217: 0x0e, - 0x218: 0x0e, 0x219: 0x0e, 0x21a: 0x0e, 0x21b: 0x0e, 0x21c: 0x0e, 0x21d: 0x0e, 0x21e: 0x0e, 0x21f: 0x0e, - 0x220: 0x0e, 0x221: 0x0e, 0x222: 0x0e, 0x223: 0x0e, 0x224: 0x0e, 0x225: 0x0e, 0x226: 0x0e, 0x227: 0x0e, - 0x228: 0x0e, 0x229: 0x0e, 0x22a: 0x0e, 0x22b: 0x0e, 0x22c: 0x0e, 0x22d: 0x0e, 0x22e: 0x0e, 0x22f: 0x0e, - 0x230: 0x0e, 0x231: 0x0e, 0x232: 0x0e, 0x233: 0x0e, 0x234: 0x0e, 0x235: 0x0e, 0x236: 0x0e, 0x237: 0x0e, - 0x238: 0x0e, 0x239: 0x0e, 0x23a: 0x0e, 0x23b: 0x0e, 0x23c: 0x0e, 0x23d: 0x0e, 0x23e: 0x0e, 0x23f: 0x0e, - // Block 0x9, offset 0x240 - 0x240: 0x0e, 0x241: 0x0e, 0x242: 0x0e, 0x243: 0x0e, 0x244: 0x0e, 0x245: 0x0e, 0x246: 0x0e, 0x247: 0x0e, - 0x248: 0x0e, 0x249: 0x0e, 0x24a: 0x0e, 0x24b: 0x0e, 0x24c: 0x0e, 0x24d: 0x0e, 0x24e: 0x0e, 0x24f: 0x0e, - 0x250: 0x0e, 0x251: 0x0e, 0x252: 0x3b, 0x253: 0x3c, - 0x265: 0x3d, - 0x270: 0x0e, 0x271: 0x0e, 0x272: 0x0e, 0x273: 0x0e, 0x274: 0x0e, 0x275: 0x0e, 0x276: 0x0e, 0x277: 0x0e, - 0x278: 0x0e, 0x279: 0x0e, 0x27a: 0x0e, 0x27b: 0x0e, 0x27c: 0x0e, 0x27d: 0x0e, 0x27e: 0x0e, 0x27f: 0x0e, - // Block 0xa, offset 0x280 - 0x280: 0x0e, 0x281: 0x0e, 0x282: 0x0e, 0x283: 0x0e, 0x284: 0x0e, 0x285: 0x0e, 0x286: 0x0e, 0x287: 0x0e, - 0x288: 0x0e, 0x289: 0x0e, 0x28a: 0x0e, 0x28b: 0x0e, 0x28c: 0x0e, 0x28d: 0x0e, 0x28e: 0x0e, 0x28f: 0x0e, - 0x290: 0x0e, 0x291: 0x0e, 0x292: 0x0e, 0x293: 0x0e, 0x294: 0x0e, 0x295: 0x0e, 0x296: 0x0e, 0x297: 0x0e, - 0x298: 0x0e, 0x299: 0x0e, 0x29a: 0x0e, 0x29b: 0x0e, 0x29c: 0x0e, 0x29d: 0x0e, 0x29e: 0x3e, - // Block 0xb, offset 0x2c0 - 0x2c0: 0x08, 0x2c1: 0x08, 0x2c2: 0x08, 0x2c3: 0x08, 0x2c4: 0x08, 0x2c5: 0x08, 0x2c6: 0x08, 0x2c7: 0x08, - 0x2c8: 0x08, 0x2c9: 0x08, 0x2ca: 0x08, 0x2cb: 0x08, 0x2cc: 0x08, 0x2cd: 0x08, 0x2ce: 0x08, 0x2cf: 0x08, - 0x2d0: 0x08, 0x2d1: 0x08, 0x2d2: 0x08, 0x2d3: 0x08, 0x2d4: 0x08, 0x2d5: 0x08, 0x2d6: 0x08, 0x2d7: 0x08, - 0x2d8: 0x08, 0x2d9: 0x08, 0x2da: 0x08, 0x2db: 0x08, 0x2dc: 0x08, 0x2dd: 0x08, 0x2de: 0x08, 0x2df: 0x08, - 0x2e0: 0x08, 0x2e1: 0x08, 0x2e2: 0x08, 0x2e3: 0x08, 0x2e4: 0x08, 0x2e5: 0x08, 0x2e6: 0x08, 0x2e7: 0x08, - 0x2e8: 0x08, 0x2e9: 0x08, 0x2ea: 0x08, 0x2eb: 0x08, 0x2ec: 0x08, 0x2ed: 0x08, 0x2ee: 0x08, 0x2ef: 0x08, - 0x2f0: 0x08, 0x2f1: 0x08, 0x2f2: 0x08, 0x2f3: 0x08, 0x2f4: 0x08, 0x2f5: 0x08, 0x2f6: 0x08, 0x2f7: 0x08, - 0x2f8: 0x08, 0x2f9: 0x08, 0x2fa: 0x08, 0x2fb: 0x08, 
0x2fc: 0x08, 0x2fd: 0x08, 0x2fe: 0x08, 0x2ff: 0x08, - // Block 0xc, offset 0x300 - 0x300: 0x08, 0x301: 0x08, 0x302: 0x08, 0x303: 0x08, 0x304: 0x08, 0x305: 0x08, 0x306: 0x08, 0x307: 0x08, - 0x308: 0x08, 0x309: 0x08, 0x30a: 0x08, 0x30b: 0x08, 0x30c: 0x08, 0x30d: 0x08, 0x30e: 0x08, 0x30f: 0x08, - 0x310: 0x08, 0x311: 0x08, 0x312: 0x08, 0x313: 0x08, 0x314: 0x08, 0x315: 0x08, 0x316: 0x08, 0x317: 0x08, - 0x318: 0x08, 0x319: 0x08, 0x31a: 0x08, 0x31b: 0x08, 0x31c: 0x08, 0x31d: 0x08, 0x31e: 0x08, 0x31f: 0x08, - 0x320: 0x08, 0x321: 0x08, 0x322: 0x08, 0x323: 0x08, 0x324: 0x0e, 0x325: 0x0e, 0x326: 0x0e, 0x327: 0x0e, - 0x328: 0x0e, 0x329: 0x0e, 0x32a: 0x0e, 0x32b: 0x0e, - 0x338: 0x3f, 0x339: 0x40, 0x33c: 0x41, 0x33d: 0x42, 0x33e: 0x43, 0x33f: 0x44, - // Block 0xd, offset 0x340 - 0x37f: 0x45, - // Block 0xe, offset 0x380 - 0x380: 0x0e, 0x381: 0x0e, 0x382: 0x0e, 0x383: 0x0e, 0x384: 0x0e, 0x385: 0x0e, 0x386: 0x0e, 0x387: 0x0e, - 0x388: 0x0e, 0x389: 0x0e, 0x38a: 0x0e, 0x38b: 0x0e, 0x38c: 0x0e, 0x38d: 0x0e, 0x38e: 0x0e, 0x38f: 0x0e, - 0x390: 0x0e, 0x391: 0x0e, 0x392: 0x0e, 0x393: 0x0e, 0x394: 0x0e, 0x395: 0x0e, 0x396: 0x0e, 0x397: 0x0e, - 0x398: 0x0e, 0x399: 0x0e, 0x39a: 0x0e, 0x39b: 0x0e, 0x39c: 0x0e, 0x39d: 0x0e, 0x39e: 0x0e, 0x39f: 0x46, - 0x3a0: 0x0e, 0x3a1: 0x0e, 0x3a2: 0x0e, 0x3a3: 0x0e, 0x3a4: 0x0e, 0x3a5: 0x0e, 0x3a6: 0x0e, 0x3a7: 0x0e, - 0x3a8: 0x0e, 0x3a9: 0x0e, 0x3aa: 0x0e, 0x3ab: 0x47, - // Block 0xf, offset 0x3c0 - 0x3c0: 0x0e, 0x3c1: 0x0e, 0x3c2: 0x0e, 0x3c3: 0x0e, 0x3c4: 0x48, 0x3c5: 0x49, 0x3c6: 0x0e, 0x3c7: 0x0e, - 0x3c8: 0x0e, 0x3c9: 0x0e, 0x3ca: 0x0e, 0x3cb: 0x4a, - // Block 0x10, offset 0x400 - 0x400: 0x4b, 0x403: 0x4c, 0x404: 0x4d, 0x405: 0x4e, 0x406: 0x4f, - 0x408: 0x50, 0x409: 0x51, 0x40c: 0x52, 0x40d: 0x53, 0x40e: 0x54, 0x40f: 0x55, - 0x410: 0x3a, 0x411: 0x56, 0x412: 0x0e, 0x413: 0x57, 0x414: 0x58, 0x415: 0x59, 0x416: 0x5a, 0x417: 0x5b, - 0x418: 0x0e, 0x419: 0x5c, 0x41a: 0x0e, 0x41b: 0x5d, - 0x424: 0x5e, 0x425: 0x5f, 0x426: 0x60, 0x427: 0x61, - // Block 0x11, offset 0x440 - 0x456: 0x0b, 0x457: 0x06, - 0x458: 0x0c, 0x45b: 0x0d, 0x45f: 0x0e, - 0x460: 0x06, 0x461: 0x06, 0x462: 0x06, 0x463: 0x06, 0x464: 0x06, 0x465: 0x06, 0x466: 0x06, 0x467: 0x06, - 0x468: 0x06, 0x469: 0x06, 0x46a: 0x06, 0x46b: 0x06, 0x46c: 0x06, 0x46d: 0x06, 0x46e: 0x06, 0x46f: 0x06, - 0x470: 0x06, 0x471: 0x06, 0x472: 0x06, 0x473: 0x06, 0x474: 0x06, 0x475: 0x06, 0x476: 0x06, 0x477: 0x06, - 0x478: 0x06, 0x479: 0x06, 0x47a: 0x06, 0x47b: 0x06, 0x47c: 0x06, 0x47d: 0x06, 0x47e: 0x06, 0x47f: 0x06, - // Block 0x12, offset 0x480 - 0x484: 0x08, 0x485: 0x08, 0x486: 0x08, 0x487: 0x09, - // Block 0x13, offset 0x4c0 - 0x4c0: 0x08, 0x4c1: 0x08, 0x4c2: 0x08, 0x4c3: 0x08, 0x4c4: 0x08, 0x4c5: 0x08, 0x4c6: 0x08, 0x4c7: 0x08, - 0x4c8: 0x08, 0x4c9: 0x08, 0x4ca: 0x08, 0x4cb: 0x08, 0x4cc: 0x08, 0x4cd: 0x08, 0x4ce: 0x08, 0x4cf: 0x08, - 0x4d0: 0x08, 0x4d1: 0x08, 0x4d2: 0x08, 0x4d3: 0x08, 0x4d4: 0x08, 0x4d5: 0x08, 0x4d6: 0x08, 0x4d7: 0x08, - 0x4d8: 0x08, 0x4d9: 0x08, 0x4da: 0x08, 0x4db: 0x08, 0x4dc: 0x08, 0x4dd: 0x08, 0x4de: 0x08, 0x4df: 0x08, - 0x4e0: 0x08, 0x4e1: 0x08, 0x4e2: 0x08, 0x4e3: 0x08, 0x4e4: 0x08, 0x4e5: 0x08, 0x4e6: 0x08, 0x4e7: 0x08, - 0x4e8: 0x08, 0x4e9: 0x08, 0x4ea: 0x08, 0x4eb: 0x08, 0x4ec: 0x08, 0x4ed: 0x08, 0x4ee: 0x08, 0x4ef: 0x08, - 0x4f0: 0x08, 0x4f1: 0x08, 0x4f2: 0x08, 0x4f3: 0x08, 0x4f4: 0x08, 0x4f5: 0x08, 0x4f6: 0x08, 0x4f7: 0x08, - 0x4f8: 0x08, 0x4f9: 0x08, 0x4fa: 0x08, 0x4fb: 0x08, 0x4fc: 0x08, 0x4fd: 0x08, 0x4fe: 0x08, 0x4ff: 0x62, - // Block 0x14, offset 0x500 - 0x520: 0x10, - 0x530: 0x09, 0x531: 0x09, 0x532: 0x09, 
0x533: 0x09, 0x534: 0x09, 0x535: 0x09, 0x536: 0x09, 0x537: 0x09, - 0x538: 0x09, 0x539: 0x09, 0x53a: 0x09, 0x53b: 0x09, 0x53c: 0x09, 0x53d: 0x09, 0x53e: 0x09, 0x53f: 0x11, - // Block 0x15, offset 0x540 - 0x540: 0x09, 0x541: 0x09, 0x542: 0x09, 0x543: 0x09, 0x544: 0x09, 0x545: 0x09, 0x546: 0x09, 0x547: 0x09, - 0x548: 0x09, 0x549: 0x09, 0x54a: 0x09, 0x54b: 0x09, 0x54c: 0x09, 0x54d: 0x09, 0x54e: 0x09, 0x54f: 0x11, -} - -// inverseData contains 4-byte entries of the following format: -// -// <0 padding> -// -// The last byte of the UTF-8-encoded rune is xor-ed with the last byte of the -// UTF-8 encoding of the original rune. Mappings often have the following -// pattern: -// -// A -> A (U+FF21 -> U+0041) -// B -> B (U+FF22 -> U+0042) -// ... -// -// By xor-ing the last byte the same entry can be shared by many mappings. This -// reduces the total number of distinct entries by about two thirds. -// The resulting entry for the aforementioned mappings is -// -// { 0x01, 0xE0, 0x00, 0x00 } -// -// Using this entry to map U+FF21 (UTF-8 [EF BC A1]), we get -// -// E0 ^ A1 = 41. -// -// Similarly, for U+FF22 (UTF-8 [EF BC A2]), we get -// -// E0 ^ A2 = 42. -// -// Note that because of the xor-ing, the byte sequence stored in the entry is -// not valid UTF-8. -var inverseData = [150][4]byte{ - {0x00, 0x00, 0x00, 0x00}, - {0x03, 0xe3, 0x80, 0xa0}, - {0x03, 0xef, 0xbc, 0xa0}, - {0x03, 0xef, 0xbc, 0xe0}, - {0x03, 0xef, 0xbd, 0xe0}, - {0x03, 0xef, 0xbf, 0x02}, - {0x03, 0xef, 0xbf, 0x00}, - {0x03, 0xef, 0xbf, 0x0e}, - {0x03, 0xef, 0xbf, 0x0c}, - {0x03, 0xef, 0xbf, 0x0f}, - {0x03, 0xef, 0xbf, 0x39}, - {0x03, 0xef, 0xbf, 0x3b}, - {0x03, 0xef, 0xbf, 0x3f}, - {0x03, 0xef, 0xbf, 0x2a}, - {0x03, 0xef, 0xbf, 0x0d}, - {0x03, 0xef, 0xbf, 0x25}, - {0x03, 0xef, 0xbd, 0x1a}, - {0x03, 0xef, 0xbd, 0x26}, - {0x01, 0xa0, 0x00, 0x00}, - {0x03, 0xef, 0xbd, 0x25}, - {0x03, 0xef, 0xbd, 0x23}, - {0x03, 0xef, 0xbd, 0x2e}, - {0x03, 0xef, 0xbe, 0x07}, - {0x03, 0xef, 0xbe, 0x05}, - {0x03, 0xef, 0xbd, 0x06}, - {0x03, 0xef, 0xbd, 0x13}, - {0x03, 0xef, 0xbd, 0x0b}, - {0x03, 0xef, 0xbd, 0x16}, - {0x03, 0xef, 0xbd, 0x0c}, - {0x03, 0xef, 0xbd, 0x15}, - {0x03, 0xef, 0xbd, 0x0d}, - {0x03, 0xef, 0xbd, 0x1c}, - {0x03, 0xef, 0xbd, 0x02}, - {0x03, 0xef, 0xbd, 0x1f}, - {0x03, 0xef, 0xbd, 0x1d}, - {0x03, 0xef, 0xbd, 0x17}, - {0x03, 0xef, 0xbd, 0x08}, - {0x03, 0xef, 0xbd, 0x09}, - {0x03, 0xef, 0xbd, 0x0e}, - {0x03, 0xef, 0xbd, 0x04}, - {0x03, 0xef, 0xbd, 0x05}, - {0x03, 0xef, 0xbe, 0x3f}, - {0x03, 0xef, 0xbe, 0x00}, - {0x03, 0xef, 0xbd, 0x2c}, - {0x03, 0xef, 0xbe, 0x06}, - {0x03, 0xef, 0xbe, 0x0c}, - {0x03, 0xef, 0xbe, 0x0f}, - {0x03, 0xef, 0xbe, 0x0d}, - {0x03, 0xef, 0xbe, 0x0b}, - {0x03, 0xef, 0xbe, 0x19}, - {0x03, 0xef, 0xbe, 0x15}, - {0x03, 0xef, 0xbe, 0x11}, - {0x03, 0xef, 0xbe, 0x31}, - {0x03, 0xef, 0xbe, 0x33}, - {0x03, 0xef, 0xbd, 0x0f}, - {0x03, 0xef, 0xbe, 0x30}, - {0x03, 0xef, 0xbe, 0x3e}, - {0x03, 0xef, 0xbe, 0x32}, - {0x03, 0xef, 0xbe, 0x36}, - {0x03, 0xef, 0xbd, 0x14}, - {0x03, 0xef, 0xbe, 0x2e}, - {0x03, 0xef, 0xbd, 0x1e}, - {0x03, 0xef, 0xbe, 0x10}, - {0x03, 0xef, 0xbf, 0x13}, - {0x03, 0xef, 0xbf, 0x15}, - {0x03, 0xef, 0xbf, 0x17}, - {0x03, 0xef, 0xbf, 0x1f}, - {0x03, 0xef, 0xbf, 0x1d}, - {0x03, 0xef, 0xbf, 0x1b}, - {0x03, 0xef, 0xbf, 0x09}, - {0x03, 0xef, 0xbf, 0x0b}, - {0x03, 0xef, 0xbf, 0x37}, - {0x03, 0xef, 0xbe, 0x04}, - {0x01, 0xe0, 0x00, 0x00}, - {0x03, 0xe2, 0xa6, 0x1a}, - {0x03, 0xe2, 0xa6, 0x26}, - {0x03, 0xe3, 0x80, 0x23}, - {0x03, 0xe3, 0x80, 0x2e}, - {0x03, 0xe3, 0x80, 0x25}, - {0x03, 0xe3, 0x83, 0x1e}, - {0x03, 0xe3, 
0x83, 0x14}, - {0x03, 0xe3, 0x82, 0x06}, - {0x03, 0xe3, 0x82, 0x0b}, - {0x03, 0xe3, 0x82, 0x0c}, - {0x03, 0xe3, 0x82, 0x0d}, - {0x03, 0xe3, 0x82, 0x02}, - {0x03, 0xe3, 0x83, 0x0f}, - {0x03, 0xe3, 0x83, 0x08}, - {0x03, 0xe3, 0x83, 0x09}, - {0x03, 0xe3, 0x83, 0x2c}, - {0x03, 0xe3, 0x83, 0x0c}, - {0x03, 0xe3, 0x82, 0x13}, - {0x03, 0xe3, 0x82, 0x16}, - {0x03, 0xe3, 0x82, 0x15}, - {0x03, 0xe3, 0x82, 0x1c}, - {0x03, 0xe3, 0x82, 0x1f}, - {0x03, 0xe3, 0x82, 0x1d}, - {0x03, 0xe3, 0x82, 0x1a}, - {0x03, 0xe3, 0x82, 0x17}, - {0x03, 0xe3, 0x82, 0x08}, - {0x03, 0xe3, 0x82, 0x09}, - {0x03, 0xe3, 0x82, 0x0e}, - {0x03, 0xe3, 0x82, 0x04}, - {0x03, 0xe3, 0x82, 0x05}, - {0x03, 0xe3, 0x82, 0x3f}, - {0x03, 0xe3, 0x83, 0x00}, - {0x03, 0xe3, 0x83, 0x06}, - {0x03, 0xe3, 0x83, 0x05}, - {0x03, 0xe3, 0x83, 0x0d}, - {0x03, 0xe3, 0x83, 0x0b}, - {0x03, 0xe3, 0x83, 0x07}, - {0x03, 0xe3, 0x83, 0x19}, - {0x03, 0xe3, 0x83, 0x15}, - {0x03, 0xe3, 0x83, 0x11}, - {0x03, 0xe3, 0x83, 0x31}, - {0x03, 0xe3, 0x83, 0x33}, - {0x03, 0xe3, 0x83, 0x30}, - {0x03, 0xe3, 0x83, 0x3e}, - {0x03, 0xe3, 0x83, 0x32}, - {0x03, 0xe3, 0x83, 0x36}, - {0x03, 0xe3, 0x83, 0x2e}, - {0x03, 0xe3, 0x82, 0x07}, - {0x03, 0xe3, 0x85, 0x04}, - {0x03, 0xe3, 0x84, 0x10}, - {0x03, 0xe3, 0x85, 0x30}, - {0x03, 0xe3, 0x85, 0x0d}, - {0x03, 0xe3, 0x85, 0x13}, - {0x03, 0xe3, 0x85, 0x15}, - {0x03, 0xe3, 0x85, 0x17}, - {0x03, 0xe3, 0x85, 0x1f}, - {0x03, 0xe3, 0x85, 0x1d}, - {0x03, 0xe3, 0x85, 0x1b}, - {0x03, 0xe3, 0x85, 0x09}, - {0x03, 0xe3, 0x85, 0x0f}, - {0x03, 0xe3, 0x85, 0x0b}, - {0x03, 0xe3, 0x85, 0x37}, - {0x03, 0xe3, 0x85, 0x3b}, - {0x03, 0xe3, 0x85, 0x39}, - {0x03, 0xe3, 0x85, 0x3f}, - {0x02, 0xc2, 0x02, 0x00}, - {0x02, 0xc2, 0x0e, 0x00}, - {0x02, 0xc2, 0x0c, 0x00}, - {0x02, 0xc2, 0x00, 0x00}, - {0x03, 0xe2, 0x82, 0x0f}, - {0x03, 0xe2, 0x94, 0x2a}, - {0x03, 0xe2, 0x86, 0x39}, - {0x03, 0xe2, 0x86, 0x3b}, - {0x03, 0xe2, 0x86, 0x3f}, - {0x03, 0xe2, 0x96, 0x0d}, - {0x03, 0xe2, 0x97, 0x25}, -} - -// Total table size 14936 bytes (14KiB) diff --git a/vendor/golang.org/x/text/width/tables12.0.0.go b/vendor/golang.org/x/text/width/tables12.0.0.go deleted file mode 100644 index 755ee9122..000000000 --- a/vendor/golang.org/x/text/width/tables12.0.0.go +++ /dev/null @@ -1,1360 +0,0 @@ -// Code generated by running "go generate" in golang.org/x/text. DO NOT EDIT. - -//go:build go1.14 && !go1.16 - -package width - -// UnicodeVersion is the Unicode version from which the tables in this package are derived. -const UnicodeVersion = "12.0.0" - -// lookup returns the trie value for the first UTF-8 encoding in s and -// the width in bytes of this encoding. The size will be 0 if s does not -// hold enough bytes to complete the encoding. len(s) must be greater than 0. -func (t *widthTrie) lookup(s []byte) (v uint16, sz int) { - c0 := s[0] - switch { - case c0 < 0x80: // is ASCII - return widthValues[c0], 1 - case c0 < 0xC2: - return 0, 1 // Illegal UTF-8: not a starter, not ASCII. - case c0 < 0xE0: // 2-byte UTF-8 - if len(s) < 2 { - return 0, 0 - } - i := widthIndex[c0] - c1 := s[1] - if c1 < 0x80 || 0xC0 <= c1 { - return 0, 1 // Illegal UTF-8: not a continuation byte. - } - return t.lookupValue(uint32(i), c1), 2 - case c0 < 0xF0: // 3-byte UTF-8 - if len(s) < 3 { - return 0, 0 - } - i := widthIndex[c0] - c1 := s[1] - if c1 < 0x80 || 0xC0 <= c1 { - return 0, 1 // Illegal UTF-8: not a continuation byte. - } - o := uint32(i)<<6 + uint32(c1) - i = widthIndex[o] - c2 := s[2] - if c2 < 0x80 || 0xC0 <= c2 { - return 0, 2 // Illegal UTF-8: not a continuation byte. 
- } - return t.lookupValue(uint32(i), c2), 3 - case c0 < 0xF8: // 4-byte UTF-8 - if len(s) < 4 { - return 0, 0 - } - i := widthIndex[c0] - c1 := s[1] - if c1 < 0x80 || 0xC0 <= c1 { - return 0, 1 // Illegal UTF-8: not a continuation byte. - } - o := uint32(i)<<6 + uint32(c1) - i = widthIndex[o] - c2 := s[2] - if c2 < 0x80 || 0xC0 <= c2 { - return 0, 2 // Illegal UTF-8: not a continuation byte. - } - o = uint32(i)<<6 + uint32(c2) - i = widthIndex[o] - c3 := s[3] - if c3 < 0x80 || 0xC0 <= c3 { - return 0, 3 // Illegal UTF-8: not a continuation byte. - } - return t.lookupValue(uint32(i), c3), 4 - } - // Illegal rune - return 0, 1 -} - -// lookupUnsafe returns the trie value for the first UTF-8 encoding in s. -// s must start with a full and valid UTF-8 encoded rune. -func (t *widthTrie) lookupUnsafe(s []byte) uint16 { - c0 := s[0] - if c0 < 0x80 { // is ASCII - return widthValues[c0] - } - i := widthIndex[c0] - if c0 < 0xE0 { // 2-byte UTF-8 - return t.lookupValue(uint32(i), s[1]) - } - i = widthIndex[uint32(i)<<6+uint32(s[1])] - if c0 < 0xF0 { // 3-byte UTF-8 - return t.lookupValue(uint32(i), s[2]) - } - i = widthIndex[uint32(i)<<6+uint32(s[2])] - if c0 < 0xF8 { // 4-byte UTF-8 - return t.lookupValue(uint32(i), s[3]) - } - return 0 -} - -// lookupString returns the trie value for the first UTF-8 encoding in s and -// the width in bytes of this encoding. The size will be 0 if s does not -// hold enough bytes to complete the encoding. len(s) must be greater than 0. -func (t *widthTrie) lookupString(s string) (v uint16, sz int) { - c0 := s[0] - switch { - case c0 < 0x80: // is ASCII - return widthValues[c0], 1 - case c0 < 0xC2: - return 0, 1 // Illegal UTF-8: not a starter, not ASCII. - case c0 < 0xE0: // 2-byte UTF-8 - if len(s) < 2 { - return 0, 0 - } - i := widthIndex[c0] - c1 := s[1] - if c1 < 0x80 || 0xC0 <= c1 { - return 0, 1 // Illegal UTF-8: not a continuation byte. - } - return t.lookupValue(uint32(i), c1), 2 - case c0 < 0xF0: // 3-byte UTF-8 - if len(s) < 3 { - return 0, 0 - } - i := widthIndex[c0] - c1 := s[1] - if c1 < 0x80 || 0xC0 <= c1 { - return 0, 1 // Illegal UTF-8: not a continuation byte. - } - o := uint32(i)<<6 + uint32(c1) - i = widthIndex[o] - c2 := s[2] - if c2 < 0x80 || 0xC0 <= c2 { - return 0, 2 // Illegal UTF-8: not a continuation byte. - } - return t.lookupValue(uint32(i), c2), 3 - case c0 < 0xF8: // 4-byte UTF-8 - if len(s) < 4 { - return 0, 0 - } - i := widthIndex[c0] - c1 := s[1] - if c1 < 0x80 || 0xC0 <= c1 { - return 0, 1 // Illegal UTF-8: not a continuation byte. - } - o := uint32(i)<<6 + uint32(c1) - i = widthIndex[o] - c2 := s[2] - if c2 < 0x80 || 0xC0 <= c2 { - return 0, 2 // Illegal UTF-8: not a continuation byte. - } - o = uint32(i)<<6 + uint32(c2) - i = widthIndex[o] - c3 := s[3] - if c3 < 0x80 || 0xC0 <= c3 { - return 0, 3 // Illegal UTF-8: not a continuation byte. - } - return t.lookupValue(uint32(i), c3), 4 - } - // Illegal rune - return 0, 1 -} - -// lookupStringUnsafe returns the trie value for the first UTF-8 encoding in s. -// s must start with a full and valid UTF-8 encoded rune. 
-func (t *widthTrie) lookupStringUnsafe(s string) uint16 { - c0 := s[0] - if c0 < 0x80 { // is ASCII - return widthValues[c0] - } - i := widthIndex[c0] - if c0 < 0xE0 { // 2-byte UTF-8 - return t.lookupValue(uint32(i), s[1]) - } - i = widthIndex[uint32(i)<<6+uint32(s[1])] - if c0 < 0xF0 { // 3-byte UTF-8 - return t.lookupValue(uint32(i), s[2]) - } - i = widthIndex[uint32(i)<<6+uint32(s[2])] - if c0 < 0xF8 { // 4-byte UTF-8 - return t.lookupValue(uint32(i), s[3]) - } - return 0 -} - -// widthTrie. Total size: 14720 bytes (14.38 KiB). Checksum: 3f4f2516ded5489b. -type widthTrie struct{} - -func newWidthTrie(i int) *widthTrie { - return &widthTrie{} -} - -// lookupValue determines the type of block n and looks up the value for b. -func (t *widthTrie) lookupValue(n uint32, b byte) uint16 { - switch { - default: - return uint16(widthValues[n<<6+uint32(b)]) - } -} - -// widthValues: 104 blocks, 6656 entries, 13312 bytes -// The third block is the zero block. -var widthValues = [6656]uint16{ - // Block 0x0, offset 0x0 - 0x20: 0x6001, 0x21: 0x6002, 0x22: 0x6002, 0x23: 0x6002, - 0x24: 0x6002, 0x25: 0x6002, 0x26: 0x6002, 0x27: 0x6002, 0x28: 0x6002, 0x29: 0x6002, - 0x2a: 0x6002, 0x2b: 0x6002, 0x2c: 0x6002, 0x2d: 0x6002, 0x2e: 0x6002, 0x2f: 0x6002, - 0x30: 0x6002, 0x31: 0x6002, 0x32: 0x6002, 0x33: 0x6002, 0x34: 0x6002, 0x35: 0x6002, - 0x36: 0x6002, 0x37: 0x6002, 0x38: 0x6002, 0x39: 0x6002, 0x3a: 0x6002, 0x3b: 0x6002, - 0x3c: 0x6002, 0x3d: 0x6002, 0x3e: 0x6002, 0x3f: 0x6002, - // Block 0x1, offset 0x40 - 0x40: 0x6003, 0x41: 0x6003, 0x42: 0x6003, 0x43: 0x6003, 0x44: 0x6003, 0x45: 0x6003, - 0x46: 0x6003, 0x47: 0x6003, 0x48: 0x6003, 0x49: 0x6003, 0x4a: 0x6003, 0x4b: 0x6003, - 0x4c: 0x6003, 0x4d: 0x6003, 0x4e: 0x6003, 0x4f: 0x6003, 0x50: 0x6003, 0x51: 0x6003, - 0x52: 0x6003, 0x53: 0x6003, 0x54: 0x6003, 0x55: 0x6003, 0x56: 0x6003, 0x57: 0x6003, - 0x58: 0x6003, 0x59: 0x6003, 0x5a: 0x6003, 0x5b: 0x6003, 0x5c: 0x6003, 0x5d: 0x6003, - 0x5e: 0x6003, 0x5f: 0x6003, 0x60: 0x6004, 0x61: 0x6004, 0x62: 0x6004, 0x63: 0x6004, - 0x64: 0x6004, 0x65: 0x6004, 0x66: 0x6004, 0x67: 0x6004, 0x68: 0x6004, 0x69: 0x6004, - 0x6a: 0x6004, 0x6b: 0x6004, 0x6c: 0x6004, 0x6d: 0x6004, 0x6e: 0x6004, 0x6f: 0x6004, - 0x70: 0x6004, 0x71: 0x6004, 0x72: 0x6004, 0x73: 0x6004, 0x74: 0x6004, 0x75: 0x6004, - 0x76: 0x6004, 0x77: 0x6004, 0x78: 0x6004, 0x79: 0x6004, 0x7a: 0x6004, 0x7b: 0x6004, - 0x7c: 0x6004, 0x7d: 0x6004, 0x7e: 0x6004, - // Block 0x2, offset 0x80 - // Block 0x3, offset 0xc0 - 0xe1: 0x2000, 0xe2: 0x6005, 0xe3: 0x6005, - 0xe4: 0x2000, 0xe5: 0x6006, 0xe6: 0x6005, 0xe7: 0x2000, 0xe8: 0x2000, - 0xea: 0x2000, 0xec: 0x6007, 0xed: 0x2000, 0xee: 0x2000, 0xef: 0x6008, - 0xf0: 0x2000, 0xf1: 0x2000, 0xf2: 0x2000, 0xf3: 0x2000, 0xf4: 0x2000, - 0xf6: 0x2000, 0xf7: 0x2000, 0xf8: 0x2000, 0xf9: 0x2000, 0xfa: 0x2000, - 0xfc: 0x2000, 0xfd: 0x2000, 0xfe: 0x2000, 0xff: 0x2000, - // Block 0x4, offset 0x100 - 0x106: 0x2000, - 0x110: 0x2000, - 0x117: 0x2000, - 0x118: 0x2000, - 0x11e: 0x2000, 0x11f: 0x2000, 0x120: 0x2000, 0x121: 0x2000, - 0x126: 0x2000, 0x128: 0x2000, 0x129: 0x2000, - 0x12a: 0x2000, 0x12c: 0x2000, 0x12d: 0x2000, - 0x130: 0x2000, 0x132: 0x2000, 0x133: 0x2000, - 0x137: 0x2000, 0x138: 0x2000, 0x139: 0x2000, 0x13a: 0x2000, - 0x13c: 0x2000, 0x13e: 0x2000, - // Block 0x5, offset 0x140 - 0x141: 0x2000, - 0x151: 0x2000, - 0x153: 0x2000, - 0x15b: 0x2000, - 0x166: 0x2000, 0x167: 0x2000, - 0x16b: 0x2000, - 0x171: 0x2000, 0x172: 0x2000, 0x173: 0x2000, - 0x178: 0x2000, - 0x17f: 0x2000, - // Block 0x6, offset 0x180 - 0x180: 0x2000, 0x181: 0x2000, 0x182: 
0x2000, 0x184: 0x2000, - 0x188: 0x2000, 0x189: 0x2000, 0x18a: 0x2000, 0x18b: 0x2000, - 0x18d: 0x2000, - 0x192: 0x2000, 0x193: 0x2000, - 0x1a6: 0x2000, 0x1a7: 0x2000, - 0x1ab: 0x2000, - // Block 0x7, offset 0x1c0 - 0x1ce: 0x2000, 0x1d0: 0x2000, - 0x1d2: 0x2000, 0x1d4: 0x2000, 0x1d6: 0x2000, - 0x1d8: 0x2000, 0x1da: 0x2000, 0x1dc: 0x2000, - // Block 0x8, offset 0x200 - 0x211: 0x2000, - 0x221: 0x2000, - // Block 0x9, offset 0x240 - 0x244: 0x2000, - 0x247: 0x2000, 0x249: 0x2000, 0x24a: 0x2000, 0x24b: 0x2000, - 0x24d: 0x2000, 0x250: 0x2000, - 0x258: 0x2000, 0x259: 0x2000, 0x25a: 0x2000, 0x25b: 0x2000, 0x25d: 0x2000, - 0x25f: 0x2000, - // Block 0xa, offset 0x280 - 0x280: 0x2000, 0x281: 0x2000, 0x282: 0x2000, 0x283: 0x2000, 0x284: 0x2000, 0x285: 0x2000, - 0x286: 0x2000, 0x287: 0x2000, 0x288: 0x2000, 0x289: 0x2000, 0x28a: 0x2000, 0x28b: 0x2000, - 0x28c: 0x2000, 0x28d: 0x2000, 0x28e: 0x2000, 0x28f: 0x2000, 0x290: 0x2000, 0x291: 0x2000, - 0x292: 0x2000, 0x293: 0x2000, 0x294: 0x2000, 0x295: 0x2000, 0x296: 0x2000, 0x297: 0x2000, - 0x298: 0x2000, 0x299: 0x2000, 0x29a: 0x2000, 0x29b: 0x2000, 0x29c: 0x2000, 0x29d: 0x2000, - 0x29e: 0x2000, 0x29f: 0x2000, 0x2a0: 0x2000, 0x2a1: 0x2000, 0x2a2: 0x2000, 0x2a3: 0x2000, - 0x2a4: 0x2000, 0x2a5: 0x2000, 0x2a6: 0x2000, 0x2a7: 0x2000, 0x2a8: 0x2000, 0x2a9: 0x2000, - 0x2aa: 0x2000, 0x2ab: 0x2000, 0x2ac: 0x2000, 0x2ad: 0x2000, 0x2ae: 0x2000, 0x2af: 0x2000, - 0x2b0: 0x2000, 0x2b1: 0x2000, 0x2b2: 0x2000, 0x2b3: 0x2000, 0x2b4: 0x2000, 0x2b5: 0x2000, - 0x2b6: 0x2000, 0x2b7: 0x2000, 0x2b8: 0x2000, 0x2b9: 0x2000, 0x2ba: 0x2000, 0x2bb: 0x2000, - 0x2bc: 0x2000, 0x2bd: 0x2000, 0x2be: 0x2000, 0x2bf: 0x2000, - // Block 0xb, offset 0x2c0 - 0x2c0: 0x2000, 0x2c1: 0x2000, 0x2c2: 0x2000, 0x2c3: 0x2000, 0x2c4: 0x2000, 0x2c5: 0x2000, - 0x2c6: 0x2000, 0x2c7: 0x2000, 0x2c8: 0x2000, 0x2c9: 0x2000, 0x2ca: 0x2000, 0x2cb: 0x2000, - 0x2cc: 0x2000, 0x2cd: 0x2000, 0x2ce: 0x2000, 0x2cf: 0x2000, 0x2d0: 0x2000, 0x2d1: 0x2000, - 0x2d2: 0x2000, 0x2d3: 0x2000, 0x2d4: 0x2000, 0x2d5: 0x2000, 0x2d6: 0x2000, 0x2d7: 0x2000, - 0x2d8: 0x2000, 0x2d9: 0x2000, 0x2da: 0x2000, 0x2db: 0x2000, 0x2dc: 0x2000, 0x2dd: 0x2000, - 0x2de: 0x2000, 0x2df: 0x2000, 0x2e0: 0x2000, 0x2e1: 0x2000, 0x2e2: 0x2000, 0x2e3: 0x2000, - 0x2e4: 0x2000, 0x2e5: 0x2000, 0x2e6: 0x2000, 0x2e7: 0x2000, 0x2e8: 0x2000, 0x2e9: 0x2000, - 0x2ea: 0x2000, 0x2eb: 0x2000, 0x2ec: 0x2000, 0x2ed: 0x2000, 0x2ee: 0x2000, 0x2ef: 0x2000, - // Block 0xc, offset 0x300 - 0x311: 0x2000, - 0x312: 0x2000, 0x313: 0x2000, 0x314: 0x2000, 0x315: 0x2000, 0x316: 0x2000, 0x317: 0x2000, - 0x318: 0x2000, 0x319: 0x2000, 0x31a: 0x2000, 0x31b: 0x2000, 0x31c: 0x2000, 0x31d: 0x2000, - 0x31e: 0x2000, 0x31f: 0x2000, 0x320: 0x2000, 0x321: 0x2000, 0x323: 0x2000, - 0x324: 0x2000, 0x325: 0x2000, 0x326: 0x2000, 0x327: 0x2000, 0x328: 0x2000, 0x329: 0x2000, - 0x331: 0x2000, 0x332: 0x2000, 0x333: 0x2000, 0x334: 0x2000, 0x335: 0x2000, - 0x336: 0x2000, 0x337: 0x2000, 0x338: 0x2000, 0x339: 0x2000, 0x33a: 0x2000, 0x33b: 0x2000, - 0x33c: 0x2000, 0x33d: 0x2000, 0x33e: 0x2000, 0x33f: 0x2000, - // Block 0xd, offset 0x340 - 0x340: 0x2000, 0x341: 0x2000, 0x343: 0x2000, 0x344: 0x2000, 0x345: 0x2000, - 0x346: 0x2000, 0x347: 0x2000, 0x348: 0x2000, 0x349: 0x2000, - // Block 0xe, offset 0x380 - 0x381: 0x2000, - 0x390: 0x2000, 0x391: 0x2000, - 0x392: 0x2000, 0x393: 0x2000, 0x394: 0x2000, 0x395: 0x2000, 0x396: 0x2000, 0x397: 0x2000, - 0x398: 0x2000, 0x399: 0x2000, 0x39a: 0x2000, 0x39b: 0x2000, 0x39c: 0x2000, 0x39d: 0x2000, - 0x39e: 0x2000, 0x39f: 0x2000, 0x3a0: 0x2000, 0x3a1: 0x2000, 0x3a2: 0x2000, 0x3a3: 
0x2000, - 0x3a4: 0x2000, 0x3a5: 0x2000, 0x3a6: 0x2000, 0x3a7: 0x2000, 0x3a8: 0x2000, 0x3a9: 0x2000, - 0x3aa: 0x2000, 0x3ab: 0x2000, 0x3ac: 0x2000, 0x3ad: 0x2000, 0x3ae: 0x2000, 0x3af: 0x2000, - 0x3b0: 0x2000, 0x3b1: 0x2000, 0x3b2: 0x2000, 0x3b3: 0x2000, 0x3b4: 0x2000, 0x3b5: 0x2000, - 0x3b6: 0x2000, 0x3b7: 0x2000, 0x3b8: 0x2000, 0x3b9: 0x2000, 0x3ba: 0x2000, 0x3bb: 0x2000, - 0x3bc: 0x2000, 0x3bd: 0x2000, 0x3be: 0x2000, 0x3bf: 0x2000, - // Block 0xf, offset 0x3c0 - 0x3c0: 0x2000, 0x3c1: 0x2000, 0x3c2: 0x2000, 0x3c3: 0x2000, 0x3c4: 0x2000, 0x3c5: 0x2000, - 0x3c6: 0x2000, 0x3c7: 0x2000, 0x3c8: 0x2000, 0x3c9: 0x2000, 0x3ca: 0x2000, 0x3cb: 0x2000, - 0x3cc: 0x2000, 0x3cd: 0x2000, 0x3ce: 0x2000, 0x3cf: 0x2000, 0x3d1: 0x2000, - // Block 0x10, offset 0x400 - 0x400: 0x4000, 0x401: 0x4000, 0x402: 0x4000, 0x403: 0x4000, 0x404: 0x4000, 0x405: 0x4000, - 0x406: 0x4000, 0x407: 0x4000, 0x408: 0x4000, 0x409: 0x4000, 0x40a: 0x4000, 0x40b: 0x4000, - 0x40c: 0x4000, 0x40d: 0x4000, 0x40e: 0x4000, 0x40f: 0x4000, 0x410: 0x4000, 0x411: 0x4000, - 0x412: 0x4000, 0x413: 0x4000, 0x414: 0x4000, 0x415: 0x4000, 0x416: 0x4000, 0x417: 0x4000, - 0x418: 0x4000, 0x419: 0x4000, 0x41a: 0x4000, 0x41b: 0x4000, 0x41c: 0x4000, 0x41d: 0x4000, - 0x41e: 0x4000, 0x41f: 0x4000, 0x420: 0x4000, 0x421: 0x4000, 0x422: 0x4000, 0x423: 0x4000, - 0x424: 0x4000, 0x425: 0x4000, 0x426: 0x4000, 0x427: 0x4000, 0x428: 0x4000, 0x429: 0x4000, - 0x42a: 0x4000, 0x42b: 0x4000, 0x42c: 0x4000, 0x42d: 0x4000, 0x42e: 0x4000, 0x42f: 0x4000, - 0x430: 0x4000, 0x431: 0x4000, 0x432: 0x4000, 0x433: 0x4000, 0x434: 0x4000, 0x435: 0x4000, - 0x436: 0x4000, 0x437: 0x4000, 0x438: 0x4000, 0x439: 0x4000, 0x43a: 0x4000, 0x43b: 0x4000, - 0x43c: 0x4000, 0x43d: 0x4000, 0x43e: 0x4000, 0x43f: 0x4000, - // Block 0x11, offset 0x440 - 0x440: 0x4000, 0x441: 0x4000, 0x442: 0x4000, 0x443: 0x4000, 0x444: 0x4000, 0x445: 0x4000, - 0x446: 0x4000, 0x447: 0x4000, 0x448: 0x4000, 0x449: 0x4000, 0x44a: 0x4000, 0x44b: 0x4000, - 0x44c: 0x4000, 0x44d: 0x4000, 0x44e: 0x4000, 0x44f: 0x4000, 0x450: 0x4000, 0x451: 0x4000, - 0x452: 0x4000, 0x453: 0x4000, 0x454: 0x4000, 0x455: 0x4000, 0x456: 0x4000, 0x457: 0x4000, - 0x458: 0x4000, 0x459: 0x4000, 0x45a: 0x4000, 0x45b: 0x4000, 0x45c: 0x4000, 0x45d: 0x4000, - 0x45e: 0x4000, 0x45f: 0x4000, - // Block 0x12, offset 0x480 - 0x490: 0x2000, - 0x493: 0x2000, 0x494: 0x2000, 0x495: 0x2000, 0x496: 0x2000, - 0x498: 0x2000, 0x499: 0x2000, 0x49c: 0x2000, 0x49d: 0x2000, - 0x4a0: 0x2000, 0x4a1: 0x2000, 0x4a2: 0x2000, - 0x4a4: 0x2000, 0x4a5: 0x2000, 0x4a6: 0x2000, 0x4a7: 0x2000, - 0x4b0: 0x2000, 0x4b2: 0x2000, 0x4b3: 0x2000, 0x4b5: 0x2000, - 0x4bb: 0x2000, - 0x4be: 0x2000, - // Block 0x13, offset 0x4c0 - 0x4f4: 0x2000, - 0x4ff: 0x2000, - // Block 0x14, offset 0x500 - 0x501: 0x2000, 0x502: 0x2000, 0x503: 0x2000, 0x504: 0x2000, - 0x529: 0xa009, - 0x52c: 0x2000, - // Block 0x15, offset 0x540 - 0x543: 0x2000, 0x545: 0x2000, - 0x549: 0x2000, - 0x553: 0x2000, 0x556: 0x2000, - 0x561: 0x2000, 0x562: 0x2000, - 0x566: 0x2000, - 0x56b: 0x2000, - // Block 0x16, offset 0x580 - 0x593: 0x2000, 0x594: 0x2000, - 0x59b: 0x2000, 0x59c: 0x2000, 0x59d: 0x2000, - 0x59e: 0x2000, 0x5a0: 0x2000, 0x5a1: 0x2000, 0x5a2: 0x2000, 0x5a3: 0x2000, - 0x5a4: 0x2000, 0x5a5: 0x2000, 0x5a6: 0x2000, 0x5a7: 0x2000, 0x5a8: 0x2000, 0x5a9: 0x2000, - 0x5aa: 0x2000, 0x5ab: 0x2000, - 0x5b0: 0x2000, 0x5b1: 0x2000, 0x5b2: 0x2000, 0x5b3: 0x2000, 0x5b4: 0x2000, 0x5b5: 0x2000, - 0x5b6: 0x2000, 0x5b7: 0x2000, 0x5b8: 0x2000, 0x5b9: 0x2000, - // Block 0x17, offset 0x5c0 - 0x5c9: 0x2000, - 0x5d0: 0x200a, 0x5d1: 0x200b, - 
0x5d2: 0x200a, 0x5d3: 0x200c, 0x5d4: 0x2000, 0x5d5: 0x2000, 0x5d6: 0x2000, 0x5d7: 0x2000, - 0x5d8: 0x2000, 0x5d9: 0x2000, - 0x5f8: 0x2000, 0x5f9: 0x2000, - // Block 0x18, offset 0x600 - 0x612: 0x2000, 0x614: 0x2000, - 0x627: 0x2000, - // Block 0x19, offset 0x640 - 0x640: 0x2000, 0x642: 0x2000, 0x643: 0x2000, - 0x647: 0x2000, 0x648: 0x2000, 0x64b: 0x2000, - 0x64f: 0x2000, 0x651: 0x2000, - 0x655: 0x2000, - 0x65a: 0x2000, 0x65d: 0x2000, - 0x65e: 0x2000, 0x65f: 0x2000, 0x660: 0x2000, 0x663: 0x2000, - 0x665: 0x2000, 0x667: 0x2000, 0x668: 0x2000, 0x669: 0x2000, - 0x66a: 0x2000, 0x66b: 0x2000, 0x66c: 0x2000, 0x66e: 0x2000, - 0x674: 0x2000, 0x675: 0x2000, - 0x676: 0x2000, 0x677: 0x2000, - 0x67c: 0x2000, 0x67d: 0x2000, - // Block 0x1a, offset 0x680 - 0x688: 0x2000, - 0x68c: 0x2000, - 0x692: 0x2000, - 0x6a0: 0x2000, 0x6a1: 0x2000, - 0x6a4: 0x2000, 0x6a5: 0x2000, 0x6a6: 0x2000, 0x6a7: 0x2000, - 0x6aa: 0x2000, 0x6ab: 0x2000, 0x6ae: 0x2000, 0x6af: 0x2000, - // Block 0x1b, offset 0x6c0 - 0x6c2: 0x2000, 0x6c3: 0x2000, - 0x6c6: 0x2000, 0x6c7: 0x2000, - 0x6d5: 0x2000, - 0x6d9: 0x2000, - 0x6e5: 0x2000, - 0x6ff: 0x2000, - // Block 0x1c, offset 0x700 - 0x712: 0x2000, - 0x71a: 0x4000, 0x71b: 0x4000, - 0x729: 0x4000, - 0x72a: 0x4000, - // Block 0x1d, offset 0x740 - 0x769: 0x4000, - 0x76a: 0x4000, 0x76b: 0x4000, 0x76c: 0x4000, - 0x770: 0x4000, 0x773: 0x4000, - // Block 0x1e, offset 0x780 - 0x7a0: 0x2000, 0x7a1: 0x2000, 0x7a2: 0x2000, 0x7a3: 0x2000, - 0x7a4: 0x2000, 0x7a5: 0x2000, 0x7a6: 0x2000, 0x7a7: 0x2000, 0x7a8: 0x2000, 0x7a9: 0x2000, - 0x7aa: 0x2000, 0x7ab: 0x2000, 0x7ac: 0x2000, 0x7ad: 0x2000, 0x7ae: 0x2000, 0x7af: 0x2000, - 0x7b0: 0x2000, 0x7b1: 0x2000, 0x7b2: 0x2000, 0x7b3: 0x2000, 0x7b4: 0x2000, 0x7b5: 0x2000, - 0x7b6: 0x2000, 0x7b7: 0x2000, 0x7b8: 0x2000, 0x7b9: 0x2000, 0x7ba: 0x2000, 0x7bb: 0x2000, - 0x7bc: 0x2000, 0x7bd: 0x2000, 0x7be: 0x2000, 0x7bf: 0x2000, - // Block 0x1f, offset 0x7c0 - 0x7c0: 0x2000, 0x7c1: 0x2000, 0x7c2: 0x2000, 0x7c3: 0x2000, 0x7c4: 0x2000, 0x7c5: 0x2000, - 0x7c6: 0x2000, 0x7c7: 0x2000, 0x7c8: 0x2000, 0x7c9: 0x2000, 0x7ca: 0x2000, 0x7cb: 0x2000, - 0x7cc: 0x2000, 0x7cd: 0x2000, 0x7ce: 0x2000, 0x7cf: 0x2000, 0x7d0: 0x2000, 0x7d1: 0x2000, - 0x7d2: 0x2000, 0x7d3: 0x2000, 0x7d4: 0x2000, 0x7d5: 0x2000, 0x7d6: 0x2000, 0x7d7: 0x2000, - 0x7d8: 0x2000, 0x7d9: 0x2000, 0x7da: 0x2000, 0x7db: 0x2000, 0x7dc: 0x2000, 0x7dd: 0x2000, - 0x7de: 0x2000, 0x7df: 0x2000, 0x7e0: 0x2000, 0x7e1: 0x2000, 0x7e2: 0x2000, 0x7e3: 0x2000, - 0x7e4: 0x2000, 0x7e5: 0x2000, 0x7e6: 0x2000, 0x7e7: 0x2000, 0x7e8: 0x2000, 0x7e9: 0x2000, - 0x7eb: 0x2000, 0x7ec: 0x2000, 0x7ed: 0x2000, 0x7ee: 0x2000, 0x7ef: 0x2000, - 0x7f0: 0x2000, 0x7f1: 0x2000, 0x7f2: 0x2000, 0x7f3: 0x2000, 0x7f4: 0x2000, 0x7f5: 0x2000, - 0x7f6: 0x2000, 0x7f7: 0x2000, 0x7f8: 0x2000, 0x7f9: 0x2000, 0x7fa: 0x2000, 0x7fb: 0x2000, - 0x7fc: 0x2000, 0x7fd: 0x2000, 0x7fe: 0x2000, 0x7ff: 0x2000, - // Block 0x20, offset 0x800 - 0x800: 0x2000, 0x801: 0x2000, 0x802: 0x200d, 0x803: 0x2000, 0x804: 0x2000, 0x805: 0x2000, - 0x806: 0x2000, 0x807: 0x2000, 0x808: 0x2000, 0x809: 0x2000, 0x80a: 0x2000, 0x80b: 0x2000, - 0x80c: 0x2000, 0x80d: 0x2000, 0x80e: 0x2000, 0x80f: 0x2000, 0x810: 0x2000, 0x811: 0x2000, - 0x812: 0x2000, 0x813: 0x2000, 0x814: 0x2000, 0x815: 0x2000, 0x816: 0x2000, 0x817: 0x2000, - 0x818: 0x2000, 0x819: 0x2000, 0x81a: 0x2000, 0x81b: 0x2000, 0x81c: 0x2000, 0x81d: 0x2000, - 0x81e: 0x2000, 0x81f: 0x2000, 0x820: 0x2000, 0x821: 0x2000, 0x822: 0x2000, 0x823: 0x2000, - 0x824: 0x2000, 0x825: 0x2000, 0x826: 0x2000, 0x827: 0x2000, 0x828: 0x2000, 0x829: 0x2000, - 
0x82a: 0x2000, 0x82b: 0x2000, 0x82c: 0x2000, 0x82d: 0x2000, 0x82e: 0x2000, 0x82f: 0x2000, - 0x830: 0x2000, 0x831: 0x2000, 0x832: 0x2000, 0x833: 0x2000, 0x834: 0x2000, 0x835: 0x2000, - 0x836: 0x2000, 0x837: 0x2000, 0x838: 0x2000, 0x839: 0x2000, 0x83a: 0x2000, 0x83b: 0x2000, - 0x83c: 0x2000, 0x83d: 0x2000, 0x83e: 0x2000, 0x83f: 0x2000, - // Block 0x21, offset 0x840 - 0x840: 0x2000, 0x841: 0x2000, 0x842: 0x2000, 0x843: 0x2000, 0x844: 0x2000, 0x845: 0x2000, - 0x846: 0x2000, 0x847: 0x2000, 0x848: 0x2000, 0x849: 0x2000, 0x84a: 0x2000, 0x84b: 0x2000, - 0x850: 0x2000, 0x851: 0x2000, - 0x852: 0x2000, 0x853: 0x2000, 0x854: 0x2000, 0x855: 0x2000, 0x856: 0x2000, 0x857: 0x2000, - 0x858: 0x2000, 0x859: 0x2000, 0x85a: 0x2000, 0x85b: 0x2000, 0x85c: 0x2000, 0x85d: 0x2000, - 0x85e: 0x2000, 0x85f: 0x2000, 0x860: 0x2000, 0x861: 0x2000, 0x862: 0x2000, 0x863: 0x2000, - 0x864: 0x2000, 0x865: 0x2000, 0x866: 0x2000, 0x867: 0x2000, 0x868: 0x2000, 0x869: 0x2000, - 0x86a: 0x2000, 0x86b: 0x2000, 0x86c: 0x2000, 0x86d: 0x2000, 0x86e: 0x2000, 0x86f: 0x2000, - 0x870: 0x2000, 0x871: 0x2000, 0x872: 0x2000, 0x873: 0x2000, - // Block 0x22, offset 0x880 - 0x880: 0x2000, 0x881: 0x2000, 0x882: 0x2000, 0x883: 0x2000, 0x884: 0x2000, 0x885: 0x2000, - 0x886: 0x2000, 0x887: 0x2000, 0x888: 0x2000, 0x889: 0x2000, 0x88a: 0x2000, 0x88b: 0x2000, - 0x88c: 0x2000, 0x88d: 0x2000, 0x88e: 0x2000, 0x88f: 0x2000, - 0x892: 0x2000, 0x893: 0x2000, 0x894: 0x2000, 0x895: 0x2000, - 0x8a0: 0x200e, 0x8a1: 0x2000, 0x8a3: 0x2000, - 0x8a4: 0x2000, 0x8a5: 0x2000, 0x8a6: 0x2000, 0x8a7: 0x2000, 0x8a8: 0x2000, 0x8a9: 0x2000, - 0x8b2: 0x2000, 0x8b3: 0x2000, - 0x8b6: 0x2000, 0x8b7: 0x2000, - 0x8bc: 0x2000, 0x8bd: 0x2000, - // Block 0x23, offset 0x8c0 - 0x8c0: 0x2000, 0x8c1: 0x2000, - 0x8c6: 0x2000, 0x8c7: 0x2000, 0x8c8: 0x2000, 0x8cb: 0x200f, - 0x8ce: 0x2000, 0x8cf: 0x2000, 0x8d0: 0x2000, 0x8d1: 0x2000, - 0x8e2: 0x2000, 0x8e3: 0x2000, - 0x8e4: 0x2000, 0x8e5: 0x2000, - 0x8ef: 0x2000, - 0x8fd: 0x4000, 0x8fe: 0x4000, - // Block 0x24, offset 0x900 - 0x905: 0x2000, - 0x906: 0x2000, 0x909: 0x2000, - 0x90e: 0x2000, 0x90f: 0x2000, - 0x914: 0x4000, 0x915: 0x4000, - 0x91c: 0x2000, - 0x91e: 0x2000, - // Block 0x25, offset 0x940 - 0x940: 0x2000, 0x942: 0x2000, - 0x948: 0x4000, 0x949: 0x4000, 0x94a: 0x4000, 0x94b: 0x4000, - 0x94c: 0x4000, 0x94d: 0x4000, 0x94e: 0x4000, 0x94f: 0x4000, 0x950: 0x4000, 0x951: 0x4000, - 0x952: 0x4000, 0x953: 0x4000, - 0x960: 0x2000, 0x961: 0x2000, 0x963: 0x2000, - 0x964: 0x2000, 0x965: 0x2000, 0x967: 0x2000, 0x968: 0x2000, 0x969: 0x2000, - 0x96a: 0x2000, 0x96c: 0x2000, 0x96d: 0x2000, 0x96f: 0x2000, - 0x97f: 0x4000, - // Block 0x26, offset 0x980 - 0x993: 0x4000, - 0x99e: 0x2000, 0x99f: 0x2000, 0x9a1: 0x4000, - 0x9aa: 0x4000, 0x9ab: 0x4000, - 0x9bd: 0x4000, 0x9be: 0x4000, 0x9bf: 0x2000, - // Block 0x27, offset 0x9c0 - 0x9c4: 0x4000, 0x9c5: 0x4000, - 0x9c6: 0x2000, 0x9c7: 0x2000, 0x9c8: 0x2000, 0x9c9: 0x2000, 0x9ca: 0x2000, 0x9cb: 0x2000, - 0x9cc: 0x2000, 0x9cd: 0x2000, 0x9ce: 0x4000, 0x9cf: 0x2000, 0x9d0: 0x2000, 0x9d1: 0x2000, - 0x9d2: 0x2000, 0x9d3: 0x2000, 0x9d4: 0x4000, 0x9d5: 0x2000, 0x9d6: 0x2000, 0x9d7: 0x2000, - 0x9d8: 0x2000, 0x9d9: 0x2000, 0x9da: 0x2000, 0x9db: 0x2000, 0x9dc: 0x2000, 0x9dd: 0x2000, - 0x9de: 0x2000, 0x9df: 0x2000, 0x9e0: 0x2000, 0x9e1: 0x2000, 0x9e3: 0x2000, - 0x9e8: 0x2000, 0x9e9: 0x2000, - 0x9ea: 0x4000, 0x9eb: 0x2000, 0x9ec: 0x2000, 0x9ed: 0x2000, 0x9ee: 0x2000, 0x9ef: 0x2000, - 0x9f0: 0x2000, 0x9f1: 0x2000, 0x9f2: 0x4000, 0x9f3: 0x4000, 0x9f4: 0x2000, 0x9f5: 0x4000, - 0x9f6: 0x2000, 0x9f7: 0x2000, 0x9f8: 0x2000, 0x9f9: 
0x2000, 0x9fa: 0x4000, 0x9fb: 0x2000, - 0x9fc: 0x2000, 0x9fd: 0x4000, 0x9fe: 0x2000, 0x9ff: 0x2000, - // Block 0x28, offset 0xa00 - 0xa05: 0x4000, - 0xa0a: 0x4000, 0xa0b: 0x4000, - 0xa28: 0x4000, - 0xa3d: 0x2000, - // Block 0x29, offset 0xa40 - 0xa4c: 0x4000, 0xa4e: 0x4000, - 0xa53: 0x4000, 0xa54: 0x4000, 0xa55: 0x4000, 0xa57: 0x4000, - 0xa76: 0x2000, 0xa77: 0x2000, 0xa78: 0x2000, 0xa79: 0x2000, 0xa7a: 0x2000, 0xa7b: 0x2000, - 0xa7c: 0x2000, 0xa7d: 0x2000, 0xa7e: 0x2000, 0xa7f: 0x2000, - // Block 0x2a, offset 0xa80 - 0xa95: 0x4000, 0xa96: 0x4000, 0xa97: 0x4000, - 0xab0: 0x4000, - 0xabf: 0x4000, - // Block 0x2b, offset 0xac0 - 0xae6: 0x6000, 0xae7: 0x6000, 0xae8: 0x6000, 0xae9: 0x6000, - 0xaea: 0x6000, 0xaeb: 0x6000, 0xaec: 0x6000, 0xaed: 0x6000, - // Block 0x2c, offset 0xb00 - 0xb05: 0x6010, - 0xb06: 0x6011, - // Block 0x2d, offset 0xb40 - 0xb5b: 0x4000, 0xb5c: 0x4000, - // Block 0x2e, offset 0xb80 - 0xb90: 0x4000, - 0xb95: 0x4000, 0xb96: 0x2000, 0xb97: 0x2000, - 0xb98: 0x2000, 0xb99: 0x2000, - // Block 0x2f, offset 0xbc0 - 0xbc0: 0x4000, 0xbc1: 0x4000, 0xbc2: 0x4000, 0xbc3: 0x4000, 0xbc4: 0x4000, 0xbc5: 0x4000, - 0xbc6: 0x4000, 0xbc7: 0x4000, 0xbc8: 0x4000, 0xbc9: 0x4000, 0xbca: 0x4000, 0xbcb: 0x4000, - 0xbcc: 0x4000, 0xbcd: 0x4000, 0xbce: 0x4000, 0xbcf: 0x4000, 0xbd0: 0x4000, 0xbd1: 0x4000, - 0xbd2: 0x4000, 0xbd3: 0x4000, 0xbd4: 0x4000, 0xbd5: 0x4000, 0xbd6: 0x4000, 0xbd7: 0x4000, - 0xbd8: 0x4000, 0xbd9: 0x4000, 0xbdb: 0x4000, 0xbdc: 0x4000, 0xbdd: 0x4000, - 0xbde: 0x4000, 0xbdf: 0x4000, 0xbe0: 0x4000, 0xbe1: 0x4000, 0xbe2: 0x4000, 0xbe3: 0x4000, - 0xbe4: 0x4000, 0xbe5: 0x4000, 0xbe6: 0x4000, 0xbe7: 0x4000, 0xbe8: 0x4000, 0xbe9: 0x4000, - 0xbea: 0x4000, 0xbeb: 0x4000, 0xbec: 0x4000, 0xbed: 0x4000, 0xbee: 0x4000, 0xbef: 0x4000, - 0xbf0: 0x4000, 0xbf1: 0x4000, 0xbf2: 0x4000, 0xbf3: 0x4000, 0xbf4: 0x4000, 0xbf5: 0x4000, - 0xbf6: 0x4000, 0xbf7: 0x4000, 0xbf8: 0x4000, 0xbf9: 0x4000, 0xbfa: 0x4000, 0xbfb: 0x4000, - 0xbfc: 0x4000, 0xbfd: 0x4000, 0xbfe: 0x4000, 0xbff: 0x4000, - // Block 0x30, offset 0xc00 - 0xc00: 0x4000, 0xc01: 0x4000, 0xc02: 0x4000, 0xc03: 0x4000, 0xc04: 0x4000, 0xc05: 0x4000, - 0xc06: 0x4000, 0xc07: 0x4000, 0xc08: 0x4000, 0xc09: 0x4000, 0xc0a: 0x4000, 0xc0b: 0x4000, - 0xc0c: 0x4000, 0xc0d: 0x4000, 0xc0e: 0x4000, 0xc0f: 0x4000, 0xc10: 0x4000, 0xc11: 0x4000, - 0xc12: 0x4000, 0xc13: 0x4000, 0xc14: 0x4000, 0xc15: 0x4000, 0xc16: 0x4000, 0xc17: 0x4000, - 0xc18: 0x4000, 0xc19: 0x4000, 0xc1a: 0x4000, 0xc1b: 0x4000, 0xc1c: 0x4000, 0xc1d: 0x4000, - 0xc1e: 0x4000, 0xc1f: 0x4000, 0xc20: 0x4000, 0xc21: 0x4000, 0xc22: 0x4000, 0xc23: 0x4000, - 0xc24: 0x4000, 0xc25: 0x4000, 0xc26: 0x4000, 0xc27: 0x4000, 0xc28: 0x4000, 0xc29: 0x4000, - 0xc2a: 0x4000, 0xc2b: 0x4000, 0xc2c: 0x4000, 0xc2d: 0x4000, 0xc2e: 0x4000, 0xc2f: 0x4000, - 0xc30: 0x4000, 0xc31: 0x4000, 0xc32: 0x4000, 0xc33: 0x4000, - // Block 0x31, offset 0xc40 - 0xc40: 0x4000, 0xc41: 0x4000, 0xc42: 0x4000, 0xc43: 0x4000, 0xc44: 0x4000, 0xc45: 0x4000, - 0xc46: 0x4000, 0xc47: 0x4000, 0xc48: 0x4000, 0xc49: 0x4000, 0xc4a: 0x4000, 0xc4b: 0x4000, - 0xc4c: 0x4000, 0xc4d: 0x4000, 0xc4e: 0x4000, 0xc4f: 0x4000, 0xc50: 0x4000, 0xc51: 0x4000, - 0xc52: 0x4000, 0xc53: 0x4000, 0xc54: 0x4000, 0xc55: 0x4000, - 0xc70: 0x4000, 0xc71: 0x4000, 0xc72: 0x4000, 0xc73: 0x4000, 0xc74: 0x4000, 0xc75: 0x4000, - 0xc76: 0x4000, 0xc77: 0x4000, 0xc78: 0x4000, 0xc79: 0x4000, 0xc7a: 0x4000, 0xc7b: 0x4000, - // Block 0x32, offset 0xc80 - 0xc80: 0x9012, 0xc81: 0x4013, 0xc82: 0x4014, 0xc83: 0x4000, 0xc84: 0x4000, 0xc85: 0x4000, - 0xc86: 0x4000, 0xc87: 0x4000, 0xc88: 0x4000, 
0xc89: 0x4000, 0xc8a: 0x4000, 0xc8b: 0x4000, - 0xc8c: 0x4015, 0xc8d: 0x4015, 0xc8e: 0x4000, 0xc8f: 0x4000, 0xc90: 0x4000, 0xc91: 0x4000, - 0xc92: 0x4000, 0xc93: 0x4000, 0xc94: 0x4000, 0xc95: 0x4000, 0xc96: 0x4000, 0xc97: 0x4000, - 0xc98: 0x4000, 0xc99: 0x4000, 0xc9a: 0x4000, 0xc9b: 0x4000, 0xc9c: 0x4000, 0xc9d: 0x4000, - 0xc9e: 0x4000, 0xc9f: 0x4000, 0xca0: 0x4000, 0xca1: 0x4000, 0xca2: 0x4000, 0xca3: 0x4000, - 0xca4: 0x4000, 0xca5: 0x4000, 0xca6: 0x4000, 0xca7: 0x4000, 0xca8: 0x4000, 0xca9: 0x4000, - 0xcaa: 0x4000, 0xcab: 0x4000, 0xcac: 0x4000, 0xcad: 0x4000, 0xcae: 0x4000, 0xcaf: 0x4000, - 0xcb0: 0x4000, 0xcb1: 0x4000, 0xcb2: 0x4000, 0xcb3: 0x4000, 0xcb4: 0x4000, 0xcb5: 0x4000, - 0xcb6: 0x4000, 0xcb7: 0x4000, 0xcb8: 0x4000, 0xcb9: 0x4000, 0xcba: 0x4000, 0xcbb: 0x4000, - 0xcbc: 0x4000, 0xcbd: 0x4000, 0xcbe: 0x4000, - // Block 0x33, offset 0xcc0 - 0xcc1: 0x4000, 0xcc2: 0x4000, 0xcc3: 0x4000, 0xcc4: 0x4000, 0xcc5: 0x4000, - 0xcc6: 0x4000, 0xcc7: 0x4000, 0xcc8: 0x4000, 0xcc9: 0x4000, 0xcca: 0x4000, 0xccb: 0x4000, - 0xccc: 0x4000, 0xccd: 0x4000, 0xcce: 0x4000, 0xccf: 0x4000, 0xcd0: 0x4000, 0xcd1: 0x4000, - 0xcd2: 0x4000, 0xcd3: 0x4000, 0xcd4: 0x4000, 0xcd5: 0x4000, 0xcd6: 0x4000, 0xcd7: 0x4000, - 0xcd8: 0x4000, 0xcd9: 0x4000, 0xcda: 0x4000, 0xcdb: 0x4000, 0xcdc: 0x4000, 0xcdd: 0x4000, - 0xcde: 0x4000, 0xcdf: 0x4000, 0xce0: 0x4000, 0xce1: 0x4000, 0xce2: 0x4000, 0xce3: 0x4000, - 0xce4: 0x4000, 0xce5: 0x4000, 0xce6: 0x4000, 0xce7: 0x4000, 0xce8: 0x4000, 0xce9: 0x4000, - 0xcea: 0x4000, 0xceb: 0x4000, 0xcec: 0x4000, 0xced: 0x4000, 0xcee: 0x4000, 0xcef: 0x4000, - 0xcf0: 0x4000, 0xcf1: 0x4000, 0xcf2: 0x4000, 0xcf3: 0x4000, 0xcf4: 0x4000, 0xcf5: 0x4000, - 0xcf6: 0x4000, 0xcf7: 0x4000, 0xcf8: 0x4000, 0xcf9: 0x4000, 0xcfa: 0x4000, 0xcfb: 0x4000, - 0xcfc: 0x4000, 0xcfd: 0x4000, 0xcfe: 0x4000, 0xcff: 0x4000, - // Block 0x34, offset 0xd00 - 0xd00: 0x4000, 0xd01: 0x4000, 0xd02: 0x4000, 0xd03: 0x4000, 0xd04: 0x4000, 0xd05: 0x4000, - 0xd06: 0x4000, 0xd07: 0x4000, 0xd08: 0x4000, 0xd09: 0x4000, 0xd0a: 0x4000, 0xd0b: 0x4000, - 0xd0c: 0x4000, 0xd0d: 0x4000, 0xd0e: 0x4000, 0xd0f: 0x4000, 0xd10: 0x4000, 0xd11: 0x4000, - 0xd12: 0x4000, 0xd13: 0x4000, 0xd14: 0x4000, 0xd15: 0x4000, 0xd16: 0x4000, - 0xd19: 0x4016, 0xd1a: 0x4017, 0xd1b: 0x4000, 0xd1c: 0x4000, 0xd1d: 0x4000, - 0xd1e: 0x4000, 0xd1f: 0x4000, 0xd20: 0x4000, 0xd21: 0x4018, 0xd22: 0x4019, 0xd23: 0x401a, - 0xd24: 0x401b, 0xd25: 0x401c, 0xd26: 0x401d, 0xd27: 0x401e, 0xd28: 0x401f, 0xd29: 0x4020, - 0xd2a: 0x4021, 0xd2b: 0x4022, 0xd2c: 0x4000, 0xd2d: 0x4010, 0xd2e: 0x4000, 0xd2f: 0x4023, - 0xd30: 0x4000, 0xd31: 0x4024, 0xd32: 0x4000, 0xd33: 0x4025, 0xd34: 0x4000, 0xd35: 0x4026, - 0xd36: 0x4000, 0xd37: 0x401a, 0xd38: 0x4000, 0xd39: 0x4027, 0xd3a: 0x4000, 0xd3b: 0x4028, - 0xd3c: 0x4000, 0xd3d: 0x4020, 0xd3e: 0x4000, 0xd3f: 0x4029, - // Block 0x35, offset 0xd40 - 0xd40: 0x4000, 0xd41: 0x402a, 0xd42: 0x4000, 0xd43: 0x402b, 0xd44: 0x402c, 0xd45: 0x4000, - 0xd46: 0x4017, 0xd47: 0x4000, 0xd48: 0x402d, 0xd49: 0x4000, 0xd4a: 0x402e, 0xd4b: 0x402f, - 0xd4c: 0x4030, 0xd4d: 0x4017, 0xd4e: 0x4016, 0xd4f: 0x4017, 0xd50: 0x4000, 0xd51: 0x4000, - 0xd52: 0x4031, 0xd53: 0x4000, 0xd54: 0x4000, 0xd55: 0x4031, 0xd56: 0x4000, 0xd57: 0x4000, - 0xd58: 0x4032, 0xd59: 0x4000, 0xd5a: 0x4000, 0xd5b: 0x4032, 0xd5c: 0x4000, 0xd5d: 0x4000, - 0xd5e: 0x4033, 0xd5f: 0x402e, 0xd60: 0x4034, 0xd61: 0x4035, 0xd62: 0x4034, 0xd63: 0x4036, - 0xd64: 0x4037, 0xd65: 0x4024, 0xd66: 0x4035, 0xd67: 0x4025, 0xd68: 0x4038, 0xd69: 0x4038, - 0xd6a: 0x4039, 0xd6b: 0x4039, 0xd6c: 0x403a, 0xd6d: 0x403a, 0xd6e: 
0x4000, 0xd6f: 0x4035, - 0xd70: 0x4000, 0xd71: 0x4000, 0xd72: 0x403b, 0xd73: 0x403c, 0xd74: 0x4000, 0xd75: 0x4000, - 0xd76: 0x4000, 0xd77: 0x4000, 0xd78: 0x4000, 0xd79: 0x4000, 0xd7a: 0x4000, 0xd7b: 0x403d, - 0xd7c: 0x401c, 0xd7d: 0x4000, 0xd7e: 0x4000, 0xd7f: 0x4000, - // Block 0x36, offset 0xd80 - 0xd85: 0x4000, - 0xd86: 0x4000, 0xd87: 0x4000, 0xd88: 0x4000, 0xd89: 0x4000, 0xd8a: 0x4000, 0xd8b: 0x4000, - 0xd8c: 0x4000, 0xd8d: 0x4000, 0xd8e: 0x4000, 0xd8f: 0x4000, 0xd90: 0x4000, 0xd91: 0x4000, - 0xd92: 0x4000, 0xd93: 0x4000, 0xd94: 0x4000, 0xd95: 0x4000, 0xd96: 0x4000, 0xd97: 0x4000, - 0xd98: 0x4000, 0xd99: 0x4000, 0xd9a: 0x4000, 0xd9b: 0x4000, 0xd9c: 0x4000, 0xd9d: 0x4000, - 0xd9e: 0x4000, 0xd9f: 0x4000, 0xda0: 0x4000, 0xda1: 0x4000, 0xda2: 0x4000, 0xda3: 0x4000, - 0xda4: 0x4000, 0xda5: 0x4000, 0xda6: 0x4000, 0xda7: 0x4000, 0xda8: 0x4000, 0xda9: 0x4000, - 0xdaa: 0x4000, 0xdab: 0x4000, 0xdac: 0x4000, 0xdad: 0x4000, 0xdae: 0x4000, 0xdaf: 0x4000, - 0xdb1: 0x403e, 0xdb2: 0x403e, 0xdb3: 0x403e, 0xdb4: 0x403e, 0xdb5: 0x403e, - 0xdb6: 0x403e, 0xdb7: 0x403e, 0xdb8: 0x403e, 0xdb9: 0x403e, 0xdba: 0x403e, 0xdbb: 0x403e, - 0xdbc: 0x403e, 0xdbd: 0x403e, 0xdbe: 0x403e, 0xdbf: 0x403e, - // Block 0x37, offset 0xdc0 - 0xdc0: 0x4037, 0xdc1: 0x4037, 0xdc2: 0x4037, 0xdc3: 0x4037, 0xdc4: 0x4037, 0xdc5: 0x4037, - 0xdc6: 0x4037, 0xdc7: 0x4037, 0xdc8: 0x4037, 0xdc9: 0x4037, 0xdca: 0x4037, 0xdcb: 0x4037, - 0xdcc: 0x4037, 0xdcd: 0x4037, 0xdce: 0x4037, 0xdcf: 0x400e, 0xdd0: 0x403f, 0xdd1: 0x4040, - 0xdd2: 0x4041, 0xdd3: 0x4040, 0xdd4: 0x403f, 0xdd5: 0x4042, 0xdd6: 0x4043, 0xdd7: 0x4044, - 0xdd8: 0x4040, 0xdd9: 0x4041, 0xdda: 0x4040, 0xddb: 0x4045, 0xddc: 0x4009, 0xddd: 0x4045, - 0xdde: 0x4046, 0xddf: 0x4045, 0xde0: 0x4047, 0xde1: 0x400b, 0xde2: 0x400a, 0xde3: 0x400c, - 0xde4: 0x4048, 0xde5: 0x4000, 0xde6: 0x4000, 0xde7: 0x4000, 0xde8: 0x4000, 0xde9: 0x4000, - 0xdea: 0x4000, 0xdeb: 0x4000, 0xdec: 0x4000, 0xded: 0x4000, 0xdee: 0x4000, 0xdef: 0x4000, - 0xdf0: 0x4000, 0xdf1: 0x4000, 0xdf2: 0x4000, 0xdf3: 0x4000, 0xdf4: 0x4000, 0xdf5: 0x4000, - 0xdf6: 0x4000, 0xdf7: 0x4000, 0xdf8: 0x4000, 0xdf9: 0x4000, 0xdfa: 0x4000, 0xdfb: 0x4000, - 0xdfc: 0x4000, 0xdfd: 0x4000, 0xdfe: 0x4000, 0xdff: 0x4000, - // Block 0x38, offset 0xe00 - 0xe00: 0x4000, 0xe01: 0x4000, 0xe02: 0x4000, 0xe03: 0x4000, 0xe04: 0x4000, 0xe05: 0x4000, - 0xe06: 0x4000, 0xe07: 0x4000, 0xe08: 0x4000, 0xe09: 0x4000, 0xe0a: 0x4000, 0xe0b: 0x4000, - 0xe0c: 0x4000, 0xe0d: 0x4000, 0xe0e: 0x4000, 0xe10: 0x4000, 0xe11: 0x4000, - 0xe12: 0x4000, 0xe13: 0x4000, 0xe14: 0x4000, 0xe15: 0x4000, 0xe16: 0x4000, 0xe17: 0x4000, - 0xe18: 0x4000, 0xe19: 0x4000, 0xe1a: 0x4000, 0xe1b: 0x4000, 0xe1c: 0x4000, 0xe1d: 0x4000, - 0xe1e: 0x4000, 0xe1f: 0x4000, 0xe20: 0x4000, 0xe21: 0x4000, 0xe22: 0x4000, 0xe23: 0x4000, - 0xe24: 0x4000, 0xe25: 0x4000, 0xe26: 0x4000, 0xe27: 0x4000, 0xe28: 0x4000, 0xe29: 0x4000, - 0xe2a: 0x4000, 0xe2b: 0x4000, 0xe2c: 0x4000, 0xe2d: 0x4000, 0xe2e: 0x4000, 0xe2f: 0x4000, - 0xe30: 0x4000, 0xe31: 0x4000, 0xe32: 0x4000, 0xe33: 0x4000, 0xe34: 0x4000, 0xe35: 0x4000, - 0xe36: 0x4000, 0xe37: 0x4000, 0xe38: 0x4000, 0xe39: 0x4000, 0xe3a: 0x4000, - // Block 0x39, offset 0xe40 - 0xe40: 0x4000, 0xe41: 0x4000, 0xe42: 0x4000, 0xe43: 0x4000, 0xe44: 0x4000, 0xe45: 0x4000, - 0xe46: 0x4000, 0xe47: 0x4000, 0xe48: 0x4000, 0xe49: 0x4000, 0xe4a: 0x4000, 0xe4b: 0x4000, - 0xe4c: 0x4000, 0xe4d: 0x4000, 0xe4e: 0x4000, 0xe4f: 0x4000, 0xe50: 0x4000, 0xe51: 0x4000, - 0xe52: 0x4000, 0xe53: 0x4000, 0xe54: 0x4000, 0xe55: 0x4000, 0xe56: 0x4000, 0xe57: 0x4000, - 0xe58: 0x4000, 0xe59: 0x4000, 
0xe5a: 0x4000, 0xe5b: 0x4000, 0xe5c: 0x4000, 0xe5d: 0x4000, - 0xe5e: 0x4000, 0xe5f: 0x4000, 0xe60: 0x4000, 0xe61: 0x4000, 0xe62: 0x4000, 0xe63: 0x4000, - 0xe70: 0x4000, 0xe71: 0x4000, 0xe72: 0x4000, 0xe73: 0x4000, 0xe74: 0x4000, 0xe75: 0x4000, - 0xe76: 0x4000, 0xe77: 0x4000, 0xe78: 0x4000, 0xe79: 0x4000, 0xe7a: 0x4000, 0xe7b: 0x4000, - 0xe7c: 0x4000, 0xe7d: 0x4000, 0xe7e: 0x4000, 0xe7f: 0x4000, - // Block 0x3a, offset 0xe80 - 0xe80: 0x4000, 0xe81: 0x4000, 0xe82: 0x4000, 0xe83: 0x4000, 0xe84: 0x4000, 0xe85: 0x4000, - 0xe86: 0x4000, 0xe87: 0x4000, 0xe88: 0x4000, 0xe89: 0x4000, 0xe8a: 0x4000, 0xe8b: 0x4000, - 0xe8c: 0x4000, 0xe8d: 0x4000, 0xe8e: 0x4000, 0xe8f: 0x4000, 0xe90: 0x4000, 0xe91: 0x4000, - 0xe92: 0x4000, 0xe93: 0x4000, 0xe94: 0x4000, 0xe95: 0x4000, 0xe96: 0x4000, 0xe97: 0x4000, - 0xe98: 0x4000, 0xe99: 0x4000, 0xe9a: 0x4000, 0xe9b: 0x4000, 0xe9c: 0x4000, 0xe9d: 0x4000, - 0xe9e: 0x4000, 0xea0: 0x4000, 0xea1: 0x4000, 0xea2: 0x4000, 0xea3: 0x4000, - 0xea4: 0x4000, 0xea5: 0x4000, 0xea6: 0x4000, 0xea7: 0x4000, 0xea8: 0x4000, 0xea9: 0x4000, - 0xeaa: 0x4000, 0xeab: 0x4000, 0xeac: 0x4000, 0xead: 0x4000, 0xeae: 0x4000, 0xeaf: 0x4000, - 0xeb0: 0x4000, 0xeb1: 0x4000, 0xeb2: 0x4000, 0xeb3: 0x4000, 0xeb4: 0x4000, 0xeb5: 0x4000, - 0xeb6: 0x4000, 0xeb7: 0x4000, 0xeb8: 0x4000, 0xeb9: 0x4000, 0xeba: 0x4000, 0xebb: 0x4000, - 0xebc: 0x4000, 0xebd: 0x4000, 0xebe: 0x4000, 0xebf: 0x4000, - // Block 0x3b, offset 0xec0 - 0xec0: 0x4000, 0xec1: 0x4000, 0xec2: 0x4000, 0xec3: 0x4000, 0xec4: 0x4000, 0xec5: 0x4000, - 0xec6: 0x4000, 0xec7: 0x4000, 0xec8: 0x2000, 0xec9: 0x2000, 0xeca: 0x2000, 0xecb: 0x2000, - 0xecc: 0x2000, 0xecd: 0x2000, 0xece: 0x2000, 0xecf: 0x2000, 0xed0: 0x4000, 0xed1: 0x4000, - 0xed2: 0x4000, 0xed3: 0x4000, 0xed4: 0x4000, 0xed5: 0x4000, 0xed6: 0x4000, 0xed7: 0x4000, - 0xed8: 0x4000, 0xed9: 0x4000, 0xeda: 0x4000, 0xedb: 0x4000, 0xedc: 0x4000, 0xedd: 0x4000, - 0xede: 0x4000, 0xedf: 0x4000, 0xee0: 0x4000, 0xee1: 0x4000, 0xee2: 0x4000, 0xee3: 0x4000, - 0xee4: 0x4000, 0xee5: 0x4000, 0xee6: 0x4000, 0xee7: 0x4000, 0xee8: 0x4000, 0xee9: 0x4000, - 0xeea: 0x4000, 0xeeb: 0x4000, 0xeec: 0x4000, 0xeed: 0x4000, 0xeee: 0x4000, 0xeef: 0x4000, - 0xef0: 0x4000, 0xef1: 0x4000, 0xef2: 0x4000, 0xef3: 0x4000, 0xef4: 0x4000, 0xef5: 0x4000, - 0xef6: 0x4000, 0xef7: 0x4000, 0xef8: 0x4000, 0xef9: 0x4000, 0xefa: 0x4000, 0xefb: 0x4000, - 0xefc: 0x4000, 0xefd: 0x4000, 0xefe: 0x4000, 0xeff: 0x4000, - // Block 0x3c, offset 0xf00 - 0xf00: 0x4000, 0xf01: 0x4000, 0xf02: 0x4000, 0xf03: 0x4000, 0xf04: 0x4000, 0xf05: 0x4000, - 0xf06: 0x4000, 0xf07: 0x4000, 0xf08: 0x4000, 0xf09: 0x4000, 0xf0a: 0x4000, 0xf0b: 0x4000, - 0xf0c: 0x4000, 0xf0d: 0x4000, 0xf0e: 0x4000, 0xf0f: 0x4000, 0xf10: 0x4000, 0xf11: 0x4000, - 0xf12: 0x4000, 0xf13: 0x4000, 0xf14: 0x4000, 0xf15: 0x4000, 0xf16: 0x4000, 0xf17: 0x4000, - 0xf18: 0x4000, 0xf19: 0x4000, 0xf1a: 0x4000, 0xf1b: 0x4000, 0xf1c: 0x4000, 0xf1d: 0x4000, - 0xf1e: 0x4000, 0xf1f: 0x4000, 0xf20: 0x4000, 0xf21: 0x4000, 0xf22: 0x4000, 0xf23: 0x4000, - 0xf24: 0x4000, 0xf25: 0x4000, 0xf26: 0x4000, 0xf27: 0x4000, 0xf28: 0x4000, 0xf29: 0x4000, - 0xf2a: 0x4000, 0xf2b: 0x4000, 0xf2c: 0x4000, 0xf2d: 0x4000, 0xf2e: 0x4000, 0xf2f: 0x4000, - 0xf30: 0x4000, 0xf31: 0x4000, 0xf32: 0x4000, 0xf33: 0x4000, 0xf34: 0x4000, 0xf35: 0x4000, - 0xf36: 0x4000, 0xf37: 0x4000, 0xf38: 0x4000, 0xf39: 0x4000, 0xf3a: 0x4000, 0xf3b: 0x4000, - 0xf3c: 0x4000, 0xf3d: 0x4000, 0xf3e: 0x4000, - // Block 0x3d, offset 0xf40 - 0xf40: 0x4000, 0xf41: 0x4000, 0xf42: 0x4000, 0xf43: 0x4000, 0xf44: 0x4000, 0xf45: 0x4000, - 0xf46: 0x4000, 0xf47: 
0x4000, 0xf48: 0x4000, 0xf49: 0x4000, 0xf4a: 0x4000, 0xf4b: 0x4000, - 0xf4c: 0x4000, 0xf50: 0x4000, 0xf51: 0x4000, - 0xf52: 0x4000, 0xf53: 0x4000, 0xf54: 0x4000, 0xf55: 0x4000, 0xf56: 0x4000, 0xf57: 0x4000, - 0xf58: 0x4000, 0xf59: 0x4000, 0xf5a: 0x4000, 0xf5b: 0x4000, 0xf5c: 0x4000, 0xf5d: 0x4000, - 0xf5e: 0x4000, 0xf5f: 0x4000, 0xf60: 0x4000, 0xf61: 0x4000, 0xf62: 0x4000, 0xf63: 0x4000, - 0xf64: 0x4000, 0xf65: 0x4000, 0xf66: 0x4000, 0xf67: 0x4000, 0xf68: 0x4000, 0xf69: 0x4000, - 0xf6a: 0x4000, 0xf6b: 0x4000, 0xf6c: 0x4000, 0xf6d: 0x4000, 0xf6e: 0x4000, 0xf6f: 0x4000, - 0xf70: 0x4000, 0xf71: 0x4000, 0xf72: 0x4000, 0xf73: 0x4000, 0xf74: 0x4000, 0xf75: 0x4000, - 0xf76: 0x4000, 0xf77: 0x4000, 0xf78: 0x4000, 0xf79: 0x4000, 0xf7a: 0x4000, 0xf7b: 0x4000, - 0xf7c: 0x4000, 0xf7d: 0x4000, 0xf7e: 0x4000, 0xf7f: 0x4000, - // Block 0x3e, offset 0xf80 - 0xf80: 0x4000, 0xf81: 0x4000, 0xf82: 0x4000, 0xf83: 0x4000, 0xf84: 0x4000, 0xf85: 0x4000, - 0xf86: 0x4000, - // Block 0x3f, offset 0xfc0 - 0xfe0: 0x4000, 0xfe1: 0x4000, 0xfe2: 0x4000, 0xfe3: 0x4000, - 0xfe4: 0x4000, 0xfe5: 0x4000, 0xfe6: 0x4000, 0xfe7: 0x4000, 0xfe8: 0x4000, 0xfe9: 0x4000, - 0xfea: 0x4000, 0xfeb: 0x4000, 0xfec: 0x4000, 0xfed: 0x4000, 0xfee: 0x4000, 0xfef: 0x4000, - 0xff0: 0x4000, 0xff1: 0x4000, 0xff2: 0x4000, 0xff3: 0x4000, 0xff4: 0x4000, 0xff5: 0x4000, - 0xff6: 0x4000, 0xff7: 0x4000, 0xff8: 0x4000, 0xff9: 0x4000, 0xffa: 0x4000, 0xffb: 0x4000, - 0xffc: 0x4000, - // Block 0x40, offset 0x1000 - 0x1000: 0x4000, 0x1001: 0x4000, 0x1002: 0x4000, 0x1003: 0x4000, 0x1004: 0x4000, 0x1005: 0x4000, - 0x1006: 0x4000, 0x1007: 0x4000, 0x1008: 0x4000, 0x1009: 0x4000, 0x100a: 0x4000, 0x100b: 0x4000, - 0x100c: 0x4000, 0x100d: 0x4000, 0x100e: 0x4000, 0x100f: 0x4000, 0x1010: 0x4000, 0x1011: 0x4000, - 0x1012: 0x4000, 0x1013: 0x4000, 0x1014: 0x4000, 0x1015: 0x4000, 0x1016: 0x4000, 0x1017: 0x4000, - 0x1018: 0x4000, 0x1019: 0x4000, 0x101a: 0x4000, 0x101b: 0x4000, 0x101c: 0x4000, 0x101d: 0x4000, - 0x101e: 0x4000, 0x101f: 0x4000, 0x1020: 0x4000, 0x1021: 0x4000, 0x1022: 0x4000, 0x1023: 0x4000, - // Block 0x41, offset 0x1040 - 0x1040: 0x2000, 0x1041: 0x2000, 0x1042: 0x2000, 0x1043: 0x2000, 0x1044: 0x2000, 0x1045: 0x2000, - 0x1046: 0x2000, 0x1047: 0x2000, 0x1048: 0x2000, 0x1049: 0x2000, 0x104a: 0x2000, 0x104b: 0x2000, - 0x104c: 0x2000, 0x104d: 0x2000, 0x104e: 0x2000, 0x104f: 0x2000, 0x1050: 0x4000, 0x1051: 0x4000, - 0x1052: 0x4000, 0x1053: 0x4000, 0x1054: 0x4000, 0x1055: 0x4000, 0x1056: 0x4000, 0x1057: 0x4000, - 0x1058: 0x4000, 0x1059: 0x4000, - 0x1070: 0x4000, 0x1071: 0x4000, 0x1072: 0x4000, 0x1073: 0x4000, 0x1074: 0x4000, 0x1075: 0x4000, - 0x1076: 0x4000, 0x1077: 0x4000, 0x1078: 0x4000, 0x1079: 0x4000, 0x107a: 0x4000, 0x107b: 0x4000, - 0x107c: 0x4000, 0x107d: 0x4000, 0x107e: 0x4000, 0x107f: 0x4000, - // Block 0x42, offset 0x1080 - 0x1080: 0x4000, 0x1081: 0x4000, 0x1082: 0x4000, 0x1083: 0x4000, 0x1084: 0x4000, 0x1085: 0x4000, - 0x1086: 0x4000, 0x1087: 0x4000, 0x1088: 0x4000, 0x1089: 0x4000, 0x108a: 0x4000, 0x108b: 0x4000, - 0x108c: 0x4000, 0x108d: 0x4000, 0x108e: 0x4000, 0x108f: 0x4000, 0x1090: 0x4000, 0x1091: 0x4000, - 0x1092: 0x4000, 0x1094: 0x4000, 0x1095: 0x4000, 0x1096: 0x4000, 0x1097: 0x4000, - 0x1098: 0x4000, 0x1099: 0x4000, 0x109a: 0x4000, 0x109b: 0x4000, 0x109c: 0x4000, 0x109d: 0x4000, - 0x109e: 0x4000, 0x109f: 0x4000, 0x10a0: 0x4000, 0x10a1: 0x4000, 0x10a2: 0x4000, 0x10a3: 0x4000, - 0x10a4: 0x4000, 0x10a5: 0x4000, 0x10a6: 0x4000, 0x10a8: 0x4000, 0x10a9: 0x4000, - 0x10aa: 0x4000, 0x10ab: 0x4000, - // Block 0x43, offset 0x10c0 - 0x10c1: 0x9012, 0x10c2: 
0x9012, 0x10c3: 0x9012, 0x10c4: 0x9012, 0x10c5: 0x9012, - 0x10c6: 0x9012, 0x10c7: 0x9012, 0x10c8: 0x9012, 0x10c9: 0x9012, 0x10ca: 0x9012, 0x10cb: 0x9012, - 0x10cc: 0x9012, 0x10cd: 0x9012, 0x10ce: 0x9012, 0x10cf: 0x9012, 0x10d0: 0x9012, 0x10d1: 0x9012, - 0x10d2: 0x9012, 0x10d3: 0x9012, 0x10d4: 0x9012, 0x10d5: 0x9012, 0x10d6: 0x9012, 0x10d7: 0x9012, - 0x10d8: 0x9012, 0x10d9: 0x9012, 0x10da: 0x9012, 0x10db: 0x9012, 0x10dc: 0x9012, 0x10dd: 0x9012, - 0x10de: 0x9012, 0x10df: 0x9012, 0x10e0: 0x9049, 0x10e1: 0x9049, 0x10e2: 0x9049, 0x10e3: 0x9049, - 0x10e4: 0x9049, 0x10e5: 0x9049, 0x10e6: 0x9049, 0x10e7: 0x9049, 0x10e8: 0x9049, 0x10e9: 0x9049, - 0x10ea: 0x9049, 0x10eb: 0x9049, 0x10ec: 0x9049, 0x10ed: 0x9049, 0x10ee: 0x9049, 0x10ef: 0x9049, - 0x10f0: 0x9049, 0x10f1: 0x9049, 0x10f2: 0x9049, 0x10f3: 0x9049, 0x10f4: 0x9049, 0x10f5: 0x9049, - 0x10f6: 0x9049, 0x10f7: 0x9049, 0x10f8: 0x9049, 0x10f9: 0x9049, 0x10fa: 0x9049, 0x10fb: 0x9049, - 0x10fc: 0x9049, 0x10fd: 0x9049, 0x10fe: 0x9049, 0x10ff: 0x9049, - // Block 0x44, offset 0x1100 - 0x1100: 0x9049, 0x1101: 0x9049, 0x1102: 0x9049, 0x1103: 0x9049, 0x1104: 0x9049, 0x1105: 0x9049, - 0x1106: 0x9049, 0x1107: 0x9049, 0x1108: 0x9049, 0x1109: 0x9049, 0x110a: 0x9049, 0x110b: 0x9049, - 0x110c: 0x9049, 0x110d: 0x9049, 0x110e: 0x9049, 0x110f: 0x9049, 0x1110: 0x9049, 0x1111: 0x9049, - 0x1112: 0x9049, 0x1113: 0x9049, 0x1114: 0x9049, 0x1115: 0x9049, 0x1116: 0x9049, 0x1117: 0x9049, - 0x1118: 0x9049, 0x1119: 0x9049, 0x111a: 0x9049, 0x111b: 0x9049, 0x111c: 0x9049, 0x111d: 0x9049, - 0x111e: 0x9049, 0x111f: 0x904a, 0x1120: 0x904b, 0x1121: 0xb04c, 0x1122: 0xb04d, 0x1123: 0xb04d, - 0x1124: 0xb04e, 0x1125: 0xb04f, 0x1126: 0xb050, 0x1127: 0xb051, 0x1128: 0xb052, 0x1129: 0xb053, - 0x112a: 0xb054, 0x112b: 0xb055, 0x112c: 0xb056, 0x112d: 0xb057, 0x112e: 0xb058, 0x112f: 0xb059, - 0x1130: 0xb05a, 0x1131: 0xb05b, 0x1132: 0xb05c, 0x1133: 0xb05d, 0x1134: 0xb05e, 0x1135: 0xb05f, - 0x1136: 0xb060, 0x1137: 0xb061, 0x1138: 0xb062, 0x1139: 0xb063, 0x113a: 0xb064, 0x113b: 0xb065, - 0x113c: 0xb052, 0x113d: 0xb066, 0x113e: 0xb067, 0x113f: 0xb055, - // Block 0x45, offset 0x1140 - 0x1140: 0xb068, 0x1141: 0xb069, 0x1142: 0xb06a, 0x1143: 0xb06b, 0x1144: 0xb05a, 0x1145: 0xb056, - 0x1146: 0xb06c, 0x1147: 0xb06d, 0x1148: 0xb06b, 0x1149: 0xb06e, 0x114a: 0xb06b, 0x114b: 0xb06f, - 0x114c: 0xb06f, 0x114d: 0xb070, 0x114e: 0xb070, 0x114f: 0xb071, 0x1150: 0xb056, 0x1151: 0xb072, - 0x1152: 0xb073, 0x1153: 0xb072, 0x1154: 0xb074, 0x1155: 0xb073, 0x1156: 0xb075, 0x1157: 0xb075, - 0x1158: 0xb076, 0x1159: 0xb076, 0x115a: 0xb077, 0x115b: 0xb077, 0x115c: 0xb073, 0x115d: 0xb078, - 0x115e: 0xb079, 0x115f: 0xb067, 0x1160: 0xb07a, 0x1161: 0xb07b, 0x1162: 0xb07b, 0x1163: 0xb07b, - 0x1164: 0xb07b, 0x1165: 0xb07b, 0x1166: 0xb07b, 0x1167: 0xb07b, 0x1168: 0xb07b, 0x1169: 0xb07b, - 0x116a: 0xb07b, 0x116b: 0xb07b, 0x116c: 0xb07b, 0x116d: 0xb07b, 0x116e: 0xb07b, 0x116f: 0xb07b, - 0x1170: 0xb07c, 0x1171: 0xb07c, 0x1172: 0xb07c, 0x1173: 0xb07c, 0x1174: 0xb07c, 0x1175: 0xb07c, - 0x1176: 0xb07c, 0x1177: 0xb07c, 0x1178: 0xb07c, 0x1179: 0xb07c, 0x117a: 0xb07c, 0x117b: 0xb07c, - 0x117c: 0xb07c, 0x117d: 0xb07c, 0x117e: 0xb07c, - // Block 0x46, offset 0x1180 - 0x1182: 0xb07d, 0x1183: 0xb07e, 0x1184: 0xb07f, 0x1185: 0xb080, - 0x1186: 0xb07f, 0x1187: 0xb07e, 0x118a: 0xb081, 0x118b: 0xb082, - 0x118c: 0xb083, 0x118d: 0xb07f, 0x118e: 0xb080, 0x118f: 0xb07f, - 0x1192: 0xb084, 0x1193: 0xb085, 0x1194: 0xb084, 0x1195: 0xb086, 0x1196: 0xb084, 0x1197: 0xb087, - 0x119a: 0xb088, 0x119b: 0xb089, 0x119c: 0xb08a, - 0x11a0: 0x908b, 0x11a1: 0x908b, 
0x11a2: 0x908c, 0x11a3: 0x908d, - 0x11a4: 0x908b, 0x11a5: 0x908e, 0x11a6: 0x908f, 0x11a8: 0xb090, 0x11a9: 0xb091, - 0x11aa: 0xb092, 0x11ab: 0xb091, 0x11ac: 0xb093, 0x11ad: 0xb094, 0x11ae: 0xb095, - 0x11bd: 0x2000, - // Block 0x47, offset 0x11c0 - 0x11e0: 0x4000, 0x11e1: 0x4000, 0x11e2: 0x4000, 0x11e3: 0x4000, - // Block 0x48, offset 0x1200 - 0x1200: 0x4000, 0x1201: 0x4000, 0x1202: 0x4000, 0x1203: 0x4000, 0x1204: 0x4000, 0x1205: 0x4000, - 0x1206: 0x4000, 0x1207: 0x4000, 0x1208: 0x4000, 0x1209: 0x4000, 0x120a: 0x4000, 0x120b: 0x4000, - 0x120c: 0x4000, 0x120d: 0x4000, 0x120e: 0x4000, 0x120f: 0x4000, 0x1210: 0x4000, 0x1211: 0x4000, - 0x1212: 0x4000, 0x1213: 0x4000, 0x1214: 0x4000, 0x1215: 0x4000, 0x1216: 0x4000, 0x1217: 0x4000, - 0x1218: 0x4000, 0x1219: 0x4000, 0x121a: 0x4000, 0x121b: 0x4000, 0x121c: 0x4000, 0x121d: 0x4000, - 0x121e: 0x4000, 0x121f: 0x4000, 0x1220: 0x4000, 0x1221: 0x4000, 0x1222: 0x4000, 0x1223: 0x4000, - 0x1224: 0x4000, 0x1225: 0x4000, 0x1226: 0x4000, 0x1227: 0x4000, 0x1228: 0x4000, 0x1229: 0x4000, - 0x122a: 0x4000, 0x122b: 0x4000, 0x122c: 0x4000, 0x122d: 0x4000, 0x122e: 0x4000, 0x122f: 0x4000, - 0x1230: 0x4000, 0x1231: 0x4000, 0x1232: 0x4000, 0x1233: 0x4000, 0x1234: 0x4000, 0x1235: 0x4000, - 0x1236: 0x4000, 0x1237: 0x4000, - // Block 0x49, offset 0x1240 - 0x1240: 0x4000, 0x1241: 0x4000, 0x1242: 0x4000, 0x1243: 0x4000, 0x1244: 0x4000, 0x1245: 0x4000, - 0x1246: 0x4000, 0x1247: 0x4000, 0x1248: 0x4000, 0x1249: 0x4000, 0x124a: 0x4000, 0x124b: 0x4000, - 0x124c: 0x4000, 0x124d: 0x4000, 0x124e: 0x4000, 0x124f: 0x4000, 0x1250: 0x4000, 0x1251: 0x4000, - 0x1252: 0x4000, 0x1253: 0x4000, 0x1254: 0x4000, 0x1255: 0x4000, 0x1256: 0x4000, 0x1257: 0x4000, - 0x1258: 0x4000, 0x1259: 0x4000, 0x125a: 0x4000, 0x125b: 0x4000, 0x125c: 0x4000, 0x125d: 0x4000, - 0x125e: 0x4000, 0x125f: 0x4000, 0x1260: 0x4000, 0x1261: 0x4000, 0x1262: 0x4000, 0x1263: 0x4000, - 0x1264: 0x4000, 0x1265: 0x4000, 0x1266: 0x4000, 0x1267: 0x4000, 0x1268: 0x4000, 0x1269: 0x4000, - 0x126a: 0x4000, 0x126b: 0x4000, 0x126c: 0x4000, 0x126d: 0x4000, 0x126e: 0x4000, 0x126f: 0x4000, - 0x1270: 0x4000, 0x1271: 0x4000, 0x1272: 0x4000, - // Block 0x4a, offset 0x1280 - 0x1280: 0x4000, 0x1281: 0x4000, 0x1282: 0x4000, 0x1283: 0x4000, 0x1284: 0x4000, 0x1285: 0x4000, - 0x1286: 0x4000, 0x1287: 0x4000, 0x1288: 0x4000, 0x1289: 0x4000, 0x128a: 0x4000, 0x128b: 0x4000, - 0x128c: 0x4000, 0x128d: 0x4000, 0x128e: 0x4000, 0x128f: 0x4000, 0x1290: 0x4000, 0x1291: 0x4000, - 0x1292: 0x4000, 0x1293: 0x4000, 0x1294: 0x4000, 0x1295: 0x4000, 0x1296: 0x4000, 0x1297: 0x4000, - 0x1298: 0x4000, 0x1299: 0x4000, 0x129a: 0x4000, 0x129b: 0x4000, 0x129c: 0x4000, 0x129d: 0x4000, - 0x129e: 0x4000, - // Block 0x4b, offset 0x12c0 - 0x12d0: 0x4000, 0x12d1: 0x4000, - 0x12d2: 0x4000, - 0x12e4: 0x4000, 0x12e5: 0x4000, 0x12e6: 0x4000, 0x12e7: 0x4000, - 0x12f0: 0x4000, 0x12f1: 0x4000, 0x12f2: 0x4000, 0x12f3: 0x4000, 0x12f4: 0x4000, 0x12f5: 0x4000, - 0x12f6: 0x4000, 0x12f7: 0x4000, 0x12f8: 0x4000, 0x12f9: 0x4000, 0x12fa: 0x4000, 0x12fb: 0x4000, - 0x12fc: 0x4000, 0x12fd: 0x4000, 0x12fe: 0x4000, 0x12ff: 0x4000, - // Block 0x4c, offset 0x1300 - 0x1300: 0x4000, 0x1301: 0x4000, 0x1302: 0x4000, 0x1303: 0x4000, 0x1304: 0x4000, 0x1305: 0x4000, - 0x1306: 0x4000, 0x1307: 0x4000, 0x1308: 0x4000, 0x1309: 0x4000, 0x130a: 0x4000, 0x130b: 0x4000, - 0x130c: 0x4000, 0x130d: 0x4000, 0x130e: 0x4000, 0x130f: 0x4000, 0x1310: 0x4000, 0x1311: 0x4000, - 0x1312: 0x4000, 0x1313: 0x4000, 0x1314: 0x4000, 0x1315: 0x4000, 0x1316: 0x4000, 0x1317: 0x4000, - 0x1318: 0x4000, 0x1319: 0x4000, 0x131a: 0x4000, 0x131b: 
0x4000, 0x131c: 0x4000, 0x131d: 0x4000, - 0x131e: 0x4000, 0x131f: 0x4000, 0x1320: 0x4000, 0x1321: 0x4000, 0x1322: 0x4000, 0x1323: 0x4000, - 0x1324: 0x4000, 0x1325: 0x4000, 0x1326: 0x4000, 0x1327: 0x4000, 0x1328: 0x4000, 0x1329: 0x4000, - 0x132a: 0x4000, 0x132b: 0x4000, 0x132c: 0x4000, 0x132d: 0x4000, 0x132e: 0x4000, 0x132f: 0x4000, - 0x1330: 0x4000, 0x1331: 0x4000, 0x1332: 0x4000, 0x1333: 0x4000, 0x1334: 0x4000, 0x1335: 0x4000, - 0x1336: 0x4000, 0x1337: 0x4000, 0x1338: 0x4000, 0x1339: 0x4000, 0x133a: 0x4000, 0x133b: 0x4000, - // Block 0x4d, offset 0x1340 - 0x1344: 0x4000, - // Block 0x4e, offset 0x1380 - 0x138f: 0x4000, - // Block 0x4f, offset 0x13c0 - 0x13c0: 0x2000, 0x13c1: 0x2000, 0x13c2: 0x2000, 0x13c3: 0x2000, 0x13c4: 0x2000, 0x13c5: 0x2000, - 0x13c6: 0x2000, 0x13c7: 0x2000, 0x13c8: 0x2000, 0x13c9: 0x2000, 0x13ca: 0x2000, - 0x13d0: 0x2000, 0x13d1: 0x2000, - 0x13d2: 0x2000, 0x13d3: 0x2000, 0x13d4: 0x2000, 0x13d5: 0x2000, 0x13d6: 0x2000, 0x13d7: 0x2000, - 0x13d8: 0x2000, 0x13d9: 0x2000, 0x13da: 0x2000, 0x13db: 0x2000, 0x13dc: 0x2000, 0x13dd: 0x2000, - 0x13de: 0x2000, 0x13df: 0x2000, 0x13e0: 0x2000, 0x13e1: 0x2000, 0x13e2: 0x2000, 0x13e3: 0x2000, - 0x13e4: 0x2000, 0x13e5: 0x2000, 0x13e6: 0x2000, 0x13e7: 0x2000, 0x13e8: 0x2000, 0x13e9: 0x2000, - 0x13ea: 0x2000, 0x13eb: 0x2000, 0x13ec: 0x2000, 0x13ed: 0x2000, - 0x13f0: 0x2000, 0x13f1: 0x2000, 0x13f2: 0x2000, 0x13f3: 0x2000, 0x13f4: 0x2000, 0x13f5: 0x2000, - 0x13f6: 0x2000, 0x13f7: 0x2000, 0x13f8: 0x2000, 0x13f9: 0x2000, 0x13fa: 0x2000, 0x13fb: 0x2000, - 0x13fc: 0x2000, 0x13fd: 0x2000, 0x13fe: 0x2000, 0x13ff: 0x2000, - // Block 0x50, offset 0x1400 - 0x1400: 0x2000, 0x1401: 0x2000, 0x1402: 0x2000, 0x1403: 0x2000, 0x1404: 0x2000, 0x1405: 0x2000, - 0x1406: 0x2000, 0x1407: 0x2000, 0x1408: 0x2000, 0x1409: 0x2000, 0x140a: 0x2000, 0x140b: 0x2000, - 0x140c: 0x2000, 0x140d: 0x2000, 0x140e: 0x2000, 0x140f: 0x2000, 0x1410: 0x2000, 0x1411: 0x2000, - 0x1412: 0x2000, 0x1413: 0x2000, 0x1414: 0x2000, 0x1415: 0x2000, 0x1416: 0x2000, 0x1417: 0x2000, - 0x1418: 0x2000, 0x1419: 0x2000, 0x141a: 0x2000, 0x141b: 0x2000, 0x141c: 0x2000, 0x141d: 0x2000, - 0x141e: 0x2000, 0x141f: 0x2000, 0x1420: 0x2000, 0x1421: 0x2000, 0x1422: 0x2000, 0x1423: 0x2000, - 0x1424: 0x2000, 0x1425: 0x2000, 0x1426: 0x2000, 0x1427: 0x2000, 0x1428: 0x2000, 0x1429: 0x2000, - 0x1430: 0x2000, 0x1431: 0x2000, 0x1432: 0x2000, 0x1433: 0x2000, 0x1434: 0x2000, 0x1435: 0x2000, - 0x1436: 0x2000, 0x1437: 0x2000, 0x1438: 0x2000, 0x1439: 0x2000, 0x143a: 0x2000, 0x143b: 0x2000, - 0x143c: 0x2000, 0x143d: 0x2000, 0x143e: 0x2000, 0x143f: 0x2000, - // Block 0x51, offset 0x1440 - 0x1440: 0x2000, 0x1441: 0x2000, 0x1442: 0x2000, 0x1443: 0x2000, 0x1444: 0x2000, 0x1445: 0x2000, - 0x1446: 0x2000, 0x1447: 0x2000, 0x1448: 0x2000, 0x1449: 0x2000, 0x144a: 0x2000, 0x144b: 0x2000, - 0x144c: 0x2000, 0x144d: 0x2000, 0x144e: 0x4000, 0x144f: 0x2000, 0x1450: 0x2000, 0x1451: 0x4000, - 0x1452: 0x4000, 0x1453: 0x4000, 0x1454: 0x4000, 0x1455: 0x4000, 0x1456: 0x4000, 0x1457: 0x4000, - 0x1458: 0x4000, 0x1459: 0x4000, 0x145a: 0x4000, 0x145b: 0x2000, 0x145c: 0x2000, 0x145d: 0x2000, - 0x145e: 0x2000, 0x145f: 0x2000, 0x1460: 0x2000, 0x1461: 0x2000, 0x1462: 0x2000, 0x1463: 0x2000, - 0x1464: 0x2000, 0x1465: 0x2000, 0x1466: 0x2000, 0x1467: 0x2000, 0x1468: 0x2000, 0x1469: 0x2000, - 0x146a: 0x2000, 0x146b: 0x2000, 0x146c: 0x2000, - // Block 0x52, offset 0x1480 - 0x1480: 0x4000, 0x1481: 0x4000, 0x1482: 0x4000, - 0x1490: 0x4000, 0x1491: 0x4000, - 0x1492: 0x4000, 0x1493: 0x4000, 0x1494: 0x4000, 0x1495: 0x4000, 0x1496: 0x4000, 0x1497: 0x4000, - 
0x1498: 0x4000, 0x1499: 0x4000, 0x149a: 0x4000, 0x149b: 0x4000, 0x149c: 0x4000, 0x149d: 0x4000, - 0x149e: 0x4000, 0x149f: 0x4000, 0x14a0: 0x4000, 0x14a1: 0x4000, 0x14a2: 0x4000, 0x14a3: 0x4000, - 0x14a4: 0x4000, 0x14a5: 0x4000, 0x14a6: 0x4000, 0x14a7: 0x4000, 0x14a8: 0x4000, 0x14a9: 0x4000, - 0x14aa: 0x4000, 0x14ab: 0x4000, 0x14ac: 0x4000, 0x14ad: 0x4000, 0x14ae: 0x4000, 0x14af: 0x4000, - 0x14b0: 0x4000, 0x14b1: 0x4000, 0x14b2: 0x4000, 0x14b3: 0x4000, 0x14b4: 0x4000, 0x14b5: 0x4000, - 0x14b6: 0x4000, 0x14b7: 0x4000, 0x14b8: 0x4000, 0x14b9: 0x4000, 0x14ba: 0x4000, 0x14bb: 0x4000, - // Block 0x53, offset 0x14c0 - 0x14c0: 0x4000, 0x14c1: 0x4000, 0x14c2: 0x4000, 0x14c3: 0x4000, 0x14c4: 0x4000, 0x14c5: 0x4000, - 0x14c6: 0x4000, 0x14c7: 0x4000, 0x14c8: 0x4000, - 0x14d0: 0x4000, 0x14d1: 0x4000, - 0x14e0: 0x4000, 0x14e1: 0x4000, 0x14e2: 0x4000, 0x14e3: 0x4000, - 0x14e4: 0x4000, 0x14e5: 0x4000, - // Block 0x54, offset 0x1500 - 0x1500: 0x4000, 0x1501: 0x4000, 0x1502: 0x4000, 0x1503: 0x4000, 0x1504: 0x4000, 0x1505: 0x4000, - 0x1506: 0x4000, 0x1507: 0x4000, 0x1508: 0x4000, 0x1509: 0x4000, 0x150a: 0x4000, 0x150b: 0x4000, - 0x150c: 0x4000, 0x150d: 0x4000, 0x150e: 0x4000, 0x150f: 0x4000, 0x1510: 0x4000, 0x1511: 0x4000, - 0x1512: 0x4000, 0x1513: 0x4000, 0x1514: 0x4000, 0x1515: 0x4000, 0x1516: 0x4000, 0x1517: 0x4000, - 0x1518: 0x4000, 0x1519: 0x4000, 0x151a: 0x4000, 0x151b: 0x4000, 0x151c: 0x4000, 0x151d: 0x4000, - 0x151e: 0x4000, 0x151f: 0x4000, 0x1520: 0x4000, - 0x152d: 0x4000, 0x152e: 0x4000, 0x152f: 0x4000, - 0x1530: 0x4000, 0x1531: 0x4000, 0x1532: 0x4000, 0x1533: 0x4000, 0x1534: 0x4000, 0x1535: 0x4000, - 0x1537: 0x4000, 0x1538: 0x4000, 0x1539: 0x4000, 0x153a: 0x4000, 0x153b: 0x4000, - 0x153c: 0x4000, 0x153d: 0x4000, 0x153e: 0x4000, 0x153f: 0x4000, - // Block 0x55, offset 0x1540 - 0x1540: 0x4000, 0x1541: 0x4000, 0x1542: 0x4000, 0x1543: 0x4000, 0x1544: 0x4000, 0x1545: 0x4000, - 0x1546: 0x4000, 0x1547: 0x4000, 0x1548: 0x4000, 0x1549: 0x4000, 0x154a: 0x4000, 0x154b: 0x4000, - 0x154c: 0x4000, 0x154d: 0x4000, 0x154e: 0x4000, 0x154f: 0x4000, 0x1550: 0x4000, 0x1551: 0x4000, - 0x1552: 0x4000, 0x1553: 0x4000, 0x1554: 0x4000, 0x1555: 0x4000, 0x1556: 0x4000, 0x1557: 0x4000, - 0x1558: 0x4000, 0x1559: 0x4000, 0x155a: 0x4000, 0x155b: 0x4000, 0x155c: 0x4000, 0x155d: 0x4000, - 0x155e: 0x4000, 0x155f: 0x4000, 0x1560: 0x4000, 0x1561: 0x4000, 0x1562: 0x4000, 0x1563: 0x4000, - 0x1564: 0x4000, 0x1565: 0x4000, 0x1566: 0x4000, 0x1567: 0x4000, 0x1568: 0x4000, 0x1569: 0x4000, - 0x156a: 0x4000, 0x156b: 0x4000, 0x156c: 0x4000, 0x156d: 0x4000, 0x156e: 0x4000, 0x156f: 0x4000, - 0x1570: 0x4000, 0x1571: 0x4000, 0x1572: 0x4000, 0x1573: 0x4000, 0x1574: 0x4000, 0x1575: 0x4000, - 0x1576: 0x4000, 0x1577: 0x4000, 0x1578: 0x4000, 0x1579: 0x4000, 0x157a: 0x4000, 0x157b: 0x4000, - 0x157c: 0x4000, 0x157e: 0x4000, 0x157f: 0x4000, - // Block 0x56, offset 0x1580 - 0x1580: 0x4000, 0x1581: 0x4000, 0x1582: 0x4000, 0x1583: 0x4000, 0x1584: 0x4000, 0x1585: 0x4000, - 0x1586: 0x4000, 0x1587: 0x4000, 0x1588: 0x4000, 0x1589: 0x4000, 0x158a: 0x4000, 0x158b: 0x4000, - 0x158c: 0x4000, 0x158d: 0x4000, 0x158e: 0x4000, 0x158f: 0x4000, 0x1590: 0x4000, 0x1591: 0x4000, - 0x1592: 0x4000, 0x1593: 0x4000, - 0x15a0: 0x4000, 0x15a1: 0x4000, 0x15a2: 0x4000, 0x15a3: 0x4000, - 0x15a4: 0x4000, 0x15a5: 0x4000, 0x15a6: 0x4000, 0x15a7: 0x4000, 0x15a8: 0x4000, 0x15a9: 0x4000, - 0x15aa: 0x4000, 0x15ab: 0x4000, 0x15ac: 0x4000, 0x15ad: 0x4000, 0x15ae: 0x4000, 0x15af: 0x4000, - 0x15b0: 0x4000, 0x15b1: 0x4000, 0x15b2: 0x4000, 0x15b3: 0x4000, 0x15b4: 0x4000, 0x15b5: 0x4000, - 
0x15b6: 0x4000, 0x15b7: 0x4000, 0x15b8: 0x4000, 0x15b9: 0x4000, 0x15ba: 0x4000, 0x15bb: 0x4000, - 0x15bc: 0x4000, 0x15bd: 0x4000, 0x15be: 0x4000, 0x15bf: 0x4000, - // Block 0x57, offset 0x15c0 - 0x15c0: 0x4000, 0x15c1: 0x4000, 0x15c2: 0x4000, 0x15c3: 0x4000, 0x15c4: 0x4000, 0x15c5: 0x4000, - 0x15c6: 0x4000, 0x15c7: 0x4000, 0x15c8: 0x4000, 0x15c9: 0x4000, 0x15ca: 0x4000, - 0x15cf: 0x4000, 0x15d0: 0x4000, 0x15d1: 0x4000, - 0x15d2: 0x4000, 0x15d3: 0x4000, - 0x15e0: 0x4000, 0x15e1: 0x4000, 0x15e2: 0x4000, 0x15e3: 0x4000, - 0x15e4: 0x4000, 0x15e5: 0x4000, 0x15e6: 0x4000, 0x15e7: 0x4000, 0x15e8: 0x4000, 0x15e9: 0x4000, - 0x15ea: 0x4000, 0x15eb: 0x4000, 0x15ec: 0x4000, 0x15ed: 0x4000, 0x15ee: 0x4000, 0x15ef: 0x4000, - 0x15f0: 0x4000, 0x15f4: 0x4000, - 0x15f8: 0x4000, 0x15f9: 0x4000, 0x15fa: 0x4000, 0x15fb: 0x4000, - 0x15fc: 0x4000, 0x15fd: 0x4000, 0x15fe: 0x4000, 0x15ff: 0x4000, - // Block 0x58, offset 0x1600 - 0x1600: 0x4000, 0x1602: 0x4000, 0x1603: 0x4000, 0x1604: 0x4000, 0x1605: 0x4000, - 0x1606: 0x4000, 0x1607: 0x4000, 0x1608: 0x4000, 0x1609: 0x4000, 0x160a: 0x4000, 0x160b: 0x4000, - 0x160c: 0x4000, 0x160d: 0x4000, 0x160e: 0x4000, 0x160f: 0x4000, 0x1610: 0x4000, 0x1611: 0x4000, - 0x1612: 0x4000, 0x1613: 0x4000, 0x1614: 0x4000, 0x1615: 0x4000, 0x1616: 0x4000, 0x1617: 0x4000, - 0x1618: 0x4000, 0x1619: 0x4000, 0x161a: 0x4000, 0x161b: 0x4000, 0x161c: 0x4000, 0x161d: 0x4000, - 0x161e: 0x4000, 0x161f: 0x4000, 0x1620: 0x4000, 0x1621: 0x4000, 0x1622: 0x4000, 0x1623: 0x4000, - 0x1624: 0x4000, 0x1625: 0x4000, 0x1626: 0x4000, 0x1627: 0x4000, 0x1628: 0x4000, 0x1629: 0x4000, - 0x162a: 0x4000, 0x162b: 0x4000, 0x162c: 0x4000, 0x162d: 0x4000, 0x162e: 0x4000, 0x162f: 0x4000, - 0x1630: 0x4000, 0x1631: 0x4000, 0x1632: 0x4000, 0x1633: 0x4000, 0x1634: 0x4000, 0x1635: 0x4000, - 0x1636: 0x4000, 0x1637: 0x4000, 0x1638: 0x4000, 0x1639: 0x4000, 0x163a: 0x4000, 0x163b: 0x4000, - 0x163c: 0x4000, 0x163d: 0x4000, 0x163e: 0x4000, 0x163f: 0x4000, - // Block 0x59, offset 0x1640 - 0x1640: 0x4000, 0x1641: 0x4000, 0x1642: 0x4000, 0x1643: 0x4000, 0x1644: 0x4000, 0x1645: 0x4000, - 0x1646: 0x4000, 0x1647: 0x4000, 0x1648: 0x4000, 0x1649: 0x4000, 0x164a: 0x4000, 0x164b: 0x4000, - 0x164c: 0x4000, 0x164d: 0x4000, 0x164e: 0x4000, 0x164f: 0x4000, 0x1650: 0x4000, 0x1651: 0x4000, - 0x1652: 0x4000, 0x1653: 0x4000, 0x1654: 0x4000, 0x1655: 0x4000, 0x1656: 0x4000, 0x1657: 0x4000, - 0x1658: 0x4000, 0x1659: 0x4000, 0x165a: 0x4000, 0x165b: 0x4000, 0x165c: 0x4000, 0x165d: 0x4000, - 0x165e: 0x4000, 0x165f: 0x4000, 0x1660: 0x4000, 0x1661: 0x4000, 0x1662: 0x4000, 0x1663: 0x4000, - 0x1664: 0x4000, 0x1665: 0x4000, 0x1666: 0x4000, 0x1667: 0x4000, 0x1668: 0x4000, 0x1669: 0x4000, - 0x166a: 0x4000, 0x166b: 0x4000, 0x166c: 0x4000, 0x166d: 0x4000, 0x166e: 0x4000, 0x166f: 0x4000, - 0x1670: 0x4000, 0x1671: 0x4000, 0x1672: 0x4000, 0x1673: 0x4000, 0x1674: 0x4000, 0x1675: 0x4000, - 0x1676: 0x4000, 0x1677: 0x4000, 0x1678: 0x4000, 0x1679: 0x4000, 0x167a: 0x4000, 0x167b: 0x4000, - 0x167c: 0x4000, 0x167f: 0x4000, - // Block 0x5a, offset 0x1680 - 0x1680: 0x4000, 0x1681: 0x4000, 0x1682: 0x4000, 0x1683: 0x4000, 0x1684: 0x4000, 0x1685: 0x4000, - 0x1686: 0x4000, 0x1687: 0x4000, 0x1688: 0x4000, 0x1689: 0x4000, 0x168a: 0x4000, 0x168b: 0x4000, - 0x168c: 0x4000, 0x168d: 0x4000, 0x168e: 0x4000, 0x168f: 0x4000, 0x1690: 0x4000, 0x1691: 0x4000, - 0x1692: 0x4000, 0x1693: 0x4000, 0x1694: 0x4000, 0x1695: 0x4000, 0x1696: 0x4000, 0x1697: 0x4000, - 0x1698: 0x4000, 0x1699: 0x4000, 0x169a: 0x4000, 0x169b: 0x4000, 0x169c: 0x4000, 0x169d: 0x4000, - 0x169e: 0x4000, 0x169f: 0x4000, 0x16a0: 
0x4000, 0x16a1: 0x4000, 0x16a2: 0x4000, 0x16a3: 0x4000, - 0x16a4: 0x4000, 0x16a5: 0x4000, 0x16a6: 0x4000, 0x16a7: 0x4000, 0x16a8: 0x4000, 0x16a9: 0x4000, - 0x16aa: 0x4000, 0x16ab: 0x4000, 0x16ac: 0x4000, 0x16ad: 0x4000, 0x16ae: 0x4000, 0x16af: 0x4000, - 0x16b0: 0x4000, 0x16b1: 0x4000, 0x16b2: 0x4000, 0x16b3: 0x4000, 0x16b4: 0x4000, 0x16b5: 0x4000, - 0x16b6: 0x4000, 0x16b7: 0x4000, 0x16b8: 0x4000, 0x16b9: 0x4000, 0x16ba: 0x4000, 0x16bb: 0x4000, - 0x16bc: 0x4000, 0x16bd: 0x4000, - // Block 0x5b, offset 0x16c0 - 0x16cb: 0x4000, - 0x16cc: 0x4000, 0x16cd: 0x4000, 0x16ce: 0x4000, 0x16d0: 0x4000, 0x16d1: 0x4000, - 0x16d2: 0x4000, 0x16d3: 0x4000, 0x16d4: 0x4000, 0x16d5: 0x4000, 0x16d6: 0x4000, 0x16d7: 0x4000, - 0x16d8: 0x4000, 0x16d9: 0x4000, 0x16da: 0x4000, 0x16db: 0x4000, 0x16dc: 0x4000, 0x16dd: 0x4000, - 0x16de: 0x4000, 0x16df: 0x4000, 0x16e0: 0x4000, 0x16e1: 0x4000, 0x16e2: 0x4000, 0x16e3: 0x4000, - 0x16e4: 0x4000, 0x16e5: 0x4000, 0x16e6: 0x4000, 0x16e7: 0x4000, - 0x16fa: 0x4000, - // Block 0x5c, offset 0x1700 - 0x1715: 0x4000, 0x1716: 0x4000, - 0x1724: 0x4000, - // Block 0x5d, offset 0x1740 - 0x177b: 0x4000, - 0x177c: 0x4000, 0x177d: 0x4000, 0x177e: 0x4000, 0x177f: 0x4000, - // Block 0x5e, offset 0x1780 - 0x1780: 0x4000, 0x1781: 0x4000, 0x1782: 0x4000, 0x1783: 0x4000, 0x1784: 0x4000, 0x1785: 0x4000, - 0x1786: 0x4000, 0x1787: 0x4000, 0x1788: 0x4000, 0x1789: 0x4000, 0x178a: 0x4000, 0x178b: 0x4000, - 0x178c: 0x4000, 0x178d: 0x4000, 0x178e: 0x4000, 0x178f: 0x4000, - // Block 0x5f, offset 0x17c0 - 0x17c0: 0x4000, 0x17c1: 0x4000, 0x17c2: 0x4000, 0x17c3: 0x4000, 0x17c4: 0x4000, 0x17c5: 0x4000, - 0x17cc: 0x4000, 0x17d0: 0x4000, 0x17d1: 0x4000, - 0x17d2: 0x4000, 0x17d5: 0x4000, - 0x17eb: 0x4000, 0x17ec: 0x4000, - 0x17f4: 0x4000, 0x17f5: 0x4000, - 0x17f6: 0x4000, 0x17f7: 0x4000, 0x17f8: 0x4000, 0x17f9: 0x4000, 0x17fa: 0x4000, - // Block 0x60, offset 0x1800 - 0x1820: 0x4000, 0x1821: 0x4000, 0x1822: 0x4000, 0x1823: 0x4000, - 0x1824: 0x4000, 0x1825: 0x4000, 0x1826: 0x4000, 0x1827: 0x4000, 0x1828: 0x4000, 0x1829: 0x4000, - 0x182a: 0x4000, 0x182b: 0x4000, - // Block 0x61, offset 0x1840 - 0x184d: 0x4000, 0x184e: 0x4000, 0x184f: 0x4000, 0x1850: 0x4000, 0x1851: 0x4000, - 0x1852: 0x4000, 0x1853: 0x4000, 0x1854: 0x4000, 0x1855: 0x4000, 0x1856: 0x4000, 0x1857: 0x4000, - 0x1858: 0x4000, 0x1859: 0x4000, 0x185a: 0x4000, 0x185b: 0x4000, 0x185c: 0x4000, 0x185d: 0x4000, - 0x185e: 0x4000, 0x185f: 0x4000, 0x1860: 0x4000, 0x1861: 0x4000, 0x1862: 0x4000, 0x1863: 0x4000, - 0x1864: 0x4000, 0x1865: 0x4000, 0x1866: 0x4000, 0x1867: 0x4000, 0x1868: 0x4000, 0x1869: 0x4000, - 0x186a: 0x4000, 0x186b: 0x4000, 0x186c: 0x4000, 0x186d: 0x4000, 0x186e: 0x4000, 0x186f: 0x4000, - 0x1870: 0x4000, 0x1871: 0x4000, 0x1872: 0x4000, 0x1873: 0x4000, 0x1874: 0x4000, 0x1875: 0x4000, - 0x1876: 0x4000, 0x1877: 0x4000, 0x1878: 0x4000, 0x1879: 0x4000, 0x187a: 0x4000, 0x187b: 0x4000, - 0x187c: 0x4000, 0x187d: 0x4000, 0x187e: 0x4000, 0x187f: 0x4000, - // Block 0x62, offset 0x1880 - 0x1880: 0x4000, 0x1881: 0x4000, 0x1882: 0x4000, 0x1883: 0x4000, 0x1884: 0x4000, 0x1885: 0x4000, - 0x1886: 0x4000, 0x1887: 0x4000, 0x1888: 0x4000, 0x1889: 0x4000, 0x188a: 0x4000, 0x188b: 0x4000, - 0x188c: 0x4000, 0x188d: 0x4000, 0x188e: 0x4000, 0x188f: 0x4000, 0x1890: 0x4000, 0x1891: 0x4000, - 0x1892: 0x4000, 0x1893: 0x4000, 0x1894: 0x4000, 0x1895: 0x4000, 0x1896: 0x4000, 0x1897: 0x4000, - 0x1898: 0x4000, 0x1899: 0x4000, 0x189a: 0x4000, 0x189b: 0x4000, 0x189c: 0x4000, 0x189d: 0x4000, - 0x189e: 0x4000, 0x189f: 0x4000, 0x18a0: 0x4000, 0x18a1: 0x4000, 0x18a2: 0x4000, 0x18a3: 
0x4000, - 0x18a4: 0x4000, 0x18a5: 0x4000, 0x18a6: 0x4000, 0x18a7: 0x4000, 0x18a8: 0x4000, 0x18a9: 0x4000, - 0x18aa: 0x4000, 0x18ab: 0x4000, 0x18ac: 0x4000, 0x18ad: 0x4000, 0x18ae: 0x4000, 0x18af: 0x4000, - 0x18b0: 0x4000, 0x18b1: 0x4000, 0x18b3: 0x4000, 0x18b4: 0x4000, 0x18b5: 0x4000, - 0x18b6: 0x4000, 0x18ba: 0x4000, 0x18bb: 0x4000, - 0x18bc: 0x4000, 0x18bd: 0x4000, 0x18be: 0x4000, 0x18bf: 0x4000, - // Block 0x63, offset 0x18c0 - 0x18c0: 0x4000, 0x18c1: 0x4000, 0x18c2: 0x4000, 0x18c3: 0x4000, 0x18c4: 0x4000, 0x18c5: 0x4000, - 0x18c6: 0x4000, 0x18c7: 0x4000, 0x18c8: 0x4000, 0x18c9: 0x4000, 0x18ca: 0x4000, 0x18cb: 0x4000, - 0x18cc: 0x4000, 0x18cd: 0x4000, 0x18ce: 0x4000, 0x18cf: 0x4000, 0x18d0: 0x4000, 0x18d1: 0x4000, - 0x18d2: 0x4000, 0x18d3: 0x4000, 0x18d4: 0x4000, 0x18d5: 0x4000, 0x18d6: 0x4000, 0x18d7: 0x4000, - 0x18d8: 0x4000, 0x18d9: 0x4000, 0x18da: 0x4000, 0x18db: 0x4000, 0x18dc: 0x4000, 0x18dd: 0x4000, - 0x18de: 0x4000, 0x18df: 0x4000, 0x18e0: 0x4000, 0x18e1: 0x4000, 0x18e2: 0x4000, - 0x18e5: 0x4000, 0x18e6: 0x4000, 0x18e7: 0x4000, 0x18e8: 0x4000, 0x18e9: 0x4000, - 0x18ea: 0x4000, 0x18ee: 0x4000, 0x18ef: 0x4000, - 0x18f0: 0x4000, 0x18f1: 0x4000, 0x18f2: 0x4000, 0x18f3: 0x4000, 0x18f4: 0x4000, 0x18f5: 0x4000, - 0x18f6: 0x4000, 0x18f7: 0x4000, 0x18f8: 0x4000, 0x18f9: 0x4000, 0x18fa: 0x4000, 0x18fb: 0x4000, - 0x18fc: 0x4000, 0x18fd: 0x4000, 0x18fe: 0x4000, 0x18ff: 0x4000, - // Block 0x64, offset 0x1900 - 0x1900: 0x4000, 0x1901: 0x4000, 0x1902: 0x4000, 0x1903: 0x4000, 0x1904: 0x4000, 0x1905: 0x4000, - 0x1906: 0x4000, 0x1907: 0x4000, 0x1908: 0x4000, 0x1909: 0x4000, 0x190a: 0x4000, - 0x190d: 0x4000, 0x190e: 0x4000, 0x190f: 0x4000, 0x1910: 0x4000, 0x1911: 0x4000, - 0x1912: 0x4000, 0x1913: 0x4000, 0x1914: 0x4000, 0x1915: 0x4000, 0x1916: 0x4000, 0x1917: 0x4000, - 0x1918: 0x4000, 0x1919: 0x4000, 0x191a: 0x4000, 0x191b: 0x4000, 0x191c: 0x4000, 0x191d: 0x4000, - 0x191e: 0x4000, 0x191f: 0x4000, 0x1920: 0x4000, 0x1921: 0x4000, 0x1922: 0x4000, 0x1923: 0x4000, - 0x1924: 0x4000, 0x1925: 0x4000, 0x1926: 0x4000, 0x1927: 0x4000, 0x1928: 0x4000, 0x1929: 0x4000, - 0x192a: 0x4000, 0x192b: 0x4000, 0x192c: 0x4000, 0x192d: 0x4000, 0x192e: 0x4000, 0x192f: 0x4000, - 0x1930: 0x4000, 0x1931: 0x4000, 0x1932: 0x4000, 0x1933: 0x4000, 0x1934: 0x4000, 0x1935: 0x4000, - 0x1936: 0x4000, 0x1937: 0x4000, 0x1938: 0x4000, 0x1939: 0x4000, 0x193a: 0x4000, 0x193b: 0x4000, - 0x193c: 0x4000, 0x193d: 0x4000, 0x193e: 0x4000, 0x193f: 0x4000, - // Block 0x65, offset 0x1940 - 0x1970: 0x4000, 0x1971: 0x4000, 0x1972: 0x4000, 0x1973: 0x4000, - 0x1978: 0x4000, 0x1979: 0x4000, 0x197a: 0x4000, - // Block 0x66, offset 0x1980 - 0x1980: 0x4000, 0x1981: 0x4000, 0x1982: 0x4000, - 0x1990: 0x4000, 0x1991: 0x4000, - 0x1992: 0x4000, 0x1993: 0x4000, 0x1994: 0x4000, 0x1995: 0x4000, - // Block 0x67, offset 0x19c0 - 0x19c0: 0x2000, 0x19c1: 0x2000, 0x19c2: 0x2000, 0x19c3: 0x2000, 0x19c4: 0x2000, 0x19c5: 0x2000, - 0x19c6: 0x2000, 0x19c7: 0x2000, 0x19c8: 0x2000, 0x19c9: 0x2000, 0x19ca: 0x2000, 0x19cb: 0x2000, - 0x19cc: 0x2000, 0x19cd: 0x2000, 0x19ce: 0x2000, 0x19cf: 0x2000, 0x19d0: 0x2000, 0x19d1: 0x2000, - 0x19d2: 0x2000, 0x19d3: 0x2000, 0x19d4: 0x2000, 0x19d5: 0x2000, 0x19d6: 0x2000, 0x19d7: 0x2000, - 0x19d8: 0x2000, 0x19d9: 0x2000, 0x19da: 0x2000, 0x19db: 0x2000, 0x19dc: 0x2000, 0x19dd: 0x2000, - 0x19de: 0x2000, 0x19df: 0x2000, 0x19e0: 0x2000, 0x19e1: 0x2000, 0x19e2: 0x2000, 0x19e3: 0x2000, - 0x19e4: 0x2000, 0x19e5: 0x2000, 0x19e6: 0x2000, 0x19e7: 0x2000, 0x19e8: 0x2000, 0x19e9: 0x2000, - 0x19ea: 0x2000, 0x19eb: 0x2000, 0x19ec: 0x2000, 0x19ed: 0x2000, 
0x19ee: 0x2000, 0x19ef: 0x2000, - 0x19f0: 0x2000, 0x19f1: 0x2000, 0x19f2: 0x2000, 0x19f3: 0x2000, 0x19f4: 0x2000, 0x19f5: 0x2000, - 0x19f6: 0x2000, 0x19f7: 0x2000, 0x19f8: 0x2000, 0x19f9: 0x2000, 0x19fa: 0x2000, 0x19fb: 0x2000, - 0x19fc: 0x2000, 0x19fd: 0x2000, -} - -// widthIndex: 22 blocks, 1408 entries, 1408 bytes -// Block 0 is the zero block. -var widthIndex = [1408]uint8{ - // Block 0x0, offset 0x0 - // Block 0x1, offset 0x40 - // Block 0x2, offset 0x80 - // Block 0x3, offset 0xc0 - 0xc2: 0x01, 0xc3: 0x02, 0xc4: 0x03, 0xc5: 0x04, 0xc7: 0x05, - 0xc9: 0x06, 0xcb: 0x07, 0xcc: 0x08, 0xcd: 0x09, 0xce: 0x0a, 0xcf: 0x0b, - 0xd0: 0x0c, 0xd1: 0x0d, - 0xe1: 0x02, 0xe2: 0x03, 0xe3: 0x04, 0xe4: 0x05, 0xe5: 0x06, 0xe6: 0x06, 0xe7: 0x06, - 0xe8: 0x06, 0xe9: 0x06, 0xea: 0x07, 0xeb: 0x06, 0xec: 0x06, 0xed: 0x08, 0xee: 0x09, 0xef: 0x0a, - 0xf0: 0x0f, 0xf3: 0x12, 0xf4: 0x13, - // Block 0x4, offset 0x100 - 0x104: 0x0e, 0x105: 0x0f, - // Block 0x5, offset 0x140 - 0x140: 0x10, 0x141: 0x11, 0x142: 0x12, 0x144: 0x13, 0x145: 0x14, 0x146: 0x15, 0x147: 0x16, - 0x148: 0x17, 0x149: 0x18, 0x14a: 0x19, 0x14c: 0x1a, 0x14f: 0x1b, - 0x151: 0x1c, 0x152: 0x08, 0x153: 0x1d, 0x154: 0x1e, 0x155: 0x1f, 0x156: 0x20, 0x157: 0x21, - 0x158: 0x22, 0x159: 0x23, 0x15a: 0x24, 0x15b: 0x25, 0x15c: 0x26, 0x15d: 0x27, 0x15e: 0x28, 0x15f: 0x29, - 0x166: 0x2a, - 0x16c: 0x2b, 0x16d: 0x2c, - 0x17a: 0x2d, 0x17b: 0x2e, 0x17c: 0x0e, 0x17d: 0x0e, 0x17e: 0x0e, 0x17f: 0x2f, - // Block 0x6, offset 0x180 - 0x180: 0x30, 0x181: 0x31, 0x182: 0x32, 0x183: 0x33, 0x184: 0x34, 0x185: 0x35, 0x186: 0x36, 0x187: 0x37, - 0x188: 0x38, 0x189: 0x39, 0x18a: 0x0e, 0x18b: 0x3a, 0x18c: 0x0e, 0x18d: 0x0e, 0x18e: 0x0e, 0x18f: 0x0e, - 0x190: 0x0e, 0x191: 0x0e, 0x192: 0x0e, 0x193: 0x0e, 0x194: 0x0e, 0x195: 0x0e, 0x196: 0x0e, 0x197: 0x0e, - 0x198: 0x0e, 0x199: 0x0e, 0x19a: 0x0e, 0x19b: 0x0e, 0x19c: 0x0e, 0x19d: 0x0e, 0x19e: 0x0e, 0x19f: 0x0e, - 0x1a0: 0x0e, 0x1a1: 0x0e, 0x1a2: 0x0e, 0x1a3: 0x0e, 0x1a4: 0x0e, 0x1a5: 0x0e, 0x1a6: 0x0e, 0x1a7: 0x0e, - 0x1a8: 0x0e, 0x1a9: 0x0e, 0x1aa: 0x0e, 0x1ab: 0x0e, 0x1ac: 0x0e, 0x1ad: 0x0e, 0x1ae: 0x0e, 0x1af: 0x0e, - 0x1b0: 0x0e, 0x1b1: 0x0e, 0x1b2: 0x0e, 0x1b3: 0x0e, 0x1b4: 0x0e, 0x1b5: 0x0e, 0x1b6: 0x0e, 0x1b7: 0x0e, - 0x1b8: 0x0e, 0x1b9: 0x0e, 0x1ba: 0x0e, 0x1bb: 0x0e, 0x1bc: 0x0e, 0x1bd: 0x0e, 0x1be: 0x0e, 0x1bf: 0x0e, - // Block 0x7, offset 0x1c0 - 0x1c0: 0x0e, 0x1c1: 0x0e, 0x1c2: 0x0e, 0x1c3: 0x0e, 0x1c4: 0x0e, 0x1c5: 0x0e, 0x1c6: 0x0e, 0x1c7: 0x0e, - 0x1c8: 0x0e, 0x1c9: 0x0e, 0x1ca: 0x0e, 0x1cb: 0x0e, 0x1cc: 0x0e, 0x1cd: 0x0e, 0x1ce: 0x0e, 0x1cf: 0x0e, - 0x1d0: 0x0e, 0x1d1: 0x0e, 0x1d2: 0x0e, 0x1d3: 0x0e, 0x1d4: 0x0e, 0x1d5: 0x0e, 0x1d6: 0x0e, 0x1d7: 0x0e, - 0x1d8: 0x0e, 0x1d9: 0x0e, 0x1da: 0x0e, 0x1db: 0x0e, 0x1dc: 0x0e, 0x1dd: 0x0e, 0x1de: 0x0e, 0x1df: 0x0e, - 0x1e0: 0x0e, 0x1e1: 0x0e, 0x1e2: 0x0e, 0x1e3: 0x0e, 0x1e4: 0x0e, 0x1e5: 0x0e, 0x1e6: 0x0e, 0x1e7: 0x0e, - 0x1e8: 0x0e, 0x1e9: 0x0e, 0x1ea: 0x0e, 0x1eb: 0x0e, 0x1ec: 0x0e, 0x1ed: 0x0e, 0x1ee: 0x0e, 0x1ef: 0x0e, - 0x1f0: 0x0e, 0x1f1: 0x0e, 0x1f2: 0x0e, 0x1f3: 0x0e, 0x1f4: 0x0e, 0x1f5: 0x0e, 0x1f6: 0x0e, - 0x1f8: 0x0e, 0x1f9: 0x0e, 0x1fa: 0x0e, 0x1fb: 0x0e, 0x1fc: 0x0e, 0x1fd: 0x0e, 0x1fe: 0x0e, 0x1ff: 0x0e, - // Block 0x8, offset 0x200 - 0x200: 0x0e, 0x201: 0x0e, 0x202: 0x0e, 0x203: 0x0e, 0x204: 0x0e, 0x205: 0x0e, 0x206: 0x0e, 0x207: 0x0e, - 0x208: 0x0e, 0x209: 0x0e, 0x20a: 0x0e, 0x20b: 0x0e, 0x20c: 0x0e, 0x20d: 0x0e, 0x20e: 0x0e, 0x20f: 0x0e, - 0x210: 0x0e, 0x211: 0x0e, 0x212: 0x0e, 0x213: 0x0e, 0x214: 0x0e, 0x215: 0x0e, 0x216: 0x0e, 0x217: 0x0e, - 0x218: 0x0e, 0x219: 
0x0e, 0x21a: 0x0e, 0x21b: 0x0e, 0x21c: 0x0e, 0x21d: 0x0e, 0x21e: 0x0e, 0x21f: 0x0e, - 0x220: 0x0e, 0x221: 0x0e, 0x222: 0x0e, 0x223: 0x0e, 0x224: 0x0e, 0x225: 0x0e, 0x226: 0x0e, 0x227: 0x0e, - 0x228: 0x0e, 0x229: 0x0e, 0x22a: 0x0e, 0x22b: 0x0e, 0x22c: 0x0e, 0x22d: 0x0e, 0x22e: 0x0e, 0x22f: 0x0e, - 0x230: 0x0e, 0x231: 0x0e, 0x232: 0x0e, 0x233: 0x0e, 0x234: 0x0e, 0x235: 0x0e, 0x236: 0x0e, 0x237: 0x0e, - 0x238: 0x0e, 0x239: 0x0e, 0x23a: 0x0e, 0x23b: 0x0e, 0x23c: 0x0e, 0x23d: 0x0e, 0x23e: 0x0e, 0x23f: 0x0e, - // Block 0x9, offset 0x240 - 0x240: 0x0e, 0x241: 0x0e, 0x242: 0x0e, 0x243: 0x0e, 0x244: 0x0e, 0x245: 0x0e, 0x246: 0x0e, 0x247: 0x0e, - 0x248: 0x0e, 0x249: 0x0e, 0x24a: 0x0e, 0x24b: 0x0e, 0x24c: 0x0e, 0x24d: 0x0e, 0x24e: 0x0e, 0x24f: 0x0e, - 0x250: 0x0e, 0x251: 0x0e, 0x252: 0x3b, 0x253: 0x3c, - 0x265: 0x3d, - 0x270: 0x0e, 0x271: 0x0e, 0x272: 0x0e, 0x273: 0x0e, 0x274: 0x0e, 0x275: 0x0e, 0x276: 0x0e, 0x277: 0x0e, - 0x278: 0x0e, 0x279: 0x0e, 0x27a: 0x0e, 0x27b: 0x0e, 0x27c: 0x0e, 0x27d: 0x0e, 0x27e: 0x0e, 0x27f: 0x0e, - // Block 0xa, offset 0x280 - 0x280: 0x0e, 0x281: 0x0e, 0x282: 0x0e, 0x283: 0x0e, 0x284: 0x0e, 0x285: 0x0e, 0x286: 0x0e, 0x287: 0x0e, - 0x288: 0x0e, 0x289: 0x0e, 0x28a: 0x0e, 0x28b: 0x0e, 0x28c: 0x0e, 0x28d: 0x0e, 0x28e: 0x0e, 0x28f: 0x0e, - 0x290: 0x0e, 0x291: 0x0e, 0x292: 0x0e, 0x293: 0x0e, 0x294: 0x0e, 0x295: 0x0e, 0x296: 0x0e, 0x297: 0x0e, - 0x298: 0x0e, 0x299: 0x0e, 0x29a: 0x0e, 0x29b: 0x0e, 0x29c: 0x0e, 0x29d: 0x0e, 0x29e: 0x3e, - // Block 0xb, offset 0x2c0 - 0x2c0: 0x08, 0x2c1: 0x08, 0x2c2: 0x08, 0x2c3: 0x08, 0x2c4: 0x08, 0x2c5: 0x08, 0x2c6: 0x08, 0x2c7: 0x08, - 0x2c8: 0x08, 0x2c9: 0x08, 0x2ca: 0x08, 0x2cb: 0x08, 0x2cc: 0x08, 0x2cd: 0x08, 0x2ce: 0x08, 0x2cf: 0x08, - 0x2d0: 0x08, 0x2d1: 0x08, 0x2d2: 0x08, 0x2d3: 0x08, 0x2d4: 0x08, 0x2d5: 0x08, 0x2d6: 0x08, 0x2d7: 0x08, - 0x2d8: 0x08, 0x2d9: 0x08, 0x2da: 0x08, 0x2db: 0x08, 0x2dc: 0x08, 0x2dd: 0x08, 0x2de: 0x08, 0x2df: 0x08, - 0x2e0: 0x08, 0x2e1: 0x08, 0x2e2: 0x08, 0x2e3: 0x08, 0x2e4: 0x08, 0x2e5: 0x08, 0x2e6: 0x08, 0x2e7: 0x08, - 0x2e8: 0x08, 0x2e9: 0x08, 0x2ea: 0x08, 0x2eb: 0x08, 0x2ec: 0x08, 0x2ed: 0x08, 0x2ee: 0x08, 0x2ef: 0x08, - 0x2f0: 0x08, 0x2f1: 0x08, 0x2f2: 0x08, 0x2f3: 0x08, 0x2f4: 0x08, 0x2f5: 0x08, 0x2f6: 0x08, 0x2f7: 0x08, - 0x2f8: 0x08, 0x2f9: 0x08, 0x2fa: 0x08, 0x2fb: 0x08, 0x2fc: 0x08, 0x2fd: 0x08, 0x2fe: 0x08, 0x2ff: 0x08, - // Block 0xc, offset 0x300 - 0x300: 0x08, 0x301: 0x08, 0x302: 0x08, 0x303: 0x08, 0x304: 0x08, 0x305: 0x08, 0x306: 0x08, 0x307: 0x08, - 0x308: 0x08, 0x309: 0x08, 0x30a: 0x08, 0x30b: 0x08, 0x30c: 0x08, 0x30d: 0x08, 0x30e: 0x08, 0x30f: 0x08, - 0x310: 0x08, 0x311: 0x08, 0x312: 0x08, 0x313: 0x08, 0x314: 0x08, 0x315: 0x08, 0x316: 0x08, 0x317: 0x08, - 0x318: 0x08, 0x319: 0x08, 0x31a: 0x08, 0x31b: 0x08, 0x31c: 0x08, 0x31d: 0x08, 0x31e: 0x08, 0x31f: 0x08, - 0x320: 0x08, 0x321: 0x08, 0x322: 0x08, 0x323: 0x08, 0x324: 0x0e, 0x325: 0x0e, 0x326: 0x0e, 0x327: 0x0e, - 0x328: 0x0e, 0x329: 0x0e, 0x32a: 0x0e, 0x32b: 0x0e, - 0x338: 0x3f, 0x339: 0x40, 0x33c: 0x41, 0x33d: 0x42, 0x33e: 0x43, 0x33f: 0x44, - // Block 0xd, offset 0x340 - 0x37f: 0x45, - // Block 0xe, offset 0x380 - 0x380: 0x0e, 0x381: 0x0e, 0x382: 0x0e, 0x383: 0x0e, 0x384: 0x0e, 0x385: 0x0e, 0x386: 0x0e, 0x387: 0x0e, - 0x388: 0x0e, 0x389: 0x0e, 0x38a: 0x0e, 0x38b: 0x0e, 0x38c: 0x0e, 0x38d: 0x0e, 0x38e: 0x0e, 0x38f: 0x0e, - 0x390: 0x0e, 0x391: 0x0e, 0x392: 0x0e, 0x393: 0x0e, 0x394: 0x0e, 0x395: 0x0e, 0x396: 0x0e, 0x397: 0x0e, - 0x398: 0x0e, 0x399: 0x0e, 0x39a: 0x0e, 0x39b: 0x0e, 0x39c: 0x0e, 0x39d: 0x0e, 0x39e: 0x0e, 0x39f: 0x46, - 0x3a0: 0x0e, 
0x3a1: 0x0e, 0x3a2: 0x0e, 0x3a3: 0x0e, 0x3a4: 0x0e, 0x3a5: 0x0e, 0x3a6: 0x0e, 0x3a7: 0x0e, - 0x3a8: 0x0e, 0x3a9: 0x0e, 0x3aa: 0x0e, 0x3ab: 0x47, - // Block 0xf, offset 0x3c0 - 0x3c0: 0x0e, 0x3c1: 0x0e, 0x3c2: 0x0e, 0x3c3: 0x0e, 0x3c4: 0x48, 0x3c5: 0x49, 0x3c6: 0x0e, 0x3c7: 0x0e, - 0x3c8: 0x0e, 0x3c9: 0x0e, 0x3ca: 0x0e, 0x3cb: 0x4a, - // Block 0x10, offset 0x400 - 0x400: 0x4b, 0x403: 0x4c, 0x404: 0x4d, 0x405: 0x4e, 0x406: 0x4f, - 0x408: 0x50, 0x409: 0x51, 0x40c: 0x52, 0x40d: 0x53, 0x40e: 0x54, 0x40f: 0x55, - 0x410: 0x3a, 0x411: 0x56, 0x412: 0x0e, 0x413: 0x57, 0x414: 0x58, 0x415: 0x59, 0x416: 0x5a, 0x417: 0x5b, - 0x418: 0x0e, 0x419: 0x5c, 0x41a: 0x0e, 0x41b: 0x5d, 0x41f: 0x5e, - 0x424: 0x5f, 0x425: 0x60, 0x426: 0x61, 0x427: 0x62, - 0x429: 0x63, 0x42a: 0x64, - // Block 0x11, offset 0x440 - 0x456: 0x0b, 0x457: 0x06, - 0x458: 0x0c, 0x45b: 0x0d, 0x45f: 0x0e, - 0x460: 0x06, 0x461: 0x06, 0x462: 0x06, 0x463: 0x06, 0x464: 0x06, 0x465: 0x06, 0x466: 0x06, 0x467: 0x06, - 0x468: 0x06, 0x469: 0x06, 0x46a: 0x06, 0x46b: 0x06, 0x46c: 0x06, 0x46d: 0x06, 0x46e: 0x06, 0x46f: 0x06, - 0x470: 0x06, 0x471: 0x06, 0x472: 0x06, 0x473: 0x06, 0x474: 0x06, 0x475: 0x06, 0x476: 0x06, 0x477: 0x06, - 0x478: 0x06, 0x479: 0x06, 0x47a: 0x06, 0x47b: 0x06, 0x47c: 0x06, 0x47d: 0x06, 0x47e: 0x06, 0x47f: 0x06, - // Block 0x12, offset 0x480 - 0x484: 0x08, 0x485: 0x08, 0x486: 0x08, 0x487: 0x09, - // Block 0x13, offset 0x4c0 - 0x4c0: 0x08, 0x4c1: 0x08, 0x4c2: 0x08, 0x4c3: 0x08, 0x4c4: 0x08, 0x4c5: 0x08, 0x4c6: 0x08, 0x4c7: 0x08, - 0x4c8: 0x08, 0x4c9: 0x08, 0x4ca: 0x08, 0x4cb: 0x08, 0x4cc: 0x08, 0x4cd: 0x08, 0x4ce: 0x08, 0x4cf: 0x08, - 0x4d0: 0x08, 0x4d1: 0x08, 0x4d2: 0x08, 0x4d3: 0x08, 0x4d4: 0x08, 0x4d5: 0x08, 0x4d6: 0x08, 0x4d7: 0x08, - 0x4d8: 0x08, 0x4d9: 0x08, 0x4da: 0x08, 0x4db: 0x08, 0x4dc: 0x08, 0x4dd: 0x08, 0x4de: 0x08, 0x4df: 0x08, - 0x4e0: 0x08, 0x4e1: 0x08, 0x4e2: 0x08, 0x4e3: 0x08, 0x4e4: 0x08, 0x4e5: 0x08, 0x4e6: 0x08, 0x4e7: 0x08, - 0x4e8: 0x08, 0x4e9: 0x08, 0x4ea: 0x08, 0x4eb: 0x08, 0x4ec: 0x08, 0x4ed: 0x08, 0x4ee: 0x08, 0x4ef: 0x08, - 0x4f0: 0x08, 0x4f1: 0x08, 0x4f2: 0x08, 0x4f3: 0x08, 0x4f4: 0x08, 0x4f5: 0x08, 0x4f6: 0x08, 0x4f7: 0x08, - 0x4f8: 0x08, 0x4f9: 0x08, 0x4fa: 0x08, 0x4fb: 0x08, 0x4fc: 0x08, 0x4fd: 0x08, 0x4fe: 0x08, 0x4ff: 0x65, - // Block 0x14, offset 0x500 - 0x520: 0x10, - 0x530: 0x09, 0x531: 0x09, 0x532: 0x09, 0x533: 0x09, 0x534: 0x09, 0x535: 0x09, 0x536: 0x09, 0x537: 0x09, - 0x538: 0x09, 0x539: 0x09, 0x53a: 0x09, 0x53b: 0x09, 0x53c: 0x09, 0x53d: 0x09, 0x53e: 0x09, 0x53f: 0x11, - // Block 0x15, offset 0x540 - 0x540: 0x09, 0x541: 0x09, 0x542: 0x09, 0x543: 0x09, 0x544: 0x09, 0x545: 0x09, 0x546: 0x09, 0x547: 0x09, - 0x548: 0x09, 0x549: 0x09, 0x54a: 0x09, 0x54b: 0x09, 0x54c: 0x09, 0x54d: 0x09, 0x54e: 0x09, 0x54f: 0x11, -} - -// inverseData contains 4-byte entries of the following format: -// -// <0 padding> -// -// The last byte of the UTF-8-encoded rune is xor-ed with the last byte of the -// UTF-8 encoding of the original rune. Mappings often have the following -// pattern: -// -// A -> A (U+FF21 -> U+0041) -// B -> B (U+FF22 -> U+0042) -// ... -// -// By xor-ing the last byte the same entry can be shared by many mappings. This -// reduces the total number of distinct entries by about two thirds. -// The resulting entry for the aforementioned mappings is -// -// { 0x01, 0xE0, 0x00, 0x00 } -// -// Using this entry to map U+FF21 (UTF-8 [EF BC A1]), we get -// -// E0 ^ A1 = 41. -// -// Similarly, for U+FF22 (UTF-8 [EF BC A2]), we get -// -// E0 ^ A2 = 42. 
-// -// Note that because of the xor-ing, the byte sequence stored in the entry is -// not valid UTF-8. -var inverseData = [150][4]byte{ - {0x00, 0x00, 0x00, 0x00}, - {0x03, 0xe3, 0x80, 0xa0}, - {0x03, 0xef, 0xbc, 0xa0}, - {0x03, 0xef, 0xbc, 0xe0}, - {0x03, 0xef, 0xbd, 0xe0}, - {0x03, 0xef, 0xbf, 0x02}, - {0x03, 0xef, 0xbf, 0x00}, - {0x03, 0xef, 0xbf, 0x0e}, - {0x03, 0xef, 0xbf, 0x0c}, - {0x03, 0xef, 0xbf, 0x0f}, - {0x03, 0xef, 0xbf, 0x39}, - {0x03, 0xef, 0xbf, 0x3b}, - {0x03, 0xef, 0xbf, 0x3f}, - {0x03, 0xef, 0xbf, 0x2a}, - {0x03, 0xef, 0xbf, 0x0d}, - {0x03, 0xef, 0xbf, 0x25}, - {0x03, 0xef, 0xbd, 0x1a}, - {0x03, 0xef, 0xbd, 0x26}, - {0x01, 0xa0, 0x00, 0x00}, - {0x03, 0xef, 0xbd, 0x25}, - {0x03, 0xef, 0xbd, 0x23}, - {0x03, 0xef, 0xbd, 0x2e}, - {0x03, 0xef, 0xbe, 0x07}, - {0x03, 0xef, 0xbe, 0x05}, - {0x03, 0xef, 0xbd, 0x06}, - {0x03, 0xef, 0xbd, 0x13}, - {0x03, 0xef, 0xbd, 0x0b}, - {0x03, 0xef, 0xbd, 0x16}, - {0x03, 0xef, 0xbd, 0x0c}, - {0x03, 0xef, 0xbd, 0x15}, - {0x03, 0xef, 0xbd, 0x0d}, - {0x03, 0xef, 0xbd, 0x1c}, - {0x03, 0xef, 0xbd, 0x02}, - {0x03, 0xef, 0xbd, 0x1f}, - {0x03, 0xef, 0xbd, 0x1d}, - {0x03, 0xef, 0xbd, 0x17}, - {0x03, 0xef, 0xbd, 0x08}, - {0x03, 0xef, 0xbd, 0x09}, - {0x03, 0xef, 0xbd, 0x0e}, - {0x03, 0xef, 0xbd, 0x04}, - {0x03, 0xef, 0xbd, 0x05}, - {0x03, 0xef, 0xbe, 0x3f}, - {0x03, 0xef, 0xbe, 0x00}, - {0x03, 0xef, 0xbd, 0x2c}, - {0x03, 0xef, 0xbe, 0x06}, - {0x03, 0xef, 0xbe, 0x0c}, - {0x03, 0xef, 0xbe, 0x0f}, - {0x03, 0xef, 0xbe, 0x0d}, - {0x03, 0xef, 0xbe, 0x0b}, - {0x03, 0xef, 0xbe, 0x19}, - {0x03, 0xef, 0xbe, 0x15}, - {0x03, 0xef, 0xbe, 0x11}, - {0x03, 0xef, 0xbe, 0x31}, - {0x03, 0xef, 0xbe, 0x33}, - {0x03, 0xef, 0xbd, 0x0f}, - {0x03, 0xef, 0xbe, 0x30}, - {0x03, 0xef, 0xbe, 0x3e}, - {0x03, 0xef, 0xbe, 0x32}, - {0x03, 0xef, 0xbe, 0x36}, - {0x03, 0xef, 0xbd, 0x14}, - {0x03, 0xef, 0xbe, 0x2e}, - {0x03, 0xef, 0xbd, 0x1e}, - {0x03, 0xef, 0xbe, 0x10}, - {0x03, 0xef, 0xbf, 0x13}, - {0x03, 0xef, 0xbf, 0x15}, - {0x03, 0xef, 0xbf, 0x17}, - {0x03, 0xef, 0xbf, 0x1f}, - {0x03, 0xef, 0xbf, 0x1d}, - {0x03, 0xef, 0xbf, 0x1b}, - {0x03, 0xef, 0xbf, 0x09}, - {0x03, 0xef, 0xbf, 0x0b}, - {0x03, 0xef, 0xbf, 0x37}, - {0x03, 0xef, 0xbe, 0x04}, - {0x01, 0xe0, 0x00, 0x00}, - {0x03, 0xe2, 0xa6, 0x1a}, - {0x03, 0xe2, 0xa6, 0x26}, - {0x03, 0xe3, 0x80, 0x23}, - {0x03, 0xe3, 0x80, 0x2e}, - {0x03, 0xe3, 0x80, 0x25}, - {0x03, 0xe3, 0x83, 0x1e}, - {0x03, 0xe3, 0x83, 0x14}, - {0x03, 0xe3, 0x82, 0x06}, - {0x03, 0xe3, 0x82, 0x0b}, - {0x03, 0xe3, 0x82, 0x0c}, - {0x03, 0xe3, 0x82, 0x0d}, - {0x03, 0xe3, 0x82, 0x02}, - {0x03, 0xe3, 0x83, 0x0f}, - {0x03, 0xe3, 0x83, 0x08}, - {0x03, 0xe3, 0x83, 0x09}, - {0x03, 0xe3, 0x83, 0x2c}, - {0x03, 0xe3, 0x83, 0x0c}, - {0x03, 0xe3, 0x82, 0x13}, - {0x03, 0xe3, 0x82, 0x16}, - {0x03, 0xe3, 0x82, 0x15}, - {0x03, 0xe3, 0x82, 0x1c}, - {0x03, 0xe3, 0x82, 0x1f}, - {0x03, 0xe3, 0x82, 0x1d}, - {0x03, 0xe3, 0x82, 0x1a}, - {0x03, 0xe3, 0x82, 0x17}, - {0x03, 0xe3, 0x82, 0x08}, - {0x03, 0xe3, 0x82, 0x09}, - {0x03, 0xe3, 0x82, 0x0e}, - {0x03, 0xe3, 0x82, 0x04}, - {0x03, 0xe3, 0x82, 0x05}, - {0x03, 0xe3, 0x82, 0x3f}, - {0x03, 0xe3, 0x83, 0x00}, - {0x03, 0xe3, 0x83, 0x06}, - {0x03, 0xe3, 0x83, 0x05}, - {0x03, 0xe3, 0x83, 0x0d}, - {0x03, 0xe3, 0x83, 0x0b}, - {0x03, 0xe3, 0x83, 0x07}, - {0x03, 0xe3, 0x83, 0x19}, - {0x03, 0xe3, 0x83, 0x15}, - {0x03, 0xe3, 0x83, 0x11}, - {0x03, 0xe3, 0x83, 0x31}, - {0x03, 0xe3, 0x83, 0x33}, - {0x03, 0xe3, 0x83, 0x30}, - {0x03, 0xe3, 0x83, 0x3e}, - {0x03, 0xe3, 0x83, 0x32}, - {0x03, 0xe3, 0x83, 0x36}, - {0x03, 0xe3, 0x83, 0x2e}, - {0x03, 0xe3, 0x82, 0x07}, - 
{0x03, 0xe3, 0x85, 0x04}, - {0x03, 0xe3, 0x84, 0x10}, - {0x03, 0xe3, 0x85, 0x30}, - {0x03, 0xe3, 0x85, 0x0d}, - {0x03, 0xe3, 0x85, 0x13}, - {0x03, 0xe3, 0x85, 0x15}, - {0x03, 0xe3, 0x85, 0x17}, - {0x03, 0xe3, 0x85, 0x1f}, - {0x03, 0xe3, 0x85, 0x1d}, - {0x03, 0xe3, 0x85, 0x1b}, - {0x03, 0xe3, 0x85, 0x09}, - {0x03, 0xe3, 0x85, 0x0f}, - {0x03, 0xe3, 0x85, 0x0b}, - {0x03, 0xe3, 0x85, 0x37}, - {0x03, 0xe3, 0x85, 0x3b}, - {0x03, 0xe3, 0x85, 0x39}, - {0x03, 0xe3, 0x85, 0x3f}, - {0x02, 0xc2, 0x02, 0x00}, - {0x02, 0xc2, 0x0e, 0x00}, - {0x02, 0xc2, 0x0c, 0x00}, - {0x02, 0xc2, 0x00, 0x00}, - {0x03, 0xe2, 0x82, 0x0f}, - {0x03, 0xe2, 0x94, 0x2a}, - {0x03, 0xe2, 0x86, 0x39}, - {0x03, 0xe2, 0x86, 0x3b}, - {0x03, 0xe2, 0x86, 0x3f}, - {0x03, 0xe2, 0x96, 0x0d}, - {0x03, 0xe2, 0x97, 0x25}, -} - -// Total table size 15320 bytes (14KiB) diff --git a/vendor/golang.org/x/text/width/tables13.0.0.go b/vendor/golang.org/x/text/width/tables13.0.0.go deleted file mode 100644 index 40c169edf..000000000 --- a/vendor/golang.org/x/text/width/tables13.0.0.go +++ /dev/null @@ -1,1361 +0,0 @@ -// Code generated by running "go generate" in golang.org/x/text. DO NOT EDIT. - -//go:build go1.16 && !go1.21 - -package width - -// UnicodeVersion is the Unicode version from which the tables in this package are derived. -const UnicodeVersion = "13.0.0" - -// lookup returns the trie value for the first UTF-8 encoding in s and -// the width in bytes of this encoding. The size will be 0 if s does not -// hold enough bytes to complete the encoding. len(s) must be greater than 0. -func (t *widthTrie) lookup(s []byte) (v uint16, sz int) { - c0 := s[0] - switch { - case c0 < 0x80: // is ASCII - return widthValues[c0], 1 - case c0 < 0xC2: - return 0, 1 // Illegal UTF-8: not a starter, not ASCII. - case c0 < 0xE0: // 2-byte UTF-8 - if len(s) < 2 { - return 0, 0 - } - i := widthIndex[c0] - c1 := s[1] - if c1 < 0x80 || 0xC0 <= c1 { - return 0, 1 // Illegal UTF-8: not a continuation byte. - } - return t.lookupValue(uint32(i), c1), 2 - case c0 < 0xF0: // 3-byte UTF-8 - if len(s) < 3 { - return 0, 0 - } - i := widthIndex[c0] - c1 := s[1] - if c1 < 0x80 || 0xC0 <= c1 { - return 0, 1 // Illegal UTF-8: not a continuation byte. - } - o := uint32(i)<<6 + uint32(c1) - i = widthIndex[o] - c2 := s[2] - if c2 < 0x80 || 0xC0 <= c2 { - return 0, 2 // Illegal UTF-8: not a continuation byte. - } - return t.lookupValue(uint32(i), c2), 3 - case c0 < 0xF8: // 4-byte UTF-8 - if len(s) < 4 { - return 0, 0 - } - i := widthIndex[c0] - c1 := s[1] - if c1 < 0x80 || 0xC0 <= c1 { - return 0, 1 // Illegal UTF-8: not a continuation byte. - } - o := uint32(i)<<6 + uint32(c1) - i = widthIndex[o] - c2 := s[2] - if c2 < 0x80 || 0xC0 <= c2 { - return 0, 2 // Illegal UTF-8: not a continuation byte. - } - o = uint32(i)<<6 + uint32(c2) - i = widthIndex[o] - c3 := s[3] - if c3 < 0x80 || 0xC0 <= c3 { - return 0, 3 // Illegal UTF-8: not a continuation byte. - } - return t.lookupValue(uint32(i), c3), 4 - } - // Illegal rune - return 0, 1 -} - -// lookupUnsafe returns the trie value for the first UTF-8 encoding in s. -// s must start with a full and valid UTF-8 encoded rune. 
-func (t *widthTrie) lookupUnsafe(s []byte) uint16 { - c0 := s[0] - if c0 < 0x80 { // is ASCII - return widthValues[c0] - } - i := widthIndex[c0] - if c0 < 0xE0 { // 2-byte UTF-8 - return t.lookupValue(uint32(i), s[1]) - } - i = widthIndex[uint32(i)<<6+uint32(s[1])] - if c0 < 0xF0 { // 3-byte UTF-8 - return t.lookupValue(uint32(i), s[2]) - } - i = widthIndex[uint32(i)<<6+uint32(s[2])] - if c0 < 0xF8 { // 4-byte UTF-8 - return t.lookupValue(uint32(i), s[3]) - } - return 0 -} - -// lookupString returns the trie value for the first UTF-8 encoding in s and -// the width in bytes of this encoding. The size will be 0 if s does not -// hold enough bytes to complete the encoding. len(s) must be greater than 0. -func (t *widthTrie) lookupString(s string) (v uint16, sz int) { - c0 := s[0] - switch { - case c0 < 0x80: // is ASCII - return widthValues[c0], 1 - case c0 < 0xC2: - return 0, 1 // Illegal UTF-8: not a starter, not ASCII. - case c0 < 0xE0: // 2-byte UTF-8 - if len(s) < 2 { - return 0, 0 - } - i := widthIndex[c0] - c1 := s[1] - if c1 < 0x80 || 0xC0 <= c1 { - return 0, 1 // Illegal UTF-8: not a continuation byte. - } - return t.lookupValue(uint32(i), c1), 2 - case c0 < 0xF0: // 3-byte UTF-8 - if len(s) < 3 { - return 0, 0 - } - i := widthIndex[c0] - c1 := s[1] - if c1 < 0x80 || 0xC0 <= c1 { - return 0, 1 // Illegal UTF-8: not a continuation byte. - } - o := uint32(i)<<6 + uint32(c1) - i = widthIndex[o] - c2 := s[2] - if c2 < 0x80 || 0xC0 <= c2 { - return 0, 2 // Illegal UTF-8: not a continuation byte. - } - return t.lookupValue(uint32(i), c2), 3 - case c0 < 0xF8: // 4-byte UTF-8 - if len(s) < 4 { - return 0, 0 - } - i := widthIndex[c0] - c1 := s[1] - if c1 < 0x80 || 0xC0 <= c1 { - return 0, 1 // Illegal UTF-8: not a continuation byte. - } - o := uint32(i)<<6 + uint32(c1) - i = widthIndex[o] - c2 := s[2] - if c2 < 0x80 || 0xC0 <= c2 { - return 0, 2 // Illegal UTF-8: not a continuation byte. - } - o = uint32(i)<<6 + uint32(c2) - i = widthIndex[o] - c3 := s[3] - if c3 < 0x80 || 0xC0 <= c3 { - return 0, 3 // Illegal UTF-8: not a continuation byte. - } - return t.lookupValue(uint32(i), c3), 4 - } - // Illegal rune - return 0, 1 -} - -// lookupStringUnsafe returns the trie value for the first UTF-8 encoding in s. -// s must start with a full and valid UTF-8 encoded rune. -func (t *widthTrie) lookupStringUnsafe(s string) uint16 { - c0 := s[0] - if c0 < 0x80 { // is ASCII - return widthValues[c0] - } - i := widthIndex[c0] - if c0 < 0xE0 { // 2-byte UTF-8 - return t.lookupValue(uint32(i), s[1]) - } - i = widthIndex[uint32(i)<<6+uint32(s[1])] - if c0 < 0xF0 { // 3-byte UTF-8 - return t.lookupValue(uint32(i), s[2]) - } - i = widthIndex[uint32(i)<<6+uint32(s[2])] - if c0 < 0xF8 { // 4-byte UTF-8 - return t.lookupValue(uint32(i), s[3]) - } - return 0 -} - -// widthTrie. Total size: 14848 bytes (14.50 KiB). Checksum: 17e24343536472f6. -type widthTrie struct{} - -func newWidthTrie(i int) *widthTrie { - return &widthTrie{} -} - -// lookupValue determines the type of block n and looks up the value for b. -func (t *widthTrie) lookupValue(n uint32, b byte) uint16 { - switch { - default: - return uint16(widthValues[n<<6+uint32(b)]) - } -} - -// widthValues: 105 blocks, 6720 entries, 13440 bytes -// The third block is the zero block. 
-var widthValues = [6720]uint16{ - // Block 0x0, offset 0x0 - 0x20: 0x6001, 0x21: 0x6002, 0x22: 0x6002, 0x23: 0x6002, - 0x24: 0x6002, 0x25: 0x6002, 0x26: 0x6002, 0x27: 0x6002, 0x28: 0x6002, 0x29: 0x6002, - 0x2a: 0x6002, 0x2b: 0x6002, 0x2c: 0x6002, 0x2d: 0x6002, 0x2e: 0x6002, 0x2f: 0x6002, - 0x30: 0x6002, 0x31: 0x6002, 0x32: 0x6002, 0x33: 0x6002, 0x34: 0x6002, 0x35: 0x6002, - 0x36: 0x6002, 0x37: 0x6002, 0x38: 0x6002, 0x39: 0x6002, 0x3a: 0x6002, 0x3b: 0x6002, - 0x3c: 0x6002, 0x3d: 0x6002, 0x3e: 0x6002, 0x3f: 0x6002, - // Block 0x1, offset 0x40 - 0x40: 0x6003, 0x41: 0x6003, 0x42: 0x6003, 0x43: 0x6003, 0x44: 0x6003, 0x45: 0x6003, - 0x46: 0x6003, 0x47: 0x6003, 0x48: 0x6003, 0x49: 0x6003, 0x4a: 0x6003, 0x4b: 0x6003, - 0x4c: 0x6003, 0x4d: 0x6003, 0x4e: 0x6003, 0x4f: 0x6003, 0x50: 0x6003, 0x51: 0x6003, - 0x52: 0x6003, 0x53: 0x6003, 0x54: 0x6003, 0x55: 0x6003, 0x56: 0x6003, 0x57: 0x6003, - 0x58: 0x6003, 0x59: 0x6003, 0x5a: 0x6003, 0x5b: 0x6003, 0x5c: 0x6003, 0x5d: 0x6003, - 0x5e: 0x6003, 0x5f: 0x6003, 0x60: 0x6004, 0x61: 0x6004, 0x62: 0x6004, 0x63: 0x6004, - 0x64: 0x6004, 0x65: 0x6004, 0x66: 0x6004, 0x67: 0x6004, 0x68: 0x6004, 0x69: 0x6004, - 0x6a: 0x6004, 0x6b: 0x6004, 0x6c: 0x6004, 0x6d: 0x6004, 0x6e: 0x6004, 0x6f: 0x6004, - 0x70: 0x6004, 0x71: 0x6004, 0x72: 0x6004, 0x73: 0x6004, 0x74: 0x6004, 0x75: 0x6004, - 0x76: 0x6004, 0x77: 0x6004, 0x78: 0x6004, 0x79: 0x6004, 0x7a: 0x6004, 0x7b: 0x6004, - 0x7c: 0x6004, 0x7d: 0x6004, 0x7e: 0x6004, - // Block 0x2, offset 0x80 - // Block 0x3, offset 0xc0 - 0xe1: 0x2000, 0xe2: 0x6005, 0xe3: 0x6005, - 0xe4: 0x2000, 0xe5: 0x6006, 0xe6: 0x6005, 0xe7: 0x2000, 0xe8: 0x2000, - 0xea: 0x2000, 0xec: 0x6007, 0xed: 0x2000, 0xee: 0x2000, 0xef: 0x6008, - 0xf0: 0x2000, 0xf1: 0x2000, 0xf2: 0x2000, 0xf3: 0x2000, 0xf4: 0x2000, - 0xf6: 0x2000, 0xf7: 0x2000, 0xf8: 0x2000, 0xf9: 0x2000, 0xfa: 0x2000, - 0xfc: 0x2000, 0xfd: 0x2000, 0xfe: 0x2000, 0xff: 0x2000, - // Block 0x4, offset 0x100 - 0x106: 0x2000, - 0x110: 0x2000, - 0x117: 0x2000, - 0x118: 0x2000, - 0x11e: 0x2000, 0x11f: 0x2000, 0x120: 0x2000, 0x121: 0x2000, - 0x126: 0x2000, 0x128: 0x2000, 0x129: 0x2000, - 0x12a: 0x2000, 0x12c: 0x2000, 0x12d: 0x2000, - 0x130: 0x2000, 0x132: 0x2000, 0x133: 0x2000, - 0x137: 0x2000, 0x138: 0x2000, 0x139: 0x2000, 0x13a: 0x2000, - 0x13c: 0x2000, 0x13e: 0x2000, - // Block 0x5, offset 0x140 - 0x141: 0x2000, - 0x151: 0x2000, - 0x153: 0x2000, - 0x15b: 0x2000, - 0x166: 0x2000, 0x167: 0x2000, - 0x16b: 0x2000, - 0x171: 0x2000, 0x172: 0x2000, 0x173: 0x2000, - 0x178: 0x2000, - 0x17f: 0x2000, - // Block 0x6, offset 0x180 - 0x180: 0x2000, 0x181: 0x2000, 0x182: 0x2000, 0x184: 0x2000, - 0x188: 0x2000, 0x189: 0x2000, 0x18a: 0x2000, 0x18b: 0x2000, - 0x18d: 0x2000, - 0x192: 0x2000, 0x193: 0x2000, - 0x1a6: 0x2000, 0x1a7: 0x2000, - 0x1ab: 0x2000, - // Block 0x7, offset 0x1c0 - 0x1ce: 0x2000, 0x1d0: 0x2000, - 0x1d2: 0x2000, 0x1d4: 0x2000, 0x1d6: 0x2000, - 0x1d8: 0x2000, 0x1da: 0x2000, 0x1dc: 0x2000, - // Block 0x8, offset 0x200 - 0x211: 0x2000, - 0x221: 0x2000, - // Block 0x9, offset 0x240 - 0x244: 0x2000, - 0x247: 0x2000, 0x249: 0x2000, 0x24a: 0x2000, 0x24b: 0x2000, - 0x24d: 0x2000, 0x250: 0x2000, - 0x258: 0x2000, 0x259: 0x2000, 0x25a: 0x2000, 0x25b: 0x2000, 0x25d: 0x2000, - 0x25f: 0x2000, - // Block 0xa, offset 0x280 - 0x280: 0x2000, 0x281: 0x2000, 0x282: 0x2000, 0x283: 0x2000, 0x284: 0x2000, 0x285: 0x2000, - 0x286: 0x2000, 0x287: 0x2000, 0x288: 0x2000, 0x289: 0x2000, 0x28a: 0x2000, 0x28b: 0x2000, - 0x28c: 0x2000, 0x28d: 0x2000, 0x28e: 0x2000, 0x28f: 0x2000, 0x290: 0x2000, 0x291: 0x2000, - 0x292: 0x2000, 0x293: 
0x2000, 0x294: 0x2000, 0x295: 0x2000, 0x296: 0x2000, 0x297: 0x2000, - 0x298: 0x2000, 0x299: 0x2000, 0x29a: 0x2000, 0x29b: 0x2000, 0x29c: 0x2000, 0x29d: 0x2000, - 0x29e: 0x2000, 0x29f: 0x2000, 0x2a0: 0x2000, 0x2a1: 0x2000, 0x2a2: 0x2000, 0x2a3: 0x2000, - 0x2a4: 0x2000, 0x2a5: 0x2000, 0x2a6: 0x2000, 0x2a7: 0x2000, 0x2a8: 0x2000, 0x2a9: 0x2000, - 0x2aa: 0x2000, 0x2ab: 0x2000, 0x2ac: 0x2000, 0x2ad: 0x2000, 0x2ae: 0x2000, 0x2af: 0x2000, - 0x2b0: 0x2000, 0x2b1: 0x2000, 0x2b2: 0x2000, 0x2b3: 0x2000, 0x2b4: 0x2000, 0x2b5: 0x2000, - 0x2b6: 0x2000, 0x2b7: 0x2000, 0x2b8: 0x2000, 0x2b9: 0x2000, 0x2ba: 0x2000, 0x2bb: 0x2000, - 0x2bc: 0x2000, 0x2bd: 0x2000, 0x2be: 0x2000, 0x2bf: 0x2000, - // Block 0xb, offset 0x2c0 - 0x2c0: 0x2000, 0x2c1: 0x2000, 0x2c2: 0x2000, 0x2c3: 0x2000, 0x2c4: 0x2000, 0x2c5: 0x2000, - 0x2c6: 0x2000, 0x2c7: 0x2000, 0x2c8: 0x2000, 0x2c9: 0x2000, 0x2ca: 0x2000, 0x2cb: 0x2000, - 0x2cc: 0x2000, 0x2cd: 0x2000, 0x2ce: 0x2000, 0x2cf: 0x2000, 0x2d0: 0x2000, 0x2d1: 0x2000, - 0x2d2: 0x2000, 0x2d3: 0x2000, 0x2d4: 0x2000, 0x2d5: 0x2000, 0x2d6: 0x2000, 0x2d7: 0x2000, - 0x2d8: 0x2000, 0x2d9: 0x2000, 0x2da: 0x2000, 0x2db: 0x2000, 0x2dc: 0x2000, 0x2dd: 0x2000, - 0x2de: 0x2000, 0x2df: 0x2000, 0x2e0: 0x2000, 0x2e1: 0x2000, 0x2e2: 0x2000, 0x2e3: 0x2000, - 0x2e4: 0x2000, 0x2e5: 0x2000, 0x2e6: 0x2000, 0x2e7: 0x2000, 0x2e8: 0x2000, 0x2e9: 0x2000, - 0x2ea: 0x2000, 0x2eb: 0x2000, 0x2ec: 0x2000, 0x2ed: 0x2000, 0x2ee: 0x2000, 0x2ef: 0x2000, - // Block 0xc, offset 0x300 - 0x311: 0x2000, - 0x312: 0x2000, 0x313: 0x2000, 0x314: 0x2000, 0x315: 0x2000, 0x316: 0x2000, 0x317: 0x2000, - 0x318: 0x2000, 0x319: 0x2000, 0x31a: 0x2000, 0x31b: 0x2000, 0x31c: 0x2000, 0x31d: 0x2000, - 0x31e: 0x2000, 0x31f: 0x2000, 0x320: 0x2000, 0x321: 0x2000, 0x323: 0x2000, - 0x324: 0x2000, 0x325: 0x2000, 0x326: 0x2000, 0x327: 0x2000, 0x328: 0x2000, 0x329: 0x2000, - 0x331: 0x2000, 0x332: 0x2000, 0x333: 0x2000, 0x334: 0x2000, 0x335: 0x2000, - 0x336: 0x2000, 0x337: 0x2000, 0x338: 0x2000, 0x339: 0x2000, 0x33a: 0x2000, 0x33b: 0x2000, - 0x33c: 0x2000, 0x33d: 0x2000, 0x33e: 0x2000, 0x33f: 0x2000, - // Block 0xd, offset 0x340 - 0x340: 0x2000, 0x341: 0x2000, 0x343: 0x2000, 0x344: 0x2000, 0x345: 0x2000, - 0x346: 0x2000, 0x347: 0x2000, 0x348: 0x2000, 0x349: 0x2000, - // Block 0xe, offset 0x380 - 0x381: 0x2000, - 0x390: 0x2000, 0x391: 0x2000, - 0x392: 0x2000, 0x393: 0x2000, 0x394: 0x2000, 0x395: 0x2000, 0x396: 0x2000, 0x397: 0x2000, - 0x398: 0x2000, 0x399: 0x2000, 0x39a: 0x2000, 0x39b: 0x2000, 0x39c: 0x2000, 0x39d: 0x2000, - 0x39e: 0x2000, 0x39f: 0x2000, 0x3a0: 0x2000, 0x3a1: 0x2000, 0x3a2: 0x2000, 0x3a3: 0x2000, - 0x3a4: 0x2000, 0x3a5: 0x2000, 0x3a6: 0x2000, 0x3a7: 0x2000, 0x3a8: 0x2000, 0x3a9: 0x2000, - 0x3aa: 0x2000, 0x3ab: 0x2000, 0x3ac: 0x2000, 0x3ad: 0x2000, 0x3ae: 0x2000, 0x3af: 0x2000, - 0x3b0: 0x2000, 0x3b1: 0x2000, 0x3b2: 0x2000, 0x3b3: 0x2000, 0x3b4: 0x2000, 0x3b5: 0x2000, - 0x3b6: 0x2000, 0x3b7: 0x2000, 0x3b8: 0x2000, 0x3b9: 0x2000, 0x3ba: 0x2000, 0x3bb: 0x2000, - 0x3bc: 0x2000, 0x3bd: 0x2000, 0x3be: 0x2000, 0x3bf: 0x2000, - // Block 0xf, offset 0x3c0 - 0x3c0: 0x2000, 0x3c1: 0x2000, 0x3c2: 0x2000, 0x3c3: 0x2000, 0x3c4: 0x2000, 0x3c5: 0x2000, - 0x3c6: 0x2000, 0x3c7: 0x2000, 0x3c8: 0x2000, 0x3c9: 0x2000, 0x3ca: 0x2000, 0x3cb: 0x2000, - 0x3cc: 0x2000, 0x3cd: 0x2000, 0x3ce: 0x2000, 0x3cf: 0x2000, 0x3d1: 0x2000, - // Block 0x10, offset 0x400 - 0x400: 0x4000, 0x401: 0x4000, 0x402: 0x4000, 0x403: 0x4000, 0x404: 0x4000, 0x405: 0x4000, - 0x406: 0x4000, 0x407: 0x4000, 0x408: 0x4000, 0x409: 0x4000, 0x40a: 0x4000, 0x40b: 0x4000, - 0x40c: 0x4000, 
0x40d: 0x4000, 0x40e: 0x4000, 0x40f: 0x4000, 0x410: 0x4000, 0x411: 0x4000, - 0x412: 0x4000, 0x413: 0x4000, 0x414: 0x4000, 0x415: 0x4000, 0x416: 0x4000, 0x417: 0x4000, - 0x418: 0x4000, 0x419: 0x4000, 0x41a: 0x4000, 0x41b: 0x4000, 0x41c: 0x4000, 0x41d: 0x4000, - 0x41e: 0x4000, 0x41f: 0x4000, 0x420: 0x4000, 0x421: 0x4000, 0x422: 0x4000, 0x423: 0x4000, - 0x424: 0x4000, 0x425: 0x4000, 0x426: 0x4000, 0x427: 0x4000, 0x428: 0x4000, 0x429: 0x4000, - 0x42a: 0x4000, 0x42b: 0x4000, 0x42c: 0x4000, 0x42d: 0x4000, 0x42e: 0x4000, 0x42f: 0x4000, - 0x430: 0x4000, 0x431: 0x4000, 0x432: 0x4000, 0x433: 0x4000, 0x434: 0x4000, 0x435: 0x4000, - 0x436: 0x4000, 0x437: 0x4000, 0x438: 0x4000, 0x439: 0x4000, 0x43a: 0x4000, 0x43b: 0x4000, - 0x43c: 0x4000, 0x43d: 0x4000, 0x43e: 0x4000, 0x43f: 0x4000, - // Block 0x11, offset 0x440 - 0x440: 0x4000, 0x441: 0x4000, 0x442: 0x4000, 0x443: 0x4000, 0x444: 0x4000, 0x445: 0x4000, - 0x446: 0x4000, 0x447: 0x4000, 0x448: 0x4000, 0x449: 0x4000, 0x44a: 0x4000, 0x44b: 0x4000, - 0x44c: 0x4000, 0x44d: 0x4000, 0x44e: 0x4000, 0x44f: 0x4000, 0x450: 0x4000, 0x451: 0x4000, - 0x452: 0x4000, 0x453: 0x4000, 0x454: 0x4000, 0x455: 0x4000, 0x456: 0x4000, 0x457: 0x4000, - 0x458: 0x4000, 0x459: 0x4000, 0x45a: 0x4000, 0x45b: 0x4000, 0x45c: 0x4000, 0x45d: 0x4000, - 0x45e: 0x4000, 0x45f: 0x4000, - // Block 0x12, offset 0x480 - 0x490: 0x2000, - 0x493: 0x2000, 0x494: 0x2000, 0x495: 0x2000, 0x496: 0x2000, - 0x498: 0x2000, 0x499: 0x2000, 0x49c: 0x2000, 0x49d: 0x2000, - 0x4a0: 0x2000, 0x4a1: 0x2000, 0x4a2: 0x2000, - 0x4a4: 0x2000, 0x4a5: 0x2000, 0x4a6: 0x2000, 0x4a7: 0x2000, - 0x4b0: 0x2000, 0x4b2: 0x2000, 0x4b3: 0x2000, 0x4b5: 0x2000, - 0x4bb: 0x2000, - 0x4be: 0x2000, - // Block 0x13, offset 0x4c0 - 0x4f4: 0x2000, - 0x4ff: 0x2000, - // Block 0x14, offset 0x500 - 0x501: 0x2000, 0x502: 0x2000, 0x503: 0x2000, 0x504: 0x2000, - 0x529: 0xa009, - 0x52c: 0x2000, - // Block 0x15, offset 0x540 - 0x543: 0x2000, 0x545: 0x2000, - 0x549: 0x2000, - 0x553: 0x2000, 0x556: 0x2000, - 0x561: 0x2000, 0x562: 0x2000, - 0x566: 0x2000, - 0x56b: 0x2000, - // Block 0x16, offset 0x580 - 0x593: 0x2000, 0x594: 0x2000, - 0x59b: 0x2000, 0x59c: 0x2000, 0x59d: 0x2000, - 0x59e: 0x2000, 0x5a0: 0x2000, 0x5a1: 0x2000, 0x5a2: 0x2000, 0x5a3: 0x2000, - 0x5a4: 0x2000, 0x5a5: 0x2000, 0x5a6: 0x2000, 0x5a7: 0x2000, 0x5a8: 0x2000, 0x5a9: 0x2000, - 0x5aa: 0x2000, 0x5ab: 0x2000, - 0x5b0: 0x2000, 0x5b1: 0x2000, 0x5b2: 0x2000, 0x5b3: 0x2000, 0x5b4: 0x2000, 0x5b5: 0x2000, - 0x5b6: 0x2000, 0x5b7: 0x2000, 0x5b8: 0x2000, 0x5b9: 0x2000, - // Block 0x17, offset 0x5c0 - 0x5c9: 0x2000, - 0x5d0: 0x200a, 0x5d1: 0x200b, - 0x5d2: 0x200a, 0x5d3: 0x200c, 0x5d4: 0x2000, 0x5d5: 0x2000, 0x5d6: 0x2000, 0x5d7: 0x2000, - 0x5d8: 0x2000, 0x5d9: 0x2000, - 0x5f8: 0x2000, 0x5f9: 0x2000, - // Block 0x18, offset 0x600 - 0x612: 0x2000, 0x614: 0x2000, - 0x627: 0x2000, - // Block 0x19, offset 0x640 - 0x640: 0x2000, 0x642: 0x2000, 0x643: 0x2000, - 0x647: 0x2000, 0x648: 0x2000, 0x64b: 0x2000, - 0x64f: 0x2000, 0x651: 0x2000, - 0x655: 0x2000, - 0x65a: 0x2000, 0x65d: 0x2000, - 0x65e: 0x2000, 0x65f: 0x2000, 0x660: 0x2000, 0x663: 0x2000, - 0x665: 0x2000, 0x667: 0x2000, 0x668: 0x2000, 0x669: 0x2000, - 0x66a: 0x2000, 0x66b: 0x2000, 0x66c: 0x2000, 0x66e: 0x2000, - 0x674: 0x2000, 0x675: 0x2000, - 0x676: 0x2000, 0x677: 0x2000, - 0x67c: 0x2000, 0x67d: 0x2000, - // Block 0x1a, offset 0x680 - 0x688: 0x2000, - 0x68c: 0x2000, - 0x692: 0x2000, - 0x6a0: 0x2000, 0x6a1: 0x2000, - 0x6a4: 0x2000, 0x6a5: 0x2000, 0x6a6: 0x2000, 0x6a7: 0x2000, - 0x6aa: 0x2000, 0x6ab: 0x2000, 0x6ae: 0x2000, 0x6af: 0x2000, - // 
Block 0x1b, offset 0x6c0 - 0x6c2: 0x2000, 0x6c3: 0x2000, - 0x6c6: 0x2000, 0x6c7: 0x2000, - 0x6d5: 0x2000, - 0x6d9: 0x2000, - 0x6e5: 0x2000, - 0x6ff: 0x2000, - // Block 0x1c, offset 0x700 - 0x712: 0x2000, - 0x71a: 0x4000, 0x71b: 0x4000, - 0x729: 0x4000, - 0x72a: 0x4000, - // Block 0x1d, offset 0x740 - 0x769: 0x4000, - 0x76a: 0x4000, 0x76b: 0x4000, 0x76c: 0x4000, - 0x770: 0x4000, 0x773: 0x4000, - // Block 0x1e, offset 0x780 - 0x7a0: 0x2000, 0x7a1: 0x2000, 0x7a2: 0x2000, 0x7a3: 0x2000, - 0x7a4: 0x2000, 0x7a5: 0x2000, 0x7a6: 0x2000, 0x7a7: 0x2000, 0x7a8: 0x2000, 0x7a9: 0x2000, - 0x7aa: 0x2000, 0x7ab: 0x2000, 0x7ac: 0x2000, 0x7ad: 0x2000, 0x7ae: 0x2000, 0x7af: 0x2000, - 0x7b0: 0x2000, 0x7b1: 0x2000, 0x7b2: 0x2000, 0x7b3: 0x2000, 0x7b4: 0x2000, 0x7b5: 0x2000, - 0x7b6: 0x2000, 0x7b7: 0x2000, 0x7b8: 0x2000, 0x7b9: 0x2000, 0x7ba: 0x2000, 0x7bb: 0x2000, - 0x7bc: 0x2000, 0x7bd: 0x2000, 0x7be: 0x2000, 0x7bf: 0x2000, - // Block 0x1f, offset 0x7c0 - 0x7c0: 0x2000, 0x7c1: 0x2000, 0x7c2: 0x2000, 0x7c3: 0x2000, 0x7c4: 0x2000, 0x7c5: 0x2000, - 0x7c6: 0x2000, 0x7c7: 0x2000, 0x7c8: 0x2000, 0x7c9: 0x2000, 0x7ca: 0x2000, 0x7cb: 0x2000, - 0x7cc: 0x2000, 0x7cd: 0x2000, 0x7ce: 0x2000, 0x7cf: 0x2000, 0x7d0: 0x2000, 0x7d1: 0x2000, - 0x7d2: 0x2000, 0x7d3: 0x2000, 0x7d4: 0x2000, 0x7d5: 0x2000, 0x7d6: 0x2000, 0x7d7: 0x2000, - 0x7d8: 0x2000, 0x7d9: 0x2000, 0x7da: 0x2000, 0x7db: 0x2000, 0x7dc: 0x2000, 0x7dd: 0x2000, - 0x7de: 0x2000, 0x7df: 0x2000, 0x7e0: 0x2000, 0x7e1: 0x2000, 0x7e2: 0x2000, 0x7e3: 0x2000, - 0x7e4: 0x2000, 0x7e5: 0x2000, 0x7e6: 0x2000, 0x7e7: 0x2000, 0x7e8: 0x2000, 0x7e9: 0x2000, - 0x7eb: 0x2000, 0x7ec: 0x2000, 0x7ed: 0x2000, 0x7ee: 0x2000, 0x7ef: 0x2000, - 0x7f0: 0x2000, 0x7f1: 0x2000, 0x7f2: 0x2000, 0x7f3: 0x2000, 0x7f4: 0x2000, 0x7f5: 0x2000, - 0x7f6: 0x2000, 0x7f7: 0x2000, 0x7f8: 0x2000, 0x7f9: 0x2000, 0x7fa: 0x2000, 0x7fb: 0x2000, - 0x7fc: 0x2000, 0x7fd: 0x2000, 0x7fe: 0x2000, 0x7ff: 0x2000, - // Block 0x20, offset 0x800 - 0x800: 0x2000, 0x801: 0x2000, 0x802: 0x200d, 0x803: 0x2000, 0x804: 0x2000, 0x805: 0x2000, - 0x806: 0x2000, 0x807: 0x2000, 0x808: 0x2000, 0x809: 0x2000, 0x80a: 0x2000, 0x80b: 0x2000, - 0x80c: 0x2000, 0x80d: 0x2000, 0x80e: 0x2000, 0x80f: 0x2000, 0x810: 0x2000, 0x811: 0x2000, - 0x812: 0x2000, 0x813: 0x2000, 0x814: 0x2000, 0x815: 0x2000, 0x816: 0x2000, 0x817: 0x2000, - 0x818: 0x2000, 0x819: 0x2000, 0x81a: 0x2000, 0x81b: 0x2000, 0x81c: 0x2000, 0x81d: 0x2000, - 0x81e: 0x2000, 0x81f: 0x2000, 0x820: 0x2000, 0x821: 0x2000, 0x822: 0x2000, 0x823: 0x2000, - 0x824: 0x2000, 0x825: 0x2000, 0x826: 0x2000, 0x827: 0x2000, 0x828: 0x2000, 0x829: 0x2000, - 0x82a: 0x2000, 0x82b: 0x2000, 0x82c: 0x2000, 0x82d: 0x2000, 0x82e: 0x2000, 0x82f: 0x2000, - 0x830: 0x2000, 0x831: 0x2000, 0x832: 0x2000, 0x833: 0x2000, 0x834: 0x2000, 0x835: 0x2000, - 0x836: 0x2000, 0x837: 0x2000, 0x838: 0x2000, 0x839: 0x2000, 0x83a: 0x2000, 0x83b: 0x2000, - 0x83c: 0x2000, 0x83d: 0x2000, 0x83e: 0x2000, 0x83f: 0x2000, - // Block 0x21, offset 0x840 - 0x840: 0x2000, 0x841: 0x2000, 0x842: 0x2000, 0x843: 0x2000, 0x844: 0x2000, 0x845: 0x2000, - 0x846: 0x2000, 0x847: 0x2000, 0x848: 0x2000, 0x849: 0x2000, 0x84a: 0x2000, 0x84b: 0x2000, - 0x850: 0x2000, 0x851: 0x2000, - 0x852: 0x2000, 0x853: 0x2000, 0x854: 0x2000, 0x855: 0x2000, 0x856: 0x2000, 0x857: 0x2000, - 0x858: 0x2000, 0x859: 0x2000, 0x85a: 0x2000, 0x85b: 0x2000, 0x85c: 0x2000, 0x85d: 0x2000, - 0x85e: 0x2000, 0x85f: 0x2000, 0x860: 0x2000, 0x861: 0x2000, 0x862: 0x2000, 0x863: 0x2000, - 0x864: 0x2000, 0x865: 0x2000, 0x866: 0x2000, 0x867: 0x2000, 0x868: 0x2000, 0x869: 0x2000, - 0x86a: 
0x2000, 0x86b: 0x2000, 0x86c: 0x2000, 0x86d: 0x2000, 0x86e: 0x2000, 0x86f: 0x2000, - 0x870: 0x2000, 0x871: 0x2000, 0x872: 0x2000, 0x873: 0x2000, - // Block 0x22, offset 0x880 - 0x880: 0x2000, 0x881: 0x2000, 0x882: 0x2000, 0x883: 0x2000, 0x884: 0x2000, 0x885: 0x2000, - 0x886: 0x2000, 0x887: 0x2000, 0x888: 0x2000, 0x889: 0x2000, 0x88a: 0x2000, 0x88b: 0x2000, - 0x88c: 0x2000, 0x88d: 0x2000, 0x88e: 0x2000, 0x88f: 0x2000, - 0x892: 0x2000, 0x893: 0x2000, 0x894: 0x2000, 0x895: 0x2000, - 0x8a0: 0x200e, 0x8a1: 0x2000, 0x8a3: 0x2000, - 0x8a4: 0x2000, 0x8a5: 0x2000, 0x8a6: 0x2000, 0x8a7: 0x2000, 0x8a8: 0x2000, 0x8a9: 0x2000, - 0x8b2: 0x2000, 0x8b3: 0x2000, - 0x8b6: 0x2000, 0x8b7: 0x2000, - 0x8bc: 0x2000, 0x8bd: 0x2000, - // Block 0x23, offset 0x8c0 - 0x8c0: 0x2000, 0x8c1: 0x2000, - 0x8c6: 0x2000, 0x8c7: 0x2000, 0x8c8: 0x2000, 0x8cb: 0x200f, - 0x8ce: 0x2000, 0x8cf: 0x2000, 0x8d0: 0x2000, 0x8d1: 0x2000, - 0x8e2: 0x2000, 0x8e3: 0x2000, - 0x8e4: 0x2000, 0x8e5: 0x2000, - 0x8ef: 0x2000, - 0x8fd: 0x4000, 0x8fe: 0x4000, - // Block 0x24, offset 0x900 - 0x905: 0x2000, - 0x906: 0x2000, 0x909: 0x2000, - 0x90e: 0x2000, 0x90f: 0x2000, - 0x914: 0x4000, 0x915: 0x4000, - 0x91c: 0x2000, - 0x91e: 0x2000, - // Block 0x25, offset 0x940 - 0x940: 0x2000, 0x942: 0x2000, - 0x948: 0x4000, 0x949: 0x4000, 0x94a: 0x4000, 0x94b: 0x4000, - 0x94c: 0x4000, 0x94d: 0x4000, 0x94e: 0x4000, 0x94f: 0x4000, 0x950: 0x4000, 0x951: 0x4000, - 0x952: 0x4000, 0x953: 0x4000, - 0x960: 0x2000, 0x961: 0x2000, 0x963: 0x2000, - 0x964: 0x2000, 0x965: 0x2000, 0x967: 0x2000, 0x968: 0x2000, 0x969: 0x2000, - 0x96a: 0x2000, 0x96c: 0x2000, 0x96d: 0x2000, 0x96f: 0x2000, - 0x97f: 0x4000, - // Block 0x26, offset 0x980 - 0x993: 0x4000, - 0x99e: 0x2000, 0x99f: 0x2000, 0x9a1: 0x4000, - 0x9aa: 0x4000, 0x9ab: 0x4000, - 0x9bd: 0x4000, 0x9be: 0x4000, 0x9bf: 0x2000, - // Block 0x27, offset 0x9c0 - 0x9c4: 0x4000, 0x9c5: 0x4000, - 0x9c6: 0x2000, 0x9c7: 0x2000, 0x9c8: 0x2000, 0x9c9: 0x2000, 0x9ca: 0x2000, 0x9cb: 0x2000, - 0x9cc: 0x2000, 0x9cd: 0x2000, 0x9ce: 0x4000, 0x9cf: 0x2000, 0x9d0: 0x2000, 0x9d1: 0x2000, - 0x9d2: 0x2000, 0x9d3: 0x2000, 0x9d4: 0x4000, 0x9d5: 0x2000, 0x9d6: 0x2000, 0x9d7: 0x2000, - 0x9d8: 0x2000, 0x9d9: 0x2000, 0x9da: 0x2000, 0x9db: 0x2000, 0x9dc: 0x2000, 0x9dd: 0x2000, - 0x9de: 0x2000, 0x9df: 0x2000, 0x9e0: 0x2000, 0x9e1: 0x2000, 0x9e3: 0x2000, - 0x9e8: 0x2000, 0x9e9: 0x2000, - 0x9ea: 0x4000, 0x9eb: 0x2000, 0x9ec: 0x2000, 0x9ed: 0x2000, 0x9ee: 0x2000, 0x9ef: 0x2000, - 0x9f0: 0x2000, 0x9f1: 0x2000, 0x9f2: 0x4000, 0x9f3: 0x4000, 0x9f4: 0x2000, 0x9f5: 0x4000, - 0x9f6: 0x2000, 0x9f7: 0x2000, 0x9f8: 0x2000, 0x9f9: 0x2000, 0x9fa: 0x4000, 0x9fb: 0x2000, - 0x9fc: 0x2000, 0x9fd: 0x4000, 0x9fe: 0x2000, 0x9ff: 0x2000, - // Block 0x28, offset 0xa00 - 0xa05: 0x4000, - 0xa0a: 0x4000, 0xa0b: 0x4000, - 0xa28: 0x4000, - 0xa3d: 0x2000, - // Block 0x29, offset 0xa40 - 0xa4c: 0x4000, 0xa4e: 0x4000, - 0xa53: 0x4000, 0xa54: 0x4000, 0xa55: 0x4000, 0xa57: 0x4000, - 0xa76: 0x2000, 0xa77: 0x2000, 0xa78: 0x2000, 0xa79: 0x2000, 0xa7a: 0x2000, 0xa7b: 0x2000, - 0xa7c: 0x2000, 0xa7d: 0x2000, 0xa7e: 0x2000, 0xa7f: 0x2000, - // Block 0x2a, offset 0xa80 - 0xa95: 0x4000, 0xa96: 0x4000, 0xa97: 0x4000, - 0xab0: 0x4000, - 0xabf: 0x4000, - // Block 0x2b, offset 0xac0 - 0xae6: 0x6000, 0xae7: 0x6000, 0xae8: 0x6000, 0xae9: 0x6000, - 0xaea: 0x6000, 0xaeb: 0x6000, 0xaec: 0x6000, 0xaed: 0x6000, - // Block 0x2c, offset 0xb00 - 0xb05: 0x6010, - 0xb06: 0x6011, - // Block 0x2d, offset 0xb40 - 0xb5b: 0x4000, 0xb5c: 0x4000, - // Block 0x2e, offset 0xb80 - 0xb90: 0x4000, - 0xb95: 0x4000, 0xb96: 
0x2000, 0xb97: 0x2000, - 0xb98: 0x2000, 0xb99: 0x2000, - // Block 0x2f, offset 0xbc0 - 0xbc0: 0x4000, 0xbc1: 0x4000, 0xbc2: 0x4000, 0xbc3: 0x4000, 0xbc4: 0x4000, 0xbc5: 0x4000, - 0xbc6: 0x4000, 0xbc7: 0x4000, 0xbc8: 0x4000, 0xbc9: 0x4000, 0xbca: 0x4000, 0xbcb: 0x4000, - 0xbcc: 0x4000, 0xbcd: 0x4000, 0xbce: 0x4000, 0xbcf: 0x4000, 0xbd0: 0x4000, 0xbd1: 0x4000, - 0xbd2: 0x4000, 0xbd3: 0x4000, 0xbd4: 0x4000, 0xbd5: 0x4000, 0xbd6: 0x4000, 0xbd7: 0x4000, - 0xbd8: 0x4000, 0xbd9: 0x4000, 0xbdb: 0x4000, 0xbdc: 0x4000, 0xbdd: 0x4000, - 0xbde: 0x4000, 0xbdf: 0x4000, 0xbe0: 0x4000, 0xbe1: 0x4000, 0xbe2: 0x4000, 0xbe3: 0x4000, - 0xbe4: 0x4000, 0xbe5: 0x4000, 0xbe6: 0x4000, 0xbe7: 0x4000, 0xbe8: 0x4000, 0xbe9: 0x4000, - 0xbea: 0x4000, 0xbeb: 0x4000, 0xbec: 0x4000, 0xbed: 0x4000, 0xbee: 0x4000, 0xbef: 0x4000, - 0xbf0: 0x4000, 0xbf1: 0x4000, 0xbf2: 0x4000, 0xbf3: 0x4000, 0xbf4: 0x4000, 0xbf5: 0x4000, - 0xbf6: 0x4000, 0xbf7: 0x4000, 0xbf8: 0x4000, 0xbf9: 0x4000, 0xbfa: 0x4000, 0xbfb: 0x4000, - 0xbfc: 0x4000, 0xbfd: 0x4000, 0xbfe: 0x4000, 0xbff: 0x4000, - // Block 0x30, offset 0xc00 - 0xc00: 0x4000, 0xc01: 0x4000, 0xc02: 0x4000, 0xc03: 0x4000, 0xc04: 0x4000, 0xc05: 0x4000, - 0xc06: 0x4000, 0xc07: 0x4000, 0xc08: 0x4000, 0xc09: 0x4000, 0xc0a: 0x4000, 0xc0b: 0x4000, - 0xc0c: 0x4000, 0xc0d: 0x4000, 0xc0e: 0x4000, 0xc0f: 0x4000, 0xc10: 0x4000, 0xc11: 0x4000, - 0xc12: 0x4000, 0xc13: 0x4000, 0xc14: 0x4000, 0xc15: 0x4000, 0xc16: 0x4000, 0xc17: 0x4000, - 0xc18: 0x4000, 0xc19: 0x4000, 0xc1a: 0x4000, 0xc1b: 0x4000, 0xc1c: 0x4000, 0xc1d: 0x4000, - 0xc1e: 0x4000, 0xc1f: 0x4000, 0xc20: 0x4000, 0xc21: 0x4000, 0xc22: 0x4000, 0xc23: 0x4000, - 0xc24: 0x4000, 0xc25: 0x4000, 0xc26: 0x4000, 0xc27: 0x4000, 0xc28: 0x4000, 0xc29: 0x4000, - 0xc2a: 0x4000, 0xc2b: 0x4000, 0xc2c: 0x4000, 0xc2d: 0x4000, 0xc2e: 0x4000, 0xc2f: 0x4000, - 0xc30: 0x4000, 0xc31: 0x4000, 0xc32: 0x4000, 0xc33: 0x4000, - // Block 0x31, offset 0xc40 - 0xc40: 0x4000, 0xc41: 0x4000, 0xc42: 0x4000, 0xc43: 0x4000, 0xc44: 0x4000, 0xc45: 0x4000, - 0xc46: 0x4000, 0xc47: 0x4000, 0xc48: 0x4000, 0xc49: 0x4000, 0xc4a: 0x4000, 0xc4b: 0x4000, - 0xc4c: 0x4000, 0xc4d: 0x4000, 0xc4e: 0x4000, 0xc4f: 0x4000, 0xc50: 0x4000, 0xc51: 0x4000, - 0xc52: 0x4000, 0xc53: 0x4000, 0xc54: 0x4000, 0xc55: 0x4000, - 0xc70: 0x4000, 0xc71: 0x4000, 0xc72: 0x4000, 0xc73: 0x4000, 0xc74: 0x4000, 0xc75: 0x4000, - 0xc76: 0x4000, 0xc77: 0x4000, 0xc78: 0x4000, 0xc79: 0x4000, 0xc7a: 0x4000, 0xc7b: 0x4000, - // Block 0x32, offset 0xc80 - 0xc80: 0x9012, 0xc81: 0x4013, 0xc82: 0x4014, 0xc83: 0x4000, 0xc84: 0x4000, 0xc85: 0x4000, - 0xc86: 0x4000, 0xc87: 0x4000, 0xc88: 0x4000, 0xc89: 0x4000, 0xc8a: 0x4000, 0xc8b: 0x4000, - 0xc8c: 0x4015, 0xc8d: 0x4015, 0xc8e: 0x4000, 0xc8f: 0x4000, 0xc90: 0x4000, 0xc91: 0x4000, - 0xc92: 0x4000, 0xc93: 0x4000, 0xc94: 0x4000, 0xc95: 0x4000, 0xc96: 0x4000, 0xc97: 0x4000, - 0xc98: 0x4000, 0xc99: 0x4000, 0xc9a: 0x4000, 0xc9b: 0x4000, 0xc9c: 0x4000, 0xc9d: 0x4000, - 0xc9e: 0x4000, 0xc9f: 0x4000, 0xca0: 0x4000, 0xca1: 0x4000, 0xca2: 0x4000, 0xca3: 0x4000, - 0xca4: 0x4000, 0xca5: 0x4000, 0xca6: 0x4000, 0xca7: 0x4000, 0xca8: 0x4000, 0xca9: 0x4000, - 0xcaa: 0x4000, 0xcab: 0x4000, 0xcac: 0x4000, 0xcad: 0x4000, 0xcae: 0x4000, 0xcaf: 0x4000, - 0xcb0: 0x4000, 0xcb1: 0x4000, 0xcb2: 0x4000, 0xcb3: 0x4000, 0xcb4: 0x4000, 0xcb5: 0x4000, - 0xcb6: 0x4000, 0xcb7: 0x4000, 0xcb8: 0x4000, 0xcb9: 0x4000, 0xcba: 0x4000, 0xcbb: 0x4000, - 0xcbc: 0x4000, 0xcbd: 0x4000, 0xcbe: 0x4000, - // Block 0x33, offset 0xcc0 - 0xcc1: 0x4000, 0xcc2: 0x4000, 0xcc3: 0x4000, 0xcc4: 0x4000, 0xcc5: 0x4000, - 0xcc6: 0x4000, 
0xcc7: 0x4000, 0xcc8: 0x4000, 0xcc9: 0x4000, 0xcca: 0x4000, 0xccb: 0x4000, - 0xccc: 0x4000, 0xccd: 0x4000, 0xcce: 0x4000, 0xccf: 0x4000, 0xcd0: 0x4000, 0xcd1: 0x4000, - 0xcd2: 0x4000, 0xcd3: 0x4000, 0xcd4: 0x4000, 0xcd5: 0x4000, 0xcd6: 0x4000, 0xcd7: 0x4000, - 0xcd8: 0x4000, 0xcd9: 0x4000, 0xcda: 0x4000, 0xcdb: 0x4000, 0xcdc: 0x4000, 0xcdd: 0x4000, - 0xcde: 0x4000, 0xcdf: 0x4000, 0xce0: 0x4000, 0xce1: 0x4000, 0xce2: 0x4000, 0xce3: 0x4000, - 0xce4: 0x4000, 0xce5: 0x4000, 0xce6: 0x4000, 0xce7: 0x4000, 0xce8: 0x4000, 0xce9: 0x4000, - 0xcea: 0x4000, 0xceb: 0x4000, 0xcec: 0x4000, 0xced: 0x4000, 0xcee: 0x4000, 0xcef: 0x4000, - 0xcf0: 0x4000, 0xcf1: 0x4000, 0xcf2: 0x4000, 0xcf3: 0x4000, 0xcf4: 0x4000, 0xcf5: 0x4000, - 0xcf6: 0x4000, 0xcf7: 0x4000, 0xcf8: 0x4000, 0xcf9: 0x4000, 0xcfa: 0x4000, 0xcfb: 0x4000, - 0xcfc: 0x4000, 0xcfd: 0x4000, 0xcfe: 0x4000, 0xcff: 0x4000, - // Block 0x34, offset 0xd00 - 0xd00: 0x4000, 0xd01: 0x4000, 0xd02: 0x4000, 0xd03: 0x4000, 0xd04: 0x4000, 0xd05: 0x4000, - 0xd06: 0x4000, 0xd07: 0x4000, 0xd08: 0x4000, 0xd09: 0x4000, 0xd0a: 0x4000, 0xd0b: 0x4000, - 0xd0c: 0x4000, 0xd0d: 0x4000, 0xd0e: 0x4000, 0xd0f: 0x4000, 0xd10: 0x4000, 0xd11: 0x4000, - 0xd12: 0x4000, 0xd13: 0x4000, 0xd14: 0x4000, 0xd15: 0x4000, 0xd16: 0x4000, - 0xd19: 0x4016, 0xd1a: 0x4017, 0xd1b: 0x4000, 0xd1c: 0x4000, 0xd1d: 0x4000, - 0xd1e: 0x4000, 0xd1f: 0x4000, 0xd20: 0x4000, 0xd21: 0x4018, 0xd22: 0x4019, 0xd23: 0x401a, - 0xd24: 0x401b, 0xd25: 0x401c, 0xd26: 0x401d, 0xd27: 0x401e, 0xd28: 0x401f, 0xd29: 0x4020, - 0xd2a: 0x4021, 0xd2b: 0x4022, 0xd2c: 0x4000, 0xd2d: 0x4010, 0xd2e: 0x4000, 0xd2f: 0x4023, - 0xd30: 0x4000, 0xd31: 0x4024, 0xd32: 0x4000, 0xd33: 0x4025, 0xd34: 0x4000, 0xd35: 0x4026, - 0xd36: 0x4000, 0xd37: 0x401a, 0xd38: 0x4000, 0xd39: 0x4027, 0xd3a: 0x4000, 0xd3b: 0x4028, - 0xd3c: 0x4000, 0xd3d: 0x4020, 0xd3e: 0x4000, 0xd3f: 0x4029, - // Block 0x35, offset 0xd40 - 0xd40: 0x4000, 0xd41: 0x402a, 0xd42: 0x4000, 0xd43: 0x402b, 0xd44: 0x402c, 0xd45: 0x4000, - 0xd46: 0x4017, 0xd47: 0x4000, 0xd48: 0x402d, 0xd49: 0x4000, 0xd4a: 0x402e, 0xd4b: 0x402f, - 0xd4c: 0x4030, 0xd4d: 0x4017, 0xd4e: 0x4016, 0xd4f: 0x4017, 0xd50: 0x4000, 0xd51: 0x4000, - 0xd52: 0x4031, 0xd53: 0x4000, 0xd54: 0x4000, 0xd55: 0x4031, 0xd56: 0x4000, 0xd57: 0x4000, - 0xd58: 0x4032, 0xd59: 0x4000, 0xd5a: 0x4000, 0xd5b: 0x4032, 0xd5c: 0x4000, 0xd5d: 0x4000, - 0xd5e: 0x4033, 0xd5f: 0x402e, 0xd60: 0x4034, 0xd61: 0x4035, 0xd62: 0x4034, 0xd63: 0x4036, - 0xd64: 0x4037, 0xd65: 0x4024, 0xd66: 0x4035, 0xd67: 0x4025, 0xd68: 0x4038, 0xd69: 0x4038, - 0xd6a: 0x4039, 0xd6b: 0x4039, 0xd6c: 0x403a, 0xd6d: 0x403a, 0xd6e: 0x4000, 0xd6f: 0x4035, - 0xd70: 0x4000, 0xd71: 0x4000, 0xd72: 0x403b, 0xd73: 0x403c, 0xd74: 0x4000, 0xd75: 0x4000, - 0xd76: 0x4000, 0xd77: 0x4000, 0xd78: 0x4000, 0xd79: 0x4000, 0xd7a: 0x4000, 0xd7b: 0x403d, - 0xd7c: 0x401c, 0xd7d: 0x4000, 0xd7e: 0x4000, 0xd7f: 0x4000, - // Block 0x36, offset 0xd80 - 0xd85: 0x4000, - 0xd86: 0x4000, 0xd87: 0x4000, 0xd88: 0x4000, 0xd89: 0x4000, 0xd8a: 0x4000, 0xd8b: 0x4000, - 0xd8c: 0x4000, 0xd8d: 0x4000, 0xd8e: 0x4000, 0xd8f: 0x4000, 0xd90: 0x4000, 0xd91: 0x4000, - 0xd92: 0x4000, 0xd93: 0x4000, 0xd94: 0x4000, 0xd95: 0x4000, 0xd96: 0x4000, 0xd97: 0x4000, - 0xd98: 0x4000, 0xd99: 0x4000, 0xd9a: 0x4000, 0xd9b: 0x4000, 0xd9c: 0x4000, 0xd9d: 0x4000, - 0xd9e: 0x4000, 0xd9f: 0x4000, 0xda0: 0x4000, 0xda1: 0x4000, 0xda2: 0x4000, 0xda3: 0x4000, - 0xda4: 0x4000, 0xda5: 0x4000, 0xda6: 0x4000, 0xda7: 0x4000, 0xda8: 0x4000, 0xda9: 0x4000, - 0xdaa: 0x4000, 0xdab: 0x4000, 0xdac: 0x4000, 0xdad: 0x4000, 0xdae: 0x4000, 0xdaf: 
0x4000, - 0xdb1: 0x403e, 0xdb2: 0x403e, 0xdb3: 0x403e, 0xdb4: 0x403e, 0xdb5: 0x403e, - 0xdb6: 0x403e, 0xdb7: 0x403e, 0xdb8: 0x403e, 0xdb9: 0x403e, 0xdba: 0x403e, 0xdbb: 0x403e, - 0xdbc: 0x403e, 0xdbd: 0x403e, 0xdbe: 0x403e, 0xdbf: 0x403e, - // Block 0x37, offset 0xdc0 - 0xdc0: 0x4037, 0xdc1: 0x4037, 0xdc2: 0x4037, 0xdc3: 0x4037, 0xdc4: 0x4037, 0xdc5: 0x4037, - 0xdc6: 0x4037, 0xdc7: 0x4037, 0xdc8: 0x4037, 0xdc9: 0x4037, 0xdca: 0x4037, 0xdcb: 0x4037, - 0xdcc: 0x4037, 0xdcd: 0x4037, 0xdce: 0x4037, 0xdcf: 0x400e, 0xdd0: 0x403f, 0xdd1: 0x4040, - 0xdd2: 0x4041, 0xdd3: 0x4040, 0xdd4: 0x403f, 0xdd5: 0x4042, 0xdd6: 0x4043, 0xdd7: 0x4044, - 0xdd8: 0x4040, 0xdd9: 0x4041, 0xdda: 0x4040, 0xddb: 0x4045, 0xddc: 0x4009, 0xddd: 0x4045, - 0xdde: 0x4046, 0xddf: 0x4045, 0xde0: 0x4047, 0xde1: 0x400b, 0xde2: 0x400a, 0xde3: 0x400c, - 0xde4: 0x4048, 0xde5: 0x4000, 0xde6: 0x4000, 0xde7: 0x4000, 0xde8: 0x4000, 0xde9: 0x4000, - 0xdea: 0x4000, 0xdeb: 0x4000, 0xdec: 0x4000, 0xded: 0x4000, 0xdee: 0x4000, 0xdef: 0x4000, - 0xdf0: 0x4000, 0xdf1: 0x4000, 0xdf2: 0x4000, 0xdf3: 0x4000, 0xdf4: 0x4000, 0xdf5: 0x4000, - 0xdf6: 0x4000, 0xdf7: 0x4000, 0xdf8: 0x4000, 0xdf9: 0x4000, 0xdfa: 0x4000, 0xdfb: 0x4000, - 0xdfc: 0x4000, 0xdfd: 0x4000, 0xdfe: 0x4000, 0xdff: 0x4000, - // Block 0x38, offset 0xe00 - 0xe00: 0x4000, 0xe01: 0x4000, 0xe02: 0x4000, 0xe03: 0x4000, 0xe04: 0x4000, 0xe05: 0x4000, - 0xe06: 0x4000, 0xe07: 0x4000, 0xe08: 0x4000, 0xe09: 0x4000, 0xe0a: 0x4000, 0xe0b: 0x4000, - 0xe0c: 0x4000, 0xe0d: 0x4000, 0xe0e: 0x4000, 0xe10: 0x4000, 0xe11: 0x4000, - 0xe12: 0x4000, 0xe13: 0x4000, 0xe14: 0x4000, 0xe15: 0x4000, 0xe16: 0x4000, 0xe17: 0x4000, - 0xe18: 0x4000, 0xe19: 0x4000, 0xe1a: 0x4000, 0xe1b: 0x4000, 0xe1c: 0x4000, 0xe1d: 0x4000, - 0xe1e: 0x4000, 0xe1f: 0x4000, 0xe20: 0x4000, 0xe21: 0x4000, 0xe22: 0x4000, 0xe23: 0x4000, - 0xe24: 0x4000, 0xe25: 0x4000, 0xe26: 0x4000, 0xe27: 0x4000, 0xe28: 0x4000, 0xe29: 0x4000, - 0xe2a: 0x4000, 0xe2b: 0x4000, 0xe2c: 0x4000, 0xe2d: 0x4000, 0xe2e: 0x4000, 0xe2f: 0x4000, - 0xe30: 0x4000, 0xe31: 0x4000, 0xe32: 0x4000, 0xe33: 0x4000, 0xe34: 0x4000, 0xe35: 0x4000, - 0xe36: 0x4000, 0xe37: 0x4000, 0xe38: 0x4000, 0xe39: 0x4000, 0xe3a: 0x4000, 0xe3b: 0x4000, - 0xe3c: 0x4000, 0xe3d: 0x4000, 0xe3e: 0x4000, 0xe3f: 0x4000, - // Block 0x39, offset 0xe40 - 0xe40: 0x4000, 0xe41: 0x4000, 0xe42: 0x4000, 0xe43: 0x4000, 0xe44: 0x4000, 0xe45: 0x4000, - 0xe46: 0x4000, 0xe47: 0x4000, 0xe48: 0x4000, 0xe49: 0x4000, 0xe4a: 0x4000, 0xe4b: 0x4000, - 0xe4c: 0x4000, 0xe4d: 0x4000, 0xe4e: 0x4000, 0xe4f: 0x4000, 0xe50: 0x4000, 0xe51: 0x4000, - 0xe52: 0x4000, 0xe53: 0x4000, 0xe54: 0x4000, 0xe55: 0x4000, 0xe56: 0x4000, 0xe57: 0x4000, - 0xe58: 0x4000, 0xe59: 0x4000, 0xe5a: 0x4000, 0xe5b: 0x4000, 0xe5c: 0x4000, 0xe5d: 0x4000, - 0xe5e: 0x4000, 0xe5f: 0x4000, 0xe60: 0x4000, 0xe61: 0x4000, 0xe62: 0x4000, 0xe63: 0x4000, - 0xe70: 0x4000, 0xe71: 0x4000, 0xe72: 0x4000, 0xe73: 0x4000, 0xe74: 0x4000, 0xe75: 0x4000, - 0xe76: 0x4000, 0xe77: 0x4000, 0xe78: 0x4000, 0xe79: 0x4000, 0xe7a: 0x4000, 0xe7b: 0x4000, - 0xe7c: 0x4000, 0xe7d: 0x4000, 0xe7e: 0x4000, 0xe7f: 0x4000, - // Block 0x3a, offset 0xe80 - 0xe80: 0x4000, 0xe81: 0x4000, 0xe82: 0x4000, 0xe83: 0x4000, 0xe84: 0x4000, 0xe85: 0x4000, - 0xe86: 0x4000, 0xe87: 0x4000, 0xe88: 0x4000, 0xe89: 0x4000, 0xe8a: 0x4000, 0xe8b: 0x4000, - 0xe8c: 0x4000, 0xe8d: 0x4000, 0xe8e: 0x4000, 0xe8f: 0x4000, 0xe90: 0x4000, 0xe91: 0x4000, - 0xe92: 0x4000, 0xe93: 0x4000, 0xe94: 0x4000, 0xe95: 0x4000, 0xe96: 0x4000, 0xe97: 0x4000, - 0xe98: 0x4000, 0xe99: 0x4000, 0xe9a: 0x4000, 0xe9b: 0x4000, 0xe9c: 0x4000, 
0xe9d: 0x4000, - 0xe9e: 0x4000, 0xea0: 0x4000, 0xea1: 0x4000, 0xea2: 0x4000, 0xea3: 0x4000, - 0xea4: 0x4000, 0xea5: 0x4000, 0xea6: 0x4000, 0xea7: 0x4000, 0xea8: 0x4000, 0xea9: 0x4000, - 0xeaa: 0x4000, 0xeab: 0x4000, 0xeac: 0x4000, 0xead: 0x4000, 0xeae: 0x4000, 0xeaf: 0x4000, - 0xeb0: 0x4000, 0xeb1: 0x4000, 0xeb2: 0x4000, 0xeb3: 0x4000, 0xeb4: 0x4000, 0xeb5: 0x4000, - 0xeb6: 0x4000, 0xeb7: 0x4000, 0xeb8: 0x4000, 0xeb9: 0x4000, 0xeba: 0x4000, 0xebb: 0x4000, - 0xebc: 0x4000, 0xebd: 0x4000, 0xebe: 0x4000, 0xebf: 0x4000, - // Block 0x3b, offset 0xec0 - 0xec0: 0x4000, 0xec1: 0x4000, 0xec2: 0x4000, 0xec3: 0x4000, 0xec4: 0x4000, 0xec5: 0x4000, - 0xec6: 0x4000, 0xec7: 0x4000, 0xec8: 0x2000, 0xec9: 0x2000, 0xeca: 0x2000, 0xecb: 0x2000, - 0xecc: 0x2000, 0xecd: 0x2000, 0xece: 0x2000, 0xecf: 0x2000, 0xed0: 0x4000, 0xed1: 0x4000, - 0xed2: 0x4000, 0xed3: 0x4000, 0xed4: 0x4000, 0xed5: 0x4000, 0xed6: 0x4000, 0xed7: 0x4000, - 0xed8: 0x4000, 0xed9: 0x4000, 0xeda: 0x4000, 0xedb: 0x4000, 0xedc: 0x4000, 0xedd: 0x4000, - 0xede: 0x4000, 0xedf: 0x4000, 0xee0: 0x4000, 0xee1: 0x4000, 0xee2: 0x4000, 0xee3: 0x4000, - 0xee4: 0x4000, 0xee5: 0x4000, 0xee6: 0x4000, 0xee7: 0x4000, 0xee8: 0x4000, 0xee9: 0x4000, - 0xeea: 0x4000, 0xeeb: 0x4000, 0xeec: 0x4000, 0xeed: 0x4000, 0xeee: 0x4000, 0xeef: 0x4000, - 0xef0: 0x4000, 0xef1: 0x4000, 0xef2: 0x4000, 0xef3: 0x4000, 0xef4: 0x4000, 0xef5: 0x4000, - 0xef6: 0x4000, 0xef7: 0x4000, 0xef8: 0x4000, 0xef9: 0x4000, 0xefa: 0x4000, 0xefb: 0x4000, - 0xefc: 0x4000, 0xefd: 0x4000, 0xefe: 0x4000, 0xeff: 0x4000, - // Block 0x3c, offset 0xf00 - 0xf00: 0x4000, 0xf01: 0x4000, 0xf02: 0x4000, 0xf03: 0x4000, 0xf04: 0x4000, 0xf05: 0x4000, - 0xf06: 0x4000, 0xf07: 0x4000, 0xf08: 0x4000, 0xf09: 0x4000, 0xf0a: 0x4000, 0xf0b: 0x4000, - 0xf0c: 0x4000, 0xf10: 0x4000, 0xf11: 0x4000, - 0xf12: 0x4000, 0xf13: 0x4000, 0xf14: 0x4000, 0xf15: 0x4000, 0xf16: 0x4000, 0xf17: 0x4000, - 0xf18: 0x4000, 0xf19: 0x4000, 0xf1a: 0x4000, 0xf1b: 0x4000, 0xf1c: 0x4000, 0xf1d: 0x4000, - 0xf1e: 0x4000, 0xf1f: 0x4000, 0xf20: 0x4000, 0xf21: 0x4000, 0xf22: 0x4000, 0xf23: 0x4000, - 0xf24: 0x4000, 0xf25: 0x4000, 0xf26: 0x4000, 0xf27: 0x4000, 0xf28: 0x4000, 0xf29: 0x4000, - 0xf2a: 0x4000, 0xf2b: 0x4000, 0xf2c: 0x4000, 0xf2d: 0x4000, 0xf2e: 0x4000, 0xf2f: 0x4000, - 0xf30: 0x4000, 0xf31: 0x4000, 0xf32: 0x4000, 0xf33: 0x4000, 0xf34: 0x4000, 0xf35: 0x4000, - 0xf36: 0x4000, 0xf37: 0x4000, 0xf38: 0x4000, 0xf39: 0x4000, 0xf3a: 0x4000, 0xf3b: 0x4000, - 0xf3c: 0x4000, 0xf3d: 0x4000, 0xf3e: 0x4000, 0xf3f: 0x4000, - // Block 0x3d, offset 0xf40 - 0xf40: 0x4000, 0xf41: 0x4000, 0xf42: 0x4000, 0xf43: 0x4000, 0xf44: 0x4000, 0xf45: 0x4000, - 0xf46: 0x4000, - // Block 0x3e, offset 0xf80 - 0xfa0: 0x4000, 0xfa1: 0x4000, 0xfa2: 0x4000, 0xfa3: 0x4000, - 0xfa4: 0x4000, 0xfa5: 0x4000, 0xfa6: 0x4000, 0xfa7: 0x4000, 0xfa8: 0x4000, 0xfa9: 0x4000, - 0xfaa: 0x4000, 0xfab: 0x4000, 0xfac: 0x4000, 0xfad: 0x4000, 0xfae: 0x4000, 0xfaf: 0x4000, - 0xfb0: 0x4000, 0xfb1: 0x4000, 0xfb2: 0x4000, 0xfb3: 0x4000, 0xfb4: 0x4000, 0xfb5: 0x4000, - 0xfb6: 0x4000, 0xfb7: 0x4000, 0xfb8: 0x4000, 0xfb9: 0x4000, 0xfba: 0x4000, 0xfbb: 0x4000, - 0xfbc: 0x4000, - // Block 0x3f, offset 0xfc0 - 0xfc0: 0x4000, 0xfc1: 0x4000, 0xfc2: 0x4000, 0xfc3: 0x4000, 0xfc4: 0x4000, 0xfc5: 0x4000, - 0xfc6: 0x4000, 0xfc7: 0x4000, 0xfc8: 0x4000, 0xfc9: 0x4000, 0xfca: 0x4000, 0xfcb: 0x4000, - 0xfcc: 0x4000, 0xfcd: 0x4000, 0xfce: 0x4000, 0xfcf: 0x4000, 0xfd0: 0x4000, 0xfd1: 0x4000, - 0xfd2: 0x4000, 0xfd3: 0x4000, 0xfd4: 0x4000, 0xfd5: 0x4000, 0xfd6: 0x4000, 0xfd7: 0x4000, - 0xfd8: 0x4000, 0xfd9: 0x4000, 0xfda: 
0x4000, 0xfdb: 0x4000, 0xfdc: 0x4000, 0xfdd: 0x4000, - 0xfde: 0x4000, 0xfdf: 0x4000, 0xfe0: 0x4000, 0xfe1: 0x4000, 0xfe2: 0x4000, 0xfe3: 0x4000, - // Block 0x40, offset 0x1000 - 0x1000: 0x2000, 0x1001: 0x2000, 0x1002: 0x2000, 0x1003: 0x2000, 0x1004: 0x2000, 0x1005: 0x2000, - 0x1006: 0x2000, 0x1007: 0x2000, 0x1008: 0x2000, 0x1009: 0x2000, 0x100a: 0x2000, 0x100b: 0x2000, - 0x100c: 0x2000, 0x100d: 0x2000, 0x100e: 0x2000, 0x100f: 0x2000, 0x1010: 0x4000, 0x1011: 0x4000, - 0x1012: 0x4000, 0x1013: 0x4000, 0x1014: 0x4000, 0x1015: 0x4000, 0x1016: 0x4000, 0x1017: 0x4000, - 0x1018: 0x4000, 0x1019: 0x4000, - 0x1030: 0x4000, 0x1031: 0x4000, 0x1032: 0x4000, 0x1033: 0x4000, 0x1034: 0x4000, 0x1035: 0x4000, - 0x1036: 0x4000, 0x1037: 0x4000, 0x1038: 0x4000, 0x1039: 0x4000, 0x103a: 0x4000, 0x103b: 0x4000, - 0x103c: 0x4000, 0x103d: 0x4000, 0x103e: 0x4000, 0x103f: 0x4000, - // Block 0x41, offset 0x1040 - 0x1040: 0x4000, 0x1041: 0x4000, 0x1042: 0x4000, 0x1043: 0x4000, 0x1044: 0x4000, 0x1045: 0x4000, - 0x1046: 0x4000, 0x1047: 0x4000, 0x1048: 0x4000, 0x1049: 0x4000, 0x104a: 0x4000, 0x104b: 0x4000, - 0x104c: 0x4000, 0x104d: 0x4000, 0x104e: 0x4000, 0x104f: 0x4000, 0x1050: 0x4000, 0x1051: 0x4000, - 0x1052: 0x4000, 0x1054: 0x4000, 0x1055: 0x4000, 0x1056: 0x4000, 0x1057: 0x4000, - 0x1058: 0x4000, 0x1059: 0x4000, 0x105a: 0x4000, 0x105b: 0x4000, 0x105c: 0x4000, 0x105d: 0x4000, - 0x105e: 0x4000, 0x105f: 0x4000, 0x1060: 0x4000, 0x1061: 0x4000, 0x1062: 0x4000, 0x1063: 0x4000, - 0x1064: 0x4000, 0x1065: 0x4000, 0x1066: 0x4000, 0x1068: 0x4000, 0x1069: 0x4000, - 0x106a: 0x4000, 0x106b: 0x4000, - // Block 0x42, offset 0x1080 - 0x1081: 0x9012, 0x1082: 0x9012, 0x1083: 0x9012, 0x1084: 0x9012, 0x1085: 0x9012, - 0x1086: 0x9012, 0x1087: 0x9012, 0x1088: 0x9012, 0x1089: 0x9012, 0x108a: 0x9012, 0x108b: 0x9012, - 0x108c: 0x9012, 0x108d: 0x9012, 0x108e: 0x9012, 0x108f: 0x9012, 0x1090: 0x9012, 0x1091: 0x9012, - 0x1092: 0x9012, 0x1093: 0x9012, 0x1094: 0x9012, 0x1095: 0x9012, 0x1096: 0x9012, 0x1097: 0x9012, - 0x1098: 0x9012, 0x1099: 0x9012, 0x109a: 0x9012, 0x109b: 0x9012, 0x109c: 0x9012, 0x109d: 0x9012, - 0x109e: 0x9012, 0x109f: 0x9012, 0x10a0: 0x9049, 0x10a1: 0x9049, 0x10a2: 0x9049, 0x10a3: 0x9049, - 0x10a4: 0x9049, 0x10a5: 0x9049, 0x10a6: 0x9049, 0x10a7: 0x9049, 0x10a8: 0x9049, 0x10a9: 0x9049, - 0x10aa: 0x9049, 0x10ab: 0x9049, 0x10ac: 0x9049, 0x10ad: 0x9049, 0x10ae: 0x9049, 0x10af: 0x9049, - 0x10b0: 0x9049, 0x10b1: 0x9049, 0x10b2: 0x9049, 0x10b3: 0x9049, 0x10b4: 0x9049, 0x10b5: 0x9049, - 0x10b6: 0x9049, 0x10b7: 0x9049, 0x10b8: 0x9049, 0x10b9: 0x9049, 0x10ba: 0x9049, 0x10bb: 0x9049, - 0x10bc: 0x9049, 0x10bd: 0x9049, 0x10be: 0x9049, 0x10bf: 0x9049, - // Block 0x43, offset 0x10c0 - 0x10c0: 0x9049, 0x10c1: 0x9049, 0x10c2: 0x9049, 0x10c3: 0x9049, 0x10c4: 0x9049, 0x10c5: 0x9049, - 0x10c6: 0x9049, 0x10c7: 0x9049, 0x10c8: 0x9049, 0x10c9: 0x9049, 0x10ca: 0x9049, 0x10cb: 0x9049, - 0x10cc: 0x9049, 0x10cd: 0x9049, 0x10ce: 0x9049, 0x10cf: 0x9049, 0x10d0: 0x9049, 0x10d1: 0x9049, - 0x10d2: 0x9049, 0x10d3: 0x9049, 0x10d4: 0x9049, 0x10d5: 0x9049, 0x10d6: 0x9049, 0x10d7: 0x9049, - 0x10d8: 0x9049, 0x10d9: 0x9049, 0x10da: 0x9049, 0x10db: 0x9049, 0x10dc: 0x9049, 0x10dd: 0x9049, - 0x10de: 0x9049, 0x10df: 0x904a, 0x10e0: 0x904b, 0x10e1: 0xb04c, 0x10e2: 0xb04d, 0x10e3: 0xb04d, - 0x10e4: 0xb04e, 0x10e5: 0xb04f, 0x10e6: 0xb050, 0x10e7: 0xb051, 0x10e8: 0xb052, 0x10e9: 0xb053, - 0x10ea: 0xb054, 0x10eb: 0xb055, 0x10ec: 0xb056, 0x10ed: 0xb057, 0x10ee: 0xb058, 0x10ef: 0xb059, - 0x10f0: 0xb05a, 0x10f1: 0xb05b, 0x10f2: 0xb05c, 0x10f3: 0xb05d, 0x10f4: 0xb05e, 0x10f5: 
0xb05f, - 0x10f6: 0xb060, 0x10f7: 0xb061, 0x10f8: 0xb062, 0x10f9: 0xb063, 0x10fa: 0xb064, 0x10fb: 0xb065, - 0x10fc: 0xb052, 0x10fd: 0xb066, 0x10fe: 0xb067, 0x10ff: 0xb055, - // Block 0x44, offset 0x1100 - 0x1100: 0xb068, 0x1101: 0xb069, 0x1102: 0xb06a, 0x1103: 0xb06b, 0x1104: 0xb05a, 0x1105: 0xb056, - 0x1106: 0xb06c, 0x1107: 0xb06d, 0x1108: 0xb06b, 0x1109: 0xb06e, 0x110a: 0xb06b, 0x110b: 0xb06f, - 0x110c: 0xb06f, 0x110d: 0xb070, 0x110e: 0xb070, 0x110f: 0xb071, 0x1110: 0xb056, 0x1111: 0xb072, - 0x1112: 0xb073, 0x1113: 0xb072, 0x1114: 0xb074, 0x1115: 0xb073, 0x1116: 0xb075, 0x1117: 0xb075, - 0x1118: 0xb076, 0x1119: 0xb076, 0x111a: 0xb077, 0x111b: 0xb077, 0x111c: 0xb073, 0x111d: 0xb078, - 0x111e: 0xb079, 0x111f: 0xb067, 0x1120: 0xb07a, 0x1121: 0xb07b, 0x1122: 0xb07b, 0x1123: 0xb07b, - 0x1124: 0xb07b, 0x1125: 0xb07b, 0x1126: 0xb07b, 0x1127: 0xb07b, 0x1128: 0xb07b, 0x1129: 0xb07b, - 0x112a: 0xb07b, 0x112b: 0xb07b, 0x112c: 0xb07b, 0x112d: 0xb07b, 0x112e: 0xb07b, 0x112f: 0xb07b, - 0x1130: 0xb07c, 0x1131: 0xb07c, 0x1132: 0xb07c, 0x1133: 0xb07c, 0x1134: 0xb07c, 0x1135: 0xb07c, - 0x1136: 0xb07c, 0x1137: 0xb07c, 0x1138: 0xb07c, 0x1139: 0xb07c, 0x113a: 0xb07c, 0x113b: 0xb07c, - 0x113c: 0xb07c, 0x113d: 0xb07c, 0x113e: 0xb07c, - // Block 0x45, offset 0x1140 - 0x1142: 0xb07d, 0x1143: 0xb07e, 0x1144: 0xb07f, 0x1145: 0xb080, - 0x1146: 0xb07f, 0x1147: 0xb07e, 0x114a: 0xb081, 0x114b: 0xb082, - 0x114c: 0xb083, 0x114d: 0xb07f, 0x114e: 0xb080, 0x114f: 0xb07f, - 0x1152: 0xb084, 0x1153: 0xb085, 0x1154: 0xb084, 0x1155: 0xb086, 0x1156: 0xb084, 0x1157: 0xb087, - 0x115a: 0xb088, 0x115b: 0xb089, 0x115c: 0xb08a, - 0x1160: 0x908b, 0x1161: 0x908b, 0x1162: 0x908c, 0x1163: 0x908d, - 0x1164: 0x908b, 0x1165: 0x908e, 0x1166: 0x908f, 0x1168: 0xb090, 0x1169: 0xb091, - 0x116a: 0xb092, 0x116b: 0xb091, 0x116c: 0xb093, 0x116d: 0xb094, 0x116e: 0xb095, - 0x117d: 0x2000, - // Block 0x46, offset 0x1180 - 0x11a0: 0x4000, 0x11a1: 0x4000, 0x11a2: 0x4000, 0x11a3: 0x4000, - 0x11a4: 0x4000, - 0x11b0: 0x4000, 0x11b1: 0x4000, - // Block 0x47, offset 0x11c0 - 0x11c0: 0x4000, 0x11c1: 0x4000, 0x11c2: 0x4000, 0x11c3: 0x4000, 0x11c4: 0x4000, 0x11c5: 0x4000, - 0x11c6: 0x4000, 0x11c7: 0x4000, 0x11c8: 0x4000, 0x11c9: 0x4000, 0x11ca: 0x4000, 0x11cb: 0x4000, - 0x11cc: 0x4000, 0x11cd: 0x4000, 0x11ce: 0x4000, 0x11cf: 0x4000, 0x11d0: 0x4000, 0x11d1: 0x4000, - 0x11d2: 0x4000, 0x11d3: 0x4000, 0x11d4: 0x4000, 0x11d5: 0x4000, 0x11d6: 0x4000, 0x11d7: 0x4000, - 0x11d8: 0x4000, 0x11d9: 0x4000, 0x11da: 0x4000, 0x11db: 0x4000, 0x11dc: 0x4000, 0x11dd: 0x4000, - 0x11de: 0x4000, 0x11df: 0x4000, 0x11e0: 0x4000, 0x11e1: 0x4000, 0x11e2: 0x4000, 0x11e3: 0x4000, - 0x11e4: 0x4000, 0x11e5: 0x4000, 0x11e6: 0x4000, 0x11e7: 0x4000, 0x11e8: 0x4000, 0x11e9: 0x4000, - 0x11ea: 0x4000, 0x11eb: 0x4000, 0x11ec: 0x4000, 0x11ed: 0x4000, 0x11ee: 0x4000, 0x11ef: 0x4000, - 0x11f0: 0x4000, 0x11f1: 0x4000, 0x11f2: 0x4000, 0x11f3: 0x4000, 0x11f4: 0x4000, 0x11f5: 0x4000, - 0x11f6: 0x4000, 0x11f7: 0x4000, - // Block 0x48, offset 0x1200 - 0x1200: 0x4000, 0x1201: 0x4000, 0x1202: 0x4000, 0x1203: 0x4000, 0x1204: 0x4000, 0x1205: 0x4000, - 0x1206: 0x4000, 0x1207: 0x4000, 0x1208: 0x4000, 0x1209: 0x4000, 0x120a: 0x4000, 0x120b: 0x4000, - 0x120c: 0x4000, 0x120d: 0x4000, 0x120e: 0x4000, 0x120f: 0x4000, 0x1210: 0x4000, 0x1211: 0x4000, - 0x1212: 0x4000, 0x1213: 0x4000, 0x1214: 0x4000, 0x1215: 0x4000, - // Block 0x49, offset 0x1240 - 0x1240: 0x4000, 0x1241: 0x4000, 0x1242: 0x4000, 0x1243: 0x4000, 0x1244: 0x4000, 0x1245: 0x4000, - 0x1246: 0x4000, 0x1247: 0x4000, 0x1248: 0x4000, - // Block 0x4a, offset 0x1280 
- 0x1280: 0x4000, 0x1281: 0x4000, 0x1282: 0x4000, 0x1283: 0x4000, 0x1284: 0x4000, 0x1285: 0x4000, - 0x1286: 0x4000, 0x1287: 0x4000, 0x1288: 0x4000, 0x1289: 0x4000, 0x128a: 0x4000, 0x128b: 0x4000, - 0x128c: 0x4000, 0x128d: 0x4000, 0x128e: 0x4000, 0x128f: 0x4000, 0x1290: 0x4000, 0x1291: 0x4000, - 0x1292: 0x4000, 0x1293: 0x4000, 0x1294: 0x4000, 0x1295: 0x4000, 0x1296: 0x4000, 0x1297: 0x4000, - 0x1298: 0x4000, 0x1299: 0x4000, 0x129a: 0x4000, 0x129b: 0x4000, 0x129c: 0x4000, 0x129d: 0x4000, - 0x129e: 0x4000, - // Block 0x4b, offset 0x12c0 - 0x12d0: 0x4000, 0x12d1: 0x4000, - 0x12d2: 0x4000, - 0x12e4: 0x4000, 0x12e5: 0x4000, 0x12e6: 0x4000, 0x12e7: 0x4000, - 0x12f0: 0x4000, 0x12f1: 0x4000, 0x12f2: 0x4000, 0x12f3: 0x4000, 0x12f4: 0x4000, 0x12f5: 0x4000, - 0x12f6: 0x4000, 0x12f7: 0x4000, 0x12f8: 0x4000, 0x12f9: 0x4000, 0x12fa: 0x4000, 0x12fb: 0x4000, - 0x12fc: 0x4000, 0x12fd: 0x4000, 0x12fe: 0x4000, 0x12ff: 0x4000, - // Block 0x4c, offset 0x1300 - 0x1300: 0x4000, 0x1301: 0x4000, 0x1302: 0x4000, 0x1303: 0x4000, 0x1304: 0x4000, 0x1305: 0x4000, - 0x1306: 0x4000, 0x1307: 0x4000, 0x1308: 0x4000, 0x1309: 0x4000, 0x130a: 0x4000, 0x130b: 0x4000, - 0x130c: 0x4000, 0x130d: 0x4000, 0x130e: 0x4000, 0x130f: 0x4000, 0x1310: 0x4000, 0x1311: 0x4000, - 0x1312: 0x4000, 0x1313: 0x4000, 0x1314: 0x4000, 0x1315: 0x4000, 0x1316: 0x4000, 0x1317: 0x4000, - 0x1318: 0x4000, 0x1319: 0x4000, 0x131a: 0x4000, 0x131b: 0x4000, 0x131c: 0x4000, 0x131d: 0x4000, - 0x131e: 0x4000, 0x131f: 0x4000, 0x1320: 0x4000, 0x1321: 0x4000, 0x1322: 0x4000, 0x1323: 0x4000, - 0x1324: 0x4000, 0x1325: 0x4000, 0x1326: 0x4000, 0x1327: 0x4000, 0x1328: 0x4000, 0x1329: 0x4000, - 0x132a: 0x4000, 0x132b: 0x4000, 0x132c: 0x4000, 0x132d: 0x4000, 0x132e: 0x4000, 0x132f: 0x4000, - 0x1330: 0x4000, 0x1331: 0x4000, 0x1332: 0x4000, 0x1333: 0x4000, 0x1334: 0x4000, 0x1335: 0x4000, - 0x1336: 0x4000, 0x1337: 0x4000, 0x1338: 0x4000, 0x1339: 0x4000, 0x133a: 0x4000, 0x133b: 0x4000, - // Block 0x4d, offset 0x1340 - 0x1344: 0x4000, - // Block 0x4e, offset 0x1380 - 0x138f: 0x4000, - // Block 0x4f, offset 0x13c0 - 0x13c0: 0x2000, 0x13c1: 0x2000, 0x13c2: 0x2000, 0x13c3: 0x2000, 0x13c4: 0x2000, 0x13c5: 0x2000, - 0x13c6: 0x2000, 0x13c7: 0x2000, 0x13c8: 0x2000, 0x13c9: 0x2000, 0x13ca: 0x2000, - 0x13d0: 0x2000, 0x13d1: 0x2000, - 0x13d2: 0x2000, 0x13d3: 0x2000, 0x13d4: 0x2000, 0x13d5: 0x2000, 0x13d6: 0x2000, 0x13d7: 0x2000, - 0x13d8: 0x2000, 0x13d9: 0x2000, 0x13da: 0x2000, 0x13db: 0x2000, 0x13dc: 0x2000, 0x13dd: 0x2000, - 0x13de: 0x2000, 0x13df: 0x2000, 0x13e0: 0x2000, 0x13e1: 0x2000, 0x13e2: 0x2000, 0x13e3: 0x2000, - 0x13e4: 0x2000, 0x13e5: 0x2000, 0x13e6: 0x2000, 0x13e7: 0x2000, 0x13e8: 0x2000, 0x13e9: 0x2000, - 0x13ea: 0x2000, 0x13eb: 0x2000, 0x13ec: 0x2000, 0x13ed: 0x2000, - 0x13f0: 0x2000, 0x13f1: 0x2000, 0x13f2: 0x2000, 0x13f3: 0x2000, 0x13f4: 0x2000, 0x13f5: 0x2000, - 0x13f6: 0x2000, 0x13f7: 0x2000, 0x13f8: 0x2000, 0x13f9: 0x2000, 0x13fa: 0x2000, 0x13fb: 0x2000, - 0x13fc: 0x2000, 0x13fd: 0x2000, 0x13fe: 0x2000, 0x13ff: 0x2000, - // Block 0x50, offset 0x1400 - 0x1400: 0x2000, 0x1401: 0x2000, 0x1402: 0x2000, 0x1403: 0x2000, 0x1404: 0x2000, 0x1405: 0x2000, - 0x1406: 0x2000, 0x1407: 0x2000, 0x1408: 0x2000, 0x1409: 0x2000, 0x140a: 0x2000, 0x140b: 0x2000, - 0x140c: 0x2000, 0x140d: 0x2000, 0x140e: 0x2000, 0x140f: 0x2000, 0x1410: 0x2000, 0x1411: 0x2000, - 0x1412: 0x2000, 0x1413: 0x2000, 0x1414: 0x2000, 0x1415: 0x2000, 0x1416: 0x2000, 0x1417: 0x2000, - 0x1418: 0x2000, 0x1419: 0x2000, 0x141a: 0x2000, 0x141b: 0x2000, 0x141c: 0x2000, 0x141d: 0x2000, - 0x141e: 0x2000, 0x141f: 0x2000, 
0x1420: 0x2000, 0x1421: 0x2000, 0x1422: 0x2000, 0x1423: 0x2000, - 0x1424: 0x2000, 0x1425: 0x2000, 0x1426: 0x2000, 0x1427: 0x2000, 0x1428: 0x2000, 0x1429: 0x2000, - 0x1430: 0x2000, 0x1431: 0x2000, 0x1432: 0x2000, 0x1433: 0x2000, 0x1434: 0x2000, 0x1435: 0x2000, - 0x1436: 0x2000, 0x1437: 0x2000, 0x1438: 0x2000, 0x1439: 0x2000, 0x143a: 0x2000, 0x143b: 0x2000, - 0x143c: 0x2000, 0x143d: 0x2000, 0x143e: 0x2000, 0x143f: 0x2000, - // Block 0x51, offset 0x1440 - 0x1440: 0x2000, 0x1441: 0x2000, 0x1442: 0x2000, 0x1443: 0x2000, 0x1444: 0x2000, 0x1445: 0x2000, - 0x1446: 0x2000, 0x1447: 0x2000, 0x1448: 0x2000, 0x1449: 0x2000, 0x144a: 0x2000, 0x144b: 0x2000, - 0x144c: 0x2000, 0x144d: 0x2000, 0x144e: 0x4000, 0x144f: 0x2000, 0x1450: 0x2000, 0x1451: 0x4000, - 0x1452: 0x4000, 0x1453: 0x4000, 0x1454: 0x4000, 0x1455: 0x4000, 0x1456: 0x4000, 0x1457: 0x4000, - 0x1458: 0x4000, 0x1459: 0x4000, 0x145a: 0x4000, 0x145b: 0x2000, 0x145c: 0x2000, 0x145d: 0x2000, - 0x145e: 0x2000, 0x145f: 0x2000, 0x1460: 0x2000, 0x1461: 0x2000, 0x1462: 0x2000, 0x1463: 0x2000, - 0x1464: 0x2000, 0x1465: 0x2000, 0x1466: 0x2000, 0x1467: 0x2000, 0x1468: 0x2000, 0x1469: 0x2000, - 0x146a: 0x2000, 0x146b: 0x2000, 0x146c: 0x2000, - // Block 0x52, offset 0x1480 - 0x1480: 0x4000, 0x1481: 0x4000, 0x1482: 0x4000, - 0x1490: 0x4000, 0x1491: 0x4000, - 0x1492: 0x4000, 0x1493: 0x4000, 0x1494: 0x4000, 0x1495: 0x4000, 0x1496: 0x4000, 0x1497: 0x4000, - 0x1498: 0x4000, 0x1499: 0x4000, 0x149a: 0x4000, 0x149b: 0x4000, 0x149c: 0x4000, 0x149d: 0x4000, - 0x149e: 0x4000, 0x149f: 0x4000, 0x14a0: 0x4000, 0x14a1: 0x4000, 0x14a2: 0x4000, 0x14a3: 0x4000, - 0x14a4: 0x4000, 0x14a5: 0x4000, 0x14a6: 0x4000, 0x14a7: 0x4000, 0x14a8: 0x4000, 0x14a9: 0x4000, - 0x14aa: 0x4000, 0x14ab: 0x4000, 0x14ac: 0x4000, 0x14ad: 0x4000, 0x14ae: 0x4000, 0x14af: 0x4000, - 0x14b0: 0x4000, 0x14b1: 0x4000, 0x14b2: 0x4000, 0x14b3: 0x4000, 0x14b4: 0x4000, 0x14b5: 0x4000, - 0x14b6: 0x4000, 0x14b7: 0x4000, 0x14b8: 0x4000, 0x14b9: 0x4000, 0x14ba: 0x4000, 0x14bb: 0x4000, - // Block 0x53, offset 0x14c0 - 0x14c0: 0x4000, 0x14c1: 0x4000, 0x14c2: 0x4000, 0x14c3: 0x4000, 0x14c4: 0x4000, 0x14c5: 0x4000, - 0x14c6: 0x4000, 0x14c7: 0x4000, 0x14c8: 0x4000, - 0x14d0: 0x4000, 0x14d1: 0x4000, - 0x14e0: 0x4000, 0x14e1: 0x4000, 0x14e2: 0x4000, 0x14e3: 0x4000, - 0x14e4: 0x4000, 0x14e5: 0x4000, - // Block 0x54, offset 0x1500 - 0x1500: 0x4000, 0x1501: 0x4000, 0x1502: 0x4000, 0x1503: 0x4000, 0x1504: 0x4000, 0x1505: 0x4000, - 0x1506: 0x4000, 0x1507: 0x4000, 0x1508: 0x4000, 0x1509: 0x4000, 0x150a: 0x4000, 0x150b: 0x4000, - 0x150c: 0x4000, 0x150d: 0x4000, 0x150e: 0x4000, 0x150f: 0x4000, 0x1510: 0x4000, 0x1511: 0x4000, - 0x1512: 0x4000, 0x1513: 0x4000, 0x1514: 0x4000, 0x1515: 0x4000, 0x1516: 0x4000, 0x1517: 0x4000, - 0x1518: 0x4000, 0x1519: 0x4000, 0x151a: 0x4000, 0x151b: 0x4000, 0x151c: 0x4000, 0x151d: 0x4000, - 0x151e: 0x4000, 0x151f: 0x4000, 0x1520: 0x4000, - 0x152d: 0x4000, 0x152e: 0x4000, 0x152f: 0x4000, - 0x1530: 0x4000, 0x1531: 0x4000, 0x1532: 0x4000, 0x1533: 0x4000, 0x1534: 0x4000, 0x1535: 0x4000, - 0x1537: 0x4000, 0x1538: 0x4000, 0x1539: 0x4000, 0x153a: 0x4000, 0x153b: 0x4000, - 0x153c: 0x4000, 0x153d: 0x4000, 0x153e: 0x4000, 0x153f: 0x4000, - // Block 0x55, offset 0x1540 - 0x1540: 0x4000, 0x1541: 0x4000, 0x1542: 0x4000, 0x1543: 0x4000, 0x1544: 0x4000, 0x1545: 0x4000, - 0x1546: 0x4000, 0x1547: 0x4000, 0x1548: 0x4000, 0x1549: 0x4000, 0x154a: 0x4000, 0x154b: 0x4000, - 0x154c: 0x4000, 0x154d: 0x4000, 0x154e: 0x4000, 0x154f: 0x4000, 0x1550: 0x4000, 0x1551: 0x4000, - 0x1552: 0x4000, 0x1553: 0x4000, 0x1554: 0x4000, 0x1555: 
0x4000, 0x1556: 0x4000, 0x1557: 0x4000, - 0x1558: 0x4000, 0x1559: 0x4000, 0x155a: 0x4000, 0x155b: 0x4000, 0x155c: 0x4000, 0x155d: 0x4000, - 0x155e: 0x4000, 0x155f: 0x4000, 0x1560: 0x4000, 0x1561: 0x4000, 0x1562: 0x4000, 0x1563: 0x4000, - 0x1564: 0x4000, 0x1565: 0x4000, 0x1566: 0x4000, 0x1567: 0x4000, 0x1568: 0x4000, 0x1569: 0x4000, - 0x156a: 0x4000, 0x156b: 0x4000, 0x156c: 0x4000, 0x156d: 0x4000, 0x156e: 0x4000, 0x156f: 0x4000, - 0x1570: 0x4000, 0x1571: 0x4000, 0x1572: 0x4000, 0x1573: 0x4000, 0x1574: 0x4000, 0x1575: 0x4000, - 0x1576: 0x4000, 0x1577: 0x4000, 0x1578: 0x4000, 0x1579: 0x4000, 0x157a: 0x4000, 0x157b: 0x4000, - 0x157c: 0x4000, 0x157e: 0x4000, 0x157f: 0x4000, - // Block 0x56, offset 0x1580 - 0x1580: 0x4000, 0x1581: 0x4000, 0x1582: 0x4000, 0x1583: 0x4000, 0x1584: 0x4000, 0x1585: 0x4000, - 0x1586: 0x4000, 0x1587: 0x4000, 0x1588: 0x4000, 0x1589: 0x4000, 0x158a: 0x4000, 0x158b: 0x4000, - 0x158c: 0x4000, 0x158d: 0x4000, 0x158e: 0x4000, 0x158f: 0x4000, 0x1590: 0x4000, 0x1591: 0x4000, - 0x1592: 0x4000, 0x1593: 0x4000, - 0x15a0: 0x4000, 0x15a1: 0x4000, 0x15a2: 0x4000, 0x15a3: 0x4000, - 0x15a4: 0x4000, 0x15a5: 0x4000, 0x15a6: 0x4000, 0x15a7: 0x4000, 0x15a8: 0x4000, 0x15a9: 0x4000, - 0x15aa: 0x4000, 0x15ab: 0x4000, 0x15ac: 0x4000, 0x15ad: 0x4000, 0x15ae: 0x4000, 0x15af: 0x4000, - 0x15b0: 0x4000, 0x15b1: 0x4000, 0x15b2: 0x4000, 0x15b3: 0x4000, 0x15b4: 0x4000, 0x15b5: 0x4000, - 0x15b6: 0x4000, 0x15b7: 0x4000, 0x15b8: 0x4000, 0x15b9: 0x4000, 0x15ba: 0x4000, 0x15bb: 0x4000, - 0x15bc: 0x4000, 0x15bd: 0x4000, 0x15be: 0x4000, 0x15bf: 0x4000, - // Block 0x57, offset 0x15c0 - 0x15c0: 0x4000, 0x15c1: 0x4000, 0x15c2: 0x4000, 0x15c3: 0x4000, 0x15c4: 0x4000, 0x15c5: 0x4000, - 0x15c6: 0x4000, 0x15c7: 0x4000, 0x15c8: 0x4000, 0x15c9: 0x4000, 0x15ca: 0x4000, - 0x15cf: 0x4000, 0x15d0: 0x4000, 0x15d1: 0x4000, - 0x15d2: 0x4000, 0x15d3: 0x4000, - 0x15e0: 0x4000, 0x15e1: 0x4000, 0x15e2: 0x4000, 0x15e3: 0x4000, - 0x15e4: 0x4000, 0x15e5: 0x4000, 0x15e6: 0x4000, 0x15e7: 0x4000, 0x15e8: 0x4000, 0x15e9: 0x4000, - 0x15ea: 0x4000, 0x15eb: 0x4000, 0x15ec: 0x4000, 0x15ed: 0x4000, 0x15ee: 0x4000, 0x15ef: 0x4000, - 0x15f0: 0x4000, 0x15f4: 0x4000, - 0x15f8: 0x4000, 0x15f9: 0x4000, 0x15fa: 0x4000, 0x15fb: 0x4000, - 0x15fc: 0x4000, 0x15fd: 0x4000, 0x15fe: 0x4000, 0x15ff: 0x4000, - // Block 0x58, offset 0x1600 - 0x1600: 0x4000, 0x1601: 0x4000, 0x1602: 0x4000, 0x1603: 0x4000, 0x1604: 0x4000, 0x1605: 0x4000, - 0x1606: 0x4000, 0x1607: 0x4000, 0x1608: 0x4000, 0x1609: 0x4000, 0x160a: 0x4000, 0x160b: 0x4000, - 0x160c: 0x4000, 0x160d: 0x4000, 0x160e: 0x4000, 0x160f: 0x4000, 0x1610: 0x4000, 0x1611: 0x4000, - 0x1612: 0x4000, 0x1613: 0x4000, 0x1614: 0x4000, 0x1615: 0x4000, 0x1616: 0x4000, 0x1617: 0x4000, - 0x1618: 0x4000, 0x1619: 0x4000, 0x161a: 0x4000, 0x161b: 0x4000, 0x161c: 0x4000, 0x161d: 0x4000, - 0x161e: 0x4000, 0x161f: 0x4000, 0x1620: 0x4000, 0x1621: 0x4000, 0x1622: 0x4000, 0x1623: 0x4000, - 0x1624: 0x4000, 0x1625: 0x4000, 0x1626: 0x4000, 0x1627: 0x4000, 0x1628: 0x4000, 0x1629: 0x4000, - 0x162a: 0x4000, 0x162b: 0x4000, 0x162c: 0x4000, 0x162d: 0x4000, 0x162e: 0x4000, 0x162f: 0x4000, - 0x1630: 0x4000, 0x1631: 0x4000, 0x1632: 0x4000, 0x1633: 0x4000, 0x1634: 0x4000, 0x1635: 0x4000, - 0x1636: 0x4000, 0x1637: 0x4000, 0x1638: 0x4000, 0x1639: 0x4000, 0x163a: 0x4000, 0x163b: 0x4000, - 0x163c: 0x4000, 0x163d: 0x4000, 0x163e: 0x4000, - // Block 0x59, offset 0x1640 - 0x1640: 0x4000, 0x1642: 0x4000, 0x1643: 0x4000, 0x1644: 0x4000, 0x1645: 0x4000, - 0x1646: 0x4000, 0x1647: 0x4000, 0x1648: 0x4000, 0x1649: 0x4000, 0x164a: 0x4000, 0x164b: 
0x4000, - 0x164c: 0x4000, 0x164d: 0x4000, 0x164e: 0x4000, 0x164f: 0x4000, 0x1650: 0x4000, 0x1651: 0x4000, - 0x1652: 0x4000, 0x1653: 0x4000, 0x1654: 0x4000, 0x1655: 0x4000, 0x1656: 0x4000, 0x1657: 0x4000, - 0x1658: 0x4000, 0x1659: 0x4000, 0x165a: 0x4000, 0x165b: 0x4000, 0x165c: 0x4000, 0x165d: 0x4000, - 0x165e: 0x4000, 0x165f: 0x4000, 0x1660: 0x4000, 0x1661: 0x4000, 0x1662: 0x4000, 0x1663: 0x4000, - 0x1664: 0x4000, 0x1665: 0x4000, 0x1666: 0x4000, 0x1667: 0x4000, 0x1668: 0x4000, 0x1669: 0x4000, - 0x166a: 0x4000, 0x166b: 0x4000, 0x166c: 0x4000, 0x166d: 0x4000, 0x166e: 0x4000, 0x166f: 0x4000, - 0x1670: 0x4000, 0x1671: 0x4000, 0x1672: 0x4000, 0x1673: 0x4000, 0x1674: 0x4000, 0x1675: 0x4000, - 0x1676: 0x4000, 0x1677: 0x4000, 0x1678: 0x4000, 0x1679: 0x4000, 0x167a: 0x4000, 0x167b: 0x4000, - 0x167c: 0x4000, 0x167d: 0x4000, 0x167e: 0x4000, 0x167f: 0x4000, - // Block 0x5a, offset 0x1680 - 0x1680: 0x4000, 0x1681: 0x4000, 0x1682: 0x4000, 0x1683: 0x4000, 0x1684: 0x4000, 0x1685: 0x4000, - 0x1686: 0x4000, 0x1687: 0x4000, 0x1688: 0x4000, 0x1689: 0x4000, 0x168a: 0x4000, 0x168b: 0x4000, - 0x168c: 0x4000, 0x168d: 0x4000, 0x168e: 0x4000, 0x168f: 0x4000, 0x1690: 0x4000, 0x1691: 0x4000, - 0x1692: 0x4000, 0x1693: 0x4000, 0x1694: 0x4000, 0x1695: 0x4000, 0x1696: 0x4000, 0x1697: 0x4000, - 0x1698: 0x4000, 0x1699: 0x4000, 0x169a: 0x4000, 0x169b: 0x4000, 0x169c: 0x4000, 0x169d: 0x4000, - 0x169e: 0x4000, 0x169f: 0x4000, 0x16a0: 0x4000, 0x16a1: 0x4000, 0x16a2: 0x4000, 0x16a3: 0x4000, - 0x16a4: 0x4000, 0x16a5: 0x4000, 0x16a6: 0x4000, 0x16a7: 0x4000, 0x16a8: 0x4000, 0x16a9: 0x4000, - 0x16aa: 0x4000, 0x16ab: 0x4000, 0x16ac: 0x4000, 0x16ad: 0x4000, 0x16ae: 0x4000, 0x16af: 0x4000, - 0x16b0: 0x4000, 0x16b1: 0x4000, 0x16b2: 0x4000, 0x16b3: 0x4000, 0x16b4: 0x4000, 0x16b5: 0x4000, - 0x16b6: 0x4000, 0x16b7: 0x4000, 0x16b8: 0x4000, 0x16b9: 0x4000, 0x16ba: 0x4000, 0x16bb: 0x4000, - 0x16bc: 0x4000, 0x16bf: 0x4000, - // Block 0x5b, offset 0x16c0 - 0x16c0: 0x4000, 0x16c1: 0x4000, 0x16c2: 0x4000, 0x16c3: 0x4000, 0x16c4: 0x4000, 0x16c5: 0x4000, - 0x16c6: 0x4000, 0x16c7: 0x4000, 0x16c8: 0x4000, 0x16c9: 0x4000, 0x16ca: 0x4000, 0x16cb: 0x4000, - 0x16cc: 0x4000, 0x16cd: 0x4000, 0x16ce: 0x4000, 0x16cf: 0x4000, 0x16d0: 0x4000, 0x16d1: 0x4000, - 0x16d2: 0x4000, 0x16d3: 0x4000, 0x16d4: 0x4000, 0x16d5: 0x4000, 0x16d6: 0x4000, 0x16d7: 0x4000, - 0x16d8: 0x4000, 0x16d9: 0x4000, 0x16da: 0x4000, 0x16db: 0x4000, 0x16dc: 0x4000, 0x16dd: 0x4000, - 0x16de: 0x4000, 0x16df: 0x4000, 0x16e0: 0x4000, 0x16e1: 0x4000, 0x16e2: 0x4000, 0x16e3: 0x4000, - 0x16e4: 0x4000, 0x16e5: 0x4000, 0x16e6: 0x4000, 0x16e7: 0x4000, 0x16e8: 0x4000, 0x16e9: 0x4000, - 0x16ea: 0x4000, 0x16eb: 0x4000, 0x16ec: 0x4000, 0x16ed: 0x4000, 0x16ee: 0x4000, 0x16ef: 0x4000, - 0x16f0: 0x4000, 0x16f1: 0x4000, 0x16f2: 0x4000, 0x16f3: 0x4000, 0x16f4: 0x4000, 0x16f5: 0x4000, - 0x16f6: 0x4000, 0x16f7: 0x4000, 0x16f8: 0x4000, 0x16f9: 0x4000, 0x16fa: 0x4000, 0x16fb: 0x4000, - 0x16fc: 0x4000, 0x16fd: 0x4000, - // Block 0x5c, offset 0x1700 - 0x170b: 0x4000, - 0x170c: 0x4000, 0x170d: 0x4000, 0x170e: 0x4000, 0x1710: 0x4000, 0x1711: 0x4000, - 0x1712: 0x4000, 0x1713: 0x4000, 0x1714: 0x4000, 0x1715: 0x4000, 0x1716: 0x4000, 0x1717: 0x4000, - 0x1718: 0x4000, 0x1719: 0x4000, 0x171a: 0x4000, 0x171b: 0x4000, 0x171c: 0x4000, 0x171d: 0x4000, - 0x171e: 0x4000, 0x171f: 0x4000, 0x1720: 0x4000, 0x1721: 0x4000, 0x1722: 0x4000, 0x1723: 0x4000, - 0x1724: 0x4000, 0x1725: 0x4000, 0x1726: 0x4000, 0x1727: 0x4000, - 0x173a: 0x4000, - // Block 0x5d, offset 0x1740 - 0x1755: 0x4000, 0x1756: 0x4000, - 0x1764: 0x4000, - // Block 
0x5e, offset 0x1780 - 0x17bb: 0x4000, - 0x17bc: 0x4000, 0x17bd: 0x4000, 0x17be: 0x4000, 0x17bf: 0x4000, - // Block 0x5f, offset 0x17c0 - 0x17c0: 0x4000, 0x17c1: 0x4000, 0x17c2: 0x4000, 0x17c3: 0x4000, 0x17c4: 0x4000, 0x17c5: 0x4000, - 0x17c6: 0x4000, 0x17c7: 0x4000, 0x17c8: 0x4000, 0x17c9: 0x4000, 0x17ca: 0x4000, 0x17cb: 0x4000, - 0x17cc: 0x4000, 0x17cd: 0x4000, 0x17ce: 0x4000, 0x17cf: 0x4000, - // Block 0x60, offset 0x1800 - 0x1800: 0x4000, 0x1801: 0x4000, 0x1802: 0x4000, 0x1803: 0x4000, 0x1804: 0x4000, 0x1805: 0x4000, - 0x180c: 0x4000, 0x1810: 0x4000, 0x1811: 0x4000, - 0x1812: 0x4000, 0x1815: 0x4000, 0x1816: 0x4000, 0x1817: 0x4000, - 0x182b: 0x4000, 0x182c: 0x4000, - 0x1834: 0x4000, 0x1835: 0x4000, - 0x1836: 0x4000, 0x1837: 0x4000, 0x1838: 0x4000, 0x1839: 0x4000, 0x183a: 0x4000, 0x183b: 0x4000, - 0x183c: 0x4000, - // Block 0x61, offset 0x1840 - 0x1860: 0x4000, 0x1861: 0x4000, 0x1862: 0x4000, 0x1863: 0x4000, - 0x1864: 0x4000, 0x1865: 0x4000, 0x1866: 0x4000, 0x1867: 0x4000, 0x1868: 0x4000, 0x1869: 0x4000, - 0x186a: 0x4000, 0x186b: 0x4000, - // Block 0x62, offset 0x1880 - 0x188c: 0x4000, 0x188d: 0x4000, 0x188e: 0x4000, 0x188f: 0x4000, 0x1890: 0x4000, 0x1891: 0x4000, - 0x1892: 0x4000, 0x1893: 0x4000, 0x1894: 0x4000, 0x1895: 0x4000, 0x1896: 0x4000, 0x1897: 0x4000, - 0x1898: 0x4000, 0x1899: 0x4000, 0x189a: 0x4000, 0x189b: 0x4000, 0x189c: 0x4000, 0x189d: 0x4000, - 0x189e: 0x4000, 0x189f: 0x4000, 0x18a0: 0x4000, 0x18a1: 0x4000, 0x18a2: 0x4000, 0x18a3: 0x4000, - 0x18a4: 0x4000, 0x18a5: 0x4000, 0x18a6: 0x4000, 0x18a7: 0x4000, 0x18a8: 0x4000, 0x18a9: 0x4000, - 0x18aa: 0x4000, 0x18ab: 0x4000, 0x18ac: 0x4000, 0x18ad: 0x4000, 0x18ae: 0x4000, 0x18af: 0x4000, - 0x18b0: 0x4000, 0x18b1: 0x4000, 0x18b2: 0x4000, 0x18b3: 0x4000, 0x18b4: 0x4000, 0x18b5: 0x4000, - 0x18b6: 0x4000, 0x18b7: 0x4000, 0x18b8: 0x4000, 0x18b9: 0x4000, 0x18ba: 0x4000, - 0x18bc: 0x4000, 0x18bd: 0x4000, 0x18be: 0x4000, 0x18bf: 0x4000, - // Block 0x63, offset 0x18c0 - 0x18c0: 0x4000, 0x18c1: 0x4000, 0x18c2: 0x4000, 0x18c3: 0x4000, 0x18c4: 0x4000, 0x18c5: 0x4000, - 0x18c7: 0x4000, 0x18c8: 0x4000, 0x18c9: 0x4000, 0x18ca: 0x4000, 0x18cb: 0x4000, - 0x18cc: 0x4000, 0x18cd: 0x4000, 0x18ce: 0x4000, 0x18cf: 0x4000, 0x18d0: 0x4000, 0x18d1: 0x4000, - 0x18d2: 0x4000, 0x18d3: 0x4000, 0x18d4: 0x4000, 0x18d5: 0x4000, 0x18d6: 0x4000, 0x18d7: 0x4000, - 0x18d8: 0x4000, 0x18d9: 0x4000, 0x18da: 0x4000, 0x18db: 0x4000, 0x18dc: 0x4000, 0x18dd: 0x4000, - 0x18de: 0x4000, 0x18df: 0x4000, 0x18e0: 0x4000, 0x18e1: 0x4000, 0x18e2: 0x4000, 0x18e3: 0x4000, - 0x18e4: 0x4000, 0x18e5: 0x4000, 0x18e6: 0x4000, 0x18e7: 0x4000, 0x18e8: 0x4000, 0x18e9: 0x4000, - 0x18ea: 0x4000, 0x18eb: 0x4000, 0x18ec: 0x4000, 0x18ed: 0x4000, 0x18ee: 0x4000, 0x18ef: 0x4000, - 0x18f0: 0x4000, 0x18f1: 0x4000, 0x18f2: 0x4000, 0x18f3: 0x4000, 0x18f4: 0x4000, 0x18f5: 0x4000, - 0x18f6: 0x4000, 0x18f7: 0x4000, 0x18f8: 0x4000, 0x18fa: 0x4000, 0x18fb: 0x4000, - 0x18fc: 0x4000, 0x18fd: 0x4000, 0x18fe: 0x4000, 0x18ff: 0x4000, - // Block 0x64, offset 0x1900 - 0x1900: 0x4000, 0x1901: 0x4000, 0x1902: 0x4000, 0x1903: 0x4000, 0x1904: 0x4000, 0x1905: 0x4000, - 0x1906: 0x4000, 0x1907: 0x4000, 0x1908: 0x4000, 0x1909: 0x4000, 0x190a: 0x4000, 0x190b: 0x4000, - 0x190d: 0x4000, 0x190e: 0x4000, 0x190f: 0x4000, 0x1910: 0x4000, 0x1911: 0x4000, - 0x1912: 0x4000, 0x1913: 0x4000, 0x1914: 0x4000, 0x1915: 0x4000, 0x1916: 0x4000, 0x1917: 0x4000, - 0x1918: 0x4000, 0x1919: 0x4000, 0x191a: 0x4000, 0x191b: 0x4000, 0x191c: 0x4000, 0x191d: 0x4000, - 0x191e: 0x4000, 0x191f: 0x4000, 0x1920: 0x4000, 0x1921: 0x4000, 0x1922: 0x4000, 
0x1923: 0x4000, - 0x1924: 0x4000, 0x1925: 0x4000, 0x1926: 0x4000, 0x1927: 0x4000, 0x1928: 0x4000, 0x1929: 0x4000, - 0x192a: 0x4000, 0x192b: 0x4000, 0x192c: 0x4000, 0x192d: 0x4000, 0x192e: 0x4000, 0x192f: 0x4000, - 0x1930: 0x4000, 0x1931: 0x4000, 0x1932: 0x4000, 0x1933: 0x4000, 0x1934: 0x4000, 0x1935: 0x4000, - 0x1936: 0x4000, 0x1937: 0x4000, 0x1938: 0x4000, 0x1939: 0x4000, 0x193a: 0x4000, 0x193b: 0x4000, - 0x193c: 0x4000, 0x193d: 0x4000, 0x193e: 0x4000, 0x193f: 0x4000, - // Block 0x65, offset 0x1940 - 0x1970: 0x4000, 0x1971: 0x4000, 0x1972: 0x4000, 0x1973: 0x4000, 0x1974: 0x4000, - 0x1978: 0x4000, 0x1979: 0x4000, 0x197a: 0x4000, - // Block 0x66, offset 0x1980 - 0x1980: 0x4000, 0x1981: 0x4000, 0x1982: 0x4000, 0x1983: 0x4000, 0x1984: 0x4000, 0x1985: 0x4000, - 0x1986: 0x4000, - 0x1990: 0x4000, 0x1991: 0x4000, - 0x1992: 0x4000, 0x1993: 0x4000, 0x1994: 0x4000, 0x1995: 0x4000, 0x1996: 0x4000, 0x1997: 0x4000, - 0x1998: 0x4000, 0x1999: 0x4000, 0x199a: 0x4000, 0x199b: 0x4000, 0x199c: 0x4000, 0x199d: 0x4000, - 0x199e: 0x4000, 0x199f: 0x4000, 0x19a0: 0x4000, 0x19a1: 0x4000, 0x19a2: 0x4000, 0x19a3: 0x4000, - 0x19a4: 0x4000, 0x19a5: 0x4000, 0x19a6: 0x4000, 0x19a7: 0x4000, 0x19a8: 0x4000, - 0x19b0: 0x4000, 0x19b1: 0x4000, 0x19b2: 0x4000, 0x19b3: 0x4000, 0x19b4: 0x4000, 0x19b5: 0x4000, - 0x19b6: 0x4000, - // Block 0x67, offset 0x19c0 - 0x19c0: 0x4000, 0x19c1: 0x4000, 0x19c2: 0x4000, - 0x19d0: 0x4000, 0x19d1: 0x4000, - 0x19d2: 0x4000, 0x19d3: 0x4000, 0x19d4: 0x4000, 0x19d5: 0x4000, 0x19d6: 0x4000, - // Block 0x68, offset 0x1a00 - 0x1a00: 0x2000, 0x1a01: 0x2000, 0x1a02: 0x2000, 0x1a03: 0x2000, 0x1a04: 0x2000, 0x1a05: 0x2000, - 0x1a06: 0x2000, 0x1a07: 0x2000, 0x1a08: 0x2000, 0x1a09: 0x2000, 0x1a0a: 0x2000, 0x1a0b: 0x2000, - 0x1a0c: 0x2000, 0x1a0d: 0x2000, 0x1a0e: 0x2000, 0x1a0f: 0x2000, 0x1a10: 0x2000, 0x1a11: 0x2000, - 0x1a12: 0x2000, 0x1a13: 0x2000, 0x1a14: 0x2000, 0x1a15: 0x2000, 0x1a16: 0x2000, 0x1a17: 0x2000, - 0x1a18: 0x2000, 0x1a19: 0x2000, 0x1a1a: 0x2000, 0x1a1b: 0x2000, 0x1a1c: 0x2000, 0x1a1d: 0x2000, - 0x1a1e: 0x2000, 0x1a1f: 0x2000, 0x1a20: 0x2000, 0x1a21: 0x2000, 0x1a22: 0x2000, 0x1a23: 0x2000, - 0x1a24: 0x2000, 0x1a25: 0x2000, 0x1a26: 0x2000, 0x1a27: 0x2000, 0x1a28: 0x2000, 0x1a29: 0x2000, - 0x1a2a: 0x2000, 0x1a2b: 0x2000, 0x1a2c: 0x2000, 0x1a2d: 0x2000, 0x1a2e: 0x2000, 0x1a2f: 0x2000, - 0x1a30: 0x2000, 0x1a31: 0x2000, 0x1a32: 0x2000, 0x1a33: 0x2000, 0x1a34: 0x2000, 0x1a35: 0x2000, - 0x1a36: 0x2000, 0x1a37: 0x2000, 0x1a38: 0x2000, 0x1a39: 0x2000, 0x1a3a: 0x2000, 0x1a3b: 0x2000, - 0x1a3c: 0x2000, 0x1a3d: 0x2000, -} - -// widthIndex: 22 blocks, 1408 entries, 1408 bytes -// Block 0 is the zero block. 
-var widthIndex = [1408]uint8{ - // Block 0x0, offset 0x0 - // Block 0x1, offset 0x40 - // Block 0x2, offset 0x80 - // Block 0x3, offset 0xc0 - 0xc2: 0x01, 0xc3: 0x02, 0xc4: 0x03, 0xc5: 0x04, 0xc7: 0x05, - 0xc9: 0x06, 0xcb: 0x07, 0xcc: 0x08, 0xcd: 0x09, 0xce: 0x0a, 0xcf: 0x0b, - 0xd0: 0x0c, 0xd1: 0x0d, - 0xe1: 0x02, 0xe2: 0x03, 0xe3: 0x04, 0xe4: 0x05, 0xe5: 0x06, 0xe6: 0x06, 0xe7: 0x06, - 0xe8: 0x06, 0xe9: 0x06, 0xea: 0x07, 0xeb: 0x06, 0xec: 0x06, 0xed: 0x08, 0xee: 0x09, 0xef: 0x0a, - 0xf0: 0x0f, 0xf3: 0x12, 0xf4: 0x13, - // Block 0x4, offset 0x100 - 0x104: 0x0e, 0x105: 0x0f, - // Block 0x5, offset 0x140 - 0x140: 0x10, 0x141: 0x11, 0x142: 0x12, 0x144: 0x13, 0x145: 0x14, 0x146: 0x15, 0x147: 0x16, - 0x148: 0x17, 0x149: 0x18, 0x14a: 0x19, 0x14c: 0x1a, 0x14f: 0x1b, - 0x151: 0x1c, 0x152: 0x08, 0x153: 0x1d, 0x154: 0x1e, 0x155: 0x1f, 0x156: 0x20, 0x157: 0x21, - 0x158: 0x22, 0x159: 0x23, 0x15a: 0x24, 0x15b: 0x25, 0x15c: 0x26, 0x15d: 0x27, 0x15e: 0x28, 0x15f: 0x29, - 0x166: 0x2a, - 0x16c: 0x2b, 0x16d: 0x2c, - 0x17a: 0x2d, 0x17b: 0x2e, 0x17c: 0x0e, 0x17d: 0x0e, 0x17e: 0x0e, 0x17f: 0x2f, - // Block 0x6, offset 0x180 - 0x180: 0x30, 0x181: 0x31, 0x182: 0x32, 0x183: 0x33, 0x184: 0x34, 0x185: 0x35, 0x186: 0x36, 0x187: 0x37, - 0x188: 0x38, 0x189: 0x39, 0x18a: 0x0e, 0x18b: 0x0e, 0x18c: 0x0e, 0x18d: 0x0e, 0x18e: 0x0e, 0x18f: 0x0e, - 0x190: 0x0e, 0x191: 0x0e, 0x192: 0x0e, 0x193: 0x0e, 0x194: 0x0e, 0x195: 0x0e, 0x196: 0x0e, 0x197: 0x0e, - 0x198: 0x0e, 0x199: 0x0e, 0x19a: 0x0e, 0x19b: 0x0e, 0x19c: 0x0e, 0x19d: 0x0e, 0x19e: 0x0e, 0x19f: 0x0e, - 0x1a0: 0x0e, 0x1a1: 0x0e, 0x1a2: 0x0e, 0x1a3: 0x0e, 0x1a4: 0x0e, 0x1a5: 0x0e, 0x1a6: 0x0e, 0x1a7: 0x0e, - 0x1a8: 0x0e, 0x1a9: 0x0e, 0x1aa: 0x0e, 0x1ab: 0x0e, 0x1ac: 0x0e, 0x1ad: 0x0e, 0x1ae: 0x0e, 0x1af: 0x0e, - 0x1b0: 0x0e, 0x1b1: 0x0e, 0x1b2: 0x0e, 0x1b3: 0x0e, 0x1b4: 0x0e, 0x1b5: 0x0e, 0x1b6: 0x0e, 0x1b7: 0x0e, - 0x1b8: 0x0e, 0x1b9: 0x0e, 0x1ba: 0x0e, 0x1bb: 0x0e, 0x1bc: 0x0e, 0x1bd: 0x0e, 0x1be: 0x0e, 0x1bf: 0x0e, - // Block 0x7, offset 0x1c0 - 0x1c0: 0x0e, 0x1c1: 0x0e, 0x1c2: 0x0e, 0x1c3: 0x0e, 0x1c4: 0x0e, 0x1c5: 0x0e, 0x1c6: 0x0e, 0x1c7: 0x0e, - 0x1c8: 0x0e, 0x1c9: 0x0e, 0x1ca: 0x0e, 0x1cb: 0x0e, 0x1cc: 0x0e, 0x1cd: 0x0e, 0x1ce: 0x0e, 0x1cf: 0x0e, - 0x1d0: 0x0e, 0x1d1: 0x0e, 0x1d2: 0x0e, 0x1d3: 0x0e, 0x1d4: 0x0e, 0x1d5: 0x0e, 0x1d6: 0x0e, 0x1d7: 0x0e, - 0x1d8: 0x0e, 0x1d9: 0x0e, 0x1da: 0x0e, 0x1db: 0x0e, 0x1dc: 0x0e, 0x1dd: 0x0e, 0x1de: 0x0e, 0x1df: 0x0e, - 0x1e0: 0x0e, 0x1e1: 0x0e, 0x1e2: 0x0e, 0x1e3: 0x0e, 0x1e4: 0x0e, 0x1e5: 0x0e, 0x1e6: 0x0e, 0x1e7: 0x0e, - 0x1e8: 0x0e, 0x1e9: 0x0e, 0x1ea: 0x0e, 0x1eb: 0x0e, 0x1ec: 0x0e, 0x1ed: 0x0e, 0x1ee: 0x0e, 0x1ef: 0x0e, - 0x1f0: 0x0e, 0x1f1: 0x0e, 0x1f2: 0x0e, 0x1f3: 0x0e, 0x1f4: 0x0e, 0x1f5: 0x0e, 0x1f6: 0x0e, - 0x1f8: 0x0e, 0x1f9: 0x0e, 0x1fa: 0x0e, 0x1fb: 0x0e, 0x1fc: 0x0e, 0x1fd: 0x0e, 0x1fe: 0x0e, 0x1ff: 0x0e, - // Block 0x8, offset 0x200 - 0x200: 0x0e, 0x201: 0x0e, 0x202: 0x0e, 0x203: 0x0e, 0x204: 0x0e, 0x205: 0x0e, 0x206: 0x0e, 0x207: 0x0e, - 0x208: 0x0e, 0x209: 0x0e, 0x20a: 0x0e, 0x20b: 0x0e, 0x20c: 0x0e, 0x20d: 0x0e, 0x20e: 0x0e, 0x20f: 0x0e, - 0x210: 0x0e, 0x211: 0x0e, 0x212: 0x0e, 0x213: 0x0e, 0x214: 0x0e, 0x215: 0x0e, 0x216: 0x0e, 0x217: 0x0e, - 0x218: 0x0e, 0x219: 0x0e, 0x21a: 0x0e, 0x21b: 0x0e, 0x21c: 0x0e, 0x21d: 0x0e, 0x21e: 0x0e, 0x21f: 0x0e, - 0x220: 0x0e, 0x221: 0x0e, 0x222: 0x0e, 0x223: 0x0e, 0x224: 0x0e, 0x225: 0x0e, 0x226: 0x0e, 0x227: 0x0e, - 0x228: 0x0e, 0x229: 0x0e, 0x22a: 0x0e, 0x22b: 0x0e, 0x22c: 0x0e, 0x22d: 0x0e, 0x22e: 0x0e, 0x22f: 0x0e, - 0x230: 0x0e, 0x231: 0x0e, 0x232: 0x0e, 0x233: 0x0e, 
0x234: 0x0e, 0x235: 0x0e, 0x236: 0x0e, 0x237: 0x0e, - 0x238: 0x0e, 0x239: 0x0e, 0x23a: 0x0e, 0x23b: 0x0e, 0x23c: 0x0e, 0x23d: 0x0e, 0x23e: 0x0e, 0x23f: 0x0e, - // Block 0x9, offset 0x240 - 0x240: 0x0e, 0x241: 0x0e, 0x242: 0x0e, 0x243: 0x0e, 0x244: 0x0e, 0x245: 0x0e, 0x246: 0x0e, 0x247: 0x0e, - 0x248: 0x0e, 0x249: 0x0e, 0x24a: 0x0e, 0x24b: 0x0e, 0x24c: 0x0e, 0x24d: 0x0e, 0x24e: 0x0e, 0x24f: 0x0e, - 0x250: 0x0e, 0x251: 0x0e, 0x252: 0x3a, 0x253: 0x3b, - 0x265: 0x3c, - 0x270: 0x0e, 0x271: 0x0e, 0x272: 0x0e, 0x273: 0x0e, 0x274: 0x0e, 0x275: 0x0e, 0x276: 0x0e, 0x277: 0x0e, - 0x278: 0x0e, 0x279: 0x0e, 0x27a: 0x0e, 0x27b: 0x0e, 0x27c: 0x0e, 0x27d: 0x0e, 0x27e: 0x0e, 0x27f: 0x0e, - // Block 0xa, offset 0x280 - 0x280: 0x0e, 0x281: 0x0e, 0x282: 0x0e, 0x283: 0x0e, 0x284: 0x0e, 0x285: 0x0e, 0x286: 0x0e, 0x287: 0x0e, - 0x288: 0x0e, 0x289: 0x0e, 0x28a: 0x0e, 0x28b: 0x0e, 0x28c: 0x0e, 0x28d: 0x0e, 0x28e: 0x0e, 0x28f: 0x0e, - 0x290: 0x0e, 0x291: 0x0e, 0x292: 0x0e, 0x293: 0x0e, 0x294: 0x0e, 0x295: 0x0e, 0x296: 0x0e, 0x297: 0x0e, - 0x298: 0x0e, 0x299: 0x0e, 0x29a: 0x0e, 0x29b: 0x0e, 0x29c: 0x0e, 0x29d: 0x0e, 0x29e: 0x3d, - // Block 0xb, offset 0x2c0 - 0x2c0: 0x08, 0x2c1: 0x08, 0x2c2: 0x08, 0x2c3: 0x08, 0x2c4: 0x08, 0x2c5: 0x08, 0x2c6: 0x08, 0x2c7: 0x08, - 0x2c8: 0x08, 0x2c9: 0x08, 0x2ca: 0x08, 0x2cb: 0x08, 0x2cc: 0x08, 0x2cd: 0x08, 0x2ce: 0x08, 0x2cf: 0x08, - 0x2d0: 0x08, 0x2d1: 0x08, 0x2d2: 0x08, 0x2d3: 0x08, 0x2d4: 0x08, 0x2d5: 0x08, 0x2d6: 0x08, 0x2d7: 0x08, - 0x2d8: 0x08, 0x2d9: 0x08, 0x2da: 0x08, 0x2db: 0x08, 0x2dc: 0x08, 0x2dd: 0x08, 0x2de: 0x08, 0x2df: 0x08, - 0x2e0: 0x08, 0x2e1: 0x08, 0x2e2: 0x08, 0x2e3: 0x08, 0x2e4: 0x08, 0x2e5: 0x08, 0x2e6: 0x08, 0x2e7: 0x08, - 0x2e8: 0x08, 0x2e9: 0x08, 0x2ea: 0x08, 0x2eb: 0x08, 0x2ec: 0x08, 0x2ed: 0x08, 0x2ee: 0x08, 0x2ef: 0x08, - 0x2f0: 0x08, 0x2f1: 0x08, 0x2f2: 0x08, 0x2f3: 0x08, 0x2f4: 0x08, 0x2f5: 0x08, 0x2f6: 0x08, 0x2f7: 0x08, - 0x2f8: 0x08, 0x2f9: 0x08, 0x2fa: 0x08, 0x2fb: 0x08, 0x2fc: 0x08, 0x2fd: 0x08, 0x2fe: 0x08, 0x2ff: 0x08, - // Block 0xc, offset 0x300 - 0x300: 0x08, 0x301: 0x08, 0x302: 0x08, 0x303: 0x08, 0x304: 0x08, 0x305: 0x08, 0x306: 0x08, 0x307: 0x08, - 0x308: 0x08, 0x309: 0x08, 0x30a: 0x08, 0x30b: 0x08, 0x30c: 0x08, 0x30d: 0x08, 0x30e: 0x08, 0x30f: 0x08, - 0x310: 0x08, 0x311: 0x08, 0x312: 0x08, 0x313: 0x08, 0x314: 0x08, 0x315: 0x08, 0x316: 0x08, 0x317: 0x08, - 0x318: 0x08, 0x319: 0x08, 0x31a: 0x08, 0x31b: 0x08, 0x31c: 0x08, 0x31d: 0x08, 0x31e: 0x08, 0x31f: 0x08, - 0x320: 0x08, 0x321: 0x08, 0x322: 0x08, 0x323: 0x08, 0x324: 0x0e, 0x325: 0x0e, 0x326: 0x0e, 0x327: 0x0e, - 0x328: 0x0e, 0x329: 0x0e, 0x32a: 0x0e, 0x32b: 0x0e, - 0x338: 0x3e, 0x339: 0x3f, 0x33c: 0x40, 0x33d: 0x41, 0x33e: 0x42, 0x33f: 0x43, - // Block 0xd, offset 0x340 - 0x37f: 0x44, - // Block 0xe, offset 0x380 - 0x380: 0x0e, 0x381: 0x0e, 0x382: 0x0e, 0x383: 0x0e, 0x384: 0x0e, 0x385: 0x0e, 0x386: 0x0e, 0x387: 0x0e, - 0x388: 0x0e, 0x389: 0x0e, 0x38a: 0x0e, 0x38b: 0x0e, 0x38c: 0x0e, 0x38d: 0x0e, 0x38e: 0x0e, 0x38f: 0x0e, - 0x390: 0x0e, 0x391: 0x0e, 0x392: 0x0e, 0x393: 0x0e, 0x394: 0x0e, 0x395: 0x0e, 0x396: 0x0e, 0x397: 0x0e, - 0x398: 0x0e, 0x399: 0x0e, 0x39a: 0x0e, 0x39b: 0x0e, 0x39c: 0x0e, 0x39d: 0x0e, 0x39e: 0x0e, 0x39f: 0x45, - 0x3a0: 0x0e, 0x3a1: 0x0e, 0x3a2: 0x0e, 0x3a3: 0x0e, 0x3a4: 0x0e, 0x3a5: 0x0e, 0x3a6: 0x0e, 0x3a7: 0x0e, - 0x3a8: 0x0e, 0x3a9: 0x0e, 0x3aa: 0x0e, 0x3ab: 0x0e, 0x3ac: 0x0e, 0x3ad: 0x0e, 0x3ae: 0x0e, 0x3af: 0x0e, - 0x3b0: 0x0e, 0x3b1: 0x0e, 0x3b2: 0x0e, 0x3b3: 0x46, 0x3b4: 0x47, - // Block 0xf, offset 0x3c0 - 0x3c0: 0x0e, 0x3c1: 0x0e, 0x3c2: 0x0e, 0x3c3: 0x0e, 
0x3c4: 0x48, 0x3c5: 0x49, 0x3c6: 0x0e, 0x3c7: 0x0e, - 0x3c8: 0x0e, 0x3c9: 0x0e, 0x3ca: 0x0e, 0x3cb: 0x4a, - // Block 0x10, offset 0x400 - 0x400: 0x4b, 0x403: 0x4c, 0x404: 0x4d, 0x405: 0x4e, 0x406: 0x4f, - 0x408: 0x50, 0x409: 0x51, 0x40c: 0x52, 0x40d: 0x53, 0x40e: 0x54, 0x40f: 0x55, - 0x410: 0x56, 0x411: 0x57, 0x412: 0x0e, 0x413: 0x58, 0x414: 0x59, 0x415: 0x5a, 0x416: 0x5b, 0x417: 0x5c, - 0x418: 0x0e, 0x419: 0x5d, 0x41a: 0x0e, 0x41b: 0x5e, 0x41f: 0x5f, - 0x424: 0x60, 0x425: 0x61, 0x426: 0x0e, 0x427: 0x62, - 0x429: 0x63, 0x42a: 0x64, 0x42b: 0x65, - // Block 0x11, offset 0x440 - 0x456: 0x0b, 0x457: 0x06, - 0x458: 0x0c, 0x45b: 0x0d, 0x45f: 0x0e, - 0x460: 0x06, 0x461: 0x06, 0x462: 0x06, 0x463: 0x06, 0x464: 0x06, 0x465: 0x06, 0x466: 0x06, 0x467: 0x06, - 0x468: 0x06, 0x469: 0x06, 0x46a: 0x06, 0x46b: 0x06, 0x46c: 0x06, 0x46d: 0x06, 0x46e: 0x06, 0x46f: 0x06, - 0x470: 0x06, 0x471: 0x06, 0x472: 0x06, 0x473: 0x06, 0x474: 0x06, 0x475: 0x06, 0x476: 0x06, 0x477: 0x06, - 0x478: 0x06, 0x479: 0x06, 0x47a: 0x06, 0x47b: 0x06, 0x47c: 0x06, 0x47d: 0x06, 0x47e: 0x06, 0x47f: 0x06, - // Block 0x12, offset 0x480 - 0x484: 0x08, 0x485: 0x08, 0x486: 0x08, 0x487: 0x09, - // Block 0x13, offset 0x4c0 - 0x4c0: 0x08, 0x4c1: 0x08, 0x4c2: 0x08, 0x4c3: 0x08, 0x4c4: 0x08, 0x4c5: 0x08, 0x4c6: 0x08, 0x4c7: 0x08, - 0x4c8: 0x08, 0x4c9: 0x08, 0x4ca: 0x08, 0x4cb: 0x08, 0x4cc: 0x08, 0x4cd: 0x08, 0x4ce: 0x08, 0x4cf: 0x08, - 0x4d0: 0x08, 0x4d1: 0x08, 0x4d2: 0x08, 0x4d3: 0x08, 0x4d4: 0x08, 0x4d5: 0x08, 0x4d6: 0x08, 0x4d7: 0x08, - 0x4d8: 0x08, 0x4d9: 0x08, 0x4da: 0x08, 0x4db: 0x08, 0x4dc: 0x08, 0x4dd: 0x08, 0x4de: 0x08, 0x4df: 0x08, - 0x4e0: 0x08, 0x4e1: 0x08, 0x4e2: 0x08, 0x4e3: 0x08, 0x4e4: 0x08, 0x4e5: 0x08, 0x4e6: 0x08, 0x4e7: 0x08, - 0x4e8: 0x08, 0x4e9: 0x08, 0x4ea: 0x08, 0x4eb: 0x08, 0x4ec: 0x08, 0x4ed: 0x08, 0x4ee: 0x08, 0x4ef: 0x08, - 0x4f0: 0x08, 0x4f1: 0x08, 0x4f2: 0x08, 0x4f3: 0x08, 0x4f4: 0x08, 0x4f5: 0x08, 0x4f6: 0x08, 0x4f7: 0x08, - 0x4f8: 0x08, 0x4f9: 0x08, 0x4fa: 0x08, 0x4fb: 0x08, 0x4fc: 0x08, 0x4fd: 0x08, 0x4fe: 0x08, 0x4ff: 0x66, - // Block 0x14, offset 0x500 - 0x520: 0x10, - 0x530: 0x09, 0x531: 0x09, 0x532: 0x09, 0x533: 0x09, 0x534: 0x09, 0x535: 0x09, 0x536: 0x09, 0x537: 0x09, - 0x538: 0x09, 0x539: 0x09, 0x53a: 0x09, 0x53b: 0x09, 0x53c: 0x09, 0x53d: 0x09, 0x53e: 0x09, 0x53f: 0x11, - // Block 0x15, offset 0x540 - 0x540: 0x09, 0x541: 0x09, 0x542: 0x09, 0x543: 0x09, 0x544: 0x09, 0x545: 0x09, 0x546: 0x09, 0x547: 0x09, - 0x548: 0x09, 0x549: 0x09, 0x54a: 0x09, 0x54b: 0x09, 0x54c: 0x09, 0x54d: 0x09, 0x54e: 0x09, 0x54f: 0x11, -} - -// inverseData contains 4-byte entries of the following format: -// -// <0 padding> -// -// The last byte of the UTF-8-encoded rune is xor-ed with the last byte of the -// UTF-8 encoding of the original rune. Mappings often have the following -// pattern: -// -// A -> A (U+FF21 -> U+0041) -// B -> B (U+FF22 -> U+0042) -// ... -// -// By xor-ing the last byte the same entry can be shared by many mappings. This -// reduces the total number of distinct entries by about two thirds. -// The resulting entry for the aforementioned mappings is -// -// { 0x01, 0xE0, 0x00, 0x00 } -// -// Using this entry to map U+FF21 (UTF-8 [EF BC A1]), we get -// -// E0 ^ A1 = 41. -// -// Similarly, for U+FF22 (UTF-8 [EF BC A2]), we get -// -// E0 ^ A2 = 42. -// -// Note that because of the xor-ing, the byte sequence stored in the entry is -// not valid UTF-8. 
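To make the xor trick described in the removed comment concrete, here is a minimal, hypothetical Go sketch of how one 4-byte inverseData-style entry could be applied to a source rune. This is not the golang.org/x/text/width API; decodeInverse is an illustrative name, and the layout assumed (length byte, replacement bytes, zero padding, last byte xor-ed with the source rune's last UTF-8 byte) is taken from the comment above.

package main

import "fmt"

// decodeInverse is a hypothetical helper (not part of golang.org/x/text). It
// applies one 4-byte inverseData-style entry to the UTF-8 bytes of a source
// rune: entry[0] is the length of the replacement sequence, and the
// replacement's last byte is recovered by xor-ing the stored byte with the
// last byte of the source rune's UTF-8 encoding.
func decodeInverse(entry [4]byte, src []byte) []byte {
	n := int(entry[0])
	out := make([]byte, n)
	copy(out, entry[1:1+n])
	out[n-1] ^= src[len(src)-1] // e.g. 0xE0 ^ 0xA1 = 0x41 ('A')
	return out
}

func main() {
	entry := [4]byte{0x01, 0xE0, 0x00, 0x00} // example entry from the comment above
	src := []byte{0xEF, 0xBC, 0xA1}          // UTF-8 encoding of U+FF21, fullwidth 'A'
	fmt.Printf("% X -> %q\n", src, decodeInverse(entry, src)) // EF BC A1 -> "A"
}

Run as written, this prints the mapping from the comment's worked example (U+FF21 to U+0041), showing why a single shared entry can serve many consecutive mappings.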
-var inverseData = [150][4]byte{ - {0x00, 0x00, 0x00, 0x00}, - {0x03, 0xe3, 0x80, 0xa0}, - {0x03, 0xef, 0xbc, 0xa0}, - {0x03, 0xef, 0xbc, 0xe0}, - {0x03, 0xef, 0xbd, 0xe0}, - {0x03, 0xef, 0xbf, 0x02}, - {0x03, 0xef, 0xbf, 0x00}, - {0x03, 0xef, 0xbf, 0x0e}, - {0x03, 0xef, 0xbf, 0x0c}, - {0x03, 0xef, 0xbf, 0x0f}, - {0x03, 0xef, 0xbf, 0x39}, - {0x03, 0xef, 0xbf, 0x3b}, - {0x03, 0xef, 0xbf, 0x3f}, - {0x03, 0xef, 0xbf, 0x2a}, - {0x03, 0xef, 0xbf, 0x0d}, - {0x03, 0xef, 0xbf, 0x25}, - {0x03, 0xef, 0xbd, 0x1a}, - {0x03, 0xef, 0xbd, 0x26}, - {0x01, 0xa0, 0x00, 0x00}, - {0x03, 0xef, 0xbd, 0x25}, - {0x03, 0xef, 0xbd, 0x23}, - {0x03, 0xef, 0xbd, 0x2e}, - {0x03, 0xef, 0xbe, 0x07}, - {0x03, 0xef, 0xbe, 0x05}, - {0x03, 0xef, 0xbd, 0x06}, - {0x03, 0xef, 0xbd, 0x13}, - {0x03, 0xef, 0xbd, 0x0b}, - {0x03, 0xef, 0xbd, 0x16}, - {0x03, 0xef, 0xbd, 0x0c}, - {0x03, 0xef, 0xbd, 0x15}, - {0x03, 0xef, 0xbd, 0x0d}, - {0x03, 0xef, 0xbd, 0x1c}, - {0x03, 0xef, 0xbd, 0x02}, - {0x03, 0xef, 0xbd, 0x1f}, - {0x03, 0xef, 0xbd, 0x1d}, - {0x03, 0xef, 0xbd, 0x17}, - {0x03, 0xef, 0xbd, 0x08}, - {0x03, 0xef, 0xbd, 0x09}, - {0x03, 0xef, 0xbd, 0x0e}, - {0x03, 0xef, 0xbd, 0x04}, - {0x03, 0xef, 0xbd, 0x05}, - {0x03, 0xef, 0xbe, 0x3f}, - {0x03, 0xef, 0xbe, 0x00}, - {0x03, 0xef, 0xbd, 0x2c}, - {0x03, 0xef, 0xbe, 0x06}, - {0x03, 0xef, 0xbe, 0x0c}, - {0x03, 0xef, 0xbe, 0x0f}, - {0x03, 0xef, 0xbe, 0x0d}, - {0x03, 0xef, 0xbe, 0x0b}, - {0x03, 0xef, 0xbe, 0x19}, - {0x03, 0xef, 0xbe, 0x15}, - {0x03, 0xef, 0xbe, 0x11}, - {0x03, 0xef, 0xbe, 0x31}, - {0x03, 0xef, 0xbe, 0x33}, - {0x03, 0xef, 0xbd, 0x0f}, - {0x03, 0xef, 0xbe, 0x30}, - {0x03, 0xef, 0xbe, 0x3e}, - {0x03, 0xef, 0xbe, 0x32}, - {0x03, 0xef, 0xbe, 0x36}, - {0x03, 0xef, 0xbd, 0x14}, - {0x03, 0xef, 0xbe, 0x2e}, - {0x03, 0xef, 0xbd, 0x1e}, - {0x03, 0xef, 0xbe, 0x10}, - {0x03, 0xef, 0xbf, 0x13}, - {0x03, 0xef, 0xbf, 0x15}, - {0x03, 0xef, 0xbf, 0x17}, - {0x03, 0xef, 0xbf, 0x1f}, - {0x03, 0xef, 0xbf, 0x1d}, - {0x03, 0xef, 0xbf, 0x1b}, - {0x03, 0xef, 0xbf, 0x09}, - {0x03, 0xef, 0xbf, 0x0b}, - {0x03, 0xef, 0xbf, 0x37}, - {0x03, 0xef, 0xbe, 0x04}, - {0x01, 0xe0, 0x00, 0x00}, - {0x03, 0xe2, 0xa6, 0x1a}, - {0x03, 0xe2, 0xa6, 0x26}, - {0x03, 0xe3, 0x80, 0x23}, - {0x03, 0xe3, 0x80, 0x2e}, - {0x03, 0xe3, 0x80, 0x25}, - {0x03, 0xe3, 0x83, 0x1e}, - {0x03, 0xe3, 0x83, 0x14}, - {0x03, 0xe3, 0x82, 0x06}, - {0x03, 0xe3, 0x82, 0x0b}, - {0x03, 0xe3, 0x82, 0x0c}, - {0x03, 0xe3, 0x82, 0x0d}, - {0x03, 0xe3, 0x82, 0x02}, - {0x03, 0xe3, 0x83, 0x0f}, - {0x03, 0xe3, 0x83, 0x08}, - {0x03, 0xe3, 0x83, 0x09}, - {0x03, 0xe3, 0x83, 0x2c}, - {0x03, 0xe3, 0x83, 0x0c}, - {0x03, 0xe3, 0x82, 0x13}, - {0x03, 0xe3, 0x82, 0x16}, - {0x03, 0xe3, 0x82, 0x15}, - {0x03, 0xe3, 0x82, 0x1c}, - {0x03, 0xe3, 0x82, 0x1f}, - {0x03, 0xe3, 0x82, 0x1d}, - {0x03, 0xe3, 0x82, 0x1a}, - {0x03, 0xe3, 0x82, 0x17}, - {0x03, 0xe3, 0x82, 0x08}, - {0x03, 0xe3, 0x82, 0x09}, - {0x03, 0xe3, 0x82, 0x0e}, - {0x03, 0xe3, 0x82, 0x04}, - {0x03, 0xe3, 0x82, 0x05}, - {0x03, 0xe3, 0x82, 0x3f}, - {0x03, 0xe3, 0x83, 0x00}, - {0x03, 0xe3, 0x83, 0x06}, - {0x03, 0xe3, 0x83, 0x05}, - {0x03, 0xe3, 0x83, 0x0d}, - {0x03, 0xe3, 0x83, 0x0b}, - {0x03, 0xe3, 0x83, 0x07}, - {0x03, 0xe3, 0x83, 0x19}, - {0x03, 0xe3, 0x83, 0x15}, - {0x03, 0xe3, 0x83, 0x11}, - {0x03, 0xe3, 0x83, 0x31}, - {0x03, 0xe3, 0x83, 0x33}, - {0x03, 0xe3, 0x83, 0x30}, - {0x03, 0xe3, 0x83, 0x3e}, - {0x03, 0xe3, 0x83, 0x32}, - {0x03, 0xe3, 0x83, 0x36}, - {0x03, 0xe3, 0x83, 0x2e}, - {0x03, 0xe3, 0x82, 0x07}, - {0x03, 0xe3, 0x85, 0x04}, - {0x03, 0xe3, 0x84, 0x10}, - {0x03, 0xe3, 0x85, 0x30}, - {0x03, 0xe3, 0x85, 
0x0d}, - {0x03, 0xe3, 0x85, 0x13}, - {0x03, 0xe3, 0x85, 0x15}, - {0x03, 0xe3, 0x85, 0x17}, - {0x03, 0xe3, 0x85, 0x1f}, - {0x03, 0xe3, 0x85, 0x1d}, - {0x03, 0xe3, 0x85, 0x1b}, - {0x03, 0xe3, 0x85, 0x09}, - {0x03, 0xe3, 0x85, 0x0f}, - {0x03, 0xe3, 0x85, 0x0b}, - {0x03, 0xe3, 0x85, 0x37}, - {0x03, 0xe3, 0x85, 0x3b}, - {0x03, 0xe3, 0x85, 0x39}, - {0x03, 0xe3, 0x85, 0x3f}, - {0x02, 0xc2, 0x02, 0x00}, - {0x02, 0xc2, 0x0e, 0x00}, - {0x02, 0xc2, 0x0c, 0x00}, - {0x02, 0xc2, 0x00, 0x00}, - {0x03, 0xe2, 0x82, 0x0f}, - {0x03, 0xe2, 0x94, 0x2a}, - {0x03, 0xe2, 0x86, 0x39}, - {0x03, 0xe2, 0x86, 0x3b}, - {0x03, 0xe2, 0x86, 0x3f}, - {0x03, 0xe2, 0x96, 0x0d}, - {0x03, 0xe2, 0x97, 0x25}, -} - -// Total table size 15448 bytes (15KiB) diff --git a/vendor/golang.org/x/text/width/tables15.0.0.go b/vendor/golang.org/x/text/width/tables15.0.0.go deleted file mode 100644 index 2b8528967..000000000 --- a/vendor/golang.org/x/text/width/tables15.0.0.go +++ /dev/null @@ -1,1367 +0,0 @@ -// Code generated by running "go generate" in golang.org/x/text. DO NOT EDIT. - -//go:build go1.21 - -package width - -// UnicodeVersion is the Unicode version from which the tables in this package are derived. -const UnicodeVersion = "15.0.0" - -// lookup returns the trie value for the first UTF-8 encoding in s and -// the width in bytes of this encoding. The size will be 0 if s does not -// hold enough bytes to complete the encoding. len(s) must be greater than 0. -func (t *widthTrie) lookup(s []byte) (v uint16, sz int) { - c0 := s[0] - switch { - case c0 < 0x80: // is ASCII - return widthValues[c0], 1 - case c0 < 0xC2: - return 0, 1 // Illegal UTF-8: not a starter, not ASCII. - case c0 < 0xE0: // 2-byte UTF-8 - if len(s) < 2 { - return 0, 0 - } - i := widthIndex[c0] - c1 := s[1] - if c1 < 0x80 || 0xC0 <= c1 { - return 0, 1 // Illegal UTF-8: not a continuation byte. - } - return t.lookupValue(uint32(i), c1), 2 - case c0 < 0xF0: // 3-byte UTF-8 - if len(s) < 3 { - return 0, 0 - } - i := widthIndex[c0] - c1 := s[1] - if c1 < 0x80 || 0xC0 <= c1 { - return 0, 1 // Illegal UTF-8: not a continuation byte. - } - o := uint32(i)<<6 + uint32(c1) - i = widthIndex[o] - c2 := s[2] - if c2 < 0x80 || 0xC0 <= c2 { - return 0, 2 // Illegal UTF-8: not a continuation byte. - } - return t.lookupValue(uint32(i), c2), 3 - case c0 < 0xF8: // 4-byte UTF-8 - if len(s) < 4 { - return 0, 0 - } - i := widthIndex[c0] - c1 := s[1] - if c1 < 0x80 || 0xC0 <= c1 { - return 0, 1 // Illegal UTF-8: not a continuation byte. - } - o := uint32(i)<<6 + uint32(c1) - i = widthIndex[o] - c2 := s[2] - if c2 < 0x80 || 0xC0 <= c2 { - return 0, 2 // Illegal UTF-8: not a continuation byte. - } - o = uint32(i)<<6 + uint32(c2) - i = widthIndex[o] - c3 := s[3] - if c3 < 0x80 || 0xC0 <= c3 { - return 0, 3 // Illegal UTF-8: not a continuation byte. - } - return t.lookupValue(uint32(i), c3), 4 - } - // Illegal rune - return 0, 1 -} - -// lookupUnsafe returns the trie value for the first UTF-8 encoding in s. -// s must start with a full and valid UTF-8 encoded rune. 
-func (t *widthTrie) lookupUnsafe(s []byte) uint16 { - c0 := s[0] - if c0 < 0x80 { // is ASCII - return widthValues[c0] - } - i := widthIndex[c0] - if c0 < 0xE0 { // 2-byte UTF-8 - return t.lookupValue(uint32(i), s[1]) - } - i = widthIndex[uint32(i)<<6+uint32(s[1])] - if c0 < 0xF0 { // 3-byte UTF-8 - return t.lookupValue(uint32(i), s[2]) - } - i = widthIndex[uint32(i)<<6+uint32(s[2])] - if c0 < 0xF8 { // 4-byte UTF-8 - return t.lookupValue(uint32(i), s[3]) - } - return 0 -} - -// lookupString returns the trie value for the first UTF-8 encoding in s and -// the width in bytes of this encoding. The size will be 0 if s does not -// hold enough bytes to complete the encoding. len(s) must be greater than 0. -func (t *widthTrie) lookupString(s string) (v uint16, sz int) { - c0 := s[0] - switch { - case c0 < 0x80: // is ASCII - return widthValues[c0], 1 - case c0 < 0xC2: - return 0, 1 // Illegal UTF-8: not a starter, not ASCII. - case c0 < 0xE0: // 2-byte UTF-8 - if len(s) < 2 { - return 0, 0 - } - i := widthIndex[c0] - c1 := s[1] - if c1 < 0x80 || 0xC0 <= c1 { - return 0, 1 // Illegal UTF-8: not a continuation byte. - } - return t.lookupValue(uint32(i), c1), 2 - case c0 < 0xF0: // 3-byte UTF-8 - if len(s) < 3 { - return 0, 0 - } - i := widthIndex[c0] - c1 := s[1] - if c1 < 0x80 || 0xC0 <= c1 { - return 0, 1 // Illegal UTF-8: not a continuation byte. - } - o := uint32(i)<<6 + uint32(c1) - i = widthIndex[o] - c2 := s[2] - if c2 < 0x80 || 0xC0 <= c2 { - return 0, 2 // Illegal UTF-8: not a continuation byte. - } - return t.lookupValue(uint32(i), c2), 3 - case c0 < 0xF8: // 4-byte UTF-8 - if len(s) < 4 { - return 0, 0 - } - i := widthIndex[c0] - c1 := s[1] - if c1 < 0x80 || 0xC0 <= c1 { - return 0, 1 // Illegal UTF-8: not a continuation byte. - } - o := uint32(i)<<6 + uint32(c1) - i = widthIndex[o] - c2 := s[2] - if c2 < 0x80 || 0xC0 <= c2 { - return 0, 2 // Illegal UTF-8: not a continuation byte. - } - o = uint32(i)<<6 + uint32(c2) - i = widthIndex[o] - c3 := s[3] - if c3 < 0x80 || 0xC0 <= c3 { - return 0, 3 // Illegal UTF-8: not a continuation byte. - } - return t.lookupValue(uint32(i), c3), 4 - } - // Illegal rune - return 0, 1 -} - -// lookupStringUnsafe returns the trie value for the first UTF-8 encoding in s. -// s must start with a full and valid UTF-8 encoded rune. -func (t *widthTrie) lookupStringUnsafe(s string) uint16 { - c0 := s[0] - if c0 < 0x80 { // is ASCII - return widthValues[c0] - } - i := widthIndex[c0] - if c0 < 0xE0 { // 2-byte UTF-8 - return t.lookupValue(uint32(i), s[1]) - } - i = widthIndex[uint32(i)<<6+uint32(s[1])] - if c0 < 0xF0 { // 3-byte UTF-8 - return t.lookupValue(uint32(i), s[2]) - } - i = widthIndex[uint32(i)<<6+uint32(s[2])] - if c0 < 0xF8 { // 4-byte UTF-8 - return t.lookupValue(uint32(i), s[3]) - } - return 0 -} - -// widthTrie. Total size: 14912 bytes (14.56 KiB). Checksum: 4468b6cd178303d2. -type widthTrie struct{} - -func newWidthTrie(i int) *widthTrie { - return &widthTrie{} -} - -// lookupValue determines the type of block n and looks up the value for b. -func (t *widthTrie) lookupValue(n uint32, b byte) uint16 { - switch { - default: - return uint16(widthValues[n<<6+uint32(b)]) - } -} - -// widthValues: 105 blocks, 6720 entries, 13440 bytes -// The third block is the zero block. 
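[Editor's note, not part of the diff above: the lookup functions being deleted here are the internal trie accessors behind the public golang.org/x/text/width API. A minimal usage sketch of that public API, assuming the package's documented LookupRune, Kind, and Narrow accessors and the EastAsianFullwidth constant; it is included only for orientation, not as part of this change.]

    package main

    import (
        "fmt"

        "golang.org/x/text/width"
    )

    func main() {
        // U+FF21 FULLWIDTH LATIN CAPITAL LETTER A
        p := width.LookupRune('Ａ')
        fmt.Println(p.Kind() == width.EastAsianFullwidth) // true
        fmt.Printf("%c\n", p.Narrow())                    // 'A', the narrow counterpart
    }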
-var widthValues = [6720]uint16{ - // Block 0x0, offset 0x0 - 0x20: 0x6001, 0x21: 0x6002, 0x22: 0x6002, 0x23: 0x6002, - 0x24: 0x6002, 0x25: 0x6002, 0x26: 0x6002, 0x27: 0x6002, 0x28: 0x6002, 0x29: 0x6002, - 0x2a: 0x6002, 0x2b: 0x6002, 0x2c: 0x6002, 0x2d: 0x6002, 0x2e: 0x6002, 0x2f: 0x6002, - 0x30: 0x6002, 0x31: 0x6002, 0x32: 0x6002, 0x33: 0x6002, 0x34: 0x6002, 0x35: 0x6002, - 0x36: 0x6002, 0x37: 0x6002, 0x38: 0x6002, 0x39: 0x6002, 0x3a: 0x6002, 0x3b: 0x6002, - 0x3c: 0x6002, 0x3d: 0x6002, 0x3e: 0x6002, 0x3f: 0x6002, - // Block 0x1, offset 0x40 - 0x40: 0x6003, 0x41: 0x6003, 0x42: 0x6003, 0x43: 0x6003, 0x44: 0x6003, 0x45: 0x6003, - 0x46: 0x6003, 0x47: 0x6003, 0x48: 0x6003, 0x49: 0x6003, 0x4a: 0x6003, 0x4b: 0x6003, - 0x4c: 0x6003, 0x4d: 0x6003, 0x4e: 0x6003, 0x4f: 0x6003, 0x50: 0x6003, 0x51: 0x6003, - 0x52: 0x6003, 0x53: 0x6003, 0x54: 0x6003, 0x55: 0x6003, 0x56: 0x6003, 0x57: 0x6003, - 0x58: 0x6003, 0x59: 0x6003, 0x5a: 0x6003, 0x5b: 0x6003, 0x5c: 0x6003, 0x5d: 0x6003, - 0x5e: 0x6003, 0x5f: 0x6003, 0x60: 0x6004, 0x61: 0x6004, 0x62: 0x6004, 0x63: 0x6004, - 0x64: 0x6004, 0x65: 0x6004, 0x66: 0x6004, 0x67: 0x6004, 0x68: 0x6004, 0x69: 0x6004, - 0x6a: 0x6004, 0x6b: 0x6004, 0x6c: 0x6004, 0x6d: 0x6004, 0x6e: 0x6004, 0x6f: 0x6004, - 0x70: 0x6004, 0x71: 0x6004, 0x72: 0x6004, 0x73: 0x6004, 0x74: 0x6004, 0x75: 0x6004, - 0x76: 0x6004, 0x77: 0x6004, 0x78: 0x6004, 0x79: 0x6004, 0x7a: 0x6004, 0x7b: 0x6004, - 0x7c: 0x6004, 0x7d: 0x6004, 0x7e: 0x6004, - // Block 0x2, offset 0x80 - // Block 0x3, offset 0xc0 - 0xe1: 0x2000, 0xe2: 0x6005, 0xe3: 0x6005, - 0xe4: 0x2000, 0xe5: 0x6006, 0xe6: 0x6005, 0xe7: 0x2000, 0xe8: 0x2000, - 0xea: 0x2000, 0xec: 0x6007, 0xed: 0x2000, 0xee: 0x2000, 0xef: 0x6008, - 0xf0: 0x2000, 0xf1: 0x2000, 0xf2: 0x2000, 0xf3: 0x2000, 0xf4: 0x2000, - 0xf6: 0x2000, 0xf7: 0x2000, 0xf8: 0x2000, 0xf9: 0x2000, 0xfa: 0x2000, - 0xfc: 0x2000, 0xfd: 0x2000, 0xfe: 0x2000, 0xff: 0x2000, - // Block 0x4, offset 0x100 - 0x106: 0x2000, - 0x110: 0x2000, - 0x117: 0x2000, - 0x118: 0x2000, - 0x11e: 0x2000, 0x11f: 0x2000, 0x120: 0x2000, 0x121: 0x2000, - 0x126: 0x2000, 0x128: 0x2000, 0x129: 0x2000, - 0x12a: 0x2000, 0x12c: 0x2000, 0x12d: 0x2000, - 0x130: 0x2000, 0x132: 0x2000, 0x133: 0x2000, - 0x137: 0x2000, 0x138: 0x2000, 0x139: 0x2000, 0x13a: 0x2000, - 0x13c: 0x2000, 0x13e: 0x2000, - // Block 0x5, offset 0x140 - 0x141: 0x2000, - 0x151: 0x2000, - 0x153: 0x2000, - 0x15b: 0x2000, - 0x166: 0x2000, 0x167: 0x2000, - 0x16b: 0x2000, - 0x171: 0x2000, 0x172: 0x2000, 0x173: 0x2000, - 0x178: 0x2000, - 0x17f: 0x2000, - // Block 0x6, offset 0x180 - 0x180: 0x2000, 0x181: 0x2000, 0x182: 0x2000, 0x184: 0x2000, - 0x188: 0x2000, 0x189: 0x2000, 0x18a: 0x2000, 0x18b: 0x2000, - 0x18d: 0x2000, - 0x192: 0x2000, 0x193: 0x2000, - 0x1a6: 0x2000, 0x1a7: 0x2000, - 0x1ab: 0x2000, - // Block 0x7, offset 0x1c0 - 0x1ce: 0x2000, 0x1d0: 0x2000, - 0x1d2: 0x2000, 0x1d4: 0x2000, 0x1d6: 0x2000, - 0x1d8: 0x2000, 0x1da: 0x2000, 0x1dc: 0x2000, - // Block 0x8, offset 0x200 - 0x211: 0x2000, - 0x221: 0x2000, - // Block 0x9, offset 0x240 - 0x244: 0x2000, - 0x247: 0x2000, 0x249: 0x2000, 0x24a: 0x2000, 0x24b: 0x2000, - 0x24d: 0x2000, 0x250: 0x2000, - 0x258: 0x2000, 0x259: 0x2000, 0x25a: 0x2000, 0x25b: 0x2000, 0x25d: 0x2000, - 0x25f: 0x2000, - // Block 0xa, offset 0x280 - 0x280: 0x2000, 0x281: 0x2000, 0x282: 0x2000, 0x283: 0x2000, 0x284: 0x2000, 0x285: 0x2000, - 0x286: 0x2000, 0x287: 0x2000, 0x288: 0x2000, 0x289: 0x2000, 0x28a: 0x2000, 0x28b: 0x2000, - 0x28c: 0x2000, 0x28d: 0x2000, 0x28e: 0x2000, 0x28f: 0x2000, 0x290: 0x2000, 0x291: 0x2000, - 0x292: 0x2000, 0x293: 
0x2000, 0x294: 0x2000, 0x295: 0x2000, 0x296: 0x2000, 0x297: 0x2000, - 0x298: 0x2000, 0x299: 0x2000, 0x29a: 0x2000, 0x29b: 0x2000, 0x29c: 0x2000, 0x29d: 0x2000, - 0x29e: 0x2000, 0x29f: 0x2000, 0x2a0: 0x2000, 0x2a1: 0x2000, 0x2a2: 0x2000, 0x2a3: 0x2000, - 0x2a4: 0x2000, 0x2a5: 0x2000, 0x2a6: 0x2000, 0x2a7: 0x2000, 0x2a8: 0x2000, 0x2a9: 0x2000, - 0x2aa: 0x2000, 0x2ab: 0x2000, 0x2ac: 0x2000, 0x2ad: 0x2000, 0x2ae: 0x2000, 0x2af: 0x2000, - 0x2b0: 0x2000, 0x2b1: 0x2000, 0x2b2: 0x2000, 0x2b3: 0x2000, 0x2b4: 0x2000, 0x2b5: 0x2000, - 0x2b6: 0x2000, 0x2b7: 0x2000, 0x2b8: 0x2000, 0x2b9: 0x2000, 0x2ba: 0x2000, 0x2bb: 0x2000, - 0x2bc: 0x2000, 0x2bd: 0x2000, 0x2be: 0x2000, 0x2bf: 0x2000, - // Block 0xb, offset 0x2c0 - 0x2c0: 0x2000, 0x2c1: 0x2000, 0x2c2: 0x2000, 0x2c3: 0x2000, 0x2c4: 0x2000, 0x2c5: 0x2000, - 0x2c6: 0x2000, 0x2c7: 0x2000, 0x2c8: 0x2000, 0x2c9: 0x2000, 0x2ca: 0x2000, 0x2cb: 0x2000, - 0x2cc: 0x2000, 0x2cd: 0x2000, 0x2ce: 0x2000, 0x2cf: 0x2000, 0x2d0: 0x2000, 0x2d1: 0x2000, - 0x2d2: 0x2000, 0x2d3: 0x2000, 0x2d4: 0x2000, 0x2d5: 0x2000, 0x2d6: 0x2000, 0x2d7: 0x2000, - 0x2d8: 0x2000, 0x2d9: 0x2000, 0x2da: 0x2000, 0x2db: 0x2000, 0x2dc: 0x2000, 0x2dd: 0x2000, - 0x2de: 0x2000, 0x2df: 0x2000, 0x2e0: 0x2000, 0x2e1: 0x2000, 0x2e2: 0x2000, 0x2e3: 0x2000, - 0x2e4: 0x2000, 0x2e5: 0x2000, 0x2e6: 0x2000, 0x2e7: 0x2000, 0x2e8: 0x2000, 0x2e9: 0x2000, - 0x2ea: 0x2000, 0x2eb: 0x2000, 0x2ec: 0x2000, 0x2ed: 0x2000, 0x2ee: 0x2000, 0x2ef: 0x2000, - // Block 0xc, offset 0x300 - 0x311: 0x2000, - 0x312: 0x2000, 0x313: 0x2000, 0x314: 0x2000, 0x315: 0x2000, 0x316: 0x2000, 0x317: 0x2000, - 0x318: 0x2000, 0x319: 0x2000, 0x31a: 0x2000, 0x31b: 0x2000, 0x31c: 0x2000, 0x31d: 0x2000, - 0x31e: 0x2000, 0x31f: 0x2000, 0x320: 0x2000, 0x321: 0x2000, 0x323: 0x2000, - 0x324: 0x2000, 0x325: 0x2000, 0x326: 0x2000, 0x327: 0x2000, 0x328: 0x2000, 0x329: 0x2000, - 0x331: 0x2000, 0x332: 0x2000, 0x333: 0x2000, 0x334: 0x2000, 0x335: 0x2000, - 0x336: 0x2000, 0x337: 0x2000, 0x338: 0x2000, 0x339: 0x2000, 0x33a: 0x2000, 0x33b: 0x2000, - 0x33c: 0x2000, 0x33d: 0x2000, 0x33e: 0x2000, 0x33f: 0x2000, - // Block 0xd, offset 0x340 - 0x340: 0x2000, 0x341: 0x2000, 0x343: 0x2000, 0x344: 0x2000, 0x345: 0x2000, - 0x346: 0x2000, 0x347: 0x2000, 0x348: 0x2000, 0x349: 0x2000, - // Block 0xe, offset 0x380 - 0x381: 0x2000, - 0x390: 0x2000, 0x391: 0x2000, - 0x392: 0x2000, 0x393: 0x2000, 0x394: 0x2000, 0x395: 0x2000, 0x396: 0x2000, 0x397: 0x2000, - 0x398: 0x2000, 0x399: 0x2000, 0x39a: 0x2000, 0x39b: 0x2000, 0x39c: 0x2000, 0x39d: 0x2000, - 0x39e: 0x2000, 0x39f: 0x2000, 0x3a0: 0x2000, 0x3a1: 0x2000, 0x3a2: 0x2000, 0x3a3: 0x2000, - 0x3a4: 0x2000, 0x3a5: 0x2000, 0x3a6: 0x2000, 0x3a7: 0x2000, 0x3a8: 0x2000, 0x3a9: 0x2000, - 0x3aa: 0x2000, 0x3ab: 0x2000, 0x3ac: 0x2000, 0x3ad: 0x2000, 0x3ae: 0x2000, 0x3af: 0x2000, - 0x3b0: 0x2000, 0x3b1: 0x2000, 0x3b2: 0x2000, 0x3b3: 0x2000, 0x3b4: 0x2000, 0x3b5: 0x2000, - 0x3b6: 0x2000, 0x3b7: 0x2000, 0x3b8: 0x2000, 0x3b9: 0x2000, 0x3ba: 0x2000, 0x3bb: 0x2000, - 0x3bc: 0x2000, 0x3bd: 0x2000, 0x3be: 0x2000, 0x3bf: 0x2000, - // Block 0xf, offset 0x3c0 - 0x3c0: 0x2000, 0x3c1: 0x2000, 0x3c2: 0x2000, 0x3c3: 0x2000, 0x3c4: 0x2000, 0x3c5: 0x2000, - 0x3c6: 0x2000, 0x3c7: 0x2000, 0x3c8: 0x2000, 0x3c9: 0x2000, 0x3ca: 0x2000, 0x3cb: 0x2000, - 0x3cc: 0x2000, 0x3cd: 0x2000, 0x3ce: 0x2000, 0x3cf: 0x2000, 0x3d1: 0x2000, - // Block 0x10, offset 0x400 - 0x400: 0x4000, 0x401: 0x4000, 0x402: 0x4000, 0x403: 0x4000, 0x404: 0x4000, 0x405: 0x4000, - 0x406: 0x4000, 0x407: 0x4000, 0x408: 0x4000, 0x409: 0x4000, 0x40a: 0x4000, 0x40b: 0x4000, - 0x40c: 0x4000, 
0x40d: 0x4000, 0x40e: 0x4000, 0x40f: 0x4000, 0x410: 0x4000, 0x411: 0x4000, - 0x412: 0x4000, 0x413: 0x4000, 0x414: 0x4000, 0x415: 0x4000, 0x416: 0x4000, 0x417: 0x4000, - 0x418: 0x4000, 0x419: 0x4000, 0x41a: 0x4000, 0x41b: 0x4000, 0x41c: 0x4000, 0x41d: 0x4000, - 0x41e: 0x4000, 0x41f: 0x4000, 0x420: 0x4000, 0x421: 0x4000, 0x422: 0x4000, 0x423: 0x4000, - 0x424: 0x4000, 0x425: 0x4000, 0x426: 0x4000, 0x427: 0x4000, 0x428: 0x4000, 0x429: 0x4000, - 0x42a: 0x4000, 0x42b: 0x4000, 0x42c: 0x4000, 0x42d: 0x4000, 0x42e: 0x4000, 0x42f: 0x4000, - 0x430: 0x4000, 0x431: 0x4000, 0x432: 0x4000, 0x433: 0x4000, 0x434: 0x4000, 0x435: 0x4000, - 0x436: 0x4000, 0x437: 0x4000, 0x438: 0x4000, 0x439: 0x4000, 0x43a: 0x4000, 0x43b: 0x4000, - 0x43c: 0x4000, 0x43d: 0x4000, 0x43e: 0x4000, 0x43f: 0x4000, - // Block 0x11, offset 0x440 - 0x440: 0x4000, 0x441: 0x4000, 0x442: 0x4000, 0x443: 0x4000, 0x444: 0x4000, 0x445: 0x4000, - 0x446: 0x4000, 0x447: 0x4000, 0x448: 0x4000, 0x449: 0x4000, 0x44a: 0x4000, 0x44b: 0x4000, - 0x44c: 0x4000, 0x44d: 0x4000, 0x44e: 0x4000, 0x44f: 0x4000, 0x450: 0x4000, 0x451: 0x4000, - 0x452: 0x4000, 0x453: 0x4000, 0x454: 0x4000, 0x455: 0x4000, 0x456: 0x4000, 0x457: 0x4000, - 0x458: 0x4000, 0x459: 0x4000, 0x45a: 0x4000, 0x45b: 0x4000, 0x45c: 0x4000, 0x45d: 0x4000, - 0x45e: 0x4000, 0x45f: 0x4000, - // Block 0x12, offset 0x480 - 0x490: 0x2000, - 0x493: 0x2000, 0x494: 0x2000, 0x495: 0x2000, 0x496: 0x2000, - 0x498: 0x2000, 0x499: 0x2000, 0x49c: 0x2000, 0x49d: 0x2000, - 0x4a0: 0x2000, 0x4a1: 0x2000, 0x4a2: 0x2000, - 0x4a4: 0x2000, 0x4a5: 0x2000, 0x4a6: 0x2000, 0x4a7: 0x2000, - 0x4b0: 0x2000, 0x4b2: 0x2000, 0x4b3: 0x2000, 0x4b5: 0x2000, - 0x4bb: 0x2000, - 0x4be: 0x2000, - // Block 0x13, offset 0x4c0 - 0x4f4: 0x2000, - 0x4ff: 0x2000, - // Block 0x14, offset 0x500 - 0x501: 0x2000, 0x502: 0x2000, 0x503: 0x2000, 0x504: 0x2000, - 0x529: 0xa009, - 0x52c: 0x2000, - // Block 0x15, offset 0x540 - 0x543: 0x2000, 0x545: 0x2000, - 0x549: 0x2000, - 0x553: 0x2000, 0x556: 0x2000, - 0x561: 0x2000, 0x562: 0x2000, - 0x566: 0x2000, - 0x56b: 0x2000, - // Block 0x16, offset 0x580 - 0x593: 0x2000, 0x594: 0x2000, - 0x59b: 0x2000, 0x59c: 0x2000, 0x59d: 0x2000, - 0x59e: 0x2000, 0x5a0: 0x2000, 0x5a1: 0x2000, 0x5a2: 0x2000, 0x5a3: 0x2000, - 0x5a4: 0x2000, 0x5a5: 0x2000, 0x5a6: 0x2000, 0x5a7: 0x2000, 0x5a8: 0x2000, 0x5a9: 0x2000, - 0x5aa: 0x2000, 0x5ab: 0x2000, - 0x5b0: 0x2000, 0x5b1: 0x2000, 0x5b2: 0x2000, 0x5b3: 0x2000, 0x5b4: 0x2000, 0x5b5: 0x2000, - 0x5b6: 0x2000, 0x5b7: 0x2000, 0x5b8: 0x2000, 0x5b9: 0x2000, - // Block 0x17, offset 0x5c0 - 0x5c9: 0x2000, - 0x5d0: 0x200a, 0x5d1: 0x200b, - 0x5d2: 0x200a, 0x5d3: 0x200c, 0x5d4: 0x2000, 0x5d5: 0x2000, 0x5d6: 0x2000, 0x5d7: 0x2000, - 0x5d8: 0x2000, 0x5d9: 0x2000, - 0x5f8: 0x2000, 0x5f9: 0x2000, - // Block 0x18, offset 0x600 - 0x612: 0x2000, 0x614: 0x2000, - 0x627: 0x2000, - // Block 0x19, offset 0x640 - 0x640: 0x2000, 0x642: 0x2000, 0x643: 0x2000, - 0x647: 0x2000, 0x648: 0x2000, 0x64b: 0x2000, - 0x64f: 0x2000, 0x651: 0x2000, - 0x655: 0x2000, - 0x65a: 0x2000, 0x65d: 0x2000, - 0x65e: 0x2000, 0x65f: 0x2000, 0x660: 0x2000, 0x663: 0x2000, - 0x665: 0x2000, 0x667: 0x2000, 0x668: 0x2000, 0x669: 0x2000, - 0x66a: 0x2000, 0x66b: 0x2000, 0x66c: 0x2000, 0x66e: 0x2000, - 0x674: 0x2000, 0x675: 0x2000, - 0x676: 0x2000, 0x677: 0x2000, - 0x67c: 0x2000, 0x67d: 0x2000, - // Block 0x1a, offset 0x680 - 0x688: 0x2000, - 0x68c: 0x2000, - 0x692: 0x2000, - 0x6a0: 0x2000, 0x6a1: 0x2000, - 0x6a4: 0x2000, 0x6a5: 0x2000, 0x6a6: 0x2000, 0x6a7: 0x2000, - 0x6aa: 0x2000, 0x6ab: 0x2000, 0x6ae: 0x2000, 0x6af: 0x2000, - // 
Block 0x1b, offset 0x6c0 - 0x6c2: 0x2000, 0x6c3: 0x2000, - 0x6c6: 0x2000, 0x6c7: 0x2000, - 0x6d5: 0x2000, - 0x6d9: 0x2000, - 0x6e5: 0x2000, - 0x6ff: 0x2000, - // Block 0x1c, offset 0x700 - 0x712: 0x2000, - 0x71a: 0x4000, 0x71b: 0x4000, - 0x729: 0x4000, - 0x72a: 0x4000, - // Block 0x1d, offset 0x740 - 0x769: 0x4000, - 0x76a: 0x4000, 0x76b: 0x4000, 0x76c: 0x4000, - 0x770: 0x4000, 0x773: 0x4000, - // Block 0x1e, offset 0x780 - 0x7a0: 0x2000, 0x7a1: 0x2000, 0x7a2: 0x2000, 0x7a3: 0x2000, - 0x7a4: 0x2000, 0x7a5: 0x2000, 0x7a6: 0x2000, 0x7a7: 0x2000, 0x7a8: 0x2000, 0x7a9: 0x2000, - 0x7aa: 0x2000, 0x7ab: 0x2000, 0x7ac: 0x2000, 0x7ad: 0x2000, 0x7ae: 0x2000, 0x7af: 0x2000, - 0x7b0: 0x2000, 0x7b1: 0x2000, 0x7b2: 0x2000, 0x7b3: 0x2000, 0x7b4: 0x2000, 0x7b5: 0x2000, - 0x7b6: 0x2000, 0x7b7: 0x2000, 0x7b8: 0x2000, 0x7b9: 0x2000, 0x7ba: 0x2000, 0x7bb: 0x2000, - 0x7bc: 0x2000, 0x7bd: 0x2000, 0x7be: 0x2000, 0x7bf: 0x2000, - // Block 0x1f, offset 0x7c0 - 0x7c0: 0x2000, 0x7c1: 0x2000, 0x7c2: 0x2000, 0x7c3: 0x2000, 0x7c4: 0x2000, 0x7c5: 0x2000, - 0x7c6: 0x2000, 0x7c7: 0x2000, 0x7c8: 0x2000, 0x7c9: 0x2000, 0x7ca: 0x2000, 0x7cb: 0x2000, - 0x7cc: 0x2000, 0x7cd: 0x2000, 0x7ce: 0x2000, 0x7cf: 0x2000, 0x7d0: 0x2000, 0x7d1: 0x2000, - 0x7d2: 0x2000, 0x7d3: 0x2000, 0x7d4: 0x2000, 0x7d5: 0x2000, 0x7d6: 0x2000, 0x7d7: 0x2000, - 0x7d8: 0x2000, 0x7d9: 0x2000, 0x7da: 0x2000, 0x7db: 0x2000, 0x7dc: 0x2000, 0x7dd: 0x2000, - 0x7de: 0x2000, 0x7df: 0x2000, 0x7e0: 0x2000, 0x7e1: 0x2000, 0x7e2: 0x2000, 0x7e3: 0x2000, - 0x7e4: 0x2000, 0x7e5: 0x2000, 0x7e6: 0x2000, 0x7e7: 0x2000, 0x7e8: 0x2000, 0x7e9: 0x2000, - 0x7eb: 0x2000, 0x7ec: 0x2000, 0x7ed: 0x2000, 0x7ee: 0x2000, 0x7ef: 0x2000, - 0x7f0: 0x2000, 0x7f1: 0x2000, 0x7f2: 0x2000, 0x7f3: 0x2000, 0x7f4: 0x2000, 0x7f5: 0x2000, - 0x7f6: 0x2000, 0x7f7: 0x2000, 0x7f8: 0x2000, 0x7f9: 0x2000, 0x7fa: 0x2000, 0x7fb: 0x2000, - 0x7fc: 0x2000, 0x7fd: 0x2000, 0x7fe: 0x2000, 0x7ff: 0x2000, - // Block 0x20, offset 0x800 - 0x800: 0x2000, 0x801: 0x2000, 0x802: 0x200d, 0x803: 0x2000, 0x804: 0x2000, 0x805: 0x2000, - 0x806: 0x2000, 0x807: 0x2000, 0x808: 0x2000, 0x809: 0x2000, 0x80a: 0x2000, 0x80b: 0x2000, - 0x80c: 0x2000, 0x80d: 0x2000, 0x80e: 0x2000, 0x80f: 0x2000, 0x810: 0x2000, 0x811: 0x2000, - 0x812: 0x2000, 0x813: 0x2000, 0x814: 0x2000, 0x815: 0x2000, 0x816: 0x2000, 0x817: 0x2000, - 0x818: 0x2000, 0x819: 0x2000, 0x81a: 0x2000, 0x81b: 0x2000, 0x81c: 0x2000, 0x81d: 0x2000, - 0x81e: 0x2000, 0x81f: 0x2000, 0x820: 0x2000, 0x821: 0x2000, 0x822: 0x2000, 0x823: 0x2000, - 0x824: 0x2000, 0x825: 0x2000, 0x826: 0x2000, 0x827: 0x2000, 0x828: 0x2000, 0x829: 0x2000, - 0x82a: 0x2000, 0x82b: 0x2000, 0x82c: 0x2000, 0x82d: 0x2000, 0x82e: 0x2000, 0x82f: 0x2000, - 0x830: 0x2000, 0x831: 0x2000, 0x832: 0x2000, 0x833: 0x2000, 0x834: 0x2000, 0x835: 0x2000, - 0x836: 0x2000, 0x837: 0x2000, 0x838: 0x2000, 0x839: 0x2000, 0x83a: 0x2000, 0x83b: 0x2000, - 0x83c: 0x2000, 0x83d: 0x2000, 0x83e: 0x2000, 0x83f: 0x2000, - // Block 0x21, offset 0x840 - 0x840: 0x2000, 0x841: 0x2000, 0x842: 0x2000, 0x843: 0x2000, 0x844: 0x2000, 0x845: 0x2000, - 0x846: 0x2000, 0x847: 0x2000, 0x848: 0x2000, 0x849: 0x2000, 0x84a: 0x2000, 0x84b: 0x2000, - 0x850: 0x2000, 0x851: 0x2000, - 0x852: 0x2000, 0x853: 0x2000, 0x854: 0x2000, 0x855: 0x2000, 0x856: 0x2000, 0x857: 0x2000, - 0x858: 0x2000, 0x859: 0x2000, 0x85a: 0x2000, 0x85b: 0x2000, 0x85c: 0x2000, 0x85d: 0x2000, - 0x85e: 0x2000, 0x85f: 0x2000, 0x860: 0x2000, 0x861: 0x2000, 0x862: 0x2000, 0x863: 0x2000, - 0x864: 0x2000, 0x865: 0x2000, 0x866: 0x2000, 0x867: 0x2000, 0x868: 0x2000, 0x869: 0x2000, - 0x86a: 
0x2000, 0x86b: 0x2000, 0x86c: 0x2000, 0x86d: 0x2000, 0x86e: 0x2000, 0x86f: 0x2000, - 0x870: 0x2000, 0x871: 0x2000, 0x872: 0x2000, 0x873: 0x2000, - // Block 0x22, offset 0x880 - 0x880: 0x2000, 0x881: 0x2000, 0x882: 0x2000, 0x883: 0x2000, 0x884: 0x2000, 0x885: 0x2000, - 0x886: 0x2000, 0x887: 0x2000, 0x888: 0x2000, 0x889: 0x2000, 0x88a: 0x2000, 0x88b: 0x2000, - 0x88c: 0x2000, 0x88d: 0x2000, 0x88e: 0x2000, 0x88f: 0x2000, - 0x892: 0x2000, 0x893: 0x2000, 0x894: 0x2000, 0x895: 0x2000, - 0x8a0: 0x200e, 0x8a1: 0x2000, 0x8a3: 0x2000, - 0x8a4: 0x2000, 0x8a5: 0x2000, 0x8a6: 0x2000, 0x8a7: 0x2000, 0x8a8: 0x2000, 0x8a9: 0x2000, - 0x8b2: 0x2000, 0x8b3: 0x2000, - 0x8b6: 0x2000, 0x8b7: 0x2000, - 0x8bc: 0x2000, 0x8bd: 0x2000, - // Block 0x23, offset 0x8c0 - 0x8c0: 0x2000, 0x8c1: 0x2000, - 0x8c6: 0x2000, 0x8c7: 0x2000, 0x8c8: 0x2000, 0x8cb: 0x200f, - 0x8ce: 0x2000, 0x8cf: 0x2000, 0x8d0: 0x2000, 0x8d1: 0x2000, - 0x8e2: 0x2000, 0x8e3: 0x2000, - 0x8e4: 0x2000, 0x8e5: 0x2000, - 0x8ef: 0x2000, - 0x8fd: 0x4000, 0x8fe: 0x4000, - // Block 0x24, offset 0x900 - 0x905: 0x2000, - 0x906: 0x2000, 0x909: 0x2000, - 0x90e: 0x2000, 0x90f: 0x2000, - 0x914: 0x4000, 0x915: 0x4000, - 0x91c: 0x2000, - 0x91e: 0x2000, - // Block 0x25, offset 0x940 - 0x940: 0x2000, 0x942: 0x2000, - 0x948: 0x4000, 0x949: 0x4000, 0x94a: 0x4000, 0x94b: 0x4000, - 0x94c: 0x4000, 0x94d: 0x4000, 0x94e: 0x4000, 0x94f: 0x4000, 0x950: 0x4000, 0x951: 0x4000, - 0x952: 0x4000, 0x953: 0x4000, - 0x960: 0x2000, 0x961: 0x2000, 0x963: 0x2000, - 0x964: 0x2000, 0x965: 0x2000, 0x967: 0x2000, 0x968: 0x2000, 0x969: 0x2000, - 0x96a: 0x2000, 0x96c: 0x2000, 0x96d: 0x2000, 0x96f: 0x2000, - 0x97f: 0x4000, - // Block 0x26, offset 0x980 - 0x993: 0x4000, - 0x99e: 0x2000, 0x99f: 0x2000, 0x9a1: 0x4000, - 0x9aa: 0x4000, 0x9ab: 0x4000, - 0x9bd: 0x4000, 0x9be: 0x4000, 0x9bf: 0x2000, - // Block 0x27, offset 0x9c0 - 0x9c4: 0x4000, 0x9c5: 0x4000, - 0x9c6: 0x2000, 0x9c7: 0x2000, 0x9c8: 0x2000, 0x9c9: 0x2000, 0x9ca: 0x2000, 0x9cb: 0x2000, - 0x9cc: 0x2000, 0x9cd: 0x2000, 0x9ce: 0x4000, 0x9cf: 0x2000, 0x9d0: 0x2000, 0x9d1: 0x2000, - 0x9d2: 0x2000, 0x9d3: 0x2000, 0x9d4: 0x4000, 0x9d5: 0x2000, 0x9d6: 0x2000, 0x9d7: 0x2000, - 0x9d8: 0x2000, 0x9d9: 0x2000, 0x9da: 0x2000, 0x9db: 0x2000, 0x9dc: 0x2000, 0x9dd: 0x2000, - 0x9de: 0x2000, 0x9df: 0x2000, 0x9e0: 0x2000, 0x9e1: 0x2000, 0x9e3: 0x2000, - 0x9e8: 0x2000, 0x9e9: 0x2000, - 0x9ea: 0x4000, 0x9eb: 0x2000, 0x9ec: 0x2000, 0x9ed: 0x2000, 0x9ee: 0x2000, 0x9ef: 0x2000, - 0x9f0: 0x2000, 0x9f1: 0x2000, 0x9f2: 0x4000, 0x9f3: 0x4000, 0x9f4: 0x2000, 0x9f5: 0x4000, - 0x9f6: 0x2000, 0x9f7: 0x2000, 0x9f8: 0x2000, 0x9f9: 0x2000, 0x9fa: 0x4000, 0x9fb: 0x2000, - 0x9fc: 0x2000, 0x9fd: 0x4000, 0x9fe: 0x2000, 0x9ff: 0x2000, - // Block 0x28, offset 0xa00 - 0xa05: 0x4000, - 0xa0a: 0x4000, 0xa0b: 0x4000, - 0xa28: 0x4000, - 0xa3d: 0x2000, - // Block 0x29, offset 0xa40 - 0xa4c: 0x4000, 0xa4e: 0x4000, - 0xa53: 0x4000, 0xa54: 0x4000, 0xa55: 0x4000, 0xa57: 0x4000, - 0xa76: 0x2000, 0xa77: 0x2000, 0xa78: 0x2000, 0xa79: 0x2000, 0xa7a: 0x2000, 0xa7b: 0x2000, - 0xa7c: 0x2000, 0xa7d: 0x2000, 0xa7e: 0x2000, 0xa7f: 0x2000, - // Block 0x2a, offset 0xa80 - 0xa95: 0x4000, 0xa96: 0x4000, 0xa97: 0x4000, - 0xab0: 0x4000, - 0xabf: 0x4000, - // Block 0x2b, offset 0xac0 - 0xae6: 0x6000, 0xae7: 0x6000, 0xae8: 0x6000, 0xae9: 0x6000, - 0xaea: 0x6000, 0xaeb: 0x6000, 0xaec: 0x6000, 0xaed: 0x6000, - // Block 0x2c, offset 0xb00 - 0xb05: 0x6010, - 0xb06: 0x6011, - // Block 0x2d, offset 0xb40 - 0xb5b: 0x4000, 0xb5c: 0x4000, - // Block 0x2e, offset 0xb80 - 0xb90: 0x4000, - 0xb95: 0x4000, 0xb96: 
0x2000, 0xb97: 0x2000, - 0xb98: 0x2000, 0xb99: 0x2000, - // Block 0x2f, offset 0xbc0 - 0xbc0: 0x4000, 0xbc1: 0x4000, 0xbc2: 0x4000, 0xbc3: 0x4000, 0xbc4: 0x4000, 0xbc5: 0x4000, - 0xbc6: 0x4000, 0xbc7: 0x4000, 0xbc8: 0x4000, 0xbc9: 0x4000, 0xbca: 0x4000, 0xbcb: 0x4000, - 0xbcc: 0x4000, 0xbcd: 0x4000, 0xbce: 0x4000, 0xbcf: 0x4000, 0xbd0: 0x4000, 0xbd1: 0x4000, - 0xbd2: 0x4000, 0xbd3: 0x4000, 0xbd4: 0x4000, 0xbd5: 0x4000, 0xbd6: 0x4000, 0xbd7: 0x4000, - 0xbd8: 0x4000, 0xbd9: 0x4000, 0xbdb: 0x4000, 0xbdc: 0x4000, 0xbdd: 0x4000, - 0xbde: 0x4000, 0xbdf: 0x4000, 0xbe0: 0x4000, 0xbe1: 0x4000, 0xbe2: 0x4000, 0xbe3: 0x4000, - 0xbe4: 0x4000, 0xbe5: 0x4000, 0xbe6: 0x4000, 0xbe7: 0x4000, 0xbe8: 0x4000, 0xbe9: 0x4000, - 0xbea: 0x4000, 0xbeb: 0x4000, 0xbec: 0x4000, 0xbed: 0x4000, 0xbee: 0x4000, 0xbef: 0x4000, - 0xbf0: 0x4000, 0xbf1: 0x4000, 0xbf2: 0x4000, 0xbf3: 0x4000, 0xbf4: 0x4000, 0xbf5: 0x4000, - 0xbf6: 0x4000, 0xbf7: 0x4000, 0xbf8: 0x4000, 0xbf9: 0x4000, 0xbfa: 0x4000, 0xbfb: 0x4000, - 0xbfc: 0x4000, 0xbfd: 0x4000, 0xbfe: 0x4000, 0xbff: 0x4000, - // Block 0x30, offset 0xc00 - 0xc00: 0x4000, 0xc01: 0x4000, 0xc02: 0x4000, 0xc03: 0x4000, 0xc04: 0x4000, 0xc05: 0x4000, - 0xc06: 0x4000, 0xc07: 0x4000, 0xc08: 0x4000, 0xc09: 0x4000, 0xc0a: 0x4000, 0xc0b: 0x4000, - 0xc0c: 0x4000, 0xc0d: 0x4000, 0xc0e: 0x4000, 0xc0f: 0x4000, 0xc10: 0x4000, 0xc11: 0x4000, - 0xc12: 0x4000, 0xc13: 0x4000, 0xc14: 0x4000, 0xc15: 0x4000, 0xc16: 0x4000, 0xc17: 0x4000, - 0xc18: 0x4000, 0xc19: 0x4000, 0xc1a: 0x4000, 0xc1b: 0x4000, 0xc1c: 0x4000, 0xc1d: 0x4000, - 0xc1e: 0x4000, 0xc1f: 0x4000, 0xc20: 0x4000, 0xc21: 0x4000, 0xc22: 0x4000, 0xc23: 0x4000, - 0xc24: 0x4000, 0xc25: 0x4000, 0xc26: 0x4000, 0xc27: 0x4000, 0xc28: 0x4000, 0xc29: 0x4000, - 0xc2a: 0x4000, 0xc2b: 0x4000, 0xc2c: 0x4000, 0xc2d: 0x4000, 0xc2e: 0x4000, 0xc2f: 0x4000, - 0xc30: 0x4000, 0xc31: 0x4000, 0xc32: 0x4000, 0xc33: 0x4000, - // Block 0x31, offset 0xc40 - 0xc40: 0x4000, 0xc41: 0x4000, 0xc42: 0x4000, 0xc43: 0x4000, 0xc44: 0x4000, 0xc45: 0x4000, - 0xc46: 0x4000, 0xc47: 0x4000, 0xc48: 0x4000, 0xc49: 0x4000, 0xc4a: 0x4000, 0xc4b: 0x4000, - 0xc4c: 0x4000, 0xc4d: 0x4000, 0xc4e: 0x4000, 0xc4f: 0x4000, 0xc50: 0x4000, 0xc51: 0x4000, - 0xc52: 0x4000, 0xc53: 0x4000, 0xc54: 0x4000, 0xc55: 0x4000, - 0xc70: 0x4000, 0xc71: 0x4000, 0xc72: 0x4000, 0xc73: 0x4000, 0xc74: 0x4000, 0xc75: 0x4000, - 0xc76: 0x4000, 0xc77: 0x4000, 0xc78: 0x4000, 0xc79: 0x4000, 0xc7a: 0x4000, 0xc7b: 0x4000, - // Block 0x32, offset 0xc80 - 0xc80: 0x9012, 0xc81: 0x4013, 0xc82: 0x4014, 0xc83: 0x4000, 0xc84: 0x4000, 0xc85: 0x4000, - 0xc86: 0x4000, 0xc87: 0x4000, 0xc88: 0x4000, 0xc89: 0x4000, 0xc8a: 0x4000, 0xc8b: 0x4000, - 0xc8c: 0x4015, 0xc8d: 0x4015, 0xc8e: 0x4000, 0xc8f: 0x4000, 0xc90: 0x4000, 0xc91: 0x4000, - 0xc92: 0x4000, 0xc93: 0x4000, 0xc94: 0x4000, 0xc95: 0x4000, 0xc96: 0x4000, 0xc97: 0x4000, - 0xc98: 0x4000, 0xc99: 0x4000, 0xc9a: 0x4000, 0xc9b: 0x4000, 0xc9c: 0x4000, 0xc9d: 0x4000, - 0xc9e: 0x4000, 0xc9f: 0x4000, 0xca0: 0x4000, 0xca1: 0x4000, 0xca2: 0x4000, 0xca3: 0x4000, - 0xca4: 0x4000, 0xca5: 0x4000, 0xca6: 0x4000, 0xca7: 0x4000, 0xca8: 0x4000, 0xca9: 0x4000, - 0xcaa: 0x4000, 0xcab: 0x4000, 0xcac: 0x4000, 0xcad: 0x4000, 0xcae: 0x4000, 0xcaf: 0x4000, - 0xcb0: 0x4000, 0xcb1: 0x4000, 0xcb2: 0x4000, 0xcb3: 0x4000, 0xcb4: 0x4000, 0xcb5: 0x4000, - 0xcb6: 0x4000, 0xcb7: 0x4000, 0xcb8: 0x4000, 0xcb9: 0x4000, 0xcba: 0x4000, 0xcbb: 0x4000, - 0xcbc: 0x4000, 0xcbd: 0x4000, 0xcbe: 0x4000, - // Block 0x33, offset 0xcc0 - 0xcc1: 0x4000, 0xcc2: 0x4000, 0xcc3: 0x4000, 0xcc4: 0x4000, 0xcc5: 0x4000, - 0xcc6: 0x4000, 
0xcc7: 0x4000, 0xcc8: 0x4000, 0xcc9: 0x4000, 0xcca: 0x4000, 0xccb: 0x4000, - 0xccc: 0x4000, 0xccd: 0x4000, 0xcce: 0x4000, 0xccf: 0x4000, 0xcd0: 0x4000, 0xcd1: 0x4000, - 0xcd2: 0x4000, 0xcd3: 0x4000, 0xcd4: 0x4000, 0xcd5: 0x4000, 0xcd6: 0x4000, 0xcd7: 0x4000, - 0xcd8: 0x4000, 0xcd9: 0x4000, 0xcda: 0x4000, 0xcdb: 0x4000, 0xcdc: 0x4000, 0xcdd: 0x4000, - 0xcde: 0x4000, 0xcdf: 0x4000, 0xce0: 0x4000, 0xce1: 0x4000, 0xce2: 0x4000, 0xce3: 0x4000, - 0xce4: 0x4000, 0xce5: 0x4000, 0xce6: 0x4000, 0xce7: 0x4000, 0xce8: 0x4000, 0xce9: 0x4000, - 0xcea: 0x4000, 0xceb: 0x4000, 0xcec: 0x4000, 0xced: 0x4000, 0xcee: 0x4000, 0xcef: 0x4000, - 0xcf0: 0x4000, 0xcf1: 0x4000, 0xcf2: 0x4000, 0xcf3: 0x4000, 0xcf4: 0x4000, 0xcf5: 0x4000, - 0xcf6: 0x4000, 0xcf7: 0x4000, 0xcf8: 0x4000, 0xcf9: 0x4000, 0xcfa: 0x4000, 0xcfb: 0x4000, - 0xcfc: 0x4000, 0xcfd: 0x4000, 0xcfe: 0x4000, 0xcff: 0x4000, - // Block 0x34, offset 0xd00 - 0xd00: 0x4000, 0xd01: 0x4000, 0xd02: 0x4000, 0xd03: 0x4000, 0xd04: 0x4000, 0xd05: 0x4000, - 0xd06: 0x4000, 0xd07: 0x4000, 0xd08: 0x4000, 0xd09: 0x4000, 0xd0a: 0x4000, 0xd0b: 0x4000, - 0xd0c: 0x4000, 0xd0d: 0x4000, 0xd0e: 0x4000, 0xd0f: 0x4000, 0xd10: 0x4000, 0xd11: 0x4000, - 0xd12: 0x4000, 0xd13: 0x4000, 0xd14: 0x4000, 0xd15: 0x4000, 0xd16: 0x4000, - 0xd19: 0x4016, 0xd1a: 0x4017, 0xd1b: 0x4000, 0xd1c: 0x4000, 0xd1d: 0x4000, - 0xd1e: 0x4000, 0xd1f: 0x4000, 0xd20: 0x4000, 0xd21: 0x4018, 0xd22: 0x4019, 0xd23: 0x401a, - 0xd24: 0x401b, 0xd25: 0x401c, 0xd26: 0x401d, 0xd27: 0x401e, 0xd28: 0x401f, 0xd29: 0x4020, - 0xd2a: 0x4021, 0xd2b: 0x4022, 0xd2c: 0x4000, 0xd2d: 0x4010, 0xd2e: 0x4000, 0xd2f: 0x4023, - 0xd30: 0x4000, 0xd31: 0x4024, 0xd32: 0x4000, 0xd33: 0x4025, 0xd34: 0x4000, 0xd35: 0x4026, - 0xd36: 0x4000, 0xd37: 0x401a, 0xd38: 0x4000, 0xd39: 0x4027, 0xd3a: 0x4000, 0xd3b: 0x4028, - 0xd3c: 0x4000, 0xd3d: 0x4020, 0xd3e: 0x4000, 0xd3f: 0x4029, - // Block 0x35, offset 0xd40 - 0xd40: 0x4000, 0xd41: 0x402a, 0xd42: 0x4000, 0xd43: 0x402b, 0xd44: 0x402c, 0xd45: 0x4000, - 0xd46: 0x4017, 0xd47: 0x4000, 0xd48: 0x402d, 0xd49: 0x4000, 0xd4a: 0x402e, 0xd4b: 0x402f, - 0xd4c: 0x4030, 0xd4d: 0x4017, 0xd4e: 0x4016, 0xd4f: 0x4017, 0xd50: 0x4000, 0xd51: 0x4000, - 0xd52: 0x4031, 0xd53: 0x4000, 0xd54: 0x4000, 0xd55: 0x4031, 0xd56: 0x4000, 0xd57: 0x4000, - 0xd58: 0x4032, 0xd59: 0x4000, 0xd5a: 0x4000, 0xd5b: 0x4032, 0xd5c: 0x4000, 0xd5d: 0x4000, - 0xd5e: 0x4033, 0xd5f: 0x402e, 0xd60: 0x4034, 0xd61: 0x4035, 0xd62: 0x4034, 0xd63: 0x4036, - 0xd64: 0x4037, 0xd65: 0x4024, 0xd66: 0x4035, 0xd67: 0x4025, 0xd68: 0x4038, 0xd69: 0x4038, - 0xd6a: 0x4039, 0xd6b: 0x4039, 0xd6c: 0x403a, 0xd6d: 0x403a, 0xd6e: 0x4000, 0xd6f: 0x4035, - 0xd70: 0x4000, 0xd71: 0x4000, 0xd72: 0x403b, 0xd73: 0x403c, 0xd74: 0x4000, 0xd75: 0x4000, - 0xd76: 0x4000, 0xd77: 0x4000, 0xd78: 0x4000, 0xd79: 0x4000, 0xd7a: 0x4000, 0xd7b: 0x403d, - 0xd7c: 0x401c, 0xd7d: 0x4000, 0xd7e: 0x4000, 0xd7f: 0x4000, - // Block 0x36, offset 0xd80 - 0xd85: 0x4000, - 0xd86: 0x4000, 0xd87: 0x4000, 0xd88: 0x4000, 0xd89: 0x4000, 0xd8a: 0x4000, 0xd8b: 0x4000, - 0xd8c: 0x4000, 0xd8d: 0x4000, 0xd8e: 0x4000, 0xd8f: 0x4000, 0xd90: 0x4000, 0xd91: 0x4000, - 0xd92: 0x4000, 0xd93: 0x4000, 0xd94: 0x4000, 0xd95: 0x4000, 0xd96: 0x4000, 0xd97: 0x4000, - 0xd98: 0x4000, 0xd99: 0x4000, 0xd9a: 0x4000, 0xd9b: 0x4000, 0xd9c: 0x4000, 0xd9d: 0x4000, - 0xd9e: 0x4000, 0xd9f: 0x4000, 0xda0: 0x4000, 0xda1: 0x4000, 0xda2: 0x4000, 0xda3: 0x4000, - 0xda4: 0x4000, 0xda5: 0x4000, 0xda6: 0x4000, 0xda7: 0x4000, 0xda8: 0x4000, 0xda9: 0x4000, - 0xdaa: 0x4000, 0xdab: 0x4000, 0xdac: 0x4000, 0xdad: 0x4000, 0xdae: 0x4000, 0xdaf: 
0x4000, - 0xdb1: 0x403e, 0xdb2: 0x403e, 0xdb3: 0x403e, 0xdb4: 0x403e, 0xdb5: 0x403e, - 0xdb6: 0x403e, 0xdb7: 0x403e, 0xdb8: 0x403e, 0xdb9: 0x403e, 0xdba: 0x403e, 0xdbb: 0x403e, - 0xdbc: 0x403e, 0xdbd: 0x403e, 0xdbe: 0x403e, 0xdbf: 0x403e, - // Block 0x37, offset 0xdc0 - 0xdc0: 0x4037, 0xdc1: 0x4037, 0xdc2: 0x4037, 0xdc3: 0x4037, 0xdc4: 0x4037, 0xdc5: 0x4037, - 0xdc6: 0x4037, 0xdc7: 0x4037, 0xdc8: 0x4037, 0xdc9: 0x4037, 0xdca: 0x4037, 0xdcb: 0x4037, - 0xdcc: 0x4037, 0xdcd: 0x4037, 0xdce: 0x4037, 0xdcf: 0x400e, 0xdd0: 0x403f, 0xdd1: 0x4040, - 0xdd2: 0x4041, 0xdd3: 0x4040, 0xdd4: 0x403f, 0xdd5: 0x4042, 0xdd6: 0x4043, 0xdd7: 0x4044, - 0xdd8: 0x4040, 0xdd9: 0x4041, 0xdda: 0x4040, 0xddb: 0x4045, 0xddc: 0x4009, 0xddd: 0x4045, - 0xdde: 0x4046, 0xddf: 0x4045, 0xde0: 0x4047, 0xde1: 0x400b, 0xde2: 0x400a, 0xde3: 0x400c, - 0xde4: 0x4048, 0xde5: 0x4000, 0xde6: 0x4000, 0xde7: 0x4000, 0xde8: 0x4000, 0xde9: 0x4000, - 0xdea: 0x4000, 0xdeb: 0x4000, 0xdec: 0x4000, 0xded: 0x4000, 0xdee: 0x4000, 0xdef: 0x4000, - 0xdf0: 0x4000, 0xdf1: 0x4000, 0xdf2: 0x4000, 0xdf3: 0x4000, 0xdf4: 0x4000, 0xdf5: 0x4000, - 0xdf6: 0x4000, 0xdf7: 0x4000, 0xdf8: 0x4000, 0xdf9: 0x4000, 0xdfa: 0x4000, 0xdfb: 0x4000, - 0xdfc: 0x4000, 0xdfd: 0x4000, 0xdfe: 0x4000, 0xdff: 0x4000, - // Block 0x38, offset 0xe00 - 0xe00: 0x4000, 0xe01: 0x4000, 0xe02: 0x4000, 0xe03: 0x4000, 0xe04: 0x4000, 0xe05: 0x4000, - 0xe06: 0x4000, 0xe07: 0x4000, 0xe08: 0x4000, 0xe09: 0x4000, 0xe0a: 0x4000, 0xe0b: 0x4000, - 0xe0c: 0x4000, 0xe0d: 0x4000, 0xe0e: 0x4000, 0xe10: 0x4000, 0xe11: 0x4000, - 0xe12: 0x4000, 0xe13: 0x4000, 0xe14: 0x4000, 0xe15: 0x4000, 0xe16: 0x4000, 0xe17: 0x4000, - 0xe18: 0x4000, 0xe19: 0x4000, 0xe1a: 0x4000, 0xe1b: 0x4000, 0xe1c: 0x4000, 0xe1d: 0x4000, - 0xe1e: 0x4000, 0xe1f: 0x4000, 0xe20: 0x4000, 0xe21: 0x4000, 0xe22: 0x4000, 0xe23: 0x4000, - 0xe24: 0x4000, 0xe25: 0x4000, 0xe26: 0x4000, 0xe27: 0x4000, 0xe28: 0x4000, 0xe29: 0x4000, - 0xe2a: 0x4000, 0xe2b: 0x4000, 0xe2c: 0x4000, 0xe2d: 0x4000, 0xe2e: 0x4000, 0xe2f: 0x4000, - 0xe30: 0x4000, 0xe31: 0x4000, 0xe32: 0x4000, 0xe33: 0x4000, 0xe34: 0x4000, 0xe35: 0x4000, - 0xe36: 0x4000, 0xe37: 0x4000, 0xe38: 0x4000, 0xe39: 0x4000, 0xe3a: 0x4000, 0xe3b: 0x4000, - 0xe3c: 0x4000, 0xe3d: 0x4000, 0xe3e: 0x4000, 0xe3f: 0x4000, - // Block 0x39, offset 0xe40 - 0xe40: 0x4000, 0xe41: 0x4000, 0xe42: 0x4000, 0xe43: 0x4000, 0xe44: 0x4000, 0xe45: 0x4000, - 0xe46: 0x4000, 0xe47: 0x4000, 0xe48: 0x4000, 0xe49: 0x4000, 0xe4a: 0x4000, 0xe4b: 0x4000, - 0xe4c: 0x4000, 0xe4d: 0x4000, 0xe4e: 0x4000, 0xe4f: 0x4000, 0xe50: 0x4000, 0xe51: 0x4000, - 0xe52: 0x4000, 0xe53: 0x4000, 0xe54: 0x4000, 0xe55: 0x4000, 0xe56: 0x4000, 0xe57: 0x4000, - 0xe58: 0x4000, 0xe59: 0x4000, 0xe5a: 0x4000, 0xe5b: 0x4000, 0xe5c: 0x4000, 0xe5d: 0x4000, - 0xe5e: 0x4000, 0xe5f: 0x4000, 0xe60: 0x4000, 0xe61: 0x4000, 0xe62: 0x4000, 0xe63: 0x4000, - 0xe70: 0x4000, 0xe71: 0x4000, 0xe72: 0x4000, 0xe73: 0x4000, 0xe74: 0x4000, 0xe75: 0x4000, - 0xe76: 0x4000, 0xe77: 0x4000, 0xe78: 0x4000, 0xe79: 0x4000, 0xe7a: 0x4000, 0xe7b: 0x4000, - 0xe7c: 0x4000, 0xe7d: 0x4000, 0xe7e: 0x4000, 0xe7f: 0x4000, - // Block 0x3a, offset 0xe80 - 0xe80: 0x4000, 0xe81: 0x4000, 0xe82: 0x4000, 0xe83: 0x4000, 0xe84: 0x4000, 0xe85: 0x4000, - 0xe86: 0x4000, 0xe87: 0x4000, 0xe88: 0x4000, 0xe89: 0x4000, 0xe8a: 0x4000, 0xe8b: 0x4000, - 0xe8c: 0x4000, 0xe8d: 0x4000, 0xe8e: 0x4000, 0xe8f: 0x4000, 0xe90: 0x4000, 0xe91: 0x4000, - 0xe92: 0x4000, 0xe93: 0x4000, 0xe94: 0x4000, 0xe95: 0x4000, 0xe96: 0x4000, 0xe97: 0x4000, - 0xe98: 0x4000, 0xe99: 0x4000, 0xe9a: 0x4000, 0xe9b: 0x4000, 0xe9c: 0x4000, 
0xe9d: 0x4000, - 0xe9e: 0x4000, 0xea0: 0x4000, 0xea1: 0x4000, 0xea2: 0x4000, 0xea3: 0x4000, - 0xea4: 0x4000, 0xea5: 0x4000, 0xea6: 0x4000, 0xea7: 0x4000, 0xea8: 0x4000, 0xea9: 0x4000, - 0xeaa: 0x4000, 0xeab: 0x4000, 0xeac: 0x4000, 0xead: 0x4000, 0xeae: 0x4000, 0xeaf: 0x4000, - 0xeb0: 0x4000, 0xeb1: 0x4000, 0xeb2: 0x4000, 0xeb3: 0x4000, 0xeb4: 0x4000, 0xeb5: 0x4000, - 0xeb6: 0x4000, 0xeb7: 0x4000, 0xeb8: 0x4000, 0xeb9: 0x4000, 0xeba: 0x4000, 0xebb: 0x4000, - 0xebc: 0x4000, 0xebd: 0x4000, 0xebe: 0x4000, 0xebf: 0x4000, - // Block 0x3b, offset 0xec0 - 0xec0: 0x4000, 0xec1: 0x4000, 0xec2: 0x4000, 0xec3: 0x4000, 0xec4: 0x4000, 0xec5: 0x4000, - 0xec6: 0x4000, 0xec7: 0x4000, 0xec8: 0x2000, 0xec9: 0x2000, 0xeca: 0x2000, 0xecb: 0x2000, - 0xecc: 0x2000, 0xecd: 0x2000, 0xece: 0x2000, 0xecf: 0x2000, 0xed0: 0x4000, 0xed1: 0x4000, - 0xed2: 0x4000, 0xed3: 0x4000, 0xed4: 0x4000, 0xed5: 0x4000, 0xed6: 0x4000, 0xed7: 0x4000, - 0xed8: 0x4000, 0xed9: 0x4000, 0xeda: 0x4000, 0xedb: 0x4000, 0xedc: 0x4000, 0xedd: 0x4000, - 0xede: 0x4000, 0xedf: 0x4000, 0xee0: 0x4000, 0xee1: 0x4000, 0xee2: 0x4000, 0xee3: 0x4000, - 0xee4: 0x4000, 0xee5: 0x4000, 0xee6: 0x4000, 0xee7: 0x4000, 0xee8: 0x4000, 0xee9: 0x4000, - 0xeea: 0x4000, 0xeeb: 0x4000, 0xeec: 0x4000, 0xeed: 0x4000, 0xeee: 0x4000, 0xeef: 0x4000, - 0xef0: 0x4000, 0xef1: 0x4000, 0xef2: 0x4000, 0xef3: 0x4000, 0xef4: 0x4000, 0xef5: 0x4000, - 0xef6: 0x4000, 0xef7: 0x4000, 0xef8: 0x4000, 0xef9: 0x4000, 0xefa: 0x4000, 0xefb: 0x4000, - 0xefc: 0x4000, 0xefd: 0x4000, 0xefe: 0x4000, 0xeff: 0x4000, - // Block 0x3c, offset 0xf00 - 0xf00: 0x4000, 0xf01: 0x4000, 0xf02: 0x4000, 0xf03: 0x4000, 0xf04: 0x4000, 0xf05: 0x4000, - 0xf06: 0x4000, 0xf07: 0x4000, 0xf08: 0x4000, 0xf09: 0x4000, 0xf0a: 0x4000, 0xf0b: 0x4000, - 0xf0c: 0x4000, 0xf10: 0x4000, 0xf11: 0x4000, - 0xf12: 0x4000, 0xf13: 0x4000, 0xf14: 0x4000, 0xf15: 0x4000, 0xf16: 0x4000, 0xf17: 0x4000, - 0xf18: 0x4000, 0xf19: 0x4000, 0xf1a: 0x4000, 0xf1b: 0x4000, 0xf1c: 0x4000, 0xf1d: 0x4000, - 0xf1e: 0x4000, 0xf1f: 0x4000, 0xf20: 0x4000, 0xf21: 0x4000, 0xf22: 0x4000, 0xf23: 0x4000, - 0xf24: 0x4000, 0xf25: 0x4000, 0xf26: 0x4000, 0xf27: 0x4000, 0xf28: 0x4000, 0xf29: 0x4000, - 0xf2a: 0x4000, 0xf2b: 0x4000, 0xf2c: 0x4000, 0xf2d: 0x4000, 0xf2e: 0x4000, 0xf2f: 0x4000, - 0xf30: 0x4000, 0xf31: 0x4000, 0xf32: 0x4000, 0xf33: 0x4000, 0xf34: 0x4000, 0xf35: 0x4000, - 0xf36: 0x4000, 0xf37: 0x4000, 0xf38: 0x4000, 0xf39: 0x4000, 0xf3a: 0x4000, 0xf3b: 0x4000, - 0xf3c: 0x4000, 0xf3d: 0x4000, 0xf3e: 0x4000, 0xf3f: 0x4000, - // Block 0x3d, offset 0xf40 - 0xf40: 0x4000, 0xf41: 0x4000, 0xf42: 0x4000, 0xf43: 0x4000, 0xf44: 0x4000, 0xf45: 0x4000, - 0xf46: 0x4000, - // Block 0x3e, offset 0xf80 - 0xfa0: 0x4000, 0xfa1: 0x4000, 0xfa2: 0x4000, 0xfa3: 0x4000, - 0xfa4: 0x4000, 0xfa5: 0x4000, 0xfa6: 0x4000, 0xfa7: 0x4000, 0xfa8: 0x4000, 0xfa9: 0x4000, - 0xfaa: 0x4000, 0xfab: 0x4000, 0xfac: 0x4000, 0xfad: 0x4000, 0xfae: 0x4000, 0xfaf: 0x4000, - 0xfb0: 0x4000, 0xfb1: 0x4000, 0xfb2: 0x4000, 0xfb3: 0x4000, 0xfb4: 0x4000, 0xfb5: 0x4000, - 0xfb6: 0x4000, 0xfb7: 0x4000, 0xfb8: 0x4000, 0xfb9: 0x4000, 0xfba: 0x4000, 0xfbb: 0x4000, - 0xfbc: 0x4000, - // Block 0x3f, offset 0xfc0 - 0xfc0: 0x4000, 0xfc1: 0x4000, 0xfc2: 0x4000, 0xfc3: 0x4000, 0xfc4: 0x4000, 0xfc5: 0x4000, - 0xfc6: 0x4000, 0xfc7: 0x4000, 0xfc8: 0x4000, 0xfc9: 0x4000, 0xfca: 0x4000, 0xfcb: 0x4000, - 0xfcc: 0x4000, 0xfcd: 0x4000, 0xfce: 0x4000, 0xfcf: 0x4000, 0xfd0: 0x4000, 0xfd1: 0x4000, - 0xfd2: 0x4000, 0xfd3: 0x4000, 0xfd4: 0x4000, 0xfd5: 0x4000, 0xfd6: 0x4000, 0xfd7: 0x4000, - 0xfd8: 0x4000, 0xfd9: 0x4000, 0xfda: 
0x4000, 0xfdb: 0x4000, 0xfdc: 0x4000, 0xfdd: 0x4000, - 0xfde: 0x4000, 0xfdf: 0x4000, 0xfe0: 0x4000, 0xfe1: 0x4000, 0xfe2: 0x4000, 0xfe3: 0x4000, - // Block 0x40, offset 0x1000 - 0x1000: 0x2000, 0x1001: 0x2000, 0x1002: 0x2000, 0x1003: 0x2000, 0x1004: 0x2000, 0x1005: 0x2000, - 0x1006: 0x2000, 0x1007: 0x2000, 0x1008: 0x2000, 0x1009: 0x2000, 0x100a: 0x2000, 0x100b: 0x2000, - 0x100c: 0x2000, 0x100d: 0x2000, 0x100e: 0x2000, 0x100f: 0x2000, 0x1010: 0x4000, 0x1011: 0x4000, - 0x1012: 0x4000, 0x1013: 0x4000, 0x1014: 0x4000, 0x1015: 0x4000, 0x1016: 0x4000, 0x1017: 0x4000, - 0x1018: 0x4000, 0x1019: 0x4000, - 0x1030: 0x4000, 0x1031: 0x4000, 0x1032: 0x4000, 0x1033: 0x4000, 0x1034: 0x4000, 0x1035: 0x4000, - 0x1036: 0x4000, 0x1037: 0x4000, 0x1038: 0x4000, 0x1039: 0x4000, 0x103a: 0x4000, 0x103b: 0x4000, - 0x103c: 0x4000, 0x103d: 0x4000, 0x103e: 0x4000, 0x103f: 0x4000, - // Block 0x41, offset 0x1040 - 0x1040: 0x4000, 0x1041: 0x4000, 0x1042: 0x4000, 0x1043: 0x4000, 0x1044: 0x4000, 0x1045: 0x4000, - 0x1046: 0x4000, 0x1047: 0x4000, 0x1048: 0x4000, 0x1049: 0x4000, 0x104a: 0x4000, 0x104b: 0x4000, - 0x104c: 0x4000, 0x104d: 0x4000, 0x104e: 0x4000, 0x104f: 0x4000, 0x1050: 0x4000, 0x1051: 0x4000, - 0x1052: 0x4000, 0x1054: 0x4000, 0x1055: 0x4000, 0x1056: 0x4000, 0x1057: 0x4000, - 0x1058: 0x4000, 0x1059: 0x4000, 0x105a: 0x4000, 0x105b: 0x4000, 0x105c: 0x4000, 0x105d: 0x4000, - 0x105e: 0x4000, 0x105f: 0x4000, 0x1060: 0x4000, 0x1061: 0x4000, 0x1062: 0x4000, 0x1063: 0x4000, - 0x1064: 0x4000, 0x1065: 0x4000, 0x1066: 0x4000, 0x1068: 0x4000, 0x1069: 0x4000, - 0x106a: 0x4000, 0x106b: 0x4000, - // Block 0x42, offset 0x1080 - 0x1081: 0x9012, 0x1082: 0x9012, 0x1083: 0x9012, 0x1084: 0x9012, 0x1085: 0x9012, - 0x1086: 0x9012, 0x1087: 0x9012, 0x1088: 0x9012, 0x1089: 0x9012, 0x108a: 0x9012, 0x108b: 0x9012, - 0x108c: 0x9012, 0x108d: 0x9012, 0x108e: 0x9012, 0x108f: 0x9012, 0x1090: 0x9012, 0x1091: 0x9012, - 0x1092: 0x9012, 0x1093: 0x9012, 0x1094: 0x9012, 0x1095: 0x9012, 0x1096: 0x9012, 0x1097: 0x9012, - 0x1098: 0x9012, 0x1099: 0x9012, 0x109a: 0x9012, 0x109b: 0x9012, 0x109c: 0x9012, 0x109d: 0x9012, - 0x109e: 0x9012, 0x109f: 0x9012, 0x10a0: 0x9049, 0x10a1: 0x9049, 0x10a2: 0x9049, 0x10a3: 0x9049, - 0x10a4: 0x9049, 0x10a5: 0x9049, 0x10a6: 0x9049, 0x10a7: 0x9049, 0x10a8: 0x9049, 0x10a9: 0x9049, - 0x10aa: 0x9049, 0x10ab: 0x9049, 0x10ac: 0x9049, 0x10ad: 0x9049, 0x10ae: 0x9049, 0x10af: 0x9049, - 0x10b0: 0x9049, 0x10b1: 0x9049, 0x10b2: 0x9049, 0x10b3: 0x9049, 0x10b4: 0x9049, 0x10b5: 0x9049, - 0x10b6: 0x9049, 0x10b7: 0x9049, 0x10b8: 0x9049, 0x10b9: 0x9049, 0x10ba: 0x9049, 0x10bb: 0x9049, - 0x10bc: 0x9049, 0x10bd: 0x9049, 0x10be: 0x9049, 0x10bf: 0x9049, - // Block 0x43, offset 0x10c0 - 0x10c0: 0x9049, 0x10c1: 0x9049, 0x10c2: 0x9049, 0x10c3: 0x9049, 0x10c4: 0x9049, 0x10c5: 0x9049, - 0x10c6: 0x9049, 0x10c7: 0x9049, 0x10c8: 0x9049, 0x10c9: 0x9049, 0x10ca: 0x9049, 0x10cb: 0x9049, - 0x10cc: 0x9049, 0x10cd: 0x9049, 0x10ce: 0x9049, 0x10cf: 0x9049, 0x10d0: 0x9049, 0x10d1: 0x9049, - 0x10d2: 0x9049, 0x10d3: 0x9049, 0x10d4: 0x9049, 0x10d5: 0x9049, 0x10d6: 0x9049, 0x10d7: 0x9049, - 0x10d8: 0x9049, 0x10d9: 0x9049, 0x10da: 0x9049, 0x10db: 0x9049, 0x10dc: 0x9049, 0x10dd: 0x9049, - 0x10de: 0x9049, 0x10df: 0x904a, 0x10e0: 0x904b, 0x10e1: 0xb04c, 0x10e2: 0xb04d, 0x10e3: 0xb04d, - 0x10e4: 0xb04e, 0x10e5: 0xb04f, 0x10e6: 0xb050, 0x10e7: 0xb051, 0x10e8: 0xb052, 0x10e9: 0xb053, - 0x10ea: 0xb054, 0x10eb: 0xb055, 0x10ec: 0xb056, 0x10ed: 0xb057, 0x10ee: 0xb058, 0x10ef: 0xb059, - 0x10f0: 0xb05a, 0x10f1: 0xb05b, 0x10f2: 0xb05c, 0x10f3: 0xb05d, 0x10f4: 0xb05e, 0x10f5: 
0xb05f, - 0x10f6: 0xb060, 0x10f7: 0xb061, 0x10f8: 0xb062, 0x10f9: 0xb063, 0x10fa: 0xb064, 0x10fb: 0xb065, - 0x10fc: 0xb052, 0x10fd: 0xb066, 0x10fe: 0xb067, 0x10ff: 0xb055, - // Block 0x44, offset 0x1100 - 0x1100: 0xb068, 0x1101: 0xb069, 0x1102: 0xb06a, 0x1103: 0xb06b, 0x1104: 0xb05a, 0x1105: 0xb056, - 0x1106: 0xb06c, 0x1107: 0xb06d, 0x1108: 0xb06b, 0x1109: 0xb06e, 0x110a: 0xb06b, 0x110b: 0xb06f, - 0x110c: 0xb06f, 0x110d: 0xb070, 0x110e: 0xb070, 0x110f: 0xb071, 0x1110: 0xb056, 0x1111: 0xb072, - 0x1112: 0xb073, 0x1113: 0xb072, 0x1114: 0xb074, 0x1115: 0xb073, 0x1116: 0xb075, 0x1117: 0xb075, - 0x1118: 0xb076, 0x1119: 0xb076, 0x111a: 0xb077, 0x111b: 0xb077, 0x111c: 0xb073, 0x111d: 0xb078, - 0x111e: 0xb079, 0x111f: 0xb067, 0x1120: 0xb07a, 0x1121: 0xb07b, 0x1122: 0xb07b, 0x1123: 0xb07b, - 0x1124: 0xb07b, 0x1125: 0xb07b, 0x1126: 0xb07b, 0x1127: 0xb07b, 0x1128: 0xb07b, 0x1129: 0xb07b, - 0x112a: 0xb07b, 0x112b: 0xb07b, 0x112c: 0xb07b, 0x112d: 0xb07b, 0x112e: 0xb07b, 0x112f: 0xb07b, - 0x1130: 0xb07c, 0x1131: 0xb07c, 0x1132: 0xb07c, 0x1133: 0xb07c, 0x1134: 0xb07c, 0x1135: 0xb07c, - 0x1136: 0xb07c, 0x1137: 0xb07c, 0x1138: 0xb07c, 0x1139: 0xb07c, 0x113a: 0xb07c, 0x113b: 0xb07c, - 0x113c: 0xb07c, 0x113d: 0xb07c, 0x113e: 0xb07c, - // Block 0x45, offset 0x1140 - 0x1142: 0xb07d, 0x1143: 0xb07e, 0x1144: 0xb07f, 0x1145: 0xb080, - 0x1146: 0xb07f, 0x1147: 0xb07e, 0x114a: 0xb081, 0x114b: 0xb082, - 0x114c: 0xb083, 0x114d: 0xb07f, 0x114e: 0xb080, 0x114f: 0xb07f, - 0x1152: 0xb084, 0x1153: 0xb085, 0x1154: 0xb084, 0x1155: 0xb086, 0x1156: 0xb084, 0x1157: 0xb087, - 0x115a: 0xb088, 0x115b: 0xb089, 0x115c: 0xb08a, - 0x1160: 0x908b, 0x1161: 0x908b, 0x1162: 0x908c, 0x1163: 0x908d, - 0x1164: 0x908b, 0x1165: 0x908e, 0x1166: 0x908f, 0x1168: 0xb090, 0x1169: 0xb091, - 0x116a: 0xb092, 0x116b: 0xb091, 0x116c: 0xb093, 0x116d: 0xb094, 0x116e: 0xb095, - 0x117d: 0x2000, - // Block 0x46, offset 0x1180 - 0x11a0: 0x4000, 0x11a1: 0x4000, 0x11a2: 0x4000, 0x11a3: 0x4000, - 0x11a4: 0x4000, - 0x11b0: 0x4000, 0x11b1: 0x4000, - // Block 0x47, offset 0x11c0 - 0x11c0: 0x4000, 0x11c1: 0x4000, 0x11c2: 0x4000, 0x11c3: 0x4000, 0x11c4: 0x4000, 0x11c5: 0x4000, - 0x11c6: 0x4000, 0x11c7: 0x4000, 0x11c8: 0x4000, 0x11c9: 0x4000, 0x11ca: 0x4000, 0x11cb: 0x4000, - 0x11cc: 0x4000, 0x11cd: 0x4000, 0x11ce: 0x4000, 0x11cf: 0x4000, 0x11d0: 0x4000, 0x11d1: 0x4000, - 0x11d2: 0x4000, 0x11d3: 0x4000, 0x11d4: 0x4000, 0x11d5: 0x4000, 0x11d6: 0x4000, 0x11d7: 0x4000, - 0x11d8: 0x4000, 0x11d9: 0x4000, 0x11da: 0x4000, 0x11db: 0x4000, 0x11dc: 0x4000, 0x11dd: 0x4000, - 0x11de: 0x4000, 0x11df: 0x4000, 0x11e0: 0x4000, 0x11e1: 0x4000, 0x11e2: 0x4000, 0x11e3: 0x4000, - 0x11e4: 0x4000, 0x11e5: 0x4000, 0x11e6: 0x4000, 0x11e7: 0x4000, 0x11e8: 0x4000, 0x11e9: 0x4000, - 0x11ea: 0x4000, 0x11eb: 0x4000, 0x11ec: 0x4000, 0x11ed: 0x4000, 0x11ee: 0x4000, 0x11ef: 0x4000, - 0x11f0: 0x4000, 0x11f1: 0x4000, 0x11f2: 0x4000, 0x11f3: 0x4000, 0x11f4: 0x4000, 0x11f5: 0x4000, - 0x11f6: 0x4000, 0x11f7: 0x4000, - // Block 0x48, offset 0x1200 - 0x1200: 0x4000, 0x1201: 0x4000, 0x1202: 0x4000, 0x1203: 0x4000, 0x1204: 0x4000, 0x1205: 0x4000, - 0x1206: 0x4000, 0x1207: 0x4000, 0x1208: 0x4000, 0x1209: 0x4000, 0x120a: 0x4000, 0x120b: 0x4000, - 0x120c: 0x4000, 0x120d: 0x4000, 0x120e: 0x4000, 0x120f: 0x4000, 0x1210: 0x4000, 0x1211: 0x4000, - 0x1212: 0x4000, 0x1213: 0x4000, 0x1214: 0x4000, 0x1215: 0x4000, - // Block 0x49, offset 0x1240 - 0x1240: 0x4000, 0x1241: 0x4000, 0x1242: 0x4000, 0x1243: 0x4000, 0x1244: 0x4000, 0x1245: 0x4000, - 0x1246: 0x4000, 0x1247: 0x4000, 0x1248: 0x4000, - // Block 0x4a, offset 0x1280 
- 0x12b0: 0x4000, 0x12b1: 0x4000, 0x12b2: 0x4000, 0x12b3: 0x4000, 0x12b5: 0x4000, - 0x12b6: 0x4000, 0x12b7: 0x4000, 0x12b8: 0x4000, 0x12b9: 0x4000, 0x12ba: 0x4000, 0x12bb: 0x4000, - 0x12bd: 0x4000, 0x12be: 0x4000, - // Block 0x4b, offset 0x12c0 - 0x12c0: 0x4000, 0x12c1: 0x4000, 0x12c2: 0x4000, 0x12c3: 0x4000, 0x12c4: 0x4000, 0x12c5: 0x4000, - 0x12c6: 0x4000, 0x12c7: 0x4000, 0x12c8: 0x4000, 0x12c9: 0x4000, 0x12ca: 0x4000, 0x12cb: 0x4000, - 0x12cc: 0x4000, 0x12cd: 0x4000, 0x12ce: 0x4000, 0x12cf: 0x4000, 0x12d0: 0x4000, 0x12d1: 0x4000, - 0x12d2: 0x4000, 0x12d3: 0x4000, 0x12d4: 0x4000, 0x12d5: 0x4000, 0x12d6: 0x4000, 0x12d7: 0x4000, - 0x12d8: 0x4000, 0x12d9: 0x4000, 0x12da: 0x4000, 0x12db: 0x4000, 0x12dc: 0x4000, 0x12dd: 0x4000, - 0x12de: 0x4000, 0x12df: 0x4000, 0x12e0: 0x4000, 0x12e1: 0x4000, 0x12e2: 0x4000, - 0x12f2: 0x4000, - // Block 0x4c, offset 0x1300 - 0x1310: 0x4000, 0x1311: 0x4000, - 0x1312: 0x4000, 0x1315: 0x4000, - 0x1324: 0x4000, 0x1325: 0x4000, 0x1326: 0x4000, 0x1327: 0x4000, - 0x1330: 0x4000, 0x1331: 0x4000, 0x1332: 0x4000, 0x1333: 0x4000, 0x1334: 0x4000, 0x1335: 0x4000, - 0x1336: 0x4000, 0x1337: 0x4000, 0x1338: 0x4000, 0x1339: 0x4000, 0x133a: 0x4000, 0x133b: 0x4000, - 0x133c: 0x4000, 0x133d: 0x4000, 0x133e: 0x4000, 0x133f: 0x4000, - // Block 0x4d, offset 0x1340 - 0x1340: 0x4000, 0x1341: 0x4000, 0x1342: 0x4000, 0x1343: 0x4000, 0x1344: 0x4000, 0x1345: 0x4000, - 0x1346: 0x4000, 0x1347: 0x4000, 0x1348: 0x4000, 0x1349: 0x4000, 0x134a: 0x4000, 0x134b: 0x4000, - 0x134c: 0x4000, 0x134d: 0x4000, 0x134e: 0x4000, 0x134f: 0x4000, 0x1350: 0x4000, 0x1351: 0x4000, - 0x1352: 0x4000, 0x1353: 0x4000, 0x1354: 0x4000, 0x1355: 0x4000, 0x1356: 0x4000, 0x1357: 0x4000, - 0x1358: 0x4000, 0x1359: 0x4000, 0x135a: 0x4000, 0x135b: 0x4000, 0x135c: 0x4000, 0x135d: 0x4000, - 0x135e: 0x4000, 0x135f: 0x4000, 0x1360: 0x4000, 0x1361: 0x4000, 0x1362: 0x4000, 0x1363: 0x4000, - 0x1364: 0x4000, 0x1365: 0x4000, 0x1366: 0x4000, 0x1367: 0x4000, 0x1368: 0x4000, 0x1369: 0x4000, - 0x136a: 0x4000, 0x136b: 0x4000, 0x136c: 0x4000, 0x136d: 0x4000, 0x136e: 0x4000, 0x136f: 0x4000, - 0x1370: 0x4000, 0x1371: 0x4000, 0x1372: 0x4000, 0x1373: 0x4000, 0x1374: 0x4000, 0x1375: 0x4000, - 0x1376: 0x4000, 0x1377: 0x4000, 0x1378: 0x4000, 0x1379: 0x4000, 0x137a: 0x4000, 0x137b: 0x4000, - // Block 0x4e, offset 0x1380 - 0x1384: 0x4000, - // Block 0x4f, offset 0x13c0 - 0x13cf: 0x4000, - // Block 0x50, offset 0x1400 - 0x1400: 0x2000, 0x1401: 0x2000, 0x1402: 0x2000, 0x1403: 0x2000, 0x1404: 0x2000, 0x1405: 0x2000, - 0x1406: 0x2000, 0x1407: 0x2000, 0x1408: 0x2000, 0x1409: 0x2000, 0x140a: 0x2000, - 0x1410: 0x2000, 0x1411: 0x2000, - 0x1412: 0x2000, 0x1413: 0x2000, 0x1414: 0x2000, 0x1415: 0x2000, 0x1416: 0x2000, 0x1417: 0x2000, - 0x1418: 0x2000, 0x1419: 0x2000, 0x141a: 0x2000, 0x141b: 0x2000, 0x141c: 0x2000, 0x141d: 0x2000, - 0x141e: 0x2000, 0x141f: 0x2000, 0x1420: 0x2000, 0x1421: 0x2000, 0x1422: 0x2000, 0x1423: 0x2000, - 0x1424: 0x2000, 0x1425: 0x2000, 0x1426: 0x2000, 0x1427: 0x2000, 0x1428: 0x2000, 0x1429: 0x2000, - 0x142a: 0x2000, 0x142b: 0x2000, 0x142c: 0x2000, 0x142d: 0x2000, - 0x1430: 0x2000, 0x1431: 0x2000, 0x1432: 0x2000, 0x1433: 0x2000, 0x1434: 0x2000, 0x1435: 0x2000, - 0x1436: 0x2000, 0x1437: 0x2000, 0x1438: 0x2000, 0x1439: 0x2000, 0x143a: 0x2000, 0x143b: 0x2000, - 0x143c: 0x2000, 0x143d: 0x2000, 0x143e: 0x2000, 0x143f: 0x2000, - // Block 0x51, offset 0x1440 - 0x1440: 0x2000, 0x1441: 0x2000, 0x1442: 0x2000, 0x1443: 0x2000, 0x1444: 0x2000, 0x1445: 0x2000, - 0x1446: 0x2000, 0x1447: 0x2000, 0x1448: 0x2000, 0x1449: 0x2000, 0x144a: 0x2000, 0x144b: 
0x2000, - 0x144c: 0x2000, 0x144d: 0x2000, 0x144e: 0x2000, 0x144f: 0x2000, 0x1450: 0x2000, 0x1451: 0x2000, - 0x1452: 0x2000, 0x1453: 0x2000, 0x1454: 0x2000, 0x1455: 0x2000, 0x1456: 0x2000, 0x1457: 0x2000, - 0x1458: 0x2000, 0x1459: 0x2000, 0x145a: 0x2000, 0x145b: 0x2000, 0x145c: 0x2000, 0x145d: 0x2000, - 0x145e: 0x2000, 0x145f: 0x2000, 0x1460: 0x2000, 0x1461: 0x2000, 0x1462: 0x2000, 0x1463: 0x2000, - 0x1464: 0x2000, 0x1465: 0x2000, 0x1466: 0x2000, 0x1467: 0x2000, 0x1468: 0x2000, 0x1469: 0x2000, - 0x1470: 0x2000, 0x1471: 0x2000, 0x1472: 0x2000, 0x1473: 0x2000, 0x1474: 0x2000, 0x1475: 0x2000, - 0x1476: 0x2000, 0x1477: 0x2000, 0x1478: 0x2000, 0x1479: 0x2000, 0x147a: 0x2000, 0x147b: 0x2000, - 0x147c: 0x2000, 0x147d: 0x2000, 0x147e: 0x2000, 0x147f: 0x2000, - // Block 0x52, offset 0x1480 - 0x1480: 0x2000, 0x1481: 0x2000, 0x1482: 0x2000, 0x1483: 0x2000, 0x1484: 0x2000, 0x1485: 0x2000, - 0x1486: 0x2000, 0x1487: 0x2000, 0x1488: 0x2000, 0x1489: 0x2000, 0x148a: 0x2000, 0x148b: 0x2000, - 0x148c: 0x2000, 0x148d: 0x2000, 0x148e: 0x4000, 0x148f: 0x2000, 0x1490: 0x2000, 0x1491: 0x4000, - 0x1492: 0x4000, 0x1493: 0x4000, 0x1494: 0x4000, 0x1495: 0x4000, 0x1496: 0x4000, 0x1497: 0x4000, - 0x1498: 0x4000, 0x1499: 0x4000, 0x149a: 0x4000, 0x149b: 0x2000, 0x149c: 0x2000, 0x149d: 0x2000, - 0x149e: 0x2000, 0x149f: 0x2000, 0x14a0: 0x2000, 0x14a1: 0x2000, 0x14a2: 0x2000, 0x14a3: 0x2000, - 0x14a4: 0x2000, 0x14a5: 0x2000, 0x14a6: 0x2000, 0x14a7: 0x2000, 0x14a8: 0x2000, 0x14a9: 0x2000, - 0x14aa: 0x2000, 0x14ab: 0x2000, 0x14ac: 0x2000, - // Block 0x53, offset 0x14c0 - 0x14c0: 0x4000, 0x14c1: 0x4000, 0x14c2: 0x4000, - 0x14d0: 0x4000, 0x14d1: 0x4000, - 0x14d2: 0x4000, 0x14d3: 0x4000, 0x14d4: 0x4000, 0x14d5: 0x4000, 0x14d6: 0x4000, 0x14d7: 0x4000, - 0x14d8: 0x4000, 0x14d9: 0x4000, 0x14da: 0x4000, 0x14db: 0x4000, 0x14dc: 0x4000, 0x14dd: 0x4000, - 0x14de: 0x4000, 0x14df: 0x4000, 0x14e0: 0x4000, 0x14e1: 0x4000, 0x14e2: 0x4000, 0x14e3: 0x4000, - 0x14e4: 0x4000, 0x14e5: 0x4000, 0x14e6: 0x4000, 0x14e7: 0x4000, 0x14e8: 0x4000, 0x14e9: 0x4000, - 0x14ea: 0x4000, 0x14eb: 0x4000, 0x14ec: 0x4000, 0x14ed: 0x4000, 0x14ee: 0x4000, 0x14ef: 0x4000, - 0x14f0: 0x4000, 0x14f1: 0x4000, 0x14f2: 0x4000, 0x14f3: 0x4000, 0x14f4: 0x4000, 0x14f5: 0x4000, - 0x14f6: 0x4000, 0x14f7: 0x4000, 0x14f8: 0x4000, 0x14f9: 0x4000, 0x14fa: 0x4000, 0x14fb: 0x4000, - // Block 0x54, offset 0x1500 - 0x1500: 0x4000, 0x1501: 0x4000, 0x1502: 0x4000, 0x1503: 0x4000, 0x1504: 0x4000, 0x1505: 0x4000, - 0x1506: 0x4000, 0x1507: 0x4000, 0x1508: 0x4000, - 0x1510: 0x4000, 0x1511: 0x4000, - 0x1520: 0x4000, 0x1521: 0x4000, 0x1522: 0x4000, 0x1523: 0x4000, - 0x1524: 0x4000, 0x1525: 0x4000, - // Block 0x55, offset 0x1540 - 0x1540: 0x4000, 0x1541: 0x4000, 0x1542: 0x4000, 0x1543: 0x4000, 0x1544: 0x4000, 0x1545: 0x4000, - 0x1546: 0x4000, 0x1547: 0x4000, 0x1548: 0x4000, 0x1549: 0x4000, 0x154a: 0x4000, 0x154b: 0x4000, - 0x154c: 0x4000, 0x154d: 0x4000, 0x154e: 0x4000, 0x154f: 0x4000, 0x1550: 0x4000, 0x1551: 0x4000, - 0x1552: 0x4000, 0x1553: 0x4000, 0x1554: 0x4000, 0x1555: 0x4000, 0x1556: 0x4000, 0x1557: 0x4000, - 0x1558: 0x4000, 0x1559: 0x4000, 0x155a: 0x4000, 0x155b: 0x4000, 0x155c: 0x4000, 0x155d: 0x4000, - 0x155e: 0x4000, 0x155f: 0x4000, 0x1560: 0x4000, - 0x156d: 0x4000, 0x156e: 0x4000, 0x156f: 0x4000, - 0x1570: 0x4000, 0x1571: 0x4000, 0x1572: 0x4000, 0x1573: 0x4000, 0x1574: 0x4000, 0x1575: 0x4000, - 0x1577: 0x4000, 0x1578: 0x4000, 0x1579: 0x4000, 0x157a: 0x4000, 0x157b: 0x4000, - 0x157c: 0x4000, 0x157d: 0x4000, 0x157e: 0x4000, 0x157f: 0x4000, - // Block 0x56, offset 0x1580 - 0x1580: 
0x4000, 0x1581: 0x4000, 0x1582: 0x4000, 0x1583: 0x4000, 0x1584: 0x4000, 0x1585: 0x4000, - 0x1586: 0x4000, 0x1587: 0x4000, 0x1588: 0x4000, 0x1589: 0x4000, 0x158a: 0x4000, 0x158b: 0x4000, - 0x158c: 0x4000, 0x158d: 0x4000, 0x158e: 0x4000, 0x158f: 0x4000, 0x1590: 0x4000, 0x1591: 0x4000, - 0x1592: 0x4000, 0x1593: 0x4000, 0x1594: 0x4000, 0x1595: 0x4000, 0x1596: 0x4000, 0x1597: 0x4000, - 0x1598: 0x4000, 0x1599: 0x4000, 0x159a: 0x4000, 0x159b: 0x4000, 0x159c: 0x4000, 0x159d: 0x4000, - 0x159e: 0x4000, 0x159f: 0x4000, 0x15a0: 0x4000, 0x15a1: 0x4000, 0x15a2: 0x4000, 0x15a3: 0x4000, - 0x15a4: 0x4000, 0x15a5: 0x4000, 0x15a6: 0x4000, 0x15a7: 0x4000, 0x15a8: 0x4000, 0x15a9: 0x4000, - 0x15aa: 0x4000, 0x15ab: 0x4000, 0x15ac: 0x4000, 0x15ad: 0x4000, 0x15ae: 0x4000, 0x15af: 0x4000, - 0x15b0: 0x4000, 0x15b1: 0x4000, 0x15b2: 0x4000, 0x15b3: 0x4000, 0x15b4: 0x4000, 0x15b5: 0x4000, - 0x15b6: 0x4000, 0x15b7: 0x4000, 0x15b8: 0x4000, 0x15b9: 0x4000, 0x15ba: 0x4000, 0x15bb: 0x4000, - 0x15bc: 0x4000, 0x15be: 0x4000, 0x15bf: 0x4000, - // Block 0x57, offset 0x15c0 - 0x15c0: 0x4000, 0x15c1: 0x4000, 0x15c2: 0x4000, 0x15c3: 0x4000, 0x15c4: 0x4000, 0x15c5: 0x4000, - 0x15c6: 0x4000, 0x15c7: 0x4000, 0x15c8: 0x4000, 0x15c9: 0x4000, 0x15ca: 0x4000, 0x15cb: 0x4000, - 0x15cc: 0x4000, 0x15cd: 0x4000, 0x15ce: 0x4000, 0x15cf: 0x4000, 0x15d0: 0x4000, 0x15d1: 0x4000, - 0x15d2: 0x4000, 0x15d3: 0x4000, - 0x15e0: 0x4000, 0x15e1: 0x4000, 0x15e2: 0x4000, 0x15e3: 0x4000, - 0x15e4: 0x4000, 0x15e5: 0x4000, 0x15e6: 0x4000, 0x15e7: 0x4000, 0x15e8: 0x4000, 0x15e9: 0x4000, - 0x15ea: 0x4000, 0x15eb: 0x4000, 0x15ec: 0x4000, 0x15ed: 0x4000, 0x15ee: 0x4000, 0x15ef: 0x4000, - 0x15f0: 0x4000, 0x15f1: 0x4000, 0x15f2: 0x4000, 0x15f3: 0x4000, 0x15f4: 0x4000, 0x15f5: 0x4000, - 0x15f6: 0x4000, 0x15f7: 0x4000, 0x15f8: 0x4000, 0x15f9: 0x4000, 0x15fa: 0x4000, 0x15fb: 0x4000, - 0x15fc: 0x4000, 0x15fd: 0x4000, 0x15fe: 0x4000, 0x15ff: 0x4000, - // Block 0x58, offset 0x1600 - 0x1600: 0x4000, 0x1601: 0x4000, 0x1602: 0x4000, 0x1603: 0x4000, 0x1604: 0x4000, 0x1605: 0x4000, - 0x1606: 0x4000, 0x1607: 0x4000, 0x1608: 0x4000, 0x1609: 0x4000, 0x160a: 0x4000, - 0x160f: 0x4000, 0x1610: 0x4000, 0x1611: 0x4000, - 0x1612: 0x4000, 0x1613: 0x4000, - 0x1620: 0x4000, 0x1621: 0x4000, 0x1622: 0x4000, 0x1623: 0x4000, - 0x1624: 0x4000, 0x1625: 0x4000, 0x1626: 0x4000, 0x1627: 0x4000, 0x1628: 0x4000, 0x1629: 0x4000, - 0x162a: 0x4000, 0x162b: 0x4000, 0x162c: 0x4000, 0x162d: 0x4000, 0x162e: 0x4000, 0x162f: 0x4000, - 0x1630: 0x4000, 0x1634: 0x4000, - 0x1638: 0x4000, 0x1639: 0x4000, 0x163a: 0x4000, 0x163b: 0x4000, - 0x163c: 0x4000, 0x163d: 0x4000, 0x163e: 0x4000, 0x163f: 0x4000, - // Block 0x59, offset 0x1640 - 0x1640: 0x4000, 0x1641: 0x4000, 0x1642: 0x4000, 0x1643: 0x4000, 0x1644: 0x4000, 0x1645: 0x4000, - 0x1646: 0x4000, 0x1647: 0x4000, 0x1648: 0x4000, 0x1649: 0x4000, 0x164a: 0x4000, 0x164b: 0x4000, - 0x164c: 0x4000, 0x164d: 0x4000, 0x164e: 0x4000, 0x164f: 0x4000, 0x1650: 0x4000, 0x1651: 0x4000, - 0x1652: 0x4000, 0x1653: 0x4000, 0x1654: 0x4000, 0x1655: 0x4000, 0x1656: 0x4000, 0x1657: 0x4000, - 0x1658: 0x4000, 0x1659: 0x4000, 0x165a: 0x4000, 0x165b: 0x4000, 0x165c: 0x4000, 0x165d: 0x4000, - 0x165e: 0x4000, 0x165f: 0x4000, 0x1660: 0x4000, 0x1661: 0x4000, 0x1662: 0x4000, 0x1663: 0x4000, - 0x1664: 0x4000, 0x1665: 0x4000, 0x1666: 0x4000, 0x1667: 0x4000, 0x1668: 0x4000, 0x1669: 0x4000, - 0x166a: 0x4000, 0x166b: 0x4000, 0x166c: 0x4000, 0x166d: 0x4000, 0x166e: 0x4000, 0x166f: 0x4000, - 0x1670: 0x4000, 0x1671: 0x4000, 0x1672: 0x4000, 0x1673: 0x4000, 0x1674: 0x4000, 0x1675: 0x4000, - 0x1676: 
0x4000, 0x1677: 0x4000, 0x1678: 0x4000, 0x1679: 0x4000, 0x167a: 0x4000, 0x167b: 0x4000, - 0x167c: 0x4000, 0x167d: 0x4000, 0x167e: 0x4000, - // Block 0x5a, offset 0x1680 - 0x1680: 0x4000, 0x1682: 0x4000, 0x1683: 0x4000, 0x1684: 0x4000, 0x1685: 0x4000, - 0x1686: 0x4000, 0x1687: 0x4000, 0x1688: 0x4000, 0x1689: 0x4000, 0x168a: 0x4000, 0x168b: 0x4000, - 0x168c: 0x4000, 0x168d: 0x4000, 0x168e: 0x4000, 0x168f: 0x4000, 0x1690: 0x4000, 0x1691: 0x4000, - 0x1692: 0x4000, 0x1693: 0x4000, 0x1694: 0x4000, 0x1695: 0x4000, 0x1696: 0x4000, 0x1697: 0x4000, - 0x1698: 0x4000, 0x1699: 0x4000, 0x169a: 0x4000, 0x169b: 0x4000, 0x169c: 0x4000, 0x169d: 0x4000, - 0x169e: 0x4000, 0x169f: 0x4000, 0x16a0: 0x4000, 0x16a1: 0x4000, 0x16a2: 0x4000, 0x16a3: 0x4000, - 0x16a4: 0x4000, 0x16a5: 0x4000, 0x16a6: 0x4000, 0x16a7: 0x4000, 0x16a8: 0x4000, 0x16a9: 0x4000, - 0x16aa: 0x4000, 0x16ab: 0x4000, 0x16ac: 0x4000, 0x16ad: 0x4000, 0x16ae: 0x4000, 0x16af: 0x4000, - 0x16b0: 0x4000, 0x16b1: 0x4000, 0x16b2: 0x4000, 0x16b3: 0x4000, 0x16b4: 0x4000, 0x16b5: 0x4000, - 0x16b6: 0x4000, 0x16b7: 0x4000, 0x16b8: 0x4000, 0x16b9: 0x4000, 0x16ba: 0x4000, 0x16bb: 0x4000, - 0x16bc: 0x4000, 0x16bd: 0x4000, 0x16be: 0x4000, 0x16bf: 0x4000, - // Block 0x5b, offset 0x16c0 - 0x16c0: 0x4000, 0x16c1: 0x4000, 0x16c2: 0x4000, 0x16c3: 0x4000, 0x16c4: 0x4000, 0x16c5: 0x4000, - 0x16c6: 0x4000, 0x16c7: 0x4000, 0x16c8: 0x4000, 0x16c9: 0x4000, 0x16ca: 0x4000, 0x16cb: 0x4000, - 0x16cc: 0x4000, 0x16cd: 0x4000, 0x16ce: 0x4000, 0x16cf: 0x4000, 0x16d0: 0x4000, 0x16d1: 0x4000, - 0x16d2: 0x4000, 0x16d3: 0x4000, 0x16d4: 0x4000, 0x16d5: 0x4000, 0x16d6: 0x4000, 0x16d7: 0x4000, - 0x16d8: 0x4000, 0x16d9: 0x4000, 0x16da: 0x4000, 0x16db: 0x4000, 0x16dc: 0x4000, 0x16dd: 0x4000, - 0x16de: 0x4000, 0x16df: 0x4000, 0x16e0: 0x4000, 0x16e1: 0x4000, 0x16e2: 0x4000, 0x16e3: 0x4000, - 0x16e4: 0x4000, 0x16e5: 0x4000, 0x16e6: 0x4000, 0x16e7: 0x4000, 0x16e8: 0x4000, 0x16e9: 0x4000, - 0x16ea: 0x4000, 0x16eb: 0x4000, 0x16ec: 0x4000, 0x16ed: 0x4000, 0x16ee: 0x4000, 0x16ef: 0x4000, - 0x16f0: 0x4000, 0x16f1: 0x4000, 0x16f2: 0x4000, 0x16f3: 0x4000, 0x16f4: 0x4000, 0x16f5: 0x4000, - 0x16f6: 0x4000, 0x16f7: 0x4000, 0x16f8: 0x4000, 0x16f9: 0x4000, 0x16fa: 0x4000, 0x16fb: 0x4000, - 0x16fc: 0x4000, 0x16ff: 0x4000, - // Block 0x5c, offset 0x1700 - 0x1700: 0x4000, 0x1701: 0x4000, 0x1702: 0x4000, 0x1703: 0x4000, 0x1704: 0x4000, 0x1705: 0x4000, - 0x1706: 0x4000, 0x1707: 0x4000, 0x1708: 0x4000, 0x1709: 0x4000, 0x170a: 0x4000, 0x170b: 0x4000, - 0x170c: 0x4000, 0x170d: 0x4000, 0x170e: 0x4000, 0x170f: 0x4000, 0x1710: 0x4000, 0x1711: 0x4000, - 0x1712: 0x4000, 0x1713: 0x4000, 0x1714: 0x4000, 0x1715: 0x4000, 0x1716: 0x4000, 0x1717: 0x4000, - 0x1718: 0x4000, 0x1719: 0x4000, 0x171a: 0x4000, 0x171b: 0x4000, 0x171c: 0x4000, 0x171d: 0x4000, - 0x171e: 0x4000, 0x171f: 0x4000, 0x1720: 0x4000, 0x1721: 0x4000, 0x1722: 0x4000, 0x1723: 0x4000, - 0x1724: 0x4000, 0x1725: 0x4000, 0x1726: 0x4000, 0x1727: 0x4000, 0x1728: 0x4000, 0x1729: 0x4000, - 0x172a: 0x4000, 0x172b: 0x4000, 0x172c: 0x4000, 0x172d: 0x4000, 0x172e: 0x4000, 0x172f: 0x4000, - 0x1730: 0x4000, 0x1731: 0x4000, 0x1732: 0x4000, 0x1733: 0x4000, 0x1734: 0x4000, 0x1735: 0x4000, - 0x1736: 0x4000, 0x1737: 0x4000, 0x1738: 0x4000, 0x1739: 0x4000, 0x173a: 0x4000, 0x173b: 0x4000, - 0x173c: 0x4000, 0x173d: 0x4000, - // Block 0x5d, offset 0x1740 - 0x174b: 0x4000, - 0x174c: 0x4000, 0x174d: 0x4000, 0x174e: 0x4000, 0x1750: 0x4000, 0x1751: 0x4000, - 0x1752: 0x4000, 0x1753: 0x4000, 0x1754: 0x4000, 0x1755: 0x4000, 0x1756: 0x4000, 0x1757: 0x4000, - 0x1758: 0x4000, 0x1759: 0x4000, 
0x175a: 0x4000, 0x175b: 0x4000, 0x175c: 0x4000, 0x175d: 0x4000, - 0x175e: 0x4000, 0x175f: 0x4000, 0x1760: 0x4000, 0x1761: 0x4000, 0x1762: 0x4000, 0x1763: 0x4000, - 0x1764: 0x4000, 0x1765: 0x4000, 0x1766: 0x4000, 0x1767: 0x4000, - 0x177a: 0x4000, - // Block 0x5e, offset 0x1780 - 0x1795: 0x4000, 0x1796: 0x4000, - 0x17a4: 0x4000, - // Block 0x5f, offset 0x17c0 - 0x17fb: 0x4000, - 0x17fc: 0x4000, 0x17fd: 0x4000, 0x17fe: 0x4000, 0x17ff: 0x4000, - // Block 0x60, offset 0x1800 - 0x1800: 0x4000, 0x1801: 0x4000, 0x1802: 0x4000, 0x1803: 0x4000, 0x1804: 0x4000, 0x1805: 0x4000, - 0x1806: 0x4000, 0x1807: 0x4000, 0x1808: 0x4000, 0x1809: 0x4000, 0x180a: 0x4000, 0x180b: 0x4000, - 0x180c: 0x4000, 0x180d: 0x4000, 0x180e: 0x4000, 0x180f: 0x4000, - // Block 0x61, offset 0x1840 - 0x1840: 0x4000, 0x1841: 0x4000, 0x1842: 0x4000, 0x1843: 0x4000, 0x1844: 0x4000, 0x1845: 0x4000, - 0x184c: 0x4000, 0x1850: 0x4000, 0x1851: 0x4000, - 0x1852: 0x4000, 0x1855: 0x4000, 0x1856: 0x4000, 0x1857: 0x4000, - 0x185c: 0x4000, 0x185d: 0x4000, - 0x185e: 0x4000, 0x185f: 0x4000, - 0x186b: 0x4000, 0x186c: 0x4000, - 0x1874: 0x4000, 0x1875: 0x4000, - 0x1876: 0x4000, 0x1877: 0x4000, 0x1878: 0x4000, 0x1879: 0x4000, 0x187a: 0x4000, 0x187b: 0x4000, - 0x187c: 0x4000, - // Block 0x62, offset 0x1880 - 0x18a0: 0x4000, 0x18a1: 0x4000, 0x18a2: 0x4000, 0x18a3: 0x4000, - 0x18a4: 0x4000, 0x18a5: 0x4000, 0x18a6: 0x4000, 0x18a7: 0x4000, 0x18a8: 0x4000, 0x18a9: 0x4000, - 0x18aa: 0x4000, 0x18ab: 0x4000, - 0x18b0: 0x4000, - // Block 0x63, offset 0x18c0 - 0x18cc: 0x4000, 0x18cd: 0x4000, 0x18ce: 0x4000, 0x18cf: 0x4000, 0x18d0: 0x4000, 0x18d1: 0x4000, - 0x18d2: 0x4000, 0x18d3: 0x4000, 0x18d4: 0x4000, 0x18d5: 0x4000, 0x18d6: 0x4000, 0x18d7: 0x4000, - 0x18d8: 0x4000, 0x18d9: 0x4000, 0x18da: 0x4000, 0x18db: 0x4000, 0x18dc: 0x4000, 0x18dd: 0x4000, - 0x18de: 0x4000, 0x18df: 0x4000, 0x18e0: 0x4000, 0x18e1: 0x4000, 0x18e2: 0x4000, 0x18e3: 0x4000, - 0x18e4: 0x4000, 0x18e5: 0x4000, 0x18e6: 0x4000, 0x18e7: 0x4000, 0x18e8: 0x4000, 0x18e9: 0x4000, - 0x18ea: 0x4000, 0x18eb: 0x4000, 0x18ec: 0x4000, 0x18ed: 0x4000, 0x18ee: 0x4000, 0x18ef: 0x4000, - 0x18f0: 0x4000, 0x18f1: 0x4000, 0x18f2: 0x4000, 0x18f3: 0x4000, 0x18f4: 0x4000, 0x18f5: 0x4000, - 0x18f6: 0x4000, 0x18f7: 0x4000, 0x18f8: 0x4000, 0x18f9: 0x4000, 0x18fa: 0x4000, - 0x18fc: 0x4000, 0x18fd: 0x4000, 0x18fe: 0x4000, 0x18ff: 0x4000, - // Block 0x64, offset 0x1900 - 0x1900: 0x4000, 0x1901: 0x4000, 0x1902: 0x4000, 0x1903: 0x4000, 0x1904: 0x4000, 0x1905: 0x4000, - 0x1907: 0x4000, 0x1908: 0x4000, 0x1909: 0x4000, 0x190a: 0x4000, 0x190b: 0x4000, - 0x190c: 0x4000, 0x190d: 0x4000, 0x190e: 0x4000, 0x190f: 0x4000, 0x1910: 0x4000, 0x1911: 0x4000, - 0x1912: 0x4000, 0x1913: 0x4000, 0x1914: 0x4000, 0x1915: 0x4000, 0x1916: 0x4000, 0x1917: 0x4000, - 0x1918: 0x4000, 0x1919: 0x4000, 0x191a: 0x4000, 0x191b: 0x4000, 0x191c: 0x4000, 0x191d: 0x4000, - 0x191e: 0x4000, 0x191f: 0x4000, 0x1920: 0x4000, 0x1921: 0x4000, 0x1922: 0x4000, 0x1923: 0x4000, - 0x1924: 0x4000, 0x1925: 0x4000, 0x1926: 0x4000, 0x1927: 0x4000, 0x1928: 0x4000, 0x1929: 0x4000, - 0x192a: 0x4000, 0x192b: 0x4000, 0x192c: 0x4000, 0x192d: 0x4000, 0x192e: 0x4000, 0x192f: 0x4000, - 0x1930: 0x4000, 0x1931: 0x4000, 0x1932: 0x4000, 0x1933: 0x4000, 0x1934: 0x4000, 0x1935: 0x4000, - 0x1936: 0x4000, 0x1937: 0x4000, 0x1938: 0x4000, 0x1939: 0x4000, 0x193a: 0x4000, 0x193b: 0x4000, - 0x193c: 0x4000, 0x193d: 0x4000, 0x193e: 0x4000, 0x193f: 0x4000, - // Block 0x65, offset 0x1940 - 0x1970: 0x4000, 0x1971: 0x4000, 0x1972: 0x4000, 0x1973: 0x4000, 0x1974: 0x4000, 0x1975: 0x4000, - 0x1976: 0x4000, 
0x1977: 0x4000, 0x1978: 0x4000, 0x1979: 0x4000, 0x197a: 0x4000, 0x197b: 0x4000, - 0x197c: 0x4000, - // Block 0x66, offset 0x1980 - 0x1980: 0x4000, 0x1981: 0x4000, 0x1982: 0x4000, 0x1983: 0x4000, 0x1984: 0x4000, 0x1985: 0x4000, - 0x1986: 0x4000, 0x1987: 0x4000, 0x1988: 0x4000, - 0x1990: 0x4000, 0x1991: 0x4000, - 0x1992: 0x4000, 0x1993: 0x4000, 0x1994: 0x4000, 0x1995: 0x4000, 0x1996: 0x4000, 0x1997: 0x4000, - 0x1998: 0x4000, 0x1999: 0x4000, 0x199a: 0x4000, 0x199b: 0x4000, 0x199c: 0x4000, 0x199d: 0x4000, - 0x199e: 0x4000, 0x199f: 0x4000, 0x19a0: 0x4000, 0x19a1: 0x4000, 0x19a2: 0x4000, 0x19a3: 0x4000, - 0x19a4: 0x4000, 0x19a5: 0x4000, 0x19a6: 0x4000, 0x19a7: 0x4000, 0x19a8: 0x4000, 0x19a9: 0x4000, - 0x19aa: 0x4000, 0x19ab: 0x4000, 0x19ac: 0x4000, 0x19ad: 0x4000, 0x19ae: 0x4000, 0x19af: 0x4000, - 0x19b0: 0x4000, 0x19b1: 0x4000, 0x19b2: 0x4000, 0x19b3: 0x4000, 0x19b4: 0x4000, 0x19b5: 0x4000, - 0x19b6: 0x4000, 0x19b7: 0x4000, 0x19b8: 0x4000, 0x19b9: 0x4000, 0x19ba: 0x4000, 0x19bb: 0x4000, - 0x19bc: 0x4000, 0x19bd: 0x4000, 0x19bf: 0x4000, - // Block 0x67, offset 0x19c0 - 0x19c0: 0x4000, 0x19c1: 0x4000, 0x19c2: 0x4000, 0x19c3: 0x4000, 0x19c4: 0x4000, 0x19c5: 0x4000, - 0x19ce: 0x4000, 0x19cf: 0x4000, 0x19d0: 0x4000, 0x19d1: 0x4000, - 0x19d2: 0x4000, 0x19d3: 0x4000, 0x19d4: 0x4000, 0x19d5: 0x4000, 0x19d6: 0x4000, 0x19d7: 0x4000, - 0x19d8: 0x4000, 0x19d9: 0x4000, 0x19da: 0x4000, 0x19db: 0x4000, - 0x19e0: 0x4000, 0x19e1: 0x4000, 0x19e2: 0x4000, 0x19e3: 0x4000, - 0x19e4: 0x4000, 0x19e5: 0x4000, 0x19e6: 0x4000, 0x19e7: 0x4000, 0x19e8: 0x4000, - 0x19f0: 0x4000, 0x19f1: 0x4000, 0x19f2: 0x4000, 0x19f3: 0x4000, 0x19f4: 0x4000, 0x19f5: 0x4000, - 0x19f6: 0x4000, 0x19f7: 0x4000, 0x19f8: 0x4000, - // Block 0x68, offset 0x1a00 - 0x1a00: 0x2000, 0x1a01: 0x2000, 0x1a02: 0x2000, 0x1a03: 0x2000, 0x1a04: 0x2000, 0x1a05: 0x2000, - 0x1a06: 0x2000, 0x1a07: 0x2000, 0x1a08: 0x2000, 0x1a09: 0x2000, 0x1a0a: 0x2000, 0x1a0b: 0x2000, - 0x1a0c: 0x2000, 0x1a0d: 0x2000, 0x1a0e: 0x2000, 0x1a0f: 0x2000, 0x1a10: 0x2000, 0x1a11: 0x2000, - 0x1a12: 0x2000, 0x1a13: 0x2000, 0x1a14: 0x2000, 0x1a15: 0x2000, 0x1a16: 0x2000, 0x1a17: 0x2000, - 0x1a18: 0x2000, 0x1a19: 0x2000, 0x1a1a: 0x2000, 0x1a1b: 0x2000, 0x1a1c: 0x2000, 0x1a1d: 0x2000, - 0x1a1e: 0x2000, 0x1a1f: 0x2000, 0x1a20: 0x2000, 0x1a21: 0x2000, 0x1a22: 0x2000, 0x1a23: 0x2000, - 0x1a24: 0x2000, 0x1a25: 0x2000, 0x1a26: 0x2000, 0x1a27: 0x2000, 0x1a28: 0x2000, 0x1a29: 0x2000, - 0x1a2a: 0x2000, 0x1a2b: 0x2000, 0x1a2c: 0x2000, 0x1a2d: 0x2000, 0x1a2e: 0x2000, 0x1a2f: 0x2000, - 0x1a30: 0x2000, 0x1a31: 0x2000, 0x1a32: 0x2000, 0x1a33: 0x2000, 0x1a34: 0x2000, 0x1a35: 0x2000, - 0x1a36: 0x2000, 0x1a37: 0x2000, 0x1a38: 0x2000, 0x1a39: 0x2000, 0x1a3a: 0x2000, 0x1a3b: 0x2000, - 0x1a3c: 0x2000, 0x1a3d: 0x2000, -} - -// widthIndex: 23 blocks, 1472 entries, 1472 bytes -// Block 0 is the zero block. 
-var widthIndex = [1472]uint8{ - // Block 0x0, offset 0x0 - // Block 0x1, offset 0x40 - // Block 0x2, offset 0x80 - // Block 0x3, offset 0xc0 - 0xc2: 0x01, 0xc3: 0x02, 0xc4: 0x03, 0xc5: 0x04, 0xc7: 0x05, - 0xc9: 0x06, 0xcb: 0x07, 0xcc: 0x08, 0xcd: 0x09, 0xce: 0x0a, 0xcf: 0x0b, - 0xd0: 0x0c, 0xd1: 0x0d, - 0xe1: 0x02, 0xe2: 0x03, 0xe3: 0x04, 0xe4: 0x05, 0xe5: 0x06, 0xe6: 0x06, 0xe7: 0x06, - 0xe8: 0x06, 0xe9: 0x06, 0xea: 0x07, 0xeb: 0x06, 0xec: 0x06, 0xed: 0x08, 0xee: 0x09, 0xef: 0x0a, - 0xf0: 0x10, 0xf3: 0x13, 0xf4: 0x14, - // Block 0x4, offset 0x100 - 0x104: 0x0e, 0x105: 0x0f, - // Block 0x5, offset 0x140 - 0x140: 0x10, 0x141: 0x11, 0x142: 0x12, 0x144: 0x13, 0x145: 0x14, 0x146: 0x15, 0x147: 0x16, - 0x148: 0x17, 0x149: 0x18, 0x14a: 0x19, 0x14c: 0x1a, 0x14f: 0x1b, - 0x151: 0x1c, 0x152: 0x08, 0x153: 0x1d, 0x154: 0x1e, 0x155: 0x1f, 0x156: 0x20, 0x157: 0x21, - 0x158: 0x22, 0x159: 0x23, 0x15a: 0x24, 0x15b: 0x25, 0x15c: 0x26, 0x15d: 0x27, 0x15e: 0x28, 0x15f: 0x29, - 0x166: 0x2a, - 0x16c: 0x2b, 0x16d: 0x2c, - 0x17a: 0x2d, 0x17b: 0x2e, 0x17c: 0x0e, 0x17d: 0x0e, 0x17e: 0x0e, 0x17f: 0x2f, - // Block 0x6, offset 0x180 - 0x180: 0x30, 0x181: 0x31, 0x182: 0x32, 0x183: 0x33, 0x184: 0x34, 0x185: 0x35, 0x186: 0x36, 0x187: 0x37, - 0x188: 0x38, 0x189: 0x39, 0x18a: 0x0e, 0x18b: 0x0e, 0x18c: 0x0e, 0x18d: 0x0e, 0x18e: 0x0e, 0x18f: 0x0e, - 0x190: 0x0e, 0x191: 0x0e, 0x192: 0x0e, 0x193: 0x0e, 0x194: 0x0e, 0x195: 0x0e, 0x196: 0x0e, 0x197: 0x0e, - 0x198: 0x0e, 0x199: 0x0e, 0x19a: 0x0e, 0x19b: 0x0e, 0x19c: 0x0e, 0x19d: 0x0e, 0x19e: 0x0e, 0x19f: 0x0e, - 0x1a0: 0x0e, 0x1a1: 0x0e, 0x1a2: 0x0e, 0x1a3: 0x0e, 0x1a4: 0x0e, 0x1a5: 0x0e, 0x1a6: 0x0e, 0x1a7: 0x0e, - 0x1a8: 0x0e, 0x1a9: 0x0e, 0x1aa: 0x0e, 0x1ab: 0x0e, 0x1ac: 0x0e, 0x1ad: 0x0e, 0x1ae: 0x0e, 0x1af: 0x0e, - 0x1b0: 0x0e, 0x1b1: 0x0e, 0x1b2: 0x0e, 0x1b3: 0x0e, 0x1b4: 0x0e, 0x1b5: 0x0e, 0x1b6: 0x0e, 0x1b7: 0x0e, - 0x1b8: 0x0e, 0x1b9: 0x0e, 0x1ba: 0x0e, 0x1bb: 0x0e, 0x1bc: 0x0e, 0x1bd: 0x0e, 0x1be: 0x0e, 0x1bf: 0x0e, - // Block 0x7, offset 0x1c0 - 0x1c0: 0x0e, 0x1c1: 0x0e, 0x1c2: 0x0e, 0x1c3: 0x0e, 0x1c4: 0x0e, 0x1c5: 0x0e, 0x1c6: 0x0e, 0x1c7: 0x0e, - 0x1c8: 0x0e, 0x1c9: 0x0e, 0x1ca: 0x0e, 0x1cb: 0x0e, 0x1cc: 0x0e, 0x1cd: 0x0e, 0x1ce: 0x0e, 0x1cf: 0x0e, - 0x1d0: 0x0e, 0x1d1: 0x0e, 0x1d2: 0x0e, 0x1d3: 0x0e, 0x1d4: 0x0e, 0x1d5: 0x0e, 0x1d6: 0x0e, 0x1d7: 0x0e, - 0x1d8: 0x0e, 0x1d9: 0x0e, 0x1da: 0x0e, 0x1db: 0x0e, 0x1dc: 0x0e, 0x1dd: 0x0e, 0x1de: 0x0e, 0x1df: 0x0e, - 0x1e0: 0x0e, 0x1e1: 0x0e, 0x1e2: 0x0e, 0x1e3: 0x0e, 0x1e4: 0x0e, 0x1e5: 0x0e, 0x1e6: 0x0e, 0x1e7: 0x0e, - 0x1e8: 0x0e, 0x1e9: 0x0e, 0x1ea: 0x0e, 0x1eb: 0x0e, 0x1ec: 0x0e, 0x1ed: 0x0e, 0x1ee: 0x0e, 0x1ef: 0x0e, - 0x1f0: 0x0e, 0x1f1: 0x0e, 0x1f2: 0x0e, 0x1f3: 0x0e, 0x1f4: 0x0e, 0x1f5: 0x0e, 0x1f6: 0x0e, - 0x1f8: 0x0e, 0x1f9: 0x0e, 0x1fa: 0x0e, 0x1fb: 0x0e, 0x1fc: 0x0e, 0x1fd: 0x0e, 0x1fe: 0x0e, 0x1ff: 0x0e, - // Block 0x8, offset 0x200 - 0x200: 0x0e, 0x201: 0x0e, 0x202: 0x0e, 0x203: 0x0e, 0x204: 0x0e, 0x205: 0x0e, 0x206: 0x0e, 0x207: 0x0e, - 0x208: 0x0e, 0x209: 0x0e, 0x20a: 0x0e, 0x20b: 0x0e, 0x20c: 0x0e, 0x20d: 0x0e, 0x20e: 0x0e, 0x20f: 0x0e, - 0x210: 0x0e, 0x211: 0x0e, 0x212: 0x0e, 0x213: 0x0e, 0x214: 0x0e, 0x215: 0x0e, 0x216: 0x0e, 0x217: 0x0e, - 0x218: 0x0e, 0x219: 0x0e, 0x21a: 0x0e, 0x21b: 0x0e, 0x21c: 0x0e, 0x21d: 0x0e, 0x21e: 0x0e, 0x21f: 0x0e, - 0x220: 0x0e, 0x221: 0x0e, 0x222: 0x0e, 0x223: 0x0e, 0x224: 0x0e, 0x225: 0x0e, 0x226: 0x0e, 0x227: 0x0e, - 0x228: 0x0e, 0x229: 0x0e, 0x22a: 0x0e, 0x22b: 0x0e, 0x22c: 0x0e, 0x22d: 0x0e, 0x22e: 0x0e, 0x22f: 0x0e, - 0x230: 0x0e, 0x231: 0x0e, 0x232: 0x0e, 0x233: 0x0e, 
0x234: 0x0e, 0x235: 0x0e, 0x236: 0x0e, 0x237: 0x0e, - 0x238: 0x0e, 0x239: 0x0e, 0x23a: 0x0e, 0x23b: 0x0e, 0x23c: 0x0e, 0x23d: 0x0e, 0x23e: 0x0e, 0x23f: 0x0e, - // Block 0x9, offset 0x240 - 0x240: 0x0e, 0x241: 0x0e, 0x242: 0x0e, 0x243: 0x0e, 0x244: 0x0e, 0x245: 0x0e, 0x246: 0x0e, 0x247: 0x0e, - 0x248: 0x0e, 0x249: 0x0e, 0x24a: 0x0e, 0x24b: 0x0e, 0x24c: 0x0e, 0x24d: 0x0e, 0x24e: 0x0e, 0x24f: 0x0e, - 0x250: 0x0e, 0x251: 0x0e, 0x252: 0x3a, 0x253: 0x3b, - 0x265: 0x3c, - 0x270: 0x0e, 0x271: 0x0e, 0x272: 0x0e, 0x273: 0x0e, 0x274: 0x0e, 0x275: 0x0e, 0x276: 0x0e, 0x277: 0x0e, - 0x278: 0x0e, 0x279: 0x0e, 0x27a: 0x0e, 0x27b: 0x0e, 0x27c: 0x0e, 0x27d: 0x0e, 0x27e: 0x0e, 0x27f: 0x0e, - // Block 0xa, offset 0x280 - 0x280: 0x0e, 0x281: 0x0e, 0x282: 0x0e, 0x283: 0x0e, 0x284: 0x0e, 0x285: 0x0e, 0x286: 0x0e, 0x287: 0x0e, - 0x288: 0x0e, 0x289: 0x0e, 0x28a: 0x0e, 0x28b: 0x0e, 0x28c: 0x0e, 0x28d: 0x0e, 0x28e: 0x0e, 0x28f: 0x0e, - 0x290: 0x0e, 0x291: 0x0e, 0x292: 0x0e, 0x293: 0x0e, 0x294: 0x0e, 0x295: 0x0e, 0x296: 0x0e, 0x297: 0x0e, - 0x298: 0x0e, 0x299: 0x0e, 0x29a: 0x0e, 0x29b: 0x0e, 0x29c: 0x0e, 0x29d: 0x0e, 0x29e: 0x3d, - // Block 0xb, offset 0x2c0 - 0x2c0: 0x08, 0x2c1: 0x08, 0x2c2: 0x08, 0x2c3: 0x08, 0x2c4: 0x08, 0x2c5: 0x08, 0x2c6: 0x08, 0x2c7: 0x08, - 0x2c8: 0x08, 0x2c9: 0x08, 0x2ca: 0x08, 0x2cb: 0x08, 0x2cc: 0x08, 0x2cd: 0x08, 0x2ce: 0x08, 0x2cf: 0x08, - 0x2d0: 0x08, 0x2d1: 0x08, 0x2d2: 0x08, 0x2d3: 0x08, 0x2d4: 0x08, 0x2d5: 0x08, 0x2d6: 0x08, 0x2d7: 0x08, - 0x2d8: 0x08, 0x2d9: 0x08, 0x2da: 0x08, 0x2db: 0x08, 0x2dc: 0x08, 0x2dd: 0x08, 0x2de: 0x08, 0x2df: 0x08, - 0x2e0: 0x08, 0x2e1: 0x08, 0x2e2: 0x08, 0x2e3: 0x08, 0x2e4: 0x08, 0x2e5: 0x08, 0x2e6: 0x08, 0x2e7: 0x08, - 0x2e8: 0x08, 0x2e9: 0x08, 0x2ea: 0x08, 0x2eb: 0x08, 0x2ec: 0x08, 0x2ed: 0x08, 0x2ee: 0x08, 0x2ef: 0x08, - 0x2f0: 0x08, 0x2f1: 0x08, 0x2f2: 0x08, 0x2f3: 0x08, 0x2f4: 0x08, 0x2f5: 0x08, 0x2f6: 0x08, 0x2f7: 0x08, - 0x2f8: 0x08, 0x2f9: 0x08, 0x2fa: 0x08, 0x2fb: 0x08, 0x2fc: 0x08, 0x2fd: 0x08, 0x2fe: 0x08, 0x2ff: 0x08, - // Block 0xc, offset 0x300 - 0x300: 0x08, 0x301: 0x08, 0x302: 0x08, 0x303: 0x08, 0x304: 0x08, 0x305: 0x08, 0x306: 0x08, 0x307: 0x08, - 0x308: 0x08, 0x309: 0x08, 0x30a: 0x08, 0x30b: 0x08, 0x30c: 0x08, 0x30d: 0x08, 0x30e: 0x08, 0x30f: 0x08, - 0x310: 0x08, 0x311: 0x08, 0x312: 0x08, 0x313: 0x08, 0x314: 0x08, 0x315: 0x08, 0x316: 0x08, 0x317: 0x08, - 0x318: 0x08, 0x319: 0x08, 0x31a: 0x08, 0x31b: 0x08, 0x31c: 0x08, 0x31d: 0x08, 0x31e: 0x08, 0x31f: 0x08, - 0x320: 0x08, 0x321: 0x08, 0x322: 0x08, 0x323: 0x08, 0x324: 0x0e, 0x325: 0x0e, 0x326: 0x0e, 0x327: 0x0e, - 0x328: 0x0e, 0x329: 0x0e, 0x32a: 0x0e, 0x32b: 0x0e, - 0x338: 0x3e, 0x339: 0x3f, 0x33c: 0x40, 0x33d: 0x41, 0x33e: 0x42, 0x33f: 0x43, - // Block 0xd, offset 0x340 - 0x37f: 0x44, - // Block 0xe, offset 0x380 - 0x380: 0x0e, 0x381: 0x0e, 0x382: 0x0e, 0x383: 0x0e, 0x384: 0x0e, 0x385: 0x0e, 0x386: 0x0e, 0x387: 0x0e, - 0x388: 0x0e, 0x389: 0x0e, 0x38a: 0x0e, 0x38b: 0x0e, 0x38c: 0x0e, 0x38d: 0x0e, 0x38e: 0x0e, 0x38f: 0x0e, - 0x390: 0x0e, 0x391: 0x0e, 0x392: 0x0e, 0x393: 0x0e, 0x394: 0x0e, 0x395: 0x0e, 0x396: 0x0e, 0x397: 0x0e, - 0x398: 0x0e, 0x399: 0x0e, 0x39a: 0x0e, 0x39b: 0x0e, 0x39c: 0x0e, 0x39d: 0x0e, 0x39e: 0x0e, 0x39f: 0x45, - 0x3a0: 0x0e, 0x3a1: 0x0e, 0x3a2: 0x0e, 0x3a3: 0x0e, 0x3a4: 0x0e, 0x3a5: 0x0e, 0x3a6: 0x0e, 0x3a7: 0x0e, - 0x3a8: 0x0e, 0x3a9: 0x0e, 0x3aa: 0x0e, 0x3ab: 0x0e, 0x3ac: 0x0e, 0x3ad: 0x0e, 0x3ae: 0x0e, 0x3af: 0x0e, - 0x3b0: 0x0e, 0x3b1: 0x0e, 0x3b2: 0x0e, 0x3b3: 0x46, 0x3b4: 0x47, - // Block 0xf, offset 0x3c0 - 0x3ff: 0x48, - // Block 0x10, offset 0x400 - 0x400: 0x0e, 
0x401: 0x0e, 0x402: 0x0e, 0x403: 0x0e, 0x404: 0x49, 0x405: 0x4a, 0x406: 0x0e, 0x407: 0x0e, - 0x408: 0x0e, 0x409: 0x0e, 0x40a: 0x0e, 0x40b: 0x4b, - // Block 0x11, offset 0x440 - 0x440: 0x4c, 0x443: 0x4d, 0x444: 0x4e, 0x445: 0x4f, 0x446: 0x50, - 0x448: 0x51, 0x449: 0x52, 0x44c: 0x53, 0x44d: 0x54, 0x44e: 0x55, 0x44f: 0x56, - 0x450: 0x57, 0x451: 0x58, 0x452: 0x0e, 0x453: 0x59, 0x454: 0x5a, 0x455: 0x5b, 0x456: 0x5c, 0x457: 0x5d, - 0x458: 0x0e, 0x459: 0x5e, 0x45a: 0x0e, 0x45b: 0x5f, 0x45f: 0x60, - 0x464: 0x61, 0x465: 0x62, 0x466: 0x0e, 0x467: 0x0e, - 0x469: 0x63, 0x46a: 0x64, 0x46b: 0x65, - // Block 0x12, offset 0x480 - 0x496: 0x0b, 0x497: 0x06, - 0x498: 0x0c, 0x49a: 0x0d, 0x49b: 0x0e, 0x49f: 0x0f, - 0x4a0: 0x06, 0x4a1: 0x06, 0x4a2: 0x06, 0x4a3: 0x06, 0x4a4: 0x06, 0x4a5: 0x06, 0x4a6: 0x06, 0x4a7: 0x06, - 0x4a8: 0x06, 0x4a9: 0x06, 0x4aa: 0x06, 0x4ab: 0x06, 0x4ac: 0x06, 0x4ad: 0x06, 0x4ae: 0x06, 0x4af: 0x06, - 0x4b0: 0x06, 0x4b1: 0x06, 0x4b2: 0x06, 0x4b3: 0x06, 0x4b4: 0x06, 0x4b5: 0x06, 0x4b6: 0x06, 0x4b7: 0x06, - 0x4b8: 0x06, 0x4b9: 0x06, 0x4ba: 0x06, 0x4bb: 0x06, 0x4bc: 0x06, 0x4bd: 0x06, 0x4be: 0x06, 0x4bf: 0x06, - // Block 0x13, offset 0x4c0 - 0x4c4: 0x08, 0x4c5: 0x08, 0x4c6: 0x08, 0x4c7: 0x09, - // Block 0x14, offset 0x500 - 0x500: 0x08, 0x501: 0x08, 0x502: 0x08, 0x503: 0x08, 0x504: 0x08, 0x505: 0x08, 0x506: 0x08, 0x507: 0x08, - 0x508: 0x08, 0x509: 0x08, 0x50a: 0x08, 0x50b: 0x08, 0x50c: 0x08, 0x50d: 0x08, 0x50e: 0x08, 0x50f: 0x08, - 0x510: 0x08, 0x511: 0x08, 0x512: 0x08, 0x513: 0x08, 0x514: 0x08, 0x515: 0x08, 0x516: 0x08, 0x517: 0x08, - 0x518: 0x08, 0x519: 0x08, 0x51a: 0x08, 0x51b: 0x08, 0x51c: 0x08, 0x51d: 0x08, 0x51e: 0x08, 0x51f: 0x08, - 0x520: 0x08, 0x521: 0x08, 0x522: 0x08, 0x523: 0x08, 0x524: 0x08, 0x525: 0x08, 0x526: 0x08, 0x527: 0x08, - 0x528: 0x08, 0x529: 0x08, 0x52a: 0x08, 0x52b: 0x08, 0x52c: 0x08, 0x52d: 0x08, 0x52e: 0x08, 0x52f: 0x08, - 0x530: 0x08, 0x531: 0x08, 0x532: 0x08, 0x533: 0x08, 0x534: 0x08, 0x535: 0x08, 0x536: 0x08, 0x537: 0x08, - 0x538: 0x08, 0x539: 0x08, 0x53a: 0x08, 0x53b: 0x08, 0x53c: 0x08, 0x53d: 0x08, 0x53e: 0x08, 0x53f: 0x66, - // Block 0x15, offset 0x540 - 0x560: 0x11, - 0x570: 0x09, 0x571: 0x09, 0x572: 0x09, 0x573: 0x09, 0x574: 0x09, 0x575: 0x09, 0x576: 0x09, 0x577: 0x09, - 0x578: 0x09, 0x579: 0x09, 0x57a: 0x09, 0x57b: 0x09, 0x57c: 0x09, 0x57d: 0x09, 0x57e: 0x09, 0x57f: 0x12, - // Block 0x16, offset 0x580 - 0x580: 0x09, 0x581: 0x09, 0x582: 0x09, 0x583: 0x09, 0x584: 0x09, 0x585: 0x09, 0x586: 0x09, 0x587: 0x09, - 0x588: 0x09, 0x589: 0x09, 0x58a: 0x09, 0x58b: 0x09, 0x58c: 0x09, 0x58d: 0x09, 0x58e: 0x09, 0x58f: 0x12, -} - -// inverseData contains 4-byte entries of the following format: -// -// <0 padding> -// -// The last byte of the UTF-8-encoded rune is xor-ed with the last byte of the -// UTF-8 encoding of the original rune. Mappings often have the following -// pattern: -// -// A -> A (U+FF21 -> U+0041) -// B -> B (U+FF22 -> U+0042) -// ... -// -// By xor-ing the last byte the same entry can be shared by many mappings. This -// reduces the total number of distinct entries by about two thirds. -// The resulting entry for the aforementioned mappings is -// -// { 0x01, 0xE0, 0x00, 0x00 } -// -// Using this entry to map U+FF21 (UTF-8 [EF BC A1]), we get -// -// E0 ^ A1 = 41. -// -// Similarly, for U+FF22 (UTF-8 [EF BC A2]), we get -// -// E0 ^ A2 = 42. -// -// Note that because of the xor-ing, the byte sequence stored in the entry is -// not valid UTF-8. 
-var inverseData = [150][4]byte{ - {0x00, 0x00, 0x00, 0x00}, - {0x03, 0xe3, 0x80, 0xa0}, - {0x03, 0xef, 0xbc, 0xa0}, - {0x03, 0xef, 0xbc, 0xe0}, - {0x03, 0xef, 0xbd, 0xe0}, - {0x03, 0xef, 0xbf, 0x02}, - {0x03, 0xef, 0xbf, 0x00}, - {0x03, 0xef, 0xbf, 0x0e}, - {0x03, 0xef, 0xbf, 0x0c}, - {0x03, 0xef, 0xbf, 0x0f}, - {0x03, 0xef, 0xbf, 0x39}, - {0x03, 0xef, 0xbf, 0x3b}, - {0x03, 0xef, 0xbf, 0x3f}, - {0x03, 0xef, 0xbf, 0x2a}, - {0x03, 0xef, 0xbf, 0x0d}, - {0x03, 0xef, 0xbf, 0x25}, - {0x03, 0xef, 0xbd, 0x1a}, - {0x03, 0xef, 0xbd, 0x26}, - {0x01, 0xa0, 0x00, 0x00}, - {0x03, 0xef, 0xbd, 0x25}, - {0x03, 0xef, 0xbd, 0x23}, - {0x03, 0xef, 0xbd, 0x2e}, - {0x03, 0xef, 0xbe, 0x07}, - {0x03, 0xef, 0xbe, 0x05}, - {0x03, 0xef, 0xbd, 0x06}, - {0x03, 0xef, 0xbd, 0x13}, - {0x03, 0xef, 0xbd, 0x0b}, - {0x03, 0xef, 0xbd, 0x16}, - {0x03, 0xef, 0xbd, 0x0c}, - {0x03, 0xef, 0xbd, 0x15}, - {0x03, 0xef, 0xbd, 0x0d}, - {0x03, 0xef, 0xbd, 0x1c}, - {0x03, 0xef, 0xbd, 0x02}, - {0x03, 0xef, 0xbd, 0x1f}, - {0x03, 0xef, 0xbd, 0x1d}, - {0x03, 0xef, 0xbd, 0x17}, - {0x03, 0xef, 0xbd, 0x08}, - {0x03, 0xef, 0xbd, 0x09}, - {0x03, 0xef, 0xbd, 0x0e}, - {0x03, 0xef, 0xbd, 0x04}, - {0x03, 0xef, 0xbd, 0x05}, - {0x03, 0xef, 0xbe, 0x3f}, - {0x03, 0xef, 0xbe, 0x00}, - {0x03, 0xef, 0xbd, 0x2c}, - {0x03, 0xef, 0xbe, 0x06}, - {0x03, 0xef, 0xbe, 0x0c}, - {0x03, 0xef, 0xbe, 0x0f}, - {0x03, 0xef, 0xbe, 0x0d}, - {0x03, 0xef, 0xbe, 0x0b}, - {0x03, 0xef, 0xbe, 0x19}, - {0x03, 0xef, 0xbe, 0x15}, - {0x03, 0xef, 0xbe, 0x11}, - {0x03, 0xef, 0xbe, 0x31}, - {0x03, 0xef, 0xbe, 0x33}, - {0x03, 0xef, 0xbd, 0x0f}, - {0x03, 0xef, 0xbe, 0x30}, - {0x03, 0xef, 0xbe, 0x3e}, - {0x03, 0xef, 0xbe, 0x32}, - {0x03, 0xef, 0xbe, 0x36}, - {0x03, 0xef, 0xbd, 0x14}, - {0x03, 0xef, 0xbe, 0x2e}, - {0x03, 0xef, 0xbd, 0x1e}, - {0x03, 0xef, 0xbe, 0x10}, - {0x03, 0xef, 0xbf, 0x13}, - {0x03, 0xef, 0xbf, 0x15}, - {0x03, 0xef, 0xbf, 0x17}, - {0x03, 0xef, 0xbf, 0x1f}, - {0x03, 0xef, 0xbf, 0x1d}, - {0x03, 0xef, 0xbf, 0x1b}, - {0x03, 0xef, 0xbf, 0x09}, - {0x03, 0xef, 0xbf, 0x0b}, - {0x03, 0xef, 0xbf, 0x37}, - {0x03, 0xef, 0xbe, 0x04}, - {0x01, 0xe0, 0x00, 0x00}, - {0x03, 0xe2, 0xa6, 0x1a}, - {0x03, 0xe2, 0xa6, 0x26}, - {0x03, 0xe3, 0x80, 0x23}, - {0x03, 0xe3, 0x80, 0x2e}, - {0x03, 0xe3, 0x80, 0x25}, - {0x03, 0xe3, 0x83, 0x1e}, - {0x03, 0xe3, 0x83, 0x14}, - {0x03, 0xe3, 0x82, 0x06}, - {0x03, 0xe3, 0x82, 0x0b}, - {0x03, 0xe3, 0x82, 0x0c}, - {0x03, 0xe3, 0x82, 0x0d}, - {0x03, 0xe3, 0x82, 0x02}, - {0x03, 0xe3, 0x83, 0x0f}, - {0x03, 0xe3, 0x83, 0x08}, - {0x03, 0xe3, 0x83, 0x09}, - {0x03, 0xe3, 0x83, 0x2c}, - {0x03, 0xe3, 0x83, 0x0c}, - {0x03, 0xe3, 0x82, 0x13}, - {0x03, 0xe3, 0x82, 0x16}, - {0x03, 0xe3, 0x82, 0x15}, - {0x03, 0xe3, 0x82, 0x1c}, - {0x03, 0xe3, 0x82, 0x1f}, - {0x03, 0xe3, 0x82, 0x1d}, - {0x03, 0xe3, 0x82, 0x1a}, - {0x03, 0xe3, 0x82, 0x17}, - {0x03, 0xe3, 0x82, 0x08}, - {0x03, 0xe3, 0x82, 0x09}, - {0x03, 0xe3, 0x82, 0x0e}, - {0x03, 0xe3, 0x82, 0x04}, - {0x03, 0xe3, 0x82, 0x05}, - {0x03, 0xe3, 0x82, 0x3f}, - {0x03, 0xe3, 0x83, 0x00}, - {0x03, 0xe3, 0x83, 0x06}, - {0x03, 0xe3, 0x83, 0x05}, - {0x03, 0xe3, 0x83, 0x0d}, - {0x03, 0xe3, 0x83, 0x0b}, - {0x03, 0xe3, 0x83, 0x07}, - {0x03, 0xe3, 0x83, 0x19}, - {0x03, 0xe3, 0x83, 0x15}, - {0x03, 0xe3, 0x83, 0x11}, - {0x03, 0xe3, 0x83, 0x31}, - {0x03, 0xe3, 0x83, 0x33}, - {0x03, 0xe3, 0x83, 0x30}, - {0x03, 0xe3, 0x83, 0x3e}, - {0x03, 0xe3, 0x83, 0x32}, - {0x03, 0xe3, 0x83, 0x36}, - {0x03, 0xe3, 0x83, 0x2e}, - {0x03, 0xe3, 0x82, 0x07}, - {0x03, 0xe3, 0x85, 0x04}, - {0x03, 0xe3, 0x84, 0x10}, - {0x03, 0xe3, 0x85, 0x30}, - {0x03, 0xe3, 0x85, 
0x0d}, - {0x03, 0xe3, 0x85, 0x13}, - {0x03, 0xe3, 0x85, 0x15}, - {0x03, 0xe3, 0x85, 0x17}, - {0x03, 0xe3, 0x85, 0x1f}, - {0x03, 0xe3, 0x85, 0x1d}, - {0x03, 0xe3, 0x85, 0x1b}, - {0x03, 0xe3, 0x85, 0x09}, - {0x03, 0xe3, 0x85, 0x0f}, - {0x03, 0xe3, 0x85, 0x0b}, - {0x03, 0xe3, 0x85, 0x37}, - {0x03, 0xe3, 0x85, 0x3b}, - {0x03, 0xe3, 0x85, 0x39}, - {0x03, 0xe3, 0x85, 0x3f}, - {0x02, 0xc2, 0x02, 0x00}, - {0x02, 0xc2, 0x0e, 0x00}, - {0x02, 0xc2, 0x0c, 0x00}, - {0x02, 0xc2, 0x00, 0x00}, - {0x03, 0xe2, 0x82, 0x0f}, - {0x03, 0xe2, 0x94, 0x2a}, - {0x03, 0xe2, 0x86, 0x39}, - {0x03, 0xe2, 0x86, 0x3b}, - {0x03, 0xe2, 0x86, 0x3f}, - {0x03, 0xe2, 0x96, 0x0d}, - {0x03, 0xe2, 0x97, 0x25}, -} - -// Total table size 15512 bytes (15KiB) diff --git a/vendor/golang.org/x/text/width/tables9.0.0.go b/vendor/golang.org/x/text/width/tables9.0.0.go deleted file mode 100644 index d981330a9..000000000 --- a/vendor/golang.org/x/text/width/tables9.0.0.go +++ /dev/null @@ -1,1296 +0,0 @@ -// Code generated by running "go generate" in golang.org/x/text. DO NOT EDIT. - -//go:build !go1.10 - -package width - -// UnicodeVersion is the Unicode version from which the tables in this package are derived. -const UnicodeVersion = "9.0.0" - -// lookup returns the trie value for the first UTF-8 encoding in s and -// the width in bytes of this encoding. The size will be 0 if s does not -// hold enough bytes to complete the encoding. len(s) must be greater than 0. -func (t *widthTrie) lookup(s []byte) (v uint16, sz int) { - c0 := s[0] - switch { - case c0 < 0x80: // is ASCII - return widthValues[c0], 1 - case c0 < 0xC2: - return 0, 1 // Illegal UTF-8: not a starter, not ASCII. - case c0 < 0xE0: // 2-byte UTF-8 - if len(s) < 2 { - return 0, 0 - } - i := widthIndex[c0] - c1 := s[1] - if c1 < 0x80 || 0xC0 <= c1 { - return 0, 1 // Illegal UTF-8: not a continuation byte. - } - return t.lookupValue(uint32(i), c1), 2 - case c0 < 0xF0: // 3-byte UTF-8 - if len(s) < 3 { - return 0, 0 - } - i := widthIndex[c0] - c1 := s[1] - if c1 < 0x80 || 0xC0 <= c1 { - return 0, 1 // Illegal UTF-8: not a continuation byte. - } - o := uint32(i)<<6 + uint32(c1) - i = widthIndex[o] - c2 := s[2] - if c2 < 0x80 || 0xC0 <= c2 { - return 0, 2 // Illegal UTF-8: not a continuation byte. - } - return t.lookupValue(uint32(i), c2), 3 - case c0 < 0xF8: // 4-byte UTF-8 - if len(s) < 4 { - return 0, 0 - } - i := widthIndex[c0] - c1 := s[1] - if c1 < 0x80 || 0xC0 <= c1 { - return 0, 1 // Illegal UTF-8: not a continuation byte. - } - o := uint32(i)<<6 + uint32(c1) - i = widthIndex[o] - c2 := s[2] - if c2 < 0x80 || 0xC0 <= c2 { - return 0, 2 // Illegal UTF-8: not a continuation byte. - } - o = uint32(i)<<6 + uint32(c2) - i = widthIndex[o] - c3 := s[3] - if c3 < 0x80 || 0xC0 <= c3 { - return 0, 3 // Illegal UTF-8: not a continuation byte. - } - return t.lookupValue(uint32(i), c3), 4 - } - // Illegal rune - return 0, 1 -} - -// lookupUnsafe returns the trie value for the first UTF-8 encoding in s. -// s must start with a full and valid UTF-8 encoded rune. 
-func (t *widthTrie) lookupUnsafe(s []byte) uint16 { - c0 := s[0] - if c0 < 0x80 { // is ASCII - return widthValues[c0] - } - i := widthIndex[c0] - if c0 < 0xE0 { // 2-byte UTF-8 - return t.lookupValue(uint32(i), s[1]) - } - i = widthIndex[uint32(i)<<6+uint32(s[1])] - if c0 < 0xF0 { // 3-byte UTF-8 - return t.lookupValue(uint32(i), s[2]) - } - i = widthIndex[uint32(i)<<6+uint32(s[2])] - if c0 < 0xF8 { // 4-byte UTF-8 - return t.lookupValue(uint32(i), s[3]) - } - return 0 -} - -// lookupString returns the trie value for the first UTF-8 encoding in s and -// the width in bytes of this encoding. The size will be 0 if s does not -// hold enough bytes to complete the encoding. len(s) must be greater than 0. -func (t *widthTrie) lookupString(s string) (v uint16, sz int) { - c0 := s[0] - switch { - case c0 < 0x80: // is ASCII - return widthValues[c0], 1 - case c0 < 0xC2: - return 0, 1 // Illegal UTF-8: not a starter, not ASCII. - case c0 < 0xE0: // 2-byte UTF-8 - if len(s) < 2 { - return 0, 0 - } - i := widthIndex[c0] - c1 := s[1] - if c1 < 0x80 || 0xC0 <= c1 { - return 0, 1 // Illegal UTF-8: not a continuation byte. - } - return t.lookupValue(uint32(i), c1), 2 - case c0 < 0xF0: // 3-byte UTF-8 - if len(s) < 3 { - return 0, 0 - } - i := widthIndex[c0] - c1 := s[1] - if c1 < 0x80 || 0xC0 <= c1 { - return 0, 1 // Illegal UTF-8: not a continuation byte. - } - o := uint32(i)<<6 + uint32(c1) - i = widthIndex[o] - c2 := s[2] - if c2 < 0x80 || 0xC0 <= c2 { - return 0, 2 // Illegal UTF-8: not a continuation byte. - } - return t.lookupValue(uint32(i), c2), 3 - case c0 < 0xF8: // 4-byte UTF-8 - if len(s) < 4 { - return 0, 0 - } - i := widthIndex[c0] - c1 := s[1] - if c1 < 0x80 || 0xC0 <= c1 { - return 0, 1 // Illegal UTF-8: not a continuation byte. - } - o := uint32(i)<<6 + uint32(c1) - i = widthIndex[o] - c2 := s[2] - if c2 < 0x80 || 0xC0 <= c2 { - return 0, 2 // Illegal UTF-8: not a continuation byte. - } - o = uint32(i)<<6 + uint32(c2) - i = widthIndex[o] - c3 := s[3] - if c3 < 0x80 || 0xC0 <= c3 { - return 0, 3 // Illegal UTF-8: not a continuation byte. - } - return t.lookupValue(uint32(i), c3), 4 - } - // Illegal rune - return 0, 1 -} - -// lookupStringUnsafe returns the trie value for the first UTF-8 encoding in s. -// s must start with a full and valid UTF-8 encoded rune. -func (t *widthTrie) lookupStringUnsafe(s string) uint16 { - c0 := s[0] - if c0 < 0x80 { // is ASCII - return widthValues[c0] - } - i := widthIndex[c0] - if c0 < 0xE0 { // 2-byte UTF-8 - return t.lookupValue(uint32(i), s[1]) - } - i = widthIndex[uint32(i)<<6+uint32(s[1])] - if c0 < 0xF0 { // 3-byte UTF-8 - return t.lookupValue(uint32(i), s[2]) - } - i = widthIndex[uint32(i)<<6+uint32(s[2])] - if c0 < 0xF8 { // 4-byte UTF-8 - return t.lookupValue(uint32(i), s[3]) - } - return 0 -} - -// widthTrie. Total size: 14080 bytes (13.75 KiB). Checksum: 3b8aeb3dc03667a3. -type widthTrie struct{} - -func newWidthTrie(i int) *widthTrie { - return &widthTrie{} -} - -// lookupValue determines the type of block n and looks up the value for b. -func (t *widthTrie) lookupValue(n uint32, b byte) uint16 { - switch { - default: - return uint16(widthValues[n<<6+uint32(b)]) - } -} - -// widthValues: 99 blocks, 6336 entries, 12672 bytes -// The third block is the zero block. 
-var widthValues = [6336]uint16{ - // Block 0x0, offset 0x0 - 0x20: 0x6001, 0x21: 0x6002, 0x22: 0x6002, 0x23: 0x6002, - 0x24: 0x6002, 0x25: 0x6002, 0x26: 0x6002, 0x27: 0x6002, 0x28: 0x6002, 0x29: 0x6002, - 0x2a: 0x6002, 0x2b: 0x6002, 0x2c: 0x6002, 0x2d: 0x6002, 0x2e: 0x6002, 0x2f: 0x6002, - 0x30: 0x6002, 0x31: 0x6002, 0x32: 0x6002, 0x33: 0x6002, 0x34: 0x6002, 0x35: 0x6002, - 0x36: 0x6002, 0x37: 0x6002, 0x38: 0x6002, 0x39: 0x6002, 0x3a: 0x6002, 0x3b: 0x6002, - 0x3c: 0x6002, 0x3d: 0x6002, 0x3e: 0x6002, 0x3f: 0x6002, - // Block 0x1, offset 0x40 - 0x40: 0x6003, 0x41: 0x6003, 0x42: 0x6003, 0x43: 0x6003, 0x44: 0x6003, 0x45: 0x6003, - 0x46: 0x6003, 0x47: 0x6003, 0x48: 0x6003, 0x49: 0x6003, 0x4a: 0x6003, 0x4b: 0x6003, - 0x4c: 0x6003, 0x4d: 0x6003, 0x4e: 0x6003, 0x4f: 0x6003, 0x50: 0x6003, 0x51: 0x6003, - 0x52: 0x6003, 0x53: 0x6003, 0x54: 0x6003, 0x55: 0x6003, 0x56: 0x6003, 0x57: 0x6003, - 0x58: 0x6003, 0x59: 0x6003, 0x5a: 0x6003, 0x5b: 0x6003, 0x5c: 0x6003, 0x5d: 0x6003, - 0x5e: 0x6003, 0x5f: 0x6003, 0x60: 0x6004, 0x61: 0x6004, 0x62: 0x6004, 0x63: 0x6004, - 0x64: 0x6004, 0x65: 0x6004, 0x66: 0x6004, 0x67: 0x6004, 0x68: 0x6004, 0x69: 0x6004, - 0x6a: 0x6004, 0x6b: 0x6004, 0x6c: 0x6004, 0x6d: 0x6004, 0x6e: 0x6004, 0x6f: 0x6004, - 0x70: 0x6004, 0x71: 0x6004, 0x72: 0x6004, 0x73: 0x6004, 0x74: 0x6004, 0x75: 0x6004, - 0x76: 0x6004, 0x77: 0x6004, 0x78: 0x6004, 0x79: 0x6004, 0x7a: 0x6004, 0x7b: 0x6004, - 0x7c: 0x6004, 0x7d: 0x6004, 0x7e: 0x6004, - // Block 0x2, offset 0x80 - // Block 0x3, offset 0xc0 - 0xe1: 0x2000, 0xe2: 0x6005, 0xe3: 0x6005, - 0xe4: 0x2000, 0xe5: 0x6006, 0xe6: 0x6005, 0xe7: 0x2000, 0xe8: 0x2000, - 0xea: 0x2000, 0xec: 0x6007, 0xed: 0x2000, 0xee: 0x2000, 0xef: 0x6008, - 0xf0: 0x2000, 0xf1: 0x2000, 0xf2: 0x2000, 0xf3: 0x2000, 0xf4: 0x2000, - 0xf6: 0x2000, 0xf7: 0x2000, 0xf8: 0x2000, 0xf9: 0x2000, 0xfa: 0x2000, - 0xfc: 0x2000, 0xfd: 0x2000, 0xfe: 0x2000, 0xff: 0x2000, - // Block 0x4, offset 0x100 - 0x106: 0x2000, - 0x110: 0x2000, - 0x117: 0x2000, - 0x118: 0x2000, - 0x11e: 0x2000, 0x11f: 0x2000, 0x120: 0x2000, 0x121: 0x2000, - 0x126: 0x2000, 0x128: 0x2000, 0x129: 0x2000, - 0x12a: 0x2000, 0x12c: 0x2000, 0x12d: 0x2000, - 0x130: 0x2000, 0x132: 0x2000, 0x133: 0x2000, - 0x137: 0x2000, 0x138: 0x2000, 0x139: 0x2000, 0x13a: 0x2000, - 0x13c: 0x2000, 0x13e: 0x2000, - // Block 0x5, offset 0x140 - 0x141: 0x2000, - 0x151: 0x2000, - 0x153: 0x2000, - 0x15b: 0x2000, - 0x166: 0x2000, 0x167: 0x2000, - 0x16b: 0x2000, - 0x171: 0x2000, 0x172: 0x2000, 0x173: 0x2000, - 0x178: 0x2000, - 0x17f: 0x2000, - // Block 0x6, offset 0x180 - 0x180: 0x2000, 0x181: 0x2000, 0x182: 0x2000, 0x184: 0x2000, - 0x188: 0x2000, 0x189: 0x2000, 0x18a: 0x2000, 0x18b: 0x2000, - 0x18d: 0x2000, - 0x192: 0x2000, 0x193: 0x2000, - 0x1a6: 0x2000, 0x1a7: 0x2000, - 0x1ab: 0x2000, - // Block 0x7, offset 0x1c0 - 0x1ce: 0x2000, 0x1d0: 0x2000, - 0x1d2: 0x2000, 0x1d4: 0x2000, 0x1d6: 0x2000, - 0x1d8: 0x2000, 0x1da: 0x2000, 0x1dc: 0x2000, - // Block 0x8, offset 0x200 - 0x211: 0x2000, - 0x221: 0x2000, - // Block 0x9, offset 0x240 - 0x244: 0x2000, - 0x247: 0x2000, 0x249: 0x2000, 0x24a: 0x2000, 0x24b: 0x2000, - 0x24d: 0x2000, 0x250: 0x2000, - 0x258: 0x2000, 0x259: 0x2000, 0x25a: 0x2000, 0x25b: 0x2000, 0x25d: 0x2000, - 0x25f: 0x2000, - // Block 0xa, offset 0x280 - 0x280: 0x2000, 0x281: 0x2000, 0x282: 0x2000, 0x283: 0x2000, 0x284: 0x2000, 0x285: 0x2000, - 0x286: 0x2000, 0x287: 0x2000, 0x288: 0x2000, 0x289: 0x2000, 0x28a: 0x2000, 0x28b: 0x2000, - 0x28c: 0x2000, 0x28d: 0x2000, 0x28e: 0x2000, 0x28f: 0x2000, 0x290: 0x2000, 0x291: 0x2000, - 0x292: 0x2000, 0x293: 
0x2000, 0x294: 0x2000, 0x295: 0x2000, 0x296: 0x2000, 0x297: 0x2000, - 0x298: 0x2000, 0x299: 0x2000, 0x29a: 0x2000, 0x29b: 0x2000, 0x29c: 0x2000, 0x29d: 0x2000, - 0x29e: 0x2000, 0x29f: 0x2000, 0x2a0: 0x2000, 0x2a1: 0x2000, 0x2a2: 0x2000, 0x2a3: 0x2000, - 0x2a4: 0x2000, 0x2a5: 0x2000, 0x2a6: 0x2000, 0x2a7: 0x2000, 0x2a8: 0x2000, 0x2a9: 0x2000, - 0x2aa: 0x2000, 0x2ab: 0x2000, 0x2ac: 0x2000, 0x2ad: 0x2000, 0x2ae: 0x2000, 0x2af: 0x2000, - 0x2b0: 0x2000, 0x2b1: 0x2000, 0x2b2: 0x2000, 0x2b3: 0x2000, 0x2b4: 0x2000, 0x2b5: 0x2000, - 0x2b6: 0x2000, 0x2b7: 0x2000, 0x2b8: 0x2000, 0x2b9: 0x2000, 0x2ba: 0x2000, 0x2bb: 0x2000, - 0x2bc: 0x2000, 0x2bd: 0x2000, 0x2be: 0x2000, 0x2bf: 0x2000, - // Block 0xb, offset 0x2c0 - 0x2c0: 0x2000, 0x2c1: 0x2000, 0x2c2: 0x2000, 0x2c3: 0x2000, 0x2c4: 0x2000, 0x2c5: 0x2000, - 0x2c6: 0x2000, 0x2c7: 0x2000, 0x2c8: 0x2000, 0x2c9: 0x2000, 0x2ca: 0x2000, 0x2cb: 0x2000, - 0x2cc: 0x2000, 0x2cd: 0x2000, 0x2ce: 0x2000, 0x2cf: 0x2000, 0x2d0: 0x2000, 0x2d1: 0x2000, - 0x2d2: 0x2000, 0x2d3: 0x2000, 0x2d4: 0x2000, 0x2d5: 0x2000, 0x2d6: 0x2000, 0x2d7: 0x2000, - 0x2d8: 0x2000, 0x2d9: 0x2000, 0x2da: 0x2000, 0x2db: 0x2000, 0x2dc: 0x2000, 0x2dd: 0x2000, - 0x2de: 0x2000, 0x2df: 0x2000, 0x2e0: 0x2000, 0x2e1: 0x2000, 0x2e2: 0x2000, 0x2e3: 0x2000, - 0x2e4: 0x2000, 0x2e5: 0x2000, 0x2e6: 0x2000, 0x2e7: 0x2000, 0x2e8: 0x2000, 0x2e9: 0x2000, - 0x2ea: 0x2000, 0x2eb: 0x2000, 0x2ec: 0x2000, 0x2ed: 0x2000, 0x2ee: 0x2000, 0x2ef: 0x2000, - // Block 0xc, offset 0x300 - 0x311: 0x2000, - 0x312: 0x2000, 0x313: 0x2000, 0x314: 0x2000, 0x315: 0x2000, 0x316: 0x2000, 0x317: 0x2000, - 0x318: 0x2000, 0x319: 0x2000, 0x31a: 0x2000, 0x31b: 0x2000, 0x31c: 0x2000, 0x31d: 0x2000, - 0x31e: 0x2000, 0x31f: 0x2000, 0x320: 0x2000, 0x321: 0x2000, 0x323: 0x2000, - 0x324: 0x2000, 0x325: 0x2000, 0x326: 0x2000, 0x327: 0x2000, 0x328: 0x2000, 0x329: 0x2000, - 0x331: 0x2000, 0x332: 0x2000, 0x333: 0x2000, 0x334: 0x2000, 0x335: 0x2000, - 0x336: 0x2000, 0x337: 0x2000, 0x338: 0x2000, 0x339: 0x2000, 0x33a: 0x2000, 0x33b: 0x2000, - 0x33c: 0x2000, 0x33d: 0x2000, 0x33e: 0x2000, 0x33f: 0x2000, - // Block 0xd, offset 0x340 - 0x340: 0x2000, 0x341: 0x2000, 0x343: 0x2000, 0x344: 0x2000, 0x345: 0x2000, - 0x346: 0x2000, 0x347: 0x2000, 0x348: 0x2000, 0x349: 0x2000, - // Block 0xe, offset 0x380 - 0x381: 0x2000, - 0x390: 0x2000, 0x391: 0x2000, - 0x392: 0x2000, 0x393: 0x2000, 0x394: 0x2000, 0x395: 0x2000, 0x396: 0x2000, 0x397: 0x2000, - 0x398: 0x2000, 0x399: 0x2000, 0x39a: 0x2000, 0x39b: 0x2000, 0x39c: 0x2000, 0x39d: 0x2000, - 0x39e: 0x2000, 0x39f: 0x2000, 0x3a0: 0x2000, 0x3a1: 0x2000, 0x3a2: 0x2000, 0x3a3: 0x2000, - 0x3a4: 0x2000, 0x3a5: 0x2000, 0x3a6: 0x2000, 0x3a7: 0x2000, 0x3a8: 0x2000, 0x3a9: 0x2000, - 0x3aa: 0x2000, 0x3ab: 0x2000, 0x3ac: 0x2000, 0x3ad: 0x2000, 0x3ae: 0x2000, 0x3af: 0x2000, - 0x3b0: 0x2000, 0x3b1: 0x2000, 0x3b2: 0x2000, 0x3b3: 0x2000, 0x3b4: 0x2000, 0x3b5: 0x2000, - 0x3b6: 0x2000, 0x3b7: 0x2000, 0x3b8: 0x2000, 0x3b9: 0x2000, 0x3ba: 0x2000, 0x3bb: 0x2000, - 0x3bc: 0x2000, 0x3bd: 0x2000, 0x3be: 0x2000, 0x3bf: 0x2000, - // Block 0xf, offset 0x3c0 - 0x3c0: 0x2000, 0x3c1: 0x2000, 0x3c2: 0x2000, 0x3c3: 0x2000, 0x3c4: 0x2000, 0x3c5: 0x2000, - 0x3c6: 0x2000, 0x3c7: 0x2000, 0x3c8: 0x2000, 0x3c9: 0x2000, 0x3ca: 0x2000, 0x3cb: 0x2000, - 0x3cc: 0x2000, 0x3cd: 0x2000, 0x3ce: 0x2000, 0x3cf: 0x2000, 0x3d1: 0x2000, - // Block 0x10, offset 0x400 - 0x400: 0x4000, 0x401: 0x4000, 0x402: 0x4000, 0x403: 0x4000, 0x404: 0x4000, 0x405: 0x4000, - 0x406: 0x4000, 0x407: 0x4000, 0x408: 0x4000, 0x409: 0x4000, 0x40a: 0x4000, 0x40b: 0x4000, - 0x40c: 0x4000, 
0x40d: 0x4000, 0x40e: 0x4000, 0x40f: 0x4000, 0x410: 0x4000, 0x411: 0x4000, - 0x412: 0x4000, 0x413: 0x4000, 0x414: 0x4000, 0x415: 0x4000, 0x416: 0x4000, 0x417: 0x4000, - 0x418: 0x4000, 0x419: 0x4000, 0x41a: 0x4000, 0x41b: 0x4000, 0x41c: 0x4000, 0x41d: 0x4000, - 0x41e: 0x4000, 0x41f: 0x4000, 0x420: 0x4000, 0x421: 0x4000, 0x422: 0x4000, 0x423: 0x4000, - 0x424: 0x4000, 0x425: 0x4000, 0x426: 0x4000, 0x427: 0x4000, 0x428: 0x4000, 0x429: 0x4000, - 0x42a: 0x4000, 0x42b: 0x4000, 0x42c: 0x4000, 0x42d: 0x4000, 0x42e: 0x4000, 0x42f: 0x4000, - 0x430: 0x4000, 0x431: 0x4000, 0x432: 0x4000, 0x433: 0x4000, 0x434: 0x4000, 0x435: 0x4000, - 0x436: 0x4000, 0x437: 0x4000, 0x438: 0x4000, 0x439: 0x4000, 0x43a: 0x4000, 0x43b: 0x4000, - 0x43c: 0x4000, 0x43d: 0x4000, 0x43e: 0x4000, 0x43f: 0x4000, - // Block 0x11, offset 0x440 - 0x440: 0x4000, 0x441: 0x4000, 0x442: 0x4000, 0x443: 0x4000, 0x444: 0x4000, 0x445: 0x4000, - 0x446: 0x4000, 0x447: 0x4000, 0x448: 0x4000, 0x449: 0x4000, 0x44a: 0x4000, 0x44b: 0x4000, - 0x44c: 0x4000, 0x44d: 0x4000, 0x44e: 0x4000, 0x44f: 0x4000, 0x450: 0x4000, 0x451: 0x4000, - 0x452: 0x4000, 0x453: 0x4000, 0x454: 0x4000, 0x455: 0x4000, 0x456: 0x4000, 0x457: 0x4000, - 0x458: 0x4000, 0x459: 0x4000, 0x45a: 0x4000, 0x45b: 0x4000, 0x45c: 0x4000, 0x45d: 0x4000, - 0x45e: 0x4000, 0x45f: 0x4000, - // Block 0x12, offset 0x480 - 0x490: 0x2000, - 0x493: 0x2000, 0x494: 0x2000, 0x495: 0x2000, 0x496: 0x2000, - 0x498: 0x2000, 0x499: 0x2000, 0x49c: 0x2000, 0x49d: 0x2000, - 0x4a0: 0x2000, 0x4a1: 0x2000, 0x4a2: 0x2000, - 0x4a4: 0x2000, 0x4a5: 0x2000, 0x4a6: 0x2000, 0x4a7: 0x2000, - 0x4b0: 0x2000, 0x4b2: 0x2000, 0x4b3: 0x2000, 0x4b5: 0x2000, - 0x4bb: 0x2000, - 0x4be: 0x2000, - // Block 0x13, offset 0x4c0 - 0x4f4: 0x2000, - 0x4ff: 0x2000, - // Block 0x14, offset 0x500 - 0x501: 0x2000, 0x502: 0x2000, 0x503: 0x2000, 0x504: 0x2000, - 0x529: 0xa009, - 0x52c: 0x2000, - // Block 0x15, offset 0x540 - 0x543: 0x2000, 0x545: 0x2000, - 0x549: 0x2000, - 0x553: 0x2000, 0x556: 0x2000, - 0x561: 0x2000, 0x562: 0x2000, - 0x566: 0x2000, - 0x56b: 0x2000, - // Block 0x16, offset 0x580 - 0x593: 0x2000, 0x594: 0x2000, - 0x59b: 0x2000, 0x59c: 0x2000, 0x59d: 0x2000, - 0x59e: 0x2000, 0x5a0: 0x2000, 0x5a1: 0x2000, 0x5a2: 0x2000, 0x5a3: 0x2000, - 0x5a4: 0x2000, 0x5a5: 0x2000, 0x5a6: 0x2000, 0x5a7: 0x2000, 0x5a8: 0x2000, 0x5a9: 0x2000, - 0x5aa: 0x2000, 0x5ab: 0x2000, - 0x5b0: 0x2000, 0x5b1: 0x2000, 0x5b2: 0x2000, 0x5b3: 0x2000, 0x5b4: 0x2000, 0x5b5: 0x2000, - 0x5b6: 0x2000, 0x5b7: 0x2000, 0x5b8: 0x2000, 0x5b9: 0x2000, - // Block 0x17, offset 0x5c0 - 0x5c9: 0x2000, - 0x5d0: 0x200a, 0x5d1: 0x200b, - 0x5d2: 0x200a, 0x5d3: 0x200c, 0x5d4: 0x2000, 0x5d5: 0x2000, 0x5d6: 0x2000, 0x5d7: 0x2000, - 0x5d8: 0x2000, 0x5d9: 0x2000, - 0x5f8: 0x2000, 0x5f9: 0x2000, - // Block 0x18, offset 0x600 - 0x612: 0x2000, 0x614: 0x2000, - 0x627: 0x2000, - // Block 0x19, offset 0x640 - 0x640: 0x2000, 0x642: 0x2000, 0x643: 0x2000, - 0x647: 0x2000, 0x648: 0x2000, 0x64b: 0x2000, - 0x64f: 0x2000, 0x651: 0x2000, - 0x655: 0x2000, - 0x65a: 0x2000, 0x65d: 0x2000, - 0x65e: 0x2000, 0x65f: 0x2000, 0x660: 0x2000, 0x663: 0x2000, - 0x665: 0x2000, 0x667: 0x2000, 0x668: 0x2000, 0x669: 0x2000, - 0x66a: 0x2000, 0x66b: 0x2000, 0x66c: 0x2000, 0x66e: 0x2000, - 0x674: 0x2000, 0x675: 0x2000, - 0x676: 0x2000, 0x677: 0x2000, - 0x67c: 0x2000, 0x67d: 0x2000, - // Block 0x1a, offset 0x680 - 0x688: 0x2000, - 0x68c: 0x2000, - 0x692: 0x2000, - 0x6a0: 0x2000, 0x6a1: 0x2000, - 0x6a4: 0x2000, 0x6a5: 0x2000, 0x6a6: 0x2000, 0x6a7: 0x2000, - 0x6aa: 0x2000, 0x6ab: 0x2000, 0x6ae: 0x2000, 0x6af: 0x2000, - // 
Block 0x1b, offset 0x6c0 - 0x6c2: 0x2000, 0x6c3: 0x2000, - 0x6c6: 0x2000, 0x6c7: 0x2000, - 0x6d5: 0x2000, - 0x6d9: 0x2000, - 0x6e5: 0x2000, - 0x6ff: 0x2000, - // Block 0x1c, offset 0x700 - 0x712: 0x2000, - 0x71a: 0x4000, 0x71b: 0x4000, - 0x729: 0x4000, - 0x72a: 0x4000, - // Block 0x1d, offset 0x740 - 0x769: 0x4000, - 0x76a: 0x4000, 0x76b: 0x4000, 0x76c: 0x4000, - 0x770: 0x4000, 0x773: 0x4000, - // Block 0x1e, offset 0x780 - 0x7a0: 0x2000, 0x7a1: 0x2000, 0x7a2: 0x2000, 0x7a3: 0x2000, - 0x7a4: 0x2000, 0x7a5: 0x2000, 0x7a6: 0x2000, 0x7a7: 0x2000, 0x7a8: 0x2000, 0x7a9: 0x2000, - 0x7aa: 0x2000, 0x7ab: 0x2000, 0x7ac: 0x2000, 0x7ad: 0x2000, 0x7ae: 0x2000, 0x7af: 0x2000, - 0x7b0: 0x2000, 0x7b1: 0x2000, 0x7b2: 0x2000, 0x7b3: 0x2000, 0x7b4: 0x2000, 0x7b5: 0x2000, - 0x7b6: 0x2000, 0x7b7: 0x2000, 0x7b8: 0x2000, 0x7b9: 0x2000, 0x7ba: 0x2000, 0x7bb: 0x2000, - 0x7bc: 0x2000, 0x7bd: 0x2000, 0x7be: 0x2000, 0x7bf: 0x2000, - // Block 0x1f, offset 0x7c0 - 0x7c0: 0x2000, 0x7c1: 0x2000, 0x7c2: 0x2000, 0x7c3: 0x2000, 0x7c4: 0x2000, 0x7c5: 0x2000, - 0x7c6: 0x2000, 0x7c7: 0x2000, 0x7c8: 0x2000, 0x7c9: 0x2000, 0x7ca: 0x2000, 0x7cb: 0x2000, - 0x7cc: 0x2000, 0x7cd: 0x2000, 0x7ce: 0x2000, 0x7cf: 0x2000, 0x7d0: 0x2000, 0x7d1: 0x2000, - 0x7d2: 0x2000, 0x7d3: 0x2000, 0x7d4: 0x2000, 0x7d5: 0x2000, 0x7d6: 0x2000, 0x7d7: 0x2000, - 0x7d8: 0x2000, 0x7d9: 0x2000, 0x7da: 0x2000, 0x7db: 0x2000, 0x7dc: 0x2000, 0x7dd: 0x2000, - 0x7de: 0x2000, 0x7df: 0x2000, 0x7e0: 0x2000, 0x7e1: 0x2000, 0x7e2: 0x2000, 0x7e3: 0x2000, - 0x7e4: 0x2000, 0x7e5: 0x2000, 0x7e6: 0x2000, 0x7e7: 0x2000, 0x7e8: 0x2000, 0x7e9: 0x2000, - 0x7eb: 0x2000, 0x7ec: 0x2000, 0x7ed: 0x2000, 0x7ee: 0x2000, 0x7ef: 0x2000, - 0x7f0: 0x2000, 0x7f1: 0x2000, 0x7f2: 0x2000, 0x7f3: 0x2000, 0x7f4: 0x2000, 0x7f5: 0x2000, - 0x7f6: 0x2000, 0x7f7: 0x2000, 0x7f8: 0x2000, 0x7f9: 0x2000, 0x7fa: 0x2000, 0x7fb: 0x2000, - 0x7fc: 0x2000, 0x7fd: 0x2000, 0x7fe: 0x2000, 0x7ff: 0x2000, - // Block 0x20, offset 0x800 - 0x800: 0x2000, 0x801: 0x2000, 0x802: 0x200d, 0x803: 0x2000, 0x804: 0x2000, 0x805: 0x2000, - 0x806: 0x2000, 0x807: 0x2000, 0x808: 0x2000, 0x809: 0x2000, 0x80a: 0x2000, 0x80b: 0x2000, - 0x80c: 0x2000, 0x80d: 0x2000, 0x80e: 0x2000, 0x80f: 0x2000, 0x810: 0x2000, 0x811: 0x2000, - 0x812: 0x2000, 0x813: 0x2000, 0x814: 0x2000, 0x815: 0x2000, 0x816: 0x2000, 0x817: 0x2000, - 0x818: 0x2000, 0x819: 0x2000, 0x81a: 0x2000, 0x81b: 0x2000, 0x81c: 0x2000, 0x81d: 0x2000, - 0x81e: 0x2000, 0x81f: 0x2000, 0x820: 0x2000, 0x821: 0x2000, 0x822: 0x2000, 0x823: 0x2000, - 0x824: 0x2000, 0x825: 0x2000, 0x826: 0x2000, 0x827: 0x2000, 0x828: 0x2000, 0x829: 0x2000, - 0x82a: 0x2000, 0x82b: 0x2000, 0x82c: 0x2000, 0x82d: 0x2000, 0x82e: 0x2000, 0x82f: 0x2000, - 0x830: 0x2000, 0x831: 0x2000, 0x832: 0x2000, 0x833: 0x2000, 0x834: 0x2000, 0x835: 0x2000, - 0x836: 0x2000, 0x837: 0x2000, 0x838: 0x2000, 0x839: 0x2000, 0x83a: 0x2000, 0x83b: 0x2000, - 0x83c: 0x2000, 0x83d: 0x2000, 0x83e: 0x2000, 0x83f: 0x2000, - // Block 0x21, offset 0x840 - 0x840: 0x2000, 0x841: 0x2000, 0x842: 0x2000, 0x843: 0x2000, 0x844: 0x2000, 0x845: 0x2000, - 0x846: 0x2000, 0x847: 0x2000, 0x848: 0x2000, 0x849: 0x2000, 0x84a: 0x2000, 0x84b: 0x2000, - 0x850: 0x2000, 0x851: 0x2000, - 0x852: 0x2000, 0x853: 0x2000, 0x854: 0x2000, 0x855: 0x2000, 0x856: 0x2000, 0x857: 0x2000, - 0x858: 0x2000, 0x859: 0x2000, 0x85a: 0x2000, 0x85b: 0x2000, 0x85c: 0x2000, 0x85d: 0x2000, - 0x85e: 0x2000, 0x85f: 0x2000, 0x860: 0x2000, 0x861: 0x2000, 0x862: 0x2000, 0x863: 0x2000, - 0x864: 0x2000, 0x865: 0x2000, 0x866: 0x2000, 0x867: 0x2000, 0x868: 0x2000, 0x869: 0x2000, - 0x86a: 
0x2000, 0x86b: 0x2000, 0x86c: 0x2000, 0x86d: 0x2000, 0x86e: 0x2000, 0x86f: 0x2000, - 0x870: 0x2000, 0x871: 0x2000, 0x872: 0x2000, 0x873: 0x2000, - // Block 0x22, offset 0x880 - 0x880: 0x2000, 0x881: 0x2000, 0x882: 0x2000, 0x883: 0x2000, 0x884: 0x2000, 0x885: 0x2000, - 0x886: 0x2000, 0x887: 0x2000, 0x888: 0x2000, 0x889: 0x2000, 0x88a: 0x2000, 0x88b: 0x2000, - 0x88c: 0x2000, 0x88d: 0x2000, 0x88e: 0x2000, 0x88f: 0x2000, - 0x892: 0x2000, 0x893: 0x2000, 0x894: 0x2000, 0x895: 0x2000, - 0x8a0: 0x200e, 0x8a1: 0x2000, 0x8a3: 0x2000, - 0x8a4: 0x2000, 0x8a5: 0x2000, 0x8a6: 0x2000, 0x8a7: 0x2000, 0x8a8: 0x2000, 0x8a9: 0x2000, - 0x8b2: 0x2000, 0x8b3: 0x2000, - 0x8b6: 0x2000, 0x8b7: 0x2000, - 0x8bc: 0x2000, 0x8bd: 0x2000, - // Block 0x23, offset 0x8c0 - 0x8c0: 0x2000, 0x8c1: 0x2000, - 0x8c6: 0x2000, 0x8c7: 0x2000, 0x8c8: 0x2000, 0x8cb: 0x200f, - 0x8ce: 0x2000, 0x8cf: 0x2000, 0x8d0: 0x2000, 0x8d1: 0x2000, - 0x8e2: 0x2000, 0x8e3: 0x2000, - 0x8e4: 0x2000, 0x8e5: 0x2000, - 0x8ef: 0x2000, - 0x8fd: 0x4000, 0x8fe: 0x4000, - // Block 0x24, offset 0x900 - 0x905: 0x2000, - 0x906: 0x2000, 0x909: 0x2000, - 0x90e: 0x2000, 0x90f: 0x2000, - 0x914: 0x4000, 0x915: 0x4000, - 0x91c: 0x2000, - 0x91e: 0x2000, - // Block 0x25, offset 0x940 - 0x940: 0x2000, 0x942: 0x2000, - 0x948: 0x4000, 0x949: 0x4000, 0x94a: 0x4000, 0x94b: 0x4000, - 0x94c: 0x4000, 0x94d: 0x4000, 0x94e: 0x4000, 0x94f: 0x4000, 0x950: 0x4000, 0x951: 0x4000, - 0x952: 0x4000, 0x953: 0x4000, - 0x960: 0x2000, 0x961: 0x2000, 0x963: 0x2000, - 0x964: 0x2000, 0x965: 0x2000, 0x967: 0x2000, 0x968: 0x2000, 0x969: 0x2000, - 0x96a: 0x2000, 0x96c: 0x2000, 0x96d: 0x2000, 0x96f: 0x2000, - 0x97f: 0x4000, - // Block 0x26, offset 0x980 - 0x993: 0x4000, - 0x99e: 0x2000, 0x99f: 0x2000, 0x9a1: 0x4000, - 0x9aa: 0x4000, 0x9ab: 0x4000, - 0x9bd: 0x4000, 0x9be: 0x4000, 0x9bf: 0x2000, - // Block 0x27, offset 0x9c0 - 0x9c4: 0x4000, 0x9c5: 0x4000, - 0x9c6: 0x2000, 0x9c7: 0x2000, 0x9c8: 0x2000, 0x9c9: 0x2000, 0x9ca: 0x2000, 0x9cb: 0x2000, - 0x9cc: 0x2000, 0x9cd: 0x2000, 0x9ce: 0x4000, 0x9cf: 0x2000, 0x9d0: 0x2000, 0x9d1: 0x2000, - 0x9d2: 0x2000, 0x9d3: 0x2000, 0x9d4: 0x4000, 0x9d5: 0x2000, 0x9d6: 0x2000, 0x9d7: 0x2000, - 0x9d8: 0x2000, 0x9d9: 0x2000, 0x9da: 0x2000, 0x9db: 0x2000, 0x9dc: 0x2000, 0x9dd: 0x2000, - 0x9de: 0x2000, 0x9df: 0x2000, 0x9e0: 0x2000, 0x9e1: 0x2000, 0x9e3: 0x2000, - 0x9e8: 0x2000, 0x9e9: 0x2000, - 0x9ea: 0x4000, 0x9eb: 0x2000, 0x9ec: 0x2000, 0x9ed: 0x2000, 0x9ee: 0x2000, 0x9ef: 0x2000, - 0x9f0: 0x2000, 0x9f1: 0x2000, 0x9f2: 0x4000, 0x9f3: 0x4000, 0x9f4: 0x2000, 0x9f5: 0x4000, - 0x9f6: 0x2000, 0x9f7: 0x2000, 0x9f8: 0x2000, 0x9f9: 0x2000, 0x9fa: 0x4000, 0x9fb: 0x2000, - 0x9fc: 0x2000, 0x9fd: 0x4000, 0x9fe: 0x2000, 0x9ff: 0x2000, - // Block 0x28, offset 0xa00 - 0xa05: 0x4000, - 0xa0a: 0x4000, 0xa0b: 0x4000, - 0xa28: 0x4000, - 0xa3d: 0x2000, - // Block 0x29, offset 0xa40 - 0xa4c: 0x4000, 0xa4e: 0x4000, - 0xa53: 0x4000, 0xa54: 0x4000, 0xa55: 0x4000, 0xa57: 0x4000, - 0xa76: 0x2000, 0xa77: 0x2000, 0xa78: 0x2000, 0xa79: 0x2000, 0xa7a: 0x2000, 0xa7b: 0x2000, - 0xa7c: 0x2000, 0xa7d: 0x2000, 0xa7e: 0x2000, 0xa7f: 0x2000, - // Block 0x2a, offset 0xa80 - 0xa95: 0x4000, 0xa96: 0x4000, 0xa97: 0x4000, - 0xab0: 0x4000, - 0xabf: 0x4000, - // Block 0x2b, offset 0xac0 - 0xae6: 0x6000, 0xae7: 0x6000, 0xae8: 0x6000, 0xae9: 0x6000, - 0xaea: 0x6000, 0xaeb: 0x6000, 0xaec: 0x6000, 0xaed: 0x6000, - // Block 0x2c, offset 0xb00 - 0xb05: 0x6010, - 0xb06: 0x6011, - // Block 0x2d, offset 0xb40 - 0xb5b: 0x4000, 0xb5c: 0x4000, - // Block 0x2e, offset 0xb80 - 0xb90: 0x4000, - 0xb95: 0x4000, 0xb96: 
0x2000, 0xb97: 0x2000, - 0xb98: 0x2000, 0xb99: 0x2000, - // Block 0x2f, offset 0xbc0 - 0xbc0: 0x4000, 0xbc1: 0x4000, 0xbc2: 0x4000, 0xbc3: 0x4000, 0xbc4: 0x4000, 0xbc5: 0x4000, - 0xbc6: 0x4000, 0xbc7: 0x4000, 0xbc8: 0x4000, 0xbc9: 0x4000, 0xbca: 0x4000, 0xbcb: 0x4000, - 0xbcc: 0x4000, 0xbcd: 0x4000, 0xbce: 0x4000, 0xbcf: 0x4000, 0xbd0: 0x4000, 0xbd1: 0x4000, - 0xbd2: 0x4000, 0xbd3: 0x4000, 0xbd4: 0x4000, 0xbd5: 0x4000, 0xbd6: 0x4000, 0xbd7: 0x4000, - 0xbd8: 0x4000, 0xbd9: 0x4000, 0xbdb: 0x4000, 0xbdc: 0x4000, 0xbdd: 0x4000, - 0xbde: 0x4000, 0xbdf: 0x4000, 0xbe0: 0x4000, 0xbe1: 0x4000, 0xbe2: 0x4000, 0xbe3: 0x4000, - 0xbe4: 0x4000, 0xbe5: 0x4000, 0xbe6: 0x4000, 0xbe7: 0x4000, 0xbe8: 0x4000, 0xbe9: 0x4000, - 0xbea: 0x4000, 0xbeb: 0x4000, 0xbec: 0x4000, 0xbed: 0x4000, 0xbee: 0x4000, 0xbef: 0x4000, - 0xbf0: 0x4000, 0xbf1: 0x4000, 0xbf2: 0x4000, 0xbf3: 0x4000, 0xbf4: 0x4000, 0xbf5: 0x4000, - 0xbf6: 0x4000, 0xbf7: 0x4000, 0xbf8: 0x4000, 0xbf9: 0x4000, 0xbfa: 0x4000, 0xbfb: 0x4000, - 0xbfc: 0x4000, 0xbfd: 0x4000, 0xbfe: 0x4000, 0xbff: 0x4000, - // Block 0x30, offset 0xc00 - 0xc00: 0x4000, 0xc01: 0x4000, 0xc02: 0x4000, 0xc03: 0x4000, 0xc04: 0x4000, 0xc05: 0x4000, - 0xc06: 0x4000, 0xc07: 0x4000, 0xc08: 0x4000, 0xc09: 0x4000, 0xc0a: 0x4000, 0xc0b: 0x4000, - 0xc0c: 0x4000, 0xc0d: 0x4000, 0xc0e: 0x4000, 0xc0f: 0x4000, 0xc10: 0x4000, 0xc11: 0x4000, - 0xc12: 0x4000, 0xc13: 0x4000, 0xc14: 0x4000, 0xc15: 0x4000, 0xc16: 0x4000, 0xc17: 0x4000, - 0xc18: 0x4000, 0xc19: 0x4000, 0xc1a: 0x4000, 0xc1b: 0x4000, 0xc1c: 0x4000, 0xc1d: 0x4000, - 0xc1e: 0x4000, 0xc1f: 0x4000, 0xc20: 0x4000, 0xc21: 0x4000, 0xc22: 0x4000, 0xc23: 0x4000, - 0xc24: 0x4000, 0xc25: 0x4000, 0xc26: 0x4000, 0xc27: 0x4000, 0xc28: 0x4000, 0xc29: 0x4000, - 0xc2a: 0x4000, 0xc2b: 0x4000, 0xc2c: 0x4000, 0xc2d: 0x4000, 0xc2e: 0x4000, 0xc2f: 0x4000, - 0xc30: 0x4000, 0xc31: 0x4000, 0xc32: 0x4000, 0xc33: 0x4000, - // Block 0x31, offset 0xc40 - 0xc40: 0x4000, 0xc41: 0x4000, 0xc42: 0x4000, 0xc43: 0x4000, 0xc44: 0x4000, 0xc45: 0x4000, - 0xc46: 0x4000, 0xc47: 0x4000, 0xc48: 0x4000, 0xc49: 0x4000, 0xc4a: 0x4000, 0xc4b: 0x4000, - 0xc4c: 0x4000, 0xc4d: 0x4000, 0xc4e: 0x4000, 0xc4f: 0x4000, 0xc50: 0x4000, 0xc51: 0x4000, - 0xc52: 0x4000, 0xc53: 0x4000, 0xc54: 0x4000, 0xc55: 0x4000, - 0xc70: 0x4000, 0xc71: 0x4000, 0xc72: 0x4000, 0xc73: 0x4000, 0xc74: 0x4000, 0xc75: 0x4000, - 0xc76: 0x4000, 0xc77: 0x4000, 0xc78: 0x4000, 0xc79: 0x4000, 0xc7a: 0x4000, 0xc7b: 0x4000, - // Block 0x32, offset 0xc80 - 0xc80: 0x9012, 0xc81: 0x4013, 0xc82: 0x4014, 0xc83: 0x4000, 0xc84: 0x4000, 0xc85: 0x4000, - 0xc86: 0x4000, 0xc87: 0x4000, 0xc88: 0x4000, 0xc89: 0x4000, 0xc8a: 0x4000, 0xc8b: 0x4000, - 0xc8c: 0x4015, 0xc8d: 0x4015, 0xc8e: 0x4000, 0xc8f: 0x4000, 0xc90: 0x4000, 0xc91: 0x4000, - 0xc92: 0x4000, 0xc93: 0x4000, 0xc94: 0x4000, 0xc95: 0x4000, 0xc96: 0x4000, 0xc97: 0x4000, - 0xc98: 0x4000, 0xc99: 0x4000, 0xc9a: 0x4000, 0xc9b: 0x4000, 0xc9c: 0x4000, 0xc9d: 0x4000, - 0xc9e: 0x4000, 0xc9f: 0x4000, 0xca0: 0x4000, 0xca1: 0x4000, 0xca2: 0x4000, 0xca3: 0x4000, - 0xca4: 0x4000, 0xca5: 0x4000, 0xca6: 0x4000, 0xca7: 0x4000, 0xca8: 0x4000, 0xca9: 0x4000, - 0xcaa: 0x4000, 0xcab: 0x4000, 0xcac: 0x4000, 0xcad: 0x4000, 0xcae: 0x4000, 0xcaf: 0x4000, - 0xcb0: 0x4000, 0xcb1: 0x4000, 0xcb2: 0x4000, 0xcb3: 0x4000, 0xcb4: 0x4000, 0xcb5: 0x4000, - 0xcb6: 0x4000, 0xcb7: 0x4000, 0xcb8: 0x4000, 0xcb9: 0x4000, 0xcba: 0x4000, 0xcbb: 0x4000, - 0xcbc: 0x4000, 0xcbd: 0x4000, 0xcbe: 0x4000, - // Block 0x33, offset 0xcc0 - 0xcc1: 0x4000, 0xcc2: 0x4000, 0xcc3: 0x4000, 0xcc4: 0x4000, 0xcc5: 0x4000, - 0xcc6: 0x4000, 
0xcc7: 0x4000, 0xcc8: 0x4000, 0xcc9: 0x4000, 0xcca: 0x4000, 0xccb: 0x4000, - 0xccc: 0x4000, 0xccd: 0x4000, 0xcce: 0x4000, 0xccf: 0x4000, 0xcd0: 0x4000, 0xcd1: 0x4000, - 0xcd2: 0x4000, 0xcd3: 0x4000, 0xcd4: 0x4000, 0xcd5: 0x4000, 0xcd6: 0x4000, 0xcd7: 0x4000, - 0xcd8: 0x4000, 0xcd9: 0x4000, 0xcda: 0x4000, 0xcdb: 0x4000, 0xcdc: 0x4000, 0xcdd: 0x4000, - 0xcde: 0x4000, 0xcdf: 0x4000, 0xce0: 0x4000, 0xce1: 0x4000, 0xce2: 0x4000, 0xce3: 0x4000, - 0xce4: 0x4000, 0xce5: 0x4000, 0xce6: 0x4000, 0xce7: 0x4000, 0xce8: 0x4000, 0xce9: 0x4000, - 0xcea: 0x4000, 0xceb: 0x4000, 0xcec: 0x4000, 0xced: 0x4000, 0xcee: 0x4000, 0xcef: 0x4000, - 0xcf0: 0x4000, 0xcf1: 0x4000, 0xcf2: 0x4000, 0xcf3: 0x4000, 0xcf4: 0x4000, 0xcf5: 0x4000, - 0xcf6: 0x4000, 0xcf7: 0x4000, 0xcf8: 0x4000, 0xcf9: 0x4000, 0xcfa: 0x4000, 0xcfb: 0x4000, - 0xcfc: 0x4000, 0xcfd: 0x4000, 0xcfe: 0x4000, 0xcff: 0x4000, - // Block 0x34, offset 0xd00 - 0xd00: 0x4000, 0xd01: 0x4000, 0xd02: 0x4000, 0xd03: 0x4000, 0xd04: 0x4000, 0xd05: 0x4000, - 0xd06: 0x4000, 0xd07: 0x4000, 0xd08: 0x4000, 0xd09: 0x4000, 0xd0a: 0x4000, 0xd0b: 0x4000, - 0xd0c: 0x4000, 0xd0d: 0x4000, 0xd0e: 0x4000, 0xd0f: 0x4000, 0xd10: 0x4000, 0xd11: 0x4000, - 0xd12: 0x4000, 0xd13: 0x4000, 0xd14: 0x4000, 0xd15: 0x4000, 0xd16: 0x4000, - 0xd19: 0x4016, 0xd1a: 0x4017, 0xd1b: 0x4000, 0xd1c: 0x4000, 0xd1d: 0x4000, - 0xd1e: 0x4000, 0xd1f: 0x4000, 0xd20: 0x4000, 0xd21: 0x4018, 0xd22: 0x4019, 0xd23: 0x401a, - 0xd24: 0x401b, 0xd25: 0x401c, 0xd26: 0x401d, 0xd27: 0x401e, 0xd28: 0x401f, 0xd29: 0x4020, - 0xd2a: 0x4021, 0xd2b: 0x4022, 0xd2c: 0x4000, 0xd2d: 0x4010, 0xd2e: 0x4000, 0xd2f: 0x4023, - 0xd30: 0x4000, 0xd31: 0x4024, 0xd32: 0x4000, 0xd33: 0x4025, 0xd34: 0x4000, 0xd35: 0x4026, - 0xd36: 0x4000, 0xd37: 0x401a, 0xd38: 0x4000, 0xd39: 0x4027, 0xd3a: 0x4000, 0xd3b: 0x4028, - 0xd3c: 0x4000, 0xd3d: 0x4020, 0xd3e: 0x4000, 0xd3f: 0x4029, - // Block 0x35, offset 0xd40 - 0xd40: 0x4000, 0xd41: 0x402a, 0xd42: 0x4000, 0xd43: 0x402b, 0xd44: 0x402c, 0xd45: 0x4000, - 0xd46: 0x4017, 0xd47: 0x4000, 0xd48: 0x402d, 0xd49: 0x4000, 0xd4a: 0x402e, 0xd4b: 0x402f, - 0xd4c: 0x4030, 0xd4d: 0x4017, 0xd4e: 0x4016, 0xd4f: 0x4017, 0xd50: 0x4000, 0xd51: 0x4000, - 0xd52: 0x4031, 0xd53: 0x4000, 0xd54: 0x4000, 0xd55: 0x4031, 0xd56: 0x4000, 0xd57: 0x4000, - 0xd58: 0x4032, 0xd59: 0x4000, 0xd5a: 0x4000, 0xd5b: 0x4032, 0xd5c: 0x4000, 0xd5d: 0x4000, - 0xd5e: 0x4033, 0xd5f: 0x402e, 0xd60: 0x4034, 0xd61: 0x4035, 0xd62: 0x4034, 0xd63: 0x4036, - 0xd64: 0x4037, 0xd65: 0x4024, 0xd66: 0x4035, 0xd67: 0x4025, 0xd68: 0x4038, 0xd69: 0x4038, - 0xd6a: 0x4039, 0xd6b: 0x4039, 0xd6c: 0x403a, 0xd6d: 0x403a, 0xd6e: 0x4000, 0xd6f: 0x4035, - 0xd70: 0x4000, 0xd71: 0x4000, 0xd72: 0x403b, 0xd73: 0x403c, 0xd74: 0x4000, 0xd75: 0x4000, - 0xd76: 0x4000, 0xd77: 0x4000, 0xd78: 0x4000, 0xd79: 0x4000, 0xd7a: 0x4000, 0xd7b: 0x403d, - 0xd7c: 0x401c, 0xd7d: 0x4000, 0xd7e: 0x4000, 0xd7f: 0x4000, - // Block 0x36, offset 0xd80 - 0xd85: 0x4000, - 0xd86: 0x4000, 0xd87: 0x4000, 0xd88: 0x4000, 0xd89: 0x4000, 0xd8a: 0x4000, 0xd8b: 0x4000, - 0xd8c: 0x4000, 0xd8d: 0x4000, 0xd8e: 0x4000, 0xd8f: 0x4000, 0xd90: 0x4000, 0xd91: 0x4000, - 0xd92: 0x4000, 0xd93: 0x4000, 0xd94: 0x4000, 0xd95: 0x4000, 0xd96: 0x4000, 0xd97: 0x4000, - 0xd98: 0x4000, 0xd99: 0x4000, 0xd9a: 0x4000, 0xd9b: 0x4000, 0xd9c: 0x4000, 0xd9d: 0x4000, - 0xd9e: 0x4000, 0xd9f: 0x4000, 0xda0: 0x4000, 0xda1: 0x4000, 0xda2: 0x4000, 0xda3: 0x4000, - 0xda4: 0x4000, 0xda5: 0x4000, 0xda6: 0x4000, 0xda7: 0x4000, 0xda8: 0x4000, 0xda9: 0x4000, - 0xdaa: 0x4000, 0xdab: 0x4000, 0xdac: 0x4000, 0xdad: 0x4000, - 0xdb1: 0x403e, 0xdb2: 
0x403e, 0xdb3: 0x403e, 0xdb4: 0x403e, 0xdb5: 0x403e, - 0xdb6: 0x403e, 0xdb7: 0x403e, 0xdb8: 0x403e, 0xdb9: 0x403e, 0xdba: 0x403e, 0xdbb: 0x403e, - 0xdbc: 0x403e, 0xdbd: 0x403e, 0xdbe: 0x403e, 0xdbf: 0x403e, - // Block 0x37, offset 0xdc0 - 0xdc0: 0x4037, 0xdc1: 0x4037, 0xdc2: 0x4037, 0xdc3: 0x4037, 0xdc4: 0x4037, 0xdc5: 0x4037, - 0xdc6: 0x4037, 0xdc7: 0x4037, 0xdc8: 0x4037, 0xdc9: 0x4037, 0xdca: 0x4037, 0xdcb: 0x4037, - 0xdcc: 0x4037, 0xdcd: 0x4037, 0xdce: 0x4037, 0xdcf: 0x400e, 0xdd0: 0x403f, 0xdd1: 0x4040, - 0xdd2: 0x4041, 0xdd3: 0x4040, 0xdd4: 0x403f, 0xdd5: 0x4042, 0xdd6: 0x4043, 0xdd7: 0x4044, - 0xdd8: 0x4040, 0xdd9: 0x4041, 0xdda: 0x4040, 0xddb: 0x4045, 0xddc: 0x4009, 0xddd: 0x4045, - 0xdde: 0x4046, 0xddf: 0x4045, 0xde0: 0x4047, 0xde1: 0x400b, 0xde2: 0x400a, 0xde3: 0x400c, - 0xde4: 0x4048, 0xde5: 0x4000, 0xde6: 0x4000, 0xde7: 0x4000, 0xde8: 0x4000, 0xde9: 0x4000, - 0xdea: 0x4000, 0xdeb: 0x4000, 0xdec: 0x4000, 0xded: 0x4000, 0xdee: 0x4000, 0xdef: 0x4000, - 0xdf0: 0x4000, 0xdf1: 0x4000, 0xdf2: 0x4000, 0xdf3: 0x4000, 0xdf4: 0x4000, 0xdf5: 0x4000, - 0xdf6: 0x4000, 0xdf7: 0x4000, 0xdf8: 0x4000, 0xdf9: 0x4000, 0xdfa: 0x4000, 0xdfb: 0x4000, - 0xdfc: 0x4000, 0xdfd: 0x4000, 0xdfe: 0x4000, 0xdff: 0x4000, - // Block 0x38, offset 0xe00 - 0xe00: 0x4000, 0xe01: 0x4000, 0xe02: 0x4000, 0xe03: 0x4000, 0xe04: 0x4000, 0xe05: 0x4000, - 0xe06: 0x4000, 0xe07: 0x4000, 0xe08: 0x4000, 0xe09: 0x4000, 0xe0a: 0x4000, 0xe0b: 0x4000, - 0xe0c: 0x4000, 0xe0d: 0x4000, 0xe0e: 0x4000, 0xe10: 0x4000, 0xe11: 0x4000, - 0xe12: 0x4000, 0xe13: 0x4000, 0xe14: 0x4000, 0xe15: 0x4000, 0xe16: 0x4000, 0xe17: 0x4000, - 0xe18: 0x4000, 0xe19: 0x4000, 0xe1a: 0x4000, 0xe1b: 0x4000, 0xe1c: 0x4000, 0xe1d: 0x4000, - 0xe1e: 0x4000, 0xe1f: 0x4000, 0xe20: 0x4000, 0xe21: 0x4000, 0xe22: 0x4000, 0xe23: 0x4000, - 0xe24: 0x4000, 0xe25: 0x4000, 0xe26: 0x4000, 0xe27: 0x4000, 0xe28: 0x4000, 0xe29: 0x4000, - 0xe2a: 0x4000, 0xe2b: 0x4000, 0xe2c: 0x4000, 0xe2d: 0x4000, 0xe2e: 0x4000, 0xe2f: 0x4000, - 0xe30: 0x4000, 0xe31: 0x4000, 0xe32: 0x4000, 0xe33: 0x4000, 0xe34: 0x4000, 0xe35: 0x4000, - 0xe36: 0x4000, 0xe37: 0x4000, 0xe38: 0x4000, 0xe39: 0x4000, 0xe3a: 0x4000, - // Block 0x39, offset 0xe40 - 0xe40: 0x4000, 0xe41: 0x4000, 0xe42: 0x4000, 0xe43: 0x4000, 0xe44: 0x4000, 0xe45: 0x4000, - 0xe46: 0x4000, 0xe47: 0x4000, 0xe48: 0x4000, 0xe49: 0x4000, 0xe4a: 0x4000, 0xe4b: 0x4000, - 0xe4c: 0x4000, 0xe4d: 0x4000, 0xe4e: 0x4000, 0xe4f: 0x4000, 0xe50: 0x4000, 0xe51: 0x4000, - 0xe52: 0x4000, 0xe53: 0x4000, 0xe54: 0x4000, 0xe55: 0x4000, 0xe56: 0x4000, 0xe57: 0x4000, - 0xe58: 0x4000, 0xe59: 0x4000, 0xe5a: 0x4000, 0xe5b: 0x4000, 0xe5c: 0x4000, 0xe5d: 0x4000, - 0xe5e: 0x4000, 0xe5f: 0x4000, 0xe60: 0x4000, 0xe61: 0x4000, 0xe62: 0x4000, 0xe63: 0x4000, - 0xe70: 0x4000, 0xe71: 0x4000, 0xe72: 0x4000, 0xe73: 0x4000, 0xe74: 0x4000, 0xe75: 0x4000, - 0xe76: 0x4000, 0xe77: 0x4000, 0xe78: 0x4000, 0xe79: 0x4000, 0xe7a: 0x4000, 0xe7b: 0x4000, - 0xe7c: 0x4000, 0xe7d: 0x4000, 0xe7e: 0x4000, 0xe7f: 0x4000, - // Block 0x3a, offset 0xe80 - 0xe80: 0x4000, 0xe81: 0x4000, 0xe82: 0x4000, 0xe83: 0x4000, 0xe84: 0x4000, 0xe85: 0x4000, - 0xe86: 0x4000, 0xe87: 0x4000, 0xe88: 0x4000, 0xe89: 0x4000, 0xe8a: 0x4000, 0xe8b: 0x4000, - 0xe8c: 0x4000, 0xe8d: 0x4000, 0xe8e: 0x4000, 0xe8f: 0x4000, 0xe90: 0x4000, 0xe91: 0x4000, - 0xe92: 0x4000, 0xe93: 0x4000, 0xe94: 0x4000, 0xe95: 0x4000, 0xe96: 0x4000, 0xe97: 0x4000, - 0xe98: 0x4000, 0xe99: 0x4000, 0xe9a: 0x4000, 0xe9b: 0x4000, 0xe9c: 0x4000, 0xe9d: 0x4000, - 0xe9e: 0x4000, 0xea0: 0x4000, 0xea1: 0x4000, 0xea2: 0x4000, 0xea3: 0x4000, - 0xea4: 0x4000, 
0xea5: 0x4000, 0xea6: 0x4000, 0xea7: 0x4000, 0xea8: 0x4000, 0xea9: 0x4000, - 0xeaa: 0x4000, 0xeab: 0x4000, 0xeac: 0x4000, 0xead: 0x4000, 0xeae: 0x4000, 0xeaf: 0x4000, - 0xeb0: 0x4000, 0xeb1: 0x4000, 0xeb2: 0x4000, 0xeb3: 0x4000, 0xeb4: 0x4000, 0xeb5: 0x4000, - 0xeb6: 0x4000, 0xeb7: 0x4000, 0xeb8: 0x4000, 0xeb9: 0x4000, 0xeba: 0x4000, 0xebb: 0x4000, - 0xebc: 0x4000, 0xebd: 0x4000, 0xebe: 0x4000, 0xebf: 0x4000, - // Block 0x3b, offset 0xec0 - 0xec0: 0x4000, 0xec1: 0x4000, 0xec2: 0x4000, 0xec3: 0x4000, 0xec4: 0x4000, 0xec5: 0x4000, - 0xec6: 0x4000, 0xec7: 0x4000, 0xec8: 0x2000, 0xec9: 0x2000, 0xeca: 0x2000, 0xecb: 0x2000, - 0xecc: 0x2000, 0xecd: 0x2000, 0xece: 0x2000, 0xecf: 0x2000, 0xed0: 0x4000, 0xed1: 0x4000, - 0xed2: 0x4000, 0xed3: 0x4000, 0xed4: 0x4000, 0xed5: 0x4000, 0xed6: 0x4000, 0xed7: 0x4000, - 0xed8: 0x4000, 0xed9: 0x4000, 0xeda: 0x4000, 0xedb: 0x4000, 0xedc: 0x4000, 0xedd: 0x4000, - 0xede: 0x4000, 0xedf: 0x4000, 0xee0: 0x4000, 0xee1: 0x4000, 0xee2: 0x4000, 0xee3: 0x4000, - 0xee4: 0x4000, 0xee5: 0x4000, 0xee6: 0x4000, 0xee7: 0x4000, 0xee8: 0x4000, 0xee9: 0x4000, - 0xeea: 0x4000, 0xeeb: 0x4000, 0xeec: 0x4000, 0xeed: 0x4000, 0xeee: 0x4000, 0xeef: 0x4000, - 0xef0: 0x4000, 0xef1: 0x4000, 0xef2: 0x4000, 0xef3: 0x4000, 0xef4: 0x4000, 0xef5: 0x4000, - 0xef6: 0x4000, 0xef7: 0x4000, 0xef8: 0x4000, 0xef9: 0x4000, 0xefa: 0x4000, 0xefb: 0x4000, - 0xefc: 0x4000, 0xefd: 0x4000, 0xefe: 0x4000, 0xeff: 0x4000, - // Block 0x3c, offset 0xf00 - 0xf00: 0x4000, 0xf01: 0x4000, 0xf02: 0x4000, 0xf03: 0x4000, 0xf04: 0x4000, 0xf05: 0x4000, - 0xf06: 0x4000, 0xf07: 0x4000, 0xf08: 0x4000, 0xf09: 0x4000, 0xf0a: 0x4000, 0xf0b: 0x4000, - 0xf0c: 0x4000, 0xf0d: 0x4000, 0xf0e: 0x4000, 0xf0f: 0x4000, 0xf10: 0x4000, 0xf11: 0x4000, - 0xf12: 0x4000, 0xf13: 0x4000, 0xf14: 0x4000, 0xf15: 0x4000, 0xf16: 0x4000, 0xf17: 0x4000, - 0xf18: 0x4000, 0xf19: 0x4000, 0xf1a: 0x4000, 0xf1b: 0x4000, 0xf1c: 0x4000, 0xf1d: 0x4000, - 0xf1e: 0x4000, 0xf1f: 0x4000, 0xf20: 0x4000, 0xf21: 0x4000, 0xf22: 0x4000, 0xf23: 0x4000, - 0xf24: 0x4000, 0xf25: 0x4000, 0xf26: 0x4000, 0xf27: 0x4000, 0xf28: 0x4000, 0xf29: 0x4000, - 0xf2a: 0x4000, 0xf2b: 0x4000, 0xf2c: 0x4000, 0xf2d: 0x4000, 0xf2e: 0x4000, 0xf2f: 0x4000, - 0xf30: 0x4000, 0xf31: 0x4000, 0xf32: 0x4000, 0xf33: 0x4000, 0xf34: 0x4000, 0xf35: 0x4000, - 0xf36: 0x4000, 0xf37: 0x4000, 0xf38: 0x4000, 0xf39: 0x4000, 0xf3a: 0x4000, 0xf3b: 0x4000, - 0xf3c: 0x4000, 0xf3d: 0x4000, 0xf3e: 0x4000, - // Block 0x3d, offset 0xf40 - 0xf40: 0x4000, 0xf41: 0x4000, 0xf42: 0x4000, 0xf43: 0x4000, 0xf44: 0x4000, 0xf45: 0x4000, - 0xf46: 0x4000, 0xf47: 0x4000, 0xf48: 0x4000, 0xf49: 0x4000, 0xf4a: 0x4000, 0xf4b: 0x4000, - 0xf4c: 0x4000, 0xf50: 0x4000, 0xf51: 0x4000, - 0xf52: 0x4000, 0xf53: 0x4000, 0xf54: 0x4000, 0xf55: 0x4000, 0xf56: 0x4000, 0xf57: 0x4000, - 0xf58: 0x4000, 0xf59: 0x4000, 0xf5a: 0x4000, 0xf5b: 0x4000, 0xf5c: 0x4000, 0xf5d: 0x4000, - 0xf5e: 0x4000, 0xf5f: 0x4000, 0xf60: 0x4000, 0xf61: 0x4000, 0xf62: 0x4000, 0xf63: 0x4000, - 0xf64: 0x4000, 0xf65: 0x4000, 0xf66: 0x4000, 0xf67: 0x4000, 0xf68: 0x4000, 0xf69: 0x4000, - 0xf6a: 0x4000, 0xf6b: 0x4000, 0xf6c: 0x4000, 0xf6d: 0x4000, 0xf6e: 0x4000, 0xf6f: 0x4000, - 0xf70: 0x4000, 0xf71: 0x4000, 0xf72: 0x4000, 0xf73: 0x4000, 0xf74: 0x4000, 0xf75: 0x4000, - 0xf76: 0x4000, 0xf77: 0x4000, 0xf78: 0x4000, 0xf79: 0x4000, 0xf7a: 0x4000, 0xf7b: 0x4000, - 0xf7c: 0x4000, 0xf7d: 0x4000, 0xf7e: 0x4000, 0xf7f: 0x4000, - // Block 0x3e, offset 0xf80 - 0xf80: 0x4000, 0xf81: 0x4000, 0xf82: 0x4000, 0xf83: 0x4000, 0xf84: 0x4000, 0xf85: 0x4000, - 0xf86: 0x4000, - // Block 0x3f, offset 
0xfc0 - 0xfe0: 0x4000, 0xfe1: 0x4000, 0xfe2: 0x4000, 0xfe3: 0x4000, - 0xfe4: 0x4000, 0xfe5: 0x4000, 0xfe6: 0x4000, 0xfe7: 0x4000, 0xfe8: 0x4000, 0xfe9: 0x4000, - 0xfea: 0x4000, 0xfeb: 0x4000, 0xfec: 0x4000, 0xfed: 0x4000, 0xfee: 0x4000, 0xfef: 0x4000, - 0xff0: 0x4000, 0xff1: 0x4000, 0xff2: 0x4000, 0xff3: 0x4000, 0xff4: 0x4000, 0xff5: 0x4000, - 0xff6: 0x4000, 0xff7: 0x4000, 0xff8: 0x4000, 0xff9: 0x4000, 0xffa: 0x4000, 0xffb: 0x4000, - 0xffc: 0x4000, - // Block 0x40, offset 0x1000 - 0x1000: 0x4000, 0x1001: 0x4000, 0x1002: 0x4000, 0x1003: 0x4000, 0x1004: 0x4000, 0x1005: 0x4000, - 0x1006: 0x4000, 0x1007: 0x4000, 0x1008: 0x4000, 0x1009: 0x4000, 0x100a: 0x4000, 0x100b: 0x4000, - 0x100c: 0x4000, 0x100d: 0x4000, 0x100e: 0x4000, 0x100f: 0x4000, 0x1010: 0x4000, 0x1011: 0x4000, - 0x1012: 0x4000, 0x1013: 0x4000, 0x1014: 0x4000, 0x1015: 0x4000, 0x1016: 0x4000, 0x1017: 0x4000, - 0x1018: 0x4000, 0x1019: 0x4000, 0x101a: 0x4000, 0x101b: 0x4000, 0x101c: 0x4000, 0x101d: 0x4000, - 0x101e: 0x4000, 0x101f: 0x4000, 0x1020: 0x4000, 0x1021: 0x4000, 0x1022: 0x4000, 0x1023: 0x4000, - // Block 0x41, offset 0x1040 - 0x1040: 0x2000, 0x1041: 0x2000, 0x1042: 0x2000, 0x1043: 0x2000, 0x1044: 0x2000, 0x1045: 0x2000, - 0x1046: 0x2000, 0x1047: 0x2000, 0x1048: 0x2000, 0x1049: 0x2000, 0x104a: 0x2000, 0x104b: 0x2000, - 0x104c: 0x2000, 0x104d: 0x2000, 0x104e: 0x2000, 0x104f: 0x2000, 0x1050: 0x4000, 0x1051: 0x4000, - 0x1052: 0x4000, 0x1053: 0x4000, 0x1054: 0x4000, 0x1055: 0x4000, 0x1056: 0x4000, 0x1057: 0x4000, - 0x1058: 0x4000, 0x1059: 0x4000, - 0x1070: 0x4000, 0x1071: 0x4000, 0x1072: 0x4000, 0x1073: 0x4000, 0x1074: 0x4000, 0x1075: 0x4000, - 0x1076: 0x4000, 0x1077: 0x4000, 0x1078: 0x4000, 0x1079: 0x4000, 0x107a: 0x4000, 0x107b: 0x4000, - 0x107c: 0x4000, 0x107d: 0x4000, 0x107e: 0x4000, 0x107f: 0x4000, - // Block 0x42, offset 0x1080 - 0x1080: 0x4000, 0x1081: 0x4000, 0x1082: 0x4000, 0x1083: 0x4000, 0x1084: 0x4000, 0x1085: 0x4000, - 0x1086: 0x4000, 0x1087: 0x4000, 0x1088: 0x4000, 0x1089: 0x4000, 0x108a: 0x4000, 0x108b: 0x4000, - 0x108c: 0x4000, 0x108d: 0x4000, 0x108e: 0x4000, 0x108f: 0x4000, 0x1090: 0x4000, 0x1091: 0x4000, - 0x1092: 0x4000, 0x1094: 0x4000, 0x1095: 0x4000, 0x1096: 0x4000, 0x1097: 0x4000, - 0x1098: 0x4000, 0x1099: 0x4000, 0x109a: 0x4000, 0x109b: 0x4000, 0x109c: 0x4000, 0x109d: 0x4000, - 0x109e: 0x4000, 0x109f: 0x4000, 0x10a0: 0x4000, 0x10a1: 0x4000, 0x10a2: 0x4000, 0x10a3: 0x4000, - 0x10a4: 0x4000, 0x10a5: 0x4000, 0x10a6: 0x4000, 0x10a8: 0x4000, 0x10a9: 0x4000, - 0x10aa: 0x4000, 0x10ab: 0x4000, - // Block 0x43, offset 0x10c0 - 0x10c1: 0x9012, 0x10c2: 0x9012, 0x10c3: 0x9012, 0x10c4: 0x9012, 0x10c5: 0x9012, - 0x10c6: 0x9012, 0x10c7: 0x9012, 0x10c8: 0x9012, 0x10c9: 0x9012, 0x10ca: 0x9012, 0x10cb: 0x9012, - 0x10cc: 0x9012, 0x10cd: 0x9012, 0x10ce: 0x9012, 0x10cf: 0x9012, 0x10d0: 0x9012, 0x10d1: 0x9012, - 0x10d2: 0x9012, 0x10d3: 0x9012, 0x10d4: 0x9012, 0x10d5: 0x9012, 0x10d6: 0x9012, 0x10d7: 0x9012, - 0x10d8: 0x9012, 0x10d9: 0x9012, 0x10da: 0x9012, 0x10db: 0x9012, 0x10dc: 0x9012, 0x10dd: 0x9012, - 0x10de: 0x9012, 0x10df: 0x9012, 0x10e0: 0x9049, 0x10e1: 0x9049, 0x10e2: 0x9049, 0x10e3: 0x9049, - 0x10e4: 0x9049, 0x10e5: 0x9049, 0x10e6: 0x9049, 0x10e7: 0x9049, 0x10e8: 0x9049, 0x10e9: 0x9049, - 0x10ea: 0x9049, 0x10eb: 0x9049, 0x10ec: 0x9049, 0x10ed: 0x9049, 0x10ee: 0x9049, 0x10ef: 0x9049, - 0x10f0: 0x9049, 0x10f1: 0x9049, 0x10f2: 0x9049, 0x10f3: 0x9049, 0x10f4: 0x9049, 0x10f5: 0x9049, - 0x10f6: 0x9049, 0x10f7: 0x9049, 0x10f8: 0x9049, 0x10f9: 0x9049, 0x10fa: 0x9049, 0x10fb: 0x9049, - 0x10fc: 0x9049, 0x10fd: 0x9049, 0x10fe: 0x9049, 
0x10ff: 0x9049, - // Block 0x44, offset 0x1100 - 0x1100: 0x9049, 0x1101: 0x9049, 0x1102: 0x9049, 0x1103: 0x9049, 0x1104: 0x9049, 0x1105: 0x9049, - 0x1106: 0x9049, 0x1107: 0x9049, 0x1108: 0x9049, 0x1109: 0x9049, 0x110a: 0x9049, 0x110b: 0x9049, - 0x110c: 0x9049, 0x110d: 0x9049, 0x110e: 0x9049, 0x110f: 0x9049, 0x1110: 0x9049, 0x1111: 0x9049, - 0x1112: 0x9049, 0x1113: 0x9049, 0x1114: 0x9049, 0x1115: 0x9049, 0x1116: 0x9049, 0x1117: 0x9049, - 0x1118: 0x9049, 0x1119: 0x9049, 0x111a: 0x9049, 0x111b: 0x9049, 0x111c: 0x9049, 0x111d: 0x9049, - 0x111e: 0x9049, 0x111f: 0x904a, 0x1120: 0x904b, 0x1121: 0xb04c, 0x1122: 0xb04d, 0x1123: 0xb04d, - 0x1124: 0xb04e, 0x1125: 0xb04f, 0x1126: 0xb050, 0x1127: 0xb051, 0x1128: 0xb052, 0x1129: 0xb053, - 0x112a: 0xb054, 0x112b: 0xb055, 0x112c: 0xb056, 0x112d: 0xb057, 0x112e: 0xb058, 0x112f: 0xb059, - 0x1130: 0xb05a, 0x1131: 0xb05b, 0x1132: 0xb05c, 0x1133: 0xb05d, 0x1134: 0xb05e, 0x1135: 0xb05f, - 0x1136: 0xb060, 0x1137: 0xb061, 0x1138: 0xb062, 0x1139: 0xb063, 0x113a: 0xb064, 0x113b: 0xb065, - 0x113c: 0xb052, 0x113d: 0xb066, 0x113e: 0xb067, 0x113f: 0xb055, - // Block 0x45, offset 0x1140 - 0x1140: 0xb068, 0x1141: 0xb069, 0x1142: 0xb06a, 0x1143: 0xb06b, 0x1144: 0xb05a, 0x1145: 0xb056, - 0x1146: 0xb06c, 0x1147: 0xb06d, 0x1148: 0xb06b, 0x1149: 0xb06e, 0x114a: 0xb06b, 0x114b: 0xb06f, - 0x114c: 0xb06f, 0x114d: 0xb070, 0x114e: 0xb070, 0x114f: 0xb071, 0x1150: 0xb056, 0x1151: 0xb072, - 0x1152: 0xb073, 0x1153: 0xb072, 0x1154: 0xb074, 0x1155: 0xb073, 0x1156: 0xb075, 0x1157: 0xb075, - 0x1158: 0xb076, 0x1159: 0xb076, 0x115a: 0xb077, 0x115b: 0xb077, 0x115c: 0xb073, 0x115d: 0xb078, - 0x115e: 0xb079, 0x115f: 0xb067, 0x1160: 0xb07a, 0x1161: 0xb07b, 0x1162: 0xb07b, 0x1163: 0xb07b, - 0x1164: 0xb07b, 0x1165: 0xb07b, 0x1166: 0xb07b, 0x1167: 0xb07b, 0x1168: 0xb07b, 0x1169: 0xb07b, - 0x116a: 0xb07b, 0x116b: 0xb07b, 0x116c: 0xb07b, 0x116d: 0xb07b, 0x116e: 0xb07b, 0x116f: 0xb07b, - 0x1170: 0xb07c, 0x1171: 0xb07c, 0x1172: 0xb07c, 0x1173: 0xb07c, 0x1174: 0xb07c, 0x1175: 0xb07c, - 0x1176: 0xb07c, 0x1177: 0xb07c, 0x1178: 0xb07c, 0x1179: 0xb07c, 0x117a: 0xb07c, 0x117b: 0xb07c, - 0x117c: 0xb07c, 0x117d: 0xb07c, 0x117e: 0xb07c, - // Block 0x46, offset 0x1180 - 0x1182: 0xb07d, 0x1183: 0xb07e, 0x1184: 0xb07f, 0x1185: 0xb080, - 0x1186: 0xb07f, 0x1187: 0xb07e, 0x118a: 0xb081, 0x118b: 0xb082, - 0x118c: 0xb083, 0x118d: 0xb07f, 0x118e: 0xb080, 0x118f: 0xb07f, - 0x1192: 0xb084, 0x1193: 0xb085, 0x1194: 0xb084, 0x1195: 0xb086, 0x1196: 0xb084, 0x1197: 0xb087, - 0x119a: 0xb088, 0x119b: 0xb089, 0x119c: 0xb08a, - 0x11a0: 0x908b, 0x11a1: 0x908b, 0x11a2: 0x908c, 0x11a3: 0x908d, - 0x11a4: 0x908b, 0x11a5: 0x908e, 0x11a6: 0x908f, 0x11a8: 0xb090, 0x11a9: 0xb091, - 0x11aa: 0xb092, 0x11ab: 0xb091, 0x11ac: 0xb093, 0x11ad: 0xb094, 0x11ae: 0xb095, - 0x11bd: 0x2000, - // Block 0x47, offset 0x11c0 - 0x11e0: 0x4000, - // Block 0x48, offset 0x1200 - 0x1200: 0x4000, 0x1201: 0x4000, 0x1202: 0x4000, 0x1203: 0x4000, 0x1204: 0x4000, 0x1205: 0x4000, - 0x1206: 0x4000, 0x1207: 0x4000, 0x1208: 0x4000, 0x1209: 0x4000, 0x120a: 0x4000, 0x120b: 0x4000, - 0x120c: 0x4000, 0x120d: 0x4000, 0x120e: 0x4000, 0x120f: 0x4000, 0x1210: 0x4000, 0x1211: 0x4000, - 0x1212: 0x4000, 0x1213: 0x4000, 0x1214: 0x4000, 0x1215: 0x4000, 0x1216: 0x4000, 0x1217: 0x4000, - 0x1218: 0x4000, 0x1219: 0x4000, 0x121a: 0x4000, 0x121b: 0x4000, 0x121c: 0x4000, 0x121d: 0x4000, - 0x121e: 0x4000, 0x121f: 0x4000, 0x1220: 0x4000, 0x1221: 0x4000, 0x1222: 0x4000, 0x1223: 0x4000, - 0x1224: 0x4000, 0x1225: 0x4000, 0x1226: 0x4000, 0x1227: 0x4000, 0x1228: 0x4000, 0x1229: 0x4000, - 0x122a: 
0x4000, 0x122b: 0x4000, 0x122c: 0x4000, - // Block 0x49, offset 0x1240 - 0x1240: 0x4000, 0x1241: 0x4000, 0x1242: 0x4000, 0x1243: 0x4000, 0x1244: 0x4000, 0x1245: 0x4000, - 0x1246: 0x4000, 0x1247: 0x4000, 0x1248: 0x4000, 0x1249: 0x4000, 0x124a: 0x4000, 0x124b: 0x4000, - 0x124c: 0x4000, 0x124d: 0x4000, 0x124e: 0x4000, 0x124f: 0x4000, 0x1250: 0x4000, 0x1251: 0x4000, - 0x1252: 0x4000, 0x1253: 0x4000, 0x1254: 0x4000, 0x1255: 0x4000, 0x1256: 0x4000, 0x1257: 0x4000, - 0x1258: 0x4000, 0x1259: 0x4000, 0x125a: 0x4000, 0x125b: 0x4000, 0x125c: 0x4000, 0x125d: 0x4000, - 0x125e: 0x4000, 0x125f: 0x4000, 0x1260: 0x4000, 0x1261: 0x4000, 0x1262: 0x4000, 0x1263: 0x4000, - 0x1264: 0x4000, 0x1265: 0x4000, 0x1266: 0x4000, 0x1267: 0x4000, 0x1268: 0x4000, 0x1269: 0x4000, - 0x126a: 0x4000, 0x126b: 0x4000, 0x126c: 0x4000, 0x126d: 0x4000, 0x126e: 0x4000, 0x126f: 0x4000, - 0x1270: 0x4000, 0x1271: 0x4000, 0x1272: 0x4000, - // Block 0x4a, offset 0x1280 - 0x1280: 0x4000, 0x1281: 0x4000, - // Block 0x4b, offset 0x12c0 - 0x12c4: 0x4000, - // Block 0x4c, offset 0x1300 - 0x130f: 0x4000, - // Block 0x4d, offset 0x1340 - 0x1340: 0x2000, 0x1341: 0x2000, 0x1342: 0x2000, 0x1343: 0x2000, 0x1344: 0x2000, 0x1345: 0x2000, - 0x1346: 0x2000, 0x1347: 0x2000, 0x1348: 0x2000, 0x1349: 0x2000, 0x134a: 0x2000, - 0x1350: 0x2000, 0x1351: 0x2000, - 0x1352: 0x2000, 0x1353: 0x2000, 0x1354: 0x2000, 0x1355: 0x2000, 0x1356: 0x2000, 0x1357: 0x2000, - 0x1358: 0x2000, 0x1359: 0x2000, 0x135a: 0x2000, 0x135b: 0x2000, 0x135c: 0x2000, 0x135d: 0x2000, - 0x135e: 0x2000, 0x135f: 0x2000, 0x1360: 0x2000, 0x1361: 0x2000, 0x1362: 0x2000, 0x1363: 0x2000, - 0x1364: 0x2000, 0x1365: 0x2000, 0x1366: 0x2000, 0x1367: 0x2000, 0x1368: 0x2000, 0x1369: 0x2000, - 0x136a: 0x2000, 0x136b: 0x2000, 0x136c: 0x2000, 0x136d: 0x2000, - 0x1370: 0x2000, 0x1371: 0x2000, 0x1372: 0x2000, 0x1373: 0x2000, 0x1374: 0x2000, 0x1375: 0x2000, - 0x1376: 0x2000, 0x1377: 0x2000, 0x1378: 0x2000, 0x1379: 0x2000, 0x137a: 0x2000, 0x137b: 0x2000, - 0x137c: 0x2000, 0x137d: 0x2000, 0x137e: 0x2000, 0x137f: 0x2000, - // Block 0x4e, offset 0x1380 - 0x1380: 0x2000, 0x1381: 0x2000, 0x1382: 0x2000, 0x1383: 0x2000, 0x1384: 0x2000, 0x1385: 0x2000, - 0x1386: 0x2000, 0x1387: 0x2000, 0x1388: 0x2000, 0x1389: 0x2000, 0x138a: 0x2000, 0x138b: 0x2000, - 0x138c: 0x2000, 0x138d: 0x2000, 0x138e: 0x2000, 0x138f: 0x2000, 0x1390: 0x2000, 0x1391: 0x2000, - 0x1392: 0x2000, 0x1393: 0x2000, 0x1394: 0x2000, 0x1395: 0x2000, 0x1396: 0x2000, 0x1397: 0x2000, - 0x1398: 0x2000, 0x1399: 0x2000, 0x139a: 0x2000, 0x139b: 0x2000, 0x139c: 0x2000, 0x139d: 0x2000, - 0x139e: 0x2000, 0x139f: 0x2000, 0x13a0: 0x2000, 0x13a1: 0x2000, 0x13a2: 0x2000, 0x13a3: 0x2000, - 0x13a4: 0x2000, 0x13a5: 0x2000, 0x13a6: 0x2000, 0x13a7: 0x2000, 0x13a8: 0x2000, 0x13a9: 0x2000, - 0x13b0: 0x2000, 0x13b1: 0x2000, 0x13b2: 0x2000, 0x13b3: 0x2000, 0x13b4: 0x2000, 0x13b5: 0x2000, - 0x13b6: 0x2000, 0x13b7: 0x2000, 0x13b8: 0x2000, 0x13b9: 0x2000, 0x13ba: 0x2000, 0x13bb: 0x2000, - 0x13bc: 0x2000, 0x13bd: 0x2000, 0x13be: 0x2000, 0x13bf: 0x2000, - // Block 0x4f, offset 0x13c0 - 0x13c0: 0x2000, 0x13c1: 0x2000, 0x13c2: 0x2000, 0x13c3: 0x2000, 0x13c4: 0x2000, 0x13c5: 0x2000, - 0x13c6: 0x2000, 0x13c7: 0x2000, 0x13c8: 0x2000, 0x13c9: 0x2000, 0x13ca: 0x2000, 0x13cb: 0x2000, - 0x13cc: 0x2000, 0x13cd: 0x2000, 0x13ce: 0x4000, 0x13cf: 0x2000, 0x13d0: 0x2000, 0x13d1: 0x4000, - 0x13d2: 0x4000, 0x13d3: 0x4000, 0x13d4: 0x4000, 0x13d5: 0x4000, 0x13d6: 0x4000, 0x13d7: 0x4000, - 0x13d8: 0x4000, 0x13d9: 0x4000, 0x13da: 0x4000, 0x13db: 0x2000, 0x13dc: 0x2000, 0x13dd: 0x2000, - 0x13de: 0x2000, 
0x13df: 0x2000, 0x13e0: 0x2000, 0x13e1: 0x2000, 0x13e2: 0x2000, 0x13e3: 0x2000, - 0x13e4: 0x2000, 0x13e5: 0x2000, 0x13e6: 0x2000, 0x13e7: 0x2000, 0x13e8: 0x2000, 0x13e9: 0x2000, - 0x13ea: 0x2000, 0x13eb: 0x2000, 0x13ec: 0x2000, - // Block 0x50, offset 0x1400 - 0x1400: 0x4000, 0x1401: 0x4000, 0x1402: 0x4000, - 0x1410: 0x4000, 0x1411: 0x4000, - 0x1412: 0x4000, 0x1413: 0x4000, 0x1414: 0x4000, 0x1415: 0x4000, 0x1416: 0x4000, 0x1417: 0x4000, - 0x1418: 0x4000, 0x1419: 0x4000, 0x141a: 0x4000, 0x141b: 0x4000, 0x141c: 0x4000, 0x141d: 0x4000, - 0x141e: 0x4000, 0x141f: 0x4000, 0x1420: 0x4000, 0x1421: 0x4000, 0x1422: 0x4000, 0x1423: 0x4000, - 0x1424: 0x4000, 0x1425: 0x4000, 0x1426: 0x4000, 0x1427: 0x4000, 0x1428: 0x4000, 0x1429: 0x4000, - 0x142a: 0x4000, 0x142b: 0x4000, 0x142c: 0x4000, 0x142d: 0x4000, 0x142e: 0x4000, 0x142f: 0x4000, - 0x1430: 0x4000, 0x1431: 0x4000, 0x1432: 0x4000, 0x1433: 0x4000, 0x1434: 0x4000, 0x1435: 0x4000, - 0x1436: 0x4000, 0x1437: 0x4000, 0x1438: 0x4000, 0x1439: 0x4000, 0x143a: 0x4000, 0x143b: 0x4000, - // Block 0x51, offset 0x1440 - 0x1440: 0x4000, 0x1441: 0x4000, 0x1442: 0x4000, 0x1443: 0x4000, 0x1444: 0x4000, 0x1445: 0x4000, - 0x1446: 0x4000, 0x1447: 0x4000, 0x1448: 0x4000, - 0x1450: 0x4000, 0x1451: 0x4000, - // Block 0x52, offset 0x1480 - 0x1480: 0x4000, 0x1481: 0x4000, 0x1482: 0x4000, 0x1483: 0x4000, 0x1484: 0x4000, 0x1485: 0x4000, - 0x1486: 0x4000, 0x1487: 0x4000, 0x1488: 0x4000, 0x1489: 0x4000, 0x148a: 0x4000, 0x148b: 0x4000, - 0x148c: 0x4000, 0x148d: 0x4000, 0x148e: 0x4000, 0x148f: 0x4000, 0x1490: 0x4000, 0x1491: 0x4000, - 0x1492: 0x4000, 0x1493: 0x4000, 0x1494: 0x4000, 0x1495: 0x4000, 0x1496: 0x4000, 0x1497: 0x4000, - 0x1498: 0x4000, 0x1499: 0x4000, 0x149a: 0x4000, 0x149b: 0x4000, 0x149c: 0x4000, 0x149d: 0x4000, - 0x149e: 0x4000, 0x149f: 0x4000, 0x14a0: 0x4000, - 0x14ad: 0x4000, 0x14ae: 0x4000, 0x14af: 0x4000, - 0x14b0: 0x4000, 0x14b1: 0x4000, 0x14b2: 0x4000, 0x14b3: 0x4000, 0x14b4: 0x4000, 0x14b5: 0x4000, - 0x14b7: 0x4000, 0x14b8: 0x4000, 0x14b9: 0x4000, 0x14ba: 0x4000, 0x14bb: 0x4000, - 0x14bc: 0x4000, 0x14bd: 0x4000, 0x14be: 0x4000, 0x14bf: 0x4000, - // Block 0x53, offset 0x14c0 - 0x14c0: 0x4000, 0x14c1: 0x4000, 0x14c2: 0x4000, 0x14c3: 0x4000, 0x14c4: 0x4000, 0x14c5: 0x4000, - 0x14c6: 0x4000, 0x14c7: 0x4000, 0x14c8: 0x4000, 0x14c9: 0x4000, 0x14ca: 0x4000, 0x14cb: 0x4000, - 0x14cc: 0x4000, 0x14cd: 0x4000, 0x14ce: 0x4000, 0x14cf: 0x4000, 0x14d0: 0x4000, 0x14d1: 0x4000, - 0x14d2: 0x4000, 0x14d3: 0x4000, 0x14d4: 0x4000, 0x14d5: 0x4000, 0x14d6: 0x4000, 0x14d7: 0x4000, - 0x14d8: 0x4000, 0x14d9: 0x4000, 0x14da: 0x4000, 0x14db: 0x4000, 0x14dc: 0x4000, 0x14dd: 0x4000, - 0x14de: 0x4000, 0x14df: 0x4000, 0x14e0: 0x4000, 0x14e1: 0x4000, 0x14e2: 0x4000, 0x14e3: 0x4000, - 0x14e4: 0x4000, 0x14e5: 0x4000, 0x14e6: 0x4000, 0x14e7: 0x4000, 0x14e8: 0x4000, 0x14e9: 0x4000, - 0x14ea: 0x4000, 0x14eb: 0x4000, 0x14ec: 0x4000, 0x14ed: 0x4000, 0x14ee: 0x4000, 0x14ef: 0x4000, - 0x14f0: 0x4000, 0x14f1: 0x4000, 0x14f2: 0x4000, 0x14f3: 0x4000, 0x14f4: 0x4000, 0x14f5: 0x4000, - 0x14f6: 0x4000, 0x14f7: 0x4000, 0x14f8: 0x4000, 0x14f9: 0x4000, 0x14fa: 0x4000, 0x14fb: 0x4000, - 0x14fc: 0x4000, 0x14fe: 0x4000, 0x14ff: 0x4000, - // Block 0x54, offset 0x1500 - 0x1500: 0x4000, 0x1501: 0x4000, 0x1502: 0x4000, 0x1503: 0x4000, 0x1504: 0x4000, 0x1505: 0x4000, - 0x1506: 0x4000, 0x1507: 0x4000, 0x1508: 0x4000, 0x1509: 0x4000, 0x150a: 0x4000, 0x150b: 0x4000, - 0x150c: 0x4000, 0x150d: 0x4000, 0x150e: 0x4000, 0x150f: 0x4000, 0x1510: 0x4000, 0x1511: 0x4000, - 0x1512: 0x4000, 0x1513: 0x4000, - 0x1520: 0x4000, 0x1521: 
0x4000, 0x1522: 0x4000, 0x1523: 0x4000, - 0x1524: 0x4000, 0x1525: 0x4000, 0x1526: 0x4000, 0x1527: 0x4000, 0x1528: 0x4000, 0x1529: 0x4000, - 0x152a: 0x4000, 0x152b: 0x4000, 0x152c: 0x4000, 0x152d: 0x4000, 0x152e: 0x4000, 0x152f: 0x4000, - 0x1530: 0x4000, 0x1531: 0x4000, 0x1532: 0x4000, 0x1533: 0x4000, 0x1534: 0x4000, 0x1535: 0x4000, - 0x1536: 0x4000, 0x1537: 0x4000, 0x1538: 0x4000, 0x1539: 0x4000, 0x153a: 0x4000, 0x153b: 0x4000, - 0x153c: 0x4000, 0x153d: 0x4000, 0x153e: 0x4000, 0x153f: 0x4000, - // Block 0x55, offset 0x1540 - 0x1540: 0x4000, 0x1541: 0x4000, 0x1542: 0x4000, 0x1543: 0x4000, 0x1544: 0x4000, 0x1545: 0x4000, - 0x1546: 0x4000, 0x1547: 0x4000, 0x1548: 0x4000, 0x1549: 0x4000, 0x154a: 0x4000, - 0x154f: 0x4000, 0x1550: 0x4000, 0x1551: 0x4000, - 0x1552: 0x4000, 0x1553: 0x4000, - 0x1560: 0x4000, 0x1561: 0x4000, 0x1562: 0x4000, 0x1563: 0x4000, - 0x1564: 0x4000, 0x1565: 0x4000, 0x1566: 0x4000, 0x1567: 0x4000, 0x1568: 0x4000, 0x1569: 0x4000, - 0x156a: 0x4000, 0x156b: 0x4000, 0x156c: 0x4000, 0x156d: 0x4000, 0x156e: 0x4000, 0x156f: 0x4000, - 0x1570: 0x4000, 0x1574: 0x4000, - 0x1578: 0x4000, 0x1579: 0x4000, 0x157a: 0x4000, 0x157b: 0x4000, - 0x157c: 0x4000, 0x157d: 0x4000, 0x157e: 0x4000, 0x157f: 0x4000, - // Block 0x56, offset 0x1580 - 0x1580: 0x4000, 0x1582: 0x4000, 0x1583: 0x4000, 0x1584: 0x4000, 0x1585: 0x4000, - 0x1586: 0x4000, 0x1587: 0x4000, 0x1588: 0x4000, 0x1589: 0x4000, 0x158a: 0x4000, 0x158b: 0x4000, - 0x158c: 0x4000, 0x158d: 0x4000, 0x158e: 0x4000, 0x158f: 0x4000, 0x1590: 0x4000, 0x1591: 0x4000, - 0x1592: 0x4000, 0x1593: 0x4000, 0x1594: 0x4000, 0x1595: 0x4000, 0x1596: 0x4000, 0x1597: 0x4000, - 0x1598: 0x4000, 0x1599: 0x4000, 0x159a: 0x4000, 0x159b: 0x4000, 0x159c: 0x4000, 0x159d: 0x4000, - 0x159e: 0x4000, 0x159f: 0x4000, 0x15a0: 0x4000, 0x15a1: 0x4000, 0x15a2: 0x4000, 0x15a3: 0x4000, - 0x15a4: 0x4000, 0x15a5: 0x4000, 0x15a6: 0x4000, 0x15a7: 0x4000, 0x15a8: 0x4000, 0x15a9: 0x4000, - 0x15aa: 0x4000, 0x15ab: 0x4000, 0x15ac: 0x4000, 0x15ad: 0x4000, 0x15ae: 0x4000, 0x15af: 0x4000, - 0x15b0: 0x4000, 0x15b1: 0x4000, 0x15b2: 0x4000, 0x15b3: 0x4000, 0x15b4: 0x4000, 0x15b5: 0x4000, - 0x15b6: 0x4000, 0x15b7: 0x4000, 0x15b8: 0x4000, 0x15b9: 0x4000, 0x15ba: 0x4000, 0x15bb: 0x4000, - 0x15bc: 0x4000, 0x15bd: 0x4000, 0x15be: 0x4000, 0x15bf: 0x4000, - // Block 0x57, offset 0x15c0 - 0x15c0: 0x4000, 0x15c1: 0x4000, 0x15c2: 0x4000, 0x15c3: 0x4000, 0x15c4: 0x4000, 0x15c5: 0x4000, - 0x15c6: 0x4000, 0x15c7: 0x4000, 0x15c8: 0x4000, 0x15c9: 0x4000, 0x15ca: 0x4000, 0x15cb: 0x4000, - 0x15cc: 0x4000, 0x15cd: 0x4000, 0x15ce: 0x4000, 0x15cf: 0x4000, 0x15d0: 0x4000, 0x15d1: 0x4000, - 0x15d2: 0x4000, 0x15d3: 0x4000, 0x15d4: 0x4000, 0x15d5: 0x4000, 0x15d6: 0x4000, 0x15d7: 0x4000, - 0x15d8: 0x4000, 0x15d9: 0x4000, 0x15da: 0x4000, 0x15db: 0x4000, 0x15dc: 0x4000, 0x15dd: 0x4000, - 0x15de: 0x4000, 0x15df: 0x4000, 0x15e0: 0x4000, 0x15e1: 0x4000, 0x15e2: 0x4000, 0x15e3: 0x4000, - 0x15e4: 0x4000, 0x15e5: 0x4000, 0x15e6: 0x4000, 0x15e7: 0x4000, 0x15e8: 0x4000, 0x15e9: 0x4000, - 0x15ea: 0x4000, 0x15eb: 0x4000, 0x15ec: 0x4000, 0x15ed: 0x4000, 0x15ee: 0x4000, 0x15ef: 0x4000, - 0x15f0: 0x4000, 0x15f1: 0x4000, 0x15f2: 0x4000, 0x15f3: 0x4000, 0x15f4: 0x4000, 0x15f5: 0x4000, - 0x15f6: 0x4000, 0x15f7: 0x4000, 0x15f8: 0x4000, 0x15f9: 0x4000, 0x15fa: 0x4000, 0x15fb: 0x4000, - 0x15fc: 0x4000, 0x15ff: 0x4000, - // Block 0x58, offset 0x1600 - 0x1600: 0x4000, 0x1601: 0x4000, 0x1602: 0x4000, 0x1603: 0x4000, 0x1604: 0x4000, 0x1605: 0x4000, - 0x1606: 0x4000, 0x1607: 0x4000, 0x1608: 0x4000, 0x1609: 0x4000, 0x160a: 0x4000, 0x160b: 0x4000, 
- 0x160c: 0x4000, 0x160d: 0x4000, 0x160e: 0x4000, 0x160f: 0x4000, 0x1610: 0x4000, 0x1611: 0x4000, - 0x1612: 0x4000, 0x1613: 0x4000, 0x1614: 0x4000, 0x1615: 0x4000, 0x1616: 0x4000, 0x1617: 0x4000, - 0x1618: 0x4000, 0x1619: 0x4000, 0x161a: 0x4000, 0x161b: 0x4000, 0x161c: 0x4000, 0x161d: 0x4000, - 0x161e: 0x4000, 0x161f: 0x4000, 0x1620: 0x4000, 0x1621: 0x4000, 0x1622: 0x4000, 0x1623: 0x4000, - 0x1624: 0x4000, 0x1625: 0x4000, 0x1626: 0x4000, 0x1627: 0x4000, 0x1628: 0x4000, 0x1629: 0x4000, - 0x162a: 0x4000, 0x162b: 0x4000, 0x162c: 0x4000, 0x162d: 0x4000, 0x162e: 0x4000, 0x162f: 0x4000, - 0x1630: 0x4000, 0x1631: 0x4000, 0x1632: 0x4000, 0x1633: 0x4000, 0x1634: 0x4000, 0x1635: 0x4000, - 0x1636: 0x4000, 0x1637: 0x4000, 0x1638: 0x4000, 0x1639: 0x4000, 0x163a: 0x4000, 0x163b: 0x4000, - 0x163c: 0x4000, 0x163d: 0x4000, - // Block 0x59, offset 0x1640 - 0x164b: 0x4000, - 0x164c: 0x4000, 0x164d: 0x4000, 0x164e: 0x4000, 0x1650: 0x4000, 0x1651: 0x4000, - 0x1652: 0x4000, 0x1653: 0x4000, 0x1654: 0x4000, 0x1655: 0x4000, 0x1656: 0x4000, 0x1657: 0x4000, - 0x1658: 0x4000, 0x1659: 0x4000, 0x165a: 0x4000, 0x165b: 0x4000, 0x165c: 0x4000, 0x165d: 0x4000, - 0x165e: 0x4000, 0x165f: 0x4000, 0x1660: 0x4000, 0x1661: 0x4000, 0x1662: 0x4000, 0x1663: 0x4000, - 0x1664: 0x4000, 0x1665: 0x4000, 0x1666: 0x4000, 0x1667: 0x4000, - 0x167a: 0x4000, - // Block 0x5a, offset 0x1680 - 0x1695: 0x4000, 0x1696: 0x4000, - 0x16a4: 0x4000, - // Block 0x5b, offset 0x16c0 - 0x16fb: 0x4000, - 0x16fc: 0x4000, 0x16fd: 0x4000, 0x16fe: 0x4000, 0x16ff: 0x4000, - // Block 0x5c, offset 0x1700 - 0x1700: 0x4000, 0x1701: 0x4000, 0x1702: 0x4000, 0x1703: 0x4000, 0x1704: 0x4000, 0x1705: 0x4000, - 0x1706: 0x4000, 0x1707: 0x4000, 0x1708: 0x4000, 0x1709: 0x4000, 0x170a: 0x4000, 0x170b: 0x4000, - 0x170c: 0x4000, 0x170d: 0x4000, 0x170e: 0x4000, 0x170f: 0x4000, - // Block 0x5d, offset 0x1740 - 0x1740: 0x4000, 0x1741: 0x4000, 0x1742: 0x4000, 0x1743: 0x4000, 0x1744: 0x4000, 0x1745: 0x4000, - 0x174c: 0x4000, 0x1750: 0x4000, 0x1751: 0x4000, - 0x1752: 0x4000, - 0x176b: 0x4000, 0x176c: 0x4000, - 0x1774: 0x4000, 0x1775: 0x4000, - 0x1776: 0x4000, - // Block 0x5e, offset 0x1780 - 0x1790: 0x4000, 0x1791: 0x4000, - 0x1792: 0x4000, 0x1793: 0x4000, 0x1794: 0x4000, 0x1795: 0x4000, 0x1796: 0x4000, 0x1797: 0x4000, - 0x1798: 0x4000, 0x1799: 0x4000, 0x179a: 0x4000, 0x179b: 0x4000, 0x179c: 0x4000, 0x179d: 0x4000, - 0x179e: 0x4000, 0x17a0: 0x4000, 0x17a1: 0x4000, 0x17a2: 0x4000, 0x17a3: 0x4000, - 0x17a4: 0x4000, 0x17a5: 0x4000, 0x17a6: 0x4000, 0x17a7: 0x4000, - 0x17b0: 0x4000, 0x17b3: 0x4000, 0x17b4: 0x4000, 0x17b5: 0x4000, - 0x17b6: 0x4000, 0x17b7: 0x4000, 0x17b8: 0x4000, 0x17b9: 0x4000, 0x17ba: 0x4000, 0x17bb: 0x4000, - 0x17bc: 0x4000, 0x17bd: 0x4000, 0x17be: 0x4000, - // Block 0x5f, offset 0x17c0 - 0x17c0: 0x4000, 0x17c1: 0x4000, 0x17c2: 0x4000, 0x17c3: 0x4000, 0x17c4: 0x4000, 0x17c5: 0x4000, - 0x17c6: 0x4000, 0x17c7: 0x4000, 0x17c8: 0x4000, 0x17c9: 0x4000, 0x17ca: 0x4000, 0x17cb: 0x4000, - 0x17d0: 0x4000, 0x17d1: 0x4000, - 0x17d2: 0x4000, 0x17d3: 0x4000, 0x17d4: 0x4000, 0x17d5: 0x4000, 0x17d6: 0x4000, 0x17d7: 0x4000, - 0x17d8: 0x4000, 0x17d9: 0x4000, 0x17da: 0x4000, 0x17db: 0x4000, 0x17dc: 0x4000, 0x17dd: 0x4000, - 0x17de: 0x4000, - // Block 0x60, offset 0x1800 - 0x1800: 0x4000, 0x1801: 0x4000, 0x1802: 0x4000, 0x1803: 0x4000, 0x1804: 0x4000, 0x1805: 0x4000, - 0x1806: 0x4000, 0x1807: 0x4000, 0x1808: 0x4000, 0x1809: 0x4000, 0x180a: 0x4000, 0x180b: 0x4000, - 0x180c: 0x4000, 0x180d: 0x4000, 0x180e: 0x4000, 0x180f: 0x4000, 0x1810: 0x4000, 0x1811: 0x4000, - // Block 0x61, offset 0x1840 
- 0x1840: 0x4000, - // Block 0x62, offset 0x1880 - 0x1880: 0x2000, 0x1881: 0x2000, 0x1882: 0x2000, 0x1883: 0x2000, 0x1884: 0x2000, 0x1885: 0x2000, - 0x1886: 0x2000, 0x1887: 0x2000, 0x1888: 0x2000, 0x1889: 0x2000, 0x188a: 0x2000, 0x188b: 0x2000, - 0x188c: 0x2000, 0x188d: 0x2000, 0x188e: 0x2000, 0x188f: 0x2000, 0x1890: 0x2000, 0x1891: 0x2000, - 0x1892: 0x2000, 0x1893: 0x2000, 0x1894: 0x2000, 0x1895: 0x2000, 0x1896: 0x2000, 0x1897: 0x2000, - 0x1898: 0x2000, 0x1899: 0x2000, 0x189a: 0x2000, 0x189b: 0x2000, 0x189c: 0x2000, 0x189d: 0x2000, - 0x189e: 0x2000, 0x189f: 0x2000, 0x18a0: 0x2000, 0x18a1: 0x2000, 0x18a2: 0x2000, 0x18a3: 0x2000, - 0x18a4: 0x2000, 0x18a5: 0x2000, 0x18a6: 0x2000, 0x18a7: 0x2000, 0x18a8: 0x2000, 0x18a9: 0x2000, - 0x18aa: 0x2000, 0x18ab: 0x2000, 0x18ac: 0x2000, 0x18ad: 0x2000, 0x18ae: 0x2000, 0x18af: 0x2000, - 0x18b0: 0x2000, 0x18b1: 0x2000, 0x18b2: 0x2000, 0x18b3: 0x2000, 0x18b4: 0x2000, 0x18b5: 0x2000, - 0x18b6: 0x2000, 0x18b7: 0x2000, 0x18b8: 0x2000, 0x18b9: 0x2000, 0x18ba: 0x2000, 0x18bb: 0x2000, - 0x18bc: 0x2000, 0x18bd: 0x2000, -} - -// widthIndex: 22 blocks, 1408 entries, 1408 bytes -// Block 0 is the zero block. -var widthIndex = [1408]uint8{ - // Block 0x0, offset 0x0 - // Block 0x1, offset 0x40 - // Block 0x2, offset 0x80 - // Block 0x3, offset 0xc0 - 0xc2: 0x01, 0xc3: 0x02, 0xc4: 0x03, 0xc5: 0x04, 0xc7: 0x05, - 0xc9: 0x06, 0xcb: 0x07, 0xcc: 0x08, 0xcd: 0x09, 0xce: 0x0a, 0xcf: 0x0b, - 0xd0: 0x0c, 0xd1: 0x0d, - 0xe1: 0x02, 0xe2: 0x03, 0xe3: 0x04, 0xe4: 0x05, 0xe5: 0x06, 0xe6: 0x06, 0xe7: 0x06, - 0xe8: 0x06, 0xe9: 0x06, 0xea: 0x07, 0xeb: 0x06, 0xec: 0x06, 0xed: 0x08, 0xee: 0x09, 0xef: 0x0a, - 0xf0: 0x0f, 0xf3: 0x12, 0xf4: 0x13, - // Block 0x4, offset 0x100 - 0x104: 0x0e, 0x105: 0x0f, - // Block 0x5, offset 0x140 - 0x140: 0x10, 0x141: 0x11, 0x142: 0x12, 0x144: 0x13, 0x145: 0x14, 0x146: 0x15, 0x147: 0x16, - 0x148: 0x17, 0x149: 0x18, 0x14a: 0x19, 0x14c: 0x1a, 0x14f: 0x1b, - 0x151: 0x1c, 0x152: 0x08, 0x153: 0x1d, 0x154: 0x1e, 0x155: 0x1f, 0x156: 0x20, 0x157: 0x21, - 0x158: 0x22, 0x159: 0x23, 0x15a: 0x24, 0x15b: 0x25, 0x15c: 0x26, 0x15d: 0x27, 0x15e: 0x28, 0x15f: 0x29, - 0x166: 0x2a, - 0x16c: 0x2b, 0x16d: 0x2c, - 0x17a: 0x2d, 0x17b: 0x2e, 0x17c: 0x0e, 0x17d: 0x0e, 0x17e: 0x0e, 0x17f: 0x2f, - // Block 0x6, offset 0x180 - 0x180: 0x30, 0x181: 0x31, 0x182: 0x32, 0x183: 0x33, 0x184: 0x34, 0x185: 0x35, 0x186: 0x36, 0x187: 0x37, - 0x188: 0x38, 0x189: 0x39, 0x18a: 0x0e, 0x18b: 0x3a, 0x18c: 0x0e, 0x18d: 0x0e, 0x18e: 0x0e, 0x18f: 0x0e, - 0x190: 0x0e, 0x191: 0x0e, 0x192: 0x0e, 0x193: 0x0e, 0x194: 0x0e, 0x195: 0x0e, 0x196: 0x0e, 0x197: 0x0e, - 0x198: 0x0e, 0x199: 0x0e, 0x19a: 0x0e, 0x19b: 0x0e, 0x19c: 0x0e, 0x19d: 0x0e, 0x19e: 0x0e, 0x19f: 0x0e, - 0x1a0: 0x0e, 0x1a1: 0x0e, 0x1a2: 0x0e, 0x1a3: 0x0e, 0x1a4: 0x0e, 0x1a5: 0x0e, 0x1a6: 0x0e, 0x1a7: 0x0e, - 0x1a8: 0x0e, 0x1a9: 0x0e, 0x1aa: 0x0e, 0x1ab: 0x0e, 0x1ac: 0x0e, 0x1ad: 0x0e, 0x1ae: 0x0e, 0x1af: 0x0e, - 0x1b0: 0x0e, 0x1b1: 0x0e, 0x1b2: 0x0e, 0x1b3: 0x0e, 0x1b4: 0x0e, 0x1b5: 0x0e, 0x1b6: 0x0e, 0x1b7: 0x0e, - 0x1b8: 0x0e, 0x1b9: 0x0e, 0x1ba: 0x0e, 0x1bb: 0x0e, 0x1bc: 0x0e, 0x1bd: 0x0e, 0x1be: 0x0e, 0x1bf: 0x0e, - // Block 0x7, offset 0x1c0 - 0x1c0: 0x0e, 0x1c1: 0x0e, 0x1c2: 0x0e, 0x1c3: 0x0e, 0x1c4: 0x0e, 0x1c5: 0x0e, 0x1c6: 0x0e, 0x1c7: 0x0e, - 0x1c8: 0x0e, 0x1c9: 0x0e, 0x1ca: 0x0e, 0x1cb: 0x0e, 0x1cc: 0x0e, 0x1cd: 0x0e, 0x1ce: 0x0e, 0x1cf: 0x0e, - 0x1d0: 0x0e, 0x1d1: 0x0e, 0x1d2: 0x0e, 0x1d3: 0x0e, 0x1d4: 0x0e, 0x1d5: 0x0e, 0x1d6: 0x0e, 0x1d7: 0x0e, - 0x1d8: 0x0e, 0x1d9: 0x0e, 0x1da: 0x0e, 0x1db: 0x0e, 0x1dc: 0x0e, 0x1dd: 0x0e, 
0x1de: 0x0e, 0x1df: 0x0e, - 0x1e0: 0x0e, 0x1e1: 0x0e, 0x1e2: 0x0e, 0x1e3: 0x0e, 0x1e4: 0x0e, 0x1e5: 0x0e, 0x1e6: 0x0e, 0x1e7: 0x0e, - 0x1e8: 0x0e, 0x1e9: 0x0e, 0x1ea: 0x0e, 0x1eb: 0x0e, 0x1ec: 0x0e, 0x1ed: 0x0e, 0x1ee: 0x0e, 0x1ef: 0x0e, - 0x1f0: 0x0e, 0x1f1: 0x0e, 0x1f2: 0x0e, 0x1f3: 0x0e, 0x1f4: 0x0e, 0x1f5: 0x0e, 0x1f6: 0x0e, - 0x1f8: 0x0e, 0x1f9: 0x0e, 0x1fa: 0x0e, 0x1fb: 0x0e, 0x1fc: 0x0e, 0x1fd: 0x0e, 0x1fe: 0x0e, 0x1ff: 0x0e, - // Block 0x8, offset 0x200 - 0x200: 0x0e, 0x201: 0x0e, 0x202: 0x0e, 0x203: 0x0e, 0x204: 0x0e, 0x205: 0x0e, 0x206: 0x0e, 0x207: 0x0e, - 0x208: 0x0e, 0x209: 0x0e, 0x20a: 0x0e, 0x20b: 0x0e, 0x20c: 0x0e, 0x20d: 0x0e, 0x20e: 0x0e, 0x20f: 0x0e, - 0x210: 0x0e, 0x211: 0x0e, 0x212: 0x0e, 0x213: 0x0e, 0x214: 0x0e, 0x215: 0x0e, 0x216: 0x0e, 0x217: 0x0e, - 0x218: 0x0e, 0x219: 0x0e, 0x21a: 0x0e, 0x21b: 0x0e, 0x21c: 0x0e, 0x21d: 0x0e, 0x21e: 0x0e, 0x21f: 0x0e, - 0x220: 0x0e, 0x221: 0x0e, 0x222: 0x0e, 0x223: 0x0e, 0x224: 0x0e, 0x225: 0x0e, 0x226: 0x0e, 0x227: 0x0e, - 0x228: 0x0e, 0x229: 0x0e, 0x22a: 0x0e, 0x22b: 0x0e, 0x22c: 0x0e, 0x22d: 0x0e, 0x22e: 0x0e, 0x22f: 0x0e, - 0x230: 0x0e, 0x231: 0x0e, 0x232: 0x0e, 0x233: 0x0e, 0x234: 0x0e, 0x235: 0x0e, 0x236: 0x0e, 0x237: 0x0e, - 0x238: 0x0e, 0x239: 0x0e, 0x23a: 0x0e, 0x23b: 0x0e, 0x23c: 0x0e, 0x23d: 0x0e, 0x23e: 0x0e, 0x23f: 0x0e, - // Block 0x9, offset 0x240 - 0x240: 0x0e, 0x241: 0x0e, 0x242: 0x0e, 0x243: 0x0e, 0x244: 0x0e, 0x245: 0x0e, 0x246: 0x0e, 0x247: 0x0e, - 0x248: 0x0e, 0x249: 0x0e, 0x24a: 0x0e, 0x24b: 0x0e, 0x24c: 0x0e, 0x24d: 0x0e, 0x24e: 0x0e, 0x24f: 0x0e, - 0x250: 0x0e, 0x251: 0x0e, 0x252: 0x3b, 0x253: 0x3c, - 0x265: 0x3d, - 0x270: 0x0e, 0x271: 0x0e, 0x272: 0x0e, 0x273: 0x0e, 0x274: 0x0e, 0x275: 0x0e, 0x276: 0x0e, 0x277: 0x0e, - 0x278: 0x0e, 0x279: 0x0e, 0x27a: 0x0e, 0x27b: 0x0e, 0x27c: 0x0e, 0x27d: 0x0e, 0x27e: 0x0e, 0x27f: 0x0e, - // Block 0xa, offset 0x280 - 0x280: 0x0e, 0x281: 0x0e, 0x282: 0x0e, 0x283: 0x0e, 0x284: 0x0e, 0x285: 0x0e, 0x286: 0x0e, 0x287: 0x0e, - 0x288: 0x0e, 0x289: 0x0e, 0x28a: 0x0e, 0x28b: 0x0e, 0x28c: 0x0e, 0x28d: 0x0e, 0x28e: 0x0e, 0x28f: 0x0e, - 0x290: 0x0e, 0x291: 0x0e, 0x292: 0x0e, 0x293: 0x0e, 0x294: 0x0e, 0x295: 0x0e, 0x296: 0x0e, 0x297: 0x0e, - 0x298: 0x0e, 0x299: 0x0e, 0x29a: 0x0e, 0x29b: 0x0e, 0x29c: 0x0e, 0x29d: 0x0e, 0x29e: 0x3e, - // Block 0xb, offset 0x2c0 - 0x2c0: 0x08, 0x2c1: 0x08, 0x2c2: 0x08, 0x2c3: 0x08, 0x2c4: 0x08, 0x2c5: 0x08, 0x2c6: 0x08, 0x2c7: 0x08, - 0x2c8: 0x08, 0x2c9: 0x08, 0x2ca: 0x08, 0x2cb: 0x08, 0x2cc: 0x08, 0x2cd: 0x08, 0x2ce: 0x08, 0x2cf: 0x08, - 0x2d0: 0x08, 0x2d1: 0x08, 0x2d2: 0x08, 0x2d3: 0x08, 0x2d4: 0x08, 0x2d5: 0x08, 0x2d6: 0x08, 0x2d7: 0x08, - 0x2d8: 0x08, 0x2d9: 0x08, 0x2da: 0x08, 0x2db: 0x08, 0x2dc: 0x08, 0x2dd: 0x08, 0x2de: 0x08, 0x2df: 0x08, - 0x2e0: 0x08, 0x2e1: 0x08, 0x2e2: 0x08, 0x2e3: 0x08, 0x2e4: 0x08, 0x2e5: 0x08, 0x2e6: 0x08, 0x2e7: 0x08, - 0x2e8: 0x08, 0x2e9: 0x08, 0x2ea: 0x08, 0x2eb: 0x08, 0x2ec: 0x08, 0x2ed: 0x08, 0x2ee: 0x08, 0x2ef: 0x08, - 0x2f0: 0x08, 0x2f1: 0x08, 0x2f2: 0x08, 0x2f3: 0x08, 0x2f4: 0x08, 0x2f5: 0x08, 0x2f6: 0x08, 0x2f7: 0x08, - 0x2f8: 0x08, 0x2f9: 0x08, 0x2fa: 0x08, 0x2fb: 0x08, 0x2fc: 0x08, 0x2fd: 0x08, 0x2fe: 0x08, 0x2ff: 0x08, - // Block 0xc, offset 0x300 - 0x300: 0x08, 0x301: 0x08, 0x302: 0x08, 0x303: 0x08, 0x304: 0x08, 0x305: 0x08, 0x306: 0x08, 0x307: 0x08, - 0x308: 0x08, 0x309: 0x08, 0x30a: 0x08, 0x30b: 0x08, 0x30c: 0x08, 0x30d: 0x08, 0x30e: 0x08, 0x30f: 0x08, - 0x310: 0x08, 0x311: 0x08, 0x312: 0x08, 0x313: 0x08, 0x314: 0x08, 0x315: 0x08, 0x316: 0x08, 0x317: 0x08, - 0x318: 0x08, 0x319: 0x08, 0x31a: 0x08, 0x31b: 0x08, 
0x31c: 0x08, 0x31d: 0x08, 0x31e: 0x08, 0x31f: 0x08, - 0x320: 0x08, 0x321: 0x08, 0x322: 0x08, 0x323: 0x08, 0x324: 0x0e, 0x325: 0x0e, 0x326: 0x0e, 0x327: 0x0e, - 0x328: 0x0e, 0x329: 0x0e, 0x32a: 0x0e, 0x32b: 0x0e, - 0x338: 0x3f, 0x339: 0x40, 0x33c: 0x41, 0x33d: 0x42, 0x33e: 0x43, 0x33f: 0x44, - // Block 0xd, offset 0x340 - 0x37f: 0x45, - // Block 0xe, offset 0x380 - 0x380: 0x0e, 0x381: 0x0e, 0x382: 0x0e, 0x383: 0x0e, 0x384: 0x0e, 0x385: 0x0e, 0x386: 0x0e, 0x387: 0x0e, - 0x388: 0x0e, 0x389: 0x0e, 0x38a: 0x0e, 0x38b: 0x0e, 0x38c: 0x0e, 0x38d: 0x0e, 0x38e: 0x0e, 0x38f: 0x0e, - 0x390: 0x0e, 0x391: 0x0e, 0x392: 0x0e, 0x393: 0x0e, 0x394: 0x0e, 0x395: 0x0e, 0x396: 0x0e, 0x397: 0x0e, - 0x398: 0x0e, 0x399: 0x0e, 0x39a: 0x0e, 0x39b: 0x0e, 0x39c: 0x0e, 0x39d: 0x0e, 0x39e: 0x0e, 0x39f: 0x46, - 0x3a0: 0x0e, 0x3a1: 0x0e, 0x3a2: 0x0e, 0x3a3: 0x0e, 0x3a4: 0x0e, 0x3a5: 0x0e, 0x3a6: 0x0e, 0x3a7: 0x0e, - 0x3a8: 0x0e, 0x3a9: 0x0e, 0x3aa: 0x0e, 0x3ab: 0x47, - // Block 0xf, offset 0x3c0 - 0x3c0: 0x48, - // Block 0x10, offset 0x400 - 0x400: 0x49, 0x403: 0x4a, 0x404: 0x4b, 0x405: 0x4c, 0x406: 0x4d, - 0x408: 0x4e, 0x409: 0x4f, 0x40c: 0x50, 0x40d: 0x51, 0x40e: 0x52, 0x40f: 0x53, - 0x410: 0x3a, 0x411: 0x54, 0x412: 0x0e, 0x413: 0x55, 0x414: 0x56, 0x415: 0x57, 0x416: 0x58, 0x417: 0x59, - 0x418: 0x0e, 0x419: 0x5a, 0x41a: 0x0e, 0x41b: 0x5b, - 0x424: 0x5c, 0x425: 0x5d, 0x426: 0x5e, 0x427: 0x5f, - // Block 0x11, offset 0x440 - 0x456: 0x0b, 0x457: 0x06, - 0x458: 0x0c, 0x45b: 0x0d, 0x45f: 0x0e, - 0x460: 0x06, 0x461: 0x06, 0x462: 0x06, 0x463: 0x06, 0x464: 0x06, 0x465: 0x06, 0x466: 0x06, 0x467: 0x06, - 0x468: 0x06, 0x469: 0x06, 0x46a: 0x06, 0x46b: 0x06, 0x46c: 0x06, 0x46d: 0x06, 0x46e: 0x06, 0x46f: 0x06, - 0x470: 0x06, 0x471: 0x06, 0x472: 0x06, 0x473: 0x06, 0x474: 0x06, 0x475: 0x06, 0x476: 0x06, 0x477: 0x06, - 0x478: 0x06, 0x479: 0x06, 0x47a: 0x06, 0x47b: 0x06, 0x47c: 0x06, 0x47d: 0x06, 0x47e: 0x06, 0x47f: 0x06, - // Block 0x12, offset 0x480 - 0x484: 0x08, 0x485: 0x08, 0x486: 0x08, 0x487: 0x09, - // Block 0x13, offset 0x4c0 - 0x4c0: 0x08, 0x4c1: 0x08, 0x4c2: 0x08, 0x4c3: 0x08, 0x4c4: 0x08, 0x4c5: 0x08, 0x4c6: 0x08, 0x4c7: 0x08, - 0x4c8: 0x08, 0x4c9: 0x08, 0x4ca: 0x08, 0x4cb: 0x08, 0x4cc: 0x08, 0x4cd: 0x08, 0x4ce: 0x08, 0x4cf: 0x08, - 0x4d0: 0x08, 0x4d1: 0x08, 0x4d2: 0x08, 0x4d3: 0x08, 0x4d4: 0x08, 0x4d5: 0x08, 0x4d6: 0x08, 0x4d7: 0x08, - 0x4d8: 0x08, 0x4d9: 0x08, 0x4da: 0x08, 0x4db: 0x08, 0x4dc: 0x08, 0x4dd: 0x08, 0x4de: 0x08, 0x4df: 0x08, - 0x4e0: 0x08, 0x4e1: 0x08, 0x4e2: 0x08, 0x4e3: 0x08, 0x4e4: 0x08, 0x4e5: 0x08, 0x4e6: 0x08, 0x4e7: 0x08, - 0x4e8: 0x08, 0x4e9: 0x08, 0x4ea: 0x08, 0x4eb: 0x08, 0x4ec: 0x08, 0x4ed: 0x08, 0x4ee: 0x08, 0x4ef: 0x08, - 0x4f0: 0x08, 0x4f1: 0x08, 0x4f2: 0x08, 0x4f3: 0x08, 0x4f4: 0x08, 0x4f5: 0x08, 0x4f6: 0x08, 0x4f7: 0x08, - 0x4f8: 0x08, 0x4f9: 0x08, 0x4fa: 0x08, 0x4fb: 0x08, 0x4fc: 0x08, 0x4fd: 0x08, 0x4fe: 0x08, 0x4ff: 0x60, - // Block 0x14, offset 0x500 - 0x520: 0x10, - 0x530: 0x09, 0x531: 0x09, 0x532: 0x09, 0x533: 0x09, 0x534: 0x09, 0x535: 0x09, 0x536: 0x09, 0x537: 0x09, - 0x538: 0x09, 0x539: 0x09, 0x53a: 0x09, 0x53b: 0x09, 0x53c: 0x09, 0x53d: 0x09, 0x53e: 0x09, 0x53f: 0x11, - // Block 0x15, offset 0x540 - 0x540: 0x09, 0x541: 0x09, 0x542: 0x09, 0x543: 0x09, 0x544: 0x09, 0x545: 0x09, 0x546: 0x09, 0x547: 0x09, - 0x548: 0x09, 0x549: 0x09, 0x54a: 0x09, 0x54b: 0x09, 0x54c: 0x09, 0x54d: 0x09, 0x54e: 0x09, 0x54f: 0x11, -} - -// inverseData contains 4-byte entries of the following format: -// -// <0 padding> -// -// The last byte of the UTF-8-encoded rune is xor-ed with the last byte of the -// UTF-8 
encoding of the original rune. Mappings often have the following -// pattern: -// -// A -> A (U+FF21 -> U+0041) -// B -> B (U+FF22 -> U+0042) -// ... -// -// By xor-ing the last byte the same entry can be shared by many mappings. This -// reduces the total number of distinct entries by about two thirds. -// The resulting entry for the aforementioned mappings is -// -// { 0x01, 0xE0, 0x00, 0x00 } -// -// Using this entry to map U+FF21 (UTF-8 [EF BC A1]), we get -// -// E0 ^ A1 = 41. -// -// Similarly, for U+FF22 (UTF-8 [EF BC A2]), we get -// -// E0 ^ A2 = 42. -// -// Note that because of the xor-ing, the byte sequence stored in the entry is -// not valid UTF-8. -var inverseData = [150][4]byte{ - {0x00, 0x00, 0x00, 0x00}, - {0x03, 0xe3, 0x80, 0xa0}, - {0x03, 0xef, 0xbc, 0xa0}, - {0x03, 0xef, 0xbc, 0xe0}, - {0x03, 0xef, 0xbd, 0xe0}, - {0x03, 0xef, 0xbf, 0x02}, - {0x03, 0xef, 0xbf, 0x00}, - {0x03, 0xef, 0xbf, 0x0e}, - {0x03, 0xef, 0xbf, 0x0c}, - {0x03, 0xef, 0xbf, 0x0f}, - {0x03, 0xef, 0xbf, 0x39}, - {0x03, 0xef, 0xbf, 0x3b}, - {0x03, 0xef, 0xbf, 0x3f}, - {0x03, 0xef, 0xbf, 0x2a}, - {0x03, 0xef, 0xbf, 0x0d}, - {0x03, 0xef, 0xbf, 0x25}, - {0x03, 0xef, 0xbd, 0x1a}, - {0x03, 0xef, 0xbd, 0x26}, - {0x01, 0xa0, 0x00, 0x00}, - {0x03, 0xef, 0xbd, 0x25}, - {0x03, 0xef, 0xbd, 0x23}, - {0x03, 0xef, 0xbd, 0x2e}, - {0x03, 0xef, 0xbe, 0x07}, - {0x03, 0xef, 0xbe, 0x05}, - {0x03, 0xef, 0xbd, 0x06}, - {0x03, 0xef, 0xbd, 0x13}, - {0x03, 0xef, 0xbd, 0x0b}, - {0x03, 0xef, 0xbd, 0x16}, - {0x03, 0xef, 0xbd, 0x0c}, - {0x03, 0xef, 0xbd, 0x15}, - {0x03, 0xef, 0xbd, 0x0d}, - {0x03, 0xef, 0xbd, 0x1c}, - {0x03, 0xef, 0xbd, 0x02}, - {0x03, 0xef, 0xbd, 0x1f}, - {0x03, 0xef, 0xbd, 0x1d}, - {0x03, 0xef, 0xbd, 0x17}, - {0x03, 0xef, 0xbd, 0x08}, - {0x03, 0xef, 0xbd, 0x09}, - {0x03, 0xef, 0xbd, 0x0e}, - {0x03, 0xef, 0xbd, 0x04}, - {0x03, 0xef, 0xbd, 0x05}, - {0x03, 0xef, 0xbe, 0x3f}, - {0x03, 0xef, 0xbe, 0x00}, - {0x03, 0xef, 0xbd, 0x2c}, - {0x03, 0xef, 0xbe, 0x06}, - {0x03, 0xef, 0xbe, 0x0c}, - {0x03, 0xef, 0xbe, 0x0f}, - {0x03, 0xef, 0xbe, 0x0d}, - {0x03, 0xef, 0xbe, 0x0b}, - {0x03, 0xef, 0xbe, 0x19}, - {0x03, 0xef, 0xbe, 0x15}, - {0x03, 0xef, 0xbe, 0x11}, - {0x03, 0xef, 0xbe, 0x31}, - {0x03, 0xef, 0xbe, 0x33}, - {0x03, 0xef, 0xbd, 0x0f}, - {0x03, 0xef, 0xbe, 0x30}, - {0x03, 0xef, 0xbe, 0x3e}, - {0x03, 0xef, 0xbe, 0x32}, - {0x03, 0xef, 0xbe, 0x36}, - {0x03, 0xef, 0xbd, 0x14}, - {0x03, 0xef, 0xbe, 0x2e}, - {0x03, 0xef, 0xbd, 0x1e}, - {0x03, 0xef, 0xbe, 0x10}, - {0x03, 0xef, 0xbf, 0x13}, - {0x03, 0xef, 0xbf, 0x15}, - {0x03, 0xef, 0xbf, 0x17}, - {0x03, 0xef, 0xbf, 0x1f}, - {0x03, 0xef, 0xbf, 0x1d}, - {0x03, 0xef, 0xbf, 0x1b}, - {0x03, 0xef, 0xbf, 0x09}, - {0x03, 0xef, 0xbf, 0x0b}, - {0x03, 0xef, 0xbf, 0x37}, - {0x03, 0xef, 0xbe, 0x04}, - {0x01, 0xe0, 0x00, 0x00}, - {0x03, 0xe2, 0xa6, 0x1a}, - {0x03, 0xe2, 0xa6, 0x26}, - {0x03, 0xe3, 0x80, 0x23}, - {0x03, 0xe3, 0x80, 0x2e}, - {0x03, 0xe3, 0x80, 0x25}, - {0x03, 0xe3, 0x83, 0x1e}, - {0x03, 0xe3, 0x83, 0x14}, - {0x03, 0xe3, 0x82, 0x06}, - {0x03, 0xe3, 0x82, 0x0b}, - {0x03, 0xe3, 0x82, 0x0c}, - {0x03, 0xe3, 0x82, 0x0d}, - {0x03, 0xe3, 0x82, 0x02}, - {0x03, 0xe3, 0x83, 0x0f}, - {0x03, 0xe3, 0x83, 0x08}, - {0x03, 0xe3, 0x83, 0x09}, - {0x03, 0xe3, 0x83, 0x2c}, - {0x03, 0xe3, 0x83, 0x0c}, - {0x03, 0xe3, 0x82, 0x13}, - {0x03, 0xe3, 0x82, 0x16}, - {0x03, 0xe3, 0x82, 0x15}, - {0x03, 0xe3, 0x82, 0x1c}, - {0x03, 0xe3, 0x82, 0x1f}, - {0x03, 0xe3, 0x82, 0x1d}, - {0x03, 0xe3, 0x82, 0x1a}, - {0x03, 0xe3, 0x82, 0x17}, - {0x03, 0xe3, 0x82, 0x08}, - {0x03, 0xe3, 0x82, 0x09}, - {0x03, 0xe3, 0x82, 
0x0e}, - {0x03, 0xe3, 0x82, 0x04}, - {0x03, 0xe3, 0x82, 0x05}, - {0x03, 0xe3, 0x82, 0x3f}, - {0x03, 0xe3, 0x83, 0x00}, - {0x03, 0xe3, 0x83, 0x06}, - {0x03, 0xe3, 0x83, 0x05}, - {0x03, 0xe3, 0x83, 0x0d}, - {0x03, 0xe3, 0x83, 0x0b}, - {0x03, 0xe3, 0x83, 0x07}, - {0x03, 0xe3, 0x83, 0x19}, - {0x03, 0xe3, 0x83, 0x15}, - {0x03, 0xe3, 0x83, 0x11}, - {0x03, 0xe3, 0x83, 0x31}, - {0x03, 0xe3, 0x83, 0x33}, - {0x03, 0xe3, 0x83, 0x30}, - {0x03, 0xe3, 0x83, 0x3e}, - {0x03, 0xe3, 0x83, 0x32}, - {0x03, 0xe3, 0x83, 0x36}, - {0x03, 0xe3, 0x83, 0x2e}, - {0x03, 0xe3, 0x82, 0x07}, - {0x03, 0xe3, 0x85, 0x04}, - {0x03, 0xe3, 0x84, 0x10}, - {0x03, 0xe3, 0x85, 0x30}, - {0x03, 0xe3, 0x85, 0x0d}, - {0x03, 0xe3, 0x85, 0x13}, - {0x03, 0xe3, 0x85, 0x15}, - {0x03, 0xe3, 0x85, 0x17}, - {0x03, 0xe3, 0x85, 0x1f}, - {0x03, 0xe3, 0x85, 0x1d}, - {0x03, 0xe3, 0x85, 0x1b}, - {0x03, 0xe3, 0x85, 0x09}, - {0x03, 0xe3, 0x85, 0x0f}, - {0x03, 0xe3, 0x85, 0x0b}, - {0x03, 0xe3, 0x85, 0x37}, - {0x03, 0xe3, 0x85, 0x3b}, - {0x03, 0xe3, 0x85, 0x39}, - {0x03, 0xe3, 0x85, 0x3f}, - {0x02, 0xc2, 0x02, 0x00}, - {0x02, 0xc2, 0x0e, 0x00}, - {0x02, 0xc2, 0x0c, 0x00}, - {0x02, 0xc2, 0x00, 0x00}, - {0x03, 0xe2, 0x82, 0x0f}, - {0x03, 0xe2, 0x94, 0x2a}, - {0x03, 0xe2, 0x86, 0x39}, - {0x03, 0xe2, 0x86, 0x3b}, - {0x03, 0xe2, 0x86, 0x3f}, - {0x03, 0xe2, 0x96, 0x0d}, - {0x03, 0xe2, 0x97, 0x25}, -} - -// Total table size 14680 bytes (14KiB) diff --git a/vendor/golang.org/x/text/width/transform.go b/vendor/golang.org/x/text/width/transform.go deleted file mode 100644 index 0049f700a..000000000 --- a/vendor/golang.org/x/text/width/transform.go +++ /dev/null @@ -1,239 +0,0 @@ -// Copyright 2015 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package width - -import ( - "unicode/utf8" - - "golang.org/x/text/transform" -) - -type foldTransform struct { - transform.NopResetter -} - -func (foldTransform) Span(src []byte, atEOF bool) (n int, err error) { - for n < len(src) { - if src[n] < utf8.RuneSelf { - // ASCII fast path. - for n++; n < len(src) && src[n] < utf8.RuneSelf; n++ { - } - continue - } - v, size := trie.lookup(src[n:]) - if size == 0 { // incomplete UTF-8 encoding - if !atEOF { - err = transform.ErrShortSrc - } else { - n = len(src) - } - break - } - if elem(v)&tagNeedsFold != 0 { - err = transform.ErrEndOfSpan - break - } - n += size - } - return n, err -} - -func (foldTransform) Transform(dst, src []byte, atEOF bool) (nDst, nSrc int, err error) { - for nSrc < len(src) { - if src[nSrc] < utf8.RuneSelf { - // ASCII fast path. 
- start, end := nSrc, len(src) - if d := len(dst) - nDst; d < end-start { - end = nSrc + d - } - for nSrc++; nSrc < end && src[nSrc] < utf8.RuneSelf; nSrc++ { - } - n := copy(dst[nDst:], src[start:nSrc]) - if nDst += n; nDst == len(dst) { - nSrc = start + n - if nSrc == len(src) { - return nDst, nSrc, nil - } - if src[nSrc] < utf8.RuneSelf { - return nDst, nSrc, transform.ErrShortDst - } - } - continue - } - v, size := trie.lookup(src[nSrc:]) - if size == 0 { // incomplete UTF-8 encoding - if !atEOF { - return nDst, nSrc, transform.ErrShortSrc - } - size = 1 // gobble 1 byte - } - if elem(v)&tagNeedsFold == 0 { - if size != copy(dst[nDst:], src[nSrc:nSrc+size]) { - return nDst, nSrc, transform.ErrShortDst - } - nDst += size - } else { - data := inverseData[byte(v)] - if len(dst)-nDst < int(data[0]) { - return nDst, nSrc, transform.ErrShortDst - } - i := 1 - for end := int(data[0]); i < end; i++ { - dst[nDst] = data[i] - nDst++ - } - dst[nDst] = data[i] ^ src[nSrc+size-1] - nDst++ - } - nSrc += size - } - return nDst, nSrc, nil -} - -type narrowTransform struct { - transform.NopResetter -} - -func (narrowTransform) Span(src []byte, atEOF bool) (n int, err error) { - for n < len(src) { - if src[n] < utf8.RuneSelf { - // ASCII fast path. - for n++; n < len(src) && src[n] < utf8.RuneSelf; n++ { - } - continue - } - v, size := trie.lookup(src[n:]) - if size == 0 { // incomplete UTF-8 encoding - if !atEOF { - err = transform.ErrShortSrc - } else { - n = len(src) - } - break - } - if k := elem(v).kind(); byte(v) == 0 || k != EastAsianFullwidth && k != EastAsianWide && k != EastAsianAmbiguous { - } else { - err = transform.ErrEndOfSpan - break - } - n += size - } - return n, err -} - -func (narrowTransform) Transform(dst, src []byte, atEOF bool) (nDst, nSrc int, err error) { - for nSrc < len(src) { - if src[nSrc] < utf8.RuneSelf { - // ASCII fast path. - start, end := nSrc, len(src) - if d := len(dst) - nDst; d < end-start { - end = nSrc + d - } - for nSrc++; nSrc < end && src[nSrc] < utf8.RuneSelf; nSrc++ { - } - n := copy(dst[nDst:], src[start:nSrc]) - if nDst += n; nDst == len(dst) { - nSrc = start + n - if nSrc == len(src) { - return nDst, nSrc, nil - } - if src[nSrc] < utf8.RuneSelf { - return nDst, nSrc, transform.ErrShortDst - } - } - continue - } - v, size := trie.lookup(src[nSrc:]) - if size == 0 { // incomplete UTF-8 encoding - if !atEOF { - return nDst, nSrc, transform.ErrShortSrc - } - size = 1 // gobble 1 byte - } - if k := elem(v).kind(); byte(v) == 0 || k != EastAsianFullwidth && k != EastAsianWide && k != EastAsianAmbiguous { - if size != copy(dst[nDst:], src[nSrc:nSrc+size]) { - return nDst, nSrc, transform.ErrShortDst - } - nDst += size - } else { - data := inverseData[byte(v)] - if len(dst)-nDst < int(data[0]) { - return nDst, nSrc, transform.ErrShortDst - } - i := 1 - for end := int(data[0]); i < end; i++ { - dst[nDst] = data[i] - nDst++ - } - dst[nDst] = data[i] ^ src[nSrc+size-1] - nDst++ - } - nSrc += size - } - return nDst, nSrc, nil -} - -type wideTransform struct { - transform.NopResetter -} - -func (wideTransform) Span(src []byte, atEOF bool) (n int, err error) { - for n < len(src) { - // TODO: Consider ASCII fast path. Special-casing ASCII handling can - // reduce the ns/op of BenchmarkWideASCII by about 30%. This is probably - // not enough to warrant the extra code and complexity. 
- v, size := trie.lookup(src[n:]) - if size == 0 { // incomplete UTF-8 encoding - if !atEOF { - err = transform.ErrShortSrc - } else { - n = len(src) - } - break - } - if k := elem(v).kind(); byte(v) == 0 || k != EastAsianHalfwidth && k != EastAsianNarrow { - } else { - err = transform.ErrEndOfSpan - break - } - n += size - } - return n, err -} - -func (wideTransform) Transform(dst, src []byte, atEOF bool) (nDst, nSrc int, err error) { - for nSrc < len(src) { - // TODO: Consider ASCII fast path. Special-casing ASCII handling can - // reduce the ns/op of BenchmarkWideASCII by about 30%. This is probably - // not enough to warrant the extra code and complexity. - v, size := trie.lookup(src[nSrc:]) - if size == 0 { // incomplete UTF-8 encoding - if !atEOF { - return nDst, nSrc, transform.ErrShortSrc - } - size = 1 // gobble 1 byte - } - if k := elem(v).kind(); byte(v) == 0 || k != EastAsianHalfwidth && k != EastAsianNarrow { - if size != copy(dst[nDst:], src[nSrc:nSrc+size]) { - return nDst, nSrc, transform.ErrShortDst - } - nDst += size - } else { - data := inverseData[byte(v)] - if len(dst)-nDst < int(data[0]) { - return nDst, nSrc, transform.ErrShortDst - } - i := 1 - for end := int(data[0]); i < end; i++ { - dst[nDst] = data[i] - nDst++ - } - dst[nDst] = data[i] ^ src[nSrc+size-1] - nDst++ - } - nSrc += size - } - return nDst, nSrc, nil -} diff --git a/vendor/golang.org/x/text/width/trieval.go b/vendor/golang.org/x/text/width/trieval.go deleted file mode 100644 index ca8e45fd1..000000000 --- a/vendor/golang.org/x/text/width/trieval.go +++ /dev/null @@ -1,30 +0,0 @@ -// Code generated by running "go generate" in golang.org/x/text. DO NOT EDIT. - -package width - -// elem is an entry of the width trie. The high byte is used to encode the type -// of the rune. The low byte is used to store the index to a mapping entry in -// the inverseData array. -type elem uint16 - -const ( - tagNeutral elem = iota << typeShift - tagAmbiguous - tagWide - tagNarrow - tagFullwidth - tagHalfwidth -) - -const ( - numTypeBits = 3 - typeShift = 16 - numTypeBits - - // tagNeedsFold is true for all fullwidth and halfwidth runes except for - // the Won sign U+20A9. - tagNeedsFold = 0x1000 - - // The Korean Won sign is halfwidth, but SHOULD NOT be mapped to a wide - // variant. - wonSign rune = 0x20A9 -) diff --git a/vendor/golang.org/x/text/width/width.go b/vendor/golang.org/x/text/width/width.go deleted file mode 100644 index 29c7509be..000000000 --- a/vendor/golang.org/x/text/width/width.go +++ /dev/null @@ -1,206 +0,0 @@ -// Copyright 2015 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -//go:generate stringer -type=Kind -//go:generate go run gen.go gen_common.go gen_trieval.go - -// Package width provides functionality for handling different widths in text. -// -// Wide characters behave like ideographs; they tend to allow line breaks after -// each character and remain upright in vertical text layout. Narrow characters -// are kept together in words or runs that are rotated sideways in vertical text -// layout. -// -// For more information, see https://unicode.org/reports/tr11/. -package width // import "golang.org/x/text/width" - -import ( - "unicode/utf8" - - "golang.org/x/text/transform" -) - -// TODO -// 1) Reduce table size by compressing blocks. -// 2) API proposition for computing display length -// (approximation, fixed pitch only). -// 3) Implement display length. 
- -// Kind indicates the type of width property as defined in https://unicode.org/reports/tr11/. -type Kind int - -const ( - // Neutral characters do not occur in legacy East Asian character sets. - Neutral Kind = iota - - // EastAsianAmbiguous characters that can be sometimes wide and sometimes - // narrow and require additional information not contained in the character - // code to further resolve their width. - EastAsianAmbiguous - - // EastAsianWide characters are wide in its usual form. They occur only in - // the context of East Asian typography. These runes may have explicit - // halfwidth counterparts. - EastAsianWide - - // EastAsianNarrow characters are narrow in its usual form. They often have - // fullwidth counterparts. - EastAsianNarrow - - // Note: there exist Narrow runes that do not have fullwidth or wide - // counterparts, despite what the definition says (e.g. U+27E6). - - // EastAsianFullwidth characters have a compatibility decompositions of type - // wide that map to a narrow counterpart. - EastAsianFullwidth - - // EastAsianHalfwidth characters have a compatibility decomposition of type - // narrow that map to a wide or ambiguous counterpart, plus U+20A9 ₩ WON - // SIGN. - EastAsianHalfwidth - - // Note: there exist runes that have a halfwidth counterparts but that are - // classified as Ambiguous, rather than wide (e.g. U+2190). -) - -// TODO: the generated tries need to return size 1 for invalid runes for the -// width to be computed correctly (each byte should render width 1) - -var trie = newWidthTrie(0) - -// Lookup reports the Properties of the first rune in b and the number of bytes -// of its UTF-8 encoding. -func Lookup(b []byte) (p Properties, size int) { - v, sz := trie.lookup(b) - return Properties{elem(v), b[sz-1]}, sz -} - -// LookupString reports the Properties of the first rune in s and the number of -// bytes of its UTF-8 encoding. -func LookupString(s string) (p Properties, size int) { - v, sz := trie.lookupString(s) - return Properties{elem(v), s[sz-1]}, sz -} - -// LookupRune reports the Properties of rune r. -func LookupRune(r rune) Properties { - var buf [4]byte - n := utf8.EncodeRune(buf[:], r) - v, _ := trie.lookup(buf[:n]) - last := byte(r) - if r >= utf8.RuneSelf { - last = 0x80 + byte(r&0x3f) - } - return Properties{elem(v), last} -} - -// Properties provides access to width properties of a rune. -type Properties struct { - elem elem - last byte -} - -func (e elem) kind() Kind { - return Kind(e >> typeShift) -} - -// Kind returns the Kind of a rune as defined in Unicode TR #11. -// See https://unicode.org/reports/tr11/ for more details. -func (p Properties) Kind() Kind { - return p.elem.kind() -} - -// Folded returns the folded variant of a rune or 0 if the rune is canonical. -func (p Properties) Folded() rune { - if p.elem&tagNeedsFold != 0 { - buf := inverseData[byte(p.elem)] - buf[buf[0]] ^= p.last - r, _ := utf8.DecodeRune(buf[1 : 1+buf[0]]) - return r - } - return 0 -} - -// Narrow returns the narrow variant of a rune or 0 if the rune is already -// narrow or doesn't have a narrow variant. -func (p Properties) Narrow() rune { - if k := p.elem.kind(); byte(p.elem) != 0 && (k == EastAsianFullwidth || k == EastAsianWide || k == EastAsianAmbiguous) { - buf := inverseData[byte(p.elem)] - buf[buf[0]] ^= p.last - r, _ := utf8.DecodeRune(buf[1 : 1+buf[0]]) - return r - } - return 0 -} - -// Wide returns the wide variant of a rune or 0 if the rune is already -// wide or doesn't have a wide variant. 
-func (p Properties) Wide() rune { - if k := p.elem.kind(); byte(p.elem) != 0 && (k == EastAsianHalfwidth || k == EastAsianNarrow) { - buf := inverseData[byte(p.elem)] - buf[buf[0]] ^= p.last - r, _ := utf8.DecodeRune(buf[1 : 1+buf[0]]) - return r - } - return 0 -} - -// TODO for Properties: -// - Add Fullwidth/Halfwidth or Inverted methods for computing variants -// mapping. -// - Add width information (including information on non-spacing runes). - -// Transformer implements the transform.Transformer interface. -type Transformer struct { - t transform.SpanningTransformer -} - -// Reset implements the transform.Transformer interface. -func (t Transformer) Reset() { t.t.Reset() } - -// Transform implements the transform.Transformer interface. -func (t Transformer) Transform(dst, src []byte, atEOF bool) (nDst, nSrc int, err error) { - return t.t.Transform(dst, src, atEOF) -} - -// Span implements the transform.SpanningTransformer interface. -func (t Transformer) Span(src []byte, atEOF bool) (n int, err error) { - return t.t.Span(src, atEOF) -} - -// Bytes returns a new byte slice with the result of applying t to b. -func (t Transformer) Bytes(b []byte) []byte { - b, _, _ = transform.Bytes(t, b) - return b -} - -// String returns a string with the result of applying t to s. -func (t Transformer) String(s string) string { - s, _, _ = transform.String(t, s) - return s -} - -var ( - // Fold is a transform that maps all runes to their canonical width. - // - // Note that the NFKC and NFKD transforms in golang.org/x/text/unicode/norm - // provide a more generic folding mechanism. - Fold Transformer = Transformer{foldTransform{}} - - // Widen is a transform that maps runes to their wide variant, if - // available. - Widen Transformer = Transformer{wideTransform{}} - - // Narrow is a transform that maps runes to their narrow variant, if - // available. - Narrow Transformer = Transformer{narrowTransform{}} -) - -// TODO: Consider the following options: -// - Treat Ambiguous runes that have a halfwidth counterpart as wide, or some -// generalized variant of this. -// - Consider a wide Won character to be the default width (or some generalized -// variant of this). -// - Filter the set of characters that gets converted (the preferred approach is -// to allow applying filters to transforms). diff --git a/vendor/golang.org/x/tools/go/analysis/analysis.go b/vendor/golang.org/x/tools/go/analysis/analysis.go deleted file mode 100644 index 5da33c7e6..000000000 --- a/vendor/golang.org/x/tools/go/analysis/analysis.go +++ /dev/null @@ -1,227 +0,0 @@ -// Copyright 2018 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package analysis - -import ( - "flag" - "fmt" - "go/ast" - "go/token" - "go/types" - "reflect" -) - -// An Analyzer describes an analysis function and its options. -type Analyzer struct { - // The Name of the analyzer must be a valid Go identifier - // as it may appear in command-line flags, URLs, and so on. - Name string - - // Doc is the documentation for the analyzer. - // The part before the first "\n\n" is the title - // (no capital or period, max ~60 letters). - Doc string - - // URL holds an optional link to a web page with additional - // documentation for this analyzer. - URL string - - // Flags defines any flags accepted by the analyzer. - // The manner in which these flags are exposed to the user - // depends on the driver which runs the analyzer. 
- Flags flag.FlagSet - - // Run applies the analyzer to a package. - // It returns an error if the analyzer failed. - // - // On success, the Run function may return a result - // computed by the Analyzer; its type must match ResultType. - // The driver makes this result available as an input to - // another Analyzer that depends directly on this one (see - // Requires) when it analyzes the same package. - // - // To pass analysis results between packages (and thus - // potentially between address spaces), use Facts, which are - // serializable. - Run func(*Pass) (interface{}, error) - - // RunDespiteErrors allows the driver to invoke - // the Run method of this analyzer even on a - // package that contains parse or type errors. - // The Pass.TypeErrors field may consequently be non-empty. - RunDespiteErrors bool - - // Requires is a set of analyzers that must run successfully - // before this one on a given package. This analyzer may inspect - // the outputs produced by each analyzer in Requires. - // The graph over analyzers implied by Requires edges must be acyclic. - // - // Requires establishes a "horizontal" dependency between - // analysis passes (different analyzers, same package). - Requires []*Analyzer - - // ResultType is the type of the optional result of the Run function. - ResultType reflect.Type - - // FactTypes indicates that this analyzer imports and exports - // Facts of the specified concrete types. - // An analyzer that uses facts may assume that its import - // dependencies have been similarly analyzed before it runs. - // Facts must be pointers. - // - // FactTypes establishes a "vertical" dependency between - // analysis passes (same analyzer, different packages). - FactTypes []Fact -} - -func (a *Analyzer) String() string { return a.Name } - -// A Pass provides information to the Run function that -// applies a specific analyzer to a single Go package. -// -// It forms the interface between the analysis logic and the driver -// program, and has both input and an output components. -// -// As in a compiler, one pass may depend on the result computed by another. -// -// The Run function should not call any of the Pass functions concurrently. -type Pass struct { - Analyzer *Analyzer // the identity of the current analyzer - - // syntax and type information - Fset *token.FileSet // file position information - Files []*ast.File // the abstract syntax tree of each file - OtherFiles []string // names of non-Go files of this package - IgnoredFiles []string // names of ignored source files in this package - Pkg *types.Package // type information about the package - TypesInfo *types.Info // type information about the syntax trees - TypesSizes types.Sizes // function for computing sizes of types - TypeErrors []types.Error // type errors (only if Analyzer.RunDespiteErrors) - - // Report reports a Diagnostic, a finding about a specific location - // in the analyzed source code such as a potential mistake. - // It may be called by the Run function. - Report func(Diagnostic) - - // ResultOf provides the inputs to this analysis pass, which are - // the corresponding results of its prerequisite analyzers. - // The map keys are the elements of Analysis.Required, - // and the type of each corresponding value is the required - // analysis's ResultType. - ResultOf map[*Analyzer]interface{} - - // -- facts -- - - // ImportObjectFact retrieves a fact associated with obj. - // Given a value ptr of type *T, where *T satisfies Fact, - // ImportObjectFact copies the value to *ptr. 
- // - // ImportObjectFact panics if called after the pass is complete. - // ImportObjectFact is not concurrency-safe. - ImportObjectFact func(obj types.Object, fact Fact) bool - - // ImportPackageFact retrieves a fact associated with package pkg, - // which must be this package or one of its dependencies. - // See comments for ImportObjectFact. - ImportPackageFact func(pkg *types.Package, fact Fact) bool - - // ExportObjectFact associates a fact of type *T with the obj, - // replacing any previous fact of that type. - // - // ExportObjectFact panics if it is called after the pass is - // complete, or if obj does not belong to the package being analyzed. - // ExportObjectFact is not concurrency-safe. - ExportObjectFact func(obj types.Object, fact Fact) - - // ExportPackageFact associates a fact with the current package. - // See comments for ExportObjectFact. - ExportPackageFact func(fact Fact) - - // AllPackageFacts returns a new slice containing all package - // facts of the analysis's FactTypes in unspecified order. - AllPackageFacts func() []PackageFact - - // AllObjectFacts returns a new slice containing all object - // facts of the analysis's FactTypes in unspecified order. - AllObjectFacts func() []ObjectFact - - /* Further fields may be added in future. */ -} - -// PackageFact is a package together with an associated fact. -type PackageFact struct { - Package *types.Package - Fact Fact -} - -// ObjectFact is an object together with an associated fact. -type ObjectFact struct { - Object types.Object - Fact Fact -} - -// Reportf is a helper function that reports a Diagnostic using the -// specified position and formatted error message. -func (pass *Pass) Reportf(pos token.Pos, format string, args ...interface{}) { - msg := fmt.Sprintf(format, args...) - pass.Report(Diagnostic{Pos: pos, Message: msg}) -} - -// The Range interface provides a range. It's equivalent to and satisfied by -// ast.Node. -type Range interface { - Pos() token.Pos // position of first character belonging to the node - End() token.Pos // position of first character immediately after the node -} - -// ReportRangef is a helper function that reports a Diagnostic using the -// range provided. ast.Node values can be passed in as the range because -// they satisfy the Range interface. -func (pass *Pass) ReportRangef(rng Range, format string, args ...interface{}) { - msg := fmt.Sprintf(format, args...) - pass.Report(Diagnostic{Pos: rng.Pos(), End: rng.End(), Message: msg}) -} - -func (pass *Pass) String() string { - return fmt.Sprintf("%s@%s", pass.Analyzer.Name, pass.Pkg.Path()) -} - -// A Fact is an intermediate fact produced during analysis. -// -// Each fact is associated with a named declaration (a types.Object) or -// with a package as a whole. A single object or package may have -// multiple associated facts, but only one of any particular fact type. -// -// A Fact represents a predicate such as "never returns", but does not -// represent the subject of the predicate such as "function F" or "package P". -// -// Facts may be produced in one analysis pass and consumed by another -// analysis pass even if these are in different address spaces. -// If package P imports Q, all facts about Q produced during -// analysis of that package will be available during later analysis of P. -// Facts are analogous to type export data in a build system: -// just as export data enables separate compilation of several passes, -// facts enable "separate analysis". 
-// -// Each pass (a, p) starts with the set of facts produced by the -// same analyzer a applied to the packages directly imported by p. -// The analysis may add facts to the set, and they may be exported in turn. -// An analysis's Run function may retrieve facts by calling -// Pass.Import{Object,Package}Fact and update them using -// Pass.Export{Object,Package}Fact. -// -// A fact is logically private to its Analysis. To pass values -// between different analyzers, use the results mechanism; -// see Analyzer.Requires, Analyzer.ResultType, and Pass.ResultOf. -// -// A Fact type must be a pointer. -// Facts are encoded and decoded using encoding/gob. -// A Fact may implement the GobEncoder/GobDecoder interfaces -// to customize its encoding. Fact encoding should not fail. -// -// A Fact should not be modified once exported. -type Fact interface { - AFact() // dummy method to avoid type errors -} diff --git a/vendor/golang.org/x/tools/go/analysis/diagnostic.go b/vendor/golang.org/x/tools/go/analysis/diagnostic.go deleted file mode 100644 index c638f2758..000000000 --- a/vendor/golang.org/x/tools/go/analysis/diagnostic.go +++ /dev/null @@ -1,73 +0,0 @@ -// Copyright 2019 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package analysis - -import "go/token" - -// A Diagnostic is a message associated with a source location or range. -// -// An Analyzer may return a variety of diagnostics; the optional Category, -// which should be a constant, may be used to classify them. -// It is primarily intended to make it easy to look up documentation. -// -// If End is provided, the diagnostic is specified to apply to the range between -// Pos and End. -type Diagnostic struct { - Pos token.Pos - End token.Pos // optional - Category string // optional - Message string - - // URL is the optional location of a web page that provides - // additional documentation for this diagnostic. - // - // If URL is empty but a Category is specified, then the - // Analysis driver should treat the URL as "#"+Category. - // - // The URL may be relative. If so, the base URL is that of the - // Analyzer that produced the diagnostic; - // see https://pkg.go.dev/net/url#URL.ResolveReference. - URL string - - // SuggestedFixes is an optional list of fixes to address the - // problem described by the diagnostic, each one representing - // an alternative strategy; at most one may be applied. - SuggestedFixes []SuggestedFix - - // Related contains optional secondary positions and messages - // related to the primary diagnostic. - Related []RelatedInformation -} - -// RelatedInformation contains information related to a diagnostic. -// For example, a diagnostic that flags duplicated declarations of a -// variable may include one RelatedInformation per existing -// declaration. -type RelatedInformation struct { - Pos token.Pos - End token.Pos // optional - Message string -} - -// A SuggestedFix is a code change associated with a Diagnostic that a -// user can choose to apply to their code. Usually the SuggestedFix is -// meant to fix the issue flagged by the diagnostic. -// -// The TextEdits must not overlap, nor contain edits for other packages. -type SuggestedFix struct { - // A description for this suggested fix to be shown to a user deciding - // whether to accept it. - Message string - TextEdits []TextEdit -} - -// A TextEdit represents the replacement of the code between Pos and End with the new text. 
-// Each TextEdit should apply to a single file. End should not be earlier in the file than Pos. -type TextEdit struct { - // For a pure insertion, End can either be set to Pos or token.NoPos. - Pos token.Pos - End token.Pos - NewText []byte -} diff --git a/vendor/golang.org/x/tools/go/analysis/doc.go b/vendor/golang.org/x/tools/go/analysis/doc.go deleted file mode 100644 index 44867d599..000000000 --- a/vendor/golang.org/x/tools/go/analysis/doc.go +++ /dev/null @@ -1,319 +0,0 @@ -// Copyright 2018 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -/* -Package analysis defines the interface between a modular static -analysis and an analysis driver program. - -# Background - -A static analysis is a function that inspects a package of Go code and -reports a set of diagnostics (typically mistakes in the code), and -perhaps produces other results as well, such as suggested refactorings -or other facts. An analysis that reports mistakes is informally called a -"checker". For example, the printf checker reports mistakes in -fmt.Printf format strings. - -A "modular" analysis is one that inspects one package at a time but can -save information from a lower-level package and use it when inspecting a -higher-level package, analogous to separate compilation in a toolchain. -The printf checker is modular: when it discovers that a function such as -log.Fatalf delegates to fmt.Printf, it records this fact, and checks -calls to that function too, including calls made from another package. - -By implementing a common interface, checkers from a variety of sources -can be easily selected, incorporated, and reused in a wide range of -driver programs including command-line tools (such as vet), text editors and -IDEs, build and test systems (such as go build, Bazel, or Buck), test -frameworks, code review tools, code-base indexers (such as SourceGraph), -documentation viewers (such as godoc), batch pipelines for large code -bases, and so on. - -# Analyzer - -The primary type in the API is Analyzer. An Analyzer statically -describes an analysis function: its name, documentation, flags, -relationship to other analyzers, and of course, its logic. - -To define an analysis, a user declares a (logically constant) variable -of type Analyzer. Here is a typical example from one of the analyzers in -the go/analysis/passes/ subdirectory: - - package unusedresult - - var Analyzer = &analysis.Analyzer{ - Name: "unusedresult", - Doc: "check for unused results of calls to some functions", - Run: run, - ... - } - - func run(pass *analysis.Pass) (interface{}, error) { - ... - } - -An analysis driver is a program such as vet that runs a set of -analyses and prints the diagnostics that they report. -The driver program must import the list of Analyzers it needs. -Typically each Analyzer resides in a separate package. -To add a new Analyzer to an existing driver, add another item to the list: - - import ( "unusedresult"; "nilness"; "printf" ) - - var analyses = []*analysis.Analyzer{ - unusedresult.Analyzer, - nilness.Analyzer, - printf.Analyzer, - } - -A driver may use the name, flags, and documentation to provide on-line -help that describes the analyses it performs. -The doc comment contains a brief one-line summary, -optionally followed by paragraphs of explanation. 
- -The Analyzer type has more fields besides those shown above: - - type Analyzer struct { - Name string - Doc string - Flags flag.FlagSet - Run func(*Pass) (interface{}, error) - RunDespiteErrors bool - ResultType reflect.Type - Requires []*Analyzer - FactTypes []Fact - } - -The Flags field declares a set of named (global) flag variables that -control analysis behavior. Unlike vet, analysis flags are not declared -directly in the command line FlagSet; it is up to the driver to set the -flag variables. A driver for a single analysis, a, might expose its flag -f directly on the command line as -f, whereas a driver for multiple -analyses might prefix the flag name by the analysis name (-a.f) to avoid -ambiguity. An IDE might expose the flags through a graphical interface, -and a batch pipeline might configure them from a config file. -See the "findcall" analyzer for an example of flags in action. - -The RunDespiteErrors flag indicates whether the analysis is equipped to -handle ill-typed code. If not, the driver will skip the analysis if -there were parse or type errors. -The optional ResultType field specifies the type of the result value -computed by this analysis and made available to other analyses. -The Requires field specifies a list of analyses upon which -this one depends and whose results it may access, and it constrains the -order in which a driver may run analyses. -The FactTypes field is discussed in the section on Modularity. -The analysis package provides a Validate function to perform basic -sanity checks on an Analyzer, such as that its Requires graph is -acyclic, its fact and result types are unique, and so on. - -Finally, the Run field contains a function to be called by the driver to -execute the analysis on a single package. The driver passes it an -instance of the Pass type. - -# Pass - -A Pass describes a single unit of work: the application of a particular -Analyzer to a particular package of Go code. -The Pass provides information to the Analyzer's Run function about the -package being analyzed, and provides operations to the Run function for -reporting diagnostics and other information back to the driver. - - type Pass struct { - Fset *token.FileSet - Files []*ast.File - OtherFiles []string - IgnoredFiles []string - Pkg *types.Package - TypesInfo *types.Info - ResultOf map[*Analyzer]interface{} - Report func(Diagnostic) - ... - } - -The Fset, Files, Pkg, and TypesInfo fields provide the syntax trees, -type information, and source positions for a single package of Go code. - -The OtherFiles field provides the names, but not the contents, of non-Go -files such as assembly that are part of this package. See the "asmdecl" -or "buildtags" analyzers for examples of loading non-Go files and reporting -diagnostics against them. - -The IgnoredFiles field provides the names, but not the contents, -of ignored Go and non-Go source files that are not part of this package -with the current build configuration but may be part of other build -configurations. See the "buildtags" analyzer for an example of loading -and checking IgnoredFiles. - -The ResultOf field provides the results computed by the analyzers -required by this one, as expressed in its Analyzer.Requires field. The -driver runs the required analyzers first and makes their results -available in this map. Each Analyzer must return a value of the type -described in its Analyzer.ResultType field. 
-For example, the "ctrlflow" analyzer returns a *ctrlflow.CFGs, which -provides a control-flow graph for each function in the package (see -golang.org/x/tools/go/cfg); the "inspect" analyzer returns a value that -enables other Analyzers to traverse the syntax trees of the package more -efficiently; and the "buildssa" analyzer constructs an SSA-form -intermediate representation. -Each of these Analyzers extends the capabilities of later Analyzers -without adding a dependency to the core API, so an analysis tool pays -only for the extensions it needs. - -The Report function emits a diagnostic, a message associated with a -source position. For most analyses, diagnostics are their primary -result. -For convenience, Pass provides a helper method, Reportf, to report a new -diagnostic by formatting a string. -Diagnostic is defined as: - - type Diagnostic struct { - Pos token.Pos - Category string // optional - Message string - } - -The optional Category field is a short identifier that classifies the -kind of message when an analysis produces several kinds of diagnostic. - -The Diagnostic struct does not have a field to indicate its severity -because opinions about the relative importance of Analyzers and their -diagnostics vary widely among users. The design of this framework does -not hold each Analyzer responsible for identifying the severity of its -diagnostics. Instead, we expect that drivers will allow the user to -customize the filtering and prioritization of diagnostics based on the -producing Analyzer and optional Category, according to the user's -preferences. - -Most Analyzers inspect typed Go syntax trees, but a few, such as asmdecl -and buildtag, inspect the raw text of Go source files or even non-Go -files such as assembly. To report a diagnostic against a line of a -raw text file, use the following sequence: - - content, err := os.ReadFile(filename) - if err != nil { ... } - tf := fset.AddFile(filename, -1, len(content)) - tf.SetLinesForContent(content) - ... - pass.Reportf(tf.LineStart(line), "oops") - -# Modular analysis with Facts - -To improve efficiency and scalability, large programs are routinely -built using separate compilation: units of the program are compiled -separately, and recompiled only when one of their dependencies changes; -independent modules may be compiled in parallel. The same technique may -be applied to static analyses, for the same benefits. Such analyses are -described as "modular". - -A compiler’s type checker is an example of a modular static analysis. -Many other checkers we would like to apply to Go programs can be -understood as alternative or non-standard type systems. For example, -vet's printf checker infers whether a function has the "printf wrapper" -type, and it applies stricter checks to calls of such functions. In -addition, it records which functions are printf wrappers for use by -later analysis passes to identify other printf wrappers by induction. -A result such as “f is a printf wrapper” that is not interesting by -itself but serves as a stepping stone to an interesting result (such as -a diagnostic) is called a "fact". - -The analysis API allows an analysis to define new types of facts, to -associate facts of these types with objects (named entities) declared -within the current package, or with the package as a whole, and to query -for an existing fact of a given type associated with an object or -package. 
- -An Analyzer that uses facts must declare their types: - - var Analyzer = &analysis.Analyzer{ - Name: "printf", - FactTypes: []analysis.Fact{new(isWrapper)}, - ... - } - - type isWrapper struct{} // => *types.Func f “is a printf wrapper” - -The driver program ensures that facts for a pass’s dependencies are -generated before analyzing the package and is responsible for propagating -facts from one package to another, possibly across address spaces. -Consequently, Facts must be serializable. The API requires that drivers -use the gob encoding, an efficient, robust, self-describing binary -protocol. A fact type may implement the GobEncoder/GobDecoder interfaces -if the default encoding is unsuitable. Facts should be stateless. -Because serialized facts may appear within build outputs, the gob encoding -of a fact must be deterministic, to avoid spurious cache misses in -build systems that use content-addressable caches. -The driver makes a single call to the gob encoder for all facts -exported by a given analysis pass, so that the topology of -shared data structures referenced by multiple facts is preserved. - -The Pass type has functions to import and export facts, -associated either with an object or with a package: - - type Pass struct { - ... - ExportObjectFact func(types.Object, Fact) - ImportObjectFact func(types.Object, Fact) bool - - ExportPackageFact func(fact Fact) - ImportPackageFact func(*types.Package, Fact) bool - } - -An Analyzer may only export facts associated with the current package or -its objects, though it may import facts from any package or object that -is an import dependency of the current package. - -Conceptually, ExportObjectFact(obj, fact) inserts fact into a hidden map keyed by -the pair (obj, TypeOf(fact)), and the ImportObjectFact function -retrieves the entry from this map and copies its value into the variable -pointed to by fact. This scheme assumes that the concrete type of fact -is a pointer; this assumption is checked by the Validate function. -See the "printf" analyzer for an example of object facts in action. - -Some driver implementations (such as those based on Bazel and Blaze) do -not currently apply analyzers to packages of the standard library. -Therefore, for best results, analyzer authors should not rely on -analysis facts being available for standard packages. -For example, although the printf checker is capable of deducing during -analysis of the log package that log.Printf is a printf wrapper, -this fact is built in to the analyzer so that it correctly checks -calls to log.Printf even when run in a driver that does not apply -it to standard packages. We would like to remove this limitation in future. - -# Testing an Analyzer - -The analysistest subpackage provides utilities for testing an Analyzer. -In a few lines of code, it is possible to run an analyzer on a package -of testdata files and check that it reported all the expected -diagnostics and facts (and no more). Expectations are expressed using -"// want ..." comments in the input code. - -# Standalone commands - -Analyzers are provided in the form of packages that a driver program is -expected to import. The vet command imports a set of several analyzers, -but users may wish to define their own analysis commands that perform -additional checks. To simplify the task of creating an analysis command, -either for a single analyzer or for a whole suite, we provide the -singlechecker and multichecker subpackages. 
- -The singlechecker package provides the main function for a command that -runs one analyzer. By convention, each analyzer such as -go/analysis/passes/findcall should be accompanied by a singlechecker-based -command such as go/analysis/passes/findcall/cmd/findcall, defined in its -entirety as: - - package main - - import ( - "golang.org/x/tools/go/analysis/passes/findcall" - "golang.org/x/tools/go/analysis/singlechecker" - ) - - func main() { singlechecker.Main(findcall.Analyzer) } - -A tool that provides multiple analyzers can use multichecker in a -similar way, giving it the list of Analyzers. -*/ -package analysis diff --git a/vendor/golang.org/x/tools/go/analysis/passes/appends/appends.go b/vendor/golang.org/x/tools/go/analysis/passes/appends/appends.go deleted file mode 100644 index 6976f0d90..000000000 --- a/vendor/golang.org/x/tools/go/analysis/passes/appends/appends.go +++ /dev/null @@ -1,47 +0,0 @@ -// Copyright 2023 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Package appends defines an Analyzer that detects -// if there is only one variable in append. -package appends - -import ( - _ "embed" - "go/ast" - "go/types" - - "golang.org/x/tools/go/analysis" - "golang.org/x/tools/go/analysis/passes/inspect" - "golang.org/x/tools/go/analysis/passes/internal/analysisutil" - "golang.org/x/tools/go/ast/inspector" - "golang.org/x/tools/go/types/typeutil" -) - -//go:embed doc.go -var doc string - -var Analyzer = &analysis.Analyzer{ - Name: "appends", - Doc: analysisutil.MustExtractDoc(doc, "appends"), - URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/appends", - Requires: []*analysis.Analyzer{inspect.Analyzer}, - Run: run, -} - -func run(pass *analysis.Pass) (interface{}, error) { - inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) - - nodeFilter := []ast.Node{ - (*ast.CallExpr)(nil), - } - inspect.Preorder(nodeFilter, func(n ast.Node) { - call := n.(*ast.CallExpr) - b, ok := typeutil.Callee(pass.TypesInfo, call).(*types.Builtin) - if ok && b.Name() == "append" && len(call.Args) == 1 { - pass.ReportRangef(call, "append with no values") - } - }) - - return nil, nil -} diff --git a/vendor/golang.org/x/tools/go/analysis/passes/appends/doc.go b/vendor/golang.org/x/tools/go/analysis/passes/appends/doc.go deleted file mode 100644 index 2e6a2e010..000000000 --- a/vendor/golang.org/x/tools/go/analysis/passes/appends/doc.go +++ /dev/null @@ -1,20 +0,0 @@ -// Copyright 2023 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Package appends defines an Analyzer that detects -// if there is only one variable in append. -// -// # Analyzer appends -// -// appends: check for missing values after append -// -// This checker reports calls to append that pass -// no values to be appended to the slice. -// -// s := []string{"a", "b", "c"} -// _ = append(s) -// -// Such calls are always no-ops and often indicate an -// underlying mistake. -package appends diff --git a/vendor/golang.org/x/tools/go/analysis/passes/asmdecl/asmdecl.go b/vendor/golang.org/x/tools/go/analysis/passes/asmdecl/asmdecl.go deleted file mode 100644 index f2ca95aa9..000000000 --- a/vendor/golang.org/x/tools/go/analysis/passes/asmdecl/asmdecl.go +++ /dev/null @@ -1,826 +0,0 @@ -// Copyright 2013 The Go Authors. All rights reserved. 
-// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Package asmdecl defines an Analyzer that reports mismatches between -// assembly files and Go declarations. -package asmdecl - -import ( - "bytes" - "fmt" - "go/ast" - "go/build" - "go/token" - "go/types" - "log" - "regexp" - "strconv" - "strings" - - "golang.org/x/tools/go/analysis" - "golang.org/x/tools/go/analysis/passes/internal/analysisutil" -) - -const Doc = "report mismatches between assembly files and Go declarations" - -var Analyzer = &analysis.Analyzer{ - Name: "asmdecl", - Doc: Doc, - URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/asmdecl", - Run: run, -} - -// 'kind' is a kind of assembly variable. -// The kinds 1, 2, 4, 8 stand for values of that size. -type asmKind int - -// These special kinds are not valid sizes. -const ( - asmString asmKind = 100 + iota - asmSlice - asmArray - asmInterface - asmEmptyInterface - asmStruct - asmComplex -) - -// An asmArch describes assembly parameters for an architecture -type asmArch struct { - name string - bigEndian bool - stack string - lr bool - // retRegs is a list of registers for return value in register ABI (ABIInternal). - // For now, as we only check whether we write to any result, here we only need to - // include the first integer register and first floating-point register. Accessing - // any of them counts as writing to result. - retRegs []string - // calculated during initialization - sizes types.Sizes - intSize int - ptrSize int - maxAlign int -} - -// An asmFunc describes the expected variables for a function on a given architecture. -type asmFunc struct { - arch *asmArch - size int // size of all arguments - vars map[string]*asmVar - varByOffset map[int]*asmVar -} - -// An asmVar describes a single assembly variable. 
-type asmVar struct { - name string - kind asmKind - typ string - off int - size int - inner []*asmVar -} - -var ( - asmArch386 = asmArch{name: "386", bigEndian: false, stack: "SP", lr: false} - asmArchArm = asmArch{name: "arm", bigEndian: false, stack: "R13", lr: true} - asmArchArm64 = asmArch{name: "arm64", bigEndian: false, stack: "RSP", lr: true, retRegs: []string{"R0", "F0"}} - asmArchAmd64 = asmArch{name: "amd64", bigEndian: false, stack: "SP", lr: false, retRegs: []string{"AX", "X0"}} - asmArchMips = asmArch{name: "mips", bigEndian: true, stack: "R29", lr: true} - asmArchMipsLE = asmArch{name: "mipsle", bigEndian: false, stack: "R29", lr: true} - asmArchMips64 = asmArch{name: "mips64", bigEndian: true, stack: "R29", lr: true} - asmArchMips64LE = asmArch{name: "mips64le", bigEndian: false, stack: "R29", lr: true} - asmArchPpc64 = asmArch{name: "ppc64", bigEndian: true, stack: "R1", lr: true, retRegs: []string{"R3", "F1"}} - asmArchPpc64LE = asmArch{name: "ppc64le", bigEndian: false, stack: "R1", lr: true, retRegs: []string{"R3", "F1"}} - asmArchRISCV64 = asmArch{name: "riscv64", bigEndian: false, stack: "SP", lr: true, retRegs: []string{"X10", "F10"}} - asmArchS390X = asmArch{name: "s390x", bigEndian: true, stack: "R15", lr: true} - asmArchWasm = asmArch{name: "wasm", bigEndian: false, stack: "SP", lr: false} - asmArchLoong64 = asmArch{name: "loong64", bigEndian: false, stack: "R3", lr: true} - - arches = []*asmArch{ - &asmArch386, - &asmArchArm, - &asmArchArm64, - &asmArchAmd64, - &asmArchMips, - &asmArchMipsLE, - &asmArchMips64, - &asmArchMips64LE, - &asmArchPpc64, - &asmArchPpc64LE, - &asmArchRISCV64, - &asmArchS390X, - &asmArchWasm, - &asmArchLoong64, - } -) - -func init() { - for _, arch := range arches { - arch.sizes = types.SizesFor("gc", arch.name) - if arch.sizes == nil { - // TODO(adonovan): fix: now that asmdecl is not in the standard - // library we cannot assume types.SizesFor is consistent with arches. - // For now, assume 64-bit norms and print a warning. - // But this warning should really be deferred until we attempt to use - // arch, which is very unlikely. Better would be - // to defer size computation until we have Pass.TypesSizes. - arch.sizes = types.SizesFor("gc", "amd64") - log.Printf("unknown architecture %s", arch.name) - } - arch.intSize = int(arch.sizes.Sizeof(types.Typ[types.Int])) - arch.ptrSize = int(arch.sizes.Sizeof(types.Typ[types.UnsafePointer])) - arch.maxAlign = int(arch.sizes.Alignof(types.Typ[types.Int64])) - } -} - -var ( - re = regexp.MustCompile - asmPlusBuild = re(`//\s+\+build\s+([^\n]+)`) - asmTEXT = re(`\bTEXT\b(.*)·([^\(]+)\(SB\)(?:\s*,\s*([0-9A-Z|+()]+))?(?:\s*,\s*\$(-?[0-9]+)(?:-([0-9]+))?)?`) - asmDATA = re(`\b(DATA|GLOBL)\b`) - asmNamedFP = re(`\$?([a-zA-Z0-9_\xFF-\x{10FFFF}]+)(?:\+([0-9]+))\(FP\)`) - asmUnnamedFP = re(`[^+\-0-9](([0-9]+)\(FP\))`) - asmSP = re(`[^+\-0-9](([0-9]+)\(([A-Z0-9]+)\))`) - asmOpcode = re(`^\s*(?:[A-Z0-9a-z_]+:)?\s*([A-Z]+)\s*([^,]*)(?:,\s*(.*))?`) - ppc64Suff = re(`([BHWD])(ZU|Z|U|BR)?$`) - abiSuff = re(`^(.+)<(ABI.+)>$`) -) - -func run(pass *analysis.Pass) (interface{}, error) { - // No work if no assembly files. - var sfiles []string - for _, fname := range pass.OtherFiles { - if strings.HasSuffix(fname, ".s") { - sfiles = append(sfiles, fname) - } - } - if sfiles == nil { - return nil, nil - } - - // Gather declarations. knownFunc[name][arch] is func description. 
- knownFunc := make(map[string]map[string]*asmFunc) - - for _, f := range pass.Files { - for _, decl := range f.Decls { - if decl, ok := decl.(*ast.FuncDecl); ok && decl.Body == nil { - knownFunc[decl.Name.Name] = asmParseDecl(pass, decl) - } - } - } - -Files: - for _, fname := range sfiles { - content, tf, err := analysisutil.ReadFile(pass.Fset, fname) - if err != nil { - return nil, err - } - - // Determine architecture from file name if possible. - var arch string - var archDef *asmArch - for _, a := range arches { - if strings.HasSuffix(fname, "_"+a.name+".s") { - arch = a.name - archDef = a - break - } - } - - lines := strings.SplitAfter(string(content), "\n") - var ( - fn *asmFunc - fnName string - abi string - localSize, argSize int - wroteSP bool - noframe bool - haveRetArg bool - retLine []int - ) - - flushRet := func() { - if fn != nil && fn.vars["ret"] != nil && !haveRetArg && len(retLine) > 0 { - v := fn.vars["ret"] - resultStr := fmt.Sprintf("%d-byte ret+%d(FP)", v.size, v.off) - if abi == "ABIInternal" { - resultStr = "result register" - } - for _, line := range retLine { - pass.Reportf(analysisutil.LineStart(tf, line), "[%s] %s: RET without writing to %s", arch, fnName, resultStr) - } - } - retLine = nil - } - trimABI := func(fnName string) (string, string) { - m := abiSuff.FindStringSubmatch(fnName) - if m != nil { - return m[1], m[2] - } - return fnName, "" - } - for lineno, line := range lines { - lineno++ - - badf := func(format string, args ...interface{}) { - pass.Reportf(analysisutil.LineStart(tf, lineno), "[%s] %s: %s", arch, fnName, fmt.Sprintf(format, args...)) - } - - if arch == "" { - // Determine architecture from +build line if possible. - if m := asmPlusBuild.FindStringSubmatch(line); m != nil { - // There can be multiple architectures in a single +build line, - // so accumulate them all and then prefer the one that - // matches build.Default.GOARCH. - var archCandidates []*asmArch - for _, fld := range strings.Fields(m[1]) { - for _, a := range arches { - if a.name == fld { - archCandidates = append(archCandidates, a) - } - } - } - for _, a := range archCandidates { - if a.name == build.Default.GOARCH { - archCandidates = []*asmArch{a} - break - } - } - if len(archCandidates) > 0 { - arch = archCandidates[0].name - archDef = archCandidates[0] - } - } - } - - // Ignore comments and commented-out code. - if i := strings.Index(line, "//"); i >= 0 { - line = line[:i] - } - - if m := asmTEXT.FindStringSubmatch(line); m != nil { - flushRet() - if arch == "" { - // Arch not specified by filename or build tags. - // Fall back to build.Default.GOARCH. - for _, a := range arches { - if a.name == build.Default.GOARCH { - arch = a.name - archDef = a - break - } - } - if arch == "" { - log.Printf("%s: cannot determine architecture for assembly file", fname) - continue Files - } - } - fnName = m[2] - if pkgPath := strings.TrimSpace(m[1]); pkgPath != "" { - // The assembler uses Unicode division slash within - // identifiers to represent the directory separator. - pkgPath = strings.Replace(pkgPath, "∕", "/", -1) - if pkgPath != pass.Pkg.Path() { - // log.Printf("%s:%d: [%s] cannot check cross-package assembly function: %s is in package %s", fname, lineno, arch, fnName, pkgPath) - fn = nil - fnName = "" - abi = "" - continue - } - } - // Trim off optional ABI selector. 
- fnName, abi = trimABI(fnName) - flag := m[3] - fn = knownFunc[fnName][arch] - if fn != nil { - size, _ := strconv.Atoi(m[5]) - if size != fn.size && (flag != "7" && !strings.Contains(flag, "NOSPLIT") || size != 0) { - badf("wrong argument size %d; expected $...-%d", size, fn.size) - } - } - localSize, _ = strconv.Atoi(m[4]) - localSize += archDef.intSize - if archDef.lr && !strings.Contains(flag, "NOFRAME") { - // Account for caller's saved LR - localSize += archDef.intSize - } - argSize, _ = strconv.Atoi(m[5]) - noframe = strings.Contains(flag, "NOFRAME") - if fn == nil && !strings.Contains(fnName, "<>") && !noframe { - badf("function %s missing Go declaration", fnName) - } - wroteSP = false - haveRetArg = false - continue - } else if strings.Contains(line, "TEXT") && strings.Contains(line, "SB") { - // function, but not visible from Go (didn't match asmTEXT), so stop checking - flushRet() - fn = nil - fnName = "" - abi = "" - continue - } - - if strings.Contains(line, "RET") && !strings.Contains(line, "(SB)") { - // RET f(SB) is a tail call. It is okay to not write the results. - retLine = append(retLine, lineno) - } - - if fnName == "" { - continue - } - - if asmDATA.FindStringSubmatch(line) != nil { - fn = nil - } - - if archDef == nil { - continue - } - - if strings.Contains(line, ", "+archDef.stack) || strings.Contains(line, ",\t"+archDef.stack) || strings.Contains(line, "NOP "+archDef.stack) || strings.Contains(line, "NOP\t"+archDef.stack) { - wroteSP = true - continue - } - - if arch == "wasm" && strings.Contains(line, "CallImport") { - // CallImport is a call out to magic that can write the result. - haveRetArg = true - } - - if abi == "ABIInternal" && !haveRetArg { - for _, reg := range archDef.retRegs { - if strings.Contains(line, reg) { - haveRetArg = true - break - } - } - } - - for _, m := range asmSP.FindAllStringSubmatch(line, -1) { - if m[3] != archDef.stack || wroteSP || noframe { - continue - } - off := 0 - if m[1] != "" { - off, _ = strconv.Atoi(m[2]) - } - if off >= localSize { - if fn != nil { - v := fn.varByOffset[off-localSize] - if v != nil { - badf("%s should be %s+%d(FP)", m[1], v.name, off-localSize) - continue - } - } - if off >= localSize+argSize { - badf("use of %s points beyond argument frame", m[1]) - continue - } - badf("use of %s to access argument frame", m[1]) - } - } - - if fn == nil { - continue - } - - for _, m := range asmUnnamedFP.FindAllStringSubmatch(line, -1) { - off, _ := strconv.Atoi(m[2]) - v := fn.varByOffset[off] - if v != nil { - badf("use of unnamed argument %s; offset %d is %s+%d(FP)", m[1], off, v.name, v.off) - } else { - badf("use of unnamed argument %s", m[1]) - } - } - - for _, m := range asmNamedFP.FindAllStringSubmatch(line, -1) { - name := m[1] - off := 0 - if m[2] != "" { - off, _ = strconv.Atoi(m[2]) - } - if name == "ret" || strings.HasPrefix(name, "ret_") { - haveRetArg = true - } - v := fn.vars[name] - if v == nil { - // Allow argframe+0(FP). 
- if name == "argframe" && off == 0 { - continue - } - v = fn.varByOffset[off] - if v != nil { - badf("unknown variable %s; offset %d is %s+%d(FP)", name, off, v.name, v.off) - } else { - badf("unknown variable %s", name) - } - continue - } - asmCheckVar(badf, fn, line, m[0], off, v, archDef) - } - } - flushRet() - } - return nil, nil -} - -func asmKindForType(t types.Type, size int) asmKind { - switch t := t.Underlying().(type) { - case *types.Basic: - switch t.Kind() { - case types.String: - return asmString - case types.Complex64, types.Complex128: - return asmComplex - } - return asmKind(size) - case *types.Pointer, *types.Chan, *types.Map, *types.Signature: - return asmKind(size) - case *types.Struct: - return asmStruct - case *types.Interface: - if t.Empty() { - return asmEmptyInterface - } - return asmInterface - case *types.Array: - return asmArray - case *types.Slice: - return asmSlice - } - panic("unreachable") -} - -// A component is an assembly-addressable component of a composite type, -// or a composite type itself. -type component struct { - size int - offset int - kind asmKind - typ string - suffix string // Such as _base for string base, _0_lo for lo half of first element of [1]uint64 on 32 bit machine. - outer string // The suffix for immediately containing composite type. -} - -func newComponent(suffix string, kind asmKind, typ string, offset, size int, outer string) component { - return component{suffix: suffix, kind: kind, typ: typ, offset: offset, size: size, outer: outer} -} - -// componentsOfType generates a list of components of type t. -// For example, given string, the components are the string itself, the base, and the length. -func componentsOfType(arch *asmArch, t types.Type) []component { - return appendComponentsRecursive(arch, t, nil, "", 0) -} - -// appendComponentsRecursive implements componentsOfType. -// Recursion is required to correct handle structs and arrays, -// which can contain arbitrary other types. 
-func appendComponentsRecursive(arch *asmArch, t types.Type, cc []component, suffix string, off int) []component { - s := t.String() - size := int(arch.sizes.Sizeof(t)) - kind := asmKindForType(t, size) - cc = append(cc, newComponent(suffix, kind, s, off, size, suffix)) - - switch kind { - case 8: - if arch.ptrSize == 4 { - w1, w2 := "lo", "hi" - if arch.bigEndian { - w1, w2 = w2, w1 - } - cc = append(cc, newComponent(suffix+"_"+w1, 4, "half "+s, off, 4, suffix)) - cc = append(cc, newComponent(suffix+"_"+w2, 4, "half "+s, off+4, 4, suffix)) - } - - case asmEmptyInterface: - cc = append(cc, newComponent(suffix+"_type", asmKind(arch.ptrSize), "interface type", off, arch.ptrSize, suffix)) - cc = append(cc, newComponent(suffix+"_data", asmKind(arch.ptrSize), "interface data", off+arch.ptrSize, arch.ptrSize, suffix)) - - case asmInterface: - cc = append(cc, newComponent(suffix+"_itable", asmKind(arch.ptrSize), "interface itable", off, arch.ptrSize, suffix)) - cc = append(cc, newComponent(suffix+"_data", asmKind(arch.ptrSize), "interface data", off+arch.ptrSize, arch.ptrSize, suffix)) - - case asmSlice: - cc = append(cc, newComponent(suffix+"_base", asmKind(arch.ptrSize), "slice base", off, arch.ptrSize, suffix)) - cc = append(cc, newComponent(suffix+"_len", asmKind(arch.intSize), "slice len", off+arch.ptrSize, arch.intSize, suffix)) - cc = append(cc, newComponent(suffix+"_cap", asmKind(arch.intSize), "slice cap", off+arch.ptrSize+arch.intSize, arch.intSize, suffix)) - - case asmString: - cc = append(cc, newComponent(suffix+"_base", asmKind(arch.ptrSize), "string base", off, arch.ptrSize, suffix)) - cc = append(cc, newComponent(suffix+"_len", asmKind(arch.intSize), "string len", off+arch.ptrSize, arch.intSize, suffix)) - - case asmComplex: - fsize := size / 2 - cc = append(cc, newComponent(suffix+"_real", asmKind(fsize), fmt.Sprintf("real(complex%d)", size*8), off, fsize, suffix)) - cc = append(cc, newComponent(suffix+"_imag", asmKind(fsize), fmt.Sprintf("imag(complex%d)", size*8), off+fsize, fsize, suffix)) - - case asmStruct: - tu := t.Underlying().(*types.Struct) - fields := make([]*types.Var, tu.NumFields()) - for i := 0; i < tu.NumFields(); i++ { - fields[i] = tu.Field(i) - } - offsets := arch.sizes.Offsetsof(fields) - for i, f := range fields { - cc = appendComponentsRecursive(arch, f.Type(), cc, suffix+"_"+f.Name(), off+int(offsets[i])) - } - - case asmArray: - tu := t.Underlying().(*types.Array) - elem := tu.Elem() - // Calculate offset of each element array. - fields := []*types.Var{ - types.NewVar(token.NoPos, nil, "fake0", elem), - types.NewVar(token.NoPos, nil, "fake1", elem), - } - offsets := arch.sizes.Offsetsof(fields) - elemoff := int(offsets[1]) - for i := 0; i < int(tu.Len()); i++ { - cc = appendComponentsRecursive(arch, elem, cc, suffix+"_"+strconv.Itoa(i), off+i*elemoff) - } - } - - return cc -} - -// asmParseDecl parses a function decl for expected assembly variables. -func asmParseDecl(pass *analysis.Pass, decl *ast.FuncDecl) map[string]*asmFunc { - var ( - arch *asmArch - fn *asmFunc - offset int - ) - - // addParams adds asmVars for each of the parameters in list. - // isret indicates whether the list are the arguments or the return values. - // TODO(adonovan): simplify by passing (*types.Signature).{Params,Results} - // instead of list. - addParams := func(list []*ast.Field, isret bool) { - argnum := 0 - for _, fld := range list { - t := pass.TypesInfo.Types[fld.Type].Type - - // Work around https://golang.org/issue/28277. 
- if t == nil { - if ell, ok := fld.Type.(*ast.Ellipsis); ok { - t = types.NewSlice(pass.TypesInfo.Types[ell.Elt].Type) - } - } - - align := int(arch.sizes.Alignof(t)) - size := int(arch.sizes.Sizeof(t)) - offset += -offset & (align - 1) - cc := componentsOfType(arch, t) - - // names is the list of names with this type. - names := fld.Names - if len(names) == 0 { - // Anonymous args will be called arg, arg1, arg2, ... - // Similarly so for return values: ret, ret1, ret2, ... - name := "arg" - if isret { - name = "ret" - } - if argnum > 0 { - name += strconv.Itoa(argnum) - } - names = []*ast.Ident{ast.NewIdent(name)} - } - argnum += len(names) - - // Create variable for each name. - for _, id := range names { - name := id.Name - for _, c := range cc { - outer := name + c.outer - v := asmVar{ - name: name + c.suffix, - kind: c.kind, - typ: c.typ, - off: offset + c.offset, - size: c.size, - } - if vo := fn.vars[outer]; vo != nil { - vo.inner = append(vo.inner, &v) - } - fn.vars[v.name] = &v - for i := 0; i < v.size; i++ { - fn.varByOffset[v.off+i] = &v - } - } - offset += size - } - } - } - - m := make(map[string]*asmFunc) - for _, arch = range arches { - fn = &asmFunc{ - arch: arch, - vars: make(map[string]*asmVar), - varByOffset: make(map[int]*asmVar), - } - offset = 0 - addParams(decl.Type.Params.List, false) - if decl.Type.Results != nil && len(decl.Type.Results.List) > 0 { - offset += -offset & (arch.maxAlign - 1) - addParams(decl.Type.Results.List, true) - } - fn.size = offset - m[arch.name] = fn - } - - return m -} - -// asmCheckVar checks a single variable reference. -func asmCheckVar(badf func(string, ...interface{}), fn *asmFunc, line, expr string, off int, v *asmVar, archDef *asmArch) { - m := asmOpcode.FindStringSubmatch(line) - if m == nil { - if !strings.HasPrefix(strings.TrimSpace(line), "//") { - badf("cannot find assembly opcode") - } - return - } - - addr := strings.HasPrefix(expr, "$") - - // Determine operand sizes from instruction. - // Typically the suffix suffices, but there are exceptions. - var src, dst, kind asmKind - op := m[1] - switch fn.arch.name + "." + op { - case "386.FMOVLP": - src, dst = 8, 4 - case "arm.MOVD": - src = 8 - case "arm.MOVW": - src = 4 - case "arm.MOVH", "arm.MOVHU": - src = 2 - case "arm.MOVB", "arm.MOVBU": - src = 1 - // LEA* opcodes don't really read the second arg. - // They just take the address of it. - case "386.LEAL": - dst = 4 - addr = true - case "amd64.LEAQ": - dst = 8 - addr = true - default: - switch fn.arch.name { - case "386", "amd64": - if strings.HasPrefix(op, "F") && (strings.HasSuffix(op, "D") || strings.HasSuffix(op, "DP")) { - // FMOVDP, FXCHD, etc - src = 8 - break - } - if strings.HasPrefix(op, "P") && strings.HasSuffix(op, "RD") { - // PINSRD, PEXTRD, etc - src = 4 - break - } - if strings.HasPrefix(op, "F") && (strings.HasSuffix(op, "F") || strings.HasSuffix(op, "FP")) { - // FMOVFP, FXCHF, etc - src = 4 - break - } - if strings.HasSuffix(op, "SD") { - // MOVSD, SQRTSD, etc - src = 8 - break - } - if strings.HasSuffix(op, "SS") { - // MOVSS, SQRTSS, etc - src = 4 - break - } - if op == "MOVO" || op == "MOVOU" { - src = 16 - break - } - if strings.HasPrefix(op, "SET") { - // SETEQ, etc - src = 1 - break - } - switch op[len(op)-1] { - case 'B': - src = 1 - case 'W': - src = 2 - case 'L': - src = 4 - case 'D', 'Q': - src = 8 - } - case "ppc64", "ppc64le": - // Strip standard suffixes to reveal size letter. 
- m := ppc64Suff.FindStringSubmatch(op) - if m != nil { - switch m[1][0] { - case 'B': - src = 1 - case 'H': - src = 2 - case 'W': - src = 4 - case 'D': - src = 8 - } - } - case "loong64", "mips", "mipsle", "mips64", "mips64le": - switch op { - case "MOVB", "MOVBU": - src = 1 - case "MOVH", "MOVHU": - src = 2 - case "MOVW", "MOVWU", "MOVF": - src = 4 - case "MOVV", "MOVD": - src = 8 - } - case "s390x": - switch op { - case "MOVB", "MOVBZ": - src = 1 - case "MOVH", "MOVHZ": - src = 2 - case "MOVW", "MOVWZ", "FMOVS": - src = 4 - case "MOVD", "FMOVD": - src = 8 - } - } - } - if dst == 0 { - dst = src - } - - // Determine whether the match we're holding - // is the first or second argument. - if strings.Index(line, expr) > strings.Index(line, ",") { - kind = dst - } else { - kind = src - } - - vk := v.kind - vs := v.size - vt := v.typ - switch vk { - case asmInterface, asmEmptyInterface, asmString, asmSlice: - // allow reference to first word (pointer) - vk = v.inner[0].kind - vs = v.inner[0].size - vt = v.inner[0].typ - case asmComplex: - // Allow a single instruction to load both parts of a complex. - if int(kind) == vs { - kind = asmComplex - } - } - if addr { - vk = asmKind(archDef.ptrSize) - vs = archDef.ptrSize - vt = "address" - } - - if off != v.off { - var inner bytes.Buffer - for i, vi := range v.inner { - if len(v.inner) > 1 { - fmt.Fprintf(&inner, ",") - } - fmt.Fprintf(&inner, " ") - if i == len(v.inner)-1 { - fmt.Fprintf(&inner, "or ") - } - fmt.Fprintf(&inner, "%s+%d(FP)", vi.name, vi.off) - } - badf("invalid offset %s; expected %s+%d(FP)%s", expr, v.name, v.off, inner.String()) - return - } - if kind != 0 && kind != vk { - var inner bytes.Buffer - if len(v.inner) > 0 { - fmt.Fprintf(&inner, " containing") - for i, vi := range v.inner { - if i > 0 && len(v.inner) > 2 { - fmt.Fprintf(&inner, ",") - } - fmt.Fprintf(&inner, " ") - if i > 0 && i == len(v.inner)-1 { - fmt.Fprintf(&inner, "and ") - } - fmt.Fprintf(&inner, "%s+%d(FP)", vi.name, vi.off) - } - } - badf("invalid %s of %s; %s is %d-byte value%s", op, expr, vt, vs, inner.String()) - } -} diff --git a/vendor/golang.org/x/tools/go/analysis/passes/assign/assign.go b/vendor/golang.org/x/tools/go/analysis/passes/assign/assign.go deleted file mode 100644 index 3bfd50122..000000000 --- a/vendor/golang.org/x/tools/go/analysis/passes/assign/assign.go +++ /dev/null @@ -1,88 +0,0 @@ -// Copyright 2013 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package assign - -// TODO(adonovan): check also for assignments to struct fields inside -// methods that are on T instead of *T. 
- -import ( - _ "embed" - "fmt" - "go/ast" - "go/token" - "go/types" - "reflect" - - "golang.org/x/tools/go/analysis" - "golang.org/x/tools/go/analysis/passes/inspect" - "golang.org/x/tools/go/analysis/passes/internal/analysisutil" - "golang.org/x/tools/go/ast/astutil" - "golang.org/x/tools/go/ast/inspector" -) - -//go:embed doc.go -var doc string - -var Analyzer = &analysis.Analyzer{ - Name: "assign", - Doc: analysisutil.MustExtractDoc(doc, "assign"), - URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/assign", - Requires: []*analysis.Analyzer{inspect.Analyzer}, - Run: run, -} - -func run(pass *analysis.Pass) (interface{}, error) { - inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) - - nodeFilter := []ast.Node{ - (*ast.AssignStmt)(nil), - } - inspect.Preorder(nodeFilter, func(n ast.Node) { - stmt := n.(*ast.AssignStmt) - if stmt.Tok != token.ASSIGN { - return // ignore := - } - if len(stmt.Lhs) != len(stmt.Rhs) { - // If LHS and RHS have different cardinality, they can't be the same. - return - } - for i, lhs := range stmt.Lhs { - rhs := stmt.Rhs[i] - if analysisutil.HasSideEffects(pass.TypesInfo, lhs) || - analysisutil.HasSideEffects(pass.TypesInfo, rhs) || - isMapIndex(pass.TypesInfo, lhs) { - continue // expressions may not be equal - } - if reflect.TypeOf(lhs) != reflect.TypeOf(rhs) { - continue // short-circuit the heavy-weight gofmt check - } - le := analysisutil.Format(pass.Fset, lhs) - re := analysisutil.Format(pass.Fset, rhs) - if le == re { - pass.Report(analysis.Diagnostic{ - Pos: stmt.Pos(), Message: fmt.Sprintf("self-assignment of %s to %s", re, le), - SuggestedFixes: []analysis.SuggestedFix{ - {Message: "Remove", TextEdits: []analysis.TextEdit{ - {Pos: stmt.Pos(), End: stmt.End(), NewText: []byte{}}, - }}, - }, - }) - } - } - }) - - return nil, nil -} - -// isMapIndex returns true if e is a map index expression. -func isMapIndex(info *types.Info, e ast.Expr) bool { - if idx, ok := astutil.Unparen(e).(*ast.IndexExpr); ok { - if typ := info.Types[idx.X].Type; typ != nil { - _, ok := typ.Underlying().(*types.Map) - return ok - } - } - return false -} diff --git a/vendor/golang.org/x/tools/go/analysis/passes/assign/doc.go b/vendor/golang.org/x/tools/go/analysis/passes/assign/doc.go deleted file mode 100644 index a4b1b64c5..000000000 --- a/vendor/golang.org/x/tools/go/analysis/passes/assign/doc.go +++ /dev/null @@ -1,14 +0,0 @@ -// Copyright 2023 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Package assign defines an Analyzer that detects useless assignments. -// -// # Analyzer assign -// -// assign: check for useless assignments -// -// This checker reports assignments of the form x = x or a[i] = a[i]. -// These are almost always useless, and even when they aren't they are -// usually a mistake. -package assign diff --git a/vendor/golang.org/x/tools/go/analysis/passes/atomic/atomic.go b/vendor/golang.org/x/tools/go/analysis/passes/atomic/atomic.go deleted file mode 100644 index 931f9ca75..000000000 --- a/vendor/golang.org/x/tools/go/analysis/passes/atomic/atomic.go +++ /dev/null @@ -1,85 +0,0 @@ -// Copyright 2013 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
- -package atomic - -import ( - _ "embed" - "go/ast" - "go/token" - - "golang.org/x/tools/go/analysis" - "golang.org/x/tools/go/analysis/passes/inspect" - "golang.org/x/tools/go/analysis/passes/internal/analysisutil" - "golang.org/x/tools/go/ast/inspector" - "golang.org/x/tools/go/types/typeutil" -) - -//go:embed doc.go -var doc string - -var Analyzer = &analysis.Analyzer{ - Name: "atomic", - Doc: analysisutil.MustExtractDoc(doc, "atomic"), - URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/atomic", - Requires: []*analysis.Analyzer{inspect.Analyzer}, - RunDespiteErrors: true, - Run: run, -} - -func run(pass *analysis.Pass) (interface{}, error) { - if !analysisutil.Imports(pass.Pkg, "sync/atomic") { - return nil, nil // doesn't directly import sync/atomic - } - - inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) - - nodeFilter := []ast.Node{ - (*ast.AssignStmt)(nil), - } - inspect.Preorder(nodeFilter, func(node ast.Node) { - n := node.(*ast.AssignStmt) - if len(n.Lhs) != len(n.Rhs) { - return - } - if len(n.Lhs) == 1 && n.Tok == token.DEFINE { - return - } - - for i, right := range n.Rhs { - call, ok := right.(*ast.CallExpr) - if !ok { - continue - } - fn := typeutil.StaticCallee(pass.TypesInfo, call) - if analysisutil.IsFunctionNamed(fn, "sync/atomic", "AddInt32", "AddInt64", "AddUint32", "AddUint64", "AddUintptr") { - checkAtomicAddAssignment(pass, n.Lhs[i], call) - } - } - }) - return nil, nil -} - -// checkAtomicAddAssignment walks the atomic.Add* method calls checking -// for assigning the return value to the same variable being used in the -// operation -func checkAtomicAddAssignment(pass *analysis.Pass, left ast.Expr, call *ast.CallExpr) { - if len(call.Args) != 2 { - return - } - arg := call.Args[0] - broken := false - - gofmt := func(e ast.Expr) string { return analysisutil.Format(pass.Fset, e) } - - if uarg, ok := arg.(*ast.UnaryExpr); ok && uarg.Op == token.AND { - broken = gofmt(left) == gofmt(uarg.X) - } else if star, ok := left.(*ast.StarExpr); ok { - broken = gofmt(star.X) == gofmt(arg) - } - - if broken { - pass.ReportRangef(left, "direct assignment to atomic value") - } -} diff --git a/vendor/golang.org/x/tools/go/analysis/passes/atomic/doc.go b/vendor/golang.org/x/tools/go/analysis/passes/atomic/doc.go deleted file mode 100644 index 5aafe25d3..000000000 --- a/vendor/golang.org/x/tools/go/analysis/passes/atomic/doc.go +++ /dev/null @@ -1,17 +0,0 @@ -// Copyright 2023 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Package atomic defines an Analyzer that checks for common mistakes -// using the sync/atomic package. -// -// # Analyzer atomic -// -// atomic: check for common mistakes using the sync/atomic package -// -// The atomic checker looks for assignment statements of the form: -// -// x = atomic.AddUint64(&x, 1) -// -// which are not atomic. -package atomic diff --git a/vendor/golang.org/x/tools/go/analysis/passes/atomicalign/atomicalign.go b/vendor/golang.org/x/tools/go/analysis/passes/atomicalign/atomicalign.go deleted file mode 100644 index aff6d25b3..000000000 --- a/vendor/golang.org/x/tools/go/analysis/passes/atomicalign/atomicalign.go +++ /dev/null @@ -1,108 +0,0 @@ -// Copyright 2019 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
- -// Package atomicalign defines an Analyzer that checks for non-64-bit-aligned -// arguments to sync/atomic functions. On non-32-bit platforms, those functions -// panic if their argument variables are not 64-bit aligned. It is therefore -// the caller's responsibility to arrange for 64-bit alignment of such variables. -// See https://golang.org/pkg/sync/atomic/#pkg-note-BUG -package atomicalign - -import ( - "go/ast" - "go/token" - "go/types" - - "golang.org/x/tools/go/analysis" - "golang.org/x/tools/go/analysis/passes/inspect" - "golang.org/x/tools/go/analysis/passes/internal/analysisutil" - "golang.org/x/tools/go/ast/inspector" - "golang.org/x/tools/go/types/typeutil" -) - -const Doc = "check for non-64-bits-aligned arguments to sync/atomic functions" - -var Analyzer = &analysis.Analyzer{ - Name: "atomicalign", - Doc: Doc, - URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/atomicalign", - Requires: []*analysis.Analyzer{inspect.Analyzer}, - Run: run, -} - -func run(pass *analysis.Pass) (interface{}, error) { - if 8*pass.TypesSizes.Sizeof(types.Typ[types.Uintptr]) == 64 { - return nil, nil // 64-bit platform - } - if !analysisutil.Imports(pass.Pkg, "sync/atomic") { - return nil, nil // doesn't directly import sync/atomic - } - - inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) - nodeFilter := []ast.Node{ - (*ast.CallExpr)(nil), - } - funcNames := []string{ - "AddInt64", "AddUint64", - "LoadInt64", "LoadUint64", - "StoreInt64", "StoreUint64", - "SwapInt64", "SwapUint64", - "CompareAndSwapInt64", "CompareAndSwapUint64", - } - - inspect.Preorder(nodeFilter, func(node ast.Node) { - call := node.(*ast.CallExpr) - fn := typeutil.StaticCallee(pass.TypesInfo, call) - if analysisutil.IsFunctionNamed(fn, "sync/atomic", funcNames...) { - // For all the listed functions, the expression to check is always the first function argument. - check64BitAlignment(pass, fn.Name(), call.Args[0]) - } - }) - - return nil, nil -} - -func check64BitAlignment(pass *analysis.Pass, funcName string, arg ast.Expr) { - // Checks the argument is made of the address operator (&) applied to - // a struct field (as opposed to a variable as the first word of - // uint64 and int64 variables can be relied upon to be 64-bit aligned). - unary, ok := arg.(*ast.UnaryExpr) - if !ok || unary.Op != token.AND { - return - } - - // Retrieve the types.Struct in order to get the offset of the - // atomically accessed field. - sel, ok := unary.X.(*ast.SelectorExpr) - if !ok { - return - } - tvar, ok := pass.TypesInfo.Selections[sel].Obj().(*types.Var) - if !ok || !tvar.IsField() { - return - } - - stype, ok := pass.TypesInfo.Types[sel.X].Type.Underlying().(*types.Struct) - if !ok { - return - } - - var offset int64 - var fields []*types.Var - for i := 0; i < stype.NumFields(); i++ { - f := stype.Field(i) - fields = append(fields, f) - if f == tvar { - // We're done, this is the field we were looking for, - // no need to fill the fields slice further. - offset = pass.TypesSizes.Offsetsof(fields)[i] - break - } - } - if offset&7 == 0 { - return // 64-bit aligned - } - - pass.ReportRangef(arg, "address of non 64-bit aligned field .%s passed to atomic.%s", tvar.Name(), funcName) -} diff --git a/vendor/golang.org/x/tools/go/analysis/passes/bools/bools.go b/vendor/golang.org/x/tools/go/analysis/passes/bools/bools.go deleted file mode 100644 index 564329774..000000000 --- a/vendor/golang.org/x/tools/go/analysis/passes/bools/bools.go +++ /dev/null @@ -1,183 +0,0 @@ -// Copyright 2014 The Go Authors. 
All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Package bools defines an Analyzer that detects common mistakes -// involving boolean operators. -package bools - -import ( - "go/ast" - "go/token" - "go/types" - - "golang.org/x/tools/go/analysis" - "golang.org/x/tools/go/analysis/passes/inspect" - "golang.org/x/tools/go/analysis/passes/internal/analysisutil" - "golang.org/x/tools/go/ast/astutil" - "golang.org/x/tools/go/ast/inspector" -) - -const Doc = "check for common mistakes involving boolean operators" - -var Analyzer = &analysis.Analyzer{ - Name: "bools", - Doc: Doc, - URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/bools", - Requires: []*analysis.Analyzer{inspect.Analyzer}, - Run: run, -} - -func run(pass *analysis.Pass) (interface{}, error) { - inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) - - nodeFilter := []ast.Node{ - (*ast.BinaryExpr)(nil), - } - seen := make(map[*ast.BinaryExpr]bool) - inspect.Preorder(nodeFilter, func(n ast.Node) { - e := n.(*ast.BinaryExpr) - if seen[e] { - // Already processed as a subexpression of an earlier node. - return - } - - var op boolOp - switch e.Op { - case token.LOR: - op = or - case token.LAND: - op = and - default: - return - } - - comm := op.commutativeSets(pass.TypesInfo, e, seen) - for _, exprs := range comm { - op.checkRedundant(pass, exprs) - op.checkSuspect(pass, exprs) - } - }) - return nil, nil -} - -type boolOp struct { - name string - tok token.Token // token corresponding to this operator - badEq token.Token // token corresponding to the equality test that should not be used with this operator -} - -var ( - or = boolOp{"or", token.LOR, token.NEQ} - and = boolOp{"and", token.LAND, token.EQL} -) - -// commutativeSets returns all side effect free sets of -// expressions in e that are connected by op. -// For example, given 'a || b || f() || c || d' with the or op, -// commutativeSets returns {{b, a}, {d, c}}. -// commutativeSets adds any expanded BinaryExprs to seen. -func (op boolOp) commutativeSets(info *types.Info, e *ast.BinaryExpr, seen map[*ast.BinaryExpr]bool) [][]ast.Expr { - exprs := op.split(e, seen) - - // Partition the slice of expressions into commutative sets. - i := 0 - var sets [][]ast.Expr - for j := 0; j <= len(exprs); j++ { - if j == len(exprs) || analysisutil.HasSideEffects(info, exprs[j]) { - if i < j { - sets = append(sets, exprs[i:j]) - } - i = j + 1 - } - } - - return sets -} - -// checkRedundant checks for expressions of the form -// -// e && e -// e || e -// -// Exprs must contain only side effect free expressions. -func (op boolOp) checkRedundant(pass *analysis.Pass, exprs []ast.Expr) { - seen := make(map[string]bool) - for _, e := range exprs { - efmt := analysisutil.Format(pass.Fset, e) - if seen[efmt] { - pass.ReportRangef(e, "redundant %s: %s %s %s", op.name, efmt, op.tok, efmt) - } else { - seen[efmt] = true - } - } -} - -// checkSuspect checks for expressions of the form -// -// x != c1 || x != c2 -// x == c1 && x == c2 -// -// where c1 and c2 are constant expressions. -// If c1 and c2 are the same then it's redundant; -// if c1 and c2 are different then it's always true or always false. -// Exprs must contain only side effect free expressions. -func (op boolOp) checkSuspect(pass *analysis.Pass, exprs []ast.Expr) { - // seen maps from expressions 'x' to equality expressions 'x != c'. 
- seen := make(map[string]string) - - for _, e := range exprs { - bin, ok := e.(*ast.BinaryExpr) - if !ok || bin.Op != op.badEq { - continue - } - - // In order to avoid false positives, restrict to cases - // in which one of the operands is constant. We're then - // interested in the other operand. - // In the rare case in which both operands are constant - // (e.g. runtime.GOOS and "windows"), we'll only catch - // mistakes if the LHS is repeated, which is how most - // code is written. - var x ast.Expr - switch { - case pass.TypesInfo.Types[bin.Y].Value != nil: - x = bin.X - case pass.TypesInfo.Types[bin.X].Value != nil: - x = bin.Y - default: - continue - } - - // e is of the form 'x != c' or 'x == c'. - xfmt := analysisutil.Format(pass.Fset, x) - efmt := analysisutil.Format(pass.Fset, e) - if prev, found := seen[xfmt]; found { - // checkRedundant handles the case in which efmt == prev. - if efmt != prev { - pass.ReportRangef(e, "suspect %s: %s %s %s", op.name, efmt, op.tok, prev) - } - } else { - seen[xfmt] = efmt - } - } -} - -// split returns a slice of all subexpressions in e that are connected by op. -// For example, given 'a || (b || c) || d' with the or op, -// split returns []{d, c, b, a}. -// seen[e] is already true; any newly processed exprs are added to seen. -func (op boolOp) split(e ast.Expr, seen map[*ast.BinaryExpr]bool) (exprs []ast.Expr) { - for { - e = astutil.Unparen(e) - if b, ok := e.(*ast.BinaryExpr); ok && b.Op == op.tok { - seen[b] = true - exprs = append(exprs, op.split(b.Y, seen)...) - e = b.X - } else { - exprs = append(exprs, e) - break - } - } - return -} diff --git a/vendor/golang.org/x/tools/go/analysis/passes/buildssa/buildssa.go b/vendor/golang.org/x/tools/go/analysis/passes/buildssa/buildssa.go deleted file mode 100644 index f077ea282..000000000 --- a/vendor/golang.org/x/tools/go/analysis/passes/buildssa/buildssa.go +++ /dev/null @@ -1,94 +0,0 @@ -// Copyright 2018 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Package buildssa defines an Analyzer that constructs the SSA -// representation of an error-free package and returns the set of all -// functions within it. It does not report any diagnostics itself but -// may be used as an input to other analyzers. -package buildssa - -import ( - "go/ast" - "go/types" - "reflect" - - "golang.org/x/tools/go/analysis" - "golang.org/x/tools/go/ssa" -) - -var Analyzer = &analysis.Analyzer{ - Name: "buildssa", - Doc: "build SSA-form IR for later passes", - URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/buildssa", - Run: run, - ResultType: reflect.TypeOf(new(SSA)), -} - -// SSA provides SSA-form intermediate representation for all the -// source functions in the current package. -type SSA struct { - Pkg *ssa.Package - SrcFuncs []*ssa.Function -} - -func run(pass *analysis.Pass) (interface{}, error) { - // We must create a new Program for each Package because the - // analysis API provides no place to hang a Program shared by - // all Packages. Consequently, SSA Packages and Functions do not - // have a canonical representation across an analysis session of - // multiple packages. This is unlikely to be a problem in - // practice because the analysis API essentially forces all - // packages to be analysed independently, so any given call to - // Analysis.Run on a package will see only SSA objects belonging - // to a single Program. 
- - // Some Analyzers may need GlobalDebug, in which case we'll have - // to set it globally, but let's wait till we need it. - mode := ssa.BuilderMode(0) - - prog := ssa.NewProgram(pass.Fset, mode) - - // Create SSA packages for direct imports. - for _, p := range pass.Pkg.Imports() { - prog.CreatePackage(p, nil, nil, true) - } - - // Create and build the primary package. - ssapkg := prog.CreatePackage(pass.Pkg, pass.Files, pass.TypesInfo, false) - ssapkg.Build() - - // Compute list of source functions, including literals, - // in source order. - var funcs []*ssa.Function - for _, f := range pass.Files { - for _, decl := range f.Decls { - if fdecl, ok := decl.(*ast.FuncDecl); ok { - // (init functions have distinct Func - // objects named "init" and distinct - // ssa.Functions named "init#1", ...) - - fn := pass.TypesInfo.Defs[fdecl.Name].(*types.Func) - if fn == nil { - panic(fn) - } - - f := ssapkg.Prog.FuncValue(fn) - if f == nil { - panic(fn) - } - - var addAnons func(f *ssa.Function) - addAnons = func(f *ssa.Function) { - funcs = append(funcs, f) - for _, anon := range f.AnonFuncs { - addAnons(anon) - } - } - addAnons(f) - } - } - } - - return &SSA{Pkg: ssapkg, SrcFuncs: funcs}, nil -} diff --git a/vendor/golang.org/x/tools/go/analysis/passes/buildtag/buildtag.go b/vendor/golang.org/x/tools/go/analysis/passes/buildtag/buildtag.go deleted file mode 100644 index 55bdad78b..000000000 --- a/vendor/golang.org/x/tools/go/analysis/passes/buildtag/buildtag.go +++ /dev/null @@ -1,368 +0,0 @@ -// Copyright 2013 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -//go:build go1.16 -// +build go1.16 - -// Package buildtag defines an Analyzer that checks build tags. -package buildtag - -import ( - "go/ast" - "go/build/constraint" - "go/parser" - "go/token" - "strings" - "unicode" - - "golang.org/x/tools/go/analysis" - "golang.org/x/tools/go/analysis/passes/internal/analysisutil" -) - -const Doc = "check //go:build and // +build directives" - -var Analyzer = &analysis.Analyzer{ - Name: "buildtag", - Doc: Doc, - URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/buildtag", - Run: runBuildTag, -} - -func runBuildTag(pass *analysis.Pass) (interface{}, error) { - for _, f := range pass.Files { - checkGoFile(pass, f) - } - for _, name := range pass.OtherFiles { - if err := checkOtherFile(pass, name); err != nil { - return nil, err - } - } - for _, name := range pass.IgnoredFiles { - if strings.HasSuffix(name, ".go") { - f, err := parser.ParseFile(pass.Fset, name, nil, parser.ParseComments|parser.SkipObjectResolution) - if err != nil { - // Not valid Go source code - not our job to diagnose, so ignore. - return nil, nil - } - checkGoFile(pass, f) - } else { - if err := checkOtherFile(pass, name); err != nil { - return nil, err - } - } - } - return nil, nil -} - -func checkGoFile(pass *analysis.Pass, f *ast.File) { - var check checker - check.init(pass) - defer check.finish() - - for _, group := range f.Comments { - // A +build comment is ignored after or adjoining the package declaration. - if group.End()+1 >= f.Package { - check.plusBuildOK = false - } - // A //go:build comment is ignored after the package declaration - // (but adjoining it is OK, in contrast to +build comments). - if group.Pos() >= f.Package { - check.goBuildOK = false - } - - // Check each line of a //-comment. - for _, c := range group.List { - // "+build" is ignored within or after a /*...*/ comment. 
- if !strings.HasPrefix(c.Text, "//") { - check.plusBuildOK = false - } - check.comment(c.Slash, c.Text) - } - } -} - -func checkOtherFile(pass *analysis.Pass, filename string) error { - var check checker - check.init(pass) - defer check.finish() - - // We cannot use the Go parser, since this may not be a Go source file. - // Read the raw bytes instead. - content, tf, err := analysisutil.ReadFile(pass.Fset, filename) - if err != nil { - return err - } - - check.file(token.Pos(tf.Base()), string(content)) - return nil -} - -type checker struct { - pass *analysis.Pass - plusBuildOK bool // "+build" lines still OK - goBuildOK bool // "go:build" lines still OK - crossCheck bool // cross-check go:build and +build lines when done reading file - inStar bool // currently in a /* */ comment - goBuildPos token.Pos // position of first go:build line found - plusBuildPos token.Pos // position of first "+build" line found - goBuild constraint.Expr // go:build constraint found - plusBuild constraint.Expr // AND of +build constraints found -} - -func (check *checker) init(pass *analysis.Pass) { - check.pass = pass - check.goBuildOK = true - check.plusBuildOK = true - check.crossCheck = true -} - -func (check *checker) file(pos token.Pos, text string) { - // Determine cutpoint where +build comments are no longer valid. - // They are valid in leading // comments in the file followed by - // a blank line. - // - // This must be done as a separate pass because of the - // requirement that the comment be followed by a blank line. - var plusBuildCutoff int - fullText := text - for text != "" { - i := strings.Index(text, "\n") - if i < 0 { - i = len(text) - } else { - i++ - } - offset := len(fullText) - len(text) - line := text[:i] - text = text[i:] - line = strings.TrimSpace(line) - if !strings.HasPrefix(line, "//") && line != "" { - break - } - if line == "" { - plusBuildCutoff = offset - } - } - - // Process each line. - // Must stop once we hit goBuildOK == false - text = fullText - check.inStar = false - for text != "" { - i := strings.Index(text, "\n") - if i < 0 { - i = len(text) - } else { - i++ - } - offset := len(fullText) - len(text) - line := text[:i] - text = text[i:] - check.plusBuildOK = offset < plusBuildCutoff - - if strings.HasPrefix(line, "//") { - check.comment(pos+token.Pos(offset), line) - continue - } - - // Keep looking for the point at which //go:build comments - // stop being allowed. Skip over, cut out any /* */ comments. - for { - line = strings.TrimSpace(line) - if check.inStar { - i := strings.Index(line, "*/") - if i < 0 { - line = "" - break - } - line = line[i+len("*/"):] - check.inStar = false - continue - } - if strings.HasPrefix(line, "/*") { - check.inStar = true - line = line[len("/*"):] - continue - } - break - } - if line != "" { - // Found non-comment non-blank line. - // Ends space for valid //go:build comments, - // but also ends the fraction of the file we can - // reliably parse. From this point on we might - // incorrectly flag "comments" inside multiline - // string constants or anything else (this might - // not even be a Go program). So stop. 
- break - } - } -} - -func (check *checker) comment(pos token.Pos, text string) { - if strings.HasPrefix(text, "//") { - if strings.Contains(text, "+build") { - check.plusBuildLine(pos, text) - } - if strings.Contains(text, "//go:build") { - check.goBuildLine(pos, text) - } - } - if strings.HasPrefix(text, "/*") { - if i := strings.Index(text, "\n"); i >= 0 { - // multiline /* */ comment - process interior lines - check.inStar = true - i++ - pos += token.Pos(i) - text = text[i:] - for text != "" { - i := strings.Index(text, "\n") - if i < 0 { - i = len(text) - } else { - i++ - } - line := text[:i] - if strings.HasPrefix(line, "//") { - check.comment(pos, line) - } - pos += token.Pos(i) - text = text[i:] - } - check.inStar = false - } - } -} - -func (check *checker) goBuildLine(pos token.Pos, line string) { - if !constraint.IsGoBuild(line) { - if !strings.HasPrefix(line, "//go:build") && constraint.IsGoBuild("//"+strings.TrimSpace(line[len("//"):])) { - check.pass.Reportf(pos, "malformed //go:build line (space between // and go:build)") - } - return - } - if !check.goBuildOK || check.inStar { - check.pass.Reportf(pos, "misplaced //go:build comment") - check.crossCheck = false - return - } - - if check.goBuildPos == token.NoPos { - check.goBuildPos = pos - } else { - check.pass.Reportf(pos, "unexpected extra //go:build line") - check.crossCheck = false - } - - // testing hack: stop at // ERROR - if i := strings.Index(line, " // ERROR "); i >= 0 { - line = line[:i] - } - - x, err := constraint.Parse(line) - if err != nil { - check.pass.Reportf(pos, "%v", err) - check.crossCheck = false - return - } - - if check.goBuild == nil { - check.goBuild = x - } -} - -func (check *checker) plusBuildLine(pos token.Pos, line string) { - line = strings.TrimSpace(line) - if !constraint.IsPlusBuild(line) { - // Comment with +build but not at beginning. - // Only report early in file. - if check.plusBuildOK && !strings.HasPrefix(line, "// want") { - check.pass.Reportf(pos, "possible malformed +build comment") - } - return - } - if !check.plusBuildOK { // inStar implies !plusBuildOK - check.pass.Reportf(pos, "misplaced +build comment") - check.crossCheck = false - } - - if check.plusBuildPos == token.NoPos { - check.plusBuildPos = pos - } - - // testing hack: stop at // ERROR - if i := strings.Index(line, " // ERROR "); i >= 0 { - line = line[:i] - } - - fields := strings.Fields(line[len("//"):]) - // IsPlusBuildConstraint check above implies fields[0] == "+build" - for _, arg := range fields[1:] { - for _, elem := range strings.Split(arg, ",") { - if strings.HasPrefix(elem, "!!") { - check.pass.Reportf(pos, "invalid double negative in build constraint: %s", arg) - check.crossCheck = false - continue - } - elem = strings.TrimPrefix(elem, "!") - for _, c := range elem { - if !unicode.IsLetter(c) && !unicode.IsDigit(c) && c != '_' && c != '.' { - check.pass.Reportf(pos, "invalid non-alphanumeric build constraint: %s", arg) - check.crossCheck = false - break - } - } - } - } - - if check.crossCheck { - y, err := constraint.Parse(line) - if err != nil { - // Should never happen - constraint.Parse never rejects a // +build line. - // Also, we just checked the syntax above. - // Even so, report. 
- check.pass.Reportf(pos, "%v", err) - check.crossCheck = false - return - } - if check.plusBuild == nil { - check.plusBuild = y - } else { - check.plusBuild = &constraint.AndExpr{X: check.plusBuild, Y: y} - } - } -} - -func (check *checker) finish() { - if !check.crossCheck || check.plusBuildPos == token.NoPos || check.goBuildPos == token.NoPos { - return - } - - // Have both //go:build and // +build, - // with no errors found (crossCheck still true). - // Check they match. - var want constraint.Expr - lines, err := constraint.PlusBuildLines(check.goBuild) - if err != nil { - check.pass.Reportf(check.goBuildPos, "%v", err) - return - } - for _, line := range lines { - y, err := constraint.Parse(line) - if err != nil { - // Definitely should not happen, but not the user's fault. - // Do not report. - return - } - if want == nil { - want = y - } else { - want = &constraint.AndExpr{X: want, Y: y} - } - } - if want.String() != check.plusBuild.String() { - check.pass.Reportf(check.plusBuildPos, "+build lines do not match //go:build condition") - return - } -} diff --git a/vendor/golang.org/x/tools/go/analysis/passes/buildtag/buildtag_old.go b/vendor/golang.org/x/tools/go/analysis/passes/buildtag/buildtag_old.go deleted file mode 100644 index 0001ba536..000000000 --- a/vendor/golang.org/x/tools/go/analysis/passes/buildtag/buildtag_old.go +++ /dev/null @@ -1,174 +0,0 @@ -// Copyright 2013 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// TODO(rsc): Delete this file once Go 1.17 comes out and we can retire Go 1.15 support. - -//go:build !go1.16 -// +build !go1.16 - -// Package buildtag defines an Analyzer that checks build tags. -package buildtag - -import ( - "bytes" - "fmt" - "go/ast" - "go/parser" - "strings" - "unicode" - - "golang.org/x/tools/go/analysis" - "golang.org/x/tools/go/analysis/passes/internal/analysisutil" -) - -const Doc = "check // +build directives" - -var Analyzer = &analysis.Analyzer{ - Name: "buildtag", - Doc: Doc, - Run: runBuildTag, -} - -func runBuildTag(pass *analysis.Pass) (interface{}, error) { - for _, f := range pass.Files { - checkGoFile(pass, f) - } - for _, name := range pass.OtherFiles { - if err := checkOtherFile(pass, name); err != nil { - return nil, err - } - } - for _, name := range pass.IgnoredFiles { - if strings.HasSuffix(name, ".go") { - f, err := parser.ParseFile(pass.Fset, name, nil, parser.ParseComments) - if err != nil { - // Not valid Go source code - not our job to diagnose, so ignore. - return nil, nil - } - checkGoFile(pass, f) - } else { - if err := checkOtherFile(pass, name); err != nil { - return nil, err - } - } - } - return nil, nil -} - -func checkGoFile(pass *analysis.Pass, f *ast.File) { - pastCutoff := false - for _, group := range f.Comments { - // A +build comment is ignored after or adjoining the package declaration. - if group.End()+1 >= f.Package { - pastCutoff = true - } - - // "+build" is ignored within or after a /*...*/ comment. - if !strings.HasPrefix(group.List[0].Text, "//") { - pastCutoff = true - continue - } - - // Check each line of a //-comment. 
- for _, c := range group.List { - if !strings.Contains(c.Text, "+build") { - continue - } - if err := checkLine(c.Text, pastCutoff); err != nil { - pass.Reportf(c.Pos(), "%s", err) - } - } - } -} - -func checkOtherFile(pass *analysis.Pass, filename string) error { - content, tf, err := analysisutil.ReadFile(pass.Fset, filename) - if err != nil { - return err - } - - // We must look at the raw lines, as build tags may appear in non-Go - // files such as assembly files. - lines := bytes.SplitAfter(content, nl) - - // Determine cutpoint where +build comments are no longer valid. - // They are valid in leading // comments in the file followed by - // a blank line. - // - // This must be done as a separate pass because of the - // requirement that the comment be followed by a blank line. - var cutoff int - for i, line := range lines { - line = bytes.TrimSpace(line) - if !bytes.HasPrefix(line, slashSlash) { - if len(line) > 0 { - break - } - cutoff = i - } - } - - for i, line := range lines { - line = bytes.TrimSpace(line) - if !bytes.HasPrefix(line, slashSlash) { - continue - } - if !bytes.Contains(line, []byte("+build")) { - continue - } - if err := checkLine(string(line), i >= cutoff); err != nil { - pass.Reportf(analysisutil.LineStart(tf, i+1), "%s", err) - continue - } - } - return nil -} - -// checkLine checks a line that starts with "//" and contains "+build". -func checkLine(line string, pastCutoff bool) error { - line = strings.TrimPrefix(line, "//") - line = strings.TrimSpace(line) - - if strings.HasPrefix(line, "+build") { - fields := strings.Fields(line) - if fields[0] != "+build" { - // Comment is something like +buildasdf not +build. - return fmt.Errorf("possible malformed +build comment") - } - if pastCutoff { - return fmt.Errorf("+build comment must appear before package clause and be followed by a blank line") - } - if err := checkArguments(fields); err != nil { - return err - } - } else { - // Comment with +build but not at beginning. - if !pastCutoff { - return fmt.Errorf("possible malformed +build comment") - } - } - return nil -} - -func checkArguments(fields []string) error { - for _, arg := range fields[1:] { - for _, elem := range strings.Split(arg, ",") { - if strings.HasPrefix(elem, "!!") { - return fmt.Errorf("invalid double negative in build constraint: %s", arg) - } - elem = strings.TrimPrefix(elem, "!") - for _, c := range elem { - if !unicode.IsLetter(c) && !unicode.IsDigit(c) && c != '_' && c != '.' { - return fmt.Errorf("invalid non-alphanumeric build constraint: %s", arg) - } - } - } - } - return nil -} - -var ( - nl = []byte("\n") - slashSlash = []byte("//") -) diff --git a/vendor/golang.org/x/tools/go/analysis/passes/cgocall/cgocall.go b/vendor/golang.org/x/tools/go/analysis/passes/cgocall/cgocall.go deleted file mode 100644 index 4e8643975..000000000 --- a/vendor/golang.org/x/tools/go/analysis/passes/cgocall/cgocall.go +++ /dev/null @@ -1,379 +0,0 @@ -// Copyright 2015 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Package cgocall defines an Analyzer that detects some violations of -// the cgo pointer passing rules. 
-package cgocall - -import ( - "fmt" - "go/ast" - "go/format" - "go/parser" - "go/token" - "go/types" - "log" - "os" - "strconv" - - "golang.org/x/tools/go/analysis" - "golang.org/x/tools/go/analysis/passes/internal/analysisutil" - "golang.org/x/tools/go/ast/astutil" -) - -const debug = false - -const Doc = `detect some violations of the cgo pointer passing rules - -Check for invalid cgo pointer passing. -This looks for code that uses cgo to call C code passing values -whose types are almost always invalid according to the cgo pointer -sharing rules. -Specifically, it warns about attempts to pass a Go chan, map, func, -or slice to C, either directly, or via a pointer, array, or struct.` - -var Analyzer = &analysis.Analyzer{ - Name: "cgocall", - Doc: Doc, - URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/cgocall", - RunDespiteErrors: true, - Run: run, -} - -func run(pass *analysis.Pass) (interface{}, error) { - if !analysisutil.Imports(pass.Pkg, "runtime/cgo") { - return nil, nil // doesn't use cgo - } - - cgofiles, info, err := typeCheckCgoSourceFiles(pass.Fset, pass.Pkg, pass.Files, pass.TypesInfo, pass.TypesSizes) - if err != nil { - return nil, err - } - for _, f := range cgofiles { - checkCgo(pass.Fset, f, info, pass.Reportf) - } - return nil, nil -} - -func checkCgo(fset *token.FileSet, f *ast.File, info *types.Info, reportf func(token.Pos, string, ...interface{})) { - ast.Inspect(f, func(n ast.Node) bool { - call, ok := n.(*ast.CallExpr) - if !ok { - return true - } - - // Is this a C.f() call? - var name string - if sel, ok := astutil.Unparen(call.Fun).(*ast.SelectorExpr); ok { - if id, ok := sel.X.(*ast.Ident); ok && id.Name == "C" { - name = sel.Sel.Name - } - } - if name == "" { - return true // not a call we need to check - } - - // A call to C.CBytes passes a pointer but is always safe. - if name == "CBytes" { - return true - } - - if debug { - log.Printf("%s: call to C.%s", fset.Position(call.Lparen), name) - } - - for _, arg := range call.Args { - if !typeOKForCgoCall(cgoBaseType(info, arg), make(map[types.Type]bool)) { - reportf(arg.Pos(), "possibly passing Go type with embedded pointer to C") - break - } - - // Check for passing the address of a bad type. - if conv, ok := arg.(*ast.CallExpr); ok && len(conv.Args) == 1 && - isUnsafePointer(info, conv.Fun) { - arg = conv.Args[0] - } - if u, ok := arg.(*ast.UnaryExpr); ok && u.Op == token.AND { - if !typeOKForCgoCall(cgoBaseType(info, u.X), make(map[types.Type]bool)) { - reportf(arg.Pos(), "possibly passing Go type with embedded pointer to C") - break - } - } - } - return true - }) -} - -// typeCheckCgoSourceFiles returns type-checked syntax trees for the raw -// cgo files of a package (those that import "C"). Such files are not -// Go, so there may be gaps in type information around C.f references. -// -// This checker was initially written in vet to inspect raw cgo source -// files using partial type information. However, Analyzers in the new -// analysis API are presented with the type-checked, "cooked" Go ASTs -// resulting from cgo-processing files, so we must choose between -// working with the cooked file generated by cgo (which was tried but -// proved fragile) or locating the raw cgo file (e.g. from //line -// directives) and working with that, as we now do. -// -// Specifically, we must type-check the raw cgo source files (or at -// least the subtrees needed for this analyzer) in an environment that -// simulates the rest of the already type-checked package. 
-// -// For example, for each raw cgo source file in the original package, -// such as this one: -// -// package p -// import "C" -// import "fmt" -// type T int -// const k = 3 -// var x, y = fmt.Println() -// func f() { ... } -// func g() { ... C.malloc(k) ... } -// func (T) f(int) string { ... } -// -// we synthesize a new ast.File, shown below, that dot-imports the -// original "cooked" package using a special name ("·this·"), so that all -// references to package members resolve correctly. (References to -// unexported names cause an "unexported" error, which we ignore.) -// -// To avoid shadowing names imported from the cooked package, -// package-level declarations in the new source file are modified so -// that they do not declare any names. -// (The cgocall analysis is concerned with uses, not declarations.) -// Specifically, type declarations are discarded; -// all names in each var and const declaration are blanked out; -// each method is turned into a regular function by turning -// the receiver into the first parameter; -// and all functions are renamed to "_". -// -// package p -// import . "·this·" // declares T, k, x, y, f, g, T.f -// import "C" -// import "fmt" -// const _ = 3 -// var _, _ = fmt.Println() -// func _() { ... } -// func _() { ... C.malloc(k) ... } -// func _(T, int) string { ... } -// -// In this way, the raw function bodies and const/var initializer -// expressions are preserved but refer to the "cooked" objects imported -// from "·this·", and none of the transformed package-level declarations -// actually declares anything. In the example above, the reference to k -// in the argument of the call to C.malloc resolves to "·this·".k, which -// has an accurate type. -// -// This approach could in principle be generalized to more complex -// analyses on raw cgo files. One could synthesize a "C" package so that -// C.f would resolve to "·this·"._C_func_f, for example. But we have -// limited ourselves here to preserving function bodies and initializer -// expressions since that is all that the cgocall analyzer needs. -func typeCheckCgoSourceFiles(fset *token.FileSet, pkg *types.Package, files []*ast.File, info *types.Info, sizes types.Sizes) ([]*ast.File, *types.Info, error) { - const thispkg = "·this·" - - // Which files are cgo files? - var cgoFiles []*ast.File - importMap := map[string]*types.Package{thispkg: pkg} - for _, raw := range files { - // If f is a cgo-generated file, Position reports - // the original file, honoring //line directives. - filename := fset.Position(raw.Pos()).Filename - f, err := parser.ParseFile(fset, filename, nil, parser.SkipObjectResolution) - if err != nil { - return nil, nil, fmt.Errorf("can't parse raw cgo file: %v", err) - } - found := false - for _, spec := range f.Imports { - if spec.Path.Value == `"C"` { - found = true - break - } - } - if !found { - continue // not a cgo file - } - - // Record the original import map. - for _, spec := range raw.Imports { - path, _ := strconv.Unquote(spec.Path.Value) - importMap[path] = imported(info, spec) - } - - // Add special dot-import declaration: - // import . "·this·" - var decls []ast.Decl - decls = append(decls, &ast.GenDecl{ - Tok: token.IMPORT, - Specs: []ast.Spec{ - &ast.ImportSpec{ - Name: &ast.Ident{Name: "."}, - Path: &ast.BasicLit{ - Kind: token.STRING, - Value: strconv.Quote(thispkg), - }, - }, - }, - }) - - // Transform declarations from the raw cgo file. 
- for _, decl := range f.Decls { - switch decl := decl.(type) { - case *ast.GenDecl: - switch decl.Tok { - case token.TYPE: - // Discard type declarations. - continue - case token.IMPORT: - // Keep imports. - case token.VAR, token.CONST: - // Blank the declared var/const names. - for _, spec := range decl.Specs { - spec := spec.(*ast.ValueSpec) - for i := range spec.Names { - spec.Names[i].Name = "_" - } - } - } - case *ast.FuncDecl: - // Blank the declared func name. - decl.Name.Name = "_" - - // Turn a method receiver: func (T) f(P) R {...} - // into regular parameter: func _(T, P) R {...} - if decl.Recv != nil { - var params []*ast.Field - params = append(params, decl.Recv.List...) - params = append(params, decl.Type.Params.List...) - decl.Type.Params.List = params - decl.Recv = nil - } - } - decls = append(decls, decl) - } - f.Decls = decls - if debug { - format.Node(os.Stderr, fset, f) // debugging - } - cgoFiles = append(cgoFiles, f) - } - if cgoFiles == nil { - return nil, nil, nil // nothing to do (can't happen?) - } - - // Type-check the synthetic files. - tc := &types.Config{ - FakeImportC: true, - Importer: importerFunc(func(path string) (*types.Package, error) { - return importMap[path], nil - }), - Sizes: sizes, - Error: func(error) {}, // ignore errors (e.g. unused import) - } - setGoVersion(tc, pkg) - - // It's tempting to record the new types in the - // existing pass.TypesInfo, but we don't own it. - altInfo := &types.Info{ - Types: make(map[ast.Expr]types.TypeAndValue), - } - tc.Check(pkg.Path(), fset, cgoFiles, altInfo) - - return cgoFiles, altInfo, nil -} - -// cgoBaseType tries to look through type conversions involving -// unsafe.Pointer to find the real type. It converts: -// -// unsafe.Pointer(x) => x -// *(*unsafe.Pointer)(unsafe.Pointer(&x)) => x -func cgoBaseType(info *types.Info, arg ast.Expr) types.Type { - switch arg := arg.(type) { - case *ast.CallExpr: - if len(arg.Args) == 1 && isUnsafePointer(info, arg.Fun) { - return cgoBaseType(info, arg.Args[0]) - } - case *ast.StarExpr: - call, ok := arg.X.(*ast.CallExpr) - if !ok || len(call.Args) != 1 { - break - } - // Here arg is *f(v). - t := info.Types[call.Fun].Type - if t == nil { - break - } - ptr, ok := t.Underlying().(*types.Pointer) - if !ok { - break - } - // Here arg is *(*p)(v) - elem, ok := ptr.Elem().Underlying().(*types.Basic) - if !ok || elem.Kind() != types.UnsafePointer { - break - } - // Here arg is *(*unsafe.Pointer)(v) - call, ok = call.Args[0].(*ast.CallExpr) - if !ok || len(call.Args) != 1 { - break - } - // Here arg is *(*unsafe.Pointer)(f(v)) - if !isUnsafePointer(info, call.Fun) { - break - } - // Here arg is *(*unsafe.Pointer)(unsafe.Pointer(v)) - u, ok := call.Args[0].(*ast.UnaryExpr) - if !ok || u.Op != token.AND { - break - } - // Here arg is *(*unsafe.Pointer)(unsafe.Pointer(&v)) - return cgoBaseType(info, u.X) - } - - return info.Types[arg].Type -} - -// typeOKForCgoCall reports whether the type of arg is OK to pass to a -// C function using cgo. This is not true for Go types with embedded -// pointers. m is used to avoid infinite recursion on recursive types. 
-func typeOKForCgoCall(t types.Type, m map[types.Type]bool) bool { - if t == nil || m[t] { - return true - } - m[t] = true - switch t := t.Underlying().(type) { - case *types.Chan, *types.Map, *types.Signature, *types.Slice: - return false - case *types.Pointer: - return typeOKForCgoCall(t.Elem(), m) - case *types.Array: - return typeOKForCgoCall(t.Elem(), m) - case *types.Struct: - for i := 0; i < t.NumFields(); i++ { - if !typeOKForCgoCall(t.Field(i).Type(), m) { - return false - } - } - } - return true -} - -func isUnsafePointer(info *types.Info, e ast.Expr) bool { - t := info.Types[e].Type - return t != nil && t.Underlying() == types.Typ[types.UnsafePointer] -} - -type importerFunc func(path string) (*types.Package, error) - -func (f importerFunc) Import(path string) (*types.Package, error) { return f(path) } - -// TODO(adonovan): make this a library function or method of Info. -func imported(info *types.Info, spec *ast.ImportSpec) *types.Package { - obj, ok := info.Implicits[spec] - if !ok { - obj = info.Defs[spec.Name] // renaming import - } - return obj.(*types.PkgName).Imported() -} diff --git a/vendor/golang.org/x/tools/go/analysis/passes/cgocall/cgocall_go120.go b/vendor/golang.org/x/tools/go/analysis/passes/cgocall/cgocall_go120.go deleted file mode 100644 index 06b54946d..000000000 --- a/vendor/golang.org/x/tools/go/analysis/passes/cgocall/cgocall_go120.go +++ /dev/null @@ -1,13 +0,0 @@ -// Copyright 2023 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -//go:build !go1.21 - -package cgocall - -import "go/types" - -func setGoVersion(tc *types.Config, pkg *types.Package) { - // no types.Package.GoVersion until Go 1.21 -} diff --git a/vendor/golang.org/x/tools/go/analysis/passes/cgocall/cgocall_go121.go b/vendor/golang.org/x/tools/go/analysis/passes/cgocall/cgocall_go121.go deleted file mode 100644 index 2a3e1fad2..000000000 --- a/vendor/golang.org/x/tools/go/analysis/passes/cgocall/cgocall_go121.go +++ /dev/null @@ -1,13 +0,0 @@ -// Copyright 2023 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -//go:build go1.21 - -package cgocall - -import "go/types" - -func setGoVersion(tc *types.Config, pkg *types.Package) { - tc.GoVersion = pkg.GoVersion() -} diff --git a/vendor/golang.org/x/tools/go/analysis/passes/composite/composite.go b/vendor/golang.org/x/tools/go/analysis/passes/composite/composite.go deleted file mode 100644 index 6b126f897..000000000 --- a/vendor/golang.org/x/tools/go/analysis/passes/composite/composite.go +++ /dev/null @@ -1,174 +0,0 @@ -// Copyright 2012 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Package composite defines an Analyzer that checks for unkeyed -// composite literals. -package composite - -import ( - "fmt" - "go/ast" - "go/types" - "strings" - - "golang.org/x/tools/go/analysis" - "golang.org/x/tools/go/analysis/passes/inspect" - "golang.org/x/tools/go/ast/inspector" - "golang.org/x/tools/internal/aliases" - "golang.org/x/tools/internal/typeparams" -) - -const Doc = `check for unkeyed composite literals - -This analyzer reports a diagnostic for composite literals of struct -types imported from another package that do not use the field-keyed -syntax. 
Such literals are fragile because the addition of a new field -(even if unexported) to the struct will cause compilation to fail. - -As an example, - - err = &net.DNSConfigError{err} - -should be replaced by: - - err = &net.DNSConfigError{Err: err} -` - -var Analyzer = &analysis.Analyzer{ - Name: "composites", - Doc: Doc, - URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/composite", - Requires: []*analysis.Analyzer{inspect.Analyzer}, - RunDespiteErrors: true, - Run: run, -} - -var whitelist = true - -func init() { - Analyzer.Flags.BoolVar(&whitelist, "whitelist", whitelist, "use composite white list; for testing only") -} - -// runUnkeyedLiteral checks if a composite literal is a struct literal with -// unkeyed fields. -func run(pass *analysis.Pass) (interface{}, error) { - inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) - - nodeFilter := []ast.Node{ - (*ast.CompositeLit)(nil), - } - inspect.Preorder(nodeFilter, func(n ast.Node) { - cl := n.(*ast.CompositeLit) - - typ := pass.TypesInfo.Types[cl].Type - if typ == nil { - // cannot determine composite literals' type, skip it - return - } - typeName := typ.String() - if whitelist && unkeyedLiteral[typeName] { - // skip whitelisted types - return - } - var structuralTypes []types.Type - switch typ := aliases.Unalias(typ).(type) { - case *types.TypeParam: - terms, err := typeparams.StructuralTerms(typ) - if err != nil { - return // invalid type - } - for _, term := range terms { - structuralTypes = append(structuralTypes, term.Type()) - } - default: - structuralTypes = append(structuralTypes, typ) - } - for _, typ := range structuralTypes { - // TODO(adonovan): this operation is questionable. - under := aliases.Unalias(deref(typ.Underlying())) - strct, ok := under.(*types.Struct) - if !ok { - // skip non-struct composite literals - continue - } - if isLocalType(pass, typ) { - // allow unkeyed locally defined composite literal - continue - } - - // check if the struct contains an unkeyed field - allKeyValue := true - var suggestedFixAvailable = len(cl.Elts) == strct.NumFields() - var missingKeys []analysis.TextEdit - for i, e := range cl.Elts { - if _, ok := e.(*ast.KeyValueExpr); !ok { - allKeyValue = false - if i >= strct.NumFields() { - break - } - field := strct.Field(i) - if !field.Exported() { - // Adding unexported field names for structs not defined - // locally will not work. - suggestedFixAvailable = false - break - } - missingKeys = append(missingKeys, analysis.TextEdit{ - Pos: e.Pos(), - End: e.Pos(), - NewText: []byte(fmt.Sprintf("%s: ", field.Name())), - }) - } - } - if allKeyValue { - // all the struct fields are keyed - continue - } - - diag := analysis.Diagnostic{ - Pos: cl.Pos(), - End: cl.End(), - Message: fmt.Sprintf("%s struct literal uses unkeyed fields", typeName), - } - if suggestedFixAvailable { - diag.SuggestedFixes = []analysis.SuggestedFix{{ - Message: "Add field names to struct literal", - TextEdits: missingKeys, - }} - } - pass.Report(diag) - return - } - }) - return nil, nil -} - -// Note: this is not the usual deref operator! -// It strips off all Pointer constructors (and their Aliases). -func deref(typ types.Type) types.Type { - for { - ptr, ok := aliases.Unalias(typ).(*types.Pointer) - if !ok { - break - } - typ = ptr.Elem().Underlying() - } - return typ -} - -// isLocalType reports whether typ belongs to the same package as pass. -// TODO(adonovan): local means "internal to a function"; rename to isSamePackageType. 
-func isLocalType(pass *analysis.Pass, typ types.Type) bool { - switch x := aliases.Unalias(typ).(type) { - case *types.Struct: - // struct literals are local types - return true - case *types.Pointer: - return isLocalType(pass, x.Elem()) - case interface{ Obj() *types.TypeName }: // *Named or *TypeParam (aliases were removed already) - // names in package foo are local to foo_test too - return strings.TrimSuffix(x.Obj().Pkg().Path(), "_test") == strings.TrimSuffix(pass.Pkg.Path(), "_test") - } - return false -} diff --git a/vendor/golang.org/x/tools/go/analysis/passes/composite/whitelist.go b/vendor/golang.org/x/tools/go/analysis/passes/composite/whitelist.go deleted file mode 100644 index f84c1871d..000000000 --- a/vendor/golang.org/x/tools/go/analysis/passes/composite/whitelist.go +++ /dev/null @@ -1,35 +0,0 @@ -// Copyright 2013 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package composite - -// unkeyedLiteral is a white list of types in the standard packages -// that are used with unkeyed literals we deem to be acceptable. -var unkeyedLiteral = map[string]bool{ - // These image and image/color struct types are frozen. We will never add fields to them. - "image/color.Alpha16": true, - "image/color.Alpha": true, - "image/color.CMYK": true, - "image/color.Gray16": true, - "image/color.Gray": true, - "image/color.NRGBA64": true, - "image/color.NRGBA": true, - "image/color.NYCbCrA": true, - "image/color.RGBA64": true, - "image/color.RGBA": true, - "image/color.YCbCr": true, - "image.Point": true, - "image.Rectangle": true, - "image.Uniform": true, - - "unicode.Range16": true, - "unicode.Range32": true, - - // These four structs are used in generated test main files, - // but the generator can be trusted. - "testing.InternalBenchmark": true, - "testing.InternalExample": true, - "testing.InternalTest": true, - "testing.InternalFuzzTarget": true, -} diff --git a/vendor/golang.org/x/tools/go/analysis/passes/copylock/copylock.go b/vendor/golang.org/x/tools/go/analysis/passes/copylock/copylock.go deleted file mode 100644 index 8f39159c0..000000000 --- a/vendor/golang.org/x/tools/go/analysis/passes/copylock/copylock.go +++ /dev/null @@ -1,350 +0,0 @@ -// Copyright 2013 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Package copylock defines an Analyzer that checks for locks -// erroneously passed by value. -package copylock - -import ( - "bytes" - "fmt" - "go/ast" - "go/token" - "go/types" - - "golang.org/x/tools/go/analysis" - "golang.org/x/tools/go/analysis/passes/inspect" - "golang.org/x/tools/go/analysis/passes/internal/analysisutil" - "golang.org/x/tools/go/ast/astutil" - "golang.org/x/tools/go/ast/inspector" - "golang.org/x/tools/internal/aliases" - "golang.org/x/tools/internal/typeparams" -) - -const Doc = `check for locks erroneously passed by value - -Inadvertently copying a value containing a lock, such as sync.Mutex or -sync.WaitGroup, may cause both copies to malfunction. 
Generally such -values should be referred to through a pointer.` - -var Analyzer = &analysis.Analyzer{ - Name: "copylocks", - Doc: Doc, - URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/copylocks", - Requires: []*analysis.Analyzer{inspect.Analyzer}, - RunDespiteErrors: true, - Run: run, -} - -func run(pass *analysis.Pass) (interface{}, error) { - inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) - - nodeFilter := []ast.Node{ - (*ast.AssignStmt)(nil), - (*ast.CallExpr)(nil), - (*ast.CompositeLit)(nil), - (*ast.FuncDecl)(nil), - (*ast.FuncLit)(nil), - (*ast.GenDecl)(nil), - (*ast.RangeStmt)(nil), - (*ast.ReturnStmt)(nil), - } - inspect.Preorder(nodeFilter, func(node ast.Node) { - switch node := node.(type) { - case *ast.RangeStmt: - checkCopyLocksRange(pass, node) - case *ast.FuncDecl: - checkCopyLocksFunc(pass, node.Name.Name, node.Recv, node.Type) - case *ast.FuncLit: - checkCopyLocksFunc(pass, "func", nil, node.Type) - case *ast.CallExpr: - checkCopyLocksCallExpr(pass, node) - case *ast.AssignStmt: - checkCopyLocksAssign(pass, node) - case *ast.GenDecl: - checkCopyLocksGenDecl(pass, node) - case *ast.CompositeLit: - checkCopyLocksCompositeLit(pass, node) - case *ast.ReturnStmt: - checkCopyLocksReturnStmt(pass, node) - } - }) - return nil, nil -} - -// checkCopyLocksAssign checks whether an assignment -// copies a lock. -func checkCopyLocksAssign(pass *analysis.Pass, as *ast.AssignStmt) { - for i, x := range as.Rhs { - if path := lockPathRhs(pass, x); path != nil { - pass.ReportRangef(x, "assignment copies lock value to %v: %v", analysisutil.Format(pass.Fset, as.Lhs[i]), path) - } - } -} - -// checkCopyLocksGenDecl checks whether lock is copied -// in variable declaration. -func checkCopyLocksGenDecl(pass *analysis.Pass, gd *ast.GenDecl) { - if gd.Tok != token.VAR { - return - } - for _, spec := range gd.Specs { - valueSpec := spec.(*ast.ValueSpec) - for i, x := range valueSpec.Values { - if path := lockPathRhs(pass, x); path != nil { - pass.ReportRangef(x, "variable declaration copies lock value to %v: %v", valueSpec.Names[i].Name, path) - } - } - } -} - -// checkCopyLocksCompositeLit detects lock copy inside a composite literal -func checkCopyLocksCompositeLit(pass *analysis.Pass, cl *ast.CompositeLit) { - for _, x := range cl.Elts { - if node, ok := x.(*ast.KeyValueExpr); ok { - x = node.Value - } - if path := lockPathRhs(pass, x); path != nil { - pass.ReportRangef(x, "literal copies lock value from %v: %v", analysisutil.Format(pass.Fset, x), path) - } - } -} - -// checkCopyLocksReturnStmt detects lock copy in return statement -func checkCopyLocksReturnStmt(pass *analysis.Pass, rs *ast.ReturnStmt) { - for _, x := range rs.Results { - if path := lockPathRhs(pass, x); path != nil { - pass.ReportRangef(x, "return copies lock value: %v", path) - } - } -} - -// checkCopyLocksCallExpr detects lock copy in the arguments to a function call -func checkCopyLocksCallExpr(pass *analysis.Pass, ce *ast.CallExpr) { - var id *ast.Ident - switch fun := ce.Fun.(type) { - case *ast.Ident: - id = fun - case *ast.SelectorExpr: - id = fun.Sel - } - if fun, ok := pass.TypesInfo.Uses[id].(*types.Builtin); ok { - switch fun.Name() { - case "new", "len", "cap", "Sizeof", "Offsetof", "Alignof": - return - } - } - for _, x := range ce.Args { - if path := lockPathRhs(pass, x); path != nil { - pass.ReportRangef(x, "call of %s copies lock value: %v", analysisutil.Format(pass.Fset, ce.Fun), path) - } - } -} - -// checkCopyLocksFunc checks whether a function might -// inadvertently copy a 
lock, by checking whether -// its receiver, parameters, or return values -// are locks. -func checkCopyLocksFunc(pass *analysis.Pass, name string, recv *ast.FieldList, typ *ast.FuncType) { - if recv != nil && len(recv.List) > 0 { - expr := recv.List[0].Type - if path := lockPath(pass.Pkg, pass.TypesInfo.Types[expr].Type, nil); path != nil { - pass.ReportRangef(expr, "%s passes lock by value: %v", name, path) - } - } - - if typ.Params != nil { - for _, field := range typ.Params.List { - expr := field.Type - if path := lockPath(pass.Pkg, pass.TypesInfo.Types[expr].Type, nil); path != nil { - pass.ReportRangef(expr, "%s passes lock by value: %v", name, path) - } - } - } - - // Don't check typ.Results. If T has a Lock field it's OK to write - // return T{} - // because that is returning the zero value. Leave result checking - // to the return statement. -} - -// checkCopyLocksRange checks whether a range statement -// might inadvertently copy a lock by checking whether -// any of the range variables are locks. -func checkCopyLocksRange(pass *analysis.Pass, r *ast.RangeStmt) { - checkCopyLocksRangeVar(pass, r.Tok, r.Key) - checkCopyLocksRangeVar(pass, r.Tok, r.Value) -} - -func checkCopyLocksRangeVar(pass *analysis.Pass, rtok token.Token, e ast.Expr) { - if e == nil { - return - } - id, isId := e.(*ast.Ident) - if isId && id.Name == "_" { - return - } - - var typ types.Type - if rtok == token.DEFINE { - if !isId { - return - } - obj := pass.TypesInfo.Defs[id] - if obj == nil { - return - } - typ = obj.Type() - } else { - typ = pass.TypesInfo.Types[e].Type - } - - if typ == nil { - return - } - if path := lockPath(pass.Pkg, typ, nil); path != nil { - pass.Reportf(e.Pos(), "range var %s copies lock: %v", analysisutil.Format(pass.Fset, e), path) - } -} - -type typePath []string - -// String pretty-prints a typePath. -func (path typePath) String() string { - n := len(path) - var buf bytes.Buffer - for i := range path { - if i > 0 { - fmt.Fprint(&buf, " contains ") - } - // The human-readable path is in reverse order, outermost to innermost. - fmt.Fprint(&buf, path[n-i-1]) - } - return buf.String() -} - -func lockPathRhs(pass *analysis.Pass, x ast.Expr) typePath { - x = astutil.Unparen(x) // ignore parens on rhs - - if _, ok := x.(*ast.CompositeLit); ok { - return nil - } - if _, ok := x.(*ast.CallExpr); ok { - // A call may return a zero value. - return nil - } - if star, ok := x.(*ast.StarExpr); ok { - if _, ok := astutil.Unparen(star.X).(*ast.CallExpr); ok { - // A call may return a pointer to a zero value. - return nil - } - } - return lockPath(pass.Pkg, pass.TypesInfo.Types[x].Type, nil) -} - -// lockPath returns a typePath describing the location of a lock value -// contained in typ. If there is no contained lock, it returns nil. -// -// The seen map is used to short-circuit infinite recursion due to type cycles. -func lockPath(tpkg *types.Package, typ types.Type, seen map[types.Type]bool) typePath { - if typ == nil || seen[typ] { - return nil - } - if seen == nil { - seen = make(map[types.Type]bool) - } - seen[typ] = true - - if tpar, ok := aliases.Unalias(typ).(*types.TypeParam); ok { - terms, err := typeparams.StructuralTerms(tpar) - if err != nil { - return nil // invalid type - } - for _, term := range terms { - subpath := lockPath(tpkg, term.Type(), seen) - if len(subpath) > 0 { - if term.Tilde() { - // Prepend a tilde to our lock path entry to clarify the resulting - // diagnostic message. 
Consider the following example: - // - // func _[Mutex interface{ ~sync.Mutex; M() }](m Mutex) {} - // - // Here the naive error message will be something like "passes lock - // by value: Mutex contains sync.Mutex". This is misleading because - // the local type parameter doesn't actually contain sync.Mutex, - // which lacks the M method. - // - // With tilde, it is clearer that the containment is via an - // approximation element. - subpath[len(subpath)-1] = "~" + subpath[len(subpath)-1] - } - return append(subpath, typ.String()) - } - } - return nil - } - - for { - atyp, ok := typ.Underlying().(*types.Array) - if !ok { - break - } - typ = atyp.Elem() - } - - ttyp, ok := typ.Underlying().(*types.Tuple) - if ok { - for i := 0; i < ttyp.Len(); i++ { - subpath := lockPath(tpkg, ttyp.At(i).Type(), seen) - if subpath != nil { - return append(subpath, typ.String()) - } - } - return nil - } - - // We're only interested in the case in which the underlying - // type is a struct. (Interfaces and pointers are safe to copy.) - styp, ok := typ.Underlying().(*types.Struct) - if !ok { - return nil - } - - // We're looking for cases in which a pointer to this type - // is a sync.Locker, but a value is not. This differentiates - // embedded interfaces from embedded values. - if types.Implements(types.NewPointer(typ), lockerType) && !types.Implements(typ, lockerType) { - return []string{typ.String()} - } - - // In go1.10, sync.noCopy did not implement Locker. - // (The Unlock method was added only in CL 121876.) - // TODO(adonovan): remove workaround when we drop go1.10. - if analysisutil.IsNamedType(typ, "sync", "noCopy") { - return []string{typ.String()} - } - - nfields := styp.NumFields() - for i := 0; i < nfields; i++ { - ftyp := styp.Field(i).Type() - subpath := lockPath(tpkg, ftyp, seen) - if subpath != nil { - return append(subpath, typ.String()) - } - } - - return nil -} - -var lockerType *types.Interface - -// Construct a sync.Locker interface type. -func init() { - nullary := types.NewSignature(nil, nil, nil, false) // func() - methods := []*types.Func{ - types.NewFunc(token.NoPos, nil, "Lock", nullary), - types.NewFunc(token.NoPos, nil, "Unlock", nullary), - } - lockerType = types.NewInterface(methods, nil).Complete() -} diff --git a/vendor/golang.org/x/tools/go/analysis/passes/ctrlflow/ctrlflow.go b/vendor/golang.org/x/tools/go/analysis/passes/ctrlflow/ctrlflow.go deleted file mode 100644 index d21adeee9..000000000 --- a/vendor/golang.org/x/tools/go/analysis/passes/ctrlflow/ctrlflow.go +++ /dev/null @@ -1,231 +0,0 @@ -// Copyright 2018 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Package ctrlflow is an analysis that provides a syntactic -// control-flow graph (CFG) for the body of a function. -// It records whether a function cannot return. -// By itself, it does not report any diagnostics. 
-package ctrlflow - -import ( - "go/ast" - "go/types" - "log" - "reflect" - - "golang.org/x/tools/go/analysis" - "golang.org/x/tools/go/analysis/passes/inspect" - "golang.org/x/tools/go/ast/inspector" - "golang.org/x/tools/go/cfg" - "golang.org/x/tools/go/types/typeutil" -) - -var Analyzer = &analysis.Analyzer{ - Name: "ctrlflow", - Doc: "build a control-flow graph", - URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/ctrlflow", - Run: run, - ResultType: reflect.TypeOf(new(CFGs)), - FactTypes: []analysis.Fact{new(noReturn)}, - Requires: []*analysis.Analyzer{inspect.Analyzer}, -} - -// noReturn is a fact indicating that a function does not return. -type noReturn struct{} - -func (*noReturn) AFact() {} - -func (*noReturn) String() string { return "noReturn" } - -// A CFGs holds the control-flow graphs -// for all the functions of the current package. -type CFGs struct { - defs map[*ast.Ident]types.Object // from Pass.TypesInfo.Defs - funcDecls map[*types.Func]*declInfo - funcLits map[*ast.FuncLit]*litInfo - pass *analysis.Pass // transient; nil after construction -} - -// CFGs has two maps: funcDecls for named functions and funcLits for -// unnamed ones. Unlike funcLits, the funcDecls map is not keyed by its -// syntax node, *ast.FuncDecl, because callMayReturn needs to do a -// look-up by *types.Func, and you can get from an *ast.FuncDecl to a -// *types.Func but not the other way. - -type declInfo struct { - decl *ast.FuncDecl - cfg *cfg.CFG // iff decl.Body != nil - started bool // to break cycles - noReturn bool -} - -type litInfo struct { - cfg *cfg.CFG - noReturn bool -} - -// FuncDecl returns the control-flow graph for a named function. -// It returns nil if decl.Body==nil. -func (c *CFGs) FuncDecl(decl *ast.FuncDecl) *cfg.CFG { - if decl.Body == nil { - return nil - } - fn := c.defs[decl.Name].(*types.Func) - return c.funcDecls[fn].cfg -} - -// FuncLit returns the control-flow graph for a literal function. -func (c *CFGs) FuncLit(lit *ast.FuncLit) *cfg.CFG { - return c.funcLits[lit].cfg -} - -func run(pass *analysis.Pass) (interface{}, error) { - inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) - - // Because CFG construction consumes and produces noReturn - // facts, CFGs for exported FuncDecls must be built before 'run' - // returns; we cannot construct them lazily. - // (We could build CFGs for FuncLits lazily, - // but the benefit is marginal.) - - // Pass 1. Map types.Funcs to ast.FuncDecls in this package. - funcDecls := make(map[*types.Func]*declInfo) // functions and methods - funcLits := make(map[*ast.FuncLit]*litInfo) - - var decls []*types.Func // keys(funcDecls), in order - var lits []*ast.FuncLit // keys(funcLits), in order - - nodeFilter := []ast.Node{ - (*ast.FuncDecl)(nil), - (*ast.FuncLit)(nil), - } - inspect.Preorder(nodeFilter, func(n ast.Node) { - switch n := n.(type) { - case *ast.FuncDecl: - // Type information may be incomplete. - if fn, ok := pass.TypesInfo.Defs[n.Name].(*types.Func); ok { - funcDecls[fn] = &declInfo{decl: n} - decls = append(decls, fn) - } - case *ast.FuncLit: - funcLits[n] = new(litInfo) - lits = append(lits, n) - } - }) - - c := &CFGs{ - defs: pass.TypesInfo.Defs, - funcDecls: funcDecls, - funcLits: funcLits, - pass: pass, - } - - // Pass 2. Build CFGs. - - // Build CFGs for named functions. - // Cycles in the static call graph are broken - // arbitrarily but deterministically. - // We create noReturn facts as discovered. 
- for _, fn := range decls { - c.buildDecl(fn, funcDecls[fn]) - } - - // Build CFGs for literal functions. - // These aren't relevant to facts (since they aren't named) - // but are required for the CFGs.FuncLit API. - for _, lit := range lits { - li := funcLits[lit] - if li.cfg == nil { - li.cfg = cfg.New(lit.Body, c.callMayReturn) - if !hasReachableReturn(li.cfg) { - li.noReturn = true - } - } - } - - // All CFGs are now built. - c.pass = nil - - return c, nil -} - -// di.cfg may be nil on return. -func (c *CFGs) buildDecl(fn *types.Func, di *declInfo) { - // buildDecl may call itself recursively for the same function, - // because cfg.New is passed the callMayReturn method, which - // builds the CFG of the callee, leading to recursion. - // The buildDecl call tree thus resembles the static call graph. - // We mark each node when we start working on it to break cycles. - - if !di.started { // break cycle - di.started = true - - if isIntrinsicNoReturn(fn) { - di.noReturn = true - } - if di.decl.Body != nil { - di.cfg = cfg.New(di.decl.Body, c.callMayReturn) - if !hasReachableReturn(di.cfg) { - di.noReturn = true - } - } - if di.noReturn { - c.pass.ExportObjectFact(fn, new(noReturn)) - } - - // debugging - if false { - log.Printf("CFG for %s:\n%s (noreturn=%t)\n", fn, di.cfg.Format(c.pass.Fset), di.noReturn) - } - } -} - -// callMayReturn reports whether the called function may return. -// It is passed to the CFG builder. -func (c *CFGs) callMayReturn(call *ast.CallExpr) (r bool) { - if id, ok := call.Fun.(*ast.Ident); ok && c.pass.TypesInfo.Uses[id] == panicBuiltin { - return false // panic never returns - } - - // Is this a static call? Also includes static functions - // parameterized by a type. Such functions may or may not - // return depending on the parameter type, but in some - // cases the answer is definite. We let ctrlflow figure - // that out. - fn := typeutil.StaticCallee(c.pass.TypesInfo, call) - if fn == nil { - return true // callee not statically known; be conservative - } - - // Function or method declared in this package? - if di, ok := c.funcDecls[fn]; ok { - c.buildDecl(fn, di) - return !di.noReturn - } - - // Not declared in this package. - // Is there a fact from another package? - return !c.pass.ImportObjectFact(fn, new(noReturn)) -} - -var panicBuiltin = types.Universe.Lookup("panic").(*types.Builtin) - -func hasReachableReturn(g *cfg.CFG) bool { - for _, b := range g.Blocks { - if b.Live && b.Return() != nil { - return true - } - } - return false -} - -// isIntrinsicNoReturn reports whether a function intrinsically never -// returns because it stops execution of the calling thread. -// It is the base case in the recursion. -func isIntrinsicNoReturn(fn *types.Func) bool { - // Add functions here as the need arises, but don't allocate memory. - path, name := fn.Pkg().Path(), fn.Name() - return path == "syscall" && (name == "Exit" || name == "ExitProcess" || name == "ExitThread") || - path == "runtime" && name == "Goexit" -} diff --git a/vendor/golang.org/x/tools/go/analysis/passes/deepequalerrors/deepequalerrors.go b/vendor/golang.org/x/tools/go/analysis/passes/deepequalerrors/deepequalerrors.go deleted file mode 100644 index 95cd9a061..000000000 --- a/vendor/golang.org/x/tools/go/analysis/passes/deepequalerrors/deepequalerrors.go +++ /dev/null @@ -1,119 +0,0 @@ -// Copyright 2019 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
- -// Package deepequalerrors defines an Analyzer that checks for the use -// of reflect.DeepEqual with error values. -package deepequalerrors - -import ( - "go/ast" - "go/types" - - "golang.org/x/tools/go/analysis" - "golang.org/x/tools/go/analysis/passes/inspect" - "golang.org/x/tools/go/analysis/passes/internal/analysisutil" - "golang.org/x/tools/go/ast/inspector" - "golang.org/x/tools/go/types/typeutil" - "golang.org/x/tools/internal/aliases" -) - -const Doc = `check for calls of reflect.DeepEqual on error values - -The deepequalerrors checker looks for calls of the form: - - reflect.DeepEqual(err1, err2) - -where err1 and err2 are errors. Using reflect.DeepEqual to compare -errors is discouraged.` - -var Analyzer = &analysis.Analyzer{ - Name: "deepequalerrors", - Doc: Doc, - URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/deepequalerrors", - Requires: []*analysis.Analyzer{inspect.Analyzer}, - Run: run, -} - -func run(pass *analysis.Pass) (interface{}, error) { - if !analysisutil.Imports(pass.Pkg, "reflect") { - return nil, nil // doesn't directly import reflect - } - - inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) - - nodeFilter := []ast.Node{ - (*ast.CallExpr)(nil), - } - inspect.Preorder(nodeFilter, func(n ast.Node) { - call := n.(*ast.CallExpr) - fn, _ := typeutil.Callee(pass.TypesInfo, call).(*types.Func) - if analysisutil.IsFunctionNamed(fn, "reflect", "DeepEqual") && hasError(pass, call.Args[0]) && hasError(pass, call.Args[1]) { - pass.ReportRangef(call, "avoid using reflect.DeepEqual with errors") - } - }) - return nil, nil -} - -var errorType = types.Universe.Lookup("error").Type().Underlying().(*types.Interface) - -// hasError reports whether the type of e contains the type error. -// See containsError, below, for the meaning of "contains". -func hasError(pass *analysis.Pass, e ast.Expr) bool { - tv, ok := pass.TypesInfo.Types[e] - if !ok { // no type info, assume good - return false - } - return containsError(tv.Type) -} - -// Report whether any type that typ could store and that could be compared is the -// error type. This includes typ itself, as well as the types of struct field, slice -// and array elements, map keys and elements, and pointers. It does not include -// channel types (incomparable), arg and result types of a Signature (not stored), or -// methods of a named or interface type (not stored). -func containsError(typ types.Type) bool { - // Track types being processed, to avoid infinite recursion. - // Using types as keys here is OK because we are checking for the identical pointer, not - // type identity. See analysis/passes/printf/types.go. - inProgress := make(map[types.Type]bool) - - var check func(t types.Type) bool - check = func(t types.Type) bool { - if t == errorType { - return true - } - if inProgress[t] { - return false - } - inProgress[t] = true - switch t := t.(type) { - case *types.Pointer: - return check(t.Elem()) - case *types.Slice: - return check(t.Elem()) - case *types.Array: - return check(t.Elem()) - case *types.Map: - return check(t.Key()) || check(t.Elem()) - case *types.Struct: - for i := 0; i < t.NumFields(); i++ { - if check(t.Field(i).Type()) { - return true - } - } - case *types.Named, *aliases.Alias: - return check(t.Underlying()) - - // We list the remaining valid type kinds for completeness. 
- case *types.Basic: - case *types.Chan: // channels store values, but they are not comparable - case *types.Signature: - case *types.Tuple: // tuples are only part of signatures - case *types.Interface: - } - return false - } - - return check(typ) -} diff --git a/vendor/golang.org/x/tools/go/analysis/passes/defers/defers.go b/vendor/golang.org/x/tools/go/analysis/passes/defers/defers.go deleted file mode 100644 index 5e8e80a6a..000000000 --- a/vendor/golang.org/x/tools/go/analysis/passes/defers/defers.go +++ /dev/null @@ -1,59 +0,0 @@ -// Copyright 2023 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package defers - -import ( - _ "embed" - "go/ast" - - "golang.org/x/tools/go/analysis" - "golang.org/x/tools/go/analysis/passes/inspect" - "golang.org/x/tools/go/analysis/passes/internal/analysisutil" - "golang.org/x/tools/go/ast/inspector" - "golang.org/x/tools/go/types/typeutil" -) - -//go:embed doc.go -var doc string - -// Analyzer is the defers analyzer. -var Analyzer = &analysis.Analyzer{ - Name: "defers", - Requires: []*analysis.Analyzer{inspect.Analyzer}, - URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/defers", - Doc: analysisutil.MustExtractDoc(doc, "defers"), - Run: run, -} - -func run(pass *analysis.Pass) (interface{}, error) { - if !analysisutil.Imports(pass.Pkg, "time") { - return nil, nil - } - - checkDeferCall := func(node ast.Node) bool { - switch v := node.(type) { - case *ast.CallExpr: - if analysisutil.IsFunctionNamed(typeutil.StaticCallee(pass.TypesInfo, v), "time", "Since") { - pass.Reportf(v.Pos(), "call to time.Since is not deferred") - } - case *ast.FuncLit: - return false // prune - } - return true - } - - inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) - - nodeFilter := []ast.Node{ - (*ast.DeferStmt)(nil), - } - - inspect.Preorder(nodeFilter, func(n ast.Node) { - d := n.(*ast.DeferStmt) - ast.Inspect(d.Call, checkDeferCall) - }) - - return nil, nil -} diff --git a/vendor/golang.org/x/tools/go/analysis/passes/defers/doc.go b/vendor/golang.org/x/tools/go/analysis/passes/defers/doc.go deleted file mode 100644 index bdb135162..000000000 --- a/vendor/golang.org/x/tools/go/analysis/passes/defers/doc.go +++ /dev/null @@ -1,25 +0,0 @@ -// Copyright 2023 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Package defers defines an Analyzer that checks for common mistakes in defer -// statements. -// -// # Analyzer defers -// -// defers: report common mistakes in defer statements -// -// The defers analyzer reports a diagnostic when a defer statement would -// result in a non-deferred call to time.Since, as experience has shown -// that this is nearly always a mistake. -// -// For example: -// -// start := time.Now() -// ... -// defer recordLatency(time.Since(start)) // error: call to time.Since is not deferred -// -// The correct code is: -// -// defer func() { recordLatency(time.Since(start)) }() -package defers diff --git a/vendor/golang.org/x/tools/go/analysis/passes/directive/directive.go b/vendor/golang.org/x/tools/go/analysis/passes/directive/directive.go deleted file mode 100644 index 2691f189a..000000000 --- a/vendor/golang.org/x/tools/go/analysis/passes/directive/directive.go +++ /dev/null @@ -1,209 +0,0 @@ -// Copyright 2023 The Go Authors. All rights reserved. 
-// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Package directive defines an Analyzer that checks known Go toolchain directives. -package directive - -import ( - "go/ast" - "go/parser" - "go/token" - "strings" - "unicode" - "unicode/utf8" - - "golang.org/x/tools/go/analysis" - "golang.org/x/tools/go/analysis/passes/internal/analysisutil" -) - -const Doc = `check Go toolchain directives such as //go:debug - -This analyzer checks for problems with known Go toolchain directives -in all Go source files in a package directory, even those excluded by -//go:build constraints, and all non-Go source files too. - -For //go:debug (see https://go.dev/doc/godebug), the analyzer checks -that the directives are placed only in Go source files, only above the -package comment, and only in package main or *_test.go files. - -Support for other known directives may be added in the future. - -This analyzer does not check //go:build, which is handled by the -buildtag analyzer. -` - -var Analyzer = &analysis.Analyzer{ - Name: "directive", - Doc: Doc, - URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/directive", - Run: runDirective, -} - -func runDirective(pass *analysis.Pass) (interface{}, error) { - for _, f := range pass.Files { - checkGoFile(pass, f) - } - for _, name := range pass.OtherFiles { - if err := checkOtherFile(pass, name); err != nil { - return nil, err - } - } - for _, name := range pass.IgnoredFiles { - if strings.HasSuffix(name, ".go") { - f, err := parser.ParseFile(pass.Fset, name, nil, parser.ParseComments) - if err != nil { - // Not valid Go source code - not our job to diagnose, so ignore. - continue - } - checkGoFile(pass, f) - } else { - if err := checkOtherFile(pass, name); err != nil { - return nil, err - } - } - } - return nil, nil -} - -func checkGoFile(pass *analysis.Pass, f *ast.File) { - check := newChecker(pass, pass.Fset.File(f.Package).Name(), f) - - for _, group := range f.Comments { - // A +build comment is ignored after or adjoining the package declaration. - if group.End()+1 >= f.Package { - check.inHeader = false - } - // A //go:build comment is ignored after the package declaration - // (but adjoining it is OK, in contrast to +build comments). - if group.Pos() >= f.Package { - check.inHeader = false - } - - // Check each line of a //-comment. - for _, c := range group.List { - check.comment(c.Slash, c.Text) - } - } -} - -func checkOtherFile(pass *analysis.Pass, filename string) error { - // We cannot use the Go parser, since is not a Go source file. - // Read the raw bytes instead. - content, tf, err := analysisutil.ReadFile(pass.Fset, filename) - if err != nil { - return err - } - - check := newChecker(pass, filename, nil) - check.nonGoFile(token.Pos(tf.Base()), string(content)) - return nil -} - -type checker struct { - pass *analysis.Pass - filename string - file *ast.File // nil for non-Go file - inHeader bool // in file header (before package declaration) - inStar bool // currently in a /* */ comment -} - -func newChecker(pass *analysis.Pass, filename string, file *ast.File) *checker { - return &checker{ - pass: pass, - filename: filename, - file: file, - inHeader: true, - } -} - -func (check *checker) nonGoFile(pos token.Pos, fullText string) { - // Process each line. 
- text := fullText - inStar := false - for text != "" { - offset := len(fullText) - len(text) - var line string - line, text, _ = strings.Cut(text, "\n") - - if !inStar && strings.HasPrefix(line, "//") { - check.comment(pos+token.Pos(offset), line) - continue - } - - // Skip over, cut out any /* */ comments, - // to avoid being confused by a commented-out // comment. - for { - line = strings.TrimSpace(line) - if inStar { - var ok bool - _, line, ok = strings.Cut(line, "*/") - if !ok { - break - } - inStar = false - continue - } - line, inStar = stringsCutPrefix(line, "/*") - if !inStar { - break - } - } - if line != "" { - // Found non-comment non-blank line. - // Ends space for valid //go:build comments, - // but also ends the fraction of the file we can - // reliably parse. From this point on we might - // incorrectly flag "comments" inside multiline - // string constants or anything else (this might - // not even be a Go program). So stop. - break - } - } -} - -func (check *checker) comment(pos token.Pos, line string) { - if !strings.HasPrefix(line, "//go:") { - return - } - // testing hack: stop at // ERROR - if i := strings.Index(line, " // ERROR "); i >= 0 { - line = line[:i] - } - - verb := line - if i := strings.IndexFunc(verb, unicode.IsSpace); i >= 0 { - verb = verb[:i] - if line[i] != ' ' && line[i] != '\t' && line[i] != '\n' { - r, _ := utf8.DecodeRuneInString(line[i:]) - check.pass.Reportf(pos, "invalid space %#q in %s directive", r, verb) - } - } - - switch verb { - default: - // TODO: Use the go language version for the file. - // If that version is not newer than us, then we can - // report unknown directives. - - case "//go:build": - // Ignore. The buildtag analyzer reports misplaced comments. - - case "//go:debug": - if check.file == nil { - check.pass.Reportf(pos, "//go:debug directive only valid in Go source files") - } else if check.file.Name.Name != "main" && !strings.HasSuffix(check.filename, "_test.go") { - check.pass.Reportf(pos, "//go:debug directive only valid in package main or test") - } else if !check.inHeader { - check.pass.Reportf(pos, "//go:debug directive only valid before package declaration") - } - } -} - -// Go 1.20 strings.CutPrefix. -func stringsCutPrefix(s, prefix string) (after string, found bool) { - if !strings.HasPrefix(s, prefix) { - return s, false - } - return s[len(prefix):], true -} diff --git a/vendor/golang.org/x/tools/go/analysis/passes/errorsas/errorsas.go b/vendor/golang.org/x/tools/go/analysis/passes/errorsas/errorsas.go deleted file mode 100644 index 7f62ad4c8..000000000 --- a/vendor/golang.org/x/tools/go/analysis/passes/errorsas/errorsas.go +++ /dev/null @@ -1,89 +0,0 @@ -// Copyright 2018 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// The errorsas package defines an Analyzer that checks that the second argument to -// errors.As is a pointer to a type implementing error. 
-package errorsas - -import ( - "errors" - "go/ast" - "go/types" - - "golang.org/x/tools/go/analysis" - "golang.org/x/tools/go/analysis/passes/inspect" - "golang.org/x/tools/go/analysis/passes/internal/analysisutil" - "golang.org/x/tools/go/ast/inspector" - "golang.org/x/tools/go/types/typeutil" -) - -const Doc = `report passing non-pointer or non-error values to errors.As - -The errorsas analysis reports calls to errors.As where the type -of the second argument is not a pointer to a type implementing error.` - -var Analyzer = &analysis.Analyzer{ - Name: "errorsas", - Doc: Doc, - URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/errorsas", - Requires: []*analysis.Analyzer{inspect.Analyzer}, - Run: run, -} - -func run(pass *analysis.Pass) (interface{}, error) { - switch pass.Pkg.Path() { - case "errors", "errors_test": - // These packages know how to use their own APIs. - // Sometimes they are testing what happens to incorrect programs. - return nil, nil - } - - if !analysisutil.Imports(pass.Pkg, "errors") { - return nil, nil // doesn't directly import errors - } - - inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) - - nodeFilter := []ast.Node{ - (*ast.CallExpr)(nil), - } - inspect.Preorder(nodeFilter, func(n ast.Node) { - call := n.(*ast.CallExpr) - fn := typeutil.StaticCallee(pass.TypesInfo, call) - if !analysisutil.IsFunctionNamed(fn, "errors", "As") { - return - } - if len(call.Args) < 2 { - return // not enough arguments, e.g. called with return values of another function - } - if err := checkAsTarget(pass, call.Args[1]); err != nil { - pass.ReportRangef(call, "%v", err) - } - }) - return nil, nil -} - -var errorType = types.Universe.Lookup("error").Type() - -// checkAsTarget reports an error if the second argument to errors.As is invalid. -func checkAsTarget(pass *analysis.Pass, e ast.Expr) error { - t := pass.TypesInfo.Types[e].Type - if it, ok := t.Underlying().(*types.Interface); ok && it.NumMethods() == 0 { - // A target of interface{} is always allowed, since it often indicates - // a value forwarded from another source. - return nil - } - pt, ok := t.Underlying().(*types.Pointer) - if !ok { - return errors.New("second argument to errors.As must be a non-nil pointer to either a type that implements error, or to any interface type") - } - if pt.Elem() == errorType { - return errors.New("second argument to errors.As should not be *error") - } - _, ok = pt.Elem().Underlying().(*types.Interface) - if ok || types.Implements(pt.Elem(), errorType.Underlying().(*types.Interface)) { - return nil - } - return errors.New("second argument to errors.As must be a non-nil pointer to either a type that implements error, or to any interface type") -} diff --git a/vendor/golang.org/x/tools/go/analysis/passes/fieldalignment/fieldalignment.go b/vendor/golang.org/x/tools/go/analysis/passes/fieldalignment/fieldalignment.go deleted file mode 100644 index 012e2ecd0..000000000 --- a/vendor/golang.org/x/tools/go/analysis/passes/fieldalignment/fieldalignment.go +++ /dev/null @@ -1,374 +0,0 @@ -// Copyright 2020 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Package fieldalignment defines an Analyzer that detects structs that would use less -// memory if their fields were sorted. 
-package fieldalignment - -import ( - "bytes" - "fmt" - "go/ast" - "go/format" - "go/token" - "go/types" - "sort" - - "golang.org/x/tools/go/analysis" - "golang.org/x/tools/go/analysis/passes/inspect" - "golang.org/x/tools/go/ast/inspector" -) - -const Doc = `find structs that would use less memory if their fields were sorted - -This analyzer find structs that can be rearranged to use less memory, and provides -a suggested edit with the most compact order. - -Note that there are two different diagnostics reported. One checks struct size, -and the other reports "pointer bytes" used. Pointer bytes is how many bytes of the -object that the garbage collector has to potentially scan for pointers, for example: - - struct { uint32; string } - -have 16 pointer bytes because the garbage collector has to scan up through the string's -inner pointer. - - struct { string; *uint32 } - -has 24 pointer bytes because it has to scan further through the *uint32. - - struct { string; uint32 } - -has 8 because it can stop immediately after the string pointer. - -Be aware that the most compact order is not always the most efficient. -In rare cases it may cause two variables each updated by its own goroutine -to occupy the same CPU cache line, inducing a form of memory contention -known as "false sharing" that slows down both goroutines. -` - -var Analyzer = &analysis.Analyzer{ - Name: "fieldalignment", - Doc: Doc, - URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/fieldalignment", - Requires: []*analysis.Analyzer{inspect.Analyzer}, - Run: run, -} - -func run(pass *analysis.Pass) (interface{}, error) { - inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) - nodeFilter := []ast.Node{ - (*ast.StructType)(nil), - } - inspect.Preorder(nodeFilter, func(node ast.Node) { - var s *ast.StructType - var ok bool - if s, ok = node.(*ast.StructType); !ok { - return - } - if tv, ok := pass.TypesInfo.Types[s]; ok { - fieldalignment(pass, s, tv.Type.(*types.Struct)) - } - }) - return nil, nil -} - -var unsafePointerTyp = types.Unsafe.Scope().Lookup("Pointer").(*types.TypeName).Type() - -func fieldalignment(pass *analysis.Pass, node *ast.StructType, typ *types.Struct) { - wordSize := pass.TypesSizes.Sizeof(unsafePointerTyp) - maxAlign := pass.TypesSizes.Alignof(unsafePointerTyp) - - s := gcSizes{wordSize, maxAlign} - optimal, indexes := optimalOrder(typ, &s) - optsz, optptrs := s.Sizeof(optimal), s.ptrdata(optimal) - - var message string - if sz := s.Sizeof(typ); sz != optsz { - message = fmt.Sprintf("struct of size %d could be %d", sz, optsz) - } else if ptrs := s.ptrdata(typ); ptrs != optptrs { - message = fmt.Sprintf("struct with %d pointer bytes could be %d", ptrs, optptrs) - } else { - // Already optimal order. - return - } - - // Flatten the ast node since it could have multiple field names per list item while - // *types.Struct only have one item per field. - // TODO: Preserve multi-named fields instead of flattening. - var flat []*ast.Field - for _, f := range node.Fields.List { - // TODO: Preserve comment, for now get rid of them. - // See https://github.com/golang/go/issues/20744 - f.Comment = nil - f.Doc = nil - if len(f.Names) <= 1 { - flat = append(flat, f) - continue - } - for _, name := range f.Names { - flat = append(flat, &ast.Field{ - Names: []*ast.Ident{name}, - Type: f.Type, - }) - } - } - - // Sort fields according to the optimal order. 
- var reordered []*ast.Field - for _, index := range indexes { - reordered = append(reordered, flat[index]) - } - - newStr := &ast.StructType{ - Fields: &ast.FieldList{ - List: reordered, - }, - } - - // Write the newly aligned struct node to get the content for suggested fixes. - var buf bytes.Buffer - if err := format.Node(&buf, token.NewFileSet(), newStr); err != nil { - return - } - - pass.Report(analysis.Diagnostic{ - Pos: node.Pos(), - End: node.Pos() + token.Pos(len("struct")), - Message: message, - SuggestedFixes: []analysis.SuggestedFix{{ - Message: "Rearrange fields", - TextEdits: []analysis.TextEdit{{ - Pos: node.Pos(), - End: node.End(), - NewText: buf.Bytes(), - }}, - }}, - }) -} - -func optimalOrder(str *types.Struct, sizes *gcSizes) (*types.Struct, []int) { - nf := str.NumFields() - - type elem struct { - index int - alignof int64 - sizeof int64 - ptrdata int64 - } - - elems := make([]elem, nf) - for i := 0; i < nf; i++ { - field := str.Field(i) - ft := field.Type() - elems[i] = elem{ - i, - sizes.Alignof(ft), - sizes.Sizeof(ft), - sizes.ptrdata(ft), - } - } - - sort.Slice(elems, func(i, j int) bool { - ei := &elems[i] - ej := &elems[j] - - // Place zero sized objects before non-zero sized objects. - zeroi := ei.sizeof == 0 - zeroj := ej.sizeof == 0 - if zeroi != zeroj { - return zeroi - } - - // Next, place more tightly aligned objects before less tightly aligned objects. - if ei.alignof != ej.alignof { - return ei.alignof > ej.alignof - } - - // Place pointerful objects before pointer-free objects. - noptrsi := ei.ptrdata == 0 - noptrsj := ej.ptrdata == 0 - if noptrsi != noptrsj { - return noptrsj - } - - if !noptrsi { - // If both have pointers... - - // ... then place objects with less trailing - // non-pointer bytes earlier. That is, place - // the field with the most trailing - // non-pointer bytes at the end of the - // pointerful section. - traili := ei.sizeof - ei.ptrdata - trailj := ej.sizeof - ej.ptrdata - if traili != trailj { - return traili < trailj - } - } - - // Lastly, order by size. - if ei.sizeof != ej.sizeof { - return ei.sizeof > ej.sizeof - } - - return false - }) - - fields := make([]*types.Var, nf) - indexes := make([]int, nf) - for i, e := range elems { - fields[i] = str.Field(e.index) - indexes[i] = e.index - } - return types.NewStruct(fields, nil), indexes -} - -// Code below based on go/types.StdSizes. - -type gcSizes struct { - WordSize int64 - MaxAlign int64 -} - -func (s *gcSizes) Alignof(T types.Type) int64 { - // For arrays and structs, alignment is defined in terms - // of alignment of the elements and fields, respectively. - switch t := T.Underlying().(type) { - case *types.Array: - // spec: "For a variable x of array type: unsafe.Alignof(x) - // is the same as unsafe.Alignof(x[0]), but at least 1." - return s.Alignof(t.Elem()) - case *types.Struct: - // spec: "For a variable x of struct type: unsafe.Alignof(x) - // is the largest of the values unsafe.Alignof(x.f) for each - // field f of x, but at least 1." - max := int64(1) - for i, nf := 0, t.NumFields(); i < nf; i++ { - if a := s.Alignof(t.Field(i).Type()); a > max { - max = a - } - } - return max - } - a := s.Sizeof(T) // may be 0 - // spec: "For a variable x of any type: unsafe.Alignof(x) is at least 1." 
- if a < 1 { - return 1 - } - if a > s.MaxAlign { - return s.MaxAlign - } - return a -} - -var basicSizes = [...]byte{ - types.Bool: 1, - types.Int8: 1, - types.Int16: 2, - types.Int32: 4, - types.Int64: 8, - types.Uint8: 1, - types.Uint16: 2, - types.Uint32: 4, - types.Uint64: 8, - types.Float32: 4, - types.Float64: 8, - types.Complex64: 8, - types.Complex128: 16, -} - -func (s *gcSizes) Sizeof(T types.Type) int64 { - switch t := T.Underlying().(type) { - case *types.Basic: - k := t.Kind() - if int(k) < len(basicSizes) { - if s := basicSizes[k]; s > 0 { - return int64(s) - } - } - if k == types.String { - return s.WordSize * 2 - } - case *types.Array: - return t.Len() * s.Sizeof(t.Elem()) - case *types.Slice: - return s.WordSize * 3 - case *types.Struct: - nf := t.NumFields() - if nf == 0 { - return 0 - } - - var o int64 - max := int64(1) - for i := 0; i < nf; i++ { - ft := t.Field(i).Type() - a, sz := s.Alignof(ft), s.Sizeof(ft) - if a > max { - max = a - } - if i == nf-1 && sz == 0 && o != 0 { - sz = 1 - } - o = align(o, a) + sz - } - return align(o, max) - case *types.Interface: - return s.WordSize * 2 - } - return s.WordSize // catch-all -} - -// align returns the smallest y >= x such that y % a == 0. -func align(x, a int64) int64 { - y := x + a - 1 - return y - y%a -} - -func (s *gcSizes) ptrdata(T types.Type) int64 { - switch t := T.Underlying().(type) { - case *types.Basic: - switch t.Kind() { - case types.String, types.UnsafePointer: - return s.WordSize - } - return 0 - case *types.Chan, *types.Map, *types.Pointer, *types.Signature, *types.Slice: - return s.WordSize - case *types.Interface: - return 2 * s.WordSize - case *types.Array: - n := t.Len() - if n == 0 { - return 0 - } - a := s.ptrdata(t.Elem()) - if a == 0 { - return 0 - } - z := s.Sizeof(t.Elem()) - return (n-1)*z + a - case *types.Struct: - nf := t.NumFields() - if nf == 0 { - return 0 - } - - var o, p int64 - for i := 0; i < nf; i++ { - ft := t.Field(i).Type() - a, sz := s.Alignof(ft), s.Sizeof(ft) - fp := s.ptrdata(ft) - o = align(o, a) - if fp != 0 { - p = o + fp - } - o += sz - } - return p - } - - panic("impossible") -} diff --git a/vendor/golang.org/x/tools/go/analysis/passes/findcall/findcall.go b/vendor/golang.org/x/tools/go/analysis/passes/findcall/findcall.go deleted file mode 100644 index 2671573d1..000000000 --- a/vendor/golang.org/x/tools/go/analysis/passes/findcall/findcall.go +++ /dev/null @@ -1,99 +0,0 @@ -// Copyright 2018 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Package findcall defines an Analyzer that serves as a trivial -// example and test of the Analysis API. It reports a diagnostic for -// every call to a function or method of the name specified by its -// -name flag. It also exports a fact for each declaration that -// matches the name, plus a package-level fact if the package contained -// one or more such declarations. 
-package findcall - -import ( - "fmt" - "go/ast" - "go/types" - - "golang.org/x/tools/go/analysis" -) - -const Doc = `find calls to a particular function - -The findcall analysis reports calls to functions or methods -of a particular name.` - -var Analyzer = &analysis.Analyzer{ - Name: "findcall", - Doc: Doc, - URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/findcall", - Run: run, - RunDespiteErrors: true, - FactTypes: []analysis.Fact{new(foundFact)}, -} - -var name string // -name flag - -func init() { - Analyzer.Flags.StringVar(&name, "name", name, "name of the function to find") -} - -func run(pass *analysis.Pass) (interface{}, error) { - for _, f := range pass.Files { - ast.Inspect(f, func(n ast.Node) bool { - if call, ok := n.(*ast.CallExpr); ok { - var id *ast.Ident - switch fun := call.Fun.(type) { - case *ast.Ident: - id = fun - case *ast.SelectorExpr: - id = fun.Sel - } - if id != nil && !pass.TypesInfo.Types[id].IsType() && id.Name == name { - pass.Report(analysis.Diagnostic{ - Pos: call.Lparen, - Message: fmt.Sprintf("call of %s(...)", id.Name), - SuggestedFixes: []analysis.SuggestedFix{{ - Message: fmt.Sprintf("Add '_TEST_'"), - TextEdits: []analysis.TextEdit{{ - Pos: call.Lparen, - End: call.Lparen, - NewText: []byte("_TEST_"), - }}, - }}, - }) - } - } - return true - }) - } - - // Export a fact for each matching function. - // - // These facts are produced only to test the testing - // infrastructure in the analysistest package. - // They are not consumed by the findcall Analyzer - // itself, as would happen in a more realistic example. - for _, f := range pass.Files { - for _, decl := range f.Decls { - if decl, ok := decl.(*ast.FuncDecl); ok && decl.Name.Name == name { - if obj, ok := pass.TypesInfo.Defs[decl.Name].(*types.Func); ok { - pass.ExportObjectFact(obj, new(foundFact)) - } - } - } - } - - if len(pass.AllObjectFacts()) > 0 { - pass.ExportPackageFact(new(foundFact)) - } - - return nil, nil -} - -// foundFact is a fact associated with functions that match -name. -// We use it to exercise the fact machinery in tests. -type foundFact struct{} - -func (*foundFact) String() string { return "found" } -func (*foundFact) AFact() {} diff --git a/vendor/golang.org/x/tools/go/analysis/passes/framepointer/framepointer.go b/vendor/golang.org/x/tools/go/analysis/passes/framepointer/framepointer.go deleted file mode 100644 index 0b3ded47e..000000000 --- a/vendor/golang.org/x/tools/go/analysis/passes/framepointer/framepointer.go +++ /dev/null @@ -1,92 +0,0 @@ -// Copyright 2020 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Package framepointer defines an Analyzer that reports assembly code -// that clobbers the frame pointer before saving it. -package framepointer - -import ( - "go/build" - "regexp" - "strings" - - "golang.org/x/tools/go/analysis" - "golang.org/x/tools/go/analysis/passes/internal/analysisutil" -) - -const Doc = "report assembly that clobbers the frame pointer before saving it" - -var Analyzer = &analysis.Analyzer{ - Name: "framepointer", - Doc: Doc, - URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/framepointer", - Run: run, -} - -var ( - re = regexp.MustCompile - asmWriteBP = re(`,\s*BP$`) // TODO: can have false positive, e.g. for TESTQ BP,BP. Seems unlikely. 
- asmMentionBP = re(`\bBP\b`) - asmControlFlow = re(`^(J|RET)`) -) - -func run(pass *analysis.Pass) (interface{}, error) { - if build.Default.GOARCH != "amd64" { // TODO: arm64 also? - return nil, nil - } - if build.Default.GOOS != "linux" && build.Default.GOOS != "darwin" { - return nil, nil - } - - // Find assembly files to work on. - var sfiles []string - for _, fname := range pass.OtherFiles { - if strings.HasSuffix(fname, ".s") && pass.Pkg.Path() != "runtime" { - sfiles = append(sfiles, fname) - } - } - - for _, fname := range sfiles { - content, tf, err := analysisutil.ReadFile(pass.Fset, fname) - if err != nil { - return nil, err - } - - lines := strings.SplitAfter(string(content), "\n") - active := false - for lineno, line := range lines { - lineno++ - - // Ignore comments and commented-out code. - if i := strings.Index(line, "//"); i >= 0 { - line = line[:i] - } - line = strings.TrimSpace(line) - - // We start checking code at a TEXT line for a frameless function. - if strings.HasPrefix(line, "TEXT") && strings.Contains(line, "(SB)") && strings.Contains(line, "$0") { - active = true - continue - } - if !active { - continue - } - - if asmWriteBP.MatchString(line) { // clobber of BP, function is not OK - pass.Reportf(analysisutil.LineStart(tf, lineno), "frame pointer is clobbered before saving") - active = false - continue - } - if asmMentionBP.MatchString(line) { // any other use of BP might be a read, so function is OK - active = false - continue - } - if asmControlFlow.MatchString(line) { // give up after any branch instruction - active = false - continue - } - } - } - return nil, nil -} diff --git a/vendor/golang.org/x/tools/go/analysis/passes/httpresponse/httpresponse.go b/vendor/golang.org/x/tools/go/analysis/passes/httpresponse/httpresponse.go deleted file mode 100644 index 047ae07cc..000000000 --- a/vendor/golang.org/x/tools/go/analysis/passes/httpresponse/httpresponse.go +++ /dev/null @@ -1,176 +0,0 @@ -// Copyright 2016 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Package httpresponse defines an Analyzer that checks for mistakes -// using HTTP responses. -package httpresponse - -import ( - "go/ast" - "go/types" - - "golang.org/x/tools/go/analysis" - "golang.org/x/tools/go/analysis/passes/inspect" - "golang.org/x/tools/go/analysis/passes/internal/analysisutil" - "golang.org/x/tools/go/ast/inspector" - "golang.org/x/tools/internal/aliases" - "golang.org/x/tools/internal/typesinternal" -) - -const Doc = `check for mistakes using HTTP responses - -A common mistake when using the net/http package is to defer a function -call to close the http.Response Body before checking the error that -determines whether the response is valid: - - resp, err := http.Head(url) - defer resp.Body.Close() - if err != nil { - log.Fatal(err) - } - // (defer statement belongs here) - -This checker helps uncover latent nil dereference bugs by reporting a -diagnostic for such mistakes.` - -var Analyzer = &analysis.Analyzer{ - Name: "httpresponse", - Doc: Doc, - URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/httpresponse", - Requires: []*analysis.Analyzer{inspect.Analyzer}, - Run: run, -} - -func run(pass *analysis.Pass) (interface{}, error) { - inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) - - // Fast path: if the package doesn't import net/http, - // skip the traversal. 
- if !analysisutil.Imports(pass.Pkg, "net/http") { - return nil, nil - } - - nodeFilter := []ast.Node{ - (*ast.CallExpr)(nil), - } - inspect.WithStack(nodeFilter, func(n ast.Node, push bool, stack []ast.Node) bool { - if !push { - return true - } - call := n.(*ast.CallExpr) - if !isHTTPFuncOrMethodOnClient(pass.TypesInfo, call) { - return true // the function call is not related to this check. - } - - // Find the innermost containing block, and get the list - // of statements starting with the one containing call. - stmts, ncalls := restOfBlock(stack) - if len(stmts) < 2 { - // The call to the http function is the last statement of the block. - return true - } - - // Skip cases in which the call is wrapped by another (#52661). - // Example: resp, err := checkError(http.Get(url)) - if ncalls > 1 { - return true - } - - asg, ok := stmts[0].(*ast.AssignStmt) - if !ok { - return true // the first statement is not assignment. - } - - resp := rootIdent(asg.Lhs[0]) - if resp == nil { - return true // could not find the http.Response in the assignment. - } - - def, ok := stmts[1].(*ast.DeferStmt) - if !ok { - return true // the following statement is not a defer. - } - root := rootIdent(def.Call.Fun) - if root == nil { - return true // could not find the receiver of the defer call. - } - - if resp.Obj == root.Obj { - pass.ReportRangef(root, "using %s before checking for errors", resp.Name) - } - return true - }) - return nil, nil -} - -// isHTTPFuncOrMethodOnClient checks whether the given call expression is on -// either a function of the net/http package or a method of http.Client that -// returns (*http.Response, error). -func isHTTPFuncOrMethodOnClient(info *types.Info, expr *ast.CallExpr) bool { - fun, _ := expr.Fun.(*ast.SelectorExpr) - sig, _ := info.Types[fun].Type.(*types.Signature) - if sig == nil { - return false // the call is not of the form x.f() - } - - res := sig.Results() - if res.Len() != 2 { - return false // the function called does not return two values. - } - isPtr, named := typesinternal.ReceiverNamed(res.At(0)) - if !isPtr || !analysisutil.IsNamedType(named, "net/http", "Response") { - return false // the first return type is not *http.Response. - } - - errorType := types.Universe.Lookup("error").Type() - if !types.Identical(res.At(1).Type(), errorType) { - return false // the second return type is not error - } - - typ := info.Types[fun.X].Type - if typ == nil { - id, ok := fun.X.(*ast.Ident) - return ok && id.Name == "http" // function in net/http package. - } - - if analysisutil.IsNamedType(typ, "net/http", "Client") { - return true // method on http.Client. - } - ptr, ok := aliases.Unalias(typ).(*types.Pointer) - return ok && analysisutil.IsNamedType(ptr.Elem(), "net/http", "Client") // method on *http.Client. -} - -// restOfBlock, given a traversal stack, finds the innermost containing -// block and returns the suffix of its statements starting with the current -// node, along with the number of call expressions encountered. -func restOfBlock(stack []ast.Node) ([]ast.Stmt, int) { - var ncalls int - for i := len(stack) - 1; i >= 0; i-- { - if b, ok := stack[i].(*ast.BlockStmt); ok { - for j, v := range b.List { - if v == stack[i+1] { - return b.List[j:], ncalls - } - } - break - } - - if _, ok := stack[i].(*ast.CallExpr); ok { - ncalls++ - } - } - return nil, 0 -} - -// rootIdent finds the root identifier x in a chain of selections x.y.z, or nil if not found. 
-func rootIdent(n ast.Node) *ast.Ident { - switch n := n.(type) { - case *ast.SelectorExpr: - return rootIdent(n.X) - case *ast.Ident: - return n - default: - return nil - } -} diff --git a/vendor/golang.org/x/tools/go/analysis/passes/ifaceassert/doc.go b/vendor/golang.org/x/tools/go/analysis/passes/ifaceassert/doc.go deleted file mode 100644 index 3d2b1a3dc..000000000 --- a/vendor/golang.org/x/tools/go/analysis/passes/ifaceassert/doc.go +++ /dev/null @@ -1,24 +0,0 @@ -// Copyright 2023 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Package ifaceassert defines an Analyzer that flags -// impossible interface-interface type assertions. -// -// # Analyzer ifaceassert -// -// ifaceassert: detect impossible interface-to-interface type assertions -// -// This checker flags type assertions v.(T) and corresponding type-switch cases -// in which the static type V of v is an interface that cannot possibly implement -// the target interface T. This occurs when V and T contain methods with the same -// name but different signatures. Example: -// -// var v interface { -// Read() -// } -// _ = v.(io.Reader) -// -// The Read method in v has a different signature than the Read method in -// io.Reader, so this assertion cannot succeed. -package ifaceassert diff --git a/vendor/golang.org/x/tools/go/analysis/passes/ifaceassert/ifaceassert.go b/vendor/golang.org/x/tools/go/analysis/passes/ifaceassert/ifaceassert.go deleted file mode 100644 index cd4a47762..000000000 --- a/vendor/golang.org/x/tools/go/analysis/passes/ifaceassert/ifaceassert.go +++ /dev/null @@ -1,99 +0,0 @@ -// Copyright 2020 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package ifaceassert - -import ( - _ "embed" - "go/ast" - "go/types" - - "golang.org/x/tools/go/analysis" - "golang.org/x/tools/go/analysis/passes/inspect" - "golang.org/x/tools/go/analysis/passes/internal/analysisutil" - "golang.org/x/tools/go/ast/inspector" -) - -//go:embed doc.go -var doc string - -var Analyzer = &analysis.Analyzer{ - Name: "ifaceassert", - Doc: analysisutil.MustExtractDoc(doc, "ifaceassert"), - URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/ifaceassert", - Requires: []*analysis.Analyzer{inspect.Analyzer}, - Run: run, -} - -// assertableTo checks whether interface v can be asserted into t. It returns -// nil on success, or the first conflicting method on failure. -func assertableTo(v, t types.Type) *types.Func { - if t == nil || v == nil { - // not assertable to, but there is no missing method - return nil - } - // ensure that v and t are interfaces - V, _ := v.Underlying().(*types.Interface) - T, _ := t.Underlying().(*types.Interface) - if V == nil || T == nil { - return nil - } - - // Mitigations for interface comparisons and generics. - // TODO(https://github.com/golang/go/issues/50658): Support more precise conclusion. 
- if isParameterized(V) || isParameterized(T) { - return nil - } - if f, wrongType := types.MissingMethod(V, T, false); wrongType { - return f - } - return nil -} - -func run(pass *analysis.Pass) (interface{}, error) { - inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) - nodeFilter := []ast.Node{ - (*ast.TypeAssertExpr)(nil), - (*ast.TypeSwitchStmt)(nil), - } - inspect.Preorder(nodeFilter, func(n ast.Node) { - var ( - assert *ast.TypeAssertExpr // v.(T) expression - targets []ast.Expr // interfaces T in v.(T) - ) - switch n := n.(type) { - case *ast.TypeAssertExpr: - // take care of v.(type) in *ast.TypeSwitchStmt - if n.Type == nil { - return - } - assert = n - targets = append(targets, n.Type) - case *ast.TypeSwitchStmt: - // retrieve type assertion from type switch's 'assign' field - switch t := n.Assign.(type) { - case *ast.ExprStmt: - assert = t.X.(*ast.TypeAssertExpr) - case *ast.AssignStmt: - assert = t.Rhs[0].(*ast.TypeAssertExpr) - } - // gather target types from case clauses - for _, c := range n.Body.List { - targets = append(targets, c.(*ast.CaseClause).List...) - } - } - V := pass.TypesInfo.TypeOf(assert.X) - for _, target := range targets { - T := pass.TypesInfo.TypeOf(target) - if f := assertableTo(V, T); f != nil { - pass.Reportf( - target.Pos(), - "impossible type assertion: no type can implement both %v and %v (conflicting types for %v method)", - V, T, f.Name(), - ) - } - } - }) - return nil, nil -} diff --git a/vendor/golang.org/x/tools/go/analysis/passes/ifaceassert/parameterized.go b/vendor/golang.org/x/tools/go/analysis/passes/ifaceassert/parameterized.go deleted file mode 100644 index a077d4402..000000000 --- a/vendor/golang.org/x/tools/go/analysis/passes/ifaceassert/parameterized.go +++ /dev/null @@ -1,118 +0,0 @@ -// Copyright 2022 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package ifaceassert - -import ( - "go/types" - - "golang.org/x/tools/internal/aliases" - "golang.org/x/tools/internal/typeparams" -) - -// isParameterized reports whether typ contains any of the type parameters of tparams. -// -// NOTE: Adapted from go/types/infer.go. If that is exported in a future release remove this copy. -func isParameterized(typ types.Type) bool { - w := tpWalker{ - seen: make(map[types.Type]bool), - } - return w.isParameterized(typ) -} - -type tpWalker struct { - seen map[types.Type]bool -} - -func (w *tpWalker) isParameterized(typ types.Type) (res bool) { - // detect cycles - if x, ok := w.seen[typ]; ok { - return x - } - w.seen[typ] = false - defer func() { - w.seen[typ] = res - }() - - switch t := typ.(type) { - case nil, *types.Basic: // TODO(gri) should nil be handled here? - break - - case *types.Array: - return w.isParameterized(t.Elem()) - - case *types.Slice: - return w.isParameterized(t.Elem()) - - case *types.Struct: - for i, n := 0, t.NumFields(); i < n; i++ { - if w.isParameterized(t.Field(i).Type()) { - return true - } - } - - case *types.Pointer: - return w.isParameterized(t.Elem()) - - case *types.Tuple: - n := t.Len() - for i := 0; i < n; i++ { - if w.isParameterized(t.At(i).Type()) { - return true - } - } - - case *types.Signature: - // t.tparams may not be nil if we are looking at a signature - // of a generic function type (or an interface method) that is - // part of the type we're testing. We don't care about these type - // parameters. 
- // Similarly, the receiver of a method may declare (rather than - // use) type parameters, we don't care about those either. - // Thus, we only need to look at the input and result parameters. - return w.isParameterized(t.Params()) || w.isParameterized(t.Results()) - - case *types.Interface: - for i, n := 0, t.NumMethods(); i < n; i++ { - if w.isParameterized(t.Method(i).Type()) { - return true - } - } - terms, err := typeparams.InterfaceTermSet(t) - if err != nil { - panic(err) - } - for _, term := range terms { - if w.isParameterized(term.Type()) { - return true - } - } - - case *types.Map: - return w.isParameterized(t.Key()) || w.isParameterized(t.Elem()) - - case *types.Chan: - return w.isParameterized(t.Elem()) - - case *aliases.Alias: - // TODO(adonovan): think about generic aliases. - return w.isParameterized(aliases.Unalias(t)) - - case *types.Named: - list := t.TypeArgs() - for i, n := 0, list.Len(); i < n; i++ { - if w.isParameterized(list.At(i)) { - return true - } - } - - case *types.TypeParam: - return true - - default: - panic(t) // unreachable - } - - return false -} diff --git a/vendor/golang.org/x/tools/go/analysis/passes/inspect/inspect.go b/vendor/golang.org/x/tools/go/analysis/passes/inspect/inspect.go deleted file mode 100644 index 3b121cb0c..000000000 --- a/vendor/golang.org/x/tools/go/analysis/passes/inspect/inspect.go +++ /dev/null @@ -1,49 +0,0 @@ -// Copyright 2018 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Package inspect defines an Analyzer that provides an AST inspector -// (golang.org/x/tools/go/ast/inspector.Inspector) for the syntax trees -// of a package. It is only a building block for other analyzers. -// -// Example of use in another analysis: -// -// import ( -// "golang.org/x/tools/go/analysis" -// "golang.org/x/tools/go/analysis/passes/inspect" -// "golang.org/x/tools/go/ast/inspector" -// ) -// -// var Analyzer = &analysis.Analyzer{ -// ... -// Requires: []*analysis.Analyzer{inspect.Analyzer}, -// } -// -// func run(pass *analysis.Pass) (interface{}, error) { -// inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) -// inspect.Preorder(nil, func(n ast.Node) { -// ... -// }) -// return nil, nil -// } -package inspect - -import ( - "reflect" - - "golang.org/x/tools/go/analysis" - "golang.org/x/tools/go/ast/inspector" -) - -var Analyzer = &analysis.Analyzer{ - Name: "inspect", - Doc: "optimize AST traversal for later passes", - URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/inspect", - Run: run, - RunDespiteErrors: true, - ResultType: reflect.TypeOf(new(inspector.Inspector)), -} - -func run(pass *analysis.Pass) (interface{}, error) { - return inspector.New(pass.Files), nil -} diff --git a/vendor/golang.org/x/tools/go/analysis/passes/internal/analysisutil/util.go b/vendor/golang.org/x/tools/go/analysis/passes/internal/analysisutil/util.go deleted file mode 100644 index 89291602a..000000000 --- a/vendor/golang.org/x/tools/go/analysis/passes/internal/analysisutil/util.go +++ /dev/null @@ -1,157 +0,0 @@ -// Copyright 2018 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Package analysisutil defines various helper functions -// used by two or more packages beneath go/analysis. 
-package analysisutil - -import ( - "bytes" - "go/ast" - "go/printer" - "go/token" - "go/types" - "os" - - "golang.org/x/tools/internal/aliases" - "golang.org/x/tools/internal/analysisinternal" -) - -// Format returns a string representation of the expression. -func Format(fset *token.FileSet, x ast.Expr) string { - var b bytes.Buffer - printer.Fprint(&b, fset, x) - return b.String() -} - -// HasSideEffects reports whether evaluation of e has side effects. -func HasSideEffects(info *types.Info, e ast.Expr) bool { - safe := true - ast.Inspect(e, func(node ast.Node) bool { - switch n := node.(type) { - case *ast.CallExpr: - typVal := info.Types[n.Fun] - switch { - case typVal.IsType(): - // Type conversion, which is safe. - case typVal.IsBuiltin(): - // Builtin func, conservatively assumed to not - // be safe for now. - safe = false - return false - default: - // A non-builtin func or method call. - // Conservatively assume that all of them have - // side effects for now. - safe = false - return false - } - case *ast.UnaryExpr: - if n.Op == token.ARROW { - safe = false - return false - } - } - return true - }) - return !safe -} - -// ReadFile reads a file and adds it to the FileSet -// so that we can report errors against it using lineStart. -func ReadFile(fset *token.FileSet, filename string) ([]byte, *token.File, error) { - content, err := os.ReadFile(filename) - if err != nil { - return nil, nil, err - } - tf := fset.AddFile(filename, -1, len(content)) - tf.SetLinesForContent(content) - return content, tf, nil -} - -// LineStart returns the position of the start of the specified line -// within file f, or NoPos if there is no line of that number. -func LineStart(f *token.File, line int) token.Pos { - // Use binary search to find the start offset of this line. - // - // TODO(adonovan): eventually replace this function with the - // simpler and more efficient (*go/token.File).LineStart, added - // in go1.12. - - min := 0 // inclusive - max := f.Size() // exclusive - for { - offset := (min + max) / 2 - pos := f.Pos(offset) - posn := f.Position(pos) - if posn.Line == line { - return pos - (token.Pos(posn.Column) - 1) - } - - if min+1 >= max { - return token.NoPos - } - - if posn.Line < line { - min = offset - } else { - max = offset - } - } -} - -// Imports returns true if path is imported by pkg. -func Imports(pkg *types.Package, path string) bool { - for _, imp := range pkg.Imports() { - if imp.Path() == path { - return true - } - } - return false -} - -// IsNamedType reports whether t is the named type with the given package path -// and one of the given names. -// This function avoids allocating the concatenation of "pkg.Name", -// which is important for the performance of syntax matching. -func IsNamedType(t types.Type, pkgPath string, names ...string) bool { - n, ok := aliases.Unalias(t).(*types.Named) - if !ok { - return false - } - obj := n.Obj() - if obj == nil || obj.Pkg() == nil || obj.Pkg().Path() != pkgPath { - return false - } - name := obj.Name() - for _, n := range names { - if name == n { - return true - } - } - return false -} - -// IsFunctionNamed reports whether f is a top-level function defined in the -// given package and has one of the given names. -// It returns false if f is nil or a method. 
-func IsFunctionNamed(f *types.Func, pkgPath string, names ...string) bool { - if f == nil { - return false - } - if f.Pkg() == nil || f.Pkg().Path() != pkgPath { - return false - } - if f.Type().(*types.Signature).Recv() != nil { - return false - } - for _, n := range names { - if f.Name() == n { - return true - } - } - return false -} - -var MustExtractDoc = analysisinternal.MustExtractDoc diff --git a/vendor/golang.org/x/tools/go/analysis/passes/loopclosure/doc.go b/vendor/golang.org/x/tools/go/analysis/passes/loopclosure/doc.go deleted file mode 100644 index c95b1c1c9..000000000 --- a/vendor/golang.org/x/tools/go/analysis/passes/loopclosure/doc.go +++ /dev/null @@ -1,75 +0,0 @@ -// Copyright 2023 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Package loopclosure defines an Analyzer that checks for references to -// enclosing loop variables from within nested functions. -// -// # Analyzer loopclosure -// -// loopclosure: check references to loop variables from within nested functions -// -// This analyzer reports places where a function literal references the -// iteration variable of an enclosing loop, and the loop calls the function -// in such a way (e.g. with go or defer) that it may outlive the loop -// iteration and possibly observe the wrong value of the variable. -// -// Note: An iteration variable can only outlive a loop iteration in Go versions <=1.21. -// In Go 1.22 and later, the loop variable lifetimes changed to create a new -// iteration variable per loop iteration. (See go.dev/issue/60078.) -// -// In this example, all the deferred functions run after the loop has -// completed, so all observe the final value of v [... -func isMethodCall(info *types.Info, expr ast.Expr, pkgPath, typeName, method string) bool { - call, ok := expr.(*ast.CallExpr) - if !ok { - return false - } - - // Check that we are calling a method - f := typeutil.StaticCallee(info, call) - if f == nil || f.Name() != method { - return false - } - recv := f.Type().(*types.Signature).Recv() - if recv == nil { - return false - } - - // Check that the receiver is a . or - // *.. - _, named := typesinternal.ReceiverNamed(recv) - return analysisutil.IsNamedType(named, pkgPath, typeName) -} diff --git a/vendor/golang.org/x/tools/go/analysis/passes/lostcancel/doc.go b/vendor/golang.org/x/tools/go/analysis/passes/lostcancel/doc.go deleted file mode 100644 index 28bf6c7e2..000000000 --- a/vendor/golang.org/x/tools/go/analysis/passes/lostcancel/doc.go +++ /dev/null @@ -1,16 +0,0 @@ -// Copyright 2023 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Package lostcancel defines an Analyzer that checks for failure to -// call a context cancellation function. -// -// # Analyzer lostcancel -// -// lostcancel: check cancel func returned by context.WithCancel is called -// -// The cancellation function returned by context.WithCancel, WithTimeout, -// and WithDeadline must be called or the new context will remain live -// until its parent context is cancelled. -// (The background context is never cancelled.) 
-package lostcancel diff --git a/vendor/golang.org/x/tools/go/analysis/passes/lostcancel/lostcancel.go b/vendor/golang.org/x/tools/go/analysis/passes/lostcancel/lostcancel.go deleted file mode 100644 index bf56a5c06..000000000 --- a/vendor/golang.org/x/tools/go/analysis/passes/lostcancel/lostcancel.go +++ /dev/null @@ -1,329 +0,0 @@ -// Copyright 2016 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package lostcancel - -import ( - _ "embed" - "fmt" - "go/ast" - "go/types" - - "golang.org/x/tools/go/analysis" - "golang.org/x/tools/go/analysis/passes/ctrlflow" - "golang.org/x/tools/go/analysis/passes/inspect" - "golang.org/x/tools/go/analysis/passes/internal/analysisutil" - "golang.org/x/tools/go/ast/inspector" - "golang.org/x/tools/go/cfg" -) - -//go:embed doc.go -var doc string - -var Analyzer = &analysis.Analyzer{ - Name: "lostcancel", - Doc: analysisutil.MustExtractDoc(doc, "lostcancel"), - URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/lostcancel", - Run: run, - Requires: []*analysis.Analyzer{ - inspect.Analyzer, - ctrlflow.Analyzer, - }, -} - -const debug = false - -var contextPackage = "context" - -// checkLostCancel reports a failure to the call the cancel function -// returned by context.WithCancel, either because the variable was -// assigned to the blank identifier, or because there exists a -// control-flow path from the call to a return statement and that path -// does not "use" the cancel function. Any reference to the variable -// counts as a use, even within a nested function literal. -// If the variable's scope is larger than the function -// containing the assignment, we assume that other uses exist. -// -// checkLostCancel analyzes a single named or literal function. -func run(pass *analysis.Pass) (interface{}, error) { - // Fast path: bypass check if file doesn't use context.WithCancel. - if !analysisutil.Imports(pass.Pkg, contextPackage) { - return nil, nil - } - - // Call runFunc for each Func{Decl,Lit}. - inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) - nodeTypes := []ast.Node{ - (*ast.FuncLit)(nil), - (*ast.FuncDecl)(nil), - } - inspect.Preorder(nodeTypes, func(n ast.Node) { - runFunc(pass, n) - }) - return nil, nil -} - -func runFunc(pass *analysis.Pass, node ast.Node) { - // Find scope of function node - var funcScope *types.Scope - switch v := node.(type) { - case *ast.FuncLit: - funcScope = pass.TypesInfo.Scopes[v.Type] - case *ast.FuncDecl: - funcScope = pass.TypesInfo.Scopes[v.Type] - } - - // Maps each cancel variable to its defining ValueSpec/AssignStmt. - cancelvars := make(map[*types.Var]ast.Node) - - // TODO(adonovan): opt: refactor to make a single pass - // over the AST using inspect.WithStack and node types - // {FuncDecl,FuncLit,CallExpr,SelectorExpr}. - - // Find the set of cancel vars to analyze. - stack := make([]ast.Node, 0, 32) - ast.Inspect(node, func(n ast.Node) bool { - switch n.(type) { - case *ast.FuncLit: - if len(stack) > 0 { - return false // don't stray into nested functions - } - case nil: - stack = stack[:len(stack)-1] // pop - return true - } - stack = append(stack, n) // push - - // Look for [{AssignStmt,ValueSpec} CallExpr SelectorExpr]: - // - // ctx, cancel := context.WithCancel(...) - // ctx, cancel = context.WithCancel(...) - // var ctx, cancel = context.WithCancel(...) 
- // - if !isContextWithCancel(pass.TypesInfo, n) || !isCall(stack[len(stack)-2]) { - return true - } - var id *ast.Ident // id of cancel var - stmt := stack[len(stack)-3] - switch stmt := stmt.(type) { - case *ast.ValueSpec: - if len(stmt.Names) > 1 { - id = stmt.Names[1] - } - case *ast.AssignStmt: - if len(stmt.Lhs) > 1 { - id, _ = stmt.Lhs[1].(*ast.Ident) - } - } - if id != nil { - if id.Name == "_" { - pass.ReportRangef(id, - "the cancel function returned by context.%s should be called, not discarded, to avoid a context leak", - n.(*ast.SelectorExpr).Sel.Name) - } else if v, ok := pass.TypesInfo.Uses[id].(*types.Var); ok { - // If the cancel variable is defined outside function scope, - // do not analyze it. - if funcScope.Contains(v.Pos()) { - cancelvars[v] = stmt - } - } else if v, ok := pass.TypesInfo.Defs[id].(*types.Var); ok { - cancelvars[v] = stmt - } - } - return true - }) - - if len(cancelvars) == 0 { - return // no need to inspect CFG - } - - // Obtain the CFG. - cfgs := pass.ResultOf[ctrlflow.Analyzer].(*ctrlflow.CFGs) - var g *cfg.CFG - var sig *types.Signature - switch node := node.(type) { - case *ast.FuncDecl: - sig, _ = pass.TypesInfo.Defs[node.Name].Type().(*types.Signature) - if node.Name.Name == "main" && sig.Recv() == nil && pass.Pkg.Name() == "main" { - // Returning from main.main terminates the process, - // so there's no need to cancel contexts. - return - } - g = cfgs.FuncDecl(node) - - case *ast.FuncLit: - sig, _ = pass.TypesInfo.Types[node.Type].Type.(*types.Signature) - g = cfgs.FuncLit(node) - } - if sig == nil { - return // missing type information - } - - // Print CFG. - if debug { - fmt.Println(g.Format(pass.Fset)) - } - - // Examine the CFG for each variable in turn. - // (It would be more efficient to analyze all cancelvars in a - // single pass over the AST, but seldom is there more than one.) - for v, stmt := range cancelvars { - if ret := lostCancelPath(pass, g, v, stmt, sig); ret != nil { - lineno := pass.Fset.Position(stmt.Pos()).Line - pass.ReportRangef(stmt, "the %s function is not used on all paths (possible context leak)", v.Name()) - - pos, end := ret.Pos(), ret.End() - // golang/go#64547: cfg.Block.Return may return a synthetic - // ReturnStmt that overflows the file. - if pass.Fset.File(pos) != pass.Fset.File(end) { - end = pos - } - pass.Report(analysis.Diagnostic{ - Pos: pos, - End: end, - Message: fmt.Sprintf("this return statement may be reached without using the %s var defined on line %d", v.Name(), lineno), - }) - } - } -} - -func isCall(n ast.Node) bool { _, ok := n.(*ast.CallExpr); return ok } - -// isContextWithCancel reports whether n is one of the qualified identifiers -// context.With{Cancel,Timeout,Deadline}. -func isContextWithCancel(info *types.Info, n ast.Node) bool { - sel, ok := n.(*ast.SelectorExpr) - if !ok { - return false - } - switch sel.Sel.Name { - case "WithCancel", "WithTimeout", "WithDeadline": - default: - return false - } - if x, ok := sel.X.(*ast.Ident); ok { - if pkgname, ok := info.Uses[x].(*types.PkgName); ok { - return pkgname.Imported().Path() == contextPackage - } - // Import failed, so we can't check package path. - // Just check the local package name (heuristic). - return x.Name == "context" - } - return false -} - -// lostCancelPath finds a path through the CFG, from stmt (which defines -// the 'cancel' variable v) to a return statement, that doesn't "use" v. -// If it finds one, it returns the return statement (which may be synthetic). -// sig is the function's type, if known. 
-func lostCancelPath(pass *analysis.Pass, g *cfg.CFG, v *types.Var, stmt ast.Node, sig *types.Signature) *ast.ReturnStmt { - vIsNamedResult := sig != nil && tupleContains(sig.Results(), v) - - // uses reports whether stmts contain a "use" of variable v. - uses := func(pass *analysis.Pass, v *types.Var, stmts []ast.Node) bool { - found := false - for _, stmt := range stmts { - ast.Inspect(stmt, func(n ast.Node) bool { - switch n := n.(type) { - case *ast.Ident: - if pass.TypesInfo.Uses[n] == v { - found = true - } - case *ast.ReturnStmt: - // A naked return statement counts as a use - // of the named result variables. - if n.Results == nil && vIsNamedResult { - found = true - } - } - return !found - }) - } - return found - } - - // blockUses computes "uses" for each block, caching the result. - memo := make(map[*cfg.Block]bool) - blockUses := func(pass *analysis.Pass, v *types.Var, b *cfg.Block) bool { - res, ok := memo[b] - if !ok { - res = uses(pass, v, b.Nodes) - memo[b] = res - } - return res - } - - // Find the var's defining block in the CFG, - // plus the rest of the statements of that block. - var defblock *cfg.Block - var rest []ast.Node -outer: - for _, b := range g.Blocks { - for i, n := range b.Nodes { - if n == stmt { - defblock = b - rest = b.Nodes[i+1:] - break outer - } - } - } - if defblock == nil { - panic("internal error: can't find defining block for cancel var") - } - - // Is v "used" in the remainder of its defining block? - if uses(pass, v, rest) { - return nil - } - - // Does the defining block return without using v? - if ret := defblock.Return(); ret != nil { - return ret - } - - // Search the CFG depth-first for a path, from defblock to a - // return block, in which v is never "used". - seen := make(map[*cfg.Block]bool) - var search func(blocks []*cfg.Block) *ast.ReturnStmt - search = func(blocks []*cfg.Block) *ast.ReturnStmt { - for _, b := range blocks { - if seen[b] { - continue - } - seen[b] = true - - // Prune the search if the block uses v. - if blockUses(pass, v, b) { - continue - } - - // Found path to return statement? - if ret := b.Return(); ret != nil { - if debug { - fmt.Printf("found path to return in block %s\n", b) - } - return ret // found - } - - // Recur - if ret := search(b.Succs); ret != nil { - if debug { - fmt.Printf(" from block %s\n", b) - } - return ret - } - } - return nil - } - return search(defblock.Succs) -} - -func tupleContains(tuple *types.Tuple, v *types.Var) bool { - for i := 0; i < tuple.Len(); i++ { - if tuple.At(i) == v { - return true - } - } - return false -} diff --git a/vendor/golang.org/x/tools/go/analysis/passes/nilfunc/doc.go b/vendor/golang.org/x/tools/go/analysis/passes/nilfunc/doc.go deleted file mode 100644 index 07f79332b..000000000 --- a/vendor/golang.org/x/tools/go/analysis/passes/nilfunc/doc.go +++ /dev/null @@ -1,13 +0,0 @@ -// Copyright 2023 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Package nilfunc defines an Analyzer that checks for useless -// comparisons against nil. -// -// # Analyzer nilfunc -// -// nilfunc: check for useless comparisons between functions and nil -// -// A useless comparison is one like f == nil as opposed to f() == nil. 
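// A minimal sketch of the useless comparison nilfunc reports: a declared
// function (here the hypothetical lookup) is never nil, so the condition is
// constant and the author almost certainly meant to call it.
package example

func lookup() error { return nil }

func demo() bool {
	if lookup != nil { // nilfunc: comparison of function lookup != nil is always true
		return true // probably intended: lookup() != nil
	}
	return false
}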
-package nilfunc diff --git a/vendor/golang.org/x/tools/go/analysis/passes/nilfunc/nilfunc.go b/vendor/golang.org/x/tools/go/analysis/passes/nilfunc/nilfunc.go deleted file mode 100644 index 778f7f1f8..000000000 --- a/vendor/golang.org/x/tools/go/analysis/passes/nilfunc/nilfunc.go +++ /dev/null @@ -1,83 +0,0 @@ -// Copyright 2013 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Package nilfunc defines an Analyzer that checks for useless -// comparisons against nil. -package nilfunc - -import ( - _ "embed" - "go/ast" - "go/token" - "go/types" - - "golang.org/x/tools/go/analysis" - "golang.org/x/tools/go/analysis/passes/inspect" - "golang.org/x/tools/go/analysis/passes/internal/analysisutil" - "golang.org/x/tools/go/ast/inspector" - "golang.org/x/tools/internal/typeparams" -) - -//go:embed doc.go -var doc string - -var Analyzer = &analysis.Analyzer{ - Name: "nilfunc", - Doc: analysisutil.MustExtractDoc(doc, "nilfunc"), - URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/nilfunc", - Requires: []*analysis.Analyzer{inspect.Analyzer}, - Run: run, -} - -func run(pass *analysis.Pass) (interface{}, error) { - inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) - - nodeFilter := []ast.Node{ - (*ast.BinaryExpr)(nil), - } - inspect.Preorder(nodeFilter, func(n ast.Node) { - e := n.(*ast.BinaryExpr) - - // Only want == or != comparisons. - if e.Op != token.EQL && e.Op != token.NEQ { - return - } - - // Only want comparisons with a nil identifier on one side. - var e2 ast.Expr - switch { - case pass.TypesInfo.Types[e.X].IsNil(): - e2 = e.Y - case pass.TypesInfo.Types[e.Y].IsNil(): - e2 = e.X - default: - return - } - - // Only want identifiers or selector expressions. - var obj types.Object - switch v := e2.(type) { - case *ast.Ident: - obj = pass.TypesInfo.Uses[v] - case *ast.SelectorExpr: - obj = pass.TypesInfo.Uses[v.Sel] - case *ast.IndexExpr, *ast.IndexListExpr: - // Check generic functions such as "f[T1,T2]". - x, _, _, _ := typeparams.UnpackIndexExpr(v) - if id, ok := x.(*ast.Ident); ok { - obj = pass.TypesInfo.Uses[id] - } - default: - return - } - - // Only want functions. - if _, ok := obj.(*types.Func); !ok { - return - } - - pass.ReportRangef(e, "comparison of function %v %v nil is always %v", obj.Name(), e.Op, e.Op == token.NEQ) - }) - return nil, nil -} diff --git a/vendor/golang.org/x/tools/go/analysis/passes/nilness/doc.go b/vendor/golang.org/x/tools/go/analysis/passes/nilness/doc.go deleted file mode 100644 index 212263741..000000000 --- a/vendor/golang.org/x/tools/go/analysis/passes/nilness/doc.go +++ /dev/null @@ -1,45 +0,0 @@ -// Copyright 2023 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Package nilness inspects the control-flow graph of an SSA function -// and reports errors such as nil pointer dereferences and degenerate -// nil pointer comparisons. -// -// # Analyzer nilness -// -// nilness: check for redundant or impossible nil comparisons -// -// The nilness checker inspects the control-flow graph of each function in -// a package and reports nil pointer dereferences, degenerate nil -// pointers, and panics with nil values. A degenerate comparison is of the form -// x==nil or x!=nil where x is statically known to be nil or non-nil. These are -// often a mistake, especially in control flow related to errors. 
Panics with nil -// values are checked because they are not detectable by -// -// if r := recover(); r != nil { -// -// This check reports conditions such as: -// -// if f == nil { // impossible condition (f is a function) -// } -// -// and: -// -// p := &v -// ... -// if p != nil { // tautological condition -// } -// -// and: -// -// if p == nil { -// print(*p) // nil dereference -// } -// -// and: -// -// if p == nil { -// panic(p) -// } -package nilness diff --git a/vendor/golang.org/x/tools/go/analysis/passes/nilness/nilness.go b/vendor/golang.org/x/tools/go/analysis/passes/nilness/nilness.go deleted file mode 100644 index 774f04c94..000000000 --- a/vendor/golang.org/x/tools/go/analysis/passes/nilness/nilness.go +++ /dev/null @@ -1,475 +0,0 @@ -// Copyright 2018 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package nilness - -import ( - _ "embed" - "fmt" - "go/token" - "go/types" - - "golang.org/x/tools/go/analysis" - "golang.org/x/tools/go/analysis/passes/buildssa" - "golang.org/x/tools/go/analysis/passes/internal/analysisutil" - "golang.org/x/tools/go/ssa" - "golang.org/x/tools/internal/typeparams" -) - -//go:embed doc.go -var doc string - -var Analyzer = &analysis.Analyzer{ - Name: "nilness", - Doc: analysisutil.MustExtractDoc(doc, "nilness"), - URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/nilness", - Run: run, - Requires: []*analysis.Analyzer{buildssa.Analyzer}, -} - -func run(pass *analysis.Pass) (interface{}, error) { - ssainput := pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA) - for _, fn := range ssainput.SrcFuncs { - runFunc(pass, fn) - } - return nil, nil -} - -func runFunc(pass *analysis.Pass, fn *ssa.Function) { - reportf := func(category string, pos token.Pos, format string, args ...interface{}) { - // We ignore nil-checking ssa.Instructions - // that don't correspond to syntax. - if pos.IsValid() { - pass.Report(analysis.Diagnostic{ - Pos: pos, - Category: category, - Message: fmt.Sprintf(format, args...), - }) - } - } - - // notNil reports an error if v is provably nil. - notNil := func(stack []fact, instr ssa.Instruction, v ssa.Value, descr string) { - if nilnessOf(stack, v) == isnil { - reportf("nilderef", instr.Pos(), descr) - } - } - - // visit visits reachable blocks of the CFG in dominance order, - // maintaining a stack of dominating nilness facts. - // - // By traversing the dom tree, we can pop facts off the stack as - // soon as we've visited a subtree. Had we traversed the CFG, - // we would need to retain the set of facts for each block. - seen := make([]bool, len(fn.Blocks)) // seen[i] means visit should ignore block i - var visit func(b *ssa.BasicBlock, stack []fact) - visit = func(b *ssa.BasicBlock, stack []fact) { - if seen[b.Index] { - return - } - seen[b.Index] = true - - // Report nil dereferences. - for _, instr := range b.Instrs { - switch instr := instr.(type) { - case ssa.CallInstruction: - // A nil receiver may be okay for type params. 
- cc := instr.Common() - if !(cc.IsInvoke() && typeparams.IsTypeParam(cc.Value.Type())) { - notNil(stack, instr, cc.Value, "nil dereference in "+cc.Description()) - } - case *ssa.FieldAddr: - notNil(stack, instr, instr.X, "nil dereference in field selection") - case *ssa.IndexAddr: - switch typeparams.CoreType(instr.X.Type()).(type) { - case *types.Pointer: // *array - notNil(stack, instr, instr.X, "nil dereference in array index operation") - case *types.Slice: - // This is not necessarily a runtime error, because - // it is usually dominated by a bounds check. - if isRangeIndex(instr) { - notNil(stack, instr, instr.X, "range of nil slice") - } else { - notNil(stack, instr, instr.X, "index of nil slice") - } - } - case *ssa.MapUpdate: - notNil(stack, instr, instr.Map, "nil dereference in map update") - case *ssa.Range: - // (Not a runtime error, but a likely mistake.) - notNil(stack, instr, instr.X, "range over nil map") - case *ssa.Slice: - // A nilcheck occurs in ptr[:] iff ptr is a pointer to an array. - if is[*types.Pointer](instr.X.Type().Underlying()) { - notNil(stack, instr, instr.X, "nil dereference in slice operation") - } - case *ssa.Store: - notNil(stack, instr, instr.Addr, "nil dereference in store") - case *ssa.TypeAssert: - if !instr.CommaOk { - notNil(stack, instr, instr.X, "nil dereference in type assertion") - } - case *ssa.UnOp: - switch instr.Op { - case token.MUL: // *X - notNil(stack, instr, instr.X, "nil dereference in load") - case token.ARROW: // <-ch - // (Not a runtime error, but a likely mistake.) - notNil(stack, instr, instr.X, "receive from nil channel") - } - case *ssa.Send: - // (Not a runtime error, but a likely mistake.) - notNil(stack, instr, instr.Chan, "send to nil channel") - } - } - - // Look for panics with nil value - for _, instr := range b.Instrs { - switch instr := instr.(type) { - case *ssa.Panic: - if nilnessOf(stack, instr.X) == isnil { - reportf("nilpanic", instr.Pos(), "panic with nil value") - } - case *ssa.SliceToArrayPointer: - nn := nilnessOf(stack, instr.X) - if nn == isnil && slice2ArrayPtrLen(instr) > 0 { - reportf("conversionpanic", instr.Pos(), "nil slice being cast to an array of len > 0 will always panic") - } - } - } - - // For nil comparison blocks, report an error if the condition - // is degenerate, and push a nilness fact on the stack when - // visiting its true and false successor blocks. - if binop, tsucc, fsucc := eq(b); binop != nil { - xnil := nilnessOf(stack, binop.X) - ynil := nilnessOf(stack, binop.Y) - - if ynil != unknown && xnil != unknown && (xnil == isnil || ynil == isnil) { - // Degenerate condition: - // the nilness of both operands is known, - // and at least one of them is nil. - var adj string - if (xnil == ynil) == (binop.Op == token.EQL) { - adj = "tautological" - } else { - adj = "impossible" - } - reportf("cond", binop.Pos(), "%s condition: %s %s %s", adj, xnil, binop.Op, ynil) - - // If tsucc's or fsucc's sole incoming edge is impossible, - // it is unreachable. Prune traversal of it and - // all the blocks it dominates. - // (We could be more precise with full dataflow - // analysis of control-flow joins.) - var skip *ssa.BasicBlock - if xnil == ynil { - skip = fsucc - } else { - skip = tsucc - } - for _, d := range b.Dominees() { - if d == skip && len(d.Preds) == 1 { - continue - } - visit(d, stack) - } - return - } - - // "if x == nil" or "if nil == y" condition; x, y are unknown. 
- if xnil == isnil || ynil == isnil { - var newFacts facts - if xnil == isnil { - // x is nil, y is unknown: - // t successor learns y is nil. - newFacts = expandFacts(fact{binop.Y, isnil}) - } else { - // x is nil, y is unknown: - // t successor learns x is nil. - newFacts = expandFacts(fact{binop.X, isnil}) - } - - for _, d := range b.Dominees() { - // Successor blocks learn a fact - // only at non-critical edges. - // (We could do be more precise with full dataflow - // analysis of control-flow joins.) - s := stack - if len(d.Preds) == 1 { - if d == tsucc { - s = append(s, newFacts...) - } else if d == fsucc { - s = append(s, newFacts.negate()...) - } - } - visit(d, s) - } - return - } - } - - // In code of the form: - // - // if ptr, ok := x.(*T); ok { ... } else { fsucc } - // - // the fsucc block learns that ptr == nil, - // since that's its zero value. - if If, ok := b.Instrs[len(b.Instrs)-1].(*ssa.If); ok { - // Handle "if ok" and "if !ok" variants. - cond, fsucc := If.Cond, b.Succs[1] - if unop, ok := cond.(*ssa.UnOp); ok && unop.Op == token.NOT { - cond, fsucc = unop.X, b.Succs[0] - } - - // Match pattern: - // t0 = typeassert (pointerlike) - // t1 = extract t0 #0 // ptr - // t2 = extract t0 #1 // ok - // if t2 goto tsucc, fsucc - if extract1, ok := cond.(*ssa.Extract); ok && extract1.Index == 1 { - if assert, ok := extract1.Tuple.(*ssa.TypeAssert); ok && - isNillable(assert.AssertedType) { - for _, pinstr := range *assert.Referrers() { - if extract0, ok := pinstr.(*ssa.Extract); ok && - extract0.Index == 0 && - extract0.Tuple == extract1.Tuple { - for _, d := range b.Dominees() { - if len(d.Preds) == 1 && d == fsucc { - visit(d, append(stack, fact{extract0, isnil})) - } - } - } - } - } - } - } - - for _, d := range b.Dominees() { - visit(d, stack) - } - } - - // Visit the entry block. No need to visit fn.Recover. - if fn.Blocks != nil { - visit(fn.Blocks[0], make([]fact, 0, 20)) // 20 is plenty - } -} - -// A fact records that a block is dominated -// by the condition v == nil or v != nil. -type fact struct { - value ssa.Value - nilness nilness -} - -func (f fact) negate() fact { return fact{f.value, -f.nilness} } - -type nilness int - -const ( - isnonnil = -1 - unknown nilness = 0 - isnil = 1 -) - -var nilnessStrings = []string{"non-nil", "unknown", "nil"} - -func (n nilness) String() string { return nilnessStrings[n+1] } - -// nilnessOf reports whether v is definitely nil, definitely not nil, -// or unknown given the dominating stack of facts. -func nilnessOf(stack []fact, v ssa.Value) nilness { - switch v := v.(type) { - // unwrap ChangeInterface and Slice values recursively, to detect if underlying - // values have any facts recorded or are otherwise known with regard to nilness. - // - // This work must be in addition to expanding facts about - // ChangeInterfaces during inference/fact gathering because this covers - // cases where the nilness of a value is intrinsic, rather than based - // on inferred facts, such as a zero value interface variable. That - // said, this work alone would only inform us when facts are about - // underlying values, rather than outer values, when the analysis is - // transitive in both directions. 
- case *ssa.ChangeInterface: - if underlying := nilnessOf(stack, v.X); underlying != unknown { - return underlying - } - case *ssa.Slice: - if underlying := nilnessOf(stack, v.X); underlying != unknown { - return underlying - } - case *ssa.SliceToArrayPointer: - nn := nilnessOf(stack, v.X) - if slice2ArrayPtrLen(v) > 0 { - if nn == isnil { - // We know that *(*[1]byte)(nil) is going to panic because of the - // conversion. So return unknown to the caller, prevent useless - // nil deference reporting due to * operator. - return unknown - } - // Otherwise, the conversion will yield a non-nil pointer to array. - // Note that the instruction can still panic if array length greater - // than slice length. If the value is used by another instruction, - // that instruction can assume the panic did not happen when that - // instruction is reached. - return isnonnil - } - // In case array length is zero, the conversion result depends on nilness of the slice. - if nn != unknown { - return nn - } - } - - // Is value intrinsically nil or non-nil? - switch v := v.(type) { - case *ssa.Alloc, - *ssa.FieldAddr, - *ssa.FreeVar, - *ssa.Function, - *ssa.Global, - *ssa.IndexAddr, - *ssa.MakeChan, - *ssa.MakeClosure, - *ssa.MakeInterface, - *ssa.MakeMap, - *ssa.MakeSlice: - return isnonnil - case *ssa.Const: - if v.IsNil() { - return isnil // nil or zero value of a pointer-like type - } else { - return unknown // non-pointer - } - } - - // Search dominating control-flow facts. - for _, f := range stack { - if f.value == v { - return f.nilness - } - } - return unknown -} - -func slice2ArrayPtrLen(v *ssa.SliceToArrayPointer) int64 { - return v.Type().(*types.Pointer).Elem().Underlying().(*types.Array).Len() -} - -// If b ends with an equality comparison, eq returns the operation and -// its true (equal) and false (not equal) successors. -func eq(b *ssa.BasicBlock) (op *ssa.BinOp, tsucc, fsucc *ssa.BasicBlock) { - if If, ok := b.Instrs[len(b.Instrs)-1].(*ssa.If); ok { - if binop, ok := If.Cond.(*ssa.BinOp); ok { - switch binop.Op { - case token.EQL: - return binop, b.Succs[0], b.Succs[1] - case token.NEQ: - return binop, b.Succs[1], b.Succs[0] - } - } - } - return nil, nil, nil -} - -// expandFacts takes a single fact and returns the set of facts that can be -// known about it or any of its related values. Some operations, like -// ChangeInterface, have transitive nilness, such that if you know the -// underlying value is nil, you also know the value itself is nil, and vice -// versa. This operation allows callers to match on any of the related values -// in analyses, rather than just the one form of the value that happened to -// appear in a comparison. -// -// This work must be in addition to unwrapping values within nilnessOf because -// while this work helps give facts about transitively known values based on -// inferred facts, the recursive check within nilnessOf covers cases where -// nilness facts are intrinsic to the underlying value, such as a zero value -// interface variables. -// -// ChangeInterface is the only expansion currently supported, but others, like -// Slice, could be added. At this time, this tool does not check slice -// operations in a way this expansion could help. See -// https://play.golang.org/p/mGqXEp7w4fR for an example. 
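// A small, purely illustrative sketch of code that triggers the "nilderef"
// and "cond" report categories used by the checks above.
package example

func load(p *int) {
	if p == nil {
		print(*p) // nilness: nil dereference in load (p is provably nil here)
	}
}

func tautology(x int) bool {
	p := &x
	if p != nil { // nilness: tautological condition: non-nil != nil
		return true
	}
	return false
}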
-func expandFacts(f fact) []fact { - ff := []fact{f} - -Loop: - for { - switch v := f.value.(type) { - case *ssa.ChangeInterface: - f = fact{v.X, f.nilness} - ff = append(ff, f) - default: - break Loop - } - } - - return ff -} - -type facts []fact - -func (ff facts) negate() facts { - nn := make([]fact, len(ff)) - for i, f := range ff { - nn[i] = f.negate() - } - return nn -} - -func is[T any](x any) bool { - _, ok := x.(T) - return ok -} - -func isNillable(t types.Type) bool { - switch t := typeparams.CoreType(t).(type) { - case *types.Pointer, - *types.Map, - *types.Signature, - *types.Chan, - *types.Interface, - *types.Slice: - return true - case *types.Basic: - return t == types.Typ[types.UnsafePointer] - } - return false -} - -// isRangeIndex reports whether the instruction is a slice indexing -// operation slice[i] within a "for range slice" loop. The operation -// could be explicit, such as slice[i] within (or even after) the -// loop, or it could be implicit, such as "for i, v := range slice {}". -// (These cannot be reliably distinguished.) -func isRangeIndex(instr *ssa.IndexAddr) bool { - // Here we reverse-engineer the go/ssa lowering of range-over-slice: - // - // n = len(x) - // jump loop - // loop: "rangeindex.loop" - // phi = φ(-1, incr) #rangeindex - // incr = phi + 1 - // cond = incr < n - // if cond goto body else done - // body: "rangeindex.body" - // instr = &x[incr] - // ... - // done: - if incr, ok := instr.Index.(*ssa.BinOp); ok && incr.Op == token.ADD { - if b := incr.Block(); b.Comment == "rangeindex.loop" { - if If, ok := b.Instrs[len(b.Instrs)-1].(*ssa.If); ok { - if cond := If.Cond.(*ssa.BinOp); cond.X == incr && cond.Op == token.LSS { - if call, ok := cond.Y.(*ssa.Call); ok { - common := call.Common() - if blt, ok := common.Value.(*ssa.Builtin); ok && blt.Name() == "len" { - return common.Args[0] == instr.X - } - } - } - } - } - } - return false -} diff --git a/vendor/golang.org/x/tools/go/analysis/passes/pkgfact/pkgfact.go b/vendor/golang.org/x/tools/go/analysis/passes/pkgfact/pkgfact.go deleted file mode 100644 index 4bf33d45f..000000000 --- a/vendor/golang.org/x/tools/go/analysis/passes/pkgfact/pkgfact.go +++ /dev/null @@ -1,128 +0,0 @@ -// Copyright 2018 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// The pkgfact package is a demonstration and test of the package fact -// mechanism. -// -// The output of the pkgfact analysis is a set of key/values pairs -// gathered from the analyzed package and its imported dependencies. -// Each key/value pair comes from a top-level constant declaration -// whose name starts and ends with "_". For example: -// -// package p -// -// const _greeting_ = "hello" -// const _audience_ = "world" -// -// the pkgfact analysis output for package p would be: -// -// {"greeting": "hello", "audience": "world"}. -// -// In addition, the analysis reports a diagnostic at each import -// showing which key/value pairs it contributes. 
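// A sketch of a hypothetical downstream analyzer consuming pkgfact's result,
// assuming the standard golang.org/x/tools/go/analysis driver wiring.
package example

import (
	"golang.org/x/tools/go/analysis"
	"golang.org/x/tools/go/analysis/passes/pkgfact"
)

var Consumer = &analysis.Analyzer{
	Name:     "pkgfactconsumer",
	Doc:      "report the greeting gathered by pkgfact",
	Requires: []*analysis.Analyzer{pkgfact.Analyzer},
	Run: func(pass *analysis.Pass) (interface{}, error) {
		// pkgfact's ResultType is map[string]string, so this assertion is safe.
		pairs := pass.ResultOf[pkgfact.Analyzer].(map[string]string)
		if greeting, ok := pairs["greeting"]; ok && len(pass.Files) > 0 {
			// Matches the `const _greeting_ = "hello"` example above.
			pass.Reportf(pass.Files[0].Package, "greeting is %q", greeting)
		}
		return nil, nil
	},
}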
-package pkgfact - -import ( - "fmt" - "go/ast" - "go/token" - "go/types" - "reflect" - "sort" - "strings" - - "golang.org/x/tools/go/analysis" -) - -var Analyzer = &analysis.Analyzer{ - Name: "pkgfact", - Doc: "gather name/value pairs from constant declarations", - URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/pkgfact", - Run: run, - FactTypes: []analysis.Fact{new(pairsFact)}, - ResultType: reflect.TypeOf(map[string]string{}), -} - -// A pairsFact is a package-level fact that records -// an set of key=value strings accumulated from constant -// declarations in this package and its dependencies. -// Elements are ordered by keys, which are unique. -type pairsFact []string - -func (f *pairsFact) AFact() {} -func (f *pairsFact) String() string { return "pairs(" + strings.Join(*f, ", ") + ")" } - -func run(pass *analysis.Pass) (interface{}, error) { - result := make(map[string]string) - - // At each import, print the fact from the imported - // package and accumulate its information into the result. - // (Warning: accumulation leads to quadratic growth of work.) - doImport := func(spec *ast.ImportSpec) { - pkg := imported(pass.TypesInfo, spec) - var fact pairsFact - if pass.ImportPackageFact(pkg, &fact) { - for _, pair := range fact { - eq := strings.IndexByte(pair, '=') - result[pair[:eq]] = pair[1+eq:] - } - pass.ReportRangef(spec, "%s", strings.Join(fact, " ")) - } - } - - // At each "const _name_ = value", add a fact into env. - doConst := func(spec *ast.ValueSpec) { - if len(spec.Names) == len(spec.Values) { - for i := range spec.Names { - name := spec.Names[i].Name - if strings.HasPrefix(name, "_") && strings.HasSuffix(name, "_") { - - if key := strings.Trim(name, "_"); key != "" { - value := pass.TypesInfo.Types[spec.Values[i]].Value.String() - result[key] = value - } - } - } - } - } - - for _, f := range pass.Files { - for _, decl := range f.Decls { - if decl, ok := decl.(*ast.GenDecl); ok { - for _, spec := range decl.Specs { - switch decl.Tok { - case token.IMPORT: - doImport(spec.(*ast.ImportSpec)) - case token.CONST: - doConst(spec.(*ast.ValueSpec)) - } - } - } - } - } - - // Sort/deduplicate the result and save it as a package fact. - keys := make([]string, 0, len(result)) - for key := range result { - keys = append(keys, key) - } - sort.Strings(keys) - var fact pairsFact - for _, key := range keys { - fact = append(fact, fmt.Sprintf("%s=%s", key, result[key])) - } - if len(fact) > 0 { - pass.ExportPackageFact(&fact) - } - - return result, nil -} - -func imported(info *types.Info, spec *ast.ImportSpec) *types.Package { - obj, ok := info.Implicits[spec] - if !ok { - obj = info.Defs[spec.Name] // renaming import - } - return obj.(*types.PkgName).Imported() -} diff --git a/vendor/golang.org/x/tools/go/analysis/passes/printf/doc.go b/vendor/golang.org/x/tools/go/analysis/passes/printf/doc.go deleted file mode 100644 index 1ee16126a..000000000 --- a/vendor/golang.org/x/tools/go/analysis/passes/printf/doc.go +++ /dev/null @@ -1,47 +0,0 @@ -// Copyright 2023 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Package printf defines an Analyzer that checks consistency -// of Printf format strings and arguments. -// -// # Analyzer printf -// -// printf: check consistency of Printf format strings and arguments -// -// The check applies to calls of the formatting functions such as -// [fmt.Printf] and [fmt.Sprintf], as well as any detected wrappers of -// those functions. 
-// -// In this example, the %d format operator requires an integer operand: -// -// fmt.Printf("%d", "hello") // fmt.Printf format %d has arg "hello" of wrong type string -// -// See the documentation of the fmt package for the complete set of -// format operators and their operand types. -// -// To enable printf checking on a function that is not found by this -// analyzer's heuristics (for example, because control is obscured by -// dynamic method calls), insert a bogus call: -// -// func MyPrintf(format string, args ...any) { -// if false { -// _ = fmt.Sprintf(format, args...) // enable printf checker -// } -// ... -// } -// -// The -funcs flag specifies a comma-separated list of names of additional -// known formatting functions or methods. If the name contains a period, -// it must denote a specific function using one of the following forms: -// -// dir/pkg.Function -// dir/pkg.Type.Method -// (*dir/pkg.Type).Method -// -// Otherwise the name is interpreted as a case-insensitive unqualified -// identifier such as "errorf". Either way, if a listed name ends in f, the -// function is assumed to be Printf-like, taking a format string before the -// argument list. Otherwise it is assumed to be Print-like, taking a list -// of arguments with no format string. -package printf diff --git a/vendor/golang.org/x/tools/go/analysis/passes/printf/printf.go b/vendor/golang.org/x/tools/go/analysis/passes/printf/printf.go deleted file mode 100644 index 323501925..000000000 --- a/vendor/golang.org/x/tools/go/analysis/passes/printf/printf.go +++ /dev/null @@ -1,1113 +0,0 @@ -// Copyright 2010 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package printf - -import ( - "bytes" - _ "embed" - "fmt" - "go/ast" - "go/constant" - "go/token" - "go/types" - "reflect" - "regexp" - "sort" - "strconv" - "strings" - "unicode/utf8" - - "golang.org/x/tools/go/analysis" - "golang.org/x/tools/go/analysis/passes/inspect" - "golang.org/x/tools/go/analysis/passes/internal/analysisutil" - "golang.org/x/tools/go/ast/inspector" - "golang.org/x/tools/go/types/typeutil" - "golang.org/x/tools/internal/aliases" - "golang.org/x/tools/internal/typeparams" -) - -func init() { - Analyzer.Flags.Var(isPrint, "funcs", "comma-separated list of print function names to check") -} - -//go:embed doc.go -var doc string - -var Analyzer = &analysis.Analyzer{ - Name: "printf", - Doc: analysisutil.MustExtractDoc(doc, "printf"), - URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/printf", - Requires: []*analysis.Analyzer{inspect.Analyzer}, - Run: run, - ResultType: reflect.TypeOf((*Result)(nil)), - FactTypes: []analysis.Fact{new(isWrapper)}, -} - -// Kind is a kind of fmt function behavior. -type Kind int - -const ( - KindNone Kind = iota // not a fmt wrapper function - KindPrint // function behaves like fmt.Print - KindPrintf // function behaves like fmt.Printf - KindErrorf // function behaves like fmt.Errorf -) - -func (kind Kind) String() string { - switch kind { - case KindPrint: - return "print" - case KindPrintf: - return "printf" - case KindErrorf: - return "errorf" - } - return "" -} - -// Result is the printf analyzer's result type. Clients may query the result -// to learn whether a function behaves like fmt.Print or fmt.Printf. -type Result struct { - funcs map[*types.Func]Kind -} - -// Kind reports whether fn behaves like fmt.Print or fmt.Printf. 
-func (r *Result) Kind(fn *types.Func) Kind { - _, ok := isPrint[fn.FullName()] - if !ok { - // Next look up just "printf", for use with -printf.funcs. - _, ok = isPrint[strings.ToLower(fn.Name())] - } - if ok { - if strings.HasSuffix(fn.Name(), "f") { - return KindPrintf - } else { - return KindPrint - } - } - - return r.funcs[fn] -} - -// isWrapper is a fact indicating that a function is a print or printf wrapper. -type isWrapper struct{ Kind Kind } - -func (f *isWrapper) AFact() {} - -func (f *isWrapper) String() string { - switch f.Kind { - case KindPrintf: - return "printfWrapper" - case KindPrint: - return "printWrapper" - case KindErrorf: - return "errorfWrapper" - default: - return "unknownWrapper" - } -} - -func run(pass *analysis.Pass) (interface{}, error) { - res := &Result{ - funcs: make(map[*types.Func]Kind), - } - findPrintfLike(pass, res) - checkCall(pass) - return res, nil -} - -type printfWrapper struct { - obj *types.Func - fdecl *ast.FuncDecl - format *types.Var - args *types.Var - callers []printfCaller - failed bool // if true, not a printf wrapper -} - -type printfCaller struct { - w *printfWrapper - call *ast.CallExpr -} - -// maybePrintfWrapper decides whether decl (a declared function) may be a wrapper -// around a fmt.Printf or fmt.Print function. If so it returns a printfWrapper -// function describing the declaration. Later processing will analyze the -// graph of potential printf wrappers to pick out the ones that are true wrappers. -// A function may be a Printf or Print wrapper if its last argument is ...interface{}. -// If the next-to-last argument is a string, then this may be a Printf wrapper. -// Otherwise it may be a Print wrapper. -func maybePrintfWrapper(info *types.Info, decl ast.Decl) *printfWrapper { - // Look for functions with final argument type ...interface{}. - fdecl, ok := decl.(*ast.FuncDecl) - if !ok || fdecl.Body == nil { - return nil - } - fn, ok := info.Defs[fdecl.Name].(*types.Func) - // Type information may be incomplete. - if !ok { - return nil - } - - sig := fn.Type().(*types.Signature) - if !sig.Variadic() { - return nil // not variadic - } - - params := sig.Params() - nparams := params.Len() // variadic => nonzero - - args := params.At(nparams - 1) - iface, ok := args.Type().(*types.Slice).Elem().(*types.Interface) - if !ok || !iface.Empty() { - return nil // final (args) param is not ...interface{} - } - - // Is second last param 'format string'? - var format *types.Var - if nparams >= 2 { - if p := params.At(nparams - 2); p.Type() == types.Typ[types.String] { - format = p - } - } - - return &printfWrapper{ - obj: fn, - fdecl: fdecl, - format: format, - args: args, - } -} - -// findPrintfLike scans the entire package to find printf-like functions. -func findPrintfLike(pass *analysis.Pass, res *Result) (interface{}, error) { - // Gather potential wrappers and call graph between them. - byObj := make(map[*types.Func]*printfWrapper) - var wrappers []*printfWrapper - for _, file := range pass.Files { - for _, decl := range file.Decls { - w := maybePrintfWrapper(pass.TypesInfo, decl) - if w == nil { - continue - } - byObj[w.obj] = w - wrappers = append(wrappers, w) - } - } - - // Walk the graph to figure out which are really printf wrappers. - for _, w := range wrappers { - // Scan function for calls that could be to other printf-like functions. - ast.Inspect(w.fdecl.Body, func(n ast.Node) bool { - if w.failed { - return false - } - - // TODO: Relax these checks; issue 26555. 
- if assign, ok := n.(*ast.AssignStmt); ok { - for _, lhs := range assign.Lhs { - if match(pass.TypesInfo, lhs, w.format) || - match(pass.TypesInfo, lhs, w.args) { - // Modifies the format - // string or args in - // some way, so not a - // simple wrapper. - w.failed = true - return false - } - } - } - if un, ok := n.(*ast.UnaryExpr); ok && un.Op == token.AND { - if match(pass.TypesInfo, un.X, w.format) || - match(pass.TypesInfo, un.X, w.args) { - // Taking the address of the - // format string or args, - // so not a simple wrapper. - w.failed = true - return false - } - } - - call, ok := n.(*ast.CallExpr) - if !ok || len(call.Args) == 0 || !match(pass.TypesInfo, call.Args[len(call.Args)-1], w.args) { - return true - } - - fn, kind := printfNameAndKind(pass, call) - if kind != 0 { - checkPrintfFwd(pass, w, call, kind, res) - return true - } - - // If the call is to another function in this package, - // maybe we will find out it is printf-like later. - // Remember this call for later checking. - if fn != nil && fn.Pkg() == pass.Pkg && byObj[fn] != nil { - callee := byObj[fn] - callee.callers = append(callee.callers, printfCaller{w, call}) - } - - return true - }) - } - return nil, nil -} - -func match(info *types.Info, arg ast.Expr, param *types.Var) bool { - id, ok := arg.(*ast.Ident) - return ok && info.ObjectOf(id) == param -} - -// checkPrintfFwd checks that a printf-forwarding wrapper is forwarding correctly. -// It diagnoses writing fmt.Printf(format, args) instead of fmt.Printf(format, args...). -func checkPrintfFwd(pass *analysis.Pass, w *printfWrapper, call *ast.CallExpr, kind Kind, res *Result) { - matched := kind == KindPrint || - kind != KindNone && len(call.Args) >= 2 && match(pass.TypesInfo, call.Args[len(call.Args)-2], w.format) - if !matched { - return - } - - if !call.Ellipsis.IsValid() { - typ, ok := pass.TypesInfo.Types[call.Fun].Type.(*types.Signature) - if !ok { - return - } - if len(call.Args) > typ.Params().Len() { - // If we're passing more arguments than what the - // print/printf function can take, adding an ellipsis - // would break the program. For example: - // - // func foo(arg1 string, arg2 ...interface{}) { - // fmt.Printf("%s %v", arg1, arg2) - // } - return - } - desc := "printf" - if kind == KindPrint { - desc = "print" - } - pass.ReportRangef(call, "missing ... in args forwarded to %s-like function", desc) - return - } - fn := w.obj - var fact isWrapper - if !pass.ImportObjectFact(fn, &fact) { - fact.Kind = kind - pass.ExportObjectFact(fn, &fact) - res.funcs[fn] = kind - for _, caller := range w.callers { - checkPrintfFwd(pass, caller.w, caller.call, kind, res) - } - } -} - -// isPrint records the print functions. -// If a key ends in 'f' then it is assumed to be a formatted print. -// -// Keys are either values returned by (*types.Func).FullName, -// or case-insensitive identifiers such as "errorf". -// -// The -funcs flag adds to this set. -// -// The set below includes facts for many important standard library -// functions, even though the analysis is capable of deducing that, for -// example, fmt.Printf forwards to fmt.Fprintf. We avoid relying on the -// driver applying analyzers to standard packages because "go vet" does -// not do so with gccgo, and nor do some other build systems. 
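// An illustrative sketch of the wrapper detection and forwarding check
// described above; logf and badLogf are hypothetical functions.
package example

import "fmt"

// logf is recognized as a printf wrapper: its final parameter is
// ...interface{}, the preceding one is a format string, and it forwards
// both to fmt.Printf with the ellipsis.
func logf(format string, args ...interface{}) {
	fmt.Printf(format, args...)
}

// badLogf forgets the ellipsis, so args would be printed as a single
// []interface{} value; the analyzer reports
// "missing ... in args forwarded to printf-like function".
func badLogf(format string, args ...interface{}) {
	fmt.Printf(format, args)
}

func demo() {
	logf("%d items", 3)
	logf("%d items", "three") // reported: format %d has arg "three" of wrong type string
}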
-var isPrint = stringSet{ - "fmt.Appendf": true, - "fmt.Append": true, - "fmt.Appendln": true, - "fmt.Errorf": true, - "fmt.Fprint": true, - "fmt.Fprintf": true, - "fmt.Fprintln": true, - "fmt.Print": true, - "fmt.Printf": true, - "fmt.Println": true, - "fmt.Sprint": true, - "fmt.Sprintf": true, - "fmt.Sprintln": true, - - "runtime/trace.Logf": true, - - "log.Print": true, - "log.Printf": true, - "log.Println": true, - "log.Fatal": true, - "log.Fatalf": true, - "log.Fatalln": true, - "log.Panic": true, - "log.Panicf": true, - "log.Panicln": true, - "(*log.Logger).Fatal": true, - "(*log.Logger).Fatalf": true, - "(*log.Logger).Fatalln": true, - "(*log.Logger).Panic": true, - "(*log.Logger).Panicf": true, - "(*log.Logger).Panicln": true, - "(*log.Logger).Print": true, - "(*log.Logger).Printf": true, - "(*log.Logger).Println": true, - - "(*testing.common).Error": true, - "(*testing.common).Errorf": true, - "(*testing.common).Fatal": true, - "(*testing.common).Fatalf": true, - "(*testing.common).Log": true, - "(*testing.common).Logf": true, - "(*testing.common).Skip": true, - "(*testing.common).Skipf": true, - // *testing.T and B are detected by induction, but testing.TB is - // an interface and the inference can't follow dynamic calls. - "(testing.TB).Error": true, - "(testing.TB).Errorf": true, - "(testing.TB).Fatal": true, - "(testing.TB).Fatalf": true, - "(testing.TB).Log": true, - "(testing.TB).Logf": true, - "(testing.TB).Skip": true, - "(testing.TB).Skipf": true, -} - -// formatString returns the format string argument and its index within -// the given printf-like call expression. -// -// The last parameter before variadic arguments is assumed to be -// a format string. -// -// The first string literal or string constant is assumed to be a format string -// if the call's signature cannot be determined. -// -// If it cannot find any format string parameter, it returns ("", -1). -func formatString(pass *analysis.Pass, call *ast.CallExpr) (format string, idx int) { - typ := pass.TypesInfo.Types[call.Fun].Type - if typ != nil { - if sig, ok := typ.(*types.Signature); ok { - if !sig.Variadic() { - // Skip checking non-variadic functions. - return "", -1 - } - idx := sig.Params().Len() - 2 - if idx < 0 { - // Skip checking variadic functions without - // fixed arguments. - return "", -1 - } - s, ok := stringConstantArg(pass, call, idx) - if !ok { - // The last argument before variadic args isn't a string. - return "", -1 - } - return s, idx - } - } - - // Cannot determine call's signature. Fall back to scanning for the first - // string constant in the call. - for idx := range call.Args { - if s, ok := stringConstantArg(pass, call, idx); ok { - return s, idx - } - if pass.TypesInfo.Types[call.Args[idx]].Type == types.Typ[types.String] { - // Skip checking a call with a non-constant format - // string argument, since its contents are unavailable - // for validation. - return "", -1 - } - } - return "", -1 -} - -// stringConstantArg returns call's string constant argument at the index idx. -// -// ("", false) is returned if call's argument at the index idx isn't a string -// constant. -func stringConstantArg(pass *analysis.Pass, call *ast.CallExpr, idx int) (string, bool) { - if idx >= len(call.Args) { - return "", false - } - return stringConstantExpr(pass, call.Args[idx]) -} - -// stringConstantExpr returns expression's string constant value. -// -// ("", false) is returned if expression isn't a string -// constant. 
-func stringConstantExpr(pass *analysis.Pass, expr ast.Expr) (string, bool) { - lit := pass.TypesInfo.Types[expr].Value - if lit != nil && lit.Kind() == constant.String { - return constant.StringVal(lit), true - } - return "", false -} - -// checkCall triggers the print-specific checks if the call invokes a print function. -func checkCall(pass *analysis.Pass) { - inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) - nodeFilter := []ast.Node{ - (*ast.CallExpr)(nil), - } - inspect.Preorder(nodeFilter, func(n ast.Node) { - call := n.(*ast.CallExpr) - fn, kind := printfNameAndKind(pass, call) - switch kind { - case KindPrintf, KindErrorf: - checkPrintf(pass, kind, call, fn) - case KindPrint: - checkPrint(pass, call, fn) - } - }) -} - -func printfNameAndKind(pass *analysis.Pass, call *ast.CallExpr) (fn *types.Func, kind Kind) { - fn, _ = typeutil.Callee(pass.TypesInfo, call).(*types.Func) - if fn == nil { - return nil, 0 - } - - _, ok := isPrint[fn.FullName()] - if !ok { - // Next look up just "printf", for use with -printf.funcs. - _, ok = isPrint[strings.ToLower(fn.Name())] - } - if ok { - if fn.FullName() == "fmt.Errorf" { - kind = KindErrorf - } else if strings.HasSuffix(fn.Name(), "f") { - kind = KindPrintf - } else { - kind = KindPrint - } - return fn, kind - } - - var fact isWrapper - if pass.ImportObjectFact(fn, &fact) { - return fn, fact.Kind - } - - return fn, KindNone -} - -// isFormatter reports whether t could satisfy fmt.Formatter. -// The only interface method to look for is "Format(State, rune)". -func isFormatter(typ types.Type) bool { - // If the type is an interface, the value it holds might satisfy fmt.Formatter. - if _, ok := typ.Underlying().(*types.Interface); ok { - // Don't assume type parameters could be formatters. With the greater - // expressiveness of constraint interface syntax we expect more type safety - // when using type parameters. - if !typeparams.IsTypeParam(typ) { - return true - } - } - obj, _, _ := types.LookupFieldOrMethod(typ, false, nil, "Format") - fn, ok := obj.(*types.Func) - if !ok { - return false - } - sig := fn.Type().(*types.Signature) - return sig.Params().Len() == 2 && - sig.Results().Len() == 0 && - analysisutil.IsNamedType(sig.Params().At(0).Type(), "fmt", "State") && - types.Identical(sig.Params().At(1).Type(), types.Typ[types.Rune]) -} - -// formatState holds the parsed representation of a printf directive such as "%3.*[4]d". -// It is constructed by parsePrintfVerb. -type formatState struct { - verb rune // the format verb: 'd' for "%d" - format string // the full format directive from % through verb, "%.3d". - name string // Printf, Sprintf etc. - flags []byte // the list of # + etc. - argNums []int // the successive argument numbers that are consumed, adjusted to refer to actual arg in call - firstArg int // Index of first argument after the format in the Printf call. - // Used only during parse. - pass *analysis.Pass - call *ast.CallExpr - argNum int // Which argument we're expecting to format now. - hasIndex bool // Whether the argument is indexed. - indexPending bool // Whether we have an indexed argument that has not resolved. - nbytes int // number of bytes of the format string consumed. -} - -// checkPrintf checks a call to a formatted print routine such as Printf. 
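// A hedged sketch of diagnostics checkPrintf produces beyond the wrong-type
// case shown in doc.go: argument-count mismatches, unknown verbs, and the
// %w restriction.
package example

import "fmt"

func demo(err error) error {
	fmt.Printf("%d of %d\n", 1)          // fmt.Printf call needs 2 args but has 1 arg
	fmt.Printf("%z\n", 1)                // fmt.Printf format %z has unknown verb z
	fmt.Printf("failed: %w\n", err)      // fmt.Printf does not support error-wrapping directive %w
	return fmt.Errorf("failed: %w", err) // ok: Errorf supports %w
}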
-func checkPrintf(pass *analysis.Pass, kind Kind, call *ast.CallExpr, fn *types.Func) { - format, idx := formatString(pass, call) - if idx < 0 { - if false { - pass.Reportf(call.Lparen, "can't check non-constant format in call to %s", fn.FullName()) - } - return - } - - firstArg := idx + 1 // Arguments are immediately after format string. - if !strings.Contains(format, "%") { - if len(call.Args) > firstArg { - pass.Reportf(call.Lparen, "%s call has arguments but no formatting directives", fn.FullName()) - } - return - } - // Hard part: check formats against args. - argNum := firstArg - maxArgNum := firstArg - anyIndex := false - for i, w := 0, 0; i < len(format); i += w { - w = 1 - if format[i] != '%' { - continue - } - state := parsePrintfVerb(pass, call, fn.FullName(), format[i:], firstArg, argNum) - if state == nil { - return - } - w = len(state.format) - if !okPrintfArg(pass, call, state) { // One error per format is enough. - return - } - if state.hasIndex { - anyIndex = true - } - if state.verb == 'w' { - switch kind { - case KindNone, KindPrint, KindPrintf: - pass.Reportf(call.Pos(), "%s does not support error-wrapping directive %%w", state.name) - return - } - } - if len(state.argNums) > 0 { - // Continue with the next sequential argument. - argNum = state.argNums[len(state.argNums)-1] + 1 - } - for _, n := range state.argNums { - if n >= maxArgNum { - maxArgNum = n + 1 - } - } - } - // Dotdotdot is hard. - if call.Ellipsis.IsValid() && maxArgNum >= len(call.Args)-1 { - return - } - // If any formats are indexed, extra arguments are ignored. - if anyIndex { - return - } - // There should be no leftover arguments. - if maxArgNum != len(call.Args) { - expect := maxArgNum - firstArg - numArgs := len(call.Args) - firstArg - pass.ReportRangef(call, "%s call needs %v but has %v", fn.FullName(), count(expect, "arg"), count(numArgs, "arg")) - } -} - -// parseFlags accepts any printf flags. -func (s *formatState) parseFlags() { - for s.nbytes < len(s.format) { - switch c := s.format[s.nbytes]; c { - case '#', '0', '+', '-', ' ': - s.flags = append(s.flags, c) - s.nbytes++ - default: - return - } - } -} - -// scanNum advances through a decimal number if present. -func (s *formatState) scanNum() { - for ; s.nbytes < len(s.format); s.nbytes++ { - c := s.format[s.nbytes] - if c < '0' || '9' < c { - return - } - } -} - -// parseIndex scans an index expression. It returns false if there is a syntax error. -func (s *formatState) parseIndex() bool { - if s.nbytes == len(s.format) || s.format[s.nbytes] != '[' { - return true - } - // Argument index present. - s.nbytes++ // skip '[' - start := s.nbytes - s.scanNum() - ok := true - if s.nbytes == len(s.format) || s.nbytes == start || s.format[s.nbytes] != ']' { - ok = false // syntax error is either missing "]" or invalid index. - s.nbytes = strings.Index(s.format[start:], "]") - if s.nbytes < 0 { - s.pass.ReportRangef(s.call, "%s format %s is missing closing ]", s.name, s.format) - return false - } - s.nbytes = s.nbytes + start - } - arg32, err := strconv.ParseInt(s.format[start:s.nbytes], 10, 32) - if err != nil || !ok || arg32 <= 0 || arg32 > int64(len(s.call.Args)-s.firstArg) { - s.pass.ReportRangef(s.call, "%s format has invalid argument index [%s]", s.name, s.format[start:s.nbytes]) - return false - } - s.nbytes++ // skip ']' - arg := int(arg32) - arg += s.firstArg - 1 // We want to zero-index the actual arguments. - s.argNum = arg - s.hasIndex = true - s.indexPending = true - return true -} - -// parseNum scans a width or precision (or *). 
It returns false if there's a bad index expression. -func (s *formatState) parseNum() bool { - if s.nbytes < len(s.format) && s.format[s.nbytes] == '*' { - if s.indexPending { // Absorb it. - s.indexPending = false - } - s.nbytes++ - s.argNums = append(s.argNums, s.argNum) - s.argNum++ - } else { - s.scanNum() - } - return true -} - -// parsePrecision scans for a precision. It returns false if there's a bad index expression. -func (s *formatState) parsePrecision() bool { - // If there's a period, there may be a precision. - if s.nbytes < len(s.format) && s.format[s.nbytes] == '.' { - s.flags = append(s.flags, '.') // Treat precision as a flag. - s.nbytes++ - if !s.parseIndex() { - return false - } - if !s.parseNum() { - return false - } - } - return true -} - -// parsePrintfVerb looks the formatting directive that begins the format string -// and returns a formatState that encodes what the directive wants, without looking -// at the actual arguments present in the call. The result is nil if there is an error. -func parsePrintfVerb(pass *analysis.Pass, call *ast.CallExpr, name, format string, firstArg, argNum int) *formatState { - state := &formatState{ - format: format, - name: name, - flags: make([]byte, 0, 5), - argNum: argNum, - argNums: make([]int, 0, 1), - nbytes: 1, // There's guaranteed to be a percent sign. - firstArg: firstArg, - pass: pass, - call: call, - } - // There may be flags. - state.parseFlags() - // There may be an index. - if !state.parseIndex() { - return nil - } - // There may be a width. - if !state.parseNum() { - return nil - } - // There may be a precision. - if !state.parsePrecision() { - return nil - } - // Now a verb, possibly prefixed by an index (which we may already have). - if !state.indexPending && !state.parseIndex() { - return nil - } - if state.nbytes == len(state.format) { - pass.ReportRangef(call.Fun, "%s format %s is missing verb at end of string", name, state.format) - return nil - } - verb, w := utf8.DecodeRuneInString(state.format[state.nbytes:]) - state.verb = verb - state.nbytes += w - if verb != '%' { - state.argNums = append(state.argNums, state.argNum) - } - state.format = state.format[:state.nbytes] - return state -} - -// printfArgType encodes the types of expressions a printf verb accepts. It is a bitmask. -type printfArgType int - -const ( - argBool printfArgType = 1 << iota - argInt - argRune - argString - argFloat - argComplex - argPointer - argError - anyType printfArgType = ^0 -) - -type printVerb struct { - verb rune // User may provide verb through Formatter; could be a rune. - flags string // known flags are all ASCII - typ printfArgType -} - -// Common flag sets for printf verbs. -const ( - noFlag = "" - numFlag = " -+.0" - sharpNumFlag = " -+.0#" - allFlags = " -+.0#" -) - -// printVerbs identifies which flags are known to printf for each verb. -var printVerbs = []printVerb{ - // '-' is a width modifier, always valid. - // '.' is a precision for float, max width for strings. - // '+' is required sign for numbers, Go format for %v. - // '#' is alternate format for several verbs. 
- // ' ' is spacer for numbers - {'%', noFlag, 0}, - {'b', sharpNumFlag, argInt | argFloat | argComplex | argPointer}, - {'c', "-", argRune | argInt}, - {'d', numFlag, argInt | argPointer}, - {'e', sharpNumFlag, argFloat | argComplex}, - {'E', sharpNumFlag, argFloat | argComplex}, - {'f', sharpNumFlag, argFloat | argComplex}, - {'F', sharpNumFlag, argFloat | argComplex}, - {'g', sharpNumFlag, argFloat | argComplex}, - {'G', sharpNumFlag, argFloat | argComplex}, - {'o', sharpNumFlag, argInt | argPointer}, - {'O', sharpNumFlag, argInt | argPointer}, - {'p', "-#", argPointer}, - {'q', " -+.0#", argRune | argInt | argString}, - {'s', " -+.0", argString}, - {'t', "-", argBool}, - {'T', "-", anyType}, - {'U', "-#", argRune | argInt}, - {'v', allFlags, anyType}, - {'w', allFlags, argError}, - {'x', sharpNumFlag, argRune | argInt | argString | argPointer | argFloat | argComplex}, - {'X', sharpNumFlag, argRune | argInt | argString | argPointer | argFloat | argComplex}, -} - -// okPrintfArg compares the formatState to the arguments actually present, -// reporting any discrepancies it can discern. If the final argument is ellipsissed, -// there's little it can do for that. -func okPrintfArg(pass *analysis.Pass, call *ast.CallExpr, state *formatState) (ok bool) { - var v printVerb - found := false - // Linear scan is fast enough for a small list. - for _, v = range printVerbs { - if v.verb == state.verb { - found = true - break - } - } - - // Could current arg implement fmt.Formatter? - // Skip check for the %w verb, which requires an error. - formatter := false - if v.typ != argError && state.argNum < len(call.Args) { - if tv, ok := pass.TypesInfo.Types[call.Args[state.argNum]]; ok { - formatter = isFormatter(tv.Type) - } - } - - if !formatter { - if !found { - pass.ReportRangef(call, "%s format %s has unknown verb %c", state.name, state.format, state.verb) - return false - } - for _, flag := range state.flags { - // TODO: Disable complaint about '0' for Go 1.10. To be fixed properly in 1.11. - // See issues 23598 and 23605. - if flag == '0' { - continue - } - if !strings.ContainsRune(v.flags, rune(flag)) { - pass.ReportRangef(call, "%s format %s has unrecognized flag %c", state.name, state.format, flag) - return false - } - } - } - // Verb is good. If len(state.argNums)>trueArgs, we have something like %.*s and all - // but the final arg must be an integer. 
- trueArgs := 1 - if state.verb == '%' { - trueArgs = 0 - } - nargs := len(state.argNums) - for i := 0; i < nargs-trueArgs; i++ { - argNum := state.argNums[i] - if !argCanBeChecked(pass, call, i, state) { - return - } - arg := call.Args[argNum] - if reason, ok := matchArgType(pass, argInt, arg); !ok { - details := "" - if reason != "" { - details = " (" + reason + ")" - } - pass.ReportRangef(call, "%s format %s uses non-int %s%s as argument of *", state.name, state.format, analysisutil.Format(pass.Fset, arg), details) - return false - } - } - - if state.verb == '%' || formatter { - return true - } - argNum := state.argNums[len(state.argNums)-1] - if !argCanBeChecked(pass, call, len(state.argNums)-1, state) { - return false - } - arg := call.Args[argNum] - if isFunctionValue(pass, arg) && state.verb != 'p' && state.verb != 'T' { - pass.ReportRangef(call, "%s format %s arg %s is a func value, not called", state.name, state.format, analysisutil.Format(pass.Fset, arg)) - return false - } - if reason, ok := matchArgType(pass, v.typ, arg); !ok { - typeString := "" - if typ := pass.TypesInfo.Types[arg].Type; typ != nil { - typeString = typ.String() - } - details := "" - if reason != "" { - details = " (" + reason + ")" - } - pass.ReportRangef(call, "%s format %s has arg %s of wrong type %s%s", state.name, state.format, analysisutil.Format(pass.Fset, arg), typeString, details) - return false - } - if v.typ&argString != 0 && v.verb != 'T' && !bytes.Contains(state.flags, []byte{'#'}) { - if methodName, ok := recursiveStringer(pass, arg); ok { - pass.ReportRangef(call, "%s format %s with arg %s causes recursive %s method call", state.name, state.format, analysisutil.Format(pass.Fset, arg), methodName) - return false - } - } - return true -} - -// recursiveStringer reports whether the argument e is a potential -// recursive call to stringer or is an error, such as t and &t in these examples: -// -// func (t *T) String() string { printf("%s", t) } -// func (t T) Error() string { printf("%s", t) } -// func (t T) String() string { printf("%s", &t) } -func recursiveStringer(pass *analysis.Pass, e ast.Expr) (string, bool) { - typ := pass.TypesInfo.Types[e].Type - - // It's unlikely to be a recursive stringer if it has a Format method. - if isFormatter(typ) { - return "", false - } - - // Does e allow e.String() or e.Error()? - strObj, _, _ := types.LookupFieldOrMethod(typ, false, pass.Pkg, "String") - strMethod, strOk := strObj.(*types.Func) - errObj, _, _ := types.LookupFieldOrMethod(typ, false, pass.Pkg, "Error") - errMethod, errOk := errObj.(*types.Func) - if !strOk && !errOk { - return "", false - } - - // inScope returns true if e is in the scope of f. - inScope := func(e ast.Expr, f *types.Func) bool { - return f.Scope() != nil && f.Scope().Contains(e.Pos()) - } - - // Is the expression e within the body of that String or Error method? - var method *types.Func - if strOk && strMethod.Pkg() == pass.Pkg && inScope(e, strMethod) { - method = strMethod - } else if errOk && errMethod.Pkg() == pass.Pkg && inScope(e, errMethod) { - method = errMethod - } else { - return "", false - } - - sig := method.Type().(*types.Signature) - if !isStringer(sig) { - return "", false - } - - // Is it the receiver r, or &r? 
- if u, ok := e.(*ast.UnaryExpr); ok && u.Op == token.AND { - e = u.X // strip off & from &r - } - if id, ok := e.(*ast.Ident); ok { - if pass.TypesInfo.Uses[id] == sig.Recv() { - return method.FullName(), true - } - } - return "", false -} - -// isStringer reports whether the method signature matches the String() definition in fmt.Stringer. -func isStringer(sig *types.Signature) bool { - return sig.Params().Len() == 0 && - sig.Results().Len() == 1 && - sig.Results().At(0).Type() == types.Typ[types.String] -} - -// isFunctionValue reports whether the expression is a function as opposed to a function call. -// It is almost always a mistake to print a function value. -func isFunctionValue(pass *analysis.Pass, e ast.Expr) bool { - if typ := pass.TypesInfo.Types[e].Type; typ != nil { - // Don't call Underlying: a named func type with a String method is ok. - // TODO(adonovan): it would be more precise to check isStringer. - _, ok := typ.(*types.Signature) - return ok - } - return false -} - -// argCanBeChecked reports whether the specified argument is statically present; -// it may be beyond the list of arguments or in a terminal slice... argument, which -// means we can't see it. -func argCanBeChecked(pass *analysis.Pass, call *ast.CallExpr, formatArg int, state *formatState) bool { - argNum := state.argNums[formatArg] - if argNum <= 0 { - // Shouldn't happen, so catch it with prejudice. - panic("negative arg num") - } - if argNum < len(call.Args)-1 { - return true // Always OK. - } - if call.Ellipsis.IsValid() { - return false // We just can't tell; there could be many more arguments. - } - if argNum < len(call.Args) { - return true - } - // There are bad indexes in the format or there are fewer arguments than the format needs. - // This is the argument number relative to the format: Printf("%s", "hi") will give 1 for the "hi". - arg := argNum - state.firstArg + 1 // People think of arguments as 1-indexed. - pass.ReportRangef(call, "%s format %s reads arg #%d, but call has %v", state.name, state.format, arg, count(len(call.Args)-state.firstArg, "arg")) - return false -} - -// printFormatRE is the regexp we match and report as a possible format string -// in the first argument to unformatted prints like fmt.Print. -// We exclude the space flag, so that printing a string like "x % y" is not reported as a format. -var printFormatRE = regexp.MustCompile(`%` + flagsRE + numOptRE + `\.?` + numOptRE + indexOptRE + verbRE) - -const ( - flagsRE = `[+\-#]*` - indexOptRE = `(\[[0-9]+\])?` - numOptRE = `([0-9]+|` + indexOptRE + `\*)?` - verbRE = `[bcdefgopqstvxEFGTUX]` -) - -// checkPrint checks a call to an unformatted print routine such as Println. -func checkPrint(pass *analysis.Pass, call *ast.CallExpr, fn *types.Func) { - firstArg := 0 - typ := pass.TypesInfo.Types[call.Fun].Type - if typ == nil { - // Skip checking functions with unknown type. - return - } - if sig, ok := typ.Underlying().(*types.Signature); ok { - if !sig.Variadic() { - // Skip checking non-variadic functions. - return - } - params := sig.Params() - firstArg = params.Len() - 1 - - typ := params.At(firstArg).Type() - typ = typ.(*types.Slice).Elem() - it, ok := aliases.Unalias(typ).(*types.Interface) - if !ok || !it.Empty() { - // Skip variadic functions accepting non-interface{} args. - return - } - } - args := call.Args - if len(args) <= firstArg { - // Skip calls without variadic args. 
- return - } - args = args[firstArg:] - - if firstArg == 0 { - if sel, ok := call.Args[0].(*ast.SelectorExpr); ok { - if x, ok := sel.X.(*ast.Ident); ok { - if x.Name == "os" && strings.HasPrefix(sel.Sel.Name, "Std") { - pass.ReportRangef(call, "%s does not take io.Writer but has first arg %s", fn.FullName(), analysisutil.Format(pass.Fset, call.Args[0])) - } - } - } - } - - arg := args[0] - if s, ok := stringConstantExpr(pass, arg); ok { - // Ignore trailing % character - // The % in "abc 0.0%" couldn't be a formatting directive. - s = strings.TrimSuffix(s, "%") - if strings.Contains(s, "%") { - m := printFormatRE.FindStringSubmatch(s) - if m != nil { - pass.ReportRangef(call, "%s call has possible Printf formatting directive %s", fn.FullName(), m[0]) - } - } - } - if strings.HasSuffix(fn.Name(), "ln") { - // The last item, if a string, should not have a newline. - arg = args[len(args)-1] - if s, ok := stringConstantExpr(pass, arg); ok { - if strings.HasSuffix(s, "\n") { - pass.ReportRangef(call, "%s arg list ends with redundant newline", fn.FullName()) - } - } - } - for _, arg := range args { - if isFunctionValue(pass, arg) { - pass.ReportRangef(call, "%s arg %s is a func value, not called", fn.FullName(), analysisutil.Format(pass.Fset, arg)) - } - if methodName, ok := recursiveStringer(pass, arg); ok { - pass.ReportRangef(call, "%s arg %s causes recursive call to %s method", fn.FullName(), analysisutil.Format(pass.Fset, arg), methodName) - } - } -} - -// count(n, what) returns "1 what" or "N whats" -// (assuming the plural of what is whats). -func count(n int, what string) string { - if n == 1 { - return "1 " + what - } - return fmt.Sprintf("%d %ss", n, what) -} - -// stringSet is a set-of-nonempty-strings-valued flag. -// Note: elements without a '.' get lower-cased. -type stringSet map[string]bool - -func (ss stringSet) String() string { - var list []string - for name := range ss { - list = append(list, name) - } - sort.Strings(list) - return strings.Join(list, ",") -} - -func (ss stringSet) Set(flag string) error { - for _, name := range strings.Split(flag, ",") { - if len(name) == 0 { - return fmt.Errorf("empty string") - } - if !strings.Contains(name, ".") { - name = strings.ToLower(name) - } - ss[name] = true - } - return nil -} diff --git a/vendor/golang.org/x/tools/go/analysis/passes/printf/types.go b/vendor/golang.org/x/tools/go/analysis/passes/printf/types.go deleted file mode 100644 index 017c8a247..000000000 --- a/vendor/golang.org/x/tools/go/analysis/passes/printf/types.go +++ /dev/null @@ -1,302 +0,0 @@ -// Copyright 2018 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package printf - -import ( - "fmt" - "go/ast" - "go/types" - - "golang.org/x/tools/go/analysis" - "golang.org/x/tools/internal/aliases" - "golang.org/x/tools/internal/typeparams" -) - -var errorType = types.Universe.Lookup("error").Type().Underlying().(*types.Interface) - -// matchArgType reports an error if printf verb t is not appropriate for -// operand arg. -// -// If arg is a type parameter, the verb t must be appropriate for every type in -// the type parameter type set. -func matchArgType(pass *analysis.Pass, t printfArgType, arg ast.Expr) (reason string, ok bool) { - // %v, %T accept any argument type. 
- if t == anyType { - return "", true - } - - typ := pass.TypesInfo.Types[arg].Type - if typ == nil { - return "", true // probably a type check problem - } - - m := &argMatcher{t: t, seen: make(map[types.Type]bool)} - ok = m.match(typ, true) - return m.reason, ok -} - -// argMatcher recursively matches types against the printfArgType t. -// -// To short-circuit recursion, it keeps track of types that have already been -// matched (or are in the process of being matched) via the seen map. Recursion -// arises from the compound types {map,chan,slice} which may be printed with %d -// etc. if that is appropriate for their element types, as well as from type -// parameters, which are expanded to the constituents of their type set. -// -// The reason field may be set to report the cause of the mismatch. -type argMatcher struct { - t printfArgType - seen map[types.Type]bool - reason string -} - -// match checks if typ matches m's printf arg type. If topLevel is true, typ is -// the actual type of the printf arg, for which special rules apply. As a -// special case, top level type parameters pass topLevel=true when checking for -// matches among the constituents of their type set, as type arguments will -// replace the type parameter at compile time. -func (m *argMatcher) match(typ types.Type, topLevel bool) bool { - // %w accepts only errors. - if m.t == argError { - return types.ConvertibleTo(typ, errorType) - } - - // If the type implements fmt.Formatter, we have nothing to check. - if isFormatter(typ) { - return true - } - - // If we can use a string, might arg (dynamically) implement the Stringer or Error interface? - if m.t&argString != 0 && isConvertibleToString(typ) { - return true - } - - if typ, _ := aliases.Unalias(typ).(*types.TypeParam); typ != nil { - // Avoid infinite recursion through type parameters. - if m.seen[typ] { - return true - } - m.seen[typ] = true - terms, err := typeparams.StructuralTerms(typ) - if err != nil { - return true // invalid type (possibly an empty type set) - } - - if len(terms) == 0 { - // No restrictions on the underlying of typ. Type parameters implementing - // error, fmt.Formatter, or fmt.Stringer were handled above, and %v and - // %T was handled in matchType. We're about to check restrictions the - // underlying; if the underlying type is unrestricted there must be an - // element of the type set that violates one of the arg type checks - // below, so we can safely return false here. - - if m.t == anyType { // anyType must have already been handled. - panic("unexpected printfArgType") - } - return false - } - - // Only report a reason if typ is the argument type, otherwise it won't - // make sense. Note that it is not sufficient to check if topLevel == here, - // as type parameters can have a type set consisting of other type - // parameters. - reportReason := len(m.seen) == 1 - - for _, term := range terms { - if !m.match(term.Type(), topLevel) { - if reportReason { - if term.Tilde() { - m.reason = fmt.Sprintf("contains ~%s", term.Type()) - } else { - m.reason = fmt.Sprintf("contains %s", term.Type()) - } - } - return false - } - } - return true - } - - typ = typ.Underlying() - if m.seen[typ] { - // We've already considered typ, or are in the process of considering it. - // In case we've already considered typ, it must have been valid (else we - // would have stopped matching). In case we're in the process of - // considering it, we must avoid infinite recursion. 
- // - // There are some pathological cases where returning true here is - // incorrect, for example `type R struct { F []R }`, but these are - // acceptable false negatives. - return true - } - m.seen[typ] = true - - switch typ := typ.(type) { - case *types.Signature: - return m.t == argPointer - - case *types.Map: - if m.t == argPointer { - return true - } - // Recur: map[int]int matches %d. - return m.match(typ.Key(), false) && m.match(typ.Elem(), false) - - case *types.Chan: - return m.t&argPointer != 0 - - case *types.Array: - // Same as slice. - if types.Identical(typ.Elem().Underlying(), types.Typ[types.Byte]) && m.t&argString != 0 { - return true // %s matches []byte - } - // Recur: []int matches %d. - return m.match(typ.Elem(), false) - - case *types.Slice: - // Same as array. - if types.Identical(typ.Elem().Underlying(), types.Typ[types.Byte]) && m.t&argString != 0 { - return true // %s matches []byte - } - if m.t == argPointer { - return true // %p prints a slice's 0th element - } - // Recur: []int matches %d. But watch out for - // type T []T - // If the element is a pointer type (type T[]*T), it's handled fine by the Pointer case below. - return m.match(typ.Elem(), false) - - case *types.Pointer: - // Ugly, but dealing with an edge case: a known pointer to an invalid type, - // probably something from a failed import. - if typ.Elem() == types.Typ[types.Invalid] { - return true // special case - } - // If it's actually a pointer with %p, it prints as one. - if m.t == argPointer { - return true - } - - if typeparams.IsTypeParam(typ.Elem()) { - return true // We don't know whether the logic below applies. Give up. - } - - under := typ.Elem().Underlying() - switch under.(type) { - case *types.Struct: // see below - case *types.Array: // see below - case *types.Slice: // see below - case *types.Map: // see below - default: - // Check whether the rest can print pointers. - return m.t&argPointer != 0 - } - // If it's a top-level pointer to a struct, array, slice, type param, or - // map, that's equivalent in our analysis to whether we can - // print the type being pointed to. Pointers in nested levels - // are not supported to minimize fmt running into loops. - if !topLevel { - return false - } - return m.match(under, false) - - case *types.Struct: - // report whether all the elements of the struct match the expected type. For - // instance, with "%d" all the elements must be printable with the "%d" format. - for i := 0; i < typ.NumFields(); i++ { - typf := typ.Field(i) - if !m.match(typf.Type(), false) { - return false - } - if m.t&argString != 0 && !typf.Exported() && isConvertibleToString(typf.Type()) { - // Issue #17798: unexported Stringer or error cannot be properly formatted. - return false - } - } - return true - - case *types.Interface: - // There's little we can do. - // Whether any particular verb is valid depends on the argument. - // The user may have reasonable prior knowledge of the contents of the interface. 
- return true - - case *types.Basic: - switch typ.Kind() { - case types.UntypedBool, - types.Bool: - return m.t&argBool != 0 - - case types.UntypedInt, - types.Int, - types.Int8, - types.Int16, - types.Int32, - types.Int64, - types.Uint, - types.Uint8, - types.Uint16, - types.Uint32, - types.Uint64, - types.Uintptr: - return m.t&argInt != 0 - - case types.UntypedFloat, - types.Float32, - types.Float64: - return m.t&argFloat != 0 - - case types.UntypedComplex, - types.Complex64, - types.Complex128: - return m.t&argComplex != 0 - - case types.UntypedString, - types.String: - return m.t&argString != 0 - - case types.UnsafePointer: - return m.t&(argPointer|argInt) != 0 - - case types.UntypedRune: - return m.t&(argInt|argRune) != 0 - - case types.UntypedNil: - return false - - case types.Invalid: - return true // Probably a type check problem. - } - panic("unreachable") - } - - return false -} - -func isConvertibleToString(typ types.Type) bool { - if bt, ok := aliases.Unalias(typ).(*types.Basic); ok && bt.Kind() == types.UntypedNil { - // We explicitly don't want untyped nil, which is - // convertible to both of the interfaces below, as it - // would just panic anyway. - return false - } - if types.ConvertibleTo(typ, errorType) { - return true // via .Error() - } - - // Does it implement fmt.Stringer? - if obj, _, _ := types.LookupFieldOrMethod(typ, false, nil, "String"); obj != nil { - if fn, ok := obj.(*types.Func); ok { - sig := fn.Type().(*types.Signature) - if sig.Params().Len() == 0 && - sig.Results().Len() == 1 && - sig.Results().At(0).Type() == types.Typ[types.String] { - return true - } - } - } - - return false -} diff --git a/vendor/golang.org/x/tools/go/analysis/passes/reflectvaluecompare/doc.go b/vendor/golang.org/x/tools/go/analysis/passes/reflectvaluecompare/doc.go deleted file mode 100644 index 32f342b97..000000000 --- a/vendor/golang.org/x/tools/go/analysis/passes/reflectvaluecompare/doc.go +++ /dev/null @@ -1,27 +0,0 @@ -// Copyright 2023 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Package reflectvaluecompare defines an Analyzer that checks for accidentally -// using == or reflect.DeepEqual to compare reflect.Value values. -// See issues 43993 and 18871. -// -// # Analyzer reflectvaluecompare -// -// reflectvaluecompare: check for comparing reflect.Value values with == or reflect.DeepEqual -// -// The reflectvaluecompare checker looks for expressions of the form: -// -// v1 == v2 -// v1 != v2 -// reflect.DeepEqual(v1, v2) -// -// where v1 or v2 are reflect.Values. Comparing reflect.Values directly -// is almost certainly not correct, as it compares the reflect package's -// internal representation, not the underlying value. -// Likely what is intended is: -// -// v1.Interface() == v2.Interface() -// v1.Interface() != v2.Interface() -// reflect.DeepEqual(v1.Interface(), v2.Interface()) -package reflectvaluecompare diff --git a/vendor/golang.org/x/tools/go/analysis/passes/reflectvaluecompare/reflectvaluecompare.go b/vendor/golang.org/x/tools/go/analysis/passes/reflectvaluecompare/reflectvaluecompare.go deleted file mode 100644 index 6789d7357..000000000 --- a/vendor/golang.org/x/tools/go/analysis/passes/reflectvaluecompare/reflectvaluecompare.go +++ /dev/null @@ -1,77 +0,0 @@ -// Copyright 2021 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
- -package reflectvaluecompare - -import ( - _ "embed" - "go/ast" - "go/token" - "go/types" - - "golang.org/x/tools/go/analysis" - "golang.org/x/tools/go/analysis/passes/inspect" - "golang.org/x/tools/go/analysis/passes/internal/analysisutil" - "golang.org/x/tools/go/ast/inspector" - "golang.org/x/tools/go/types/typeutil" -) - -//go:embed doc.go -var doc string - -var Analyzer = &analysis.Analyzer{ - Name: "reflectvaluecompare", - Doc: analysisutil.MustExtractDoc(doc, "reflectvaluecompare"), - URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/reflectvaluecompare", - Requires: []*analysis.Analyzer{inspect.Analyzer}, - Run: run, -} - -func run(pass *analysis.Pass) (interface{}, error) { - inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) - - nodeFilter := []ast.Node{ - (*ast.BinaryExpr)(nil), - (*ast.CallExpr)(nil), - } - inspect.Preorder(nodeFilter, func(n ast.Node) { - switch n := n.(type) { - case *ast.BinaryExpr: - if n.Op != token.EQL && n.Op != token.NEQ { - return - } - if isReflectValue(pass, n.X) || isReflectValue(pass, n.Y) { - if n.Op == token.EQL { - pass.ReportRangef(n, "avoid using == with reflect.Value") - } else { - pass.ReportRangef(n, "avoid using != with reflect.Value") - } - } - case *ast.CallExpr: - fn, _ := typeutil.Callee(pass.TypesInfo, n).(*types.Func) - if analysisutil.IsFunctionNamed(fn, "reflect", "DeepEqual") && (isReflectValue(pass, n.Args[0]) || isReflectValue(pass, n.Args[1])) { - pass.ReportRangef(n, "avoid using reflect.DeepEqual with reflect.Value") - } - } - }) - return nil, nil -} - -// isReflectValue reports whether the type of e is reflect.Value. -func isReflectValue(pass *analysis.Pass, e ast.Expr) bool { - tv, ok := pass.TypesInfo.Types[e] - if !ok { // no type info, something else is wrong - return false - } - // See if the type is reflect.Value - if !analysisutil.IsNamedType(tv.Type, "reflect", "Value") { - return false - } - if _, ok := e.(*ast.CompositeLit); ok { - // This is reflect.Value{}. Don't treat that as an error. - // Users should probably use x.IsValid() rather than x == reflect.Value{}, but the latter isn't wrong. - return false - } - return true -} diff --git a/vendor/golang.org/x/tools/go/analysis/passes/shadow/doc.go b/vendor/golang.org/x/tools/go/analysis/passes/shadow/doc.go deleted file mode 100644 index 781fd2eb8..000000000 --- a/vendor/golang.org/x/tools/go/analysis/passes/shadow/doc.go +++ /dev/null @@ -1,33 +0,0 @@ -// Copyright 2023 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Package shadow defines an Analyzer that checks for shadowed variables. -// -// # Analyzer shadow -// -// shadow: check for possible unintended shadowing of variables -// -// This analyzer check for shadowed variables. -// A shadowed variable is a variable declared in an inner scope -// with the same name and type as a variable in an outer scope, -// and where the outer variable is mentioned after the inner one -// is declared. -// -// (This definition can be refined; the module generates too many -// false positives and is not yet enabled by default.) 
-// -// For example: -// -// func BadRead(f *os.File, buf []byte) error { -// var err error -// for { -// n, err := f.Read(buf) // shadows the function variable 'err' -// if err != nil { -// break // causes return of wrong value -// } -// foo(buf) -// } -// return err -// } -package shadow diff --git a/vendor/golang.org/x/tools/go/analysis/passes/shadow/shadow.go b/vendor/golang.org/x/tools/go/analysis/passes/shadow/shadow.go deleted file mode 100644 index 30258c991..000000000 --- a/vendor/golang.org/x/tools/go/analysis/passes/shadow/shadow.go +++ /dev/null @@ -1,268 +0,0 @@ -// Copyright 2013 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package shadow - -import ( - _ "embed" - "go/ast" - "go/token" - "go/types" - - "golang.org/x/tools/go/analysis" - "golang.org/x/tools/go/analysis/passes/inspect" - "golang.org/x/tools/go/analysis/passes/internal/analysisutil" - "golang.org/x/tools/go/ast/inspector" -) - -// NOTE: Experimental. Not part of the vet suite. - -//go:embed doc.go -var doc string - -var Analyzer = &analysis.Analyzer{ - Name: "shadow", - Doc: analysisutil.MustExtractDoc(doc, "shadow"), - URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/shadow", - Requires: []*analysis.Analyzer{inspect.Analyzer}, - Run: run, -} - -// flags -var strict = false - -func init() { - Analyzer.Flags.BoolVar(&strict, "strict", strict, "whether to be strict about shadowing; can be noisy") -} - -func run(pass *analysis.Pass) (interface{}, error) { - inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) - - spans := make(map[types.Object]span) - for id, obj := range pass.TypesInfo.Defs { - // Ignore identifiers that don't denote objects - // (package names, symbolic variables such as t - // in t := x.(type) of type switch headers). - if obj != nil { - growSpan(spans, obj, id.Pos(), id.End()) - } - } - for id, obj := range pass.TypesInfo.Uses { - growSpan(spans, obj, id.Pos(), id.End()) - } - for node, obj := range pass.TypesInfo.Implicits { - // A type switch with a short variable declaration - // such as t := x.(type) doesn't declare the symbolic - // variable (t in the example) at the switch header; - // instead a new variable t (with specific type) is - // declared implicitly for each case. Such variables - // are found in the types.Info.Implicits (not Defs) - // map. Add them here, assuming they are declared at - // the type cases' colon ":". - if cc, ok := node.(*ast.CaseClause); ok { - growSpan(spans, obj, cc.Colon, cc.Colon) - } - } - - nodeFilter := []ast.Node{ - (*ast.AssignStmt)(nil), - (*ast.GenDecl)(nil), - } - inspect.Preorder(nodeFilter, func(n ast.Node) { - switch n := n.(type) { - case *ast.AssignStmt: - checkShadowAssignment(pass, spans, n) - case *ast.GenDecl: - checkShadowDecl(pass, spans, n) - } - }) - return nil, nil -} - -// A span stores the minimum range of byte positions in the file in which a -// given variable (types.Object) is mentioned. It is lexically defined: it spans -// from the beginning of its first mention to the end of its last mention. -// A variable is considered shadowed (if strict is off) only if the -// shadowing variable is declared within the span of the shadowed variable. -// In other words, if a variable is shadowed but not used after the shadowed -// variable is declared, it is inconsequential and not worth complaining about. 
-// This simple check dramatically reduces the nuisance rate for the shadowing -// check, at least until something cleverer comes along. -// -// One wrinkle: A "naked return" is a silent use of a variable that the Span -// will not capture, but the compilers catch naked returns of shadowed -// variables so we don't need to. -// -// Cases this gets wrong (TODO): -// - If a for loop's continuation statement mentions a variable redeclared in -// the block, we should complain about it but don't. -// - A variable declared inside a function literal can falsely be identified -// as shadowing a variable in the outer function. -type span struct { - min token.Pos - max token.Pos -} - -// contains reports whether the position is inside the span. -func (s span) contains(pos token.Pos) bool { - return s.min <= pos && pos < s.max -} - -// growSpan expands the span for the object to contain the source range [pos, end). -func growSpan(spans map[types.Object]span, obj types.Object, pos, end token.Pos) { - if strict { - return // No need - } - s, ok := spans[obj] - if ok { - if s.min > pos { - s.min = pos - } - if s.max < end { - s.max = end - } - } else { - s = span{pos, end} - } - spans[obj] = s -} - -// checkShadowAssignment checks for shadowing in a short variable declaration. -func checkShadowAssignment(pass *analysis.Pass, spans map[types.Object]span, a *ast.AssignStmt) { - if a.Tok != token.DEFINE { - return - } - if idiomaticShortRedecl(pass, a) { - return - } - for _, expr := range a.Lhs { - ident, ok := expr.(*ast.Ident) - if !ok { - pass.ReportRangef(expr, "invalid AST: short variable declaration of non-identifier") - return - } - checkShadowing(pass, spans, ident) - } -} - -// idiomaticShortRedecl reports whether this short declaration can be ignored for -// the purposes of shadowing, that is, that any redeclarations it contains are deliberate. -func idiomaticShortRedecl(pass *analysis.Pass, a *ast.AssignStmt) bool { - // Don't complain about deliberate redeclarations of the form - // i := i - // Such constructs are idiomatic in range loops to create a new variable - // for each iteration. Another example is - // switch n := n.(type) - if len(a.Rhs) != len(a.Lhs) { - return false - } - // We know it's an assignment, so the LHS must be all identifiers. (We check anyway.) - for i, expr := range a.Lhs { - lhs, ok := expr.(*ast.Ident) - if !ok { - pass.ReportRangef(expr, "invalid AST: short variable declaration of non-identifier") - return true // Don't do any more processing. - } - switch rhs := a.Rhs[i].(type) { - case *ast.Ident: - if lhs.Name != rhs.Name { - return false - } - case *ast.TypeAssertExpr: - if id, ok := rhs.X.(*ast.Ident); ok { - if lhs.Name != id.Name { - return false - } - } - default: - return false - } - } - return true -} - -// idiomaticRedecl reports whether this declaration spec can be ignored for -// the purposes of shadowing, that is, that any redeclarations it contains are deliberate. -func idiomaticRedecl(d *ast.ValueSpec) bool { - // Don't complain about deliberate redeclarations of the form - // var i, j = i, j - // Don't ignore redeclarations of the form - // var i = 3 - if len(d.Names) != len(d.Values) { - return false - } - for i, lhs := range d.Names { - rhs, ok := d.Values[i].(*ast.Ident) - if !ok || lhs.Name != rhs.Name { - return false - } - } - return true -} - -// checkShadowDecl checks for shadowing in a general variable declaration. 
-func checkShadowDecl(pass *analysis.Pass, spans map[types.Object]span, d *ast.GenDecl) { - if d.Tok != token.VAR { - return - } - for _, spec := range d.Specs { - valueSpec, ok := spec.(*ast.ValueSpec) - if !ok { - pass.ReportRangef(spec, "invalid AST: var GenDecl not ValueSpec") - return - } - // Don't complain about deliberate redeclarations of the form - // var i = i - if idiomaticRedecl(valueSpec) { - return - } - for _, ident := range valueSpec.Names { - checkShadowing(pass, spans, ident) - } - } -} - -// checkShadowing checks whether the identifier shadows an identifier in an outer scope. -func checkShadowing(pass *analysis.Pass, spans map[types.Object]span, ident *ast.Ident) { - if ident.Name == "_" { - // Can't shadow the blank identifier. - return - } - obj := pass.TypesInfo.Defs[ident] - if obj == nil { - return - } - // obj.Parent.Parent is the surrounding scope. If we can find another declaration - // starting from there, we have a shadowed identifier. - _, shadowed := obj.Parent().Parent().LookupParent(obj.Name(), obj.Pos()) - if shadowed == nil { - return - } - // Don't complain if it's shadowing a universe-declared identifier; that's fine. - if shadowed.Parent() == types.Universe { - return - } - if strict { - // The shadowed identifier must appear before this one to be an instance of shadowing. - if shadowed.Pos() > ident.Pos() { - return - } - } else { - // Don't complain if the span of validity of the shadowed identifier doesn't include - // the shadowing identifier. - span, ok := spans[shadowed] - if !ok { - pass.ReportRangef(ident, "internal error: no range for %q", ident.Name) - return - } - if !span.contains(ident.Pos()) { - return - } - } - // Don't complain if the types differ: that implies the programmer really wants two different things. - if types.Identical(obj.Type(), shadowed.Type()) { - line := pass.Fset.Position(shadowed.Pos()).Line - pass.ReportRangef(ident, "declaration of %q shadows declaration at line %d", obj.Name(), line) - } -} diff --git a/vendor/golang.org/x/tools/go/analysis/passes/shift/dead.go b/vendor/golang.org/x/tools/go/analysis/passes/shift/dead.go deleted file mode 100644 index 43415a98d..000000000 --- a/vendor/golang.org/x/tools/go/analysis/passes/shift/dead.go +++ /dev/null @@ -1,101 +0,0 @@ -// Copyright 2017 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package shift - -// Simplified dead code detector. -// Used for skipping shift checks on unreachable arch-specific code. - -import ( - "go/ast" - "go/constant" - "go/types" -) - -// updateDead puts unreachable "if" and "case" nodes into dead. -func updateDead(info *types.Info, dead map[ast.Node]bool, node ast.Node) { - if dead[node] { - // The node is already marked as dead. - return - } - - // setDead marks the node and all the children as dead. - setDead := func(n ast.Node) { - ast.Inspect(n, func(node ast.Node) bool { - if node != nil { - dead[node] = true - } - return true - }) - } - - switch stmt := node.(type) { - case *ast.IfStmt: - // "if" branch is dead if its condition evaluates - // to constant false. - v := info.Types[stmt.Cond].Value - if v == nil { - return - } - if !constant.BoolVal(v) { - setDead(stmt.Body) - return - } - if stmt.Else != nil { - setDead(stmt.Else) - } - case *ast.SwitchStmt: - // Case clause with empty switch tag is dead if it evaluates - // to constant false. 
- if stmt.Tag == nil { - BodyLoopBool: - for _, stmt := range stmt.Body.List { - cc := stmt.(*ast.CaseClause) - if cc.List == nil { - // Skip default case. - continue - } - for _, expr := range cc.List { - v := info.Types[expr].Value - if v == nil || v.Kind() != constant.Bool || constant.BoolVal(v) { - continue BodyLoopBool - } - } - setDead(cc) - } - return - } - - // Case clause is dead if its constant value doesn't match - // the constant value from the switch tag. - // TODO: This handles integer comparisons only. - v := info.Types[stmt.Tag].Value - if v == nil || v.Kind() != constant.Int { - return - } - tagN, ok := constant.Uint64Val(v) - if !ok { - return - } - BodyLoopInt: - for _, x := range stmt.Body.List { - cc := x.(*ast.CaseClause) - if cc.List == nil { - // Skip default case. - continue - } - for _, expr := range cc.List { - v := info.Types[expr].Value - if v == nil { - continue BodyLoopInt - } - n, ok := constant.Uint64Val(v) - if !ok || tagN == n { - continue BodyLoopInt - } - } - setDead(cc) - } - } -} diff --git a/vendor/golang.org/x/tools/go/analysis/passes/shift/shift.go b/vendor/golang.org/x/tools/go/analysis/passes/shift/shift.go deleted file mode 100644 index d01eb1eeb..000000000 --- a/vendor/golang.org/x/tools/go/analysis/passes/shift/shift.go +++ /dev/null @@ -1,134 +0,0 @@ -// Copyright 2014 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Package shift defines an Analyzer that checks for shifts that exceed -// the width of an integer. -package shift - -// TODO(adonovan): integrate with ctrflow (CFG-based) dead code analysis. May -// have impedance mismatch due to its (non-)treatment of constant -// expressions (such as runtime.GOARCH=="386"). - -import ( - "go/ast" - "go/constant" - "go/token" - "go/types" - "math" - - "golang.org/x/tools/go/analysis" - "golang.org/x/tools/go/analysis/passes/inspect" - "golang.org/x/tools/go/analysis/passes/internal/analysisutil" - "golang.org/x/tools/go/ast/inspector" - "golang.org/x/tools/internal/aliases" - "golang.org/x/tools/internal/typeparams" -) - -const Doc = "check for shifts that equal or exceed the width of the integer" - -var Analyzer = &analysis.Analyzer{ - Name: "shift", - Doc: Doc, - URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/shift", - Requires: []*analysis.Analyzer{inspect.Analyzer}, - Run: run, -} - -func run(pass *analysis.Pass) (interface{}, error) { - inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) - - // Do a complete pass to compute dead nodes. - dead := make(map[ast.Node]bool) - nodeFilter := []ast.Node{ - (*ast.IfStmt)(nil), - (*ast.SwitchStmt)(nil), - } - inspect.Preorder(nodeFilter, func(n ast.Node) { - // TODO(adonovan): move updateDead into this file. - updateDead(pass.TypesInfo, dead, n) - }) - - nodeFilter = []ast.Node{ - (*ast.AssignStmt)(nil), - (*ast.BinaryExpr)(nil), - } - inspect.Preorder(nodeFilter, func(node ast.Node) { - if dead[node] { - // Skip shift checks on unreachable nodes. 
- return - } - - switch node := node.(type) { - case *ast.BinaryExpr: - if node.Op == token.SHL || node.Op == token.SHR { - checkLongShift(pass, node, node.X, node.Y) - } - case *ast.AssignStmt: - if len(node.Lhs) != 1 || len(node.Rhs) != 1 { - return - } - if node.Tok == token.SHL_ASSIGN || node.Tok == token.SHR_ASSIGN { - checkLongShift(pass, node, node.Lhs[0], node.Rhs[0]) - } - } - }) - return nil, nil -} - -// checkLongShift checks if shift or shift-assign operations shift by more than -// the length of the underlying variable. -func checkLongShift(pass *analysis.Pass, node ast.Node, x, y ast.Expr) { - if pass.TypesInfo.Types[x].Value != nil { - // Ignore shifts of constants. - // These are frequently used for bit-twiddling tricks - // like ^uint(0) >> 63 for 32/64 bit detection and compatibility. - return - } - - v := pass.TypesInfo.Types[y].Value - if v == nil { - return - } - u := constant.ToInt(v) // either an Int or Unknown - amt, ok := constant.Int64Val(u) - if !ok { - return - } - t := pass.TypesInfo.Types[x].Type - if t == nil { - return - } - var structuralTypes []types.Type - switch t := aliases.Unalias(t).(type) { - case *types.TypeParam: - terms, err := typeparams.StructuralTerms(t) - if err != nil { - return // invalid type - } - for _, term := range terms { - structuralTypes = append(structuralTypes, term.Type()) - } - default: - structuralTypes = append(structuralTypes, t) - } - sizes := make(map[int64]struct{}) - for _, t := range structuralTypes { - size := 8 * pass.TypesSizes.Sizeof(t) - sizes[size] = struct{}{} - } - minSize := int64(math.MaxInt64) - for size := range sizes { - if size < minSize { - minSize = size - } - } - if amt >= minSize { - ident := analysisutil.Format(pass.Fset, x) - qualifier := "" - if len(sizes) > 1 { - qualifier = "may be " - } - pass.ReportRangef(node, "%s (%s%d bits) too small for shift of %d", ident, qualifier, minSize, amt) - } -} diff --git a/vendor/golang.org/x/tools/go/analysis/passes/sigchanyzer/doc.go b/vendor/golang.org/x/tools/go/analysis/passes/sigchanyzer/doc.go deleted file mode 100644 index 583fed014..000000000 --- a/vendor/golang.org/x/tools/go/analysis/passes/sigchanyzer/doc.go +++ /dev/null @@ -1,17 +0,0 @@ -// Copyright 2023 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Package sigchanyzer defines an Analyzer that detects -// misuse of unbuffered signal as argument to signal.Notify. -// -// # Analyzer sigchanyzer -// -// sigchanyzer: check for unbuffered channel of os.Signal -// -// This checker reports call expression of the form -// -// signal.Notify(c <-chan os.Signal, sig ...os.Signal), -// -// where c is an unbuffered channel, which can be at risk of missing the signal. -package sigchanyzer diff --git a/vendor/golang.org/x/tools/go/analysis/passes/sigchanyzer/sigchanyzer.go b/vendor/golang.org/x/tools/go/analysis/passes/sigchanyzer/sigchanyzer.go deleted file mode 100644 index 5f121f720..000000000 --- a/vendor/golang.org/x/tools/go/analysis/passes/sigchanyzer/sigchanyzer.go +++ /dev/null @@ -1,159 +0,0 @@ -// Copyright 2020 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Package sigchanyzer defines an Analyzer that detects -// misuse of unbuffered signal as argument to signal.Notify. 
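A minimal sketch of the call pattern the sigchanyzer pass being removed here reports, and the buffered form its suggested fix produces (illustrative only; the variable names are made up):

    package main

    import (
        "os"
        "os/signal"
    )

    func main() {
        // Reported: signal.Notify does not block when sending, so an
        // unbuffered channel can miss a signal that is delivered before
        // the receiver is ready.
        c := make(chan os.Signal)
        signal.Notify(c, os.Interrupt)

        // Suggested fix: give the channel capacity (1 is sufficient here).
        ok := make(chan os.Signal, 1)
        signal.Notify(ok, os.Interrupt)
        <-ok
    }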
-package sigchanyzer - -import ( - "bytes" - _ "embed" - "go/ast" - "go/format" - "go/token" - "go/types" - - "golang.org/x/tools/go/analysis" - "golang.org/x/tools/go/analysis/passes/inspect" - "golang.org/x/tools/go/analysis/passes/internal/analysisutil" - "golang.org/x/tools/go/ast/inspector" -) - -//go:embed doc.go -var doc string - -// Analyzer describes sigchanyzer analysis function detector. -var Analyzer = &analysis.Analyzer{ - Name: "sigchanyzer", - Doc: analysisutil.MustExtractDoc(doc, "sigchanyzer"), - URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/sigchanyzer", - Requires: []*analysis.Analyzer{inspect.Analyzer}, - Run: run, -} - -func run(pass *analysis.Pass) (interface{}, error) { - if !analysisutil.Imports(pass.Pkg, "os/signal") { - return nil, nil // doesn't directly import signal - } - - inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) - - nodeFilter := []ast.Node{ - (*ast.CallExpr)(nil), - } - inspect.Preorder(nodeFilter, func(n ast.Node) { - call := n.(*ast.CallExpr) - if !isSignalNotify(pass.TypesInfo, call) { - return - } - var chanDecl *ast.CallExpr - switch arg := call.Args[0].(type) { - case *ast.Ident: - if decl, ok := findDecl(arg).(*ast.CallExpr); ok { - chanDecl = decl - } - case *ast.CallExpr: - // Only signal.Notify(make(chan os.Signal), os.Interrupt) is safe, - // conservatively treat others as not safe, see golang/go#45043 - if isBuiltinMake(pass.TypesInfo, arg) { - return - } - chanDecl = arg - } - if chanDecl == nil || len(chanDecl.Args) != 1 { - return - } - - // Make a copy of the channel's declaration to avoid - // mutating the AST. See https://golang.org/issue/46129. - chanDeclCopy := &ast.CallExpr{} - *chanDeclCopy = *chanDecl - chanDeclCopy.Args = append([]ast.Expr(nil), chanDecl.Args...) 
- chanDeclCopy.Args = append(chanDeclCopy.Args, &ast.BasicLit{ - Kind: token.INT, - Value: "1", - }) - - var buf bytes.Buffer - if err := format.Node(&buf, token.NewFileSet(), chanDeclCopy); err != nil { - return - } - pass.Report(analysis.Diagnostic{ - Pos: call.Pos(), - End: call.End(), - Message: "misuse of unbuffered os.Signal channel as argument to signal.Notify", - SuggestedFixes: []analysis.SuggestedFix{{ - Message: "Change to buffer channel", - TextEdits: []analysis.TextEdit{{ - Pos: chanDecl.Pos(), - End: chanDecl.End(), - NewText: buf.Bytes(), - }}, - }}, - }) - }) - return nil, nil -} - -func isSignalNotify(info *types.Info, call *ast.CallExpr) bool { - check := func(id *ast.Ident) bool { - obj := info.ObjectOf(id) - return obj.Name() == "Notify" && obj.Pkg().Path() == "os/signal" - } - switch fun := call.Fun.(type) { - case *ast.SelectorExpr: - return check(fun.Sel) - case *ast.Ident: - if fun, ok := findDecl(fun).(*ast.SelectorExpr); ok { - return check(fun.Sel) - } - return false - default: - return false - } -} - -func findDecl(arg *ast.Ident) ast.Node { - if arg.Obj == nil { - return nil - } - switch as := arg.Obj.Decl.(type) { - case *ast.AssignStmt: - if len(as.Lhs) != len(as.Rhs) { - return nil - } - for i, lhs := range as.Lhs { - lid, ok := lhs.(*ast.Ident) - if !ok { - continue - } - if lid.Obj == arg.Obj { - return as.Rhs[i] - } - } - case *ast.ValueSpec: - if len(as.Names) != len(as.Values) { - return nil - } - for i, name := range as.Names { - if name.Obj == arg.Obj { - return as.Values[i] - } - } - } - return nil -} - -func isBuiltinMake(info *types.Info, call *ast.CallExpr) bool { - typVal := info.Types[call.Fun] - if !typVal.IsBuiltin() { - return false - } - switch fun := call.Fun.(type) { - case *ast.Ident: - return info.ObjectOf(fun).Name() == "make" - default: - return false - } -} diff --git a/vendor/golang.org/x/tools/go/analysis/passes/slog/doc.go b/vendor/golang.org/x/tools/go/analysis/passes/slog/doc.go deleted file mode 100644 index ecb10e094..000000000 --- a/vendor/golang.org/x/tools/go/analysis/passes/slog/doc.go +++ /dev/null @@ -1,23 +0,0 @@ -// Copyright 2023 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Package slog defines an Analyzer that checks for -// mismatched key-value pairs in log/slog calls. -// -// # Analyzer slog -// -// slog: check for invalid structured logging calls -// -// The slog checker looks for calls to functions from the log/slog -// package that take alternating key-value pairs. It reports calls -// where an argument in a key position is neither a string nor a -// slog.Attr, and where a final key is missing its value. -// For example,it would report -// -// slog.Warn("message", 11, "k") // slog.Warn arg "11" should be a string or a slog.Attr -// -// and -// -// slog.Info("message", "k1", v1, "k2") // call to slog.Info missing a final value -package slog diff --git a/vendor/golang.org/x/tools/go/analysis/passes/slog/slog.go b/vendor/golang.org/x/tools/go/analysis/passes/slog/slog.go deleted file mode 100644 index b3c683b61..000000000 --- a/vendor/golang.org/x/tools/go/analysis/passes/slog/slog.go +++ /dev/null @@ -1,226 +0,0 @@ -// Copyright 2023 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
- -// TODO(jba) deduce which functions wrap the log/slog functions, and use the -// fact mechanism to propagate this information, so we can provide diagnostics -// for user-supplied wrappers. - -package slog - -import ( - _ "embed" - "fmt" - "go/ast" - "go/token" - "go/types" - - "golang.org/x/tools/go/analysis" - "golang.org/x/tools/go/analysis/passes/inspect" - "golang.org/x/tools/go/analysis/passes/internal/analysisutil" - "golang.org/x/tools/go/ast/inspector" - "golang.org/x/tools/go/types/typeutil" - "golang.org/x/tools/internal/typesinternal" -) - -//go:embed doc.go -var doc string - -var Analyzer = &analysis.Analyzer{ - Name: "slog", - Doc: analysisutil.MustExtractDoc(doc, "slog"), - URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/slog", - Requires: []*analysis.Analyzer{inspect.Analyzer}, - Run: run, -} - -var stringType = types.Universe.Lookup("string").Type() - -// A position describes what is expected to appear in an argument position. -type position int - -const ( - // key is an argument position that should hold a string key or an Attr. - key position = iota - // value is an argument position that should hold a value. - value - // unknown represents that we do not know if position should hold a key or a value. - unknown -) - -func run(pass *analysis.Pass) (any, error) { - inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) - nodeFilter := []ast.Node{ - (*ast.CallExpr)(nil), - } - inspect.Preorder(nodeFilter, func(node ast.Node) { - call := node.(*ast.CallExpr) - fn := typeutil.StaticCallee(pass.TypesInfo, call) - if fn == nil { - return // not a static call - } - if call.Ellipsis != token.NoPos { - return // skip calls with "..." args - } - skipArgs, ok := kvFuncSkipArgs(fn) - if !ok { - // Not a slog function that takes key-value pairs. - return - } - if isMethodExpr(pass.TypesInfo, call) { - // Call is to a method value. Skip the first argument. - skipArgs++ - } - if len(call.Args) <= skipArgs { - // Too few args; perhaps there are no k-v pairs. - return - } - - // Check this call. - // The first position should hold a key or Attr. - pos := key - var unknownArg ast.Expr // nil or the last unknown argument - for _, arg := range call.Args[skipArgs:] { - t := pass.TypesInfo.Types[arg].Type - switch pos { - case key: - // Expect a string or Attr. - switch { - case t == stringType: - pos = value - case isAttr(t): - pos = key - case types.IsInterface(t): - // As we do not do dataflow, we do not know what the dynamic type is. - // It could be a string or an Attr so we don't know what to expect next. - pos = unknown - default: - if unknownArg == nil { - pass.ReportRangef(arg, "%s arg %q should be a string or a slog.Attr (possible missing key or value)", - shortName(fn), analysisutil.Format(pass.Fset, arg)) - } else { - pass.ReportRangef(arg, "%s arg %q should probably be a string or a slog.Attr (previous arg %q cannot be a key)", - shortName(fn), analysisutil.Format(pass.Fset, arg), analysisutil.Format(pass.Fset, unknownArg)) - } - // Stop here so we report at most one missing key per call. - return - } - - case value: - // Anything can appear in this position. - // The next position should be a key. - pos = key - - case unknown: - // Once we encounter an unknown position, we can never be - // sure if a problem later or at the end of the call is due to a - // missing final value, or a non-key in key position. - // In both cases, unknownArg != nil. - unknownArg = arg - - // We don't know what is expected about this position, but all hope is not lost. 
- if t != stringType && !isAttr(t) && !types.IsInterface(t) { - // This argument is definitely not a key. - // - // unknownArg cannot have been a key, in which case this is the - // corresponding value, and the next position should hold another key. - pos = key - } - } - } - if pos == value { - if unknownArg == nil { - pass.ReportRangef(call, "call to %s missing a final value", shortName(fn)) - } else { - pass.ReportRangef(call, "call to %s has a missing or misplaced value", shortName(fn)) - } - } - }) - return nil, nil -} - -func isAttr(t types.Type) bool { - return analysisutil.IsNamedType(t, "log/slog", "Attr") -} - -// shortName returns a name for the function that is shorter than FullName. -// Examples: -// -// "slog.Info" (instead of "log/slog.Info") -// "slog.Logger.With" (instead of "(*log/slog.Logger).With") -func shortName(fn *types.Func) string { - var r string - if recv := fn.Type().(*types.Signature).Recv(); recv != nil { - if _, named := typesinternal.ReceiverNamed(recv); named != nil { - r = named.Obj().Name() - } else { - r = recv.Type().String() // anon struct/interface - } - r += "." - } - return fmt.Sprintf("%s.%s%s", fn.Pkg().Name(), r, fn.Name()) -} - -// If fn is a slog function that has a ...any parameter for key-value pairs, -// kvFuncSkipArgs returns the number of arguments to skip over to reach the -// corresponding arguments, and true. -// Otherwise it returns (0, false). -func kvFuncSkipArgs(fn *types.Func) (int, bool) { - if pkg := fn.Pkg(); pkg == nil || pkg.Path() != "log/slog" { - return 0, false - } - var recvName string // by default a slog package function - if recv := fn.Type().(*types.Signature).Recv(); recv != nil { - _, named := typesinternal.ReceiverNamed(recv) - if named == nil { - return 0, false // anon struct/interface - } - recvName = named.Obj().Name() - } - skip, ok := kvFuncs[recvName][fn.Name()] - return skip, ok -} - -// The names of functions and methods in log/slog that take -// ...any for key-value pairs, mapped to the number of initial args to skip in -// order to get to the ones that match the ...any parameter. -// The first key is the dereferenced receiver type name, or "" for a function. -var kvFuncs = map[string]map[string]int{ - "": map[string]int{ - "Debug": 1, - "Info": 1, - "Warn": 1, - "Error": 1, - "DebugContext": 2, - "InfoContext": 2, - "WarnContext": 2, - "ErrorContext": 2, - "Log": 3, - "Group": 1, - }, - "Logger": map[string]int{ - "Debug": 1, - "Info": 1, - "Warn": 1, - "Error": 1, - "DebugContext": 2, - "InfoContext": 2, - "WarnContext": 2, - "ErrorContext": 2, - "Log": 3, - "With": 0, - }, - "Record": map[string]int{ - "Add": 0, - }, -} - -// isMethodExpr reports whether a call is to a MethodExpr. -func isMethodExpr(info *types.Info, c *ast.CallExpr) bool { - s, ok := c.Fun.(*ast.SelectorExpr) - if !ok { - return false - } - sel := info.Selections[s] - return sel != nil && sel.Kind() == types.MethodExpr -} diff --git a/vendor/golang.org/x/tools/go/analysis/passes/sortslice/analyzer.go b/vendor/golang.org/x/tools/go/analysis/passes/sortslice/analyzer.go deleted file mode 100644 index 6c151a02c..000000000 --- a/vendor/golang.org/x/tools/go/analysis/passes/sortslice/analyzer.go +++ /dev/null @@ -1,134 +0,0 @@ -// Copyright 2019 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Package sortslice defines an Analyzer that checks for calls -// to sort.Slice that do not use a slice type as first argument. 
-package sortslice - -import ( - "bytes" - "fmt" - "go/ast" - "go/format" - "go/types" - - "golang.org/x/tools/go/analysis" - "golang.org/x/tools/go/analysis/passes/inspect" - "golang.org/x/tools/go/analysis/passes/internal/analysisutil" - "golang.org/x/tools/go/ast/inspector" - "golang.org/x/tools/go/types/typeutil" -) - -const Doc = `check the argument type of sort.Slice - -sort.Slice requires an argument of a slice type. Check that -the interface{} value passed to sort.Slice is actually a slice.` - -var Analyzer = &analysis.Analyzer{ - Name: "sortslice", - Doc: Doc, - URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/sortslice", - Requires: []*analysis.Analyzer{inspect.Analyzer}, - Run: run, -} - -func run(pass *analysis.Pass) (interface{}, error) { - if !analysisutil.Imports(pass.Pkg, "sort") { - return nil, nil // doesn't directly import sort - } - - inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) - - nodeFilter := []ast.Node{ - (*ast.CallExpr)(nil), - } - - inspect.Preorder(nodeFilter, func(n ast.Node) { - call := n.(*ast.CallExpr) - fn, _ := typeutil.Callee(pass.TypesInfo, call).(*types.Func) - if !analysisutil.IsFunctionNamed(fn, "sort", "Slice", "SliceStable", "SliceIsSorted") { - return - } - - arg := call.Args[0] - typ := pass.TypesInfo.Types[arg].Type - - if tuple, ok := typ.(*types.Tuple); ok { - typ = tuple.At(0).Type() // special case for Slice(f(...)) - } - - switch typ.Underlying().(type) { - case *types.Slice, *types.Interface: - return - } - - // Restore typ to the original type, we may unwrap the tuple above, - // typ might not be the type of arg. - typ = pass.TypesInfo.Types[arg].Type - - var fixes []analysis.SuggestedFix - switch v := typ.Underlying().(type) { - case *types.Array: - var buf bytes.Buffer - format.Node(&buf, pass.Fset, &ast.SliceExpr{ - X: arg, - Slice3: false, - Lbrack: arg.End() + 1, - Rbrack: arg.End() + 3, - }) - fixes = append(fixes, analysis.SuggestedFix{ - Message: "Get a slice of the full array", - TextEdits: []analysis.TextEdit{{ - Pos: arg.Pos(), - End: arg.End(), - NewText: buf.Bytes(), - }}, - }) - case *types.Pointer: - _, ok := v.Elem().Underlying().(*types.Slice) - if !ok { - break - } - var buf bytes.Buffer - format.Node(&buf, pass.Fset, &ast.StarExpr{ - X: arg, - }) - fixes = append(fixes, analysis.SuggestedFix{ - Message: "Dereference the pointer to the slice", - TextEdits: []analysis.TextEdit{{ - Pos: arg.Pos(), - End: arg.End(), - NewText: buf.Bytes(), - }}, - }) - case *types.Signature: - if v.Params().Len() != 0 || v.Results().Len() != 1 { - break - } - if _, ok := v.Results().At(0).Type().Underlying().(*types.Slice); !ok { - break - } - var buf bytes.Buffer - format.Node(&buf, pass.Fset, &ast.CallExpr{ - Fun: arg, - }) - fixes = append(fixes, analysis.SuggestedFix{ - Message: "Call the function", - TextEdits: []analysis.TextEdit{{ - Pos: arg.Pos(), - End: arg.End(), - NewText: buf.Bytes(), - }}, - }) - } - - pass.Report(analysis.Diagnostic{ - Pos: call.Pos(), - End: call.End(), - Message: fmt.Sprintf("%s's argument must be a slice; is called with %s", fn.FullName(), typ.String()), - SuggestedFixes: fixes, - }) - }) - return nil, nil -} diff --git a/vendor/golang.org/x/tools/go/analysis/passes/stdmethods/doc.go b/vendor/golang.org/x/tools/go/analysis/passes/stdmethods/doc.go deleted file mode 100644 index 9ed88698d..000000000 --- a/vendor/golang.org/x/tools/go/analysis/passes/stdmethods/doc.go +++ /dev/null @@ -1,30 +0,0 @@ -// Copyright 2023 The Go Authors. All rights reserved. 
-// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Package stdmethods defines an Analyzer that checks for misspellings -// in the signatures of methods similar to well-known interfaces. -// -// # Analyzer stdmethods -// -// stdmethods: check signature of methods of well-known interfaces -// -// Sometimes a type may be intended to satisfy an interface but may fail to -// do so because of a mistake in its method signature. -// For example, the result of this WriteTo method should be (int64, error), -// not error, to satisfy io.WriterTo: -// -// type myWriterTo struct{...} -// func (myWriterTo) WriteTo(w io.Writer) error { ... } -// -// This check ensures that each method whose name matches one of several -// well-known interface methods from the standard library has the correct -// signature for that interface. -// -// Checked method names include: -// -// Format GobEncode GobDecode MarshalJSON MarshalXML -// Peek ReadByte ReadFrom ReadRune Scan Seek -// UnmarshalJSON UnreadByte UnreadRune WriteByte -// WriteTo -package stdmethods diff --git a/vendor/golang.org/x/tools/go/analysis/passes/stdmethods/stdmethods.go b/vendor/golang.org/x/tools/go/analysis/passes/stdmethods/stdmethods.go deleted file mode 100644 index 28f51b1ec..000000000 --- a/vendor/golang.org/x/tools/go/analysis/passes/stdmethods/stdmethods.go +++ /dev/null @@ -1,202 +0,0 @@ -// Copyright 2010 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package stdmethods - -import ( - _ "embed" - "go/ast" - "go/types" - "strings" - - "golang.org/x/tools/go/analysis" - "golang.org/x/tools/go/analysis/passes/inspect" - "golang.org/x/tools/go/analysis/passes/internal/analysisutil" - "golang.org/x/tools/go/ast/inspector" -) - -//go:embed doc.go -var doc string - -var Analyzer = &analysis.Analyzer{ - Name: "stdmethods", - Doc: analysisutil.MustExtractDoc(doc, "stdmethods"), - URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/stdmethods", - Requires: []*analysis.Analyzer{inspect.Analyzer}, - Run: run, -} - -// canonicalMethods lists the input and output types for Go methods -// that are checked using dynamic interface checks. Because the -// checks are dynamic, such methods would not cause a compile error -// if they have the wrong signature: instead the dynamic check would -// fail, sometimes mysteriously. If a method is found with a name listed -// here but not the input/output types listed here, vet complains. -// -// A few of the canonical methods have very common names. -// For example, a type might implement a Scan method that -// has nothing to do with fmt.Scanner, but we still want to check -// the methods that are intended to implement fmt.Scanner. -// To do that, the arguments that have a = prefix are treated as -// signals that the canonical meaning is intended: if a Scan -// method doesn't have a fmt.ScanState as its first argument, -// we let it go. But if it does have a fmt.ScanState, then the -// rest has to match. 
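A short sketch of how the "=" signal described above works out in practice, using made-up types that are not from this codebase:

    package example

    import "fmt"

    // Not reported: the first parameter is not fmt.ScanState, so this Scan
    // is assumed to have nothing to do with fmt.Scanner.
    type counter int

    func (c *counter) Scan(dst *int) error { *dst = int(*c); return nil }

    // Reported: fmt.ScanState in the first position signals that fmt.Scanner
    // is intended, but the verb parameter is missing; vet asks for
    // Scan(state fmt.ScanState, verb rune) error.
    type reading struct{ value int }

    func (r *reading) Scan(state fmt.ScanState) error { return nil }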
-var canonicalMethods = map[string]struct{ args, results []string }{ - "As": {[]string{"any"}, []string{"bool"}}, // errors.As - // "Flush": {{}, {"error"}}, // http.Flusher and jpeg.writer conflict - "Format": {[]string{"=fmt.State", "rune"}, []string{}}, // fmt.Formatter - "GobDecode": {[]string{"[]byte"}, []string{"error"}}, // gob.GobDecoder - "GobEncode": {[]string{}, []string{"[]byte", "error"}}, // gob.GobEncoder - "Is": {[]string{"error"}, []string{"bool"}}, // errors.Is - "MarshalJSON": {[]string{}, []string{"[]byte", "error"}}, // json.Marshaler - "MarshalXML": {[]string{"*xml.Encoder", "xml.StartElement"}, []string{"error"}}, // xml.Marshaler - "ReadByte": {[]string{}, []string{"byte", "error"}}, // io.ByteReader - "ReadFrom": {[]string{"=io.Reader"}, []string{"int64", "error"}}, // io.ReaderFrom - "ReadRune": {[]string{}, []string{"rune", "int", "error"}}, // io.RuneReader - "Scan": {[]string{"=fmt.ScanState", "rune"}, []string{"error"}}, // fmt.Scanner - "Seek": {[]string{"=int64", "int"}, []string{"int64", "error"}}, // io.Seeker - "UnmarshalJSON": {[]string{"[]byte"}, []string{"error"}}, // json.Unmarshaler - "UnmarshalXML": {[]string{"*xml.Decoder", "xml.StartElement"}, []string{"error"}}, // xml.Unmarshaler - "UnreadByte": {[]string{}, []string{"error"}}, - "UnreadRune": {[]string{}, []string{"error"}}, - "Unwrap": {[]string{}, []string{"error"}}, // errors.Unwrap - "WriteByte": {[]string{"byte"}, []string{"error"}}, // jpeg.writer (matching bufio.Writer) - "WriteTo": {[]string{"=io.Writer"}, []string{"int64", "error"}}, // io.WriterTo -} - -func run(pass *analysis.Pass) (interface{}, error) { - inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) - - nodeFilter := []ast.Node{ - (*ast.FuncDecl)(nil), - (*ast.InterfaceType)(nil), - } - inspect.Preorder(nodeFilter, func(n ast.Node) { - switch n := n.(type) { - case *ast.FuncDecl: - if n.Recv != nil { - canonicalMethod(pass, n.Name) - } - case *ast.InterfaceType: - for _, field := range n.Methods.List { - for _, id := range field.Names { - canonicalMethod(pass, id) - } - } - } - }) - return nil, nil -} - -func canonicalMethod(pass *analysis.Pass, id *ast.Ident) { - // Expected input/output. - expect, ok := canonicalMethods[id.Name] - if !ok { - return - } - - // Actual input/output - sign := pass.TypesInfo.Defs[id].Type().(*types.Signature) - args := sign.Params() - results := sign.Results() - - // Special case: WriteTo with more than one argument, - // not trying at all to implement io.WriterTo, - // comes up often enough to skip. - if id.Name == "WriteTo" && args.Len() > 1 { - return - } - - // Special case: Is, As and Unwrap only apply when type - // implements error. - if id.Name == "Is" || id.Name == "As" || id.Name == "Unwrap" { - if recv := sign.Recv(); recv == nil || !implementsError(recv.Type()) { - return - } - } - - // Special case: Unwrap has two possible signatures. - // Check for Unwrap() []error here. - if id.Name == "Unwrap" { - if args.Len() == 0 && results.Len() == 1 { - t := typeString(results.At(0).Type()) - if t == "error" || t == "[]error" { - return - } - } - pass.ReportRangef(id, "method Unwrap() should have signature Unwrap() error or Unwrap() []error") - return - } - - // Do the =s (if any) all match? - if !matchParams(pass, expect.args, args, "=") || !matchParams(pass, expect.results, results, "=") { - return - } - - // Everything must match. 
- if !matchParams(pass, expect.args, args, "") || !matchParams(pass, expect.results, results, "") { - expectFmt := id.Name + "(" + argjoin(expect.args) + ")" - if len(expect.results) == 1 { - expectFmt += " " + argjoin(expect.results) - } else if len(expect.results) > 1 { - expectFmt += " (" + argjoin(expect.results) + ")" - } - - actual := typeString(sign) - actual = strings.TrimPrefix(actual, "func") - actual = id.Name + actual - - pass.ReportRangef(id, "method %s should have signature %s", actual, expectFmt) - } -} - -func typeString(typ types.Type) string { - return types.TypeString(typ, (*types.Package).Name) -} - -func argjoin(x []string) string { - y := make([]string, len(x)) - for i, s := range x { - if s[0] == '=' { - s = s[1:] - } - y[i] = s - } - return strings.Join(y, ", ") -} - -// Does each type in expect with the given prefix match the corresponding type in actual? -func matchParams(pass *analysis.Pass, expect []string, actual *types.Tuple, prefix string) bool { - for i, x := range expect { - if !strings.HasPrefix(x, prefix) { - continue - } - if i >= actual.Len() { - return false - } - if !matchParamType(x, actual.At(i).Type()) { - return false - } - } - if prefix == "" && actual.Len() > len(expect) { - return false - } - return true -} - -// Does this one type match? -func matchParamType(expect string, actual types.Type) bool { - expect = strings.TrimPrefix(expect, "=") - // Overkill but easy. - t := typeString(actual) - return t == expect || - (t == "any" || t == "interface{}") && (expect == "any" || expect == "interface{}") -} - -var errorType = types.Universe.Lookup("error").Type().Underlying().(*types.Interface) - -func implementsError(actual types.Type) bool { - return types.Implements(actual, errorType) -} diff --git a/vendor/golang.org/x/tools/go/analysis/passes/stringintconv/doc.go b/vendor/golang.org/x/tools/go/analysis/passes/stringintconv/doc.go deleted file mode 100644 index 205cd6401..000000000 --- a/vendor/golang.org/x/tools/go/analysis/passes/stringintconv/doc.go +++ /dev/null @@ -1,21 +0,0 @@ -// Copyright 2023 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Package stringintconv defines an Analyzer that flags type conversions -// from integers to strings. -// -// # Analyzer stringintconv -// -// stringintconv: check for string(int) conversions -// -// This checker flags conversions of the form string(x) where x is an integer -// (but not byte or rune) type. Such conversions are discouraged because they -// return the UTF-8 representation of the Unicode code point x, and not a decimal -// string representation of x as one might expect. Furthermore, if x denotes an -// invalid code point, the conversion cannot be statically rejected. -// -// For conversions that intend on using the code point, consider replacing them -// with string(rune(x)). Otherwise, strconv.Itoa and its equivalents return the -// string representation of the value in the desired base. -package stringintconv diff --git a/vendor/golang.org/x/tools/go/analysis/passes/stringintconv/string.go b/vendor/golang.org/x/tools/go/analysis/passes/stringintconv/string.go deleted file mode 100644 index 16a4b3e55..000000000 --- a/vendor/golang.org/x/tools/go/analysis/passes/stringintconv/string.go +++ /dev/null @@ -1,212 +0,0 @@ -// Copyright 2020 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
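A small self-contained sketch of the conversions the stringintconv checker distinguishes (the values are arbitrary):

    package main

    import (
        "fmt"
        "strconv"
    )

    func main() {
        x := 65

        s := string(x) // reported: yields "A" (code point U+0041), not "65"

        // If the code point is what is wanted, the suggested fix is:
        r := string(rune(x)) // "A", not reported

        // If a decimal representation is what is wanted:
        d := strconv.Itoa(x) // "65"

        fmt.Println(s, r, d)
    }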
- -package stringintconv - -import ( - _ "embed" - "fmt" - "go/ast" - "go/types" - "strings" - - "golang.org/x/tools/go/analysis" - "golang.org/x/tools/go/analysis/passes/inspect" - "golang.org/x/tools/go/analysis/passes/internal/analysisutil" - "golang.org/x/tools/go/ast/inspector" - "golang.org/x/tools/internal/aliases" - "golang.org/x/tools/internal/typeparams" -) - -//go:embed doc.go -var doc string - -var Analyzer = &analysis.Analyzer{ - Name: "stringintconv", - Doc: analysisutil.MustExtractDoc(doc, "stringintconv"), - URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/stringintconv", - Requires: []*analysis.Analyzer{inspect.Analyzer}, - Run: run, -} - -// describe returns a string describing the type typ contained within the type -// set of inType. If non-empty, inName is used as the name of inType (this is -// necessary so that we can use alias type names that may not be reachable from -// inType itself). -func describe(typ, inType types.Type, inName string) string { - name := inName - if typ != inType { - name = typeName(typ) - } - if name == "" { - return "" - } - - var parentheticals []string - if underName := typeName(typ.Underlying()); underName != "" && underName != name { - parentheticals = append(parentheticals, underName) - } - - if typ != inType && inName != "" && inName != name { - parentheticals = append(parentheticals, "in "+inName) - } - - if len(parentheticals) > 0 { - name += " (" + strings.Join(parentheticals, ", ") + ")" - } - - return name -} - -func typeName(typ types.Type) string { - typ = aliases.Unalias(typ) - // TODO(adonovan): don't discard alias type, return its name. - if v, _ := typ.(*types.Basic); v != nil { - return v.Name() - } - if v, _ := typ.(interface{ Obj() *types.TypeName }); v != nil { // Named, TypeParam - return v.Obj().Name() - } - return "" -} - -func run(pass *analysis.Pass) (interface{}, error) { - inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) - nodeFilter := []ast.Node{ - (*ast.CallExpr)(nil), - } - inspect.Preorder(nodeFilter, func(n ast.Node) { - call := n.(*ast.CallExpr) - - if len(call.Args) != 1 { - return - } - arg := call.Args[0] - - // Retrieve target type name. - var tname *types.TypeName - switch fun := call.Fun.(type) { - case *ast.Ident: - tname, _ = pass.TypesInfo.Uses[fun].(*types.TypeName) - case *ast.SelectorExpr: - tname, _ = pass.TypesInfo.Uses[fun.Sel].(*types.TypeName) - } - if tname == nil { - return - } - - // In the conversion T(v) of a value v of type V to a target type T, we - // look for types T0 in the type set of T and V0 in the type set of V, such - // that V0->T0 is a problematic conversion. If T and V are not type - // parameters, this amounts to just checking if V->T is a problematic - // conversion. - - // First, find a type T0 in T that has an underlying type of string. - T := tname.Type() - ttypes, err := structuralTypes(T) - if err != nil { - return // invalid type - } - - var T0 types.Type // string type in the type set of T - - for _, tt := range ttypes { - u, _ := tt.Underlying().(*types.Basic) - if u != nil && u.Kind() == types.String { - T0 = tt - break - } - } - - if T0 == nil { - // No target types have an underlying type of string. - return - } - - // Next, find a type V0 in V that has an underlying integral type that is - // not byte or rune. 
- V := pass.TypesInfo.TypeOf(arg) - vtypes, err := structuralTypes(V) - if err != nil { - return // invalid type - } - - var V0 types.Type // integral type in the type set of V - - for _, vt := range vtypes { - u, _ := vt.Underlying().(*types.Basic) - if u != nil && u.Info()&types.IsInteger != 0 { - switch u.Kind() { - case types.Byte, types.Rune, types.UntypedRune: - continue - } - V0 = vt - break - } - } - - if V0 == nil { - // No source types are non-byte or rune integer types. - return - } - - convertibleToRune := true // if true, we can suggest a fix - for _, t := range vtypes { - if !types.ConvertibleTo(t, types.Typ[types.Rune]) { - convertibleToRune = false - break - } - } - - target := describe(T0, T, tname.Name()) - source := describe(V0, V, typeName(V)) - - if target == "" || source == "" { - return // something went wrong - } - - diag := analysis.Diagnostic{ - Pos: n.Pos(), - Message: fmt.Sprintf("conversion from %s to %s yields a string of one rune, not a string of digits (did you mean fmt.Sprint(x)?)", source, target), - } - - if convertibleToRune { - diag.SuggestedFixes = []analysis.SuggestedFix{ - { - Message: "Did you mean to convert a rune to a string?", - TextEdits: []analysis.TextEdit{ - { - Pos: arg.Pos(), - End: arg.Pos(), - NewText: []byte("rune("), - }, - { - Pos: arg.End(), - End: arg.End(), - NewText: []byte(")"), - }, - }, - }, - } - } - pass.Report(diag) - }) - return nil, nil -} - -func structuralTypes(t types.Type) ([]types.Type, error) { - var structuralTypes []types.Type - if tp, ok := aliases.Unalias(t).(*types.TypeParam); ok { - terms, err := typeparams.StructuralTerms(tp) - if err != nil { - return nil, err - } - for _, term := range terms { - structuralTypes = append(structuralTypes, term.Type()) - } - } else { - structuralTypes = append(structuralTypes, t) - } - return structuralTypes, nil -} diff --git a/vendor/golang.org/x/tools/go/analysis/passes/structtag/structtag.go b/vendor/golang.org/x/tools/go/analysis/passes/structtag/structtag.go deleted file mode 100644 index a0beb46bd..000000000 --- a/vendor/golang.org/x/tools/go/analysis/passes/structtag/structtag.go +++ /dev/null @@ -1,314 +0,0 @@ -// Copyright 2010 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Package structtag defines an Analyzer that checks struct field tags -// are well formed. -package structtag - -import ( - "errors" - "go/ast" - "go/token" - "go/types" - "path/filepath" - "reflect" - "strconv" - "strings" - - "golang.org/x/tools/go/analysis" - "golang.org/x/tools/go/analysis/passes/inspect" - "golang.org/x/tools/go/ast/inspector" -) - -const Doc = `check that struct field tags conform to reflect.StructTag.Get - -Also report certain struct tags (json, xml) used with unexported fields.` - -var Analyzer = &analysis.Analyzer{ - Name: "structtag", - Doc: Doc, - URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/structtag", - Requires: []*analysis.Analyzer{inspect.Analyzer}, - RunDespiteErrors: true, - Run: run, -} - -func run(pass *analysis.Pass) (interface{}, error) { - inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) - - nodeFilter := []ast.Node{ - (*ast.StructType)(nil), - } - inspect.Preorder(nodeFilter, func(n ast.Node) { - styp, ok := pass.TypesInfo.Types[n.(*ast.StructType)].Type.(*types.Struct) - // Type information may be incomplete. 
- if !ok { - return - } - var seen namesSeen - for i := 0; i < styp.NumFields(); i++ { - field := styp.Field(i) - tag := styp.Tag(i) - checkCanonicalFieldTag(pass, field, tag, &seen) - } - }) - return nil, nil -} - -// namesSeen keeps track of encoding tags by their key, name, and nested level -// from the initial struct. The level is taken into account because equal -// encoding key names only conflict when at the same level; otherwise, the lower -// level shadows the higher level. -type namesSeen map[uniqueName]token.Pos - -type uniqueName struct { - key string // "xml" or "json" - name string // the encoding name - level int // anonymous struct nesting level -} - -func (s *namesSeen) Get(key, name string, level int) (token.Pos, bool) { - if *s == nil { - *s = make(map[uniqueName]token.Pos) - } - pos, ok := (*s)[uniqueName{key, name, level}] - return pos, ok -} - -func (s *namesSeen) Set(key, name string, level int, pos token.Pos) { - if *s == nil { - *s = make(map[uniqueName]token.Pos) - } - (*s)[uniqueName{key, name, level}] = pos -} - -var checkTagDups = []string{"json", "xml"} -var checkTagSpaces = map[string]bool{"json": true, "xml": true, "asn1": true} - -// checkCanonicalFieldTag checks a single struct field tag. -func checkCanonicalFieldTag(pass *analysis.Pass, field *types.Var, tag string, seen *namesSeen) { - switch pass.Pkg.Path() { - case "encoding/json", "encoding/xml": - // These packages know how to use their own APIs. - // Sometimes they are testing what happens to incorrect programs. - return - } - - for _, key := range checkTagDups { - checkTagDuplicates(pass, tag, key, field, field, seen, 1) - } - - if err := validateStructTag(tag); err != nil { - pass.Reportf(field.Pos(), "struct field tag %#q not compatible with reflect.StructTag.Get: %s", tag, err) - } - - // Check for use of json or xml tags with unexported fields. - - // Embedded struct. Nothing to do for now, but that - // may change, depending on what happens with issue 7363. - // TODO(adonovan): investigate, now that that issue is fixed. - if field.Anonymous() { - return - } - - if field.Exported() { - return - } - - for _, enc := range [...]string{"json", "xml"} { - switch reflect.StructTag(tag).Get(enc) { - // Ignore warning if the field not exported and the tag is marked as - // ignored. - case "", "-": - default: - pass.Reportf(field.Pos(), "struct field %s has %s tag but is not exported", field.Name(), enc) - return - } - } -} - -// checkTagDuplicates checks a single struct field tag to see if any tags are -// duplicated. nearest is the field that's closest to the field being checked, -// while still being part of the top-level struct type. -func checkTagDuplicates(pass *analysis.Pass, tag, key string, nearest, field *types.Var, seen *namesSeen, level int) { - val := reflect.StructTag(tag).Get(key) - if val == "-" { - // Ignored, even if the field is anonymous. - return - } - if val == "" || val[0] == ',' { - if !field.Anonymous() { - // Ignored if the field isn't anonymous. - return - } - typ, ok := field.Type().Underlying().(*types.Struct) - if !ok { - return - } - for i := 0; i < typ.NumFields(); i++ { - field := typ.Field(i) - if !field.Exported() { - continue - } - tag := typ.Tag(i) - checkTagDuplicates(pass, tag, key, nearest, field, seen, level+1) - } - return - } - if key == "xml" && field.Name() == "XMLName" { - // XMLName defines the XML element name of the struct being - // checked. That name cannot collide with element or attribute - // names defined on other fields of the struct. 
Vet does not have a - // check for untagged fields of type struct defining their own name - // by containing a field named XMLName; see issue 18256. - return - } - if i := strings.Index(val, ","); i >= 0 { - if key == "xml" { - // Use a separate namespace for XML attributes. - for _, opt := range strings.Split(val[i:], ",") { - if opt == "attr" { - key += " attribute" // Key is part of the error message. - break - } - } - } - val = val[:i] - } - if pos, ok := seen.Get(key, val, level); ok { - alsoPos := pass.Fset.Position(pos) - alsoPos.Column = 0 - - // Make the "also at" position relative to the current position, - // to ensure that all warnings are unambiguous and correct. For - // example, via anonymous struct fields, it's possible for the - // two fields to be in different packages and directories. - thisPos := pass.Fset.Position(field.Pos()) - rel, err := filepath.Rel(filepath.Dir(thisPos.Filename), alsoPos.Filename) - if err != nil { - // Possibly because the paths are relative; leave the - // filename alone. - } else { - alsoPos.Filename = rel - } - - pass.Reportf(nearest.Pos(), "struct field %s repeats %s tag %q also at %s", field.Name(), key, val, alsoPos) - } else { - seen.Set(key, val, level, field.Pos()) - } -} - -var ( - errTagSyntax = errors.New("bad syntax for struct tag pair") - errTagKeySyntax = errors.New("bad syntax for struct tag key") - errTagValueSyntax = errors.New("bad syntax for struct tag value") - errTagValueSpace = errors.New("suspicious space in struct tag value") - errTagSpace = errors.New("key:\"value\" pairs not separated by spaces") -) - -// validateStructTag parses the struct tag and returns an error if it is not -// in the canonical format, which is a space-separated list of key:"value" -// settings. The value may contain spaces. -func validateStructTag(tag string) error { - // This code is based on the StructTag.Get code in package reflect. - - n := 0 - for ; tag != ""; n++ { - if n > 0 && tag != "" && tag[0] != ' ' { - // More restrictive than reflect, but catches likely mistakes - // like `x:"foo",y:"bar"`, which parses as `x:"foo" ,y:"bar"` with second key ",y". - return errTagSpace - } - // Skip leading space. - i := 0 - for i < len(tag) && tag[i] == ' ' { - i++ - } - tag = tag[i:] - if tag == "" { - break - } - - // Scan to colon. A space, a quote or a control character is a syntax error. - // Strictly speaking, control chars include the range [0x7f, 0x9f], not just - // [0x00, 0x1f], but in practice, we ignore the multi-byte control characters - // as it is simpler to inspect the tag's bytes than the tag's runes. - i = 0 - for i < len(tag) && tag[i] > ' ' && tag[i] != ':' && tag[i] != '"' && tag[i] != 0x7f { - i++ - } - if i == 0 { - return errTagKeySyntax - } - if i+1 >= len(tag) || tag[i] != ':' { - return errTagSyntax - } - if tag[i+1] != '"' { - return errTagValueSyntax - } - key := tag[:i] - tag = tag[i+1:] - - // Scan quoted string to find value. - i = 1 - for i < len(tag) && tag[i] != '"' { - if tag[i] == '\\' { - i++ - } - i++ - } - if i >= len(tag) { - return errTagValueSyntax - } - qvalue := tag[:i+1] - tag = tag[i+1:] - - value, err := strconv.Unquote(qvalue) - if err != nil { - return errTagValueSyntax - } - - if !checkTagSpaces[key] { - continue - } - - switch key { - case "xml": - // If the first or last character in the XML tag is a space, it is - // suspicious. - if strings.Trim(value, " ") != value { - return errTagValueSpace - } - - // If there are multiple spaces, they are suspicious. 
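A hypothetical illustration of the duplicate-tag and tag-syntax checks above (field and type names invented for illustration):

    package example

    type user struct {
    	ID    int    `json:"id"`
    	Name  string `json:"id"`                // reported: repeats json tag "id" also used on ID
    	Email string `json:"email",xml:"email"` // reported: key:"value" pairs not separated by spaces
    }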
- if strings.Count(value, " ") > 1 { - return errTagValueSpace - } - - // If there is no comma, skip the rest of the checks. - comma := strings.IndexRune(value, ',') - if comma < 0 { - continue - } - - // If the character before a comma is a space, this is suspicious. - if comma > 0 && value[comma-1] == ' ' { - return errTagValueSpace - } - value = value[comma+1:] - case "json": - // JSON allows using spaces in the name, so skip it. - comma := strings.IndexRune(value, ',') - if comma < 0 { - continue - } - value = value[comma+1:] - } - - if strings.IndexByte(value, ' ') >= 0 { - return errTagValueSpace - } - } - return nil -} diff --git a/vendor/golang.org/x/tools/go/analysis/passes/testinggoroutine/doc.go b/vendor/golang.org/x/tools/go/analysis/passes/testinggoroutine/doc.go deleted file mode 100644 index 4cd5b71e9..000000000 --- a/vendor/golang.org/x/tools/go/analysis/passes/testinggoroutine/doc.go +++ /dev/null @@ -1,22 +0,0 @@ -// Copyright 2023 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Package testinggoroutine defines an Analyzerfor detecting calls to -// Fatal from a test goroutine. -// -// # Analyzer testinggoroutine -// -// testinggoroutine: report calls to (*testing.T).Fatal from goroutines started by a test -// -// Functions that abruptly terminate a test, such as the Fatal, Fatalf, FailNow, and -// Skip{,f,Now} methods of *testing.T, must be called from the test goroutine itself. -// This checker detects calls to these functions that occur within a goroutine -// started by the test. For example: -// -// func TestFoo(t *testing.T) { -// go func() { -// t.Fatal("oops") // error: (*T).Fatal called from non-test goroutine -// }() -// } -package testinggoroutine diff --git a/vendor/golang.org/x/tools/go/analysis/passes/testinggoroutine/testinggoroutine.go b/vendor/golang.org/x/tools/go/analysis/passes/testinggoroutine/testinggoroutine.go deleted file mode 100644 index 828f95bc8..000000000 --- a/vendor/golang.org/x/tools/go/analysis/passes/testinggoroutine/testinggoroutine.go +++ /dev/null @@ -1,279 +0,0 @@ -// Copyright 2020 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
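The testinggoroutine documentation removed above shows the problematic pattern. A hedged sketch of one conventional fix, reporting the failure back to the test goroutine (doWork is a hypothetical helper, not part of the vendored sources):

    package example

    import "testing"

    func doWork() error { return nil }

    func TestFooSafe(t *testing.T) {
    	errc := make(chan error, 1)
    	go func() {
    		errc <- doWork() // send the result back instead of calling t.Fatal here
    	}()
    	if err := <-errc; err != nil {
    		t.Fatal(err) // Fatal on the test goroutine itself is fine
    	}
    }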
- -package testinggoroutine - -import ( - _ "embed" - "fmt" - "go/ast" - "go/token" - "go/types" - - "golang.org/x/tools/go/analysis" - "golang.org/x/tools/go/analysis/passes/inspect" - "golang.org/x/tools/go/analysis/passes/internal/analysisutil" - "golang.org/x/tools/go/ast/astutil" - "golang.org/x/tools/go/ast/inspector" - "golang.org/x/tools/go/types/typeutil" - "golang.org/x/tools/internal/aliases" -) - -//go:embed doc.go -var doc string - -var reportSubtest bool - -func init() { - Analyzer.Flags.BoolVar(&reportSubtest, "subtest", false, "whether to check if t.Run subtest is terminated correctly; experimental") -} - -var Analyzer = &analysis.Analyzer{ - Name: "testinggoroutine", - Doc: analysisutil.MustExtractDoc(doc, "testinggoroutine"), - URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/testinggoroutine", - Requires: []*analysis.Analyzer{inspect.Analyzer}, - Run: run, -} - -func run(pass *analysis.Pass) (interface{}, error) { - inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) - - if !analysisutil.Imports(pass.Pkg, "testing") { - return nil, nil - } - - toDecl := localFunctionDecls(pass.TypesInfo, pass.Files) - - // asyncs maps nodes whose statements will be executed concurrently - // with respect to some test function, to the call sites where they - // are invoked asynchronously. There may be multiple such call sites - // for e.g. test helpers. - asyncs := make(map[ast.Node][]*asyncCall) - var regions []ast.Node - addCall := func(c *asyncCall) { - if c != nil { - r := c.region - if asyncs[r] == nil { - regions = append(regions, r) - } - asyncs[r] = append(asyncs[r], c) - } - } - - // Collect all of the go callee() and t.Run(name, callee) extents. - inspect.Nodes([]ast.Node{ - (*ast.FuncDecl)(nil), - (*ast.GoStmt)(nil), - (*ast.CallExpr)(nil), - }, func(node ast.Node, push bool) bool { - if !push { - return false - } - switch node := node.(type) { - case *ast.FuncDecl: - return hasBenchmarkOrTestParams(node) - - case *ast.GoStmt: - c := goAsyncCall(pass.TypesInfo, node, toDecl) - addCall(c) - - case *ast.CallExpr: - c := tRunAsyncCall(pass.TypesInfo, node) - addCall(c) - } - return true - }) - - // Check for t.Forbidden() calls within each region r that is a - // callee in some go r() or a t.Run("name", r). - // - // Also considers a special case when r is a go t.Forbidden() call. - for _, region := range regions { - ast.Inspect(region, func(n ast.Node) bool { - if n == region { - return true // always descend into the region itself. - } else if asyncs[n] != nil { - return false // will be visited by another region. - } - - call, ok := n.(*ast.CallExpr) - if !ok { - return true - } - x, sel, fn := forbiddenMethod(pass.TypesInfo, call) - if x == nil { - return true - } - - for _, e := range asyncs[region] { - if !withinScope(e.scope, x) { - forbidden := formatMethod(sel, fn) // e.g. "(*testing.T).Forbidden - - var context string - var where analysis.Range = e.async // Put the report at the go fun() or t.Run(name, fun). - if _, local := e.fun.(*ast.FuncLit); local { - where = call // Put the report at the t.Forbidden() call. 
- } else if id, ok := e.fun.(*ast.Ident); ok { - context = fmt.Sprintf(" (%s calls %s)", id.Name, forbidden) - } - if _, ok := e.async.(*ast.GoStmt); ok { - pass.ReportRangef(where, "call to %s from a non-test goroutine%s", forbidden, context) - } else if reportSubtest { - pass.ReportRangef(where, "call to %s on %s defined outside of the subtest%s", forbidden, x.Name(), context) - } - } - } - return true - }) - } - - return nil, nil -} - -func hasBenchmarkOrTestParams(fnDecl *ast.FuncDecl) bool { - // Check that the function's arguments include "*testing.T" or "*testing.B". - params := fnDecl.Type.Params.List - - for _, param := range params { - if _, ok := typeIsTestingDotTOrB(param.Type); ok { - return true - } - } - - return false -} - -func typeIsTestingDotTOrB(expr ast.Expr) (string, bool) { - starExpr, ok := expr.(*ast.StarExpr) - if !ok { - return "", false - } - selExpr, ok := starExpr.X.(*ast.SelectorExpr) - if !ok { - return "", false - } - varPkg := selExpr.X.(*ast.Ident) - if varPkg.Name != "testing" { - return "", false - } - - varTypeName := selExpr.Sel.Name - ok = varTypeName == "B" || varTypeName == "T" - return varTypeName, ok -} - -// asyncCall describes a region of code that needs to be checked for -// t.Forbidden() calls as it is started asynchronously from an async -// node go fun() or t.Run(name, fun). -type asyncCall struct { - region ast.Node // region of code to check for t.Forbidden() calls. - async ast.Node // *ast.GoStmt or *ast.CallExpr (for t.Run) - scope ast.Node // Report t.Forbidden() if t is not declared within scope. - fun ast.Expr // fun in go fun() or t.Run(name, fun) -} - -// withinScope returns true if x.Pos() is in [scope.Pos(), scope.End()]. -func withinScope(scope ast.Node, x *types.Var) bool { - if scope != nil { - return x.Pos() != token.NoPos && scope.Pos() <= x.Pos() && x.Pos() <= scope.End() - } - return false -} - -// goAsyncCall returns the extent of a call from a go fun() statement. -func goAsyncCall(info *types.Info, goStmt *ast.GoStmt, toDecl func(*types.Func) *ast.FuncDecl) *asyncCall { - call := goStmt.Call - - fun := astutil.Unparen(call.Fun) - if id := funcIdent(fun); id != nil { - if lit := funcLitInScope(id); lit != nil { - return &asyncCall{region: lit, async: goStmt, scope: nil, fun: fun} - } - } - - if fn := typeutil.StaticCallee(info, call); fn != nil { // static call or method in the package? - if decl := toDecl(fn); decl != nil { - return &asyncCall{region: decl, async: goStmt, scope: nil, fun: fun} - } - } - - // Check go statement for go t.Forbidden() or go func(){t.Forbidden()}(). - return &asyncCall{region: goStmt, async: goStmt, scope: nil, fun: fun} -} - -// tRunAsyncCall returns the extent of a call from a t.Run("name", fun) expression. -func tRunAsyncCall(info *types.Info, call *ast.CallExpr) *asyncCall { - if len(call.Args) != 2 { - return nil - } - run := typeutil.Callee(info, call) - if run, ok := run.(*types.Func); !ok || !isMethodNamed(run, "testing", "Run") { - return nil - } - - fun := astutil.Unparen(call.Args[1]) - if lit, ok := fun.(*ast.FuncLit); ok { // function lit? - return &asyncCall{region: lit, async: call, scope: lit, fun: fun} - } - - if id := funcIdent(fun); id != nil { - if lit := funcLitInScope(id); lit != nil { // function lit in variable? - return &asyncCall{region: lit, async: call, scope: lit, fun: fun} - } - } - - // Check within t.Run(name, fun) for calls to t.Forbidden, - // e.g. 
t.Run(name, func(t *testing.T){ t.Forbidden() }) - return &asyncCall{region: call, async: call, scope: fun, fun: fun} -} - -var forbidden = []string{ - "FailNow", - "Fatal", - "Fatalf", - "Skip", - "Skipf", - "SkipNow", -} - -// forbiddenMethod decomposes a call x.m() into (x, x.m, m) where -// x is a variable, x.m is a selection, and m is the static callee m. -// Returns (nil, nil, nil) if call is not of this form. -func forbiddenMethod(info *types.Info, call *ast.CallExpr) (*types.Var, *types.Selection, *types.Func) { - // Compare to typeutil.StaticCallee. - fun := astutil.Unparen(call.Fun) - selExpr, ok := fun.(*ast.SelectorExpr) - if !ok { - return nil, nil, nil - } - sel := info.Selections[selExpr] - if sel == nil { - return nil, nil, nil - } - - var x *types.Var - if id, ok := astutil.Unparen(selExpr.X).(*ast.Ident); ok { - x, _ = info.Uses[id].(*types.Var) - } - if x == nil { - return nil, nil, nil - } - - fn, _ := sel.Obj().(*types.Func) - if fn == nil || !isMethodNamed(fn, "testing", forbidden...) { - return nil, nil, nil - } - return x, sel, fn -} - -func formatMethod(sel *types.Selection, fn *types.Func) string { - var ptr string - rtype := sel.Recv() - if p, ok := aliases.Unalias(rtype).(*types.Pointer); ok { - ptr = "*" - rtype = p.Elem() - } - return fmt.Sprintf("(%s%s).%s", ptr, rtype.String(), fn.Name()) -} diff --git a/vendor/golang.org/x/tools/go/analysis/passes/testinggoroutine/util.go b/vendor/golang.org/x/tools/go/analysis/passes/testinggoroutine/util.go deleted file mode 100644 index ad815f190..000000000 --- a/vendor/golang.org/x/tools/go/analysis/passes/testinggoroutine/util.go +++ /dev/null @@ -1,96 +0,0 @@ -// Copyright 2023 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package testinggoroutine - -import ( - "go/ast" - "go/types" - - "golang.org/x/tools/go/ast/astutil" - "golang.org/x/tools/internal/typeparams" -) - -// AST and types utilities that not specific to testinggoroutines. - -// localFunctionDecls returns a mapping from *types.Func to *ast.FuncDecl in files. -func localFunctionDecls(info *types.Info, files []*ast.File) func(*types.Func) *ast.FuncDecl { - var fnDecls map[*types.Func]*ast.FuncDecl // computed lazily - return func(f *types.Func) *ast.FuncDecl { - if f != nil && fnDecls == nil { - fnDecls = make(map[*types.Func]*ast.FuncDecl) - for _, file := range files { - for _, decl := range file.Decls { - if fnDecl, ok := decl.(*ast.FuncDecl); ok { - if fn, ok := info.Defs[fnDecl.Name].(*types.Func); ok { - fnDecls[fn] = fnDecl - } - } - } - } - } - // TODO: set f = f.Origin() here. - return fnDecls[f] - } -} - -// isMethodNamed returns true if f is a method defined -// in package with the path pkgPath with a name in names. -func isMethodNamed(f *types.Func, pkgPath string, names ...string) bool { - if f == nil { - return false - } - if f.Pkg() == nil || f.Pkg().Path() != pkgPath { - return false - } - if f.Type().(*types.Signature).Recv() == nil { - return false - } - for _, n := range names { - if f.Name() == n { - return true - } - } - return false -} - -func funcIdent(fun ast.Expr) *ast.Ident { - switch fun := astutil.Unparen(fun).(type) { - case *ast.IndexExpr, *ast.IndexListExpr: - x, _, _, _ := typeparams.UnpackIndexExpr(fun) // necessary? - id, _ := x.(*ast.Ident) - return id - case *ast.Ident: - return fun - default: - return nil - } -} - -// funcLitInScope returns a FuncLit that id is at least initially assigned to. 
-// -// TODO: This is closely tied to id.Obj which is deprecated. -func funcLitInScope(id *ast.Ident) *ast.FuncLit { - // Compare to (*ast.Object).Pos(). - if id.Obj == nil { - return nil - } - var rhs ast.Expr - switch d := id.Obj.Decl.(type) { - case *ast.AssignStmt: - for i, x := range d.Lhs { - if ident, isIdent := x.(*ast.Ident); isIdent && ident.Name == id.Name && i < len(d.Rhs) { - rhs = d.Rhs[i] - } - } - case *ast.ValueSpec: - for i, n := range d.Names { - if n.Name == id.Name && i < len(d.Values) { - rhs = d.Values[i] - } - } - } - lit, _ := rhs.(*ast.FuncLit) - return lit -} diff --git a/vendor/golang.org/x/tools/go/analysis/passes/tests/doc.go b/vendor/golang.org/x/tools/go/analysis/passes/tests/doc.go deleted file mode 100644 index 3ae27db9c..000000000 --- a/vendor/golang.org/x/tools/go/analysis/passes/tests/doc.go +++ /dev/null @@ -1,18 +0,0 @@ -// Copyright 2023 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Package tests defines an Analyzer that checks for common mistaken -// usages of tests and examples. -// -// # Analyzer tests -// -// tests: check for common mistaken usages of tests and examples -// -// The tests checker walks Test, Benchmark, Fuzzing and Example functions checking -// malformed names, wrong signatures and examples documenting non-existent -// identifiers. -// -// Please see the documentation for package testing in golang.org/pkg/testing -// for the conventions that are enforced for Tests, Benchmarks, and Examples. -package tests diff --git a/vendor/golang.org/x/tools/go/analysis/passes/tests/tests.go b/vendor/golang.org/x/tools/go/analysis/passes/tests/tests.go deleted file mode 100644 index 39d0d9e42..000000000 --- a/vendor/golang.org/x/tools/go/analysis/passes/tests/tests.go +++ /dev/null @@ -1,475 +0,0 @@ -// Copyright 2015 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package tests - -import ( - _ "embed" - "fmt" - "go/ast" - "go/token" - "go/types" - "regexp" - "strings" - "unicode" - "unicode/utf8" - - "golang.org/x/tools/go/analysis" - "golang.org/x/tools/go/analysis/passes/internal/analysisutil" -) - -//go:embed doc.go -var doc string - -var Analyzer = &analysis.Analyzer{ - Name: "tests", - Doc: analysisutil.MustExtractDoc(doc, "tests"), - URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/tests", - Run: run, -} - -var acceptedFuzzTypes = []types.Type{ - types.Typ[types.String], - types.Typ[types.Bool], - types.Typ[types.Float32], - types.Typ[types.Float64], - types.Typ[types.Int], - types.Typ[types.Int8], - types.Typ[types.Int16], - types.Typ[types.Int32], - types.Typ[types.Int64], - types.Typ[types.Uint], - types.Typ[types.Uint8], - types.Typ[types.Uint16], - types.Typ[types.Uint32], - types.Typ[types.Uint64], - types.NewSlice(types.Universe.Lookup("byte").Type()), -} - -func run(pass *analysis.Pass) (interface{}, error) { - for _, f := range pass.Files { - if !strings.HasSuffix(pass.Fset.File(f.Pos()).Name(), "_test.go") { - continue - } - for _, decl := range f.Decls { - fn, ok := decl.(*ast.FuncDecl) - if !ok || fn.Recv != nil { - // Ignore non-functions or functions with receivers. 
- continue - } - switch { - case strings.HasPrefix(fn.Name.Name, "Example"): - checkExampleName(pass, fn) - checkExampleOutput(pass, fn, f.Comments) - case strings.HasPrefix(fn.Name.Name, "Test"): - checkTest(pass, fn, "Test") - case strings.HasPrefix(fn.Name.Name, "Benchmark"): - checkTest(pass, fn, "Benchmark") - case strings.HasPrefix(fn.Name.Name, "Fuzz"): - checkTest(pass, fn, "Fuzz") - checkFuzz(pass, fn) - } - } - } - return nil, nil -} - -// checkFuzz checks the contents of a fuzz function. -func checkFuzz(pass *analysis.Pass, fn *ast.FuncDecl) { - params := checkFuzzCall(pass, fn) - if params != nil { - checkAddCalls(pass, fn, params) - } -} - -// checkFuzzCall checks the arguments of f.Fuzz() calls: -// -// 1. f.Fuzz() should call a function and it should be of type (*testing.F).Fuzz(). -// 2. The called function in f.Fuzz(func(){}) should not return result. -// 3. First argument of func() should be of type *testing.T -// 4. Second argument onwards should be of type []byte, string, bool, byte, -// rune, float32, float64, int, int8, int16, int32, int64, uint, uint8, uint16, -// uint32, uint64 -// 5. func() must not call any *F methods, e.g. (*F).Log, (*F).Error, (*F).Skip -// The only *F methods that are allowed in the (*F).Fuzz function are (*F).Failed and (*F).Name. -// -// Returns the list of parameters to the fuzz function, if they are valid fuzz parameters. -func checkFuzzCall(pass *analysis.Pass, fn *ast.FuncDecl) (params *types.Tuple) { - ast.Inspect(fn, func(n ast.Node) bool { - call, ok := n.(*ast.CallExpr) - if ok { - if !isFuzzTargetDotFuzz(pass, call) { - return true - } - - // Only one argument (func) must be passed to (*testing.F).Fuzz. - if len(call.Args) != 1 { - return true - } - expr := call.Args[0] - if pass.TypesInfo.Types[expr].Type == nil { - return true - } - t := pass.TypesInfo.Types[expr].Type.Underlying() - tSign, argOk := t.(*types.Signature) - // Argument should be a function - if !argOk { - pass.ReportRangef(expr, "argument to Fuzz must be a function") - return false - } - // ff Argument function should not return - if tSign.Results().Len() != 0 { - pass.ReportRangef(expr, "fuzz target must not return any value") - } - // ff Argument function should have 1 or more argument - if tSign.Params().Len() == 0 { - pass.ReportRangef(expr, "fuzz target must have 1 or more argument") - return false - } - ok := validateFuzzArgs(pass, tSign.Params(), expr) - if ok && params == nil { - params = tSign.Params() - } - // Inspect the function that was passed as an argument to make sure that - // there are no calls to *F methods, except for Name and Failed. - ast.Inspect(expr, func(n ast.Node) bool { - if call, ok := n.(*ast.CallExpr); ok { - if !isFuzzTargetDot(pass, call, "") { - return true - } - if !isFuzzTargetDot(pass, call, "Name") && !isFuzzTargetDot(pass, call, "Failed") { - pass.ReportRangef(call, "fuzz target must not call any *F methods") - } - } - return true - }) - // We do not need to look at any calls to f.Fuzz inside of a Fuzz call, - // since they are not allowed. 
- return false - } - return true - }) - return params -} - -// checkAddCalls checks that the arguments of f.Add calls have the same number and type of arguments as -// the signature of the function passed to (*testing.F).Fuzz -func checkAddCalls(pass *analysis.Pass, fn *ast.FuncDecl, params *types.Tuple) { - ast.Inspect(fn, func(n ast.Node) bool { - call, ok := n.(*ast.CallExpr) - if ok { - if !isFuzzTargetDotAdd(pass, call) { - return true - } - - // The first argument to function passed to (*testing.F).Fuzz is (*testing.T). - if len(call.Args) != params.Len()-1 { - pass.ReportRangef(call, "wrong number of values in call to (*testing.F).Add: %d, fuzz target expects %d", len(call.Args), params.Len()-1) - return true - } - var mismatched []int - for i, expr := range call.Args { - if pass.TypesInfo.Types[expr].Type == nil { - return true - } - t := pass.TypesInfo.Types[expr].Type - if !types.Identical(t, params.At(i+1).Type()) { - mismatched = append(mismatched, i) - } - } - // If just one of the types is mismatched report for that - // type only. Otherwise report for the whole call to (*testing.F).Add - if len(mismatched) == 1 { - i := mismatched[0] - expr := call.Args[i] - t := pass.TypesInfo.Types[expr].Type - pass.ReportRangef(expr, fmt.Sprintf("mismatched type in call to (*testing.F).Add: %v, fuzz target expects %v", t, params.At(i+1).Type())) - } else if len(mismatched) > 1 { - var gotArgs, wantArgs []types.Type - for i := 0; i < len(call.Args); i++ { - gotArgs, wantArgs = append(gotArgs, pass.TypesInfo.Types[call.Args[i]].Type), append(wantArgs, params.At(i+1).Type()) - } - pass.ReportRangef(call, fmt.Sprintf("mismatched types in call to (*testing.F).Add: %v, fuzz target expects %v", gotArgs, wantArgs)) - } - } - return true - }) -} - -// isFuzzTargetDotFuzz reports whether call is (*testing.F).Fuzz(). -func isFuzzTargetDotFuzz(pass *analysis.Pass, call *ast.CallExpr) bool { - return isFuzzTargetDot(pass, call, "Fuzz") -} - -// isFuzzTargetDotAdd reports whether call is (*testing.F).Add(). -func isFuzzTargetDotAdd(pass *analysis.Pass, call *ast.CallExpr) bool { - return isFuzzTargetDot(pass, call, "Add") -} - -// isFuzzTargetDot reports whether call is (*testing.F).(). -func isFuzzTargetDot(pass *analysis.Pass, call *ast.CallExpr, name string) bool { - if selExpr, ok := call.Fun.(*ast.SelectorExpr); ok { - if !isTestingType(pass.TypesInfo.Types[selExpr.X].Type, "F") { - return false - } - if name == "" || selExpr.Sel.Name == name { - return true - } - } - return false -} - -// Validate the arguments of fuzz target. -func validateFuzzArgs(pass *analysis.Pass, params *types.Tuple, expr ast.Expr) bool { - fLit, isFuncLit := expr.(*ast.FuncLit) - exprRange := expr - ok := true - if !isTestingType(params.At(0).Type(), "T") { - if isFuncLit { - exprRange = fLit.Type.Params.List[0].Type - } - pass.ReportRangef(exprRange, "the first parameter of a fuzz target must be *testing.T") - ok = false - } - for i := 1; i < params.Len(); i++ { - if !isAcceptedFuzzType(params.At(i).Type()) { - if isFuncLit { - curr := 0 - for _, field := range fLit.Type.Params.List { - curr += len(field.Names) - if i < curr { - exprRange = field.Type - break - } - } - } - pass.ReportRangef(exprRange, "fuzzing arguments can only have the following types: "+formatAcceptedFuzzType()) - ok = false - } - } - return ok -} - -func isTestingType(typ types.Type, testingType string) bool { - // No Unalias here: I doubt "go test" recognizes - // "type A = *testing.T; func Test(A) {}" as a test. 
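A minimal, hypothetical fuzz target (it would live in a _test.go file) that satisfies the rules enforced above: the first parameter is *testing.T, the remaining parameters use accepted types, and the f.Add seed values match those types:

    package example

    import "testing"

    func FuzzClamp(f *testing.F) {
    	f.Add("hello", 3) // seed types (string, int) match the fuzz target below
    	f.Fuzz(func(t *testing.T, s string, n int) {
    		if n >= 0 && n <= len(s) {
    			_ = s[:n]
    		}
    	})
    }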
- ptr, ok := typ.(*types.Pointer) - if !ok { - return false - } - return analysisutil.IsNamedType(ptr.Elem(), "testing", testingType) -} - -// Validate that fuzz target function's arguments are of accepted types. -func isAcceptedFuzzType(paramType types.Type) bool { - for _, typ := range acceptedFuzzTypes { - if types.Identical(typ, paramType) { - return true - } - } - return false -} - -func formatAcceptedFuzzType() string { - var acceptedFuzzTypesStrings []string - for _, typ := range acceptedFuzzTypes { - acceptedFuzzTypesStrings = append(acceptedFuzzTypesStrings, typ.String()) - } - acceptedFuzzTypesMsg := strings.Join(acceptedFuzzTypesStrings, ", ") - return acceptedFuzzTypesMsg -} - -func isExampleSuffix(s string) bool { - r, size := utf8.DecodeRuneInString(s) - return size > 0 && unicode.IsLower(r) -} - -func isTestSuffix(name string) bool { - if len(name) == 0 { - // "Test" is ok. - return true - } - r, _ := utf8.DecodeRuneInString(name) - return !unicode.IsLower(r) -} - -func isTestParam(typ ast.Expr, wantType string) bool { - ptr, ok := typ.(*ast.StarExpr) - if !ok { - // Not a pointer. - return false - } - // No easy way of making sure it's a *testing.T or *testing.B: - // ensure the name of the type matches. - if name, ok := ptr.X.(*ast.Ident); ok { - return name.Name == wantType - } - if sel, ok := ptr.X.(*ast.SelectorExpr); ok { - return sel.Sel.Name == wantType - } - return false -} - -func lookup(pkg *types.Package, name string) []types.Object { - if o := pkg.Scope().Lookup(name); o != nil { - return []types.Object{o} - } - - var ret []types.Object - // Search through the imports to see if any of them define name. - // It's hard to tell in general which package is being tested, so - // for the purposes of the analysis, allow the object to appear - // in any of the imports. This guarantees there are no false positives - // because the example needs to use the object so it must be defined - // in the package or one if its imports. On the other hand, false - // negatives are possible, but should be rare. - for _, imp := range pkg.Imports() { - if obj := imp.Scope().Lookup(name); obj != nil { - ret = append(ret, obj) - } - } - return ret -} - -// This pattern is taken from /go/src/go/doc/example.go -var outputRe = regexp.MustCompile(`(?i)^[[:space:]]*(unordered )?output:`) - -type commentMetadata struct { - isOutput bool - pos token.Pos -} - -func checkExampleOutput(pass *analysis.Pass, fn *ast.FuncDecl, fileComments []*ast.CommentGroup) { - commentsInExample := []commentMetadata{} - numOutputs := 0 - - // Find the comment blocks that are in the example. These comments are - // guaranteed to be in order of appearance. - for _, cg := range fileComments { - if cg.Pos() < fn.Pos() { - continue - } else if cg.End() > fn.End() { - break - } - - isOutput := outputRe.MatchString(cg.Text()) - if isOutput { - numOutputs++ - } - - commentsInExample = append(commentsInExample, commentMetadata{ - isOutput: isOutput, - pos: cg.Pos(), - }) - } - - // Change message based on whether there are multiple output comment blocks. - msg := "output comment block must be the last comment block" - if numOutputs > 1 { - msg = "there can only be one output comment block per example" - } - - for i, cg := range commentsInExample { - // Check for output comments that are not the last comment in the example. 
- isLast := (i == len(commentsInExample)-1) - if cg.isOutput && !isLast { - pass.Report( - analysis.Diagnostic{ - Pos: cg.pos, - Message: msg, - }, - ) - } - } -} - -func checkExampleName(pass *analysis.Pass, fn *ast.FuncDecl) { - fnName := fn.Name.Name - if params := fn.Type.Params; len(params.List) != 0 { - pass.Reportf(fn.Pos(), "%s should be niladic", fnName) - } - if results := fn.Type.Results; results != nil && len(results.List) != 0 { - pass.Reportf(fn.Pos(), "%s should return nothing", fnName) - } - if tparams := fn.Type.TypeParams; tparams != nil && len(tparams.List) > 0 { - pass.Reportf(fn.Pos(), "%s should not have type params", fnName) - } - - if fnName == "Example" { - // Nothing more to do. - return - } - - var ( - exName = strings.TrimPrefix(fnName, "Example") - elems = strings.SplitN(exName, "_", 3) - ident = elems[0] - objs = lookup(pass.Pkg, ident) - ) - if ident != "" && len(objs) == 0 { - // Check ExampleFoo and ExampleBadFoo. - pass.Reportf(fn.Pos(), "%s refers to unknown identifier: %s", fnName, ident) - // Abort since obj is absent and no subsequent checks can be performed. - return - } - if len(elems) < 2 { - // Nothing more to do. - return - } - - if ident == "" { - // Check Example_suffix and Example_BadSuffix. - if residual := strings.TrimPrefix(exName, "_"); !isExampleSuffix(residual) { - pass.Reportf(fn.Pos(), "%s has malformed example suffix: %s", fnName, residual) - } - return - } - - mmbr := elems[1] - if !isExampleSuffix(mmbr) { - // Check ExampleFoo_Method and ExampleFoo_BadMethod. - found := false - // Check if Foo.Method exists in this package or its imports. - for _, obj := range objs { - if obj, _, _ := types.LookupFieldOrMethod(obj.Type(), true, obj.Pkg(), mmbr); obj != nil { - found = true - break - } - } - if !found { - pass.Reportf(fn.Pos(), "%s refers to unknown field or method: %s.%s", fnName, ident, mmbr) - } - } - if len(elems) == 3 && !isExampleSuffix(elems[2]) { - // Check ExampleFoo_Method_suffix and ExampleFoo_Method_Badsuffix. - pass.Reportf(fn.Pos(), "%s has malformed example suffix: %s", fnName, elems[2]) - } -} - -func checkTest(pass *analysis.Pass, fn *ast.FuncDecl, prefix string) { - // Want functions with 0 results and 1 parameter. - if fn.Type.Results != nil && len(fn.Type.Results.List) > 0 || - fn.Type.Params == nil || - len(fn.Type.Params.List) != 1 || - len(fn.Type.Params.List[0].Names) > 1 { - return - } - - // The param must look like a *testing.T or *testing.B. - if !isTestParam(fn.Type.Params.List[0].Type, prefix[:1]) { - return - } - - if tparams := fn.Type.TypeParams; tparams != nil && len(tparams.List) > 0 { - // Note: cmd/go/internal/load also errors about TestXXX and BenchmarkXXX functions with type parameters. - // We have currently decided to also warn before compilation/package loading. This can help users in IDEs. - // TODO(adonovan): use ReportRangef(tparams). - pass.Reportf(fn.Pos(), "%s has type parameters: it will not be run by go test as a %sXXX function", fn.Name.Name, prefix) - } - - if !isTestSuffix(fn.Name.Name[len(prefix):]) { - // TODO(adonovan): use ReportRangef(fn.Name). 
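Two hypothetical declarations (in a _test.go file) of the kind the naming and signature checks above report; identifiers are invented for illustration:

    package example

    import "testing"

    // Reported: the first letter after "Test" must not be lowercase,
    // so go test would silently skip this function.
    func Testexample(t *testing.T) {}

    // Reported: example functions should be niladic and return nothing.
    func ExampleParse(s string) error { return nil }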
- pass.Reportf(fn.Pos(), "%s has malformed name: first letter after '%s' must not be lowercase", fn.Name.Name, prefix) - } -} diff --git a/vendor/golang.org/x/tools/go/analysis/passes/timeformat/doc.go b/vendor/golang.org/x/tools/go/analysis/passes/timeformat/doc.go deleted file mode 100644 index 5c665b298..000000000 --- a/vendor/golang.org/x/tools/go/analysis/passes/timeformat/doc.go +++ /dev/null @@ -1,15 +0,0 @@ -// Copyright 2023 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Package timeformat defines an Analyzer that checks for the use -// of time.Format or time.Parse calls with a bad format. -// -// # Analyzer timeformat -// -// timeformat: check for calls of (time.Time).Format or time.Parse with 2006-02-01 -// -// The timeformat checker looks for time formats with the 2006-02-01 (yyyy-dd-mm) -// format. Internationally, "yyyy-dd-mm" does not occur in common calendar date -// standards, and so it is more likely that 2006-01-02 (yyyy-mm-dd) was intended. -package timeformat diff --git a/vendor/golang.org/x/tools/go/analysis/passes/timeformat/timeformat.go b/vendor/golang.org/x/tools/go/analysis/passes/timeformat/timeformat.go deleted file mode 100644 index 4a6c6b8bc..000000000 --- a/vendor/golang.org/x/tools/go/analysis/passes/timeformat/timeformat.go +++ /dev/null @@ -1,120 +0,0 @@ -// Copyright 2022 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Package timeformat defines an Analyzer that checks for the use -// of time.Format or time.Parse calls with a bad format. -package timeformat - -import ( - _ "embed" - "go/ast" - "go/constant" - "go/token" - "go/types" - "strings" - - "golang.org/x/tools/go/analysis" - "golang.org/x/tools/go/analysis/passes/inspect" - "golang.org/x/tools/go/analysis/passes/internal/analysisutil" - "golang.org/x/tools/go/ast/inspector" - "golang.org/x/tools/go/types/typeutil" -) - -const badFormat = "2006-02-01" -const goodFormat = "2006-01-02" - -//go:embed doc.go -var doc string - -var Analyzer = &analysis.Analyzer{ - Name: "timeformat", - Doc: analysisutil.MustExtractDoc(doc, "timeformat"), - URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/timeformat", - Requires: []*analysis.Analyzer{inspect.Analyzer}, - Run: run, -} - -func run(pass *analysis.Pass) (interface{}, error) { - // Note: (time.Time).Format is a method and can be a typeutil.Callee - // without directly importing "time". So we cannot just skip this package - // when !analysisutil.Imports(pass.Pkg, "time"). - // TODO(taking): Consider using a prepass to collect typeutil.Callees. - - inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) - - nodeFilter := []ast.Node{ - (*ast.CallExpr)(nil), - } - inspect.Preorder(nodeFilter, func(n ast.Node) { - call := n.(*ast.CallExpr) - fn, ok := typeutil.Callee(pass.TypesInfo, call).(*types.Func) - if !ok { - return - } - if !isTimeDotFormat(fn) && !isTimeDotParse(fn) { - return - } - if len(call.Args) > 0 { - arg := call.Args[0] - badAt := badFormatAt(pass.TypesInfo, arg) - - if badAt > -1 { - // Check if it's a literal string, otherwise we can't suggest a fix. 
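A short, hypothetical sketch of the two layout strings the timeformat documentation above distinguishes:

    package example

    import (
    	"fmt"
    	"time"
    )

    func demo(t time.Time) {
    	fmt.Println(t.Format("2006-02-01")) // reported: yyyy-dd-mm, almost certainly unintended
    	fmt.Println(t.Format("2006-01-02")) // the suggested yyyy-mm-dd layout
    }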
- if _, ok := arg.(*ast.BasicLit); ok { - pos := int(arg.Pos()) + badAt + 1 // +1 to skip the " or ` - end := pos + len(badFormat) - - pass.Report(analysis.Diagnostic{ - Pos: token.Pos(pos), - End: token.Pos(end), - Message: badFormat + " should be " + goodFormat, - SuggestedFixes: []analysis.SuggestedFix{{ - Message: "Replace " + badFormat + " with " + goodFormat, - TextEdits: []analysis.TextEdit{{ - Pos: token.Pos(pos), - End: token.Pos(end), - NewText: []byte(goodFormat), - }}, - }}, - }) - } else { - pass.Reportf(arg.Pos(), badFormat+" should be "+goodFormat) - } - } - } - }) - return nil, nil -} - -func isTimeDotFormat(f *types.Func) bool { - if f.Name() != "Format" || f.Pkg() == nil || f.Pkg().Path() != "time" { - return false - } - // Verify that the receiver is time.Time. - recv := f.Type().(*types.Signature).Recv() - return recv != nil && analysisutil.IsNamedType(recv.Type(), "time", "Time") -} - -func isTimeDotParse(f *types.Func) bool { - return analysisutil.IsFunctionNamed(f, "time", "Parse") -} - -// badFormatAt return the start of a bad format in e or -1 if no bad format is found. -func badFormatAt(info *types.Info, e ast.Expr) int { - tv, ok := info.Types[e] - if !ok { // no type info, assume good - return -1 - } - - t, ok := tv.Type.(*types.Basic) // sic, no unalias - if !ok || t.Info()&types.IsString == 0 { - return -1 - } - - if tv.Value == nil { - return -1 - } - - return strings.Index(constant.StringVal(tv.Value), badFormat) -} diff --git a/vendor/golang.org/x/tools/go/analysis/passes/unmarshal/doc.go b/vendor/golang.org/x/tools/go/analysis/passes/unmarshal/doc.go deleted file mode 100644 index 5781bbd32..000000000 --- a/vendor/golang.org/x/tools/go/analysis/passes/unmarshal/doc.go +++ /dev/null @@ -1,14 +0,0 @@ -// Copyright 2023 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// The unmarshal package defines an Analyzer that checks for passing -// non-pointer or non-interface types to unmarshal and decode functions. -// -// # Analyzer unmarshal -// -// unmarshal: report passing non-pointer or non-interface values to unmarshal -// -// The unmarshal analysis reports calls to functions such as json.Unmarshal -// in which the argument type is not a pointer or an interface. -package unmarshal diff --git a/vendor/golang.org/x/tools/go/analysis/passes/unmarshal/unmarshal.go b/vendor/golang.org/x/tools/go/analysis/passes/unmarshal/unmarshal.go deleted file mode 100644 index a7889fa45..000000000 --- a/vendor/golang.org/x/tools/go/analysis/passes/unmarshal/unmarshal.go +++ /dev/null @@ -1,103 +0,0 @@ -// Copyright 2018 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
- -package unmarshal - -import ( - _ "embed" - "go/ast" - "go/types" - - "golang.org/x/tools/go/analysis" - "golang.org/x/tools/go/analysis/passes/inspect" - "golang.org/x/tools/go/analysis/passes/internal/analysisutil" - "golang.org/x/tools/go/ast/inspector" - "golang.org/x/tools/go/types/typeutil" - "golang.org/x/tools/internal/typesinternal" -) - -//go:embed doc.go -var doc string - -var Analyzer = &analysis.Analyzer{ - Name: "unmarshal", - Doc: analysisutil.MustExtractDoc(doc, "unmarshal"), - URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/unmarshal", - Requires: []*analysis.Analyzer{inspect.Analyzer}, - Run: run, -} - -func run(pass *analysis.Pass) (interface{}, error) { - switch pass.Pkg.Path() { - case "encoding/gob", "encoding/json", "encoding/xml", "encoding/asn1": - // These packages know how to use their own APIs. - // Sometimes they are testing what happens to incorrect programs. - return nil, nil - } - - // Note: (*"encoding/json".Decoder).Decode, (* "encoding/gob".Decoder).Decode - // and (* "encoding/xml".Decoder).Decode are methods and can be a typeutil.Callee - // without directly importing their packages. So we cannot just skip this package - // when !analysisutil.Imports(pass.Pkg, "encoding/..."). - // TODO(taking): Consider using a prepass to collect typeutil.Callees. - - inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) - - nodeFilter := []ast.Node{ - (*ast.CallExpr)(nil), - } - inspect.Preorder(nodeFilter, func(n ast.Node) { - call := n.(*ast.CallExpr) - fn := typeutil.StaticCallee(pass.TypesInfo, call) - if fn == nil { - return // not a static call - } - - // Classify the callee (without allocating memory). - argidx := -1 - - recv := fn.Type().(*types.Signature).Recv() - if fn.Name() == "Unmarshal" && recv == nil { - // "encoding/json".Unmarshal - // "encoding/xml".Unmarshal - // "encoding/asn1".Unmarshal - switch fn.Pkg().Path() { - case "encoding/json", "encoding/xml", "encoding/asn1": - argidx = 1 // func([]byte, interface{}) - } - } else if fn.Name() == "Decode" && recv != nil { - // (*"encoding/json".Decoder).Decode - // (* "encoding/gob".Decoder).Decode - // (* "encoding/xml".Decoder).Decode - _, named := typesinternal.ReceiverNamed(recv) - if tname := named.Obj(); tname.Name() == "Decoder" { - switch tname.Pkg().Path() { - case "encoding/json", "encoding/xml", "encoding/gob": - argidx = 0 // func(interface{}) - } - } - } - if argidx < 0 { - return // not a function we are interested in - } - - if len(call.Args) < argidx+1 { - return // not enough arguments, e.g. called with return values of another function - } - - t := pass.TypesInfo.Types[call.Args[argidx]].Type - switch t.Underlying().(type) { - case *types.Pointer, *types.Interface, *types.TypeParam: - return - } - - switch argidx { - case 0: - pass.Reportf(call.Lparen, "call of %s passes non-pointer", fn.Name()) - case 1: - pass.Reportf(call.Lparen, "call of %s passes non-pointer as second argument", fn.Name()) - } - }) - return nil, nil -} diff --git a/vendor/golang.org/x/tools/go/analysis/passes/unreachable/doc.go b/vendor/golang.org/x/tools/go/analysis/passes/unreachable/doc.go deleted file mode 100644 index d17d0d944..000000000 --- a/vendor/golang.org/x/tools/go/analysis/passes/unreachable/doc.go +++ /dev/null @@ -1,14 +0,0 @@ -// Copyright 2023 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Package unreachable defines an Analyzer that checks for unreachable code. 
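A small, hypothetical illustration of the pointer requirement the unmarshal check above enforces (type and function names invented):

    package example

    import "encoding/json"

    type point struct{ X, Y int }

    func decode(data []byte) (point, error) {
    	var p point
    	// json.Unmarshal(data, p) would be reported: passing p by value means
    	// the decoded result is written into a copy and then discarded.
    	err := json.Unmarshal(data, &p) // pass a pointer instead
    	return p, err
    }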
-// -// # Analyzer unreachable -// -// unreachable: check for unreachable code -// -// The unreachable analyzer finds statements that execution can never reach -// because they are preceded by an return statement, a call to panic, an -// infinite loop, or similar constructs. -package unreachable diff --git a/vendor/golang.org/x/tools/go/analysis/passes/unreachable/unreachable.go b/vendor/golang.org/x/tools/go/analysis/passes/unreachable/unreachable.go deleted file mode 100644 index b810db7ee..000000000 --- a/vendor/golang.org/x/tools/go/analysis/passes/unreachable/unreachable.go +++ /dev/null @@ -1,324 +0,0 @@ -// Copyright 2013 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package unreachable - -// TODO(adonovan): use the new cfg package, which is more precise. - -import ( - _ "embed" - "go/ast" - "go/token" - "log" - - "golang.org/x/tools/go/analysis" - "golang.org/x/tools/go/analysis/passes/inspect" - "golang.org/x/tools/go/analysis/passes/internal/analysisutil" - "golang.org/x/tools/go/ast/inspector" -) - -//go:embed doc.go -var doc string - -var Analyzer = &analysis.Analyzer{ - Name: "unreachable", - Doc: analysisutil.MustExtractDoc(doc, "unreachable"), - URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/unreachable", - Requires: []*analysis.Analyzer{inspect.Analyzer}, - RunDespiteErrors: true, - Run: run, -} - -func run(pass *analysis.Pass) (interface{}, error) { - inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) - - nodeFilter := []ast.Node{ - (*ast.FuncDecl)(nil), - (*ast.FuncLit)(nil), - } - inspect.Preorder(nodeFilter, func(n ast.Node) { - var body *ast.BlockStmt - switch n := n.(type) { - case *ast.FuncDecl: - body = n.Body - case *ast.FuncLit: - body = n.Body - } - if body == nil { - return - } - d := &deadState{ - pass: pass, - hasBreak: make(map[ast.Stmt]bool), - hasGoto: make(map[string]bool), - labels: make(map[string]ast.Stmt), - } - d.findLabels(body) - d.reachable = true - d.findDead(body) - }) - return nil, nil -} - -type deadState struct { - pass *analysis.Pass - hasBreak map[ast.Stmt]bool - hasGoto map[string]bool - labels map[string]ast.Stmt - breakTarget ast.Stmt - - reachable bool -} - -// findLabels gathers information about the labels defined and used by stmt -// and about which statements break, whether a label is involved or not. -func (d *deadState) findLabels(stmt ast.Stmt) { - switch x := stmt.(type) { - default: - log.Fatalf("%s: internal error in findLabels: unexpected statement %T", d.pass.Fset.Position(x.Pos()), x) - - case *ast.AssignStmt, - *ast.BadStmt, - *ast.DeclStmt, - *ast.DeferStmt, - *ast.EmptyStmt, - *ast.ExprStmt, - *ast.GoStmt, - *ast.IncDecStmt, - *ast.ReturnStmt, - *ast.SendStmt: - // no statements inside - - case *ast.BlockStmt: - for _, stmt := range x.List { - d.findLabels(stmt) - } - - case *ast.BranchStmt: - switch x.Tok { - case token.GOTO: - if x.Label != nil { - d.hasGoto[x.Label.Name] = true - } - - case token.BREAK: - stmt := d.breakTarget - if x.Label != nil { - stmt = d.labels[x.Label.Name] - } - if stmt != nil { - d.hasBreak[stmt] = true - } - } - - case *ast.IfStmt: - d.findLabels(x.Body) - if x.Else != nil { - d.findLabels(x.Else) - } - - case *ast.LabeledStmt: - d.labels[x.Label.Name] = x.Stmt - d.findLabels(x.Stmt) - - // These cases are all the same, but the x.Body only works - // when the specific type of x is known, so the cases cannot - // be merged. 
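A minimal, hypothetical function containing the kind of statement the unreachable check above reports:

    package example

    func classify(n int) string {
    	if n < 0 {
    		return "negative"
    	}
    	return "non-negative"
    	return "unreachable" // reported: this statement follows a terminating return
    }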
- case *ast.ForStmt: - outer := d.breakTarget - d.breakTarget = x - d.findLabels(x.Body) - d.breakTarget = outer - - case *ast.RangeStmt: - outer := d.breakTarget - d.breakTarget = x - d.findLabels(x.Body) - d.breakTarget = outer - - case *ast.SelectStmt: - outer := d.breakTarget - d.breakTarget = x - d.findLabels(x.Body) - d.breakTarget = outer - - case *ast.SwitchStmt: - outer := d.breakTarget - d.breakTarget = x - d.findLabels(x.Body) - d.breakTarget = outer - - case *ast.TypeSwitchStmt: - outer := d.breakTarget - d.breakTarget = x - d.findLabels(x.Body) - d.breakTarget = outer - - case *ast.CommClause: - for _, stmt := range x.Body { - d.findLabels(stmt) - } - - case *ast.CaseClause: - for _, stmt := range x.Body { - d.findLabels(stmt) - } - } -} - -// findDead walks the statement looking for dead code. -// If d.reachable is false on entry, stmt itself is dead. -// When findDead returns, d.reachable tells whether the -// statement following stmt is reachable. -func (d *deadState) findDead(stmt ast.Stmt) { - // Is this a labeled goto target? - // If so, assume it is reachable due to the goto. - // This is slightly conservative, in that we don't - // check that the goto is reachable, so - // L: goto L - // will not provoke a warning. - // But it's good enough. - if x, isLabel := stmt.(*ast.LabeledStmt); isLabel && d.hasGoto[x.Label.Name] { - d.reachable = true - } - - if !d.reachable { - switch stmt.(type) { - case *ast.EmptyStmt: - // do not warn about unreachable empty statements - default: - d.pass.Report(analysis.Diagnostic{ - Pos: stmt.Pos(), - End: stmt.End(), - Message: "unreachable code", - SuggestedFixes: []analysis.SuggestedFix{{ - Message: "Remove", - TextEdits: []analysis.TextEdit{{ - Pos: stmt.Pos(), - End: stmt.End(), - }}, - }}, - }) - d.reachable = true // silence error about next statement - } - } - - switch x := stmt.(type) { - default: - log.Fatalf("%s: internal error in findDead: unexpected statement %T", d.pass.Fset.Position(x.Pos()), x) - - case *ast.AssignStmt, - *ast.BadStmt, - *ast.DeclStmt, - *ast.DeferStmt, - *ast.EmptyStmt, - *ast.GoStmt, - *ast.IncDecStmt, - *ast.SendStmt: - // no control flow - - case *ast.BlockStmt: - for _, stmt := range x.List { - d.findDead(stmt) - } - - case *ast.BranchStmt: - switch x.Tok { - case token.BREAK, token.GOTO, token.FALLTHROUGH: - d.reachable = false - case token.CONTINUE: - // NOTE: We accept "continue" statements as terminating. - // They are not necessary in the spec definition of terminating, - // because a continue statement cannot be the final statement - // before a return. But for the more general problem of syntactically - // identifying dead code, continue redirects control flow just - // like the other terminating statements. - d.reachable = false - } - - case *ast.ExprStmt: - // Call to panic? 
- call, ok := x.X.(*ast.CallExpr) - if ok { - name, ok := call.Fun.(*ast.Ident) - if ok && name.Name == "panic" && name.Obj == nil { - d.reachable = false - } - } - - case *ast.ForStmt: - d.findDead(x.Body) - d.reachable = x.Cond != nil || d.hasBreak[x] - - case *ast.IfStmt: - d.findDead(x.Body) - if x.Else != nil { - r := d.reachable - d.reachable = true - d.findDead(x.Else) - d.reachable = d.reachable || r - } else { - // might not have executed if statement - d.reachable = true - } - - case *ast.LabeledStmt: - d.findDead(x.Stmt) - - case *ast.RangeStmt: - d.findDead(x.Body) - d.reachable = true - - case *ast.ReturnStmt: - d.reachable = false - - case *ast.SelectStmt: - // NOTE: Unlike switch and type switch below, we don't care - // whether a select has a default, because a select without a - // default blocks until one of the cases can run. That's different - // from a switch without a default, which behaves like it has - // a default with an empty body. - anyReachable := false - for _, comm := range x.Body.List { - d.reachable = true - for _, stmt := range comm.(*ast.CommClause).Body { - d.findDead(stmt) - } - anyReachable = anyReachable || d.reachable - } - d.reachable = anyReachable || d.hasBreak[x] - - case *ast.SwitchStmt: - anyReachable := false - hasDefault := false - for _, cas := range x.Body.List { - cc := cas.(*ast.CaseClause) - if cc.List == nil { - hasDefault = true - } - d.reachable = true - for _, stmt := range cc.Body { - d.findDead(stmt) - } - anyReachable = anyReachable || d.reachable - } - d.reachable = anyReachable || d.hasBreak[x] || !hasDefault - - case *ast.TypeSwitchStmt: - anyReachable := false - hasDefault := false - for _, cas := range x.Body.List { - cc := cas.(*ast.CaseClause) - if cc.List == nil { - hasDefault = true - } - d.reachable = true - for _, stmt := range cc.Body { - d.findDead(stmt) - } - anyReachable = anyReachable || d.reachable - } - d.reachable = anyReachable || d.hasBreak[x] || !hasDefault - } -} diff --git a/vendor/golang.org/x/tools/go/analysis/passes/unsafeptr/doc.go b/vendor/golang.org/x/tools/go/analysis/passes/unsafeptr/doc.go deleted file mode 100644 index de10804cb..000000000 --- a/vendor/golang.org/x/tools/go/analysis/passes/unsafeptr/doc.go +++ /dev/null @@ -1,17 +0,0 @@ -// Copyright 2023 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Package unsafeptr defines an Analyzer that checks for invalid -// conversions of uintptr to unsafe.Pointer. -// -// # Analyzer unsafeptr -// -// unsafeptr: check for invalid conversions of uintptr to unsafe.Pointer -// -// The unsafeptr analyzer reports likely incorrect uses of unsafe.Pointer -// to convert integers to pointers. A conversion from uintptr to -// unsafe.Pointer is invalid if it implies that there is a uintptr-typed -// word in memory that holds a pointer value, because that word will be -// invisible to stack copying and to the garbage collector. -package unsafeptr diff --git a/vendor/golang.org/x/tools/go/analysis/passes/unsafeptr/unsafeptr.go b/vendor/golang.org/x/tools/go/analysis/passes/unsafeptr/unsafeptr.go deleted file mode 100644 index 14e4a6c1e..000000000 --- a/vendor/golang.org/x/tools/go/analysis/passes/unsafeptr/unsafeptr.go +++ /dev/null @@ -1,158 +0,0 @@ -// Copyright 2014 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
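For context on what the unsafeptr pass being removed here checks: a minimal, hand-written sketch (not taken from the vendored sources; the names bad and ok are illustrative) of the conversion pattern it reports versus the one it accepts.

package p

import "unsafe"

// Reported: the uintptr result is stored in a variable, so between the two
// conversions there is a uintptr-typed word holding a pointer value that the
// garbage collector and stack copier cannot see.
func bad(p unsafe.Pointer) unsafe.Pointer {
    u := uintptr(p) + 8
    return unsafe.Pointer(u) // possible misuse of unsafe.Pointer
}

// Accepted: conversion, arithmetic, and conversion back happen in a single
// expression, which matches the pointer-arithmetic rule that isSafeArith checks.
func ok(p unsafe.Pointer) unsafe.Pointer {
    return unsafe.Pointer(uintptr(p) + 8)
}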
- -// Package unsafeptr defines an Analyzer that checks for invalid -// conversions of uintptr to unsafe.Pointer. -package unsafeptr - -import ( - _ "embed" - "go/ast" - "go/token" - "go/types" - - "golang.org/x/tools/go/analysis" - "golang.org/x/tools/go/analysis/passes/inspect" - "golang.org/x/tools/go/analysis/passes/internal/analysisutil" - "golang.org/x/tools/go/ast/astutil" - "golang.org/x/tools/go/ast/inspector" - "golang.org/x/tools/internal/aliases" -) - -//go:embed doc.go -var doc string - -var Analyzer = &analysis.Analyzer{ - Name: "unsafeptr", - Doc: analysisutil.MustExtractDoc(doc, "unsafeptr"), - URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/unsafeptr", - Requires: []*analysis.Analyzer{inspect.Analyzer}, - Run: run, -} - -func run(pass *analysis.Pass) (interface{}, error) { - inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) - - nodeFilter := []ast.Node{ - (*ast.CallExpr)(nil), - (*ast.StarExpr)(nil), - (*ast.UnaryExpr)(nil), - } - inspect.Preorder(nodeFilter, func(n ast.Node) { - switch x := n.(type) { - case *ast.CallExpr: - if len(x.Args) == 1 && - hasBasicType(pass.TypesInfo, x.Fun, types.UnsafePointer) && - hasBasicType(pass.TypesInfo, x.Args[0], types.Uintptr) && - !isSafeUintptr(pass.TypesInfo, x.Args[0]) { - pass.ReportRangef(x, "possible misuse of unsafe.Pointer") - } - case *ast.StarExpr: - if t := pass.TypesInfo.Types[x].Type; isReflectHeader(t) { - pass.ReportRangef(x, "possible misuse of %s", t) - } - case *ast.UnaryExpr: - if x.Op != token.AND { - return - } - if t := pass.TypesInfo.Types[x.X].Type; isReflectHeader(t) { - pass.ReportRangef(x, "possible misuse of %s", t) - } - } - }) - return nil, nil -} - -// isSafeUintptr reports whether x - already known to be a uintptr - -// is safe to convert to unsafe.Pointer. -func isSafeUintptr(info *types.Info, x ast.Expr) bool { - // Check unsafe.Pointer safety rules according to - // https://golang.org/pkg/unsafe/#Pointer. - - switch x := astutil.Unparen(x).(type) { - case *ast.SelectorExpr: - // "(6) Conversion of a reflect.SliceHeader or - // reflect.StringHeader Data field to or from Pointer." - if x.Sel.Name != "Data" { - break - } - // reflect.SliceHeader and reflect.StringHeader are okay, - // but only if they are pointing at a real slice or string. - // It's not okay to do: - // var x SliceHeader - // x.Data = uintptr(unsafe.Pointer(...)) - // ... use x ... - // p := unsafe.Pointer(x.Data) - // because in the middle the garbage collector doesn't - // see x.Data as a pointer and so x.Data may be dangling - // by the time we get to the conversion at the end. - // For now approximate by saying that *Header is okay - // but Header is not. - pt, ok := aliases.Unalias(info.Types[x.X].Type).(*types.Pointer) - if ok && isReflectHeader(pt.Elem()) { - return true - } - - case *ast.CallExpr: - // "(5) Conversion of the result of reflect.Value.Pointer or - // reflect.Value.UnsafeAddr from uintptr to Pointer." - if len(x.Args) != 0 { - break - } - sel, ok := x.Fun.(*ast.SelectorExpr) - if !ok { - break - } - switch sel.Sel.Name { - case "Pointer", "UnsafeAddr": - if analysisutil.IsNamedType(info.Types[sel.X].Type, "reflect", "Value") { - return true - } - } - } - - // "(3) Conversion of a Pointer to a uintptr and back, with arithmetic." - return isSafeArith(info, x) -} - -// isSafeArith reports whether x is a pointer arithmetic expression that is safe -// to convert to unsafe.Pointer. 
-func isSafeArith(info *types.Info, x ast.Expr) bool { - switch x := astutil.Unparen(x).(type) { - case *ast.CallExpr: - // Base case: initial conversion from unsafe.Pointer to uintptr. - return len(x.Args) == 1 && - hasBasicType(info, x.Fun, types.Uintptr) && - hasBasicType(info, x.Args[0], types.UnsafePointer) - - case *ast.BinaryExpr: - // "It is valid both to add and to subtract offsets from a - // pointer in this way. It is also valid to use &^ to round - // pointers, usually for alignment." - switch x.Op { - case token.ADD, token.SUB, token.AND_NOT: - // TODO(mdempsky): Match compiler - // semantics. ADD allows a pointer on either - // side; SUB and AND_NOT don't care about RHS. - return isSafeArith(info, x.X) && !isSafeArith(info, x.Y) - } - } - - return false -} - -// hasBasicType reports whether x's type is a types.Basic with the given kind. -func hasBasicType(info *types.Info, x ast.Expr, kind types.BasicKind) bool { - t := info.Types[x].Type - if t != nil { - t = t.Underlying() - } - b, ok := t.(*types.Basic) - return ok && b.Kind() == kind -} - -// isReflectHeader reports whether t is reflect.SliceHeader or reflect.StringHeader. -func isReflectHeader(t types.Type) bool { - return analysisutil.IsNamedType(t, "reflect", "SliceHeader", "StringHeader") -} diff --git a/vendor/golang.org/x/tools/go/analysis/passes/unusedresult/doc.go b/vendor/golang.org/x/tools/go/analysis/passes/unusedresult/doc.go deleted file mode 100644 index a1bf4cf94..000000000 --- a/vendor/golang.org/x/tools/go/analysis/passes/unusedresult/doc.go +++ /dev/null @@ -1,19 +0,0 @@ -// Copyright 2023 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Package unusedresult defines an analyzer that checks for unused -// results of calls to certain pure functions. -// -// # Analyzer unusedresult -// -// unusedresult: check for unused results of calls to some functions -// -// Some functions like fmt.Errorf return a result and have no side -// effects, so it is always a mistake to discard the result. Other -// functions may return an error that must not be ignored, or a cleanup -// operation that must be called. This analyzer reports calls to -// functions like these when the result of the call is ignored. -// -// The set of functions may be controlled using flags. -package unusedresult diff --git a/vendor/golang.org/x/tools/go/analysis/passes/unusedresult/unusedresult.go b/vendor/golang.org/x/tools/go/analysis/passes/unusedresult/unusedresult.go deleted file mode 100644 index 76f42b052..000000000 --- a/vendor/golang.org/x/tools/go/analysis/passes/unusedresult/unusedresult.go +++ /dev/null @@ -1,161 +0,0 @@ -// Copyright 2015 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Package unusedresult defines an analyzer that checks for unused -// results of calls to certain functions. -package unusedresult - -// It is tempting to make this analysis inductive: for each function -// that tail-calls one of the functions that we check, check those -// functions too. However, just because you must use the result of -// fmt.Sprintf doesn't mean you need to use the result of every -// function that returns a formatted string: it may have other results -// and effects. 
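As a quick illustration of what the unusedresult pass deleted here reports (a hedged sketch, not part of the vendored code; wrap is an illustrative name): fmt.Errorf appears in the analyzer's default funcs list, so a call whose result is discarded is diagnosed.

package p

import "fmt"

func wrap(err error) error {
    // Reported as "result of fmt.Errorf call not used": the constructed
    // error value is silently dropped.
    fmt.Errorf("operation failed: %v", err)

    // Fine: the result is returned, so nothing is reported.
    return fmt.Errorf("operation failed: %v", err)
}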
- -import ( - _ "embed" - "go/ast" - "go/token" - "go/types" - "sort" - "strings" - - "golang.org/x/tools/go/analysis" - "golang.org/x/tools/go/analysis/passes/inspect" - "golang.org/x/tools/go/analysis/passes/internal/analysisutil" - "golang.org/x/tools/go/ast/astutil" - "golang.org/x/tools/go/ast/inspector" - "golang.org/x/tools/go/types/typeutil" -) - -//go:embed doc.go -var doc string - -var Analyzer = &analysis.Analyzer{ - Name: "unusedresult", - Doc: analysisutil.MustExtractDoc(doc, "unusedresult"), - URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/unusedresult", - Requires: []*analysis.Analyzer{inspect.Analyzer}, - Run: run, -} - -// flags -var funcs, stringMethods stringSetFlag - -func init() { - // TODO(adonovan): provide a comment or declaration syntax to - // allow users to add their functions to this set using facts. - // For example: - // - // func ignoringTheErrorWouldBeVeryBad() error { - // type mustUseResult struct{} // enables vet unusedresult check - // ... - // } - // - // ignoringTheErrorWouldBeVeryBad() // oops - // - - // List standard library functions here. - // The context.With{Cancel,Deadline,Timeout} entries are - // effectively redundant wrt the lostcancel analyzer. - funcs = stringSetFlag{ - "context.WithCancel": true, - "context.WithDeadline": true, - "context.WithTimeout": true, - "context.WithValue": true, - "errors.New": true, - "fmt.Errorf": true, - "fmt.Sprint": true, - "fmt.Sprintf": true, - "slices.Clip": true, - "slices.Compact": true, - "slices.CompactFunc": true, - "slices.Delete": true, - "slices.DeleteFunc": true, - "slices.Grow": true, - "slices.Insert": true, - "slices.Replace": true, - "sort.Reverse": true, - } - Analyzer.Flags.Var(&funcs, "funcs", - "comma-separated list of functions whose results must be used") - - stringMethods.Set("Error,String") - Analyzer.Flags.Var(&stringMethods, "stringmethods", - "comma-separated list of names of methods of type func() string whose results must be used") -} - -func run(pass *analysis.Pass) (interface{}, error) { - inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) - - // Split functions into (pkg, name) pairs to save allocation later. - pkgFuncs := make(map[[2]string]bool, len(funcs)) - for s := range funcs { - if i := strings.LastIndexByte(s, '.'); i > 0 { - pkgFuncs[[2]string{s[:i], s[i+1:]}] = true - } - } - - nodeFilter := []ast.Node{ - (*ast.ExprStmt)(nil), - } - inspect.Preorder(nodeFilter, func(n ast.Node) { - call, ok := astutil.Unparen(n.(*ast.ExprStmt).X).(*ast.CallExpr) - if !ok { - return // not a call statement - } - - // Call to function or method? - fn, ok := typeutil.Callee(pass.TypesInfo, call).(*types.Func) - if !ok { - return // e.g. var or builtin - } - if sig := fn.Type().(*types.Signature); sig.Recv() != nil { - // method (e.g. foo.String()) - if types.Identical(sig, sigNoArgsStringResult) { - if stringMethods[fn.Name()] { - pass.Reportf(call.Lparen, "result of (%s).%s call not used", - sig.Recv().Type(), fn.Name()) - } - } - } else { - // package-level function (e.g. 
fmt.Errorf) - if pkgFuncs[[2]string{fn.Pkg().Path(), fn.Name()}] { - pass.Reportf(call.Lparen, "result of %s.%s call not used", - fn.Pkg().Path(), fn.Name()) - } - } - }) - return nil, nil -} - -// func() string -var sigNoArgsStringResult = types.NewSignature(nil, nil, - types.NewTuple(types.NewVar(token.NoPos, nil, "", types.Typ[types.String])), - false) - -type stringSetFlag map[string]bool - -func (ss *stringSetFlag) String() string { - var items []string - for item := range *ss { - items = append(items, item) - } - sort.Strings(items) - return strings.Join(items, ",") -} - -func (ss *stringSetFlag) Set(s string) error { - m := make(map[string]bool) // clobber previous value - if s != "" { - for _, name := range strings.Split(s, ",") { - if name == "" { - continue // TODO: report error? proceed? - } - m[name] = true - } - } - *ss = m - return nil -} diff --git a/vendor/golang.org/x/tools/go/analysis/passes/unusedwrite/doc.go b/vendor/golang.org/x/tools/go/analysis/passes/unusedwrite/doc.go deleted file mode 100644 index de10dc8c8..000000000 --- a/vendor/golang.org/x/tools/go/analysis/passes/unusedwrite/doc.go +++ /dev/null @@ -1,34 +0,0 @@ -// Copyright 2023 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Package unusedwrite checks for unused writes to the elements of a struct or array object. -// -// # Analyzer unusedwrite -// -// unusedwrite: checks for unused writes -// -// The analyzer reports instances of writes to struct fields and -// arrays that are never read. Specifically, when a struct object -// or an array is copied, its elements are copied implicitly by -// the compiler, and any element write to this copy does nothing -// with the original object. -// -// For example: -// -// type T struct { x int } -// -// func f(input []T) { -// for i, v := range input { // v is a copy -// v.x = i // unused write to field x -// } -// } -// -// Another example is about non-pointer receiver: -// -// type T struct { x int } -// -// func (t T) f() { // t is a copy -// t.x = i // unused write to field x -// } -package unusedwrite diff --git a/vendor/golang.org/x/tools/go/analysis/passes/unusedwrite/unusedwrite.go b/vendor/golang.org/x/tools/go/analysis/passes/unusedwrite/unusedwrite.go deleted file mode 100644 index a01cbb8f8..000000000 --- a/vendor/golang.org/x/tools/go/analysis/passes/unusedwrite/unusedwrite.go +++ /dev/null @@ -1,171 +0,0 @@ -// Copyright 2021 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package unusedwrite - -import ( - _ "embed" - "fmt" - "go/types" - - "golang.org/x/tools/go/analysis" - "golang.org/x/tools/go/analysis/passes/buildssa" - "golang.org/x/tools/go/analysis/passes/internal/analysisutil" - "golang.org/x/tools/go/ssa" - "golang.org/x/tools/internal/aliases" -) - -//go:embed doc.go -var doc string - -// Analyzer reports instances of writes to struct fields and arrays -// that are never read. 
-var Analyzer = &analysis.Analyzer{ - Name: "unusedwrite", - Doc: analysisutil.MustExtractDoc(doc, "unusedwrite"), - URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/unusedwrite", - Requires: []*analysis.Analyzer{buildssa.Analyzer}, - Run: run, -} - -func run(pass *analysis.Pass) (interface{}, error) { - ssainput := pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA) - for _, fn := range ssainput.SrcFuncs { - // TODO(taking): Iterate over fn._Instantiations() once exported. If so, have 1 report per Pos(). - reports := checkStores(fn) - for _, store := range reports { - switch addr := store.Addr.(type) { - case *ssa.FieldAddr: - pass.Reportf(store.Pos(), - "unused write to field %s", - getFieldName(addr.X.Type(), addr.Field)) - case *ssa.IndexAddr: - pass.Reportf(store.Pos(), - "unused write to array index %s", addr.Index) - } - } - } - return nil, nil -} - -// checkStores returns *Stores in fn whose address is written to but never used. -func checkStores(fn *ssa.Function) []*ssa.Store { - var reports []*ssa.Store - // Visit each block. No need to visit fn.Recover. - for _, blk := range fn.Blocks { - for _, instr := range blk.Instrs { - // Identify writes. - if store, ok := instr.(*ssa.Store); ok { - // Consider field/index writes to an object whose elements are copied and not shared. - // MapUpdate is excluded since only the reference of the map is copied. - switch addr := store.Addr.(type) { - case *ssa.FieldAddr: - if isDeadStore(store, addr.X, addr) { - reports = append(reports, store) - } - case *ssa.IndexAddr: - if isDeadStore(store, addr.X, addr) { - reports = append(reports, store) - } - } - } - } - } - return reports -} - -// isDeadStore determines whether a field/index write to an object is dead. -// Argument "obj" is the object, and "addr" is the instruction fetching the field/index. -func isDeadStore(store *ssa.Store, obj ssa.Value, addr ssa.Instruction) bool { - // Consider only struct or array objects. - if !hasStructOrArrayType(obj) { - return false - } - // Check liveness: if the value is used later, then don't report the write. - for _, ref := range *obj.Referrers() { - if ref == store || ref == addr { - continue - } - switch ins := ref.(type) { - case ssa.CallInstruction: - return false - case *ssa.FieldAddr: - // Check whether the same field is used. - if ins.X == obj { - if faddr, ok := addr.(*ssa.FieldAddr); ok { - if faddr.Field == ins.Field { - return false - } - } - } - // Otherwise another field is used, and this usage doesn't count. - continue - case *ssa.IndexAddr: - if ins.X == obj { - return false - } - continue // Otherwise another object is used - case *ssa.Lookup: - if ins.X == obj { - return false - } - continue // Otherwise another object is used - case *ssa.Store: - if ins.Val == obj { - return false - } - continue // Otherwise other object is stored - default: // consider live if the object is used in any other instruction - return false - } - } - return true -} - -// isStructOrArray returns whether the underlying type is struct or array. -func isStructOrArray(tp types.Type) bool { - switch tp.Underlying().(type) { - case *types.Array: - return true - case *types.Struct: - return true - } - return false -} - -// hasStructOrArrayType returns whether a value is of struct or array type. 
-func hasStructOrArrayType(v ssa.Value) bool { - if instr, ok := v.(ssa.Instruction); ok { - if alloc, ok := instr.(*ssa.Alloc); ok { - // Check the element type of an allocated register (which always has pointer type) - // e.g., for - // func (t T) f() { ...} - // the receiver object is of type *T: - // t0 = local T (t) *T - if tp, ok := aliases.Unalias(alloc.Type()).(*types.Pointer); ok { - return isStructOrArray(tp.Elem()) - } - return false - } - } - return isStructOrArray(v.Type()) -} - -// getFieldName returns the name of a field in a struct. -// It the field is not found, then it returns the string format of the index. -// -// For example, for struct T {x int, y int), getFieldName(*T, 1) returns "y". -func getFieldName(tp types.Type, index int) string { - // TODO(adonovan): use - // stp, ok := typeparams.Deref(tp).Underlying().(*types.Struct); ok { - // when Deref is defined. But see CL 565456 for a better fix. - - if pt, ok := aliases.Unalias(tp).(*types.Pointer); ok { - tp = pt.Elem() - } - if stp, ok := tp.Underlying().(*types.Struct); ok { - return stp.Field(index).Name() - } - return fmt.Sprintf("%d", index) -} diff --git a/vendor/golang.org/x/tools/go/analysis/validate.go b/vendor/golang.org/x/tools/go/analysis/validate.go deleted file mode 100644 index 4f2c40456..000000000 --- a/vendor/golang.org/x/tools/go/analysis/validate.go +++ /dev/null @@ -1,137 +0,0 @@ -// Copyright 2018 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package analysis - -import ( - "fmt" - "reflect" - "strings" - "unicode" -) - -// Validate reports an error if any of the analyzers are misconfigured. -// Checks include: -// that the name is a valid identifier; -// that the Doc is not empty; -// that the Run is non-nil; -// that the Requires graph is acyclic; -// that analyzer fact types are unique; -// that each fact type is a pointer. -// -// Analyzer names need not be unique, though this may be confusing. -func Validate(analyzers []*Analyzer) error { - // Map each fact type to its sole generating analyzer. - factTypes := make(map[reflect.Type]*Analyzer) - - // Traverse the Requires graph, depth first. 
- const ( - white = iota - grey - black - finished - ) - color := make(map[*Analyzer]uint8) - var visit func(a *Analyzer) error - visit = func(a *Analyzer) error { - if a == nil { - return fmt.Errorf("nil *Analyzer") - } - if color[a] == white { - color[a] = grey - - // names - if !validIdent(a.Name) { - return fmt.Errorf("invalid analyzer name %q", a) - } - - if a.Doc == "" { - return fmt.Errorf("analyzer %q is undocumented", a) - } - - if a.Run == nil { - return fmt.Errorf("analyzer %q has nil Run", a) - } - // fact types - for _, f := range a.FactTypes { - if f == nil { - return fmt.Errorf("analyzer %s has nil FactType", a) - } - t := reflect.TypeOf(f) - if prev := factTypes[t]; prev != nil { - return fmt.Errorf("fact type %s registered by two analyzers: %v, %v", - t, a, prev) - } - if t.Kind() != reflect.Ptr { - return fmt.Errorf("%s: fact type %s is not a pointer", a, t) - } - factTypes[t] = a - } - - // recursion - for _, req := range a.Requires { - if err := visit(req); err != nil { - return err - } - } - color[a] = black - } - - if color[a] == grey { - stack := []*Analyzer{a} - inCycle := map[string]bool{} - for len(stack) > 0 { - current := stack[len(stack)-1] - stack = stack[:len(stack)-1] - if color[current] == grey && !inCycle[current.Name] { - inCycle[current.Name] = true - stack = append(stack, current.Requires...) - } - } - return &CycleInRequiresGraphError{AnalyzerNames: inCycle} - } - - return nil - } - for _, a := range analyzers { - if err := visit(a); err != nil { - return err - } - } - - // Reject duplicates among analyzers. - // Precondition: color[a] == black. - // Postcondition: color[a] == finished. - for _, a := range analyzers { - if color[a] == finished { - return fmt.Errorf("duplicate analyzer: %s", a.Name) - } - color[a] = finished - } - - return nil -} - -func validIdent(name string) bool { - for i, r := range name { - if !(r == '_' || unicode.IsLetter(r) || i > 0 && unicode.IsDigit(r)) { - return false - } - } - return name != "" -} - -type CycleInRequiresGraphError struct { - AnalyzerNames map[string]bool -} - -func (e *CycleInRequiresGraphError) Error() string { - var b strings.Builder - b.WriteString("cycle detected involving the following analyzers:") - for n := range e.AnalyzerNames { - b.WriteByte(' ') - b.WriteString(n) - } - return b.String() -} diff --git a/vendor/golang.org/x/tools/go/buildutil/allpackages.go b/vendor/golang.org/x/tools/go/buildutil/allpackages.go deleted file mode 100644 index dfb8cd6c7..000000000 --- a/vendor/golang.org/x/tools/go/buildutil/allpackages.go +++ /dev/null @@ -1,195 +0,0 @@ -// Copyright 2014 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Package buildutil provides utilities related to the go/build -// package in the standard library. -// -// All I/O is done via the build.Context file system interface, which must -// be concurrency-safe. -package buildutil // import "golang.org/x/tools/go/buildutil" - -import ( - "go/build" - "os" - "path/filepath" - "sort" - "strings" - "sync" -) - -// AllPackages returns the package path of each Go package in any source -// directory of the specified build context (e.g. $GOROOT or an element -// of $GOPATH). Errors are ignored. The results are sorted. -// All package paths are canonical, and thus may contain "/vendor/". -// -// The result may include import paths for directories that contain no -// *.go files, such as "archive" (in $GOROOT/src). 
-// -// All I/O is done via the build.Context file system interface, -// which must be concurrency-safe. -func AllPackages(ctxt *build.Context) []string { - var list []string - ForEachPackage(ctxt, func(pkg string, _ error) { - list = append(list, pkg) - }) - sort.Strings(list) - return list -} - -// ForEachPackage calls the found function with the package path of -// each Go package it finds in any source directory of the specified -// build context (e.g. $GOROOT or an element of $GOPATH). -// All package paths are canonical, and thus may contain "/vendor/". -// -// If the package directory exists but could not be read, the second -// argument to the found function provides the error. -// -// All I/O is done via the build.Context file system interface, -// which must be concurrency-safe. -func ForEachPackage(ctxt *build.Context, found func(importPath string, err error)) { - ch := make(chan item) - - var wg sync.WaitGroup - for _, root := range ctxt.SrcDirs() { - root := root - wg.Add(1) - go func() { - allPackages(ctxt, root, ch) - wg.Done() - }() - } - go func() { - wg.Wait() - close(ch) - }() - - // All calls to found occur in the caller's goroutine. - for i := range ch { - found(i.importPath, i.err) - } -} - -type item struct { - importPath string - err error // (optional) -} - -// We use a process-wide counting semaphore to limit -// the number of parallel calls to ReadDir. -var ioLimit = make(chan bool, 20) - -func allPackages(ctxt *build.Context, root string, ch chan<- item) { - root = filepath.Clean(root) + string(os.PathSeparator) - - var wg sync.WaitGroup - - var walkDir func(dir string) - walkDir = func(dir string) { - // Avoid .foo, _foo, and testdata directory trees. - base := filepath.Base(dir) - if base == "" || base[0] == '.' || base[0] == '_' || base == "testdata" { - return - } - - pkg := filepath.ToSlash(strings.TrimPrefix(dir, root)) - - // Prune search if we encounter any of these import paths. - switch pkg { - case "builtin": - return - } - - ioLimit <- true - files, err := ReadDir(ctxt, dir) - <-ioLimit - if pkg != "" || err != nil { - ch <- item{pkg, err} - } - for _, fi := range files { - fi := fi - if fi.IsDir() { - wg.Add(1) - go func() { - walkDir(filepath.Join(dir, fi.Name())) - wg.Done() - }() - } - } - } - - walkDir(root) - wg.Wait() -} - -// ExpandPatterns returns the set of packages matched by patterns, -// which may have the following forms: -// -// golang.org/x/tools/cmd/guru # a single package -// golang.org/x/tools/... # all packages beneath dir -// ... # the entire workspace. -// -// Order is significant: a pattern preceded by '-' removes matching -// packages from the set. For example, these patterns match all encoding -// packages except encoding/xml: -// -// encoding/... -encoding/xml -// -// A trailing slash in a pattern is ignored. (Path components of Go -// package names are separated by slash, not the platform's path separator.) -func ExpandPatterns(ctxt *build.Context, patterns []string) map[string]bool { - // TODO(adonovan): support other features of 'go list': - // - "std"/"cmd"/"all" meta-packages - // - "..." not at the end of a pattern - // - relative patterns using "./" or "../" prefix - - pkgs := make(map[string]bool) - doPkg := func(pkg string, neg bool) { - if neg { - delete(pkgs, pkg) - } else { - pkgs[pkg] = true - } - } - - // Scan entire workspace if wildcards are present. - // TODO(adonovan): opt: scan only the necessary subtrees of the workspace. 
- var all []string - for _, arg := range patterns { - if strings.HasSuffix(arg, "...") { - all = AllPackages(ctxt) - break - } - } - - for _, arg := range patterns { - if arg == "" { - continue - } - - neg := arg[0] == '-' - if neg { - arg = arg[1:] - } - - if arg == "..." { - // ... matches all packages - for _, pkg := range all { - doPkg(pkg, neg) - } - } else if dir := strings.TrimSuffix(arg, "/..."); dir != arg { - // dir/... matches all packages beneath dir - for _, pkg := range all { - if strings.HasPrefix(pkg, dir) && - (len(pkg) == len(dir) || pkg[len(dir)] == '/') { - doPkg(pkg, neg) - } - } - } else { - // single package - doPkg(strings.TrimSuffix(arg, "/"), neg) - } - } - - return pkgs -} diff --git a/vendor/golang.org/x/tools/go/buildutil/fakecontext.go b/vendor/golang.org/x/tools/go/buildutil/fakecontext.go deleted file mode 100644 index 763d18809..000000000 --- a/vendor/golang.org/x/tools/go/buildutil/fakecontext.go +++ /dev/null @@ -1,111 +0,0 @@ -// Copyright 2015 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package buildutil - -import ( - "fmt" - "go/build" - "io" - "os" - "path" - "path/filepath" - "sort" - "strings" - "time" -) - -// FakeContext returns a build.Context for the fake file tree specified -// by pkgs, which maps package import paths to a mapping from file base -// names to contents. -// -// The fake Context has a GOROOT of "/go" and no GOPATH, and overrides -// the necessary file access methods to read from memory instead of the -// real file system. -// -// Unlike a real file tree, the fake one has only two levels---packages -// and files---so ReadDir("/go/src/") returns all packages under -// /go/src/ including, for instance, "math" and "math/big". -// ReadDir("/go/src/math/big") would return all the files in the -// "math/big" package. -func FakeContext(pkgs map[string]map[string]string) *build.Context { - clean := func(filename string) string { - f := path.Clean(filepath.ToSlash(filename)) - // Removing "/go/src" while respecting segment - // boundaries has this unfortunate corner case: - if f == "/go/src" { - return "" - } - return strings.TrimPrefix(f, "/go/src/") - } - - ctxt := build.Default // copy - ctxt.GOROOT = "/go" - ctxt.GOPATH = "" - ctxt.Compiler = "gc" - ctxt.IsDir = func(dir string) bool { - dir = clean(dir) - if dir == "" { - return true // needed by (*build.Context).SrcDirs - } - return pkgs[dir] != nil - } - ctxt.ReadDir = func(dir string) ([]os.FileInfo, error) { - dir = clean(dir) - var fis []os.FileInfo - if dir == "" { - // enumerate packages - for importPath := range pkgs { - fis = append(fis, fakeDirInfo(importPath)) - } - } else { - // enumerate files of package - for basename := range pkgs[dir] { - fis = append(fis, fakeFileInfo(basename)) - } - } - sort.Sort(byName(fis)) - return fis, nil - } - ctxt.OpenFile = func(filename string) (io.ReadCloser, error) { - filename = clean(filename) - dir, base := path.Split(filename) - content, ok := pkgs[path.Clean(dir)][base] - if !ok { - return nil, fmt.Errorf("file not found: %s", filename) - } - return io.NopCloser(strings.NewReader(content)), nil - } - ctxt.IsAbsPath = func(path string) bool { - path = filepath.ToSlash(path) - // Don't rely on the default (filepath.Path) since on - // Windows, it reports virtual paths as non-absolute. 
- return strings.HasPrefix(path, "/") - } - return &ctxt -} - -type byName []os.FileInfo - -func (s byName) Len() int { return len(s) } -func (s byName) Swap(i, j int) { s[i], s[j] = s[j], s[i] } -func (s byName) Less(i, j int) bool { return s[i].Name() < s[j].Name() } - -type fakeFileInfo string - -func (fi fakeFileInfo) Name() string { return string(fi) } -func (fakeFileInfo) Sys() interface{} { return nil } -func (fakeFileInfo) ModTime() time.Time { return time.Time{} } -func (fakeFileInfo) IsDir() bool { return false } -func (fakeFileInfo) Size() int64 { return 0 } -func (fakeFileInfo) Mode() os.FileMode { return 0644 } - -type fakeDirInfo string - -func (fd fakeDirInfo) Name() string { return string(fd) } -func (fakeDirInfo) Sys() interface{} { return nil } -func (fakeDirInfo) ModTime() time.Time { return time.Time{} } -func (fakeDirInfo) IsDir() bool { return true } -func (fakeDirInfo) Size() int64 { return 0 } -func (fakeDirInfo) Mode() os.FileMode { return 0755 } diff --git a/vendor/golang.org/x/tools/go/buildutil/overlay.go b/vendor/golang.org/x/tools/go/buildutil/overlay.go deleted file mode 100644 index 7e371658d..000000000 --- a/vendor/golang.org/x/tools/go/buildutil/overlay.go +++ /dev/null @@ -1,101 +0,0 @@ -// Copyright 2016 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package buildutil - -import ( - "bufio" - "bytes" - "fmt" - "go/build" - "io" - "path/filepath" - "strconv" - "strings" -) - -// OverlayContext overlays a build.Context with additional files from -// a map. Files in the map take precedence over other files. -// -// In addition to plain string comparison, two file names are -// considered equal if their base names match and their directory -// components point at the same directory on the file system. That is, -// symbolic links are followed for directories, but not files. -// -// A common use case for OverlayContext is to allow editors to pass in -// a set of unsaved, modified files. -// -// Currently, only the Context.OpenFile function will respect the -// overlay. This may change in the future. -func OverlayContext(orig *build.Context, overlay map[string][]byte) *build.Context { - // TODO(dominikh): Implement IsDir, HasSubdir and ReadDir - - rc := func(data []byte) (io.ReadCloser, error) { - return io.NopCloser(bytes.NewBuffer(data)), nil - } - - copy := *orig // make a copy - ctxt := &copy - ctxt.OpenFile = func(path string) (io.ReadCloser, error) { - // Fast path: names match exactly. - if content, ok := overlay[path]; ok { - return rc(content) - } - - // Slow path: check for same file under a different - // alias, perhaps due to a symbolic link. - for filename, content := range overlay { - if sameFile(path, filename) { - return rc(content) - } - } - - return OpenFile(orig, path) - } - return ctxt -} - -// ParseOverlayArchive parses an archive containing Go files and their -// contents. The result is intended to be used with OverlayContext. -// -// # Archive format -// -// The archive consists of a series of files. Each file consists of a -// name, a decimal file size and the file contents, separated by -// newlines. No newline follows after the file contents. -func ParseOverlayArchive(archive io.Reader) (map[string][]byte, error) { - overlay := make(map[string][]byte) - r := bufio.NewReader(archive) - for { - // Read file name.
- filename, err := r.ReadString('\n') - if err != nil { - if err == io.EOF { - break // OK - } - return nil, fmt.Errorf("reading archive file name: %v", err) - } - filename = filepath.Clean(strings.TrimSpace(filename)) - - // Read file size. - sz, err := r.ReadString('\n') - if err != nil { - return nil, fmt.Errorf("reading size of archive file %s: %v", filename, err) - } - sz = strings.TrimSpace(sz) - size, err := strconv.ParseUint(sz, 10, 32) - if err != nil { - return nil, fmt.Errorf("parsing size of archive file %s: %v", filename, err) - } - - // Read file content. - content := make([]byte, size) - if _, err := io.ReadFull(r, content); err != nil { - return nil, fmt.Errorf("reading archive file %s: %v", filename, err) - } - overlay[filename] = content - } - - return overlay, nil -} diff --git a/vendor/golang.org/x/tools/go/buildutil/tags.go b/vendor/golang.org/x/tools/go/buildutil/tags.go deleted file mode 100644 index 32c8d1424..000000000 --- a/vendor/golang.org/x/tools/go/buildutil/tags.go +++ /dev/null @@ -1,100 +0,0 @@ -// Copyright 2015 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package buildutil - -// This duplicated logic must be kept in sync with that from go build: -// $GOROOT/src/cmd/go/internal/work/build.go (tagsFlag.Set) -// $GOROOT/src/cmd/go/internal/base/flag.go (StringsFlag.Set) -// $GOROOT/src/cmd/internal/quoted/quoted.go (isSpaceByte, Split) - -import ( - "fmt" - "strings" -) - -const TagsFlagDoc = "a list of `build tags` to consider satisfied during the build. " + - "For more information about build tags, see the description of " + - "build constraints in the documentation for the go/build package" - -// TagsFlag is an implementation of the flag.Value and flag.Getter interfaces that parses -// a flag value the same as go build's -tags flag and populates a []string slice. -// -// See $GOROOT/src/go/build/doc.go for description of build tags. -// See $GOROOT/src/cmd/go/doc.go for description of 'go build -tags' flag. -// -// Example: -// -// flag.Var((*buildutil.TagsFlag)(&build.Default.BuildTags), "tags", buildutil.TagsFlagDoc) -type TagsFlag []string - -func (v *TagsFlag) Set(s string) error { - // See $GOROOT/src/cmd/go/internal/work/build.go (tagsFlag.Set) - // For compatibility with Go 1.12 and earlier, allow "-tags='a b c'" or even just "-tags='a'". - if strings.Contains(s, " ") || strings.Contains(s, "'") { - var err error - *v, err = splitQuotedFields(s) - if *v == nil { - *v = []string{} - } - return err - } - - // Starting in Go 1.13, the -tags flag is a comma-separated list of build tags. - *v = []string{} - for _, s := range strings.Split(s, ",") { - if s != "" { - *v = append(*v, s) - } - } - return nil -} - -func (v *TagsFlag) Get() interface{} { return *v } - -func splitQuotedFields(s string) ([]string, error) { - // See $GOROOT/src/cmd/internal/quoted/quoted.go (Split) - // This must remain in sync with that logic. - var f []string - for len(s) > 0 { - for len(s) > 0 && isSpaceByte(s[0]) { - s = s[1:] - } - if len(s) == 0 { - break - } - // Accepted quoted string. No unescaping inside. 
- if s[0] == '"' || s[0] == '\'' { - quote := s[0] - s = s[1:] - i := 0 - for i < len(s) && s[i] != quote { - i++ - } - if i >= len(s) { - return nil, fmt.Errorf("unterminated %c string", quote) - } - f = append(f, s[:i]) - s = s[i+1:] - continue - } - i := 0 - for i < len(s) && !isSpaceByte(s[i]) { - i++ - } - f = append(f, s[:i]) - s = s[i:] - } - return f, nil -} - -func (v *TagsFlag) String() string { - return "" -} - -func isSpaceByte(c byte) bool { - // See $GOROOT/src/cmd/internal/quoted/quoted.go (isSpaceByte, Split) - // This list must remain in sync with that. - return c == ' ' || c == '\t' || c == '\n' || c == '\r' -} diff --git a/vendor/golang.org/x/tools/go/buildutil/util.go b/vendor/golang.org/x/tools/go/buildutil/util.go deleted file mode 100644 index bee6390de..000000000 --- a/vendor/golang.org/x/tools/go/buildutil/util.go +++ /dev/null @@ -1,209 +0,0 @@ -// Copyright 2014 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package buildutil - -import ( - "fmt" - "go/ast" - "go/build" - "go/parser" - "go/token" - "io" - "io/ioutil" - "os" - "path" - "path/filepath" - "strings" -) - -// ParseFile behaves like parser.ParseFile, -// but uses the build context's file system interface, if any. -// -// If file is not absolute (as defined by IsAbsPath), the (dir, file) -// components are joined using JoinPath; dir must be absolute. -// -// The displayPath function, if provided, is used to transform the -// filename that will be attached to the ASTs. -// -// TODO(adonovan): call this from go/loader.parseFiles when the tree thaws. -func ParseFile(fset *token.FileSet, ctxt *build.Context, displayPath func(string) string, dir string, file string, mode parser.Mode) (*ast.File, error) { - if !IsAbsPath(ctxt, file) { - file = JoinPath(ctxt, dir, file) - } - rd, err := OpenFile(ctxt, file) - if err != nil { - return nil, err - } - defer rd.Close() // ignore error - if displayPath != nil { - file = displayPath(file) - } - return parser.ParseFile(fset, file, rd, mode) -} - -// ContainingPackage returns the package containing filename. -// -// If filename is not absolute, it is interpreted relative to working directory dir. -// All I/O is via the build context's file system interface, if any. -// -// The '...Files []string' fields of the resulting build.Package are not -// populated (build.FindOnly mode). -func ContainingPackage(ctxt *build.Context, dir, filename string) (*build.Package, error) { - if !IsAbsPath(ctxt, filename) { - filename = JoinPath(ctxt, dir, filename) - } - - // We must not assume the file tree uses - // "/" always, - // `\` always, - // or os.PathSeparator (which varies by platform), - // but to make any progress, we are forced to assume that - // paths will not use `\` unless the PathSeparator - // is also `\`, thus we can rely on filepath.ToSlash for some sanity. - - dirSlash := path.Dir(filepath.ToSlash(filename)) + "/" - - // We assume that no source root (GOPATH[i] or GOROOT) contains any other. - for _, srcdir := range ctxt.SrcDirs() { - srcdirSlash := filepath.ToSlash(srcdir) + "/" - if importPath, ok := HasSubdir(ctxt, srcdirSlash, dirSlash); ok { - return ctxt.Import(importPath, dir, build.FindOnly) - } - } - - return nil, fmt.Errorf("can't find package containing %s", filename) -} - -// -- Effective methods of file system interface ------------------------- - -// (go/build.Context defines these as methods, but does not export them.) 
- -// HasSubdir calls ctxt.HasSubdir (if not nil) or else uses -// the local file system to answer the question. -func HasSubdir(ctxt *build.Context, root, dir string) (rel string, ok bool) { - if f := ctxt.HasSubdir; f != nil { - return f(root, dir) - } - - // Try using paths we received. - if rel, ok = hasSubdir(root, dir); ok { - return - } - - // Try expanding symlinks and comparing - // expanded against unexpanded and - // expanded against expanded. - rootSym, _ := filepath.EvalSymlinks(root) - dirSym, _ := filepath.EvalSymlinks(dir) - - if rel, ok = hasSubdir(rootSym, dir); ok { - return - } - if rel, ok = hasSubdir(root, dirSym); ok { - return - } - return hasSubdir(rootSym, dirSym) -} - -func hasSubdir(root, dir string) (rel string, ok bool) { - const sep = string(filepath.Separator) - root = filepath.Clean(root) - if !strings.HasSuffix(root, sep) { - root += sep - } - - dir = filepath.Clean(dir) - if !strings.HasPrefix(dir, root) { - return "", false - } - - return filepath.ToSlash(dir[len(root):]), true -} - -// FileExists returns true if the specified file exists, -// using the build context's file system interface. -func FileExists(ctxt *build.Context, path string) bool { - if ctxt.OpenFile != nil { - r, err := ctxt.OpenFile(path) - if err != nil { - return false - } - r.Close() // ignore error - return true - } - _, err := os.Stat(path) - return err == nil -} - -// OpenFile behaves like os.Open, -// but uses the build context's file system interface, if any. -func OpenFile(ctxt *build.Context, path string) (io.ReadCloser, error) { - if ctxt.OpenFile != nil { - return ctxt.OpenFile(path) - } - return os.Open(path) -} - -// IsAbsPath behaves like filepath.IsAbs, -// but uses the build context's file system interface, if any. -func IsAbsPath(ctxt *build.Context, path string) bool { - if ctxt.IsAbsPath != nil { - return ctxt.IsAbsPath(path) - } - return filepath.IsAbs(path) -} - -// JoinPath behaves like filepath.Join, -// but uses the build context's file system interface, if any. -func JoinPath(ctxt *build.Context, path ...string) string { - if ctxt.JoinPath != nil { - return ctxt.JoinPath(path...) - } - return filepath.Join(path...) -} - -// IsDir behaves like os.Stat plus IsDir, -// but uses the build context's file system interface, if any. -func IsDir(ctxt *build.Context, path string) bool { - if ctxt.IsDir != nil { - return ctxt.IsDir(path) - } - fi, err := os.Stat(path) - return err == nil && fi.IsDir() -} - -// ReadDir behaves like ioutil.ReadDir, -// but uses the build context's file system interface, if any. -func ReadDir(ctxt *build.Context, path string) ([]os.FileInfo, error) { - if ctxt.ReadDir != nil { - return ctxt.ReadDir(path) - } - return ioutil.ReadDir(path) -} - -// SplitPathList behaves like filepath.SplitList, -// but uses the build context's file system interface, if any. -func SplitPathList(ctxt *build.Context, s string) []string { - if ctxt.SplitPathList != nil { - return ctxt.SplitPathList(s) - } - return filepath.SplitList(s) -} - -// sameFile returns true if x and y have the same basename and denote -// the same file. 
-func sameFile(x, y string) bool { - if path.Clean(x) == path.Clean(y) { - return true - } - if filepath.Base(x) == filepath.Base(y) { // (optimisation) - if xi, err := os.Stat(x); err == nil { - if yi, err := os.Stat(y); err == nil { - return os.SameFile(xi, yi) - } - } - } - return false -} diff --git a/vendor/golang.org/x/tools/go/cfg/builder.go b/vendor/golang.org/x/tools/go/cfg/builder.go deleted file mode 100644 index ac4d63c40..000000000 --- a/vendor/golang.org/x/tools/go/cfg/builder.go +++ /dev/null @@ -1,513 +0,0 @@ -// Copyright 2016 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package cfg - -// This file implements the CFG construction pass. - -import ( - "fmt" - "go/ast" - "go/token" -) - -type builder struct { - cfg *CFG - mayReturn func(*ast.CallExpr) bool - current *Block - lblocks map[string]*lblock // labeled blocks - targets *targets // linked stack of branch targets -} - -func (b *builder) stmt(_s ast.Stmt) { - // The label of the current statement. If non-nil, its _goto - // target is always set; its _break and _continue are set only - // within the body of switch/typeswitch/select/for/range. - // It is effectively an additional default-nil parameter of stmt(). - var label *lblock -start: - switch s := _s.(type) { - case *ast.BadStmt, - *ast.SendStmt, - *ast.IncDecStmt, - *ast.GoStmt, - *ast.DeferStmt, - *ast.EmptyStmt, - *ast.AssignStmt: - // No effect on control flow. - b.add(s) - - case *ast.ExprStmt: - b.add(s) - if call, ok := s.X.(*ast.CallExpr); ok && !b.mayReturn(call) { - // Calls to panic, os.Exit, etc, never return. - b.current = b.newBlock(KindUnreachable, s) - } - - case *ast.DeclStmt: - // Treat each var ValueSpec as a separate statement. 
- d := s.Decl.(*ast.GenDecl) - if d.Tok == token.VAR { - for _, spec := range d.Specs { - if spec, ok := spec.(*ast.ValueSpec); ok { - b.add(spec) - } - } - } - - case *ast.LabeledStmt: - label = b.labeledBlock(s.Label, s) - b.jump(label._goto) - b.current = label._goto - _s = s.Stmt - goto start // effectively: tailcall stmt(g, s.Stmt, label) - - case *ast.ReturnStmt: - b.add(s) - b.current = b.newBlock(KindUnreachable, s) - - case *ast.BranchStmt: - b.branchStmt(s) - - case *ast.BlockStmt: - b.stmtList(s.List) - - case *ast.IfStmt: - if s.Init != nil { - b.stmt(s.Init) - } - then := b.newBlock(KindIfThen, s) - done := b.newBlock(KindIfDone, s) - _else := done - if s.Else != nil { - _else = b.newBlock(KindIfElse, s) - } - b.add(s.Cond) - b.ifelse(then, _else) - b.current = then - b.stmt(s.Body) - b.jump(done) - - if s.Else != nil { - b.current = _else - b.stmt(s.Else) - b.jump(done) - } - - b.current = done - - case *ast.SwitchStmt: - b.switchStmt(s, label) - - case *ast.TypeSwitchStmt: - b.typeSwitchStmt(s, label) - - case *ast.SelectStmt: - b.selectStmt(s, label) - - case *ast.ForStmt: - b.forStmt(s, label) - - case *ast.RangeStmt: - b.rangeStmt(s, label) - - default: - panic(fmt.Sprintf("unexpected statement kind: %T", s)) - } -} - -func (b *builder) stmtList(list []ast.Stmt) { - for _, s := range list { - b.stmt(s) - } -} - -func (b *builder) branchStmt(s *ast.BranchStmt) { - var block *Block - switch s.Tok { - case token.BREAK: - if s.Label != nil { - if lb := b.labeledBlock(s.Label, nil); lb != nil { - block = lb._break - } - } else { - for t := b.targets; t != nil && block == nil; t = t.tail { - block = t._break - } - } - - case token.CONTINUE: - if s.Label != nil { - if lb := b.labeledBlock(s.Label, nil); lb != nil { - block = lb._continue - } - } else { - for t := b.targets; t != nil && block == nil; t = t.tail { - block = t._continue - } - } - - case token.FALLTHROUGH: - for t := b.targets; t != nil && block == nil; t = t.tail { - block = t._fallthrough - } - - case token.GOTO: - if s.Label != nil { - block = b.labeledBlock(s.Label, nil)._goto - } - } - if block == nil { // ill-typed (e.g. undefined label) - block = b.newBlock(KindUnreachable, s) - } - b.jump(block) - b.current = b.newBlock(KindUnreachable, s) -} - -func (b *builder) switchStmt(s *ast.SwitchStmt, label *lblock) { - if s.Init != nil { - b.stmt(s.Init) - } - if s.Tag != nil { - b.add(s.Tag) - } - done := b.newBlock(KindSwitchDone, s) - if label != nil { - label._break = done - } - // We pull the default case (if present) down to the end. - // But each fallthrough label must point to the next - // body block in source order, so we preallocate a - // body block (fallthru) for the next case. - // Unfortunately this makes for a confusing block order. - var defaultBody *[]ast.Stmt - var defaultFallthrough *Block - var fallthru, defaultBlock *Block - ncases := len(s.Body.List) - for i, clause := range s.Body.List { - body := fallthru - if body == nil { - body = b.newBlock(KindSwitchCaseBody, clause) // first case only - } - - // Preallocate body block for the next case. - fallthru = done - if i+1 < ncases { - fallthru = b.newBlock(KindSwitchCaseBody, s.Body.List[i+1]) - } - - cc := clause.(*ast.CaseClause) - if cc.List == nil { - // Default case. 
- defaultBody = &cc.Body - defaultFallthrough = fallthru - defaultBlock = body - continue - } - - var nextCond *Block - for _, cond := range cc.List { - nextCond = b.newBlock(KindSwitchNextCase, cc) - b.add(cond) // one half of the tag==cond condition - b.ifelse(body, nextCond) - b.current = nextCond - } - b.current = body - b.targets = &targets{ - tail: b.targets, - _break: done, - _fallthrough: fallthru, - } - b.stmtList(cc.Body) - b.targets = b.targets.tail - b.jump(done) - b.current = nextCond - } - if defaultBlock != nil { - b.jump(defaultBlock) - b.current = defaultBlock - b.targets = &targets{ - tail: b.targets, - _break: done, - _fallthrough: defaultFallthrough, - } - b.stmtList(*defaultBody) - b.targets = b.targets.tail - } - b.jump(done) - b.current = done -} - -func (b *builder) typeSwitchStmt(s *ast.TypeSwitchStmt, label *lblock) { - if s.Init != nil { - b.stmt(s.Init) - } - if s.Assign != nil { - b.add(s.Assign) - } - - done := b.newBlock(KindSwitchDone, s) - if label != nil { - label._break = done - } - var default_ *ast.CaseClause - for _, clause := range s.Body.List { - cc := clause.(*ast.CaseClause) - if cc.List == nil { - default_ = cc - continue - } - body := b.newBlock(KindSwitchCaseBody, cc) - var next *Block - for _, casetype := range cc.List { - next = b.newBlock(KindSwitchNextCase, cc) - // casetype is a type, so don't call b.add(casetype). - // This block logically contains a type assertion, - // x.(casetype), but it's unclear how to represent x. - _ = casetype - b.ifelse(body, next) - b.current = next - } - b.current = body - b.typeCaseBody(cc, done) - b.current = next - } - if default_ != nil { - b.typeCaseBody(default_, done) - } else { - b.jump(done) - } - b.current = done -} - -func (b *builder) typeCaseBody(cc *ast.CaseClause, done *Block) { - b.targets = &targets{ - tail: b.targets, - _break: done, - } - b.stmtList(cc.Body) - b.targets = b.targets.tail - b.jump(done) -} - -func (b *builder) selectStmt(s *ast.SelectStmt, label *lblock) { - // First evaluate channel expressions. - // TODO(adonovan): fix: evaluate only channel exprs here. - for _, clause := range s.Body.List { - if comm := clause.(*ast.CommClause).Comm; comm != nil { - b.stmt(comm) - } - } - - done := b.newBlock(KindSelectDone, s) - if label != nil { - label._break = done - } - - var defaultBody *[]ast.Stmt - for _, cc := range s.Body.List { - clause := cc.(*ast.CommClause) - if clause.Comm == nil { - defaultBody = &clause.Body - continue - } - body := b.newBlock(KindSelectCaseBody, clause) - next := b.newBlock(KindSelectAfterCase, clause) - b.ifelse(body, next) - b.current = body - b.targets = &targets{ - tail: b.targets, - _break: done, - } - switch comm := clause.Comm.(type) { - case *ast.ExprStmt: // <-ch - // nop - case *ast.AssignStmt: // x := <-states[state].Chan - b.add(comm.Lhs[0]) - } - b.stmtList(clause.Body) - b.targets = b.targets.tail - b.jump(done) - b.current = next - } - if defaultBody != nil { - b.targets = &targets{ - tail: b.targets, - _break: done, - } - b.stmtList(*defaultBody) - b.targets = b.targets.tail - b.jump(done) - } - b.current = done -} - -func (b *builder) forStmt(s *ast.ForStmt, label *lblock) { - // ...init... - // jump loop - // loop: - // if cond goto body else done - // body: - // ...body... - // jump post - // post: (target of continue) - // ...post... 
- // jump loop - // done: (target of break) - if s.Init != nil { - b.stmt(s.Init) - } - body := b.newBlock(KindForBody, s) - done := b.newBlock(KindForDone, s) // target of 'break' - loop := body // target of back-edge - if s.Cond != nil { - loop = b.newBlock(KindForLoop, s) - } - cont := loop // target of 'continue' - if s.Post != nil { - cont = b.newBlock(KindForPost, s) - } - if label != nil { - label._break = done - label._continue = cont - } - b.jump(loop) - b.current = loop - if loop != body { - b.add(s.Cond) - b.ifelse(body, done) - b.current = body - } - b.targets = &targets{ - tail: b.targets, - _break: done, - _continue: cont, - } - b.stmt(s.Body) - b.targets = b.targets.tail - b.jump(cont) - - if s.Post != nil { - b.current = cont - b.stmt(s.Post) - b.jump(loop) // back-edge - } - b.current = done -} - -func (b *builder) rangeStmt(s *ast.RangeStmt, label *lblock) { - b.add(s.X) - - if s.Key != nil { - b.add(s.Key) - } - if s.Value != nil { - b.add(s.Value) - } - - // ... - // loop: (target of continue) - // if ... goto body else done - // body: - // ... - // jump loop - // done: (target of break) - - loop := b.newBlock(KindRangeLoop, s) - b.jump(loop) - b.current = loop - - body := b.newBlock(KindRangeBody, s) - done := b.newBlock(KindRangeDone, s) - b.ifelse(body, done) - b.current = body - - if label != nil { - label._break = done - label._continue = loop - } - b.targets = &targets{ - tail: b.targets, - _break: done, - _continue: loop, - } - b.stmt(s.Body) - b.targets = b.targets.tail - b.jump(loop) // back-edge - b.current = done -} - -// -------- helpers -------- - -// Destinations associated with unlabeled for/switch/select stmts. -// We push/pop one of these as we enter/leave each construct and for -// each BranchStmt we scan for the innermost target of the right type. -type targets struct { - tail *targets // rest of stack - _break *Block - _continue *Block - _fallthrough *Block -} - -// Destinations associated with a labeled block. -// We populate these as labels are encountered in forward gotos or -// labeled statements. -type lblock struct { - _goto *Block - _break *Block - _continue *Block -} - -// labeledBlock returns the branch target associated with the -// specified label, creating it if needed. -func (b *builder) labeledBlock(label *ast.Ident, stmt *ast.LabeledStmt) *lblock { - lb := b.lblocks[label.Name] - if lb == nil { - lb = &lblock{_goto: b.newBlock(KindLabel, nil)} - if b.lblocks == nil { - b.lblocks = make(map[string]*lblock) - } - b.lblocks[label.Name] = lb - } - // Fill in the label later (in case of forward goto). - // Stmt may be set already if labels are duplicated (ill-typed). - if stmt != nil && lb._goto.Stmt == nil { - lb._goto.Stmt = stmt - } - return lb -} - -// newBlock appends a new unconnected basic block to b.cfg's block -// slice and returns it. -// It does not automatically become the current block. -// comment is an optional string for more readable debugging output. -func (b *builder) newBlock(kind BlockKind, stmt ast.Stmt) *Block { - g := b.cfg - block := &Block{ - Index: int32(len(g.Blocks)), - Kind: kind, - Stmt: stmt, - } - block.Succs = block.succs2[:0] - g.Blocks = append(g.Blocks, block) - return block -} - -func (b *builder) add(n ast.Node) { - b.current.Nodes = append(b.current.Nodes, n) -} - -// jump adds an edge from the current block to the target block, -// and sets b.current to nil. 
-func (b *builder) jump(target *Block) { - b.current.Succs = append(b.current.Succs, target) - b.current = nil -} - -// ifelse emits edges from the current block to the t and f blocks, -// and sets b.current to nil. -func (b *builder) ifelse(t, f *Block) { - b.current.Succs = append(b.current.Succs, t, f) - b.current = nil -} diff --git a/vendor/golang.org/x/tools/go/cfg/cfg.go b/vendor/golang.org/x/tools/go/cfg/cfg.go deleted file mode 100644 index 01668359a..000000000 --- a/vendor/golang.org/x/tools/go/cfg/cfg.go +++ /dev/null @@ -1,248 +0,0 @@ -// Copyright 2016 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Package cfg constructs a simple control-flow graph (CFG) of the -// statements and expressions within a single function. -// -// Use cfg.New to construct the CFG for a function body. -// -// The blocks of the CFG contain all the function's non-control -// statements. The CFG does not contain control statements such as If, -// Switch, Select, and Branch, but does contain their subexpressions; -// also, each block records the control statement (Block.Stmt) that -// gave rise to it and its relationship (Block.Kind) to that statement. -// -// For example, this source code: -// -// if x := f(); x != nil { -// T() -// } else { -// F() -// } -// -// produces this CFG: -// -// 1: x := f() Body -// x != nil -// succs: 2, 3 -// 2: T() IfThen -// succs: 4 -// 3: F() IfElse -// succs: 4 -// 4: IfDone -// -// The CFG does contain Return statements; even implicit returns are -// materialized (at the position of the function's closing brace). -// -// The CFG does not record conditions associated with conditional branch -// edges, nor the short-circuit semantics of the && and || operators, -// nor abnormal control flow caused by panic. If you need this -// information, use golang.org/x/tools/go/ssa instead. -package cfg - -import ( - "bytes" - "fmt" - "go/ast" - "go/format" - "go/token" -) - -// A CFG represents the control-flow graph of a single function. -// -// The entry point is Blocks[0]; there may be multiple return blocks. -type CFG struct { - fset *token.FileSet - Blocks []*Block // block[0] is entry; order otherwise undefined -} - -// A Block represents a basic block: a list of statements and -// expressions that are always evaluated sequentially. -// -// A block may have 0-2 successors: zero for a return block or a block -// that calls a function such as panic that never returns; one for a -// normal (jump) block; and two for a conditional (if) block. -type Block struct { - Nodes []ast.Node // statements, expressions, and ValueSpecs - Succs []*Block // successor nodes in the graph - Index int32 // index within CFG.Blocks - Live bool // block is reachable from entry - Kind BlockKind // block kind - Stmt ast.Stmt // statement that gave rise to this block (see BlockKind for details) - - succs2 [2]*Block // underlying array for Succs -} - -// A BlockKind identifies the purpose of a block. -// It also determines the possible types of its Stmt field. 
-type BlockKind uint8 - -const ( - KindInvalid BlockKind = iota // Stmt=nil - - KindUnreachable // unreachable block after {Branch,Return}Stmt / no-return call ExprStmt - KindBody // function body BlockStmt - KindForBody // body of ForStmt - KindForDone // block after ForStmt - KindForLoop // head of ForStmt - KindForPost // post condition of ForStmt - KindIfDone // block after IfStmt - KindIfElse // else block of IfStmt - KindIfThen // then block of IfStmt - KindLabel // labeled block of BranchStmt (Stmt may be nil for dangling label) - KindRangeBody // body of RangeStmt - KindRangeDone // block after RangeStmt - KindRangeLoop // head of RangeStmt - KindSelectCaseBody // body of SelectStmt - KindSelectDone // block after SelectStmt - KindSelectAfterCase // block after a CommClause - KindSwitchCaseBody // body of CaseClause - KindSwitchDone // block after {Type.}SwitchStmt - KindSwitchNextCase // secondary expression of a multi-expression CaseClause -) - -func (kind BlockKind) String() string { - return [...]string{ - KindInvalid: "Invalid", - KindUnreachable: "Unreachable", - KindBody: "Body", - KindForBody: "ForBody", - KindForDone: "ForDone", - KindForLoop: "ForLoop", - KindForPost: "ForPost", - KindIfDone: "IfDone", - KindIfElse: "IfElse", - KindIfThen: "IfThen", - KindLabel: "Label", - KindRangeBody: "RangeBody", - KindRangeDone: "RangeDone", - KindRangeLoop: "RangeLoop", - KindSelectCaseBody: "SelectCaseBody", - KindSelectDone: "SelectDone", - KindSelectAfterCase: "SelectAfterCase", - KindSwitchCaseBody: "SwitchCaseBody", - KindSwitchDone: "SwitchDone", - KindSwitchNextCase: "SwitchNextCase", - }[kind] -} - -// New returns a new control-flow graph for the specified function body, -// which must be non-nil. -// -// The CFG builder calls mayReturn to determine whether a given function -// call may return. For example, calls to panic, os.Exit, and log.Fatal -// do not return, so the builder can remove infeasible graph edges -// following such calls. The builder calls mayReturn only for a -// CallExpr beneath an ExprStmt. -func New(body *ast.BlockStmt, mayReturn func(*ast.CallExpr) bool) *CFG { - b := builder{ - mayReturn: mayReturn, - cfg: new(CFG), - } - b.current = b.newBlock(KindBody, body) - b.stmt(body) - - // Compute liveness (reachability from entry point), breadth-first. - q := make([]*Block, 0, len(b.cfg.Blocks)) - q = append(q, b.cfg.Blocks[0]) // entry point - for len(q) > 0 { - b := q[len(q)-1] - q = q[:len(q)-1] - - if !b.Live { - b.Live = true - q = append(q, b.Succs...) - } - } - - // Does control fall off the end of the function's body? - // Make implicit return explicit. - if b.current != nil && b.current.Live { - b.add(&ast.ReturnStmt{ - Return: body.End() - 1, - }) - } - - return b.cfg -} - -func (b *Block) String() string { - return fmt.Sprintf("block %d (%s)", b.Index, b.comment(nil)) -} - -func (b *Block) comment(fset *token.FileSet) string { - s := b.Kind.String() - if fset != nil && b.Stmt != nil { - s = fmt.Sprintf("%s@L%d", s, fset.Position(b.Stmt.Pos()).Line) - } - return s -} - -// Return returns the return statement at the end of this block if present, nil -// otherwise. -// -// When control falls off the end of the function, the ReturnStmt is synthetic -// and its [ast.Node.End] position may be beyond the end of the file. -func (b *Block) Return() (ret *ast.ReturnStmt) { - if len(b.Nodes) > 0 { - ret, _ = b.Nodes[len(b.Nodes)-1].(*ast.ReturnStmt) - } - return -} - -// Format formats the control-flow graph for ease of debugging. 
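
// Editorial sketch (not part of the removed vendor file): typical client use
// of this cfg package — parse a function, build its CFG with cfg.New, and
// dump it with Format. The source snippet and the always-true mayReturn
// callback are assumptions made only for this illustration.

package main

import (
	"fmt"
	"go/ast"
	"go/parser"
	"go/token"

	"golang.org/x/tools/go/cfg"
)

func main() {
	const src = `package p
func f(x int) int {
	if x > 0 {
		return x
	}
	return -x
}`
	fset := token.NewFileSet()
	file, err := parser.ParseFile(fset, "p.go", src, 0)
	if err != nil {
		panic(err)
	}
	fn := file.Decls[0].(*ast.FuncDecl)
	// Treat every call as possibly returning; real clients may consult
	// go/types to prune edges after panic/os.Exit-style calls.
	g := cfg.New(fn.Body, func(*ast.CallExpr) bool { return true })
	fmt.Print(g.Format(fset)) // block list with nodes and successor edges
}
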
-func (g *CFG) Format(fset *token.FileSet) string { - var buf bytes.Buffer - for _, b := range g.Blocks { - fmt.Fprintf(&buf, ".%d: # %s\n", b.Index, b.comment(fset)) - for _, n := range b.Nodes { - fmt.Fprintf(&buf, "\t%s\n", formatNode(fset, n)) - } - if len(b.Succs) > 0 { - fmt.Fprintf(&buf, "\tsuccs:") - for _, succ := range b.Succs { - fmt.Fprintf(&buf, " %d", succ.Index) - } - buf.WriteByte('\n') - } - buf.WriteByte('\n') - } - return buf.String() -} - -// digraph emits AT&T GraphViz (dot) syntax for the CFG. -// TODO(adonovan): publish; needs a proposal. -func (g *CFG) digraph(fset *token.FileSet) string { - var buf bytes.Buffer - buf.WriteString("digraph CFG {\n") - buf.WriteString(" node [shape=box];\n") - for _, b := range g.Blocks { - // node label - var text bytes.Buffer - text.WriteString(b.comment(fset)) - for _, n := range b.Nodes { - fmt.Fprintf(&text, "\n%s", formatNode(fset, n)) - } - - // node and edges - fmt.Fprintf(&buf, " n%d [label=%q];\n", b.Index, &text) - for _, succ := range b.Succs { - fmt.Fprintf(&buf, " n%d -> n%d;\n", b.Index, succ.Index) - } - } - buf.WriteString("}\n") - return buf.String() -} - -// exposed to main.go -func digraph(g *CFG, fset *token.FileSet) string { - return g.digraph(fset) -} - -func formatNode(fset *token.FileSet, n ast.Node) string { - var buf bytes.Buffer - format.Node(&buf, fset, n) - // Indent secondary lines by a tab. - return string(bytes.Replace(buf.Bytes(), []byte("\n"), []byte("\n\t"), -1)) -} diff --git a/vendor/golang.org/x/tools/go/internal/cgo/cgo.go b/vendor/golang.org/x/tools/go/internal/cgo/cgo.go deleted file mode 100644 index 697974bb9..000000000 --- a/vendor/golang.org/x/tools/go/internal/cgo/cgo.go +++ /dev/null @@ -1,219 +0,0 @@ -// Copyright 2013 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Package cgo handles cgo preprocessing of files containing `import "C"`. -// -// DESIGN -// -// The approach taken is to run the cgo processor on the package's -// CgoFiles and parse the output, faking the filenames of the -// resulting ASTs so that the synthetic file containing the C types is -// called "C" (e.g. "~/go/src/net/C") and the preprocessed files -// have their original names (e.g. "~/go/src/net/cgo_unix.go"), -// not the names of the actual temporary files. -// -// The advantage of this approach is its fidelity to 'go build'. The -// downside is that the token.Position.Offset for each AST node is -// incorrect, being an offset within the temporary file. Line numbers -// should still be correct because of the //line comments. -// -// The logic of this file is mostly plundered from the 'go build' -// tool, which also invokes the cgo preprocessor. -// -// -// REJECTED ALTERNATIVE -// -// An alternative approach that we explored is to extend go/types' -// Importer mechanism to provide the identity of the importing package -// so that each time `import "C"` appears it resolves to a different -// synthetic package containing just the objects needed in that case. -// The loader would invoke cgo but parse only the cgo_types.go file -// defining the package-level objects, discarding the other files -// resulting from preprocessing. -// -// The benefit of this approach would have been that source-level -// syntax information would correspond exactly to the original cgo -// file, with no preprocessing involved, making source tools like -// godoc, guru, and eg happy. 
However, the approach was rejected -// due to the additional complexity it would impose on go/types. (It -// made for a beautiful demo, though.) -// -// cgo files, despite their *.go extension, are not legal Go source -// files per the specification since they may refer to unexported -// members of package "C" such as C.int. Also, a function such as -// C.getpwent has in effect two types, one matching its C type and one -// which additionally returns (errno C.int). The cgo preprocessor -// uses name mangling to distinguish these two functions in the -// processed code, but go/types would need to duplicate this logic in -// its handling of function calls, analogous to the treatment of map -// lookups in which y=m[k] and y,ok=m[k] are both legal. - -package cgo - -import ( - "fmt" - "go/ast" - "go/build" - "go/parser" - "go/token" - "log" - "os" - "os/exec" - "path/filepath" - "regexp" - "strings" -) - -// ProcessFiles invokes the cgo preprocessor on bp.CgoFiles, parses -// the output and returns the resulting ASTs. -func ProcessFiles(bp *build.Package, fset *token.FileSet, DisplayPath func(path string) string, mode parser.Mode) ([]*ast.File, error) { - tmpdir, err := os.MkdirTemp("", strings.Replace(bp.ImportPath, "/", "_", -1)+"_C") - if err != nil { - return nil, err - } - defer os.RemoveAll(tmpdir) - - pkgdir := bp.Dir - if DisplayPath != nil { - pkgdir = DisplayPath(pkgdir) - } - - cgoFiles, cgoDisplayFiles, err := Run(bp, pkgdir, tmpdir, false) - if err != nil { - return nil, err - } - var files []*ast.File - for i := range cgoFiles { - rd, err := os.Open(cgoFiles[i]) - if err != nil { - return nil, err - } - display := filepath.Join(bp.Dir, cgoDisplayFiles[i]) - f, err := parser.ParseFile(fset, display, rd, mode) - rd.Close() - if err != nil { - return nil, err - } - files = append(files, f) - } - return files, nil -} - -var cgoRe = regexp.MustCompile(`[/\\:]`) - -// Run invokes the cgo preprocessor on bp.CgoFiles and returns two -// lists of files: the resulting processed files (in temporary -// directory tmpdir) and the corresponding names of the unprocessed files. -// -// Run is adapted from (*builder).cgo in -// $GOROOT/src/cmd/go/build.go, but these features are unsupported: -// Objective C, CGOPKGPATH, CGO_FLAGS. -// -// If useabs is set to true, absolute paths of the bp.CgoFiles will be passed in -// to the cgo preprocessor. This in turn will set the // line comments -// referring to those files to use absolute paths. This is needed for -// go/packages using the legacy go list support so it is able to find -// the original files. -func Run(bp *build.Package, pkgdir, tmpdir string, useabs bool) (files, displayFiles []string, err error) { - cgoCPPFLAGS, _, _, _ := cflags(bp, true) - _, cgoexeCFLAGS, _, _ := cflags(bp, false) - - if len(bp.CgoPkgConfig) > 0 { - pcCFLAGS, err := pkgConfigFlags(bp) - if err != nil { - return nil, nil, err - } - cgoCPPFLAGS = append(cgoCPPFLAGS, pcCFLAGS...) - } - - // Allows including _cgo_export.h from .[ch] files in the package. - cgoCPPFLAGS = append(cgoCPPFLAGS, "-I", tmpdir) - - // _cgo_gotypes.go (displayed "C") contains the type definitions. - files = append(files, filepath.Join(tmpdir, "_cgo_gotypes.go")) - displayFiles = append(displayFiles, "C") - for _, fn := range bp.CgoFiles { - // "foo.cgo1.go" (displayed "foo.go") is the processed Go source. 
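
// Editorial sketch (not part of the removed vendor file): how a client inside
// golang.org/x/tools would drive ProcessFiles on a cgo-using package. Note
// this package is internal, so it cannot be imported from other modules; the
// import path "example.com/uses/cgo" is a placeholder assumption.

package main

import (
	"fmt"
	"go/build"
	"go/parser"
	"go/token"

	"golang.org/x/tools/go/internal/cgo"
)

func main() {
	bp, err := build.Import("example.com/uses/cgo", "", 0)
	if err != nil {
		panic(err)
	}
	fset := token.NewFileSet()
	// ProcessFiles runs the cgo preprocessor on bp.CgoFiles and parses the
	// output, renaming the synthetic types file to "C".
	files, err := cgo.ProcessFiles(bp, fset, nil, parser.ParseComments)
	if err != nil {
		panic(err)
	}
	for _, f := range files {
		fmt.Println(fset.File(f.Pos()).Name())
	}
}
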
- f := cgoRe.ReplaceAllString(fn[:len(fn)-len("go")], "_") - files = append(files, filepath.Join(tmpdir, f+"cgo1.go")) - displayFiles = append(displayFiles, fn) - } - - var cgoflags []string - if bp.Goroot && bp.ImportPath == "runtime/cgo" { - cgoflags = append(cgoflags, "-import_runtime_cgo=false") - } - if bp.Goroot && bp.ImportPath == "runtime/race" || bp.ImportPath == "runtime/cgo" { - cgoflags = append(cgoflags, "-import_syscall=false") - } - - var cgoFiles []string = bp.CgoFiles - if useabs { - cgoFiles = make([]string, len(bp.CgoFiles)) - for i := range cgoFiles { - cgoFiles[i] = filepath.Join(pkgdir, bp.CgoFiles[i]) - } - } - - args := stringList( - "go", "tool", "cgo", "-objdir", tmpdir, cgoflags, "--", - cgoCPPFLAGS, cgoexeCFLAGS, cgoFiles, - ) - if false { - log.Printf("Running cgo for package %q: %s (dir=%s)", bp.ImportPath, args, pkgdir) - } - cmd := exec.Command(args[0], args[1:]...) - cmd.Dir = pkgdir - cmd.Env = append(os.Environ(), "PWD="+pkgdir) - cmd.Stdout = os.Stderr - cmd.Stderr = os.Stderr - if err := cmd.Run(); err != nil { - return nil, nil, fmt.Errorf("cgo failed: %s: %s", args, err) - } - - return files, displayFiles, nil -} - -// -- unmodified from 'go build' --------------------------------------- - -// Return the flags to use when invoking the C or C++ compilers, or cgo. -func cflags(p *build.Package, def bool) (cppflags, cflags, cxxflags, ldflags []string) { - var defaults string - if def { - defaults = "-g -O2" - } - - cppflags = stringList(envList("CGO_CPPFLAGS", ""), p.CgoCPPFLAGS) - cflags = stringList(envList("CGO_CFLAGS", defaults), p.CgoCFLAGS) - cxxflags = stringList(envList("CGO_CXXFLAGS", defaults), p.CgoCXXFLAGS) - ldflags = stringList(envList("CGO_LDFLAGS", defaults), p.CgoLDFLAGS) - return -} - -// envList returns the value of the given environment variable broken -// into fields, using the default value when the variable is empty. -func envList(key, def string) []string { - v := os.Getenv(key) - if v == "" { - v = def - } - return strings.Fields(v) -} - -// stringList's arguments should be a sequence of string or []string values. -// stringList flattens them into a single []string. -func stringList(args ...interface{}) []string { - var x []string - for _, arg := range args { - switch arg := arg.(type) { - case []string: - x = append(x, arg...) - case string: - x = append(x, arg) - default: - panic("stringList: invalid argument") - } - } - return x -} diff --git a/vendor/golang.org/x/tools/go/internal/cgo/cgo_pkgconfig.go b/vendor/golang.org/x/tools/go/internal/cgo/cgo_pkgconfig.go deleted file mode 100644 index 2455be54f..000000000 --- a/vendor/golang.org/x/tools/go/internal/cgo/cgo_pkgconfig.go +++ /dev/null @@ -1,42 +0,0 @@ -// Copyright 2013 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package cgo - -import ( - "errors" - "fmt" - "go/build" - "os/exec" - "strings" -) - -// pkgConfig runs pkg-config with the specified arguments and returns the flags it prints. -func pkgConfig(mode string, pkgs []string) (flags []string, err error) { - cmd := exec.Command("pkg-config", append([]string{mode}, pkgs...)...) 
- out, err := cmd.Output() - if err != nil { - s := fmt.Sprintf("%s failed: %v", strings.Join(cmd.Args, " "), err) - if len(out) > 0 { - s = fmt.Sprintf("%s: %s", s, out) - } - if err, ok := err.(*exec.ExitError); ok && len(err.Stderr) > 0 { - s = fmt.Sprintf("%s\nstderr:\n%s", s, err.Stderr) - } - return nil, errors.New(s) - } - if len(out) > 0 { - flags = strings.Fields(string(out)) - } - return -} - -// pkgConfigFlags calls pkg-config if needed and returns the cflags -// needed to build the package. -func pkgConfigFlags(p *build.Package) (cflags []string, err error) { - if len(p.CgoPkgConfig) == 0 { - return nil, nil - } - return pkgConfig("--cflags", p.CgoPkgConfig) -} diff --git a/vendor/golang.org/x/tools/go/loader/doc.go b/vendor/golang.org/x/tools/go/loader/doc.go deleted file mode 100644 index e35b1fd7d..000000000 --- a/vendor/golang.org/x/tools/go/loader/doc.go +++ /dev/null @@ -1,202 +0,0 @@ -// Copyright 2015 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Package loader loads a complete Go program from source code, parsing -// and type-checking the initial packages plus their transitive closure -// of dependencies. The ASTs and the derived facts are retained for -// later use. -// -// Deprecated: This is an older API and does not have support -// for modules. Use golang.org/x/tools/go/packages instead. -// -// The package defines two primary types: Config, which specifies a -// set of initial packages to load and various other options; and -// Program, which is the result of successfully loading the packages -// specified by a configuration. -// -// The configuration can be set directly, but *Config provides various -// convenience methods to simplify the common cases, each of which can -// be called any number of times. Finally, these are followed by a -// call to Load() to actually load and type-check the program. -// -// var conf loader.Config -// -// // Use the command-line arguments to specify -// // a set of initial packages to load from source. -// // See FromArgsUsage for help. -// rest, err := conf.FromArgs(os.Args[1:], wantTests) -// -// // Parse the specified files and create an ad hoc package with path "foo". -// // All files must have the same 'package' declaration. -// conf.CreateFromFilenames("foo", "foo.go", "bar.go") -// -// // Create an ad hoc package with path "foo" from -// // the specified already-parsed files. -// // All ASTs must have the same 'package' declaration. -// conf.CreateFromFiles("foo", parsedFiles) -// -// // Add "runtime" to the set of packages to be loaded. -// conf.Import("runtime") -// -// // Adds "fmt" and "fmt_test" to the set of packages -// // to be loaded. "fmt" will include *_test.go files. -// conf.ImportWithTests("fmt") -// -// // Finally, load all the packages specified by the configuration. -// prog, err := conf.Load() -// -// See examples_test.go for examples of API usage. -// -// # CONCEPTS AND TERMINOLOGY -// -// The WORKSPACE is the set of packages accessible to the loader. The -// workspace is defined by Config.Build, a *build.Context. The -// default context treats subdirectories of $GOROOT and $GOPATH as -// packages, but this behavior may be overridden. -// -// An AD HOC package is one specified as a set of source files on the -// command line. In the simplest case, it may consist of a single file -// such as $GOROOT/src/net/http/triv.go. 
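
// Editorial sketch (not part of the removed vendor file): the minimal
// Config-then-Load sequence described in the doc comment above, loading a
// single importable package ("fmt") and listing the initial packages.

package main

import (
	"fmt"
	"log"

	"golang.org/x/tools/go/loader"
)

func main() {
	var conf loader.Config
	conf.Import("fmt") // initial package, loaded from source with its dependencies
	prog, err := conf.Load()
	if err != nil {
		log.Fatal(err)
	}
	for _, info := range prog.InitialPackages() {
		fmt.Printf("%s: %d files\n", info.Pkg.Path(), len(info.Files))
	}
}
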
-// -// EXTERNAL TEST packages are those comprised of a set of *_test.go -// files all with the same 'package foo_test' declaration, all in the -// same directory. (go/build.Package calls these files XTestFiles.) -// -// An IMPORTABLE package is one that can be referred to by some import -// spec. Every importable package is uniquely identified by its -// PACKAGE PATH or just PATH, a string such as "fmt", "encoding/json", -// or "cmd/vendor/golang.org/x/arch/x86/x86asm". A package path -// typically denotes a subdirectory of the workspace. -// -// An import declaration uses an IMPORT PATH to refer to a package. -// Most import declarations use the package path as the import path. -// -// Due to VENDORING (https://golang.org/s/go15vendor), the -// interpretation of an import path may depend on the directory in which -// it appears. To resolve an import path to a package path, go/build -// must search the enclosing directories for a subdirectory named -// "vendor". -// -// ad hoc packages and external test packages are NON-IMPORTABLE. The -// path of an ad hoc package is inferred from the package -// declarations of its files and is therefore not a unique package key. -// For example, Config.CreatePkgs may specify two initial ad hoc -// packages, both with path "main". -// -// An AUGMENTED package is an importable package P plus all the -// *_test.go files with same 'package foo' declaration as P. -// (go/build.Package calls these files TestFiles.) -// -// The INITIAL packages are those specified in the configuration. A -// DEPENDENCY is a package loaded to satisfy an import in an initial -// package or another dependency. -package loader - -// IMPLEMENTATION NOTES -// -// 'go test', in-package test files, and import cycles -// --------------------------------------------------- -// -// An external test package may depend upon members of the augmented -// package that are not in the unaugmented package, such as functions -// that expose internals. (See bufio/export_test.go for an example.) -// So, the loader must ensure that for each external test package -// it loads, it also augments the corresponding non-test package. -// -// The import graph over n unaugmented packages must be acyclic; the -// import graph over n-1 unaugmented packages plus one augmented -// package must also be acyclic. ('go test' relies on this.) But the -// import graph over n augmented packages may contain cycles. -// -// First, all the (unaugmented) non-test packages and their -// dependencies are imported in the usual way; the loader reports an -// error if it detects an import cycle. -// -// Then, each package P for which testing is desired is augmented by -// the list P' of its in-package test files, by calling -// (*types.Checker).Files. This arrangement ensures that P' may -// reference definitions within P, but P may not reference definitions -// within P'. Furthermore, P' may import any other package, including -// ones that depend upon P, without an import cycle error. -// -// Consider two packages A and B, both of which have lists of -// in-package test files we'll call A' and B', and which have the -// following import graph edges: -// B imports A -// B' imports A -// A' imports B -// This last edge would be expected to create an error were it not -// for the special type-checking discipline above. -// Cycles of size greater than two are possible. 
For example: -// compress/bzip2/bzip2_test.go (package bzip2) imports "io/ioutil" -// io/ioutil/tempfile_test.go (package ioutil) imports "regexp" -// regexp/exec_test.go (package regexp) imports "compress/bzip2" -// -// -// Concurrency -// ----------- -// -// Let us define the import dependency graph as follows. Each node is a -// list of files passed to (Checker).Files at once. Many of these lists -// are the production code of an importable Go package, so those nodes -// are labelled by the package's path. The remaining nodes are -// ad hoc packages and lists of in-package *_test.go files that augment -// an importable package; those nodes have no label. -// -// The edges of the graph represent import statements appearing within a -// file. An edge connects a node (a list of files) to the node it -// imports, which is importable and thus always labelled. -// -// Loading is controlled by this dependency graph. -// -// To reduce I/O latency, we start loading a package's dependencies -// asynchronously as soon as we've parsed its files and enumerated its -// imports (scanImports). This performs a preorder traversal of the -// import dependency graph. -// -// To exploit hardware parallelism, we type-check unrelated packages in -// parallel, where "unrelated" means not ordered by the partial order of -// the import dependency graph. -// -// We use a concurrency-safe non-blocking cache (importer.imported) to -// record the results of type-checking, whether success or failure. An -// entry is created in this cache by startLoad the first time the -// package is imported. The first goroutine to request an entry becomes -// responsible for completing the task and broadcasting completion to -// subsequent requestors, which block until then. -// -// Type checking occurs in (parallel) postorder: we cannot type-check a -// set of files until we have loaded and type-checked all of their -// immediate dependencies (and thus all of their transitive -// dependencies). If the input were guaranteed free of import cycles, -// this would be trivial: we could simply wait for completion of the -// dependencies and then invoke the typechecker. -// -// But as we saw in the 'go test' section above, some cycles in the -// import graph over packages are actually legal, so long as the -// cycle-forming edge originates in the in-package test files that -// augment the package. This explains why the nodes of the import -// dependency graph are not packages, but lists of files: the unlabelled -// nodes avoid the cycles. Consider packages A and B where B imports A -// and A's in-package tests AT import B. The naively constructed import -// graph over packages would contain a cycle (A+AT) --> B --> (A+AT) but -// the graph over lists of files is AT --> B --> A, where AT is an -// unlabelled node. -// -// Awaiting completion of the dependencies in a cyclic graph would -// deadlock, so we must materialize the import dependency graph (as -// importer.graph) and check whether each import edge forms a cycle. If -// x imports y, and the graph already contains a path from y to x, then -// there is an import cycle, in which case the processing of x must not -// wait for the completion of processing of y. -// -// When the type-checker makes a callback (doImport) to the loader for a -// given import edge, there are two possible cases. In the normal case, -// the dependency has already been completely type-checked; doImport -// does a cache lookup and returns it. 
In the cyclic case, the entry in -// the cache is still necessarily incomplete, indicating a cycle. We -// perform the cycle check again to obtain the error message, and return -// the error. -// -// The result of using concurrency is about a 2.5x speedup for stdlib_test. diff --git a/vendor/golang.org/x/tools/go/loader/loader.go b/vendor/golang.org/x/tools/go/loader/loader.go deleted file mode 100644 index 013c0f505..000000000 --- a/vendor/golang.org/x/tools/go/loader/loader.go +++ /dev/null @@ -1,1066 +0,0 @@ -// Copyright 2013 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package loader - -// See doc.go for package documentation and implementation notes. - -import ( - "errors" - "fmt" - "go/ast" - "go/build" - "go/parser" - "go/token" - "go/types" - "os" - "path/filepath" - "sort" - "strings" - "sync" - "time" - - "golang.org/x/tools/go/ast/astutil" - "golang.org/x/tools/go/internal/cgo" - "golang.org/x/tools/internal/versions" -) - -var ignoreVendor build.ImportMode - -const trace = false // show timing info for type-checking - -// Config specifies the configuration for loading a whole program from -// Go source code. -// The zero value for Config is a ready-to-use default configuration. -type Config struct { - // Fset is the file set for the parser to use when loading the - // program. If nil, it may be lazily initialized by any - // method of Config. - Fset *token.FileSet - - // ParserMode specifies the mode to be used by the parser when - // loading source packages. - ParserMode parser.Mode - - // TypeChecker contains options relating to the type checker. - // - // The supplied IgnoreFuncBodies is not used; the effective - // value comes from the TypeCheckFuncBodies func below. - // The supplied Import function is not used either. - TypeChecker types.Config - - // TypeCheckFuncBodies is a predicate over package paths. - // A package for which the predicate is false will - // have its package-level declarations type checked, but not - // its function bodies; this can be used to quickly load - // dependencies from source. If nil, all func bodies are type - // checked. - TypeCheckFuncBodies func(path string) bool - - // If Build is non-nil, it is used to locate source packages. - // Otherwise &build.Default is used. - // - // By default, cgo is invoked to preprocess Go files that - // import the fake package "C". This behaviour can be - // disabled by setting CGO_ENABLED=0 in the environment prior - // to startup, or by setting Build.CgoEnabled=false. - Build *build.Context - - // The current directory, used for resolving relative package - // references such as "./go/loader". If empty, os.Getwd will be - // used instead. - Cwd string - - // If DisplayPath is non-nil, it is used to transform each - // file name obtained from Build.Import(). This can be used - // to prevent a virtualized build.Config's file names from - // leaking into the user interface. - DisplayPath func(path string) string - - // If AllowErrors is true, Load will return a Program even - // if some of the its packages contained I/O, parser or type - // errors; such errors are accessible via PackageInfo.Errors. If - // false, Load will fail if any package had an error. - AllowErrors bool - - // CreatePkgs specifies a list of non-importable initial - // packages to create. The resulting packages will appear in - // the corresponding elements of the Program.Created slice. 
- CreatePkgs []PkgSpec - - // ImportPkgs specifies a set of initial packages to load. - // The map keys are package paths. - // - // The map value indicates whether to load tests. If true, Load - // will add and type-check two lists of files to the package: - // non-test files followed by in-package *_test.go files. In - // addition, it will append the external test package (if any) - // to Program.Created. - ImportPkgs map[string]bool - - // FindPackage is called during Load to create the build.Package - // for a given import path from a given directory. - // If FindPackage is nil, (*build.Context).Import is used. - // A client may use this hook to adapt to a proprietary build - // system that does not follow the "go build" layout - // conventions, for example. - // - // It must be safe to call concurrently from multiple goroutines. - FindPackage func(ctxt *build.Context, importPath, fromDir string, mode build.ImportMode) (*build.Package, error) - - // AfterTypeCheck is called immediately after a list of files - // has been type-checked and appended to info.Files. - // - // This optional hook function is the earliest opportunity for - // the client to observe the output of the type checker, - // which may be useful to reduce analysis latency when loading - // a large program. - // - // The function is permitted to modify info.Info, for instance - // to clear data structures that are no longer needed, which can - // dramatically reduce peak memory consumption. - // - // The function may be called twice for the same PackageInfo: - // once for the files of the package and again for the - // in-package test files. - // - // It must be safe to call concurrently from multiple goroutines. - AfterTypeCheck func(info *PackageInfo, files []*ast.File) -} - -// A PkgSpec specifies a non-importable package to be created by Load. -// Files are processed first, but typically only one of Files and -// Filenames is provided. The path needn't be globally unique. -// -// For vendoring purposes, the package's directory is the one that -// contains the first file. -type PkgSpec struct { - Path string // package path ("" => use package declaration) - Files []*ast.File // ASTs of already-parsed files - Filenames []string // names of files to be parsed -} - -// A Program is a Go program loaded from source as specified by a Config. -type Program struct { - Fset *token.FileSet // the file set for this program - - // Created[i] contains the initial package whose ASTs or - // filenames were supplied by Config.CreatePkgs[i], followed by - // the external test package, if any, of each package in - // Config.ImportPkgs ordered by ImportPath. - // - // NOTE: these files must not import "C". Cgo preprocessing is - // only performed on imported packages, not ad hoc packages. - // - // TODO(adonovan): we need to copy and adapt the logic of - // goFilesPackage (from $GOROOT/src/cmd/go/build.go) and make - // Config.Import and Config.Create methods return the same kind - // of entity, essentially a build.Package. - // Perhaps we can even reuse that type directly. - Created []*PackageInfo - - // Imported contains the initially imported packages, - // as specified by Config.ImportPkgs. - Imported map[string]*PackageInfo - - // AllPackages contains the PackageInfo of every package - // encountered by Load: all initial packages and all - // dependencies, including incomplete ones. - AllPackages map[*types.Package]*PackageInfo - - // importMap is the canonical mapping of package paths to - // packages. 
It contains all Imported initial packages, but not - // Created ones, and all imported dependencies. - importMap map[string]*types.Package -} - -// PackageInfo holds the ASTs and facts derived by the type-checker -// for a single package. -// -// Not mutated once exposed via the API. -type PackageInfo struct { - Pkg *types.Package - Importable bool // true if 'import "Pkg.Path()"' would resolve to this - TransitivelyErrorFree bool // true if Pkg and all its dependencies are free of errors - Files []*ast.File // syntax trees for the package's files - Errors []error // non-nil if the package had errors - types.Info // type-checker deductions. - dir string // package directory - - checker *types.Checker // transient type-checker state - errorFunc func(error) -} - -func (info *PackageInfo) String() string { return info.Pkg.Path() } - -func (info *PackageInfo) appendError(err error) { - if info.errorFunc != nil { - info.errorFunc(err) - } else { - fmt.Fprintln(os.Stderr, err) - } - info.Errors = append(info.Errors, err) -} - -func (conf *Config) fset() *token.FileSet { - if conf.Fset == nil { - conf.Fset = token.NewFileSet() - } - return conf.Fset -} - -// ParseFile is a convenience function (intended for testing) that invokes -// the parser using the Config's FileSet, which is initialized if nil. -// -// src specifies the parser input as a string, []byte, or io.Reader, and -// filename is its apparent name. If src is nil, the contents of -// filename are read from the file system. -func (conf *Config) ParseFile(filename string, src interface{}) (*ast.File, error) { - // TODO(adonovan): use conf.build() etc like parseFiles does. - return parser.ParseFile(conf.fset(), filename, src, conf.ParserMode) -} - -// FromArgsUsage is a partial usage message that applications calling -// FromArgs may wish to include in their -help output. -const FromArgsUsage = ` - is a list of arguments denoting a set of initial packages. -It may take one of two forms: - -1. A list of *.go source files. - - All of the specified files are loaded, parsed and type-checked - as a single package. All the files must belong to the same directory. - -2. A list of import paths, each denoting a package. - - The package's directory is found relative to the $GOROOT and - $GOPATH using similar logic to 'go build', and the *.go files in - that directory are loaded, parsed and type-checked as a single - package. - - In addition, all *_test.go files in the directory are then loaded - and parsed. Those files whose package declaration equals that of - the non-*_test.go files are included in the primary package. Test - files whose package declaration ends with "_test" are type-checked - as another package, the 'external' test package, so that a single - import path may denote two packages. (Whether this behaviour is - enabled is tool-specific, and may depend on additional flags.) - -A '--' argument terminates the list of packages. -` - -// FromArgs interprets args as a set of initial packages to load from -// source and updates the configuration. It returns the list of -// unconsumed arguments. -// -// It is intended for use in command-line interfaces that require a -// set of initial packages to be specified; see FromArgsUsage message -// for details. -// -// Only superficial errors are reported at this stage; errors dependent -// on I/O are detected during Load. 
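
// Editorial sketch (not part of the removed vendor file): a small command-line
// driver built on FromArgs, per the usage message above. Passing xtest=true
// also loads external test packages; arguments after "--" are returned
// unconsumed for the caller's own flags.

package main

import (
	"fmt"
	"log"
	"os"

	"golang.org/x/tools/go/loader"
)

func main() {
	var conf loader.Config
	rest, err := conf.FromArgs(os.Args[1:], true)
	if err != nil {
		log.Fatal(err)
	}
	prog, err := conf.Load()
	if err != nil {
		log.Fatal(err)
	}
	fmt.Printf("loaded %d initial packages; unconsumed args: %v\n",
		len(prog.InitialPackages()), rest)
}
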
-func (conf *Config) FromArgs(args []string, xtest bool) ([]string, error) { - var rest []string - for i, arg := range args { - if arg == "--" { - rest = args[i+1:] - args = args[:i] - break // consume "--" and return the remaining args - } - } - - if len(args) > 0 && strings.HasSuffix(args[0], ".go") { - // Assume args is a list of a *.go files - // denoting a single ad hoc package. - for _, arg := range args { - if !strings.HasSuffix(arg, ".go") { - return nil, fmt.Errorf("named files must be .go files: %s", arg) - } - } - conf.CreateFromFilenames("", args...) - } else { - // Assume args are directories each denoting a - // package and (perhaps) an external test, iff xtest. - for _, arg := range args { - if xtest { - conf.ImportWithTests(arg) - } else { - conf.Import(arg) - } - } - } - - return rest, nil -} - -// CreateFromFilenames is a convenience function that adds -// a conf.CreatePkgs entry to create a package of the specified *.go -// files. -func (conf *Config) CreateFromFilenames(path string, filenames ...string) { - conf.CreatePkgs = append(conf.CreatePkgs, PkgSpec{Path: path, Filenames: filenames}) -} - -// CreateFromFiles is a convenience function that adds a conf.CreatePkgs -// entry to create package of the specified path and parsed files. -func (conf *Config) CreateFromFiles(path string, files ...*ast.File) { - conf.CreatePkgs = append(conf.CreatePkgs, PkgSpec{Path: path, Files: files}) -} - -// ImportWithTests is a convenience function that adds path to -// ImportPkgs, the set of initial source packages located relative to -// $GOPATH. The package will be augmented by any *_test.go files in -// its directory that contain a "package x" (not "package x_test") -// declaration. -// -// In addition, if any *_test.go files contain a "package x_test" -// declaration, an additional package comprising just those files will -// be added to CreatePkgs. -func (conf *Config) ImportWithTests(path string) { conf.addImport(path, true) } - -// Import is a convenience function that adds path to ImportPkgs, the -// set of initial packages that will be imported from source. -func (conf *Config) Import(path string) { conf.addImport(path, false) } - -func (conf *Config) addImport(path string, tests bool) { - if path == "C" { - return // ignore; not a real package - } - if conf.ImportPkgs == nil { - conf.ImportPkgs = make(map[string]bool) - } - conf.ImportPkgs[path] = conf.ImportPkgs[path] || tests -} - -// PathEnclosingInterval returns the PackageInfo and ast.Node that -// contain source interval [start, end), and all the node's ancestors -// up to the AST root. It searches all ast.Files of all packages in prog. -// exact is defined as for astutil.PathEnclosingInterval. -// -// The zero value is returned if not found. -func (prog *Program) PathEnclosingInterval(start, end token.Pos) (pkg *PackageInfo, path []ast.Node, exact bool) { - for _, info := range prog.AllPackages { - for _, f := range info.Files { - if f.Pos() == token.NoPos { - // This can happen if the parser saw - // too many errors and bailed out. - // (Use parser.AllErrors to prevent that.) - continue - } - if !tokenFileContainsPos(prog.Fset.File(f.Pos()), start) { - continue - } - if path, exact := astutil.PathEnclosingInterval(f, start, end); path != nil { - return info, path, exact - } - } - } - return nil, nil, false -} - -// InitialPackages returns a new slice containing the set of initial -// packages (Created + Imported) in unspecified order. 
-func (prog *Program) InitialPackages() []*PackageInfo { - infos := make([]*PackageInfo, 0, len(prog.Created)+len(prog.Imported)) - infos = append(infos, prog.Created...) - for _, info := range prog.Imported { - infos = append(infos, info) - } - return infos -} - -// Package returns the ASTs and results of type checking for the -// specified package. -func (prog *Program) Package(path string) *PackageInfo { - if info, ok := prog.AllPackages[prog.importMap[path]]; ok { - return info - } - for _, info := range prog.Created { - if path == info.Pkg.Path() { - return info - } - } - return nil -} - -// ---------- Implementation ---------- - -// importer holds the working state of the algorithm. -type importer struct { - conf *Config // the client configuration - start time.Time // for logging - - progMu sync.Mutex // guards prog - prog *Program // the resulting program - - // findpkg is a memoization of FindPackage. - findpkgMu sync.Mutex // guards findpkg - findpkg map[findpkgKey]*findpkgValue - - importedMu sync.Mutex // guards imported - imported map[string]*importInfo // all imported packages (incl. failures) by import path - - // import dependency graph: graph[x][y] => x imports y - // - // Since non-importable packages cannot be cyclic, we ignore - // their imports, thus we only need the subgraph over importable - // packages. Nodes are identified by their import paths. - graphMu sync.Mutex - graph map[string]map[string]bool -} - -type findpkgKey struct { - importPath string - fromDir string - mode build.ImportMode -} - -type findpkgValue struct { - ready chan struct{} // closed to broadcast readiness - bp *build.Package - err error -} - -// importInfo tracks the success or failure of a single import. -// -// Upon completion, exactly one of info and err is non-nil: -// info on successful creation of a package, err otherwise. -// A successful package may still contain type errors. -type importInfo struct { - path string // import path - info *PackageInfo // results of typechecking (including errors) - complete chan struct{} // closed to broadcast that info is set. -} - -// awaitCompletion blocks until ii is complete, -// i.e. the info field is safe to inspect. -func (ii *importInfo) awaitCompletion() { - <-ii.complete // wait for close -} - -// Complete marks ii as complete. -// Its info and err fields will not be subsequently updated. -func (ii *importInfo) Complete(info *PackageInfo) { - if info == nil { - panic("info == nil") - } - ii.info = info - close(ii.complete) -} - -type importError struct { - path string // import path - err error // reason for failure to create a package -} - -// Load creates the initial packages specified by conf.{Create,Import}Pkgs, -// loading their dependencies packages as needed. -// -// On success, Load returns a Program containing a PackageInfo for -// each package. On failure, it returns an error. -// -// If AllowErrors is true, Load will return a Program even if some -// packages contained I/O, parser or type errors, or if dependencies -// were missing. (Such errors are accessible via PackageInfo.Errors. If -// false, Load will fail if any package had an error. -// -// It is an error if no packages were loaded. -func (conf *Config) Load() (*Program, error) { - // Create a simple default error handler for parse/type errors. - if conf.TypeChecker.Error == nil { - conf.TypeChecker.Error = func(e error) { fmt.Fprintln(os.Stderr, e) } - } - - // Set default working directory for relative package references. 
- if conf.Cwd == "" { - var err error - conf.Cwd, err = os.Getwd() - if err != nil { - return nil, err - } - } - - // Install default FindPackage hook using go/build logic. - if conf.FindPackage == nil { - conf.FindPackage = (*build.Context).Import - } - - prog := &Program{ - Fset: conf.fset(), - Imported: make(map[string]*PackageInfo), - importMap: make(map[string]*types.Package), - AllPackages: make(map[*types.Package]*PackageInfo), - } - - imp := importer{ - conf: conf, - prog: prog, - findpkg: make(map[findpkgKey]*findpkgValue), - imported: make(map[string]*importInfo), - start: time.Now(), - graph: make(map[string]map[string]bool), - } - - // -- loading proper (concurrent phase) -------------------------------- - - var errpkgs []string // packages that contained errors - - // Load the initially imported packages and their dependencies, - // in parallel. - // No vendor check on packages imported from the command line. - infos, importErrors := imp.importAll("", conf.Cwd, conf.ImportPkgs, ignoreVendor) - for _, ie := range importErrors { - conf.TypeChecker.Error(ie.err) // failed to create package - errpkgs = append(errpkgs, ie.path) - } - for _, info := range infos { - prog.Imported[info.Pkg.Path()] = info - } - - // Augment the designated initial packages by their tests. - // Dependencies are loaded in parallel. - var xtestPkgs []*build.Package - for importPath, augment := range conf.ImportPkgs { - if !augment { - continue - } - - // No vendor check on packages imported from command line. - bp, err := imp.findPackage(importPath, conf.Cwd, ignoreVendor) - if err != nil { - // Package not found, or can't even parse package declaration. - // Already reported by previous loop; ignore it. - continue - } - - // Needs external test package? - if len(bp.XTestGoFiles) > 0 { - xtestPkgs = append(xtestPkgs, bp) - } - - // Consult the cache using the canonical package path. - path := bp.ImportPath - imp.importedMu.Lock() // (unnecessary, we're sequential here) - ii, ok := imp.imported[path] - // Paranoid checks added due to issue #11012. - if !ok { - // Unreachable. - // The previous loop called importAll and thus - // startLoad for each path in ImportPkgs, which - // populates imp.imported[path] with a non-zero value. - panic(fmt.Sprintf("imported[%q] not found", path)) - } - if ii == nil { - // Unreachable. - // The ii values in this loop are the same as in - // the previous loop, which enforced the invariant - // that at least one of ii.err and ii.info is non-nil. - panic(fmt.Sprintf("imported[%q] == nil", path)) - } - if ii.info == nil { - // Unreachable. - // awaitCompletion has the postcondition - // ii.info != nil. - panic(fmt.Sprintf("imported[%q].info = nil", path)) - } - info := ii.info - imp.importedMu.Unlock() - - // Parse the in-package test files. - files, errs := imp.conf.parsePackageFiles(bp, 't') - for _, err := range errs { - info.appendError(err) - } - - // The test files augmenting package P cannot be imported, - // but may import packages that import P, - // so we must disable the cycle check. - imp.addFiles(info, files, false) - } - - createPkg := func(path, dir string, files []*ast.File, errs []error) { - info := imp.newPackageInfo(path, dir) - for _, err := range errs { - info.appendError(err) - } - - // Ad hoc packages are non-importable, - // so no cycle check is needed. - // addFiles loads dependencies in parallel. - imp.addFiles(info, files, false) - prog.Created = append(prog.Created, info) - } - - // Create packages specified by conf.CreatePkgs. 
- for _, cp := range conf.CreatePkgs { - files, errs := parseFiles(conf.fset(), conf.build(), nil, conf.Cwd, cp.Filenames, conf.ParserMode) - files = append(files, cp.Files...) - - path := cp.Path - if path == "" { - if len(files) > 0 { - path = files[0].Name.Name - } else { - path = "(unnamed)" - } - } - - dir := conf.Cwd - if len(files) > 0 && files[0].Pos().IsValid() { - dir = filepath.Dir(conf.fset().File(files[0].Pos()).Name()) - } - createPkg(path, dir, files, errs) - } - - // Create external test packages. - sort.Sort(byImportPath(xtestPkgs)) - for _, bp := range xtestPkgs { - files, errs := imp.conf.parsePackageFiles(bp, 'x') - createPkg(bp.ImportPath+"_test", bp.Dir, files, errs) - } - - // -- finishing up (sequential) ---------------------------------------- - - if len(prog.Imported)+len(prog.Created) == 0 { - return nil, errors.New("no initial packages were loaded") - } - - // Create infos for indirectly imported packages. - // e.g. incomplete packages without syntax, loaded from export data. - for _, obj := range prog.importMap { - info := prog.AllPackages[obj] - if info == nil { - prog.AllPackages[obj] = &PackageInfo{Pkg: obj, Importable: true} - } else { - // finished - info.checker = nil - info.errorFunc = nil - } - } - - if !conf.AllowErrors { - // Report errors in indirectly imported packages. - for _, info := range prog.AllPackages { - if len(info.Errors) > 0 { - errpkgs = append(errpkgs, info.Pkg.Path()) - } - } - if errpkgs != nil { - var more string - if len(errpkgs) > 3 { - more = fmt.Sprintf(" and %d more", len(errpkgs)-3) - errpkgs = errpkgs[:3] - } - return nil, fmt.Errorf("couldn't load packages due to errors: %s%s", - strings.Join(errpkgs, ", "), more) - } - } - - markErrorFreePackages(prog.AllPackages) - - return prog, nil -} - -type byImportPath []*build.Package - -func (b byImportPath) Len() int { return len(b) } -func (b byImportPath) Less(i, j int) bool { return b[i].ImportPath < b[j].ImportPath } -func (b byImportPath) Swap(i, j int) { b[i], b[j] = b[j], b[i] } - -// markErrorFreePackages sets the TransitivelyErrorFree flag on all -// applicable packages. -func markErrorFreePackages(allPackages map[*types.Package]*PackageInfo) { - // Build the transpose of the import graph. - importedBy := make(map[*types.Package]map[*types.Package]bool) - for P := range allPackages { - for _, Q := range P.Imports() { - clients, ok := importedBy[Q] - if !ok { - clients = make(map[*types.Package]bool) - importedBy[Q] = clients - } - clients[P] = true - } - } - - // Find all packages reachable from some error package. - reachable := make(map[*types.Package]bool) - var visit func(*types.Package) - visit = func(p *types.Package) { - if !reachable[p] { - reachable[p] = true - for q := range importedBy[p] { - visit(q) - } - } - } - for _, info := range allPackages { - if len(info.Errors) > 0 { - visit(info.Pkg) - } - } - - // Mark the others as "transitively error-free". - for _, info := range allPackages { - if !reachable[info.Pkg] { - info.TransitivelyErrorFree = true - } - } -} - -// build returns the effective build context. -func (conf *Config) build() *build.Context { - if conf.Build != nil { - return conf.Build - } - return &build.Default -} - -// parsePackageFiles enumerates the files belonging to package path, -// then loads, parses and returns them, plus a list of I/O or parse -// errors that were encountered. 
-// -// 'which' indicates which files to include: -// -// 'g': include non-test *.go source files (GoFiles + processed CgoFiles) -// 't': include in-package *_test.go source files (TestGoFiles) -// 'x': include external *_test.go source files. (XTestGoFiles) -func (conf *Config) parsePackageFiles(bp *build.Package, which rune) ([]*ast.File, []error) { - if bp.ImportPath == "unsafe" { - return nil, nil - } - var filenames []string - switch which { - case 'g': - filenames = bp.GoFiles - case 't': - filenames = bp.TestGoFiles - case 'x': - filenames = bp.XTestGoFiles - default: - panic(which) - } - - files, errs := parseFiles(conf.fset(), conf.build(), conf.DisplayPath, bp.Dir, filenames, conf.ParserMode) - - // Preprocess CgoFiles and parse the outputs (sequentially). - if which == 'g' && bp.CgoFiles != nil { - cgofiles, err := cgo.ProcessFiles(bp, conf.fset(), conf.DisplayPath, conf.ParserMode) - if err != nil { - errs = append(errs, err) - } else { - files = append(files, cgofiles...) - } - } - - return files, errs -} - -// doImport imports the package denoted by path. -// It implements the types.Importer signature. -// -// It returns an error if a package could not be created -// (e.g. go/build or parse error), but type errors are reported via -// the types.Config.Error callback (the first of which is also saved -// in the package's PackageInfo). -// -// Idempotent. -func (imp *importer) doImport(from *PackageInfo, to string) (*types.Package, error) { - if to == "C" { - // This should be unreachable, but ad hoc packages are - // not currently subject to cgo preprocessing. - // See https://golang.org/issue/11627. - return nil, fmt.Errorf(`the loader doesn't cgo-process ad hoc packages like %q; see Go issue 11627`, - from.Pkg.Path()) - } - - bp, err := imp.findPackage(to, from.dir, 0) - if err != nil { - return nil, err - } - - // The standard unsafe package is handled specially, - // and has no PackageInfo. - if bp.ImportPath == "unsafe" { - return types.Unsafe, nil - } - - // Look for the package in the cache using its canonical path. - path := bp.ImportPath - imp.importedMu.Lock() - ii := imp.imported[path] - imp.importedMu.Unlock() - if ii == nil { - panic("internal error: unexpected import: " + path) - } - if ii.info != nil { - return ii.info.Pkg, nil - } - - // Import of incomplete package: this indicates a cycle. - fromPath := from.Pkg.Path() - if cycle := imp.findPath(path, fromPath); cycle != nil { - // Normalize cycle: start from alphabetically largest node. - pos, start := -1, "" - for i, s := range cycle { - if pos < 0 || s > start { - pos, start = i, s - } - } - cycle = append(cycle, cycle[:pos]...)[pos:] // rotate cycle to start from largest - cycle = append(cycle, cycle[0]) // add start node to end to show cycliness - return nil, fmt.Errorf("import cycle: %s", strings.Join(cycle, " -> ")) - } - - panic("internal error: import of incomplete (yet acyclic) package: " + fromPath) -} - -// findPackage locates the package denoted by the importPath in the -// specified directory. -func (imp *importer) findPackage(importPath, fromDir string, mode build.ImportMode) (*build.Package, error) { - // We use a non-blocking duplicate-suppressing cache (gopl.io §9.7) - // to avoid holding the lock around FindPackage. 
- key := findpkgKey{importPath, fromDir, mode} - imp.findpkgMu.Lock() - v, ok := imp.findpkg[key] - if ok { - // cache hit - imp.findpkgMu.Unlock() - - <-v.ready // wait for entry to become ready - } else { - // Cache miss: this goroutine becomes responsible for - // populating the map entry and broadcasting its readiness. - v = &findpkgValue{ready: make(chan struct{})} - imp.findpkg[key] = v - imp.findpkgMu.Unlock() - - ioLimit <- true - v.bp, v.err = imp.conf.FindPackage(imp.conf.build(), importPath, fromDir, mode) - <-ioLimit - - if _, ok := v.err.(*build.NoGoError); ok { - v.err = nil // empty directory is not an error - } - - close(v.ready) // broadcast ready condition - } - return v.bp, v.err -} - -// importAll loads, parses, and type-checks the specified packages in -// parallel and returns their completed importInfos in unspecified order. -// -// fromPath is the package path of the importing package, if it is -// importable, "" otherwise. It is used for cycle detection. -// -// fromDir is the directory containing the import declaration that -// caused these imports. -func (imp *importer) importAll(fromPath, fromDir string, imports map[string]bool, mode build.ImportMode) (infos []*PackageInfo, errors []importError) { - if fromPath != "" { - // We're loading a set of imports. - // - // We must record graph edges from the importing package - // to its dependencies, and check for cycles. - imp.graphMu.Lock() - deps, ok := imp.graph[fromPath] - if !ok { - deps = make(map[string]bool) - imp.graph[fromPath] = deps - } - for importPath := range imports { - deps[importPath] = true - } - imp.graphMu.Unlock() - } - - var pending []*importInfo - for importPath := range imports { - if fromPath != "" { - if cycle := imp.findPath(importPath, fromPath); cycle != nil { - // Cycle-forming import: we must not check it - // since it would deadlock. - if trace { - fmt.Fprintf(os.Stderr, "import cycle: %q\n", cycle) - } - continue - } - } - bp, err := imp.findPackage(importPath, fromDir, mode) - if err != nil { - errors = append(errors, importError{ - path: importPath, - err: err, - }) - continue - } - pending = append(pending, imp.startLoad(bp)) - } - - for _, ii := range pending { - ii.awaitCompletion() - infos = append(infos, ii.info) - } - - return infos, errors -} - -// findPath returns an arbitrary path from 'from' to 'to' in the import -// graph, or nil if there was none. -func (imp *importer) findPath(from, to string) []string { - imp.graphMu.Lock() - defer imp.graphMu.Unlock() - - seen := make(map[string]bool) - var search func(stack []string, importPath string) []string - search = func(stack []string, importPath string) []string { - if !seen[importPath] { - seen[importPath] = true - stack = append(stack, importPath) - if importPath == to { - return stack - } - for x := range imp.graph[importPath] { - if p := search(stack, x); p != nil { - return p - } - } - } - return nil - } - return search(make([]string, 0, 20), from) -} - -// startLoad initiates the loading, parsing and type-checking of the -// specified package and its dependencies, if it has not already begun. -// -// It returns an importInfo, not necessarily in a completed state. The -// caller must call awaitCompletion() before accessing its info field. -// -// startLoad is concurrency-safe and idempotent. 
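
// Editorial sketch (not part of the removed vendor file): the
// duplicate-suppressing cache pattern that findPackage uses above
// (gopl.io §9.7), reduced to a standalone form. The names memo, entry,
// and the string-valued compute function are illustrative assumptions.

package main

import (
	"fmt"
	"sync"
)

type entry struct {
	ready chan struct{} // closed once val has been filled in
	val   string
}

type memo struct {
	mu      sync.Mutex
	cache   map[string]*entry
	compute func(key string) string // the expensive, idempotent work being memoized
}

func (m *memo) get(key string) string {
	m.mu.Lock()
	e, ok := m.cache[key]
	if ok {
		// Hit: drop the lock, then wait for the first caller to finish.
		m.mu.Unlock()
		<-e.ready
	} else {
		// Miss: this goroutine does the work and broadcasts readiness.
		e = &entry{ready: make(chan struct{})}
		m.cache[key] = e
		m.mu.Unlock()
		e.val = m.compute(key)
		close(e.ready)
	}
	return e.val
}

func main() {
	m := &memo{
		cache:   make(map[string]*entry),
		compute: func(k string) string { return "resolved " + k },
	}
	var wg sync.WaitGroup
	for i := 0; i < 4; i++ {
		wg.Add(1)
		go func() {
			defer wg.Done()
			fmt.Println(m.get("golang.org/x/tools/go/loader"))
		}()
	}
	wg.Wait()
}
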
-func (imp *importer) startLoad(bp *build.Package) *importInfo { - path := bp.ImportPath - imp.importedMu.Lock() - ii, ok := imp.imported[path] - if !ok { - ii = &importInfo{path: path, complete: make(chan struct{})} - imp.imported[path] = ii - go func() { - info := imp.load(bp) - ii.Complete(info) - }() - } - imp.importedMu.Unlock() - - return ii -} - -// load implements package loading by parsing Go source files -// located by go/build. -func (imp *importer) load(bp *build.Package) *PackageInfo { - info := imp.newPackageInfo(bp.ImportPath, bp.Dir) - info.Importable = true - files, errs := imp.conf.parsePackageFiles(bp, 'g') - for _, err := range errs { - info.appendError(err) - } - - imp.addFiles(info, files, true) - - imp.progMu.Lock() - imp.prog.importMap[bp.ImportPath] = info.Pkg - imp.progMu.Unlock() - - return info -} - -// addFiles adds and type-checks the specified files to info, loading -// their dependencies if needed. The order of files determines the -// package initialization order. It may be called multiple times on the -// same package. Errors are appended to the info.Errors field. -// -// cycleCheck determines whether the imports within files create -// dependency edges that should be checked for potential cycles. -func (imp *importer) addFiles(info *PackageInfo, files []*ast.File, cycleCheck bool) { - // Ensure the dependencies are loaded, in parallel. - var fromPath string - if cycleCheck { - fromPath = info.Pkg.Path() - } - // TODO(adonovan): opt: make the caller do scanImports. - // Callers with a build.Package can skip it. - imp.importAll(fromPath, info.dir, scanImports(files), 0) - - if trace { - fmt.Fprintf(os.Stderr, "%s: start %q (%d)\n", - time.Since(imp.start), info.Pkg.Path(), len(files)) - } - - // Don't call checker.Files on Unsafe, even with zero files, - // because it would mutate the package, which is a global. - if info.Pkg == types.Unsafe { - if len(files) > 0 { - panic(`"unsafe" package contains unexpected files`) - } - } else { - // Ignore the returned (first) error since we - // already collect them all in the PackageInfo. - info.checker.Files(files) - info.Files = append(info.Files, files...) - } - - if imp.conf.AfterTypeCheck != nil { - imp.conf.AfterTypeCheck(info, files) - } - - if trace { - fmt.Fprintf(os.Stderr, "%s: stop %q\n", - time.Since(imp.start), info.Pkg.Path()) - } -} - -func (imp *importer) newPackageInfo(path, dir string) *PackageInfo { - var pkg *types.Package - if path == "unsafe" { - pkg = types.Unsafe - } else { - pkg = types.NewPackage(path, "") - } - info := &PackageInfo{ - Pkg: pkg, - Info: types.Info{ - Types: make(map[ast.Expr]types.TypeAndValue), - Defs: make(map[*ast.Ident]types.Object), - Uses: make(map[*ast.Ident]types.Object), - Implicits: make(map[ast.Node]types.Object), - Instances: make(map[*ast.Ident]types.Instance), - Scopes: make(map[ast.Node]*types.Scope), - Selections: make(map[*ast.SelectorExpr]*types.Selection), - }, - errorFunc: imp.conf.TypeChecker.Error, - dir: dir, - } - versions.InitFileVersions(&info.Info) - - // Copy the types.Config so we can vary it across PackageInfos. 
- tc := imp.conf.TypeChecker - tc.IgnoreFuncBodies = false - if f := imp.conf.TypeCheckFuncBodies; f != nil { - tc.IgnoreFuncBodies = !f(path) - } - tc.Importer = closure{imp, info} - tc.Error = info.appendError // appendError wraps the user's Error function - - info.checker = types.NewChecker(&tc, imp.conf.fset(), pkg, &info.Info) - imp.progMu.Lock() - imp.prog.AllPackages[pkg] = info - imp.progMu.Unlock() - return info -} - -type closure struct { - imp *importer - info *PackageInfo -} - -func (c closure) Import(to string) (*types.Package, error) { return c.imp.doImport(c.info, to) } diff --git a/vendor/golang.org/x/tools/go/loader/util.go b/vendor/golang.org/x/tools/go/loader/util.go deleted file mode 100644 index 3a80acae6..000000000 --- a/vendor/golang.org/x/tools/go/loader/util.go +++ /dev/null @@ -1,123 +0,0 @@ -// Copyright 2013 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package loader - -import ( - "go/ast" - "go/build" - "go/parser" - "go/token" - "io" - "os" - "strconv" - "sync" - - "golang.org/x/tools/go/buildutil" -) - -// We use a counting semaphore to limit -// the number of parallel I/O calls per process. -var ioLimit = make(chan bool, 10) - -// parseFiles parses the Go source files within directory dir and -// returns the ASTs of the ones that could be at least partially parsed, -// along with a list of I/O and parse errors encountered. -// -// I/O is done via ctxt, which may specify a virtual file system. -// displayPath is used to transform the filenames attached to the ASTs. -func parseFiles(fset *token.FileSet, ctxt *build.Context, displayPath func(string) string, dir string, files []string, mode parser.Mode) ([]*ast.File, []error) { - if displayPath == nil { - displayPath = func(path string) string { return path } - } - var wg sync.WaitGroup - n := len(files) - parsed := make([]*ast.File, n) - errors := make([]error, n) - for i, file := range files { - if !buildutil.IsAbsPath(ctxt, file) { - file = buildutil.JoinPath(ctxt, dir, file) - } - wg.Add(1) - go func(i int, file string) { - ioLimit <- true // wait - defer func() { - wg.Done() - <-ioLimit // signal - }() - var rd io.ReadCloser - var err error - if ctxt.OpenFile != nil { - rd, err = ctxt.OpenFile(file) - } else { - rd, err = os.Open(file) - } - if err != nil { - errors[i] = err // open failed - return - } - - // ParseFile may return both an AST and an error. - parsed[i], errors[i] = parser.ParseFile(fset, displayPath(file), rd, mode) - rd.Close() - }(i, file) - } - wg.Wait() - - // Eliminate nils, preserving order. - var o int - for _, f := range parsed { - if f != nil { - parsed[o] = f - o++ - } - } - parsed = parsed[:o] - - o = 0 - for _, err := range errors { - if err != nil { - errors[o] = err - o++ - } - } - errors = errors[:o] - - return parsed, errors -} - -// scanImports returns the set of all import paths from all -// import specs in the specified files. -func scanImports(files []*ast.File) map[string]bool { - imports := make(map[string]bool) - for _, f := range files { - for _, decl := range f.Decls { - if decl, ok := decl.(*ast.GenDecl); ok && decl.Tok == token.IMPORT { - for _, spec := range decl.Specs { - spec := spec.(*ast.ImportSpec) - - // NB: do not assume the program is well-formed! 
- path, err := strconv.Unquote(spec.Path.Value) - if err != nil { - continue // quietly ignore the error - } - if path == "C" { - continue // skip pseudopackage - } - imports[path] = true - } - } - } - } - return imports -} - -// ---------- Internal helpers ---------- - -// TODO(adonovan): make this a method: func (*token.File) Contains(token.Pos) -func tokenFileContainsPos(f *token.File, pos token.Pos) bool { - p := int(pos) - base := f.Base() - return base <= p && p < base+f.Size() -} diff --git a/vendor/golang.org/x/tools/go/ssa/TODO b/vendor/golang.org/x/tools/go/ssa/TODO deleted file mode 100644 index 6c35253c7..000000000 --- a/vendor/golang.org/x/tools/go/ssa/TODO +++ /dev/null @@ -1,16 +0,0 @@ --*- text -*- - -SSA Generics to-do list -=========================== - -DOCUMENTATION: -- Read me for internals - -TYPE PARAMETERIZED GENERIC FUNCTIONS: -- sanity.go updates. -- Check source functions going to generics. -- Tests, tests, tests... - -USAGE: -- Back fill users for handling ssa.InstantiateGenerics being off. - diff --git a/vendor/golang.org/x/tools/go/ssa/block.go b/vendor/golang.org/x/tools/go/ssa/block.go deleted file mode 100644 index 28170c787..000000000 --- a/vendor/golang.org/x/tools/go/ssa/block.go +++ /dev/null @@ -1,113 +0,0 @@ -// Copyright 2022 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package ssa - -import "fmt" - -// This file implements the BasicBlock type. - -// addEdge adds a control-flow graph edge from from to to. -func addEdge(from, to *BasicBlock) { - from.Succs = append(from.Succs, to) - to.Preds = append(to.Preds, from) -} - -// Parent returns the function that contains block b. -func (b *BasicBlock) Parent() *Function { return b.parent } - -// String returns a human-readable label of this block. -// It is not guaranteed unique within the function. -func (b *BasicBlock) String() string { - return fmt.Sprintf("%d", b.Index) -} - -// emit appends an instruction to the current basic block. -// If the instruction defines a Value, it is returned. -func (b *BasicBlock) emit(i Instruction) Value { - i.setBlock(b) - b.Instrs = append(b.Instrs, i) - v, _ := i.(Value) - return v -} - -// predIndex returns the i such that b.Preds[i] == c or panics if -// there is none. -func (b *BasicBlock) predIndex(c *BasicBlock) int { - for i, pred := range b.Preds { - if pred == c { - return i - } - } - panic(fmt.Sprintf("no edge %s -> %s", c, b)) -} - -// hasPhi returns true if b.Instrs contains φ-nodes. -func (b *BasicBlock) hasPhi() bool { - _, ok := b.Instrs[0].(*Phi) - return ok -} - -// phis returns the prefix of b.Instrs containing all the block's φ-nodes. -func (b *BasicBlock) phis() []Instruction { - for i, instr := range b.Instrs { - if _, ok := instr.(*Phi); !ok { - return b.Instrs[:i] - } - } - return nil // unreachable in well-formed blocks -} - -// replacePred replaces all occurrences of p in b's predecessor list with q. -// Ordinarily there should be at most one. -func (b *BasicBlock) replacePred(p, q *BasicBlock) { - for i, pred := range b.Preds { - if pred == p { - b.Preds[i] = q - } - } -} - -// replaceSucc replaces all occurrences of p in b's successor list with q. -// Ordinarily there should be at most one. -func (b *BasicBlock) replaceSucc(p, q *BasicBlock) { - for i, succ := range b.Succs { - if succ == p { - b.Succs[i] = q - } - } -} - -// removePred removes all occurrences of p in b's -// predecessor list and φ-nodes. 
-// Ordinarily there should be at most one. -func (b *BasicBlock) removePred(p *BasicBlock) { - phis := b.phis() - - // We must preserve edge order for φ-nodes. - j := 0 - for i, pred := range b.Preds { - if pred != p { - b.Preds[j] = b.Preds[i] - // Strike out φ-edge too. - for _, instr := range phis { - phi := instr.(*Phi) - phi.Edges[j] = phi.Edges[i] - } - j++ - } - } - // Nil out b.Preds[j:] and φ-edges[j:] to aid GC. - for i := j; i < len(b.Preds); i++ { - b.Preds[i] = nil - for _, instr := range phis { - instr.(*Phi).Edges[i] = nil - } - } - b.Preds = b.Preds[:j] - for _, instr := range phis { - phi := instr.(*Phi) - phi.Edges = phi.Edges[:j] - } -} diff --git a/vendor/golang.org/x/tools/go/ssa/blockopt.go b/vendor/golang.org/x/tools/go/ssa/blockopt.go deleted file mode 100644 index 7dabce8ca..000000000 --- a/vendor/golang.org/x/tools/go/ssa/blockopt.go +++ /dev/null @@ -1,183 +0,0 @@ -// Copyright 2013 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package ssa - -// Simple block optimizations to simplify the control flow graph. - -// TODO(adonovan): opt: instead of creating several "unreachable" blocks -// per function in the Builder, reuse a single one (e.g. at Blocks[1]) -// to reduce garbage. - -import ( - "fmt" - "os" -) - -// If true, perform sanity checking and show progress at each -// successive iteration of optimizeBlocks. Very verbose. -const debugBlockOpt = false - -// markReachable sets Index=-1 for all blocks reachable from b. -func markReachable(b *BasicBlock) { - b.Index = -1 - for _, succ := range b.Succs { - if succ.Index == 0 { - markReachable(succ) - } - } -} - -// deleteUnreachableBlocks marks all reachable blocks of f and -// eliminates (nils) all others, including possibly cyclic subgraphs. -func deleteUnreachableBlocks(f *Function) { - const white, black = 0, -1 - // We borrow b.Index temporarily as the mark bit. - for _, b := range f.Blocks { - b.Index = white - } - markReachable(f.Blocks[0]) - if f.Recover != nil { - markReachable(f.Recover) - } - for i, b := range f.Blocks { - if b.Index == white { - for _, c := range b.Succs { - if c.Index == black { - c.removePred(b) // delete white->black edge - } - } - if debugBlockOpt { - fmt.Fprintln(os.Stderr, "unreachable", b) - } - f.Blocks[i] = nil // delete b - } - } - f.removeNilBlocks() -} - -// jumpThreading attempts to apply simple jump-threading to block b, -// in which a->b->c become a->c if b is just a Jump. -// The result is true if the optimization was applied. -func jumpThreading(f *Function, b *BasicBlock) bool { - if b.Index == 0 { - return false // don't apply to entry block - } - if b.Instrs == nil { - return false - } - if _, ok := b.Instrs[0].(*Jump); !ok { - return false // not just a jump - } - c := b.Succs[0] - if c == b { - return false // don't apply to degenerate jump-to-self. - } - if c.hasPhi() { - return false // not sound without more effort - } - for j, a := range b.Preds { - a.replaceSucc(b, c) - - // If a now has two edges to c, replace its degenerate If by Jump. 
- if len(a.Succs) == 2 && a.Succs[0] == c && a.Succs[1] == c { - jump := new(Jump) - jump.setBlock(a) - a.Instrs[len(a.Instrs)-1] = jump - a.Succs = a.Succs[:1] - c.removePred(b) - } else { - if j == 0 { - c.replacePred(b, a) - } else { - c.Preds = append(c.Preds, a) - } - } - - if debugBlockOpt { - fmt.Fprintln(os.Stderr, "jumpThreading", a, b, c) - } - } - f.Blocks[b.Index] = nil // delete b - return true -} - -// fuseBlocks attempts to apply the block fusion optimization to block -// a, in which a->b becomes ab if len(a.Succs)==len(b.Preds)==1. -// The result is true if the optimization was applied. -func fuseBlocks(f *Function, a *BasicBlock) bool { - if len(a.Succs) != 1 { - return false - } - b := a.Succs[0] - if len(b.Preds) != 1 { - return false - } - - // Degenerate &&/|| ops may result in a straight-line CFG - // containing φ-nodes. (Ideally we'd replace such them with - // their sole operand but that requires Referrers, built later.) - if b.hasPhi() { - return false // not sound without further effort - } - - // Eliminate jump at end of A, then copy all of B across. - a.Instrs = append(a.Instrs[:len(a.Instrs)-1], b.Instrs...) - for _, instr := range b.Instrs { - instr.setBlock(a) - } - - // A inherits B's successors - a.Succs = append(a.succs2[:0], b.Succs...) - - // Fix up Preds links of all successors of B. - for _, c := range b.Succs { - c.replacePred(b, a) - } - - if debugBlockOpt { - fmt.Fprintln(os.Stderr, "fuseBlocks", a, b) - } - - f.Blocks[b.Index] = nil // delete b - return true -} - -// optimizeBlocks() performs some simple block optimizations on a -// completed function: dead block elimination, block fusion, jump -// threading. -func optimizeBlocks(f *Function) { - deleteUnreachableBlocks(f) - - // Loop until no further progress. - changed := true - for changed { - changed = false - - if debugBlockOpt { - f.WriteTo(os.Stderr) - mustSanityCheck(f, nil) - } - - for _, b := range f.Blocks { - // f.Blocks will temporarily contain nils to indicate - // deleted blocks; we remove them at the end. - if b == nil { - continue - } - - // Fuse blocks. b->c becomes bc. - if fuseBlocks(f, b) { - changed = true - } - - // a->b->c becomes a->c if b contains only a Jump. - if jumpThreading(f, b) { - changed = true - continue // (b was disconnected) - } - } - } - f.removeNilBlocks() -} diff --git a/vendor/golang.org/x/tools/go/ssa/builder.go b/vendor/golang.org/x/tools/go/ssa/builder.go deleted file mode 100644 index 72e906c38..000000000 --- a/vendor/golang.org/x/tools/go/ssa/builder.go +++ /dev/null @@ -1,2756 +0,0 @@ -// Copyright 2013 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package ssa - -// This file defines the builder, which builds SSA-form IR for function bodies. -// -// SSA construction has two phases, "create" and "build". First, one -// or more packages are created in any order by a sequence of calls to -// CreatePackage, either from syntax or from mere type information. -// Each created package has a complete set of Members (const, var, -// type, func) that can be accessed through methods like -// Program.FuncValue. -// -// It is not necessary to call CreatePackage for all dependencies of -// each syntax package, only for its direct imports. (In future -// perhaps even this restriction may be lifted.) 
-// -// Second, packages created from syntax are built, by one or more -// calls to Package.Build, which may be concurrent; or by a call to -// Program.Build, which builds all packages in parallel. Building -// traverses the type-annotated syntax tree of each function body and -// creates SSA-form IR, a control-flow graph of instructions, -// populating fields such as Function.Body, .Params, and others. -// -// Building may create additional methods, including: -// - wrapper methods (e.g. for embeddding, or implicit &recv) -// - bound method closures (e.g. for use(recv.f)) -// - thunks (e.g. for use(I.f) or use(T.f)) -// - generic instances (e.g. to produce f[int] from f[any]). -// As these methods are created, they are added to the build queue, -// and then processed in turn, until a fixed point is reached, -// Since these methods might belong to packages that were not -// created (by a call to CreatePackage), their Pkg field is unset. -// -// Instances of generic functions may be either instantiated (f[int] -// is a copy of f[T] with substitutions) or wrapped (f[int] delegates -// to f[T]), depending on the availability of generic syntax and the -// InstantiateGenerics mode flag. -// -// Each package has an initializer function named "init" that calls -// the initializer functions of each direct import, computes and -// assigns the initial value of each global variable, and calls each -// source-level function named "init". (These generate SSA functions -// named "init#1", "init#2", etc.) -// -// Runtime types -// -// Each MakeInterface operation is a conversion from a non-interface -// type to an interface type. The semantics of this operation requires -// a runtime type descriptor, which is the type portion of an -// interface, and the value abstracted by reflect.Type. -// -// The program accumulates all non-parameterized types that are -// encountered as MakeInterface operands, along with all types that -// may be derived from them using reflection. This set is available as -// Program.RuntimeTypes, and the methods of these types may be -// reachable via interface calls or reflection even if they are never -// referenced from the SSA IR. (In practice, algorithms such as RTA -// that compute reachability from package main perform their own -// tracking of runtime types at a finer grain, so this feature is not -// very useful.) -// -// Function literals -// -// Anonymous functions must be built as soon as they are encountered, -// as it may affect locals of the enclosing function, but they are not -// marked 'built' until the end of the outermost enclosing function. -// (Among other things, this causes them to be logged in top-down order.) -// -// The Function.build fields determines the algorithm for building the -// function body. It is cleared to mark that building is complete. - -import ( - "fmt" - "go/ast" - "go/constant" - "go/token" - "go/types" - "os" - "sync" - - "golang.org/x/tools/internal/aliases" - "golang.org/x/tools/internal/typeparams" - "golang.org/x/tools/internal/versions" -) - -type opaqueType struct{ name string } - -func (t *opaqueType) String() string { return t.name } -func (t *opaqueType) Underlying() types.Type { return t } - -var ( - varOk = newVar("ok", tBool) - varIndex = newVar("index", tInt) - - // Type constants. 
- tBool = types.Typ[types.Bool] - tByte = types.Typ[types.Byte] - tInt = types.Typ[types.Int] - tInvalid = types.Typ[types.Invalid] - tString = types.Typ[types.String] - tUntypedNil = types.Typ[types.UntypedNil] - tRangeIter = &opaqueType{"iter"} // the type of all "range" iterators - tEface = types.NewInterfaceType(nil, nil).Complete() - - // SSA Value constants. - vZero = intConst(0) - vOne = intConst(1) - vTrue = NewConst(constant.MakeBool(true), tBool) -) - -// builder holds state associated with the package currently being built. -// Its methods contain all the logic for AST-to-SSA conversion. -type builder struct { - // Invariant: 0 <= rtypes <= finished <= created.Len() - created *creator // functions created during building - finished int // Invariant: create[i].built holds for i in [0,finished) -} - -// cond emits to fn code to evaluate boolean condition e and jump -// to t or f depending on its value, performing various simplifications. -// -// Postcondition: fn.currentBlock is nil. -func (b *builder) cond(fn *Function, e ast.Expr, t, f *BasicBlock) { - switch e := e.(type) { - case *ast.ParenExpr: - b.cond(fn, e.X, t, f) - return - - case *ast.BinaryExpr: - switch e.Op { - case token.LAND: - ltrue := fn.newBasicBlock("cond.true") - b.cond(fn, e.X, ltrue, f) - fn.currentBlock = ltrue - b.cond(fn, e.Y, t, f) - return - - case token.LOR: - lfalse := fn.newBasicBlock("cond.false") - b.cond(fn, e.X, t, lfalse) - fn.currentBlock = lfalse - b.cond(fn, e.Y, t, f) - return - } - - case *ast.UnaryExpr: - if e.Op == token.NOT { - b.cond(fn, e.X, f, t) - return - } - } - - // A traditional compiler would simplify "if false" (etc) here - // but we do not, for better fidelity to the source code. - // - // The value of a constant condition may be platform-specific, - // and may cause blocks that are reachable in some configuration - // to be hidden from subsequent analyses such as bug-finding tools. - emitIf(fn, b.expr(fn, e), t, f) -} - -// logicalBinop emits code to fn to evaluate e, a &&- or -// ||-expression whose reified boolean value is wanted. -// The value is returned. -func (b *builder) logicalBinop(fn *Function, e *ast.BinaryExpr) Value { - rhs := fn.newBasicBlock("binop.rhs") - done := fn.newBasicBlock("binop.done") - - // T(e) = T(e.X) = T(e.Y) after untyped constants have been - // eliminated. - // TODO(adonovan): not true; MyBool==MyBool yields UntypedBool. - t := fn.typeOf(e) - - var short Value // value of the short-circuit path - switch e.Op { - case token.LAND: - b.cond(fn, e.X, rhs, done) - short = NewConst(constant.MakeBool(false), t) - - case token.LOR: - b.cond(fn, e.X, done, rhs) - short = NewConst(constant.MakeBool(true), t) - } - - // Is rhs unreachable? - if rhs.Preds == nil { - // Simplify false&&y to false, true||y to true. - fn.currentBlock = done - return short - } - - // Is done unreachable? - if done.Preds == nil { - // Simplify true&&y (or false||y) to y. - fn.currentBlock = rhs - return b.expr(fn, e.Y) - } - - // All edges from e.X to done carry the short-circuit value. - var edges []Value - for range done.Preds { - edges = append(edges, short) - } - - // The edge from e.Y to done carries the value of e.Y. 
- fn.currentBlock = rhs - edges = append(edges, b.expr(fn, e.Y)) - emitJump(fn, done) - fn.currentBlock = done - - phi := &Phi{Edges: edges, Comment: e.Op.String()} - phi.pos = e.OpPos - phi.typ = t - return done.emit(phi) -} - -// exprN lowers a multi-result expression e to SSA form, emitting code -// to fn and returning a single Value whose type is a *types.Tuple. -// The caller must access the components via Extract. -// -// Multi-result expressions include CallExprs in a multi-value -// assignment or return statement, and "value,ok" uses of -// TypeAssertExpr, IndexExpr (when X is a map), and UnaryExpr (when Op -// is token.ARROW). -func (b *builder) exprN(fn *Function, e ast.Expr) Value { - typ := fn.typeOf(e).(*types.Tuple) - switch e := e.(type) { - case *ast.ParenExpr: - return b.exprN(fn, e.X) - - case *ast.CallExpr: - // Currently, no built-in function nor type conversion - // has multiple results, so we can avoid some of the - // cases for single-valued CallExpr. - var c Call - b.setCall(fn, e, &c.Call) - c.typ = typ - return fn.emit(&c) - - case *ast.IndexExpr: - mapt := typeparams.CoreType(fn.typeOf(e.X)).(*types.Map) // ,ok must be a map. - lookup := &Lookup{ - X: b.expr(fn, e.X), - Index: emitConv(fn, b.expr(fn, e.Index), mapt.Key()), - CommaOk: true, - } - lookup.setType(typ) - lookup.setPos(e.Lbrack) - return fn.emit(lookup) - - case *ast.TypeAssertExpr: - return emitTypeTest(fn, b.expr(fn, e.X), typ.At(0).Type(), e.Lparen) - - case *ast.UnaryExpr: // must be receive <- - unop := &UnOp{ - Op: token.ARROW, - X: b.expr(fn, e.X), - CommaOk: true, - } - unop.setType(typ) - unop.setPos(e.OpPos) - return fn.emit(unop) - } - panic(fmt.Sprintf("exprN(%T) in %s", e, fn)) -} - -// builtin emits to fn SSA instructions to implement a call to the -// built-in function obj with the specified arguments -// and return type. It returns the value defined by the result. -// -// The result is nil if no special handling was required; in this case -// the caller should treat this like an ordinary library function -// call. -func (b *builder) builtin(fn *Function, obj *types.Builtin, args []ast.Expr, typ types.Type, pos token.Pos) Value { - typ = fn.typ(typ) - switch obj.Name() { - case "make": - switch ct := typeparams.CoreType(typ).(type) { - case *types.Slice: - n := b.expr(fn, args[1]) - m := n - if len(args) == 3 { - m = b.expr(fn, args[2]) - } - if m, ok := m.(*Const); ok { - // treat make([]T, n, m) as new([m]T)[:n] - cap := m.Int64() - at := types.NewArray(ct.Elem(), cap) - v := &Slice{ - X: emitNew(fn, at, pos, "makeslice"), - High: n, - } - v.setPos(pos) - v.setType(typ) - return fn.emit(v) - } - v := &MakeSlice{ - Len: n, - Cap: m, - } - v.setPos(pos) - v.setType(typ) - return fn.emit(v) - - case *types.Map: - var res Value - if len(args) == 2 { - res = b.expr(fn, args[1]) - } - v := &MakeMap{Reserve: res} - v.setPos(pos) - v.setType(typ) - return fn.emit(v) - - case *types.Chan: - var sz Value = vZero - if len(args) == 2 { - sz = b.expr(fn, args[1]) - } - v := &MakeChan{Size: sz} - v.setPos(pos) - v.setType(typ) - return fn.emit(v) - } - - case "new": - return emitNew(fn, typeparams.MustDeref(typ), pos, "new") - - case "len", "cap": - // Special case: len or cap of an array or *array is - // based on the type, not the value which may be nil. - // We must still evaluate the value, though. (If it - // was side-effect free, the whole call would have - // been constant-folded.) 
- t, _ := deref(fn.typeOf(args[0])) - if at, ok := typeparams.CoreType(t).(*types.Array); ok { - b.expr(fn, args[0]) // for effects only - return intConst(at.Len()) - } - // Otherwise treat as normal. - - case "panic": - fn.emit(&Panic{ - X: emitConv(fn, b.expr(fn, args[0]), tEface), - pos: pos, - }) - fn.currentBlock = fn.newBasicBlock("unreachable") - return vTrue // any non-nil Value will do - } - return nil // treat all others as a regular function call -} - -// addr lowers a single-result addressable expression e to SSA form, -// emitting code to fn and returning the location (an lvalue) defined -// by the expression. -// -// If escaping is true, addr marks the base variable of the -// addressable expression e as being a potentially escaping pointer -// value. For example, in this code: -// -// a := A{ -// b: [1]B{B{c: 1}} -// } -// return &a.b[0].c -// -// the application of & causes a.b[0].c to have its address taken, -// which means that ultimately the local variable a must be -// heap-allocated. This is a simple but very conservative escape -// analysis. -// -// Operations forming potentially escaping pointers include: -// - &x, including when implicit in method call or composite literals. -// - a[:] iff a is an array (not *array) -// - references to variables in lexically enclosing functions. -func (b *builder) addr(fn *Function, e ast.Expr, escaping bool) lvalue { - switch e := e.(type) { - case *ast.Ident: - if isBlankIdent(e) { - return blank{} - } - obj := fn.objectOf(e).(*types.Var) - var v Value - if g := fn.Prog.packageLevelMember(obj); g != nil { - v = g.(*Global) // var (address) - } else { - v = fn.lookup(obj, escaping) - } - return &address{addr: v, pos: e.Pos(), expr: e} - - case *ast.CompositeLit: - typ, _ := deref(fn.typeOf(e)) - var v *Alloc - if escaping { - v = emitNew(fn, typ, e.Lbrace, "complit") - } else { - v = emitLocal(fn, typ, e.Lbrace, "complit") - } - var sb storebuf - b.compLit(fn, v, e, true, &sb) - sb.emit(fn) - return &address{addr: v, pos: e.Lbrace, expr: e} - - case *ast.ParenExpr: - return b.addr(fn, e.X, escaping) - - case *ast.SelectorExpr: - sel := fn.selection(e) - if sel == nil { - // qualified identifier - return b.addr(fn, e.Sel, escaping) - } - if sel.kind != types.FieldVal { - panic(sel) - } - wantAddr := true - v := b.receiver(fn, e.X, wantAddr, escaping, sel) - index := sel.index[len(sel.index)-1] - fld := fieldOf(typeparams.MustDeref(v.Type()), index) // v is an addr. - - // Due to the two phases of resolving AssignStmt, a panic from x.f = p() - // when x is nil is required to come after the side-effects of - // evaluating x and p(). - emit := func(fn *Function) Value { - return emitFieldSelection(fn, v, index, true, e.Sel) - } - return &lazyAddress{addr: emit, t: fld.Type(), pos: e.Sel.Pos(), expr: e.Sel} - - case *ast.IndexExpr: - xt := fn.typeOf(e.X) - elem, mode := indexType(xt) - var x Value - var et types.Type - switch mode { - case ixArrVar: // array, array|slice, array|*array, or array|*array|slice. 
- x = b.addr(fn, e.X, escaping).address(fn) - et = types.NewPointer(elem) - case ixVar: // *array, slice, *array|slice - x = b.expr(fn, e.X) - et = types.NewPointer(elem) - case ixMap: - mt := typeparams.CoreType(xt).(*types.Map) - return &element{ - m: b.expr(fn, e.X), - k: emitConv(fn, b.expr(fn, e.Index), mt.Key()), - t: mt.Elem(), - pos: e.Lbrack, - } - default: - panic("unexpected container type in IndexExpr: " + xt.String()) - } - index := b.expr(fn, e.Index) - if isUntyped(index.Type()) { - index = emitConv(fn, index, tInt) - } - // Due to the two phases of resolving AssignStmt, a panic from x[i] = p() - // when x is nil or i is out-of-bounds is required to come after the - // side-effects of evaluating x, i and p(). - emit := func(fn *Function) Value { - v := &IndexAddr{ - X: x, - Index: index, - } - v.setPos(e.Lbrack) - v.setType(et) - return fn.emit(v) - } - return &lazyAddress{addr: emit, t: typeparams.MustDeref(et), pos: e.Lbrack, expr: e} - - case *ast.StarExpr: - return &address{addr: b.expr(fn, e.X), pos: e.Star, expr: e} - } - - panic(fmt.Sprintf("unexpected address expression: %T", e)) -} - -type store struct { - lhs lvalue - rhs Value -} - -type storebuf struct{ stores []store } - -func (sb *storebuf) store(lhs lvalue, rhs Value) { - sb.stores = append(sb.stores, store{lhs, rhs}) -} - -func (sb *storebuf) emit(fn *Function) { - for _, s := range sb.stores { - s.lhs.store(fn, s.rhs) - } -} - -// assign emits to fn code to initialize the lvalue loc with the value -// of expression e. If isZero is true, assign assumes that loc holds -// the zero value for its type. -// -// This is equivalent to loc.store(fn, b.expr(fn, e)), but may generate -// better code in some cases, e.g., for composite literals in an -// addressable location. -// -// If sb is not nil, assign generates code to evaluate expression e, but -// not to update loc. Instead, the necessary stores are appended to the -// storebuf sb so that they can be executed later. This allows correct -// in-place update of existing variables when the RHS is a composite -// literal that may reference parts of the LHS. -func (b *builder) assign(fn *Function, loc lvalue, e ast.Expr, isZero bool, sb *storebuf) { - // Can we initialize it in place? - if e, ok := unparen(e).(*ast.CompositeLit); ok { - // A CompositeLit never evaluates to a pointer, - // so if the type of the location is a pointer, - // an &-operation is implied. - if _, ok := loc.(blank); !ok { // avoid calling blank.typ() - if _, ok := deref(loc.typ()); ok { - ptr := b.addr(fn, e, true).address(fn) - // copy address - if sb != nil { - sb.store(loc, ptr) - } else { - loc.store(fn, ptr) - } - return - } - } - - if _, ok := loc.(*address); ok { - if isNonTypeParamInterface(loc.typ()) { - // e.g. var x interface{} = T{...} - // Can't in-place initialize an interface value. - // Fall back to copying. - } else { - // x = T{...} or x := T{...} - addr := loc.address(fn) - if sb != nil { - b.compLit(fn, addr, e, isZero, sb) - } else { - var sb storebuf - b.compLit(fn, addr, e, isZero, &sb) - sb.emit(fn) - } - - // Subtle: emit debug ref for aggregate types only; - // slice and map are handled by store ops in compLit. 
- switch typeparams.CoreType(loc.typ()).(type) { - case *types.Struct, *types.Array: - emitDebugRef(fn, e, addr, true) - } - - return - } - } - } - - // simple case: just copy - rhs := b.expr(fn, e) - if sb != nil { - sb.store(loc, rhs) - } else { - loc.store(fn, rhs) - } -} - -// expr lowers a single-result expression e to SSA form, emitting code -// to fn and returning the Value defined by the expression. -func (b *builder) expr(fn *Function, e ast.Expr) Value { - e = unparen(e) - - tv := fn.info.Types[e] - - // Is expression a constant? - if tv.Value != nil { - return NewConst(tv.Value, fn.typ(tv.Type)) - } - - var v Value - if tv.Addressable() { - // Prefer pointer arithmetic ({Index,Field}Addr) followed - // by Load over subelement extraction (e.g. Index, Field), - // to avoid large copies. - v = b.addr(fn, e, false).load(fn) - } else { - v = b.expr0(fn, e, tv) - } - if fn.debugInfo() { - emitDebugRef(fn, e, v, false) - } - return v -} - -func (b *builder) expr0(fn *Function, e ast.Expr, tv types.TypeAndValue) Value { - switch e := e.(type) { - case *ast.BasicLit: - panic("non-constant BasicLit") // unreachable - - case *ast.FuncLit: - /* function literal */ - anon := &Function{ - name: fmt.Sprintf("%s$%d", fn.Name(), 1+len(fn.AnonFuncs)), - Signature: fn.typeOf(e.Type).(*types.Signature), - pos: e.Type.Func, - parent: fn, - anonIdx: int32(len(fn.AnonFuncs)), - Pkg: fn.Pkg, - Prog: fn.Prog, - syntax: e, - info: fn.info, - goversion: fn.goversion, - build: (*builder).buildFromSyntax, - topLevelOrigin: nil, // use anonIdx to lookup an anon instance's origin. - typeparams: fn.typeparams, // share the parent's type parameters. - typeargs: fn.typeargs, // share the parent's type arguments. - subst: fn.subst, // share the parent's type substitutions. - } - fn.AnonFuncs = append(fn.AnonFuncs, anon) - // Build anon immediately, as it may cause fn's locals to escape. - // (It is not marked 'built' until the end of the enclosing FuncDecl.) - anon.build(b, anon) - if anon.FreeVars == nil { - return anon - } - v := &MakeClosure{Fn: anon} - v.setType(fn.typ(tv.Type)) - for _, fv := range anon.FreeVars { - v.Bindings = append(v.Bindings, fv.outer) - fv.outer = nil - } - return fn.emit(v) - - case *ast.TypeAssertExpr: // single-result form only - return emitTypeAssert(fn, b.expr(fn, e.X), fn.typ(tv.Type), e.Lparen) - - case *ast.CallExpr: - if fn.info.Types[e.Fun].IsType() { - // Explicit type conversion, e.g. string(x) or big.Int(x) - x := b.expr(fn, e.Args[0]) - y := emitConv(fn, x, fn.typ(tv.Type)) - if y != x { - switch y := y.(type) { - case *Convert: - y.pos = e.Lparen - case *ChangeType: - y.pos = e.Lparen - case *MakeInterface: - y.pos = e.Lparen - case *SliceToArrayPointer: - y.pos = e.Lparen - case *UnOp: // conversion from slice to array. - y.pos = e.Lparen - } - } - return y - } - // Call to "intrinsic" built-ins, e.g. new, make, panic. - if id, ok := unparen(e.Fun).(*ast.Ident); ok { - if obj, ok := fn.info.Uses[id].(*types.Builtin); ok { - if v := b.builtin(fn, obj, e.Args, fn.typ(tv.Type), e.Lparen); v != nil { - return v - } - } - } - // Regular function call. - var v Call - b.setCall(fn, e, &v.Call) - v.setType(fn.typ(tv.Type)) - return fn.emit(&v) - - case *ast.UnaryExpr: - switch e.Op { - case token.AND: // &X --- potentially escaping. - addr := b.addr(fn, e.X, true) - if _, ok := unparen(e.X).(*ast.StarExpr); ok { - // &*p must panic if p is nil (http://golang.org/s/go12nil). - // For simplicity, we'll just (suboptimally) rely - // on the side effects of a load. 
- // TODO(adonovan): emit dedicated nilcheck. - addr.load(fn) - } - return addr.address(fn) - case token.ADD: - return b.expr(fn, e.X) - case token.NOT, token.ARROW, token.SUB, token.XOR: // ! <- - ^ - v := &UnOp{ - Op: e.Op, - X: b.expr(fn, e.X), - } - v.setPos(e.OpPos) - v.setType(fn.typ(tv.Type)) - return fn.emit(v) - default: - panic(e.Op) - } - - case *ast.BinaryExpr: - switch e.Op { - case token.LAND, token.LOR: - return b.logicalBinop(fn, e) - case token.SHL, token.SHR: - fallthrough - case token.ADD, token.SUB, token.MUL, token.QUO, token.REM, token.AND, token.OR, token.XOR, token.AND_NOT: - return emitArith(fn, e.Op, b.expr(fn, e.X), b.expr(fn, e.Y), fn.typ(tv.Type), e.OpPos) - - case token.EQL, token.NEQ, token.GTR, token.LSS, token.LEQ, token.GEQ: - cmp := emitCompare(fn, e.Op, b.expr(fn, e.X), b.expr(fn, e.Y), e.OpPos) - // The type of x==y may be UntypedBool. - return emitConv(fn, cmp, types.Default(fn.typ(tv.Type))) - default: - panic("illegal op in BinaryExpr: " + e.Op.String()) - } - - case *ast.SliceExpr: - var low, high, max Value - var x Value - xtyp := fn.typeOf(e.X) - switch typeparams.CoreType(xtyp).(type) { - case *types.Array: - // Potentially escaping. - x = b.addr(fn, e.X, true).address(fn) - case *types.Basic, *types.Slice, *types.Pointer: // *array - x = b.expr(fn, e.X) - default: - // core type exception? - if isBytestring(xtyp) { - x = b.expr(fn, e.X) // bytestring is handled as string and []byte. - } else { - panic("unexpected sequence type in SliceExpr") - } - } - if e.Low != nil { - low = b.expr(fn, e.Low) - } - if e.High != nil { - high = b.expr(fn, e.High) - } - if e.Slice3 { - max = b.expr(fn, e.Max) - } - v := &Slice{ - X: x, - Low: low, - High: high, - Max: max, - } - v.setPos(e.Lbrack) - v.setType(fn.typ(tv.Type)) - return fn.emit(v) - - case *ast.Ident: - obj := fn.info.Uses[e] - // Universal built-in or nil? - switch obj := obj.(type) { - case *types.Builtin: - return &Builtin{name: obj.Name(), sig: fn.instanceType(e).(*types.Signature)} - case *types.Nil: - return zeroConst(fn.instanceType(e)) - } - - // Package-level func or var? - // (obj must belong to same package or a direct import.) - if v := fn.Prog.packageLevelMember(obj); v != nil { - if g, ok := v.(*Global); ok { - return emitLoad(fn, g) // var (address) - } - callee := v.(*Function) // (func) - if callee.typeparams.Len() > 0 { - targs := fn.subst.types(instanceArgs(fn.info, e)) - callee = callee.instance(targs, b.created) - } - return callee - } - // Local var. - return emitLoad(fn, fn.lookup(obj.(*types.Var), false)) // var (address) - - case *ast.SelectorExpr: - sel := fn.selection(e) - if sel == nil { - // builtin unsafe.{Add,Slice} - if obj, ok := fn.info.Uses[e.Sel].(*types.Builtin); ok { - return &Builtin{name: obj.Name(), sig: fn.typ(tv.Type).(*types.Signature)} - } - // qualified identifier - return b.expr(fn, e.Sel) - } - switch sel.kind { - case types.MethodExpr: - // (*T).f or T.f, the method f from the method-set of type T. - // The result is a "thunk". - thunk := createThunk(fn.Prog, sel, b.created) - return emitConv(fn, thunk, fn.typ(tv.Type)) - - case types.MethodVal: - // e.f where e is an expression and f is a method. - // The result is a "bound". - obj := sel.obj.(*types.Func) - rt := fn.typ(recvType(obj)) - _, wantAddr := deref(rt) - escaping := true - v := b.receiver(fn, e.X, wantAddr, escaping, sel) - - if types.IsInterface(rt) { - // If v may be an interface type I (after instantiating), - // we must emit a check that v is non-nil. 
- if recv, ok := aliases.Unalias(sel.recv).(*types.TypeParam); ok { - // Emit a nil check if any possible instantiation of the - // type parameter is an interface type. - if typeSetOf(recv).Len() > 0 { - // recv has a concrete term its typeset. - // So it cannot be instantiated as an interface. - // - // Example: - // func _[T interface{~int; Foo()}] () { - // var v T - // _ = v.Foo // <-- MethodVal - // } - } else { - // rt may be instantiated as an interface. - // Emit nil check: typeassert (any(v)).(any). - emitTypeAssert(fn, emitConv(fn, v, tEface), tEface, token.NoPos) - } - } else { - // non-type param interface - // Emit nil check: typeassert v.(I). - emitTypeAssert(fn, v, rt, e.Sel.Pos()) - } - } - if targs := receiverTypeArgs(obj); len(targs) > 0 { - // obj is generic. - obj = fn.Prog.canon.instantiateMethod(obj, fn.subst.types(targs), fn.Prog.ctxt) - } - c := &MakeClosure{ - Fn: createBound(fn.Prog, obj, b.created), - Bindings: []Value{v}, - } - c.setPos(e.Sel.Pos()) - c.setType(fn.typ(tv.Type)) - return fn.emit(c) - - case types.FieldVal: - indices := sel.index - last := len(indices) - 1 - v := b.expr(fn, e.X) - v = emitImplicitSelections(fn, v, indices[:last], e.Pos()) - v = emitFieldSelection(fn, v, indices[last], false, e.Sel) - return v - } - - panic("unexpected expression-relative selector") - - case *ast.IndexListExpr: - // f[X, Y] must be a generic function - if !instance(fn.info, e.X) { - panic("unexpected expression-could not match index list to instantiation") - } - return b.expr(fn, e.X) // Handle instantiation within the *Ident or *SelectorExpr cases. - - case *ast.IndexExpr: - if instance(fn.info, e.X) { - return b.expr(fn, e.X) // Handle instantiation within the *Ident or *SelectorExpr cases. - } - // not a generic instantiation. - xt := fn.typeOf(e.X) - switch et, mode := indexType(xt); mode { - case ixVar: - // Addressable slice/array; use IndexAddr and Load. - return b.addr(fn, e, false).load(fn) - - case ixArrVar, ixValue: - // An array in a register, a string or a combined type that contains - // either an [_]array (ixArrVar) or string (ixValue). - - // Note: for ixArrVar and CoreType(xt)==nil can be IndexAddr and Load. - index := b.expr(fn, e.Index) - if isUntyped(index.Type()) { - index = emitConv(fn, index, tInt) - } - v := &Index{ - X: b.expr(fn, e.X), - Index: index, - } - v.setPos(e.Lbrack) - v.setType(et) - return fn.emit(v) - - case ixMap: - ct := typeparams.CoreType(xt).(*types.Map) - v := &Lookup{ - X: b.expr(fn, e.X), - Index: emitConv(fn, b.expr(fn, e.Index), ct.Key()), - } - v.setPos(e.Lbrack) - v.setType(ct.Elem()) - return fn.emit(v) - default: - panic("unexpected container type in IndexExpr: " + xt.String()) - } - - case *ast.CompositeLit, *ast.StarExpr: - // Addressable types (lvalues) - return b.addr(fn, e, false).load(fn) - } - - panic(fmt.Sprintf("unexpected expr: %T", e)) -} - -// stmtList emits to fn code for all statements in list. -func (b *builder) stmtList(fn *Function, list []ast.Stmt) { - for _, s := range list { - b.stmt(fn, s) - } -} - -// receiver emits to fn code for expression e in the "receiver" -// position of selection e.f (where f may be a field or a method) and -// returns the effective receiver after applying the implicit field -// selections of sel. -// -// wantAddr requests that the result is an address. If -// !sel.indirect, this may require that e be built in addr() mode; it -// must thus be addressable. -// -// escaping is defined as per builder.addr(). 
-func (b *builder) receiver(fn *Function, e ast.Expr, wantAddr, escaping bool, sel *selection) Value { - var v Value - if _, eptr := deref(fn.typeOf(e)); wantAddr && !sel.indirect && !eptr { - v = b.addr(fn, e, escaping).address(fn) - } else { - v = b.expr(fn, e) - } - - last := len(sel.index) - 1 - // The position of implicit selection is the position of the inducing receiver expression. - v = emitImplicitSelections(fn, v, sel.index[:last], e.Pos()) - if types.IsInterface(v.Type()) { - // When v is an interface, sel.Kind()==MethodValue and v.f is invoked. - // So v is not loaded, even if v has a pointer core type. - } else if _, vptr := deref(v.Type()); !wantAddr && vptr { - v = emitLoad(fn, v) - } - return v -} - -// setCallFunc populates the function parts of a CallCommon structure -// (Func, Method, Recv, Args[0]) based on the kind of invocation -// occurring in e. -func (b *builder) setCallFunc(fn *Function, e *ast.CallExpr, c *CallCommon) { - c.pos = e.Lparen - - // Is this a method call? - if selector, ok := unparen(e.Fun).(*ast.SelectorExpr); ok { - sel := fn.selection(selector) - if sel != nil && sel.kind == types.MethodVal { - obj := sel.obj.(*types.Func) - recv := recvType(obj) - - _, wantAddr := deref(recv) - escaping := true - v := b.receiver(fn, selector.X, wantAddr, escaping, sel) - if types.IsInterface(recv) { - // Invoke-mode call. - c.Value = v // possibly type param - c.Method = obj - } else { - // "Call"-mode call. - c.Value = fn.Prog.objectMethod(obj, b.created) - c.Args = append(c.Args, v) - } - return - } - - // sel.kind==MethodExpr indicates T.f() or (*T).f(): - // a statically dispatched call to the method f in the - // method-set of T or *T. T may be an interface. - // - // e.Fun would evaluate to a concrete method, interface - // wrapper function, or promotion wrapper. - // - // For now, we evaluate it in the usual way. - // - // TODO(adonovan): opt: inline expr() here, to make the - // call static and to avoid generation of wrappers. - // It's somewhat tricky as it may consume the first - // actual parameter if the call is "invoke" mode. - // - // Examples: - // type T struct{}; func (T) f() {} // "call" mode - // type T interface { f() } // "invoke" mode - // - // type S struct{ T } - // - // var s S - // S.f(s) - // (*S).f(&s) - // - // Suggested approach: - // - consume the first actual parameter expression - // and build it with b.expr(). - // - apply implicit field selections. - // - use MethodVal logic to populate fields of c. - } - - // Evaluate the function operand in the usual way. - c.Value = b.expr(fn, e.Fun) -} - -// emitCallArgs emits to f code for the actual parameters of call e to -// a (possibly built-in) function of effective type sig. -// The argument values are appended to args, which is then returned. -func (b *builder) emitCallArgs(fn *Function, sig *types.Signature, e *ast.CallExpr, args []Value) []Value { - // f(x, y, z...): pass slice z straight through. - if e.Ellipsis != 0 { - for i, arg := range e.Args { - v := emitConv(fn, b.expr(fn, arg), sig.Params().At(i).Type()) - args = append(args, v) - } - return args - } - - offset := len(args) // 1 if call has receiver, 0 otherwise - - // Evaluate actual parameter expressions. - // - // If this is a chained call of the form f(g()) where g has - // multiple return values (MRV), they are flattened out into - // args; a suffix of them may end up in a varargs slice. 
- for _, arg := range e.Args { - v := b.expr(fn, arg) - if ttuple, ok := v.Type().(*types.Tuple); ok { // MRV chain - for i, n := 0, ttuple.Len(); i < n; i++ { - args = append(args, emitExtract(fn, v, i)) - } - } else { - args = append(args, v) - } - } - - // Actual->formal assignability conversions for normal parameters. - np := sig.Params().Len() // number of normal parameters - if sig.Variadic() { - np-- - } - for i := 0; i < np; i++ { - args[offset+i] = emitConv(fn, args[offset+i], sig.Params().At(i).Type()) - } - - // Actual->formal assignability conversions for variadic parameter, - // and construction of slice. - if sig.Variadic() { - varargs := args[offset+np:] - st := sig.Params().At(np).Type().(*types.Slice) - vt := st.Elem() - if len(varargs) == 0 { - args = append(args, zeroConst(st)) - } else { - // Replace a suffix of args with a slice containing it. - at := types.NewArray(vt, int64(len(varargs))) - a := emitNew(fn, at, token.NoPos, "varargs") - a.setPos(e.Rparen) - for i, arg := range varargs { - iaddr := &IndexAddr{ - X: a, - Index: intConst(int64(i)), - } - iaddr.setType(types.NewPointer(vt)) - fn.emit(iaddr) - emitStore(fn, iaddr, arg, arg.Pos()) - } - s := &Slice{X: a} - s.setType(st) - args[offset+np] = fn.emit(s) - args = args[:offset+np+1] - } - } - return args -} - -// setCall emits to fn code to evaluate all the parameters of a function -// call e, and populates *c with those values. -func (b *builder) setCall(fn *Function, e *ast.CallExpr, c *CallCommon) { - // First deal with the f(...) part and optional receiver. - b.setCallFunc(fn, e, c) - - // Then append the other actual parameters. - sig, _ := typeparams.CoreType(fn.typeOf(e.Fun)).(*types.Signature) - if sig == nil { - panic(fmt.Sprintf("no signature for call of %s", e.Fun)) - } - c.Args = b.emitCallArgs(fn, sig, e, c.Args) -} - -// assignOp emits to fn code to perform loc = val. -func (b *builder) assignOp(fn *Function, loc lvalue, val Value, op token.Token, pos token.Pos) { - loc.store(fn, emitArith(fn, op, loc.load(fn), val, loc.typ(), pos)) -} - -// localValueSpec emits to fn code to define all of the vars in the -// function-local ValueSpec, spec. -func (b *builder) localValueSpec(fn *Function, spec *ast.ValueSpec) { - switch { - case len(spec.Values) == len(spec.Names): - // e.g. var x, y = 0, 1 - // 1:1 assignment - for i, id := range spec.Names { - if !isBlankIdent(id) { - emitLocalVar(fn, identVar(fn, id)) - } - lval := b.addr(fn, id, false) // non-escaping - b.assign(fn, lval, spec.Values[i], true, nil) - } - - case len(spec.Values) == 0: - // e.g. var x, y int - // Locals are implicitly zero-initialized. - for _, id := range spec.Names { - if !isBlankIdent(id) { - lhs := emitLocalVar(fn, identVar(fn, id)) - if fn.debugInfo() { - emitDebugRef(fn, id, lhs, true) - } - } - } - - default: - // e.g. var x, y = pos() - tuple := b.exprN(fn, spec.Values[0]) - for i, id := range spec.Names { - if !isBlankIdent(id) { - emitLocalVar(fn, identVar(fn, id)) - lhs := b.addr(fn, id, false) // non-escaping - lhs.store(fn, emitExtract(fn, tuple, i)) - } - } - } -} - -// assignStmt emits code to fn for a parallel assignment of rhss to lhss. -// isDef is true if this is a short variable declaration (:=). -// -// Note the similarity with localValueSpec. -func (b *builder) assignStmt(fn *Function, lhss, rhss []ast.Expr, isDef bool) { - // Side effects of all LHSs and RHSs must occur in left-to-right order. 
- lvals := make([]lvalue, len(lhss)) - isZero := make([]bool, len(lhss)) - for i, lhs := range lhss { - var lval lvalue = blank{} - if !isBlankIdent(lhs) { - if isDef { - if obj, ok := fn.info.Defs[lhs.(*ast.Ident)].(*types.Var); ok { - emitLocalVar(fn, obj) - isZero[i] = true - } - } - lval = b.addr(fn, lhs, false) // non-escaping - } - lvals[i] = lval - } - if len(lhss) == len(rhss) { - // Simple assignment: x = f() (!isDef) - // Parallel assignment: x, y = f(), g() (!isDef) - // or short var decl: x, y := f(), g() (isDef) - // - // In all cases, the RHSs may refer to the LHSs, - // so we need a storebuf. - var sb storebuf - for i := range rhss { - b.assign(fn, lvals[i], rhss[i], isZero[i], &sb) - } - sb.emit(fn) - } else { - // e.g. x, y = pos() - tuple := b.exprN(fn, rhss[0]) - emitDebugRef(fn, rhss[0], tuple, false) - for i, lval := range lvals { - lval.store(fn, emitExtract(fn, tuple, i)) - } - } -} - -// arrayLen returns the length of the array whose composite literal elements are elts. -func (b *builder) arrayLen(fn *Function, elts []ast.Expr) int64 { - var max int64 = -1 - var i int64 = -1 - for _, e := range elts { - if kv, ok := e.(*ast.KeyValueExpr); ok { - i = b.expr(fn, kv.Key).(*Const).Int64() - } else { - i++ - } - if i > max { - max = i - } - } - return max + 1 -} - -// compLit emits to fn code to initialize a composite literal e at -// address addr with type typ. -// -// Nested composite literals are recursively initialized in place -// where possible. If isZero is true, compLit assumes that addr -// holds the zero value for typ. -// -// Because the elements of a composite literal may refer to the -// variables being updated, as in the second line below, -// -// x := T{a: 1} -// x = T{a: x.a} -// -// all the reads must occur before all the writes. Thus all stores to -// loc are emitted to the storebuf sb for later execution. -// -// A CompositeLit may have pointer type only in the recursive (nested) -// case when the type name is implicit. e.g. in []*T{{}}, the inner -// literal has type *T behaves like &T{}. -// In that case, addr must hold a T, not a *T. 
-func (b *builder) compLit(fn *Function, addr Value, e *ast.CompositeLit, isZero bool, sb *storebuf) { - typ, _ := deref(fn.typeOf(e)) // type with name [may be type param] - switch t := typeparams.CoreType(typ).(type) { - case *types.Struct: - if !isZero && len(e.Elts) != t.NumFields() { - // memclear - zt, _ := deref(addr.Type()) - sb.store(&address{addr, e.Lbrace, nil}, zeroConst(zt)) - isZero = true - } - for i, e := range e.Elts { - fieldIndex := i - pos := e.Pos() - if kv, ok := e.(*ast.KeyValueExpr); ok { - fname := kv.Key.(*ast.Ident).Name - for i, n := 0, t.NumFields(); i < n; i++ { - sf := t.Field(i) - if sf.Name() == fname { - fieldIndex = i - pos = kv.Colon - e = kv.Value - break - } - } - } - sf := t.Field(fieldIndex) - faddr := &FieldAddr{ - X: addr, - Field: fieldIndex, - } - faddr.setPos(pos) - faddr.setType(types.NewPointer(sf.Type())) - fn.emit(faddr) - b.assign(fn, &address{addr: faddr, pos: pos, expr: e}, e, isZero, sb) - } - - case *types.Array, *types.Slice: - var at *types.Array - var array Value - switch t := aliases.Unalias(t).(type) { - case *types.Slice: - at = types.NewArray(t.Elem(), b.arrayLen(fn, e.Elts)) - array = emitNew(fn, at, e.Lbrace, "slicelit") - case *types.Array: - at = t - array = addr - - if !isZero && int64(len(e.Elts)) != at.Len() { - // memclear - zt, _ := deref(array.Type()) - sb.store(&address{array, e.Lbrace, nil}, zeroConst(zt)) - } - } - - var idx *Const - for _, e := range e.Elts { - pos := e.Pos() - if kv, ok := e.(*ast.KeyValueExpr); ok { - idx = b.expr(fn, kv.Key).(*Const) - pos = kv.Colon - e = kv.Value - } else { - var idxval int64 - if idx != nil { - idxval = idx.Int64() + 1 - } - idx = intConst(idxval) - } - iaddr := &IndexAddr{ - X: array, - Index: idx, - } - iaddr.setType(types.NewPointer(at.Elem())) - fn.emit(iaddr) - if t != at { // slice - // backing array is unaliased => storebuf not needed. - b.assign(fn, &address{addr: iaddr, pos: pos, expr: e}, e, true, nil) - } else { - b.assign(fn, &address{addr: iaddr, pos: pos, expr: e}, e, true, sb) - } - } - - if t != at { // slice - s := &Slice{X: array} - s.setPos(e.Lbrace) - s.setType(typ) - sb.store(&address{addr: addr, pos: e.Lbrace, expr: e}, fn.emit(s)) - } - - case *types.Map: - m := &MakeMap{Reserve: intConst(int64(len(e.Elts)))} - m.setPos(e.Lbrace) - m.setType(typ) - fn.emit(m) - for _, e := range e.Elts { - e := e.(*ast.KeyValueExpr) - - // If a key expression in a map literal is itself a - // composite literal, the type may be omitted. - // For example: - // map[*struct{}]bool{{}: true} - // An &-operation may be implied: - // map[*struct{}]bool{&struct{}{}: true} - wantAddr := false - if _, ok := unparen(e.Key).(*ast.CompositeLit); ok { - _, wantAddr = deref(t.Key()) - } - - var key Value - if wantAddr { - // A CompositeLit never evaluates to a pointer, - // so if the type of the location is a pointer, - // an &-operation is implied. - key = b.addr(fn, e.Key, true).address(fn) - } else { - key = b.expr(fn, e.Key) - } - - loc := element{ - m: m, - k: emitConv(fn, key, t.Key()), - t: t.Elem(), - pos: e.Colon, - } - - // We call assign() only because it takes care - // of any &-operation required in the recursive - // case, e.g., - // map[int]*struct{}{0: {}} implies &struct{}{}. - // In-place update is of course impossible, - // and no storebuf is needed. 
- b.assign(fn, &loc, e.Value, true, nil) - } - sb.store(&address{addr: addr, pos: e.Lbrace, expr: e}, m) - - default: - panic("unexpected CompositeLit type: " + typ.String()) - } -} - -// switchStmt emits to fn code for the switch statement s, optionally -// labelled by label. -func (b *builder) switchStmt(fn *Function, s *ast.SwitchStmt, label *lblock) { - // We treat SwitchStmt like a sequential if-else chain. - // Multiway dispatch can be recovered later by ssautil.Switches() - // to those cases that are free of side effects. - if s.Init != nil { - b.stmt(fn, s.Init) - } - var tag Value = vTrue - if s.Tag != nil { - tag = b.expr(fn, s.Tag) - } - done := fn.newBasicBlock("switch.done") - if label != nil { - label._break = done - } - // We pull the default case (if present) down to the end. - // But each fallthrough label must point to the next - // body block in source order, so we preallocate a - // body block (fallthru) for the next case. - // Unfortunately this makes for a confusing block order. - var dfltBody *[]ast.Stmt - var dfltFallthrough *BasicBlock - var fallthru, dfltBlock *BasicBlock - ncases := len(s.Body.List) - for i, clause := range s.Body.List { - body := fallthru - if body == nil { - body = fn.newBasicBlock("switch.body") // first case only - } - - // Preallocate body block for the next case. - fallthru = done - if i+1 < ncases { - fallthru = fn.newBasicBlock("switch.body") - } - - cc := clause.(*ast.CaseClause) - if cc.List == nil { - // Default case. - dfltBody = &cc.Body - dfltFallthrough = fallthru - dfltBlock = body - continue - } - - var nextCond *BasicBlock - for _, cond := range cc.List { - nextCond = fn.newBasicBlock("switch.next") - // TODO(adonovan): opt: when tag==vTrue, we'd - // get better code if we use b.cond(cond) - // instead of BinOp(EQL, tag, b.expr(cond)) - // followed by If. Don't forget conversions - // though. - cond := emitCompare(fn, token.EQL, tag, b.expr(fn, cond), cond.Pos()) - emitIf(fn, cond, body, nextCond) - fn.currentBlock = nextCond - } - fn.currentBlock = body - fn.targets = &targets{ - tail: fn.targets, - _break: done, - _fallthrough: fallthru, - } - b.stmtList(fn, cc.Body) - fn.targets = fn.targets.tail - emitJump(fn, done) - fn.currentBlock = nextCond - } - if dfltBlock != nil { - emitJump(fn, dfltBlock) - fn.currentBlock = dfltBlock - fn.targets = &targets{ - tail: fn.targets, - _break: done, - _fallthrough: dfltFallthrough, - } - b.stmtList(fn, *dfltBody) - fn.targets = fn.targets.tail - } - emitJump(fn, done) - fn.currentBlock = done -} - -// typeSwitchStmt emits to fn code for the type switch statement s, optionally -// labelled by label. -func (b *builder) typeSwitchStmt(fn *Function, s *ast.TypeSwitchStmt, label *lblock) { - // We treat TypeSwitchStmt like a sequential if-else chain. - // Multiway dispatch can be recovered later by ssautil.Switches(). - - // Typeswitch lowering: - // - // var x X - // switch y := x.(type) { - // case T1, T2: S1 // >1 (y := x) - // case nil: SN // nil (y := x) - // default: SD // 0 types (y := x) - // case T3: S3 // 1 type (y := x.(T3)) - // } - // - // ...s.Init... - // x := eval x - // .caseT1: - // t1, ok1 := typeswitch,ok x - // if ok1 then goto S1 else goto .caseT2 - // .caseT2: - // t2, ok2 := typeswitch,ok x - // if ok2 then goto S1 else goto .caseNil - // .S1: - // y := x - // ...S1... - // goto done - // .caseNil: - // if t2, ok2 := typeswitch,ok x - // if x == nil then goto SN else goto .caseT3 - // .SN: - // y := x - // ...SN... 
- // goto done - // .caseT3: - // t3, ok3 := typeswitch,ok x - // if ok3 then goto S3 else goto default - // .S3: - // y := t3 - // ...S3... - // goto done - // .default: - // y := x - // ...SD... - // goto done - // .done: - if s.Init != nil { - b.stmt(fn, s.Init) - } - - var x Value - switch ass := s.Assign.(type) { - case *ast.ExprStmt: // x.(type) - x = b.expr(fn, unparen(ass.X).(*ast.TypeAssertExpr).X) - case *ast.AssignStmt: // y := x.(type) - x = b.expr(fn, unparen(ass.Rhs[0]).(*ast.TypeAssertExpr).X) - } - - done := fn.newBasicBlock("typeswitch.done") - if label != nil { - label._break = done - } - var default_ *ast.CaseClause - for _, clause := range s.Body.List { - cc := clause.(*ast.CaseClause) - if cc.List == nil { - default_ = cc - continue - } - body := fn.newBasicBlock("typeswitch.body") - var next *BasicBlock - var casetype types.Type - var ti Value // ti, ok := typeassert,ok x - for _, cond := range cc.List { - next = fn.newBasicBlock("typeswitch.next") - casetype = fn.typeOf(cond) - var condv Value - if casetype == tUntypedNil { - condv = emitCompare(fn, token.EQL, x, zeroConst(x.Type()), cond.Pos()) - ti = x - } else { - yok := emitTypeTest(fn, x, casetype, cc.Case) - ti = emitExtract(fn, yok, 0) - condv = emitExtract(fn, yok, 1) - } - emitIf(fn, condv, body, next) - fn.currentBlock = next - } - if len(cc.List) != 1 { - ti = x - } - fn.currentBlock = body - b.typeCaseBody(fn, cc, ti, done) - fn.currentBlock = next - } - if default_ != nil { - b.typeCaseBody(fn, default_, x, done) - } else { - emitJump(fn, done) - } - fn.currentBlock = done -} - -func (b *builder) typeCaseBody(fn *Function, cc *ast.CaseClause, x Value, done *BasicBlock) { - if obj, ok := fn.info.Implicits[cc].(*types.Var); ok { - // In a switch y := x.(type), each case clause - // implicitly declares a distinct object y. - // In a single-type case, y has that type. - // In multi-type cases, 'case nil' and default, - // y has the same type as the interface operand. - emitStore(fn, emitLocalVar(fn, obj), x, obj.Pos()) - } - fn.targets = &targets{ - tail: fn.targets, - _break: done, - } - b.stmtList(fn, cc.Body) - fn.targets = fn.targets.tail - emitJump(fn, done) -} - -// selectStmt emits to fn code for the select statement s, optionally -// labelled by label. -func (b *builder) selectStmt(fn *Function, s *ast.SelectStmt, label *lblock) { - // A blocking select of a single case degenerates to a - // simple send or receive. - // TODO(adonovan): opt: is this optimization worth its weight? - if len(s.Body.List) == 1 { - clause := s.Body.List[0].(*ast.CommClause) - if clause.Comm != nil { - b.stmt(fn, clause.Comm) - done := fn.newBasicBlock("select.done") - if label != nil { - label._break = done - } - fn.targets = &targets{ - tail: fn.targets, - _break: done, - } - b.stmtList(fn, clause.Body) - fn.targets = fn.targets.tail - emitJump(fn, done) - fn.currentBlock = done - return - } - } - - // First evaluate all channels in all cases, and find - // the directions of each state. 
- var states []*SelectState - blocking := true - debugInfo := fn.debugInfo() - for _, clause := range s.Body.List { - var st *SelectState - switch comm := clause.(*ast.CommClause).Comm.(type) { - case nil: // default case - blocking = false - continue - - case *ast.SendStmt: // ch<- i - ch := b.expr(fn, comm.Chan) - chtyp := typeparams.CoreType(fn.typ(ch.Type())).(*types.Chan) - st = &SelectState{ - Dir: types.SendOnly, - Chan: ch, - Send: emitConv(fn, b.expr(fn, comm.Value), chtyp.Elem()), - Pos: comm.Arrow, - } - if debugInfo { - st.DebugNode = comm - } - - case *ast.AssignStmt: // x := <-ch - recv := unparen(comm.Rhs[0]).(*ast.UnaryExpr) - st = &SelectState{ - Dir: types.RecvOnly, - Chan: b.expr(fn, recv.X), - Pos: recv.OpPos, - } - if debugInfo { - st.DebugNode = recv - } - - case *ast.ExprStmt: // <-ch - recv := unparen(comm.X).(*ast.UnaryExpr) - st = &SelectState{ - Dir: types.RecvOnly, - Chan: b.expr(fn, recv.X), - Pos: recv.OpPos, - } - if debugInfo { - st.DebugNode = recv - } - } - states = append(states, st) - } - - // We dispatch on the (fair) result of Select using a - // sequential if-else chain, in effect: - // - // idx, recvOk, r0...r_n-1 := select(...) - // if idx == 0 { // receive on channel 0 (first receive => r0) - // x, ok := r0, recvOk - // ...state0... - // } else if v == 1 { // send on channel 1 - // ...state1... - // } else { - // ...default... - // } - sel := &Select{ - States: states, - Blocking: blocking, - } - sel.setPos(s.Select) - var vars []*types.Var - vars = append(vars, varIndex, varOk) - for _, st := range states { - if st.Dir == types.RecvOnly { - chtyp := typeparams.CoreType(fn.typ(st.Chan.Type())).(*types.Chan) - vars = append(vars, anonVar(chtyp.Elem())) - } - } - sel.setType(types.NewTuple(vars...)) - - fn.emit(sel) - idx := emitExtract(fn, sel, 0) - - done := fn.newBasicBlock("select.done") - if label != nil { - label._break = done - } - - var defaultBody *[]ast.Stmt - state := 0 - r := 2 // index in 'sel' tuple of value; increments if st.Dir==RECV - for _, cc := range s.Body.List { - clause := cc.(*ast.CommClause) - if clause.Comm == nil { - defaultBody = &clause.Body - continue - } - body := fn.newBasicBlock("select.body") - next := fn.newBasicBlock("select.next") - emitIf(fn, emitCompare(fn, token.EQL, idx, intConst(int64(state)), token.NoPos), body, next) - fn.currentBlock = body - fn.targets = &targets{ - tail: fn.targets, - _break: done, - } - switch comm := clause.Comm.(type) { - case *ast.ExprStmt: // <-ch - if debugInfo { - v := emitExtract(fn, sel, r) - emitDebugRef(fn, states[state].DebugNode.(ast.Expr), v, false) - } - r++ - - case *ast.AssignStmt: // x := <-states[state].Chan - if comm.Tok == token.DEFINE { - emitLocalVar(fn, identVar(fn, comm.Lhs[0].(*ast.Ident))) - } - x := b.addr(fn, comm.Lhs[0], false) // non-escaping - v := emitExtract(fn, sel, r) - if debugInfo { - emitDebugRef(fn, states[state].DebugNode.(ast.Expr), v, false) - } - x.store(fn, v) - - if len(comm.Lhs) == 2 { // x, ok := ... - if comm.Tok == token.DEFINE { - emitLocalVar(fn, identVar(fn, comm.Lhs[1].(*ast.Ident))) - } - ok := b.addr(fn, comm.Lhs[1], false) // non-escaping - ok.store(fn, emitExtract(fn, sel, 1)) - } - r++ - } - b.stmtList(fn, clause.Body) - fn.targets = fn.targets.tail - emitJump(fn, done) - fn.currentBlock = next - state++ - } - if defaultBody != nil { - fn.targets = &targets{ - tail: fn.targets, - _break: done, - } - b.stmtList(fn, *defaultBody) - fn.targets = fn.targets.tail - } else { - // A blocking select must match some case. 
- // (This should really be a runtime.errorString, not a string.) - fn.emit(&Panic{ - X: emitConv(fn, stringConst("blocking select matched no case"), tEface), - }) - fn.currentBlock = fn.newBasicBlock("unreachable") - } - emitJump(fn, done) - fn.currentBlock = done -} - -// forStmt emits to fn code for the for statement s, optionally -// labelled by label. -func (b *builder) forStmt(fn *Function, s *ast.ForStmt, label *lblock) { - // Use forStmtGo122 instead if it applies. - if s.Init != nil { - if assign, ok := s.Init.(*ast.AssignStmt); ok && assign.Tok == token.DEFINE { - if versions.AtLeast(fn.goversion, versions.Go1_22) { - b.forStmtGo122(fn, s, label) - return - } - } - } - - // ...init... - // jump loop - // loop: - // if cond goto body else done - // body: - // ...body... - // jump post - // post: (target of continue) - // ...post... - // jump loop - // done: (target of break) - if s.Init != nil { - b.stmt(fn, s.Init) - } - - body := fn.newBasicBlock("for.body") - done := fn.newBasicBlock("for.done") // target of 'break' - loop := body // target of back-edge - if s.Cond != nil { - loop = fn.newBasicBlock("for.loop") - } - cont := loop // target of 'continue' - if s.Post != nil { - cont = fn.newBasicBlock("for.post") - } - if label != nil { - label._break = done - label._continue = cont - } - emitJump(fn, loop) - fn.currentBlock = loop - if loop != body { - b.cond(fn, s.Cond, body, done) - fn.currentBlock = body - } - fn.targets = &targets{ - tail: fn.targets, - _break: done, - _continue: cont, - } - b.stmt(fn, s.Body) - fn.targets = fn.targets.tail - emitJump(fn, cont) - - if s.Post != nil { - fn.currentBlock = cont - b.stmt(fn, s.Post) - emitJump(fn, loop) // back-edge - } - fn.currentBlock = done -} - -// forStmtGo122 emits to fn code for the for statement s, optionally -// labelled by label. s must define its variables. -// -// This allocates once per loop iteration. This is only correct in -// GoVersions >= go1.22. -func (b *builder) forStmtGo122(fn *Function, s *ast.ForStmt, label *lblock) { - // i_outer = alloc[T] - // *i_outer = ...init... // under objects[i] = i_outer - // jump loop - // loop: - // i = phi [head: i_outer, loop: i_next] - // ...cond... // under objects[i] = i - // if cond goto body else done - // body: - // ...body... // under objects[i] = i (same as loop) - // jump post - // post: - // tmp = *i - // i_next = alloc[T] - // *i_next = tmp - // ...post... // under objects[i] = i_next - // goto loop - // done: - - init := s.Init.(*ast.AssignStmt) - startingBlocks := len(fn.Blocks) - - pre := fn.currentBlock // current block before starting - loop := fn.newBasicBlock("for.loop") // target of back-edge - body := fn.newBasicBlock("for.body") - post := fn.newBasicBlock("for.post") // target of 'continue' - done := fn.newBasicBlock("for.done") // target of 'break' - - // For each of the n loop variables, we create five SSA values, - // outer, phi, next, load, and store in pre, loop, and post. - // There is no limit on n. 
- type loopVar struct { - obj *types.Var - outer *Alloc - phi *Phi - load *UnOp - next *Alloc - store *Store - } - vars := make([]loopVar, len(init.Lhs)) - for i, lhs := range init.Lhs { - v := identVar(fn, lhs.(*ast.Ident)) - typ := fn.typ(v.Type()) - - fn.currentBlock = pre - outer := emitLocal(fn, typ, v.Pos(), v.Name()) - - fn.currentBlock = loop - phi := &Phi{Comment: v.Name()} - phi.pos = v.Pos() - phi.typ = outer.Type() - fn.emit(phi) - - fn.currentBlock = post - // If next is is local, it reuses the address and zeroes the old value so - // load before allocating next. - load := emitLoad(fn, phi) - next := emitLocal(fn, typ, v.Pos(), v.Name()) - store := emitStore(fn, next, load, token.NoPos) - - phi.Edges = []Value{outer, next} // pre edge is emitted before post edge. - - vars[i] = loopVar{v, outer, phi, load, next, store} - } - - // ...init... under fn.objects[v] = i_outer - fn.currentBlock = pre - for _, v := range vars { - fn.vars[v.obj] = v.outer - } - const isDef = false // assign to already-allocated outers - b.assignStmt(fn, init.Lhs, init.Rhs, isDef) - if label != nil { - label._break = done - label._continue = post - } - emitJump(fn, loop) - - // ...cond... under fn.objects[v] = i - fn.currentBlock = loop - for _, v := range vars { - fn.vars[v.obj] = v.phi - } - if s.Cond != nil { - b.cond(fn, s.Cond, body, done) - } else { - emitJump(fn, body) - } - - // ...body... under fn.objects[v] = i - fn.currentBlock = body - fn.targets = &targets{ - tail: fn.targets, - _break: done, - _continue: post, - } - b.stmt(fn, s.Body) - fn.targets = fn.targets.tail - emitJump(fn, post) - - // ...post... under fn.objects[v] = i_next - for _, v := range vars { - fn.vars[v.obj] = v.next - } - fn.currentBlock = post - if s.Post != nil { - b.stmt(fn, s.Post) - } - emitJump(fn, loop) // back-edge - fn.currentBlock = done - - // For each loop variable that does not escape, - // (the common case), fuse its next cells into its - // (local) outer cell as they have disjoint live ranges. - // - // It is sufficient to test whether i_next escapes, - // because its Heap flag will be marked true if either - // the cond or post expression causes i to escape - // (because escape distributes over phi). - var nlocals int - for _, v := range vars { - if !v.next.Heap { - nlocals++ - } - } - if nlocals > 0 { - replace := make(map[Value]Value, 2*nlocals) - dead := make(map[Instruction]bool, 4*nlocals) - for _, v := range vars { - if !v.next.Heap { - replace[v.next] = v.outer - replace[v.phi] = v.outer - dead[v.phi], dead[v.next], dead[v.load], dead[v.store] = true, true, true, true - } - } - - // Replace all uses of i_next and phi with i_outer. - // Referrers have not been built for fn yet so only update Instruction operands. - // We need only look within the blocks added by the loop. - var operands []*Value // recycle storage - for _, b := range fn.Blocks[startingBlocks:] { - for _, instr := range b.Instrs { - operands = instr.Operands(operands[:0]) - for _, ptr := range operands { - k := *ptr - if v := replace[k]; v != nil { - *ptr = v - } - } - } - } - - // Remove instructions for phi, load, and store. - // lift() will remove the unused i_next *Alloc. - isDead := func(i Instruction) bool { return dead[i] } - loop.Instrs = removeInstrsIf(loop.Instrs, isDead) - post.Instrs = removeInstrsIf(post.Instrs, isDead) - } -} - -// rangeIndexed emits to fn the header for an integer-indexed loop -// over array, *array or slice value x. -// The v result is defined only if tv is non-nil. 
-// forPos is the position of the "for" token. -func (b *builder) rangeIndexed(fn *Function, x Value, tv types.Type, pos token.Pos) (k, v Value, loop, done *BasicBlock) { - // - // length = len(x) - // index = -1 - // loop: (target of continue) - // index++ - // if index < length goto body else done - // body: - // k = index - // v = x[index] - // ...body... - // jump loop - // done: (target of break) - - // Determine number of iterations. - var length Value - dt, _ := deref(x.Type()) - if arr, ok := typeparams.CoreType(dt).(*types.Array); ok { - // For array or *array, the number of iterations is - // known statically thanks to the type. We avoid a - // data dependence upon x, permitting later dead-code - // elimination if x is pure, static unrolling, etc. - // Ranging over a nil *array may have >0 iterations. - // We still generate code for x, in case it has effects. - length = intConst(arr.Len()) - } else { - // length = len(x). - var c Call - c.Call.Value = makeLen(x.Type()) - c.Call.Args = []Value{x} - c.setType(tInt) - length = fn.emit(&c) - } - - index := emitLocal(fn, tInt, token.NoPos, "rangeindex") - emitStore(fn, index, intConst(-1), pos) - - loop = fn.newBasicBlock("rangeindex.loop") - emitJump(fn, loop) - fn.currentBlock = loop - - incr := &BinOp{ - Op: token.ADD, - X: emitLoad(fn, index), - Y: vOne, - } - incr.setType(tInt) - emitStore(fn, index, fn.emit(incr), pos) - - body := fn.newBasicBlock("rangeindex.body") - done = fn.newBasicBlock("rangeindex.done") - emitIf(fn, emitCompare(fn, token.LSS, incr, length, token.NoPos), body, done) - fn.currentBlock = body - - k = emitLoad(fn, index) - if tv != nil { - switch t := typeparams.CoreType(x.Type()).(type) { - case *types.Array: - instr := &Index{ - X: x, - Index: k, - } - instr.setType(t.Elem()) - instr.setPos(x.Pos()) - v = fn.emit(instr) - - case *types.Pointer: // *array - instr := &IndexAddr{ - X: x, - Index: k, - } - instr.setType(types.NewPointer(t.Elem().Underlying().(*types.Array).Elem())) - instr.setPos(x.Pos()) - v = emitLoad(fn, fn.emit(instr)) - - case *types.Slice: - instr := &IndexAddr{ - X: x, - Index: k, - } - instr.setType(types.NewPointer(t.Elem())) - instr.setPos(x.Pos()) - v = emitLoad(fn, fn.emit(instr)) - - default: - panic("rangeIndexed x:" + t.String()) - } - } - return -} - -// rangeIter emits to fn the header for a loop using -// Range/Next/Extract to iterate over map or string value x. -// tk and tv are the types of the key/value results k and v, or nil -// if the respective component is not wanted. -func (b *builder) rangeIter(fn *Function, x Value, tk, tv types.Type, pos token.Pos) (k, v Value, loop, done *BasicBlock) { - // - // it = range x - // loop: (target of continue) - // okv = next it (ok, key, value) - // ok = extract okv #0 - // if ok goto body else done - // body: - // k = extract okv #1 - // v = extract okv #2 - // ...body... 
- // jump loop - // done: (target of break) - // - - if tk == nil { - tk = tInvalid - } - if tv == nil { - tv = tInvalid - } - - rng := &Range{X: x} - rng.setPos(pos) - rng.setType(tRangeIter) - it := fn.emit(rng) - - loop = fn.newBasicBlock("rangeiter.loop") - emitJump(fn, loop) - fn.currentBlock = loop - - okv := &Next{ - Iter: it, - IsString: isBasic(typeparams.CoreType(x.Type())), - } - okv.setType(types.NewTuple( - varOk, - newVar("k", tk), - newVar("v", tv), - )) - fn.emit(okv) - - body := fn.newBasicBlock("rangeiter.body") - done = fn.newBasicBlock("rangeiter.done") - emitIf(fn, emitExtract(fn, okv, 0), body, done) - fn.currentBlock = body - - if tk != tInvalid { - k = emitExtract(fn, okv, 1) - } - if tv != tInvalid { - v = emitExtract(fn, okv, 2) - } - return -} - -// rangeChan emits to fn the header for a loop that receives from -// channel x until it fails. -// tk is the channel's element type, or nil if the k result is -// not wanted -// pos is the position of the '=' or ':=' token. -func (b *builder) rangeChan(fn *Function, x Value, tk types.Type, pos token.Pos) (k Value, loop, done *BasicBlock) { - // - // loop: (target of continue) - // ko = <-x (key, ok) - // ok = extract ko #1 - // if ok goto body else done - // body: - // k = extract ko #0 - // ...body... - // goto loop - // done: (target of break) - - loop = fn.newBasicBlock("rangechan.loop") - emitJump(fn, loop) - fn.currentBlock = loop - recv := &UnOp{ - Op: token.ARROW, - X: x, - CommaOk: true, - } - recv.setPos(pos) - recv.setType(types.NewTuple( - newVar("k", typeparams.CoreType(x.Type()).(*types.Chan).Elem()), - varOk, - )) - ko := fn.emit(recv) - body := fn.newBasicBlock("rangechan.body") - done = fn.newBasicBlock("rangechan.done") - emitIf(fn, emitExtract(fn, ko, 1), body, done) - fn.currentBlock = body - if tk != nil { - k = emitExtract(fn, ko, 0) - } - return -} - -// rangeInt emits to fn the header for a range loop with an integer operand. -// tk is the key value's type, or nil if the k result is not wanted. -// pos is the position of the "for" token. -func (b *builder) rangeInt(fn *Function, x Value, tk types.Type, pos token.Pos) (k Value, loop, done *BasicBlock) { - // - // iter = 0 - // if 0 < x goto body else done - // loop: (target of continue) - // iter++ - // if iter < x goto body else done - // body: - // k = x - // ...body... - // jump loop - // done: (target of break) - - if isUntyped(x.Type()) { - x = emitConv(fn, x, tInt) - } - - T := x.Type() - iter := emitLocal(fn, T, token.NoPos, "rangeint.iter") - // x may be unsigned. Avoid initializing x to -1. - - body := fn.newBasicBlock("rangeint.body") - done = fn.newBasicBlock("rangeint.done") - emitIf(fn, emitCompare(fn, token.LSS, zeroConst(T), x, token.NoPos), body, done) - - loop = fn.newBasicBlock("rangeint.loop") - fn.currentBlock = loop - - incr := &BinOp{ - Op: token.ADD, - X: emitLoad(fn, iter), - Y: emitConv(fn, vOne, T), - } - incr.setType(T) - emitStore(fn, iter, fn.emit(incr), pos) - emitIf(fn, emitCompare(fn, token.LSS, incr, x, token.NoPos), body, done) - fn.currentBlock = body - - if tk != nil { - // Integer types (int, uint8, etc.) are named and - // we know that k is assignable to x when tk != nil. - // This implies tk and T are identical so no conversion is needed. - k = emitLoad(fn, iter) - } - - return -} - -// rangeStmt emits to fn code for the range statement s, optionally -// labelled by label. 
-func (b *builder) rangeStmt(fn *Function, s *ast.RangeStmt, label *lblock) { - var tk, tv types.Type - if s.Key != nil && !isBlankIdent(s.Key) { - tk = fn.typeOf(s.Key) - } - if s.Value != nil && !isBlankIdent(s.Value) { - tv = fn.typeOf(s.Value) - } - - // create locals for s.Key and s.Value. - createVars := func() { - // Unlike a short variable declaration, a RangeStmt - // using := never redeclares an existing variable; it - // always creates a new one. - if tk != nil { - emitLocalVar(fn, identVar(fn, s.Key.(*ast.Ident))) - } - if tv != nil { - emitLocalVar(fn, identVar(fn, s.Value.(*ast.Ident))) - } - } - - afterGo122 := versions.AtLeast(fn.goversion, versions.Go1_22) - if s.Tok == token.DEFINE && !afterGo122 { - // pre-go1.22: If iteration variables are defined (:=), this - // occurs once outside the loop. - createVars() - } - - x := b.expr(fn, s.X) - - var k, v Value - var loop, done *BasicBlock - switch rt := typeparams.CoreType(x.Type()).(type) { - case *types.Slice, *types.Array, *types.Pointer: // *array - k, v, loop, done = b.rangeIndexed(fn, x, tv, s.For) - - case *types.Chan: - k, loop, done = b.rangeChan(fn, x, tk, s.For) - - case *types.Map: - k, v, loop, done = b.rangeIter(fn, x, tk, tv, s.For) - - case *types.Basic: - switch { - case rt.Info()&types.IsString != 0: - k, v, loop, done = b.rangeIter(fn, x, tk, tv, s.For) - - case rt.Info()&types.IsInteger != 0: - k, loop, done = b.rangeInt(fn, x, tk, s.For) - - default: - panic("Cannot range over basic type: " + rt.String()) - } - - default: - panic("Cannot range over: " + rt.String()) - } - - if s.Tok == token.DEFINE && afterGo122 { - // go1.22: If iteration variables are defined (:=), this occurs inside the loop. - createVars() - } - - // Evaluate both LHS expressions before we update either. - var kl, vl lvalue - if tk != nil { - kl = b.addr(fn, s.Key, false) // non-escaping - } - if tv != nil { - vl = b.addr(fn, s.Value, false) // non-escaping - } - if tk != nil { - kl.store(fn, k) - } - if tv != nil { - vl.store(fn, v) - } - - if label != nil { - label._break = done - label._continue = loop - } - - fn.targets = &targets{ - tail: fn.targets, - _break: done, - _continue: loop, - } - b.stmt(fn, s.Body) - fn.targets = fn.targets.tail - emitJump(fn, loop) // back-edge - fn.currentBlock = done -} - -// stmt lowers statement s to SSA form, emitting code to fn. -func (b *builder) stmt(fn *Function, _s ast.Stmt) { - // The label of the current statement. If non-nil, its _goto - // target is always set; its _break and _continue are set only - // within the body of switch/typeswitch/select/for/range. - // It is effectively an additional default-nil parameter of stmt(). - var label *lblock -start: - switch s := _s.(type) { - case *ast.EmptyStmt: - // ignore. (Usually removed by gofmt.) 
- - case *ast.DeclStmt: // Con, Var or Typ - d := s.Decl.(*ast.GenDecl) - if d.Tok == token.VAR { - for _, spec := range d.Specs { - if vs, ok := spec.(*ast.ValueSpec); ok { - b.localValueSpec(fn, vs) - } - } - } - - case *ast.LabeledStmt: - label = fn.labelledBlock(s.Label) - emitJump(fn, label._goto) - fn.currentBlock = label._goto - _s = s.Stmt - goto start // effectively: tailcall stmt(fn, s.Stmt, label) - - case *ast.ExprStmt: - b.expr(fn, s.X) - - case *ast.SendStmt: - chtyp := typeparams.CoreType(fn.typeOf(s.Chan)).(*types.Chan) - fn.emit(&Send{ - Chan: b.expr(fn, s.Chan), - X: emitConv(fn, b.expr(fn, s.Value), chtyp.Elem()), - pos: s.Arrow, - }) - - case *ast.IncDecStmt: - op := token.ADD - if s.Tok == token.DEC { - op = token.SUB - } - loc := b.addr(fn, s.X, false) - b.assignOp(fn, loc, NewConst(constant.MakeInt64(1), loc.typ()), op, s.Pos()) - - case *ast.AssignStmt: - switch s.Tok { - case token.ASSIGN, token.DEFINE: - b.assignStmt(fn, s.Lhs, s.Rhs, s.Tok == token.DEFINE) - - default: // +=, etc. - op := s.Tok + token.ADD - token.ADD_ASSIGN - b.assignOp(fn, b.addr(fn, s.Lhs[0], false), b.expr(fn, s.Rhs[0]), op, s.Pos()) - } - - case *ast.GoStmt: - // The "intrinsics" new/make/len/cap are forbidden here. - // panic is treated like an ordinary function call. - v := Go{pos: s.Go} - b.setCall(fn, s.Call, &v.Call) - fn.emit(&v) - - case *ast.DeferStmt: - // The "intrinsics" new/make/len/cap are forbidden here. - // panic is treated like an ordinary function call. - v := Defer{pos: s.Defer} - b.setCall(fn, s.Call, &v.Call) - fn.emit(&v) - - // A deferred call can cause recovery from panic, - // and control resumes at the Recover block. - createRecoverBlock(fn) - - case *ast.ReturnStmt: - var results []Value - if len(s.Results) == 1 && fn.Signature.Results().Len() > 1 { - // Return of one expression in a multi-valued function. - tuple := b.exprN(fn, s.Results[0]) - ttuple := tuple.Type().(*types.Tuple) - for i, n := 0, ttuple.Len(); i < n; i++ { - results = append(results, - emitConv(fn, emitExtract(fn, tuple, i), - fn.Signature.Results().At(i).Type())) - } - } else { - // 1:1 return, or no-arg return in non-void function. - for i, r := range s.Results { - v := emitConv(fn, b.expr(fn, r), fn.Signature.Results().At(i).Type()) - results = append(results, v) - } - } - if fn.namedResults != nil { - // Function has named result parameters (NRPs). - // Perform parallel assignment of return operands to NRPs. - for i, r := range results { - emitStore(fn, fn.namedResults[i], r, s.Return) - } - } - // Run function calls deferred in this - // function when explicitly returning from it. - fn.emit(new(RunDefers)) - if fn.namedResults != nil { - // Reload NRPs to form the result tuple. 
- results = results[:0] - for _, r := range fn.namedResults { - results = append(results, emitLoad(fn, r)) - } - } - fn.emit(&Return{Results: results, pos: s.Return}) - fn.currentBlock = fn.newBasicBlock("unreachable") - - case *ast.BranchStmt: - var block *BasicBlock - switch s.Tok { - case token.BREAK: - if s.Label != nil { - block = fn.labelledBlock(s.Label)._break - } else { - for t := fn.targets; t != nil && block == nil; t = t.tail { - block = t._break - } - } - - case token.CONTINUE: - if s.Label != nil { - block = fn.labelledBlock(s.Label)._continue - } else { - for t := fn.targets; t != nil && block == nil; t = t.tail { - block = t._continue - } - } - - case token.FALLTHROUGH: - for t := fn.targets; t != nil && block == nil; t = t.tail { - block = t._fallthrough - } - - case token.GOTO: - block = fn.labelledBlock(s.Label)._goto - } - emitJump(fn, block) - fn.currentBlock = fn.newBasicBlock("unreachable") - - case *ast.BlockStmt: - b.stmtList(fn, s.List) - - case *ast.IfStmt: - if s.Init != nil { - b.stmt(fn, s.Init) - } - then := fn.newBasicBlock("if.then") - done := fn.newBasicBlock("if.done") - els := done - if s.Else != nil { - els = fn.newBasicBlock("if.else") - } - b.cond(fn, s.Cond, then, els) - fn.currentBlock = then - b.stmt(fn, s.Body) - emitJump(fn, done) - - if s.Else != nil { - fn.currentBlock = els - b.stmt(fn, s.Else) - emitJump(fn, done) - } - - fn.currentBlock = done - - case *ast.SwitchStmt: - b.switchStmt(fn, s, label) - - case *ast.TypeSwitchStmt: - b.typeSwitchStmt(fn, s, label) - - case *ast.SelectStmt: - b.selectStmt(fn, s, label) - - case *ast.ForStmt: - b.forStmt(fn, s, label) - - case *ast.RangeStmt: - b.rangeStmt(fn, s, label) - - default: - panic(fmt.Sprintf("unexpected statement kind: %T", s)) - } -} - -// A buildFunc is a strategy for building the SSA body for a function. -type buildFunc = func(*builder, *Function) - -// iterate causes all created but unbuilt functions to be built. As -// this may create new methods, the process is iterated until it -// converges. -func (b *builder) iterate() { - for ; b.finished < b.created.Len(); b.finished++ { - fn := b.created.At(b.finished) - b.buildFunction(fn) - } -} - -// buildFunction builds SSA code for the body of function fn. Idempotent. -func (b *builder) buildFunction(fn *Function) { - if fn.build != nil { - assert(fn.parent == nil, "anonymous functions should not be built by buildFunction()") - - if fn.Prog.mode&LogSource != 0 { - defer logStack("build %s @ %s", fn, fn.Prog.Fset.Position(fn.pos))() - } - fn.build(b, fn) - fn.done() - } -} - -// buildParamsOnly builds fn.Params from fn.Signature, but does not build fn.Body. -func (b *builder) buildParamsOnly(fn *Function) { - // For external (C, asm) functions or functions loaded from - // export data, we must set fn.Params even though there is no - // body code to reference them. - if recv := fn.Signature.Recv(); recv != nil { - fn.addParamVar(recv) - } - params := fn.Signature.Params() - for i, n := 0, params.Len(); i < n; i++ { - fn.addParamVar(params.At(i)) - } -} - -// buildFromSyntax builds fn.Body from fn.syntax, which must be non-nil. 
-func (b *builder) buildFromSyntax(fn *Function) { - var ( - recvField *ast.FieldList - body *ast.BlockStmt - functype *ast.FuncType - ) - switch syntax := fn.syntax.(type) { - case *ast.FuncDecl: - functype = syntax.Type - recvField = syntax.Recv - body = syntax.Body - if body == nil { - b.buildParamsOnly(fn) // no body (non-Go function) - return - } - case *ast.FuncLit: - functype = syntax.Type - body = syntax.Body - case nil: - panic("no syntax") - default: - panic(syntax) // unexpected syntax - } - - fn.startBody() - fn.createSyntacticParams(recvField, functype) - b.stmt(fn, body) - if cb := fn.currentBlock; cb != nil && (cb == fn.Blocks[0] || cb == fn.Recover || cb.Preds != nil) { - // Control fell off the end of the function's body block. - // - // Block optimizations eliminate the current block, if - // unreachable. It is a builder invariant that - // if this no-arg return is ill-typed for - // fn.Signature.Results, this block must be - // unreachable. The sanity checker checks this. - fn.emit(new(RunDefers)) - fn.emit(new(Return)) - } - fn.finishBody() -} - -// addRuntimeType records t as a runtime type, -// along with all types derivable from it using reflection. -// -// Acquires prog.runtimeTypesMu. -func addRuntimeType(prog *Program, t types.Type) { - prog.runtimeTypesMu.Lock() - defer prog.runtimeTypesMu.Unlock() - forEachReachable(&prog.MethodSets, t, func(t types.Type) bool { - prev, _ := prog.runtimeTypes.Set(t, true).(bool) - return !prev // already seen? - }) -} - -// Build calls Package.Build for each package in prog. -// Building occurs in parallel unless the BuildSerially mode flag was set. -// -// Build is intended for whole-program analysis; a typical compiler -// need only build a single package. -// -// Build is idempotent and thread-safe. -func (prog *Program) Build() { - var wg sync.WaitGroup - for _, p := range prog.packages { - if prog.mode&BuildSerially != 0 { - p.Build() - } else { - wg.Add(1) - go func(p *Package) { - p.Build() - wg.Done() - }(p) - } - } - wg.Wait() -} - -// Build builds SSA code for all functions and vars in package p. -// -// CreatePackage must have been called for all of p's direct imports -// (and hence its direct imports must have been error-free). It is not -// necessary to call CreatePackage for indirect dependencies. -// Functions will be created for all necessary methods in those -// packages on demand. -// -// Build is idempotent and thread-safe. -func (p *Package) Build() { p.buildOnce.Do(p.build) } - -func (p *Package) build() { - if p.info == nil { - return // synthetic package, e.g. "testmain" - } - if p.Prog.mode&LogSource != 0 { - defer logStack("build %s", p)() - } - - b := builder{created: &p.created} - b.iterate() - - // We no longer need transient information: ASTs or go/types deductions. - p.info = nil - p.created = nil - p.files = nil - p.initVersion = nil - - if p.Prog.mode&SanityCheckFunctions != 0 { - sanityCheckPackage(p) - } -} - -// buildPackageInit builds fn.Body for the synthetic package initializer. -func (b *builder) buildPackageInit(fn *Function) { - p := fn.Pkg - fn.startBody() - - var done *BasicBlock - - if p.Prog.mode&BareInits == 0 { - // Make init() skip if package is already initialized. - initguard := p.Var("init$guard") - doinit := fn.newBasicBlock("init.start") - done = fn.newBasicBlock("init.done") - emitIf(fn, emitLoad(fn, initguard), done, doinit) - fn.currentBlock = doinit - emitStore(fn, initguard, vTrue, token.NoPos) - - // Call the init() function of each package we import. 
- for _, pkg := range p.Pkg.Imports() { - prereq := p.Prog.packages[pkg] - if prereq == nil { - panic(fmt.Sprintf("Package(%q).Build(): unsatisfied import: Program.CreatePackage(%q) was not called", p.Pkg.Path(), pkg.Path())) - } - var v Call - v.Call.Value = prereq.init - v.Call.pos = fn.pos - v.setType(types.NewTuple()) - fn.emit(&v) - } - } - - // Initialize package-level vars in correct order. - if len(p.info.InitOrder) > 0 && len(p.files) == 0 { - panic("no source files provided for package. cannot initialize globals") - } - - for _, varinit := range p.info.InitOrder { - if fn.Prog.mode&LogSource != 0 { - fmt.Fprintf(os.Stderr, "build global initializer %v @ %s\n", - varinit.Lhs, p.Prog.Fset.Position(varinit.Rhs.Pos())) - } - // Initializers for global vars are evaluated in dependency - // order, but may come from arbitrary files of the package - // with different versions, so we transiently update - // fn.goversion for each one. (Since init is a synthetic - // function it has no syntax of its own that needs a version.) - fn.goversion = p.initVersion[varinit.Rhs] - if len(varinit.Lhs) == 1 { - // 1:1 initialization: var x, y = a(), b() - var lval lvalue - if v := varinit.Lhs[0]; v.Name() != "_" { - lval = &address{addr: p.objects[v].(*Global), pos: v.Pos()} - } else { - lval = blank{} - } - b.assign(fn, lval, varinit.Rhs, true, nil) - } else { - // n:1 initialization: var x, y := f() - tuple := b.exprN(fn, varinit.Rhs) - for i, v := range varinit.Lhs { - if v.Name() == "_" { - continue - } - emitStore(fn, p.objects[v].(*Global), emitExtract(fn, tuple, i), v.Pos()) - } - } - } - - // The rest of the init function is synthetic: - // no syntax, info, goversion. - fn.info = nil - fn.goversion = "" - - // Call all of the declared init() functions in source order. - for _, file := range p.files { - for _, decl := range file.Decls { - if decl, ok := decl.(*ast.FuncDecl); ok { - id := decl.Name - if !isBlankIdent(id) && id.Name == "init" && decl.Recv == nil { - declaredInit := p.objects[p.info.Defs[id]].(*Function) - var v Call - v.Call.Value = declaredInit - v.setType(types.NewTuple()) - p.init.emit(&v) - } - } - } - } - - // Finish up init(). - if p.Prog.mode&BareInits == 0 { - emitJump(fn, done) - fn.currentBlock = done - } - fn.emit(new(Return)) - fn.finishBody() -} diff --git a/vendor/golang.org/x/tools/go/ssa/const.go b/vendor/golang.org/x/tools/go/ssa/const.go deleted file mode 100644 index e0d79f5ef..000000000 --- a/vendor/golang.org/x/tools/go/ssa/const.go +++ /dev/null @@ -1,232 +0,0 @@ -// Copyright 2013 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package ssa - -// This file defines the Const SSA value type. - -import ( - "fmt" - "go/constant" - "go/token" - "go/types" - "strconv" - "strings" - - "golang.org/x/tools/internal/aliases" - "golang.org/x/tools/internal/typeparams" -) - -// NewConst returns a new constant of the specified value and type. -// val must be valid according to the specification of Const.Value. -func NewConst(val constant.Value, typ types.Type) *Const { - if val == nil { - switch soleTypeKind(typ) { - case types.IsBoolean: - val = constant.MakeBool(false) - case types.IsInteger: - val = constant.MakeInt64(0) - case types.IsString: - val = constant.MakeString("") - } - } - return &Const{typ, val} -} - -// soleTypeKind returns a BasicInfo for which constant.Value can -// represent all zero values for the types in the type set. 
-// -// types.IsBoolean for false is a representative. -// types.IsInteger for 0 -// types.IsString for "" -// 0 otherwise. -func soleTypeKind(typ types.Type) types.BasicInfo { - // State records the set of possible zero values (false, 0, ""). - // Candidates (perhaps all) are eliminated during the type-set - // iteration, which executes at least once. - state := types.IsBoolean | types.IsInteger | types.IsString - underIs(typeSetOf(typ), func(t types.Type) bool { - var c types.BasicInfo - if t, ok := aliases.Unalias(t).(*types.Basic); ok { - c = t.Info() - } - if c&types.IsNumeric != 0 { // int/float/complex - c = types.IsInteger - } - state = state & c - return state != 0 - }) - return state -} - -// intConst returns an 'int' constant that evaluates to i. -// (i is an int64 in case the host is narrower than the target.) -func intConst(i int64) *Const { - return NewConst(constant.MakeInt64(i), tInt) -} - -// stringConst returns a 'string' constant that evaluates to s. -func stringConst(s string) *Const { - return NewConst(constant.MakeString(s), tString) -} - -// zeroConst returns a new "zero" constant of the specified type. -func zeroConst(t types.Type) *Const { - return NewConst(nil, t) -} - -func (c *Const) RelString(from *types.Package) string { - var s string - if c.Value == nil { - s = zeroString(c.typ, from) - } else if c.Value.Kind() == constant.String { - s = constant.StringVal(c.Value) - const max = 20 - // TODO(adonovan): don't cut a rune in half. - if len(s) > max { - s = s[:max-3] + "..." // abbreviate - } - s = strconv.Quote(s) - } else { - s = c.Value.String() - } - return s + ":" + relType(c.Type(), from) -} - -// zeroString returns the string representation of the "zero" value of the type t. -func zeroString(t types.Type, from *types.Package) string { - switch t := t.(type) { - case *types.Basic: - switch { - case t.Info()&types.IsBoolean != 0: - return "false" - case t.Info()&types.IsNumeric != 0: - return "0" - case t.Info()&types.IsString != 0: - return `""` - case t.Kind() == types.UnsafePointer: - fallthrough - case t.Kind() == types.UntypedNil: - return "nil" - default: - panic(fmt.Sprint("zeroString for unexpected type:", t)) - } - case *types.Pointer, *types.Slice, *types.Interface, *types.Chan, *types.Map, *types.Signature: - return "nil" - case *types.Named, *aliases.Alias: - return zeroString(t.Underlying(), from) - case *types.Array, *types.Struct: - return relType(t, from) + "{}" - case *types.Tuple: - // Tuples are not normal values. - // We are currently format as "(t[0], ..., t[n])". Could be something else. - components := make([]string, t.Len()) - for i := 0; i < t.Len(); i++ { - components[i] = zeroString(t.At(i).Type(), from) - } - return "(" + strings.Join(components, ", ") + ")" - case *types.TypeParam: - return "*new(" + relType(t, from) + ")" - } - panic(fmt.Sprint("zeroString: unexpected ", t)) -} - -func (c *Const) Name() string { - return c.RelString(nil) -} - -func (c *Const) String() string { - return c.Name() -} - -func (c *Const) Type() types.Type { - return c.typ -} - -func (c *Const) Referrers() *[]Instruction { - return nil -} - -func (c *Const) Parent() *Function { return nil } - -func (c *Const) Pos() token.Pos { - return token.NoPos -} - -// IsNil returns true if this constant is a nil value of -// a nillable reference type (pointer, slice, channel, map, or function), -// a basic interface type, or -// a type parameter all of whose possible instantiations are themselves nillable. 
-func (c *Const) IsNil() bool { - return c.Value == nil && nillable(c.typ) -} - -// nillable reports whether *new(T) == nil is legal for type T. -func nillable(t types.Type) bool { - if typeparams.IsTypeParam(t) { - return underIs(typeSetOf(t), func(u types.Type) bool { - // empty type set (u==nil) => any underlying types => not nillable - return u != nil && nillable(u) - }) - } - switch t.Underlying().(type) { - case *types.Pointer, *types.Slice, *types.Chan, *types.Map, *types.Signature: - return true - case *types.Interface: - return true // basic interface. - default: - return false - } -} - -// TODO(adonovan): move everything below into golang.org/x/tools/go/ssa/interp. - -// Int64 returns the numeric value of this constant truncated to fit -// a signed 64-bit integer. -func (c *Const) Int64() int64 { - switch x := constant.ToInt(c.Value); x.Kind() { - case constant.Int: - if i, ok := constant.Int64Val(x); ok { - return i - } - return 0 - case constant.Float: - f, _ := constant.Float64Val(x) - return int64(f) - } - panic(fmt.Sprintf("unexpected constant value: %T", c.Value)) -} - -// Uint64 returns the numeric value of this constant truncated to fit -// an unsigned 64-bit integer. -func (c *Const) Uint64() uint64 { - switch x := constant.ToInt(c.Value); x.Kind() { - case constant.Int: - if u, ok := constant.Uint64Val(x); ok { - return u - } - return 0 - case constant.Float: - f, _ := constant.Float64Val(x) - return uint64(f) - } - panic(fmt.Sprintf("unexpected constant value: %T", c.Value)) -} - -// Float64 returns the numeric value of this constant truncated to fit -// a float64. -func (c *Const) Float64() float64 { - x := constant.ToFloat(c.Value) // (c.Value == nil) => x.Kind() == Unknown - f, _ := constant.Float64Val(x) - return f -} - -// Complex128 returns the complex value of this constant truncated to -// fit a complex128. -func (c *Const) Complex128() complex128 { - x := constant.ToComplex(c.Value) // (c.Value == nil) => x.Kind() == Unknown - re, _ := constant.Float64Val(constant.Real(x)) - im, _ := constant.Float64Val(constant.Imag(x)) - return complex(re, im) -} diff --git a/vendor/golang.org/x/tools/go/ssa/coretype.go b/vendor/golang.org/x/tools/go/ssa/coretype.go deleted file mode 100644 index 3a512830b..000000000 --- a/vendor/golang.org/x/tools/go/ssa/coretype.go +++ /dev/null @@ -1,160 +0,0 @@ -// Copyright 2022 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package ssa - -import ( - "go/types" - - "golang.org/x/tools/internal/aliases" - "golang.org/x/tools/internal/typeparams" -) - -// Utilities for dealing with core types. - -// isBytestring returns true if T has the same terms as interface{[]byte | string}. -// These act like a core type for some operations: slice expressions, append and copy. -// -// See https://go.dev/ref/spec#Core_types for the details on bytestring. -func isBytestring(T types.Type) bool { - U := T.Underlying() - if _, ok := U.(*types.Interface); !ok { - return false - } - - tset := typeSetOf(U) - if tset.Len() != 2 { - return false - } - hasBytes, hasString := false, false - underIs(tset, func(t types.Type) bool { - switch { - case isString(t): - hasString = true - case isByteSlice(t): - hasBytes = true - } - return hasBytes || hasString - }) - return hasBytes && hasString -} - -// termList is a list of types. 
-type termList []*types.Term // type terms of the type set -func (s termList) Len() int { return len(s) } -func (s termList) At(i int) types.Type { return s[i].Type() } - -// typeSetOf returns the type set of typ. Returns an empty typeset on an error. -func typeSetOf(typ types.Type) termList { - // This is a adaptation of x/exp/typeparams.NormalTerms which x/tools cannot depend on. - var terms []*types.Term - var err error - switch typ := aliases.Unalias(typ).(type) { - case *types.TypeParam: - terms, err = typeparams.StructuralTerms(typ) - case *types.Union: - terms, err = typeparams.UnionTermSet(typ) - case *types.Interface: - terms, err = typeparams.InterfaceTermSet(typ) - default: - // Common case. - // Specializing the len=1 case to avoid a slice - // had no measurable space/time benefit. - terms = []*types.Term{types.NewTerm(false, typ)} - } - - if err != nil { - return termList(nil) - } - return termList(terms) -} - -// underIs calls f with the underlying types of the specific type terms -// of s and reports whether all calls to f returned true. If there are -// no specific terms, underIs returns the result of f(nil). -func underIs(s termList, f func(types.Type) bool) bool { - if s.Len() == 0 { - return f(nil) - } - for i := 0; i < s.Len(); i++ { - u := s.At(i).Underlying() - if !f(u) { - return false - } - } - return true -} - -// indexType returns the element type and index mode of a IndexExpr over a type. -// It returns (nil, invalid) if the type is not indexable; this should never occur in a well-typed program. -func indexType(typ types.Type) (types.Type, indexMode) { - switch U := typ.Underlying().(type) { - case *types.Array: - return U.Elem(), ixArrVar - case *types.Pointer: - if arr, ok := U.Elem().Underlying().(*types.Array); ok { - return arr.Elem(), ixVar - } - case *types.Slice: - return U.Elem(), ixVar - case *types.Map: - return U.Elem(), ixMap - case *types.Basic: - return tByte, ixValue // must be a string - case *types.Interface: - tset := typeSetOf(U) - if tset.Len() == 0 { - return nil, ixInvalid // no underlying terms or error is empty. - } - - elem, mode := indexType(tset.At(0)) - for i := 1; i < tset.Len() && mode != ixInvalid; i++ { - e, m := indexType(tset.At(i)) - if !types.Identical(elem, e) { // if type checked, just a sanity check - return nil, ixInvalid - } - // Update the mode to the most constrained address type. - mode = mode.meet(m) - } - if mode != ixInvalid { - return elem, mode - } - } - return nil, ixInvalid -} - -// An indexMode specifies the (addressing) mode of an index operand. -// -// Addressing mode of an index operation is based on the set of -// underlying types. -// Hasse diagram of the indexMode meet semi-lattice: -// -// ixVar ixMap -// | | -// ixArrVar | -// | | -// ixValue | -// \ / -// ixInvalid -type indexMode byte - -const ( - ixInvalid indexMode = iota // index is invalid - ixValue // index is a computed value (not addressable) - ixArrVar // like ixVar, but index operand contains an array - ixVar // index is an addressable variable - ixMap // index is a map index expression (acts like a variable on lhs, commaok on rhs of an assignment) -) - -// meet is the address type that is constrained by both x and y. -func (x indexMode) meet(y indexMode) indexMode { - if (x == ixMap || y == ixMap) && x != y { - return ixInvalid - } - // Use int representation and return min. 
- if x < y { - return y - } - return x -} diff --git a/vendor/golang.org/x/tools/go/ssa/create.go b/vendor/golang.org/x/tools/go/ssa/create.go deleted file mode 100644 index f8f584a1a..000000000 --- a/vendor/golang.org/x/tools/go/ssa/create.go +++ /dev/null @@ -1,332 +0,0 @@ -// Copyright 2013 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package ssa - -// This file implements the CREATE phase of SSA construction. -// See builder.go for explanation. - -import ( - "fmt" - "go/ast" - "go/token" - "go/types" - "os" - "sync" - - "golang.org/x/tools/internal/versions" -) - -// NewProgram returns a new SSA Program. -// -// mode controls diagnostics and checking during SSA construction. -// -// To construct an SSA program: -// -// - Call NewProgram to create an empty Program. -// - Call CreatePackage providing typed syntax for each package -// you want to build, and call it with types but not -// syntax for each of those package's direct dependencies. -// - Call [Package.Build] on each syntax package you wish to build, -// or [Program.Build] to build all of them. -// -// See the Example tests for simple examples. -func NewProgram(fset *token.FileSet, mode BuilderMode) *Program { - return &Program{ - Fset: fset, - imported: make(map[string]*Package), - packages: make(map[*types.Package]*Package), - mode: mode, - canon: newCanonizer(), - ctxt: types.NewContext(), - parameterized: tpWalker{seen: make(map[types.Type]bool)}, - } -} - -// memberFromObject populates package pkg with a member for the -// typechecker object obj. -// -// For objects from Go source code, syntax is the associated syntax -// tree (for funcs and vars only) and goversion defines the -// appropriate interpretation; they will be used during the build -// phase. -func memberFromObject(pkg *Package, obj types.Object, syntax ast.Node, goversion string) { - name := obj.Name() - switch obj := obj.(type) { - case *types.Builtin: - if pkg.Pkg != types.Unsafe { - panic("unexpected builtin object: " + obj.String()) - } - - case *types.TypeName: - if name != "_" { - pkg.Members[name] = &Type{ - object: obj, - pkg: pkg, - } - } - - case *types.Const: - c := &NamedConst{ - object: obj, - Value: NewConst(obj.Val(), obj.Type()), - pkg: pkg, - } - pkg.objects[obj] = c - if name != "_" { - pkg.Members[name] = c - } - - case *types.Var: - g := &Global{ - Pkg: pkg, - name: name, - object: obj, - typ: types.NewPointer(obj.Type()), // address - pos: obj.Pos(), - } - pkg.objects[obj] = g - if name != "_" { - pkg.Members[name] = g - } - - case *types.Func: - sig := obj.Type().(*types.Signature) - if sig.Recv() == nil && name == "init" { - pkg.ninit++ - name = fmt.Sprintf("init#%d", pkg.ninit) - } - fn := createFunction(pkg.Prog, obj, name, syntax, pkg.info, goversion, &pkg.created) - fn.Pkg = pkg - pkg.objects[obj] = fn - if name != "_" && sig.Recv() == nil { - pkg.Members[name] = fn // package-level function - } - - default: // (incl. *types.Package) - panic("unexpected Object type: " + obj.String()) - } -} - -// createFunction creates a function or method. It supports both -// CreatePackage (with or without syntax) and the on-demand creation -// of methods in non-created packages based on their types.Func. -func createFunction(prog *Program, obj *types.Func, name string, syntax ast.Node, info *types.Info, goversion string, cr *creator) *Function { - sig := obj.Type().(*types.Signature) - - // Collect type parameters. 
- var tparams *types.TypeParamList - if rtparams := sig.RecvTypeParams(); rtparams.Len() > 0 { - tparams = rtparams // method of generic type - } else if sigparams := sig.TypeParams(); sigparams.Len() > 0 { - tparams = sigparams // generic function - } - - /* declared function/method (from syntax or export data) */ - fn := &Function{ - name: name, - object: obj, - Signature: sig, - build: (*builder).buildFromSyntax, - syntax: syntax, - info: info, - goversion: goversion, - pos: obj.Pos(), - Pkg: nil, // may be set by caller - Prog: prog, - typeparams: tparams, - } - if fn.syntax == nil { - fn.Synthetic = "from type information" - fn.build = (*builder).buildParamsOnly - } - if tparams.Len() > 0 { - fn.generic = new(generic) - } - cr.Add(fn) - return fn -} - -// membersFromDecl populates package pkg with members for each -// typechecker object (var, func, const or type) associated with the -// specified decl. -func membersFromDecl(pkg *Package, decl ast.Decl, goversion string) { - switch decl := decl.(type) { - case *ast.GenDecl: // import, const, type or var - switch decl.Tok { - case token.CONST: - for _, spec := range decl.Specs { - for _, id := range spec.(*ast.ValueSpec).Names { - memberFromObject(pkg, pkg.info.Defs[id], nil, "") - } - } - - case token.VAR: - for _, spec := range decl.Specs { - for _, rhs := range spec.(*ast.ValueSpec).Values { - pkg.initVersion[rhs] = goversion - } - for _, id := range spec.(*ast.ValueSpec).Names { - memberFromObject(pkg, pkg.info.Defs[id], spec, goversion) - } - } - - case token.TYPE: - for _, spec := range decl.Specs { - id := spec.(*ast.TypeSpec).Name - memberFromObject(pkg, pkg.info.Defs[id], nil, "") - } - } - - case *ast.FuncDecl: - id := decl.Name - memberFromObject(pkg, pkg.info.Defs[id], decl, goversion) - } -} - -// creator tracks functions that have finished their CREATE phases. -// -// All Functions belong to the same Program. May have differing packages. -// -// creators are not thread-safe. -type creator []*Function - -func (c *creator) Add(fn *Function) { - *c = append(*c, fn) -} -func (c *creator) At(i int) *Function { return (*c)[i] } -func (c *creator) Len() int { return len(*c) } - -// CreatePackage creates and returns an SSA Package from the -// specified type-checked, error-free file ASTs, and populates its -// Members mapping. -// -// importable determines whether this package should be returned by a -// subsequent call to ImportedPackage(pkg.Path()). -// -// The real work of building SSA form for each function is not done -// until a subsequent call to Package.Build. -// -// CreatePackage should not be called after building any package in -// the program. -func (prog *Program) CreatePackage(pkg *types.Package, files []*ast.File, info *types.Info, importable bool) *Package { - // TODO(adonovan): assert that no package has yet been built. - if pkg == nil { - panic("nil pkg") // otherwise pkg.Scope below returns types.Universe! - } - p := &Package{ - Prog: prog, - Members: make(map[string]Member), - objects: make(map[types.Object]Member), - Pkg: pkg, - syntax: info != nil, - // transient values (cleared after Package.Build) - info: info, - files: files, - initVersion: make(map[ast.Expr]string), - } - - /* synthesized package initializer */ - p.init = &Function{ - name: "init", - Signature: new(types.Signature), - Synthetic: "package initializer", - Pkg: p, - Prog: prog, - build: (*builder).buildPackageInit, - info: p.info, - goversion: "", // See Package.build for details. 
- } - p.Members[p.init.name] = p.init - p.created.Add(p.init) - - // Allocate all package members: vars, funcs, consts and types. - if len(files) > 0 { - // Go source package. - for _, file := range files { - goversion := versions.Lang(versions.FileVersion(p.info, file)) - for _, decl := range file.Decls { - membersFromDecl(p, decl, goversion) - } - } - } else { - // GC-compiled binary package (or "unsafe") - // No code. - // No position information. - scope := p.Pkg.Scope() - for _, name := range scope.Names() { - obj := scope.Lookup(name) - memberFromObject(p, obj, nil, "") - if obj, ok := obj.(*types.TypeName); ok { - // No Unalias: aliases should not duplicate methods. - if named, ok := obj.Type().(*types.Named); ok { - for i, n := 0, named.NumMethods(); i < n; i++ { - memberFromObject(p, named.Method(i), nil, "") - } - } - } - } - } - - if prog.mode&BareInits == 0 { - // Add initializer guard variable. - initguard := &Global{ - Pkg: p, - name: "init$guard", - typ: types.NewPointer(tBool), - } - p.Members[initguard.Name()] = initguard - } - - if prog.mode&GlobalDebug != 0 { - p.SetDebugMode(true) - } - - if prog.mode&PrintPackages != 0 { - printMu.Lock() - p.WriteTo(os.Stdout) - printMu.Unlock() - } - - if importable { - prog.imported[p.Pkg.Path()] = p - } - prog.packages[p.Pkg] = p - - return p -} - -// printMu serializes printing of Packages/Functions to stdout. -var printMu sync.Mutex - -// AllPackages returns a new slice containing all packages created by -// prog.CreatePackage in in unspecified order. -func (prog *Program) AllPackages() []*Package { - pkgs := make([]*Package, 0, len(prog.packages)) - for _, pkg := range prog.packages { - pkgs = append(pkgs, pkg) - } - return pkgs -} - -// ImportedPackage returns the importable Package whose PkgPath -// is path, or nil if no such Package has been created. -// -// A parameter to CreatePackage determines whether a package should be -// considered importable. For example, no import declaration can resolve -// to the ad-hoc main package created by 'go build foo.go'. -// -// TODO(adonovan): rethink this function and the "importable" concept; -// most packages are importable. This function assumes that all -// types.Package.Path values are unique within the ssa.Program, which is -// false---yet this function remains very convenient. -// Clients should use (*Program).Package instead where possible. -// SSA doesn't really need a string-keyed map of packages. -// -// Furthermore, the graph of packages may contain multiple variants -// (e.g. "p" vs "p as compiled for q.test"), and each has a different -// view of its dependencies. -func (prog *Program) ImportedPackage(path string) *Package { - return prog.imported[path] -} diff --git a/vendor/golang.org/x/tools/go/ssa/doc.go b/vendor/golang.org/x/tools/go/ssa/doc.go deleted file mode 100644 index 3310b5509..000000000 --- a/vendor/golang.org/x/tools/go/ssa/doc.go +++ /dev/null @@ -1,122 +0,0 @@ -// Copyright 2013 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Package ssa defines a representation of the elements of Go programs -// (packages, types, functions, variables and constants) using a -// static single-assignment (SSA) form intermediate representation -// (IR) for the bodies of functions. -// -// For an introduction to SSA form, see -// http://en.wikipedia.org/wiki/Static_single_assignment_form. 
-// This page provides a broader reading list: -// http://www.dcs.gla.ac.uk/~jsinger/ssa.html. -// -// The level of abstraction of the SSA form is intentionally close to -// the source language to facilitate construction of source analysis -// tools. It is not intended for machine code generation. -// -// All looping, branching and switching constructs are replaced with -// unstructured control flow. Higher-level control flow constructs -// such as multi-way branch can be reconstructed as needed; see -// [golang.org/x/tools/go/ssa/ssautil.Switches] for an example. -// -// The simplest way to create the SSA representation of a package is -// to load typed syntax trees using [golang.org/x/tools/go/packages], then -// invoke the [golang.org/x/tools/go/ssa/ssautil.Packages] helper function. -// (See the package-level Examples named LoadPackages and LoadWholeProgram.) -// The resulting [ssa.Program] contains all the packages and their -// members, but SSA code is not created for function bodies until a -// subsequent call to [Package.Build] or [Program.Build]. -// -// The builder initially builds a naive SSA form in which all local -// variables are addresses of stack locations with explicit loads and -// stores. Registerisation of eligible locals and φ-node insertion -// using dominance and dataflow are then performed as a second pass -// called "lifting" to improve the accuracy and performance of -// subsequent analyses; this pass can be skipped by setting the -// NaiveForm builder flag. -// -// The primary interfaces of this package are: -// -// - [Member]: a named member of a Go package. -// - [Value]: an expression that yields a value. -// - [Instruction]: a statement that consumes values and performs computation. -// - [Node]: a [Value] or [Instruction] (emphasizing its membership in the SSA value graph) -// -// A computation that yields a result implements both the [Value] and -// [Instruction] interfaces. The following table shows for each -// concrete type which of these interfaces it implements. -// -// Value? Instruction? Member? -// *Alloc ✔ ✔ -// *BinOp ✔ ✔ -// *Builtin ✔ -// *Call ✔ ✔ -// *ChangeInterface ✔ ✔ -// *ChangeType ✔ ✔ -// *Const ✔ -// *Convert ✔ ✔ -// *DebugRef ✔ -// *Defer ✔ -// *Extract ✔ ✔ -// *Field ✔ ✔ -// *FieldAddr ✔ ✔ -// *FreeVar ✔ -// *Function ✔ ✔ (func) -// *Global ✔ ✔ (var) -// *Go ✔ -// *If ✔ -// *Index ✔ ✔ -// *IndexAddr ✔ ✔ -// *Jump ✔ -// *Lookup ✔ ✔ -// *MakeChan ✔ ✔ -// *MakeClosure ✔ ✔ -// *MakeInterface ✔ ✔ -// *MakeMap ✔ ✔ -// *MakeSlice ✔ ✔ -// *MapUpdate ✔ -// *MultiConvert ✔ ✔ -// *NamedConst ✔ (const) -// *Next ✔ ✔ -// *Panic ✔ -// *Parameter ✔ -// *Phi ✔ ✔ -// *Range ✔ ✔ -// *Return ✔ -// *RunDefers ✔ -// *Select ✔ ✔ -// *Send ✔ -// *Slice ✔ ✔ -// *SliceToArrayPointer ✔ ✔ -// *Store ✔ -// *Type ✔ (type) -// *TypeAssert ✔ ✔ -// *UnOp ✔ ✔ -// -// Other key types in this package include: [Program], [Package], [Function] -// and [BasicBlock]. -// -// The program representation constructed by this package is fully -// resolved internally, i.e. it does not rely on the names of Values, -// Packages, Functions, Types or BasicBlocks for the correct -// interpretation of the program. Only the identities of objects and -// the topology of the SSA and type graphs are semantically -// significant. (There is one exception: [types.Id] values, which identify field -// and method names, contain strings.) Avoidance of name-based -// operations simplifies the implementation of subsequent passes and -// can make them very efficient. 
Many objects are nonetheless named -// to aid in debugging, but it is not essential that the names be -// either accurate or unambiguous. The public API exposes a number of -// name-based maps for client convenience. -// -// The [golang.org/x/tools/go/ssa/ssautil] package provides various -// helper functions, for example to simplify loading a Go program into -// SSA form. -// -// TODO(adonovan): write a how-to document for all the various cases -// of trying to determine corresponding elements across the four -// domains of source locations, ast.Nodes, types.Objects, -// ssa.Values/Instructions. -package ssa // import "golang.org/x/tools/go/ssa" diff --git a/vendor/golang.org/x/tools/go/ssa/dom.go b/vendor/golang.org/x/tools/go/ssa/dom.go deleted file mode 100644 index 02c1ae83a..000000000 --- a/vendor/golang.org/x/tools/go/ssa/dom.go +++ /dev/null @@ -1,340 +0,0 @@ -// Copyright 2013 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package ssa - -// This file defines algorithms related to dominance. - -// Dominator tree construction ---------------------------------------- -// -// We use the algorithm described in Lengauer & Tarjan. 1979. A fast -// algorithm for finding dominators in a flowgraph. -// http://doi.acm.org/10.1145/357062.357071 -// -// We also apply the optimizations to SLT described in Georgiadis et -// al, Finding Dominators in Practice, JGAA 2006, -// http://jgaa.info/accepted/2006/GeorgiadisTarjanWerneck2006.10.1.pdf -// to avoid the need for buckets of size > 1. - -import ( - "bytes" - "fmt" - "math/big" - "os" - "sort" -) - -// Idom returns the block that immediately dominates b: -// its parent in the dominator tree, if any. -// Neither the entry node (b.Index==0) nor recover node -// (b==b.Parent().Recover()) have a parent. -func (b *BasicBlock) Idom() *BasicBlock { return b.dom.idom } - -// Dominees returns the list of blocks that b immediately dominates: -// its children in the dominator tree. -func (b *BasicBlock) Dominees() []*BasicBlock { return b.dom.children } - -// Dominates reports whether b dominates c. -func (b *BasicBlock) Dominates(c *BasicBlock) bool { - return b.dom.pre <= c.dom.pre && c.dom.post <= b.dom.post -} - -// DomPreorder returns a new slice containing the blocks of f -// in a preorder traversal of the dominator tree. -func (f *Function) DomPreorder() []*BasicBlock { - slice := append([]*BasicBlock(nil), f.Blocks...) - sort.Slice(slice, func(i, j int) bool { - return slice[i].dom.pre < slice[j].dom.pre - }) - return slice -} - -// DomPostorder returns a new slice containing the blocks of f -// in a postorder traversal of the dominator tree. -// (This is not the same as a postdominance order.) -func (f *Function) DomPostorder() []*BasicBlock { - slice := append([]*BasicBlock(nil), f.Blocks...) - sort.Slice(slice, func(i, j int) bool { - return slice[i].dom.post < slice[j].dom.post - }) - return slice -} - -// domInfo contains a BasicBlock's dominance information. -type domInfo struct { - idom *BasicBlock // immediate dominator (parent in domtree) - children []*BasicBlock // nodes immediately dominated by this one - pre, post int32 // pre- and post-order numbering within domtree -} - -// ltState holds the working state for Lengauer-Tarjan algorithm -// (during which domInfo.pre is repurposed for CFG DFS preorder number). -type ltState struct { - // Each slice is indexed by b.Index. 
- sdom []*BasicBlock // b's semidominator - parent []*BasicBlock // b's parent in DFS traversal of CFG - ancestor []*BasicBlock // b's ancestor with least sdom -} - -// dfs implements the depth-first search part of the LT algorithm. -func (lt *ltState) dfs(v *BasicBlock, i int32, preorder []*BasicBlock) int32 { - preorder[i] = v - v.dom.pre = i // For now: DFS preorder of spanning tree of CFG - i++ - lt.sdom[v.Index] = v - lt.link(nil, v) - for _, w := range v.Succs { - if lt.sdom[w.Index] == nil { - lt.parent[w.Index] = v - i = lt.dfs(w, i, preorder) - } - } - return i -} - -// eval implements the EVAL part of the LT algorithm. -func (lt *ltState) eval(v *BasicBlock) *BasicBlock { - // TODO(adonovan): opt: do path compression per simple LT. - u := v - for ; lt.ancestor[v.Index] != nil; v = lt.ancestor[v.Index] { - if lt.sdom[v.Index].dom.pre < lt.sdom[u.Index].dom.pre { - u = v - } - } - return u -} - -// link implements the LINK part of the LT algorithm. -func (lt *ltState) link(v, w *BasicBlock) { - lt.ancestor[w.Index] = v -} - -// buildDomTree computes the dominator tree of f using the LT algorithm. -// Precondition: all blocks are reachable (e.g. optimizeBlocks has been run). -func buildDomTree(f *Function) { - // The step numbers refer to the original LT paper; the - // reordering is due to Georgiadis. - - // Clear any previous domInfo. - for _, b := range f.Blocks { - b.dom = domInfo{} - } - - n := len(f.Blocks) - // Allocate space for 5 contiguous [n]*BasicBlock arrays: - // sdom, parent, ancestor, preorder, buckets. - space := make([]*BasicBlock, 5*n) - lt := ltState{ - sdom: space[0:n], - parent: space[n : 2*n], - ancestor: space[2*n : 3*n], - } - - // Step 1. Number vertices by depth-first preorder. - preorder := space[3*n : 4*n] - root := f.Blocks[0] - prenum := lt.dfs(root, 0, preorder) - recover := f.Recover - if recover != nil { - lt.dfs(recover, prenum, preorder) - } - - buckets := space[4*n : 5*n] - copy(buckets, preorder) - - // In reverse preorder... - for i := int32(n) - 1; i > 0; i-- { - w := preorder[i] - - // Step 3. Implicitly define the immediate dominator of each node. - for v := buckets[i]; v != w; v = buckets[v.dom.pre] { - u := lt.eval(v) - if lt.sdom[u.Index].dom.pre < i { - v.dom.idom = u - } else { - v.dom.idom = w - } - } - - // Step 2. Compute the semidominators of all nodes. - lt.sdom[w.Index] = lt.parent[w.Index] - for _, v := range w.Preds { - u := lt.eval(v) - if lt.sdom[u.Index].dom.pre < lt.sdom[w.Index].dom.pre { - lt.sdom[w.Index] = lt.sdom[u.Index] - } - } - - lt.link(lt.parent[w.Index], w) - - if lt.parent[w.Index] == lt.sdom[w.Index] { - w.dom.idom = lt.parent[w.Index] - } else { - buckets[i] = buckets[lt.sdom[w.Index].dom.pre] - buckets[lt.sdom[w.Index].dom.pre] = w - } - } - - // The final 'Step 3' is now outside the loop. - for v := buckets[0]; v != root; v = buckets[v.dom.pre] { - v.dom.idom = root - } - - // Step 4. Explicitly define the immediate dominator of each - // node, in preorder. - for _, w := range preorder[1:] { - if w == root || w == recover { - w.dom.idom = nil - } else { - if w.dom.idom != lt.sdom[w.Index] { - w.dom.idom = w.dom.idom.dom.idom - } - // Calculate Children relation as inverse of Idom. 
- w.dom.idom.dom.children = append(w.dom.idom.dom.children, w) - } - } - - pre, post := numberDomTree(root, 0, 0) - if recover != nil { - numberDomTree(recover, pre, post) - } - - // printDomTreeDot(os.Stderr, f) // debugging - // printDomTreeText(os.Stderr, root, 0) // debugging - - if f.Prog.mode&SanityCheckFunctions != 0 { - sanityCheckDomTree(f) - } -} - -// numberDomTree sets the pre- and post-order numbers of a depth-first -// traversal of the dominator tree rooted at v. These are used to -// answer dominance queries in constant time. -func numberDomTree(v *BasicBlock, pre, post int32) (int32, int32) { - v.dom.pre = pre - pre++ - for _, child := range v.dom.children { - pre, post = numberDomTree(child, pre, post) - } - v.dom.post = post - post++ - return pre, post -} - -// Testing utilities ---------------------------------------- - -// sanityCheckDomTree checks the correctness of the dominator tree -// computed by the LT algorithm by comparing against the dominance -// relation computed by a naive Kildall-style forward dataflow -// analysis (Algorithm 10.16 from the "Dragon" book). -func sanityCheckDomTree(f *Function) { - n := len(f.Blocks) - - // D[i] is the set of blocks that dominate f.Blocks[i], - // represented as a bit-set of block indices. - D := make([]big.Int, n) - - one := big.NewInt(1) - - // all is the set of all blocks; constant. - var all big.Int - all.Set(one).Lsh(&all, uint(n)).Sub(&all, one) - - // Initialization. - for i, b := range f.Blocks { - if i == 0 || b == f.Recover { - // A root is dominated only by itself. - D[i].SetBit(&D[0], 0, 1) - } else { - // All other blocks are (initially) dominated - // by every block. - D[i].Set(&all) - } - } - - // Iteration until fixed point. - for changed := true; changed; { - changed = false - for i, b := range f.Blocks { - if i == 0 || b == f.Recover { - continue - } - // Compute intersection across predecessors. - var x big.Int - x.Set(&all) - for _, pred := range b.Preds { - x.And(&x, &D[pred.Index]) - } - x.SetBit(&x, i, 1) // a block always dominates itself. - if D[i].Cmp(&x) != 0 { - D[i].Set(&x) - changed = true - } - } - } - - // Check the entire relation. O(n^2). - // The Recover block (if any) must be treated specially so we skip it. - ok := true - for i := 0; i < n; i++ { - for j := 0; j < n; j++ { - b, c := f.Blocks[i], f.Blocks[j] - if c == f.Recover { - continue - } - actual := b.Dominates(c) - expected := D[j].Bit(i) == 1 - if actual != expected { - fmt.Fprintf(os.Stderr, "dominates(%s, %s)==%t, want %t\n", b, c, actual, expected) - ok = false - } - } - } - - preorder := f.DomPreorder() - for _, b := range f.Blocks { - if got := preorder[b.dom.pre]; got != b { - fmt.Fprintf(os.Stderr, "preorder[%d]==%s, want %s\n", b.dom.pre, got, b) - ok = false - } - } - - if !ok { - panic("sanityCheckDomTree failed for " + f.String()) - } - -} - -// Printing functions ---------------------------------------- - -// printDomTreeText prints the dominator tree as text, using indentation. -func printDomTreeText(buf *bytes.Buffer, v *BasicBlock, indent int) { - fmt.Fprintf(buf, "%*s%s\n", 4*indent, "", v) - for _, child := range v.dom.children { - printDomTreeText(buf, child, indent+1) - } -} - -// printDomTreeDot prints the dominator tree of f in AT&T GraphViz -// (.dot) format. 
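The exported dominance queries removed above (Idom, Dominees, Dominates, DomPreorder) remain available to clients once a function has been built. A small sketch, assuming fn is a built, non-external *ssa.Function; printDominators is an illustrative name.

package example

import (
	"fmt"

	"golang.org/x/tools/go/ssa"
)

// printDominators walks the dominator tree of fn in preorder and reports
// each block's immediate dominator.
func printDominators(fn *ssa.Function) {
	for _, b := range fn.DomPreorder() {
		if idom := b.Idom(); idom != nil {
			fmt.Printf("%s: immediately dominated by %s\n", b, idom)
		}
		for _, child := range b.Dominees() {
			// Dominates holds for every child in the dominator tree.
			_ = b.Dominates(child)
		}
	}
}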
-func printDomTreeDot(buf *bytes.Buffer, f *Function) { - fmt.Fprintln(buf, "//", f) - fmt.Fprintln(buf, "digraph domtree {") - for i, b := range f.Blocks { - v := b.dom - fmt.Fprintf(buf, "\tn%d [label=\"%s (%d, %d)\",shape=\"rectangle\"];\n", v.pre, b, v.pre, v.post) - // TODO(adonovan): improve appearance of edges - // belonging to both dominator tree and CFG. - - // Dominator tree edge. - if i != 0 { - fmt.Fprintf(buf, "\tn%d -> n%d [style=\"solid\",weight=100];\n", v.idom.dom.pre, v.pre) - } - // CFG edges. - for _, pred := range b.Preds { - fmt.Fprintf(buf, "\tn%d -> n%d [style=\"dotted\",weight=0];\n", pred.dom.pre, v.pre) - } - } - fmt.Fprintln(buf, "}") -} diff --git a/vendor/golang.org/x/tools/go/ssa/emit.go b/vendor/golang.org/x/tools/go/ssa/emit.go deleted file mode 100644 index 549c9114d..000000000 --- a/vendor/golang.org/x/tools/go/ssa/emit.go +++ /dev/null @@ -1,622 +0,0 @@ -// Copyright 2013 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package ssa - -// Helpers for emitting SSA instructions. - -import ( - "fmt" - "go/ast" - "go/token" - "go/types" - - "golang.org/x/tools/internal/aliases" - "golang.org/x/tools/internal/typeparams" -) - -// emitAlloc emits to f a new Alloc instruction allocating a variable -// of type typ. -// -// The caller must set Alloc.Heap=true (for an heap-allocated variable) -// or add the Alloc to f.Locals (for a frame-allocated variable). -// -// During building, a variable in f.Locals may have its Heap flag -// set when it is discovered that its address is taken. -// These Allocs are removed from f.Locals at the end. -// -// The builder should generally call one of the emit{New,Local,LocalVar} wrappers instead. -func emitAlloc(f *Function, typ types.Type, pos token.Pos, comment string) *Alloc { - v := &Alloc{Comment: comment} - v.setType(types.NewPointer(typ)) - v.setPos(pos) - f.emit(v) - return v -} - -// emitNew emits to f a new Alloc instruction heap-allocating a -// variable of type typ. pos is the optional source location. -func emitNew(f *Function, typ types.Type, pos token.Pos, comment string) *Alloc { - alloc := emitAlloc(f, typ, pos, comment) - alloc.Heap = true - return alloc -} - -// emitLocal creates a local var for (t, pos, comment) and -// emits an Alloc instruction for it. -// -// (Use this function or emitNew for synthetic variables; -// for source-level variables, use emitLocalVar.) -func emitLocal(f *Function, t types.Type, pos token.Pos, comment string) *Alloc { - local := emitAlloc(f, t, pos, comment) - f.Locals = append(f.Locals, local) - return local -} - -// emitLocalVar creates a local var for v and emits an Alloc instruction for it. -// Subsequent calls to f.lookup(v) return it. -// It applies the appropriate generic instantiation to the type. -func emitLocalVar(f *Function, v *types.Var) *Alloc { - alloc := emitLocal(f, f.typ(v.Type()), v.Pos(), v.Name()) - f.vars[v] = alloc - return alloc -} - -// emitLoad emits to f an instruction to load the address addr into a -// new temporary, and returns the value so defined. -func emitLoad(f *Function, addr Value) *UnOp { - v := &UnOp{Op: token.MUL, X: addr} - v.setType(typeparams.MustDeref(addr.Type())) - f.emit(v) - return v -} - -// emitDebugRef emits to f a DebugRef pseudo-instruction associating -// expression e with value v. 
-func emitDebugRef(f *Function, e ast.Expr, v Value, isAddr bool) { - if !f.debugInfo() { - return // debugging not enabled - } - if v == nil || e == nil { - panic("nil") - } - var obj types.Object - e = unparen(e) - if id, ok := e.(*ast.Ident); ok { - if isBlankIdent(id) { - return - } - obj = f.objectOf(id) - switch obj.(type) { - case *types.Nil, *types.Const, *types.Builtin: - return - } - } - f.emit(&DebugRef{ - X: v, - Expr: e, - IsAddr: isAddr, - object: obj, - }) -} - -// emitArith emits to f code to compute the binary operation op(x, y) -// where op is an eager shift, logical or arithmetic operation. -// (Use emitCompare() for comparisons and Builder.logicalBinop() for -// non-eager operations.) -func emitArith(f *Function, op token.Token, x, y Value, t types.Type, pos token.Pos) Value { - switch op { - case token.SHL, token.SHR: - x = emitConv(f, x, t) - // y may be signed or an 'untyped' constant. - - // There is a runtime panic if y is signed and <0. Instead of inserting a check for y<0 - // and converting to an unsigned value (like the compiler) leave y as is. - - if isUntyped(y.Type().Underlying()) { - // Untyped conversion: - // Spec https://go.dev/ref/spec#Operators: - // The right operand in a shift expression must have integer type or be an untyped constant - // representable by a value of type uint. - y = emitConv(f, y, types.Typ[types.Uint]) - } - - case token.ADD, token.SUB, token.MUL, token.QUO, token.REM, token.AND, token.OR, token.XOR, token.AND_NOT: - x = emitConv(f, x, t) - y = emitConv(f, y, t) - - default: - panic("illegal op in emitArith: " + op.String()) - - } - v := &BinOp{ - Op: op, - X: x, - Y: y, - } - v.setPos(pos) - v.setType(t) - return f.emit(v) -} - -// emitCompare emits to f code compute the boolean result of -// comparison 'x op y'. -func emitCompare(f *Function, op token.Token, x, y Value, pos token.Pos) Value { - xt := x.Type().Underlying() - yt := y.Type().Underlying() - - // Special case to optimise a tagless SwitchStmt so that - // these are equivalent - // switch { case e: ...} - // switch true { case e: ... } - // if e==true { ... } - // even in the case when e's type is an interface. - // TODO(adonovan): opt: generalise to x==true, false!=y, etc. - if x == vTrue && op == token.EQL { - if yt, ok := yt.(*types.Basic); ok && yt.Info()&types.IsBoolean != 0 { - return y - } - } - - if types.Identical(xt, yt) { - // no conversion necessary - } else if isNonTypeParamInterface(x.Type()) { - y = emitConv(f, y, x.Type()) - } else if isNonTypeParamInterface(y.Type()) { - x = emitConv(f, x, y.Type()) - } else if _, ok := x.(*Const); ok { - x = emitConv(f, x, y.Type()) - } else if _, ok := y.(*Const); ok { - y = emitConv(f, y, x.Type()) - } else { - // other cases, e.g. channels. No-op. - } - - v := &BinOp{ - Op: op, - X: x, - Y: y, - } - v.setPos(pos) - v.setType(tBool) - return f.emit(v) -} - -// isValuePreserving returns true if a conversion from ut_src to -// ut_dst is value-preserving, i.e. just a change of type. -// Precondition: neither argument is a named or alias type. -func isValuePreserving(ut_src, ut_dst types.Type) bool { - // Identical underlying types? - if types.IdenticalIgnoreTags(ut_dst, ut_src) { - return true - } - - switch ut_dst.(type) { - case *types.Chan: - // Conversion between channel types? - _, ok := ut_src.(*types.Chan) - return ok - - case *types.Pointer: - // Conversion between pointers with identical base types? 
- _, ok := ut_src.(*types.Pointer) - return ok - } - return false -} - -// emitConv emits to f code to convert Value val to exactly type typ, -// and returns the converted value. Implicit conversions are required -// by language assignability rules in assignments, parameter passing, -// etc. -func emitConv(f *Function, val Value, typ types.Type) Value { - t_src := val.Type() - - // Identical types? Conversion is a no-op. - if types.Identical(t_src, typ) { - return val - } - ut_dst := typ.Underlying() - ut_src := t_src.Underlying() - - // Conversion to, or construction of a value of, an interface type? - if isNonTypeParamInterface(typ) { - // Interface name change? - if isValuePreserving(ut_src, ut_dst) { - c := &ChangeType{X: val} - c.setType(typ) - return f.emit(c) - } - - // Assignment from one interface type to another? - if isNonTypeParamInterface(t_src) { - c := &ChangeInterface{X: val} - c.setType(typ) - return f.emit(c) - } - - // Untyped nil constant? Return interface-typed nil constant. - if ut_src == tUntypedNil { - return zeroConst(typ) - } - - // Convert (non-nil) "untyped" literals to their default type. - if t, ok := ut_src.(*types.Basic); ok && t.Info()&types.IsUntyped != 0 { - val = emitConv(f, val, types.Default(ut_src)) - } - - // Record the types of operands to MakeInterface, if - // non-parameterized, as they are the set of runtime types. - t := val.Type() - if f.typeparams.Len() == 0 || !f.Prog.parameterized.isParameterized(t) { - addRuntimeType(f.Prog, t) - } - - mi := &MakeInterface{X: val} - mi.setType(typ) - return f.emit(mi) - } - - // In the common case, the typesets of src and dst are singletons - // and we emit an appropriate conversion. But if either contains - // a type parameter, the conversion may represent a cross product, - // in which case which we emit a MultiConvert. - dst_terms := typeSetOf(ut_dst) - src_terms := typeSetOf(ut_src) - - // conversionCase describes an instruction pattern that maybe emitted to - // model d <- s for d in dst_terms and s in src_terms. - // Multiple conversions can match the same pattern. - type conversionCase uint8 - const ( - changeType conversionCase = 1 << iota - sliceToArray - sliceToArrayPtr - sliceTo0Array - sliceTo0ArrayPtr - convert - ) - classify := func(s, d types.Type) conversionCase { - // Just a change of type, but not value or representation? - if isValuePreserving(s, d) { - return changeType - } - - // Conversion from slice to array or slice to array pointer? - if slice, ok := aliases.Unalias(s).(*types.Slice); ok { - var arr *types.Array - var ptr bool - // Conversion from slice to array pointer? - switch d := aliases.Unalias(d).(type) { - case *types.Array: - arr = d - case *types.Pointer: - arr, _ = d.Elem().Underlying().(*types.Array) - ptr = true - } - if arr != nil && types.Identical(slice.Elem(), arr.Elem()) { - if arr.Len() == 0 { - if ptr { - return sliceTo0ArrayPtr - } else { - return sliceTo0Array - } - } - if ptr { - return sliceToArrayPtr - } else { - return sliceToArray - } - } - } - - // The only remaining case in well-typed code is a representation- - // changing conversion of basic types (possibly with []byte/[]rune). 
- if !isBasic(s) && !isBasic(d) { - panic(fmt.Sprintf("in %s: cannot convert term %s (%s [within %s]) to type %s [within %s]", f, val, val.Type(), s, typ, d)) - } - return convert - } - - var classifications conversionCase - for _, s := range src_terms { - us := s.Type().Underlying() - for _, d := range dst_terms { - ud := d.Type().Underlying() - classifications |= classify(us, ud) - } - } - if classifications == 0 { - panic(fmt.Sprintf("in %s: cannot convert %s (%s) to %s", f, val, val.Type(), typ)) - } - - // Conversion of a compile-time constant value? - if c, ok := val.(*Const); ok { - // Conversion to a basic type? - if isBasic(ut_dst) { - // Conversion of a compile-time constant to - // another constant type results in a new - // constant of the destination type and - // (initially) the same abstract value. - // We don't truncate the value yet. - return NewConst(c.Value, typ) - } - // Can we always convert from zero value without panicking? - const mayPanic = sliceToArray | sliceToArrayPtr - if c.Value == nil && classifications&mayPanic == 0 { - return NewConst(nil, typ) - } - - // We're converting from constant to non-constant type, - // e.g. string -> []byte/[]rune. - } - - switch classifications { - case changeType: // representation-preserving change - c := &ChangeType{X: val} - c.setType(typ) - return f.emit(c) - - case sliceToArrayPtr, sliceTo0ArrayPtr: // slice to array pointer - c := &SliceToArrayPointer{X: val} - c.setType(typ) - return f.emit(c) - - case sliceToArray: // slice to arrays (not zero-length) - ptype := types.NewPointer(typ) - p := &SliceToArrayPointer{X: val} - p.setType(ptype) - x := f.emit(p) - unOp := &UnOp{Op: token.MUL, X: x} - unOp.setType(typ) - return f.emit(unOp) - - case sliceTo0Array: // slice to zero-length arrays (constant) - return zeroConst(typ) - - case convert: // representation-changing conversion - c := &Convert{X: val} - c.setType(typ) - return f.emit(c) - - default: // multiple conversion - c := &MultiConvert{X: val, from: src_terms, to: dst_terms} - c.setType(typ) - return f.emit(c) - } -} - -// emitTypeCoercion emits to f code to coerce the type of a -// Value v to exactly type typ, and returns the coerced value. -// -// Requires that coercing v.Typ() to typ is a value preserving change. -// -// Currently used only when v.Type() is a type instance of typ or vice versa. -// A type v is a type instance of a type t if there exists a -// type parameter substitution σ s.t. σ(v) == t. Example: -// -// σ(func(T) T) == func(int) int for σ == [T ↦ int] -// -// This happens in instantiation wrappers for conversion -// from an instantiation to a parameterized type (and vice versa) -// with σ substituting f.typeparams by f.typeargs. -func emitTypeCoercion(f *Function, v Value, typ types.Type) Value { - if types.Identical(v.Type(), typ) { - return v // no coercion needed - } - // TODO(taking): for instances should we record which side is the instance? - c := &ChangeType{ - X: v, - } - c.setType(typ) - f.emit(c) - return c -} - -// emitStore emits to f an instruction to store value val at location -// addr, applying implicit conversions as required by assignability rules. -func emitStore(f *Function, addr, val Value, pos token.Pos) *Store { - typ := typeparams.MustDeref(addr.Type()) - s := &Store{ - Addr: addr, - Val: emitConv(f, val, typ), - pos: pos, - } - f.emit(s) - return s -} - -// emitJump emits to f a jump to target, and updates the control-flow graph. -// Postcondition: f.currentBlock is nil. 
-func emitJump(f *Function, target *BasicBlock) { - b := f.currentBlock - b.emit(new(Jump)) - addEdge(b, target) - f.currentBlock = nil -} - -// emitIf emits to f a conditional jump to tblock or fblock based on -// cond, and updates the control-flow graph. -// Postcondition: f.currentBlock is nil. -func emitIf(f *Function, cond Value, tblock, fblock *BasicBlock) { - b := f.currentBlock - b.emit(&If{Cond: cond}) - addEdge(b, tblock) - addEdge(b, fblock) - f.currentBlock = nil -} - -// emitExtract emits to f an instruction to extract the index'th -// component of tuple. It returns the extracted value. -func emitExtract(f *Function, tuple Value, index int) Value { - e := &Extract{Tuple: tuple, Index: index} - e.setType(tuple.Type().(*types.Tuple).At(index).Type()) - return f.emit(e) -} - -// emitTypeAssert emits to f a type assertion value := x.(t) and -// returns the value. x.Type() must be an interface. -func emitTypeAssert(f *Function, x Value, t types.Type, pos token.Pos) Value { - a := &TypeAssert{X: x, AssertedType: t} - a.setPos(pos) - a.setType(t) - return f.emit(a) -} - -// emitTypeTest emits to f a type test value,ok := x.(t) and returns -// a (value, ok) tuple. x.Type() must be an interface. -func emitTypeTest(f *Function, x Value, t types.Type, pos token.Pos) Value { - a := &TypeAssert{ - X: x, - AssertedType: t, - CommaOk: true, - } - a.setPos(pos) - a.setType(types.NewTuple( - newVar("value", t), - varOk, - )) - return f.emit(a) -} - -// emitTailCall emits to f a function call in tail position. The -// caller is responsible for all fields of 'call' except its type. -// Intended for wrapper methods. -// Precondition: f does/will not use deferred procedure calls. -// Postcondition: f.currentBlock is nil. -func emitTailCall(f *Function, call *Call) { - tresults := f.Signature.Results() - nr := tresults.Len() - if nr == 1 { - call.typ = tresults.At(0).Type() - } else { - call.typ = tresults - } - tuple := f.emit(call) - var ret Return - switch nr { - case 0: - // no-op - case 1: - ret.Results = []Value{tuple} - default: - for i := 0; i < nr; i++ { - v := emitExtract(f, tuple, i) - // TODO(adonovan): in principle, this is required: - // v = emitConv(f, o.Type, f.Signature.Results[i].Type) - // but in practice emitTailCall is only used when - // the types exactly match. - ret.Results = append(ret.Results, v) - } - } - f.emit(&ret) - f.currentBlock = nil -} - -// emitImplicitSelections emits to f code to apply the sequence of -// implicit field selections specified by indices to base value v, and -// returns the selected value. -// -// If v is the address of a struct, the result will be the address of -// a field; if it is the value of a struct, the result will be the -// value of a field. -func emitImplicitSelections(f *Function, v Value, indices []int, pos token.Pos) Value { - for _, index := range indices { - if st, vptr := deref(v.Type()); vptr { - fld := fieldOf(st, index) - instr := &FieldAddr{ - X: v, - Field: index, - } - instr.setPos(pos) - instr.setType(types.NewPointer(fld.Type())) - v = f.emit(instr) - // Load the field's value iff indirectly embedded. - if _, fldptr := deref(fld.Type()); fldptr { - v = emitLoad(f, v) - } - } else { - fld := fieldOf(v.Type(), index) - instr := &Field{ - X: v, - Field: index, - } - instr.setPos(pos) - instr.setType(fld.Type()) - v = f.emit(instr) - } - } - return v -} - -// emitFieldSelection emits to f code to select the index'th field of v. 
-// -// If wantAddr, the input must be a pointer-to-struct and the result -// will be the field's address; otherwise the result will be the -// field's value. -// Ident id is used for position and debug info. -func emitFieldSelection(f *Function, v Value, index int, wantAddr bool, id *ast.Ident) Value { - if st, vptr := deref(v.Type()); vptr { - fld := fieldOf(st, index) - instr := &FieldAddr{ - X: v, - Field: index, - } - instr.setPos(id.Pos()) - instr.setType(types.NewPointer(fld.Type())) - v = f.emit(instr) - // Load the field's value iff we don't want its address. - if !wantAddr { - v = emitLoad(f, v) - } - } else { - fld := fieldOf(v.Type(), index) - instr := &Field{ - X: v, - Field: index, - } - instr.setPos(id.Pos()) - instr.setType(fld.Type()) - v = f.emit(instr) - } - emitDebugRef(f, id, v, wantAddr) - return v -} - -// createRecoverBlock emits to f a block of code to return after a -// recovered panic, and sets f.Recover to it. -// -// If f's result parameters are named, the code loads and returns -// their current values, otherwise it returns the zero values of their -// type. -// -// Idempotent. -func createRecoverBlock(f *Function) { - if f.Recover != nil { - return // already created - } - saved := f.currentBlock - - f.Recover = f.newBasicBlock("recover") - f.currentBlock = f.Recover - - var results []Value - if f.namedResults != nil { - // Reload NRPs to form value tuple. - for _, r := range f.namedResults { - results = append(results, emitLoad(f, r)) - } - } else { - R := f.Signature.Results() - for i, n := 0, R.Len(); i < n; i++ { - T := R.At(i).Type() - - // Return zero value of each result type. - results = append(results, zeroConst(T)) - } - } - f.emit(&Return{Results: results}) - - f.currentBlock = saved -} diff --git a/vendor/golang.org/x/tools/go/ssa/func.go b/vendor/golang.org/x/tools/go/ssa/func.go deleted file mode 100644 index 4d3e39129..000000000 --- a/vendor/golang.org/x/tools/go/ssa/func.go +++ /dev/null @@ -1,642 +0,0 @@ -// Copyright 2013 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package ssa - -// This file implements the Function type. - -import ( - "bytes" - "fmt" - "go/ast" - "go/types" - "io" - "os" - "strings" - - "golang.org/x/tools/internal/typeparams" -) - -// Like ObjectOf, but panics instead of returning nil. -// Only valid during f's create and build phases. -func (f *Function) objectOf(id *ast.Ident) types.Object { - if o := f.info.ObjectOf(id); o != nil { - return o - } - panic(fmt.Sprintf("no types.Object for ast.Ident %s @ %s", - id.Name, f.Prog.Fset.Position(id.Pos()))) -} - -// Like TypeOf, but panics instead of returning nil. -// Only valid during f's create and build phases. -func (f *Function) typeOf(e ast.Expr) types.Type { - if T := f.info.TypeOf(e); T != nil { - return f.typ(T) - } - panic(fmt.Sprintf("no type for %T @ %s", e, f.Prog.Fset.Position(e.Pos()))) -} - -// typ is the locally instantiated type of T. T==typ(T) if f is not an instantiation. -func (f *Function) typ(T types.Type) types.Type { - return f.subst.typ(T) -} - -// If id is an Instance, returns info.Instances[id].Type. -// Otherwise returns f.typeOf(id). -func (f *Function) instanceType(id *ast.Ident) types.Type { - if t, ok := f.info.Instances[id]; ok { - return t.Type - } - return f.typeOf(id) -} - -// selection returns a *selection corresponding to f.info.Selections[selector] -// with potential updates for type substitution. 
-func (f *Function) selection(selector *ast.SelectorExpr) *selection { - sel := f.info.Selections[selector] - if sel == nil { - return nil - } - - switch sel.Kind() { - case types.MethodExpr, types.MethodVal: - if recv := f.typ(sel.Recv()); recv != sel.Recv() { - // recv changed during type substitution. - pkg := f.declaredPackage().Pkg - obj, index, indirect := types.LookupFieldOrMethod(recv, true, pkg, sel.Obj().Name()) - - // sig replaces sel.Type(). See (types.Selection).Typ() for details. - sig := obj.Type().(*types.Signature) - sig = changeRecv(sig, newVar(sig.Recv().Name(), recv)) - if sel.Kind() == types.MethodExpr { - sig = recvAsFirstArg(sig) - } - return &selection{ - kind: sel.Kind(), - recv: recv, - typ: sig, - obj: obj, - index: index, - indirect: indirect, - } - } - } - return toSelection(sel) -} - -// Destinations associated with unlabelled for/switch/select stmts. -// We push/pop one of these as we enter/leave each construct and for -// each BranchStmt we scan for the innermost target of the right type. -type targets struct { - tail *targets // rest of stack - _break *BasicBlock - _continue *BasicBlock - _fallthrough *BasicBlock -} - -// Destinations associated with a labelled block. -// We populate these as labels are encountered in forward gotos or -// labelled statements. -type lblock struct { - _goto *BasicBlock - _break *BasicBlock - _continue *BasicBlock -} - -// labelledBlock returns the branch target associated with the -// specified label, creating it if needed. -func (f *Function) labelledBlock(label *ast.Ident) *lblock { - obj := f.objectOf(label).(*types.Label) - lb := f.lblocks[obj] - if lb == nil { - lb = &lblock{_goto: f.newBasicBlock(label.Name)} - if f.lblocks == nil { - f.lblocks = make(map[*types.Label]*lblock) - } - f.lblocks[obj] = lb - } - return lb -} - -// addParamVar adds a parameter to f.Params. -func (f *Function) addParamVar(v *types.Var) *Parameter { - name := v.Name() - if name == "" { - name = fmt.Sprintf("arg%d", len(f.Params)) - } - param := &Parameter{ - name: name, - object: v, - typ: f.typ(v.Type()), - parent: f, - } - f.Params = append(f.Params, param) - return param -} - -// addSpilledParam declares a parameter that is pre-spilled to the -// stack; the function body will load/store the spilled location. -// Subsequent lifting will eliminate spills where possible. -func (f *Function) addSpilledParam(obj *types.Var) { - param := f.addParamVar(obj) - spill := emitLocalVar(f, obj) - f.emit(&Store{Addr: spill, Val: param}) -} - -// startBody initializes the function prior to generating SSA code for its body. -// Precondition: f.Type() already set. -func (f *Function) startBody() { - f.currentBlock = f.newBasicBlock("entry") - f.vars = make(map[*types.Var]Value) // needed for some synthetics, e.g. init -} - -// createSyntacticParams populates f.Params and generates code (spills -// and named result locals) for all the parameters declared in the -// syntax. In addition it populates the f.objects mapping. -// -// Preconditions: -// f.startBody() was called. f.info != nil. -// Postcondition: -// len(f.Params) == len(f.Signature.Params) + (f.Signature.Recv() ? 1 : 0) -func (f *Function) createSyntacticParams(recv *ast.FieldList, functype *ast.FuncType) { - // Receiver (at most one inner iteration). - if recv != nil { - for _, field := range recv.List { - for _, n := range field.Names { - f.addSpilledParam(identVar(f, n)) - } - // Anonymous receiver? No need to spill. 
- if field.Names == nil { - f.addParamVar(f.Signature.Recv()) - } - } - } - - // Parameters. - if functype.Params != nil { - n := len(f.Params) // 1 if has recv, 0 otherwise - for _, field := range functype.Params.List { - for _, n := range field.Names { - f.addSpilledParam(identVar(f, n)) - } - // Anonymous parameter? No need to spill. - if field.Names == nil { - f.addParamVar(f.Signature.Params().At(len(f.Params) - n)) - } - } - } - - // Named results. - if functype.Results != nil { - for _, field := range functype.Results.List { - // Implicit "var" decl of locals for named results. - for _, n := range field.Names { - namedResult := emitLocalVar(f, identVar(f, n)) - f.namedResults = append(f.namedResults, namedResult) - } - } - } -} - -type setNumable interface { - setNum(int) -} - -// numberRegisters assigns numbers to all SSA registers -// (value-defining Instructions) in f, to aid debugging. -// (Non-Instruction Values are named at construction.) -func numberRegisters(f *Function) { - v := 0 - for _, b := range f.Blocks { - for _, instr := range b.Instrs { - switch instr.(type) { - case Value: - instr.(setNumable).setNum(v) - v++ - } - } - } -} - -// buildReferrers populates the def/use information in all non-nil -// Value.Referrers slice. -// Precondition: all such slices are initially empty. -func buildReferrers(f *Function) { - var rands []*Value - for _, b := range f.Blocks { - for _, instr := range b.Instrs { - rands = instr.Operands(rands[:0]) // recycle storage - for _, rand := range rands { - if r := *rand; r != nil { - if ref := r.Referrers(); ref != nil { - *ref = append(*ref, instr) - } - } - } - } - } -} - -// finishBody() finalizes the contents of the function after SSA code generation of its body. -// -// The function is not done being built until done() is called. -func (f *Function) finishBody() { - f.vars = nil - f.currentBlock = nil - f.lblocks = nil - - // Remove from f.Locals any Allocs that escape to the heap. - j := 0 - for _, l := range f.Locals { - if !l.Heap { - f.Locals[j] = l - j++ - } - } - // Nil out f.Locals[j:] to aid GC. - for i := j; i < len(f.Locals); i++ { - f.Locals[i] = nil - } - f.Locals = f.Locals[:j] - - optimizeBlocks(f) - - buildReferrers(f) - - buildDomTree(f) - - if f.Prog.mode&NaiveForm == 0 { - // For debugging pre-state of lifting pass: - // numberRegisters(f) - // f.WriteTo(os.Stderr) - lift(f) - } - - // clear remaining builder state - f.namedResults = nil // (used by lifting) - f.subst = nil - - numberRegisters(f) // uses f.namedRegisters -} - -// done marks the building of f's SSA body complete, -// along with any nested functions, and optionally prints them. -func (f *Function) done() { - assert(f.parent == nil, "done called on an anonymous function") - - var visit func(*Function) - visit = func(f *Function) { - for _, anon := range f.AnonFuncs { - visit(anon) // anon is done building before f. - } - - f.build = nil // function is built - - if f.Prog.mode&PrintFunctions != 0 { - printMu.Lock() - f.WriteTo(os.Stdout) - printMu.Unlock() - } - - if f.Prog.mode&SanityCheckFunctions != 0 { - mustSanityCheck(f, nil) - } - } - visit(f) -} - -// removeNilBlocks eliminates nils from f.Blocks and updates each -// BasicBlock.Index. Use this after any pass that may delete blocks. -func (f *Function) removeNilBlocks() { - j := 0 - for _, b := range f.Blocks { - if b != nil { - b.Index = j - f.Blocks[j] = b - j++ - } - } - // Nil out f.Blocks[j:] to aid GC. 
- for i := j; i < len(f.Blocks); i++ { - f.Blocks[i] = nil - } - f.Blocks = f.Blocks[:j] -} - -// SetDebugMode sets the debug mode for package pkg. If true, all its -// functions will include full debug info. This greatly increases the -// size of the instruction stream, and causes Functions to depend upon -// the ASTs, potentially keeping them live in memory for longer. -func (pkg *Package) SetDebugMode(debug bool) { - pkg.debug = debug -} - -// debugInfo reports whether debug info is wanted for this function. -func (f *Function) debugInfo() bool { - // debug info for instantiations follows the debug info of their origin. - p := f.declaredPackage() - return p != nil && p.debug -} - -// lookup returns the address of the named variable identified by obj -// that is local to function f or one of its enclosing functions. -// If escaping, the reference comes from a potentially escaping pointer -// expression and the referent must be heap-allocated. -// We assume the referent is a *Alloc or *Phi. -// (The only Phis at this stage are those created directly by go1.22 "for" loops.) -func (f *Function) lookup(obj *types.Var, escaping bool) Value { - if v, ok := f.vars[obj]; ok { - if escaping { - switch v := v.(type) { - case *Alloc: - v.Heap = true - case *Phi: - for _, edge := range v.Edges { - if alloc, ok := edge.(*Alloc); ok { - alloc.Heap = true - } - } - } - } - return v // function-local var (address) - } - - // Definition must be in an enclosing function; - // plumb it through intervening closures. - if f.parent == nil { - panic("no ssa.Value for " + obj.String()) - } - outer := f.parent.lookup(obj, true) // escaping - v := &FreeVar{ - name: obj.Name(), - typ: outer.Type(), - pos: outer.Pos(), - outer: outer, - parent: f, - } - f.vars[obj] = v - f.FreeVars = append(f.FreeVars, v) - return v -} - -// emit emits the specified instruction to function f. -func (f *Function) emit(instr Instruction) Value { - return f.currentBlock.emit(instr) -} - -// RelString returns the full name of this function, qualified by -// package name, receiver type, etc. -// -// The specific formatting rules are not guaranteed and may change. -// -// Examples: -// -// "math.IsNaN" // a package-level function -// "(*bytes.Buffer).Bytes" // a declared method or a wrapper -// "(*bytes.Buffer).Bytes$thunk" // thunk (func wrapping method; receiver is param 0) -// "(*bytes.Buffer).Bytes$bound" // bound (func wrapping method; receiver supplied by closure) -// "main.main$1" // an anonymous function in main -// "main.init#1" // a declared init function -// "main.init" // the synthesized package initializer -// -// When these functions are referred to from within the same package -// (i.e. from == f.Pkg.Object), they are rendered without the package path. -// For example: "IsNaN", "(*Buffer).Bytes", etc. -// -// All non-synthetic functions have distinct package-qualified names. -// (But two methods may have the same name "(T).f" if one is a synthetic -// wrapper promoting a non-exported method "f" from another package; in -// that case, the strings are equal but the identifiers "f" are distinct.) -func (f *Function) RelString(from *types.Package) string { - // Anonymous? - if f.parent != nil { - // An anonymous function's Name() looks like "parentName$1", - // but its String() should include the type/package/etc. 
- parent := f.parent.RelString(from) - for i, anon := range f.parent.AnonFuncs { - if anon == f { - return fmt.Sprintf("%s$%d", parent, 1+i) - } - } - - return f.name // should never happen - } - - // Method (declared or wrapper)? - if recv := f.Signature.Recv(); recv != nil { - return f.relMethod(from, recv.Type()) - } - - // Thunk? - if f.method != nil { - return f.relMethod(from, f.method.recv) - } - - // Bound? - if len(f.FreeVars) == 1 && strings.HasSuffix(f.name, "$bound") { - return f.relMethod(from, f.FreeVars[0].Type()) - } - - // Package-level function? - // Prefix with package name for cross-package references only. - if p := f.relPkg(); p != nil && p != from { - return fmt.Sprintf("%s.%s", p.Path(), f.name) - } - - // Unknown. - return f.name -} - -func (f *Function) relMethod(from *types.Package, recv types.Type) string { - return fmt.Sprintf("(%s).%s", relType(recv, from), f.name) -} - -// writeSignature writes to buf the signature sig in declaration syntax. -func writeSignature(buf *bytes.Buffer, from *types.Package, name string, sig *types.Signature) { - buf.WriteString("func ") - if recv := sig.Recv(); recv != nil { - buf.WriteString("(") - if name := recv.Name(); name != "" { - buf.WriteString(name) - buf.WriteString(" ") - } - types.WriteType(buf, recv.Type(), types.RelativeTo(from)) - buf.WriteString(") ") - } - buf.WriteString(name) - types.WriteSignature(buf, sig, types.RelativeTo(from)) -} - -// declaredPackage returns the package fn is declared in or nil if the -// function is not declared in a package. -func (fn *Function) declaredPackage() *Package { - switch { - case fn.Pkg != nil: - return fn.Pkg // non-generic function (does that follow??) - case fn.topLevelOrigin != nil: - return fn.topLevelOrigin.Pkg // instance of a named generic function - case fn.parent != nil: - return fn.parent.declaredPackage() // instance of an anonymous [generic] function - default: - return nil // function is not declared in a package, e.g. a wrapper. - } -} - -// relPkg returns types.Package fn is printed in relationship to. -func (fn *Function) relPkg() *types.Package { - if p := fn.declaredPackage(); p != nil { - return p.Pkg - } - return nil -} - -var _ io.WriterTo = (*Function)(nil) // *Function implements io.Writer - -func (f *Function) WriteTo(w io.Writer) (int64, error) { - var buf bytes.Buffer - WriteFunction(&buf, f) - n, err := w.Write(buf.Bytes()) - return int64(n), err -} - -// WriteFunction writes to buf a human-readable "disassembly" of f. 
-func WriteFunction(buf *bytes.Buffer, f *Function) { - fmt.Fprintf(buf, "# Name: %s\n", f.String()) - if f.Pkg != nil { - fmt.Fprintf(buf, "# Package: %s\n", f.Pkg.Pkg.Path()) - } - if syn := f.Synthetic; syn != "" { - fmt.Fprintln(buf, "# Synthetic:", syn) - } - if pos := f.Pos(); pos.IsValid() { - fmt.Fprintf(buf, "# Location: %s\n", f.Prog.Fset.Position(pos)) - } - - if f.parent != nil { - fmt.Fprintf(buf, "# Parent: %s\n", f.parent.Name()) - } - - if f.Recover != nil { - fmt.Fprintf(buf, "# Recover: %s\n", f.Recover) - } - - from := f.relPkg() - - if f.FreeVars != nil { - buf.WriteString("# Free variables:\n") - for i, fv := range f.FreeVars { - fmt.Fprintf(buf, "# % 3d:\t%s %s\n", i, fv.Name(), relType(fv.Type(), from)) - } - } - - if len(f.Locals) > 0 { - buf.WriteString("# Locals:\n") - for i, l := range f.Locals { - fmt.Fprintf(buf, "# % 3d:\t%s %s\n", i, l.Name(), relType(typeparams.MustDeref(l.Type()), from)) - } - } - writeSignature(buf, from, f.Name(), f.Signature) - buf.WriteString(":\n") - - if f.Blocks == nil { - buf.WriteString("\t(external)\n") - } - - // NB. column calculations are confused by non-ASCII - // characters and assume 8-space tabs. - const punchcard = 80 // for old time's sake. - const tabwidth = 8 - for _, b := range f.Blocks { - if b == nil { - // Corrupt CFG. - fmt.Fprintf(buf, ".nil:\n") - continue - } - n, _ := fmt.Fprintf(buf, "%d:", b.Index) - bmsg := fmt.Sprintf("%s P:%d S:%d", b.Comment, len(b.Preds), len(b.Succs)) - fmt.Fprintf(buf, "%*s%s\n", punchcard-1-n-len(bmsg), "", bmsg) - - if false { // CFG debugging - fmt.Fprintf(buf, "\t# CFG: %s --> %s --> %s\n", b.Preds, b, b.Succs) - } - for _, instr := range b.Instrs { - buf.WriteString("\t") - switch v := instr.(type) { - case Value: - l := punchcard - tabwidth - // Left-align the instruction. - if name := v.Name(); name != "" { - n, _ := fmt.Fprintf(buf, "%s = ", name) - l -= n - } - n, _ := buf.WriteString(instr.String()) - l -= n - // Right-align the type if there's space. - if t := v.Type(); t != nil { - buf.WriteByte(' ') - ts := relType(t, from) - l -= len(ts) + len(" ") // (spaces before and after type) - if l > 0 { - fmt.Fprintf(buf, "%*s", l, "") - } - buf.WriteString(ts) - } - case nil: - // Be robust against bad transforms. - buf.WriteString("") - default: - buf.WriteString(instr.String()) - } - // -mode=S: show line numbers - if f.Prog.mode&LogSource != 0 { - if pos := instr.Pos(); pos.IsValid() { - fmt.Fprintf(buf, " L%d", f.Prog.Fset.Position(pos).Line) - } - } - buf.WriteString("\n") - } - } - fmt.Fprintf(buf, "\n") -} - -// newBasicBlock adds to f a new basic block and returns it. It does -// not automatically become the current block for subsequent calls to emit. -// comment is an optional string for more readable debugging output. -func (f *Function) newBasicBlock(comment string) *BasicBlock { - b := &BasicBlock{ - Index: len(f.Blocks), - Comment: comment, - parent: f, - } - b.Succs = b.succs2[:0] - f.Blocks = append(f.Blocks, b) - return b -} - -// NewFunction returns a new synthetic Function instance belonging to -// prog, with its name and signature fields set as specified. -// -// The caller is responsible for initializing the remaining fields of -// the function object, e.g. Pkg, Params, Blocks. -// -// It is practically impossible for clients to construct well-formed -// SSA functions/packages/programs directly, so we assume this is the -// job of the Builder alone. NewFunction exists to provide clients a -// little flexibility. 
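The disassembly helpers removed above (WriteFunction and the Function.WriteTo method) are the usual way to inspect generated SSA. A brief sketch, assuming pkg is an already-built *ssa.Package; dumpFunctions is an illustrative name.

package example

import (
	"os"

	"golang.org/x/tools/go/ssa"
)

// dumpFunctions prints a human-readable listing of every function member
// of pkg to stdout, in the same format produced by WriteFunction.
func dumpFunctions(pkg *ssa.Package) {
	for _, m := range pkg.Members {
		if fn, ok := m.(*ssa.Function); ok {
			fn.WriteTo(os.Stdout)
		}
	}
}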
For example, analysis tools may wish to -// construct fake Functions for the root of the callgraph, a fake -// "reflect" package, etc. -// -// TODO(adonovan): think harder about the API here. -func (prog *Program) NewFunction(name string, sig *types.Signature, provenance string) *Function { - return &Function{Prog: prog, name: name, Signature: sig, Synthetic: provenance} -} - -// Syntax returns the function's syntax (*ast.Func{Decl,Lit) -// if it was produced from syntax. -func (f *Function) Syntax() ast.Node { return f.syntax } - -// identVar returns the variable defined by id. -func identVar(fn *Function, id *ast.Ident) *types.Var { - return fn.info.Defs[id].(*types.Var) -} diff --git a/vendor/golang.org/x/tools/go/ssa/instantiate.go b/vendor/golang.org/x/tools/go/ssa/instantiate.go deleted file mode 100644 index c155f6736..000000000 --- a/vendor/golang.org/x/tools/go/ssa/instantiate.go +++ /dev/null @@ -1,114 +0,0 @@ -// Copyright 2022 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package ssa - -import ( - "fmt" - "go/types" - "sync" - - "golang.org/x/tools/internal/typeparams" -) - -// A generic records information about a generic origin function, -// including a cache of existing instantiations. -type generic struct { - instancesMu sync.Mutex - instances map[*typeList]*Function // canonical type arguments to an instance. -} - -// instance returns a Function that is the instantiation of generic -// origin function fn with the type arguments targs. -// -// Any created instance is added to cr. -// -// Acquires fn.generic.instancesMu. -func (fn *Function) instance(targs []types.Type, cr *creator) *Function { - key := fn.Prog.canon.List(targs) - - gen := fn.generic - - gen.instancesMu.Lock() - defer gen.instancesMu.Unlock() - inst, ok := gen.instances[key] - if !ok { - inst = createInstance(fn, targs, cr) - if gen.instances == nil { - gen.instances = make(map[*typeList]*Function) - } - gen.instances[key] = inst - } - return inst -} - -// createInstance returns the instantiation of generic function fn using targs. -// If the instantiation is created, this is added to cr. -// -// Requires fn.generic.instancesMu. -func createInstance(fn *Function, targs []types.Type, cr *creator) *Function { - prog := fn.Prog - - // Compute signature. - var sig *types.Signature - var obj *types.Func - if recv := fn.Signature.Recv(); recv != nil { - // method - obj = prog.canon.instantiateMethod(fn.object, targs, prog.ctxt) - sig = obj.Type().(*types.Signature) - } else { - // function - instSig, err := types.Instantiate(prog.ctxt, fn.Signature, targs, false) - if err != nil { - panic(err) - } - instance, ok := instSig.(*types.Signature) - if !ok { - panic("Instantiate of a Signature returned a non-signature") - } - obj = fn.object // instantiation does not exist yet - sig = prog.canon.Type(instance).(*types.Signature) - } - - // Choose strategy (instance or wrapper). 
- var ( - synthetic string - subst *subster - build buildFunc - ) - if prog.mode&InstantiateGenerics != 0 && !prog.parameterized.anyParameterized(targs) { - synthetic = fmt.Sprintf("instance of %s", fn.Name()) - if fn.syntax != nil { - scope := typeparams.OriginMethod(obj).Scope() - subst = makeSubster(prog.ctxt, scope, fn.typeparams, targs, false) - build = (*builder).buildFromSyntax - } else { - build = (*builder).buildParamsOnly - } - } else { - synthetic = fmt.Sprintf("instantiation wrapper of %s", fn.Name()) - build = (*builder).buildInstantiationWrapper - } - - /* generic instance or instantiation wrapper */ - instance := &Function{ - name: fmt.Sprintf("%s%s", fn.Name(), targs), // may not be unique - object: obj, - Signature: sig, - Synthetic: synthetic, - syntax: fn.syntax, // \ - info: fn.info, // } empty for non-created packages - goversion: fn.goversion, // / - build: build, - topLevelOrigin: fn, - pos: obj.Pos(), - Pkg: nil, - Prog: fn.Prog, - typeparams: fn.typeparams, // share with origin - typeargs: targs, - subst: subst, - } - cr.Add(instance) - return instance -} diff --git a/vendor/golang.org/x/tools/go/ssa/lift.go b/vendor/golang.org/x/tools/go/ssa/lift.go deleted file mode 100644 index 8bb194944..000000000 --- a/vendor/golang.org/x/tools/go/ssa/lift.go +++ /dev/null @@ -1,646 +0,0 @@ -// Copyright 2013 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package ssa - -// This file defines the lifting pass which tries to "lift" Alloc -// cells (new/local variables) into SSA registers, replacing loads -// with the dominating stored value, eliminating loads and stores, and -// inserting φ-nodes as needed. - -// Cited papers and resources: -// -// Ron Cytron et al. 1991. Efficiently computing SSA form... -// http://doi.acm.org/10.1145/115372.115320 -// -// Cooper, Harvey, Kennedy. 2001. A Simple, Fast Dominance Algorithm. -// Software Practice and Experience 2001, 4:1-10. -// http://www.hipersoft.rice.edu/grads/publications/dom14.pdf -// -// Daniel Berlin, llvmdev mailing list, 2012. -// http://lists.cs.uiuc.edu/pipermail/llvmdev/2012-January/046638.html -// (Be sure to expand the whole thread.) - -// TODO(adonovan): opt: there are many optimizations worth evaluating, and -// the conventional wisdom for SSA construction is that a simple -// algorithm well engineered often beats those of better asymptotic -// complexity on all but the most egregious inputs. -// -// Danny Berlin suggests that the Cooper et al. algorithm for -// computing the dominance frontier is superior to Cytron et al. -// Furthermore he recommends that rather than computing the DF for the -// whole function then renaming all alloc cells, it may be cheaper to -// compute the DF for each alloc cell separately and throw it away. -// -// Consider exploiting liveness information to avoid creating dead -// φ-nodes which we then immediately remove. -// -// Also see many other "TODO: opt" suggestions in the code. - -import ( - "fmt" - "go/token" - "math/big" - "os" - - "golang.org/x/tools/internal/typeparams" -) - -// If true, show diagnostic information at each step of lifting. -// Very verbose. -const debugLifting = false - -// domFrontier maps each block to the set of blocks in its dominance -// frontier. The outer slice is conceptually a map keyed by -// Block.Index. The inner slice is conceptually a set, possibly -// containing duplicates. 
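The lifting pass described above is internal to the builder; clients influence it only indirectly through BuilderMode flags (for example NaiveForm, which skips lifting, as the deleted doc.go notes). A hedged sketch of selecting builder flags; buildSSA and keepNaive are illustrative names, not part of this change.

package example

import (
	"golang.org/x/tools/go/packages"
	"golang.org/x/tools/go/ssa"
	"golang.org/x/tools/go/ssa/ssautil"
)

// buildSSA creates and builds an SSA program from loaded packages.
// When keepNaive is true, the NaiveForm flag suppresses the lifting pass,
// leaving locals as explicit Alloc/load/store operations.
func buildSSA(initial []*packages.Package, keepNaive bool) *ssa.Program {
	mode := ssa.SanityCheckFunctions | ssa.InstantiateGenerics
	if keepNaive {
		mode |= ssa.NaiveForm
	}
	prog, _ := ssautil.Packages(initial, mode)
	prog.Build()
	return prog
}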
-// -// TODO(adonovan): opt: measure impact of dups; consider a packed bit -// representation, e.g. big.Int, and bitwise parallel operations for -// the union step in the Children loop. -// -// domFrontier's methods mutate the slice's elements but not its -// length, so their receivers needn't be pointers. -type domFrontier [][]*BasicBlock - -func (df domFrontier) add(u, v *BasicBlock) { - p := &df[u.Index] - *p = append(*p, v) -} - -// build builds the dominance frontier df for the dominator (sub)tree -// rooted at u, using the Cytron et al. algorithm. -// -// TODO(adonovan): opt: consider Berlin approach, computing pruned SSA -// by pruning the entire IDF computation, rather than merely pruning -// the DF -> IDF step. -func (df domFrontier) build(u *BasicBlock) { - // Encounter each node u in postorder of dom tree. - for _, child := range u.dom.children { - df.build(child) - } - for _, vb := range u.Succs { - if v := vb.dom; v.idom != u { - df.add(u, vb) - } - } - for _, w := range u.dom.children { - for _, vb := range df[w.Index] { - // TODO(adonovan): opt: use word-parallel bitwise union. - if v := vb.dom; v.idom != u { - df.add(u, vb) - } - } - } -} - -func buildDomFrontier(fn *Function) domFrontier { - df := make(domFrontier, len(fn.Blocks)) - df.build(fn.Blocks[0]) - if fn.Recover != nil { - df.build(fn.Recover) - } - return df -} - -func removeInstr(refs []Instruction, instr Instruction) []Instruction { - return removeInstrsIf(refs, func(i Instruction) bool { return i == instr }) -} - -func removeInstrsIf(refs []Instruction, p func(Instruction) bool) []Instruction { - // TODO(taking): replace with go1.22 slices.DeleteFunc. - i := 0 - for _, ref := range refs { - if p(ref) { - continue - } - refs[i] = ref - i++ - } - for j := i; j != len(refs); j++ { - refs[j] = nil // aid GC - } - return refs[:i] -} - -// lift replaces local and new Allocs accessed only with -// load/store by SSA registers, inserting φ-nodes where necessary. -// The result is a program in classical pruned SSA form. -// -// Preconditions: -// - fn has no dead blocks (blockopt has run). -// - Def/use info (Operands and Referrers) is up-to-date. -// - The dominator tree is up-to-date. -func lift(fn *Function) { - // TODO(adonovan): opt: lots of little optimizations may be - // worthwhile here, especially if they cause us to avoid - // buildDomFrontier. For example: - // - // - Alloc never loaded? Eliminate. - // - Alloc never stored? Replace all loads with a zero constant. - // - Alloc stored once? Replace loads with dominating store; - // don't forget that an Alloc is itself an effective store - // of zero. - // - Alloc used only within a single block? - // Use degenerate algorithm avoiding φ-nodes. - // - Consider synergy with scalar replacement of aggregates (SRA). - // e.g. *(&x.f) where x is an Alloc. - // Perhaps we'd get better results if we generated this as x.f - // i.e. Field(x, .f) instead of Load(FieldIndex(x, .f)). - // Unclear. - // - // But we will start with the simplest correct code. - df := buildDomFrontier(fn) - - if debugLifting { - title := false - for i, blocks := range df { - if blocks != nil { - if !title { - fmt.Fprintf(os.Stderr, "Dominance frontier of %s:\n", fn) - title = true - } - fmt.Fprintf(os.Stderr, "\t%s: %s\n", fn.Blocks[i], blocks) - } - } - } - - newPhis := make(newPhiMap) - - // During this pass we will replace some BasicBlock.Instrs - // (allocs, loads and stores) with nil, keeping a count in - // BasicBlock.gaps. 
At the end we will reset Instrs to the - // concatenation of all non-dead newPhis and non-nil Instrs - // for the block, reusing the original array if space permits. - - // While we're here, we also eliminate 'rundefers' - // instructions in functions that contain no 'defer' - // instructions. - usesDefer := false - - // A counter used to generate ~unique ids for Phi nodes, as an - // aid to debugging. We use large numbers to make them highly - // visible. All nodes are renumbered later. - fresh := 1000 - - // Determine which allocs we can lift and number them densely. - // The renaming phase uses this numbering for compact maps. - numAllocs := 0 - for _, b := range fn.Blocks { - b.gaps = 0 - b.rundefers = 0 - for _, instr := range b.Instrs { - switch instr := instr.(type) { - case *Alloc: - index := -1 - if liftAlloc(df, instr, newPhis, &fresh) { - index = numAllocs - numAllocs++ - } - instr.index = index - case *Defer: - usesDefer = true - case *RunDefers: - b.rundefers++ - } - } - } - - // renaming maps an alloc (keyed by index) to its replacement - // value. Initially the renaming contains nil, signifying the - // zero constant of the appropriate type; we construct the - // Const lazily at most once on each path through the domtree. - // TODO(adonovan): opt: cache per-function not per subtree. - renaming := make([]Value, numAllocs) - - // Renaming. - rename(fn.Blocks[0], renaming, newPhis) - - // Eliminate dead φ-nodes. - removeDeadPhis(fn.Blocks, newPhis) - - // Prepend remaining live φ-nodes to each block. - for _, b := range fn.Blocks { - nps := newPhis[b] - j := len(nps) - - rundefersToKill := b.rundefers - if usesDefer { - rundefersToKill = 0 - } - - if j+b.gaps+rundefersToKill == 0 { - continue // fast path: no new phis or gaps - } - - // Compact nps + non-nil Instrs into a new slice. - // TODO(adonovan): opt: compact in situ (rightwards) - // if Instrs has sufficient space or slack. - dst := make([]Instruction, len(b.Instrs)+j-b.gaps-rundefersToKill) - for i, np := range nps { - dst[i] = np.phi - } - for _, instr := range b.Instrs { - if instr == nil { - continue - } - if !usesDefer { - if _, ok := instr.(*RunDefers); ok { - continue - } - } - dst[j] = instr - j++ - } - b.Instrs = dst - } - - // Remove any fn.Locals that were lifted. - j := 0 - for _, l := range fn.Locals { - if l.index < 0 { - fn.Locals[j] = l - j++ - } - } - // Nil out fn.Locals[j:] to aid GC. - for i := j; i < len(fn.Locals); i++ { - fn.Locals[i] = nil - } - fn.Locals = fn.Locals[:j] -} - -// removeDeadPhis removes φ-nodes not transitively needed by a -// non-Phi, non-DebugRef instruction. -func removeDeadPhis(blocks []*BasicBlock, newPhis newPhiMap) { - // First pass: find the set of "live" φ-nodes: those reachable - // from some non-Phi instruction. - // - // We compute reachability in reverse, starting from each φ, - // rather than forwards, starting from each live non-Phi - // instruction, because this way visits much less of the - // Value graph. - livePhis := make(map[*Phi]bool) - for _, npList := range newPhis { - for _, np := range npList { - phi := np.phi - if !livePhis[phi] && phiHasDirectReferrer(phi) { - markLivePhi(livePhis, phi) - } - } - } - - // Existing φ-nodes due to && and || operators - // are all considered live (see Go issue 19622). - for _, b := range blocks { - for _, phi := range b.phis() { - markLivePhi(livePhis, phi.(*Phi)) - } - } - - // Second pass: eliminate unused phis from newPhis. 
- for block, npList := range newPhis { - j := 0 - for _, np := range npList { - if livePhis[np.phi] { - npList[j] = np - j++ - } else { - // discard it, first removing it from referrers - for _, val := range np.phi.Edges { - if refs := val.Referrers(); refs != nil { - *refs = removeInstr(*refs, np.phi) - } - } - np.phi.block = nil - } - } - newPhis[block] = npList[:j] - } -} - -// markLivePhi marks phi, and all φ-nodes transitively reachable via -// its Operands, live. -func markLivePhi(livePhis map[*Phi]bool, phi *Phi) { - livePhis[phi] = true - for _, rand := range phi.Operands(nil) { - if q, ok := (*rand).(*Phi); ok { - if !livePhis[q] { - markLivePhi(livePhis, q) - } - } - } -} - -// phiHasDirectReferrer reports whether phi is directly referred to by -// a non-Phi instruction. Such instructions are the -// roots of the liveness traversal. -func phiHasDirectReferrer(phi *Phi) bool { - for _, instr := range *phi.Referrers() { - if _, ok := instr.(*Phi); !ok { - return true - } - } - return false -} - -type blockSet struct{ big.Int } // (inherit methods from Int) - -// add adds b to the set and returns true if the set changed. -func (s *blockSet) add(b *BasicBlock) bool { - i := b.Index - if s.Bit(i) != 0 { - return false - } - s.SetBit(&s.Int, i, 1) - return true -} - -// take removes an arbitrary element from a set s and -// returns its index, or returns -1 if empty. -func (s *blockSet) take() int { - l := s.BitLen() - for i := 0; i < l; i++ { - if s.Bit(i) == 1 { - s.SetBit(&s.Int, i, 0) - return i - } - } - return -1 -} - -// newPhi is a pair of a newly introduced φ-node and the lifted Alloc -// it replaces. -type newPhi struct { - phi *Phi - alloc *Alloc -} - -// newPhiMap records for each basic block, the set of newPhis that -// must be prepended to the block. -type newPhiMap map[*BasicBlock][]newPhi - -// liftAlloc determines whether alloc can be lifted into registers, -// and if so, it populates newPhis with all the φ-nodes it may require -// and returns true. -// -// fresh is a source of fresh ids for phi nodes. -func liftAlloc(df domFrontier, alloc *Alloc, newPhis newPhiMap, fresh *int) bool { - // Don't lift named return values in functions that defer - // calls that may recover from panic. - if fn := alloc.Parent(); fn.Recover != nil { - for _, nr := range fn.namedResults { - if nr == alloc { - return false - } - } - } - - // Compute defblocks, the set of blocks containing a - // definition of the alloc cell. - var defblocks blockSet - for _, instr := range *alloc.Referrers() { - // Bail out if we discover the alloc is not liftable; - // the only operations permitted to use the alloc are - // loads/stores into the cell, and DebugRef. - switch instr := instr.(type) { - case *Store: - if instr.Val == alloc { - return false // address used as value - } - if instr.Addr != alloc { - panic("Alloc.Referrers is inconsistent") - } - defblocks.add(instr.Block()) - case *UnOp: - if instr.Op != token.MUL { - return false // not a load - } - if instr.X != alloc { - panic("Alloc.Referrers is inconsistent") - } - case *DebugRef: - // ok - default: - return false // some other instruction - } - } - // The Alloc itself counts as a (zero) definition of the cell. - defblocks.add(alloc.Block()) - - if debugLifting { - fmt.Fprintln(os.Stderr, "\tlifting ", alloc, alloc.Name()) - } - - fn := alloc.Parent() - - // Φ-insertion. 
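[Editor's note] The blockSet type above piggybacks on math/big.Int as a dense bitmap with worklist-style add/take operations. The standalone sketch below (not part of the deleted file; the indexSet name and the toy worklist are invented for illustration) shows the same idiom in isolation.

package main

import (
	"fmt"
	"math/big"
)

// indexSet is a sparse set of small non-negative integers backed by a bitmap,
// mirroring the add/take shape of blockSet above.
type indexSet struct{ big.Int }

// add inserts i and reports whether the set changed.
func (s *indexSet) add(i int) bool {
	if s.Bit(i) != 0 {
		return false
	}
	s.SetBit(&s.Int, i, 1)
	return true
}

// take removes and returns an arbitrary element, or -1 if the set is empty.
func (s *indexSet) take() int {
	for i, n := 0, s.BitLen(); i < n; i++ {
		if s.Bit(i) == 1 {
			s.SetBit(&s.Int, i, 0)
			return i
		}
	}
	return -1
}

func main() {
	// A toy worklist loop: start from {2, 5} and "discover" i+1 up to 7.
	var work, seen indexSet
	work.add(2)
	work.add(5)
	for i := work.take(); i != -1; i = work.take() {
		fmt.Println("processing", i)
		if next := i + 1; next <= 7 && seen.add(next) {
			work.add(next)
		}
	}
}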
- // - // What follows is the body of the main loop of the insert-φ - // function described by Cytron et al, but instead of using - // counter tricks, we just reset the 'hasAlready' and 'work' - // sets each iteration. These are bitmaps so it's pretty cheap. - // - // TODO(adonovan): opt: recycle slice storage for W, - // hasAlready, defBlocks across liftAlloc calls. - var hasAlready blockSet - - // Initialize W and work to defblocks. - var work blockSet = defblocks // blocks seen - var W blockSet // blocks to do - W.Set(&defblocks.Int) - - // Traverse iterated dominance frontier, inserting φ-nodes. - for i := W.take(); i != -1; i = W.take() { - u := fn.Blocks[i] - for _, v := range df[u.Index] { - if hasAlready.add(v) { - // Create φ-node. - // It will be prepended to v.Instrs later, if needed. - phi := &Phi{ - Edges: make([]Value, len(v.Preds)), - Comment: alloc.Comment, - } - // This is merely a debugging aid: - phi.setNum(*fresh) - *fresh++ - - phi.pos = alloc.Pos() - phi.setType(typeparams.MustDeref(alloc.Type())) - phi.block = v - if debugLifting { - fmt.Fprintf(os.Stderr, "\tplace %s = %s at block %s\n", phi.Name(), phi, v) - } - newPhis[v] = append(newPhis[v], newPhi{phi, alloc}) - - if work.add(v) { - W.add(v) - } - } - } - } - - return true -} - -// replaceAll replaces all intraprocedural uses of x with y, -// updating x.Referrers and y.Referrers. -// Precondition: x.Referrers() != nil, i.e. x must be local to some function. -func replaceAll(x, y Value) { - var rands []*Value - pxrefs := x.Referrers() - pyrefs := y.Referrers() - for _, instr := range *pxrefs { - rands = instr.Operands(rands[:0]) // recycle storage - for _, rand := range rands { - if *rand != nil { - if *rand == x { - *rand = y - } - } - } - if pyrefs != nil { - *pyrefs = append(*pyrefs, instr) // dups ok - } - } - *pxrefs = nil // x is now unreferenced -} - -// renamed returns the value to which alloc is being renamed, -// constructing it lazily if it's the implicit zero initialization. -func renamed(renaming []Value, alloc *Alloc) Value { - v := renaming[alloc.index] - if v == nil { - v = zeroConst(typeparams.MustDeref(alloc.Type())) - renaming[alloc.index] = v - } - return v -} - -// rename implements the (Cytron et al) SSA renaming algorithm, a -// preorder traversal of the dominator tree replacing all loads of -// Alloc cells with the value stored to that cell by the dominating -// store instruction. For lifting, we need only consider loads, -// stores and φ-nodes. -// -// renaming is a map from *Alloc (keyed by index number) to its -// dominating stored value; newPhis[x] is the set of new φ-nodes to be -// prepended to block x. -func rename(u *BasicBlock, renaming []Value, newPhis newPhiMap) { - // Each φ-node becomes the new name for its associated Alloc. - for _, np := range newPhis[u] { - phi := np.phi - alloc := np.alloc - renaming[alloc.index] = phi - } - - // Rename loads and stores of allocs. - for i, instr := range u.Instrs { - switch instr := instr.(type) { - case *Alloc: - if instr.index >= 0 { // store of zero to Alloc cell - // Replace dominated loads by the zero value. - renaming[instr.index] = nil - if debugLifting { - fmt.Fprintf(os.Stderr, "\tkill alloc %s\n", instr) - } - // Delete the Alloc. - u.Instrs[i] = nil - u.gaps++ - } - - case *Store: - if alloc, ok := instr.Addr.(*Alloc); ok && alloc.index >= 0 { // store to Alloc cell - // Replace dominated loads by the stored value. 
- renaming[alloc.index] = instr.Val - if debugLifting { - fmt.Fprintf(os.Stderr, "\tkill store %s; new value: %s\n", - instr, instr.Val.Name()) - } - // Remove the store from the referrer list of the stored value. - if refs := instr.Val.Referrers(); refs != nil { - *refs = removeInstr(*refs, instr) - } - // Delete the Store. - u.Instrs[i] = nil - u.gaps++ - } - - case *UnOp: - if instr.Op == token.MUL { - if alloc, ok := instr.X.(*Alloc); ok && alloc.index >= 0 { // load of Alloc cell - newval := renamed(renaming, alloc) - if debugLifting { - fmt.Fprintf(os.Stderr, "\tupdate load %s = %s with %s\n", - instr.Name(), instr, newval.Name()) - } - // Replace all references to - // the loaded value by the - // dominating stored value. - replaceAll(instr, newval) - // Delete the Load. - u.Instrs[i] = nil - u.gaps++ - } - } - - case *DebugRef: - if alloc, ok := instr.X.(*Alloc); ok && alloc.index >= 0 { // ref of Alloc cell - if instr.IsAddr { - instr.X = renamed(renaming, alloc) - instr.IsAddr = false - - // Add DebugRef to instr.X's referrers. - if refs := instr.X.Referrers(); refs != nil { - *refs = append(*refs, instr) - } - } else { - // A source expression denotes the address - // of an Alloc that was optimized away. - instr.X = nil - - // Delete the DebugRef. - u.Instrs[i] = nil - u.gaps++ - } - } - } - } - - // For each φ-node in a CFG successor, rename the edge. - for _, v := range u.Succs { - phis := newPhis[v] - if len(phis) == 0 { - continue - } - i := v.predIndex(u) - for _, np := range phis { - phi := np.phi - alloc := np.alloc - newval := renamed(renaming, alloc) - if debugLifting { - fmt.Fprintf(os.Stderr, "\tsetphi %s edge %s -> %s (#%d) (alloc=%s) := %s\n", - phi.Name(), u, v, i, alloc.Name(), newval.Name()) - } - phi.Edges[i] = newval - if prefs := newval.Referrers(); prefs != nil { - *prefs = append(*prefs, phi) - } - } - } - - // Continue depth-first recursion over domtree, pushing a - // fresh copy of the renaming map for each subtree. - for i, v := range u.dom.children { - r := renaming - if i < len(u.dom.children)-1 { - // On all but the final iteration, we must make - // a copy to avoid destructive update. - r = make([]Value, len(renaming)) - copy(r, renaming) - } - rename(v, r, newPhis) - } - -} diff --git a/vendor/golang.org/x/tools/go/ssa/lvalue.go b/vendor/golang.org/x/tools/go/ssa/lvalue.go deleted file mode 100644 index eede307ea..000000000 --- a/vendor/golang.org/x/tools/go/ssa/lvalue.go +++ /dev/null @@ -1,155 +0,0 @@ -// Copyright 2013 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package ssa - -// lvalues are the union of addressable expressions and map-index -// expressions. - -import ( - "go/ast" - "go/token" - "go/types" - - "golang.org/x/tools/internal/typeparams" -) - -// An lvalue represents an assignable location that may appear on the -// left-hand side of an assignment. This is a generalization of a -// pointer to permit updates to elements of maps. -type lvalue interface { - store(fn *Function, v Value) // stores v into the location - load(fn *Function) Value // loads the contents of the location - address(fn *Function) Value // address of the location - typ() types.Type // returns the type of the location -} - -// An address is an lvalue represented by a true pointer. -type address struct { - addr Value // must have a pointer core type. 
- pos token.Pos // source position - expr ast.Expr // source syntax of the value (not address) [debug mode] -} - -func (a *address) load(fn *Function) Value { - load := emitLoad(fn, a.addr) - load.pos = a.pos - return load -} - -func (a *address) store(fn *Function, v Value) { - store := emitStore(fn, a.addr, v, a.pos) - if a.expr != nil { - // store.Val is v, converted for assignability. - emitDebugRef(fn, a.expr, store.Val, false) - } -} - -func (a *address) address(fn *Function) Value { - if a.expr != nil { - emitDebugRef(fn, a.expr, a.addr, true) - } - return a.addr -} - -func (a *address) typ() types.Type { - return typeparams.MustDeref(a.addr.Type()) -} - -// An element is an lvalue represented by m[k], the location of an -// element of a map. These locations are not addressable -// since pointers cannot be formed from them, but they do support -// load() and store(). -type element struct { - m, k Value // map - t types.Type // map element type - pos token.Pos // source position of colon ({k:v}) or lbrack (m[k]=v) -} - -func (e *element) load(fn *Function) Value { - l := &Lookup{ - X: e.m, - Index: e.k, - } - l.setPos(e.pos) - l.setType(e.t) - return fn.emit(l) -} - -func (e *element) store(fn *Function, v Value) { - up := &MapUpdate{ - Map: e.m, - Key: e.k, - Value: emitConv(fn, v, e.t), - } - up.pos = e.pos - fn.emit(up) -} - -func (e *element) address(fn *Function) Value { - panic("map elements are not addressable") -} - -func (e *element) typ() types.Type { - return e.t -} - -// A lazyAddress is an lvalue whose address is the result of an instruction. -// These work like an *address except a new address.address() Value -// is created on each load, store and address call. -// A lazyAddress can be used to control when a side effect (nil pointer -// dereference, index out of bounds) of using a location happens. -type lazyAddress struct { - addr func(fn *Function) Value // emit to fn the computation of the address - t types.Type // type of the location - pos token.Pos // source position - expr ast.Expr // source syntax of the value (not address) [debug mode] -} - -func (l *lazyAddress) load(fn *Function) Value { - load := emitLoad(fn, l.addr(fn)) - load.pos = l.pos - return load -} - -func (l *lazyAddress) store(fn *Function, v Value) { - store := emitStore(fn, l.addr(fn), v, l.pos) - if l.expr != nil { - // store.Val is v, converted for assignability. - emitDebugRef(fn, l.expr, store.Val, false) - } -} - -func (l *lazyAddress) address(fn *Function) Value { - addr := l.addr(fn) - if l.expr != nil { - emitDebugRef(fn, l.expr, addr, true) - } - return addr -} - -func (l *lazyAddress) typ() types.Type { return l.t } - -// A blank is a dummy variable whose name is "_". -// It is not reified: loads are illegal and stores are ignored. -type blank struct{} - -func (bl blank) load(fn *Function) Value { - panic("blank.load is illegal") -} - -func (bl blank) store(fn *Function, v Value) { - // no-op -} - -func (bl blank) address(fn *Function) Value { - panic("blank var is not addressable") -} - -func (bl blank) typ() types.Type { - // This should be the type of the blank Ident; the typechecker - // doesn't provide this yet, but fortunately, we don't need it - // yet either. - panic("blank.typ is unimplemented") -} diff --git a/vendor/golang.org/x/tools/go/ssa/methods.go b/vendor/golang.org/x/tools/go/ssa/methods.go deleted file mode 100644 index 5f46a1848..000000000 --- a/vendor/golang.org/x/tools/go/ssa/methods.go +++ /dev/null @@ -1,277 +0,0 @@ -// Copyright 2013 The Go Authors. 
All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package ssa - -// This file defines utilities for population of method sets. - -import ( - "fmt" - "go/types" - - "golang.org/x/tools/go/types/typeutil" - "golang.org/x/tools/internal/aliases" - "golang.org/x/tools/internal/typeparams" -) - -// MethodValue returns the Function implementing method sel, building -// wrapper methods on demand. It returns nil if sel denotes an -// interface or generic method. -// -// Precondition: sel.Kind() == MethodVal. -// -// Thread-safe. -// -// Acquires prog.methodsMu. -func (prog *Program) MethodValue(sel *types.Selection) *Function { - if sel.Kind() != types.MethodVal { - panic(fmt.Sprintf("MethodValue(%s) kind != MethodVal", sel)) - } - T := sel.Recv() - if types.IsInterface(T) { - return nil // interface method or type parameter - } - - if prog.parameterized.isParameterized(T) { - return nil // generic method - } - - if prog.mode&LogSource != 0 { - defer logStack("MethodValue %s %v", T, sel)() - } - - var cr creator - - m := func() *Function { - prog.methodsMu.Lock() - defer prog.methodsMu.Unlock() - - // Get or create SSA method set. - mset, ok := prog.methodSets.At(T).(*methodSet) - if !ok { - mset = &methodSet{mapping: make(map[string]*Function)} - prog.methodSets.Set(T, mset) - } - - // Get or create SSA method. - id := sel.Obj().Id() - fn, ok := mset.mapping[id] - if !ok { - obj := sel.Obj().(*types.Func) - _, ptrObj := deptr(recvType(obj)) - _, ptrRecv := deptr(T) - needsPromotion := len(sel.Index()) > 1 - needsIndirection := !ptrObj && ptrRecv - if needsPromotion || needsIndirection { - fn = createWrapper(prog, toSelection(sel), &cr) - } else { - fn = prog.objectMethod(obj, &cr) - } - if fn.Signature.Recv() == nil { - panic(fn) - } - mset.mapping[id] = fn - } - - return fn - }() - - b := builder{created: &cr} - b.iterate() - - return m -} - -// objectMethod returns the Function for a given method symbol. -// The symbol may be an instance of a generic function. It need not -// belong to an existing SSA package created by a call to -// prog.CreatePackage. -// -// objectMethod panics if the function is not a method. -// -// Acquires prog.objectMethodsMu. -func (prog *Program) objectMethod(obj *types.Func, cr *creator) *Function { - sig := obj.Type().(*types.Signature) - if sig.Recv() == nil { - panic("not a method: " + obj.String()) - } - - // Belongs to a created package? - if fn := prog.FuncValue(obj); fn != nil { - return fn - } - - // Instantiation of generic? - if originObj := typeparams.OriginMethod(obj); originObj != obj { - origin := prog.objectMethod(originObj, cr) - assert(origin.typeparams.Len() > 0, "origin is not generic") - targs := receiverTypeArgs(obj) - return origin.instance(targs, cr) - } - - // Consult/update cache of methods created from types.Func. - prog.objectMethodsMu.Lock() - defer prog.objectMethodsMu.Unlock() - fn, ok := prog.objectMethods[obj] - if !ok { - fn = createFunction(prog, obj, obj.Name(), nil, nil, "", cr) - fn.Synthetic = "from type information (on demand)" - - if prog.objectMethods == nil { - prog.objectMethods = make(map[*types.Func]*Function) - } - prog.objectMethods[obj] = fn - } - return fn -} - -// LookupMethod returns the implementation of the method of type T -// identified by (pkg, name). It returns nil if the method exists but -// is an interface method or generic method, and panics if T has no such method. 
-func (prog *Program) LookupMethod(T types.Type, pkg *types.Package, name string) *Function { - sel := prog.MethodSets.MethodSet(T).Lookup(pkg, name) - if sel == nil { - panic(fmt.Sprintf("%s has no method %s", T, types.Id(pkg, name))) - } - return prog.MethodValue(sel) -} - -// methodSet contains the (concrete) methods of a concrete type (non-interface, non-parameterized). -type methodSet struct { - mapping map[string]*Function // populated lazily -} - -// RuntimeTypes returns a new unordered slice containing all types in -// the program for which a runtime type is required. -// -// A runtime type is required for any non-parameterized, non-interface -// type that is converted to an interface, or for any type (including -// interface types) derivable from one through reflection. -// -// The methods of such types may be reachable through reflection or -// interface calls even if they are never called directly. -// -// Thread-safe. -// -// Acquires prog.runtimeTypesMu. -func (prog *Program) RuntimeTypes() []types.Type { - prog.runtimeTypesMu.Lock() - defer prog.runtimeTypesMu.Unlock() - return prog.runtimeTypes.Keys() -} - -// forEachReachable calls f for type T and each type reachable from -// its type through reflection. -// -// The function f must use memoization to break cycles and -// return false when the type has already been visited. -// -// TODO(adonovan): publish in typeutil and share with go/callgraph/rta. -func forEachReachable(msets *typeutil.MethodSetCache, T types.Type, f func(types.Type) bool) { - var visit func(T types.Type, skip bool) - visit = func(T types.Type, skip bool) { - if !skip { - if !f(T) { - return - } - } - - // Recursion over signatures of each method. - tmset := msets.MethodSet(T) - for i := 0; i < tmset.Len(); i++ { - sig := tmset.At(i).Type().(*types.Signature) - // It is tempting to call visit(sig, false) - // but, as noted in golang.org/cl/65450043, - // the Signature.Recv field is ignored by - // types.Identical and typeutil.Map, which - // is confusing at best. - // - // More importantly, the true signature rtype - // reachable from a method using reflection - // has no receiver but an extra ordinary parameter. - // For the Read method of io.Reader we want: - // func(Reader, []byte) (int, error) - // but here sig is: - // func([]byte) (int, error) - // with .Recv = Reader (though it is hard to - // notice because it doesn't affect Signature.String - // or types.Identical). - // - // TODO(adonovan): construct and visit the correct - // non-method signature with an extra parameter - // (though since unnamed func types have no methods - // there is essentially no actual demand for this). - // - // TODO(adonovan): document whether or not it is - // safe to skip non-exported methods (as RTA does). - visit(sig.Params(), true) // skip the Tuple - visit(sig.Results(), true) // skip the Tuple - } - - switch T := T.(type) { - case *aliases.Alias: - visit(aliases.Unalias(T), false) - - case *types.Basic: - // nop - - case *types.Interface: - // nop---handled by recursion over method set. 
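[Editor's note] MethodValue/LookupMethod above ultimately resolve a method through a method set, which has the same shape as the standard go/types method-set API. The standalone sketch below uses only the standard library (the Counter type and package name "p" are invented); it illustrates the lookup shape, not how the SSA program's own cache is keyed.

package main

import (
	"fmt"
	"go/ast"
	"go/importer"
	"go/parser"
	"go/token"
	"go/types"
)

const src = `package p

type Counter struct{ n int }

func (c *Counter) Inc() { c.n++ }
`

func main() {
	fset := token.NewFileSet()
	f, err := parser.ParseFile(fset, "p.go", src, 0)
	if err != nil {
		panic(err)
	}
	conf := types.Config{Importer: importer.Default()}
	pkg, err := conf.Check("p", fset, []*ast.File{f}, nil)
	if err != nil {
		panic(err)
	}
	// *Counter has Inc in its method set; Counter itself does not.
	T := types.NewPointer(pkg.Scope().Lookup("Counter").Type())
	sel := types.NewMethodSet(T).Lookup(pkg, "Inc")
	if sel == nil {
		panic("no such method")
	}
	fmt.Println(sel.Kind() == types.MethodVal, sel.Obj().Name()) // true Inc
}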
- - case *types.Pointer: - visit(T.Elem(), false) - - case *types.Slice: - visit(T.Elem(), false) - - case *types.Chan: - visit(T.Elem(), false) - - case *types.Map: - visit(T.Key(), false) - visit(T.Elem(), false) - - case *types.Signature: - if T.Recv() != nil { - panic(fmt.Sprintf("Signature %s has Recv %s", T, T.Recv())) - } - visit(T.Params(), true) // skip the Tuple - visit(T.Results(), true) // skip the Tuple - - case *types.Named: - // A pointer-to-named type can be derived from a named - // type via reflection. It may have methods too. - visit(types.NewPointer(T), false) - - // Consider 'type T struct{S}' where S has methods. - // Reflection provides no way to get from T to struct{S}, - // only to S, so the method set of struct{S} is unwanted, - // so set 'skip' flag during recursion. - visit(T.Underlying(), true) // skip the unnamed type - - case *types.Array: - visit(T.Elem(), false) - - case *types.Struct: - for i, n := 0, T.NumFields(); i < n; i++ { - // TODO(adonovan): document whether or not - // it is safe to skip non-exported fields. - visit(T.Field(i).Type(), false) - } - - case *types.Tuple: - for i, n := 0, T.Len(); i < n; i++ { - visit(T.At(i).Type(), false) - } - - case *types.TypeParam, *types.Union: - // forEachReachable must not be called on parameterized types. - panic(T) - - default: - panic(T) - } - } - visit(T, false) -} diff --git a/vendor/golang.org/x/tools/go/ssa/mode.go b/vendor/golang.org/x/tools/go/ssa/mode.go deleted file mode 100644 index 8381639a5..000000000 --- a/vendor/golang.org/x/tools/go/ssa/mode.go +++ /dev/null @@ -1,111 +0,0 @@ -// Copyright 2015 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package ssa - -// This file defines the BuilderMode type and its command-line flag. - -import ( - "bytes" - "fmt" -) - -// BuilderMode is a bitmask of options for diagnostics and checking. -// -// *BuilderMode satisfies the flag.Value interface. Example: -// -// var mode = ssa.BuilderMode(0) -// func init() { flag.Var(&mode, "build", ssa.BuilderModeDoc) } -type BuilderMode uint - -const ( - PrintPackages BuilderMode = 1 << iota // Print package inventory to stdout - PrintFunctions // Print function SSA code to stdout - LogSource // Log source locations as SSA builder progresses - SanityCheckFunctions // Perform sanity checking of function bodies - NaiveForm // Build naïve SSA form: don't replace local loads/stores with registers - BuildSerially // Build packages serially, not in parallel. - GlobalDebug // Enable debug info for all packages - BareInits // Build init functions without guards or calls to dependent inits - InstantiateGenerics // Instantiate generics functions (monomorphize) while building -) - -const BuilderModeDoc = `Options controlling the SSA builder. -The value is a sequence of zero or more of these letters: -C perform sanity [C]hecking of the SSA form. -D include [D]ebug info for every function. -P print [P]ackage inventory. -F print [F]unction SSA code. -S log [S]ource locations as SSA builder progresses. -L build distinct packages seria[L]ly instead of in parallel. -N build [N]aive SSA form: don't replace local loads/stores with registers. -I build bare [I]nit functions: no init guards or calls to dependent inits. 
-G instantiate [G]eneric function bodies via monomorphization -` - -func (m BuilderMode) String() string { - var buf bytes.Buffer - if m&GlobalDebug != 0 { - buf.WriteByte('D') - } - if m&PrintPackages != 0 { - buf.WriteByte('P') - } - if m&PrintFunctions != 0 { - buf.WriteByte('F') - } - if m&LogSource != 0 { - buf.WriteByte('S') - } - if m&SanityCheckFunctions != 0 { - buf.WriteByte('C') - } - if m&NaiveForm != 0 { - buf.WriteByte('N') - } - if m&BuildSerially != 0 { - buf.WriteByte('L') - } - if m&BareInits != 0 { - buf.WriteByte('I') - } - if m&InstantiateGenerics != 0 { - buf.WriteByte('G') - } - return buf.String() -} - -// Set parses the flag characters in s and updates *m. -func (m *BuilderMode) Set(s string) error { - var mode BuilderMode - for _, c := range s { - switch c { - case 'D': - mode |= GlobalDebug - case 'P': - mode |= PrintPackages - case 'F': - mode |= PrintFunctions - case 'S': - mode |= LogSource | BuildSerially - case 'C': - mode |= SanityCheckFunctions - case 'N': - mode |= NaiveForm - case 'L': - mode |= BuildSerially - case 'I': - mode |= BareInits - case 'G': - mode |= InstantiateGenerics - default: - return fmt.Errorf("unknown BuilderMode option: %q", c) - } - } - *m = mode - return nil -} - -// Get returns m. -func (m BuilderMode) Get() interface{} { return m } diff --git a/vendor/golang.org/x/tools/go/ssa/parameterized.go b/vendor/golang.org/x/tools/go/ssa/parameterized.go deleted file mode 100644 index 74c541107..000000000 --- a/vendor/golang.org/x/tools/go/ssa/parameterized.go +++ /dev/null @@ -1,145 +0,0 @@ -// Copyright 2022 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package ssa - -import ( - "go/types" - "sync" - - "golang.org/x/tools/internal/aliases" - "golang.org/x/tools/internal/typeparams" -) - -// tpWalker walks over types looking for parameterized types. -// -// NOTE: Adapted from go/types/infer.go. If that is exported in a future release remove this copy. -type tpWalker struct { - mu sync.Mutex - seen map[types.Type]bool -} - -// isParameterized reports whether t recursively contains a type parameter. -// Thread-safe. -func (w *tpWalker) isParameterized(t types.Type) bool { - // TODO(adonovan): profile. If this operation is expensive, - // handle the most common but shallow cases such as T, pkg.T, - // *T without consulting the cache under the lock. - - w.mu.Lock() - defer w.mu.Unlock() - return w.isParameterizedLocked(t) -} - -// Requires w.mu. -func (w *tpWalker) isParameterizedLocked(typ types.Type) (res bool) { - // NOTE: Adapted from go/types/infer.go. Try to keep in sync. - - // detect cycles - if x, ok := w.seen[typ]; ok { - return x - } - w.seen[typ] = false - defer func() { - w.seen[typ] = res - }() - - switch t := typ.(type) { - case nil, *types.Basic: // TODO(gri) should nil be handled here? 
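[Editor's note] BuilderMode's String and Set above implement flag.Value so the bitmask can be configured from a string of option letters. Below is a minimal standalone sketch of the same pattern (the options type and the v/t letters are invented for illustration).

package main

import (
	"flag"
	"fmt"
)

type options uint

const (
	verbose options = 1 << iota // letter 'v'
	trace                       // letter 't'
)

// String renders the mask as its option letters, like BuilderMode.String.
func (o options) String() string {
	s := ""
	if o&verbose != 0 {
		s += "v"
	}
	if o&trace != 0 {
		s += "t"
	}
	return s
}

// Set parses option letters, like BuilderMode.Set.
func (o *options) Set(s string) error {
	var mode options
	for _, c := range s {
		switch c {
		case 'v':
			mode |= verbose
		case 't':
			mode |= trace
		default:
			return fmt.Errorf("unknown option: %q", c)
		}
	}
	*o = mode
	return nil
}

func main() {
	var opts options
	flag.Var(&opts, "opts", "option letters: v (verbose), t (trace)")
	flag.Parse()
	fmt.Println("options:", opts.String())
}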
- break - - case *aliases.Alias: - return w.isParameterizedLocked(aliases.Unalias(t)) - - case *types.Array: - return w.isParameterizedLocked(t.Elem()) - - case *types.Slice: - return w.isParameterizedLocked(t.Elem()) - - case *types.Struct: - for i, n := 0, t.NumFields(); i < n; i++ { - if w.isParameterizedLocked(t.Field(i).Type()) { - return true - } - } - - case *types.Pointer: - return w.isParameterizedLocked(t.Elem()) - - case *types.Tuple: - n := t.Len() - for i := 0; i < n; i++ { - if w.isParameterizedLocked(t.At(i).Type()) { - return true - } - } - - case *types.Signature: - // t.tparams may not be nil if we are looking at a signature - // of a generic function type (or an interface method) that is - // part of the type we're testing. We don't care about these type - // parameters. - // Similarly, the receiver of a method may declare (rather than - // use) type parameters, we don't care about those either. - // Thus, we only need to look at the input and result parameters. - return w.isParameterizedLocked(t.Params()) || w.isParameterizedLocked(t.Results()) - - case *types.Interface: - for i, n := 0, t.NumMethods(); i < n; i++ { - if w.isParameterizedLocked(t.Method(i).Type()) { - return true - } - } - terms, err := typeparams.InterfaceTermSet(t) - if err != nil { - panic(err) - } - for _, term := range terms { - if w.isParameterizedLocked(term.Type()) { - return true - } - } - - case *types.Map: - return w.isParameterizedLocked(t.Key()) || w.isParameterizedLocked(t.Elem()) - - case *types.Chan: - return w.isParameterizedLocked(t.Elem()) - - case *types.Named: - args := t.TypeArgs() - // TODO(taking): this does not match go/types/infer.go. Check with rfindley. - if params := t.TypeParams(); params.Len() > args.Len() { - return true - } - for i, n := 0, args.Len(); i < n; i++ { - if w.isParameterizedLocked(args.At(i)) { - return true - } - } - return w.isParameterizedLocked(t.Underlying()) // recurse for types local to parameterized functions - - case *types.TypeParam: - return true - - default: - panic(t) // unreachable - } - - return false -} - -// anyParameterized reports whether any element of ts is parameterized. -// Thread-safe. -func (w *tpWalker) anyParameterized(ts []types.Type) bool { - w.mu.Lock() - defer w.mu.Unlock() - for _, t := range ts { - if w.isParameterizedLocked(t) { - return true - } - } - return false -} diff --git a/vendor/golang.org/x/tools/go/ssa/print.go b/vendor/golang.org/x/tools/go/ssa/print.go deleted file mode 100644 index 38d8404fd..000000000 --- a/vendor/golang.org/x/tools/go/ssa/print.go +++ /dev/null @@ -1,465 +0,0 @@ -// Copyright 2013 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package ssa - -// This file implements the String() methods for all Value and -// Instruction types. - -import ( - "bytes" - "fmt" - "go/types" - "io" - "reflect" - "sort" - "strings" - - "golang.org/x/tools/go/types/typeutil" - "golang.org/x/tools/internal/typeparams" -) - -// relName returns the name of v relative to i. -// In most cases, this is identical to v.Name(), but references to -// Functions (including methods) and Globals use RelString and -// all types are displayed with relType, so that only cross-package -// references are package-qualified. 
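[Editor's note] tpWalker above memoizes results in a seen map, seeding each entry with false before recursing so that cyclic types terminate. The sketch below reproduces that idiom for a made-up predicate (whether a type mentions a channel) over types built directly with go/types constructors; it is illustrative, not the vendored walker.

package main

import (
	"fmt"
	"go/types"
)

type walker struct {
	seen map[types.Type]bool
}

func (w *walker) mentionsChan(t types.Type) (res bool) {
	if r, ok := w.seen[t]; ok {
		return r // break cycles; reuse earlier answers
	}
	w.seen[t] = false // provisional answer for cyclic references
	defer func() { w.seen[t] = res }()

	switch t := t.(type) {
	case *types.Chan:
		return true
	case *types.Pointer:
		return w.mentionsChan(t.Elem())
	case *types.Slice:
		return w.mentionsChan(t.Elem())
	case *types.Map:
		return w.mentionsChan(t.Key()) || w.mentionsChan(t.Elem())
	case *types.Struct:
		for i := 0; i < t.NumFields(); i++ {
			if w.mentionsChan(t.Field(i).Type()) {
				return true
			}
		}
	}
	return false
}

func main() {
	w := &walker{seen: make(map[types.Type]bool)}
	ch := types.NewChan(types.SendRecv, types.Typ[types.Int])
	m := types.NewMap(types.Typ[types.String], types.NewSlice(ch))
	fmt.Println(w.mentionsChan(m))                                      // true
	fmt.Println(w.mentionsChan(types.NewSlice(types.Typ[types.Bool]))) // false
}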
-func relName(v Value, i Instruction) string { - var from *types.Package - if i != nil { - from = i.Parent().relPkg() - } - switch v := v.(type) { - case Member: // *Function or *Global - return v.RelString(from) - case *Const: - return v.RelString(from) - } - return v.Name() -} - -// normalizeAnyFortesting controls whether we replace occurrences of -// interface{} with any. It is only used for normalizing test output. -var normalizeAnyForTesting bool - -func relType(t types.Type, from *types.Package) string { - s := types.TypeString(t, types.RelativeTo(from)) - if normalizeAnyForTesting { - s = strings.ReplaceAll(s, "interface{}", "any") - } - return s -} - -func relTerm(term *types.Term, from *types.Package) string { - s := relType(term.Type(), from) - if term.Tilde() { - return "~" + s - } - return s -} - -func relString(m Member, from *types.Package) string { - // NB: not all globals have an Object (e.g. init$guard), - // so use Package().Object not Object.Package(). - if pkg := m.Package().Pkg; pkg != nil && pkg != from { - return fmt.Sprintf("%s.%s", pkg.Path(), m.Name()) - } - return m.Name() -} - -// Value.String() -// -// This method is provided only for debugging. -// It never appears in disassembly, which uses Value.Name(). - -func (v *Parameter) String() string { - from := v.Parent().relPkg() - return fmt.Sprintf("parameter %s : %s", v.Name(), relType(v.Type(), from)) -} - -func (v *FreeVar) String() string { - from := v.Parent().relPkg() - return fmt.Sprintf("freevar %s : %s", v.Name(), relType(v.Type(), from)) -} - -func (v *Builtin) String() string { - return fmt.Sprintf("builtin %s", v.Name()) -} - -// Instruction.String() - -func (v *Alloc) String() string { - op := "local" - if v.Heap { - op = "new" - } - from := v.Parent().relPkg() - return fmt.Sprintf("%s %s (%s)", op, relType(typeparams.MustDeref(v.Type()), from), v.Comment) -} - -func (v *Phi) String() string { - var b bytes.Buffer - b.WriteString("phi [") - for i, edge := range v.Edges { - if i > 0 { - b.WriteString(", ") - } - // Be robust against malformed CFG. 
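[Editor's note] relType above delegates to types.TypeString with a types.RelativeTo qualifier so that only cross-package references are package-qualified. A small standard-library-only demonstration follows (the package path example.com/p is invented).

package main

import (
	"fmt"
	"go/token"
	"go/types"
)

func main() {
	pkg := types.NewPackage("example.com/p", "p")
	obj := types.NewTypeName(token.NoPos, pkg, "T", nil)
	named := types.NewNamed(obj, types.Typ[types.Int], nil)

	fmt.Println(types.TypeString(named, nil))                   // example.com/p.T
	fmt.Println(types.TypeString(named, types.RelativeTo(pkg))) // T
}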
- if v.block == nil { - b.WriteString("??") - continue - } - block := -1 - if i < len(v.block.Preds) { - block = v.block.Preds[i].Index - } - fmt.Fprintf(&b, "%d: ", block) - edgeVal := "" // be robust - if edge != nil { - edgeVal = relName(edge, v) - } - b.WriteString(edgeVal) - } - b.WriteString("]") - if v.Comment != "" { - b.WriteString(" #") - b.WriteString(v.Comment) - } - return b.String() -} - -func printCall(v *CallCommon, prefix string, instr Instruction) string { - var b bytes.Buffer - b.WriteString(prefix) - if !v.IsInvoke() { - b.WriteString(relName(v.Value, instr)) - } else { - fmt.Fprintf(&b, "invoke %s.%s", relName(v.Value, instr), v.Method.Name()) - } - b.WriteString("(") - for i, arg := range v.Args { - if i > 0 { - b.WriteString(", ") - } - b.WriteString(relName(arg, instr)) - } - if v.Signature().Variadic() { - b.WriteString("...") - } - b.WriteString(")") - return b.String() -} - -func (c *CallCommon) String() string { - return printCall(c, "", nil) -} - -func (v *Call) String() string { - return printCall(&v.Call, "", v) -} - -func (v *BinOp) String() string { - return fmt.Sprintf("%s %s %s", relName(v.X, v), v.Op.String(), relName(v.Y, v)) -} - -func (v *UnOp) String() string { - return fmt.Sprintf("%s%s%s", v.Op, relName(v.X, v), commaOk(v.CommaOk)) -} - -func printConv(prefix string, v, x Value) string { - from := v.Parent().relPkg() - return fmt.Sprintf("%s %s <- %s (%s)", - prefix, - relType(v.Type(), from), - relType(x.Type(), from), - relName(x, v.(Instruction))) -} - -func (v *ChangeType) String() string { return printConv("changetype", v, v.X) } -func (v *Convert) String() string { return printConv("convert", v, v.X) } -func (v *ChangeInterface) String() string { return printConv("change interface", v, v.X) } -func (v *SliceToArrayPointer) String() string { return printConv("slice to array pointer", v, v.X) } -func (v *MakeInterface) String() string { return printConv("make", v, v.X) } - -func (v *MultiConvert) String() string { - from := v.Parent().relPkg() - - var b strings.Builder - b.WriteString(printConv("multiconvert", v, v.X)) - b.WriteString(" [") - for i, s := range v.from { - for j, d := range v.to { - if i != 0 || j != 0 { - b.WriteString(" | ") - } - fmt.Fprintf(&b, "%s <- %s", relTerm(d, from), relTerm(s, from)) - } - } - b.WriteString("]") - return b.String() -} - -func (v *MakeClosure) String() string { - var b bytes.Buffer - fmt.Fprintf(&b, "make closure %s", relName(v.Fn, v)) - if v.Bindings != nil { - b.WriteString(" [") - for i, c := range v.Bindings { - if i > 0 { - b.WriteString(", ") - } - b.WriteString(relName(c, v)) - } - b.WriteString("]") - } - return b.String() -} - -func (v *MakeSlice) String() string { - from := v.Parent().relPkg() - return fmt.Sprintf("make %s %s %s", - relType(v.Type(), from), - relName(v.Len, v), - relName(v.Cap, v)) -} - -func (v *Slice) String() string { - var b bytes.Buffer - b.WriteString("slice ") - b.WriteString(relName(v.X, v)) - b.WriteString("[") - if v.Low != nil { - b.WriteString(relName(v.Low, v)) - } - b.WriteString(":") - if v.High != nil { - b.WriteString(relName(v.High, v)) - } - if v.Max != nil { - b.WriteString(":") - b.WriteString(relName(v.Max, v)) - } - b.WriteString("]") - return b.String() -} - -func (v *MakeMap) String() string { - res := "" - if v.Reserve != nil { - res = relName(v.Reserve, v) - } - from := v.Parent().relPkg() - return fmt.Sprintf("make %s %s", relType(v.Type(), from), res) -} - -func (v *MakeChan) String() string { - from := v.Parent().relPkg() - return 
fmt.Sprintf("make %s %s", relType(v.Type(), from), relName(v.Size, v)) -} - -func (v *FieldAddr) String() string { - // Be robust against a bad index. - name := "?" - if fld := fieldOf(typeparams.MustDeref(v.X.Type()), v.Field); fld != nil { - name = fld.Name() - } - return fmt.Sprintf("&%s.%s [#%d]", relName(v.X, v), name, v.Field) -} - -func (v *Field) String() string { - // Be robust against a bad index. - name := "?" - if fld := fieldOf(v.X.Type(), v.Field); fld != nil { - name = fld.Name() - } - return fmt.Sprintf("%s.%s [#%d]", relName(v.X, v), name, v.Field) -} - -func (v *IndexAddr) String() string { - return fmt.Sprintf("&%s[%s]", relName(v.X, v), relName(v.Index, v)) -} - -func (v *Index) String() string { - return fmt.Sprintf("%s[%s]", relName(v.X, v), relName(v.Index, v)) -} - -func (v *Lookup) String() string { - return fmt.Sprintf("%s[%s]%s", relName(v.X, v), relName(v.Index, v), commaOk(v.CommaOk)) -} - -func (v *Range) String() string { - return "range " + relName(v.X, v) -} - -func (v *Next) String() string { - return "next " + relName(v.Iter, v) -} - -func (v *TypeAssert) String() string { - from := v.Parent().relPkg() - return fmt.Sprintf("typeassert%s %s.(%s)", commaOk(v.CommaOk), relName(v.X, v), relType(v.AssertedType, from)) -} - -func (v *Extract) String() string { - return fmt.Sprintf("extract %s #%d", relName(v.Tuple, v), v.Index) -} - -func (s *Jump) String() string { - // Be robust against malformed CFG. - block := -1 - if s.block != nil && len(s.block.Succs) == 1 { - block = s.block.Succs[0].Index - } - return fmt.Sprintf("jump %d", block) -} - -func (s *If) String() string { - // Be robust against malformed CFG. - tblock, fblock := -1, -1 - if s.block != nil && len(s.block.Succs) == 2 { - tblock = s.block.Succs[0].Index - fblock = s.block.Succs[1].Index - } - return fmt.Sprintf("if %s goto %d else %d", relName(s.Cond, s), tblock, fblock) -} - -func (s *Go) String() string { - return printCall(&s.Call, "go ", s) -} - -func (s *Panic) String() string { - return "panic " + relName(s.X, s) -} - -func (s *Return) String() string { - var b bytes.Buffer - b.WriteString("return") - for i, r := range s.Results { - if i == 0 { - b.WriteString(" ") - } else { - b.WriteString(", ") - } - b.WriteString(relName(r, s)) - } - return b.String() -} - -func (*RunDefers) String() string { - return "rundefers" -} - -func (s *Send) String() string { - return fmt.Sprintf("send %s <- %s", relName(s.Chan, s), relName(s.X, s)) -} - -func (s *Defer) String() string { - return printCall(&s.Call, "defer ", s) -} - -func (s *Select) String() string { - var b bytes.Buffer - for i, st := range s.States { - if i > 0 { - b.WriteString(", ") - } - if st.Dir == types.RecvOnly { - b.WriteString("<-") - b.WriteString(relName(st.Chan, s)) - } else { - b.WriteString(relName(st.Chan, s)) - b.WriteString("<-") - b.WriteString(relName(st.Send, s)) - } - } - non := "" - if !s.Blocking { - non = "non" - } - return fmt.Sprintf("select %sblocking [%s]", non, b.String()) -} - -func (s *Store) String() string { - return fmt.Sprintf("*%s = %s", relName(s.Addr, s), relName(s.Val, s)) -} - -func (s *MapUpdate) String() string { - return fmt.Sprintf("%s[%s] = %s", relName(s.Map, s), relName(s.Key, s), relName(s.Value, s)) -} - -func (s *DebugRef) String() string { - p := s.Parent().Prog.Fset.Position(s.Pos()) - var descr interface{} - if s.object != nil { - descr = s.object // e.g. "var x int" - } else { - descr = reflect.TypeOf(s.Expr) // e.g. 
"*ast.CallExpr" - } - var addr string - if s.IsAddr { - addr = "address of " - } - return fmt.Sprintf("; %s%s @ %d:%d is %s", addr, descr, p.Line, p.Column, s.X.Name()) -} - -func (p *Package) String() string { - return "package " + p.Pkg.Path() -} - -var _ io.WriterTo = (*Package)(nil) // *Package implements io.Writer - -func (p *Package) WriteTo(w io.Writer) (int64, error) { - var buf bytes.Buffer - WritePackage(&buf, p) - n, err := w.Write(buf.Bytes()) - return int64(n), err -} - -// WritePackage writes to buf a human-readable summary of p. -func WritePackage(buf *bytes.Buffer, p *Package) { - fmt.Fprintf(buf, "%s:\n", p) - - var names []string - maxname := 0 - for name := range p.Members { - if l := len(name); l > maxname { - maxname = l - } - names = append(names, name) - } - - from := p.Pkg - sort.Strings(names) - for _, name := range names { - switch mem := p.Members[name].(type) { - case *NamedConst: - fmt.Fprintf(buf, " const %-*s %s = %s\n", - maxname, name, mem.Name(), mem.Value.RelString(from)) - - case *Function: - fmt.Fprintf(buf, " func %-*s %s\n", - maxname, name, relType(mem.Type(), from)) - - case *Type: - fmt.Fprintf(buf, " type %-*s %s\n", - maxname, name, relType(mem.Type().Underlying(), from)) - for _, meth := range typeutil.IntuitiveMethodSet(mem.Type(), &p.Prog.MethodSets) { - fmt.Fprintf(buf, " %s\n", types.SelectionString(meth, types.RelativeTo(from))) - } - - case *Global: - fmt.Fprintf(buf, " var %-*s %s\n", - maxname, name, relType(typeparams.MustDeref(mem.Type()), from)) - } - } - - fmt.Fprintf(buf, "\n") -} - -func commaOk(x bool) string { - if x { - return ",ok" - } - return "" -} diff --git a/vendor/golang.org/x/tools/go/ssa/sanity.go b/vendor/golang.org/x/tools/go/ssa/sanity.go deleted file mode 100644 index 13bd39fe8..000000000 --- a/vendor/golang.org/x/tools/go/ssa/sanity.go +++ /dev/null @@ -1,557 +0,0 @@ -// Copyright 2013 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package ssa - -// An optional pass for sanity-checking invariants of the SSA representation. -// Currently it checks CFG invariants but little at the instruction level. - -import ( - "bytes" - "fmt" - "go/types" - "io" - "os" - "strings" -) - -type sanity struct { - reporter io.Writer - fn *Function - block *BasicBlock - instrs map[Instruction]struct{} - insane bool -} - -// sanityCheck performs integrity checking of the SSA representation -// of the function fn and returns true if it was valid. Diagnostics -// are written to reporter if non-nil, os.Stderr otherwise. Some -// diagnostics are only warnings and do not imply a negative result. -// -// Sanity-checking is intended to facilitate the debugging of code -// transformation passes. -func sanityCheck(fn *Function, reporter io.Writer) bool { - if reporter == nil { - reporter = os.Stderr - } - return (&sanity{reporter: reporter}).checkFunction(fn) -} - -// mustSanityCheck is like sanityCheck but panics instead of returning -// a negative result. -func mustSanityCheck(fn *Function, reporter io.Writer) { - if !sanityCheck(fn, reporter) { - fn.WriteTo(os.Stderr) - panic("SanityCheck failed") - } -} - -func (s *sanity) diagnostic(prefix, format string, args ...interface{}) { - fmt.Fprintf(s.reporter, "%s: function %s", prefix, s.fn) - if s.block != nil { - fmt.Fprintf(s.reporter, ", block %s", s.block) - } - io.WriteString(s.reporter, ": ") - fmt.Fprintf(s.reporter, format, args...) 
- io.WriteString(s.reporter, "\n") -} - -func (s *sanity) errorf(format string, args ...interface{}) { - s.insane = true - s.diagnostic("Error", format, args...) -} - -func (s *sanity) warnf(format string, args ...interface{}) { - s.diagnostic("Warning", format, args...) -} - -// findDuplicate returns an arbitrary basic block that appeared more -// than once in blocks, or nil if all were unique. -func findDuplicate(blocks []*BasicBlock) *BasicBlock { - if len(blocks) < 2 { - return nil - } - if blocks[0] == blocks[1] { - return blocks[0] - } - // Slow path: - m := make(map[*BasicBlock]bool) - for _, b := range blocks { - if m[b] { - return b - } - m[b] = true - } - return nil -} - -func (s *sanity) checkInstr(idx int, instr Instruction) { - switch instr := instr.(type) { - case *If, *Jump, *Return, *Panic: - s.errorf("control flow instruction not at end of block") - case *Phi: - if idx == 0 { - // It suffices to apply this check to just the first phi node. - if dup := findDuplicate(s.block.Preds); dup != nil { - s.errorf("phi node in block with duplicate predecessor %s", dup) - } - } else { - prev := s.block.Instrs[idx-1] - if _, ok := prev.(*Phi); !ok { - s.errorf("Phi instruction follows a non-Phi: %T", prev) - } - } - if ne, np := len(instr.Edges), len(s.block.Preds); ne != np { - s.errorf("phi node has %d edges but %d predecessors", ne, np) - - } else { - for i, e := range instr.Edges { - if e == nil { - s.errorf("phi node '%s' has no value for edge #%d from %s", instr.Comment, i, s.block.Preds[i]) - } else if !types.Identical(instr.typ, e.Type()) { - s.errorf("phi node '%s' has a different type (%s) for edge #%d from %s (%s)", - instr.Comment, instr.Type(), i, s.block.Preds[i], e.Type()) - } - } - } - - case *Alloc: - if !instr.Heap { - found := false - for _, l := range s.fn.Locals { - if l == instr { - found = true - break - } - } - if !found { - s.errorf("local alloc %s = %s does not appear in Function.Locals", instr.Name(), instr) - } - } - - case *BinOp: - case *Call: - if common := instr.Call; common.IsInvoke() { - if !types.IsInterface(common.Value.Type()) { - s.errorf("invoke on %s (%s) which is not an interface type (or type param)", common.Value, common.Value.Type()) - } - } - case *ChangeInterface: - case *ChangeType: - case *SliceToArrayPointer: - case *Convert: - if from := instr.X.Type(); !isBasicConvTypes(typeSetOf(from)) { - if to := instr.Type(); !isBasicConvTypes(typeSetOf(to)) { - s.errorf("convert %s -> %s: at least one type must be basic (or all basic, []byte, or []rune)", from, to) - } - } - case *MultiConvert: - case *Defer: - case *Extract: - case *Field: - case *FieldAddr: - case *Go: - case *Index: - case *IndexAddr: - case *Lookup: - case *MakeChan: - case *MakeClosure: - numFree := len(instr.Fn.(*Function).FreeVars) - numBind := len(instr.Bindings) - if numFree != numBind { - s.errorf("MakeClosure has %d Bindings for function %s with %d free vars", - numBind, instr.Fn, numFree) - - } - if recv := instr.Type().(*types.Signature).Recv(); recv != nil { - s.errorf("MakeClosure's type includes receiver %s", recv.Type()) - } - - case *MakeInterface: - case *MakeMap: - case *MakeSlice: - case *MapUpdate: - case *Next: - case *Range: - case *RunDefers: - case *Select: - case *Send: - case *Slice: - case *Store: - case *TypeAssert: - case *UnOp: - case *DebugRef: - // TODO(adonovan): implement checks. 
- default: - panic(fmt.Sprintf("Unknown instruction type: %T", instr)) - } - - if call, ok := instr.(CallInstruction); ok { - if call.Common().Signature() == nil { - s.errorf("nil signature: %s", call) - } - } - - // Check that value-defining instructions have valid types - // and a valid referrer list. - if v, ok := instr.(Value); ok { - t := v.Type() - if t == nil { - s.errorf("no type: %s = %s", v.Name(), v) - } else if t == tRangeIter { - // not a proper type; ignore. - } else if b, ok := t.Underlying().(*types.Basic); ok && b.Info()&types.IsUntyped != 0 { - s.errorf("instruction has 'untyped' result: %s = %s : %s", v.Name(), v, t) - } - s.checkReferrerList(v) - } - - // Untyped constants are legal as instruction Operands(), - // for example: - // _ = "foo"[0] - // or: - // if wordsize==64 {...} - - // All other non-Instruction Values can be found via their - // enclosing Function or Package. -} - -func (s *sanity) checkFinalInstr(instr Instruction) { - switch instr := instr.(type) { - case *If: - if nsuccs := len(s.block.Succs); nsuccs != 2 { - s.errorf("If-terminated block has %d successors; expected 2", nsuccs) - return - } - if s.block.Succs[0] == s.block.Succs[1] { - s.errorf("If-instruction has same True, False target blocks: %s", s.block.Succs[0]) - return - } - - case *Jump: - if nsuccs := len(s.block.Succs); nsuccs != 1 { - s.errorf("Jump-terminated block has %d successors; expected 1", nsuccs) - return - } - - case *Return: - if nsuccs := len(s.block.Succs); nsuccs != 0 { - s.errorf("Return-terminated block has %d successors; expected none", nsuccs) - return - } - if na, nf := len(instr.Results), s.fn.Signature.Results().Len(); nf != na { - s.errorf("%d-ary return in %d-ary function", na, nf) - } - - case *Panic: - if nsuccs := len(s.block.Succs); nsuccs != 0 { - s.errorf("Panic-terminated block has %d successors; expected none", nsuccs) - return - } - - default: - s.errorf("non-control flow instruction at end of block") - } -} - -func (s *sanity) checkBlock(b *BasicBlock, index int) { - s.block = b - - if b.Index != index { - s.errorf("block has incorrect Index %d", b.Index) - } - if b.parent != s.fn { - s.errorf("block has incorrect parent %s", b.parent) - } - - // Check all blocks are reachable. - // (The entry block is always implicitly reachable, - // as is the Recover block, if any.) - if (index > 0 && b != b.parent.Recover) && len(b.Preds) == 0 { - s.warnf("unreachable block") - if b.Instrs == nil { - // Since this block is about to be pruned, - // tolerating transient problems in it - // simplifies other optimizations. - return - } - } - - // Check predecessor and successor relations are dual, - // and that all blocks in CFG belong to same function. - for _, a := range b.Preds { - found := false - for _, bb := range a.Succs { - if bb == b { - found = true - break - } - } - if !found { - s.errorf("expected successor edge in predecessor %s; found only: %s", a, a.Succs) - } - if a.parent != s.fn { - s.errorf("predecessor %s belongs to different function %s", a, a.parent) - } - } - for _, c := range b.Succs { - found := false - for _, bb := range c.Preds { - if bb == b { - found = true - break - } - } - if !found { - s.errorf("expected predecessor edge in successor %s; found only: %s", c, c.Preds) - } - if c.parent != s.fn { - s.errorf("successor %s belongs to different function %s", c, c.parent) - } - } - - // Check each instruction is sane. 
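[Editor's note] Among the invariants that checkBlock verifies above is that predecessor and successor edges are duals of one another. The toy checker below (its block type is invented and unrelated to ssa.BasicBlock) illustrates just that invariant.

package main

import "fmt"

type block struct {
	name         string
	preds, succs []*block
}

// checkDual reports edges recorded on one side of the CFG but not mirrored on the other.
func checkDual(blocks []*block) []string {
	contains := func(s []*block, b *block) bool {
		for _, x := range s {
			if x == b {
				return true
			}
		}
		return false
	}
	var errs []string
	for _, b := range blocks {
		for _, s := range b.succs {
			if !contains(s.preds, b) {
				errs = append(errs, fmt.Sprintf("edge %s->%s missing from %s.preds", b.name, s.name, s.name))
			}
		}
		for _, p := range b.preds {
			if !contains(p.succs, b) {
				errs = append(errs, fmt.Sprintf("edge %s->%s missing from %s.succs", p.name, b.name, p.name))
			}
		}
	}
	return errs
}

func main() {
	entry := &block{name: "entry"}
	body := &block{name: "body"}
	entry.succs = []*block{body} // deliberately forgot body.preds = {entry}
	fmt.Println(checkDual([]*block{entry, body}))
}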
- n := len(b.Instrs) - if n == 0 { - s.errorf("basic block contains no instructions") - } - var rands [10]*Value // reuse storage - for j, instr := range b.Instrs { - if instr == nil { - s.errorf("nil instruction at index %d", j) - continue - } - if b2 := instr.Block(); b2 == nil { - s.errorf("nil Block() for instruction at index %d", j) - continue - } else if b2 != b { - s.errorf("wrong Block() (%s) for instruction at index %d ", b2, j) - continue - } - if j < n-1 { - s.checkInstr(j, instr) - } else { - s.checkFinalInstr(instr) - } - - // Check Instruction.Operands. - operands: - for i, op := range instr.Operands(rands[:0]) { - if op == nil { - s.errorf("nil operand pointer %d of %s", i, instr) - continue - } - val := *op - if val == nil { - continue // a nil operand is ok - } - - // Check that "untyped" types only appear on constant operands. - if _, ok := (*op).(*Const); !ok { - if basic, ok := (*op).Type().Underlying().(*types.Basic); ok { - if basic.Info()&types.IsUntyped != 0 { - s.errorf("operand #%d of %s is untyped: %s", i, instr, basic) - } - } - } - - // Check that Operands that are also Instructions belong to same function. - // TODO(adonovan): also check their block dominates block b. - if val, ok := val.(Instruction); ok { - if val.Block() == nil { - s.errorf("operand %d of %s is an instruction (%s) that belongs to no block", i, instr, val) - } else if val.Parent() != s.fn { - s.errorf("operand %d of %s is an instruction (%s) from function %s", i, instr, val, val.Parent()) - } - } - - // Check that each function-local operand of - // instr refers back to instr. (NB: quadratic) - switch val := val.(type) { - case *Const, *Global, *Builtin: - continue // not local - case *Function: - if val.parent == nil { - continue // only anon functions are local - } - } - - // TODO(adonovan): check val.Parent() != nil <=> val.Referrers() is defined. - - if refs := val.Referrers(); refs != nil { - for _, ref := range *refs { - if ref == instr { - continue operands - } - } - s.errorf("operand %d of %s (%s) does not refer to us", i, instr, val) - } else { - s.errorf("operand %d of %s (%s) has no referrers", i, instr, val) - } - } - } -} - -func (s *sanity) checkReferrerList(v Value) { - refs := v.Referrers() - if refs == nil { - s.errorf("%s has missing referrer list", v.Name()) - return - } - for i, ref := range *refs { - if _, ok := s.instrs[ref]; !ok { - s.errorf("%s.Referrers()[%d] = %s is not an instruction belonging to this function", v.Name(), i, ref) - } - } -} - -func (s *sanity) checkFunction(fn *Function) bool { - // TODO(adonovan): check Function invariants: - // - check params match signature - // - check transient fields are nil - // - warn if any fn.Locals do not appear among block instructions. - - // TODO(taking): Sanity check origin, typeparams, and typeargs. - s.fn = fn - if fn.Prog == nil { - s.errorf("nil Prog") - } - - var buf bytes.Buffer - _ = fn.String() // must not crash - _ = fn.RelString(fn.relPkg()) // must not crash - WriteFunction(&buf, fn) // must not crash - - // All functions have a package, except delegates (which are - // shared across packages, or duplicated as weak symbols in a - // separate-compilation model), and error.Error. 
- if fn.Pkg == nil { - if strings.HasPrefix(fn.Synthetic, "from type information (on demand)") || - strings.HasPrefix(fn.Synthetic, "wrapper ") || - strings.HasPrefix(fn.Synthetic, "bound ") || - strings.HasPrefix(fn.Synthetic, "thunk ") || - strings.HasSuffix(fn.name, "Error") || - strings.HasPrefix(fn.Synthetic, "instance ") || - strings.HasPrefix(fn.Synthetic, "instantiation ") || - (fn.parent != nil && len(fn.typeargs) > 0) /* anon fun in instance */ { - // ok - } else { - s.errorf("nil Pkg") - } - } - if src, syn := fn.Synthetic == "", fn.Syntax() != nil; src != syn { - if len(fn.typeargs) > 0 && fn.Prog.mode&InstantiateGenerics != 0 { - // ok (instantiation with InstantiateGenerics on) - } else if fn.topLevelOrigin != nil && len(fn.typeargs) > 0 { - // ok (we always have the syntax set for instantiation) - } else { - s.errorf("got fromSource=%t, hasSyntax=%t; want same values", src, syn) - } - } - for i, l := range fn.Locals { - if l.Parent() != fn { - s.errorf("Local %s at index %d has wrong parent", l.Name(), i) - } - if l.Heap { - s.errorf("Local %s at index %d has Heap flag set", l.Name(), i) - } - } - // Build the set of valid referrers. - s.instrs = make(map[Instruction]struct{}) - for _, b := range fn.Blocks { - for _, instr := range b.Instrs { - s.instrs[instr] = struct{}{} - } - } - for i, p := range fn.Params { - if p.Parent() != fn { - s.errorf("Param %s at index %d has wrong parent", p.Name(), i) - } - // Check common suffix of Signature and Params match type. - if sig := fn.Signature; sig != nil { - j := i - len(fn.Params) + sig.Params().Len() // index within sig.Params - if j < 0 { - continue - } - if !types.Identical(p.Type(), sig.Params().At(j).Type()) { - s.errorf("Param %s at index %d has wrong type (%s, versus %s in Signature)", p.Name(), i, p.Type(), sig.Params().At(j).Type()) - - } - } - s.checkReferrerList(p) - } - for i, fv := range fn.FreeVars { - if fv.Parent() != fn { - s.errorf("FreeVar %s at index %d has wrong parent", fv.Name(), i) - } - s.checkReferrerList(fv) - } - - if fn.Blocks != nil && len(fn.Blocks) == 0 { - // Function _had_ blocks (so it's not external) but - // they were "optimized" away, even the entry block. - s.errorf("Blocks slice is non-nil but empty") - } - for i, b := range fn.Blocks { - if b == nil { - s.warnf("nil *BasicBlock at f.Blocks[%d]", i) - continue - } - s.checkBlock(b, i) - } - if fn.Recover != nil && fn.Blocks[fn.Recover.Index] != fn.Recover { - s.errorf("Recover block is not in Blocks slice") - } - - s.block = nil - for i, anon := range fn.AnonFuncs { - if anon.Parent() != fn { - s.errorf("AnonFuncs[%d]=%s but %s.Parent()=%s", i, anon, anon, anon.Parent()) - } - if i != int(anon.anonIdx) { - s.errorf("AnonFuncs[%d]=%s but %s.anonIdx=%d", i, anon, anon, anon.anonIdx) - } - } - s.fn = nil - return !s.insane -} - -// sanityCheckPackage checks invariants of packages upon creation. -// It does not require that the package is built. -// Unlike sanityCheck (for functions), it just panics at the first error. -func sanityCheckPackage(pkg *Package) { - if pkg.Pkg == nil { - panic(fmt.Sprintf("Package %s has no Object", pkg)) - } - _ = pkg.String() // must not crash - - for name, mem := range pkg.Members { - if name != mem.Name() { - panic(fmt.Sprintf("%s: %T.Name() = %s, want %s", - pkg.Pkg.Path(), mem, mem.Name(), name)) - } - obj := mem.Object() - if obj == nil { - // This check is sound because fields - // {Global,Function}.object have type - // types.Object. 
(If they were declared as - // *types.{Var,Func}, we'd have a non-empty - // interface containing a nil pointer.) - - continue // not all members have typechecker objects - } - if obj.Name() != name { - if obj.Name() == "init" && strings.HasPrefix(mem.Name(), "init#") { - // Ok. The name of a declared init function varies between - // its types.Func ("init") and its ssa.Function ("init#%d"). - } else { - panic(fmt.Sprintf("%s: %T.Object().Name() = %s, want %s", - pkg.Pkg.Path(), mem, obj.Name(), name)) - } - } - if obj.Pos() != mem.Pos() { - panic(fmt.Sprintf("%s Pos=%d obj.Pos=%d", mem, mem.Pos(), obj.Pos())) - } - } -} diff --git a/vendor/golang.org/x/tools/go/ssa/source.go b/vendor/golang.org/x/tools/go/ssa/source.go deleted file mode 100644 index 6700305bd..000000000 --- a/vendor/golang.org/x/tools/go/ssa/source.go +++ /dev/null @@ -1,290 +0,0 @@ -// Copyright 2013 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package ssa - -// This file defines utilities for working with source positions -// or source-level named entities ("objects"). - -// TODO(adonovan): test that {Value,Instruction}.Pos() positions match -// the originating syntax, as specified. - -import ( - "go/ast" - "go/token" - "go/types" - - "golang.org/x/tools/internal/typeparams" -) - -// EnclosingFunction returns the function that contains the syntax -// node denoted by path. -// -// Syntax associated with package-level variable specifications is -// enclosed by the package's init() function. -// -// Returns nil if not found; reasons might include: -// - the node is not enclosed by any function. -// - the node is within an anonymous function (FuncLit) and -// its SSA function has not been created yet -// (pkg.Build() has not yet been called). -func EnclosingFunction(pkg *Package, path []ast.Node) *Function { - // Start with package-level function... - fn := findEnclosingPackageLevelFunction(pkg, path) - if fn == nil { - return nil // not in any function - } - - // ...then walk down the nested anonymous functions. - n := len(path) -outer: - for i := range path { - if lit, ok := path[n-1-i].(*ast.FuncLit); ok { - for _, anon := range fn.AnonFuncs { - if anon.Pos() == lit.Type.Func { - fn = anon - continue outer - } - } - // SSA function not found: - // - package not yet built, or maybe - // - builder skipped FuncLit in dead block - // (in principle; but currently the Builder - // generates even dead FuncLits). - return nil - } - } - return fn -} - -// HasEnclosingFunction returns true if the AST node denoted by path -// is contained within the declaration of some function or -// package-level variable. -// -// Unlike EnclosingFunction, the behaviour of this function does not -// depend on whether SSA code for pkg has been built, so it can be -// used to quickly reject check inputs that will cause -// EnclosingFunction to fail, prior to SSA building. -func HasEnclosingFunction(pkg *Package, path []ast.Node) bool { - return findEnclosingPackageLevelFunction(pkg, path) != nil -} - -// findEnclosingPackageLevelFunction returns the Function -// corresponding to the package-level function enclosing path. -func findEnclosingPackageLevelFunction(pkg *Package, path []ast.Node) *Function { - if n := len(path); n >= 2 { // [... {Gen,Func}Decl File] - switch decl := path[n-2].(type) { - case *ast.GenDecl: - if decl.Tok == token.VAR && n >= 3 { - // Package-level 'var' initializer. 
- return pkg.init - } - - case *ast.FuncDecl: - if decl.Recv == nil && decl.Name.Name == "init" { - // Explicit init() function. - for _, b := range pkg.init.Blocks { - for _, instr := range b.Instrs { - if instr, ok := instr.(*Call); ok { - if callee, ok := instr.Call.Value.(*Function); ok && callee.Pkg == pkg && callee.Pos() == decl.Name.NamePos { - return callee - } - } - } - } - // Hack: return non-nil when SSA is not yet - // built so that HasEnclosingFunction works. - return pkg.init - } - // Declared function/method. - return findNamedFunc(pkg, decl.Name.NamePos) - } - } - return nil // not in any function -} - -// findNamedFunc returns the named function whose FuncDecl.Ident is at -// position pos. -func findNamedFunc(pkg *Package, pos token.Pos) *Function { - // Look at all package members and method sets of named types. - // Not very efficient. - for _, mem := range pkg.Members { - switch mem := mem.(type) { - case *Function: - if mem.Pos() == pos { - return mem - } - case *Type: - mset := pkg.Prog.MethodSets.MethodSet(types.NewPointer(mem.Type())) - for i, n := 0, mset.Len(); i < n; i++ { - // Don't call Program.Method: avoid creating wrappers. - obj := mset.At(i).Obj().(*types.Func) - if obj.Pos() == pos { - // obj from MethodSet may not be the origin type. - m := typeparams.OriginMethod(obj) - return pkg.objects[m].(*Function) - } - } - } - } - return nil -} - -// ValueForExpr returns the SSA Value that corresponds to non-constant -// expression e. -// -// It returns nil if no value was found, e.g. -// - the expression is not lexically contained within f; -// - f was not built with debug information; or -// - e is a constant expression. (For efficiency, no debug -// information is stored for constants. Use -// go/types.Info.Types[e].Value instead.) -// - e is a reference to nil or a built-in function. -// - the value was optimised away. -// -// If e is an addressable expression used in an lvalue context, -// value is the address denoted by e, and isAddr is true. -// -// The types of e (or &e, if isAddr) and the result are equal -// (modulo "untyped" bools resulting from comparisons). -// -// (Tip: to find the ssa.Value given a source position, use -// astutil.PathEnclosingInterval to locate the ast.Node, then -// EnclosingFunction to locate the Function, then ValueForExpr to find -// the ssa.Value.) -func (f *Function) ValueForExpr(e ast.Expr) (value Value, isAddr bool) { - if f.debugInfo() { // (opt) - e = unparen(e) - for _, b := range f.Blocks { - for _, instr := range b.Instrs { - if ref, ok := instr.(*DebugRef); ok { - if ref.Expr == e { - return ref.X, ref.IsAddr - } - } - } - } - } - return -} - -// --- Lookup functions for source-level named entities (types.Objects) --- - -// Package returns the SSA Package corresponding to the specified -// type-checker package. It returns nil if no such Package was -// created by a prior call to prog.CreatePackage. -func (prog *Program) Package(pkg *types.Package) *Package { - return prog.packages[pkg] -} - -// packageLevelMember returns the package-level member corresponding -// to the specified symbol, which may be a package-level const -// (*NamedConst), var (*Global) or func/method (*Function) of some -// package in prog. -// -// It returns nil if the object belongs to a package that has not been -// created by prog.CreatePackage. 
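Editor's note: the tip in the ValueForExpr documentation above describes a three-step lookup (astutil.PathEnclosingInterval, then EnclosingFunction, then ValueForExpr). The following sketch, which is not part of the vendored file or of this diff, shows that workflow under the stated assumption that the package was built with debug information; the package and function names (ssalookup, valueAt) are hypothetical.

package ssalookup

import (
	"go/ast"
	"go/token"

	"golang.org/x/tools/go/ast/astutil"
	"golang.org/x/tools/go/ssa"
)

// valueAt returns the ssa.Value for the innermost expression enclosing pos in file,
// and whether that value is the expression's address. It assumes pkg was built with
// debug information (e.g. the GlobalDebug builder mode), as ValueForExpr requires.
func valueAt(pkg *ssa.Package, file *ast.File, pos token.Pos) (ssa.Value, bool) {
	path, _ := astutil.PathEnclosingInterval(file, pos, pos)
	fn := ssa.EnclosingFunction(pkg, path)
	if fn == nil {
		return nil, false // not inside any function, or SSA not yet built
	}
	// path[0] is the innermost node; walk outward until an expression with a value is found.
	for _, n := range path {
		if e, ok := n.(ast.Expr); ok {
			if v, isAddr := fn.ValueForExpr(e); v != nil {
				return v, isAddr
			}
		}
	}
	return nil, false
}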
-func (prog *Program) packageLevelMember(obj types.Object) Member { - if pkg, ok := prog.packages[obj.Pkg()]; ok { - return pkg.objects[obj] - } - return nil -} - -// FuncValue returns the SSA function or (non-interface) method -// denoted by the specified func symbol. It returns nil id the symbol -// denotes an interface method, or belongs to a package that was not -// created by prog.CreatePackage. -func (prog *Program) FuncValue(obj *types.Func) *Function { - fn, _ := prog.packageLevelMember(obj).(*Function) - return fn -} - -// ConstValue returns the SSA constant denoted by the specified const symbol. -func (prog *Program) ConstValue(obj *types.Const) *Const { - // TODO(adonovan): opt: share (don't reallocate) - // Consts for const objects and constant ast.Exprs. - - // Universal constant? {true,false,nil} - if obj.Parent() == types.Universe { - return NewConst(obj.Val(), obj.Type()) - } - // Package-level named constant? - if v := prog.packageLevelMember(obj); v != nil { - return v.(*NamedConst).Value - } - return NewConst(obj.Val(), obj.Type()) -} - -// VarValue returns the SSA Value that corresponds to a specific -// identifier denoting the specified var symbol. -// -// VarValue returns nil if a local variable was not found, perhaps -// because its package was not built, the debug information was not -// requested during SSA construction, or the value was optimized away. -// -// ref is the path to an ast.Ident (e.g. from PathEnclosingInterval), -// and that ident must resolve to obj. -// -// pkg is the package enclosing the reference. (A reference to a var -// always occurs within a function, so we need to know where to find it.) -// -// If the identifier is a field selector and its base expression is -// non-addressable, then VarValue returns the value of that field. -// For example: -// -// func f() struct {x int} -// f().x // VarValue(x) returns a *Field instruction of type int -// -// All other identifiers denote addressable locations (variables). -// For them, VarValue may return either the variable's address or its -// value, even when the expression is evaluated only for its value; the -// situation is reported by isAddr, the second component of the result. -// -// If !isAddr, the returned value is the one associated with the -// specific identifier. For example, -// -// var x int // VarValue(x) returns Const 0 here -// x = 1 // VarValue(x) returns Const 1 here -// -// It is not specified whether the value or the address is returned in -// any particular case, as it may depend upon optimizations performed -// during SSA code generation, such as registerization, constant -// folding, avoidance of materialization of subexpressions, etc. -func (prog *Program) VarValue(obj *types.Var, pkg *Package, ref []ast.Node) (value Value, isAddr bool) { - // All references to a var are local to some function, possibly init. - fn := EnclosingFunction(pkg, ref) - if fn == nil { - return // e.g. def of struct field; SSA not built? - } - - id := ref[0].(*ast.Ident) - - // Defining ident of a parameter? - if id.Pos() == obj.Pos() { - for _, param := range fn.Params { - if param.Object() == obj { - return param, false - } - } - } - - // Other ident? - for _, b := range fn.Blocks { - for _, instr := range b.Instrs { - if dr, ok := instr.(*DebugRef); ok { - if dr.Pos() == id.Pos() { - return dr.X, dr.IsAddr - } - } - } - } - - // Defining ident of package-level var? - if v := prog.packageLevelMember(obj); v != nil { - return v.(*Global), true - } - - return // e.g. 
debug info not requested, or var optimized away -} diff --git a/vendor/golang.org/x/tools/go/ssa/ssa.go b/vendor/golang.org/x/tools/go/ssa/ssa.go deleted file mode 100644 index 30bf4bc67..000000000 --- a/vendor/golang.org/x/tools/go/ssa/ssa.go +++ /dev/null @@ -1,1835 +0,0 @@ -// Copyright 2013 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package ssa - -// This package defines a high-level intermediate representation for -// Go programs using static single-assignment (SSA) form. - -import ( - "fmt" - "go/ast" - "go/constant" - "go/token" - "go/types" - "sync" - - "golang.org/x/tools/go/types/typeutil" - "golang.org/x/tools/internal/typeparams" -) - -// A Program is a partial or complete Go program converted to SSA form. -type Program struct { - Fset *token.FileSet // position information for the files of this Program - imported map[string]*Package // all importable Packages, keyed by import path - packages map[*types.Package]*Package // all created Packages - mode BuilderMode // set of mode bits for SSA construction - MethodSets typeutil.MethodSetCache // cache of type-checker's method-sets - - canon *canonizer // type canonicalization map - ctxt *types.Context // cache for type checking instantiations - - methodsMu sync.Mutex - methodSets typeutil.Map // maps type to its concrete *methodSet - - parameterized tpWalker // memoization of whether a type refers to type parameters - - runtimeTypesMu sync.Mutex - runtimeTypes typeutil.Map // set of runtime types (from MakeInterface) - - // objectMethods is a memoization of objectMethod - // to avoid creation of duplicate methods from type information. - objectMethodsMu sync.Mutex - objectMethods map[*types.Func]*Function -} - -// A Package is a single analyzed Go package containing Members for -// all package-level functions, variables, constants and types it -// declares. These may be accessed directly via Members, or via the -// type-specific accessor methods Func, Type, Var and Const. -// -// Members also contains entries for "init" (the synthetic package -// initializer) and "init#%d", the nth declared init function, -// and unspecified other things too. -type Package struct { - Prog *Program // the owning program - Pkg *types.Package // the corresponding go/types.Package - Members map[string]Member // all package members keyed by name (incl. init and init#%d) - objects map[types.Object]Member // mapping of package objects to members (incl. methods). Contains *NamedConst, *Global, *Function (values but not types) - init *Function // Func("init"); the package's init function - debug bool // include full debug info in this package - syntax bool // package was loaded from syntax - - // The following fields are set transiently, then cleared - // after building. - buildOnce sync.Once // ensures package building occurs once - ninit int32 // number of init functions - info *types.Info // package type information - files []*ast.File // package ASTs - created creator // members created as a result of building this package (includes declared functions, wrappers) - initVersion map[ast.Expr]string // goversion to use for each global var init expr -} - -// A Member is a member of a Go package, implemented by *NamedConst, -// *Global, *Function, or *Type; they are created by package-level -// const, var, func and type declarations respectively. 
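Editor's note: the Package documentation above describes Members as a map keyed by name, including the synthetic "init" and "init#N" entries. The short sketch below (not part of the vendored file or of this diff; dumpMembers is a hypothetical name) enumerates those members by concrete kind.

package ssalookup

import (
	"fmt"

	"golang.org/x/tools/go/ssa"
)

// dumpMembers prints each package-level member of pkg with its kind and type.
// Map iteration order is unspecified, so output order varies between runs.
func dumpMembers(pkg *ssa.Package) {
	for name, mem := range pkg.Members {
		switch m := mem.(type) {
		case *ssa.Function:
			fmt.Printf("func  %s %s\n", name, m.Signature)
		case *ssa.Global:
			fmt.Printf("var   %s %s\n", name, m.Type()) // Type() is the address type (*T)
		case *ssa.NamedConst:
			fmt.Printf("const %s = %s\n", name, m.Value.Value)
		case *ssa.Type:
			fmt.Printf("type  %s %s\n", name, m.Type())
		}
	}
}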
-type Member interface { - Name() string // declared name of the package member - String() string // package-qualified name of the package member - RelString(*types.Package) string // like String, but relative refs are unqualified - Object() types.Object // typechecker's object for this member, if any - Pos() token.Pos // position of member's declaration, if known - Type() types.Type // type of the package member - Token() token.Token // token.{VAR,FUNC,CONST,TYPE} - Package() *Package // the containing package -} - -// A Type is a Member of a Package representing a package-level named type. -type Type struct { - object *types.TypeName - pkg *Package -} - -// A NamedConst is a Member of a Package representing a package-level -// named constant. -// -// Pos() returns the position of the declaring ast.ValueSpec.Names[*] -// identifier. -// -// NB: a NamedConst is not a Value; it contains a constant Value, which -// it augments with the name and position of its 'const' declaration. -type NamedConst struct { - object *types.Const - Value *Const - pkg *Package -} - -// A Value is an SSA value that can be referenced by an instruction. -type Value interface { - // Name returns the name of this value, and determines how - // this Value appears when used as an operand of an - // Instruction. - // - // This is the same as the source name for Parameters, - // Builtins, Functions, FreeVars, Globals. - // For constants, it is a representation of the constant's value - // and type. For all other Values this is the name of the - // virtual register defined by the instruction. - // - // The name of an SSA Value is not semantically significant, - // and may not even be unique within a function. - Name() string - - // If this value is an Instruction, String returns its - // disassembled form; otherwise it returns unspecified - // human-readable information about the Value, such as its - // kind, name and type. - String() string - - // Type returns the type of this value. Many instructions - // (e.g. IndexAddr) change their behaviour depending on the - // types of their operands. - Type() types.Type - - // Parent returns the function to which this Value belongs. - // It returns nil for named Functions, Builtin, Const and Global. - Parent() *Function - - // Referrers returns the list of instructions that have this - // value as one of their operands; it may contain duplicates - // if an instruction has a repeated operand. - // - // Referrers actually returns a pointer through which the - // caller may perform mutations to the object's state. - // - // Referrers is currently only defined if Parent()!=nil, - // i.e. for the function-local values FreeVar, Parameter, - // Functions (iff anonymous) and all value-defining instructions. - // It returns nil for named Functions, Builtin, Const and Global. - // - // Instruction.Operands contains the inverse of this relation. - Referrers() *[]Instruction - - // Pos returns the location of the AST token most closely - // associated with the operation that gave rise to this value, - // or token.NoPos if it was not explicit in the source. - // - // For each ast.Node type, a particular token is designated as - // the closest location for the expression, e.g. the Lparen - // for an *ast.CallExpr. This permits a compact but - // approximate mapping from Values to source positions for use - // in diagnostic messages, for example. - // - // (Do not use this position to determine which Value - // corresponds to an ast.Expr; use Function.ValueForExpr - // instead. 
NB: it requires that the function was built with - // debug information.) - Pos() token.Pos -} - -// An Instruction is an SSA instruction that computes a new Value or -// has some effect. -// -// An Instruction that defines a value (e.g. BinOp) also implements -// the Value interface; an Instruction that only has an effect (e.g. Store) -// does not. -type Instruction interface { - // String returns the disassembled form of this value. - // - // Examples of Instructions that are Values: - // "x + y" (BinOp) - // "len([])" (Call) - // Note that the name of the Value is not printed. - // - // Examples of Instructions that are not Values: - // "return x" (Return) - // "*y = x" (Store) - // - // (The separation Value.Name() from Value.String() is useful - // for some analyses which distinguish the operation from the - // value it defines, e.g., 'y = local int' is both an allocation - // of memory 'local int' and a definition of a pointer y.) - String() string - - // Parent returns the function to which this instruction - // belongs. - Parent() *Function - - // Block returns the basic block to which this instruction - // belongs. - Block() *BasicBlock - - // setBlock sets the basic block to which this instruction belongs. - setBlock(*BasicBlock) - - // Operands returns the operands of this instruction: the - // set of Values it references. - // - // Specifically, it appends their addresses to rands, a - // user-provided slice, and returns the resulting slice, - // permitting avoidance of memory allocation. - // - // The operands are appended in undefined order, but the order - // is consistent for a given Instruction; the addresses are - // always non-nil but may point to a nil Value. Clients may - // store through the pointers, e.g. to effect a value - // renaming. - // - // Value.Referrers is a subset of the inverse of this - // relation. (Referrers are not tracked for all types of - // Values.) - Operands(rands []*Value) []*Value - - // Pos returns the location of the AST token most closely - // associated with the operation that gave rise to this - // instruction, or token.NoPos if it was not explicit in the - // source. - // - // For each ast.Node type, a particular token is designated as - // the closest location for the expression, e.g. the Go token - // for an *ast.GoStmt. This permits a compact but approximate - // mapping from Instructions to source positions for use in - // diagnostic messages, for example. - // - // (Do not use this position to determine which Instruction - // corresponds to an ast.Expr; see the notes for Value.Pos. - // This position may be used to determine which non-Value - // Instruction corresponds to some ast.Stmts, but not all: If - // and Jump instructions have no Pos(), for example.) - Pos() token.Pos -} - -// A Node is a node in the SSA value graph. Every concrete type that -// implements Node is also either a Value, an Instruction, or both. -// -// Node contains the methods common to Value and Instruction, plus the -// Operands and Referrers methods generalized to return nil for -// non-Instructions and non-Values, respectively. -// -// Node is provided to simplify SSA graph algorithms. Clients should -// use the more specific and informative Value or Instruction -// interfaces where appropriate. 
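Editor's note: the Value, Instruction, and Node documentation above defines the two edges of the SSA value graph, Operands and Referrers. The sketch below (not part of the vendored file or of this diff; dumpGraph is a hypothetical name) walks both relations for one built function, reusing the rands buffer as the Operands contract permits.

package ssalookup

import (
	"fmt"

	"golang.org/x/tools/go/ssa"
)

// dumpGraph prints every instruction of fn with its operands and, when the
// instruction also defines a value, the number of instructions referring to it.
func dumpGraph(fn *ssa.Function) {
	var rands []*ssa.Value // reused across iterations to avoid reallocation
	for _, b := range fn.Blocks {
		for _, instr := range b.Instrs {
			fmt.Printf("  %s\n", instr)
			rands = instr.Operands(rands[:0])
			for _, op := range rands {
				if *op != nil { // operand addresses are non-nil but may point to a nil Value
					fmt.Printf("    operand: %s\n", (*op).Name())
				}
			}
			if v, ok := instr.(ssa.Value); ok && v.Referrers() != nil {
				fmt.Printf("    referrers: %d\n", len(*v.Referrers()))
			}
		}
	}
}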
-type Node interface { - // Common methods: - String() string - Pos() token.Pos - Parent() *Function - - // Partial methods: - Operands(rands []*Value) []*Value // nil for non-Instructions - Referrers() *[]Instruction // nil for non-Values -} - -// Function represents the parameters, results, and code of a function -// or method. -// -// If Blocks is nil, this indicates an external function for which no -// Go source code is available. In this case, FreeVars, Locals, and -// Params are nil too. Clients performing whole-program analysis must -// handle external functions specially. -// -// Blocks contains the function's control-flow graph (CFG). -// Blocks[0] is the function entry point; block order is not otherwise -// semantically significant, though it may affect the readability of -// the disassembly. -// To iterate over the blocks in dominance order, use DomPreorder(). -// -// Recover is an optional second entry point to which control resumes -// after a recovered panic. The Recover block may contain only a return -// statement, preceded by a load of the function's named return -// parameters, if any. -// -// A nested function (Parent()!=nil) that refers to one or more -// lexically enclosing local variables ("free variables") has FreeVars. -// Such functions cannot be called directly but require a -// value created by MakeClosure which, via its Bindings, supplies -// values for these parameters. -// -// If the function is a method (Signature.Recv() != nil) then the first -// element of Params is the receiver parameter. -// -// A Go package may declare many functions called "init". -// For each one, Object().Name() returns "init" but Name() returns -// "init#1", etc, in declaration order. -// -// Pos() returns the declaring ast.FuncLit.Type.Func or the position -// of the ast.FuncDecl.Name, if the function was explicit in the -// source. Synthetic wrappers, for which Synthetic != "", may share -// the same position as the function they wrap. -// Syntax.Pos() always returns the position of the declaring "func" token. -// -// Type() returns the function's Signature. -// -// A generic function is a function or method that has uninstantiated type -// parameters (TypeParams() != nil). Consider a hypothetical generic -// method, (*Map[K,V]).Get. It may be instantiated with all -// non-parameterized types as (*Map[string,int]).Get or with -// parameterized types as (*Map[string,U]).Get, where U is a type parameter. -// In both instantiations, Origin() refers to the instantiated generic -// method, (*Map[K,V]).Get, TypeParams() refers to the parameters [K,V] of -// the generic method. TypeArgs() refers to [string,U] or [string,int], -// respectively, and is nil in the generic method. -type Function struct { - name string - object *types.Func // symbol for declared function (nil for FuncLit or synthetic init) - method *selection // info about provenance of synthetic methods; thunk => non-nil - Signature *types.Signature - pos token.Pos - - // source information - Synthetic string // provenance of synthetic function; "" for true source functions - syntax ast.Node // *ast.Func{Decl,Lit}, if from syntax (incl. 
generic instances) - info *types.Info // type annotations (iff syntax != nil) - goversion string // Go version of syntax (NB: init is special) - - build buildFunc // algorithm to build function body (nil => built) - parent *Function // enclosing function if anon; nil if global - Pkg *Package // enclosing package; nil for shared funcs (wrappers and error.Error) - Prog *Program // enclosing program - - // These fields are populated only when the function body is built: - - Params []*Parameter // function parameters; for methods, includes receiver - FreeVars []*FreeVar // free variables whose values must be supplied by closure - Locals []*Alloc // frame-allocated variables of this function - Blocks []*BasicBlock // basic blocks of the function; nil => external - Recover *BasicBlock // optional; control transfers here after recovered panic - AnonFuncs []*Function // anonymous functions directly beneath this one - referrers []Instruction // referring instructions (iff Parent() != nil) - anonIdx int32 // position of a nested function in parent's AnonFuncs. fn.Parent()!=nil => fn.Parent().AnonFunc[fn.anonIdx] == fn. - - typeparams *types.TypeParamList // type parameters of this function. typeparams.Len() > 0 => generic or instance of generic function - typeargs []types.Type // type arguments that instantiated typeparams. len(typeargs) > 0 => instance of generic function - topLevelOrigin *Function // the origin function if this is an instance of a source function. nil if Parent()!=nil. - generic *generic // instances of this function, if generic - - // The following fields are cleared after building. - currentBlock *BasicBlock // where to emit code - vars map[*types.Var]Value // addresses of local variables - namedResults []*Alloc // tuple of named results - targets *targets // linked stack of branch targets - lblocks map[*types.Label]*lblock // labelled blocks - subst *subster // type parameter substitutions (if non-nil) -} - -// BasicBlock represents an SSA basic block. -// -// The final element of Instrs is always an explicit transfer of -// control (If, Jump, Return, or Panic). -// -// A block may contain no Instructions only if it is unreachable, -// i.e., Preds is nil. Empty blocks are typically pruned. -// -// BasicBlocks and their Preds/Succs relation form a (possibly cyclic) -// graph independent of the SSA Value graph: the control-flow graph or -// CFG. It is illegal for multiple edges to exist between the same -// pair of blocks. -// -// Each BasicBlock is also a node in the dominator tree of the CFG. -// The tree may be navigated using Idom()/Dominees() and queried using -// Dominates(). -// -// The order of Preds and Succs is significant (to Phi and If -// instructions, respectively). -type BasicBlock struct { - Index int // index of this block within Parent().Blocks - Comment string // optional label; no semantic significance - parent *Function // parent function - Instrs []Instruction // instructions in order - Preds, Succs []*BasicBlock // predecessors and successors - succs2 [2]*BasicBlock // initial space for Succs - dom domInfo // dominator tree info - gaps int // number of nil Instrs (transient) - rundefers int // number of rundefers (transient) -} - -// Pure values ---------------------------------------- - -// A FreeVar represents a free variable of the function to which it -// belongs. -// -// FreeVars are used to implement anonymous functions, whose free -// variables are lexically captured in a closure formed by -// MakeClosure. 
The value of such a free var is an Alloc or another -// FreeVar and is considered a potentially escaping heap address, with -// pointer type. -// -// FreeVars are also used to implement bound method closures. Such a -// free var represents the receiver value and may be of any type that -// has concrete methods. -// -// Pos() returns the position of the value that was captured, which -// belongs to an enclosing function. -type FreeVar struct { - name string - typ types.Type - pos token.Pos - parent *Function - referrers []Instruction - - // Transiently needed during building. - outer Value // the Value captured from the enclosing context. -} - -// A Parameter represents an input parameter of a function. -type Parameter struct { - name string - object *types.Var // non-nil - typ types.Type - parent *Function - referrers []Instruction -} - -// A Const represents a value known at build time. -// -// Consts include true constants of boolean, numeric, and string types, as -// defined by the Go spec; these are represented by a non-nil Value field. -// -// Consts also include the "zero" value of any type, of which the nil values -// of various pointer-like types are a special case; these are represented -// by a nil Value field. -// -// Pos() returns token.NoPos. -// -// Example printed forms: -// -// 42:int -// "hello":untyped string -// 3+4i:MyComplex -// nil:*int -// nil:[]string -// [3]int{}:[3]int -// struct{x string}{}:struct{x string} -// 0:interface{int|int64} -// nil:interface{bool|int} // no go/constant representation -type Const struct { - typ types.Type - Value constant.Value -} - -// A Global is a named Value holding the address of a package-level -// variable. -// -// Pos() returns the position of the ast.ValueSpec.Names[*] -// identifier. -type Global struct { - name string - object types.Object // a *types.Var; may be nil for synthetics e.g. init$guard - typ types.Type - pos token.Pos - - Pkg *Package -} - -// A Builtin represents a specific use of a built-in function, e.g. len. -// -// Builtins are immutable values. Builtins do not have addresses. -// Builtins can only appear in CallCommon.Value. -// -// Name() indicates the function: one of the built-in functions from the -// Go spec (excluding "make" and "new") or one of these ssa-defined -// intrinsics: -// -// // wrapnilchk returns ptr if non-nil, panics otherwise. -// // (For use in indirection wrappers.) -// func ssa:wrapnilchk(ptr *T, recvType, methodName string) *T -// -// Object() returns a *types.Builtin for built-ins defined by the spec, -// nil for others. -// -// Type() returns a *types.Signature representing the effective -// signature of the built-in for this call. -type Builtin struct { - name string - sig *types.Signature -} - -// Value-defining instructions ---------------------------------------- - -// The Alloc instruction reserves space for a variable of the given type, -// zero-initializes it, and yields its address. -// -// Alloc values are always addresses, and have pointer types, so the -// type of the allocated variable is actually -// Type().Underlying().(*types.Pointer).Elem(). -// -// If Heap is false, Alloc zero-initializes the same local variable in -// the call frame and returns its address; in this case the Alloc must -// be present in Function.Locals. We call this a "local" alloc. -// -// If Heap is true, Alloc allocates a new zero-initialized variable -// each time the instruction is executed. We call this a "new" alloc. 
-// -// When Alloc is applied to a channel, map or slice type, it returns -// the address of an uninitialized (nil) reference of that kind; store -// the result of MakeSlice, MakeMap or MakeChan in that location to -// instantiate these types. -// -// Pos() returns the ast.CompositeLit.Lbrace for a composite literal, -// or the ast.CallExpr.Rparen for a call to new() or for a call that -// allocates a varargs slice. -// -// Example printed form: -// -// t0 = local int -// t1 = new int -type Alloc struct { - register - Comment string - Heap bool - index int // dense numbering; for lifting -} - -// The Phi instruction represents an SSA φ-node, which combines values -// that differ across incoming control-flow edges and yields a new -// value. Within a block, all φ-nodes must appear before all non-φ -// nodes. -// -// Pos() returns the position of the && or || for short-circuit -// control-flow joins, or that of the *Alloc for φ-nodes inserted -// during SSA renaming. -// -// Example printed form: -// -// t2 = phi [0: t0, 1: t1] -type Phi struct { - register - Comment string // a hint as to its purpose - Edges []Value // Edges[i] is value for Block().Preds[i] -} - -// The Call instruction represents a function or method call. -// -// The Call instruction yields the function result if there is exactly -// one. Otherwise it returns a tuple, the components of which are -// accessed via Extract. -// -// See CallCommon for generic function call documentation. -// -// Pos() returns the ast.CallExpr.Lparen, if explicit in the source. -// -// Example printed form: -// -// t2 = println(t0, t1) -// t4 = t3() -// t7 = invoke t5.Println(...t6) -type Call struct { - register - Call CallCommon -} - -// The BinOp instruction yields the result of binary operation X Op Y. -// -// Pos() returns the ast.BinaryExpr.OpPos, if explicit in the source. -// -// Example printed form: -// -// t1 = t0 + 1:int -type BinOp struct { - register - // One of: - // ADD SUB MUL QUO REM + - * / % - // AND OR XOR SHL SHR AND_NOT & | ^ << >> &^ - // EQL NEQ LSS LEQ GTR GEQ == != < <= < >= - Op token.Token - X, Y Value -} - -// The UnOp instruction yields the result of Op X. -// ARROW is channel receive. -// MUL is pointer indirection (load). -// XOR is bitwise complement. -// SUB is negation. -// NOT is logical negation. -// -// If CommaOk and Op=ARROW, the result is a 2-tuple of the value above -// and a boolean indicating the success of the receive. The -// components of the tuple are accessed using Extract. -// -// Pos() returns the ast.UnaryExpr.OpPos, if explicit in the source. -// For receive operations (ARROW) implicit in ranging over a channel, -// Pos() returns the ast.RangeStmt.For. -// For implicit memory loads (STAR), Pos() returns the position of the -// most closely associated source-level construct; the details are not -// specified. -// -// Example printed form: -// -// t0 = *x -// t2 = <-t1,ok -type UnOp struct { - register - Op token.Token // One of: NOT SUB ARROW MUL XOR ! - <- * ^ - X Value - CommaOk bool -} - -// The ChangeType instruction applies to X a value-preserving type -// change to Type(). -// -// Type changes are permitted: -// - between a named type and its underlying type. -// - between two named types of the same underlying type. -// - between (possibly named) pointers to identical base types. -// - from a bidirectional channel to a read- or write-channel, -// optionally adding/removing a name. -// - between a type (t) and an instance of the type (tσ), i.e. 
-// Type() == σ(X.Type()) (or X.Type()== σ(Type())) where -// σ is the type substitution of Parent().TypeParams by -// Parent().TypeArgs. -// -// This operation cannot fail dynamically. -// -// Type changes may to be to or from a type parameter (or both). All -// types in the type set of X.Type() have a value-preserving type -// change to all types in the type set of Type(). -// -// Pos() returns the ast.CallExpr.Lparen, if the instruction arose -// from an explicit conversion in the source. -// -// Example printed form: -// -// t1 = changetype *int <- IntPtr (t0) -type ChangeType struct { - register - X Value -} - -// The Convert instruction yields the conversion of value X to type -// Type(). One or both of those types is basic (but possibly named). -// -// A conversion may change the value and representation of its operand. -// Conversions are permitted: -// - between real numeric types. -// - between complex numeric types. -// - between string and []byte or []rune. -// - between pointers and unsafe.Pointer. -// - between unsafe.Pointer and uintptr. -// - from (Unicode) integer to (UTF-8) string. -// -// A conversion may imply a type name change also. -// -// Conversions may to be to or from a type parameter. All types in -// the type set of X.Type() can be converted to all types in the type -// set of Type(). -// -// This operation cannot fail dynamically. -// -// Conversions of untyped string/number/bool constants to a specific -// representation are eliminated during SSA construction. -// -// Pos() returns the ast.CallExpr.Lparen, if the instruction arose -// from an explicit conversion in the source. -// -// Example printed form: -// -// t1 = convert []byte <- string (t0) -type Convert struct { - register - X Value -} - -// The MultiConvert instruction yields the conversion of value X to type -// Type(). Either X.Type() or Type() must be a type parameter. Each -// type in the type set of X.Type() can be converted to each type in the -// type set of Type(). -// -// See the documentation for Convert, ChangeType, and SliceToArrayPointer -// for the conversions that are permitted. Additionally conversions of -// slices to arrays are permitted. -// -// This operation can fail dynamically (see SliceToArrayPointer). -// -// Pos() returns the ast.CallExpr.Lparen, if the instruction arose -// from an explicit conversion in the source. -// -// Example printed form: -// -// t1 = multiconvert D <- S (t0) [*[2]rune <- []rune | string <- []rune] -type MultiConvert struct { - register - X Value - from []*types.Term - to []*types.Term -} - -// ChangeInterface constructs a value of one interface type from a -// value of another interface type known to be assignable to it. -// This operation cannot fail. -// -// Pos() returns the ast.CallExpr.Lparen if the instruction arose from -// an explicit T(e) conversion; the ast.TypeAssertExpr.Lparen if the -// instruction arose from an explicit e.(T) operation; or token.NoPos -// otherwise. -// -// Example printed form: -// -// t1 = change interface interface{} <- I (t0) -type ChangeInterface struct { - register - X Value -} - -// The SliceToArrayPointer instruction yields the conversion of slice X to -// array pointer. -// -// Pos() returns the ast.CallExpr.Lparen, if the instruction arose -// from an explicit conversion in the source. -// -// Conversion may to be to or from a type parameter. 
All types in -// the type set of X.Type() must be a slice types that can be converted to -// all types in the type set of Type() which must all be pointer to array -// types. -// -// This operation can fail dynamically if the length of the slice is less -// than the length of the array. -// -// Example printed form: -// -// t1 = slice to array pointer *[4]byte <- []byte (t0) -type SliceToArrayPointer struct { - register - X Value -} - -// MakeInterface constructs an instance of an interface type from a -// value of a concrete type. -// -// Use Program.MethodSets.MethodSet(X.Type()) to find the method-set -// of X, and Program.MethodValue(m) to find the implementation of a method. -// -// To construct the zero value of an interface type T, use: -// -// NewConst(constant.MakeNil(), T, pos) -// -// Pos() returns the ast.CallExpr.Lparen, if the instruction arose -// from an explicit conversion in the source. -// -// Example printed form: -// -// t1 = make interface{} <- int (42:int) -// t2 = make Stringer <- t0 -type MakeInterface struct { - register - X Value -} - -// The MakeClosure instruction yields a closure value whose code is -// Fn and whose free variables' values are supplied by Bindings. -// -// Type() returns a (possibly named) *types.Signature. -// -// Pos() returns the ast.FuncLit.Type.Func for a function literal -// closure or the ast.SelectorExpr.Sel for a bound method closure. -// -// Example printed form: -// -// t0 = make closure anon@1.2 [x y z] -// t1 = make closure bound$(main.I).add [i] -type MakeClosure struct { - register - Fn Value // always a *Function - Bindings []Value // values for each free variable in Fn.FreeVars -} - -// The MakeMap instruction creates a new hash-table-based map object -// and yields a value of kind map. -// -// Type() returns a (possibly named) *types.Map. -// -// Pos() returns the ast.CallExpr.Lparen, if created by make(map), or -// the ast.CompositeLit.Lbrack if created by a literal. -// -// Example printed form: -// -// t1 = make map[string]int t0 -// t1 = make StringIntMap t0 -type MakeMap struct { - register - Reserve Value // initial space reservation; nil => default -} - -// The MakeChan instruction creates a new channel object and yields a -// value of kind chan. -// -// Type() returns a (possibly named) *types.Chan. -// -// Pos() returns the ast.CallExpr.Lparen for the make(chan) that -// created it. -// -// Example printed form: -// -// t0 = make chan int 0 -// t0 = make IntChan 0 -type MakeChan struct { - register - Size Value // int; size of buffer; zero => synchronous. -} - -// The MakeSlice instruction yields a slice of length Len backed by a -// newly allocated array of length Cap. -// -// Both Len and Cap must be non-nil Values of integer type. -// -// (Alloc(types.Array) followed by Slice will not suffice because -// Alloc can only create arrays of constant length.) -// -// Type() returns a (possibly named) *types.Slice. -// -// Pos() returns the ast.CallExpr.Lparen for the make([]T) that -// created it. -// -// Example printed form: -// -// t1 = make []string 1:int t0 -// t1 = make StringSlice 1:int t0 -type MakeSlice struct { - register - Len Value - Cap Value -} - -// The Slice instruction yields a slice of an existing string, slice -// or *array X between optional integer bounds Low and High. -// -// Dynamically, this instruction panics if X evaluates to a nil *array -// pointer. -// -// Type() returns string if the type of X was string, otherwise a -// *types.Slice with the same element type as X. 
-// -// Pos() returns the ast.SliceExpr.Lbrack if created by a x[:] slice -// operation, the ast.CompositeLit.Lbrace if created by a literal, or -// NoPos if not explicit in the source (e.g. a variadic argument slice). -// -// Example printed form: -// -// t1 = slice t0[1:] -type Slice struct { - register - X Value // slice, string, or *array - Low, High, Max Value // each may be nil -} - -// The FieldAddr instruction yields the address of Field of *struct X. -// -// The field is identified by its index within the field list of the -// struct type of X. -// -// Dynamically, this instruction panics if X evaluates to a nil -// pointer. -// -// Type() returns a (possibly named) *types.Pointer. -// -// Pos() returns the position of the ast.SelectorExpr.Sel for the -// field, if explicit in the source. For implicit selections, returns -// the position of the inducing explicit selection. If produced for a -// struct literal S{f: e}, it returns the position of the colon; for -// S{e} it returns the start of expression e. -// -// Example printed form: -// -// t1 = &t0.name [#1] -type FieldAddr struct { - register - X Value // *struct - Field int // index into CoreType(CoreType(X.Type()).(*types.Pointer).Elem()).(*types.Struct).Fields -} - -// The Field instruction yields the Field of struct X. -// -// The field is identified by its index within the field list of the -// struct type of X; by using numeric indices we avoid ambiguity of -// package-local identifiers and permit compact representations. -// -// Pos() returns the position of the ast.SelectorExpr.Sel for the -// field, if explicit in the source. For implicit selections, returns -// the position of the inducing explicit selection. - -// Example printed form: -// -// t1 = t0.name [#1] -type Field struct { - register - X Value // struct - Field int // index into CoreType(X.Type()).(*types.Struct).Fields -} - -// The IndexAddr instruction yields the address of the element at -// index Index of collection X. Index is an integer expression. -// -// The elements of maps and strings are not addressable; use Lookup (map), -// Index (string), or MapUpdate instead. -// -// Dynamically, this instruction panics if X evaluates to a nil *array -// pointer. -// -// Type() returns a (possibly named) *types.Pointer. -// -// Pos() returns the ast.IndexExpr.Lbrack for the index operation, if -// explicit in the source. -// -// Example printed form: -// -// t2 = &t0[t1] -type IndexAddr struct { - register - X Value // *array, slice or type parameter with types array, *array, or slice. - Index Value // numeric index -} - -// The Index instruction yields element Index of collection X, an array, -// string or type parameter containing an array, a string, a pointer to an, -// array or a slice. -// -// Pos() returns the ast.IndexExpr.Lbrack for the index operation, if -// explicit in the source. -// -// Example printed form: -// -// t2 = t0[t1] -type Index struct { - register - X Value // array, string or type parameter with types array, *array, slice, or string. - Index Value // integer index -} - -// The Lookup instruction yields element Index of collection map X. -// Index is the appropriate key type. -// -// If CommaOk, the result is a 2-tuple of the value above and a -// boolean indicating the result of a map membership test for the key. -// The components of the tuple are accessed using Extract. -// -// Pos() returns the ast.IndexExpr.Lbrack, if explicit in the source. 
-// -// Example printed form: -// -// t2 = t0[t1] -// t5 = t3[t4],ok -type Lookup struct { - register - X Value // map - Index Value // key-typed index - CommaOk bool // return a value,ok pair -} - -// SelectState is a helper for Select. -// It represents one goal state and its corresponding communication. -type SelectState struct { - Dir types.ChanDir // direction of case (SendOnly or RecvOnly) - Chan Value // channel to use (for send or receive) - Send Value // value to send (for send) - Pos token.Pos // position of token.ARROW - DebugNode ast.Node // ast.SendStmt or ast.UnaryExpr(<-) [debug mode] -} - -// The Select instruction tests whether (or blocks until) one -// of the specified sent or received states is entered. -// -// Let n be the number of States for which Dir==RECV and T_i (0<=i string iterator; false => map iterator. -} - -// The TypeAssert instruction tests whether interface value X has type -// AssertedType. -// -// If !CommaOk, on success it returns v, the result of the conversion -// (defined below); on failure it panics. -// -// If CommaOk: on success it returns a pair (v, true) where v is the -// result of the conversion; on failure it returns (z, false) where z -// is AssertedType's zero value. The components of the pair must be -// accessed using the Extract instruction. -// -// If Underlying: tests whether interface value X has the underlying -// type AssertedType. -// -// If AssertedType is a concrete type, TypeAssert checks whether the -// dynamic type in interface X is equal to it, and if so, the result -// of the conversion is a copy of the value in the interface. -// -// If AssertedType is an interface, TypeAssert checks whether the -// dynamic type of the interface is assignable to it, and if so, the -// result of the conversion is a copy of the interface value X. -// If AssertedType is a superinterface of X.Type(), the operation will -// fail iff the operand is nil. (Contrast with ChangeInterface, which -// performs no nil-check.) -// -// Type() reflects the actual type of the result, possibly a -// 2-types.Tuple; AssertedType is the asserted type. -// -// Depending on the TypeAssert's purpose, Pos may return: -// - the ast.CallExpr.Lparen of an explicit T(e) conversion; -// - the ast.TypeAssertExpr.Lparen of an explicit e.(T) operation; -// - the ast.CaseClause.Case of a case of a type-switch statement; -// - the Ident(m).NamePos of an interface method value i.m -// (for which TypeAssert may be used to effect the nil check). -// -// Example printed form: -// -// t1 = typeassert t0.(int) -// t3 = typeassert,ok t2.(T) -type TypeAssert struct { - register - X Value - AssertedType types.Type - CommaOk bool -} - -// The Extract instruction yields component Index of Tuple. -// -// This is used to access the results of instructions with multiple -// return values, such as Call, TypeAssert, Next, UnOp(ARROW) and -// IndexExpr(Map). -// -// Example printed form: -// -// t1 = extract t0 #1 -type Extract struct { - register - Tuple Value - Index int -} - -// Instructions executed for effect. They do not yield a value. -------------------- - -// The Jump instruction transfers control to the sole successor of its -// owning block. -// -// A Jump must be the last instruction of its containing BasicBlock. -// -// Pos() returns NoPos. 
-// -// Example printed form: -// -// jump done -type Jump struct { - anInstruction -} - -// The If instruction transfers control to one of the two successors -// of its owning block, depending on the boolean Cond: the first if -// true, the second if false. -// -// An If instruction must be the last instruction of its containing -// BasicBlock. -// -// Pos() returns NoPos. -// -// Example printed form: -// -// if t0 goto done else body -type If struct { - anInstruction - Cond Value -} - -// The Return instruction returns values and control back to the calling -// function. -// -// len(Results) is always equal to the number of results in the -// function's signature. -// -// If len(Results) > 1, Return returns a tuple value with the specified -// components which the caller must access using Extract instructions. -// -// There is no instruction to return a ready-made tuple like those -// returned by a "value,ok"-mode TypeAssert, Lookup or UnOp(ARROW) or -// a tail-call to a function with multiple result parameters. -// -// Return must be the last instruction of its containing BasicBlock. -// Such a block has no successors. -// -// Pos() returns the ast.ReturnStmt.Return, if explicit in the source. -// -// Example printed form: -// -// return -// return nil:I, 2:int -type Return struct { - anInstruction - Results []Value - pos token.Pos -} - -// The RunDefers instruction pops and invokes the entire stack of -// procedure calls pushed by Defer instructions in this function. -// -// It is legal to encounter multiple 'rundefers' instructions in a -// single control-flow path through a function; this is useful in -// the combined init() function, for example. -// -// Pos() returns NoPos. -// -// Example printed form: -// -// rundefers -type RunDefers struct { - anInstruction -} - -// The Panic instruction initiates a panic with value X. -// -// A Panic instruction must be the last instruction of its containing -// BasicBlock, which must have no successors. -// -// NB: 'go panic(x)' and 'defer panic(x)' do not use this instruction; -// they are treated as calls to a built-in function. -// -// Pos() returns the ast.CallExpr.Lparen if this panic was explicit -// in the source. -// -// Example printed form: -// -// panic t0 -type Panic struct { - anInstruction - X Value // an interface{} - pos token.Pos -} - -// The Go instruction creates a new goroutine and calls the specified -// function within it. -// -// See CallCommon for generic function call documentation. -// -// Pos() returns the ast.GoStmt.Go. -// -// Example printed form: -// -// go println(t0, t1) -// go t3() -// go invoke t5.Println(...t6) -type Go struct { - anInstruction - Call CallCommon - pos token.Pos -} - -// The Defer instruction pushes the specified call onto a stack of -// functions to be called by a RunDefers instruction or by a panic. -// -// See CallCommon for generic function call documentation. -// -// Pos() returns the ast.DeferStmt.Defer. -// -// Example printed form: -// -// defer println(t0, t1) -// defer t3() -// defer invoke t5.Println(...t6) -type Defer struct { - anInstruction - Call CallCommon - pos token.Pos -} - -// The Send instruction sends X on channel Chan. -// -// Pos() returns the ast.SendStmt.Arrow, if explicit in the source. -// -// Example printed form: -// -// send t0 <- t1 -type Send struct { - anInstruction - Chan, X Value - pos token.Pos -} - -// The Store instruction stores Val at address Addr. -// Stores can be of arbitrary types. 
-// -// Pos() returns the position of the source-level construct most closely -// associated with the memory store operation. -// Since implicit memory stores are numerous and varied and depend upon -// implementation choices, the details are not specified. -// -// Example printed form: -// -// *x = y -type Store struct { - anInstruction - Addr Value - Val Value - pos token.Pos -} - -// The MapUpdate instruction updates the association of Map[Key] to -// Value. -// -// Pos() returns the ast.KeyValueExpr.Colon or ast.IndexExpr.Lbrack, -// if explicit in the source. -// -// Example printed form: -// -// t0[t1] = t2 -type MapUpdate struct { - anInstruction - Map Value - Key Value - Value Value - pos token.Pos -} - -// A DebugRef instruction maps a source-level expression Expr to the -// SSA value X that represents the value (!IsAddr) or address (IsAddr) -// of that expression. -// -// DebugRef is a pseudo-instruction: it has no dynamic effect. -// -// Pos() returns Expr.Pos(), the start position of the source-level -// expression. This is not the same as the "designated" token as -// documented at Value.Pos(). e.g. CallExpr.Pos() does not return the -// position of the ("designated") Lparen token. -// -// If Expr is an *ast.Ident denoting a var or func, Object() returns -// the object; though this information can be obtained from the type -// checker, including it here greatly facilitates debugging. -// For non-Ident expressions, Object() returns nil. -// -// DebugRefs are generated only for functions built with debugging -// enabled; see Package.SetDebugMode() and the GlobalDebug builder -// mode flag. -// -// DebugRefs are not emitted for ast.Idents referring to constants or -// predeclared identifiers, since they are trivial and numerous. -// Nor are they emitted for ast.ParenExprs. -// -// (By representing these as instructions, rather than out-of-band, -// consistency is maintained during transformation passes by the -// ordinary SSA renaming machinery.) -// -// Example printed form: -// -// ; *ast.CallExpr @ 102:9 is t5 -// ; var x float64 @ 109:72 is x -// ; address of *ast.CompositeLit @ 216:10 is t0 -type DebugRef struct { - // TODO(generics): Reconsider what DebugRefs are for generics. - anInstruction - Expr ast.Expr // the referring expression (never *ast.ParenExpr) - object types.Object // the identity of the source var/func - IsAddr bool // Expr is addressable and X is the address it denotes - X Value // the value or address of Expr -} - -// Embeddable mix-ins and helpers for common parts of other structs. ----------- - -// register is a mix-in embedded by all SSA values that are also -// instructions, i.e. virtual registers, and provides a uniform -// implementation of most of the Value interface: Value.Name() is a -// numbered register (e.g. "t0"); the other methods are field accessors. -// -// Temporary names are automatically assigned to each register on -// completion of building a function in SSA form. -// -// Clients must not assume that the 'id' value (and the Name() derived -// from it) is unique within a function. As always in this API, -// semantics are determined only by identity; names exist only to -// facilitate debugging. -type register struct { - anInstruction - num int // "name" of virtual register, e.g. "t0". Not guaranteed unique. - typ types.Type // type of virtual register - pos token.Pos // position of source expression, or NoPos - referrers []Instruction -} - -// anInstruction is a mix-in embedded by all Instructions. 
-// It provides the implementations of the Block and setBlock methods. -type anInstruction struct { - block *BasicBlock // the basic block of this instruction -} - -// CallCommon is contained by Go, Defer and Call to hold the -// common parts of a function or method call. -// -// Each CallCommon exists in one of two modes, function call and -// interface method invocation, or "call" and "invoke" for short. -// -// 1. "call" mode: when Method is nil (!IsInvoke), a CallCommon -// represents an ordinary function call of the value in Value, -// which may be a *Builtin, a *Function or any other value of kind -// 'func'. -// -// Value may be one of: -// -// (a) a *Function, indicating a statically dispatched call -// to a package-level function, an anonymous function, or -// a method of a named type. -// (b) a *MakeClosure, indicating an immediately applied -// function literal with free variables. -// (c) a *Builtin, indicating a statically dispatched call -// to a built-in function. -// (d) any other value, indicating a dynamically dispatched -// function call. -// -// StaticCallee returns the identity of the callee in cases -// (a) and (b), nil otherwise. -// -// Args contains the arguments to the call. If Value is a method, -// Args[0] contains the receiver parameter. -// -// Example printed form: -// -// t2 = println(t0, t1) -// go t3() -// defer t5(...t6) -// -// 2. "invoke" mode: when Method is non-nil (IsInvoke), a CallCommon -// represents a dynamically dispatched call to an interface method. -// In this mode, Value is the interface value and Method is the -// interface's abstract method. The interface value may be a type -// parameter. Note: an interface method may be shared by multiple -// interfaces due to embedding; Value.Type() provides the specific -// interface used for this call. -// -// Value is implicitly supplied to the concrete method implementation -// as the receiver parameter; in other words, Args[0] holds not the -// receiver but the first true argument. -// -// Example printed form: -// -// t1 = invoke t0.String() -// go invoke t3.Run(t2) -// defer invoke t4.Handle(...t5) -// -// For all calls to variadic functions (Signature().Variadic()), -// the last element of Args is a slice. -type CallCommon struct { - Value Value // receiver (invoke mode) or func value (call mode) - Method *types.Func // interface method (invoke mode) - Args []Value // actual parameters (in static method call, includes receiver) - pos token.Pos // position of CallExpr.Lparen, iff explicit in source -} - -// IsInvoke returns true if this call has "invoke" (not "call") mode. -func (c *CallCommon) IsInvoke() bool { - return c.Method != nil -} - -func (c *CallCommon) Pos() token.Pos { return c.pos } - -// Signature returns the signature of the called function. -// -// For an "invoke"-mode call, the signature of the interface method is -// returned. -// -// In either "call" or "invoke" mode, if the callee is a method, its -// receiver is represented by sig.Recv, not sig.Params().At(0). -func (c *CallCommon) Signature() *types.Signature { - if c.Method != nil { - return c.Method.Type().(*types.Signature) - } - return typeparams.CoreType(c.Value.Type()).(*types.Signature) -} - -// StaticCallee returns the callee if this is a trivially static -// "call"-mode call to a function. 
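Editor's note: the CallCommon documentation above distinguishes "call" mode from "invoke" mode and explains StaticCallee. The following sketch, which is not part of the vendored file or of this diff (reportCalls is a hypothetical name), classifies every call site in a built function along those lines.

package ssalookup

import (
	"fmt"

	"golang.org/x/tools/go/ssa"
)

// reportCalls prints, for every Call, Go, and Defer instruction in fn, whether it is a
// statically dispatched call, a dynamic function call, or an interface method invocation.
func reportCalls(fn *ssa.Function) {
	for _, b := range fn.Blocks {
		for _, instr := range b.Instrs {
			call, ok := instr.(ssa.CallInstruction)
			if !ok {
				continue
			}
			common := call.Common()
			switch {
			case common.IsInvoke():
				// "invoke" mode: Value is the interface value, Method the abstract method.
				fmt.Printf("dynamic invoke of %s\n", common.Method.FullName())
			case common.StaticCallee() != nil:
				// "call" mode with a known callee (*Function or immediately applied closure).
				fmt.Printf("static call to %s\n", common.StaticCallee())
			default:
				// "call" mode through an arbitrary func value (or a *Builtin).
				fmt.Printf("dynamic call through %s\n", common.Value.Name())
			}
		}
	}
}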
-func (c *CallCommon) StaticCallee() *Function { - switch fn := c.Value.(type) { - case *Function: - return fn - case *MakeClosure: - return fn.Fn.(*Function) - } - return nil -} - -// Description returns a description of the mode of this call suitable -// for a user interface, e.g., "static method call". -func (c *CallCommon) Description() string { - switch fn := c.Value.(type) { - case *Builtin: - return "built-in function call" - case *MakeClosure: - return "static function closure call" - case *Function: - if fn.Signature.Recv() != nil { - return "static method call" - } - return "static function call" - } - if c.IsInvoke() { - return "dynamic method call" // ("invoke" mode) - } - return "dynamic function call" -} - -// The CallInstruction interface, implemented by *Go, *Defer and *Call, -// exposes the common parts of function-calling instructions, -// yet provides a way back to the Value defined by *Call alone. -type CallInstruction interface { - Instruction - Common() *CallCommon // returns the common parts of the call - Value() *Call // returns the result value of the call (*Call) or nil (*Go, *Defer) -} - -func (s *Call) Common() *CallCommon { return &s.Call } -func (s *Defer) Common() *CallCommon { return &s.Call } -func (s *Go) Common() *CallCommon { return &s.Call } - -func (s *Call) Value() *Call { return s } -func (s *Defer) Value() *Call { return nil } -func (s *Go) Value() *Call { return nil } - -func (v *Builtin) Type() types.Type { return v.sig } -func (v *Builtin) Name() string { return v.name } -func (*Builtin) Referrers() *[]Instruction { return nil } -func (v *Builtin) Pos() token.Pos { return token.NoPos } -func (v *Builtin) Object() types.Object { return types.Universe.Lookup(v.name) } -func (v *Builtin) Parent() *Function { return nil } - -func (v *FreeVar) Type() types.Type { return v.typ } -func (v *FreeVar) Name() string { return v.name } -func (v *FreeVar) Referrers() *[]Instruction { return &v.referrers } -func (v *FreeVar) Pos() token.Pos { return v.pos } -func (v *FreeVar) Parent() *Function { return v.parent } - -func (v *Global) Type() types.Type { return v.typ } -func (v *Global) Name() string { return v.name } -func (v *Global) Parent() *Function { return nil } -func (v *Global) Pos() token.Pos { return v.pos } -func (v *Global) Referrers() *[]Instruction { return nil } -func (v *Global) Token() token.Token { return token.VAR } -func (v *Global) Object() types.Object { return v.object } -func (v *Global) String() string { return v.RelString(nil) } -func (v *Global) Package() *Package { return v.Pkg } -func (v *Global) RelString(from *types.Package) string { return relString(v, from) } - -func (v *Function) Name() string { return v.name } -func (v *Function) Type() types.Type { return v.Signature } -func (v *Function) Pos() token.Pos { return v.pos } -func (v *Function) Token() token.Token { return token.FUNC } -func (v *Function) Object() types.Object { - if v.object != nil { - return types.Object(v.object) - } - return nil -} -func (v *Function) String() string { return v.RelString(nil) } -func (v *Function) Package() *Package { return v.Pkg } -func (v *Function) Parent() *Function { return v.parent } -func (v *Function) Referrers() *[]Instruction { - if v.parent != nil { - return &v.referrers - } - return nil -} - -// TypeParams are the function's type parameters if generic or the -// type parameters that were instantiated if fn is an instantiation. 
-func (fn *Function) TypeParams() *types.TypeParamList { - return fn.typeparams -} - -// TypeArgs are the types that TypeParams() were instantiated by to create fn -// from fn.Origin(). -func (fn *Function) TypeArgs() []types.Type { return fn.typeargs } - -// Origin returns the generic function from which fn was instantiated, -// or nil if fn is not an instantiation. -func (fn *Function) Origin() *Function { - if fn.parent != nil && len(fn.typeargs) > 0 { - // Nested functions are BUILT at a different time than their instances. - // Build declared package if not yet BUILT. This is not an expected use - // case, but is simple and robust. - fn.declaredPackage().Build() - } - return origin(fn) -} - -// origin is the function that fn is an instantiation of. Returns nil if fn is -// not an instantiation. -// -// Precondition: fn and the origin function are done building. -func origin(fn *Function) *Function { - if fn.parent != nil && len(fn.typeargs) > 0 { - return origin(fn.parent).AnonFuncs[fn.anonIdx] - } - return fn.topLevelOrigin -} - -func (v *Parameter) Type() types.Type { return v.typ } -func (v *Parameter) Name() string { return v.name } -func (v *Parameter) Object() types.Object { return v.object } -func (v *Parameter) Referrers() *[]Instruction { return &v.referrers } -func (v *Parameter) Pos() token.Pos { return v.object.Pos() } -func (v *Parameter) Parent() *Function { return v.parent } - -func (v *Alloc) Type() types.Type { return v.typ } -func (v *Alloc) Referrers() *[]Instruction { return &v.referrers } -func (v *Alloc) Pos() token.Pos { return v.pos } - -func (v *register) Type() types.Type { return v.typ } -func (v *register) setType(typ types.Type) { v.typ = typ } -func (v *register) Name() string { return fmt.Sprintf("t%d", v.num) } -func (v *register) setNum(num int) { v.num = num } -func (v *register) Referrers() *[]Instruction { return &v.referrers } -func (v *register) Pos() token.Pos { return v.pos } -func (v *register) setPos(pos token.Pos) { v.pos = pos } - -func (v *anInstruction) Parent() *Function { return v.block.parent } -func (v *anInstruction) Block() *BasicBlock { return v.block } -func (v *anInstruction) setBlock(block *BasicBlock) { v.block = block } -func (v *anInstruction) Referrers() *[]Instruction { return nil } - -func (t *Type) Name() string { return t.object.Name() } -func (t *Type) Pos() token.Pos { return t.object.Pos() } -func (t *Type) Type() types.Type { return t.object.Type() } -func (t *Type) Token() token.Token { return token.TYPE } -func (t *Type) Object() types.Object { return t.object } -func (t *Type) String() string { return t.RelString(nil) } -func (t *Type) Package() *Package { return t.pkg } -func (t *Type) RelString(from *types.Package) string { return relString(t, from) } - -func (c *NamedConst) Name() string { return c.object.Name() } -func (c *NamedConst) Pos() token.Pos { return c.object.Pos() } -func (c *NamedConst) String() string { return c.RelString(nil) } -func (c *NamedConst) Type() types.Type { return c.object.Type() } -func (c *NamedConst) Token() token.Token { return token.CONST } -func (c *NamedConst) Object() types.Object { return c.object } -func (c *NamedConst) Package() *Package { return c.pkg } -func (c *NamedConst) RelString(from *types.Package) string { return relString(c, from) } - -func (d *DebugRef) Object() types.Object { return d.object } - -// Func returns the package-level function of the specified name, -// or nil if not found. 
-func (p *Package) Func(name string) (f *Function) { - f, _ = p.Members[name].(*Function) - return -} - -// Var returns the package-level variable of the specified name, -// or nil if not found. -func (p *Package) Var(name string) (g *Global) { - g, _ = p.Members[name].(*Global) - return -} - -// Const returns the package-level constant of the specified name, -// or nil if not found. -func (p *Package) Const(name string) (c *NamedConst) { - c, _ = p.Members[name].(*NamedConst) - return -} - -// Type returns the package-level type of the specified name, -// or nil if not found. -func (p *Package) Type(name string) (t *Type) { - t, _ = p.Members[name].(*Type) - return -} - -func (v *Call) Pos() token.Pos { return v.Call.pos } -func (s *Defer) Pos() token.Pos { return s.pos } -func (s *Go) Pos() token.Pos { return s.pos } -func (s *MapUpdate) Pos() token.Pos { return s.pos } -func (s *Panic) Pos() token.Pos { return s.pos } -func (s *Return) Pos() token.Pos { return s.pos } -func (s *Send) Pos() token.Pos { return s.pos } -func (s *Store) Pos() token.Pos { return s.pos } -func (s *If) Pos() token.Pos { return token.NoPos } -func (s *Jump) Pos() token.Pos { return token.NoPos } -func (s *RunDefers) Pos() token.Pos { return token.NoPos } -func (s *DebugRef) Pos() token.Pos { return s.Expr.Pos() } - -// Operands. - -func (v *Alloc) Operands(rands []*Value) []*Value { - return rands -} - -func (v *BinOp) Operands(rands []*Value) []*Value { - return append(rands, &v.X, &v.Y) -} - -func (c *CallCommon) Operands(rands []*Value) []*Value { - rands = append(rands, &c.Value) - for i := range c.Args { - rands = append(rands, &c.Args[i]) - } - return rands -} - -func (s *Go) Operands(rands []*Value) []*Value { - return s.Call.Operands(rands) -} - -func (s *Call) Operands(rands []*Value) []*Value { - return s.Call.Operands(rands) -} - -func (s *Defer) Operands(rands []*Value) []*Value { - return s.Call.Operands(rands) -} - -func (v *ChangeInterface) Operands(rands []*Value) []*Value { - return append(rands, &v.X) -} - -func (v *ChangeType) Operands(rands []*Value) []*Value { - return append(rands, &v.X) -} - -func (v *Convert) Operands(rands []*Value) []*Value { - return append(rands, &v.X) -} - -func (v *MultiConvert) Operands(rands []*Value) []*Value { - return append(rands, &v.X) -} - -func (v *SliceToArrayPointer) Operands(rands []*Value) []*Value { - return append(rands, &v.X) -} - -func (s *DebugRef) Operands(rands []*Value) []*Value { - return append(rands, &s.X) -} - -func (v *Extract) Operands(rands []*Value) []*Value { - return append(rands, &v.Tuple) -} - -func (v *Field) Operands(rands []*Value) []*Value { - return append(rands, &v.X) -} - -func (v *FieldAddr) Operands(rands []*Value) []*Value { - return append(rands, &v.X) -} - -func (s *If) Operands(rands []*Value) []*Value { - return append(rands, &s.Cond) -} - -func (v *Index) Operands(rands []*Value) []*Value { - return append(rands, &v.X, &v.Index) -} - -func (v *IndexAddr) Operands(rands []*Value) []*Value { - return append(rands, &v.X, &v.Index) -} - -func (*Jump) Operands(rands []*Value) []*Value { - return rands -} - -func (v *Lookup) Operands(rands []*Value) []*Value { - return append(rands, &v.X, &v.Index) -} - -func (v *MakeChan) Operands(rands []*Value) []*Value { - return append(rands, &v.Size) -} - -func (v *MakeClosure) Operands(rands []*Value) []*Value { - rands = append(rands, &v.Fn) - for i := range v.Bindings { - rands = append(rands, &v.Bindings[i]) - } - return rands -} - -func (v *MakeInterface) Operands(rands []*Value) 
[]*Value { - return append(rands, &v.X) -} - -func (v *MakeMap) Operands(rands []*Value) []*Value { - return append(rands, &v.Reserve) -} - -func (v *MakeSlice) Operands(rands []*Value) []*Value { - return append(rands, &v.Len, &v.Cap) -} - -func (v *MapUpdate) Operands(rands []*Value) []*Value { - return append(rands, &v.Map, &v.Key, &v.Value) -} - -func (v *Next) Operands(rands []*Value) []*Value { - return append(rands, &v.Iter) -} - -func (s *Panic) Operands(rands []*Value) []*Value { - return append(rands, &s.X) -} - -func (v *Phi) Operands(rands []*Value) []*Value { - for i := range v.Edges { - rands = append(rands, &v.Edges[i]) - } - return rands -} - -func (v *Range) Operands(rands []*Value) []*Value { - return append(rands, &v.X) -} - -func (s *Return) Operands(rands []*Value) []*Value { - for i := range s.Results { - rands = append(rands, &s.Results[i]) - } - return rands -} - -func (*RunDefers) Operands(rands []*Value) []*Value { - return rands -} - -func (v *Select) Operands(rands []*Value) []*Value { - for i := range v.States { - rands = append(rands, &v.States[i].Chan, &v.States[i].Send) - } - return rands -} - -func (s *Send) Operands(rands []*Value) []*Value { - return append(rands, &s.Chan, &s.X) -} - -func (v *Slice) Operands(rands []*Value) []*Value { - return append(rands, &v.X, &v.Low, &v.High, &v.Max) -} - -func (s *Store) Operands(rands []*Value) []*Value { - return append(rands, &s.Addr, &s.Val) -} - -func (v *TypeAssert) Operands(rands []*Value) []*Value { - return append(rands, &v.X) -} - -func (v *UnOp) Operands(rands []*Value) []*Value { - return append(rands, &v.X) -} - -// Non-Instruction Values: -func (v *Builtin) Operands(rands []*Value) []*Value { return rands } -func (v *FreeVar) Operands(rands []*Value) []*Value { return rands } -func (v *Const) Operands(rands []*Value) []*Value { return rands } -func (v *Function) Operands(rands []*Value) []*Value { return rands } -func (v *Global) Operands(rands []*Value) []*Value { return rands } -func (v *Parameter) Operands(rands []*Value) []*Value { return rands } diff --git a/vendor/golang.org/x/tools/go/ssa/ssautil/load.go b/vendor/golang.org/x/tools/go/ssa/ssautil/load.go deleted file mode 100644 index 3daa67a07..000000000 --- a/vendor/golang.org/x/tools/go/ssa/ssautil/load.go +++ /dev/null @@ -1,214 +0,0 @@ -// Copyright 2015 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package ssautil - -// This file defines utility functions for constructing programs in SSA form. - -import ( - "go/ast" - "go/token" - "go/types" - - "golang.org/x/tools/go/loader" - "golang.org/x/tools/go/packages" - "golang.org/x/tools/go/ssa" - "golang.org/x/tools/internal/versions" -) - -// Packages creates an SSA program for a set of packages. -// -// The packages must have been loaded from source syntax using the -// [packages.Load] function in [packages.LoadSyntax] or -// [packages.LoadAllSyntax] mode. -// -// Packages creates an SSA package for each well-typed package in the -// initial list, plus all their dependencies. The resulting list of -// packages corresponds to the list of initial packages, and may contain -// a nil if SSA code could not be constructed for the corresponding initial -// package due to type errors. -// -// Code for bodies of functions is not built until [Program.Build] is -// called on the resulting Program. SSA code is constructed only for -// the initial packages with well-typed syntax trees. 
-// -// The mode parameter controls diagnostics and checking during SSA construction. -func Packages(initial []*packages.Package, mode ssa.BuilderMode) (*ssa.Program, []*ssa.Package) { - // TODO(adonovan): opt: this calls CreatePackage far more than - // necessary: for all dependencies, not just the (non-initial) - // direct dependencies of the initial packages. - // - // But can it reasonably be changed without breaking the - // spirit and/or letter of the law above? Clients may notice - // if we call CreatePackage less, as methods like - // Program.FuncValue will return nil. Or must we provide a new - // function (and perhaps deprecate this one)? Is it worth it? - // - // Tim King makes the interesting point that it would be - // possible to entirely alleviate the client from the burden - // of calling CreatePackage for non-syntax packages, if we - // were to treat vars and funcs lazily in the same way we now - // treat methods. (In essence, try to move away from the - // notion of ssa.Packages, and make the Program answer - // all reasonable questions about any types.Object.) - - return doPackages(initial, mode, false) -} - -// AllPackages creates an SSA program for a set of packages plus all -// their dependencies. -// -// The packages must have been loaded from source syntax using the -// [packages.Load] function in [packages.LoadAllSyntax] mode. -// -// AllPackages creates an SSA package for each well-typed package in the -// initial list, plus all their dependencies. The resulting list of -// packages corresponds to the list of initial packages, and may contain -// a nil if SSA code could not be constructed for the corresponding -// initial package due to type errors. -// -// Code for bodies of functions is not built until Build is called on -// the resulting Program. SSA code is constructed for all packages with -// well-typed syntax trees. -// -// The mode parameter controls diagnostics and checking during SSA construction. -func AllPackages(initial []*packages.Package, mode ssa.BuilderMode) (*ssa.Program, []*ssa.Package) { - return doPackages(initial, mode, true) -} - -func doPackages(initial []*packages.Package, mode ssa.BuilderMode, deps bool) (*ssa.Program, []*ssa.Package) { - - var fset *token.FileSet - if len(initial) > 0 { - fset = initial[0].Fset - } - - prog := ssa.NewProgram(fset, mode) - - isInitial := make(map[*packages.Package]bool, len(initial)) - for _, p := range initial { - isInitial[p] = true - } - - ssamap := make(map[*packages.Package]*ssa.Package) - packages.Visit(initial, nil, func(p *packages.Package) { - if p.Types != nil && !p.IllTyped { - var files []*ast.File - var info *types.Info - if deps || isInitial[p] { - files = p.Syntax - info = p.TypesInfo - } - ssamap[p] = prog.CreatePackage(p.Types, files, info, true) - } - }) - - var ssapkgs []*ssa.Package - for _, p := range initial { - ssapkgs = append(ssapkgs, ssamap[p]) // may be nil - } - return prog, ssapkgs -} - -// CreateProgram returns a new program in SSA form, given a program -// loaded from source. An SSA package is created for each transitively -// error-free package of lprog. -// -// Code for bodies of functions is not built until Build is called -// on the result. -// -// The mode parameter controls diagnostics and checking during SSA construction. -// -// Deprecated: Use [golang.org/x/tools/go/packages] and the [Packages] -// function instead; see ssa.Example_loadPackages. 
-func CreateProgram(lprog *loader.Program, mode ssa.BuilderMode) *ssa.Program { - prog := ssa.NewProgram(lprog.Fset, mode) - - for _, info := range lprog.AllPackages { - if info.TransitivelyErrorFree { - prog.CreatePackage(info.Pkg, info.Files, &info.Info, info.Importable) - } - } - - return prog -} - -// BuildPackage builds an SSA program with SSA intermediate -// representation (IR) for all functions of a single package. -// -// It populates pkg by type-checking the specified file syntax trees. All -// dependencies are loaded using the importer specified by tc, which -// typically loads compiler export data; SSA code cannot be built for -// those packages. BuildPackage then constructs an [ssa.Program] with all -// dependency packages created, and builds and returns the SSA package -// corresponding to pkg. -// -// The caller must have set pkg.Path to the import path. -// -// The operation fails if there were any type-checking or import errors. -// -// See ../example_test.go for an example. -func BuildPackage(tc *types.Config, fset *token.FileSet, pkg *types.Package, files []*ast.File, mode ssa.BuilderMode) (*ssa.Package, *types.Info, error) { - if fset == nil { - panic("no token.FileSet") - } - if pkg.Path() == "" { - panic("package has no import path") - } - - info := &types.Info{ - Types: make(map[ast.Expr]types.TypeAndValue), - Defs: make(map[*ast.Ident]types.Object), - Uses: make(map[*ast.Ident]types.Object), - Implicits: make(map[ast.Node]types.Object), - Instances: make(map[*ast.Ident]types.Instance), - Scopes: make(map[ast.Node]*types.Scope), - Selections: make(map[*ast.SelectorExpr]*types.Selection), - } - versions.InitFileVersions(info) - if err := types.NewChecker(tc, fset, pkg, info).Files(files); err != nil { - return nil, nil, err - } - - prog := ssa.NewProgram(fset, mode) - - // Create SSA packages for all imports. - // Order is not significant. - created := make(map[*types.Package]bool) - var createAll func(pkgs []*types.Package) - createAll = func(pkgs []*types.Package) { - for _, p := range pkgs { - if !created[p] { - created[p] = true - prog.CreatePackage(p, nil, nil, true) - createAll(p.Imports()) - } - } - } - createAll(pkg.Imports()) - - // TODO(adonovan): we could replace createAll with just: - // - // // Create SSA packages for all imports. - // for _, p := range pkg.Imports() { - // prog.CreatePackage(p, nil, nil, true) - // } - // - // (with minor changes to changes to ../builder_test.go as - // shown in CL 511715 PS 10.) But this would strictly violate - // the letter of the doc comment above, which says "all - // dependencies created". - // - // Tim makes the good point with some extra work we could - // remove the need for any CreatePackage calls except the - // ones with syntax (i.e. primary packages). Of course - // You wouldn't have ssa.Packages and Members for as - // many things but no-one really uses that anyway. - // I wish I had done this from the outset. - - // Create and build the primary package. - ssapkg := prog.CreatePackage(pkg, files, info, false) - ssapkg.Build() - return ssapkg, info, nil -} diff --git a/vendor/golang.org/x/tools/go/ssa/ssautil/switch.go b/vendor/golang.org/x/tools/go/ssa/ssautil/switch.go deleted file mode 100644 index dd4b04e76..000000000 --- a/vendor/golang.org/x/tools/go/ssa/ssautil/switch.go +++ /dev/null @@ -1,230 +0,0 @@ -// Copyright 2013 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
- -package ssautil - -// This file implements discovery of switch and type-switch constructs -// from low-level control flow. -// -// Many techniques exist for compiling a high-level switch with -// constant cases to efficient machine code. The optimal choice will -// depend on the data type, the specific case values, the code in the -// body of each case, and the hardware. -// Some examples: -// - a lookup table (for a switch that maps constants to constants) -// - a computed goto -// - a binary tree -// - a perfect hash -// - a two-level switch (to partition constant strings by their first byte). - -import ( - "bytes" - "fmt" - "go/token" - "go/types" - - "golang.org/x/tools/go/ssa" -) - -// A ConstCase represents a single constant comparison. -// It is part of a Switch. -type ConstCase struct { - Block *ssa.BasicBlock // block performing the comparison - Body *ssa.BasicBlock // body of the case - Value *ssa.Const // case comparand -} - -// A TypeCase represents a single type assertion. -// It is part of a Switch. -type TypeCase struct { - Block *ssa.BasicBlock // block performing the type assert - Body *ssa.BasicBlock // body of the case - Type types.Type // case type - Binding ssa.Value // value bound by this case -} - -// A Switch is a logical high-level control flow operation -// (a multiway branch) discovered by analysis of a CFG containing -// only if/else chains. It is not part of the ssa.Instruction set. -// -// One of ConstCases and TypeCases has length >= 2; -// the other is nil. -// -// In a value switch, the list of cases may contain duplicate constants. -// A type switch may contain duplicate types, or types assignable -// to an interface type also in the list. -// TODO(adonovan): eliminate such duplicates. -type Switch struct { - Start *ssa.BasicBlock // block containing start of if/else chain - X ssa.Value // the switch operand - ConstCases []ConstCase // ordered list of constant comparisons - TypeCases []TypeCase // ordered list of type assertions - Default *ssa.BasicBlock // successor if all comparisons fail -} - -func (sw *Switch) String() string { - // We represent each block by the String() of its - // first Instruction, e.g. "print(42:int)". - var buf bytes.Buffer - if sw.ConstCases != nil { - fmt.Fprintf(&buf, "switch %s {\n", sw.X.Name()) - for _, c := range sw.ConstCases { - fmt.Fprintf(&buf, "case %s: %s\n", c.Value, c.Body.Instrs[0]) - } - } else { - fmt.Fprintf(&buf, "switch %s.(type) {\n", sw.X.Name()) - for _, c := range sw.TypeCases { - fmt.Fprintf(&buf, "case %s %s: %s\n", - c.Binding.Name(), c.Type, c.Body.Instrs[0]) - } - } - if sw.Default != nil { - fmt.Fprintf(&buf, "default: %s\n", sw.Default.Instrs[0]) - } - fmt.Fprintf(&buf, "}") - return buf.String() -} - -// Switches examines the control-flow graph of fn and returns the -// set of inferred value and type switches. A value switch tests an -// ssa.Value for equality against two or more compile-time constant -// values. Switches involving link-time constants (addresses) are -// ignored. A type switch type-asserts an ssa.Value against two or -// more types. -// -// The switches are returned in dominance order. -// -// The resulting switches do not necessarily correspond to uses of the -// 'switch' keyword in the source: for example, a single source-level -// switch statement with non-constant cases may result in zero, one or -// many Switches, one per plural sequence of constant cases. -// Switches may even be inferred from if/else- or goto-based control flow. 
-// (In general, the control flow constructs of the source program -// cannot be faithfully reproduced from the SSA representation.) -func Switches(fn *ssa.Function) []Switch { - // Traverse the CFG in dominance order, so we don't - // enter an if/else-chain in the middle. - var switches []Switch - seen := make(map[*ssa.BasicBlock]bool) // TODO(adonovan): opt: use ssa.blockSet - for _, b := range fn.DomPreorder() { - if x, k := isComparisonBlock(b); x != nil { - // Block b starts a switch. - sw := Switch{Start: b, X: x} - valueSwitch(&sw, k, seen) - if len(sw.ConstCases) > 1 { - switches = append(switches, sw) - } - } - - if y, x, T := isTypeAssertBlock(b); y != nil { - // Block b starts a type switch. - sw := Switch{Start: b, X: x} - typeSwitch(&sw, y, T, seen) - if len(sw.TypeCases) > 1 { - switches = append(switches, sw) - } - } - } - return switches -} - -func valueSwitch(sw *Switch, k *ssa.Const, seen map[*ssa.BasicBlock]bool) { - b := sw.Start - x := sw.X - for x == sw.X { - if seen[b] { - break - } - seen[b] = true - - sw.ConstCases = append(sw.ConstCases, ConstCase{ - Block: b, - Body: b.Succs[0], - Value: k, - }) - b = b.Succs[1] - if len(b.Instrs) > 2 { - // Block b contains not just 'if x == k', - // so it may have side effects that - // make it unsafe to elide. - break - } - if len(b.Preds) != 1 { - // Block b has multiple predecessors, - // so it cannot be treated as a case. - break - } - x, k = isComparisonBlock(b) - } - sw.Default = b -} - -func typeSwitch(sw *Switch, y ssa.Value, T types.Type, seen map[*ssa.BasicBlock]bool) { - b := sw.Start - x := sw.X - for x == sw.X { - if seen[b] { - break - } - seen[b] = true - - sw.TypeCases = append(sw.TypeCases, TypeCase{ - Block: b, - Body: b.Succs[0], - Type: T, - Binding: y, - }) - b = b.Succs[1] - if len(b.Instrs) > 4 { - // Block b contains not just - // {TypeAssert; Extract #0; Extract #1; If} - // so it may have side effects that - // make it unsafe to elide. - break - } - if len(b.Preds) != 1 { - // Block b has multiple predecessors, - // so it cannot be treated as a case. - break - } - y, x, T = isTypeAssertBlock(b) - } - sw.Default = b -} - -// isComparisonBlock returns the operands (v, k) if a block ends with -// a comparison v==k, where k is a compile-time constant. -func isComparisonBlock(b *ssa.BasicBlock) (v ssa.Value, k *ssa.Const) { - if n := len(b.Instrs); n >= 2 { - if i, ok := b.Instrs[n-1].(*ssa.If); ok { - if binop, ok := i.Cond.(*ssa.BinOp); ok && binop.Block() == b && binop.Op == token.EQL { - if k, ok := binop.Y.(*ssa.Const); ok { - return binop.X, k - } - if k, ok := binop.X.(*ssa.Const); ok { - return binop.Y, k - } - } - } - } - return -} - -// isTypeAssertBlock returns the operands (y, x, T) if a block ends with -// a type assertion "if y, ok := x.(T); ok {". -func isTypeAssertBlock(b *ssa.BasicBlock) (y, x ssa.Value, T types.Type) { - if n := len(b.Instrs); n >= 4 { - if i, ok := b.Instrs[n-1].(*ssa.If); ok { - if ext1, ok := i.Cond.(*ssa.Extract); ok && ext1.Block() == b && ext1.Index == 1 { - if ta, ok := ext1.Tuple.(*ssa.TypeAssert); ok && ta.Block() == b { - // hack: relies upon instruction ordering. 
- if ext0, ok := b.Instrs[n-3].(*ssa.Extract); ok { - return ext0, ta.X, ta.AssertedType - } - } - } - } - } - return -} diff --git a/vendor/golang.org/x/tools/go/ssa/ssautil/visit.go b/vendor/golang.org/x/tools/go/ssa/ssautil/visit.go deleted file mode 100644 index b4feb42cb..000000000 --- a/vendor/golang.org/x/tools/go/ssa/ssautil/visit.go +++ /dev/null @@ -1,157 +0,0 @@ -// Copyright 2013 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package ssautil // import "golang.org/x/tools/go/ssa/ssautil" - -import ( - "go/ast" - "go/types" - - "golang.org/x/tools/go/ssa" - - _ "unsafe" // for linkname hack -) - -// This file defines utilities for visiting the SSA representation of -// a Program. -// -// TODO(adonovan): test coverage. - -// AllFunctions finds and returns the set of functions potentially -// needed by program prog, as determined by a simple linker-style -// reachability algorithm starting from the members and method-sets of -// each package. The result may include anonymous functions and -// synthetic wrappers. -// -// Precondition: all packages are built. -// -// TODO(adonovan): this function is underspecified. It doesn't -// actually work like a linker, which computes reachability from main -// using something like go/callgraph/cha (without materializing the -// call graph). In fact, it treats all public functions and all -// methods of public non-parameterized types as roots, even though -// they may be unreachable--but only in packages created from syntax. -// -// I think we should deprecate AllFunctions function in favor of two -// clearly defined ones: -// -// 1. The first would efficiently compute CHA reachability from a set -// of main packages, making it suitable for a whole-program -// analysis context with InstantiateGenerics, in conjunction with -// Program.Build. -// -// 2. The second would return only the set of functions corresponding -// to source Func{Decl,Lit} syntax, like SrcFunctions in -// go/analysis/passes/buildssa; this is suitable for -// package-at-a-time (or handful of packages) context. -// ssa.Package could easily expose it as a field. -// -// We could add them unexported for now and use them via the linkname hack. -func AllFunctions(prog *ssa.Program) map[*ssa.Function]bool { - seen := make(map[*ssa.Function]bool) - - var function func(fn *ssa.Function) - function = func(fn *ssa.Function) { - if !seen[fn] { - seen[fn] = true - var buf [10]*ssa.Value // avoid alloc in common case - for _, b := range fn.Blocks { - for _, instr := range b.Instrs { - for _, op := range instr.Operands(buf[:0]) { - if fn, ok := (*op).(*ssa.Function); ok { - function(fn) - } - } - } - } - } - } - - // TODO(adonovan): opt: provide a way to share a builder - // across a sequence of MethodValue calls. - - methodsOf := func(T types.Type) { - if !types.IsInterface(T) { - mset := prog.MethodSets.MethodSet(T) - for i := 0; i < mset.Len(); i++ { - function(prog.MethodValue(mset.At(i))) - } - } - } - - // Historically, Program.RuntimeTypes used to include the type - // of any exported member of a package loaded from syntax that - // has a non-parameterized type, plus all types - // reachable from that type using reflection, even though - // these runtime types may not be required for them. 
- // - // Rather than break existing programs that rely on - // AllFunctions visiting extra methods that are unreferenced - // by IR and unreachable via reflection, we moved the logic - // here, unprincipled though it is. - // (See doc comment for better ideas.) - // - // Nonetheless, after the move, we no longer visit every - // method of any type recursively reachable from T, only the - // methods of T and *T themselves, and we only apply this to - // named types T, and not to the type of every exported - // package member. - exportedTypeHack := func(t *ssa.Type) { - if isSyntactic(t.Package()) && - ast.IsExported(t.Name()) && - !types.IsInterface(t.Type()) { - // Consider only named types. - // (Ignore aliases and unsafe.Pointer.) - if named, ok := t.Type().(*types.Named); ok { - if named.TypeParams() == nil { - methodsOf(named) // T - methodsOf(types.NewPointer(named)) // *T - } - } - } - } - - for _, pkg := range prog.AllPackages() { - for _, mem := range pkg.Members { - switch mem := mem.(type) { - case *ssa.Function: - // Visit all package-level declared functions. - function(mem) - - case *ssa.Type: - exportedTypeHack(mem) - } - } - } - - // Visit all methods of types for which runtime types were - // materialized, as they are reachable through reflection. - for _, T := range prog.RuntimeTypes() { - methodsOf(T) - } - - return seen -} - -// MainPackages returns the subset of the specified packages -// named "main" that define a main function. -// The result may include synthetic "testmain" packages. -func MainPackages(pkgs []*ssa.Package) []*ssa.Package { - var mains []*ssa.Package - for _, pkg := range pkgs { - if pkg.Pkg.Name() == "main" && pkg.Func("main") != nil { - mains = append(mains, pkg) - } - } - return mains -} - -// TODO(adonovan): propose a principled API for this. One possibility -// is a new field, Package.SrcFunctions []*Function, which would -// contain the list of SrcFunctions described in point 2 of the -// AllFunctions doc comment, or nil if the package is not from syntax. -// But perhaps overloading nil vs empty slice is too subtle. -// -//go:linkname isSyntactic golang.org/x/tools/go/ssa.isSyntactic -func isSyntactic(pkg *ssa.Package) bool diff --git a/vendor/golang.org/x/tools/go/ssa/subst.go b/vendor/golang.org/x/tools/go/ssa/subst.go deleted file mode 100644 index 9f2f2f300..000000000 --- a/vendor/golang.org/x/tools/go/ssa/subst.go +++ /dev/null @@ -1,480 +0,0 @@ -// Copyright 2022 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package ssa - -import ( - "go/types" - - "golang.org/x/tools/internal/aliases" -) - -// Type substituter for a fixed set of replacement types. -// -// A nil *subster is an valid, empty substitution map. It always acts as -// the identity function. This allows for treating parameterized and -// non-parameterized functions identically while compiling to ssa. -// -// Not concurrency-safe. -type subster struct { - replacements map[*types.TypeParam]types.Type // values should contain no type params - cache map[types.Type]types.Type // cache of subst results - ctxt *types.Context // cache for instantiation - scope *types.Scope // *types.Named declared within this scope can be substituted (optional) - debug bool // perform extra debugging checks - // TODO(taking): consider adding Pos - // TODO(zpavlinovic): replacements can contain type params - // when generating instances inside of a generic function body. 
-} - -// Returns a subster that replaces tparams[i] with targs[i]. Uses ctxt as a cache. -// targs should not contain any types in tparams. -// scope is the (optional) lexical block of the generic function for which we are substituting. -func makeSubster(ctxt *types.Context, scope *types.Scope, tparams *types.TypeParamList, targs []types.Type, debug bool) *subster { - assert(tparams.Len() == len(targs), "makeSubster argument count must match") - - subst := &subster{ - replacements: make(map[*types.TypeParam]types.Type, tparams.Len()), - cache: make(map[types.Type]types.Type), - ctxt: ctxt, - scope: scope, - debug: debug, - } - for i := 0; i < tparams.Len(); i++ { - subst.replacements[tparams.At(i)] = targs[i] - } - if subst.debug { - subst.wellFormed() - } - return subst -} - -// wellFormed asserts that subst was properly initialized. -func (subst *subster) wellFormed() { - if subst == nil { - return - } - // Check that all of the type params do not appear in the arguments. - s := make(map[types.Type]bool, len(subst.replacements)) - for tparam := range subst.replacements { - s[tparam] = true - } - for _, r := range subst.replacements { - if reaches(r, s) { - panic(subst) - } - } -} - -// typ returns the type of t with the type parameter tparams[i] substituted -// for the type targs[i] where subst was created using tparams and targs. -func (subst *subster) typ(t types.Type) (res types.Type) { - if subst == nil { - return t // A nil subst is type preserving. - } - if r, ok := subst.cache[t]; ok { - return r - } - defer func() { - subst.cache[t] = res - }() - - // fall through if result r will be identical to t, types.Identical(r, t). - switch t := t.(type) { - case *aliases.Alias: - return subst.typ(aliases.Unalias(t)) - - case *types.TypeParam: - r := subst.replacements[t] - assert(r != nil, "type param without replacement encountered") - return r - - case *types.Basic: - return t - - case *types.Array: - if r := subst.typ(t.Elem()); r != t.Elem() { - return types.NewArray(r, t.Len()) - } - return t - - case *types.Slice: - if r := subst.typ(t.Elem()); r != t.Elem() { - return types.NewSlice(r) - } - return t - - case *types.Pointer: - if r := subst.typ(t.Elem()); r != t.Elem() { - return types.NewPointer(r) - } - return t - - case *types.Tuple: - return subst.tuple(t) - - case *types.Struct: - return subst.struct_(t) - - case *types.Map: - key := subst.typ(t.Key()) - elem := subst.typ(t.Elem()) - if key != t.Key() || elem != t.Elem() { - return types.NewMap(key, elem) - } - return t - - case *types.Chan: - if elem := subst.typ(t.Elem()); elem != t.Elem() { - return types.NewChan(t.Dir(), elem) - } - return t - - case *types.Signature: - return subst.signature(t) - - case *types.Union: - return subst.union(t) - - case *types.Interface: - return subst.interface_(t) - - case *types.Named: - return subst.named(t) - - default: - panic("unreachable") - } -} - -// types returns the result of {subst.typ(ts[i])}. -func (subst *subster) types(ts []types.Type) []types.Type { - res := make([]types.Type, len(ts)) - for i := range ts { - res[i] = subst.typ(ts[i]) - } - return res -} - -func (subst *subster) tuple(t *types.Tuple) *types.Tuple { - if t != nil { - if vars := subst.varlist(t); vars != nil { - return types.NewTuple(vars...) - } - } - return t -} - -type varlist interface { - At(i int) *types.Var - Len() int -} - -// fieldlist is an adapter for structs for the varlist interface. 
-type fieldlist struct { - str *types.Struct -} - -func (fl fieldlist) At(i int) *types.Var { return fl.str.Field(i) } -func (fl fieldlist) Len() int { return fl.str.NumFields() } - -func (subst *subster) struct_(t *types.Struct) *types.Struct { - if t != nil { - if fields := subst.varlist(fieldlist{t}); fields != nil { - tags := make([]string, t.NumFields()) - for i, n := 0, t.NumFields(); i < n; i++ { - tags[i] = t.Tag(i) - } - return types.NewStruct(fields, tags) - } - } - return t -} - -// varlist reutrns subst(in[i]) or return nils if subst(v[i]) == v[i] for all i. -func (subst *subster) varlist(in varlist) []*types.Var { - var out []*types.Var // nil => no updates - for i, n := 0, in.Len(); i < n; i++ { - v := in.At(i) - w := subst.var_(v) - if v != w && out == nil { - out = make([]*types.Var, n) - for j := 0; j < i; j++ { - out[j] = in.At(j) - } - } - if out != nil { - out[i] = w - } - } - return out -} - -func (subst *subster) var_(v *types.Var) *types.Var { - if v != nil { - if typ := subst.typ(v.Type()); typ != v.Type() { - if v.IsField() { - return types.NewField(v.Pos(), v.Pkg(), v.Name(), typ, v.Embedded()) - } - return types.NewVar(v.Pos(), v.Pkg(), v.Name(), typ) - } - } - return v -} - -func (subst *subster) union(u *types.Union) *types.Union { - var out []*types.Term // nil => no updates - - for i, n := 0, u.Len(); i < n; i++ { - t := u.Term(i) - r := subst.typ(t.Type()) - if r != t.Type() && out == nil { - out = make([]*types.Term, n) - for j := 0; j < i; j++ { - out[j] = u.Term(j) - } - } - if out != nil { - out[i] = types.NewTerm(t.Tilde(), r) - } - } - - if out != nil { - return types.NewUnion(out) - } - return u -} - -func (subst *subster) interface_(iface *types.Interface) *types.Interface { - if iface == nil { - return nil - } - - // methods for the interface. Initially nil if there is no known change needed. - // Signatures for the method where recv is nil. NewInterfaceType fills in the receivers. - var methods []*types.Func - initMethods := func(n int) { // copy first n explicit methods - methods = make([]*types.Func, iface.NumExplicitMethods()) - for i := 0; i < n; i++ { - f := iface.ExplicitMethod(i) - norecv := changeRecv(f.Type().(*types.Signature), nil) - methods[i] = types.NewFunc(f.Pos(), f.Pkg(), f.Name(), norecv) - } - } - for i := 0; i < iface.NumExplicitMethods(); i++ { - f := iface.ExplicitMethod(i) - // On interfaces, we need to cycle break on anonymous interface types - // being in a cycle with their signatures being in cycles with their receivers - // that do not go through a Named. 
- norecv := changeRecv(f.Type().(*types.Signature), nil) - sig := subst.typ(norecv) - if sig != norecv && methods == nil { - initMethods(i) - } - if methods != nil { - methods[i] = types.NewFunc(f.Pos(), f.Pkg(), f.Name(), sig.(*types.Signature)) - } - } - - var embeds []types.Type - initEmbeds := func(n int) { // copy first n embedded types - embeds = make([]types.Type, iface.NumEmbeddeds()) - for i := 0; i < n; i++ { - embeds[i] = iface.EmbeddedType(i) - } - } - for i := 0; i < iface.NumEmbeddeds(); i++ { - e := iface.EmbeddedType(i) - r := subst.typ(e) - if e != r && embeds == nil { - initEmbeds(i) - } - if embeds != nil { - embeds[i] = r - } - } - - if methods == nil && embeds == nil { - return iface - } - if methods == nil { - initMethods(iface.NumExplicitMethods()) - } - if embeds == nil { - initEmbeds(iface.NumEmbeddeds()) - } - return types.NewInterfaceType(methods, embeds).Complete() -} - -func (subst *subster) named(t *types.Named) types.Type { - // A named type may be: - // (1) ordinary named type (non-local scope, no type parameters, no type arguments), - // (2) locally scoped type, - // (3) generic (type parameters but no type arguments), or - // (4) instantiated (type parameters and type arguments). - tparams := t.TypeParams() - if tparams.Len() == 0 { - if subst.scope != nil && !subst.scope.Contains(t.Obj().Pos()) { - // Outside the current function scope? - return t // case (1) ordinary - } - - // case (2) locally scoped type. - // Create a new named type to represent this instantiation. - // We assume that local types of distinct instantiations of a - // generic function are distinct, even if they don't refer to - // type parameters, but the spec is unclear; see golang/go#58573. - // - // Subtle: We short circuit substitution and use a newly created type in - // subst, i.e. cache[t]=n, to pre-emptively replace t with n in recursive - // types during traversal. This both breaks infinite cycles and allows for - // constructing types with the replacement applied in subst.typ(under). - // - // Example: - // func foo[T any]() { - // type linkedlist struct { - // next *linkedlist - // val T - // } - // } - // - // When the field `next *linkedlist` is visited during subst.typ(under), - // we want the substituted type for the field `next` to be `*n`. - n := types.NewNamed(t.Obj(), nil, nil) - subst.cache[t] = n - subst.cache[n] = n - n.SetUnderlying(subst.typ(t.Underlying())) - return n - } - targs := t.TypeArgs() - - // insts are arguments to instantiate using. - insts := make([]types.Type, tparams.Len()) - - // case (3) generic ==> targs.Len() == 0 - // Instantiating a generic with no type arguments should be unreachable. - // Please report a bug if you encounter this. - assert(targs.Len() != 0, "substition into a generic Named type is currently unsupported") - - // case (4) instantiated. - // Substitute into the type arguments and instantiate the replacements/ - // Example: - // type N[A any] func() A - // func Foo[T](g N[T]) {} - // To instantiate Foo[string], one goes through {T->string}. To get the type of g - // one subsitutes T with string in {N with typeargs == {T} and typeparams == {A} } - // to get {N with TypeArgs == {string} and typeparams == {A} }. 
- assert(targs.Len() == tparams.Len(), "typeargs.Len() must match typeparams.Len() if present") - for i, n := 0, targs.Len(); i < n; i++ { - inst := subst.typ(targs.At(i)) // TODO(generic): Check with rfindley for mutual recursion - insts[i] = inst - } - r, err := types.Instantiate(subst.ctxt, t.Origin(), insts, false) - assert(err == nil, "failed to Instantiate Named type") - return r -} - -func (subst *subster) signature(t *types.Signature) types.Type { - tparams := t.TypeParams() - - // We are choosing not to support tparams.Len() > 0 until a need has been observed in practice. - // - // There are some known usages for types.Types coming from types.{Eval,CheckExpr}. - // To support tparams.Len() > 0, we just need to do the following [psuedocode]: - // targs := {subst.replacements[tparams[i]]]}; Instantiate(ctxt, t, targs, false) - - assert(tparams.Len() == 0, "Substituting types.Signatures with generic functions are currently unsupported.") - - // Either: - // (1)non-generic function. - // no type params to substitute - // (2)generic method and recv needs to be substituted. - - // Receivers can be either: - // named - // pointer to named - // interface - // nil - // interface is the problematic case. We need to cycle break there! - recv := subst.var_(t.Recv()) - params := subst.tuple(t.Params()) - results := subst.tuple(t.Results()) - if recv != t.Recv() || params != t.Params() || results != t.Results() { - return types.NewSignatureType(recv, nil, nil, params, results, t.Variadic()) - } - return t -} - -// reaches returns true if a type t reaches any type t' s.t. c[t'] == true. -// It updates c to cache results. -// -// reaches is currently only part of the wellFormed debug logic, and -// in practice c is initially only type parameters. It is not currently -// relied on in production. -func reaches(t types.Type, c map[types.Type]bool) (res bool) { - if c, ok := c[t]; ok { - return c - } - - // c is populated with temporary false entries as types are visited. - // This avoids repeat visits and break cycles. 
- c[t] = false - defer func() { - c[t] = res - }() - - switch t := t.(type) { - case *types.TypeParam, *types.Basic: - return false - case *types.Array: - return reaches(t.Elem(), c) - case *types.Slice: - return reaches(t.Elem(), c) - case *types.Pointer: - return reaches(t.Elem(), c) - case *types.Tuple: - for i := 0; i < t.Len(); i++ { - if reaches(t.At(i).Type(), c) { - return true - } - } - case *types.Struct: - for i := 0; i < t.NumFields(); i++ { - if reaches(t.Field(i).Type(), c) { - return true - } - } - case *types.Map: - return reaches(t.Key(), c) || reaches(t.Elem(), c) - case *types.Chan: - return reaches(t.Elem(), c) - case *types.Signature: - if t.Recv() != nil && reaches(t.Recv().Type(), c) { - return true - } - return reaches(t.Params(), c) || reaches(t.Results(), c) - case *types.Union: - for i := 0; i < t.Len(); i++ { - if reaches(t.Term(i).Type(), c) { - return true - } - } - case *types.Interface: - for i := 0; i < t.NumEmbeddeds(); i++ { - if reaches(t.Embedded(i), c) { - return true - } - } - for i := 0; i < t.NumExplicitMethods(); i++ { - if reaches(t.ExplicitMethod(i).Type(), c) { - return true - } - } - case *types.Named, *aliases.Alias: - return reaches(t.Underlying(), c) - default: - panic("unreachable") - } - return false -} diff --git a/vendor/golang.org/x/tools/go/ssa/util.go b/vendor/golang.org/x/tools/go/ssa/util.go deleted file mode 100644 index 4d65259ed..000000000 --- a/vendor/golang.org/x/tools/go/ssa/util.go +++ /dev/null @@ -1,375 +0,0 @@ -// Copyright 2013 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package ssa - -// This file defines a number of miscellaneous utility functions. - -import ( - "fmt" - "go/ast" - "go/token" - "go/types" - "io" - "os" - "sync" - - "golang.org/x/tools/go/ast/astutil" - "golang.org/x/tools/go/types/typeutil" - "golang.org/x/tools/internal/aliases" - "golang.org/x/tools/internal/typeparams" - "golang.org/x/tools/internal/typesinternal" -) - -//// Sanity checking utilities - -// assert panics with the mesage msg if p is false. -// Avoid combining with expensive string formatting. -func assert(p bool, msg string) { - if !p { - panic(msg) - } -} - -//// AST utilities - -func unparen(e ast.Expr) ast.Expr { return astutil.Unparen(e) } - -// isBlankIdent returns true iff e is an Ident with name "_". -// They have no associated types.Object, and thus no type. -func isBlankIdent(e ast.Expr) bool { - id, ok := e.(*ast.Ident) - return ok && id.Name == "_" -} - -//// Type utilities. Some of these belong in go/types. - -// isNonTypeParamInterface reports whether t is an interface type but not a type parameter. -func isNonTypeParamInterface(t types.Type) bool { - return !typeparams.IsTypeParam(t) && types.IsInterface(t) -} - -// isBasic reports whether t is a basic type. -func isBasic(t types.Type) bool { - _, ok := aliases.Unalias(t).(*types.Basic) - return ok -} - -// isString reports whether t is exactly a string type. -// t is assumed to be an Underlying type (not Named or Alias). -func isString(t types.Type) bool { - basic, ok := t.(*types.Basic) - return ok && basic.Info()&types.IsString != 0 -} - -// isByteSlice reports whether t is of the form []~bytes. -// t is assumed to be an Underlying type (not Named or Alias). 
-func isByteSlice(t types.Type) bool { - if b, ok := t.(*types.Slice); ok { - e, _ := b.Elem().Underlying().(*types.Basic) - return e != nil && e.Kind() == types.Byte - } - return false -} - -// isRuneSlice reports whether t is of the form []~runes. -// t is assumed to be an Underlying type (not Named or Alias). -func isRuneSlice(t types.Type) bool { - if b, ok := t.(*types.Slice); ok { - e, _ := b.Elem().Underlying().(*types.Basic) - return e != nil && e.Kind() == types.Rune - } - return false -} - -// isBasicConvTypes returns true when a type set can be -// one side of a Convert operation. This is when: -// - All are basic, []byte, or []rune. -// - At least 1 is basic. -// - At most 1 is []byte or []rune. -func isBasicConvTypes(tset termList) bool { - basics := 0 - all := underIs(tset, func(t types.Type) bool { - if isBasic(t) { - basics++ - return true - } - return isByteSlice(t) || isRuneSlice(t) - }) - return all && basics >= 1 && tset.Len()-basics <= 1 -} - -// deptr returns a pointer's element type and true; otherwise it returns (typ, false). -// This function is oblivious to core types and is not suitable for generics. -// -// TODO: Deprecate this function once all usages have been audited. -func deptr(typ types.Type) (types.Type, bool) { - if p, ok := typ.Underlying().(*types.Pointer); ok { - return p.Elem(), true - } - return typ, false -} - -// deref returns the element type of a type with a pointer core type and true; -// otherwise it returns (typ, false). -func deref(typ types.Type) (types.Type, bool) { - if p, ok := typeparams.CoreType(typ).(*types.Pointer); ok { - return p.Elem(), true - } - return typ, false -} - -// recvType returns the receiver type of method obj. -func recvType(obj *types.Func) types.Type { - return obj.Type().(*types.Signature).Recv().Type() -} - -// fieldOf returns the index'th field of the (core type of) a struct type; -// otherwise returns nil. -func fieldOf(typ types.Type, index int) *types.Var { - if st, ok := typeparams.CoreType(typ).(*types.Struct); ok { - if 0 <= index && index < st.NumFields() { - return st.Field(index) - } - } - return nil -} - -// isUntyped reports whether typ is the type of an untyped constant. -func isUntyped(typ types.Type) bool { - // No Underlying/Unalias: untyped constant types cannot be Named or Alias. - b, ok := typ.(*types.Basic) - return ok && b.Info()&types.IsUntyped != 0 -} - -// logStack prints the formatted "start" message to stderr and -// returns a closure that prints the corresponding "end" message. -// Call using 'defer logStack(...)()' to show builder stack on panic. -// Don't forget trailing parens! -func logStack(format string, args ...interface{}) func() { - msg := fmt.Sprintf(format, args...) - io.WriteString(os.Stderr, msg) - io.WriteString(os.Stderr, "\n") - return func() { - io.WriteString(os.Stderr, msg) - io.WriteString(os.Stderr, " end\n") - } -} - -// newVar creates a 'var' for use in a types.Tuple. -func newVar(name string, typ types.Type) *types.Var { - return types.NewParam(token.NoPos, nil, name, typ) -} - -// anonVar creates an anonymous 'var' for use in a types.Tuple. -func anonVar(typ types.Type) *types.Var { - return newVar("", typ) -} - -var lenResults = types.NewTuple(anonVar(tInt)) - -// makeLen returns the len builtin specialized to type func(T)int. 
-func makeLen(T types.Type) *Builtin { - lenParams := types.NewTuple(anonVar(T)) - return &Builtin{ - name: "len", - sig: types.NewSignature(nil, lenParams, lenResults, false), - } -} - -// receiverTypeArgs returns the type arguments to a method's receiver. -// Returns an empty list if the receiver does not have type arguments. -func receiverTypeArgs(method *types.Func) []types.Type { - recv := method.Type().(*types.Signature).Recv() - _, named := typesinternal.ReceiverNamed(recv) - if named == nil { - return nil // recv is anonymous struct/interface - } - ts := named.TypeArgs() - if ts.Len() == 0 { - return nil - } - targs := make([]types.Type, ts.Len()) - for i := 0; i < ts.Len(); i++ { - targs[i] = ts.At(i) - } - return targs -} - -// recvAsFirstArg takes a method signature and returns a function -// signature with receiver as the first parameter. -func recvAsFirstArg(sig *types.Signature) *types.Signature { - params := make([]*types.Var, 0, 1+sig.Params().Len()) - params = append(params, sig.Recv()) - for i := 0; i < sig.Params().Len(); i++ { - params = append(params, sig.Params().At(i)) - } - return types.NewSignatureType(nil, nil, nil, types.NewTuple(params...), sig.Results(), sig.Variadic()) -} - -// instance returns whether an expression is a simple or qualified identifier -// that is a generic instantiation. -func instance(info *types.Info, expr ast.Expr) bool { - // Compare the logic here against go/types.instantiatedIdent, - // which also handles *IndexExpr and *IndexListExpr. - var id *ast.Ident - switch x := expr.(type) { - case *ast.Ident: - id = x - case *ast.SelectorExpr: - id = x.Sel - default: - return false - } - _, ok := info.Instances[id] - return ok -} - -// instanceArgs returns the Instance[id].TypeArgs as a slice. -func instanceArgs(info *types.Info, id *ast.Ident) []types.Type { - targList := info.Instances[id].TypeArgs - if targList == nil { - return nil - } - - targs := make([]types.Type, targList.Len()) - for i, n := 0, targList.Len(); i < n; i++ { - targs[i] = targList.At(i) - } - return targs -} - -// Mapping of a type T to a canonical instance C s.t. types.Indentical(T, C). -// Thread-safe. -type canonizer struct { - mu sync.Mutex - types typeutil.Map // map from type to a canonical instance - lists typeListMap // map from a list of types to a canonical instance -} - -func newCanonizer() *canonizer { - c := &canonizer{} - h := typeutil.MakeHasher() - c.types.SetHasher(h) - c.lists.hasher = h - return c -} - -// List returns a canonical representative of a list of types. -// Representative of the empty list is nil. -func (c *canonizer) List(ts []types.Type) *typeList { - if len(ts) == 0 { - return nil - } - - c.mu.Lock() - defer c.mu.Unlock() - return c.lists.rep(ts) -} - -// Type returns a canonical representative of type T. -func (c *canonizer) Type(T types.Type) types.Type { - c.mu.Lock() - defer c.mu.Unlock() - - if r := c.types.At(T); r != nil { - return r.(types.Type) - } - c.types.Set(T, T) - return T -} - -// A type for representing a canonized list of types. -type typeList []types.Type - -func (l *typeList) identical(ts []types.Type) bool { - if l == nil { - return len(ts) == 0 - } - n := len(*l) - if len(ts) != n { - return false - } - for i, left := range *l { - right := ts[i] - if !types.Identical(left, right) { - return false - } - } - return true -} - -type typeListMap struct { - hasher typeutil.Hasher - buckets map[uint32][]*typeList -} - -// rep returns a canonical representative of a slice of types. 
-func (m *typeListMap) rep(ts []types.Type) *typeList { - if m == nil || len(ts) == 0 { - return nil - } - - if m.buckets == nil { - m.buckets = make(map[uint32][]*typeList) - } - - h := m.hash(ts) - bucket := m.buckets[h] - for _, l := range bucket { - if l.identical(ts) { - return l - } - } - - // not present. create a representative. - cp := make(typeList, len(ts)) - copy(cp, ts) - rep := &cp - - m.buckets[h] = append(bucket, rep) - return rep -} - -func (m *typeListMap) hash(ts []types.Type) uint32 { - if m == nil { - return 0 - } - // Some smallish prime far away from typeutil.Hash. - n := len(ts) - h := uint32(13619) + 2*uint32(n) - for i := 0; i < n; i++ { - h += 3 * m.hasher.Hash(ts[i]) - } - return h -} - -// instantiateMethod instantiates m with targs and returns a canonical representative for this method. -func (canon *canonizer) instantiateMethod(m *types.Func, targs []types.Type, ctxt *types.Context) *types.Func { - recv := recvType(m) - if p, ok := aliases.Unalias(recv).(*types.Pointer); ok { - recv = p.Elem() - } - named := aliases.Unalias(recv).(*types.Named) - inst, err := types.Instantiate(ctxt, named.Origin(), targs, false) - if err != nil { - panic(err) - } - rep := canon.Type(inst) - obj, _, _ := types.LookupFieldOrMethod(rep, true, m.Pkg(), m.Name()) - return obj.(*types.Func) -} - -// Exposed to ssautil using the linkname hack. -func isSyntactic(pkg *Package) bool { return pkg.syntax } - -// mapValues returns a new unordered array of map values. -func mapValues[K comparable, V any](m map[K]V) []V { - vals := make([]V, 0, len(m)) - for _, fn := range m { - vals = append(vals, fn) - } - return vals - -} diff --git a/vendor/golang.org/x/tools/go/ssa/wrappers.go b/vendor/golang.org/x/tools/go/ssa/wrappers.go deleted file mode 100644 index 7c7ee4099..000000000 --- a/vendor/golang.org/x/tools/go/ssa/wrappers.go +++ /dev/null @@ -1,351 +0,0 @@ -// Copyright 2013 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package ssa - -// This file defines synthesis of Functions that delegate to declared -// methods; they come in three kinds: -// -// (1) wrappers: methods that wrap declared methods, performing -// implicit pointer indirections and embedded field selections. -// -// (2) thunks: funcs that wrap declared methods. Like wrappers, -// thunks perform indirections and field selections. The thunk's -// first parameter is used as the receiver for the method call. -// -// (3) bounds: funcs that wrap declared methods. The bound's sole -// free variable, supplied by a closure, is used as the receiver -// for the method call. No indirections or field selections are -// performed since they can be done before the call. - -import ( - "fmt" - - "go/token" - "go/types" -) - -// -- wrappers ----------------------------------------------------------- - -// createWrapper returns a synthetic method that delegates to the -// declared method denoted by meth.Obj(), first performing any -// necessary pointer indirections or field selections implied by meth. -// -// The resulting method's receiver type is meth.Recv(). -// -// This function is versatile but quite subtle! Consider the -// following axes of variation when making changes: -// - optional receiver indirection -// - optional implicit field selections -// - meth.Obj() may denote a concrete or an interface method -// - the result may be a thunk or a wrapper. 
-func createWrapper(prog *Program, sel *selection, cr *creator) *Function { - obj := sel.obj.(*types.Func) // the declared function - sig := sel.typ.(*types.Signature) // type of this wrapper - - var recv *types.Var // wrapper's receiver or thunk's params[0] - name := obj.Name() - var description string - if sel.kind == types.MethodExpr { - name += "$thunk" - description = "thunk" - recv = sig.Params().At(0) - } else { - description = "wrapper" - recv = sig.Recv() - } - - description = fmt.Sprintf("%s for %s", description, sel.obj) - if prog.mode&LogSource != 0 { - defer logStack("create %s to (%s)", description, recv.Type())() - } - /* method wrapper */ - fn := &Function{ - name: name, - method: sel, - object: obj, - Signature: sig, - Synthetic: description, - Prog: prog, - pos: obj.Pos(), - // wrappers have no syntax - build: (*builder).buildWrapper, - syntax: nil, - info: nil, - goversion: "", - } - cr.Add(fn) - return fn -} - -// buildWrapper builds fn.Body for a method wrapper. -func (b *builder) buildWrapper(fn *Function) { - var recv *types.Var // wrapper's receiver or thunk's params[0] - var start int // first regular param - if fn.method.kind == types.MethodExpr { - recv = fn.Signature.Params().At(0) - start = 1 - } else { - recv = fn.Signature.Recv() - } - - fn.startBody() - fn.addSpilledParam(recv) - createParams(fn, start) - - indices := fn.method.index - - var v Value = fn.Locals[0] // spilled receiver - srdt, ptrRecv := deptr(fn.method.recv) - if ptrRecv { - v = emitLoad(fn, v) - - // For simple indirection wrappers, perform an informative nil-check: - // "value method (T).f called using nil *T pointer" - _, ptrObj := deptr(recvType(fn.object)) - if len(indices) == 1 && !ptrObj { - var c Call - c.Call.Value = &Builtin{ - name: "ssa:wrapnilchk", - sig: types.NewSignature(nil, - types.NewTuple(anonVar(fn.method.recv), anonVar(tString), anonVar(tString)), - types.NewTuple(anonVar(fn.method.recv)), false), - } - c.Call.Args = []Value{ - v, - stringConst(srdt.String()), - stringConst(fn.method.obj.Name()), - } - c.setType(v.Type()) - v = fn.emit(&c) - } - } - - // Invariant: v is a pointer, either - // value of *A receiver param, or - // address of A spilled receiver. - - // We use pointer arithmetic (FieldAddr possibly followed by - // Load) in preference to value extraction (Field possibly - // preceded by Load). - - v = emitImplicitSelections(fn, v, indices[:len(indices)-1], token.NoPos) - - // Invariant: v is a pointer, either - // value of implicit *C field, or - // address of implicit C field. - - var c Call - if r := recvType(fn.object); !types.IsInterface(r) { // concrete method - if _, ptrObj := deptr(r); !ptrObj { - v = emitLoad(fn, v) - } - c.Call.Value = fn.Prog.objectMethod(fn.object, b.created) - c.Call.Args = append(c.Call.Args, v) - } else { - c.Call.Method = fn.object - c.Call.Value = emitLoad(fn, v) // interface (possibly a typeparam) - } - for _, arg := range fn.Params[1:] { - c.Call.Args = append(c.Call.Args, arg) - } - emitTailCall(fn, &c) - fn.finishBody() -} - -// createParams creates parameters for wrapper method fn based on its -// Signature.Params, which do not include the receiver. -// start is the index of the first regular parameter to use. 
-func createParams(fn *Function, start int) { - tparams := fn.Signature.Params() - for i, n := start, tparams.Len(); i < n; i++ { - fn.addParamVar(tparams.At(i)) - } -} - -// -- bounds ----------------------------------------------------------- - -// createBound returns a bound method wrapper (or "bound"), a synthetic -// function that delegates to a concrete or interface method denoted -// by obj. The resulting function has no receiver, but has one free -// variable which will be used as the method's receiver in the -// tail-call. -// -// Use MakeClosure with such a wrapper to construct a bound method -// closure. e.g.: -// -// type T int or: type T interface { meth() } -// func (t T) meth() -// var t T -// f := t.meth -// f() // calls t.meth() -// -// f is a closure of a synthetic wrapper defined as if by: -// -// f := func() { return t.meth() } -// -// Unlike createWrapper, createBound need perform no indirection or field -// selections because that can be done before the closure is -// constructed. -func createBound(prog *Program, obj *types.Func, cr *creator) *Function { - description := fmt.Sprintf("bound method wrapper for %s", obj) - if prog.mode&LogSource != 0 { - defer logStack("%s", description)() - } - /* bound method wrapper */ - fn := &Function{ - name: obj.Name() + "$bound", - object: obj, - Signature: changeRecv(obj.Type().(*types.Signature), nil), // drop receiver - Synthetic: description, - Prog: prog, - pos: obj.Pos(), - // wrappers have no syntax - build: (*builder).buildBound, - syntax: nil, - info: nil, - goversion: "", - } - fn.FreeVars = []*FreeVar{{name: "recv", typ: recvType(obj), parent: fn}} // (cyclic) - cr.Add(fn) - return fn -} - -// buildBound builds fn.Body for a bound method closure. -func (b *builder) buildBound(fn *Function) { - fn.startBody() - createParams(fn, 0) - var c Call - - recv := fn.FreeVars[0] - if !types.IsInterface(recvType(fn.object)) { // concrete - c.Call.Value = fn.Prog.objectMethod(fn.object, b.created) - c.Call.Args = []Value{recv} - } else { - c.Call.Method = fn.object - c.Call.Value = recv // interface (possibly a typeparam) - } - for _, arg := range fn.Params { - c.Call.Args = append(c.Call.Args, arg) - } - emitTailCall(fn, &c) - fn.finishBody() -} - -// -- thunks ----------------------------------------------------------- - -// createThunk returns a thunk, a synthetic function that delegates to a -// concrete or interface method denoted by sel.obj. The resulting -// function has no receiver, but has an additional (first) regular -// parameter. -// -// Precondition: sel.kind == types.MethodExpr. -// -// type T int or: type T interface { meth() } -// func (t T) meth() -// f := T.meth -// var t T -// f(t) // calls t.meth() -// -// f is a synthetic wrapper defined as if by: -// -// f := func(t T) { return t.meth() } -func createThunk(prog *Program, sel *selection, cr *creator) *Function { - if sel.kind != types.MethodExpr { - panic(sel) - } - - fn := createWrapper(prog, sel, cr) - if fn.Signature.Recv() != nil { - panic(fn) // unexpected receiver - } - - return fn -} - -func changeRecv(s *types.Signature, recv *types.Var) *types.Signature { - return types.NewSignature(recv, s.Params(), s.Results(), s.Variadic()) -} - -// A local version of *types.Selection. -// Needed for some additional control, such as creating a MethodExpr for an instantiation. 
-type selection struct { - kind types.SelectionKind - recv types.Type - typ types.Type - obj types.Object - index []int - indirect bool -} - -func toSelection(sel *types.Selection) *selection { - return &selection{ - kind: sel.Kind(), - recv: sel.Recv(), - typ: sel.Type(), - obj: sel.Obj(), - index: sel.Index(), - indirect: sel.Indirect(), - } -} - -// -- instantiations -------------------------------------------------- - -// buildInstantiationWrapper builds the body of an instantiation -// wrapper fn. The body calls the original generic function, -// bracketed by ChangeType conversions on its arguments and results. -func (b *builder) buildInstantiationWrapper(fn *Function) { - orig := fn.topLevelOrigin - sig := fn.Signature - - fn.startBody() - if sig.Recv() != nil { - fn.addParamVar(sig.Recv()) - } - createParams(fn, 0) - - // Create body. Add a call to origin generic function - // and make type changes between argument and parameters, - // as well as return values. - var c Call - c.Call.Value = orig - if res := orig.Signature.Results(); res.Len() == 1 { - c.typ = res.At(0).Type() - } else { - c.typ = res - } - - // parameter of instance becomes an argument to the call - // to the original generic function. - argOffset := 0 - for i, arg := range fn.Params { - var typ types.Type - if i == 0 && sig.Recv() != nil { - typ = orig.Signature.Recv().Type() - argOffset = 1 - } else { - typ = orig.Signature.Params().At(i - argOffset).Type() - } - c.Call.Args = append(c.Call.Args, emitTypeCoercion(fn, arg, typ)) - } - - results := fn.emit(&c) - var ret Return - switch res := sig.Results(); res.Len() { - case 0: - // no results, do nothing. - case 1: - ret.Results = []Value{emitTypeCoercion(fn, results, res.At(0).Type())} - default: - for i := 0; i < sig.Results().Len(); i++ { - v := emitExtract(fn, results, i) - ret.Results = append(ret.Results, emitTypeCoercion(fn, v, res.At(i).Type())) - } - } - - fn.emit(&ret) - fn.currentBlock = nil - - fn.finishBody() -} diff --git a/vendor/golang.org/x/tools/internal/analysisinternal/analysis.go b/vendor/golang.org/x/tools/internal/analysisinternal/analysis.go deleted file mode 100644 index c3022a286..000000000 --- a/vendor/golang.org/x/tools/internal/analysisinternal/analysis.go +++ /dev/null @@ -1,395 +0,0 @@ -// Copyright 2020 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Package analysisinternal provides gopls' internal analyses with a -// number of helper functions that operate on typed syntax trees. -package analysisinternal - -import ( - "bytes" - "fmt" - "go/ast" - "go/token" - "go/types" - "strconv" - - "golang.org/x/tools/internal/aliases" -) - -func TypeErrorEndPos(fset *token.FileSet, src []byte, start token.Pos) token.Pos { - // Get the end position for the type error. - offset, end := fset.PositionFor(start, false).Offset, start - if offset >= len(src) { - return end - } - if width := bytes.IndexAny(src[offset:], " \n,():;[]+-*"); width > 0 { - end = start + token.Pos(width) - } - return end -} - -func ZeroValue(f *ast.File, pkg *types.Package, typ types.Type) ast.Expr { - // TODO(adonovan): think about generics, and also generic aliases. - under := aliases.Unalias(typ) - // Don't call Underlying unconditionally: although it removed - // Named and Alias, it also removes TypeParam. 
- if n, ok := typ.(*types.Named); ok { - under = n.Underlying() - } - switch u := under.(type) { - case *types.Basic: - switch { - case u.Info()&types.IsNumeric != 0: - return &ast.BasicLit{Kind: token.INT, Value: "0"} - case u.Info()&types.IsBoolean != 0: - return &ast.Ident{Name: "false"} - case u.Info()&types.IsString != 0: - return &ast.BasicLit{Kind: token.STRING, Value: `""`} - default: - panic(fmt.Sprintf("unknown basic type %v", u)) - } - case *types.Chan, *types.Interface, *types.Map, *types.Pointer, *types.Signature, *types.Slice, *types.Array: - return ast.NewIdent("nil") - case *types.Struct: - texpr := TypeExpr(f, pkg, typ) // typ because we want the name here. - if texpr == nil { - return nil - } - return &ast.CompositeLit{ - Type: texpr, - } - } - return nil -} - -// IsZeroValue checks whether the given expression is a 'zero value' (as determined by output of -// analysisinternal.ZeroValue) -func IsZeroValue(expr ast.Expr) bool { - switch e := expr.(type) { - case *ast.BasicLit: - return e.Value == "0" || e.Value == `""` - case *ast.Ident: - return e.Name == "nil" || e.Name == "false" - default: - return false - } -} - -// TypeExpr returns syntax for the specified type. References to -// named types from packages other than pkg are qualified by an appropriate -// package name, as defined by the import environment of file. -func TypeExpr(f *ast.File, pkg *types.Package, typ types.Type) ast.Expr { - switch t := typ.(type) { - case *types.Basic: - switch t.Kind() { - case types.UnsafePointer: - return &ast.SelectorExpr{X: ast.NewIdent("unsafe"), Sel: ast.NewIdent("Pointer")} - default: - return ast.NewIdent(t.Name()) - } - case *types.Pointer: - x := TypeExpr(f, pkg, t.Elem()) - if x == nil { - return nil - } - return &ast.UnaryExpr{ - Op: token.MUL, - X: x, - } - case *types.Array: - elt := TypeExpr(f, pkg, t.Elem()) - if elt == nil { - return nil - } - return &ast.ArrayType{ - Len: &ast.BasicLit{ - Kind: token.INT, - Value: fmt.Sprintf("%d", t.Len()), - }, - Elt: elt, - } - case *types.Slice: - elt := TypeExpr(f, pkg, t.Elem()) - if elt == nil { - return nil - } - return &ast.ArrayType{ - Elt: elt, - } - case *types.Map: - key := TypeExpr(f, pkg, t.Key()) - value := TypeExpr(f, pkg, t.Elem()) - if key == nil || value == nil { - return nil - } - return &ast.MapType{ - Key: key, - Value: value, - } - case *types.Chan: - dir := ast.ChanDir(t.Dir()) - if t.Dir() == types.SendRecv { - dir = ast.SEND | ast.RECV - } - value := TypeExpr(f, pkg, t.Elem()) - if value == nil { - return nil - } - return &ast.ChanType{ - Dir: dir, - Value: value, - } - case *types.Signature: - var params []*ast.Field - for i := 0; i < t.Params().Len(); i++ { - p := TypeExpr(f, pkg, t.Params().At(i).Type()) - if p == nil { - return nil - } - params = append(params, &ast.Field{ - Type: p, - Names: []*ast.Ident{ - { - Name: t.Params().At(i).Name(), - }, - }, - }) - } - if t.Variadic() { - last := params[len(params)-1] - last.Type = &ast.Ellipsis{Elt: last.Type.(*ast.ArrayType).Elt} - } - var returns []*ast.Field - for i := 0; i < t.Results().Len(); i++ { - r := TypeExpr(f, pkg, t.Results().At(i).Type()) - if r == nil { - return nil - } - returns = append(returns, &ast.Field{ - Type: r, - }) - } - return &ast.FuncType{ - Params: &ast.FieldList{ - List: params, - }, - Results: &ast.FieldList{ - List: returns, - }, - } - case *types.Named: - if t.Obj().Pkg() == nil { - return ast.NewIdent(t.Obj().Name()) - } - if t.Obj().Pkg() == pkg { - return ast.NewIdent(t.Obj().Name()) - } - pkgName := t.Obj().Pkg().Name() 
- - // If the file already imports the package under another name, use that. - for _, cand := range f.Imports { - if path, _ := strconv.Unquote(cand.Path.Value); path == t.Obj().Pkg().Path() { - if cand.Name != nil && cand.Name.Name != "" { - pkgName = cand.Name.Name - } - } - } - if pkgName == "." { - return ast.NewIdent(t.Obj().Name()) - } - return &ast.SelectorExpr{ - X: ast.NewIdent(pkgName), - Sel: ast.NewIdent(t.Obj().Name()), - } - case *types.Struct: - return ast.NewIdent(t.String()) - case *types.Interface: - return ast.NewIdent(t.String()) - default: - return nil - } -} - -// StmtToInsertVarBefore returns the ast.Stmt before which we can safely insert a new variable. -// Some examples: -// -// Basic Example: -// z := 1 -// y := z + x -// If x is undeclared, then this function would return `y := z + x`, so that we -// can insert `x := ` on the line before `y := z + x`. -// -// If stmt example: -// if z == 1 { -// } else if z == y {} -// If y is undeclared, then this function would return `if z == 1 {`, because we cannot -// insert a statement between an if and an else if statement. As a result, we need to find -// the top of the if chain to insert `y := ` before. -func StmtToInsertVarBefore(path []ast.Node) ast.Stmt { - enclosingIndex := -1 - for i, p := range path { - if _, ok := p.(ast.Stmt); ok { - enclosingIndex = i - break - } - } - if enclosingIndex == -1 { - return nil - } - enclosingStmt := path[enclosingIndex] - switch enclosingStmt.(type) { - case *ast.IfStmt: - // The enclosingStmt is inside of the if declaration, - // We need to check if we are in an else-if stmt and - // get the base if statement. - return baseIfStmt(path, enclosingIndex) - case *ast.CaseClause: - // Get the enclosing switch stmt if the enclosingStmt is - // inside of the case statement. - for i := enclosingIndex + 1; i < len(path); i++ { - if node, ok := path[i].(*ast.SwitchStmt); ok { - return node - } else if node, ok := path[i].(*ast.TypeSwitchStmt); ok { - return node - } - } - } - if len(path) <= enclosingIndex+1 { - return enclosingStmt.(ast.Stmt) - } - // Check if the enclosing statement is inside another node. - switch expr := path[enclosingIndex+1].(type) { - case *ast.IfStmt: - // Get the base if statement. - return baseIfStmt(path, enclosingIndex+1) - case *ast.ForStmt: - if expr.Init == enclosingStmt || expr.Post == enclosingStmt { - return expr - } - } - return enclosingStmt.(ast.Stmt) -} - -// baseIfStmt walks up the if/else-if chain until we get to -// the top of the current if chain. -func baseIfStmt(path []ast.Node, index int) ast.Stmt { - stmt := path[index] - for i := index + 1; i < len(path); i++ { - if node, ok := path[i].(*ast.IfStmt); ok && node.Else == stmt { - stmt = node - continue - } - break - } - return stmt.(ast.Stmt) -} - -// WalkASTWithParent walks the AST rooted at n. The semantics are -// similar to ast.Inspect except it does not call f(nil). -func WalkASTWithParent(n ast.Node, f func(n ast.Node, parent ast.Node) bool) { - var ancestors []ast.Node - ast.Inspect(n, func(n ast.Node) (recurse bool) { - if n == nil { - ancestors = ancestors[:len(ancestors)-1] - return false - } - - var parent ast.Node - if len(ancestors) > 0 { - parent = ancestors[len(ancestors)-1] - } - ancestors = append(ancestors, n) - return f(n, parent) - }) -} - -// MatchingIdents finds the names of all identifiers in 'node' that match any of the given types. -// 'pos' represents the position at which the identifiers may be inserted. 
'pos' must be within -// the scope of each of identifier we select. Otherwise, we will insert a variable at 'pos' that -// is unrecognized. -func MatchingIdents(typs []types.Type, node ast.Node, pos token.Pos, info *types.Info, pkg *types.Package) map[types.Type][]string { - - // Initialize matches to contain the variable types we are searching for. - matches := make(map[types.Type][]string) - for _, typ := range typs { - if typ == nil { - continue // TODO(adonovan): is this reachable? - } - matches[typ] = nil // create entry - } - - seen := map[types.Object]struct{}{} - ast.Inspect(node, func(n ast.Node) bool { - if n == nil { - return false - } - // Prevent circular definitions. If 'pos' is within an assignment statement, do not - // allow any identifiers in that assignment statement to be selected. Otherwise, - // we could do the following, where 'x' satisfies the type of 'f0': - // - // x := fakeStruct{f0: x} - // - if assign, ok := n.(*ast.AssignStmt); ok && pos > assign.Pos() && pos <= assign.End() { - return false - } - if n.End() > pos { - return n.Pos() <= pos - } - ident, ok := n.(*ast.Ident) - if !ok || ident.Name == "_" { - return true - } - obj := info.Defs[ident] - if obj == nil || obj.Type() == nil { - return true - } - if _, ok := obj.(*types.TypeName); ok { - return true - } - // Prevent duplicates in matches' values. - if _, ok = seen[obj]; ok { - return true - } - seen[obj] = struct{}{} - // Find the scope for the given position. Then, check whether the object - // exists within the scope. - innerScope := pkg.Scope().Innermost(pos) - if innerScope == nil { - return true - } - _, foundObj := innerScope.LookupParent(ident.Name, pos) - if foundObj != obj { - return true - } - // The object must match one of the types that we are searching for. - // TODO(adonovan): opt: use typeutil.Map? - if names, ok := matches[obj.Type()]; ok { - matches[obj.Type()] = append(names, ident.Name) - } else { - // If the object type does not exactly match - // any of the target types, greedily find the first - // target type that the object type can satisfy. - for typ := range matches { - if equivalentTypes(obj.Type(), typ) { - matches[typ] = append(matches[typ], ident.Name) - } - } - } - return true - }) - return matches -} - -func equivalentTypes(want, got types.Type) bool { - if types.Identical(want, got) { - return true - } - // Code segment to help check for untyped equality from (golang/go#32146). - if rhs, ok := want.(*types.Basic); ok && rhs.Info()&types.IsUntyped > 0 { - if lhs, ok := got.Underlying().(*types.Basic); ok { - return rhs.Info()&types.IsConstType == lhs.Info()&types.IsConstType - } - } - return types.AssignableTo(want, got) -} diff --git a/vendor/golang.org/x/tools/internal/analysisinternal/extractdoc.go b/vendor/golang.org/x/tools/internal/analysisinternal/extractdoc.go deleted file mode 100644 index 39507723d..000000000 --- a/vendor/golang.org/x/tools/internal/analysisinternal/extractdoc.go +++ /dev/null @@ -1,113 +0,0 @@ -// Copyright 2023 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package analysisinternal - -import ( - "fmt" - "go/parser" - "go/token" - "strings" -) - -// MustExtractDoc is like [ExtractDoc] but it panics on error. -// -// To use, define a doc.go file such as: -// -// // Package halting defines an analyzer of program termination. -// // -// // # Analyzer halting -// // -// // halting: reports whether execution will halt. 
-// // -// // The halting analyzer reports a diagnostic for functions -// // that run forever. To suppress the diagnostics, try inserting -// // a 'break' statement into each loop. -// package halting -// -// import _ "embed" -// -// //go:embed doc.go -// var doc string -// -// And declare your analyzer as: -// -// var Analyzer = &analysis.Analyzer{ -// Name: "halting", -// Doc: analysisutil.MustExtractDoc(doc, "halting"), -// ... -// } -func MustExtractDoc(content, name string) string { - doc, err := ExtractDoc(content, name) - if err != nil { - panic(err) - } - return doc -} - -// ExtractDoc extracts a section of a package doc comment from the -// provided contents of an analyzer package's doc.go file. -// -// A section is a portion of the comment between one heading and -// the next, using this form: -// -// # Analyzer NAME -// -// NAME: SUMMARY -// -// Full description... -// -// where NAME matches the name argument, and SUMMARY is a brief -// verb-phrase that describes the analyzer. The following lines, up -// until the next heading or the end of the comment, contain the full -// description. ExtractDoc returns the portion following the colon, -// which is the form expected by Analyzer.Doc. -// -// Example: -// -// # Analyzer printf -// -// printf: checks consistency of calls to printf -// -// The printf analyzer checks consistency of calls to printf. -// Here is the complete description... -// -// This notation allows a single doc comment to provide documentation -// for multiple analyzers, each in its own section. -// The HTML anchors generated for each heading are predictable. -// -// It returns an error if the content was not a valid Go source file -// containing a package doc comment with a heading of the required -// form. -// -// This machinery enables the package documentation (typically -// accessible via the web at https://pkg.go.dev/) and the command -// documentation (typically printed to a terminal) to be derived from -// the same source and formatted appropriately. -func ExtractDoc(content, name string) (string, error) { - if content == "" { - return "", fmt.Errorf("empty Go source file") - } - fset := token.NewFileSet() - f, err := parser.ParseFile(fset, "", content, parser.ParseComments|parser.PackageClauseOnly) - if err != nil { - return "", fmt.Errorf("not a Go source file") - } - if f.Doc == nil { - return "", fmt.Errorf("Go source file has no package doc comment") - } - for _, section := range strings.Split(f.Doc.Text(), "\n# ") { - if body := strings.TrimPrefix(section, "Analyzer "+name); body != section && - body != "" && - body[0] == '\r' || body[0] == '\n' { - body = strings.TrimSpace(body) - rest := strings.TrimPrefix(body, name+":") - if rest == body { - return "", fmt.Errorf("'Analyzer %s' heading not followed by '%s: summary...' line", name, name) - } - return strings.TrimSpace(rest), nil - } - } - return "", fmt.Errorf("package doc comment contains no 'Analyzer %s' heading", name) -} diff --git a/vendor/google.golang.org/protobuf/encoding/protodelim/protodelim.go b/vendor/google.golang.org/protobuf/encoding/protodelim/protodelim.go deleted file mode 100644 index 2ef36bbcf..000000000 --- a/vendor/google.golang.org/protobuf/encoding/protodelim/protodelim.go +++ /dev/null @@ -1,160 +0,0 @@ -// Copyright 2022 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Package protodelim marshals and unmarshals varint size-delimited messages. 
-package protodelim - -import ( - "bufio" - "encoding/binary" - "fmt" - "io" - - "google.golang.org/protobuf/encoding/protowire" - "google.golang.org/protobuf/internal/errors" - "google.golang.org/protobuf/proto" -) - -// MarshalOptions is a configurable varint size-delimited marshaler. -type MarshalOptions struct{ proto.MarshalOptions } - -// MarshalTo writes a varint size-delimited wire-format message to w. -// If w returns an error, MarshalTo returns it unchanged. -func (o MarshalOptions) MarshalTo(w io.Writer, m proto.Message) (int, error) { - msgBytes, err := o.MarshalOptions.Marshal(m) - if err != nil { - return 0, err - } - - sizeBytes := protowire.AppendVarint(nil, uint64(len(msgBytes))) - sizeWritten, err := w.Write(sizeBytes) - if err != nil { - return sizeWritten, err - } - msgWritten, err := w.Write(msgBytes) - if err != nil { - return sizeWritten + msgWritten, err - } - return sizeWritten + msgWritten, nil -} - -// MarshalTo writes a varint size-delimited wire-format message to w -// with the default options. -// -// See the documentation for [MarshalOptions.MarshalTo]. -func MarshalTo(w io.Writer, m proto.Message) (int, error) { - return MarshalOptions{}.MarshalTo(w, m) -} - -// UnmarshalOptions is a configurable varint size-delimited unmarshaler. -type UnmarshalOptions struct { - proto.UnmarshalOptions - - // MaxSize is the maximum size in wire-format bytes of a single message. - // Unmarshaling a message larger than MaxSize will return an error. - // A zero MaxSize will default to 4 MiB. - // Setting MaxSize to -1 disables the limit. - MaxSize int64 -} - -const defaultMaxSize = 4 << 20 // 4 MiB, corresponds to the default gRPC max request/response size - -// SizeTooLargeError is an error that is returned when the unmarshaler encounters a message size -// that is larger than its configured [UnmarshalOptions.MaxSize]. -type SizeTooLargeError struct { - // Size is the varint size of the message encountered - // that was larger than the provided MaxSize. - Size uint64 - - // MaxSize is the MaxSize limit configured in UnmarshalOptions, which Size exceeded. - MaxSize uint64 -} - -func (e *SizeTooLargeError) Error() string { - return fmt.Sprintf("message size %d exceeded unmarshaler's maximum configured size %d", e.Size, e.MaxSize) -} - -// Reader is the interface expected by [UnmarshalFrom]. -// It is implemented by *[bufio.Reader]. -type Reader interface { - io.Reader - io.ByteReader -} - -// UnmarshalFrom parses and consumes a varint size-delimited wire-format message -// from r. -// The provided message must be mutable (e.g., a non-nil pointer to a message). -// -// The error is [io.EOF] error only if no bytes are read. -// If an EOF happens after reading some but not all the bytes, -// UnmarshalFrom returns a non-io.EOF error. -// In particular if r returns a non-io.EOF error, UnmarshalFrom returns it unchanged, -// and if only a size is read with no subsequent message, [io.ErrUnexpectedEOF] is returned. -func (o UnmarshalOptions) UnmarshalFrom(r Reader, m proto.Message) error { - var sizeArr [binary.MaxVarintLen64]byte - sizeBuf := sizeArr[:0] - for i := range sizeArr { - b, err := r.ReadByte() - if err != nil { - // Immediate EOF is unexpected. 
- if err == io.EOF && i != 0 { - break - } - return err - } - sizeBuf = append(sizeBuf, b) - if b < 0x80 { - break - } - } - size, n := protowire.ConsumeVarint(sizeBuf) - if n < 0 { - return protowire.ParseError(n) - } - - maxSize := o.MaxSize - if maxSize == 0 { - maxSize = defaultMaxSize - } - if maxSize != -1 && size > uint64(maxSize) { - return errors.Wrap(&SizeTooLargeError{Size: size, MaxSize: uint64(maxSize)}, "") - } - - var b []byte - var err error - if br, ok := r.(*bufio.Reader); ok { - // Use the []byte from the bufio.Reader instead of having to allocate one. - // This reduces CPU usage and allocated bytes. - b, err = br.Peek(int(size)) - if err == nil { - defer br.Discard(int(size)) - } else { - b = nil - } - } - if b == nil { - b = make([]byte, size) - _, err = io.ReadFull(r, b) - } - - if err == io.EOF { - return io.ErrUnexpectedEOF - } - if err != nil { - return err - } - if err := o.Unmarshal(b, m); err != nil { - return err - } - return nil -} - -// UnmarshalFrom parses and consumes a varint size-delimited wire-format message -// from r with the default options. -// The provided message must be mutable (e.g., a non-nil pointer to a message). -// -// See the documentation for [UnmarshalOptions.UnmarshalFrom]. -func UnmarshalFrom(r Reader, m proto.Message) error { - return UnmarshalOptions{}.UnmarshalFrom(r, m) -} diff --git a/vendor/gopkg.in/ini.v1/.editorconfig b/vendor/gopkg.in/ini.v1/.editorconfig deleted file mode 100644 index 4a2d9180f..000000000 --- a/vendor/gopkg.in/ini.v1/.editorconfig +++ /dev/null @@ -1,12 +0,0 @@ -# http://editorconfig.org - -root = true - -[*] -charset = utf-8 -end_of_line = lf -insert_final_newline = true -trim_trailing_whitespace = true - -[*_test.go] -trim_trailing_whitespace = false diff --git a/vendor/gopkg.in/ini.v1/.gitignore b/vendor/gopkg.in/ini.v1/.gitignore deleted file mode 100644 index 588388bda..000000000 --- a/vendor/gopkg.in/ini.v1/.gitignore +++ /dev/null @@ -1,7 +0,0 @@ -testdata/conf_out.ini -ini.sublime-project -ini.sublime-workspace -testdata/conf_reflect.ini -.idea -/.vscode -.DS_Store diff --git a/vendor/gopkg.in/ini.v1/.golangci.yml b/vendor/gopkg.in/ini.v1/.golangci.yml deleted file mode 100644 index 631e36925..000000000 --- a/vendor/gopkg.in/ini.v1/.golangci.yml +++ /dev/null @@ -1,27 +0,0 @@ -linters-settings: - staticcheck: - checks: [ - "all", - "-SA1019" # There are valid use cases of strings.Title - ] - nakedret: - max-func-lines: 0 # Disallow any unnamed return statement - -linters: - enable: - - deadcode - - errcheck - - gosimple - - govet - - ineffassign - - staticcheck - - structcheck - - typecheck - - unused - - varcheck - - nakedret - - gofmt - - rowserrcheck - - unconvert - - goimports - - unparam diff --git a/vendor/gopkg.in/ini.v1/LICENSE b/vendor/gopkg.in/ini.v1/LICENSE deleted file mode 100644 index d361bbcdf..000000000 --- a/vendor/gopkg.in/ini.v1/LICENSE +++ /dev/null @@ -1,191 +0,0 @@ -Apache License -Version 2.0, January 2004 -http://www.apache.org/licenses/ - -TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - -1. Definitions. - -"License" shall mean the terms and conditions for use, reproduction, and -distribution as defined by Sections 1 through 9 of this document. - -"Licensor" shall mean the copyright owner or entity authorized by the copyright -owner that is granting the License. - -"Legal Entity" shall mean the union of the acting entity and all other entities -that control, are controlled by, or are under common control with that entity. 
-For the purposes of this definition, "control" means (i) the power, direct or -indirect, to cause the direction or management of such entity, whether by -contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the -outstanding shares, or (iii) beneficial ownership of such entity. - -"You" (or "Your") shall mean an individual or Legal Entity exercising -permissions granted by this License. - -"Source" form shall mean the preferred form for making modifications, including -but not limited to software source code, documentation source, and configuration -files. - -"Object" form shall mean any form resulting from mechanical transformation or -translation of a Source form, including but not limited to compiled object code, -generated documentation, and conversions to other media types. - -"Work" shall mean the work of authorship, whether in Source or Object form, made -available under the License, as indicated by a copyright notice that is included -in or attached to the work (an example is provided in the Appendix below). - -"Derivative Works" shall mean any work, whether in Source or Object form, that -is based on (or derived from) the Work and for which the editorial revisions, -annotations, elaborations, or other modifications represent, as a whole, an -original work of authorship. For the purposes of this License, Derivative Works -shall not include works that remain separable from, or merely link (or bind by -name) to the interfaces of, the Work and Derivative Works thereof. - -"Contribution" shall mean any work of authorship, including the original version -of the Work and any modifications or additions to that Work or Derivative Works -thereof, that is intentionally submitted to Licensor for inclusion in the Work -by the copyright owner or by an individual or Legal Entity authorized to submit -on behalf of the copyright owner. For the purposes of this definition, -"submitted" means any form of electronic, verbal, or written communication sent -to the Licensor or its representatives, including but not limited to -communication on electronic mailing lists, source code control systems, and -issue tracking systems that are managed by, or on behalf of, the Licensor for -the purpose of discussing and improving the Work, but excluding communication -that is conspicuously marked or otherwise designated in writing by the copyright -owner as "Not a Contribution." - -"Contributor" shall mean Licensor and any individual or Legal Entity on behalf -of whom a Contribution has been received by Licensor and subsequently -incorporated within the Work. - -2. Grant of Copyright License. - -Subject to the terms and conditions of this License, each Contributor hereby -grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, -irrevocable copyright license to reproduce, prepare Derivative Works of, -publicly display, publicly perform, sublicense, and distribute the Work and such -Derivative Works in Source or Object form. - -3. Grant of Patent License. 
- -Subject to the terms and conditions of this License, each Contributor hereby -grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, -irrevocable (except as stated in this section) patent license to make, have -made, use, offer to sell, sell, import, and otherwise transfer the Work, where -such license applies only to those patent claims licensable by such Contributor -that are necessarily infringed by their Contribution(s) alone or by combination -of their Contribution(s) with the Work to which such Contribution(s) was -submitted. If You institute patent litigation against any entity (including a -cross-claim or counterclaim in a lawsuit) alleging that the Work or a -Contribution incorporated within the Work constitutes direct or contributory -patent infringement, then any patent licenses granted to You under this License -for that Work shall terminate as of the date such litigation is filed. - -4. Redistribution. - -You may reproduce and distribute copies of the Work or Derivative Works thereof -in any medium, with or without modifications, and in Source or Object form, -provided that You meet the following conditions: - -You must give any other recipients of the Work or Derivative Works a copy of -this License; and -You must cause any modified files to carry prominent notices stating that You -changed the files; and -You must retain, in the Source form of any Derivative Works that You distribute, -all copyright, patent, trademark, and attribution notices from the Source form -of the Work, excluding those notices that do not pertain to any part of the -Derivative Works; and -If the Work includes a "NOTICE" text file as part of its distribution, then any -Derivative Works that You distribute must include a readable copy of the -attribution notices contained within such NOTICE file, excluding those notices -that do not pertain to any part of the Derivative Works, in at least one of the -following places: within a NOTICE text file distributed as part of the -Derivative Works; within the Source form or documentation, if provided along -with the Derivative Works; or, within a display generated by the Derivative -Works, if and wherever such third-party notices normally appear. The contents of -the NOTICE file are for informational purposes only and do not modify the -License. You may add Your own attribution notices within Derivative Works that -You distribute, alongside or as an addendum to the NOTICE text from the Work, -provided that such additional attribution notices cannot be construed as -modifying the License. -You may add Your own copyright statement to Your modifications and may provide -additional or different license terms and conditions for use, reproduction, or -distribution of Your modifications, or for any such Derivative Works as a whole, -provided Your use, reproduction, and distribution of the Work otherwise complies -with the conditions stated in this License. - -5. Submission of Contributions. - -Unless You explicitly state otherwise, any Contribution intentionally submitted -for inclusion in the Work by You to the Licensor shall be under the terms and -conditions of this License, without any additional terms or conditions. -Notwithstanding the above, nothing herein shall supersede or modify the terms of -any separate license agreement you may have executed with Licensor regarding -such Contributions. - -6. Trademarks. 
- -This License does not grant permission to use the trade names, trademarks, -service marks, or product names of the Licensor, except as required for -reasonable and customary use in describing the origin of the Work and -reproducing the content of the NOTICE file. - -7. Disclaimer of Warranty. - -Unless required by applicable law or agreed to in writing, Licensor provides the -Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, -including, without limitation, any warranties or conditions of TITLE, -NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are -solely responsible for determining the appropriateness of using or -redistributing the Work and assume any risks associated with Your exercise of -permissions under this License. - -8. Limitation of Liability. - -In no event and under no legal theory, whether in tort (including negligence), -contract, or otherwise, unless required by applicable law (such as deliberate -and grossly negligent acts) or agreed to in writing, shall any Contributor be -liable to You for damages, including any direct, indirect, special, incidental, -or consequential damages of any character arising as a result of this License or -out of the use or inability to use the Work (including but not limited to -damages for loss of goodwill, work stoppage, computer failure or malfunction, or -any and all other commercial damages or losses), even if such Contributor has -been advised of the possibility of such damages. - -9. Accepting Warranty or Additional Liability. - -While redistributing the Work or Derivative Works thereof, You may choose to -offer, and charge a fee for, acceptance of support, warranty, indemnity, or -other liability obligations and/or rights consistent with this License. However, -in accepting such obligations, You may act only on Your own behalf and on Your -sole responsibility, not on behalf of any other Contributor, and only if You -agree to indemnify, defend, and hold each Contributor harmless for any liability -incurred by, or claims asserted against, such Contributor by reason of your -accepting any such warranty or additional liability. - -END OF TERMS AND CONDITIONS - -APPENDIX: How to apply the Apache License to your work - -To apply the Apache License to your work, attach the following boilerplate -notice, with the fields enclosed by brackets "[]" replaced with your own -identifying information. (Don't include the brackets!) The text should be -enclosed in the appropriate comment syntax for the file format. We also -recommend that a file or class name and description of purpose be included on -the same "printed page" as the copyright notice for easier identification within -third-party archives. - - Copyright 2014 Unknwon - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. 
diff --git a/vendor/gopkg.in/ini.v1/Makefile b/vendor/gopkg.in/ini.v1/Makefile deleted file mode 100644 index f3b0dae2d..000000000 --- a/vendor/gopkg.in/ini.v1/Makefile +++ /dev/null @@ -1,15 +0,0 @@ -.PHONY: build test bench vet coverage - -build: vet bench - -test: - go test -v -cover -race - -bench: - go test -v -cover -test.bench=. -test.benchmem - -vet: - go vet - -coverage: - go test -coverprofile=c.out && go tool cover -html=c.out && rm c.out diff --git a/vendor/gopkg.in/ini.v1/README.md b/vendor/gopkg.in/ini.v1/README.md deleted file mode 100644 index 30606d970..000000000 --- a/vendor/gopkg.in/ini.v1/README.md +++ /dev/null @@ -1,43 +0,0 @@ -# INI - -[![GitHub Workflow Status](https://img.shields.io/github/checks-status/go-ini/ini/main?logo=github&style=for-the-badge)](https://github.com/go-ini/ini/actions?query=branch%3Amain) -[![codecov](https://img.shields.io/codecov/c/github/go-ini/ini/master?logo=codecov&style=for-the-badge)](https://codecov.io/gh/go-ini/ini) -[![GoDoc](https://img.shields.io/badge/GoDoc-Reference-blue?style=for-the-badge&logo=go)](https://pkg.go.dev/github.com/go-ini/ini?tab=doc) -[![Sourcegraph](https://img.shields.io/badge/view%20on-Sourcegraph-brightgreen.svg?style=for-the-badge&logo=sourcegraph)](https://sourcegraph.com/github.com/go-ini/ini) - -![](https://avatars0.githubusercontent.com/u/10216035?v=3&s=200) - -Package ini provides INI file read and write functionality in Go. - -## Features - -- Load from multiple data sources(file, `[]byte`, `io.Reader` and `io.ReadCloser`) with overwrites. -- Read with recursion values. -- Read with parent-child sections. -- Read with auto-increment key names. -- Read with multiple-line values. -- Read with tons of helper methods. -- Read and convert values to Go types. -- Read and **WRITE** comments of sections and keys. -- Manipulate sections, keys and comments with ease. -- Keep sections and keys in order as you parse and save. - -## Installation - -The minimum requirement of Go is **1.13**. - -```sh -$ go get gopkg.in/ini.v1 -``` - -Please add `-u` flag to update in the future. - -## Getting Help - -- [Getting Started](https://ini.unknwon.io/docs/intro/getting_started) -- [API Documentation](https://gowalker.org/gopkg.in/ini.v1) -- 中国大陆镜像:https://ini.unknwon.cn - -## License - -This project is under Apache v2 License. See the [LICENSE](LICENSE) file for the full license text. diff --git a/vendor/gopkg.in/ini.v1/codecov.yml b/vendor/gopkg.in/ini.v1/codecov.yml deleted file mode 100644 index e02ec84bc..000000000 --- a/vendor/gopkg.in/ini.v1/codecov.yml +++ /dev/null @@ -1,16 +0,0 @@ -coverage: - range: "60...95" - status: - project: - default: - threshold: 1% - informational: true - patch: - defualt: - only_pulls: true - informational: true - -comment: - layout: 'diff' - -github_checks: false diff --git a/vendor/gopkg.in/ini.v1/data_source.go b/vendor/gopkg.in/ini.v1/data_source.go deleted file mode 100644 index c3a541f1d..000000000 --- a/vendor/gopkg.in/ini.v1/data_source.go +++ /dev/null @@ -1,76 +0,0 @@ -// Copyright 2019 Unknwon -// -// Licensed under the Apache License, Version 2.0 (the "License"): you may -// not use this file except in compliance with the License. You may obtain -// a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the -// License for the specific language governing permissions and limitations -// under the License. - -package ini - -import ( - "bytes" - "fmt" - "io" - "io/ioutil" - "os" -) - -var ( - _ dataSource = (*sourceFile)(nil) - _ dataSource = (*sourceData)(nil) - _ dataSource = (*sourceReadCloser)(nil) -) - -// dataSource is an interface that returns object which can be read and closed. -type dataSource interface { - ReadCloser() (io.ReadCloser, error) -} - -// sourceFile represents an object that contains content on the local file system. -type sourceFile struct { - name string -} - -func (s sourceFile) ReadCloser() (_ io.ReadCloser, err error) { - return os.Open(s.name) -} - -// sourceData represents an object that contains content in memory. -type sourceData struct { - data []byte -} - -func (s *sourceData) ReadCloser() (io.ReadCloser, error) { - return ioutil.NopCloser(bytes.NewReader(s.data)), nil -} - -// sourceReadCloser represents an input stream with Close method. -type sourceReadCloser struct { - reader io.ReadCloser -} - -func (s *sourceReadCloser) ReadCloser() (io.ReadCloser, error) { - return s.reader, nil -} - -func parseDataSource(source interface{}) (dataSource, error) { - switch s := source.(type) { - case string: - return sourceFile{s}, nil - case []byte: - return &sourceData{s}, nil - case io.ReadCloser: - return &sourceReadCloser{s}, nil - case io.Reader: - return &sourceReadCloser{ioutil.NopCloser(s)}, nil - default: - return nil, fmt.Errorf("error parsing data source: unknown type %q", s) - } -} diff --git a/vendor/gopkg.in/ini.v1/deprecated.go b/vendor/gopkg.in/ini.v1/deprecated.go deleted file mode 100644 index 48b8e66d6..000000000 --- a/vendor/gopkg.in/ini.v1/deprecated.go +++ /dev/null @@ -1,22 +0,0 @@ -// Copyright 2019 Unknwon -// -// Licensed under the Apache License, Version 2.0 (the "License"): you may -// not use this file except in compliance with the License. You may obtain -// a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -// License for the specific language governing permissions and limitations -// under the License. - -package ini - -var ( - // Deprecated: Use "DefaultSection" instead. - DEFAULT_SECTION = DefaultSection - // Deprecated: AllCapsUnderscore converts to format ALL_CAPS_UNDERSCORE. - AllCapsUnderscore = SnackCase -) diff --git a/vendor/gopkg.in/ini.v1/error.go b/vendor/gopkg.in/ini.v1/error.go deleted file mode 100644 index f66bc94b8..000000000 --- a/vendor/gopkg.in/ini.v1/error.go +++ /dev/null @@ -1,49 +0,0 @@ -// Copyright 2016 Unknwon -// -// Licensed under the Apache License, Version 2.0 (the "License"): you may -// not use this file except in compliance with the License. You may obtain -// a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -// License for the specific language governing permissions and limitations -// under the License. - -package ini - -import ( - "fmt" -) - -// ErrDelimiterNotFound indicates the error type of no delimiter is found which there should be one. 
-type ErrDelimiterNotFound struct { - Line string -} - -// IsErrDelimiterNotFound returns true if the given error is an instance of ErrDelimiterNotFound. -func IsErrDelimiterNotFound(err error) bool { - _, ok := err.(ErrDelimiterNotFound) - return ok -} - -func (err ErrDelimiterNotFound) Error() string { - return fmt.Sprintf("key-value delimiter not found: %s", err.Line) -} - -// ErrEmptyKeyName indicates the error type of no key name is found which there should be one. -type ErrEmptyKeyName struct { - Line string -} - -// IsErrEmptyKeyName returns true if the given error is an instance of ErrEmptyKeyName. -func IsErrEmptyKeyName(err error) bool { - _, ok := err.(ErrEmptyKeyName) - return ok -} - -func (err ErrEmptyKeyName) Error() string { - return fmt.Sprintf("empty key name: %s", err.Line) -} diff --git a/vendor/gopkg.in/ini.v1/file.go b/vendor/gopkg.in/ini.v1/file.go deleted file mode 100644 index f8b22408b..000000000 --- a/vendor/gopkg.in/ini.v1/file.go +++ /dev/null @@ -1,541 +0,0 @@ -// Copyright 2017 Unknwon -// -// Licensed under the Apache License, Version 2.0 (the "License"): you may -// not use this file except in compliance with the License. You may obtain -// a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -// License for the specific language governing permissions and limitations -// under the License. - -package ini - -import ( - "bytes" - "errors" - "fmt" - "io" - "io/ioutil" - "os" - "strings" - "sync" -) - -// File represents a combination of one or more INI files in memory. -type File struct { - options LoadOptions - dataSources []dataSource - - // Should make things safe, but sometimes doesn't matter. - BlockMode bool - lock sync.RWMutex - - // To keep data in order. - sectionList []string - // To keep track of the index of a section with same name. - // This meta list is only used with non-unique section names are allowed. - sectionIndexes []int - - // Actual data is stored here. - sections map[string][]*Section - - NameMapper - ValueMapper -} - -// newFile initializes File object with given data sources. -func newFile(dataSources []dataSource, opts LoadOptions) *File { - if len(opts.KeyValueDelimiters) == 0 { - opts.KeyValueDelimiters = "=:" - } - if len(opts.KeyValueDelimiterOnWrite) == 0 { - opts.KeyValueDelimiterOnWrite = "=" - } - if len(opts.ChildSectionDelimiter) == 0 { - opts.ChildSectionDelimiter = "." - } - - return &File{ - BlockMode: true, - dataSources: dataSources, - sections: make(map[string][]*Section), - options: opts, - } -} - -// Empty returns an empty file object. -func Empty(opts ...LoadOptions) *File { - var opt LoadOptions - if len(opts) > 0 { - opt = opts[0] - } - - // Ignore error here, we are sure our data is good. - f, _ := LoadSources(opt, []byte("")) - return f -} - -// NewSection creates a new section. 
-func (f *File) NewSection(name string) (*Section, error) { - if len(name) == 0 { - return nil, errors.New("empty section name") - } - - if (f.options.Insensitive || f.options.InsensitiveSections) && name != DefaultSection { - name = strings.ToLower(name) - } - - if f.BlockMode { - f.lock.Lock() - defer f.lock.Unlock() - } - - if !f.options.AllowNonUniqueSections && inSlice(name, f.sectionList) { - return f.sections[name][0], nil - } - - f.sectionList = append(f.sectionList, name) - - // NOTE: Append to indexes must happen before appending to sections, - // otherwise index will have off-by-one problem. - f.sectionIndexes = append(f.sectionIndexes, len(f.sections[name])) - - sec := newSection(f, name) - f.sections[name] = append(f.sections[name], sec) - - return sec, nil -} - -// NewRawSection creates a new section with an unparseable body. -func (f *File) NewRawSection(name, body string) (*Section, error) { - section, err := f.NewSection(name) - if err != nil { - return nil, err - } - - section.isRawSection = true - section.rawBody = body - return section, nil -} - -// NewSections creates a list of sections. -func (f *File) NewSections(names ...string) (err error) { - for _, name := range names { - if _, err = f.NewSection(name); err != nil { - return err - } - } - return nil -} - -// GetSection returns section by given name. -func (f *File) GetSection(name string) (*Section, error) { - secs, err := f.SectionsByName(name) - if err != nil { - return nil, err - } - - return secs[0], err -} - -// HasSection returns true if the file contains a section with given name. -func (f *File) HasSection(name string) bool { - section, _ := f.GetSection(name) - return section != nil -} - -// SectionsByName returns all sections with given name. -func (f *File) SectionsByName(name string) ([]*Section, error) { - if len(name) == 0 { - name = DefaultSection - } - if f.options.Insensitive || f.options.InsensitiveSections { - name = strings.ToLower(name) - } - - if f.BlockMode { - f.lock.RLock() - defer f.lock.RUnlock() - } - - secs := f.sections[name] - if len(secs) == 0 { - return nil, fmt.Errorf("section %q does not exist", name) - } - - return secs, nil -} - -// Section assumes named section exists and returns a zero-value when not. -func (f *File) Section(name string) *Section { - sec, err := f.GetSection(name) - if err != nil { - if name == "" { - name = DefaultSection - } - sec, _ = f.NewSection(name) - return sec - } - return sec -} - -// SectionWithIndex assumes named section exists and returns a new section when not. -func (f *File) SectionWithIndex(name string, index int) *Section { - secs, err := f.SectionsByName(name) - if err != nil || len(secs) <= index { - // NOTE: It's OK here because the only possible error is empty section name, - // but if it's empty, this piece of code won't be executed. - newSec, _ := f.NewSection(name) - return newSec - } - - return secs[index] -} - -// Sections returns a list of Section stored in the current instance. -func (f *File) Sections() []*Section { - if f.BlockMode { - f.lock.RLock() - defer f.lock.RUnlock() - } - - sections := make([]*Section, len(f.sectionList)) - for i, name := range f.sectionList { - sections[i] = f.sections[name][f.sectionIndexes[i]] - } - return sections -} - -// ChildSections returns a list of child sections of given section name. -func (f *File) ChildSections(name string) []*Section { - return f.Section(name).ChildSections() -} - -// SectionStrings returns list of section names. 
-func (f *File) SectionStrings() []string { - list := make([]string, len(f.sectionList)) - copy(list, f.sectionList) - return list -} - -// DeleteSection deletes a section or all sections with given name. -func (f *File) DeleteSection(name string) { - secs, err := f.SectionsByName(name) - if err != nil { - return - } - - for i := 0; i < len(secs); i++ { - // For non-unique sections, it is always needed to remove the first one so - // in the next iteration, the subsequent section continue having index 0. - // Ignoring the error as index 0 never returns an error. - _ = f.DeleteSectionWithIndex(name, 0) - } -} - -// DeleteSectionWithIndex deletes a section with given name and index. -func (f *File) DeleteSectionWithIndex(name string, index int) error { - if !f.options.AllowNonUniqueSections && index != 0 { - return fmt.Errorf("delete section with non-zero index is only allowed when non-unique sections is enabled") - } - - if len(name) == 0 { - name = DefaultSection - } - if f.options.Insensitive || f.options.InsensitiveSections { - name = strings.ToLower(name) - } - - if f.BlockMode { - f.lock.Lock() - defer f.lock.Unlock() - } - - // Count occurrences of the sections - occurrences := 0 - - sectionListCopy := make([]string, len(f.sectionList)) - copy(sectionListCopy, f.sectionList) - - for i, s := range sectionListCopy { - if s != name { - continue - } - - if occurrences == index { - if len(f.sections[name]) <= 1 { - delete(f.sections, name) // The last one in the map - } else { - f.sections[name] = append(f.sections[name][:index], f.sections[name][index+1:]...) - } - - // Fix section lists - f.sectionList = append(f.sectionList[:i], f.sectionList[i+1:]...) - f.sectionIndexes = append(f.sectionIndexes[:i], f.sectionIndexes[i+1:]...) - - } else if occurrences > index { - // Fix the indices of all following sections with this name. - f.sectionIndexes[i-1]-- - } - - occurrences++ - } - - return nil -} - -func (f *File) reload(s dataSource) error { - r, err := s.ReadCloser() - if err != nil { - return err - } - defer r.Close() - - return f.parse(r) -} - -// Reload reloads and parses all data sources. -func (f *File) Reload() (err error) { - for _, s := range f.dataSources { - if err = f.reload(s); err != nil { - // In loose mode, we create an empty default section for nonexistent files. - if os.IsNotExist(err) && f.options.Loose { - _ = f.parse(bytes.NewBuffer(nil)) - continue - } - return err - } - if f.options.ShortCircuit { - return nil - } - } - return nil -} - -// Append appends one or more data sources and reloads automatically. -func (f *File) Append(source interface{}, others ...interface{}) error { - ds, err := parseDataSource(source) - if err != nil { - return err - } - f.dataSources = append(f.dataSources, ds) - for _, s := range others { - ds, err = parseDataSource(s) - if err != nil { - return err - } - f.dataSources = append(f.dataSources, ds) - } - return f.Reload() -} - -func (f *File) writeToBuffer(indent string) (*bytes.Buffer, error) { - equalSign := DefaultFormatLeft + f.options.KeyValueDelimiterOnWrite + DefaultFormatRight - - if PrettyFormat || PrettyEqual { - equalSign = fmt.Sprintf(" %s ", f.options.KeyValueDelimiterOnWrite) - } - - // Use buffer to make sure target is safe until finish encoding. 
- buf := bytes.NewBuffer(nil) - lastSectionIdx := len(f.sectionList) - 1 - for i, sname := range f.sectionList { - sec := f.SectionWithIndex(sname, f.sectionIndexes[i]) - if len(sec.Comment) > 0 { - // Support multiline comments - lines := strings.Split(sec.Comment, LineBreak) - for i := range lines { - if lines[i][0] != '#' && lines[i][0] != ';' { - lines[i] = "; " + lines[i] - } else { - lines[i] = lines[i][:1] + " " + strings.TrimSpace(lines[i][1:]) - } - - if _, err := buf.WriteString(lines[i] + LineBreak); err != nil { - return nil, err - } - } - } - - if i > 0 || DefaultHeader || (i == 0 && strings.ToUpper(sec.name) != DefaultSection) { - if _, err := buf.WriteString("[" + sname + "]" + LineBreak); err != nil { - return nil, err - } - } else { - // Write nothing if default section is empty - if len(sec.keyList) == 0 { - continue - } - } - - isLastSection := i == lastSectionIdx - if sec.isRawSection { - if _, err := buf.WriteString(sec.rawBody); err != nil { - return nil, err - } - - if PrettySection && !isLastSection { - // Put a line between sections - if _, err := buf.WriteString(LineBreak); err != nil { - return nil, err - } - } - continue - } - - // Count and generate alignment length and buffer spaces using the - // longest key. Keys may be modified if they contain certain characters so - // we need to take that into account in our calculation. - alignLength := 0 - if PrettyFormat { - for _, kname := range sec.keyList { - keyLength := len(kname) - // First case will surround key by ` and second by """ - if strings.Contains(kname, "\"") || strings.ContainsAny(kname, f.options.KeyValueDelimiters) { - keyLength += 2 - } else if strings.Contains(kname, "`") { - keyLength += 6 - } - - if keyLength > alignLength { - alignLength = keyLength - } - } - } - alignSpaces := bytes.Repeat([]byte(" "), alignLength) - - KeyList: - for _, kname := range sec.keyList { - key := sec.Key(kname) - if len(key.Comment) > 0 { - if len(indent) > 0 && sname != DefaultSection { - buf.WriteString(indent) - } - - // Support multiline comments - lines := strings.Split(key.Comment, LineBreak) - for i := range lines { - if lines[i][0] != '#' && lines[i][0] != ';' { - lines[i] = "; " + strings.TrimSpace(lines[i]) - } else { - lines[i] = lines[i][:1] + " " + strings.TrimSpace(lines[i][1:]) - } - - if _, err := buf.WriteString(lines[i] + LineBreak); err != nil { - return nil, err - } - } - } - - if len(indent) > 0 && sname != DefaultSection { - buf.WriteString(indent) - } - - switch { - case key.isAutoIncrement: - kname = "-" - case strings.Contains(kname, "\"") || strings.ContainsAny(kname, f.options.KeyValueDelimiters): - kname = "`" + kname + "`" - case strings.Contains(kname, "`"): - kname = `"""` + kname + `"""` - } - - writeKeyValue := func(val string) (bool, error) { - if _, err := buf.WriteString(kname); err != nil { - return false, err - } - - if key.isBooleanType { - buf.WriteString(LineBreak) - return true, nil - } - - // Write out alignment spaces before "=" sign - if PrettyFormat { - buf.Write(alignSpaces[:alignLength-len(kname)]) - } - - // In case key value contains "\n", "`", "\"", "#" or ";" - if strings.ContainsAny(val, "\n`") { - val = `"""` + val + `"""` - } else if !f.options.IgnoreInlineComment && strings.ContainsAny(val, "#;") { - val = "`" + val + "`" - } else if len(strings.TrimSpace(val)) != len(val) { - val = `"` + val + `"` - } - if _, err := buf.WriteString(equalSign + val + LineBreak); err != nil { - return false, err - } - return false, nil - } - - shadows := key.ValueWithShadows() - 
if len(shadows) == 0 { - if _, err := writeKeyValue(""); err != nil { - return nil, err - } - } - - for _, val := range shadows { - exitLoop, err := writeKeyValue(val) - if err != nil { - return nil, err - } else if exitLoop { - continue KeyList - } - } - - for _, val := range key.nestedValues { - if _, err := buf.WriteString(indent + " " + val + LineBreak); err != nil { - return nil, err - } - } - } - - if PrettySection && !isLastSection { - // Put a line between sections - if _, err := buf.WriteString(LineBreak); err != nil { - return nil, err - } - } - } - - return buf, nil -} - -// WriteToIndent writes content into io.Writer with given indention. -// If PrettyFormat has been set to be true, -// it will align "=" sign with spaces under each section. -func (f *File) WriteToIndent(w io.Writer, indent string) (int64, error) { - buf, err := f.writeToBuffer(indent) - if err != nil { - return 0, err - } - return buf.WriteTo(w) -} - -// WriteTo writes file content into io.Writer. -func (f *File) WriteTo(w io.Writer) (int64, error) { - return f.WriteToIndent(w, "") -} - -// SaveToIndent writes content to file system with given value indention. -func (f *File) SaveToIndent(filename, indent string) error { - // Note: Because we are truncating with os.Create, - // so it's safer to save to a temporary file location and rename after done. - buf, err := f.writeToBuffer(indent) - if err != nil { - return err - } - - return ioutil.WriteFile(filename, buf.Bytes(), 0666) -} - -// SaveTo writes content to file system. -func (f *File) SaveTo(filename string) error { - return f.SaveToIndent(filename, "") -} diff --git a/vendor/gopkg.in/ini.v1/helper.go b/vendor/gopkg.in/ini.v1/helper.go deleted file mode 100644 index f9d80a682..000000000 --- a/vendor/gopkg.in/ini.v1/helper.go +++ /dev/null @@ -1,24 +0,0 @@ -// Copyright 2019 Unknwon -// -// Licensed under the Apache License, Version 2.0 (the "License"): you may -// not use this file except in compliance with the License. You may obtain -// a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -// License for the specific language governing permissions and limitations -// under the License. - -package ini - -func inSlice(str string, s []string) bool { - for _, v := range s { - if str == v { - return true - } - } - return false -} diff --git a/vendor/gopkg.in/ini.v1/ini.go b/vendor/gopkg.in/ini.v1/ini.go deleted file mode 100644 index 99e7f8651..000000000 --- a/vendor/gopkg.in/ini.v1/ini.go +++ /dev/null @@ -1,176 +0,0 @@ -// Copyright 2014 Unknwon -// -// Licensed under the Apache License, Version 2.0 (the "License"): you may -// not use this file except in compliance with the License. You may obtain -// a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -// License for the specific language governing permissions and limitations -// under the License. - -// Package ini provides INI file read and write functionality in Go. 
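// Illustrative usage sketch (not from the vendored source): a minimal example of the
// write path shown above, assuming `import ini "gopkg.in/ini.v1"` plus the standard
// "bytes" package. PrettyFormat aligns "=" signs; SaveTo writes via ioutil.WriteFile.
func exampleWriteINI() error {
	f, err := ini.Load([]byte("[server]\nhost = example.com\nport = 8080\n"))
	if err != nil {
		return err
	}
	f.Section("server").Key("port").SetValue("9090")

	ini.PrettyFormat = true // align the "=" signs inside each section on output
	var buf bytes.Buffer
	if _, err := f.WriteTo(&buf); err != nil { // same buffer-based path SaveTo uses
		return err
	}
	return f.SaveTo("server.ini")
}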
-package ini - -import ( - "os" - "regexp" - "runtime" - "strings" -) - -const ( - // Maximum allowed depth when recursively substituing variable names. - depthValues = 99 -) - -var ( - // DefaultSection is the name of default section. You can use this var or the string literal. - // In most of cases, an empty string is all you need to access the section. - DefaultSection = "DEFAULT" - - // LineBreak is the delimiter to determine or compose a new line. - // This variable will be changed to "\r\n" automatically on Windows at package init time. - LineBreak = "\n" - - // Variable regexp pattern: %(variable)s - varPattern = regexp.MustCompile(`%\(([^)]+)\)s`) - - // DefaultHeader explicitly writes default section header. - DefaultHeader = false - - // PrettySection indicates whether to put a line between sections. - PrettySection = true - // PrettyFormat indicates whether to align "=" sign with spaces to produce pretty output - // or reduce all possible spaces for compact format. - PrettyFormat = true - // PrettyEqual places spaces around "=" sign even when PrettyFormat is false. - PrettyEqual = false - // DefaultFormatLeft places custom spaces on the left when PrettyFormat and PrettyEqual are both disabled. - DefaultFormatLeft = "" - // DefaultFormatRight places custom spaces on the right when PrettyFormat and PrettyEqual are both disabled. - DefaultFormatRight = "" -) - -var inTest = len(os.Args) > 0 && strings.HasSuffix(strings.TrimSuffix(os.Args[0], ".exe"), ".test") - -func init() { - if runtime.GOOS == "windows" && !inTest { - LineBreak = "\r\n" - } -} - -// LoadOptions contains all customized options used for load data source(s). -type LoadOptions struct { - // Loose indicates whether the parser should ignore nonexistent files or return error. - Loose bool - // Insensitive indicates whether the parser forces all section and key names to lowercase. - Insensitive bool - // InsensitiveSections indicates whether the parser forces all section to lowercase. - InsensitiveSections bool - // InsensitiveKeys indicates whether the parser forces all key names to lowercase. - InsensitiveKeys bool - // IgnoreContinuation indicates whether to ignore continuation lines while parsing. - IgnoreContinuation bool - // IgnoreInlineComment indicates whether to ignore comments at the end of value and treat it as part of value. - IgnoreInlineComment bool - // SkipUnrecognizableLines indicates whether to skip unrecognizable lines that do not conform to key/value pairs. - SkipUnrecognizableLines bool - // ShortCircuit indicates whether to ignore other configuration sources after loaded the first available configuration source. - ShortCircuit bool - // AllowBooleanKeys indicates whether to allow boolean type keys or treat as value is missing. - // This type of keys are mostly used in my.cnf. - AllowBooleanKeys bool - // AllowShadows indicates whether to keep track of keys with same name under same section. - AllowShadows bool - // AllowNestedValues indicates whether to allow AWS-like nested values. - // Docs: http://docs.aws.amazon.com/cli/latest/topic/config-vars.html#nested-values - AllowNestedValues bool - // AllowPythonMultilineValues indicates whether to allow Python-like multi-line values. - // Docs: https://docs.python.org/3/library/configparser.html#supported-ini-file-structure - // Relevant quote: Values can also span multiple lines, as long as they are indented deeper - // than the first line of the value. 
- AllowPythonMultilineValues bool - // SpaceBeforeInlineComment indicates whether to allow comment symbols (\# and \;) inside value. - // Docs: https://docs.python.org/2/library/configparser.html - // Quote: Comments may appear on their own in an otherwise empty line, or may be entered in lines holding values or section names. - // In the latter case, they need to be preceded by a whitespace character to be recognized as a comment. - SpaceBeforeInlineComment bool - // UnescapeValueDoubleQuotes indicates whether to unescape double quotes inside value to regular format - // when value is surrounded by double quotes, e.g. key="a \"value\"" => key=a "value" - UnescapeValueDoubleQuotes bool - // UnescapeValueCommentSymbols indicates to unescape comment symbols (\# and \;) inside value to regular format - // when value is NOT surrounded by any quotes. - // Note: UNSTABLE, behavior might change to only unescape inside double quotes but may noy necessary at all. - UnescapeValueCommentSymbols bool - // UnparseableSections stores a list of blocks that are allowed with raw content which do not otherwise - // conform to key/value pairs. Specify the names of those blocks here. - UnparseableSections []string - // KeyValueDelimiters is the sequence of delimiters that are used to separate key and value. By default, it is "=:". - KeyValueDelimiters string - // KeyValueDelimiterOnWrite is the delimiter that are used to separate key and value output. By default, it is "=". - KeyValueDelimiterOnWrite string - // ChildSectionDelimiter is the delimiter that is used to separate child sections. By default, it is ".". - ChildSectionDelimiter string - // PreserveSurroundedQuote indicates whether to preserve surrounded quote (single and double quotes). - PreserveSurroundedQuote bool - // DebugFunc is called to collect debug information (currently only useful to debug parsing Python-style multiline values). - DebugFunc DebugFunc - // ReaderBufferSize is the buffer size of the reader in bytes. - ReaderBufferSize int - // AllowNonUniqueSections indicates whether to allow sections with the same name multiple times. - AllowNonUniqueSections bool - // AllowDuplicateShadowValues indicates whether values for shadowed keys should be deduplicated. - AllowDuplicateShadowValues bool -} - -// DebugFunc is the type of function called to log parse events. -type DebugFunc func(message string) - -// LoadSources allows caller to apply customized options for loading from data source(s). -func LoadSources(opts LoadOptions, source interface{}, others ...interface{}) (_ *File, err error) { - sources := make([]dataSource, len(others)+1) - sources[0], err = parseDataSource(source) - if err != nil { - return nil, err - } - for i := range others { - sources[i+1], err = parseDataSource(others[i]) - if err != nil { - return nil, err - } - } - f := newFile(sources, opts) - if err = f.Reload(); err != nil { - return nil, err - } - return f, nil -} - -// Load loads and parses from INI data sources. -// Arguments can be mixed of file name with string type, or raw data in []byte. -// It will return error if list contains nonexistent files. -func Load(source interface{}, others ...interface{}) (*File, error) { - return LoadSources(LoadOptions{}, source, others...) -} - -// LooseLoad has exactly same functionality as Load function -// except it ignores nonexistent files instead of returning error. -func LooseLoad(source interface{}, others ...interface{}) (*File, error) { - return LoadSources(LoadOptions{Loose: true}, source, others...) 
-} - -// InsensitiveLoad has exactly same functionality as Load function -// except it forces all section and key names to be lowercased. -func InsensitiveLoad(source interface{}, others ...interface{}) (*File, error) { - return LoadSources(LoadOptions{Insensitive: true}, source, others...) -} - -// ShadowLoad has exactly same functionality as Load function -// except it allows have shadow keys. -func ShadowLoad(source interface{}, others ...interface{}) (*File, error) { - return LoadSources(LoadOptions{AllowShadows: true}, source, others...) -} diff --git a/vendor/gopkg.in/ini.v1/key.go b/vendor/gopkg.in/ini.v1/key.go deleted file mode 100644 index a19d9f38e..000000000 --- a/vendor/gopkg.in/ini.v1/key.go +++ /dev/null @@ -1,837 +0,0 @@ -// Copyright 2014 Unknwon -// -// Licensed under the Apache License, Version 2.0 (the "License"): you may -// not use this file except in compliance with the License. You may obtain -// a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -// License for the specific language governing permissions and limitations -// under the License. - -package ini - -import ( - "bytes" - "errors" - "fmt" - "strconv" - "strings" - "time" -) - -// Key represents a key under a section. -type Key struct { - s *Section - Comment string - name string - value string - isAutoIncrement bool - isBooleanType bool - - isShadow bool - shadows []*Key - - nestedValues []string -} - -// newKey simply return a key object with given values. -func newKey(s *Section, name, val string) *Key { - return &Key{ - s: s, - name: name, - value: val, - } -} - -func (k *Key) addShadow(val string) error { - if k.isShadow { - return errors.New("cannot add shadow to another shadow key") - } else if k.isAutoIncrement || k.isBooleanType { - return errors.New("cannot add shadow to auto-increment or boolean key") - } - - if !k.s.f.options.AllowDuplicateShadowValues { - // Deduplicate shadows based on their values. - if k.value == val { - return nil - } - for i := range k.shadows { - if k.shadows[i].value == val { - return nil - } - } - } - - shadow := newKey(k.s, k.name, val) - shadow.isShadow = true - k.shadows = append(k.shadows, shadow) - return nil -} - -// AddShadow adds a new shadow key to itself. -func (k *Key) AddShadow(val string) error { - if !k.s.f.options.AllowShadows { - return errors.New("shadow key is not allowed") - } - return k.addShadow(val) -} - -func (k *Key) addNestedValue(val string) error { - if k.isAutoIncrement || k.isBooleanType { - return errors.New("cannot add nested value to auto-increment or boolean key") - } - - k.nestedValues = append(k.nestedValues, val) - return nil -} - -// AddNestedValue adds a nested value to the key. -func (k *Key) AddNestedValue(val string) error { - if !k.s.f.options.AllowNestedValues { - return errors.New("nested value is not allowed") - } - return k.addNestedValue(val) -} - -// ValueMapper represents a mapping function for values, e.g. os.ExpandEnv -type ValueMapper func(string) string - -// Name returns name of key. -func (k *Key) Name() string { - return k.name -} - -// Value returns raw value of key for performance purpose. -func (k *Key) Value() string { - return k.value -} - -// ValueWithShadows returns raw values of key and its shadows if any. 
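// Illustrative usage sketch (not from the vendored source): the shadow-key flow defined
// above, assuming `import ini "gopkg.in/ini.v1"`. ShadowLoad enables AllowShadows, repeated
// keys become shadows, and ValueWithShadows returns every non-empty value.
func exampleShadowKeys() ([]string, error) {
	f, err := ini.ShadowLoad([]byte("[peers]\naddr = 10.0.0.1\naddr = 10.0.0.2\n"))
	if err != nil {
		return nil, err
	}
	key := f.Section("peers").Key("addr")
	if err := key.AddShadow("10.0.0.3"); err != nil { // rejected unless AllowShadows is enabled
		return nil, err
	}
	return key.ValueWithShadows(), nil // ["10.0.0.1", "10.0.0.2", "10.0.0.3"]
}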
Shadow -// keys with empty values are ignored from the returned list. -func (k *Key) ValueWithShadows() []string { - if len(k.shadows) == 0 { - if k.value == "" { - return []string{} - } - return []string{k.value} - } - - vals := make([]string, 0, len(k.shadows)+1) - if k.value != "" { - vals = append(vals, k.value) - } - for _, s := range k.shadows { - if s.value != "" { - vals = append(vals, s.value) - } - } - return vals -} - -// NestedValues returns nested values stored in the key. -// It is possible returned value is nil if no nested values stored in the key. -func (k *Key) NestedValues() []string { - return k.nestedValues -} - -// transformValue takes a raw value and transforms to its final string. -func (k *Key) transformValue(val string) string { - if k.s.f.ValueMapper != nil { - val = k.s.f.ValueMapper(val) - } - - // Fail-fast if no indicate char found for recursive value - if !strings.Contains(val, "%") { - return val - } - for i := 0; i < depthValues; i++ { - vr := varPattern.FindString(val) - if len(vr) == 0 { - break - } - - // Take off leading '%(' and trailing ')s'. - noption := vr[2 : len(vr)-2] - - // Search in the same section. - // If not found or found the key itself, then search again in default section. - nk, err := k.s.GetKey(noption) - if err != nil || k == nk { - nk, _ = k.s.f.Section("").GetKey(noption) - if nk == nil { - // Stop when no results found in the default section, - // and returns the value as-is. - break - } - } - - // Substitute by new value and take off leading '%(' and trailing ')s'. - val = strings.Replace(val, vr, nk.value, -1) - } - return val -} - -// String returns string representation of value. -func (k *Key) String() string { - return k.transformValue(k.value) -} - -// Validate accepts a validate function which can -// return modifed result as key value. -func (k *Key) Validate(fn func(string) string) string { - return fn(k.String()) -} - -// parseBool returns the boolean value represented by the string. -// -// It accepts 1, t, T, TRUE, true, True, YES, yes, Yes, y, ON, on, On, -// 0, f, F, FALSE, false, False, NO, no, No, n, OFF, off, Off. -// Any other value returns an error. -func parseBool(str string) (value bool, err error) { - switch str { - case "1", "t", "T", "true", "TRUE", "True", "YES", "yes", "Yes", "y", "ON", "on", "On": - return true, nil - case "0", "f", "F", "false", "FALSE", "False", "NO", "no", "No", "n", "OFF", "off", "Off": - return false, nil - } - return false, fmt.Errorf("parsing \"%s\": invalid syntax", str) -} - -// Bool returns bool type value. -func (k *Key) Bool() (bool, error) { - return parseBool(k.String()) -} - -// Float64 returns float64 type value. -func (k *Key) Float64() (float64, error) { - return strconv.ParseFloat(k.String(), 64) -} - -// Int returns int type value. -func (k *Key) Int() (int, error) { - v, err := strconv.ParseInt(k.String(), 0, 64) - return int(v), err -} - -// Int64 returns int64 type value. -func (k *Key) Int64() (int64, error) { - return strconv.ParseInt(k.String(), 0, 64) -} - -// Uint returns uint type valued. -func (k *Key) Uint() (uint, error) { - u, e := strconv.ParseUint(k.String(), 0, 64) - return uint(u), e -} - -// Uint64 returns uint64 type value. -func (k *Key) Uint64() (uint64, error) { - return strconv.ParseUint(k.String(), 0, 64) -} - -// Duration returns time.Duration type value. -func (k *Key) Duration() (time.Duration, error) { - return time.ParseDuration(k.String()) -} - -// TimeFormat parses with given format and returns time.Time type value. 
-func (k *Key) TimeFormat(format string) (time.Time, error) { - return time.Parse(format, k.String()) -} - -// Time parses with RFC3339 format and returns time.Time type value. -func (k *Key) Time() (time.Time, error) { - return k.TimeFormat(time.RFC3339) -} - -// MustString returns default value if key value is empty. -func (k *Key) MustString(defaultVal string) string { - val := k.String() - if len(val) == 0 { - k.value = defaultVal - return defaultVal - } - return val -} - -// MustBool always returns value without error, -// it returns false if error occurs. -func (k *Key) MustBool(defaultVal ...bool) bool { - val, err := k.Bool() - if len(defaultVal) > 0 && err != nil { - k.value = strconv.FormatBool(defaultVal[0]) - return defaultVal[0] - } - return val -} - -// MustFloat64 always returns value without error, -// it returns 0.0 if error occurs. -func (k *Key) MustFloat64(defaultVal ...float64) float64 { - val, err := k.Float64() - if len(defaultVal) > 0 && err != nil { - k.value = strconv.FormatFloat(defaultVal[0], 'f', -1, 64) - return defaultVal[0] - } - return val -} - -// MustInt always returns value without error, -// it returns 0 if error occurs. -func (k *Key) MustInt(defaultVal ...int) int { - val, err := k.Int() - if len(defaultVal) > 0 && err != nil { - k.value = strconv.FormatInt(int64(defaultVal[0]), 10) - return defaultVal[0] - } - return val -} - -// MustInt64 always returns value without error, -// it returns 0 if error occurs. -func (k *Key) MustInt64(defaultVal ...int64) int64 { - val, err := k.Int64() - if len(defaultVal) > 0 && err != nil { - k.value = strconv.FormatInt(defaultVal[0], 10) - return defaultVal[0] - } - return val -} - -// MustUint always returns value without error, -// it returns 0 if error occurs. -func (k *Key) MustUint(defaultVal ...uint) uint { - val, err := k.Uint() - if len(defaultVal) > 0 && err != nil { - k.value = strconv.FormatUint(uint64(defaultVal[0]), 10) - return defaultVal[0] - } - return val -} - -// MustUint64 always returns value without error, -// it returns 0 if error occurs. -func (k *Key) MustUint64(defaultVal ...uint64) uint64 { - val, err := k.Uint64() - if len(defaultVal) > 0 && err != nil { - k.value = strconv.FormatUint(defaultVal[0], 10) - return defaultVal[0] - } - return val -} - -// MustDuration always returns value without error, -// it returns zero value if error occurs. -func (k *Key) MustDuration(defaultVal ...time.Duration) time.Duration { - val, err := k.Duration() - if len(defaultVal) > 0 && err != nil { - k.value = defaultVal[0].String() - return defaultVal[0] - } - return val -} - -// MustTimeFormat always parses with given format and returns value without error, -// it returns zero value if error occurs. -func (k *Key) MustTimeFormat(format string, defaultVal ...time.Time) time.Time { - val, err := k.TimeFormat(format) - if len(defaultVal) > 0 && err != nil { - k.value = defaultVal[0].Format(format) - return defaultVal[0] - } - return val -} - -// MustTime always parses with RFC3339 format and returns value without error, -// it returns zero value if error occurs. -func (k *Key) MustTime(defaultVal ...time.Time) time.Time { - return k.MustTimeFormat(time.RFC3339, defaultVal...) -} - -// In always returns value without error, -// it returns default value if error occurs or doesn't fit into candidates. 
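// Illustrative usage sketch (not from the vendored source): the Must* helpers above never
// return an error; on a parse failure or missing key they fall back to the supplied default
// and overwrite the key's raw value with it. Assumes `import ini "gopkg.in/ini.v1"` and "time".
func exampleMustHelpers() {
	f, _ := ini.Load([]byte("[cache]\nsize = not-a-number\n"))
	sec := f.Section("cache")
	size := sec.Key("size").MustInt(128)                // 128: "not-a-number" fails strconv parsing
	ttl := sec.Key("ttl").MustDuration(5 * time.Minute) // 5m0s: the key does not exist
	ok := sec.Key("enabled").MustBool(true)             // true: an empty value is not a valid bool
	_, _, _ = size, ttl, ok
}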
-func (k *Key) In(defaultVal string, candidates []string) string { - val := k.String() - for _, cand := range candidates { - if val == cand { - return val - } - } - return defaultVal -} - -// InFloat64 always returns value without error, -// it returns default value if error occurs or doesn't fit into candidates. -func (k *Key) InFloat64(defaultVal float64, candidates []float64) float64 { - val := k.MustFloat64() - for _, cand := range candidates { - if val == cand { - return val - } - } - return defaultVal -} - -// InInt always returns value without error, -// it returns default value if error occurs or doesn't fit into candidates. -func (k *Key) InInt(defaultVal int, candidates []int) int { - val := k.MustInt() - for _, cand := range candidates { - if val == cand { - return val - } - } - return defaultVal -} - -// InInt64 always returns value without error, -// it returns default value if error occurs or doesn't fit into candidates. -func (k *Key) InInt64(defaultVal int64, candidates []int64) int64 { - val := k.MustInt64() - for _, cand := range candidates { - if val == cand { - return val - } - } - return defaultVal -} - -// InUint always returns value without error, -// it returns default value if error occurs or doesn't fit into candidates. -func (k *Key) InUint(defaultVal uint, candidates []uint) uint { - val := k.MustUint() - for _, cand := range candidates { - if val == cand { - return val - } - } - return defaultVal -} - -// InUint64 always returns value without error, -// it returns default value if error occurs or doesn't fit into candidates. -func (k *Key) InUint64(defaultVal uint64, candidates []uint64) uint64 { - val := k.MustUint64() - for _, cand := range candidates { - if val == cand { - return val - } - } - return defaultVal -} - -// InTimeFormat always parses with given format and returns value without error, -// it returns default value if error occurs or doesn't fit into candidates. -func (k *Key) InTimeFormat(format string, defaultVal time.Time, candidates []time.Time) time.Time { - val := k.MustTimeFormat(format) - for _, cand := range candidates { - if val == cand { - return val - } - } - return defaultVal -} - -// InTime always parses with RFC3339 format and returns value without error, -// it returns default value if error occurs or doesn't fit into candidates. -func (k *Key) InTime(defaultVal time.Time, candidates []time.Time) time.Time { - return k.InTimeFormat(time.RFC3339, defaultVal, candidates) -} - -// RangeFloat64 checks if value is in given range inclusively, -// and returns default value if it's not. -func (k *Key) RangeFloat64(defaultVal, min, max float64) float64 { - val := k.MustFloat64() - if val < min || val > max { - return defaultVal - } - return val -} - -// RangeInt checks if value is in given range inclusively, -// and returns default value if it's not. -func (k *Key) RangeInt(defaultVal, min, max int) int { - val := k.MustInt() - if val < min || val > max { - return defaultVal - } - return val -} - -// RangeInt64 checks if value is in given range inclusively, -// and returns default value if it's not. -func (k *Key) RangeInt64(defaultVal, min, max int64) int64 { - val := k.MustInt64() - if val < min || val > max { - return defaultVal - } - return val -} - -// RangeTimeFormat checks if value with given format is in given range inclusively, -// and returns default value if it's not. 
-func (k *Key) RangeTimeFormat(format string, defaultVal, min, max time.Time) time.Time { - val := k.MustTimeFormat(format) - if val.Unix() < min.Unix() || val.Unix() > max.Unix() { - return defaultVal - } - return val -} - -// RangeTime checks if value with RFC3339 format is in given range inclusively, -// and returns default value if it's not. -func (k *Key) RangeTime(defaultVal, min, max time.Time) time.Time { - return k.RangeTimeFormat(time.RFC3339, defaultVal, min, max) -} - -// Strings returns list of string divided by given delimiter. -func (k *Key) Strings(delim string) []string { - str := k.String() - if len(str) == 0 { - return []string{} - } - - runes := []rune(str) - vals := make([]string, 0, 2) - var buf bytes.Buffer - escape := false - idx := 0 - for { - if escape { - escape = false - if runes[idx] != '\\' && !strings.HasPrefix(string(runes[idx:]), delim) { - buf.WriteRune('\\') - } - buf.WriteRune(runes[idx]) - } else { - if runes[idx] == '\\' { - escape = true - } else if strings.HasPrefix(string(runes[idx:]), delim) { - idx += len(delim) - 1 - vals = append(vals, strings.TrimSpace(buf.String())) - buf.Reset() - } else { - buf.WriteRune(runes[idx]) - } - } - idx++ - if idx == len(runes) { - break - } - } - - if buf.Len() > 0 { - vals = append(vals, strings.TrimSpace(buf.String())) - } - - return vals -} - -// StringsWithShadows returns list of string divided by given delimiter. -// Shadows will also be appended if any. -func (k *Key) StringsWithShadows(delim string) []string { - vals := k.ValueWithShadows() - results := make([]string, 0, len(vals)*2) - for i := range vals { - if len(vals) == 0 { - continue - } - - results = append(results, strings.Split(vals[i], delim)...) - } - - for i := range results { - results[i] = k.transformValue(strings.TrimSpace(results[i])) - } - return results -} - -// Float64s returns list of float64 divided by given delimiter. Any invalid input will be treated as zero value. -func (k *Key) Float64s(delim string) []float64 { - vals, _ := k.parseFloat64s(k.Strings(delim), true, false) - return vals -} - -// Ints returns list of int divided by given delimiter. Any invalid input will be treated as zero value. -func (k *Key) Ints(delim string) []int { - vals, _ := k.parseInts(k.Strings(delim), true, false) - return vals -} - -// Int64s returns list of int64 divided by given delimiter. Any invalid input will be treated as zero value. -func (k *Key) Int64s(delim string) []int64 { - vals, _ := k.parseInt64s(k.Strings(delim), true, false) - return vals -} - -// Uints returns list of uint divided by given delimiter. Any invalid input will be treated as zero value. -func (k *Key) Uints(delim string) []uint { - vals, _ := k.parseUints(k.Strings(delim), true, false) - return vals -} - -// Uint64s returns list of uint64 divided by given delimiter. Any invalid input will be treated as zero value. -func (k *Key) Uint64s(delim string) []uint64 { - vals, _ := k.parseUint64s(k.Strings(delim), true, false) - return vals -} - -// Bools returns list of bool divided by given delimiter. Any invalid input will be treated as zero value. -func (k *Key) Bools(delim string) []bool { - vals, _ := k.parseBools(k.Strings(delim), true, false) - return vals -} - -// TimesFormat parses with given format and returns list of time.Time divided by given delimiter. -// Any invalid input will be treated as zero value (0001-01-01 00:00:00 +0000 UTC). 
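// Illustrative usage sketch (not from the vendored source): Strings splits on a delimiter
// but honours "\,"-style escapes, and the Range* helpers above fall back to a default when
// the value is out of range. Assumes `import ini "gopkg.in/ini.v1"`.
func exampleListsAndRanges() {
	f, _ := ini.Load([]byte("[app]\ntags = red, green\\,blue, yellow\nworkers = 99\n"))
	sec := f.Section("app")
	tags := sec.Key("tags").Strings(",")             // ["red", "green,blue", "yellow"]
	workers := sec.Key("workers").RangeInt(4, 1, 32) // 4: 99 falls outside [1, 32]
	_, _ = tags, workers
}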
-func (k *Key) TimesFormat(format, delim string) []time.Time { - vals, _ := k.parseTimesFormat(format, k.Strings(delim), true, false) - return vals -} - -// Times parses with RFC3339 format and returns list of time.Time divided by given delimiter. -// Any invalid input will be treated as zero value (0001-01-01 00:00:00 +0000 UTC). -func (k *Key) Times(delim string) []time.Time { - return k.TimesFormat(time.RFC3339, delim) -} - -// ValidFloat64s returns list of float64 divided by given delimiter. If some value is not float, then -// it will not be included to result list. -func (k *Key) ValidFloat64s(delim string) []float64 { - vals, _ := k.parseFloat64s(k.Strings(delim), false, false) - return vals -} - -// ValidInts returns list of int divided by given delimiter. If some value is not integer, then it will -// not be included to result list. -func (k *Key) ValidInts(delim string) []int { - vals, _ := k.parseInts(k.Strings(delim), false, false) - return vals -} - -// ValidInt64s returns list of int64 divided by given delimiter. If some value is not 64-bit integer, -// then it will not be included to result list. -func (k *Key) ValidInt64s(delim string) []int64 { - vals, _ := k.parseInt64s(k.Strings(delim), false, false) - return vals -} - -// ValidUints returns list of uint divided by given delimiter. If some value is not unsigned integer, -// then it will not be included to result list. -func (k *Key) ValidUints(delim string) []uint { - vals, _ := k.parseUints(k.Strings(delim), false, false) - return vals -} - -// ValidUint64s returns list of uint64 divided by given delimiter. If some value is not 64-bit unsigned -// integer, then it will not be included to result list. -func (k *Key) ValidUint64s(delim string) []uint64 { - vals, _ := k.parseUint64s(k.Strings(delim), false, false) - return vals -} - -// ValidBools returns list of bool divided by given delimiter. If some value is not 64-bit unsigned -// integer, then it will not be included to result list. -func (k *Key) ValidBools(delim string) []bool { - vals, _ := k.parseBools(k.Strings(delim), false, false) - return vals -} - -// ValidTimesFormat parses with given format and returns list of time.Time divided by given delimiter. -func (k *Key) ValidTimesFormat(format, delim string) []time.Time { - vals, _ := k.parseTimesFormat(format, k.Strings(delim), false, false) - return vals -} - -// ValidTimes parses with RFC3339 format and returns list of time.Time divided by given delimiter. -func (k *Key) ValidTimes(delim string) []time.Time { - return k.ValidTimesFormat(time.RFC3339, delim) -} - -// StrictFloat64s returns list of float64 divided by given delimiter or error on first invalid input. -func (k *Key) StrictFloat64s(delim string) ([]float64, error) { - return k.parseFloat64s(k.Strings(delim), false, true) -} - -// StrictInts returns list of int divided by given delimiter or error on first invalid input. -func (k *Key) StrictInts(delim string) ([]int, error) { - return k.parseInts(k.Strings(delim), false, true) -} - -// StrictInt64s returns list of int64 divided by given delimiter or error on first invalid input. -func (k *Key) StrictInt64s(delim string) ([]int64, error) { - return k.parseInt64s(k.Strings(delim), false, true) -} - -// StrictUints returns list of uint divided by given delimiter or error on first invalid input. 
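// Illustrative usage sketch (not from the vendored source): the three list-getter families
// above differ only in how they treat invalid entries. Assumes `import ini "gopkg.in/ini.v1"`.
func exampleListGetters() error {
	f, err := ini.Load([]byte("ports = 80, oops, 443\n"))
	if err != nil {
		return err
	}
	key := f.Section("").Key("ports")
	_ = key.Ints(",")      // [80, 0, 443]: invalid entries become zero values
	_ = key.ValidInts(",") // [80, 443]:    invalid entries are dropped
	_, err = key.StrictInts(",")
	return err // non-nil: "oops" is reported as the first invalid input
}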
-func (k *Key) StrictUints(delim string) ([]uint, error) { - return k.parseUints(k.Strings(delim), false, true) -} - -// StrictUint64s returns list of uint64 divided by given delimiter or error on first invalid input. -func (k *Key) StrictUint64s(delim string) ([]uint64, error) { - return k.parseUint64s(k.Strings(delim), false, true) -} - -// StrictBools returns list of bool divided by given delimiter or error on first invalid input. -func (k *Key) StrictBools(delim string) ([]bool, error) { - return k.parseBools(k.Strings(delim), false, true) -} - -// StrictTimesFormat parses with given format and returns list of time.Time divided by given delimiter -// or error on first invalid input. -func (k *Key) StrictTimesFormat(format, delim string) ([]time.Time, error) { - return k.parseTimesFormat(format, k.Strings(delim), false, true) -} - -// StrictTimes parses with RFC3339 format and returns list of time.Time divided by given delimiter -// or error on first invalid input. -func (k *Key) StrictTimes(delim string) ([]time.Time, error) { - return k.StrictTimesFormat(time.RFC3339, delim) -} - -// parseBools transforms strings to bools. -func (k *Key) parseBools(strs []string, addInvalid, returnOnInvalid bool) ([]bool, error) { - vals := make([]bool, 0, len(strs)) - parser := func(str string) (interface{}, error) { - val, err := parseBool(str) - return val, err - } - rawVals, err := k.doParse(strs, addInvalid, returnOnInvalid, parser) - if err == nil { - for _, val := range rawVals { - vals = append(vals, val.(bool)) - } - } - return vals, err -} - -// parseFloat64s transforms strings to float64s. -func (k *Key) parseFloat64s(strs []string, addInvalid, returnOnInvalid bool) ([]float64, error) { - vals := make([]float64, 0, len(strs)) - parser := func(str string) (interface{}, error) { - val, err := strconv.ParseFloat(str, 64) - return val, err - } - rawVals, err := k.doParse(strs, addInvalid, returnOnInvalid, parser) - if err == nil { - for _, val := range rawVals { - vals = append(vals, val.(float64)) - } - } - return vals, err -} - -// parseInts transforms strings to ints. -func (k *Key) parseInts(strs []string, addInvalid, returnOnInvalid bool) ([]int, error) { - vals := make([]int, 0, len(strs)) - parser := func(str string) (interface{}, error) { - val, err := strconv.ParseInt(str, 0, 64) - return val, err - } - rawVals, err := k.doParse(strs, addInvalid, returnOnInvalid, parser) - if err == nil { - for _, val := range rawVals { - vals = append(vals, int(val.(int64))) - } - } - return vals, err -} - -// parseInt64s transforms strings to int64s. -func (k *Key) parseInt64s(strs []string, addInvalid, returnOnInvalid bool) ([]int64, error) { - vals := make([]int64, 0, len(strs)) - parser := func(str string) (interface{}, error) { - val, err := strconv.ParseInt(str, 0, 64) - return val, err - } - - rawVals, err := k.doParse(strs, addInvalid, returnOnInvalid, parser) - if err == nil { - for _, val := range rawVals { - vals = append(vals, val.(int64)) - } - } - return vals, err -} - -// parseUints transforms strings to uints. 
-func (k *Key) parseUints(strs []string, addInvalid, returnOnInvalid bool) ([]uint, error) { - vals := make([]uint, 0, len(strs)) - parser := func(str string) (interface{}, error) { - val, err := strconv.ParseUint(str, 0, 64) - return val, err - } - - rawVals, err := k.doParse(strs, addInvalid, returnOnInvalid, parser) - if err == nil { - for _, val := range rawVals { - vals = append(vals, uint(val.(uint64))) - } - } - return vals, err -} - -// parseUint64s transforms strings to uint64s. -func (k *Key) parseUint64s(strs []string, addInvalid, returnOnInvalid bool) ([]uint64, error) { - vals := make([]uint64, 0, len(strs)) - parser := func(str string) (interface{}, error) { - val, err := strconv.ParseUint(str, 0, 64) - return val, err - } - rawVals, err := k.doParse(strs, addInvalid, returnOnInvalid, parser) - if err == nil { - for _, val := range rawVals { - vals = append(vals, val.(uint64)) - } - } - return vals, err -} - -type Parser func(str string) (interface{}, error) - -// parseTimesFormat transforms strings to times in given format. -func (k *Key) parseTimesFormat(format string, strs []string, addInvalid, returnOnInvalid bool) ([]time.Time, error) { - vals := make([]time.Time, 0, len(strs)) - parser := func(str string) (interface{}, error) { - val, err := time.Parse(format, str) - return val, err - } - rawVals, err := k.doParse(strs, addInvalid, returnOnInvalid, parser) - if err == nil { - for _, val := range rawVals { - vals = append(vals, val.(time.Time)) - } - } - return vals, err -} - -// doParse transforms strings to different types -func (k *Key) doParse(strs []string, addInvalid, returnOnInvalid bool, parser Parser) ([]interface{}, error) { - vals := make([]interface{}, 0, len(strs)) - for _, str := range strs { - val, err := parser(str) - if err != nil && returnOnInvalid { - return nil, err - } - if err == nil || addInvalid { - vals = append(vals, val) - } - } - return vals, nil -} - -// SetValue changes key value. -func (k *Key) SetValue(v string) { - if k.s.f.BlockMode { - k.s.f.lock.Lock() - defer k.s.f.lock.Unlock() - } - - k.value = v - k.s.keysHash[k.name] = v -} diff --git a/vendor/gopkg.in/ini.v1/parser.go b/vendor/gopkg.in/ini.v1/parser.go deleted file mode 100644 index 44fc526c2..000000000 --- a/vendor/gopkg.in/ini.v1/parser.go +++ /dev/null @@ -1,520 +0,0 @@ -// Copyright 2015 Unknwon -// -// Licensed under the Apache License, Version 2.0 (the "License"): you may -// not use this file except in compliance with the License. You may obtain -// a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -// License for the specific language governing permissions and limitations -// under the License. 
- -package ini - -import ( - "bufio" - "bytes" - "fmt" - "io" - "regexp" - "strconv" - "strings" - "unicode" -) - -const minReaderBufferSize = 4096 - -var pythonMultiline = regexp.MustCompile(`^([\t\f ]+)(.*)`) - -type parserOptions struct { - IgnoreContinuation bool - IgnoreInlineComment bool - AllowPythonMultilineValues bool - SpaceBeforeInlineComment bool - UnescapeValueDoubleQuotes bool - UnescapeValueCommentSymbols bool - PreserveSurroundedQuote bool - DebugFunc DebugFunc - ReaderBufferSize int -} - -type parser struct { - buf *bufio.Reader - options parserOptions - - isEOF bool - count int - comment *bytes.Buffer -} - -func (p *parser) debug(format string, args ...interface{}) { - if p.options.DebugFunc != nil { - p.options.DebugFunc(fmt.Sprintf(format, args...)) - } -} - -func newParser(r io.Reader, opts parserOptions) *parser { - size := opts.ReaderBufferSize - if size < minReaderBufferSize { - size = minReaderBufferSize - } - - return &parser{ - buf: bufio.NewReaderSize(r, size), - options: opts, - count: 1, - comment: &bytes.Buffer{}, - } -} - -// BOM handles header of UTF-8, UTF-16 LE and UTF-16 BE's BOM format. -// http://en.wikipedia.org/wiki/Byte_order_mark#Representations_of_byte_order_marks_by_encoding -func (p *parser) BOM() error { - mask, err := p.buf.Peek(2) - if err != nil && err != io.EOF { - return err - } else if len(mask) < 2 { - return nil - } - - switch { - case mask[0] == 254 && mask[1] == 255: - fallthrough - case mask[0] == 255 && mask[1] == 254: - _, err = p.buf.Read(mask) - if err != nil { - return err - } - case mask[0] == 239 && mask[1] == 187: - mask, err := p.buf.Peek(3) - if err != nil && err != io.EOF { - return err - } else if len(mask) < 3 { - return nil - } - if mask[2] == 191 { - _, err = p.buf.Read(mask) - if err != nil { - return err - } - } - } - return nil -} - -func (p *parser) readUntil(delim byte) ([]byte, error) { - data, err := p.buf.ReadBytes(delim) - if err != nil { - if err == io.EOF { - p.isEOF = true - } else { - return nil, err - } - } - return data, nil -} - -func cleanComment(in []byte) ([]byte, bool) { - i := bytes.IndexAny(in, "#;") - if i == -1 { - return nil, false - } - return in[i:], true -} - -func readKeyName(delimiters string, in []byte) (string, int, error) { - line := string(in) - - // Check if key name surrounded by quotes. 
- var keyQuote string - if line[0] == '"' { - if len(line) > 6 && line[0:3] == `"""` { - keyQuote = `"""` - } else { - keyQuote = `"` - } - } else if line[0] == '`' { - keyQuote = "`" - } - - // Get out key name - var endIdx int - if len(keyQuote) > 0 { - startIdx := len(keyQuote) - // FIXME: fail case -> """"""name"""=value - pos := strings.Index(line[startIdx:], keyQuote) - if pos == -1 { - return "", -1, fmt.Errorf("missing closing key quote: %s", line) - } - pos += startIdx - - // Find key-value delimiter - i := strings.IndexAny(line[pos+startIdx:], delimiters) - if i < 0 { - return "", -1, ErrDelimiterNotFound{line} - } - endIdx = pos + i - return strings.TrimSpace(line[startIdx:pos]), endIdx + startIdx + 1, nil - } - - endIdx = strings.IndexAny(line, delimiters) - if endIdx < 0 { - return "", -1, ErrDelimiterNotFound{line} - } - if endIdx == 0 { - return "", -1, ErrEmptyKeyName{line} - } - - return strings.TrimSpace(line[0:endIdx]), endIdx + 1, nil -} - -func (p *parser) readMultilines(line, val, valQuote string) (string, error) { - for { - data, err := p.readUntil('\n') - if err != nil { - return "", err - } - next := string(data) - - pos := strings.LastIndex(next, valQuote) - if pos > -1 { - val += next[:pos] - - comment, has := cleanComment([]byte(next[pos:])) - if has { - p.comment.Write(bytes.TrimSpace(comment)) - } - break - } - val += next - if p.isEOF { - return "", fmt.Errorf("missing closing key quote from %q to %q", line, next) - } - } - return val, nil -} - -func (p *parser) readContinuationLines(val string) (string, error) { - for { - data, err := p.readUntil('\n') - if err != nil { - return "", err - } - next := strings.TrimSpace(string(data)) - - if len(next) == 0 { - break - } - val += next - if val[len(val)-1] != '\\' { - break - } - val = val[:len(val)-1] - } - return val, nil -} - -// hasSurroundedQuote check if and only if the first and last characters -// are quotes \" or \'. -// It returns false if any other parts also contain same kind of quotes. 
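// Illustrative usage sketch (not from the vendored source): readKeyName above accepts key
// names wrapped in ` or """ so they may contain the key/value delimiter, and
// readContinuationLines joins lines that end in a backslash. Assumes `import ini "gopkg.in/ini.v1"`.
func exampleQuotedKeysAndContinuation() error {
	src := "[mail]\n`subject=prefix` = [INFO]\nrecipients = a@example.com, \\\n    b@example.com\n"
	f, err := ini.Load([]byte(src))
	if err != nil {
		return err
	}
	sec := f.Section("mail")
	_ = sec.Key("subject=prefix").String() // "[INFO]"
	_ = sec.Key("recipients").String()     // "a@example.com, b@example.com"
	return nil
}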
-func hasSurroundedQuote(in string, quote byte) bool { - return len(in) >= 2 && in[0] == quote && in[len(in)-1] == quote && - strings.IndexByte(in[1:], quote) == len(in)-2 -} - -func (p *parser) readValue(in []byte, bufferSize int) (string, error) { - - line := strings.TrimLeftFunc(string(in), unicode.IsSpace) - if len(line) == 0 { - if p.options.AllowPythonMultilineValues && len(in) > 0 && in[len(in)-1] == '\n' { - return p.readPythonMultilines(line, bufferSize) - } - return "", nil - } - - var valQuote string - if len(line) > 3 && line[0:3] == `"""` { - valQuote = `"""` - } else if line[0] == '`' { - valQuote = "`" - } else if p.options.UnescapeValueDoubleQuotes && line[0] == '"' { - valQuote = `"` - } - - if len(valQuote) > 0 { - startIdx := len(valQuote) - pos := strings.LastIndex(line[startIdx:], valQuote) - // Check for multi-line value - if pos == -1 { - return p.readMultilines(line, line[startIdx:], valQuote) - } - - if p.options.UnescapeValueDoubleQuotes && valQuote == `"` { - return strings.Replace(line[startIdx:pos+startIdx], `\"`, `"`, -1), nil - } - return line[startIdx : pos+startIdx], nil - } - - lastChar := line[len(line)-1] - // Won't be able to reach here if value only contains whitespace - line = strings.TrimSpace(line) - trimmedLastChar := line[len(line)-1] - - // Check continuation lines when desired - if !p.options.IgnoreContinuation && trimmedLastChar == '\\' { - return p.readContinuationLines(line[:len(line)-1]) - } - - // Check if ignore inline comment - if !p.options.IgnoreInlineComment { - var i int - if p.options.SpaceBeforeInlineComment { - i = strings.Index(line, " #") - if i == -1 { - i = strings.Index(line, " ;") - } - - } else { - i = strings.IndexAny(line, "#;") - } - - if i > -1 { - p.comment.WriteString(line[i:]) - line = strings.TrimSpace(line[:i]) - } - - } - - // Trim single and double quotes - if (hasSurroundedQuote(line, '\'') || - hasSurroundedQuote(line, '"')) && !p.options.PreserveSurroundedQuote { - line = line[1 : len(line)-1] - } else if len(valQuote) == 0 && p.options.UnescapeValueCommentSymbols { - line = strings.ReplaceAll(line, `\;`, ";") - line = strings.ReplaceAll(line, `\#`, "#") - } else if p.options.AllowPythonMultilineValues && lastChar == '\n' { - return p.readPythonMultilines(line, bufferSize) - } - - return line, nil -} - -func (p *parser) readPythonMultilines(line string, bufferSize int) (string, error) { - parserBufferPeekResult, _ := p.buf.Peek(bufferSize) - peekBuffer := bytes.NewBuffer(parserBufferPeekResult) - - for { - peekData, peekErr := peekBuffer.ReadBytes('\n') - if peekErr != nil && peekErr != io.EOF { - p.debug("readPythonMultilines: failed to peek with error: %v", peekErr) - return "", peekErr - } - - p.debug("readPythonMultilines: parsing %q", string(peekData)) - - peekMatches := pythonMultiline.FindStringSubmatch(string(peekData)) - p.debug("readPythonMultilines: matched %d parts", len(peekMatches)) - for n, v := range peekMatches { - p.debug(" %d: %q", n, v) - } - - // Return if not a Python multiline value. - if len(peekMatches) != 3 { - p.debug("readPythonMultilines: end of value, got: %q", line) - return line, nil - } - - // Advance the parser reader (buffer) in-sync with the peek buffer. - _, err := p.buf.Discard(len(peekData)) - if err != nil { - p.debug("readPythonMultilines: failed to skip to the end, returning error") - return "", err - } - - line += "\n" + peekMatches[0] - } -} - -// parse parses data through an io.Reader. 
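// Illustrative usage sketch (not from the vendored source): readValue above strips inline
// "#"/";" comments by default, and indented follow-on lines are only folded into the value
// when AllowPythonMultilineValues is set. Assumes `import ini "gopkg.in/ini.v1"`.
func exampleValueOptions() error {
	f, err := ini.Load([]byte("home = /usr/local ; the install prefix\n"))
	if err != nil {
		return err
	}
	_ = f.Section("").Key("home").String() // "/usr/local": the trailing comment is stripped

	relaxed, err := ini.LoadSources(ini.LoadOptions{
		IgnoreInlineComment:        true,
		AllowPythonMultilineValues: true,
	}, []byte("home = /usr/local ; the install prefix\n"), []byte("motd = first line\n    second line\n"))
	if err != nil {
		return err
	}
	_ = relaxed.Section("").Key("home").String() // "/usr/local ; the install prefix"
	_ = relaxed.Section("").Key("motd").String() // "first line\n    second line"
	return nil
}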
-func (f *File) parse(reader io.Reader) (err error) { - p := newParser(reader, parserOptions{ - IgnoreContinuation: f.options.IgnoreContinuation, - IgnoreInlineComment: f.options.IgnoreInlineComment, - AllowPythonMultilineValues: f.options.AllowPythonMultilineValues, - SpaceBeforeInlineComment: f.options.SpaceBeforeInlineComment, - UnescapeValueDoubleQuotes: f.options.UnescapeValueDoubleQuotes, - UnescapeValueCommentSymbols: f.options.UnescapeValueCommentSymbols, - PreserveSurroundedQuote: f.options.PreserveSurroundedQuote, - DebugFunc: f.options.DebugFunc, - ReaderBufferSize: f.options.ReaderBufferSize, - }) - if err = p.BOM(); err != nil { - return fmt.Errorf("BOM: %v", err) - } - - // Ignore error because default section name is never empty string. - name := DefaultSection - if f.options.Insensitive || f.options.InsensitiveSections { - name = strings.ToLower(DefaultSection) - } - section, _ := f.NewSection(name) - - // This "last" is not strictly equivalent to "previous one" if current key is not the first nested key - var isLastValueEmpty bool - var lastRegularKey *Key - - var line []byte - var inUnparseableSection bool - - // NOTE: Iterate and increase `currentPeekSize` until - // the size of the parser buffer is found. - // TODO(unknwon): When Golang 1.10 is the lowest version supported, replace with `parserBufferSize := p.buf.Size()`. - parserBufferSize := 0 - // NOTE: Peek 4kb at a time. - currentPeekSize := minReaderBufferSize - - if f.options.AllowPythonMultilineValues { - for { - peekBytes, _ := p.buf.Peek(currentPeekSize) - peekBytesLength := len(peekBytes) - - if parserBufferSize >= peekBytesLength { - break - } - - currentPeekSize *= 2 - parserBufferSize = peekBytesLength - } - } - - for !p.isEOF { - line, err = p.readUntil('\n') - if err != nil { - return err - } - - if f.options.AllowNestedValues && - isLastValueEmpty && len(line) > 0 { - if line[0] == ' ' || line[0] == '\t' { - err = lastRegularKey.addNestedValue(string(bytes.TrimSpace(line))) - if err != nil { - return err - } - continue - } - } - - line = bytes.TrimLeftFunc(line, unicode.IsSpace) - if len(line) == 0 { - continue - } - - // Comments - if line[0] == '#' || line[0] == ';' { - // Note: we do not care ending line break, - // it is needed for adding second line, - // so just clean it once at the end when set to value. 
- p.comment.Write(line) - continue - } - - // Section - if line[0] == '[' { - // Read to the next ']' (TODO: support quoted strings) - closeIdx := bytes.LastIndexByte(line, ']') - if closeIdx == -1 { - return fmt.Errorf("unclosed section: %s", line) - } - - name := string(line[1:closeIdx]) - section, err = f.NewSection(name) - if err != nil { - return err - } - - comment, has := cleanComment(line[closeIdx+1:]) - if has { - p.comment.Write(comment) - } - - section.Comment = strings.TrimSpace(p.comment.String()) - - // Reset auto-counter and comments - p.comment.Reset() - p.count = 1 - // Nested values can't span sections - isLastValueEmpty = false - - inUnparseableSection = false - for i := range f.options.UnparseableSections { - if f.options.UnparseableSections[i] == name || - ((f.options.Insensitive || f.options.InsensitiveSections) && strings.EqualFold(f.options.UnparseableSections[i], name)) { - inUnparseableSection = true - continue - } - } - continue - } - - if inUnparseableSection { - section.isRawSection = true - section.rawBody += string(line) - continue - } - - kname, offset, err := readKeyName(f.options.KeyValueDelimiters, line) - if err != nil { - switch { - // Treat as boolean key when desired, and whole line is key name. - case IsErrDelimiterNotFound(err): - switch { - case f.options.AllowBooleanKeys: - kname, err := p.readValue(line, parserBufferSize) - if err != nil { - return err - } - key, err := section.NewBooleanKey(kname) - if err != nil { - return err - } - key.Comment = strings.TrimSpace(p.comment.String()) - p.comment.Reset() - continue - - case f.options.SkipUnrecognizableLines: - continue - } - case IsErrEmptyKeyName(err) && f.options.SkipUnrecognizableLines: - continue - } - return err - } - - // Auto increment. - isAutoIncr := false - if kname == "-" { - isAutoIncr = true - kname = "#" + strconv.Itoa(p.count) - p.count++ - } - - value, err := p.readValue(line[offset:], parserBufferSize) - if err != nil { - return err - } - isLastValueEmpty = len(value) == 0 - - key, err := section.NewKey(kname, value) - if err != nil { - return err - } - key.isAutoIncrement = isAutoIncr - key.Comment = strings.TrimSpace(p.comment.String()) - p.comment.Reset() - lastRegularKey = key - } - return nil -} diff --git a/vendor/gopkg.in/ini.v1/section.go b/vendor/gopkg.in/ini.v1/section.go deleted file mode 100644 index a3615d820..000000000 --- a/vendor/gopkg.in/ini.v1/section.go +++ /dev/null @@ -1,256 +0,0 @@ -// Copyright 2014 Unknwon -// -// Licensed under the Apache License, Version 2.0 (the "License"): you may -// not use this file except in compliance with the License. You may obtain -// a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -// License for the specific language governing permissions and limitations -// under the License. - -package ini - -import ( - "errors" - "fmt" - "strings" -) - -// Section represents a config section. 
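// Illustrative usage sketch (not from the vendored source): the parse loop above turns bare
// lines into boolean keys when AllowBooleanKeys is set, and keeps whole sections verbatim
// when they are listed in UnparseableSections. Assumes `import ini "gopkg.in/ini.v1"`.
func exampleBooleanAndRawSections() error {
	src := []byte("[mysqld]\nskip-name-resolve\n[COMMENTS]\nfree-form text, no key = value required\n")
	f, err := ini.LoadSources(ini.LoadOptions{
		AllowBooleanKeys:    true,
		UnparseableSections: []string{"COMMENTS"},
	}, src)
	if err != nil {
		return err
	}
	_ = f.Section("mysqld").Key("skip-name-resolve").MustBool(false) // true: boolean keys store "true"
	_ = f.Section("COMMENTS").Body()                                 // the raw body, whitespace-trimmed
	return nil
}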
-type Section struct { - f *File - Comment string - name string - keys map[string]*Key - keyList []string - keysHash map[string]string - - isRawSection bool - rawBody string -} - -func newSection(f *File, name string) *Section { - return &Section{ - f: f, - name: name, - keys: make(map[string]*Key), - keyList: make([]string, 0, 10), - keysHash: make(map[string]string), - } -} - -// Name returns name of Section. -func (s *Section) Name() string { - return s.name -} - -// Body returns rawBody of Section if the section was marked as unparseable. -// It still follows the other rules of the INI format surrounding leading/trailing whitespace. -func (s *Section) Body() string { - return strings.TrimSpace(s.rawBody) -} - -// SetBody updates body content only if section is raw. -func (s *Section) SetBody(body string) { - if !s.isRawSection { - return - } - s.rawBody = body -} - -// NewKey creates a new key to given section. -func (s *Section) NewKey(name, val string) (*Key, error) { - if len(name) == 0 { - return nil, errors.New("error creating new key: empty key name") - } else if s.f.options.Insensitive || s.f.options.InsensitiveKeys { - name = strings.ToLower(name) - } - - if s.f.BlockMode { - s.f.lock.Lock() - defer s.f.lock.Unlock() - } - - if inSlice(name, s.keyList) { - if s.f.options.AllowShadows { - if err := s.keys[name].addShadow(val); err != nil { - return nil, err - } - } else { - s.keys[name].value = val - s.keysHash[name] = val - } - return s.keys[name], nil - } - - s.keyList = append(s.keyList, name) - s.keys[name] = newKey(s, name, val) - s.keysHash[name] = val - return s.keys[name], nil -} - -// NewBooleanKey creates a new boolean type key to given section. -func (s *Section) NewBooleanKey(name string) (*Key, error) { - key, err := s.NewKey(name, "true") - if err != nil { - return nil, err - } - - key.isBooleanType = true - return key, nil -} - -// GetKey returns key in section by given name. -func (s *Section) GetKey(name string) (*Key, error) { - if s.f.BlockMode { - s.f.lock.RLock() - } - if s.f.options.Insensitive || s.f.options.InsensitiveKeys { - name = strings.ToLower(name) - } - key := s.keys[name] - if s.f.BlockMode { - s.f.lock.RUnlock() - } - - if key == nil { - // Check if it is a child-section. - sname := s.name - for { - if i := strings.LastIndex(sname, s.f.options.ChildSectionDelimiter); i > -1 { - sname = sname[:i] - sec, err := s.f.GetSection(sname) - if err != nil { - continue - } - return sec.GetKey(name) - } - break - } - return nil, fmt.Errorf("error when getting key of section %q: key %q not exists", s.name, name) - } - return key, nil -} - -// HasKey returns true if section contains a key with given name. -func (s *Section) HasKey(name string) bool { - key, _ := s.GetKey(name) - return key != nil -} - -// Deprecated: Use "HasKey" instead. -func (s *Section) Haskey(name string) bool { - return s.HasKey(name) -} - -// HasValue returns true if section contains given raw value. -func (s *Section) HasValue(value string) bool { - if s.f.BlockMode { - s.f.lock.RLock() - defer s.f.lock.RUnlock() - } - - for _, k := range s.keys { - if value == k.value { - return true - } - } - return false -} - -// Key assumes named Key exists in section and returns a zero-value when not. -func (s *Section) Key(name string) *Key { - key, err := s.GetKey(name) - if err != nil { - // It's OK here because the only possible error is empty key name, - // but if it's empty, this piece of code won't be executed. 
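// Illustrative usage sketch (not from the vendored source): GetKey above reports a missing
// key as an error, while Key silently creates an empty placeholder, which pairs naturally
// with the Must*/In helpers. Assumes `import ini "gopkg.in/ini.v1"`.
func exampleSectionLookups() error {
	f, err := ini.Load([]byte("[log]\nlevel = info\n"))
	if err != nil {
		return err
	}
	sec := f.Section("log")
	if sec.HasKey("level") {
		_ = sec.Key("level").In("info", []string{"debug", "info", "warn", "error"})
	}
	if _, err := sec.GetKey("format"); err != nil {
		_ = sec.Key("format").MustString("json") // "json": Key creates the key, MustString fills it
	}
	return nil
}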
- key, _ = s.NewKey(name, "") - return key - } - return key -} - -// Keys returns list of keys of section. -func (s *Section) Keys() []*Key { - keys := make([]*Key, len(s.keyList)) - for i := range s.keyList { - keys[i] = s.Key(s.keyList[i]) - } - return keys -} - -// ParentKeys returns list of keys of parent section. -func (s *Section) ParentKeys() []*Key { - var parentKeys []*Key - sname := s.name - for { - if i := strings.LastIndex(sname, s.f.options.ChildSectionDelimiter); i > -1 { - sname = sname[:i] - sec, err := s.f.GetSection(sname) - if err != nil { - continue - } - parentKeys = append(parentKeys, sec.Keys()...) - } else { - break - } - - } - return parentKeys -} - -// KeyStrings returns list of key names of section. -func (s *Section) KeyStrings() []string { - list := make([]string, len(s.keyList)) - copy(list, s.keyList) - return list -} - -// KeysHash returns keys hash consisting of names and values. -func (s *Section) KeysHash() map[string]string { - if s.f.BlockMode { - s.f.lock.RLock() - defer s.f.lock.RUnlock() - } - - hash := make(map[string]string, len(s.keysHash)) - for key, value := range s.keysHash { - hash[key] = value - } - return hash -} - -// DeleteKey deletes a key from section. -func (s *Section) DeleteKey(name string) { - if s.f.BlockMode { - s.f.lock.Lock() - defer s.f.lock.Unlock() - } - - for i, k := range s.keyList { - if k == name { - s.keyList = append(s.keyList[:i], s.keyList[i+1:]...) - delete(s.keys, name) - delete(s.keysHash, name) - return - } - } -} - -// ChildSections returns a list of child sections of current section. -// For example, "[parent.child1]" and "[parent.child12]" are child sections -// of section "[parent]". -func (s *Section) ChildSections() []*Section { - prefix := s.name + s.f.options.ChildSectionDelimiter - children := make([]*Section, 0, 3) - for _, name := range s.f.sectionList { - if strings.HasPrefix(name, prefix) { - children = append(children, s.f.sections[name]...) - } - } - return children -} diff --git a/vendor/gopkg.in/ini.v1/struct.go b/vendor/gopkg.in/ini.v1/struct.go deleted file mode 100644 index a486b2fe0..000000000 --- a/vendor/gopkg.in/ini.v1/struct.go +++ /dev/null @@ -1,747 +0,0 @@ -// Copyright 2014 Unknwon -// -// Licensed under the Apache License, Version 2.0 (the "License"): you may -// not use this file except in compliance with the License. You may obtain -// a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -// License for the specific language governing permissions and limitations -// under the License. - -package ini - -import ( - "bytes" - "errors" - "fmt" - "reflect" - "strings" - "time" - "unicode" -) - -// NameMapper represents a ini tag name mapper. -type NameMapper func(string) string - -// Built-in name getters. -var ( - // SnackCase converts to format SNACK_CASE. - SnackCase NameMapper = func(raw string) string { - newstr := make([]rune, 0, len(raw)) - for i, chr := range raw { - if isUpper := 'A' <= chr && chr <= 'Z'; isUpper { - if i > 0 { - newstr = append(newstr, '_') - } - } - newstr = append(newstr, unicode.ToUpper(chr)) - } - return string(newstr) - } - // TitleUnderscore converts to format title_underscore. 
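// Illustrative usage sketch (not from the vendored source): section names sharing a "."
// prefix form parent/child sections; a child falls back to its parents for missing keys,
// and ChildSections enumerates the children. Assumes `import ini "gopkg.in/ini.v1"`.
func exampleChildSections() error {
	f, err := ini.Load([]byte("[server]\ntimeout = 30\n[server.http]\nport = 8080\n"))
	if err != nil {
		return err
	}
	child := f.Section("server.http")
	_ = child.Key("port").MustInt(0)             // 8080: defined on the child itself
	_ = child.Key("timeout").MustInt(0)          // 30: inherited from [server] through GetKey's fallback
	_ = len(f.Section("server").ChildSections()) // 1
	return nil
}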
- TitleUnderscore NameMapper = func(raw string) string { - newstr := make([]rune, 0, len(raw)) - for i, chr := range raw { - if isUpper := 'A' <= chr && chr <= 'Z'; isUpper { - if i > 0 { - newstr = append(newstr, '_') - } - chr -= 'A' - 'a' - } - newstr = append(newstr, chr) - } - return string(newstr) - } -) - -func (s *Section) parseFieldName(raw, actual string) string { - if len(actual) > 0 { - return actual - } - if s.f.NameMapper != nil { - return s.f.NameMapper(raw) - } - return raw -} - -func parseDelim(actual string) string { - if len(actual) > 0 { - return actual - } - return "," -} - -var reflectTime = reflect.TypeOf(time.Now()).Kind() - -// setSliceWithProperType sets proper values to slice based on its type. -func setSliceWithProperType(key *Key, field reflect.Value, delim string, allowShadow, isStrict bool) error { - var strs []string - if allowShadow { - strs = key.StringsWithShadows(delim) - } else { - strs = key.Strings(delim) - } - - numVals := len(strs) - if numVals == 0 { - return nil - } - - var vals interface{} - var err error - - sliceOf := field.Type().Elem().Kind() - switch sliceOf { - case reflect.String: - vals = strs - case reflect.Int: - vals, err = key.parseInts(strs, true, false) - case reflect.Int64: - vals, err = key.parseInt64s(strs, true, false) - case reflect.Uint: - vals, err = key.parseUints(strs, true, false) - case reflect.Uint64: - vals, err = key.parseUint64s(strs, true, false) - case reflect.Float64: - vals, err = key.parseFloat64s(strs, true, false) - case reflect.Bool: - vals, err = key.parseBools(strs, true, false) - case reflectTime: - vals, err = key.parseTimesFormat(time.RFC3339, strs, true, false) - default: - return fmt.Errorf("unsupported type '[]%s'", sliceOf) - } - if err != nil && isStrict { - return err - } - - slice := reflect.MakeSlice(field.Type(), numVals, numVals) - for i := 0; i < numVals; i++ { - switch sliceOf { - case reflect.String: - slice.Index(i).Set(reflect.ValueOf(vals.([]string)[i])) - case reflect.Int: - slice.Index(i).Set(reflect.ValueOf(vals.([]int)[i])) - case reflect.Int64: - slice.Index(i).Set(reflect.ValueOf(vals.([]int64)[i])) - case reflect.Uint: - slice.Index(i).Set(reflect.ValueOf(vals.([]uint)[i])) - case reflect.Uint64: - slice.Index(i).Set(reflect.ValueOf(vals.([]uint64)[i])) - case reflect.Float64: - slice.Index(i).Set(reflect.ValueOf(vals.([]float64)[i])) - case reflect.Bool: - slice.Index(i).Set(reflect.ValueOf(vals.([]bool)[i])) - case reflectTime: - slice.Index(i).Set(reflect.ValueOf(vals.([]time.Time)[i])) - } - } - field.Set(slice) - return nil -} - -func wrapStrictError(err error, isStrict bool) error { - if isStrict { - return err - } - return nil -} - -// setWithProperType sets proper value to field based on its type, -// but it does not return error for failing parsing, -// because we want to use default value that is already assigned to struct. 
-func setWithProperType(t reflect.Type, key *Key, field reflect.Value, delim string, allowShadow, isStrict bool) error { - vt := t - isPtr := t.Kind() == reflect.Ptr - if isPtr { - vt = t.Elem() - } - switch vt.Kind() { - case reflect.String: - stringVal := key.String() - if isPtr { - field.Set(reflect.ValueOf(&stringVal)) - } else if len(stringVal) > 0 { - field.SetString(key.String()) - } - case reflect.Bool: - boolVal, err := key.Bool() - if err != nil { - return wrapStrictError(err, isStrict) - } - if isPtr { - field.Set(reflect.ValueOf(&boolVal)) - } else { - field.SetBool(boolVal) - } - case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: - // ParseDuration will not return err for `0`, so check the type name - if vt.Name() == "Duration" { - durationVal, err := key.Duration() - if err != nil { - if intVal, err := key.Int64(); err == nil { - field.SetInt(intVal) - return nil - } - return wrapStrictError(err, isStrict) - } - if isPtr { - field.Set(reflect.ValueOf(&durationVal)) - } else if int64(durationVal) > 0 { - field.Set(reflect.ValueOf(durationVal)) - } - return nil - } - - intVal, err := key.Int64() - if err != nil { - return wrapStrictError(err, isStrict) - } - if isPtr { - pv := reflect.New(t.Elem()) - pv.Elem().SetInt(intVal) - field.Set(pv) - } else { - field.SetInt(intVal) - } - // byte is an alias for uint8, so supporting uint8 breaks support for byte - case reflect.Uint, reflect.Uint16, reflect.Uint32, reflect.Uint64: - durationVal, err := key.Duration() - // Skip zero value - if err == nil && uint64(durationVal) > 0 { - if isPtr { - field.Set(reflect.ValueOf(&durationVal)) - } else { - field.Set(reflect.ValueOf(durationVal)) - } - return nil - } - - uintVal, err := key.Uint64() - if err != nil { - return wrapStrictError(err, isStrict) - } - if isPtr { - pv := reflect.New(t.Elem()) - pv.Elem().SetUint(uintVal) - field.Set(pv) - } else { - field.SetUint(uintVal) - } - - case reflect.Float32, reflect.Float64: - floatVal, err := key.Float64() - if err != nil { - return wrapStrictError(err, isStrict) - } - if isPtr { - pv := reflect.New(t.Elem()) - pv.Elem().SetFloat(floatVal) - field.Set(pv) - } else { - field.SetFloat(floatVal) - } - case reflectTime: - timeVal, err := key.Time() - if err != nil { - return wrapStrictError(err, isStrict) - } - if isPtr { - field.Set(reflect.ValueOf(&timeVal)) - } else { - field.Set(reflect.ValueOf(timeVal)) - } - case reflect.Slice: - return setSliceWithProperType(key, field, delim, allowShadow, isStrict) - default: - return fmt.Errorf("unsupported type %q", t) - } - return nil -} - -func parseTagOptions(tag string) (rawName string, omitEmpty bool, allowShadow bool, allowNonUnique bool, extends bool) { - opts := strings.SplitN(tag, ",", 5) - rawName = opts[0] - for _, opt := range opts[1:] { - omitEmpty = omitEmpty || (opt == "omitempty") - allowShadow = allowShadow || (opt == "allowshadow") - allowNonUnique = allowNonUnique || (opt == "nonunique") - extends = extends || (opt == "extends") - } - return rawName, omitEmpty, allowShadow, allowNonUnique, extends -} - -// mapToField maps the given value to the matching field of the given section. -// The sectionIndex is the index (if non unique sections are enabled) to which the value should be added. 
-func (s *Section) mapToField(val reflect.Value, isStrict bool, sectionIndex int, sectionName string) error { - if val.Kind() == reflect.Ptr { - val = val.Elem() - } - typ := val.Type() - - for i := 0; i < typ.NumField(); i++ { - field := val.Field(i) - tpField := typ.Field(i) - - tag := tpField.Tag.Get("ini") - if tag == "-" { - continue - } - - rawName, _, allowShadow, allowNonUnique, extends := parseTagOptions(tag) - fieldName := s.parseFieldName(tpField.Name, rawName) - if len(fieldName) == 0 || !field.CanSet() { - continue - } - - isStruct := tpField.Type.Kind() == reflect.Struct - isStructPtr := tpField.Type.Kind() == reflect.Ptr && tpField.Type.Elem().Kind() == reflect.Struct - isAnonymousPtr := tpField.Type.Kind() == reflect.Ptr && tpField.Anonymous - if isAnonymousPtr { - field.Set(reflect.New(tpField.Type.Elem())) - } - - if extends && (isAnonymousPtr || (isStruct && tpField.Anonymous)) { - if isStructPtr && field.IsNil() { - field.Set(reflect.New(tpField.Type.Elem())) - } - fieldSection := s - if rawName != "" { - sectionName = s.name + s.f.options.ChildSectionDelimiter + rawName - if secs, err := s.f.SectionsByName(sectionName); err == nil && sectionIndex < len(secs) { - fieldSection = secs[sectionIndex] - } - } - if err := fieldSection.mapToField(field, isStrict, sectionIndex, sectionName); err != nil { - return fmt.Errorf("map to field %q: %v", fieldName, err) - } - } else if isAnonymousPtr || isStruct || isStructPtr { - if secs, err := s.f.SectionsByName(fieldName); err == nil { - if len(secs) <= sectionIndex { - return fmt.Errorf("there are not enough sections (%d <= %d) for the field %q", len(secs), sectionIndex, fieldName) - } - // Only set the field to non-nil struct value if we have a section for it. - // Otherwise, we end up with a non-nil struct ptr even though there is no data. - if isStructPtr && field.IsNil() { - field.Set(reflect.New(tpField.Type.Elem())) - } - if err = secs[sectionIndex].mapToField(field, isStrict, sectionIndex, fieldName); err != nil { - return fmt.Errorf("map to field %q: %v", fieldName, err) - } - continue - } - } - - // Map non-unique sections - if allowNonUnique && tpField.Type.Kind() == reflect.Slice { - newField, err := s.mapToSlice(fieldName, field, isStrict) - if err != nil { - return fmt.Errorf("map to slice %q: %v", fieldName, err) - } - - field.Set(newField) - continue - } - - if key, err := s.GetKey(fieldName); err == nil { - delim := parseDelim(tpField.Tag.Get("delim")) - if err = setWithProperType(tpField.Type, key, field, delim, allowShadow, isStrict); err != nil { - return fmt.Errorf("set field %q: %v", fieldName, err) - } - } - } - return nil -} - -// mapToSlice maps all sections with the same name and returns the new value. -// The type of the Value must be a slice. -func (s *Section) mapToSlice(secName string, val reflect.Value, isStrict bool) (reflect.Value, error) { - secs, err := s.f.SectionsByName(secName) - if err != nil { - return reflect.Value{}, err - } - - typ := val.Type().Elem() - for i, sec := range secs { - elem := reflect.New(typ) - if err = sec.mapToField(elem, isStrict, i, sec.name); err != nil { - return reflect.Value{}, fmt.Errorf("map to field from section %q: %v", secName, err) - } - - val = reflect.Append(val, elem.Elem()) - } - return val, nil -} - -// mapTo maps a section to object v. 
-func (s *Section) mapTo(v interface{}, isStrict bool) error { - typ := reflect.TypeOf(v) - val := reflect.ValueOf(v) - if typ.Kind() == reflect.Ptr { - typ = typ.Elem() - val = val.Elem() - } else { - return errors.New("not a pointer to a struct") - } - - if typ.Kind() == reflect.Slice { - newField, err := s.mapToSlice(s.name, val, isStrict) - if err != nil { - return err - } - - val.Set(newField) - return nil - } - - return s.mapToField(val, isStrict, 0, s.name) -} - -// MapTo maps section to given struct. -func (s *Section) MapTo(v interface{}) error { - return s.mapTo(v, false) -} - -// StrictMapTo maps section to given struct in strict mode, -// which returns all possible error including value parsing error. -func (s *Section) StrictMapTo(v interface{}) error { - return s.mapTo(v, true) -} - -// MapTo maps file to given struct. -func (f *File) MapTo(v interface{}) error { - return f.Section("").MapTo(v) -} - -// StrictMapTo maps file to given struct in strict mode, -// which returns all possible error including value parsing error. -func (f *File) StrictMapTo(v interface{}) error { - return f.Section("").StrictMapTo(v) -} - -// MapToWithMapper maps data sources to given struct with name mapper. -func MapToWithMapper(v interface{}, mapper NameMapper, source interface{}, others ...interface{}) error { - cfg, err := Load(source, others...) - if err != nil { - return err - } - cfg.NameMapper = mapper - return cfg.MapTo(v) -} - -// StrictMapToWithMapper maps data sources to given struct with name mapper in strict mode, -// which returns all possible error including value parsing error. -func StrictMapToWithMapper(v interface{}, mapper NameMapper, source interface{}, others ...interface{}) error { - cfg, err := Load(source, others...) - if err != nil { - return err - } - cfg.NameMapper = mapper - return cfg.StrictMapTo(v) -} - -// MapTo maps data sources to given struct. -func MapTo(v, source interface{}, others ...interface{}) error { - return MapToWithMapper(v, nil, source, others...) -} - -// StrictMapTo maps data sources to given struct in strict mode, -// which returns all possible error including value parsing error. -func StrictMapTo(v, source interface{}, others ...interface{}) error { - return StrictMapToWithMapper(v, nil, source, others...) -} - -// reflectSliceWithProperType does the opposite thing as setSliceWithProperType. 
-func reflectSliceWithProperType(key *Key, field reflect.Value, delim string, allowShadow bool) error { - slice := field.Slice(0, field.Len()) - if field.Len() == 0 { - return nil - } - sliceOf := field.Type().Elem().Kind() - - if allowShadow { - var keyWithShadows *Key - for i := 0; i < field.Len(); i++ { - var val string - switch sliceOf { - case reflect.String: - val = slice.Index(i).String() - case reflect.Int, reflect.Int64: - val = fmt.Sprint(slice.Index(i).Int()) - case reflect.Uint, reflect.Uint64: - val = fmt.Sprint(slice.Index(i).Uint()) - case reflect.Float64: - val = fmt.Sprint(slice.Index(i).Float()) - case reflect.Bool: - val = fmt.Sprint(slice.Index(i).Bool()) - case reflectTime: - val = slice.Index(i).Interface().(time.Time).Format(time.RFC3339) - default: - return fmt.Errorf("unsupported type '[]%s'", sliceOf) - } - - if i == 0 { - keyWithShadows = newKey(key.s, key.name, val) - } else { - _ = keyWithShadows.AddShadow(val) - } - } - *key = *keyWithShadows - return nil - } - - var buf bytes.Buffer - for i := 0; i < field.Len(); i++ { - switch sliceOf { - case reflect.String: - buf.WriteString(slice.Index(i).String()) - case reflect.Int, reflect.Int64: - buf.WriteString(fmt.Sprint(slice.Index(i).Int())) - case reflect.Uint, reflect.Uint64: - buf.WriteString(fmt.Sprint(slice.Index(i).Uint())) - case reflect.Float64: - buf.WriteString(fmt.Sprint(slice.Index(i).Float())) - case reflect.Bool: - buf.WriteString(fmt.Sprint(slice.Index(i).Bool())) - case reflectTime: - buf.WriteString(slice.Index(i).Interface().(time.Time).Format(time.RFC3339)) - default: - return fmt.Errorf("unsupported type '[]%s'", sliceOf) - } - buf.WriteString(delim) - } - key.SetValue(buf.String()[:buf.Len()-len(delim)]) - return nil -} - -// reflectWithProperType does the opposite thing as setWithProperType. -func reflectWithProperType(t reflect.Type, key *Key, field reflect.Value, delim string, allowShadow bool) error { - switch t.Kind() { - case reflect.String: - key.SetValue(field.String()) - case reflect.Bool: - key.SetValue(fmt.Sprint(field.Bool())) - case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: - key.SetValue(fmt.Sprint(field.Int())) - case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64: - key.SetValue(fmt.Sprint(field.Uint())) - case reflect.Float32, reflect.Float64: - key.SetValue(fmt.Sprint(field.Float())) - case reflectTime: - key.SetValue(fmt.Sprint(field.Interface().(time.Time).Format(time.RFC3339))) - case reflect.Slice: - return reflectSliceWithProperType(key, field, delim, allowShadow) - case reflect.Ptr: - if !field.IsNil() { - return reflectWithProperType(t.Elem(), key, field.Elem(), delim, allowShadow) - } - default: - return fmt.Errorf("unsupported type %q", t) - } - return nil -} - -// CR: copied from encoding/json/encode.go with modifications of time.Time support. -// TODO: add more test coverage. 
-func isEmptyValue(v reflect.Value) bool { - switch v.Kind() { - case reflect.Array, reflect.Map, reflect.Slice, reflect.String: - return v.Len() == 0 - case reflect.Bool: - return !v.Bool() - case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: - return v.Int() == 0 - case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr: - return v.Uint() == 0 - case reflect.Float32, reflect.Float64: - return v.Float() == 0 - case reflect.Interface, reflect.Ptr: - return v.IsNil() - case reflectTime: - t, ok := v.Interface().(time.Time) - return ok && t.IsZero() - } - return false -} - -// StructReflector is the interface implemented by struct types that can extract themselves into INI objects. -type StructReflector interface { - ReflectINIStruct(*File) error -} - -func (s *Section) reflectFrom(val reflect.Value) error { - if val.Kind() == reflect.Ptr { - val = val.Elem() - } - typ := val.Type() - - for i := 0; i < typ.NumField(); i++ { - if !val.Field(i).CanInterface() { - continue - } - - field := val.Field(i) - tpField := typ.Field(i) - - tag := tpField.Tag.Get("ini") - if tag == "-" { - continue - } - - rawName, omitEmpty, allowShadow, allowNonUnique, extends := parseTagOptions(tag) - if omitEmpty && isEmptyValue(field) { - continue - } - - if r, ok := field.Interface().(StructReflector); ok { - return r.ReflectINIStruct(s.f) - } - - fieldName := s.parseFieldName(tpField.Name, rawName) - if len(fieldName) == 0 || !field.CanSet() { - continue - } - - if extends && tpField.Anonymous && (tpField.Type.Kind() == reflect.Ptr || tpField.Type.Kind() == reflect.Struct) { - if err := s.reflectFrom(field); err != nil { - return fmt.Errorf("reflect from field %q: %v", fieldName, err) - } - continue - } - - if (tpField.Type.Kind() == reflect.Ptr && tpField.Type.Elem().Kind() == reflect.Struct) || - (tpField.Type.Kind() == reflect.Struct && tpField.Type.Name() != "Time") { - // Note: The only error here is section doesn't exist. - sec, err := s.f.GetSection(fieldName) - if err != nil { - // Note: fieldName can never be empty here, ignore error. - sec, _ = s.f.NewSection(fieldName) - } - - // Add comment from comment tag - if len(sec.Comment) == 0 { - sec.Comment = tpField.Tag.Get("comment") - } - - if err = sec.reflectFrom(field); err != nil { - return fmt.Errorf("reflect from field %q: %v", fieldName, err) - } - continue - } - - if allowNonUnique && tpField.Type.Kind() == reflect.Slice { - slice := field.Slice(0, field.Len()) - if field.Len() == 0 { - return nil - } - sliceOf := field.Type().Elem().Kind() - - for i := 0; i < field.Len(); i++ { - if sliceOf != reflect.Struct && sliceOf != reflect.Ptr { - return fmt.Errorf("field %q is not a slice of pointer or struct", fieldName) - } - - sec, err := s.f.NewSection(fieldName) - if err != nil { - return err - } - - // Add comment from comment tag - if len(sec.Comment) == 0 { - sec.Comment = tpField.Tag.Get("comment") - } - - if err := sec.reflectFrom(slice.Index(i)); err != nil { - return fmt.Errorf("reflect from field %q: %v", fieldName, err) - } - } - continue - } - - // Note: Same reason as section. 
- key, err := s.GetKey(fieldName) - if err != nil { - key, _ = s.NewKey(fieldName, "") - } - - // Add comment from comment tag - if len(key.Comment) == 0 { - key.Comment = tpField.Tag.Get("comment") - } - - delim := parseDelim(tpField.Tag.Get("delim")) - if err = reflectWithProperType(tpField.Type, key, field, delim, allowShadow); err != nil { - return fmt.Errorf("reflect field %q: %v", fieldName, err) - } - - } - return nil -} - -// ReflectFrom reflects section from given struct. It overwrites existing ones. -func (s *Section) ReflectFrom(v interface{}) error { - typ := reflect.TypeOf(v) - val := reflect.ValueOf(v) - - if s.name != DefaultSection && s.f.options.AllowNonUniqueSections && - (typ.Kind() == reflect.Slice || typ.Kind() == reflect.Ptr) { - // Clear sections to make sure none exists before adding the new ones - s.f.DeleteSection(s.name) - - if typ.Kind() == reflect.Ptr { - sec, err := s.f.NewSection(s.name) - if err != nil { - return err - } - return sec.reflectFrom(val.Elem()) - } - - slice := val.Slice(0, val.Len()) - sliceOf := val.Type().Elem().Kind() - if sliceOf != reflect.Ptr { - return fmt.Errorf("not a slice of pointers") - } - - for i := 0; i < slice.Len(); i++ { - sec, err := s.f.NewSection(s.name) - if err != nil { - return err - } - - err = sec.reflectFrom(slice.Index(i)) - if err != nil { - return fmt.Errorf("reflect from %dth field: %v", i, err) - } - } - - return nil - } - - if typ.Kind() == reflect.Ptr { - val = val.Elem() - } else { - return errors.New("not a pointer to a struct") - } - - return s.reflectFrom(val) -} - -// ReflectFrom reflects file from given struct. -func (f *File) ReflectFrom(v interface{}) error { - return f.Section("").ReflectFrom(v) -} - -// ReflectFromWithMapper reflects data sources from given struct with name mapper. -func ReflectFromWithMapper(cfg *File, v interface{}, mapper NameMapper) error { - cfg.NameMapper = mapper - return cfg.ReflectFrom(v) -} - -// ReflectFrom reflects data sources from given struct. -func ReflectFrom(cfg *File, v interface{}) error { - return ReflectFromWithMapper(cfg, v, nil) -} diff --git a/vendor/honnef.co/go/tools/LICENSE b/vendor/honnef.co/go/tools/LICENSE deleted file mode 100644 index dfd031454..000000000 --- a/vendor/honnef.co/go/tools/LICENSE +++ /dev/null @@ -1,20 +0,0 @@ -Copyright (c) 2016 Dominik Honnef - -Permission is hereby granted, free of charge, to any person obtaining -a copy of this software and associated documentation files (the -"Software"), to deal in the Software without restriction, including -without limitation the rights to use, copy, modify, merge, publish, -distribute, sublicense, and/or sell copies of the Software, and to -permit persons to whom the Software is furnished to do so, subject to -the following conditions: - -The above copyright notice and this permission notice shall be -included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
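The gopkg.in/ini.v1 files removed above (section.go and struct.go) implement the library's section/key model and its struct mapping driven by `ini`, `delim`, and `comment` tags — MapTo fills a struct from a section, ReflectFrom writes a struct back out. As a hedged sketch of how that mapping API is typically consumed (the struct, section, and field names below are illustrative assumptions, not code from this repository):

```go
package main

import (
	"fmt"
	"os"

	"gopkg.in/ini.v1"
)

// Server is a hypothetical config struct; the `ini` tags name the keys
// and `delim` controls how slice values are split/joined.
type Server struct {
	Host  string   `ini:"host"`
	Port  int      `ini:"port"`
	Debug bool     `ini:"debug"`
	Tags  []string `ini:"tags" delim:","`
}

func main() {
	data := []byte("[server]\nhost = example.org\nport = 8443\ndebug = true\ntags = a,b,c\n")

	cfg, err := ini.Load(data)
	if err != nil {
		panic(err)
	}

	// MapTo populates the struct from the [server] section using the tags above.
	var srv Server
	if err := cfg.Section("server").MapTo(&srv); err != nil {
		panic(err)
	}
	fmt.Printf("%+v\n", srv)

	// ReflectFrom goes the other way: it writes the struct back into an INI file.
	out := ini.Empty()
	if err := out.Section("server").ReflectFrom(&srv); err != nil {
		panic(err)
	}
	_, _ = out.WriteTo(os.Stdout)
}
```

With this dependency dropped from the module graph, any in-tree code that relied on the vendored copy would need to import the library directly (or stop using it); the sketch above only shows the round-trip behavior the removed files provided.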
diff --git a/vendor/honnef.co/go/tools/LICENSE-THIRD-PARTY b/vendor/honnef.co/go/tools/LICENSE-THIRD-PARTY deleted file mode 100644 index f2c0fa93a..000000000 --- a/vendor/honnef.co/go/tools/LICENSE-THIRD-PARTY +++ /dev/null @@ -1,121 +0,0 @@ -Staticcheck and its related tools make use of third party projects, -either by reusing their code, or by statically linking them into -resulting binaries. These projects are: - -* The Go Programming Language - https://golang.org/ - golang.org/x/mod - https://github.com/golang/mod - golang.org/x/tools - https://github.com/golang/tools - golang.org/x/sys - https://github.com/golang/sys - golang.org/x/xerrors - https://github.com/golang/xerrors - - Copyright (c) 2009 The Go Authors. All rights reserved. - - Redistribution and use in source and binary forms, with or without - modification, are permitted provided that the following conditions are - met: - - * Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. - * Redistributions in binary form must reproduce the above - copyright notice, this list of conditions and the following disclaimer - in the documentation and/or other materials provided with the - distribution. - * Neither the name of Google Inc. nor the names of its - contributors may be used to endorse or promote products derived from - this software without specific prior written permission. - - THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS - "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT - LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR - A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT - OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, - SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT - LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, - DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY - THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT - (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE - OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - - -* github.com/BurntSushi/toml - https://github.com/BurntSushi/toml - - The MIT License (MIT) - - Copyright (c) 2013 TOML authors - - Permission is hereby granted, free of charge, to any person obtaining a copy - of this software and associated documentation files (the "Software"), to deal - in the Software without restriction, including without limitation the rights - to use, copy, modify, merge, publish, distribute, sublicense, and/or sell - copies of the Software, and to permit persons to whom the Software is - furnished to do so, subject to the following conditions: - - The above copyright notice and this permission notice shall be included in - all copies or substantial portions of the Software. - - THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR - IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, - FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE - AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER - LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, - OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN - THE SOFTWARE. - -* gogrep - https://github.com/mvdan/gogrep - - Copyright (c) 2017, Daniel Martí. All rights reserved. 
- - Redistribution and use in source and binary forms, with or without - modification, are permitted provided that the following conditions are - met: - - * Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. - * Redistributions in binary form must reproduce the above - copyright notice, this list of conditions and the following disclaimer - in the documentation and/or other materials provided with the - distribution. - * Neither the name of the copyright holder nor the names of its - contributors may be used to endorse or promote products derived from - this software without specific prior written permission. - - THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS - "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT - LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR - A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT - OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, - SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT - LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, - DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY - THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT - (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE - OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -* gosmith - https://github.com/dvyukov/gosmith - - Copyright (c) 2014 Dmitry Vyukov. All rights reserved. - - Redistribution and use in source and binary forms, with or without - modification, are permitted provided that the following conditions are - met: - - * Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. - * Redistributions in binary form must reproduce the above - copyright notice, this list of conditions and the following disclaimer - in the documentation and/or other materials provided with the - distribution. - * The name of Dmitry Vyukov may be used to endorse or promote - products derived from this software without specific prior written permission. - - THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS - "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT - LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR - A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT - OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, - SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT - LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, - DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY - THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT - (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE - OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/vendor/honnef.co/go/tools/analysis/code/code.go b/vendor/honnef.co/go/tools/analysis/code/code.go deleted file mode 100644 index f200363d9..000000000 --- a/vendor/honnef.co/go/tools/analysis/code/code.go +++ /dev/null @@ -1,357 +0,0 @@ -// Package code answers structural and type questions about Go code. 
-package code - -import ( - "flag" - "fmt" - "go/ast" - "go/constant" - "go/token" - "go/types" - "strings" - - "honnef.co/go/tools/analysis/facts/generated" - "honnef.co/go/tools/analysis/facts/purity" - "honnef.co/go/tools/analysis/facts/tokenfile" - "honnef.co/go/tools/go/ast/astutil" - "honnef.co/go/tools/go/types/typeutil" - "honnef.co/go/tools/pattern" - - "golang.org/x/tools/go/analysis" -) - -type Positioner interface { - Pos() token.Pos -} - -func IsOfType(pass *analysis.Pass, expr ast.Expr, name string) bool { - return typeutil.IsType(pass.TypesInfo.TypeOf(expr), name) -} - -func IsInTest(pass *analysis.Pass, node Positioner) bool { - // FIXME(dh): this doesn't work for global variables with - // initializers - f := pass.Fset.File(node.Pos()) - return f != nil && strings.HasSuffix(f.Name(), "_test.go") -} - -// IsMain reports whether the package being processed is a package -// main. -func IsMain(pass *analysis.Pass) bool { - return pass.Pkg.Name() == "main" -} - -// IsMainLike reports whether the package being processed is a -// main-like package. A main-like package is a package that is -// package main, or that is intended to be used by a tool framework -// such as cobra to implement a command. -// -// Note that this function errs on the side of false positives; it may -// return true for packages that aren't main-like. IsMainLike is -// intended for analyses that wish to suppress diagnostics for -// main-like packages to avoid false positives. -func IsMainLike(pass *analysis.Pass) bool { - if pass.Pkg.Name() == "main" { - return true - } - for _, imp := range pass.Pkg.Imports() { - if imp.Path() == "github.com/spf13/cobra" { - return true - } - } - return false -} - -func SelectorName(pass *analysis.Pass, expr *ast.SelectorExpr) string { - info := pass.TypesInfo - sel := info.Selections[expr] - if sel == nil { - if x, ok := expr.X.(*ast.Ident); ok { - pkg, ok := info.ObjectOf(x).(*types.PkgName) - if !ok { - // This shouldn't happen - return fmt.Sprintf("%s.%s", x.Name, expr.Sel.Name) - } - return fmt.Sprintf("%s.%s", pkg.Imported().Path(), expr.Sel.Name) - } - panic(fmt.Sprintf("unsupported selector: %v", expr)) - } - if v, ok := sel.Obj().(*types.Var); ok && v.IsField() { - return fmt.Sprintf("(%s).%s", typeutil.DereferenceR(sel.Recv()), sel.Obj().Name()) - } else { - return fmt.Sprintf("(%s).%s", sel.Recv(), sel.Obj().Name()) - } -} - -func IsNil(pass *analysis.Pass, expr ast.Expr) bool { - return pass.TypesInfo.Types[expr].IsNil() -} - -func BoolConst(pass *analysis.Pass, expr ast.Expr) bool { - val := pass.TypesInfo.ObjectOf(expr.(*ast.Ident)).(*types.Const).Val() - return constant.BoolVal(val) -} - -func IsBoolConst(pass *analysis.Pass, expr ast.Expr) bool { - // We explicitly don't support typed bools because more often than - // not, custom bool types are used as binary enums and the - // explicit comparison is desired. 
- - ident, ok := expr.(*ast.Ident) - if !ok { - return false - } - obj := pass.TypesInfo.ObjectOf(ident) - c, ok := obj.(*types.Const) - if !ok { - return false - } - basic, ok := c.Type().(*types.Basic) - if !ok { - return false - } - if basic.Kind() != types.UntypedBool && basic.Kind() != types.Bool { - return false - } - return true -} - -func ExprToInt(pass *analysis.Pass, expr ast.Expr) (int64, bool) { - tv := pass.TypesInfo.Types[expr] - if tv.Value == nil { - return 0, false - } - if tv.Value.Kind() != constant.Int { - return 0, false - } - return constant.Int64Val(tv.Value) -} - -func ExprToString(pass *analysis.Pass, expr ast.Expr) (string, bool) { - val := pass.TypesInfo.Types[expr].Value - if val == nil { - return "", false - } - if val.Kind() != constant.String { - return "", false - } - return constant.StringVal(val), true -} - -func CallName(pass *analysis.Pass, call *ast.CallExpr) string { - fun := astutil.Unparen(call.Fun) - - // Instantiating a function cannot return another generic function, so doing this once is enough - switch idx := fun.(type) { - case *ast.IndexExpr: - fun = idx.X - case *ast.IndexListExpr: - fun = idx.X - } - - // (foo)[T] is not a valid instantiationg, so no need to unparen again. - - switch fun := fun.(type) { - case *ast.SelectorExpr: - fn, ok := pass.TypesInfo.ObjectOf(fun.Sel).(*types.Func) - if !ok { - return "" - } - return typeutil.FuncName(fn) - case *ast.Ident: - obj := pass.TypesInfo.ObjectOf(fun) - switch obj := obj.(type) { - case *types.Func: - return typeutil.FuncName(obj) - case *types.Builtin: - return obj.Name() - default: - return "" - } - default: - return "" - } -} - -func IsCallTo(pass *analysis.Pass, node ast.Node, name string) bool { - call, ok := node.(*ast.CallExpr) - if !ok { - return false - } - return CallName(pass, call) == name -} - -func IsCallToAny(pass *analysis.Pass, node ast.Node, names ...string) bool { - call, ok := node.(*ast.CallExpr) - if !ok { - return false - } - q := CallName(pass, call) - for _, name := range names { - if q == name { - return true - } - } - return false -} - -func File(pass *analysis.Pass, node Positioner) *ast.File { - m := pass.ResultOf[tokenfile.Analyzer].(map[*token.File]*ast.File) - return m[pass.Fset.File(node.Pos())] -} - -// IsGenerated reports whether pos is in a generated file, It ignores -// //line directives. -func IsGenerated(pass *analysis.Pass, pos token.Pos) bool { - _, ok := Generator(pass, pos) - return ok -} - -// Generator returns the generator that generated the file containing -// pos. It ignores //line directives. -func Generator(pass *analysis.Pass, pos token.Pos) (generated.Generator, bool) { - file := pass.Fset.PositionFor(pos, false).Filename - m := pass.ResultOf[generated.Analyzer].(map[string]generated.Generator) - g, ok := m[file] - return g, ok -} - -// MayHaveSideEffects reports whether expr may have side effects. If -// the purity argument is nil, this function implements a purely -// syntactic check, meaning that any function call may have side -// effects, regardless of the called function's body. Otherwise, -// purity will be consulted to determine the purity of function calls. -func MayHaveSideEffects(pass *analysis.Pass, expr ast.Expr, purity purity.Result) bool { - switch expr := expr.(type) { - case *ast.BadExpr: - return true - case *ast.Ellipsis: - return MayHaveSideEffects(pass, expr.Elt, purity) - case *ast.FuncLit: - // the literal itself cannot have side effects, only calling it - // might, which is handled by CallExpr. 
- return false - case *ast.ArrayType, *ast.StructType, *ast.FuncType, *ast.InterfaceType, *ast.MapType, *ast.ChanType: - // types cannot have side effects - return false - case *ast.BasicLit: - return false - case *ast.BinaryExpr: - return MayHaveSideEffects(pass, expr.X, purity) || MayHaveSideEffects(pass, expr.Y, purity) - case *ast.CallExpr: - if purity == nil { - return true - } - switch obj := typeutil.Callee(pass.TypesInfo, expr).(type) { - case *types.Func: - if _, ok := purity[obj]; !ok { - return true - } - case *types.Builtin: - switch obj.Name() { - case "len", "cap": - default: - return true - } - default: - return true - } - for _, arg := range expr.Args { - if MayHaveSideEffects(pass, arg, purity) { - return true - } - } - return false - case *ast.CompositeLit: - if MayHaveSideEffects(pass, expr.Type, purity) { - return true - } - for _, elt := range expr.Elts { - if MayHaveSideEffects(pass, elt, purity) { - return true - } - } - return false - case *ast.Ident: - return false - case *ast.IndexExpr: - return MayHaveSideEffects(pass, expr.X, purity) || MayHaveSideEffects(pass, expr.Index, purity) - case *ast.IndexListExpr: - // In theory, none of the checks are necessary, as IndexListExpr only involves types. But there is no harm in - // being safe. - if MayHaveSideEffects(pass, expr.X, purity) { - return true - } - for _, idx := range expr.Indices { - if MayHaveSideEffects(pass, idx, purity) { - return true - } - } - return false - case *ast.KeyValueExpr: - return MayHaveSideEffects(pass, expr.Key, purity) || MayHaveSideEffects(pass, expr.Value, purity) - case *ast.SelectorExpr: - return MayHaveSideEffects(pass, expr.X, purity) - case *ast.SliceExpr: - return MayHaveSideEffects(pass, expr.X, purity) || - MayHaveSideEffects(pass, expr.Low, purity) || - MayHaveSideEffects(pass, expr.High, purity) || - MayHaveSideEffects(pass, expr.Max, purity) - case *ast.StarExpr: - return MayHaveSideEffects(pass, expr.X, purity) - case *ast.TypeAssertExpr: - return MayHaveSideEffects(pass, expr.X, purity) - case *ast.UnaryExpr: - if MayHaveSideEffects(pass, expr.X, purity) { - return true - } - return expr.Op == token.ARROW || expr.Op == token.AND - case *ast.ParenExpr: - return MayHaveSideEffects(pass, expr.X, purity) - case nil: - return false - default: - panic(fmt.Sprintf("internal error: unhandled type %T", expr)) - } -} - -func IsGoVersion(pass *analysis.Pass, minor int) bool { - f, ok := pass.Analyzer.Flags.Lookup("go").Value.(flag.Getter) - if !ok { - panic("requested Go version, but analyzer has no version flag") - } - version := f.Get().(int) - return version >= minor -} - -var integerLiteralQ = pattern.MustParse(`(IntegerLiteral tv)`) - -func IntegerLiteral(pass *analysis.Pass, node ast.Node) (types.TypeAndValue, bool) { - m, ok := Match(pass, integerLiteralQ, node) - if !ok { - return types.TypeAndValue{}, false - } - return m.State["tv"].(types.TypeAndValue), true -} - -func IsIntegerLiteral(pass *analysis.Pass, node ast.Node, value constant.Value) bool { - tv, ok := IntegerLiteral(pass, node) - if !ok { - return false - } - return constant.Compare(tv.Value, token.EQL, value) -} - -// IsMethod reports whether expr is a method call of a named method with signature meth. -// If name is empty, it is not checked. -// For now, method expressions (Type.Method(recv, ..)) are not considered method calls. 
-func IsMethod(pass *analysis.Pass, expr *ast.SelectorExpr, name string, meth *types.Signature) bool { - if name != "" && expr.Sel.Name != name { - return false - } - sel, ok := pass.TypesInfo.Selections[expr] - if !ok || sel.Kind() != types.MethodVal { - return false - } - return types.Identical(sel.Type(), meth) -} diff --git a/vendor/honnef.co/go/tools/analysis/code/visit.go b/vendor/honnef.co/go/tools/analysis/code/visit.go deleted file mode 100644 index 0f0d644a1..000000000 --- a/vendor/honnef.co/go/tools/analysis/code/visit.go +++ /dev/null @@ -1,51 +0,0 @@ -package code - -import ( - "bytes" - "go/ast" - "go/format" - - "honnef.co/go/tools/pattern" - - "golang.org/x/tools/go/analysis" - "golang.org/x/tools/go/analysis/passes/inspect" - "golang.org/x/tools/go/ast/inspector" -) - -func Preorder(pass *analysis.Pass, fn func(ast.Node), types ...ast.Node) { - pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder(types, fn) -} - -func PreorderStack(pass *analysis.Pass, fn func(ast.Node, []ast.Node), types ...ast.Node) { - pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).WithStack(types, func(n ast.Node, push bool, stack []ast.Node) (proceed bool) { - if push { - fn(n, stack) - } - return true - }) -} - -func Match(pass *analysis.Pass, q pattern.Pattern, node ast.Node) (*pattern.Matcher, bool) { - // Note that we ignore q.Relevant – callers of Match usually use - // AST inspectors that already filter on nodes we're interested - // in. - m := &pattern.Matcher{TypesInfo: pass.TypesInfo} - ok := m.Match(q, node) - return m, ok -} - -func MatchAndEdit(pass *analysis.Pass, before, after pattern.Pattern, node ast.Node) (*pattern.Matcher, []analysis.TextEdit, bool) { - m, ok := Match(pass, before, node) - if !ok { - return m, nil, false - } - r := pattern.NodeToAST(after.Root, m.State) - buf := &bytes.Buffer{} - format.Node(buf, pass.Fset, r) - edit := []analysis.TextEdit{{ - Pos: node.Pos(), - End: node.End(), - NewText: buf.Bytes(), - }} - return m, edit, true -} diff --git a/vendor/honnef.co/go/tools/analysis/edit/edit.go b/vendor/honnef.co/go/tools/analysis/edit/edit.go deleted file mode 100644 index b118bdc02..000000000 --- a/vendor/honnef.co/go/tools/analysis/edit/edit.go +++ /dev/null @@ -1,83 +0,0 @@ -// Package edit contains helpers for creating suggested fixes. -package edit - -import ( - "bytes" - "go/ast" - "go/format" - "go/token" - - "golang.org/x/tools/go/analysis" - "honnef.co/go/tools/pattern" -) - -// Ranger describes values that have a start and end position. -// In most cases these are either ast.Node or manually constructed ranges. -type Ranger interface { - Pos() token.Pos - End() token.Pos -} - -// Range implements the Ranger interface. -type Range [2]token.Pos - -func (r Range) Pos() token.Pos { return r[0] } -func (r Range) End() token.Pos { return r[1] } - -// ReplaceWithString replaces a range with a string. -func ReplaceWithString(old Ranger, new string) analysis.TextEdit { - return analysis.TextEdit{ - Pos: old.Pos(), - End: old.End(), - NewText: []byte(new), - } -} - -// ReplaceWithNode replaces a range with an AST node. -func ReplaceWithNode(fset *token.FileSet, old Ranger, new ast.Node) analysis.TextEdit { - buf := &bytes.Buffer{} - if err := format.Node(buf, fset, new); err != nil { - panic("internal error: " + err.Error()) - } - return analysis.TextEdit{ - Pos: old.Pos(), - End: old.End(), - NewText: buf.Bytes(), - } -} - -// ReplaceWithPattern replaces a range with the result of executing a pattern. 
-func ReplaceWithPattern(fset *token.FileSet, old Ranger, new pattern.Pattern, state pattern.State) analysis.TextEdit { - r := pattern.NodeToAST(new.Root, state) - buf := &bytes.Buffer{} - format.Node(buf, fset, r) - return analysis.TextEdit{ - Pos: old.Pos(), - End: old.End(), - NewText: buf.Bytes(), - } -} - -// Delete deletes a range of code. -func Delete(old Ranger) analysis.TextEdit { - return analysis.TextEdit{ - Pos: old.Pos(), - End: old.End(), - NewText: nil, - } -} - -func Fix(msg string, edits ...analysis.TextEdit) analysis.SuggestedFix { - return analysis.SuggestedFix{ - Message: msg, - TextEdits: edits, - } -} - -// Selector creates a new selector expression. -func Selector(x, sel string) *ast.SelectorExpr { - return &ast.SelectorExpr{ - X: &ast.Ident{Name: x}, - Sel: &ast.Ident{Name: sel}, - } -} diff --git a/vendor/honnef.co/go/tools/analysis/facts/deprecated/deprecated.go b/vendor/honnef.co/go/tools/analysis/facts/deprecated/deprecated.go deleted file mode 100644 index dd6d655c3..000000000 --- a/vendor/honnef.co/go/tools/analysis/facts/deprecated/deprecated.go +++ /dev/null @@ -1,154 +0,0 @@ -package deprecated - -import ( - "go/ast" - "go/token" - "go/types" - "reflect" - "strings" - - "golang.org/x/tools/go/analysis" -) - -type IsDeprecated struct{ Msg string } - -func (*IsDeprecated) AFact() {} -func (d *IsDeprecated) String() string { return "Deprecated: " + d.Msg } - -type Result struct { - Objects map[types.Object]*IsDeprecated - Packages map[*types.Package]*IsDeprecated -} - -var Analyzer = &analysis.Analyzer{ - Name: "fact_deprecated", - Doc: "Mark deprecated objects", - Run: deprecated, - FactTypes: []analysis.Fact{(*IsDeprecated)(nil)}, - ResultType: reflect.TypeOf(Result{}), -} - -func deprecated(pass *analysis.Pass) (interface{}, error) { - var names []*ast.Ident - - extractDeprecatedMessage := func(docs []*ast.CommentGroup) string { - for _, doc := range docs { - if doc == nil { - continue - } - parts := strings.Split(doc.Text(), "\n\n") - for _, part := range parts { - if !strings.HasPrefix(part, "Deprecated: ") { - continue - } - alt := part[len("Deprecated: "):] - alt = strings.Replace(alt, "\n", " ", -1) - return alt - } - } - return "" - } - - doDocs := func(names []*ast.Ident, docs []*ast.CommentGroup) { - alt := extractDeprecatedMessage(docs) - if alt == "" { - return - } - - for _, name := range names { - obj := pass.TypesInfo.ObjectOf(name) - pass.ExportObjectFact(obj, &IsDeprecated{alt}) - } - } - - var docs []*ast.CommentGroup - for _, f := range pass.Files { - docs = append(docs, f.Doc) - } - if alt := extractDeprecatedMessage(docs); alt != "" { - // Don't mark package syscall as deprecated, even though - // it is. A lot of people still use it for simple - // constants like SIGKILL, and I am not comfortable - // telling them to use x/sys for that. - if pass.Pkg.Path() != "syscall" { - pass.ExportPackageFact(&IsDeprecated{alt}) - } - } - - docs = docs[:0] - for _, f := range pass.Files { - fn := func(node ast.Node) bool { - if node == nil { - return true - } - var ret bool - switch node := node.(type) { - case *ast.GenDecl: - switch node.Tok { - case token.TYPE, token.CONST, token.VAR: - docs = append(docs, node.Doc) - for i := range node.Specs { - switch n := node.Specs[i].(type) { - case *ast.ValueSpec: - names = append(names, n.Names...) 
- case *ast.TypeSpec: - names = append(names, n.Name) - } - } - ret = true - default: - return false - } - case *ast.FuncDecl: - docs = append(docs, node.Doc) - names = []*ast.Ident{node.Name} - ret = false - case *ast.TypeSpec: - docs = append(docs, node.Doc) - names = []*ast.Ident{node.Name} - ret = true - case *ast.ValueSpec: - docs = append(docs, node.Doc) - names = node.Names - ret = false - case *ast.File: - return true - case *ast.StructType: - for _, field := range node.Fields.List { - doDocs(field.Names, []*ast.CommentGroup{field.Doc}) - } - return false - case *ast.InterfaceType: - for _, field := range node.Methods.List { - doDocs(field.Names, []*ast.CommentGroup{field.Doc}) - } - return false - default: - return false - } - if len(names) == 0 || len(docs) == 0 { - return ret - } - doDocs(names, docs) - - docs = docs[:0] - names = nil - return ret - } - ast.Inspect(f, fn) - } - - out := Result{ - Objects: map[types.Object]*IsDeprecated{}, - Packages: map[*types.Package]*IsDeprecated{}, - } - - for _, fact := range pass.AllObjectFacts() { - out.Objects[fact.Object] = fact.Fact.(*IsDeprecated) - } - for _, fact := range pass.AllPackageFacts() { - out.Packages[fact.Package] = fact.Fact.(*IsDeprecated) - } - - return out, nil -} diff --git a/vendor/honnef.co/go/tools/analysis/facts/directives/directives.go b/vendor/honnef.co/go/tools/analysis/facts/directives/directives.go deleted file mode 100644 index a8c3522dd..000000000 --- a/vendor/honnef.co/go/tools/analysis/facts/directives/directives.go +++ /dev/null @@ -1,20 +0,0 @@ -package directives - -import ( - "reflect" - - "golang.org/x/tools/go/analysis" - "honnef.co/go/tools/analysis/lint" -) - -func directives(pass *analysis.Pass) (interface{}, error) { - return lint.ParseDirectives(pass.Files, pass.Fset), nil -} - -var Analyzer = &analysis.Analyzer{ - Name: "directives", - Doc: "extracts linter directives", - Run: directives, - RunDespiteErrors: true, - ResultType: reflect.TypeOf([]lint.Directive{}), -} diff --git a/vendor/honnef.co/go/tools/analysis/facts/generated/generated.go b/vendor/honnef.co/go/tools/analysis/facts/generated/generated.go deleted file mode 100644 index 240d06669..000000000 --- a/vendor/honnef.co/go/tools/analysis/facts/generated/generated.go +++ /dev/null @@ -1,97 +0,0 @@ -package generated - -import ( - "bufio" - "bytes" - "io" - "os" - "reflect" - "strings" - - "golang.org/x/tools/go/analysis" -) - -type Generator int - -// A list of known generators we can detect -const ( - Unknown Generator = iota - Goyacc - Cgo - Stringer - ProtocGenGo -) - -var ( - // used by cgo before Go 1.11 - oldCgo = []byte("// Created by cgo - DO NOT EDIT") - prefix = []byte("// Code generated ") - suffix = []byte(" DO NOT EDIT.") - nl = []byte("\n") - crnl = []byte("\r\n") -) - -func isGenerated(path string) (Generator, bool) { - f, err := os.Open(path) - if err != nil { - return 0, false - } - defer f.Close() - br := bufio.NewReader(f) - for { - s, err := br.ReadBytes('\n') - if err != nil && err != io.EOF { - return 0, false - } - s = bytes.TrimSuffix(s, crnl) - s = bytes.TrimSuffix(s, nl) - if bytes.HasPrefix(s, prefix) && bytes.HasSuffix(s, suffix) { - if len(s)-len(suffix) < len(prefix) { - return Unknown, true - } - - text := string(s[len(prefix) : len(s)-len(suffix)]) - switch text { - case "by goyacc.": - return Goyacc, true - case "by cmd/cgo;": - return Cgo, true - case "by protoc-gen-go.": - return ProtocGenGo, true - } - if strings.HasPrefix(text, `by "stringer `) { - return Stringer, true - } - if 
strings.HasPrefix(text, `by goyacc `) { - return Goyacc, true - } - - return Unknown, true - } - if bytes.Equal(s, oldCgo) { - return Cgo, true - } - if err == io.EOF { - break - } - } - return 0, false -} - -var Analyzer = &analysis.Analyzer{ - Name: "isgenerated", - Doc: "annotate file names that have been code generated", - Run: func(pass *analysis.Pass) (interface{}, error) { - m := map[string]Generator{} - for _, f := range pass.Files { - path := pass.Fset.PositionFor(f.Pos(), false).Filename - g, ok := isGenerated(path) - if ok { - m[path] = g - } - } - return m, nil - }, - RunDespiteErrors: true, - ResultType: reflect.TypeOf(map[string]Generator{}), -} diff --git a/vendor/honnef.co/go/tools/analysis/facts/nilness/nilness.go b/vendor/honnef.co/go/tools/analysis/facts/nilness/nilness.go deleted file mode 100644 index d296b5b0f..000000000 --- a/vendor/honnef.co/go/tools/analysis/facts/nilness/nilness.go +++ /dev/null @@ -1,256 +0,0 @@ -package nilness - -import ( - "fmt" - "go/token" - "go/types" - "reflect" - - "honnef.co/go/tools/go/ir" - "honnef.co/go/tools/go/types/typeutil" - "honnef.co/go/tools/internal/passes/buildir" - - "golang.org/x/tools/go/analysis" -) - -// neverReturnsNilFact denotes that a function's return value will never -// be nil (typed or untyped). The analysis errs on the side of false -// negatives. -type neverReturnsNilFact struct { - Rets []neverNilness -} - -func (*neverReturnsNilFact) AFact() {} -func (fact *neverReturnsNilFact) String() string { - return fmt.Sprintf("never returns nil: %v", fact.Rets) -} - -type Result struct { - m map[*types.Func][]neverNilness -} - -var Analysis = &analysis.Analyzer{ - Name: "nilness", - Doc: "Annotates return values that will never be nil (typed or untyped)", - Run: run, - Requires: []*analysis.Analyzer{buildir.Analyzer}, - FactTypes: []analysis.Fact{(*neverReturnsNilFact)(nil)}, - ResultType: reflect.TypeOf((*Result)(nil)), -} - -// MayReturnNil reports whether the ret's return value of fn might be -// a typed or untyped nil value. The value of ret is zero-based. When -// globalOnly is true, the only possible nil values are global -// variables. -// -// The analysis has false positives: MayReturnNil can incorrectly -// report true, but never incorrectly reports false. 
-func (r *Result) MayReturnNil(fn *types.Func, ret int) (yes bool, globalOnly bool) { - if !typeutil.IsPointerLike(fn.Type().(*types.Signature).Results().At(ret).Type()) { - return false, false - } - if len(r.m[fn]) == 0 { - return true, false - } - - v := r.m[fn][ret] - return v != neverNil, v == onlyGlobal -} - -func run(pass *analysis.Pass) (interface{}, error) { - seen := map[*ir.Function]struct{}{} - out := &Result{ - m: map[*types.Func][]neverNilness{}, - } - for _, fn := range pass.ResultOf[buildir.Analyzer].(*buildir.IR).SrcFuncs { - impl(pass, fn, seen) - } - - for _, fact := range pass.AllObjectFacts() { - out.m[fact.Object.(*types.Func)] = fact.Fact.(*neverReturnsNilFact).Rets - } - - return out, nil -} - -type neverNilness uint8 - -const ( - neverNil neverNilness = 1 - onlyGlobal neverNilness = 2 - nilly neverNilness = 3 -) - -func (n neverNilness) String() string { - switch n { - case neverNil: - return "never" - case onlyGlobal: - return "global" - case nilly: - return "nil" - default: - return "BUG" - } -} - -func impl(pass *analysis.Pass, fn *ir.Function, seenFns map[*ir.Function]struct{}) []neverNilness { - if fn.Object() == nil { - // TODO(dh): support closures - return nil - } - if fact := new(neverReturnsNilFact); pass.ImportObjectFact(fn.Object(), fact) { - return fact.Rets - } - if fn.Pkg != pass.ResultOf[buildir.Analyzer].(*buildir.IR).Pkg { - return nil - } - if fn.Blocks == nil { - return nil - } - if _, ok := seenFns[fn]; ok { - // break recursion - return nil - } - - seenFns[fn] = struct{}{} - - seen := map[ir.Value]struct{}{} - - var mightReturnNil func(v ir.Value) neverNilness - mightReturnNil = func(v ir.Value) neverNilness { - if _, ok := seen[v]; ok { - // break cycle - return nilly - } - if !typeutil.IsPointerLike(v.Type()) { - return neverNil - } - seen[v] = struct{}{} - switch v := v.(type) { - case *ir.MakeInterface: - return mightReturnNil(v.X) - case *ir.Convert: - return mightReturnNil(v.X) - case *ir.SliceToArrayPointer: - if typeutil.CoreType(v.Type()).(*types.Pointer).Elem().Underlying().(*types.Array).Len() == 0 { - return mightReturnNil(v.X) - } else { - // converting a slice to an array pointer of length > 0 panics if the slice is nil - return neverNil - } - case *ir.Slice: - return mightReturnNil(v.X) - case *ir.Phi: - ret := neverNil - for _, e := range v.Edges { - if n := mightReturnNil(e); n > ret { - ret = n - } - } - return ret - case *ir.Extract: - switch d := v.Tuple.(type) { - case *ir.Call: - if callee := d.Call.StaticCallee(); callee != nil { - ret := impl(pass, callee, seenFns) - if len(ret) == 0 { - return nilly - } - return ret[v.Index] - } else { - return nilly - } - case *ir.TypeAssert, *ir.Next, *ir.Select, *ir.MapLookup, *ir.TypeSwitch, *ir.Recv, *ir.Sigma: - // we don't need to look at the Extract's index - // because we've already checked its type. 
- return nilly - default: - panic(fmt.Sprintf("internal error: unhandled type %T", d)) - } - case *ir.Call: - if callee := v.Call.StaticCallee(); callee != nil { - ret := impl(pass, callee, seenFns) - if len(ret) == 0 { - return nilly - } - return ret[0] - } else { - return nilly - } - case *ir.BinOp, *ir.UnOp, *ir.Alloc, *ir.FieldAddr, *ir.IndexAddr, *ir.Global, *ir.MakeSlice, *ir.MakeClosure, *ir.Function, *ir.MakeMap, *ir.MakeChan: - return neverNil - case *ir.Sigma: - iff, ok := v.From.Control().(*ir.If) - if !ok { - return nilly - } - binop, ok := iff.Cond.(*ir.BinOp) - if !ok { - return nilly - } - isNil := func(v ir.Value) bool { - k, ok := v.(*ir.Const) - if !ok { - return false - } - return k.Value == nil - } - if binop.X == v.X && isNil(binop.Y) || binop.Y == v.X && isNil(binop.X) { - op := binop.Op - if v.From.Succs[0] != v.Block() { - // we're in the false branch, negate op - switch op { - case token.EQL: - op = token.NEQ - case token.NEQ: - op = token.EQL - default: - panic(fmt.Sprintf("internal error: unhandled token %v", op)) - } - } - switch op { - case token.EQL: - return nilly - case token.NEQ: - return neverNil - default: - panic(fmt.Sprintf("internal error: unhandled token %v", op)) - } - } - return nilly - case *ir.ChangeType: - return mightReturnNil(v.X) - case *ir.Load: - if _, ok := v.X.(*ir.Global); ok { - return onlyGlobal - } - return nilly - case *ir.AggregateConst: - return neverNil - case *ir.TypeAssert, *ir.ChangeInterface, *ir.Field, *ir.Const, *ir.GenericConst, *ir.Index, *ir.MapLookup, *ir.Parameter, *ir.Recv, *ir.TypeSwitch: - return nilly - case *ir.CompositeValue: - // We can get here via composite literals of type parameters, for which typeutil.IsPointerLike doesn't - // currently return false (see https://staticcheck.io/issues/1364). However, we only emit ir.CompositeValue - // for value types, so we know it can't be nil. - return neverNil - default: - panic(fmt.Sprintf("internal error: unhandled type %T", v)) - } - } - ret := fn.Exit.Control().(*ir.Return) - out := make([]neverNilness, len(ret.Results)) - export := false - for i, v := range ret.Results { - v := mightReturnNil(v) - out[i] = v - if v != nilly && typeutil.IsPointerLike(fn.Signature.Results().At(i).Type()) { - export = true - } - } - if export { - pass.ExportObjectFact(fn.Object(), &neverReturnsNilFact{out}) - } - return out -} diff --git a/vendor/honnef.co/go/tools/analysis/facts/purity/purity.go b/vendor/honnef.co/go/tools/analysis/facts/purity/purity.go deleted file mode 100644 index 0f6895a8c..000000000 --- a/vendor/honnef.co/go/tools/analysis/facts/purity/purity.go +++ /dev/null @@ -1,264 +0,0 @@ -package purity - -// TODO(dh): we should split this into two facts, one tracking actual purity, and one tracking side-effects. A function -// that returns a heap allocation isn't pure, but it may be free of side effects. 
- -import ( - "go/types" - "reflect" - - "honnef.co/go/tools/go/ir" - "honnef.co/go/tools/go/ir/irutil" - "honnef.co/go/tools/internal/passes/buildir" - - "golang.org/x/tools/go/analysis" -) - -type IsPure struct{} - -func (*IsPure) AFact() {} -func (d *IsPure) String() string { return "is pure" } - -type Result map[*types.Func]*IsPure - -var Analyzer = &analysis.Analyzer{ - Name: "fact_purity", - Doc: "Mark pure functions", - Run: purity, - Requires: []*analysis.Analyzer{buildir.Analyzer}, - FactTypes: []analysis.Fact{(*IsPure)(nil)}, - ResultType: reflect.TypeOf(Result{}), -} - -var pureStdlib = map[string]struct{}{ - "errors.New": {}, - "fmt.Errorf": {}, - "fmt.Sprintf": {}, - "fmt.Sprint": {}, - "sort.Reverse": {}, - "strings.Map": {}, - "strings.Repeat": {}, - "strings.Replace": {}, - "strings.Title": {}, - "strings.ToLower": {}, - "strings.ToLowerSpecial": {}, - "strings.ToTitle": {}, - "strings.ToTitleSpecial": {}, - "strings.ToUpper": {}, - "strings.ToUpperSpecial": {}, - "strings.Trim": {}, - "strings.TrimFunc": {}, - "strings.TrimLeft": {}, - "strings.TrimLeftFunc": {}, - "strings.TrimPrefix": {}, - "strings.TrimRight": {}, - "strings.TrimRightFunc": {}, - "strings.TrimSpace": {}, - "strings.TrimSuffix": {}, - "(*net/http.Request).WithContext": {}, - "time.Now": {}, - "time.Parse": {}, - "time.ParseInLocation": {}, - "time.Unix": {}, - "time.UnixMicro": {}, - "time.UnixMilli": {}, - "(time.Time).Add": {}, - "(time.Time).AddDate": {}, - "(time.Time).After": {}, - "(time.Time).Before": {}, - "(time.Time).Clock": {}, - "(time.Time).Compare": {}, - "(time.Time).Date": {}, - "(time.Time).Day": {}, - "(time.Time).Equal": {}, - "(time.Time).Format": {}, - "(time.Time).GoString": {}, - "(time.Time).GobEncode": {}, - "(time.Time).Hour": {}, - "(time.Time).ISOWeek": {}, - "(time.Time).In": {}, - "(time.Time).IsDST": {}, - "(time.Time).IsZero": {}, - "(time.Time).Local": {}, - "(time.Time).Location": {}, - "(time.Time).MarshalBinary": {}, - "(time.Time).MarshalJSON": {}, - "(time.Time).MarshalText": {}, - "(time.Time).Minute": {}, - "(time.Time).Month": {}, - "(time.Time).Nanosecond": {}, - "(time.Time).Round": {}, - "(time.Time).Second": {}, - "(time.Time).String": {}, - "(time.Time).Sub": {}, - "(time.Time).Truncate": {}, - "(time.Time).UTC": {}, - "(time.Time).Unix": {}, - "(time.Time).UnixMicro": {}, - "(time.Time).UnixMilli": {}, - "(time.Time).UnixNano": {}, - "(time.Time).Weekday": {}, - "(time.Time).Year": {}, - "(time.Time).YearDay": {}, - "(time.Time).Zone": {}, - "(time.Time).ZoneBounds": {}, -} - -func purity(pass *analysis.Pass) (interface{}, error) { - seen := map[*ir.Function]struct{}{} - irpkg := pass.ResultOf[buildir.Analyzer].(*buildir.IR).Pkg - var check func(fn *ir.Function) (ret bool) - check = func(fn *ir.Function) (ret bool) { - if fn.Object() == nil { - // TODO(dh): support closures - return false - } - if pass.ImportObjectFact(fn.Object(), new(IsPure)) { - return true - } - if fn.Pkg != irpkg { - // Function is in another package but wasn't marked as - // pure, ergo it isn't pure - return false - } - // Break recursion - if _, ok := seen[fn]; ok { - return false - } - - seen[fn] = struct{}{} - defer func() { - if ret { - pass.ExportObjectFact(fn.Object(), &IsPure{}) - } - }() - - if irutil.IsStub(fn) { - return false - } - - if _, ok := pureStdlib[fn.Object().(*types.Func).FullName()]; ok { - return true - } - - if fn.Signature.Results().Len() == 0 { - // A function with no return values is empty or is doing some - // work we cannot see (for example because of build 
tags); - // don't consider it pure. - return false - } - - var isBasic func(typ types.Type) bool - isBasic = func(typ types.Type) bool { - switch u := typ.Underlying().(type) { - case *types.Basic: - return true - case *types.Struct: - for i := 0; i < u.NumFields(); i++ { - if !isBasic(u.Field(i).Type()) { - return false - } - } - return true - default: - return false - } - } - - for _, param := range fn.Params { - // TODO(dh): this may not be strictly correct. pure code can, to an extent, operate on non-basic types. - if !isBasic(param.Type()) { - return false - } - } - - // Don't consider external functions pure. - if fn.Blocks == nil { - return false - } - checkCall := func(common *ir.CallCommon) bool { - if common.IsInvoke() { - return false - } - builtin, ok := common.Value.(*ir.Builtin) - if !ok { - if common.StaticCallee() != fn { - if common.StaticCallee() == nil { - return false - } - if !check(common.StaticCallee()) { - return false - } - } - } else { - switch builtin.Name() { - case "len", "cap": - default: - return false - } - } - return true - } - - var isStackAddr func(ir.Value) bool - isStackAddr = func(v ir.Value) bool { - switch v := v.(type) { - case *ir.Alloc: - return !v.Heap - case *ir.FieldAddr: - return isStackAddr(v.X) - default: - return false - } - } - for _, b := range fn.Blocks { - for _, ins := range b.Instrs { - switch ins := ins.(type) { - case *ir.Call: - if !checkCall(ins.Common()) { - return false - } - case *ir.Defer: - if !checkCall(&ins.Call) { - return false - } - case *ir.Select: - return false - case *ir.Send: - return false - case *ir.Go: - return false - case *ir.Panic: - return false - case *ir.Store: - if !isStackAddr(ins.Addr) { - return false - } - case *ir.FieldAddr: - if !isStackAddr(ins.X) { - return false - } - case *ir.Alloc: - // TODO(dh): make use of proper escape analysis - if ins.Heap { - return false - } - case *ir.Load: - if !isStackAddr(ins.X) { - return false - } - } - } - } - return true - } - for _, fn := range pass.ResultOf[buildir.Analyzer].(*buildir.IR).SrcFuncs { - check(fn) - } - - out := Result{} - for _, fact := range pass.AllObjectFacts() { - out[fact.Object.(*types.Func)] = fact.Fact.(*IsPure) - } - return out, nil -} diff --git a/vendor/honnef.co/go/tools/analysis/facts/tokenfile/token.go b/vendor/honnef.co/go/tools/analysis/facts/tokenfile/token.go deleted file mode 100644 index e7f747f00..000000000 --- a/vendor/honnef.co/go/tools/analysis/facts/tokenfile/token.go +++ /dev/null @@ -1,24 +0,0 @@ -package tokenfile - -import ( - "go/ast" - "go/token" - "reflect" - - "golang.org/x/tools/go/analysis" -) - -var Analyzer = &analysis.Analyzer{ - Name: "tokenfileanalyzer", - Doc: "creates a mapping of *token.File to *ast.File", - Run: func(pass *analysis.Pass) (interface{}, error) { - m := map[*token.File]*ast.File{} - for _, af := range pass.Files { - tf := pass.Fset.File(af.Pos()) - m[tf] = af - } - return m, nil - }, - RunDespiteErrors: true, - ResultType: reflect.TypeOf(map[*token.File]*ast.File{}), -} diff --git a/vendor/honnef.co/go/tools/analysis/facts/typedness/typedness.go b/vendor/honnef.co/go/tools/analysis/facts/typedness/typedness.go deleted file mode 100644 index 3bbe20603..000000000 --- a/vendor/honnef.co/go/tools/analysis/facts/typedness/typedness.go +++ /dev/null @@ -1,253 +0,0 @@ -package typedness - -import ( - "fmt" - "go/token" - "go/types" - "reflect" - - "honnef.co/go/tools/go/ir" - "honnef.co/go/tools/go/ir/irutil" - "honnef.co/go/tools/internal/passes/buildir" - - "golang.org/x/exp/typeparams" - 
"golang.org/x/tools/go/analysis" -) - -// alwaysTypedFact denotes that a function's return value will never -// be untyped nil. The analysis errs on the side of false negatives. -type alwaysTypedFact struct { - Rets uint8 -} - -func (*alwaysTypedFact) AFact() {} -func (fact *alwaysTypedFact) String() string { - return fmt.Sprintf("always typed: %08b", fact.Rets) -} - -type Result struct { - m map[*types.Func]uint8 -} - -var Analysis = &analysis.Analyzer{ - Name: "typedness", - Doc: "Annotates return values that are always typed values", - Run: run, - Requires: []*analysis.Analyzer{buildir.Analyzer}, - FactTypes: []analysis.Fact{(*alwaysTypedFact)(nil)}, - ResultType: reflect.TypeOf((*Result)(nil)), -} - -// MustReturnTyped reports whether the ret's return value of fn must -// be a typed value, i.e. an interface value containing a concrete -// type or trivially a concrete type. The value of ret is zero-based. -// -// The analysis has false negatives: MustReturnTyped may incorrectly -// report false, but never incorrectly reports true. -func (r *Result) MustReturnTyped(fn *types.Func, ret int) bool { - if _, ok := fn.Type().(*types.Signature).Results().At(ret).Type().Underlying().(*types.Interface); !ok { - return true - } - return (r.m[fn] & (1 << ret)) != 0 -} - -func run(pass *analysis.Pass) (interface{}, error) { - seen := map[*ir.Function]struct{}{} - out := &Result{ - m: map[*types.Func]uint8{}, - } - for _, fn := range pass.ResultOf[buildir.Analyzer].(*buildir.IR).SrcFuncs { - impl(pass, fn, seen) - } - - for _, fact := range pass.AllObjectFacts() { - out.m[fact.Object.(*types.Func)] = fact.Fact.(*alwaysTypedFact).Rets - } - - return out, nil -} - -func impl(pass *analysis.Pass, fn *ir.Function, seenFns map[*ir.Function]struct{}) (out uint8) { - if fn.Signature.Results().Len() > 8 { - return 0 - } - if fn.Object() == nil { - // TODO(dh): support closures - return 0 - } - if fact := new(alwaysTypedFact); pass.ImportObjectFact(fn.Object(), fact) { - return fact.Rets - } - if fn.Pkg != pass.ResultOf[buildir.Analyzer].(*buildir.IR).Pkg { - return 0 - } - if fn.Blocks == nil { - return 0 - } - if irutil.IsStub(fn) { - return 0 - } - if _, ok := seenFns[fn]; ok { - // break recursion - return 0 - } - - seenFns[fn] = struct{}{} - defer func() { - for i := 0; i < fn.Signature.Results().Len(); i++ { - if _, ok := fn.Signature.Results().At(i).Type().Underlying().(*types.Interface); !ok { - // we don't need facts to know that non-interface - // types can't be untyped nil. zeroing out those bits - // may result in all bits being zero, in which case we - // don't have to save any fact. - out &= ^(1 << i) - } - } - if out > 0 { - pass.ExportObjectFact(fn.Object(), &alwaysTypedFact{out}) - } - }() - - isUntypedNil := func(v ir.Value) bool { - k, ok := v.(*ir.Const) - if !ok { - return false - } - if _, ok := k.Type().Underlying().(*types.Interface); !ok { - return false - } - return k.Value == nil - } - - var do func(v ir.Value, seen map[ir.Value]struct{}) bool - do = func(v ir.Value, seen map[ir.Value]struct{}) bool { - if _, ok := seen[v]; ok { - // break cycle - return false - } - seen[v] = struct{}{} - switch v := v.(type) { - case *ir.Const: - // can't be a typed nil, because then we'd be returning the - // result of MakeInterface. - return false - case *ir.ChangeInterface: - return do(v.X, seen) - case *ir.Extract: - call, ok := v.Tuple.(*ir.Call) - if !ok { - // We only care about extracts of function results. For - // everything else (e.g. 
channel receives and map - // lookups), we can either not deduce any information, or - // will see a MakeInterface. - return false - } - if callee := call.Call.StaticCallee(); callee != nil { - return impl(pass, callee, seenFns)&(1<interface conversions, which - // don't tell us anything about the nilness. - return false - case *ir.MapLookup, *ir.Index, *ir.Recv, *ir.Parameter, *ir.Load, *ir.Field: - // All other instructions that tell us nothing about the - // typedness of interface values. - return false - default: - panic(fmt.Sprintf("internal error: unhandled type %T", v)) - } - } - - ret := fn.Exit.Control().(*ir.Return) - for i, v := range ret.Results { - typ := fn.Signature.Results().At(i).Type() - if _, ok := typ.Underlying().(*types.Interface); ok && !typeparams.IsTypeParam(typ) { - if do(v, map[ir.Value]struct{}{}) { - out |= 1 << i - } - } - } - return out -} diff --git a/vendor/honnef.co/go/tools/analysis/lint/lint.go b/vendor/honnef.co/go/tools/analysis/lint/lint.go deleted file mode 100644 index 7d116a23d..000000000 --- a/vendor/honnef.co/go/tools/analysis/lint/lint.go +++ /dev/null @@ -1,291 +0,0 @@ -// Package lint provides abstractions on top of go/analysis. -// These abstractions add extra information to analyzes, such as structured documentation and severities. -package lint - -import ( - "flag" - "fmt" - "go/ast" - "go/build" - "go/token" - "regexp" - "strconv" - "strings" - - "golang.org/x/tools/go/analysis" -) - -// Analyzer wraps a go/analysis.Analyzer and provides structured documentation. -type Analyzer struct { - // The analyzer's documentation. Unlike go/analysis.Analyzer.Doc, - // this field is structured, providing access to severity, options - // etc. - Doc *Documentation - Analyzer *analysis.Analyzer -} - -func (a *Analyzer) initialize() { - a.Analyzer.Doc = a.Doc.String() - if a.Analyzer.Flags.Usage == nil { - fs := flag.NewFlagSet("", flag.PanicOnError) - fs.Var(newVersionFlag(), "go", "Target Go version") - a.Analyzer.Flags = *fs - } -} - -// InitializeAnalyzers takes a map of documentation and a map of go/analysis.Analyzers and returns a slice of Analyzers. -// The map keys are the analyzer names. -func InitializeAnalyzers(docs map[string]*Documentation, analyzers map[string]*analysis.Analyzer) []*Analyzer { - out := make([]*Analyzer, 0, len(analyzers)) - for k, v := range analyzers { - v.Name = k - a := &Analyzer{ - Doc: docs[k], - Analyzer: v, - } - a.initialize() - out = append(out, a) - } - return out -} - -// Severity describes the severity of diagnostics reported by an analyzer. -type Severity int - -const ( - SeverityNone Severity = iota - SeverityError - SeverityDeprecated - SeverityWarning - SeverityInfo - SeverityHint -) - -// MergeStrategy sets how merge mode should behave for diagnostics of an analyzer. 
-type MergeStrategy int - -const ( - MergeIfAny MergeStrategy = iota - MergeIfAll -) - -type RawDocumentation struct { - Title string - Text string - Before string - After string - Since string - NonDefault bool - Options []string - Severity Severity - MergeIf MergeStrategy -} - -type Documentation struct { - Title string - Text string - - TitleMarkdown string - TextMarkdown string - - Before string - After string - Since string - NonDefault bool - Options []string - Severity Severity - MergeIf MergeStrategy -} - -func Markdownify(m map[string]*RawDocumentation) map[string]*Documentation { - out := make(map[string]*Documentation, len(m)) - for k, v := range m { - out[k] = &Documentation{ - Title: strings.TrimSpace(stripMarkdown(v.Title)), - Text: strings.TrimSpace(stripMarkdown(v.Text)), - - TitleMarkdown: strings.TrimSpace(toMarkdown(v.Title)), - TextMarkdown: strings.TrimSpace(toMarkdown(v.Text)), - - Before: strings.TrimSpace(v.Before), - After: strings.TrimSpace(v.After), - Since: v.Since, - NonDefault: v.NonDefault, - Options: v.Options, - Severity: v.Severity, - MergeIf: v.MergeIf, - } - } - return out -} - -func toMarkdown(s string) string { - return strings.NewReplacer(`\'`, "`", `\"`, "`").Replace(s) -} - -func stripMarkdown(s string) string { - return strings.NewReplacer(`\'`, "", `\"`, "'").Replace(s) -} - -func (doc *Documentation) Format(metadata bool) string { - return doc.format(false, metadata) -} - -func (doc *Documentation) FormatMarkdown(metadata bool) string { - return doc.format(true, metadata) -} - -func (doc *Documentation) format(markdown bool, metadata bool) string { - b := &strings.Builder{} - if markdown { - fmt.Fprintf(b, "%s\n\n", doc.TitleMarkdown) - if doc.Text != "" { - fmt.Fprintf(b, "%s\n\n", doc.TextMarkdown) - } - } else { - fmt.Fprintf(b, "%s\n\n", doc.Title) - if doc.Text != "" { - fmt.Fprintf(b, "%s\n\n", doc.Text) - } - } - - if doc.Before != "" { - fmt.Fprintln(b, "Before:") - fmt.Fprintln(b, "") - for _, line := range strings.Split(doc.Before, "\n") { - fmt.Fprint(b, " ", line, "\n") - } - fmt.Fprintln(b, "") - fmt.Fprintln(b, "After:") - fmt.Fprintln(b, "") - for _, line := range strings.Split(doc.After, "\n") { - fmt.Fprint(b, " ", line, "\n") - } - fmt.Fprintln(b, "") - } - - if metadata { - fmt.Fprint(b, "Available since\n ") - if doc.Since == "" { - fmt.Fprint(b, "unreleased") - } else { - fmt.Fprintf(b, "%s", doc.Since) - } - if doc.NonDefault { - fmt.Fprint(b, ", non-default") - } - fmt.Fprint(b, "\n") - if len(doc.Options) > 0 { - fmt.Fprintf(b, "\nOptions\n") - for _, opt := range doc.Options { - fmt.Fprintf(b, " %s", opt) - } - fmt.Fprint(b, "\n") - } - } - - return b.String() -} - -func (doc *Documentation) String() string { - return doc.Format(true) -} - -func newVersionFlag() flag.Getter { - tags := build.Default.ReleaseTags - v := tags[len(tags)-1][2:] - version := new(VersionFlag) - if err := version.Set(v); err != nil { - panic(fmt.Sprintf("internal error: %s", err)) - } - return version -} - -type VersionFlag int - -func (v *VersionFlag) String() string { - return fmt.Sprintf("1.%d", *v) -} - -var goVersionRE = regexp.MustCompile(`^(?:go)?1.(\d+).*$`) - -// ParseGoVersion parses Go versions of the form 1.M, 1.M.N, or 1.M.NrcR, with an optional "go" prefix. It assumes that -// versions have already been verified and are valid. 
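// A minimal usage sketch for ParseGoVersion (inputs and results are
// illustrative, not taken from the original file):
//
//	n, ok := ParseGoVersion("go1.21.3") // n == 21, ok == true
//	n, ok = ParseGoVersion("1.18rc1")   // n == 18, ok == true
//	n, ok = ParseGoVersion("banana")    // n == 0,  ok == false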
-func ParseGoVersion(s string) (int, bool) { - m := goVersionRE.FindStringSubmatch(s) - if m == nil { - return 0, false - } - n, err := strconv.Atoi(m[1]) - if err != nil { - return 0, false - } - return n, true -} - -func (v *VersionFlag) Set(s string) error { - n, ok := ParseGoVersion(s) - if !ok { - return fmt.Errorf("invalid Go version: %q", s) - } - *v = VersionFlag(n) - return nil -} - -func (v *VersionFlag) Get() interface{} { - return int(*v) -} - -// ExhaustiveTypeSwitch panics when called. It can be used to ensure -// that type switches are exhaustive. -func ExhaustiveTypeSwitch(v interface{}) { - panic(fmt.Sprintf("internal error: unhandled case %T", v)) -} - -// A directive is a comment of the form '//lint: -// [arguments...]'. It represents instructions to the static analysis -// tool. -type Directive struct { - Command string - Arguments []string - Directive *ast.Comment - Node ast.Node -} - -func parseDirective(s string) (cmd string, args []string) { - if !strings.HasPrefix(s, "//lint:") { - return "", nil - } - s = strings.TrimPrefix(s, "//lint:") - fields := strings.Split(s, " ") - return fields[0], fields[1:] -} - -// ParseDirectives extracts all directives from a list of Go files. -func ParseDirectives(files []*ast.File, fset *token.FileSet) []Directive { - var dirs []Directive - for _, f := range files { - // OPT(dh): in our old code, we skip all the comment map work if we - // couldn't find any directives, benchmark if that's actually - // worth doing - cm := ast.NewCommentMap(fset, f, f.Comments) - for node, cgs := range cm { - for _, cg := range cgs { - for _, c := range cg.List { - if !strings.HasPrefix(c.Text, "//lint:") { - continue - } - cmd, args := parseDirective(c.Text) - d := Directive{ - Command: cmd, - Arguments: args, - Directive: c, - Node: node, - } - dirs = append(dirs, d) - } - } - } - } - return dirs -} diff --git a/vendor/honnef.co/go/tools/analysis/report/report.go b/vendor/honnef.co/go/tools/analysis/report/report.go deleted file mode 100644 index d52945e72..000000000 --- a/vendor/honnef.co/go/tools/analysis/report/report.go +++ /dev/null @@ -1,247 +0,0 @@ -package report - -import ( - "bytes" - "fmt" - "go/ast" - "go/format" - "go/token" - "path/filepath" - "strconv" - "strings" - - "honnef.co/go/tools/analysis/facts/generated" - "honnef.co/go/tools/go/ast/astutil" - - "golang.org/x/tools/go/analysis" -) - -type Options struct { - ShortRange bool - FilterGenerated bool - Fixes []analysis.SuggestedFix - Related []analysis.RelatedInformation -} - -type Option func(*Options) - -func ShortRange() Option { - return func(opts *Options) { - opts.ShortRange = true - } -} - -func FilterGenerated() Option { - return func(opts *Options) { - opts.FilterGenerated = true - } -} - -func Fixes(fixes ...analysis.SuggestedFix) Option { - return func(opts *Options) { - opts.Fixes = append(opts.Fixes, fixes...) - } -} - -func Related(node Positioner, message string) Option { - return func(opts *Options) { - pos, end, ok := getRange(node, opts.ShortRange) - if !ok { - return - } - r := analysis.RelatedInformation{ - Pos: pos, - End: end, - Message: message, - } - opts.Related = append(opts.Related, r) - } -} - -type Positioner interface { - Pos() token.Pos -} - -type fullPositioner interface { - Pos() token.Pos - End() token.Pos -} - -type sourcer interface { - Source() ast.Node -} - -// shortRange returns the position and end of the main component of an -// AST node. For nodes that have no body, the short range is identical -// to the node's Pos and End. 
For nodes that do have a body, the short -// range excludes the body. -func shortRange(node ast.Node) (pos, end token.Pos) { - switch node := node.(type) { - case *ast.File: - return node.Pos(), node.Name.End() - case *ast.CaseClause: - return node.Pos(), node.Colon + 1 - case *ast.CommClause: - return node.Pos(), node.Colon + 1 - case *ast.DeferStmt: - return node.Pos(), node.Defer + token.Pos(len("defer")) - case *ast.ExprStmt: - return shortRange(node.X) - case *ast.ForStmt: - if node.Post != nil { - return node.For, node.Post.End() - } else if node.Cond != nil { - return node.For, node.Cond.End() - } else if node.Init != nil { - // +1 to catch the semicolon, for gofmt'ed code - return node.Pos(), node.Init.End() + 1 - } else { - return node.Pos(), node.For + token.Pos(len("for")) - } - case *ast.FuncDecl: - return node.Pos(), node.Type.End() - case *ast.FuncLit: - return node.Pos(), node.Type.End() - case *ast.GoStmt: - if _, ok := astutil.Unparen(node.Call.Fun).(*ast.FuncLit); ok { - return node.Pos(), node.Go + token.Pos(len("go")) - } else { - return node.Pos(), node.End() - } - case *ast.IfStmt: - return node.Pos(), node.Cond.End() - case *ast.RangeStmt: - return node.Pos(), node.X.End() - case *ast.SelectStmt: - return node.Pos(), node.Pos() + token.Pos(len("select")) - case *ast.SwitchStmt: - if node.Tag != nil { - return node.Pos(), node.Tag.End() - } else if node.Init != nil { - // +1 to catch the semicolon, for gofmt'ed code - return node.Pos(), node.Init.End() + 1 - } else { - return node.Pos(), node.Pos() + token.Pos(len("switch")) - } - case *ast.TypeSwitchStmt: - return node.Pos(), node.Assign.End() - default: - return node.Pos(), node.End() - } -} - -func HasRange(node Positioner) bool { - // we don't know if getRange will be called with shortRange set to - // true, so make sure that both work. 
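// As a concrete illustration of the short range (the example code is not from
// the original file): for an *ast.IfStmt such as
//
//	if err != nil { return err }
//
// shortRange reports node.Pos() through node.Cond.End(), i.e. it covers
// "if err != nil" and leaves out the body.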
- _, _, ok := getRange(node, false) - if !ok { - return false - } - _, _, ok = getRange(node, true) - return ok -} - -func getRange(node Positioner, short bool) (pos, end token.Pos, ok bool) { - switch n := node.(type) { - case sourcer: - s := n.Source() - if s == nil { - return 0, 0, false - } - if short { - p, e := shortRange(s) - return p, e, true - } - return s.Pos(), s.End(), true - case fullPositioner: - if short { - p, e := shortRange(n) - return p, e, true - } - return n.Pos(), n.End(), true - default: - return n.Pos(), token.NoPos, true - } -} - -func Report(pass *analysis.Pass, node Positioner, message string, opts ...Option) { - cfg := &Options{} - for _, opt := range opts { - opt(cfg) - } - - file := DisplayPosition(pass.Fset, node.Pos()).Filename - if cfg.FilterGenerated { - m := pass.ResultOf[generated.Analyzer].(map[string]generated.Generator) - if _, ok := m[file]; ok { - return - } - } - - pos, end, ok := getRange(node, cfg.ShortRange) - if !ok { - panic(fmt.Sprintf("no valid position for reporting node %v", node)) - } - d := analysis.Diagnostic{ - Pos: pos, - End: end, - Message: message, - SuggestedFixes: cfg.Fixes, - Related: cfg.Related, - } - pass.Report(d) -} - -func Render(pass *analysis.Pass, x interface{}) string { - var buf bytes.Buffer - if err := format.Node(&buf, pass.Fset, x); err != nil { - panic(err) - } - return buf.String() -} - -func RenderArgs(pass *analysis.Pass, args []ast.Expr) string { - var ss []string - for _, arg := range args { - ss = append(ss, Render(pass, arg)) - } - return strings.Join(ss, ", ") -} - -func DisplayPosition(fset *token.FileSet, p token.Pos) token.Position { - if p == token.NoPos { - return token.Position{} - } - - // Only use the adjusted position if it points to another Go file. - // This means we'll point to the original file for cgo files, but - // we won't point to a YACC grammar file. - pos := fset.PositionFor(p, false) - adjPos := fset.PositionFor(p, true) - - if filepath.Ext(adjPos.Filename) == ".go" { - return adjPos - } - - return pos -} - -func Ordinal(n int) string { - suffix := "th" - if n < 10 || n > 20 { - switch n % 10 { - case 0: - suffix = "th" - case 1: - suffix = "st" - case 2: - suffix = "nd" - case 3: - suffix = "rd" - default: - suffix = "th" - } - } - - return strconv.Itoa(n) + suffix -} diff --git a/vendor/honnef.co/go/tools/config/config.go b/vendor/honnef.co/go/tools/config/config.go deleted file mode 100644 index a815a8a84..000000000 --- a/vendor/honnef.co/go/tools/config/config.go +++ /dev/null @@ -1,266 +0,0 @@ -package config - -import ( - "bytes" - "fmt" - "go/ast" - "go/token" - "os" - "path/filepath" - "reflect" - "strings" - - "github.com/BurntSushi/toml" - "golang.org/x/tools/go/analysis" -) - -// Dir looks at a list of absolute file names, which should make up a -// single package, and returns the path of the directory that may -// contain a staticcheck.conf file. It returns the empty string if no -// such directory could be determined, for example because all files -// were located in Go's build cache. -func Dir(files []string) string { - if len(files) == 0 { - return "" - } - cache, err := os.UserCacheDir() - if err != nil { - cache = "" - } - var path string - for _, p := range files { - // FIXME(dh): using strings.HasPrefix isn't technically - // correct, but it should be good enough for now. 
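// An illustration with made-up paths, assuming the user cache directory is
// /home/u/.cache (so the Go build cache lives under /home/u/.cache/go-build):
//
//	Dir([]string{
//		"/home/u/.cache/go-build/ab/cd1234-d.go", // inside the build cache: skipped
//		"/home/u/src/demo/main.go",
//	})
//	// returns "/home/u/src/demo"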
- if cache != "" && strings.HasPrefix(p, cache) { - // File in the build cache of the standard Go build system - continue - } - path = p - break - } - - if path == "" { - // The package only consists of generated files. - return "" - } - - dir := filepath.Dir(path) - return dir -} - -func dirAST(files []*ast.File, fset *token.FileSet) string { - names := make([]string, len(files)) - for i, f := range files { - names[i] = fset.PositionFor(f.Pos(), true).Filename - } - return Dir(names) -} - -var Analyzer = &analysis.Analyzer{ - Name: "config", - Doc: "loads configuration for the current package tree", - Run: func(pass *analysis.Pass) (interface{}, error) { - dir := dirAST(pass.Files, pass.Fset) - if dir == "" { - cfg := DefaultConfig - return &cfg, nil - } - cfg, err := Load(dir) - if err != nil { - return nil, fmt.Errorf("error loading staticcheck.conf: %s", err) - } - return &cfg, nil - }, - RunDespiteErrors: true, - ResultType: reflect.TypeOf((*Config)(nil)), -} - -func For(pass *analysis.Pass) *Config { - return pass.ResultOf[Analyzer].(*Config) -} - -func mergeLists(a, b []string) []string { - out := make([]string, 0, len(a)+len(b)) - for _, el := range b { - if el == "inherit" { - out = append(out, a...) - } else { - out = append(out, el) - } - } - - return out -} - -func normalizeList(list []string) []string { - if len(list) > 1 { - nlist := make([]string, 0, len(list)) - nlist = append(nlist, list[0]) - for i, el := range list[1:] { - if el != list[i] { - nlist = append(nlist, el) - } - } - list = nlist - } - - for _, el := range list { - if el == "inherit" { - // This should never happen, because the default config - // should not use "inherit" - panic(`unresolved "inherit"`) - } - } - - return list -} - -func (cfg Config) Merge(ocfg Config) Config { - if ocfg.Checks != nil { - cfg.Checks = mergeLists(cfg.Checks, ocfg.Checks) - } - if ocfg.Initialisms != nil { - cfg.Initialisms = mergeLists(cfg.Initialisms, ocfg.Initialisms) - } - if ocfg.DotImportWhitelist != nil { - cfg.DotImportWhitelist = mergeLists(cfg.DotImportWhitelist, ocfg.DotImportWhitelist) - } - if ocfg.HTTPStatusCodeWhitelist != nil { - cfg.HTTPStatusCodeWhitelist = mergeLists(cfg.HTTPStatusCodeWhitelist, ocfg.HTTPStatusCodeWhitelist) - } - return cfg -} - -type Config struct { - // TODO(dh): this implementation makes it impossible for external - // clients to add their own checkers with configuration. At the - // moment, we don't really care about that; we don't encourage - // that people use this package. In the future, we may. The - // obvious solution would be using map[string]interface{}, but - // that's obviously subpar. - - Checks []string `toml:"checks"` - Initialisms []string `toml:"initialisms"` - DotImportWhitelist []string `toml:"dot_import_whitelist"` - HTTPStatusCodeWhitelist []string `toml:"http_status_code_whitelist"` -} - -func (c Config) String() string { - buf := &bytes.Buffer{} - - fmt.Fprintf(buf, "Checks: %#v\n", c.Checks) - fmt.Fprintf(buf, "Initialisms: %#v\n", c.Initialisms) - fmt.Fprintf(buf, "DotImportWhitelist: %#v\n", c.DotImportWhitelist) - fmt.Fprintf(buf, "HTTPStatusCodeWhitelist: %#v", c.HTTPStatusCodeWhitelist) - - return buf.String() -} - -// DefaultConfig is the default configuration. -// Its initial value describes the majority of the default configuration, -// but the Checks field can be updated at runtime based on the analyzers being used, to disable non-default checks. -// For cmd/staticcheck, this is handled by (*lintcmd.Command).Run. 
-// -// Note that DefaultConfig shouldn't be modified while analyzers are executing. -var DefaultConfig = Config{ - Checks: []string{"all"}, - Initialisms: []string{ - "ACL", "API", "ASCII", "CPU", "CSS", "DNS", - "EOF", "GUID", "HTML", "HTTP", "HTTPS", "ID", - "IP", "JSON", "QPS", "RAM", "RPC", "SLA", - "SMTP", "SQL", "SSH", "TCP", "TLS", "TTL", - "UDP", "UI", "GID", "UID", "UUID", "URI", - "URL", "UTF8", "VM", "XML", "XMPP", "XSRF", - "XSS", "SIP", "RTP", "AMQP", "DB", "TS", - }, - DotImportWhitelist: []string{ - "github.com/mmcloughlin/avo/build", - "github.com/mmcloughlin/avo/operand", - "github.com/mmcloughlin/avo/reg", - }, - HTTPStatusCodeWhitelist: []string{"200", "400", "404", "500"}, -} - -const ConfigName = "staticcheck.conf" - -type ParseError struct { - Filename string - toml.ParseError -} - -func parseConfigs(dir string) ([]Config, error) { - var out []Config - - // TODO(dh): consider stopping at the GOPATH/module boundary - for dir != "" { - f, err := os.Open(filepath.Join(dir, ConfigName)) - if os.IsNotExist(err) { - ndir := filepath.Dir(dir) - if ndir == dir { - break - } - dir = ndir - continue - } - if err != nil { - return nil, err - } - var cfg Config - _, err = toml.NewDecoder(f).Decode(&cfg) - f.Close() - if err != nil { - if err, ok := err.(toml.ParseError); ok { - return nil, ParseError{ - Filename: filepath.Join(dir, ConfigName), - ParseError: err, - } - } - return nil, err - } - out = append(out, cfg) - ndir := filepath.Dir(dir) - if ndir == dir { - break - } - dir = ndir - } - out = append(out, DefaultConfig) - if len(out) < 2 { - return out, nil - } - for i := 0; i < len(out)/2; i++ { - out[i], out[len(out)-1-i] = out[len(out)-1-i], out[i] - } - return out, nil -} - -func mergeConfigs(confs []Config) Config { - if len(confs) == 0 { - // This shouldn't happen because we always have at least a - // default config. 
- panic("trying to merge zero configs") - } - if len(confs) == 1 { - return confs[0] - } - conf := confs[0] - for _, oconf := range confs[1:] { - conf = conf.Merge(oconf) - } - return conf -} - -func Load(dir string) (Config, error) { - confs, err := parseConfigs(dir) - if err != nil { - return Config{}, err - } - conf := mergeConfigs(confs) - - conf.Checks = normalizeList(conf.Checks) - conf.Initialisms = normalizeList(conf.Initialisms) - conf.DotImportWhitelist = normalizeList(conf.DotImportWhitelist) - conf.HTTPStatusCodeWhitelist = normalizeList(conf.HTTPStatusCodeWhitelist) - - return conf, nil -} diff --git a/vendor/honnef.co/go/tools/config/example.conf b/vendor/honnef.co/go/tools/config/example.conf deleted file mode 100644 index b25c3a815..000000000 --- a/vendor/honnef.co/go/tools/config/example.conf +++ /dev/null @@ -1,14 +0,0 @@ -checks = ["all", "-ST1000", "-ST1003", "-ST1016", "-ST1020", "-ST1021", "-ST1022", "-ST1023"] -initialisms = ["ACL", "API", "ASCII", "CPU", "CSS", "DNS", - "EOF", "GUID", "HTML", "HTTP", "HTTPS", "ID", - "IP", "JSON", "QPS", "RAM", "RPC", "SLA", - "SMTP", "SQL", "SSH", "TCP", "TLS", "TTL", - "UDP", "UI", "GID", "UID", "UUID", "URI", - "URL", "UTF8", "VM", "XML", "XMPP", "XSRF", - "XSS", "SIP", "RTP", "AMQP", "DB", "TS"] -dot_import_whitelist = [ - "github.com/mmcloughlin/avo/build", - "github.com/mmcloughlin/avo/operand", - "github.com/mmcloughlin/avo/reg", -] -http_status_code_whitelist = ["200", "400", "404", "500"] diff --git a/vendor/honnef.co/go/tools/go/ast/astutil/upstream.go b/vendor/honnef.co/go/tools/go/ast/astutil/upstream.go deleted file mode 100644 index fc647c4d3..000000000 --- a/vendor/honnef.co/go/tools/go/ast/astutil/upstream.go +++ /dev/null @@ -1,20 +0,0 @@ -package astutil - -import ( - "go/ast" - "go/token" - _ "unsafe" - - "golang.org/x/tools/go/ast/astutil" -) - -type Cursor = astutil.Cursor -type ApplyFunc = astutil.ApplyFunc - -func Apply(root ast.Node, pre, post ApplyFunc) (result ast.Node) { - return astutil.Apply(root, pre, post) -} - -func PathEnclosingInterval(root *ast.File, start, end token.Pos) (path []ast.Node, exact bool) { - return astutil.PathEnclosingInterval(root, start, end) -} diff --git a/vendor/honnef.co/go/tools/go/ast/astutil/util.go b/vendor/honnef.co/go/tools/go/ast/astutil/util.go deleted file mode 100644 index e04e1fb0c..000000000 --- a/vendor/honnef.co/go/tools/go/ast/astutil/util.go +++ /dev/null @@ -1,362 +0,0 @@ -package astutil - -import ( - "fmt" - "go/ast" - "go/token" - "reflect" - "strings" -) - -func IsIdent(expr ast.Expr, ident string) bool { - id, ok := expr.(*ast.Ident) - return ok && id.Name == ident -} - -// isBlank returns whether id is the blank identifier "_". -// If id == nil, the answer is false. -func IsBlank(id ast.Expr) bool { - ident, _ := id.(*ast.Ident) - return ident != nil && ident.Name == "_" -} - -// Deprecated: use code.IsIntegerLiteral instead. 
-func IsIntLiteral(expr ast.Expr, literal string) bool { - lit, ok := expr.(*ast.BasicLit) - return ok && lit.Kind == token.INT && lit.Value == literal -} - -// Deprecated: use IsIntLiteral instead -func IsZero(expr ast.Expr) bool { - return IsIntLiteral(expr, "0") -} - -func Preamble(f *ast.File) string { - cutoff := f.Package - if f.Doc != nil { - cutoff = f.Doc.Pos() - } - var out []string - for _, cmt := range f.Comments { - if cmt.Pos() >= cutoff { - break - } - out = append(out, cmt.Text()) - } - return strings.Join(out, "\n") -} - -func GroupSpecs(fset *token.FileSet, specs []ast.Spec) [][]ast.Spec { - if len(specs) == 0 { - return nil - } - groups := make([][]ast.Spec, 1) - groups[0] = append(groups[0], specs[0]) - - for _, spec := range specs[1:] { - g := groups[len(groups)-1] - if fset.PositionFor(spec.Pos(), false).Line-1 != - fset.PositionFor(g[len(g)-1].End(), false).Line { - - groups = append(groups, nil) - } - - groups[len(groups)-1] = append(groups[len(groups)-1], spec) - } - - return groups -} - -// Unparen returns e with any enclosing parentheses stripped. -func Unparen(e ast.Expr) ast.Expr { - for { - p, ok := e.(*ast.ParenExpr) - if !ok { - return e - } - e = p.X - } -} - -// CopyExpr creates a deep copy of an expression. -// It doesn't support copying FuncLits and returns ok == false when encountering one. -func CopyExpr(node ast.Expr) (ast.Expr, bool) { - switch node := node.(type) { - case *ast.BasicLit: - cp := *node - return &cp, true - case *ast.BinaryExpr: - cp := *node - var ok1, ok2 bool - cp.X, ok1 = CopyExpr(cp.X) - cp.Y, ok2 = CopyExpr(cp.Y) - return &cp, ok1 && ok2 - case *ast.CallExpr: - var ok bool - cp := *node - cp.Fun, ok = CopyExpr(cp.Fun) - if !ok { - return nil, false - } - cp.Args = make([]ast.Expr, len(node.Args)) - for i, v := range node.Args { - cp.Args[i], ok = CopyExpr(v) - if !ok { - return nil, false - } - } - return &cp, true - case *ast.CompositeLit: - var ok bool - cp := *node - cp.Type, ok = CopyExpr(cp.Type) - if !ok { - return nil, false - } - cp.Elts = make([]ast.Expr, len(node.Elts)) - for i, v := range node.Elts { - cp.Elts[i], ok = CopyExpr(v) - if !ok { - return nil, false - } - } - return &cp, true - case *ast.Ident: - cp := *node - return &cp, true - case *ast.IndexExpr: - var ok1, ok2 bool - cp := *node - cp.X, ok1 = CopyExpr(cp.X) - cp.Index, ok2 = CopyExpr(cp.Index) - return &cp, ok1 && ok2 - case *ast.IndexListExpr: - var ok bool - cp := *node - cp.X, ok = CopyExpr(cp.X) - if !ok { - return nil, false - } - for i, v := range node.Indices { - cp.Indices[i], ok = CopyExpr(v) - if !ok { - return nil, false - } - } - return &cp, true - case *ast.KeyValueExpr: - var ok1, ok2 bool - cp := *node - cp.Key, ok1 = CopyExpr(cp.Key) - cp.Value, ok2 = CopyExpr(cp.Value) - return &cp, ok1 && ok2 - case *ast.ParenExpr: - var ok bool - cp := *node - cp.X, ok = CopyExpr(cp.X) - return &cp, ok - case *ast.SelectorExpr: - var ok bool - cp := *node - cp.X, ok = CopyExpr(cp.X) - if !ok { - return nil, false - } - sel, ok := CopyExpr(cp.Sel) - if !ok { - // this is impossible - return nil, false - } - cp.Sel = sel.(*ast.Ident) - return &cp, true - case *ast.SliceExpr: - var ok1, ok2, ok3, ok4 bool - cp := *node - cp.X, ok1 = CopyExpr(cp.X) - cp.Low, ok2 = CopyExpr(cp.Low) - cp.High, ok3 = CopyExpr(cp.High) - cp.Max, ok4 = CopyExpr(cp.Max) - return &cp, ok1 && ok2 && ok3 && ok4 - case *ast.StarExpr: - var ok bool - cp := *node - cp.X, ok = CopyExpr(cp.X) - return &cp, ok - case *ast.TypeAssertExpr: - var ok1, ok2 bool - cp := *node - cp.X, ok1 = 
CopyExpr(cp.X) - cp.Type, ok2 = CopyExpr(cp.Type) - return &cp, ok1 && ok2 - case *ast.UnaryExpr: - var ok bool - cp := *node - cp.X, ok = CopyExpr(cp.X) - return &cp, ok - case *ast.MapType: - var ok1, ok2 bool - cp := *node - cp.Key, ok1 = CopyExpr(cp.Key) - cp.Value, ok2 = CopyExpr(cp.Value) - return &cp, ok1 && ok2 - case *ast.ArrayType: - var ok1, ok2 bool - cp := *node - cp.Len, ok1 = CopyExpr(cp.Len) - cp.Elt, ok2 = CopyExpr(cp.Elt) - return &cp, ok1 && ok2 - case *ast.Ellipsis: - var ok bool - cp := *node - cp.Elt, ok = CopyExpr(cp.Elt) - return &cp, ok - case *ast.InterfaceType: - cp := *node - return &cp, true - case *ast.StructType: - cp := *node - return &cp, true - case *ast.FuncLit, *ast.FuncType: - // TODO(dh): implement copying of function literals and types. - return nil, false - case *ast.ChanType: - var ok bool - cp := *node - cp.Value, ok = CopyExpr(cp.Value) - return &cp, ok - case nil: - return nil, true - default: - panic(fmt.Sprintf("unreachable: %T", node)) - } -} - -func Equal(a, b ast.Node) bool { - if a == b { - return true - } - if a == nil || b == nil { - return false - } - if reflect.TypeOf(a) != reflect.TypeOf(b) { - return false - } - - switch a := a.(type) { - case *ast.BasicLit: - b := b.(*ast.BasicLit) - return a.Kind == b.Kind && a.Value == b.Value - case *ast.BinaryExpr: - b := b.(*ast.BinaryExpr) - return Equal(a.X, b.X) && a.Op == b.Op && Equal(a.Y, b.Y) - case *ast.CallExpr: - b := b.(*ast.CallExpr) - if len(a.Args) != len(b.Args) { - return false - } - for i, arg := range a.Args { - if !Equal(arg, b.Args[i]) { - return false - } - } - return Equal(a.Fun, b.Fun) && - (a.Ellipsis == token.NoPos && b.Ellipsis == token.NoPos || a.Ellipsis != token.NoPos && b.Ellipsis != token.NoPos) - case *ast.CompositeLit: - b := b.(*ast.CompositeLit) - if len(a.Elts) != len(b.Elts) { - return false - } - for i, elt := range b.Elts { - if !Equal(elt, b.Elts[i]) { - return false - } - } - return Equal(a.Type, b.Type) && a.Incomplete == b.Incomplete - case *ast.Ident: - b := b.(*ast.Ident) - return a.Name == b.Name - case *ast.IndexExpr: - b := b.(*ast.IndexExpr) - return Equal(a.X, b.X) && Equal(a.Index, b.Index) - case *ast.IndexListExpr: - b := b.(*ast.IndexListExpr) - if len(a.Indices) != len(b.Indices) { - return false - } - for i, v := range a.Indices { - if !Equal(v, b.Indices[i]) { - return false - } - } - return Equal(a.X, b.X) - case *ast.KeyValueExpr: - b := b.(*ast.KeyValueExpr) - return Equal(a.Key, b.Key) && Equal(a.Value, b.Value) - case *ast.ParenExpr: - b := b.(*ast.ParenExpr) - return Equal(a.X, b.X) - case *ast.SelectorExpr: - b := b.(*ast.SelectorExpr) - return Equal(a.X, b.X) && Equal(a.Sel, b.Sel) - case *ast.SliceExpr: - b := b.(*ast.SliceExpr) - return Equal(a.X, b.X) && Equal(a.Low, b.Low) && Equal(a.High, b.High) && Equal(a.Max, b.Max) && a.Slice3 == b.Slice3 - case *ast.StarExpr: - b := b.(*ast.StarExpr) - return Equal(a.X, b.X) - case *ast.TypeAssertExpr: - b := b.(*ast.TypeAssertExpr) - return Equal(a.X, b.X) && Equal(a.Type, b.Type) - case *ast.UnaryExpr: - b := b.(*ast.UnaryExpr) - return a.Op == b.Op && Equal(a.X, b.X) - case *ast.MapType: - b := b.(*ast.MapType) - return Equal(a.Key, b.Key) && Equal(a.Value, b.Value) - case *ast.ArrayType: - b := b.(*ast.ArrayType) - return Equal(a.Len, b.Len) && Equal(a.Elt, b.Elt) - case *ast.Ellipsis: - b := b.(*ast.Ellipsis) - return Equal(a.Elt, b.Elt) - case *ast.InterfaceType: - b := b.(*ast.InterfaceType) - return a.Incomplete == b.Incomplete && Equal(a.Methods, b.Methods) - case 
*ast.StructType: - b := b.(*ast.StructType) - return a.Incomplete == b.Incomplete && Equal(a.Fields, b.Fields) - case *ast.FuncLit: - // TODO(dh): support function literals - return false - case *ast.ChanType: - b := b.(*ast.ChanType) - return a.Dir == b.Dir && (a.Arrow == token.NoPos && b.Arrow == token.NoPos || a.Arrow != token.NoPos && b.Arrow != token.NoPos) - case *ast.FieldList: - b := b.(*ast.FieldList) - if len(a.List) != len(b.List) { - return false - } - for i, fieldA := range a.List { - if !Equal(fieldA, b.List[i]) { - return false - } - } - return true - case *ast.Field: - b := b.(*ast.Field) - if len(a.Names) != len(b.Names) { - return false - } - for j, name := range a.Names { - if !Equal(name, b.Names[j]) { - return false - } - } - if !Equal(a.Type, b.Type) || !Equal(a.Tag, b.Tag) { - return false - } - return true - default: - panic(fmt.Sprintf("unreachable: %T", a)) - } -} diff --git a/vendor/honnef.co/go/tools/go/ir/LICENSE b/vendor/honnef.co/go/tools/go/ir/LICENSE deleted file mode 100644 index aee48041e..000000000 --- a/vendor/honnef.co/go/tools/go/ir/LICENSE +++ /dev/null @@ -1,28 +0,0 @@ -Copyright (c) 2009 The Go Authors. All rights reserved. -Copyright (c) 2016 Dominik Honnef. All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are -met: - - * Redistributions of source code must retain the above copyright -notice, this list of conditions and the following disclaimer. - * Redistributions in binary form must reproduce the above -copyright notice, this list of conditions and the following disclaimer -in the documentation and/or other materials provided with the -distribution. - * Neither the name of Google Inc. nor the names of its -contributors may be used to endorse or promote products derived from -this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/vendor/honnef.co/go/tools/go/ir/UPSTREAM b/vendor/honnef.co/go/tools/go/ir/UPSTREAM deleted file mode 100644 index e92b016b3..000000000 --- a/vendor/honnef.co/go/tools/go/ir/UPSTREAM +++ /dev/null @@ -1,9 +0,0 @@ -This package started as a copy of golang.org/x/tools/go/ssa, imported from an unknown commit in 2016. -It has since been heavily modified to match our own needs in an IR. -The changes are too many to list here, and it is best to consider this package independent of go/ssa. - -Upstream changes still get applied when they address bugs in portions of code we have inherited. 
- -The last upstream commit we've looked at was: -e854e0228e2ef1cc6e42bbfde1951925096a1272 - diff --git a/vendor/honnef.co/go/tools/go/ir/blockopt.go b/vendor/honnef.co/go/tools/go/ir/blockopt.go deleted file mode 100644 index 537886117..000000000 --- a/vendor/honnef.co/go/tools/go/ir/blockopt.go +++ /dev/null @@ -1,205 +0,0 @@ -// Copyright 2013 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package ir - -// Simple block optimizations to simplify the control flow graph. - -// TODO(adonovan): opt: instead of creating several "unreachable" blocks -// per function in the Builder, reuse a single one (e.g. at Blocks[1]) -// to reduce garbage. - -import ( - "fmt" - "os" -) - -// If true, perform sanity checking and show progress at each -// successive iteration of optimizeBlocks. Very verbose. -const debugBlockOpt = false - -// markReachable sets Index=-1 for all blocks reachable from b. -func markReachable(b *BasicBlock) { - b.gaps = -1 - for _, succ := range b.Succs { - if succ.gaps == 0 { - markReachable(succ) - } - } -} - -// deleteUnreachableBlocks marks all reachable blocks of f and -// eliminates (nils) all others, including possibly cyclic subgraphs. -func deleteUnreachableBlocks(f *Function) { - const white, black = 0, -1 - // We borrow b.gaps temporarily as the mark bit. - for _, b := range f.Blocks { - b.gaps = white - } - markReachable(f.Blocks[0]) - // In SSI form, we need the exit to be reachable for correct - // post-dominance information. In original form, however, we - // cannot unconditionally mark it reachable because we won't - // be adding fake edges, and this breaks the calculation of - // dominance information. - markReachable(f.Exit) - for i, b := range f.Blocks { - if b.gaps == white { - for _, c := range b.Succs { - if c.gaps == black { - c.removePred(b) // delete white->black edge - } - } - if debugBlockOpt { - fmt.Fprintln(os.Stderr, "unreachable", b) - } - f.Blocks[i] = nil // delete b - } - } - f.removeNilBlocks() -} - -// jumpThreading attempts to apply simple jump-threading to block b, -// in which a->b->c become a->c if b is just a Jump. -// The result is true if the optimization was applied. -func jumpThreading(f *Function, b *BasicBlock) bool { - if b.Index == 0 { - return false // don't apply to entry block - } - if b.Instrs == nil { - return false - } - for _, pred := range b.Preds { - switch pred.Control().(type) { - case *ConstantSwitch: - // don't optimize away the head blocks of switch statements - return false - } - } - if _, ok := b.Instrs[0].(*Jump); !ok { - return false // not just a jump - } - c := b.Succs[0] - if c == b { - return false // don't apply to degenerate jump-to-self. - } - if c.hasPhi() { - return false // not sound without more effort - } - for j, a := range b.Preds { - a.replaceSucc(b, c) - - // If a now has two edges to c, replace its degenerate If by Jump. - if len(a.Succs) == 2 && a.Succs[0] == c && a.Succs[1] == c { - jump := new(Jump) - jump.setBlock(a) - a.Instrs[len(a.Instrs)-1] = jump - a.Succs = a.Succs[:1] - c.removePred(b) - } else { - if j == 0 { - c.replacePred(b, a) - } else { - c.Preds = append(c.Preds, a) - } - } - - if debugBlockOpt { - fmt.Fprintln(os.Stderr, "jumpThreading", a, b, c) - } - } - f.Blocks[b.Index] = nil // delete b - return true -} - -// fuseBlocks attempts to apply the block fusion optimization to block -// a, in which a->b becomes ab if len(a.Succs)==len(b.Preds)==1. 
-// The result is true if the optimization was applied. -func fuseBlocks(f *Function, a *BasicBlock) bool { - if len(a.Succs) != 1 { - return false - } - if a.Succs[0] == f.Exit { - return false - } - b := a.Succs[0] - if len(b.Preds) != 1 { - return false - } - if _, ok := a.Instrs[len(a.Instrs)-1].(*Panic); ok { - // panics aren't simple jumps, they have side effects. - return false - } - - // Degenerate &&/|| ops may result in a straight-line CFG - // containing φ-nodes. (Ideally we'd replace such them with - // their sole operand but that requires Referrers, built later.) - if b.hasPhi() { - return false // not sound without further effort - } - - // Eliminate jump at end of A, then copy all of B across. - a.Instrs = append(a.Instrs[:len(a.Instrs)-1], b.Instrs...) - for _, instr := range b.Instrs { - instr.setBlock(a) - } - - // A inherits B's successors - a.Succs = append(a.succs2[:0], b.Succs...) - - // Fix up Preds links of all successors of B. - for _, c := range b.Succs { - c.replacePred(b, a) - } - - if debugBlockOpt { - fmt.Fprintln(os.Stderr, "fuseBlocks", a, b) - } - - f.Blocks[b.Index] = nil // delete b - return true -} - -// optimizeBlocks() performs some simple block optimizations on a -// completed function: dead block elimination, block fusion, jump -// threading. -func optimizeBlocks(f *Function) { - if debugBlockOpt { - f.WriteTo(os.Stderr) - mustSanityCheck(f, nil) - } - - deleteUnreachableBlocks(f) - - // Loop until no further progress. - changed := true - for changed { - changed = false - - if debugBlockOpt { - f.WriteTo(os.Stderr) - mustSanityCheck(f, nil) - } - - for _, b := range f.Blocks { - // f.Blocks will temporarily contain nils to indicate - // deleted blocks; we remove them at the end. - if b == nil { - continue - } - - // Fuse blocks. b->c becomes bc. - if fuseBlocks(f, b) { - changed = true - } - - // a->b->c becomes a->c if b contains only a Jump. - if jumpThreading(f, b) { - changed = true - continue // (b was disconnected) - } - } - } - f.removeNilBlocks() -} diff --git a/vendor/honnef.co/go/tools/go/ir/builder.go b/vendor/honnef.co/go/tools/go/ir/builder.go deleted file mode 100644 index 82ca94ba1..000000000 --- a/vendor/honnef.co/go/tools/go/ir/builder.go +++ /dev/null @@ -1,2683 +0,0 @@ -// Copyright 2013 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package ir - -// This file implements the BUILD phase of IR construction. -// -// IR construction has two phases, CREATE and BUILD. In the CREATE phase -// (create.go), all packages are constructed and type-checked and -// definitions of all package members are created, method-sets are -// computed, and wrapper methods are synthesized. -// ir.Packages are created in arbitrary order. -// -// In the BUILD phase (builder.go), the builder traverses the AST of -// each Go source function and generates IR instructions for the -// function body. Initializer expressions for package-level variables -// are emitted to the package's init() function in the order specified -// by go/types.Info.InitOrder, then code for each function in the -// package is generated in lexical order. -// -// The builder's and Program's indices (maps) are populated and -// mutated during the CREATE phase, but during the BUILD phase they -// remain constant. The sole exception is Prog.methodSets and its -// related maps, which are protected by a dedicated mutex. 
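// A minimal sketch of how the IR produced by the BUILD phase is typically
// consumed, mirroring the buildir-based analyzers removed elsewhere in this
// diff (the analyzer name and body here are illustrative, not part of this
// package):
//
//	var ExampleAnalyzer = &analysis.Analyzer{
//		Name:     "example",
//		Doc:      "iterates over the IR of every source function",
//		Requires: []*analysis.Analyzer{buildir.Analyzer},
//		Run: func(pass *analysis.Pass) (interface{}, error) {
//			irpkg := pass.ResultOf[buildir.Analyzer].(*buildir.IR)
//			for _, fn := range irpkg.SrcFuncs {
//				for _, b := range fn.Blocks {
//					_ = b.Instrs // inspect the instructions built by this package
//				}
//			}
//			return nil, nil
//		},
//	}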
- -import ( - "fmt" - "go/ast" - "go/constant" - "go/token" - "go/types" - "os" - - "honnef.co/go/tools/analysis/lint" - "honnef.co/go/tools/go/types/typeutil" - - "golang.org/x/exp/typeparams" -) - -var ( - varOk = newVar("ok", tBool) - varIndex = newVar("index", tInt) - - // Type constants. - tBool = types.Typ[types.Bool] - tInt = types.Typ[types.Int] - tInvalid = types.Typ[types.Invalid] - tString = types.Typ[types.String] - tUntypedNil = types.Typ[types.UntypedNil] - tEface = types.NewInterfaceType(nil, nil).Complete() -) - -// builder holds state associated with the package currently being built. -// Its methods contain all the logic for AST-to-IR conversion. -type builder struct { - printFunc string - - blocksets [5]BlockSet -} - -// cond emits to fn code to evaluate boolean condition e and jump -// to t or f depending on its value, performing various simplifications. -// -// Postcondition: fn.currentBlock is nil. -func (b *builder) cond(fn *Function, e ast.Expr, t, f *BasicBlock) *If { - switch e := e.(type) { - case *ast.ParenExpr: - return b.cond(fn, e.X, t, f) - - case *ast.BinaryExpr: - switch e.Op { - case token.LAND: - ltrue := fn.newBasicBlock("cond.true") - b.cond(fn, e.X, ltrue, f) - fn.currentBlock = ltrue - return b.cond(fn, e.Y, t, f) - - case token.LOR: - lfalse := fn.newBasicBlock("cond.false") - b.cond(fn, e.X, t, lfalse) - fn.currentBlock = lfalse - return b.cond(fn, e.Y, t, f) - } - - case *ast.UnaryExpr: - if e.Op == token.NOT { - return b.cond(fn, e.X, f, t) - } - } - - // A traditional compiler would simplify "if false" (etc) here - // but we do not, for better fidelity to the source code. - // - // The value of a constant condition may be platform-specific, - // and may cause blocks that are reachable in some configuration - // to be hidden from subsequent analyses such as bug-finding tools. - return emitIf(fn, b.expr(fn, e), t, f, e) -} - -// logicalBinop emits code to fn to evaluate e, a &&- or -// ||-expression whose reified boolean value is wanted. -// The value is returned. -func (b *builder) logicalBinop(fn *Function, e *ast.BinaryExpr) Value { - rhs := fn.newBasicBlock("binop.rhs") - done := fn.newBasicBlock("binop.done") - - // T(e) = T(e.X) = T(e.Y) after untyped constants have been - // eliminated. - // TODO(adonovan): not true; MyBool==MyBool yields UntypedBool. - t := fn.Pkg.typeOf(e) - - var short Value // value of the short-circuit path - switch e.Op { - case token.LAND: - b.cond(fn, e.X, rhs, done) - short = emitConst(fn, NewConst(constant.MakeBool(false), t)) - - case token.LOR: - b.cond(fn, e.X, done, rhs) - short = emitConst(fn, NewConst(constant.MakeBool(true), t)) - } - - // Is rhs unreachable? - if rhs.Preds == nil { - // Simplify false&&y to false, true||y to true. - fn.currentBlock = done - return short - } - - // Is done unreachable? - if done.Preds == nil { - // Simplify true&&y (or false||y) to y. - fn.currentBlock = rhs - return b.expr(fn, e.Y) - } - - // All edges from e.X to done carry the short-circuit value. - var edges []Value - for range done.Preds { - edges = append(edges, short) - } - - // The edge from e.Y to done carries the value of e.Y. - fn.currentBlock = rhs - edges = append(edges, b.expr(fn, e.Y)) - emitJump(fn, done, e) - fn.currentBlock = done - - phi := &Phi{Edges: edges} - phi.typ = t - return done.emit(phi, e) -} - -// exprN lowers a multi-result expression e to IR form, emitting code -// to fn and returning a single Value whose type is a *types.Tuple. -// The caller must access the components via Extract. 
-// -// Multi-result expressions include CallExprs in a multi-value -// assignment or return statement, and "value,ok" uses of -// TypeAssertExpr, IndexExpr (when X is a map), and Recv. -func (b *builder) exprN(fn *Function, e ast.Expr) Value { - typ := fn.Pkg.typeOf(e).(*types.Tuple) - switch e := e.(type) { - case *ast.ParenExpr: - return b.exprN(fn, e.X) - - case *ast.CallExpr: - // Currently, no built-in function nor type conversion - // has multiple results, so we can avoid some of the - // cases for single-valued CallExpr. - var c Call - b.setCall(fn, e, &c.Call) - c.typ = typ - return fn.emit(&c, e) - - case *ast.IndexExpr: - mapt := typeutil.CoreType(fn.Pkg.typeOf(e.X)).Underlying().(*types.Map) - lookup := &MapLookup{ - X: b.expr(fn, e.X), - Index: emitConv(fn, b.expr(fn, e.Index), mapt.Key(), e), - CommaOk: true, - } - lookup.setType(typ) - return fn.emit(lookup, e) - - case *ast.TypeAssertExpr: - return emitTypeTest(fn, b.expr(fn, e.X), typ.At(0).Type(), e) - - case *ast.UnaryExpr: // must be receive <- - return emitRecv(fn, b.expr(fn, e.X), true, typ, e) - } - panic(fmt.Sprintf("exprN(%T) in %s", e, fn)) -} - -// builtin emits to fn IR instructions to implement a call to the -// built-in function obj with the specified arguments -// and return type. It returns the value defined by the result. -// -// The result is nil if no special handling was required; in this case -// the caller should treat this like an ordinary library function -// call. -func (b *builder) builtin(fn *Function, obj *types.Builtin, args []ast.Expr, typ types.Type, source ast.Node) Value { - switch obj.Name() { - case "make": - styp := typ.Underlying() - if _, ok := typ.Underlying().(*types.Interface); ok { - // This must be a type parameter with a core type. - // Set styp to the core type and generate instructions based on it. - assert(typeparams.IsTypeParam(typ)) - styp = typeutil.CoreType(typ) - assert(styp != nil) - } - switch styp.(type) { - case *types.Slice: - n := b.expr(fn, args[1]) - m := n - if len(args) == 3 { - m = b.expr(fn, args[2]) - } - if m, ok := m.(*Const); ok { - // treat make([]T, n, m) as new([m]T)[:n] - cap := m.Int64() - at := types.NewArray(styp.Underlying().(*types.Slice).Elem(), cap) - alloc := emitNew(fn, at, source) - v := &Slice{ - X: alloc, - High: n, - } - v.setType(typ) - return fn.emit(v, source) - } - v := &MakeSlice{ - Len: n, - Cap: m, - } - v.setType(typ) - return fn.emit(v, source) - - case *types.Map: - var res Value - if len(args) == 2 { - res = b.expr(fn, args[1]) - } - v := &MakeMap{Reserve: res} - v.setType(typ) - return fn.emit(v, source) - - case *types.Chan: - var sz Value = emitConst(fn, intConst(0)) - if len(args) == 2 { - sz = b.expr(fn, args[1]) - } - v := &MakeChan{Size: sz} - v.setType(typ) - return fn.emit(v, source) - - default: - lint.ExhaustiveTypeSwitch(typ.Underlying()) - } - - case "new": - alloc := emitNew(fn, deref(typ), source) - return alloc - - case "len", "cap": - // Special case: len or cap of an array or *array is based on the type, not the value which may be nil. We must - // still evaluate the value, though. (If it was side-effect free, the whole call would have been - // constant-folded.) - // - // For example, for len(gen()), we need to evaluate gen() for its side-effects, but don't need the returned - // value to determine the length of the array, which is constant. - // - // This never applies to type parameters. Even if the constraint has a structural type, len/cap on a type - // parameter aren't constant. 
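// The same point in code form (gen is a hypothetical helper, not from the
// original file):
//
//	func gen() [4]int { ... }
//	n := len(gen()) // gen() is still evaluated for its side effects,
//	                // but n becomes the constant 4 taken from the array type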
- t := deref(fn.Pkg.typeOf(args[0])).Underlying() - if at, ok := t.(*types.Array); ok { - b.expr(fn, args[0]) // for effects only - return emitConst(fn, intConst(at.Len())) - } - // Otherwise treat as normal. - - case "panic": - fn.emit(&Panic{ - X: emitConv(fn, b.expr(fn, args[0]), tEface, source), - }, source) - addEdge(fn.currentBlock, fn.Exit) - fn.currentBlock = fn.newBasicBlock("unreachable") - return emitConst(fn, NewConst(constant.MakeBool(true), tBool)) // any non-nil Value will do - } - return nil // treat all others as a regular function call -} - -// addr lowers a single-result addressable expression e to IR form, -// emitting code to fn and returning the location (an lvalue) defined -// by the expression. -// -// If escaping is true, addr marks the base variable of the -// addressable expression e as being a potentially escaping pointer -// value. For example, in this code: -// -// a := A{ -// b: [1]B{B{c: 1}} -// } -// return &a.b[0].c -// -// the application of & causes a.b[0].c to have its address taken, -// which means that ultimately the local variable a must be -// heap-allocated. This is a simple but very conservative escape -// analysis. -// -// Operations forming potentially escaping pointers include: -// - &x, including when implicit in method call or composite literals. -// - a[:] iff a is an array (not *array) -// - references to variables in lexically enclosing functions. -func (b *builder) addr(fn *Function, e ast.Expr, escaping bool) (RET lvalue) { - switch e := e.(type) { - case *ast.Ident: - if isBlankIdent(e) { - return blank{} - } - obj := fn.Pkg.objectOf(e) - v := fn.Prog.packageLevelValue(obj) // var (address) - if v == nil { - v = fn.lookup(obj, escaping) - } - return &address{addr: v, expr: e} - - case *ast.CompositeLit: - t := deref(fn.Pkg.typeOf(e)) - var v *Alloc - if escaping { - v = emitNew(fn, t, e) - } else { - v = fn.addLocal(t, e) - } - var sb storebuf - b.compLit(fn, v, e, true, &sb) - sb.emit(fn) - return &address{addr: v, expr: e} - - case *ast.ParenExpr: - return b.addr(fn, e.X, escaping) - - case *ast.SelectorExpr: - sel, ok := fn.Pkg.info.Selections[e] - if !ok { - // qualified identifier - return b.addr(fn, e.Sel, escaping) - } - if sel.Kind() != types.FieldVal { - panic(sel) - } - wantAddr := true - v := b.receiver(fn, e.X, wantAddr, escaping, sel, e) - index := sel.Index()[len(sel.Index())-1] - vut := typeutil.CoreType(deref(v.Type())).Underlying().(*types.Struct) - fld := vut.Field(index) - // Due to the two phases of resolving AssignStmt, a panic from x.f = p() - // when x is nil is required to come after the side-effects of - // evaluating x and p(). - emit := func(fn *Function) Value { - return emitFieldSelection(fn, v, index, true, e.Sel) - } - return &lazyAddress{addr: emit, t: fld.Type(), expr: e.Sel} - - case *ast.IndexExpr: - var x Value - var et types.Type - xt := fn.Pkg.typeOf(e.X) - - // Indexing doesn't need a core type, it only requires all types to be similar enough. For example, []int64 | - // [5]int64 can be indexed. The element types do have to match though. 
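// A small example of such a type parameter (illustrative, not from the
// original file):
//
//	func first[T []int64 | [5]int64](x T) int64 {
//		return x[0] // allowed: every term in the constraint has element type int64
//	}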
- - terms, err := typeparams.NormalTerms(xt) - if err != nil { - panic(fmt.Sprintf("unexpected error: %s", err)) - } - isArrayLike := func() (types.Type, bool) { - for _, term := range terms { - arr, ok := term.Type().Underlying().(*types.Array) - if ok { - return arr.Elem(), true - } - } - return nil, false - } - - isSliceLike := func() (types.Type, bool) { - for _, term := range terms { - switch t := term.Type().Underlying().(type) { - case *types.Slice: - return t.Elem(), true - case *types.Pointer: - return t.Elem().Underlying().(*types.Array).Elem(), true - } - } - return nil, false - } - - if elem, ok := isArrayLike(); ok { - // array - x = b.addr(fn, e.X, escaping).address(fn) - et = types.NewPointer(elem) - } else if elem, ok := isSliceLike(); ok { - // slice or *array - x = b.expr(fn, e.X) - et = types.NewPointer(elem) - } else if t, ok := typeutil.CoreType(xt).Underlying().(*types.Map); ok { - return &element{ - m: b.expr(fn, e.X), - k: emitConv(fn, b.expr(fn, e.Index), t.Key(), e.Index), - t: t.Elem(), - } - } else { - panic("unexpected container type in IndexExpr: " + t.String()) - } - - // Due to the two phases of resolving AssignStmt, a panic from x[i] = p() - // when x is nil or i is out-of-bounds is required to come after the - // side-effects of evaluating x, i and p(). - index := b.expr(fn, e.Index) - emit := func(fn *Function) Value { - v := &IndexAddr{ - X: x, - Index: index, - } - v.setType(et) - return fn.emit(v, e) - } - return &lazyAddress{addr: emit, t: deref(et), expr: e} - - case *ast.StarExpr: - return &address{addr: b.expr(fn, e.X), expr: e} - } - - panic(fmt.Sprintf("unexpected address expression: %T", e)) -} - -type store struct { - lhs lvalue - rhs Value - source ast.Node - - // if debugRef is set no other fields will be set - debugRef *DebugRef -} - -type storebuf struct{ stores []store } - -func (sb *storebuf) store(lhs lvalue, rhs Value, source ast.Node) { - sb.stores = append(sb.stores, store{lhs, rhs, source, nil}) -} - -func (sb *storebuf) storeDebugRef(ref *DebugRef) { - sb.stores = append(sb.stores, store{debugRef: ref}) -} - -func (sb *storebuf) emit(fn *Function) { - for _, s := range sb.stores { - if s.debugRef == nil { - s.lhs.store(fn, s.rhs, s.source) - } else { - fn.emit(s.debugRef, nil) - } - } -} - -// assign emits to fn code to initialize the lvalue loc with the value -// of expression e. If isZero is true, assign assumes that loc holds -// the zero value for its type. -// -// This is equivalent to loc.store(fn, b.expr(fn, e)), but may generate -// better code in some cases, e.g., for composite literals in an -// addressable location. -// -// If sb is not nil, assign generates code to evaluate expression e, but -// not to update loc. Instead, the necessary stores are appended to the -// storebuf sb so that they can be executed later. This allows correct -// in-place update of existing variables when the RHS is a composite -// literal that may reference parts of the LHS. -func (b *builder) assign(fn *Function, loc lvalue, e ast.Expr, isZero bool, sb *storebuf, source ast.Node) { - // Can we initialize it in place? - if e, ok := unparen(e).(*ast.CompositeLit); ok { - // A CompositeLit never evaluates to a pointer, - // so if the type of the location is a pointer, - // an &-operation is implied. 
- if _, ok := loc.(blank); !ok { // avoid calling blank.typ() - if isPointer(loc.typ()) { - // Example input that hits this code: - // - // type S1 struct{ X int } - // x := []*S1{ - // {1}, // <-- & is implied - // } - // _ = x - ptr := b.addr(fn, e, true).address(fn) - // copy address - if sb != nil { - sb.store(loc, ptr, source) - } else { - loc.store(fn, ptr, source) - } - return - } - } - - if _, ok := loc.(*address); ok { - if isInterface(loc.typ()) && !typeparams.IsTypeParam(loc.typ()) { - // e.g. var x interface{} = T{...} - // Can't in-place initialize an interface value. - // Fall back to copying. - } else { - // x = T{...} or x := T{...} - addr := loc.address(fn) - if sb != nil { - b.compLit(fn, addr, e, isZero, sb) - } else { - var sb storebuf - b.compLit(fn, addr, e, isZero, &sb) - sb.emit(fn) - } - - // Subtle: emit debug ref for aggregate types only; - // slice and map are handled by store ops in compLit. - switch typeutil.CoreType(loc.typ()).Underlying().(type) { - case *types.Struct, *types.Array: - if sb != nil { - // Make sure we don't emit DebugRefs before the store has actually occurred - if ref := makeDebugRef(fn, e, addr, true); ref != nil { - sb.storeDebugRef(ref) - } - } else { - emitDebugRef(fn, e, addr, true) - } - } - - return - } - } - } - - // simple case: just copy - rhs := b.expr(fn, e) - if sb != nil { - sb.store(loc, rhs, source) - } else { - loc.store(fn, rhs, source) - } -} - -// expr lowers a single-result expression e to IR form, emitting code -// to fn and returning the Value defined by the expression. -func (b *builder) expr(fn *Function, e ast.Expr) Value { - e = unparen(e) - - tv := fn.Pkg.info.Types[e] - - // Is expression a constant? - if tv.Value != nil { - return emitConst(fn, NewConst(tv.Value, tv.Type)) - } - - var v Value - if tv.Addressable() { - // Prefer pointer arithmetic ({Index,Field}Addr) followed - // by Load over subelement extraction (e.g. Index, Field), - // to avoid large copies. - v = b.addr(fn, e, false).load(fn, e) - } else { - v = b.expr0(fn, e, tv) - } - if fn.debugInfo() { - emitDebugRef(fn, e, v, false) - } - return v -} - -func (b *builder) expr0(fn *Function, e ast.Expr, tv types.TypeAndValue) Value { - switch e := e.(type) { - case *ast.BasicLit: - panic("non-constant BasicLit") // unreachable - - case *ast.FuncLit: - fn2 := &Function{ - name: fmt.Sprintf("%s$%d", fn.Name(), 1+len(fn.AnonFuncs)), - Signature: fn.Pkg.typeOf(e.Type).Underlying().(*types.Signature), - parent: fn, - Pkg: fn.Pkg, - Prog: fn.Prog, - functionBody: new(functionBody), - } - fn2.source = e - fn.AnonFuncs = append(fn.AnonFuncs, fn2) - fn2.initHTML(b.printFunc) - b.buildFunction(fn2) - if fn2.FreeVars == nil { - return fn2 - } - v := &MakeClosure{Fn: fn2} - v.setType(tv.Type) - for _, fv := range fn2.FreeVars { - v.Bindings = append(v.Bindings, fv.outer) - fv.outer = nil - } - return fn.emit(v, e) - - case *ast.TypeAssertExpr: // single-result form only - return emitTypeAssert(fn, b.expr(fn, e.X), tv.Type, e) - - case *ast.CallExpr: - if fn.Pkg.info.Types[e.Fun].IsType() { - // Explicit type conversion, e.g. string(x) or big.Int(x) - x := b.expr(fn, e.Args[0]) - y := emitConv(fn, x, tv.Type, e) - return y - } - // Call to "intrinsic" built-ins, e.g. new, make, panic. - if id, ok := unparen(e.Fun).(*ast.Ident); ok { - if obj, ok := fn.Pkg.info.Uses[id].(*types.Builtin); ok { - if v := b.builtin(fn, obj, e.Args, tv.Type, e); v != nil { - return v - } - } - } - // Regular function call. 
- var v Call - b.setCall(fn, e, &v.Call) - v.setType(tv.Type) - return fn.emit(&v, e) - - case *ast.UnaryExpr: - switch e.Op { - case token.AND: // &X --- potentially escaping. - addr := b.addr(fn, e.X, true) - if _, ok := unparen(e.X).(*ast.StarExpr); ok { - // &*p must panic if p is nil (http://golang.org/s/go12nil). - // For simplicity, we'll just (suboptimally) rely - // on the side effects of a load. - // TODO(adonovan): emit dedicated nilcheck. - addr.load(fn, e) - } - return addr.address(fn) - case token.ADD: - return b.expr(fn, e.X) - case token.NOT, token.SUB, token.XOR: // ! <- - ^ - v := &UnOp{ - Op: e.Op, - X: b.expr(fn, e.X), - } - v.setType(tv.Type) - return fn.emit(v, e) - case token.ARROW: - return emitRecv(fn, b.expr(fn, e.X), false, tv.Type, e) - default: - panic(e.Op) - } - - case *ast.BinaryExpr: - switch e.Op { - case token.LAND, token.LOR: - return b.logicalBinop(fn, e) - case token.SHL, token.SHR: - fallthrough - case token.ADD, token.SUB, token.MUL, token.QUO, token.REM, token.AND, token.OR, token.XOR, token.AND_NOT: - return emitArith(fn, e.Op, b.expr(fn, e.X), b.expr(fn, e.Y), tv.Type, e) - - case token.EQL, token.NEQ, token.GTR, token.LSS, token.LEQ, token.GEQ: - cmp := emitCompare(fn, e.Op, b.expr(fn, e.X), b.expr(fn, e.Y), e) - // The type of x==y may be UntypedBool. - return emitConv(fn, cmp, types.Default(tv.Type), e) - default: - panic("illegal op in BinaryExpr: " + e.Op.String()) - } - - case *ast.SliceExpr: - var x Value - if core := typeutil.CoreType(fn.Pkg.typeOf(e.X)); core != nil { - switch core.Underlying().(type) { - case *types.Array: - // Potentially escaping. - x = b.addr(fn, e.X, true).address(fn) - case *types.Basic, *types.Slice, *types.Pointer: // *array - x = b.expr(fn, e.X) - default: - panic("unreachable") - } - } else { - // We're indexing a string | []byte. Note that other combinations such as []byte | [4]byte are currently not - // allowed by the language. - x = b.expr(fn, e.X) - } - - var low, high, max Value - if e.Low != nil { - low = b.expr(fn, e.Low) - } - if e.High != nil { - high = b.expr(fn, e.High) - } - if e.Slice3 { - max = b.expr(fn, e.Max) - } - v := &Slice{ - X: x, - Low: low, - High: high, - Max: max, - } - v.setType(tv.Type) - return fn.emit(v, e) - - case *ast.Ident: - obj := fn.Pkg.info.Uses[e] - // Universal built-in or nil? - switch obj := obj.(type) { - case *types.Builtin: - return &Builtin{name: obj.Name(), sig: tv.Type.(*types.Signature)} - case *types.Nil: - return emitConst(fn, nilConst(tv.Type)) - } - // Package-level func or var? - if v := fn.Prog.packageLevelValue(obj); v != nil { - if _, ok := obj.(*types.Var); ok { - return emitLoad(fn, v, e) // var (address) - } - if instance, ok := fn.Pkg.info.Instances[e]; ok { - // Instantiated generic function - return makeInstance(fn.Prog, v.(*Function), instance.Type.(*types.Signature), instance.TypeArgs) - } - return v // (func) - } - // Local var. - return emitLoad(fn, fn.lookup(obj, false), e) // var (address) - - case *ast.SelectorExpr: - sel, ok := fn.Pkg.info.Selections[e] - if !ok { - // builtin unsafe.{Add,Slice} - if obj, ok := fn.Pkg.info.Uses[e.Sel].(*types.Builtin); ok { - return &Builtin{name: "Unsafe" + obj.Name(), sig: tv.Type.(*types.Signature)} - } - // qualified identifier - return b.expr(fn, e.Sel) - } - switch sel.Kind() { - case types.MethodExpr: - // (*T).f or T.f, the method f from the method-set of type T. - // The result is a "thunk". 
- return emitConv(fn, makeThunk(fn.Prog, sel), tv.Type, e) - - case types.MethodVal: - // e.f where e is an expression and f is a method. - // The result is a "bound". - obj := sel.Obj().(*types.Func) - rt := recvType(obj) - wantAddr := isPointer(rt) - escaping := true - v := b.receiver(fn, e.X, wantAddr, escaping, sel, e) - if isInterface(rt) { - // If v has interface type I, - // we must emit a check that v is non-nil. - // We use: typeassert v.(I). - emitTypeAssert(fn, v, rt, e) - } - c := &MakeClosure{ - Fn: makeBound(fn.Prog, obj), - Bindings: []Value{v}, - } - c.source = e.Sel - c.setType(tv.Type) - return fn.emit(c, e) - - case types.FieldVal: - indices := sel.Index() - last := len(indices) - 1 - v := b.expr(fn, e.X) - v = emitImplicitSelections(fn, v, indices[:last], e) - v = emitFieldSelection(fn, v, indices[last], false, e.Sel) - return v - } - - panic("unexpected expression-relative selector") - - case *ast.IndexExpr: - // IndexExpr might either be an actual indexing operation, or an instantiation - xt := fn.Pkg.typeOf(e.X) - - terms, err := typeparams.NormalTerms(xt) - if err != nil { - panic(fmt.Sprintf("unexpected error: %s", err)) - } - isNonAddressableIndexable := func() (types.Type, bool) { - for _, term := range terms { - switch t := term.Type().Underlying().(type) { - case *types.Array: - return t.Elem(), true - case *types.Basic: - // a string - return types.Universe.Lookup("byte").Type(), true - } - } - return nil, false - } - - isAddressableIndexable := func() (types.Type, bool) { - for _, term := range terms { - switch t := term.Type().Underlying().(type) { - case *types.Slice: - return t.Elem(), true - case *types.Pointer: - return t.Elem().Underlying().(*types.Array).Elem(), true - } - } - return nil, false - } - - if elem, ok := isNonAddressableIndexable(); ok { - // At least one of the types is non-addressable - v := &Index{ - X: b.expr(fn, e.X), - Index: b.expr(fn, e.Index), - } - v.setType(elem) - return fn.emit(v, e) - } else if _, ok := isAddressableIndexable(); ok { - // All types are addressable (otherwise the previous branch would've fired) - return b.addr(fn, e, false).load(fn, e) - } else if t, ok := typeutil.CoreType(xt).Underlying().(*types.Map); ok { - // Maps are not addressable. - v := &MapLookup{ - X: b.expr(fn, e.X), - Index: emitConv(fn, b.expr(fn, e.Index), t.Key(), e.Index), - } - v.setType(t.Elem()) - return fn.emit(v, e) - } else if _, ok := xt.Underlying().(*types.Signature); ok { - // Instantiating a generic function - return b.expr(fn, e.X) - } else { - panic("unexpected container type in IndexExpr: " + t.String()) - } - - case *ast.IndexListExpr: - // Instantiating a generic function - return b.expr(fn, e.X) - - case *ast.CompositeLit, *ast.StarExpr: - // Addressable types (lvalues) - return b.addr(fn, e, false).load(fn, e) - } - - panic(fmt.Sprintf("unexpected expr: %T", e)) -} - -// stmtList emits to fn code for all statements in list. -func (b *builder) stmtList(fn *Function, list []ast.Stmt) { - for _, s := range list { - b.stmt(fn, s) - } -} - -// receiver emits to fn code for expression e in the "receiver" -// position of selection e.f (where f may be a field or a method) and -// returns the effective receiver after applying the implicit field -// selections of sel. -// -// wantAddr requests that the result is an an address. If -// !sel.Indirect(), this may require that e be built in addr() mode; it -// must thus be addressable. -// -// escaping is defined as per builder.addr(). 
-func (b *builder) receiver(fn *Function, e ast.Expr, wantAddr, escaping bool, sel *types.Selection, source ast.Node) Value { - var v Value - if wantAddr && !sel.Indirect() && !isPointer(fn.Pkg.typeOf(e)) { - v = b.addr(fn, e, escaping).address(fn) - } else { - v = b.expr(fn, e) - } - - last := len(sel.Index()) - 1 - v = emitImplicitSelections(fn, v, sel.Index()[:last], source) - if !wantAddr && isPointer(v.Type()) { - v = emitLoad(fn, v, e) - } - return v -} - -// setCallFunc populates the function parts of a CallCommon structure -// (Func, Method, Recv, Args[0]) based on the kind of invocation -// occurring in e. -func (b *builder) setCallFunc(fn *Function, e *ast.CallExpr, c *CallCommon) { - // Is this a method call? - if selector, ok := unparen(e.Fun).(*ast.SelectorExpr); ok { - sel, ok := fn.Pkg.info.Selections[selector] - if ok && sel.Kind() == types.MethodVal { - obj := sel.Obj().(*types.Func) - recv := recvType(obj) - wantAddr := isPointer(recv) - escaping := true - v := b.receiver(fn, selector.X, wantAddr, escaping, sel, selector) - if isInterface(recv) { - // Invoke-mode call. - - // Methods in interfaces cannot have their own type parameters, so we needn't do anything for type - // parameters. - c.Value = v - c.Method = obj - } else { - // "Call"-mode call. - - // declaredFunc takes care of creating wrappers for functions with type parameters. - c.Value = fn.Prog.declaredFunc(obj) - c.Args = append(c.Args, v) - } - return - } - - // sel.Kind()==MethodExpr indicates T.f() or (*T).f(): - // a statically dispatched call to the method f in the - // method-set of T or *T. T may be an interface. - // - // e.Fun would evaluate to a concrete method, interface - // wrapper function, or promotion wrapper. - // - // For now, we evaluate it in the usual way. - // - // TODO(adonovan): opt: inline expr() here, to make the - // call static and to avoid generation of wrappers. - // It's somewhat tricky as it may consume the first - // actual parameter if the call is "invoke" mode. - // - // Examples: - // type T struct{}; func (T) f() {} // "call" mode - // type T interface { f() } // "invoke" mode - // - // type S struct{ T } - // - // var s S - // S.f(s) - // (*S).f(&s) - // - // Suggested approach: - // - consume the first actual parameter expression - // and build it with b.expr(). - // - apply implicit field selections. - // - use MethodVal logic to populate fields of c. - } - // Evaluate the function operand in the usual way. - // - // Code in expr takes care of creating wrappers for functions with type parameters. - c.Value = b.expr(fn, e.Fun) -} - -// emitCallArgs emits to f code for the actual parameters of call e to -// a (possibly built-in) function of effective type sig. -// The argument values are appended to args, which is then returned. -func (b *builder) emitCallArgs(fn *Function, sig *types.Signature, e *ast.CallExpr, args []Value) []Value { - // f(x, y, z...): pass slice z straight through. - if e.Ellipsis != 0 { - for i, arg := range e.Args { - v := emitConv(fn, b.expr(fn, arg), sig.Params().At(i).Type(), arg) - args = append(args, v) - } - return args - } - - offset := len(args) // 1 if call has receiver, 0 otherwise - - // Evaluate actual parameter expressions. - // - // If this is a chained call of the form f(g()) where g has - // multiple return values (MRV), they are flattened out into - // args; a suffix of them may end up in a varargs slice. 
- for _, arg := range e.Args { - v := b.expr(fn, arg) - if ttuple, ok := v.Type().(*types.Tuple); ok { // MRV chain - for i, n := 0, ttuple.Len(); i < n; i++ { - args = append(args, emitExtract(fn, v, i, arg)) - } - } else { - args = append(args, v) - } - } - - // Actual->formal assignability conversions for normal parameters. - np := sig.Params().Len() // number of normal parameters - if sig.Variadic() { - np-- - } - for i := 0; i < np; i++ { - args[offset+i] = emitConv(fn, args[offset+i], sig.Params().At(i).Type(), args[offset+i].Source()) - } - - // Actual->formal assignability conversions for variadic parameter, - // and construction of slice. - if sig.Variadic() { - varargs := args[offset+np:] - st := sig.Params().At(np).Type().(*types.Slice) - vt := st.Elem() - if len(varargs) == 0 { - args = append(args, emitConst(fn, nilConst(st))) - } else { - // Replace a suffix of args with a slice containing it. - at := types.NewArray(vt, int64(len(varargs))) - a := emitNew(fn, at, e) - a.source = e - for i, arg := range varargs { - iaddr := &IndexAddr{ - X: a, - Index: emitConst(fn, intConst(int64(i))), - } - iaddr.setType(types.NewPointer(vt)) - fn.emit(iaddr, e) - emitStore(fn, iaddr, arg, arg.Source()) - } - s := &Slice{X: a} - s.setType(st) - args[offset+np] = fn.emit(s, args[offset+np].Source()) - args = args[:offset+np+1] - } - } - return args -} - -// setCall emits to fn code to evaluate all the parameters of a function -// call e, and populates *c with those values. -func (b *builder) setCall(fn *Function, e *ast.CallExpr, c *CallCommon) { - // First deal with the f(...) part and optional receiver. - b.setCallFunc(fn, e, c) - - // Then append the other actual parameters. - sig, _ := typeutil.CoreType(fn.Pkg.typeOf(e.Fun)).(*types.Signature) - if sig == nil { - panic(fmt.Sprintf("no signature for call of %s", e.Fun)) - } - c.Args = b.emitCallArgs(fn, sig, e, c.Args) -} - -// assignOp emits to fn code to perform loc = val. -func (b *builder) assignOp(fn *Function, loc lvalue, val Value, op token.Token, source ast.Node) { - loc.store(fn, emitArith(fn, op, loc.load(fn, source), val, loc.typ(), source), source) -} - -// localValueSpec emits to fn code to define all of the vars in the -// function-local ValueSpec, spec. -func (b *builder) localValueSpec(fn *Function, spec *ast.ValueSpec) { - switch { - case len(spec.Values) == len(spec.Names): - // e.g. var x, y = 0, 1 - // 1:1 assignment - for i, id := range spec.Names { - if !isBlankIdent(id) { - fn.addLocalForIdent(id) - } - lval := b.addr(fn, id, false) // non-escaping - b.assign(fn, lval, spec.Values[i], true, nil, spec) - } - - case len(spec.Values) == 0: - // e.g. var x, y int - // Locals are implicitly zero-initialized. - for _, id := range spec.Names { - if !isBlankIdent(id) { - lhs := fn.addLocalForIdent(id) - if fn.debugInfo() { - emitDebugRef(fn, id, lhs, true) - } - } - } - - default: - // e.g. var x, y = pos() - tuple := b.exprN(fn, spec.Values[0]) - for i, id := range spec.Names { - if !isBlankIdent(id) { - fn.addLocalForIdent(id) - lhs := b.addr(fn, id, false) // non-escaping - lhs.store(fn, emitExtract(fn, tuple, i, id), id) - } - } - } -} - -// assignStmt emits code to fn for a parallel assignment of rhss to lhss. -// isDef is true if this is a short variable declaration (:=). -// -// Note the similarity with localValueSpec. -func (b *builder) assignStmt(fn *Function, lhss, rhss []ast.Expr, isDef bool, source ast.Node) { - // Side effects of all LHSs and RHSs must occur in left-to-right order. 
- lvals := make([]lvalue, len(lhss)) - isZero := make([]bool, len(lhss)) - for i, lhs := range lhss { - var lval lvalue = blank{} - if !isBlankIdent(lhs) { - if isDef { - if obj := fn.Pkg.info.Defs[lhs.(*ast.Ident)]; obj != nil { - fn.addNamedLocal(obj, lhs) - isZero[i] = true - } - } - lval = b.addr(fn, lhs, false) // non-escaping - } - lvals[i] = lval - } - if len(lhss) == len(rhss) { - // Simple assignment: x = f() (!isDef) - // Parallel assignment: x, y = f(), g() (!isDef) - // or short var decl: x, y := f(), g() (isDef) - // - // In all cases, the RHSs may refer to the LHSs, - // so we need a storebuf. - var sb storebuf - for i := range rhss { - b.assign(fn, lvals[i], rhss[i], isZero[i], &sb, source) - } - sb.emit(fn) - } else { - // e.g. x, y = pos() - tuple := b.exprN(fn, rhss[0]) - emitDebugRef(fn, rhss[0], tuple, false) - for i, lval := range lvals { - lval.store(fn, emitExtract(fn, tuple, i, source), source) - } - } -} - -// arrayLen returns the length of the array whose composite literal elements are elts. -func (b *builder) arrayLen(fn *Function, elts []ast.Expr) int64 { - var max int64 = -1 - var i int64 = -1 - for _, e := range elts { - if kv, ok := e.(*ast.KeyValueExpr); ok { - i = b.expr(fn, kv.Key).(*Const).Int64() - } else { - i++ - } - if i > max { - max = i - } - } - return max + 1 -} - -// compLit emits to fn code to initialize a composite literal e at -// address addr with type typ. -// -// Nested composite literals are recursively initialized in place -// where possible. If isZero is true, compLit assumes that addr -// holds the zero value for typ. -// -// Because the elements of a composite literal may refer to the -// variables being updated, as in the second line below, -// -// x := T{a: 1} -// x = T{a: x.a} -// -// all the reads must occur before all the writes. This is implicitly handled by the write buffering effected by -// compositeElement and explicitly by the storebuf for when we don't use CompositeValue. -// -// A CompositeLit may have pointer type only in the recursive (nested) -// case when the type name is implicit. e.g. in []*T{{}}, the inner -// literal has type *T behaves like &T{}. -// In that case, addr must hold a T, not a *T. 
-func (b *builder) compLit(fn *Function, addr Value, e *ast.CompositeLit, isZero bool, sb *storebuf) { - typ := deref(fn.Pkg.typeOf(e)) - switch t := typeutil.CoreType(typ).(type) { - case *types.Struct: - lvalue := &address{addr: addr, expr: e} - if len(e.Elts) == 0 { - if !isZero { - sb.store(lvalue, zeroValue(fn, deref(addr.Type()), e), e) - } - } else { - v := &CompositeValue{ - Values: make([]Value, t.NumFields()), - } - for i := 0; i < t.NumFields(); i++ { - v.Values[i] = emitConst(fn, zeroConst(t.Field(i).Type())) - } - v.setType(typ) - - for i, e := range e.Elts { - fieldIndex := i - if kv, ok := e.(*ast.KeyValueExpr); ok { - fname := kv.Key.(*ast.Ident).Name - for i, n := 0, t.NumFields(); i < n; i++ { - sf := t.Field(i) - if sf.Name() == fname { - fieldIndex = i - e = kv.Value - break - } - } - } - - ce := &compositeElement{ - cv: v, - idx: fieldIndex, - t: t.Field(fieldIndex).Type(), - expr: e, - } - b.assign(fn, ce, e, isZero, sb, e) - v.Bitmap.SetBit(&v.Bitmap, fieldIndex, 1) - v.NumSet++ - } - fn.emit(v, e) - sb.store(lvalue, v, e) - } - - case *types.Array, *types.Slice: - var at *types.Array - var array Value - switch t := t.(type) { - case *types.Slice: - at = types.NewArray(t.Elem(), b.arrayLen(fn, e.Elts)) - alloc := emitNew(fn, at, e) - array = alloc - case *types.Array: - at = t - array = addr - } - - var final Value - if len(e.Elts) == 0 { - if !isZero { - zc := emitConst(fn, zeroConst(at)) - final = zc - } - } else { - if at.Len() == int64(len(e.Elts)) { - // The literal specifies all elements, so we can use a composite value - v := &CompositeValue{ - Values: make([]Value, at.Len()), - } - zc := emitConst(fn, zeroConst(at.Elem())) - for i := range v.Values { - v.Values[i] = zc - } - v.setType(at) - - var idx *Const - for _, e := range e.Elts { - if kv, ok := e.(*ast.KeyValueExpr); ok { - idx = b.expr(fn, kv.Key).(*Const) - e = kv.Value - } else { - var idxval int64 - if idx != nil { - idxval = idx.Int64() + 1 - } - idx = emitConst(fn, intConst(idxval)).(*Const) - } - - iaddr := &compositeElement{ - cv: v, - idx: int(idx.Int64()), - t: at.Elem(), - expr: e, - } - - b.assign(fn, iaddr, e, true, sb, e) - v.Bitmap.SetBit(&v.Bitmap, int(idx.Int64()), 1) - v.NumSet++ - } - final = v - fn.emit(v, e) - } else { - // Not all elements are specified. Populate the array with a series of stores, to guard against literals - // like []int{1<<62: 1}. - if !isZero { - // memclear - sb.store(&address{array, nil}, zeroValue(fn, deref(array.Type()), e), e) - } - - var idx *Const - for _, e := range e.Elts { - if kv, ok := e.(*ast.KeyValueExpr); ok { - idx = b.expr(fn, kv.Key).(*Const) - e = kv.Value - } else { - var idxval int64 - if idx != nil { - idxval = idx.Int64() + 1 - } - idx = emitConst(fn, intConst(idxval)).(*Const) - } - iaddr := &IndexAddr{ - X: array, - Index: idx, - } - iaddr.setType(types.NewPointer(at.Elem())) - fn.emit(iaddr, e) - if t != at { // slice - // backing array is unaliased => storebuf not needed. 
- b.assign(fn, &address{addr: iaddr, expr: e}, e, true, nil, e) - } else { - b.assign(fn, &address{addr: iaddr, expr: e}, e, true, sb, e) - } - } - } - } - if t != at { // slice - if final != nil { - sb.store(&address{addr: array}, final, e) - } - s := &Slice{X: array} - s.setType(typ) - sb.store(&address{addr: addr, expr: e}, fn.emit(s, e), e) - } else if final != nil { - sb.store(&address{addr: array, expr: e}, final, e) - } - - case *types.Map: - m := &MakeMap{Reserve: emitConst(fn, intConst(int64(len(e.Elts))))} - m.setType(typ) - fn.emit(m, e) - for _, e := range e.Elts { - e := e.(*ast.KeyValueExpr) - - // If a key expression in a map literal is itself a - // composite literal, the type may be omitted. - // For example: - // map[*struct{}]bool{{}: true} - // An &-operation may be implied: - // map[*struct{}]bool{&struct{}{}: true} - var key Value - if _, ok := unparen(e.Key).(*ast.CompositeLit); ok && isPointer(t.Key()) { - // A CompositeLit never evaluates to a pointer, - // so if the type of the location is a pointer, - // an &-operation is implied. - key = b.addr(fn, e.Key, true).address(fn) - } else { - key = b.expr(fn, e.Key) - } - - loc := element{ - m: m, - k: emitConv(fn, key, t.Key(), e), - t: t.Elem(), - } - - // We call assign() only because it takes care - // of any &-operation required in the recursive - // case, e.g., - // map[int]*struct{}{0: {}} implies &struct{}{}. - // In-place update is of course impossible, - // and no storebuf is needed. - b.assign(fn, &loc, e.Value, true, nil, e) - } - sb.store(&address{addr: addr, expr: e}, m, e) - - default: - panic("unexpected CompositeLit type: " + t.String()) - } -} - -func (b *builder) switchStmt(fn *Function, s *ast.SwitchStmt, label *lblock) { - if s.Tag == nil { - b.switchStmtDynamic(fn, s, label) - return - } - dynamic := false - for _, iclause := range s.Body.List { - clause := iclause.(*ast.CaseClause) - for _, cond := range clause.List { - if fn.Pkg.info.Types[unparen(cond)].Value == nil { - dynamic = true - break - } - } - } - - if dynamic { - b.switchStmtDynamic(fn, s, label) - return - } - - if s.Init != nil { - b.stmt(fn, s.Init) - } - - entry := fn.currentBlock - tag := b.expr(fn, s.Tag) - - heads := make([]*BasicBlock, 0, len(s.Body.List)) - bodies := make([]*BasicBlock, len(s.Body.List)) - conds := make([]Value, 0, len(s.Body.List)) - - hasDefault := false - done := fn.newBasicBlock("switch.done") - if label != nil { - label._break = done - } - for i, stmt := range s.Body.List { - body := fn.newBasicBlock(fmt.Sprintf("switch.body.%d", i)) - bodies[i] = body - cas := stmt.(*ast.CaseClause) - if cas.List == nil { - // default branch - hasDefault = true - head := fn.newBasicBlock(fmt.Sprintf("switch.head.%d", i)) - conds = append(conds, nil) - heads = append(heads, head) - fn.currentBlock = head - emitJump(fn, body, cas) - } - for j, cond := range stmt.(*ast.CaseClause).List { - fn.currentBlock = entry - head := fn.newBasicBlock(fmt.Sprintf("switch.head.%d.%d", i, j)) - conds = append(conds, b.expr(fn, cond)) - heads = append(heads, head) - fn.currentBlock = head - emitJump(fn, body, cond) - } - } - - for i, stmt := range s.Body.List { - clause := stmt.(*ast.CaseClause) - body := bodies[i] - fn.currentBlock = body - fallthru := done - if i+1 < len(bodies) { - fallthru = bodies[i+1] - } - fn.targets = &targets{ - tail: fn.targets, - _break: done, - _fallthrough: fallthru, - } - b.stmtList(fn, clause.Body) - fn.targets = fn.targets.tail - emitJump(fn, done, stmt) - } - - if !hasDefault { - head := 
fn.newBasicBlock("switch.head.implicit-default") - body := fn.newBasicBlock("switch.body.implicit-default") - fn.currentBlock = head - emitJump(fn, body, s) - fn.currentBlock = body - emitJump(fn, done, s) - heads = append(heads, head) - conds = append(conds, nil) - } - - if len(heads) != len(conds) { - panic(fmt.Sprintf("internal error: %d heads for %d conds", len(heads), len(conds))) - } - for _, head := range heads { - addEdge(entry, head) - } - fn.currentBlock = entry - entry.emit(&ConstantSwitch{ - Tag: tag, - Conds: conds, - }, s) - fn.currentBlock = done -} - -// switchStmt emits to fn code for the switch statement s, optionally -// labelled by label. -func (b *builder) switchStmtDynamic(fn *Function, s *ast.SwitchStmt, label *lblock) { - // We treat SwitchStmt like a sequential if-else chain. - // Multiway dispatch can be recovered later by irutil.Switches() - // to those cases that are free of side effects. - if s.Init != nil { - b.stmt(fn, s.Init) - } - kTrue := emitConst(fn, NewConst(constant.MakeBool(true), tBool)) - - var tagv Value = kTrue - var tagSource ast.Node = s - if s.Tag != nil { - tagv = b.expr(fn, s.Tag) - tagSource = s.Tag - } - // lifting only considers loads and stores, but we want different - // sigma nodes for the different comparisons. use a temporary and - // load it in every branch. - tag := fn.addLocal(tagv.Type(), tagSource) - emitStore(fn, tag, tagv, tagSource) - - done := fn.newBasicBlock("switch.done") - if label != nil { - label._break = done - } - // We pull the default case (if present) down to the end. - // But each fallthrough label must point to the next - // body block in source order, so we preallocate a - // body block (fallthru) for the next case. - // Unfortunately this makes for a confusing block order. - var dfltBody *[]ast.Stmt - var dfltFallthrough *BasicBlock - var fallthru, dfltBlock *BasicBlock - ncases := len(s.Body.List) - for i, clause := range s.Body.List { - body := fallthru - if body == nil { - body = fn.newBasicBlock("switch.body") // first case only - } - - // Preallocate body block for the next case. - fallthru = done - if i+1 < ncases { - fallthru = fn.newBasicBlock("switch.body") - } - - cc := clause.(*ast.CaseClause) - if cc.List == nil { - // Default case. - dfltBody = &cc.Body - dfltFallthrough = fallthru - dfltBlock = body - continue - } - - var nextCond *BasicBlock - for _, cond := range cc.List { - nextCond = fn.newBasicBlock("switch.next") - if tagv == kTrue { - // emit a proper if/else chain instead of a comparison - // of a value against true. - // - // NOTE(dh): adonovan had a todo saying "don't forget - // conversions though". As far as I can tell, there - // aren't any conversions that we need to take care of - // here. `case bool(a) && bool(b)` as well as `case - // bool(a && b)` are being taken care of by b.cond, - // and `case a` where a is not of type bool is - // invalid. - b.cond(fn, cond, body, nextCond) - } else { - cond := emitCompare(fn, token.EQL, emitLoad(fn, tag, cond), b.expr(fn, cond), cond) - emitIf(fn, cond, body, nextCond, cond.Source()) - } - - fn.currentBlock = nextCond - } - fn.currentBlock = body - fn.targets = &targets{ - tail: fn.targets, - _break: done, - _fallthrough: fallthru, - } - b.stmtList(fn, cc.Body) - fn.targets = fn.targets.tail - emitJump(fn, done, s) - fn.currentBlock = nextCond - } - if dfltBlock != nil { - // The lack of a Source for the jump doesn't matter, block - // fusing will get rid of the jump later. 
- - emitJump(fn, dfltBlock, s) - fn.currentBlock = dfltBlock - fn.targets = &targets{ - tail: fn.targets, - _break: done, - _fallthrough: dfltFallthrough, - } - b.stmtList(fn, *dfltBody) - fn.targets = fn.targets.tail - } - emitJump(fn, done, s) - fn.currentBlock = done -} - -func (b *builder) typeSwitchStmt(fn *Function, s *ast.TypeSwitchStmt, label *lblock) { - if s.Init != nil { - b.stmt(fn, s.Init) - } - - var tag Value - switch e := s.Assign.(type) { - case *ast.ExprStmt: // x.(type) - tag = b.expr(fn, unparen(e.X).(*ast.TypeAssertExpr).X) - case *ast.AssignStmt: // y := x.(type) - tag = b.expr(fn, unparen(e.Rhs[0]).(*ast.TypeAssertExpr).X) - default: - panic("unreachable") - } - tagPtr := fn.addLocal(tag.Type(), tag.Source()) - emitStore(fn, tagPtr, tag, tag.Source()) - - // +1 in case there's no explicit default case - heads := make([]*BasicBlock, 0, len(s.Body.List)+1) - - entry := fn.currentBlock - done := fn.newBasicBlock("done") - if label != nil { - label._break = done - } - - // set up type switch and constant switch, populate their conditions - tswtch := &TypeSwitch{ - Tag: emitLoad(fn, tagPtr, tag.Source()), - Conds: make([]types.Type, 0, len(s.Body.List)+1), - } - cswtch := &ConstantSwitch{ - Conds: make([]Value, 0, len(s.Body.List)+1), - } - - rets := make([]types.Type, 0, len(s.Body.List)+1) - index := 0 - var default_ *ast.CaseClause - for _, clause := range s.Body.List { - cc := clause.(*ast.CaseClause) - if obj := fn.Pkg.info.Implicits[cc]; obj != nil { - fn.addNamedLocal(obj, cc) - } - if cc.List == nil { - // default case - default_ = cc - } else { - for _, expr := range cc.List { - tswtch.Conds = append(tswtch.Conds, fn.Pkg.typeOf(expr)) - cswtch.Conds = append(cswtch.Conds, emitConst(fn, intConst(int64(index)))) - index++ - } - if len(cc.List) == 1 { - rets = append(rets, fn.Pkg.typeOf(cc.List[0])) - } else { - for range cc.List { - rets = append(rets, tag.Type()) - } - } - } - } - - // default branch - rets = append(rets, tag.Type()) - - var vars []*types.Var - vars = append(vars, varIndex) - for _, typ := range rets { - vars = append(vars, anonVar(typ)) - } - tswtch.setType(types.NewTuple(vars...)) - // default branch - fn.currentBlock = entry - fn.emit(tswtch, s) - cswtch.Conds = append(cswtch.Conds, emitConst(fn, intConst(int64(-1)))) - // in theory we should add a local and stores/loads for tswtch, to - // generate sigma nodes in the branches. however, there isn't any - // useful information we could possibly attach to it. - cswtch.Tag = emitExtract(fn, tswtch, 0, s) - fn.emit(cswtch, s) - - // build heads and bodies - index = 0 - for _, clause := range s.Body.List { - cc := clause.(*ast.CaseClause) - if cc.List == nil { - continue - } - - body := fn.newBasicBlock("typeswitch.body") - for _, expr := range cc.List { - head := fn.newBasicBlock("typeswitch.head") - heads = append(heads, head) - fn.currentBlock = head - - if obj := fn.Pkg.info.Implicits[cc]; obj != nil { - // In a switch y := x.(type), each case clause - // implicitly declares a distinct object y. - // In a single-type case, y has that type. - // In multi-type cases, 'case nil' and default, - // y has the same type as the interface operand. 
- - l := fn.objects[obj] - if rets[index] == tUntypedNil { - emitStore(fn, l, emitConst(fn, nilConst(tswtch.Tag.Type())), s.Assign) - } else { - x := emitExtract(fn, tswtch, index+1, s.Assign) - emitStore(fn, l, x, nil) - } - } - - emitJump(fn, body, expr) - index++ - } - fn.currentBlock = body - fn.targets = &targets{ - tail: fn.targets, - _break: done, - } - b.stmtList(fn, cc.Body) - fn.targets = fn.targets.tail - emitJump(fn, done, clause) - } - - if default_ == nil { - // implicit default - heads = append(heads, done) - } else { - body := fn.newBasicBlock("typeswitch.default") - heads = append(heads, body) - fn.currentBlock = body - fn.targets = &targets{ - tail: fn.targets, - _break: done, - } - if obj := fn.Pkg.info.Implicits[default_]; obj != nil { - l := fn.objects[obj] - x := emitExtract(fn, tswtch, index+1, s.Assign) - emitStore(fn, l, x, s) - } - b.stmtList(fn, default_.Body) - fn.targets = fn.targets.tail - emitJump(fn, done, s) - } - - fn.currentBlock = entry - for _, head := range heads { - addEdge(entry, head) - } - fn.currentBlock = done -} - -// selectStmt emits to fn code for the select statement s, optionally -// labelled by label. -func (b *builder) selectStmt(fn *Function, s *ast.SelectStmt, label *lblock) (noreturn bool) { - if len(s.Body.List) == 0 { - instr := &Select{Blocking: true} - instr.setType(types.NewTuple(varIndex, varOk)) - fn.emit(instr, s) - fn.emit(new(Unreachable), s) - addEdge(fn.currentBlock, fn.Exit) - return true - } - - // A blocking select of a single case degenerates to a - // simple send or receive. - // TODO(adonovan): opt: is this optimization worth its weight? - if len(s.Body.List) == 1 { - clause := s.Body.List[0].(*ast.CommClause) - if clause.Comm != nil { - b.stmt(fn, clause.Comm) - done := fn.newBasicBlock("select.done") - if label != nil { - label._break = done - } - fn.targets = &targets{ - tail: fn.targets, - _break: done, - } - b.stmtList(fn, clause.Body) - fn.targets = fn.targets.tail - emitJump(fn, done, clause) - fn.currentBlock = done - return false - } - } - - // First evaluate all channels in all cases, and find - // the directions of each state. - var states []*SelectState - blocking := true - debugInfo := fn.debugInfo() - for _, clause := range s.Body.List { - var st *SelectState - switch comm := clause.(*ast.CommClause).Comm.(type) { - case nil: // default case - blocking = false - continue - - case *ast.SendStmt: // ch<- i - ch := b.expr(fn, comm.Chan) - st = &SelectState{ - Dir: types.SendOnly, - Chan: ch, - Send: emitConv(fn, b.expr(fn, comm.Value), - typeutil.CoreType(ch.Type()).Underlying().(*types.Chan).Elem(), comm), - Pos: comm.Arrow, - } - if debugInfo { - st.DebugNode = comm - } - - case *ast.AssignStmt: // x := <-ch - recv := unparen(comm.Rhs[0]).(*ast.UnaryExpr) - st = &SelectState{ - Dir: types.RecvOnly, - Chan: b.expr(fn, recv.X), - Pos: recv.OpPos, - } - if debugInfo { - st.DebugNode = recv - } - - case *ast.ExprStmt: // <-ch - recv := unparen(comm.X).(*ast.UnaryExpr) - st = &SelectState{ - Dir: types.RecvOnly, - Chan: b.expr(fn, recv.X), - Pos: recv.OpPos, - } - if debugInfo { - st.DebugNode = recv - } - } - states = append(states, st) - } - - // We dispatch on the (fair) result of Select using a - // switch on the returned index. 
- sel := &Select{ - States: states, - Blocking: blocking, - } - sel.source = s - var vars []*types.Var - vars = append(vars, varIndex, varOk) - for _, st := range states { - if st.Dir == types.RecvOnly { - tElem := typeutil.CoreType(st.Chan.Type()).Underlying().(*types.Chan).Elem() - vars = append(vars, anonVar(tElem)) - } - } - sel.setType(types.NewTuple(vars...)) - fn.emit(sel, s) - idx := emitExtract(fn, sel, 0, s) - - done := fn.newBasicBlock("select.done") - if label != nil { - label._break = done - } - - entry := fn.currentBlock - swtch := &ConstantSwitch{ - Tag: idx, - // one condition per case - Conds: make([]Value, 0, len(s.Body.List)+1), - } - // note that we don't need heads; a select case can only have a single condition - var bodies []*BasicBlock - - state := 0 - r := 2 // index in 'sel' tuple of value; increments if st.Dir==RECV - for _, cc := range s.Body.List { - clause := cc.(*ast.CommClause) - if clause.Comm == nil { - body := fn.newBasicBlock("select.default") - fn.currentBlock = body - bodies = append(bodies, body) - fn.targets = &targets{ - tail: fn.targets, - _break: done, - } - b.stmtList(fn, clause.Body) - emitJump(fn, done, s) - fn.targets = fn.targets.tail - swtch.Conds = append(swtch.Conds, emitConst(fn, intConst(-1))) - continue - } - swtch.Conds = append(swtch.Conds, emitConst(fn, intConst(int64(state)))) - body := fn.newBasicBlock("select.body") - fn.currentBlock = body - bodies = append(bodies, body) - fn.targets = &targets{ - tail: fn.targets, - _break: done, - } - switch comm := clause.Comm.(type) { - case *ast.ExprStmt: // <-ch - if debugInfo { - v := emitExtract(fn, sel, r, comm) - emitDebugRef(fn, states[state].DebugNode.(ast.Expr), v, false) - } - r++ - - case *ast.AssignStmt: // x := <-states[state].Chan - if comm.Tok == token.DEFINE { - fn.addLocalForIdent(comm.Lhs[0].(*ast.Ident)) - } - x := b.addr(fn, comm.Lhs[0], false) // non-escaping - v := emitExtract(fn, sel, r, comm) - if debugInfo { - emitDebugRef(fn, states[state].DebugNode.(ast.Expr), v, false) - } - x.store(fn, v, comm) - - if len(comm.Lhs) == 2 { // x, ok := ... - if comm.Tok == token.DEFINE { - fn.addLocalForIdent(comm.Lhs[1].(*ast.Ident)) - } - ok := b.addr(fn, comm.Lhs[1], false) // non-escaping - ok.store(fn, emitExtract(fn, sel, 1, comm), comm) - } - r++ - } - b.stmtList(fn, clause.Body) - fn.targets = fn.targets.tail - emitJump(fn, done, s) - state++ - } - fn.currentBlock = entry - fn.emit(swtch, s) - for _, body := range bodies { - addEdge(entry, body) - } - fn.currentBlock = done - return false -} - -// forStmt emits to fn code for the for statement s, optionally -// labelled by label. -func (b *builder) forStmt(fn *Function, s *ast.ForStmt, label *lblock) { - // ...init... - // jump loop - // loop: - // if cond goto body else done - // body: - // ...body... - // jump post - // post: (target of continue) - // ...post... 
- // jump loop - // done: (target of break) - if s.Init != nil { - b.stmt(fn, s.Init) - } - body := fn.newBasicBlock("for.body") - done := fn.newBasicBlock("for.done") // target of 'break' - loop := body // target of back-edge - if s.Cond != nil { - loop = fn.newBasicBlock("for.loop") - } - cont := loop // target of 'continue' - if s.Post != nil { - cont = fn.newBasicBlock("for.post") - } - if label != nil { - label._break = done - label._continue = cont - } - emitJump(fn, loop, s) - fn.currentBlock = loop - if loop != body { - b.cond(fn, s.Cond, body, done) - fn.currentBlock = body - } - fn.targets = &targets{ - tail: fn.targets, - _break: done, - _continue: cont, - } - b.stmt(fn, s.Body) - fn.targets = fn.targets.tail - emitJump(fn, cont, s) - - if s.Post != nil { - fn.currentBlock = cont - b.stmt(fn, s.Post) - emitJump(fn, loop, s) // back-edge - } - fn.currentBlock = done -} - -// rangeIndexed emits to fn the header for an integer-indexed loop -// over array, *array or slice value x. -// The v result is defined only if tv is non-nil. -// forPos is the position of the "for" token. -func (b *builder) rangeIndexed(fn *Function, x Value, tv types.Type, source ast.Node) (k, v Value, loop, done *BasicBlock) { - // - // length = len(x) - // index = -1 - // loop: (target of continue) - // index++ - // if index < length goto body else done - // body: - // k = index - // v = x[index] - // ...body... - // jump loop - // done: (target of break) - - // We store in an Alloc and load it on each iteration so that lifting produces the necessary σ nodes - xAlloc := newVariable(fn, x.Type(), source) - xAlloc.store(x) - - // Determine number of iterations. - // - // We store the length in an Alloc and load it on each iteration so that lifting produces the necessary σ nodes - length := newVariable(fn, tInt, source) - if arr, ok := deref(x.Type()).Underlying().(*types.Array); ok && !typeparams.IsTypeParam(x.Type()) { - // For array or *array, the number of iterations is known statically thanks to the type. We avoid a data - // dependence upon x, permitting later dead-code elimination if x is pure, static unrolling, etc. Ranging over a - // nil *array may have >0 iterations. We still generate code for x, in case it has effects. - // - // This intentionally misses type parameters with core types, because their length isn't technically constant. - length.store(emitConst(fn, intConst(arr.Len()))) - } else { - // length = len(x). 
- var c Call - c.Call.Value = makeLen(x.Type()) - c.Call.Args = []Value{x} - c.setType(tInt) - length.store(fn.emit(&c, source)) - } - - index := fn.addLocal(tInt, source) - emitStore(fn, index, emitConst(fn, intConst(-1)), source) - - loop = fn.newBasicBlock("rangeindex.loop") - emitJump(fn, loop, source) - fn.currentBlock = loop - - incr := &BinOp{ - Op: token.ADD, - X: emitLoad(fn, index, source), - Y: emitConst(fn, intConst(1)), - } - incr.setType(tInt) - emitStore(fn, index, fn.emit(incr, source), source) - - body := fn.newBasicBlock("rangeindex.body") - done = fn.newBasicBlock("rangeindex.done") - emitIf(fn, emitCompare(fn, token.LSS, incr, length.load(), source), body, done, source) - fn.currentBlock = body - - k = emitLoad(fn, index, source) - if tv != nil { - x := xAlloc.load() - switch t := typeutil.CoreType(x.Type()).Underlying().(type) { - case *types.Array: - instr := &Index{ - X: x, - Index: k, - } - instr.setType(t.Elem()) - v = fn.emit(instr, source) - - case *types.Pointer: // *array - instr := &IndexAddr{ - X: x, - Index: k, - } - instr.setType(types.NewPointer(t.Elem().Underlying().(*types.Array).Elem())) - v = emitLoad(fn, fn.emit(instr, source), source) - - case *types.Slice: - instr := &IndexAddr{ - X: x, - Index: k, - } - instr.setType(types.NewPointer(t.Elem())) - v = emitLoad(fn, fn.emit(instr, source), source) - - default: - panic("rangeIndexed x:" + t.String()) - } - } - return -} - -// rangeIter emits to fn the header for a loop using -// Range/Next/Extract to iterate over map or string value x. -// tk and tv are the types of the key/value results k and v, or nil -// if the respective component is not wanted. -func (b *builder) rangeIter(fn *Function, x Value, tk, tv types.Type, source ast.Node) (k, v Value, loop, done *BasicBlock) { - // - // it = range x - // loop: (target of continue) - // okv = next it (ok, key, value) - // ok = extract okv #0 - // if ok goto body else done - // body: - // k = extract okv #1 - // v = extract okv #2 - // ...body... - // jump loop - // done: (target of break) - // - - if tk == nil { - tk = tInvalid - } - if tv == nil { - tv = tInvalid - } - - rng := &Range{X: x} - rng.setType(typeutil.NewIterator(types.NewTuple( - varOk, - newVar("k", tk), - newVar("v", tv), - ))) - it := newVariable(fn, rng.typ, source) - it.store(fn.emit(rng, source)) - - loop = fn.newBasicBlock("rangeiter.loop") - emitJump(fn, loop, source) - fn.currentBlock = loop - - // Go doesn't currently allow ranging over string|[]byte, so isString is decidable. - _, isString := typeutil.CoreType(x.Type()).Underlying().(*types.Basic) - - okvInstr := &Next{ - Iter: it.load(), - IsString: isString, - } - okvInstr.setType(rng.typ.(*typeutil.Iterator).Elem()) - fn.emit(okvInstr, source) - okv := newVariable(fn, okvInstr.Type(), source) - okv.store(okvInstr) - - body := fn.newBasicBlock("rangeiter.body") - done = fn.newBasicBlock("rangeiter.done") - emitIf(fn, emitExtract(fn, okv.load(), 0, source), body, done, source) - fn.currentBlock = body - - if tk != tInvalid { - k = emitExtract(fn, okv.load(), 1, source) - } - if tv != tInvalid { - v = emitExtract(fn, okv.load(), 2, source) - } - return -} - -// rangeChan emits to fn the header for a loop that receives from -// channel x until it fails. -// tk is the channel's element type, or nil if the k result is -// not wanted -// pos is the position of the '=' or ':=' token. 
-func (b *builder) rangeChan(fn *Function, x Value, tk types.Type, source ast.Node) (k Value, loop, done *BasicBlock) { - // - // loop: (target of continue) - // ko = <-x (key, ok) - // ok = extract ko #1 - // if ok goto body else done - // body: - // k = extract ko #0 - // ... - // goto loop - // done: (target of break) - - loop = fn.newBasicBlock("rangechan.loop") - emitJump(fn, loop, source) - fn.currentBlock = loop - - recv := emitRecv(fn, x, true, types.NewTuple(newVar("k", typeutil.CoreType(x.Type()).Underlying().(*types.Chan).Elem()), varOk), source) - retv := newVariable(fn, recv.Type(), source) - retv.store(recv) - - body := fn.newBasicBlock("rangechan.body") - done = fn.newBasicBlock("rangechan.done") - emitIf(fn, emitExtract(fn, retv.load(), 1, source), body, done, source) - fn.currentBlock = body - if tk != nil { - k = emitExtract(fn, retv.load(), 0, source) - } - return -} - -type variable struct { - alloc *Alloc - fn *Function - source ast.Node -} - -func newVariable(fn *Function, typ types.Type, source ast.Node) *variable { - alloc := &Alloc{} - alloc.setType(types.NewPointer(typ)) - fn.emit(alloc, source) - fn.Locals = append(fn.Locals, alloc) - return &variable{ - alloc: alloc, - fn: fn, - source: source, - } -} - -func (v *variable) store(sv Value) { - emitStore(v.fn, v.alloc, sv, v.source) -} - -func (v *variable) load() Value { - return emitLoad(v.fn, v.alloc, v.source) -} - -// rangeStmt emits to fn code for the range statement s, optionally -// labelled by label. -func (b *builder) rangeStmt(fn *Function, s *ast.RangeStmt, label *lblock, source ast.Node) { - var tk, tv types.Type - if s.Key != nil && !isBlankIdent(s.Key) { - tk = fn.Pkg.typeOf(s.Key) - } - if s.Value != nil && !isBlankIdent(s.Value) { - tv = fn.Pkg.typeOf(s.Value) - } - - // If iteration variables are defined (:=), this - // occurs once outside the loop. - // - // Unlike a short variable declaration, a RangeStmt - // using := never redeclares an existing variable; it - // always creates a new one. - if s.Tok == token.DEFINE { - if tk != nil { - fn.addLocalForIdent(s.Key.(*ast.Ident)) - } - if tv != nil { - fn.addLocalForIdent(s.Value.(*ast.Ident)) - } - } - - x := b.expr(fn, s.X) - - var k, v Value - var loop, done *BasicBlock - switch rt := typeutil.CoreType(x.Type()).Underlying().(type) { - case *types.Slice, *types.Array, *types.Pointer: // *array - k, v, loop, done = b.rangeIndexed(fn, x, tv, source) - - case *types.Chan: - k, loop, done = b.rangeChan(fn, x, tk, source) - - case *types.Map, *types.Basic: // string - k, v, loop, done = b.rangeIter(fn, x, tk, tv, source) - - default: - panic("Cannot range over: " + rt.String()) - } - - // Evaluate both LHS expressions before we update either. - var kl, vl lvalue - if tk != nil { - kl = b.addr(fn, s.Key, false) // non-escaping - } - if tv != nil { - vl = b.addr(fn, s.Value, false) // non-escaping - } - if tk != nil { - kl.store(fn, k, s) - } - if tv != nil { - vl.store(fn, v, s) - } - - if label != nil { - label._break = done - label._continue = loop - } - - fn.targets = &targets{ - tail: fn.targets, - _break: done, - _continue: loop, - } - b.stmt(fn, s.Body) - fn.targets = fn.targets.tail - emitJump(fn, loop, source) // back-edge - fn.currentBlock = done -} - -// stmt lowers statement s to IR form, emitting code to fn. -func (b *builder) stmt(fn *Function, _s ast.Stmt) { - // The label of the current statement. 
If non-nil, its _goto - // target is always set; its _break and _continue are set only - // within the body of switch/typeswitch/select/for/range. - // It is effectively an additional default-nil parameter of stmt(). - var label *lblock -start: - switch s := _s.(type) { - case *ast.EmptyStmt: - // ignore. (Usually removed by gofmt.) - - case *ast.DeclStmt: // Con, Var or Typ - d := s.Decl.(*ast.GenDecl) - if d.Tok == token.VAR { - for _, spec := range d.Specs { - if vs, ok := spec.(*ast.ValueSpec); ok { - b.localValueSpec(fn, vs) - } - } - } - - case *ast.LabeledStmt: - label = fn.labelledBlock(s.Label) - emitJump(fn, label._goto, s) - fn.currentBlock = label._goto - _s = s.Stmt - goto start // effectively: tailcall stmt(fn, s.Stmt, label) - - case *ast.ExprStmt: - b.expr(fn, s.X) - - case *ast.SendStmt: - instr := &Send{ - Chan: b.expr(fn, s.Chan), - X: emitConv(fn, b.expr(fn, s.Value), - typeutil.CoreType(fn.Pkg.typeOf(s.Chan)).Underlying().(*types.Chan).Elem(), s), - } - fn.emit(instr, s) - - case *ast.IncDecStmt: - op := token.ADD - if s.Tok == token.DEC { - op = token.SUB - } - loc := b.addr(fn, s.X, false) - b.assignOp(fn, loc, emitConst(fn, NewConst(constant.MakeInt64(1), loc.typ())), op, s) - - case *ast.AssignStmt: - switch s.Tok { - case token.ASSIGN, token.DEFINE: - b.assignStmt(fn, s.Lhs, s.Rhs, s.Tok == token.DEFINE, _s) - - default: // +=, etc. - op := s.Tok + token.ADD - token.ADD_ASSIGN - b.assignOp(fn, b.addr(fn, s.Lhs[0], false), b.expr(fn, s.Rhs[0]), op, s) - } - - case *ast.GoStmt: - // The "intrinsics" new/make/len/cap are forbidden here. - // panic is treated like an ordinary function call. - v := Go{} - b.setCall(fn, s.Call, &v.Call) - fn.emit(&v, s) - - case *ast.DeferStmt: - // The "intrinsics" new/make/len/cap are forbidden here. - // panic is treated like an ordinary function call. - v := Defer{} - b.setCall(fn, s.Call, &v.Call) - fn.hasDefer = true - fn.emit(&v, s) - - case *ast.ReturnStmt: - // TODO(dh): we could emit tighter position information by - // using the ith returned expression - - var results []Value - if len(s.Results) == 1 && fn.Signature.Results().Len() > 1 { - // Return of one expression in a multi-valued function. - tuple := b.exprN(fn, s.Results[0]) - ttuple := tuple.Type().(*types.Tuple) - for i, n := 0, ttuple.Len(); i < n; i++ { - results = append(results, - emitConv(fn, emitExtract(fn, tuple, i, s), - fn.Signature.Results().At(i).Type(), s)) - } - } else { - // 1:1 return, or no-arg return in non-void function. 
- for i, r := range s.Results { - v := emitConv(fn, b.expr(fn, r), fn.Signature.Results().At(i).Type(), s) - results = append(results, v) - } - } - - ret := fn.results() - for i, r := range results { - emitStore(fn, ret[i], r, s) - } - - emitJump(fn, fn.Exit, s) - fn.currentBlock = fn.newBasicBlock("unreachable") - - case *ast.BranchStmt: - var block *BasicBlock - switch s.Tok { - case token.BREAK: - if s.Label != nil { - block = fn.labelledBlock(s.Label)._break - } else { - for t := fn.targets; t != nil && block == nil; t = t.tail { - block = t._break - } - } - - case token.CONTINUE: - if s.Label != nil { - block = fn.labelledBlock(s.Label)._continue - } else { - for t := fn.targets; t != nil && block == nil; t = t.tail { - block = t._continue - } - } - - case token.FALLTHROUGH: - for t := fn.targets; t != nil && block == nil; t = t.tail { - block = t._fallthrough - } - - case token.GOTO: - block = fn.labelledBlock(s.Label)._goto - } - j := emitJump(fn, block, s) - j.comment = s.Tok.String() - fn.currentBlock = fn.newBasicBlock("unreachable") - - case *ast.BlockStmt: - b.stmtList(fn, s.List) - - case *ast.IfStmt: - if s.Init != nil { - b.stmt(fn, s.Init) - } - then := fn.newBasicBlock("if.then") - done := fn.newBasicBlock("if.done") - els := done - if s.Else != nil { - els = fn.newBasicBlock("if.else") - } - instr := b.cond(fn, s.Cond, then, els) - instr.source = s - fn.currentBlock = then - b.stmt(fn, s.Body) - emitJump(fn, done, s) - - if s.Else != nil { - fn.currentBlock = els - b.stmt(fn, s.Else) - emitJump(fn, done, s) - } - - fn.currentBlock = done - - case *ast.SwitchStmt: - b.switchStmt(fn, s, label) - - case *ast.TypeSwitchStmt: - b.typeSwitchStmt(fn, s, label) - - case *ast.SelectStmt: - if b.selectStmt(fn, s, label) { - // the select has no cases, it blocks forever - fn.currentBlock = fn.newBasicBlock("unreachable") - } - - case *ast.ForStmt: - b.forStmt(fn, s, label) - - case *ast.RangeStmt: - b.rangeStmt(fn, s, label, s) - - default: - panic(fmt.Sprintf("unexpected statement kind: %T", s)) - } -} - -// buildFunction builds IR code for the body of function fn. Idempotent. -func (b *builder) buildFunction(fn *Function) { - if fn.Blocks != nil { - return // building already started - } - - var recvField *ast.FieldList - var body *ast.BlockStmt - var functype *ast.FuncType - switch n := fn.source.(type) { - case nil: - return // not a Go source function. (Synthetic, or from object file.) - case *ast.FuncDecl: - functype = n.Type - recvField = n.Recv - body = n.Body - case *ast.FuncLit: - functype = n.Type - body = n.Body - default: - panic(n) - } - - if fn.Package().Pkg.Path() == "syscall" && fn.Name() == "Exit" { - // syscall.Exit is a stub and the way os.Exit terminates the - // process. Note that there are other functions in the runtime - // that also terminate or unwind that we cannot analyze. - // However, they aren't stubs, so buildExits ends up getting - // called on them, so that's where we handle those special - // cases. - fn.NoReturn = AlwaysExits - } - - if body == nil { - // External function. - if fn.Params == nil { - // This condition ensures we add a non-empty - // params list once only, but we may attempt - // the degenerate empty case repeatedly. - // TODO(adonovan): opt: don't do that. - - // We set Function.Params even though there is no body - // code to reference them. This simplifies clients. 
- if recv := fn.Signature.Recv(); recv != nil { - // XXX synthesize an ast.Node - fn.addParamObj(recv, nil) - } - params := fn.Signature.Params() - for i, n := 0, params.Len(); i < n; i++ { - // XXX synthesize an ast.Node - fn.addParamObj(params.At(i), nil) - } - } - return - } - if fn.Prog.mode&LogSource != 0 { - defer logStack("build function %s @ %s", fn, fn.Prog.Fset.Position(fn.Pos()))() - } - fn.blocksets = b.blocksets - fn.Blocks = make([]*BasicBlock, 0, avgBlocks) - fn.startBody() - fn.createSyntacticParams(recvField, functype) - fn.exitBlock() - b.stmt(fn, body) - if cb := fn.currentBlock; cb != nil && (cb == fn.Blocks[0] || cb.Preds != nil) { - // Control fell off the end of the function's body block. - // - // Block optimizations eliminate the current block, if - // unreachable. It is a builder invariant that - // if this no-arg return is ill-typed for - // fn.Signature.Results, this block must be - // unreachable. The sanity checker checks this. - // fn.emit(new(RunDefers)) - // fn.emit(new(Return)) - emitJump(fn, fn.Exit, nil) - } - optimizeBlocks(fn) - buildFakeExits(fn) - b.buildExits(fn) - b.addUnreachables(fn) - fn.finishBody() - b.blocksets = fn.blocksets - fn.functionBody = nil -} - -// buildFuncDecl builds IR code for the function or method declared -// by decl in package pkg. -func (b *builder) buildFuncDecl(pkg *Package, decl *ast.FuncDecl) { - id := decl.Name - if isBlankIdent(id) { - return // discard - } - fn := pkg.values[pkg.info.Defs[id]].(*Function) - if decl.Recv == nil && id.Name == "init" { - var v Call - v.Call.Value = fn - v.setType(types.NewTuple()) - pkg.init.emit(&v, decl) - } - fn.source = decl - b.buildFunction(fn) -} - -// Build calls Package.Build for each package in prog. -// -// Build is intended for whole-program analysis; a typical compiler -// need only build a single package. -// -// Build is idempotent and thread-safe. -func (prog *Program) Build() { - for _, p := range prog.packages { - p.Build() - } -} - -// Build builds IR code for all functions and vars in package p. -// -// Precondition: CreatePackage must have been called for all of p's -// direct imports (and hence its direct imports must have been -// error-free). -// -// Build is idempotent and thread-safe. -func (p *Package) Build() { p.buildOnce.Do(p.build) } - -func (p *Package) build() { - if p.info == nil { - return // synthetic package, e.g. "testmain" - } - - // Ensure we have runtime type info for all exported members. - // TODO(adonovan): ideally belongs in memberFromObject, but - // that would require package creation in topological order. - for name, mem := range p.Members { - if ast.IsExported(name) { - p.Prog.needMethodsOf(mem.Type()) - } - } - if p.Prog.mode&LogSource != 0 { - defer logStack("build %s", p)() - } - init := p.init - init.startBody() - init.exitBlock() - - var done *BasicBlock - - // Make init() skip if package is already initialized. - initguard := p.Var("init$guard") - doinit := init.newBasicBlock("init.start") - done = init.Exit - emitIf(init, emitLoad(init, initguard, nil), done, doinit, nil) - init.currentBlock = doinit - emitStore(init, initguard, emitConst(init, NewConst(constant.MakeBool(true), tBool)), nil) - - // Call the init() function of each package we import. 
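Taken together, the guard emitted above and the per-import calls emitted just below amount to roughly the following ordinary Go; the names here are illustrative, the real guard is the init$guard Global:

var initGuard bool // stands in for the synthesized init$guard package variable

func packageInit() {
	if initGuard {
		return
	}
	initGuard = true
	// 1. call the synthesized init() of every directly imported package
	// 2. run the package-level variable initializers in p.info.InitOrder
	// 3. call each user-declared init function (init#1, init#2, ...) as it is
	//    built by buildFuncDecl
}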
- for _, pkg := range p.Pkg.Imports() { - prereq := p.Prog.packages[pkg] - if prereq == nil { - panic(fmt.Sprintf("Package(%q).Build(): unsatisfied import: Program.CreatePackage(%q) was not called", p.Pkg.Path(), pkg.Path())) - } - var v Call - v.Call.Value = prereq.init - v.setType(types.NewTuple()) - init.emit(&v, nil) - } - - b := builder{ - printFunc: p.printFunc, - } - - // Initialize package-level vars in correct order. - for _, varinit := range p.info.InitOrder { - if init.Prog.mode&LogSource != 0 { - fmt.Fprintf(os.Stderr, "build global initializer %v @ %s\n", - varinit.Lhs, p.Prog.Fset.Position(varinit.Rhs.Pos())) - } - if len(varinit.Lhs) == 1 { - // 1:1 initialization: var x, y = a(), b() - var lval lvalue - if v := varinit.Lhs[0]; v.Name() != "_" { - lval = &address{addr: p.values[v].(*Global)} - } else { - lval = blank{} - } - // TODO(dh): do emit position information - b.assign(init, lval, varinit.Rhs, true, nil, nil) - } else { - // n:1 initialization: var x, y := f() - tuple := b.exprN(init, varinit.Rhs) - for i, v := range varinit.Lhs { - if v.Name() == "_" { - continue - } - emitStore(init, p.values[v].(*Global), emitExtract(init, tuple, i, nil), nil) - } - } - } - - // Build all package-level functions, init functions - // and methods, including unreachable/blank ones. - // We build them in source order, but it's not significant. - for _, file := range p.files { - for _, decl := range file.Decls { - if decl, ok := decl.(*ast.FuncDecl); ok { - b.buildFuncDecl(p, decl) - } - } - } - - // Finish up init(). - emitJump(init, done, nil) - init.finishBody() - - p.info = nil // We no longer need ASTs or go/types deductions. - - if p.Prog.mode&SanityCheckFunctions != 0 { - sanityCheckPackage(p) - } -} - -// Like ObjectOf, but panics instead of returning nil. -// Only valid during p's create and build phases. -func (p *Package) objectOf(id *ast.Ident) types.Object { - if o := p.info.ObjectOf(id); o != nil { - return o - } - panic(fmt.Sprintf("no types.Object for ast.Ident %s @ %s", - id.Name, p.Prog.Fset.Position(id.Pos()))) -} - -// Like TypeOf, but panics instead of returning nil. -// Only valid during p's create and build phases. -func (p *Package) typeOf(e ast.Expr) types.Type { - if T := p.info.TypeOf(e); T != nil { - return T - } - panic(fmt.Sprintf("no type for %T @ %s", - e, p.Prog.Fset.Position(e.Pos()))) -} diff --git a/vendor/honnef.co/go/tools/go/ir/const.go b/vendor/honnef.co/go/tools/go/ir/const.go deleted file mode 100644 index 0faf3852a..000000000 --- a/vendor/honnef.co/go/tools/go/ir/const.go +++ /dev/null @@ -1,285 +0,0 @@ -// Copyright 2013 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package ir - -// This file defines the Const SSA value type. - -import ( - "fmt" - "go/constant" - "go/types" - "strconv" - "strings" - - "golang.org/x/exp/typeparams" - "honnef.co/go/tools/go/types/typeutil" -) - -// NewConst returns a new constant of the specified value and type. -// val must be valid according to the specification of Const.Value. -func NewConst(val constant.Value, typ types.Type) *Const { - return &Const{ - register: register{ - typ: typ, - }, - Value: val, - } -} - -// intConst returns an 'int' constant that evaluates to i. -// (i is an int64 in case the host is narrower than the target.) 
-func intConst(i int64) *Const { - return NewConst(constant.MakeInt64(i), tInt) -} - -// nilConst returns a nil constant of the specified type, which may -// be any reference type, including interfaces. -func nilConst(typ types.Type) *Const { - return NewConst(nil, typ) -} - -// stringConst returns a 'string' constant that evaluates to s. -func stringConst(s string) *Const { - return NewConst(constant.MakeString(s), tString) -} - -// zeroConst returns a new "zero" constant of the specified type. -func zeroConst(t types.Type) Constant { - if _, ok := t.Underlying().(*types.Interface); ok && !typeparams.IsTypeParam(t) { - // Handle non-generic interface early to simplify following code. - return nilConst(t) - } - - tset := typeutil.NewTypeSet(t) - - switch typ := tset.CoreType().(type) { - case *types.Struct: - values := make([]Value, typ.NumFields()) - for i := 0; i < typ.NumFields(); i++ { - values[i] = zeroConst(typ.Field(i).Type()) - } - return &AggregateConst{ - register: register{typ: t}, - Values: values, - } - case *types.Tuple: - values := make([]Value, typ.Len()) - for i := 0; i < typ.Len(); i++ { - values[i] = zeroConst(typ.At(i).Type()) - } - return &AggregateConst{ - register: register{typ: t}, - Values: values, - } - } - - isNillable := func(term *types.Term) bool { - switch typ := term.Type().Underlying().(type) { - case *types.Pointer, *types.Slice, *types.Interface, *types.Chan, *types.Map, *types.Signature, *typeutil.Iterator: - return true - case *types.Basic: - switch typ.Kind() { - case types.UnsafePointer, types.UntypedNil: - return true - default: - return false - } - default: - return false - } - } - - isInfo := func(info types.BasicInfo) func(*types.Term) bool { - return func(term *types.Term) bool { - basic, ok := term.Type().Underlying().(*types.Basic) - if !ok { - return false - } - return (basic.Info() & info) != 0 - } - } - - isArray := func(term *types.Term) bool { - _, ok := term.Type().Underlying().(*types.Array) - return ok - } - - switch { - case tset.All(isInfo(types.IsNumeric)): - return NewConst(constant.MakeInt64(0), t) - case tset.All(isInfo(types.IsString)): - return NewConst(constant.MakeString(""), t) - case tset.All(isInfo(types.IsBoolean)): - return NewConst(constant.MakeBool(false), t) - case tset.All(isNillable): - return nilConst(t) - case tset.All(isArray): - var k ArrayConst - k.setType(t) - return &k - default: - var k GenericConst - k.setType(t) - return &k - } -} - -func (c *Const) RelString(from *types.Package) string { - var p string - if c.Value == nil { - p = "nil" - } else if c.Value.Kind() == constant.String { - v := constant.StringVal(c.Value) - const max = 20 - // TODO(adonovan): don't cut a rune in half. - if len(v) > max { - v = v[:max-3] + "..." // abbreviate - } - p = strconv.Quote(v) - } else { - p = c.Value.String() - } - return fmt.Sprintf("Const <%s> {%s}", relType(c.Type(), from), p) -} - -func (c *Const) String() string { - if c.block == nil { - // Constants don't have a block till late in the compilation process. But we want to print consts during - // debugging. 
- return c.RelString(nil) - } - return c.RelString(c.Parent().pkg()) -} - -func (v *ArrayConst) RelString(pkg *types.Package) string { - return fmt.Sprintf("ArrayConst <%s>", relType(v.Type(), pkg)) -} - -func (v *ArrayConst) String() string { - return v.RelString(v.Parent().pkg()) -} - -func (v *AggregateConst) RelString(pkg *types.Package) string { - values := make([]string, len(v.Values)) - for i, v := range v.Values { - if v != nil { - values[i] = v.Name() - } else { - values[i] = "nil" - } - } - return fmt.Sprintf("AggregateConst <%s> (%s)", relType(v.Type(), pkg), strings.Join(values, ", ")) -} - -func (v *AggregateConst) String() string { - if v.block == nil { - return v.RelString(nil) - } - return v.RelString(v.Parent().pkg()) -} - -func (v *GenericConst) RelString(pkg *types.Package) string { - return fmt.Sprintf("GenericConst <%s>", relType(v.Type(), pkg)) -} - -func (v *GenericConst) String() string { - return v.RelString(v.Parent().pkg()) -} - -// IsNil returns true if this constant represents a typed or untyped nil value. -func (c *Const) IsNil() bool { - return c.Value == nil -} - -// Int64 returns the numeric value of this constant truncated to fit -// a signed 64-bit integer. -func (c *Const) Int64() int64 { - switch x := constant.ToInt(c.Value); x.Kind() { - case constant.Int: - if i, ok := constant.Int64Val(x); ok { - return i - } - return 0 - case constant.Float: - f, _ := constant.Float64Val(x) - return int64(f) - } - panic(fmt.Sprintf("unexpected constant value: %T", c.Value)) -} - -// Uint64 returns the numeric value of this constant truncated to fit -// an unsigned 64-bit integer. -func (c *Const) Uint64() uint64 { - switch x := constant.ToInt(c.Value); x.Kind() { - case constant.Int: - if u, ok := constant.Uint64Val(x); ok { - return u - } - return 0 - case constant.Float: - f, _ := constant.Float64Val(x) - return uint64(f) - } - panic(fmt.Sprintf("unexpected constant value: %T", c.Value)) -} - -// Float64 returns the numeric value of this constant truncated to fit -// a float64. -func (c *Const) Float64() float64 { - f, _ := constant.Float64Val(c.Value) - return f -} - -// Complex128 returns the complex value of this constant truncated to -// fit a complex128. 
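The accessors above, together with Complex128 just below, are the usual way a checker consumes a Const. A short usage sketch, assuming the standard fmt, go/constant and go/types imports plus this ir package:

c := ir.NewConst(constant.MakeInt64(42), types.Typ[types.Int])
fmt.Println(c.Int64(), c.IsNil()) // 42 false
fmt.Println(c.RelString(nil))     // e.g. "Const <int> {42}"

n := ir.NewConst(nil, types.NewSlice(types.Typ[types.Byte]))
fmt.Println(n.IsNil()) // true: a nil Value represents a typed or untyped nil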
-func (c *Const) Complex128() complex128 { - re, _ := constant.Float64Val(constant.Real(c.Value)) - im, _ := constant.Float64Val(constant.Imag(c.Value)) - return complex(re, im) -} - -func (c *Const) equal(o Constant) bool { - // TODO(dh): don't use == for types, this will miss identical pointer types, among others - oc, ok := o.(*Const) - if !ok { - return false - } - return c.typ == oc.typ && c.Value == oc.Value -} - -func (c *AggregateConst) equal(o Constant) bool { - oc, ok := o.(*AggregateConst) - if !ok { - return false - } - // TODO(dh): don't use == for types, this will miss identical pointer types, among others - if c.typ != oc.typ { - return false - } - for i, v := range c.Values { - if !v.(Constant).equal(oc.Values[i].(Constant)) { - return false - } - } - return true -} - -func (c *ArrayConst) equal(o Constant) bool { - oc, ok := o.(*ArrayConst) - if !ok { - return false - } - // TODO(dh): don't use == for types, this will miss identical pointer types, among others - return c.typ == oc.typ -} - -func (c *GenericConst) equal(o Constant) bool { - oc, ok := o.(*GenericConst) - if !ok { - return false - } - // TODO(dh): don't use == for types, this will miss identical pointer types, among others - return c.typ == oc.typ -} diff --git a/vendor/honnef.co/go/tools/go/ir/create.go b/vendor/honnef.co/go/tools/go/ir/create.go deleted file mode 100644 index 28e7da7e9..000000000 --- a/vendor/honnef.co/go/tools/go/ir/create.go +++ /dev/null @@ -1,282 +0,0 @@ -// Copyright 2013 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package ir - -// This file implements the CREATE phase of IR construction. -// See builder.go for explanation. - -import ( - "fmt" - "go/ast" - "go/token" - "go/types" - "os" - "sync" - - "honnef.co/go/tools/go/types/typeutil" -) - -// measured on the standard library and rounded up to powers of two, -// on average there are 8 blocks and 16 instructions per block in a -// function. -const avgBlocks = 8 -const avgInstructionsPerBlock = 16 - -// NewProgram returns a new IR Program. -// -// mode controls diagnostics and checking during IR construction. -func NewProgram(fset *token.FileSet, mode BuilderMode) *Program { - prog := &Program{ - Fset: fset, - imported: make(map[string]*Package), - packages: make(map[*types.Package]*Package), - thunks: make(map[selectionKey]*Function), - bounds: make(map[*types.Func]*Function), - mode: mode, - } - - h := typeutil.MakeHasher() // protected by methodsMu, in effect - prog.methodSets.SetHasher(h) - prog.canon.SetHasher(h) - - return prog -} - -// memberFromObject populates package pkg with a member for the -// typechecker object obj. -// -// For objects from Go source code, syntax is the associated syntax -// tree (for funcs and vars only); it will be used during the build -// phase. 
-func memberFromObject(pkg *Package, obj types.Object, syntax ast.Node) { - name := obj.Name() - switch obj := obj.(type) { - case *types.Builtin: - if pkg.Pkg != types.Unsafe { - panic("unexpected builtin object: " + obj.String()) - } - - case *types.TypeName: - pkg.Members[name] = &Type{ - object: obj, - pkg: pkg, - } - - case *types.Const: - c := &NamedConst{ - object: obj, - Value: NewConst(obj.Val(), obj.Type()), - pkg: pkg, - } - pkg.values[obj] = c.Value - pkg.Members[name] = c - - case *types.Var: - g := &Global{ - Pkg: pkg, - name: name, - object: obj, - typ: types.NewPointer(obj.Type()), // address - } - pkg.values[obj] = g - pkg.Members[name] = g - - case *types.Func: - sig := obj.Type().(*types.Signature) - if sig.Recv() == nil && name == "init" { - pkg.ninit++ - name = fmt.Sprintf("init#%d", pkg.ninit) - } - fn := &Function{ - name: name, - object: obj, - Signature: sig, - Pkg: pkg, - Prog: pkg.Prog, - } - - fn.source = syntax - fn.initHTML(pkg.printFunc) - if syntax == nil { - fn.Synthetic = SyntheticLoadedFromExportData - } else { - // Note: we initialize fn.Blocks in - // (*builder).buildFunction and not here because Blocks - // being nil is used to indicate that building of the - // function hasn't started yet. - - fn.functionBody = &functionBody{ - scratchInstructions: make([]Instruction, avgBlocks*avgInstructionsPerBlock), - } - } - - pkg.values[obj] = fn - pkg.Functions = append(pkg.Functions, fn) - if sig.Recv() == nil { - pkg.Members[name] = fn // package-level function - } - - default: // (incl. *types.Package) - panic("unexpected Object type: " + obj.String()) - } -} - -// membersFromDecl populates package pkg with members for each -// typechecker object (var, func, const or type) associated with the -// specified decl. -func membersFromDecl(pkg *Package, decl ast.Decl) { - switch decl := decl.(type) { - case *ast.GenDecl: // import, const, type or var - switch decl.Tok { - case token.CONST: - for _, spec := range decl.Specs { - for _, id := range spec.(*ast.ValueSpec).Names { - if !isBlankIdent(id) { - memberFromObject(pkg, pkg.info.Defs[id], nil) - } - } - } - - case token.VAR: - for _, spec := range decl.Specs { - for _, id := range spec.(*ast.ValueSpec).Names { - if !isBlankIdent(id) { - memberFromObject(pkg, pkg.info.Defs[id], spec) - } - } - } - - case token.TYPE: - for _, spec := range decl.Specs { - id := spec.(*ast.TypeSpec).Name - if !isBlankIdent(id) { - memberFromObject(pkg, pkg.info.Defs[id], nil) - } - } - } - - case *ast.FuncDecl: - id := decl.Name - if !isBlankIdent(id) { - memberFromObject(pkg, pkg.info.Defs[id], decl) - } - } -} - -// CreatePackage constructs and returns an IR Package from the -// specified type-checked, error-free file ASTs, and populates its -// Members mapping. -// -// importable determines whether this package should be returned by a -// subsequent call to ImportedPackage(pkg.Path()). -// -// The real work of building IR form for each function is not done -// until a subsequent call to Package.Build(). -func (prog *Program) CreatePackage(pkg *types.Package, files []*ast.File, info *types.Info, importable bool) *Package { - p := &Package{ - Prog: prog, - Members: make(map[string]Member), - values: make(map[types.Object]Value), - Pkg: pkg, - info: info, // transient (CREATE and BUILD phases) - files: files, // transient (CREATE and BUILD phases) - printFunc: prog.PrintFunc, - } - - // Add init() function. 
- p.init = &Function{ - name: "init", - Signature: new(types.Signature), - Synthetic: SyntheticPackageInitializer, - Pkg: p, - Prog: prog, - functionBody: new(functionBody), - } - p.init.initHTML(prog.PrintFunc) - p.Members[p.init.name] = p.init - p.Functions = append(p.Functions, p.init) - - // CREATE phase. - // Allocate all package members: vars, funcs, consts and types. - if len(files) > 0 { - // Go source package. - for _, file := range files { - for _, decl := range file.Decls { - membersFromDecl(p, decl) - } - } - } else { - // GC-compiled binary package (or "unsafe") - // No code. - // No position information. - scope := p.Pkg.Scope() - for _, name := range scope.Names() { - obj := scope.Lookup(name) - memberFromObject(p, obj, nil) - if obj, ok := obj.(*types.TypeName); ok { - if named, ok := obj.Type().(*types.Named); ok { - for i, n := 0, named.NumMethods(); i < n; i++ { - memberFromObject(p, named.Method(i), nil) - } - } - } - } - } - - // Add initializer guard variable. - initguard := &Global{ - Pkg: p, - name: "init$guard", - typ: types.NewPointer(tBool), - } - p.Members[initguard.Name()] = initguard - - if prog.mode&GlobalDebug != 0 { - p.SetDebugMode(true) - } - - if prog.mode&PrintPackages != 0 { - printMu.Lock() - p.WriteTo(os.Stdout) - printMu.Unlock() - } - - if importable { - prog.imported[p.Pkg.Path()] = p - } - prog.packages[p.Pkg] = p - - return p -} - -// printMu serializes printing of Packages/Functions to stdout. -var printMu sync.Mutex - -// AllPackages returns a new slice containing all packages in the -// program prog in unspecified order. -func (prog *Program) AllPackages() []*Package { - pkgs := make([]*Package, 0, len(prog.packages)) - for _, pkg := range prog.packages { - pkgs = append(pkgs, pkg) - } - return pkgs -} - -// ImportedPackage returns the importable Package whose PkgPath -// is path, or nil if no such Package has been created. -// -// A parameter to CreatePackage determines whether a package should be -// considered importable. For example, no import declaration can resolve -// to the ad-hoc main package created by 'go build foo.go'. -// -// TODO(adonovan): rethink this function and the "importable" concept; -// most packages are importable. This function assumes that all -// types.Package.Path values are unique within the ir.Program, which is -// false---yet this function remains very convenient. -// Clients should use (*Program).Package instead where possible. -// IR doesn't really need a string-keyed map of packages. -func (prog *Program) ImportedPackage(path string) *Package { - return prog.imported[path] -} diff --git a/vendor/honnef.co/go/tools/go/ir/doc.go b/vendor/honnef.co/go/tools/go/ir/doc.go deleted file mode 100644 index 5ee6637db..000000000 --- a/vendor/honnef.co/go/tools/go/ir/doc.go +++ /dev/null @@ -1,130 +0,0 @@ -// Copyright 2013 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Package ir defines a representation of the elements of Go programs -// (packages, types, functions, variables and constants) using a -// static single-information (SSI) form intermediate representation -// (IR) for the bodies of functions. -// -// THIS INTERFACE IS EXPERIMENTAL AND IS LIKELY TO CHANGE. -// -// For an introduction to SSA form, upon which SSI builds, see -// http://en.wikipedia.org/wiki/Static_single_assignment_form. -// This page provides a broader reading list: -// http://www.dcs.gla.ac.uk/~jsinger/ssa.html. 
-// -// For an introduction to SSI form, see The static single information -// form by C. Scott Ananian. -// -// The level of abstraction of the IR form is intentionally close to -// the source language to facilitate construction of source analysis -// tools. It is not intended for machine code generation. -// -// The simplest way to create the IR of a package is -// to load typed syntax trees using golang.org/x/tools/go/packages, then -// invoke the irutil.Packages helper function. See ExampleLoadPackages -// and ExampleWholeProgram for examples. -// The resulting ir.Program contains all the packages and their -// members, but IR code is not created for function bodies until a -// subsequent call to (*Package).Build or (*Program).Build. -// -// The builder initially builds a naive IR form in which all local -// variables are addresses of stack locations with explicit loads and -// stores. Registerization of eligible locals and φ-node insertion -// using dominance and dataflow are then performed as a second pass -// called "lifting" to improve the accuracy and performance of -// subsequent analyses; this pass can be skipped by setting the -// NaiveForm builder flag. -// -// The primary interfaces of this package are: -// -// - Member: a named member of a Go package. -// - Value: an expression that yields a value. -// - Instruction: a statement that consumes values and performs computation. -// - Node: a Value or Instruction (emphasizing its membership in the IR value graph) -// -// A computation that yields a result implements both the Value and -// Instruction interfaces. The following table shows for each -// concrete type which of these interfaces it implements. -// -// Value? Instruction? Member? -// *Alloc ✔ ✔ -// *BinOp ✔ ✔ -// *BlankStore ✔ -// *Builtin ✔ -// *Call ✔ ✔ -// *ChangeInterface ✔ ✔ -// *ChangeType ✔ ✔ -// *Const ✔ ✔ -// *Convert ✔ ✔ -// *DebugRef ✔ -// *Defer ✔ ✔ -// *Extract ✔ ✔ -// *Field ✔ ✔ -// *FieldAddr ✔ ✔ -// *FreeVar ✔ -// *Function ✔ ✔ (func) -// *Global ✔ ✔ (var) -// *Go ✔ ✔ -// *If ✔ -// *Index ✔ ✔ -// *IndexAddr ✔ ✔ -// *Jump ✔ -// *Load ✔ ✔ -// *MakeChan ✔ ✔ -// *MakeClosure ✔ ✔ -// *MakeInterface ✔ ✔ -// *MakeMap ✔ ✔ -// *MakeSlice ✔ ✔ -// *MapLookup ✔ ✔ -// *MapUpdate ✔ ✔ -// *NamedConst ✔ (const) -// *Next ✔ ✔ -// *Panic ✔ -// *Parameter ✔ ✔ -// *Phi ✔ ✔ -// *Range ✔ ✔ -// *Recv ✔ ✔ -// *Return ✔ -// *RunDefers ✔ -// *Select ✔ ✔ -// *Send ✔ ✔ -// *Sigma ✔ ✔ -// *Slice ✔ ✔ -// *SliceToArrayPointer ✔ ✔ -// *SliceToArray ✔ ✔ -// *Store ✔ ✔ -// *StringLookup ✔ ✔ -// *Type ✔ (type) -// *TypeAssert ✔ ✔ -// *UnOp ✔ ✔ -// *Unreachable ✔ -// -// Other key types in this package include: Program, Package, Function -// and BasicBlock. -// -// The program representation constructed by this package is fully -// resolved internally, i.e. it does not rely on the names of Values, -// Packages, Functions, Types or BasicBlocks for the correct -// interpretation of the program. Only the identities of objects and -// the topology of the IR and type graphs are semantically -// significant. (There is one exception: Ids, used to identify field -// and method names, contain strings.) Avoidance of name-based -// operations simplifies the implementation of subsequent passes and -// can make them very efficient. Many objects are nonetheless named -// to aid in debugging, but it is not essential that the names be -// either accurate or unambiguous. The public API exposes a number of -// name-based maps for client convenience. 
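Following the doc comment's recipe (load typed syntax with golang.org/x/tools/go/packages, then build), here is a minimal driver sketch. It bypasses the irutil.Packages convenience helper mentioned above and calls CreatePackage directly, creating packages in dependency order so that CreatePackage's precondition on direct imports holds; the mode flags and names are illustrative rather than prescriptive.

package main

import (
	"go/token"
	"log"

	"golang.org/x/tools/go/packages"
	"honnef.co/go/tools/go/ir"
)

func main() {
	fset := token.NewFileSet()
	cfg := &packages.Config{
		Fset: fset,
		Mode: packages.NeedName | packages.NeedFiles | packages.NeedImports |
			packages.NeedDeps | packages.NeedTypes | packages.NeedSyntax |
			packages.NeedTypesInfo,
	}
	pkgs, err := packages.Load(cfg, "./...")
	if err != nil {
		log.Fatal(err)
	}

	prog := ir.NewProgram(fset, ir.SanityCheckFunctions) // or 0 for no extra checking

	// Visit calls its post function with imports before importers, so every
	// direct import has been created before the importing package.
	packages.Visit(pkgs, nil, func(p *packages.Package) {
		if p.Types == nil || p.IllTyped {
			return // skip packages that failed to load or type-check
		}
		// Packages loaded only from export data have no Syntax/TypesInfo;
		// CreatePackage handles that case and records members without bodies.
		prog.CreatePackage(p.Types, p.Syntax, p.TypesInfo, true)
	})

	prog.Build() // builds IR for every function body in every created package
}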
-// -// The ir/irutil package provides various utilities that depend only -// on the public API of this package. -// -// TODO(adonovan): Consider the exceptional control-flow implications -// of defer and recover(). -// -// TODO(adonovan): write a how-to document for all the various cases -// of trying to determine corresponding elements across the four -// domains of source locations, ast.Nodes, types.Objects, -// ir.Values/Instructions. -package ir diff --git a/vendor/honnef.co/go/tools/go/ir/dom.go b/vendor/honnef.co/go/tools/go/ir/dom.go deleted file mode 100644 index 4febd284b..000000000 --- a/vendor/honnef.co/go/tools/go/ir/dom.go +++ /dev/null @@ -1,466 +0,0 @@ -// Copyright 2013 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package ir - -// This file defines algorithms related to dominance. - -// Dominator tree construction ---------------------------------------- -// -// We use the algorithm described in Lengauer & Tarjan. 1979. A fast -// algorithm for finding dominators in a flowgraph. -// http://doi.acm.org/10.1145/357062.357071 -// -// We also apply the optimizations to SLT described in Georgiadis et -// al, Finding Dominators in Practice, JGAA 2006, -// http://jgaa.info/accepted/2006/GeorgiadisTarjanWerneck2006.10.1.pdf -// to avoid the need for buckets of size > 1. - -import ( - "bytes" - "fmt" - "io" - "math/big" - "os" - "sort" -) - -// Idom returns the block that immediately dominates b: -// its parent in the dominator tree, if any. -// The entry node (b.Index==0) does not have a parent. -func (b *BasicBlock) Idom() *BasicBlock { return b.dom.idom } - -// Dominees returns the list of blocks that b immediately dominates: -// its children in the dominator tree. -func (b *BasicBlock) Dominees() []*BasicBlock { return b.dom.children } - -// Dominates reports whether b dominates c. -func (b *BasicBlock) Dominates(c *BasicBlock) bool { - return b.dom.pre <= c.dom.pre && c.dom.post <= b.dom.post -} - -type byDomPreorder []*BasicBlock - -func (a byDomPreorder) Len() int { return len(a) } -func (a byDomPreorder) Swap(i, j int) { a[i], a[j] = a[j], a[i] } -func (a byDomPreorder) Less(i, j int) bool { return a[i].dom.pre < a[j].dom.pre } - -// DomPreorder returns a new slice containing the blocks of f in -// dominator tree preorder. -func (f *Function) DomPreorder() []*BasicBlock { - n := len(f.Blocks) - order := make(byDomPreorder, n) - copy(order, f.Blocks) - sort.Sort(order) - return order -} - -// domInfo contains a BasicBlock's dominance information. -type domInfo struct { - idom *BasicBlock // immediate dominator (parent in domtree) - children []*BasicBlock // nodes immediately dominated by this one - pre, post int32 // pre- and post-order numbering within domtree -} - -// buildDomTree computes the dominator tree of f using the LT algorithm. -// Precondition: all blocks are reachable (e.g. optimizeBlocks has been run). -func buildDomTree(fn *Function) { - // The step numbers refer to the original LT paper; the - // reordering is due to Georgiadis. - - // Clear any previous domInfo. 
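The exported queries defined above (Idom, Dominees, Dominates, DomPreorder) answer dominance questions in constant time once the tree has been numbered. A hypothetical helper, assuming fmt and this ir package are imported:

func printDomTree(f *ir.Function) {
	// DomPreorder lists parents before children, so the entry block comes first.
	for _, b := range f.DomPreorder() {
		if idom := b.Idom(); idom != nil {
			fmt.Printf("block %s: idom=%s children=%v\n", b, idom, b.Dominees())
		} else {
			fmt.Printf("block %s: root of the dominator tree\n", b)
		}
	}
	// Ancestor queries are O(1) thanks to the pre/post numbering, e.g.
	// f.Blocks[0].Dominates(b) should hold for every reachable block b.
}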
- for _, b := range fn.Blocks { - b.dom = domInfo{} - } - - idoms := make([]*BasicBlock, len(fn.Blocks)) - - order := make([]*BasicBlock, 0, len(fn.Blocks)) - seen := fn.blockset(0) - var dfs func(b *BasicBlock) - dfs = func(b *BasicBlock) { - if !seen.Add(b) { - return - } - for _, succ := range b.Succs { - dfs(succ) - } - if fn.fakeExits.Has(b) { - dfs(fn.Exit) - } - order = append(order, b) - b.post = len(order) - 1 - } - dfs(fn.Blocks[0]) - - for i := 0; i < len(order)/2; i++ { - o := len(order) - i - 1 - order[i], order[o] = order[o], order[i] - } - - idoms[fn.Blocks[0].Index] = fn.Blocks[0] - changed := true - for changed { - changed = false - // iterate over all nodes in reverse postorder, except for the - // entry node - for _, b := range order[1:] { - var newIdom *BasicBlock - do := func(p *BasicBlock) { - if idoms[p.Index] == nil { - return - } - if newIdom == nil { - newIdom = p - } else { - finger1 := p - finger2 := newIdom - for finger1 != finger2 { - for finger1.post < finger2.post { - finger1 = idoms[finger1.Index] - } - for finger2.post < finger1.post { - finger2 = idoms[finger2.Index] - } - } - newIdom = finger1 - } - } - for _, p := range b.Preds { - do(p) - } - if b == fn.Exit { - for _, p := range fn.Blocks { - if fn.fakeExits.Has(p) { - do(p) - } - } - } - - if idoms[b.Index] != newIdom { - idoms[b.Index] = newIdom - changed = true - } - } - } - - for i, b := range idoms { - fn.Blocks[i].dom.idom = b - if b == nil { - // malformed CFG - continue - } - if i == b.Index { - continue - } - b.dom.children = append(b.dom.children, fn.Blocks[i]) - } - - numberDomTree(fn.Blocks[0], 0, 0) - - // printDomTreeDot(os.Stderr, fn) // debugging - // printDomTreeText(os.Stderr, root, 0) // debugging - - if fn.Prog.mode&SanityCheckFunctions != 0 { - sanityCheckDomTree(fn) - } -} - -// buildPostDomTree is like buildDomTree, but builds the post-dominator tree instead. -func buildPostDomTree(fn *Function) { - // The step numbers refer to the original LT paper; the - // reordering is due to Georgiadis. - - // Clear any previous domInfo. 
- for _, b := range fn.Blocks { - b.pdom = domInfo{} - } - - idoms := make([]*BasicBlock, len(fn.Blocks)) - - order := make([]*BasicBlock, 0, len(fn.Blocks)) - seen := fn.blockset(0) - var dfs func(b *BasicBlock) - dfs = func(b *BasicBlock) { - if !seen.Add(b) { - return - } - for _, pred := range b.Preds { - dfs(pred) - } - if b == fn.Exit { - for _, p := range fn.Blocks { - if fn.fakeExits.Has(p) { - dfs(p) - } - } - } - order = append(order, b) - b.post = len(order) - 1 - } - dfs(fn.Exit) - - for i := 0; i < len(order)/2; i++ { - o := len(order) - i - 1 - order[i], order[o] = order[o], order[i] - } - - idoms[fn.Exit.Index] = fn.Exit - changed := true - for changed { - changed = false - // iterate over all nodes in reverse postorder, except for the - // exit node - for _, b := range order[1:] { - var newIdom *BasicBlock - do := func(p *BasicBlock) { - if idoms[p.Index] == nil { - return - } - if newIdom == nil { - newIdom = p - } else { - finger1 := p - finger2 := newIdom - for finger1 != finger2 { - for finger1.post < finger2.post { - finger1 = idoms[finger1.Index] - } - for finger2.post < finger1.post { - finger2 = idoms[finger2.Index] - } - } - newIdom = finger1 - } - } - for _, p := range b.Succs { - do(p) - } - if fn.fakeExits.Has(b) { - do(fn.Exit) - } - - if idoms[b.Index] != newIdom { - idoms[b.Index] = newIdom - changed = true - } - } - } - - for i, b := range idoms { - fn.Blocks[i].pdom.idom = b - if b == nil { - // malformed CFG - continue - } - if i == b.Index { - continue - } - b.pdom.children = append(b.pdom.children, fn.Blocks[i]) - } - - numberPostDomTree(fn.Exit, 0, 0) - - // printPostDomTreeDot(os.Stderr, fn) // debugging - // printPostDomTreeText(os.Stderr, fn.Exit, 0) // debugging - - if fn.Prog.mode&SanityCheckFunctions != 0 { // XXX - sanityCheckDomTree(fn) // XXX - } -} - -// numberDomTree sets the pre- and post-order numbers of a depth-first -// traversal of the dominator tree rooted at v. These are used to -// answer dominance queries in constant time. -func numberDomTree(v *BasicBlock, pre, post int32) (int32, int32) { - v.dom.pre = pre - pre++ - for _, child := range v.dom.children { - pre, post = numberDomTree(child, pre, post) - } - v.dom.post = post - post++ - return pre, post -} - -// numberPostDomTree sets the pre- and post-order numbers of a depth-first -// traversal of the post-dominator tree rooted at v. These are used to -// answer post-dominance queries in constant time. -func numberPostDomTree(v *BasicBlock, pre, post int32) (int32, int32) { - v.pdom.pre = pre - pre++ - for _, child := range v.pdom.children { - pre, post = numberPostDomTree(child, pre, post) - } - v.pdom.post = post - post++ - return pre, post -} - -// Testing utilities ---------------------------------------- - -// sanityCheckDomTree checks the correctness of the dominator tree -// computed by the LT algorithm by comparing against the dominance -// relation computed by a naive Kildall-style forward dataflow -// analysis (Algorithm 10.16 from the "Dragon" book). -func sanityCheckDomTree(f *Function) { - n := len(f.Blocks) - - // D[i] is the set of blocks that dominate f.Blocks[i], - // represented as a bit-set of block indices. - D := make([]big.Int, n) - - one := big.NewInt(1) - - // all is the set of all blocks; constant. - var all big.Int - all.Set(one).Lsh(&all, uint(n)).Sub(&all, one) - - // Initialization. - for i := range f.Blocks { - if i == 0 { - // A root is dominated only by itself. 
- D[i].SetBit(&D[0], 0, 1) - } else { - // All other blocks are (initially) dominated - // by every block. - D[i].Set(&all) - } - } - - // Iteration until fixed point. - for changed := true; changed; { - changed = false - for i, b := range f.Blocks { - if i == 0 { - continue - } - // Compute intersection across predecessors. - var x big.Int - x.Set(&all) - for _, pred := range b.Preds { - x.And(&x, &D[pred.Index]) - } - if b == f.Exit { - for _, p := range f.Blocks { - if f.fakeExits.Has(p) { - x.And(&x, &D[p.Index]) - } - } - } - x.SetBit(&x, i, 1) // a block always dominates itself. - if D[i].Cmp(&x) != 0 { - D[i].Set(&x) - changed = true - } - } - } - - // Check the entire relation. O(n^2). - ok := true - for i := 0; i < n; i++ { - for j := 0; j < n; j++ { - b, c := f.Blocks[i], f.Blocks[j] - actual := b.Dominates(c) - expected := D[j].Bit(i) == 1 - if actual != expected { - fmt.Fprintf(os.Stderr, "dominates(%s, %s)==%t, want %t\n", b, c, actual, expected) - ok = false - } - } - } - - preorder := f.DomPreorder() - for _, b := range f.Blocks { - if got := preorder[b.dom.pre]; got != b { - fmt.Fprintf(os.Stderr, "preorder[%d]==%s, want %s\n", b.dom.pre, got, b) - ok = false - } - } - - if !ok { - panic("sanityCheckDomTree failed for " + f.String()) - } - -} - -// Printing functions ---------------------------------------- - -// printDomTree prints the dominator tree as text, using indentation. -// -//lint:ignore U1000 used during debugging -func printDomTreeText(buf *bytes.Buffer, v *BasicBlock, indent int) { - fmt.Fprintf(buf, "%*s%s\n", 4*indent, "", v) - for _, child := range v.dom.children { - printDomTreeText(buf, child, indent+1) - } -} - -// printDomTreeDot prints the dominator tree of f in AT&T GraphViz -// (.dot) format. -// -//lint:ignore U1000 used during debugging -func printDomTreeDot(buf io.Writer, f *Function) { - fmt.Fprintln(buf, "//", f) - fmt.Fprintln(buf, "digraph domtree {") - for i, b := range f.Blocks { - v := b.dom - fmt.Fprintf(buf, "\tn%d [label=\"%s (%d, %d)\",shape=\"rectangle\"];\n", v.pre, b, v.pre, v.post) - // TODO(adonovan): improve appearance of edges - // belonging to both dominator tree and CFG. - - // Dominator tree edge. - if i != 0 { - fmt.Fprintf(buf, "\tn%d -> n%d [style=\"solid\",weight=100];\n", v.idom.dom.pre, v.pre) - } - // CFG edges. - for _, pred := range b.Preds { - fmt.Fprintf(buf, "\tn%d -> n%d [style=\"dotted\",weight=0];\n", pred.dom.pre, v.pre) - } - - if f.fakeExits.Has(b) { - fmt.Fprintf(buf, "\tn%d -> n%d [style=\"dotted\",weight=0,color=red];\n", b.dom.pre, f.Exit.dom.pre) - } - } - fmt.Fprintln(buf, "}") -} - -// printDomTree prints the dominator tree as text, using indentation. -// -//lint:ignore U1000 used during debugging -func printPostDomTreeText(buf io.Writer, v *BasicBlock, indent int) { - fmt.Fprintf(buf, "%*s%s\n", 4*indent, "", v) - for _, child := range v.pdom.children { - printPostDomTreeText(buf, child, indent+1) - } -} - -// printDomTreeDot prints the dominator tree of f in AT&T GraphViz -// (.dot) format. -// -//lint:ignore U1000 used during debugging -func printPostDomTreeDot(buf io.Writer, f *Function) { - fmt.Fprintln(buf, "//", f) - fmt.Fprintln(buf, "digraph pdomtree {") - for _, b := range f.Blocks { - v := b.pdom - fmt.Fprintf(buf, "\tn%d [label=\"%s (%d, %d)\",shape=\"rectangle\"];\n", v.pre, b, v.pre, v.post) - // TODO(adonovan): improve appearance of edges - // belonging to both dominator tree and CFG. - - // Dominator tree edge. 
- if b != f.Exit { - fmt.Fprintf(buf, "\tn%d -> n%d [style=\"solid\",weight=100];\n", v.idom.pdom.pre, v.pre) - } - // CFG edges. - for _, pred := range b.Preds { - fmt.Fprintf(buf, "\tn%d -> n%d [style=\"dotted\",weight=0];\n", pred.pdom.pre, v.pre) - } - - if f.fakeExits.Has(b) { - fmt.Fprintf(buf, "\tn%d -> n%d [style=\"dotted\",weight=0,color=red];\n", b.dom.pre, f.Exit.dom.pre) - } - } - fmt.Fprintln(buf, "}") -} diff --git a/vendor/honnef.co/go/tools/go/ir/emit.go b/vendor/honnef.co/go/tools/go/ir/emit.go deleted file mode 100644 index f6a1ef373..000000000 --- a/vendor/honnef.co/go/tools/go/ir/emit.go +++ /dev/null @@ -1,548 +0,0 @@ -// Copyright 2013 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package ir - -// Helpers for emitting IR instructions. - -import ( - "fmt" - "go/ast" - "go/constant" - "go/token" - "go/types" - - "honnef.co/go/tools/go/types/typeutil" - - "golang.org/x/exp/typeparams" -) - -// emitNew emits to f a new (heap Alloc) instruction allocating an -// object of type typ. pos is the optional source location. -func emitNew(f *Function, typ types.Type, source ast.Node) *Alloc { - v := &Alloc{Heap: true} - v.setType(types.NewPointer(typ)) - f.emit(v, source) - return v -} - -// emitLoad emits to f an instruction to load the address addr into a -// new temporary, and returns the value so defined. -func emitLoad(f *Function, addr Value, source ast.Node) *Load { - v := &Load{X: addr} - v.setType(deref(addr.Type())) - f.emit(v, source) - return v -} - -func emitRecv(f *Function, ch Value, commaOk bool, typ types.Type, source ast.Node) Value { - recv := &Recv{ - Chan: ch, - CommaOk: commaOk, - } - recv.setType(typ) - return f.emit(recv, source) -} - -// emitDebugRef emits to f a DebugRef pseudo-instruction associating -// expression e with value v. -func emitDebugRef(f *Function, e ast.Expr, v Value, isAddr bool) { - ref := makeDebugRef(f, e, v, isAddr) - if ref == nil { - return - } - f.emit(ref, nil) -} - -func makeDebugRef(f *Function, e ast.Expr, v Value, isAddr bool) *DebugRef { - if !f.debugInfo() { - return nil // debugging not enabled - } - if v == nil || e == nil { - panic("nil") - } - var obj types.Object - e = unparen(e) - if id, ok := e.(*ast.Ident); ok { - if isBlankIdent(id) { - return nil - } - obj = f.Pkg.objectOf(id) - switch obj.(type) { - case *types.Nil, *types.Const, *types.Builtin: - return nil - } - } - return &DebugRef{ - X: v, - Expr: e, - IsAddr: isAddr, - object: obj, - } -} - -// emitArith emits to f code to compute the binary operation op(x, y) -// where op is an eager shift, logical or arithmetic operation. -// (Use emitCompare() for comparisons and Builder.logicalBinop() for -// non-eager operations.) -func emitArith(f *Function, op token.Token, x, y Value, t types.Type, source ast.Node) Value { - switch op { - case token.SHL, token.SHR: - x = emitConv(f, x, t, source) - // y may be signed or an 'untyped' constant. - // There is a runtime panic if y is signed and <0. Instead of inserting a check for y<0 - // and converting to an unsigned value (like the compiler) leave y as is. - if b, ok := y.Type().Underlying().(*types.Basic); ok && b.Info()&types.IsUntyped != 0 { - // Untyped conversion: - // Spec https://go.dev/ref/spec#Operators: - // The right operand in a shift expression must have integer type or be an untyped constant - // representable by a value of type uint. 
- y = emitConv(f, y, types.Typ[types.Uint], source) - } - - case token.ADD, token.SUB, token.MUL, token.QUO, token.REM, token.AND, token.OR, token.XOR, token.AND_NOT: - x = emitConv(f, x, t, source) - y = emitConv(f, y, t, source) - - default: - panic("illegal op in emitArith: " + op.String()) - - } - v := &BinOp{ - Op: op, - X: x, - Y: y, - } - v.setType(t) - return f.emit(v, source) -} - -// emitCompare emits to f code compute the boolean result of -// comparison comparison 'x op y'. -func emitCompare(f *Function, op token.Token, x, y Value, source ast.Node) Value { - xt := x.Type().Underlying() - yt := y.Type().Underlying() - - // Special case to optimise a tagless SwitchStmt so that - // these are equivalent - // switch { case e: ...} - // switch true { case e: ... } - // if e==true { ... } - // even in the case when e's type is an interface. - // TODO(adonovan): opt: generalise to x==true, false!=y, etc. - if x, ok := x.(*Const); ok && op == token.EQL && x.Value != nil && x.Value.Kind() == constant.Bool && constant.BoolVal(x.Value) { - if yt, ok := yt.(*types.Basic); ok && yt.Info()&types.IsBoolean != 0 { - return y - } - } - - if types.Identical(xt, yt) { - // no conversion necessary - } else if _, ok := xt.(*types.Interface); ok && !typeparams.IsTypeParam(x.Type()) { - y = emitConv(f, y, x.Type(), source) - } else if _, ok := yt.(*types.Interface); ok && !typeparams.IsTypeParam(y.Type()) { - x = emitConv(f, x, y.Type(), source) - } else if _, ok := x.(*Const); ok { - x = emitConv(f, x, y.Type(), source) - } else if _, ok := y.(*Const); ok { - y = emitConv(f, y, x.Type(), source) - //lint:ignore SA9003 no-op - } else { - // other cases, e.g. channels. No-op. - } - - v := &BinOp{ - Op: op, - X: x, - Y: y, - } - v.setType(tBool) - return f.emit(v, source) -} - -// isValuePreserving returns true if a conversion from ut_src to -// ut_dst is value-preserving, i.e. just a change of type. -// Precondition: neither argument is a named type. -func isValuePreserving(ut_src, ut_dst types.Type) bool { - // Identical underlying types? - if types.IdenticalIgnoreTags(ut_dst, ut_src) { - return true - } - - switch ut_dst.(type) { - case *types.Chan: - // Conversion between channel types? - _, ok := ut_src.(*types.Chan) - return ok - - case *types.Pointer: - // Conversion between pointers with identical base types? - _, ok := ut_src.(*types.Pointer) - return ok - } - return false -} - -// emitConv emits to f code to convert Value val to exactly type typ, -// and returns the converted value. Implicit conversions are required -// by language assignability rules in assignments, parameter passing, -// etc. -func emitConv(f *Function, val Value, t_dst types.Type, source ast.Node) Value { - t_src := val.Type() - - // Identical types? Conversion is a no-op. - if types.Identical(t_src, t_dst) { - return val - } - - ut_dst := t_dst.Underlying() - ut_src := t_src.Underlying() - - tset_dst := typeutil.NewTypeSet(ut_dst) - tset_src := typeutil.NewTypeSet(ut_src) - - // Just a change of type, but not value or representation? - if tset_src.All(func(termSrc *types.Term) bool { - return tset_dst.All(func(termDst *types.Term) bool { - return isValuePreserving(termSrc.Type().Underlying(), termDst.Type().Underlying()) - }) - }) { - c := &ChangeType{X: val} - c.setType(t_dst) - return f.emit(c, source) - } - - // Conversion to, or construction of a value of, an interface type? - if _, ok := ut_dst.(*types.Interface); ok && !typeparams.IsTypeParam(t_dst) { - // Assignment from one interface type to another? 
- if _, ok := ut_src.(*types.Interface); ok && !typeparams.IsTypeParam(t_src) { - c := &ChangeInterface{X: val} - c.setType(t_dst) - return f.emit(c, source) - } - - // Untyped nil constant? Return interface-typed nil constant. - if ut_src == tUntypedNil { - return emitConst(f, nilConst(t_dst)) - } - - // Convert (non-nil) "untyped" literals to their default type. - if t, ok := ut_src.(*types.Basic); ok && t.Info()&types.IsUntyped != 0 { - val = emitConv(f, val, types.Default(ut_src), source) - } - - f.Pkg.Prog.needMethodsOf(val.Type()) - mi := &MakeInterface{X: val} - mi.setType(t_dst) - return f.emit(mi, source) - } - - // Conversion of a compile-time constant value? Note that converting a constant to a type parameter never results in - // a constant value. - if c, ok := val.(*Const); ok { - if _, ok := ut_dst.(*types.Basic); ok || c.IsNil() { - // Conversion of a compile-time constant to - // another constant type results in a new - // constant of the destination type and - // (initially) the same abstract value. - // We don't truncate the value yet. - return emitConst(f, NewConst(c.Value, t_dst)) - } - - // We're converting from constant to non-constant type, - // e.g. string -> []byte/[]rune. - } - - // Conversion from slice to array pointer? - if tset_src.All(func(termSrc *types.Term) bool { - return tset_dst.All(func(termDst *types.Term) bool { - if slice, ok := termSrc.Type().Underlying().(*types.Slice); ok { - if ptr, ok := termDst.Type().Underlying().(*types.Pointer); ok { - if arr, ok := ptr.Elem().Underlying().(*types.Array); ok && types.Identical(slice.Elem(), arr.Elem()) { - return true - } - } - } - return false - }) - }) { - c := &SliceToArrayPointer{X: val} - c.setType(t_dst) - return f.emit(c, source) - } - - // Conversion from slice to array. This is almost the same as converting from slice to array pointer, then - // dereferencing the pointer. Except that a nil slice can be converted to [0]T, whereas converting a nil slice to - // (*[0]T) results in a nil pointer, dereferencing which would panic. To hide the extra branching we use a dedicated - // instruction, SliceToArray. - if tset_src.All(func(termSrc *types.Term) bool { - return tset_dst.All(func(termDst *types.Term) bool { - if slice, ok := termSrc.Type().Underlying().(*types.Slice); ok { - if arr, ok := termDst.Type().Underlying().(*types.Array); ok && types.Identical(slice.Elem(), arr.Elem()) { - return true - } - } - return false - }) - }) { - c := &SliceToArray{X: val} - c.setType(t_dst) - return f.emit(c, source) - } - - // A representation-changing conversion? - // At least one of {ut_src,ut_dst} must be *Basic. - // (The other may be []byte or []rune.) - ok1 := tset_src.Any(func(term *types.Term) bool { _, ok := term.Type().Underlying().(*types.Basic); return ok }) - ok2 := tset_dst.Any(func(term *types.Term) bool { _, ok := term.Type().Underlying().(*types.Basic); return ok }) - if ok1 || ok2 { - c := &Convert{X: val} - c.setType(t_dst) - return f.emit(c, source) - } - - panic(fmt.Sprintf("in %s: cannot convert %s (%s) to %s", f, val, val.Type(), t_dst)) -} - -// emitStore emits to f an instruction to store value val at location -// addr, applying implicit conversions as required by assignability rules. -func emitStore(f *Function, addr, val Value, source ast.Node) *Store { - s := &Store{ - Addr: addr, - Val: emitConv(f, val, deref(addr.Type()), source), - } - f.emit(s, source) - return s -} - -// emitJump emits to f a jump to target, and updates the control-flow graph. 
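The nil-slice distinction motivating the dedicated SliceToArray instruction above is visible in ordinary Go; a short illustration of the language semantics involved (assuming fmt is imported):

var s []int              // nil slice
a := [0]int(s)           // ok: yields the zero [0]int value
p := (*[0]int)(s)        // ok too, but p == nil: a nil slice has no underlying array
fmt.Println(a, p == nil) // [] true; dereferencing p would panic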
-// Postcondition: f.currentBlock is nil. -func emitJump(f *Function, target *BasicBlock, source ast.Node) *Jump { - b := f.currentBlock - j := new(Jump) - b.emit(j, source) - addEdge(b, target) - f.currentBlock = nil - return j -} - -// emitIf emits to f a conditional jump to tblock or fblock based on -// cond, and updates the control-flow graph. -// Postcondition: f.currentBlock is nil. -func emitIf(f *Function, cond Value, tblock, fblock *BasicBlock, source ast.Node) *If { - b := f.currentBlock - stmt := &If{Cond: cond} - b.emit(stmt, source) - addEdge(b, tblock) - addEdge(b, fblock) - f.currentBlock = nil - return stmt -} - -// emitExtract emits to f an instruction to extract the index'th -// component of tuple. It returns the extracted value. -func emitExtract(f *Function, tuple Value, index int, source ast.Node) Value { - e := &Extract{Tuple: tuple, Index: index} - e.setType(tuple.Type().(*types.Tuple).At(index).Type()) - return f.emit(e, source) -} - -// emitTypeAssert emits to f a type assertion value := x.(t) and -// returns the value. x.Type() must be an interface. -func emitTypeAssert(f *Function, x Value, t types.Type, source ast.Node) Value { - a := &TypeAssert{X: x, AssertedType: t} - a.setType(t) - return f.emit(a, source) -} - -// emitTypeTest emits to f a type test value,ok := x.(t) and returns -// a (value, ok) tuple. x.Type() must be an interface. -func emitTypeTest(f *Function, x Value, t types.Type, source ast.Node) Value { - a := &TypeAssert{ - X: x, - AssertedType: t, - CommaOk: true, - } - a.setType(types.NewTuple( - newVar("value", t), - varOk, - )) - return f.emit(a, source) -} - -// emitTailCall emits to f a function call in tail position. The -// caller is responsible for all fields of 'call' except its type. -// Intended for wrapper methods. -// Precondition: f does/will not use deferred procedure calls. -// Postcondition: f.currentBlock is nil. -func emitTailCall(f *Function, call *Call, source ast.Node) { - tresults := f.Signature.Results() - nr := tresults.Len() - if nr == 1 { - call.typ = tresults.At(0).Type() - } else { - call.typ = tresults - } - tuple := f.emit(call, source) - var ret Return - switch nr { - case 0: - // no-op - case 1: - ret.Results = []Value{tuple} - default: - for i := 0; i < nr; i++ { - v := emitExtract(f, tuple, i, source) - // TODO(adonovan): in principle, this is required: - // v = emitConv(f, o.Type, f.Signature.Results[i].Type) - // but in practice emitTailCall is only used when - // the types exactly match. - ret.Results = append(ret.Results, v) - } - } - - f.Exit = f.newBasicBlock("exit") - emitJump(f, f.Exit, source) - f.currentBlock = f.Exit - f.emit(&ret, source) - f.currentBlock = nil -} - -// emitImplicitSelections emits to f code to apply the sequence of -// implicit field selections specified by indices to base value v, and -// returns the selected value. -// -// If v is the address of a struct, the result will be the address of -// a field; if it is the value of a struct, the result will be the -// value of a field. -func emitImplicitSelections(f *Function, v Value, indices []int, source ast.Node) Value { - for _, index := range indices { - // We may have a generic type containing a pointer, or a pointer to a generic type containing a struct. A - // pointer to a generic containing a pointer to a struct shouldn't be possible because the outer pointer gets - // dereferenced implicitly before we get here. 
- fld := typeutil.CoreType(deref(v.Type())).Underlying().(*types.Struct).Field(index) - - if isPointer(v.Type()) { - instr := &FieldAddr{ - X: v, - Field: index, - } - instr.setType(types.NewPointer(fld.Type())) - v = f.emit(instr, source) - // Load the field's value iff indirectly embedded. - if isPointer(fld.Type()) { - v = emitLoad(f, v, source) - } - } else { - instr := &Field{ - X: v, - Field: index, - } - instr.setType(fld.Type()) - v = f.emit(instr, source) - } - } - return v -} - -// emitFieldSelection emits to f code to select the index'th field of v. -// -// If wantAddr, the input must be a pointer-to-struct and the result -// will be the field's address; otherwise the result will be the -// field's value. -// Ident id is used for position and debug info. -func emitFieldSelection(f *Function, v Value, index int, wantAddr bool, id *ast.Ident) Value { - // We may have a generic type containing a pointer, or a pointer to a generic type containing a struct. A - // pointer to a generic containing a pointer to a struct shouldn't be possible because the outer pointer gets - // dereferenced implicitly before we get here. - vut := typeutil.CoreType(deref(v.Type())).Underlying().(*types.Struct) - fld := vut.Field(index) - if isPointer(v.Type()) { - instr := &FieldAddr{ - X: v, - Field: index, - } - instr.setSource(id) - instr.setType(types.NewPointer(fld.Type())) - v = f.emit(instr, id) - // Load the field's value iff we don't want its address. - if !wantAddr { - v = emitLoad(f, v, id) - } - } else { - instr := &Field{ - X: v, - Field: index, - } - instr.setSource(id) - instr.setType(fld.Type()) - v = f.emit(instr, id) - } - emitDebugRef(f, id, v, wantAddr) - return v -} - -// zeroValue emits to f code to produce a zero value of type t, -// and returns it. 
-func zeroValue(f *Function, t types.Type, source ast.Node) Value { - return emitConst(f, zeroConst(t)) -} - -type constKey struct { - typ types.Type - value constant.Value -} - -func emitConst(f *Function, c Constant) Constant { - if f.consts == nil { - f.consts = map[constKey]constValue{} - } - - typ := c.Type() - var val constant.Value - switch c := c.(type) { - case *Const: - val = c.Value - case *ArrayConst, *GenericConst: - // These can only represent zero values, so all we need is the type - case *AggregateConst: - candidates, _ := f.aggregateConsts.At(c.typ) - for _, candidate := range candidates { - if c.equal(candidate) { - return candidate - } - } - - for i := range c.Values { - c.Values[i] = emitConst(f, c.Values[i].(Constant)) - } - - c.setBlock(f.Blocks[0]) - rands := c.Operands(nil) - updateOperandsReferrers(c, rands) - candidates = append(candidates, c) - f.aggregateConsts.Set(c.typ, candidates) - return c - - default: - panic(fmt.Sprintf("unexpected type %T", c)) - } - k := constKey{ - typ: typ, - value: val, - } - dup, ok := f.consts[k] - if ok { - return dup.c - } else { - c.setBlock(f.Blocks[0]) - f.consts[k] = constValue{ - c: c, - idx: len(f.consts), - } - rands := c.Operands(nil) - updateOperandsReferrers(c, rands) - return c - } -} diff --git a/vendor/honnef.co/go/tools/go/ir/exits.go b/vendor/honnef.co/go/tools/go/ir/exits.go deleted file mode 100644 index 03aa2866c..000000000 --- a/vendor/honnef.co/go/tools/go/ir/exits.go +++ /dev/null @@ -1,369 +0,0 @@ -package ir - -import ( - "go/types" -) - -func (b *builder) buildExits(fn *Function) { - if obj := fn.Object(); obj != nil { - switch obj.Pkg().Path() { - case "runtime": - switch obj.Name() { - case "exit": - fn.NoReturn = AlwaysExits - return - case "throw": - fn.NoReturn = AlwaysExits - return - case "Goexit": - fn.NoReturn = AlwaysUnwinds - return - } - case "go.uber.org/zap": - switch obj.(*types.Func).FullName() { - case "(*go.uber.org/zap.Logger).Fatal", - "(*go.uber.org/zap.SugaredLogger).Fatal", - "(*go.uber.org/zap.SugaredLogger).Fatalw", - "(*go.uber.org/zap.SugaredLogger).Fatalf": - // Technically, this method does not unconditionally exit - // the process. It dynamically calls a function stored in - // the logger. If the function is nil, it defaults to - // os.Exit. - // - // The main intent of this method is to terminate the - // process, and that's what the vast majority of people - // will use it for. We'll happily accept some false - // negatives to avoid a lot of false positives. - fn.NoReturn = AlwaysExits - case "(*go.uber.org/zap.Logger).Panic", - "(*go.uber.org/zap.SugaredLogger).Panicw", - "(*go.uber.org/zap.SugaredLogger).Panicf": - fn.NoReturn = AlwaysUnwinds - return - case "(*go.uber.org/zap.Logger).DPanic", - "(*go.uber.org/zap.SugaredLogger).DPanicf", - "(*go.uber.org/zap.SugaredLogger).DPanicw": - // These methods will only panic in development. - } - case "github.com/sirupsen/logrus": - switch obj.(*types.Func).FullName() { - case "(*github.com/sirupsen/logrus.Logger).Exit": - // Technically, this method does not unconditionally exit - // the process. It dynamically calls a function stored in - // the logger. If the function is nil, it defaults to - // os.Exit. - // - // The main intent of this method is to terminate the - // process, and that's what the vast majority of people - // will use it for. We'll happily accept some false - // negatives to avoid a lot of false positives. 
- fn.NoReturn = AlwaysExits - return - case "(*github.com/sirupsen/logrus.Logger).Panic", - "(*github.com/sirupsen/logrus.Logger).Panicf", - "(*github.com/sirupsen/logrus.Logger).Panicln": - - // These methods will always panic, but that's not - // statically known from the code alone, because they - // take a detour through the generic Log methods. - fn.NoReturn = AlwaysUnwinds - return - case "(*github.com/sirupsen/logrus.Entry).Panicf", - "(*github.com/sirupsen/logrus.Entry).Panicln": - - // Entry.Panic has an explicit panic, but Panicf and - // Panicln do not, relying fully on the generic Log - // method. - fn.NoReturn = AlwaysUnwinds - return - case "(*github.com/sirupsen/logrus.Logger).Log", - "(*github.com/sirupsen/logrus.Logger).Logf", - "(*github.com/sirupsen/logrus.Logger).Logln": - // TODO(dh): we cannot handle these cases. Whether they - // exit or unwind depends on the level, which is set - // via the first argument. We don't currently support - // call-site-specific exit information. - } - case "github.com/golang/glog": - switch obj.(*types.Func).FullName() { - case "github.com/golang/glog.Exit", - "github.com/golang/glog.ExitDepth", - "github.com/golang/glog.Exitf", - "github.com/golang/glog.Exitln", - "github.com/golang/glog.Fatal", - "github.com/golang/glog.FatalDepth", - "github.com/golang/glog.Fatalf", - "github.com/golang/glog.Fatalln": - // all of these call os.Exit after logging - fn.NoReturn = AlwaysExits - } - case "k8s.io/klog": - switch obj.(*types.Func).FullName() { - case "k8s.io/klog.Exit", - "k8s.io/klog.ExitDepth", - "k8s.io/klog.Exitf", - "k8s.io/klog.Exitln", - "k8s.io/klog.Fatal", - "k8s.io/klog.FatalDepth", - "k8s.io/klog.Fatalf", - "k8s.io/klog.Fatalln": - // all of these call os.Exit after logging - fn.NoReturn = AlwaysExits - } - case "k8s.io/klog/v2": - switch obj.(*types.Func).FullName() { - case "k8s.io/klog/v2.Exit", - "k8s.io/klog/v2.ExitDepth", - "k8s.io/klog/v2.Exitf", - "k8s.io/klog/v2.Exitln", - "k8s.io/klog/v2.Fatal", - "k8s.io/klog/v2.FatalDepth", - "k8s.io/klog/v2.Fatalf", - "k8s.io/klog/v2.Fatalln": - // all of these call os.Exit after logging - fn.NoReturn = AlwaysExits - } - } - } - - isRecoverCall := func(instr Instruction) bool { - if instr, ok := instr.(*Call); ok { - if builtin, ok := instr.Call.Value.(*Builtin); ok { - if builtin.Name() == "recover" { - return true - } - } - } - return false - } - - both := NewBlockSet(len(fn.Blocks)) - exits := NewBlockSet(len(fn.Blocks)) - unwinds := NewBlockSet(len(fn.Blocks)) - recovers := false - for _, u := range fn.Blocks { - for _, instr := range u.Instrs { - instrSwitch: - switch instr := instr.(type) { - case *Defer: - if recovers { - // avoid doing extra work, we already know that this function calls recover - continue - } - call := instr.Call.StaticCallee() - if call == nil { - // not a static call, so we can't be sure the - // deferred call isn't calling recover - recovers = true - break - } - if call.Package() == fn.Package() { - b.buildFunction(call) - } - if len(call.Blocks) == 0 { - // external function, we don't know what's - // happening inside it - // - // TODO(dh): this includes functions from - // imported packages, due to how go/analysis - // works. We could introduce another fact, - // like we've done for exiting and unwinding. 
- recovers = true - break - } - for _, y := range call.Blocks { - for _, instr2 := range y.Instrs { - if isRecoverCall(instr2) { - recovers = true - break instrSwitch - } - } - } - - case *Panic: - both.Add(u) - unwinds.Add(u) - - case CallInstruction: - switch instr.(type) { - case *Defer, *Call: - default: - continue - } - if instr.Common().IsInvoke() { - // give up - return - } - var call *Function - switch instr.Common().Value.(type) { - case *Function, *MakeClosure: - call = instr.Common().StaticCallee() - case *Builtin: - // the only builtins that affect control flow are - // panic and recover, and we've already handled - // those - continue - default: - // dynamic dispatch - return - } - // buildFunction is idempotent. if we're part of a - // (mutually) recursive call chain, then buildFunction - // will immediately return, and fn.WillExit will be false. - if call.Package() == fn.Package() { - b.buildFunction(call) - } - switch call.NoReturn { - case AlwaysExits: - both.Add(u) - exits.Add(u) - case AlwaysUnwinds: - both.Add(u) - unwinds.Add(u) - case NeverReturns: - both.Add(u) - } - } - } - } - - // depth-first search trying to find a path to the exit block that - // doesn't cross any of the blacklisted blocks - seen := NewBlockSet(len(fn.Blocks)) - var findPath func(root *BasicBlock, bl *BlockSet) bool - findPath = func(root *BasicBlock, bl *BlockSet) bool { - if root == fn.Exit { - return true - } - if seen.Has(root) { - return false - } - if bl.Has(root) { - return false - } - seen.Add(root) - for _, succ := range root.Succs { - if findPath(succ, bl) { - return true - } - } - return false - } - findPathEntry := func(root *BasicBlock, bl *BlockSet) bool { - if bl.Num() == 0 { - return true - } - seen.Clear() - return findPath(root, bl) - } - - if !findPathEntry(fn.Blocks[0], exits) { - fn.NoReturn = AlwaysExits - } else if !recovers { - // Only consider unwinding and "never returns" if we don't - // call recover. If we do call recover, then panics don't - // bubble up the stack. - - // TODO(dh): the position of the defer matters. If we - // unconditionally terminate before we defer a recover, then - // the recover is ineffective. - - if !findPathEntry(fn.Blocks[0], unwinds) { - fn.NoReturn = AlwaysUnwinds - } else if !findPathEntry(fn.Blocks[0], both) { - fn.NoReturn = NeverReturns - } - } -} - -func (b *builder) addUnreachables(fn *Function) { - var unreachable *BasicBlock - - for _, bb := range fn.Blocks { - instrLoop: - for i, instr := range bb.Instrs { - if instr, ok := instr.(*Call); ok { - var call *Function - switch v := instr.Common().Value.(type) { - case *Function: - call = v - case *MakeClosure: - call = v.Fn.(*Function) - } - if call == nil { - continue - } - if call.Package() == fn.Package() { - // make sure we have information on all functions in this package - b.buildFunction(call) - } - switch call.NoReturn { - case AlwaysExits: - // This call will cause the process to terminate. - // Remove remaining instructions in the block and - // replace any control flow with Unreachable. - for _, succ := range bb.Succs { - succ.removePred(bb) - } - bb.Succs = bb.Succs[:0] - - bb.Instrs = bb.Instrs[:i+1] - bb.emit(new(Unreachable), instr.Source()) - addEdge(bb, fn.Exit) - break instrLoop - - case AlwaysUnwinds: - // This call will cause the goroutine to terminate - // and defers to run (i.e. a panic or - // runtime.Goexit). Remove remaining instructions - // in the block and replace any control flow with - // an unconditional jump to the exit block. 
- for _, succ := range bb.Succs { - succ.removePred(bb) - } - bb.Succs = bb.Succs[:0] - - bb.Instrs = bb.Instrs[:i+1] - bb.emit(new(Jump), instr.Source()) - addEdge(bb, fn.Exit) - break instrLoop - - case NeverReturns: - // This call will either cause the goroutine to - // terminate, or the process to terminate. Remove - // remaining instructions in the block and replace - // any control flow with a conditional jump to - // either the exit block, or Unreachable. - for _, succ := range bb.Succs { - succ.removePred(bb) - } - bb.Succs = bb.Succs[:0] - - bb.Instrs = bb.Instrs[:i+1] - var c Call - c.Call.Value = &Builtin{ - name: "ir:noreturnWasPanic", - sig: types.NewSignatureType(nil, nil, nil, - types.NewTuple(), - types.NewTuple(anonVar(types.Typ[types.Bool])), - false, - ), - } - c.setType(types.Typ[types.Bool]) - - if unreachable == nil { - unreachable = fn.newBasicBlock("unreachable") - unreachable.emit(&Unreachable{}, nil) - addEdge(unreachable, fn.Exit) - } - - bb.emit(&c, instr.Source()) - bb.emit(&If{Cond: &c}, instr.Source()) - addEdge(bb, fn.Exit) - addEdge(bb, unreachable) - break instrLoop - } - } - } - } -} diff --git a/vendor/honnef.co/go/tools/go/ir/func.go b/vendor/honnef.co/go/tools/go/ir/func.go deleted file mode 100644 index 4449b405d..000000000 --- a/vendor/honnef.co/go/tools/go/ir/func.go +++ /dev/null @@ -1,957 +0,0 @@ -// Copyright 2013 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package ir - -// This file implements the Function and BasicBlock types. - -import ( - "bytes" - "fmt" - "go/ast" - "go/format" - "go/token" - "go/types" - "io" - "os" - "sort" - "strings" - - "honnef.co/go/tools/go/types/typeutil" -) - -// addEdge adds a control-flow graph edge from from to to. -func addEdge(from, to *BasicBlock) { - from.Succs = append(from.Succs, to) - to.Preds = append(to.Preds, from) -} - -// Control returns the last instruction in the block. -func (b *BasicBlock) Control() Instruction { - if len(b.Instrs) == 0 { - return nil - } - return b.Instrs[len(b.Instrs)-1] -} - -// SigmaFor returns the sigma node for v coming from pred. -func (b *BasicBlock) SigmaFor(v Value, pred *BasicBlock) *Sigma { - for _, instr := range b.Instrs { - sigma, ok := instr.(*Sigma) - if !ok { - // no more sigmas - return nil - } - if sigma.From == pred && sigma.X == v { - return sigma - } - } - return nil -} - -// Parent returns the function that contains block b. -func (b *BasicBlock) Parent() *Function { return b.parent } - -// String returns a human-readable label of this block. -// It is not guaranteed unique within the function. -func (b *BasicBlock) String() string { - return fmt.Sprintf("%d", b.Index) -} - -// emit appends an instruction to the current basic block. -// If the instruction defines a Value, it is returned. -func (b *BasicBlock) emit(i Instruction, source ast.Node) Value { - i.setSource(source) - i.setBlock(b) - b.Instrs = append(b.Instrs, i) - v, _ := i.(Value) - return v -} - -// predIndex returns the i such that b.Preds[i] == c or panics if -// there is none. -func (b *BasicBlock) predIndex(c *BasicBlock) int { - for i, pred := range b.Preds { - if pred == c { - return i - } - } - panic(fmt.Sprintf("no edge %s -> %s", c, b)) -} - -// succIndex returns the i such that b.Succs[i] == c or -1 if there is none. 
-func (b *BasicBlock) succIndex(c *BasicBlock) int { - for i, succ := range b.Succs { - if succ == c { - return i - } - } - return -1 -} - -// hasPhi returns true if b.Instrs contains φ-nodes. -func (b *BasicBlock) hasPhi() bool { - _, ok := b.Instrs[0].(*Phi) - return ok -} - -func (b *BasicBlock) Phis() []Instruction { - return b.phis() -} - -// phis returns the prefix of b.Instrs containing all the block's φ-nodes. -func (b *BasicBlock) phis() []Instruction { - for i, instr := range b.Instrs { - if _, ok := instr.(*Phi); !ok { - return b.Instrs[:i] - } - } - return nil // unreachable in well-formed blocks -} - -// replacePred replaces all occurrences of p in b's predecessor list with q. -// Ordinarily there should be at most one. -func (b *BasicBlock) replacePred(p, q *BasicBlock) { - for i, pred := range b.Preds { - if pred == p { - b.Preds[i] = q - } - } -} - -// replaceSucc replaces all occurrences of p in b's successor list with q. -// Ordinarily there should be at most one. -func (b *BasicBlock) replaceSucc(p, q *BasicBlock) { - for i, succ := range b.Succs { - if succ == p { - b.Succs[i] = q - } - } -} - -// removePred removes all occurrences of p in b's -// predecessor list and φ-nodes. -// Ordinarily there should be at most one. -func (b *BasicBlock) removePred(p *BasicBlock) { - phis := b.phis() - - // We must preserve edge order for φ-nodes. - j := 0 - for i, pred := range b.Preds { - if pred != p { - b.Preds[j] = b.Preds[i] - // Strike out φ-edge too. - for _, instr := range phis { - phi := instr.(*Phi) - phi.Edges[j] = phi.Edges[i] - } - j++ - } - } - // Nil out b.Preds[j:] and φ-edges[j:] to aid GC. - for i := j; i < len(b.Preds); i++ { - b.Preds[i] = nil - for _, instr := range phis { - instr.(*Phi).Edges[i] = nil - } - } - b.Preds = b.Preds[:j] - for _, instr := range phis { - phi := instr.(*Phi) - phi.Edges = phi.Edges[:j] - } -} - -// Destinations associated with unlabelled for/switch/select stmts. -// We push/pop one of these as we enter/leave each construct and for -// each BranchStmt we scan for the innermost target of the right type. -type targets struct { - tail *targets // rest of stack - _break *BasicBlock - _continue *BasicBlock - _fallthrough *BasicBlock -} - -// Destinations associated with a labelled block. -// We populate these as labels are encountered in forward gotos or -// labelled statements. -type lblock struct { - _goto *BasicBlock - _break *BasicBlock - _continue *BasicBlock -} - -// labelledBlock returns the branch target associated with the -// specified label, creating it if needed. -func (f *Function) labelledBlock(label *ast.Ident) *lblock { - obj := f.Pkg.info.ObjectOf(label) - if obj == nil { - // Blank label, as in '_:' - don't store to f.lblocks, this label can never be referred to; just return a fresh - // lbock. - return &lblock{_goto: f.newBasicBlock(label.Name)} - } - - lb := f.lblocks[obj] - if lb == nil { - lb = &lblock{_goto: f.newBasicBlock(label.Name)} - if f.lblocks == nil { - f.lblocks = make(map[types.Object]*lblock) - } - f.lblocks[obj] = lb - } - return lb -} - -// addParam adds a (non-escaping) parameter to f.Params of the -// specified name, type and source position. -func (f *Function) addParam(name string, typ types.Type, source ast.Node) *Parameter { - var b *BasicBlock - if len(f.Blocks) > 0 { - b = f.Blocks[0] - } - v := &Parameter{ - name: name, - } - v.setBlock(b) - v.setType(typ) - v.setSource(source) - f.Params = append(f.Params, v) - if b != nil { - // There may be no blocks if this function has no body. 
We - // still create params, but aren't interested in the - // instruction. - f.Blocks[0].Instrs = append(f.Blocks[0].Instrs, v) - } - return v -} - -func (f *Function) addParamObj(obj types.Object, source ast.Node) *Parameter { - name := obj.Name() - if name == "" { - name = fmt.Sprintf("arg%d", len(f.Params)) - } - param := f.addParam(name, obj.Type(), source) - param.object = obj - return param -} - -// addSpilledParam declares a parameter that is pre-spilled to the -// stack; the function body will load/store the spilled location. -// Subsequent lifting will eliminate spills where possible. -func (f *Function) addSpilledParam(obj types.Object, source ast.Node) { - param := f.addParamObj(obj, source) - spill := &Alloc{} - spill.setType(types.NewPointer(obj.Type())) - spill.source = source - f.objects[obj] = spill - f.Locals = append(f.Locals, spill) - f.emit(spill, source) - emitStore(f, spill, param, source) - // f.emit(&Store{Addr: spill, Val: param}) -} - -// startBody initializes the function prior to generating IR code for its body. -// Precondition: f.Type() already set. -func (f *Function) startBody() { - entry := f.newBasicBlock("entry") - f.currentBlock = entry - f.objects = make(map[types.Object]Value) // needed for some synthetics, e.g. init -} - -func (f *Function) blockset(i int) *BlockSet { - bs := &f.blocksets[i] - if len(bs.values) != len(f.Blocks) { - if cap(bs.values) >= len(f.Blocks) { - bs.values = bs.values[:len(f.Blocks)] - bs.Clear() - } else { - bs.values = make([]bool, len(f.Blocks)) - } - } else { - bs.Clear() - } - return bs -} - -func (f *Function) exitBlock() { - old := f.currentBlock - - f.Exit = f.newBasicBlock("exit") - f.currentBlock = f.Exit - - ret := f.results() - results := make([]Value, len(ret)) - // Run function calls deferred in this - // function when explicitly returning from it. - f.emit(new(RunDefers), nil) - for i, r := range ret { - results[i] = emitLoad(f, r, nil) - } - - f.emit(&Return{Results: results}, nil) - f.currentBlock = old -} - -// createSyntacticParams populates f.Params and generates code (spills -// and named result locals) for all the parameters declared in the -// syntax. In addition it populates the f.objects mapping. -// -// Preconditions: -// f.startBody() was called. -// Postcondition: -// len(f.Params) == len(f.Signature.Params) + (f.Signature.Recv() ? 1 : 0) -func (f *Function) createSyntacticParams(recv *ast.FieldList, functype *ast.FuncType) { - // Receiver (at most one inner iteration). - if recv != nil { - for _, field := range recv.List { - for _, n := range field.Names { - f.addSpilledParam(f.Pkg.info.Defs[n], n) - } - // Anonymous receiver? No need to spill. - if field.Names == nil { - f.addParamObj(f.Signature.Recv(), field) - } - } - } - - // Parameters. - if functype.Params != nil { - n := len(f.Params) // 1 if has recv, 0 otherwise - for _, field := range functype.Params.List { - for _, n := range field.Names { - f.addSpilledParam(f.Pkg.info.Defs[n], n) - } - // Anonymous parameter? No need to spill. - if field.Names == nil { - f.addParamObj(f.Signature.Params().At(len(f.Params)-n), field) - } - } - } - - // Named results. - if functype.Results != nil { - for _, field := range functype.Results.List { - // Implicit "var" decl of locals for named results. 
- for _, n := range field.Names { - f.namedResults = append(f.namedResults, f.addLocalForIdent(n)) - } - } - - if len(f.namedResults) == 0 { - sig := f.Signature.Results() - for i := 0; i < sig.Len(); i++ { - // XXX position information - v := f.addLocal(sig.At(i).Type(), nil) - f.implicitResults = append(f.implicitResults, v) - } - } - } -} - -func numberNodes(f *Function) { - var base ID - for _, b := range f.Blocks { - for _, instr := range b.Instrs { - if instr == nil { - continue - } - base++ - instr.setID(base) - } - } -} - -func updateOperandsReferrers(instr Instruction, ops []*Value) { - for _, op := range ops { - if r := *op; r != nil { - if refs := (*op).Referrers(); refs != nil { - if len(*refs) == 0 { - // per median, each value has two referrers, so we can avoid one call into growslice - // - // Note: we experimented with allocating - // sequential scratch space, but we - // couldn't find a value that gave better - // performance than making many individual - // allocations - *refs = make([]Instruction, 1, 2) - (*refs)[0] = instr - } else { - *refs = append(*refs, instr) - } - } - } - } -} - -// buildReferrers populates the def/use information in all non-nil -// Value.Referrers slice. -// Precondition: all such slices are initially empty. -func buildReferrers(f *Function) { - var rands []*Value - - for _, b := range f.Blocks { - for _, instr := range b.Instrs { - rands = instr.Operands(rands[:0]) // recycle storage - updateOperandsReferrers(instr, rands) - } - } - - for _, c := range f.consts { - rands = c.c.Operands(rands[:0]) - updateOperandsReferrers(c.c, rands) - } -} - -func (f *Function) emitConsts() { - defer func() { - f.consts = nil - f.aggregateConsts = typeutil.Map[[]*AggregateConst]{} - }() - - if len(f.Blocks) == 0 { - return - } - - // TODO(dh): our deduplication only works on booleans and - // integers. other constants are represented as pointers to - // things. - head := make([]constValue, 0, len(f.consts)) - for _, c := range f.consts { - if len(*c.c.Referrers()) == 0 { - // TODO(dh): killing a const may make other consts dead, too - killInstruction(c.c) - } else { - head = append(head, c) - } - } - sort.Slice(head, func(i, j int) bool { - return head[i].idx < head[j].idx - }) - entry := f.Blocks[0] - instrs := make([]Instruction, 0, len(entry.Instrs)+len(head)) - for _, c := range head { - instrs = append(instrs, c.c) - } - f.aggregateConsts.Iterate(func(key types.Type, value []*AggregateConst) { - for _, c := range value { - instrs = append(instrs, c) - } - }) - - instrs = append(instrs, entry.Instrs...) - entry.Instrs = instrs -} - -// buildFakeExits ensures that every block in the function is -// reachable in reverse from the Exit block. This is required to build -// a full post-dominator tree, and to ensure the exit block's -// inclusion in the dominator tree. 
-func buildFakeExits(fn *Function) { - // Find back-edges via forward DFS - fn.fakeExits = BlockSet{values: make([]bool, len(fn.Blocks))} - seen := fn.blockset(0) - backEdges := fn.blockset(1) - - var dfs func(b *BasicBlock) - dfs = func(b *BasicBlock) { - if !seen.Add(b) { - backEdges.Add(b) - return - } - for _, pred := range b.Succs { - dfs(pred) - } - } - dfs(fn.Blocks[0]) -buildLoop: - for { - seen := fn.blockset(2) - var dfs func(b *BasicBlock) - dfs = func(b *BasicBlock) { - if !seen.Add(b) { - return - } - for _, pred := range b.Preds { - dfs(pred) - } - if b == fn.Exit { - for _, b := range fn.Blocks { - if fn.fakeExits.Has(b) { - dfs(b) - } - } - } - } - dfs(fn.Exit) - - for _, b := range fn.Blocks { - if !seen.Has(b) && backEdges.Has(b) { - // Block b is not reachable from the exit block. Add a - // fake jump from b to exit, then try again. Note that we - // only add one fake edge at a time, as it may make - // multiple blocks reachable. - // - // We only consider those blocks that have back edges. - // Any unreachable block that doesn't have a back edge - // must flow into a loop, which by definition has a - // back edge. Thus, by looking for loops, we should - // need fewer fake edges overall. - fn.fakeExits.Add(b) - continue buildLoop - } - } - - break - } -} - -// finishBody() finalizes the function after IR code generation of its body. -func (f *Function) finishBody() { - f.objects = nil - f.currentBlock = nil - f.lblocks = nil - - // Remove from f.Locals any Allocs that escape to the heap. - j := 0 - for _, l := range f.Locals { - if !l.Heap { - f.Locals[j] = l - j++ - } - } - // Nil out f.Locals[j:] to aid GC. - for i := j; i < len(f.Locals); i++ { - f.Locals[i] = nil - } - f.Locals = f.Locals[:j] - - optimizeBlocks(f) - buildFakeExits(f) - buildReferrers(f) - buildDomTree(f) - buildPostDomTree(f) - - if f.Prog.mode&NaiveForm == 0 { - for lift(f) { - } - if doSimplifyConstantCompositeValues { - for simplifyConstantCompositeValues(f) { - } - } - } - - // emit constants after lifting, because lifting may produce new constants, but before other variable splitting, - // because it expects constants to have been deduplicated. - f.emitConsts() - - if f.Prog.mode&SplitAfterNewInformation != 0 { - splitOnNewInformation(f.Blocks[0], &StackMap{}) - } - - f.namedResults = nil // (used by lifting) - f.implicitResults = nil - - numberNodes(f) - - defer f.wr.Close() - f.wr.WriteFunc("start", "start", f) - - if f.Prog.mode&PrintFunctions != 0 { - printMu.Lock() - f.WriteTo(os.Stdout) - printMu.Unlock() - } - - if f.Prog.mode&SanityCheckFunctions != 0 { - mustSanityCheck(f, nil) - } -} - -func isUselessPhi(phi *Phi) (Value, bool) { - var v0 Value - for _, e := range phi.Edges { - if e == phi { - continue - } - if v0 == nil { - v0 = e - } - if v0 != e { - if v0, ok := v0.(*Const); ok { - if e, ok := e.(*Const); ok { - if v0.typ == e.typ && v0.Value == e.Value { - continue - } - } - } - return nil, false - } - } - return v0, true -} - -func (f *Function) RemoveNilBlocks() { - f.removeNilBlocks() -} - -// removeNilBlocks eliminates nils from f.Blocks and updates each -// BasicBlock.Index. Use this after any pass that may delete blocks. -func (f *Function) removeNilBlocks() { - j := 0 - for _, b := range f.Blocks { - if b != nil { - b.Index = j - f.Blocks[j] = b - j++ - } - } - // Nil out f.Blocks[j:] to aid GC. - for i := j; i < len(f.Blocks); i++ { - f.Blocks[i] = nil - } - f.Blocks = f.Blocks[:j] -} - -// SetDebugMode sets the debug mode for package pkg. 
If true, all its -// functions will include full debug info. This greatly increases the -// size of the instruction stream, and causes Functions to depend upon -// the ASTs, potentially keeping them live in memory for longer. -func (pkg *Package) SetDebugMode(debug bool) { - // TODO(adonovan): do we want ast.File granularity? - pkg.debug = debug -} - -// debugInfo reports whether debug info is wanted for this function. -func (f *Function) debugInfo() bool { - return f.Pkg != nil && f.Pkg.debug -} - -// addNamedLocal creates a local variable, adds it to function f and -// returns it. Its name and type are taken from obj. Subsequent -// calls to f.lookup(obj) will return the same local. -func (f *Function) addNamedLocal(obj types.Object, source ast.Node) *Alloc { - l := f.addLocal(obj.Type(), source) - f.objects[obj] = l - return l -} - -func (f *Function) addLocalForIdent(id *ast.Ident) *Alloc { - return f.addNamedLocal(f.Pkg.info.Defs[id], id) -} - -// addLocal creates an anonymous local variable of type typ, adds it -// to function f and returns it. pos is the optional source location. -func (f *Function) addLocal(typ types.Type, source ast.Node) *Alloc { - v := &Alloc{} - v.setType(types.NewPointer(typ)) - f.Locals = append(f.Locals, v) - f.emit(v, source) - return v -} - -// lookup returns the address of the named variable identified by obj -// that is local to function f or one of its enclosing functions. -// If escaping, the reference comes from a potentially escaping pointer -// expression and the referent must be heap-allocated. -func (f *Function) lookup(obj types.Object, escaping bool) Value { - if v, ok := f.objects[obj]; ok { - if alloc, ok := v.(*Alloc); ok && escaping { - alloc.Heap = true - } - return v // function-local var (address) - } - - // Definition must be in an enclosing function; - // plumb it through intervening closures. - if f.parent == nil { - panic("no ir.Value for " + obj.String()) - } - outer := f.parent.lookup(obj, true) // escaping - v := &FreeVar{ - name: obj.Name(), - typ: outer.Type(), - outer: outer, - parent: f, - } - f.objects[obj] = v - f.FreeVars = append(f.FreeVars, v) - return v -} - -// emit emits the specified instruction to function f. -func (f *Function) emit(instr Instruction, source ast.Node) Value { - return f.currentBlock.emit(instr, source) -} - -// RelString returns the full name of this function, qualified by -// package name, receiver type, etc. -// -// The specific formatting rules are not guaranteed and may change. -// -// Examples: -// -// "math.IsNaN" // a package-level function -// "(*bytes.Buffer).Bytes" // a declared method or a wrapper -// "(*bytes.Buffer).Bytes$thunk" // thunk (func wrapping method; receiver is param 0) -// "(*bytes.Buffer).Bytes$bound" // bound (func wrapping method; receiver supplied by closure) -// "main.main$1" // an anonymous function in main -// "main.init#1" // a declared init function -// "main.init" // the synthesized package initializer -// -// When these functions are referred to from within the same package -// (i.e. from == f.Pkg.Object), they are rendered without the package path. -// For example: "IsNaN", "(*Buffer).Bytes", etc. -// -// All non-synthetic functions have distinct package-qualified names. -// (But two methods may have the same name "(T).f" if one is a synthetic -// wrapper promoting a non-exported method "f" from another package; in -// that case, the strings are equal but the identifiers "f" are distinct.) -func (f *Function) RelString(from *types.Package) string { - // Anonymous? 
- if f.parent != nil { - // An anonymous function's Name() looks like "parentName$1", - // but its String() should include the type/package/etc. - parent := f.parent.RelString(from) - for i, anon := range f.parent.AnonFuncs { - if anon == f { - return fmt.Sprintf("%s$%d", parent, 1+i) - } - } - - return f.name // should never happen - } - - // Method (declared or wrapper)? - if recv := f.Signature.Recv(); recv != nil { - return f.relMethod(from, recv.Type()) - } - - // Thunk? - if f.method != nil { - return f.relMethod(from, f.method.Recv()) - } - - // Bound? - if len(f.FreeVars) == 1 && strings.HasSuffix(f.name, "$bound") { - return f.relMethod(from, f.FreeVars[0].Type()) - } - - // Package-level function? - // Prefix with package name for cross-package references only. - if p := f.pkg(); p != nil && p != from { - return fmt.Sprintf("%s.%s", p.Path(), f.name) - } - - // Unknown. - return f.name -} - -func (f *Function) relMethod(from *types.Package, recv types.Type) string { - return fmt.Sprintf("(%s).%s", relType(recv, from), f.name) -} - -// writeSignature writes to buf the signature sig in declaration syntax. -func writeSignature(buf *bytes.Buffer, from *types.Package, name string, sig *types.Signature, params []*Parameter) { - buf.WriteString("func ") - if recv := sig.Recv(); recv != nil { - buf.WriteString("(") - if n := params[0].Name(); n != "" { - buf.WriteString(n) - buf.WriteString(" ") - } - types.WriteType(buf, params[0].Type(), types.RelativeTo(from)) - buf.WriteString(") ") - } - buf.WriteString(name) - types.WriteSignature(buf, sig, types.RelativeTo(from)) -} - -func (f *Function) pkg() *types.Package { - if f.Pkg != nil { - return f.Pkg.Pkg - } - return nil -} - -var _ io.WriterTo = (*Function)(nil) // *Function implements io.Writer - -func (f *Function) WriteTo(w io.Writer) (int64, error) { - var buf bytes.Buffer - WriteFunction(&buf, f) - n, err := w.Write(buf.Bytes()) - return int64(n), err -} - -// WriteFunction writes to buf a human-readable "disassembly" of f. -func WriteFunction(buf *bytes.Buffer, f *Function) { - fmt.Fprintf(buf, "# Name: %s\n", f.String()) - if f.Pkg != nil { - fmt.Fprintf(buf, "# Package: %s\n", f.Pkg.Pkg.Path()) - } - if syn := f.Synthetic; syn != 0 { - fmt.Fprintln(buf, "# Synthetic:", syn) - } - if pos := f.Pos(); pos.IsValid() { - fmt.Fprintf(buf, "# Location: %s\n", f.Prog.Fset.Position(pos)) - } - - if f.parent != nil { - fmt.Fprintf(buf, "# Parent: %s\n", f.parent.Name()) - } - - from := f.pkg() - - if f.FreeVars != nil { - buf.WriteString("# Free variables:\n") - for i, fv := range f.FreeVars { - fmt.Fprintf(buf, "# % 3d:\t%s %s\n", i, fv.Name(), relType(fv.Type(), from)) - } - } - - if len(f.Locals) > 0 { - buf.WriteString("# Locals:\n") - for i, l := range f.Locals { - fmt.Fprintf(buf, "# % 3d:\t%s %s\n", i, l.Name(), relType(deref(l.Type()), from)) - } - } - writeSignature(buf, from, f.Name(), f.Signature, f.Params) - buf.WriteString(":\n") - - if f.Blocks == nil { - buf.WriteString("\t(external)\n") - } - - for _, b := range f.Blocks { - if b == nil { - // Corrupt CFG. 
- fmt.Fprintf(buf, ".nil:\n") - continue - } - fmt.Fprintf(buf, "b%d:", b.Index) - if len(b.Preds) > 0 { - fmt.Fprint(buf, " ←") - for _, pred := range b.Preds { - fmt.Fprintf(buf, " b%d", pred.Index) - } - } - if b.Comment != "" { - fmt.Fprintf(buf, " # %s", b.Comment) - } - buf.WriteByte('\n') - - if false { // CFG debugging - fmt.Fprintf(buf, "\t# CFG: %s --> %s --> %s\n", b.Preds, b, b.Succs) - } - - buf2 := &bytes.Buffer{} - for _, instr := range b.Instrs { - buf.WriteString("\t") - switch v := instr.(type) { - case Value: - // Left-align the instruction. - if name := v.Name(); name != "" { - fmt.Fprintf(buf, "%s = ", name) - } - buf.WriteString(instr.String()) - case nil: - // Be robust against bad transforms. - buf.WriteString("") - default: - buf.WriteString(instr.String()) - } - if instr != nil && instr.Comment() != "" { - buf.WriteString(" # ") - buf.WriteString(instr.Comment()) - } - buf.WriteString("\n") - - if f.Prog.mode&PrintSource != 0 { - if s := instr.Source(); s != nil { - buf2.Reset() - format.Node(buf2, f.Prog.Fset, s) - for { - line, err := buf2.ReadString('\n') - if len(line) == 0 { - break - } - buf.WriteString("\t\t> ") - buf.WriteString(line) - if line[len(line)-1] != '\n' { - buf.WriteString("\n") - } - if err != nil { - break - } - } - } - } - } - buf.WriteString("\n") - } -} - -// newBasicBlock adds to f a new basic block and returns it. It does -// not automatically become the current block for subsequent calls to emit. -// comment is an optional string for more readable debugging output. -func (f *Function) newBasicBlock(comment string) *BasicBlock { - var instrs []Instruction - if len(f.functionBody.scratchInstructions) > 0 { - instrs = f.functionBody.scratchInstructions[0:0:avgInstructionsPerBlock] - f.functionBody.scratchInstructions = f.functionBody.scratchInstructions[avgInstructionsPerBlock:] - } else { - instrs = make([]Instruction, 0, avgInstructionsPerBlock) - } - - b := &BasicBlock{ - Index: len(f.Blocks), - Comment: comment, - parent: f, - Instrs: instrs, - } - b.Succs = b.succs2[:0] - f.Blocks = append(f.Blocks, b) - return b -} - -// NewFunction returns a new synthetic Function instance belonging to -// prog, with its name and signature fields set as specified. -// -// The caller is responsible for initializing the remaining fields of -// the function object, e.g. Pkg, Params, Blocks. -// -// It is practically impossible for clients to construct well-formed -// IR functions/packages/programs directly, so we assume this is the -// job of the Builder alone. NewFunction exists to provide clients a -// little flexibility. For example, analysis tools may wish to -// construct fake Functions for the root of the callgraph, a fake -// "reflect" package, etc. -// -// TODO(adonovan): think harder about the API here. 
-func (prog *Program) NewFunction(name string, sig *types.Signature, provenance Synthetic) *Function { - return &Function{Prog: prog, name: name, Signature: sig, Synthetic: provenance} -} - -//lint:ignore U1000 we may make use of this for functions loaded from export data -type extentNode [2]token.Pos - -func (n extentNode) Pos() token.Pos { return n[0] } -func (n extentNode) End() token.Pos { return n[1] } - -func (f *Function) initHTML(name string) { - if name == "" { - return - } - if rel := f.RelString(nil); rel == name { - f.wr = NewHTMLWriter("ir.html", rel, "") - } -} - -func killInstruction(instr Instruction) { - ops := instr.Operands(nil) - for _, op := range ops { - if refs := (*op).Referrers(); refs != nil { - *refs = removeInstr(*refs, instr) - } - } -} diff --git a/vendor/honnef.co/go/tools/go/ir/html.go b/vendor/honnef.co/go/tools/go/ir/html.go deleted file mode 100644 index 35b421a70..000000000 --- a/vendor/honnef.co/go/tools/go/ir/html.go +++ /dev/null @@ -1,1124 +0,0 @@ -// Copyright 2015 The Go Authors. All rights reserved. -// Copyright 2019 Dominik Honnef. All rights reserved. - -package ir - -import ( - "bytes" - "fmt" - "go/types" - "html" - "io" - "log" - "os" - "os/exec" - "path/filepath" - "reflect" - "sort" - "strings" -) - -func live(f *Function) []bool { - max := 0 - var ops []*Value - - for _, b := range f.Blocks { - for _, instr := range b.Instrs { - if int(instr.ID()) > max { - max = int(instr.ID()) - } - } - } - - out := make([]bool, max+1) - var q []Node - for _, b := range f.Blocks { - for _, instr := range b.Instrs { - switch instr.(type) { - case *BlankStore, *Call, *ConstantSwitch, *Defer, *Go, *If, *Jump, *MapUpdate, *Next, *Panic, *Recv, *Return, *RunDefers, *Send, *Store, *Unreachable: - out[instr.ID()] = true - q = append(q, instr) - } - } - } - - for len(q) > 0 { - v := q[len(q)-1] - q = q[:len(q)-1] - for _, op := range v.Operands(ops) { - if *op == nil { - continue - } - if !out[(*op).ID()] { - out[(*op).ID()] = true - q = append(q, *op) - } - } - } - - return out -} - -type funcPrinter interface { - startBlock(b *BasicBlock, reachable bool) - endBlock(b *BasicBlock) - value(v Node, live bool) - startDepCycle() - endDepCycle() - named(n string, vals []Value) -} - -func namedValues(f *Function) map[types.Object][]Value { - names := map[types.Object][]Value{} - for _, b := range f.Blocks { - for _, instr := range b.Instrs { - if instr, ok := instr.(*DebugRef); ok { - if obj := instr.object; obj != nil { - names[obj] = append(names[obj], instr.X) - } - } - } - } - // XXX deduplicate values - return names -} - -func fprintFunc(p funcPrinter, f *Function) { - // XXX does our IR form preserve unreachable blocks? 
- // reachable, live := findlive(f) - - l := live(f) - for _, b := range f.Blocks { - // XXX - // p.startBlock(b, reachable[b.Index]) - p.startBlock(b, true) - - end := len(b.Instrs) - 1 - if end < 0 { - end = 0 - } - for _, v := range b.Instrs[:end] { - if _, ok := v.(*DebugRef); !ok { - p.value(v, l[v.ID()]) - } - } - p.endBlock(b) - } - - names := namedValues(f) - keys := make([]types.Object, 0, len(names)) - for key := range names { - keys = append(keys, key) - } - sort.Slice(keys, func(i, j int) bool { - return keys[i].Pos() < keys[j].Pos() - }) - for _, key := range keys { - p.named(key.Name(), names[key]) - } -} - -func opName(v Node) string { - switch v := v.(type) { - case *Call: - if v.Common().IsInvoke() { - return "Invoke" - } - return "Call" - case *Alloc: - if v.Heap { - return "HeapAlloc" - } - return "StackAlloc" - case *Select: - if v.Blocking { - return "SelectBlocking" - } - return "SelectNonBlocking" - default: - return reflect.ValueOf(v).Type().Elem().Name() - } -} - -type HTMLWriter struct { - w io.WriteCloser - path string - dot *dotWriter -} - -func NewHTMLWriter(path string, funcname, cfgMask string) *HTMLWriter { - out, err := os.OpenFile(path, os.O_WRONLY|os.O_CREATE|os.O_TRUNC, 0644) - if err != nil { - log.Fatalf("%v", err) - } - pwd, err := os.Getwd() - if err != nil { - log.Fatalf("%v", err) - } - html := HTMLWriter{w: out, path: filepath.Join(pwd, path)} - html.dot = newDotWriter() - html.start(funcname) - return &html -} - -func (w *HTMLWriter) start(name string) { - if w == nil { - return - } - w.WriteString("") - w.WriteString(` - - - - - -`) - w.WriteString("") - w.WriteString("

") - w.WriteString(html.EscapeString(name)) - w.WriteString("

") - w.WriteString(` -help -
- -

-Click on a value or block to toggle highlighting of that value/block -and its uses. (Values and blocks are highlighted by ID, and IDs of -dead items may be reused, so not all highlights necessarily correspond -to the clicked item.) -

- -

-Faded out values and blocks are dead code that has not been eliminated. -

- -

-Values printed in italics have a dependency cycle. -

- -

-CFG: Dashed edge is for unlikely branches. Blue color is for backward edges. -Edge with a dot means that this edge follows the order in which blocks were laid out. -

- -
-`) - w.WriteString("") - w.WriteString("") -} - -func (w *HTMLWriter) Close() { - if w == nil { - return - } - io.WriteString(w.w, "") - io.WriteString(w.w, "
") - io.WriteString(w.w, "") - io.WriteString(w.w, "") - w.w.Close() - fmt.Printf("dumped IR to %v\n", w.path) -} - -// WriteFunc writes f in a column headed by title. -// phase is used for collapsing columns and should be unique across the table. -func (w *HTMLWriter) WriteFunc(phase, title string, f *Function) { - if w == nil { - return - } - w.WriteColumn(phase, title, "", funcHTML(f, phase, w.dot)) -} - -// WriteColumn writes raw HTML in a column headed by title. -// It is intended for pre- and post-compilation log output. -func (w *HTMLWriter) WriteColumn(phase, title, class, html string) { - if w == nil { - return - } - id := strings.Replace(phase, " ", "-", -1) - // collapsed column - w.Printf("
%v
", id, phase) - - if class == "" { - w.Printf("", id) - } else { - w.Printf("", id, class) - } - w.WriteString("

" + title + "

") - w.WriteString(html) - w.WriteString("") -} - -func (w *HTMLWriter) Printf(msg string, v ...interface{}) { - if _, err := fmt.Fprintf(w.w, msg, v...); err != nil { - log.Fatalf("%v", err) - } -} - -func (w *HTMLWriter) WriteString(s string) { - if _, err := io.WriteString(w.w, s); err != nil { - log.Fatalf("%v", err) - } -} - -func valueHTML(v Node) string { - if v == nil { - return "<nil>" - } - // TODO: Using the value ID as the class ignores the fact - // that value IDs get recycled and that some values - // are transmuted into other values. - class := fmt.Sprintf("t%d", v.ID()) - var label string - switch v := v.(type) { - case *Function: - label = v.RelString(nil) - case *Builtin: - label = v.Name() - default: - label = class - } - return fmt.Sprintf("%s", class, label) -} - -func valueLongHTML(v Node) string { - // TODO: Any intra-value formatting? - // I'm wary of adding too much visual noise, - // but a little bit might be valuable. - // We already have visual noise in the form of punctuation - // maybe we could replace some of that with formatting. - s := fmt.Sprintf("", v.ID()) - - linenumber := "(?)" - if v.Pos().IsValid() { - line := v.Parent().Prog.Fset.Position(v.Pos()).Line - linenumber = fmt.Sprintf("(%d)", line, line) - } - - s += fmt.Sprintf("%s %s = %s", valueHTML(v), linenumber, opName(v)) - - if v, ok := v.(Value); ok { - s += " <" + html.EscapeString(v.Type().String()) + ">" - } - - switch v := v.(type) { - case *Parameter: - s += fmt.Sprintf(" {%s}", html.EscapeString(v.name)) - case *BinOp: - s += fmt.Sprintf(" {%s}", html.EscapeString(v.Op.String())) - case *UnOp: - s += fmt.Sprintf(" {%s}", html.EscapeString(v.Op.String())) - case *Extract: - name := v.Tuple.Type().(*types.Tuple).At(v.Index).Name() - s += fmt.Sprintf(" [%d] (%s)", v.Index, name) - case *Field: - st := v.X.Type().Underlying().(*types.Struct) - // Be robust against a bad index. - name := "?" - if 0 <= v.Field && v.Field < st.NumFields() { - name = st.Field(v.Field).Name() - } - s += fmt.Sprintf(" [%d] (%s)", v.Field, name) - case *FieldAddr: - st := deref(v.X.Type()).Underlying().(*types.Struct) - // Be robust against a bad index. - name := "?" - if 0 <= v.Field && v.Field < st.NumFields() { - name = st.Field(v.Field).Name() - } - - s += fmt.Sprintf(" [%d] (%s)", v.Field, name) - case *Recv: - s += fmt.Sprintf(" {%t}", v.CommaOk) - case *Call: - if v.Common().IsInvoke() { - s += fmt.Sprintf(" {%s}", html.EscapeString(v.Common().Method.FullName())) - } - case *Const: - if v.Value == nil { - s += " {<nil>}" - } else { - s += fmt.Sprintf(" {%s}", html.EscapeString(v.Value.String())) - } - case *Sigma: - s += fmt.Sprintf(" [#%s]", v.From) - } - for _, a := range v.Operands(nil) { - s += fmt.Sprintf(" %s", valueHTML(*a)) - } - - // OPT(dh): we're calling namedValues many times on the same function. - allNames := namedValues(v.Parent()) - var names []string - for name, values := range allNames { - for _, value := range values { - if v == value { - names = append(names, name.Name()) - break - } - } - } - if len(names) != 0 { - s += " (" + strings.Join(names, ", ") + ")" - } - - s += "" - return s -} - -func blockHTML(b *BasicBlock) string { - // TODO: Using the value ID as the class ignores the fact - // that value IDs get recycled and that some values - // are transmuted into other values. 
- s := html.EscapeString(b.String()) - return fmt.Sprintf("%s", s, s) -} - -func blockLongHTML(b *BasicBlock) string { - var kind string - var term Instruction - if len(b.Instrs) > 0 { - term = b.Control() - kind = opName(term) - } - // TODO: improve this for HTML? - s := fmt.Sprintf("%s", b.Index, kind) - - if term != nil { - ops := term.Operands(nil) - if len(ops) > 0 { - var ss []string - for _, op := range ops { - ss = append(ss, valueHTML(*op)) - } - s += " " + strings.Join(ss, ", ") - } - } - if len(b.Succs) > 0 { - s += " →" // right arrow - for _, c := range b.Succs { - s += " " + blockHTML(c) - } - } - return s -} - -func funcHTML(f *Function, phase string, dot *dotWriter) string { - buf := new(bytes.Buffer) - if dot != nil { - dot.writeFuncSVG(buf, phase, f) - } - fmt.Fprint(buf, "") - p := htmlFuncPrinter{w: buf} - fprintFunc(p, f) - - // fprintFunc(&buf, f) // TODO: HTML, not text,
<br> for line breaks, etc.
-	fmt.Fprint(buf, "</code>
") - return buf.String() -} - -type htmlFuncPrinter struct { - w io.Writer -} - -func (p htmlFuncPrinter) startBlock(b *BasicBlock, reachable bool) { - var dead string - if !reachable { - dead = "dead-block" - } - fmt.Fprintf(p.w, "
    ", b, dead) - fmt.Fprintf(p.w, "
  • %s:", blockHTML(b)) - if len(b.Preds) > 0 { - io.WriteString(p.w, " ←") // left arrow - for _, pred := range b.Preds { - fmt.Fprintf(p.w, " %s", blockHTML(pred)) - } - } - if len(b.Instrs) > 0 { - io.WriteString(p.w, ``) - } - io.WriteString(p.w, "
  • ") - if len(b.Instrs) > 0 { // start list of values - io.WriteString(p.w, "
  • ") - io.WriteString(p.w, "
      ") - } -} - -func (p htmlFuncPrinter) endBlock(b *BasicBlock) { - if len(b.Instrs) > 0 { // end list of values - io.WriteString(p.w, "
    ") - io.WriteString(p.w, "
  • ") - } - io.WriteString(p.w, "
  • ") - fmt.Fprint(p.w, blockLongHTML(b)) - io.WriteString(p.w, "
  • ") - io.WriteString(p.w, "
") -} - -func (p htmlFuncPrinter) value(v Node, live bool) { - var dead string - if !live { - dead = "dead-value" - } - fmt.Fprintf(p.w, "
  • ", dead) - fmt.Fprint(p.w, valueLongHTML(v)) - io.WriteString(p.w, "
  • ") -} - -func (p htmlFuncPrinter) startDepCycle() { - fmt.Fprintln(p.w, "") -} - -func (p htmlFuncPrinter) endDepCycle() { - fmt.Fprintln(p.w, "") -} - -func (p htmlFuncPrinter) named(n string, vals []Value) { - fmt.Fprintf(p.w, "
  • name %s: ", n) - for _, val := range vals { - fmt.Fprintf(p.w, "%s ", valueHTML(val)) - } - fmt.Fprintf(p.w, "
  • ") -} - -type dotWriter struct { - path string - broken bool -} - -// newDotWriter returns non-nil value when mask is valid. -// dotWriter will generate SVGs only for the phases specified in the mask. -// mask can contain following patterns and combinations of them: -// * - all of them; -// x-y - x through y, inclusive; -// x,y - x and y, but not the passes between. -func newDotWriter() *dotWriter { - path, err := exec.LookPath("dot") - if err != nil { - fmt.Println(err) - return nil - } - return &dotWriter{path: path} -} - -func (d *dotWriter) writeFuncSVG(w io.Writer, phase string, f *Function) { - if d.broken { - return - } - cmd := exec.Command(d.path, "-Tsvg") - pipe, err := cmd.StdinPipe() - if err != nil { - d.broken = true - fmt.Println(err) - return - } - buf := new(bytes.Buffer) - cmd.Stdout = buf - bufErr := new(bytes.Buffer) - cmd.Stderr = bufErr - err = cmd.Start() - if err != nil { - d.broken = true - fmt.Println(err) - return - } - fmt.Fprint(pipe, `digraph "" { margin=0; size="4,40"; ranksep=.2; `) - id := strings.Replace(phase, " ", "-", -1) - fmt.Fprintf(pipe, `id="g_graph_%s";`, id) - fmt.Fprintf(pipe, `node [style=filled,fillcolor=white,fontsize=16,fontname="Menlo,Times,serif",margin="0.01,0.03"];`) - fmt.Fprintf(pipe, `edge [fontsize=16,fontname="Menlo,Times,serif"];`) - for _, b := range f.Blocks { - layout := "" - fmt.Fprintf(pipe, `%v [label="%v%s\n%v",id="graph_node_%v_%v"];`, b, b, layout, b.Control().String(), id, b) - } - indexOf := make([]int, len(f.Blocks)) - for i, b := range f.Blocks { - indexOf[b.Index] = i - } - - // XXX - /* - ponums := make([]int32, len(f.Blocks)) - _ = postorderWithNumbering(f, ponums) - isBackEdge := func(from, to int) bool { - return ponums[from] <= ponums[to] - } - */ - isBackEdge := func(from, to int) bool { return false } - - for _, b := range f.Blocks { - for i, s := range b.Succs { - style := "solid" - color := "black" - arrow := "vee" - if isBackEdge(b.Index, s.Index) { - color = "blue" - } - fmt.Fprintf(pipe, `%v -> %v [label=" %d ",style="%s",color="%s",arrowhead="%s"];`, b, s, i, style, color, arrow) - } - } - fmt.Fprint(pipe, "}") - pipe.Close() - err = cmd.Wait() - if err != nil { - d.broken = true - fmt.Printf("dot: %v\n%v\n", err, bufErr.String()) - return - } - - svgID := "svg_graph_" + id - fmt.Fprintf(w, `
    `, svgID, svgID) - // For now, an awful hack: edit the html as it passes through - // our fingers, finding ' 0 { - fset = initial[0].Fset - } - - prog := ir.NewProgram(fset, mode) - if opts != nil { - prog.PrintFunc = opts.PrintFunc - } - - isInitial := make(map[*packages.Package]bool, len(initial)) - for _, p := range initial { - isInitial[p] = true - } - - irmap := make(map[*packages.Package]*ir.Package) - packages.Visit(initial, nil, func(p *packages.Package) { - if p.Types != nil && !p.IllTyped { - var files []*ast.File - if deps || isInitial[p] { - files = p.Syntax - } - irmap[p] = prog.CreatePackage(p.Types, files, p.TypesInfo, true) - } - }) - - var irpkgs []*ir.Package - for _, p := range initial { - irpkgs = append(irpkgs, irmap[p]) // may be nil - } - return prog, irpkgs -} - -// CreateProgram returns a new program in IR form, given a program -// loaded from source. An IR package is created for each transitively -// error-free package of lprog. -// -// Code for bodies of functions is not built until Build is called -// on the result. -// -// The mode parameter controls diagnostics and checking during IR construction. -// -// Deprecated: use golang.org/x/tools/go/packages and the Packages -// function instead; see ir.ExampleLoadPackages. -func CreateProgram(lprog *loader.Program, mode ir.BuilderMode) *ir.Program { - prog := ir.NewProgram(lprog.Fset, mode) - - for _, info := range lprog.AllPackages { - if info.TransitivelyErrorFree { - prog.CreatePackage(info.Pkg, info.Files, &info.Info, info.Importable) - } - } - - return prog -} - -// BuildPackage builds an IR program with IR for a single package. -// -// It populates pkg by type-checking the specified file ASTs. All -// dependencies are loaded using the importer specified by tc, which -// typically loads compiler export data; IR code cannot be built for -// those packages. BuildPackage then constructs an ir.Program with all -// dependency packages created, and builds and returns the IR package -// corresponding to pkg. -// -// The caller must have set pkg.Path() to the import path. -// -// The operation fails if there were any type-checking or import errors. -// -// See ../ir/example_test.go for an example. -func BuildPackage(tc *types.Config, fset *token.FileSet, pkg *types.Package, files []*ast.File, mode ir.BuilderMode) (*ir.Package, *types.Info, error) { - if fset == nil { - panic("no token.FileSet") - } - if pkg.Path() == "" { - panic("package has no import path") - } - - info := &types.Info{ - Types: make(map[ast.Expr]types.TypeAndValue), - Defs: make(map[*ast.Ident]types.Object), - Uses: make(map[*ast.Ident]types.Object), - Implicits: make(map[ast.Node]types.Object), - Scopes: make(map[ast.Node]*types.Scope), - Selections: make(map[*ast.SelectorExpr]*types.Selection), - } - if err := types.NewChecker(tc, fset, pkg, info).Files(files); err != nil { - return nil, nil, err - } - - prog := ir.NewProgram(fset, mode) - - // Create IR packages for all imports. - // Order is not significant. - created := make(map[*types.Package]bool) - var createAll func(pkgs []*types.Package) - createAll = func(pkgs []*types.Package) { - for _, p := range pkgs { - if !created[p] { - created[p] = true - prog.CreatePackage(p, nil, nil, true) - createAll(p.Imports()) - } - } - } - createAll(pkg.Imports()) - - // Create and build the primary package. 
- irpkg := prog.CreatePackage(pkg, files, info, false) - irpkg.Build() - return irpkg, info, nil -} diff --git a/vendor/honnef.co/go/tools/go/ir/irutil/loops.go b/vendor/honnef.co/go/tools/go/ir/irutil/loops.go deleted file mode 100644 index 751cc680b..000000000 --- a/vendor/honnef.co/go/tools/go/ir/irutil/loops.go +++ /dev/null @@ -1,54 +0,0 @@ -package irutil - -import "honnef.co/go/tools/go/ir" - -type Loop struct{ *ir.BlockSet } - -func FindLoops(fn *ir.Function) []Loop { - if fn.Blocks == nil { - return nil - } - tree := fn.DomPreorder() - var sets []Loop - for _, h := range tree { - for _, n := range h.Preds { - if !h.Dominates(n) { - continue - } - // n is a back-edge to h - // h is the loop header - if n == h { - set := Loop{ir.NewBlockSet(len(fn.Blocks))} - set.Add(n) - sets = append(sets, set) - continue - } - set := Loop{ir.NewBlockSet(len(fn.Blocks))} - set.Add(h) - set.Add(n) - for _, b := range allPredsBut(n, h, nil) { - set.Add(b) - } - sets = append(sets, set) - } - } - return sets -} - -func allPredsBut(b, but *ir.BasicBlock, list []*ir.BasicBlock) []*ir.BasicBlock { -outer: - for _, pred := range b.Preds { - if pred == but { - continue - } - for _, p := range list { - // TODO improve big-o complexity of this function - if pred == p { - continue outer - } - } - list = append(list, pred) - list = allPredsBut(pred, but, list) - } - return list -} diff --git a/vendor/honnef.co/go/tools/go/ir/irutil/stub.go b/vendor/honnef.co/go/tools/go/ir/irutil/stub.go deleted file mode 100644 index 4311c7dbe..000000000 --- a/vendor/honnef.co/go/tools/go/ir/irutil/stub.go +++ /dev/null @@ -1,32 +0,0 @@ -package irutil - -import ( - "honnef.co/go/tools/go/ir" -) - -// IsStub reports whether a function is a stub. A function is -// considered a stub if it has no instructions or if all it does is -// return a constant value. -func IsStub(fn *ir.Function) bool { - for _, b := range fn.Blocks { - for _, instr := range b.Instrs { - switch instr.(type) { - case *ir.Const: - // const naturally has no side-effects - case *ir.Panic: - // panic is a stub if it only uses constants - case *ir.Return: - // return is a stub if it only uses constants - case *ir.DebugRef: - case *ir.Jump: - // if there are no disallowed instructions, then we're - // only jumping to the exit block (or possibly - // somewhere else that's stubby?) - default: - // all other instructions are assumed to do actual work - return false - } - } - } - return true -} diff --git a/vendor/honnef.co/go/tools/go/ir/irutil/switch.go b/vendor/honnef.co/go/tools/go/ir/irutil/switch.go deleted file mode 100644 index afe899d86..000000000 --- a/vendor/honnef.co/go/tools/go/ir/irutil/switch.go +++ /dev/null @@ -1,260 +0,0 @@ -// Copyright 2013 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package irutil - -// This file implements discovery of switch and type-switch constructs -// from low-level control flow. -// -// Many techniques exist for compiling a high-level switch with -// constant cases to efficient machine code. The optimal choice will -// depend on the data type, the specific case values, the code in the -// body of each case, and the hardware. -// Some examples: -// - a lookup table (for a switch that maps constants to constants) -// - a computed goto -// - a binary tree -// - a perfect hash -// - a two-level switch (to partition constant strings by their first byte). 
- -import ( - "bytes" - "fmt" - "go/token" - "go/types" - - "honnef.co/go/tools/go/ir" -) - -// A ConstCase represents a single constant comparison. -// It is part of a Switch. -type ConstCase struct { - Block *ir.BasicBlock // block performing the comparison - Body *ir.BasicBlock // body of the case - Value *ir.Const // case comparand -} - -// A TypeCase represents a single type assertion. -// It is part of a Switch. -type TypeCase struct { - Block *ir.BasicBlock // block performing the type assert - Body *ir.BasicBlock // body of the case - Type types.Type // case type - Binding ir.Value // value bound by this case -} - -// A Switch is a logical high-level control flow operation -// (a multiway branch) discovered by analysis of a CFG containing -// only if/else chains. It is not part of the ir.Instruction set. -// -// One of ConstCases and TypeCases has length >= 2; -// the other is nil. -// -// In a value switch, the list of cases may contain duplicate constants. -// A type switch may contain duplicate types, or types assignable -// to an interface type also in the list. -// TODO(adonovan): eliminate such duplicates. -type Switch struct { - Start *ir.BasicBlock // block containing start of if/else chain - X ir.Value // the switch operand - ConstCases []ConstCase // ordered list of constant comparisons - TypeCases []TypeCase // ordered list of type assertions - Default *ir.BasicBlock // successor if all comparisons fail -} - -func (sw *Switch) String() string { - // We represent each block by the String() of its - // first Instruction, e.g. "print(42:int)". - var buf bytes.Buffer - if sw.ConstCases != nil { - fmt.Fprintf(&buf, "switch %s {\n", sw.X.Name()) - for _, c := range sw.ConstCases { - fmt.Fprintf(&buf, "case %s: %s\n", c.Value.Name(), c.Body.Instrs[0]) - } - } else { - fmt.Fprintf(&buf, "switch %s.(type) {\n", sw.X.Name()) - for _, c := range sw.TypeCases { - fmt.Fprintf(&buf, "case %s %s: %s\n", - c.Binding.Name(), c.Type, c.Body.Instrs[0]) - } - } - if sw.Default != nil { - fmt.Fprintf(&buf, "default: %s\n", sw.Default.Instrs[0]) - } - fmt.Fprintf(&buf, "}") - return buf.String() -} - -// Switches examines the control-flow graph of fn and returns the -// set of inferred value and type switches. A value switch tests an -// ir.Value for equality against two or more compile-time constant -// values. Switches involving link-time constants (addresses) are -// ignored. A type switch type-asserts an ir.Value against two or -// more types. -// -// The switches are returned in dominance order. -// -// The resulting switches do not necessarily correspond to uses of the -// 'switch' keyword in the source: for example, a single source-level -// switch statement with non-constant cases may result in zero, one or -// many Switches, one per plural sequence of constant cases. -// Switches may even be inferred from if/else- or goto-based control flow. -// (In general, the control flow constructs of the source program -// cannot be faithfully reproduced from the IR.) -func Switches(fn *ir.Function) []Switch { - // Traverse the CFG in dominance order, so we don't - // enter an if/else-chain in the middle. - var switches []Switch - seen := make(map[*ir.BasicBlock]bool) // TODO(adonovan): opt: use ir.blockSet - for _, b := range fn.DomPreorder() { - if x, k := isComparisonBlock(b); x != nil { - // Block b starts a switch. 
- sw := Switch{Start: b, X: x} - valueSwitch(&sw, k, seen) - if len(sw.ConstCases) > 1 { - switches = append(switches, sw) - } - } - - if y, x, T := isTypeAssertBlock(b); y != nil { - // Block b starts a type switch. - sw := Switch{Start: b, X: x} - typeSwitch(&sw, y, T, seen) - if len(sw.TypeCases) > 1 { - switches = append(switches, sw) - } - } - } - return switches -} - -func isSameX(x1 ir.Value, x2 ir.Value) bool { - if x1 == x2 { - return true - } - if x2, ok := x2.(*ir.Sigma); ok { - return isSameX(x1, x2.X) - } - return false -} - -func valueSwitch(sw *Switch, k *ir.Const, seen map[*ir.BasicBlock]bool) { - b := sw.Start - x := sw.X - for isSameX(sw.X, x) { - if seen[b] { - break - } - seen[b] = true - - sw.ConstCases = append(sw.ConstCases, ConstCase{ - Block: b, - Body: b.Succs[0], - Value: k, - }) - b = b.Succs[1] - n := 0 - for _, instr := range b.Instrs { - switch instr.(type) { - case *ir.If, *ir.BinOp: - n++ - case *ir.Sigma, *ir.Phi, *ir.DebugRef: - default: - n += 1000 - } - } - if n != 2 { - // Block b contains not just 'if x == k' and σ/ϕ nodes, - // so it may have side effects that - // make it unsafe to elide. - break - } - if len(b.Preds) != 1 { - // Block b has multiple predecessors, - // so it cannot be treated as a case. - break - } - x, k = isComparisonBlock(b) - } - sw.Default = b -} - -func typeSwitch(sw *Switch, y ir.Value, T types.Type, seen map[*ir.BasicBlock]bool) { - b := sw.Start - x := sw.X - for isSameX(sw.X, x) { - if seen[b] { - break - } - seen[b] = true - - sw.TypeCases = append(sw.TypeCases, TypeCase{ - Block: b, - Body: b.Succs[0], - Type: T, - Binding: y, - }) - b = b.Succs[1] - n := 0 - for _, instr := range b.Instrs { - switch instr.(type) { - case *ir.TypeAssert, *ir.Extract, *ir.If: - n++ - case *ir.Sigma, *ir.Phi: - default: - n += 1000 - } - } - if n != 4 { - // Block b contains not just - // {TypeAssert; Extract #0; Extract #1; If} - // so it may have side effects that - // make it unsafe to elide. - break - } - if len(b.Preds) != 1 { - // Block b has multiple predecessors, - // so it cannot be treated as a case. - break - } - y, x, T = isTypeAssertBlock(b) - } - sw.Default = b -} - -// isComparisonBlock returns the operands (v, k) if a block ends with -// a comparison v==k, where k is a compile-time constant. -func isComparisonBlock(b *ir.BasicBlock) (v ir.Value, k *ir.Const) { - if n := len(b.Instrs); n >= 2 { - if i, ok := b.Instrs[n-1].(*ir.If); ok { - if binop, ok := i.Cond.(*ir.BinOp); ok && binop.Block() == b && binop.Op == token.EQL { - if k, ok := binop.Y.(*ir.Const); ok { - return binop.X, k - } - if k, ok := binop.X.(*ir.Const); ok { - return binop.Y, k - } - } - } - } - return -} - -// isTypeAssertBlock returns the operands (y, x, T) if a block ends with -// a type assertion "if y, ok := x.(T); ok {". -func isTypeAssertBlock(b *ir.BasicBlock) (y, x ir.Value, T types.Type) { - if n := len(b.Instrs); n >= 4 { - if i, ok := b.Instrs[n-1].(*ir.If); ok { - if ext1, ok := i.Cond.(*ir.Extract); ok && ext1.Block() == b && ext1.Index == 1 { - if ta, ok := ext1.Tuple.(*ir.TypeAssert); ok && ta.Block() == b { - // hack: relies upon instruction ordering. 
- if ext0, ok := b.Instrs[n-3].(*ir.Extract); ok { - return ext0, ta.X, ta.AssertedType - } - } - } - } - } - return -} diff --git a/vendor/honnef.co/go/tools/go/ir/irutil/terminates.go b/vendor/honnef.co/go/tools/go/ir/irutil/terminates.go deleted file mode 100644 index 84e7503bb..000000000 --- a/vendor/honnef.co/go/tools/go/ir/irutil/terminates.go +++ /dev/null @@ -1,70 +0,0 @@ -package irutil - -import ( - "go/types" - - "honnef.co/go/tools/go/ir" -) - -// Terminates reports whether fn is supposed to return, that is if it -// has at least one theoretic path that returns from the function. -// Explicit panics do not count as terminating. -func Terminates(fn *ir.Function) bool { - if fn.Blocks == nil { - // assuming that a function terminates is the conservative - // choice - return true - } - - for _, block := range fn.Blocks { - if _, ok := block.Control().(*ir.Return); ok { - if len(block.Preds) == 0 { - return true - } - for _, pred := range block.Preds { - switch ctrl := pred.Control().(type) { - case *ir.Panic: - // explicit panics do not count as terminating - case *ir.If: - // Check if we got here by receiving from a closed - // time.Tick channel – this cannot happen at - // runtime and thus doesn't constitute termination - iff := ctrl - if !ok { - return true - } - ex, ok := iff.Cond.(*ir.Extract) - if !ok { - return true - } - if ex.Index != 1 { - return true - } - recv, ok := ex.Tuple.(*ir.Recv) - if !ok { - return true - } - call, ok := recv.Chan.(*ir.Call) - if !ok { - return true - } - fn, ok := call.Common().Value.(*ir.Function) - if !ok { - return true - } - fn2, ok := fn.Object().(*types.Func) - if !ok { - return true - } - if fn2.FullName() != "time.Tick" { - return true - } - default: - // we've reached the exit block - return true - } - } - } - } - return false -} diff --git a/vendor/honnef.co/go/tools/go/ir/irutil/util.go b/vendor/honnef.co/go/tools/go/ir/irutil/util.go deleted file mode 100644 index d2f10948d..000000000 --- a/vendor/honnef.co/go/tools/go/ir/irutil/util.go +++ /dev/null @@ -1,178 +0,0 @@ -package irutil - -import ( - "go/types" - "strings" - - "honnef.co/go/tools/go/ir" - "honnef.co/go/tools/go/types/typeutil" -) - -func Reachable(from, to *ir.BasicBlock) bool { - if from == to { - return true - } - if from.Dominates(to) { - return true - } - - found := false - Walk(from, func(b *ir.BasicBlock) bool { - if b == to { - found = true - return false - } - return true - }) - return found -} - -func Walk(b *ir.BasicBlock, fn func(*ir.BasicBlock) bool) { - seen := map[*ir.BasicBlock]bool{} - wl := []*ir.BasicBlock{b} - for len(wl) > 0 { - b := wl[len(wl)-1] - wl = wl[:len(wl)-1] - if seen[b] { - continue - } - seen[b] = true - if !fn(b) { - continue - } - wl = append(wl, b.Succs...) 
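The removed Terminates helper is typically used as a guard so that a check only fires for functions that can actually return. A minimal sketch under the same assumptions as the previous snippet (fn comes from the IR builder); note the helper's conservative default of "terminates" for functions without blocks.

package example

import (
	"honnef.co/go/tools/go/ir"
	"honnef.co/go/tools/go/ir/irutil"
)

// shouldReport skips functions that never return, such as infinite event loops.
func shouldReport(fn *ir.Function) bool {
	return irutil.Terminates(fn)
}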
- } -} - -func Vararg(x *ir.Slice) ([]ir.Value, bool) { - var out []ir.Value - alloc, ok := ir.Unwrap(x.X).(*ir.Alloc) - if !ok { - return nil, false - } - var checkAlloc func(alloc ir.Value) bool - checkAlloc = func(alloc ir.Value) bool { - for _, ref := range *alloc.Referrers() { - if ref == x { - continue - } - if ref.Block() != x.Block() { - return false - } - switch ref := ref.(type) { - case *ir.IndexAddr: - idx := ref - if len(*idx.Referrers()) != 1 { - return false - } - store, ok := (*idx.Referrers())[0].(*ir.Store) - if !ok { - return false - } - out = append(out, store.Val) - case *ir.Copy: - if !checkAlloc(ref) { - return false - } - default: - return false - } - } - return true - } - if !checkAlloc(alloc) { - return nil, false - } - return out, true -} - -func CallName(call *ir.CallCommon) string { - if call.IsInvoke() { - return "" - } - switch v := call.Value.(type) { - case *ir.Function: - fn, ok := v.Object().(*types.Func) - if !ok { - return "" - } - return typeutil.FuncName(fn) - case *ir.Builtin: - return v.Name() - } - return "" -} - -func IsCallTo(call *ir.CallCommon, name string) bool { return CallName(call) == name } - -func IsCallToAny(call *ir.CallCommon, names ...string) bool { - q := CallName(call) - for _, name := range names { - if q == name { - return true - } - } - return false -} - -func FilterDebug(instr []ir.Instruction) []ir.Instruction { - var out []ir.Instruction - for _, ins := range instr { - if _, ok := ins.(*ir.DebugRef); !ok { - out = append(out, ins) - } - } - return out -} - -func IsExample(fn *ir.Function) bool { - if !strings.HasPrefix(fn.Name(), "Example") { - return false - } - f := fn.Prog.Fset.File(fn.Pos()) - if f == nil { - return false - } - return strings.HasSuffix(f.Name(), "_test.go") -} - -// Flatten recursively returns the underlying value of an ir.Sigma or -// ir.Phi node. If all edges in an ir.Phi node are the same (after -// flattening), the flattened edge will get returned. If flattening is -// not possible, nil is returned. -func Flatten(v ir.Value) ir.Value { - failed := false - seen := map[ir.Value]struct{}{} - var out ir.Value - var dfs func(v ir.Value) - dfs = func(v ir.Value) { - if failed { - return - } - if _, ok := seen[v]; ok { - return - } - seen[v] = struct{}{} - - switch v := v.(type) { - case *ir.Sigma: - dfs(v.X) - case *ir.Phi: - for _, e := range v.Edges { - dfs(e) - } - default: - if out == nil { - out = v - } else if out != v { - failed = true - } - } - } - dfs(v) - - if failed { - return nil - } - return out -} diff --git a/vendor/honnef.co/go/tools/go/ir/irutil/visit.go b/vendor/honnef.co/go/tools/go/ir/irutil/visit.go deleted file mode 100644 index f2135dca4..000000000 --- a/vendor/honnef.co/go/tools/go/ir/irutil/visit.go +++ /dev/null @@ -1,78 +0,0 @@ -// Copyright 2013 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package irutil - -import "honnef.co/go/tools/go/ir" - -// This file defines utilities for visiting the IR of -// a Program. -// -// TODO(adonovan): test coverage. - -// AllFunctions finds and returns the set of functions potentially -// needed by program prog, as determined by a simple linker-style -// reachability algorithm starting from the members and method-sets of -// each package. The result may include anonymous functions and -// synthetic wrappers. -// -// Precondition: all packages are built. 
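CallName and IsCallToAny, removed above, resolve static callees to fully-qualified names. A hedged sketch of a caller follows; the target names are examples only, and fn is again assumed to come from the IR builder.

package example

import (
	"fmt"

	"honnef.co/go/tools/go/ir"
	"honnef.co/go/tools/go/ir/irutil"
)

// findTickers reports calls to time.Tick or time.NewTicker inside fn.
func findTickers(fn *ir.Function) {
	for _, b := range fn.Blocks {
		for _, instr := range b.Instrs {
			call, ok := instr.(ir.CallInstruction)
			if !ok {
				continue
			}
			common := call.Common()
			if irutil.IsCallToAny(common, "time.Tick", "time.NewTicker") {
				fmt.Printf("%s calls %s\n", fn.Name(), irutil.CallName(common))
			}
		}
	}
}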
-func AllFunctions(prog *ir.Program) map[*ir.Function]bool { - visit := visitor{ - prog: prog, - seen: make(map[*ir.Function]bool), - } - visit.program() - return visit.seen -} - -type visitor struct { - prog *ir.Program - seen map[*ir.Function]bool -} - -func (visit *visitor) program() { - for _, pkg := range visit.prog.AllPackages() { - for _, mem := range pkg.Members { - if fn, ok := mem.(*ir.Function); ok { - visit.function(fn) - } - } - } - for _, T := range visit.prog.RuntimeTypes() { - mset := visit.prog.MethodSets.MethodSet(T) - for i, n := 0, mset.Len(); i < n; i++ { - visit.function(visit.prog.MethodValue(mset.At(i))) - } - } -} - -func (visit *visitor) function(fn *ir.Function) { - if !visit.seen[fn] { - visit.seen[fn] = true - var buf [10]*ir.Value // avoid alloc in common case - for _, b := range fn.Blocks { - for _, instr := range b.Instrs { - for _, op := range instr.Operands(buf[:0]) { - if fn, ok := (*op).(*ir.Function); ok { - visit.function(fn) - } - } - } - } - } -} - -// MainPackages returns the subset of the specified packages -// named "main" that define a main function. -// The result may include synthetic "testmain" packages. -func MainPackages(pkgs []*ir.Package) []*ir.Package { - var mains []*ir.Package - for _, pkg := range pkgs { - if pkg.Pkg.Name() == "main" && pkg.Func("main") != nil { - mains = append(mains, pkg) - } - } - return mains -} diff --git a/vendor/honnef.co/go/tools/go/ir/lift.go b/vendor/honnef.co/go/tools/go/ir/lift.go deleted file mode 100644 index 1a4cd3026..000000000 --- a/vendor/honnef.co/go/tools/go/ir/lift.go +++ /dev/null @@ -1,1770 +0,0 @@ -// Copyright 2013 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package ir - -// This file defines the lifting pass which tries to "lift" Alloc -// cells (new/local variables) into SSA registers, replacing loads -// with the dominating stored value, eliminating loads and stores, and -// inserting φ- and σ-nodes as needed. - -// Cited papers and resources: -// -// Ron Cytron et al. 1991. Efficiently computing SSA form... -// http://doi.acm.org/10.1145/115372.115320 -// -// Cooper, Harvey, Kennedy. 2001. A Simple, Fast Dominance Algorithm. -// Software Practice and Experience 2001, 4:1-10. -// http://www.hipersoft.rice.edu/grads/publications/dom14.pdf -// -// Daniel Berlin, llvmdev mailing list, 2012. -// http://lists.cs.uiuc.edu/pipermail/llvmdev/2012-January/046638.html -// (Be sure to expand the whole thread.) -// -// C. Scott Ananian. 1997. The static single information form. -// -// Jeremy Singer. 2006. Static program analysis based on virtual register renaming. - -// TODO(adonovan): opt: there are many optimizations worth evaluating, and -// the conventional wisdom for SSA construction is that a simple -// algorithm well engineered often beats those of better asymptotic -// complexity on all but the most egregious inputs. -// -// Danny Berlin suggests that the Cooper et al. algorithm for -// computing the dominance frontier is superior to Cytron et al. -// Furthermore he recommends that rather than computing the DF for the -// whole function then renaming all alloc cells, it may be cheaper to -// compute the DF for each alloc cell separately and throw it away. -// -// Consider exploiting liveness information to avoid creating dead -// φ-nodes which we then immediately remove. -// -// Also see many other "TODO: opt" suggestions in the code. 
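In Go-source terms, the lifting pass described in the header comment above promotes locals whose accesses are plain loads and stores, while a variable whose address escapes generally has to stay in an Alloc cell. The following ordinary Go function (not IR) is only an illustration of that distinction.

package example

// liftingExample sketches which locals the pass can promote: x is only ever
// loaded and stored, so it can become an SSA register with a φ-node joining
// its two assignments; y's address escapes through sink, so its Alloc cell
// generally cannot be eliminated.
func liftingExample(cond bool, sink func(*int)) int {
	x := 1
	if cond {
		x = 2
	}
	y := 3
	sink(&y)
	return x + y
}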
- -import ( - "encoding/binary" - "fmt" - "os" -) - -// If true, show diagnostic information at each step of lifting. -// Very verbose. -const debugLifting = false - -// domFrontier maps each block to the set of blocks in its dominance -// frontier. The outer slice is conceptually a map keyed by -// Block.Index. The inner slice is conceptually a set, possibly -// containing duplicates. -// -// TODO(adonovan): opt: measure impact of dups; consider a packed bit -// representation, e.g. big.Int, and bitwise parallel operations for -// the union step in the Children loop. -// -// domFrontier's methods mutate the slice's elements but not its -// length, so their receivers needn't be pointers. -type domFrontier BlockMap[[]*BasicBlock] - -func (df domFrontier) add(u, v *BasicBlock) { - df[u.Index] = append(df[u.Index], v) -} - -// build builds the dominance frontier df for the dominator tree of -// fn, using the algorithm found in A Simple, Fast Dominance -// Algorithm, Figure 5. -// -// TODO(adonovan): opt: consider Berlin approach, computing pruned SSA -// by pruning the entire IDF computation, rather than merely pruning -// the DF -> IDF step. -func (df domFrontier) build(fn *Function) { - for _, b := range fn.Blocks { - preds := b.Preds[0:len(b.Preds):len(b.Preds)] - if b == fn.Exit { - for i, v := range fn.fakeExits.values { - if v { - preds = append(preds, fn.Blocks[i]) - } - } - } - if len(preds) >= 2 { - for _, p := range preds { - runner := p - for runner != b.dom.idom { - df.add(runner, b) - runner = runner.dom.idom - } - } - } - } -} - -func buildDomFrontier(fn *Function) domFrontier { - df := make(domFrontier, len(fn.Blocks)) - df.build(fn) - return df -} - -type postDomFrontier BlockMap[[]*BasicBlock] - -func (rdf postDomFrontier) add(u, v *BasicBlock) { - rdf[u.Index] = append(rdf[u.Index], v) -} - -func (rdf postDomFrontier) build(fn *Function) { - for _, b := range fn.Blocks { - succs := b.Succs[0:len(b.Succs):len(b.Succs)] - if fn.fakeExits.Has(b) { - succs = append(succs, fn.Exit) - } - if len(succs) >= 2 { - for _, s := range succs { - runner := s - for runner != b.pdom.idom { - rdf.add(runner, b) - runner = runner.pdom.idom - } - } - } - } -} - -func buildPostDomFrontier(fn *Function) postDomFrontier { - rdf := make(postDomFrontier, len(fn.Blocks)) - rdf.build(fn) - return rdf -} - -func removeInstr(refs []Instruction, instr Instruction) []Instruction { - i := 0 - for _, ref := range refs { - if ref == instr { - continue - } - refs[i] = ref - i++ - } - for j := i; j != len(refs); j++ { - refs[j] = nil // aid GC - } - return refs[:i] -} - -func clearInstrs(instrs []Instruction) { - for i := range instrs { - instrs[i] = nil - } -} - -func numberNodesPerBlock(f *Function) { - for _, b := range f.Blocks { - var base ID - for _, instr := range b.Instrs { - if instr == nil { - continue - } - instr.setID(base) - base++ - } - } -} - -// lift replaces local and new Allocs accessed only with -// load/store by IR registers, inserting φ- and σ-nodes where necessary. -// The result is a program in pruned SSI form. -// -// Preconditions: -// - fn has no dead blocks (blockopt has run). -// - Def/use info (Operands and Referrers) is up-to-date. -// - The dominator tree is up-to-date. -func lift(fn *Function) bool { - // TODO(adonovan): opt: lots of little optimizations may be - // worthwhile here, especially if they cause us to avoid - // buildDomFrontier. For example: - // - // - Alloc never loaded? Eliminate. - // - Alloc never stored? Replace all loads with a zero constant. 
- // - Alloc stored once? Replace loads with dominating store; - // don't forget that an Alloc is itself an effective store - // of zero. - // - Alloc used only within a single block? - // Use degenerate algorithm avoiding φ-nodes. - // - Consider synergy with scalar replacement of aggregates (SRA). - // e.g. *(&x.f) where x is an Alloc. - // Perhaps we'd get better results if we generated this as x.f - // i.e. Field(x, .f) instead of Load(FieldIndex(x, .f)). - // Unclear. - // - // But we will start with the simplest correct code. - var df domFrontier - var rdf postDomFrontier - var closure *closure - var newPhis BlockMap[[]newPhi] - var newSigmas BlockMap[[]newSigma] - - // During this pass we will replace some BasicBlock.Instrs - // (allocs, loads and stores) with nil, keeping a count in - // BasicBlock.gaps. At the end we will reset Instrs to the - // concatenation of all non-dead newPhis and non-nil Instrs - // for the block, reusing the original array if space permits. - - // While we're here, we also eliminate 'rundefers' - // instructions in functions that contain no 'defer' - // instructions. - usesDefer := false - - // Determine which allocs we can lift and number them densely. - // The renaming phase uses this numbering for compact maps. - numAllocs := 0 - - instructions := make(BlockMap[liftInstructions], len(fn.Blocks)) - for i := range instructions { - instructions[i].insertInstructions = map[Instruction][]Instruction{} - } - - // Number nodes, for liftable - numberNodesPerBlock(fn) - - for _, b := range fn.Blocks { - b.gaps = 0 - b.rundefers = 0 - - for _, instr := range b.Instrs { - switch instr := instr.(type) { - case *Alloc: - if !liftable(instr, instructions) { - instr.index = -1 - continue - } - - if numAllocs == 0 { - df = buildDomFrontier(fn) - rdf = buildPostDomFrontier(fn) - if len(fn.Blocks) > 2 { - closure = transitiveClosure(fn) - } - newPhis = make(BlockMap[[]newPhi], len(fn.Blocks)) - newSigmas = make(BlockMap[[]newSigma], len(fn.Blocks)) - - if debugLifting { - title := false - for i, blocks := range df { - if blocks != nil { - if !title { - fmt.Fprintf(os.Stderr, "Dominance frontier of %s:\n", fn) - title = true - } - fmt.Fprintf(os.Stderr, "\t%s: %s\n", fn.Blocks[i], blocks) - } - } - } - } - instr.index = numAllocs - numAllocs++ - case *Defer: - usesDefer = true - case *RunDefers: - b.rundefers++ - } - } - } - - if numAllocs > 0 { - for _, b := range fn.Blocks { - work := instructions[b.Index] - for _, rename := range work.renameAllocs { - for _, instr_ := range b.Instrs[rename.startingAt:] { - replace(instr_, rename.from, rename.to) - } - } - } - - for _, b := range fn.Blocks { - work := instructions[b.Index] - if len(work.insertInstructions) != 0 { - newInstrs := make([]Instruction, 0, len(fn.Blocks)+len(work.insertInstructions)*3) - for _, instr := range b.Instrs { - if add, ok := work.insertInstructions[instr]; ok { - newInstrs = append(newInstrs, add...) - } - newInstrs = append(newInstrs, instr) - } - b.Instrs = newInstrs - } - } - - // TODO(dh): remove inserted allocs that end up unused after lifting. - - for _, b := range fn.Blocks { - for _, instr := range b.Instrs { - if instr, ok := instr.(*Alloc); ok && instr.index >= 0 { - liftAlloc(closure, df, rdf, instr, newPhis, newSigmas) - } - } - } - - // renaming maps an alloc (keyed by index) to its replacement - // value. Initially the renaming contains nil, signifying the - // zero constant of the appropriate type; we construct the - // Const lazily at most once on each path through the domtree. 
- // TODO(adonovan): opt: cache per-function not per subtree. - renaming := make([]Value, numAllocs) - - // Renaming. - rename(fn.Blocks[0], renaming, newPhis, newSigmas) - - simplifyPhisAndSigmas(newPhis, newSigmas) - - // Eliminate dead φ- and σ-nodes. - markLiveNodes(fn.Blocks, newPhis, newSigmas) - } - - // Prepend remaining live φ-nodes to each block and possibly kill rundefers. - for _, b := range fn.Blocks { - var head []Instruction - if numAllocs > 0 { - nps := newPhis[b.Index] - head = make([]Instruction, 0, len(nps)) - for _, pred := range b.Preds { - nss := newSigmas[pred.Index] - idx := pred.succIndex(b) - for _, newSigma := range nss { - if sigma := newSigma.sigmas[idx]; sigma != nil && sigma.live { - head = append(head, sigma) - - // we didn't populate referrers before, as most - // sigma nodes will be killed - if refs := sigma.X.Referrers(); refs != nil { - *refs = append(*refs, sigma) - } - } else if sigma != nil { - sigma.block = nil - } - } - } - for _, np := range nps { - if np.phi.live { - head = append(head, np.phi) - } else { - for _, edge := range np.phi.Edges { - if refs := edge.Referrers(); refs != nil { - *refs = removeInstr(*refs, np.phi) - } - } - np.phi.block = nil - } - } - } - - rundefersToKill := b.rundefers - if usesDefer { - rundefersToKill = 0 - } - - j := len(head) - if j+b.gaps+rundefersToKill == 0 { - continue // fast path: no new phis or gaps - } - - // We could do straight copies instead of element-wise copies - // when both b.gaps and rundefersToKill are zero. However, - // that seems to only be the case ~1% of the time, which - // doesn't seem worth the extra branch. - - // Remove dead instructions, add phis and sigmas - ns := len(b.Instrs) + j - b.gaps - rundefersToKill - if ns <= cap(b.Instrs) { - // b.Instrs has enough capacity to store all instructions - - // OPT(dh): check cap vs the actually required space; if - // there is a big enough difference, it may be worth - // allocating a new slice, to avoid pinning memory. - dst := b.Instrs[:cap(b.Instrs)] - i := len(dst) - 1 - for n := len(b.Instrs) - 1; n >= 0; n-- { - instr := dst[n] - if instr == nil { - continue - } - if !usesDefer { - if _, ok := instr.(*RunDefers); ok { - continue - } - } - dst[i] = instr - i-- - } - off := i + 1 - len(head) - // aid GC - clearInstrs(dst[:off]) - dst = dst[off:] - copy(dst, head) - b.Instrs = dst - } else { - // not enough space, so allocate a new slice and copy - // over. - dst := make([]Instruction, ns) - copy(dst, head) - - for _, instr := range b.Instrs { - if instr == nil { - continue - } - if !usesDefer { - if _, ok := instr.(*RunDefers); ok { - continue - } - } - dst[j] = instr - j++ - } - b.Instrs = dst - } - } - - // Remove any fn.Locals that were lifted. - j := 0 - for _, l := range fn.Locals { - if l.index < 0 { - fn.Locals[j] = l - j++ - } - } - // Nil out fn.Locals[j:] to aid GC. - for i := j; i < len(fn.Locals); i++ { - fn.Locals[i] = nil - } - fn.Locals = fn.Locals[:j] - - return numAllocs > 0 -} - -func hasDirectReferrer(instr Instruction) bool { - for _, instr := range *instr.Referrers() { - switch instr.(type) { - case *Phi, *Sigma: - // ignore - default: - return true - } - } - return false -} - -func markLiveNodes(blocks []*BasicBlock, newPhis BlockMap[[]newPhi], newSigmas BlockMap[[]newSigma]) { - // Phis and sigmas may become dead due to optimization passes. We may also insert more nodes than strictly - // necessary, e.g. sigma nodes for constants, which will never be used. 
- - // Phi and sigma nodes are considered live if a non-phi, non-sigma - // node uses them. Once we find a node that is live, we mark all - // of its operands as used, too. - for _, npList := range newPhis { - for _, np := range npList { - phi := np.phi - if !phi.live && hasDirectReferrer(phi) { - markLivePhi(phi) - } - } - } - for _, npList := range newSigmas { - for _, np := range npList { - for _, sigma := range np.sigmas { - if sigma != nil && !sigma.live && hasDirectReferrer(sigma) { - markLiveSigma(sigma) - } - } - } - } - // Existing φ-nodes due to && and || operators - // are all considered live (see Go issue 19622). - for _, b := range blocks { - for _, phi := range b.phis() { - markLivePhi(phi.(*Phi)) - } - } -} - -func markLivePhi(phi *Phi) { - phi.live = true - for _, rand := range phi.Edges { - switch rand := rand.(type) { - case *Phi: - if !rand.live { - markLivePhi(rand) - } - case *Sigma: - if !rand.live { - markLiveSigma(rand) - } - } - } -} - -func markLiveSigma(sigma *Sigma) { - sigma.live = true - switch rand := sigma.X.(type) { - case *Phi: - if !rand.live { - markLivePhi(rand) - } - case *Sigma: - if !rand.live { - markLiveSigma(rand) - } - } -} - -// simplifyPhisAndSigmas removes duplicate phi and sigma nodes, -// and replaces trivial phis with non-phi alternatives. Phi -// nodes where all edges are identical, or consist of only the phi -// itself and one other value, may be replaced with the value. -func simplifyPhisAndSigmas(newPhis BlockMap[[]newPhi], newSigmas BlockMap[[]newSigma]) { - // temporary numbering of values used in phis so that we can build map keys - var id ID - for _, npList := range newPhis { - for _, np := range npList { - for _, edge := range np.phi.Edges { - edge.setID(id) - id++ - } - } - } - // find all phis that are trivial and can be replaced with a - // non-phi value. run until we reach a fixpoint, because replacing - // a phi may make other phis trivial. - for changed := true; changed; { - changed = false - for _, npList := range newPhis { - for _, np := range npList { - if np.phi.live { - // we're reusing 'live' to mean 'dead' in the context of simplifyPhisAndSigmas - continue - } - if r, ok := isUselessPhi(np.phi); ok { - // useless phi, replace its uses with the - // replacement value. the dead phi pass will clean - // up the phi afterwards. - replaceAll(np.phi, r) - np.phi.live = true - changed = true - } - } - } - - // Replace duplicate sigma nodes with a single node. These nodes exist when multiple allocs get replaced with the - // same dominating store. - for _, sigmaList := range newSigmas { - primarySigmas := map[struct { - succ int - v Value - }]*Sigma{} - for _, sigmas := range sigmaList { - for succ, sigma := range sigmas.sigmas { - if sigma == nil { - continue - } - if sigma.live { - // we're reusing 'live' to mean 'dead' in the context of simplifyPhisAndSigmas - continue - } - key := struct { - succ int - v Value - }{succ, sigma.X} - if alt, ok := primarySigmas[key]; ok { - replaceAll(sigma, alt) - sigma.live = true - changed = true - } else { - primarySigmas[key] = sigma - } - } - } - } - - // Replace duplicate phi nodes with a single node. As far as we know, these duplicate nodes only ever exist - // because of the previous sigma deduplication. 
- keyb := make([]byte, 0, 4*8) - for _, npList := range newPhis { - primaryPhis := map[string]*Phi{} - for _, np := range npList { - if np.phi.live { - continue - } - if n := len(np.phi.Edges) * 8; cap(keyb) >= n { - keyb = keyb[:n] - } else { - keyb = make([]byte, n, n*2) - } - for i, e := range np.phi.Edges { - binary.LittleEndian.PutUint64(keyb[i*8:i*8+8], uint64(e.ID())) - } - if alt, ok := primaryPhis[string(keyb)]; ok { - replaceAll(np.phi, alt) - np.phi.live = true - changed = true - } else { - primaryPhis[string(keyb)] = np.phi - } - } - } - - } - - for _, npList := range newPhis { - for _, np := range npList { - np.phi.live = false - for _, edge := range np.phi.Edges { - edge.setID(0) - } - } - } - - for _, sigmaList := range newSigmas { - for _, sigmas := range sigmaList { - for _, sigma := range sigmas.sigmas { - if sigma != nil { - sigma.live = false - } - } - } - } -} - -type BlockSet struct { - idx int - values []bool - count int -} - -func NewBlockSet(size int) *BlockSet { - return &BlockSet{values: make([]bool, size)} -} - -func (s *BlockSet) Set(s2 *BlockSet) { - copy(s.values, s2.values) - s.count = 0 - for _, v := range s.values { - if v { - s.count++ - } - } -} - -func (s *BlockSet) Num() int { - return s.count -} - -func (s *BlockSet) Has(b *BasicBlock) bool { - if b.Index >= len(s.values) { - return false - } - return s.values[b.Index] -} - -// add adds b to the set and returns true if the set changed. -func (s *BlockSet) Add(b *BasicBlock) bool { - if s.values[b.Index] { - return false - } - s.count++ - s.values[b.Index] = true - s.idx = b.Index - - return true -} - -func (s *BlockSet) Clear() { - for j := range s.values { - s.values[j] = false - } - s.count = 0 -} - -// take removes an arbitrary element from a set s and -// returns its index, or returns -1 if empty. 
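BlockSet is the worklist primitive used throughout this file: Add seeds it and Take drains it until it reports -1, the same pattern the φ-insertion loop below follows. A small sketch, assuming fn and seed are provided by the caller.

package example

import "honnef.co/go/tools/go/ir"

func drainWorklist(fn *ir.Function, seed []*ir.BasicBlock) {
	w := ir.NewBlockSet(len(fn.Blocks))
	for _, b := range seed {
		w.Add(b)
	}
	// Take removes some member block and returns its index, or -1 once the
	// set is empty; a real pass may keep calling Add while draining.
	for i := w.Take(); i != -1; i = w.Take() {
		_ = fn.Blocks[i] // process the block here
	}
}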
-func (s *BlockSet) Take() int { - // [i, end] - for i := s.idx; i < len(s.values); i++ { - if s.values[i] { - s.values[i] = false - s.idx = i - s.count-- - return i - } - } - - // [start, i) - for i := 0; i < s.idx; i++ { - if s.values[i] { - s.values[i] = false - s.idx = i - s.count-- - return i - } - } - - return -1 -} - -type closure struct { - span []uint32 - reachables BlockMap[interval] -} - -type interval uint32 - -const ( - flagMask = 1 << 31 - numBits = 20 - lengthBits = 32 - numBits - 1 - lengthMask = (1<>numBits - } else { - // large interval - i++ - start = uint32(inv & numMask) - end = uint32(r[i]) - } - if idx >= start && idx <= end { - return true - } - } - return false -} - -func (c closure) reachable(id int) []interval { - return c.reachables[c.span[id]:c.span[id+1]] -} - -func (c closure) walk(current *BasicBlock, b *BasicBlock, visited []bool) { - // TODO(dh): the 'current' argument seems to be unused - // TODO(dh): there's no reason for this to be a method - visited[b.Index] = true - for _, succ := range b.Succs { - if visited[succ.Index] { - continue - } - visited[succ.Index] = true - c.walk(current, succ, visited) - } -} - -func transitiveClosure(fn *Function) *closure { - reachable := make(BlockMap[bool], len(fn.Blocks)) - c := &closure{} - c.span = make([]uint32, len(fn.Blocks)+1) - - addInterval := func(start, end uint32) { - if l := end - start; l <= 1<= desc.firstUnliftable { - continue - } - hasLiftable := false - switch instr := instr.(type) { - case *Store: - if instr.Val != alloc { - desc.hasLiftableOther = true - hasLiftable = true - } - case *Load: - desc.hasLiftableLoad = true - hasLiftable = true - case *DebugRef: - desc.hasLiftableOther = true - } - if hasLiftable { - if int(instr.ID()) > desc.lastLiftable { - desc.lastLiftable = int(instr.ID()) - } - } - } - - for i := range blocks { - // Update firstUnliftable to be one after lastLiftable. We do this to include the unliftable's preceding - // DebugRefs in the renaming. - if blocks[i].lastLiftable == -1 && !blocks[i].storeInPreds { - // There are no liftable instructions (for this alloc) in this block. Set firstUnliftable to the - // first non-head instruction to avoid inserting the store before phi instructions, which would - // fail validation. - first := -1 - instrLoop: - for i, instr := range fn.Blocks[i].Instrs { - switch instr.(type) { - case *Phi, *Sigma: - default: - first = i - break instrLoop - } - } - blocks[i].firstUnliftable = first - } else { - blocks[i].firstUnliftable = blocks[i].lastLiftable + 1 - } - } - - // If a block is reachable by a (partially) unliftable block, then the entirety of the block is unliftable. In that - // case, stores have to be inserted in the predecessors. - // - // TODO(dh): this isn't always necessary. If the block is reachable by itself, i.e. part of a loop, then if the - // Alloc instruction is itself part of that loop, then there is a subset of instructions in the loop that can be - // lifted. For example: - // - // for { - // x := 42 - // println(x) - // escape(&x) - // } - // - // The x that escapes in one iteration of the loop isn't the same x that we read from on the next iteration. 
- seen := make(BlockMap[bool], len(fn.Blocks)) - var dfs func(b *BasicBlock) - dfs = func(b *BasicBlock) { - if seen[b.Index] { - return - } - seen[b.Index] = true - desc := &blocks[b.Index] - desc.hasLiftableLoad = false - desc.hasLiftableOther = false - desc.isUnliftable = true - desc.firstUnliftable = 0 - desc.storeInPreds = true - for _, succ := range b.Succs { - dfs(succ) - } - } - for _, b := range fn.Blocks { - if blocks[b.Index].isUnliftable { - for _, succ := range b.Succs { - dfs(succ) - } - } - } - - hasLiftableLoad := false - hasLiftableOther := false - hasUnliftable := false - for _, b := range fn.Blocks { - desc := blocks[b.Index] - hasLiftableLoad = hasLiftableLoad || desc.hasLiftableLoad - hasLiftableOther = hasLiftableOther || desc.hasLiftableOther - if desc.isUnliftable { - hasUnliftable = true - } - } - if !hasLiftableLoad && !hasLiftableOther { - // There are no liftable uses - return false - } else if !hasUnliftable { - // The alloc is entirely liftable without splitting - return true - } else if !hasLiftableLoad { - // The alloc is not entirely liftable, and the only liftable uses are stores. While some of those stores could - // get lifted away, it would also lead to an infinite loop when lifting to a fixpoint, because the newly created - // allocs also get stored into repeatable and that's their only liftable uses. - return false - } - - // We need to insert stores for the new alloc. If a (partially) unliftable block has no unliftable - // predecessors and the use isn't in a phi node, then the store can be inserted right before the unliftable use. - // Otherwise, stores have to be inserted at the end of all liftable predecessors. - - newAlloc := &Alloc{Heap: true} - newAlloc.setBlock(alloc.block) - newAlloc.setType(alloc.typ) - newAlloc.setSource(alloc.source) - newAlloc.index = -1 - newAlloc.comment = "split alloc" - - { - work := instructions[alloc.block.Index] - work.insertInstructions[alloc] = append(work.insertInstructions[alloc], newAlloc) - } - - predHasStore := make(BlockMap[bool], len(fn.Blocks)) - for _, b := range fn.Blocks { - desc := &blocks[b.Index] - bWork := &instructions[b.Index] - - if desc.isUnliftable { - bWork.renameAllocs = append(bWork.renameAllocs, struct { - from *Alloc - to *Alloc - startingAt int - }{ - alloc, newAlloc, int(desc.firstUnliftable), - }) - } - - if !desc.isUnliftable { - continue - } - - propagate := func(in *BasicBlock, before Instruction) { - load := &Load{ - X: alloc, - } - store := &Store{ - Addr: newAlloc, - Val: load, - } - load.setType(deref(alloc.typ)) - load.setBlock(in) - load.comment = "split alloc" - store.setBlock(in) - updateOperandReferrers(load) - updateOperandReferrers(store) - store.comment = "split alloc" - - entry := &instructions[in.Index] - entry.insertInstructions[before] = append(entry.insertInstructions[before], load, store) - } - - if desc.storeInPreds { - // emit stores at the end of liftable preds - for _, pred := range b.Preds { - if blocks[pred.Index].isUnliftable { - continue - } - - if !alloc.block.Dominates(pred) { - // Consider this cfg: - // - // 1 - // /| - // / | - // ↙ ↓ - // 2--→3 - // - // with an Alloc in block 2. It doesn't make sense to insert a store in block 1 for the jump to - // block 3, because 1 can never see the Alloc in the first place. - // - // Ignoring phi nodes, an Alloc always dominates all of its uses, and phi nodes don't matter here, - // because for the incoming edges that do matter, we do emit the stores. 
- - continue - } - - if predHasStore[pred.Index] { - // Don't generate redundant propagations. Not only is it unnecessary, it can lead to infinite loops - // when trying to lift to a fix point, because redundant stores are liftable. - continue - } - - predHasStore[pred.Index] = true - - before := pred.Instrs[len(pred.Instrs)-1] - propagate(pred, before) - } - } else { - // emit store before the first unliftable use - before := b.Instrs[desc.firstUnliftable] - propagate(b, before) - } - } - - return true -} - -// liftAlloc lifts alloc into registers and populates newPhis and newSigmas with all the φ- and σ-nodes it may require. -func liftAlloc(closure *closure, df domFrontier, rdf postDomFrontier, alloc *Alloc, newPhis BlockMap[[]newPhi], newSigmas BlockMap[[]newSigma]) { - fn := alloc.Parent() - - defblocks := fn.blockset(0) - useblocks := fn.blockset(1) - Aphi := fn.blockset(2) - Asigma := fn.blockset(3) - W := fn.blockset(4) - - // Compute defblocks, the set of blocks containing a - // definition of the alloc cell. - for _, instr := range *alloc.Referrers() { - switch instr := instr.(type) { - case *Store: - defblocks.Add(instr.Block()) - case *Load: - useblocks.Add(instr.Block()) - for _, ref := range *instr.Referrers() { - useblocks.Add(ref.Block()) - } - } - } - // The Alloc itself counts as a (zero) definition of the cell. - defblocks.Add(alloc.Block()) - - if debugLifting { - fmt.Fprintln(os.Stderr, "\tlifting ", alloc, alloc.Name()) - } - - // Φ-insertion. - // - // What follows is the body of the main loop of the insert-φ - // function described by Cytron et al, but instead of using - // counter tricks, we just reset the 'hasAlready' and 'work' - // sets each iteration. These are bitmaps so it's pretty cheap. - - // Initialize W and work to defblocks. - - for change := true; change; { - change = false - { - // Traverse iterated dominance frontier, inserting φ-nodes. - W.Set(defblocks) - - for i := W.Take(); i != -1; i = W.Take() { - n := fn.Blocks[i] - for _, y := range df[n.Index] { - if Aphi.Add(y) { - if len(*alloc.Referrers()) == 0 { - continue - } - live := false - if closure == nil { - live = true - } else { - for _, ref := range *alloc.Referrers() { - if _, ok := ref.(*Load); ok { - if closure.has(y, ref.Block()) { - live = true - break - } - } - } - } - if !live { - continue - } - - // Create φ-node. - // It will be prepended to v.Instrs later, if needed. 
- phi := &Phi{ - Edges: make([]Value, len(y.Preds)), - } - - phi.source = alloc.source - phi.setType(deref(alloc.Type())) - phi.block = y - if debugLifting { - fmt.Fprintf(os.Stderr, "\tplace %s = %s at block %s\n", phi.Name(), phi, y) - } - newPhis[y.Index] = append(newPhis[y.Index], newPhi{phi, alloc}) - - for _, p := range y.Preds { - useblocks.Add(p) - } - change = true - if defblocks.Add(y) { - W.Add(y) - } - } - } - } - } - - { - W.Set(useblocks) - for i := W.Take(); i != -1; i = W.Take() { - n := fn.Blocks[i] - for _, y := range rdf[n.Index] { - if Asigma.Add(y) { - sigmas := make([]*Sigma, 0, len(y.Succs)) - anyLive := false - for _, succ := range y.Succs { - live := false - for _, ref := range *alloc.Referrers() { - if closure == nil || closure.has(succ, ref.Block()) { - live = true - anyLive = true - break - } - } - if live { - sigma := &Sigma{ - From: y, - X: alloc, - } - sigma.source = alloc.source - sigma.setType(deref(alloc.Type())) - sigma.block = succ - sigmas = append(sigmas, sigma) - } else { - sigmas = append(sigmas, nil) - } - } - - if anyLive { - newSigmas[y.Index] = append(newSigmas[y.Index], newSigma{alloc, sigmas}) - for _, s := range y.Succs { - defblocks.Add(s) - } - change = true - if useblocks.Add(y) { - W.Add(y) - } - } - } - } - } - } - } -} - -// replaceAll replaces all intraprocedural uses of x with y, -// updating x.Referrers and y.Referrers. -// Precondition: x.Referrers() != nil, i.e. x must be local to some function. -func replaceAll(x, y Value) { - var rands []*Value - pxrefs := x.Referrers() - pyrefs := y.Referrers() - for _, instr := range *pxrefs { - switch instr := instr.(type) { - case *CompositeValue: - // Special case CompositeValue because it might have very large lists of operands - // - // OPT(dh): this loop is still expensive for large composite values - for i, rand := range instr.Values { - if rand == x { - instr.Values[i] = y - } - } - default: - rands = instr.Operands(rands[:0]) // recycle storage - for _, rand := range rands { - if *rand != nil { - if *rand == x { - *rand = y - } - } - } - } - if pyrefs != nil { - *pyrefs = append(*pyrefs, instr) // dups ok - } - } - *pxrefs = nil // x is now unreferenced -} - -func replace(instr Instruction, x, y Value) { - args := instr.Operands(nil) - matched := false - for _, arg := range args { - if *arg == x { - *arg = y - matched = true - } - } - if matched { - yrefs := y.Referrers() - if yrefs != nil { - *yrefs = append(*yrefs, instr) - } - - xrefs := x.Referrers() - if xrefs != nil { - *xrefs = removeInstr(*xrefs, instr) - } - } -} - -// renamed returns the value to which alloc is being renamed, -// constructing it lazily if it's the implicit zero initialization. 
-func renamed(fn *Function, renaming []Value, alloc *Alloc) Value { - v := renaming[alloc.index] - if v == nil { - v = emitConst(fn, zeroConst(deref(alloc.Type()))) - renaming[alloc.index] = v - } - return v -} - -func copyValue(v Value, why Instruction, info CopyInfo) *Copy { - c := &Copy{ - X: v, - Why: why, - Info: info, - } - if refs := v.Referrers(); refs != nil { - *refs = append(*refs, c) - } - c.setType(v.Type()) - c.setSource(v.Source()) - return c -} - -func splitOnNewInformation(u *BasicBlock, renaming *StackMap) { - renaming.Push() - defer renaming.Pop() - - rename := func(v Value, why Instruction, info CopyInfo, i int) { - c := copyValue(v, why, info) - c.setBlock(u) - renaming.Set(v, c) - u.Instrs = append(u.Instrs, nil) - copy(u.Instrs[i+2:], u.Instrs[i+1:]) - u.Instrs[i+1] = c - } - - replacement := func(v Value) (Value, bool) { - r, ok := renaming.Get(v) - if !ok { - return nil, false - } - for { - rr, ok := renaming.Get(r) - if !ok { - // Store replacement in the map so that future calls to replacement(v) don't have to go through the - // iterative process again. - renaming.Set(v, r) - return r, true - } - r = rr - } - } - - var hasInfo func(v Value, info CopyInfo) bool - hasInfo = func(v Value, info CopyInfo) bool { - switch v := v.(type) { - case *Copy: - return (v.Info&info) == info || hasInfo(v.X, info) - case *FieldAddr, *IndexAddr, *TypeAssert, *MakeChan, *MakeMap, *MakeSlice, *Alloc: - return info == CopyInfoNotNil - case Member, *Builtin: - return info == CopyInfoNotNil - case *Sigma: - return hasInfo(v.X, info) - default: - return false - } - } - - var args []*Value - for i := 0; i < len(u.Instrs); i++ { - instr := u.Instrs[i] - if instr == nil { - continue - } - args = instr.Operands(args[:0]) - for _, arg := range args { - if *arg == nil { - continue - } - if r, ok := replacement(*arg); ok { - *arg = r - replace(instr, *arg, r) - } - } - - // TODO write some bits on why we copy values instead of encoding the actual control flow and panics - - switch instr := instr.(type) { - case *IndexAddr: - // Note that we rename instr.Index and instr.X even if they're already copies, because unique combinations - // of X and Index may lead to unique information. - - // OPT we should rename both variables at once and avoid one memmove - rename(instr.Index, instr, CopyInfoNotNegative, i) - rename(instr.X, instr, CopyInfoNotNil, i) - i += 2 // skip over instructions we just inserted - case *FieldAddr: - if !hasInfo(instr.X, CopyInfoNotNil) { - rename(instr.X, instr, CopyInfoNotNil, i) - i++ - } - case *TypeAssert: - // If we've already type asserted instr.X without comma-ok before, then it can only contain a single type, - // and successive type assertions, no matter the type, don't tell us anything new. 
- if !hasInfo(instr.X, CopyInfoNotNil|CopyInfoSingleConcreteType) { - rename(instr.X, instr, CopyInfoNotNil|CopyInfoSingleConcreteType, i) - i++ // skip over instruction we just inserted - } - case *Load: - if !hasInfo(instr.X, CopyInfoNotNil) { - rename(instr.X, instr, CopyInfoNotNil, i) - i++ - } - case *Store: - if !hasInfo(instr.Addr, CopyInfoNotNil) { - rename(instr.Addr, instr, CopyInfoNotNil, i) - i++ - } - case *MapUpdate: - if !hasInfo(instr.Map, CopyInfoNotNil) { - rename(instr.Map, instr, CopyInfoNotNil, i) - i++ - } - case CallInstruction: - off := 0 - if !instr.Common().IsInvoke() && !hasInfo(instr.Common().Value, CopyInfoNotNil) { - rename(instr.Common().Value, instr, CopyInfoNotNil, i) - off++ - } - if f, ok := instr.Common().Value.(*Builtin); ok { - switch f.name { - case "close": - arg := instr.Common().Args[0] - if !hasInfo(arg, CopyInfoNotNil|CopyInfoClosed) { - rename(arg, instr, CopyInfoNotNil|CopyInfoClosed, i) - off++ - } - } - } - i += off - case *SliceToArrayPointer: - // A slice to array pointer conversion tells us the minimum length of the slice - rename(instr.X, instr, CopyInfoUnspecified, i) - i++ - case *SliceToArray: - // A slice to array conversion tells us the minimum length of the slice - rename(instr.X, instr, CopyInfoUnspecified, i) - i++ - case *Slice: - // Slicing tells us about some of the bounds - off := 0 - if instr.Low == nil && instr.High == nil && instr.Max == nil { - // If all indices are unspecified, then we can only learn something about instr.X if it might've been - // nil. - if !hasInfo(instr.X, CopyInfoNotNil) { - rename(instr.X, instr, CopyInfoUnspecified, i) - off++ - } - } else { - rename(instr.X, instr, CopyInfoUnspecified, i) - off++ - } - // We copy the indices even if we already know they are not negative, because we can associate numeric - // ranges with them. - if instr.Low != nil { - rename(instr.Low, instr, CopyInfoNotNegative, i) - off++ - } - if instr.High != nil { - rename(instr.High, instr, CopyInfoNotNegative, i) - off++ - } - if instr.Max != nil { - rename(instr.Max, instr, CopyInfoNotNegative, i) - off++ - } - i += off - case *StringLookup: - rename(instr.X, instr, CopyInfoUnspecified, i) - rename(instr.Index, instr, CopyInfoNotNegative, i) - i += 2 - case *Recv: - if !hasInfo(instr.Chan, CopyInfoNotNil) { - // Receiving from a nil channel never completes - rename(instr.Chan, instr, CopyInfoNotNil, i) - i++ - } - case *Send: - if !hasInfo(instr.Chan, CopyInfoNotNil) { - // Sending to a nil channel never completes. Sending to a closed channel panics, but whether a channel - // is closed isn't local to this function, so we didn't learn anything. - rename(instr.Chan, instr, CopyInfoNotNil, i) - i++ - } - } - } - - for _, v := range u.dom.children { - splitOnNewInformation(v, renaming) - } -} - -// rename implements the Cytron et al-based SSI renaming algorithm, a -// preorder traversal of the dominator tree replacing all loads of -// Alloc cells with the value stored to that cell by the dominating -// store instruction. -// -// renaming is a map from *Alloc (keyed by index number) to its -// dominating stored value; newPhis[x] is the set of new φ-nodes to be -// prepended to block x. -func rename(u *BasicBlock, renaming []Value, newPhis BlockMap[[]newPhi], newSigmas BlockMap[[]newSigma]) { - // Each φ-node becomes the new name for its associated Alloc. - for _, np := range newPhis[u.Index] { - phi := np.phi - alloc := np.alloc - renaming[alloc.index] = phi - } - - // Rename loads and stores of allocs. 
- for i, instr := range u.Instrs { - switch instr := instr.(type) { - case *Alloc: - if instr.index >= 0 { // store of zero to Alloc cell - // Replace dominated loads by the zero value. - renaming[instr.index] = nil - if debugLifting { - fmt.Fprintf(os.Stderr, "\tkill alloc %s\n", instr) - } - // Delete the Alloc. - u.Instrs[i] = nil - u.gaps++ - } - - case *Store: - if alloc, ok := instr.Addr.(*Alloc); ok && alloc.index >= 0 { // store to Alloc cell - // Replace dominated loads by the stored value. - renaming[alloc.index] = instr.Val - if debugLifting { - fmt.Fprintf(os.Stderr, "\tkill store %s; new value: %s\n", - instr, instr.Val.Name()) - } - if refs := instr.Addr.Referrers(); refs != nil { - *refs = removeInstr(*refs, instr) - } - if refs := instr.Val.Referrers(); refs != nil { - *refs = removeInstr(*refs, instr) - } - // Delete the Store. - u.Instrs[i] = nil - u.gaps++ - } - - case *Load: - if alloc, ok := instr.X.(*Alloc); ok && alloc.index >= 0 { // load of Alloc cell - // In theory, we wouldn't be able to replace loads directly, because a loaded value could be used in - // different branches, in which case it should be replaced with different sigma nodes. But we can't - // simply defer replacement, either, because then later stores might incorrectly affect this load. - // - // To avoid doing renaming on _all_ values (instead of just loads and stores like we're doing), we make - // sure during code generation that each load is only used in one block. For example, in constant switch - // statements, where the tag is only evaluated once, we store it in a temporary and load it for each - // comparison, so that we have individual loads to replace. - // - // Because we only rename stores and loads, the end result will not contain sigma nodes for all - // constants. Some constants may be used directly, e.g. in comparisons such as 'x == 5'. We may still - // end up inserting dead sigma nodes in branches, but these will never get used in renaming and will be - // cleaned up when we remove dead phis and sigmas. - newval := renamed(u.Parent(), renaming, alloc) - if debugLifting { - fmt.Fprintf(os.Stderr, "\tupdate load %s = %s with %s\n", - instr.Name(), instr, newval) - } - replaceAll(instr, newval) - u.Instrs[i] = nil - u.gaps++ - } - - case *DebugRef: - if x, ok := instr.X.(*Alloc); ok && x.index >= 0 { - if instr.IsAddr { - instr.X = renamed(u.Parent(), renaming, x) - instr.IsAddr = false - - // Add DebugRef to instr.X's referrers. - if refs := instr.X.Referrers(); refs != nil { - *refs = append(*refs, instr) - } - } else { - // A source expression denotes the address - // of an Alloc that was optimized away. - instr.X = nil - - // Delete the DebugRef. - u.Instrs[i] = nil - u.gaps++ - } - } - } - } - - // update all outgoing sigma nodes with the dominating store - for _, sigmas := range newSigmas[u.Index] { - for _, sigma := range sigmas.sigmas { - if sigma == nil { - continue - } - sigma.X = renamed(u.Parent(), renaming, sigmas.alloc) - } - } - - // For each φ-node in a CFG successor, rename the edge. 
- for succi, v := range u.Succs { - phis := newPhis[v.Index] - if len(phis) == 0 { - continue - } - i := v.predIndex(u) - for _, np := range phis { - phi := np.phi - alloc := np.alloc - // if there's a sigma node, use it, else use the dominating value - var newval Value - for _, sigmas := range newSigmas[u.Index] { - if sigmas.alloc == alloc && sigmas.sigmas[succi] != nil { - newval = sigmas.sigmas[succi] - break - } - } - if newval == nil { - newval = renamed(u.Parent(), renaming, alloc) - } - if debugLifting { - fmt.Fprintf(os.Stderr, "\tsetphi %s edge %s -> %s (#%d) (alloc=%s) := %s\n", - phi.Name(), u, v, i, alloc.Name(), newval.Name()) - } - phi.Edges[i] = newval - if prefs := newval.Referrers(); prefs != nil { - *prefs = append(*prefs, phi) - } - } - } - - // Continue depth-first recursion over domtree, pushing a - // fresh copy of the renaming map for each subtree. - r := make([]Value, len(renaming)) - for _, v := range u.dom.children { - copy(r, renaming) - - // on entry to a block, the incoming sigma nodes become the new values for their alloc - if idx := u.succIndex(v); idx != -1 { - for _, sigma := range newSigmas[u.Index] { - if sigma.sigmas[idx] != nil { - r[sigma.alloc.index] = sigma.sigmas[idx] - } - } - } - rename(v, r, newPhis, newSigmas) - } - -} - -func simplifyConstantCompositeValues(fn *Function) bool { - changed := false - - for _, b := range fn.Blocks { - n := 0 - for _, instr := range b.Instrs { - replaced := false - - if cv, ok := instr.(*CompositeValue); ok { - ac := &AggregateConst{} - ac.typ = cv.typ - replaced = true - for _, v := range cv.Values { - if c, ok := v.(Constant); ok { - ac.Values = append(ac.Values, c) - } else { - replaced = false - break - } - } - if replaced { - replaceAll(cv, emitConst(fn, ac)) - killInstruction(cv) - } - - } - - if replaced { - changed = true - } else { - b.Instrs[n] = instr - n++ - } - } - - clearInstrs(b.Instrs[n:]) - b.Instrs = b.Instrs[:n] - } - - return changed -} - -func updateOperandReferrers(instr Instruction) { - for _, op := range instr.Operands(nil) { - refs := (*op).Referrers() - if refs != nil { - *refs = append(*refs, instr) - } - } -} diff --git a/vendor/honnef.co/go/tools/go/ir/lvalue.go b/vendor/honnef.co/go/tools/go/ir/lvalue.go deleted file mode 100644 index 86eb4a5d1..000000000 --- a/vendor/honnef.co/go/tools/go/ir/lvalue.go +++ /dev/null @@ -1,175 +0,0 @@ -// Copyright 2013 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package ir - -// lvalues are the union of addressable expressions and map-index -// expressions. - -import ( - "go/ast" - "go/types" -) - -// An lvalue represents an assignable location that may appear on the -// left-hand side of an assignment. This is a generalization of a -// pointer to permit updates to elements of maps. -type lvalue interface { - store(fn *Function, v Value, source ast.Node) // stores v into the location - load(fn *Function, source ast.Node) Value // loads the contents of the location - address(fn *Function) Value // address of the location - typ() types.Type // returns the type of the location -} - -// An address is an lvalue represented by a true pointer. 
-type address struct { - addr Value - expr ast.Expr // source syntax of the value (not address) [debug mode] -} - -func (a *address) load(fn *Function, source ast.Node) Value { - return emitLoad(fn, a.addr, source) -} - -func (a *address) store(fn *Function, v Value, source ast.Node) { - store := emitStore(fn, a.addr, v, source) - if a.expr != nil { - // store.Val is v, converted for assignability. - emitDebugRef(fn, a.expr, store.Val, false) - } -} - -func (a *address) address(fn *Function) Value { - if a.expr != nil { - emitDebugRef(fn, a.expr, a.addr, true) - } - return a.addr -} - -func (a *address) typ() types.Type { - return deref(a.addr.Type()) -} - -type compositeElement struct { - cv *CompositeValue - idx int - t types.Type - expr ast.Expr -} - -func (ce *compositeElement) load(fn *Function, source ast.Node) Value { - panic("not implemented") -} - -func (ce *compositeElement) store(fn *Function, v Value, source ast.Node) { - v = emitConv(fn, v, ce.t, source) - ce.cv.Values[ce.idx] = v - if ce.expr != nil { - // store.Val is v, converted for assignability. - emitDebugRef(fn, ce.expr, v, false) - } -} - -func (ce *compositeElement) address(fn *Function) Value { - panic("not implemented") -} - -func (ce *compositeElement) typ() types.Type { - return ce.t -} - -// An element is an lvalue represented by m[k], the location of an -// element of a map. These locations are not addressable -// since pointers cannot be formed from them, but they do support -// load() and store(). -type element struct { - m, k Value // map - t types.Type // map element type -} - -func (e *element) load(fn *Function, source ast.Node) Value { - l := &MapLookup{ - X: e.m, - Index: e.k, - } - l.setType(e.t) - return fn.emit(l, source) -} - -func (e *element) store(fn *Function, v Value, source ast.Node) { - up := &MapUpdate{ - Map: e.m, - Key: e.k, - Value: emitConv(fn, v, e.t, source), - } - fn.emit(up, source) -} - -func (e *element) address(fn *Function) Value { - panic("map elements are not addressable") -} - -func (e *element) typ() types.Type { - return e.t -} - -// A lazyAddress is an lvalue whose address is the result of an instruction. -// These work like an *address except a new address.address() Value -// is created on each load, store and address call. -// A lazyAddress can be used to control when a side effect (nil pointer -// dereference, index out of bounds) of using a location happens. -type lazyAddress struct { - addr func(fn *Function) Value // emit to fn the computation of the address - t types.Type // type of the location - expr ast.Expr // source syntax of the value (not address) [debug mode] -} - -func (l *lazyAddress) load(fn *Function, source ast.Node) Value { - load := emitLoad(fn, l.addr(fn), source) - return load -} - -func (l *lazyAddress) store(fn *Function, v Value, source ast.Node) { - store := emitStore(fn, l.addr(fn), v, source) - if l.expr != nil { - // store.Val is v, converted for assignability. - emitDebugRef(fn, l.expr, store.Val, false) - } -} - -func (l *lazyAddress) address(fn *Function) Value { - addr := l.addr(fn) - if l.expr != nil { - emitDebugRef(fn, l.expr, addr, true) - } - return addr -} - -func (l *lazyAddress) typ() types.Type { return l.t } - -// A blank is a dummy variable whose name is "_". -// It is not reified: loads are illegal and stores are ignored. 
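The element lvalue above exists because map elements support loads and stores but are not addressable. In ordinary Go that corresponds to the following; the commented-out line would be rejected by the compiler.

package example

// mapElements shows why map-index expressions need their own lvalue kind:
// assignment lowers to a MapUpdate instruction, reading lowers to a
// MapLookup instruction, and taking an element's address is not allowed.
func mapElements() int {
	m := map[string]int{}
	m["a"] = 1  // store: becomes a MapUpdate
	x := m["a"] // load: becomes a MapLookup
	// _ = &m["a"] // invalid operation: cannot take the address of m["a"]
	return x
}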
-type blank struct{} - -func (bl blank) load(fn *Function, source ast.Node) Value { - panic("blank.load is illegal") -} - -func (bl blank) store(fn *Function, v Value, source ast.Node) { - s := &BlankStore{ - Val: v, - } - fn.emit(s, source) -} - -func (bl blank) address(fn *Function) Value { - panic("blank var is not addressable") -} - -func (bl blank) typ() types.Type { - // This should be the type of the blank Ident; the typechecker - // doesn't provide this yet, but fortunately, we don't need it - // yet either. - panic("blank.typ is unimplemented") -} diff --git a/vendor/honnef.co/go/tools/go/ir/methods.go b/vendor/honnef.co/go/tools/go/ir/methods.go deleted file mode 100644 index b7903c847..000000000 --- a/vendor/honnef.co/go/tools/go/ir/methods.go +++ /dev/null @@ -1,241 +0,0 @@ -// Copyright 2013 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package ir - -// This file defines utilities for population of method sets. - -import ( - "fmt" - "go/types" - - "honnef.co/go/tools/analysis/lint" -) - -// MethodValue returns the Function implementing method sel, building -// wrapper methods on demand. It returns nil if sel denotes an -// abstract (interface) method. -// -// Precondition: sel.Kind() == MethodVal. -// -// Thread-safe. -// -// EXCLUSIVE_LOCKS_ACQUIRED(prog.methodsMu) -func (prog *Program) MethodValue(sel *types.Selection) *Function { - if sel.Kind() != types.MethodVal { - panic(fmt.Sprintf("MethodValue(%s) kind != MethodVal", sel)) - } - T := sel.Recv() - if isInterface(T) { - return nil // abstract method - } - if prog.mode&LogSource != 0 { - defer logStack("MethodValue %s %v", T, sel)() - } - - prog.methodsMu.Lock() - defer prog.methodsMu.Unlock() - - return prog.addMethod(prog.createMethodSet(T), sel) -} - -// LookupMethod returns the implementation of the method of type T -// identified by (pkg, name). It returns nil if the method exists but -// is abstract, and panics if T has no such method. -func (prog *Program) LookupMethod(T types.Type, pkg *types.Package, name string) *Function { - sel := prog.MethodSets.MethodSet(T).Lookup(pkg, name) - if sel == nil { - panic(fmt.Sprintf("%s has no method %s", T, types.Id(pkg, name))) - } - return prog.MethodValue(sel) -} - -// methodSet contains the (concrete) methods of a non-interface type. -type methodSet struct { - mapping map[string]*Function // populated lazily - complete bool // mapping contains all methods -} - -// Precondition: !isInterface(T). 
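MethodValue and LookupMethod above resolve a method selection to its concrete *Function, building wrapper methods on demand. A hedged sketch of a caller follows; prog, T and pkg are assumed to come from an already-built program, LookupMethod panics if T has no method of that name, and nil is returned for abstract (interface) methods.

package example

import (
	"fmt"
	"go/types"

	"honnef.co/go/tools/go/ir"
)

// concreteString returns the concrete implementation of T's String method,
// or nil if the method is abstract.
func concreteString(prog *ir.Program, T types.Type, pkg *types.Package) *ir.Function {
	fn := prog.LookupMethod(T, pkg, "String")
	if fn != nil {
		fmt.Println("String is implemented by", fn.Name())
	}
	return fn
}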
-// EXCLUSIVE_LOCKS_REQUIRED(prog.methodsMu) -func (prog *Program) createMethodSet(T types.Type) *methodSet { - mset, ok := prog.methodSets.At(T) - if !ok { - mset = &methodSet{mapping: make(map[string]*Function)} - prog.methodSets.Set(T, mset) - } - return mset -} - -// EXCLUSIVE_LOCKS_REQUIRED(prog.methodsMu) -func (prog *Program) addMethod(mset *methodSet, sel *types.Selection) *Function { - if sel.Kind() == types.MethodExpr { - panic(sel) - } - id := sel.Obj().Id() - fn := mset.mapping[id] - if fn == nil { - obj := sel.Obj().(*types.Func) - - needsPromotion := len(sel.Index()) > 1 - needsIndirection := !isPointer(recvType(obj)) && isPointer(sel.Recv()) - if needsPromotion || needsIndirection { - fn = makeWrapper(prog, sel) - } else { - fn = prog.declaredFunc(obj) - } - if fn.Signature.Recv() == nil { - panic(fn) // missing receiver - } - mset.mapping[id] = fn - } - return fn -} - -// RuntimeTypes returns a new unordered slice containing all -// concrete types in the program for which a complete (non-empty) -// method set is required at run-time. -// -// Thread-safe. -// -// EXCLUSIVE_LOCKS_ACQUIRED(prog.methodsMu) -func (prog *Program) RuntimeTypes() []types.Type { - prog.methodsMu.Lock() - defer prog.methodsMu.Unlock() - - var res []types.Type - prog.methodSets.Iterate(func(T types.Type, v *methodSet) { - if v.complete { - res = append(res, T) - } - }) - return res -} - -// declaredFunc returns the concrete function/method denoted by obj. -// Panic ensues if there is none. -func (prog *Program) declaredFunc(obj *types.Func) *Function { - if origin := obj.Origin(); origin != obj { - // Calling method on instantiated type, create a wrapper that calls the generic type's method - base := prog.packageLevelValue(origin) - return makeInstance(prog, base.(*Function), obj.Type().(*types.Signature), nil) - } else { - if v := prog.packageLevelValue(obj); v != nil { - return v.(*Function) - } - } - panic("no concrete method: " + obj.String()) -} - -// needMethodsOf ensures that runtime type information (including the -// complete method set) is available for the specified type T and all -// its subcomponents. -// -// needMethodsOf must be called for at least every type that is an -// operand of some MakeInterface instruction, and for the type of -// every exported package member. -// -// Precondition: T is not a method signature (*Signature with Recv()!=nil). -// -// Thread-safe. (Called via emitConv from multiple builder goroutines.) -// -// TODO(adonovan): make this faster. It accounts for 20% of SSA build time. -// -// EXCLUSIVE_LOCKS_ACQUIRED(prog.methodsMu) -func (prog *Program) needMethodsOf(T types.Type) { - prog.methodsMu.Lock() - prog.needMethods(T, false) - prog.methodsMu.Unlock() -} - -// Precondition: T is not a method signature (*Signature with Recv()!=nil). -// Recursive case: skip => don't create methods for T. -// -// EXCLUSIVE_LOCKS_REQUIRED(prog.methodsMu) -func (prog *Program) needMethods(T types.Type, skip bool) { - // Each package maintains its own set of types it has visited. - if prevSkip, ok := prog.runtimeTypes.At(T); ok { - // needMethods(T) was previously called - if !prevSkip || skip { - return // already seen, with same or false 'skip' value - } - } - prog.runtimeTypes.Set(T, skip) - - tmset := prog.MethodSets.MethodSet(T) - - if !skip && !isInterface(T) && tmset.Len() > 0 { - // Create methods of T. 
- mset := prog.createMethodSet(T) - if !mset.complete { - mset.complete = true - n := tmset.Len() - for i := 0; i < n; i++ { - prog.addMethod(mset, tmset.At(i)) - } - } - } - - // Recursion over signatures of each method. - for i := 0; i < tmset.Len(); i++ { - sig := tmset.At(i).Type().(*types.Signature) - prog.needMethods(sig.Params(), false) - prog.needMethods(sig.Results(), false) - } - - switch t := T.(type) { - case *types.Basic: - // nop - - case *types.Interface, *types.TypeParam: - // nop---handled by recursion over method set. - - case *types.Pointer: - prog.needMethods(t.Elem(), false) - - case *types.Slice: - prog.needMethods(t.Elem(), false) - - case *types.Chan: - prog.needMethods(t.Elem(), false) - - case *types.Map: - prog.needMethods(t.Key(), false) - prog.needMethods(t.Elem(), false) - - case *types.Signature: - if t.Recv() != nil { - panic(fmt.Sprintf("Signature %s has Recv %s", t, t.Recv())) - } - prog.needMethods(t.Params(), false) - prog.needMethods(t.Results(), false) - - case *types.Named: - // A pointer-to-named type can be derived from a named - // type via reflection. It may have methods too. - prog.needMethods(types.NewPointer(t), false) - - // Consider 'type T struct{S}' where S has methods. - // Reflection provides no way to get from T to struct{S}, - // only to S, so the method set of struct{S} is unwanted, - // so set 'skip' flag during recursion. - prog.needMethods(t.Underlying(), true) - - case *types.Array: - prog.needMethods(t.Elem(), false) - - case *types.Struct: - for i, n := 0, t.NumFields(); i < n; i++ { - prog.needMethods(t.Field(i).Type(), false) - } - - case *types.Tuple: - for i, n := 0, t.Len(); i < n; i++ { - prog.needMethods(t.At(i).Type(), false) - } - - default: - lint.ExhaustiveTypeSwitch(T) - } -} diff --git a/vendor/honnef.co/go/tools/go/ir/mode.go b/vendor/honnef.co/go/tools/go/ir/mode.go deleted file mode 100644 index 15b5a33f7..000000000 --- a/vendor/honnef.co/go/tools/go/ir/mode.go +++ /dev/null @@ -1,104 +0,0 @@ -// Copyright 2015 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package ir - -// This file defines the BuilderMode type and its command-line flag. - -import ( - "bytes" - "fmt" -) - -// BuilderMode is a bitmask of options for diagnostics and checking. -// -// *BuilderMode satisfies the flag.Value interface. Example: -// -// var mode = ir.BuilderMode(0) -// func init() { flag.Var(&mode, "build", ir.BuilderModeDoc) } -type BuilderMode uint - -const ( - PrintPackages BuilderMode = 1 << iota // Print package inventory to stdout - PrintFunctions // Print function IR code to stdout - PrintSource // Print source code when printing function IR - LogSource // Log source locations as IR builder progresses - SanityCheckFunctions // Perform sanity checking of function bodies - NaiveForm // Build naïve IR form: don't replace local loads/stores with registers - GlobalDebug // Enable debug info for all packages - SplitAfterNewInformation // Split live range after we learn something new about a value -) - -const BuilderModeDoc = `Options controlling the IR builder. -The value is a sequence of zero or more of these symbols: -C perform sanity [C]hecking of the IR form. -D include [D]ebug info for every function. -P print [P]ackage inventory. -F print [F]unction IR code. -A print [A]ST nodes responsible for IR instructions -S log [S]ource locations as IR builder progresses. 
-N build [N]aive IR form: don't replace local loads/stores with registers. -I Split live range after a value is used as slice or array index -` - -func (m BuilderMode) String() string { - var buf bytes.Buffer - if m&GlobalDebug != 0 { - buf.WriteByte('D') - } - if m&PrintPackages != 0 { - buf.WriteByte('P') - } - if m&PrintFunctions != 0 { - buf.WriteByte('F') - } - if m&PrintSource != 0 { - buf.WriteByte('A') - } - if m&LogSource != 0 { - buf.WriteByte('S') - } - if m&SanityCheckFunctions != 0 { - buf.WriteByte('C') - } - if m&NaiveForm != 0 { - buf.WriteByte('N') - } - if m&SplitAfterNewInformation != 0 { - buf.WriteByte('I') - } - return buf.String() -} - -// Set parses the flag characters in s and updates *m. -func (m *BuilderMode) Set(s string) error { - var mode BuilderMode - for _, c := range s { - switch c { - case 'D': - mode |= GlobalDebug - case 'P': - mode |= PrintPackages - case 'F': - mode |= PrintFunctions - case 'A': - mode |= PrintSource - case 'S': - mode |= LogSource - case 'C': - mode |= SanityCheckFunctions - case 'N': - mode |= NaiveForm - case 'I': - mode |= SplitAfterNewInformation - default: - return fmt.Errorf("unknown BuilderMode option: %q", c) - } - } - *m = mode - return nil -} - -// Get returns m. -func (m BuilderMode) Get() interface{} { return m } diff --git a/vendor/honnef.co/go/tools/go/ir/print.go b/vendor/honnef.co/go/tools/go/ir/print.go deleted file mode 100644 index ad23d16dc..000000000 --- a/vendor/honnef.co/go/tools/go/ir/print.go +++ /dev/null @@ -1,507 +0,0 @@ -// Copyright 2013 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package ir - -// This file implements the String() methods for all Value and -// Instruction types. - -import ( - "bytes" - "fmt" - "go/types" - "io" - "reflect" - "sort" - - "honnef.co/go/tools/go/types/typeutil" -) - -// relName returns the name of v relative to i. -// In most cases, this is identical to v.Name(), but references to -// Functions (including methods) and Globals use RelString and -// all types are displayed with relType, so that only cross-package -// references are package-qualified. -func relName(v Value, i Instruction) string { - if v == nil { - return "" - } - var from *types.Package - if i != nil { - from = i.Parent().pkg() - } - switch v := v.(type) { - case Member: // *Function or *Global - return v.RelString(from) - } - return v.Name() -} - -func relType(t types.Type, from *types.Package) string { - return types.TypeString(t, types.RelativeTo(from)) -} - -func relString(m Member, from *types.Package) string { - // NB: not all globals have an Object (e.g. init$guard), - // so use Package().Object not Object.Package(). - if pkg := m.Package().Pkg; pkg != nil && pkg != from { - return fmt.Sprintf("%s.%s", pkg.Path(), m.Name()) - } - return m.Name() -} - -// Value.String() -// -// This method is provided only for debugging. -// It never appears in disassembly, which uses Value.Name(). 
- -func (v *Parameter) String() string { - from := v.Parent().pkg() - return fmt.Sprintf("Parameter <%s> {%s}", relType(v.Type(), from), v.name) -} - -func (v *FreeVar) String() string { - from := v.Parent().pkg() - return fmt.Sprintf("FreeVar <%s> %s", relType(v.Type(), from), v.Name()) -} - -func (v *Builtin) String() string { - return fmt.Sprintf("Builtin %s", v.Name()) -} - -// Instruction.String() - -func (v *Alloc) String() string { - from := v.Parent().pkg() - storage := "Stack" - if v.Heap { - storage = "Heap" - } - return fmt.Sprintf("%sAlloc <%s>", storage, relType(v.Type(), from)) -} - -func (v *Sigma) String() string { - from := v.Parent().pkg() - s := fmt.Sprintf("Sigma <%s> [b%d] %s", relType(v.Type(), from), v.From.Index, v.X.Name()) - return s -} - -func (v *Phi) String() string { - var b bytes.Buffer - fmt.Fprintf(&b, "Phi <%s>", v.Type()) - for i, edge := range v.Edges { - b.WriteString(" ") - // Be robust against malformed CFG. - if v.block == nil { - b.WriteString("??") - continue - } - block := -1 - if i < len(v.block.Preds) { - block = v.block.Preds[i].Index - } - fmt.Fprintf(&b, "%d:", block) - edgeVal := "" // be robust - if edge != nil { - edgeVal = relName(edge, v) - } - b.WriteString(edgeVal) - } - return b.String() -} - -func printCall(v *CallCommon, prefix string, instr Instruction) string { - var b bytes.Buffer - if !v.IsInvoke() { - if value, ok := instr.(Value); ok { - fmt.Fprintf(&b, "%s <%s> %s", prefix, relType(value.Type(), instr.Parent().pkg()), relName(v.Value, instr)) - } else { - fmt.Fprintf(&b, "%s %s", prefix, relName(v.Value, instr)) - } - } else { - if value, ok := instr.(Value); ok { - fmt.Fprintf(&b, "%sInvoke <%s> %s.%s", prefix, relType(value.Type(), instr.Parent().pkg()), relName(v.Value, instr), v.Method.Name()) - } else { - fmt.Fprintf(&b, "%sInvoke %s.%s", prefix, relName(v.Value, instr), v.Method.Name()) - } - } - for _, arg := range v.TypeArgs { - b.WriteString(" ") - b.WriteString(relType(arg, instr.Parent().pkg())) - } - for _, arg := range v.Args { - b.WriteString(" ") - b.WriteString(relName(arg, instr)) - } - return b.String() -} - -func (c *CallCommon) String() string { - return printCall(c, "", nil) -} - -func (v *Call) String() string { - return printCall(&v.Call, "Call", v) -} - -func (v *BinOp) String() string { - return fmt.Sprintf("BinOp <%s> {%s} %s %s", relType(v.Type(), v.Parent().pkg()), v.Op.String(), relName(v.X, v), relName(v.Y, v)) -} - -func (v *UnOp) String() string { - return fmt.Sprintf("UnOp <%s> {%s} %s", relType(v.Type(), v.Parent().pkg()), v.Op.String(), relName(v.X, v)) -} - -func (v *Load) String() string { - return fmt.Sprintf("Load <%s> %s", relType(v.Type(), v.Parent().pkg()), relName(v.X, v)) -} - -func (v *Copy) String() string { - return fmt.Sprintf("Copy <%s> %s", relType(v.Type(), v.Parent().pkg()), relName(v.X, v)) -} - -func printConv(prefix string, v, x Value) string { - from := v.Parent().pkg() - return fmt.Sprintf("%s <%s> %s", - prefix, - relType(v.Type(), from), - relName(x, v.(Instruction))) -} - -func (v *ChangeType) String() string { return printConv("ChangeType", v, v.X) } -func (v *Convert) String() string { return printConv("Convert", v, v.X) } -func (v *ChangeInterface) String() string { return printConv("ChangeInterface", v, v.X) } -func (v *SliceToArrayPointer) String() string { return printConv("SliceToArrayPointer", v, v.X) } -func (v *SliceToArray) String() string { return printConv("SliceToArray", v, v.X) } -func (v *MakeInterface) String() string { return 
printConv("MakeInterface", v, v.X) } - -func (v *MakeClosure) String() string { - from := v.Parent().pkg() - var b bytes.Buffer - fmt.Fprintf(&b, "MakeClosure <%s> %s", relType(v.Type(), from), relName(v.Fn, v)) - if v.Bindings != nil { - for _, c := range v.Bindings { - b.WriteString(" ") - b.WriteString(relName(c, v)) - } - } - return b.String() -} - -func (v *MakeSlice) String() string { - from := v.Parent().pkg() - return fmt.Sprintf("MakeSlice <%s> %s %s", - relType(v.Type(), from), - relName(v.Len, v), - relName(v.Cap, v)) -} - -func (v *Slice) String() string { - from := v.Parent().pkg() - return fmt.Sprintf("Slice <%s> %s %s %s %s", - relType(v.Type(), from), relName(v.X, v), relName(v.Low, v), relName(v.High, v), relName(v.Max, v)) -} - -func (v *MakeMap) String() string { - res := "" - if v.Reserve != nil { - res = relName(v.Reserve, v) - } - from := v.Parent().pkg() - return fmt.Sprintf("MakeMap <%s> %s", relType(v.Type(), from), res) -} - -func (v *MakeChan) String() string { - from := v.Parent().pkg() - return fmt.Sprintf("MakeChan <%s> %s", relType(v.Type(), from), relName(v.Size, v)) -} - -func (v *FieldAddr) String() string { - from := v.Parent().pkg() - // v.X.Type() might be a pointer to a type parameter whose core type is a pointer to a struct - st := deref(typeutil.CoreType(deref(v.X.Type()))).Underlying().(*types.Struct) - // Be robust against a bad index. - name := "?" - if 0 <= v.Field && v.Field < st.NumFields() { - name = st.Field(v.Field).Name() - } - return fmt.Sprintf("FieldAddr <%s> [%d] (%s) %s", relType(v.Type(), from), v.Field, name, relName(v.X, v)) -} - -func (v *Field) String() string { - st := typeutil.CoreType(v.X.Type()).Underlying().(*types.Struct) - // Be robust against a bad index. - name := "?" - if 0 <= v.Field && v.Field < st.NumFields() { - name = st.Field(v.Field).Name() - } - from := v.Parent().pkg() - return fmt.Sprintf("Field <%s> [%d] (%s) %s", relType(v.Type(), from), v.Field, name, relName(v.X, v)) -} - -func (v *IndexAddr) String() string { - from := v.Parent().pkg() - return fmt.Sprintf("IndexAddr <%s> %s %s", relType(v.Type(), from), relName(v.X, v), relName(v.Index, v)) -} - -func (v *Index) String() string { - from := v.Parent().pkg() - return fmt.Sprintf("Index <%s> %s %s", relType(v.Type(), from), relName(v.X, v), relName(v.Index, v)) -} - -func (v *MapLookup) String() string { - from := v.Parent().pkg() - return fmt.Sprintf("MapLookup <%s> %s %s", relType(v.Type(), from), relName(v.X, v), relName(v.Index, v)) -} - -func (v *StringLookup) String() string { - from := v.Parent().pkg() - return fmt.Sprintf("StringLookup <%s> %s %s", relType(v.Type(), from), relName(v.X, v), relName(v.Index, v)) -} - -func (v *Range) String() string { - from := v.Parent().pkg() - return fmt.Sprintf("Range <%s> %s", relType(v.Type(), from), relName(v.X, v)) -} - -func (v *Next) String() string { - from := v.Parent().pkg() - return fmt.Sprintf("Next <%s> %s", relType(v.Type(), from), relName(v.Iter, v)) -} - -func (v *TypeAssert) String() string { - from := v.Parent().pkg() - return fmt.Sprintf("TypeAssert <%s> %s", relType(v.Type(), from), relName(v.X, v)) -} - -func (v *Extract) String() string { - from := v.Parent().pkg() - name := v.Tuple.Type().(*types.Tuple).At(v.Index).Name() - return fmt.Sprintf("Extract <%s> [%d] (%s) %s", relType(v.Type(), from), v.Index, name, relName(v.Tuple, v)) -} - -func (s *Jump) String() string { - // Be robust against malformed CFG. 
- block := -1 - if s.block != nil && len(s.block.Succs) == 1 { - block = s.block.Succs[0].Index - } - str := fmt.Sprintf("Jump → b%d", block) - if s.Comment() != "" { - str = fmt.Sprintf("%s # %s", str, s.Comment()) - } - return str -} - -func (s *Unreachable) String() string { - // Be robust against malformed CFG. - block := -1 - if s.block != nil && len(s.block.Succs) == 1 { - block = s.block.Succs[0].Index - } - return fmt.Sprintf("Unreachable → b%d", block) -} - -func (s *If) String() string { - // Be robust against malformed CFG. - tblock, fblock := -1, -1 - if s.block != nil && len(s.block.Succs) == 2 { - tblock = s.block.Succs[0].Index - fblock = s.block.Succs[1].Index - } - return fmt.Sprintf("If %s → b%d b%d", relName(s.Cond, s), tblock, fblock) -} - -func (s *ConstantSwitch) String() string { - var b bytes.Buffer - fmt.Fprintf(&b, "ConstantSwitch %s", relName(s.Tag, s)) - for _, cond := range s.Conds { - fmt.Fprintf(&b, " %s", relName(cond, s)) - } - fmt.Fprint(&b, " →") - for _, succ := range s.block.Succs { - fmt.Fprintf(&b, " b%d", succ.Index) - } - return b.String() -} - -func (v *CompositeValue) String() string { - var b bytes.Buffer - from := v.Parent().pkg() - fmt.Fprintf(&b, "CompositeValue <%s>", relType(v.Type(), from)) - if v.NumSet >= len(v.Values) { - // All values provided - fmt.Fprint(&b, " [all]") - } else if v.Bitmap.BitLen() == 0 { - // No values provided - fmt.Fprint(&b, " [none]") - } else { - // Some values provided - bits := []byte(fmt.Sprintf("%0*b", len(v.Values), &v.Bitmap)) - for i := 0; i < len(bits)/2; i++ { - o := len(bits) - 1 - i - bits[i], bits[o] = bits[o], bits[i] - } - fmt.Fprintf(&b, " [%s]", bits) - } - for _, vv := range v.Values { - fmt.Fprintf(&b, " %s", relName(vv, v)) - } - return b.String() -} - -func (s *TypeSwitch) String() string { - from := s.Parent().pkg() - var b bytes.Buffer - fmt.Fprintf(&b, "TypeSwitch <%s> %s", relType(s.typ, from), relName(s.Tag, s)) - for _, cond := range s.Conds { - fmt.Fprintf(&b, " %q", relType(cond, s.block.parent.pkg())) - } - return b.String() -} - -func (s *Go) String() string { - return printCall(&s.Call, "Go", s) -} - -func (s *Panic) String() string { - // Be robust against malformed CFG. 
- block := -1 - if s.block != nil && len(s.block.Succs) == 1 { - block = s.block.Succs[0].Index - } - return fmt.Sprintf("Panic %s → b%d", relName(s.X, s), block) -} - -func (s *Return) String() string { - var b bytes.Buffer - b.WriteString("Return") - for _, r := range s.Results { - b.WriteString(" ") - b.WriteString(relName(r, s)) - } - return b.String() -} - -func (*RunDefers) String() string { - return "RunDefers" -} - -func (s *Send) String() string { - return fmt.Sprintf("Send %s %s", relName(s.Chan, s), relName(s.X, s)) -} - -func (recv *Recv) String() string { - from := recv.Parent().pkg() - return fmt.Sprintf("Recv <%s> %s", relType(recv.Type(), from), relName(recv.Chan, recv)) -} - -func (s *Defer) String() string { - return printCall(&s.Call, "Defer", s) -} - -func (s *Select) String() string { - var b bytes.Buffer - for i, st := range s.States { - if i > 0 { - b.WriteString(", ") - } - if st.Dir == types.RecvOnly { - b.WriteString("<-") - b.WriteString(relName(st.Chan, s)) - } else { - b.WriteString(relName(st.Chan, s)) - b.WriteString("<-") - b.WriteString(relName(st.Send, s)) - } - } - non := "" - if !s.Blocking { - non = "Non" - } - from := s.Parent().pkg() - return fmt.Sprintf("Select%sBlocking <%s> [%s]", non, relType(s.Type(), from), b.String()) -} - -func (s *Store) String() string { - return fmt.Sprintf("Store {%s} %s %s", - s.Val.Type(), relName(s.Addr, s), relName(s.Val, s)) -} - -func (s *BlankStore) String() string { - return fmt.Sprintf("BlankStore %s", relName(s.Val, s)) -} - -func (s *MapUpdate) String() string { - return fmt.Sprintf("MapUpdate %s %s %s", relName(s.Map, s), relName(s.Key, s), relName(s.Value, s)) -} - -func (s *DebugRef) String() string { - p := s.Parent().Prog.Fset.Position(s.Pos()) - var descr interface{} - if s.object != nil { - descr = s.object // e.g. "var x int" - } else { - descr = reflect.TypeOf(s.Expr) // e.g. "*ast.CallExpr" - } - var addr string - if s.IsAddr { - addr = "address of " - } - return fmt.Sprintf("; %s%s @ %d:%d is %s", addr, descr, p.Line, p.Column, s.X.Name()) -} - -func (p *Package) String() string { - return "package " + p.Pkg.Path() -} - -var _ io.WriterTo = (*Package)(nil) // *Package implements io.Writer - -func (p *Package) WriteTo(w io.Writer) (int64, error) { - var buf bytes.Buffer - WritePackage(&buf, p) - n, err := w.Write(buf.Bytes()) - return int64(n), err -} - -// WritePackage writes to buf a human-readable summary of p. 
-func WritePackage(buf *bytes.Buffer, p *Package) { - fmt.Fprintf(buf, "%s:\n", p) - - var names []string - maxname := 0 - for name := range p.Members { - if l := len(name); l > maxname { - maxname = l - } - names = append(names, name) - } - - from := p.Pkg - sort.Strings(names) - for _, name := range names { - switch mem := p.Members[name].(type) { - case *NamedConst: - fmt.Fprintf(buf, " const %-*s %s = %s\n", - maxname, name, mem.Name(), mem.Value.RelString(from)) - - case *Function: - fmt.Fprintf(buf, " func %-*s %s\n", - maxname, name, relType(mem.Type(), from)) - - case *Type: - fmt.Fprintf(buf, " type %-*s %s\n", - maxname, name, relType(mem.Type().Underlying(), from)) - for _, meth := range typeutil.IntuitiveMethodSet(mem.Type(), &p.Prog.MethodSets) { - fmt.Fprintf(buf, " %s\n", types.SelectionString(meth, types.RelativeTo(from))) - } - - case *Global: - fmt.Fprintf(buf, " var %-*s %s\n", - maxname, name, relType(mem.Type().(*types.Pointer).Elem(), from)) - } - } - - fmt.Fprintf(buf, "\n") -} diff --git a/vendor/honnef.co/go/tools/go/ir/sanity.go b/vendor/honnef.co/go/tools/go/ir/sanity.go deleted file mode 100644 index b6c59c95f..000000000 --- a/vendor/honnef.co/go/tools/go/ir/sanity.go +++ /dev/null @@ -1,560 +0,0 @@ -// Copyright 2013 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package ir - -// An optional pass for sanity-checking invariants of the IR representation. -// Currently it checks CFG invariants but little at the instruction level. - -import ( - "fmt" - "go/types" - "io" - "os" - "strings" - - "honnef.co/go/tools/go/types/typeutil" -) - -type sanity struct { - reporter io.Writer - fn *Function - block *BasicBlock - instrs map[Instruction]struct{} - insane bool -} - -// sanityCheck performs integrity checking of the IR representation -// of the function fn and returns true if it was valid. Diagnostics -// are written to reporter if non-nil, os.Stderr otherwise. Some -// diagnostics are only warnings and do not imply a negative result. -// -// Sanity-checking is intended to facilitate the debugging of code -// transformation passes. -func sanityCheck(fn *Function, reporter io.Writer) bool { - if reporter == nil { - reporter = os.Stderr - } - return (&sanity{reporter: reporter}).checkFunction(fn) -} - -// mustSanityCheck is like sanityCheck but panics instead of returning -// a negative result. -func mustSanityCheck(fn *Function, reporter io.Writer) { - if !sanityCheck(fn, reporter) { - fn.WriteTo(os.Stderr) - panic("SanityCheck failed") - } -} - -func (s *sanity) diagnostic(prefix, format string, args ...interface{}) { - fmt.Fprintf(s.reporter, "%s: function %s", prefix, s.fn) - if s.block != nil { - fmt.Fprintf(s.reporter, ", block %s", s.block) - } - io.WriteString(s.reporter, ": ") - fmt.Fprintf(s.reporter, format, args...) - io.WriteString(s.reporter, "\n") -} - -func (s *sanity) errorf(format string, args ...interface{}) { - s.insane = true - s.diagnostic("Error", format, args...) -} - -func (s *sanity) warnf(format string, args ...interface{}) { - s.diagnostic("Warning", format, args...) -} - -// findDuplicate returns an arbitrary basic block that appeared more -// than once in blocks, or nil if all were unique. 
-func findDuplicate(blocks []*BasicBlock) *BasicBlock { - if len(blocks) < 2 { - return nil - } - if blocks[0] == blocks[1] { - return blocks[0] - } - // Slow path: - m := make(map[*BasicBlock]bool) - for _, b := range blocks { - if m[b] { - return b - } - m[b] = true - } - return nil -} - -func (s *sanity) checkInstr(idx int, instr Instruction) { - switch instr := instr.(type) { - case *If, *Jump, *Return, *Panic, *Unreachable, *ConstantSwitch: - s.errorf("control flow instruction not at end of block") - case *Sigma: - if idx > 0 { - prev := s.block.Instrs[idx-1] - if _, ok := prev.(*Sigma); !ok { - s.errorf("Sigma instruction follows a non-Sigma: %T", prev) - } - } - case *Phi: - if idx == 0 { - // It suffices to apply this check to just the first phi node. - if dup := findDuplicate(s.block.Preds); dup != nil { - s.errorf("phi node in block with duplicate predecessor %s", dup) - } - } else { - prev := s.block.Instrs[idx-1] - switch prev.(type) { - case *Phi, *Sigma: - default: - s.errorf("Phi instruction follows a non-Phi, non-Sigma: %T", prev) - } - } - if ne, np := len(instr.Edges), len(s.block.Preds); ne != np { - s.errorf("phi node has %d edges but %d predecessors", ne, np) - - } else { - for i, e := range instr.Edges { - if e == nil { - s.errorf("phi node '%v' has no value for edge #%d from %s", instr, i, s.block.Preds[i]) - } - } - } - - case *Alloc: - if !instr.Heap { - found := false - for _, l := range s.fn.Locals { - if l == instr { - found = true - break - } - } - if !found { - s.errorf("local alloc %s = %s does not appear in Function.Locals", instr.Name(), instr) - } - } - - case *BinOp: - case *Call: - case *ChangeInterface: - case *ChangeType: - case *SliceToArrayPointer: - case *SliceToArray: - case *Convert: - tsetInstrX := typeutil.NewTypeSet(instr.X.Type().Underlying()) - tsetInstr := typeutil.NewTypeSet(instr.Type().Underlying()) - ok1 := tsetInstr.Any(func(term *types.Term) bool { _, ok := term.Type().Underlying().(*types.Basic); return ok }) - ok2 := tsetInstrX.Any(func(term *types.Term) bool { _, ok := term.Type().Underlying().(*types.Basic); return ok }) - if !ok1 && !ok2 { - s.errorf("convert %s -> %s: at least one type set must contain basic type", instr.X.Type(), instr.Type()) - } - - case *Defer: - case *Extract: - case *Field: - case *FieldAddr: - case *Go: - case *Index: - case *IndexAddr: - case *MapLookup: - case *StringLookup: - case *MakeChan: - case *MakeClosure: - numFree := len(instr.Fn.(*Function).FreeVars) - numBind := len(instr.Bindings) - if numFree != numBind { - s.errorf("MakeClosure has %d Bindings for function %s with %d free vars", - numBind, instr.Fn, numFree) - - } - if recv := instr.Type().(*types.Signature).Recv(); recv != nil { - s.errorf("MakeClosure's type includes receiver %s", recv.Type()) - } - - case *MakeInterface: - case *MakeMap: - case *MakeSlice: - case *MapUpdate: - case *Next: - case *Range: - case *RunDefers: - case *Select: - case *Send: - case *Slice: - case *Store: - case *TypeAssert: - case *UnOp: - case *DebugRef: - case *BlankStore: - case *Load: - case *Parameter: - case *Const: - case *AggregateConst: - case *ArrayConst: - case *GenericConst: - case *Recv: - case *TypeSwitch: - case *CompositeValue: - default: - panic(fmt.Sprintf("Unknown instruction type: %T", instr)) - } - - if call, ok := instr.(CallInstruction); ok { - if call.Common().Signature() == nil { - s.errorf("nil signature: %s", call) - } - } - - // Check that value-defining instructions have valid types - // and a valid referrer list. 
- if v, ok := instr.(Value); ok { - t := v.Type() - if t == nil { - s.errorf("no type: %s = %s", v.Name(), v) - } else if b, ok := t.Underlying().(*types.Basic); ok && b.Info()&types.IsUntyped != 0 { - if _, ok := v.(*Const); !ok { - s.errorf("instruction has 'untyped' result: %s = %s : %s", v.Name(), v, t) - } - } - s.checkReferrerList(v) - } - - // Untyped constants are legal as instruction Operands(), - // for example: - // _ = "foo"[0] - // or: - // if wordsize==64 {...} - - // All other non-Instruction Values can be found via their - // enclosing Function or Package. -} - -func (s *sanity) checkFinalInstr(instr Instruction) { - switch instr := instr.(type) { - case *If: - if nsuccs := len(s.block.Succs); nsuccs != 2 { - s.errorf("If-terminated block has %d successors; expected 2", nsuccs) - return - } - if s.block.Succs[0] == s.block.Succs[1] { - s.errorf("If-instruction has same True, False target blocks: %s", s.block.Succs[0]) - return - } - - case *Jump: - if nsuccs := len(s.block.Succs); nsuccs != 1 { - s.errorf("Jump-terminated block has %d successors; expected 1", nsuccs) - return - } - - case *Return: - if nsuccs := len(s.block.Succs); nsuccs != 0 { - s.errorf("Return-terminated block has %d successors; expected none", nsuccs) - return - } - if na, nf := len(instr.Results), s.fn.Signature.Results().Len(); nf != na { - s.errorf("%d-ary return in %d-ary function", na, nf) - } - - case *Panic: - if nsuccs := len(s.block.Succs); nsuccs != 1 { - s.errorf("Panic-terminated block has %d successors; expected one", nsuccs) - return - } - - case *Unreachable: - if nsuccs := len(s.block.Succs); nsuccs != 1 { - s.errorf("Unreachable-terminated block has %d successors; expected one", nsuccs) - return - } - - case *ConstantSwitch: - - default: - s.errorf("non-control flow instruction at end of block") - } -} - -func (s *sanity) checkBlock(b *BasicBlock, index int) { - s.block = b - - if b.Index != index { - s.errorf("block has incorrect Index %d", b.Index) - } - if b.parent != s.fn { - s.errorf("block has incorrect parent %s", b.parent) - } - - // Check all blocks are reachable. - // (The entry block is always implicitly reachable, the exit block may be unreachable.) - if index > 1 && len(b.Preds) == 0 { - s.warnf("unreachable block") - if b.Instrs == nil { - // Since this block is about to be pruned, - // tolerating transient problems in it - // simplifies other optimizations. - return - } - } - - // Check predecessor and successor relations are dual, - // and that all blocks in CFG belong to same function. - for _, a := range b.Preds { - found := false - for _, bb := range a.Succs { - if bb == b { - found = true - break - } - } - if !found { - s.errorf("expected successor edge in predecessor %s; found only: %s", a, a.Succs) - } - if a.parent != s.fn { - s.errorf("predecessor %s belongs to different function %s", a, a.parent) - } - } - for _, c := range b.Succs { - found := false - for _, bb := range c.Preds { - if bb == b { - found = true - break - } - } - if !found { - s.errorf("expected predecessor edge in successor %s; found only: %s", c, c.Preds) - } - if c.parent != s.fn { - s.errorf("successor %s belongs to different function %s", c, c.parent) - } - } - - // Check each instruction is sane. 
- n := len(b.Instrs) - if n == 0 { - s.errorf("basic block contains no instructions") - } - var rands [10]*Value // reuse storage - for j, instr := range b.Instrs { - if instr == nil { - s.errorf("nil instruction at index %d", j) - continue - } - if b2 := instr.Block(); b2 == nil { - s.errorf("nil Block() for instruction at index %d", j) - continue - } else if b2 != b { - s.errorf("wrong Block() (%s) for instruction at index %d ", b2, j) - continue - } - if j < n-1 { - s.checkInstr(j, instr) - } else { - s.checkFinalInstr(instr) - } - - // Check Instruction.Operands. - operands: - for i, op := range instr.Operands(rands[:0]) { - if op == nil { - s.errorf("nil operand pointer %d of %s", i, instr) - continue - } - val := *op - if val == nil { - continue // a nil operand is ok - } - - // Check that "untyped" types only appear on constant operands. - if _, ok := (*op).(*Const); !ok { - if basic, ok := (*op).Type().(*types.Basic); ok { - if basic.Info()&types.IsUntyped != 0 { - s.errorf("operand #%d of %s is untyped: %s", i, instr, basic) - } - } - } - - // Check that Operands that are also Instructions belong to same function. - // TODO(adonovan): also check their block dominates block b. - if val, ok := val.(Instruction); ok { - if val.Block() == nil { - s.errorf("operand %d of %s is an instruction (%s) that belongs to no block", i, instr, val) - } else if val.Parent() != s.fn { - s.errorf("operand %d of %s is an instruction (%s) from function %s", i, instr, val, val.Parent()) - } - } - - // Check that each function-local operand of - // instr refers back to instr. (NB: quadratic) - switch val := val.(type) { - case *Const, *Global, *Builtin: - continue // not local - case *Function: - if val.parent == nil { - continue // only anon functions are local - } - } - - // TODO(adonovan): check val.Parent() != nil <=> val.Referrers() is defined. - - if refs := val.Referrers(); refs != nil { - for _, ref := range *refs { - if ref == instr { - continue operands - } - } - s.errorf("operand %d of %s (%s) does not refer to us", i, instr, val) - } else { - s.errorf("operand %d of %s (%s) has no referrers", i, instr, val) - } - } - } -} - -func (s *sanity) checkReferrerList(v Value) { - refs := v.Referrers() - if refs == nil { - s.errorf("%s has missing referrer list", v.Name()) - return - } - for i, ref := range *refs { - if _, ok := s.instrs[ref]; !ok { - if val, ok := ref.(Value); ok { - s.errorf("%s.Referrers()[%d] = %s = %s is not an instruction belonging to this function", v.Name(), i, val.Name(), val) - } else { - s.errorf("%s.Referrers()[%d] = %s is not an instruction belonging to this function", v.Name(), i, ref) - } - } - } -} - -func (s *sanity) checkFunction(fn *Function) bool { - // TODO(adonovan): check Function invariants: - // - check params match signature - // - check transient fields are nil - // - warn if any fn.Locals do not appear among block instructions. - s.fn = fn - if fn.Prog == nil { - s.errorf("nil Prog") - } - - _ = fn.String() // must not crash - _ = fn.RelString(fn.pkg()) // must not crash - - // All functions have a package, except delegates (which are - // shared across packages, or duplicated as weak symbols in a - // separate-compilation model), and error.Error. 
- if fn.Pkg == nil { - switch fn.Synthetic { - case SyntheticWrapper, SyntheticBound, SyntheticThunk, SyntheticGeneric: - default: - if !strings.HasSuffix(fn.name, "Error") { - s.errorf("nil Pkg") - } - } - } - if src, syn := fn.Synthetic == 0, fn.source != nil; src != syn { - s.errorf("got fromSource=%t, hasSyntax=%t; want same values", src, syn) - } - for i, l := range fn.Locals { - if l.Parent() != fn { - s.errorf("Local %s at index %d has wrong parent", l.Name(), i) - } - if l.Heap { - s.errorf("Local %s at index %d has Heap flag set", l.Name(), i) - } - } - // Build the set of valid referrers. - s.instrs = make(map[Instruction]struct{}) - for _, b := range fn.Blocks { - for _, instr := range b.Instrs { - s.instrs[instr] = struct{}{} - } - } - for i, p := range fn.Params { - if p.Parent() != fn { - s.errorf("Param %s at index %d has wrong parent", p.Name(), i) - } - // Check common suffix of Signature and Params match type. - if sig := fn.Signature; sig != nil { - j := i - len(fn.Params) + sig.Params().Len() // index within sig.Params - if j < 0 { - continue - } - if !types.Identical(p.Type(), sig.Params().At(j).Type()) { - s.errorf("Param %s at index %d has wrong type (%s, versus %s in Signature)", p.Name(), i, p.Type(), sig.Params().At(j).Type()) - - } - } - - s.checkReferrerList(p) - } - for i, fv := range fn.FreeVars { - if fv.Parent() != fn { - s.errorf("FreeVar %s at index %d has wrong parent", fv.Name(), i) - } - s.checkReferrerList(fv) - } - - if fn.Blocks != nil && len(fn.Blocks) == 0 { - // Function _had_ blocks (so it's not external) but - // they were "optimized" away, even the entry block. - s.errorf("Blocks slice is non-nil but empty") - } - for i, b := range fn.Blocks { - if b == nil { - s.warnf("nil *BasicBlock at f.Blocks[%d]", i) - continue - } - s.checkBlock(b, i) - } - - s.block = nil - for i, anon := range fn.AnonFuncs { - if anon.Parent() != fn { - s.errorf("AnonFuncs[%d]=%s but %s.Parent()=%s", i, anon, anon, anon.Parent()) - } - } - s.fn = nil - return !s.insane -} - -// sanityCheckPackage checks invariants of packages upon creation. -// It does not require that the package is built. -// Unlike sanityCheck (for functions), it just panics at the first error. -func sanityCheckPackage(pkg *Package) { - if pkg.Pkg == nil { - panic(fmt.Sprintf("Package %s has no Object", pkg)) - } - _ = pkg.String() // must not crash - - for name, mem := range pkg.Members { - if name != mem.Name() { - panic(fmt.Sprintf("%s: %T.Name() = %s, want %s", - pkg.Pkg.Path(), mem, mem.Name(), name)) - } - obj := mem.Object() - if obj == nil { - // This check is sound because fields - // {Global,Function}.object have type - // types.Object. (If they were declared as - // *types.{Var,Func}, we'd have a non-empty - // interface containing a nil pointer.) - - continue // not all members have typechecker objects - } - if obj.Name() != name { - if obj.Name() == "init" && strings.HasPrefix(mem.Name(), "init#") { - // Ok. The name of a declared init function varies between - // its types.Func ("init") and its ir.Function ("init#%d"). - } else { - panic(fmt.Sprintf("%s: %T.Object().Name() = %s, want %s", - pkg.Pkg.Path(), mem, obj.Name(), name)) - } - } - } -} diff --git a/vendor/honnef.co/go/tools/go/ir/source.go b/vendor/honnef.co/go/tools/go/ir/source.go deleted file mode 100644 index 155c5f733..000000000 --- a/vendor/honnef.co/go/tools/go/ir/source.go +++ /dev/null @@ -1,263 +0,0 @@ -// Copyright 2013 The Go Authors. All rights reserved. 
-// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package ir - -// This file defines utilities for working with source positions -// or source-level named entities ("objects"). - -// TODO(adonovan): test that {Value,Instruction}.Pos() positions match -// the originating syntax, as specified. - -import ( - "go/ast" - "go/token" - "go/types" -) - -// EnclosingFunction returns the function that contains the syntax -// node denoted by path. -// -// Syntax associated with package-level variable specifications is -// enclosed by the package's init() function. -// -// Returns nil if not found; reasons might include: -// - the node is not enclosed by any function. -// - the node is within an anonymous function (FuncLit) and -// its IR function has not been created yet -// (pkg.Build() has not yet been called). -func EnclosingFunction(pkg *Package, path []ast.Node) *Function { - // Start with package-level function... - fn := findEnclosingPackageLevelFunction(pkg, path) - if fn == nil { - return nil // not in any function - } - - // ...then walk down the nested anonymous functions. - n := len(path) -outer: - for i := range path { - if lit, ok := path[n-1-i].(*ast.FuncLit); ok { - for _, anon := range fn.AnonFuncs { - if anon.Pos() == lit.Type.Func { - fn = anon - continue outer - } - } - // IR function not found: - // - package not yet built, or maybe - // - builder skipped FuncLit in dead block - // (in principle; but currently the Builder - // generates even dead FuncLits). - return nil - } - } - return fn -} - -// HasEnclosingFunction returns true if the AST node denoted by path -// is contained within the declaration of some function or -// package-level variable. -// -// Unlike EnclosingFunction, the behaviour of this function does not -// depend on whether IR code for pkg has been built, so it can be -// used to quickly reject check inputs that will cause -// EnclosingFunction to fail, prior to IR building. -func HasEnclosingFunction(pkg *Package, path []ast.Node) bool { - return findEnclosingPackageLevelFunction(pkg, path) != nil -} - -// findEnclosingPackageLevelFunction returns the Function -// corresponding to the package-level function enclosing path. -func findEnclosingPackageLevelFunction(pkg *Package, path []ast.Node) *Function { - if n := len(path); n >= 2 { // [... {Gen,Func}Decl File] - switch decl := path[n-2].(type) { - case *ast.GenDecl: - if decl.Tok == token.VAR && n >= 3 { - // Package-level 'var' initializer. - return pkg.init - } - - case *ast.FuncDecl: - // Declared function/method. - fn := findNamedFunc(pkg, decl.Pos()) - if fn == nil && decl.Recv == nil && decl.Name.Name == "init" { - // Hack: return non-nil when IR is not yet - // built so that HasEnclosingFunction works. - return pkg.init - } - return fn - } - } - return nil // not in any function -} - -// findNamedFunc returns the named function whose FuncDecl.Ident is at -// position pos. -func findNamedFunc(pkg *Package, pos token.Pos) *Function { - for _, fn := range pkg.Functions { - if fn.Pos() == pos { - return fn - } - } - return nil -} - -// ValueForExpr returns the IR Value that corresponds to non-constant -// expression e. -// -// It returns nil if no value was found, e.g. -// - the expression is not lexically contained within f; -// - f was not built with debug information; or -// - e is a constant expression. (For efficiency, no debug -// information is stored for constants. Use -// go/types.Info.Types[e].Value instead.) 
-// - e is a reference to nil or a built-in function. -// - the value was optimised away. -// -// If e is an addressable expression used in an lvalue context, -// value is the address denoted by e, and isAddr is true. -// -// The types of e (or &e, if isAddr) and the result are equal -// (modulo "untyped" bools resulting from comparisons). -// -// (Tip: to find the ir.Value given a source position, use -// astutil.PathEnclosingInterval to locate the ast.Node, then -// EnclosingFunction to locate the Function, then ValueForExpr to find -// the ir.Value.) -func (f *Function) ValueForExpr(e ast.Expr) (value Value, isAddr bool) { - if f.debugInfo() { // (opt) - e = unparen(e) - for _, b := range f.Blocks { - for _, instr := range b.Instrs { - if ref, ok := instr.(*DebugRef); ok { - if ref.Expr == e { - return ref.X, ref.IsAddr - } - } - } - } - } - return -} - -// --- Lookup functions for source-level named entities (types.Objects) --- - -// Package returns the IR Package corresponding to the specified -// type-checker package object. -// It returns nil if no such IR package has been created. -func (prog *Program) Package(obj *types.Package) *Package { - return prog.packages[obj] -} - -// packageLevelValue returns the package-level value corresponding to -// the specified named object, which may be a package-level const -// (*Const), var (*Global) or func (*Function) of some package in -// prog. It returns nil if the object is not found. -func (prog *Program) packageLevelValue(obj types.Object) Value { - if pkg, ok := prog.packages[obj.Pkg()]; ok { - return pkg.values[obj] - } - return nil -} - -// FuncValue returns the concrete Function denoted by the source-level -// named function obj, or nil if obj denotes an interface method. -// -// TODO(adonovan): check the invariant that obj.Type() matches the -// result's Signature, both in the params/results and in the receiver. -func (prog *Program) FuncValue(obj *types.Func) *Function { - obj = obj.Origin() - fn, _ := prog.packageLevelValue(obj).(*Function) - return fn -} - -// ConstValue returns the IR Value denoted by the source-level named -// constant obj. -func (prog *Program) ConstValue(obj *types.Const) *Const { - // TODO(adonovan): opt: share (don't reallocate) - // Consts for const objects and constant ast.Exprs. - - // Universal constant? {true,false,nil} - if obj.Parent() == types.Universe { - return NewConst(obj.Val(), obj.Type()) - } - // Package-level named constant? - if v := prog.packageLevelValue(obj); v != nil { - return v.(*Const) - } - return NewConst(obj.Val(), obj.Type()) -} - -// VarValue returns the IR Value that corresponds to a specific -// identifier denoting the source-level named variable obj. -// -// VarValue returns nil if a local variable was not found, perhaps -// because its package was not built, the debug information was not -// requested during IR construction, or the value was optimized away. -// -// ref is the path to an ast.Ident (e.g. from PathEnclosingInterval), -// and that ident must resolve to obj. -// -// pkg is the package enclosing the reference. (A reference to a var -// always occurs within a function, so we need to know where to find it.) -// -// If the identifier is a field selector and its base expression is -// non-addressable, then VarValue returns the value of that field. -// For example: -// -// func f() struct {x int} -// f().x // VarValue(x) returns a *Field instruction of type int -// -// All other identifiers denote addressable locations (variables). 
-// For them, VarValue may return either the variable's address or its -// value, even when the expression is evaluated only for its value; the -// situation is reported by isAddr, the second component of the result. -// -// If !isAddr, the returned value is the one associated with the -// specific identifier. For example, -// -// var x int // VarValue(x) returns Const 0 here -// x = 1 // VarValue(x) returns Const 1 here -// -// It is not specified whether the value or the address is returned in -// any particular case, as it may depend upon optimizations performed -// during IR code generation, such as registerization, constant -// folding, avoidance of materialization of subexpressions, etc. -func (prog *Program) VarValue(obj *types.Var, pkg *Package, ref []ast.Node) (value Value, isAddr bool) { - // All references to a var are local to some function, possibly init. - fn := EnclosingFunction(pkg, ref) - if fn == nil { - return // e.g. def of struct field; IR not built? - } - - id := ref[0].(*ast.Ident) - - // Defining ident of a parameter? - if id.Pos() == obj.Pos() { - for _, param := range fn.Params { - if param.Object() == obj { - return param, false - } - } - } - - // Other ident? - for _, b := range fn.Blocks { - for _, instr := range b.Instrs { - if dr, ok := instr.(*DebugRef); ok { - if dr.Pos() == id.Pos() { - return dr.X, dr.IsAddr - } - } - } - } - - // Defining ident of package-level var? - if v := prog.packageLevelValue(obj); v != nil { - return v.(*Global), true - } - - return // e.g. debug info not requested, or var optimized away -} diff --git a/vendor/honnef.co/go/tools/go/ir/ssa.go b/vendor/honnef.co/go/tools/go/ir/ssa.go deleted file mode 100644 index 1ef87f9e8..000000000 --- a/vendor/honnef.co/go/tools/go/ir/ssa.go +++ /dev/null @@ -1,2104 +0,0 @@ -// Copyright 2013 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package ir - -// This package defines a high-level intermediate representation for -// Go programs using static single-information (SSI) form. - -import ( - "fmt" - "go/ast" - "go/constant" - "go/token" - "go/types" - "math/big" - "sync" - - "honnef.co/go/tools/go/types/typeutil" -) - -const ( - // Replace CompositeValue with only constant values with AggregateConst. Currently disabled because it breaks field - // tracking in U1000. - doSimplifyConstantCompositeValues = false -) - -type ID int - -// A Program is a partial or complete Go program converted to IR form. 
-type Program struct { - Fset *token.FileSet // position information for the files of this Program - PrintFunc string // create ir.html for function specified in PrintFunc - imported map[string]*Package // all importable Packages, keyed by import path - packages map[*types.Package]*Package // all loaded Packages, keyed by object - mode BuilderMode // set of mode bits for IR construction - MethodSets typeutil.MethodSetCache // cache of type-checker's method-sets - - methodsMu sync.Mutex // guards the following maps: - methodSets typeutil.Map[*methodSet] // maps type to its concrete methodSet - runtimeTypes typeutil.Map[bool] // types for which rtypes are needed - canon typeutil.Map[types.Type] // type canonicalization map - bounds map[*types.Func]*Function // bounds for curried x.Method closures - thunks map[selectionKey]*Function // thunks for T.Method expressions -} - -// A Package is a single analyzed Go package containing Members for -// all package-level functions, variables, constants and types it -// declares. These may be accessed directly via Members, or via the -// type-specific accessor methods Func, Type, Var and Const. -// -// Members also contains entries for "init" (the synthetic package -// initializer) and "init#%d", the nth declared init function, -// and unspecified other things too. -type Package struct { - Prog *Program // the owning program - Pkg *types.Package // the corresponding go/types.Package - Members map[string]Member // all package members keyed by name (incl. init and init#%d) - Functions []*Function // all functions, excluding anonymous ones - values map[types.Object]Value // package members (incl. types and methods), keyed by object - init *Function // Func("init"); the package's init function - debug bool // include full debug info in this package - printFunc string // which function to print in HTML form - - // The following fields are set transiently, then cleared - // after building. - buildOnce sync.Once // ensures package building occurs once - ninit int32 // number of init functions - info *types.Info // package type information - files []*ast.File // package ASTs -} - -// A Member is a member of a Go package, implemented by *NamedConst, -// *Global, *Function, or *Type; they are created by package-level -// const, var, func and type declarations respectively. -type Member interface { - Name() string // declared name of the package member - String() string // package-qualified name of the package member - RelString(*types.Package) string // like String, but relative refs are unqualified - Object() types.Object // typechecker's object for this member, if any - Type() types.Type // type of the package member - Token() token.Token // token.{VAR,FUNC,CONST,TYPE} - Package() *Package // the containing package -} - -// A Type is a Member of a Package representing a package-level named type. -type Type struct { - object *types.TypeName - pkg *Package -} - -// A NamedConst is a Member of a Package representing a package-level -// named constant. -// -// Pos() returns the position of the declaring ast.ValueSpec.Names[*] -// identifier. -// -// NB: a NamedConst is not a Value; it contains a constant Value, which -// it augments with the name and position of its 'const' declaration. -type NamedConst struct { - object *types.Const - Value *Const - pkg *Package -} - -// A Value is an IR value that can be referenced by an instruction. 
-type Value interface { - setID(ID) - - // Name returns the name of this value, and determines how - // this Value appears when used as an operand of an - // Instruction. - // - // This is the same as the source name for Parameters, - // Builtins, Functions, FreeVars, Globals. - // For constants, it is a representation of the constant's value - // and type. For all other Values this is the name of the - // virtual register defined by the instruction. - // - // The name of an IR Value is not semantically significant, - // and may not even be unique within a function. - Name() string - - // ID returns the ID of this value. IDs are unique within a single - // function and are densely numbered, but may contain gaps. - // Values and other Instructions share the same ID space. - // Globally, values are identified by their addresses. However, - // IDs exist to facilitate efficient storage of mappings between - // values and data when analysing functions. - // - // NB: IDs are allocated late in the IR construction process and - // are not available to early stages of said process. - ID() ID - - // If this value is an Instruction, String returns its - // disassembled form; otherwise it returns unspecified - // human-readable information about the Value, such as its - // kind, name and type. - String() string - - // Type returns the type of this value. Many instructions - // (e.g. IndexAddr) change their behaviour depending on the - // types of their operands. - Type() types.Type - - // Parent returns the function to which this Value belongs. - // It returns nil for named Functions, Builtin and Global. - Parent() *Function - - // Referrers returns the list of instructions that have this - // value as one of their operands; it may contain duplicates - // if an instruction has a repeated operand. - // - // Referrers actually returns a pointer through which the - // caller may perform mutations to the object's state. - // - // Referrers is currently only defined if Parent()!=nil, - // i.e. for the function-local values FreeVar, Parameter, - // Functions (iff anonymous) and all value-defining instructions. - // It returns nil for named Functions, Builtin and Global. - // - // Instruction.Operands contains the inverse of this relation. - Referrers() *[]Instruction - - Operands(rands []*Value) []*Value // nil for non-Instructions - - // Source returns the AST node responsible for creating this - // value. A single AST node may be responsible for more than one - // value, and not all values have an associated AST node. - // - // Do not use this method to find a Value given an ast.Expr; use - // ValueForExpr instead. - Source() ast.Node - - // Pos returns Source().Pos() if Source is not nil, else it - // returns token.NoPos. - Pos() token.Pos -} - -// An Instruction is an IR instruction that computes a new Value or -// has some effect. -// -// An Instruction that defines a value (e.g. BinOp) also implements -// the Value interface; an Instruction that only has an effect (e.g. Store) -// does not. -type Instruction interface { - setSource(ast.Node) - setID(ID) - - Comment() string - - // String returns the disassembled form of this value. - // - // Examples of Instructions that are Values: - // "BinOp {+} t1 t2" (BinOp) - // "Call len t1" (Call) - // Note that the name of the Value is not printed. 
- // - // Examples of Instructions that are not Values: - // "Return t1" (Return) - // "Store {int} t2 t1" (Store) - // - // (The separation of Value.Name() from Value.String() is useful - // for some analyses which distinguish the operation from the - // value it defines, e.g., 'y = local int' is both an allocation - // of memory 'local int' and a definition of a pointer y.) - String() string - - // ID returns the ID of this instruction. IDs are unique within a single - // function and are densely numbered, but may contain gaps. - // Globally, instructions are identified by their addresses. However, - // IDs exist to facilitate efficient storage of mappings between - // instructions and data when analysing functions. - // - // NB: IDs are allocated late in the IR construction process and - // are not available to early stages of said process. - ID() ID - - // Parent returns the function to which this instruction - // belongs. - Parent() *Function - - // Block returns the basic block to which this instruction - // belongs. - Block() *BasicBlock - - // setBlock sets the basic block to which this instruction belongs. - setBlock(*BasicBlock) - - // Operands returns the operands of this instruction: the - // set of Values it references. - // - // Specifically, it appends their addresses to rands, a - // user-provided slice, and returns the resulting slice, - // permitting avoidance of memory allocation. - // - // The operands are appended in undefined order, but the order - // is consistent for a given Instruction; the addresses are - // always non-nil but may point to a nil Value. Clients may - // store through the pointers, e.g. to effect a value - // renaming. - // - // Value.Referrers is a subset of the inverse of this - // relation. (Referrers are not tracked for all types of - // Values.) - Operands(rands []*Value) []*Value - - Referrers() *[]Instruction // nil for non-Values - - // Source returns the AST node responsible for creating this - // instruction. A single AST node may be responsible for more than - // one instruction, and not all instructions have an associated - // AST node. - Source() ast.Node - - // Pos returns Source().Pos() if Source is not nil, else it - // returns token.NoPos. - Pos() token.Pos -} - -// A Node is a node in the IR value graph. Every concrete type that -// implements Node is also either a Value, an Instruction, or both. -// -// Node contains the methods common to Value and Instruction, plus the -// Operands and Referrers methods generalized to return nil for -// non-Instructions and non-Values, respectively. -// -// Node is provided to simplify IR graph algorithms. Clients should -// use the more specific and informative Value or Instruction -// interfaces where appropriate. 
-type Node interface { - setID(ID) - - // Common methods: - ID() ID - String() string - Source() ast.Node - Pos() token.Pos - Parent() *Function - - // Partial methods: - Operands(rands []*Value) []*Value // nil for non-Instructions - Referrers() *[]Instruction // nil for non-Values -} - -type Synthetic int - -const ( - SyntheticLoadedFromExportData Synthetic = iota + 1 - SyntheticPackageInitializer - SyntheticThunk - SyntheticWrapper - SyntheticBound - SyntheticGeneric -) - -func (syn Synthetic) String() string { - switch syn { - case SyntheticLoadedFromExportData: - return "loaded from export data" - case SyntheticPackageInitializer: - return "package initializer" - case SyntheticThunk: - return "thunk" - case SyntheticWrapper: - return "wrapper" - case SyntheticBound: - return "bound" - case SyntheticGeneric: - return "generic" - default: - return fmt.Sprintf("Synthetic(%d)", syn) - } -} - -// Function represents the parameters, results, and code of a function -// or method. -// -// If Blocks is nil, this indicates an external function for which no -// Go source code is available. In this case, FreeVars and Locals -// are nil too. Clients performing whole-program analysis must -// handle external functions specially. -// -// Blocks contains the function's control-flow graph (CFG). -// Blocks[0] is the function entry point; block order is not otherwise -// semantically significant, though it may affect the readability of -// the disassembly. -// To iterate over the blocks in dominance order, use DomPreorder(). -// -// A nested function (Parent()!=nil) that refers to one or more -// lexically enclosing local variables ("free variables") has FreeVars. -// Such functions cannot be called directly but require a -// value created by MakeClosure which, via its Bindings, supplies -// values for these parameters. -// -// If the function is a method (Signature.Recv() != nil) then the first -// element of Params is the receiver parameter. -// -// A Go package may declare many functions called "init". -// For each one, Object().Name() returns "init" but Name() returns -// "init#1", etc, in declaration order. -// -// Pos() returns the declaring ast.FuncLit.Type.Func or the position -// of the ast.FuncDecl.Name, if the function was explicit in the -// source. Synthetic wrappers, for which Synthetic != "", may share -// the same position as the function they wrap. -// Syntax.Pos() always returns the position of the declaring "func" token. -// -// Type() returns the function's Signature. -type Function struct { - node - - name string - object types.Object // a declared *types.Func or one of its wrappers - method *types.Selection // info about provenance of synthetic methods - Signature *types.Signature - generics instanceWrapperMap - - Synthetic Synthetic - parent *Function // enclosing function if anon; nil if global - Pkg *Package // enclosing package; nil for shared funcs (wrappers and error.Error) - Prog *Program // enclosing program - Params []*Parameter // function parameters; for methods, includes receiver - FreeVars []*FreeVar // free variables whose values must be supplied by closure - Locals []*Alloc // local variables of this function - Blocks []*BasicBlock // basic blocks of the function; nil => external - Exit *BasicBlock // The function's exit block - AnonFuncs []*Function // anonymous functions directly beneath this one - referrers []Instruction // referring instructions (iff Parent() != nil) - NoReturn NoReturn // Calling this function will always terminate control flow. 
- - *functionBody -} - -type instanceWrapperMap struct { - h typeutil.Hasher - entries map[uint32][]struct { - key *types.TypeList - val *Function - } - len int -} - -func typeListIdentical(l1, l2 *types.TypeList) bool { - if l1.Len() != l2.Len() { - return false - } - for i := 0; i < l1.Len(); i++ { - t1 := l1.At(i) - t2 := l2.At(i) - if !types.Identical(t1, t2) { - return false - } - } - return true -} - -func (m *instanceWrapperMap) At(key *types.TypeList) *Function { - if m.entries == nil { - m.entries = make(map[uint32][]struct { - key *types.TypeList - val *Function - }) - m.h = typeutil.MakeHasher() - } - - var hash uint32 - for i := 0; i < key.Len(); i++ { - t := key.At(i) - hash += m.h.Hash(t) - } - - for _, e := range m.entries[hash] { - if typeListIdentical(e.key, key) { - return e.val - } - } - return nil -} - -func (m *instanceWrapperMap) Set(key *types.TypeList, val *Function) { - if m.entries == nil { - m.entries = make(map[uint32][]struct { - key *types.TypeList - val *Function - }) - m.h = typeutil.MakeHasher() - } - - var hash uint32 - for i := 0; i < key.Len(); i++ { - t := key.At(i) - hash += m.h.Hash(t) - } - for i, e := range m.entries[hash] { - if typeListIdentical(e.key, key) { - m.entries[hash][i].val = val - return - } - } - m.entries[hash] = append(m.entries[hash], struct { - key *types.TypeList - val *Function - }{key, val}) - m.len++ -} - -func (m *instanceWrapperMap) Len() int { - return m.len -} - -type NoReturn uint8 - -const ( - Returns NoReturn = iota - AlwaysExits - AlwaysUnwinds - NeverReturns -) - -type constValue struct { - c Constant - idx int -} - -type functionBody struct { - // The following fields are set transiently during building, - // then cleared. - currentBlock *BasicBlock // where to emit code - objects map[types.Object]Value // addresses of local variables - namedResults []*Alloc // tuple of named results - implicitResults []*Alloc // tuple of results - targets *targets // linked stack of branch targets - lblocks map[types.Object]*lblock // labelled blocks - - consts map[constKey]constValue - aggregateConsts typeutil.Map[[]*AggregateConst] - - wr *HTMLWriter - fakeExits BlockSet - blocksets [5]BlockSet - hasDefer bool - - // a contiguous block of instructions that will be used by blocks, - // to avoid making multiple allocations. - scratchInstructions []Instruction -} - -func (fn *Function) results() []*Alloc { - if len(fn.namedResults) > 0 { - return fn.namedResults - } - return fn.implicitResults -} - -// BasicBlock represents an IR basic block. -// -// The final element of Instrs is always an explicit transfer of -// control (If, Jump, Return, Panic, or Unreachable). -// -// A block may contain no Instructions only if it is unreachable, -// i.e., Preds is nil. Empty blocks are typically pruned. -// -// BasicBlocks and their Preds/Succs relation form a (possibly cyclic) -// graph independent of the IR Value graph: the control-flow graph or -// CFG. It is illegal for multiple edges to exist between the same -// pair of blocks. -// -// Each BasicBlock is also a node in the dominator tree of the CFG. -// The tree may be navigated using Idom()/Dominees() and queried using -// Dominates(). -// -// The order of Preds and Succs is significant (to Phi and If -// instructions, respectively). 
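The BasicBlock documentation above describes the CFG purely through the Preds/Succs relation, with Blocks[0] as the entry point. A small sketch of a depth-first CFG walk under that model, again assuming the vendored ir package; `reachableBlocks` is a hypothetical helper:

```go
package irutil

import "honnef.co/go/tools/go/ir"

// reachableBlocks returns the set of blocks reachable from the entry block
// (Blocks[0]) by following successor edges.
func reachableBlocks(fn *ir.Function) map[*ir.BasicBlock]bool {
	seen := make(map[*ir.BasicBlock]bool)
	var walk func(*ir.BasicBlock)
	walk = func(b *ir.BasicBlock) {
		if seen[b] {
			return
		}
		seen[b] = true
		for _, succ := range b.Succs {
			walk(succ)
		}
	}
	if len(fn.Blocks) > 0 {
		walk(fn.Blocks[0]) // Blocks[0] is the function entry point
	}
	return seen
}
```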
-type BasicBlock struct { - Index int // index of this block within Parent().Blocks - Comment string // optional label; no semantic significance - parent *Function // parent function - Instrs []Instruction // instructions in order - Preds, Succs []*BasicBlock // predecessors and successors - succs2 [2]*BasicBlock // initial space for Succs - dom domInfo // dominator tree info - pdom domInfo // post-dominator tree info - post int - gaps int // number of nil Instrs (transient) - rundefers int // number of rundefers (transient) -} - -// Pure values ---------------------------------------- - -// A FreeVar represents a free variable of the function to which it -// belongs. -// -// FreeVars are used to implement anonymous functions, whose free -// variables are lexically captured in a closure formed by -// MakeClosure. The value of such a free var is an Alloc or another -// FreeVar and is considered a potentially escaping heap address, with -// pointer type. -// -// FreeVars are also used to implement bound method closures. Such a -// free var represents the receiver value and may be of any type that -// has concrete methods. -// -// Pos() returns the position of the value that was captured, which -// belongs to an enclosing function. -type FreeVar struct { - node - - name string - typ types.Type - parent *Function - referrers []Instruction - - // Transiently needed during building. - outer Value // the Value captured from the enclosing context. -} - -// A Parameter represents an input parameter of a function. -type Parameter struct { - register - - name string - object types.Object // a *types.Var; nil for non-source locals -} - -// A Const represents the value of a constant expression. -// -// The underlying type of a constant may be any boolean, numeric, or -// string type. In addition, a Const may represent the nil value of -// any reference type---interface, map, channel, pointer, slice, or -// function---but not "untyped nil". -// -// All source-level constant expressions are represented by a Const -// of the same type and value. -// -// Value holds the exact value of the constant, independent of its -// Type(), using the same representation as package go/constant uses for -// constants, or nil for a typed nil value. -// -// Pos() returns token.NoPos. -// -// Example printed form: -// -// Const {42} -// Const {"test"} -// Const {(3 + 4i)} -type Const struct { - register - - Value constant.Value -} - -type AggregateConst struct { - register - - Values []Value -} - -type CompositeValue struct { - register - - // Bitmap records which elements were explicitly provided. For example, [4]byte{2: x} would have a bitmap of 0010. - Bitmap big.Int - // The number of bits set in Bitmap - NumSet int - // Dense list of values in the composite literal. Omitted elements are filled in with zero values. - Values []Value -} - -// TODO add the element's zero constant to ArrayConst -type ArrayConst struct { - register -} - -type GenericConst struct { - register -} - -type Constant interface { - Instruction - Value - aConstant() - RelString(*types.Package) string - equal(Constant) bool - setType(types.Type) -} - -func (*Const) aConstant() {} -func (*AggregateConst) aConstant() {} -func (*ArrayConst) aConstant() {} -func (*GenericConst) aConstant() {} - -// A Global is a named Value holding the address of a package-level -// variable. -// -// Pos() returns the position of the ast.ValueSpec.Names[*] -// identifier. 
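Per the Const documentation above, Value holds the exact constant in go/constant representation and is nil only for typed nils. A hedged sketch of reading an integer constant back out, assuming the vendored ir package; `intConstValue` is a made-up helper name:

```go
package irutil

import (
	"go/constant"

	"honnef.co/go/tools/go/ir"
)

// intConstValue returns the exact integer value of c, if c is an integer
// constant; c.Value is nil exactly for typed nil constants.
func intConstValue(c *ir.Const) (int64, bool) {
	if c.Value == nil || c.Value.Kind() != constant.Int {
		return 0, false
	}
	return constant.Int64Val(c.Value)
}
```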
-type Global struct { - node - - name string - object types.Object // a *types.Var; may be nil for synthetics e.g. init$guard - typ types.Type - - Pkg *Package -} - -// A Builtin represents a specific use of a built-in function, e.g. len. -// -// Builtins are immutable values. Builtins do not have addresses. -// Builtins can only appear in CallCommon.Func. -// -// Name() indicates the function: one of the built-in functions from the -// Go spec (excluding "make" and "new") or one of these ir-defined -// intrinsics: -// -// // wrapnilchk returns ptr if non-nil, panics otherwise. -// // (For use in indirection wrappers.) -// func ir:wrapnilchk(ptr *T, recvType, methodName string) *T -// -// // noreturnWasPanic returns true if the previously called -// // function panicked, false if it exited the process. -// func ir:noreturnWasPanic() bool -// -// Object() returns a *types.Builtin for built-ins defined by the spec, -// nil for others. -// -// Type() returns a *types.Signature representing the effective -// signature of the built-in for this call. -type Builtin struct { - node - - name string - sig *types.Signature -} - -// Value-defining instructions ---------------------------------------- - -// The Alloc instruction reserves space for a variable of the given type, -// zero-initializes it, and yields its address. -// -// Alloc values are always addresses, and have pointer types, so the -// type of the allocated variable is actually -// Type().Underlying().(*types.Pointer).Elem(). -// -// If Heap is false, Alloc allocates space in the function's -// activation record (frame); we refer to an Alloc(Heap=false) as a -// "stack" alloc. Each stack Alloc returns the same address each time -// it is executed within the same activation; the space is -// re-initialized to zero. -// -// If Heap is true, Alloc allocates space in the heap; we -// refer to an Alloc(Heap=true) as a "heap" alloc. Each heap Alloc -// returns a different address each time it is executed. -// -// When Alloc is applied to a channel, map or slice type, it returns -// the address of an uninitialized (nil) reference of that kind; store -// the result of MakeSlice, MakeMap or MakeChan in that location to -// instantiate these types. -// -// Pos() returns the ast.CompositeLit.Lbrace for a composite literal, -// or the ast.CallExpr.Rparen for a call to new() or for a call that -// allocates a varargs slice. -// -// Example printed form: -// -// t1 = StackAlloc <*int> -// t2 = HeapAlloc <*int> (new) -type Alloc struct { - register - Heap bool - index int // dense numbering; for lifting -} - -var _ Instruction = (*Sigma)(nil) -var _ Value = (*Sigma)(nil) - -// The Sigma instruction represents an SSI σ-node, which splits values -// at branches in the control flow. -// -// Conceptually, σ-nodes exist at the end of blocks that branch and -// constitute parallel assignments to one value per destination block. -// However, such a representation would be awkward to work with, so -// instead we place σ-nodes at the beginning of branch targets. The -// From field denotes to which incoming edge the node applies. -// -// Within a block, all σ-nodes must appear before all non-σ nodes. 
-// -// Example printed form: -// -// t2 = Sigma [#0] t1 (x) -type Sigma struct { - register - From *BasicBlock - X Value - - live bool // used during lifting -} - -type CopyInfo uint64 - -const ( - CopyInfoUnspecified CopyInfo = 0 - CopyInfoNotNil CopyInfo = 1 << iota - CopyInfoNotZeroLength - CopyInfoNotNegative - CopyInfoSingleConcreteType - CopyInfoClosed -) - -type Copy struct { - register - X Value - Why Instruction - Info CopyInfo -} - -// The Phi instruction represents an SSA φ-node, which combines values -// that differ across incoming control-flow edges and yields a new -// value. Within a block, all φ-nodes must appear before all non-φ, non-σ -// nodes. -// -// Pos() returns the position of the && or || for short-circuit -// control-flow joins, or that of the *Alloc for φ-nodes inserted -// during SSA renaming. -// -// Example printed form: -// -// t3 = Phi 2:t1 4:t2 (x) -type Phi struct { - register - Edges []Value // Edges[i] is value for Block().Preds[i] - - live bool // used during lifting -} - -// The Call instruction represents a function or method call. -// -// The Call instruction yields the function result if there is exactly -// one. Otherwise it returns a tuple, the components of which are -// accessed via Extract. -// -// See CallCommon for generic function call documentation. -// -// Pos() returns the ast.CallExpr.Lparen, if explicit in the source. -// -// Example printed form: -// -// t3 = Call <()> println t1 t2 -// t4 = Call <()> foo$1 -// t6 = Invoke t5.String -type Call struct { - register - Call CallCommon -} - -// The BinOp instruction yields the result of binary operation X Op Y. -// -// Pos() returns the ast.BinaryExpr.OpPos, if explicit in the source. -// -// Example printed form: -// -// t3 = BinOp {+} t2 t1 -type BinOp struct { - register - // One of: - // ADD SUB MUL QUO REM + - * / % - // AND OR XOR SHL SHR AND_NOT & | ^ << >> &^ - // EQL NEQ LSS LEQ GTR GEQ == != < <= < >= - Op token.Token - X, Y Value -} - -// The UnOp instruction yields the result of Op X. -// XOR is bitwise complement. -// SUB is negation. -// NOT is logical negation. -// -// Example printed form: -// -// t2 = UnOp {^} t1 -type UnOp struct { - register - Op token.Token // One of: NOT SUB XOR ! - ^ - X Value -} - -// The Load instruction loads a value from a memory address. -// -// For implicit memory loads, Pos() returns the position of the -// most closely associated source-level construct; the details are not -// specified. -// -// Example printed form: -// -// t2 = Load t1 -type Load struct { - register - X Value -} - -// The ChangeType instruction applies to X a value-preserving type -// change to Type(). -// -// Type changes are permitted: -// - between a named type and its underlying type. -// - between two named types of the same underlying type. -// - between (possibly named) pointers to identical base types. -// - from a bidirectional channel to a read- or write-channel, -// optionally adding/removing a name. -// -// This operation cannot fail dynamically. -// -// Pos() returns the ast.CallExpr.Lparen, if the instruction arose -// from an explicit conversion in the source. -// -// Example printed form: -// -// t2 = ChangeType <*T> t1 -type ChangeType struct { - register - X Value -} - -// The Convert instruction yields the conversion of value X to type -// Type(). One or both of those types is basic (but possibly named). -// -// A conversion may change the value and representation of its operand. -// Conversions are permitted: -// - between real numeric types. 
-// - between complex numeric types. -// - between string and []byte or []rune. -// - between pointers and unsafe.Pointer. -// - between unsafe.Pointer and uintptr. -// - from (Unicode) integer to (UTF-8) string. -// -// A conversion may imply a type name change also. -// -// This operation cannot fail dynamically. -// -// Conversions of untyped string/number/bool constants to a specific -// representation are eliminated during IR construction. -// -// Pos() returns the ast.CallExpr.Lparen, if the instruction arose -// from an explicit conversion in the source. -// -// Example printed form: -// -// t2 = Convert <[]byte> t1 -type Convert struct { - register - X Value -} - -// ChangeInterface constructs a value of one interface type from a -// value of another interface type known to be assignable to it. -// This operation cannot fail. -// -// Pos() returns the ast.CallExpr.Lparen if the instruction arose from -// an explicit T(e) conversion; the ast.TypeAssertExpr.Lparen if the -// instruction arose from an explicit e.(T) operation; or token.NoPos -// otherwise. -// -// Example printed form: -// -// t2 = ChangeInterface t1 -type ChangeInterface struct { - register - X Value -} - -// The SliceToArrayPointer instruction yields the conversion of slice X to -// array pointer. -// -// Pos() returns the ast.CallExpr.Lparen, if the instruction arose -// from an explicit conversion in the source. -// -// Example printed form: -// -// t2 = SliceToArrayPointer <*[4]byte> t1 -type SliceToArrayPointer struct { - register - X Value -} - -// The SliceToArray instruction yields the conversion of slice X to -// array. -// -// Pos() returns the ast.CallExpr.Lparen, if the instruction arose -// from an explicit conversion in the source. -// -// Example printed form: -// -// t2 = SliceToArray <[4]byte> t1 -type SliceToArray struct { - register - X Value -} - -// MakeInterface constructs an instance of an interface type from a -// value of a concrete type. -// -// Use Program.MethodSets.MethodSet(X.Type()) to find the method-set -// of X, and Program.MethodValue(m) to find the implementation of a method. -// -// To construct the zero value of an interface type T, use: -// -// NewConst(constant.MakeNil(), T, pos) -// -// Pos() returns the ast.CallExpr.Lparen, if the instruction arose -// from an explicit conversion in the source. -// -// Example printed form: -// -// t2 = MakeInterface t1 -type MakeInterface struct { - register - X Value -} - -// The MakeClosure instruction yields a closure value whose code is -// Fn and whose free variables' values are supplied by Bindings. -// -// Type() returns a (possibly named) *types.Signature. -// -// Pos() returns the ast.FuncLit.Type.Func for a function literal -// closure or the ast.SelectorExpr.Sel for a bound method closure. -// -// Example printed form: -// -// t1 = MakeClosure foo$1 t1 t2 -// t5 = MakeClosure (T).foo$bound t4 -type MakeClosure struct { - register - Fn Value // always a *Function - Bindings []Value // values for each free variable in Fn.FreeVars -} - -// The MakeMap instruction creates a new hash-table-based map object -// and yields a value of kind map. -// -// Type() returns a (possibly named) *types.Map. -// -// Pos() returns the ast.CallExpr.Lparen, if created by make(map), or -// the ast.CompositeLit.Lbrack if created by a literal. 
-// -// Example printed form: -// -// t1 = MakeMap -// t2 = MakeMap t1 -type MakeMap struct { - register - Reserve Value // initial space reservation; nil => default -} - -// The MakeChan instruction creates a new channel object and yields a -// value of kind chan. -// -// Type() returns a (possibly named) *types.Chan. -// -// Pos() returns the ast.CallExpr.Lparen for the make(chan) that -// created it. -// -// Example printed form: -// -// t3 = MakeChan t1 -// t4 = MakeChan t2 -type MakeChan struct { - register - Size Value // int; size of buffer; zero => synchronous. -} - -// The MakeSlice instruction yields a slice of length Len backed by a -// newly allocated array of length Cap. -// -// Both Len and Cap must be non-nil Values of integer type. -// -// (Alloc(types.Array) followed by Slice will not suffice because -// Alloc can only create arrays of constant length.) -// -// Type() returns a (possibly named) *types.Slice. -// -// Pos() returns the ast.CallExpr.Lparen for the make([]T) that -// created it. -// -// Example printed form: -// -// t3 = MakeSlice <[]string> t1 t2 -// t4 = MakeSlice t1 t2 -type MakeSlice struct { - register - Len Value - Cap Value -} - -// The Slice instruction yields a slice of an existing string, slice -// or *array X between optional integer bounds Low and High. -// -// Dynamically, this instruction panics if X evaluates to a nil *array -// pointer. -// -// Type() returns string if the type of X was string, otherwise a -// *types.Slice with the same element type as X. -// -// Pos() returns the ast.SliceExpr.Lbrack if created by a x[:] slice -// operation, the ast.CompositeLit.Lbrace if created by a literal, or -// NoPos if not explicit in the source (e.g. a variadic argument slice). -// -// Example printed form: -// -// t4 = Slice <[]int> t3 t2 t1 -type Slice struct { - register - X Value // slice, string, or *array - Low, High, Max Value // each may be nil -} - -// The FieldAddr instruction yields the address of Field of *struct X. -// -// The field is identified by its index within the field list of the -// struct type of X. -// -// Dynamically, this instruction panics if X evaluates to a nil -// pointer. -// -// Type() returns a (possibly named) *types.Pointer. -// -// Pos() returns the position of the ast.SelectorExpr.Sel for the -// field, if explicit in the source. -// -// Example printed form: -// -// t2 = FieldAddr <*int> [0] (X) t1 -type FieldAddr struct { - register - X Value // *struct - Field int // field is X.Type().Underlying().(*types.Pointer).Elem().Underlying().(*types.Struct).Field(Field) -} - -// The Field instruction yields the Field of struct X. -// -// The field is identified by its index within the field list of the -// struct type of X; by using numeric indices we avoid ambiguity of -// package-local identifiers and permit compact representations. -// -// Pos() returns the position of the ast.SelectorExpr.Sel for the -// field, if explicit in the source. -// -// Example printed form: -// -// t2 = FieldAddr [0] (X) t1 -type Field struct { - register - X Value // struct - Field int // index into X.Type().(*types.Struct).Fields -} - -// The IndexAddr instruction yields the address of the element at -// index Index of collection X. Index is an integer expression. -// -// The elements of maps and strings are not addressable; use StringLookup, MapLookup or -// MapUpdate instead. -// -// Dynamically, this instruction panics if X evaluates to a nil *array -// pointer. -// -// Type() returns a (possibly named) *types.Pointer. 
-// -// Pos() returns the ast.IndexExpr.Lbrack for the index operation, if -// explicit in the source. -// -// Example printed form: -// -// t3 = IndexAddr <*int> t2 t1 -type IndexAddr struct { - register - X Value // slice or *array, - Index Value // numeric index -} - -// The Index instruction yields element Index of array X. -// -// Pos() returns the ast.IndexExpr.Lbrack for the index operation, if -// explicit in the source. -// -// Example printed form: -// -// t3 = Index t2 t1 -type Index struct { - register - X Value // array - Index Value // integer index -} - -// The MapLookup instruction yields element Index of collection X, a map. -// -// If CommaOk, the result is a 2-tuple of the value above and a -// boolean indicating the result of a map membership test for the key. -// The components of the tuple are accessed using Extract. -// -// Pos() returns the ast.IndexExpr.Lbrack, if explicit in the source. -// -// Example printed form: -// -// t4 = MapLookup t3 t1 -// t6 = MapLookup <(string, bool)> t3 t2 -type MapLookup struct { - register - X Value // map - Index Value // key-typed index - CommaOk bool // return a value,ok pair -} - -// The StringLookup instruction yields element Index of collection X, a string. -// Index is an integer expression. -// -// Pos() returns the ast.IndexExpr.Lbrack, if explicit in the source. -// -// Example printed form: -// -// t3 = StringLookup t2 t1 -type StringLookup struct { - register - X Value // string - Index Value // numeric index -} - -// SelectState is a helper for Select. -// It represents one goal state and its corresponding communication. -type SelectState struct { - Dir types.ChanDir // direction of case (SendOnly or RecvOnly) - Chan Value // channel to use (for send or receive) - Send Value // value to send (for send) - Pos token.Pos // position of token.ARROW - DebugNode ast.Node // ast.SendStmt or ast.UnaryExpr(<-) [debug mode] -} - -// The Select instruction tests whether (or blocks until) one -// of the specified sent or received states is entered. -// -// Let n be the number of States for which Dir==RECV and Tᵢ (0 ≤ i < n) -// be the element type of each such state's Chan. -// Select returns an n+2-tuple -// -// (index int, recvOk bool, r₀ T₀, ... rₙ-1 Tₙ-1) -// -// The tuple's components, described below, must be accessed via the -// Extract instruction. -// -// If Blocking, select waits until exactly one state holds, i.e. a -// channel becomes ready for the designated operation of sending or -// receiving; select chooses one among the ready states -// pseudorandomly, performs the send or receive operation, and sets -// 'index' to the index of the chosen channel. -// -// If !Blocking, select doesn't block if no states hold; instead it -// returns immediately with index equal to -1. -// -// If the chosen channel was used for a receive, the rᵢ component is -// set to the received value, where i is the index of that state among -// all n receive states; otherwise rᵢ has the zero value of type Tᵢ. -// Note that the receive index i is not the same as the state -// index index. -// -// The second component of the triple, recvOk, is a boolean whose value -// is true iff the selected operation was a receive and the receive -// successfully yielded a value. -// -// Pos() returns the ast.SelectStmt.Select. 
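The (index, recvOk, r₀, …, rₙ₋₁) layout described above means a received value's Extract index is offset by two from its position among the receive states, and that position counts only states with Dir == RecvOnly. A sketch of mapping an Extract back to its receive state, assuming the vendored ir package; `recvStateFor` is a hypothetical helper:

```go
package irutil

import (
	"go/types"

	"honnef.co/go/tools/go/ir"
)

// recvStateFor maps an Extract over sel's result tuple back to the receive
// state whose value it reads, following the (index, recvOk, r0, ..., rn-1)
// layout; it returns nil for the index and recvOk components.
func recvStateFor(sel *ir.Select, ex *ir.Extract) *ir.SelectState {
	if ex.Tuple != sel || ex.Index < 2 {
		return nil
	}
	i := ex.Index - 2 // position among the receive states only
	for _, st := range sel.States {
		if st.Dir == types.RecvOnly {
			if i == 0 {
				return st
			}
			i--
		}
	}
	return nil
}
```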
-// -// Example printed form: -// -// t6 = SelectNonBlocking <(index int, ok bool, int)> [<-t4, t5<-t1] -// t11 = SelectBlocking <(index int, ok bool)> [] -type Select struct { - register - States []*SelectState - Blocking bool -} - -// The Range instruction yields an iterator over the domain and range -// of X, which must be a string or map. -// -// Elements are accessed via Next. -// -// Type() returns an opaque and degenerate "rangeIter" type. -// -// Pos() returns the ast.RangeStmt.For. -// -// Example printed form: -// -// t2 = Range t1 -type Range struct { - register - X Value // string or map -} - -// The Next instruction reads and advances the (map or string) -// iterator Iter and returns a 3-tuple value (ok, k, v). If the -// iterator is not exhausted, ok is true and k and v are the next -// elements of the domain and range, respectively. Otherwise ok is -// false and k and v are undefined. -// -// Components of the tuple are accessed using Extract. -// -// The IsString field distinguishes iterators over strings from those -// over maps, as the Type() alone is insufficient: consider -// map[int]rune. -// -// Type() returns a *types.Tuple for the triple (ok, k, v). -// The types of k and/or v may be types.Invalid. -// -// Example printed form: -// -// t5 = Next <(ok bool, k int, v rune)> t2 -// t5 = Next <(ok bool, k invalid type, v invalid type)> t2 -type Next struct { - register - Iter Value - IsString bool // true => string iterator; false => map iterator. -} - -// The TypeAssert instruction tests whether interface value X has type -// AssertedType. -// -// If !CommaOk, on success it returns v, the result of the conversion -// (defined below); on failure it panics. -// -// If CommaOk: on success it returns a pair (v, true) where v is the -// result of the conversion; on failure it returns (z, false) where z -// is AssertedType's zero value. The components of the pair must be -// accessed using the Extract instruction. -// -// If AssertedType is a concrete type, TypeAssert checks whether the -// dynamic type in interface X is equal to it, and if so, the result -// of the conversion is a copy of the value in the interface. -// -// If AssertedType is an interface, TypeAssert checks whether the -// dynamic type of the interface is assignable to it, and if so, the -// result of the conversion is a copy of the interface value X. -// If AssertedType is a superinterface of X.Type(), the operation will -// fail iff the operand is nil. (Contrast with ChangeInterface, which -// performs no nil-check.) -// -// Type() reflects the actual type of the result, possibly a -// 2-types.Tuple; AssertedType is the asserted type. -// -// Pos() returns the ast.CallExpr.Lparen if the instruction arose from -// an explicit T(e) conversion; the ast.TypeAssertExpr.Lparen if the -// instruction arose from an explicit e.(T) operation; or the -// ast.CaseClause.Case if the instruction arose from a case of a -// type-switch statement. -// -// Example printed form: -// -// t2 = TypeAssert t1 -// t4 = TypeAssert <(value fmt.Stringer, ok bool)> t1 -type TypeAssert struct { - register - X Value - AssertedType types.Type - CommaOk bool -} - -// The Extract instruction yields component Index of Tuple. -// -// This is used to access the results of instructions with multiple -// return values, such as Call, TypeAssert, Next, Recv, -// MapLookup and others. 
-// -// Example printed form: -// -// t7 = Extract [1] (ok) t4 -type Extract struct { - register - Tuple Value - Index int -} - -// Instructions executed for effect. They do not yield a value. -------------------- - -// The Jump instruction transfers control to the sole successor of its -// owning block. -// -// A Jump must be the last instruction of its containing BasicBlock. -// -// Pos() returns NoPos. -// -// Example printed form: -// -// Jump → b1 -type Jump struct { - anInstruction -} - -// The Unreachable pseudo-instruction signals that execution cannot -// continue after the preceding function call because it terminates -// the process. -// -// The instruction acts as a control instruction, jumping to the exit -// block. However, this jump will never execute. -// -// An Unreachable instruction must be the last instruction of its -// containing BasicBlock. -// -// Example printed form: -// -// Unreachable → b1 -type Unreachable struct { - anInstruction -} - -// The If instruction transfers control to one of the two successors -// of its owning block, depending on the boolean Cond: the first if -// true, the second if false. -// -// An If instruction must be the last instruction of its containing -// BasicBlock. -// -// Pos() returns the *ast.IfStmt, if explicit in the source. -// -// Example printed form: -// -// If t2 → b1 b2 -type If struct { - anInstruction - Cond Value -} - -type ConstantSwitch struct { - anInstruction - Tag Value - // Constant branch conditions. A nil Value denotes the (implicit - // or explicit) default branch. - Conds []Value -} - -type TypeSwitch struct { - register - Tag Value - Conds []types.Type -} - -// The Return instruction returns values and control back to the calling -// function. -// -// len(Results) is always equal to the number of results in the -// function's signature. -// -// If len(Results) > 1, Return returns a tuple value with the specified -// components which the caller must access using Extract instructions. -// -// There is no instruction to return a ready-made tuple like those -// returned by a "value,ok"-mode TypeAssert, MapLookup or Recv or -// a tail-call to a function with multiple result parameters. -// -// Return must be the last instruction of its containing BasicBlock. -// Such a block has no successors. -// -// Pos() returns the ast.ReturnStmt.Return, if explicit in the source. -// -// Example printed form: -// -// Return -// Return t1 t2 -type Return struct { - anInstruction - Results []Value -} - -// The RunDefers instruction pops and invokes the entire stack of -// procedure calls pushed by Defer instructions in this function. -// -// It is legal to encounter multiple 'rundefers' instructions in a -// single control-flow path through a function; this is useful in -// the combined init() function, for example. -// -// Pos() returns NoPos. -// -// Example printed form: -// -// RunDefers -type RunDefers struct { - anInstruction -} - -// The Panic instruction initiates a panic with value X. -// -// A Panic instruction must be the last instruction of its containing -// BasicBlock, which must have one successor, the exit block. -// -// NB: 'go panic(x)' and 'defer panic(x)' do not use this instruction; -// they are treated as calls to a built-in function. -// -// Pos() returns the ast.CallExpr.Lparen if this panic was explicit -// in the source. 
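Because a non-empty block always ends in one of the control-transfer instructions documented in this group, an analysis can classify a block's exit by switching on its final instruction. A minimal sketch, under the same assumption that the vendored ir package is available; the helper name is illustrative:

```go
package irutil

import "honnef.co/go/tools/go/ir"

// terminatorKind classifies a block by its final, control-transferring
// instruction (If, Jump, Return, Panic, Unreachable, or a switch form).
func terminatorKind(b *ir.BasicBlock) string {
	if len(b.Instrs) == 0 {
		return "empty (unreachable)"
	}
	switch b.Instrs[len(b.Instrs)-1].(type) {
	case *ir.Jump:
		return "jump"
	case *ir.If:
		return "if"
	case *ir.ConstantSwitch:
		return "constant switch"
	case *ir.Return:
		return "return"
	case *ir.Panic:
		return "panic"
	case *ir.Unreachable:
		return "unreachable"
	default:
		return "other"
	}
}
```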
-// -// Example printed form: -// -// Panic t1 -type Panic struct { - anInstruction - X Value // an interface{} -} - -// The Go instruction creates a new goroutine and calls the specified -// function within it. -// -// See CallCommon for generic function call documentation. -// -// Pos() returns the ast.GoStmt.Go. -// -// Example printed form: -// -// Go println t1 -// Go t3 -// GoInvoke t4.Bar t2 -type Go struct { - anInstruction - Call CallCommon -} - -// The Defer instruction pushes the specified call onto a stack of -// functions to be called by a RunDefers instruction or by a panic. -// -// See CallCommon for generic function call documentation. -// -// Pos() returns the ast.DeferStmt.Defer. -// -// Example printed form: -// -// Defer println t1 -// Defer t3 -// DeferInvoke t4.Bar t2 -type Defer struct { - anInstruction - Call CallCommon -} - -// The Send instruction sends X on channel Chan. -// -// Pos() returns the ast.SendStmt.Arrow, if explicit in the source. -// -// Example printed form: -// -// Send t2 t1 -type Send struct { - anInstruction - Chan, X Value -} - -// The Recv instruction receives from channel Chan. -// -// If CommaOk, the result is a 2-tuple of the value above -// and a boolean indicating the success of the receive. The -// components of the tuple are accessed using Extract. -// -// Pos() returns the ast.UnaryExpr.OpPos, if explicit in the source. -// For receive operations implicit in ranging over a channel, -// Pos() returns the ast.RangeStmt.For. -// -// Example printed form: -// -// t2 = Recv t1 -// t3 = Recv <(int, bool)> t1 -type Recv struct { - register - Chan Value - CommaOk bool -} - -// The Store instruction stores Val at address Addr. -// Stores can be of arbitrary types. -// -// Pos() returns the position of the source-level construct most closely -// associated with the memory store operation. -// Since implicit memory stores are numerous and varied and depend upon -// implementation choices, the details are not specified. -// -// Example printed form: -// -// Store {int} t2 t1 -type Store struct { - anInstruction - Addr Value - Val Value -} - -// The BlankStore instruction is emitted for assignments to the blank -// identifier. -// -// BlankStore is a pseudo-instruction: it has no dynamic effect. -// -// Pos() returns NoPos. -// -// Example printed form: -// -// BlankStore t1 -type BlankStore struct { - anInstruction - Val Value -} - -// The MapUpdate instruction updates the association of Map[Key] to -// Value. -// -// Pos() returns the ast.KeyValueExpr.Colon or ast.IndexExpr.Lbrack, -// if explicit in the source. -// -// Example printed form: -// -// MapUpdate t3 t1 t2 -type MapUpdate struct { - anInstruction - Map Value - Key Value - Value Value -} - -// A DebugRef instruction maps a source-level expression Expr to the -// IR value X that represents the value (!IsAddr) or address (IsAddr) -// of that expression. -// -// DebugRef is a pseudo-instruction: it has no dynamic effect. -// -// Pos() returns Expr.Pos(), the start position of the source-level -// expression. This is not the same as the "designated" token as -// documented at Value.Pos(). e.g. CallExpr.Pos() does not return the -// position of the ("designated") Lparen token. -// -// DebugRefs are generated only for functions built with debugging -// enabled; see Package.SetDebugMode() and the GlobalDebug builder -// mode flag. -// -// DebugRefs are not emitted for ast.Idents referring to constants or -// predeclared identifiers, since they are trivial and numerous. 
-// Nor are they emitted for ast.ParenExprs. -// -// (By representing these as instructions, rather than out-of-band, -// consistency is maintained during transformation passes by the -// ordinary SSA renaming machinery.) -// -// Example printed form: -// -// ; *ast.CallExpr @ 102:9 is t5 -// ; var x float64 @ 109:72 is x -// ; address of *ast.CompositeLit @ 216:10 is t0 -type DebugRef struct { - anInstruction - Expr ast.Expr // the referring expression (never *ast.ParenExpr) - object types.Object // the identity of the source var/func - IsAddr bool // Expr is addressable and X is the address it denotes - X Value // the value or address of Expr -} - -// Embeddable mix-ins and helpers for common parts of other structs. ----------- - -// register is a mix-in embedded by all IR values that are also -// instructions, i.e. virtual registers, and provides a uniform -// implementation of most of the Value interface: Value.Name() is a -// numbered register (e.g. "t0"); the other methods are field accessors. -// -// Temporary names are automatically assigned to each register on -// completion of building a function in IR form. -type register struct { - anInstruction - typ types.Type // type of virtual register - referrers []Instruction -} - -type node struct { - source ast.Node - id ID -} - -func (n *node) setID(id ID) { n.id = id } -func (n node) ID() ID { return n.id } - -func (n *node) setSource(source ast.Node) { n.source = source } -func (n *node) Source() ast.Node { return n.source } - -func (n *node) Pos() token.Pos { - if n.source != nil { - return n.source.Pos() - } - return token.NoPos -} - -// anInstruction is a mix-in embedded by all Instructions. -// It provides the implementations of the Block and setBlock methods. -type anInstruction struct { - node - block *BasicBlock // the basic block of this instruction - comment string -} - -func (instr anInstruction) Comment() string { - return instr.comment -} - -// CallCommon is contained by Go, Defer and Call to hold the -// common parts of a function or method call. -// -// Each CallCommon exists in one of two modes, function call and -// interface method invocation, or "call" and "invoke" for short. -// -// 1. "call" mode: when Method is nil (!IsInvoke), a CallCommon -// represents an ordinary function call of the value in Value, -// which may be a *Builtin, a *Function or any other value of kind -// 'func'. -// -// Value may be one of: -// -// (a) a *Function, indicating a statically dispatched call -// to a package-level function, an anonymous function, or -// a method of a named type. -// (b) a *MakeClosure, indicating an immediately applied -// function literal with free variables. -// (c) a *Builtin, indicating a statically dispatched call -// to a built-in function. -// (d) any other value, indicating a dynamically dispatched -// function call. -// -// StaticCallee returns the identity of the callee in cases -// (a) and (b), nil otherwise. -// -// Args contains the arguments to the call. If Value is a method, -// Args[0] contains the receiver parameter. -// -// Example printed form: -// -// t3 = Call <()> println t1 t2 -// Go t3 -// Defer t3 -// -// 2. "invoke" mode: when Method is non-nil (IsInvoke), a CallCommon -// represents a dynamically dispatched call to an interface method. -// In this mode, Value is the interface value and Method is the -// interface's abstract method. Note: an abstract method may be -// shared by multiple interfaces due to embedding; Value.Type() -// provides the specific interface used for this call. 
-// -// Value is implicitly supplied to the concrete method implementation -// as the receiver parameter; in other words, Args[0] holds not the -// receiver but the first true argument. -// -// Example printed form: -// -// t6 = Invoke t5.String -// GoInvoke t4.Bar t2 -// DeferInvoke t4.Bar t2 -// -// For all calls to variadic functions (Signature().Variadic()), -// the last element of Args is a slice. -type CallCommon struct { - Value Value // receiver (invoke mode) or func value (call mode) - Method *types.Func // abstract method (invoke mode) - Args []Value // actual parameters (in static method call, includes receiver) - TypeArgs []types.Type - Results Value -} - -// IsInvoke returns true if this call has "invoke" (not "call") mode. -func (c *CallCommon) IsInvoke() bool { - return c.Method != nil -} - -// Signature returns the signature of the called function. -// -// For an "invoke"-mode call, the signature of the interface method is -// returned. -// -// In either "call" or "invoke" mode, if the callee is a method, its -// receiver is represented by sig.Recv, not sig.Params().At(0). -func (c *CallCommon) Signature() *types.Signature { - if c.Method != nil { - return c.Method.Type().(*types.Signature) - } - return typeutil.CoreType(c.Value.Type()).(*types.Signature) -} - -// StaticCallee returns the callee if this is a trivially static -// "call"-mode call to a function. -func (c *CallCommon) StaticCallee() *Function { - switch fn := c.Value.(type) { - case *Function: - return fn - case *MakeClosure: - return fn.Fn.(*Function) - } - return nil -} - -// Description returns a description of the mode of this call suitable -// for a user interface, e.g., "static method call". -func (c *CallCommon) Description() string { - switch fn := c.Value.(type) { - case *Builtin: - return "built-in function call" - case *MakeClosure: - return "static function closure call" - case *Function: - if fn.Signature.Recv() != nil { - return "static method call" - } - return "static function call" - } - if c.IsInvoke() { - return "dynamic method call" // ("invoke" mode) - } - return "dynamic function call" -} - -// The CallInstruction interface, implemented by *Go, *Defer and *Call, -// exposes the common parts of function-calling instructions, -// yet provides a way back to the Value defined by *Call alone. 
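A short sketch of consuming the "call" and "invoke" modes through the CallCommon accessors described above, assuming the vendored ir package; `describeCall` is a hypothetical helper, not part of the package:

```go
package irutil

import "honnef.co/go/tools/go/ir"

// describeCall reports how a call-like instruction (*Call, *Go, *Defer)
// dispatches, using only the documented CallCommon accessors.
func describeCall(instr ir.CallInstruction) string {
	common := instr.Common()
	if common.IsInvoke() {
		return "dynamic method call of " + common.Method.Name()
	}
	if callee := common.StaticCallee(); callee != nil {
		return "static call to " + callee.Name()
	}
	return common.Description()
}
```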
-type CallInstruction interface { - Instruction - Common() *CallCommon // returns the common parts of the call - Value() *Call -} - -func (s *Call) Common() *CallCommon { return &s.Call } -func (s *Defer) Common() *CallCommon { return &s.Call } -func (s *Go) Common() *CallCommon { return &s.Call } - -func (s *Call) Value() *Call { return s } -func (s *Defer) Value() *Call { return nil } -func (s *Go) Value() *Call { return nil } - -func (v *Builtin) Type() types.Type { return v.sig } -func (v *Builtin) Name() string { return v.name } -func (*Builtin) Referrers() *[]Instruction { return nil } -func (v *Builtin) Pos() token.Pos { return token.NoPos } -func (v *Builtin) Object() types.Object { return types.Universe.Lookup(v.name) } -func (v *Builtin) Parent() *Function { return nil } - -func (v *FreeVar) Type() types.Type { return v.typ } -func (v *FreeVar) Name() string { return v.name } -func (v *FreeVar) Referrers() *[]Instruction { return &v.referrers } -func (v *FreeVar) Parent() *Function { return v.parent } - -func (v *Global) Type() types.Type { return v.typ } -func (v *Global) Name() string { return v.name } -func (v *Global) Parent() *Function { return nil } -func (v *Global) Referrers() *[]Instruction { return nil } -func (v *Global) Token() token.Token { return token.VAR } -func (v *Global) Object() types.Object { return v.object } -func (v *Global) String() string { return v.RelString(nil) } -func (v *Global) Package() *Package { return v.Pkg } -func (v *Global) RelString(from *types.Package) string { return relString(v, from) } - -func (v *Function) Name() string { return v.name } -func (v *Function) Type() types.Type { return v.Signature } -func (v *Function) Token() token.Token { return token.FUNC } -func (v *Function) Object() types.Object { return v.object } -func (v *Function) String() string { return v.RelString(nil) } -func (v *Function) Package() *Package { return v.Pkg } -func (v *Function) Parent() *Function { return v.parent } -func (v *Function) Referrers() *[]Instruction { - if v.parent != nil { - return &v.referrers - } - return nil -} - -func (v *Parameter) Object() types.Object { return v.object } - -func (v *Alloc) Type() types.Type { return v.typ } -func (v *Alloc) Referrers() *[]Instruction { return &v.referrers } - -func (v *register) Type() types.Type { return v.typ } -func (v *register) setType(typ types.Type) { v.typ = typ } -func (v *register) Name() string { return fmt.Sprintf("t%d", v.id) } -func (v *register) Referrers() *[]Instruction { return &v.referrers } - -func (v *anInstruction) Parent() *Function { return v.block.parent } -func (v *anInstruction) Block() *BasicBlock { return v.block } -func (v *anInstruction) setBlock(block *BasicBlock) { v.block = block } -func (v *anInstruction) Referrers() *[]Instruction { return nil } - -func (t *Type) Name() string { return t.object.Name() } -func (t *Type) Pos() token.Pos { return t.object.Pos() } -func (t *Type) Type() types.Type { return t.object.Type() } -func (t *Type) Token() token.Token { return token.TYPE } -func (t *Type) Object() types.Object { return t.object } -func (t *Type) String() string { return t.RelString(nil) } -func (t *Type) Package() *Package { return t.pkg } -func (t *Type) RelString(from *types.Package) string { return relString(t, from) } - -func (c *NamedConst) Name() string { return c.object.Name() } -func (c *NamedConst) Pos() token.Pos { return c.object.Pos() } -func (c *NamedConst) String() string { return c.RelString(nil) } -func (c *NamedConst) Type() types.Type { return 
c.object.Type() } -func (c *NamedConst) Token() token.Token { return token.CONST } -func (c *NamedConst) Object() types.Object { return c.object } -func (c *NamedConst) Package() *Package { return c.pkg } -func (c *NamedConst) RelString(from *types.Package) string { return relString(c, from) } - -// Func returns the package-level function of the specified name, -// or nil if not found. -func (p *Package) Func(name string) (f *Function) { - f, _ = p.Members[name].(*Function) - return -} - -// Var returns the package-level variable of the specified name, -// or nil if not found. -func (p *Package) Var(name string) (g *Global) { - g, _ = p.Members[name].(*Global) - return -} - -// Const returns the package-level constant of the specified name, -// or nil if not found. -func (p *Package) Const(name string) (c *NamedConst) { - c, _ = p.Members[name].(*NamedConst) - return -} - -// Type returns the package-level type of the specified name, -// or nil if not found. -func (p *Package) Type(name string) (t *Type) { - t, _ = p.Members[name].(*Type) - return -} - -func (s *DebugRef) Pos() token.Pos { return s.Expr.Pos() } - -// Operands. - -func (v *Alloc) Operands(rands []*Value) []*Value { - return rands -} - -func (v *BinOp) Operands(rands []*Value) []*Value { - return append(rands, &v.X, &v.Y) -} - -func (c *CallCommon) Operands(rands []*Value) []*Value { - rands = append(rands, &c.Value) - for i := range c.Args { - rands = append(rands, &c.Args[i]) - } - return rands -} - -func (s *Go) Operands(rands []*Value) []*Value { - return s.Call.Operands(rands) -} - -func (s *Call) Operands(rands []*Value) []*Value { - return s.Call.Operands(rands) -} - -func (s *Defer) Operands(rands []*Value) []*Value { - return s.Call.Operands(rands) -} - -func (v *ChangeInterface) Operands(rands []*Value) []*Value { - return append(rands, &v.X) -} - -func (v *ChangeType) Operands(rands []*Value) []*Value { - return append(rands, &v.X) -} - -func (v *Convert) Operands(rands []*Value) []*Value { - return append(rands, &v.X) -} - -func (v *SliceToArrayPointer) Operands(rands []*Value) []*Value { - return append(rands, &v.X) -} - -func (v *SliceToArray) Operands(rands []*Value) []*Value { - return append(rands, &v.X) -} - -func (s *DebugRef) Operands(rands []*Value) []*Value { - return append(rands, &s.X) -} - -func (s *Copy) Operands(rands []*Value) []*Value { - return append(rands, &s.X) -} - -func (v *Extract) Operands(rands []*Value) []*Value { - return append(rands, &v.Tuple) -} - -func (v *Field) Operands(rands []*Value) []*Value { - return append(rands, &v.X) -} - -func (v *FieldAddr) Operands(rands []*Value) []*Value { - return append(rands, &v.X) -} - -func (s *If) Operands(rands []*Value) []*Value { - return append(rands, &s.Cond) -} - -func (s *ConstantSwitch) Operands(rands []*Value) []*Value { - rands = append(rands, &s.Tag) - for i := range s.Conds { - rands = append(rands, &s.Conds[i]) - } - return rands -} - -func (s *TypeSwitch) Operands(rands []*Value) []*Value { - rands = append(rands, &s.Tag) - return rands -} - -func (v *Index) Operands(rands []*Value) []*Value { - return append(rands, &v.X, &v.Index) -} - -func (v *IndexAddr) Operands(rands []*Value) []*Value { - return append(rands, &v.X, &v.Index) -} - -func (*Jump) Operands(rands []*Value) []*Value { - return rands -} - -func (*Unreachable) Operands(rands []*Value) []*Value { - return rands -} - -func (v *MapLookup) Operands(rands []*Value) []*Value { - return append(rands, &v.X, &v.Index) -} - -func (v *StringLookup) Operands(rands []*Value) 
[]*Value { - return append(rands, &v.X, &v.Index) -} - -func (v *MakeChan) Operands(rands []*Value) []*Value { - return append(rands, &v.Size) -} - -func (v *MakeClosure) Operands(rands []*Value) []*Value { - rands = append(rands, &v.Fn) - for i := range v.Bindings { - rands = append(rands, &v.Bindings[i]) - } - return rands -} - -func (v *MakeInterface) Operands(rands []*Value) []*Value { - return append(rands, &v.X) -} - -func (v *MakeMap) Operands(rands []*Value) []*Value { - return append(rands, &v.Reserve) -} - -func (v *MakeSlice) Operands(rands []*Value) []*Value { - return append(rands, &v.Len, &v.Cap) -} - -func (v *MapUpdate) Operands(rands []*Value) []*Value { - return append(rands, &v.Map, &v.Key, &v.Value) -} - -func (v *Next) Operands(rands []*Value) []*Value { - return append(rands, &v.Iter) -} - -func (s *Panic) Operands(rands []*Value) []*Value { - return append(rands, &s.X) -} - -func (v *Sigma) Operands(rands []*Value) []*Value { - return append(rands, &v.X) -} - -func (v *Phi) Operands(rands []*Value) []*Value { - for i := range v.Edges { - rands = append(rands, &v.Edges[i]) - } - return rands -} - -func (v *Range) Operands(rands []*Value) []*Value { - return append(rands, &v.X) -} - -func (s *Return) Operands(rands []*Value) []*Value { - for i := range s.Results { - rands = append(rands, &s.Results[i]) - } - return rands -} - -func (*RunDefers) Operands(rands []*Value) []*Value { - return rands -} - -func (v *Select) Operands(rands []*Value) []*Value { - for i := range v.States { - rands = append(rands, &v.States[i].Chan, &v.States[i].Send) - } - return rands -} - -func (s *Send) Operands(rands []*Value) []*Value { - return append(rands, &s.Chan, &s.X) -} - -func (recv *Recv) Operands(rands []*Value) []*Value { - return append(rands, &recv.Chan) -} - -func (v *Slice) Operands(rands []*Value) []*Value { - return append(rands, &v.X, &v.Low, &v.High, &v.Max) -} - -func (s *Store) Operands(rands []*Value) []*Value { - return append(rands, &s.Addr, &s.Val) -} - -func (s *BlankStore) Operands(rands []*Value) []*Value { - return append(rands, &s.Val) -} - -func (v *TypeAssert) Operands(rands []*Value) []*Value { - return append(rands, &v.X) -} - -func (v *UnOp) Operands(rands []*Value) []*Value { - return append(rands, &v.X) -} - -func (v *Load) Operands(rands []*Value) []*Value { - return append(rands, &v.X) -} - -func (v *AggregateConst) Operands(rands []*Value) []*Value { - for i := range v.Values { - rands = append(rands, &v.Values[i]) - } - return rands -} - -func (v *CompositeValue) Operands(rands []*Value) []*Value { - for i := range v.Values { - rands = append(rands, &v.Values[i]) - } - return rands -} - -// Non-Instruction Values: -func (v *Builtin) Operands(rands []*Value) []*Value { return rands } -func (v *FreeVar) Operands(rands []*Value) []*Value { return rands } -func (v *Const) Operands(rands []*Value) []*Value { return rands } -func (v *ArrayConst) Operands(rands []*Value) []*Value { return rands } -func (v *GenericConst) Operands(rands []*Value) []*Value { return rands } -func (v *Function) Operands(rands []*Value) []*Value { return rands } -func (v *Global) Operands(rands []*Value) []*Value { return rands } -func (v *Parameter) Operands(rands []*Value) []*Value { return rands } diff --git a/vendor/honnef.co/go/tools/go/ir/util.go b/vendor/honnef.co/go/tools/go/ir/util.go deleted file mode 100644 index 0a733b654..000000000 --- a/vendor/honnef.co/go/tools/go/ir/util.go +++ /dev/null @@ -1,148 +0,0 @@ -// Copyright 2013 The Go Authors. All rights reserved. 
-// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package ir - -// This file defines a number of miscellaneous utility functions. - -import ( - "fmt" - "go/ast" - "go/token" - "go/types" - "io" - "os" - - "honnef.co/go/tools/go/ast/astutil" - "honnef.co/go/tools/go/types/typeutil" -) - -//// AST utilities - -func unparen(e ast.Expr) ast.Expr { return astutil.Unparen(e) } - -// isBlankIdent returns true iff e is an Ident with name "_". -// They have no associated types.Object, and thus no type. -func isBlankIdent(e ast.Expr) bool { - id, ok := e.(*ast.Ident) - return ok && id.Name == "_" -} - -//// Type utilities. Some of these belong in go/types. - -// isPointer returns true for types whose underlying type is a pointer, -// and for type parameters whose core type is a pointer. -func isPointer(typ types.Type) bool { - if ctyp := typeutil.CoreType(typ); ctyp != nil { - _, ok := ctyp.(*types.Pointer) - return ok - } - _, ok := typ.Underlying().(*types.Pointer) - return ok -} - -func isInterface(T types.Type) bool { return types.IsInterface(T) } - -// deref returns a pointer's element type; otherwise it returns typ. -func deref(typ types.Type) types.Type { - orig := typ - - if t, ok := typ.(*types.TypeParam); ok { - if ctyp := typeutil.CoreType(t); ctyp != nil { - typ = ctyp - } - } - if p, ok := typ.Underlying().(*types.Pointer); ok { - return p.Elem() - } - return orig -} - -// recvType returns the receiver type of method obj. -func recvType(obj *types.Func) types.Type { - return obj.Type().(*types.Signature).Recv().Type() -} - -// logStack prints the formatted "start" message to stderr and -// returns a closure that prints the corresponding "end" message. -// Call using 'defer logStack(...)()' to show builder stack on panic. -// Don't forget trailing parens! -func logStack(format string, args ...interface{}) func() { - msg := fmt.Sprintf(format, args...) - io.WriteString(os.Stderr, msg) - io.WriteString(os.Stderr, "\n") - return func() { - io.WriteString(os.Stderr, msg) - io.WriteString(os.Stderr, " end\n") - } -} - -// newVar creates a 'var' for use in a types.Tuple. -func newVar(name string, typ types.Type) *types.Var { - return types.NewParam(token.NoPos, nil, name, typ) -} - -// anonVar creates an anonymous 'var' for use in a types.Tuple. -func anonVar(typ types.Type) *types.Var { - return newVar("", typ) -} - -var lenResults = types.NewTuple(anonVar(tInt)) - -// makeLen returns the len builtin specialized to type func(T)int. -func makeLen(T types.Type) *Builtin { - lenParams := types.NewTuple(anonVar(T)) - return &Builtin{ - name: "len", - sig: types.NewSignatureType(nil, nil, nil, lenParams, lenResults, false), - } -} - -type StackMap struct { - m []map[Value]Value -} - -func (m *StackMap) Push() { - m.m = append(m.m, map[Value]Value{}) -} - -func (m *StackMap) Pop() { - m.m = m.m[:len(m.m)-1] -} - -func (m *StackMap) Get(key Value) (Value, bool) { - for i := len(m.m) - 1; i >= 0; i-- { - if v, ok := m.m[i][key]; ok { - return v, true - } - } - return nil, false -} - -func (m *StackMap) Set(k Value, v Value) { - m.m[len(m.m)-1][k] = v -} - -// Unwrap recursively unwraps Sigma and Copy nodes. -func Unwrap(v Value) Value { - for { - switch vv := v.(type) { - case *Sigma: - v = vv.X - case *Copy: - v = vv.X - default: - return v - } - } -} - -func assert(x bool) { - if !x { - panic("failed assertion") - } -} - -// BlockMap is a mapping from basic blocks (identified by their indices) to values. 
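Unwrap, defined in util.go above, peels off the Sigma nodes inserted on branch edges and any Copy nodes, which is what an analysis usually wants before comparing values for identity. A tiny usage sketch, assuming the vendored ir package; the helper name is made up:

```go
package irutil

import "honnef.co/go/tools/go/ir"

// sameUnderlyingValue reports whether two IR values denote the same value
// once branch-splitting Sigma nodes and Copy nodes are stripped.
func sameUnderlyingValue(a, b ir.Value) bool {
	return ir.Unwrap(a) == ir.Unwrap(b)
}
```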
-type BlockMap[T any] []T diff --git a/vendor/honnef.co/go/tools/go/ir/wrappers.go b/vendor/honnef.co/go/tools/go/ir/wrappers.go deleted file mode 100644 index 69537fb77..000000000 --- a/vendor/honnef.co/go/tools/go/ir/wrappers.go +++ /dev/null @@ -1,381 +0,0 @@ -// Copyright 2013 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package ir - -// This file defines synthesis of Functions that delegate to declared -// methods; they come in three kinds: -// -// (1) wrappers: methods that wrap declared methods, performing -// implicit pointer indirections and embedded field selections. -// -// (2) thunks: funcs that wrap declared methods. Like wrappers, -// thunks perform indirections and field selections. The thunk's -// first parameter is used as the receiver for the method call. -// -// (3) bounds: funcs that wrap declared methods. The bound's sole -// free variable, supplied by a closure, is used as the receiver -// for the method call. No indirections or field selections are -// performed since they can be done before the call. - -import ( - "fmt" - "go/types" -) - -// -- wrappers ----------------------------------------------------------- - -// makeWrapper returns a synthetic method that delegates to the -// declared method denoted by meth.Obj(), first performing any -// necessary pointer indirections or field selections implied by meth. -// -// The resulting method's receiver type is meth.Recv(). -// -// This function is versatile but quite subtle! Consider the -// following axes of variation when making changes: -// - optional receiver indirection -// - optional implicit field selections -// - meth.Obj() may denote a concrete or an interface method -// - the result may be a thunk or a wrapper. 
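The three kinds of synthetic delegating functions described at the top of wrappers.go correspond to ordinary Go source forms. A small, hypothetical plain-Go example (independent of this package) showing which construct gives rise to which synthetic function, based on the descriptions above:

```go
package main

type T int

func (T) meth() {}

func main() {
	var t T

	f1 := T.meth // method expression: lowered via a thunk (receiver becomes the first parameter)
	f1(t)

	f2 := t.meth // method value: lowered via a bound wrapper captured by MakeClosure
	f2()

	// Wrappers proper are synthesized for implicit pointer indirections and
	// for promoted methods of embedded fields.
}
```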
-// -// EXCLUSIVE_LOCKS_REQUIRED(prog.methodsMu) -func makeWrapper(prog *Program, sel *types.Selection) *Function { - obj := sel.Obj().(*types.Func) // the declared function - sig := sel.Type().(*types.Signature) // type of this wrapper - - var recv *types.Var // wrapper's receiver or thunk's params[0] - name := obj.Name() - var description Synthetic - var start int // first regular param - if sel.Kind() == types.MethodExpr { - name += "$thunk" - description = SyntheticThunk - recv = sig.Params().At(0) - start = 1 - } else { - description = SyntheticWrapper - recv = sig.Recv() - } - - if prog.mode&LogSource != 0 { - defer logStack("make %s to (%s)", description, recv.Type())() - } - fn := &Function{ - name: name, - method: sel, - object: obj, - Signature: sig, - Synthetic: description, - Prog: prog, - functionBody: new(functionBody), - } - fn.initHTML(prog.PrintFunc) - fn.startBody() - fn.addSpilledParam(recv, nil) - createParams(fn, start) - - indices := sel.Index() - - var v Value = fn.Locals[0] // spilled receiver - if isPointer(sel.Recv()) { - v = emitLoad(fn, v, nil) - - // For simple indirection wrappers, perform an informative nil-check: - // "value method (T).f called using nil *T pointer" - if len(indices) == 1 && !isPointer(recvType(obj)) { - var c Call - c.Call.Value = &Builtin{ - name: "ir:wrapnilchk", - sig: types.NewSignatureType(nil, nil, nil, - types.NewTuple(anonVar(sel.Recv()), anonVar(tString), anonVar(tString)), - types.NewTuple(anonVar(sel.Recv())), false), - } - c.Call.Args = []Value{ - v, - emitConst(fn, stringConst(deref(sel.Recv()).String())), - emitConst(fn, stringConst(sel.Obj().Name())), - } - c.setType(v.Type()) - v = fn.emit(&c, nil) - } - } - - // Invariant: v is a pointer, either - // value of *A receiver param, or - // address of A spilled receiver. - - // We use pointer arithmetic (FieldAddr possibly followed by - // Load) in preference to value extraction (Field possibly - // preceded by Load). - - v = emitImplicitSelections(fn, v, indices[:len(indices)-1], nil) - - // Invariant: v is a pointer, either - // value of implicit *C field, or - // address of implicit C field. - - var c Call - if r := recvType(obj); !isInterface(r) { // concrete method - if !isPointer(r) { - v = emitLoad(fn, v, nil) - } - c.Call.Value = prog.declaredFunc(obj) - c.Call.Args = append(c.Call.Args, v) - } else { - c.Call.Method = obj - c.Call.Value = emitLoad(fn, v, nil) - } - for _, arg := range fn.Params[1:] { - c.Call.Args = append(c.Call.Args, arg) - } - emitTailCall(fn, &c, nil) - fn.finishBody() - return fn -} - -// createParams creates parameters for wrapper method fn based on its -// Signature.Params, which do not include the receiver. -// start is the index of the first regular parameter to use. -func createParams(fn *Function, start int) { - tparams := fn.Signature.Params() - for i, n := start, tparams.Len(); i < n; i++ { - fn.addParamObj(tparams.At(i), nil) - } -} - -// -- bounds ----------------------------------------------------------- - -// makeBound returns a bound method wrapper (or "bound"), a synthetic -// function that delegates to a concrete or interface method denoted -// by obj. The resulting function has no receiver, but has one free -// variable which will be used as the method's receiver in the -// tail-call. -// -// Use MakeClosure with such a wrapper to construct a bound method -// closure. 
e.g.: -// -// type T int or: type T interface { meth() } -// func (t T) meth() -// var t T -// f := t.meth -// f() // calls t.meth() -// -// f is a closure of a synthetic wrapper defined as if by: -// -// f := func() { return t.meth() } -// -// Unlike makeWrapper, makeBound need perform no indirection or field -// selections because that can be done before the closure is -// constructed. -// -// EXCLUSIVE_LOCKS_ACQUIRED(meth.Prog.methodsMu) -func makeBound(prog *Program, obj *types.Func) *Function { - prog.methodsMu.Lock() - defer prog.methodsMu.Unlock() - fn, ok := prog.bounds[obj] - if !ok { - if prog.mode&LogSource != 0 { - defer logStack("%s", SyntheticBound)() - } - fn = &Function{ - name: obj.Name() + "$bound", - object: obj, - Signature: changeRecv(obj.Type().(*types.Signature), nil), // drop receiver - Synthetic: SyntheticBound, - Prog: prog, - functionBody: new(functionBody), - } - fn.initHTML(prog.PrintFunc) - - fv := &FreeVar{name: "recv", typ: recvType(obj), parent: fn} - fn.FreeVars = []*FreeVar{fv} - fn.startBody() - createParams(fn, 0) - var c Call - - if !isInterface(recvType(obj)) { // concrete - c.Call.Value = prog.declaredFunc(obj) - c.Call.Args = []Value{fv} - } else { - c.Call.Value = fv - c.Call.Method = obj - } - for _, arg := range fn.Params { - c.Call.Args = append(c.Call.Args, arg) - } - emitTailCall(fn, &c, nil) - fn.finishBody() - - prog.bounds[obj] = fn - } - return fn -} - -// -- thunks ----------------------------------------------------------- - -// makeThunk returns a thunk, a synthetic function that delegates to a -// concrete or interface method denoted by sel.Obj(). The resulting -// function has no receiver, but has an additional (first) regular -// parameter. -// -// Precondition: sel.Kind() == types.MethodExpr. -// -// type T int or: type T interface { meth() } -// func (t T) meth() -// f := T.meth -// var t T -// f(t) // calls t.meth() -// -// f is a synthetic wrapper defined as if by: -// -// f := func(t T) { return t.meth() } -// -// TODO(adonovan): opt: currently the stub is created even when used -// directly in a function call: C.f(i, 0). This is less efficient -// than inlining the stub. -// -// EXCLUSIVE_LOCKS_ACQUIRED(meth.Prog.methodsMu) -func makeThunk(prog *Program, sel *types.Selection) *Function { - if sel.Kind() != types.MethodExpr { - panic(sel) - } - - key := selectionKey{ - kind: sel.Kind(), - recv: sel.Recv(), - obj: sel.Obj(), - index: fmt.Sprint(sel.Index()), - indirect: sel.Indirect(), - } - - prog.methodsMu.Lock() - defer prog.methodsMu.Unlock() - - // Canonicalize key.recv to avoid constructing duplicate thunks. - canonRecv, ok := prog.canon.At(key.recv) - if !ok { - canonRecv = key.recv - prog.canon.Set(key.recv, canonRecv) - } - key.recv = canonRecv - - fn, ok := prog.thunks[key] - if !ok { - fn = makeWrapper(prog, sel) - if fn.Signature.Recv() != nil { - panic(fn) // unexpected receiver - } - prog.thunks[key] = fn - } - return fn -} - -func changeRecv(s *types.Signature, recv *types.Var) *types.Signature { - return types.NewSignatureType(recv, nil, nil, s.Params(), s.Results(), s.Variadic()) -} - -// selectionKey is like types.Selection but a usable map key. -type selectionKey struct { - kind types.SelectionKind - recv types.Type // canonicalized via Program.canon - obj types.Object - index string - indirect bool -} - -// makeInstance creates a wrapper function with signature sig that calls the generic function fn. -// If targs is not nil, fn is a function and targs describes the concrete type arguments. 
-// If targs is nil, fn is a method and the type arguments are derived from the receiver. -func makeInstance(prog *Program, fn *Function, sig *types.Signature, targs *types.TypeList) *Function { - if sig.Recv() != nil { - assert(targs == nil) - // Methods don't have their own type parameters, but the receiver does - targs = deref(sig.Recv().Type()).(*types.Named).TypeArgs() - } else { - assert(targs != nil) - } - - wrapper := fn.generics.At(targs) - if wrapper != nil { - return wrapper - } - - var name string - if sig.Recv() != nil { - name = fn.name - } else { - name = fmt.Sprintf("%s$generic#%d", fn.name, fn.generics.Len()) - } - w := &Function{ - name: name, - object: fn.object, - Signature: sig, - Synthetic: SyntheticGeneric, - Prog: prog, - functionBody: new(functionBody), - } - w.initHTML(prog.PrintFunc) - w.startBody() - if sig.Recv() != nil { - w.addParamObj(sig.Recv(), nil) - } - createParams(w, 0) - var c Call - c.Call.Value = fn - tresults := fn.Signature.Results() - if tresults.Len() == 1 { - c.typ = tresults.At(0).Type() - } else { - c.typ = tresults - } - - changeType := func(v Value, typ types.Type) Value { - if types.Identical(v.Type(), typ) { - return v - } - var c ChangeType - c.X = v - c.typ = typ - return w.emit(&c, nil) - } - - for i, arg := range w.Params { - if sig.Recv() != nil { - if i == 0 { - c.Call.Args = append(c.Call.Args, changeType(w.Params[0], fn.Signature.Recv().Type())) - } else { - c.Call.Args = append(c.Call.Args, changeType(arg, fn.Signature.Params().At(i-1).Type())) - } - } else { - c.Call.Args = append(c.Call.Args, changeType(arg, fn.Signature.Params().At(i).Type())) - } - } - for i := 0; i < targs.Len(); i++ { - arg := targs.At(i) - c.Call.TypeArgs = append(c.Call.TypeArgs, arg) - } - results := w.emit(&c, nil) - var ret Return - switch tresults.Len() { - case 0: - case 1: - ret.Results = []Value{changeType(results, sig.Results().At(0).Type())} - default: - for i := 0; i < tresults.Len(); i++ { - v := emitExtract(w, results, i, nil) - ret.Results = append(ret.Results, changeType(v, sig.Results().At(i).Type())) - } - } - - w.Exit = w.newBasicBlock("exit") - emitJump(w, w.Exit, nil) - w.currentBlock = w.Exit - w.emit(&ret, nil) - w.currentBlock = nil - - w.finishBody() - - fn.generics.Set(targs, w) - return w -} diff --git a/vendor/honnef.co/go/tools/go/ir/write.go b/vendor/honnef.co/go/tools/go/ir/write.go deleted file mode 100644 index 139c8cf32..000000000 --- a/vendor/honnef.co/go/tools/go/ir/write.go +++ /dev/null @@ -1,5 +0,0 @@ -package ir - -func NewJump(parent *BasicBlock) *Jump { - return &Jump{anInstruction{block: parent}} -} diff --git a/vendor/honnef.co/go/tools/go/types/typeutil/ext.go b/vendor/honnef.co/go/tools/go/types/typeutil/ext.go deleted file mode 100644 index bc553ec01..000000000 --- a/vendor/honnef.co/go/tools/go/types/typeutil/ext.go +++ /dev/null @@ -1,18 +0,0 @@ -package typeutil - -import ( - "fmt" - "go/types" -) - -type Iterator struct { - elem types.Type -} - -func (t *Iterator) Underlying() types.Type { return t } -func (t *Iterator) String() string { return fmt.Sprintf("iterator(%s)", t.elem) } -func (t *Iterator) Elem() types.Type { return t.elem } - -func NewIterator(elem types.Type) *Iterator { - return &Iterator{elem: elem} -} diff --git a/vendor/honnef.co/go/tools/go/types/typeutil/typeparams.go b/vendor/honnef.co/go/tools/go/types/typeutil/typeparams.go deleted file mode 100644 index 2bf6ec609..000000000 --- a/vendor/honnef.co/go/tools/go/types/typeutil/typeparams.go +++ /dev/null @@ -1,110 +0,0 @@ -package 
typeutil - -import ( - "errors" - "go/types" - - "golang.org/x/exp/typeparams" -) - -type TypeSet struct { - Terms []*types.Term - empty bool -} - -func NewTypeSet(typ types.Type) TypeSet { - terms, err := typeparams.NormalTerms(typ) - if err != nil { - if errors.Is(err, typeparams.ErrEmptyTypeSet) { - return TypeSet{nil, true} - } else { - // We couldn't determine the type set. Assume it's all types. - return TypeSet{nil, false} - } - } - return TypeSet{terms, false} -} - -// CoreType returns the type set's core type, or nil if it has none. -// The function only looks at type terms and may thus return core types for some empty type sets, such as -// 'interface { map[int]string; foo() }' -func (ts TypeSet) CoreType() types.Type { - if len(ts.Terms) == 0 { - // Either the type set is empty, or it isn't constrained. Either way it doesn't have a core type. - return nil - } - typ := ts.Terms[0].Type().Underlying() - for _, term := range ts.Terms[1:] { - ut := term.Type().Underlying() - if types.Identical(typ, ut) { - continue - } - - ch1, ok := typ.(*types.Chan) - if !ok { - return nil - } - ch2, ok := ut.(*types.Chan) - if !ok { - return nil - } - if ch1.Dir() == types.SendRecv { - // typ is currently a bidirectional channel. The term's type is either also bidirectional, or - // unidirectional. Use the term's type. - typ = ut - } else if ch2.Dir() == types.SendRecv { - // typ is currently a unidirectional channel and the term's type is bidirectional, which means it has no - // effect. - continue - } else if ch1.Dir() != ch2.Dir() { - // typ is not bidirectional and typ and term disagree about the direction - return nil - } - } - return typ -} - -// CoreType is a wrapper for NewTypeSet(typ).CoreType() -func CoreType(typ types.Type) types.Type { - return NewTypeSet(typ).CoreType() -} - -// All calls fn for each term in the type set and reports whether all invocations returned true. -// If the type set is empty or unconstrained, All immediately returns false. -func (ts TypeSet) All(fn func(*types.Term) bool) bool { - if len(ts.Terms) == 0 { - return false - } - for _, term := range ts.Terms { - if !fn(term) { - return false - } - } - return true -} - -// Any calls fn for each term in the type set and reports whether any invocation returned true. -// It stops after the first call that returned true. -func (ts TypeSet) Any(fn func(*types.Term) bool) bool { - for _, term := range ts.Terms { - if fn(term) { - return true - } - } - return false -} - -// All is a wrapper for NewTypeSet(typ).All(fn). -func All(typ types.Type, fn func(*types.Term) bool) bool { - return NewTypeSet(typ).All(fn) -} - -// Any is a wrapper for NewTypeSet(typ).Any(fn). 
-func Any(typ types.Type, fn func(*types.Term) bool) bool { - return NewTypeSet(typ).Any(fn) -} - -func IsSlice(term *types.Term) bool { - _, ok := term.Type().Underlying().(*types.Slice) - return ok -} diff --git a/vendor/honnef.co/go/tools/go/types/typeutil/upstream.go b/vendor/honnef.co/go/tools/go/types/typeutil/upstream.go deleted file mode 100644 index 04d8c21ba..000000000 --- a/vendor/honnef.co/go/tools/go/types/typeutil/upstream.go +++ /dev/null @@ -1,52 +0,0 @@ -package typeutil - -import ( - "go/ast" - "go/types" - _ "unsafe" - - "golang.org/x/tools/go/types/typeutil" -) - -type MethodSetCache = typeutil.MethodSetCache -type Hasher = typeutil.Hasher - -func Callee(info *types.Info, call *ast.CallExpr) types.Object { - return typeutil.Callee(info, call) -} - -func IntuitiveMethodSet(T types.Type, msets *MethodSetCache) []*types.Selection { - return typeutil.IntuitiveMethodSet(T, msets) -} - -func MakeHasher() Hasher { - return typeutil.MakeHasher() -} - -type Map[V any] struct { - m typeutil.Map -} - -func (m *Map[V]) Delete(key types.Type) bool { return m.m.Delete(key) } -func (m *Map[V]) At(key types.Type) (V, bool) { - v := m.m.At(key) - if v == nil { - var zero V - return zero, false - } else { - return v.(V), true - } -} -func (m *Map[V]) Set(key types.Type, value V) { m.m.Set(key, value) } -func (m *Map[V]) Len() int { return m.m.Len() } -func (m *Map[V]) Iterate(f func(key types.Type, value V)) { - ff := func(key types.Type, value interface{}) { - f(key, value.(V)) - } - m.m.Iterate(ff) - -} -func (m *Map[V]) Keys() []types.Type { return m.m.Keys() } -func (m *Map[V]) String() string { return m.m.String() } -func (m *Map[V]) KeysString() string { return m.m.KeysString() } -func (m *Map[V]) SetHasher(h typeutil.Hasher) { m.m.SetHasher(h) } diff --git a/vendor/honnef.co/go/tools/go/types/typeutil/util.go b/vendor/honnef.co/go/tools/go/types/typeutil/util.go deleted file mode 100644 index 3a2ad973b..000000000 --- a/vendor/honnef.co/go/tools/go/types/typeutil/util.go +++ /dev/null @@ -1,133 +0,0 @@ -package typeutil - -import ( - "bytes" - "go/types" - "sync" -) - -var bufferPool = &sync.Pool{ - New: func() interface{} { - buf := bytes.NewBuffer(nil) - buf.Grow(64) - return buf - }, -} - -func FuncName(f *types.Func) string { - buf := bufferPool.Get().(*bytes.Buffer) - buf.Reset() - if f.Type() != nil { - sig := f.Type().(*types.Signature) - if recv := sig.Recv(); recv != nil { - buf.WriteByte('(') - if _, ok := recv.Type().(*types.Interface); ok { - // gcimporter creates abstract methods of - // named interfaces using the interface type - // (not the named type) as the receiver. - // Don't print it in full. - buf.WriteString("interface") - } else { - types.WriteType(buf, recv.Type(), nil) - } - buf.WriteByte(')') - buf.WriteByte('.') - } else if f.Pkg() != nil { - writePackage(buf, f.Pkg()) - } - } - buf.WriteString(f.Name()) - s := buf.String() - bufferPool.Put(buf) - return s -} - -func writePackage(buf *bytes.Buffer, pkg *types.Package) { - if pkg == nil { - return - } - s := pkg.Path() - if s != "" { - buf.WriteString(s) - buf.WriteByte('.') - } -} - -// Dereference returns a pointer's element type; otherwise it returns -// T. -func Dereference(T types.Type) types.Type { - if p, ok := T.Underlying().(*types.Pointer); ok { - return p.Elem() - } - return T -} - -// DereferenceR returns a pointer's element type; otherwise it returns -// T. If the element type is itself a pointer, DereferenceR will be -// applied recursively. 
-func DereferenceR(T types.Type) types.Type { - if p, ok := T.Underlying().(*types.Pointer); ok { - return DereferenceR(p.Elem()) - } - return T -} - -func IsObject(obj types.Object, name string) bool { - var path string - if pkg := obj.Pkg(); pkg != nil { - path = pkg.Path() + "." - } - return path+obj.Name() == name -} - -// OPT(dh): IsType is kind of expensive; should we really use it? -func IsType(T types.Type, name string) bool { return types.TypeString(T, nil) == name } - -func IsPointerLike(T types.Type) bool { - switch T := T.Underlying().(type) { - case *types.Interface, *types.Chan, *types.Map, *types.Signature, *types.Pointer, *types.Slice: - return true - case *types.Basic: - return T.Kind() == types.UnsafePointer - } - return false -} - -type Field struct { - Var *types.Var - Tag string - Path []int -} - -// FlattenFields recursively flattens T and embedded structs, -// returning a list of fields. If multiple fields with the same name -// exist, all will be returned. -func FlattenFields(T *types.Struct) []Field { - return flattenFields(T, nil, nil) -} - -func flattenFields(T *types.Struct, path []int, seen map[types.Type]bool) []Field { - if seen == nil { - seen = map[types.Type]bool{} - } - if seen[T] { - return nil - } - seen[T] = true - var out []Field - for i := 0; i < T.NumFields(); i++ { - field := T.Field(i) - tag := T.Tag(i) - np := append(path[:len(path):len(path)], i) - if field.Anonymous() { - if s, ok := Dereference(field.Type()).Underlying().(*types.Struct); ok { - out = append(out, flattenFields(s, np, seen)...) - } else { - out = append(out, Field{field, tag, np}) - } - } else { - out = append(out, Field{field, tag, np}) - } - } - return out -} diff --git a/vendor/honnef.co/go/tools/internal/passes/buildir/buildir.go b/vendor/honnef.co/go/tools/internal/passes/buildir/buildir.go deleted file mode 100644 index 51dfaef53..000000000 --- a/vendor/honnef.co/go/tools/internal/passes/buildir/buildir.go +++ /dev/null @@ -1,107 +0,0 @@ -// Copyright 2018 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Package buildir defines an Analyzer that constructs the IR -// of an error-free package and returns the set of all -// functions within it. It does not report any diagnostics itself but -// may be used as an input to other analyzers. -// -// THIS INTERFACE IS EXPERIMENTAL AND MAY BE SUBJECT TO INCOMPATIBLE CHANGE. -package buildir - -import ( - "go/ast" - "go/types" - "reflect" - - "honnef.co/go/tools/go/ir" - - "golang.org/x/tools/go/analysis" -) - -type noReturn struct { - Kind ir.NoReturn -} - -func (*noReturn) AFact() {} - -var Analyzer = &analysis.Analyzer{ - Name: "buildir", - Doc: "build IR for later passes", - Run: run, - ResultType: reflect.TypeOf(new(IR)), - FactTypes: []analysis.Fact{new(noReturn)}, -} - -// IR provides intermediate representation for all the -// non-blank source functions in the current package. -type IR struct { - Pkg *ir.Package - SrcFuncs []*ir.Function -} - -func run(pass *analysis.Pass) (interface{}, error) { - // Plundered from ssautil.BuildPackage. - - // We must create a new Program for each Package because the - // analysis API provides no place to hang a Program shared by - // all Packages. Consequently, IR Packages and Functions do not - // have a canonical representation across an analysis session of - // multiple packages. 
This is unlikely to be a problem in - // practice because the analysis API essentially forces all - // packages to be analysed independently, so any given call to - // Analysis.Run on a package will see only IR objects belonging - // to a single Program. - - mode := ir.GlobalDebug - - prog := ir.NewProgram(pass.Fset, mode) - - // Create IR packages for all imports. - // Order is not significant. - created := make(map[*types.Package]bool) - var createAll func(pkgs []*types.Package) - createAll = func(pkgs []*types.Package) { - for _, p := range pkgs { - if !created[p] { - created[p] = true - irpkg := prog.CreatePackage(p, nil, nil, true) - for _, fn := range irpkg.Functions { - if ast.IsExported(fn.Name()) { - var noRet noReturn - if pass.ImportObjectFact(fn.Object(), &noRet) { - fn.NoReturn = noRet.Kind - } - } - } - createAll(p.Imports()) - } - } - } - createAll(pass.Pkg.Imports()) - - // Create and build the primary package. - irpkg := prog.CreatePackage(pass.Pkg, pass.Files, pass.TypesInfo, false) - irpkg.Build() - - // Compute list of source functions, including literals, - // in source order. - var addAnons func(f *ir.Function) - funcs := make([]*ir.Function, len(irpkg.Functions)) - copy(funcs, irpkg.Functions) - addAnons = func(f *ir.Function) { - for _, anon := range f.AnonFuncs { - funcs = append(funcs, anon) - addAnons(anon) - } - } - for _, fn := range irpkg.Functions { - addAnons(fn) - if fn.NoReturn > 0 { - pass.ExportObjectFact(fn.Object(), &noReturn{fn.NoReturn}) - } - } - - return &IR{Pkg: irpkg, SrcFuncs: funcs}, nil -} diff --git a/vendor/honnef.co/go/tools/internal/sharedcheck/lint.go b/vendor/honnef.co/go/tools/internal/sharedcheck/lint.go deleted file mode 100644 index 6e4cdff26..000000000 --- a/vendor/honnef.co/go/tools/internal/sharedcheck/lint.go +++ /dev/null @@ -1,209 +0,0 @@ -package sharedcheck - -import ( - "fmt" - "go/ast" - "go/token" - "go/types" - - "honnef.co/go/tools/analysis/code" - "honnef.co/go/tools/analysis/edit" - "honnef.co/go/tools/analysis/facts/generated" - "honnef.co/go/tools/analysis/facts/tokenfile" - "honnef.co/go/tools/analysis/report" - "honnef.co/go/tools/go/ast/astutil" - "honnef.co/go/tools/go/ir" - "honnef.co/go/tools/go/ir/irutil" - "honnef.co/go/tools/go/types/typeutil" - "honnef.co/go/tools/internal/passes/buildir" - - "golang.org/x/tools/go/analysis" - "golang.org/x/tools/go/analysis/passes/inspect" -) - -func CheckRangeStringRunes(pass *analysis.Pass) (interface{}, error) { - for _, fn := range pass.ResultOf[buildir.Analyzer].(*buildir.IR).SrcFuncs { - cb := func(node ast.Node) bool { - rng, ok := node.(*ast.RangeStmt) - if !ok || !astutil.IsBlank(rng.Key) { - return true - } - - v, _ := fn.ValueForExpr(rng.X) - - // Check that we're converting from string to []rune - val, _ := v.(*ir.Convert) - if val == nil { - return true - } - Tsrc, ok := typeutil.CoreType(val.X.Type()).(*types.Basic) - if !ok || Tsrc.Kind() != types.String { - return true - } - Tdst, ok := typeutil.CoreType(val.Type()).(*types.Slice) - if !ok { - return true - } - TdstElem, ok := Tdst.Elem().(*types.Basic) - if !ok || TdstElem.Kind() != types.Int32 { - return true - } - - // Check that the result of the conversion is only used to - // range over - refs := val.Referrers() - if refs == nil { - return true - } - - // Expect two refs: one for obtaining the length of the slice, - // one for accessing the elements - if len(irutil.FilterDebug(*refs)) != 2 { - // TODO(dh): right now, we check that only one place - // refers to our slice. 
This will miss cases such as - // ranging over the slice twice. Ideally, we'd ensure that - // the slice is only used for ranging over (without - // accessing the key), but that is harder to do because in - // IR form, ranging over a slice looks like an ordinary - // loop with index increments and slice accesses. We'd - // have to look at the associated AST node to check that - // it's a range statement. - return true - } - - pass.Reportf(rng.Pos(), "should range over string, not []rune(string)") - - return true - } - if source := fn.Source(); source != nil { - ast.Inspect(source, cb) - } - } - return nil, nil -} - -// RedundantTypeInDeclarationChecker returns a checker that flags variable declarations with redundantly specified types. -// That is, it flags 'var v T = e' where e's type is identical to T and 'var v = e' (or 'v := e') would have the same effect. -// -// It does not flag variables under the following conditions, to reduce the number of false positives: -// - global variables – these often specify types to aid godoc -// - files that use cgo – cgo code generation and pointer checking emits redundant types -// -// It does not flag variables under the following conditions, unless flagHelpfulTypes is true, to reduce the number of noisy positives: -// - packages that import syscall or unsafe – these sometimes use this form of assignment to make sure types are as expected -// - variables named the blank identifier – a pattern used to confirm the types of variables -// - untyped expressions on the rhs – the explicitness might aid readability -func RedundantTypeInDeclarationChecker(verb string, flagHelpfulTypes bool) *analysis.Analyzer { - fn := func(pass *analysis.Pass) (interface{}, error) { - eval := func(expr ast.Expr) (types.TypeAndValue, error) { - info := &types.Info{ - Types: map[ast.Expr]types.TypeAndValue{}, - } - err := types.CheckExpr(pass.Fset, pass.Pkg, expr.Pos(), expr, info) - return info.Types[expr], err - } - - if !flagHelpfulTypes { - // Don't look at code in low-level packages - for _, imp := range pass.Pkg.Imports() { - if imp.Path() == "syscall" || imp.Path() == "unsafe" { - return nil, nil - } - } - } - - fn := func(node ast.Node) { - decl := node.(*ast.GenDecl) - if decl.Tok != token.VAR { - return - } - - gen, _ := code.Generator(pass, decl.Pos()) - if gen == generated.Cgo { - // TODO(dh): remove this exception once we can use UsesCgo - return - } - - // Delay looking up parent AST nodes until we have to - checkedDecl := false - - specLoop: - for _, spec := range decl.Specs { - spec := spec.(*ast.ValueSpec) - if spec.Type == nil { - continue - } - if len(spec.Names) != len(spec.Values) { - continue - } - Tlhs := pass.TypesInfo.TypeOf(spec.Type) - for i, v := range spec.Values { - if !flagHelpfulTypes && spec.Names[i].Name == "_" { - continue specLoop - } - Trhs := pass.TypesInfo.TypeOf(v) - if !types.Identical(Tlhs, Trhs) { - continue specLoop - } - - // Some expressions are untyped and get converted to the lhs type implicitly. - // This applies to untyped constants, shift operations with an untyped lhs, and possibly others. - // - // Check if the type is truly redundant, i.e. if the type on the lhs doesn't match the default type of the untyped constant. 
- tv, err := eval(v) - if err != nil { - panic(err) - } - if b, ok := tv.Type.(*types.Basic); ok && (b.Info()&types.IsUntyped) != 0 { - if Tlhs != types.Default(b) { - // The rhs is untyped and its default type differs from the explicit type on the lhs - continue specLoop - } - switch v := v.(type) { - case *ast.Ident: - // Only flag named constant rhs if it's a predeclared identifier. - // Don't flag other named constants, as the explicit type may aid readability. - if pass.TypesInfo.ObjectOf(v).Pkg() != nil && !flagHelpfulTypes { - continue specLoop - } - case *ast.BasicLit: - // Do flag basic literals - default: - // Don't flag untyped rhs expressions unless flagHelpfulTypes is set - if !flagHelpfulTypes { - continue specLoop - } - } - } - } - - if !checkedDecl { - // Don't flag global variables. These often have explicit types for godoc's sake. - path, _ := astutil.PathEnclosingInterval(code.File(pass, decl), decl.Pos(), decl.Pos()) - pathLoop: - for _, el := range path { - switch el.(type) { - case *ast.FuncDecl, *ast.FuncLit: - checkedDecl = true - break pathLoop - } - } - if !checkedDecl { - // decl is not inside a function - break specLoop - } - } - - report.Report(pass, spec.Type, fmt.Sprintf("%s omit type %s from declaration; it will be inferred from the right-hand side", verb, report.Render(pass, spec.Type)), report.FilterGenerated(), - report.Fixes(edit.Fix("Remove redundant type", edit.Delete(spec.Type)))) - } - } - code.Preorder(pass, fn, (*ast.GenDecl)(nil)) - return nil, nil - } - - return &analysis.Analyzer{ - Run: fn, - Requires: []*analysis.Analyzer{generated.Analyzer, inspect.Analyzer, tokenfile.Analyzer}, - } -} diff --git a/vendor/honnef.co/go/tools/knowledge/arg.go b/vendor/honnef.co/go/tools/knowledge/arg.go deleted file mode 100644 index 1fe61c1d5..000000000 --- a/vendor/honnef.co/go/tools/knowledge/arg.go +++ /dev/null @@ -1,69 +0,0 @@ -package knowledge - -var Args = map[string]int{ - "(*encoding/json.Decoder).Decode.v": 0, - "(*encoding/json.Encoder).Encode.v": 0, - "(*encoding/xml.Decoder).Decode.v": 0, - "(*encoding/xml.Encoder).Encode.v": 0, - "(*sync.Pool).Put.x": 0, - "(*text/template.Template).Parse.text": 0, - "(io.Seeker).Seek.offset": 0, - "(time.Time).Sub.u": 0, - "append.elems": 1, - "append.slice": 0, - "bytes.Equal.a": 0, - "bytes.Equal.b": 1, - "encoding/binary.Write.data": 2, - "errors.New.text": 0, - "fmt.Fprintf.format": 1, - "fmt.Printf.format": 0, - "fmt.Sprintf.a[0]": 1, - "fmt.Sprintf.format": 0, - "json.Marshal.v": 0, - "json.Unmarshal.v": 1, - "len.v": 0, - "make.size[0]": 1, - "make.size[1]": 2, - "make.t": 0, - "net/url.Parse.rawurl": 0, - "os.OpenFile.flag": 1, - "os/exec.Command.name": 0, - "os/signal.Notify.c": 0, - "regexp.Compile.expr": 0, - "runtime.SetFinalizer.finalizer": 1, - "runtime.SetFinalizer.obj": 0, - "sort.Sort.data": 0, - "strconv.AppendFloat.bitSize": 4, - "strconv.AppendFloat.fmt": 2, - "strconv.AppendInt.base": 2, - "strconv.AppendUint.base": 2, - "strconv.FormatComplex.bitSize": 3, - "strconv.FormatComplex.fmt": 1, - "strconv.FormatFloat.bitSize": 3, - "strconv.FormatFloat.fmt": 1, - "strconv.FormatInt.base": 1, - "strconv.FormatUint.base": 1, - "strconv.ParseComplex.bitSize": 1, - "strconv.ParseFloat.bitSize": 1, - "strconv.ParseInt.base": 1, - "strconv.ParseInt.bitSize": 2, - "strconv.ParseUint.base": 1, - "strconv.ParseUint.bitSize": 2, - "time.Parse.layout": 0, - "time.Sleep.d": 0, - "xml.Marshal.v": 0, - "xml.Unmarshal.v": 1, -} - -// Arg turns the name of an argument into an argument index. 
-// Indices are zero-based and method receivers do not count as arguments. -// -// Arg refers to a manually compiled mapping (see the Args variable.) -// Modify the knowledge package to add new arguments. -func Arg(name string) int { - n, ok := Args[name] - if !ok { - panic("unknown argument " + name) - } - return n -} diff --git a/vendor/honnef.co/go/tools/knowledge/deprecated.go b/vendor/honnef.co/go/tools/knowledge/deprecated.go deleted file mode 100644 index caeb49975..000000000 --- a/vendor/honnef.co/go/tools/knowledge/deprecated.go +++ /dev/null @@ -1,272 +0,0 @@ -package knowledge - -const ( - // DeprecatedNeverUse indicates that an API should never be used, regardless of Go version. - DeprecatedNeverUse = -1 - // DeprecatedUseNoLonger indicates that an API has no use anymore. - DeprecatedUseNoLonger = -2 -) - -// Deprecation describes when a Go API has been deprecated. -type Deprecation struct { - // The minor Go version since which this API has been deprecated. - DeprecatedSince int - // The minor Go version since which an alternative API has been available. - // May also be one of DeprecatedNeverUse or DeprecatedUseNoLonger. - AlternativeAvailableSince int -} - -// go/importer.ForCompiler contains "Deprecated:", but it refers to a single argument, not the whole function. -// Luckily, the notice starts in the middle of a paragraph, and as such isn't detected by us. - -// TODO(dh): StdlibDeprecations doesn't contain entries for internal packages and unexported API. That's fine for normal -// users, but makes the Deprecated check less useful for people working on Go itself. - -// StdlibDeprecations contains a mapping of Go API (such as variables, methods, or fields, among others) -// to information about when it has been deprecated. -var StdlibDeprecations = map[string]Deprecation{ - // FIXME(dh): AllowBinary isn't being detected as deprecated - // because the comment has a newline right after "Deprecated:" - "go/build.AllowBinary": {7, 7}, - "(archive/zip.FileHeader).CompressedSize": {1, 1}, - "(archive/zip.FileHeader).UncompressedSize": {1, 1}, - "(archive/zip.FileHeader).ModifiedTime": {10, 10}, - "(archive/zip.FileHeader).ModifiedDate": {10, 10}, - "(*archive/zip.FileHeader).ModTime": {10, 10}, - "(*archive/zip.FileHeader).SetModTime": {10, 10}, - "(go/doc.Package).Bugs": {1, 1}, - "os.SEEK_SET": {7, 7}, - "os.SEEK_CUR": {7, 7}, - "os.SEEK_END": {7, 7}, - "(net.Dialer).Cancel": {7, 7}, - "runtime.CPUProfile": {9, 0}, - "compress/flate.ReadError": {6, DeprecatedUseNoLonger}, - "compress/flate.WriteError": {6, DeprecatedUseNoLonger}, - "path/filepath.HasPrefix": {0, DeprecatedNeverUse}, - "(net/http.Transport).Dial": {7, 7}, - "(net/http.Transport).DialTLS": {14, 14}, - "(*net/http.Transport).CancelRequest": {6, 5}, - "net/http.ErrWriteAfterFlush": {7, DeprecatedUseNoLonger}, - "net/http.ErrHeaderTooLong": {8, DeprecatedUseNoLonger}, - "net/http.ErrShortBody": {8, DeprecatedUseNoLonger}, - "net/http.ErrMissingContentLength": {8, DeprecatedUseNoLonger}, - "net/http/httputil.ErrPersistEOF": {0, DeprecatedUseNoLonger}, - "net/http/httputil.ErrClosed": {0, DeprecatedUseNoLonger}, - "net/http/httputil.ErrPipeline": {0, DeprecatedUseNoLonger}, - "net/http/httputil.ServerConn": {0, 0}, - "net/http/httputil.NewServerConn": {0, 0}, - "net/http/httputil.ClientConn": {0, 0}, - "net/http/httputil.NewClientConn": {0, 0}, - "net/http/httputil.NewProxyClientConn": {0, 0}, - "(net/http.Request).Cancel": {7, 7}, - "(text/template/parse.PipeNode).Line": {1, DeprecatedUseNoLonger}, - 
"(text/template/parse.ActionNode).Line": {1, DeprecatedUseNoLonger}, - "(text/template/parse.BranchNode).Line": {1, DeprecatedUseNoLonger}, - "(text/template/parse.TemplateNode).Line": {1, DeprecatedUseNoLonger}, - "database/sql/driver.ColumnConverter": {9, 9}, - "database/sql/driver.Execer": {8, 8}, - "database/sql/driver.Queryer": {8, 8}, - "(database/sql/driver.Conn).Begin": {8, 8}, - "(database/sql/driver.Stmt).Exec": {8, 8}, - "(database/sql/driver.Stmt).Query": {8, 8}, - "syscall.StringByteSlice": {1, 1}, - "syscall.StringBytePtr": {1, 1}, - "syscall.StringSlicePtr": {1, 1}, - "syscall.StringToUTF16": {1, 1}, - "syscall.StringToUTF16Ptr": {1, 1}, - "(*regexp.Regexp).Copy": {12, DeprecatedUseNoLonger}, - "(archive/tar.Header).Xattrs": {10, 10}, - "archive/tar.TypeRegA": {11, 1}, - "go/types.NewInterface": {11, 11}, - "(*go/types.Interface).Embedded": {11, 11}, - "go/importer.For": {12, 12}, - "encoding/json.InvalidUTF8Error": {2, DeprecatedUseNoLonger}, - "encoding/json.UnmarshalFieldError": {2, DeprecatedUseNoLonger}, - "encoding/csv.ErrTrailingComma": {2, DeprecatedUseNoLonger}, - "(encoding/csv.Reader).TrailingComma": {2, DeprecatedUseNoLonger}, - "(net.Dialer).DualStack": {12, 12}, - "net/http.ErrUnexpectedTrailer": {12, DeprecatedUseNoLonger}, - "net/http.CloseNotifier": {11, 7}, - // This is hairy. The notice says "Not all errors in the http package related to protocol errors are of type ProtocolError", but doesn't that imply that some errors do? - "net/http.ProtocolError": {8, DeprecatedUseNoLonger}, - "(crypto/x509.CertificateRequest).Attributes": {5, 3}, - "(*crypto/x509.Certificate).CheckCRLSignature": {19, 19}, - "crypto/x509.ParseCRL": {19, 19}, - "crypto/x509.ParseDERCRL": {19, 19}, - "(*crypto/x509.Certificate).CreateCRL": {19, 19}, - "crypto/x509/pkix.TBSCertificateList": {19, 19}, - "crypto/x509/pkix.RevokedCertificate": {19, 19}, - "go/doc.ToHTML": {20, 20}, - "go/doc.ToText": {20, 20}, - "go/doc.Synopsis": {20, 20}, - "math/rand.Seed": {20, 0}, - "math/rand.Read": {20, DeprecatedNeverUse}, - - // These functions have no direct alternative, but they are insecure and should no longer be used. - "crypto/x509.IsEncryptedPEMBlock": {16, DeprecatedNeverUse}, - "crypto/x509.DecryptPEMBlock": {16, DeprecatedNeverUse}, - "crypto/x509.EncryptPEMBlock": {16, DeprecatedNeverUse}, - "crypto/dsa": {16, DeprecatedNeverUse}, - - // This function has no alternative, but also no purpose. 
- "(*crypto/rc4.Cipher).Reset": {12, DeprecatedNeverUse}, - "(net/http/httptest.ResponseRecorder).HeaderMap": {11, 7}, - "image.ZP": {13, 0}, - "image.ZR": {13, 0}, - "(*debug/gosym.LineTable).LineToPC": {2, 2}, - "(*debug/gosym.LineTable).PCToLine": {2, 2}, - "crypto/tls.VersionSSL30": {13, DeprecatedNeverUse}, - "(crypto/tls.Config).NameToCertificate": {14, DeprecatedUseNoLonger}, - "(*crypto/tls.Config).BuildNameToCertificate": {14, DeprecatedUseNoLonger}, - "(crypto/tls.Config).SessionTicketKey": {16, 5}, - // No alternative, no use - "(crypto/tls.ConnectionState).NegotiatedProtocolIsMutual": {16, DeprecatedNeverUse}, - // No alternative, but insecure - "(crypto/tls.ConnectionState).TLSUnique": {16, DeprecatedNeverUse}, - "image/jpeg.Reader": {4, DeprecatedNeverUse}, - - // All of these have been deprecated in favour of external libraries - "syscall.AttachLsf": {7, 0}, - "syscall.DetachLsf": {7, 0}, - "syscall.LsfSocket": {7, 0}, - "syscall.SetLsfPromisc": {7, 0}, - "syscall.LsfJump": {7, 0}, - "syscall.LsfStmt": {7, 0}, - "syscall.BpfStmt": {7, 0}, - "syscall.BpfJump": {7, 0}, - "syscall.BpfBuflen": {7, 0}, - "syscall.SetBpfBuflen": {7, 0}, - "syscall.BpfDatalink": {7, 0}, - "syscall.SetBpfDatalink": {7, 0}, - "syscall.SetBpfPromisc": {7, 0}, - "syscall.FlushBpf": {7, 0}, - "syscall.BpfInterface": {7, 0}, - "syscall.SetBpfInterface": {7, 0}, - "syscall.BpfTimeout": {7, 0}, - "syscall.SetBpfTimeout": {7, 0}, - "syscall.BpfStats": {7, 0}, - "syscall.SetBpfImmediate": {7, 0}, - "syscall.SetBpf": {7, 0}, - "syscall.CheckBpfVersion": {7, 0}, - "syscall.BpfHeadercmpl": {7, 0}, - "syscall.SetBpfHeadercmpl": {7, 0}, - "syscall.RouteRIB": {8, 0}, - "syscall.RoutingMessage": {8, 0}, - "syscall.RouteMessage": {8, 0}, - "syscall.InterfaceMessage": {8, 0}, - "syscall.InterfaceAddrMessage": {8, 0}, - "syscall.ParseRoutingMessage": {8, 0}, - "syscall.ParseRoutingSockaddr": {8, 0}, - "syscall.InterfaceAnnounceMessage": {7, 0}, - "syscall.InterfaceMulticastAddrMessage": {7, 0}, - "syscall.FormatMessage": {5, 0}, - "syscall.PostQueuedCompletionStatus": {17, 0}, - "syscall.GetQueuedCompletionStatus": {17, 0}, - "syscall.CreateIoCompletionPort": {17, 0}, - - // We choose to only track the package itself, even though all functions are derecated individually, too. Anyone - // using ioutil directly will have to import it, and this keeps the noise down. - "io/ioutil": {19, 19}, - - "bytes.Title": {18, 0}, - "strings.Title": {18, 0}, - "(crypto/tls.Config).PreferServerCipherSuites": {18, DeprecatedUseNoLonger}, - // It's not clear if Subjects was okay to use in the past, so we err on the less noisy side of assuming that it was. - "(*crypto/x509.CertPool).Subjects": {18, DeprecatedUseNoLonger}, - "go/types.NewSignature": {18, 18}, - "(net.Error).Temporary": {18, DeprecatedNeverUse}, - // InterfaceData is another tricky case. It was deprecated in Go 1.18, but has been useless since Go 1.4, and an - // "alternative" (using your own unsafe hacks) has existed forever. We don't want to get into hairsplitting with - // users who somehow successfully used this between 1.4 and 1.18, so we'll just tag it as deprecated since 1.18. - "(reflect.Value).InterfaceData": {18, 18}, - - // The following objects are only deprecated on Windows. 
- "syscall.Syscall": {18, 18}, - "syscall.Syscall12": {18, 18}, - "syscall.Syscall15": {18, 18}, - "syscall.Syscall18": {18, 18}, - "syscall.Syscall6": {18, 18}, - "syscall.Syscall9": {18, 18}, - - "reflect.SliceHeader": {21, 17}, - "reflect.StringHeader": {21, 20}, - "crypto/elliptic.GenerateKey": {21, 21}, - "crypto/elliptic.Marshal": {21, 21}, - "crypto/elliptic.Unmarshal": {21, 21}, - "(*crypto/elliptic.CurveParams).Add": {21, 21}, - "(*crypto/elliptic.CurveParams).Double": {21, 21}, - "(*crypto/elliptic.CurveParams).IsOnCurve": {21, 21}, - "(*crypto/elliptic.CurveParams).ScalarBaseMult": {21, 21}, - "(*crypto/elliptic.CurveParams).ScalarMult": {21, 21}, - "crypto/rsa.GenerateMultiPrimeKey": {21, DeprecatedNeverUse}, - "(crypto/rsa.PrecomputedValues).CRTValues": {21, DeprecatedNeverUse}, - "(crypto/x509.RevocationList).RevokedCertificates": {21, 21}, -} - -// Last imported from Go at c19c4c566c63818dfd059b352e52c4710eecf14d with the following numbers of deprecations: -// -// archive/tar/common.go:2 -// archive/zip/struct.go:6 -// bytes/bytes.go:1 -// compress/flate/inflate.go:2 -// crypto/dsa/dsa.go:1 -// crypto/elliptic/elliptic.go:8 -// crypto/elliptic/params.go:5 -// crypto/rc4/rc4.go:1 -// crypto/rsa/rsa.go:2 -// crypto/tls/common.go:6 -// crypto/x509/cert_pool.go:1 -// crypto/x509/pem_decrypt.go:3 -// crypto/x509/pkix/pkix.go:2 -// crypto/x509/x509.go:6 -// database/sql/driver/driver.go:6 -// debug/gosym/pclntab.go:2 -// encoding/csv/reader.go:2 -// encoding/json/decode.go:1 -// encoding/json/encode.go:1 -// go/build/build.go:1 -// go/doc/comment.go:2 -// go/doc/doc.go:1 -// go/doc/synopsis.go:1 -// go/importer/importer.go:2 -// go/types/interface.go:2 -// go/types/signature.go:1 -// image/geom.go:2 -// image/jpeg/reader.go:1 -// internal/types/errors/codes.go:1 -// io/ioutil/ioutil.go:7 -// io/ioutil/tempfile.go:2 -// math/rand/rand.go:2 -// net/dial.go:2 -// net/http/h2_bundle.go:1 -// net/http/httptest/recorder.go:1 -// net/http/httputil/persist.go:8 -// net/http/request.go:6 -// net/http/server.go:2 -// net/http/socks_bundle.go:1 -// net/http/transport.go:3 -// net/net.go:1 -// os/file.go:1 -// path/filepath/path_plan9.go:1 -// path/filepath/path_unix.go:1 -// path/filepath/path_windows.go:1 -// reflect/value.go:3 -// regexp/regexp.go:1 -// runtime/cpuprof.go:1 -// strings/strings.go:1 -// syscall/bpf_bsd.go:18 -// syscall/bpf_darwin.go:18 -// syscall/dll_windows.go:6 -// syscall/exec_plan9.go:1 -// syscall/exec_unix.go:1 -// syscall/lsf_linux.go:6 -// syscall/route_bsd.go:7 -// syscall/route_darwin.go:1 -// syscall/route_dragonfly.go:2 -// syscall/route_freebsd.go:2 -// syscall/route_netbsd.go:1 -// syscall/route_openbsd.go:1 -// syscall/syscall.go:3 -// syscall/syscall_windows.go:6 -// text/template/parse/node.go:5 -// vendor/golang.org/x/text/transform/transform.go:1 diff --git a/vendor/honnef.co/go/tools/knowledge/doc.go b/vendor/honnef.co/go/tools/knowledge/doc.go deleted file mode 100644 index 72ff52444..000000000 --- a/vendor/honnef.co/go/tools/knowledge/doc.go +++ /dev/null @@ -1,2 +0,0 @@ -// Package knowledge contains manually collected information about Go APIs. 
-package knowledge diff --git a/vendor/honnef.co/go/tools/knowledge/signatures.go b/vendor/honnef.co/go/tools/knowledge/signatures.go deleted file mode 100644 index 03f4d53e8..000000000 --- a/vendor/honnef.co/go/tools/knowledge/signatures.go +++ /dev/null @@ -1,107 +0,0 @@ -package knowledge - -import ( - "go/token" - "go/types" -) - -var Signatures = map[string]*types.Signature{ - "(io.Seeker).Seek": types.NewSignatureType(nil, nil, nil, - types.NewTuple( - types.NewParam(token.NoPos, nil, "", types.Typ[types.Int64]), - types.NewParam(token.NoPos, nil, "", types.Typ[types.Int]), - ), - types.NewTuple( - types.NewParam(token.NoPos, nil, "", types.Typ[types.Int64]), - types.NewParam(token.NoPos, nil, "", types.Universe.Lookup("error").Type()), - ), - false, - ), - - "(io.Writer).Write": types.NewSignatureType(nil, nil, nil, - types.NewTuple( - types.NewParam(token.NoPos, nil, "", types.NewSlice(types.Typ[types.Byte])), - ), - types.NewTuple( - types.NewParam(token.NoPos, nil, "", types.Typ[types.Int]), - types.NewParam(token.NoPos, nil, "", types.Universe.Lookup("error").Type()), - ), - false, - ), - - "(io.StringWriter).WriteString": types.NewSignatureType(nil, nil, nil, - types.NewTuple( - types.NewParam(token.NoPos, nil, "", types.Typ[types.String]), - ), - types.NewTuple( - types.NewParam(token.NoPos, nil, "", types.Typ[types.Int]), - types.NewParam(token.NoPos, nil, "", types.Universe.Lookup("error").Type()), - ), - false, - ), - - "(encoding.TextMarshaler).MarshalText": types.NewSignatureType(nil, nil, nil, - types.NewTuple(), - types.NewTuple( - types.NewParam(token.NoPos, nil, "", types.NewSlice(types.Typ[types.Byte])), - types.NewParam(token.NoPos, nil, "", types.Universe.Lookup("error").Type()), - ), - false, - ), - - "(encoding/json.Marshaler).MarshalJSON": types.NewSignatureType(nil, nil, nil, - types.NewTuple(), - types.NewTuple( - types.NewParam(token.NoPos, nil, "", types.NewSlice(types.Typ[types.Byte])), - types.NewParam(token.NoPos, nil, "", types.Universe.Lookup("error").Type()), - ), - false, - ), - - "(fmt.Stringer).String": types.NewSignatureType(nil, nil, nil, - types.NewTuple(), - types.NewTuple( - types.NewParam(token.NoPos, nil, "", types.Typ[types.String]), - ), - false, - ), -} - -var Interfaces = map[string]*types.Interface{ - "fmt.Stringer": types.NewInterfaceType( - []*types.Func{ - types.NewFunc(token.NoPos, nil, "String", Signatures["(fmt.Stringer).String"]), - }, - nil, - ).Complete(), - - "error": types.Universe.Lookup("error").Type().Underlying().(*types.Interface), - - "io.Writer": types.NewInterfaceType( - []*types.Func{ - types.NewFunc(token.NoPos, nil, "Write", Signatures["(io.Writer).Write"]), - }, - nil, - ).Complete(), - - "io.StringWriter": types.NewInterfaceType( - []*types.Func{ - types.NewFunc(token.NoPos, nil, "WriteString", Signatures["(io.StringWriter).WriteString"]), - }, - nil, - ).Complete(), - - "encoding.TextMarshaler": types.NewInterfaceType( - []*types.Func{ - types.NewFunc(token.NoPos, nil, "MarshalText", Signatures["(encoding.TextMarshaler).MarshalText"]), - }, - nil, - ).Complete(), - - "encoding/json.Marshaler": types.NewInterfaceType( - []*types.Func{ - types.NewFunc(token.NoPos, nil, "MarshalJSON", Signatures["(encoding/json.Marshaler).MarshalJSON"]), - }, - nil, - ).Complete(), -} diff --git a/vendor/honnef.co/go/tools/pattern/convert.go b/vendor/honnef.co/go/tools/pattern/convert.go deleted file mode 100644 index aed3617cd..000000000 --- a/vendor/honnef.co/go/tools/pattern/convert.go +++ /dev/null @@ -1,243 +0,0 @@ -package 
pattern - -import ( - "fmt" - "go/ast" - "go/token" - "go/types" - "reflect" -) - -var astTypes = map[string]reflect.Type{ - "Ellipsis": reflect.TypeOf(ast.Ellipsis{}), - "RangeStmt": reflect.TypeOf(ast.RangeStmt{}), - "AssignStmt": reflect.TypeOf(ast.AssignStmt{}), - "IndexExpr": reflect.TypeOf(ast.IndexExpr{}), - "IndexListExpr": reflect.TypeOf(ast.IndexListExpr{}), - "Ident": reflect.TypeOf(ast.Ident{}), - "ValueSpec": reflect.TypeOf(ast.ValueSpec{}), - "GenDecl": reflect.TypeOf(ast.GenDecl{}), - "BinaryExpr": reflect.TypeOf(ast.BinaryExpr{}), - "ForStmt": reflect.TypeOf(ast.ForStmt{}), - "ArrayType": reflect.TypeOf(ast.ArrayType{}), - "DeferStmt": reflect.TypeOf(ast.DeferStmt{}), - "MapType": reflect.TypeOf(ast.MapType{}), - "ReturnStmt": reflect.TypeOf(ast.ReturnStmt{}), - "SliceExpr": reflect.TypeOf(ast.SliceExpr{}), - "StarExpr": reflect.TypeOf(ast.StarExpr{}), - "UnaryExpr": reflect.TypeOf(ast.UnaryExpr{}), - "SendStmt": reflect.TypeOf(ast.SendStmt{}), - "SelectStmt": reflect.TypeOf(ast.SelectStmt{}), - "ImportSpec": reflect.TypeOf(ast.ImportSpec{}), - "IfStmt": reflect.TypeOf(ast.IfStmt{}), - "GoStmt": reflect.TypeOf(ast.GoStmt{}), - "Field": reflect.TypeOf(ast.Field{}), - "SelectorExpr": reflect.TypeOf(ast.SelectorExpr{}), - "StructType": reflect.TypeOf(ast.StructType{}), - "KeyValueExpr": reflect.TypeOf(ast.KeyValueExpr{}), - "FuncType": reflect.TypeOf(ast.FuncType{}), - "FuncLit": reflect.TypeOf(ast.FuncLit{}), - "FuncDecl": reflect.TypeOf(ast.FuncDecl{}), - "ChanType": reflect.TypeOf(ast.ChanType{}), - "CallExpr": reflect.TypeOf(ast.CallExpr{}), - "CaseClause": reflect.TypeOf(ast.CaseClause{}), - "CommClause": reflect.TypeOf(ast.CommClause{}), - "CompositeLit": reflect.TypeOf(ast.CompositeLit{}), - "EmptyStmt": reflect.TypeOf(ast.EmptyStmt{}), - "SwitchStmt": reflect.TypeOf(ast.SwitchStmt{}), - "TypeSwitchStmt": reflect.TypeOf(ast.TypeSwitchStmt{}), - "TypeAssertExpr": reflect.TypeOf(ast.TypeAssertExpr{}), - "TypeSpec": reflect.TypeOf(ast.TypeSpec{}), - "InterfaceType": reflect.TypeOf(ast.InterfaceType{}), - "BranchStmt": reflect.TypeOf(ast.BranchStmt{}), - "IncDecStmt": reflect.TypeOf(ast.IncDecStmt{}), - "BasicLit": reflect.TypeOf(ast.BasicLit{}), -} - -func ASTToNode(node interface{}) Node { - switch node := node.(type) { - case *ast.File: - panic("cannot convert *ast.File to Node") - case nil: - return Nil{} - case string: - return String(node) - case token.Token: - return Token(node) - case *ast.ExprStmt: - return ASTToNode(node.X) - case *ast.BlockStmt: - if node == nil { - return Nil{} - } - return ASTToNode(node.List) - case *ast.FieldList: - if node == nil { - return Nil{} - } - return ASTToNode(node.List) - case *ast.BasicLit: - if node == nil { - return Nil{} - } - case *ast.ParenExpr: - return ASTToNode(node.X) - } - - if node, ok := node.(ast.Node); ok { - name := reflect.TypeOf(node).Elem().Name() - T, ok := structNodes[name] - if !ok { - panic(fmt.Sprintf("internal error: unhandled type %T", node)) - } - - if reflect.ValueOf(node).IsNil() { - return Nil{} - } - v := reflect.ValueOf(node).Elem() - objs := make([]Node, T.NumField()) - for i := 0; i < T.NumField(); i++ { - f := v.FieldByName(T.Field(i).Name) - objs[i] = ASTToNode(f.Interface()) - } - - n, err := populateNode(name, objs, false) - if err != nil { - panic(fmt.Sprintf("internal error: %s", err)) - } - return n - } - - s := reflect.ValueOf(node) - if s.Kind() == reflect.Slice { - if s.Len() == 0 { - return List{} - } - if s.Len() == 1 { - return ASTToNode(s.Index(0).Interface()) - } - - tail := List{} - 
for i := s.Len() - 1; i >= 0; i-- { - head := ASTToNode(s.Index(i).Interface()) - l := List{ - Head: head, - Tail: tail, - } - tail = l - } - return tail - } - - panic(fmt.Sprintf("internal error: unhandled type %T", node)) -} - -func NodeToAST(node Node, state State) interface{} { - switch node := node.(type) { - case Binding: - v, ok := state[node.Name] - if !ok { - // really we want to return an error here - panic("XXX") - } - switch v := v.(type) { - case types.Object: - return &ast.Ident{Name: v.Name()} - default: - return v - } - case Builtin, Any, Object, Symbol, Not, Or: - panic("XXX") - case List: - if (node == List{}) { - return []ast.Node{} - } - x := []ast.Node{NodeToAST(node.Head, state).(ast.Node)} - x = append(x, NodeToAST(node.Tail, state).([]ast.Node)...) - return x - case Token: - return token.Token(node) - case String: - return string(node) - case Nil: - return nil - } - - name := reflect.TypeOf(node).Name() - T, ok := astTypes[name] - if !ok { - panic(fmt.Sprintf("internal error: unhandled type %T", node)) - } - v := reflect.ValueOf(node) - out := reflect.New(T) - for i := 0; i < T.NumField(); i++ { - fNode := v.FieldByName(T.Field(i).Name) - if (fNode == reflect.Value{}) { - continue - } - fAST := out.Elem().FieldByName(T.Field(i).Name) - switch fAST.Type().Kind() { - case reflect.Slice: - c := reflect.ValueOf(NodeToAST(fNode.Interface().(Node), state)) - if c.Kind() != reflect.Slice { - // it's a single node in the pattern, we have to wrap - // it in a slice - slice := reflect.MakeSlice(fAST.Type(), 1, 1) - slice.Index(0).Set(c) - c = slice - } - switch fAST.Interface().(type) { - case []ast.Node: - switch cc := c.Interface().(type) { - case []ast.Node: - fAST.Set(c) - case []ast.Expr: - var slice []ast.Node - for _, el := range cc { - slice = append(slice, el) - } - fAST.Set(reflect.ValueOf(slice)) - default: - panic("XXX") - } - case []ast.Expr: - switch cc := c.Interface().(type) { - case []ast.Node: - var slice []ast.Expr - for _, el := range cc { - slice = append(slice, el.(ast.Expr)) - } - fAST.Set(reflect.ValueOf(slice)) - case []ast.Expr: - fAST.Set(c) - default: - panic("XXX") - } - default: - panic("XXX") - } - case reflect.Int: - c := reflect.ValueOf(NodeToAST(fNode.Interface().(Node), state)) - switch c.Kind() { - case reflect.String: - tok, ok := tokensByString[c.Interface().(string)] - if !ok { - // really we want to return an error here - panic("XXX") - } - fAST.SetInt(int64(tok)) - case reflect.Int: - fAST.Set(c) - default: - panic(fmt.Sprintf("internal error: unexpected kind %s", c.Kind())) - } - default: - r := NodeToAST(fNode.Interface().(Node), state) - if r != nil { - fAST.Set(reflect.ValueOf(r)) - } - } - } - - return out.Interface().(ast.Node) -} diff --git a/vendor/honnef.co/go/tools/pattern/doc.go b/vendor/honnef.co/go/tools/pattern/doc.go deleted file mode 100644 index 22fe2cf30..000000000 --- a/vendor/honnef.co/go/tools/pattern/doc.go +++ /dev/null @@ -1,272 +0,0 @@ -/* -Package pattern implements a simple language for pattern matching Go ASTs. - -# Design decisions and trade-offs - -The language is designed specifically for the task of filtering ASTs -to simplify the implementation of analyses in staticcheck. -It is also intended to be trivial to parse and execute. - -To that end, we make certain decisions that make the language more -suited to its task, while making certain queries infeasible. 
- -Furthermore, it is fully expected that the majority of analyses will still require ordinary Go code -to further process the filtered AST, to make use of type information and to enforce complex invariants. -It is not our goal to design a scripting language for writing entire checks in. - -# The language - -At its core, patterns are a representation of Go ASTs, allowing for the use of placeholders to enable pattern matching. -Their syntax is inspired by LISP and Haskell, but unlike LISP, the core unit of patterns isn't the list, but the node. -There is a fixed set of nodes, identified by name, and with the exception of the Or node, all nodes have a fixed number of arguments. -In addition to nodes, there are atoms, which represent basic units such as strings or the nil value. - -Pattern matching is implemented via bindings, represented by the Binding node. -A Binding can match nodes and associate them with names, to later recall the nodes. -This allows for expressing "this node must be equal to that node" constraints. - -To simplify writing and reading patterns, a small amount of additional syntax exists on top of nodes and atoms. -This additional syntax doesn't add any new features of its own, it simply provides shortcuts to creating nodes and atoms. - -To show an example of a pattern, first consider this snippet of Go code: - - if x := fn(); x != nil { - for _, v := range x { - println(v, x) - } - } - -The corresponding AST expressed as an idiomatic pattern would look as follows: - - (IfStmt - (AssignStmt (Ident "x") ":=" (CallExpr (Ident "fn") [])) - (BinaryExpr (Ident "x") "!=" (Ident "nil")) - (RangeStmt - (Ident "_") (Ident "v") ":=" (Ident "x") - (CallExpr (Ident "println") [(Ident "v") (Ident "x")])) - nil) - -Two things are worth noting about this representation. -First, the [el1 el2 ...] syntax is a short-hand for creating lists. -It is a short-hand for el1:el2:[], which itself is a short-hand for (List el1 (List el2 (List nil nil)). -Second, note the absence of a lot of lists in places that normally accept lists. -For example, assignment assigns a number of right-hands to a number of left-hands, yet our AssignStmt is lacking any form of list. -This is due to the fact that a single node can match a list of exactly one element. -Thus, the two following forms have identical matching behavior: - - (AssignStmt (Ident "x") ":=" (CallExpr (Ident "fn") [])) - (AssignStmt [(Ident "x")] ":=" [(CallExpr (Ident "fn") [])]) - -This section serves as an overview of the language's syntax. -More in-depth explanations of the matching behavior as well as an exhaustive list of node types follows in the coming sections. - -# Pattern matching - -# TODO write about pattern matching - -- inspired by haskell syntax, but much, much simpler and naive - -# Node types - -The language contains two kinds of nodes: those that map to nodes in the AST, and those that implement additional logic. - -Nodes that map directly to AST nodes are named identically to the types in the go/ast package. 
-What follows is an exhaustive list of these nodes: - - (ArrayType len elt) - (AssignStmt lhs tok rhs) - (BasicLit kind value) - (BinaryExpr x op y) - (BranchStmt tok label) - (CallExpr fun args) - (CaseClause list body) - (ChanType dir value) - (CommClause comm body) - (CompositeLit type elts) - (DeferStmt call) - (Ellipsis elt) - (EmptyStmt) - (Field names type tag) - (ForStmt init cond post body) - (FuncDecl recv name type body) - (FuncLit type body) - (FuncType params results) - (GenDecl specs) - (GoStmt call) - (Ident name) - (IfStmt init cond body else) - (ImportSpec name path) - (IncDecStmt x tok) - (IndexExpr x index) - (InterfaceType methods) - (KeyValueExpr key value) - (MapType key value) - (RangeStmt key value tok x body) - (ReturnStmt results) - (SelectStmt body) - (SelectorExpr x sel) - (SendStmt chan value) - (SliceExpr x low high max) - (StarExpr x) - (StructType fields) - (SwitchStmt init tag body) - (TypeAssertExpr) - (TypeSpec name type) - (TypeSwitchStmt init assign body) - (UnaryExpr op x) - (ValueSpec names type values) - -Additionally, there are the String, Token and nil atoms. -Strings are double-quoted string literals, as in (Ident "someName"). -Tokens are also represented as double-quoted string literals, but are converted to token.Token values in contexts that require tokens, -such as in (BinaryExpr x "<" y), where "<" is transparently converted to token.LSS during matching. -The keyword 'nil' denotes the nil value, which represents the absence of any value. - -We also define the (List head tail) node, which is used to represent sequences of elements as a singly linked list. -The head is a single element, and the tail is the remainder of the list. -For example, - - (List "foo" (List "bar" (List "baz" (List nil nil)))) - -represents a list of three elements, "foo", "bar" and "baz". There is dedicated syntax for writing lists, which looks as follows: - - ["foo" "bar" "baz"] - -This syntax is itself syntactic sugar for the following form: - - "foo":"bar":"baz":[] - -This form is of particular interest for pattern matching, as it allows matching on the head and tail. For example, - - "foo":"bar":_ - -would match any list with at least two elements, where the first two elements are "foo" and "bar". This is equivalent to writing - - (List "foo" (List "bar" _)) - -Note that it is not possible to match from the end of the list. -That is, there is no way to express a query such as "a list of any length where the last element is foo". - -Note that unlike in LISP, nil and empty lists are distinct from one another. -In patterns, with respect to lists, nil is akin to Go's untyped nil. -It will match a nil ast.Node, but it will not match a nil []ast.Expr. Nil will, however, match pointers to named types such as *ast.Ident. -Similarly, lists are akin to Go's -slices. An empty list will match both a nil and an empty []ast.Expr, but it will not match a nil ast.Node. - -Due to the difference between nil and empty lists, an empty list is represented as (List nil nil), i.e. a list with no head or tail. -Similarly, a list of one element is represented as (List el (List nil nil)). Unlike in LISP, it cannot be represented by (List el nil). - -Finally, there are nodes that implement special logic or matching behavior. - -(Any) matches any value. The underscore (_) maps to this node, making the following two forms equivalent: - - (Ident _) - (Ident (Any)) - -(Builtin name) matches a built-in identifier or function by name. -This is a type-aware variant of (Ident name). 
-Instead of only comparing the name, it resolves the object behind the name and makes sure it's a pre-declared identifier. - -For example, in the following piece of code - - func fn() { - println(true) - true := false - println(true) - } - -the pattern - - (Builtin "true") - -will match exactly once, on the first use of 'true' in the function. -Subsequent occurrences of 'true' no longer refer to the pre-declared identifier. - -(Object name) matches an identifier by name, but yields the -types.Object it refers to. - -(Symbol name) matches ast.Idents and ast.SelectorExprs that refer to a symbol with a given fully qualified name. -For example, "net/url.PathEscape" matches the PathEscape function in the net/url package, -and "(net/url.EscapeError).Error" refers to the Error method on the net/url.EscapeError type, -either on an instance of the type, or on the type itself. - -For example, the following patterns match the following lines of code: - - (CallExpr (Symbol "fmt.Println") _) // pattern 1 - (CallExpr (Symbol "(net/url.EscapeError).Error") _) // pattern 2 - - fmt.Println("hello, world") // matches pattern 1 - var x url.EscapeError - x.Error() // matches pattern 2 - (url.EscapeError).Error(x) // also matches pattern 2 - -(Binding name node) creates or uses a binding. -Bindings work like variable assignments, allowing referring to already matched nodes. -As an example, bindings are necessary to match self-assignment of the form "x = x", -since we need to express that the right-hand side is identical to the left-hand side. - -If a binding's node is not nil, the matcher will attempt to match a node according to the pattern. -If a binding's node is nil, the binding will either recall an existing value, or match the Any node. -It is an error to provide a non-nil node to a binding that has already been bound. - -Referring back to the earlier example, the following pattern will match self-assignment of idents: - - (AssignStmt (Binding "lhs" (Ident _)) "=" (Binding "lhs" nil)) - -Because bindings are a crucial component of pattern matching, there is special syntax for creating and recalling bindings. -Lower-case names refer to bindings. If standing on its own, the name "foo" will be equivalent to (Binding "foo" nil). -If a name is followed by an at-sign (@) then it will create a binding for the node that follows. -Together, this allows us to rewrite the earlier example as follows: - - (AssignStmt lhs@(Ident _) "=" lhs) - -(Or nodes...) is a variadic node that tries matching each node until one succeeds. For example, the following pattern matches all idents of name "foo" or "bar": - - (Ident (Or "foo" "bar")) - -We could also have written - - (Or (Ident "foo") (Ident "bar")) - -and achieved the same result. We can also mix different kinds of nodes: - - (Or (Ident "foo") (CallExpr (Ident "bar") _)) - -When using bindings inside of nodes used inside Or, all or none of the bindings will be bound. -That is, partially matched nodes that ultimately failed to match will not produce any bindings observable outside of the matching attempt. -We can thus write - - (Or (Ident name) (CallExpr name)) - -and 'name' will either be a String if the first option matched, or an Ident or SelectorExpr if the second option matched. - -(Not node) - -The Not node negates a match. For example, (Not (Ident _)) will match all nodes that aren't identifiers. - -ChanDir(0) - -# Automatic unnesting of AST nodes - -The Go AST has several types of nodes that wrap other nodes. 
-To simplify matching, we automatically unwrap some of these nodes. - -These nodes are ExprStmt (for using expressions in a statement context), -ParenExpr (for parenthesized expressions), -DeclStmt (for declarations in a statement context), -and LabeledStmt (for labeled statements). - -Thus, the query - - (FuncLit _ [(CallExpr _ _)] - -will match a function literal containing a single function call, -even though in the actual Go AST, the CallExpr is nested inside an ExprStmt, -as function bodies are made up of sequences of statements. - -On the flip-side, there is no way to specifically match these wrapper nodes. -For example, there is no way of searching for unnecessary parentheses, like in the following piece of Go code: - - ((x)) += 2 -*/ -package pattern diff --git a/vendor/honnef.co/go/tools/pattern/lexer.go b/vendor/honnef.co/go/tools/pattern/lexer.go deleted file mode 100644 index fb72e392b..000000000 --- a/vendor/honnef.co/go/tools/pattern/lexer.go +++ /dev/null @@ -1,221 +0,0 @@ -package pattern - -import ( - "fmt" - "go/token" - "unicode" - "unicode/utf8" -) - -type lexer struct { - f *token.File - - input string - start int - pos int - width int - items chan item -} - -type itemType int - -const eof = -1 - -const ( - itemError itemType = iota - itemLeftParen - itemRightParen - itemLeftBracket - itemRightBracket - itemTypeName - itemVariable - itemAt - itemColon - itemBlank - itemString - itemEOF -) - -func (typ itemType) String() string { - switch typ { - case itemError: - return "ERROR" - case itemLeftParen: - return "(" - case itemRightParen: - return ")" - case itemLeftBracket: - return "[" - case itemRightBracket: - return "]" - case itemTypeName: - return "TYPE" - case itemVariable: - return "VAR" - case itemAt: - return "@" - case itemColon: - return ":" - case itemBlank: - return "_" - case itemString: - return "STRING" - case itemEOF: - return "EOF" - default: - return fmt.Sprintf("itemType(%d)", typ) - } -} - -type item struct { - typ itemType - val string - pos int -} - -type stateFn func(*lexer) stateFn - -func (l *lexer) run() { - for state := lexStart; state != nil; { - state = state(l) - } - close(l.items) -} - -func (l *lexer) emitValue(t itemType, value string) { - l.items <- item{t, value, l.start} - l.start = l.pos -} - -func (l *lexer) emit(t itemType) { - l.items <- item{t, l.input[l.start:l.pos], l.start} - l.start = l.pos -} - -func lexStart(l *lexer) stateFn { - switch r := l.next(); { - case r == eof: - l.emit(itemEOF) - return nil - case unicode.IsSpace(r): - l.ignore() - case r == '(': - l.emit(itemLeftParen) - case r == ')': - l.emit(itemRightParen) - case r == '[': - l.emit(itemLeftBracket) - case r == ']': - l.emit(itemRightBracket) - case r == '@': - l.emit(itemAt) - case r == ':': - l.emit(itemColon) - case r == '_': - l.emit(itemBlank) - case r == '"': - l.backup() - return lexString - case unicode.IsUpper(r): - l.backup() - return lexType - case unicode.IsLower(r): - l.backup() - return lexVariable - default: - return l.errorf("unexpected character %c", r) - } - return lexStart -} - -func (l *lexer) next() (r rune) { - if l.pos >= len(l.input) { - l.width = 0 - return eof - } - r, l.width = utf8.DecodeRuneInString(l.input[l.pos:]) - - if r == '\n' { - l.f.AddLine(l.pos) - } - - l.pos += l.width - - return r -} - -func (l *lexer) ignore() { - l.start = l.pos -} - -func (l *lexer) backup() { - l.pos -= l.width -} - -func (l *lexer) errorf(format string, args ...interface{}) stateFn { - // TODO(dh): emit position information in errors - l.items <- 
item{ - itemError, - fmt.Sprintf(format, args...), - l.start, - } - return nil -} - -func isAlphaNumeric(r rune) bool { - return r >= '0' && r <= '9' || - r >= 'a' && r <= 'z' || - r >= 'A' && r <= 'Z' -} - -func lexString(l *lexer) stateFn { - l.next() // skip quote - escape := false - - var runes []rune - for { - switch r := l.next(); r { - case eof: - return l.errorf("unterminated string") - case '"': - if !escape { - l.emitValue(itemString, string(runes)) - return lexStart - } else { - runes = append(runes, '"') - escape = false - } - case '\\': - if escape { - runes = append(runes, '\\') - escape = false - } else { - escape = true - } - default: - runes = append(runes, r) - } - } -} - -func lexType(l *lexer) stateFn { - l.next() - for { - if !isAlphaNumeric(l.next()) { - l.backup() - l.emit(itemTypeName) - return lexStart - } - } -} - -func lexVariable(l *lexer) stateFn { - l.next() - for { - if !isAlphaNumeric(l.next()) { - l.backup() - l.emit(itemVariable) - return lexStart - } - } -} diff --git a/vendor/honnef.co/go/tools/pattern/match.go b/vendor/honnef.co/go/tools/pattern/match.go deleted file mode 100644 index 3eae3bd63..000000000 --- a/vendor/honnef.co/go/tools/pattern/match.go +++ /dev/null @@ -1,699 +0,0 @@ -package pattern - -import ( - "fmt" - "go/ast" - "go/token" - "go/types" - "reflect" - - "golang.org/x/tools/go/ast/astutil" -) - -var tokensByString = map[string]Token{ - "INT": Token(token.INT), - "FLOAT": Token(token.FLOAT), - "IMAG": Token(token.IMAG), - "CHAR": Token(token.CHAR), - "STRING": Token(token.STRING), - "+": Token(token.ADD), - "-": Token(token.SUB), - "*": Token(token.MUL), - "/": Token(token.QUO), - "%": Token(token.REM), - "&": Token(token.AND), - "|": Token(token.OR), - "^": Token(token.XOR), - "<<": Token(token.SHL), - ">>": Token(token.SHR), - "&^": Token(token.AND_NOT), - "+=": Token(token.ADD_ASSIGN), - "-=": Token(token.SUB_ASSIGN), - "*=": Token(token.MUL_ASSIGN), - "/=": Token(token.QUO_ASSIGN), - "%=": Token(token.REM_ASSIGN), - "&=": Token(token.AND_ASSIGN), - "|=": Token(token.OR_ASSIGN), - "^=": Token(token.XOR_ASSIGN), - "<<=": Token(token.SHL_ASSIGN), - ">>=": Token(token.SHR_ASSIGN), - "&^=": Token(token.AND_NOT_ASSIGN), - "&&": Token(token.LAND), - "||": Token(token.LOR), - "<-": Token(token.ARROW), - "++": Token(token.INC), - "--": Token(token.DEC), - "==": Token(token.EQL), - "<": Token(token.LSS), - ">": Token(token.GTR), - "=": Token(token.ASSIGN), - "!": Token(token.NOT), - "!=": Token(token.NEQ), - "<=": Token(token.LEQ), - ">=": Token(token.GEQ), - ":=": Token(token.DEFINE), - "...": Token(token.ELLIPSIS), - "IMPORT": Token(token.IMPORT), - "VAR": Token(token.VAR), - "TYPE": Token(token.TYPE), - "CONST": Token(token.CONST), - "BREAK": Token(token.BREAK), - "CONTINUE": Token(token.CONTINUE), - "GOTO": Token(token.GOTO), - "FALLTHROUGH": Token(token.FALLTHROUGH), -} - -func maybeToken(node Node) (Node, bool) { - if node, ok := node.(String); ok { - if tok, ok := tokensByString[string(node)]; ok { - return tok, true - } - return node, false - } - return node, false -} - -func isNil(v interface{}) bool { - if v == nil { - return true - } - if _, ok := v.(Nil); ok { - return true - } - return false -} - -type matcher interface { - Match(*Matcher, interface{}) (interface{}, bool) -} - -type State = map[string]any - -type Matcher struct { - TypesInfo *types.Info - State State - - bindingsMapping []string - - setBindings []uint64 -} - -func (m *Matcher) set(b Binding, value interface{}) { - m.State[b.Name] = value - 
m.setBindings[len(m.setBindings)-1] |= 1 << b.idx -} - -func (m *Matcher) push() { - m.setBindings = append(m.setBindings, 0) -} - -func (m *Matcher) pop() { - set := m.setBindings[len(m.setBindings)-1] - if set != 0 { - for i := 0; i < len(m.bindingsMapping); i++ { - if (set & (1 << i)) != 0 { - key := m.bindingsMapping[i] - delete(m.State, key) - } - } - } - m.setBindings = m.setBindings[:len(m.setBindings)-1] -} - -func (m *Matcher) merge() { - m.setBindings = m.setBindings[:len(m.setBindings)-1] -} - -func (m *Matcher) Match(a Pattern, b ast.Node) bool { - m.bindingsMapping = a.Bindings - m.State = State{} - m.push() - _, ok := match(m, a.Root, b) - m.merge() - if len(m.setBindings) != 0 { - panic(fmt.Sprintf("%d entries left on the stack, expected none", len(m.setBindings))) - } - return ok -} - -func Match(a Pattern, b ast.Node) (*Matcher, bool) { - m := &Matcher{} - ret := m.Match(a, b) - return m, ret -} - -// Match two items, which may be (Node, AST) or (AST, AST) -func match(m *Matcher, l, r interface{}) (interface{}, bool) { - if _, ok := r.(Node); ok { - panic("Node mustn't be on right side of match") - } - - switch l := l.(type) { - case *ast.ParenExpr: - return match(m, l.X, r) - case *ast.ExprStmt: - return match(m, l.X, r) - case *ast.DeclStmt: - return match(m, l.Decl, r) - case *ast.LabeledStmt: - return match(m, l.Stmt, r) - case *ast.BlockStmt: - return match(m, l.List, r) - case *ast.FieldList: - if l == nil { - return match(m, nil, r) - } else { - return match(m, l.List, r) - } - } - - switch r := r.(type) { - case *ast.ParenExpr: - return match(m, l, r.X) - case *ast.ExprStmt: - return match(m, l, r.X) - case *ast.DeclStmt: - return match(m, l, r.Decl) - case *ast.LabeledStmt: - return match(m, l, r.Stmt) - case *ast.BlockStmt: - if r == nil { - return match(m, l, nil) - } - return match(m, l, r.List) - case *ast.FieldList: - if r == nil { - return match(m, l, nil) - } - return match(m, l, r.List) - case *ast.BasicLit: - if r == nil { - return match(m, l, nil) - } - } - - if l, ok := l.(matcher); ok { - return l.Match(m, r) - } - - if l, ok := l.(Node); ok { - // Matching of pattern with concrete value - return matchNodeAST(m, l, r) - } - - if l == nil || r == nil { - return nil, l == r - } - - { - ln, ok1 := l.(ast.Node) - rn, ok2 := r.(ast.Node) - if ok1 && ok2 { - return matchAST(m, ln, rn) - } - } - - { - obj, ok := l.(types.Object) - if ok { - switch r := r.(type) { - case *ast.Ident: - return obj, obj == m.TypesInfo.ObjectOf(r) - case *ast.SelectorExpr: - return obj, obj == m.TypesInfo.ObjectOf(r.Sel) - default: - return obj, false - } - } - } - - // TODO(dh): the three blocks handling slices can be combined into a single block if we use reflection - - { - ln, ok1 := l.([]ast.Expr) - rn, ok2 := r.([]ast.Expr) - if ok1 || ok2 { - if ok1 && !ok2 { - cast, ok := r.(ast.Expr) - if !ok { - return nil, false - } - rn = []ast.Expr{cast} - } else if !ok1 && ok2 { - cast, ok := l.(ast.Expr) - if !ok { - return nil, false - } - ln = []ast.Expr{cast} - } - - if len(ln) != len(rn) { - return nil, false - } - for i, ll := range ln { - if _, ok := match(m, ll, rn[i]); !ok { - return nil, false - } - } - return r, true - } - } - - { - ln, ok1 := l.([]ast.Stmt) - rn, ok2 := r.([]ast.Stmt) - if ok1 || ok2 { - if ok1 && !ok2 { - cast, ok := r.(ast.Stmt) - if !ok { - return nil, false - } - rn = []ast.Stmt{cast} - } else if !ok1 && ok2 { - cast, ok := l.(ast.Stmt) - if !ok { - return nil, false - } - ln = []ast.Stmt{cast} - } - - if len(ln) != len(rn) { - return nil, false - } - 
for i, ll := range ln { - if _, ok := match(m, ll, rn[i]); !ok { - return nil, false - } - } - return r, true - } - } - - { - ln, ok1 := l.([]*ast.Field) - rn, ok2 := r.([]*ast.Field) - if ok1 || ok2 { - if ok1 && !ok2 { - cast, ok := r.(*ast.Field) - if !ok { - return nil, false - } - rn = []*ast.Field{cast} - } else if !ok1 && ok2 { - cast, ok := l.(*ast.Field) - if !ok { - return nil, false - } - ln = []*ast.Field{cast} - } - - if len(ln) != len(rn) { - return nil, false - } - for i, ll := range ln { - if _, ok := match(m, ll, rn[i]); !ok { - return nil, false - } - } - return r, true - } - } - - return nil, false -} - -// Match a Node with an AST node -func matchNodeAST(m *Matcher, a Node, b interface{}) (interface{}, bool) { - switch b := b.(type) { - case []ast.Stmt: - // 'a' is not a List or we'd be using its Match - // implementation. - - if len(b) != 1 { - return nil, false - } - return match(m, a, b[0]) - case []ast.Expr: - // 'a' is not a List or we'd be using its Match - // implementation. - - if len(b) != 1 { - return nil, false - } - return match(m, a, b[0]) - case []*ast.Field: - // 'a' is not a List or we'd be using its Match - // implementation - if len(b) != 1 { - return nil, false - } - return match(m, a, b[0]) - case ast.Node: - ra := reflect.ValueOf(a) - rb := reflect.ValueOf(b).Elem() - - if ra.Type().Name() != rb.Type().Name() { - return nil, false - } - - for i := 0; i < ra.NumField(); i++ { - af := ra.Field(i) - fieldName := ra.Type().Field(i).Name - bf := rb.FieldByName(fieldName) - if (bf == reflect.Value{}) { - panic(fmt.Sprintf("internal error: could not find field %s in type %t when comparing with %T", fieldName, b, a)) - } - ai := af.Interface() - bi := bf.Interface() - if ai == nil { - return b, bi == nil - } - if _, ok := match(m, ai.(Node), bi); !ok { - return b, false - } - } - return b, true - case nil: - return nil, a == Nil{} - case string, token.Token: - // 'a' can't be a String, Token, or Binding or we'd be using their Match implementations. 
- return nil, false - default: - panic(fmt.Sprintf("unhandled type %T", b)) - } -} - -// Match two AST nodes -func matchAST(m *Matcher, a, b ast.Node) (interface{}, bool) { - ra := reflect.ValueOf(a) - rb := reflect.ValueOf(b) - - if ra.Type() != rb.Type() { - return nil, false - } - if ra.IsNil() || rb.IsNil() { - return rb, ra.IsNil() == rb.IsNil() - } - - ra = ra.Elem() - rb = rb.Elem() - for i := 0; i < ra.NumField(); i++ { - af := ra.Field(i) - bf := rb.Field(i) - if af.Type() == rtTokPos || af.Type() == rtObject || af.Type() == rtCommentGroup { - continue - } - - switch af.Kind() { - case reflect.Slice: - if af.Len() != bf.Len() { - return nil, false - } - for j := 0; j < af.Len(); j++ { - if _, ok := match(m, af.Index(j).Interface().(ast.Node), bf.Index(j).Interface().(ast.Node)); !ok { - return nil, false - } - } - case reflect.String: - if af.String() != bf.String() { - return nil, false - } - case reflect.Int: - if af.Int() != bf.Int() { - return nil, false - } - case reflect.Bool: - if af.Bool() != bf.Bool() { - return nil, false - } - case reflect.Ptr, reflect.Interface: - if _, ok := match(m, af.Interface(), bf.Interface()); !ok { - return nil, false - } - default: - panic(fmt.Sprintf("internal error: unhandled kind %s (%T)", af.Kind(), af.Interface())) - } - } - return b, true -} - -func (b Binding) Match(m *Matcher, node interface{}) (interface{}, bool) { - if isNil(b.Node) { - v, ok := m.State[b.Name] - if ok { - // Recall value - return match(m, v, node) - } - // Matching anything - b.Node = Any{} - } - - // Store value - if _, ok := m.State[b.Name]; ok { - panic(fmt.Sprintf("binding already created: %s", b.Name)) - } - new, ret := match(m, b.Node, node) - if ret { - m.set(b, new) - } - return new, ret -} - -func (Any) Match(m *Matcher, node interface{}) (interface{}, bool) { - return node, true -} - -func (l List) Match(m *Matcher, node interface{}) (interface{}, bool) { - v := reflect.ValueOf(node) - if v.Kind() == reflect.Slice { - if isNil(l.Head) { - return node, v.Len() == 0 - } - if v.Len() == 0 { - return nil, false - } - // OPT(dh): don't check the entire tail if head didn't match - _, ok1 := match(m, l.Head, v.Index(0).Interface()) - _, ok2 := match(m, l.Tail, v.Slice(1, v.Len()).Interface()) - return node, ok1 && ok2 - } - // Our empty list does not equal an untyped Go nil. This way, we can - // tell apart an if with no else and an if with an empty else. 
- return nil, false -} - -func (s String) Match(m *Matcher, node interface{}) (interface{}, bool) { - switch o := node.(type) { - case token.Token: - if tok, ok := maybeToken(s); ok { - return match(m, tok, node) - } - return nil, false - case string: - return o, string(s) == o - case types.TypeAndValue: - return o, o.Value != nil && o.Value.String() == string(s) - default: - return nil, false - } -} - -func (tok Token) Match(m *Matcher, node interface{}) (interface{}, bool) { - o, ok := node.(token.Token) - if !ok { - return nil, false - } - return o, token.Token(tok) == o -} - -func (Nil) Match(m *Matcher, node interface{}) (interface{}, bool) { - if isNil(node) { - return nil, true - } - v := reflect.ValueOf(node) - switch v.Kind() { - case reflect.Chan, reflect.Func, reflect.Interface, reflect.Map, reflect.Pointer, reflect.Slice: - return nil, v.IsNil() - default: - return nil, false - } -} - -func (builtin Builtin) Match(m *Matcher, node interface{}) (interface{}, bool) { - r, ok := match(m, Ident(builtin), node) - if !ok { - return nil, false - } - ident := r.(*ast.Ident) - obj := m.TypesInfo.ObjectOf(ident) - if obj != types.Universe.Lookup(ident.Name) { - return nil, false - } - return ident, true -} - -func (obj Object) Match(m *Matcher, node interface{}) (interface{}, bool) { - r, ok := match(m, Ident(obj), node) - if !ok { - return nil, false - } - ident := r.(*ast.Ident) - - id := m.TypesInfo.ObjectOf(ident) - _, ok = match(m, obj.Name, ident.Name) - return id, ok -} - -func (fn Symbol) Match(m *Matcher, node interface{}) (interface{}, bool) { - var name string - var obj types.Object - - base := []Node{ - Ident{Any{}}, - SelectorExpr{Any{}, Any{}}, - } - p := Or{ - Nodes: append(base, - IndexExpr{Or{Nodes: base}, Any{}}, - IndexListExpr{Or{Nodes: base}, Any{}})} - - r, ok := match(m, p, node) - if !ok { - return nil, false - } - - fun := r.(ast.Expr) - switch idx := fun.(type) { - case *ast.IndexExpr: - fun = idx.X - case *ast.IndexListExpr: - fun = idx.X - } - fun = astutil.Unparen(fun) - - switch fun := fun.(type) { - case *ast.Ident: - obj = m.TypesInfo.ObjectOf(fun) - case *ast.SelectorExpr: - obj = m.TypesInfo.ObjectOf(fun.Sel) - default: - panic("unreachable") - } - switch obj := obj.(type) { - case *types.Func: - // OPT(dh): optimize this similar to code.FuncName - name = obj.FullName() - case *types.Builtin: - name = obj.Name() - case *types.TypeName: - if obj.Pkg() == nil { - return nil, false - } - if obj.Parent() != obj.Pkg().Scope() { - return nil, false - } - name = types.TypeString(obj.Type(), nil) - case *types.Const, *types.Var: - if obj.Pkg() == nil { - return nil, false - } - if obj.Parent() != obj.Pkg().Scope() { - return nil, false - } - name = fmt.Sprintf("%s.%s", obj.Pkg().Path(), obj.Name()) - default: - return nil, false - } - - _, ok = match(m, fn.Name, name) - return obj, ok -} - -func (or Or) Match(m *Matcher, node interface{}) (interface{}, bool) { - for _, opt := range or.Nodes { - m.push() - if ret, ok := match(m, opt, node); ok { - m.merge() - return ret, true - } else { - m.pop() - } - } - return nil, false -} - -func (not Not) Match(m *Matcher, node interface{}) (interface{}, bool) { - _, ok := match(m, not.Node, node) - if ok { - return nil, false - } - return node, true -} - -var integerLiteralQ = MustParse(`(Or (BasicLit "INT" _) (UnaryExpr (Or "+" "-") (IntegerLiteral _)))`) - -func (lit IntegerLiteral) Match(m *Matcher, node interface{}) (interface{}, bool) { - matched, ok := match(m, integerLiteralQ.Root, node) - if !ok { - return nil, 
false - } - tv, ok := m.TypesInfo.Types[matched.(ast.Expr)] - if !ok { - return nil, false - } - if tv.Value == nil { - return nil, false - } - _, ok = match(m, lit.Value, tv) - return matched, ok -} - -func (texpr TrulyConstantExpression) Match(m *Matcher, node interface{}) (interface{}, bool) { - expr, ok := node.(ast.Expr) - if !ok { - return nil, false - } - tv, ok := m.TypesInfo.Types[expr] - if !ok { - return nil, false - } - if tv.Value == nil { - return nil, false - } - truly := true - ast.Inspect(expr, func(node ast.Node) bool { - if _, ok := node.(*ast.Ident); ok { - truly = false - return false - } - return true - }) - if !truly { - return nil, false - } - _, ok = match(m, texpr.Value, tv) - return expr, ok -} - -var ( - // Types of fields in go/ast structs that we want to skip - rtTokPos = reflect.TypeOf(token.Pos(0)) - rtObject = reflect.TypeOf((*ast.Object)(nil)) - rtCommentGroup = reflect.TypeOf((*ast.CommentGroup)(nil)) -) - -var ( - _ matcher = Binding{} - _ matcher = Any{} - _ matcher = List{} - _ matcher = String("") - _ matcher = Token(0) - _ matcher = Nil{} - _ matcher = Builtin{} - _ matcher = Object{} - _ matcher = Symbol{} - _ matcher = Or{} - _ matcher = Not{} - _ matcher = IntegerLiteral{} - _ matcher = TrulyConstantExpression{} -) diff --git a/vendor/honnef.co/go/tools/pattern/parser.go b/vendor/honnef.co/go/tools/pattern/parser.go deleted file mode 100644 index ba0897524..000000000 --- a/vendor/honnef.co/go/tools/pattern/parser.go +++ /dev/null @@ -1,526 +0,0 @@ -package pattern - -import ( - "errors" - "fmt" - "go/ast" - "go/token" - "reflect" -) - -type Pattern struct { - Root Node - // Relevant contains instances of ast.Node that could potentially - // initiate a successful match of the pattern. - Relevant map[reflect.Type]struct{} - - // Mapping from binding index to binding name - Bindings []string -} - -func MustParse(s string) Pattern { - p := &Parser{AllowTypeInfo: true} - pat, err := p.Parse(s) - if err != nil { - panic(err) - } - return pat -} - -func roots(node Node, m map[reflect.Type]struct{}) { - switch node := node.(type) { - case Or: - for _, el := range node.Nodes { - roots(el, m) - } - case Not: - roots(node.Node, m) - case Binding: - roots(node.Node, m) - case Nil, nil: - // this branch is reached via bindings - for _, T := range allTypes { - m[T] = struct{}{} - } - default: - Ts, ok := nodeToASTTypes[reflect.TypeOf(node)] - if !ok { - panic(fmt.Sprintf("internal error: unhandled type %T", node)) - } - for _, T := range Ts { - m[T] = struct{}{} - } - } -} - -var allTypes = []reflect.Type{ - reflect.TypeOf((*ast.RangeStmt)(nil)), - reflect.TypeOf((*ast.AssignStmt)(nil)), - reflect.TypeOf((*ast.IndexExpr)(nil)), - reflect.TypeOf((*ast.Ident)(nil)), - reflect.TypeOf((*ast.ValueSpec)(nil)), - reflect.TypeOf((*ast.GenDecl)(nil)), - reflect.TypeOf((*ast.BinaryExpr)(nil)), - reflect.TypeOf((*ast.ForStmt)(nil)), - reflect.TypeOf((*ast.ArrayType)(nil)), - reflect.TypeOf((*ast.DeferStmt)(nil)), - reflect.TypeOf((*ast.MapType)(nil)), - reflect.TypeOf((*ast.ReturnStmt)(nil)), - reflect.TypeOf((*ast.SliceExpr)(nil)), - reflect.TypeOf((*ast.StarExpr)(nil)), - reflect.TypeOf((*ast.UnaryExpr)(nil)), - reflect.TypeOf((*ast.SendStmt)(nil)), - reflect.TypeOf((*ast.SelectStmt)(nil)), - reflect.TypeOf((*ast.ImportSpec)(nil)), - reflect.TypeOf((*ast.IfStmt)(nil)), - reflect.TypeOf((*ast.GoStmt)(nil)), - reflect.TypeOf((*ast.Field)(nil)), - reflect.TypeOf((*ast.SelectorExpr)(nil)), - reflect.TypeOf((*ast.StructType)(nil)), - reflect.TypeOf((*ast.KeyValueExpr)(nil)), 
- reflect.TypeOf((*ast.FuncType)(nil)), - reflect.TypeOf((*ast.FuncLit)(nil)), - reflect.TypeOf((*ast.FuncDecl)(nil)), - reflect.TypeOf((*ast.ChanType)(nil)), - reflect.TypeOf((*ast.CallExpr)(nil)), - reflect.TypeOf((*ast.CaseClause)(nil)), - reflect.TypeOf((*ast.CommClause)(nil)), - reflect.TypeOf((*ast.CompositeLit)(nil)), - reflect.TypeOf((*ast.EmptyStmt)(nil)), - reflect.TypeOf((*ast.SwitchStmt)(nil)), - reflect.TypeOf((*ast.TypeSwitchStmt)(nil)), - reflect.TypeOf((*ast.TypeAssertExpr)(nil)), - reflect.TypeOf((*ast.TypeSpec)(nil)), - reflect.TypeOf((*ast.InterfaceType)(nil)), - reflect.TypeOf((*ast.BranchStmt)(nil)), - reflect.TypeOf((*ast.IncDecStmt)(nil)), - reflect.TypeOf((*ast.BasicLit)(nil)), -} - -var nodeToASTTypes = map[reflect.Type][]reflect.Type{ - reflect.TypeOf(String("")): nil, - reflect.TypeOf(Token(0)): nil, - reflect.TypeOf(List{}): {reflect.TypeOf((*ast.BlockStmt)(nil)), reflect.TypeOf((*ast.FieldList)(nil))}, - reflect.TypeOf(Builtin{}): {reflect.TypeOf((*ast.Ident)(nil))}, - reflect.TypeOf(Object{}): {reflect.TypeOf((*ast.Ident)(nil))}, - reflect.TypeOf(Symbol{}): {reflect.TypeOf((*ast.Ident)(nil)), reflect.TypeOf((*ast.SelectorExpr)(nil))}, - reflect.TypeOf(Any{}): allTypes, - reflect.TypeOf(RangeStmt{}): {reflect.TypeOf((*ast.RangeStmt)(nil))}, - reflect.TypeOf(AssignStmt{}): {reflect.TypeOf((*ast.AssignStmt)(nil))}, - reflect.TypeOf(IndexExpr{}): {reflect.TypeOf((*ast.IndexExpr)(nil))}, - reflect.TypeOf(Ident{}): {reflect.TypeOf((*ast.Ident)(nil))}, - reflect.TypeOf(ValueSpec{}): {reflect.TypeOf((*ast.ValueSpec)(nil))}, - reflect.TypeOf(GenDecl{}): {reflect.TypeOf((*ast.GenDecl)(nil))}, - reflect.TypeOf(BinaryExpr{}): {reflect.TypeOf((*ast.BinaryExpr)(nil))}, - reflect.TypeOf(ForStmt{}): {reflect.TypeOf((*ast.ForStmt)(nil))}, - reflect.TypeOf(ArrayType{}): {reflect.TypeOf((*ast.ArrayType)(nil))}, - reflect.TypeOf(DeferStmt{}): {reflect.TypeOf((*ast.DeferStmt)(nil))}, - reflect.TypeOf(MapType{}): {reflect.TypeOf((*ast.MapType)(nil))}, - reflect.TypeOf(ReturnStmt{}): {reflect.TypeOf((*ast.ReturnStmt)(nil))}, - reflect.TypeOf(SliceExpr{}): {reflect.TypeOf((*ast.SliceExpr)(nil))}, - reflect.TypeOf(StarExpr{}): {reflect.TypeOf((*ast.StarExpr)(nil))}, - reflect.TypeOf(UnaryExpr{}): {reflect.TypeOf((*ast.UnaryExpr)(nil))}, - reflect.TypeOf(SendStmt{}): {reflect.TypeOf((*ast.SendStmt)(nil))}, - reflect.TypeOf(SelectStmt{}): {reflect.TypeOf((*ast.SelectStmt)(nil))}, - reflect.TypeOf(ImportSpec{}): {reflect.TypeOf((*ast.ImportSpec)(nil))}, - reflect.TypeOf(IfStmt{}): {reflect.TypeOf((*ast.IfStmt)(nil))}, - reflect.TypeOf(GoStmt{}): {reflect.TypeOf((*ast.GoStmt)(nil))}, - reflect.TypeOf(Field{}): {reflect.TypeOf((*ast.Field)(nil))}, - reflect.TypeOf(SelectorExpr{}): {reflect.TypeOf((*ast.SelectorExpr)(nil))}, - reflect.TypeOf(StructType{}): {reflect.TypeOf((*ast.StructType)(nil))}, - reflect.TypeOf(KeyValueExpr{}): {reflect.TypeOf((*ast.KeyValueExpr)(nil))}, - reflect.TypeOf(FuncType{}): {reflect.TypeOf((*ast.FuncType)(nil))}, - reflect.TypeOf(FuncLit{}): {reflect.TypeOf((*ast.FuncLit)(nil))}, - reflect.TypeOf(FuncDecl{}): {reflect.TypeOf((*ast.FuncDecl)(nil))}, - reflect.TypeOf(ChanType{}): {reflect.TypeOf((*ast.ChanType)(nil))}, - reflect.TypeOf(CallExpr{}): {reflect.TypeOf((*ast.CallExpr)(nil))}, - reflect.TypeOf(CaseClause{}): {reflect.TypeOf((*ast.CaseClause)(nil))}, - reflect.TypeOf(CommClause{}): {reflect.TypeOf((*ast.CommClause)(nil))}, - reflect.TypeOf(CompositeLit{}): {reflect.TypeOf((*ast.CompositeLit)(nil))}, - reflect.TypeOf(EmptyStmt{}): 
{reflect.TypeOf((*ast.EmptyStmt)(nil))}, - reflect.TypeOf(SwitchStmt{}): {reflect.TypeOf((*ast.SwitchStmt)(nil))}, - reflect.TypeOf(TypeSwitchStmt{}): {reflect.TypeOf((*ast.TypeSwitchStmt)(nil))}, - reflect.TypeOf(TypeAssertExpr{}): {reflect.TypeOf((*ast.TypeAssertExpr)(nil))}, - reflect.TypeOf(TypeSpec{}): {reflect.TypeOf((*ast.TypeSpec)(nil))}, - reflect.TypeOf(InterfaceType{}): {reflect.TypeOf((*ast.InterfaceType)(nil))}, - reflect.TypeOf(BranchStmt{}): {reflect.TypeOf((*ast.BranchStmt)(nil))}, - reflect.TypeOf(IncDecStmt{}): {reflect.TypeOf((*ast.IncDecStmt)(nil))}, - reflect.TypeOf(BasicLit{}): {reflect.TypeOf((*ast.BasicLit)(nil))}, - reflect.TypeOf(IntegerLiteral{}): {reflect.TypeOf((*ast.BasicLit)(nil)), reflect.TypeOf((*ast.UnaryExpr)(nil))}, - reflect.TypeOf(TrulyConstantExpression{}): allTypes, // this is an over-approximation, which is fine -} - -var requiresTypeInfo = map[string]bool{ - "Symbol": true, - "Builtin": true, - "Object": true, - "IntegerLiteral": true, - "TrulyConstantExpression": true, -} - -type Parser struct { - // Allow nodes that rely on type information - AllowTypeInfo bool - - lex *lexer - cur item - last *item - items chan item - - bindings map[string]int -} - -func (p *Parser) bindingIndex(name string) int { - if p.bindings == nil { - p.bindings = map[string]int{} - } - if idx, ok := p.bindings[name]; ok { - return idx - } - idx := len(p.bindings) - p.bindings[name] = idx - return idx -} - -func (p *Parser) Parse(s string) (Pattern, error) { - p.cur = item{} - p.last = nil - p.items = nil - - fset := token.NewFileSet() - p.lex = &lexer{ - f: fset.AddFile("

    ") - } - p.text(out, x.Text) - out.WriteString("\n") - - case *Heading: - out.WriteString("") - p.text(out, x.Text) - out.WriteString("\n") - - case *Code: - out.WriteString("

    ")
    -		p.escape(out, x.Text)
    -		out.WriteString("
    \n") - - case *List: - kind := "ol>\n" - if x.Items[0].Number == "" { - kind = "ul>\n" - } - out.WriteString("<") - out.WriteString(kind) - next := "1" - for _, item := range x.Items { - out.WriteString("") - p.tight = !x.BlankBetween() - for _, blk := range item.Content { - p.block(out, blk) - } - p.tight = false - } - out.WriteString("= 0; i-- { - if b[i] < '9' { - b[i]++ - return string(b) - } - b[i] = '0' - } - return "1" + string(b) -} - -// text prints the text sequence x to out. -func (p *htmlPrinter) text(out *bytes.Buffer, x []Text) { - for _, t := range x { - switch t := t.(type) { - case Plain: - p.escape(out, string(t)) - case Italic: - out.WriteString("") - p.escape(out, string(t)) - out.WriteString("") - case *Link: - out.WriteString(``) - p.text(out, t.Text) - out.WriteString("") - case *DocLink: - url := p.docLinkURL(t) - if url != "" { - out.WriteString(``) - } - p.text(out, t.Text) - if url != "" { - out.WriteString("") - } - } - } -} - -// escape prints s to out as plain text, -// escaping < & " ' and > to avoid being misinterpreted -// in larger HTML constructs. -func (p *htmlPrinter) escape(out *bytes.Buffer, s string) { - start := 0 - for i := 0; i < len(s); i++ { - switch s[i] { - case '<': - out.WriteString(s[start:i]) - out.WriteString("<") - start = i + 1 - case '&': - out.WriteString(s[start:i]) - out.WriteString("&") - start = i + 1 - case '"': - out.WriteString(s[start:i]) - out.WriteString(""") - start = i + 1 - case '\'': - out.WriteString(s[start:i]) - out.WriteString("'") - start = i + 1 - case '>': - out.WriteString(s[start:i]) - out.WriteString(">") - start = i + 1 - } - } - out.WriteString(s[start:]) -} diff --git a/vendor/mvdan.cc/gofumpt/internal/govendor/go/doc/comment/markdown.go b/vendor/mvdan.cc/gofumpt/internal/govendor/go/doc/comment/markdown.go deleted file mode 100644 index d8550f2e3..000000000 --- a/vendor/mvdan.cc/gofumpt/internal/govendor/go/doc/comment/markdown.go +++ /dev/null @@ -1,188 +0,0 @@ -// Copyright 2022 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package comment - -import ( - "bytes" - "fmt" - "strings" -) - -// An mdPrinter holds the state needed for printing a Doc as Markdown. -type mdPrinter struct { - *Printer - headingPrefix string - raw bytes.Buffer -} - -// Markdown returns a Markdown formatting of the Doc. -// See the [Printer] documentation for ways to customize the Markdown output. -func (p *Printer) Markdown(d *Doc) []byte { - mp := &mdPrinter{ - Printer: p, - headingPrefix: strings.Repeat("#", p.headingLevel()) + " ", - } - - var out bytes.Buffer - for i, x := range d.Content { - if i > 0 { - out.WriteByte('\n') - } - mp.block(&out, x) - } - return out.Bytes() -} - -// block prints the block x to out. 
-func (p *mdPrinter) block(out *bytes.Buffer, x Block) { - switch x := x.(type) { - default: - fmt.Fprintf(out, "?%T", x) - - case *Paragraph: - p.text(out, x.Text) - out.WriteString("\n") - - case *Heading: - out.WriteString(p.headingPrefix) - p.text(out, x.Text) - if id := p.headingID(x); id != "" { - out.WriteString(" {#") - out.WriteString(id) - out.WriteString("}") - } - out.WriteString("\n") - - case *Code: - md := x.Text - for md != "" { - var line string - line, md, _ = strings.Cut(md, "\n") - if line != "" { - out.WriteString("\t") - out.WriteString(line) - } - out.WriteString("\n") - } - - case *List: - loose := x.BlankBetween() - for i, item := range x.Items { - if i > 0 && loose { - out.WriteString("\n") - } - if n := item.Number; n != "" { - out.WriteString(" ") - out.WriteString(n) - out.WriteString(". ") - } else { - out.WriteString(" - ") // SP SP - SP - } - for i, blk := range item.Content { - const fourSpace = " " - if i > 0 { - out.WriteString("\n" + fourSpace) - } - p.text(out, blk.(*Paragraph).Text) - out.WriteString("\n") - } - } - } -} - -// text prints the text sequence x to out. -func (p *mdPrinter) text(out *bytes.Buffer, x []Text) { - p.raw.Reset() - p.rawText(&p.raw, x) - line := bytes.TrimSpace(p.raw.Bytes()) - if len(line) == 0 { - return - } - switch line[0] { - case '+', '-', '*', '#': - // Escape what would be the start of an unordered list or heading. - out.WriteByte('\\') - case '0', '1', '2', '3', '4', '5', '6', '7', '8', '9': - i := 1 - for i < len(line) && '0' <= line[i] && line[i] <= '9' { - i++ - } - if i < len(line) && (line[i] == '.' || line[i] == ')') { - // Escape what would be the start of an ordered list. - out.Write(line[:i]) - out.WriteByte('\\') - line = line[i:] - } - } - out.Write(line) -} - -// rawText prints the text sequence x to out, -// without worrying about escaping characters -// that have special meaning at the start of a Markdown line. -func (p *mdPrinter) rawText(out *bytes.Buffer, x []Text) { - for _, t := range x { - switch t := t.(type) { - case Plain: - p.escape(out, string(t)) - case Italic: - out.WriteString("*") - p.escape(out, string(t)) - out.WriteString("*") - case *Link: - out.WriteString("[") - p.rawText(out, t.Text) - out.WriteString("](") - out.WriteString(t.URL) - out.WriteString(")") - case *DocLink: - url := p.docLinkURL(t) - if url != "" { - out.WriteString("[") - } - p.rawText(out, t.Text) - if url != "" { - out.WriteString("](") - url = strings.ReplaceAll(url, "(", "%28") - url = strings.ReplaceAll(url, ")", "%29") - out.WriteString(url) - out.WriteString(")") - } - } - } -} - -// escape prints s to out as plain text, -// escaping special characters to avoid being misinterpreted -// as Markdown markup sequences. -func (p *mdPrinter) escape(out *bytes.Buffer, s string) { - start := 0 - for i := 0; i < len(s); i++ { - switch s[i] { - case '\n': - // Turn all \n into spaces, for a few reasons: - // - Avoid introducing paragraph breaks accidentally. - // - Avoid the need to reindent after the newline. - // - Avoid problems with Markdown renderers treating - // every mid-paragraph newline as a
    . - out.WriteString(s[start:i]) - out.WriteByte(' ') - start = i + 1 - continue - case '`', '_', '*', '[', '<', '\\': - // Not all of these need to be escaped all the time, - // but is valid and easy to do so. - // We assume the Markdown is being passed to a - // Markdown renderer, not edited by a person, - // so it's fine to have escapes that are not strictly - // necessary in some cases. - out.WriteString(s[start:i]) - out.WriteByte('\\') - out.WriteByte(s[i]) - start = i + 1 - } - } - out.WriteString(s[start:]) -} diff --git a/vendor/mvdan.cc/gofumpt/internal/govendor/go/doc/comment/parse.go b/vendor/mvdan.cc/gofumpt/internal/govendor/go/doc/comment/parse.go deleted file mode 100644 index 372577b2b..000000000 --- a/vendor/mvdan.cc/gofumpt/internal/govendor/go/doc/comment/parse.go +++ /dev/null @@ -1,1262 +0,0 @@ -// Copyright 2022 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package comment - -import ( - "sort" - "strings" - "unicode" - "unicode/utf8" -) - -// A Doc is a parsed Go doc comment. -type Doc struct { - // Content is the sequence of content blocks in the comment. - Content []Block - - // Links is the link definitions in the comment. - Links []*LinkDef -} - -// A LinkDef is a single link definition. -type LinkDef struct { - Text string // the link text - URL string // the link URL - Used bool // whether the comment uses the definition -} - -// A Block is block-level content in a doc comment, -// one of [*Code], [*Heading], [*List], or [*Paragraph]. -type Block interface { - block() -} - -// A Heading is a doc comment heading. -type Heading struct { - Text []Text // the heading text -} - -func (*Heading) block() {} - -// A List is a numbered or bullet list. -// Lists are always non-empty: len(Items) > 0. -// In a numbered list, every Items[i].Number is a non-empty string. -// In a bullet list, every Items[i].Number is an empty string. -type List struct { - // Items is the list items. - Items []*ListItem - - // ForceBlankBefore indicates that the list must be - // preceded by a blank line when reformatting the comment, - // overriding the usual conditions. See the BlankBefore method. - // - // The comment parser sets ForceBlankBefore for any list - // that is preceded by a blank line, to make sure - // the blank line is preserved when printing. - ForceBlankBefore bool - - // ForceBlankBetween indicates that list items must be - // separated by blank lines when reformatting the comment, - // overriding the usual conditions. See the BlankBetween method. - // - // The comment parser sets ForceBlankBetween for any list - // that has a blank line between any two of its items, to make sure - // the blank lines are preserved when printing. - ForceBlankBetween bool -} - -func (*List) block() {} - -// BlankBefore reports whether a reformatting of the comment -// should include a blank line before the list. -// The default rule is the same as for [BlankBetween]: -// if the list item content contains any blank lines -// (meaning at least one item has multiple paragraphs) -// then the list itself must be preceded by a blank line. -// A preceding blank line can be forced by setting [List].ForceBlankBefore. -func (l *List) BlankBefore() bool { - return l.ForceBlankBefore || l.BlankBetween() -} - -// BlankBetween reports whether a reformatting of the comment -// should include a blank line between each pair of list items. 
-// The default rule is that if the list item content contains any blank lines -// (meaning at least one item has multiple paragraphs) -// then list items must themselves be separated by blank lines. -// Blank line separators can be forced by setting [List].ForceBlankBetween. -func (l *List) BlankBetween() bool { - if l.ForceBlankBetween { - return true - } - for _, item := range l.Items { - if len(item.Content) != 1 { - // Unreachable for parsed comments today, - // since the only way to get multiple item.Content - // is multiple paragraphs, which must have been - // separated by a blank line. - return true - } - } - return false -} - -// A ListItem is a single item in a numbered or bullet list. -type ListItem struct { - // Number is a decimal string in a numbered list - // or an empty string in a bullet list. - Number string // "1", "2", ...; "" for bullet list - - // Content is the list content. - // Currently, restrictions in the parser and printer - // require every element of Content to be a *Paragraph. - Content []Block // Content of this item. -} - -// A Paragraph is a paragraph of text. -type Paragraph struct { - Text []Text -} - -func (*Paragraph) block() {} - -// A Code is a preformatted code block. -type Code struct { - // Text is the preformatted text, ending with a newline character. - // It may be multiple lines, each of which ends with a newline character. - // It is never empty, nor does it start or end with a blank line. - Text string -} - -func (*Code) block() {} - -// A Text is text-level content in a doc comment, -// one of [Plain], [Italic], [*Link], or [*DocLink]. -type Text interface { - text() -} - -// A Plain is a string rendered as plain text (not italicized). -type Plain string - -func (Plain) text() {} - -// An Italic is a string rendered as italicized text. -type Italic string - -func (Italic) text() {} - -// A Link is a link to a specific URL. -type Link struct { - Auto bool // is this an automatic (implicit) link of a literal URL? - Text []Text // text of link - URL string // target URL of link -} - -func (*Link) text() {} - -// A DocLink is a link to documentation for a Go package or symbol. -type DocLink struct { - Text []Text // text of link - - // ImportPath, Recv, and Name identify the Go package or symbol - // that is the link target. The potential combinations of - // non-empty fields are: - // - ImportPath: a link to another package - // - ImportPath, Name: a link to a const, func, type, or var in another package - // - ImportPath, Recv, Name: a link to a method in another package - // - Name: a link to a const, func, type, or var in this package - // - Recv, Name: a link to a method in this package - ImportPath string // import path - Recv string // receiver type, without any pointer star, for methods - Name string // const, func, type, var, or method name -} - -func (*DocLink) text() {} - -// A Parser is a doc comment parser. -// The fields in the struct can be filled in before calling Parse -// in order to customize the details of the parsing process. -type Parser struct { - // Words is a map of Go identifier words that - // should be italicized and potentially linked. - // If Words[w] is the empty string, then the word w - // is only italicized. Otherwise it is linked, using - // Words[w] as the link target. - // Words corresponds to the [go/doc.ToHTML] words parameter. - Words map[string]string - - // LookupPackage resolves a package name to an import path. 
- // - // If LookupPackage(name) returns ok == true, then [name] - // (or [name.Sym] or [name.Sym.Method]) - // is considered a documentation link to importPath's package docs. - // It is valid to return "", true, in which case name is considered - // to refer to the current package. - // - // If LookupPackage(name) returns ok == false, - // then [name] (or [name.Sym] or [name.Sym.Method]) - // will not be considered a documentation link, - // except in the case where name is the full (but single-element) import path - // of a package in the standard library, such as in [math] or [io.Reader]. - // LookupPackage is still called for such names, - // in order to permit references to imports of other packages - // with the same package names. - // - // Setting LookupPackage to nil is equivalent to setting it to - // a function that always returns "", false. - LookupPackage func(name string) (importPath string, ok bool) - - // LookupSym reports whether a symbol name or method name - // exists in the current package. - // - // If LookupSym("", "Name") returns true, then [Name] - // is considered a documentation link for a const, func, type, or var. - // - // Similarly, if LookupSym("Recv", "Name") returns true, - // then [Recv.Name] is considered a documentation link for - // type Recv's method Name. - // - // Setting LookupSym to nil is equivalent to setting it to a function - // that always returns false. - LookupSym func(recv, name string) (ok bool) -} - -// parseDoc is parsing state for a single doc comment. -type parseDoc struct { - *Parser - *Doc - links map[string]*LinkDef - lines []string - lookupSym func(recv, name string) bool -} - -// lookupPkg is called to look up the pkg in [pkg], [pkg.Name], and [pkg.Name.Recv]. -// If pkg has a slash, it is assumed to be the full import path and is returned with ok = true. -// -// Otherwise, pkg is probably a simple package name like "rand" (not "crypto/rand" or "math/rand"). -// d.LookupPackage provides a way for the caller to allow resolving such names with reference -// to the imports in the surrounding package. -// -// There is one collision between these two cases: single-element standard library names -// like "math" are full import paths but don't contain slashes. We let d.LookupPackage have -// the first chance to resolve it, in case there's a different package imported as math, -// and otherwise we refer to a built-in list of single-element standard library package names. -func (d *parseDoc) lookupPkg(pkg string) (importPath string, ok bool) { - if strings.Contains(pkg, "/") { // assume a full import path - if validImportPath(pkg) { - return pkg, true - } - return "", false - } - if d.LookupPackage != nil { - // Give LookupPackage a chance. - if path, ok := d.LookupPackage(pkg); ok { - return path, true - } - } - return DefaultLookupPackage(pkg) -} - -func isStdPkg(path string) bool { - // TODO(rsc): Use sort.Find once we don't have to worry about - // copying this code into older Go environments. - i := sort.Search(len(stdPkgs), func(i int) bool { return stdPkgs[i] >= path }) - return i < len(stdPkgs) && stdPkgs[i] == path -} - -// DefaultLookupPackage is the default package lookup -// function, used when [Parser].LookupPackage is nil. -// It recognizes names of the packages from the standard -// library with single-element import paths, such as math, -// which would otherwise be impossible to name. -// -// Note that the go/doc package provides a more sophisticated -// lookup based on the imports used in the current package. 
-func DefaultLookupPackage(name string) (importPath string, ok bool) { - if isStdPkg(name) { - return name, true - } - return "", false -} - -// Parse parses the doc comment text and returns the *Doc form. -// Comment markers (/* // and */) in the text must have already been removed. -func (p *Parser) Parse(text string) *Doc { - lines := unindent(strings.Split(text, "\n")) - d := &parseDoc{ - Parser: p, - Doc: new(Doc), - links: make(map[string]*LinkDef), - lines: lines, - lookupSym: func(recv, name string) bool { return false }, - } - if p.LookupSym != nil { - d.lookupSym = p.LookupSym - } - - // First pass: break into block structure and collect known links. - // The text is all recorded as Plain for now. - var prev span - for _, s := range parseSpans(lines) { - var b Block - switch s.kind { - default: - panic("mvdan.cc/gofumpt/internal/govendor/go/doc/comment: internal error: unknown span kind") - case spanList: - b = d.list(lines[s.start:s.end], prev.end < s.start) - case spanCode: - b = d.code(lines[s.start:s.end]) - case spanOldHeading: - b = d.oldHeading(lines[s.start]) - case spanHeading: - b = d.heading(lines[s.start]) - case spanPara: - b = d.paragraph(lines[s.start:s.end]) - } - if b != nil { - d.Content = append(d.Content, b) - } - prev = s - } - - // Second pass: interpret all the Plain text now that we know the links. - for _, b := range d.Content { - switch b := b.(type) { - case *Paragraph: - b.Text = d.parseLinkedText(string(b.Text[0].(Plain))) - case *List: - for _, i := range b.Items { - for _, c := range i.Content { - p := c.(*Paragraph) - p.Text = d.parseLinkedText(string(p.Text[0].(Plain))) - } - } - } - } - - return d.Doc -} - -// A span represents a single span of comment lines (lines[start:end]) -// of an identified kind (code, heading, paragraph, and so on). -type span struct { - start int - end int - kind spanKind -} - -// A spanKind describes the kind of span. -type spanKind int - -const ( - _ spanKind = iota - spanCode - spanHeading - spanList - spanOldHeading - spanPara -) - -func parseSpans(lines []string) []span { - var spans []span - - // The loop may process a line twice: once as unindented - // and again forced indented. So the maximum expected - // number of iterations is 2*len(lines). The repeating logic - // can be subtle, though, and to protect against introduction - // of infinite loops in future changes, we watch to see that - // we are not looping too much. A panic is better than a - // quiet infinite loop. - watchdog := 2 * len(lines) - - i := 0 - forceIndent := 0 -Spans: - for { - // Skip blank lines. - for i < len(lines) && lines[i] == "" { - i++ - } - if i >= len(lines) { - break - } - if watchdog--; watchdog < 0 { - panic("mvdan.cc/gofumpt/internal/govendor/go/doc/comment: internal error: not making progress") - } - - var kind spanKind - start := i - end := i - if i < forceIndent || indented(lines[i]) { - // Indented (or force indented). - // Ends before next unindented. (Blank lines are OK.) - // If this is an unindented list that we are heuristically treating as indented, - // then accept unindented list item lines up to the first blank lines. - // The heuristic is disabled at blank lines to contain its effect - // to non-gofmt'ed sections of the comment. - unindentedListOK := isList(lines[i]) && i < forceIndent - i++ - for i < len(lines) && (lines[i] == "" || i < forceIndent || indented(lines[i]) || (unindentedListOK && isList(lines[i]))) { - if lines[i] == "" { - unindentedListOK = false - } - i++ - } - - // Drop trailing blank lines. 
- end = i - for end > start && lines[end-1] == "" { - end-- - } - - // If indented lines are followed (without a blank line) - // by an unindented line ending in a brace, - // take that one line too. This fixes the common mistake - // of pasting in something like - // - // func main() { - // fmt.Println("hello, world") - // } - // - // and forgetting to indent it. - // The heuristic will never trigger on a gofmt'ed comment, - // because any gofmt'ed code block or list would be - // followed by a blank line or end of comment. - if end < len(lines) && strings.HasPrefix(lines[end], "}") { - end++ - } - - if isList(lines[start]) { - kind = spanList - } else { - kind = spanCode - } - } else { - // Unindented. Ends at next blank or indented line. - i++ - for i < len(lines) && lines[i] != "" && !indented(lines[i]) { - i++ - } - end = i - - // If unindented lines are followed (without a blank line) - // by an indented line that would start a code block, - // check whether the final unindented lines - // should be left for the indented section. - // This can happen for the common mistakes of - // unindented code or unindented lists. - // The heuristic will never trigger on a gofmt'ed comment, - // because any gofmt'ed code block would have a blank line - // preceding it after the unindented lines. - if i < len(lines) && lines[i] != "" && !isList(lines[i]) { - switch { - case isList(lines[i-1]): - // If the final unindented line looks like a list item, - // this may be the first indented line wrap of - // a mistakenly unindented list. - // Leave all the unindented list items. - forceIndent = end - end-- - for end > start && isList(lines[end-1]) { - end-- - } - - case strings.HasSuffix(lines[i-1], "{") || strings.HasSuffix(lines[i-1], `\`): - // If the final unindented line ended in { or \ - // it is probably the start of a misindented code block. - // Give the user a single line fix. - // Often that's enough; if not, the user can fix the others themselves. - forceIndent = end - end-- - } - - if start == end && forceIndent > start { - i = start - continue Spans - } - } - - // Span is either paragraph or heading. - if end-start == 1 && isHeading(lines[start]) { - kind = spanHeading - } else if end-start == 1 && isOldHeading(lines[start], lines, start) { - kind = spanOldHeading - } else { - kind = spanPara - } - } - - spans = append(spans, span{start, end, kind}) - i = end - } - - return spans -} - -// indented reports whether line is indented -// (starts with a leading space or tab). -func indented(line string) bool { - return line != "" && (line[0] == ' ' || line[0] == '\t') -} - -// unindent removes any common space/tab prefix -// from each line in lines, returning a copy of lines in which -// those prefixes have been trimmed from each line. -// It also replaces any lines containing only spaces with blank lines (empty strings). -func unindent(lines []string) []string { - // Trim leading and trailing blank lines. - for len(lines) > 0 && isBlank(lines[0]) { - lines = lines[1:] - } - for len(lines) > 0 && isBlank(lines[len(lines)-1]) { - lines = lines[:len(lines)-1] - } - if len(lines) == 0 { - return nil - } - - // Compute and remove common indentation. 
- prefix := leadingSpace(lines[0]) - for _, line := range lines[1:] { - if !isBlank(line) { - prefix = commonPrefix(prefix, leadingSpace(line)) - } - } - - out := make([]string, len(lines)) - for i, line := range lines { - line = strings.TrimPrefix(line, prefix) - if strings.TrimSpace(line) == "" { - line = "" - } - out[i] = line - } - for len(out) > 0 && out[0] == "" { - out = out[1:] - } - for len(out) > 0 && out[len(out)-1] == "" { - out = out[:len(out)-1] - } - return out -} - -// isBlank reports whether s is a blank line. -func isBlank(s string) bool { - return len(s) == 0 || (len(s) == 1 && s[0] == '\n') -} - -// commonPrefix returns the longest common prefix of a and b. -func commonPrefix(a, b string) string { - i := 0 - for i < len(a) && i < len(b) && a[i] == b[i] { - i++ - } - return a[0:i] -} - -// leadingSpace returns the longest prefix of s consisting of spaces and tabs. -func leadingSpace(s string) string { - i := 0 - for i < len(s) && (s[i] == ' ' || s[i] == '\t') { - i++ - } - return s[:i] -} - -// isOldHeading reports whether line is an old-style section heading. -// line is all[off]. -func isOldHeading(line string, all []string, off int) bool { - if off <= 0 || all[off-1] != "" || off+2 >= len(all) || all[off+1] != "" || leadingSpace(all[off+2]) != "" { - return false - } - - line = strings.TrimSpace(line) - - // a heading must start with an uppercase letter - r, _ := utf8.DecodeRuneInString(line) - if !unicode.IsLetter(r) || !unicode.IsUpper(r) { - return false - } - - // it must end in a letter or digit: - r, _ = utf8.DecodeLastRuneInString(line) - if !unicode.IsLetter(r) && !unicode.IsDigit(r) { - return false - } - - // exclude lines with illegal characters. we allow "()," - if strings.ContainsAny(line, ";:!?+*/=[]{}_^°&§~%#@<\">\\") { - return false - } - - // allow "'" for possessive "'s" only - for b := line; ; { - var ok bool - if _, b, ok = strings.Cut(b, "'"); !ok { - break - } - if b != "s" && !strings.HasPrefix(b, "s ") { - return false // ' not followed by s and then end-of-word - } - } - - // allow "." when followed by non-space - for b := line; ; { - var ok bool - if _, b, ok = strings.Cut(b, "."); !ok { - break - } - if b == "" || strings.HasPrefix(b, " ") { - return false // not followed by non-space - } - } - - return true -} - -// oldHeading returns the *Heading for the given old-style section heading line. -func (d *parseDoc) oldHeading(line string) Block { - return &Heading{Text: []Text{Plain(strings.TrimSpace(line))}} -} - -// isHeading reports whether line is a new-style section heading. -func isHeading(line string) bool { - return len(line) >= 2 && - line[0] == '#' && - (line[1] == ' ' || line[1] == '\t') && - strings.TrimSpace(line) != "#" -} - -// heading returns the *Heading for the given new-style section heading line. -func (d *parseDoc) heading(line string) Block { - return &Heading{Text: []Text{Plain(strings.TrimSpace(line[1:]))}} -} - -// code returns a code block built from the lines. -func (d *parseDoc) code(lines []string) *Code { - body := unindent(lines) - body = append(body, "") // to get final \n from Join - return &Code{Text: strings.Join(body, "\n")} -} - -// paragraph returns a paragraph block built from the lines. -// If the lines are link definitions, paragraph adds them to d and returns nil. -func (d *parseDoc) paragraph(lines []string) Block { - // Is this a block of known links? Handle. 
- var defs []*LinkDef - for _, line := range lines { - def, ok := parseLink(line) - if !ok { - goto NoDefs - } - defs = append(defs, def) - } - for _, def := range defs { - d.Links = append(d.Links, def) - if d.links[def.Text] == nil { - d.links[def.Text] = def - } - } - return nil -NoDefs: - - return &Paragraph{Text: []Text{Plain(strings.Join(lines, "\n"))}} -} - -// parseLink parses a single link definition line: -// -// [text]: url -// -// It returns the link definition and whether the line was well formed. -func parseLink(line string) (*LinkDef, bool) { - if line == "" || line[0] != '[' { - return nil, false - } - i := strings.Index(line, "]:") - if i < 0 || i+3 >= len(line) || (line[i+2] != ' ' && line[i+2] != '\t') { - return nil, false - } - - text := line[1:i] - url := strings.TrimSpace(line[i+3:]) - j := strings.Index(url, "://") - if j < 0 || !isScheme(url[:j]) { - return nil, false - } - - // Line has right form and has valid scheme://. - // That's good enough for us - we are not as picky - // about the characters beyond the :// as we are - // when extracting inline URLs from text. - return &LinkDef{Text: text, URL: url}, true -} - -// list returns a list built from the indented lines, -// using forceBlankBefore as the value of the List's ForceBlankBefore field. -func (d *parseDoc) list(lines []string, forceBlankBefore bool) *List { - num, _, _ := listMarker(lines[0]) - var ( - list *List = &List{ForceBlankBefore: forceBlankBefore} - item *ListItem - text []string - ) - flush := func() { - if item != nil { - if para := d.paragraph(text); para != nil { - item.Content = append(item.Content, para) - } - } - text = nil - } - - for _, line := range lines { - if n, after, ok := listMarker(line); ok && (n != "") == (num != "") { - // start new list item - flush() - - item = &ListItem{Number: n} - list.Items = append(list.Items, item) - line = after - } - line = strings.TrimSpace(line) - if line == "" { - list.ForceBlankBetween = true - flush() - continue - } - text = append(text, strings.TrimSpace(line)) - } - flush() - return list -} - -// listMarker parses the line as beginning with a list marker. -// If it can do that, it returns the numeric marker ("" for a bullet list), -// the rest of the line, and ok == true. -// Otherwise, it returns "", "", false. -func listMarker(line string) (num, rest string, ok bool) { - line = strings.TrimSpace(line) - if line == "" { - return "", "", false - } - - // Can we find a marker? - if r, n := utf8.DecodeRuneInString(line); r == '•' || r == '*' || r == '+' || r == '-' { - num, rest = "", line[n:] - } else if '0' <= line[0] && line[0] <= '9' { - n := 1 - for n < len(line) && '0' <= line[n] && line[n] <= '9' { - n++ - } - if n >= len(line) || (line[n] != '.' && line[n] != ')') { - return "", "", false - } - num, rest = line[:n], line[n+1:] - } else { - return "", "", false - } - - if !indented(rest) || strings.TrimSpace(rest) == "" { - return "", "", false - } - - return num, rest, true -} - -// isList reports whether the line is the first line of a list, -// meaning starts with a list marker after any indentation. -// (The caller is responsible for checking the line is indented, as appropriate.) -func isList(line string) bool { - _, _, ok := listMarker(line) - return ok -} - -// parseLinkedText parses text that is allowed to contain explicit links, -// such as [math.Sin] or [Go home page], into a slice of Text items. 
-// -// A “pkg” is only assumed to be a full import path if it starts with -// a domain name (a path element with a dot) or is one of the packages -// from the standard library (“[os]”, “[encoding/json]”, and so on). -// To avoid problems with maps, generics, and array types, doc links -// must be both preceded and followed by punctuation, spaces, tabs, -// or the start or end of a line. An example problem would be treating -// map[ast.Expr]TypeAndValue as containing a link. -func (d *parseDoc) parseLinkedText(text string) []Text { - var out []Text - wrote := 0 - flush := func(i int) { - if wrote < i { - out = d.parseText(out, text[wrote:i], true) - wrote = i - } - } - - start := -1 - var buf []byte - for i := 0; i < len(text); i++ { - c := text[i] - if c == '\n' || c == '\t' { - c = ' ' - } - switch c { - case '[': - start = i - case ']': - if start >= 0 { - if def, ok := d.links[string(buf)]; ok { - def.Used = true - flush(start) - out = append(out, &Link{ - Text: d.parseText(nil, text[start+1:i], false), - URL: def.URL, - }) - wrote = i + 1 - } else if link, ok := d.docLink(text[start+1:i], text[:start], text[i+1:]); ok { - flush(start) - link.Text = d.parseText(nil, text[start+1:i], false) - out = append(out, link) - wrote = i + 1 - } - } - start = -1 - buf = buf[:0] - } - if start >= 0 && i != start { - buf = append(buf, c) - } - } - - flush(len(text)) - return out -} - -// docLink parses text, which was found inside [ ] brackets, -// as a doc link if possible, returning the DocLink and ok == true -// or else nil, false. -// The before and after strings are the text before the [ and after the ] -// on the same line. Doc links must be preceded and followed by -// punctuation, spaces, tabs, or the start or end of a line. -func (d *parseDoc) docLink(text, before, after string) (link *DocLink, ok bool) { - if before != "" { - r, _ := utf8.DecodeLastRuneInString(before) - if !unicode.IsPunct(r) && r != ' ' && r != '\t' && r != '\n' { - return nil, false - } - } - if after != "" { - r, _ := utf8.DecodeRuneInString(after) - if !unicode.IsPunct(r) && r != ' ' && r != '\t' && r != '\n' { - return nil, false - } - } - text = strings.TrimPrefix(text, "*") - pkg, name, ok := splitDocName(text) - var recv string - if ok { - pkg, recv, _ = splitDocName(pkg) - } - if pkg != "" { - if pkg, ok = d.lookupPkg(pkg); !ok { - return nil, false - } - } else { - if ok = d.lookupSym(recv, name); !ok { - return nil, false - } - } - link = &DocLink{ - ImportPath: pkg, - Recv: recv, - Name: name, - } - return link, true -} - -// If text is of the form before.Name, where Name is a capitalized Go identifier, -// then splitDocName returns before, name, true. -// Otherwise it returns text, "", false. -func splitDocName(text string) (before, name string, foundDot bool) { - i := strings.LastIndex(text, ".") - name = text[i+1:] - if !isName(name) { - return text, "", false - } - if i >= 0 { - before = text[:i] - } - return before, name, true -} - -// parseText parses s as text and returns the result of appending -// those parsed Text elements to out. -// parseText does not handle explicit links like [math.Sin] or [Go home page]: -// those are handled by parseLinkedText. -// If autoLink is true, then parseText recognizes URLs and words from d.Words -// and converts those to links as appropriate. 
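The doc-link rules described above (a link such as [math.Sin] must be bracketed by punctuation or whitespace, and a bare package name is only treated as an import path if it contains a dot or is a standard-library package) can be exercised through the public go/doc/comment package, which this vendored copy mirrors. A minimal sketch with made-up input text; the Markdown shown in the comments is indicative rather than exact:

	package main

	import (
		"fmt"
		"go/doc/comment"
	)

	func main() {
		var p comment.Parser // default lookups recognize std-library packages such as "math"
		d := p.Parse("Use [math.Sin] for sines; see the [Go home page] for more.\n\n[Go home page]: https://go.dev\n")

		var pr comment.Printer
		fmt.Print(string(pr.Markdown(d)))
		// Renders roughly as:
		//   Use [math.Sin](/math#Sin) for sines; see the [Go home page](https://go.dev) for more.
		// whereas text like map[ast.Expr]TypeAndValue is not treated as containing a link.
	}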
-func (d *parseDoc) parseText(out []Text, s string, autoLink bool) []Text { - var w strings.Builder - wrote := 0 - writeUntil := func(i int) { - w.WriteString(s[wrote:i]) - wrote = i - } - flush := func(i int) { - writeUntil(i) - if w.Len() > 0 { - out = append(out, Plain(w.String())) - w.Reset() - } - } - for i := 0; i < len(s); { - t := s[i:] - if autoLink { - if url, ok := autoURL(t); ok { - flush(i) - // Note: The old comment parser would look up the URL in words - // and replace the target with words[URL] if it was non-empty. - // That would allow creating links that display as one URL but - // when clicked go to a different URL. Not sure what the point - // of that is, so we're not doing that lookup here. - out = append(out, &Link{Auto: true, Text: []Text{Plain(url)}, URL: url}) - i += len(url) - wrote = i - continue - } - if id, ok := ident(t); ok { - url, italics := d.Words[id] - if !italics { - i += len(id) - continue - } - flush(i) - if url == "" { - out = append(out, Italic(id)) - } else { - out = append(out, &Link{Auto: true, Text: []Text{Italic(id)}, URL: url}) - } - i += len(id) - wrote = i - continue - } - } - switch { - case strings.HasPrefix(t, "``"): - if len(t) >= 3 && t[2] == '`' { - // Do not convert `` inside ```, in case people are mistakenly writing Markdown. - i += 3 - for i < len(t) && t[i] == '`' { - i++ - } - break - } - writeUntil(i) - w.WriteRune('“') - i += 2 - wrote = i - case strings.HasPrefix(t, "''"): - writeUntil(i) - w.WriteRune('”') - i += 2 - wrote = i - default: - i++ - } - } - flush(len(s)) - return out -} - -// autoURL checks whether s begins with a URL that should be hyperlinked. -// If so, it returns the URL, which is a prefix of s, and ok == true. -// Otherwise it returns "", false. -// The caller should skip over the first len(url) bytes of s -// before further processing. -func autoURL(s string) (url string, ok bool) { - // Find the ://. Fast path to pick off non-URL, - // since we call this at every position in the string. - // The shortest possible URL is ftp://x, 7 bytes. - var i int - switch { - case len(s) < 7: - return "", false - case s[3] == ':': - i = 3 - case s[4] == ':': - i = 4 - case s[5] == ':': - i = 5 - case s[6] == ':': - i = 6 - default: - return "", false - } - if i+3 > len(s) || s[i:i+3] != "://" { - return "", false - } - - // Check valid scheme. - if !isScheme(s[:i]) { - return "", false - } - - // Scan host part. Must have at least one byte, - // and must start and end in non-punctuation. - i += 3 - if i >= len(s) || !isHost(s[i]) || isPunct(s[i]) { - return "", false - } - i++ - end := i - for i < len(s) && isHost(s[i]) { - if !isPunct(s[i]) { - end = i + 1 - } - i++ - } - i = end - - // At this point we are definitely returning a URL (scheme://host). - // We just have to find the longest path we can add to it. - // Heuristics abound. - // We allow parens, braces, and brackets, - // but only if they match (#5043, #22285). - // We allow .,:;?! in the path but not at the end, - // to avoid end-of-sentence punctuation (#18139, #16565). 
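As a concrete illustration of these heuristics, balanced parentheses are kept inside an auto-detected URL while trailing sentence punctuation is dropped. The sketch below uses the public go/doc/comment package (sample URLs are made up, and the exact behavior follows the upstream version of this code):

	package main

	import (
		"fmt"
		"go/doc/comment"
	)

	func main() {
		var p comment.Parser
		d := p.Parse("See https://go.dev/doc/comment(section). Or https://go.dev/ref/spec.\n")

		para := d.Content[0].(*comment.Paragraph)
		for _, t := range para.Text {
			if l, ok := t.(*comment.Link); ok {
				fmt.Println("auto link:", l.URL)
			}
		}
		// Expected (roughly):
		//   auto link: https://go.dev/doc/comment(section)   <- balanced parens kept
		//   auto link: https://go.dev/ref/spec                <- trailing "." excluded
	}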
- stk := []byte{} - end = i -Path: - for ; i < len(s); i++ { - if isPunct(s[i]) { - continue - } - if !isPath(s[i]) { - break - } - switch s[i] { - case '(': - stk = append(stk, ')') - case '{': - stk = append(stk, '}') - case '[': - stk = append(stk, ']') - case ')', '}', ']': - if len(stk) == 0 || stk[len(stk)-1] != s[i] { - break Path - } - stk = stk[:len(stk)-1] - } - if len(stk) == 0 { - end = i + 1 - } - } - - return s[:end], true -} - -// isScheme reports whether s is a recognized URL scheme. -// Note that if strings of new length (beyond 3-7) -// are added here, the fast path at the top of autoURL will need updating. -func isScheme(s string) bool { - switch s { - case "file", - "ftp", - "gopher", - "http", - "https", - "mailto", - "nntp": - return true - } - return false -} - -// isHost reports whether c is a byte that can appear in a URL host, -// like www.example.com or user@[::1]:8080 -func isHost(c byte) bool { - // mask is a 128-bit bitmap with 1s for allowed bytes, - // so that the byte c can be tested with a shift and an and. - // If c > 128, then 1<>64)) != 0 -} - -// isPunct reports whether c is a punctuation byte that can appear -// inside a path but not at the end. -func isPunct(c byte) bool { - // mask is a 128-bit bitmap with 1s for allowed bytes, - // so that the byte c can be tested with a shift and an and. - // If c > 128, then 1<>64)) != 0 -} - -// isPath reports whether c is a (non-punctuation) path byte. -func isPath(c byte) bool { - // mask is a 128-bit bitmap with 1s for allowed bytes, - // so that the byte c can be tested with a shift and an and. - // If c > 128, then 1<>64)) != 0 -} - -// isName reports whether s is a capitalized Go identifier (like Name). -func isName(s string) bool { - t, ok := ident(s) - if !ok || t != s { - return false - } - r, _ := utf8.DecodeRuneInString(s) - return unicode.IsUpper(r) -} - -// ident checks whether s begins with a Go identifier. -// If so, it returns the identifier, which is a prefix of s, and ok == true. -// Otherwise it returns "", false. -// The caller should skip over the first len(id) bytes of s -// before further processing. -func ident(s string) (id string, ok bool) { - // Scan [\pL_][\pL_0-9]* - n := 0 - for n < len(s) { - if c := s[n]; c < utf8.RuneSelf { - if isIdentASCII(c) && (n > 0 || c < '0' || c > '9') { - n++ - continue - } - break - } - r, nr := utf8.DecodeRuneInString(s[n:]) - if unicode.IsLetter(r) { - n += nr - continue - } - break - } - return s[:n], n > 0 -} - -// isIdentASCII reports whether c is an ASCII identifier byte. -func isIdentASCII(c byte) bool { - // mask is a 128-bit bitmap with 1s for allowed bytes, - // so that the byte c can be tested with a shift and an and. - // If c > 128, then 1<>64)) != 0 -} - -// validImportPath reports whether path is a valid import path. -// It is a lightly edited copy of golang.org/x/mod/module.CheckImportPath. -func validImportPath(path string) bool { - if !utf8.ValidString(path) { - return false - } - if path == "" { - return false - } - if path[0] == '-' { - return false - } - if strings.Contains(path, "//") { - return false - } - if path[len(path)-1] == '/' { - return false - } - elemStart := 0 - for i, r := range path { - if r == '/' { - if !validImportPathElem(path[elemStart:i]) { - return false - } - elemStart = i + 1 - } - } - return validImportPathElem(path[elemStart:]) -} - -func validImportPathElem(elem string) bool { - if elem == "" || elem[0] == '.' || elem[len(elem)-1] == '.' 
{ - return false - } - for i := 0; i < len(elem); i++ { - if !importPathOK(elem[i]) { - return false - } - } - return true -} - -func importPathOK(c byte) bool { - // mask is a 128-bit bitmap with 1s for allowed bytes, - // so that the byte c can be tested with a shift and an and. - // If c > 128, then 1<>64)) != 0 -} diff --git a/vendor/mvdan.cc/gofumpt/internal/govendor/go/doc/comment/print.go b/vendor/mvdan.cc/gofumpt/internal/govendor/go/doc/comment/print.go deleted file mode 100644 index e1c070d5a..000000000 --- a/vendor/mvdan.cc/gofumpt/internal/govendor/go/doc/comment/print.go +++ /dev/null @@ -1,288 +0,0 @@ -// Copyright 2022 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package comment - -import ( - "bytes" - "fmt" - "strings" -) - -// A Printer is a doc comment printer. -// The fields in the struct can be filled in before calling -// any of the printing methods -// in order to customize the details of the printing process. -type Printer struct { - // HeadingLevel is the nesting level used for - // HTML and Markdown headings. - // If HeadingLevel is zero, it defaults to level 3, - // meaning to use
<h3>
    and ###. - HeadingLevel int - - // HeadingID is a function that computes the heading ID - // (anchor tag) to use for the heading h when generating - // HTML and Markdown. If HeadingID returns an empty string, - // then the heading ID is omitted. - // If HeadingID is nil, h.DefaultID is used. - HeadingID func(h *Heading) string - - // DocLinkURL is a function that computes the URL for the given DocLink. - // If DocLinkURL is nil, then link.DefaultURL(p.DocLinkBaseURL) is used. - DocLinkURL func(link *DocLink) string - - // DocLinkBaseURL is used when DocLinkURL is nil, - // passed to [DocLink.DefaultURL] to construct a DocLink's URL. - // See that method's documentation for details. - DocLinkBaseURL string - - // TextPrefix is a prefix to print at the start of every line - // when generating text output using the Text method. - TextPrefix string - - // TextCodePrefix is the prefix to print at the start of each - // preformatted (code block) line when generating text output, - // instead of (not in addition to) TextPrefix. - // If TextCodePrefix is the empty string, it defaults to TextPrefix+"\t". - TextCodePrefix string - - // TextWidth is the maximum width text line to generate, - // measured in Unicode code points, - // excluding TextPrefix and the newline character. - // If TextWidth is zero, it defaults to 80 minus the number of code points in TextPrefix. - // If TextWidth is negative, there is no limit. - TextWidth int -} - -func (p *Printer) headingLevel() int { - if p.HeadingLevel <= 0 { - return 3 - } - return p.HeadingLevel -} - -func (p *Printer) headingID(h *Heading) string { - if p.HeadingID == nil { - return h.DefaultID() - } - return p.HeadingID(h) -} - -func (p *Printer) docLinkURL(link *DocLink) string { - if p.DocLinkURL != nil { - return p.DocLinkURL(link) - } - return link.DefaultURL(p.DocLinkBaseURL) -} - -// DefaultURL constructs and returns the documentation URL for l, -// using baseURL as a prefix for links to other packages. -// -// The possible forms returned by DefaultURL are: -// - baseURL/ImportPath, for a link to another package -// - baseURL/ImportPath#Name, for a link to a const, func, type, or var in another package -// - baseURL/ImportPath#Recv.Name, for a link to a method in another package -// - #Name, for a link to a const, func, type, or var in this package -// - #Recv.Name, for a link to a method in this package -// -// If baseURL ends in a trailing slash, then DefaultURL inserts -// a slash between ImportPath and # in the anchored forms. -// For example, here are some baseURL values and URLs they can generate: -// -// "/pkg/" → "/pkg/math/#Sqrt" -// "/pkg" → "/pkg/math#Sqrt" -// "/" → "/math/#Sqrt" -// "" → "/math#Sqrt" -func (l *DocLink) DefaultURL(baseURL string) string { - if l.ImportPath != "" { - slash := "" - if strings.HasSuffix(baseURL, "/") { - slash = "/" - } else { - baseURL += "/" - } - switch { - case l.Name == "": - return baseURL + l.ImportPath + slash - case l.Recv != "": - return baseURL + l.ImportPath + slash + "#" + l.Recv + "." + l.Name - default: - return baseURL + l.ImportPath + slash + "#" + l.Name - } - } - if l.Recv != "" { - return "#" + l.Recv + "." + l.Name - } - return "#" + l.Name -} - -// DefaultID returns the default anchor ID for the heading h. -// -// The default anchor ID is constructed by converting every -// rune that is not alphanumeric ASCII to an underscore -// and then adding the prefix “hdr-”. -// For example, if the heading text is “Go Doc Comments”, -// the default ID is “hdr-Go_Doc_Comments”. 
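A short sketch of how these defaults behave, using the public go/doc/comment types that this vendored copy tracks; the expected strings in the comments follow directly from the rules above:

	package main

	import (
		"fmt"
		"go/doc/comment"
	)

	func main() {
		l := &comment.DocLink{ImportPath: "math", Name: "Sqrt"}
		fmt.Println(l.DefaultURL("/pkg/")) // /pkg/math/#Sqrt
		fmt.Println(l.DefaultURL("/pkg"))  // /pkg/math#Sqrt
		fmt.Println(l.DefaultURL(""))      // /math#Sqrt

		local := &comment.DocLink{Recv: "Buffer", Name: "Len"}
		fmt.Println(local.DefaultURL("/pkg/")) // #Buffer.Len (link within the same package)

		h := &comment.Heading{Text: []comment.Text{comment.Plain("Go Doc Comments")}}
		fmt.Println(h.DefaultID()) // hdr-Go_Doc_Comments
	}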
-func (h *Heading) DefaultID() string { - // Note: The “hdr-” prefix is important to avoid DOM clobbering attacks. - // See https://pkg.go.dev/github.com/google/safehtml#Identifier. - var out strings.Builder - var p textPrinter - p.oneLongLine(&out, h.Text) - s := strings.TrimSpace(out.String()) - if s == "" { - return "" - } - out.Reset() - out.WriteString("hdr-") - for _, r := range s { - if r < 0x80 && isIdentASCII(byte(r)) { - out.WriteByte(byte(r)) - } else { - out.WriteByte('_') - } - } - return out.String() -} - -type commentPrinter struct { - *Printer -} - -// Comment returns the standard Go formatting of the Doc, -// without any comment markers. -func (p *Printer) Comment(d *Doc) []byte { - cp := &commentPrinter{Printer: p} - var out bytes.Buffer - for i, x := range d.Content { - if i > 0 && blankBefore(x) { - out.WriteString("\n") - } - cp.block(&out, x) - } - - // Print one block containing all the link definitions that were used, - // and then a second block containing all the unused ones. - // This makes it easy to clean up the unused ones: gofmt and - // delete the final block. And it's a nice visual signal without - // affecting the way the comment formats for users. - for i := 0; i < 2; i++ { - used := i == 0 - first := true - for _, def := range d.Links { - if def.Used == used { - if first { - out.WriteString("\n") - first = false - } - out.WriteString("[") - out.WriteString(def.Text) - out.WriteString("]: ") - out.WriteString(def.URL) - out.WriteString("\n") - } - } - } - - return out.Bytes() -} - -// blankBefore reports whether the block x requires a blank line before it. -// All blocks do, except for Lists that return false from x.BlankBefore(). -func blankBefore(x Block) bool { - if x, ok := x.(*List); ok { - return x.BlankBefore() - } - return true -} - -// block prints the block x to out. -func (p *commentPrinter) block(out *bytes.Buffer, x Block) { - switch x := x.(type) { - default: - fmt.Fprintf(out, "?%T", x) - - case *Paragraph: - p.text(out, "", x.Text) - out.WriteString("\n") - - case *Heading: - out.WriteString("# ") - p.text(out, "", x.Text) - out.WriteString("\n") - - case *Code: - md := x.Text - for md != "" { - var line string - line, md, _ = strings.Cut(md, "\n") - if line != "" { - out.WriteString("\t") - out.WriteString(line) - } - out.WriteString("\n") - } - - case *List: - loose := x.BlankBetween() - for i, item := range x.Items { - if i > 0 && loose { - out.WriteString("\n") - } - out.WriteString(" ") - if item.Number == "" { - out.WriteString(" - ") - } else { - out.WriteString(item.Number) - out.WriteString(". ") - } - for i, blk := range item.Content { - const fourSpace = " " - if i > 0 { - out.WriteString("\n" + fourSpace) - } - p.text(out, fourSpace, blk.(*Paragraph).Text) - out.WriteString("\n") - } - } - } -} - -// text prints the text sequence x to out. -func (p *commentPrinter) text(out *bytes.Buffer, indent string, x []Text) { - for _, t := range x { - switch t := t.(type) { - case Plain: - p.indent(out, indent, string(t)) - case Italic: - p.indent(out, indent, string(t)) - case *Link: - if t.Auto { - p.text(out, indent, t.Text) - } else { - out.WriteString("[") - p.text(out, indent, t.Text) - out.WriteString("]") - } - case *DocLink: - out.WriteString("[") - p.text(out, indent, t.Text) - out.WriteString("]") - } - } -} - -// indent prints s to out, indenting with the indent string -// after each newline in s. 
-func (p *commentPrinter) indent(out *bytes.Buffer, indent, s string) { - for s != "" { - line, rest, ok := strings.Cut(s, "\n") - out.WriteString(line) - if ok { - out.WriteString("\n") - out.WriteString(indent) - } - s = rest - } -} diff --git a/vendor/mvdan.cc/gofumpt/internal/govendor/go/doc/comment/std.go b/vendor/mvdan.cc/gofumpt/internal/govendor/go/doc/comment/std.go deleted file mode 100644 index d128eda8c..000000000 --- a/vendor/mvdan.cc/gofumpt/internal/govendor/go/doc/comment/std.go +++ /dev/null @@ -1,47 +0,0 @@ -// Copyright 2022 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Code generated by 'go generate' DO NOT EDIT. -//disabled go:generate ./mkstd.sh - -package comment - -var stdPkgs = []string{ - "bufio", - "bytes", - "cmp", - "context", - "crypto", - "embed", - "encoding", - "errors", - "expvar", - "flag", - "fmt", - "hash", - "html", - "image", - "io", - "log", - "maps", - "math", - "mime", - "net", - "os", - "path", - "plugin", - "reflect", - "regexp", - "runtime", - "slices", - "sort", - "strconv", - "strings", - "sync", - "syscall", - "testing", - "time", - "unicode", - "unsafe", -} diff --git a/vendor/mvdan.cc/gofumpt/internal/govendor/go/doc/comment/text.go b/vendor/mvdan.cc/gofumpt/internal/govendor/go/doc/comment/text.go deleted file mode 100644 index 6f9c2e201..000000000 --- a/vendor/mvdan.cc/gofumpt/internal/govendor/go/doc/comment/text.go +++ /dev/null @@ -1,337 +0,0 @@ -// Copyright 2022 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package comment - -import ( - "bytes" - "fmt" - "sort" - "strings" - "unicode/utf8" -) - -// A textPrinter holds the state needed for printing a Doc as plain text. -type textPrinter struct { - *Printer - long strings.Builder - prefix string - codePrefix string - width int -} - -// Text returns a textual formatting of the Doc. -// See the [Printer] documentation for ways to customize the text output. -func (p *Printer) Text(d *Doc) []byte { - tp := &textPrinter{ - Printer: p, - prefix: p.TextPrefix, - codePrefix: p.TextCodePrefix, - width: p.TextWidth, - } - if tp.codePrefix == "" { - tp.codePrefix = p.TextPrefix + "\t" - } - if tp.width == 0 { - tp.width = 80 - utf8.RuneCountInString(tp.prefix) - } - - var out bytes.Buffer - for i, x := range d.Content { - if i > 0 && blankBefore(x) { - out.WriteString(tp.prefix) - writeNL(&out) - } - tp.block(&out, x) - } - anyUsed := false - for _, def := range d.Links { - if def.Used { - anyUsed = true - break - } - } - if anyUsed { - writeNL(&out) - for _, def := range d.Links { - if def.Used { - fmt.Fprintf(&out, "[%s]: %s\n", def.Text, def.URL) - } - } - } - return out.Bytes() -} - -// writeNL calls out.WriteByte('\n') -// but first trims trailing spaces on the previous line. -func writeNL(out *bytes.Buffer) { - // Trim trailing spaces. - data := out.Bytes() - n := 0 - for n < len(data) && (data[len(data)-n-1] == ' ' || data[len(data)-n-1] == '\t') { - n++ - } - if n > 0 { - out.Truncate(len(data) - n) - } - out.WriteByte('\n') -} - -// block prints the block x to out. 
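To see the effect of the TextPrefix, TextCodePrefix, and TextWidth fields described above, here is a minimal sketch against the public go/doc/comment API (the sample doc text is made up):

	package main

	import (
		"go/doc/comment"
		"os"
	)

	func main() {
		var p comment.Parser
		d := p.Parse("Package demo wraps long paragraphs to the configured width and indents code blocks.\n\n\tfmt.Println(\"code line\")\n")

		pr := comment.Printer{
			TextPrefix:     "// ",  // every output line starts with a comment marker
			TextCodePrefix: "//\t", // code blocks use this instead of TextPrefix
			TextWidth:      40,     // wrap at 40 runes, excluding the prefix
		}
		os.Stdout.Write(pr.Text(d))
	}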
-func (p *textPrinter) block(out *bytes.Buffer, x Block) { - switch x := x.(type) { - default: - fmt.Fprintf(out, "?%T\n", x) - - case *Paragraph: - out.WriteString(p.prefix) - p.text(out, "", x.Text) - - case *Heading: - out.WriteString(p.prefix) - out.WriteString("# ") - p.text(out, "", x.Text) - - case *Code: - text := x.Text - for text != "" { - var line string - line, text, _ = strings.Cut(text, "\n") - if line != "" { - out.WriteString(p.codePrefix) - out.WriteString(line) - } - writeNL(out) - } - - case *List: - loose := x.BlankBetween() - for i, item := range x.Items { - if i > 0 && loose { - out.WriteString(p.prefix) - writeNL(out) - } - out.WriteString(p.prefix) - out.WriteString(" ") - if item.Number == "" { - out.WriteString(" - ") - } else { - out.WriteString(item.Number) - out.WriteString(". ") - } - for i, blk := range item.Content { - const fourSpace = " " - if i > 0 { - writeNL(out) - out.WriteString(p.prefix) - out.WriteString(fourSpace) - } - p.text(out, fourSpace, blk.(*Paragraph).Text) - } - } - } -} - -// text prints the text sequence x to out. -func (p *textPrinter) text(out *bytes.Buffer, indent string, x []Text) { - p.oneLongLine(&p.long, x) - words := strings.Fields(p.long.String()) - p.long.Reset() - - var seq []int - if p.width < 0 || len(words) == 0 { - seq = []int{0, len(words)} // one long line - } else { - seq = wrap(words, p.width-utf8.RuneCountInString(indent)) - } - for i := 0; i+1 < len(seq); i++ { - if i > 0 { - out.WriteString(p.prefix) - out.WriteString(indent) - } - for j, w := range words[seq[i]:seq[i+1]] { - if j > 0 { - out.WriteString(" ") - } - out.WriteString(w) - } - writeNL(out) - } -} - -// oneLongLine prints the text sequence x to out as one long line, -// without worrying about line wrapping. -// Explicit links have the [ ] dropped to improve readability. -func (p *textPrinter) oneLongLine(out *strings.Builder, x []Text) { - for _, t := range x { - switch t := t.(type) { - case Plain: - out.WriteString(string(t)) - case Italic: - out.WriteString(string(t)) - case *Link: - p.oneLongLine(out, t.Text) - case *DocLink: - p.oneLongLine(out, t.Text) - } - } -} - -// wrap wraps words into lines of at most max runes, -// minimizing the sum of the squares of the leftover lengths -// at the end of each line (except the last, of course), -// with a preference for ending lines at punctuation (.,:;). -// -// The returned slice gives the indexes of the first words -// on each line in the wrapped text with a final entry of len(words). -// Thus the lines are words[seq[0]:seq[1]], words[seq[1]:seq[2]], -// ..., words[seq[len(seq)-2]:seq[len(seq)-1]]. -// -// The implementation runs in O(n log n) time, where n = len(words), -// using the algorithm described in D. S. Hirschberg and L. L. Larmore, -// “[The least weight subsequence problem],” FOCS 1985, pp. 137-143. -// -// [The least weight subsequence problem]: https://doi.org/10.1109/SFCS.1985.60 -func wrap(words []string, max int) (seq []int) { - // The algorithm requires that our scoring function be concave, - // meaning that for all i₀ ≤ i₁ < j₀ ≤ j₁, - // weight(i₀, j₀) + weight(i₁, j₁) ≤ weight(i₀, j₁) + weight(i₁, j₀). - // - // Our weights are two-element pairs [hi, lo] - // ordered by elementwise comparison. - // The hi entry counts the weight for lines that are longer than max, - // and the lo entry counts the weight for lines that are not. - // This forces the algorithm to first minimize the number of lines - // that are longer than max, which correspond to lines with - // single very long words. 
Having done that, it can move on to - // minimizing the lo score, which is more interesting. - // - // The lo score is the sum for each line of the square of the - // number of spaces remaining at the end of the line and a - // penalty of 64 given out for not ending the line in a - // punctuation character (.,:;). - // The penalty is somewhat arbitrarily chosen by trying - // different amounts and judging how nice the wrapped text looks. - // Roughly speaking, using 64 means that we are willing to - // end a line with eight blank spaces in order to end at a - // punctuation character, even if the next word would fit in - // those spaces. - // - // We care about ending in punctuation characters because - // it makes the text easier to skim if not too many sentences - // or phrases begin with a single word on the previous line. - - // A score is the score (also called weight) for a given line. - // add and cmp add and compare scores. - type score struct { - hi int64 - lo int64 - } - add := func(s, t score) score { return score{s.hi + t.hi, s.lo + t.lo} } - cmp := func(s, t score) int { - switch { - case s.hi < t.hi: - return -1 - case s.hi > t.hi: - return +1 - case s.lo < t.lo: - return -1 - case s.lo > t.lo: - return +1 - } - return 0 - } - - // total[j] is the total number of runes - // (including separating spaces) in words[:j]. - total := make([]int, len(words)+1) - total[0] = 0 - for i, s := range words { - total[1+i] = total[i] + utf8.RuneCountInString(s) + 1 - } - - // weight returns weight(i, j). - weight := func(i, j int) score { - // On the last line, there is zero weight for being too short. - n := total[j] - 1 - total[i] - if j == len(words) && n <= max { - return score{0, 0} - } - - // Otherwise the weight is the penalty plus the square of the number of - // characters remaining on the line or by which the line goes over. - // In the latter case, that value goes in the hi part of the score. - // (See note above.) - p := wrapPenalty(words[j-1]) - v := int64(max-n) * int64(max-n) - if n > max { - return score{v, p} - } - return score{0, v + p} - } - - // The rest of this function is “The Basic Algorithm” from - // Hirschberg and Larmore's conference paper, - // using the same names as in the paper. - f := []score{{0, 0}} - g := func(i, j int) score { return add(f[i], weight(i, j)) } - - bridge := func(a, b, c int) bool { - k := c + sort.Search(len(words)+1-c, func(k int) bool { - k += c - return cmp(g(a, k), g(b, k)) > 0 - }) - if k > len(words) { - return true - } - return cmp(g(c, k), g(b, k)) <= 0 - } - - // d is a one-ended deque implemented as a slice. - d := make([]int, 1, len(words)) - d[0] = 0 - bestleft := make([]int, 1, len(words)) - bestleft[0] = -1 - for m := 1; m < len(words); m++ { - f = append(f, g(d[0], m)) - bestleft = append(bestleft, d[0]) - for len(d) > 1 && cmp(g(d[1], m+1), g(d[0], m+1)) <= 0 { - d = d[1:] // “Retire” - } - for len(d) > 1 && bridge(d[len(d)-2], d[len(d)-1], m) { - d = d[:len(d)-1] // “Fire” - } - if cmp(g(m, len(words)), g(d[len(d)-1], len(words))) < 0 { - d = append(d, m) // “Hire” - // The next few lines are not in the paper but are necessary - // to handle two-word inputs correctly. It appears to be - // just a bug in the paper's pseudocode. - if len(d) == 2 && cmp(g(d[1], m+1), g(d[0], m+1)) <= 0 { - d = d[1:] - } - } - } - bestleft = append(bestleft, d[0]) - - // Recover least weight sequence from bestleft. 
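For intuition, the per-line cost just described can be written out directly: a line that stops k runes short costs k², plus a flat 64 when it does not end in punctuation, so stopping eight runes early at a "." costs the same as filling the line but breaking mid-phrase. The sketch below is an illustrative paraphrase of the removed scoring, not part of any exported API:

	package main

	import "fmt"

	// lineCost paraphrases the removed lo score for a single non-final line:
	// (runes left over)² plus 64 unless the line's last word ends in . , : or ;.
	func lineCost(width, lineLen int, lastWord string) int64 {
		left := int64(width - lineLen)
		cost := left * left
		switch lastWord[len(lastWord)-1] {
		case '.', ',', ':', ';':
			// ends at punctuation: no penalty
		default:
			cost += 64
		}
		return cost
	}

	func main() {
		fmt.Println(lineCost(80, 72, "sentence.")) // 64: eight runes short, but ends in "."
		fmt.Println(lineCost(80, 80, "word"))      // 64: full line, yet breaks mid-phrase
	}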
- n := 1 - for m := len(words); m > 0; m = bestleft[m] { - n++ - } - seq = make([]int, n) - for m := len(words); m > 0; m = bestleft[m] { - n-- - seq[n] = m - } - return seq -} - -// wrapPenalty is the penalty for inserting a line break after word s. -func wrapPenalty(s string) int64 { - switch s[len(s)-1] { - case '.', ',', ':', ';': - return 0 - } - return 64 -} diff --git a/vendor/mvdan.cc/gofumpt/internal/govendor/go/format/format.go b/vendor/mvdan.cc/gofumpt/internal/govendor/go/format/format.go deleted file mode 100644 index 5540686ed..000000000 --- a/vendor/mvdan.cc/gofumpt/internal/govendor/go/format/format.go +++ /dev/null @@ -1,134 +0,0 @@ -// Copyright 2012 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Package format implements standard formatting of Go source. -// -// Note that formatting of Go source code changes over time, so tools relying on -// consistent formatting should execute a specific version of the gofmt binary -// instead of using this package. That way, the formatting will be stable, and -// the tools won't need to be recompiled each time gofmt changes. -// -// For example, pre-submit checks that use this package directly would behave -// differently depending on what Go version each developer uses, causing the -// check to be inherently fragile. -package format - -import ( - "bytes" - "fmt" - "go/ast" - "go/parser" - "go/token" - "io" - - "mvdan.cc/gofumpt/internal/govendor/go/printer" -) - -// Keep these in sync with cmd/gofmt/gofmt.go. -const ( - tabWidth = 8 - printerMode = printer.UseSpaces | printer.TabIndent | printerNormalizeNumbers - - // printerNormalizeNumbers means to canonicalize number literal prefixes - // and exponents while printing. See https://golang.org/doc/go1.13#gofmt. - // - // This value is defined in mvdan.cc/gofumpt/internal/govendor/go/printer specifically for mvdan.cc/gofumpt/internal/govendor/go/format and cmd/gofmt. - printerNormalizeNumbers = 1 << 30 -) - -var config = printer.Config{Mode: printerMode, Tabwidth: tabWidth} - -const parserMode = parser.ParseComments | parser.SkipObjectResolution - -// Node formats node in canonical gofmt style and writes the result to dst. -// -// The node type must be *ast.File, *printer.CommentedNode, []ast.Decl, -// []ast.Stmt, or assignment-compatible to ast.Expr, ast.Decl, ast.Spec, -// or ast.Stmt. Node does not modify node. Imports are not sorted for -// nodes representing partial source files (for instance, if the node is -// not an *ast.File or a *printer.CommentedNode not wrapping an *ast.File). -// -// The function may return early (before the entire result is written) -// and return a formatting error, for instance due to an incorrect AST. -func Node(dst io.Writer, fset *token.FileSet, node any) error { - // Determine if we have a complete source file (file != nil). - var file *ast.File - var cnode *printer.CommentedNode - switch n := node.(type) { - case *ast.File: - file = n - case *printer.CommentedNode: - if f, ok := n.Node.(*ast.File); ok { - file = f - cnode = n - } - } - - // Sort imports if necessary. - if file != nil && hasUnsortedImports(file) { - // Make a copy of the AST because ast.SortImports is destructive. - // TODO(gri) Do this more efficiently. - var buf bytes.Buffer - err := config.Fprint(&buf, fset, file) - if err != nil { - return err - } - file, err = parser.ParseFile(fset, "", buf.Bytes(), parserMode) - if err != nil { - // We should never get here. 
If we do, provide good diagnostic. - return fmt.Errorf("format.Node internal error (%s)", err) - } - ast.SortImports(fset, file) - - // Use new file with sorted imports. - node = file - if cnode != nil { - node = &printer.CommentedNode{Node: file, Comments: cnode.Comments} - } - } - - return config.Fprint(dst, fset, node) -} - -// Source formats src in canonical gofmt style and returns the result -// or an (I/O or syntax) error. src is expected to be a syntactically -// correct Go source file, or a list of Go declarations or statements. -// -// If src is a partial source file, the leading and trailing space of src -// is applied to the result (such that it has the same leading and trailing -// space as src), and the result is indented by the same amount as the first -// line of src containing code. Imports are not sorted for partial source files. -func Source(src []byte) ([]byte, error) { - fset := token.NewFileSet() - file, sourceAdj, indentAdj, err := parse(fset, "", src, true) - if err != nil { - return nil, err - } - - if sourceAdj == nil { - // Complete source file. - // TODO(gri) consider doing this always. - ast.SortImports(fset, file) - } - - return format(fset, file, sourceAdj, indentAdj, src, config) -} - -func hasUnsortedImports(file *ast.File) bool { - for _, d := range file.Decls { - d, ok := d.(*ast.GenDecl) - if !ok || d.Tok != token.IMPORT { - // Not an import declaration, so we're done. - // Imports are always first. - return false - } - if d.Lparen.IsValid() { - // For now assume all grouped imports are unsorted. - // TODO(gri) Should check if they are sorted already. - return true - } - // Ungrouped imports are sorted by default. - } - return false -} diff --git a/vendor/mvdan.cc/gofumpt/internal/govendor/go/format/internal.go b/vendor/mvdan.cc/gofumpt/internal/govendor/go/format/internal.go deleted file mode 100644 index df0358714..000000000 --- a/vendor/mvdan.cc/gofumpt/internal/govendor/go/format/internal.go +++ /dev/null @@ -1,177 +0,0 @@ -// Copyright 2015 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// TODO(gri): This file and the file src/cmd/gofmt/internal.go are -// the same (but for this comment and the package name). Do not modify -// one without the other. Determine if we can factor out functionality -// in a public API. See also #11844 for context. - -package format - -import ( - "bytes" - "go/ast" - "go/parser" - "go/token" - "strings" - - "mvdan.cc/gofumpt/internal/govendor/go/printer" -) - -// parse parses src, which was read from the named file, -// as a Go source file, declaration, or statement list. -func parse(fset *token.FileSet, filename string, src []byte, fragmentOk bool) ( - file *ast.File, - sourceAdj func(src []byte, indent int) []byte, - indentAdj int, - err error, -) { - // Try as whole source file. - file, err = parser.ParseFile(fset, filename, src, parserMode) - // If there's no error, return. If the error is that the source file didn't begin with a - // package line and source fragments are ok, fall through to - // try as a source fragment. Stop and return on any other error. - if err == nil || !fragmentOk || !strings.Contains(err.Error(), "expected 'package'") { - return - } - - // If this is a declaration list, make it a source file - // by inserting a package clause. - // Insert using a ';', not a newline, so that the line numbers - // in psrc match the ones in src. - psrc := append([]byte("package p;"), src...) 
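The user-visible effect of this fragment handling can be seen through the standard go/format package, which this vendored file mirrors: a statement list is formatted as-is and keeps the indentation of its first code line. A minimal sketch; the output comment is approximate:

	package main

	import (
		"fmt"
		"go/format"
	)

	func main() {
		frag := []byte("\tx:=   []int{1,2 ,3}\n\tfmt.Println( x )\n")
		out, err := format.Source(frag) // not a full file: formatted as a statement list
		if err != nil {
			panic(err)
		}
		fmt.Printf("%s", out)
		// Approximately:
		//	x := []int{1, 2, 3}
		//	fmt.Println(x)
	}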
- file, err = parser.ParseFile(fset, filename, psrc, parserMode) - if err == nil { - sourceAdj = func(src []byte, indent int) []byte { - // Remove the package clause. - // Gofmt has turned the ';' into a '\n'. - src = src[indent+len("package p\n"):] - return bytes.TrimSpace(src) - } - return - } - // If the error is that the source file didn't begin with a - // declaration, fall through to try as a statement list. - // Stop and return on any other error. - if !strings.Contains(err.Error(), "expected declaration") { - return - } - - // If this is a statement list, make it a source file - // by inserting a package clause and turning the list - // into a function body. This handles expressions too. - // Insert using a ';', not a newline, so that the line numbers - // in fsrc match the ones in src. Add an extra '\n' before the '}' - // to make sure comments are flushed before the '}'. - fsrc := append(append([]byte("package p; func _() {"), src...), '\n', '\n', '}') - file, err = parser.ParseFile(fset, filename, fsrc, parserMode) - if err == nil { - sourceAdj = func(src []byte, indent int) []byte { - // Cap adjusted indent to zero. - if indent < 0 { - indent = 0 - } - // Remove the wrapping. - // Gofmt has turned the "; " into a "\n\n". - // There will be two non-blank lines with indent, hence 2*indent. - src = src[2*indent+len("package p\n\nfunc _() {"):] - // Remove only the "}\n" suffix: remaining whitespaces will be trimmed anyway - src = src[:len(src)-len("}\n")] - return bytes.TrimSpace(src) - } - // Gofmt has also indented the function body one level. - // Adjust that with indentAdj. - indentAdj = -1 - } - - // Succeeded, or out of options. - return -} - -// format formats the given package file originally obtained from src -// and adjusts the result based on the original source via sourceAdj -// and indentAdj. -func format( - fset *token.FileSet, - file *ast.File, - sourceAdj func(src []byte, indent int) []byte, - indentAdj int, - src []byte, - cfg printer.Config, -) ([]byte, error) { - if sourceAdj == nil { - // Complete source file. - var buf bytes.Buffer - err := cfg.Fprint(&buf, fset, file) - if err != nil { - return nil, err - } - return buf.Bytes(), nil - } - - // Partial source file. - // Determine and prepend leading space. - i, j := 0, 0 - for j < len(src) && isSpace(src[j]) { - if src[j] == '\n' { - i = j + 1 // byte offset of last line in leading space - } - j++ - } - var res []byte - res = append(res, src[:i]...) - - // Determine and prepend indentation of first code line. - // Spaces are ignored unless there are no tabs, - // in which case spaces count as one tab. - indent := 0 - hasSpace := false - for _, b := range src[i:j] { - switch b { - case ' ': - hasSpace = true - case '\t': - indent++ - } - } - if indent == 0 && hasSpace { - indent = 1 - } - for i := 0; i < indent; i++ { - res = append(res, '\t') - } - - // Format the source. - // Write it without any leading and trailing space. - cfg.Indent = indent + indentAdj - var buf bytes.Buffer - err := cfg.Fprint(&buf, fset, file) - if err != nil { - return nil, err - } - out := sourceAdj(buf.Bytes(), cfg.Indent) - - // If the adjusted output is empty, the source - // was empty but (possibly) for white space. - // The result is the incoming source. - if len(out) == 0 { - return src, nil - } - - // Otherwise, append output to leading space. - res = append(res, out...) - - // Determine and append trailing space. 
- i = len(src) - for i > 0 && isSpace(src[i-1]) { - i-- - } - return append(res, src[i:]...), nil -} - -// isSpace reports whether the byte is a space character. -// isSpace defines a space as being among the following bytes: ' ', '\t', '\n' and '\r'. -func isSpace(b byte) bool { - return b == ' ' || b == '\t' || b == '\n' || b == '\r' -} diff --git a/vendor/mvdan.cc/gofumpt/internal/govendor/go/printer/comment.go b/vendor/mvdan.cc/gofumpt/internal/govendor/go/printer/comment.go deleted file mode 100644 index 1f0e7df9d..000000000 --- a/vendor/mvdan.cc/gofumpt/internal/govendor/go/printer/comment.go +++ /dev/null @@ -1,156 +0,0 @@ -// Copyright 2022 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package printer - -import ( - "go/ast" - "strings" - - "mvdan.cc/gofumpt/internal/govendor/go/doc/comment" -) - -// formatDocComment reformats the doc comment list, -// returning the canonical formatting. -func formatDocComment(list []*ast.Comment) []*ast.Comment { - // Extract comment text (removing comment markers). - var kind, text string - var directives []*ast.Comment - if len(list) == 1 && strings.HasPrefix(list[0].Text, "/*") { - kind = "/*" - text = list[0].Text - if !strings.Contains(text, "\n") || allStars(text) { - // Single-line /* .. */ comment in doc comment position, - // or multiline old-style comment like - // /* - // * Comment - // * text here. - // */ - // Should not happen, since it will not work well as a - // doc comment, but if it does, just ignore: - // reformatting it will only make the situation worse. - return list - } - text = text[2 : len(text)-2] // cut /* and */ - } else if strings.HasPrefix(list[0].Text, "//") { - kind = "//" - var b strings.Builder - for _, c := range list { - after, found := strings.CutPrefix(c.Text, "//") - if !found { - return list - } - // Accumulate //go:build etc lines separately. - if isDirective(after) { - directives = append(directives, c) - continue - } - b.WriteString(strings.TrimPrefix(after, " ")) - b.WriteString("\n") - } - text = b.String() - } else { - // Not sure what this is, so leave alone. - return list - } - - if text == "" { - return list - } - - // Parse comment and reformat as text. - var p comment.Parser - d := p.Parse(text) - - var pr comment.Printer - text = string(pr.Comment(d)) - - // For /* */ comment, return one big comment with text inside. - slash := list[0].Slash - if kind == "/*" { - c := &ast.Comment{ - Slash: slash, - Text: "/*\n" + text + "*/", - } - return []*ast.Comment{c} - } - - // For // comment, return sequence of // lines. - var out []*ast.Comment - for text != "" { - var line string - line, text, _ = strings.Cut(text, "\n") - if line == "" { - line = "//" - } else if strings.HasPrefix(line, "\t") { - line = "//" + line - } else { - line = "// " + line - } - out = append(out, &ast.Comment{ - Slash: slash, - Text: line, - }) - } - if len(directives) > 0 { - out = append(out, &ast.Comment{ - Slash: slash, - Text: "//", - }) - for _, c := range directives { - out = append(out, &ast.Comment{ - Slash: slash, - Text: c.Text, - }) - } - } - return out -} - -// isDirective reports whether c is a comment directive. -// See go.dev/issue/37974. -// This code is also in go/ast. -func isDirective(c string) bool { - // "//line " is a line directive. - // "//extern " is for gccgo. - // "//export " is for cgo. - // (The // has been removed.) 
- if strings.HasPrefix(c, "line ") || strings.HasPrefix(c, "extern ") || strings.HasPrefix(c, "export ") { - return true - } - - // "//[a-z0-9]+:[a-z0-9]" - // (The // has been removed.) - colon := strings.Index(c, ":") - if colon <= 0 || colon+1 >= len(c) { - return false - } - for i := 0; i <= colon+1; i++ { - if i == colon { - continue - } - b := c[i] - if !('a' <= b && b <= 'z' || '0' <= b && b <= '9') { - return false - } - } - return true -} - -// allStars reports whether text is the interior of an -// old-style /* */ comment with a star at the start of each line. -func allStars(text string) bool { - for i := 0; i < len(text); i++ { - if text[i] == '\n' { - j := i + 1 - for j < len(text) && (text[j] == ' ' || text[j] == '\t') { - j++ - } - if j < len(text) && text[j] != '*' { - return false - } - } - } - return true -} diff --git a/vendor/mvdan.cc/gofumpt/internal/govendor/go/printer/gobuild.go b/vendor/mvdan.cc/gofumpt/internal/govendor/go/printer/gobuild.go deleted file mode 100644 index f00492d07..000000000 --- a/vendor/mvdan.cc/gofumpt/internal/govendor/go/printer/gobuild.go +++ /dev/null @@ -1,170 +0,0 @@ -// Copyright 2020 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package printer - -import ( - "go/build/constraint" - "sort" - "text/tabwriter" -) - -func (p *printer) fixGoBuildLines() { - if len(p.goBuild)+len(p.plusBuild) == 0 { - return - } - - // Find latest possible placement of //go:build and // +build comments. - // That's just after the last blank line before we find a non-comment. - // (We'll add another blank line after our comment block.) - // When we start dropping // +build comments, we can skip over /* */ comments too. - // Note that we are processing tabwriter input, so every comment - // begins and ends with a tabwriter.Escape byte. - // And some newlines have turned into \f bytes. - insert := 0 - for pos := 0; ; { - // Skip leading space at beginning of line. - blank := true - for pos < len(p.output) && (p.output[pos] == ' ' || p.output[pos] == '\t') { - pos++ - } - // Skip over // comment if any. - if pos+3 < len(p.output) && p.output[pos] == tabwriter.Escape && p.output[pos+1] == '/' && p.output[pos+2] == '/' { - blank = false - for pos < len(p.output) && !isNL(p.output[pos]) { - pos++ - } - } - // Skip over \n at end of line. - if pos >= len(p.output) || !isNL(p.output[pos]) { - break - } - pos++ - - if blank { - insert = pos - } - } - - // If there is a //go:build comment before the place we identified, - // use that point instead. (Earlier in the file is always fine.) - if len(p.goBuild) > 0 && p.goBuild[0] < insert { - insert = p.goBuild[0] - } else if len(p.plusBuild) > 0 && p.plusBuild[0] < insert { - insert = p.plusBuild[0] - } - - var x constraint.Expr - switch len(p.goBuild) { - case 0: - // Synthesize //go:build expression from // +build lines. - for _, pos := range p.plusBuild { - y, err := constraint.Parse(p.commentTextAt(pos)) - if err != nil { - x = nil - break - } - if x == nil { - x = y - } else { - x = &constraint.AndExpr{X: x, Y: y} - } - } - case 1: - // Parse //go:build expression. - x, _ = constraint.Parse(p.commentTextAt(p.goBuild[0])) - } - - var block []byte - if x == nil { - // Don't have a valid //go:build expression to treat as truth. - // Bring all the lines together but leave them alone. - // Note that these are already tabwriter-escaped. - for _, pos := range p.goBuild { - block = append(block, p.lineAt(pos)...) 
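The synthesis step above leans on the standard go/build/constraint package; a minimal sketch of the same combination, with made-up build tags:

	package main

	import (
		"fmt"
		"go/build/constraint"
	)

	func main() {
		x, _ := constraint.Parse("// +build linux darwin")
		y, _ := constraint.Parse("// +build amd64")
		expr := &constraint.AndExpr{X: x, Y: y} // AND the per-line expressions together

		fmt.Println("//go:build " + expr.String()) // //go:build (linux || darwin) && amd64

		lines, _ := constraint.PlusBuildLines(expr) // regenerate matching // +build lines
		for _, l := range lines {
			fmt.Println(l)
		}
	}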
- } - for _, pos := range p.plusBuild { - block = append(block, p.lineAt(pos)...) - } - } else { - block = append(block, tabwriter.Escape) - block = append(block, "//go:build "...) - block = append(block, x.String()...) - block = append(block, tabwriter.Escape, '\n') - if len(p.plusBuild) > 0 { - lines, err := constraint.PlusBuildLines(x) - if err != nil { - lines = []string{"// +build error: " + err.Error()} - } - for _, line := range lines { - block = append(block, tabwriter.Escape) - block = append(block, line...) - block = append(block, tabwriter.Escape, '\n') - } - } - } - block = append(block, '\n') - - // Build sorted list of lines to delete from remainder of output. - toDelete := append(p.goBuild, p.plusBuild...) - sort.Ints(toDelete) - - // Collect output after insertion point, with lines deleted, into after. - var after []byte - start := insert - for _, end := range toDelete { - if end < start { - continue - } - after = appendLines(after, p.output[start:end]) - start = end + len(p.lineAt(end)) - } - after = appendLines(after, p.output[start:]) - if n := len(after); n >= 2 && isNL(after[n-1]) && isNL(after[n-2]) { - after = after[:n-1] - } - - p.output = p.output[:insert] - p.output = append(p.output, block...) - p.output = append(p.output, after...) -} - -// appendLines is like append(x, y...) -// but it avoids creating doubled blank lines, -// which would not be gofmt-standard output. -// It assumes that only whole blocks of lines are being appended, -// not line fragments. -func appendLines(x, y []byte) []byte { - if len(y) > 0 && isNL(y[0]) && // y starts in blank line - (len(x) == 0 || len(x) >= 2 && isNL(x[len(x)-1]) && isNL(x[len(x)-2])) { // x is empty or ends in blank line - y = y[1:] // delete y's leading blank line - } - return append(x, y...) -} - -func (p *printer) lineAt(start int) []byte { - pos := start - for pos < len(p.output) && !isNL(p.output[pos]) { - pos++ - } - if pos < len(p.output) { - pos++ - } - return p.output[start:pos] -} - -func (p *printer) commentTextAt(start int) string { - if start < len(p.output) && p.output[start] == tabwriter.Escape { - start++ - } - pos := start - for pos < len(p.output) && p.output[pos] != tabwriter.Escape && !isNL(p.output[pos]) { - pos++ - } - return string(p.output[start:pos]) -} - -func isNL(b byte) bool { - return b == '\n' || b == '\f' -} diff --git a/vendor/mvdan.cc/gofumpt/internal/govendor/go/printer/nodes.go b/vendor/mvdan.cc/gofumpt/internal/govendor/go/printer/nodes.go deleted file mode 100644 index a58525b85..000000000 --- a/vendor/mvdan.cc/gofumpt/internal/govendor/go/printer/nodes.go +++ /dev/null @@ -1,2001 +0,0 @@ -// Copyright 2009 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// This file implements printing of AST nodes; specifically -// expressions, statements, declarations, and files. It uses -// the print functionality implemented in printer.go. - -package printer - -import ( - "go/ast" - "go/token" - "math" - "strconv" - "strings" - "unicode" - "unicode/utf8" -) - -// Formatting issues: -// - better comment formatting for /*-style comments at the end of a line (e.g. 
a declaration) -// when the comment spans multiple lines; if such a comment is just two lines, formatting is -// not idempotent -// - formatting of expression lists -// - should use blank instead of tab to separate one-line function bodies from -// the function header unless there is a group of consecutive one-liners - -// ---------------------------------------------------------------------------- -// Common AST nodes. - -// Print as many newlines as necessary (but at least min newlines) to get to -// the current line. ws is printed before the first line break. If newSection -// is set, the first line break is printed as formfeed. Returns 0 if no line -// breaks were printed, returns 1 if there was exactly one newline printed, -// and returns a value > 1 if there was a formfeed or more than one newline -// printed. -// -// TODO(gri): linebreak may add too many lines if the next statement at "line" -// is preceded by comments because the computation of n assumes -// the current position before the comment and the target position -// after the comment. Thus, after interspersing such comments, the -// space taken up by them is not considered to reduce the number of -// linebreaks. At the moment there is no easy way to know about -// future (not yet interspersed) comments in this function. -func (p *printer) linebreak(line, min int, ws whiteSpace, newSection bool) (nbreaks int) { - n := nlimit(line - p.pos.Line) - if n < min { - n = min - } - if n > 0 { - p.print(ws) - if newSection { - p.print(formfeed) - n-- - nbreaks = 2 - } - nbreaks += n - for ; n > 0; n-- { - p.print(newline) - } - } - return -} - -// setComment sets g as the next comment if g != nil and if node comments -// are enabled - this mode is used when printing source code fragments such -// as exports only. It assumes that there is no pending comment in p.comments -// and at most one pending comment in the p.comment cache. -func (p *printer) setComment(g *ast.CommentGroup) { - if g == nil || !p.useNodeComments { - return - } - if p.comments == nil { - // initialize p.comments lazily - p.comments = make([]*ast.CommentGroup, 1) - } else if p.cindex < len(p.comments) { - // for some reason there are pending comments; this - // should never happen - handle gracefully and flush - // all comments up to g, ignore anything after that - p.flush(p.posFor(g.List[0].Pos()), token.ILLEGAL) - p.comments = p.comments[0:1] - // in debug mode, report error - p.internalError("setComment found pending comments") - } - p.comments[0] = g - p.cindex = 0 - // don't overwrite any pending comment in the p.comment cache - // (there may be a pending comment when a line comment is - // immediately followed by a lead comment with no other - // tokens between) - if p.commentOffset == infinity { - p.nextComment() // get comment ready for use - } -} - -type exprListMode uint - -const ( - commaTerm exprListMode = 1 << iota // list is optionally terminated by a comma - noIndent // no extra indentation in multi-line lists -) - -// If indent is set, a multi-line identifier list is indented after the -// first linebreak encountered. 
-func (p *printer) identList(list []*ast.Ident, indent bool) { - // convert into an expression list so we can re-use exprList formatting - xlist := make([]ast.Expr, len(list)) - for i, x := range list { - xlist[i] = x - } - var mode exprListMode - if !indent { - mode = noIndent - } - p.exprList(token.NoPos, xlist, 1, mode, token.NoPos, false) -} - -const filteredMsg = "contains filtered or unexported fields" - -// Print a list of expressions. If the list spans multiple -// source lines, the original line breaks are respected between -// expressions. -// -// TODO(gri) Consider rewriting this to be independent of []ast.Expr -// so that we can use the algorithm for any kind of list -// -// (e.g., pass list via a channel over which to range). -func (p *printer) exprList(prev0 token.Pos, list []ast.Expr, depth int, mode exprListMode, next0 token.Pos, isIncomplete bool) { - if len(list) == 0 { - if isIncomplete { - prev := p.posFor(prev0) - next := p.posFor(next0) - if prev.IsValid() && prev.Line == next.Line { - p.print("/* " + filteredMsg + " */") - } else { - p.print(newline) - p.print(indent, "// "+filteredMsg, unindent, newline) - } - } - return - } - - prev := p.posFor(prev0) - next := p.posFor(next0) - line := p.lineFor(list[0].Pos()) - endLine := p.lineFor(list[len(list)-1].End()) - - if prev.IsValid() && prev.Line == line && line == endLine { - // all list entries on a single line - for i, x := range list { - if i > 0 { - // use position of expression following the comma as - // comma position for correct comment placement - p.setPos(x.Pos()) - p.print(token.COMMA, blank) - } - p.expr0(x, depth) - } - if isIncomplete { - p.print(token.COMMA, blank, "/* "+filteredMsg+" */") - } - return - } - - // list entries span multiple lines; - // use source code positions to guide line breaks - - // Don't add extra indentation if noIndent is set; - // i.e., pretend that the first line is already indented. - ws := ignore - if mode&noIndent == 0 { - ws = indent - } - - // The first linebreak is always a formfeed since this section must not - // depend on any previous formatting. - prevBreak := -1 // index of last expression that was followed by a linebreak - if prev.IsValid() && prev.Line < line && p.linebreak(line, 0, ws, true) > 0 { - ws = ignore - prevBreak = 0 - } - - // initialize expression/key size: a zero value indicates expr/key doesn't fit on a single line - size := 0 - - // We use the ratio between the geometric mean of the previous key sizes and - // the current size to determine if there should be a break in the alignment. - // To compute the geometric mean we accumulate the ln(size) values (lnsum) - // and the number of sizes included (count). - lnsum := 0.0 - count := 0 - - // print all list elements - prevLine := prev.Line - for i, x := range list { - line = p.lineFor(x.Pos()) - - // Determine if the next linebreak, if any, needs to use formfeed: - // in general, use the entire node size to make the decision; for - // key:value expressions, use the key size. - // TODO(gri) for a better result, should probably incorporate both - // the key and the node size into the decision process - useFF := true - - // Determine element size: All bets are off if we don't have - // position information for the previous and next token (likely - // generated code - simply ignore the size in this case by setting - // it to 0). 
- prevSize := size - const infinity = 1e6 // larger than any source line - size = p.nodeSize(x, infinity) - pair, isPair := x.(*ast.KeyValueExpr) - if size <= infinity && prev.IsValid() && next.IsValid() { - // x fits on a single line - if isPair { - size = p.nodeSize(pair.Key, infinity) // size <= infinity - } - } else { - // size too large or we don't have good layout information - size = 0 - } - - // If the previous line and the current line had single- - // line-expressions and the key sizes are small or the - // ratio between the current key and the geometric mean - // if the previous key sizes does not exceed a threshold, - // align columns and do not use formfeed. - if prevSize > 0 && size > 0 { - const smallSize = 40 - if count == 0 || prevSize <= smallSize && size <= smallSize { - useFF = false - } else { - const r = 2.5 // threshold - geomean := math.Exp(lnsum / float64(count)) // count > 0 - ratio := float64(size) / geomean - useFF = r*ratio <= 1 || r <= ratio - } - } - - needsLinebreak := 0 < prevLine && prevLine < line - if i > 0 { - // Use position of expression following the comma as - // comma position for correct comment placement, but - // only if the expression is on the same line. - if !needsLinebreak { - p.setPos(x.Pos()) - } - p.print(token.COMMA) - needsBlank := true - if needsLinebreak { - // Lines are broken using newlines so comments remain aligned - // unless useFF is set or there are multiple expressions on - // the same line in which case formfeed is used. - nbreaks := p.linebreak(line, 0, ws, useFF || prevBreak+1 < i) - if nbreaks > 0 { - ws = ignore - prevBreak = i - needsBlank = false // we got a line break instead - } - // If there was a new section or more than one new line - // (which means that the tabwriter will implicitly break - // the section), reset the geomean variables since we are - // starting a new group of elements with the next element. - if nbreaks > 1 { - lnsum = 0 - count = 0 - } - } - if needsBlank { - p.print(blank) - } - } - - if len(list) > 1 && isPair && size > 0 && needsLinebreak { - // We have a key:value expression that fits onto one line - // and it's not on the same line as the prior expression: - // Use a column for the key such that consecutive entries - // can align if possible. - // (needsLinebreak is set if we started a new line before) - p.expr(pair.Key) - p.setPos(pair.Colon) - p.print(token.COLON, vtab) - p.expr(pair.Value) - } else { - p.expr0(x, depth) - } - - if size > 0 { - lnsum += math.Log(float64(size)) - count++ - } - - prevLine = line - } - - if mode&commaTerm != 0 && next.IsValid() && p.pos.Line < next.Line { - // Print a terminating comma if the next token is on a new line. 
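Condensed into a standalone form, the alignment decision described above looks roughly like the sketch below (an illustrative paraphrase of the removed logic, not an exported API): keep column alignment while entries stay small or the new entry is within a factor of 2.5 of the geometric mean of the previous sizes, and otherwise break the tabwriter section with a formfeed.

	package main

	import (
		"fmt"
		"math"
	)

	// breakAlignment paraphrases the removed heuristic: given the sizes of the
	// previous single-line entries and the size of the current one, report whether
	// the printer should emit a formfeed, ending the aligned column section.
	func breakAlignment(prevSizes []int, size int) bool {
		const smallSize, r = 40, 2.5
		if len(prevSizes) == 0 {
			return false
		}
		last := prevSizes[len(prevSizes)-1]
		if last <= smallSize && size <= smallSize {
			return false // both entries are small: keep aligning
		}
		lnsum := 0.0
		for _, s := range prevSizes {
			lnsum += math.Log(float64(s))
		}
		geomean := math.Exp(lnsum / float64(len(prevSizes)))
		ratio := float64(size) / geomean
		return r*ratio <= 1 || r <= ratio // far from the running geometric mean
	}

	func main() {
		fmt.Println(breakAlignment([]int{12, 14, 13}, 15)) // false: similar sizes stay aligned
		fmt.Println(breakAlignment([]int{60, 62, 61}, 12)) // true: a much shorter entry breaks the column
	}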
- p.print(token.COMMA) - if isIncomplete { - p.print(newline) - p.print("// " + filteredMsg) - } - if ws == ignore && mode&noIndent == 0 { - // unindent if we indented - p.print(unindent) - } - p.print(formfeed) // terminating comma needs a line break to look good - return - } - - if isIncomplete { - p.print(token.COMMA, newline) - p.print("// "+filteredMsg, newline) - } - - if ws == ignore && mode&noIndent == 0 { - // unindent if we indented - p.print(unindent) - } -} - -type paramMode int - -const ( - funcParam paramMode = iota - funcTParam - typeTParam -) - -func (p *printer) parameters(fields *ast.FieldList, mode paramMode) { - openTok, closeTok := token.LPAREN, token.RPAREN - if mode != funcParam { - openTok, closeTok = token.LBRACK, token.RBRACK - } - p.setPos(fields.Opening) - p.print(openTok) - if len(fields.List) > 0 { - prevLine := p.lineFor(fields.Opening) - ws := indent - for i, par := range fields.List { - // determine par begin and end line (may be different - // if there are multiple parameter names for this par - // or the type is on a separate line) - parLineBeg := p.lineFor(par.Pos()) - parLineEnd := p.lineFor(par.End()) - // separating "," if needed - needsLinebreak := 0 < prevLine && prevLine < parLineBeg - if i > 0 { - // use position of parameter following the comma as - // comma position for correct comma placement, but - // only if the next parameter is on the same line - if !needsLinebreak { - p.setPos(par.Pos()) - } - p.print(token.COMMA) - } - // separator if needed (linebreak or blank) - if needsLinebreak && p.linebreak(parLineBeg, 0, ws, true) > 0 { - // break line if the opening "(" or previous parameter ended on a different line - ws = ignore - } else if i > 0 { - p.print(blank) - } - // parameter names - if len(par.Names) > 0 { - // Very subtle: If we indented before (ws == ignore), identList - // won't indent again. If we didn't (ws == indent), identList will - // indent if the identList spans multiple lines, and it will outdent - // again at the end (and still ws == indent). Thus, a subsequent indent - // by a linebreak call after a type, or in the next multi-line identList - // will do the right thing. - p.identList(par.Names, ws == indent) - p.print(blank) - } - // parameter type - p.expr(stripParensAlways(par.Type)) - prevLine = parLineEnd - } - - // if the closing ")" is on a separate line from the last parameter, - // print an additional "," and line break - if closing := p.lineFor(fields.Closing); 0 < prevLine && prevLine < closing { - p.print(token.COMMA) - p.linebreak(closing, 0, ignore, true) - } else if mode == typeTParam && fields.NumFields() == 1 && combinesWithName(fields.List[0].Type) { - // A type parameter list [P T] where the name P and the type expression T syntactically - // combine to another valid (value) expression requires a trailing comma, as in [P *T,] - // (or an enclosing interface as in [P interface(*T)]), so that the type parameter list - // is not parsed as an array length [P*T]. - p.print(token.COMMA) - } - - // unindent if we indented - if ws == ignore { - p.print(unindent) - } - } - - p.setPos(fields.Closing) - p.print(closeTok) -} - -// combinesWithName reports whether a name followed by the expression x -// syntactically combines to another valid (value) expression. For instance -// using *T for x, "name *T" syntactically appears as the expression x*T. -// On the other hand, using P|Q or *P|~Q for x, "name P|Q" or name *P|~Q" -// cannot be combined into a valid (value) expression. 
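The trailing-comma rule applied at the end of parameters above exists to keep a single type parameter from being re-parsed as an array length. A small, hypothetical illustration of the two readings (the type names are made up):

package main

type T int

// An array type: here the bracketed expression really is a length.
type arrayOfPT [3 * 4]int

// A generic type with one parameter whose constraint is *T. Without the
// trailing comma, [P *T] would be read as an array type of length P*T,
// so the printer emits the comma to keep it a type parameter list.
type box[P *T,] struct{ v P }

// Constraints such as interfaces or unions cannot combine with the name
// into a value expression, so no trailing comma is needed for them.
type pair[P interface{ ~int | ~string }] struct{ a, b P }

func main() {}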
-func combinesWithName(x ast.Expr) bool { - switch x := x.(type) { - case *ast.StarExpr: - // name *x.X combines to name*x.X if x.X is not a type element - return !isTypeElem(x.X) - case *ast.BinaryExpr: - return combinesWithName(x.X) && !isTypeElem(x.Y) - case *ast.ParenExpr: - // name(x) combines but we are making sure at - // the call site that x is never parenthesized. - panic("unexpected parenthesized expression") - } - return false -} - -// isTypeElem reports whether x is a (possibly parenthesized) type element expression. -// The result is false if x could be a type element OR an ordinary (value) expression. -func isTypeElem(x ast.Expr) bool { - switch x := x.(type) { - case *ast.ArrayType, *ast.StructType, *ast.FuncType, *ast.InterfaceType, *ast.MapType, *ast.ChanType: - return true - case *ast.UnaryExpr: - return x.Op == token.TILDE - case *ast.BinaryExpr: - return isTypeElem(x.X) || isTypeElem(x.Y) - case *ast.ParenExpr: - return isTypeElem(x.X) - } - return false -} - -func (p *printer) signature(sig *ast.FuncType) { - if sig.TypeParams != nil { - p.parameters(sig.TypeParams, funcTParam) - } - if sig.Params != nil { - p.parameters(sig.Params, funcParam) - } else { - p.print(token.LPAREN, token.RPAREN) - } - res := sig.Results - n := res.NumFields() - if n > 0 { - // res != nil - p.print(blank) - if n == 1 && res.List[0].Names == nil { - // single anonymous res; no ()'s - p.expr(stripParensAlways(res.List[0].Type)) - return - } - p.parameters(res, funcParam) - } -} - -func identListSize(list []*ast.Ident, maxSize int) (size int) { - for i, x := range list { - if i > 0 { - size += len(", ") - } - size += utf8.RuneCountInString(x.Name) - if size >= maxSize { - break - } - } - return -} - -func (p *printer) isOneLineFieldList(list []*ast.Field) bool { - if len(list) != 1 { - return false // allow only one field - } - f := list[0] - if f.Tag != nil || f.Comment != nil { - return false // don't allow tags or comments - } - // only name(s) and type - const maxSize = 30 // adjust as appropriate, this is an approximate value - namesSize := identListSize(f.Names, maxSize) - if namesSize > 0 { - namesSize = 1 // blank between names and types - } - typeSize := p.nodeSize(f.Type, maxSize) - return namesSize+typeSize <= maxSize -} - -func (p *printer) setLineComment(text string) { - p.setComment(&ast.CommentGroup{List: []*ast.Comment{{Slash: token.NoPos, Text: text}}}) -} - -func (p *printer) fieldList(fields *ast.FieldList, isStruct, isIncomplete bool) { - lbrace := fields.Opening - list := fields.List - rbrace := fields.Closing - hasComments := isIncomplete || p.commentBefore(p.posFor(rbrace)) - srcIsOneLine := lbrace.IsValid() && rbrace.IsValid() && p.lineFor(lbrace) == p.lineFor(rbrace) - - if !hasComments && srcIsOneLine { - // possibly a one-line struct/interface - if len(list) == 0 { - // no blank between keyword and {} in this case - p.setPos(lbrace) - p.print(token.LBRACE) - p.setPos(rbrace) - p.print(token.RBRACE) - return - } else if p.isOneLineFieldList(list) { - // small enough - print on one line - // (don't use identList and ignore source line breaks) - p.setPos(lbrace) - p.print(token.LBRACE, blank) - f := list[0] - if isStruct { - for i, x := range f.Names { - if i > 0 { - // no comments so no need for comma position - p.print(token.COMMA, blank) - } - p.expr(x) - } - if len(f.Names) > 0 { - p.print(blank) - } - p.expr(f.Type) - } else { // interface - if len(f.Names) > 0 { - name := f.Names[0] // method name - p.expr(name) - p.signature(f.Type.(*ast.FuncType)) // don't print 
"func" - } else { - // embedded interface - p.expr(f.Type) - } - } - p.print(blank) - p.setPos(rbrace) - p.print(token.RBRACE) - return - } - } - // hasComments || !srcIsOneLine - - p.print(blank) - p.setPos(lbrace) - p.print(token.LBRACE, indent) - if hasComments || len(list) > 0 { - p.print(formfeed) - } - - if isStruct { - - sep := vtab - if len(list) == 1 { - sep = blank - } - var line int - for i, f := range list { - if i > 0 { - p.linebreak(p.lineFor(f.Pos()), 1, ignore, p.linesFrom(line) > 0) - } - extraTabs := 0 - p.setComment(f.Doc) - p.recordLine(&line) - if len(f.Names) > 0 { - // named fields - p.identList(f.Names, false) - p.print(sep) - p.expr(f.Type) - extraTabs = 1 - } else { - // anonymous field - p.expr(f.Type) - extraTabs = 2 - } - if f.Tag != nil { - if len(f.Names) > 0 && sep == vtab { - p.print(sep) - } - p.print(sep) - p.expr(f.Tag) - extraTabs = 0 - } - if f.Comment != nil { - for ; extraTabs > 0; extraTabs-- { - p.print(sep) - } - p.setComment(f.Comment) - } - } - if isIncomplete { - if len(list) > 0 { - p.print(formfeed) - } - p.flush(p.posFor(rbrace), token.RBRACE) // make sure we don't lose the last line comment - p.setLineComment("// " + filteredMsg) - } - - } else { // interface - - var line int - var prev *ast.Ident // previous "type" identifier - for i, f := range list { - var name *ast.Ident // first name, or nil - if len(f.Names) > 0 { - name = f.Names[0] - } - if i > 0 { - // don't do a line break (min == 0) if we are printing a list of types - // TODO(gri) this doesn't work quite right if the list of types is - // spread across multiple lines - min := 1 - if prev != nil && name == prev { - min = 0 - } - p.linebreak(p.lineFor(f.Pos()), min, ignore, p.linesFrom(line) > 0) - } - p.setComment(f.Doc) - p.recordLine(&line) - if name != nil { - // method - p.expr(name) - p.signature(f.Type.(*ast.FuncType)) // don't print "func" - prev = nil - } else { - // embedded interface - p.expr(f.Type) - prev = nil - } - p.setComment(f.Comment) - } - if isIncomplete { - if len(list) > 0 { - p.print(formfeed) - } - p.flush(p.posFor(rbrace), token.RBRACE) // make sure we don't lose the last line comment - p.setLineComment("// contains filtered or unexported methods") - } - - } - p.print(unindent, formfeed) - p.setPos(rbrace) - p.print(token.RBRACE) -} - -// ---------------------------------------------------------------------------- -// Expressions - -func walkBinary(e *ast.BinaryExpr) (has4, has5 bool, maxProblem int) { - switch e.Op.Precedence() { - case 4: - has4 = true - case 5: - has5 = true - } - - switch l := e.X.(type) { - case *ast.BinaryExpr: - if l.Op.Precedence() < e.Op.Precedence() { - // parens will be inserted. - // pretend this is an *ast.ParenExpr and do nothing. - break - } - h4, h5, mp := walkBinary(l) - has4 = has4 || h4 - has5 = has5 || h5 - if maxProblem < mp { - maxProblem = mp - } - } - - switch r := e.Y.(type) { - case *ast.BinaryExpr: - if r.Op.Precedence() <= e.Op.Precedence() { - // parens will be inserted. - // pretend this is an *ast.ParenExpr and do nothing. 
- break - } - h4, h5, mp := walkBinary(r) - has4 = has4 || h4 - has5 = has5 || h5 - if maxProblem < mp { - maxProblem = mp - } - - case *ast.StarExpr: - if e.Op == token.QUO { // `*/` - maxProblem = 5 - } - - case *ast.UnaryExpr: - switch e.Op.String() + r.Op.String() { - case "/*", "&&", "&^": - maxProblem = 5 - case "++", "--": - if maxProblem < 4 { - maxProblem = 4 - } - } - } - return -} - -func cutoff(e *ast.BinaryExpr, depth int) int { - has4, has5, maxProblem := walkBinary(e) - if maxProblem > 0 { - return maxProblem + 1 - } - if has4 && has5 { - if depth == 1 { - return 5 - } - return 4 - } - if depth == 1 { - return 6 - } - return 4 -} - -func diffPrec(expr ast.Expr, prec int) int { - x, ok := expr.(*ast.BinaryExpr) - if !ok || prec != x.Op.Precedence() { - return 1 - } - return 0 -} - -func reduceDepth(depth int) int { - depth-- - if depth < 1 { - depth = 1 - } - return depth -} - -// Format the binary expression: decide the cutoff and then format. -// Let's call depth == 1 Normal mode, and depth > 1 Compact mode. -// (Algorithm suggestion by Russ Cox.) -// -// The precedences are: -// -// 5 * / % << >> & &^ -// 4 + - | ^ -// 3 == != < <= > >= -// 2 && -// 1 || -// -// The only decision is whether there will be spaces around levels 4 and 5. -// There are never spaces at level 6 (unary), and always spaces at levels 3 and below. -// -// To choose the cutoff, look at the whole expression but excluding primary -// expressions (function calls, parenthesized exprs), and apply these rules: -// -// 1. If there is a binary operator with a right side unary operand -// that would clash without a space, the cutoff must be (in order): -// -// /* 6 -// && 6 -// &^ 6 -// ++ 5 -// -- 5 -// -// (Comparison operators always have spaces around them.) -// -// 2. If there is a mix of level 5 and level 4 operators, then the cutoff -// is 5 (use spaces to distinguish precedence) in Normal mode -// and 4 (never use spaces) in Compact mode. -// -// 3. If there are no level 4 operators or no level 5 operators, then the -// cutoff is 6 (always use spaces) in Normal mode -// and 4 (never use spaces) in Compact mode. -func (p *printer) binaryExpr(x *ast.BinaryExpr, prec1, cutoff, depth int) { - prec := x.Op.Precedence() - if prec < prec1 { - // parenthesis needed - // Note: The parser inserts an ast.ParenExpr node; thus this case - // can only occur if the AST is created in a different way. - p.print(token.LPAREN) - p.expr0(x, reduceDepth(depth)) // parentheses undo one level of depth - p.print(token.RPAREN) - return - } - - printBlank := prec < cutoff - - ws := indent - p.expr1(x.X, prec, depth+diffPrec(x.X, prec)) - if printBlank { - p.print(blank) - } - xline := p.pos.Line // before the operator (it may be on the next line!) 
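The cutoff rules spelled out in the comment above are what produce the familiar mixed spacing around binary operators. A couple of worked examples, written the way the printer lays them out (standalone code with assumed variable names):

package main

import "fmt"

func main() {
	a, b, c, d := 1, 2, 3, 4

	// Mixing level-5 (*) and level-4 (+) operators at depth 1 gives a
	// cutoff of 5: the '+' (precedence 4) is spaced, the '*' is not.
	x := a + b*c

	// Operands of a comparison are printed in compact mode (depth > 1),
	// so the '+' inside loses its spaces, while the level-3 operator
	// '<=' always keeps them.
	if a+b <= d {
		fmt.Println(x)
	}
}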
- yline := p.lineFor(x.Y.Pos()) - p.setPos(x.OpPos) - p.print(x.Op) - if xline != yline && xline > 0 && yline > 0 { - // at least one line break, but respect an extra empty line - // in the source - if p.linebreak(yline, 1, ws, true) > 0 { - ws = ignore - printBlank = false // no blank after line break - } - } - if printBlank { - p.print(blank) - } - p.expr1(x.Y, prec+1, depth+1) - if ws == ignore { - p.print(unindent) - } -} - -func isBinary(expr ast.Expr) bool { - _, ok := expr.(*ast.BinaryExpr) - return ok -} - -func (p *printer) expr1(expr ast.Expr, prec1, depth int) { - p.setPos(expr.Pos()) - - switch x := expr.(type) { - case *ast.BadExpr: - p.print("BadExpr") - - case *ast.Ident: - p.print(x) - - case *ast.BinaryExpr: - if depth < 1 { - p.internalError("depth < 1:", depth) - depth = 1 - } - p.binaryExpr(x, prec1, cutoff(x, depth), depth) - - case *ast.KeyValueExpr: - p.expr(x.Key) - p.setPos(x.Colon) - p.print(token.COLON, blank) - p.expr(x.Value) - - case *ast.StarExpr: - const prec = token.UnaryPrec - if prec < prec1 { - // parenthesis needed - p.print(token.LPAREN) - p.print(token.MUL) - p.expr(x.X) - p.print(token.RPAREN) - } else { - // no parenthesis needed - p.print(token.MUL) - p.expr(x.X) - } - - case *ast.UnaryExpr: - const prec = token.UnaryPrec - if prec < prec1 { - // parenthesis needed - p.print(token.LPAREN) - p.expr(x) - p.print(token.RPAREN) - } else { - // no parenthesis needed - p.print(x.Op) - if x.Op == token.RANGE { - // TODO(gri) Remove this code if it cannot be reached. - p.print(blank) - } - p.expr1(x.X, prec, depth) - } - - case *ast.BasicLit: - if p.Config.Mode&normalizeNumbers != 0 { - x = normalizedNumber(x) - } - p.print(x) - - case *ast.FuncLit: - p.setPos(x.Type.Pos()) - p.print(token.FUNC) - // See the comment in funcDecl about how the header size is computed. 
- startCol := p.out.Column - len("func") - p.signature(x.Type) - p.funcBody(p.distanceFrom(x.Type.Pos(), startCol), blank, x.Body) - - case *ast.ParenExpr: - if _, hasParens := x.X.(*ast.ParenExpr); hasParens { - // don't print parentheses around an already parenthesized expression - // TODO(gri) consider making this more general and incorporate precedence levels - p.expr0(x.X, depth) - } else { - p.print(token.LPAREN) - p.expr0(x.X, reduceDepth(depth)) // parentheses undo one level of depth - p.setPos(x.Rparen) - p.print(token.RPAREN) - } - - case *ast.SelectorExpr: - p.selectorExpr(x, depth, false) - - case *ast.TypeAssertExpr: - p.expr1(x.X, token.HighestPrec, depth) - p.print(token.PERIOD) - p.setPos(x.Lparen) - p.print(token.LPAREN) - if x.Type != nil { - p.expr(x.Type) - } else { - p.print(token.TYPE) - } - p.setPos(x.Rparen) - p.print(token.RPAREN) - - case *ast.IndexExpr: - // TODO(gri): should treat[] like parentheses and undo one level of depth - p.expr1(x.X, token.HighestPrec, 1) - p.setPos(x.Lbrack) - p.print(token.LBRACK) - p.expr0(x.Index, depth+1) - p.setPos(x.Rbrack) - p.print(token.RBRACK) - - case *ast.IndexListExpr: - // TODO(gri): as for IndexExpr, should treat [] like parentheses and undo - // one level of depth - p.expr1(x.X, token.HighestPrec, 1) - p.setPos(x.Lbrack) - p.print(token.LBRACK) - p.exprList(x.Lbrack, x.Indices, depth+1, commaTerm, x.Rbrack, false) - p.setPos(x.Rbrack) - p.print(token.RBRACK) - - case *ast.SliceExpr: - // TODO(gri): should treat[] like parentheses and undo one level of depth - p.expr1(x.X, token.HighestPrec, 1) - p.setPos(x.Lbrack) - p.print(token.LBRACK) - indices := []ast.Expr{x.Low, x.High} - if x.Max != nil { - indices = append(indices, x.Max) - } - // determine if we need extra blanks around ':' - var needsBlanks bool - if depth <= 1 { - var indexCount int - var hasBinaries bool - for _, x := range indices { - if x != nil { - indexCount++ - if isBinary(x) { - hasBinaries = true - } - } - } - if indexCount > 1 && hasBinaries { - needsBlanks = true - } - } - for i, x := range indices { - if i > 0 { - if indices[i-1] != nil && needsBlanks { - p.print(blank) - } - p.print(token.COLON) - if x != nil && needsBlanks { - p.print(blank) - } - } - if x != nil { - p.expr0(x, depth+1) - } - } - p.setPos(x.Rbrack) - p.print(token.RBRACK) - - case *ast.CallExpr: - if len(x.Args) > 1 { - depth++ - } - var wasIndented bool - if _, ok := x.Fun.(*ast.FuncType); ok { - // conversions to literal function types require parentheses around the type - p.print(token.LPAREN) - wasIndented = p.possibleSelectorExpr(x.Fun, token.HighestPrec, depth) - p.print(token.RPAREN) - } else { - wasIndented = p.possibleSelectorExpr(x.Fun, token.HighestPrec, depth) - } - p.setPos(x.Lparen) - p.print(token.LPAREN) - if x.Ellipsis.IsValid() { - p.exprList(x.Lparen, x.Args, depth, 0, x.Ellipsis, false) - p.setPos(x.Ellipsis) - p.print(token.ELLIPSIS) - if x.Rparen.IsValid() && p.lineFor(x.Ellipsis) < p.lineFor(x.Rparen) { - p.print(token.COMMA, formfeed) - } - } else { - p.exprList(x.Lparen, x.Args, depth, commaTerm, x.Rparen, false) - } - p.setPos(x.Rparen) - p.print(token.RPAREN) - if wasIndented { - p.print(unindent) - } - - case *ast.CompositeLit: - // composite literal elements that are composite literals themselves may have the type omitted - if x.Type != nil { - p.expr1(x.Type, token.HighestPrec, depth) - } - p.level++ - p.setPos(x.Lbrace) - p.print(token.LBRACE) - p.exprList(x.Lbrace, x.Elts, 1, commaTerm, x.Rbrace, x.Incomplete) - // do not insert extra line break 
following a /*-style comment - // before the closing '}' as it might break the code if there - // is no trailing ',' - mode := noExtraLinebreak - // do not insert extra blank following a /*-style comment - // before the closing '}' unless the literal is empty - if len(x.Elts) > 0 { - mode |= noExtraBlank - } - // need the initial indent to print lone comments with - // the proper level of indentation - p.print(indent, unindent, mode) - p.setPos(x.Rbrace) - p.print(token.RBRACE, mode) - p.level-- - - case *ast.Ellipsis: - p.print(token.ELLIPSIS) - if x.Elt != nil { - p.expr(x.Elt) - } - - case *ast.ArrayType: - p.print(token.LBRACK) - if x.Len != nil { - p.expr(x.Len) - } - p.print(token.RBRACK) - p.expr(x.Elt) - - case *ast.StructType: - p.print(token.STRUCT) - p.fieldList(x.Fields, true, x.Incomplete) - - case *ast.FuncType: - p.print(token.FUNC) - p.signature(x) - - case *ast.InterfaceType: - p.print(token.INTERFACE) - p.fieldList(x.Methods, false, x.Incomplete) - - case *ast.MapType: - p.print(token.MAP, token.LBRACK) - p.expr(x.Key) - p.print(token.RBRACK) - p.expr(x.Value) - - case *ast.ChanType: - switch x.Dir { - case ast.SEND | ast.RECV: - p.print(token.CHAN) - case ast.RECV: - p.print(token.ARROW, token.CHAN) // x.Arrow and x.Pos() are the same - case ast.SEND: - p.print(token.CHAN) - p.setPos(x.Arrow) - p.print(token.ARROW) - } - p.print(blank) - p.expr(x.Value) - - default: - panic("unreachable") - } -} - -// normalizedNumber rewrites base prefixes and exponents -// of numbers to use lower-case letters (0X123 to 0x123 and 1.2E3 to 1.2e3), -// and removes leading 0's from integer imaginary literals (0765i to 765i). -// It leaves hexadecimal digits alone. -// -// normalizedNumber doesn't modify the ast.BasicLit value lit points to. -// If lit is not a number or a number in canonical format already, -// lit is returned as is. Otherwise a new ast.BasicLit is created. -func normalizedNumber(lit *ast.BasicLit) *ast.BasicLit { - if lit.Kind != token.INT && lit.Kind != token.FLOAT && lit.Kind != token.IMAG { - return lit // not a number - nothing to do - } - if len(lit.Value) < 2 { - return lit // only one digit (common case) - nothing to do - } - // len(lit.Value) >= 2 - - // We ignore lit.Kind because for lit.Kind == token.IMAG the literal may be an integer - // or floating-point value, decimal or not. Instead, just consider the literal pattern. 
- x := lit.Value - switch x[:2] { - default: - // 0-prefix octal, decimal int, or float (possibly with 'i' suffix) - if i := strings.LastIndexByte(x, 'E'); i >= 0 { - x = x[:i] + "e" + x[i+1:] - break - } - // remove leading 0's from integer (but not floating-point) imaginary literals - if x[len(x)-1] == 'i' && !strings.ContainsAny(x, ".e") { - x = strings.TrimLeft(x, "0_") - if x == "i" { - x = "0i" - } - } - case "0X": - x = "0x" + x[2:] - // possibly a hexadecimal float - if i := strings.LastIndexByte(x, 'P'); i >= 0 { - x = x[:i] + "p" + x[i+1:] - } - case "0x": - // possibly a hexadecimal float - i := strings.LastIndexByte(x, 'P') - if i == -1 { - return lit // nothing to do - } - x = x[:i] + "p" + x[i+1:] - case "0O": - x = "0o" + x[2:] - case "0o": - return lit // nothing to do - case "0B": - x = "0b" + x[2:] - case "0b": - return lit // nothing to do - } - - return &ast.BasicLit{ValuePos: lit.ValuePos, Kind: lit.Kind, Value: x} -} - -func (p *printer) possibleSelectorExpr(expr ast.Expr, prec1, depth int) bool { - if x, ok := expr.(*ast.SelectorExpr); ok { - return p.selectorExpr(x, depth, true) - } - p.expr1(expr, prec1, depth) - return false -} - -// selectorExpr handles an *ast.SelectorExpr node and reports whether x spans -// multiple lines. -func (p *printer) selectorExpr(x *ast.SelectorExpr, depth int, isMethod bool) bool { - p.expr1(x.X, token.HighestPrec, depth) - p.print(token.PERIOD) - if line := p.lineFor(x.Sel.Pos()); p.pos.IsValid() && p.pos.Line < line { - p.print(indent, newline) - p.setPos(x.Sel.Pos()) - p.print(x.Sel) - if !isMethod { - p.print(unindent) - } - return true - } - p.setPos(x.Sel.Pos()) - p.print(x.Sel) - return false -} - -func (p *printer) expr0(x ast.Expr, depth int) { - p.expr1(x, token.LowestPrec, depth) -} - -func (p *printer) expr(x ast.Expr) { - const depth = 1 - p.expr1(x, token.LowestPrec, depth) -} - -// ---------------------------------------------------------------------------- -// Statements - -// Print the statement list indented, but without a newline after the last statement. -// Extra line breaks between statements in the source are respected but at most one -// empty line is printed between statements. -func (p *printer) stmtList(list []ast.Stmt, nindent int, nextIsRBrace bool) { - if nindent > 0 { - p.print(indent) - } - var line int - i := 0 - for _, s := range list { - // ignore empty statements (was issue 3466) - if _, isEmpty := s.(*ast.EmptyStmt); !isEmpty { - // nindent == 0 only for lists of switch/select case clauses; - // in those cases each clause is a new section - if len(p.output) > 0 { - // only print line break if we are not at the beginning of the output - // (i.e., we are not printing only a partial program) - p.linebreak(p.lineFor(s.Pos()), 1, ignore, i == 0 || nindent == 0 || p.linesFrom(line) > 0) - } - p.recordLine(&line) - p.stmt(s, nextIsRBrace && i == len(list)-1) - // labeled statements put labels on a separate line, but here - // we only care about the start line of the actual statement - // without label - correct line for each label - for t := s; ; { - lt, _ := t.(*ast.LabeledStmt) - if lt == nil { - break - } - line++ - t = lt.Stmt - } - i++ - } - } - if nindent > 0 { - p.print(unindent) - } -} - -// block prints an *ast.BlockStmt; it always spans at least two lines. 
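normalizedNumber above only rewrites base prefixes, exponent letters, and leading zeros of integer imaginary literals, and it never touches hexadecimal digits. A few input/output pairs, shown as plain strings in a standalone sketch:

package main

import "fmt"

func main() {
	// input -> canonical form produced by normalizedNumber
	examples := [][2]string{
		{"0X1A", "0x1A"},     // base prefix lower-cased, hex digits kept as is
		{"1.2E3", "1.2e3"},   // exponent letter lower-cased
		{"0B101", "0b101"},   // binary prefix lower-cased
		{"0O17", "0o17"},     // octal prefix lower-cased
		{"0765i", "765i"},    // leading zeros dropped from an integer imaginary
		{"0x1p-2", "0x1p-2"}, // already canonical: returned unchanged
	}
	for _, e := range examples {
		fmt.Printf("%-8s -> %s\n", e[0], e[1])
	}
}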
-func (p *printer) block(b *ast.BlockStmt, nindent int) { - p.setPos(b.Lbrace) - p.print(token.LBRACE) - p.stmtList(b.List, nindent, true) - p.linebreak(p.lineFor(b.Rbrace), 1, ignore, true) - p.setPos(b.Rbrace) - p.print(token.RBRACE) -} - -func isTypeName(x ast.Expr) bool { - switch t := x.(type) { - case *ast.Ident: - return true - case *ast.SelectorExpr: - return isTypeName(t.X) - } - return false -} - -func stripParens(x ast.Expr) ast.Expr { - if px, strip := x.(*ast.ParenExpr); strip { - // parentheses must not be stripped if there are any - // unparenthesized composite literals starting with - // a type name - ast.Inspect(px.X, func(node ast.Node) bool { - switch x := node.(type) { - case *ast.ParenExpr: - // parentheses protect enclosed composite literals - return false - case *ast.CompositeLit: - if isTypeName(x.Type) { - strip = false // do not strip parentheses - } - return false - } - // in all other cases, keep inspecting - return true - }) - if strip { - return stripParens(px.X) - } - } - return x -} - -func stripParensAlways(x ast.Expr) ast.Expr { - if x, ok := x.(*ast.ParenExpr); ok { - return stripParensAlways(x.X) - } - return x -} - -func (p *printer) controlClause(isForStmt bool, init ast.Stmt, expr ast.Expr, post ast.Stmt) { - p.print(blank) - needsBlank := false - if init == nil && post == nil { - // no semicolons required - if expr != nil { - p.expr(stripParens(expr)) - needsBlank = true - } - } else { - // all semicolons required - // (they are not separators, print them explicitly) - if init != nil { - p.stmt(init, false) - } - p.print(token.SEMICOLON, blank) - if expr != nil { - p.expr(stripParens(expr)) - needsBlank = true - } - if isForStmt { - p.print(token.SEMICOLON, blank) - needsBlank = false - if post != nil { - p.stmt(post, false) - needsBlank = true - } - } - } - if needsBlank { - p.print(blank) - } -} - -// indentList reports whether an expression list would look better if it -// were indented wholesale (starting with the very first element, rather -// than starting at the first line break). -func (p *printer) indentList(list []ast.Expr) bool { - // Heuristic: indentList reports whether there are more than one multi- - // line element in the list, or if there is any element that is not - // starting on the same line as the previous one ends. 
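stripParens above deliberately keeps parentheses that protect a composite literal whose type is a plain name, since dropping them would change how the statement parses. A small, self-contained illustration; the point type is hypothetical:

package main

type point struct{ x, y int }

func main() {
	// The parentheses are kept: inside them sits an unparenthesized
	// composite literal with a simple type name, and without the outer
	// parentheses the '{' of point{1, 2} would start the if block.
	if (point{1, 2}.x == 1) {
		println("x is 1")
	}

	// Parentheses around an ordinary condition carry no such risk and
	// are stripped; the printer would emit this line as: if 1+2 == 3 {
	if (1+2 == 3) {
		println("still true")
	}
}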
- if len(list) >= 2 { - b := p.lineFor(list[0].Pos()) - e := p.lineFor(list[len(list)-1].End()) - if 0 < b && b < e { - // list spans multiple lines - n := 0 // multi-line element count - line := b - for _, x := range list { - xb := p.lineFor(x.Pos()) - xe := p.lineFor(x.End()) - if line < xb { - // x is not starting on the same - // line as the previous one ended - return true - } - if xb < xe { - // x is a multi-line element - n++ - } - line = xe - } - return n > 1 - } - } - return false -} - -func (p *printer) stmt(stmt ast.Stmt, nextIsRBrace bool) { - p.setPos(stmt.Pos()) - - switch s := stmt.(type) { - case *ast.BadStmt: - p.print("BadStmt") - - case *ast.DeclStmt: - p.decl(s.Decl) - - case *ast.EmptyStmt: - // nothing to do - - case *ast.LabeledStmt: - // a "correcting" unindent immediately following a line break - // is applied before the line break if there is no comment - // between (see writeWhitespace) - p.print(unindent) - p.expr(s.Label) - p.setPos(s.Colon) - p.print(token.COLON, indent) - if e, isEmpty := s.Stmt.(*ast.EmptyStmt); isEmpty { - if !nextIsRBrace { - p.print(newline) - p.setPos(e.Pos()) - p.print(token.SEMICOLON) - break - } - } else { - p.linebreak(p.lineFor(s.Stmt.Pos()), 1, ignore, true) - } - p.stmt(s.Stmt, nextIsRBrace) - - case *ast.ExprStmt: - const depth = 1 - p.expr0(s.X, depth) - - case *ast.SendStmt: - const depth = 1 - p.expr0(s.Chan, depth) - p.print(blank) - p.setPos(s.Arrow) - p.print(token.ARROW, blank) - p.expr0(s.Value, depth) - - case *ast.IncDecStmt: - const depth = 1 - p.expr0(s.X, depth+1) - p.setPos(s.TokPos) - p.print(s.Tok) - - case *ast.AssignStmt: - depth := 1 - if len(s.Lhs) > 1 && len(s.Rhs) > 1 { - depth++ - } - p.exprList(s.Pos(), s.Lhs, depth, 0, s.TokPos, false) - p.print(blank) - p.setPos(s.TokPos) - p.print(s.Tok, blank) - p.exprList(s.TokPos, s.Rhs, depth, 0, token.NoPos, false) - - case *ast.GoStmt: - p.print(token.GO, blank) - p.expr(s.Call) - - case *ast.DeferStmt: - p.print(token.DEFER, blank) - p.expr(s.Call) - - case *ast.ReturnStmt: - p.print(token.RETURN) - if s.Results != nil { - p.print(blank) - // Use indentList heuristic to make corner cases look - // better (issue 1207). A more systematic approach would - // always indent, but this would cause significant - // reformatting of the code base and not necessarily - // lead to more nicely formatted code in general. - if p.indentList(s.Results) { - p.print(indent) - // Use NoPos so that a newline never goes before - // the results (see issue #32854). - p.exprList(token.NoPos, s.Results, 1, noIndent, token.NoPos, false) - p.print(unindent) - } else { - p.exprList(token.NoPos, s.Results, 1, 0, token.NoPos, false) - } - } - - case *ast.BranchStmt: - p.print(s.Tok) - if s.Label != nil { - p.print(blank) - p.expr(s.Label) - } - - case *ast.BlockStmt: - p.block(s, 1) - - case *ast.IfStmt: - p.print(token.IF) - p.controlClause(false, s.Init, s.Cond, nil) - p.block(s.Body, 1) - if s.Else != nil { - p.print(blank, token.ELSE, blank) - switch s.Else.(type) { - case *ast.BlockStmt, *ast.IfStmt: - p.stmt(s.Else, nextIsRBrace) - default: - // This can only happen with an incorrectly - // constructed AST. Permit it but print so - // that it can be parsed without errors. 
- p.print(token.LBRACE, indent, formfeed) - p.stmt(s.Else, true) - p.print(unindent, formfeed, token.RBRACE) - } - } - - case *ast.CaseClause: - if s.List != nil { - p.print(token.CASE, blank) - p.exprList(s.Pos(), s.List, 1, 0, s.Colon, false) - } else { - p.print(token.DEFAULT) - } - p.setPos(s.Colon) - p.print(token.COLON) - p.stmtList(s.Body, 1, nextIsRBrace) - - case *ast.SwitchStmt: - p.print(token.SWITCH) - p.controlClause(false, s.Init, s.Tag, nil) - p.block(s.Body, 0) - - case *ast.TypeSwitchStmt: - p.print(token.SWITCH) - if s.Init != nil { - p.print(blank) - p.stmt(s.Init, false) - p.print(token.SEMICOLON) - } - p.print(blank) - p.stmt(s.Assign, false) - p.print(blank) - p.block(s.Body, 0) - - case *ast.CommClause: - if s.Comm != nil { - p.print(token.CASE, blank) - p.stmt(s.Comm, false) - } else { - p.print(token.DEFAULT) - } - p.setPos(s.Colon) - p.print(token.COLON) - p.stmtList(s.Body, 1, nextIsRBrace) - - case *ast.SelectStmt: - p.print(token.SELECT, blank) - body := s.Body - if len(body.List) == 0 && !p.commentBefore(p.posFor(body.Rbrace)) { - // print empty select statement w/o comments on one line - p.setPos(body.Lbrace) - p.print(token.LBRACE) - p.setPos(body.Rbrace) - p.print(token.RBRACE) - } else { - p.block(body, 0) - } - - case *ast.ForStmt: - p.print(token.FOR) - p.controlClause(true, s.Init, s.Cond, s.Post) - p.block(s.Body, 1) - - case *ast.RangeStmt: - p.print(token.FOR, blank) - if s.Key != nil { - p.expr(s.Key) - if s.Value != nil { - // use position of value following the comma as - // comma position for correct comment placement - p.setPos(s.Value.Pos()) - p.print(token.COMMA, blank) - p.expr(s.Value) - } - p.print(blank) - p.setPos(s.TokPos) - p.print(s.Tok, blank) - } - p.print(token.RANGE, blank) - p.expr(stripParens(s.X)) - p.print(blank) - p.block(s.Body, 1) - - default: - panic("unreachable") - } -} - -// ---------------------------------------------------------------------------- -// Declarations - -// The keepTypeColumn function determines if the type column of a series of -// consecutive const or var declarations must be kept, or if initialization -// values (V) can be placed in the type column (T) instead. The i'th entry -// in the result slice is true if the type column in spec[i] must be kept. -// -// For example, the declaration: -// -// const ( -// foobar int = 42 // comment -// x = 7 // comment -// foo -// bar = 991 -// ) -// -// leads to the type/values matrix below. A run of value columns (V) can -// be moved into the type column if there is no type for any of the values -// in that column (we only move entire columns so that they align properly). 
-// -// matrix formatted result -// matrix -// T V -> T V -> true there is a T and so the type -// - V - V true column must be kept -// - - - - false -// - V V - false V is moved into T column -func keepTypeColumn(specs []ast.Spec) []bool { - m := make([]bool, len(specs)) - - populate := func(i, j int, keepType bool) { - if keepType { - for ; i < j; i++ { - m[i] = true - } - } - } - - i0 := -1 // if i0 >= 0 we are in a run and i0 is the start of the run - var keepType bool - for i, s := range specs { - t := s.(*ast.ValueSpec) - if t.Values != nil { - if i0 < 0 { - // start of a run of ValueSpecs with non-nil Values - i0 = i - keepType = false - } - } else { - if i0 >= 0 { - // end of a run - populate(i0, i, keepType) - i0 = -1 - } - } - if t.Type != nil { - keepType = true - } - } - if i0 >= 0 { - // end of a run - populate(i0, len(specs), keepType) - } - - return m -} - -func (p *printer) valueSpec(s *ast.ValueSpec, keepType bool) { - p.setComment(s.Doc) - p.identList(s.Names, false) // always present - extraTabs := 3 - if s.Type != nil || keepType { - p.print(vtab) - extraTabs-- - } - if s.Type != nil { - p.expr(s.Type) - } - if s.Values != nil { - p.print(vtab, token.ASSIGN, blank) - p.exprList(token.NoPos, s.Values, 1, 0, token.NoPos, false) - extraTabs-- - } - if s.Comment != nil { - for ; extraTabs > 0; extraTabs-- { - p.print(vtab) - } - p.setComment(s.Comment) - } -} - -func sanitizeImportPath(lit *ast.BasicLit) *ast.BasicLit { - // Note: An unmodified AST generated by go/parser will already - // contain a backward- or double-quoted path string that does - // not contain any invalid characters, and most of the work - // here is not needed. However, a modified or generated AST - // may possibly contain non-canonical paths. Do the work in - // all cases since it's not too hard and not speed-critical. - - // if we don't have a proper string, be conservative and return whatever we have - if lit.Kind != token.STRING { - return lit - } - s, err := strconv.Unquote(lit.Value) - if err != nil { - return lit - } - - // if the string is an invalid path, return whatever we have - // - // spec: "Implementation restriction: A compiler may restrict - // ImportPaths to non-empty strings using only characters belonging - // to Unicode's L, M, N, P, and S general categories (the Graphic - // characters without spaces) and may also exclude the characters - // !"#$%&'()*,:;<=>?[\]^`{|} and the Unicode replacement character - // U+FFFD." - if s == "" { - return lit - } - const illegalChars = `!"#$%&'()*,:;<=>?[\]^{|}` + "`\uFFFD" - for _, r := range s { - if !unicode.IsGraphic(r) || unicode.IsSpace(r) || strings.ContainsRune(illegalChars, r) { - return lit - } - } - - // otherwise, return the double-quoted path - s = strconv.Quote(s) - if s == lit.Value { - return lit // nothing wrong with lit - } - return &ast.BasicLit{ValuePos: lit.ValuePos, Kind: token.STRING, Value: s} -} - -// The parameter n is the number of specs in the group. If doIndent is set, -// multi-line identifier lists in the spec are indented when the first -// linebreak is encountered. 
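The matrix above maps directly onto grouped const declarations. A hypothetical group in the shape of the example from the comment, annotated with what keepTypeColumn decides for each run:

package main

const (
	// Run 1: one spec carries a type, so the whole run keeps the type
	// column and both values stay in the value column.
	foobar int = 42
	x          = 7
	// foo has no value and ends the run (it repeats "= 7" implicitly).
	foo
	// Run 2: no types anywhere, so the value may move into the type column.
	bar = 991
)

func main() {}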
-func (p *printer) spec(spec ast.Spec, n int, doIndent bool) { - switch s := spec.(type) { - case *ast.ImportSpec: - p.setComment(s.Doc) - if s.Name != nil { - p.expr(s.Name) - p.print(blank) - } - p.expr(sanitizeImportPath(s.Path)) - p.setComment(s.Comment) - p.setPos(s.EndPos) - - case *ast.ValueSpec: - if n != 1 { - p.internalError("expected n = 1; got", n) - } - p.setComment(s.Doc) - p.identList(s.Names, doIndent) // always present - if s.Type != nil { - p.print(blank) - p.expr(s.Type) - } - if s.Values != nil { - p.print(blank, token.ASSIGN, blank) - p.exprList(token.NoPos, s.Values, 1, 0, token.NoPos, false) - } - p.setComment(s.Comment) - - case *ast.TypeSpec: - p.setComment(s.Doc) - p.expr(s.Name) - if s.TypeParams != nil { - p.parameters(s.TypeParams, typeTParam) - } - if n == 1 { - p.print(blank) - } else { - p.print(vtab) - } - if s.Assign.IsValid() { - p.print(token.ASSIGN, blank) - } - p.expr(s.Type) - p.setComment(s.Comment) - - default: - panic("unreachable") - } -} - -func (p *printer) genDecl(d *ast.GenDecl) { - p.setComment(d.Doc) - p.setPos(d.Pos()) - p.print(d.Tok, blank) - - if d.Lparen.IsValid() || len(d.Specs) > 1 { - // group of parenthesized declarations - p.setPos(d.Lparen) - p.print(token.LPAREN) - if n := len(d.Specs); n > 0 { - p.print(indent, formfeed) - if n > 1 && (d.Tok == token.CONST || d.Tok == token.VAR) { - // two or more grouped const/var declarations: - // determine if the type column must be kept - keepType := keepTypeColumn(d.Specs) - var line int - for i, s := range d.Specs { - if i > 0 { - p.linebreak(p.lineFor(s.Pos()), 1, ignore, p.linesFrom(line) > 0) - } - p.recordLine(&line) - p.valueSpec(s.(*ast.ValueSpec), keepType[i]) - } - } else { - var line int - for i, s := range d.Specs { - if i > 0 { - p.linebreak(p.lineFor(s.Pos()), 1, ignore, p.linesFrom(line) > 0) - } - p.recordLine(&line) - p.spec(s, n, false) - } - } - p.print(unindent, formfeed) - } - p.setPos(d.Rparen) - p.print(token.RPAREN) - - } else if len(d.Specs) > 0 { - // single declaration - p.spec(d.Specs[0], 1, true) - } -} - -// sizeCounter is an io.Writer which counts the number of bytes written, -// as well as whether a newline character was seen. -type sizeCounter struct { - hasNewline bool - size int -} - -func (c *sizeCounter) Write(p []byte) (int, error) { - if !c.hasNewline { - for _, b := range p { - if b == '\n' || b == '\f' { - c.hasNewline = true - break - } - } - } - c.size += len(p) - return len(p), nil -} - -// nodeSize determines the size of n in chars after formatting. -// The result is <= maxSize if the node fits on one line with at -// most maxSize chars and the formatted output doesn't contain -// any control chars. Otherwise, the result is > maxSize. -func (p *printer) nodeSize(n ast.Node, maxSize int) (size int) { - // nodeSize invokes the printer, which may invoke nodeSize - // recursively. For deep composite literal nests, this can - // lead to an exponential algorithm. Remember previous - // results to prune the recursion (was issue 1628). 
- if size, found := p.nodeSizes[n]; found { - return size - } - - size = maxSize + 1 // assume n doesn't fit - p.nodeSizes[n] = size - - // nodeSize computation must be independent of particular - // style so that we always get the same decision; print - // in RawFormat - cfg := Config{Mode: RawFormat} - var counter sizeCounter - if err := cfg.fprint(&counter, p.fset, n, p.nodeSizes); err != nil { - return - } - if counter.size <= maxSize && !counter.hasNewline { - // n fits in a single line - size = counter.size - p.nodeSizes[n] = size - } - return -} - -// numLines returns the number of lines spanned by node n in the original source. -func (p *printer) numLines(n ast.Node) int { - if from := n.Pos(); from.IsValid() { - if to := n.End(); to.IsValid() { - return p.lineFor(to) - p.lineFor(from) + 1 - } - } - return infinity -} - -// bodySize is like nodeSize but it is specialized for *ast.BlockStmt's. -func (p *printer) bodySize(b *ast.BlockStmt, maxSize int) int { - pos1 := b.Pos() - pos2 := b.Rbrace - if pos1.IsValid() && pos2.IsValid() && p.lineFor(pos1) != p.lineFor(pos2) { - // opening and closing brace are on different lines - don't make it a one-liner - return maxSize + 1 - } - if len(b.List) > 5 { - // too many statements - don't make it a one-liner - return maxSize + 1 - } - // otherwise, estimate body size - bodySize := p.commentSizeBefore(p.posFor(pos2)) - for i, s := range b.List { - if bodySize > maxSize { - break // no need to continue - } - if i > 0 { - bodySize += 2 // space for a semicolon and blank - } - bodySize += p.nodeSize(s, maxSize) - } - return bodySize -} - -// funcBody prints a function body following a function header of given headerSize. -// If the header's and block's size are "small enough" and the block is "simple enough", -// the block is printed on the current line, without line breaks, spaced from the header -// by sep. Otherwise the block's opening "{" is printed on the current line, followed by -// lines for the block's statements and its closing "}". -func (p *printer) funcBody(headerSize int, sep whiteSpace, b *ast.BlockStmt) { - if b == nil { - return - } - - // save/restore composite literal nesting level - defer func(level int) { - p.level = level - }(p.level) - p.level = 0 - - const maxSize = 100 - if headerSize+p.bodySize(b, maxSize) <= maxSize { - p.print(sep) - p.setPos(b.Lbrace) - p.print(token.LBRACE) - if len(b.List) > 0 { - p.print(blank) - for i, s := range b.List { - if i > 0 { - p.print(token.SEMICOLON, blank) - } - p.stmt(s, i == len(b.List)-1) - } - p.print(blank) - } - p.print(noExtraLinebreak) - p.setPos(b.Rbrace) - p.print(token.RBRACE, noExtraLinebreak) - return - } - - if sep != ignore { - p.print(blank) // always use blank - } - p.block(b, 1) -} - -// distanceFrom returns the column difference between p.out (the current output -// position) and startOutCol. If the start position is on a different line from -// the current position (or either is unknown), the result is infinity. -func (p *printer) distanceFrom(startPos token.Pos, startOutCol int) int { - if startPos.IsValid() && p.pos.IsValid() && p.posFor(startPos).Line == p.pos.Line { - return p.out.Column - startOutCol - } - return infinity -} - -func (p *printer) funcDecl(d *ast.FuncDecl) { - p.setComment(d.Doc) - p.setPos(d.Pos()) - p.print(token.FUNC, blank) - // We have to save startCol only after emitting FUNC; otherwise it can be on a - // different line (all whitespace preceding the FUNC is emitted only when the - // FUNC is emitted). 
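nodeSize above memoizes per-node results precisely because the printer may be invoked recursively on nested literals. A minimal, hypothetical sketch of the same pruning idea on a toy tree (not the printer's actual types or cache):

package main

import "fmt"

// node is a stand-in for a nested composite literal: its size is the
// length of its own label plus the sizes of its children.
type node struct {
	label    string
	children []*node
}

// size caches one result per node, mirroring how nodeSize stores into
// p.nodeSizes so that a deeply nested node is measured only once rather
// than once per enclosing node.
func size(n *node, memo map[*node]int) int {
	if s, ok := memo[n]; ok {
		return s
	}
	s := len(n.label)
	for _, c := range n.children {
		s += size(c, memo)
	}
	memo[n] = s
	return s
}

func main() {
	leaf := &node{label: "x"}
	inner := &node{label: "inner", children: []*node{leaf, leaf}}
	outer := &node{label: "outer", children: []*node{inner, inner}}
	fmt.Println(size(outer, map[*node]int{})) // 5 + 2*(5+2*1) = 19
}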
- startCol := p.out.Column - len("func ") - if d.Recv != nil { - p.parameters(d.Recv, funcParam) // method: print receiver - p.print(blank) - } - p.expr(d.Name) - p.signature(d.Type) - p.funcBody(p.distanceFrom(d.Pos(), startCol), vtab, d.Body) -} - -func (p *printer) decl(decl ast.Decl) { - switch d := decl.(type) { - case *ast.BadDecl: - p.setPos(d.Pos()) - p.print("BadDecl") - case *ast.GenDecl: - p.genDecl(d) - case *ast.FuncDecl: - p.funcDecl(d) - default: - panic("unreachable") - } -} - -// ---------------------------------------------------------------------------- -// Files - -func declToken(decl ast.Decl) (tok token.Token) { - tok = token.ILLEGAL - switch d := decl.(type) { - case *ast.GenDecl: - tok = d.Tok - case *ast.FuncDecl: - tok = token.FUNC - } - return -} - -func (p *printer) declList(list []ast.Decl) { - tok := token.ILLEGAL - for _, d := range list { - prev := tok - tok = declToken(d) - // If the declaration token changed (e.g., from CONST to TYPE) - // or the next declaration has documentation associated with it, - // print an empty line between top-level declarations. - // (because p.linebreak is called with the position of d, which - // is past any documentation, the minimum requirement is satisfied - // even w/o the extra getDoc(d) nil-check - leave it in case the - // linebreak logic improves - there's already a TODO). - if len(p.output) > 0 { - // only print line break if we are not at the beginning of the output - // (i.e., we are not printing only a partial program) - min := 1 - if prev != tok || getDoc(d) != nil { - min = 2 - } - // start a new section if the next declaration is a function - // that spans multiple lines (see also issue #19544) - p.linebreak(p.lineFor(d.Pos()), min, ignore, tok == token.FUNC && p.numLines(d) > 1) - } - p.decl(d) - } -} - -func (p *printer) file(src *ast.File) { - p.setComment(src.Doc) - p.setPos(src.Pos()) - p.print(token.PACKAGE, blank) - p.expr(src.Name) - p.declList(src.Decls) - p.print(newline) -} diff --git a/vendor/mvdan.cc/gofumpt/internal/govendor/go/printer/printer.go b/vendor/mvdan.cc/gofumpt/internal/govendor/go/printer/printer.go deleted file mode 100644 index 2ab0278b0..000000000 --- a/vendor/mvdan.cc/gofumpt/internal/govendor/go/printer/printer.go +++ /dev/null @@ -1,1435 +0,0 @@ -// Copyright 2009 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Package printer implements printing of AST nodes. -package printer - -import ( - "fmt" - "go/ast" - "go/build/constraint" - "go/token" - "io" - "os" - "strings" - "sync" - "text/tabwriter" - "unicode" -) - -const ( - maxNewlines = 2 // max. number of newlines between source text - debug = false // enable for debugging - infinity = 1 << 30 -) - -type whiteSpace byte - -const ( - ignore = whiteSpace(0) - blank = whiteSpace(' ') - vtab = whiteSpace('\v') - newline = whiteSpace('\n') - formfeed = whiteSpace('\f') - indent = whiteSpace('>') - unindent = whiteSpace('<') -) - -// A pmode value represents the current printer mode. 
-type pmode int - -const ( - noExtraBlank pmode = 1 << iota // disables extra blank after /*-style comment - noExtraLinebreak // disables extra line break after /*-style comment -) - -type commentInfo struct { - cindex int // current comment index - comment *ast.CommentGroup // = printer.comments[cindex]; or nil - commentOffset int // = printer.posFor(printer.comments[cindex].List[0].Pos()).Offset; or infinity - commentNewline bool // true if the comment group contains newlines -} - -type printer struct { - // Configuration (does not change after initialization) - Config - fset *token.FileSet - - // Current state - output []byte // raw printer result - indent int // current indentation - level int // level == 0: outside composite literal; level > 0: inside composite literal - mode pmode // current printer mode - endAlignment bool // if set, terminate alignment immediately - impliedSemi bool // if set, a linebreak implies a semicolon - lastTok token.Token // last token printed (token.ILLEGAL if it's whitespace) - prevOpen token.Token // previous non-brace "open" token (, [, or token.ILLEGAL - wsbuf []whiteSpace // delayed white space - goBuild []int // start index of all //go:build comments in output - plusBuild []int // start index of all // +build comments in output - - // Positions - // The out position differs from the pos position when the result - // formatting differs from the source formatting (in the amount of - // white space). If there's a difference and SourcePos is set in - // ConfigMode, //line directives are used in the output to restore - // original source positions for a reader. - pos token.Position // current position in AST (source) space - out token.Position // current position in output space - last token.Position // value of pos after calling writeString - linePtr *int // if set, record out.Line for the next token in *linePtr - sourcePosErr error // if non-nil, the first error emitting a //line directive - - // The list of all source comments, in order of appearance. - comments []*ast.CommentGroup // may be nil - useNodeComments bool // if not set, ignore lead and line comments of nodes - - // Information about p.comments[p.cindex]; set up by nextComment. - commentInfo - - // Cache of already computed node sizes. - nodeSizes map[ast.Node]int - - // Cache of most recently computed line position. - cachedPos token.Pos - cachedLine int // line corresponding to cachedPos -} - -func (p *printer) internalError(msg ...any) { - if debug { - fmt.Print(p.pos.String() + ": ") - fmt.Println(msg...) - panic("mvdan.cc/gofumpt/internal/govendor/go/printer") - } -} - -// commentsHaveNewline reports whether a list of comments belonging to -// an *ast.CommentGroup contains newlines. Because the position information -// may only be partially correct, we also have to read the comment text. 
-func (p *printer) commentsHaveNewline(list []*ast.Comment) bool { - // len(list) > 0 - line := p.lineFor(list[0].Pos()) - for i, c := range list { - if i > 0 && p.lineFor(list[i].Pos()) != line { - // not all comments on the same line - return true - } - if t := c.Text; len(t) >= 2 && (t[1] == '/' || strings.Contains(t, "\n")) { - return true - } - } - _ = line - return false -} - -func (p *printer) nextComment() { - for p.cindex < len(p.comments) { - c := p.comments[p.cindex] - p.cindex++ - if list := c.List; len(list) > 0 { - p.comment = c - p.commentOffset = p.posFor(list[0].Pos()).Offset - p.commentNewline = p.commentsHaveNewline(list) - return - } - // we should not reach here (correct ASTs don't have empty - // ast.CommentGroup nodes), but be conservative and try again - } - // no more comments - p.commentOffset = infinity -} - -// commentBefore reports whether the current comment group occurs -// before the next position in the source code and printing it does -// not introduce implicit semicolons. -func (p *printer) commentBefore(next token.Position) bool { - return p.commentOffset < next.Offset && (!p.impliedSemi || !p.commentNewline) -} - -// commentSizeBefore returns the estimated size of the -// comments on the same line before the next position. -func (p *printer) commentSizeBefore(next token.Position) int { - // save/restore current p.commentInfo (p.nextComment() modifies it) - defer func(info commentInfo) { - p.commentInfo = info - }(p.commentInfo) - - size := 0 - for p.commentBefore(next) { - for _, c := range p.comment.List { - size += len(c.Text) - } - p.nextComment() - } - return size -} - -// recordLine records the output line number for the next non-whitespace -// token in *linePtr. It is used to compute an accurate line number for a -// formatted construct, independent of pending (not yet emitted) whitespace -// or comments. -func (p *printer) recordLine(linePtr *int) { - p.linePtr = linePtr -} - -// linesFrom returns the number of output lines between the current -// output line and the line argument, ignoring any pending (not yet -// emitted) whitespace or comments. It is used to compute an accurate -// size (in number of lines) for a formatted construct. -func (p *printer) linesFrom(line int) int { - return p.out.Line - line -} - -func (p *printer) posFor(pos token.Pos) token.Position { - // not used frequently enough to cache entire token.Position - return p.fset.PositionFor(pos, false /* absolute position */) -} - -func (p *printer) lineFor(pos token.Pos) int { - if pos != p.cachedPos { - p.cachedPos = pos - p.cachedLine = p.fset.PositionFor(pos, false /* absolute position */).Line - } - return p.cachedLine -} - -// writeLineDirective writes a //line directive if necessary. -func (p *printer) writeLineDirective(pos token.Position) { - if pos.IsValid() && (p.out.Line != pos.Line || p.out.Filename != pos.Filename) { - if strings.ContainsAny(pos.Filename, "\r\n") { - if p.sourcePosErr == nil { - p.sourcePosErr = fmt.Errorf("mvdan.cc/gofumpt/internal/govendor/go/printer: source filename contains unexpected newline character: %q", pos.Filename) - } - return - } - - p.output = append(p.output, tabwriter.Escape) // protect '\n' in //line from tabwriter interpretation - p.output = append(p.output, fmt.Sprintf("//line %s:%d\n", pos.Filename, pos.Line)...) - p.output = append(p.output, tabwriter.Escape) - // p.out must match the //line directive - p.out.Filename = pos.Filename - p.out.Line = pos.Line - } -} - -// writeIndent writes indentation. 
-func (p *printer) writeIndent() { - // use "hard" htabs - indentation columns - // must not be discarded by the tabwriter - n := p.Config.Indent + p.indent // include base indentation - for i := 0; i < n; i++ { - p.output = append(p.output, '\t') - } - - // update positions - p.pos.Offset += n - p.pos.Column += n - p.out.Column += n -} - -// writeByte writes ch n times to p.output and updates p.pos. -// Only used to write formatting (white space) characters. -func (p *printer) writeByte(ch byte, n int) { - if p.endAlignment { - // Ignore any alignment control character; - // and at the end of the line, break with - // a formfeed to indicate termination of - // existing columns. - switch ch { - case '\t', '\v': - ch = ' ' - case '\n', '\f': - ch = '\f' - p.endAlignment = false - } - } - - if p.out.Column == 1 { - // no need to write line directives before white space - p.writeIndent() - } - - for i := 0; i < n; i++ { - p.output = append(p.output, ch) - } - - // update positions - p.pos.Offset += n - if ch == '\n' || ch == '\f' { - p.pos.Line += n - p.out.Line += n - p.pos.Column = 1 - p.out.Column = 1 - return - } - p.pos.Column += n - p.out.Column += n -} - -// writeString writes the string s to p.output and updates p.pos, p.out, -// and p.last. If isLit is set, s is escaped w/ tabwriter.Escape characters -// to protect s from being interpreted by the tabwriter. -// -// Note: writeString is only used to write Go tokens, literals, and -// comments, all of which must be written literally. Thus, it is correct -// to always set isLit = true. However, setting it explicitly only when -// needed (i.e., when we don't know that s contains no tabs or line breaks) -// avoids processing extra escape characters and reduces run time of the -// printer benchmark by up to 10%. -func (p *printer) writeString(pos token.Position, s string, isLit bool) { - if p.out.Column == 1 { - if p.Config.Mode&SourcePos != 0 { - p.writeLineDirective(pos) - } - p.writeIndent() - } - - if pos.IsValid() { - // update p.pos (if pos is invalid, continue with existing p.pos) - // Note: Must do this after handling line beginnings because - // writeIndent updates p.pos if there's indentation, but p.pos - // is the position of s. - p.pos = pos - } - - if isLit { - // Protect s such that is passes through the tabwriter - // unchanged. Note that valid Go programs cannot contain - // tabwriter.Escape bytes since they do not appear in legal - // UTF-8 sequences. - p.output = append(p.output, tabwriter.Escape) - } - - if debug { - p.output = append(p.output, fmt.Sprintf("/*%s*/", pos)...) // do not update p.pos! - } - p.output = append(p.output, s...) - - // update positions - nlines := 0 - var li int // index of last newline; valid if nlines > 0 - for i := 0; i < len(s); i++ { - // Raw string literals may contain any character except back quote (`). - if ch := s[i]; ch == '\n' || ch == '\f' { - // account for line break - nlines++ - li = i - // A line break inside a literal will break whatever column - // formatting is in place; ignore any further alignment through - // the end of the line. - p.endAlignment = true - } - } - p.pos.Offset += len(s) - if nlines > 0 { - p.pos.Line += nlines - p.out.Line += nlines - c := len(s) - li - p.pos.Column = c - p.out.Column = c - } else { - p.pos.Column += len(s) - p.out.Column += len(s) - } - - if isLit { - p.output = append(p.output, tabwriter.Escape) - } - - p.last = p.pos -} - -// writeCommentPrefix writes the whitespace before a comment. 
-// If there is any pending whitespace, it consumes as much of -// it as is likely to help position the comment nicely. -// pos is the comment position, next the position of the item -// after all pending comments, prev is the previous comment in -// a group of comments (or nil), and tok is the next token. -func (p *printer) writeCommentPrefix(pos, next token.Position, prev *ast.Comment, tok token.Token) { - if len(p.output) == 0 { - // the comment is the first item to be printed - don't write any whitespace - return - } - - if pos.IsValid() && pos.Filename != p.last.Filename { - // comment in a different file - separate with newlines - p.writeByte('\f', maxNewlines) - return - } - - if pos.Line == p.last.Line && (prev == nil || prev.Text[1] != '/') { - // comment on the same line as last item: - // separate with at least one separator - hasSep := false - if prev == nil { - // first comment of a comment group - j := 0 - for i, ch := range p.wsbuf { - switch ch { - case blank: - // ignore any blanks before a comment - p.wsbuf[i] = ignore - continue - case vtab: - // respect existing tabs - important - // for proper formatting of commented structs - hasSep = true - continue - case indent: - // apply pending indentation - continue - } - j = i - break - } - p.writeWhitespace(j) - } - // make sure there is at least one separator - if !hasSep { - sep := byte('\t') - if pos.Line == next.Line { - // next item is on the same line as the comment - // (which must be a /*-style comment): separate - // with a blank instead of a tab - sep = ' ' - } - p.writeByte(sep, 1) - } - - } else { - // comment on a different line: - // separate with at least one line break - droppedLinebreak := false - j := 0 - for i, ch := range p.wsbuf { - switch ch { - case blank, vtab: - // ignore any horizontal whitespace before line breaks - p.wsbuf[i] = ignore - continue - case indent: - // apply pending indentation - continue - case unindent: - // if this is not the last unindent, apply it - // as it is (likely) belonging to the last - // construct (e.g., a multi-line expression list) - // and is not part of closing a block - if i+1 < len(p.wsbuf) && p.wsbuf[i+1] == unindent { - continue - } - // if the next token is not a closing }, apply the unindent - // if it appears that the comment is aligned with the - // token; otherwise assume the unindent is part of a - // closing block and stop (this scenario appears with - // comments before a case label where the comments - // apply to the next case instead of the current one) - if tok != token.RBRACE && pos.Column == next.Column { - continue - } - case newline, formfeed: - p.wsbuf[i] = ignore - droppedLinebreak = prev == nil // record only if first comment of a group - } - j = i - break - } - p.writeWhitespace(j) - - // determine number of linebreaks before the comment - n := 0 - if pos.IsValid() && p.last.IsValid() { - n = pos.Line - p.last.Line - if n < 0 { // should never happen - n = 0 - } - } - - // at the package scope level only (p.indent == 0), - // add an extra newline if we dropped one before: - // this preserves a blank line before documentation - // comments at the package scope level (issue 2570) - if p.indent == 0 && droppedLinebreak { - n++ - } - - // make sure there is at least one line break - // if the previous comment was a line comment - if n == 0 && prev != nil && prev.Text[1] == '/' { - n = 1 - } - - if n > 0 { - // use formfeeds to break columns before a comment; - // this is analogous to using formfeeds to separate - // individual lines of /*-style 
comments - p.writeByte('\f', nlimit(n)) - } - } -} - -// Returns true if s contains only white space -// (only tabs and blanks can appear in the printer's context). -func isBlank(s string) bool { - for i := 0; i < len(s); i++ { - if s[i] > ' ' { - return false - } - } - return true -} - -// commonPrefix returns the common prefix of a and b. -func commonPrefix(a, b string) string { - i := 0 - for i < len(a) && i < len(b) && a[i] == b[i] && (a[i] <= ' ' || a[i] == '*') { - i++ - } - return a[0:i] -} - -// trimRight returns s with trailing whitespace removed. -func trimRight(s string) string { - return strings.TrimRightFunc(s, unicode.IsSpace) -} - -// stripCommonPrefix removes a common prefix from /*-style comment lines (unless no -// comment line is indented, all but the first line have some form of space prefix). -// The prefix is computed using heuristics such that is likely that the comment -// contents are nicely laid out after re-printing each line using the printer's -// current indentation. -func stripCommonPrefix(lines []string) { - if len(lines) <= 1 { - return // at most one line - nothing to do - } - // len(lines) > 1 - - // The heuristic in this function tries to handle a few - // common patterns of /*-style comments: Comments where - // the opening /* and closing */ are aligned and the - // rest of the comment text is aligned and indented with - // blanks or tabs, cases with a vertical "line of stars" - // on the left, and cases where the closing */ is on the - // same line as the last comment text. - - // Compute maximum common white prefix of all but the first, - // last, and blank lines, and replace blank lines with empty - // lines (the first line starts with /* and has no prefix). - // In cases where only the first and last lines are not blank, - // such as two-line comments, or comments where all inner lines - // are blank, consider the last line for the prefix computation - // since otherwise the prefix would be empty. - // - // Note that the first and last line are never empty (they - // contain the opening /* and closing */ respectively) and - // thus they can be ignored by the blank line check. - prefix := "" - prefixSet := false - if len(lines) > 2 { - for i, line := range lines[1 : len(lines)-1] { - if isBlank(line) { - lines[1+i] = "" // range starts with lines[1] - } else { - if !prefixSet { - prefix = line - prefixSet = true - } - prefix = commonPrefix(prefix, line) - } - } - } - // If we don't have a prefix yet, consider the last line. - if !prefixSet { - line := lines[len(lines)-1] - prefix = commonPrefix(line, line) - } - - /* - * Check for vertical "line of stars" and correct prefix accordingly. - */ - lineOfStars := false - if p, _, ok := strings.Cut(prefix, "*"); ok { - // remove trailing blank from prefix so stars remain aligned - prefix = strings.TrimSuffix(p, " ") - lineOfStars = true - } else { - // No line of stars present. - // Determine the white space on the first line after the /* - // and before the beginning of the comment text, assume two - // blanks instead of the /* unless the first character after - // the /* is a tab. If the first comment line is empty but - // for the opening /*, assume up to 3 blanks or a tab. This - // whitespace may be found as suffix in the common prefix. 
- first := lines[0] - if isBlank(first[2:]) { - // no comment text on the first line: - // reduce prefix by up to 3 blanks or a tab - // if present - this keeps comment text indented - // relative to the /* and */'s if it was indented - // in the first place - i := len(prefix) - for n := 0; n < 3 && i > 0 && prefix[i-1] == ' '; n++ { - i-- - } - if i == len(prefix) && i > 0 && prefix[i-1] == '\t' { - i-- - } - prefix = prefix[0:i] - } else { - // comment text on the first line - suffix := make([]byte, len(first)) - n := 2 // start after opening /* - for n < len(first) && first[n] <= ' ' { - suffix[n] = first[n] - n++ - } - if n > 2 && suffix[2] == '\t' { - // assume the '\t' compensates for the /* - suffix = suffix[2:n] - } else { - // otherwise assume two blanks - suffix[0], suffix[1] = ' ', ' ' - suffix = suffix[0:n] - } - // Shorten the computed common prefix by the length of - // suffix, if it is found as suffix of the prefix. - prefix = strings.TrimSuffix(prefix, string(suffix)) - } - } - - // Handle last line: If it only contains a closing */, align it - // with the opening /*, otherwise align the text with the other - // lines. - last := lines[len(lines)-1] - closing := "*/" - before, _, _ := strings.Cut(last, closing) // closing always present - if isBlank(before) { - // last line only contains closing */ - if lineOfStars { - closing = " */" // add blank to align final star - } - lines[len(lines)-1] = prefix + closing - } else { - // last line contains more comment text - assume - // it is aligned like the other lines and include - // in prefix computation - prefix = commonPrefix(prefix, last) - } - - // Remove the common prefix from all but the first and empty lines. - for i, line := range lines { - if i > 0 && line != "" { - lines[i] = line[len(prefix):] - } - } -} - -func (p *printer) writeComment(comment *ast.Comment) { - text := comment.Text - pos := p.posFor(comment.Pos()) - - const linePrefix = "//line " - if strings.HasPrefix(text, linePrefix) && (!pos.IsValid() || pos.Column == 1) { - // Possibly a //-style line directive. - // Suspend indentation temporarily to keep line directive valid. - defer func(indent int) { p.indent = indent }(p.indent) - p.indent = 0 - } - - // shortcut common case of //-style comments - if text[1] == '/' { - if constraint.IsGoBuild(text) { - p.goBuild = append(p.goBuild, len(p.output)) - } else if constraint.IsPlusBuild(text) { - p.plusBuild = append(p.plusBuild, len(p.output)) - } - p.writeString(pos, trimRight(text), true) - return - } - - // for /*-style comments, print line by line and let the - // write function take care of the proper indentation - lines := strings.Split(text, "\n") - - // The comment started in the first column but is going - // to be indented. For an idempotent result, add indentation - // to all lines such that they look like they were indented - // before - this will make sure the common prefix computation - // is the same independent of how many times formatting is - // applied (was issue 1835). 
- if pos.IsValid() && pos.Column == 1 && p.indent > 0 { - for i, line := range lines[1:] { - lines[1+i] = " " + line - } - } - - stripCommonPrefix(lines) - - // write comment lines, separated by formfeed, - // without a line break after the last line - for i, line := range lines { - if i > 0 { - p.writeByte('\f', 1) - pos = p.pos - } - if len(line) > 0 { - p.writeString(pos, trimRight(line), true) - } - } -} - -// writeCommentSuffix writes a line break after a comment if indicated -// and processes any leftover indentation information. If a line break -// is needed, the kind of break (newline vs formfeed) depends on the -// pending whitespace. The writeCommentSuffix result indicates if a -// newline was written or if a formfeed was dropped from the whitespace -// buffer. -func (p *printer) writeCommentSuffix(needsLinebreak bool) (wroteNewline, droppedFF bool) { - for i, ch := range p.wsbuf { - switch ch { - case blank, vtab: - // ignore trailing whitespace - p.wsbuf[i] = ignore - case indent, unindent: - // don't lose indentation information - case newline, formfeed: - // if we need a line break, keep exactly one - // but remember if we dropped any formfeeds - if needsLinebreak { - needsLinebreak = false - wroteNewline = true - } else { - if ch == formfeed { - droppedFF = true - } - p.wsbuf[i] = ignore - } - } - } - p.writeWhitespace(len(p.wsbuf)) - - // make sure we have a line break - if needsLinebreak { - p.writeByte('\n', 1) - wroteNewline = true - } - - return -} - -// containsLinebreak reports whether the whitespace buffer contains any line breaks. -func (p *printer) containsLinebreak() bool { - for _, ch := range p.wsbuf { - if ch == newline || ch == formfeed { - return true - } - } - return false -} - -// intersperseComments consumes all comments that appear before the next token -// tok and prints it together with the buffered whitespace (i.e., the whitespace -// that needs to be written before the next token). A heuristic is used to mix -// the comments and whitespace. The intersperseComments result indicates if a -// newline was written or if a formfeed was dropped from the whitespace buffer. -func (p *printer) intersperseComments(next token.Position, tok token.Token) (wroteNewline, droppedFF bool) { - var last *ast.Comment - for p.commentBefore(next) { - list := p.comment.List - changed := false - if p.lastTok != token.IMPORT && // do not rewrite cgo's import "C" comments - p.posFor(p.comment.Pos()).Column == 1 && - p.posFor(p.comment.End()+1) == next { - // Unindented comment abutting next token position: - // a top-level doc comment. - list = formatDocComment(list) - changed = true - - if len(p.comment.List) > 0 && len(list) == 0 { - // The doc comment was removed entirely. - // Keep preceding whitespace. - p.writeCommentPrefix(p.posFor(p.comment.Pos()), next, last, tok) - // Change print state to continue at next. - p.pos = next - p.last = next - // There can't be any more comments. - p.nextComment() - return p.writeCommentSuffix(false) - } - } - for _, c := range list { - p.writeCommentPrefix(p.posFor(c.Pos()), next, last, tok) - p.writeComment(c) - last = c - } - // In case list was rewritten, change print state to where - // the original list would have ended. 
- if len(p.comment.List) > 0 && changed { - last = p.comment.List[len(p.comment.List)-1] - p.pos = p.posFor(last.End()) - p.last = p.pos - } - p.nextComment() - } - - if last != nil { - // If the last comment is a /*-style comment and the next item - // follows on the same line but is not a comma, and not a "closing" - // token immediately following its corresponding "opening" token, - // add an extra separator unless explicitly disabled. Use a blank - // as separator unless we have pending linebreaks, they are not - // disabled, and we are outside a composite literal, in which case - // we want a linebreak (issue 15137). - // TODO(gri) This has become overly complicated. We should be able - // to track whether we're inside an expression or statement and - // use that information to decide more directly. - needsLinebreak := false - if p.mode&noExtraBlank == 0 && - last.Text[1] == '*' && p.lineFor(last.Pos()) == next.Line && - tok != token.COMMA && - (tok != token.RPAREN || p.prevOpen == token.LPAREN) && - (tok != token.RBRACK || p.prevOpen == token.LBRACK) { - if p.containsLinebreak() && p.mode&noExtraLinebreak == 0 && p.level == 0 { - needsLinebreak = true - } else { - p.writeByte(' ', 1) - } - } - // Ensure that there is a line break after a //-style comment, - // before EOF, and before a closing '}' unless explicitly disabled. - if last.Text[1] == '/' || - tok == token.EOF || - tok == token.RBRACE && p.mode&noExtraLinebreak == 0 { - needsLinebreak = true - } - return p.writeCommentSuffix(needsLinebreak) - } - - // no comment was written - we should never reach here since - // intersperseComments should not be called in that case - p.internalError("intersperseComments called without pending comments") - return -} - -// writeWhitespace writes the first n whitespace entries. -func (p *printer) writeWhitespace(n int) { - // write entries - for i := 0; i < n; i++ { - switch ch := p.wsbuf[i]; ch { - case ignore: - // ignore! - case indent: - p.indent++ - case unindent: - p.indent-- - if p.indent < 0 { - p.internalError("negative indentation:", p.indent) - p.indent = 0 - } - case newline, formfeed: - // A line break immediately followed by a "correcting" - // unindent is swapped with the unindent - this permits - // proper label positioning. If a comment is between - // the line break and the label, the unindent is not - // part of the comment whitespace prefix and the comment - // will be positioned correctly indented. - if i+1 < n && p.wsbuf[i+1] == unindent { - // Use a formfeed to terminate the current section. - // Otherwise, a long label name on the next line leading - // to a wide column may increase the indentation column - // of lines before the label; effectively leading to wrong - // indentation. - p.wsbuf[i], p.wsbuf[i+1] = unindent, formfeed - i-- // do it again - continue - } - fallthrough - default: - p.writeByte(byte(ch), 1) - } - } - - // shift remaining entries down - l := copy(p.wsbuf, p.wsbuf[n:]) - p.wsbuf = p.wsbuf[:l] -} - -// ---------------------------------------------------------------------------- -// Printing interface - -// nlimit limits n to maxNewlines. -func nlimit(n int) int { - if n > maxNewlines { - n = maxNewlines - } - return n -} - -func mayCombine(prev token.Token, next byte) (b bool) { - switch prev { - case token.INT: - b = next == '.' // 1. 
- case token.ADD: - b = next == '+' // ++ - case token.SUB: - b = next == '-' // -- - case token.QUO: - b = next == '*' // /* - case token.LSS: - b = next == '-' || next == '<' // <- or << - case token.AND: - b = next == '&' || next == '^' // && or &^ - } - return -} - -func (p *printer) setPos(pos token.Pos) { - if pos.IsValid() { - p.pos = p.posFor(pos) // accurate position of next item - } -} - -// print prints a list of "items" (roughly corresponding to syntactic -// tokens, but also including whitespace and formatting information). -// It is the only print function that should be called directly from -// any of the AST printing functions in nodes.go. -// -// Whitespace is accumulated until a non-whitespace token appears. Any -// comments that need to appear before that token are printed first, -// taking into account the amount and structure of any pending white- -// space for best comment placement. Then, any leftover whitespace is -// printed, followed by the actual token. -func (p *printer) print(args ...any) { - for _, arg := range args { - // information about the current arg - var data string - var isLit bool - var impliedSemi bool // value for p.impliedSemi after this arg - - // record previous opening token, if any - switch p.lastTok { - case token.ILLEGAL: - // ignore (white space) - case token.LPAREN, token.LBRACK: - p.prevOpen = p.lastTok - default: - // other tokens followed any opening token - p.prevOpen = token.ILLEGAL - } - - switch x := arg.(type) { - case pmode: - // toggle printer mode - p.mode ^= x - continue - - case whiteSpace: - if x == ignore { - // don't add ignore's to the buffer; they - // may screw up "correcting" unindents (see - // LabeledStmt) - continue - } - i := len(p.wsbuf) - if i == cap(p.wsbuf) { - // Whitespace sequences are very short so this should - // never happen. Handle gracefully (but possibly with - // bad comment placement) if it does happen. - p.writeWhitespace(i) - i = 0 - } - p.wsbuf = p.wsbuf[0 : i+1] - p.wsbuf[i] = x - if x == newline || x == formfeed { - // newlines affect the current state (p.impliedSemi) - // and not the state after printing arg (impliedSemi) - // because comments can be interspersed before the arg - // in this case - p.impliedSemi = false - } - p.lastTok = token.ILLEGAL - continue - - case *ast.Ident: - data = x.Name - impliedSemi = true - p.lastTok = token.IDENT - - case *ast.BasicLit: - data = x.Value - isLit = true - impliedSemi = true - p.lastTok = x.Kind - - case token.Token: - s := x.String() - if mayCombine(p.lastTok, s[0]) { - // the previous and the current token must be - // separated by a blank otherwise they combine - // into a different incorrect token sequence - // (except for token.INT followed by a '.' 
this - // should never happen because it is taken care - // of via binary expression formatting) - if len(p.wsbuf) != 0 { - p.internalError("whitespace buffer not empty") - } - p.wsbuf = p.wsbuf[0:1] - p.wsbuf[0] = ' ' - } - data = s - // some keywords followed by a newline imply a semicolon - switch x { - case token.BREAK, token.CONTINUE, token.FALLTHROUGH, token.RETURN, - token.INC, token.DEC, token.RPAREN, token.RBRACK, token.RBRACE: - impliedSemi = true - } - p.lastTok = x - - case string: - // incorrect AST - print error message - data = x - isLit = true - impliedSemi = true - p.lastTok = token.STRING - - default: - fmt.Fprintf(os.Stderr, "print: unsupported argument %v (%T)\n", arg, arg) - panic("mvdan.cc/gofumpt/internal/govendor/go/printer type") - } - // data != "" - - next := p.pos // estimated/accurate position of next item - wroteNewline, droppedFF := p.flush(next, p.lastTok) - - // intersperse extra newlines if present in the source and - // if they don't cause extra semicolons (don't do this in - // flush as it will cause extra newlines at the end of a file) - if !p.impliedSemi { - n := nlimit(next.Line - p.pos.Line) - // don't exceed maxNewlines if we already wrote one - if wroteNewline && n == maxNewlines { - n = maxNewlines - 1 - } - if n > 0 { - ch := byte('\n') - if droppedFF { - ch = '\f' // use formfeed since we dropped one before - } - p.writeByte(ch, n) - impliedSemi = false - } - } - - // the next token starts now - record its line number if requested - if p.linePtr != nil { - *p.linePtr = p.out.Line - p.linePtr = nil - } - - p.writeString(next, data, isLit) - p.impliedSemi = impliedSemi - } -} - -// flush prints any pending comments and whitespace occurring textually -// before the position of the next token tok. The flush result indicates -// if a newline was written or if a formfeed was dropped from the whitespace -// buffer. -func (p *printer) flush(next token.Position, tok token.Token) (wroteNewline, droppedFF bool) { - if p.commentBefore(next) { - // if there are comments before the next item, intersperse them - wroteNewline, droppedFF = p.intersperseComments(next, tok) - } else { - // otherwise, write any leftover whitespace - p.writeWhitespace(len(p.wsbuf)) - } - return -} - -// getDoc returns the ast.CommentGroup associated with n, if any. 
-func getDoc(n ast.Node) *ast.CommentGroup { - switch n := n.(type) { - case *ast.Field: - return n.Doc - case *ast.ImportSpec: - return n.Doc - case *ast.ValueSpec: - return n.Doc - case *ast.TypeSpec: - return n.Doc - case *ast.GenDecl: - return n.Doc - case *ast.FuncDecl: - return n.Doc - case *ast.File: - return n.Doc - } - return nil -} - -func getLastComment(n ast.Node) *ast.CommentGroup { - switch n := n.(type) { - case *ast.Field: - return n.Comment - case *ast.ImportSpec: - return n.Comment - case *ast.ValueSpec: - return n.Comment - case *ast.TypeSpec: - return n.Comment - case *ast.GenDecl: - if len(n.Specs) > 0 { - return getLastComment(n.Specs[len(n.Specs)-1]) - } - case *ast.File: - if len(n.Comments) > 0 { - return n.Comments[len(n.Comments)-1] - } - } - return nil -} - -func (p *printer) printNode(node any) error { - // unpack *CommentedNode, if any - var comments []*ast.CommentGroup - if cnode, ok := node.(*CommentedNode); ok { - node = cnode.Node - comments = cnode.Comments - } - - if comments != nil { - // commented node - restrict comment list to relevant range - n, ok := node.(ast.Node) - if !ok { - goto unsupported - } - beg := n.Pos() - end := n.End() - // if the node has associated documentation, - // include that commentgroup in the range - // (the comment list is sorted in the order - // of the comment appearance in the source code) - if doc := getDoc(n); doc != nil { - beg = doc.Pos() - } - if com := getLastComment(n); com != nil { - if e := com.End(); e > end { - end = e - } - } - // token.Pos values are global offsets, we can - // compare them directly - i := 0 - for i < len(comments) && comments[i].End() < beg { - i++ - } - j := i - for j < len(comments) && comments[j].Pos() < end { - j++ - } - if i < j { - p.comments = comments[i:j] - } - } else if n, ok := node.(*ast.File); ok { - // use ast.File comments, if any - p.comments = n.Comments - } - - // if there are no comments, use node comments - p.useNodeComments = p.comments == nil - - // get comments ready for use - p.nextComment() - - p.print(pmode(0)) - - // format node - switch n := node.(type) { - case ast.Expr: - p.expr(n) - case ast.Stmt: - // A labeled statement will un-indent to position the label. - // Set p.indent to 1 so we don't get indent "underflow". - if _, ok := n.(*ast.LabeledStmt); ok { - p.indent = 1 - } - p.stmt(n, false) - case ast.Decl: - p.decl(n) - case ast.Spec: - p.spec(n, 1, false) - case []ast.Stmt: - // A labeled statement will un-indent to position the label. - // Set p.indent to 1 so we don't get indent "underflow". - for _, s := range n { - if _, ok := s.(*ast.LabeledStmt); ok { - p.indent = 1 - } - } - p.stmtList(n, 0, false) - case []ast.Decl: - p.declList(n) - case *ast.File: - p.file(n) - default: - goto unsupported - } - - return p.sourcePosErr - -unsupported: - return fmt.Errorf("mvdan.cc/gofumpt/internal/govendor/go/printer: unsupported node type %T", node) -} - -// ---------------------------------------------------------------------------- -// Trimmer - -// A trimmer is an io.Writer filter for stripping tabwriter.Escape -// characters, trailing blanks and tabs, and for converting formfeed -// and vtab characters into newlines and htabs (in case no tabwriter -// is used). Text bracketed by tabwriter.Escape characters is passed -// through unchanged. -type trimmer struct { - output io.Writer - state int - space []byte -} - -// trimmer is implemented as a state machine. 
-// It can be in one of the following states: -const ( - inSpace = iota // inside space - inEscape // inside text bracketed by tabwriter.Escapes - inText // inside text -) - -func (p *trimmer) resetSpace() { - p.state = inSpace - p.space = p.space[0:0] -} - -// Design note: It is tempting to eliminate extra blanks occurring in -// whitespace in this function as it could simplify some -// of the blanks logic in the node printing functions. -// However, this would mess up any formatting done by -// the tabwriter. - -var aNewline = []byte("\n") - -func (p *trimmer) Write(data []byte) (n int, err error) { - // invariants: - // p.state == inSpace: - // p.space is unwritten - // p.state == inEscape, inText: - // data[m:n] is unwritten - m := 0 - var b byte - for n, b = range data { - if b == '\v' { - b = '\t' // convert to htab - } - switch p.state { - case inSpace: - switch b { - case '\t', ' ': - p.space = append(p.space, b) - case '\n', '\f': - p.resetSpace() // discard trailing space - _, err = p.output.Write(aNewline) - case tabwriter.Escape: - _, err = p.output.Write(p.space) - p.state = inEscape - m = n + 1 // +1: skip tabwriter.Escape - default: - _, err = p.output.Write(p.space) - p.state = inText - m = n - } - case inEscape: - if b == tabwriter.Escape { - _, err = p.output.Write(data[m:n]) - p.resetSpace() - } - case inText: - switch b { - case '\t', ' ': - _, err = p.output.Write(data[m:n]) - p.resetSpace() - p.space = append(p.space, b) - case '\n', '\f': - _, err = p.output.Write(data[m:n]) - p.resetSpace() - if err == nil { - _, err = p.output.Write(aNewline) - } - case tabwriter.Escape: - _, err = p.output.Write(data[m:n]) - p.state = inEscape - m = n + 1 // +1: skip tabwriter.Escape - } - default: - panic("unreachable") - } - if err != nil { - return - } - } - n = len(data) - - switch p.state { - case inEscape, inText: - _, err = p.output.Write(data[m:n]) - p.resetSpace() - } - - return -} - -// ---------------------------------------------------------------------------- -// Public interface - -// A Mode value is a set of flags (or 0). They control printing. -type Mode uint - -const ( - RawFormat Mode = 1 << iota // do not use a tabwriter; if set, UseSpaces is ignored - TabIndent // use tabs for indentation independent of UseSpaces - UseSpaces // use spaces instead of tabs for alignment - SourcePos // emit //line directives to preserve original source positions -) - -// The mode below is not included in printer's public API because -// editing code text is deemed out of scope. Because this mode is -// unexported, it's also possible to modify or remove it based on -// the evolving needs of mvdan.cc/gofumpt/internal/govendor/go/format and cmd/gofmt without breaking -// users. See discussion in CL 240683. -const ( - // normalizeNumbers means to canonicalize number - // literal prefixes and exponents while printing. - // - // This value is known in and used by mvdan.cc/gofumpt/internal/govendor/go/format and cmd/gofmt. - // It is currently more convenient and performant for those - // packages to apply number normalization during printing, - // rather than by modifying the AST in advance. - normalizeNumbers Mode = 1 << 30 -) - -// A Config node controls the output of Fprint. -type Config struct { - Mode Mode // default: 0 - Tabwidth int // default: 8 - Indent int // default: 0 (all code is indented at least by this much) -} - -var printerPool = sync.Pool{ - New: func() any { - return &printer{ - // Whitespace sequences are short. 
- wsbuf: make([]whiteSpace, 0, 16), - // We start the printer with a 16K output buffer, which is currently - // larger than about 80% of Go files in the standard library. - output: make([]byte, 0, 16<<10), - } - }, -} - -func newPrinter(cfg *Config, fset *token.FileSet, nodeSizes map[ast.Node]int) *printer { - p := printerPool.Get().(*printer) - *p = printer{ - Config: *cfg, - fset: fset, - pos: token.Position{Line: 1, Column: 1}, - out: token.Position{Line: 1, Column: 1}, - wsbuf: p.wsbuf[:0], - nodeSizes: nodeSizes, - cachedPos: -1, - output: p.output[:0], - } - return p -} - -func (p *printer) free() { - // Hard limit on buffer size; see https://golang.org/issue/23199. - if cap(p.output) > 64<<10 { - return - } - - printerPool.Put(p) -} - -// fprint implements Fprint and takes a nodesSizes map for setting up the printer state. -func (cfg *Config) fprint(output io.Writer, fset *token.FileSet, node any, nodeSizes map[ast.Node]int) (err error) { - // print node - p := newPrinter(cfg, fset, nodeSizes) - defer p.free() - if err = p.printNode(node); err != nil { - return - } - // print outstanding comments - p.impliedSemi = false // EOF acts like a newline - p.flush(token.Position{Offset: infinity, Line: infinity}, token.EOF) - - // output is buffered in p.output now. - // fix //go:build and // +build comments if needed. - p.fixGoBuildLines() - - // redirect output through a trimmer to eliminate trailing whitespace - // (Input to a tabwriter must be untrimmed since trailing tabs provide - // formatting information. The tabwriter could provide trimming - // functionality but no tabwriter is used when RawFormat is set.) - output = &trimmer{output: output} - - // redirect output through a tabwriter if necessary - if cfg.Mode&RawFormat == 0 { - minwidth := cfg.Tabwidth - - padchar := byte('\t') - if cfg.Mode&UseSpaces != 0 { - padchar = ' ' - } - - twmode := tabwriter.DiscardEmptyColumns - if cfg.Mode&TabIndent != 0 { - minwidth = 0 - twmode |= tabwriter.TabIndent - } - - output = tabwriter.NewWriter(output, minwidth, cfg.Tabwidth, 1, padchar, twmode) - } - - // write printer result via tabwriter/trimmer to output - if _, err = output.Write(p.output); err != nil { - return - } - - // flush tabwriter, if any - if tw, _ := output.(*tabwriter.Writer); tw != nil { - err = tw.Flush() - } - - return -} - -// A CommentedNode bundles an AST node and corresponding comments. -// It may be provided as argument to any of the Fprint functions. -type CommentedNode struct { - Node any // *ast.File, or ast.Expr, ast.Decl, ast.Spec, or ast.Stmt - Comments []*ast.CommentGroup -} - -// Fprint "pretty-prints" an AST node to output for a given configuration cfg. -// Position information is interpreted relative to the file set fset. -// The node type must be *ast.File, *CommentedNode, []ast.Decl, []ast.Stmt, -// or assignment-compatible to ast.Expr, ast.Decl, ast.Spec, or ast.Stmt. -func (cfg *Config) Fprint(output io.Writer, fset *token.FileSet, node any) error { - return cfg.fprint(output, fset, node, make(map[ast.Node]int)) -} - -// Fprint "pretty-prints" an AST node to output. -// It calls Config.Fprint with default settings. -// Note that gofmt uses tabs for indentation but spaces for alignment; -// use format.Node (package mvdan.cc/gofumpt/internal/govendor/go/format) for output that matches gofmt. 
-func Fprint(output io.Writer, fset *token.FileSet, node any) error { - return (&Config{Tabwidth: 8}).Fprint(output, fset, node) -} diff --git a/vendor/mvdan.cc/gofumpt/internal/version/version.go b/vendor/mvdan.cc/gofumpt/internal/version/version.go deleted file mode 100644 index 785b6b317..000000000 --- a/vendor/mvdan.cc/gofumpt/internal/version/version.go +++ /dev/null @@ -1,105 +0,0 @@ -// Copyright (c) 2020, Daniel Martí -// See LICENSE for licensing information - -package version - -import ( - "encoding/json" - "fmt" - "os" - "runtime" - "runtime/debug" - "time" - - "golang.org/x/mod/module" -) - -// Note that this is not a main package, so a "var version" will not work with -// our go-cross script which uses -ldflags=main.version=xxx. - -const ourModulePath = "mvdan.cc/gofumpt" - -const fallbackVersion = "(devel)" // to match the default from runtime/debug - -func findModule(info *debug.BuildInfo, modulePath string) *debug.Module { - if info.Main.Path == modulePath { - return &info.Main - } - for _, dep := range info.Deps { - if dep.Path == modulePath { - return dep - } - } - return nil -} - -func gofumptVersion() string { - info, ok := debug.ReadBuildInfo() - if !ok { - return fallbackVersion // no build info available - } - // Note that gofumpt may be used as a library via the format package, - // so we cannot assume it is the main module in the build. - mod := findModule(info, ourModulePath) - if mod == nil { - return fallbackVersion // not found? - } - if mod.Replace != nil { - mod = mod.Replace - } - - // If we found a meaningful version, we are done. - // If gofumpt is not the main module, stop as well, - // as VCS info is only for the main module. - if mod.Version != "(devel)" || mod != &info.Main { - return mod.Version - } - - // Fall back to trying to use VCS information. - // Until https://github.com/golang/go/issues/50603 is implemented, - // manually construct something like a pseudo-version. - // TODO: remove when this code is dead, hopefully in Go 1.20. - - // For the tests, as we don't want the VCS information to change over time. - if v := os.Getenv("GARBLE_TEST_BUILDSETTINGS"); v != "" { - var extra []debug.BuildSetting - if err := json.Unmarshal([]byte(v), &extra); err != nil { - panic(err) - } - info.Settings = append(info.Settings, extra...) - } - - var vcsTime time.Time - var vcsRevision string - for _, setting := range info.Settings { - switch setting.Key { - case "vcs.time": - // If the format is invalid, we'll print a zero timestamp. - vcsTime, _ = time.Parse(time.RFC3339Nano, setting.Value) - case "vcs.revision": - vcsRevision = setting.Value - if len(vcsRevision) > 12 { - vcsRevision = vcsRevision[:12] - } - } - } - if vcsRevision != "" { - return module.PseudoVersion("", "", vcsTime, vcsRevision) - } - return fallbackVersion -} - -func goVersion() string { - // For the tests, as we don't want the Go version to change over time. - if testVersion := os.Getenv("GO_VERSION_TEST"); testVersion != "" { - return testVersion - } - return runtime.Version() -} - -func String(injected string) string { - if injected != "" { - return fmt.Sprintf("%s (%s)", injected, goVersion()) - } - return fmt.Sprintf("%s (%s)", gofumptVersion(), goVersion()) -} diff --git a/vendor/mvdan.cc/interfacer/LICENSE b/vendor/mvdan.cc/interfacer/LICENSE deleted file mode 100644 index 7d71d51a5..000000000 --- a/vendor/mvdan.cc/interfacer/LICENSE +++ /dev/null @@ -1,27 +0,0 @@ -Copyright (c) 2015, Daniel Martí. All rights reserved. 
- -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are -met: - - * Redistributions of source code must retain the above copyright -notice, this list of conditions and the following disclaimer. - * Redistributions in binary form must reproduce the above -copyright notice, this list of conditions and the following disclaimer -in the documentation and/or other materials provided with the -distribution. - * Neither the name of the copyright holder nor the names of its -contributors may be used to endorse or promote products derived from -this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/vendor/mvdan.cc/interfacer/check/cache.go b/vendor/mvdan.cc/interfacer/check/cache.go deleted file mode 100644 index 757eca55e..000000000 --- a/vendor/mvdan.cc/interfacer/check/cache.go +++ /dev/null @@ -1,50 +0,0 @@ -// Copyright (c) 2015, Daniel Martí -// See LICENSE for licensing information - -package check - -import ( - "go/ast" - "go/types" -) - -type pkgTypes struct { - ifaces map[string]string - funcSigns map[string]bool -} - -func (p *pkgTypes) getTypes(pkg *types.Package) { - p.ifaces = make(map[string]string) - p.funcSigns = make(map[string]bool) - done := make(map[*types.Package]bool) - addTypes := func(pkg *types.Package, top bool) { - if done[pkg] { - return - } - done[pkg] = true - ifs, funs := fromScope(pkg.Scope()) - fullName := func(name string) string { - if !top { - return pkg.Path() + "." 
+ name - } - return name - } - for iftype, name := range ifs { - // only suggest exported interfaces - if ast.IsExported(name) { - p.ifaces[iftype] = fullName(name) - } - } - for ftype := range funs { - // ignore non-exported func signatures too - p.funcSigns[ftype] = true - } - } - for _, imp := range pkg.Imports() { - addTypes(imp, false) - for _, imp2 := range imp.Imports() { - addTypes(imp2, false) - } - } - addTypes(pkg, true) -} diff --git a/vendor/mvdan.cc/interfacer/check/check.go b/vendor/mvdan.cc/interfacer/check/check.go deleted file mode 100644 index f4d3b4037..000000000 --- a/vendor/mvdan.cc/interfacer/check/check.go +++ /dev/null @@ -1,462 +0,0 @@ -// Copyright (c) 2015, Daniel Martí -// See LICENSE for licensing information - -package check // import "mvdan.cc/interfacer/check" - -import ( - "fmt" - "go/ast" - "go/token" - "go/types" - "os" - "strings" - - "golang.org/x/tools/go/loader" - "golang.org/x/tools/go/ssa" - "golang.org/x/tools/go/ssa/ssautil" - - "github.com/kisielk/gotool" - "mvdan.cc/lint" -) - -func toDiscard(usage *varUsage) bool { - if usage.discard { - return true - } - for to := range usage.assigned { - if toDiscard(to) { - return true - } - } - return false -} - -func allCalls(usage *varUsage, all, ftypes map[string]string) { - for fname := range usage.calls { - all[fname] = ftypes[fname] - } - for to := range usage.assigned { - allCalls(to, all, ftypes) - } -} - -func (c *Checker) interfaceMatching(param *types.Var, usage *varUsage) (string, string) { - if toDiscard(usage) { - return "", "" - } - ftypes := typeFuncMap(param.Type()) - called := make(map[string]string, len(usage.calls)) - allCalls(usage, called, ftypes) - s := funcMapString(called) - return c.ifaces[s], s -} - -type varUsage struct { - calls map[string]struct{} - discard bool - - assigned map[*varUsage]struct{} -} - -type funcDecl struct { - astDecl *ast.FuncDecl - ssaFn *ssa.Function -} - -// CheckArgs checks the packages specified by their import paths in -// args. 
-func CheckArgs(args []string) ([]string, error) { - paths := gotool.ImportPaths(args) - conf := loader.Config{} - conf.AllowErrors = true - rest, err := conf.FromArgs(paths, false) - if err != nil { - return nil, err - } - if len(rest) > 0 { - return nil, fmt.Errorf("unwanted extra args: %v", rest) - } - lprog, err := conf.Load() - if err != nil { - return nil, err - } - prog := ssautil.CreateProgram(lprog, 0) - prog.Build() - c := new(Checker) - c.Program(lprog) - c.ProgramSSA(prog) - issues, err := c.Check() - if err != nil { - return nil, err - } - wd, err := os.Getwd() - if err != nil { - return nil, err - } - lines := make([]string, len(issues)) - for i, issue := range issues { - fpos := prog.Fset.Position(issue.Pos()).String() - if strings.HasPrefix(fpos, wd) { - fpos = fpos[len(wd)+1:] - } - lines[i] = fmt.Sprintf("%s: %s", fpos, issue.Message()) - } - return lines, nil -} - -type Checker struct { - lprog *loader.Program - prog *ssa.Program - - pkgTypes - *loader.PackageInfo - - funcs []*funcDecl - - ssaByPos map[token.Pos]*ssa.Function - - discardFuncs map[*types.Signature]struct{} - - vars map[*types.Var]*varUsage -} - -var ( - _ lint.Checker = (*Checker)(nil) - _ lint.WithSSA = (*Checker)(nil) -) - -func (c *Checker) Program(lprog *loader.Program) { - c.lprog = lprog -} - -func (c *Checker) ProgramSSA(prog *ssa.Program) { - c.prog = prog -} - -func (c *Checker) Check() ([]lint.Issue, error) { - var total []lint.Issue - c.ssaByPos = make(map[token.Pos]*ssa.Function) - wantPkg := make(map[*types.Package]bool) - for _, pinfo := range c.lprog.InitialPackages() { - wantPkg[pinfo.Pkg] = true - } - for fn := range ssautil.AllFunctions(c.prog) { - if fn.Pkg == nil { // builtin? - continue - } - if len(fn.Blocks) == 0 { // stub - continue - } - if !wantPkg[fn.Pkg.Pkg] { // not part of given pkgs - continue - } - c.ssaByPos[fn.Pos()] = fn - } - for _, pinfo := range c.lprog.InitialPackages() { - pkg := pinfo.Pkg - c.getTypes(pkg) - c.PackageInfo = c.lprog.AllPackages[pkg] - total = append(total, c.checkPkg()...) 
- } - return total, nil -} - -func (c *Checker) checkPkg() []lint.Issue { - c.discardFuncs = make(map[*types.Signature]struct{}) - c.vars = make(map[*types.Var]*varUsage) - c.funcs = c.funcs[:0] - findFuncs := func(node ast.Node) bool { - decl, ok := node.(*ast.FuncDecl) - if !ok { - return true - } - ssaFn := c.ssaByPos[decl.Name.Pos()] - if ssaFn == nil { - return true - } - fd := &funcDecl{ - astDecl: decl, - ssaFn: ssaFn, - } - if c.funcSigns[signString(fd.ssaFn.Signature)] { - // implements interface - return true - } - c.funcs = append(c.funcs, fd) - ast.Walk(c, decl.Body) - return true - } - for _, f := range c.Files { - ast.Inspect(f, findFuncs) - } - return c.packageIssues() -} - -func paramVarAndType(sign *types.Signature, i int) (*types.Var, types.Type) { - params := sign.Params() - extra := sign.Variadic() && i >= params.Len()-1 - if !extra { - if i >= params.Len() { - // builtins with multiple signatures - return nil, nil - } - vr := params.At(i) - return vr, vr.Type() - } - last := params.At(params.Len() - 1) - switch x := last.Type().(type) { - case *types.Slice: - return nil, x.Elem() - default: - return nil, x - } -} - -func (c *Checker) varUsage(e ast.Expr) *varUsage { - id, ok := e.(*ast.Ident) - if !ok { - return nil - } - param, ok := c.ObjectOf(id).(*types.Var) - if !ok { - // not a variable - return nil - } - if usage, e := c.vars[param]; e { - return usage - } - if !interesting(param.Type()) { - return nil - } - usage := &varUsage{ - calls: make(map[string]struct{}), - assigned: make(map[*varUsage]struct{}), - } - c.vars[param] = usage - return usage -} - -func (c *Checker) addUsed(e ast.Expr, as types.Type) { - if as == nil { - return - } - if usage := c.varUsage(e); usage != nil { - // using variable - iface, ok := as.Underlying().(*types.Interface) - if !ok { - usage.discard = true - return - } - for i := 0; i < iface.NumMethods(); i++ { - m := iface.Method(i) - usage.calls[m.Name()] = struct{}{} - } - } else if t, ok := c.TypeOf(e).(*types.Signature); ok { - // using func - c.discardFuncs[t] = struct{}{} - } -} - -func (c *Checker) addAssign(to, from ast.Expr) { - pto := c.varUsage(to) - pfrom := c.varUsage(from) - if pto == nil || pfrom == nil { - // either isn't interesting - return - } - pfrom.assigned[pto] = struct{}{} -} - -func (c *Checker) discard(e ast.Expr) { - if usage := c.varUsage(e); usage != nil { - usage.discard = true - } -} - -func (c *Checker) comparedWith(e, with ast.Expr) { - if _, ok := with.(*ast.BasicLit); ok { - c.discard(e) - } -} - -func (c *Checker) Visit(node ast.Node) ast.Visitor { - switch x := node.(type) { - case *ast.SelectorExpr: - if _, ok := c.TypeOf(x.Sel).(*types.Signature); !ok { - c.discard(x.X) - } - case *ast.StarExpr: - c.discard(x.X) - case *ast.UnaryExpr: - c.discard(x.X) - case *ast.IndexExpr: - c.discard(x.X) - case *ast.IncDecStmt: - c.discard(x.X) - case *ast.BinaryExpr: - switch x.Op { - case token.EQL, token.NEQ: - c.comparedWith(x.X, x.Y) - c.comparedWith(x.Y, x.X) - default: - c.discard(x.X) - c.discard(x.Y) - } - case *ast.ValueSpec: - for _, val := range x.Values { - c.addUsed(val, c.TypeOf(x.Type)) - } - case *ast.AssignStmt: - for i, val := range x.Rhs { - left := x.Lhs[i] - if x.Tok == token.ASSIGN { - c.addUsed(val, c.TypeOf(left)) - } - c.addAssign(left, val) - } - case *ast.CompositeLit: - for i, e := range x.Elts { - switch y := e.(type) { - case *ast.KeyValueExpr: - c.addUsed(y.Key, c.TypeOf(y.Value)) - c.addUsed(y.Value, c.TypeOf(y.Key)) - case *ast.Ident: - c.addUsed(y, 
compositeIdentType(c.TypeOf(x), i)) - } - } - case *ast.CallExpr: - switch y := c.TypeOf(x.Fun).Underlying().(type) { - case *types.Signature: - c.onMethodCall(x, y) - default: - // type conversion - if len(x.Args) == 1 { - c.addUsed(x.Args[0], y) - } - } - } - return c -} - -func compositeIdentType(t types.Type, i int) types.Type { - switch x := t.(type) { - case *types.Named: - return compositeIdentType(x.Underlying(), i) - case *types.Struct: - return x.Field(i).Type() - case *types.Array: - return x.Elem() - case *types.Slice: - return x.Elem() - } - return nil -} - -func (c *Checker) onMethodCall(ce *ast.CallExpr, sign *types.Signature) { - for i, e := range ce.Args { - paramObj, t := paramVarAndType(sign, i) - // Don't if this is a parameter being re-used as itself - // in a recursive call - if id, ok := e.(*ast.Ident); ok { - if paramObj == c.ObjectOf(id) { - continue - } - } - c.addUsed(e, t) - } - sel, ok := ce.Fun.(*ast.SelectorExpr) - if !ok { - return - } - // receiver func call on the left side - if usage := c.varUsage(sel.X); usage != nil { - usage.calls[sel.Sel.Name] = struct{}{} - } -} - -func (fd *funcDecl) paramGroups() [][]*types.Var { - astList := fd.astDecl.Type.Params.List - groups := make([][]*types.Var, len(astList)) - signIndex := 0 - for i, field := range astList { - group := make([]*types.Var, len(field.Names)) - for j := range field.Names { - group[j] = fd.ssaFn.Signature.Params().At(signIndex) - signIndex++ - } - groups[i] = group - } - return groups -} - -func (c *Checker) packageIssues() []lint.Issue { - var issues []lint.Issue - for _, fd := range c.funcs { - if _, e := c.discardFuncs[fd.ssaFn.Signature]; e { - continue - } - for _, group := range fd.paramGroups() { - issues = append(issues, c.groupIssues(fd, group)...) 
- } - } - return issues -} - -type Issue struct { - pos token.Pos - msg string -} - -func (i Issue) Pos() token.Pos { return i.pos } -func (i Issue) Message() string { return i.msg } - -func (c *Checker) groupIssues(fd *funcDecl, group []*types.Var) []lint.Issue { - var issues []lint.Issue - for _, param := range group { - usage := c.vars[param] - if usage == nil { - return nil - } - newType := c.paramNewType(fd.astDecl.Name.Name, param, usage) - if newType == "" { - return nil - } - issues = append(issues, Issue{ - pos: param.Pos(), - msg: fmt.Sprintf("%s can be %s", param.Name(), newType), - }) - } - return issues -} - -func willAddAllocation(t types.Type) bool { - switch t.Underlying().(type) { - case *types.Pointer, *types.Interface: - return false - } - return true -} - -func (c *Checker) paramNewType(funcName string, param *types.Var, usage *varUsage) string { - t := param.Type() - if !ast.IsExported(funcName) && willAddAllocation(t) { - return "" - } - if named := typeNamed(t); named != nil { - tname := named.Obj().Name() - vname := param.Name() - if mentionsName(funcName, tname) || mentionsName(funcName, vname) { - return "" - } - } - ifname, iftype := c.interfaceMatching(param, usage) - if ifname == "" { - return "" - } - if types.IsInterface(t.Underlying()) { - if have := funcMapString(typeFuncMap(t)); have == iftype { - return "" - } - } - return ifname -} diff --git a/vendor/mvdan.cc/interfacer/check/types.go b/vendor/mvdan.cc/interfacer/check/types.go deleted file mode 100644 index 393bb0b9f..000000000 --- a/vendor/mvdan.cc/interfacer/check/types.go +++ /dev/null @@ -1,170 +0,0 @@ -// Copyright (c) 2015, Daniel Martí -// See LICENSE for licensing information - -package check - -import ( - "bytes" - "fmt" - "go/types" - "sort" - "strings" -) - -type methoder interface { - NumMethods() int - Method(int) *types.Func -} - -func methoderFuncMap(m methoder, skip bool) map[string]string { - ifuncs := make(map[string]string, m.NumMethods()) - for i := 0; i < m.NumMethods(); i++ { - f := m.Method(i) - if !f.Exported() { - if skip { - continue - } - return nil - } - sign := f.Type().(*types.Signature) - ifuncs[f.Name()] = signString(sign) - } - return ifuncs -} - -func typeFuncMap(t types.Type) map[string]string { - switch x := t.(type) { - case *types.Pointer: - return typeFuncMap(x.Elem()) - case *types.Named: - u := x.Underlying() - if types.IsInterface(u) { - return typeFuncMap(u) - } - return methoderFuncMap(x, true) - case *types.Interface: - return methoderFuncMap(x, false) - default: - return nil - } -} - -func funcMapString(iface map[string]string) string { - fnames := make([]string, 0, len(iface)) - for fname := range iface { - fnames = append(fnames, fname) - } - sort.Strings(fnames) - var b bytes.Buffer - for i, fname := range fnames { - if i > 0 { - fmt.Fprint(&b, "; ") - } - fmt.Fprint(&b, fname, iface[fname]) - } - return b.String() -} - -func tupleJoin(buf *bytes.Buffer, t *types.Tuple) { - buf.WriteByte('(') - for i := 0; i < t.Len(); i++ { - if i > 0 { - buf.WriteString(", ") - } - buf.WriteString(t.At(i).Type().String()) - } - buf.WriteByte(')') -} - -// signString is similar to Signature.String(), but it ignores -// param/result names. 
-func signString(sign *types.Signature) string { - var buf bytes.Buffer - tupleJoin(&buf, sign.Params()) - tupleJoin(&buf, sign.Results()) - return buf.String() -} - -func interesting(t types.Type) bool { - switch x := t.(type) { - case *types.Interface: - return x.NumMethods() > 1 - case *types.Named: - if u := x.Underlying(); types.IsInterface(u) { - return interesting(u) - } - return x.NumMethods() >= 1 - case *types.Pointer: - return interesting(x.Elem()) - default: - return false - } -} - -func anyInteresting(params *types.Tuple) bool { - for i := 0; i < params.Len(); i++ { - t := params.At(i).Type() - if interesting(t) { - return true - } - } - return false -} - -func fromScope(scope *types.Scope) (ifaces map[string]string, funcs map[string]bool) { - ifaces = make(map[string]string) - funcs = make(map[string]bool) - for _, name := range scope.Names() { - tn, ok := scope.Lookup(name).(*types.TypeName) - if !ok { - continue - } - switch x := tn.Type().Underlying().(type) { - case *types.Interface: - iface := methoderFuncMap(x, false) - if len(iface) == 0 { - continue - } - for i := 0; i < x.NumMethods(); i++ { - f := x.Method(i) - sign := f.Type().(*types.Signature) - if !anyInteresting(sign.Params()) { - continue - } - funcs[signString(sign)] = true - } - s := funcMapString(iface) - if _, e := ifaces[s]; !e { - ifaces[s] = tn.Name() - } - case *types.Signature: - if !anyInteresting(x.Params()) { - continue - } - funcs[signString(x)] = true - } - } - return ifaces, funcs -} - -func mentionsName(fname, name string) bool { - if len(name) < 2 { - return false - } - capit := strings.ToUpper(name[:1]) + name[1:] - lower := strings.ToLower(name) - return strings.Contains(fname, capit) || strings.HasPrefix(fname, lower) -} - -func typeNamed(t types.Type) *types.Named { - for { - switch x := t.(type) { - case *types.Named: - return x - case *types.Pointer: - t = x.Elem() - default: - return nil - } - } -} diff --git a/vendor/mvdan.cc/lint/.travis.yml b/vendor/mvdan.cc/lint/.travis.yml deleted file mode 100644 index 2ccdeab9a..000000000 --- a/vendor/mvdan.cc/lint/.travis.yml +++ /dev/null @@ -1,7 +0,0 @@ -language: go - -go: - - 1.8.x - - 1.9.x - -go_import_path: mvdan.cc/lint diff --git a/vendor/mvdan.cc/lint/LICENSE b/vendor/mvdan.cc/lint/LICENSE deleted file mode 100644 index a06c5ebfc..000000000 --- a/vendor/mvdan.cc/lint/LICENSE +++ /dev/null @@ -1,27 +0,0 @@ -Copyright (c) 2017, Daniel Martí. All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are -met: - - * Redistributions of source code must retain the above copyright -notice, this list of conditions and the following disclaimer. - * Redistributions in binary form must reproduce the above -copyright notice, this list of conditions and the following disclaimer -in the documentation and/or other materials provided with the -distribution. - * Neither the name of the copyright holder nor the names of its -contributors may be used to endorse or promote products derived from -this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT -OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/vendor/mvdan.cc/lint/README.md b/vendor/mvdan.cc/lint/README.md deleted file mode 100644 index 8a9c8b51c..000000000 --- a/vendor/mvdan.cc/lint/README.md +++ /dev/null @@ -1,27 +0,0 @@ -# lint - -[![GoDoc](https://godoc.org/mvdan.cc/lint?status.svg)](https://godoc.org/mvdan.cc/lint) -[![Build Status](https://travis-ci.org/mvdan/lint.svg?branch=master)](https://travis-ci.org/mvdan/lint) - -Work in progress. Its API might change before the 1.0 release. - -This package intends to define simple interfaces that Go code checkers -can implement. This would simplify calling them from Go code, as well as -running multiple linters while sharing initial loading work. - -### metalint - - go get -u mvdan.cc/lint/cmd/metalint - -The start of a linter that runs many linters leveraging the common -interface. Not stable yet. - -Linters included: - -* [unparam](https://mvdan.cc/unparam) -* [interfacer](https://github.com/mvdan/interfacer) - -### Related projects - -* [golinters](https://github.com/thomasheller/golinters) - Report on - linter support diff --git a/vendor/mvdan.cc/lint/lint.go b/vendor/mvdan.cc/lint/lint.go deleted file mode 100644 index a16789fad..000000000 --- a/vendor/mvdan.cc/lint/lint.go +++ /dev/null @@ -1,28 +0,0 @@ -// Copyright (c) 2017, Daniel Martí -// See LICENSE for licensing information - -// Package lint defines common interfaces for Go code checkers. -package lint // import "mvdan.cc/lint" - -import ( - "go/token" - - "golang.org/x/tools/go/loader" - "golang.org/x/tools/go/ssa" -) - -// A Checker points out issues in a program. -type Checker interface { - Program(*loader.Program) - Check() ([]Issue, error) -} - -type WithSSA interface { - ProgramSSA(*ssa.Program) -} - -// Issue represents an issue somewhere in a source code file. -type Issue interface { - Pos() token.Pos - Message() string -} diff --git a/vendor/mvdan.cc/unparam/LICENSE b/vendor/mvdan.cc/unparam/LICENSE deleted file mode 100644 index a06c5ebfc..000000000 --- a/vendor/mvdan.cc/unparam/LICENSE +++ /dev/null @@ -1,27 +0,0 @@ -Copyright (c) 2017, Daniel Martí. All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are -met: - - * Redistributions of source code must retain the above copyright -notice, this list of conditions and the following disclaimer. - * Redistributions in binary form must reproduce the above -copyright notice, this list of conditions and the following disclaimer -in the documentation and/or other materials provided with the -distribution. - * Neither the name of the copyright holder nor the names of its -contributors may be used to endorse or promote products derived from -this software without specific prior written permission. 
- -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/vendor/mvdan.cc/unparam/check/check.go b/vendor/mvdan.cc/unparam/check/check.go deleted file mode 100644 index 39de800d9..000000000 --- a/vendor/mvdan.cc/unparam/check/check.go +++ /dev/null @@ -1,1018 +0,0 @@ -// Copyright (c) 2017, Daniel Martí -// See LICENSE for licensing information - -// Package check implements the unparam linter. Note that its API is not -// stable. -package check - -import ( - "bytes" - "fmt" - "go/ast" - "go/constant" - "go/parser" - "go/printer" - "go/token" - "go/types" - "io" - "os" - "path/filepath" - "regexp" - "sort" - "strings" - - "golang.org/x/tools/go/packages" - "golang.org/x/tools/go/ssa" - "golang.org/x/tools/go/ssa/ssautil" -) - -// UnusedParams returns a list of human-readable issues that point out unused -// function parameters. -func UnusedParams(tests, exported, debug bool, args ...string) ([]string, error) { - wd, err := os.Getwd() - if err != nil { - return nil, err - } - c := &Checker{ - wd: wd, - tests: tests, - exported: exported, - } - if debug { - c.debugLog = os.Stderr - } - return c.lines(args...) -} - -// Checker finds unused parameters in a program. You probably want to use -// UnusedParams instead, unless you want to use a *loader.Program and -// *ssa.Program directly. -type Checker struct { - pkgs []*packages.Package - prog *ssa.Program - - wd string - - tests bool - exported bool - debugLog io.Writer - - issues []Issue - - cachedDeclCounts map[string]map[string]int - - // callByPos maps from a call site position to its CallExpr. - callByPos map[token.Pos]*ast.CallExpr - - // funcBodyByPos maps from a function position to its body. We can't map - // to the declaration, as that could be either a FuncDecl or FuncLit. - funcBodyByPos map[token.Pos]*ast.BlockStmt - - // typesImplementing records the method names that each named type needs - // to typecheck properly, as they're required to implement interfaces. - typesImplementing map[*types.Named][]string - - // localCallSites is a very simple form of a callgraph, only recording - // direct function calls within a single package. - localCallSites map[*ssa.Function][]ssa.CallInstruction - - // These three maps record whether an entire func's signature cannot be - // changed, or only its list of parameters or results. - - signRequiredBy map[*ssa.Function]string - paramsRequiredBy map[*ssa.Function]string - resultsRequiredBy map[*ssa.Function]string -} - -var errorType = types.Universe.Lookup("error").Type() - -// lines runs the checker and returns the list of readable issues. -func (c *Checker) lines(args ...string) ([]string, error) { - cfg := &packages.Config{ - Mode: packages.LoadSyntax, - Tests: c.tests, - } - pkgs, err := packages.Load(cfg, args...) 
- if err != nil { - return nil, err - } - if packages.PrintErrors(pkgs) > 0 { - return nil, fmt.Errorf("encountered errors") - } - - prog, _ := ssautil.Packages(pkgs, 0) - prog.Build() - c.Packages(pkgs) - c.ProgramSSA(prog) - issues, err := c.Check() - if err != nil { - return nil, err - } - lines := make([]string, 0, len(issues)) - prevLine := "" - for _, issue := range issues { - fpos := prog.Fset.Position(issue.Pos()).String() - if strings.HasPrefix(fpos, c.wd) { - fpos = fpos[len(c.wd)+1:] - } - line := fmt.Sprintf("%s: %s", fpos, issue.Message()) - if line == prevLine { - // Deduplicate lines, since we may look at the same - // package multiple times if tests are involved. - // TODO: is there a better way to handle this? - continue - } - prevLine = line - lines = append(lines, fmt.Sprintf("%s: %s", fpos, issue.Message())) - } - return lines, nil -} - -// Issue identifies a found unused parameter. -type Issue struct { - pos token.Pos - fname string - msg string -} - -func (i Issue) Pos() token.Pos { return i.pos } -func (i Issue) Message() string { return i.fname + " - " + i.msg } - -// Program supplies Checker with the needed *loader.Program. -func (c *Checker) Packages(pkgs []*packages.Package) { - c.pkgs = pkgs -} - -// ProgramSSA supplies Checker with the needed *ssa.Program. -func (c *Checker) ProgramSSA(prog *ssa.Program) { - c.prog = prog -} - -// CheckExportedFuncs sets whether to inspect exported functions -func (c *Checker) CheckExportedFuncs(exported bool) { - c.exported = exported -} - -func (c *Checker) debug(format string, a ...interface{}) { - if c.debugLog != nil { - fmt.Fprintf(c.debugLog, format, a...) - } -} - -// generatedDoc reports whether a comment text describes its file as being code -// generated. -func generatedDoc(text string) bool { - return strings.Contains(text, "Code generated") || - strings.Contains(text, "DO NOT EDIT") -} - -// eqlConsts reports whether two constant values, possibly nil, are equal. -func eqlConsts(c1, c2 *ssa.Const) bool { - if c1 == nil || c2 == nil { - return c1 == c2 - } - if c1.Type() != c2.Type() { - return false - } - if c1.Value == nil || c2.Value == nil { - return c1.Value == c2.Value - } - return constant.Compare(c1.Value, token.EQL, c2.Value) -} - -var stdSizes = types.SizesFor("gc", "amd64") - -// Check runs the unused parameter check and returns the list of found issues, -// and any error encountered. 
-func (c *Checker) Check() ([]Issue, error) { - c.cachedDeclCounts = make(map[string]map[string]int) - c.callByPos = make(map[token.Pos]*ast.CallExpr) - c.funcBodyByPos = make(map[token.Pos]*ast.BlockStmt) - c.typesImplementing = make(map[*types.Named][]string) - - wantPkg := make(map[*types.Package]*packages.Package) - genFiles := make(map[string]bool) - for _, pkg := range c.pkgs { - wantPkg[pkg.Types] = pkg - for _, f := range pkg.Syntax { - if len(f.Comments) > 0 && generatedDoc(f.Comments[0].Text()) { - fname := c.prog.Fset.Position(f.Pos()).Filename - genFiles[fname] = true - } - ast.Inspect(f, func(node ast.Node) bool { - switch node := node.(type) { - case *ast.ValueSpec: - if len(node.Values) == 0 || node.Type == nil || - len(node.Names) != 1 || node.Names[0].Name != "_" { - break - } - iface, ok := pkg.TypesInfo.TypeOf(node.Type).Underlying().(*types.Interface) - if !ok { - break - } - // var _ someIface = named - valTyp := pkg.TypesInfo.Types[node.Values[0]].Type - c.addImplementing(findNamed(valTyp), iface) - case *ast.CallExpr: - c.callByPos[node.Lparen] = node - // ssa.Function.Pos returns the declaring - // FuncLit.Type.Func or the position of the - // FuncDecl.Name. - case *ast.FuncDecl: - c.funcBodyByPos[node.Name.Pos()] = node.Body - case *ast.FuncLit: - c.funcBodyByPos[node.Pos()] = node.Body - } - return true - }) - } - } - allFuncs := ssautil.AllFunctions(c.prog) - - // map from *ssa.FreeVar to *ssa.Function, to find function literals - // behind closure vars in the simpler scenarios. - freeVars := map[*ssa.FreeVar]*ssa.Function{} - for curFunc := range allFuncs { - for _, b := range curFunc.Blocks { - for _, instr := range b.Instrs { - instr, ok := instr.(*ssa.MakeClosure) - if !ok { - continue - } - fn := instr.Fn.(*ssa.Function) - for i, fv := range fn.FreeVars { - binding := instr.Bindings[i] - alloc, ok := binding.(*ssa.Alloc) - if !ok { - continue - } - for _, ref := range *alloc.Referrers() { - store, ok := ref.(*ssa.Store) - if !ok { - continue - } - if fn, ok := store.Val.(*ssa.Function); ok { - freeVars[fv] = fn - break - } - } - } - } - } - } - - c.signRequiredBy = make(map[*ssa.Function]string) - c.paramsRequiredBy = make(map[*ssa.Function]string) - c.resultsRequiredBy = make(map[*ssa.Function]string) - c.localCallSites = make(map[*ssa.Function][]ssa.CallInstruction) - for curFunc := range allFuncs { - if strings.HasPrefix(curFunc.Synthetic, "wrapper for func") { - // Synthetic func wrappers are uninteresting, and can - // lead to false negatives. 
- continue - } - for _, b := range curFunc.Blocks { - for _, instr := range b.Instrs { - if instr, ok := instr.(ssa.CallInstruction); ok { - if fn := findFunction(freeVars, instr.Common().Value); fn != nil { - c.localCallSites[fn] = append(c.localCallSites[fn], instr) - } - fn := receivesExtractedArgs(freeVars, instr) - if fn != nil { - // fn(someFunc()) fixes params - c.paramsRequiredBy[fn] = "forwarded call" - } - } - switch instr := instr.(type) { - case *ssa.Call: - for _, arg := range instr.Call.Args { - if fn := findFunction(freeVars, arg); fn != nil { - // someFunc(fn) - c.signRequiredBy[fn] = "call" - } - } - case *ssa.Phi: - for _, val := range instr.Edges { - if fn := findFunction(freeVars, val); fn != nil { - // nonConstVar = fn - c.signRequiredBy[fn] = "phi" - } - } - case *ssa.Return: - for _, val := range instr.Results { - if fn := findFunction(freeVars, val); fn != nil { - // return fn - c.signRequiredBy[fn] = "result" - } - } - if call := callExtract(instr, instr.Results); call != nil { - if fn := findFunction(freeVars, call.Call.Value); fn != nil { - // return fn() - c.resultsRequiredBy[fn] = "return" - } - } - case *ssa.Store: - as := "" - switch instr.Addr.(type) { - case *ssa.FieldAddr: - // x.someField = fn - as = "field" - case *ssa.IndexAddr: - // x[someIndex] = fn - as = "element" - case *ssa.Global: - // someGlobal = fn - as = "global" - default: - continue - } - if fn := findFunction(freeVars, instr.Val); fn != nil { - c.signRequiredBy[fn] = as - } - case *ssa.MakeInterface: - // someIface(named) - iface := instr.Type().Underlying().(*types.Interface) - c.addImplementing(findNamed(instr.X.Type()), iface) - - if fn := findFunction(freeVars, instr.X); fn != nil { - // emptyIface = fn - c.signRequiredBy[fn] = "interface" - } - case *ssa.ChangeType: - if fn := findFunction(freeVars, instr.X); fn != nil { - // someType(fn) - c.signRequiredBy[fn] = "type conversion" - } - } - } - } - } - - for fn := range allFuncs { - switch { - case fn.Pkg == nil: // builtin? 
- continue - case fn.Name() == "init": - continue - case len(fn.Blocks) == 0: // stub - continue - } - pkg := wantPkg[fn.Pkg.Pkg] - if pkg == nil { // not part of given pkgs - continue - } - if c.exported || fn.Pkg.Pkg.Name() == "main" { - // we want exported funcs, or this is a main package so - // nothing is exported - } else if strings.Contains(fn.Name(), "$") { - // anonymous function within a possibly exported func - } else if ast.IsExported(fn.Name()) { - continue // user doesn't want to change signatures here - } - fname := c.prog.Fset.Position(fn.Pos()).Filename - if genFiles[fname] { - continue // generated file - } - - c.checkFunc(fn, pkg) - } - sort.Slice(c.issues, func(i, j int) bool { - p1 := c.prog.Fset.Position(c.issues[i].Pos()) - p2 := c.prog.Fset.Position(c.issues[j].Pos()) - if p1.Filename == p2.Filename { - return p1.Offset < p2.Offset - } - return p1.Filename < p2.Filename - }) - return c.issues, nil -} - -func stringsContains(list []string, elem string) bool { - for _, e := range list { - if e == elem { - return true - } - } - return false -} - -func (c *Checker) addImplementing(named *types.Named, iface *types.Interface) { - if named == nil || iface == nil { - return - } - list := c.typesImplementing[named] - for i := 0; i < iface.NumMethods(); i++ { - name := iface.Method(i).Name() - if !stringsContains(list, name) { - list = append(list, name) - } - } - c.typesImplementing[named] = list -} - -func findNamed(typ types.Type) *types.Named { - switch typ := typ.(type) { - case *types.Pointer: - return findNamed(typ.Elem()) - case *types.Named: - return typ - } - return nil -} - -// findFunction returns the function that is behind a value, if any. -func findFunction(freeVars map[*ssa.FreeVar]*ssa.Function, value ssa.Value) *ssa.Function { - switch value := value.(type) { - case *ssa.Function: - name := value.Name() - if strings.HasSuffix(name, "$thunk") || strings.HasSuffix(name, "$bound") { - // Method wrapper funcs contain a single block, which - // calls the function being wrapped, and returns. We - // want the function being wrapped. - for _, instr := range value.Blocks[0].Instrs { - call, ok := instr.(*ssa.Call) - if !ok { - continue - } - if callee := call.Call.StaticCallee(); callee != nil { - return callee - } - } - return nil // no static callee? - } - return value - case *ssa.MakeClosure: - // closure of a func - return findFunction(freeVars, value.Fn) - case *ssa.UnOp: - if value.Op != token.MUL { - break - } - if fv, ok := value.X.(*ssa.FreeVar); ok { - return freeVars[fv] - } - } - return nil -} - -// addIssue records a newly found unused parameter. -func (c *Checker) addIssue(fn *ssa.Function, pos token.Pos, format string, args ...interface{}) { - c.issues = append(c.issues, Issue{ - pos: pos, - fname: fn.RelString(fn.Package().Pkg), - msg: fmt.Sprintf(format, args...), - }) -} - -// constValueString is cnst.Value.String() without panicking on untyped nils. -func constValueString(cnst *ssa.Const) string { - if cnst.Value == nil { - return "nil" - } - return cnst.Value.String() -} - -// checkFunc checks a single function for unused parameters. 
-func (c *Checker) checkFunc(fn *ssa.Function, pkg *packages.Package) { - c.debug("func %s\n", fn.RelString(fn.Package().Pkg)) - if dummyImpl(fn.Blocks[0]) { // panic implementation - c.debug(" skip - dummy implementation\n") - return - } - if by := c.signRequiredBy[fn]; by != "" { - c.debug(" skip - func signature required by %s\n", by) - return - } - if recv := fn.Signature.Recv(); recv != nil { - named := findNamed(recv.Type()) - if stringsContains(c.typesImplementing[named], fn.Name()) { - c.debug(" skip - method required to implement an interface\n") - return - } - } - if c.multipleImpls(pkg, fn) { - c.debug(" skip - multiple implementations via build tags\n") - return - } - paramsBy := c.paramsRequiredBy[fn] - resultsBy := c.resultsRequiredBy[fn] - callSites := c.localCallSites[fn] - - results := fn.Signature.Results() - sameConsts := make([]*ssa.Const, results.Len()) - numRets := 0 - allRetsExtracting := true - for _, block := range fn.Blocks { - if resultsBy != "" { - continue // we can't change the returns - } - last := block.Instrs[len(block.Instrs)-1] - ret, ok := last.(*ssa.Return) - if !ok { - continue - } - for i, val := range ret.Results { - if _, ok := val.(*ssa.Extract); !ok { - allRetsExtracting = false - } - cnst := constValue(val) - if numRets == 0 { - sameConsts[i] = cnst - } else if !eqlConsts(sameConsts[i], cnst) { - sameConsts[i] = nil - } - } - numRets++ - } - for i, cnst := range sameConsts { - if cnst == nil { - // no consistent returned constant - continue - } - if cnst.Value != nil && numRets == 1 { - // just one return and it's not untyped nil (too many - // false positives) - continue - } - res := results.At(i) - name := paramDesc(i, res) - c.addIssue(fn, res.Pos(), "result %s is always %s", name, constValueString(cnst)) - } - -resLoop: - for i := 0; i < results.Len(); i++ { - if resultsBy != "" { - continue // we can't change the returns - } - if allRetsExtracting { - continue - } - res := results.At(i) - if res.Type() == errorType { - // "error is never used" is less useful, and it's up to - // tools like errcheck anyway. - continue - } - count := 0 - for _, site := range callSites { - val := site.Value() - if val == nil { // e.g. 
go statement - count++ - continue - } - for _, instr := range *val.Referrers() { - extract, ok := instr.(*ssa.Extract) - if !ok { - continue resLoop // direct, real use - } - if extract.Index != i { - continue // not the same result param - } - if len(*extract.Referrers()) > 0 { - continue resLoop // real use after extraction - } - } - count++ - } - if count < 2 { - continue // require ignoring at least twice - } - name := paramDesc(i, res) - c.addIssue(fn, res.Pos(), "result %s is never used", name) - } - - fnIsGeneric := fn.TypeParams().Len() > 0 - - for i, par := range fn.Params { - if paramsBy != "" { - continue // we can't change the params - } - if i == 0 && fn.Signature.Recv() != nil { // receiver - continue - } - c.debug("%s\n", par.String()) - if name := par.Object().Name(); name == "" || name[0] == '_' { - c.debug(" skip - no name or underscore name\n") - continue - } - t := par.Type() - // asking for the size of a type param would panic, as it is unknowable - if !fnIsGeneric || !containsTypeParam(t) { - if stdSizes.Sizeof(par.Type()) == 0 { - c.debug(" skip - zero size\n") - continue - } - } else { - c.debug(" examine - type parameter\n") - } - reason := "is unused" - constStr := c.alwaysReceivedConst(callSites, par, i) - if constStr != "" { - reason = fmt.Sprintf("always receives %s", constStr) - } else if c.anyRealUse(par, i, pkg) { - c.debug(" skip - used somewhere in the func body\n") - continue - } - c.addIssue(fn, par.Pos(), "%s %s", par.Name(), reason) - } -} - -func containsTypeParam(t types.Type) bool { - switch t := t.(type) { - case *types.TypeParam, *types.Union: - return true - case *types.Struct: - nf := t.NumFields() - for i := 0; i < nf; i++ { - if containsTypeParam(t.Field(nf).Type()) { - return true - } - } - case *types.Array: - return containsTypeParam(t.Elem()) - case *types.Named: - args := t.TypeArgs() - for i := 0; i < args.Len(); i++ { - if containsTypeParam(args.At(i)) { - return true - } - } - } - return false -} - -// nodeStr stringifies a syntax tree node. It is only meant for simple nodes, -// such as short value expressions. -func nodeStr(node ast.Node) string { - var buf bytes.Buffer - fset := token.NewFileSet() - if err := printer.Fprint(&buf, fset, node); err != nil { - panic(err) - } - return buf.String() -} - -// alwaysReceivedConst checks if a function parameter always receives the same -// constant value, given a list of inbound calls. If it does, a description of -// the value is returned. If not, an empty string is returned. -// -// This function is used to recommend that the parameter be replaced by a direct -// use of the constant. To avoid false positives, the function will return false -// if the number of inbound calls is too low. -func (c *Checker) alwaysReceivedConst(callSites []ssa.CallInstruction, par *ssa.Parameter, pos int) string { - if len(callSites) < 4 { - // We can't possibly receive the same constant value enough - // times, hence a potential false positive. - return "" - } - if ast.IsExported(par.Parent().Name()) { - // we might not have all call sites for an exported func - return "" - } - var seen *ssa.Const - origPos := pos - if par.Parent().Signature.Recv() != nil { - // go/ast's CallExpr.Args does not include the receiver, but - // go/ssa's equivalent does. - origPos-- - } - seenOrig := "" - for _, site := range callSites { - call := site.Common() - if pos >= len(call.Args) { - // TODO: investigate? Weird crash in - // internal/x/net/http2/hpack/hpack_test.go, where we - // roughly do: "at := d.mustAt; at(3)". 
- return "" - } - cnst := constValue(call.Args[pos]) - if cnst == nil { - return "" // not a constant - } - origArg := "" - origCall := c.callByPos[call.Pos()] - if origPos >= len(origCall.Args) { - // variadic parameter that wasn't given - } else { - origArg = nodeStr(origCall.Args[origPos]) - } - if seen == nil { - seen = cnst // first constant - seenOrig = origArg - } else if !eqlConsts(seen, cnst) { - return "" // different constants - } else if origArg != seenOrig { - seenOrig = "" - } - } - seenStr := constValueString(seen) - if seenOrig != "" && seenStr != seenOrig { - return fmt.Sprintf("%s (%s)", seenOrig, seenStr) - } - return seenStr -} - -func constValue(value ssa.Value) *ssa.Const { - switch x := value.(type) { - case *ssa.Const: - return x - case *ssa.MakeInterface: - return constValue(x.X) - } - return nil -} - -// anyRealUse reports whether a parameter has any relevant use within its -// function body. Certain uses are ignored, such as recursive calls where the -// parameter is re-used as itself. -func (c *Checker) anyRealUse(par *ssa.Parameter, pos int, pkg *packages.Package) bool { - refs := *par.Referrers() - if len(refs) == 0 { - // Look for any uses like "_ = par", which are the developer's - // way to tell they want to keep the parameter. SSA does not - // keep that kind of statement around. - body := c.funcBodyByPos[par.Parent().Pos()] - any := false - ast.Inspect(body, func(node ast.Node) bool { - if any { - return false - } - asgn, ok := node.(*ast.AssignStmt) - if !ok || asgn.Tok != token.ASSIGN || len(asgn.Lhs) != 1 || len(asgn.Rhs) != 1 { - return true - } - if left, ok := asgn.Lhs[0].(*ast.Ident); !ok || left.Name != "_" { - return true - } - if right, ok := asgn.Rhs[0].(*ast.Ident); ok { - obj := pkg.TypesInfo.Uses[right] - if obj != nil && obj.Pos() == par.Pos() { - any = true - } - } - return true - }) - return any - } -refLoop: - for _, ref := range refs { - switch x := ref.(type) { - case *ssa.Call: - if x.Call.Value != par.Parent() { - return true // not a recursive call - } - for i, arg := range x.Call.Args { - if arg != par { - continue - } - if i == pos { - // reused directly in a recursive call - continue refLoop - } - } - return true - case *ssa.Store: - if insertedStore(x) { - continue // inserted by go/ssa, not from the code - } - return true - default: - return true - } - } - return false -} - -// insertedStore reports whether a SSA instruction was inserted by the SSA -// building algorithm. That is, the store was not directly translated from an -// original Go statement. -func insertedStore(instr ssa.Instruction) bool { - if instr.Pos() != token.NoPos { - return false - } - store, ok := instr.(*ssa.Store) - if !ok { - return false - } - alloc, ok := store.Addr.(*ssa.Alloc) - // we want exactly one use of this alloc value for it to be - // inserted by ssa and dummy - the alloc instruction itself. - return ok && len(*alloc.Referrers()) == 1 -} - -// rxHarmlessCall matches all the function expression strings which are allowed -// in a dummy implementation. -var rxHarmlessCall = regexp.MustCompile(`(?i)\b(log(ger)?|errors)\b|\bf?print|errorf?$`) - -// dummyImpl reports whether a block is a dummy implementation. This is -// true if the block will almost immediately panic, throw or return -// constants only. 
-func dummyImpl(blk *ssa.BasicBlock) bool { - var ops [8]*ssa.Value - for _, instr := range blk.Instrs { - if insertedStore(instr) { - continue // inserted by go/ssa, not from the code - } - for _, val := range instr.Operands(ops[:0]) { - switch x := (*val).(type) { - case nil, *ssa.Const, *ssa.ChangeType, *ssa.Alloc, - *ssa.MakeInterface, *ssa.MakeMap, - *ssa.Function, *ssa.Global, - *ssa.IndexAddr, *ssa.Slice, - *ssa.UnOp, *ssa.Parameter: - case *ssa.Call: - if rxHarmlessCall.MatchString(x.Call.Value.String()) { - continue - } - default: - return false - } - } - switch x := instr.(type) { - case *ssa.Alloc, *ssa.Store, *ssa.UnOp, *ssa.BinOp, - *ssa.MakeInterface, *ssa.MakeMap, *ssa.Extract, - *ssa.IndexAddr, *ssa.FieldAddr, *ssa.Slice, - *ssa.Lookup, *ssa.ChangeType, *ssa.TypeAssert, - *ssa.Convert, *ssa.ChangeInterface: - // non-trivial expressions in panic/log/print calls - case *ssa.Return, *ssa.Panic: - return true - case *ssa.Call: - if rxHarmlessCall.MatchString(x.Call.Value.String()) { - continue - } - return x.Call.Value.Name() == "throw" // runtime's panic - default: - return false - } - } - return false -} - -// declCounts reports how many times a package's functions are declared. This is -// used, for example, to find if a function has many implementations. -// -// Since this function parses all of the package's Go source files on disk, its -// results are cached. -func (c *Checker) declCounts(pkgDir, pkgName string) map[string]int { - key := pkgDir + ":" + pkgName - if m, ok := c.cachedDeclCounts[key]; ok { - return m - } - fset := token.NewFileSet() - pkgs, err := parser.ParseDir(fset, pkgDir, nil, 0) - if err != nil { - // Don't panic or error here. In some part of the go/* libraries - // stack, we sometimes end up with a package directory that is - // wrong. That's not our fault, and we can't simply break the - // tool until we fix the underlying issue. - println(err.Error()) - c.cachedDeclCounts[pkgDir] = nil - return nil - } - if len(pkgs) == 0 { - // TODO: investigate why this started happening after switching - // to go/packages - return nil - } - pkg := pkgs[pkgName] - count := make(map[string]int) - for _, file := range pkg.Files { - for _, decl := range file.Decls { - fd, ok := decl.(*ast.FuncDecl) - if !ok { - continue - } - name := recvPrefix(fd.Recv) + fd.Name.Name - count[name]++ - } - } - c.cachedDeclCounts[key] = count - return count -} - -// recvPrefix returns the string prefix for a receiver field list. Star -// expressions are ignored, so as to conservatively assume that pointer and -// non-pointer receivers may still implement the same function. -// -// For example, for "function (*Foo) Bar()", recvPrefix will return "Foo.". -func recvPrefix(recv *ast.FieldList) string { - if recv == nil { - return "" - } - expr := recv.List[0].Type - for { - star, ok := expr.(*ast.StarExpr) - if !ok { - break - } - expr = star.X - } - switch expr := expr.(type) { - case *ast.Ident: - return expr.Name + "." - case *ast.IndexExpr: - return expr.X.(*ast.Ident).Name + "." - case *ast.IndexListExpr: - return expr.X.(*ast.Ident).Name + "." - default: - panic(fmt.Sprintf("unexpected receiver AST node: %T", expr)) - } -} - -// multipleImpls reports whether a function has multiple implementations in the -// source code. For example, if there are different function bodies depending on -// the operating system or architecture. That tends to mean that an unused -// parameter in one implementation may not be unused in another. 
-func (c *Checker) multipleImpls(pkg *packages.Package, fn *ssa.Function) bool { - if fn.Parent() != nil { // nested func - return false - } - path := c.prog.Fset.Position(fn.Pos()).Filename - count := c.declCounts(filepath.Dir(path), pkg.Types.Name()) - name := fn.Name() - if recv := fn.Signature.Recv(); recv != nil { - named := findNamed(recv.Type()) - name = named.Obj().Name() + "." + name - } - return count[name] > 1 -} - -// receivesExtractedArgs returns the statically called function, if its multiple -// arguments were all received via another function call. That is, if a call to -// function "foo" was of the form "foo(bar())". This often means that the -// parameters in "foo" are difficult to remove, even if unused. -func receivesExtractedArgs(freeVars map[*ssa.FreeVar]*ssa.Function, call ssa.CallInstruction) *ssa.Function { - comm := call.Common() - callee := findFunction(freeVars, comm.Value) - if callee == nil { - return nil - } - if callee.Signature.Params().Len() < 2 { - // there aren't multiple parameters - return nil - } - args := comm.Args - if callee.Signature.Recv() != nil { - // skip the receiver argument - args = args[1:] - } - if c := callExtract(call, args); c != nil { - return callee - } - return nil -} - -// callExtract returns the call instruction fn(...) if it is used directly as -// arguments to the parent instruction, such as fn2(fn(...)) or return fn(...). -func callExtract(parent ssa.Instruction, values []ssa.Value) *ssa.Call { - if len(values) == 1 { - if call, ok := values[0].(*ssa.Call); ok { - return call - } - } - var prev *ssa.Call - for i, val := range values { - ext, ok := val.(*ssa.Extract) - if !ok { - return nil - } - if ext.Index != i { - return nil // not extracted in the same order - } - call, ok := ext.Tuple.(*ssa.Call) - if !ok { - return nil // not a call - } - if prev == nil { - prev = call - } else if prev != call { - return nil // not the same calls - } - } - if prev == nil { - return nil - } - if prev.Call.Signature().Results().Len() != len(values) { - return nil // not extracting all the results - } - if prev.Pos() < parent.Pos() { - // Of the form: - // - // a, b := fn() - // fn2(a, b) - return nil - } - return prev -} - -// paramDesc returns a string describing a parameter variable. If the parameter -// had no name, the function will fall back to describing the parameter by its -// position within the parameter list and its type. -func paramDesc(i int, v *types.Var) string { - name := v.Name() - if name != "" && name != "_" { - return name - } - return fmt.Sprintf("%d (%s)", i, v.Type().String()) -}